From 14760f664ba58d33ef6990c0516b44ba146d77ec Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 18 Apr 2018 13:07:11 -0700 Subject: [PATCH 0001/1339] API Client Generator for Python (#1) This is the initial, proof of concept API client generator for Python. This is implemented as a `protoc` plugin, and generates a client library based on an API defined by protocol buffers. Some additional annotations are required in the proto, which are discussed in the accompanying documentation. --- packages/gapic-generator/.circleci/config.yml | 35 + packages/gapic-generator/.coveragerc | 13 + packages/gapic-generator/.gitignore | 61 ++ packages/gapic-generator/.readthedocs.yml | 6 + packages/gapic-generator/CONTRIBUTING.rst | 22 + packages/gapic-generator/LICENSE | 202 +++++ packages/gapic-generator/Pipfile | 24 + packages/gapic-generator/Pipfile.lock | 115 +++ packages/gapic-generator/README.rst | 44 + .../gapic-generator/api_factory/__init__.py | 13 + .../gapic-generator/api_factory/cli/dump.py | 46 + .../api_factory/cli/generate.py | 46 + .../api_factory/generator/__init__.py | 26 + .../api_factory/generator/files/.flake8 | 6 + .../api_factory/generator/files/MANIFEST.in | 4 + .../api_factory/generator/files/setup.cfg | 2 + .../api_factory/generator/generator.py | 172 ++++ .../api_factory/generator/loader.py | 72 ++ .../generator/templates/README.rst.j2 | 29 + .../generator/templates/_base.py.j2 | 9 + .../templates/service/__init__.py.j2 | 9 + .../generator/templates/service/client.py.j2 | 102 +++ .../service/transports/__init__.py.j2 | 52 ++ .../templates/service/transports/base.py.j2 | 54 ++ .../templates/service/transports/grpc.py.j2 | 99 +++ .../templates/service/transports/http.py.j2 | 78 ++ .../generator/templates/setup.py.j2 | 44 + .../api_factory/schema/__init__.py | 32 + .../gapic-generator/api_factory/schema/api.py | 311 +++++++ .../api_factory/schema/metadata.py | 84 ++ .../api_factory/schema/pb/client_pb2.py | 186 ++++ .../schema/pb/expr/v1/syntax_pb2.py 
| 832 ++++++++++++++++++ .../schema/pb/expr/v1/value_pb2.py | 326 +++++++ .../api_factory/schema/pb/headers_pb2.py | 42 + .../api_factory/schema/pb/lro_pb2.py | 89 ++ .../api_factory/schema/pb/overload_pb2.py | 178 ++++ .../api_factory/schema/pb/resources_pb2.py | 99 +++ .../api_factory/schema/pb/stability_pb2.py | 120 +++ .../api_factory/schema/wrappers.py | 181 ++++ .../api_factory/utils/__init__.py | 28 + .../api_factory/utils/cache.py | 43 + .../gapic-generator/api_factory/utils/case.py | 47 + .../api_factory/utils/lines.py | 96 ++ .../api_factory/utils/placeholder.py | 23 + packages/gapic-generator/docs/Makefile | 20 + .../docs/api-configuration.rst | 135 +++ packages/gapic-generator/docs/conf.py | 168 ++++ .../gapic-generator/docs/getting-started.rst | 123 +++ packages/gapic-generator/docs/index.rst | 26 + packages/gapic-generator/docs/installing.rst | 71 ++ packages/gapic-generator/docs/process.rst | 112 +++ .../docs/reference/generator.rst | 10 + .../gapic-generator/docs/reference/index.rst | 29 + .../gapic-generator/docs/reference/schema.rst | 22 + .../gapic-generator/docs/reference/utils.rst | 8 + packages/gapic-generator/docs/status.rst | 24 + packages/gapic-generator/nox.py | 54 ++ packages/gapic-generator/setup.py | 65 ++ .../tests/unit/generator/test_generator.py | 215 +++++ .../tests/unit/generator/test_loader.py | 39 + .../tests/unit/schema/test_api.py | 295 +++++++ .../tests/unit/schema/test_metadata.py | 75 ++ .../tests/unit/schema/wrappers/test_enums.py | 45 + .../tests/unit/schema/wrappers/test_field.py | 44 + .../unit/schema/wrappers/test_message.py | 47 + .../tests/unit/schema/wrappers/test_method.py | 72 ++ .../unit/schema/wrappers/test_service.py | 119 +++ .../tests/unit/utils/test_cache.py | 33 + .../tests/unit/utils/test_case.py | 27 + .../tests/unit/utils/test_lines.py | 58 ++ .../tests/unit/utils/test_placeholder.py | 23 + 71 files changed, 6061 insertions(+) create mode 100644 packages/gapic-generator/.circleci/config.yml create mode 
100644 packages/gapic-generator/.coveragerc create mode 100644 packages/gapic-generator/.gitignore create mode 100644 packages/gapic-generator/.readthedocs.yml create mode 100644 packages/gapic-generator/CONTRIBUTING.rst create mode 100644 packages/gapic-generator/LICENSE create mode 100644 packages/gapic-generator/Pipfile create mode 100644 packages/gapic-generator/Pipfile.lock create mode 100644 packages/gapic-generator/README.rst create mode 100644 packages/gapic-generator/api_factory/__init__.py create mode 100644 packages/gapic-generator/api_factory/cli/dump.py create mode 100644 packages/gapic-generator/api_factory/cli/generate.py create mode 100644 packages/gapic-generator/api_factory/generator/__init__.py create mode 100644 packages/gapic-generator/api_factory/generator/files/.flake8 create mode 100644 packages/gapic-generator/api_factory/generator/files/MANIFEST.in create mode 100644 packages/gapic-generator/api_factory/generator/files/setup.cfg create mode 100644 packages/gapic-generator/api_factory/generator/generator.py create mode 100644 packages/gapic-generator/api_factory/generator/loader.py create mode 100644 packages/gapic-generator/api_factory/generator/templates/README.rst.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/_base.py.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/service/__init__.py.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/service/client.py.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/service/transports/__init__.py.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/service/transports/base.py.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/service/transports/grpc.py.j2 create mode 100644 packages/gapic-generator/api_factory/generator/templates/service/transports/http.py.j2 create mode 100644 
packages/gapic-generator/api_factory/generator/templates/setup.py.j2 create mode 100644 packages/gapic-generator/api_factory/schema/__init__.py create mode 100644 packages/gapic-generator/api_factory/schema/api.py create mode 100644 packages/gapic-generator/api_factory/schema/metadata.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/client_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/headers_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/lro_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/overload_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/resources_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/pb/stability_pb2.py create mode 100644 packages/gapic-generator/api_factory/schema/wrappers.py create mode 100644 packages/gapic-generator/api_factory/utils/__init__.py create mode 100644 packages/gapic-generator/api_factory/utils/cache.py create mode 100644 packages/gapic-generator/api_factory/utils/case.py create mode 100644 packages/gapic-generator/api_factory/utils/lines.py create mode 100644 packages/gapic-generator/api_factory/utils/placeholder.py create mode 100644 packages/gapic-generator/docs/Makefile create mode 100644 packages/gapic-generator/docs/api-configuration.rst create mode 100644 packages/gapic-generator/docs/conf.py create mode 100644 packages/gapic-generator/docs/getting-started.rst create mode 100644 packages/gapic-generator/docs/index.rst create mode 100644 packages/gapic-generator/docs/installing.rst create mode 100644 packages/gapic-generator/docs/process.rst create mode 100644 packages/gapic-generator/docs/reference/generator.rst create mode 100644 packages/gapic-generator/docs/reference/index.rst create mode 100644 
packages/gapic-generator/docs/reference/schema.rst create mode 100644 packages/gapic-generator/docs/reference/utils.rst create mode 100644 packages/gapic-generator/docs/status.rst create mode 100644 packages/gapic-generator/nox.py create mode 100644 packages/gapic-generator/setup.py create mode 100644 packages/gapic-generator/tests/unit/generator/test_generator.py create mode 100644 packages/gapic-generator/tests/unit/generator/test_loader.py create mode 100644 packages/gapic-generator/tests/unit/schema/test_api.py create mode 100644 packages/gapic-generator/tests/unit/schema/test_metadata.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_field.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_message.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_method.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_service.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_cache.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_case.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_lines.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_placeholder.py diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml new file mode 100644 index 000000000000..5d4f489edabc --- /dev/null +++ b/packages/gapic-generator/.circleci/config.yml @@ -0,0 +1,35 @@ +--- +version: 2 +workflows: + version: 2 + tests: + jobs: + - unit + - docs +jobs: + unit: + docker: + - image: 'python:3.6' + steps: + - checkout + - run: + name: Install nox and codecov. + command: pip install nox-automation codecov + - run: + name: Run unit tests. + command: nox -s unit + - run: + name: Submit coverage data to codecov. 
+ command: codecov + when: always + docs: + docker: + - image: 'python:3.6' + steps: + - checkout + - run: + name: Install nox. + command: pip install nox-automation + - run: + name: Build the documentation. + command: nox -s docs diff --git a/packages/gapic-generator/.coveragerc b/packages/gapic-generator/.coveragerc new file mode 100644 index 000000000000..b0654cf735fa --- /dev/null +++ b/packages/gapic-generator/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True +omit = + *_pb2.py + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore new file mode 100644 index 000000000000..f2185a4d2b5a --- /dev/null +++ b/packages/gapic-generator/.gitignore @@ -0,0 +1,61 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.tox +.cache +.pytest_cache +htmlcov + +# Translations +*.mo + +# Mac +.DS_Store + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# JetBrains +.idea + +# Built documentation +docs/_build +docs/_build_doc2dash + +# Virtual environment +env/ +coverage.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test diff --git a/packages/gapic-generator/.readthedocs.yml b/packages/gapic-generator/.readthedocs.yml new file mode 100644 index 000000000000..ebea21ff0c06 --- /dev/null +++ b/packages/gapic-generator/.readthedocs.yml @@ -0,0 +1,6 @@ +--- +build: + image: latest +python: + pip_install: true + version: 3.6 diff --git a/packages/gapic-generator/CONTRIBUTING.rst b/packages/gapic-generator/CONTRIBUTING.rst new file mode 100644 index 000000000000..2e0d8efd6790 --- /dev/null +++ b/packages/gapic-generator/CONTRIBUTING.rst @@ -0,0 +1,22 @@ +Contributing +============ + +We are thrilled that you are interested in contributing to this project. +Please open an issue or pull request with your ideas. + + +Contributor License Agreements +------------------------------ + +Before we can accept your pull requests, you will need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/gapic-generator/LICENSE b/packages/gapic-generator/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/gapic-generator/Pipfile b/packages/gapic-generator/Pipfile new file mode 100644 index 000000000000..ff6edced0cbf --- /dev/null +++ b/packages/gapic-generator/Pipfile @@ -0,0 +1,24 @@ +[[source]] + +url = "https://pypi.python.org/simple" +verify_ssl = true +name = "pypi" + + +[packages] + +grpcio = "*" +protobuf = "*" +"jinja2" = "*" +dataclasses = "*" +click = "*" +googleapis-common-protos = "*" + + +[dev-packages] + + + +[requires] + +python_version = "3.6" diff --git a/packages/gapic-generator/Pipfile.lock b/packages/gapic-generator/Pipfile.lock new file mode 100644 index 000000000000..ea961d7f622e --- /dev/null +++ b/packages/gapic-generator/Pipfile.lock @@ -0,0 +1,115 @@ +{ + "_meta": { + "hash": { + "sha256": "de73dab8d1077b288091934773dd8b6a3ca2e375296de78466a98607d7df0fc9" + }, + "host-environment-markers": { + "implementation_name": "cpython", + "implementation_version": "3.6.3", + "os_name": "posix", + "platform_machine": "x86_64", + "platform_python_implementation": "CPython", + "platform_release": "4.9.0-5-amd64", + "platform_system": "Linux", + "platform_version": "#1 SMP Debian 4.9.65-3+deb9u2 (2018-01-04)", + "python_full_version": "3.6.3", + "python_version": "3.6", + "sys_platform": "linux" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.6" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.python.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "click": { + "hashes": [ + "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", + "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" + ], + "version": "==6.7" + }, + "dataclasses": { + "hashes": [ + "sha256:068953b730c80cbb13ca6aac6ceedaa5d483fb6081a372ac4788aa5179ed9597", + "sha256:0f75133f21f6c9bd0fe82bc75d9908e46f531682911c9cffa75bce0e40ef09ef" + ], + "version": "==0.4" + }, + "googleapis-common-protos": { + "hashes": [ + "sha256:c075eddaa2628ab519e01b7d75b76e66c40eaa50fc52758d8225f84708950ef2" 
+ ], + "version": "==1.5.3" + }, + "grpcio": { + "hashes": [ + "sha256:210cccde7b2e1e19dbde16660079973e26738db4a513943dd155bbcfbfcf62b5", + "sha256:96e621c962c65162edff0eac6bfac9fae99efe80fc6834952d87d05d077fe455", + "sha256:28212798de5567e5a5e1b2e2851819d4a414286a4fe76258132a1c14bae0f1de", + "sha256:9d43ba86f79de04f24dfbd3e62e8817f34eb11789758687a69dc6d94182044db", + "sha256:76e6d1e510c67afd59599d6c9797bb7a84a696d461b0ab5e42803b110e5052ed", + "sha256:6f3bc94db73e688cf8c25d80c15118f87b86c220cf284a2b29d6f1c4426e4550", + "sha256:1ed24f893ff74f7761c448942798cd2fa7194052dae8f2c521f30bff2eedce67", + "sha256:f5d1ff94fbbc0be64991d1da935cf42a09635c71cf41137b35fbee5d9658f637", + "sha256:74bdc3c632e8104c6f269fed1e950a0dd4800e605754f2121f2a555e3e1af131", + "sha256:dec40532a9889b2b645550adabb89644e8865b29a2922db9a8e9856298595104", + "sha256:84757cf49728455ed7cf1c36b0a9e06f658eb5297a206cd79174fc89d8e0ddf5", + "sha256:123cdff8c1306edcdf4a261b4fd7bd1efe07b679e6c1b6019789d7401b2dd1e6", + "sha256:9f7c8489b4a8c6be3022ea085387abe3d4973f21c7ef3641fbfc8f0e0c5e591f", + "sha256:bbbcd2a00c52e8af9924621cc2f7f735323ed7c8ca90cb5c40645e379b8d46ea", + "sha256:32950a2edcf919999e97885d14011efb276fdb6c712a9a0739abfa6c357fc0db", + "sha256:f4255661045038e1a424c7e2b1a93ed58222bf36363bd698f4ee687060767e5b", + "sha256:7ab975ae3af94f66c61a968d71f5da1d8cdbd5c31220d0eb195bd14c3384f3a1", + "sha256:8e7e7e464b66cf9da50f2f503a8be4b9fa99aae90e361c3b24f5e16b597f0f89", + "sha256:6ec617d4ce4cdccfbcb677b3dbf2eb45a0b521fa7fe3eec8412f342928aa60d7", + "sha256:5bdfb3eaff04e121b6d34b88a1a55d97c816961b7f94730a3e7aa2d880943e84", + "sha256:6099cbf59f487ebe0662ba938a3a7f786be97d081bbe3c0a1616466b6081a697", + "sha256:b2582d74e236c32a325b51013b40479cc7a80c9a87d2d02ad85f98a2201aa098", + "sha256:e7c43b5619deff48cc177c1b0618c4beeb2797f910f160e3c2035d5baf790a5d" + ], + "version": "==1.9.1" + }, + "jinja2": { + "hashes": [ + "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", + 
"sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" + ], + "version": "==2.10" + }, + "markupsafe": { + "hashes": [ + "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" + ], + "version": "==1.0" + }, + "protobuf": { + "hashes": [ + "sha256:11788df3e176f44e0375fe6361342d7258a457b346504ea259a21b77ffc18a90", + "sha256:50c24f0d00b7efb3a72ae638ddc118e713cfe8cef40527afe24f7ebcb878e46d", + "sha256:41661f9a442eba2f1967f15333ebe9ecc7e7c51bcbaa2972303ad33a4ca0168e", + "sha256:06ec363b74bceb7d018f2171e0892f03ab6816530e2b0f77d725a58264551e48", + "sha256:b20f861b55efd8206428c13e017cc8e2c34b40b2a714446eb202bbf0ff7597a6", + "sha256:c1f9c36004a7ae6f1ce4a23f06070f6b07f57495f251851aa15cc4da16d08378", + "sha256:4d2e665410b0a278d2eb2c0a529ca2366bb325eb2ae34e189a826b71fb1b28cd", + "sha256:95b78959572de7d7fafa3acb718ed71f482932ddddddbd29ba8319c10639d863" + ], + "version": "==3.5.1" + }, + "six": { + "hashes": [ + "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb", + "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" + ], + "version": "==1.11.0" + } + }, + "develop": {} +} diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst new file mode 100644 index 000000000000..2132297304de --- /dev/null +++ b/packages/gapic-generator/README.rst @@ -0,0 +1,44 @@ +API Client Generator for Python +=============================== + +|release level| |ci| |docs| |codecov| + + A generator for protocol buffer described APIs for and in Python 3. + +This is a proof-of-concept generator for API client libraries for APIs +specified by `protocol buffers`_, such as those inside Google. +It takes a protocol buffer (with particular annotations) and uses it +to generate a client library. + +.. 
_protocol buffers: https://developers.google.com/protocol-buffers/ + +Purpose +------- + +This library primarily exists to facilitate experimentation, particularly +regarding: + +- An explicit normalized format for specifying APIs. +- Light weight, in-language code generators. + +Documentation +------------- + +To learn more, consult the `documentation`_. + +.. _documentation: https://gapic-generator-python.readthedocs.io/ + +Disclaimer +---------- + +This is not an official Google product. + + +.. |release level| image:: https://img.shields.io/badge/release%20level-pre%20alpha-red.svg?style=flat + :target: https://cloud.google.com/terms/launch-stages +.. |docs| image:: https://readthedocs.org/projects/gapic-generator-python/badge/?version=latest + :target: https://gapic-generator-python.readthedocs.io/ +.. |ci| image:: https://circleci.com/gh/googleapis/gapic-generator-python.svg?style=shield + :target: https://circleci.com/gh/googleapis/gapic-generator-python +.. |codecov| image:: https://codecov.io/gh/googleapis/gapic-generator-python/graph/badge.svg + :target: https://codecov.io/gh/googleapis/gapic-generator-python diff --git a/packages/gapic-generator/api_factory/__init__.py b/packages/gapic-generator/api_factory/__init__.py new file mode 100644 index 000000000000..b0c7da3d7725 --- /dev/null +++ b/packages/gapic-generator/api_factory/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
@click.command()
@click.option('--request', type=click.File('rb'), default=sys.stdin.buffer,
              help='Location of the `CodeGeneratorRequest` to be dumped. '
                   'This defaults to stdin (which is what protoc uses) '
                   'but this option can be set for testing/debugging.')
def dump(request: typing.BinaryIO) -> None:
    """Dump the CodeGeneratorRequest, unmodified, to ``request.desc``.

    Args:
        request (typing.BinaryIO): A binary stream containing a
            serialized ``CodeGeneratorRequest``. Defaults to stdin,
            matching the protoc plugin contract.
    """
    # Ideally, this would output a CodeGeneratorResponse with the content
    # of the CodeGeneratorRequest. Sadly, that does not work because
    # the `content` field is a string, not bytes, and requests are not
    # valid utf-8.

    # Dump the CodeGeneratorRequest to disk.
    with io.open('request.desc', 'wb+') as output:
        output.write(request.read())

    # Log what happened.
    # NOTE: `click.secho` applies color through the `fg` styling keyword;
    # the previous `color='green'` argument only force-enabled colorized
    # output (``color`` is a boolean toggle) without actually making the
    # message green.
    click.secho(
        'Request dumped to `request.desc`. '
        'This script will now exit 1 to satisfy protoc.',
        file=sys.stderr, fg='green',
    )
    sys.exit(1)
+ output.write(res.SerializeToString()) diff --git a/packages/gapic-generator/api_factory/generator/__init__.py b/packages/gapic-generator/api_factory/generator/__init__.py new file mode 100644 index 000000000000..3172c0b549d4 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/__init__.py @@ -0,0 +1,26 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The ``generator`` module contains the code generation logic. + +The core of this work is around the :class:`~.generator.Generator` class, +which divides up the processing of individual templates. 
+""" + +from .generator import Generator + + +__all__ = ( + 'Generator', +) diff --git a/packages/gapic-generator/api_factory/generator/files/.flake8 b/packages/gapic-generator/api_factory/generator/files/.flake8 new file mode 100644 index 000000000000..25168dc87605 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/files/.flake8 @@ -0,0 +1,6 @@ +[flake8] +exclude = + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/api_factory/generator/files/MANIFEST.in b/packages/gapic-generator/api_factory/generator/files/MANIFEST.in new file mode 100644 index 000000000000..fc77f8c82ff0 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/files/MANIFEST.in @@ -0,0 +1,4 @@ +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include tests * +global-exclude *.pyc __pycache__ diff --git a/packages/gapic-generator/api_factory/generator/files/setup.cfg b/packages/gapic-generator/api_factory/generator/files/setup.cfg new file mode 100644 index 000000000000..2a9acf13daa9 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/files/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py new file mode 100644 index 000000000000..27e16179428a --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -0,0 +1,172 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
class Generator:
    """A protoc code generator for client libraries.

    This class receives a :class:`~.plugin_pb2.CodeGeneratorRequest` (as per
    the protoc plugin contract), and provides an interface for getting
    a :class:`~.plugin_pb2.CodeGeneratorResponse`.

    The request contains one or more protocol buffer files which
    collectively describe an API.

    Args:
        request (CodeGeneratorRequest): A request protocol buffer as provided
            by protoc. See ``plugin.proto``.
    """
    def __init__(self, request: CodeGeneratorRequest) -> None:
        # Parse the CodeGeneratorRequest into this plugin's internal schema.
        self._api = API()
        for fdp in request.proto_file:
            self._api.load(fdp)

        # Create the jinja environment with which to render templates.
        self._env = jinja2.Environment(loader=TemplateLoader(
            searchpath=os.path.join(_dirname, 'templates'),
        ))

        # Add filters which templates require.
        self._env.filters['snake_case'] = utils.to_snake_case
        self._env.filters['subsequent_indent'] = utils.subsequent_indent
        self._env.filters['wrap'] = utils.wrap

    def get_response(self) -> CodeGeneratorResponse:
        """Return a :class:`~.CodeGeneratorResponse` for this library.

        This is a complete response to be written to (usually) stdout, and
        thus read by ``protoc``.

        Returns:
            ~.CodeGeneratorResponse: A response describing appropriate
                files and contents. See ``plugin.proto``.
        """
        output_files = []

        # Some templates are rendered once per API client library.
        # These are generally boilerplate packaging and metadata files.
        output_files += self._render_templates(self._env.loader.api_templates)

        # Some templates are rendered once per service (an API may have
        # one or more services).
        for service in self._api.services.values():
            output_files += self._render_templates(
                self._env.loader.service_templates,
                transform_filename=service.transform_filename,
                additional_context={'service': service},
            )

        # Some files are direct files and not templates; simply read them
        # into output files directly.
        #
        # Rather than expect an enumeration of these, we simply grab
        # everything in the `files/` directory automatically.
        output_files += self._read_flat_files(os.path.join(_dirname, 'files'))

        # Return the CodeGeneratorResponse output.
        return CodeGeneratorResponse(file=output_files)

    def _render_templates(
            self,
            templates: Iterable[str], *,
            transform_filename: Callable[[str], str] = lambda fn: fn,
            additional_context: Mapping[str, Any] = None,
            ) -> Sequence[CodeGeneratorResponse.File]:
        """Render the requested templates.

        Args:
            templates (Iterable[str]): The set of templates to be rendered.
                It is expected that these come from the methods on
                :class:`~.loader.TemplateLoader`, and they should be
                able to be sent to the
                :meth:`jinja2.Environment.get_template` method.
            transform_filename (Callable[str, str]): A callable to
                rename the resulting file from the template name.
                Note that the `.j2` suffix is stripped automatically.
            additional_context (Mapping[str, Any]): Additional variables
                to be sent to the templates. The ``api`` variable
                is always available.

        Returns:
            Sequence[~.CodeGeneratorResponse.File]: A sequence of File
                objects for inclusion in the final response.
        """
        answer = []
        additional_context = additional_context or {}

        # Iterate over the provided templates and generate a File object
        # for each.
        for template_name in templates:
            # Get the appropriate output filename.
            output_filename = transform_filename(template_name[:-len('.j2')])

            # Generate the File object.
            answer.append(CodeGeneratorResponse.File(
                content=self._env.get_template(template_name).render(
                    api=self._api,
                    **additional_context
                ).strip() + '\n',
                name=output_filename,
            ))

        # Done; return the File objects based on these templates.
        return answer

    def _read_flat_files(
            self,
            target_dir: str,
            ) -> Sequence[CodeGeneratorResponse.File]:
        """Read the flat (non-template) files under ``target_dir``.

        Args:
            target_dir (str): The directory whose contents are copied
                verbatim into the generated library.

        Returns:
            Sequence[~.CodeGeneratorResponse.File]: One File object per
                file found (recursively) under ``target_dir``, named
                relative to ``target_dir``.
        """
        answer = []

        # Iterate over all files in the directory.
        for path, _, filenames in os.walk(target_dir):
            for filename in filenames:
                # Determine the filename relative to `target_dir` (and
                # therefore relative to the generated repository root).
                #
                # NOTE: `os.path.relpath` is used (rather than slicing the
                # `target_dir` prefix off of `path`) because slicing leaves
                # a leading path separator for files in subdirectories,
                # which would produce absolute-looking output names.
                full_filename = os.path.join(path, filename)
                relative_filename = os.path.relpath(full_filename, target_dir)

                # Read the file from disk and create an appropriate
                # OutputFile.
                with io.open(full_filename, 'r') as f:
                    answer.append(CodeGeneratorResponse.File(
                        content=f.read(),
                        name=relative_filename,
                    ))

        # Done; return the File objects.
        return answer
class TemplateLoader(jinja2.FileSystemLoader):
    """A jinja2 template loader that tracks what is left to be loaded.

    This class behaves identically to :class:`jinja2.FileSystemLoader`
    but provides methods to return templates segmented by type.

    There are two types of templates: templates that describe the API as a
    whole (and for which the template is rendered once per API), and
    templates describing a service (which are rendered once per service
    in the API).
    """
    @cached_property
    def api_templates(self) -> typing.Set[str]:
        """Return the (public) templates tied to the API as a whole.

        All templates in the ``templates/`` directory are included except:

        * Templates corresponding to services (in the ``service/``
          subdirectory) are excluded. See :meth:`service_templates`.
        * Templates beginning with ``_`` are excluded.

        When these templates are rendered, they are expected to be sent
        one and only one variable: an :class:`~.API` object spelled ``api``.

        Returns:
            Set[str]: A set of templates.
        """
        # Start with the full list of templates, excluding private ones,
        # but exclude templates claimed by other methods on this loader.
        return {
            t for t in self.list_templates() if not t.startswith('_')
        } - self.service_templates

    @cached_property
    def service_templates(self) -> typing.Set[str]:
        """Return the templates specific to each service.

        This corresponds to all of the templates in the
        ``templates/service/`` subdirectory (relative to this file).

        When these templates are rendered, they are expected to be sent
        two variables: an :class:`~.API` object spelled ``api``, and the
        :class:`~.wrappers.Service` object being iterated over, spelled
        ``service``. These templates are rendered once per service, with
        a distinct ``service`` variable each time.

        Returns:
            Set[str]: A set of service templates.
        """
        return {t for t in self.list_templates() if t.startswith('service/')}
diff --git a/packages/gapic-generator/api_factory/generator/templates/_base.py.j2 b/packages/gapic-generator/api_factory/generator/templates/_base.py.j2 new file mode 100644 index 000000000000..f1e8e49db186 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/_base.py.j2 @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +{%- if api.copyright %} +# Copyright {{ api.copyright.year }} {{ api.copyright.label }} +{%- if api.copyright.license %} +# +# {{ api.copyright.license.boilerplate_notice|subsequent_indent('# ') }} +{%- endif %}{% endif %} +{% block content %} +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/__init__.py.j2 b/packages/gapic-generator/api_factory/generator/templates/service/__init__.py.j2 new file mode 100644 index 000000000000..b6cea2a62e7b --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/service/__init__.py.j2 @@ -0,0 +1,9 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from .client import {{ service.name }} + +__all__ = ( + '{{ service.name }}', +) +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/client.py.j2 b/packages/gapic-generator/api_factory/generator/templates/service/client.py.j2 new file mode 100644 index 000000000000..42a89d9e638c --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/service/client.py.j2 @@ -0,0 +1,102 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import functools +import pkg_resources +from typing import Mapping, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import retry +from google.auth import credentials + +{% for package, pb2_module in service.pb2_modules -%} +from {{ package }} import {{ pb2_module }} +{% endfor %} +from .transports import get_transport_class +from .transports import {{ service.name }}Transport + + +# LIBRARY_VERSION: str = pkg_resources.get_distribution( +# '{{ 
api.warehouse_package_name }}', +# ).version + + +class {{ service.name }}: + """{{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} + """ + def __init__(self, *, + credentials: credentials.Credentials = None, + transport: Union[str, {{ service.name }}Transport] = None, + ) -> None: + """Instantiate the {{ (service.name|snake_case).replace('_', ' ') }}. + + Args: + credentials (Optional[google.auth.credentials.Credential]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.{{ service.name }}Transport): The + transport to use. If set to None, a transport is chosen + automatically. + """ + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, {{ service.name }}Transport): + if credentials: + raise ValueError('When providing a transport instance, ' + 'provide its credentials directly.') + self._transport = transport + else: + Transport = get_transport_class(transport) + self._transport = Transport(credentials=credentials) + + {% for method in service.methods.values() -%} + @functools.singledispatch + def {{ method.name|snake_case }}(self, + request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, + retry: retry.Retry = None, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + """{{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} + + Args: + request (~.{{ method.input.pb2_module }}.{{ method.input.name }}): + The request object. {{ method.input.meta.doc|wrap(width=72, + initial_width=36, subsequent_indent=' ' * 16) }} + retry (~.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent alont with the request as metadata. + + Returns: + ~.{{ method.output.pb2_module }}.{{ method.output.name }}: + {{ method.output.meta.doc|wrap(width=72, initial_width=56, + subsequent_indent=' ' * 16) }} + """ + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.{{ method.name|snake_case }}, + default_retry=None, # FIXME + default_timeout=None, # FIXME + client_info=self.client_info, + ) + + # Send the request and return the response. + return rpc(request, retry=retry, timeout=timeout, metadata=metadata) + {%- endfor %} + + @property + def client_info(self) -> gapic_v1.client_info.ClientInfo: + """Return information about this client (for metrics). + + Returns: + client_info.ClientInfo: An object with metadata about this + client library. + """ + return gapic_v1.client_info.ClientInfo() +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/__init__.py.j2 b/packages/gapic-generator/api_factory/generator/templates/service/transports/__init__.py.j2 new file mode 100644 index 000000000000..5db3dc32b83d --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/service/transports/__init__.py.j2 @@ -0,0 +1,52 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import collections +import typing + +from .base import {{ service.name }}Transport +from .http import {{ service.name }}HttpTransport + +# Compile a registry of transports. +_registry = collections.OrderedDict() + +# gRPC is not guaranteed to be available, because `grpcio` may or may not +# be installed. If it is available, however, it should be the "first in" +# (default). 
+try: + from .grpc import {{ service.name }}GrpcTransport + _registry['grpc'] = {{ service.name }}GrpcTransport +except ImportError: + pass + +# Always provide an HTTP/1.1 transport. +_registry['http'] = {{ service.name }}HttpTransport + + +# Provide a function for the client to get the appropriate transport. +def get_transport_class( + label: str = None, + ) -> typing.Type[{{ service.name }}Transport]: + """Return an appropriate transport class. + + Args: + label (str): The name of the desired transport. If none is provided, + then the first transport in the registry is used. + + Returns: + Type[{{ service.label }}Transport]: The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return _registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(_registry.values())) + + +__all__ = ( + '{{ service.name }}Transport', + 'get_transport_class', +) +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/base.py.j2 b/packages/gapic-generator/api_factory/generator/templates/service/transports/base.py.j2 new file mode 100644 index 000000000000..a951395015f9 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/service/transports/base.py.j2 @@ -0,0 +1,54 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import abc +import typing + +from google import auth +from google.auth import credentials + +{% for package, pb2_module in service.pb2_modules -%} +from {{ package }} import {{ pb2_module }} +{% endfor %} + +class {{ service.name }}Transport(metaclass=abc.ABCMeta): + """Abstract transport class for {{ service.name }}.""" + + SERVICE_ADDRESS: str = '{{ service.host }}' + + AUTH_SCOPES: typing.Tuple[str] = ( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ) + + def __init__( + self, *, + credentials: credentials.Credentials = None, + ) -> None: 
+ """Instantiate the transport. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + """ + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + + # Save the credentials. + self._credentials = credentials + + {% for method in service.methods.values() -%} + @abc.abstractmethod + def {{ method.name|snake_case }}( + self, + request: {{ method.input.pb2_module }}.{{ method.input.name }}, + ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + raise NotImplementedError + {% endfor -%} +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/generator/templates/service/transports/grpc.py.j2 new file mode 100644 index 000000000000..40a8d27941aa --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/service/transports/grpc.py.j2 @@ -0,0 +1,99 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from typing import Sequence, Tuple + +from google.api_core import grpc_helpers +from google.auth import credentials + +import grpc + +{% for package, pb2_module in service.pb2_modules -%} +from {{ package }} import {{ pb2_module }} +{% endfor %} +from .base import {{ service.name }}Transport + + +class {{ service.name }}GrpcTransport({{ service.name }}Transport): + """gRPC backend transport for {{ service.name }}. + + {{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + def __init__(self, *, credentials: credentials.Credentials = None) -> None: + """Instantiate the transport. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + """ + super().__init__(credentials=credentials) + self._stubs = {} + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if 'grpc_channel' not in self.__dict__: + self.__dict__['grpc_channel'] = grpc_helpers.create_channel( + '{0}:443'.format(self.SERVICE_ADDRESS), + credentials=self._credentials, + scopes=self.AUTH_SCOPES, + ) + + # Return the channel from cache. + return self.__dict__['grpc_channel'] + + {% for method in service.methods.values() -%} + def {{ method.name|snake_case }}(self, + request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, + metadata: Sequence[Tuple[str, str]] = (), + ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + """Call the {{ (method.name|snake_case).replace('_',' ')|wrap(width=70, + initial_width=25, subsequent_indent=" ") }} method over gRPC. + + {{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} + + Args: + request (~.{{ method.input.pb2_module }}.{{ method.input.name }}: + The request object. {{ method.input.meta.doc|wrap(width=72, + initial_width=36, subsequent_indent=' ' * 16) }} + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent alont with the request as metadata. 
+ + Returns: + ~.{{ method.output.pb2_module }}.{{ method.output.name }}: + {{ method.output.meta.doc|wrap(width=72, initial_width=56, + subsequent_indent=' ' * 16) }} + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '{{ method.name|snake_case }}' not in self._stubs: + self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.unary_unary( + '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + request_serializer={{ method.input.pb2_module }}.{{ method.input.name }}.SerializeToString, + response_deserializer={{ method.output.pb2_module }}.{{ method.output.name }}.FromString, + ) + stub = self._stubs['{{ method.name|snake_case }}'] + + # Return the response. + return stub(request, metadata=metadata) + + {% endfor -%} +{%- endblock -%} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/http.py.j2 b/packages/gapic-generator/api_factory/generator/templates/service/transports/http.py.j2 new file mode 100644 index 000000000000..1098af384caa --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/service/transports/http.py.j2 @@ -0,0 +1,78 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from typing import Sequence, Tuple + +from google.auth import credentials +from google.auth.transport.requests import AuthorizedSession + +{% for package, pb2_module in service.pb2_modules -%} +from {{ package }} import {{ pb2_module }} +{% endfor %} +from .base import {{ service.name }}Transport + + +class {{ service.name }}HttpTransport({{ service.name }}Transport): + """HTTP backend transport for {{ service.name }}. + + {{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire, but uses HTTP/1.1. + """ + def __init__(self, *, + credentials: credentials.Credentials = None, + ) -> None: + """Instantiate the transport. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + """ + super().__init__(credentials=credentials) + self._session = AuthorizedSession(self._credentials) + + {% for method in service.methods.values() -%} + def {{ method.name|snake_case }}(self, + request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, + metadata: Sequence[Tuple[str, str]] = (), + ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + """Call the {{ (method.name|snake_case).replace('_',' ')|wrap(width=70, + initial_width=25, subsequent_indent=" ") }} method over HTTP. + + Args: + request (~.{{ method.input.pb2_module }}.{{ method.input.name }}: + The request object. {{ method.input.meta.doc|wrap(width=72, + initial_width=36, subsequent_indent=' ' * 16) }} + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent alont with the request as metadata. + + Returns: + ~.{{ method.output.pb2_module }}.{{ method.output.name }}: + {{ method.output.meta.doc|wrap(width=72, initial_width=56, + subsequent_indent=' ' * 16) }} + """ + # Serialize the input. + data = request.SerializeToString() + + # Send the request. + response = self._session.post( + f'https://{self.SERVICE_ADDRESS}/$rpc/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + data=data, + headers={ + 'content-type': 'application/x-protobuf', + }, + ) + + # Return the response. 
+ return {{ method.output.pb2_module }}.{{ method.output.name }}.FromString( + response.content, + ) + {% endfor %} +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/setup.py.j2 b/packages/gapic-generator/api_factory/generator/templates/setup.py.j2 new file mode 100644 index 000000000000..7168b2499b20 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/setup.py.j2 @@ -0,0 +1,44 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import io +import os + +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) +with io.open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: + README = file_obj.read() + + +setup( + name='{{ api.warehouse_package_name }}', + author='{{ api.client.copyright.fullname }}', + version='0.0.1', + description='{{ api.client.documentation.tagline|wrap(width=70, initial_width=50, subsequent_indent=" '", antecedent_trailer=" '") }}', + long_description=README, + license={% if api.client.license %}'{{ api.client.license.replace('-', ' ') }}'{% else %}None{% endif %}, + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core >= 0.1.4, < 0.2.0dev', + 'grpcio >= 1.10.0', + ), + classifiers=( + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + {%- if api.client.license %} + 'License :: OSI Approved :: {{ api.client.license }}', + {%- endif %} + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ), + zip_safe=False, +) +{% endblock %} diff --git a/packages/gapic-generator/api_factory/schema/__init__.py b/packages/gapic-generator/api_factory/schema/__init__.py new file mode 100644 index 000000000000..6c43fc560383 --- /dev/null +++ 
"""The ``schema`` module provides a normalized API representation.

In general, this module can be considered in three parts: wrappers,
metadata, and a roll-up view of an API as a whole.

These three parts are divided into the three component modules.
"""

from api_factory.schema.api import API
from api_factory.schema import metadata
from api_factory.schema import wrappers


# NOTE: `__all__` must list names actually bound in this module. The
# previous value listed 'api', which is never bound here (only the `API`
# class is imported from it), so a star-import of this package would
# have raised AttributeError.
__all__ = (
    'API',
    'metadata',
    'wrappers',
)
+ +"""This module contains the "roll-up" class, :class:`~.API`. +Everything else in the :mod:`~.schema` module is usually accessed +through an :class:`~.API` object. +""" + +import collections +import dataclasses +import sys +from typing import Callable, List, Mapping + +from google.protobuf import descriptor_pb2 + +from api_factory import utils +from api_factory.schema import metadata +from api_factory.schema import wrappers +from api_factory.schema.pb import client_pb2 + + +@dataclasses.dataclass +class API: + """A representation of a full API. + + This represents a top-down view of a complete API, as loaded from a + set of protocol buffer files. Once the descriptors are loaded + (see :meth:`load`), this object contains every message, method, service, + and everything else needed to write a client library. + + An instance of this object is made available to every template + (as ``api``). + """ + client: client_pb2.Client = dataclasses.field( + default_factory=client_pb2.Client, + ) + services: Mapping[str, wrappers.Service] = dataclasses.field( + default_factory=dict, + ) + messages: Mapping[str, wrappers.MessageType] = dataclasses.field( + default_factory=dict, + ) + enums: Mapping[str, wrappers.EnumType] = dataclasses.field( + default_factory=dict, + ) + + @property + def long_name(self) -> str: + """Return an appropriate title-cased long name.""" + return ' '.join(list(self.client.namespace) + [self.client.name]) + + @property + def warehouse_package_name(self) -> str: + """Return the appropriate Python package name for Warehouse.""" + # Sanity check: If no name is provided, use a clearly placeholder + # default that is not a valid name on Warehouse. + if not self.client.name: + return utils.Placeholder('<<< PACKAGE NAME >>>') + + # Piece the name and namespace together to come up with the + # proper package name. 
+        answer = list(self.client.namespace) + [self.client.name]
+        return '-'.join(answer).lower()
+
+    def load(self, fdp: descriptor_pb2.FileDescriptorProto) -> None:
+        """Load the provided FileDescriptorProto into this object.
+
+        This method iterates over the complete descriptor and loads all
+        of its members (services, methods, messages, enums, etc.) into
+        this object, wrapping each descriptor in a wrapper to preserve
+        metadata.
+
+        This method modifies the :class:`~.API` object in-place.
+
+        Args:
+            fdp (~.descriptor_pb2.FileDescriptorProto): The
+                :class:`FileDescriptorProto` object; this is usually provided
+                as a list in :class:`CodeGeneratorRequest`.
+        """
+        # Compile together the comments from the source code.
+        # This creates a nested dictionary structure sorted by the
+        # location paths. So, a location with path [4, 1, 2, 7] will end
+        # up being in `comments_by_path` under [4][1][2][7]['TERMINAL'].
+        #
+        # The purpose of always ending with 'TERMINAL' is because there
+        # always could be something nested deeper.
+        comments_by_path = {}
+        for loc in fdp.source_code_info.location:
+            cursor = comments_by_path
+            for p in loc.path:
+                cursor.setdefault(p, {})
+                cursor = cursor[p]
+            cursor['TERMINAL'] = loc
+
+        # Now iterate over the FileDescriptorProto and grab the relevant
+        # source documentation from the dictionary created above and
+        # add this to the `self.comments` dictionary, which is sorted by
+        # fully-qualified proto identifiers.
+        #
+        # The hard-coded keys here are based on how descriptor.proto
+        # works; it uses the proto message number of the pieces of each
+        # message (e.g. the hard-code `4` for `message_type` immediately
+        # below is because `repeated DescriptorProto message_type = 4;` in
+        # descriptor.proto itself).
+        address = metadata.Address(
+            module=fdp.name.split('/')[-1][:-len('.proto')],
+            package=fdp.package.split('.'),
+        )
+        self._load_children(fdp.message_type, loader=self._load_descriptor,
+                            address=address, info=comments_by_path.get(4, {}))
+        self._load_children(fdp.enum_type, loader=self._load_enum,
+                            address=address, info=comments_by_path.get(5, {}))
+        self._load_children(fdp.service, loader=self._load_service,
+                            address=address, info=comments_by_path.get(6, {}))
+        # self._load_children(fdp.extension, loader=self._load_field,
+        #                     address=address, info=comments_by_path.get(7, {}))
+
+        # Merge any client directives with what we have going so far.
+        self.client.MergeFrom(fdp.options.Extensions[client_pb2.client])
+
+    def _load_children(self, children: list, loader: Callable,
+                       address: metadata.Address, info: dict) -> None:
+        """Load arbitrary children from a Descriptor.
+
+        Args:
+            children (list): A sequence of children of the given field to
+                be loaded. For example, a FileDescriptorProto contains the
+                lists ``message_type``, ``enum_type``, etc.; these are valid
+                inputs for this argument.
+            loader (Callable[Message, Address, dict]): The function able
+                to load the kind of message in ``children``. This should
+                be one of the ``_load_{noun}`` methods on this class
+                (e.g. ``_load_descriptor``).
+            address (~.metadata.Address): The address up to this point.
+                This will include the package and may include outer messages.
+            info (dict): A dictionary of comment information corresponding to
+                the messages being loaded. In other words, this is
+                the segment of the source info that has paths matching
+                or within ``children``.
+        """
+        # Iterate over the list of children provided and call the
+        # applicable loader function on each.
+        for child, i in zip(children, range(0, sys.maxsize)):
+            loader(child, address=address, info=info.get(i, {}))
+
+    def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto],
+                    address: metadata.Address, info: dict,
+                    ) -> Mapping[str, wrappers.Field]:
+        """Return a dictionary of wrapped fields for the given message.
+
+        Args:
+            field_pbs (Sequence[~.descriptor_pb2.FieldDescriptorProto]): A
+                sequence of protobuf field objects.
+            address (~.metadata.Address): An address object denoting the
+                location of these fields.
+            info (dict): The appropriate slice of proto comments
+                corresponding to these fields.
+
+        Returns:
+            Mapping[str, ~.wrappers.Field]: An ordered mapping of
+                :class:`~.wrappers.Field` objects.
+        """
+        # Iterate over the fields and collect them into a dictionary.
+        answer = collections.OrderedDict()
+        for field_pb, i in zip(field_pbs, range(0, sys.maxsize)):
+            answer[field_pb.name] = wrappers.Field(
+                field_pb=field_pb,
+                meta=metadata.Metadata(
+                    address=address,
+                    documentation=info.get(i, {}).get(
+                        'TERMINAL',
+                        descriptor_pb2.SourceCodeInfo.Location(),
+                    ),
+                ),
+            )
+
+        # Done; return the answer.
+        return answer
+
+    def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto],
+                     address: metadata.Address, info: dict,
+                     ) -> Mapping[str, wrappers.Method]:
+        """Return a dictionary of wrapped methods for the given service.
+
+        Args:
+            methods (Sequence[~.descriptor_pb2.MethodDescriptorProto]): A
+                sequence of protobuf method objects.
+            address (~.metadata.Address): An address object denoting the
+                location of these methods.
+            info (dict): The appropriate slice of proto comments
+                corresponding to these methods.
+
+        Returns:
+            Mapping[str, ~.wrappers.Method]: An ordered mapping of
+                :class:`~.wrappers.Method` objects.
+        """
+        # Iterate over the methods and collect them into a dictionary.
+        answer = collections.OrderedDict()
+        for method_pb, i in zip(methods, range(0, sys.maxsize)):
+            answer[method_pb.name] = wrappers.Method(
+                input=self.messages[method_pb.input_type.lstrip('.')],
+                method_pb=method_pb,
+                meta=metadata.Metadata(
+                    address=address,
+                    documentation=info.get(i, {}).get(
+                        'TERMINAL',
+                        descriptor_pb2.SourceCodeInfo.Location(),
+                    ),
+                ),
+                output=self.messages[method_pb.output_type.lstrip('.')],
+            )
+
+        # Done; return the answer.
+        return answer
+
+    def _load_descriptor(self, message_pb: descriptor_pb2.DescriptorProto,
+                         address: metadata.Address, info: dict) -> None:
+        """Load message descriptions from DescriptorProtos."""
+        ident = f'{str(address)}.{message_pb.name}'
+        nested_addr = address.child(message_pb.name)
+
+        # Create a dictionary of all the fields for this message.
+        fields = self._get_fields(
+            message_pb.field,
+            address=nested_addr,
+            info=info.get(2, {}),
+        )
+        fields.update(self._get_fields(
+            message_pb.extension,
+            address=nested_addr,
+            info=info.get(6, {}),
+        ))
+
+        # Create a message corresponding to this descriptor.
+        self.messages[ident] = wrappers.MessageType(
+            fields=fields,
+            message_pb=message_pb,
+            meta=metadata.Metadata(address=address, documentation=info.get(
+                'TERMINAL',
+                descriptor_pb2.SourceCodeInfo.Location(),
+            )),
+        )
+
+        # Load all nested items.
+        self._load_children(message_pb.nested_type, address=nested_addr,
+                            loader=self._load_descriptor, info=info.get(3, {}))
+        self._load_children(message_pb.enum_type, address=nested_addr,
+                            loader=self._load_enum, info=info.get(4, {}))
+        # self._load_children(message.oneof_decl, loader=self._load_field,
+        #                     address=nested_addr, info=info.get(8, {}))
+
+    def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto,
+                   address: metadata.Address, info: dict) -> None:
+        """Load enum descriptions from EnumDescriptorProtos."""
+        # Put together wrapped objects for the enum values.
+ values = [] + for enum_value, i in zip(enum.value, range(0, sys.maxsize)): + values.append(wrappers.EnumValueType( + enum_value_pb=enum_value, + meta=metadata.Metadata( + address=address, + documentation=info.get(2, {}).get( + 'TERMINAL', + descriptor_pb2.SourceCodeInfo.Location(), + ), + ), + )) + + # Load the enum itself. + ident = f'{str(address)}.{enum.name}' + self.enums[ident] = wrappers.EnumType( + enum_pb=enum, + meta=metadata.Metadata(address=address, documentation=info.get( + 'TERMINAL', + descriptor_pb2.SourceCodeInfo.Location(), + )), + values=values, + ) + + def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, + address: metadata.Address, info: dict) -> None: + """Load comments for a service and its methods.""" + service_addr = address.child(service.name) + + # Put together a dictionary of the service's methods. + methods = self._get_methods( + service.method, + address=service_addr, + info=info.get(2, {}), + ) + + # Load the comments for the service itself. + self.services[f'{str(address)}.{service.name}'] = wrappers.Service( + meta=metadata.Metadata(address=address, documentation=info.get( + 'TERMINAL', + descriptor_pb2.SourceCodeInfo.Location(), + )), + methods=methods, + service_pb=service, + ) diff --git a/packages/gapic-generator/api_factory/schema/metadata.py b/packages/gapic-generator/api_factory/schema/metadata.py new file mode 100644 index 000000000000..bf6cd1c6b7d6 --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/metadata.py @@ -0,0 +1,84 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""The ``metadata`` module defines schema for where data was parsed from. +This library places every protocol buffer descriptor in a wrapper class +(see :mod:`~.wrappers`) before loading it into the :class:`~.API` object. + +As we iterate over descriptors during the loading process, it is important +to know where they came from, because sometimes protocol buffer types are +referenced by fully-qualified string (e.g. ``method.input_type``), and we +want to resolve those references. + +Additionally, protocol buffers stores data from the comments in the ``.proto`` +in a separate structure, and this object model re-connects the comments +with the things they describe for easy access in templates. +""" + +import copy +import dataclasses +from typing import List + +from google.protobuf import descriptor_pb2 + + +@dataclasses.dataclass(frozen=True) +class Address: + package: List[str] = dataclasses.field(default_factory=list) + module: str = '' + parent: List[str] = dataclasses.field(default_factory=list) + + def __str__(self): + return '.'.join(self.package + self.parent) + + def child(self, child_name: str) -> 'Address': + """Return a new Address with ``child_name`` appended to its parent. + + Args: + child_name (str): The child name to be appended to ``parent``. + The period-separator is added automatically if ``parent`` + is non-empty. + + Returns: + ~.Address: The new address object. 
+ """ + answer = copy.deepcopy(self) + answer.parent.append(child_name) + return answer + + +@dataclasses.dataclass(frozen=True) +class Metadata: + address: Address = dataclasses.field(default_factory=Address) + documentation: descriptor_pb2.SourceCodeInfo.Location = dataclasses.field( + default_factory=descriptor_pb2.SourceCodeInfo.Location, + ) + + @property + def doc(self): + """Return the best comment. + + This property prefers the leading comment if one is available, + and falls back to a trailing comment or a detached comment otherwise. + + If there are no comments, return empty string. (This means a template + is always guaranteed to get a string.) + """ + if self.documentation.leading_comments: + return self.documentation.leading_comments.strip() + if self.documentation.trailing_comments: + return self.documentation.trailing_comments.strip() + if self.documentation.leading_detached_comments: + return '\n\n'.join(self.documentation.leading_detached_comments) + return '' diff --git a/packages/gapic-generator/api_factory/schema/pb/client_pb2.py b/packages/gapic-generator/api_factory/schema/pb/client_pb2.py new file mode 100644 index 000000000000..4cb10c7d5e0a --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/client_pb2.py @@ -0,0 +1,186 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/api/experimental/client.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import documentation_pb2 as google_dot_api_dot_documentation__pb2 +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/experimental/client.proto', + package='google.api.experimental', + syntax='proto3', + serialized_pb=_b('\n$google/api/experimental/client.proto\x12\x17google.api.experimental\x1a\x1egoogle/api/documentation.proto\x1a google/protobuf/descriptor.proto\"+\n\tCopyright\x12\x10\n\x08\x66ullname\x18\x01 \x01(\t\x12\x0c\n\x04year\x18\x02 \x01(\t\"\xb4\x01\n\x06\x43lient\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tnamespace\x18\x02 \x03(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x30\n\rdocumentation\x18\n \x01(\x0b\x32\x19.google.api.Documentation\x12\x35\n\tcopyright\x18\x0b \x01(\x0b\x32\".google.api.experimental.Copyright\x12\x0f\n\x07license\x18\x0c \x01(\t:O\n\x06\x63lient\x12\x1c.google.protobuf.FileOptions\x18\xfc\x88\x03 \x01(\x0b\x32\x1f.google.api.experimental.Client:/\n\x04host\x12\x1f.google.protobuf.ServiceOptions\x18\xfd\x88\x03 \x01(\t:7\n\x0coauth_scopes\x12\x1f.google.protobuf.ServiceOptions\x18\xfe\x88\x03 \x03(\tb\x06proto3') + , + dependencies=[google_dot_api_dot_documentation__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + + +CLIENT_FIELD_NUMBER = 50300 +client = _descriptor.FieldDescriptor( + name='client', full_name='google.api.experimental.client', index=0, + number=50300, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) +HOST_FIELD_NUMBER = 50301 +host = _descriptor.FieldDescriptor( + name='host', full_name='google.api.experimental.host', index=1, + number=50301, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) +OAUTH_SCOPES_FIELD_NUMBER = 50302 +oauth_scopes = _descriptor.FieldDescriptor( + name='oauth_scopes', full_name='google.api.experimental.oauth_scopes', index=2, + number=50302, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) + + +_COPYRIGHT = _descriptor.Descriptor( + name='Copyright', + full_name='google.api.experimental.Copyright', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fullname', full_name='google.api.experimental.Copyright.fullname', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='year', full_name='google.api.experimental.Copyright.year', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=131, + serialized_end=174, +) + + +_CLIENT = _descriptor.Descriptor( + name='Client', + full_name='google.api.experimental.Client', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.api.experimental.Client.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='namespace', full_name='google.api.experimental.Client.namespace', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='google.api.experimental.Client.version', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='documentation', full_name='google.api.experimental.Client.documentation', index=3, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='copyright', full_name='google.api.experimental.Client.copyright', index=4, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='license', 
full_name='google.api.experimental.Client.license', index=5, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=177, + serialized_end=357, +) + +_CLIENT.fields_by_name['documentation'].message_type = google_dot_api_dot_documentation__pb2._DOCUMENTATION +_CLIENT.fields_by_name['copyright'].message_type = _COPYRIGHT +DESCRIPTOR.message_types_by_name['Copyright'] = _COPYRIGHT +DESCRIPTOR.message_types_by_name['Client'] = _CLIENT +DESCRIPTOR.extensions_by_name['client'] = client +DESCRIPTOR.extensions_by_name['host'] = host +DESCRIPTOR.extensions_by_name['oauth_scopes'] = oauth_scopes +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Copyright = _reflection.GeneratedProtocolMessageType('Copyright', (_message.Message,), dict( + DESCRIPTOR = _COPYRIGHT, + __module__ = 'google.api.experimental.client_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.Copyright) + )) +_sym_db.RegisterMessage(Copyright) + +Client = _reflection.GeneratedProtocolMessageType('Client', (_message.Message,), dict( + DESCRIPTOR = _CLIENT, + __module__ = 'google.api.experimental.client_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.Client) + )) +_sym_db.RegisterMessage(Client) + +client.message_type = _CLIENT +google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(client) +google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(host) +google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(oauth_scopes) + +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py 
b/packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py new file mode 100644 index 000000000000..5da32a22f90f --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py @@ -0,0 +1,832 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/api/expr/v1/syntax.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/expr/v1/syntax.proto', + package='google.api.expr.v1', + syntax='proto3', + serialized_pb=_b('\n\x1fgoogle/api/expr/v1/syntax.proto\x12\x12google.api.expr.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"o\n\nParsedExpr\x12&\n\x04\x65xpr\x18\x02 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x33\n\x0bsource_info\x18\x03 \x01(\x0b\x32\x1e.google.api.expr.v1.SourceInfoJ\x04\x08\x01\x10\x02\"\xd9\t\n\x04\x45xpr\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x32\n\nconst_expr\x18\x03 \x01(\x0b\x32\x1c.google.api.expr.v1.ConstantH\x00\x12\x34\n\nident_expr\x18\x04 \x01(\x0b\x32\x1e.google.api.expr.v1.Expr.IdentH\x00\x12\x36\n\x0bselect_expr\x18\x05 \x01(\x0b\x32\x1f.google.api.expr.v1.Expr.SelectH\x00\x12\x32\n\tcall_expr\x18\x06 \x01(\x0b\x32\x1d.google.api.expr.v1.Expr.CallH\x00\x12\x38\n\tlist_expr\x18\x07 
\x01(\x0b\x32#.google.api.expr.v1.Expr.CreateListH\x00\x12<\n\x0bstruct_expr\x18\x08 \x01(\x0b\x32%.google.api.expr.v1.Expr.CreateStructH\x00\x12\x44\n\x12\x63omprehension_expr\x18\t \x01(\x0b\x32&.google.api.expr.v1.Expr.ComprehensionH\x00\x1a\x15\n\x05Ident\x12\x0c\n\x04name\x18\x01 \x01(\t\x1aU\n\x06Select\x12)\n\x07operand\x18\x01 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\r\n\x05\x66ield\x18\x02 \x01(\t\x12\x11\n\ttest_only\x18\x03 \x01(\x08\x1aj\n\x04\x43\x61ll\x12(\n\x06target\x18\x01 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x10\n\x08\x66unction\x18\x02 \x01(\t\x12&\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x18.google.api.expr.v1.Expr\x1a\x38\n\nCreateList\x12*\n\x08\x65lements\x18\x01 \x03(\x0b\x32\x18.google.api.expr.v1.Expr\x1a\xef\x01\n\x0c\x43reateStruct\x12\x14\n\x0cmessage_name\x18\x01 \x01(\t\x12<\n\x07\x65ntries\x18\x02 \x03(\x0b\x32+.google.api.expr.v1.Expr.CreateStruct.Entry\x1a\x8a\x01\n\x05\x45ntry\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x13\n\tfield_key\x18\x02 \x01(\tH\x00\x12+\n\x07map_key\x18\x03 \x01(\x0b\x32\x18.google.api.expr.v1.ExprH\x00\x12\'\n\x05value\x18\x04 \x01(\x0b\x32\x18.google.api.expr.v1.ExprB\n\n\x08key_kind\x1a\x97\x02\n\rComprehension\x12\x10\n\x08iter_var\x18\x01 \x01(\t\x12,\n\niter_range\x18\x02 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x10\n\x08\x61\x63\x63u_var\x18\x03 \x01(\t\x12+\n\taccu_init\x18\x04 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x30\n\x0eloop_condition\x18\x05 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12+\n\tloop_step\x18\x06 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12(\n\x06result\x18\x07 \x01(\x0b\x32\x18.google.api.expr.v1.ExprB\x0b\n\texpr_kindJ\x04\x08\x01\x10\x02\"\xc5\x02\n\x08\x43onstant\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x15\n\x0bint64_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0cuint64_value\x18\x04 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x05 
\x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x12\x33\n\x0e\x64uration_value\x18\x08 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x35\n\x0ftimestamp_value\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x0f\n\rconstant_kind\"\xc0\x01\n\nSourceInfo\x12\x16\n\x0esyntax_version\x18\x01 \x01(\t\x12\x10\n\x08location\x18\x02 \x01(\t\x12\x14\n\x0cline_offsets\x18\x03 \x03(\x05\x12@\n\tpositions\x18\x04 \x03(\x0b\x32-.google.api.expr.v1.SourceInfo.PositionsEntry\x1a\x30\n\x0ePositionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42*\n\x16\x63om.google.api.expr.v1B\x0bSyntaxProtoP\x01\xf8\x01\x01\x62\x06proto3') + , + dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + + +_PARSEDEXPR = _descriptor.Descriptor( + name='ParsedExpr', + full_name='google.api.expr.v1.ParsedExpr', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='expr', full_name='google.api.expr.v1.ParsedExpr.expr', index=0, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='source_info', full_name='google.api.expr.v1.ParsedExpr.source_info', index=1, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=150, + serialized_end=261, +) + + +_EXPR_IDENT = 
_descriptor.Descriptor( + name='Ident', + full_name='google.api.expr.v1.Expr.Ident', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.api.expr.v1.Expr.Ident.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=688, + serialized_end=709, +) + +_EXPR_SELECT = _descriptor.Descriptor( + name='Select', + full_name='google.api.expr.v1.Expr.Select', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='operand', full_name='google.api.expr.v1.Expr.Select.operand', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='field', full_name='google.api.expr.v1.Expr.Select.field', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='test_only', full_name='google.api.expr.v1.Expr.Select.test_only', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=711, + serialized_end=796, +) + +_EXPR_CALL = _descriptor.Descriptor( + name='Call', + full_name='google.api.expr.v1.Expr.Call', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='target', full_name='google.api.expr.v1.Expr.Call.target', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='function', full_name='google.api.expr.v1.Expr.Call.function', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='args', full_name='google.api.expr.v1.Expr.Call.args', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=798, + serialized_end=904, +) + +_EXPR_CREATELIST = _descriptor.Descriptor( + name='CreateList', + full_name='google.api.expr.v1.Expr.CreateList', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='elements', full_name='google.api.expr.v1.Expr.CreateList.elements', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=906, + serialized_end=962, +) + +_EXPR_CREATESTRUCT_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='google.api.expr.v1.Expr.CreateStruct.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.id', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='field_key', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.field_key', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='map_key', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.map_key', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.value', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='key_kind', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.key_kind', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1066, + serialized_end=1204, +) + +_EXPR_CREATESTRUCT = _descriptor.Descriptor( + name='CreateStruct', + full_name='google.api.expr.v1.Expr.CreateStruct', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_name', full_name='google.api.expr.v1.Expr.CreateStruct.message_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='entries', full_name='google.api.expr.v1.Expr.CreateStruct.entries', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_EXPR_CREATESTRUCT_ENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=965, + serialized_end=1204, +) + +_EXPR_COMPREHENSION = _descriptor.Descriptor( + name='Comprehension', + full_name='google.api.expr.v1.Expr.Comprehension', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='iter_var', full_name='google.api.expr.v1.Expr.Comprehension.iter_var', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='iter_range', 
full_name='google.api.expr.v1.Expr.Comprehension.iter_range', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='accu_var', full_name='google.api.expr.v1.Expr.Comprehension.accu_var', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='accu_init', full_name='google.api.expr.v1.Expr.Comprehension.accu_init', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loop_condition', full_name='google.api.expr.v1.Expr.Comprehension.loop_condition', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='loop_step', full_name='google.api.expr.v1.Expr.Comprehension.loop_step', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='result', full_name='google.api.expr.v1.Expr.Comprehension.result', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1207, + serialized_end=1486, +) + +_EXPR = _descriptor.Descriptor( + name='Expr', + full_name='google.api.expr.v1.Expr', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.api.expr.v1.Expr.id', index=0, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='const_expr', full_name='google.api.expr.v1.Expr.const_expr', index=1, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='ident_expr', full_name='google.api.expr.v1.Expr.ident_expr', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='select_expr', full_name='google.api.expr.v1.Expr.select_expr', index=3, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='call_expr', full_name='google.api.expr.v1.Expr.call_expr', index=4, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='list_expr', full_name='google.api.expr.v1.Expr.list_expr', index=5, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='struct_expr', full_name='google.api.expr.v1.Expr.struct_expr', index=6, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='comprehension_expr', full_name='google.api.expr.v1.Expr.comprehension_expr', index=7, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_EXPR_IDENT, _EXPR_SELECT, _EXPR_CALL, _EXPR_CREATELIST, _EXPR_CREATESTRUCT, _EXPR_COMPREHENSION, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='expr_kind', full_name='google.api.expr.v1.Expr.expr_kind', + index=0, containing_type=None, fields=[]), + ], + serialized_start=264, + serialized_end=1505, +) + + +_CONSTANT = _descriptor.Descriptor( + name='Constant', + full_name='google.api.expr.v1.Constant', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='null_value', full_name='google.api.expr.v1.Constant.null_value', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bool_value', full_name='google.api.expr.v1.Constant.bool_value', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int64_value', full_name='google.api.expr.v1.Constant.int64_value', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='uint64_value', full_name='google.api.expr.v1.Constant.uint64_value', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.api.expr.v1.Constant.double_value', index=4, + number=5, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.api.expr.v1.Constant.string_value', index=5, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bytes_value', full_name='google.api.expr.v1.Constant.bytes_value', index=6, + number=7, type=12, cpp_type=9, label=1, + 
has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='duration_value', full_name='google.api.expr.v1.Constant.duration_value', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='timestamp_value', full_name='google.api.expr.v1.Constant.timestamp_value', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='constant_kind', full_name='google.api.expr.v1.Constant.constant_kind', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1508, + serialized_end=1833, +) + + +_SOURCEINFO_POSITIONSENTRY = _descriptor.Descriptor( + name='PositionsEntry', + full_name='google.api.expr.v1.SourceInfo.PositionsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.api.expr.v1.SourceInfo.PositionsEntry.key', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.api.expr.v1.SourceInfo.PositionsEntry.value', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, 
default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1980, + serialized_end=2028, +) + +_SOURCEINFO = _descriptor.Descriptor( + name='SourceInfo', + full_name='google.api.expr.v1.SourceInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='syntax_version', full_name='google.api.expr.v1.SourceInfo.syntax_version', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='location', full_name='google.api.expr.v1.SourceInfo.location', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='line_offsets', full_name='google.api.expr.v1.SourceInfo.line_offsets', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='positions', full_name='google.api.expr.v1.SourceInfo.positions', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, 
file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_SOURCEINFO_POSITIONSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1836, + serialized_end=2028, +) + +_PARSEDEXPR.fields_by_name['expr'].message_type = _EXPR +_PARSEDEXPR.fields_by_name['source_info'].message_type = _SOURCEINFO +_EXPR_IDENT.containing_type = _EXPR +_EXPR_SELECT.fields_by_name['operand'].message_type = _EXPR +_EXPR_SELECT.containing_type = _EXPR +_EXPR_CALL.fields_by_name['target'].message_type = _EXPR +_EXPR_CALL.fields_by_name['args'].message_type = _EXPR +_EXPR_CALL.containing_type = _EXPR +_EXPR_CREATELIST.fields_by_name['elements'].message_type = _EXPR +_EXPR_CREATELIST.containing_type = _EXPR +_EXPR_CREATESTRUCT_ENTRY.fields_by_name['map_key'].message_type = _EXPR +_EXPR_CREATESTRUCT_ENTRY.fields_by_name['value'].message_type = _EXPR +_EXPR_CREATESTRUCT_ENTRY.containing_type = _EXPR_CREATESTRUCT +_EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'].fields.append( + _EXPR_CREATESTRUCT_ENTRY.fields_by_name['field_key']) +_EXPR_CREATESTRUCT_ENTRY.fields_by_name['field_key'].containing_oneof = _EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'] +_EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'].fields.append( + _EXPR_CREATESTRUCT_ENTRY.fields_by_name['map_key']) +_EXPR_CREATESTRUCT_ENTRY.fields_by_name['map_key'].containing_oneof = _EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'] +_EXPR_CREATESTRUCT.fields_by_name['entries'].message_type = _EXPR_CREATESTRUCT_ENTRY +_EXPR_CREATESTRUCT.containing_type = _EXPR +_EXPR_COMPREHENSION.fields_by_name['iter_range'].message_type = _EXPR +_EXPR_COMPREHENSION.fields_by_name['accu_init'].message_type = _EXPR +_EXPR_COMPREHENSION.fields_by_name['loop_condition'].message_type = _EXPR +_EXPR_COMPREHENSION.fields_by_name['loop_step'].message_type = _EXPR +_EXPR_COMPREHENSION.fields_by_name['result'].message_type = _EXPR 
+_EXPR_COMPREHENSION.containing_type = _EXPR +_EXPR.fields_by_name['const_expr'].message_type = _CONSTANT +_EXPR.fields_by_name['ident_expr'].message_type = _EXPR_IDENT +_EXPR.fields_by_name['select_expr'].message_type = _EXPR_SELECT +_EXPR.fields_by_name['call_expr'].message_type = _EXPR_CALL +_EXPR.fields_by_name['list_expr'].message_type = _EXPR_CREATELIST +_EXPR.fields_by_name['struct_expr'].message_type = _EXPR_CREATESTRUCT +_EXPR.fields_by_name['comprehension_expr'].message_type = _EXPR_COMPREHENSION +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['const_expr']) +_EXPR.fields_by_name['const_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['ident_expr']) +_EXPR.fields_by_name['ident_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['select_expr']) +_EXPR.fields_by_name['select_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['call_expr']) +_EXPR.fields_by_name['call_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['list_expr']) +_EXPR.fields_by_name['list_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['struct_expr']) +_EXPR.fields_by_name['struct_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_EXPR.oneofs_by_name['expr_kind'].fields.append( + _EXPR.fields_by_name['comprehension_expr']) +_EXPR.fields_by_name['comprehension_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] +_CONSTANT.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE +_CONSTANT.fields_by_name['duration_value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION 
+_CONSTANT.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['null_value']) +_CONSTANT.fields_by_name['null_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['bool_value']) +_CONSTANT.fields_by_name['bool_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['int64_value']) +_CONSTANT.fields_by_name['int64_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['uint64_value']) +_CONSTANT.fields_by_name['uint64_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['double_value']) +_CONSTANT.fields_by_name['double_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['string_value']) +_CONSTANT.fields_by_name['string_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['bytes_value']) +_CONSTANT.fields_by_name['bytes_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['duration_value']) +_CONSTANT.fields_by_name['duration_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_CONSTANT.oneofs_by_name['constant_kind'].fields.append( + _CONSTANT.fields_by_name['timestamp_value']) +_CONSTANT.fields_by_name['timestamp_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] +_SOURCEINFO_POSITIONSENTRY.containing_type = 
_SOURCEINFO +_SOURCEINFO.fields_by_name['positions'].message_type = _SOURCEINFO_POSITIONSENTRY +DESCRIPTOR.message_types_by_name['ParsedExpr'] = _PARSEDEXPR +DESCRIPTOR.message_types_by_name['Expr'] = _EXPR +DESCRIPTOR.message_types_by_name['Constant'] = _CONSTANT +DESCRIPTOR.message_types_by_name['SourceInfo'] = _SOURCEINFO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +ParsedExpr = _reflection.GeneratedProtocolMessageType('ParsedExpr', (_message.Message,), dict( + DESCRIPTOR = _PARSEDEXPR, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.ParsedExpr) + )) +_sym_db.RegisterMessage(ParsedExpr) + +Expr = _reflection.GeneratedProtocolMessageType('Expr', (_message.Message,), dict( + + Ident = _reflection.GeneratedProtocolMessageType('Ident', (_message.Message,), dict( + DESCRIPTOR = _EXPR_IDENT, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Ident) + )) + , + + Select = _reflection.GeneratedProtocolMessageType('Select', (_message.Message,), dict( + DESCRIPTOR = _EXPR_SELECT, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Select) + )) + , + + Call = _reflection.GeneratedProtocolMessageType('Call', (_message.Message,), dict( + DESCRIPTOR = _EXPR_CALL, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Call) + )) + , + + CreateList = _reflection.GeneratedProtocolMessageType('CreateList', (_message.Message,), dict( + DESCRIPTOR = _EXPR_CREATELIST, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.CreateList) + )) + , + + CreateStruct = _reflection.GeneratedProtocolMessageType('CreateStruct', (_message.Message,), dict( + + Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( + DESCRIPTOR = _EXPR_CREATESTRUCT_ENTRY, + __module__ = 
'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.CreateStruct.Entry) + )) + , + DESCRIPTOR = _EXPR_CREATESTRUCT, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.CreateStruct) + )) + , + + Comprehension = _reflection.GeneratedProtocolMessageType('Comprehension', (_message.Message,), dict( + DESCRIPTOR = _EXPR_COMPREHENSION, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Comprehension) + )) + , + DESCRIPTOR = _EXPR, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr) + )) +_sym_db.RegisterMessage(Expr) +_sym_db.RegisterMessage(Expr.Ident) +_sym_db.RegisterMessage(Expr.Select) +_sym_db.RegisterMessage(Expr.Call) +_sym_db.RegisterMessage(Expr.CreateList) +_sym_db.RegisterMessage(Expr.CreateStruct) +_sym_db.RegisterMessage(Expr.CreateStruct.Entry) +_sym_db.RegisterMessage(Expr.Comprehension) + +Constant = _reflection.GeneratedProtocolMessageType('Constant', (_message.Message,), dict( + DESCRIPTOR = _CONSTANT, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Constant) + )) +_sym_db.RegisterMessage(Constant) + +SourceInfo = _reflection.GeneratedProtocolMessageType('SourceInfo', (_message.Message,), dict( + + PositionsEntry = _reflection.GeneratedProtocolMessageType('PositionsEntry', (_message.Message,), dict( + DESCRIPTOR = _SOURCEINFO_POSITIONSENTRY, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.SourceInfo.PositionsEntry) + )) + , + DESCRIPTOR = _SOURCEINFO, + __module__ = 'google.api.expr.v1.syntax_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.SourceInfo) + )) +_sym_db.RegisterMessage(SourceInfo) +_sym_db.RegisterMessage(SourceInfo.PositionsEntry) + + +DESCRIPTOR.has_options = True 
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.api.expr.v1B\013SyntaxProtoP\001\370\001\001')) +_SOURCEINFO_POSITIONSENTRY.has_options = True +_SOURCEINFO_POSITIONSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py b/packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py new file mode 100644 index 000000000000..d097dc007296 --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py @@ -0,0 +1,326 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/api/expr/v1/value.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/expr/v1/value.proto', + package='google.api.expr.v1', + syntax='proto3', + serialized_pb=_b('\n\x1egoogle/api/expr/v1/value.proto\x12\x12google.api.expr.v1\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd0\x03\n\x05Value\x12\x30\n\nnull_value\x18\x01 
\x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x15\n\x0bint64_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0cuint64_value\x18\x04 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x05 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x12\x33\n\x0e\x64uration_value\x18\x08 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x35\n\x0ftimestamp_value\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\rmessage_value\x18\n \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x31\n\tmap_value\x18\x0b \x01(\x0b\x32\x1c.google.api.expr.v1.MapValueH\x00\x12\x33\n\nlist_value\x18\x0c \x01(\x0b\x32\x1d.google.api.expr.v1.ListValueH\x00\x42\x06\n\x04kind\"6\n\tListValue\x12)\n\x06values\x18\x01 \x03(\x0b\x32\x19.google.api.expr.v1.Value\"\x9a\x01\n\x08MapValue\x12\x33\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\".google.api.expr.v1.MapValue.Entry\x1aY\n\x05\x45ntry\x12&\n\x03key\x18\x01 \x01(\x0b\x32\x19.google.api.expr.v1.Value\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x19.google.api.expr.v1.ValueB)\n\x16\x63om.google.api.expr.v1B\nValueProtoP\x01\xf8\x01\x01\x62\x06proto3') + , + dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + + +_VALUE = _descriptor.Descriptor( + name='Value', + full_name='google.api.expr.v1.Value', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='null_value', full_name='google.api.expr.v1.Value.null_value', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bool_value', full_name='google.api.expr.v1.Value.bool_value', 
index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='int64_value', full_name='google.api.expr.v1.Value.int64_value', index=2, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='uint64_value', full_name='google.api.expr.v1.Value.uint64_value', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='double_value', full_name='google.api.expr.v1.Value.double_value', index=4, + number=5, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='string_value', full_name='google.api.expr.v1.Value.string_value', index=5, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='bytes_value', full_name='google.api.expr.v1.Value.bytes_value', index=6, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='duration_value', full_name='google.api.expr.v1.Value.duration_value', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='timestamp_value', full_name='google.api.expr.v1.Value.timestamp_value', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='message_value', full_name='google.api.expr.v1.Value.message_value', index=9, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='map_value', full_name='google.api.expr.v1.Value.map_value', index=10, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='list_value', full_name='google.api.expr.v1.Value.list_value', index=11, + number=12, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='kind', full_name='google.api.expr.v1.Value.kind', + index=0, containing_type=None, fields=[]), + ], + serialized_start=177, + 
serialized_end=641, +) + + +_LISTVALUE = _descriptor.Descriptor( + name='ListValue', + full_name='google.api.expr.v1.ListValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='values', full_name='google.api.expr.v1.ListValue.values', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=643, + serialized_end=697, +) + + +_MAPVALUE_ENTRY = _descriptor.Descriptor( + name='Entry', + full_name='google.api.expr.v1.MapValue.Entry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.api.expr.v1.MapValue.Entry.key', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.api.expr.v1.MapValue.Entry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=765, + serialized_end=854, +) + +_MAPVALUE = _descriptor.Descriptor( + name='MapValue', + full_name='google.api.expr.v1.MapValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='entries', full_name='google.api.expr.v1.MapValue.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_MAPVALUE_ENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=700, + serialized_end=854, +) + +_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE +_VALUE.fields_by_name['duration_value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_VALUE.fields_by_name['message_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY +_VALUE.fields_by_name['map_value'].message_type = _MAPVALUE +_VALUE.fields_by_name['list_value'].message_type = _LISTVALUE +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['null_value']) +_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['bool_value']) +_VALUE.fields_by_name['bool_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['int64_value']) +_VALUE.fields_by_name['int64_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['uint64_value']) +_VALUE.fields_by_name['uint64_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['double_value']) +_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + 
_VALUE.fields_by_name['string_value']) +_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['bytes_value']) +_VALUE.fields_by_name['bytes_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['duration_value']) +_VALUE.fields_by_name['duration_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['timestamp_value']) +_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['message_value']) +_VALUE.fields_by_name['message_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['map_value']) +_VALUE.fields_by_name['map_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_VALUE.oneofs_by_name['kind'].fields.append( + _VALUE.fields_by_name['list_value']) +_VALUE.fields_by_name['list_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] +_LISTVALUE.fields_by_name['values'].message_type = _VALUE +_MAPVALUE_ENTRY.fields_by_name['key'].message_type = _VALUE +_MAPVALUE_ENTRY.fields_by_name['value'].message_type = _VALUE +_MAPVALUE_ENTRY.containing_type = _MAPVALUE +_MAPVALUE.fields_by_name['entries'].message_type = _MAPVALUE_ENTRY +DESCRIPTOR.message_types_by_name['Value'] = _VALUE +DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE +DESCRIPTOR.message_types_by_name['MapValue'] = _MAPVALUE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( + DESCRIPTOR = _VALUE, + __module__ = 'google.api.expr.v1.value_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.Value) + )) +_sym_db.RegisterMessage(Value) + +ListValue = 
_reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), dict( + DESCRIPTOR = _LISTVALUE, + __module__ = 'google.api.expr.v1.value_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.ListValue) + )) +_sym_db.RegisterMessage(ListValue) + +MapValue = _reflection.GeneratedProtocolMessageType('MapValue', (_message.Message,), dict( + + Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( + DESCRIPTOR = _MAPVALUE_ENTRY, + __module__ = 'google.api.expr.v1.value_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.MapValue.Entry) + )) + , + DESCRIPTOR = _MAPVALUE, + __module__ = 'google.api.expr.v1.value_pb2' + # @@protoc_insertion_point(class_scope:google.api.expr.v1.MapValue) + )) +_sym_db.RegisterMessage(MapValue) +_sym_db.RegisterMessage(MapValue.Entry) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.api.expr.v1B\nValueProtoP\001\370\001\001')) +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py b/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py new file mode 100644 index 000000000000..82f1206d5802 --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py @@ -0,0 +1,42 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/api/experimental/headers.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/experimental/headers.proto', + package='google.api.experimental', + syntax='proto3', + serialized_pb=_b('\n%google/api/experimental/headers.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto:5\n\x0cheader_param\x12\x1d.google.protobuf.FieldOptions\x18\xf0\x8c\x03 \x01(\tb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + + +HEADER_PARAM_FIELD_NUMBER = 50800 +header_param = _descriptor.FieldDescriptor( + name='header_param', full_name='google.api.experimental.header_param', index=0, + number=50800, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) + +DESCRIPTOR.extensions_by_name['header_param'] = header_param +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(header_param) + +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py b/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py new file mode 100644 index 000000000000..108ba7da473e --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py @@ -0,0 +1,89 @@ +# Generated by the protocol buffer compiler. 
DO NOT EDIT! +# source: google/api/experimental/lro.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/experimental/lro.proto', + package='google.api.experimental', + syntax='proto3', + serialized_pb=_b('\n!google/api/experimental/lro.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\"<\n\x0eOperationTypes\x12\x13\n\x0breturn_type\x18\x01 \x01(\t\x12\x15\n\rmetadata_type\x18\x02 \x01(\t:X\n\x05types\x12\x1e.google.protobuf.MethodOptions\x18\x80\x90\x03 \x01(\x0b\x32\'.google.api.experimental.OperationTypesb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + + +TYPES_FIELD_NUMBER = 51200 +types = _descriptor.FieldDescriptor( + name='types', full_name='google.api.experimental.types', index=0, + number=51200, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) + + +_OPERATIONTYPES = _descriptor.Descriptor( + name='OperationTypes', + full_name='google.api.experimental.OperationTypes', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='return_type', full_name='google.api.experimental.OperationTypes.return_type', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='metadata_type', full_name='google.api.experimental.OperationTypes.metadata_type', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=96, + serialized_end=156, +) + +DESCRIPTOR.message_types_by_name['OperationTypes'] = _OPERATIONTYPES +DESCRIPTOR.extensions_by_name['types'] = types +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +OperationTypes = _reflection.GeneratedProtocolMessageType('OperationTypes', (_message.Message,), dict( + DESCRIPTOR = _OPERATIONTYPES, + __module__ = 'google.api.experimental.lro_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.OperationTypes) + )) +_sym_db.RegisterMessage(OperationTypes) + +types.message_type = _OPERATIONTYPES +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(types) + +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/overload_pb2.py b/packages/gapic-generator/api_factory/schema/pb/overload_pb2.py new file mode 100644 index 000000000000..1adb2b3b6cf5 --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/overload_pb2.py @@ -0,0 +1,178 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/api/experimental/overload.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from api_factory.schema.pb.expr.v1 import syntax_pb2 as google_dot_api_dot_expr_dot_v1_dot_syntax__pb2 +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/experimental/overload.proto', + package='google.api.experimental', + syntax='proto3', + serialized_pb=_b('\n&google/api/experimental/overload.proto\x12\x17google.api.experimental\x1a\x1fgoogle/api/expr/v1/syntax.proto\x1a google/protobuf/descriptor.proto\"\x89\x02\n\x08Overload\x12\x0c\n\x04name\x18\x01 \x01(\t\x12K\n\x10\x63omponent_fields\x18\n \x01(\x0b\x32\x31.google.api.experimental.Overload.ComponentFields\x1a\xa1\x01\n\x0f\x43omponentFields\x12G\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x37.google.api.experimental.Overload.ComponentFields.Field\x1a\x45\n\x05\x46ield\x12\x0c\n\x04path\x18\x01 \x01(\t\x12.\n\x0c\x64\x65\x66\x61ult_expr\x18\x02 \x01(\x0b\x32\x18.google.api.expr.v1.Expr:V\n\toverloads\x12\x1e.google.protobuf.MethodOptions\x18\xac\x92\x03 \x03(\x0b\x32!.google.api.experimental.Overloadb\x06proto3') + , + dependencies=[google_dot_api_dot_expr_dot_v1_dot_syntax__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + + +OVERLOADS_FIELD_NUMBER = 51500 +overloads = _descriptor.FieldDescriptor( + name='overloads', full_name='google.api.experimental.overloads', index=0, + number=51500, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, 
containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) + + +_OVERLOAD_COMPONENTFIELDS_FIELD = _descriptor.Descriptor( + name='Field', + full_name='google.api.experimental.Overload.ComponentFields.Field', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='google.api.experimental.Overload.ComponentFields.Field.path', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='default_expr', full_name='google.api.experimental.Overload.ComponentFields.Field.default_expr', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=331, + serialized_end=400, +) + +_OVERLOAD_COMPONENTFIELDS = _descriptor.Descriptor( + name='ComponentFields', + full_name='google.api.experimental.Overload.ComponentFields', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='google.api.experimental.Overload.ComponentFields.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_OVERLOAD_COMPONENTFIELDS_FIELD, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=239, + serialized_end=400, +) + +_OVERLOAD = _descriptor.Descriptor( + name='Overload', + full_name='google.api.experimental.Overload', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.api.experimental.Overload.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='component_fields', full_name='google.api.experimental.Overload.component_fields', index=1, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_OVERLOAD_COMPONENTFIELDS, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=135, + serialized_end=400, +) + +_OVERLOAD_COMPONENTFIELDS_FIELD.fields_by_name['default_expr'].message_type = google_dot_api_dot_expr_dot_v1_dot_syntax__pb2._EXPR +_OVERLOAD_COMPONENTFIELDS_FIELD.containing_type = _OVERLOAD_COMPONENTFIELDS +_OVERLOAD_COMPONENTFIELDS.fields_by_name['fields'].message_type = _OVERLOAD_COMPONENTFIELDS_FIELD +_OVERLOAD_COMPONENTFIELDS.containing_type = _OVERLOAD +_OVERLOAD.fields_by_name['component_fields'].message_type = _OVERLOAD_COMPONENTFIELDS +DESCRIPTOR.message_types_by_name['Overload'] = _OVERLOAD +DESCRIPTOR.extensions_by_name['overloads'] = overloads +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Overload = _reflection.GeneratedProtocolMessageType('Overload', (_message.Message,), dict( + + ComponentFields = _reflection.GeneratedProtocolMessageType('ComponentFields', 
(_message.Message,), dict( + + Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), dict( + DESCRIPTOR = _OVERLOAD_COMPONENTFIELDS_FIELD, + __module__ = 'google.api.experimental.overload_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.Overload.ComponentFields.Field) + )) + , + DESCRIPTOR = _OVERLOAD_COMPONENTFIELDS, + __module__ = 'google.api.experimental.overload_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.Overload.ComponentFields) + )) + , + DESCRIPTOR = _OVERLOAD, + __module__ = 'google.api.experimental.overload_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.Overload) + )) +_sym_db.RegisterMessage(Overload) +_sym_db.RegisterMessage(Overload.ComponentFields) +_sym_db.RegisterMessage(Overload.ComponentFields.Field) + +overloads.message_type = _OVERLOAD +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(overloads) + +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/resources_pb2.py b/packages/gapic-generator/api_factory/schema/pb/resources_pb2.py new file mode 100644 index 000000000000..3b22b69f8cae --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/resources_pb2.py @@ -0,0 +1,99 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/api/experimental/resources.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/experimental/resources.proto', + package='google.api.experimental', + syntax='proto3', + serialized_pb=_b('\n\'google/api/experimental/resources.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\">\n\x11ResourceReference\x12\x11\n\ttype_name\x18\x01 \x01(\t\x12\x16\n\x0eresource_paths\x18\x02 \x03(\t:9\n\x0eresource_paths\x12\x1f.google.protobuf.MessageOptions\x18\xd8\x94\x03 \x03(\t:a\n\x0cresource_ref\x12\x1d.google.protobuf.FieldOptions\x18\xd9\x94\x03 \x01(\x0b\x32*.google.api.experimental.ResourceReferenceb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + + +RESOURCE_PATHS_FIELD_NUMBER = 51800 +resource_paths = _descriptor.FieldDescriptor( + name='resource_paths', full_name='google.api.experimental.resource_paths', index=0, + number=51800, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) +RESOURCE_REF_FIELD_NUMBER = 51801 +resource_ref = _descriptor.FieldDescriptor( + name='resource_ref', full_name='google.api.experimental.resource_ref', index=1, + number=51801, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) + + +_RESOURCEREFERENCE = _descriptor.Descriptor( + name='ResourceReference', + full_name='google.api.experimental.ResourceReference', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type_name', full_name='google.api.experimental.ResourceReference.type_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='resource_paths', full_name='google.api.experimental.ResourceReference.resource_paths', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=102, + serialized_end=164, +) + +DESCRIPTOR.message_types_by_name['ResourceReference'] = _RESOURCEREFERENCE +DESCRIPTOR.extensions_by_name['resource_paths'] = resource_paths +DESCRIPTOR.extensions_by_name['resource_ref'] = resource_ref +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +ResourceReference = _reflection.GeneratedProtocolMessageType('ResourceReference', (_message.Message,), dict( + DESCRIPTOR = _RESOURCEREFERENCE, + __module__ = 'google.api.experimental.resources_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.ResourceReference) + )) +_sym_db.RegisterMessage(ResourceReference) + +google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(resource_paths) +resource_ref.message_type = _RESOURCEREFERENCE 
+google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(resource_ref) + +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/stability_pb2.py b/packages/gapic-generator/api_factory/schema/pb/stability_pb2.py new file mode 100644 index 000000000000..c4b5089483ba --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/pb/stability_pb2.py @@ -0,0 +1,120 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/api/experimental/stability.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/api/experimental/stability.proto', + package='google.api.experimental', + syntax='proto3', + serialized_pb=_b('\n\'google/api/experimental/stability.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\"5\n\tStability\x12\x0e\n\x04\x62\x65ta\x18\x01 \x01(\tH\x00\x12\x0f\n\x05\x61lpha\x18\x02 \x01(\tH\x00\x42\x07\n\x05level:`\n\x11service_stability\x12\x1f.google.protobuf.ServiceOptions\x18\xbc\x95\x03 \x01(\x0b\x32\".google.api.experimental.Stability:^\n\x10method_stability\x12\x1e.google.protobuf.MethodOptions\x18\xbd\x95\x03 \x01(\x0b\x32\".google.api.experimental.Stability:\\\n\x0f\x66ield_stability\x12\x1d.google.protobuf.FieldOptions\x18\xbe\x95\x03 \x01(\x0b\x32\".google.api.experimental.Stabilityb\x06proto3') + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + + +SERVICE_STABILITY_FIELD_NUMBER = 51900 
+service_stability = _descriptor.FieldDescriptor( + name='service_stability', full_name='google.api.experimental.service_stability', index=0, + number=51900, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) +METHOD_STABILITY_FIELD_NUMBER = 51901 +method_stability = _descriptor.FieldDescriptor( + name='method_stability', full_name='google.api.experimental.method_stability', index=1, + number=51901, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) +FIELD_STABILITY_FIELD_NUMBER = 51902 +field_stability = _descriptor.FieldDescriptor( + name='field_stability', full_name='google.api.experimental.field_stability', index=2, + number=51902, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + options=None, file=DESCRIPTOR) + + +_STABILITY = _descriptor.Descriptor( + name='Stability', + full_name='google.api.experimental.Stability', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='beta', full_name='google.api.experimental.Stability.beta', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='alpha', full_name='google.api.experimental.Stability.alpha', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='level', full_name='google.api.experimental.Stability.level', + index=0, containing_type=None, fields=[]), + ], + serialized_start=102, + serialized_end=155, +) + +_STABILITY.oneofs_by_name['level'].fields.append( + _STABILITY.fields_by_name['beta']) +_STABILITY.fields_by_name['beta'].containing_oneof = _STABILITY.oneofs_by_name['level'] +_STABILITY.oneofs_by_name['level'].fields.append( + _STABILITY.fields_by_name['alpha']) +_STABILITY.fields_by_name['alpha'].containing_oneof = _STABILITY.oneofs_by_name['level'] +DESCRIPTOR.message_types_by_name['Stability'] = _STABILITY +DESCRIPTOR.extensions_by_name['service_stability'] = service_stability +DESCRIPTOR.extensions_by_name['method_stability'] = method_stability +DESCRIPTOR.extensions_by_name['field_stability'] = field_stability +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Stability = _reflection.GeneratedProtocolMessageType('Stability', (_message.Message,), dict( + DESCRIPTOR = _STABILITY, + __module__ = 'google.api.experimental.stability_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.Stability) + )) +_sym_db.RegisterMessage(Stability) + +service_stability.message_type = _STABILITY +google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(service_stability) +method_stability.message_type = _STABILITY +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(method_stability) +field_stability.message_type = _STABILITY +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_stability) + +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/api_factory/schema/wrappers.py new file mode 100644 
index 000000000000..69cb432757c9 --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -0,0 +1,181 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Module containing wrapper classes around meta-descriptors. + +This module contains dataclasses which wrap the descriptor protos +defined in google/protobuf/descriptor.proto (which are descriptors that +describe descriptors). + +These wrappers exist in order to provide useful helper methods and +generally ease access to things in templates (in particular, documentation, +certain aggregate views of things, etc.) + +Reading of underlying descriptor properties in templates *is* okay, a +``__getattr__`` method which consistently routes in this way is provided. +Documentation is consistently at ``{thing}.meta.doc``. 
+""" + +import dataclasses +from typing import List, Mapping, Sequence, Tuple + +from google.protobuf import descriptor_pb2 + +from api_factory import utils +from api_factory.schema.metadata import Metadata +from api_factory.schema.pb import client_pb2 +from api_factory.schema.pb import overload_pb2 + + +@dataclasses.dataclass(frozen=True) +class Field: + """Description of a field.""" + field_pb: descriptor_pb2.FieldDescriptorProto + meta: Metadata = dataclasses.field(default_factory=Metadata) + + def __getattr__(self, name): + return getattr(self.field_pb, name) + + +@dataclasses.dataclass(frozen=True) +class MessageType: + """Description of a message (defined with the ``message`` keyword).""" + message_pb: descriptor_pb2.DescriptorProto + fields: Mapping[str, Field] + meta: Metadata = dataclasses.field(default_factory=Metadata) + + def __getattr__(self, name): + return getattr(self.message_pb, name) + + @property + def pb2_module(self) -> str: + """Return the name of the Python pb2 module.""" + return f'{self.meta.address.module}_pb2' + + @property + def proto_path(self) -> str: + """Return the fully qualfied proto path as a string.""" + return f'{str(self.meta.address)}.{self.name}' + + +@dataclasses.dataclass(frozen=True) +class EnumValueType: + """Description of an enum value.""" + enum_value_pb: descriptor_pb2.EnumValueDescriptorProto + meta: Metadata = dataclasses.field(default_factory=Metadata) + + def __getattr__(self, name): + return getattr(self.enum_value_pb, name) + + +@dataclasses.dataclass(frozen=True) +class EnumType: + """Description of an enum (defined with the ``enum`` keyword.)""" + enum_pb: descriptor_pb2.EnumDescriptorProto + values: List[EnumValueType] + meta: Metadata = dataclasses.field(default_factory=Metadata) + + def __getattr__(self, name): + return getattr(self.enum_pb, name) + + +@dataclasses.dataclass(frozen=True) +class Method: + """Description of a method (defined with the ``rpc`` keyword).""" + method_pb: 
descriptor_pb2.MethodDescriptorProto
+    input: MessageType
+    output: MessageType
+    meta: Metadata = dataclasses.field(default_factory=Metadata)
+
+    def __getattr__(self, name):
+        return getattr(self.method_pb, name)
+
+    @property
+    def overloads(self):
+        """Return the overloads defined for this method."""
+        return self.method_pb.options.Extensions[overload_pb2.overloads]
+
+
+@dataclasses.dataclass(frozen=True)
+class Service:
+    """Description of a service (defined with the ``service`` keyword)."""
+    service_pb: descriptor_pb2.ServiceDescriptorProto
+    methods: Mapping[str, Method]
+    meta: Metadata = dataclasses.field(default_factory=Metadata)
+
+    def __getattr__(self, name):
+        return getattr(self.service_pb, name)
+
+    @property
+    def host(self) -> str:
+        """Return the hostname for this service, if specified.
+
+        Returns:
+            str: The hostname, with no protocol and no trailing ``/``.
+        """
+        if self.service_pb.options.Extensions[client_pb2.host]:
+            return self.service_pb.options.Extensions[client_pb2.host]
+        return utils.Placeholder('<<< HOSTNAME >>>')
+
+    @property
+    def oauth_scopes(self) -> Sequence[str]:
+        """Return a sequence of oauth scopes, if applicable.
+
+        Returns:
+            Sequence[str]: A sequence of OAuth scopes.
+        """
+        if self.service_pb.options.Extensions[client_pb2.oauth_scopes]:
+            return self.service_pb.options.Extensions[client_pb2.oauth_scopes]
+        return ()
+
+    @property
+    def pb2_modules(self) -> Sequence[Tuple[str, str]]:
+        """Return a sequence of pb2 modules, for import.
+
+        The results of this method are in alphabetical order (by package,
+        then module), and do not contain duplicates.
+
+        Returns:
+            Sequence[Tuple[str, str]]: The package and pb2_module pair,
+                intended for use in a ``from package import pb2_module``
+                type of statement.
+ """ + answer = set() + for method in self.methods.values(): + answer.add(( + '.'.join(method.input.meta.address.package), + method.input.pb2_module, + )) + answer.add(( + '.'.join(method.output.meta.address.package), + method.output.pb2_module, + )) + return sorted(answer) + + def transform_filename(self, original: str) -> str: + """Transforms a filename to be appropriate for this service. + + This essentially replaces the ``service/`` prefix with the + snake-cased directory for the service name. + + Args: + original (str): The filename to be transformed. + + Returns: + str: The transformed directory name. + """ + return original.replace( + 'service/', + f'{utils.to_snake_case(self.name)}/', + ) diff --git a/packages/gapic-generator/api_factory/utils/__init__.py b/packages/gapic-generator/api_factory/utils/__init__.py new file mode 100644 index 000000000000..a3dc1be070bc --- /dev/null +++ b/packages/gapic-generator/api_factory/utils/__init__.py @@ -0,0 +1,28 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import functools


def cached_property(fx):
    """Turn the given method into a lazily-computed, cached property.

    Similar to ``@property``, except the wrapped function runs at most
    once per object; every later access returns the memoized value.

    Args:
        fx (Callable): The property function; takes only ``self``.

    Returns:
        property: A property descriptor wrapping the function.
    """
    @functools.wraps(fx)
    def inner(self):
        # Lazily create the per-instance cache dict on first access.
        cache = getattr(self, '_cached_values', None)
        if cache is None:
            cache = {}
            setattr(self, '_cached_values', cache)

        # Invoke the wrapped function only on a cache miss; thereafter,
        # serve the stored result.
        key = fx.__name__
        if key not in cache:
            cache[key] = fx(self)
        return cache[key]
    return property(inner)
import re
import textwrap


# --- api_factory/utils/case.py ---

def to_snake_case(s: str) -> str:
    """Convert any string to snake case.

    This is provided to templates as the ``snake_case`` filter.

    Args:
        s (str): The input string, provided in any sane case system.

    Returns:
        str: The string in snake case (and all lower-cased).
    """
    # Replace all capital letters that are preceded by a lower-case letter.
    s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s))

    # Find all capital letters that are followed by a lower-case letter,
    # and are preceded by any character other than underscore.
    # (Note: This also excludes beginning-of-string.)
    s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s)

    # Numbers are a weird case; the goal is to spot when they _start_
    # some kind of name or acronym (e.g. 2FA, 3M).
    #
    # Find cases of a number preceded by a lower-case letter _and_
    # followed by at least two capital letters or a single capital and
    # end of string.
    s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s)
    s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s)

    # Done; return the snake-cased string.
    return s.lower()


# --- api_factory/utils/lines.py ---

def subsequent_indent(text: str, prefix: str) -> str:
    """Decorate the text string with the given prefix on hanging lines.

    A "hanging" line is any line except for the first one. After prefixing,
    if any lines end in whitespace, that whitespace is stripped.

    This is provided to all templates as the ``subsequent_indent`` filter.

    Args:
        text (str): The text string.
        prefix (str): The prefix to use.

    Returns:
        str: The string with all hanging lines prefixed.
    """
    lines = text.split('\n')
    lines[1:] = [f'{prefix}{s}'.rstrip() for s in lines[1:]]
    return '\n'.join(lines)


def wrap(text: str, width: int, initial_width: int = None,
         subsequent_indent: str = '', antecedent_trailer: str = '') -> str:
    """Wrap the given string to the given width.

    This uses :meth:`textwrap.fill` under the hood, but provides
    functionality for the initial width, as well as a common line ending
    for every line but the last.

    This is provided to all templates as the ``wrap`` filter.

    Args:
        text (str): The initial text string.
        width (int): The width at which to wrap the text. If either
            ``subsequent_indent`` or ``antecedent_trailer`` are provided,
            their width will be automatically counted against this.
        initial_width (int): Optional. The width of the first line, if
            different. Defaults to the value of ``width``.
        subsequent_indent (str): A string to be prepended to every line
            except the first.
        antecedent_trailer (str): A string to be appended to every line
            except the last.

    Returns:
        str: The wrapped string.
    """
    initial_width = initial_width or width

    # Sanity check: If there is empty text, abort.
    if not text:
        return ''

    # Reduce the widths by the length of the trailing string, if any,
    # so the trailer never pushes a line past the requested width.
    # (``textwrap.fill`` already accounts for ``subsequent_indent``.)
    width -= len(antecedent_trailer)
    initial_width -= len(antecedent_trailer)

    # If the initial width is different, break off the beginning of the
    # string and re-wrap the remainder at the regular width.
    first = ''
    if initial_width != width:
        initial = textwrap.wrap(text, width=initial_width)
        first = f'{initial[0]}\n'
        text = ' '.join(initial[1:])

        # Sanity check: If that was the only line, abort here, *without*
        # the antecedent trailer.
        if not text:
            return initial[0]

    # Wrap the remainder of the string at the desired width.
    text = first + textwrap.fill(
        initial_indent=subsequent_indent if first else '',
        subsequent_indent=subsequent_indent,
        text=text,
        width=width,
    )

    # Replace all the line endings with the antecedent trailer,
    # and return the resulting string.
    return text.replace('\n', f'{antecedent_trailer}\n')


# --- api_factory/utils/placeholder.py ---

class Placeholder(str):
    """A string which always has a boolean value of False.

    Used for sending placeholder strings to templates, such that the string
    is meaningful if used, but an ``if`` check will resolve to False.
    """
    def __bool__(self):
        return False
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/packages/gapic-generator/docs/api-configuration.rst b/packages/gapic-generator/docs/api-configuration.rst new file mode 100644 index 000000000000..92c16efed352 --- /dev/null +++ b/packages/gapic-generator/docs/api-configuration.rst @@ -0,0 +1,135 @@ +.. _api-configuration: + +API Configuration +----------------- + +This code generator relies on some configuration not specified in many +published protocol buffers. + +.. warning:: + + In fact, this is intended to serve as a reference implementation for + proposed configuration, so as of this writing it is not published anywhere, + and is subject to change. + +This plugin *will* successfully publish a library on a valid protobuf API +even without any additional information set, but may require some +post-processing work by a human in this case before the resulting client +library will install or work. (Look for values enclosed by ``<<<`` and +``>>>`` to quickly spot these.) + +Reading further assumes you are at least nominally familiar with protocol +buffers and their syntax. You may not be familiar with `options`_ yet; it is +recommended to read up on them before continuing. (As a note, no need to +learn about creating custom options; being able to set options that are +already defined is sufficient.) + +.. _options: https://developers.google.com/protocol-buffers/docs/proto3#options + + +Annotations and Imports +~~~~~~~~~~~~~~~~~~~~~~~ + +As mentioned above, this tool uses a interface specification that is +currently experimental. + +When specifying an annotation, your proto will need to import the file +where the annotation is defined. If you try to use an annotation without +importing the dependency proto, then ``protoc`` will give you an error. + +All of the protos discussed here are in the `googleapis`_ repository, +on the ``annotated`` branch, and they are consistently in the +``google.api.experimental`` package. 
While this remains experimental,
+the best course is probably to clone the repository:
+
+.. code-block:: shell
+
+    $ git clone git@github.com:googleapis/googleapis.git
+    $ cd googleapis
+    $ git checkout --track -b annotated origin/annotated
+
+Once this is done, you will need to specify the root of this repository
+on disk as a ``--proto_path`` whenever invoking ``protoc``.
+
+.. _googleapis: https://github.com/googleapis/googleapis/
+
+
+API Client Information
+~~~~~~~~~~~~~~~~~~~~~~
+
+The most important piece of information this plugin requires is information
+about the client library itself: what should it be called, how is it licensed,
+and so on.
+
+This is rolled up into a structure spelled ``Client``, and the annotation
+is defined in `google/api/experimental/client.proto`_.
+
+The option may be defined as a full structure at the top level of the proto
+file. It is recommended that this be declared under other ``option``
+directives, and above services or messages.
+
+You really need ``name`` (otherwise the plugin will use a placeholder), but
+everything else is fundamentally optional. Here is a complete annotation:
+
+.. code-block:: protobuf
+
+    option (google.api.experimental.client) = {
+      name: "News"
+      namespace: ["Daily Planet"]
+      version: "v1"
+      documentation: {
+        overview: "The News API allows you to retrieve and search articles posted to the Daily Planet, the most trusted newspaper in Metropolis."
+        summary: "All the news fit to print."
+        documentation_root_url: "bogus://dailyplanet.com/news-api/docs/"
+      }
+      copyright: { fullname: "Perry White" year: "2018" }
+      license: "Apache-2.0"
+    };
+
+
+Service Information
+~~~~~~~~~~~~~~~~~~~
+
+In order to properly connect to an API, the client library needs to know
+where the API service is running, as well as what (if anything) else is
+required in order to properly connect.
+
+This plugin understands two options for this, which are also defined in
+`google/api/experimental/client.proto`_.
Rather than being options on +top level files, however, these are both options on `services`_. If an API +defines more than one service, these options do *not* need to match between +them. + +The first option is the **host** where the service can be reached: + +.. code-block:: protobuf + + service News { + option (google.api.experimental.host) = "newsapi.dailyplanet.com" + } + +The second option is any oauth scopes which are needed. Google's auth +libraries (such as `google-auth`_ in Python, which code generated by +this plugin uses) expect that credentials declare what scopes they believe +they need, and the auth libraries do the right thing in the situation where +authorization is needed, access has been revoked, and so on. + +These are a list, which is accomplished by specifying the option more than +once: + +.. code-block:: protobuf + + service News { + option (google.api.experimental.oauth_scopes) = "https://newsapi.dailyplanet.com/auth/list-articles" + option (google.api.experimental.oauth_scopes) = "https://newsapi.dailyplanet.com/auth/read-article" + } + +.. _services: https://developers.google.com/protocol-buffers/docs/proto3#services +.. _google/api/experimental/client.proto: https://github.com/googleapis/googleapis/blob/annotated/google/api/experimental/client.proto#L35 +.. _google-auth: https://github.com/GoogleCloudPlatform/google-auth-library-python + +Future Work +~~~~~~~~~~~ + +Support for other annotated behavior, such as overloads, long-running +operations, samples, and header values is a work in progress. diff --git a/packages/gapic-generator/docs/conf.py b/packages/gapic-generator/docs/conf.py new file mode 100644 index 000000000000..7f0bf1656ee9 --- /dev/null +++ b/packages/gapic-generator/docs/conf.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. 
# -- Path setup --------------------------------------------------------------

# Make the package root importable so autodoc can find the modules.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))


# -- Project information -----------------------------------------------------

project = 'API Client Generator for Python'
copyright = '2018, Google LLC'
author = 'Luke Sneeringer'

# The short X.Y version.
version = '0.0.2'
# The full version, including alpha/beta/rc tags.
release = '0.0.2'


# -- General configuration ---------------------------------------------------

# Sphinx extensions used by this documentation build.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
    'sphinx.ext.napoleon',
]

# Paths containing templates, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for autogenerated content; None means English.
language = None

# Patterns, relative to the source directory, to ignore when looking
# for source files. Also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'


# -- Options for HTML output -------------------------------------------------

# The theme for HTML and HTML Help pages.
html_theme = 'sphinx_rtd_theme'

# Paths containing custom static files (such as style sheets),
# copied after the builtin static files.
html_static_path = []


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for the HTML help builder.
htmlhelp_basename = 'gapic-generator-python'


# -- Options for LaTeX output ------------------------------------------------

# LaTeX customizations (papersize, pointsize, preamble, figure_align);
# all defaults are used.
latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples:
# (source start file, target name, title, author, documentclass).
latex_documents = [
    (master_doc, 'APIClientGeneratorforPython.tex',
     'API Client Generator for Python Documentation',
     'Luke Sneeringer', 'manual'),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples:
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'apiclientgeneratorforpython',
     'API Client Generator for Python Documentation',
     [author], 1)
]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples:
# (source start file, target name, title, author, dir menu entry,
# description, category).
texinfo_documents = [
    (master_doc, 'APIClientGeneratorforPython',
     'API Client Generator for Python Documentation',
     author, 'APIClientGeneratorforPython', 'One line description of project.',
     'Miscellaneous'),
]


# -- Options for intersphinx extension ---------------------------------------

# Refer to the Python 3 standard library for cross-references.
intersphinx_mapping = {'https://docs.python.org/3/': None}
Additionally, this plugin makes some assumptions at the +margins according to `Google API design conventions`_, so following those +conventions is recommended. + +Example +~~~~~~~ + +If you want to experiment with an already-existing API, one example is +available. (Reminder that this is still considered experimental, so apologies +for this part being a bit strange.) + +You need to clone the `googleapis`_ repository from GitHub, and change to +a special branch: + +.. code-block:: shell + + $ git clone git@github.com:googleapis/googleapis.git + $ cd googleapis + $ git checkout --track -b annotated origin/annotated + +The API available as an example (thus far) is the `Google Cloud Vision`_ API, +available in the ``google/cloud/vision/v1/`` subdirectory. This will be used +for the remainder of the examples on this page. + +This branch also makes available the proto specification of the configuration +itself, which is explained in :ref:`api-configuration`. + +.. _googleapis: https://github.com/googleapis/googleapis/ +.. _Google Cloud Vision: https://cloud.google.com/vision/ + + +Compiling an API +~~~~~~~~~~~~~~~~ + +To get a client library, you need to both compile the proto descriptors +into compiled message types (which is functionality built into ``protoc``) +and also into a client library (which is what this plugin does). + +These can be done in the same step. ``protoc`` requires an output destination +for each plugin invoked; you just want these to match: + +.. code-block:: shell + + $ protoc google/cloud/vision/v1/*.proto \ + --python_out=/dest/ \ + --pyclient_out=/dest/ + +.. note:: + + **A reminder about paths.** + + Remember that ``protoc`` is particular about paths. It expects to be run + from the "base path" used for imports within the protos. If you are + running ``protoc`` from any other location, you will need to provide + ``--proto_path``. 
+ + +Running a Client Library +~~~~~~~~~~~~~~~~~~~~~~~~ + +Once you have compiled a client library, it is time for the fun part: +actually running it! + +Create a virtual environment for the library: + +.. code-block:: shell + + $ virtualenv ~/.local/client-lib --python=`which python3.6` + $ source ~/.local/client-lib/bin/activate + +Next, install the library: + +.. code-block:: shell + + $ cd /dest/ + $ pip install --editable . + +Now it is time to play with it! +Here is a test script: + +.. code-block:: python + + # These are the compiled protocol buffer types generated by + # `protoc --python_out`. + from google.cloud.vision.v1 import image_annotator_pb2 as types + + # This is the client library generated by this plugin. + import image_annotator + + # Instantiate the client. + # + # If you need to manually specify credentials, do so here. + # More info: https://cloud.google.com/docs/authentication/getting-started + # + # If you wish, you can send `transport='grpc'` or `transport='http'` + # to change which underlying transport layer is being used. + ia = image_annotator.ImageAnnotator() + + # Piece together the request object. + request = types.BatchAnnotateImagesRequest(requests=[ + types.AnnotateImageRequest( + features=[types.Feature( + type=types.Feature.Type.Value('LABEL_DETECTION'), + )], + image=types.Image(source=types.ImageSource( + image_uri='https://s3.amazonaws.com/cdn0.michiganbulb.com' + '/images/350/66623.jpg', + )), + ), + ]) + + # Send the request to the server and get the response. + response = ia.batch_annotate_images(request) + print(response) + + + + +.. 
_Google API design conventions: https://cloud.google.com/apis/design/ diff --git a/packages/gapic-generator/docs/index.rst b/packages/gapic-generator/docs/index.rst new file mode 100644 index 000000000000..0ec3699bb16d --- /dev/null +++ b/packages/gapic-generator/docs/index.rst @@ -0,0 +1,26 @@ +API Client Generator for Python +=============================== + + A generator for protocol buffer described APIs for and in Python 3. + +This program accepts an API specified in `protocol buffers`_ and generates +a client library, which can be used to interact with that API. It is +implemented as a plugin to ``protoc``, the protocol buffer compiler. + +.. warning:: + + This tool is a proof of concept and is being iterated on rapidly. + Feedback is welcome, but please do not try to use this in some kind of + system where stability is an expectation. + +.. _protocol buffers: https://developers.google.com/protocol-buffers/ + +.. toctree:: + :maxdepth: 2 + + installing + getting-started + api-configuration + process + status + reference/index diff --git a/packages/gapic-generator/docs/installing.rst b/packages/gapic-generator/docs/installing.rst new file mode 100644 index 000000000000..0b42ccfa379c --- /dev/null +++ b/packages/gapic-generator/docs/installing.rst @@ -0,0 +1,71 @@ +Installing +---------- + +protoc +~~~~~~ + +This tool is implemented as a plugin to the `protocol buffers`_ compiler, so +in order to use it, you will need to have the ``protoc`` command available. + +The `release page`_ on GitHub contains the download you need. + +.. note:: + + You may notice both packages that designate languages (e.g. + ``protobuf-python-X.Y.Z.tar.gz``) as well as packages that + designate architectures (e.g. ``protoc-X.Y.Z-linux-x86_64.zip``). You want + the one that designates an architecture; your goal here is to have a CLI + command. + +.. _protocol buffers: https://developers.google.com/protocol-buffers/ +.. 
_release page: https://github.com/google/protobuf/releases + +It is likely preferable to install ``protoc`` somewhere on your shell's path, +but this is not a strict requirement (as you will be invoking it directly). + +To ensure it is installed propertly: + +.. code-block:: shell + + $ protoc --version + libprotoc 3.5.1 + + +API Generator for Python +~~~~~~~~~~~~~~~~~~~~~~~~ + +This package is provided as a standard Python library, and can be installed +the usual ways. It fundamentally provides a CLI command, +``protoc-gen-pyclient``, so you will want to install using a mechanism +that is conducive to making CLI commands available. + +Additionally, this program currently only runs against Python 3.6, so you +will need that installed. (Most Linux distributions ship with earlier +versions.) Use `pyenv`_ to get Python 3.6 installed in a friendly way. + +As for this library itself, the recommended installation approach is +`pipsi`_. + +.. code-block:: shell + + # Due to its experimental state, this tool is not published to a + # package manager, and pip can not install from git-on-borg; + # you should clone it. + git clone sso://team/apiclient-eng/python-client-generator + cd python-client-generator/ + + # Install the tool. This will handle the virtualenv for you, and + # make an appropriately-aliased executable. + # The `--editable` flag is only necessary if you want to work on the + # tool (as opposed to just use it). + pipsi install --editable --python=`which python3.6` . + +To ensure the tool is installed properly: + +.. code-block:: shell + + $ which protoc-gen-pyclient + /path/to/protoc-gen-pyclient + +.. _pyenv: https://github.com/pyenv/pyenv +.. 
_pipsi: https://github.com/mitsuhiko/pipsi diff --git a/packages/gapic-generator/docs/process.rst b/packages/gapic-generator/docs/process.rst new file mode 100644 index 000000000000..b24fa3368841 --- /dev/null +++ b/packages/gapic-generator/docs/process.rst @@ -0,0 +1,112 @@ +How Code Generation Works +------------------------- + +This page gives a brief decription of how *this* code generator works. +It is not intended to be the final treatise on how to write *any* code +generator. It is meant to be a reference for those who wish to contribute +to this effort, or to use it as a reference implementation. + +There are two steps: a **parse** step which essentially involves reorganizing +data to make it more friendly to templates, and a **translation** step which +sends information about the API to templates, which ultimately write the +library. + +The protoc contract +~~~~~~~~~~~~~~~~~~~ + +This code generator is written as a ``protoc`` plugin, which operates on +a defined contract. The contract is straightforward: a plugin must +accept a `CodeGeneratorRequest `_ (essentially a sequence of +`FileDescriptor `_ objects) and output a +`CodeGeneratorResponse `_. + +If you are unfamiliar with ``protoc`` plugins, welcome! That last paragraph +likely sounded not as straightforward as claimed. It may be useful to read +`plugin.proto`_ and `descriptor.proto`_ before continuing on. The former +describes the contract with plugins (such as this one) and is relatively +easy to digest, the latter describes protocol buffer files themselves and is +rather dense. The key point to grasp is that each ``.proto`` *file* compiles +into one of these proto messages (called *descriptors*), and this plugin's +job is to parse those descriptors. + +.. _plugin.proto: https://github.com/google/protobuf/blob/master/src/google/protobuf/compiler/plugin.proto +.. 
_descriptor.proto: https://github.com/google/protobuf/blob/master/src/google/protobuf/descriptor.proto + +Parse +~~~~~ + +As mentioned, this plugin is divided into two steps. The first step is +parsing. The guts of this is handled by the :class:`~.schema.api.API` object, +which is this plugin's internal representation of the full API client. + +In particular, this class has a :meth:`~.schema.api.API.load` method which +accepts a `FileDescriptor`_ (remember, this is ``protoc``'s internal +representation of each proto file). The method is called once for each proto +file you send to be compiled as well as each dependency. (``protoc`` itself +is smart enough to de-duplicate and send everything in the right order.) + +The :class:`~.schema.api.API` object's primary purpose is to make sure all +the information from the proto files is in one place, and reasonably +accessible by `Jinja`_ templates (which by design are not allowed to call +arbitrary Python code). Mostly, it tries to avoid creating an entirely +duplicate structure, and simply wraps the descriptor representations. +However, some data needs to be moved around to get it into a structure +useful for templates (in particular, descriptors have an unfriendly approach +to sorting protobuf comments, and this parsing step places these back +alongside their referent objects). + +The internal data model does use wrapper classes around most of the +descriptors, such as :class:`~.schema.wrappers.Service` and +:class:`~.schema.wrappers.MessageType`. These consistently contain their +original descriptor (which is always spelled with a ``_pb`` suffix, e.g. +the ``Service`` wrapper class has a ``service_pb`` instance variable). +These exist to handle bringing along additional relevant data (such as the +protobuf comments as mentioned above) and handling resolution of references +(for example, allowing a :class:`~.schema.wrappers.Method` to reference its +input and output types, rather than just the strings). 
+ +These wrapper classes follow a consistent structure: + +* They define a ``__getattr__`` method that defaults to the wrapped + descriptor unless the wrapper itself provides something, making the wrappers + themselves transparent to templates. +* They provide a ``meta`` attribute with metadata (package information and + documentation). + +Translation +~~~~~~~~~~~ + +The translation step follows a straightforward process to write the contents +of client library files. + +First, it loads every template in the ``generator/templates/`` directory. +These are `Jinja`_ templates. There is no master list of templates; +it is assumed that every template in this directory should be rendered +(unless its name begins with an underscore), and that the name of the +resulting file should be the same as the template's file name with the +``.j2`` suffix truncated. + +Every template receives **one** variable, spelled ``api``. It is the +:class:`~.schema.api.API` object that was pieced together in the parsing step. + +There is one caveat to the above, which is that an API can have more than +one service. Therefore, the ``generator/templates/service/`` directory +is a special case. These files are rendered *once per service*, with the +``service`` directory name changed to the name of the service itself +(in snake case, because this is Python). Additionally, these templates +receive two variables: the ``api`` variable discussed above, as well as a +variable spelled ``service``, which corresponds to the +:class:`~.schema.wrappers.Service` currently being iterated over. + +.. note:: + + The Jinja environment also receives a small number of filters useful + for writing properly formatted templates (e.g. a ``snake_case`` filter); + these are defined in :meth:`~.generator.generate` where the environment is + created. + +After all templates are processed, any files in the ``generator/files/`` +directory are written. 
These are not templates, and they are read into +memory and eventually written with no processing whatsoever. + +.. _Jinja: http://jinja.pocoo.org/docs/2.10/ diff --git a/packages/gapic-generator/docs/reference/generator.rst b/packages/gapic-generator/docs/reference/generator.rst new file mode 100644 index 000000000000..722d35b5d0b6 --- /dev/null +++ b/packages/gapic-generator/docs/reference/generator.rst @@ -0,0 +1,10 @@ +generator +--------- + +.. automodule:: api_factory.generator + +.. automodule:: api_factory.generator.generator + :members: + +.. automodule:: api_factory.generator.loader + :members: diff --git a/packages/gapic-generator/docs/reference/index.rst b/packages/gapic-generator/docs/reference/index.rst new file mode 100644 index 000000000000..a8b65b7ca489 --- /dev/null +++ b/packages/gapic-generator/docs/reference/index.rst @@ -0,0 +1,29 @@ +Reference +--------- + +Below is a reference for the major classes and functions within this +module. + +It is split into three main sections: + +- The ``schema`` module contains data classes that make up the internal + representation for an :class:`~.API`. The API contains thin wrappers + around protocol buffer descriptors; the goal of the wrappers is to + mostly expose the underlying descriptors, but make some of the more + complicated access and references easier in templates. +- The ``generator`` module contains most of the logic. Its + :class:`~.Generator` class is the thing that takes a request from ``protoc`` + and gives it back a response. +- The ``utils`` module contains utility functions needed elsewhere, + including some functions that are sent to all templates as Jinja filters. + +.. note:: + + Templates are housed within the ``generator`` module's directory. + +.. 
toctree:: + :maxdepth: 4 + + generator + schema + utils diff --git a/packages/gapic-generator/docs/reference/schema.rst b/packages/gapic-generator/docs/reference/schema.rst new file mode 100644 index 000000000000..7080a0db00f0 --- /dev/null +++ b/packages/gapic-generator/docs/reference/schema.rst @@ -0,0 +1,22 @@ +schema +------ + +.. automodule:: api_factory.schema + +api +~~~ + +.. automodule:: api_factory.schema.api + :members: + +metadata +~~~~~~~~ + +.. automodule:: api_factory.schema.metadata + :members: + +wrappers +~~~~~~~~ + +.. automodule:: api_factory.schema.wrappers + :members: diff --git a/packages/gapic-generator/docs/reference/utils.rst b/packages/gapic-generator/docs/reference/utils.rst new file mode 100644 index 000000000000..232157f7065c --- /dev/null +++ b/packages/gapic-generator/docs/reference/utils.rst @@ -0,0 +1,8 @@ +utils +----- + +.. automodule:: api_factory.utils.case + :members: + +.. automodule:: api_factory.utils.lines + :members: diff --git a/packages/gapic-generator/docs/status.rst b/packages/gapic-generator/docs/status.rst new file mode 100644 index 000000000000..7f836e293525 --- /dev/null +++ b/packages/gapic-generator/docs/status.rst @@ -0,0 +1,24 @@ +Features and Limitations +------------------------ + +Nice things this client does: + +- Implemented in pure Python, with language-idiomatic templating tools. +- It supports multiple transports: both gRPC and protobuf over HTTP/1.1. + A JSON-based transport would be easy to add. +- It uses a lighter-weight configuration, specified in the protocol + buffer itself. + +As this is experimental work, please note the following limitations: + +- The output only works on Python 3.4 and above. +- The configuration annotations are experimental and provided in + `an awkward location`_. +- gRPC must be installed even if you are not using it (this is due to + some minor issues in ``api-core``). +- Only unary calls are implemented at this point. +- No support for GAPIC features (e.g. 
LRO, method argument flattening) yet. +- No support for samples yet. +- No tests are implemented. + +.. _an awkward location: https://github.com/googleapis/googleapis/blob/annotated/google/api/experimental/ diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py new file mode 100644 index 000000000000..00ccd960f648 --- /dev/null +++ b/packages/gapic-generator/nox.py @@ -0,0 +1,54 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import os + +import nox + + +@nox.session +def unit(session, python_version='3.6'): + """Run the unit test suite.""" + + session.interpreter = 'python{0}'.format(python_version) + + session.virtualenv_dirname = 'unit-{0}'.format(python_version) + + session.install('coverage', 'pytest', 'pytest-cov') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=api_factory', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit'), + ) + + +@nox.session +def docs(session): + """Build the docs.""" + + session.interpreter = 'python3.6' + session.install('sphinx', 'sphinx_rtd_theme') + session.install('.') + + # Build the docs! 
+ session.run('rm', '-rf', 'docs/_build/') + session.run('sphinx-build', '-W', '-b', 'html', '-d', + 'docs/_build/doctrees', 'docs/', 'docs/_build/html/') diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py new file mode 100644 index 000000000000..38f031050e48 --- /dev/null +++ b/packages/gapic-generator/setup.py @@ -0,0 +1,65 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os + +from setuptools import find_packages, setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + +with io.open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: + README = file_obj.read() + +setup( + name='python-api-factory', + version='0.0.2', + license='Apache 2.0', + author='Luke Sneeringer', + author_email='lukesneeringer@google.com', + url='https://github.com/googleapis/python-api-factory.git', + packages=find_packages(exclude=['protos', 'tests']), + description='Python client library generator for APIs defined by protocol' + 'buffers', + long_description=README, + entry_points="""[console_scripts] + protoc-gen-dump=api_factory.cli.dump:dump + protoc-gen-pyclient=api_factory.cli.generate:generate + """, + platforms='Posix; MacOS X', + include_package_data=True, + install_requires=( + 'click >= 6.7', + 'googleapis-common-protos >= 1.5.3', + 'grpcio >= 1.9.1', + 'jinja2 >= 2.10', + 'protobuf >= 3.5.1', + ), + extras_require={ + ':python_version<"3.7"': ('dataclasses >= 0.4',), + }, + 
classifiers=( + 'Development Status :: 2 - Pre-Alpha', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: POSIX', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Topic :: Software Development :: Code Generators', + 'Topic :: Software Development :: Libraries :: Python Modules', + ), + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py new file mode 100644 index 000000000000..5ffd391baba8 --- /dev/null +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -0,0 +1,215 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os +from unittest import mock + +import jinja2 + +from google.protobuf import descriptor_pb2 +from google.protobuf.compiler import plugin_pb2 + +from api_factory.generator import generator +from api_factory.schema.api import API +from api_factory.schema import wrappers +from api_factory import utils + + +def test_constructor(): + # Create a bogus and very stripped down request. + request = plugin_pb2.CodeGeneratorRequest(proto_file=[ + # We are just going to prove that each file is loaded, + # so it does not matter what is in them. 
+ descriptor_pb2.FileDescriptorProto(), + descriptor_pb2.FileDescriptorProto(), + ]) + + # Create a generator, prove it has an API. + # This is somewhat internal implementation baseball, but realistically + # the only reasonable way to write these tests is to split them up by + # internal segment. + with mock.patch.object(API, 'load') as load: + g = generator.Generator(request) + assert load.call_count == 2 + assert isinstance(g._api, API) + + # Assert we have a Jinja environment also, with the expected filters. + # Still internal implementation baseball, but this is the best place + # to establish this and templates will depend on it. + assert isinstance(g._env, jinja2.Environment) + assert 'snake_case' in g._env.filters + assert 'subsequent_indent' in g._env.filters + assert 'wrap' in g._env.filters + + +def test_get_response(): + # Create a generator with mock data. + # + # We want to ensure that templates are rendered for each service, + # which we prove by sending two services. + file_pb2 = descriptor_pb2.FileDescriptorProto( + name='bacon.proto', + package='foo.bar.v1', + service=[descriptor_pb2.ServiceDescriptorProto(name='SpamService'), + descriptor_pb2.ServiceDescriptorProto(name='EggsService')], + ) + g = make_generator(proto_file=[file_pb2]) + + # Mock all the rendering methods. + with mock.patch.object(g, '_render_templates') as _render_templates: + _render_templates.return_value = [ + plugin_pb2.CodeGeneratorResponse.File( + name='template_file', + content='This was a template.', + ), + ] + with mock.patch.object(g, '_read_flat_files') as _read_flat_files: + _read_flat_files.return_value = [ + plugin_pb2.CodeGeneratorResponse.File( + name='flat_file', + content='This was a flat file.', + ), + ] + + # Okay, now run the `get_response` method. + response = g.get_response() + + # First and foremost, we care that we got a valid response + # object back (albeit not so much what is in it). 
+ assert isinstance(response, plugin_pb2.CodeGeneratorResponse) + + # Next, determine that flat files were read. + assert _read_flat_files.call_count == 1 + _, args, _ = _read_flat_files.mock_calls[0] + assert args[0].endswith('files') + + # Next, determine that the general API templates and service + # templates were both called; the method should be called + # once per service plus one for the API as a whole. + assert _render_templates.call_count == len(file_pb2.service) + 1 + + # The service templates should have been called with the + # filename transformation and the additional `service` variable. + for call in _render_templates.mock_calls: + _, args, kwargs = call + if args[0] != g._env.loader.service_templates: + continue + service = kwargs['additional_context']['service'] + assert isinstance(service, wrappers.Service) + assert kwargs['transform_filename']( + 'service/foo', + ) == f'{utils.to_snake_case(service.name)}/foo' + + +def test_render_templates(): + g = make_generator() + + # Determine the templates to be rendered. + templates = ('foo.j2', 'bar.j2') + with mock.patch.object(jinja2.Environment, 'get_template') as get_template: + get_template.side_effect = lambda t: jinja2.Template( + f'Hello, I am `{t}`.', + ) + + # Render the templates. + files = g._render_templates(templates) + + # Test that we get back the expected content for each template. + assert len(files) == 2 + assert files[0].name == 'foo' + assert files[1].name == 'bar' + assert files[0].content == 'Hello, I am `foo.j2`.\n' + assert files[1].content == 'Hello, I am `bar.j2`.\n' + + +def test_render_templates_additional_context(): + g = make_generator() + + # Determine the templates to be rendered. + templates = ('foo.j2',) + with mock.patch.object(jinja2.Environment, 'get_template') as get_template: + get_template.return_value = jinja2.Template('A {{ thing }}!') + + # Render the templates. 
+ files = g._render_templates(templates, additional_context={ + 'thing': 'bird', + }) + + # Test that we get back the expected content for each template. + assert len(files) == 1 + assert files[0].name == 'foo' + assert files[0].content == 'A bird!\n' + + +def test_render_templates_filename_rename(): + g = make_generator() + + # Determine the templates to be rendered. + templates = ('service/foo.py.j2', 'service/bar.py.j2', 'plain.py.j2') + with mock.patch.object(jinja2.Environment, 'get_template') as get_template: + get_template.return_value = jinja2.Template('Template body.') + + # Render the templates. + files = g._render_templates( + templates, + transform_filename=lambda f: f.replace('service/', 'each/'), + ) + + # Test that we get back the expected content for each template. + assert len(files) == 3 + assert files[0].name == 'each/foo.py' + assert files[1].name == 'each/bar.py' + assert files[2].name == 'plain.py' + assert all([f.content == 'Template body.\n' for f in files]) + + +def test_read_flat_files(): + g = make_generator() + + # This function walks over a directory on the operating system; + # even though that directory is actually in this repo, fake it. + with mock.patch.object(os, 'walk') as walk: + walk.return_value = ( + ('files/', [], ['foo.ext']), + ('files/other/', [], ['bar.ext']), + ) + + # This function also reads files from disk, fake that too. + with mock.patch.object(io, 'open') as open: + open.side_effect = lambda fn, mode: io.StringIO(f'abc-{fn}-{mode}') + + # Okay, now we can run the function. + files = g._read_flat_files('files/') + + # Each file should have been opened, so one call to `io.open` + # per file. + assert open.call_count == len(walk.return_value) + + # `os.walk` should have been called once and exactly once, + # with unmodified input. 
+ walk.assert_called_once_with('files/') + + # Lastly, we should have gotten one file back for each file + # yielded by walk, and each one should have the expected contents + # (the 'abc' prefix and then the filename and read mode). + assert len(files) == 2 + assert files[0].name == 'foo.ext' + assert files[1].name == 'other/bar.ext' + assert files[0].content == 'abc-files/foo.ext-r' + assert files[1].content == 'abc-files/other/bar.ext-r' + + +def make_generator(**kwargs): + return generator.Generator(plugin_pb2.CodeGeneratorRequest(**kwargs)) diff --git a/packages/gapic-generator/tests/unit/generator/test_loader.py b/packages/gapic-generator/tests/unit/generator/test_loader.py new file mode 100644 index 000000000000..056b72d80fb7 --- /dev/null +++ b/packages/gapic-generator/tests/unit/generator/test_loader.py @@ -0,0 +1,39 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest import mock + +from api_factory.generator.loader import TemplateLoader + + +def test_service_templates(): + loader = TemplateLoader(searchpath='<<< IRRELEVANT >>>') + with mock.patch.object(loader, 'list_templates') as list_templates: + list_templates.return_value = [ + '_base.j2', 'foo.j2', 'bar.j2', + 'service/spam.j2', 'service/eggs.j2', 'service/py/spameggs.j2', + ] + assert loader.service_templates == { + 'service/spam.j2', 'service/eggs.j2', 'service/py/spameggs.j2', + } + + +def test_api_templates(): + loader = TemplateLoader(searchpath='<<< IRRELEVANT >>>') + with mock.patch.object(loader, 'list_templates') as list_templates: + list_templates.return_value = [ + '_base.j2', 'foo.j2', 'bar.j2', + 'service/spam.j2', 'service/eggs.j2', 'service/py/spameggs.j2', + ] + assert loader.api_templates == {'foo.j2', 'bar.j2'} diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py new file mode 100644 index 000000000000..d95b8026edbd --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -0,0 +1,295 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest import mock + +import pytest + +from google.protobuf import descriptor_pb2 + +from api_factory.schema import metadata +from api_factory.schema import wrappers +from api_factory.schema.api import API +from api_factory.schema.pb import client_pb2 + + +def test_long_name(): + api = make_api( + client=make_client(name='Genie', namespace=['Agrabah', 'Lamp']), + ) + assert api.long_name == 'Agrabah Lamp Genie' + + +def test_warehouse_package_name_placeholder(): + api = make_api(client=make_client(name='')) + assert api.warehouse_package_name == '<<< PACKAGE NAME >>>' + assert bool(api.warehouse_package_name) is False + + +def test_warehouse_package_name_no_namespace(): + api = make_api(client=make_client(name='BigQuery', namespace=[])) + assert api.warehouse_package_name == 'bigquery' + + +def test_warehouse_package_name_with_namespace(): + api = make_api(client=make_client( + name='BigQuery', + namespace=('Google', 'Cloud'), + )) + assert api.warehouse_package_name == 'google-cloud-bigquery' + + +def test_load(): + sentinel_message = descriptor_pb2.DescriptorProto() + sentinel_enum = descriptor_pb2.EnumDescriptorProto() + sentinel_service = descriptor_pb2.ServiceDescriptorProto() + + # Create a file descriptor proto. It does not matter that none + # of the sentinels have actual data because this test just ensures + # they are sent off to the correct methods unmodified. + fdp = descriptor_pb2.FileDescriptorProto( + name='my_proto_file.proto', + package='google.example.v1', + message_type=[sentinel_message], + enum_type=[sentinel_enum], + service=[sentinel_service], + ) + + # Create an API object. + api = make_api() + + # Test the load function. + with mock.patch.object(api, '_load_children') as lc: + api.load(fdp) + + # There should be three total calls to load the different types + # of children. + assert lc.call_count == 3 + + # The message type should come first. 
+ _, args, kwargs = lc.mock_calls[0] + assert args[0][0] == sentinel_message + assert kwargs['loader'] == api._load_descriptor + + # The enum type should come second. + _, args, kwargs = lc.mock_calls[1] + assert args[0][0] == sentinel_enum + assert kwargs['loader'] == api._load_enum + + # The services should come third. + _, args, kwargs = lc.mock_calls[2] + assert args[0][0] == sentinel_service + assert kwargs['loader'] == api._load_service + + +def test_load_comments_top_level(): + L = descriptor_pb2.SourceCodeInfo.Location + + # Create a file descriptor proto. + # This has comments which should be largely sharded and ferried off to the + # correct sub-methods. + locations = [ + L(path=[4, 0], leading_comments='foo'), + L(path=[4, 0, 2, 0], leading_comments='bar'), + L(path=[6, 0], leading_comments='baz'), + ] + fdp = descriptor_pb2.FileDescriptorProto( + name='my_proto_file.proto', + package='google.example.v1', + source_code_info=descriptor_pb2.SourceCodeInfo(location=locations) + ) + + # Create an API object. + api = make_api() + + # Test the load function. This sends empty arrays to each of the + # individual child-processing function, but sends meaningful slices of + # documentation (which is what this test is trying to confirm). + with mock.patch.object(api, '_load_children') as lc: + api.load(fdp) + + # There are still three total calls, like above. + assert lc.call_count == 3 + + # The `message_type` field has the ID of 4 in `FileDescriptorProto`, + # so the two whose path begins with 4 should be sent, and in the + # ad hoc dictionary that this method creates. + _, args, kwargs = lc.mock_calls[0] + assert kwargs['loader'] == api._load_descriptor + assert kwargs['info'] == { + 0: {'TERMINAL': locations[0], 2: {0: {'TERMINAL': locations[1]}}}, + } + + # The `enum_type` field has the ID of 5 in `FileDescriptorProto`, + # but no location objects were sent with a matching path, so it + # will just get an empty dictionary. 
+ _, args, kwargs = lc.mock_calls[1] + assert kwargs['loader'] == api._load_enum + assert kwargs['info'] == {} + + # The `service_type` field has the ID of 6 in `FileDescriptorProto`, + # so it will get the one location object that begins with 6. + _, args, kwargs = lc.mock_calls[2] + assert kwargs['loader'] == api._load_service + assert kwargs['info'] == {0: {'TERMINAL': locations[2]}} + + +def test_load_children(): + # Set up the data to be sent to the method. + children = (mock.sentinel.child_zero, mock.sentinel.child_one) + address = metadata.Address() + info = {0: mock.sentinel.info_zero, 1: mock.sentinel.info_one} + loader = mock.Mock(create_autospec=lambda child, address, info: None) + + # Run the `_load_children` method. + make_api()._load_children(children, loader, address, info) + + # Assert that the loader ran in the expected way (twice, once per child). + assert loader.call_count == 2 + _, args, kwargs = loader.mock_calls[0] + assert args[0] == mock.sentinel.child_zero + assert kwargs['info'] == mock.sentinel.info_zero + _, args, kwargs = loader.mock_calls[1] + assert args[0] == mock.sentinel.child_one + assert kwargs['info'] == mock.sentinel.info_one + + +def test_get_fields(): + L = descriptor_pb2.SourceCodeInfo.Location + + # Set up data to test with. + field_pbs = [ + descriptor_pb2.FieldDescriptorProto(name='spam'), + descriptor_pb2.FieldDescriptorProto(name='eggs'), + ] + address = metadata.Address(package=['foo', 'bar'], module='baz') + info = {1: {'TERMINAL': L(leading_comments='Eggs.')}} + + # Run the method under test. + fields = make_api()._get_fields(field_pbs, address=address, info=info) + + # Test that we get two field objects back. + assert len(fields) == 2 + for field in fields.values(): + assert isinstance(field, wrappers.Field) + items = iter(fields.items()) + + # Test that the first field is spam, and it has no documentation + # (since `info` has no `0` key). 
+ field_name, field = next(items) + assert field_name == 'spam' + assert field.meta.doc == '' + + # Test that the second field is eggs, and it does have documentation + # (since `info` has a `1` key). + field_name, field = next(items) + assert field_name == 'eggs' + assert field.meta.doc == 'Eggs.' + + # Done. + with pytest.raises(StopIteration): + next(items) + + +def test_get_methods(): + L = descriptor_pb2.SourceCodeInfo.Location + + # Start with an empty API object. + api = make_api() + + # Load the input and output type for a method into the API object. + address = metadata.Address(package=['foo', 'bar'], module='baz') + api._load_descriptor(descriptor_pb2.DescriptorProto(name='In'), + address=address, info={}) + api._load_descriptor(descriptor_pb2.DescriptorProto(name='Out'), + address=address, info={}) + + # Run the method under test. + method_pb = descriptor_pb2.MethodDescriptorProto( + name='DoThings', + input_type='foo.bar.In', + output_type='foo.bar.Out', + ) + methods = api._get_methods([method_pb], address=address, info={}) + + # Test that we get a method object back. + assert len(methods) == 1 + for method in methods.values(): + assert isinstance(method, wrappers.Method) + items = iter(methods.items()) + + # Test that the method has what we expect, an input and output type + # and appropriate name. + method_key, method = next(items) + assert method_key == 'DoThings' + assert isinstance(method.input, wrappers.MessageType) + assert method.input.name == 'In' + assert isinstance(method.output, wrappers.MessageType) + assert method.output.name == 'Out' + + # Done. 
+ with pytest.raises(StopIteration): + next(items) + + +def test_load_descriptor(): + message_pb = descriptor_pb2.DescriptorProto(name='Riddle') + address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') + api = make_api() + api._load_descriptor(message_pb=message_pb, address=address, info={}) + assert 'foo.bar.v1.Riddle' in api.messages + assert isinstance(api.messages['foo.bar.v1.Riddle'], wrappers.MessageType) + assert api.messages['foo.bar.v1.Riddle'].message_pb == message_pb + + +def test_load_enum(): + # Set up the appropriate protos. + enum_value_pb = descriptor_pb2.EnumValueDescriptorProto(name='A', number=0) + enum_pb = descriptor_pb2.EnumDescriptorProto( + name='Enum', + value=[enum_value_pb], + ) + + # Load it into the API. + address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') + api = make_api() + api._load_enum(enum_pb, address=address, info={}) + + # Assert we got back the right stuff. + assert 'foo.bar.v1.Enum' in api.enums + assert isinstance(api.enums['foo.bar.v1.Enum'], wrappers.EnumType) + assert api.enums['foo.bar.v1.Enum'].enum_pb == enum_pb + assert len(api.enums['foo.bar.v1.Enum'].values) == 1 + + +def test_load_service(): + service_pb = descriptor_pb2.ServiceDescriptorProto(name='RiddleService') + address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') + api = make_api() + api._load_service(service_pb, address=address, info={}) + assert 'foo.bar.v1.RiddleService' in api.services + assert isinstance(api.services['foo.bar.v1.RiddleService'], + wrappers.Service) + assert api.services['foo.bar.v1.RiddleService'].service_pb == service_pb + + +def make_api(client: client_pb2.Client = None) -> API: + return API(client=client or make_client()) + + +def make_client(**kwargs) -> client_pb2.Client: + kwargs.setdefault('name', 'Hatstand') + kwargs.setdefault('namespace', ('Google', 'Cloud')) + kwargs.setdefault('version', 'v1') + return client_pb2.Client(**kwargs) diff --git 
a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py new file mode 100644 index 000000000000..162a75ba39cc --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -0,0 +1,75 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import typing + +from google.protobuf import descriptor_pb2 + +from api_factory.schema import metadata + + +def test_address_str_no_parent(): + addr = metadata.Address(package=['foo', 'bar'], module='baz') + assert str(addr) == 'foo.bar' + + +def test_address_str_parent(): + addr = metadata.Address(package=['foo', 'bar'], module='baz', + parent=['spam', 'eggs']) + assert str(addr) == 'foo.bar.spam.eggs' + + +def test_address_child(): + addr = metadata.Address(package=['foo', 'bar'], module='baz') + child = addr.child('bacon') + assert child.parent == ['bacon'] + assert str(child) == 'foo.bar.bacon' + grandchild = child.child('ham') + assert grandchild.parent == ['bacon', 'ham'] + assert str(grandchild) == 'foo.bar.bacon.ham' + + +def test_doc_nothing(): + meta = metadata.Metadata() + assert meta.doc == '' + + +def test_doc_leading_trumps_all(): + meta = make_doc_meta(leading='foo', trailing='bar', detached=['baz']) + assert meta.doc == 'foo' + + +def test_doc_trailing_trumps_detached(): + meta = make_doc_meta(trailing='spam', detached=['eggs']) + assert meta.doc == 'spam' + + +def test_doc_detached_joined(): + meta = 
make_doc_meta(detached=['foo', 'bar']) + assert meta.doc == 'foo\n\nbar' + + +def make_doc_meta( + *, + leading: str = '', + trailing: str = '', + detached: typing.List[str] = [], + ) -> descriptor_pb2.SourceCodeInfo.Location: + return metadata.Metadata( + documentation=descriptor_pb2.SourceCodeInfo.Location( + leading_comments=leading, + trailing_comments=trailing, + leading_detached_comments=detached, + ), + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py new file mode 100644 index 000000000000..97efd127fe2b --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -0,0 +1,45 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.protobuf import descriptor_pb2 + +from api_factory.schema import wrappers + + +def get_enum() -> wrappers.EnumType: + enum_value_pbs = [ + descriptor_pb2.EnumValueDescriptorProto(name='RED', number=1), + descriptor_pb2.EnumValueDescriptorProto(name='GREEN', number=2), + descriptor_pb2.EnumValueDescriptorProto(name='BLUE', number=3), + ] + enum_pb = descriptor_pb2.EnumDescriptorProto( + name='Color', + value=enum_value_pbs, + ) + return wrappers.EnumType( + enum_pb=enum_pb, + values=[wrappers.EnumValueType(enum_value_pb=evpb) + for evpb in enum_value_pbs], + ) + + +def test_enum_properties(): + enum_type = get_enum() + assert enum_type.name == 'Color' + + +def test_enum_value_properties(): + enum_type = get_enum() + for ev, expected in zip(enum_type.values, ('RED', 'GREEN', 'BLUE')): + assert ev.name == expected diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py new file mode 100644 index 000000000000..e531f3331c91 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -0,0 +1,44 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.protobuf import descriptor_pb2 + +from api_factory.schema import wrappers +from api_factory.schema.metadata import Address, Metadata + + +def get_field() -> wrappers.Field: + field_pb = descriptor_pb2.FieldDescriptorProto( + name='my_field', + number=1, + type=descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_BOOL'), + ) + return wrappers.Field(field_pb=field_pb, meta=Metadata( + address=Address(package=['foo', 'bar'], module='baz'), + documentation=descriptor_pb2.SourceCodeInfo.Location( + leading_comments='Lorem ipsum dolor set amet', + ), + )) + + +def test_field_properties(): + field = get_field() + assert field.name == 'my_field' + assert field.number == 1 + assert field.type == 8 + + +def test_field_metadata(): + field = get_field() + assert field.meta.doc == 'Lorem ipsum dolor set amet' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py new file mode 100644 index 000000000000..19ad71b8350c --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -0,0 +1,47 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.protobuf import descriptor_pb2 + +from api_factory.schema import metadata +from api_factory.schema import wrappers + + +def get_message() -> wrappers.MessageType: + message_pb = descriptor_pb2.DescriptorProto(name='MyMessage') + return wrappers.MessageType( + fields=[], + message_pb=message_pb, + meta=metadata.Metadata( + address=metadata.Address(package=['foo', 'bar'], module='baz'), + documentation=descriptor_pb2.SourceCodeInfo.Location( + leading_comments='Lorem ipsum dolor set amet', + ), + ), + ) + + +def test_message_properties(): + message = get_message() + assert message.name == 'MyMessage' + + +def test_message_docstring(): + message = get_message() + assert message.meta.doc == 'Lorem ipsum dolor set amet' + + +def test_message_python_package(): + message = get_message() + assert message.pb2_module == 'baz_pb2' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py new file mode 100644 index 000000000000..076754417d24 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -0,0 +1,72 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.protobuf import descriptor_pb2 + +from api_factory.schema import metadata +from api_factory.schema import wrappers +from api_factory.schema.pb import overload_pb2 + + +def get_method() -> wrappers.Method: + # Create the address where this method lives, and the types live, + # and make them distinct. + method_addr = metadata.Address(package=['foo', 'bar'], module='baz') + types_addr = metadata.Address(package=['foo', 'bar'], module='bacon') + + # Create the method pb2 and set an overload in it. + method_pb = descriptor_pb2.MethodDescriptorProto( + name='DoTheThings', + input_type='foo.bar.Input', + output_type='foo.bar.Output', + ) + + # Set an overload in the method descriptor. + ext_key = overload_pb2.overloads + method_pb.options.Extensions[ext_key].extend([overload_pb2.Overload()]) + + # Instantiate the wrapper class. + return wrappers.Method( + method_pb=method_pb, + input=wrappers.MessageType( + fields=[], + message_pb=descriptor_pb2.DescriptorProto(name='Input'), + meta=metadata.Metadata(address=types_addr), + ), + output=wrappers.MessageType( + fields=[], + message_pb=descriptor_pb2.DescriptorProto(name='Output'), + meta=metadata.Metadata(address=types_addr), + ), + meta=metadata.Metadata(address=method_addr), + ) + + +def test_method_properties(): + method = get_method() + assert method.name == 'DoTheThings' + + +def test_method_types(): + method = get_method() + assert method.input.name == 'Input' + assert method.input.pb2_module == 'bacon_pb2' + assert method.output.name == 'Output' + assert method.output.pb2_module == 'bacon_pb2' + + +def test_method_overloads(): + method = get_method() + for overload in method.overloads: + assert isinstance(overload, overload_pb2.Overload) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py new file mode 100644 index 000000000000..f8abe6013568 --- /dev/null +++ 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -0,0 +1,119 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import typing + +from google.protobuf import descriptor_pb2 + +from api_factory.schema import metadata +from api_factory.schema import wrappers +from api_factory.schema.pb import client_pb2 + + +def test_service_properties(): + service = make_service(name='ThingDoer') + assert service.name == 'ThingDoer' + + +def test_service_host(): + service = make_service(host='thingdoer.googleapis.com') + assert service.host == 'thingdoer.googleapis.com' + + +def test_service_no_host(): + service = make_service() + assert service.host == '<<< HOSTNAME >>>' + assert bool(service.host) is False + + +def test_service_scopes(): + service = make_service(scopes=('https://foo/user/', 'https://foo/admin/')) + assert 'https://foo/user/' in service.oauth_scopes + assert 'https://foo/admin/' in service.oauth_scopes + + +def test_service_no_scopes(): + service = make_service() + assert len(service.oauth_scopes) == 0 + + +def test_service_pb2_modules(): + service = make_service() + assert service.pb2_modules == [ + ('a.b.v1', 'c_pb2'), + ('foo', 'bacon_pb2'), + ('foo', 'bar_pb2'), + ('foo', 'baz_pb2'), + ('x.y.v1', 'z_pb2'), + ] + + +def test_service_transform_filename(): + service = make_service() + assert service.transform_filename('foobar') == 'foobar' + assert service.transform_filename('service/foobar') == 
'placeholder/foobar' + + +def make_service(name: str = 'Placeholder', host: str = '', + scopes: typing.Tuple[str] = ()) -> wrappers.Service: + # Declare a few methods, with messages in distinct packages. + methods = ( + get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), + get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), + get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), + ) + + # Define a service descriptor, and set a host and oauth scopes if + # appropriate. + service_pb = descriptor_pb2.ServiceDescriptorProto(name=name) + if host: + service_pb.options.Extensions[client_pb2.host] = host + if scopes: + service_pb.options.Extensions[client_pb2.oauth_scopes].extend(scopes) + + # Return a service object to test. + return wrappers.Service( + service_pb=service_pb, + methods={m.name: m for m in methods}, + ) + + +def get_method(name: str, in_type: str, out_type: str) -> wrappers.Method: + input_ = get_message(in_type) + output = get_message(out_type) + method_pb = descriptor_pb2.MethodDescriptorProto( + name=name, + input_type=input_.proto_path, + output_type=output.proto_path, + ) + return wrappers.Method(method_pb=method_pb, input=input_, output=output) + + +def get_message(dot_path: str) -> wrappers.MessageType: + # Note: The `dot_path` here is distinct from the canonical proto path + # because it includes the module, which the proto path does not. + # + # So, if trying to test the DescriptorProto message here, the path + # would be google.protobuf.descriptor.DescriptorProto (whereas the proto + # path is just google.protobuf.DescriptorProto). 
+ pieces = dot_path.split('.') + pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] + return wrappers.MessageType( + fields={}, + message_pb=descriptor_pb2.DescriptorProto(name=name), + meta=metadata.Metadata(address=metadata.Address( + package=pkg, + module=module, + )), + ) diff --git a/packages/gapic-generator/tests/unit/utils/test_cache.py b/packages/gapic-generator/tests/unit/utils/test_cache.py new file mode 100644 index 000000000000..d43f09acd283 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_cache.py @@ -0,0 +1,33 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_factory.utils import cache + + +def test_cached_property(): + class Foo: + def __init__(self): + self.call_count = 0 + + @cache.cached_property + def bar(self): + self.call_count += 1 + return 42 + + foo = Foo() + assert foo.call_count == 0 + assert foo.bar == 42 + assert foo.call_count == 1 + assert foo.bar == 42 + assert foo.call_count == 1 diff --git a/packages/gapic-generator/tests/unit/utils/test_case.py b/packages/gapic-generator/tests/unit/utils/test_case.py new file mode 100644 index 000000000000..efaae5566410 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_case.py @@ -0,0 +1,27 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from api_factory.utils import case + + +def test_pascal_to_snake(): + assert case.to_snake_case('PascalCaseThing') == 'pascal_case_thing' + + +def test_camel_to_snake(): + assert case.to_snake_case('camelCaseThing') == 'camel_case_thing' + + +def test_constant_to_snake(): + assert case.to_snake_case('CONSTANT_CASE_THING') == 'constant_case_thing' diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py new file mode 100644 index 000000000000..955906769187 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -0,0 +1,58 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_factory.utils import lines + + +def test_subsequent_indent(): + assert lines.subsequent_indent( + text='# foo\nbar\nbaz', + prefix='# ', + ) == '# foo\n# bar\n# baz' + + +def test_wrap_noop(): + assert lines.wrap('foo bar baz', width=80) == 'foo bar baz' + + +def test_wrap_empty_text(): + assert lines.wrap('', width=80) == '' + + +def test_wrap_simple(): + assert lines.wrap('foo bar baz', width=5) == 'foo\nbar\nbaz' + + +def test_wrap_strips(): + assert lines.wrap('foo bar baz ', width=80) == 'foo bar baz' + + +def test_wrap_subsequent_indent(): + assert lines.wrap( + '# foo bar baz', + width=5, + subsequent_indent='# ', + ) == '# foo\n# bar\n# baz' + + +def test_wrap_initial_width(): + assert lines.wrap( + 'The hail in Wales falls mainly on the snails.', + width=20, + initial_width=8, + ) == 'The hail\nin Wales falls\nmainly on the\nsnails.' + + +def test_wrap_initial_width_short(): + assert lines.wrap('foo bar', width=30, initial_width=20) == 'foo bar' diff --git a/packages/gapic-generator/tests/unit/utils/test_placeholder.py b/packages/gapic-generator/tests/unit/utils/test_placeholder.py new file mode 100644 index 000000000000..4c26b49a4ef4 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_placeholder.py @@ -0,0 +1,23 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_factory.utils import placeholder + + +def test_placeholder_str_eq(): + assert placeholder.Placeholder('foo') == 'foo' + + +def test_placeholder_falsity(): + assert not placeholder.Placeholder('foo') From 3d12c6fc9cd41c07b4dd28eb1b84c2c2b42dd721 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 20 Apr 2018 12:08:27 -0700 Subject: [PATCH 0002/1339] Output appropriate client library namespace. (#2) This commit makes it such that library code is output into an appropriate import structure. Prior to this, each service was written to the output directory at the top level. Now, the API's namespace and name are used. Therefore, for an API named "Foo" with version "v1", which has `service Bar`... * Before: `import bar` * Now: `from foo_v1 import bar` And if the namespace is `("Spam", "Eggs")`... * Before: `import bar` * Now: `from spam.eggs.foo_v1 import bar` Additionally, the API's name is directly importable, with and without the version: * Now: `from spam.eggs import foo` * Now: `from spam.eggs import foo_v1` These are actually the preferred import approach. Internally, this commit changes the file rename approach from just renaming filenames with templates beginning with `service/` to now substituting four variables (as documented). 
--- .../api_factory/generator/generator.py | 58 +++++++++++++--- .../api_factory/generator/loader.py | 8 +-- .../templates/$namespace/$name/__init__.py.j2 | 12 ++++ .../$name_$version/$service}/__init__.py.j2 | 0 .../$name_$version/$service}/client.py.j2 | 0 .../$service}/transports/__init__.py.j2 | 0 .../$service}/transports/base.py.j2 | 0 .../$service}/transports/grpc.py.j2 | 0 .../$service}/transports/http.py.j2 | 0 .../$namespace/$name_$version/__init__.py.j2 | 12 ++++ .../gapic-generator/api_factory/schema/api.py | 20 ++++++ .../api_factory/schema/wrappers.py | 26 +++---- .../api_factory/utils/__init__.py | 4 ++ .../api_factory/utils/filename.py | 44 ++++++++++++ packages/gapic-generator/docs/process.rst | 25 +++++-- .../tests/unit/generator/test_generator.py | 67 +++++++++++-------- .../tests/unit/generator/test_loader.py | 6 +- .../tests/unit/schema/test_api.py | 28 +++++++- .../unit/schema/wrappers/test_service.py | 9 ++- .../tests/unit/utils/test_filename.py | 29 ++++++++ 20 files changed, 275 insertions(+), 73 deletions(-) create mode 100644 packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 rename packages/gapic-generator/api_factory/generator/templates/{service => $namespace/$name_$version/$service}/__init__.py.j2 (100%) rename packages/gapic-generator/api_factory/generator/templates/{service => $namespace/$name_$version/$service}/client.py.j2 (100%) rename packages/gapic-generator/api_factory/generator/templates/{service => $namespace/$name_$version/$service}/transports/__init__.py.j2 (100%) rename packages/gapic-generator/api_factory/generator/templates/{service => $namespace/$name_$version/$service}/transports/base.py.j2 (100%) rename packages/gapic-generator/api_factory/generator/templates/{service => $namespace/$name_$version/$service}/transports/grpc.py.j2 (100%) rename packages/gapic-generator/api_factory/generator/templates/{service => $namespace/$name_$version/$service}/transports/http.py.j2 (100%) create 
mode 100644 packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/__init__.py.j2 create mode 100644 packages/gapic-generator/api_factory/utils/filename.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_filename.py diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index 27e16179428a..f22d2aa09335 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -77,7 +77,6 @@ def get_response(self) -> CodeGeneratorResponse: for service in self._api.services.values(): output_files += self._render_templates( self._env.loader.service_templates, - transform_filename=service.transform_filename, additional_context={'service': service}, ) @@ -94,7 +93,6 @@ def get_response(self) -> CodeGeneratorResponse: def _render_templates( self, templates: Iterable[str], *, - transform_filename: Callable[[str], str] = lambda fn: fn, additional_context: Mapping[str, Any] = None, ) -> Sequence[CodeGeneratorResponse.File]: """Render the requested templates. @@ -105,9 +103,6 @@ def _render_templates( :class:`~.loader.TemplateLoader`, and they should be able to be set to the :meth:`jinja2.Environment.get_template` method. - transform_filename (Callable[str, str]): A callable to - rename the resulting file from the template name. - Note that the `.j2` suffix is stripped automatically. additional_context (Mapping[str, Any]): Additional variables to be sent to the templates. The ``api`` variable is always available. @@ -122,16 +117,16 @@ def _render_templates( # Iterate over the provided templates and generate a File object # for each. for template_name in templates: - # Get the appropriate output filename. - output_filename = transform_filename(template_name[:-len('.j2')]) - # Generate the File object. 
answer.append(CodeGeneratorResponse.File( content=self._env.get_template(template_name).render( api=self._api, **additional_context ).strip() + '\n', - name=output_filename, + name=self._get_output_filename( + template_name, + context=additional_context, + ), )) # Done; return the File objects based on these templates. @@ -163,6 +158,51 @@ def _read_flat_files( # Done; return the File objects. return answer + def _get_output_filename( + self, + template_name: str, *, + context: dict = None, + ) -> str: + """Return the appropriate output filename for this template. + + This entails running the template name through a series of + replacements to replace the "filename variables" (``$name``, + ``$service``, etc.). + + Additionally, any of these variables may be substituted with an + empty value, and we should do the right thing in this case. + (The exception to this is ``$service``, which is guaranteed to be + set if it is needed.) + + Args: + template_name (str): The filename of the template, from the + filesystem, relative to ``templates/``. + context (Mapping): Additional context being sent to the template. + + Returns: + str: The appropriate output filename. + """ + filename = template_name[:-len('.j2')] + + # Replace the $namespace variable. + filename = filename.replace( + '$namespace', + '/'.join([i.lower() for i in self._api.client.namespace]), + ).lstrip('/') + + # Replace the $name and $version variables. + filename = filename.replace('$name_$version', + self._api.versioned_module_name) + filename = filename.replace('$name', self._api.module_name) + + # Replace the $service variable if applicable. + if context and 'service' in context: + filename = filename.replace('$service', + context['service'].module_name) + + # Done, return the filename.
+ return filename + _dirname = os.path.realpath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/api_factory/generator/loader.py b/packages/gapic-generator/api_factory/generator/loader.py index 7bd04582df22..059cec53f250 100644 --- a/packages/gapic-generator/api_factory/generator/loader.py +++ b/packages/gapic-generator/api_factory/generator/loader.py @@ -35,7 +35,7 @@ def api_templates(self) -> typing.Set[str]: All templates in the ``templates/`` directory are included except: - * Templates corresponding to services (in the ``service/`` + * Templates corresponding to services (in a ``$service/`` subdirectory) are excluded. See :meth:`service_templates`. * Templates beginning with ``_`` are excluded. @@ -55,8 +55,8 @@ def api_templates(self) -> typing.Set[str]: def service_templates(self): """Return the templates specific to each service. - This corresponds to all of the templates in the - ``templates/service/`` subdirectory (relative to this file). + This corresponds to all of the templates in a ``$service/`` + subdirectory (this does _not_ need to be at the top level). When these templates are rendered, they are expected to be sent two variables: an :class:`~.API` object spelled ``api``, and the @@ -68,5 +68,5 @@ def service_templates(self): Set[str]: A list of service templates. 
""" return set( - [t for t in self.list_templates() if t.startswith('service/')] + [t for t in self.list_templates() if '$service/' in t] ) diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 new file mode 100644 index 000000000000..23447abe9852 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 @@ -0,0 +1,12 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% for service in api.services.values() -%} +from ..{{ api.versioned_module_name }}.{{ service.name|snake_case }} import {{ service.name }} +{% endfor %} +__all__ = ( + {%- for service in api.services.values() %} + '{{ service.name }}', + {%- endfor %} +) +{% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/service/__init__.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/service/__init__.py.j2 rename to packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/service/client.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/service/client.py.j2 rename to packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/__init__.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 similarity index 100% rename from 
packages/gapic-generator/api_factory/generator/templates/service/transports/__init__.py.j2 rename to packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/base.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/service/transports/base.py.j2 rename to packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/service/transports/grpc.py.j2 rename to packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/service/transports/http.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/service/transports/http.py.j2 rename to packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/__init__.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/__init__.py.j2 new file mode 100644 index 000000000000..38df57bae002 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/__init__.py.j2 @@ -0,0 +1,12 
@@ +{% extends '_base.py.j2' %} + +{% block content %} +{% for service in api.services.values() -%} +from .{{ service.name|snake_case }} import {{ service.name }} +{% endfor %} +__all__ = ( + {%- for service in api.services.values() %} + '{{ service.name }}', + {%- endfor %} +) +{% endblock %} diff --git a/packages/gapic-generator/api_factory/schema/api.py b/packages/gapic-generator/api_factory/schema/api.py index d7205e4847c5..27e89d375d2d 100644 --- a/packages/gapic-generator/api_factory/schema/api.py +++ b/packages/gapic-generator/api_factory/schema/api.py @@ -60,6 +60,26 @@ def long_name(self) -> str: """Return an appropriate title-cased long name.""" return ' '.join(list(self.client.namespace) + [self.client.name]) + @property + def module_name(self) -> str: + """Return the appropriate Python module name.""" + return utils.to_valid_module_name(self.client.name) + + @property + def version(self) -> str: + """Return the appropriate API version.""" + return utils.to_valid_module_name(self.client.version) + + @property + def versioned_module_name(self) -> str: + """Return the versioned module name (e.g. ``apiname_v1``). + + If there is no version, this is the same as ``module_name``. + """ + if self.version: + return f'{self.module_name}_{self.version}' + return self.module_name + @property def warehouse_package_name(self) -> str: """Return the appropriate Python package name for Warehouse.""" diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/api_factory/schema/wrappers.py index 69cb432757c9..991779d39df3 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -139,6 +139,15 @@ def oauth_scopes(self) -> Sequence[str]: return self.service_pb.options.Extensions[client_pb2.oauth_scopes] return () + @property + def module_name(self) -> str: + """Return the appropriate module name for this service. 
+ + Returns: + str: The service name, in snake case. + """ + return utils.to_snake_case(self.name) + @property def pb2_modules(self) -> Sequence[Tuple[str, str]]: """Return a sequence of pb2 modules, for import. @@ -162,20 +171,3 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: method.output.pb2_module, )) return sorted(answer) - - def transform_filename(self, original: str) -> str: - """Transforms a filename to be appropriate for this service. - - This essentially replaces the ``service/`` prefix with the - snake-cased directory for the service name. - - Args: - original (str): The filename to be transformed. - - Returns: - str: The transformed directory name. - """ - return original.replace( - 'service/', - f'{utils.to_snake_case(self.name)}/', - ) diff --git a/packages/gapic-generator/api_factory/utils/__init__.py b/packages/gapic-generator/api_factory/utils/__init__.py index a3dc1be070bc..2ec96ecf5118 100644 --- a/packages/gapic-generator/api_factory/utils/__init__.py +++ b/packages/gapic-generator/api_factory/utils/__init__.py @@ -14,6 +14,8 @@ from api_factory.utils.cache import cached_property from api_factory.utils.case import to_snake_case +from api_factory.utils.filename import to_valid_filename +from api_factory.utils.filename import to_valid_module_name from api_factory.utils.lines import subsequent_indent from api_factory.utils.lines import wrap from api_factory.utils.placeholder import Placeholder @@ -24,5 +26,7 @@ 'Placeholder', 'subsequent_indent', 'to_snake_case', + 'to_valid_filename', + 'to_valid_module_name', 'wrap', ) diff --git a/packages/gapic-generator/api_factory/utils/filename.py b/packages/gapic-generator/api_factory/utils/filename.py new file mode 100644 index 000000000000..9901d0745f1d --- /dev/null +++ b/packages/gapic-generator/api_factory/utils/filename.py @@ -0,0 +1,44 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + + +def to_valid_filename(filename: str) -> str: + """Given any string, return a valid filename. + + For this purpose, filenames are expected to be all lower-cased, + and we err on the side of being more restrictive with allowed characters, + including not allowing space. + + Args: + filename (str): The input filename. + + Returns: + str: A valid filename. + """ + return re.sub(r'[^a-z0-9.%_-]+', '-', filename.lower()) + + +def to_valid_module_name(module_name: str) -> str: + """Given any string, return a valid Python module name. + + Args: + module_name (str): The input filename + + Returns: + str: A valid module name. Extensions (e.g. *.py), if present, + are untouched. + """ + return to_valid_filename(module_name).replace('-', '_') diff --git a/packages/gapic-generator/docs/process.rst b/packages/gapic-generator/docs/process.rst index b24fa3368841..fe9ef70d70f7 100644 --- a/packages/gapic-generator/docs/process.rst +++ b/packages/gapic-generator/docs/process.rst @@ -82,17 +82,30 @@ of client library files. First, it loads every template in the ``generator/templates/`` directory. These are `Jinja`_ templates. There is no master list of templates; it is assumed that every template in this directory should be rendered -(unless its name begins with an underscore), and that the name of the -resulting file should be the same as the template's file name with the -``.j2`` suffix truncated. +(unless its name begins with an underscore). 
+ +The name of the output file is based on the name of the template, with +the following string replacements applied: + +* The ``.j2`` suffix is removed. +* ``$namespace`` is replaced with the namespace specified in the client, + converted to appropriate Python module case. If there is no namespace, + this segment is dropped. +* ``$name`` is replaced with the client name. This is expected to be + present. +* ``$version`` is replaced with the client version (the version of the API). + If there is no specified version, this is dropped. +* ``$service`` is replaced with the service name, converted to appropriate + Python module case. There may be more than one service in an API; read on + for more about this. Every template receives **one** variable, spelled ``api``. It is the :class:`~.schema.api.API` object that was pieced together in the parsing step. There is one caveat to the above, which is that an API can have more than -one service. Therefore, the ``generator/templates/service/`` directory -is a special case. These files are rendered *once per service*, with the -``service`` directory name changed to the name of the service itself +one service. Therefore, templates with ``$service/`` in their name +are a special case. These files are rendered *once per service*, with the +``$service`` directory name changed to the name of the service itself (in snake case, because this is Python). 
Additionally, these templates receive two variables: the ``api`` variable discussed above, as well as a variable spelled ``service``, which corresponds to the diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 5ffd391baba8..966e77df424c 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -22,9 +22,9 @@ from google.protobuf.compiler import plugin_pb2 from api_factory.generator import generator -from api_factory.schema.api import API from api_factory.schema import wrappers -from api_factory import utils +from api_factory.schema.api import API +from api_factory.schema.pb import client_pb2 def test_constructor(): @@ -108,9 +108,6 @@ def test_get_response(): continue service = kwargs['additional_context']['service'] assert isinstance(service, wrappers.Service) - assert kwargs['transform_filename']( - 'service/foo', - ) == f'{utils.to_snake_case(service.name)}/foo' def test_render_templates(): @@ -153,28 +150,6 @@ def test_render_templates_additional_context(): assert files[0].content == 'A bird!\n' -def test_render_templates_filename_rename(): - g = make_generator() - - # Determine the templates to be rendered. - templates = ('service/foo.py.j2', 'service/bar.py.j2', 'plain.py.j2') - with mock.patch.object(jinja2.Environment, 'get_template') as get_template: - get_template.return_value = jinja2.Template('Template body.') - - # Render the templates. - files = g._render_templates( - templates, - transform_filename=lambda f: f.replace('service/', 'each/'), - ) - - # Test that we get back the expected content for each template. 
- assert len(files) == 3 - assert files[0].name == 'each/foo.py' - assert files[1].name == 'each/bar.py' - assert files[2].name == 'plain.py' - assert all([f.content == 'Template body.\n' for f in files]) - - def test_read_flat_files(): g = make_generator() @@ -211,5 +186,43 @@ def test_read_flat_files(): assert files[1].content == 'abc-files/other/bar.ext-r' +def test_get_output_filename(): + g = make_generator(proto_file=[make_proto_file(name='Spam', version='v2')]) + template_name = '$namespace/$name_$version/foo.py.j2' + assert g._get_output_filename(template_name) == 'spam_v2/foo.py' + + +def test_get_output_filename_with_namespace(): + g = make_generator(proto_file=[make_proto_file( + name='Spam', + namespace=['Ham', 'Bacon'], + version='v2', + )]) + template_name = '$namespace/$name_$version/foo.py.j2' + assert g._get_output_filename(template_name) == 'ham/bacon/spam_v2/foo.py' + + +def test_get_output_filename_with_service(): + g = make_generator(proto_file=[make_proto_file(name='spam', version='v2')]) + template_name = '$name/$service/foo.py.j2' + assert g._get_output_filename( + template_name, + context={ + 'service': wrappers.Service( + methods=[], + service_pb=descriptor_pb2.ServiceDescriptorProto(name='Eggs'), + ), + } + ) == 'spam/eggs/foo.py' + + def make_generator(**kwargs): return generator.Generator(plugin_pb2.CodeGeneratorRequest(**kwargs)) + + +def make_proto_file(**kwargs): + proto_file = descriptor_pb2.FileDescriptorProto() + proto_file.options.Extensions[client_pb2.client].MergeFrom( + client_pb2.Client(**kwargs), + ) + return proto_file diff --git a/packages/gapic-generator/tests/unit/generator/test_loader.py b/packages/gapic-generator/tests/unit/generator/test_loader.py index 056b72d80fb7..27f8011dda11 100644 --- a/packages/gapic-generator/tests/unit/generator/test_loader.py +++ b/packages/gapic-generator/tests/unit/generator/test_loader.py @@ -22,10 +22,10 @@ def test_service_templates(): with mock.patch.object(loader, 'list_templates') as 
list_templates: list_templates.return_value = [ '_base.j2', 'foo.j2', 'bar.j2', - 'service/spam.j2', 'service/eggs.j2', 'service/py/spameggs.j2', + '$service/spam.j2', '$service/eggs.j2', '$service/py/spameggs.j2', ] assert loader.service_templates == { - 'service/spam.j2', 'service/eggs.j2', 'service/py/spameggs.j2', + '$service/spam.j2', '$service/eggs.j2', '$service/py/spameggs.j2', } @@ -34,6 +34,6 @@ def test_api_templates(): with mock.patch.object(loader, 'list_templates') as list_templates: list_templates.return_value = [ '_base.j2', 'foo.j2', 'bar.j2', - 'service/spam.j2', 'service/eggs.j2', 'service/py/spameggs.j2', + '$service/spam.j2', '$service/eggs.j2', '$service/py/spameggs.j2', ] assert loader.api_templates == {'foo.j2', 'bar.j2'} diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index d95b8026edbd..340403cd8c87 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -31,6 +31,32 @@ def test_long_name(): assert api.long_name == 'Agrabah Lamp Genie' +def test_module_name(): + api = make_api(client=make_client( + name='Genie', + namespace=['Agrabah', 'Lamp'], + )) + assert api.module_name == 'genie' + + +def test_versioned_module_name_no_version(): + api = make_api(client=make_client( + name='Genie', + namespace=['Agrabah', 'Lamp'], + version='', + )) + assert api.versioned_module_name == 'genie' + + +def test_versioned_module_name(): + api = make_api(client=make_client( + name='Genie', + namespace=['Agrabah', 'Lamp'], + version='v2', + )) + assert api.versioned_module_name == 'genie_v2' + + def test_warehouse_package_name_placeholder(): api = make_api(client=make_client(name='')) assert api.warehouse_package_name == '<<< PACKAGE NAME >>>' @@ -203,8 +229,6 @@ def test_get_fields(): def test_get_methods(): - L = descriptor_pb2.SourceCodeInfo.Location - # Start with an empty API object. 
api = make_api() diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index f8abe6013568..7c98a4a53166 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -59,14 +59,13 @@ def test_service_pb2_modules(): ] -def test_service_transform_filename(): - service = make_service() - assert service.transform_filename('foobar') == 'foobar' - assert service.transform_filename('service/foobar') == 'placeholder/foobar' +def test_module_name(): + service = make_service(name='MyService') + assert service.module_name == 'my_service' def make_service(name: str = 'Placeholder', host: str = '', - scopes: typing.Tuple[str] = ()) -> wrappers.Service: + scopes: typing.Tuple[str] = ()) -> wrappers.Service: # Declare a few methods, with messages in distinct packages. methods = ( get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), diff --git a/packages/gapic-generator/tests/unit/utils/test_filename.py b/packages/gapic-generator/tests/unit/utils/test_filename.py new file mode 100644 index 000000000000..fe8596074398 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_filename.py @@ -0,0 +1,29 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from api_factory.utils import filename + + +def test_to_valid_filename(): + assert filename.to_valid_filename('foo bar.py') == 'foo-bar.py' + assert filename.to_valid_filename('FOO') == 'foo' + assert filename.to_valid_filename('nom$&nom@nom.py') == 'nom-nom-nom.py' + assert filename.to_valid_filename('num_bear.py') == 'num_bear.py' + + +def test_to_valid_module_name(): + assert filename.to_valid_module_name('foo bar.py') == 'foo_bar.py' + assert filename.to_valid_module_name('FOO') == 'foo' + assert filename.to_valid_module_name('nom$&nom.py') == 'nom_nom.py' + assert filename.to_valid_module_name('num_bear.py') == 'num_bear.py' From af972368b3dcfc3e47c1ea0a959691e880a25c2f Mon Sep 17 00:00:00 2001 From: Evawere Ogbe Date: Mon, 30 Apr 2018 12:52:00 -0700 Subject: [PATCH 0003/1339] Update docs (#3) --- packages/gapic-generator/docs/getting-started.rst | 14 +++++++++++++- packages/gapic-generator/docs/installing.rst | 8 ++++---- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started.rst b/packages/gapic-generator/docs/getting-started.rst index 6e18de87e2b8..3a3b0c2b0868 100644 --- a/packages/gapic-generator/docs/getting-started.rst +++ b/packages/gapic-generator/docs/getting-started.rst @@ -58,6 +58,18 @@ for each plugin invoked; you just want these to match: running ``protoc`` from any other location, you will need to provide ``--proto_path``. +Because the generator is experimental, you need to compile the experimental +version of google-common-protos as well: + +.. code-block:: shell + + $ protoc google/api/*.proto \ + google/api/experimental/*.proto \ + google/api/expr/v1/*.proto \ + --python_out=/dest/ \ + +Create a blank file ``/dest/google/api/__init__.py`` to use ``google.api`` as a +package. Running a Client Library ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -89,7 +101,7 @@ Here is a test script: from google.cloud.vision.v1 import image_annotator_pb2 as types # This is the client library generated by this plugin. 
- import image_annotator + from google.cloud.vision_v1 import image_annotator # Instantiate the client. # diff --git a/packages/gapic-generator/docs/installing.rst b/packages/gapic-generator/docs/installing.rst index 0b42ccfa379c..29190f7f48e4 100644 --- a/packages/gapic-generator/docs/installing.rst +++ b/packages/gapic-generator/docs/installing.rst @@ -49,10 +49,10 @@ As for this library itself, the recommended installation approach is .. code-block:: shell # Due to its experimental state, this tool is not published to a - # package manager, and pip can not install from git-on-borg; - # you should clone it. - git clone sso://team/apiclient-eng/python-client-generator - cd python-client-generator/ + # package manager; you should clone it. + # (You can pip install it from GitHub, but not if you want to tinker.) + git clone git@github.com:googleapis/gapic-generator-python.git + cd gapic-generator-python/ # Install the tool. This will handle the virtualenv for you, and # make an appropriately-aliased executable. 
From 83d914e437f00d84bc89317bd31aa6b643b11786 Mon Sep 17 00:00:00 2001 From: Evawere Ogbe Date: Wed, 16 May 2018 12:50:35 -0700 Subject: [PATCH 0004/1339] Add LRO (#4) * Add LRO * Move operations client to transport * Add operations_client implementation to http transport --- .../$name_$version/$service/client.py.j2 | 29 +++++++++- .../$service/transports/base.py.j2 | 10 ++++ .../$service/transports/grpc.py.j2 | 21 +++++++ .../$service/transports/http.py.j2 | 27 ++++++++- .../gapic-generator/api_factory/schema/api.py | 6 +- .../api_factory/schema/pb/lro_pb2.py | 26 ++++----- .../api_factory/schema/wrappers.py | 17 ++++++ .../tests/unit/schema/test_api.py | 47 ++++++++++++++++ .../unit/schema/wrappers/test_service.py | 56 ++++++++++++++++++- 9 files changed, 220 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 index 42a89d9e638c..4a6f76bf62dd 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 @@ -6,6 +6,9 @@ import pkg_resources from typing import Mapping, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 +{%- if service.has_lro %} +from google.api_core import operation +{%- endif %} from google.api_core import retry from google.auth import credentials @@ -59,7 +62,11 @@ class {{ service.name }}: retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), + {%- if method.lro_payload %} + ) -> operation.Operation: + {%- else %} ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + {%- endif %} """{{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} Args: @@ -73,9 +80,14 @@ class {{ service.name }}: sent along with the 
request as metadata. Returns: + {%- if method.lro_payload %} + ~.operation.Operation: + A :class:`google.api_core.operation.Operation` instance. + {%- else %} ~.{{ method.output.pb2_module }}.{{ method.output.name }}: {{ method.output.meta.doc|wrap(width=72, initial_width=56, subsequent_indent=' ' * 16) }} + {%- endif %} """ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -86,8 +98,21 @@ class {{ service.name }}: client_info=self.client_info, ) - # Send the request and return the response. - return rpc(request, retry=retry, timeout=timeout, metadata=metadata) + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) + {%- if method.lro_payload %} + + # Wrap the response in an operation future + response = operation.from_gapic( + response, + self._transport.operations_client, + {{ method.lro_payload.pb2_module }}.{{ method.lro_payload.name }}, + metadata_type={{ method.lro_metadata.pb2_module }}.{{ method.lro_metadata.name }} + ) + {%- endif %} + + # Done; return the response. + return response {%- endfor %} @property diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 index a951395015f9..3c9cc07b06d2 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 @@ -5,6 +5,9 @@ import abc import typing from google import auth +{%- if service.has_lro %} +from google.api_core import operations_v1 +{%- endif %} from google.auth import credentials {% for package, pb2_module in service.pb2_modules -%} @@ -42,6 +45,13 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): # Save the credentials. 
self._credentials = credentials + {%- if service.has_lro %} + + @abc.abstractproperty + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError + {%- endif %} {% for method in service.methods.values() -%} @abc.abstractmethod diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 index 40a8d27941aa..55c14b79649e 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 @@ -4,6 +4,9 @@ from typing import Sequence, Tuple from google.api_core import grpc_helpers +{%- if service.has_lro %} +from google.api_core import operations_v1 +{%- endif %} from google.auth import credentials import grpc @@ -57,6 +60,24 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Return the channel from cache. return self.__dict__['grpc_channel'] + {%- if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if 'operations_client' not in self.__dict__: + self.__dict__['operations_client'] = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. 
+ return self.__dict__['operations_client'] + {%- endif %} {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 index 1098af384caa..2940673bf035 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 @@ -2,7 +2,9 @@ {% block content %} from typing import Sequence, Tuple - +{% if service.has_lro %} +from google.api_core import operations_v1 +{%- endif %} from google.auth import credentials from google.auth.transport.requests import AuthorizedSession @@ -37,6 +39,29 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): """ super().__init__(credentials=credentials) self._session = AuthorizedSession(self._credentials) + {%- if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if 'operations_client' not in self.__dict__: + from google.api_core import grpc_helpers + self.__dict__['operations_client'] = operations_v1.OperationsClient( + grpc_helpers.create_channel( + '{0}:443'.format(self.SERVICE_ADDRESS), + credentials=self._credentials, + scopes=self.AUTH_SCOPES, + ) + ) + + # Return the client from cache. 
+ return self.__dict__['operations_client'] + {%- endif %} {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, diff --git a/packages/gapic-generator/api_factory/schema/api.py b/packages/gapic-generator/api_factory/schema/api.py index 27e89d375d2d..23cc3427b663 100644 --- a/packages/gapic-generator/api_factory/schema/api.py +++ b/packages/gapic-generator/api_factory/schema/api.py @@ -28,6 +28,7 @@ from api_factory.schema import metadata from api_factory.schema import wrappers from api_factory.schema.pb import client_pb2 +from api_factory.schema.pb import lro_pb2 @dataclasses.dataclass @@ -90,7 +91,7 @@ def warehouse_package_name(self) -> str: # Piece the name and namespace together to come up with the # proper package name. - answer = list(self.client.namespace) + [self.client.name] + answer = list(self.client.namespace) + self.client.name.split(' ') return '-'.join(answer).lower() def load(self, fdp: descriptor_pb2.FileDescriptorProto) -> None: @@ -228,8 +229,11 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # Iterate over the methods and collect them into a dictionary. 
answer = collections.OrderedDict() for method_pb, i in zip(methods, range(0, sys.maxsize)): + types = method_pb.options.Extensions[lro_pb2.types] answer[method_pb.name] = wrappers.Method( input=self.messages[method_pb.input_type.lstrip('.')], + lro_metadata=self.messages.get(types.lro_metadata_type, None), + lro_payload=self.messages.get(types.lro_return_type, None), method_pb=method_pb, meta=metadata.Metadata( address=address, diff --git a/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py b/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py index 108ba7da473e..cbaf9cb1077e 100644 --- a/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py +++ b/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py @@ -20,7 +20,7 @@ name='google/api/experimental/lro.proto', package='google.api.experimental', syntax='proto3', - serialized_pb=_b('\n!google/api/experimental/lro.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\"<\n\x0eOperationTypes\x12\x13\n\x0breturn_type\x18\x01 \x01(\t\x12\x15\n\rmetadata_type\x18\x02 \x01(\t:X\n\x05types\x12\x1e.google.protobuf.MethodOptions\x18\x80\x90\x03 \x01(\x0b\x32\'.google.api.experimental.OperationTypesb\x06proto3') + serialized_pb=_b('\n!google/api/experimental/lro.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\"A\n\x0bMethodTypes\x12\x17\n\x0flro_return_type\x18\x01 \x01(\t\x12\x19\n\x11lro_metadata_type\x18\x02 \x01(\t:U\n\x05types\x12\x1e.google.protobuf.MethodOptions\x18\x80\x90\x03 \x01(\x0b\x32$.google.api.experimental.MethodTypesb\x06proto3') , dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) @@ -35,22 +35,22 @@ options=None, file=DESCRIPTOR) -_OPERATIONTYPES = _descriptor.Descriptor( - name='OperationTypes', - full_name='google.api.experimental.OperationTypes', +_METHODTYPES = _descriptor.Descriptor( + name='MethodTypes', + full_name='google.api.experimental.MethodTypes', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ 
_descriptor.FieldDescriptor( - name='return_type', full_name='google.api.experimental.OperationTypes.return_type', index=0, + name='lro_return_type', full_name='google.api.experimental.MethodTypes.lro_return_type', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='metadata_type', full_name='google.api.experimental.OperationTypes.metadata_type', index=1, + name='lro_metadata_type', full_name='google.api.experimental.MethodTypes.lro_metadata_type', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, @@ -69,21 +69,21 @@ oneofs=[ ], serialized_start=96, - serialized_end=156, + serialized_end=161, ) -DESCRIPTOR.message_types_by_name['OperationTypes'] = _OPERATIONTYPES +DESCRIPTOR.message_types_by_name['MethodTypes'] = _METHODTYPES DESCRIPTOR.extensions_by_name['types'] = types _sym_db.RegisterFileDescriptor(DESCRIPTOR) -OperationTypes = _reflection.GeneratedProtocolMessageType('OperationTypes', (_message.Message,), dict( - DESCRIPTOR = _OPERATIONTYPES, +MethodTypes = _reflection.GeneratedProtocolMessageType('MethodTypes', (_message.Message,), dict( + DESCRIPTOR = _METHODTYPES, __module__ = 'google.api.experimental.lro_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.OperationTypes) + # @@protoc_insertion_point(class_scope:google.api.experimental.MethodTypes) )) -_sym_db.RegisterMessage(OperationTypes) +_sym_db.RegisterMessage(MethodTypes) -types.message_type = _OPERATIONTYPES +types.message_type = _METHODTYPES google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(types) # @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py 
b/packages/gapic-generator/api_factory/schema/wrappers.py index 991779d39df3..72a31e1783a0 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -96,6 +96,8 @@ class Method: method_pb: descriptor_pb2.MethodDescriptorProto input: MessageType output: MessageType + lro_payload: MessageType = None + lro_metadata: MessageType = None meta: Metadata = dataclasses.field(default_factory=Metadata) def __getattr__(self, name): @@ -170,4 +172,19 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: '.'.join(method.output.meta.address.package), method.output.pb2_module, )) + if method.lro_payload: + answer.add(( + '.'.join(method.lro_payload.meta.address.package), + method.lro_payload.pb2_module, + )) + if method.lro_metadata: + answer.add(( + '.'.join(method.lro_metadata.meta.address.package), + method.lro_metadata.pb2_module, + )) return sorted(answer) + + @property + def has_lro(self) -> bool: + """Return whether the service has a long-running method.""" + return any(method.lro_payload for method in self.methods.values()) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 340403cd8c87..5ef29198eae9 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -22,6 +22,7 @@ from api_factory.schema import wrappers from api_factory.schema.api import API from api_factory.schema.pb import client_pb2 +from api_factory.schema.pb import lro_pb2 def test_long_name(): @@ -76,6 +77,11 @@ def test_warehouse_package_name_with_namespace(): assert api.warehouse_package_name == 'google-cloud-bigquery' +def test_warehouse_package_name_multiple_words(): + api = make_api(client=make_client(name='Big Query', namespace=[])) + assert api.warehouse_package_name == 'big-query' + + def test_load(): sentinel_message = descriptor_pb2.DescriptorProto() sentinel_enum = 
descriptor_pb2.EnumDescriptorProto() @@ -267,6 +273,47 @@ def test_get_methods(): next(items) +def test_get_methods_lro(): + # Start with an empty API object. + api = make_api() + + # Load the message types for a method into the API object, including LRO + # payload and metadata. + address = metadata.Address(package=['foo', 'bar'], module='baz') + api._load_descriptor(descriptor_pb2.DescriptorProto(name='In'), + address=address, info={}) + api._load_descriptor(descriptor_pb2.DescriptorProto(name='Out'), + address=address, info={}) + api._load_descriptor(descriptor_pb2.DescriptorProto(name='Progress'), + address=address, info={}) + operations_address = metadata.Address( + package=['google', 'longrunning'], + module='operations', + ) + api._load_descriptor(descriptor_pb2.DescriptorProto(name='Operation'), + address=operations_address, info={}) + method_pb = descriptor_pb2.MethodDescriptorProto( + name='DoBigThings', + input_type='foo.bar.In', + output_type='google.longrunning.Operation', + ) + method_pb.options.Extensions[lro_pb2.types].MergeFrom(lro_pb2.MethodTypes( + lro_return_type='foo.bar.Out', + lro_metadata_type='foo.bar.Progress', + )) + + # Run the method under test. + methods = api._get_methods([method_pb], address=address, info={}) + + # Test that the method has the expected lro output, payload, and metadata. 
+ method = next(iter(methods.values())) + assert method.output.name == 'Operation' + assert isinstance(method.lro_payload, wrappers.MessageType) + assert method.lro_payload.name == 'Out' + assert isinstance(method.lro_metadata, wrappers.MessageType) + assert method.lro_metadata.name == 'Progress' + + def test_load_descriptor(): message_pb = descriptor_pb2.DescriptorProto(name='Riddle') address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 7c98a4a53166..42cdbb7a073b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -59,6 +59,26 @@ def test_service_pb2_modules(): ] +def test_service_pb2_modules_lro(): + service = make_lro_service() + assert service.pb2_modules == [ + ('foo', 'bar_pb2'), + ('foo', 'baz_pb2'), + ('foo', 'qux_pb2'), + ('google.longrunning', 'operations_pb2'), + ] + + +def test_service_no_lro(): + service = make_service() + assert service.has_lro is False + + +def test_service_has_lro(): + service = make_lro_service() + assert service.has_lro + + def test_module_name(): service = make_service(name='MyService') assert service.module_name == 'my_service' @@ -88,15 +108,47 @@ def make_service(name: str = 'Placeholder', host: str = '', ) -def get_method(name: str, in_type: str, out_type: str) -> wrappers.Method: +def make_lro_service() -> wrappers.Service: + # Declare a long-running method. + method = get_method( + 'DoBigThing', + 'foo.bar.ThingRequest', + 'google.longrunning.operations.Operation', + lro_payload_type='foo.baz.ThingResponse', + lro_metadata_type='foo.qux.ThingMetadata', + ) + + # Define a service descriptor. + service_pb = descriptor_pb2.ServiceDescriptorProto(name='ThingDoer') + + # Return a service object to test. 
+ return wrappers.Service( + service_pb=service_pb, + methods={method.name: method}, + ) + + +def get_method(name: str, + in_type: str, + out_type: str, + lro_payload_type: str = '', + lro_metadata_type: str = '') -> wrappers.Method: input_ = get_message(in_type) output = get_message(out_type) + lro_payload = get_message(lro_payload_type) if lro_payload_type else None + lro_metadata = get_message(lro_metadata_type) if lro_metadata_type else None method_pb = descriptor_pb2.MethodDescriptorProto( name=name, input_type=input_.proto_path, output_type=output.proto_path, ) - return wrappers.Method(method_pb=method_pb, input=input_, output=output) + return wrappers.Method( + method_pb=method_pb, + input=input_, + lro_metadata=lro_metadata, + lro_payload=lro_payload, + output=output, + ) def get_message(dot_path: str) -> wrappers.MessageType: From 0a99ca20c7a1ab9b8cd16dedacf8295a7ad73419 Mon Sep 17 00:00:00 2001 From: Evawere Ogbe Date: Tue, 3 Jul 2018 16:48:56 -0700 Subject: [PATCH 0005/1339] Add field headers (#5) --- .../api_factory/generator/generator.py | 2 +- .../$name_$version/$service/client.py.j2 | 28 ++++++++- .../$service/transports/base.py.j2 | 4 +- .../$service/transports/grpc.py.j2 | 5 +- .../$service/transports/http.py.j2 | 4 +- .../api_factory/schema/pb/headers_pb2.py | 63 ++++++++++++++++--- .../api_factory/schema/wrappers.py | 28 ++++++++- .../tests/unit/schema/wrappers/test_method.py | 11 ++++ .../unit/schema/wrappers/test_service.py | 36 ++++++++--- 9 files changed, 154 insertions(+), 27 deletions(-) diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index f22d2aa09335..9fbf00ea4c15 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -14,7 +14,7 @@ import io import os -from typing import Any, Callable, Iterable, Mapping, Sequence +from typing import Any, Iterable, Mapping, Sequence 
import jinja2 diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 index 4a6f76bf62dd..c9d79c5e7242 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 @@ -4,6 +4,9 @@ import functools import pkg_resources from typing import Mapping, Optional, Sequence, Tuple, Union +{%- if service.has_field_headers %} +from urllib import parse +{%- endif %} from google.api_core import gapic_v1 {%- if service.has_lro %} @@ -54,8 +57,8 @@ class {{ service.name }}: else: Transport = get_transport_class(transport) self._transport = Transport(credentials=credentials) + {%- for method in service.methods.values() %} - {% for method in service.methods.values() -%} @functools.singledispatch def {{ method.name|snake_case }}(self, request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, @@ -97,6 +100,29 @@ class {{ service.name }}: default_timeout=None, # FIXME client_info=self.client_info, ) + {%- if method.field_headers %} + + # Ensure metadata is a mutable sequence. + metadata = list(metadata) + + # Add request headers to metadata. + {%- for field_header in method.field_headers %} + + try: + header_value = request.{{ field_header.field }} + except AttributeError: + # Suppress when attribute missing. + pass + else: + metadata.append(( + '{{ field_header.header }}', + '{}={}'.format( + parse.quote_plus('{{ field_header.field }}'), + parse.quote_plus(header_value), + ), + )) + {%- endfor %} + {%- endif %} # Send the request. 
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 index 3c9cc07b06d2..44dc3e508c77 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 @@ -52,13 +52,13 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): """Return the client designed to process long-running operations.""" raise NotImplementedError {%- endif %} + {%- for method in service.methods.values() %} - {% for method in service.methods.values() -%} @abc.abstractmethod def {{ method.name|snake_case }}( self, request: {{ method.input.pb2_module }}.{{ method.input.name }}, ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: raise NotImplementedError - {% endfor -%} + {%- endfor %} {% endblock %} diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 index 55c14b79649e..8a7052ed583f 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 @@ -78,8 +78,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Return the client from cache. 
return self.__dict__['operations_client'] {%- endif %} + {%- for method in service.methods.values() %} - {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, metadata: Sequence[Tuple[str, str]] = (), @@ -115,6 +115,5 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Return the response. return stub(request, metadata=metadata) - - {% endfor -%} + {%- endfor %} {%- endblock -%} diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 index 2940673bf035..bf9e18896ae5 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 +++ b/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 @@ -62,8 +62,8 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): # Return the client from cache. 
return self.__dict__['operations_client'] {%- endif %} + {%- for method in service.methods.values() %} - {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, metadata: Sequence[Tuple[str, str]] = (), @@ -99,5 +99,5 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): return {{ method.output.pb2_module }}.{{ method.output.name }}.FromString( response.content, ) - {% endfor %} + {%- endfor %} {% endblock %} diff --git a/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py b/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py index 82f1206d5802..ee77e08f3921 100644 --- a/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py +++ b/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py @@ -20,23 +20,70 @@ name='google/api/experimental/headers.proto', package='google.api.experimental', syntax='proto3', - serialized_pb=_b('\n%google/api/experimental/headers.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto:5\n\x0cheader_param\x12\x1d.google.protobuf.FieldOptions\x18\xf0\x8c\x03 \x01(\tb\x06proto3') + serialized_pb=_b('\n%google/api/experimental/headers.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\",\n\x0b\x46ieldHeader\x12\r\n\x05\x66ield\x18\x01 \x01(\t\x12\x0e\n\x06header\x18\x02 \x01(\t:]\n\rfield_headers\x12\x1e.google.protobuf.MethodOptions\x18\xf0\x8c\x03 \x03(\x0b\x32$.google.api.experimental.FieldHeaderb\x06proto3') , dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) -HEADER_PARAM_FIELD_NUMBER = 50800 -header_param = _descriptor.FieldDescriptor( - name='header_param', full_name='google.api.experimental.header_param', index=0, - number=50800, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), +FIELD_HEADERS_FIELD_NUMBER = 50800 +field_headers = _descriptor.FieldDescriptor( + name='field_headers', 
full_name='google.api.experimental.field_headers', index=0, + number=50800, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=True, extension_scope=None, options=None, file=DESCRIPTOR) -DESCRIPTOR.extensions_by_name['header_param'] = header_param + +_FIELDHEADER = _descriptor.Descriptor( + name='FieldHeader', + full_name='google.api.experimental.FieldHeader', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field', full_name='google.api.experimental.FieldHeader.field', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='header', full_name='google.api.experimental.FieldHeader.header', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=100, + serialized_end=144, +) + +DESCRIPTOR.message_types_by_name['FieldHeader'] = _FIELDHEADER +DESCRIPTOR.extensions_by_name['field_headers'] = field_headers _sym_db.RegisterFileDescriptor(DESCRIPTOR) -google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(header_param) +FieldHeader = _reflection.GeneratedProtocolMessageType('FieldHeader', (_message.Message,), dict( + DESCRIPTOR = _FIELDHEADER, + __module__ = 'google.api.experimental.headers_pb2' + # @@protoc_insertion_point(class_scope:google.api.experimental.FieldHeader) + )) 
+_sym_db.RegisterMessage(FieldHeader) + +field_headers.message_type = _FIELDHEADER +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(field_headers) # @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/api_factory/schema/wrappers.py index 72a31e1783a0..a7a425b5ecf8 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -28,13 +28,14 @@ """ import dataclasses -from typing import List, Mapping, Sequence, Tuple +from typing import Callable, List, Mapping, Sequence, Tuple from google.protobuf import descriptor_pb2 from api_factory import utils from api_factory.schema.metadata import Metadata from api_factory.schema.pb import client_pb2 +from api_factory.schema.pb import headers_pb2 from api_factory.schema.pb import overload_pb2 @@ -108,6 +109,11 @@ def overloads(self): """Return the overloads defined for this method.""" return self.method_pb.options.Extensions[overload_pb2.overloads] + @property + def field_headers(self): + """Return the field headers defined for this method.""" + return self.method_pb.options.Extensions[headers_pb2.field_headers] + @dataclasses.dataclass(frozen=True) class Service: @@ -187,4 +193,22 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" - return any(method.lro_payload for method in self.methods.values()) + return self._any_method(lambda m: getattr(m, 'lro_payload')) + + @property + def has_field_headers(self) -> bool: + """Return whether the service has a method containing field headers.""" + return self._any_method(lambda m: getattr(m, 'field_headers')) + + def _any_method(self, predicate: Callable) -> bool: + """Return whether the service has a method that fulfills ``predicate``. 
+ + Args: + predicate (Callable[Method]): Function specifying the criteria + testing the methods in the service. + + Returns: + bool: True if any method of the service contains the specified + attribute. + """ + return any(predicate(method) for method in self.methods.values()) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 076754417d24..0c04aca429dd 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -16,6 +16,7 @@ from api_factory.schema import metadata from api_factory.schema import wrappers +from api_factory.schema.pb import headers_pb2 from api_factory.schema.pb import overload_pb2 @@ -36,6 +37,10 @@ def get_method() -> wrappers.Method: ext_key = overload_pb2.overloads method_pb.options.Extensions[ext_key].extend([overload_pb2.Overload()]) + # Set a field header in the method descriptor. + ext_key = headers_pb2.field_headers + method_pb.options.Extensions[ext_key].extend([headers_pb2.FieldHeader()]) + # Instantiate the wrapper class. 
return wrappers.Method( method_pb=method_pb, @@ -70,3 +75,9 @@ def test_method_overloads(): method = get_method() for overload in method.overloads: assert isinstance(overload, overload_pb2.Overload) + + +def test_method_field_headers(): + method = get_method() + for field_header in method.field_headers: + assert isinstance(field_header, headers_pb2.FieldHeader) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 42cdbb7a073b..3669ea493430 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -19,6 +19,7 @@ from api_factory.schema import metadata from api_factory.schema import wrappers from api_factory.schema.pb import client_pb2 +from api_factory.schema.pb import headers_pb2 def test_service_properties(): @@ -60,7 +61,7 @@ def test_service_pb2_modules(): def test_service_pb2_modules_lro(): - service = make_lro_service() + service = make_service_with_method_options() assert service.pb2_modules == [ ('foo', 'bar_pb2'), ('foo', 'baz_pb2'), @@ -75,10 +76,20 @@ def test_service_no_lro(): def test_service_has_lro(): - service = make_lro_service() + service = make_service_with_method_options() assert service.has_lro +def test_service_no_field_headers(): + service = make_service() + assert service.has_field_headers is False + + +def test_service_has_field_headers(): + service = make_service_with_method_options() + assert service.has_field_headers + + def test_module_name(): service = make_service(name='MyService') assert service.module_name == 'my_service' @@ -108,14 +119,16 @@ def make_service(name: str = 'Placeholder', host: str = '', ) -def make_lro_service() -> wrappers.Service: - # Declare a long-running method. +def make_service_with_method_options() -> wrappers.Service: + # Declare a method with options enabled for long-running operations and + # field headers. 
method = get_method( 'DoBigThing', 'foo.bar.ThingRequest', 'google.longrunning.operations.Operation', lro_payload_type='foo.baz.ThingResponse', lro_metadata_type='foo.qux.ThingMetadata', + field_headers=(headers_pb2.FieldHeader(),) ) # Define a service descriptor. @@ -129,19 +142,26 @@ def make_lro_service() -> wrappers.Service: def get_method(name: str, - in_type: str, - out_type: str, - lro_payload_type: str = '', - lro_metadata_type: str = '') -> wrappers.Method: + in_type: str, + out_type: str, + lro_payload_type: str = '', + lro_metadata_type: str = '', + field_headers: typing.Tuple[str] = (), + ) -> wrappers.Method: input_ = get_message(in_type) output = get_message(out_type) lro_payload = get_message(lro_payload_type) if lro_payload_type else None lro_metadata = get_message(lro_metadata_type) if lro_metadata_type else None + + # Define a method descriptor. Set the field headers if appropriate. method_pb = descriptor_pb2.MethodDescriptorProto( name=name, input_type=input_.proto_path, output_type=output.proto_path, ) + ext_key = headers_pb2.field_headers + method_pb.options.Extensions[ext_key].extend(field_headers) + return wrappers.Method( method_pb=method_pb, input=input_, From 6217b1416c1cfd99eb626965d29b7443b33795cc Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 13 Jul 2018 11:05:19 -0700 Subject: [PATCH 0006/1339] Refactor to support the finalized input format. (#6) This PR does a lot of things: * Uses the new annotations. * Moves templates to `api_factory/templates/` in the project root. * Removes support for flat files, which are no longer needed with @garrettjonesgoogle's package generation work. * It refactors the `API` class to maintain some separation between the source proto files. (I want this because I eventually may want some per-proto functionality.) * `API` and `Proto` are now frozen dataclasses. In order to support this, there are build class methods on both. 
* A new `Naming` class is added, and naming-based things are moved there (from `API`). * Updates documentation to be current. * Officially supports Python 3.7. --- packages/gapic-generator/.circleci/config.yml | 22 +- packages/gapic-generator/.flake8 | 8 + packages/gapic-generator/Pipfile | 24 - packages/gapic-generator/Pipfile.lock | 115 --- .../api_factory/cli/generate.py | 23 +- .../api_factory/generator/files/.flake8 | 6 - .../api_factory/generator/files/MANIFEST.in | 4 - .../api_factory/generator/files/setup.cfg | 2 - .../api_factory/generator/generator.py | 52 +- .../generator/templates/README.rst.j2 | 29 - .../gapic-generator/api_factory/schema/api.py | 358 ++++---- .../api_factory/schema/naming.py | 182 ++++ .../api_factory/schema/pb/client_pb2.py | 186 ---- .../schema/pb/expr/v1/syntax_pb2.py | 832 ------------------ .../schema/pb/expr/v1/value_pb2.py | 326 ------- .../api_factory/schema/pb/headers_pb2.py | 89 -- .../api_factory/schema/pb/lro_pb2.py | 89 -- .../api_factory/schema/pb/overload_pb2.py | 178 ---- .../api_factory/schema/pb/resources_pb2.py | 99 --- .../api_factory/schema/pb/stability_pb2.py | 120 --- .../api_factory/schema/wrappers.py | 58 +- .../templates/$namespace/$name/__init__.py.j2 | 2 +- .../$name_$version/$service/__init__.py.j2 | 0 .../$name_$version/$service/client.py.j2 | 0 .../$service/transports/__init__.py.j2 | 0 .../$service/transports/base.py.j2 | 0 .../$service/transports/grpc.py.j2 | 28 +- .../$service/transports/http.py.j2 | 9 +- .../$namespace/$name_$version/__init__.py.j2 | 0 .../{generator => }/templates/_base.py.j2 | 0 .../{generator => }/templates/setup.py.j2 | 15 +- .../api_factory/utils/cache.py | 2 +- .../docs/api-configuration.rst | 130 ++- packages/gapic-generator/docs/conf.py | 4 +- .../gapic-generator/docs/getting-started.rst | 47 +- packages/gapic-generator/docs/installing.rst | 13 +- packages/gapic-generator/docs/process.rst | 48 +- .../gapic-generator/docs/reference/schema.rst | 7 + 
packages/gapic-generator/docs/status.rst | 6 +- packages/gapic-generator/nox.py | 3 +- packages/gapic-generator/setup.py | 12 +- .../tests/unit/generator/test_generator.py | 149 ++-- .../tests/unit/schema/test_api.py | 592 +++++++------ .../tests/unit/schema/test_naming.py | 178 ++++ .../tests/unit/schema/wrappers/test_method.py | 21 +- .../unit/schema/wrappers/test_service.py | 39 +- 46 files changed, 1238 insertions(+), 2869 deletions(-) create mode 100644 packages/gapic-generator/.flake8 delete mode 100644 packages/gapic-generator/Pipfile delete mode 100644 packages/gapic-generator/Pipfile.lock delete mode 100644 packages/gapic-generator/api_factory/generator/files/.flake8 delete mode 100644 packages/gapic-generator/api_factory/generator/files/MANIFEST.in delete mode 100644 packages/gapic-generator/api_factory/generator/files/setup.cfg delete mode 100644 packages/gapic-generator/api_factory/generator/templates/README.rst.j2 create mode 100644 packages/gapic-generator/api_factory/schema/naming.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/client_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/headers_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/lro_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/overload_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/resources_pb2.py delete mode 100644 packages/gapic-generator/api_factory/schema/pb/stability_pb2.py rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name/__init__.py.j2 (68%) rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name_$version/$service/__init__.py.j2 (100%) rename packages/gapic-generator/api_factory/{generator => 
}/templates/$namespace/$name_$version/$service/client.py.j2 (100%) rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 (100%) rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name_$version/$service/transports/base.py.j2 (100%) rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 (80%) rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name_$version/$service/transports/http.py.j2 (92%) rename packages/gapic-generator/api_factory/{generator => }/templates/$namespace/$name_$version/__init__.py.j2 (100%) rename packages/gapic-generator/api_factory/{generator => }/templates/_base.py.j2 (100%) rename packages/gapic-generator/api_factory/{generator => }/templates/setup.py.j2 (54%) create mode 100644 packages/gapic-generator/tests/unit/schema/test_naming.py diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 5d4f489edabc..8e852c644b67 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -4,10 +4,11 @@ workflows: version: 2 tests: jobs: - - unit + - unit-3.6 + - unit-3.7 - docs jobs: - unit: + unit-3.6: docker: - image: 'python:3.6' steps: @@ -17,7 +18,22 @@ jobs: command: pip install nox-automation codecov - run: name: Run unit tests. - command: nox -s unit + command: nox -s "unit(python_version='3.6')" + - run: + name: Submit coverage data to codecov. + command: codecov + when: always + unit-3.7: + docker: + - image: 'python:3.7' + steps: + - checkout + - run: + name: Install nox and codecov. + command: pip install nox-automation codecov + - run: + name: Run unit tests. + command: nox -s "unit(python_version='3.7')" - run: name: Submit coverage data to codecov. 
command: codecov diff --git a/packages/gapic-generator/.flake8 b/packages/gapic-generator/.flake8 new file mode 100644 index 000000000000..5ae57fde1d72 --- /dev/null +++ b/packages/gapic-generator/.flake8 @@ -0,0 +1,8 @@ +[flake8] +ignore = + # Closing bracket mismatches opening bracket's line. + # This works poorly with type annotations in method declarations. + E123, E124 + # Line over-indented for visual indent. + # This works poorly with type annotations in method declarations. + E128 diff --git a/packages/gapic-generator/Pipfile b/packages/gapic-generator/Pipfile deleted file mode 100644 index ff6edced0cbf..000000000000 --- a/packages/gapic-generator/Pipfile +++ /dev/null @@ -1,24 +0,0 @@ -[[source]] - -url = "https://pypi.python.org/simple" -verify_ssl = true -name = "pypi" - - -[packages] - -grpcio = "*" -protobuf = "*" -"jinja2" = "*" -dataclasses = "*" -click = "*" -googleapis-common-protos = "*" - - -[dev-packages] - - - -[requires] - -python_version = "3.6" diff --git a/packages/gapic-generator/Pipfile.lock b/packages/gapic-generator/Pipfile.lock deleted file mode 100644 index ea961d7f622e..000000000000 --- a/packages/gapic-generator/Pipfile.lock +++ /dev/null @@ -1,115 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "de73dab8d1077b288091934773dd8b6a3ca2e375296de78466a98607d7df0fc9" - }, - "host-environment-markers": { - "implementation_name": "cpython", - "implementation_version": "3.6.3", - "os_name": "posix", - "platform_machine": "x86_64", - "platform_python_implementation": "CPython", - "platform_release": "4.9.0-5-amd64", - "platform_system": "Linux", - "platform_version": "#1 SMP Debian 4.9.65-3+deb9u2 (2018-01-04)", - "python_full_version": "3.6.3", - "python_version": "3.6", - "sys_platform": "linux" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.6" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.python.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "click": { - "hashes": [ - 
"sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", - "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" - ], - "version": "==6.7" - }, - "dataclasses": { - "hashes": [ - "sha256:068953b730c80cbb13ca6aac6ceedaa5d483fb6081a372ac4788aa5179ed9597", - "sha256:0f75133f21f6c9bd0fe82bc75d9908e46f531682911c9cffa75bce0e40ef09ef" - ], - "version": "==0.4" - }, - "googleapis-common-protos": { - "hashes": [ - "sha256:c075eddaa2628ab519e01b7d75b76e66c40eaa50fc52758d8225f84708950ef2" - ], - "version": "==1.5.3" - }, - "grpcio": { - "hashes": [ - "sha256:210cccde7b2e1e19dbde16660079973e26738db4a513943dd155bbcfbfcf62b5", - "sha256:96e621c962c65162edff0eac6bfac9fae99efe80fc6834952d87d05d077fe455", - "sha256:28212798de5567e5a5e1b2e2851819d4a414286a4fe76258132a1c14bae0f1de", - "sha256:9d43ba86f79de04f24dfbd3e62e8817f34eb11789758687a69dc6d94182044db", - "sha256:76e6d1e510c67afd59599d6c9797bb7a84a696d461b0ab5e42803b110e5052ed", - "sha256:6f3bc94db73e688cf8c25d80c15118f87b86c220cf284a2b29d6f1c4426e4550", - "sha256:1ed24f893ff74f7761c448942798cd2fa7194052dae8f2c521f30bff2eedce67", - "sha256:f5d1ff94fbbc0be64991d1da935cf42a09635c71cf41137b35fbee5d9658f637", - "sha256:74bdc3c632e8104c6f269fed1e950a0dd4800e605754f2121f2a555e3e1af131", - "sha256:dec40532a9889b2b645550adabb89644e8865b29a2922db9a8e9856298595104", - "sha256:84757cf49728455ed7cf1c36b0a9e06f658eb5297a206cd79174fc89d8e0ddf5", - "sha256:123cdff8c1306edcdf4a261b4fd7bd1efe07b679e6c1b6019789d7401b2dd1e6", - "sha256:9f7c8489b4a8c6be3022ea085387abe3d4973f21c7ef3641fbfc8f0e0c5e591f", - "sha256:bbbcd2a00c52e8af9924621cc2f7f735323ed7c8ca90cb5c40645e379b8d46ea", - "sha256:32950a2edcf919999e97885d14011efb276fdb6c712a9a0739abfa6c357fc0db", - "sha256:f4255661045038e1a424c7e2b1a93ed58222bf36363bd698f4ee687060767e5b", - "sha256:7ab975ae3af94f66c61a968d71f5da1d8cdbd5c31220d0eb195bd14c3384f3a1", - "sha256:8e7e7e464b66cf9da50f2f503a8be4b9fa99aae90e361c3b24f5e16b597f0f89", - 
"sha256:6ec617d4ce4cdccfbcb677b3dbf2eb45a0b521fa7fe3eec8412f342928aa60d7", - "sha256:5bdfb3eaff04e121b6d34b88a1a55d97c816961b7f94730a3e7aa2d880943e84", - "sha256:6099cbf59f487ebe0662ba938a3a7f786be97d081bbe3c0a1616466b6081a697", - "sha256:b2582d74e236c32a325b51013b40479cc7a80c9a87d2d02ad85f98a2201aa098", - "sha256:e7c43b5619deff48cc177c1b0618c4beeb2797f910f160e3c2035d5baf790a5d" - ], - "version": "==1.9.1" - }, - "jinja2": { - "hashes": [ - "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", - "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" - ], - "version": "==2.10" - }, - "markupsafe": { - "hashes": [ - "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" - ], - "version": "==1.0" - }, - "protobuf": { - "hashes": [ - "sha256:11788df3e176f44e0375fe6361342d7258a457b346504ea259a21b77ffc18a90", - "sha256:50c24f0d00b7efb3a72ae638ddc118e713cfe8cef40527afe24f7ebcb878e46d", - "sha256:41661f9a442eba2f1967f15333ebe9ecc7e7c51bcbaa2972303ad33a4ca0168e", - "sha256:06ec363b74bceb7d018f2171e0892f03ab6816530e2b0f77d725a58264551e48", - "sha256:b20f861b55efd8206428c13e017cc8e2c34b40b2a714446eb202bbf0ff7597a6", - "sha256:c1f9c36004a7ae6f1ce4a23f06070f6b07f57495f251851aa15cc4da16d08378", - "sha256:4d2e665410b0a278d2eb2c0a529ca2366bb325eb2ae34e189a826b71fb1b28cd", - "sha256:95b78959572de7d7fafa3acb718ed71f482932ddddddbd29ba8319c10639d863" - ], - "version": "==3.5.1" - }, - "six": { - "hashes": [ - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb", - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" - ], - "version": "==1.11.0" - } - }, - "develop": {} -} diff --git a/packages/gapic-generator/api_factory/cli/generate.py b/packages/gapic-generator/api_factory/cli/generate.py index 8dfc4aeaeef9..44221a088896 100644 --- a/packages/gapic-generator/api_factory/cli/generate.py +++ b/packages/gapic-generator/api_factory/cli/generate.py @@ -12,6 +12,7 @@ # See the License for 
the specific language governing permissions and # limitations under the License. +import os import sys import typing @@ -20,6 +21,7 @@ from google.protobuf.compiler import plugin_pb2 from api_factory import generator +from api_factory.schema import api @click.command() @@ -38,9 +40,24 @@ def generate( # Load the protobuf CodeGeneratorRequest. req = plugin_pb2.CodeGeneratorRequest.FromString(request.read()) - # Translate into a protobuf CodeGeneratorResponse; - # if there are issues, error out appropriately. - res = generator.Generator(req).get_response() + # Determine the appropriate package. + # This generator uses a slightly different mechanism for determining + # which files to generate; it tracks at package level rather than file + # level. + package = os.path.commonprefix([i.package for i in filter( + lambda p: p.name in req.file_to_generate, + req.proto_file, + )]).rstrip('.') + + # Build the API model object. + # This object is a frozen representation of the whole API, and is sent + # to each template in the rendering step. + api_schema = api.API.build(req.proto_file, package=package) + + # Translate into a protobuf CodeGeneratorResponse; this reads the + # individual templates and renders them. + # If there are issues, error out appropriately. + res = generator.Generator(api_schema=api_schema).get_response() # Output the serialized response. 
output.write(res.SerializeToString()) diff --git a/packages/gapic-generator/api_factory/generator/files/.flake8 b/packages/gapic-generator/api_factory/generator/files/.flake8 deleted file mode 100644 index 25168dc87605..000000000000 --- a/packages/gapic-generator/api_factory/generator/files/.flake8 +++ /dev/null @@ -1,6 +0,0 @@ -[flake8] -exclude = - __pycache__, - .git, - *.pyc, - conf.py diff --git a/packages/gapic-generator/api_factory/generator/files/MANIFEST.in b/packages/gapic-generator/api_factory/generator/files/MANIFEST.in deleted file mode 100644 index fc77f8c82ff0..000000000000 --- a/packages/gapic-generator/api_factory/generator/files/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include README.rst LICENSE -recursive-include google *.json *.proto -recursive-include tests * -global-exclude *.pyc __pycache__ diff --git a/packages/gapic-generator/api_factory/generator/files/setup.cfg b/packages/gapic-generator/api_factory/generator/files/setup.cfg deleted file mode 100644 index 2a9acf13daa9..000000000000 --- a/packages/gapic-generator/api_factory/generator/files/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[bdist_wheel] -universal = 1 diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index 9fbf00ea4c15..4292ebcf7cf5 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -21,9 +21,9 @@ from google.protobuf.compiler.plugin_pb2 import CodeGeneratorRequest from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse -from api_factory.schema.api import API -from api_factory.generator.loader import TemplateLoader from api_factory import utils +from api_factory.generator.loader import TemplateLoader +from api_factory.schema import api class Generator: @@ -40,15 +40,12 @@ class Generator: request (CodeGeneratorRequest): A request protocol buffer as provided by protoc. See ``plugin.proto``. 
""" - def __init__(self, request: CodeGeneratorRequest) -> None: - # Parse the CodeGeneratorRequest into this plugin's internal schema. - self._api = API() - for fdp in request.proto_file: - self._api.load(fdp) + def __init__(self, api_schema: api.API) -> None: + self._api = api_schema # Create the jinja environment with which to render templates. self._env = jinja2.Environment(loader=TemplateLoader( - searchpath=os.path.join(_dirname, 'templates'), + searchpath=os.path.join(_dirname, '..', 'templates'), )) # Add filters which templates require. @@ -80,13 +77,6 @@ def get_response(self) -> CodeGeneratorResponse: additional_context={'service': service}, ) - # Some files are direct files and not templates; simply read them - # into output files directly. - # - # Rather than expect an enumeration of these, we simply grab everything - # in the `files/` directory automatically. - output_files += self._read_flat_files(os.path.join(_dirname, 'files')) - # Return the CodeGeneratorResponse output. return CodeGeneratorResponse(file=output_files) @@ -132,32 +122,6 @@ def _render_templates( # Done; return the File objects based on these templates. return answer - def _read_flat_files( - self, - target_dir: str, - ) -> Sequence[CodeGeneratorResponse.File]: - answer = [] - - # Iterate over all files in the directory. - for path, _, filenames in os.walk(target_dir): - relative_path = path[len(target_dir):] - for filename in filenames: - # Determine the "relative filename" (the filename against the - # files/ subdirectory and repository root). - relative_filename = filename - if relative_path: - relative_filename = os.path.join(relative_path, filename) - - # Read the file from disk and create an appropriate OutputFile. - with io.open(os.path.join(path, filename), 'r') as f: - answer.append(CodeGeneratorResponse.File( - content=f.read(), - name=relative_filename, - )) - - # Done; return the File objects. 
- return answer - def _get_output_filename( self, template_name: str, *, @@ -187,13 +151,13 @@ def _get_output_filename( # Replace the $namespace variable. filename = filename.replace( '$namespace', - '/'.join([i.lower() for i in self._api.client.namespace]), + '/'.join([i.lower() for i in self._api.naming.namespace]), ).lstrip('/') # Replace the $name and $version variables. filename = filename.replace('$name_$version', - self._api.versioned_module_name) - filename = filename.replace('$name', self._api.module_name) + self._api.naming.versioned_module_name) + filename = filename.replace('$name', self._api.naming.module_name) # Replace the $service variable if applicable. if context and 'service' in context: diff --git a/packages/gapic-generator/api_factory/generator/templates/README.rst.j2 b/packages/gapic-generator/api_factory/generator/templates/README.rst.j2 deleted file mode 100644 index 3c5bba222208..000000000000 --- a/packages/gapic-generator/api_factory/generator/templates/README.rst.j2 +++ /dev/null @@ -1,29 +0,0 @@ -{{ api.long_name }} -{{ '=' * api.long_name|length }} - -{% if api.client.documentation.summary -%} -*{{ api.client.documentation.summary }}* -{%- endif %} - -{% if api.client.documentation.overview -%} -{{ api.client.documentation.overview|wrap(72) }} -{%- endif %} - -{% if api.client.documentation.documentation_root_url -%} -- `API Documentation <{{ api.client.documentation.documentation_root_url }}>`_ -{%- endif %} - -Installing ----------- - -The recommended way to use the {{ api.client.name }} API is by installing -this client library; do so using `pip`_: - -.. code-block:: shell - - $ pip install {{ api.warehouse_package_name }} - -.. note:: - - If you are doing local development, we recommend installing this package - inside a `virtualenv`_. 
diff --git a/packages/gapic-generator/api_factory/schema/api.py b/packages/gapic-generator/api_factory/schema/api.py index 23cc3427b663..5f32342f2f46 100644 --- a/packages/gapic-generator/api_factory/schema/api.py +++ b/packages/gapic-generator/api_factory/schema/api.py @@ -20,18 +20,51 @@ import collections import dataclasses import sys -from typing import Callable, List, Mapping +from typing import Callable, List, Mapping, Sequence, Tuple +from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 -from api_factory import utils from api_factory.schema import metadata +from api_factory.schema import naming from api_factory.schema import wrappers -from api_factory.schema.pb import client_pb2 -from api_factory.schema.pb import lro_pb2 +from api_factory.utils import cached_property -@dataclasses.dataclass +@dataclasses.dataclass(frozen=True) +class Proto: + """A representation of a particular proto file within an API.""" + + file_pb2: descriptor_pb2.FileDescriptorProto + services: Mapping[str, wrappers.Service] + messages: Mapping[str, wrappers.MessageType] + enums: Mapping[str, wrappers.EnumType] + file_to_generate: bool + + def __getattr__(self, name: str): + return getattr(self.file_pb2, name) + + @classmethod + def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool, prior_protos: Mapping[str, 'Proto'] = None, + ) -> 'Proto': + """Build and return a Proto instance. + + Args: + file_descriptor (~.FileDescriptorProto): The protocol buffer + object describing the proto file. + file_to_generate (bool): Whether this is a file which is + to be directly generated, or a dependency. + prior_protos (~.Proto): Previous, already processed protos. + These are needed to look up messages in imported protos. 
+ """ + return _ProtoBuilder(file_descriptor, + file_to_generate=file_to_generate, + prior_protos=prior_protos or {}, + ).proto + + +@dataclasses.dataclass(frozen=True) class API: """A representation of a full API. @@ -43,140 +76,172 @@ class API: An instance of this object is made available to every template (as ``api``). """ - client: client_pb2.Client = dataclasses.field( - default_factory=client_pb2.Client, - ) - services: Mapping[str, wrappers.Service] = dataclasses.field( - default_factory=dict, - ) - messages: Mapping[str, wrappers.MessageType] = dataclasses.field( - default_factory=dict, - ) - enums: Mapping[str, wrappers.EnumType] = dataclasses.field( - default_factory=dict, - ) + naming: naming.Naming + protos: Mapping[str, Proto] - @property - def long_name(self) -> str: - """Return an appropriate title-cased long name.""" - return ' '.join(list(self.client.namespace) + [self.client.name]) + @classmethod + def build(cls, + file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], + package: str = '') -> 'API': + """Build the internal API schema based on the request. - @property - def module_name(self) -> str: - """Return the appropriate Python module name.""" - return utils.to_valid_module_name(self.client.name) - - @property - def version(self) -> str: - """Return the appropriate API version.""" - return utils.to_valid_module_name(self.client.version) + Args: + file_descriptors (Sequence[~.FileDescriptorProto]): A list of + :class:`~.FileDescriptorProto` objects describing the + API. + package (str): A protocol buffer package, as a string, for which + code should be explicitly generated (including subpackages). + Protos with packages outside this list are considered imports + rather than explicit targets. + """ + # Save information about the overall naming for this API. 
+ n = naming.Naming.build(*filter( + lambda fd: fd.package.startswith(package), + file_descriptors, + )) - @property - def versioned_module_name(self) -> str: - """Return the versiond module name (e.g. ``apiname_v1``). + # Iterate over each FileDescriptorProto and fill out a Proto + # object describing it, and save these to the instance. + protos = {} + for fd in file_descriptors: + protos[fd.name] = _ProtoBuilder( + file_descriptor=fd, + file_to_generate=fd.package.startswith(package), + prior_protos=protos, + ).proto + + # Done; return the API. + return cls(naming=n, protos=protos) + + @cached_property + def enums(self) -> Mapping[str, wrappers.EnumType]: + """Return a map of all enums available in the API.""" + return collections.ChainMap({}, + *[p.enums for p in self.protos.values()], + ) - If there is no version, this is the same as ``module_name``. - """ - if self.version: - return f'{self.module_name}_{self.version}' - return self.module_name + @cached_property + def messages(self) -> Mapping[str, wrappers.MessageType]: + """Return a map of all messages available in the API.""" + return collections.ChainMap({}, + *[p.messages for p in self.protos.values()], + ) - @property - def warehouse_package_name(self) -> str: - """Return the appropriate Python package name for Warehouse.""" - # Sanity check: If no name is provided, use a clearly placeholder - # default that is not a valid name on Warehouse. - if not self.client.name: - return utils.Placeholder('<<< PACKAGE NAME >>>') + @cached_property + def services(self) -> Mapping[str, wrappers.Service]: + """Return a map of all services available in the API.""" + return collections.ChainMap({}, + *[p.services for p in self.protos.values()], + ) - # Piece the name and namespace together to come up with the - # proper package name. 
- answer = list(self.client.namespace) + self.client.name.split(' ') - return '-'.join(answer).lower() - def load(self, fdp: descriptor_pb2.FileDescriptorProto) -> None: - """Load the provided FileDescriptorProto into this object. +class _ProtoBuilder: + """A "builder class" for Proto objects. - This method iterates over the complete descriptor and loads all - of its members (services, methods, messages, enums, etc.) into - this object, wrapping each descriptor in a wrapper to preserve - metadata. + The sole purpose of this class is to accept the information from the + file descriptor and "piece together" the components of the :class:`~.Proto` + object in-place. - This method modifies the :class:`~.API` object in-place. + This allows the public :class:`~.Proto` object to be frozen, and free + of the setup machinations. - Args: - fdp (~.descriptor_pb2.FileDescriptorProto): The - :class:`FileDescriptorProto` object; this is usually provided - as a list in :class:`CodeGeneratorRequest`. - """ - # Compile together the comments from the source code. - # This creates a nested diciontary structure sorted by the - # location paths. So, a location with path [4, 1, 2, 7] will end - # up being in `comments_by_path` under [4][1][2][7]['TERMINAL']. + The correct usage of this class is always to create an instance, call + the :attr:`proto` property, and then throw the builder away. Additionally, + there should be no reason to use this class outside of this module. + """ + EMPTY = descriptor_pb2.SourceCodeInfo.Location() + + def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool, + prior_protos: Mapping[str, Proto] = None): + self.messages = {} + self.enums = {} + self.services = {} + self.file_descriptor = file_descriptor + self.file_to_generate = file_to_generate + self.prior_protos = prior_protos or {} + + # Iterate over the documentation and place it into a dictionary. 
# - # The purpose of always ending with 'TERMINAL' is because there - # always could be something nested deeper. - comments_by_path = {} - for loc in fdp.source_code_info.location: - cursor = comments_by_path - for p in loc.path: - cursor.setdefault(p, {}) - cursor = cursor[p] - cursor['TERMINAL'] = loc - - # Now iterate over the FileDescriptorProto and grab the relevant - # source documentation from the dictionary created above and - # add this to the `self.comments` dictionary, which is sorted by - # fully-qualfied proto identifiers. + # The comments in protocol buffers are sorted by a concept called + # the "path", which is a sequence of integers described in more + # detail below; this code simply shifts from a list to a dict, + # with tuples of paths as the dictionary keys. + self.docs = {} + for location in file_descriptor.source_code_info.location: + self.docs[tuple(location.path)] = location + + # Everything has an "address", which is the proto where the thing + # was declared. # - # The hard-coded keys here are based on how descriptor.proto + # We put this together by a baton pass of sorts: everything in + # this file *starts with* this address, which is appended to + # for each item as it is loaded. + address = metadata.Address( + module=file_descriptor.name.split('/')[-1][:-len('.proto')], + package=file_descriptor.package.split('.'), + ) + + # Now iterate over the FileDescriptorProto and pull out each of + # the messages, enums, and services. + # + # The hard-coded path keys sent here are based on how descriptor.proto # works; it uses the proto message number of the pieces of each # message (e.g. the hard-code `4` for `message_type` immediately # below is because `repeated DescriptorProto message_type = 4;` in # descriptor.proto itself). 
- address = metadata.Address( - module=fdp.name.split('/')[-1][:-len('.proto')], - package=fdp.package.split('.'), + self._load_children(file_descriptor.message_type, self._load_message, + address=address, path=(4,)) + self._load_children(file_descriptor.enum_type, self._load_enum, + address=address, path=(5,)) + if file_to_generate: + self._load_children(file_descriptor.service, self._load_service, + address=address, path=(6,)) + # TODO(lukesneeringer): oneofs are on path 7. + + @property + def proto(self) -> Proto: + """Return a Proto dataclass object.""" + return Proto( + enums=self.enums, + file_pb2=self.file_descriptor, + file_to_generate=self.file_to_generate, + messages=self.messages, + services=self.services, + ) + + @cached_property + def all_messages(self) -> Sequence[wrappers.MessageType]: + return collections.ChainMap({}, self.messages, + *[p.messages for p in self.prior_protos.values()], ) - self._load_children(fdp.message_type, loader=self._load_descriptor, - address=address, info=comments_by_path.get(4, {})) - self._load_children(fdp.enum_type, loader=self._load_enum, - address=address, info=comments_by_path.get(5, {})) - self._load_children(fdp.service, loader=self._load_service, - address=address, info=comments_by_path.get(6, {})) - # self._load_children(fdp.extension, loader=self._load_field, - # address=address, info=comments_by_path.get(7, {})) - - # Merge any client directives with what we have going so far. - self.client.MergeFrom(fdp.options.Extensions[client_pb2.client]) - - def _load_children(self, children: list, loader: Callable, - address: metadata.Address, info: dict) -> None: - """Load arbitrary children from a Descriptor. + + def _load_children(self, children: Sequence, loader: Callable, *, + address: metadata.Address, path: Tuple[int]) -> None: + """Return wrapped versions of arbitrary children from a Descriptor. Args: children (list): A sequence of children of the given field to be loaded. 
For example, a FileDescriptorProto contains the lists ``message_type``, ``enum_type``, etc.; these are valid inputs for this argument. - loader (Callable[Message, Address, dict]): The function able + loader (Callable[Message, Address, Tuple[int]]): The function able to load the kind of message in ``children``. This should be one of the ``_load_{noun}`` methods on this class (e.g. ``_load_descriptor``). address (~.metadata.Address): The address up to this point. This will include the package and may include outer messages. - info (dict): A dictionary of comment information corresponding to - the messages for which being laoded. In other words, this is - the segment of the source info that has paths matching - or within ``children``. + path (Tuple[int]): The location path up to this point. This is + used to correspond to documentation in + ``SourceCodeInfo.Location`` in ``descriptor.proto``. """ # Iterate over the list of children provided and call the # applicable loader function on each. for child, i in zip(children, range(0, sys.maxsize)): - loader(child, address=address, info=info.get(i, {})) + loader(child, address=address, path=path + (i,)) def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], - address: metadata.Address, info: dict, + address: metadata.Address, path: Tuple[int], ) -> Mapping[str, wrappers.Field]: """Return a dictionary of wrapped fields for the given message. @@ -185,8 +250,8 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], sequence of protobuf field objects. address (~.metadata.Address): An address object denoting the location of these fields. - info (dict): The appropriate slice of proto comments - corresponding to these fields. + path (Tuple[int]): The source location path thus far, as + understood by ``SourceCodeInfo.Location``. 
Returns: Mapping[str, ~.wrappers.Field]: A ordered mapping of @@ -199,10 +264,7 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], field_pb=field_pb, meta=metadata.Metadata( address=address, - documentation=info.get(i, {}).get( - 'TERMINAL', - descriptor_pb2.SourceCodeInfo.Location(), - ), + documentation=self.docs.get(path + (i,), self.EMPTY), ), ) @@ -210,7 +272,7 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], return answer def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], - address: metadata.Address, info: dict, + address: metadata.Address, path: Tuple[int], ) -> Mapping[str, wrappers.Method]: """Return a dictionary of wrapped methods for the given service. @@ -219,8 +281,8 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], sequence of protobuf method objects. address (~.metadata.Address): An address object denoting the location of these methods. - info (dict): The appropriate slice of proto comments - corresponding to these methods. + path (Tuple[int]): The source location path thus far, as understood + by ``SourceCodeInfo.Location``. Returns: Mapping[str, ~.wrappers.Method]: A ordered mapping of @@ -228,28 +290,25 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], """ # Iterate over the methods and collect them into a dictionary. 
answer = collections.OrderedDict() - for method_pb, i in zip(methods, range(0, sys.maxsize)): - types = method_pb.options.Extensions[lro_pb2.types] - answer[method_pb.name] = wrappers.Method( - input=self.messages[method_pb.input_type.lstrip('.')], - lro_metadata=self.messages.get(types.lro_metadata_type, None), - lro_payload=self.messages.get(types.lro_return_type, None), - method_pb=method_pb, + for meth_pb, i in zip(methods, range(0, sys.maxsize)): + types = meth_pb.options.Extensions[operations_pb2.operation_types] + answer[meth_pb.name] = wrappers.Method( + input=self.all_messages[meth_pb.input_type.lstrip('.')], + lro_metadata=self.all_messages.get(types.metadata, None), + lro_payload=self.all_messages.get(types.response, None), + method_pb=meth_pb, meta=metadata.Metadata( address=address, - documentation=info.get(i, {}).get( - 'TERMINAL', - descriptor_pb2.SourceCodeInfo.Location(), - ), + documentation=self.docs.get(path + (i,), self.EMPTY), ), - output=self.messages[method_pb.output_type.lstrip('.')], + output=self.all_messages[meth_pb.output_type.lstrip('.')], ) # Done; return the answer. return answer - def _load_descriptor(self, message_pb: descriptor_pb2.DescriptorProto, - address: metadata.Address, info: dict) -> None: + def _load_message(self, message_pb: descriptor_pb2.DescriptorProto, + address: metadata.Address, path: Tuple[int]) -> None: """Load message descriptions from DescriptorProtos.""" ident = f'{str(address)}.{message_pb.name}' nested_addr = address.child(message_pb.name) @@ -258,34 +317,34 @@ def _load_descriptor(self, message_pb: descriptor_pb2.DescriptorProto, fields = self._get_fields( message_pb.field, address=nested_addr, - info=info.get(2, {}), + path=path + (2,), ) fields.update(self._get_fields( message_pb.extension, address=nested_addr, - info=info.get(6, {}), + path=path + (6,), )) # Create a message correspoding to this descriptor. 
self.messages[ident] = wrappers.MessageType( fields=fields, message_pb=message_pb, - meta=metadata.Metadata(address=address, documentation=info.get( - 'TERMINAL', - descriptor_pb2.SourceCodeInfo.Location(), - )), + meta=metadata.Metadata( + address=address, + documentation=self.docs.get(path, self.EMPTY), + ), ) # Load all nested items. self._load_children(message_pb.nested_type, address=nested_addr, - loader=self._load_descriptor, info=info.get(3, {})) + loader=self._load_message, path=path + (3,)) self._load_children(message_pb.enum_type, address=nested_addr, - loader=self._load_enum, info=info.get(4, {})) + loader=self._load_enum, path=path + (4,)) # self._load_children(message.oneof_decl, loader=self._load_field, # address=nested_addr, info=info.get(8, {})) def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, - address: metadata.Address, info: dict) -> None: + address: metadata.Address, path: Tuple[int]) -> None: """Load enum descriptions from EnumDescriptorProtos.""" # Put together wrapped objects for the enum values. 
values = [] @@ -294,10 +353,7 @@ def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, enum_value_pb=enum_value, meta=metadata.Metadata( address=address, - documentation=info.get(2, {}).get( - 'TERMINAL', - descriptor_pb2.SourceCodeInfo.Location(), - ), + documentation=self.docs.get(path + (2, i), self.EMPTY), ), )) @@ -305,15 +361,15 @@ def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, ident = f'{str(address)}.{enum.name}' self.enums[ident] = wrappers.EnumType( enum_pb=enum, - meta=metadata.Metadata(address=address, documentation=info.get( - 'TERMINAL', - descriptor_pb2.SourceCodeInfo.Location(), - )), + meta=metadata.Metadata( + address=address, + documentation=self.docs.get(path, self.EMPTY), + ), values=values, ) def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, - address: metadata.Address, info: dict) -> None: + address: metadata.Address, path: Tuple[int]) -> None: """Load comments for a service and its methods.""" service_addr = address.child(service.name) @@ -321,15 +377,15 @@ def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, methods = self._get_methods( service.method, address=service_addr, - info=info.get(2, {}), + path=path + (2,), ) # Load the comments for the service itself. 
self.services[f'{str(address)}.{service.name}'] = wrappers.Service( - meta=metadata.Metadata(address=address, documentation=info.get( - 'TERMINAL', - descriptor_pb2.SourceCodeInfo.Location(), - )), + meta=metadata.Metadata( + address=address, + documentation=self.docs.get(path, self.EMPTY), + ), methods=methods, service_pb=service, ) diff --git a/packages/gapic-generator/api_factory/schema/naming.py b/packages/gapic-generator/api_factory/schema/naming.py new file mode 100644 index 000000000000..614d07c66763 --- /dev/null +++ b/packages/gapic-generator/api_factory/schema/naming.py @@ -0,0 +1,182 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses +import os +import re +from typing import Iterable, Tuple + +from google.api import annotations_pb2 +from google.protobuf import descriptor_pb2 + +from api_factory import utils + + +@dataclasses.dataclass(frozen=True) +class Naming: + """Naming data for an API. + + This class contains the naming nomenclature used for this API + within templates. + + An instance of this object is made available to every template + (as ``api.naming``). + """ + name: str + namespace: Tuple[str] + version: str + product_name: str + product_url: str + + @classmethod + def build(cls, + *file_descriptors: Iterable[descriptor_pb2.FileDescriptorProto] + ) -> 'Naming': + """Return a full APINaming instance based on these file descriptors. 
+ + This is pieced together from the proto package names as well as the + ``google.api.metadata`` file annotation. This information may be + present in one or many files; this method is tolerant as long as + the data does not conflict. + + Args: + file_descriptors (Iterable[~.FileDescriptorProto]): A list of + file descriptor protos. This list should only include the + files actually targeted for output (not their imports). + + Returns: + ~.Naming: A :class:`~.Naming` instance which is provided to + templates as part of the :class:`~.API`. + + Raises: + ValueError: If the provided file descriptors contain contradictory + information. + """ + # Determine the set of proto packages. + proto_packages = {fd.package for fd in file_descriptors} + root_package = os.path.commonprefix(tuple(proto_packages)).rstrip('.') + + # Sanity check: If there is no common ground in the package, + # we are obviously in trouble. + if not root_package: + raise ValueError('Protos provided have entirely different ' + 'proto packages.') + + # Define the valid regex to split the package. + # + # It is not necessary for the regex to be as particular about package + # name validity (e.g. avoiding .. or segments starting with numbers) + # because protoc is guaranteed to give us valid package names. + pattern = r'^((?P[a-z0-9_.]+)\.)?(?P[a-z0-9_]+)' + + # Only require the version portion of the regex if the version is + # present. + # + # This code may look counter-intuitive (why not use ? to make it + # optional), but the engine's greediness routine will decide that + # the version is the name, which is not what we want. 
+ version = r'\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta|test)[0-9]*)?)' + if re.search(version, root_package): + pattern += version + + # Okay, do the match + match = re.search(pattern=pattern, string=root_package).groupdict() + match['namespace'] = match['namespace'] or '' + package_info = cls( + name=match['name'].capitalize(), + namespace=tuple([i.capitalize() + for i in match['namespace'].split('.')]), + product_name=match['name'].capitalize(), + product_url='', + version=match.get('version', ''), + ) + + # Sanity check: Ensure that the package directives all inferred + # the same information. + if not package_info.version and len(proto_packages) > 1: + raise ValueError('All protos must have the same proto package ' + 'up to and including the version.') + + # Iterate over the metadata annotations and collect the package + # information from there. + # + # This creates a naming class non-empty metadata annotation and + # uses Python's set logic to de-duplicate. There should only be one. + metadata_info = set() + for fd in file_descriptors: + meta = fd.options.Extensions[annotations_pb2.metadata] + naming = cls( + name=meta.package_name or meta.product_name, + namespace=tuple(meta.package_namespace), + product_name=meta.product_name, + product_url=meta.product_url, + version='', + ) + if naming: + metadata_info.add(naming) + + # Sanity check: Ensure that any google.api.metadata provisions were + # consistent. + if len(metadata_info) > 1: + raise ValueError( + 'If the google.api.metadata annotation is provided in more ' + 'than one file, it must be consistent.', + ) + + # Merge the package naming information and the metadata naming + # information, with the latter being preferred. + # Return a Naming object which effectively merges them. + answer = package_info + if len(metadata_info): + for k, v in dataclasses.asdict(metadata_info.pop()).items(): + # Sanity check: We only want to overwrite anything if the + # new value is truthy. 
+ if v: + answer = dataclasses.replace(answer, **{k: v}) + return answer + + def __bool__(self): + """Return True if any of the fields are truthy, False otherwise.""" + return any( + [getattr(self, i.name) for i in dataclasses.fields(self)], + ) + + @property + def long_name(self) -> str: + """Return an appropriate title-cased long name.""" + return ' '.join(tuple(self.namespace) + (self.name,)) + + @property + def module_name(self) -> str: + """Return the appropriate Python module name.""" + return utils.to_valid_module_name(self.name) + + @property + def versioned_module_name(self) -> str: + """Return the versiond module name (e.g. ``apiname_v1``). + + If there is no version, this is the same as ``module_name``. + """ + if self.version: + return f'{self.module_name}_{self.version}' + return self.module_name + + @property + def warehouse_package_name(self) -> str: + """Return the appropriate Python package name for Warehouse.""" + + # Piece the name and namespace together to come up with the + # proper package name. + answer = list(self.namespace) + self.name.split(' ') + return '-'.join(answer).lower() diff --git a/packages/gapic-generator/api_factory/schema/pb/client_pb2.py b/packages/gapic-generator/api_factory/schema/pb/client_pb2.py deleted file mode 100644 index 4cb10c7d5e0a..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/client_pb2.py +++ /dev/null @@ -1,186 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/api/experimental/client.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import documentation_pb2 as google_dot_api_dot_documentation__pb2 -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/experimental/client.proto', - package='google.api.experimental', - syntax='proto3', - serialized_pb=_b('\n$google/api/experimental/client.proto\x12\x17google.api.experimental\x1a\x1egoogle/api/documentation.proto\x1a google/protobuf/descriptor.proto\"+\n\tCopyright\x12\x10\n\x08\x66ullname\x18\x01 \x01(\t\x12\x0c\n\x04year\x18\x02 \x01(\t\"\xb4\x01\n\x06\x43lient\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tnamespace\x18\x02 \x03(\t\x12\x0f\n\x07version\x18\x03 \x01(\t\x12\x30\n\rdocumentation\x18\n \x01(\x0b\x32\x19.google.api.Documentation\x12\x35\n\tcopyright\x18\x0b \x01(\x0b\x32\".google.api.experimental.Copyright\x12\x0f\n\x07license\x18\x0c \x01(\t:O\n\x06\x63lient\x12\x1c.google.protobuf.FileOptions\x18\xfc\x88\x03 \x01(\x0b\x32\x1f.google.api.experimental.Client:/\n\x04host\x12\x1f.google.protobuf.ServiceOptions\x18\xfd\x88\x03 \x01(\t:7\n\x0coauth_scopes\x12\x1f.google.protobuf.ServiceOptions\x18\xfe\x88\x03 \x03(\tb\x06proto3') - , - dependencies=[google_dot_api_dot_documentation__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -CLIENT_FIELD_NUMBER = 50300 -client = _descriptor.FieldDescriptor( - name='client', full_name='google.api.experimental.client', index=0, - number=50300, type=11, cpp_type=10, label=1, - 
has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) -HOST_FIELD_NUMBER = 50301 -host = _descriptor.FieldDescriptor( - name='host', full_name='google.api.experimental.host', index=1, - number=50301, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) -OAUTH_SCOPES_FIELD_NUMBER = 50302 -oauth_scopes = _descriptor.FieldDescriptor( - name='oauth_scopes', full_name='google.api.experimental.oauth_scopes', index=2, - number=50302, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) - - -_COPYRIGHT = _descriptor.Descriptor( - name='Copyright', - full_name='google.api.experimental.Copyright', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fullname', full_name='google.api.experimental.Copyright.fullname', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='year', full_name='google.api.experimental.Copyright.year', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - 
serialized_start=131, - serialized_end=174, -) - - -_CLIENT = _descriptor.Descriptor( - name='Client', - full_name='google.api.experimental.Client', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.api.experimental.Client.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='namespace', full_name='google.api.experimental.Client.namespace', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='version', full_name='google.api.experimental.Client.version', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='documentation', full_name='google.api.experimental.Client.documentation', index=3, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='copyright', full_name='google.api.experimental.Client.copyright', index=4, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='license', 
full_name='google.api.experimental.Client.license', index=5, - number=12, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=177, - serialized_end=357, -) - -_CLIENT.fields_by_name['documentation'].message_type = google_dot_api_dot_documentation__pb2._DOCUMENTATION -_CLIENT.fields_by_name['copyright'].message_type = _COPYRIGHT -DESCRIPTOR.message_types_by_name['Copyright'] = _COPYRIGHT -DESCRIPTOR.message_types_by_name['Client'] = _CLIENT -DESCRIPTOR.extensions_by_name['client'] = client -DESCRIPTOR.extensions_by_name['host'] = host -DESCRIPTOR.extensions_by_name['oauth_scopes'] = oauth_scopes -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Copyright = _reflection.GeneratedProtocolMessageType('Copyright', (_message.Message,), dict( - DESCRIPTOR = _COPYRIGHT, - __module__ = 'google.api.experimental.client_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.Copyright) - )) -_sym_db.RegisterMessage(Copyright) - -Client = _reflection.GeneratedProtocolMessageType('Client', (_message.Message,), dict( - DESCRIPTOR = _CLIENT, - __module__ = 'google.api.experimental.client_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.Client) - )) -_sym_db.RegisterMessage(Client) - -client.message_type = _CLIENT -google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(client) -google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(host) -google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(oauth_scopes) - -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py 
b/packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py deleted file mode 100644 index 5da32a22f90f..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/expr/v1/syntax_pb2.py +++ /dev/null @@ -1,832 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/api/expr/v1/syntax.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/expr/v1/syntax.proto', - package='google.api.expr.v1', - syntax='proto3', - serialized_pb=_b('\n\x1fgoogle/api/expr/v1/syntax.proto\x12\x12google.api.expr.v1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"o\n\nParsedExpr\x12&\n\x04\x65xpr\x18\x02 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x33\n\x0bsource_info\x18\x03 \x01(\x0b\x32\x1e.google.api.expr.v1.SourceInfoJ\x04\x08\x01\x10\x02\"\xd9\t\n\x04\x45xpr\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x32\n\nconst_expr\x18\x03 \x01(\x0b\x32\x1c.google.api.expr.v1.ConstantH\x00\x12\x34\n\nident_expr\x18\x04 \x01(\x0b\x32\x1e.google.api.expr.v1.Expr.IdentH\x00\x12\x36\n\x0bselect_expr\x18\x05 \x01(\x0b\x32\x1f.google.api.expr.v1.Expr.SelectH\x00\x12\x32\n\tcall_expr\x18\x06 \x01(\x0b\x32\x1d.google.api.expr.v1.Expr.CallH\x00\x12\x38\n\tlist_expr\x18\x07 
\x01(\x0b\x32#.google.api.expr.v1.Expr.CreateListH\x00\x12<\n\x0bstruct_expr\x18\x08 \x01(\x0b\x32%.google.api.expr.v1.Expr.CreateStructH\x00\x12\x44\n\x12\x63omprehension_expr\x18\t \x01(\x0b\x32&.google.api.expr.v1.Expr.ComprehensionH\x00\x1a\x15\n\x05Ident\x12\x0c\n\x04name\x18\x01 \x01(\t\x1aU\n\x06Select\x12)\n\x07operand\x18\x01 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\r\n\x05\x66ield\x18\x02 \x01(\t\x12\x11\n\ttest_only\x18\x03 \x01(\x08\x1aj\n\x04\x43\x61ll\x12(\n\x06target\x18\x01 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x10\n\x08\x66unction\x18\x02 \x01(\t\x12&\n\x04\x61rgs\x18\x03 \x03(\x0b\x32\x18.google.api.expr.v1.Expr\x1a\x38\n\nCreateList\x12*\n\x08\x65lements\x18\x01 \x03(\x0b\x32\x18.google.api.expr.v1.Expr\x1a\xef\x01\n\x0c\x43reateStruct\x12\x14\n\x0cmessage_name\x18\x01 \x01(\t\x12<\n\x07\x65ntries\x18\x02 \x03(\x0b\x32+.google.api.expr.v1.Expr.CreateStruct.Entry\x1a\x8a\x01\n\x05\x45ntry\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x13\n\tfield_key\x18\x02 \x01(\tH\x00\x12+\n\x07map_key\x18\x03 \x01(\x0b\x32\x18.google.api.expr.v1.ExprH\x00\x12\'\n\x05value\x18\x04 \x01(\x0b\x32\x18.google.api.expr.v1.ExprB\n\n\x08key_kind\x1a\x97\x02\n\rComprehension\x12\x10\n\x08iter_var\x18\x01 \x01(\t\x12,\n\niter_range\x18\x02 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x10\n\x08\x61\x63\x63u_var\x18\x03 \x01(\t\x12+\n\taccu_init\x18\x04 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12\x30\n\x0eloop_condition\x18\x05 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12+\n\tloop_step\x18\x06 \x01(\x0b\x32\x18.google.api.expr.v1.Expr\x12(\n\x06result\x18\x07 \x01(\x0b\x32\x18.google.api.expr.v1.ExprB\x0b\n\texpr_kindJ\x04\x08\x01\x10\x02\"\xc5\x02\n\x08\x43onstant\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x15\n\x0bint64_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0cuint64_value\x18\x04 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x05 
\x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x12\x33\n\x0e\x64uration_value\x18\x08 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x35\n\x0ftimestamp_value\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x0f\n\rconstant_kind\"\xc0\x01\n\nSourceInfo\x12\x16\n\x0esyntax_version\x18\x01 \x01(\t\x12\x10\n\x08location\x18\x02 \x01(\t\x12\x14\n\x0cline_offsets\x18\x03 \x03(\x05\x12@\n\tpositions\x18\x04 \x03(\x0b\x32-.google.api.expr.v1.SourceInfo.PositionsEntry\x1a\x30\n\x0ePositionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x03\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\x42*\n\x16\x63om.google.api.expr.v1B\x0bSyntaxProtoP\x01\xf8\x01\x01\x62\x06proto3') - , - dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - - - -_PARSEDEXPR = _descriptor.Descriptor( - name='ParsedExpr', - full_name='google.api.expr.v1.ParsedExpr', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='expr', full_name='google.api.expr.v1.ParsedExpr.expr', index=0, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='source_info', full_name='google.api.expr.v1.ParsedExpr.source_info', index=1, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=150, - serialized_end=261, -) - - -_EXPR_IDENT = 
_descriptor.Descriptor( - name='Ident', - full_name='google.api.expr.v1.Expr.Ident', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.api.expr.v1.Expr.Ident.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=688, - serialized_end=709, -) - -_EXPR_SELECT = _descriptor.Descriptor( - name='Select', - full_name='google.api.expr.v1.Expr.Select', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='operand', full_name='google.api.expr.v1.Expr.Select.operand', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='field', full_name='google.api.expr.v1.Expr.Select.field', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='test_only', full_name='google.api.expr.v1.Expr.Select.test_only', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=711, - serialized_end=796, -) - -_EXPR_CALL = _descriptor.Descriptor( - name='Call', - full_name='google.api.expr.v1.Expr.Call', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='target', full_name='google.api.expr.v1.Expr.Call.target', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='function', full_name='google.api.expr.v1.Expr.Call.function', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='args', full_name='google.api.expr.v1.Expr.Call.args', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=798, - serialized_end=904, -) - -_EXPR_CREATELIST = _descriptor.Descriptor( - name='CreateList', - full_name='google.api.expr.v1.Expr.CreateList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='elements', full_name='google.api.expr.v1.Expr.CreateList.elements', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=906, - serialized_end=962, -) - -_EXPR_CREATESTRUCT_ENTRY = _descriptor.Descriptor( - name='Entry', - full_name='google.api.expr.v1.Expr.CreateStruct.Entry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.id', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='field_key', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.field_key', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='map_key', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.map_key', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.value', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - 
extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='key_kind', full_name='google.api.expr.v1.Expr.CreateStruct.Entry.key_kind', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1066, - serialized_end=1204, -) - -_EXPR_CREATESTRUCT = _descriptor.Descriptor( - name='CreateStruct', - full_name='google.api.expr.v1.Expr.CreateStruct', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='message_name', full_name='google.api.expr.v1.Expr.CreateStruct.message_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entries', full_name='google.api.expr.v1.Expr.CreateStruct.entries', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_EXPR_CREATESTRUCT_ENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=965, - serialized_end=1204, -) - -_EXPR_COMPREHENSION = _descriptor.Descriptor( - name='Comprehension', - full_name='google.api.expr.v1.Expr.Comprehension', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='iter_var', full_name='google.api.expr.v1.Expr.Comprehension.iter_var', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='iter_range', 
full_name='google.api.expr.v1.Expr.Comprehension.iter_range', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='accu_var', full_name='google.api.expr.v1.Expr.Comprehension.accu_var', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='accu_init', full_name='google.api.expr.v1.Expr.Comprehension.accu_init', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='loop_condition', full_name='google.api.expr.v1.Expr.Comprehension.loop_condition', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='loop_step', full_name='google.api.expr.v1.Expr.Comprehension.loop_step', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='result', full_name='google.api.expr.v1.Expr.Comprehension.result', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1207, - serialized_end=1486, -) - -_EXPR = _descriptor.Descriptor( - name='Expr', - full_name='google.api.expr.v1.Expr', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='google.api.expr.v1.Expr.id', index=0, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='const_expr', full_name='google.api.expr.v1.Expr.const_expr', index=1, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ident_expr', full_name='google.api.expr.v1.Expr.ident_expr', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='select_expr', full_name='google.api.expr.v1.Expr.select_expr', index=3, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='call_expr', full_name='google.api.expr.v1.Expr.call_expr', index=4, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='list_expr', full_name='google.api.expr.v1.Expr.list_expr', index=5, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='struct_expr', full_name='google.api.expr.v1.Expr.struct_expr', index=6, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='comprehension_expr', full_name='google.api.expr.v1.Expr.comprehension_expr', index=7, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_EXPR_IDENT, _EXPR_SELECT, _EXPR_CALL, _EXPR_CREATELIST, _EXPR_CREATESTRUCT, _EXPR_COMPREHENSION, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='expr_kind', full_name='google.api.expr.v1.Expr.expr_kind', - index=0, containing_type=None, fields=[]), - ], - serialized_start=264, - serialized_end=1505, -) - - -_CONSTANT = _descriptor.Descriptor( - name='Constant', - full_name='google.api.expr.v1.Constant', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='null_value', full_name='google.api.expr.v1.Constant.null_value', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bool_value', full_name='google.api.expr.v1.Constant.bool_value', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int64_value', full_name='google.api.expr.v1.Constant.int64_value', index=2, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uint64_value', full_name='google.api.expr.v1.Constant.uint64_value', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.api.expr.v1.Constant.double_value', index=4, - number=5, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.api.expr.v1.Constant.string_value', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bytes_value', full_name='google.api.expr.v1.Constant.bytes_value', index=6, - number=7, type=12, cpp_type=9, label=1, - 
has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='duration_value', full_name='google.api.expr.v1.Constant.duration_value', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestamp_value', full_name='google.api.expr.v1.Constant.timestamp_value', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='constant_kind', full_name='google.api.expr.v1.Constant.constant_kind', - index=0, containing_type=None, fields=[]), - ], - serialized_start=1508, - serialized_end=1833, -) - - -_SOURCEINFO_POSITIONSENTRY = _descriptor.Descriptor( - name='PositionsEntry', - full_name='google.api.expr.v1.SourceInfo.PositionsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.api.expr.v1.SourceInfo.PositionsEntry.key', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.api.expr.v1.SourceInfo.PositionsEntry.value', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, 
default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1980, - serialized_end=2028, -) - -_SOURCEINFO = _descriptor.Descriptor( - name='SourceInfo', - full_name='google.api.expr.v1.SourceInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='syntax_version', full_name='google.api.expr.v1.SourceInfo.syntax_version', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='location', full_name='google.api.expr.v1.SourceInfo.location', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='line_offsets', full_name='google.api.expr.v1.SourceInfo.line_offsets', index=2, - number=3, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='positions', full_name='google.api.expr.v1.SourceInfo.positions', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, 
file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_SOURCEINFO_POSITIONSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1836, - serialized_end=2028, -) - -_PARSEDEXPR.fields_by_name['expr'].message_type = _EXPR -_PARSEDEXPR.fields_by_name['source_info'].message_type = _SOURCEINFO -_EXPR_IDENT.containing_type = _EXPR -_EXPR_SELECT.fields_by_name['operand'].message_type = _EXPR -_EXPR_SELECT.containing_type = _EXPR -_EXPR_CALL.fields_by_name['target'].message_type = _EXPR -_EXPR_CALL.fields_by_name['args'].message_type = _EXPR -_EXPR_CALL.containing_type = _EXPR -_EXPR_CREATELIST.fields_by_name['elements'].message_type = _EXPR -_EXPR_CREATELIST.containing_type = _EXPR -_EXPR_CREATESTRUCT_ENTRY.fields_by_name['map_key'].message_type = _EXPR -_EXPR_CREATESTRUCT_ENTRY.fields_by_name['value'].message_type = _EXPR -_EXPR_CREATESTRUCT_ENTRY.containing_type = _EXPR_CREATESTRUCT -_EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'].fields.append( - _EXPR_CREATESTRUCT_ENTRY.fields_by_name['field_key']) -_EXPR_CREATESTRUCT_ENTRY.fields_by_name['field_key'].containing_oneof = _EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'] -_EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'].fields.append( - _EXPR_CREATESTRUCT_ENTRY.fields_by_name['map_key']) -_EXPR_CREATESTRUCT_ENTRY.fields_by_name['map_key'].containing_oneof = _EXPR_CREATESTRUCT_ENTRY.oneofs_by_name['key_kind'] -_EXPR_CREATESTRUCT.fields_by_name['entries'].message_type = _EXPR_CREATESTRUCT_ENTRY -_EXPR_CREATESTRUCT.containing_type = _EXPR -_EXPR_COMPREHENSION.fields_by_name['iter_range'].message_type = _EXPR -_EXPR_COMPREHENSION.fields_by_name['accu_init'].message_type = _EXPR -_EXPR_COMPREHENSION.fields_by_name['loop_condition'].message_type = _EXPR -_EXPR_COMPREHENSION.fields_by_name['loop_step'].message_type = _EXPR -_EXPR_COMPREHENSION.fields_by_name['result'].message_type = _EXPR 
-_EXPR_COMPREHENSION.containing_type = _EXPR -_EXPR.fields_by_name['const_expr'].message_type = _CONSTANT -_EXPR.fields_by_name['ident_expr'].message_type = _EXPR_IDENT -_EXPR.fields_by_name['select_expr'].message_type = _EXPR_SELECT -_EXPR.fields_by_name['call_expr'].message_type = _EXPR_CALL -_EXPR.fields_by_name['list_expr'].message_type = _EXPR_CREATELIST -_EXPR.fields_by_name['struct_expr'].message_type = _EXPR_CREATESTRUCT -_EXPR.fields_by_name['comprehension_expr'].message_type = _EXPR_COMPREHENSION -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['const_expr']) -_EXPR.fields_by_name['const_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['ident_expr']) -_EXPR.fields_by_name['ident_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['select_expr']) -_EXPR.fields_by_name['select_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['call_expr']) -_EXPR.fields_by_name['call_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['list_expr']) -_EXPR.fields_by_name['list_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['struct_expr']) -_EXPR.fields_by_name['struct_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_EXPR.oneofs_by_name['expr_kind'].fields.append( - _EXPR.fields_by_name['comprehension_expr']) -_EXPR.fields_by_name['comprehension_expr'].containing_oneof = _EXPR.oneofs_by_name['expr_kind'] -_CONSTANT.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE -_CONSTANT.fields_by_name['duration_value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION 
-_CONSTANT.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['null_value']) -_CONSTANT.fields_by_name['null_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['bool_value']) -_CONSTANT.fields_by_name['bool_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['int64_value']) -_CONSTANT.fields_by_name['int64_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['uint64_value']) -_CONSTANT.fields_by_name['uint64_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['double_value']) -_CONSTANT.fields_by_name['double_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['string_value']) -_CONSTANT.fields_by_name['string_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['bytes_value']) -_CONSTANT.fields_by_name['bytes_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['duration_value']) -_CONSTANT.fields_by_name['duration_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_CONSTANT.oneofs_by_name['constant_kind'].fields.append( - _CONSTANT.fields_by_name['timestamp_value']) -_CONSTANT.fields_by_name['timestamp_value'].containing_oneof = _CONSTANT.oneofs_by_name['constant_kind'] -_SOURCEINFO_POSITIONSENTRY.containing_type = 
_SOURCEINFO -_SOURCEINFO.fields_by_name['positions'].message_type = _SOURCEINFO_POSITIONSENTRY -DESCRIPTOR.message_types_by_name['ParsedExpr'] = _PARSEDEXPR -DESCRIPTOR.message_types_by_name['Expr'] = _EXPR -DESCRIPTOR.message_types_by_name['Constant'] = _CONSTANT -DESCRIPTOR.message_types_by_name['SourceInfo'] = _SOURCEINFO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ParsedExpr = _reflection.GeneratedProtocolMessageType('ParsedExpr', (_message.Message,), dict( - DESCRIPTOR = _PARSEDEXPR, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.ParsedExpr) - )) -_sym_db.RegisterMessage(ParsedExpr) - -Expr = _reflection.GeneratedProtocolMessageType('Expr', (_message.Message,), dict( - - Ident = _reflection.GeneratedProtocolMessageType('Ident', (_message.Message,), dict( - DESCRIPTOR = _EXPR_IDENT, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Ident) - )) - , - - Select = _reflection.GeneratedProtocolMessageType('Select', (_message.Message,), dict( - DESCRIPTOR = _EXPR_SELECT, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Select) - )) - , - - Call = _reflection.GeneratedProtocolMessageType('Call', (_message.Message,), dict( - DESCRIPTOR = _EXPR_CALL, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Call) - )) - , - - CreateList = _reflection.GeneratedProtocolMessageType('CreateList', (_message.Message,), dict( - DESCRIPTOR = _EXPR_CREATELIST, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.CreateList) - )) - , - - CreateStruct = _reflection.GeneratedProtocolMessageType('CreateStruct', (_message.Message,), dict( - - Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( - DESCRIPTOR = _EXPR_CREATESTRUCT_ENTRY, - __module__ = 
'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.CreateStruct.Entry) - )) - , - DESCRIPTOR = _EXPR_CREATESTRUCT, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.CreateStruct) - )) - , - - Comprehension = _reflection.GeneratedProtocolMessageType('Comprehension', (_message.Message,), dict( - DESCRIPTOR = _EXPR_COMPREHENSION, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr.Comprehension) - )) - , - DESCRIPTOR = _EXPR, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Expr) - )) -_sym_db.RegisterMessage(Expr) -_sym_db.RegisterMessage(Expr.Ident) -_sym_db.RegisterMessage(Expr.Select) -_sym_db.RegisterMessage(Expr.Call) -_sym_db.RegisterMessage(Expr.CreateList) -_sym_db.RegisterMessage(Expr.CreateStruct) -_sym_db.RegisterMessage(Expr.CreateStruct.Entry) -_sym_db.RegisterMessage(Expr.Comprehension) - -Constant = _reflection.GeneratedProtocolMessageType('Constant', (_message.Message,), dict( - DESCRIPTOR = _CONSTANT, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Constant) - )) -_sym_db.RegisterMessage(Constant) - -SourceInfo = _reflection.GeneratedProtocolMessageType('SourceInfo', (_message.Message,), dict( - - PositionsEntry = _reflection.GeneratedProtocolMessageType('PositionsEntry', (_message.Message,), dict( - DESCRIPTOR = _SOURCEINFO_POSITIONSENTRY, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.SourceInfo.PositionsEntry) - )) - , - DESCRIPTOR = _SOURCEINFO, - __module__ = 'google.api.expr.v1.syntax_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.SourceInfo) - )) -_sym_db.RegisterMessage(SourceInfo) -_sym_db.RegisterMessage(SourceInfo.PositionsEntry) - - -DESCRIPTOR.has_options = True 
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.api.expr.v1B\013SyntaxProtoP\001\370\001\001')) -_SOURCEINFO_POSITIONSENTRY.has_options = True -_SOURCEINFO_POSITIONSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py b/packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py deleted file mode 100644 index d097dc007296..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/expr/v1/value_pb2.py +++ /dev/null @@ -1,326 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/api/expr/v1/value.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/expr/v1/value.proto', - package='google.api.expr.v1', - syntax='proto3', - serialized_pb=_b('\n\x1egoogle/api/expr/v1/value.proto\x12\x12google.api.expr.v1\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd0\x03\n\x05Value\x12\x30\n\nnull_value\x18\x01 
\x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x15\n\x0bint64_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0cuint64_value\x18\x04 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x05 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x12\x33\n\x0e\x64uration_value\x18\x08 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x35\n\x0ftimestamp_value\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12-\n\rmessage_value\x18\n \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x31\n\tmap_value\x18\x0b \x01(\x0b\x32\x1c.google.api.expr.v1.MapValueH\x00\x12\x33\n\nlist_value\x18\x0c \x01(\x0b\x32\x1d.google.api.expr.v1.ListValueH\x00\x42\x06\n\x04kind\"6\n\tListValue\x12)\n\x06values\x18\x01 \x03(\x0b\x32\x19.google.api.expr.v1.Value\"\x9a\x01\n\x08MapValue\x12\x33\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\".google.api.expr.v1.MapValue.Entry\x1aY\n\x05\x45ntry\x12&\n\x03key\x18\x01 \x01(\x0b\x32\x19.google.api.expr.v1.Value\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x19.google.api.expr.v1.ValueB)\n\x16\x63om.google.api.expr.v1B\nValueProtoP\x01\xf8\x01\x01\x62\x06proto3') - , - dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - - - -_VALUE = _descriptor.Descriptor( - name='Value', - full_name='google.api.expr.v1.Value', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='null_value', full_name='google.api.expr.v1.Value.null_value', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bool_value', full_name='google.api.expr.v1.Value.bool_value', 
index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='int64_value', full_name='google.api.expr.v1.Value.int64_value', index=2, - number=3, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='uint64_value', full_name='google.api.expr.v1.Value.uint64_value', index=3, - number=4, type=4, cpp_type=4, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='double_value', full_name='google.api.expr.v1.Value.double_value', index=4, - number=5, type=1, cpp_type=5, label=1, - has_default_value=False, default_value=float(0), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='string_value', full_name='google.api.expr.v1.Value.string_value', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='bytes_value', full_name='google.api.expr.v1.Value.bytes_value', index=6, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='duration_value', full_name='google.api.expr.v1.Value.duration_value', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='timestamp_value', full_name='google.api.expr.v1.Value.timestamp_value', index=8, - number=9, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='message_value', full_name='google.api.expr.v1.Value.message_value', index=9, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='map_value', full_name='google.api.expr.v1.Value.map_value', index=10, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='list_value', full_name='google.api.expr.v1.Value.list_value', index=11, - number=12, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='kind', full_name='google.api.expr.v1.Value.kind', - index=0, containing_type=None, fields=[]), - ], - serialized_start=177, - 
serialized_end=641, -) - - -_LISTVALUE = _descriptor.Descriptor( - name='ListValue', - full_name='google.api.expr.v1.ListValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='values', full_name='google.api.expr.v1.ListValue.values', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=643, - serialized_end=697, -) - - -_MAPVALUE_ENTRY = _descriptor.Descriptor( - name='Entry', - full_name='google.api.expr.v1.MapValue.Entry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.api.expr.v1.MapValue.Entry.key', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.api.expr.v1.MapValue.Entry.value', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=765, - serialized_end=854, -) - -_MAPVALUE = _descriptor.Descriptor( - name='MapValue', - full_name='google.api.expr.v1.MapValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name='entries', full_name='google.api.expr.v1.MapValue.entries', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_MAPVALUE_ENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=700, - serialized_end=854, -) - -_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE -_VALUE.fields_by_name['duration_value'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VALUE.fields_by_name['message_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_VALUE.fields_by_name['map_value'].message_type = _MAPVALUE -_VALUE.fields_by_name['list_value'].message_type = _LISTVALUE -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['null_value']) -_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['bool_value']) -_VALUE.fields_by_name['bool_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['int64_value']) -_VALUE.fields_by_name['int64_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['uint64_value']) -_VALUE.fields_by_name['uint64_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['double_value']) -_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - 
_VALUE.fields_by_name['string_value']) -_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['bytes_value']) -_VALUE.fields_by_name['bytes_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['duration_value']) -_VALUE.fields_by_name['duration_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['timestamp_value']) -_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['message_value']) -_VALUE.fields_by_name['message_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['map_value']) -_VALUE.fields_by_name['map_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_VALUE.oneofs_by_name['kind'].fields.append( - _VALUE.fields_by_name['list_value']) -_VALUE.fields_by_name['list_value'].containing_oneof = _VALUE.oneofs_by_name['kind'] -_LISTVALUE.fields_by_name['values'].message_type = _VALUE -_MAPVALUE_ENTRY.fields_by_name['key'].message_type = _VALUE -_MAPVALUE_ENTRY.fields_by_name['value'].message_type = _VALUE -_MAPVALUE_ENTRY.containing_type = _MAPVALUE -_MAPVALUE.fields_by_name['entries'].message_type = _MAPVALUE_ENTRY -DESCRIPTOR.message_types_by_name['Value'] = _VALUE -DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE -DESCRIPTOR.message_types_by_name['MapValue'] = _MAPVALUE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict( - DESCRIPTOR = _VALUE, - __module__ = 'google.api.expr.v1.value_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.Value) - )) -_sym_db.RegisterMessage(Value) - -ListValue = 
_reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), dict( - DESCRIPTOR = _LISTVALUE, - __module__ = 'google.api.expr.v1.value_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.ListValue) - )) -_sym_db.RegisterMessage(ListValue) - -MapValue = _reflection.GeneratedProtocolMessageType('MapValue', (_message.Message,), dict( - - Entry = _reflection.GeneratedProtocolMessageType('Entry', (_message.Message,), dict( - DESCRIPTOR = _MAPVALUE_ENTRY, - __module__ = 'google.api.expr.v1.value_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.MapValue.Entry) - )) - , - DESCRIPTOR = _MAPVALUE, - __module__ = 'google.api.expr.v1.value_pb2' - # @@protoc_insertion_point(class_scope:google.api.expr.v1.MapValue) - )) -_sym_db.RegisterMessage(MapValue) -_sym_db.RegisterMessage(MapValue.Entry) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\026com.google.api.expr.v1B\nValueProtoP\001\370\001\001')) -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py b/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py deleted file mode 100644 index ee77e08f3921..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/headers_pb2.py +++ /dev/null @@ -1,89 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/api/experimental/headers.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/experimental/headers.proto', - package='google.api.experimental', - syntax='proto3', - serialized_pb=_b('\n%google/api/experimental/headers.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\",\n\x0b\x46ieldHeader\x12\r\n\x05\x66ield\x18\x01 \x01(\t\x12\x0e\n\x06header\x18\x02 \x01(\t:]\n\rfield_headers\x12\x1e.google.protobuf.MethodOptions\x18\xf0\x8c\x03 \x03(\x0b\x32$.google.api.experimental.FieldHeaderb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -FIELD_HEADERS_FIELD_NUMBER = 50800 -field_headers = _descriptor.FieldDescriptor( - name='field_headers', full_name='google.api.experimental.field_headers', index=0, - number=50800, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) - - -_FIELDHEADER = _descriptor.Descriptor( - name='FieldHeader', - full_name='google.api.experimental.FieldHeader', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field', full_name='google.api.experimental.FieldHeader.field', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='header', full_name='google.api.experimental.FieldHeader.header', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=100, - serialized_end=144, -) - -DESCRIPTOR.message_types_by_name['FieldHeader'] = _FIELDHEADER -DESCRIPTOR.extensions_by_name['field_headers'] = field_headers -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -FieldHeader = _reflection.GeneratedProtocolMessageType('FieldHeader', (_message.Message,), dict( - DESCRIPTOR = _FIELDHEADER, - __module__ = 'google.api.experimental.headers_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.FieldHeader) - )) -_sym_db.RegisterMessage(FieldHeader) - -field_headers.message_type = _FIELDHEADER -google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(field_headers) - -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py b/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py deleted file mode 100644 index cbaf9cb1077e..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/lro_pb2.py +++ /dev/null @@ -1,89 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/api/experimental/lro.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/experimental/lro.proto', - package='google.api.experimental', - syntax='proto3', - serialized_pb=_b('\n!google/api/experimental/lro.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\"A\n\x0bMethodTypes\x12\x17\n\x0flro_return_type\x18\x01 \x01(\t\x12\x19\n\x11lro_metadata_type\x18\x02 \x01(\t:U\n\x05types\x12\x1e.google.protobuf.MethodOptions\x18\x80\x90\x03 \x01(\x0b\x32$.google.api.experimental.MethodTypesb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -TYPES_FIELD_NUMBER = 51200 -types = _descriptor.FieldDescriptor( - name='types', full_name='google.api.experimental.types', index=0, - number=51200, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) - - -_METHODTYPES = _descriptor.Descriptor( - name='MethodTypes', - full_name='google.api.experimental.MethodTypes', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='lro_return_type', full_name='google.api.experimental.MethodTypes.lro_return_type', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='lro_metadata_type', full_name='google.api.experimental.MethodTypes.lro_metadata_type', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=96, - serialized_end=161, -) - -DESCRIPTOR.message_types_by_name['MethodTypes'] = _METHODTYPES -DESCRIPTOR.extensions_by_name['types'] = types -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -MethodTypes = _reflection.GeneratedProtocolMessageType('MethodTypes', (_message.Message,), dict( - DESCRIPTOR = _METHODTYPES, - __module__ = 'google.api.experimental.lro_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.MethodTypes) - )) -_sym_db.RegisterMessage(MethodTypes) - -types.message_type = _METHODTYPES -google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(types) - -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/overload_pb2.py b/packages/gapic-generator/api_factory/schema/pb/overload_pb2.py deleted file mode 100644 index 1adb2b3b6cf5..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/overload_pb2.py +++ /dev/null @@ -1,178 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/api/experimental/overload.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from api_factory.schema.pb.expr.v1 import syntax_pb2 as google_dot_api_dot_expr_dot_v1_dot_syntax__pb2 -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/experimental/overload.proto', - package='google.api.experimental', - syntax='proto3', - serialized_pb=_b('\n&google/api/experimental/overload.proto\x12\x17google.api.experimental\x1a\x1fgoogle/api/expr/v1/syntax.proto\x1a google/protobuf/descriptor.proto\"\x89\x02\n\x08Overload\x12\x0c\n\x04name\x18\x01 \x01(\t\x12K\n\x10\x63omponent_fields\x18\n \x01(\x0b\x32\x31.google.api.experimental.Overload.ComponentFields\x1a\xa1\x01\n\x0f\x43omponentFields\x12G\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x37.google.api.experimental.Overload.ComponentFields.Field\x1a\x45\n\x05\x46ield\x12\x0c\n\x04path\x18\x01 \x01(\t\x12.\n\x0c\x64\x65\x66\x61ult_expr\x18\x02 \x01(\x0b\x32\x18.google.api.expr.v1.Expr:V\n\toverloads\x12\x1e.google.protobuf.MethodOptions\x18\xac\x92\x03 \x03(\x0b\x32!.google.api.experimental.Overloadb\x06proto3') - , - dependencies=[google_dot_api_dot_expr_dot_v1_dot_syntax__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -OVERLOADS_FIELD_NUMBER = 51500 -overloads = _descriptor.FieldDescriptor( - name='overloads', full_name='google.api.experimental.overloads', index=0, - number=51500, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, 
containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) - - -_OVERLOAD_COMPONENTFIELDS_FIELD = _descriptor.Descriptor( - name='Field', - full_name='google.api.experimental.Overload.ComponentFields.Field', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='path', full_name='google.api.experimental.Overload.ComponentFields.Field.path', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='default_expr', full_name='google.api.experimental.Overload.ComponentFields.Field.default_expr', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=331, - serialized_end=400, -) - -_OVERLOAD_COMPONENTFIELDS = _descriptor.Descriptor( - name='ComponentFields', - full_name='google.api.experimental.Overload.ComponentFields', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fields', full_name='google.api.experimental.Overload.ComponentFields.fields', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_OVERLOAD_COMPONENTFIELDS_FIELD, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - 
extension_ranges=[], - oneofs=[ - ], - serialized_start=239, - serialized_end=400, -) - -_OVERLOAD = _descriptor.Descriptor( - name='Overload', - full_name='google.api.experimental.Overload', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.api.experimental.Overload.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='component_fields', full_name='google.api.experimental.Overload.component_fields', index=1, - number=10, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_OVERLOAD_COMPONENTFIELDS, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=135, - serialized_end=400, -) - -_OVERLOAD_COMPONENTFIELDS_FIELD.fields_by_name['default_expr'].message_type = google_dot_api_dot_expr_dot_v1_dot_syntax__pb2._EXPR -_OVERLOAD_COMPONENTFIELDS_FIELD.containing_type = _OVERLOAD_COMPONENTFIELDS -_OVERLOAD_COMPONENTFIELDS.fields_by_name['fields'].message_type = _OVERLOAD_COMPONENTFIELDS_FIELD -_OVERLOAD_COMPONENTFIELDS.containing_type = _OVERLOAD -_OVERLOAD.fields_by_name['component_fields'].message_type = _OVERLOAD_COMPONENTFIELDS -DESCRIPTOR.message_types_by_name['Overload'] = _OVERLOAD -DESCRIPTOR.extensions_by_name['overloads'] = overloads -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Overload = _reflection.GeneratedProtocolMessageType('Overload', (_message.Message,), dict( - - ComponentFields = _reflection.GeneratedProtocolMessageType('ComponentFields', 
(_message.Message,), dict( - - Field = _reflection.GeneratedProtocolMessageType('Field', (_message.Message,), dict( - DESCRIPTOR = _OVERLOAD_COMPONENTFIELDS_FIELD, - __module__ = 'google.api.experimental.overload_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.Overload.ComponentFields.Field) - )) - , - DESCRIPTOR = _OVERLOAD_COMPONENTFIELDS, - __module__ = 'google.api.experimental.overload_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.Overload.ComponentFields) - )) - , - DESCRIPTOR = _OVERLOAD, - __module__ = 'google.api.experimental.overload_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.Overload) - )) -_sym_db.RegisterMessage(Overload) -_sym_db.RegisterMessage(Overload.ComponentFields) -_sym_db.RegisterMessage(Overload.ComponentFields.Field) - -overloads.message_type = _OVERLOAD -google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(overloads) - -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/resources_pb2.py b/packages/gapic-generator/api_factory/schema/pb/resources_pb2.py deleted file mode 100644 index 3b22b69f8cae..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/resources_pb2.py +++ /dev/null @@ -1,99 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/api/experimental/resources.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/experimental/resources.proto', - package='google.api.experimental', - syntax='proto3', - serialized_pb=_b('\n\'google/api/experimental/resources.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\">\n\x11ResourceReference\x12\x11\n\ttype_name\x18\x01 \x01(\t\x12\x16\n\x0eresource_paths\x18\x02 \x03(\t:9\n\x0eresource_paths\x12\x1f.google.protobuf.MessageOptions\x18\xd8\x94\x03 \x03(\t:a\n\x0cresource_ref\x12\x1d.google.protobuf.FieldOptions\x18\xd9\x94\x03 \x01(\x0b\x32*.google.api.experimental.ResourceReferenceb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -RESOURCE_PATHS_FIELD_NUMBER = 51800 -resource_paths = _descriptor.FieldDescriptor( - name='resource_paths', full_name='google.api.experimental.resource_paths', index=0, - number=51800, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) -RESOURCE_REF_FIELD_NUMBER = 51801 -resource_ref = _descriptor.FieldDescriptor( - name='resource_ref', full_name='google.api.experimental.resource_ref', index=1, - number=51801, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) - - -_RESOURCEREFERENCE = _descriptor.Descriptor( - name='ResourceReference', - full_name='google.api.experimental.ResourceReference', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='type_name', full_name='google.api.experimental.ResourceReference.type_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='resource_paths', full_name='google.api.experimental.ResourceReference.resource_paths', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=102, - serialized_end=164, -) - -DESCRIPTOR.message_types_by_name['ResourceReference'] = _RESOURCEREFERENCE -DESCRIPTOR.extensions_by_name['resource_paths'] = resource_paths -DESCRIPTOR.extensions_by_name['resource_ref'] = resource_ref -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ResourceReference = _reflection.GeneratedProtocolMessageType('ResourceReference', (_message.Message,), dict( - DESCRIPTOR = _RESOURCEREFERENCE, - __module__ = 'google.api.experimental.resources_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.ResourceReference) - )) -_sym_db.RegisterMessage(ResourceReference) - -google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(resource_paths) -resource_ref.message_type = _RESOURCEREFERENCE 
-google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(resource_ref) - -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/pb/stability_pb2.py b/packages/gapic-generator/api_factory/schema/pb/stability_pb2.py deleted file mode 100644 index c4b5089483ba..000000000000 --- a/packages/gapic-generator/api_factory/schema/pb/stability_pb2.py +++ /dev/null @@ -1,120 +0,0 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/api/experimental/stability.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/api/experimental/stability.proto', - package='google.api.experimental', - syntax='proto3', - serialized_pb=_b('\n\'google/api/experimental/stability.proto\x12\x17google.api.experimental\x1a google/protobuf/descriptor.proto\"5\n\tStability\x12\x0e\n\x04\x62\x65ta\x18\x01 \x01(\tH\x00\x12\x0f\n\x05\x61lpha\x18\x02 \x01(\tH\x00\x42\x07\n\x05level:`\n\x11service_stability\x12\x1f.google.protobuf.ServiceOptions\x18\xbc\x95\x03 \x01(\x0b\x32\".google.api.experimental.Stability:^\n\x10method_stability\x12\x1e.google.protobuf.MethodOptions\x18\xbd\x95\x03 \x01(\x0b\x32\".google.api.experimental.Stability:\\\n\x0f\x66ield_stability\x12\x1d.google.protobuf.FieldOptions\x18\xbe\x95\x03 \x01(\x0b\x32\".google.api.experimental.Stabilityb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -SERVICE_STABILITY_FIELD_NUMBER = 51900 
-service_stability = _descriptor.FieldDescriptor( - name='service_stability', full_name='google.api.experimental.service_stability', index=0, - number=51900, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) -METHOD_STABILITY_FIELD_NUMBER = 51901 -method_stability = _descriptor.FieldDescriptor( - name='method_stability', full_name='google.api.experimental.method_stability', index=1, - number=51901, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) -FIELD_STABILITY_FIELD_NUMBER = 51902 -field_stability = _descriptor.FieldDescriptor( - name='field_stability', full_name='google.api.experimental.field_stability', index=2, - number=51902, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - options=None, file=DESCRIPTOR) - - -_STABILITY = _descriptor.Descriptor( - name='Stability', - full_name='google.api.experimental.Stability', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='beta', full_name='google.api.experimental.Stability.beta', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='alpha', full_name='google.api.experimental.Stability.alpha', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='level', full_name='google.api.experimental.Stability.level', - index=0, containing_type=None, fields=[]), - ], - serialized_start=102, - serialized_end=155, -) - -_STABILITY.oneofs_by_name['level'].fields.append( - _STABILITY.fields_by_name['beta']) -_STABILITY.fields_by_name['beta'].containing_oneof = _STABILITY.oneofs_by_name['level'] -_STABILITY.oneofs_by_name['level'].fields.append( - _STABILITY.fields_by_name['alpha']) -_STABILITY.fields_by_name['alpha'].containing_oneof = _STABILITY.oneofs_by_name['level'] -DESCRIPTOR.message_types_by_name['Stability'] = _STABILITY -DESCRIPTOR.extensions_by_name['service_stability'] = service_stability -DESCRIPTOR.extensions_by_name['method_stability'] = method_stability -DESCRIPTOR.extensions_by_name['field_stability'] = field_stability -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Stability = _reflection.GeneratedProtocolMessageType('Stability', (_message.Message,), dict( - DESCRIPTOR = _STABILITY, - __module__ = 'google.api.experimental.stability_pb2' - # @@protoc_insertion_point(class_scope:google.api.experimental.Stability) - )) -_sym_db.RegisterMessage(Stability) - -service_stability.message_type = _STABILITY -google_dot_protobuf_dot_descriptor__pb2.ServiceOptions.RegisterExtension(service_stability) -method_stability.message_type = _STABILITY -google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(method_stability) -field_stability.message_type = _STABILITY -google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(field_stability) - -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/api_factory/schema/wrappers.py index 
a7a425b5ecf8..d9e095d10194 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -28,15 +28,15 @@ """ import dataclasses -from typing import Callable, List, Mapping, Sequence, Tuple +import re +from typing import List, Mapping, Sequence, Tuple +from google.api import annotations_pb2 +from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from api_factory import utils from api_factory.schema.metadata import Metadata -from api_factory.schema.pb import client_pb2 -from api_factory.schema.pb import headers_pb2 -from api_factory.schema.pb import overload_pb2 @dataclasses.dataclass(frozen=True) @@ -105,14 +105,17 @@ def __getattr__(self, name): return getattr(self.method_pb, name) @property - def overloads(self): - """Return the overloads defined for this method.""" - return self.method_pb.options.Extensions[overload_pb2.overloads] + def field_headers(self) -> Sequence[str]: + """Return the field headers defined for this method.""" + http = self.options.Extensions[annotations_pb2.http] + if http.get: + return tuple(re.findall(r'\{([a-z][\w\d_.]+)=', http.get)) + return () @property - def field_headers(self): - """Return the field headers defined for this method.""" - return self.method_pb.options.Extensions[headers_pb2.field_headers] + def signature(self) -> signature_pb2.MethodSignature: + """Return the signature defined for this method.""" + return self.options.Extensions[annotations_pb2.method_signature] @dataclasses.dataclass(frozen=True) @@ -132,9 +135,9 @@ def host(self) -> str: Returns: str: The hostname, with no protocol and no trailing ``/``. 
""" - if self.service_pb.options.Extensions[client_pb2.host]: - return self.service_pb.options.Extensions[client_pb2.host] - return utils.Placeholder('<<< HOSTNAME >>>') + if self.options.Extensions[annotations_pb2.default_host]: + return self.options.Extensions[annotations_pb2.default_host] + return utils.Placeholder('<<< SERVICE ADDRESS >>>') @property def oauth_scopes(self) -> Sequence[str]: @@ -143,9 +146,8 @@ def oauth_scopes(self) -> Sequence[str]: Returns: Sequence[str]: A sequence of OAuth scopes. """ - if self.service_pb.options.Extensions[client_pb2.oauth_scopes]: - return self.service_pb.options.Extensions[client_pb2.oauth_scopes] - return () + oauth = self.options.Extensions[annotations_pb2.oauth] + return tuple(oauth.scopes) @property def module_name(self) -> str: @@ -170,6 +172,8 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: """ answer = set() for method in self.methods.values(): + # Add the module containing both the request and response + # messages. (These are usually the same, but not necessarily.) answer.add(( '.'.join(method.input.meta.address.package), method.input.pb2_module, @@ -178,6 +182,9 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: '.'.join(method.output.meta.address.package), method.output.pb2_module, )) + + # If this method has LRO, it is possible (albeit unlikely) that + # the LRO messages reside in a different module. 
if method.lro_payload: answer.add(( '.'.join(method.lro_payload.meta.address.package), @@ -188,27 +195,14 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: '.'.join(method.lro_metadata.meta.address.package), method.lro_metadata.pb2_module, )) - return sorted(answer) + return tuple(sorted(answer)) @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" - return self._any_method(lambda m: getattr(m, 'lro_payload')) + return any([m.lro_payload for m in self.methods.values()]) @property def has_field_headers(self) -> bool: """Return whether the service has a method containing field headers.""" - return self._any_method(lambda m: getattr(m, 'field_headers')) - - def _any_method(self, predicate: Callable) -> bool: - """Return whether the service has a method that fulfills ``predicate``. - - Args: - predicate (Callable[Method]): Function specifying the criteria - testing the methods in the service. - - Returns: - bool: True if any method of the service contains the specified - attribute. 
- """ - return any(predicate(method) for method in self.methods.values()) + return any([m.field_headers for m in self.methods.values()]) diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name/__init__.py.j2 similarity index 68% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name/__init__.py.j2 index 23447abe9852..3a65a7f66745 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name/__init__.py.j2 @@ -2,7 +2,7 @@ {% block content %} {% for service in api.services.values() -%} -from ..{{ api.versioned_module_name }}.{{ service.name|snake_case }} import {{ service.name }} +from ..{{ api.naming.versioned_module_name }}.{{ service.name|snake_case }} import {{ service.name }} {% endfor %} __all__ = ( {%- for service in api.services.values() %} diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/__init__.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/__init__.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/client.py.j2 rename to 
packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/base.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 similarity index 80% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 index 8a7052ed583f..faa2919b393f 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 @@ -1,7 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Sequence, Tuple +from typing import Callable, 
Sequence, Tuple from google.api_core import grpc_helpers {%- if service.has_lro %} @@ -80,24 +80,19 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): {%- endif %} {%- for method in service.methods.values() %} - def {{ method.name|snake_case }}(self, - request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, - metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: - """Call the {{ (method.name|snake_case).replace('_',' ')|wrap(width=70, + @property + def {{ method.name|snake_case }}(self) -> Callable[ + [{{ method.input.pb2_module }}.{{ method.input.name }}], + {{ method.output.pb2_module }}.{{ method.output.name }}]: + """Return a callable for the + {{- ' ' + (method.name|snake_case).replace('_',' ')|wrap(width=70, initial_width=25, subsequent_indent=" ") }} method over gRPC. {{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} - Args: - request (~.{{ method.input.pb2_module }}.{{ method.input.name }}: - The request object. {{ method.input.meta.doc|wrap(width=72, - initial_width=36, subsequent_indent=' ' * 16) }} - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent alont with the request as metadata. - Returns: - ~.{{ method.output.pb2_module }}.{{ method.output.name }}: + Callable[[~.{{ method.input.name }}], + ~.{{ method.output.name }}]: {{ method.output.meta.doc|wrap(width=72, initial_width=56, subsequent_indent=' ' * 16) }} """ @@ -111,9 +106,6 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): request_serializer={{ method.input.pb2_module }}.{{ method.input.name }}.SerializeToString, response_deserializer={{ method.output.pb2_module }}.{{ method.output.name }}.FromString, ) - stub = self._stubs['{{ method.name|snake_case }}'] - - # Return the response. 
- return stub(request, metadata=metadata) + return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} {%- endblock -%} diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 similarity index 92% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 index bf9e18896ae5..cfb354ac5379 100644 --- a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/$service/transports/http.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 @@ -72,7 +72,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): initial_width=25, subsequent_indent=" ") }} method over HTTP. Args: - request (~.{{ method.input.pb2_module }}.{{ method.input.name }}: + request (~.{{ method.input.pb2_module }}.{{ method.input.name }}): The request object. {{ method.input.meta.doc|wrap(width=72, initial_width=36, subsequent_indent=' ' * 16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -88,7 +88,12 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): # Send the request. 
response = self._session.post( - f'https://{self.SERVICE_ADDRESS}/$rpc/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + 'https://{host}/$rpc/{package}.{service}/{method}'.format( + host=self.SERVICE_ADDRESS, + method='{{ method.name }}', + package='{{ '.'.join(method.meta.address.package) }}', + service='{{ service.name }}', + ), data=data, headers={ 'content-type': 'application/x-protobuf', diff --git a/packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/__init__.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/$namespace/$name_$version/__init__.py.j2 rename to packages/gapic-generator/api_factory/templates/$namespace/$name_$version/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/_base.py.j2 b/packages/gapic-generator/api_factory/templates/_base.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/generator/templates/_base.py.j2 rename to packages/gapic-generator/api_factory/templates/_base.py.j2 diff --git a/packages/gapic-generator/api_factory/generator/templates/setup.py.j2 b/packages/gapic-generator/api_factory/templates/setup.py.j2 similarity index 54% rename from packages/gapic-generator/api_factory/generator/templates/setup.py.j2 rename to packages/gapic-generator/api_factory/templates/setup.py.j2 index 7168b2499b20..8750e75338f6 100644 --- a/packages/gapic-generator/api_factory/generator/templates/setup.py.j2 +++ b/packages/gapic-generator/api_factory/templates/setup.py.j2 @@ -7,30 +7,19 @@ import os from setuptools import setup -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -with io.open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: - README = file_obj.read() - - setup( - name='{{ api.warehouse_package_name }}', - author='{{ api.client.copyright.fullname }}', + name='{{ 
api.naming.warehouse_package_name }}', version='0.0.1', - description='{{ api.client.documentation.tagline|wrap(width=70, initial_width=50, subsequent_indent=" '", antecedent_trailer=" '") }}', - long_description=README, - license={% if api.client.license %}'{{ api.client.license.replace('-', ' ') }}'{% else %}None{% endif %}, platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( 'google-api-core >= 0.1.4, < 0.2.0dev', + 'googleapis-common-protos >= 1.6.0b4', 'grpcio >= 1.10.0', ), classifiers=( 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', - {%- if api.client.license %} - 'License :: OSI Approved :: {{ api.client.license }}', - {%- endif %} 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', diff --git a/packages/gapic-generator/api_factory/utils/cache.py b/packages/gapic-generator/api_factory/utils/cache.py index 3db4ab6af0da..1537ea83b2d8 100644 --- a/packages/gapic-generator/api_factory/utils/cache.py +++ b/packages/gapic-generator/api_factory/utils/cache.py @@ -31,7 +31,7 @@ def cached_property(fx): def inner(self): # Sanity check: If there is no cache at all, create an empty cache. if not hasattr(self, '_cached_values'): - setattr(self, '_cached_values', {}) + object.__setattr__(self, '_cached_values', {}) # If and only if the function's result is not in the cache, # run the function. diff --git a/packages/gapic-generator/docs/api-configuration.rst b/packages/gapic-generator/docs/api-configuration.rst index 92c16efed352..44f9e7dec62f 100644 --- a/packages/gapic-generator/docs/api-configuration.rst +++ b/packages/gapic-generator/docs/api-configuration.rst @@ -15,8 +15,11 @@ published protocol buffers. This plugin *will* successfully publish a library on a valid protobuf API even without any additional information set, but may require some post-processing work by a human in this case before the resulting client -library will install or work. 
(Look for values enclosed by ``<<<`` and -``>>>`` to quickly spot these.) +library will work. + +Look for values enclosed by ``<<<`` and ``>>>`` to quickly spot these. +As of this writing, these are the ``SERVICE_ADDRESS`` and ``OAUTH_SCOPES`` +constants defined in ``base.py`` files. Reading further assumes you are at least nominally familiar with protocol buffers and their syntax. You may not be familiar with `options`_ yet; it is @@ -37,55 +40,67 @@ When specifying an annotation, your proto will need to import the file where the annotation is defined. If you try to use an annotation without importing the dependency proto, then ``protoc`` will give you an error. -All of the protos discussed here are in the `googleapis`_ repository, -on the ``annotated`` branch, and they are consistently in the -``google.api.experimental`` package. While this remains experimental, -the best course is probably to clone the repository: +These protos live on the ``input-contract`` branch in the +`api-common-protos`_ repository. + +Your best bet is to likely clone this repository: .. code-block:: shell - $ git clone git@github.com:googleapis/googleapis.git - $ cd googleapis - $ git checkout --track -b annotated origin/annotated + $ git clone git@github.com:googleapis/api-common-protos.git + $ cd api-common-protos + $ git checkout --track -b input-contract origin/input-contract Once this is done, you will need to specify the root of this repository on disk as a ``--proto_path`` whenever invoking ``protoc``. -.. _googleapis: https://github.com/googleapis/googleapis/ +.. _api-common-protos: https://github.com/googleapis/api-common-protos/tree/input-contract + API Client Information ~~~~~~~~~~~~~~~~~~~~~~ The most important piece of information this plugin requires is information -about the client library itself: what should it be called, how is it licensed, -and so on. +about the client library itself: what should it be called, what is its proper +namespace, and so on. 
-This is rolled up into a strcuture spelled ``Client``, and the annotation -is defined in `google/api/experimental/client.proto`_. +This is rolled up into a structure called ``Metadata``, and the annotation +is defined in `google/api/metadata.proto`_. The option may be defined as a full structure at the top level of the proto file. It is recommended that this be declared other under ``option`` directives, and above services or messages. -You really need ``name`` (otherwise the plugin will use a placeholder), but -everything else is fundamentally optional. Here is a complete annotation: +This annotation is optional, and you may not need it. The generator will +infer a proper name, namespace and version from the ``package`` statement: .. code-block:: protobuf - option (google.api.experimental.client) = { - name: "News" - namespace: ["Daily Planet"] - version: "v1" - documentation: { - overview: "The News API allows you to retrieve and search articles posted to the Daily Planet, the most trusted newspaper in Metropolis." - summary: "All the news fit to print." - documentation_root_url: "bogus://dailyplanet.com/news-api/docs/" - } - copyright: { fullname: "Perry White" year: "2018" } - license: "Apache-2.0" + // This will come out to be: + // package namespace: ['Acme', 'Manufacturing'] + // package name: 'Anvils' + // version: 'v1' + package acme.manufacturing.anvils.v1; + +If the inferred package name is wrong for some reason, then the annotation +is important. + +.. code-block:: protobuf + + package acme.anvils.v1; + + // The namespace provided here will take precedence over the + // inferred one. + option (google.api.metadata) = { + "package_namespace": ["Acme", "Manufacturing"] }; +.. note:: + + The ``google.api.metadata`` annotation can be used to specify a + namespace or name, but the version *must* be specified in the proto package. 
+ Service Information ~~~~~~~~~~~~~~~~~~~ @@ -95,7 +110,7 @@ where the API service is running, as well as what (if anything) else is required in order to properly connect. This plugin understands two options for this, which are also defined in -`google/api/experimental/client.proto`_. Rather than being options on +`google/api/metadata.proto`_. Rather than being options on top level files, however, these are both options on `services`_. If an API defines more than one service, these options do *not* need to match between them. @@ -104,8 +119,8 @@ The first option is the **host** where the service can be reached: .. code-block:: protobuf - service News { - option (google.api.experimental.host) = "newsapi.dailyplanet.com" + service AnvilService { + option (google.api.default_host) = "anvils.acme.com" } The second option is any oauth scopes which are needed. Google's auth @@ -114,22 +129,59 @@ this plugin uses) expect that credentials declare what scopes they believe they need, and the auth libraries do the right thing in the situation where authorization is needed, access has been revoked, and so on. -These are a list, which is accomplished by specifying the option more than -once: - .. code-block:: protobuf - service News { - option (google.api.experimental.oauth_scopes) = "https://newsapi.dailyplanet.com/auth/list-articles" - option (google.api.experimental.oauth_scopes) = "https://newsapi.dailyplanet.com/auth/read-article" + service AnvilService { + option (google.api.oauth) = { + scopes: ["https://anvils.acme.com/auth/browse-anvils", + "https://anvils.acme.com/auth/drop-anvils"] + }; } .. _services: https://developers.google.com/protocol-buffers/docs/proto3#services -.. _google/api/experimental/client.proto: https://github.com/googleapis/googleapis/blob/annotated/google/api/experimental/client.proto#L35 +.. _google/api/metadata.proto: https://github.com/googleapis/api-common-protos/blob/input-contract/google/api/metadata.proto .. 
_google-auth: https://github.com/GoogleCloudPlatform/google-auth-library-python + +Long-Running Operations +~~~~~~~~~~~~~~~~~~~~~~~ + +Occasionally, API requests may take a long time. In this case, APIs may +run a task in the background and provide the client with a token to +retrieve the result later. + +The ``google.longrunning.Operation`` message is intended for this purpose. +It is defined in `google/longrunning/operations.proto`_ and can be used +as the return type of an RPC. + +However, when doing this, the ultimate return type is lost. Therefore, +it is important to annotate the return type (and metadata type, if applicable) +so that client libraries are able to deserialize the message. + +.. code-block:: protobuf + + import "google/longrunning/operations.proto"; + + package acme.anvils.v1; + + service AnvilService { + rpc DeliverAnvil(DeliverAnvilRequest) + returns (google.longrunning.Operation) { + option (google.longrunning.operation_types) = { + response: "acme.anvils.v1.DeliverAnvilResponse" + metadata: "acme.anvils.v1.DeliverAnvilMetadata" + }; + } + } + +The ``response`` field here is mandatory; the ``metadata`` field is optional, +and ``google.longrunning.OperationMetadata`` is assumed if it is not set. + +.. _google/longrunning/operations.proto: https://github.com/googleapis/api-common-protos/blob/input-contract/google/longrunning/operations.proto + + Future Work ~~~~~~~~~~~ -Support for other annotated behavior, such as overloads, long-running -operations, samples, and header values is a work in progress. +Support for other annotated behavior, such as overloads, samples, and +header values is a work in progress. 
diff --git a/packages/gapic-generator/docs/conf.py b/packages/gapic-generator/docs/conf.py index 7f0bf1656ee9..dffe343df0f3 100644 --- a/packages/gapic-generator/docs/conf.py +++ b/packages/gapic-generator/docs/conf.py @@ -24,9 +24,9 @@ author = 'Luke Sneeringer' # The short X.Y version -version = '0.0.2' +version = '0.0.4' # The full version, including alpha/beta/rc tags -release = '0.0.2' +release = '0.0.4' # -- General configuration --------------------------------------------------- diff --git a/packages/gapic-generator/docs/getting-started.rst b/packages/gapic-generator/docs/getting-started.rst index 3a3b0c2b0868..be788548ec70 100644 --- a/packages/gapic-generator/docs/getting-started.rst +++ b/packages/gapic-generator/docs/getting-started.rst @@ -20,16 +20,26 @@ a special branch: $ git clone git@github.com:googleapis/googleapis.git $ cd googleapis - $ git checkout --track -b annotated origin/annotated + $ git checkout --track -b input-contract origin/input-contract + $ cd .. The API available as an example (thus far) is the `Google Cloud Vision`_ API, available in the ``google/cloud/vision/v1/`` subdirectory. This will be used for the remainder of the examples on this page. -This branch also makes available the proto specification of the configuration -itself, which is explained in :ref:`api-configuration`. +You will also need the common protos, currently in experimental status, +which define certain client-specific annotations. These are in the +`api-common-protos`_ repository. Clone this from GitHub also: -.. _googleapis: https://github.com/googleapis/googleapis/ +.. code-block:: shell + + $ git clone git@github.com:googleapis/api-common-protos.git + $ cd api-common-protos + $ git checkout --track -b input-contract origin/input-contract + $ cd .. + +.. _googleapis: https://github.com/googleapis/googleapis/tree/input-contract +.. _api-common-protos: https://github.com/googleapis/api-common-protos/tree/input-contract .. 
_Google Cloud Vision: https://cloud.google.com/vision/ @@ -45,31 +55,20 @@ for each plugin invoked; you just want these to match: .. code-block:: shell + # This is assumed to be in the `googleapis` project root, and we also + # assume that api-common-protos is next to it. $ protoc google/cloud/vision/v1/*.proto \ - --python_out=/dest/ \ - --pyclient_out=/dest/ + --proto_path=../api-common-protos/ --proto_path=. \ + --python_out=/dest/ --pyclient_out=/dest/ .. note:: **A reminder about paths.** - Remember that ``protoc`` is particular about paths. It expects to be run - from the "base path" used for imports within the protos. If you are - running ``protoc`` from any other location, you will need to provide - ``--proto_path``. + Remember that ``protoc`` is particular about paths. It requires all paths + where it expects to find protos, and *order matters*. In this case, + the common protos must come first, and then the path to the API being built. -Because the generator is experimental, you need to compile the experimental -version of google-common-protos as well: - -.. code-block:: shell - - $ protoc google/api/*.proto \ - google/api/experimental/*.proto \ - google/api/expr/v1/*.proto \ - --python_out=/dest/ \ - -Create a blank file ``/dest/google/api/__init__.py`` to use ``google.api`` as a -package. Running a Client Library ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -81,7 +80,7 @@ Create a virtual environment for the library: .. code-block:: shell - $ virtualenv ~/.local/client-lib --python=`which python3.6` + $ virtualenv ~/.local/client-lib --python=`which python3.7` $ source ~/.local/client-lib/bin/activate Next, install the library: @@ -130,6 +129,4 @@ Here is a test script: print(response) - - .. 
_Google API design conventions: https://cloud.google.com/apis/design/ diff --git a/packages/gapic-generator/docs/installing.rst b/packages/gapic-generator/docs/installing.rst index 29190f7f48e4..09855186fde9 100644 --- a/packages/gapic-generator/docs/installing.rst +++ b/packages/gapic-generator/docs/installing.rst @@ -22,13 +22,15 @@ The `release page`_ on GitHub contains the download you need. It is likely preferable to install ``protoc`` somewhere on your shell's path, but this is not a strict requirement (as you will be invoking it directly). +``protoc`` is also quirky about how it handles well-known protos; you probably +also want to copy them into ``/usr/local/include`` To ensure it is installed propertly: .. code-block:: shell $ protoc --version - libprotoc 3.5.1 + libprotoc 3.6.0 API Generator for Python @@ -39,9 +41,10 @@ the usual ways. It fundamentally provides a CLI command, ``protoc-gen-pyclient``, so you will want to install using a mechanism that is conducive to making CLI commands available. -Additionally, this program currently only runs against Python 3.6, so you -will need that installed. (Most Linux distributions ship with earlier -versions.) Use `pyenv`_ to get Python 3.6 installed in a friendly way. +Additionally, this program currently only runs against Python 3.6 or +Python 3.7, so you will need that installed. (Most Linux distributions ship +with earlier versions.) Use `pyenv`_ to get Python 3.7 installed in a +friendly way. As for this library itself, the recommended installation approach is `pipsi`_. @@ -58,7 +61,7 @@ As for this library itself, the recommended installation approach is # make an appropriately-aliased executable. # The `--editable` flag is only necessary if you want to work on the # tool (as opposed to just use it). - pipsi install --editable --python=`which python3.6` . + pipsi install --editable --python=`which python3.7` . 
To ensure the tool is installed properly: diff --git a/packages/gapic-generator/docs/process.rst b/packages/gapic-generator/docs/process.rst index fe9ef70d70f7..cc6858224a66 100644 --- a/packages/gapic-generator/docs/process.rst +++ b/packages/gapic-generator/docs/process.rst @@ -29,9 +29,22 @@ rather dense. The key point to grasp is that each ``.proto`` *file* compiles into one of these proto messages (called *descriptors*), and this plugin's job is to parse those descriptors. +That said, you should not need to know the ins and outs of the ``protoc`` +contract model to be able to follow what this library is doing. + .. _plugin.proto: https://github.com/google/protobuf/blob/master/src/google/protobuf/compiler/plugin.proto .. _descriptor.proto: https://github.com/google/protobuf/blob/master/src/google/protobuf/descriptor.proto + +Entry Point +~~~~~~~~~~~ + +The entry point to this tool is ``api_factory/cli/generate.py``. The function +in this module is responsible for accepting CLI input, building the internal +API schema, and then rendering templates and using them to build a response +object. + + Parse ~~~~~ @@ -39,11 +52,17 @@ As mentioned, this plugin is divided into two steps. The first step is parsing. The guts of this is handled by the :class:`~.schema.api.API` object, which is this plugin's internal representation of the full API client. -In particular, this class has a :meth:`~.schema.api.API.load` method which -accepts a `FileDescriptor`_ (remember, this is ``protoc``'s internal -representation of each proto file). The method is called once for each proto -file you send to be compiled as well as each dependency. (``protoc`` itself -is smart enough to de-duplicate and send everything in the right order.) +In particular, this class has a :meth:`~.schema.api.API.build` method which +accepts a sequence of `FileDescriptor`_ objects (remember, this is ``protoc``'s +internal representation of each proto file). 
That method iterates over each +file and creates a :class:`~.schema.api.Proto` object for each one. + +.. note:: + + An :class:`~.schema.api.API` object will not only be given the descriptors + for the files you specify, but also all of their dependencies. + ``protoc`` is smart enough to de-duplicate and send everything in the + correct order. The :class:`~.schema.api.API` object's primary purpose is to make sure all the information from the proto files is in one place, and reasonably @@ -71,7 +90,10 @@ These wrapper classes follow a consistent structure: desctiptor unless the wrapper itself provides something, making the wrappers themselves transparent to templates. * They provide a ``meta`` attribute with metadata (package information and - documentation). + documentation). That means templates can consistently access the name + for the module where an object can be found, or an object's documentation, + in predictable and consistent places (``thing.meta.doc``, for example, + prints the comments for ``thing``). Translation ~~~~~~~~~~~ @@ -79,10 +101,10 @@ Translation The translation step follows a straightfoward process to write the contents of client library files. -First, it loads every template in the ``generator/templates/`` directory. -These are `Jinja`_ templates. There is no master list of templates; +First, it loads every template in the ``templates/`` directory. +These are `Jinja`_ templates. **There is no master list of templates**; it is assumed that every template in this directory should be rendered -(unless its name begins with an underscore). +(unless its name begins with a single underscore). The name of the output file is based on the name of the template, with the following string replacements applied: @@ -90,11 +112,17 @@ the following string replacements applied: * The ``.j2`` suffix is removed. * ``$namespace`` is replaced with the namespace specified in the client, converted to appropriate Python module case. 
If there is no namespace, - this segment is dropped. + this segment is dropped. If the namespace has more than one element, + this is expanded out in the directory structure. (For example, a namespace + of ``['Acme', 'Manufacturing']`` will translate into ``acme/manufacturing/`` + directories.) * ``$name`` is replaced with the client name. This is expected to be present. * ``$version`` is replaced with the client version (the version of the API). If there is no specified version, this is dropped. +* ``$name_$version`` is a special case: It is replaced with the client + name, followed by the version. However, if there is no version, both it + and the underscore are dropped. * ``$service`` is replaced with the service name, converted to appropriate Python module case. There may be more than one service in an API; read on for more about this. diff --git a/packages/gapic-generator/docs/reference/schema.rst b/packages/gapic-generator/docs/reference/schema.rst index 7080a0db00f0..33e633a1ee29 100644 --- a/packages/gapic-generator/docs/reference/schema.rst +++ b/packages/gapic-generator/docs/reference/schema.rst @@ -15,6 +15,13 @@ metadata .. automodule:: api_factory.schema.metadata :members: +naming +~~~~~~ + +.. automodule:: api_factory.schema.naming + :members: + + wrappers ~~~~~~~~ diff --git a/packages/gapic-generator/docs/status.rst b/packages/gapic-generator/docs/status.rst index 7f836e293525..3b79caf59820 100644 --- a/packages/gapic-generator/docs/status.rst +++ b/packages/gapic-generator/docs/status.rst @@ -17,8 +17,10 @@ As this is experimental work, please note the following limitations: - gRPC must be installed even if you are not using it (this is due to some minor issues in ``api-core``). - Only unary calls are implemented at this point. -- No support for GAPIC features (e.g. LRO, method argument flattening) yet. +- No support for method argument flattening yet. - No support for samples yet. 
+- Request headers based on ``google.api.http`` annotations are not implemented + yet. - No tests are implemented. -.. _an awkward location: https://github.com/googleapis/googleapis/blob/annotated/google/api/experimental/ +.. _an awkward location: https://github.com/googleapis/api-common-protos/blob/input-contract/google/api/ diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 00ccd960f648..acdba84aedb4 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -19,7 +19,8 @@ @nox.session -def unit(session, python_version='3.6'): +@nox.parametrize('python_version', ['3.6', '3.7']) +def unit(session, python_version='3.7'): """Run the unit test suite.""" session.interpreter = 'python{0}'.format(python_version) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 38f031050e48..81d17088ab84 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -24,13 +24,13 @@ README = file_obj.read() setup( - name='python-api-factory', - version='0.0.2', + name='gapic-generator', + version='0.0.4', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', - url='https://github.com/googleapis/python-api-factory.git', - packages=find_packages(exclude=['protos', 'tests']), + url='https://github.com/googleapis/gapic-generator-python.git', + packages=find_packages(exclude=['docs', 'tests']), description='Python client library generator for APIs defined by protocol' 'buffers', long_description=README, @@ -42,7 +42,7 @@ include_package_data=True, install_requires=( 'click >= 6.7', - 'googleapis-common-protos >= 1.5.3', + 'googleapis-common-protos >= 1.6.0b4', 'grpcio >= 1.9.1', 'jinja2 >= 2.10', 'protobuf >= 3.5.1', @@ -56,8 +56,8 @@ 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 
3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Libraries :: Python Modules', ), diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 966e77df424c..8573bad52763 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -14,6 +14,7 @@ import io import os +from typing import Mapping from unittest import mock import jinja2 @@ -22,31 +23,18 @@ from google.protobuf.compiler import plugin_pb2 from api_factory.generator import generator +from api_factory.schema import api +from api_factory.schema import naming from api_factory.schema import wrappers -from api_factory.schema.api import API -from api_factory.schema.pb import client_pb2 - - -def test_constructor(): - # Crete a bogue and very stripped down request. - request = plugin_pb2.CodeGeneratorRequest(proto_file=[ - # We are just going to prove that each file is loaded, - # so it does not matter what is in them. - descriptor_pb2.FileDescriptorProto(), - descriptor_pb2.FileDescriptorProto(), - ]) - - # Create a generator, prove it has an API. - # This is somewhat internal implementation baseball, but realistically - # the only reasonable way to write these tests is to split them up by - # internal segment. - with mock.patch.object(API, 'load') as load: - g = generator.Generator(request) - assert load.call_count == 2 - assert isinstance(g._api, API) + + +def test_proto_builder_constructor(): + # Create a generator. + g = generator.Generator(api_schema=make_api()) + assert isinstance(g._api, api.API) # Assert we have a Jinja environment also, with the expected filters. 
- # Still internal implementation baseball, but this is the best place + # This is internal implementation baseball, but this is the best place # to establish this and templates will depend on it. assert isinstance(g._env, jinja2.Environment) assert 'snake_case' in g._env.filters @@ -65,7 +53,8 @@ def test_get_response(): service=[descriptor_pb2.ServiceDescriptorProto(name='SpamService'), descriptor_pb2.ServiceDescriptorProto(name='EggsService')], ) - g = make_generator(proto_file=[file_pb2]) + api_schema = make_api(make_proto(file_pb2)) + g = generator.Generator(api_schema=api_schema) # Mock all the rendering methods. with mock.patch.object(g, '_render_templates') as _render_templates: @@ -75,25 +64,13 @@ def test_get_response(): content='This was a template.', ), ] - with mock.patch.object(g, '_read_flat_files') as _read_flat_files: - _read_flat_files.return_value = [ - plugin_pb2.CodeGeneratorResponse.File( - name='flat_file', - content='This was a flat file.', - ), - ] - - # Okay, now run the `get_response` method. - response = g.get_response() - - # First and foremost, we care that we got a valid response - # object back (albeit not so much what is in it). - assert isinstance(response, plugin_pb2.CodeGeneratorResponse) - - # Next, determine that flat files were read. - assert _read_flat_files.call_count == 1 - _, args, _ = _read_flat_files.mock_calls[0] - assert args[0].endswith('files') + + # Okay, now run the `get_response` method. + response = g.get_response() + + # First and foremost, we care that we got a valid response + # object back (albeit not so much what is in it). + assert isinstance(response, plugin_pb2.CodeGeneratorResponse) # Next, determine that the general API templates and service # templates were both called; the method should be called @@ -111,7 +88,7 @@ def test_get_response(): def test_render_templates(): - g = make_generator() + g = generator.Generator(api_schema=make_api()) # Determine the templates to be rendered. 
templates = ('foo.j2', 'bar.j2') @@ -132,7 +109,7 @@ def test_render_templates(): def test_render_templates_additional_context(): - g = make_generator() + g = generator.Generator(api_schema=make_api()) # Determine the templates to be rendered. templates = ('foo.j2',) @@ -150,60 +127,30 @@ def test_render_templates_additional_context(): assert files[0].content == 'A bird!\n' -def test_read_flat_files(): - g = make_generator() - - # This function walks over a directory on the operating system; - # even though that directory is actually in this repo, fake it. - with mock.patch.object(os, 'walk') as walk: - walk.return_value = ( - ('files/', [], ['foo.ext']), - ('files/other/', [], ['bar.ext']), - ) - - # This function also reads files from disk, fake that too. - with mock.patch.object(io, 'open') as open: - open.side_effect = lambda fn, mode: io.StringIO(f'abc-{fn}-{mode}') - - # Okay, now we can run the function. - files = g._read_flat_files('files/') - - # Each file should have been opened, so one call to `io.open` - # per file. - assert open.call_count == len(walk.return_value) - - # `os.walk` should have been called once and exactly once, - # with unmodified input. - walk.assert_called_once_with('files/') - - # Lastly, we should have gotten one file back for each file - # yielded by walk, and each one should have the expected contents - # (the 'abc' prefix and then the filename and read mode). 
- assert len(files) == 2 - assert files[0].name == 'foo.ext' - assert files[1].name == 'other/bar.ext' - assert files[0].content == 'abc-files/foo.ext-r' - assert files[1].content == 'abc-files/other/bar.ext-r' - - def test_get_output_filename(): - g = make_generator(proto_file=[make_proto_file(name='Spam', version='v2')]) + g = generator.Generator(api_schema=make_api( + naming=make_naming(namespace=(), name='Spam', version='v2'), + )) template_name = '$namespace/$name_$version/foo.py.j2' assert g._get_output_filename(template_name) == 'spam_v2/foo.py' def test_get_output_filename_with_namespace(): - g = make_generator(proto_file=[make_proto_file( - name='Spam', - namespace=['Ham', 'Bacon'], - version='v2', - )]) + g = generator.Generator(api_schema=make_api( + naming=make_naming( + name='Spam', + namespace=('Ham', 'Bacon'), + version='v2', + ), + )) template_name = '$namespace/$name_$version/foo.py.j2' assert g._get_output_filename(template_name) == 'ham/bacon/spam_v2/foo.py' def test_get_output_filename_with_service(): - g = make_generator(proto_file=[make_proto_file(name='spam', version='v2')]) + g = generator.Generator(api_schema=make_api( + naming=make_naming(namespace=(), name='Spam', version='v2'), + )) template_name = '$name/$service/foo.py.j2' assert g._get_output_filename( template_name, @@ -216,13 +163,27 @@ def test_get_output_filename_with_service(): ) == 'spam/eggs/foo.py' -def make_generator(**kwargs): - return generator.Generator(plugin_pb2.CodeGeneratorRequest(**kwargs)) +def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool = True, prior_protos: Mapping = None, + ) -> api.Proto: + prior_protos = prior_protos or {} + return api._ProtoBuilder(file_pb, + file_to_generate=file_to_generate, + prior_protos=prior_protos, + ).proto -def make_proto_file(**kwargs): - proto_file = descriptor_pb2.FileDescriptorProto() - proto_file.options.Extensions[client_pb2.client].MergeFrom( - client_pb2.Client(**kwargs), +def 
make_api(*protos, naming: naming.Naming = None) -> api.API: + return api.API( + naming=naming or make_naming(), + protos={i.name: i for i in protos}, ) - return proto_file + + +def make_naming(**kwargs) -> naming.Naming: + kwargs.setdefault('name', 'Hatstand') + kwargs.setdefault('namespace', ('Google', 'Cloud')) + kwargs.setdefault('version', 'v1') + kwargs.setdefault('product_name', 'Hatstand') + kwargs.setdefault('product_url', 'https://cloud.google.com/hatstand/') + return naming.Naming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 5ef29198eae9..a37c73e5cd52 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -12,77 +12,76 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Sequence from unittest import mock -import pytest - +from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata +from api_factory.schema import api +from api_factory.schema import naming from api_factory.schema import wrappers -from api_factory.schema.api import API -from api_factory.schema.pb import client_pb2 -from api_factory.schema.pb import lro_pb2 -def test_long_name(): - api = make_api( - client=make_client(name='Genie', namespace=['Agrabah', 'Lamp']), +def test_api_build(): + # Put together a couple of minimal protos. 
+ fd = ( + make_file_pb2( + name='dep.proto', + package='google.dep', + messages=(make_message_pb2(name='ImportedMessage', fields=()),), + ), + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='GetFooRequest', fields=( + make_field_pb2(name='imported_message', number=1, + type_name='google.dep.ImportedMessasge'), + )), + make_message_pb2(name='GetFooResponse', fields=( + make_field_pb2(name='foo', number=1, + type_name='google.example.v1.Foo'), + )), + ), + services=(descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + ), + ),), + ), ) - assert api.long_name == 'Agrabah Lamp Genie' - - -def test_module_name(): - api = make_api(client=make_client( - name='Genie', - namespace=['Agrabah', 'Lamp'], - )) - assert api.module_name == 'genie' - -def test_versioned_module_name_no_version(): - api = make_api(client=make_client( - name='Genie', - namespace=['Agrabah', 'Lamp'], - version='', - )) - assert api.versioned_module_name == 'genie' - - -def test_versioned_module_name(): - api = make_api(client=make_client( - name='Genie', - namespace=['Agrabah', 'Lamp'], - version='v2', - )) - assert api.versioned_module_name == 'genie_v2' + # Create an API with those protos. + api_schema = api.API.build(fd, package='google.example.v1') + # Establish that the API has the data expected. 
+ assert isinstance(api_schema, api.API) + assert len(api_schema.protos) == 2 + assert 'google.dep.ImportedMessage' in api_schema.messages + assert 'google.example.v1.Foo' in api_schema.messages + assert 'google.example.v1.GetFooRequest' in api_schema.messages + assert 'google.example.v1.GetFooResponse' in api_schema.messages + assert 'google.example.v1.FooService' in api_schema.services + assert len(api_schema.enums) == 0 -def test_warehouse_package_name_placeholder(): - api = make_api(client=make_client(name='')) - assert api.warehouse_package_name == '<<< PACKAGE NAME >>>' - assert bool(api.warehouse_package_name) is False - -def test_warehouse_package_name_no_namespace(): - api = make_api(client=make_client(name='BigQuery', namespace=[])) - assert api.warehouse_package_name == 'bigquery' - - -def test_warehouse_package_name_with_namespace(): - api = make_api(client=make_client( - name='BigQuery', - namespace=('Google', 'Cloud'), - )) - assert api.warehouse_package_name == 'google-cloud-bigquery' - - -def test_warehouse_package_name_multiple_words(): - api = make_api(client=make_client(name='Big Query', namespace=[])) - assert api.warehouse_package_name == 'big-query' +def test_proto_build(): + fdp = descriptor_pb2.FileDescriptorProto( + name='my_proto_file.proto', + package='google.example.v1', + ) + proto = api.Proto.build(fdp, file_to_generate=True) + assert isinstance(proto, api.Proto) -def test_load(): +def test_proto_builder_constructor(): sentinel_message = descriptor_pb2.DescriptorProto() sentinel_enum = descriptor_pb2.EnumDescriptorProto() sentinel_service = descriptor_pb2.ServiceDescriptorProto() @@ -90,277 +89,300 @@ def test_load(): # Create a file descriptor proto. It does not matter that none # of the sentinels have actual data because this test just ensures # they are sent off to the correct methods unmodified. 
- fdp = descriptor_pb2.FileDescriptorProto( - name='my_proto_file.proto', - package='google.example.v1', - message_type=[sentinel_message], - enum_type=[sentinel_enum], - service=[sentinel_service], + fdp = make_file_pb2( + messages=(sentinel_message,), + enums=(sentinel_enum,), + services=(sentinel_service,), ) - # Create an API object. - api = make_api() - # Test the load function. - with mock.patch.object(api, '_load_children') as lc: - api.load(fdp) + with mock.patch.object(api._ProtoBuilder, '_load_children') as lc: + pb = api._ProtoBuilder(fdp, file_to_generate=True) # There should be three total calls to load the different types # of children. assert lc.call_count == 3 # The message type should come first. - _, args, kwargs = lc.mock_calls[0] + _, args, _ = lc.mock_calls[0] assert args[0][0] == sentinel_message - assert kwargs['loader'] == api._load_descriptor + assert args[1] == pb._load_message # The enum type should come second. - _, args, kwargs = lc.mock_calls[1] + _, args, _ = lc.mock_calls[1] assert args[0][0] == sentinel_enum - assert kwargs['loader'] == api._load_enum + assert args[1] == pb._load_enum # The services should come third. - _, args, kwargs = lc.mock_calls[2] + _, args, _ = lc.mock_calls[2] assert args[0][0] == sentinel_service - assert kwargs['loader'] == api._load_service + assert args[1] == pb._load_service + + +def test_not_target_file(): + """Establish that services are not ignored for untargeted protos.""" + message_pb = make_message_pb2(name='Foo', + fields=(make_field_pb2(name='bar', type=3, number=1),) + ) + service_pb = descriptor_pb2.ServiceDescriptorProto() + fdp = make_file_pb2(messages=(message_pb,), services=(service_pb,)) + + # Actually make the proto object. + proto = api.Proto.build(fdp, file_to_generate=False) + # The proto object should have the message, but no service. 
+ assert len(proto.messages) == 1 + assert len(proto.services) == 0 -def test_load_comments_top_level(): + +def test_messages(): L = descriptor_pb2.SourceCodeInfo.Location - # Create a file descriptor proto. - # This has comments which should be largely sharded and ferried off to the - # correct sub-methods. - locations = [ - L(path=[4, 0], leading_comments='foo'), - L(path=[4, 0, 2, 0], leading_comments='bar'), - L(path=[6, 0], leading_comments='baz'), - ] - fdp = descriptor_pb2.FileDescriptorProto( - name='my_proto_file.proto', - package='google.example.v1', - source_code_info=descriptor_pb2.SourceCodeInfo(location=locations) + message_pb = make_message_pb2(name='Foo', + fields=(make_field_pb2(name='bar', type=3, number=1),) + ) + locations = ( + L(path=(4, 0), leading_comments='This is the Foo message.'), + L(path=(4, 0, 2, 0), leading_comments='This is the bar field.'), + ) + fdp = make_file_pb2( + messages=(message_pb,), + locations=locations, + package='google.example.v2', ) - # Create an API object. - api = make_api() + # Make the proto object. + proto = api.Proto.build(fdp, file_to_generate=True) - # Test the load function. This sends empty arrays to each of the - # individual child-processing function, but sends meaningful slices of - # documentation (which is what this test is trying to confirm). - with mock.patch.object(api, '_load_children') as lc: - api.load(fdp) + # Get the message. + assert len(proto.messages) == 1 + message = proto.messages['google.example.v2.Foo'] + assert isinstance(message, wrappers.MessageType) + assert message.meta.doc == 'This is the Foo message.' + assert len(message.fields) == 1 + assert message.fields['bar'].meta.doc == 'This is the bar field.' - # There are still three total calls, like above. - assert lc.call_count == 3 - # The `message_type` field has the ID of 4 in `FileDescriptorProto`, - # so the two whose path begins with 4 should be sent, and in the - # ad hoc dictionary that this method creates. 
- _, args, kwargs = lc.mock_calls[0] - assert kwargs['loader'] == api._load_descriptor - assert kwargs['info'] == { - 0: {'TERMINAL': locations[0], 2: {0: {'TERMINAL': locations[1]}}}, - } - - # The `enum_type` field has the ID of 5 in `FileDescriptorProto`, - # but no location objects were sent with a matching path, so it - # will just get an empty dictionary. - _, args, kwargs = lc.mock_calls[1] - assert kwargs['loader'] == api._load_enum - assert kwargs['info'] == {} - - # The `service_type` field has the ID of 6 in `FileDescriptorProto`, - # so it will get the one location object that begins with 6. - _, args, kwargs = lc.mock_calls[2] - assert kwargs['loader'] == api._load_service - assert kwargs['info'] == {0: {'TERMINAL': locations[2]}} - - -def test_load_children(): - # Set up the data to be sent to the method. - children = (mock.sentinel.child_zero, mock.sentinel.child_one) - address = metadata.Address() - info = {0: mock.sentinel.info_zero, 1: mock.sentinel.info_one} - loader = mock.Mock(create_autospec=lambda child, address, info: None) - - # Run the `_load_children` method. - make_api()._load_children(children, loader, address, info) - - # Assert that the loader ran in the expected way (twice, once per child). - assert loader.call_count == 2 - _, args, kwargs = loader.mock_calls[0] - assert args[0] == mock.sentinel.child_zero - assert kwargs['info'] == mock.sentinel.info_zero - _, args, kwargs = loader.mock_calls[1] - assert args[0] == mock.sentinel.child_one - assert kwargs['info'] == mock.sentinel.info_one - - -def test_get_fields(): +def test_services(): L = descriptor_pb2.SourceCodeInfo.Location - # Set up data to test with. - field_pbs = [ - descriptor_pb2.FieldDescriptorProto(name='spam'), - descriptor_pb2.FieldDescriptorProto(name='eggs'), - ] - address = metadata.Address(package=['foo', 'bar'], module='baz') - info = {1: {'TERMINAL': L(leading_comments='Eggs.')}} - - # Run the method under test. 
- fields = make_api()._get_fields(field_pbs, address=address, info=info) - - # Test that we get two field objects back. - assert len(fields) == 2 - for field in fields.values(): - assert isinstance(field, wrappers.Field) - items = iter(fields.items()) - - # Test that the first field is spam, and it has no documentation - # (since `info` has no `0` key). - field_name, field = next(items) - assert field_name == 'spam' - assert field.meta.doc == '' - - # Test that the second field is eggs, and it does have documentation - # (since `info` has a `1` key). - field_name, field = next(items) - assert field_name == 'eggs' - assert field.meta.doc == 'Eggs.' - - # Done. - with pytest.raises(StopIteration): - next(items) - - -def test_get_methods(): - # Start with an empty API object. - api = make_api() - - # Load the input and output type for a method into the API object. - address = metadata.Address(package=['foo', 'bar'], module='baz') - api._load_descriptor(descriptor_pb2.DescriptorProto(name='In'), - address=address, info={}) - api._load_descriptor(descriptor_pb2.DescriptorProto(name='Out'), - address=address, info={}) - - # Run the method under test. - method_pb = descriptor_pb2.MethodDescriptorProto( - name='DoThings', - input_type='foo.bar.In', - output_type='foo.bar.Out', + # Set up messages for our RPC. + request_message_pb = make_message_pb2(name='GetFooRequest', + fields=(make_field_pb2(name='name', type=9, number=1),) + ) + response_message_pb = make_message_pb2(name='GetFooResponse', fields=()) + + # Set up the service with an RPC. + service_pb = descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=(descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v2.GetFooRequest', + output_type='google.example.v2.GetFooResponse', + ),), + ) + + # Fake-document our fake stuff. 
+ locations = ( + L(path=(6, 0), leading_comments='This is the FooService service.'), + L(path=(6, 0, 2, 0), leading_comments='This is the GetFoo method.'), + L(path=(4, 0), leading_comments='This is the GetFooRequest message.'), + L(path=(4, 1), leading_comments='This is the GetFooResponse message.'), ) - methods = api._get_methods([method_pb], address=address, info={}) - - # Test that we get a method object back. - assert len(methods) == 1 - for method in methods.values(): - assert isinstance(method, wrappers.Method) - items = iter(methods.items()) - - # Test that the method has what we expect, an input and output type - # and appropriate name. - method_key, method = next(items) - assert method_key == 'DoThings' + + # Finally, set up the file that encompasses these. + fdp = make_file_pb2( + package='google.example.v2', + messages=(request_message_pb, response_message_pb), + services=(service_pb,), + locations=locations, + ) + + # Make the proto object. + proto = api.Proto.build(fdp, file_to_generate=True) + + # Establish that our data looks correct. + assert len(proto.services) == 1 + assert len(proto.messages) == 2 + service = proto.services['google.example.v2.FooService'] + assert service.meta.doc == 'This is the FooService service.' + assert len(service.methods) == 1 + method = service.methods['GetFoo'] + assert method.meta.doc == 'This is the GetFoo method.' assert isinstance(method.input, wrappers.MessageType) - assert method.input.name == 'In' assert isinstance(method.output, wrappers.MessageType) - assert method.output.name == 'Out' - - # Done. - with pytest.raises(StopIteration): - next(items) - - -def test_get_methods_lro(): - # Start with an empty API object. - api = make_api() - - # Load the message types for a method into the API object, including LRO - # payload and metadata. 
- address = metadata.Address(package=['foo', 'bar'], module='baz') - api._load_descriptor(descriptor_pb2.DescriptorProto(name='In'), - address=address, info={}) - api._load_descriptor(descriptor_pb2.DescriptorProto(name='Out'), - address=address, info={}) - api._load_descriptor(descriptor_pb2.DescriptorProto(name='Progress'), - address=address, info={}) - operations_address = metadata.Address( - package=['google', 'longrunning'], - module='operations', + assert method.input.name == 'GetFooRequest' + assert method.input.meta.doc == 'This is the GetFooRequest message.' + assert method.output.name == 'GetFooResponse' + assert method.output.meta.doc == 'This is the GetFooResponse message.' + + +def test_prior_protos(): + L = descriptor_pb2.SourceCodeInfo.Location + + # Set up a prior proto that mimics google/protobuf/empty.proto + empty_proto = api.Proto.build(make_file_pb2( + name='empty.proto', package='google.protobuf', + messages=(make_message_pb2(name='Empty'),), + ), file_to_generate=False) + + # Set up the service with an RPC. + service_pb = descriptor_pb2.ServiceDescriptorProto( + name='PingService', + method=(descriptor_pb2.MethodDescriptorProto( + name='Ping', + input_type='google.protobuf.Empty', + output_type='google.protobuf.Empty', + ),), + ) + + # Fake-document our fake stuff. + locations = ( + L(path=(6, 0), leading_comments='This is the PingService service.'), + L(path=(6, 0, 2, 0), leading_comments='This is the Ping method.'), + ) + + # Finally, set up the file that encompasses these. + fdp = make_file_pb2( + package='google.example.v1', + services=(service_pb,), + locations=locations, ) - api._load_descriptor(descriptor_pb2.DescriptorProto(name='Operation'), - address=operations_address, info={}) - method_pb = descriptor_pb2.MethodDescriptorProto( - name='DoBigThings', - input_type='foo.bar.In', + + # Make the proto object. 
+ proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ + 'google/protobuf/empty.proto': empty_proto, + }) + + # Establish that our data looks correct. + assert len(proto.services) == 1 + assert len(empty_proto.messages) == 1 + assert len(proto.messages) == 0 + service = proto.services['google.example.v1.PingService'] + assert service.meta.doc == 'This is the PingService service.' + assert len(service.methods) == 1 + method = service.methods['Ping'] + assert isinstance(method.input, wrappers.MessageType) + assert isinstance(method.output, wrappers.MessageType) + assert method.input.name == 'Empty' + assert method.output.name == 'Empty' + assert method.meta.doc == 'This is the Ping method.' + + +def test_lro(): + # Set up a prior proto that mimics google/protobuf/empty.proto + lro_proto = api.Proto.build(make_file_pb2( + name='operations.proto', package='google.longrunning', + messages=(make_message_pb2(name='Operation'),), + ), file_to_generate=False) + + # Set up a method with LRO annotations. + method_pb2 = descriptor_pb2.MethodDescriptorProto( + name='AsyncDoThing', + input_type='google.example.v3.AsyncDoThingRequest', output_type='google.longrunning.Operation', ) - method_pb.options.Extensions[lro_pb2.types].MergeFrom(lro_pb2.MethodTypes( - lro_return_type='foo.bar.Out', - lro_metadata_type='foo.bar.Progress', - )) + method_pb2.options.Extensions[operations_pb2.operation_types].MergeFrom( + operations_pb2.OperationTypes( + response='google.example.v3.AsyncDoThingResponse', + metadata='google.example.v3.AsyncDoThingMetadata', + ), + ) + + # Set up the service with an RPC. + service_pb = descriptor_pb2.ServiceDescriptorProto( + name='LongRunningService', + method=(method_pb2,), + ) + + # Set up the messages, including the annotated ones. 
+ messages = ( + make_message_pb2(name='AsyncDoThingRequest', fields=()), + make_message_pb2(name='AsyncDoThingResponse', fields=()), + make_message_pb2(name='AsyncDoThingMetadata', fields=()), + ) - # Run the method under test. - methods = api._get_methods([method_pb], address=address, info={}) - - # Test that the method has the expected lro output, payload, and metadata. - method = next(iter(methods.values())) - assert method.output.name == 'Operation' - assert isinstance(method.lro_payload, wrappers.MessageType) - assert method.lro_payload.name == 'Out' - assert isinstance(method.lro_metadata, wrappers.MessageType) - assert method.lro_metadata.name == 'Progress' - - -def test_load_descriptor(): - message_pb = descriptor_pb2.DescriptorProto(name='Riddle') - address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') - api = make_api() - api._load_descriptor(message_pb=message_pb, address=address, info={}) - assert 'foo.bar.v1.Riddle' in api.messages - assert isinstance(api.messages['foo.bar.v1.Riddle'], wrappers.MessageType) - assert api.messages['foo.bar.v1.Riddle'].message_pb == message_pb - - -def test_load_enum(): - # Set up the appropriate protos. - enum_value_pb = descriptor_pb2.EnumValueDescriptorProto(name='A', number=0) - enum_pb = descriptor_pb2.EnumDescriptorProto( - name='Enum', - value=[enum_value_pb], + # Finally, set up the file that encompasses these. + fdp = make_file_pb2( + package='google.example.v3', + messages=messages, + services=(service_pb,), ) - # Load it into the API. - address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') - api = make_api() - api._load_enum(enum_pb, address=address, info={}) + # Make the proto object. + proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ + 'google/longrunning/operations.proto': lro_proto, + }) + + # Establish that our data looks correct. 
+ assert len(proto.services) == 1 + assert len(proto.messages) == 3 + assert len(lro_proto.messages) == 1 - # Assert we got back the right stuff. - assert 'foo.bar.v1.Enum' in api.enums - assert isinstance(api.enums['foo.bar.v1.Enum'], wrappers.EnumType) - assert api.enums['foo.bar.v1.Enum'].enum_pb == enum_pb - assert len(api.enums['foo.bar.v1.Enum'].values) == 1 + +def test_enums(): + L = descriptor_pb2.SourceCodeInfo.Location + enum_pb = descriptor_pb2.EnumDescriptorProto(name='Silly', value=( + descriptor_pb2.EnumValueDescriptorProto(name='ZERO', number=0), + descriptor_pb2.EnumValueDescriptorProto(name='ONE', number=1), + descriptor_pb2.EnumValueDescriptorProto(name='THREE', number=3), + )) + fdp = make_file_pb2(package='google.enum.v1', enums=(enum_pb,), locations=( + L(path=(5, 0), leading_comments='This is the Silly enum.'), + L(path=(5, 0, 2, 0), leading_comments='This is the zero value.'), + L(path=(5, 0, 2, 1), leading_comments='This is the one value.'), + )) + proto = api.Proto.build(fdp, file_to_generate=True) + assert len(proto.enums) == 1 + enum = proto.enums['google.enum.v1.Silly'] + assert enum.meta.doc == 'This is the Silly enum.' + assert isinstance(enum, wrappers.EnumType) + assert len(enum.values) == 3 + assert all([isinstance(i, wrappers.EnumValueType) for i in enum.values]) + assert enum.values[0].name == 'ZERO' + assert enum.values[0].meta.doc == 'This is the zero value.' + assert enum.values[1].name == 'ONE' + assert enum.values[1].meta.doc == 'This is the one value.' 
+ assert enum.values[2].name == 'THREE' + assert enum.values[2].meta.doc == '' + + +def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, + messages: Sequence[descriptor_pb2.DescriptorProto] = (), + enums: Sequence[descriptor_pb2.EnumDescriptorProto] = (), + services: Sequence[descriptor_pb2.ServiceDescriptorProto] = (), + locations: Sequence[descriptor_pb2.SourceCodeInfo.Location] = (), + ) -> descriptor_pb2.FileDescriptorProto: + return descriptor_pb2.FileDescriptorProto( + name=name, + package=package, + message_type=messages, + enum_type=enums, + service=services, + source_code_info=descriptor_pb2.SourceCodeInfo(location=locations), + ) -def test_load_service(): - service_pb = descriptor_pb2.ServiceDescriptorProto(name='RiddleService') - address = metadata.Address(package=['foo', 'bar', 'v1'], module='baz') - api = make_api() - api._load_service(service_pb, address=address, info={}) - assert 'foo.bar.v1.RiddleService' in api.services - assert isinstance(api.services['foo.bar.v1.RiddleService'], - wrappers.Service) - assert api.services['foo.bar.v1.RiddleService'].service_pb == service_pb +def make_message_pb2(name: str, fields=()) -> descriptor_pb2.DescriptorProto: + return descriptor_pb2.DescriptorProto(name=name, field=fields) -def make_api(client: client_pb2.Client = None) -> API: - return API(client=client or make_client()) +def make_field_pb2(name: str, number: int, + type: int = 11, # 11 == message + type_name: str = None, + ) -> descriptor_pb2.FieldDescriptorProto: + return descriptor_pb2.FieldDescriptorProto( + name=name, + number=number, + type=type, + type_name=type_name, + ) -def make_client(**kwargs) -> client_pb2.Client: +def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('name', 'Hatstand') kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') - return client_pb2.Client(**kwargs) + kwargs.setdefault('product_name', 'Hatstand') + kwargs.setdefault('product_url', 
'https://cloud.google.com/hatstand/') + return naming.Naming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py new file mode 100644 index 000000000000..64496a47bc88 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -0,0 +1,178 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.api import annotations_pb2 +from google.api import metadata_pb2 +from google.protobuf import descriptor_pb2 + +from api_factory.schema import naming + + +def test_long_name(): + n = make_naming(name='Genie', namespace=['Agrabah', 'Lamp']) + assert n.long_name == 'Agrabah Lamp Genie' + + +def test_module_name(): + n = make_naming( + name='Genie', + namespace=['Agrabah', 'Lamp'], + version='v2', + ) + assert n.module_name == 'genie' + + +def test_versioned_module_name_no_version(): + n = make_naming( + name='Genie', + namespace=['Agrabah', 'Lamp'], + version='', + ) + assert n.versioned_module_name == 'genie' + + +def test_versioned_module_name(): + n = make_naming( + name='Genie', + namespace=['Agrabah', 'Lamp'], + version='v2', + ) + assert n.versioned_module_name == 'genie_v2' + + +def test_warehouse_package_name_no_namespace(): + n = make_naming(name='BigQuery', namespace=[]) + assert n.warehouse_package_name == 'bigquery' + + +def test_warehouse_package_name_with_namespace(): + n = make_naming( + name='BigQuery', 
+ namespace=('Google', 'Cloud'), + ) + assert n.warehouse_package_name == 'google-cloud-bigquery' + + +def test_warehouse_package_name_multiple_words(): + n = make_naming(name='Big Query', namespace=[]) + assert n.warehouse_package_name == 'big-query' + + +def test_build_no_annotations(): + protos = ( + descriptor_pb2.FileDescriptorProto( + name='baz_service.proto', + package='foo.bar.baz.v1', + ), + descriptor_pb2.FileDescriptorProto( + name='baz_common.proto', + package='foo.bar.baz.v1', + ), + ) + n = naming.Naming.build(*protos) + assert n.name == 'Baz' + assert n.namespace == ('Foo', 'Bar') + assert n.version == 'v1' + assert n.product_name == 'Baz' + + +def test_build_no_annotations_no_version(): + protos = ( + descriptor_pb2.FileDescriptorProto( + name='baz_service.proto', + package='foo.bar', + ), + descriptor_pb2.FileDescriptorProto( + name='baz_common.proto', + package='foo.bar', + ), + ) + n = naming.Naming.build(*protos) + assert n.name == 'Bar' + assert n.namespace == ('Foo',) + assert n.version == '' + + +def test_build_with_annotations(): + proto = descriptor_pb2.FileDescriptorProto( + name='spanner.proto', + package='google.spanner.v1', + ) + proto.options.Extensions[annotations_pb2.metadata].MergeFrom( + metadata_pb2.Metadata(package_namespace=['Google', 'Cloud']), + ) + n = naming.Naming.build(proto) + assert n.name == 'Spanner' + assert n.namespace == ('Google', 'Cloud') + assert n.version == 'v1' + assert n.product_name == 'Spanner' + + +def test_inconsistent_metadata_error(): + # Set up the first proto. + proto1 = descriptor_pb2.FileDescriptorProto( + name='spanner.proto', + package='google.spanner.v1', + ) + proto1.options.Extensions[annotations_pb2.metadata].MergeFrom( + metadata_pb2.Metadata(package_namespace=['Google', 'Cloud']), + ) + + # Set up the second proto. 
+ # Note that + proto2 = descriptor_pb2.FileDescriptorProto( + name='spanner2.proto', + package='google.spanner.v1', + ) + proto2.options.Extensions[annotations_pb2.metadata].MergeFrom( + metadata_pb2.Metadata(package_namespace=['Google', 'Cloud'], + package_name='Spanner'), + ) + + # This should error. Even though the data in the metadata is consistent, + # it is expected to exactly match, and it does not. + with pytest.raises(ValueError): + naming.Naming.build(proto1, proto2) + + +def test_inconsistent_package_error(): + proto1 = descriptor_pb2.FileDescriptorProto(package='google.spanner.v1') + proto2 = descriptor_pb2.FileDescriptorProto(package='spanner.v1') + proto3 = descriptor_pb2.FileDescriptorProto(package='google.spanner.v2') + + # These should all error against one another. + with pytest.raises(ValueError): + naming.Naming.build(proto1, proto2) + with pytest.raises(ValueError): + naming.Naming.build(proto1, proto3) + + +def test_subpackages(): + proto1 = descriptor_pb2.FileDescriptorProto(package='google.ads.v0.foo') + proto2 = descriptor_pb2.FileDescriptorProto(package='google.ads.v0.bar') + n = naming.Naming.build(proto1, proto2) + assert n.name == 'Ads' + assert n.namespace == ('Google',) + assert n.version == 'v0' + + +def make_naming(**kwargs) -> naming.Naming: + kwargs.setdefault('name', 'Hatstand') + kwargs.setdefault('namespace', ('Google', 'Cloud')) + kwargs.setdefault('version', 'v1') + kwargs.setdefault('product_name', 'Hatstand') + kwargs.setdefault('product_url', 'https://cloud.google.com/hatstand/') + return naming.Naming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 0c04aca429dd..48ecf7bbf902 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -12,12 +12,13 @@ # See the License for the specific language governing 
permissions and # limitations under the License. +import collections + +from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from api_factory.schema import metadata from api_factory.schema import wrappers -from api_factory.schema.pb import headers_pb2 -from api_factory.schema.pb import overload_pb2 def get_method() -> wrappers.Method: @@ -33,14 +34,6 @@ def get_method() -> wrappers.Method: output_type='foo.bar.Output', ) - # Set an overload in the method descriptor. - ext_key = overload_pb2.overloads - method_pb.options.Extensions[ext_key].extend([overload_pb2.Overload()]) - - # Set a field header in the method descriptor. - ext_key = headers_pb2.field_headers - method_pb.options.Extensions[ext_key].extend([headers_pb2.FieldHeader()]) - # Instantiate the wrapper class. return wrappers.Method( method_pb=method_pb, @@ -71,13 +64,11 @@ def test_method_types(): assert method.output.pb2_module == 'bacon_pb2' -def test_method_overloads(): +def test_method_signature(): method = get_method() - for overload in method.overloads: - assert isinstance(overload, overload_pb2.Overload) + assert isinstance(method.signature, signature_pb2.MethodSignature) def test_method_field_headers(): method = get_method() - for field_header in method.field_headers: - assert isinstance(field_header, headers_pb2.FieldHeader) + assert isinstance(method.field_headers, collections.Sequence) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 3669ea493430..9f6e8e608c8e 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -14,12 +14,12 @@ import typing +from google.api import annotations_pb2 +from google.api import http_pb2 from google.protobuf import descriptor_pb2 from api_factory.schema import metadata from api_factory.schema import wrappers -from 
api_factory.schema.pb import client_pb2 -from api_factory.schema.pb import headers_pb2 def test_service_properties(): @@ -34,7 +34,7 @@ def test_service_host(): def test_service_no_host(): service = make_service() - assert service.host == '<<< HOSTNAME >>>' + assert service.host == '<<< SERVICE ADDRESS >>>' assert bool(service.host) is False @@ -51,23 +51,23 @@ def test_service_no_scopes(): def test_service_pb2_modules(): service = make_service() - assert service.pb2_modules == [ + assert service.pb2_modules == ( ('a.b.v1', 'c_pb2'), ('foo', 'bacon_pb2'), ('foo', 'bar_pb2'), ('foo', 'baz_pb2'), ('x.y.v1', 'z_pb2'), - ] + ) def test_service_pb2_modules_lro(): service = make_service_with_method_options() - assert service.pb2_modules == [ + assert service.pb2_modules == ( ('foo', 'bar_pb2'), ('foo', 'baz_pb2'), ('foo', 'qux_pb2'), ('google.longrunning', 'operations_pb2'), - ] + ) def test_service_no_lro(): @@ -86,7 +86,8 @@ def test_service_no_field_headers(): def test_service_has_field_headers(): - service = make_service_with_method_options() + http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') + service = make_service_with_method_options(http_rule=http_rule) assert service.has_field_headers @@ -108,9 +109,8 @@ def make_service(name: str = 'Placeholder', host: str = '', # appropriate. service_pb = descriptor_pb2.ServiceDescriptorProto(name=name) if host: - service_pb.options.Extensions[client_pb2.host] = host - if scopes: - service_pb.options.Extensions[client_pb2.oauth_scopes].extend(scopes) + service_pb.options.Extensions[annotations_pb2.default_host] = host + service_pb.options.Extensions[annotations_pb2.oauth].scopes.extend(scopes) # Return a service object to test. 
return wrappers.Service( @@ -119,7 +119,9 @@ def make_service(name: str = 'Placeholder', host: str = '', ) -def make_service_with_method_options() -> wrappers.Service: +def make_service_with_method_options(*, + http_rule: http_pb2.HttpRule = None, + ) -> wrappers.Service: # Declare a method with options enabled for long-running operations and # field headers. method = get_method( @@ -128,7 +130,7 @@ def make_service_with_method_options() -> wrappers.Service: 'google.longrunning.operations.Operation', lro_payload_type='foo.baz.ThingResponse', lro_metadata_type='foo.qux.ThingMetadata', - field_headers=(headers_pb2.FieldHeader(),) + http_rule=http_rule, ) # Define a service descriptor. @@ -146,12 +148,12 @@ def get_method(name: str, out_type: str, lro_payload_type: str = '', lro_metadata_type: str = '', - field_headers: typing.Tuple[str] = (), + http_rule: http_pb2.HttpRule = None, ) -> wrappers.Method: input_ = get_message(in_type) output = get_message(out_type) lro_payload = get_message(lro_payload_type) if lro_payload_type else None - lro_metadata = get_message(lro_metadata_type) if lro_metadata_type else None + lro_meta = get_message(lro_metadata_type) if lro_metadata_type else None # Define a method descriptor. Set the field headers if appropriate. 
method_pb = descriptor_pb2.MethodDescriptorProto( @@ -159,13 +161,14 @@ def get_method(name: str, input_type=input_.proto_path, output_type=output.proto_path, ) - ext_key = headers_pb2.field_headers - method_pb.options.Extensions[ext_key].extend(field_headers) + if http_rule: + ext_key = annotations_pb2.http + method_pb.options.Extensions[ext_key].MergeFrom(http_rule) return wrappers.Method( method_pb=method_pb, input=input_, - lro_metadata=lro_metadata, + lro_metadata=lro_meta, lro_payload=lro_payload, output=output, ) From f011346f185d14eafdf93688afd096307cd6616e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 25 Jul 2018 10:19:57 -0700 Subject: [PATCH 0007/1339] Flattening implementation (#10) This adds a flattening implementation. Specifically: * It adds a `MethodSignature` and `MethodSignatures` wrapper object to reason about the `google.api.method_signature` annotation. * It refactors the LRO implementation slightly to use an `OperationType`; this means templates have far fewer "if LRO" forks. (Fixes #9.) * It fixes the routine headers implementation, which was broken (the joys of pre-alpha). * It creates the names `python_ident` and `sphinx_ident` for use in templates. * These replace the use of `{{ thing.python_module }}.{{ thing.name }}` and the manual use of the `~.` prefix for sphinx. * This is useful because sometimes these things are primitives, so this avoids template `if` logic. 
--- .../api_factory/generator/generator.py | 1 + .../gapic-generator/api_factory/schema/api.py | 112 ++++- .../api_factory/schema/metadata.py | 38 +- .../api_factory/schema/wrappers.py | 391 ++++++++++++++++-- .../$name_$version/$service/client.py.j2 | 123 +++--- .../$service/transports/base.py.j2 | 8 +- .../$service/transports/grpc.py.j2 | 12 +- .../$service/transports/http.py.j2 | 14 +- .../$namespace/$name_$version/utils.py.j2 | 22 + .../api_factory/utils/lines.py | 11 +- packages/gapic-generator/docs/index.rst | 1 + packages/gapic-generator/docs/process.rst | 76 +--- packages/gapic-generator/docs/status.rst | 4 - packages/gapic-generator/docs/templates.rst | 86 ++++ .../tests/unit/schema/test_api.py | 69 +++- .../tests/unit/schema/test_metadata.py | 19 +- .../tests/unit/schema/wrappers/test_enums.py | 53 ++- .../tests/unit/schema/wrappers/test_field.py | 145 ++++++- .../unit/schema/wrappers/test_message.py | 117 +++++- .../tests/unit/schema/wrappers/test_method.py | 129 ++++-- .../unit/schema/wrappers/test_operation.py | 40 ++ .../unit/schema/wrappers/test_service.py | 27 +- .../unit/schema/wrappers/test_signature.py | 85 ++++ 23 files changed, 1284 insertions(+), 299 deletions(-) create mode 100644 packages/gapic-generator/api_factory/templates/$namespace/$name_$version/utils.py.j2 create mode 100644 packages/gapic-generator/docs/templates.rst create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index 4292ebcf7cf5..e4be4cde8828 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -111,6 +111,7 @@ def _render_templates( answer.append(CodeGeneratorResponse.File( content=self._env.get_template(template_name).render( 
api=self._api, + len=len, **additional_context ).strip() + '\n', name=self._get_output_filename( diff --git a/packages/gapic-generator/api_factory/schema/api.py b/packages/gapic-generator/api_factory/schema/api.py index 5f32342f2f46..c2700b7c4f04 100644 --- a/packages/gapic-generator/api_factory/schema/api.py +++ b/packages/gapic-generator/api_factory/schema/api.py @@ -190,10 +190,31 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # message (e.g. the hard-code `4` for `message_type` immediately # below is because `repeated DescriptorProto message_type = 4;` in # descriptor.proto itself). - self._load_children(file_descriptor.message_type, self._load_message, - address=address, path=(4,)) self._load_children(file_descriptor.enum_type, self._load_enum, address=address, path=(5,)) + self._load_children(file_descriptor.message_type, self._load_message, + address=address, path=(4,)) + + # Edge case: Protocol buffers is not particularly picky about + # ordering, and it is possible that a message will have had a field + # referencing another message which appears later in the file + # (or itself, recursively). + # + # In this situation, we would not have come across the message yet, + # and the field would have its original textual reference to the + # message (`type_name`) but not its resolved message wrapper. + for message in self.messages.values(): + for field in message.fields.values(): + if field.type_name and not any((field.message, field.enum)): + object.__setattr__( + field, 'message', + self.messages[field.type_name.lstrip('.')], + ) + + # Only generate the service if this is a target file to be generated. + # This prevents us from generating common services (e.g. LRO) when + # they are being used as an import just to get types declared in the + # same files. 
if file_to_generate: self._load_children(file_descriptor.service, self._load_service, address=address, path=(6,)) @@ -211,11 +232,38 @@ def proto(self) -> Proto: ) @cached_property - def all_messages(self) -> Sequence[wrappers.MessageType]: + def all_enums(self) -> Mapping[str, wrappers.EnumType]: + return collections.ChainMap({}, self.enums, + *[p.enums for p in self.prior_protos.values()], + ) + + @cached_property + def all_messages(self) -> Mapping[str, wrappers.MessageType]: return collections.ChainMap({}, self.messages, *[p.messages for p in self.prior_protos.values()], ) + def _get_operation_type(self, + response_type: wrappers.Method, + metadata_type: wrappers.Method = None, + ) -> wrappers.PythonType: + """Return a wrapper around Operation that designates the end result. + + Args: + response_type (~.wrappers.Method): The response type that + the Operation ultimately uses. + metadata_type (~.wrappers.Method): The metadata type that + the Operation ultimately uses, if any. + + Returns: + ~.wrappers.OperationType: An OperationType object, which is + sent down to templates, and aware of the LRO types used. + """ + return wrappers.OperationType( + lro_response=response_type, + lro_metadata=metadata_type, + ) + def _load_children(self, children: Sequence, loader: Callable, *, address: metadata.Address, path: Tuple[int]) -> None: """Return wrapped versions of arbitrary children from a Descriptor. @@ -258,10 +306,21 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], :class:`~.wrappers.Field` objects. """ # Iterate over the fields and collect them into a dictionary. + # + # The saving of the enum and message types rely on protocol buffers' + # naming rules to trust that they will never collide. + # + # Note: If this field is a recursive reference to its own message, + # then the message will not be in `all_messages` yet (because the + # message wrapper is not yet created, because it needs this object + # first) and this will be None. 
This case is addressed in the + # `_load_message` method. answer = collections.OrderedDict() for field_pb, i in zip(field_pbs, range(0, sys.maxsize)): answer[field_pb.name] = wrappers.Field( field_pb=field_pb, + enum=self.all_enums.get(field_pb.type_name.lstrip('.')), + message=self.all_messages.get(field_pb.type_name.lstrip('.')), meta=metadata.Metadata( address=address, documentation=self.docs.get(path + (i,), self.EMPTY), @@ -292,16 +351,29 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], answer = collections.OrderedDict() for meth_pb, i in zip(methods, range(0, sys.maxsize)): types = meth_pb.options.Extensions[operations_pb2.operation_types] + + # If the output type is google.longrunning.Operation, we use + # a specialized object in its place. + output_type = self.all_messages[meth_pb.output_type.lstrip('.')] + if meth_pb.output_type.endswith('google.longrunning.Operation'): + output_type = self._get_operation_type( + response_type=self.all_messages[ + address.resolve(types.response) + ], + metadata_type=self.all_messages.get( + address.resolve(types.metadata), + ), + ) + + # Create the method wrapper object. answer[meth_pb.name] = wrappers.Method( input=self.all_messages[meth_pb.input_type.lstrip('.')], - lro_metadata=self.all_messages.get(types.metadata, None), - lro_payload=self.all_messages.get(types.response, None), method_pb=meth_pb, meta=metadata.Metadata( address=address, documentation=self.docs.get(path + (i,), self.EMPTY), ), - output=self.all_messages[meth_pb.output_type.lstrip('.')], + output=output_type, ) # Done; return the answer. @@ -311,17 +383,31 @@ def _load_message(self, message_pb: descriptor_pb2.DescriptorProto, address: metadata.Address, path: Tuple[int]) -> None: """Load message descriptions from DescriptorProtos.""" ident = f'{str(address)}.{message_pb.name}' - nested_addr = address.child(message_pb.name) + message_addr = address.child(message_pb.name) + + # Load all nested items. 
+ # + # Note: This occurs before piecing together this message's fields + # because if nested types are present, they are generally the + # type of one of this message's fields, and they need to be in + # the registry for the field's message or enum attributes to be + # set correctly. + self._load_children(message_pb.enum_type, address=message_addr, + loader=self._load_enum, path=path + (4,)) + self._load_children(message_pb.nested_type, address=message_addr, + loader=self._load_message, path=path + (3,)) + # self._load_children(message.oneof_decl, loader=self._load_field, + # address=nested_addr, info=info.get(8, {})) # Create a dictionary of all the fields for this message. fields = self._get_fields( message_pb.field, - address=nested_addr, + address=message_addr, path=path + (2,), ) fields.update(self._get_fields( message_pb.extension, - address=nested_addr, + address=message_addr, path=path + (6,), )) @@ -335,14 +421,6 @@ def _load_message(self, message_pb: descriptor_pb2.DescriptorProto, ), ) - # Load all nested items. - self._load_children(message_pb.nested_type, address=nested_addr, - loader=self._load_message, path=path + (3,)) - self._load_children(message_pb.enum_type, address=nested_addr, - loader=self._load_enum, path=path + (4,)) - # self._load_children(message.oneof_decl, loader=self._load_field, - # address=nested_addr, info=info.get(8, {})) - def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, address: metadata.Address, path: Tuple[int]) -> None: """Load enum descriptions from EnumDescriptorProtos.""" diff --git a/packages/gapic-generator/api_factory/schema/metadata.py b/packages/gapic-generator/api_factory/schema/metadata.py index bf6cd1c6b7d6..48f3267e0856 100644 --- a/packages/gapic-generator/api_factory/schema/metadata.py +++ b/packages/gapic-generator/api_factory/schema/metadata.py @@ -26,21 +26,20 @@ with the things they describe for easy access in templates. 
""" -import copy import dataclasses -from typing import List +from typing import Tuple from google.protobuf import descriptor_pb2 @dataclasses.dataclass(frozen=True) class Address: - package: List[str] = dataclasses.field(default_factory=list) + package: Tuple[str] = dataclasses.field(default_factory=tuple) module: str = '' - parent: List[str] = dataclasses.field(default_factory=list) + parent: Tuple[str] = dataclasses.field(default_factory=tuple) def __str__(self): - return '.'.join(self.package + self.parent) + return '.'.join(tuple(self.package) + tuple(self.parent)) def child(self, child_name: str) -> 'Address': """Return a new Address with ``child_name`` appended to its parent. @@ -53,9 +52,32 @@ def child(self, child_name: str) -> 'Address': Returns: ~.Address: The new address object. """ - answer = copy.deepcopy(self) - answer.parent.append(child_name) - return answer + return type(self)( + module=self.module, + package=self.package, + parent=self.parent + (child_name,), + ) + + def resolve(self, selector: str) -> str: + """Resolve a potentially-relative protobuf selector. + + This takes a protobuf selector which may be fully-qualified + (e.g. `foo.bar.v1.Baz`) or may be relative (`Baz`) and + returns the fully-qualified version. + + This method is naive and does not check to see if the message + actually exists. + + Args: + selector (str): A protobuf selector, either fully-qualified + or relative. + + Returns: + str: An absolute selector. + """ + if '.' not in selector: + return f'{".".join(self.package)}.{selector}' + return selector @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/api_factory/schema/wrappers.py index d9e095d10194..1063a8d7e1ba 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -27,54 +27,199 @@ Documentation is consistently at ``{thing}.meta.doc``. 
""" +import collections import dataclasses import re -from typing import List, Mapping, Sequence, Tuple +from typing import Iterable, List, Mapping, Sequence, Tuple, Union from google.api import annotations_pb2 from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from api_factory import utils -from api_factory.schema.metadata import Metadata +from api_factory.schema import metadata @dataclasses.dataclass(frozen=True) class Field: """Description of a field.""" field_pb: descriptor_pb2.FieldDescriptorProto - meta: Metadata = dataclasses.field(default_factory=Metadata) + message: 'MessageType' = None + enum: 'EnumType' = None + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name): return getattr(self.field_pb, name) + @property + def is_primitive(self) -> bool: + """Return True if the field is a primitive, False otherwise.""" + return isinstance(self.type, PythonType) + + @property + def python_ident(self) -> str: + """Return the identifier to be used in templates. + + Because we import modules as a whole, rather than individual + members from modules, this is consistently `module.Name`. + + This property also adds the Sequence[] notation for repeated fields. + """ + if self.repeated: + return f'Sequence[{self.type.python_ident}]' + return self.type.python_ident + + @property + def repeated(self) -> bool: + """Return True if this is a repeated field, False otherwise. + + Returns: + bool: Whether this field is repeated. + """ + return self.label == \ + descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + + @property + def required(self) -> bool: + """Return True if this is a required field, False otherwise. + + Returns: + bool: Whether this field is required. + """ + return bool(self.options.Extensions[annotations_pb2.required]) + + @property + def sphinx_ident(self) -> str: + """Return the identifier to be used in templates for Sphinx. 
+ + This property also adds the Sequence[] notation for repeated fields. + """ + if self.repeated: + return f'Sequence[{self.type.sphinx_ident}]' + return self.type.sphinx_ident + + @utils.cached_property + def type(self) -> Union['MessageType', 'EnumType', 'PythonType']: + """Return the type of this field.""" + # If this is a message or enum, return the appropriate thing. + if self.type_name and self.message: + return self.message + if self.type_name and self.enum: + return self.enum + + # This is a primitive. Return the corresponding Python type. + # The enum values used here are defined in: + # Repository: https://github.com/google/protobuf/ + # Path: src/google/protobuf/descriptor.proto + # + # The values are used here because the code would be excessively + # verbose otherwise, and this is guaranteed never to change. + # + # 10, 11, and 14 are intentionally missing. They correspond to + # group (unused), message (covered above), and enum (covered above). + if self.field_pb.type in (1, 2): + return PythonType(python_type=float) + if self.field_pb.type in (3, 4, 5, 6, 7, 13, 15, 16, 17, 18): + return PythonType(python_type=int) + if self.field_pb.type == 8: + return PythonType(python_type=bool) + if self.field_pb.type == 9: + return PythonType(python_type=str) + if self.field_pb.type == 12: + return PythonType(python_type=bytes) + + # This should never happen. + raise TypeError('Unrecognized protobuf type. 
This code should ' + 'not be reachable; please file a bug.') + @dataclasses.dataclass(frozen=True) class MessageType: """Description of a message (defined with the ``message`` keyword).""" message_pb: descriptor_pb2.DescriptorProto fields: Mapping[str, Field] - meta: Metadata = dataclasses.field(default_factory=Metadata) + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name): return getattr(self.message_pb, name) - @property - def pb2_module(self) -> str: - """Return the name of the Python pb2 module.""" - return f'{self.meta.address.module}_pb2' + def get_field(self, *field_path: Sequence[str]) -> Field: + """Return a field arbitrarily deep in this message's structure. + + This method recursively traverses the message tree to return the + requested inner-field. + + Traversing through repeated fields is not supported; a repeated field + may be specified if and only if it is the last field in the path. + + Args: + field_path (Sequence[str]): The field path. + + Returns: + ~.Field: A field object. + + Raises: + KeyError: If a repeated field is used in the non-terminal position + in the path. + """ + # Get the first field in the path. + cursor = self.fields[field_path[0]] + + # Base case: If this is the last field in the path, return it outright. + if len(field_path) == 1: + return cursor + + # Sanity check: If cursor is a repeated field, then raise an exception. + # Repeated fields are only permitted in the terminal position. + if cursor.repeated: + raise KeyError( + f'The {cursor.name} field is repeated; unable to use ' + '`get_field` to retrieve its children.\n' + 'This exception usually indicates that a ' + 'google.api.method_signature annotation uses a repeated field ' + 'in the fields list in a position other than the end.', + ) + + # Recursion case: Pass the remainder of the path to the sub-field's + # message. 
+ return cursor.message.get_field(*field_path[1:]) @property def proto_path(self) -> str: """Return the fully qualfied proto path as a string.""" return f'{str(self.meta.address)}.{self.name}' + @property + def python_ident(self) -> str: + """Return the identifier to be used in templates. + + Because we import modules as a whole, rather than individual + members from modules, this is consistently `module.Name`. + """ + return f'{self.python_module}.{self.name}' + + @property + def python_module(self) -> str: + """Return the name of the Python pb2 module.""" + return f'{self.meta.address.module}_pb2' + + @property + def sphinx_ident(self) -> str: + """Return the identifier to be used in templates for Sphinx.""" + return f'~.{self.python_ident}' + @dataclasses.dataclass(frozen=True) class EnumValueType: """Description of an enum value.""" enum_value_pb: descriptor_pb2.EnumValueDescriptorProto - meta: Metadata = dataclasses.field(default_factory=Metadata) + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name): return getattr(self.enum_value_pb, name) @@ -85,11 +230,123 @@ class EnumType: """Description of an enum (defined with the ``enum`` keyword.)""" enum_pb: descriptor_pb2.EnumDescriptorProto values: List[EnumValueType] - meta: Metadata = dataclasses.field(default_factory=Metadata) + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name): return getattr(self.enum_pb, name) + @property + def python_ident(self) -> str: + """Return the identifier to be used in templates. + + Because we import modules as a whole, rather than individual + members from modules, this is consistently `module.Name`. 
+ """ + return f'{self.python_module}.{self.name}' + + @property + def python_module(self) -> str: + """Return the name of the Python pb2 module.""" + return f'{self.meta.address.module}_pb2' + + @property + def sphinx_ident(self) -> str: + """Return the identifier to be used in templates for Sphinx.""" + return f'~.{self.python_ident}' + + +@dataclasses.dataclass(frozen=True) +class PythonType: + """Wrapper class for Python types. + + This exists for interface consistency, so that methods like + :meth:`Field.type` can return an object and the caller can be confident + that a ``name`` property will be present. + """ + python_type: type + + @property + def name(self) -> str: + return self.python_type.__name__ + + @property + def python_ident(self) -> str: + """Return the identifier to be used in templates. + + Primitives have no import, and no module to reference, so this + is simply the name of the class (e.g. "int", "str"). + """ + return self.name + + @property + def sphinx_ident(self) -> str: + """Return the identifier to be used in templates for Sphinx.""" + return f'{self.python_ident}' + + +@dataclasses.dataclass(frozen=True) +class OperationType: + """Wrapper class for :class:`~.operations.Operation`. + + This exists for interface consistency, so Operations can be used + alongside :class:`~.MessageType` instances. + """ + lro_response: MessageType + lro_metadata: MessageType = None + + @utils.cached_property + def meta(self) -> metadata.Metadata: + """Return a Metadata object.""" + return metadata.Metadata( + address=metadata.Address( + module='operation', + package=('google', 'api_core'), + ), + documentation=descriptor_pb2.SourceCodeInfo.Location( + leading_comments='An object representing a long-running ' + 'operation. 
\n\n' + 'The result type for the operation will be ' + ':class:`~.{module}.{name}`: {doc}'.format( + doc=self.lro_response.meta.doc, + module=self.lro_response.python_module, + name=self.lro_response.name, + ), + ), + ) + + @property + def name(self) -> str: + """Return the class name.""" + # This is always "Operation", because it is always a reference to + # `google.api_core.operation.Operation`. + # + # This is hard-coded rather than subclassing PythonType (above) so + # that this generator is not forced to take an entire dependency + # on google.api_core just to get these strings. + return 'Operation' + + @property + def python_ident(self) -> str: + """Return the identifier to be used in templates.""" + return f'{self.python_module}.{self.name}' + + @property + def python_module(self) -> str: + """Return the name of the Python module.""" + # This is always "operation", because it is always a reference to + # `google.api_core.operation.Operation`. + # + # This is hard-coded rather than subclassing PythonType (above) so + # that this generator is not forced to take an entire dependency + # on google.api_core just to get these strings. 
+ return self.meta.address.module + + @property + def sphinx_ident(self) -> str: + return f'~.{self.python_ident}' + @dataclasses.dataclass(frozen=True) class Method: @@ -97,9 +354,9 @@ class Method: method_pb: descriptor_pb2.MethodDescriptorProto input: MessageType output: MessageType - lro_payload: MessageType = None - lro_metadata: MessageType = None - meta: Metadata = dataclasses.field(default_factory=Metadata) + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name): return getattr(self.method_pb, name) @@ -112,10 +369,81 @@ def field_headers(self) -> Sequence[str]: return tuple(re.findall(r'\{([a-z][\w\d_.]+)=', http.get)) return () - @property - def signature(self) -> signature_pb2.MethodSignature: + @utils.cached_property + def signatures(self) -> Tuple[signature_pb2.MethodSignature]: """Return the signature defined for this method.""" - return self.options.Extensions[annotations_pb2.method_signature] + sig_pb2 = self.options.Extensions[annotations_pb2.method_signature] + + # Sanity check: If there are no signatures (which should be by far + # the common case), just abort now. + if len(sig_pb2.fields) == 0: + return () + + # Signatures are annotated with an `additional_signatures` key that + # allows for specifying additional signatures. This is an uncommon + # case but we still want to deal with it. + answer = [] + for sig in (sig_pb2,) + tuple(sig_pb2.additional_signatures): + # Build a MethodSignature object with the appropriate name + # and fields. The fields are field objects, retrieved from + # the method's `input` message. + answer.append(MethodSignature( + name=sig.function_name if sig.function_name else self.name, + fields=collections.OrderedDict([ + (f.split('.')[-1], self.input.get_field(f)) + for f in sig.fields + ]), + )) + + # Done; return a tuple of signatures. 
+ return MethodSignatures(all=tuple(answer)) + + +@dataclasses.dataclass(frozen=True) +class MethodSignature: + name: str + fields: Mapping[str, Field] + + @utils.cached_property + def dispatch_field(self) -> Union[MessageType, EnumType, PythonType]: + """Return the first field. + + This is what is used for `functools.singledispatch`.""" + return next(iter(self.fields.values())) + + +@dataclasses.dataclass(frozen=True) +class MethodSignatures: + all: Tuple[MethodSignature] + + def __getitem__(self, key: Union[int, slice]) -> MethodSignature: + return self.all[key] + + def __iter__(self) -> Iterable[MethodSignature]: + return iter(self.all) + + def __len__(self) -> int: + return len(self.all) + + @utils.cached_property + def single_dispatch(self) -> Tuple[MethodSignature]: + """Return a tuple of signatures, grouped and deduped by dispatch type. + + In the Python 3 templates, we only honor at most one method + signature per initial argument type, and only for primitives. + + This method groups and deduplicates signatures and sends back only + the signatures that the template actually wants. + + Returns: + Tuple[MethodSignature]: Method signatures to be used with + "single dispatch" routing. 
+ """ + answer = collections.OrderedDict() + for sig in [i for i in self.all + if isinstance(i.dispatch_field.type, PythonType)]: + answer.setdefault(sig.dispatch_field.python_ident, sig) + return tuple(answer.values()) @dataclasses.dataclass(frozen=True) @@ -123,7 +451,9 @@ class Service: """Description of a service (defined with the ``service`` keyword).""" service_pb: descriptor_pb2.ServiceDescriptorProto methods: Mapping[str, Method] - meta: Metadata = dataclasses.field(default_factory=Metadata) + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name): return getattr(self.service_pb, name) @@ -159,15 +489,15 @@ def module_name(self) -> str: return utils.to_snake_case(self.name) @property - def pb2_modules(self) -> Sequence[Tuple[str, str]]: - """Return a sequence of pb2 modules, for import. + def python_modules(self) -> Sequence[Tuple[str, str]]: + """Return a sequence of Python modules, for import. The results of this method are in alphabetical order (by package, then module), and do not contain duplicates. Returns: - Sequence[str, str]: The package and pb2_module pair, intended - for use in a ``from package import pb2_module`` type + Sequence[str, str]: The package and module pair, intended + for use in a ``from package import module`` type of statement. """ answer = set() @@ -176,31 +506,32 @@ def pb2_modules(self) -> Sequence[Tuple[str, str]]: # messages. (These are usually the same, but not necessarily.) answer.add(( '.'.join(method.input.meta.address.package), - method.input.pb2_module, + method.input.python_module, )) answer.add(( '.'.join(method.output.meta.address.package), - method.output.pb2_module, + method.output.python_module, )) # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. 
- if method.lro_payload: + if getattr(method.output, 'lro_response', None): answer.add(( - '.'.join(method.lro_payload.meta.address.package), - method.lro_payload.pb2_module, + '.'.join(method.output.lro_response.meta.address.package), + method.output.lro_response.python_module, )) - if method.lro_metadata: + if getattr(method.output, 'lro_metadata', None): answer.add(( - '.'.join(method.lro_metadata.meta.address.package), - method.lro_metadata.pb2_module, + '.'.join(method.output.lro_metadata.meta.address.package), + method.output.lro_metadata.python_module, )) return tuple(sorted(answer)) @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" - return any([m.lro_payload for m in self.methods.values()]) + return any([hasattr(m.output, 'lro_response') + for m in self.methods.values()]) @property def has_field_headers(self) -> bool: diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 index c9d79c5e7242..f415c53e1256 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 @@ -1,23 +1,17 @@ {% extends '_base.py.j2' %} {% block content %} -import functools import pkg_resources from typing import Mapping, Optional, Sequence, Tuple, Union -{%- if service.has_field_headers %} -from urllib import parse -{%- endif %} from google.api_core import gapic_v1 -{%- if service.has_lro %} -from google.api_core import operation -{%- endif %} from google.api_core import retry from google.auth import credentials -{% for package, pb2_module in service.pb2_modules -%} -from {{ package }} import {{ pb2_module }} +{% for package, python_module in service.python_modules -%} +from {{ package }} import {{ python_module }} {% endfor %} +from ..utils 
import dispatch from .transports import get_transport_class from .transports import {{ service.name }}Transport @@ -57,23 +51,21 @@ class {{ service.name }}: else: Transport = get_transport_class(transport) self._transport = Transport(credentials=credentials) - {%- for method in service.methods.values() %} - @functools.singledispatch + {% for method in service.methods.values() -%} + {%- if method.signatures.single_dispatch -%} + @dispatch + {% endif -%} def {{ method.name|snake_case }}(self, - request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, + request: {{ method.input.python_ident }}, *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - {%- if method.lro_payload %} - ) -> operation.Operation: - {%- else %} - ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: - {%- endif %} + ) -> {{ method.output.python_ident }}: """{{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} Args: - request (~.{{ method.input.pb2_module }}.{{ method.input.name }}): + request ({{ method.input.sphinx_ident }}): The request object. {{ method.input.meta.doc|wrap(width=72, initial_width=36, subsequent_indent=' ' * 16) }} retry (~.retry.Retry): Designation of what errors, if any, @@ -83,15 +75,14 @@ class {{ service.name }}: sent alont with the request as metadata. Returns: - {%- if method.lro_payload %} - ~.operation.Operation: - A :class:`google.api_core.operation.Operation` instance. - {%- else %} - ~.{{ method.output.pb2_module }}.{{ method.output.name }}: + {{ method.output.sphinx_ident }}: {{ method.output.meta.doc|wrap(width=72, initial_width=56, subsequent_indent=' ' * 16) }} - {%- endif %} """ + # Coerce the request to the protocol buffer object. + if not isinstance(request, {{ method.input.python_ident }}): + request = {{ method.input.python_ident }}(**request) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method.wrap_method( @@ -102,44 +93,78 @@ class {{ service.name }}: ) {%- if method.field_headers %} - # Ensure metadata is a mutable sequence. - metadata = list(metadata) - - # Add request headers to metadata. - {%- for field_header in method.field_headers %} - - try: - header_value = request.{{ field_header.field }} - except AttributeError: - # Suppress when attribute missing. - pass - else: - metadata.append(( - '{{ field_header.header }}', - '{}={}'.format( - parse.quote_plus('{{ field_header.field }}'), - parse.quote_plus(header_value), - ), - )) - {%- endfor %} + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata({ + {%- for field_header in method.field_headers %} + '{{ field_header }}': request.{{ field_header }}, + {%- endfor %} + }), + ) {%- endif %} # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) - {%- if method.lro_payload %} + response = rpc(request, retry=retry, + timeout=timeout, metadata=metadata) + {%- if method.output.lro_response %} # Wrap the response in an operation future response = operation.from_gapic( response, self._transport.operations_client, - {{ method.lro_payload.pb2_module }}.{{ method.lro_payload.name }}, - metadata_type={{ method.lro_metadata.pb2_module }}.{{ method.lro_metadata.name }} + {{ method.output.lro_response.python_ident }}, + {%- if method.output.lro_metadata %} + metadata_type={{ method.output.lro_metadata.python_ident }}, + {%- endif %} ) {%- endif %} # Done; return the response. 
return response - {%- endfor %} + + {% for signature in method.signatures.single_dispatch -%} + @{{ method.name|snake_case }}.register + def _{{ method.name|snake_case }}_with_{{ signature.dispatch_field.name|snake_case }}(self, + {%- for field in signature.fields.values() %} + {{ field.name }}: {{ field.python_ident }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, + {%- endfor %} + *, + retry: retry.Retry = None, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> {{ method.output.python_ident }}: + """{{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} + + Args: + {%- for field in signature.fields.values() %} + {{ field.name }} ({{ field.sphinx_ident }}): + {{ field.meta.doc|wrap(width=72, initial_width=56, + subsequent_indent=' ' * 16) }} + {%- endfor %} + retry (~.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent alont with the request as metadata. 
+ + Returns: + {{ method.output.sphinx_ident }}: + {{ method.output.meta.doc|wrap(width=72, initial_width=56, + subsequent_indent=' ' * 16) }} + """ + return self.{{ method.name|snake_case }}( + {{ method.input.python_ident }}( + {%- for field in signature.fields.values() %} + {{ field.name }}={{ field.name }}, + {%- endfor %} + ), + retry=retry, + timeout=timeout, + metadata=metadata, + ) + {% endfor %} + {% endfor -%} @property def client_info(self) -> gapic_v1.client_info.ClientInfo: diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 index 44dc3e508c77..9d0d4f955f33 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 @@ -10,8 +10,8 @@ from google.api_core import operations_v1 {%- endif %} from google.auth import credentials -{% for package, pb2_module in service.pb2_modules -%} -from {{ package }} import {{ pb2_module }} +{% for package, python_module in service.python_modules -%} +from {{ package }} import {{ python_module }} {% endfor %} class {{ service.name }}Transport(metaclass=abc.ABCMeta): @@ -57,8 +57,8 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): @abc.abstractmethod def {{ method.name|snake_case }}( self, - request: {{ method.input.pb2_module }}.{{ method.input.name }}, - ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + request: {{ method.input.python_ident }}, + ) -> {{ method.output.python_ident }}: raise NotImplementedError {%- endfor %} {% endblock %} diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 index 
faa2919b393f..7b7400daf777 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 @@ -11,8 +11,8 @@ from google.auth import credentials import grpc -{% for package, pb2_module in service.pb2_modules -%} -from {{ package }} import {{ pb2_module }} +{% for package, python_module in service.python_modules -%} +from {{ package }} import {{ python_module }} {% endfor %} from .base import {{ service.name }}Transport @@ -82,8 +82,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @property def {{ method.name|snake_case }}(self) -> Callable[ - [{{ method.input.pb2_module }}.{{ method.input.name }}], - {{ method.output.pb2_module }}.{{ method.output.name }}]: + [{{ method.input.python_ident }}], + {{ method.output.python_ident }}]: """Return a callable for the {{- ' ' + (method.name|snake_case).replace('_',' ')|wrap(width=70, initial_width=25, subsequent_indent=" ") }} method over gRPC. 
@@ -103,8 +103,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.unary_unary( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.pb2_module }}.{{ method.input.name }}.SerializeToString, - response_deserializer={{ method.output.pb2_module }}.{{ method.output.name }}.FromString, + request_serializer={{ method.input.python_module }}.{{ method.input.name }}.SerializeToString, + response_deserializer={{ method.output.python_module }}.{{ method.output.name }}.FromString, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 index cfb354ac5379..7eb0ac3335ae 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 @@ -8,8 +8,8 @@ from google.api_core import operations_v1 from google.auth import credentials from google.auth.transport.requests import AuthorizedSession -{% for package, pb2_module in service.pb2_modules -%} -from {{ package }} import {{ pb2_module }} +{% for package, python_module in service.python_modules -%} +from {{ package }} import {{ python_module }} {% endfor %} from .base import {{ service.name }}Transport @@ -65,21 +65,21 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): {%- for method in service.methods.values() %} def {{ method.name|snake_case }}(self, - request: {{ method.input.pb2_module }}.{{ method.input.name }}, *, + request: {{ method.input.python_module }}.{{ method.input.name }}, *, metadata: Sequence[Tuple[str, 
str]] = (), - ) -> {{ method.output.pb2_module }}.{{ method.output.name }}: + ) -> {{ method.output.python_module }}.{{ method.output.name }}: """Call the {{ (method.name|snake_case).replace('_',' ')|wrap(width=70, initial_width=25, subsequent_indent=" ") }} method over HTTP. Args: - request (~.{{ method.input.pb2_module }}.{{ method.input.name }}): + request (~.{{ method.input.python_ident }}): The request object. {{ method.input.meta.doc|wrap(width=72, initial_width=36, subsequent_indent=' ' * 16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. Returns: - ~.{{ method.output.pb2_module }}.{{ method.output.name }}: + ~.{{ method.output.python_ident }}: {{ method.output.meta.doc|wrap(width=72, initial_width=56, subsequent_indent=' ' * 16) }} """ @@ -101,7 +101,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): ) # Return the response. - return {{ method.output.pb2_module }}.{{ method.output.name }}.FromString( + return {{ method.output.python_ident }}.FromString( response.content, ) {%- endfor %} diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/utils.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/utils.py.j2 new file mode 100644 index 000000000000..9c80ecf69647 --- /dev/null +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/utils.py.j2 @@ -0,0 +1,22 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import functools + + +def dispatch(func): + base_dispatcher = functools.singledispatch(func) + + # Define a wrapper function that works off args[1] instead of args[0]. + # This is needed because we are overloading *methods*, and their first + # argument is always `self`. + @functools.wraps(base_dispatcher) + def wrapper(*args, **kwargs): + return base_dispatcher.dispatch(args[1].__class__)(*args, **kwargs) + + # The register function is not changed, so let singledispatch do the work. 
+ wrapper.register = base_dispatcher.register + + # Done; return the decorated method. + return wrapper +{% endblock %} diff --git a/packages/gapic-generator/api_factory/utils/lines.py b/packages/gapic-generator/api_factory/utils/lines.py index 4b82a4965a8e..669c9484c72a 100644 --- a/packages/gapic-generator/api_factory/utils/lines.py +++ b/packages/gapic-generator/api_factory/utils/lines.py @@ -70,11 +70,19 @@ def wrap(text: str, width: int, initial_width: int = None, width -= len(antecedent_trailer) initial_width -= len(antecedent_trailer) + # Protocol buffers preserves single initial spaces after line breaks + # when parsing comments (such as the space before the "w" in "when" here). + # Re-wrapping causes these to be two spaces; correct for this. + text = text.replace('\n ', '\n') + # If the initial width is different, break off the beginning of the # string. first = '' if initial_width != width: - initial = textwrap.wrap(text, width=initial_width) + initial = textwrap.wrap(text, + break_long_words=False, + width=initial_width, + ) first = f'{initial[0]}\n' text = ' '.join(initial[1:]) @@ -85,6 +93,7 @@ def wrap(text: str, width: int, initial_width: int = None, # Wrap the remainder of the string at the desired width. text = first + textwrap.fill( + break_long_words=False, initial_indent=subsequent_indent if first else '', subsequent_indent=subsequent_indent, text=text, diff --git a/packages/gapic-generator/docs/index.rst b/packages/gapic-generator/docs/index.rst index 0ec3699bb16d..33b4c12d0464 100644 --- a/packages/gapic-generator/docs/index.rst +++ b/packages/gapic-generator/docs/index.rst @@ -22,5 +22,6 @@ implemented as a plugin to ``protoc``, the protocol buffer compiler. 
getting-started api-configuration process + templates status reference/index diff --git a/packages/gapic-generator/docs/process.rst b/packages/gapic-generator/docs/process.rst index cc6858224a66..4e3f3c583132 100644 --- a/packages/gapic-generator/docs/process.rst +++ b/packages/gapic-generator/docs/process.rst @@ -14,16 +14,16 @@ library. The protoc contract ~~~~~~~~~~~~~~~~~~~ -This code generator is written as a ``protoc`` plugin, which operates on +This code generator is written as a :command:`protoc` plugin, which operates on a defined contract. The contract is straightforward: a plugin must -accept a `CodeGeneratorRequest `_ (essentially a sequence of -`FileDescriptor `_ objects) and output a -`CodeGeneratorResponse `_. - -If you are unfamiliar with ``protoc`` plugins, welcome! That last paragraph -likely sounded not as straightforward as claimed. It may be useful to read -`plugin.proto`_ and `descriptor.proto`_ before continuing on. The former -describes the contract with plugins (such as this one) and is relatively +accept a ``CodeGeneratorRequest`` (essentially a sequence of +``FileDescriptor`` objects) and output a +``CodeGeneratorResponse``. + +If you are unfamiliar with :command:`protoc` plugins, welcome! That last +paragraph likely sounded not as straightforward as claimed. It may be useful +to read `plugin.proto`_ and `descriptor.proto`_ before continuing on. The +former describes the contract with plugins (such as this one) and is relatively easy to digest, the latter describes protocol buffer files themselves and is rather dense. The key point to grasp is that each ``.proto`` *file* compiles into one of these proto messages (called *descriptors*), and this plugin's @@ -53,7 +53,7 @@ parsing. The guts of this is handled by the :class:`~.schema.api.API` object, which is this plugin's internal representation of the full API client. 
In particular, this class has a :meth:`~.schema.api.API.build` method which -accepts a sequence of `FileDescriptor`_ objects (remember, this is ``protoc``'s +accepts a sequence of ``FileDescriptor`` objects (remember, this is ``protoc``'s internal representation of each proto file). That method iterates over each file and creates a :class:`~.schema.api.Proto` object for each one. @@ -101,53 +101,19 @@ Translation The translation step follows a straightfoward process to write the contents of client library files. -First, it loads every template in the ``templates/`` directory. -These are `Jinja`_ templates. **There is no master list of templates**; -it is assumed that every template in this directory should be rendered -(unless its name begins with a single underscore). - -The name of the output file is based on the name of the template, with -the following string replacements applied: - -* The ``.j2`` suffix is removed. -* ``$namespace`` is replaced with the namespace specified in the client, - converted to appropriate Python module case. If there is no namespace, - this segment is dropped. If the namespace has more than one element, - this is expanded out in the directory structure. (For example, a namespace - of ``['Acme', 'Manufacturing']`` will translate into ``acme/manufacturing/`` - directories.) -* ``$name`` is replaced with the client name. This is expected to be - present. -* ``$version`` is replaced with the client version (the version of the API). - If there is no specified version, this is dropped. -* ``$name_$version`` is a special case: It is replaced with the client - name, followed by the version. However, if there is no version, both it - and the underscore are dropped. -* ``$service`` is replaced with the service name, converted to appropriate - Python module case. There may be more than one service in an API; read on - for more about this. - -Every template receives **one** variable, spelled ``api``. 
It is the -:class:`~.schema.api.API` object that was pieced together in the parsing step. - -There is one caveat to the above, which is that an API can have more than -one service. Therefore, templates with ``$service/`` in their name -are a special case. These files are rendered *once per service*, with the -``$service`` directory name changed to the name of the service itself -(in snake case, because this is Python). Additionally, these templates -receive two variables: the ``api`` variable discussed above, as well as a -variable spelled ``service``, which corresponds to the -:class:`~/schema.wrappers.Service` currently being iterated over. +This works by reading in and rendering `Jinja`_ templates into a string. +The file path of the Jinja template is used to determine the filename +in the resulting client library. -.. note:: +More details on authoring templates is discussed on the :doc:`templates` +page. - The Jinja environment also receives a small number of filters useful - for writing properly formatted templates (e.g. a ``snake_case`` filter); - these are defined in :meth:`~.generator.generate` where the environment is - created. +Exit Point +~~~~~~~~~~ -After all templates are processed, any files in the ``generator/files/`` -directory are written. These are not templates, and they are read into -memory and eventually written with no processing whatsoever. +Once the individual strings corresponding to each file to be generated +is collected into memory, these are pieced together into a +``CodeGeneratorResponse`` object, which is serialized +and written to stdout. .. 
_Jinja: http://jinja.pocoo.org/docs/2.10/ diff --git a/packages/gapic-generator/docs/status.rst b/packages/gapic-generator/docs/status.rst index 3b79caf59820..c577528b3c4a 100644 --- a/packages/gapic-generator/docs/status.rst +++ b/packages/gapic-generator/docs/status.rst @@ -17,10 +17,6 @@ As this is experimental work, please note the following limitations: - gRPC must be installed even if you are not using it (this is due to some minor issues in ``api-core``). - Only unary calls are implemented at this point. -- No support for method argument flattening yet. - No support for samples yet. -- Request headers based on ``google.api.http`` annotations are not implemented - yet. -- No tests are implemented. .. _an awkward location: https://github.com/googleapis/api-common-protos/blob/input-contract/google/api/ diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst new file mode 100644 index 000000000000..e7da81ee56d2 --- /dev/null +++ b/packages/gapic-generator/docs/templates.rst @@ -0,0 +1,86 @@ +Templates +--------- + +This page provides a description of templates: how to write them, what +variables they receive, and so on and so forth. + +In many cases, it should be possible to provide alternative Python libraries +based on protocol buffers by only editing templates (or authoring new ones), +with no requirement to alter the primary codebase itself. + +Jinja +~~~~~ + +All templates are implemented in `Jinja`_, Armin Ronacher's excellent +templating library for Python. This document assumes that you are already +familiar with the basics of writing Jinja templates, and does not seek to +cover that here. + + +Locating Templates +~~~~~~~~~~~~~~~~~~ + +Templates are included in output simply on the basis that they exist. +**There is no master list of templates**; it is assumed that every template +should be rendered (unless its name begins with a single underscore). 
+ +The name of the output file is based on the name of the template, with +the following string replacements applied: + +* The ``.j2`` suffix is removed. +* ``$namespace`` is replaced with the namespace specified in the client, + converted to appropriate Python module case. If there is no namespace, + this segment is dropped. If the namespace has more than one element, + this is expanded out in the directory structure. (For example, a namespace + of ``['Acme', 'Manufacturing']`` will translate into ``acme/manufacturing/`` + directories.) +* ``$name`` is replaced with the client name. This is expected to be + present. +* ``$version`` is replaced with the client version (the version of the API). + If there is no specified version, this is dropped. +* ``$service`` is replaced with the service name, converted to appropriate + Python module case. There may be more than one service in an API; read on + for more about this. + +.. note:: + + ``$name_$version`` is a special case: It is replaced with the client + name, followed by the version. However, if there is no version, both it + and the underscore are dropped. + +Context (Variables) +~~~~~~~~~~~~~~~~~~~ + +Every template receives **one** variable, spelled ``api``. It is the +:class:`~.schema.api.API` object that was pieced together in the parsing step. + +APIs can (and often do) have more than one service. Therefore, templates +with ``$service/`` in their name are a special case. These files are +rendered *once per service*, with the ``$service`` directory name changed to +the name of the service itself (in snake case, because this is Python). +Additionally, these templates receive two variables: the ``api`` variable +discussed above, as well as a variable spelled ``service``, which corresponds +to the :class:`~.schema.wrappers.Service` currently being iterated over. + +.. note:: + + Templates technically receive one additional variable, ``len``; this is + the ``len`` function from the standard library. 
+ + +Filters +~~~~~~~ + +Additionally, templates receive a limited number of filters useful for +writing properly formatted templates. + +These are: + +* ``snake_case`` (:meth:`~.utils.case.to_snake_case`): Converts a string in + any sane case system to snake case. +* ``wrap`` (:meth:`~.utils.lines.wrap`): Wraps arbitrary text. Keyword + arguments on this method such as ``subsequent_indent`` and ``initial_width`` + should make it relatively easy to take an arbitrary string and make it + wrap to 79 characters appropriately. + +.. _Jinja: http://jinja.pocoo.org/docs/2.10/ diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index a37c73e5cd52..5805fe072aa8 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -38,11 +38,11 @@ def test_api_build(): make_message_pb2(name='Foo', fields=()), make_message_pb2(name='GetFooRequest', fields=( make_field_pb2(name='imported_message', number=1, - type_name='google.dep.ImportedMessasge'), + type_name='.google.dep.ImportedMessage'), )), make_message_pb2(name='GetFooResponse', fields=( make_field_pb2(name='foo', number=1, - type_name='google.example.v1.Foo'), + type_name='.google.example.v1.Foo'), )), ), services=(descriptor_pb2.ServiceDescriptorProto( @@ -103,16 +103,16 @@ def test_proto_builder_constructor(): # of children. assert lc.call_count == 3 - # The message type should come first. + # The enum type should come first. _, args, _ = lc.mock_calls[0] - assert args[0][0] == sentinel_message - assert args[1] == pb._load_message - - # The enum type should come second. - _, args, _ = lc.mock_calls[1] assert args[0][0] == sentinel_enum assert args[1] == pb._load_enum + # The message type should come second. + _, args, _ = lc.mock_calls[1] + assert args[0][0] == sentinel_message + assert args[1] == pb._load_message + # The services should come third. 
_, args, _ = lc.mock_calls[2] assert args[0][0] == sentinel_service @@ -163,6 +163,55 @@ def test_messages(): assert message.fields['bar'].meta.doc == 'This is the bar field.' +def test_messages_reverse_declaration_order(): + # Test that if a message is used as a field higher in the same file, + # that things still work. + message_pbs = ( + make_message_pb2(name='Foo', fields=( + make_field_pb2(name='bar', number=1, + type_name='.google.example.v3.Bar'), + ), + ), + make_message_pb2(name='Bar'), + ) + fdp = make_file_pb2( + messages=message_pbs, + package='google.example.v3', + ) + + # Make the proto object. + proto = api.Proto.build(fdp, file_to_generate=True) + + # Get the message. + assert len(proto.messages) == 2 + Foo = proto.messages['google.example.v3.Foo'] + assert Foo.fields['bar'].message == proto.messages['google.example.v3.Bar'] + + +def test_messages_recursive(): + # Test that if a message is used as a field higher in the same file, + # that things still work. + message_pbs = ( + make_message_pb2(name='Foo', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v3.Foo'), + ), + ), + ) + fdp = make_file_pb2( + messages=message_pbs, + package='google.example.v3', + ) + + # Make the proto object. + proto = api.Proto.build(fdp, file_to_generate=True) + + # Get the message. + assert len(proto.messages) == 1 + Foo = proto.messages['google.example.v3.Foo'] + assert Foo.fields['foo'].message == proto.messages['google.example.v3.Foo'] + + def test_services(): L = descriptor_pb2.SourceCodeInfo.Location @@ -362,6 +411,10 @@ def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, source_code_info=descriptor_pb2.SourceCodeInfo(location=locations), ) + # Make the proto object. 
+ proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ + 'google/longrunning/operations.proto': lro_proto, + }) def make_message_pb2(name: str, fields=()) -> descriptor_pb2.DescriptorProto: return descriptor_pb2.DescriptorProto(name=name, field=fields) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 162a75ba39cc..7b825f2ba754 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -20,26 +20,33 @@ def test_address_str_no_parent(): - addr = metadata.Address(package=['foo', 'bar'], module='baz') + addr = metadata.Address(package=('foo', 'bar'), module='baz') assert str(addr) == 'foo.bar' def test_address_str_parent(): - addr = metadata.Address(package=['foo', 'bar'], module='baz', - parent=['spam', 'eggs']) + addr = metadata.Address(package=('foo', 'bar'), module='baz', + parent=('spam', 'eggs')) assert str(addr) == 'foo.bar.spam.eggs' def test_address_child(): - addr = metadata.Address(package=['foo', 'bar'], module='baz') + addr = metadata.Address(package=('foo', 'bar'), module='baz') child = addr.child('bacon') - assert child.parent == ['bacon'] + assert child.parent == ('bacon',) assert str(child) == 'foo.bar.bacon' grandchild = child.child('ham') - assert grandchild.parent == ['bacon', 'ham'] + assert grandchild.parent == ('bacon', 'ham') assert str(grandchild) == 'foo.bar.bacon.ham' +def test_address_resolve(): + addr = metadata.Address(package=('foo', 'bar'), module='baz') + assert addr.resolve('Bacon') == 'foo.bar.Bacon' + assert addr.resolve('foo.bar.Bacon') == 'foo.bar.Bacon' + assert addr.resolve('google.example.Bacon') == 'google.example.Bacon' + + def test_doc_nothing(): meta = metadata.Metadata() assert meta.doc == '' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py 
index 97efd127fe2b..bcae6035b0d6 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -12,34 +12,55 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Tuple + from google.protobuf import descriptor_pb2 +from api_factory.schema import metadata from api_factory.schema import wrappers -def get_enum() -> wrappers.EnumType: +def test_enum_properties(): + enum_type = make_enum(name='Color') + assert enum_type.name == 'Color' + + +def test_enum_value_properties(): + enum_type = make_enum(name='Irrelevant', values=( + ('RED', 1), ('GREEN', 2), ('BLUE', 3), + )) + assert len(enum_type.values) == 3 + for ev, expected in zip(enum_type.values, ('RED', 'GREEN', 'BLUE')): + assert ev.name == expected + + +def test_enum_python_ident(): + message = make_enum('Baz', package='foo.v1', module='bar') + assert message.python_ident == 'bar_pb2.Baz' + + +def test_enum_sphinx_ident(): + message = make_enum('Baz', package='foo.v1', module='bar') + assert message.sphinx_ident == '~.bar_pb2.Baz' + + +def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', + values: Tuple[str, int] = (), meta: metadata.Metadata = None, + ) -> wrappers.EnumType: enum_value_pbs = [ - descriptor_pb2.EnumValueDescriptorProto(name='RED', number=1), - descriptor_pb2.EnumValueDescriptorProto(name='GREEN', number=2), - descriptor_pb2.EnumValueDescriptorProto(name='BLUE', number=3), + descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) + for i in values ] enum_pb = descriptor_pb2.EnumDescriptorProto( - name='Color', + name=name, value=enum_value_pbs, ) return wrappers.EnumType( enum_pb=enum_pb, values=[wrappers.EnumValueType(enum_value_pb=evpb) for evpb in enum_value_pbs], + meta=meta or metadata.Metadata(address=metadata.Address( + package=tuple(package.split('.')), + module=module, + )), ) - - -def 
test_enum_properties(): - enum_type = get_enum() - assert enum_type.name == 'Color' - - -def test_enum_value_properties(): - enum_type = get_enum() - for ev, expected in zip(enum_type.values, ('RED', 'GREEN', 'BLUE')): - assert ev.name == expected diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index e531f3331c91..cb6d198f3a37 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -12,33 +12,140 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + +from google.api import annotations_pb2 from google.protobuf import descriptor_pb2 from api_factory.schema import wrappers from api_factory.schema.metadata import Address, Metadata -def get_field() -> wrappers.Field: - field_pb = descriptor_pb2.FieldDescriptorProto( - name='my_field', - number=1, - type=descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_BOOL'), - ) - return wrappers.Field(field_pb=field_pb, meta=Metadata( - address=Address(package=['foo', 'bar'], module='baz'), - documentation=descriptor_pb2.SourceCodeInfo.Location( - leading_comments='Lorem ipsum dolor set amet', - ), - )) - - def test_field_properties(): - field = get_field() + Type = descriptor_pb2.FieldDescriptorProto.Type + field = make_field(name='my_field', number=1, type=Type.Value('TYPE_BOOL')) assert field.name == 'my_field' assert field.number == 1 - assert field.type == 8 + assert field.type.python_type == bool + + +def test_field_is_primitive(): + Type = descriptor_pb2.FieldDescriptorProto.Type + primitive_field = make_field(type=Type.Value('TYPE_INT32')) + assert primitive_field.is_primitive + + +def test_field_not_primitive(): + Type = descriptor_pb2.FieldDescriptorProto.Type + message = wrappers.MessageType( + fields={}, + message_pb=descriptor_pb2.DescriptorProto(), + ) + 
non_primitive_field = make_field( + type=Type.Value('TYPE_MESSAGE'), + type_name='bogus.Message', + message=message, + ) + assert not non_primitive_field.is_primitive + + +def test_python_ident(): + Type = descriptor_pb2.FieldDescriptorProto.Type + field = make_field(type=Type.Value('TYPE_BOOL')) + assert field.python_ident == 'bool' + + +def test_python_ident_repeated(): + Type = descriptor_pb2.FieldDescriptorProto.Type + REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + field = make_field(type=Type.Value('TYPE_BOOL'), label=REP) + assert field.python_ident == 'Sequence[bool]' + + +def test_repeated(): + REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + field = make_field(label=REP) + assert field.repeated + + +def test_not_repeated(): + OPT = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_OPTIONAL') + field = make_field(label=OPT) + assert not field.repeated + + +def test_required(): + field = make_field() + field.options.Extensions[annotations_pb2.required] = True + assert field.required -def test_field_metadata(): - field = get_field() - assert field.meta.doc == 'Lorem ipsum dolor set amet' +def test_not_required(): + field = make_field() + assert not field.required + + +def test_sphinx_ident(): + Type = descriptor_pb2.FieldDescriptorProto.Type + field = make_field(type=Type.Value('TYPE_BOOL')) + assert field.sphinx_ident == 'bool' + + +def test_sphinx_ident_repeated(): + Type = descriptor_pb2.FieldDescriptorProto.Type + REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + field = make_field(type=Type.Value('TYPE_BOOL'), label=REP) + assert field.sphinx_ident == 'Sequence[bool]' + + +def test_type_primitives(): + T = descriptor_pb2.FieldDescriptorProto.Type + assert make_field(type=T.Value('TYPE_FLOAT')).type.python_type == float + assert make_field(type=T.Value('TYPE_INT64')).type.python_type == int + assert make_field(type=T.Value('TYPE_BOOL')).type.python_type == bool + assert 
make_field(type=T.Value('TYPE_STRING')).type.python_type == str + assert make_field(type=T.Value('TYPE_BYTES')).type.python_type == bytes + + +def test_type_message(): + T = descriptor_pb2.FieldDescriptorProto.Type + message = wrappers.MessageType( + fields={}, + message_pb=descriptor_pb2.DescriptorProto(), + ) + field = make_field( + type=T.Value('TYPE_MESSAGE'), + type_name='bogus.Message', + message=message, + ) + assert field.type == message + + +def test_type_enum(): + T = descriptor_pb2.FieldDescriptorProto.Type + enum = wrappers.EnumType( + values={}, + enum_pb=descriptor_pb2.EnumDescriptorProto(), + ) + field = make_field( + type=T.Value('TYPE_ENUM'), + type_name='bogus.Enumerable', + enum=enum, + ) + assert field.type == enum + + +def test_type_invalid(): + T = descriptor_pb2.FieldDescriptorProto.Type + with pytest.raises(TypeError): + make_field(type=T.Value('TYPE_GROUP')).type + + +def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: + kwargs.setdefault('name', 'my_field') + kwargs.setdefault('number', 1) + kwargs.setdefault('type', + descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_BOOL'), + ) + field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) + return wrappers.Field(field_pb=field_pb, message=message, enum=enum) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 19ad71b8350c..50f4f1ff5d20 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -12,36 +12,115 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections +from typing import Sequence + +import pytest + from google.protobuf import descriptor_pb2 from api_factory.schema import metadata from api_factory.schema import wrappers -def get_message() -> wrappers.MessageType: - message_pb = descriptor_pb2.DescriptorProto(name='MyMessage') - return wrappers.MessageType( - fields=[], - message_pb=message_pb, - meta=metadata.Metadata( - address=metadata.Address(package=['foo', 'bar'], module='baz'), - documentation=descriptor_pb2.SourceCodeInfo.Location( - leading_comments='Lorem ipsum dolor set amet', - ), - ), - ) - - def test_message_properties(): - message = get_message() + message = make_message('MyMessage') assert message.name == 'MyMessage' def test_message_docstring(): - message = get_message() - assert message.meta.doc == 'Lorem ipsum dolor set amet' + L = descriptor_pb2.SourceCodeInfo.Location + + meta = metadata.Metadata(documentation=L( + leading_comments='Lorem ipsum', + trailing_comments='dolor set amet', + )) + message = make_message('Name', meta=meta) + assert message.meta.doc == 'Lorem ipsum' def test_message_python_package(): - message = get_message() - assert message.pb2_module == 'baz_pb2' + message = make_message('Spam', module='eggs') + assert message.python_module == 'eggs_pb2' + + +def test_message_python_ident(): + message = make_message('Baz', package='foo.v1', module='bar') + assert message.python_ident == 'bar_pb2.Baz' + + +def test_message_sphinx_ident(): + message = make_message('Baz', package='foo.v1', module='bar') + assert message.sphinx_ident == '~.bar_pb2.Baz' + + +def test_get_field(): + fields = (make_field('field_one'), make_field('field_two')) + message = make_message('Message', fields=fields) + field_one = message.get_field('field_one') + assert isinstance(field_one, wrappers.Field) + assert field_one.name == 'field_one' + + +def test_get_field_recursive(): + # Create the inner message. 
+ inner_fields = (make_field('zero'), make_field('one')) + inner = make_message('Inner', fields=inner_fields, package='foo.v1') + + # Create the outer message, which contains an Inner as a field. + outer_field = make_field('inner', message=inner) + outer = make_message('Outer', fields=(outer_field,)) + + # Assert that a recusive retrieval works. + assert outer.get_field('inner', 'zero') == inner_fields[0] + assert outer.get_field('inner', 'one') == inner_fields[1] + + +def test_get_field_nonterminal_repeated_error(): + # Create the inner message. + inner_fields = (make_field('zero'), make_field('one')) + inner = make_message('Inner', fields=inner_fields, package='foo.v1') + + # Create the outer message, which contains an Inner as a field. + outer_field = make_field('inner', message=inner, repeated=True) + outer = make_message('Outer', fields=(outer_field,)) + + # Assert that a recusive retrieval fails. + with pytest.raises(KeyError): + assert outer.get_field('inner', 'zero') == inner_fields[0] + with pytest.raises(KeyError): + assert outer.get_field('inner', 'one') == inner_fields[1] + + +def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', + fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, + ) -> wrappers.MessageType: + message_pb = descriptor_pb2.DescriptorProto( + name=name, + field=[i.field_pb for i in fields], + ) + return wrappers.MessageType( + message_pb=message_pb, + fields=collections.OrderedDict((i.name, i) for i in fields), + meta=meta or metadata.Metadata(address=metadata.Address( + package=tuple(package.split('.')), + module=module, + )), + ) + + +def make_field(name: str, repeated: bool = False, + message: wrappers.MessageType = None, + meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: + if message: + kwargs['type_name'] = str(message.meta.address) + field_pb = descriptor_pb2.FieldDescriptorProto( + name=name, + label=3 if repeated else 1, + **kwargs + ) + return wrappers.Field( + 
field_pb=field_pb, + message=message, + meta=meta or metadata.Metadata(), + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 48ecf7bbf902..156b3c896401 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -13,7 +13,9 @@ # limitations under the License. import collections +from typing import Sequence +from google.api import annotations_pb2 from google.api import signature_pb2 from google.protobuf import descriptor_pb2 @@ -21,54 +23,103 @@ from api_factory.schema import wrappers -def get_method() -> wrappers.Method: - # Create the address where this method lives, and the types live, - # and make them distinct. - method_addr = metadata.Address(package=['foo', 'bar'], module='baz') - types_addr = metadata.Address(package=['foo', 'bar'], module='bacon') +def test_method_types(): + input_msg = make_message(name='Input', module='baz') + output_msg = make_message(name='Output', module='baz') + method = make_method('DoSomething', input_msg, output_msg, + package='foo.bar', module='bacon') + assert method.name == 'DoSomething' + assert method.input.name == 'Input' + assert method.input.python_module == 'baz_pb2' + assert method.output.name == 'Output' + assert method.output.python_module == 'baz_pb2' + + +def test_method_signature(): + # Set up a meaningful input message. + input_msg = make_message(name='Input', fields=( + make_field('int_field', type=5), + make_field('bool_field', type=8), + make_field('float_field', type=2), + )) + + # Create the method. + method = make_method('SendStuff', input_message=input_msg) + + # Edit the underlying method pb2 post-hoc to add the appropriate annotation + # (google.api.signature). 
+ method.options.Extensions[annotations_pb2.method_signature].MergeFrom( + signature_pb2.MethodSignature(fields=['int_field', 'float_field']) + ) + + # We should get back just those two fields as part of the signature. + assert len(method.signatures) == 1 + signature = method.signatures[0] + assert tuple(signature.fields.keys()) == ('int_field', 'float_field') + + +def test_method_no_signature(): + assert len(make_method('Ping').signatures) == 0 + + +def test_method_field_headers(): + method = make_method('DoSomething') + assert isinstance(method.field_headers, collections.Sequence) - # Create the method pb2 and set an overload in it. + +def make_method( + name: str, input_message: wrappers.MessageType = None, + output_message: wrappers.MessageType = None, + package: str = 'foo.bar.v1', module: str = 'baz', + **kwargs) -> wrappers.Method: + # Use default input and output messages if they are not provided. + input_message = input_message or make_message('MethodInput') + output_message = output_message or make_message('MethodOutput') + + # Create the method pb2. method_pb = descriptor_pb2.MethodDescriptorProto( - name='DoTheThings', - input_type='foo.bar.Input', - output_type='foo.bar.Output', + name=name, + input_type=str(input_message.meta.address), + output_type=str(output_message.meta.address), ) # Instantiate the wrapper class. 
return wrappers.Method( method_pb=method_pb, - input=wrappers.MessageType( - fields=[], - message_pb=descriptor_pb2.DescriptorProto(name='Input'), - meta=metadata.Metadata(address=types_addr), - ), - output=wrappers.MessageType( - fields=[], - message_pb=descriptor_pb2.DescriptorProto(name='Output'), - meta=metadata.Metadata(address=types_addr), - ), - meta=metadata.Metadata(address=method_addr), + input=input_message, + output=output_message, + meta=metadata.Metadata(address=metadata.Address( + package=package, + module=module, + )), ) -def test_method_properties(): - method = get_method() - assert method.name == 'DoTheThings' - - -def test_method_types(): - method = get_method() - assert method.input.name == 'Input' - assert method.input.pb2_module == 'bacon_pb2' - assert method.output.name == 'Output' - assert method.output.pb2_module == 'bacon_pb2' - - -def test_method_signature(): - method = get_method() - assert isinstance(method.signature, signature_pb2.MethodSignature) +def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', + fields: Sequence[wrappers.Field] = (), + ) -> wrappers.MessageType: + message_pb = descriptor_pb2.DescriptorProto( + name=name, + field=[i.field_pb for i in fields], + ) + return wrappers.MessageType( + message_pb=message_pb, + fields=collections.OrderedDict((i.name, i) for i in fields), + meta=metadata.Metadata(address=metadata.Address( + package=tuple(package.split('.')), + module=module, + )), + ) -def test_method_field_headers(): - method = get_method() - assert isinstance(method.field_headers, collections.Sequence) +def make_field(name: str, repeated: bool = False, + meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: + field_pb = descriptor_pb2.FieldDescriptorProto( + name=name, + label=3 if repeated else 1, + **kwargs + ) + return wrappers.Field( + field_pb=field_pb, + meta=meta or metadata.Metadata(), + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py new file mode 100644 index 000000000000..357674ad7774 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py @@ -0,0 +1,40 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.protobuf import descriptor_pb2 + +from api_factory.schema import metadata +from api_factory.schema import wrappers + + +def test_operation(): + lro_response = wrappers.MessageType( + fields={}, + message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'), + ) + operation = wrappers.OperationType(lro_response=lro_response) + assert operation.name == 'Operation' + assert operation.python_ident == 'operation.Operation' + assert operation.sphinx_ident == '~.operation.Operation' + + +def test_operation_meta(): + lro_response = wrappers.MessageType( + fields={}, + message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'), + meta=metadata.Metadata(address=metadata.Address(module='foo')), + ) + operation = wrappers.OperationType(lro_response=lro_response) + assert 'representing a long-running operation' in operation.meta.doc + assert ':class:`~.foo_pb2.LroResponse`' in operation.meta.doc diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 9f6e8e608c8e..48ae7972628f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py 
+++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -49,9 +49,9 @@ def test_service_no_scopes(): assert len(service.oauth_scopes) == 0 -def test_service_pb2_modules(): +def test_service_python_modules(): service = make_service() - assert service.pb2_modules == ( + assert service.python_modules == ( ('a.b.v1', 'c_pb2'), ('foo', 'bacon_pb2'), ('foo', 'bar_pb2'), @@ -60,13 +60,13 @@ def test_service_pb2_modules(): ) -def test_service_pb2_modules_lro(): +def test_service_python_modules_lro(): service = make_service_with_method_options() - assert service.pb2_modules == ( + assert service.python_modules == ( ('foo', 'bar_pb2'), ('foo', 'baz_pb2'), ('foo', 'qux_pb2'), - ('google.longrunning', 'operations_pb2'), + ('google.api_core', 'operation'), ) @@ -128,7 +128,7 @@ def make_service_with_method_options(*, 'DoBigThing', 'foo.bar.ThingRequest', 'google.longrunning.operations.Operation', - lro_payload_type='foo.baz.ThingResponse', + lro_response_type='foo.baz.ThingResponse', lro_metadata_type='foo.qux.ThingMetadata', http_rule=http_rule, ) @@ -146,14 +146,12 @@ def make_service_with_method_options(*, def get_method(name: str, in_type: str, out_type: str, - lro_payload_type: str = '', + lro_response_type: str = '', lro_metadata_type: str = '', http_rule: http_pb2.HttpRule = None, ) -> wrappers.Method: input_ = get_message(in_type) output = get_message(out_type) - lro_payload = get_message(lro_payload_type) if lro_payload_type else None - lro_meta = get_message(lro_metadata_type) if lro_metadata_type else None # Define a method descriptor. Set the field headers if appropriate. 
method_pb = descriptor_pb2.MethodDescriptorProto( @@ -161,6 +159,11 @@ def get_method(name: str, input_type=input_.proto_path, output_type=output.proto_path, ) + if lro_response_type: + output = wrappers.OperationType( + lro_response=get_message(lro_response_type), + lro_metadata=get_message(lro_metadata_type), + ) if http_rule: ext_key = annotations_pb2.http method_pb.options.Extensions[ext_key].MergeFrom(http_rule) @@ -168,13 +171,15 @@ def get_method(name: str, return wrappers.Method( method_pb=method_pb, input=input_, - lro_metadata=lro_meta, - lro_payload=lro_payload, output=output, ) def get_message(dot_path: str) -> wrappers.MessageType: + # Pass explicit None through (for lro_metadata). + if dot_path is None: + return None + # Note: The `dot_path` here is distinct from the canonical proto path # because it includes the module, which the proto path does not. # diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py new file mode 100644 index 000000000000..afcefd25c7d9 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py @@ -0,0 +1,85 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import collections + +from google.protobuf import descriptor_pb2 + +from api_factory.schema import wrappers + + +def test_signature_dispatch_field(): + T = descriptor_pb2.FieldDescriptorProto.Type + fields = collections.OrderedDict(( + ('foo', make_field(name='foo', type=T.Value('TYPE_INT32'))), + ('bar', make_field(name='bar', type=T.Value('TYPE_BOOL'))), + )) + signature = wrappers.MethodSignature(name='spam', fields=fields) + assert signature.dispatch_field == fields['foo'] + + +def test_signatures_magic_methods(): + T = descriptor_pb2.FieldDescriptorProto.Type + fields = collections.OrderedDict(( + ('foo', make_field(name='foo', type=T.Value('TYPE_INT32'))), + ('bar', make_field(name='bar', type=T.Value('TYPE_BOOL'))), + )) + signatures = wrappers.MethodSignatures(all=( + wrappers.MethodSignature(name='spam', fields=fields), + wrappers.MethodSignature(name='eggs', fields={ + 'foo': fields['foo'], + }), + )) + assert len(signatures) == 2 + assert tuple([i for i in signatures]) == signatures.all + assert signatures[0] == signatures.all[0] + + +def test_signatures_single_dispatch(): + T = descriptor_pb2.FieldDescriptorProto.Type + fields = ( + ('foo', make_field( + message=wrappers.MessageType( + fields={}, + message_pb=descriptor_pb2.DescriptorProto(name='Bacon'), + ), + name='bar', + type=T.Value('TYPE_MESSAGE'), + type_name='bogus.Message', + )), + ('bar', make_field(name='foo', type=T.Value('TYPE_INT32'))), + ) + signatures = wrappers.MethodSignatures(all=( + wrappers.MethodSignature( + name='spam', + fields=collections.OrderedDict(fields), + ), + wrappers.MethodSignature( + name='eggs', + fields=collections.OrderedDict(reversed(fields)), + ), + )) + assert len(signatures) == 2 + assert len(signatures.single_dispatch) == 1 + assert signatures.single_dispatch[0] == signatures[1] + + +def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: + kwargs.setdefault('name', 'my_field') + kwargs.setdefault('number', 1) + kwargs.setdefault('type', + 
descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_BOOL'), + ) + field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) + return wrappers.Field(field_pb=field_pb, message=message, enum=enum) From 84110f057751b48b40fed5e8528a87160f09be1d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 25 Jul 2018 11:36:15 -0700 Subject: [PATCH 0008/1339] Switch Jinja to strict undefined. (#11) This means any use of an undefined variable will raise an exception immediately, rather than silently printing empty string. --- .../gapic-generator/api_factory/generator/generator.py | 9 ++++++--- packages/gapic-generator/api_factory/schema/wrappers.py | 2 +- .../$namespace/$name_$version/$service/client.py.j2 | 7 +------ .../$name_$version/$service/transports/__init__.py.j2 | 2 +- .../gapic-generator/api_factory/templates/_base.py.j2 | 6 ------ 5 files changed, 9 insertions(+), 17 deletions(-) diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index e4be4cde8828..bd9f94c4e2a9 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -44,9 +44,12 @@ def __init__(self, api_schema: api.API) -> None: self._api = api_schema # Create the jinja environment with which to render templates. - self._env = jinja2.Environment(loader=TemplateLoader( - searchpath=os.path.join(_dirname, '..', 'templates'), - )) + self._env = jinja2.Environment( + loader=TemplateLoader( + searchpath=os.path.join(_dirname, '..', 'templates'), + ), + undefined=jinja2.StrictUndefined, + ) # Add filters which templates require. 
self._env.filters['snake_case'] = utils.to_snake_case diff --git a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/api_factory/schema/wrappers.py index 1063a8d7e1ba..3e0c0083d513 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/api_factory/schema/wrappers.py @@ -377,7 +377,7 @@ def signatures(self) -> Tuple[signature_pb2.MethodSignature]: # Sanity check: If there are no signatures (which should be by far # the common case), just abort now. if len(sig_pb2.fields) == 0: - return () + return MethodSignatures(all=()) # Signatures are annotated with an `additional_signatures` key that # allows for specifying additional signatures. This is an uncommon diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 index f415c53e1256..31e25ee0b5bf 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 @@ -16,11 +16,6 @@ from .transports import get_transport_class from .transports import {{ service.name }}Transport -# LIBRARY_VERSION: str = pkg_resources.get_distribution( -# '{{ api.warehouse_package_name }}', -# ).version - - class {{ service.name }}: """{{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} """ @@ -107,7 +102,7 @@ class {{ service.name }}: # Send the request. 
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata) - {%- if method.output.lro_response %} + {%- if method.output.lro_response is defined %} # Wrap the response in an operation future response = operation.from_gapic( diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 index 5db3dc32b83d..1c77c8605280 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 @@ -34,7 +34,7 @@ def get_transport_class( then the first transport in the registry is used. Returns: - Type[{{ service.label }}Transport]: The transport class to use. + Type[{{ service.name }}Transport]: The transport class to use. """ # If a specific transport is requested, return that one. if label: diff --git a/packages/gapic-generator/api_factory/templates/_base.py.j2 b/packages/gapic-generator/api_factory/templates/_base.py.j2 index f1e8e49db186..0ced14207001 100644 --- a/packages/gapic-generator/api_factory/templates/_base.py.j2 +++ b/packages/gapic-generator/api_factory/templates/_base.py.j2 @@ -1,9 +1,3 @@ # -*- coding: utf-8 -*- -{%- if api.copyright %} -# Copyright {{ api.copyright.year }} {{ api.copyright.label }} -{%- if api.copyright.license %} -# -# {{ api.copyright.license.boilerplate_notice|subsequent_indent('# ') }} -{%- endif %}{% endif %} {% block content %} {% endblock %} From a29e60b58918b828dcfe5c66e47253d0b9ced4c0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 25 Jul 2018 11:49:49 -0700 Subject: [PATCH 0009/1339] Remove the antecedent_trailer argument from wrap. (#13) It is unused. 
--- .../api_factory/utils/lines.py | 34 ++++++------------- 1 file changed, 11 insertions(+), 23 deletions(-) diff --git a/packages/gapic-generator/api_factory/utils/lines.py b/packages/gapic-generator/api_factory/utils/lines.py index 669c9484c72a..1a6188cc6112 100644 --- a/packages/gapic-generator/api_factory/utils/lines.py +++ b/packages/gapic-generator/api_factory/utils/lines.py @@ -36,7 +36,7 @@ def subsequent_indent(text: str, prefix: str) -> str: def wrap(text: str, width: int, initial_width: int = None, - subsequent_indent: str = '', antecedent_trailer: str = '') -> str: + subsequent_indent: str = '') -> str: """Wrap the given string to the given width. This uses :meth:`textwrap.fill` under the hood, but provides functionality @@ -54,8 +54,6 @@ def wrap(text: str, width: int, initial_width: int = None, different. Defaults to the value of ``width``. subsequent_indent (str): A string to be prepended to every line except the first. - antecedent_trailer (str): A string to be appended to every line - except the last. Returns: str: The wrapped string. @@ -66,10 +64,6 @@ def wrap(text: str, width: int, initial_width: int = None, if not text: return '' - # Reduce the values by the length of the trailing string, if any. - width -= len(antecedent_trailer) - initial_width -= len(antecedent_trailer) - # Protocol buffers preserves single initial spaces after line breaks # when parsing comments (such as the space before the "w" in "when" here). # Re-wrapping causes these to be two spaces; correct for this. @@ -86,20 +80,14 @@ def wrap(text: str, width: int, initial_width: int = None, first = f'{initial[0]}\n' text = ' '.join(initial[1:]) - # Sanity check: If that was the only line, abort here, *without* - # the antecedent trailer. - if not text: - return initial[0] - # Wrap the remainder of the string at the desired width. 
- text = first + textwrap.fill( - break_long_words=False, - initial_indent=subsequent_indent if first else '', - subsequent_indent=subsequent_indent, - text=text, - width=width, - ) - - # Replace all the line endings with the antecedent trailer, - # and return the resulting string. - return text.replace('\n', f'{antecedent_trailer}\n') + return '{first}{text}'.format( + first=first, + text=textwrap.fill( + break_long_words=False, + initial_indent=subsequent_indent if first else '', + subsequent_indent=subsequent_indent, + text=text, + width=width, + ), + ).rstrip('\n') From f3f54426d2acb0b344b7381a9716cb76809e300a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 25 Jul 2018 11:50:53 -0700 Subject: [PATCH 0010/1339] Remove sending len to Jinja. (#12) The length filter exists, and we are no longer using len anyway. --- packages/gapic-generator/api_factory/generator/generator.py | 1 - packages/gapic-generator/docs/templates.rst | 5 ----- 2 files changed, 6 deletions(-) diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index bd9f94c4e2a9..a11affcd177c 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -114,7 +114,6 @@ def _render_templates( answer.append(CodeGeneratorResponse.File( content=self._env.get_template(template_name).render( api=self._api, - len=len, **additional_context ).strip() + '\n', name=self._get_output_filename( diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index e7da81ee56d2..d9008cf8c63f 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -62,11 +62,6 @@ Additionally, these templates receive two variables: the ``api`` variable discussed above, as well as a variable spelled ``service``, which corresponds to the :class:`~.schema.wrappers.Service` currently 
being iterated over. -.. note:: - - Templates technically receive one additional variable, ``len``; this is - the ``len`` function from the standard library. - Filters ~~~~~~~ From 88ba4488d600f33ed271ac7ad9293663e9e9e3b4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 25 Jul 2018 12:46:41 -0700 Subject: [PATCH 0011/1339] Remove subsequent_indent filter. (#14) It was only used for the code to print the Apache license, which is gone now. --- .../api_factory/generator/generator.py | 1 - .../api_factory/utils/__init__.py | 1 - .../api_factory/utils/lines.py | 20 ------------------- .../tests/unit/generator/test_generator.py | 3 +-- .../tests/unit/utils/test_lines.py | 7 ------- 5 files changed, 1 insertion(+), 31 deletions(-) diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index a11affcd177c..651e22eec10a 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -53,7 +53,6 @@ def __init__(self, api_schema: api.API) -> None: # Add filters which templates require. 
self._env.filters['snake_case'] = utils.to_snake_case - self._env.filters['subsequent_indent'] = utils.subsequent_indent self._env.filters['wrap'] = utils.wrap def get_response(self) -> CodeGeneratorResponse: diff --git a/packages/gapic-generator/api_factory/utils/__init__.py b/packages/gapic-generator/api_factory/utils/__init__.py index 2ec96ecf5118..a10eede32145 100644 --- a/packages/gapic-generator/api_factory/utils/__init__.py +++ b/packages/gapic-generator/api_factory/utils/__init__.py @@ -16,7 +16,6 @@ from api_factory.utils.case import to_snake_case from api_factory.utils.filename import to_valid_filename from api_factory.utils.filename import to_valid_module_name -from api_factory.utils.lines import subsequent_indent from api_factory.utils.lines import wrap from api_factory.utils.placeholder import Placeholder diff --git a/packages/gapic-generator/api_factory/utils/lines.py b/packages/gapic-generator/api_factory/utils/lines.py index 1a6188cc6112..b654aca8ad81 100644 --- a/packages/gapic-generator/api_factory/utils/lines.py +++ b/packages/gapic-generator/api_factory/utils/lines.py @@ -15,26 +15,6 @@ import textwrap -def subsequent_indent(text: str, prefix: str) -> str: - """Decorates the text string with the given prefix on hanging lines. - - A "hanging" line is any line except for the first one. After prefixing, - if any lines end in whitespace, that whitespace is stripped. - - This is provided to all templates as the ``subsequent_indent`` filter. - - Args: - text (str): The text string. - prefix (str): The prefix to use. - - Returns: - str: The string with all hanging lines prefixed. - """ - lines = text.split('\n') - lines[1:] = [f'{prefix}{s}'.rstrip() for s in lines[1:]] - return '\n'.join(lines) - - def wrap(text: str, width: int, initial_width: int = None, subsequent_indent: str = '') -> str: """Wrap the given string to the given width. 
diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 8573bad52763..ff3af2a2438b 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -28,7 +28,7 @@ from api_factory.schema import wrappers -def test_proto_builder_constructor(): +def test_constructor(): # Create a generator. g = generator.Generator(api_schema=make_api()) assert isinstance(g._api, api.API) @@ -38,7 +38,6 @@ def test_proto_builder_constructor(): # to establish this and templates will depend on it. assert isinstance(g._env, jinja2.Environment) assert 'snake_case' in g._env.filters - assert 'subsequent_indent' in g._env.filters assert 'wrap' in g._env.filters diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 955906769187..ad4352e88541 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -15,13 +15,6 @@ from api_factory.utils import lines -def test_subsequent_indent(): - assert lines.subsequent_indent( - text='# foo\nbar\nbaz', - prefix='# ', - ) == '# foo\n# bar\n# baz' - - def test_wrap_noop(): assert lines.wrap('foo bar baz', width=80) == 'foo bar baz' From b2f184579a6d96752e44d9ce0c439141df41f1c6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 26 Jul 2018 09:40:02 -0700 Subject: [PATCH 0012/1339] Add simple whitespace post-processing. (#16) This commit adds very basic whitespace formatting to nullify certain whitespace issues that are sufficiently difficult to get right in Jinja (particularly when `if` and `for` are involved). This is not meant as a replacement for `yapf`. It is completely reasonable to want to run the resulting code through `yapf`, but I do not want to force this on all template authors. 
This commit is more humble and just applies very specific fixes that are more or less universally adopted and sufficiently difficult to do. --- .../api_factory/generator/formatter.py | 41 +++++++++ .../api_factory/generator/generator.py | 15 +-- .../tests/unit/generator/test_formatter.py | 92 +++++++++++++++++++ 3 files changed, 142 insertions(+), 6 deletions(-) create mode 100644 packages/gapic-generator/api_factory/generator/formatter.py create mode 100644 packages/gapic-generator/tests/unit/generator/test_formatter.py diff --git a/packages/gapic-generator/api_factory/generator/formatter.py b/packages/gapic-generator/api_factory/generator/formatter.py new file mode 100644 index 000000000000..2aae59a63da7 --- /dev/null +++ b/packages/gapic-generator/api_factory/generator/formatter.py @@ -0,0 +1,41 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + + +def fix_whitespace(code: str) -> str: + """Perform basic whitespace post-processing. + + This corrects a couple of formatting issues that Jinja templates + may struggle with (particularly blank line count, which is tough to + get consistently right when ``if`` or ``for`` are involved). + + Args: + code (str): A string of code to be formatted. + + Returns + str: Formatted code. + """ + # Remove trailing whitespace from any line. + code = re.sub(r'[ ]+\n', '\n', code) + + # Ensure at most two blank lines before top level definitions. 
+ code = re.sub(r'\s+\n\n\n(class|def|@)', r'\n\n\n\1', code) + + # Ensure at most one line before nested definitions. + code = re.sub(r'\s+\n\n( )+(class|def|@)', r'\n\n\1\2', code) + + # All files shall end in one and exactly one line break. + return f'{code.rstrip()}\n' diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/api_factory/generator/generator.py index 651e22eec10a..d3147a005b8d 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/api_factory/generator/generator.py @@ -22,7 +22,8 @@ from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse from api_factory import utils -from api_factory.generator.loader import TemplateLoader +from api_factory.generator import formatter +from api_factory.generator import loader from api_factory.schema import api @@ -45,7 +46,7 @@ def __init__(self, api_schema: api.API) -> None: # Create the jinja environment with which to render templates. self._env = jinja2.Environment( - loader=TemplateLoader( + loader=loader.TemplateLoader( searchpath=os.path.join(_dirname, '..', 'templates'), ), undefined=jinja2.StrictUndefined, @@ -111,10 +112,12 @@ def _render_templates( for template_name in templates: # Generate the File object. 
answer.append(CodeGeneratorResponse.File( - content=self._env.get_template(template_name).render( - api=self._api, - **additional_context - ).strip() + '\n', + content=formatter.fix_whitespace( + self._env.get_template(template_name).render( + api=self._api, + **additional_context + ), + ), name=self._get_output_filename( template_name, context=additional_context, diff --git a/packages/gapic-generator/tests/unit/generator/test_formatter.py b/packages/gapic-generator/tests/unit/generator/test_formatter.py new file mode 100644 index 000000000000..bede0949db24 --- /dev/null +++ b/packages/gapic-generator/tests/unit/generator/test_formatter.py @@ -0,0 +1,92 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import textwrap + +from api_factory.generator import formatter + + +def test_fix_whitespace_top_level(): + assert formatter.fix_whitespace(textwrap.dedent("""\ + import something + + + class Correct: + pass + + + + class TooFarDown: + pass + + class TooClose: # remains too close + pass + """)) == textwrap.dedent("""\ + import something + + + class Correct: + pass + + + class TooFarDown: + pass + + class TooClose: # remains too close + pass + """) + + +def test_fix_whitespace_nested(): + assert formatter.fix_whitespace(textwrap.dedent("""\ + class JustAClass: + def foo(self): + pass + + + def too_far_down(self): + pass + """)) == textwrap.dedent("""\ + class JustAClass: + def foo(self): + pass + + def too_far_down(self): + pass + """) + + +def test_fix_whitespace_decorators(): + assert formatter.fix_whitespace(textwrap.dedent("""\ + class JustAClass: + def foo(self): + pass + + + @property + def too_far_down(self): + return 42 + """)) == textwrap.dedent("""\ + class JustAClass: + def foo(self): + pass + + @property + def too_far_down(self): + return 42 + """) + + +def test_file_newline_ending(): + assert formatter.fix_whitespace('') == '\n' From dcba21f2b69d4c227fec5757af59be2af63c5b45 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 26 Jul 2018 12:05:52 -0700 Subject: [PATCH 0013/1339] Refactor the wrap filter. (#15) This replaces the `initial_width` and `subsequent_indent` keyword arguments with `offset` and `indent`, both of which are ints. The former refers to positioning for the first line; the latter to subsequent indenting. This does remove the ability to have a `subsequent_indent` which is anything other than spaces, which did not end up being used in real life (at least not yet). If it is needed, the `subsequent_indent` stand-alone filter could be resurrected (see #14). This change markedly improves readability of templates where `wrap` is being used. 
Similarly, the syntactic change from "initial width" to "initial offset" ends up being far more intuitive (I had used "initial width" incorrectly about half the time and had to debug it). --- .../$name_$version/$service/client.py.j2 | 19 ++++----- .../$service/transports/grpc.py.j2 | 14 +++---- .../$service/transports/http.py.j2 | 15 +++---- .../api_factory/utils/lines.py | 42 ++++++++++--------- .../tests/unit/utils/test_lines.py | 19 ++++----- 5 files changed, 53 insertions(+), 56 deletions(-) diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 index 31e25ee0b5bf..e9322adf3f44 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 @@ -17,7 +17,7 @@ from .transports import {{ service.name }}Transport class {{ service.name }}: - """{{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} + """{{ service.meta.doc|wrap(width=72, offset=7, indent=4) }} """ def __init__(self, *, credentials: credentials.Credentials = None, @@ -57,12 +57,12 @@ class {{ service.name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.python_ident }}: - """{{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} + """{{ method.meta.doc|wrap(width=72, offset=11, indent=8) }} Args: request ({{ method.input.sphinx_ident }}): - The request object. {{ method.input.meta.doc|wrap(width=72, - initial_width=36, subsequent_indent=' ' * 16) }} + The request object.{{ ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} retry (~.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -71,8 +71,7 @@ class {{ service.name }}: Returns: {{ method.output.sphinx_ident }}: - {{ method.output.meta.doc|wrap(width=72, initial_width=56, - subsequent_indent=' ' * 16) }} + {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Coerce the request to the protocol buffer object. if not isinstance(request, {{ method.input.python_ident }}): @@ -129,13 +128,12 @@ class {{ service.name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.python_ident }}: - """{{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} + """{{ method.meta.doc|wrap(width=72, offset=11, indent=8) }} Args: {%- for field in signature.fields.values() %} {{ field.name }} ({{ field.sphinx_ident }}): - {{ field.meta.doc|wrap(width=72, initial_width=56, - subsequent_indent=' ' * 16) }} + {{ field.meta.doc|wrap(width=72, indent=16) }} {%- endfor %} retry (~.retry.Retry): Designation of what errors, if any, should be retried. @@ -145,8 +143,7 @@ class {{ service.name }}: Returns: {{ method.output.sphinx_ident }}: - {{ method.output.meta.doc|wrap(width=72, initial_width=56, - subsequent_indent=' ' * 16) }} + {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ return self.{{ method.name|snake_case }}( {{ method.input.python_ident }}( diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 index 7b7400daf777..33072d479331 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 @@ -20,7 +20,7 @@ from .base import {{ service.name }}Transport class {{ service.name }}GrpcTransport({{ service.name }}Transport): """gRPC backend transport for {{ service.name }}. 
- {{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} + {{ service.meta.doc|wrap(width=72, indent=4) }} This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -84,17 +84,17 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.python_ident }}], {{ method.output.python_ident }}]: - """Return a callable for the - {{- ' ' + (method.name|snake_case).replace('_',' ')|wrap(width=70, - initial_width=25, subsequent_indent=" ") }} method over gRPC. + """Return a callable for the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=40, indent=8) }} + {{- ' ' -}} method over gRPC. - {{ method.meta.doc|wrap(width=72, subsequent_indent=' ' * 8) }} + {{ method.meta.doc|wrap(width=72, indent=8) }} Returns: Callable[[~.{{ method.input.name }}], ~.{{ method.output.name }}]: - {{ method.output.meta.doc|wrap(width=72, initial_width=56, - subsequent_indent=' ' * 16) }} + {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Generate a "stub function" on-the-fly which will actually make # the request. diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 index 7eb0ac3335ae..5abf586d54d7 100644 --- a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 +++ b/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 @@ -17,7 +17,7 @@ from .base import {{ service.name }}Transport class {{ service.name }}HttpTransport({{ service.name }}Transport): """HTTP backend transport for {{ service.name }}. 
- {{ service.meta.doc|wrap(width=72, subsequent_indent=' ') }} + {{ service.meta.doc|wrap(width=72, indent=4) }} This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -68,20 +68,21 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): request: {{ method.input.python_module }}.{{ method.input.name }}, *, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.python_module }}.{{ method.output.name }}: - """Call the {{ (method.name|snake_case).replace('_',' ')|wrap(width=70, - initial_width=25, subsequent_indent=" ") }} method over HTTP. + """Call the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. Args: request (~.{{ method.input.python_ident }}): - The request object. {{ method.input.meta.doc|wrap(width=72, - initial_width=36, subsequent_indent=' ' * 16) }} + The request object. {{- ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. Returns: ~.{{ method.output.python_ident }}: - {{ method.output.meta.doc|wrap(width=72, initial_width=56, - subsequent_indent=' ' * 16) }} + {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Serialize the input. data = request.SerializeToString() diff --git a/packages/gapic-generator/api_factory/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index b654aca8ad81..f459fd34444f 100644 --- a/packages/gapic-generator/api_factory/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -15,47 +15,49 @@ import textwrap -def wrap(text: str, width: int, initial_width: int = None, - subsequent_indent: str = '') -> str: +def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: """Wrap the given string to the given width. 
- This uses :meth:`textwrap.fill` under the hood, but provides functionality - for the initial width, as well as a common line ending for every line - but the last. + This uses :meth:`textwrap.fill` under the hood, but provides useful + offset functionality for Jinja templates. This is provided to all templates as the ``wrap`` filter. Args: text (str): The initial text string. - width (int): The width at which to wrap the text. If either - ``subsequent_indent`` or ``antecedent_trailer`` are provided, - their width will be automatically counted against this. - initial_width (int): Optional. The width of the first line, if - different. Defaults to the value of ``width``. - subsequent_indent (str): A string to be prepended to every line - except the first. + width (int): The width at which to wrap the text. If offset is + provided, these are automatically counted against this. + offset (int): The offset for the first line of text. + This value is subtracted from ``width`` for the first line + only, and is intended to represent the vertical position of + the first line as already present in the template. + Defaults to the value of ``indent``. + indent (int): The number of spaces to indent all lines after the + first one. Returns: str: The wrapped string. """ - initial_width = initial_width or width - # Sanity check: If there is empty text, abort. if not text: return '' + # If the offset is None, default it to the indent value. + if offset is None: + offset = indent + # Protocol buffers preserves single initial spaces after line breaks # when parsing comments (such as the space before the "w" in "when" here). # Re-wrapping causes these to be two spaces; correct for this. text = text.replace('\n ', '\n') - # If the initial width is different, break off the beginning of the - # string. + # If the initial width is different (in other words, the initial offset + # is non-zero), break off the beginning of the string. 
first = '' - if initial_width != width: + if offset > 0: initial = textwrap.wrap(text, break_long_words=False, - width=initial_width, + width=width - offset, ) first = f'{initial[0]}\n' text = ' '.join(initial[1:]) @@ -65,8 +67,8 @@ def wrap(text: str, width: int, initial_width: int = None, first=first, text=textwrap.fill( break_long_words=False, - initial_indent=subsequent_indent if first else '', - subsequent_indent=subsequent_indent, + initial_indent=' ' * indent if first else '', + subsequent_indent=' ' * indent, text=text, width=width, ), diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index ad4352e88541..853a0ec39de7 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -31,21 +31,18 @@ def test_wrap_strips(): assert lines.wrap('foo bar baz ', width=80) == 'foo bar baz' -def test_wrap_subsequent_indent(): - assert lines.wrap( - '# foo bar baz', - width=5, - subsequent_indent='# ', - ) == '# foo\n# bar\n# baz' +def test_wrap_subsequent_offset(): + assert lines.wrap('foo bar baz', + width=5, offset=0, indent=2, + ) == 'foo\n bar\n baz' -def test_wrap_initial_width(): +def test_wrap_initial_offset(): assert lines.wrap( 'The hail in Wales falls mainly on the snails.', - width=20, - initial_width=8, + width=20, offset=12, indent=0, ) == 'The hail\nin Wales falls\nmainly on the\nsnails.' -def test_wrap_initial_width_short(): - assert lines.wrap('foo bar', width=30, initial_width=20) == 'foo bar' +def test_wrap_indent_short(): + assert lines.wrap('foo bar', width=30, indent=10) == 'foo bar' From 320db0fc2f180bc9382f7afd2585a2b93907d347 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 26 Jul 2018 12:25:42 -0700 Subject: [PATCH 0014/1339] Rename api_factory module to gapic. 
(#17) --- packages/gapic-generator/docs/process.rst | 2 +- .../gapic-generator/docs/reference/generator.rst | 6 +++--- packages/gapic-generator/docs/reference/schema.rst | 10 +++++----- packages/gapic-generator/docs/reference/utils.rst | 4 ++-- .../{api_factory => gapic}/__init__.py | 0 .../{api_factory => gapic}/cli/dump.py | 0 .../{api_factory => gapic}/cli/generate.py | 4 ++-- .../{api_factory => gapic}/generator/__init__.py | 0 .../{api_factory => gapic}/generator/formatter.py | 0 .../{api_factory => gapic}/generator/generator.py | 8 ++++---- .../{api_factory => gapic}/generator/loader.py | 2 +- .../{api_factory => gapic}/schema/__init__.py | 6 +++--- .../{api_factory => gapic}/schema/api.py | 8 ++++---- .../{api_factory => gapic}/schema/metadata.py | 0 .../{api_factory => gapic}/schema/naming.py | 2 +- .../{api_factory => gapic}/schema/wrappers.py | 4 ++-- .../templates/$namespace/$name/__init__.py.j2 | 0 .../$name_$version/$service/__init__.py.j2 | 0 .../$namespace/$name_$version/$service/client.py.j2 | 0 .../$service/transports/__init__.py.j2 | 0 .../$name_$version/$service/transports/base.py.j2 | 0 .../$name_$version/$service/transports/grpc.py.j2 | 0 .../$name_$version/$service/transports/http.py.j2 | 0 .../$namespace/$name_$version/__init__.py.j2 | 0 .../templates/$namespace/$name_$version/utils.py.j2 | 0 .../{api_factory => gapic}/templates/_base.py.j2 | 0 .../{api_factory => gapic}/templates/setup.py.j2 | 0 .../{api_factory => gapic}/utils/__init__.py | 12 ++++++------ .../{api_factory => gapic}/utils/cache.py | 0 .../{api_factory => gapic}/utils/case.py | 0 .../{api_factory => gapic}/utils/filename.py | 0 .../{api_factory => gapic}/utils/lines.py | 0 .../{api_factory => gapic}/utils/placeholder.py | 0 packages/gapic-generator/nox.py | 2 +- packages/gapic-generator/setup.py | 4 ++-- .../tests/unit/generator/test_formatter.py | 2 +- .../tests/unit/generator/test_generator.py | 8 ++++---- .../tests/unit/generator/test_loader.py | 2 +- 
.../gapic-generator/tests/unit/schema/test_api.py | 6 +++--- .../tests/unit/schema/test_metadata.py | 2 +- .../gapic-generator/tests/unit/schema/test_naming.py | 2 +- .../tests/unit/schema/wrappers/test_enums.py | 4 ++-- .../tests/unit/schema/wrappers/test_field.py | 4 ++-- .../tests/unit/schema/wrappers/test_message.py | 4 ++-- .../tests/unit/schema/wrappers/test_method.py | 4 ++-- .../tests/unit/schema/wrappers/test_operation.py | 4 ++-- .../tests/unit/schema/wrappers/test_service.py | 4 ++-- .../tests/unit/schema/wrappers/test_signature.py | 2 +- .../gapic-generator/tests/unit/utils/test_cache.py | 2 +- .../gapic-generator/tests/unit/utils/test_case.py | 2 +- .../tests/unit/utils/test_filename.py | 2 +- .../gapic-generator/tests/unit/utils/test_lines.py | 2 +- .../tests/unit/utils/test_placeholder.py | 2 +- 53 files changed, 66 insertions(+), 66 deletions(-) rename packages/gapic-generator/{api_factory => gapic}/__init__.py (100%) rename packages/gapic-generator/{api_factory => gapic}/cli/dump.py (100%) rename packages/gapic-generator/{api_factory => gapic}/cli/generate.py (97%) rename packages/gapic-generator/{api_factory => gapic}/generator/__init__.py (100%) rename packages/gapic-generator/{api_factory => gapic}/generator/formatter.py (100%) rename packages/gapic-generator/{api_factory => gapic}/generator/generator.py (97%) rename packages/gapic-generator/{api_factory => gapic}/generator/loader.py (98%) rename packages/gapic-generator/{api_factory => gapic}/schema/__init__.py (88%) rename packages/gapic-generator/{api_factory => gapic}/schema/api.py (99%) rename packages/gapic-generator/{api_factory => gapic}/schema/metadata.py (100%) rename packages/gapic-generator/{api_factory => gapic}/schema/naming.py (99%) rename packages/gapic-generator/{api_factory => gapic}/schema/wrappers.py (99%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name/__init__.py.j2 (100%) rename packages/gapic-generator/{api_factory => 
gapic}/templates/$namespace/$name_$version/$service/__init__.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/$service/client.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/$service/transports/base.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/$service/transports/http.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/__init__.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/$namespace/$name_$version/utils.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/_base.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/templates/setup.py.j2 (100%) rename packages/gapic-generator/{api_factory => gapic}/utils/__init__.py (70%) rename packages/gapic-generator/{api_factory => gapic}/utils/cache.py (100%) rename packages/gapic-generator/{api_factory => gapic}/utils/case.py (100%) rename packages/gapic-generator/{api_factory => gapic}/utils/filename.py (100%) rename packages/gapic-generator/{api_factory => gapic}/utils/lines.py (100%) rename packages/gapic-generator/{api_factory => gapic}/utils/placeholder.py (100%) diff --git a/packages/gapic-generator/docs/process.rst b/packages/gapic-generator/docs/process.rst index 4e3f3c583132..13c7b6faae28 100644 --- a/packages/gapic-generator/docs/process.rst +++ b/packages/gapic-generator/docs/process.rst @@ -39,7 +39,7 @@ contract model to be able to follow what this library is doing. Entry Point ~~~~~~~~~~~ -The entry point to this tool is ``api_factory/cli/generate.py``. 
The function +The entry point to this tool is ``gapic/cli/generate.py``. The function in this module is responsible for accepting CLI input, building the internal API schema, and then rendering templates and using them to build a response object. diff --git a/packages/gapic-generator/docs/reference/generator.rst b/packages/gapic-generator/docs/reference/generator.rst index 722d35b5d0b6..cf441d517a89 100644 --- a/packages/gapic-generator/docs/reference/generator.rst +++ b/packages/gapic-generator/docs/reference/generator.rst @@ -1,10 +1,10 @@ generator --------- -.. automodule:: api_factory.generator +.. automodule:: gapic.generator -.. automodule:: api_factory.generator.generator +.. automodule:: gapic.generator.generator :members: -.. automodule:: api_factory.generator.loader +.. automodule:: gapic.generator.loader :members: diff --git a/packages/gapic-generator/docs/reference/schema.rst b/packages/gapic-generator/docs/reference/schema.rst index 33e633a1ee29..e7d9252fdb54 100644 --- a/packages/gapic-generator/docs/reference/schema.rst +++ b/packages/gapic-generator/docs/reference/schema.rst @@ -1,29 +1,29 @@ schema ------ -.. automodule:: api_factory.schema +.. automodule:: gapic.schema api ~~~ -.. automodule:: api_factory.schema.api +.. automodule:: gapic.schema.api :members: metadata ~~~~~~~~ -.. automodule:: api_factory.schema.metadata +.. automodule:: gapic.schema.metadata :members: naming ~~~~~~ -.. automodule:: api_factory.schema.naming +.. automodule:: gapic.schema.naming :members: wrappers ~~~~~~~~ -.. automodule:: api_factory.schema.wrappers +.. automodule:: gapic.schema.wrappers :members: diff --git a/packages/gapic-generator/docs/reference/utils.rst b/packages/gapic-generator/docs/reference/utils.rst index 232157f7065c..3d0d728cb93f 100644 --- a/packages/gapic-generator/docs/reference/utils.rst +++ b/packages/gapic-generator/docs/reference/utils.rst @@ -1,8 +1,8 @@ utils ----- -.. automodule:: api_factory.utils.case +.. 
automodule:: gapic.utils.case :members: -.. automodule:: api_factory.utils.lines +.. automodule:: gapic.utils.lines :members: diff --git a/packages/gapic-generator/api_factory/__init__.py b/packages/gapic-generator/gapic/__init__.py similarity index 100% rename from packages/gapic-generator/api_factory/__init__.py rename to packages/gapic-generator/gapic/__init__.py diff --git a/packages/gapic-generator/api_factory/cli/dump.py b/packages/gapic-generator/gapic/cli/dump.py similarity index 100% rename from packages/gapic-generator/api_factory/cli/dump.py rename to packages/gapic-generator/gapic/cli/dump.py diff --git a/packages/gapic-generator/api_factory/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py similarity index 97% rename from packages/gapic-generator/api_factory/cli/generate.py rename to packages/gapic-generator/gapic/cli/generate.py index 44221a088896..614f1d9b0466 100644 --- a/packages/gapic-generator/api_factory/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -20,8 +20,8 @@ from google.protobuf.compiler import plugin_pb2 -from api_factory import generator -from api_factory.schema import api +from gapic import generator +from gapic.schema import api @click.command() diff --git a/packages/gapic-generator/api_factory/generator/__init__.py b/packages/gapic-generator/gapic/generator/__init__.py similarity index 100% rename from packages/gapic-generator/api_factory/generator/__init__.py rename to packages/gapic-generator/gapic/generator/__init__.py diff --git a/packages/gapic-generator/api_factory/generator/formatter.py b/packages/gapic-generator/gapic/generator/formatter.py similarity index 100% rename from packages/gapic-generator/api_factory/generator/formatter.py rename to packages/gapic-generator/gapic/generator/formatter.py diff --git a/packages/gapic-generator/api_factory/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py similarity index 97% rename from 
packages/gapic-generator/api_factory/generator/generator.py rename to packages/gapic-generator/gapic/generator/generator.py index d3147a005b8d..f079835f75a2 100644 --- a/packages/gapic-generator/api_factory/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -21,10 +21,10 @@ from google.protobuf.compiler.plugin_pb2 import CodeGeneratorRequest from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse -from api_factory import utils -from api_factory.generator import formatter -from api_factory.generator import loader -from api_factory.schema import api +from gapic import utils +from gapic.generator import formatter +from gapic.generator import loader +from gapic.schema import api class Generator: diff --git a/packages/gapic-generator/api_factory/generator/loader.py b/packages/gapic-generator/gapic/generator/loader.py similarity index 98% rename from packages/gapic-generator/api_factory/generator/loader.py rename to packages/gapic-generator/gapic/generator/loader.py index 059cec53f250..c001feadd6c3 100644 --- a/packages/gapic-generator/api_factory/generator/loader.py +++ b/packages/gapic-generator/gapic/generator/loader.py @@ -16,7 +16,7 @@ import jinja2 -from api_factory.utils import cached_property +from gapic.utils import cached_property class TemplateLoader(jinja2.FileSystemLoader): diff --git a/packages/gapic-generator/api_factory/schema/__init__.py b/packages/gapic-generator/gapic/schema/__init__.py similarity index 88% rename from packages/gapic-generator/api_factory/schema/__init__.py rename to packages/gapic-generator/gapic/schema/__init__.py index 6c43fc560383..f3d2d01ef198 100644 --- a/packages/gapic-generator/api_factory/schema/__init__.py +++ b/packages/gapic-generator/gapic/schema/__init__.py @@ -20,9 +20,9 @@ These three parts are divided into the three component modules. 
""" -from api_factory.schema.api import API -from api_factory.schema import metadata -from api_factory.schema import wrappers +from gapic.schema.api import API +from gapic.schema import metadata +from gapic.schema import wrappers __all__ = ( diff --git a/packages/gapic-generator/api_factory/schema/api.py b/packages/gapic-generator/gapic/schema/api.py similarity index 99% rename from packages/gapic-generator/api_factory/schema/api.py rename to packages/gapic-generator/gapic/schema/api.py index c2700b7c4f04..06ebdfac6cb0 100644 --- a/packages/gapic-generator/api_factory/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -25,10 +25,10 @@ from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata -from api_factory.schema import naming -from api_factory.schema import wrappers -from api_factory.utils import cached_property +from gapic.schema import metadata +from gapic.schema import naming +from gapic.schema import wrappers +from gapic.utils import cached_property @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/api_factory/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py similarity index 100% rename from packages/gapic-generator/api_factory/schema/metadata.py rename to packages/gapic-generator/gapic/schema/metadata.py diff --git a/packages/gapic-generator/api_factory/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py similarity index 99% rename from packages/gapic-generator/api_factory/schema/naming.py rename to packages/gapic-generator/gapic/schema/naming.py index 614d07c66763..be35c949beff 100644 --- a/packages/gapic-generator/api_factory/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -20,7 +20,7 @@ from google.api import annotations_pb2 from google.protobuf import descriptor_pb2 -from api_factory import utils +from gapic import utils @dataclasses.dataclass(frozen=True) diff --git 
a/packages/gapic-generator/api_factory/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py similarity index 99% rename from packages/gapic-generator/api_factory/schema/wrappers.py rename to packages/gapic-generator/gapic/schema/wrappers.py index 3e0c0083d513..329d5ea676b5 100644 --- a/packages/gapic-generator/api_factory/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -36,8 +36,8 @@ from google.api import signature_pb2 from google.protobuf import descriptor_pb2 -from api_factory import utils -from api_factory.schema import metadata +from gapic import utils +from gapic.schema import metadata @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/client.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 diff --git 
a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/base.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/base.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/base.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/$service/transports/http.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/__init__.py.j2 
b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/$namespace/$name_$version/utils.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/utils.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/$namespace/$name_$version/utils.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/utils.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/_base.py.j2 b/packages/gapic-generator/gapic/templates/_base.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/_base.py.j2 rename to packages/gapic-generator/gapic/templates/_base.py.j2 diff --git a/packages/gapic-generator/api_factory/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 similarity index 100% rename from packages/gapic-generator/api_factory/templates/setup.py.j2 rename to packages/gapic-generator/gapic/templates/setup.py.j2 diff --git a/packages/gapic-generator/api_factory/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py similarity index 70% rename from packages/gapic-generator/api_factory/utils/__init__.py rename to packages/gapic-generator/gapic/utils/__init__.py index a10eede32145..cd2201429d78 100644 --- a/packages/gapic-generator/api_factory/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from api_factory.utils.cache import cached_property -from api_factory.utils.case import to_snake_case -from api_factory.utils.filename import to_valid_filename -from api_factory.utils.filename import to_valid_module_name -from api_factory.utils.lines import wrap -from api_factory.utils.placeholder import Placeholder +from gapic.utils.cache import cached_property +from gapic.utils.case import to_snake_case +from gapic.utils.filename import to_valid_filename +from gapic.utils.filename import to_valid_module_name +from gapic.utils.lines import wrap +from gapic.utils.placeholder import Placeholder __all__ = ( diff --git a/packages/gapic-generator/api_factory/utils/cache.py b/packages/gapic-generator/gapic/utils/cache.py similarity index 100% rename from packages/gapic-generator/api_factory/utils/cache.py rename to packages/gapic-generator/gapic/utils/cache.py diff --git a/packages/gapic-generator/api_factory/utils/case.py b/packages/gapic-generator/gapic/utils/case.py similarity index 100% rename from packages/gapic-generator/api_factory/utils/case.py rename to packages/gapic-generator/gapic/utils/case.py diff --git a/packages/gapic-generator/api_factory/utils/filename.py b/packages/gapic-generator/gapic/utils/filename.py similarity index 100% rename from packages/gapic-generator/api_factory/utils/filename.py rename to packages/gapic-generator/gapic/utils/filename.py diff --git a/packages/gapic-generator/api_factory/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py similarity index 100% rename from packages/gapic-generator/api_factory/utils/lines.py rename to packages/gapic-generator/gapic/utils/lines.py diff --git a/packages/gapic-generator/api_factory/utils/placeholder.py b/packages/gapic-generator/gapic/utils/placeholder.py similarity index 100% rename from packages/gapic-generator/api_factory/utils/placeholder.py rename to packages/gapic-generator/gapic/utils/placeholder.py diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py 
index acdba84aedb4..2d468eca2687 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -33,7 +33,7 @@ def unit(session, python_version='3.7'): session.run( 'py.test', '--quiet', - '--cov=api_factory', + '--cov=gapic', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 81d17088ab84..c71181a0597f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -35,8 +35,8 @@ 'buffers', long_description=README, entry_points="""[console_scripts] - protoc-gen-dump=api_factory.cli.dump:dump - protoc-gen-pyclient=api_factory.cli.generate:generate + protoc-gen-dump=gapic.cli.dump:dump + protoc-gen-pyclient=gapic.cli.generate:generate """, platforms='Posix; MacOS X', include_package_data=True, diff --git a/packages/gapic-generator/tests/unit/generator/test_formatter.py b/packages/gapic-generator/tests/unit/generator/test_formatter.py index bede0949db24..2e72ad44d7de 100644 --- a/packages/gapic-generator/tests/unit/generator/test_formatter.py +++ b/packages/gapic-generator/tests/unit/generator/test_formatter.py @@ -14,7 +14,7 @@ import textwrap -from api_factory.generator import formatter +from gapic.generator import formatter def test_fix_whitespace_top_level(): diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index ff3af2a2438b..9d08e5418eca 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -22,10 +22,10 @@ from google.protobuf import descriptor_pb2 from google.protobuf.compiler import plugin_pb2 -from api_factory.generator import generator -from api_factory.schema import api -from api_factory.schema import naming -from api_factory.schema import wrappers +from gapic.generator import generator +from gapic.schema import 
api +from gapic.schema import naming +from gapic.schema import wrappers def test_constructor(): diff --git a/packages/gapic-generator/tests/unit/generator/test_loader.py b/packages/gapic-generator/tests/unit/generator/test_loader.py index 27f8011dda11..ec392de23f60 100644 --- a/packages/gapic-generator/tests/unit/generator/test_loader.py +++ b/packages/gapic-generator/tests/unit/generator/test_loader.py @@ -14,7 +14,7 @@ from unittest import mock -from api_factory.generator.loader import TemplateLoader +from gapic.generator.loader import TemplateLoader def test_service_templates(): diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 5805fe072aa8..9bd5e418f481 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -18,9 +18,9 @@ from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import api -from api_factory.schema import naming -from api_factory.schema import wrappers +from gapic.schema import api +from gapic.schema import naming +from gapic.schema import wrappers def test_api_build(): diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 7b825f2ba754..eaa88224fe01 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -16,7 +16,7 @@ from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata +from gapic.schema import metadata def test_address_str_no_parent(): diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 64496a47bc88..f179ef7e2101 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ 
-18,7 +18,7 @@ from google.api import metadata_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import naming +from gapic.schema import naming def test_long_name(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index bcae6035b0d6..34d7ae73f76b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -16,8 +16,8 @@ from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata -from api_factory.schema import wrappers +from gapic.schema import metadata +from gapic.schema import wrappers def test_enum_properties(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index cb6d198f3a37..4b819cafbc5a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -17,8 +17,8 @@ from google.api import annotations_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import wrappers -from api_factory.schema.metadata import Address, Metadata +from gapic.schema import wrappers +from gapic.schema.metadata import Address, Metadata def test_field_properties(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 50f4f1ff5d20..aea25aaabb8f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -19,8 +19,8 @@ from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata -from api_factory.schema import wrappers +from gapic.schema import metadata +from gapic.schema import wrappers def test_message_properties(): diff 
--git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 156b3c896401..7078541ca293 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -19,8 +19,8 @@ from google.api import signature_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata -from api_factory.schema import wrappers +from gapic.schema import metadata +from gapic.schema import wrappers def test_method_types(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py index 357674ad7774..4d4723e59efe 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py @@ -14,8 +14,8 @@ from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata -from api_factory.schema import wrappers +from gapic.schema import metadata +from gapic.schema import wrappers def test_operation(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 48ae7972628f..3292f72ce3cb 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -18,8 +18,8 @@ from google.api import http_pb2 from google.protobuf import descriptor_pb2 -from api_factory.schema import metadata -from api_factory.schema import wrappers +from gapic.schema import metadata +from gapic.schema import wrappers def test_service_properties(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py index afcefd25c7d9..a80a4f8641c0 100644 
--- a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py @@ -16,7 +16,7 @@ from google.protobuf import descriptor_pb2 -from api_factory.schema import wrappers +from gapic.schema import wrappers def test_signature_dispatch_field(): diff --git a/packages/gapic-generator/tests/unit/utils/test_cache.py b/packages/gapic-generator/tests/unit/utils/test_cache.py index d43f09acd283..eee6865e6df1 100644 --- a/packages/gapic-generator/tests/unit/utils/test_cache.py +++ b/packages/gapic-generator/tests/unit/utils/test_cache.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from api_factory.utils import cache +from gapic.utils import cache def test_cached_property(): diff --git a/packages/gapic-generator/tests/unit/utils/test_case.py b/packages/gapic-generator/tests/unit/utils/test_case.py index efaae5566410..93b86ea76350 100644 --- a/packages/gapic-generator/tests/unit/utils/test_case.py +++ b/packages/gapic-generator/tests/unit/utils/test_case.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from api_factory.utils import case +from gapic.utils import case def test_pascal_to_snake(): diff --git a/packages/gapic-generator/tests/unit/utils/test_filename.py b/packages/gapic-generator/tests/unit/utils/test_filename.py index fe8596074398..2e88dc2abc41 100644 --- a/packages/gapic-generator/tests/unit/utils/test_filename.py +++ b/packages/gapic-generator/tests/unit/utils/test_filename.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from api_factory.utils import filename +from gapic.utils import filename def test_to_valid_filename(): diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 853a0ec39de7..24b732f31091 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from api_factory.utils import lines +from gapic.utils import lines def test_wrap_noop(): diff --git a/packages/gapic-generator/tests/unit/utils/test_placeholder.py b/packages/gapic-generator/tests/unit/utils/test_placeholder.py index 4c26b49a4ef4..8463446a6140 100644 --- a/packages/gapic-generator/tests/unit/utils/test_placeholder.py +++ b/packages/gapic-generator/tests/unit/utils/test_placeholder.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from api_factory.utils import placeholder +from gapic.utils import placeholder def test_placeholder_str_eq(): From 98dcfe6bb7d640ec30f70c0fc55304e731f06167 Mon Sep 17 00:00:00 2001 From: michaelbausor Date: Thu, 2 Aug 2018 09:27:09 -0700 Subject: [PATCH 0015/1339] Fix typo in client.py.j2 (#18) --- .../templates/$namespace/$name_$version/$service/client.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 index e9322adf3f44..aaca06628af3 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 @@ -67,7 +67,7 @@ class {{ service.name }}: should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be - sent alont with the request as metadata. + sent along with the request as metadata. Returns: {{ method.output.sphinx_ident }}: From e097230d7f3fc5db687f7ccc52286edb0dece3a5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 2 Aug 2018 12:45:08 -0700 Subject: [PATCH 0016/1339] Fix http.py.j2 typo. (#19) Template testing sorely needed. :-) --- .../$namespace/$name_$version/$service/transports/http.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 index 5abf586d54d7..c23bf4482e12 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 @@ -76,7 +76,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): Args: request (~.{{ method.input.python_ident }}): The request object. {{- ' ' -}} - {{ method.input.meta.doc|wrap(width=72, offset=36 indent=16) }} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. 
From 81e1a581d41eb8c446ff1672c331ca0845b58c86 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 2 Aug 2018 12:46:09 -0700 Subject: [PATCH 0017/1339] Pluggable templates (#20) --- .../gapic-generator/gapic/cli/generate.py | 9 +++-- .../gapic/generator/generator.py | 36 ++++++++++--------- .../tests/unit/generator/test_generator.py | 8 +++++ 3 files changed, 34 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index 614f1d9b0466..471fc42e12f6 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -32,9 +32,14 @@ @click.option('--output', type=click.File('wb'), default=sys.stdout.buffer, help='Where to output the `CodeGeneratorResponse`. ' 'Defaults to stdout.') +@click.option('--templates', type=click.Path(exists=True), default=None, + help='Which templates to use to generate a library. ' + 'Defaults to the templates included in gapic-generator, ' + 'which generate client libraries for Python 3.4 and up.') def generate( request: typing.BinaryIO, - output: typing.BinaryIO) -> None: + output: typing.BinaryIO, + templates: str = None) -> None: """Generate a full API client description.""" # Load the protobuf CodeGeneratorRequest. @@ -57,7 +62,7 @@ def generate( # Translate into a protobuf CodeGeneratorResponse; this reads the # individual templates and renders them. # If there are issues, error out appropriately. - res = generator.Generator(api_schema=api_schema).get_response() + res = generator.Generator(api_schema, templates=templates).get_response() # Output the serialized response. 
output.write(res.SerializeToString()) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index f079835f75a2..c71ef37683d4 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -12,13 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -import io import os from typing import Any, Iterable, Mapping, Sequence import jinja2 -from google.protobuf.compiler.plugin_pb2 import CodeGeneratorRequest from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse from gapic import utils @@ -30,25 +28,32 @@ class Generator: """A protoc code generator for client libraries. - This class receives a :class:`~.plugin_pb2.CodeGeneratorRequest` (as per - the protoc plugin contract), and provides an interface for getting - a :class:`~.plugin_pb2.CodeGeneratorResponse`. - - That request with one or more protocol buffers which collectively - describe an API. + This class receives a :class:`~.api.API`, a representation of the API + schema, and provides an interface for getting a + :class:`~.plugin_pb2.CodeGeneratorResponse` (which it does through + rendering templates). Args: - request (CodeGeneratorRequest): A request protocol buffer as provided - by protoc. See ``plugin.proto``. + api_schema (~.API): An API schema object, which is sent to every + template as the ``api`` variable. + templates (str): Optional. Path to the templates to be + rendered. If this is not provided, the templates included with + this application are used. """ - def __init__(self, api_schema: api.API) -> None: + def __init__(self, api_schema: api.API, *, + templates: str = None) -> None: self._api = api_schema + # If explicit templates were not provided, use our default. 
+ if not templates: + templates = os.path.join( + os.path.realpath(os.path.dirname(__file__)), + '..', 'templates', + ) + # Create the jinja environment with which to render templates. self._env = jinja2.Environment( - loader=loader.TemplateLoader( - searchpath=os.path.join(_dirname, '..', 'templates'), - ), + loader=loader.TemplateLoader(searchpath=templates), undefined=jinja2.StrictUndefined, ) @@ -173,9 +178,6 @@ def _get_output_filename( return filename -_dirname = os.path.realpath(os.path.dirname(__file__)) - - __all__ = ( 'Generator', ) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 9d08e5418eca..f4c80abecc08 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -41,6 +41,14 @@ def test_constructor(): assert 'wrap' in g._env.filters +def test_custom_template_directory(): + # Create a generator. + g = generator.Generator(api_schema=make_api(), templates='/templates/') + + # Assert that the Jinja loader will pull from the correct location. + assert g._env.loader.searchpath == ['/templates/'] + + def test_get_response(): # Create a generator with mock data. # From 1e7355fa1767c2922d772a8fddfdbb4c4120b0a0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 6 Aug 2018 15:22:19 -0700 Subject: [PATCH 0018/1339] Add streaming support. 
(#21) --- .../gapic-generator/gapic/schema/wrappers.py | 8 +++++++ .../$service/transports/grpc.py.j2 | 2 +- .../tests/unit/schema/wrappers/test_method.py | 21 +++++++++++++++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 329d5ea676b5..1e307591e776 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -369,6 +369,14 @@ def field_headers(self) -> Sequence[str]: return tuple(re.findall(r'\{([a-z][\w\d_.]+)=', http.get)) return () + @property + def grpc_stub_type(self) -> str: + """Return the type of gRPC stub to use.""" + return '{client}_{server}'.format( + client='stream' if self.client_streaming else 'unary', + server='stream' if self.server_streaming else 'unary', + ) + @utils.cached_property def signatures(self) -> Tuple[signature_pb2.MethodSignature]: """Return the signature defined for this method.""" diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 index 33072d479331..185f3946c8cc 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 @@ -101,7 +101,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if '{{ method.name|snake_case }}' not in self._stubs: - self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.unary_unary( + self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.python_module }}.{{ method.input.name }}.SerializeToString, response_deserializer={{ method.output.python_module }}.{{ method.output.name }}.FromString, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 7078541ca293..4d42351a0ba8 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -67,6 +67,26 @@ def test_method_field_headers(): assert isinstance(method.field_headers, collections.Sequence) +def test_method_unary_unary(): + method = make_method('F', client_streaming=False, server_streaming=False) + assert method.grpc_stub_type == 'unary_unary' + + +def test_method_unary_stream(): + method = make_method('F', client_streaming=False, server_streaming=True) + assert method.grpc_stub_type == 'unary_stream' + + +def test_method_stream_unary(): + method = make_method('F', client_streaming=True, server_streaming=False) + assert method.grpc_stub_type == 'stream_unary' + + +def test_method_stream_stream(): + method = make_method('F', client_streaming=True, server_streaming=True) + assert method.grpc_stub_type == 'stream_stream' + + def make_method( name: str, input_message: wrappers.MessageType = None, output_message: wrappers.MessageType = None, @@ -81,6 +101,7 @@ def make_method( name=name, input_type=str(input_message.meta.address), output_type=str(output_message.meta.address), + **kwargs ) # Instantiate the wrapper class. 
From 70c0f24bcef0cef139359fb7ec8264b0ee060f40 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 10 Aug 2018 10:36:52 -0700 Subject: [PATCH 0019/1339] Use the nox pre-release. (#24) This will be needed for showcase tests (see #23), and switching back and forth is difficult. --- packages/gapic-generator/.circleci/config.yml | 14 +++++++++----- packages/gapic-generator/nox.py | 12 +++--------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 8e852c644b67..8e39f118b59e 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -15,10 +15,12 @@ jobs: - checkout - run: name: Install nox and codecov. - command: pip install nox-automation codecov + command: | + pip install --pre nox-automation + pip install codecov - run: name: Run unit tests. - command: nox -s "unit(python_version='3.6')" + command: nox -s unit-3.6 - run: name: Submit coverage data to codecov. command: codecov @@ -30,10 +32,12 @@ jobs: - checkout - run: name: Install nox and codecov. - command: pip install nox-automation codecov + command: | + pip install --pre nox-automation + pip install codecov - run: name: Run unit tests. - command: nox -s "unit(python_version='3.7')" + command: nox -s unit-3.7 - run: name: Submit coverage data to codecov. command: codecov @@ -45,7 +49,7 @@ jobs: - checkout - run: name: Install nox. - command: pip install nox-automation + command: pip install --pre nox-automation - run: name: Build the documentation. 
command: nox -s docs diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 2d468eca2687..1cc1fa3c3329 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -18,15 +18,10 @@ import nox -@nox.session -@nox.parametrize('python_version', ['3.6', '3.7']) -def unit(session, python_version='3.7'): +@nox.session(python=['3.6', '3.7']) +def unit(session): """Run the unit test suite.""" - session.interpreter = 'python{0}'.format(python_version) - - session.virtualenv_dirname = 'unit-{0}'.format(python_version) - session.install('coverage', 'pytest', 'pytest-cov') session.install('-e', '.') @@ -41,11 +36,10 @@ def unit(session, python_version='3.7'): ) -@nox.session +@nox.session(python='3.6') def docs(session): """Build the docs.""" - session.interpreter = 'python3.6' session.install('sphinx', 'sphinx_rtd_theme') session.install('.') From 14dc9e5a4f854aba29651126fc719998b64786c9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 10 Aug 2018 10:47:52 -0700 Subject: [PATCH 0020/1339] Add Python namespace incantation. (#22) This commit adds the appropriate Python namespace incantation throughout the namespace tree when appropriate. --- .../gapic/generator/generator.py | 54 +++++++++++-------- .../gapic-generator/gapic/schema/naming.py | 8 +++ .../gapic/templates/_base.py.j2 | 2 +- .../gapic/templates/setup.py.j2 | 18 ++++--- .../tests/unit/generator/test_generator.py | 28 +++++++--- .../tests/unit/schema/test_naming.py | 5 ++ 6 files changed, 79 insertions(+), 36 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index c71ef37683d4..a266d368a9b5 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -13,7 +13,7 @@ # limitations under the License. 
import os -from typing import Any, Iterable, Mapping, Sequence +from typing import Any, Iterable, Mapping, Sequence, Tuple import jinja2 @@ -110,34 +110,32 @@ def _render_templates( objects for inclusion in the final response. """ answer = [] - additional_context = additional_context or {} + context = additional_context or {} # Iterate over the provided templates and generate a File object # for each. for template_name in templates: - # Generate the File object. - answer.append(CodeGeneratorResponse.File( - content=formatter.fix_whitespace( - self._env.get_template(template_name).render( - api=self._api, - **additional_context + for fn in self._get_filenames(template_name, context=context): + # Generate the File object. + answer.append(CodeGeneratorResponse.File( + content=formatter.fix_whitespace( + self._env.get_template(template_name).render( + api=self._api, + **context + ), ), - ), - name=self._get_output_filename( - template_name, - context=additional_context, - ), - )) + name=fn, + )) # Done; return the File objects based on these templates. return answer - def _get_output_filename( + def _get_filenames( self, template_name: str, *, context: dict = None, - ) -> str: - """Return the appropriate output filename for this template. + ) -> Tuple[str]: + """Return the appropriate output filenames for this template. This entails running the template name through a series of replacements to replace the "filename variables" (``$name``, @@ -154,15 +152,27 @@ def _get_output_filename( context (Mapping): Additional context being sent to the template. Returns: - str: The appropriate output filename. + Tuple[str]: The appropriate output filenames. """ - filename = template_name[:-len('.j2')] \ + filename = template_name[:-len('.j2')] + + # Special case: If the filename is `$namespace/__init__.py`, we + # need this exact file to be part of every individual directory + # in the namespace tree. Handle this special case. 
+ # + # For more information, see: + # https://packaging.python.org/guides/packaging-namespace-packages/ + if filename == os.path.join('$namespace', '__init__.py'): + return tuple([ + os.path.sep.join(i.split('.') + ['__init__.py']) + for i in self._api.naming.namespace_packages + ]) # Replace the $namespace variable. filename = filename.replace( '$namespace', - '/'.join([i.lower() for i in self._api.naming.namespace]), - ).lstrip('/') + os.path.sep.join([i.lower() for i in self._api.naming.namespace]), + ).lstrip(os.path.sep) # Replace the $name and $version variables. filename = filename.replace('$name_$version', @@ -175,7 +185,7 @@ def _get_output_filename( context['service'].module_name) # Done, return the filename. - return filename + return (filename,) __all__ = ( diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index be35c949beff..10988eb6389b 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -162,6 +162,14 @@ def module_name(self) -> str: """Return the appropriate Python module name.""" return utils.to_valid_module_name(self.name) + @property + def namespace_packages(self) -> Tuple[str]: + """Return the appropriate Python namespace packages.""" + answer = [] + for cursor in [i.lower() for i in self.namespace]: + answer.append(f'{answer[-1]}.{cursor}' if answer else cursor) + return tuple(answer) + @property def versioned_module_name(self) -> str: """Return the versiond module name (e.g. ``apiname_v1``). 
diff --git a/packages/gapic-generator/gapic/templates/_base.py.j2 b/packages/gapic-generator/gapic/templates/_base.py.j2 index 0ced14207001..5a9ea62f8bc1 100644 --- a/packages/gapic-generator/gapic/templates/_base.py.j2 +++ b/packages/gapic-generator/gapic/templates/_base.py.j2 @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- -{% block content %} +{%- block content %} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 8750e75338f6..e1ad7b42c16a 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -4,30 +4,36 @@ import io import os -from setuptools import setup +import setuptools -setup( +setuptools.setup( name='{{ api.naming.warehouse_package_name }}', version='0.0.1', + {% if api.naming.namespace -%} + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages={{ api.naming.namespace_packages }}, + {% else -%} + packages=setuptools.find_packages(), + {% endif -%} platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core >= 0.1.4, < 0.2.0dev', + 'google-api-core >= 1.3.0, < 2.0.0dev', 'googleapis-common-protos >= 1.6.0b4', 'grpcio >= 1.10.0', ), - classifiers=( + classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', - ), + ], zip_safe=False, ) {% endblock %} diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index f4c80abecc08..39dbdb684932 100644 --- 
a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -134,15 +134,15 @@ def test_render_templates_additional_context(): assert files[0].content == 'A bird!\n' -def test_get_output_filename(): +def test_get_filenames(): g = generator.Generator(api_schema=make_api( naming=make_naming(namespace=(), name='Spam', version='v2'), )) template_name = '$namespace/$name_$version/foo.py.j2' - assert g._get_output_filename(template_name) == 'spam_v2/foo.py' + assert g._get_filenames(template_name) == ('spam_v2/foo.py',) -def test_get_output_filename_with_namespace(): +def test_get_filenames_with_namespace(): g = generator.Generator(api_schema=make_api( naming=make_naming( name='Spam', @@ -151,15 +151,15 @@ def test_get_output_filename_with_namespace(): ), )) template_name = '$namespace/$name_$version/foo.py.j2' - assert g._get_output_filename(template_name) == 'ham/bacon/spam_v2/foo.py' + assert g._get_filenames(template_name) == ('ham/bacon/spam_v2/foo.py',) -def test_get_output_filename_with_service(): +def test_get_filenames_with_service(): g = generator.Generator(api_schema=make_api( naming=make_naming(namespace=(), name='Spam', version='v2'), )) template_name = '$name/$service/foo.py.j2' - assert g._get_output_filename( + assert g._get_filenames( template_name, context={ 'service': wrappers.Service( @@ -167,7 +167,21 @@ def test_get_output_filename_with_service(): service_pb=descriptor_pb2.ServiceDescriptorProto(name='Eggs'), ), } - ) == 'spam/eggs/foo.py' + ) == ('spam/eggs/foo.py',) + + +def test_get_filenames_with_namespace_init(): + g = generator.Generator(api_schema=make_api(naming=make_naming( + namespace=('Foo', 'Bar', 'Baz'), + name='Spam', + version='v2', + ))) + template_name = '$namespace/__init__.py.j2' + assert g._get_filenames(template_name) == ( + 'foo/__init__.py', + 'foo/bar/__init__.py', + 'foo/bar/baz/__init__.py', + ) def make_proto(file_pb: 
descriptor_pb2.FileDescriptorProto, diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index f179ef7e2101..7db80bc6d82f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -53,6 +53,11 @@ def test_versioned_module_name(): assert n.versioned_module_name == 'genie_v2' +def test_namespace_packages(): + n = make_naming(name='BigQuery', namespace=('Google', 'Cloud')) + assert n.namespace_packages == ('google', 'google.cloud') + + def test_warehouse_package_name_no_namespace(): n = make_naming(name='BigQuery', namespace=[]) assert n.warehouse_package_name == 'bigquery' From 69127a45cc814d8958dad59262c8bb454792fbb2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 10 Aug 2018 12:19:33 -0700 Subject: [PATCH 0021/1339] Import modules for method signatures. (#25) This fixes an issue with modules for signature fields' not being imported. --- .../gapic-generator/gapic/schema/wrappers.py | 13 ++++ .../unit/schema/wrappers/test_service.py | 63 +++++++++++++++---- 2 files changed, 63 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 1e307591e776..0b501d1a121d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -521,6 +521,19 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: method.output.python_module, )) + # If this method has flattening that is honored, add its + # modules. + # + # This entails adding the module for any field on the signature + # unless the field is a primitive. 
+ for sig in method.signatures.single_dispatch: + for field in sig.fields.values(): + if not isinstance(field.type, PythonType): + answer.add(( + '.'.join(field.type.meta.address.package), + field.type.python_module, + )) + # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. if getattr(method.output, 'lro_response', None): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 3292f72ce3cb..dea88b44e6b1 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 from google.api import http_pb2 +from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import metadata @@ -50,7 +51,11 @@ def test_service_no_scopes(): def test_service_python_modules(): - service = make_service() + service = make_service(methods=( + get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), + get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), + get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), + )) assert service.python_modules == ( ('a.b.v1', 'c_pb2'), ('foo', 'bacon_pb2'), @@ -70,6 +75,28 @@ def test_service_python_modules_lro(): ) +def test_service_python_modules_signature(): + service = make_service_with_method_options( + in_fields=( + descriptor_pb2.FieldDescriptorProto(name='secs', type=5), + descriptor_pb2.FieldDescriptorProto( + name='d', + type=11, # message + type_name='a.b.c.v2.D', + ), + ), + method_signature=signature_pb2.MethodSignature(fields=['secs', 'd']), + ) + # type=5 is int, so nothing is added. 
+ assert service.python_modules == ( + ('a.b.c', 'v2_pb2'), + ('foo', 'bar_pb2'), + ('foo', 'baz_pb2'), + ('foo', 'qux_pb2'), + ('google.api_core', 'operation'), + ) + + def test_service_no_lro(): service = make_service() assert service.has_lro is False @@ -97,14 +124,8 @@ def test_module_name(): def make_service(name: str = 'Placeholder', host: str = '', + methods: typing.Tuple[wrappers.Method] = (), scopes: typing.Tuple[str] = ()) -> wrappers.Service: - # Declare a few methods, with messages in distinct packages. - methods = ( - get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), - get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), - get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), - ) - # Define a service descriptor, and set a host and oauth scopes if # appropriate. service_pb = descriptor_pb2.ServiceDescriptorProto(name=name) @@ -119,8 +140,12 @@ def make_service(name: str = 'Placeholder', host: str = '', ) +# FIXME (lukesneeringer): This test method is convoluted and it makes these +# tests difficult to understand and maintain. def make_service_with_method_options(*, http_rule: http_pb2.HttpRule = None, + method_signature: signature_pb2.MethodSignature = None, + in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = () ) -> wrappers.Service: # Declare a method with options enabled for long-running operations and # field headers. @@ -130,7 +155,9 @@ def make_service_with_method_options(*, 'google.longrunning.operations.Operation', lro_response_type='foo.baz.ThingResponse', lro_metadata_type='foo.qux.ThingMetadata', + in_fields=in_fields, http_rule=http_rule, + method_signature=method_signature, ) # Define a service descriptor. 
@@ -147,10 +174,12 @@ def get_method(name: str, in_type: str, out_type: str, lro_response_type: str = '', - lro_metadata_type: str = '', + lro_metadata_type: str = '', *, + in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), http_rule: http_pb2.HttpRule = None, + method_signature: signature_pb2.MethodSignature = None, ) -> wrappers.Method: - input_ = get_message(in_type) + input_ = get_message(in_type, fields=in_fields) output = get_message(out_type) # Define a method descriptor. Set the field headers if appropriate. @@ -167,6 +196,9 @@ def get_method(name: str, if http_rule: ext_key = annotations_pb2.http method_pb.options.Extensions[ext_key].MergeFrom(http_rule) + if method_signature: + ext_key = annotations_pb2.method_signature + method_pb.options.Extensions[ext_key].MergeFrom(method_signature) return wrappers.Method( method_pb=method_pb, @@ -175,7 +207,9 @@ def get_method(name: str, ) -def get_message(dot_path: str) -> wrappers.MessageType: +def get_message(dot_path: str, *, + fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), + ) -> wrappers.MessageType: # Pass explicit None through (for lro_metadata). if dot_path is None: return None @@ -189,8 +223,11 @@ def get_message(dot_path: str) -> wrappers.MessageType: pieces = dot_path.split('.') pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] return wrappers.MessageType( - fields={}, - message_pb=descriptor_pb2.DescriptorProto(name=name), + fields={i.name: wrappers.Field( + field_pb=i, + message=get_message(i.type_name) if i.type_name else None, + ) for i in fields}, + message_pb=descriptor_pb2.DescriptorProto(name=name, field=fields), meta=metadata.Metadata(address=metadata.Address( package=pkg, module=module, From 3e82438bdd73b6d7b8c36aa48c8b35bcd5aee24b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 10 Aug 2018 14:02:08 -0700 Subject: [PATCH 0022/1339] Add Showcase testing. 
(#23) This adds [Showcase](https://github.com/googleapis/gapic-showcase) testing to gapic-generator-python, which essentially is the beginning of our integration testing strategy. --- packages/gapic-generator/.circleci/config.yml | 37 +++++++++++++++ packages/gapic-generator/nox.py | 45 +++++++++++++++++++ .../gapic-generator/tests/system/conftest.py | 32 +++++++++++++ .../tests/system/test_grpc_unary.py | 37 +++++++++++++++ 4 files changed, 151 insertions(+) create mode 100644 packages/gapic-generator/tests/system/conftest.py create mode 100644 packages/gapic-generator/tests/system/test_grpc_unary.py diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 8e39f118b59e..d12e5dc5fd1c 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -6,6 +6,10 @@ workflows: jobs: - unit-3.6 - unit-3.7 + - showcase: + requires: + - unit-3.6 + - unit-3.7 - docs jobs: unit-3.6: @@ -42,6 +46,39 @@ jobs: name: Submit coverage data to codecov. command: codecov when: always + showcase: + docker: + - image: 'python:3.7' + steps: + - checkout + - run: + name: Install nox. + command: pip install --pre nox-automation + - run: + name: Install unzip. + command: | + apt-get update + apt-get install unzip + - run: + name: Install protoc 3.6.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.6.1.zip + cd /usr/src/protoc/ + unzip protoc-3.6.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Download Showcase. + command: | + curl --location https://github.com/googleapis/gapic-showcase/releases/download/v0.0.3/gapic-showcase-v1alpha1-0.0.3-linux-amd64 --output /usr/local/bin/showcase + chmod a+x /usr/local/bin/showcase + - run: + name: Run Showcase. 
+ command: /usr/local/bin/showcase + background: true + - run: + name: Run showcase tests. + command: nox -s showcase docs: docker: - image: 'python:3.6' diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 1cc1fa3c3329..3e3625d2c820 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -14,6 +14,7 @@ from __future__ import absolute_import import os +import tempfile import nox @@ -36,6 +37,50 @@ def unit(session): ) +@nox.session(python='3.7') +def showcase(session): + """Run the Showcase test suite.""" + + # Try to make it clear if Showcase is not running, so that + # people do not end up with tons of difficult-to-debug failures over + # an obvious problem. + if not os.environ.get('CIRCLECI'): + session.log('-' * 70) + session.log('Note: Showcase must be running for these tests to work.') + session.log('See https://github.com/googleapis/gapic-showcase') + session.log('-' * 70) + session.run('netstat', '-plnt', '|', 'grep', ':7469', silent=True) + + # Install pytest and gapic-generator-python + session.install('pytest') + session.install('-e', '.') + + # Install a client library for Showcase. + with tempfile.TemporaryDirectory() as tmp_dir: + showcase_version = '0.0.3' + + # Download the Showcase descriptor. + session.run( + 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' + f'download/v{showcase_version}/' + f'gapic-showcase-v1alpha1-{showcase_version}.desc', + '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + silent=True, + ) + + # Write out a client library for Showcase. + session.run('protoc', + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_out={tmp_dir}', f'--pyclient_out={tmp_dir}', + 'google/showcase/v1alpha1/showcase.proto', + ) + + # Install the library. 
+ session.install(tmp_dir) + + session.run('py.test', '--quiet', os.path.join('tests', 'system')) + + @nox.session(python='3.6') def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py new file mode 100644 index 000000000000..286b9f309f99 --- /dev/null +++ b/packages/gapic-generator/tests/system/conftest.py @@ -0,0 +1,32 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.auth.credentials import AnonymousCredentials +from google.showcase import Showcase +from google.showcase_v1alpha1.showcase.transports.grpc import ( + ShowcaseGrpcTransport, +) + +import grpc + + +@pytest.fixture +def showcase(): + transport = ShowcaseGrpcTransport(credentials=AnonymousCredentials()) + transport.__dict__['grpc_channel'] = grpc.insecure_channel( + transport.SERVICE_ADDRESS, + ) + return Showcase(transport=transport) diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py new file mode 100644 index 000000000000..04c05a1e08b3 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -0,0 +1,37 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.api_core import exceptions +from google.rpc import code_pb2 + + +def test_unary(showcase): + response = showcase.echo({ + 'content': 'The hail in Wales falls mainly on the snails.', + }) + assert response.content == 'The hail in Wales falls mainly on the snails.' + + +def test_unary_error(showcase): + with pytest.raises(exceptions.InvalidArgument) as exc: + showcase.echo({ + 'error': { + 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), + 'message': 'Bad things! Bad things!', + }, + }) + assert exc.value.code == 400 + assert exc.value.message == 'Bad things! Bad things!' From 8de9fdcd1089471e48287057279fea7b92b4f04c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 6 Sep 2018 14:02:44 -0700 Subject: [PATCH 0023/1339] Fix the package name when namespace is empty. 
(#30) --- packages/gapic-generator/gapic/schema/naming.py | 2 +- .../tests/unit/schema/test_naming.py | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 10988eb6389b..333c4eeae31a 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -96,7 +96,7 @@ def build(cls, package_info = cls( name=match['name'].capitalize(), namespace=tuple([i.capitalize() - for i in match['namespace'].split('.')]), + for i in match['namespace'].split('.') if i]), product_name=match['name'].capitalize(), product_url='', version=match.get('version', ''), diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 7db80bc6d82f..ad64c912e7c7 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -126,6 +126,20 @@ def test_build_with_annotations(): assert n.product_name == 'Spanner' +def test_build_no_namespace(): + protos = ( + descriptor_pb2.FileDescriptorProto( + name='foo_service.proto', + package='foo', + ), + ) + n = naming.Naming.build(*protos) + assert n.name == 'Foo' + assert n.namespace == () + assert n.version == '' + assert n.product_name == 'Foo' + + def test_inconsistent_metadata_error(): # Set up the first proto. proto1 = descriptor_pb2.FileDescriptorProto( From 392658ad9d8560017092fe520fd1fdcdbc0d9d88 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 10 Sep 2018 14:07:12 -0700 Subject: [PATCH 0024/1339] Be tolerant of no version. 
(#27) --- .../gapic/generator/generator.py | 19 +++++---- .../tests/unit/generator/test_generator.py | 41 ++++++++++++++----- 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index a266d368a9b5..c6302b4a0846 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import collections import os from typing import Any, Iterable, Mapping, Sequence, Tuple @@ -71,22 +72,24 @@ def get_response(self) -> CodeGeneratorResponse: ~.CodeGeneratorResponse: A response describing appropriate files and contents. See ``plugin.proto``. """ - output_files = [] + output_files = collections.OrderedDict() # Some templates are rendered once per API client library. # These are generally boilerplate packaging and metadata files. - output_files += self._render_templates(self._env.loader.api_templates) + output_files.update( + self._render_templates(self._env.loader.api_templates), + ) # Some templates are rendered once per service (an API may have # one or more services). for service in self._api.services.values(): - output_files += self._render_templates( + output_files.update(self._render_templates( self._env.loader.service_templates, additional_context={'service': service}, - ) + )) # Return the CodeGeneratorResponse output. - return CodeGeneratorResponse(file=output_files) + return CodeGeneratorResponse(file=[i for i in output_files.values()]) def _render_templates( self, @@ -109,7 +112,7 @@ def _render_templates( Sequence[~.CodeGeneratorResponse.File]: A sequence of File objects for inclusion in the final response. 
""" - answer = [] + answer = collections.OrderedDict() context = additional_context or {} # Iterate over the provided templates and generate a File object @@ -117,7 +120,7 @@ def _render_templates( for template_name in templates: for fn in self._get_filenames(template_name, context=context): # Generate the File object. - answer.append(CodeGeneratorResponse.File( + answer[fn] = CodeGeneratorResponse.File( content=formatter.fix_whitespace( self._env.get_template(template_name).render( api=self._api, @@ -125,7 +128,7 @@ def _render_templates( ), ), name=fn, - )) + ) # Done; return the File objects based on these templates. return answer diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 39dbdb684932..a89719f0bd7c 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import io -import os from typing import Mapping from unittest import mock @@ -65,12 +63,12 @@ def test_get_response(): # Mock all the rendering methods. with mock.patch.object(g, '_render_templates') as _render_templates: - _render_templates.return_value = [ - plugin_pb2.CodeGeneratorResponse.File( + _render_templates.return_value = { + 'template_file': plugin_pb2.CodeGeneratorResponse.File( name='template_file', content='This was a template.', ), - ] + } # Okay, now run the `get_response` method. response = g.get_response() @@ -109,10 +107,31 @@ def test_render_templates(): # Test that we get back the expected content for each template. 
assert len(files) == 2 - assert files[0].name == 'foo' - assert files[1].name == 'bar' - assert files[0].content == 'Hello, I am `foo.j2`.\n' - assert files[1].content == 'Hello, I am `bar.j2`.\n' + assert files['foo'].name == 'foo' + assert files['bar'].name == 'bar' + assert files['foo'].content == 'Hello, I am `foo.j2`.\n' + assert files['bar'].content == 'Hello, I am `bar.j2`.\n' + + +def test_render_templates_duplicate(): + g = generator.Generator(api_schema=make_api()) + + # Determine the templates to be rendered. + # In the case of duplication, we want the last one encountered to win. + templates = ('foo.j2', 'foo.j2') + with mock.patch.object(jinja2.Environment, 'get_template') as get_template: + get_template.side_effect = ( + jinja2.Template(f'Hello, I am the first.'), + jinja2.Template(f'Hello, I am the second.'), + ) + + # Render the templates. + files = g._render_templates(templates) + + # Test that we get back the expected content for each template. + assert len(files) == 1 + assert files['foo'].name == 'foo' + assert files['foo'].content == 'Hello, I am the second.\n' def test_render_templates_additional_context(): @@ -130,8 +149,8 @@ def test_render_templates_additional_context(): # Test that we get back the expected content for each template. assert len(files) == 1 - assert files[0].name == 'foo' - assert files[0].content == 'A bird!\n' + assert files['foo'].name == 'foo' + assert files['foo'].content == 'A bird!\n' def test_get_filenames(): From 64096dc78bb6773d618a94493b67b14190197cf7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 17 Sep 2018 15:24:50 -0700 Subject: [PATCH 0025/1339] Pin Sphinx below 1.8.0. (#32) Sphinx 1.8.0 introduces a bug in string-specified type annotations which I can not work around at the moment. 
--- packages/gapic-generator/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 3e3625d2c820..6f7a71c6a53e 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -85,7 +85,7 @@ def showcase(session): def docs(session): """Build the docs.""" - session.install('sphinx', 'sphinx_rtd_theme') + session.install('sphinx < 1.8', 'sphinx_rtd_theme') session.install('.') # Build the docs! From 7acd75a8e06922dd0aba23c438fc15f2c9a74b60 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 17 Sep 2018 15:36:57 -0700 Subject: [PATCH 0026/1339] Make a minor adjustment to the whitespace stripper. (#31) --- .../gapic/generator/formatter.py | 4 +- .../tests/unit/generator/test_formatter.py | 39 +++++++++++++++++++ 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/formatter.py b/packages/gapic-generator/gapic/generator/formatter.py index 2aae59a63da7..ca9c3112c38a 100644 --- a/packages/gapic-generator/gapic/generator/formatter.py +++ b/packages/gapic-generator/gapic/generator/formatter.py @@ -32,10 +32,10 @@ def fix_whitespace(code: str) -> str: code = re.sub(r'[ ]+\n', '\n', code) # Ensure at most two blank lines before top level definitions. - code = re.sub(r'\s+\n\n\n(class|def|@)', r'\n\n\n\1', code) + code = re.sub(r'\s+\n\s*\n\s*\n(class|def|@|#)', r'\n\n\n\1', code) # Ensure at most one line before nested definitions. - code = re.sub(r'\s+\n\n( )+(class|def|@)', r'\n\n\1\2', code) + code = re.sub(r'\s+\n\s*\n( )+(class|def|@|#)', r'\n\n\1\2', code) # All files shall end in one and exactly one line break. 
return f'{code.rstrip()}\n' diff --git a/packages/gapic-generator/tests/unit/generator/test_formatter.py b/packages/gapic-generator/tests/unit/generator/test_formatter.py index 2e72ad44d7de..1f7b73dfe7e0 100644 --- a/packages/gapic-generator/tests/unit/generator/test_formatter.py +++ b/packages/gapic-generator/tests/unit/generator/test_formatter.py @@ -88,5 +88,44 @@ def too_far_down(self): """) +def test_fix_whitespace_intermediate_whitespace(): + assert formatter.fix_whitespace(textwrap.dedent("""\ + class JustAClass: + def foo(self): + pass + \ + + + @property + def too_far_down(self): + return 42 + """)) == textwrap.dedent("""\ + class JustAClass: + def foo(self): + pass + + @property + def too_far_down(self): + return 42 + """) + + +def test_fix_whitespace_comment(): + assert formatter.fix_whitespace(textwrap.dedent("""\ + def do_something(): + do_first_thing() + + + # Something something something. + do_second_thing() + """)) == textwrap.dedent("""\ + def do_something(): + do_first_thing() + + # Something something something. + do_second_thing() + """) + + def test_file_newline_ending(): assert formatter.fix_whitespace('') == '\n' From 0f9980e2d62f515b8ad9ae58fe73f1dc83952625 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 21 Sep 2018 14:00:39 -0700 Subject: [PATCH 0027/1339] Move services into a "services" super-module. 
(#33) --- packages/gapic-generator/gapic/generator/generator.py | 6 ++++++ .../gapic/templates/$namespace/$name/__init__.py.j2 | 5 ++++- .../templates/$namespace/$name_$version/__init__.py.j2 | 2 +- .../$name_$version/{ => services}/$service/__init__.py.j2 | 0 .../$name_$version/{ => services}/$service/client.py.j2 | 2 +- .../{ => services}/$service/transports/__init__.py.j2 | 0 .../{ => services}/$service/transports/base.py.j2 | 0 .../{ => services}/$service/transports/grpc.py.j2 | 0 .../{ => services}/$service/transports/http.py.j2 | 0 packages/gapic-generator/tests/system/conftest.py | 2 +- 10 files changed, 13 insertions(+), 4 deletions(-) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => services}/$service/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => services}/$service/client.py.j2 (99%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => services}/$service/transports/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => services}/$service/transports/base.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => services}/$service/transports/grpc.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => services}/$service/transports/http.py.j2 (100%) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index c6302b4a0846..c76cb35a8ece 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -14,6 +14,7 @@ import collections import os +import re from typing import Any, Iterable, Mapping, Sequence, Tuple import jinja2 @@ -180,6 +181,7 @@ def _get_filenames( # Replace the $name and $version variables. 
filename = filename.replace('$name_$version', self._api.naming.versioned_module_name) + filename = filename.replace('$version', self._api.naming.version) filename = filename.replace('$name', self._api.naming.module_name) # Replace the $service variable if applicable. @@ -187,6 +189,10 @@ def _get_filenames( filename = filename.replace('$service', context['service'].module_name) + # Paths may have empty path segments if components are empty + # (e.g. no $version); handle this. + filename = re.sub(r'/+', '/', filename) + # Done, return the filename. return (filename,) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index 3a65a7f66745..b8ec42e4b82d 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -1,10 +1,13 @@ {% extends '_base.py.j2' %} {% block content %} +# Import each service from {{ api.naming.version }} into the unversioned namespace. 
{% for service in api.services.values() -%} -from ..{{ api.naming.versioned_module_name }}.{{ service.name|snake_case }} import {{ service.name }} +from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }} import {{ service.name }} {% endfor %} + __all__ = ( + '{{ api.naming.version }}', {%- for service in api.services.values() %} '{{ service.name }}', {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 index 38df57bae002..23a768754161 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 @@ -2,7 +2,7 @@ {% block content %} {% for service in api.services.values() -%} -from .{{ service.name|snake_case }} import {{ service.name }} +from .services.{{ service.name|snake_case }} import {{ service.name }} {% endfor %} __all__ = ( {%- for service in api.services.values() %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 similarity index 99% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index aaca06628af3..0f2b3f4ee697 100644 --- 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -11,7 +11,7 @@ from google.auth import credentials {% for package, python_module in service.python_modules -%} from {{ package }} import {{ python_module }} {% endfor %} -from ..utils import dispatch +from ...utils import dispatch from .transports import get_transport_class from .transports import {{ service.name }}Transport diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/base.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/grpc.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 diff --git 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$service/transports/http.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 286b9f309f99..2121a9d6ce13 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -16,7 +16,7 @@ from google.auth.credentials import AnonymousCredentials from google.showcase import Showcase -from google.showcase_v1alpha1.showcase.transports.grpc import ( +from google.showcase_v1alpha1.services.showcase.transports.grpc import ( ShowcaseGrpcTransport, ) From 77e621c7453095a0fc1e4b06aa69f62030a15636 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Sep 2018 18:43:02 -0500 Subject: [PATCH 0028/1339] Table-setting for generating protos. (#35) This commit makes several schema edits and additions in order to facilitate generating pb2 replacements directly. In particular, it: * Adds support for a `$proto` rewrite for filenames (such files receive a `proto` variable), and adds loader support for such. * Refactors `Address` to include the `name` of the object it is attached to, enabling it to be used as the `ident` object. * Refactors the various `*_ident` properties into the address, and re-orders template use (e.g. `sphinx_ident` is now `ident.sphinx`). * Additionally, `python_ident` is now just `ident` in templates. * Adds a `rel` method for getting identifiers relative to the file being generated. This deals with a problem that will surface in replacing pb2s where a file would attempt to import or reference itself. 
LROs are slightly broken in a slightly different way than before (note that they were broken in the status quo ante due to a bug in `OperationType`), and `OperationType` turned out to be a mistake. Filed #34 to track this, and it will be fixed after pb2 replacements are introduced (since they will significantly alter what the fix is). --- .../gapic/generator/generator.py | 27 +++- .../gapic-generator/gapic/generator/loader.py | 35 +++- packages/gapic-generator/gapic/schema/api.py | 104 +++++++++--- .../gapic-generator/gapic/schema/metadata.py | 81 +++++++++- .../gapic-generator/gapic/schema/wrappers.py | 151 ++++++------------ .../services/$service/client.py.j2 | 26 +-- .../services/$service/transports/base.py.j2 | 4 +- .../services/$service/transports/grpc.py.j2 | 8 +- .../services/$service/transports/http.py.j2 | 10 +- packages/gapic-generator/gapic/utils/lines.py | 3 +- .../tests/unit/generator/test_generator.py | 54 ++++++- .../tests/unit/schema/test_api.py | 46 +++++- .../tests/unit/schema/test_metadata.py | 48 ++++-- .../tests/unit/schema/wrappers/test_enums.py | 11 +- .../tests/unit/schema/wrappers/test_field.py | 27 ++-- .../unit/schema/wrappers/test_message.py | 18 +-- .../tests/unit/schema/wrappers/test_method.py | 6 +- .../unit/schema/wrappers/test_operation.py | 13 +- .../unit/schema/wrappers/test_service.py | 9 +- .../unit/schema/wrappers/test_signature.py | 2 + 20 files changed, 459 insertions(+), 224 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index c76cb35a8ece..c504f07c0f0d 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -81,6 +81,16 @@ def get_response(self) -> CodeGeneratorResponse: self._render_templates(self._env.loader.api_templates), ) + # Some templates are rendered once per proto (and API may have + # one or more protos). 
+ for proto in self._api.protos.values(): + if not proto.file_to_generate: + continue + output_files.update(self._render_templates( + self._env.loader.proto_templates, + additional_context={'proto': proto}, + )) + # Some templates are rendered once per service (an API may have # one or more services). for service in self._api.services.values(): @@ -186,8 +196,21 @@ def _get_filenames( # Replace the $service variable if applicable. - filename = filename.replace('$service', - context['service'].module_name) + filename = filename.replace( + '$service', + context['service'].module_name, + ) + + # Replace the $proto variable if applicable. + if context and 'proto' in context: + filename = filename.replace( + '$proto', + context['proto'].module_name, + ) + + # Paths may have empty path segments if components are empty + # (e.g. no $version); handle this. + filename = re.sub(r'/+', '/', filename) # Paths may have empty path segments if components are empty # (e.g. no $version); handle this. diff --git a/packages/gapic-generator/gapic/generator/loader.py b/packages/gapic-generator/gapic/generator/loader.py index c001feadd6c3..46f6e56b96fa 100644 --- a/packages/gapic-generator/gapic/generator/loader.py +++ b/packages/gapic-generator/gapic/generator/loader.py @@ -48,15 +48,15 @@ def api_templates(self) -> typing.Set[str]: # Start with the full list of templates, excluding private ones, # but exclude templates from other methods on this loader. return set( - [t for t in self.list_templates() if not t.startswith('_')] - ).difference(self.service_templates) + [t for t in self.list_templates() if not self.is_private(t)] + ).difference(self.service_templates).difference(self.proto_templates) @cached_property def service_templates(self): """Return the templates specific to each service. - This corresponds to all of the templates in a ``$service/`` - subdirectory (this does _not_ need to be at the top level). 
+ This corresponds to all of the templates with ``$service`` + in the filename or path. When these templates are rendered, they are expected to be sent two variables: an :class:`~.API` object spelled ``api``, and the @@ -68,5 +68,30 @@ def service_templates(self): Set[str]: A list of service templates. """ return set( - [t for t in self.list_templates() if '$service/' in t] + [t for t in self.list_templates() if '$service' in t] ) + + @cached_property + def proto_templates(self): + """Return the templates specific to each proto. + + This corresponds to all of the templates with ``$proto`` + in the filename or path. + + When these templates are rendered, they are expected to be sent + two variables: an :class:`~.API` object spelled ``api``, and the + :class:`~.wrappers.Proto` object being iterated over, spelled + ``proto``. These templates are rendered once per proto, with + a distinct ``proto`` variable each time. + + Returns: + Set[str]: A list of proto templates. + """ + return set( + [t for t in self.list_templates() if '$proto' in t] + ) + + def is_private(self, path): + """Return True if ``path`` is a private template, False otherwise.""" + filename = path.split('/')[-1] + return filename != '__init__.py.j2' and filename.startswith('_') diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 06ebdfac6cb0..89d4a6fa78a1 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -29,6 +29,7 @@ from gapic.schema import naming from gapic.schema import wrappers from gapic.utils import cached_property +from gapic.utils import to_snake_case @dataclasses.dataclass(frozen=True) @@ -63,6 +64,34 @@ def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, prior_protos=prior_protos or {}, ).proto + @property + def module_name(self) -> str: + """Return the appropriate module name for this service. 
+ + Returns: + str: The module name for this service (which is the service + name in snake case). + """ + return to_snake_case(self.name.split('/')[-1][:-len('.proto')]) + + @cached_property + def top(self) -> 'Proto': + """Return a proto shim which is only aware of top-level objects. + + This is useful in a situation where a template wishes to iterate + over only those messages and enums that are at the top level of the + file. + """ + return type(self)( + file_pb2=self.file_pb2, + services=self.services, + messages={k: v for k, v in self.messages.items() + if not v.meta.address.parent}, + enums={k: v for k, v in self.enums.items() + if not v.meta.address.parent}, + file_to_generate=False, + ) + @dataclasses.dataclass(frozen=True) class API: @@ -179,7 +208,7 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # for each item as it is loaded. address = metadata.Address( module=file_descriptor.name.split('/')[-1][:-len('.proto')], - package=file_descriptor.package.split('.'), + package=tuple(file_descriptor.package.split('.')), ) # Now iterate over the FileDescriptorProto and pull out each of @@ -265,7 +294,7 @@ def _get_operation_type(self, ) def _load_children(self, children: Sequence, loader: Callable, *, - address: metadata.Address, path: Tuple[int]) -> None: + address: metadata.Address, path: Tuple[int]) -> Mapping: """Return wrapped versions of arbitrary children from a Descriptor. Args: @@ -282,11 +311,18 @@ def _load_children(self, children: Sequence, loader: Callable, *, path (Tuple[int]): The location path up to this point. This is used to correspond to documentation in ``SourceCodeInfo.Location`` in ``descriptor.proto``. + + Return: + Mapping[str, Union[~.MessageType, ~.Service, ~.EnumType]]: A + sequence of the objects that were loaded. """ # Iterate over the list of children provided and call the # applicable loader function on each. 
+ answer = {} for child, i in zip(children, range(0, sys.maxsize)): - loader(child, address=address, path=path + (i,)) + wrapped = loader(child, address=address, path=path + (i,)) + answer[wrapped.name] = wrapped + return answer def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], address: metadata.Address, path: Tuple[int], @@ -379,11 +415,13 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # Done; return the answer. return answer - def _load_message(self, message_pb: descriptor_pb2.DescriptorProto, - address: metadata.Address, path: Tuple[int]) -> None: + def _load_message(self, + message_pb: descriptor_pb2.DescriptorProto, + address: metadata.Address, + path: Tuple[int], + ) -> wrappers.MessageType: """Load message descriptions from DescriptorProtos.""" - ident = f'{str(address)}.{message_pb.name}' - message_addr = address.child(message_pb.name) + address = address.child(message_pb.name) # Load all nested items. # @@ -392,38 +430,54 @@ def _load_message(self, message_pb: descriptor_pb2.DescriptorProto, # type of one of this message's fields, and they need to be in # the registry for the field's message or enum attributes to be # set correctly. - self._load_children(message_pb.enum_type, address=message_addr, - loader=self._load_enum, path=path + (4,)) - self._load_children(message_pb.nested_type, address=message_addr, - loader=self._load_message, path=path + (3,)) + nested_enums = self._load_children( + message_pb.enum_type, + address=address, + loader=self._load_enum, + path=path + (4,), + ) + nested_messages = self._load_children( + message_pb.nested_type, + address=address, + loader=self._load_message, + path=path + (3,), + ) # self._load_children(message.oneof_decl, loader=self._load_field, # address=nested_addr, info=info.get(8, {})) # Create a dictionary of all the fields for this message. 
fields = self._get_fields( message_pb.field, - address=message_addr, + address=address, path=path + (2,), ) fields.update(self._get_fields( message_pb.extension, - address=message_addr, + address=address, path=path + (6,), )) # Create a message correspoding to this descriptor. - self.messages[ident] = wrappers.MessageType( + self.messages[address.proto] = wrappers.MessageType( fields=fields, message_pb=message_pb, + nested_enums=nested_enums, + nested_messages=nested_messages, meta=metadata.Metadata( address=address, documentation=self.docs.get(path, self.EMPTY), ), ) + return self.messages[address.proto] - def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, - address: metadata.Address, path: Tuple[int]) -> None: + def _load_enum(self, + enum: descriptor_pb2.EnumDescriptorProto, + address: metadata.Address, + path: Tuple[int], + ) -> wrappers.EnumType: """Load enum descriptions from EnumDescriptorProtos.""" + address = address.child(enum.name) + # Put together wrapped objects for the enum values. values = [] for enum_value, i in zip(enum.value, range(0, sys.maxsize)): @@ -436,8 +490,7 @@ def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, )) # Load the enum itself. 
- ident = f'{str(address)}.{enum.name}' - self.enums[ident] = wrappers.EnumType( + self.enums[address.proto] = wrappers.EnumType( enum_pb=enum, meta=metadata.Metadata( address=address, @@ -445,21 +498,25 @@ def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, ), values=values, ) + return self.enums[address.proto] - def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, - address: metadata.Address, path: Tuple[int]) -> None: + def _load_service(self, + service: descriptor_pb2.ServiceDescriptorProto, + address: metadata.Address, + path: Tuple[int], + ) -> wrappers.Service: """Load comments for a service and its methods.""" - service_addr = address.child(service.name) + address = address.child(service.name) # Put together a dictionary of the service's methods. methods = self._get_methods( service.method, - address=service_addr, + address=address, path=path + (2,), ) # Load the comments for the service itself. - self.services[f'{str(address)}.{service.name}'] = wrappers.Service( + self.services[address.proto] = wrappers.Service( meta=metadata.Metadata( address=address, documentation=self.docs.get(path, self.EMPTY), @@ -467,3 +524,4 @@ def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, methods=methods, service_pb=service, ) + return self.services[address.proto] diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 48f3267e0856..c2ead49509f6 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -34,30 +34,78 @@ @dataclasses.dataclass(frozen=True) class Address: - package: Tuple[str] = dataclasses.field(default_factory=tuple) + name: str = '' module: str = '' + package: Tuple[str] = dataclasses.field(default_factory=tuple) parent: Tuple[str] = dataclasses.field(default_factory=tuple) - def __str__(self): - return '.'.join(tuple(self.package) + tuple(self.parent)) + def __str__(self) -> str: + 
"""Return the Python identifier for this type. + + Because we import modules as a whole, rather than individual + members from modules, this is consistently `module.Name`. + """ + # TODO(#34): Special cases are not special enough to break the rules. + # Allowing this temporarily because it will be fixed by + # refactoring proto generation and/or OperationType. + if self.package == ('google', 'api_core'): + return f'{self.module}.{self.name}' + if self.module: + return f'{self.module}_pb2.{self.name}' + return self.name + + @property + def proto(self) -> str: + """Return the proto selector for this type.""" + return '.'.join(self.package + self.parent + (self.name,)) + + @property + def proto_package(self) -> str: + """Return the proto package for this type.""" + return '.'.join(self.package) + + @property + def sphinx(self) -> str: + """Return the Sphinx identifier for this type.""" + if self.module: + return f'~.{self}' + return self.name def child(self, child_name: str) -> 'Address': - """Return a new Address with ``child_name`` appended to its parent. + """Return a new child of the current Address. Args: - child_name (str): The child name to be appended to ``parent``. - The period-separator is added automatically if ``parent`` - is non-empty. + child_name (str): The name of the child node. + This address' name is appended to ``parent``. Returns: ~.Address: The new address object. """ return type(self)( + name=child_name, module=self.module, package=self.package, - parent=self.parent + (child_name,), + parent=self.parent + (self.name,) if self.name else self.parent, ) + def rel(self, address: 'Address') -> str: + """Return an identifier for this type, relative to the given address. + + Similar to :meth:`__str__`, but accepts an address (expected to be the + module being written) and truncates the beginning module if the + address matches the identifier's address. Templates can use this in + situations where otherwise they would refer to themselves. 
+ + Args: + address (~.metadata.Address): The address to compare against. + + Returns: + str: The appropriate identifier. + """ + if self.package == address.package and self.module == address.module: + return self.name + return str(self) + def resolve(self, selector: str) -> str: """Resolve a potentially-relative protobuf selector. @@ -104,3 +152,20 @@ def doc(self): if self.documentation.leading_detached_comments: return '\n\n'.join(self.documentation.leading_detached_comments) return '' + + +@dataclasses.dataclass(frozen=True) +class FieldIdentifier: + ident: Address + repeated: bool + + def __str__(self) -> str: + if self.repeated: + return f'Sequence[{self.ident}]' + return str(self.ident) + + @property + def sphinx(self) -> str: + if self.repeated: + return f'Sequence[{self.ident.sphinx}]' + return self.ident.sphinx diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 0b501d1a121d..56a5cf274953 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -53,23 +53,25 @@ class Field: def __getattr__(self, name): return getattr(self.field_pb, name) + @utils.cached_property + def ident(self) -> metadata.FieldIdentifier: + """Return the identifier to be used in templates.""" + return metadata.FieldIdentifier( + ident=self.type.ident, + repeated=self.repeated, + ) + @property def is_primitive(self) -> bool: """Return True if the field is a primitive, False otherwise.""" return isinstance(self.type, PythonType) @property - def python_ident(self) -> str: - """Return the identifier to be used in templates. - - Because we import modules as a whole, rather than individual - members from modules, this is consistently `module.Name`. - - This property also adds the Sequence[] notation for repeated fields. 
- """ - if self.repeated: - return f'Sequence[{self.type.python_ident}]' - return self.type.python_ident + def proto_type(self) -> str: + """Return the proto type constant to be used in templates.""" + return descriptor_pb2.FieldDescriptorProto.Type.Name( + self.field_pb.type, + )[len('TYPE_'):] @property def repeated(self) -> bool: @@ -90,16 +92,6 @@ def required(self) -> bool: """ return bool(self.options.Extensions[annotations_pb2.required]) - @property - def sphinx_ident(self) -> str: - """Return the identifier to be used in templates for Sphinx. - - This property also adds the Sequence[] notation for repeated fields. - """ - if self.repeated: - return f'Sequence[{self.type.sphinx_ident}]' - return self.type.sphinx_ident - @utils.cached_property def type(self) -> Union['MessageType', 'EnumType', 'PythonType']: """Return the type of this field.""" @@ -140,6 +132,8 @@ class MessageType: """Description of a message (defined with the ``message`` keyword).""" message_pb: descriptor_pb2.DescriptorProto fields: Mapping[str, Field] + nested_enums: Mapping[str, 'EnumType'] + nested_messages: Mapping[str, 'MessageType'] meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) @@ -189,28 +183,9 @@ def get_field(self, *field_path: Sequence[str]) -> Field: return cursor.message.get_field(*field_path[1:]) @property - def proto_path(self) -> str: - """Return the fully qualfied proto path as a string.""" - return f'{str(self.meta.address)}.{self.name}' - - @property - def python_ident(self) -> str: - """Return the identifier to be used in templates. - - Because we import modules as a whole, rather than individual - members from modules, this is consistently `module.Name`. 
- """ - return f'{self.python_module}.{self.name}' - - @property - def python_module(self) -> str: - """Return the name of the Python pb2 module.""" - return f'{self.meta.address.module}_pb2' - - @property - def sphinx_ident(self) -> str: - """Return the identifier to be used in templates for Sphinx.""" - return f'~.{self.python_ident}' + def ident(self) -> metadata.Address: + """Return the identifier data to be used in templates.""" + return self.meta.address @dataclasses.dataclass(frozen=True) @@ -238,23 +213,9 @@ def __getattr__(self, name): return getattr(self.enum_pb, name) @property - def python_ident(self) -> str: - """Return the identifier to be used in templates. - - Because we import modules as a whole, rather than individual - members from modules, this is consistently `module.Name`. - """ - return f'{self.python_module}.{self.name}' - - @property - def python_module(self) -> str: - """Return the name of the Python pb2 module.""" - return f'{self.meta.address.module}_pb2' - - @property - def sphinx_ident(self) -> str: - """Return the identifier to be used in templates for Sphinx.""" - return f'~.{self.python_ident}' + def ident(self) -> metadata.Address: + """Return the identifier data to be used in templates.""" + return self.meta.address @dataclasses.dataclass(frozen=True) @@ -271,19 +232,14 @@ class PythonType: def name(self) -> str: return self.python_type.__name__ - @property - def python_ident(self) -> str: + @utils.cached_property + def ident(self) -> metadata.Address: """Return the identifier to be used in templates. Primitives have no import, and no module to reference, so this is simply the name of the class (e.g. "int", "str"). 
""" - return self.name - - @property - def sphinx_ident(self) -> str: - """Return the identifier to be used in templates for Sphinx.""" - return f'{self.python_ident}' + return metadata.Address(name=self.name) @dataclasses.dataclass(frozen=True) @@ -296,11 +252,16 @@ class OperationType: lro_response: MessageType lro_metadata: MessageType = None + @property + def ident(self) -> metadata.Address: + return self.meta.address + @utils.cached_property def meta(self) -> metadata.Metadata: """Return a Metadata object.""" return metadata.Metadata( address=metadata.Address( + name='Operation', module='operation', package=('google', 'api_core'), ), @@ -308,10 +269,9 @@ def meta(self) -> metadata.Metadata: leading_comments='An object representing a long-running ' 'operation. \n\n' 'The result type for the operation will be ' - ':class:`~.{module}.{name}`: {doc}'.format( + ':class:`{ident}`: {doc}'.format( doc=self.lro_response.meta.doc, - module=self.lro_response.python_module, - name=self.lro_response.name, + ident=self.lro_response.ident.sphinx, ), ), ) @@ -327,26 +287,6 @@ def name(self) -> str: # on google.api_core just to get these strings. return 'Operation' - @property - def python_ident(self) -> str: - """Return the identifier to be used in templates.""" - return f'{self.python_module}.{self.name}' - - @property - def python_module(self) -> str: - """Return the name of the Python module.""" - # This is always "operation", because it is always a reference to - # `google.api_core.operation.Operation`. - # - # This is hard-coded rather than subclassing PythonType (above) so - # that this generator is not forced to take an entire dependency - # on google.api_core just to get these strings. 
- return self.meta.address.module - - @property - def sphinx_ident(self) -> str: - return f'~.{self.python_ident}' - @dataclasses.dataclass(frozen=True) class Method: @@ -450,7 +390,7 @@ def single_dispatch(self) -> Tuple[MethodSignature]: answer = collections.OrderedDict() for sig in [i for i in self.all if isinstance(i.dispatch_field.type, PythonType)]: - answer.setdefault(sig.dispatch_field.python_ident, sig) + answer.setdefault(sig.dispatch_field.ident, sig) return tuple(answer.values()) @@ -513,12 +453,19 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: # Add the module containing both the request and response # messages. (These are usually the same, but not necessarily.) answer.add(( - '.'.join(method.input.meta.address.package), - method.input.python_module, + '.'.join(method.input.ident.package), + method.input.ident.module + '_pb2', )) answer.add(( - '.'.join(method.output.meta.address.package), - method.output.python_module, + '.'.join(method.output.ident.package), + # TODO(#34): This is obviously unacceptable and gross and + # generally vomit-inducing. + # + # I am not fixing this right now because *_pb2 + # is about to go away. + method.output.ident.module + '_pb2' + if not getattr(method.output, 'lro_response', None) + else method.output.ident.module, )) # If this method has flattening that is honored, add its @@ -530,21 +477,21 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: for field in sig.fields.values(): if not isinstance(field.type, PythonType): answer.add(( - '.'.join(field.type.meta.address.package), - field.type.python_module, + '.'.join(field.type.ident.package), + field.type.ident.module + '_pb2', )) # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. 
if getattr(method.output, 'lro_response', None): answer.add(( - '.'.join(method.output.lro_response.meta.address.package), - method.output.lro_response.python_module, + '.'.join(method.output.lro_response.ident.package), + method.output.lro_response.ident.module + '_pb2', )) if getattr(method.output, 'lro_metadata', None): answer.add(( - '.'.join(method.output.lro_metadata.meta.address.package), - method.output.lro_metadata.python_module, + '.'.join(method.output.lro_metadata.ident.package), + method.output.lro_metadata.ident.module + '_pb2', )) return tuple(sorted(answer)) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index 0f2b3f4ee697..f54460dae045 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -52,15 +52,15 @@ class {{ service.name }}: @dispatch {% endif -%} def {{ method.name|snake_case }}(self, - request: {{ method.input.python_ident }}, *, + request: {{ method.input.ident }}, *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.python_ident }}: + ) -> {{ method.output.ident }}: """{{ method.meta.doc|wrap(width=72, offset=11, indent=8) }} Args: - request ({{ method.input.sphinx_ident }}): + request ({{ method.input.ident.sphinx }}): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} retry (~.retry.Retry): Designation of what errors, if any, @@ -70,12 +70,12 @@ class {{ service.name }}: sent along with the request as metadata. Returns: - {{ method.output.sphinx_ident }}: + {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Coerce the request to the protocol buffer object. 
- if not isinstance(request, {{ method.input.python_ident }}): - request = {{ method.input.python_ident }}(**request) + if not isinstance(request, {{ method.input.ident }}): + request = {{ method.input.ident }}(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -107,9 +107,9 @@ class {{ service.name }}: response = operation.from_gapic( response, self._transport.operations_client, - {{ method.output.lro_response.python_ident }}, + {{ method.output.lro_response.ident }}, {%- if method.output.lro_metadata %} - metadata_type={{ method.output.lro_metadata.python_ident }}, + metadata_type={{ method.output.lro_metadata.ident }}, {%- endif %} ) {%- endif %} @@ -121,18 +121,18 @@ class {{ service.name }}: @{{ method.name|snake_case }}.register def _{{ method.name|snake_case }}_with_{{ signature.dispatch_field.name|snake_case }}(self, {%- for field in signature.fields.values() %} - {{ field.name }}: {{ field.python_ident }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, + {{ field.name }}: {{ field.ident }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, {%- endfor %} *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.python_ident }}: + ) -> {{ method.output.ident }}: """{{ method.meta.doc|wrap(width=72, offset=11, indent=8) }} Args: {%- for field in signature.fields.values() %} - {{ field.name }} ({{ field.sphinx_ident }}): + {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|wrap(width=72, indent=16) }} {%- endfor %} retry (~.retry.Retry): Designation of what errors, if any, @@ -142,11 +142,11 @@ class {{ service.name }}: sent alont with the request as metadata. 
Returns: - {{ method.output.sphinx_ident }}: + {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ return self.{{ method.name|snake_case }}( - {{ method.input.python_ident }}( + {{ method.input.ident }}( {%- for field in signature.fields.values() %} {{ field.name }}={{ field.name }}, {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 index 9d0d4f955f33..45dcc892c3c6 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 @@ -57,8 +57,8 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): @abc.abstractmethod def {{ method.name|snake_case }}( self, - request: {{ method.input.python_ident }}, - ) -> {{ method.output.python_ident }}: + request: {{ method.input.ident }}, + ) -> {{ method.output.ident }}: raise NotImplementedError {%- endfor %} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 index 185f3946c8cc..75dc1979f0c1 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 @@ -82,8 +82,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @property def {{ method.name|snake_case }}(self) -> Callable[ - [{{ method.input.python_ident }}], - {{ method.output.python_ident }}]: + [{{ method.input.ident }}], + {{ method.output.ident }}]: """Return a callable for the {{- ' ' -}} 
{{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} @@ -103,8 +103,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.python_module }}.{{ method.input.name }}.SerializeToString, - response_deserializer={{ method.output.python_module }}.{{ method.output.name }}.FromString, + request_serializer={{ method.input.ident }}.SerializeToString, + response_deserializer={{ method.output.ident }}.FromString, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 index c23bf4482e12..3853bca80033 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 @@ -65,23 +65,23 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): {%- for method in service.methods.values() %} def {{ method.name|snake_case }}(self, - request: {{ method.input.python_module }}.{{ method.input.name }}, *, + request: {{ method.input.ident }}, *, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.python_module }}.{{ method.output.name }}: + ) -> {{ method.output.ident }}: """Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} {{- ' ' -}} method over HTTP. Args: - request (~.{{ method.input.python_ident }}): + request (~.{{ method.input.ident }}): The request object. 
{{- ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. Returns: - ~.{{ method.output.python_ident }}: + ~.{{ method.output.ident }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Serialize the input. @@ -102,7 +102,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): ) # Return the response. - return {{ method.output.python_ident }}.FromString( + return {{ method.output.ident }}.FromString( response.content, ) {%- endfor %} diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index f459fd34444f..49fd584ee0a4 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -51,8 +51,7 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: # Re-wrapping causes these to be two spaces; correct for this. text = text.replace('\n ', '\n') - # If the initial width is different (in other words, the initial offset - # is non-zero), break off the beginning of the string. + # Break off the first line of the string to address non-zero offsets. first = '' if offset > 0: initial = textwrap.wrap(text, diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index a89719f0bd7c..6150e8f63af1 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -79,8 +79,12 @@ def test_get_response(): # Next, determine that the general API templates and service # templates were both called; the method should be called - # once per service plus one for the API as a whole. - assert _render_templates.call_count == len(file_pb2.service) + 1 + # once per service, once per proto, plus one for the API as a whole. 
+ assert _render_templates.call_count == sum([ + 1, # for the API as a whole + len(api_schema.services), + len(api_schema.protos), + ]) # The service templates should have been called with the # filename transformation and the additional `service` variable. @@ -92,6 +96,35 @@ def test_get_response(): assert isinstance(service, wrappers.Service) +def test_get_response_skipped_proto(): + # Create a generator with mock data. + # + # We want to ensure that templates are rendered for each service, + # which we prove by sending two services. + file_pb2 = descriptor_pb2.FileDescriptorProto( + name='bacon.proto', + package='foo.bar.v1', + ) + api_schema = make_api(make_proto(file_pb2, file_to_generate=False)) + g = generator.Generator(api_schema=api_schema) + + # Mock all the rendering methods. + with mock.patch.object(g, '_render_templates') as _render_templates: + _render_templates.return_value = { + 'template_file': plugin_pb2.CodeGeneratorResponse.File( + name='template_file', + content='This was a template.', + ), + } + + # Okay, now run the `get_response` method. + g.get_response() + + # Since there are no protos and no services, only the rollup, API-wide + # call should have happened. 
+ assert _render_templates.call_count == 1 + + def test_render_templates(): g = generator.Generator(api_schema=make_api()) @@ -189,6 +222,23 @@ def test_get_filenames_with_service(): ) == ('spam/eggs/foo.py',) +def test_get_filenames_with_proto(): + file_pb2 = descriptor_pb2.FileDescriptorProto( + name='bacon.proto', + package='foo.bar.v1', + ) + api = make_api( + make_proto(file_pb2), + naming=make_naming(namespace=(), name='Spam', version='v2'), + ) + + g = generator.Generator(api_schema=api) + assert g._get_filenames( + '$name/types/$proto.py.j2', + context={'proto': api.protos['bacon.proto']}, + ) == ('spam/types/bacon.py',) + + def test_get_filenames_with_namespace_init(): g = generator.Generator(api_schema=make_api(naming=make_naming( namespace=('Foo', 'Bar', 'Baz'), diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 9bd5e418f481..3d5d45f6166e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -189,8 +189,8 @@ def test_messages_reverse_declaration_order(): def test_messages_recursive(): - # Test that if a message is used as a field higher in the same file, - # that things still work. + # Test that if a message is used inside itself, that things will still + # work. message_pbs = ( make_message_pb2(name='Foo', fields=( make_field_pb2(name='foo', number=1, @@ -212,6 +212,36 @@ def test_messages_recursive(): assert Foo.fields['foo'].message == proto.messages['google.example.v3.Foo'] +def test_messages_nested(): + # Test that a nested message works properly. + message_pbs = ( + make_message_pb2(name='Foo', nested_type=( + make_message_pb2(name='Bar'), + )), + ) + fdp = make_file_pb2( + messages=message_pbs, + package='google.example.v3', + ) + + # Make the proto object. + proto = api.Proto.build(fdp, file_to_generate=True) + + # Set short variables for the names. 
+ foo = 'google.example.v3.Foo' + bar = 'google.example.v3.Foo.Bar' + + # Get the message. + assert len(proto.messages) == 2 + assert proto.messages[foo].name == 'Foo' + assert proto.messages[bar].name == 'Bar' + + # Assert that the `top` shim only shows top-level messages. + assert len(proto.top.messages) == 1 + assert proto.top.messages[foo] is proto.messages[foo] + assert bar not in proto.top.messages + + def test_services(): L = descriptor_pb2.SourceCodeInfo.Location @@ -411,13 +441,13 @@ def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, source_code_info=descriptor_pb2.SourceCodeInfo(location=locations), ) - # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ - 'google/longrunning/operations.proto': lro_proto, - }) -def make_message_pb2(name: str, fields=()) -> descriptor_pb2.DescriptorProto: - return descriptor_pb2.DescriptorProto(name=name, field=fields) +def make_message_pb2( + name: str, + fields: tuple = (), + **kwargs + ) -> descriptor_pb2.DescriptorProto: + return descriptor_pb2.DescriptorProto(name=name, field=fields, **kwargs) def make_field_pb2(name: str, number: int, diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index eaa88224fe01..5363af96f5bf 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -20,28 +20,52 @@ def test_address_str_no_parent(): - addr = metadata.Address(package=('foo', 'bar'), module='baz') - assert str(addr) == 'foo.bar' + addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') + assert str(addr) == 'baz_pb2.Bacon' def test_address_str_parent(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', + addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon', parent=('spam', 'eggs')) - assert str(addr) == 'foo.bar.spam.eggs' + assert 
str(addr) == 'baz_pb2.Bacon' + + +def test_address_proto(): + addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') + assert addr.proto == 'foo.bar.Bacon' + assert addr.proto_package == 'foo.bar' -def test_address_child(): +def test_address_child_no_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') - child = addr.child('bacon') - assert child.parent == ('bacon',) - assert str(child) == 'foo.bar.bacon' - grandchild = child.child('ham') - assert grandchild.parent == ('bacon', 'ham') - assert str(grandchild) == 'foo.bar.bacon.ham' + child = addr.child('Bacon') + assert child.name == 'Bacon' + assert child.parent == () -def test_address_resolve(): +def test_address_child_with_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') + child = addr.child('Bacon') + grandchild = child.child('Ham') + assert grandchild.parent == ('Bacon',) + assert grandchild.name == 'Ham' + + +def test_address_rel(): + addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') + assert addr.rel( + metadata.Address(package=('foo', 'bar'), module='baz'), + ) == 'Bacon' + assert addr.rel( + metadata.Address(package=('foo', 'not_bar'), module='baz'), + ) == 'baz_pb2.Bacon' + assert addr.rel( + metadata.Address(package=('foo', 'bar'), module='not_baz'), + ) == 'baz_pb2.Bacon' + + +def test_address_resolve(): + addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Qux') assert addr.resolve('Bacon') == 'foo.bar.Bacon' assert addr.resolve('foo.bar.Bacon') == 'foo.bar.Bacon' assert addr.resolve('google.example.Bacon') == 'google.example.Bacon' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 34d7ae73f76b..96fec051d9a9 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -34,14 +34,10 @@ def 
test_enum_value_properties(): assert ev.name == expected -def test_enum_python_ident(): +def test_enum_ident(): message = make_enum('Baz', package='foo.v1', module='bar') - assert message.python_ident == 'bar_pb2.Baz' - - -def test_enum_sphinx_ident(): - message = make_enum('Baz', package='foo.v1', module='bar') - assert message.sphinx_ident == '~.bar_pb2.Baz' + assert str(message.ident) == 'bar_pb2.Baz' + assert message.ident.sphinx == '~.bar_pb2.Baz' def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', @@ -60,6 +56,7 @@ def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', values=[wrappers.EnumValueType(enum_value_pb=evpb) for evpb in enum_value_pbs], meta=meta or metadata.Metadata(address=metadata.Address( + name=name, package=tuple(package.split('.')), module=module, )), diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 4b819cafbc5a..777450cfa2e0 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -18,7 +18,6 @@ from google.protobuf import descriptor_pb2 from gapic.schema import wrappers -from gapic.schema.metadata import Address, Metadata def test_field_properties(): @@ -35,10 +34,18 @@ def test_field_is_primitive(): assert primitive_field.is_primitive +def test_field_proto_type(): + Type = descriptor_pb2.FieldDescriptorProto.Type + primitive_field = make_field(type=Type.Value('TYPE_INT32')) + assert primitive_field.proto_type == 'INT32' + + def test_field_not_primitive(): Type = descriptor_pb2.FieldDescriptorProto.Type message = wrappers.MessageType( fields={}, + nested_messages={}, + nested_enums={}, message_pb=descriptor_pb2.DescriptorProto(), ) non_primitive_field = make_field( @@ -49,17 +56,17 @@ def test_field_not_primitive(): assert not non_primitive_field.is_primitive -def test_python_ident(): +def 
test_ident(): Type = descriptor_pb2.FieldDescriptorProto.Type field = make_field(type=Type.Value('TYPE_BOOL')) - assert field.python_ident == 'bool' + assert str(field.ident) == 'bool' -def test_python_ident_repeated(): +def test_ident_repeated(): Type = descriptor_pb2.FieldDescriptorProto.Type REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') field = make_field(type=Type.Value('TYPE_BOOL'), label=REP) - assert field.python_ident == 'Sequence[bool]' + assert str(field.ident) == 'Sequence[bool]' def test_repeated(): @@ -85,17 +92,17 @@ def test_not_required(): assert not field.required -def test_sphinx_ident(): +def test_ident_sphinx(): Type = descriptor_pb2.FieldDescriptorProto.Type field = make_field(type=Type.Value('TYPE_BOOL')) - assert field.sphinx_ident == 'bool' + assert field.ident.sphinx == 'bool' -def test_sphinx_ident_repeated(): +def test_ident_sphinx_repeated(): Type = descriptor_pb2.FieldDescriptorProto.Type REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') field = make_field(type=Type.Value('TYPE_BOOL'), label=REP) - assert field.sphinx_ident == 'Sequence[bool]' + assert field.ident.sphinx == 'Sequence[bool]' def test_type_primitives(): @@ -111,6 +118,8 @@ def test_type_message(): T = descriptor_pb2.FieldDescriptorProto.Type message = wrappers.MessageType( fields={}, + nested_messages={}, + nested_enums={}, message_pb=descriptor_pb2.DescriptorProto(), ) field = make_field( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index aea25aaabb8f..1585304bc857 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -39,19 +39,10 @@ def test_message_docstring(): assert message.meta.doc == 'Lorem ipsum' -def test_message_python_package(): - message = make_message('Spam', module='eggs') - assert message.python_module 
== 'eggs_pb2' - - -def test_message_python_ident(): - message = make_message('Baz', package='foo.v1', module='bar') - assert message.python_ident == 'bar_pb2.Baz' - - -def test_message_sphinx_ident(): +def test_message_ident(): message = make_message('Baz', package='foo.v1', module='bar') - assert message.sphinx_ident == '~.bar_pb2.Baz' + assert str(message.ident) == 'bar_pb2.Baz' + assert message.ident.sphinx == '~.bar_pb2.Baz' def test_get_field(): @@ -102,7 +93,10 @@ def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', return wrappers.MessageType( message_pb=message_pb, fields=collections.OrderedDict((i.name, i) for i in fields), + nested_messages={}, + nested_enums={}, meta=meta or metadata.Metadata(address=metadata.Address( + name=name, package=tuple(package.split('.')), module=module, )), diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 4d42351a0ba8..1e1bc5d2835b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -30,9 +30,7 @@ def test_method_types(): package='foo.bar', module='bacon') assert method.name == 'DoSomething' assert method.input.name == 'Input' - assert method.input.python_module == 'baz_pb2' assert method.output.name == 'Output' - assert method.output.python_module == 'baz_pb2' def test_method_signature(): @@ -110,6 +108,7 @@ def make_method( input=input_message, output=output_message, meta=metadata.Metadata(address=metadata.Address( + name=name, package=package, module=module, )), @@ -125,8 +124,11 @@ def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', ) return wrappers.MessageType( message_pb=message_pb, + nested_messages={}, + nested_enums={}, fields=collections.OrderedDict((i.name, i) for i in fields), meta=metadata.Metadata(address=metadata.Address( + name=name, 
package=tuple(package.split('.')), module=module, )), diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py index 4d4723e59efe..bdb66bf5dc21 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py @@ -21,19 +21,26 @@ def test_operation(): lro_response = wrappers.MessageType( fields={}, + nested_messages={}, + nested_enums={}, message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'), ) operation = wrappers.OperationType(lro_response=lro_response) assert operation.name == 'Operation' - assert operation.python_ident == 'operation.Operation' - assert operation.sphinx_ident == '~.operation.Operation' + assert str(operation.ident) == 'operation.Operation' + assert operation.ident.sphinx == '~.operation.Operation' def test_operation_meta(): lro_response = wrappers.MessageType( fields={}, + nested_messages={}, + nested_enums={}, message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'), - meta=metadata.Metadata(address=metadata.Address(module='foo')), + meta=metadata.Metadata(address=metadata.Address( + name='LroResponse', + module='foo', + )), ) operation = wrappers.OperationType(lro_response=lro_response) assert 'representing a long-running operation' in operation.meta.doc diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index dea88b44e6b1..226c5a90e50c 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -185,8 +185,8 @@ def get_method(name: str, # Define a method descriptor. Set the field headers if appropriate. 
method_pb = descriptor_pb2.MethodDescriptorProto( name=name, - input_type=input_.proto_path, - output_type=output.proto_path, + input_type=input_.ident.proto, + output_type=output.ident.proto, ) if lro_response_type: output = wrappers.OperationType( @@ -227,9 +227,12 @@ def get_message(dot_path: str, *, field_pb=i, message=get_message(i.type_name) if i.type_name else None, ) for i in fields}, + nested_messages={}, + nested_enums={}, message_pb=descriptor_pb2.DescriptorProto(name=name, field=fields), meta=metadata.Metadata(address=metadata.Address( - package=pkg, + name=name, + package=tuple(pkg), module=module, )), ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py index a80a4f8641c0..0bb40affd445 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py @@ -53,6 +53,8 @@ def test_signatures_single_dispatch(): message=wrappers.MessageType( fields={}, message_pb=descriptor_pb2.DescriptorProto(name='Bacon'), + nested_enums={}, + nested_messages={}, ), name='bar', type=T.Value('TYPE_MESSAGE'), From 0790e4165581913d4c2344535ef28dad01013e3d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 29 Sep 2018 05:28:46 -0700 Subject: [PATCH 0029/1339] Update to the latest common protos. (#36) Update from [googleapis-common-protos](https://pypi.org/project/googleapis-common-protos) 1.6.0 beta 4 to 1.6.0 beta 6. 
--- packages/gapic-generator/gapic/schema/api.py | 8 ++++---- packages/gapic-generator/gapic/schema/naming.py | 2 +- packages/gapic-generator/setup.py | 2 +- .../gapic-generator/tests/unit/schema/test_api.py | 11 ++++++----- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 89d4a6fa78a1..b311cd1447fc 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -22,7 +22,7 @@ import sys from typing import Callable, List, Mapping, Sequence, Tuple -from google.longrunning import operations_pb2 +from google.api import annotations_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import metadata @@ -386,7 +386,7 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # Iterate over the methods and collect them into a dictionary. answer = collections.OrderedDict() for meth_pb, i in zip(methods, range(0, sys.maxsize)): - types = meth_pb.options.Extensions[operations_pb2.operation_types] + lro = meth_pb.options.Extensions[annotations_pb2.operation] # If the output type is google.longrunning.Operation, we use # a specialized object in its place. 
@@ -394,10 +394,10 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], if meth_pb.output_type.endswith('google.longrunning.Operation'): output_type = self._get_operation_type( response_type=self.all_messages[ - address.resolve(types.response) + address.resolve(lro.response_type) ], metadata_type=self.all_messages.get( - address.resolve(types.metadata), + address.resolve(lro.metadata_type), ), ) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 333c4eeae31a..cb291423fcba 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -120,7 +120,7 @@ def build(cls, name=meta.package_name or meta.product_name, namespace=tuple(meta.package_namespace), product_name=meta.product_name, - product_url=meta.product_url, + product_url=meta.product_uri, version='', ) if naming: diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index c71181a0597f..a05c284d363e 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -42,7 +42,7 @@ include_package_data=True, install_requires=( 'click >= 6.7', - 'googleapis-common-protos >= 1.6.0b4', + 'googleapis-common-protos >= 1.6.0b6', 'grpcio >= 1.9.1', 'jinja2 >= 2.10', 'protobuf >= 3.5.1', diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 3d5d45f6166e..ace9258a8da5 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -15,7 +15,8 @@ from typing import Sequence from unittest import mock -from google.longrunning import operations_pb2 +from google.api import annotations_pb2 +from google.api import longrunning_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import api @@ -361,10 +362,10 @@ def test_lro(): input_type='google.example.v3.AsyncDoThingRequest', 
output_type='google.longrunning.Operation', ) - method_pb2.options.Extensions[operations_pb2.operation_types].MergeFrom( - operations_pb2.OperationTypes( - response='google.example.v3.AsyncDoThingResponse', - metadata='google.example.v3.AsyncDoThingMetadata', + method_pb2.options.Extensions[annotations_pb2.operation].MergeFrom( + longrunning_pb2.OperationData( + response_type='google.example.v3.AsyncDoThingResponse', + metadata_type='google.example.v3.AsyncDoThingMetadata', ), ) From abf407d6c2a4794b7a4489892f8419d28d173887 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 08:25:28 -0700 Subject: [PATCH 0030/1339] Remove `subsequent_indent` from utils.__all__ (#38) --- packages/gapic-generator/gapic/utils/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index cd2201429d78..a64f87e2f73d 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -23,7 +23,6 @@ __all__ = ( 'cached_property', 'Placeholder', - 'subsequent_indent', 'to_snake_case', 'to_valid_filename', 'to_valid_module_name', From 41610fbb9ec05edcb94876d51ef5690ecd32b0c1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 08:28:50 -0700 Subject: [PATCH 0031/1339] Make `has_lro` use `getattr`, not `hasattr`. 
(#43) --- packages/gapic-generator/gapic/schema/wrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 56a5cf274953..9e3da1af4749 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -498,7 +498,7 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" - return any([hasattr(m.output, 'lro_response') + return any([getattr(m.output, 'lro_response', None) for m in self.methods.values()]) @property From 28f602d000a2ce9b3eda928a4b689c80314dcd07 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 08:30:42 -0700 Subject: [PATCH 0032/1339] Remove unused has_field_headers property. (#42) --- packages/gapic-generator/gapic/schema/wrappers.py | 5 ----- .../tests/unit/schema/wrappers/test_method.py | 15 ++++++++++++++- .../tests/unit/schema/wrappers/test_service.py | 11 ----------- 3 files changed, 14 insertions(+), 17 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 9e3da1af4749..f80147e58747 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -500,8 +500,3 @@ def has_lro(self) -> bool: """Return whether the service has a long-running method.""" return any([getattr(m.output, 'lro_response', None) for m in self.methods.values()]) - - @property - def has_field_headers(self) -> bool: - """Return whether the service has a method containing field headers.""" - return any([m.field_headers for m in self.methods.values()]) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 1e1bc5d2835b..3df431d7b8d3 100644 --- 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -16,6 +16,7 @@ from typing import Sequence from google.api import annotations_pb2 +from google.api import http_pb2 from google.api import signature_pb2 from google.protobuf import descriptor_pb2 @@ -60,11 +61,17 @@ def test_method_no_signature(): assert len(make_method('Ping').signatures) == 0 -def test_method_field_headers(): +def test_method_field_headers_none(): method = make_method('DoSomething') assert isinstance(method.field_headers, collections.Sequence) +def test_service_field_headers_present(): + http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') + service = make_method('DoSomething', http_rule=http_rule) + assert service.field_headers == ('parent',) + + def test_method_unary_unary(): method = make_method('F', client_streaming=False, server_streaming=False) assert method.grpc_stub_type == 'unary_unary' @@ -89,6 +96,7 @@ def make_method( name: str, input_message: wrappers.MessageType = None, output_message: wrappers.MessageType = None, package: str = 'foo.bar.v1', module: str = 'baz', + http_rule: http_pb2.HttpRule = None, **kwargs) -> wrappers.Method: # Use default input and output messages if they are not provided. input_message = input_message or make_message('MethodInput') @@ -102,6 +110,11 @@ def make_method( **kwargs ) + # If there is an HTTP rule, process it. + if http_rule: + ext_key = annotations_pb2.http + method_pb.options.Extensions[ext_key].MergeFrom(http_rule) + # Instantiate the wrapper class. 
return wrappers.Method( method_pb=method_pb, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 226c5a90e50c..62d7f597cdf5 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -107,17 +107,6 @@ def test_service_has_lro(): assert service.has_lro -def test_service_no_field_headers(): - service = make_service() - assert service.has_field_headers is False - - -def test_service_has_field_headers(): - http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') - service = make_service_with_method_options(http_rule=http_rule) - assert service.has_field_headers - - def test_module_name(): service = make_service(name='MyService') assert service.module_name == 'my_service' From 80790e7776a012940eda2de51dc1db7d8f1702ef Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 08:32:17 -0700 Subject: [PATCH 0033/1339] Update wrap documentation in narrative docs. (#39) --- packages/gapic-generator/docs/templates.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index d9008cf8c63f..d95b0d8ce976 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -74,8 +74,8 @@ These are: * ``snake_case`` (:meth:`~.utils.case.to_snake_case`): Converts a string in any sane case system to snake case. * ``wrap`` (:meth:`~.utils.lines.wrap`): Wraps arbitrary text. Keyword - arguments on this method such as ``subsequent_indent`` and ``initial_width`` - should make it relatively easy to take an arbitrary string and make it - wrap to 79 characters appropriately. 
+ arguments on this method such as ``offset`` and ``indent`` should make it + relatively easy to take an arbitrary string and make it wrap to 79 + characters appropriately. .. _Jinja: http://jinja.pocoo.org/docs/2.10/ From 53bfbca5be32f1a910e20006c325073d537c1ca5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 08:34:18 -0700 Subject: [PATCH 0034/1339] Update status doc. (#40) --- packages/gapic-generator/docs/status.rst | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/gapic-generator/docs/status.rst b/packages/gapic-generator/docs/status.rst index c577528b3c4a..808066b6707d 100644 --- a/packages/gapic-generator/docs/status.rst +++ b/packages/gapic-generator/docs/status.rst @@ -11,12 +11,11 @@ Nice things this client does: As this is experimental work, please note the following limitations: -- The output only works on Python 3.4 and above. +- The output only works on Python 3.5 and above. - The configuration annotations are experimental and provided in `an awkward location`_. - gRPC must be installed even if you are not using it (this is due to some minor issues in ``api-core``). -- Only unary calls are implemented at this point. - No support for samples yet. .. _an awkward location: https://github.com/googleapis/api-common-protos/blob/input-contract/google/api/ From d4d661a7ce4def978d3a7c06d69de69761c3661c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 08:36:24 -0700 Subject: [PATCH 0035/1339] Update location of templates in narrative docs. (#41) --- packages/gapic-generator/docs/reference/index.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/docs/reference/index.rst b/packages/gapic-generator/docs/reference/index.rst index a8b65b7ca489..bd785a32ef9e 100644 --- a/packages/gapic-generator/docs/reference/index.rst +++ b/packages/gapic-generator/docs/reference/index.rst @@ -19,7 +19,8 @@ It is split into three main sections: .. 
note:: - Templates are housed within the ``generator`` module's directory. + Templates are housed in the ``templates`` directory, which is a sibling + to the modules listed above. .. toctree:: :maxdepth: 4 From be26306f04b3338b312bccc2a37855805d97211a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 09:52:03 -0700 Subject: [PATCH 0036/1339] Write proto types. (#37) --- .../gapic/generator/formatter.py | 4 +- packages/gapic-generator/gapic/schema/api.py | 72 +++++++++++++---- .../gapic-generator/gapic/schema/metadata.py | 81 ++++++++++++++++--- .../gapic-generator/gapic/schema/naming.py | 24 +++--- .../gapic-generator/gapic/schema/wrappers.py | 34 ++------ .../templates/$namespace/$name/__init__.py.j2 | 4 +- .../services/$service/client.py.j2 | 6 ++ .../services/$service/transports/base.py.j2 | 6 ++ .../services/$service/transports/grpc.py.j2 | 10 ++- .../services/$service/transports/http.py.j2 | 6 ++ .../$name_$version/types/$proto.py.j2 | 33 ++++++++ .../$name_$version/types/_enum.py.j2 | 7 ++ .../$name_$version/types/_message.py.j2 | 27 +++++++ .../gapic/templates/setup.py.j2 | 4 +- packages/gapic-generator/nox.py | 2 +- .../tests/unit/generator/test_generator.py | 1 + .../tests/unit/schema/test_api.py | 33 +++++--- .../tests/unit/schema/test_metadata.py | 53 ++++++++++-- .../tests/unit/schema/wrappers/test_enums.py | 4 +- .../unit/schema/wrappers/test_message.py | 4 +- .../unit/schema/wrappers/test_operation.py | 2 +- .../unit/schema/wrappers/test_service.py | 24 +++--- 22 files changed, 332 insertions(+), 109 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 diff --git a/packages/gapic-generator/gapic/generator/formatter.py 
b/packages/gapic-generator/gapic/generator/formatter.py index ca9c3112c38a..e860e19a6869 100644 --- a/packages/gapic-generator/gapic/generator/formatter.py +++ b/packages/gapic-generator/gapic/generator/formatter.py @@ -32,10 +32,10 @@ def fix_whitespace(code: str) -> str: code = re.sub(r'[ ]+\n', '\n', code) # Ensure at most two blank lines before top level definitions. - code = re.sub(r'\s+\n\s*\n\s*\n(class|def|@|#)', r'\n\n\n\1', code) + code = re.sub(r'\s+\n\s*\n\s*\n(class|def|@|#|_)', r'\n\n\n\1', code) # Ensure at most one line before nested definitions. - code = re.sub(r'\s+\n\s*\n( )+(class|def|@|#)', r'\n\n\1\2', code) + code = re.sub(r'\s+\n\s*\n(( )+)(\w|_|@|#)', r'\n\n\1\3', code) # All files shall end in one and exactly one line break. return f'{code.rstrip()}\n' diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index b311cd1447fc..1c0139bc138b 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -26,8 +26,8 @@ from google.protobuf import descriptor_pb2 from gapic.schema import metadata -from gapic.schema import naming from gapic.schema import wrappers +from gapic.schema import naming as api_naming from gapic.utils import cached_property from gapic.utils import to_snake_case @@ -41,13 +41,17 @@ class Proto: messages: Mapping[str, wrappers.MessageType] enums: Mapping[str, wrappers.EnumType] file_to_generate: bool + meta: metadata.Metadata = dataclasses.field( + default_factory=metadata.Metadata, + ) def __getattr__(self, name: str): return getattr(self.file_pb2, name) @classmethod def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool, prior_protos: Mapping[str, 'Proto'] = None, + file_to_generate: bool, naming: api_naming.Naming, + prior_protos: Mapping[str, 'Proto'] = None, ) -> 'Proto': """Build and return a Proto instance. 
@@ -56,11 +60,14 @@ def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, object describing the proto file. file_to_generate (bool): Whether this is a file which is to be directly generated, or a dependency. + naming (~.Naming): The :class:`~.Naming` instance associated + with the API. prior_protos (~.Proto): Previous, already processed protos. These are needed to look up messages in imported protos. """ return _ProtoBuilder(file_descriptor, file_to_generate=file_to_generate, + naming=naming, prior_protos=prior_protos or {}, ).proto @@ -74,6 +81,36 @@ def module_name(self) -> str: """ return to_snake_case(self.name.split('/')[-1][:-len('.proto')]) + @cached_property + def python_modules(self) -> Sequence[Tuple[str, str]]: + """Return a sequence of Python modules, for import. + + The results of this method are in alphabetical order (by package, + then module), and do not contain duplicates. + + Returns: + Sequence[Tuple[str, str]]: The package and module pair, intended + for use in a ``from package import module`` type + of statement. + """ + answer = set() + for message in self.messages.values(): + for field in message.fields.values(): + # We only need to add imports for fields that + # are messages or enums. + if not field.message and not field.enum: + continue + + # Add the appropriate Python import for the field. + answer.add(field.type.ident.python_import) + + # We may have gotten an import for this proto. + # Obviously no Python module may import itself; get rid of that. + answer = answer.difference({self.meta.address.python_import}) + + # Done; return the sorted sequence. + return tuple(sorted(list(answer))) + @cached_property def top(self) -> 'Proto': """Return a proto shim which is only aware of top-level objects. 
@@ -90,6 +127,7 @@ def top(self) -> 'Proto': enums={k: v for k, v in self.enums.items() if not v.meta.address.parent}, file_to_generate=False, + meta=self.meta, ) @@ -105,7 +143,7 @@ class API: An instance of this object is made available to every template (as ``api``). """ - naming: naming.Naming + naming: api_naming.Naming protos: Mapping[str, Proto] @classmethod @@ -124,7 +162,7 @@ def build(cls, rather than explicit targets. """ # Save information about the overall naming for this API. - n = naming.Naming.build(*filter( + naming = api_naming.Naming.build(*filter( lambda fd: fd.package.startswith(package), file_descriptors, )) @@ -136,11 +174,12 @@ def build(cls, protos[fd.name] = _ProtoBuilder( file_descriptor=fd, file_to_generate=fd.package.startswith(package), + naming=naming, prior_protos=protos, ).proto # Done; return the API. - return cls(naming=n, protos=protos) + return cls(naming=naming, protos=protos) @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: @@ -182,6 +221,7 @@ class _ProtoBuilder: def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, file_to_generate: bool, + naming: api_naming.Naming, prior_protos: Mapping[str, Proto] = None): self.messages = {} self.enums = {} @@ -206,7 +246,8 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # We put this together by a baton pass of sorts: everything in # this file *starts with* this address, which is appended to # for each item as it is loaded. - address = metadata.Address( + self.address = metadata.Address( + api_naming=naming, module=file_descriptor.name.split('/')[-1][:-len('.proto')], package=tuple(file_descriptor.package.split('.')), ) @@ -220,9 +261,9 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # below is because `repeated DescriptorProto message_type = 4;` in # descriptor.proto itself). 
self._load_children(file_descriptor.enum_type, self._load_enum, - address=address, path=(5,)) + address=self.address, path=(5,)) self._load_children(file_descriptor.message_type, self._load_message, - address=address, path=(4,)) + address=self.address, path=(4,)) # Edge case: Protocol buffers is not particularly picky about # ordering, and it is possible that a message will have had a field @@ -246,7 +287,7 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # same files. if file_to_generate: self._load_children(file_descriptor.service, self._load_service, - address=address, path=(6,)) + address=self.address, path=(6,)) # TODO(lukesneeringer): oneofs are on path 7. @property @@ -258,6 +299,9 @@ def proto(self) -> Proto: file_to_generate=self.file_to_generate, messages=self.messages, services=self.services, + meta=metadata.Metadata( + address=self.address, + ), ) @cached_property @@ -358,7 +402,7 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], enum=self.all_enums.get(field_pb.type_name.lstrip('.')), message=self.all_messages.get(field_pb.type_name.lstrip('.')), meta=metadata.Metadata( - address=address, + address=address.child(field_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), ), ) @@ -406,7 +450,7 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], input=self.all_messages[meth_pb.input_type.lstrip('.')], method_pb=meth_pb, meta=metadata.Metadata( - address=address, + address=address.child(meth_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), ), output=output_type, @@ -421,7 +465,7 @@ def _load_message(self, path: Tuple[int], ) -> wrappers.MessageType: """Load message descriptions from DescriptorProtos.""" - address = address.child(message_pb.name) + address = address.child(message_pb.name, path) # Load all nested items. 
# @@ -476,7 +520,7 @@ def _load_enum(self, path: Tuple[int], ) -> wrappers.EnumType: """Load enum descriptions from EnumDescriptorProtos.""" - address = address.child(enum.name) + address = address.child(enum.name, path) # Put together wrapped objects for the enum values. values = [] @@ -506,7 +550,7 @@ def _load_service(self, path: Tuple[int], ) -> wrappers.Service: """Load comments for a service and its methods.""" - address = address.child(service.name) + address = address.child(service.name, path) # Put together a dictionary of the service's methods. methods = self._get_methods( diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index c2ead49509f6..5ca27a1d0dfc 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -27,17 +27,23 @@ """ import dataclasses -from typing import Tuple +from typing import Sequence, Tuple from google.protobuf import descriptor_pb2 +from gapic.schema import naming + @dataclasses.dataclass(frozen=True) class Address: name: str = '' module: str = '' + module_path: Tuple[int] = dataclasses.field(default_factory=tuple) package: Tuple[str] = dataclasses.field(default_factory=tuple) parent: Tuple[str] = dataclasses.field(default_factory=tuple) + api_naming: naming.Naming = dataclasses.field( + default_factory=naming.Naming, + ) def __str__(self) -> str: """Return the Python identifier for this type. @@ -45,14 +51,9 @@ def __str__(self) -> str: Because we import modules as a whole, rather than individual members from modules, this is consistently `module.Name`. """ - # TODO(#34): Special cases are not special enough to break the rules. - # Allowing this temporarily because it will be fixed by - # refactoring proto generation and/or OperationType. 
- if self.package == ('google', 'api_core'): - return f'{self.module}.{self.name}' if self.module: - return f'{self.module}_pb2.{self.name}' - return self.name + return '.'.join((self.module,) + self.parent + (self.name,)) + return '.'.join(self.parent + (self.name,)) @property def proto(self) -> str: @@ -64,6 +65,28 @@ def proto_package(self) -> str: """Return the proto package for this type.""" return '.'.join(self.package) + @property + def python_import(self) -> Tuple[Sequence[str], str]: + """Return the Python import for this type.""" + # If there is no naming object, this is a special case for operation. + # FIXME(#34): OperationType does not work well. Fix or expunge it. + if not self.api_naming: + return ('.'.join(self.package), self.module) + + # If this is part of the proto package that we are generating, + # rewrite the package to our structure. + if self.proto_package.startswith(self.api_naming.proto_package): + return ( + '.'.join(self.api_naming.module_namespace + ( + self.api_naming.versioned_module_name, + 'types', + )), + self.module, + ) + + # Return the standard import. + return ('.'.join(self.package), f'{self.module}_pb2') + @property def sphinx(self) -> str: """Return the Sphinx identifier for this type.""" @@ -71,7 +94,7 @@ def sphinx(self) -> str: return f'~.{self}' return self.name - def child(self, child_name: str) -> 'Address': + def child(self, child_name: str, path: Tuple[int]) -> 'Address': """Return a new child of the current Address. Args: @@ -82,8 +105,10 @@ def child(self, child_name: str) -> 'Address': ~.Address: The new address object. """ return type(self)( - name=child_name, + api_naming=self.api_naming, module=self.module, + module_path=self.module_path + path, + name=child_name, package=self.package, parent=self.parent + (self.name,) if self.name else self.parent, ) @@ -102,9 +127,41 @@ def rel(self, address: 'Address') -> str: Returns: str: The appropriate identifier. 
""" + # Is this referencing a message in the same proto file? if self.package == address.package and self.module == address.module: - return self.name - return str(self) + # It is possible that a field references a message that has + # not yet been declared. If so, send its name enclosed in quotes + # (a string) instead. + if (len(self.module_path) == len(address.module_path) and + self.module_path > address.module_path or + self == address): + return f"'{self.name}'" + + # Edge case: If two (or more) messages are nested under a common + # parent message, and one references another, then return that + # enclosed in quotes. + # + # The reason for this is that each nested class creates a new + # scope in Python, without reference to the parent class being + # created, so there is no way for one nested class to reference + # another at class instantiation time. + if (self.parent and address.parent and + self.parent[0] == address.parent[0]): + return f"'{'.'.join(self.parent)}.{self.name}'" + + # Edge case: Similar to above, if this is a message that is + # referencing a nested message that it contains, we need + # the message to be referenced relative to this message's + # namespace. + if self.parent and self.parent[0] == address.name: + return '.'.join(self.parent[1:] + (self.name,)) + + # This is a message in the same module, already declared. + # Send its name. + return '.'.join(self.parent + (self.name,)) + + # Return the usual `module.Name`. + return f'_.{str(self)}' def resolve(self, selector: str) -> str: """Resolve a potentially-relative protobuf selector. 
diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index cb291423fcba..6d36e465581d 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -15,7 +15,7 @@ import dataclasses import os import re -from typing import Iterable, Tuple +from typing import Iterable, Sequence, Tuple from google.api import annotations_pb2 from google.protobuf import descriptor_pb2 @@ -33,17 +33,18 @@ class Naming: An instance of this object is made available to every template (as ``api.naming``). """ - name: str - namespace: Tuple[str] - version: str - product_name: str - product_url: str + name: str = '' + namespace: Tuple[str] = dataclasses.field(default_factory=tuple) + version: str = '' + product_name: str = '' + product_url: str = '' + proto_package: str = '' @classmethod def build(cls, *file_descriptors: Iterable[descriptor_pb2.FileDescriptorProto] ) -> 'Naming': - """Return a full APINaming instance based on these file descriptors. + """Return a full Naming instance based on these file descriptors. This is pieced together from the proto package names as well as the ``google.api.metadata`` file annotation. This information may be @@ -86,7 +87,7 @@ def build(cls, # This code may look counter-intuitive (why not use ? to make it # optional), but the engine's greediness routine will decide that # the version is the name, which is not what we want. 
- version = r'\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta|test)[0-9]*)?)' + version = r'\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta)[0-9]+)?)' if re.search(version, root_package): pattern += version @@ -98,7 +99,7 @@ def build(cls, namespace=tuple([i.capitalize() for i in match['namespace'].split('.') if i]), product_name=match['name'].capitalize(), - product_url='', + proto_package=root_package, version=match.get('version', ''), ) @@ -162,6 +163,11 @@ def module_name(self) -> str: """Return the appropriate Python module name.""" return utils.to_valid_module_name(self.name) + @property + def module_namespace(self) -> Sequence[str]: + """Return the appropriate Python module namespace as a tuple.""" + return tuple(utils.to_valid_module_name(i) for i in self.namespace) + @property def namespace_packages(self) -> Tuple[str]: """Return the appropriate Python namespace packages.""" diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index f80147e58747..ce9b594d31be 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -444,7 +444,7 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: then module), and do not contain duplicates. Returns: - Sequence[str, str]: The package and module pair, intended + Sequence[Tuple[str, str]]: The package and module pair, intended for use in a ``from package import module`` type of statement. """ @@ -452,21 +452,8 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: for method in self.methods.values(): # Add the module containing both the request and response # messages. (These are usually the same, but not necessarily.) - answer.add(( - '.'.join(method.input.ident.package), - method.input.ident.module + '_pb2', - )) - answer.add(( - '.'.join(method.output.ident.package), - # TODO(#34): This is obviously unacceptable and gross and - # generally vomit-inducing. 
- # - # I am not fixing this right now because *_pb2 - # is about to go away. - method.output.ident.module + '_pb2' - if not getattr(method.output, 'lro_response', None) - else method.output.ident.module, - )) + answer.add(method.input.ident.python_import) + answer.add(method.output.ident.python_import) # If this method has flattening that is honored, add its # modules. @@ -476,23 +463,14 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: for sig in method.signatures.single_dispatch: for field in sig.fields.values(): if not isinstance(field.type, PythonType): - answer.add(( - '.'.join(field.type.ident.package), - field.type.ident.module + '_pb2', - )) + answer.add(field.type.ident.python_import) # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. if getattr(method.output, 'lro_response', None): - answer.add(( - '.'.join(method.output.lro_response.ident.package), - method.output.lro_response.ident.module + '_pb2', - )) + answer.add(method.output.lro_response.ident.python_import) if getattr(method.output, 'lro_metadata', None): - answer.add(( - '.'.join(method.output.lro_metadata.ident.package), - method.output.lro_metadata.ident.module + '_pb2', - )) + answer.add(method.output.lro_metadata.ident.python_import) return tuple(sorted(answer)) @property diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index b8ec42e4b82d..5146dcf5efd5 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -1,15 +1,15 @@ {% extends '_base.py.j2' %} {% block content %} -# Import each service from {{ api.naming.version }} into the unversioned namespace. 
+from ..{{ api.naming.versioned_module_name }} import types {% for service in api.services.values() -%} from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }} import {{ service.name }} {% endfor %} __all__ = ( - '{{ api.naming.version }}', {%- for service in api.services.values() %} '{{ service.name }}', {%- endfor %} + 'types', ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index f54460dae045..3079ec6c8761 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -10,6 +10,7 @@ from google.auth import credentials {% for package, python_module in service.python_modules -%} from {{ package }} import {{ python_module }} +{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} {% endfor %} from ...utils import dispatch from .transports import get_transport_class @@ -167,4 +168,9 @@ class {{ service.name }}: client library. 
""" return gapic_v1.client_info.ClientInfo() + + +__all__ = ( + '{{ service.name }}', +) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 index 45dcc892c3c6..df25c26fe41f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 @@ -12,6 +12,7 @@ from google.auth import credentials {% for package, python_module in service.python_modules -%} from {{ package }} import {{ python_module }} +{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} {% endfor %} class {{ service.name }}Transport(metaclass=abc.ABCMeta): @@ -61,4 +62,9 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): ) -> {{ method.output.ident }}: raise NotImplementedError {%- endfor %} + + +__all__ = ( + '{{ service.name }}Transport', +) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 index 75dc1979f0c1..a69e1454665c 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 @@ -13,6 +13,7 @@ import grpc {% for package, python_module in service.python_modules -%} from {{ package }} import {{ python_module }} +{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} {% endfor %} from .base import {{ service.name }}Transport @@ -103,9 +104,14 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ 
method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.ident }}.SerializeToString, - response_deserializer={{ method.output.ident }}.FromString, + request_serializer={{ method.input.ident }}.serialize, + response_deserializer={{ method.output.ident }}.deserialize, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} + + +__all__ = ( + '{{ service.name }}GrpcTransport', +) {%- endblock -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 index 3853bca80033..79b478210b26 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 @@ -10,6 +10,7 @@ from google.auth.transport.requests import AuthorizedSession {% for package, python_module in service.python_modules -%} from {{ package }} import {{ python_module }} +{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} {% endfor %} from .base import {{ service.name }}Transport @@ -106,4 +107,9 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): response.content, ) {%- endfor %} + + +__all__ = ( + '{{ service.name }}HttpTransport', +) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 new file mode 100644 index 000000000000..bc599962ad23 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 @@ -0,0 +1,33 @@ +{% 
extends "_base.py.j2" %} + +{% block content %} +class _: + # Note: This strange "import within the `_` class" is to avoid potential + # naming collisions; since we can not control the names chosen for + # fields, we do this instead. + import enum + import proto + + {% for package, python_module in proto.python_modules -%} + from {{ package }} import {{ python_module }} + {%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} + {% endfor %} + + +{% for enum in proto.top.enums.values() -%} + {% include '$namespace/$name_$version/types/_enum.py.j2' %} +{% endfor %} + +{% for message in proto.top.messages.values() -%} + {% include "$namespace/$name_$version/types/_message.py.j2" %} +{% endfor %} + +__all__ = ( + {%- for enum in proto.top.enums.values() %} + '{{ enum.name }}', + {%- endfor -%} + {%- for message in proto.top.messages.values() %} + '{{ message.name }}', + {%- endfor %} +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 new file mode 100644 index 000000000000..c8f416422ea0 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 @@ -0,0 +1,7 @@ +class {{ enum.name }}(_.enum.IntEnum): + """{{ enum.meta.doc|wrap(width=72, indent=4) }} + """ + {% for enum_value in enum.values -%} + {{ enum_value.name }} = {{ enum_value.number }} + {% endfor -%} +{{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 new file mode 100644 index 000000000000..33c7040abb11 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 @@ -0,0 +1,27 @@ +class {{ message.name }}(_.proto.Message): + """{{ message.meta.doc|wrap(width=72, indent=4) }} + """ + {# Iterate 
over nested enums. -#} + {% for enum in message.nested_enums.values() %}{% filter indent -%} + {% include '$namespace/$name_$version/types/_enum.py.j2' %} + {% endfilter %}{% endfor -%} + + {#- Iterate over nested messages. -#} + {% for submessage in message.nested_messages.values() -%} + {% with message = submessage %}{% filter indent %} + {%- include '$namespace/$name_$version/types/_message.py.j2' %} + {% endfilter %}{% endwith %} + {% endfor -%} + + {# Iterate over fields. -#} + {%- for field in message.fields.values() %} + {{ field.name }} = _.proto.{% if field.repeated %}Repeated{% endif %}Field(_.proto.{{ field.proto_type }}, number={{ field.number }} + {%- if field.enum or field.message %}, + {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, + {% endif %}) + """{{ field.meta.doc|wrap(width=72, offset=4, indent=4) }}""" + {% endfor %} + + class Meta: + package = '{{ message.ident.proto_package }}' +{{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index e1ad7b42c16a..5c74f400398e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -20,14 +20,14 @@ setuptools.setup( include_package_data=True, install_requires=( 'google-api-core >= 1.3.0, < 2.0.0dev', - 'googleapis-common-protos >= 1.6.0b4', + 'googleapis-common-protos >= 1.6.0b6', 'grpcio >= 1.10.0', + 'proto-plus >= 0.1.0a1', ), classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 6f7a71c6a53e..148f87945316 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -71,7 +71,7 @@ def 
showcase(session): # Write out a client library for Showcase. session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_out={tmp_dir}', f'--pyclient_out={tmp_dir}', + f'--pyclient_out={tmp_dir}', 'google/showcase/v1alpha1/showcase.proto', ) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 6150e8f63af1..7ef1aac0b67b 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -259,6 +259,7 @@ def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, prior_protos = prior_protos or {} return api._ProtoBuilder(file_pb, file_to_generate=file_to_generate, + naming=make_naming(), prior_protos=prior_protos, ).proto diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index ace9258a8da5..d63907434649 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -40,6 +40,7 @@ def test_api_build(): make_message_pb2(name='GetFooRequest', fields=( make_field_pb2(name='imported_message', number=1, type_name='.google.dep.ImportedMessage'), + make_field_pb2(name='primitive', number=2, type=1), )), make_message_pb2(name='GetFooResponse', fields=( make_field_pb2(name='foo', number=1, @@ -71,6 +72,9 @@ def test_api_build(): assert 'google.example.v1.GetFooResponse' in api_schema.messages assert 'google.example.v1.FooService' in api_schema.services assert len(api_schema.enums) == 0 + assert api_schema.protos['foo.proto'].python_modules == ( + ('google.dep', 'dep_pb2'), + ) def test_proto_build(): @@ -78,7 +82,7 @@ def test_proto_build(): name='my_proto_file.proto', package='google.example.v1', ) - proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, 
naming=make_naming()) assert isinstance(proto, api.Proto) @@ -98,7 +102,10 @@ def test_proto_builder_constructor(): # Test the load function. with mock.patch.object(api._ProtoBuilder, '_load_children') as lc: - pb = api._ProtoBuilder(fdp, file_to_generate=True) + pb = api._ProtoBuilder(fdp, + file_to_generate=True, + naming=make_naming(), + ) # There should be three total calls to load the different types # of children. @@ -129,7 +136,7 @@ def test_not_target_file(): fdp = make_file_pb2(messages=(message_pb,), services=(service_pb,)) # Actually make the proto object. - proto = api.Proto.build(fdp, file_to_generate=False) + proto = api.Proto.build(fdp, file_to_generate=False, naming=make_naming()) # The proto object should have the message, but no service. assert len(proto.messages) == 1 @@ -153,7 +160,7 @@ def test_messages(): ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) # Get the message. assert len(proto.messages) == 1 @@ -181,7 +188,7 @@ def test_messages_reverse_declaration_order(): ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) # Get the message. assert len(proto.messages) == 2 @@ -205,7 +212,7 @@ def test_messages_recursive(): ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) # Get the message. assert len(proto.messages) == 1 @@ -226,7 +233,7 @@ def test_messages_nested(): ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) # Set short variables for the names. foo = 'google.example.v3.Foo' @@ -279,7 +286,7 @@ def test_services(): ) # Make the proto object. 
- proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) # Establish that our data looks correct. assert len(proto.services) == 1 @@ -304,7 +311,7 @@ def test_prior_protos(): empty_proto = api.Proto.build(make_file_pb2( name='empty.proto', package='google.protobuf', messages=(make_message_pb2(name='Empty'),), - ), file_to_generate=False) + ), file_to_generate=False, naming=make_naming()) # Set up the service with an RPC. service_pb = descriptor_pb2.ServiceDescriptorProto( @@ -332,7 +339,7 @@ def test_prior_protos(): # Make the proto object. proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ 'google/protobuf/empty.proto': empty_proto, - }) + }, naming=make_naming()) # Establish that our data looks correct. assert len(proto.services) == 1 @@ -354,7 +361,7 @@ def test_lro(): lro_proto = api.Proto.build(make_file_pb2( name='operations.proto', package='google.longrunning', messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False) + ), file_to_generate=False, naming=make_naming()) # Set up a method with LRO annotations. method_pb2 = descriptor_pb2.MethodDescriptorProto( @@ -392,7 +399,7 @@ def test_lro(): # Make the proto object. proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ 'google/longrunning/operations.proto': lro_proto, - }) + }, naming=make_naming()) # Establish that our data looks correct. assert len(proto.services) == 1 @@ -412,7 +419,7 @@ def test_enums(): L(path=(5, 0, 2, 0), leading_comments='This is the zero value.'), L(path=(5, 0, 2, 1), leading_comments='This is the one value.'), )) - proto = api.Proto.build(fdp, file_to_generate=True) + proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) assert len(proto.enums) == 1 enum = proto.enums['google.enum.v1.Silly'] assert enum.meta.doc == 'This is the Silly enum.' 
diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 5363af96f5bf..f8d0ee1c388a 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -21,13 +21,13 @@ def test_address_str_no_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') - assert str(addr) == 'baz_pb2.Bacon' + assert str(addr) == 'baz.Bacon' def test_address_str_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon', parent=('spam', 'eggs')) - assert str(addr) == 'baz_pb2.Bacon' + assert str(addr) == 'baz.spam.eggs.Bacon' def test_address_proto(): @@ -38,17 +38,19 @@ def test_address_proto(): def test_address_child_no_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') - child = addr.child('Bacon') + child = addr.child('Bacon', path=(4, 0)) assert child.name == 'Bacon' assert child.parent == () + assert child.module_path == (4, 0) def test_address_child_with_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') - child = addr.child('Bacon') - grandchild = child.child('Ham') + child = addr.child('Bacon', path=(4, 0)) + grandchild = child.child('Ham', path=(2, 0)) assert grandchild.parent == ('Bacon',) assert grandchild.name == 'Ham' + assert grandchild.module_path == (4, 0, 2, 0) def test_address_rel(): @@ -56,12 +58,49 @@ def test_address_rel(): assert addr.rel( metadata.Address(package=('foo', 'bar'), module='baz'), ) == 'Bacon' + + +def test_address_rel_other(): + addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.rel( metadata.Address(package=('foo', 'not_bar'), module='baz'), - ) == 'baz_pb2.Bacon' + ) == '_.baz.Bacon' assert addr.rel( metadata.Address(package=('foo', 'bar'), module='not_baz'), - ) == 'baz_pb2.Bacon' + ) == '_.baz.Bacon' + + +def test_address_rel_later(): + addr = 
metadata.Address( + module='baz', module_path=(4, 1), + name='Bacon', package=('foo', 'bar'), + ) + other = metadata.Address( + module='baz', module_path=(4, 0), + name='Ham', package=('foo', 'bar'), + ) + assert addr.rel(other) == "'Bacon'" + + +def test_address_rel_nested_sibling(): + addr = metadata.Address( + module='baz', name='Bacon', + package=('foo', 'bar'), parent=('Spam',) + ) + other = metadata.Address( + module='baz', name='Ham', + package=('foo', 'bar'), parent=('Spam',) + ) + assert addr.rel(other) == "'Spam.Bacon'" + + +def test_address_rel_nested_parent(): + parent = metadata.Address(module='baz', name='Ham', package=('foo', 'bar')) + child = metadata.Address( + module='baz', name='Bacon', + package=('foo', 'bar'), parent=('Ham',) + ) + assert child.rel(parent) == 'Bacon' def test_address_resolve(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 96fec051d9a9..60339beaac8b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -36,8 +36,8 @@ def test_enum_value_properties(): def test_enum_ident(): message = make_enum('Baz', package='foo.v1', module='bar') - assert str(message.ident) == 'bar_pb2.Baz' - assert message.ident.sphinx == '~.bar_pb2.Baz' + assert str(message.ident) == 'bar.Baz' + assert message.ident.sphinx == '~.bar.Baz' def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 1585304bc857..a27ae5caf15a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -41,8 +41,8 @@ def test_message_docstring(): def test_message_ident(): message = make_message('Baz', 
package='foo.v1', module='bar') - assert str(message.ident) == 'bar_pb2.Baz' - assert message.ident.sphinx == '~.bar_pb2.Baz' + assert str(message.ident) == 'bar.Baz' + assert message.ident.sphinx == '~.bar.Baz' def test_get_field(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py index bdb66bf5dc21..7f3a79d1a8b4 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py @@ -44,4 +44,4 @@ def test_operation_meta(): ) operation = wrappers.OperationType(lro_response=lro_response) assert 'representing a long-running operation' in operation.meta.doc - assert ':class:`~.foo_pb2.LroResponse`' in operation.meta.doc + assert ':class:`~.foo.LroResponse`' in operation.meta.doc diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 62d7f597cdf5..b8cf693bcac3 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -57,20 +57,20 @@ def test_service_python_modules(): get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), )) assert service.python_modules == ( - ('a.b.v1', 'c_pb2'), - ('foo', 'bacon_pb2'), - ('foo', 'bar_pb2'), - ('foo', 'baz_pb2'), - ('x.y.v1', 'z_pb2'), + ('a.b.v1', 'c'), + ('foo', 'bacon'), + ('foo', 'bar'), + ('foo', 'baz'), + ('x.y.v1', 'z'), ) def test_service_python_modules_lro(): service = make_service_with_method_options() assert service.python_modules == ( - ('foo', 'bar_pb2'), - ('foo', 'baz_pb2'), - ('foo', 'qux_pb2'), + ('foo', 'bar'), + ('foo', 'baz'), + ('foo', 'qux'), ('google.api_core', 'operation'), ) @@ -89,10 +89,10 @@ def test_service_python_modules_signature(): ) # type=5 is int, so nothing is added. 
assert service.python_modules == ( - ('a.b.c', 'v2_pb2'), - ('foo', 'bar_pb2'), - ('foo', 'baz_pb2'), - ('foo', 'qux_pb2'), + ('a.b.c', 'v2'), + ('foo', 'bar'), + ('foo', 'baz'), + ('foo', 'qux'), ('google.api_core', 'operation'), ) From fa17e4d1cb60d7232eda4d7fa4111ebc2b316f1a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 10:53:39 -0700 Subject: [PATCH 0037/1339] Add sorting of imports. (#44) --- .../gapic/templates/$namespace/$name/__init__.py.j2 | 4 ++-- .../gapic/templates/$namespace/$name_$version/__init__.py.j2 | 4 ++-- .../templates/$namespace/$name_$version/types/$proto.py.j2 | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index 5146dcf5efd5..e398fae49bbf 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -2,12 +2,12 @@ {% block content %} from ..{{ api.naming.versioned_module_name }} import types -{% for service in api.services.values() -%} +{% for service in api.services.values()|sort(attribute='name') -%} from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }} import {{ service.name }} {% endfor %} __all__ = ( - {%- for service in api.services.values() %} + {%- for service in api.services.values()|sort(attribute='name') %} '{{ service.name }}', {%- endfor %} 'types', diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 index 23a768754161..230645e45f33 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 @@ -1,11 +1,11 @@ {% extends '_base.py.j2' %} {% block content %} -{% 
for service in api.services.values() -%} +{% for service in api.services.values()|sort(attribute='name') -%} from .services.{{ service.name|snake_case }} import {{ service.name }} {% endfor %} __all__ = ( - {%- for service in api.services.values() %} + {%- for service in api.services.values()|sort(attribute='name') %} '{{ service.name }}', {%- endfor %} ) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 index bc599962ad23..6fdce02e5166 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 @@ -8,7 +8,7 @@ class _: import enum import proto - {% for package, python_module in proto.python_modules -%} + {% for package, python_module in proto.python_modules|sort -%} from {{ package }} import {{ python_module }} {%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} {% endfor %} From f45bd6f4589d3330f2ea5697819a110e0fd9271a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 4 Oct 2018 13:01:28 -0700 Subject: [PATCH 0038/1339] Convert documentation to ReStructured Text (RST). 
(#45) --- packages/gapic-generator/.circleci/config.yml | 14 ++++- packages/gapic-generator/docs/templates.rst | 3 + .../gapic/generator/generator.py | 1 + .../services/$service/client.py.j2 | 7 +-- .../services/$service/transports/grpc.py.j2 | 6 +- .../services/$service/transports/http.py.j2 | 8 +-- .../$name_$version/types/_enum.py.j2 | 3 +- .../$name_$version/types/_message.py.j2 | 5 +- .../gapic-generator/gapic/utils/__init__.py | 2 + packages/gapic-generator/gapic/utils/lines.py | 35 +++++++++--- packages/gapic-generator/gapic/utils/rst.py | 56 +++++++++++++++++++ packages/gapic-generator/setup.py | 3 +- .../tests/unit/utils/test_lines.py | 4 ++ .../tests/unit/utils/test_rst.py | 41 ++++++++++++++ 14 files changed, 161 insertions(+), 27 deletions(-) create mode 100644 packages/gapic-generator/gapic/utils/rst.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_rst.py diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index d12e5dc5fd1c..2e92d0e3f89a 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -17,6 +17,11 @@ jobs: - image: 'python:3.6' steps: - checkout + - run: + name: Install pandoc + command: | + apt-get update + apt-get install -y pandoc - run: name: Install nox and codecov. command: | @@ -34,6 +39,11 @@ jobs: - image: 'python:3.7' steps: - checkout + - run: + name: Install pandoc + command: | + apt-get update + apt-get install -y pandoc - run: name: Install nox and codecov. command: | @@ -55,10 +65,10 @@ jobs: name: Install nox. command: pip install --pre nox-automation - run: - name: Install unzip. + name: Install pandoc and unzip. command: | apt-get update - apt-get install unzip + apt-get install -y pandoc unzip - run: name: Install protoc 3.6.1. 
command: | diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index d95b0d8ce976..ce8416ec4bf2 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -71,6 +71,9 @@ writing properly formatted templates. These are: +* ``rst`` (:meth:`~.utils.rst.rst`): Converts a string to ReStructured Text. + If the string appears not to be formatted (contains no obvious Markdown + syntax characters), then this method forwards to ``wrap``. * ``snake_case`` (:meth:`~.utils.case.to_snake_case`): Converts a string in any sane case system to snake case. * ``wrap`` (:meth:`~.utils.lines.wrap`): Wraps arbitrary text. Keyword diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index c504f07c0f0d..d9a9ff0dd995 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -60,6 +60,7 @@ def __init__(self, api_schema: api.API, *, ) # Add filters which templates require. 
+ self._env.filters['rst'] = utils.rst self._env.filters['snake_case'] = utils.to_snake_case self._env.filters['wrap'] = utils.wrap diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index 3079ec6c8761..13fbd5a3ce11 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -18,8 +18,7 @@ from .transports import {{ service.name }}Transport class {{ service.name }}: - """{{ service.meta.doc|wrap(width=72, offset=7, indent=4) }} - """ + """{{ service.meta.doc|rst(width=72, indent=4) }}""" def __init__(self, *, credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, @@ -58,7 +57,7 @@ class {{ service.name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - """{{ method.meta.doc|wrap(width=72, offset=11, indent=8) }} + """{{ method.meta.doc|rst(width=72, indent=8) }} Args: request ({{ method.input.ident.sphinx }}): @@ -129,7 +128,7 @@ class {{ service.name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - """{{ method.meta.doc|wrap(width=72, offset=11, indent=8) }} + """{{ method.meta.doc|rst(width=72, indent=8) }} Args: {%- for field in signature.fields.values() %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 index a69e1454665c..8d61b571b965 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 @@ -21,7 +21,7 @@ from .base import {{ service.name }}Transport class {{ service.name }}GrpcTransport({{ service.name }}Transport): """gRPC backend transport for {{ service.name }}. - {{ service.meta.doc|wrap(width=72, indent=4) }} + {{ service.meta.doc|rst(width=72, indent=4) }} This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -90,12 +90,12 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): width=70, offset=40, indent=8) }} {{- ' ' -}} method over gRPC. - {{ method.meta.doc|wrap(width=72, indent=8) }} + {{ method.meta.doc|rst(width=72, indent=8) }} Returns: Callable[[~.{{ method.input.name }}], ~.{{ method.output.name }}]: - {{ method.output.meta.doc|wrap(width=72, indent=16) }} + {{ method.output.meta.doc|rst(width=72, indent=16) }} """ # Generate a "stub function" on-the-fly which will actually make # the request. diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 index 79b478210b26..cab0107a355a 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 @@ -18,7 +18,7 @@ from .base import {{ service.name }}Transport class {{ service.name }}HttpTransport({{ service.name }}Transport): """HTTP backend transport for {{ service.name }}. 
- {{ service.meta.doc|wrap(width=72, indent=4) }} + {{ service.meta.doc|rst(width=72, indent=4) }} This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -76,14 +76,14 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): Args: request (~.{{ method.input.ident }}): - The request object. {{- ' ' -}} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + The request object. + {{ method.input.meta.doc|rst(width=72, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. Returns: ~.{{ method.output.ident }}: - {{ method.output.meta.doc|wrap(width=72, indent=16) }} + {{ method.output.meta.doc|rst(width=72, indent=16) }} """ # Serialize the input. data = request.SerializeToString() diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 index c8f416422ea0..6755ecfe3125 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 @@ -1,6 +1,5 @@ class {{ enum.name }}(_.enum.IntEnum): - """{{ enum.meta.doc|wrap(width=72, indent=4) }} - """ + """{{ enum.meta.doc|rst(width=72, indent=4) }}""" {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 index 33c7040abb11..fb8e3a875a77 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 @@ -1,6 +1,5 @@ class {{ message.name 
}}(_.proto.Message): - """{{ message.meta.doc|wrap(width=72, indent=4) }} - """ + """{{ message.meta.doc|rst(indent=4) }}""" {# Iterate over nested enums. -#} {% for enum in message.nested_enums.values() %}{% filter indent -%} {% include '$namespace/$name_$version/types/_enum.py.j2' %} @@ -19,7 +18,7 @@ class {{ message.name }}(_.proto.Message): {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) - """{{ field.meta.doc|wrap(width=72, offset=4, indent=4) }}""" + """{{ field.meta.doc|rst(indent=4) }}""" {% endfor %} class Meta: diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index a64f87e2f73d..cd575a1f78f3 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -18,11 +18,13 @@ from gapic.utils.filename import to_valid_module_name from gapic.utils.lines import wrap from gapic.utils.placeholder import Placeholder +from gapic.utils.rst import rst __all__ = ( 'cached_property', 'Placeholder', + 'rst', 'to_snake_case', 'to_valid_filename', 'to_valid_module_name', diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 49fd584ee0a4..8ea2a581af4b 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -52,23 +52,42 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: text = text.replace('\n ', '\n') # Break off the first line of the string to address non-zero offsets. - first = '' - if offset > 0: - initial = textwrap.wrap(text, + first = text.split('\n')[0] + '\n' + if len(first) > width - offset: + initial = textwrap.wrap(first, break_long_words=False, width=width - offset, ) + # Strip the first \n from the text so it is not misidentified as an + # intentionally short line below. 
+ text = text.replace('\n', ' ', 1) + + # Save the new `first` line. first = f'{initial[0]}\n' - text = ' '.join(initial[1:]) + text = text[len(first):].strip() + if not text: + return first.strip() + + # Tokenize the rest of the text to try to preserve line breaks + # that semantically matter. + tokens = [] + token = '' + for line in text.split('\n'): + token += line + '\n' + if len(line) < width * 0.75: + tokens.append(token) + token = '' + if token: + tokens.append(token) # Wrap the remainder of the string at the desired width. return '{first}{text}'.format( first=first, - text=textwrap.fill( + text='\n'.join([textwrap.fill( break_long_words=False, - initial_indent=' ' * indent if first else '', + initial_indent=' ' * indent, subsequent_indent=' ' * indent, - text=text, + text=token, width=width, - ), + ) for token in tokens]), ).rstrip('\n') diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py new file mode 100644 index 000000000000..3ce965092b6d --- /dev/null +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -0,0 +1,56 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + +import pypandoc + +from gapic.utils.lines import wrap + + +def rst(text, width=72, indent=0, source_format='commonmark'): + """Convert the given text to ReStructured Text. + + Args: + text (str): The text to convert. + width (int): The number of columns. + source_format (str): The source format. 
This is ``commonmark`` by + default, which is what is used by convention in protocol buffers. + + Returns: + str: The same text, in RST format. + """ + # Sanity check: If the text block does not appear to have any formatting, + # do not convert it. + # (This makes code generation significantly faster; calling out to pandoc + # is by far the most expensive thing we do.) + if not re.search(r'[|*`_[\]]', text): + answer = wrap(text, width=width, indent=indent, offset=indent + 3) + else: + # Convert from CommonMark to ReStructured Text. + answer = pypandoc.convert_text(text, 'rst', + format=source_format, + extra_args=['--columns=%d' % width], + ).strip().replace('\n', f"\n{' ' * indent}") + + # Add a newline to the end of the document if any line breaks are + # already present. + # + # This causes the closing """ to be on the subsequent line only when + # appropriate. + if '\n' in answer: + answer += '\n' + ' ' * indent + + # Done; return the answer. + return answer diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a05c284d363e..a597a074f210 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -43,9 +43,10 @@ install_requires=( 'click >= 6.7', 'googleapis-common-protos >= 1.6.0b6', - 'grpcio >= 1.9.1', + 'grpcio >= 1.10.0', 'jinja2 >= 2.10', 'protobuf >= 3.5.1', + 'pypandoc >= 1.4', ), extras_require={ ':python_version<"3.7"': ('dataclasses >= 0.4',), diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 24b732f31091..85e96b5d4204 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -46,3 +46,7 @@ def test_wrap_initial_offset(): def test_wrap_indent_short(): assert lines.wrap('foo bar', width=30, indent=10) == 'foo bar' + + +def test_wrap_short_line_preserved(): + assert lines.wrap('foo\nbar\nbaz', width=80) == 'foo\nbar\nbaz' diff 
--git a/packages/gapic-generator/tests/unit/utils/test_rst.py b/packages/gapic-generator/tests/unit/utils/test_rst.py new file mode 100644 index 000000000000..e7116d5e0f7c --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_rst.py @@ -0,0 +1,41 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock + +import pypandoc + +from gapic import utils + + +def test_rst_unformatted(): + with mock.patch.object(pypandoc, 'convert_text') as convert_text: + assert utils.rst('The hail in Wales') == 'The hail in Wales' + assert convert_text.call_count == 0 + + +def test_rst_formatted(): + with mock.patch.object(pypandoc, 'convert_text') as convert_text: + convert_text.side_effect = lambda *a, **kw: a[0].replace('`', '``') + assert utils.rst('The hail in `Wales`') == 'The hail in ``Wales``' + assert convert_text.call_count == 1 + assert convert_text.mock_calls[0][1][1] == 'rst' + assert convert_text.mock_calls[0][2]['format'] == 'commonmark' + + +def test_rst_add_newline(): + with mock.patch.object(pypandoc, 'convert_text') as convert_text: + s = 'The hail in Wales\nfalls mainly on the snails.' + assert utils.rst(s) == s + '\n' + assert convert_text.call_count == 0 From a82b52168d745535e696009c8cf9c5d2963be9f3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 9 Oct 2018 18:28:03 -0700 Subject: [PATCH 0039/1339] Remove dependency on gRPC. 
(#47) --- packages/gapic-generator/setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a597a074f210..efc6410f207b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -43,7 +43,6 @@ install_requires=( 'click >= 6.7', 'googleapis-common-protos >= 1.6.0b6', - 'grpcio >= 1.10.0', 'jinja2 >= 2.10', 'protobuf >= 3.5.1', 'pypandoc >= 1.4', From 467fe26e11f17c477d5b80a20def1242af2be7c5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 9 Oct 2018 19:52:13 -0700 Subject: [PATCH 0040/1339] Update docs. (#50) This commit updates the documentation to fix a few things that had drifted out of date. --- .../docs/api-configuration.rst | 18 +++------- .../gapic-generator/docs/getting-started.rst | 33 ++++++++----------- packages/gapic-generator/docs/installing.rst | 14 ++++++++ packages/gapic-generator/docs/templates.rst | 26 ++++++++++----- 4 files changed, 48 insertions(+), 43 deletions(-) diff --git a/packages/gapic-generator/docs/api-configuration.rst b/packages/gapic-generator/docs/api-configuration.rst index 44f9e7dec62f..dd8615b1eaeb 100644 --- a/packages/gapic-generator/docs/api-configuration.rst +++ b/packages/gapic-generator/docs/api-configuration.rst @@ -160,28 +160,18 @@ so that client libraries are able to deserialize the message. .. 
code-block:: protobuf - import "google/longrunning/operations.proto"; + import "google/api/annotations.proto"; package acme.anvils.v1; service AnvilService { rpc DeliverAnvil(DeliverAnvilRequest) returns (google.longrunning.Operation) { - option (google.longrunning.operation_types) = { - response: "acme.anvils.v1.DeliverAnvilResponse" - metadata: "acme.anvils.v1.DeliverAnvilMetadata" + option (google.api.operation) = { + response_type: "acme.anvils.v1.DeliverAnvilResponse" + metadata_type: "acme.anvils.v1.DeliverAnvilMetadata" }; } } -The ``response`` field here is mandatory; the ``metadata`` field is optional, -and ``google.longrunning.OperationMetadata`` is assumed if it is not set. - .. _google/longrunning/operations.proto: https://github.com/googleapis/api-common-protos/blob/input-contract/google/longrunning/operations.proto - - -Future Work -~~~~~~~~~~~ - -Support for other annotated behavior, such as overloads, samples, and -header values is a work in progress. diff --git a/packages/gapic-generator/docs/getting-started.rst b/packages/gapic-generator/docs/getting-started.rst index be788548ec70..8091ac0b58e9 100644 --- a/packages/gapic-generator/docs/getting-started.rst +++ b/packages/gapic-generator/docs/getting-started.rst @@ -95,12 +95,8 @@ Here is a test script: .. code-block:: python - # These are the compiled protocol buffer types generated by - # `protoc --python_out`. - from google.cloud.vision.v1 import image_annotator_pb2 as types - # This is the client library generated by this plugin. - from google.cloud.vision_v1 import image_annotator + from google.cloud import vision # Instantiate the client. # @@ -109,23 +105,20 @@ Here is a test script: # # If you wish, you can send `transport='grpc'` or `transport='http'` # to change which underlying transport layer is being used. - ia = image_annotator.ImageAnnotator() - - # Piece together the request object. 
- request = types.BatchAnnotateImagesRequest(requests=[ - types.AnnotateImageRequest( - features=[types.Feature( - type=types.Feature.Type.Value('LABEL_DETECTION'), - )], - image=types.Image(source=types.ImageSource( - image_uri='https://s3.amazonaws.com/cdn0.michiganbulb.com' - '/images/350/66623.jpg', - )), - ), - ]) + ia = vision.ImageAnnotator() # Send the request to the server and get the response. - response = ia.batch_annotate_images(request) + response = ia.batch_annotate_images({ + 'requests': [{ + 'features': [{ + 'type': vision.types.image_annotator.Feature.Type.LABEL_DETECTION + }], + 'image': {'source': { + 'image_uri': 'https://s3.amazonaws.com/cdn0.michiganbulb.com' + '/images/350/66623.jpg', + }}, + }], + }) print(response) diff --git a/packages/gapic-generator/docs/installing.rst b/packages/gapic-generator/docs/installing.rst index 09855186fde9..69786d2cf091 100644 --- a/packages/gapic-generator/docs/installing.rst +++ b/packages/gapic-generator/docs/installing.rst @@ -33,6 +33,20 @@ To ensure it is installed propertly: libprotoc 3.6.0 +pandoc +~~~~~~ + +This generator relies on `pandoc`_ to convert from Markdown (the *lingua +franca* for documentation in protocol buffers) into ReStructured Text (the +*lingua franca* for documentation in Python). + +Install this using an appropriate mechanism for your operating system. +Multiple installation paths are documented on the `pandoc installation page`_. + +.. _pandoc: https://pandoc.org/ +.. _pandoc installation page: https://pandoc.org/installing.html + + API Generator for Python ~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index ce8416ec4bf2..acd13d35ac28 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -51,17 +51,25 @@ the following string replacements applied: Context (Variables) ~~~~~~~~~~~~~~~~~~~ -Every template receives **one** variable, spelled ``api``. 
It is the +Every template receives one variable, spelled ``api``. It is the :class:`~.schema.api.API` object that was pieced together in the parsing step. -APIs can (and often do) have more than one service. Therefore, templates -with ``$service/`` in their name are a special case. These files are -rendered *once per service*, with the ``$service`` directory name changed to -the name of the service itself (in snake case, because this is Python). -Additionally, these templates receive two variables: the ``api`` variable -discussed above, as well as a variable spelled ``service``, which corresponds -to the :class:`~.schema.wrappers.Service` currently being iterated over. - +Most APIs also receive one additional variable depending on what piece of the +API structure is being iterated over: + + * **Services.** APIs can (and often do) have more than one service. + Therefore, templates with ``$service`` in their name are + rendered *once per service*, with the ``$service`` string changed to + the name of the service itself (in snake case, because this is Python). + These templates receive a ``service`` variable (an instance of + :class:`~.schema.wrappers.Service`) corresponding to the service currently + being iterated over. + * **Protos.** Similarly, APIs can (and often do) have more than one proto + file containing messages. Therefore, templates with ``$proto`` in their + name are rendered *once per proto*, with the ``$proto``string changed to + the name of the proto file. These templates receive a ``proto`` variable + (an instance of :class:`~.schema.api.Proto`) corresponding to the proto + currently being iterated over. Filters ~~~~~~~ From 07f7b53554ffcb7dfa1444e6cbd61858e17d79a5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 10 Oct 2018 12:50:22 -0700 Subject: [PATCH 0041/1339] Naming collision resolution. (#51) This adds a system for resolving naming conflicts between imported modules and field or method names. 
--- packages/gapic-generator/.flake8 | 2 +- packages/gapic-generator/gapic/schema/api.py | 61 +++++++--- packages/gapic-generator/gapic/schema/imp.py | 34 ++++++ .../gapic-generator/gapic/schema/metadata.py | 83 +++++++++++--- .../gapic-generator/gapic/schema/wrappers.py | 105 +++++++++++++----- .../services/$service/client.py.j2 | 31 +++--- .../services/$service/transports/base.py.j2 | 9 +- .../services/$service/transports/grpc.py.j2 | 13 +-- .../services/$service/transports/http.py.j2 | 15 ++- .../$name_$version/types/$proto.py.j2 | 29 ++--- .../$name_$version/types/_enum.py.j2 | 2 +- .../$name_$version/types/_message.py.j2 | 6 +- .../tests/unit/schema/test_api.py | 78 ++++++++++++- .../tests/unit/schema/test_imp.py | 43 +++++++ .../tests/unit/schema/test_metadata.py | 22 +++- .../unit/schema/wrappers/test_message.py | 48 +++++++- .../unit/schema/wrappers/test_service.py | 47 +++++--- 17 files changed, 497 insertions(+), 131 deletions(-) create mode 100644 packages/gapic-generator/gapic/schema/imp.py create mode 100644 packages/gapic-generator/tests/unit/schema/test_imp.py diff --git a/packages/gapic-generator/.flake8 b/packages/gapic-generator/.flake8 index 5ae57fde1d72..fe63398b556d 100644 --- a/packages/gapic-generator/.flake8 +++ b/packages/gapic-generator/.flake8 @@ -5,4 +5,4 @@ ignore = E123, E124 # Line over-indented for visual indent. # This works poorly with type annotations in method declarations. 
- E128 + E128, E131 diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 1c0139bc138b..b65d54cd9a2e 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -20,7 +20,8 @@ import collections import dataclasses import sys -from typing import Callable, List, Mapping, Sequence, Tuple +from itertools import chain +from typing import Callable, List, Mapping, Sequence, Set, Tuple from google.api import annotations_pb2 from google.protobuf import descriptor_pb2 @@ -81,6 +82,32 @@ def module_name(self) -> str: """ return to_snake_case(self.name.split('/')[-1][:-len('.proto')]) + @cached_property + def names(self) -> Set[str]: + """Return a set of names used by this proto. + + This is used for detecting naming collisions in the module names + used for imports. + """ + # Add names of all enums, messages, and fields. + answer = {e.name for e in self.enums.values()} + for message in self.messages.values(): + answer = answer.union({f.name for f in message.fields.values()}) + answer.add(message.name) + + # Identify any import module names where the same module name is used + # from distinct packages. + modules = {} + for t in chain(*[m.field_types for m in self.messages.values()]): + modules.setdefault(t.ident.module, set()) + modules[t.ident.module].add(t.ident.package) + for module_name, packages in modules.items(): + if len(packages) > 1: + answer.add(module_name) + + # Return the set of collision names. + return frozenset(answer) + @cached_property def python_modules(self) -> Sequence[Tuple[str, str]]: """Return a sequence of Python modules, for import. @@ -94,19 +121,14 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: of statement. """ answer = set() - for message in self.messages.values(): - for field in message.fields.values(): - # We only need to add imports for fields that - # are messages or enums. 
- if not field.message and not field.enum: - continue - - # Add the appropriate Python import for the field. - answer.add(field.type.ident.python_import) - - # We may have gotten an import for this proto. - # Obviously no Python module may import itself; get rid of that. - answer = answer.difference({self.meta.address.python_import}) + self_reference = self.meta.address.context(self).python_import + for t in chain(*[m.field_types for m in self.messages.values()]): + # Add the appropriate Python import for the field. + # Sanity check: We do make sure that we are not trying to have + # a module import itself. + imp = t.ident.context(self).python_import + if imp != self_reference: + answer.add(imp) # Done; return the sorted sequence. return tuple(sorted(list(answer))) @@ -130,6 +152,17 @@ def top(self) -> 'Proto': meta=self.meta, ) + def disambiguate(self, string: str) -> str: + """Return a disambiguated string for the context of this proto. + + This is used for avoiding naming collisions. Generally, this method + returns the same string, but it returns a modified version if + it will cause a naming collision with messages or fields in this proto. + """ + if string in self.names: + return self.disambiguate(f'_{string}') + return string + @dataclasses.dataclass(frozen=True) class API: diff --git a/packages/gapic-generator/gapic/schema/imp.py b/packages/gapic-generator/gapic/schema/imp.py new file mode 100644 index 000000000000..562cdfebed99 --- /dev/null +++ b/packages/gapic-generator/gapic/schema/imp.py @@ -0,0 +1,34 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses +from typing import Tuple + + +@dataclasses.dataclass(frozen=True, order=True) +class Import: + package: Tuple[str] + module: str + alias: str = '' + + def __eq__(self, other) -> bool: + return self.package == other.package and self.module == other.module + + def __str__(self) -> str: + answer = f'import {self.module}' + if self.package: + answer = f"from {'.'.join(self.package)} {answer}" + if self.alias: + answer += f' as {self.alias}' + return answer diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 5ca27a1d0dfc..2fb252190080 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -27,11 +27,13 @@ """ import dataclasses -from typing import Sequence, Tuple +from typing import Tuple, Set from google.protobuf import descriptor_pb2 +from gapic.schema import imp from gapic.schema import naming +from gapic.utils import cached_property @dataclasses.dataclass(frozen=True) @@ -44,6 +46,11 @@ class Address: api_naming: naming.Naming = dataclasses.field( default_factory=naming.Naming, ) + collisions: Set[str] = dataclasses.field(default_factory=frozenset) + + def __eq__(self, other) -> bool: + return all([getattr(self, i) == getattr(other, i) for i + in ('name', 'module', 'module_path', 'package', 'parent')]) def __str__(self) -> str: """Return the Python identifier for this type. @@ -52,9 +59,36 @@ def __str__(self) -> str: members from modules, this is consistently `module.Name`. 
""" if self.module: - return '.'.join((self.module,) + self.parent + (self.name,)) + # If collisions are registered and conflict with our module, + # use the module alias instead. + module_name = self.module + if self.module_alias: + module_name = self.module_alias + + # Return the dot-separated Python identifier. + return '.'.join((module_name,) + self.parent + (self.name,)) + + # Return the Python identifier for this module-less identifier. return '.'.join(self.parent + (self.name,)) + @property + def module_alias(self) -> str: + """Return an appropriate module alias if necessary. + + If the module name is not a collision, return empty string. + + This method provides a mechanism for resolving naming conflicts, + while still providing names that are fundamentally readable + to users (albeit looking auto-generated). + """ + if self.module in self.collisions: + return '_'.join(( + ''.join([i[0] for i in self.package + if i != self.api_naming.version]), + self.module, + )) + return '' + @property def proto(self) -> str: """Return the proto selector for this type.""" @@ -65,27 +99,36 @@ def proto_package(self) -> str: """Return the proto package for this type.""" return '.'.join(self.package) - @property - def python_import(self) -> Tuple[Sequence[str], str]: + @cached_property + def python_import(self) -> imp.Import: """Return the Python import for this type.""" # If there is no naming object, this is a special case for operation. # FIXME(#34): OperationType does not work well. Fix or expunge it. if not self.api_naming: - return ('.'.join(self.package), self.module) + return imp.Import( + package=self.package, + module=self.module, + alias=self.module_alias, + ) # If this is part of the proto package that we are generating, # rewrite the package to our structure. 
if self.proto_package.startswith(self.api_naming.proto_package): - return ( - '.'.join(self.api_naming.module_namespace + ( + return imp.Import( + package=self.api_naming.module_namespace + ( self.api_naming.versioned_module_name, 'types', - )), - self.module, + ), + module=self.module, + alias=self.module_alias, ) # Return the standard import. - return ('.'.join(self.package), f'{self.module}_pb2') + return imp.Import( + package=self.package, + module=f'{self.module}_pb2', + alias=self.module_alias if self.module_alias else self.module, + ) @property def sphinx(self) -> str: @@ -104,15 +147,21 @@ def child(self, child_name: str, path: Tuple[int]) -> 'Address': Returns: ~.Address: The new address object. """ - return type(self)( - api_naming=self.api_naming, - module=self.module, + return dataclasses.replace(self, module_path=self.module_path + path, name=child_name, - package=self.package, parent=self.parent + (self.name,) if self.name else self.parent, ) + def context(self, context) -> 'Address': + """Return a derivative of this address with the provided context. + + This method is used to address naming collisions. The returned + ``Address`` object aliases module names to avoid naming collisions in + the file being written. + """ + return dataclasses.replace(self, collisions=frozenset(context.names)) + def rel(self, address: 'Address') -> str: """Return an identifier for this type, relative to the given address. @@ -161,7 +210,7 @@ def rel(self, address: 'Address') -> str: return '.'.join(self.parent + (self.name,)) # Return the usual `module.Name`. - return f'_.{str(self)}' + return str(self) def resolve(self, selector: str) -> str: """Resolve a potentially-relative protobuf selector. @@ -226,3 +275,7 @@ def sphinx(self) -> str: if self.repeated: return f'Sequence[{self.ident.sphinx}]' return self.ident.sphinx + + def context(self, arg) -> 'FieldIdentifier': + """Return self. 
Provided for compatibility with Address.""" + return self diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index ce9b594d31be..11e90d34d8e9 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -30,13 +30,15 @@ import collections import dataclasses import re -from typing import Iterable, List, Mapping, Sequence, Tuple, Union +from itertools import chain +from typing import Iterable, List, Mapping, Sequence, Set, Tuple, Union from google.api import annotations_pb2 from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from gapic import utils +from gapic.schema import imp from gapic.schema import metadata @@ -141,6 +143,15 @@ class MessageType: def __getattr__(self, name): return getattr(self.message_pb, name) + @utils.cached_property + def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: + """Return all composite fields used in this proto's messages.""" + answer = [] + for field in self.fields.values(): + if field.message or field.enum: + answer.append(field.type) + return tuple(answer) + def get_field(self, *field_path: Sequence[str]) -> Field: """Return a field arbitrarily deep in this message's structure. @@ -317,6 +328,30 @@ def grpc_stub_type(self) -> str: server='stream' if self.server_streaming else 'unary', ) + @utils.cached_property + def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: + """Return types referenced by this method.""" + # Begin with the input (request) and output (response) messages. + answer = [self.input, self.output] + + # If this method has flattening that is honored, add its + # composite types. + # + # This entails adding the module for any field on the signature + # unless the field is a primitive. 
+ for sig in self.signatures.single_dispatch: + answer += sig.composite_types + + # If this method has LRO, it is possible (albeit unlikely) that + # the LRO messages reside in a different module. + if getattr(self.output, 'lro_response', None): + answer.append(self.output.lro_response) + if getattr(self.output, 'lro_metadata', None): + answer.append(self.output.lro_metadata) + + # Done; return the answer. + return tuple(answer) + @utils.cached_property def signatures(self) -> Tuple[signature_pb2.MethodSignature]: """Return the signature defined for this method.""" @@ -359,6 +394,15 @@ def dispatch_field(self) -> Union[MessageType, EnumType, PythonType]: This is what is used for `functools.singledispatch`.""" return next(iter(self.fields.values())) + @utils.cached_property + def composite_types(self) -> Sequence[Union[MessageType, EnumType]]: + """Return all composite types used in this signature.""" + answer = [] + for field in self.fields.values(): + if field.message or field.enum: + answer.append(field.type) + return answer + @dataclasses.dataclass(frozen=True) class MethodSignatures: @@ -436,42 +480,47 @@ def module_name(self) -> str: """ return utils.to_snake_case(self.name) - @property - def python_modules(self) -> Sequence[Tuple[str, str]]: + @utils.cached_property + def names(self) -> Set[str]: + """Return a set of names used in this service. + + This is used for detecting naming collisions in the module names + used for imports. + """ + # Put together a set of the service and method names. + answer = {self.name}.union( + {utils.to_snake_case(i.name) for i in self.methods.values()} + ) + + # Identify any import module names where the same module name is used + # from distinct packages. 
+ modules = {} + for t in chain(*[m.ref_types for m in self.methods.values()]): + modules.setdefault(t.ident.module, set()) + modules[t.ident.module].add(t.ident.package) + for module_name, packages in modules.items(): + if len(packages) > 1: + answer.add(module_name) + + # Done; return the answer. + return frozenset(answer) + + @utils.cached_property + def python_modules(self) -> Sequence[imp.Import]: """Return a sequence of Python modules, for import. The results of this method are in alphabetical order (by package, then module), and do not contain duplicates. Returns: - Sequence[Tuple[str, str]]: The package and module pair, intended - for use in a ``from package import module`` type - of statement. + Sequence[~.imp.Import]: The package and module, intended for + use in templates. """ answer = set() for method in self.methods.values(): - # Add the module containing both the request and response - # messages. (These are usually the same, but not necessarily.) - answer.add(method.input.ident.python_import) - answer.add(method.output.ident.python_import) - - # If this method has flattening that is honored, add its - # modules. - # - # This entails adding the module for any field on the signature - # unless the field is a primitive. - for sig in method.signatures.single_dispatch: - for field in sig.fields.values(): - if not isinstance(field.type, PythonType): - answer.add(field.type.ident.python_import) - - # If this method has LRO, it is possible (albeit unlikely) that - # the LRO messages reside in a different module. 
- if getattr(method.output, 'lro_response', None): - answer.add(method.output.lro_response.ident.python_import) - if getattr(method.output, 'lro_metadata', None): - answer.add(method.output.lro_metadata.ident.python_import) - return tuple(sorted(answer)) + for t in method.ref_types: + answer.add(t.ident.context(self).python_import) + return tuple(sorted(list(answer))) @property def has_lro(self) -> bool: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index 13fbd5a3ce11..a1f549d224c2 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -8,9 +8,8 @@ from google.api_core import gapic_v1 from google.api_core import retry from google.auth import credentials -{% for package, python_module in service.python_modules -%} -from {{ package }} import {{ python_module }} -{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} +{% for import_ in service.python_modules -%} +{{ import_ }} {% endfor %} from ...utils import dispatch from .transports import get_transport_class @@ -52,15 +51,15 @@ class {{ service.name }}: @dispatch {% endif -%} def {{ method.name|snake_case }}(self, - request: {{ method.input.ident }}, *, + request: {{ method.input.ident.context(service) }}, *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: + ) -> {{ method.output.ident.context(service) }}: """{{ method.meta.doc|rst(width=72, indent=8) }} Args: - request ({{ method.input.ident.sphinx }}): + request ({{ method.input.ident.context(service).sphinx }}): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} retry (~.retry.Retry): 
Designation of what errors, if any, @@ -70,12 +69,12 @@ class {{ service.name }}: sent along with the request as metadata. Returns: - {{ method.output.ident.sphinx }}: + {{ method.output.ident.context(service).sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Coerce the request to the protocol buffer object. - if not isinstance(request, {{ method.input.ident }}): - request = {{ method.input.ident }}(**request) + if not isinstance(request, {{ method.input.ident.context(service) }}): + request = {{ method.input.ident.context(service) }}(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -107,9 +106,9 @@ class {{ service.name }}: response = operation.from_gapic( response, self._transport.operations_client, - {{ method.output.lro_response.ident }}, + {{ method.output.lro_response.ident.context(service) }}, {%- if method.output.lro_metadata %} - metadata_type={{ method.output.lro_metadata.ident }}, + metadata_type={{ method.output.lro_metadata.ident.context(service) }}, {%- endif %} ) {%- endif %} @@ -121,18 +120,18 @@ class {{ service.name }}: @{{ method.name|snake_case }}.register def _{{ method.name|snake_case }}_with_{{ signature.dispatch_field.name|snake_case }}(self, {%- for field in signature.fields.values() %} - {{ field.name }}: {{ field.ident }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, + {{ field.name }}: {{ field.ident.context(service) }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, {%- endfor %} *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: + ) -> {{ method.output.ident.context(service) }}: """{{ method.meta.doc|rst(width=72, indent=8) }} Args: {%- for field in signature.fields.values() %} - {{ field.name }} ({{ field.ident.sphinx }}): + {{ field.name }} ({{ field.ident.context(service).sphinx }}): {{ field.meta.doc|wrap(width=72, indent=16) }} {%- 
endfor %} retry (~.retry.Retry): Designation of what errors, if any, @@ -142,11 +141,11 @@ class {{ service.name }}: sent alont with the request as metadata. Returns: - {{ method.output.ident.sphinx }}: + {{ method.output.ident.context(service).sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ return self.{{ method.name|snake_case }}( - {{ method.input.ident }}( + {{ method.input.ident.context(service) }}( {%- for field in signature.fields.values() %} {{ field.name }}={{ field.name }}, {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 index df25c26fe41f..72f0ba6862bb 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 @@ -10,9 +10,8 @@ from google.api_core import operations_v1 {%- endif %} from google.auth import credentials -{% for package, python_module in service.python_modules -%} -from {{ package }} import {{ python_module }} -{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} +{% for import_ in service.python_modules -%} +{{ import_ }} {% endfor %} class {{ service.name }}Transport(metaclass=abc.ABCMeta): @@ -58,8 +57,8 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): @abc.abstractmethod def {{ method.name|snake_case }}( self, - request: {{ method.input.ident }}, - ) -> {{ method.output.ident }}: + request: {{ method.input.ident.context(service) }}, + ) -> {{ method.output.ident.context(service) }}: raise NotImplementedError {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 
b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 index 8d61b571b965..c3e6715fdb18 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 @@ -11,9 +11,8 @@ from google.auth import credentials import grpc -{% for package, python_module in service.python_modules -%} -from {{ package }} import {{ python_module }} -{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} +{% for import_ in service.python_modules -%} +{{ import_ }} {% endfor %} from .base import {{ service.name }}Transport @@ -83,8 +82,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @property def {{ method.name|snake_case }}(self) -> Callable[ - [{{ method.input.ident }}], - {{ method.output.ident }}]: + [{{ method.input.ident.context(service) }}], + {{ method.output.ident.context(service) }}]: """Return a callable for the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} @@ -104,8 +103,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.ident }}.serialize, - response_deserializer={{ method.output.ident }}.deserialize, + request_serializer={{ method.input.ident.context(service) }}.serialize, + response_deserializer={{ method.output.ident.context(service) }}.deserialize, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 
b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 index cab0107a355a..e67928baef89 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 @@ -8,9 +8,8 @@ from google.api_core import operations_v1 from google.auth import credentials from google.auth.transport.requests import AuthorizedSession -{% for package, python_module in service.python_modules -%} -from {{ package }} import {{ python_module }} -{%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} +{% for import_ in service.python_modules -%} +{{ import_ }} {% endfor %} from .base import {{ service.name }}Transport @@ -66,23 +65,23 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): {%- for method in service.methods.values() %} def {{ method.name|snake_case }}(self, - request: {{ method.input.ident }}, *, + request: {{ method.input.ident.context(service) }}, *, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: + ) -> {{ method.output.ident.context(service) }}: """Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} {{- ' ' -}} method over HTTP. Args: - request (~.{{ method.input.ident }}): + request (~.{{ method.input.ident.context(service) }}): The request object. {{ method.input.meta.doc|rst(width=72, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. Returns: - ~.{{ method.output.ident }}: + ~.{{ method.output.ident.context(service) }}: {{ method.output.meta.doc|rst(width=72, indent=16) }} """ # Serialize the input. @@ -103,7 +102,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): ) # Return the response. 
- return {{ method.output.ident }}.FromString( + return {{ method.output.ident.context(service) }}.FromString( response.content, ) {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 index 6fdce02e5166..5bbc2a60dfeb 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 @@ -1,25 +1,26 @@ {% extends "_base.py.j2" %} -{% block content %} -class _: - # Note: This strange "import within the `_` class" is to avoid potential - # naming collisions; since we can not control the names chosen for - # fields, we do this instead. - import enum - import proto +{% block content -%} +{% with e = proto.disambiguate('enum'), p = proto.disambiguate('proto') %} +{% if proto.enums|length -%} +import enum{% if e != 'enum' %} as {{ e }}{% endif %} - {% for package, python_module in proto.python_modules|sort -%} - from {{ package }} import {{ python_module }} - {%- if python_module.endswith('_pb2') %} as {{ python_module[:-4] }}{% endif %} - {% endfor %} +{% endif -%} +{% if proto.messages|length -%} +import proto{% if p != 'proto' %} as {{ p }}{% endif -%} +{% endif %} + +{% for import_ in proto.python_modules -%} +{{ import_ }} +{% endfor %} {% for enum in proto.top.enums.values() -%} - {% include '$namespace/$name_$version/types/_enum.py.j2' %} + {% include '$namespace/$name_$version/types/_enum.py.j2' with context %} {% endfor %} {% for message in proto.top.messages.values() -%} - {% include "$namespace/$name_$version/types/_message.py.j2" %} + {% include "$namespace/$name_$version/types/_message.py.j2" with context %} {% endfor %} __all__ = ( @@ -30,4 +31,4 @@ __all__ = ( '{{ message.name }}', {%- endfor %} ) -{% endblock %} +{% endwith %}{% endblock %} diff --git 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 index 6755ecfe3125..e4c9f98eda3e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 @@ -1,4 +1,4 @@ -class {{ enum.name }}(_.enum.IntEnum): +class {{ enum.name }}({{ e }}.IntEnum): """{{ enum.meta.doc|rst(width=72, indent=4) }}""" {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 index fb8e3a875a77..f3350beea392 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 @@ -1,4 +1,4 @@ -class {{ message.name }}(_.proto.Message): +class {{ message.name }}({{ p }}.Message): """{{ message.meta.doc|rst(indent=4) }}""" {# Iterate over nested enums. -#} {% for enum in message.nested_enums.values() %}{% filter indent -%} @@ -14,9 +14,9 @@ class {{ message.name }}(_.proto.Message): {# Iterate over fields. 
-#} {%- for field in message.fields.values() %} - {{ field.name }} = _.proto.{% if field.repeated %}Repeated{% endif %}Field(_.proto.{{ field.proto_type }}, number={{ field.number }} + {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field({{ p }}.{{ field.proto_type }}, number={{ field.number }} {%- if field.enum or field.message %}, - {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, + {{ field.proto_type.lower() }}={{ field.type.ident.context(proto).rel(message.ident) }}, {% endif %}) """{{ field.meta.doc|rst(indent=4) }}""" {% endfor %} diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index d63907434649..7a4b0453e742 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -20,6 +20,7 @@ from google.protobuf import descriptor_pb2 from gapic.schema import api +from gapic.schema import imp from gapic.schema import naming from gapic.schema import wrappers @@ -73,7 +74,7 @@ def test_api_build(): assert 'google.example.v1.FooService' in api_schema.services assert len(api_schema.enums) == 0 assert api_schema.protos['foo.proto'].python_modules == ( - ('google.dep', 'dep_pb2'), + imp.Import(package=('google', 'dep'), module='dep_pb2'), ) @@ -86,6 +87,81 @@ def test_proto_build(): assert isinstance(proto, api.Proto) +def test_proto_names(): + # Put together a couple of minimal protos. 
+ fd = ( + make_file_pb2( + name='dep.proto', + package='google.dep', + messages=(make_message_pb2(name='ImportedMessage', fields=()),), + ), + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='Bar', fields=( + make_field_pb2(name='imported_message', number=1, + type_name='.google.dep.ImportedMessage'), + make_field_pb2(name='primitive', number=2, type=1), + )), + make_message_pb2(name='Baz', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + ), + ), + ) + + # Create an API with those protos. + api_schema = api.API.build(fd, package='google.example.v1') + proto = api_schema.protos['foo.proto'] + assert proto.names == {'Foo', 'Bar', 'Baz', 'foo', 'imported_message', + 'primitive'} + assert proto.disambiguate('enum') == 'enum' + assert proto.disambiguate('foo') == '_foo' + + +def test_proto_names_import_collision(): + # Put together a couple of minimal protos. + fd = ( + make_file_pb2( + name='a/b/c/spam.proto', + package='a.b.c', + messages=(make_message_pb2(name='ImportedMessage', fields=()),), + ), + make_file_pb2( + name='x/y/z/spam.proto', + package='x.y.z', + messages=(make_message_pb2(name='OtherMessage', fields=()),), + ), + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='Bar', fields=( + make_field_pb2(name='imported_message', number=1, + type_name='.a.b.c.ImportedMessage'), + make_field_pb2(name='other_message', number=2, + type_name='.x.y.z.OtherMessage'), + make_field_pb2(name='primitive', number=3, type=1), + )), + make_message_pb2(name='Baz', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + ), + ), + ) + + # Create an API with those protos. 
+ api_schema = api.API.build(fd, package='google.example.v1') + proto = api_schema.protos['foo.proto'] + assert proto.names == {'Foo', 'Bar', 'Baz', 'foo', 'imported_message', + 'other_message', 'primitive', 'spam'} + + def test_proto_builder_constructor(): sentinel_message = descriptor_pb2.DescriptorProto() sentinel_enum = descriptor_pb2.EnumDescriptorProto() diff --git a/packages/gapic-generator/tests/unit/schema/test_imp.py b/packages/gapic-generator/tests/unit/schema/test_imp.py new file mode 100644 index 000000000000..a196c64120ab --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/test_imp.py @@ -0,0 +1,43 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from gapic.schema import imp + + +def test_str(): + i = imp.Import(package=('foo', 'bar'), module='baz') + assert str(i) == 'from foo.bar import baz' + + +def test_str_no_package(): + i = imp.Import(package=(), module='baz') + assert str(i) == 'import baz' + + +def test_str_alias(): + i = imp.Import(package=('foo', 'bar'), module='baz', alias='bacon') + assert str(i) == 'from foo.bar import baz as bacon' + + +def test_str_eq(): + i1 = imp.Import(package=('foo', 'bar'), module='baz') + i2 = imp.Import(package=('foo', 'bar'), module='baz') + i3 = imp.Import(package=('foo', 'bar'), module='baz', alias='bacon') + j1 = imp.Import(package=('foo', 'bar'), module='not_baz') + k1 = imp.Import(package=('spam', 'eggs'), module='baz') + assert i1 == i2 + assert i1 == i3 + assert i2 == i3 + assert i1 != j1 + assert i1 != k1 diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index f8d0ee1c388a..92b0f5219b8e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import typing from google.protobuf import descriptor_pb2 @@ -19,11 +20,21 @@ from gapic.schema import metadata -def test_address_str_no_parent(): +def test_address_str(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert str(addr) == 'baz.Bacon' +def test_address_str_context(): + Names = collections.namedtuple('Names', ['names']) + addr = metadata.Address( + package=('foo', 'bar'), + module='baz', + name='Bacon', + ).context(Names(names={'baz'})) + assert str(addr) == 'fb_baz.Bacon' + + def test_address_str_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon', parent=('spam', 'eggs')) @@ -64,10 +75,10 @@ def test_address_rel_other(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.rel( metadata.Address(package=('foo', 'not_bar'), module='baz'), - ) == '_.baz.Bacon' + ) == 'baz.Bacon' assert addr.rel( metadata.Address(package=('foo', 'bar'), module='not_baz'), - ) == '_.baz.Bacon' + ) == 'baz.Bacon' def test_address_rel_later(): @@ -130,6 +141,11 @@ def test_doc_detached_joined(): assert meta.doc == 'foo\n\nbar' +def test_field_identifier_context(): + fi = metadata.FieldIdentifier(ident=metadata.Address(), repeated=False) + assert fi.context(None) is fi + + def make_doc_meta( *, leading: str = '', diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index a27ae5caf15a..88c27ce30d95 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -13,7 +13,7 @@ # limitations under the License. import collections -from typing import Sequence +from typing import Sequence, Tuple import pytest @@ -53,6 +53,25 @@ def test_get_field(): assert field_one.name == 'field_one' +def test_field_types(): + # Create the inner message. 
+ inner_msg = make_message('InnerMessage', fields=()) + inner_enum = make_enum('InnerEnum') + + # Create the outer message, which contains an Inner as a field. + fields = ( + make_field('inner_message', message=inner_msg), + make_field('inner_enum', enum=inner_enum), + make_field('not_interesting'), + ) + outer = make_message('Outer', fields=fields) + + # Assert that composite field types are recognized but primitives are not. + assert len(outer.field_types) == 2 + assert inner_msg in outer.field_types + assert inner_enum in outer.field_types + + def test_get_field_recursive(): # Create the inner message. inner_fields = (make_field('zero'), make_field('one')) @@ -105,16 +124,43 @@ def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', def make_field(name: str, repeated: bool = False, message: wrappers.MessageType = None, + enum: wrappers.EnumType = None, meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: if message: kwargs['type_name'] = str(message.meta.address) + if enum: + kwargs['type_name'] = str(enum.meta.address) field_pb = descriptor_pb2.FieldDescriptorProto( name=name, label=3 if repeated else 1, **kwargs ) return wrappers.Field( + enum=enum, field_pb=field_pb, message=message, meta=meta or metadata.Metadata(), ) + + +def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', + values: Tuple[str, int] = (), meta: metadata.Metadata = None, + ) -> wrappers.EnumType: + enum_value_pbs = [ + descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) + for i in values + ] + enum_pb = descriptor_pb2.EnumDescriptorProto( + name=name, + value=enum_value_pbs, + ) + return wrappers.EnumType( + enum_pb=enum_pb, + values=[wrappers.EnumValueType(enum_value_pb=evpb) + for evpb in enum_value_pbs], + meta=meta or metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(package.split('.')), + module=module, + )), + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index b8cf693bcac3..81f6dc065aa4 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -19,6 +19,7 @@ from google.api import signature_pb2 from google.protobuf import descriptor_pb2 +from gapic.schema import imp from gapic.schema import metadata from gapic.schema import wrappers @@ -45,6 +46,24 @@ def test_service_scopes(): assert 'https://foo/admin/' in service.oauth_scopes +def test_service_names(): + service = make_service(name='ThingDoer', methods=( + get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), + get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), + get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), + )) + assert service.names == {'ThingDoer', 'do_thing', 'jump', 'yawn'} + + +def test_service_name_colliding_modules(): + service = make_service(name='ThingDoer', methods=( + get_method('DoThing', 'foo.bar.ThingRequest', 'foo.bar.ThingResponse'), + get_method('Jump', 'bacon.bar.JumpRequest', 'bacon.bar.JumpResponse'), + get_method('Yawn', 'a.b.v1.c.YawnRequest', 'a.b.v1.c.YawnResponse'), + )) + assert service.names == {'ThingDoer', 'do_thing', 'jump', 'yawn', 'bar'} + + def test_service_no_scopes(): service = make_service() assert len(service.oauth_scopes) == 0 @@ -57,21 +76,21 @@ def test_service_python_modules(): get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), )) assert service.python_modules == ( - ('a.b.v1', 'c'), - ('foo', 'bacon'), - ('foo', 'bar'), - ('foo', 'baz'), - ('x.y.v1', 'z'), + imp.Import(package=('a', 'b', 'v1'), module='c'), + imp.Import(package=('foo',), module='bacon'), + imp.Import(package=('foo',), module='bar'), + imp.Import(package=('foo',), module='baz'), + imp.Import(package=('x', 'y', 'v1'), module='z'), ) def test_service_python_modules_lro(): service = 
make_service_with_method_options() assert service.python_modules == ( - ('foo', 'bar'), - ('foo', 'baz'), - ('foo', 'qux'), - ('google.api_core', 'operation'), + imp.Import(package=('foo',), module='bar'), + imp.Import(package=('foo',), module='baz'), + imp.Import(package=('foo',), module='qux'), + imp.Import(package=('google', 'api_core'), module='operation'), ) @@ -89,11 +108,11 @@ def test_service_python_modules_signature(): ) # type=5 is int, so nothing is added. assert service.python_modules == ( - ('a.b.c', 'v2'), - ('foo', 'bar'), - ('foo', 'baz'), - ('foo', 'qux'), - ('google.api_core', 'operation'), + imp.Import(package=('a', 'b', 'c'), module='v2'), + imp.Import(package=('foo',), module='bar'), + imp.Import(package=('foo',), module='baz'), + imp.Import(package=('foo',), module='qux'), + imp.Import(package=('google', 'api_core'), module='operation'), ) From a4bc87307a730b81d6c40bcd636054aa0f06423c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 10 Oct 2018 13:59:06 -0700 Subject: [PATCH 0042/1339] Showcase 0.0.7 (#52) --- packages/gapic-generator/.circleci/config.yml | 9 ++++++--- packages/gapic-generator/nox.py | 6 +++--- packages/gapic-generator/tests/system/conftest.py | 12 ++++++------ .../gapic-generator/tests/system/test_grpc_unary.py | 8 ++++---- 4 files changed, 19 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 2e92d0e3f89a..5d2d11dae5ec 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -78,13 +78,16 @@ jobs: unzip protoc-3.6.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: - name: Download Showcase. + name: Download and extract Showcase. 
command: | - curl --location https://github.com/googleapis/gapic-showcase/releases/download/v0.0.3/gapic-showcase-v1alpha1-0.0.3-linux-amd64 --output /usr/local/bin/showcase + curl --location https://github.com/googleapis/gapic-showcase/releases/download/v0.0.7/gapic-showcase-0.0.7-linux-amd64.tar.gz --output /tmp/gapic-showcase.tar.gz + cd /tmp + tar xvfz /tmp/gapic-showcase.tar.gz + mv /tmp/gapic-showcase /usr/local/bin/showcase chmod a+x /usr/local/bin/showcase - run: name: Run Showcase. - command: /usr/local/bin/showcase + command: /usr/local/bin/showcase start background: true - run: name: Run showcase tests. diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 148f87945316..d57bf1872bea 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -57,13 +57,13 @@ def showcase(session): # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: - showcase_version = '0.0.3' + showcase_version = '0.0.7' # Download the Showcase descriptor. session.run( 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' f'download/v{showcase_version}/' - f'gapic-showcase-v1alpha1-{showcase_version}.desc', + f'gapic-showcase-{showcase_version}.desc', '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), silent=True, ) @@ -72,7 +72,7 @@ def showcase(session): session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--pyclient_out={tmp_dir}', - 'google/showcase/v1alpha1/showcase.proto', + 'google/showcase/v1alpha2/echo.proto', ) # Install the library. 
diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 2121a9d6ce13..65edc953c71f 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -15,18 +15,18 @@ import pytest from google.auth.credentials import AnonymousCredentials -from google.showcase import Showcase -from google.showcase_v1alpha1.services.showcase.transports.grpc import ( - ShowcaseGrpcTransport, +from google.showcase import Echo +from google.showcase_v1alpha2.services.echo.transports.grpc import ( + EchoGrpcTransport, ) import grpc @pytest.fixture -def showcase(): - transport = ShowcaseGrpcTransport(credentials=AnonymousCredentials()) +def echo(): + transport = EchoGrpcTransport(credentials=AnonymousCredentials()) transport.__dict__['grpc_channel'] = grpc.insecure_channel( transport.SERVICE_ADDRESS, ) - return Showcase(transport=transport) + return Echo(transport=transport) diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index 04c05a1e08b3..a6ab9e4d5982 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -18,16 +18,16 @@ from google.rpc import code_pb2 -def test_unary(showcase): - response = showcase.echo({ +def test_unary(echo): + response = echo.echo({ 'content': 'The hail in Wales falls mainly on the snails.', }) assert response.content == 'The hail in Wales falls mainly on the snails.' -def test_unary_error(showcase): +def test_unary_error(echo): with pytest.raises(exceptions.InvalidArgument) as exc: - showcase.echo({ + echo.echo({ 'error': { 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), 'message': 'Bad things! 
Bad things!', From 3c25cdcc330af4b4fd1e1ed1f169f93f22823b34 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 11 Oct 2018 13:07:36 -0700 Subject: [PATCH 0043/1339] Reword the documentation link on the front README. (#55) --- packages/gapic-generator/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index 2132297304de..38aa2a61e4fe 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -24,7 +24,7 @@ regarding: Documentation ------------- -To learn more, consult the `documentation`_. +`Documentation`_ is available on Read the Docs. .. _documentation: https://gapic-generator-python.readthedocs.io/ From e57bc9f3f5bb84104f9af9a433d587d9b7f82080 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 11 Oct 2018 13:09:07 -0700 Subject: [PATCH 0044/1339] Update the executable name. (#54) This makes the executable name be `protoc-gen-python_gapic` (yes, the mixture of `kebob-case` and `snake_case` is a crime against all that is good, right, and salutary; all alternatives within the `protoc` constraints are worse) and the plugin switch therefore becomes `--python_gapic_out`. --- packages/gapic-generator/docs/getting-started.rst | 2 +- packages/gapic-generator/docs/installing.rst | 7 ++++--- packages/gapic-generator/nox.py | 2 +- packages/gapic-generator/setup.py | 2 +- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started.rst b/packages/gapic-generator/docs/getting-started.rst index 8091ac0b58e9..4d07ca3ac40d 100644 --- a/packages/gapic-generator/docs/getting-started.rst +++ b/packages/gapic-generator/docs/getting-started.rst @@ -59,7 +59,7 @@ for each plugin invoked; you just want these to match: # assume that api-common-protos is next to it. $ protoc google/cloud/vision/v1/*.proto \ --proto_path=../api-common-protos/ --proto_path=. 
\ - --python_out=/dest/ --pyclient_out=/dest/ + --python_out=/dest/ --python_gapic_out=/dest/ .. note:: diff --git a/packages/gapic-generator/docs/installing.rst b/packages/gapic-generator/docs/installing.rst index 69786d2cf091..2496ae77dcdb 100644 --- a/packages/gapic-generator/docs/installing.rst +++ b/packages/gapic-generator/docs/installing.rst @@ -52,7 +52,8 @@ API Generator for Python This package is provided as a standard Python library, and can be installed the usual ways. It fundamentally provides a CLI command, -``protoc-gen-pyclient``, so you will want to install using a mechanism +``protoc-gen-python_gapic``, (yes, the mismatch of ``kebob-case`` and +``snake_case`` is weird, sorry), so you will want to install using a mechanism that is conducive to making CLI commands available. Additionally, this program currently only runs against Python 3.6 or @@ -81,8 +82,8 @@ To ensure the tool is installed properly: .. code-block:: shell - $ which protoc-gen-pyclient - /path/to/protoc-gen-pyclient + $ which protoc-gen-python_gapic + /path/to/protoc-gen-python_gapic .. _pyenv: https://github.com/pyenv/pyenv .. _pipsi: https://github.com/mitsuhiko/pipsi diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index d57bf1872bea..0427bb0ad4bb 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -71,7 +71,7 @@ def showcase(session): # Write out a client library for Showcase. 
session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--pyclient_out={tmp_dir}', + f'--python_gapic_out={tmp_dir}', 'google/showcase/v1alpha2/echo.proto', ) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index efc6410f207b..e6703be31c30 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -36,7 +36,7 @@ long_description=README, entry_points="""[console_scripts] protoc-gen-dump=gapic.cli.dump:dump - protoc-gen-pyclient=gapic.cli.generate:generate + protoc-gen-python_gapic=gapic.cli.generate:generate """, platforms='Posix; MacOS X', include_package_data=True, From 04c7a8f339d72f60d21b5c89c097149d5f2e9c17 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 11 Oct 2018 13:10:38 -0700 Subject: [PATCH 0045/1339] Bump proto-plus to 0.1.0a2 in generated code. (#53) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 5c74f400398e..f69d28f255f6 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -22,7 +22,7 @@ setuptools.setup( 'google-api-core >= 1.3.0, < 2.0.0dev', 'googleapis-common-protos >= 1.6.0b6', 'grpcio >= 1.10.0', - 'proto-plus >= 0.1.0a1', + 'proto-plus >= 0.1.0a2', ), classifiers=[ 'Development Status :: 3 - Alpha', From e873432af558136752c9a83a4768a821db7cf133 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 17 Oct 2018 13:32:29 -0700 Subject: [PATCH 0046/1339] Determine naming collisions up front. (#58) This commit determines and resolves naming collisions logically prior to templates (in other words, so templates do not have to worry about it and they just get the "right thing"). 
--- packages/gapic-generator/.flake8 | 2 +- packages/gapic-generator/gapic/schema/api.py | 48 +++++-- .../gapic-generator/gapic/schema/metadata.py | 33 +++-- .../gapic-generator/gapic/schema/wrappers.py | 125 ++++++++++++++++-- .../services/$service/client.py.j2 | 26 ++-- .../services/$service/transports/base.py.j2 | 4 +- .../services/$service/transports/grpc.py.j2 | 8 +- .../services/$service/transports/http.py.j2 | 10 +- .../$name_$version/types/_message.py.j2 | 2 +- .../tests/unit/schema/test_metadata.py | 18 +-- .../unit/schema/wrappers/test_message.py | 8 ++ 11 files changed, 219 insertions(+), 65 deletions(-) diff --git a/packages/gapic-generator/.flake8 b/packages/gapic-generator/.flake8 index fe63398b556d..092a2c36d96b 100644 --- a/packages/gapic-generator/.flake8 +++ b/packages/gapic-generator/.flake8 @@ -5,4 +5,4 @@ ignore = E123, E124 # Line over-indented for visual indent. # This works poorly with type annotations in method declarations. - E128, E131 + E126, E128, E131 diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index b65d54cd9a2e..1f0152a3eab3 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -121,14 +121,13 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: of statement. """ answer = set() - self_reference = self.meta.address.context(self).python_import + self_reference = self.meta.address.python_import for t in chain(*[m.field_types for m in self.messages.values()]): # Add the appropriate Python import for the field. # Sanity check: We do make sure that we are not trying to have # a module import itself. - imp = t.ident.context(self).python_import - if imp != self_reference: - answer.add(imp) + if t.ident.python_import != self_reference: + answer.add(t.ident.python_import) # Done; return the sorted sequence. 
return tuple(sorted(list(answer))) @@ -144,10 +143,14 @@ def top(self) -> 'Proto': return type(self)( file_pb2=self.file_pb2, services=self.services, - messages={k: v for k, v in self.messages.items() - if not v.meta.address.parent}, - enums={k: v for k, v in self.enums.items() - if not v.meta.address.parent}, + messages=collections.OrderedDict([ + (k, v) for k, v in self.messages.items() + if not v.meta.address.parent + ]), + enums=collections.OrderedDict([ + (k, v) for k, v in self.enums.items() + if not v.meta.address.parent + ]), file_to_generate=False, meta=self.meta, ) @@ -326,7 +329,10 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, @property def proto(self) -> Proto: """Return a Proto dataclass object.""" - return Proto( + # Create a "context-naïve" proto. + # This has everything but is ignorant of naming collisions in the + # ultimate file that will be written. + naive = Proto( enums=self.enums, file_pb2=self.file_descriptor, file_to_generate=self.file_to_generate, @@ -337,6 +343,30 @@ def proto(self) -> Proto: ), ) + # If this is not a file being generated, we do not need to + # do anything else. + if not self.file_to_generate: + return naive + + # Return a context-aware proto object. + # Note: The services bind to themselves, because services get their + # own output files. 
+ return dataclasses.replace(naive, + enums=collections.OrderedDict([ + (k, v.with_context(collisions=naive.names)) + for k, v in naive.enums.items() + ]), + messages=collections.OrderedDict([ + (k, v.with_context(collisions=naive.names)) + for k, v in naive.messages.items() + ]), + services=collections.OrderedDict([ + (k, v.with_context(collisions=v.names)) + for k, v in naive.services.items() + ]), + meta=naive.meta.with_context(collisions=naive.names), + ) + @cached_property def all_enums(self) -> Mapping[str, wrappers.EnumType]: return collections.ChainMap({}, self.enums, diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 2fb252190080..98fdccc13025 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -153,15 +153,6 @@ def child(self, child_name: str, path: Tuple[int]) -> 'Address': parent=self.parent + (self.name,) if self.name else self.parent, ) - def context(self, context) -> 'Address': - """Return a derivative of this address with the provided context. - - This method is used to address naming collisions. The returned - ``Address`` object aliases module names to avoid naming collisions in - the file being written. - """ - return dataclasses.replace(self, collisions=frozenset(context.names)) - def rel(self, address: 'Address') -> str: """Return an identifier for this type, relative to the given address. @@ -233,6 +224,15 @@ def resolve(self, selector: str) -> str: return f'{".".join(self.package)}.{selector}' return selector + def with_context(self, *, collisions: Set[str]) -> 'Address': + """Return a derivative of this address with the provided context. + + This method is used to address naming collisions. The returned + ``Address`` object aliases module names to avoid naming collisions in + the file being written. 
+ """ + return dataclasses.replace(self, collisions=frozenset(collisions)) + @dataclasses.dataclass(frozen=True) class Metadata: @@ -259,6 +259,17 @@ def doc(self): return '\n\n'.join(self.documentation.leading_detached_comments) return '' + def with_context(self, *, collisions: Set[str]) -> 'Metadata': + """Return a derivative of this metadata with the provided context. + + This method is used to address naming collisions. The returned + ``Address`` object aliases module names to avoid naming collisions in + the file being written. + """ + return dataclasses.replace(self, + address=self.address.with_context(collisions=collisions), + ) + @dataclasses.dataclass(frozen=True) class FieldIdentifier: @@ -275,7 +286,3 @@ def sphinx(self) -> str: if self.repeated: return f'Sequence[{self.ident.sphinx}]' return self.ident.sphinx - - def context(self, arg) -> 'FieldIdentifier': - """Return self. Provided for compatibility with Address.""" - return self diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 11e90d34d8e9..1f0c9d56352d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -128,6 +128,23 @@ def type(self) -> Union['MessageType', 'EnumType', 'PythonType']: raise TypeError('Unrecognized protobuf type. This code should ' 'not be reachable; please file a bug.') + def with_context(self, *, collisions: Set[str]) -> 'Field': + """Return a derivative of this field with the provided context. + + This method is used to address naming collisions. The returned + ``Field`` object aliases module names to avoid naming collisions + in the file being written. 
+ """ + return dataclasses.replace(self, + message=self.message.with_context( + collisions=collisions, + skip_fields=True, + ) if self.message else None, + enum=self.enum.with_context(collisions=collisions) + if self.enum else None, + meta=self.meta.with_context(collisions=collisions), + ) + @dataclasses.dataclass(frozen=True) class MessageType: @@ -152,7 +169,13 @@ def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: answer.append(field.type) return tuple(answer) - def get_field(self, *field_path: Sequence[str]) -> Field: + @property + def ident(self) -> metadata.Address: + """Return the identifier data to be used in templates.""" + return self.meta.address + + def get_field(self, *field_path: Sequence[str], + collisions: Set[str] = frozenset()) -> Field: """Return a field arbitrarily deep in this message's structure. This method recursively traverses the message tree to return the @@ -171,12 +194,21 @@ def get_field(self, *field_path: Sequence[str]) -> Field: KeyError: If a repeated field is used in the non-terminal position in the path. """ + # If collisions are not explicitly specified, retrieve them + # from this message's address. + # This ensures that calls to `get_field` will return a field with + # the same context, regardless of the number of levels through the + # chain (in order to avoid infinite recursion on circular references, + # we only shallowly bind message references held by fields; this + # binds deeply in the one spot where that might be a problem). + collisions = collisions or self.meta.address.collisions + # Get the first field in the path. cursor = self.fields[field_path[0]] # Base case: If this is the last field in the path, return it outright. if len(field_path) == 1: - return cursor + return cursor.with_context(collisions=collisions) # Sanity check: If cursor is a repeated field, then raise an exception. # Repeated fields are only permitted in the terminal position. 
@@ -191,12 +223,37 @@ def get_field(self, *field_path: Sequence[str]) -> Field: # Recursion case: Pass the remainder of the path to the sub-field's # message. - return cursor.message.get_field(*field_path[1:]) + return cursor.message.get_field(*field_path[1:], collisions=collisions) - @property - def ident(self) -> metadata.Address: - """Return the identifier data to be used in templates.""" - return self.meta.address + def with_context(self, *, + collisions: Set[str], + skip_fields: bool = False, + ) -> 'MessageType': + """Return a derivative of this message with the provided context. + + This method is used to address naming collisions. The returned + ``MessageType`` object aliases module names to avoid naming collisions + in the file being written. + + The ``skip_fields`` argument will omit applying the context to the + underlying fields. This provides for an "exit" in the case of circular + references. + """ + return dataclasses.replace(self, + fields=collections.OrderedDict([ + (k, v.with_context(collisions=collisions)) + for k, v in self.fields.items() + ]) if not skip_fields else self.fields, + nested_enums=collections.OrderedDict([ + (k, v.with_context(collisions=collisions)) + for k, v in self.nested_enums.items() + ]), + nested_messages=collections.OrderedDict([(k, v.with_context( + collisions=collisions, + skip_fields=skip_fields, + )) for k, v in self.nested_messages.items()]), + meta=self.meta.with_context(collisions=collisions), + ) @dataclasses.dataclass(frozen=True) @@ -228,6 +285,17 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address + def with_context(self, *, collisions: Set[str]) -> 'EnumType': + """Return a derivative of this enum with the provided context. + + This method is used to address naming collisions. The returned + ``EnumType`` object aliases module names to avoid naming collisions in + the file being written. 
+ """ + return dataclasses.replace(self, + meta=self.meta.with_context(collisions=collisions), + ) + @dataclasses.dataclass(frozen=True) class PythonType: @@ -275,6 +343,7 @@ def meta(self) -> metadata.Metadata: name='Operation', module='operation', package=('google', 'api_core'), + collisions=self.lro_response.meta.address.collisions, ), documentation=descriptor_pb2.SourceCodeInfo.Location( leading_comments='An object representing a long-running ' @@ -298,6 +367,18 @@ def name(self) -> str: # on google.api_core just to get these strings. return 'Operation' + def with_context(self, *, collisions: Set[str]) -> 'OperationType': + """Return a derivative of this operation with the provided context. + + This method is used to address naming collisions. The returned + ``OperationType`` object aliases module names to avoid naming + collisions in the file being written. + """ + return dataclasses.replace(self, + lro_response=self.lro_response.with_context(collisions=collisions), + lro_metadata=self.lro_metadata.with_context(collisions=collisions), + ) + @dataclasses.dataclass(frozen=True) class Method: @@ -381,6 +462,19 @@ def signatures(self) -> Tuple[signature_pb2.MethodSignature]: # Done; return a tuple of signatures. return MethodSignatures(all=tuple(answer)) + def with_context(self, *, collisions: Set[str]) -> 'Method': + """Return a derivative of this method with the provided context. + + This method is used to address naming collisions. The returned + ``Method`` object aliases module names to avoid naming collisions + in the file being written. 
+ """ + return dataclasses.replace(self, + input=self.input.with_context(collisions=collisions), + output=self.output.with_context(collisions=collisions), + meta=self.meta.with_context(collisions=collisions), + ) + @dataclasses.dataclass(frozen=True) class MethodSignature: @@ -519,7 +613,7 @@ def python_modules(self) -> Sequence[imp.Import]: answer = set() for method in self.methods.values(): for t in method.ref_types: - answer.add(t.ident.context(self).python_import) + answer.add(t.ident.python_import) return tuple(sorted(list(answer))) @property @@ -527,3 +621,18 @@ def has_lro(self) -> bool: """Return whether the service has a long-running method.""" return any([getattr(m.output, 'lro_response', None) for m in self.methods.values()]) + + def with_context(self, *, collisions: Set[str]) -> 'Service': + """Return a derivative of this service with the provided context. + + This method is used to address naming collisions. The returned + ``Service`` object aliases module names to avoid naming collisions + in the file being written. 
+ """ + return dataclasses.replace(self, + methods=collections.OrderedDict([ + (k, v.with_context(collisions=collisions)) + for k, v in self.methods.items() + ]), + meta=self.meta.with_context(collisions=collisions), + ) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index a1f549d224c2..0a5bcef75043 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -51,15 +51,15 @@ class {{ service.name }}: @dispatch {% endif -%} def {{ method.name|snake_case }}(self, - request: {{ method.input.ident.context(service) }}, *, + request: {{ method.input.ident }}, *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident.context(service) }}: + ) -> {{ method.output.ident }}: """{{ method.meta.doc|rst(width=72, indent=8) }} Args: - request ({{ method.input.ident.context(service).sphinx }}): + request ({{ method.input.ident.sphinx }}): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} retry (~.retry.Retry): Designation of what errors, if any, @@ -69,12 +69,12 @@ class {{ service.name }}: sent along with the request as metadata. Returns: - {{ method.output.ident.context(service).sphinx }}: + {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ # Coerce the request to the protocol buffer object. 
- if not isinstance(request, {{ method.input.ident.context(service) }}): - request = {{ method.input.ident.context(service) }}(**request) + if not isinstance(request, {{ method.input.ident }}): + request = {{ method.input.ident }}(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -106,9 +106,9 @@ class {{ service.name }}: response = operation.from_gapic( response, self._transport.operations_client, - {{ method.output.lro_response.ident.context(service) }}, + {{ method.output.lro_response.ident }}, {%- if method.output.lro_metadata %} - metadata_type={{ method.output.lro_metadata.ident.context(service) }}, + metadata_type={{ method.output.lro_metadata.ident }}, {%- endif %} ) {%- endif %} @@ -120,18 +120,18 @@ class {{ service.name }}: @{{ method.name|snake_case }}.register def _{{ method.name|snake_case }}_with_{{ signature.dispatch_field.name|snake_case }}(self, {%- for field in signature.fields.values() %} - {{ field.name }}: {{ field.ident.context(service) }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, + {{ field.name }}: {{ field.ident }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, {%- endfor %} *, retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident.context(service) }}: + ) -> {{ method.output.ident }}: """{{ method.meta.doc|rst(width=72, indent=8) }} Args: {%- for field in signature.fields.values() %} - {{ field.name }} ({{ field.ident.context(service).sphinx }}): + {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|wrap(width=72, indent=16) }} {%- endfor %} retry (~.retry.Retry): Designation of what errors, if any, @@ -141,11 +141,11 @@ class {{ service.name }}: sent alont with the request as metadata. 
Returns: - {{ method.output.ident.context(service).sphinx }}: + {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ return self.{{ method.name|snake_case }}( - {{ method.input.ident.context(service) }}( + {{ method.input.ident }}( {%- for field in signature.fields.values() %} {{ field.name }}={{ field.name }}, {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 index 72f0ba6862bb..71cd91c7505f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 @@ -57,8 +57,8 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): @abc.abstractmethod def {{ method.name|snake_case }}( self, - request: {{ method.input.ident.context(service) }}, - ) -> {{ method.output.ident.context(service) }}: + request: {{ method.input.ident }}, + ) -> {{ method.output.ident }}: raise NotImplementedError {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 index c3e6715fdb18..6e0335fd2b1e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 @@ -82,8 +82,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @property def {{ method.name|snake_case }}(self) -> Callable[ - [{{ method.input.ident.context(service) }}], - {{ method.output.ident.context(service) }}]: + [{{ method.input.ident }}], + {{ 
method.output.ident }}]: """Return a callable for the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} @@ -103,8 +103,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.ident.context(service) }}.serialize, - response_deserializer={{ method.output.ident.context(service) }}.deserialize, + request_serializer={{ method.input.ident }}.serialize, + response_deserializer={{ method.output.ident }}.deserialize, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 index e67928baef89..c894d928cc22 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 @@ -65,23 +65,23 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): {%- for method in service.methods.values() %} def {{ method.name|snake_case }}(self, - request: {{ method.input.ident.context(service) }}, *, + request: {{ method.input.ident }}, *, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident.context(service) }}: + ) -> {{ method.output.ident }}: """Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} {{- ' ' -}} method over HTTP. Args: - request (~.{{ method.input.ident.context(service) }}): + request (~.{{ method.input.ident }}): The request object. 
{{ method.input.meta.doc|rst(width=72, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. Returns: - ~.{{ method.output.ident.context(service) }}: + ~.{{ method.output.ident }}: {{ method.output.meta.doc|rst(width=72, indent=16) }} """ # Serialize the input. @@ -102,7 +102,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): ) # Return the response. - return {{ method.output.ident.context(service) }}.FromString( + return {{ method.output.ident }}.FromString( response.content, ) {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 index f3350beea392..0d569bf1b271 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 @@ -16,7 +16,7 @@ class {{ message.name }}({{ p }}.Message): {%- for field in message.fields.values() %} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field({{ p }}.{{ field.proto_type }}, number={{ field.number }} {%- if field.enum or field.message %}, - {{ field.proto_type.lower() }}={{ field.type.ident.context(proto).rel(message.ident) }}, + {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) """{{ field.meta.doc|rst(indent=4) }}""" {% endfor %} diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 92b0f5219b8e..3c714126543b 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import collections import typing from google.protobuf import descriptor_pb2 @@ -25,13 +24,12 @@ def test_address_str(): assert str(addr) == 'baz.Bacon' -def test_address_str_context(): - Names = collections.namedtuple('Names', ['names']) +def test_address_str_with_context(): addr = metadata.Address( package=('foo', 'bar'), module='baz', name='Bacon', - ).context(Names(names={'baz'})) + ).with_context(collisions={'baz'}) assert str(addr) == 'fb_baz.Bacon' @@ -121,6 +119,13 @@ def test_address_resolve(): assert addr.resolve('google.example.Bacon') == 'google.example.Bacon' +def test_metadata_with_context(): + meta = metadata.Metadata() + assert meta.with_context( + collisions={'foo', 'bar'}, + ).address.collisions == {'foo', 'bar'} + + def test_doc_nothing(): meta = metadata.Metadata() assert meta.doc == '' @@ -141,11 +146,6 @@ def test_doc_detached_joined(): assert meta.doc == 'foo\n\nbar' -def test_field_identifier_context(): - fi = metadata.FieldIdentifier(ident=metadata.Address(), repeated=False) - assert fi.context(None) is fi - - def make_doc_meta( *, leading: str = '', diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 88c27ce30d95..7ef9e4610f0d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -45,6 +45,14 @@ def test_message_ident(): assert message.ident.sphinx == '~.bar.Baz' +def test_message_ident_collisions(): + message = make_message('Baz', package='foo.v1', module='bar').with_context( + collisions={'bar'}, + ) + assert str(message.ident) == 'fv_bar.Baz' + assert message.ident.sphinx == '~.fv_bar.Baz' + + def test_get_field(): fields = (make_field('field_one'), make_field('field_two')) message = make_message('Message', fields=fields) From 4ddbc6ccb384710ebaa55d2f3443aca1e7a660e1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 18 Oct 
2018 08:50:47 -0700 Subject: [PATCH 0047/1339] Update root package error message. (#56) --- packages/gapic-generator/gapic/schema/naming.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 6d36e465581d..5bcfebe9fbe0 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -71,8 +71,12 @@ def build(cls, # Sanity check: If there is no common ground in the package, # we are obviously in trouble. if not root_package: - raise ValueError('Protos provided have entirely different ' - 'proto packages.') + raise ValueError( + 'The protos provided do not share a common root package. ' + 'Ensure that all explicitly-specified protos are for a ' + 'single API. ' + f'The packages we got are: {", ".join(proto_packages)}' + ) # Define the valid regex to split the package. # From 6e7a6dce059279b91493235032a92987c40cc1ee Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 18 Oct 2018 15:53:38 -0700 Subject: [PATCH 0048/1339] Add support for subpackages. (#59) Adds support for maintaining subpackage segments when generating protos. 
--- .../gapic/generator/generator.py | 4 +++ .../gapic-generator/gapic/schema/metadata.py | 9 ++++++- .../types/{ => $sub}/$proto.py.j2 | 4 +-- .../types/{ => $sub}/_enum.py.j2 | 0 .../types/{ => $sub}/_message.py.j2 | 4 +-- .../tests/unit/generator/test_generator.py | 26 ++++++++++++++++++- .../tests/unit/schema/test_metadata.py | 25 ++++++++++++++++++ 7 files changed, 66 insertions(+), 6 deletions(-) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/{ => $sub}/$proto.py.j2 (81%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/{ => $sub}/_enum.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/{ => $sub}/_message.py.j2 (86%) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index d9a9ff0dd995..3e9175eb2af8 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -203,10 +203,14 @@ def _get_filenames( ) # Replace the $proto variable if appliable. + # In the cases of protos, we also honor subpackages. 
if context and 'proto' in context: filename = filename.replace( '$proto', context['proto'].module_name, + ).replace( + '$sub', + '/'.join(context['proto'].meta.address.subpackage), ) # Paths may have empty path segments if components are empty diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 98fdccc13025..dfbe2b0e8e27 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -118,7 +118,7 @@ def python_import(self) -> imp.Import: package=self.api_naming.module_namespace + ( self.api_naming.versioned_module_name, 'types', - ), + ) + self.subpackage, module=self.module, alias=self.module_alias, ) @@ -137,6 +137,13 @@ def sphinx(self) -> str: return f'~.{self}' return self.name + @property + def subpackage(self) -> Tuple[str]: + """Return the subpackage below the versioned module name, if any.""" + return tuple( + self.package[len(self.api_naming.proto_package.split('.')):] + ) + def child(self, child_name: str, path: Tuple[int]) -> 'Address': """Return a new child of the current Address. 
diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 similarity index 81% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 index 5bbc2a60dfeb..2cb4ad09fb30 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 @@ -16,11 +16,11 @@ import proto{% if p != 'proto' %} as {{ p }}{% endif -%} {% for enum in proto.top.enums.values() -%} - {% include '$namespace/$name_$version/types/_enum.py.j2' with context %} + {% include '$namespace/$name_$version/types/$sub/_enum.py.j2' with context %} {% endfor %} {% for message in proto.top.messages.values() -%} - {% include "$namespace/$name_$version/types/_message.py.j2" with context %} + {% include "$namespace/$name_$version/types/$sub/_message.py.j2" with context %} {% endfor %} __all__ = ( diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_enum.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_enum.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_enum.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 similarity index 86% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 
index 0d569bf1b271..a537aa2f46e7 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 @@ -2,13 +2,13 @@ class {{ message.name }}({{ p }}.Message): """{{ message.meta.doc|rst(indent=4) }}""" {# Iterate over nested enums. -#} {% for enum in message.nested_enums.values() %}{% filter indent -%} - {% include '$namespace/$name_$version/types/_enum.py.j2' %} + {% include '$namespace/$name_$version/types/$sub/_enum.py.j2' %} {% endfilter %}{% endfor -%} {#- Iterate over nested messages. -#} {% for submessage in message.nested_messages.values() -%} {% with message = submessage %}{% filter indent %} - {%- include '$namespace/$name_$version/types/_message.py.j2' %} + {%- include '$namespace/$name_$version/types/$sub/_message.py.j2' %} {% endfilter %}{% endwith %} {% endfor -%} diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 7ef1aac0b67b..806465bb58cc 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -239,6 +239,29 @@ def test_get_filenames_with_proto(): ) == ('spam/types/bacon.py',) +def test_get_filenames_with_proto_and_sub(): + file_pb2 = descriptor_pb2.FileDescriptorProto( + name='bacon.proto', + package='foo.bar.v2.baz', + ) + naming = make_naming( + namespace=('Foo',), + name='Bar', + proto_package='foo.bar.v2', + version='v2', + ) + api = make_api( + make_proto(file_pb2, naming=naming), + naming=naming, + ) + + g = generator.Generator(api_schema=api) + assert g._get_filenames( + '$name/types/$sub/$proto.py.j2', + context={'proto': api.protos['bacon.proto']}, + ) == ('bar/types/baz/bacon.py',) + + def test_get_filenames_with_namespace_init(): g = generator.Generator(api_schema=make_api(naming=make_naming( namespace=('Foo', 'Bar', 
'Baz'), @@ -255,11 +278,12 @@ def test_get_filenames_with_namespace_init(): def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, file_to_generate: bool = True, prior_protos: Mapping = None, + naming: naming.Naming = None, ) -> api.Proto: prior_protos = prior_protos or {} return api._ProtoBuilder(file_pb, file_to_generate=file_to_generate, - naming=make_naming(), + naming=naming or make_naming(), prior_protos=prior_protos, ).proto diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 3c714126543b..3ed4657446ad 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -17,6 +17,7 @@ from google.protobuf import descriptor_pb2 from gapic.schema import metadata +from gapic.schema import naming def test_address_str(): @@ -119,6 +120,30 @@ def test_address_resolve(): assert addr.resolve('google.example.Bacon') == 'google.example.Bacon' +def test_address_subpackage(): + addr = metadata.Address( + package=('foo', 'bar', 'baz', 'v1', 'spam', 'eggs'), + api_naming=naming.Naming(proto_package='foo.bar.baz.v1'), + ) + assert addr.subpackage == ('spam', 'eggs') + + +def test_address_subpackage_no_version(): + addr = metadata.Address( + package=('foo', 'bar', 'baz', 'spam', 'eggs'), + api_naming=naming.Naming(proto_package='foo.bar.baz'), + ) + assert addr.subpackage == ('spam', 'eggs') + + +def test_address_subpackage_empty(): + addr = metadata.Address( + package=('foo', 'bar', 'baz', 'v1'), + api_naming=naming.Naming(proto_package='foo.bar.baz.v1'), + ) + assert addr.subpackage == () + + def test_metadata_with_context(): meta = metadata.Metadata() assert meta.with_context( From f00cf4721f12983c54441018ee4ae9fcf3c1d5e1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 18 Oct 2018 15:56:31 -0700 Subject: [PATCH 0049/1339] Only collapse double-slashes once. 
(#60) This fixes what was probably an erroneous merge conflict resolution whereby we were collapsing double slashes more than once. It was idempotent, but superfluous. --- packages/gapic-generator/gapic/generator/generator.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 3e9175eb2af8..885c573afea1 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -217,10 +217,6 @@ def _get_filenames( # (e.g. no $version); handle this. filename = re.sub(r'/+', '/', filename) - # Paths may have empty path segments if components are empty - # (e.g. no $version); handle this. - filename = re.sub(r'/+', '/', filename) - # Done, return the filename. return (filename,) From 7b8723ef15f3548d387180986d0a6de47fe463e0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 5 Nov 2018 06:52:53 -0800 Subject: [PATCH 0050/1339] Add __init__.py file so find_packages picks up gapic.cli. 
(#62) --- packages/gapic-generator/.coveragerc | 1 + packages/gapic-generator/gapic/cli/__init__.py | 13 +++++++++++++ packages/gapic-generator/nox.py | 3 +++ packages/gapic-generator/setup.py | 2 +- 4 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/gapic/cli/__init__.py diff --git a/packages/gapic-generator/.coveragerc b/packages/gapic-generator/.coveragerc index b0654cf735fa..950f01c104a6 100644 --- a/packages/gapic-generator/.coveragerc +++ b/packages/gapic-generator/.coveragerc @@ -1,6 +1,7 @@ [run] branch = True omit = + gapic/cli/*.py *_pb2.py [report] diff --git a/packages/gapic-generator/gapic/cli/__init__.py b/packages/gapic-generator/gapic/cli/__init__.py new file mode 100644 index 000000000000..b0c7da3d7725 --- /dev/null +++ b/packages/gapic-generator/gapic/cli/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 0427bb0ad4bb..89578a17a518 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -51,6 +51,9 @@ def showcase(session): session.log('-' * 70) session.run('netstat', '-plnt', '|', 'grep', ':7469', silent=True) + # Use the Python implementation of protocol buffers. 
+ session.env['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python' + # Install pytest and gapic-generator-python session.install('pytest') session.install('-e', '.') diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e6703be31c30..be98ef389652 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.0.4', + version='0.0.5', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 938c7e660496109b053757d36b1c8c80abd380ae Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 28 Nov 2018 15:41:10 -0800 Subject: [PATCH 0051/1339] Add MANIFEST.in. (#63) This is needed for templates to persist through a standard `pip install`. --- packages/gapic-generator/MANIFEST.in | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 packages/gapic-generator/MANIFEST.in diff --git a/packages/gapic-generator/MANIFEST.in b/packages/gapic-generator/MANIFEST.in new file mode 100644 index 000000000000..999e442e0fe1 --- /dev/null +++ b/packages/gapic-generator/MANIFEST.in @@ -0,0 +1,6 @@ +include README.rst LICENSE +recursive-include gapic *.json *.proto +recursive-include gapic/templates *.j2 +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ From 0ade015afa7fc239934a97b70fac6bc29ffe30f6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 4 Dec 2018 12:16:28 -0800 Subject: [PATCH 0052/1339] Docker (#64) --- packages/gapic-generator/.circleci/config.yml | 18 +-- packages/gapic-generator/.dockerignore | 61 +++++++++ packages/gapic-generator/.gitignore | 3 - packages/gapic-generator/Dockerfile | 22 +++ packages/gapic-generator/docs/conf.py | 8 +- .../gapic-generator/docs/getting-started.rst | 125 ------------------ .../docs/getting-started/_example.rst | 20 +++ .../docs/getting-started/_usage_intro.rst | 6 + .../docs/getting-started/_verifying.rst | 50 +++++++ 
.../docs/getting-started/docker-shortcut.rst | 28 ++++ .../docs/getting-started/docker.rst | 116 ++++++++++++++++ .../docs/getting-started/index.rst | 26 ++++ .../local.rst} | 67 ++++++++++ packages/gapic-generator/docs/index.rst | 3 +- packages/gapic-generator/gapic.sh | 101 ++++++++++++++ 15 files changed, 513 insertions(+), 141 deletions(-) create mode 100644 packages/gapic-generator/.dockerignore create mode 100644 packages/gapic-generator/Dockerfile delete mode 100644 packages/gapic-generator/docs/getting-started.rst create mode 100644 packages/gapic-generator/docs/getting-started/_example.rst create mode 100644 packages/gapic-generator/docs/getting-started/_usage_intro.rst create mode 100644 packages/gapic-generator/docs/getting-started/_verifying.rst create mode 100644 packages/gapic-generator/docs/getting-started/docker-shortcut.rst create mode 100644 packages/gapic-generator/docs/getting-started/docker.rst create mode 100644 packages/gapic-generator/docs/getting-started/index.rst rename packages/gapic-generator/docs/{installing.rst => getting-started/local.rst} (61%) create mode 100755 packages/gapic-generator/gapic.sh diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 5d2d11dae5ec..756620446287 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -14,7 +14,7 @@ workflows: jobs: unit-3.6: docker: - - image: 'python:3.6' + - image: python:3.6-slim steps: - checkout - run: @@ -36,7 +36,7 @@ jobs: when: always unit-3.7: docker: - - image: 'python:3.7' + - image: python:3.7-slim steps: - checkout - run: @@ -58,17 +58,17 @@ jobs: when: always showcase: docker: - - image: 'python:3.7' + - image: python:3.7-slim steps: - checkout - run: - name: Install nox. - command: pip install --pre nox-automation - - run: - name: Install pandoc and unzip. + name: Install system dependencies. 
command: | apt-get update - apt-get install -y pandoc unzip + apt-get install -y curl pandoc unzip + - run: + name: Install nox. + command: pip install --pre nox-automation - run: name: Install protoc 3.6.1. command: | @@ -94,7 +94,7 @@ jobs: command: nox -s showcase docs: docker: - - image: 'python:3.6' + - image: python:3.6-slim steps: - checkout - run: diff --git a/packages/gapic-generator/.dockerignore b/packages/gapic-generator/.dockerignore new file mode 100644 index 000000000000..67cb2311afa1 --- /dev/null +++ b/packages/gapic-generator/.dockerignore @@ -0,0 +1,61 @@ +# Version control scaffolding +.git +.gitignore + +# Docker scaffolding +Dockerfile +.dockerignore + +# Python scaffolding +*.py[cod] +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.tox +.cache +.pytest_cache +htmlcov + +# Translations +*.mo + +# Mac +.DS_Store + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# JetBrains +.idea + +# Built documentation +docs/_build +docs/_build_doc2dash + +# Virtual environment +env/ +coverage.xml + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore index f2185a4d2b5a..a9fe758f615a 100644 --- a/packages/gapic-generator/.gitignore +++ b/packages/gapic-generator/.gitignore @@ -53,9 +53,6 @@ docs/_build_doc2dash env/ coverage.xml -# System test environment variables. -system_tests/local_test_setup - # Make sure a generated file isn't accidentally committed. pylintrc pylintrc.test diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile new file mode 100644 index 000000000000..eada6a905033 --- /dev/null +++ b/packages/gapic-generator/Dockerfile @@ -0,0 +1,22 @@ +FROM python:3.7-slim + +# Install system packages. 
+RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + pandoc \ + && rm -rf /var/lib/apt/lists/* + +# Add protoc and our common protos. +COPY --from=gcr.io/gapic-images/api-common-protos:latest /usr/local/bin/protoc /usr/local/bin/protoc +COPY --from=gcr.io/gapic-images/api-common-protos:latest /protos/ /protos/ + +# Add our code to the Docker image. +ADD . /usr/src/gapic-generator-python/ + +# Install the tool within the image. +RUN pip install /usr/src/gapic-generator-python + +# Define the generator as an entry point. +ENTRYPOINT protoc --proto_path=/protos/ --proto_path=/in/ \ + --python_gapic_out=/out/ \ + `find /in/ -name *.proto` diff --git a/packages/gapic-generator/docs/conf.py b/packages/gapic-generator/docs/conf.py index dffe343df0f3..92849ebac27e 100644 --- a/packages/gapic-generator/docs/conf.py +++ b/packages/gapic-generator/docs/conf.py @@ -24,10 +24,14 @@ author = 'Luke Sneeringer' # The short X.Y version -version = '0.0.4' +version = os.environ.get('CIRCLE_TAG', 'latest') # The full version, including alpha/beta/rc tags -release = '0.0.4' +release = os.environ.get('CIRCLE_TAG', 'latest') +# Replace |version| in the docs with the actual version string. +rst_epilog = """ +.. |version| replace:: {version} +""".format(version=version) # -- General configuration --------------------------------------------------- diff --git a/packages/gapic-generator/docs/getting-started.rst b/packages/gapic-generator/docs/getting-started.rst deleted file mode 100644 index 4d07ca3ac40d..000000000000 --- a/packages/gapic-generator/docs/getting-started.rst +++ /dev/null @@ -1,125 +0,0 @@ -Getting Started ---------------- - -To use this plugin, you will need an API which is specified using a -protocol buffer. Additionally, this plugin makes some assumptions at the -margins according to `Google API design conventions`_, so following those -conventions is recommended. 
- -Example -~~~~~~~ - -If you want to experiment with an already-existing API, one example is -available. (Reminder that this is still considered experimental, so apologies -for this part being a bit strange.) - -You need to clone the `googleapis`_ repository from GitHub, and change to -a special branch: - -.. code-block:: shell - - $ git clone git@github.com:googleapis/googleapis.git - $ cd googleapis - $ git checkout --track -b input-contract origin/input-contract - $ cd .. - -The API available as an example (thus far) is the `Google Cloud Vision`_ API, -available in the ``google/cloud/vision/v1/`` subdirectory. This will be used -for the remainder of the examples on this page. - -You will also need the common protos, currently in experimental status, -which define certain client-specific annotations. These are in the -`api-common-protos`_ repository. Clone this from GitHub also: - -.. code-block:: shell - - $ git clone git@github.com:googleapis/api-common-protos.git - $ cd api-common-protos - $ git checkout --track -b input-contract origin/input-contract - $ cd .. - -.. _googleapis: https://github.com/googleapis/googleapis/tree/input-contract -.. _api-common-protos: https://github.com/googleapis/api-common-protos/tree/input-contract -.. _Google Cloud Vision: https://cloud.google.com/vision/ - - -Compiling an API -~~~~~~~~~~~~~~~~ - -To get a client library, you need to both compile the proto descriptors -into compiled message types (which is functionality built into ``protoc``) -and also into a client library (which is what this plugin does). - -These can be done in the same step. ``protoc`` requires an output destination -for each plugin invoked; you just want these to match: - -.. code-block:: shell - - # This is assumed to be in the `googleapis` project root, and we also - # assume that api-common-protos is next to it. - $ protoc google/cloud/vision/v1/*.proto \ - --proto_path=../api-common-protos/ --proto_path=. 
\ - --python_out=/dest/ --python_gapic_out=/dest/ - -.. note:: - - **A reminder about paths.** - - Remember that ``protoc`` is particular about paths. It requires all paths - where it expects to find protos, and *order matters*. In this case, - the common protos must come first, and then the path to the API being built. - - -Running a Client Library -~~~~~~~~~~~~~~~~~~~~~~~~ - -Once you have compiled a client library, it is time for the fun part: -actually running it! - -Create a virtual environment for the library: - -.. code-block:: shell - - $ virtualenv ~/.local/client-lib --python=`which python3.7` - $ source ~/.local/client-lib/bin/activate - -Next, install the library: - -.. code-block:: shell - - $ cd /dest/ - $ pip install --editable . - -Now it is time to play with it! -Here is a test script: - -.. code-block:: python - - # This is the client library generated by this plugin. - from google.cloud import vision - - # Instantiate the client. - # - # If you need to manually specify credentials, do so here. - # More info: https://cloud.google.com/docs/authentication/getting-started - # - # If you wish, you can send `transport='grpc'` or `transport='http'` - # to change which underlying transport layer is being used. - ia = vision.ImageAnnotator() - - # Send the request to the server and get the response. - response = ia.batch_annotate_images({ - 'requests': [{ - 'features': [{ - 'type': vision.types.image_annotator.Feature.Type.LABEL_DETECTION - }], - 'image': {'source': { - 'image_uri': 'https://s3.amazonaws.com/cdn0.michiganbulb.com' - '/images/350/66623.jpg', - }}, - }], - }) - print(response) - - -.. 
_Google API design conventions: https://cloud.google.com/apis/design/ diff --git a/packages/gapic-generator/docs/getting-started/_example.rst b/packages/gapic-generator/docs/getting-started/_example.rst new file mode 100644 index 000000000000..1bed548dcc2c --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/_example.rst @@ -0,0 +1,20 @@ +If you want to experiment with an already-existing API, one example is +available. (Reminder that this is still considered experimental, so apologies +for this part being a bit strange.) + +You need to clone the `googleapis`_ repository from GitHub, and change to +a special branch: + +.. code-block:: shell + + $ git clone git@github.com:googleapis/googleapis.git + $ cd googleapis + $ git checkout --track -b input-contract origin/input-contract + $ cd .. + +The API available as an example (thus far) is the `Google Cloud Vision`_ API, +available in the ``google/cloud/vision/v1/`` subdirectory. This will be used +for the remainder of the examples on this page. + +.. _googleapis: https://github.com/googleapis/googleapis/tree/input-contract +.. _Google Cloud Vision: https://cloud.google.com/vision/ diff --git a/packages/gapic-generator/docs/getting-started/_usage_intro.rst b/packages/gapic-generator/docs/getting-started/_usage_intro.rst new file mode 100644 index 000000000000..cad09dc6d3d3 --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/_usage_intro.rst @@ -0,0 +1,6 @@ +To use this plugin, you will need an API which is specified using +protocol buffers. Additionally, this plugin makes some assumptions at the +margins according to `Google API design conventions`_, so following those +conventions is recommended. + +.. 
_Google API design conventions: https://cloud.google.com/apis/design/ diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst new file mode 100644 index 000000000000..e25eafa15912 --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -0,0 +1,50 @@ +Verifying the Library +--------------------- + +Once you have compiled a client library, whether using a Docker image or +a local installation, it is time for the fun part: actually running it! + +Create a virtual environment for the library: + +.. code-block:: shell + + $ virtualenv ~/.local/client-lib --python=`which python3.7` + $ source ~/.local/client-lib/bin/activate + +Next, install the library: + +.. code-block:: shell + + $ cd /dest/ + $ pip install --editable . + +Now it is time to play with it! +Here is a test script: + +.. code-block:: python + + # This is the client library generated by this plugin. + from google.cloud import vision + + # Instantiate the client. + # + # If you need to manually specify credentials, do so here. + # More info: https://cloud.google.com/docs/authentication/getting-started + # + # If you wish, you can send `transport='grpc'` or `transport='http'` + # to change which underlying transport layer is being used. + ia = vision.ImageAnnotator() + + # Send the request to the server and get the response. 
+ response = ia.batch_annotate_images({ + 'requests': [{ + 'features': [{ + 'type': vision.types.image_annotator.Feature.Type.LABEL_DETECTION, + }], + 'image': {'source': { + 'image_uri': 'https://s3.amazonaws.com/cdn0.michiganbulb.com' + '/images/350/66623.jpg', + }}, + }], + }) + print(response) diff --git a/packages/gapic-generator/docs/getting-started/docker-shortcut.rst b/packages/gapic-generator/docs/getting-started/docker-shortcut.rst new file mode 100644 index 000000000000..88f3ef80967c --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/docker-shortcut.rst @@ -0,0 +1,28 @@ +:orphan: + +.. _docker-shortcut: + +Docker Shortcut Script +---------------------- + +Because code generation requires two mounts from the host machine into +the Docker image, and because the paths are somewhat pedantic, you may +find this shortcut script to be handy: + +.. literalinclude:: ../../gapic.sh + :language: shell + +Place it somewhere on your system, marked executable. + +Once available, it can be invoked using: + +.. code-block:: shell + + # This is assumed to be from the "proto root" directory. + $ gapic.sh --image gcr.io/gapic-images/gapic-generator-python \ + --in path/to/src/protos/ \ + --out dest/ + + +It will work not only with the Python code generator, but all of our code +generators that implement this Docker interface. diff --git a/packages/gapic-generator/docs/getting-started/docker.rst b/packages/gapic-generator/docs/getting-started/docker.rst new file mode 100644 index 000000000000..9e1dd355923b --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/docker.rst @@ -0,0 +1,116 @@ +.. _getting-started/docker: + +Docker Image +============ + +If you are just getting started with code generation for protobuf-based APIs, +or if you do not have a robust Python environment already available, we +recommend using our `Docker`_ image to build client libraries. 
+ +However, this tool offers first-class support for local execution using +protoc: :ref:`getting-started/local`. It is still reasonably easy, but +initial setup will take a bit longer. + +.. note:: + + If you are interested in contributing, using a local installation + is recommended. + +.. _Docker: https://docker.com/ + + +Installing +---------- + +Docker +~~~~~~ + +In order to use a Docker image, you must have `Docker`_ installed. +Docker is a container management service, and is available on Linux, Mac, +and Windows (although most of these instructions will be biased toward +Linux and Mac). + +Install Docker according to their `installation instructions`_. + +.. note:: + + This image requires Docker 17.05 or later. + +.. _installation instructions: https://docs.docker.com/install/ + +Pull the Docker Image +~~~~~~~~~~~~~~~~~~~~~ + +Once Docker is installed, simply pull the Docker image for this tool: + +.. parsed-literal:: + + $ docker pull gcr.io/gapic-images/gapic-generator-python:\ |version|\ + + +Usage +----- + +.. include:: _usage_intro.rst + +Example +~~~~~~~ + +.. include:: _example.rst + + +Compiling an API +~~~~~~~~~~~~~~~~ + +.. note:: + + If you are running code generation repeatedly, executing the + long ``docker run`` command may be cumbersome. While you should ensure + you understand this section, a :ref:`shortcut script` + is available to make iterative work easier. + +Compile the API into a client library by invoking the Docker image. + +It is worth noting that the image must interact with the host machine +(your local machine) for two things: reading in the protos you wish to compile, +and writing the output. This means that when you run the image, two mount +points are required in order for anything useful to happen. + +In particular, the input protos are expected to be mounted into ``/in/``, +and the desired output location is expected to be mounted into ``/out/``. +The output directory must also be writable. + +.. 
note:: + + The ``/in/`` and ``/out/`` directories inside the image are + hard-coded; they can not be altered where they appear in the command + below. + +Perform that step with ``docker run``: + +.. code-block:: shell + + # This is assumed to be run from the `googleapis` project root. + $ docker run \ + --mount type=bind,source=google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly \ + --mount type=bind,source=dest/,destination=/out/ \ + --rm \ + --user $UID \ + gcr.io/gapic-images/gapic-generator-python + +.. warning:: + + ``protoc`` is *very* picky about paths, and the exact construction here + matters a lot. The source is ``google/cloud/vision/v1/``, and then + the destination is that full directory path after the ``/in/`` root; + therefore: ``/in/google/cloud/vision/v1/``. + + This matters because of how proto imports are resolved. The ``import`` + statement imports a *file*, relative to a base directory or set of + base directories, called the ``proto_path``. This is assumed + (and hard-coded) to ``/in/`` in the Docker image, and so any directory + structure present in the imports of the proto files must be preserved + beneath this for compilation to succeed. + + +.. include:: _verifying.rst diff --git a/packages/gapic-generator/docs/getting-started/index.rst b/packages/gapic-generator/docs/getting-started/index.rst new file mode 100644 index 000000000000..f3d5e3ff6462 --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/index.rst @@ -0,0 +1,26 @@ +Getting Started +--------------- + +This code generator is implemented as a plugin to ``protoc``, the compiler +for `protocol buffers`_, and will run in any environment that Python 3.6+ and +protocol buffers do. + +Because dependency management and such can be a significant undertaking, we +offer a Docker image and interface which requires you only to have Docker +installed and provide the protos for your API. 
+ +It is also possible to install the tool locally and run it through ``protoc``, +and this approach is fully supported. + +.. note:: + + The Docker approach is recommended for users new to this ecosystem, or + those which do not have a robust Python environment available. + +.. _protocol buffers: https://developers.google.com/protocol-buffers/ + +.. toctree:: + :maxdepth: 4 + + docker + local diff --git a/packages/gapic-generator/docs/installing.rst b/packages/gapic-generator/docs/getting-started/local.rst similarity index 61% rename from packages/gapic-generator/docs/installing.rst rename to packages/gapic-generator/docs/getting-started/local.rst index 2496ae77dcdb..61107150ba33 100644 --- a/packages/gapic-generator/docs/installing.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -1,3 +1,22 @@ +.. _getting-started/local: + +Local Installation +================== + +If you are just getting started with code generation for protobuf-based APIs, +or if you do not have a robust Python environment already available, it is +probably easier to get started using Docker: :ref:`getting-started/docker` + +However, this tool offers first-class support for local execution using +``protoc``. It is still reasonably easy, but initial setup will take a bit +longer. + +.. note:: + + If you are interested in contributing, setup according to these steps + is recommended. + + Installing ---------- @@ -87,3 +106,51 @@ To ensure the tool is installed properly: .. _pyenv: https://github.com/pyenv/pyenv .. _pipsi: https://github.com/mitsuhiko/pipsi + +Usage +----- + +.. include:: _usage_intro.rst + +Example +~~~~~~~ + +.. include:: _example.rst + +You will also need the common protos, currently in experimental status, +which define certain client-specific annotations. These are in the +`api-common-protos`_ repository. Clone this from GitHub also: + +.. 
code-block:: shell + + $ git clone git@github.com:googleapis/api-common-protos.git + $ cd api-common-protos + $ git checkout --track -b input-contract origin/input-contract + $ cd .. + +.. _api-common-protos: https://github.com/googleapis/api-common-protos/tree/input-contract + + +Compiling an API +~~~~~~~~~~~~~~~~ + +Compile the API into a client library by invoking ``protoc`` directly. +This plugin is invoked under the hood via the ``--python_gapic_out`` switch. + +.. code-block:: shell + + # This is assumed to be in the `googleapis` project root, and we also + # assume that api-common-protos is next to it. + $ protoc google/cloud/vision/v1/*.proto \ + --proto_path=../api-common-protos/ --proto_path=. \ + --python_gapic_out=/dest/ + +.. note:: + + **A reminder about paths.** + + Remember that ``protoc`` is particular about paths. It requires all paths + where it expects to find protos, and *order matters*. In this case, + the common protos must come first, and then the path to the API being built. + +.. include:: _verifying.rst diff --git a/packages/gapic-generator/docs/index.rst b/packages/gapic-generator/docs/index.rst index 33b4c12d0464..9f05fc87f7d2 100644 --- a/packages/gapic-generator/docs/index.rst +++ b/packages/gapic-generator/docs/index.rst @@ -18,8 +18,7 @@ implemented as a plugin to ``protoc``, the protocol buffer compiler. .. toctree:: :maxdepth: 2 - installing - getting-started + getting-started/index api-configuration process templates diff --git a/packages/gapic-generator/gapic.sh b/packages/gapic-generator/gapic.sh new file mode 100755 index 000000000000..6102f05f6b5c --- /dev/null +++ b/packages/gapic-generator/gapic.sh @@ -0,0 +1,101 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +CMD="$0" + +# Set variables used by this script. +# All of these are set in options below, and all but $PATH are required. +IMAGE= +IN= +OUT= +PROTO_PATH=`pwd` + +# Print help and exit. +function show_help { + echo "Usage: $CMD --image IMAGE --in IN_DIR --out OUT_DIR [--path PATH_DIR]" + echo "" + echo "Required arguments:" + echo " --image The Docker image to use. The script will attempt to pull" + echo " it if it is not present." + echo " -i, --in A directory containing the protos describing the API" + echo " to be generated." + echo " -o, --out Destination directory for the completed client library." + echo "" + echo "Optional arguments:" + echo " -p, --path The base import path for the protos. Assumed to be the" + echo " current working directory if unspecified." + echo " -h, --help This help information." + exit 0 +} + +# Parse out options. +while true; do + case "$1" in + -h | --help ) show_help ;; + --image ) IMAGE="$2"; shift 2 ;; + -i | --in ) IN="$2"; shift 2 ;; + -o | --out ) OUT="$2"; shift 2 ;; + -p | --path ) PROTO_PATH=$2; shift 2 ;; + -- ) shift; break; ;; + * ) break ;; + esac +done + +# Ensure that all required options are set. +if [ -z "$IMAGE" ] || [ -z "$IN" ] || [ -z "$OUT" ]; then + >&2 echo "Required argument missing." + >&2 echo "The --image, --in, and --out arguments are all required." + >&2 echo "Run $CMD --help for more information." + exit 64 +fi + +# Ensure that the input directory exists (and is a directory). +if ! 
[ -d $IN ]; then + >&2 echo "Directory does not exist: $IN" + exit 2 +fi + +# Ensure Docker is running and seems healthy. +# This is mostly a check to bubble useful errors quickly. +if ! docker ps > /dev/null; then + exit $? +fi + +# If the output directory does not exist, create it. +if ! mkdir -p $OUT ; then + exit $? +fi + +# If the output directory is not empty, warn (but continue). +if [ "$(ls -A $OUT )"]; then + >&2 echo "Warning: Output directory is not empty." +fi + +# If the image is not yet on the machine, pull it. +if ! docker images $IMAGE > /dev/null; then + echo "Image $IMAGE not found; pulling." + if ! docker pull $IMAGE; then + exit $? + fi +fi + +# Generate the client library. +docker run \ + --mount type=bind,source=${PROTO_PATH}/${IN},destination=/in/${IN},readonly \ + --mount type=bind,source=$OUT,destination=/out \ + --rm \ + --user $UID \ + $IMAGE +exit $? From b8ccc87a2bcb293e37676004b3f0ccc131602355 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 7 Dec 2018 14:44:03 -0800 Subject: [PATCH 0053/1339] [chore] Automated publication to PyPI, GCR (#65) This wires up automated publication when release tags are added, to both PyPI and a release of the Docker image to gcr.io. 
--- .../gapic-generator/.circleci/.pypirc.enc | 2 + packages/gapic-generator/.circleci/config.yml | 93 ++++++++++++++++++- packages/gapic-generator/README.rst | 2 +- packages/gapic-generator/setup.py | 4 +- 4 files changed, 95 insertions(+), 6 deletions(-) create mode 100644 packages/gapic-generator/.circleci/.pypirc.enc diff --git a/packages/gapic-generator/.circleci/.pypirc.enc b/packages/gapic-generator/.circleci/.pypirc.enc new file mode 100644 index 000000000000..880107333821 --- /dev/null +++ b/packages/gapic-generator/.circleci/.pypirc.enc @@ -0,0 +1,2 @@ +Salted__ykÌ^Å7l#på×¾ä¼ßPn—.¢6c.æ»ÙÁ;ÅÀ­1 cý1ßàdýÊ`âÐöÄ_ \ñå Ëô¡Ÿ•R +–jáÖÝú…Qõ85t°%»¥ËH e \ No newline at end of file diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 756620446287..13a157ed75f0 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -4,13 +4,47 @@ workflows: version: 2 tests: jobs: - - unit-3.6 - - unit-3.7 + - unit-3.6: + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - unit-3.7: + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - showcase: requires: - unit-3.6 - unit-3.7 - - docs + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - docs: + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - publish_package: + requires: + - unit-3.6 + - unit-3.7 + - showcase + - docs + filters: + branches: + ignore: /.*/ + tags: + only: /^\d+\.\d+\.\d+$/ + - publish_image: + requires: + - unit-3.6 + - unit-3.7 + - showcase + - docs + filters: + branches: + ignore: /.*/ + tags: + only: /^\d+\.\d+\.\d+$/ jobs: unit-3.6: docker: @@ -103,3 +137,56 @@ jobs: - run: name: Build the documentation. command: nox -s docs + publish_package: + docker: + - image: python:3.7-slim + steps: + - checkout + - run: + name: Decrypt the PyPI key. + command: | + openssl aes-256-cbc -d \ + -in .circleci/.pypirc.enc \ + -out ~/.pypirc \ + -k "${PYPIRC_ENCRYPTION_KEY}" + - run: + name: Publish to PyPI. 
+ command: python setup.py sdist upload + publish_image: + docker: + - image: docker + steps: + - checkout + - setup_remote_docker + - run: + name: Build Docker image. + command: docker build . -t gcr.io/gapic-images/gapic-generator-python:latest + - run: + name: Download curl + command: apk add --no-cache curl + - run: + name: Download the GCR credential helper. + command: | + curl -fsSL https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v1.5.0/docker-credential-gcr_linux_amd64-1.5.0.tar.gz \ + | tar xz --to-stdout ./docker-credential-gcr \ + > /usr/bin/docker-credential-gcr && chmod a+x /usr/bin/docker-credential-gcr + - run: + name: Set up authentication to Google Container Registry. + command: | + echo ${GCLOUD_SERVICE_KEY} > ${GOOGLE_APPLICATION_CREDENTIALS} + docker-credential-gcr configure-docker + - run: + name: Tag the Docker image and push it to Google Container Registry. + command: | + if [ -n "$CIRCLE_TAG" ]; then + export MAJOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $1; }'` + export MINOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $2; }'` + export PATCH=`echo $CIRCLE_TAG | awk -F '.' 
'{ print $3; }'` + docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH + docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR + docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR + docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH + docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR + docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR + fi + docker push gcr.io/gapic-images/gapic-generator-python:latest diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index 38aa2a61e4fe..00db5e5917dd 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -34,7 +34,7 @@ Disclaimer This is not an official Google product. -.. |release level| image:: https://img.shields.io/badge/release%20level-pre%20alpha-red.svg?style=flat +.. |release level| image:: https://img.shields.io/badge/release%20level-alpha-orange.svg?style=flat :target: https://cloud.google.com/terms/launch-stages .. 
|docs| image:: https://readthedocs.org/projects/gapic-generator-python/badge/?version=latest :target: https://gapic-generator-python.readthedocs.io/ diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index be98ef389652..0ac428d12669 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.0.5', + version='0.1.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', @@ -51,7 +51,7 @@ ':python_version<"3.7"': ('dataclasses >= 0.4',), }, classifiers=( - 'Development Status :: 2 - Pre-Alpha', + 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', From 6ac27f21e5e11ad0493634357699468a5db3e8d7 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Mon, 17 Dec 2018 09:17:07 -0800 Subject: [PATCH 0054/1339] [feat] Add generic options flag to gapic script (#66) --- packages/gapic-generator/Dockerfile | 1 + packages/gapic-generator/gapic.sh | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index eada6a905033..5af539d320d1 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -19,4 +19,5 @@ RUN pip install /usr/src/gapic-generator-python # Define the generator as an entry point. ENTRYPOINT protoc --proto_path=/protos/ --proto_path=/in/ \ --python_gapic_out=/out/ \ + --python_gapic_opt=$PLUGIN_OPTIONS \ `find /in/ -name *.proto` diff --git a/packages/gapic-generator/gapic.sh b/packages/gapic-generator/gapic.sh index 6102f05f6b5c..8ab14191d633 100755 --- a/packages/gapic-generator/gapic.sh +++ b/packages/gapic-generator/gapic.sh @@ -20,11 +20,12 @@ CMD="$0" IMAGE= IN= OUT= +PLUGIN_OPTIONS= PROTO_PATH=`pwd` # Print help and exit. 
function show_help { - echo "Usage: $CMD --image IMAGE --in IN_DIR --out OUT_DIR [--path PATH_DIR]" + echo "Usage: $CMD --image IMAGE --in IN_DIR --out OUT_DIR [--options PLUGIN_OPTIONS --path PATH_DIR]" echo "" echo "Required arguments:" echo " --image The Docker image to use. The script will attempt to pull" @@ -34,9 +35,10 @@ function show_help { echo " -o, --out Destination directory for the completed client library." echo "" echo "Optional arguments:" - echo " -p, --path The base import path for the protos. Assumed to be the" - echo " current working directory if unspecified." - echo " -h, --help This help information." + echo " --options Options to be passed to the generator plugin" + echo " -p, --path The base import path for the protos. Assumed to be the" + echo " current working directory if unspecified." + echo " -h, --help This help information." exit 0 } @@ -47,6 +49,7 @@ while true; do --image ) IMAGE="$2"; shift 2 ;; -i | --in ) IN="$2"; shift 2 ;; -o | --out ) OUT="$2"; shift 2 ;; + --options ) PLUGIN_OPTIONS="$2"; shift 2 ;; -p | --path ) PROTO_PATH=$2; shift 2 ;; -- ) shift; break; ;; * ) break ;; @@ -97,5 +100,6 @@ docker run \ --mount type=bind,source=$OUT,destination=/out \ --rm \ --user $UID \ + --env "PLUGIN_OPTIONS=$PLUGIN_OPTIONS" \ $IMAGE exit $? From e7ff7e71794ad1851b80c41b1df5c7ae10a673e5 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Mon, 17 Dec 2018 09:25:17 -0800 Subject: [PATCH 0055/1339] [refactor] Remove docker pull from gapic.sh (#67) The `if` check did not work, but the functionality is unnecessary anyway (`docker run` will pull the image if it is not present). 
--- packages/gapic-generator/gapic.sh | 8 -------- 1 file changed, 8 deletions(-) diff --git a/packages/gapic-generator/gapic.sh b/packages/gapic-generator/gapic.sh index 8ab14191d633..09d785dc5724 100755 --- a/packages/gapic-generator/gapic.sh +++ b/packages/gapic-generator/gapic.sh @@ -86,14 +86,6 @@ if [ "$(ls -A $OUT )"]; then >&2 echo "Warning: Output directory is not empty." fi -# If the image is not yet on the machine, pull it. -if ! docker images $IMAGE > /dev/null; then - echo "Image $IMAGE not found; pulling." - if ! docker pull $IMAGE; then - exit $? - fi -fi - # Generate the client library. docker run \ --mount type=bind,source=${PROTO_PATH}/${IN},destination=/in/${IN},readonly \ From f9219985972851517989c17dd122b1ef64100bf5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 18 Dec 2018 13:55:31 -0800 Subject: [PATCH 0056/1339] [refactor] Hostname (#68) [refactor] Hostname This commit makes it such that the client and transport classes accept a `host` argument, which makes it much easier to override the hostname. It also makes it such that if no default host is specified, the `host` argument becomes required (which should be useful in sharded API situations). 
--- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../services/$service/client.py.j2 | 7 +++-- .../services/$service/transports/base.py.j2 | 10 +++++-- .../services/$service/transports/grpc.py.j2 | 30 +++++++++++++++---- .../services/$service/transports/http.py.j2 | 13 ++++---- .../gapic-generator/gapic/utils/__init__.py | 2 -- .../gapic/utils/placeholder.py | 23 -------------- .../gapic-generator/tests/system/conftest.py | 6 ++-- .../unit/schema/wrappers/test_service.py | 3 +- .../tests/unit/utils/test_placeholder.py | 23 -------------- 10 files changed, 49 insertions(+), 70 deletions(-) delete mode 100644 packages/gapic-generator/gapic/utils/placeholder.py delete mode 100644 packages/gapic-generator/tests/unit/utils/test_placeholder.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 1f0c9d56352d..eb09a10f9f06 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -553,7 +553,7 @@ def host(self) -> str: """ if self.options.Extensions[annotations_pb2.default_host]: return self.options.Extensions[annotations_pb2.default_host] - return utils.Placeholder('<<< SERVICE ADDRESS >>>') + return None @property def oauth_scopes(self) -> Sequence[str]: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index 0a5bcef75043..d99f31c93a0c 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -19,18 +19,21 @@ from .transports import {{ service.name }}Transport class {{ service.name }}: """{{ service.meta.doc|rst(width=72, indent=4) }}""" def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% 
endif %}, credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, ) -> None: """Instantiate the {{ (service.name|snake_case).replace('_', ' ') }}. Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credential]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.{{ service.name }}Transport): The + transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. """ @@ -44,7 +47,7 @@ class {{ service.name }}: self._transport = transport else: Transport = get_transport_class(transport) - self._transport = Transport(credentials=credentials) + self._transport = Transport(credentials=credentials, host=host) {% for method in service.methods.values() -%} {%- if method.signatures.single_dispatch -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 index 71cd91c7505f..74542794967d 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 @@ -17,8 +17,6 @@ from google.auth import credentials class {{ service.name }}Transport(metaclass=abc.ABCMeta): """Abstract transport class for {{ service.name }}.""" - SERVICE_ADDRESS: str = '{{ service.host }}' - AUTH_SCOPES: typing.Tuple[str] = ( {%- for scope in service.oauth_scopes %} '{{ scope }}', @@ -27,17 +25,25 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): def 
__init__( self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, ) -> None: """Instantiate the transport. Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + # If no credentials are provided, then determine the appropriate # defaults. if credentials is None: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 index 6e0335fd2b1e..3261dbda18c4 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 @@ -29,19 +29,37 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ - def __init__(self, *, credentials: credentials.Credentials = None) -> None: + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + channel: grpc.Channel = None) -> None: """Instantiate the transport. Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. + This argument is ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. """ - super().__init__(credentials=credentials) + # Sanity check: Ensure that channel and credentials are not both + # provided. + if channel: + credentials = False + + # Run the base constructor. + super().__init__(host=host, credentials=credentials) self._stubs = {} + # If a channel was explicitly provided, set it. + if channel: + self._grpc_channel = channel + @property def grpc_channel(self) -> grpc.Channel: """Create the channel designed to connect to this service. @@ -51,15 +69,15 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """ # Sanity check: Only create a new channel if we do not already # have one. - if 'grpc_channel' not in self.__dict__: - self.__dict__['grpc_channel'] = grpc_helpers.create_channel( - '{0}:443'.format(self.SERVICE_ADDRESS), + if not hasattr(self, '_grpc_channel'): + self._grpc_channel = grpc_helpers.create_channel( + self._host, credentials=self._credentials, scopes=self.AUTH_SCOPES, ) # Return the channel from cache. 
- return self.__dict__['grpc_channel'] + return self._grpc_channel {%- if service.has_lro %} @property diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 index c894d928cc22..a9ec4512216f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 @@ -26,18 +26,21 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): It sends protocol buffers over the wire, but uses HTTP/1.1. """ def __init__(self, *, - credentials: credentials.Credentials = None, - ) -> None: + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + ) -> None: """Instantiate the transport. Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
""" - super().__init__(credentials=credentials) + super().__init__(host=host, credentials=credentials) self._session = AuthorizedSession(self._credentials) {%- if service.has_lro %} @@ -53,7 +56,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): from google.api_core import grpc_helpers self.__dict__['operations_client'] = operations_v1.OperationsClient( grpc_helpers.create_channel( - '{0}:443'.format(self.SERVICE_ADDRESS), + self._host, credentials=self._credentials, scopes=self.AUTH_SCOPES, ) @@ -90,7 +93,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): # Send the request. response = self._session.post( 'https://{host}/$rpc/{package}.{service}/{method}'.format( - host=self.SERVICE_ADDRESS, + host=self._host, method='{{ method.name }}', package='{{ '.'.join(method.meta.address.package) }}', service='{{ service.name }}', diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index cd575a1f78f3..38610c0ab838 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -17,13 +17,11 @@ from gapic.utils.filename import to_valid_filename from gapic.utils.filename import to_valid_module_name from gapic.utils.lines import wrap -from gapic.utils.placeholder import Placeholder from gapic.utils.rst import rst __all__ = ( 'cached_property', - 'Placeholder', 'rst', 'to_snake_case', 'to_valid_filename', diff --git a/packages/gapic-generator/gapic/utils/placeholder.py b/packages/gapic-generator/gapic/utils/placeholder.py deleted file mode 100644 index eb7edad94aa3..000000000000 --- a/packages/gapic-generator/gapic/utils/placeholder.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -class Placeholder(str): - """A string which always has a boolean value of False. - - Used for sending placeholder strings to templates, such that the string - is meaningful if used, but an if check will resolve to False. - """ - def __bool__(self): - return False diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 65edc953c71f..5fd0fdc831fa 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -14,7 +14,6 @@ import pytest -from google.auth.credentials import AnonymousCredentials from google.showcase import Echo from google.showcase_v1alpha2.services.echo.transports.grpc import ( EchoGrpcTransport, @@ -25,8 +24,7 @@ @pytest.fixture def echo(): - transport = EchoGrpcTransport(credentials=AnonymousCredentials()) - transport.__dict__['grpc_channel'] = grpc.insecure_channel( - transport.SERVICE_ADDRESS, + transport = EchoGrpcTransport( + channel=grpc.insecure_channel('localhost:7469'), ) return Echo(transport=transport) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 81f6dc065aa4..158ef4b6a15f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -36,8 +36,7 @@ def test_service_host(): def test_service_no_host(): service = make_service() - assert service.host == '<<< SERVICE ADDRESS >>>' - assert 
bool(service.host) is False + assert service.host is None def test_service_scopes(): diff --git a/packages/gapic-generator/tests/unit/utils/test_placeholder.py b/packages/gapic-generator/tests/unit/utils/test_placeholder.py deleted file mode 100644 index 8463446a6140..000000000000 --- a/packages/gapic-generator/tests/unit/utils/test_placeholder.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from gapic.utils import placeholder - - -def test_placeholder_str_eq(): - assert placeholder.Placeholder('foo') == 'foo' - - -def test_placeholder_falsity(): - assert not placeholder.Placeholder('foo') From 2e0c3f3c4658545d4f4e399ac3b0274bb86a4d63 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 18 Dec 2018 14:42:11 -0800 Subject: [PATCH 0057/1339] [bugfix] Fix dotted fields in method signatures. 
(#70) --- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../tests/unit/schema/wrappers/test_method.py | 31 ++++++++++++++++++- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index eb09a10f9f06..3da9d6cbee41 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -454,7 +454,7 @@ def signatures(self) -> Tuple[signature_pb2.MethodSignature]: answer.append(MethodSignature( name=sig.function_name if sig.function_name else self.name, fields=collections.OrderedDict([ - (f.split('.')[-1], self.input.get_field(f)) + (f.split('.')[-1], self.input.get_field(*f.split('.'))) for f in sig.fields ]), )) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 3df431d7b8d3..7d8f983c5c64 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -57,6 +57,32 @@ def test_method_signature(): assert tuple(signature.fields.keys()) == ('int_field', 'float_field') +def test_method_signature_nested(): + # Set up a meaningful input message. + inner_msg = make_message(name='Inner', fields=( + make_field('int_field', type=5), + make_field('bool_field', type=8), + make_field('float_field', type=2), + )) + outer_msg = make_message(name='Outer', fields=( + make_field('inner', type=9, message=inner_msg), + )) + + # Create the method. + method = make_method('SendStuff', input_message=outer_msg) + + # Edit the underlying method pb2 post-hoc to add the appropriate annotation + # (google.api.signature). + method.options.Extensions[annotations_pb2.method_signature].MergeFrom( + signature_pb2.MethodSignature(fields=['inner.int_field']) + ) + + # We should get back just those two fields as part of the signature. 
+ assert len(method.signatures) == 1 + signature = method.signatures[0] + assert tuple(signature.fields.keys()) == ('int_field',) + + def test_method_no_signature(): assert len(make_method('Ping').signatures) == 0 @@ -149,7 +175,9 @@ def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', def make_field(name: str, repeated: bool = False, - meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: + meta: metadata.Metadata = None, + message: wrappers.MessageType = None, + **kwargs) -> wrappers.Method: field_pb = descriptor_pb2.FieldDescriptorProto( name=name, label=3 if repeated else 1, @@ -157,5 +185,6 @@ def make_field(name: str, repeated: bool = False, ) return wrappers.Field( field_pb=field_pb, + message=message, meta=meta or metadata.Metadata(), ) From b53d3d44523d39a27cb609afff99182a7d4343df Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Tue, 18 Dec 2018 21:07:51 -0800 Subject: [PATCH 0058/1339] change shortcut script option parsin (#73) --- packages/gapic-generator/gapic.sh | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic.sh b/packages/gapic-generator/gapic.sh index 09d785dc5724..bf207a943106 100755 --- a/packages/gapic-generator/gapic.sh +++ b/packages/gapic-generator/gapic.sh @@ -25,7 +25,7 @@ PROTO_PATH=`pwd` # Print help and exit. function show_help { - echo "Usage: $CMD --image IMAGE --in IN_DIR --out OUT_DIR [--options PLUGIN_OPTIONS --path PATH_DIR]" + echo "Usage: $CMD --image IMAGE --in IN_DIR --out OUT_DIR [--path PATH_DIR]" echo "" echo "Required arguments:" echo " --image The Docker image to use. The script will attempt to pull" @@ -35,7 +35,6 @@ function show_help { echo " -o, --out Destination directory for the completed client library." echo "" echo "Optional arguments:" - echo " --options Options to be passed to the generator plugin" echo " -p, --path The base import path for the protos. Assumed to be the" echo " current working directory if unspecified." 
echo " -h, --help This help information." @@ -49,8 +48,8 @@ while true; do --image ) IMAGE="$2"; shift 2 ;; -i | --in ) IN="$2"; shift 2 ;; -o | --out ) OUT="$2"; shift 2 ;; - --options ) PLUGIN_OPTIONS="$2"; shift 2 ;; -p | --path ) PROTO_PATH=$2; shift 2 ;; + --* ) PLUGIN_OPTIONS="$PLUGIN_OPTIONS $1 $2"; shift 2 ;; -- ) shift; break; ;; * ) break ;; esac @@ -92,6 +91,6 @@ docker run \ --mount type=bind,source=$OUT,destination=/out \ --rm \ --user $UID \ - --env "PLUGIN_OPTIONS=$PLUGIN_OPTIONS" \ - $IMAGE + $IMAGE \ + $PLUGIN_OPTIONS exit $? From b51f145048867c68be42c1032549633d591b846b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 18 Dec 2018 21:11:04 -0800 Subject: [PATCH 0059/1339] [refactor] Add guard clause for LRO type annotations. (#71) --- packages/gapic-generator/gapic/schema/api.py | 6 +++ .../tests/unit/schema/test_api.py | 41 +++++++++++++++++++ 2 files changed, 47 insertions(+) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 1f0152a3eab3..174cf7ec8444 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -499,6 +499,12 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # a specialized object in its place. output_type = self.all_messages[meth_pb.output_type.lstrip('.')] if meth_pb.output_type.endswith('google.longrunning.Operation'): + if not lro.response_type or not lro.metadata_type: + raise TypeError( + f'rpc {meth_pb.name} returns a google.longrunning.' 
+ 'Operation, but is missing a response type or ' + 'metadata type.', + ) output_type = self._get_operation_type( response_type=self.all_messages[ address.resolve(lro.response_type) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 7a4b0453e742..a0be5b7fe399 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -15,6 +15,8 @@ from typing import Sequence from unittest import mock +import pytest + from google.api import annotations_pb2 from google.api import longrunning_pb2 from google.protobuf import descriptor_pb2 @@ -483,6 +485,45 @@ def test_lro(): assert len(lro_proto.messages) == 1 +def test_lro_missing_annotation(): + # Set up a prior proto that mimics google/protobuf/empty.proto + lro_proto = api.Proto.build(make_file_pb2( + name='operations.proto', package='google.longrunning', + messages=(make_message_pb2(name='Operation'),), + ), file_to_generate=False, naming=make_naming()) + + # Set up a method with an LRO but no annotation. + method_pb2 = descriptor_pb2.MethodDescriptorProto( + name='AsyncDoThing', + input_type='google.example.v3.AsyncDoThingRequest', + output_type='google.longrunning.Operation', + ) + + # Set up the service with an RPC. + service_pb = descriptor_pb2.ServiceDescriptorProto( + name='LongRunningService', + method=(method_pb2,), + ) + + # Set up the messages, including the annotated ones. + messages = ( + make_message_pb2(name='AsyncDoThingRequest', fields=()), + ) + + # Finally, set up the file that encompasses these. + fdp = make_file_pb2( + package='google.example.v3', + messages=messages, + services=(service_pb,), + ) + + # Make the proto object. 
+ with pytest.raises(TypeError): + proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ + 'google/longrunning/operations.proto': lro_proto, + }, naming=make_naming()) + + def test_enums(): L = descriptor_pb2.SourceCodeInfo.Location enum_pb = descriptor_pb2.EnumDescriptorProto(name='Silly', value=( From 799dc80e0a2ebe17e6638067a1344a1463a7700b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 19 Dec 2018 09:24:37 -0800 Subject: [PATCH 0060/1339] [chore] Run CI Showcase in its own image. (#72) --- packages/gapic-generator/.circleci/config.yml | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 13a157ed75f0..41e1ee09096f 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -93,6 +93,7 @@ jobs: showcase: docker: - image: python:3.7-slim + - image: gcr.io/gapic-showcase/gapic-showcase:0.0.7 steps: - checkout - run: @@ -111,18 +112,6 @@ jobs: cd /usr/src/protoc/ unzip protoc-3.6.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Download and extract Showcase. - command: | - curl --location https://github.com/googleapis/gapic-showcase/releases/download/v0.0.7/gapic-showcase-0.0.7-linux-amd64.tar.gz --output /tmp/gapic-showcase.tar.gz - cd /tmp - tar xvfz /tmp/gapic-showcase.tar.gz - mv /tmp/gapic-showcase /usr/local/bin/showcase - chmod a+x /usr/local/bin/showcase - - run: - name: Run Showcase. - command: /usr/local/bin/showcase start - background: true - run: name: Run showcase tests. 
command: nox -s showcase From 14bba13909054e684a0ccb3fe73009acfe465675 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Dec 2018 18:06:58 -0800 Subject: [PATCH 0061/1339] [dep] Pin proto-plus to >= 0.1.0 (#74) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index f69d28f255f6..15fbac5e0c5e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -22,7 +22,7 @@ setuptools.setup( 'google-api-core >= 1.3.0, < 2.0.0dev', 'googleapis-common-protos >= 1.6.0b6', 'grpcio >= 1.10.0', - 'proto-plus >= 0.1.0a2', + 'proto-plus >= 0.1.0', ), classifiers=[ 'Development Status :: 3 - Alpha', From 1e89d4fc1da9dae4dd3f8f800b11ac1d57f115d4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 10:29:12 -0800 Subject: [PATCH 0062/1339] [fix] Allow comments to end in close-quote. (#78) This commit fixes an error where a short comment that ended with a double-quote character would cause a parse error when placed inside a docstring. --- packages/gapic-generator/gapic/utils/rst.py | 6 ++++++ packages/gapic-generator/tests/unit/utils/test_rst.py | 7 +++++++ 2 files changed, 13 insertions(+) diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index 3ce965092b6d..68026cbf107e 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -52,5 +52,11 @@ def rst(text, width=72, indent=0, source_format='commonmark'): if '\n' in answer: answer += '\n' + ' ' * indent + # If the text ends in a double-quote, append a period. + # This ensures that we do not get a parse error when this output is + # followed by triple-quotes. + if answer.endswith('"'): + answer += '.' + # Done; return the answer. 
return answer diff --git a/packages/gapic-generator/tests/unit/utils/test_rst.py b/packages/gapic-generator/tests/unit/utils/test_rst.py index e7116d5e0f7c..876768293fa1 100644 --- a/packages/gapic-generator/tests/unit/utils/test_rst.py +++ b/packages/gapic-generator/tests/unit/utils/test_rst.py @@ -39,3 +39,10 @@ def test_rst_add_newline(): s = 'The hail in Wales\nfalls mainly on the snails.' assert utils.rst(s) == s + '\n' assert convert_text.call_count == 0 + + +def test_rst_pad_close_quote(): + with mock.patch.object(pypandoc, 'convert_text') as convert_text: + s = 'A value, as in "foo"' + assert utils.rst(s) == s + '.' + assert convert_text.call_count == 0 From 66d8ada1b663b100b11a1faede9308b9974b43b6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 15:20:10 -0800 Subject: [PATCH 0063/1339] [chore] Add Showcase test for unary-stream methods. (#76) --- packages/gapic-generator/nox.py | 5 +--- .../tests/system/test_grpc_streams.py | 25 +++++++++++++++++++ 2 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_grpc_streams.py diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 89578a17a518..15bbed0ad9c6 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -49,10 +49,7 @@ def showcase(session): session.log('Note: Showcase must be running for these tests to work.') session.log('See https://github.com/googleapis/gapic-showcase') session.log('-' * 70) - session.run('netstat', '-plnt', '|', 'grep', ':7469', silent=True) - - # Use the Python implementation of protocol buffers. 
- session.env['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python' + session.run('lsof', '-i', '4tcp:7469', '-sTCP:LISTEN', silent=True) # Install pytest and gapic-generator-python session.install('pytest') diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py new file mode 100644 index 000000000000..7a1c70b15dc2 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -0,0 +1,25 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +def test_unary_stream(echo): + content = 'The hail in Wales falls mainly on the snails.' + responses = echo.expand({ + 'content': content, + }) + + # Consume the response and ensure it matches what we expect. + # with pytest.raises(exceptions.NotFound) as exc: + for ground_truth, response in zip(content.split(' '), responses): + assert response.content == ground_truth + assert ground_truth == 'snails.' From d58448bc5d5b74e7eee6204d26c98553af2c582f Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 15:21:37 -0800 Subject: [PATCH 0064/1339] [fix] Add MANIFEST.in to generated output. 
(#79) --- packages/gapic-generator/gapic/templates/MANIFEST.in.j2 | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 packages/gapic-generator/gapic/templates/MANIFEST.in.j2 diff --git a/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 b/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 new file mode 100644 index 000000000000..b7239d5404a9 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 @@ -0,0 +1,2 @@ +recursive-include {{ '/'.join(api.naming.module_namespace + (api.naming.module_name,)) }} *.py +recursive-include {{ '/'.join(api.naming.module_namespace + (api.naming.versioned_module_name,)) }} *.py From a80ac7ec630c4e3d912daf10c08d292303b5a42d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 15:23:19 -0800 Subject: [PATCH 0065/1339] [refactor] Explicitly declare the marshal using the package name. (#81) This makes it so that each installed package uses a distinct proto-plus marshal (allowing the ability for advanced customization without stomping on one another). 
--- .../$name_$version/types/$sub/$proto.py.j2 | 17 ++++++++--------- .../$name_$version/types/$sub/_message.py.j2 | 1 + .../gapic-generator/gapic/templates/setup.py.j2 | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 index 2cb4ad09fb30..ba4215d9f070 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 @@ -14,15 +14,6 @@ import proto{% if p != 'proto' %} as {{ p }}{% endif -%} {{ import_ }} {% endfor %} - -{% for enum in proto.top.enums.values() -%} - {% include '$namespace/$name_$version/types/$sub/_enum.py.j2' with context %} -{% endfor %} - -{% for message in proto.top.messages.values() -%} - {% include "$namespace/$name_$version/types/$sub/_message.py.j2" with context %} -{% endfor %} - __all__ = ( {%- for enum in proto.top.enums.values() %} '{{ enum.name }}', @@ -31,4 +22,12 @@ __all__ = ( '{{ message.name }}', {%- endfor %} ) + +{% for enum in proto.top.enums.values() -%} + {% include '$namespace/$name_$version/types/$sub/_enum.py.j2' with context %} +{% endfor %} + +{% for message in proto.top.messages.values() -%} + {% include "$namespace/$name_$version/types/$sub/_message.py.j2" with context %} +{% endfor %} {% endwith %}{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 index a537aa2f46e7..a8a9e51103f2 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 @@ -23,4 +23,5 @@ class {{ message.name 
}}({{ p }}.Message): class Meta: package = '{{ message.ident.proto_package }}' + marshal = '{{ api.naming.warehouse_package_name }}' {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 15fbac5e0c5e..4d005b9e676a 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -22,7 +22,7 @@ setuptools.setup( 'google-api-core >= 1.3.0, < 2.0.0dev', 'googleapis-common-protos >= 1.6.0b6', 'grpcio >= 1.10.0', - 'proto-plus >= 0.1.0', + 'proto-plus >= 0.2.1', ), classifiers=[ 'Development Status :: 3 - Alpha', From 6accde608144b2d7faa59659eb01f3a75aab1c29 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 15:25:14 -0800 Subject: [PATCH 0066/1339] [refactor] Remove explicit coercion to protobuf in GAPIC. (#77) This is now handled by proto-plus under the hood. --- .../$namespace/$name_$version/services/$service/client.py.j2 | 4 ---- 1 file changed, 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 index d99f31c93a0c..85e9f3eebf70 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 @@ -75,10 +75,6 @@ class {{ service.name }}: {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} """ - # Coerce the request to the protocol buffer object. - if not isinstance(request, {{ method.input.ident }}): - request = {{ method.input.ident }}(**request) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method.wrap_method( From 38a74f8011450ed856b88403f4f312aaca10a5ad Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 15:26:59 -0800 Subject: [PATCH 0067/1339] [fix] Nested messages (string form) should always begin at root. (#80) This commit makes it so that when referring to nested messages as strings, the full qualified name is always used. It fixes a bug where a partial name would be used if the referenced message was declared later. --- packages/gapic-generator/gapic/schema/metadata.py | 14 ++++++-------- .../tests/unit/schema/test_metadata.py | 12 ++++++++++++ 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index dfbe2b0e8e27..ec5c788680be 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -176,14 +176,6 @@ def rel(self, address: 'Address') -> str: """ # Is this referencing a message in the same proto file? if self.package == address.package and self.module == address.module: - # It is possible that a field references a message that has - # not yet been declared. If so, send its name enclosed in quotes - # (a string) instead. - if (len(self.module_path) == len(address.module_path) and - self.module_path > address.module_path or - self == address): - return f"'{self.name}'" - # Edge case: If two (or more) messages are nested under a common # parent message, and one references another, then return that # enclosed in quotes. @@ -203,6 +195,12 @@ def rel(self, address: 'Address') -> str: if self.parent and self.parent[0] == address.name: return '.'.join(self.parent[1:] + (self.name,)) + # It is possible that a field references a message that has + # not yet been declared. If so, send its name enclosed in quotes + # (a string) instead. 
+ if self.module_path > address.module_path or self == address: + return f"'{'.'.join(self.parent + (self.name,))}'" + # This is a message in the same module, already declared. # Send its name. return '.'.join(self.parent + (self.name,)) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 3ed4657446ad..76744e142a32 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -104,6 +104,18 @@ def test_address_rel_nested_sibling(): assert addr.rel(other) == "'Spam.Bacon'" +def test_address_rel_nested_sibling_later(): + addr = metadata.Address( + module='baz', name='Bacon', module_path=(4, 0, 3, 1), + package=('foo', 'bar'), parent=('Spam',) + ) + other = metadata.Address( + module='baz', name='Ham', module_path=(4, 0, 3, 0), + package=('foo', 'bar'), parent=('Spam',) + ) + assert addr.rel(other) == "'Spam.Bacon'" + + def test_address_rel_nested_parent(): parent = metadata.Address(module='baz', name='Ham', package=('foo', 'bar')) child = metadata.Address( From 45ab5fd9801b20c8512b62add69929da83097354 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Dec 2018 15:31:45 -0800 Subject: [PATCH 0068/1339] [chore] Bump version to 0.2.0 (#75) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0ac428d12669..473f78e0ebb8 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.1.0', + version='0.2.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 60a64c63c4b228445dadb294add93980421d06b2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 28 Dec 2018 15:40:26 -0800 Subject: [PATCH 0069/1339] [refactor] Separate rendered protos 
from unrendered ones. This commit makes it such that API.protos provides only the protos being rendered, not dependencies. (The complete set of protos is on the `all_protos` property.) --- .../gapic-generator/gapic/generator/generator.py | 2 -- packages/gapic-generator/gapic/schema/api.py | 15 +++++++++++++-- .../tests/unit/generator/test_generator.py | 2 +- .../gapic-generator/tests/unit/schema/test_api.py | 5 +++-- 4 files changed, 17 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 885c573afea1..db4ad1954749 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -85,8 +85,6 @@ def get_response(self) -> CodeGeneratorResponse: # Some templates are rendered once per proto (and API may have # one or more protos). for proto in self._api.protos.values(): - if not proto.file_to_generate: - continue output_files.update(self._render_templates( self._env.loader.proto_templates, additional_context={'proto': proto}, diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 174cf7ec8444..c5298c9205b9 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -180,7 +180,7 @@ class API: (as ``api``). """ naming: api_naming.Naming - protos: Mapping[str, Proto] + all_protos: Mapping[str, Proto] @classmethod def build(cls, @@ -215,7 +215,7 @@ def build(cls, ).proto # Done; return the API. - return cls(naming=naming, protos=protos) + return cls(naming=naming, all_protos=protos) @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: @@ -231,6 +231,17 @@ def messages(self) -> Mapping[str, wrappers.MessageType]: *[p.messages for p in self.protos.values()], ) + @cached_property + def protos(self) -> Mapping[str, Proto]: + """Return a map of all protos specific to this API. 
+ + This property excludes imported protos that are dependencies + of this API but not being directly generated. + """ + return collections.OrderedDict([ + (k, v) for k, v in self.all_protos.items() if v.file_to_generate + ]) + @cached_property def services(self) -> Mapping[str, wrappers.Service]: """Return a map of all services available in the API.""" diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 806465bb58cc..b4f969aaf5f8 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -291,7 +291,7 @@ def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, def make_api(*protos, naming: naming.Naming = None) -> api.API: return api.API( naming=naming or make_naming(), - protos={i.name: i for i in protos}, + all_protos={i.name: i for i in protos}, ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index a0be5b7fe399..a7cc58d08e69 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -68,8 +68,9 @@ def test_api_build(): # Establish that the API has the data expected. assert isinstance(api_schema, api.API) - assert len(api_schema.protos) == 2 - assert 'google.dep.ImportedMessage' in api_schema.messages + assert len(api_schema.all_protos) == 2 + assert len(api_schema.protos) == 1 + assert 'google.dep.ImportedMessage' not in api_schema.messages assert 'google.example.v1.Foo' in api_schema.messages assert 'google.example.v1.GetFooRequest' in api_schema.messages assert 'google.example.v1.GetFooResponse' in api_schema.messages From 3bf8f8a862d4a93f4089021c9fe8b287a43e0beb Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 28 Dec 2018 17:36:59 -0800 Subject: [PATCH 0070/1339] [refactor] Add a subpackages component on API. 
This adds a subpackages method on the API object, which will provide slices of the API object corresponding to each subpackage. --- packages/gapic-generator/gapic/schema/api.py | 30 ++++++++++++++++++- .../tests/unit/schema/test_api.py | 19 ++++++++++-- 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index c5298c9205b9..8f0ace6cfcae 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -181,6 +181,7 @@ class API: """ naming: api_naming.Naming all_protos: Mapping[str, Proto] + subpackage_view: Tuple[str] = dataclasses.field(default_factory=tuple) @classmethod def build(cls, @@ -238,8 +239,11 @@ def protos(self) -> Mapping[str, Proto]: This property excludes imported protos that are dependencies of this API but not being directly generated. """ + view = self.subpackage_view return collections.OrderedDict([ - (k, v) for k, v in self.all_protos.items() if v.file_to_generate + (k, v) for k, v in self.all_protos.items() + if v.file_to_generate and + v.meta.address.subpackage[:len(view)] == view ]) @cached_property @@ -249,6 +253,30 @@ def services(self) -> Mapping[str, wrappers.Service]: *[p.services for p in self.protos.values()], ) + @cached_property + def subpackages(self) -> Mapping[str, 'API']: + """Return a map of all subpackages, if any. + + Each value in the mapping is another API object, but the ``protos`` + property only shows protos belonging to the subpackage. + """ + answer = collections.OrderedDict() + + # Get the actual subpackages we have. + # + # Note that this intentionally only goes one level deep; nested + # subpackages can be accessed by requesting subpackages of the + # derivative API objects returned here. 
+ level = len(self.subpackage_view) + for subpkg_name in sorted({p.meta.address.subpackage[0] + for p in self.protos.values() + if len(p.meta.address.subpackage) > level and + p.meta.address.subpackage[:level] == self.subpackage_view}): + answer[subpkg_name] = dataclasses.replace(self, + subpackage_view=self.subpackage_view + (subpkg_name,), + ) + return answer + class _ProtoBuilder: """A "builder class" for Proto objects. diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index a7cc58d08e69..8f5fe07e8d38 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -35,6 +35,11 @@ def test_api_build(): package='google.dep', messages=(make_message_pb2(name='ImportedMessage', fields=()),), ), + make_file_pb2( + name='common.proto', + package='google.example.v1.common', + messages=(make_message_pb2(name='Bar'),), + ), make_file_pb2( name='foo.proto', package='google.example.v1', @@ -68,9 +73,10 @@ def test_api_build(): # Establish that the API has the data expected. assert isinstance(api_schema, api.API) - assert len(api_schema.all_protos) == 2 - assert len(api_schema.protos) == 1 + assert len(api_schema.all_protos) == 3 + assert len(api_schema.protos) == 2 assert 'google.dep.ImportedMessage' not in api_schema.messages + assert 'google.example.v1.common.Bar' in api_schema.messages assert 'google.example.v1.Foo' in api_schema.messages assert 'google.example.v1.GetFooRequest' in api_schema.messages assert 'google.example.v1.GetFooResponse' in api_schema.messages @@ -80,6 +86,13 @@ def test_api_build(): imp.Import(package=('google', 'dep'), module='dep_pb2'), ) + # Establish that the subpackages work. 
+ assert 'common' in api_schema.subpackages + sub = api_schema.subpackages['common'] + assert len(sub.protos) == 1 + assert 'google.example.v1.common.Bar' in sub.messages + assert 'google.example.v1.Foo' not in sub.messages + def test_proto_build(): fdp = descriptor_pb2.FileDescriptorProto( @@ -520,7 +533,7 @@ def test_lro_missing_annotation(): # Make the proto object. with pytest.raises(TypeError): - proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ + api.Proto.build(fdp, file_to_generate=True, prior_protos={ 'google/longrunning/operations.proto': lro_proto, }, naming=make_naming()) From ced63b9b8d9a9fe8bb97782b7cbb9fbed54c6801 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 28 Dec 2018 18:28:18 -0800 Subject: [PATCH 0071/1339] [refactor] Restore the 1:1 relationship between template and file. Maintaining this as a single commit if we decide to use explicit namespace packages in the future (it will be easy to reverse). --- .../gapic/generator/generator.py | 42 +++++++------------ .../tests/unit/generator/test_generator.py | 40 ++++++------------ 2 files changed, 28 insertions(+), 54 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index db4ad1954749..76a90d0433d4 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -128,27 +128,27 @@ def _render_templates( # Iterate over the provided templates and generate a File object # for each. for template_name in templates: - for fn in self._get_filenames(template_name, context=context): - # Generate the File object. - answer[fn] = CodeGeneratorResponse.File( - content=formatter.fix_whitespace( - self._env.get_template(template_name).render( - api=self._api, - **context - ), + # Generate the File object. 
+ fn = self._get_filename(template_name, context=context) + answer[fn] = CodeGeneratorResponse.File( + content=formatter.fix_whitespace( + self._env.get_template(template_name).render( + api=self._api, + **context ), - name=fn, - ) + ), + name=fn, + ) # Done; return the File objects based on these templates. return answer - def _get_filenames( + def _get_filename( self, template_name: str, *, context: dict = None, - ) -> Tuple[str]: - """Return the appropriate output filenames for this template. + ) -> str: + """Return the appropriate output filename for this template. This entails running the template name through a series of replacements to replace the "filename variables" (``$name``, @@ -165,22 +165,10 @@ def _get_filenames( context (Mapping): Additional context being sent to the template. Returns: - Tuple[str]: The appropriate output filenames. + str: The appropriate output filename. """ filename = template_name[:-len('.j2')] - # Special case: If the filename is `$namespace/__init__.py`, we - # need this exact file to be part of every individual directory - # in the namespace tree. Handle this special case. - # - # For more information, see: - # https://packaging.python.org/guides/packaging-namespace-packages/ - if filename == os.path.join('$namespace', '__init__.py'): - return tuple([ - os.path.sep.join(i.split('.') + ['__init__.py']) - for i in self._api.naming.namespace_packages - ]) - # Replace the $namespace variable. filename = filename.replace( '$namespace', @@ -216,7 +204,7 @@ def _get_filenames( filename = re.sub(r'/+', '/', filename) # Done, return the filename. 
- return (filename,) + return filename __all__ = ( diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index b4f969aaf5f8..7ea5b4ae3278 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -186,15 +186,15 @@ def test_render_templates_additional_context(): assert files['foo'].content == 'A bird!\n' -def test_get_filenames(): +def test_get_filename(): g = generator.Generator(api_schema=make_api( naming=make_naming(namespace=(), name='Spam', version='v2'), )) template_name = '$namespace/$name_$version/foo.py.j2' - assert g._get_filenames(template_name) == ('spam_v2/foo.py',) + assert g._get_filename(template_name) == 'spam_v2/foo.py' -def test_get_filenames_with_namespace(): +def test_get_filename_with_namespace(): g = generator.Generator(api_schema=make_api( naming=make_naming( name='Spam', @@ -203,15 +203,15 @@ def test_get_filenames_with_namespace(): ), )) template_name = '$namespace/$name_$version/foo.py.j2' - assert g._get_filenames(template_name) == ('ham/bacon/spam_v2/foo.py',) + assert g._get_filename(template_name) == 'ham/bacon/spam_v2/foo.py' -def test_get_filenames_with_service(): +def test_get_filename_with_service(): g = generator.Generator(api_schema=make_api( naming=make_naming(namespace=(), name='Spam', version='v2'), )) template_name = '$name/$service/foo.py.j2' - assert g._get_filenames( + assert g._get_filename( template_name, context={ 'service': wrappers.Service( @@ -219,10 +219,10 @@ def test_get_filenames_with_service(): service_pb=descriptor_pb2.ServiceDescriptorProto(name='Eggs'), ), } - ) == ('spam/eggs/foo.py',) + ) == 'spam/eggs/foo.py' -def test_get_filenames_with_proto(): +def test_get_filename_with_proto(): file_pb2 = descriptor_pb2.FileDescriptorProto( name='bacon.proto', package='foo.bar.v1', @@ -233,13 +233,13 @@ def test_get_filenames_with_proto(): 
) g = generator.Generator(api_schema=api) - assert g._get_filenames( + assert g._get_filename( '$name/types/$proto.py.j2', context={'proto': api.protos['bacon.proto']}, - ) == ('spam/types/bacon.py',) + ) == 'spam/types/bacon.py' -def test_get_filenames_with_proto_and_sub(): +def test_get_filename_with_proto_and_sub(): file_pb2 = descriptor_pb2.FileDescriptorProto( name='bacon.proto', package='foo.bar.v2.baz', @@ -256,24 +256,10 @@ def test_get_filenames_with_proto_and_sub(): ) g = generator.Generator(api_schema=api) - assert g._get_filenames( + assert g._get_filename( '$name/types/$sub/$proto.py.j2', context={'proto': api.protos['bacon.proto']}, - ) == ('bar/types/baz/bacon.py',) - - -def test_get_filenames_with_namespace_init(): - g = generator.Generator(api_schema=make_api(naming=make_naming( - namespace=('Foo', 'Bar', 'Baz'), - name='Spam', - version='v2', - ))) - template_name = '$namespace/__init__.py.j2' - assert g._get_filenames(template_name) == ( - 'foo/__init__.py', - 'foo/bar/__init__.py', - 'foo/bar/baz/__init__.py', - ) + ) == 'bar/types/baz/bacon.py' def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, From 1a7e8cff2aeb024382e5627f687bc64efa9b95d9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 28 Dec 2018 20:43:04 -0800 Subject: [PATCH 0072/1339] [fix] Refactor the template loader. This fixes several issues with how subpackages and service/proto replacement logic interacted with one another. The new refactor should be much more resilient. This commit also refactors many (but not all) tests that instrumented the applicable code to avoid calling private API. 
--- .../docs/reference/generator.rst | 3 - .../gapic-generator/gapic/cli/generate.py | 2 +- .../gapic/generator/generator.py | 174 ++++++---- .../gapic-generator/gapic/generator/loader.py | 97 ------ .../tests/unit/generator/test_generator.py | 306 ++++++++---------- .../tests/unit/generator/test_loader.py | 39 --- 6 files changed, 242 insertions(+), 379 deletions(-) delete mode 100644 packages/gapic-generator/gapic/generator/loader.py delete mode 100644 packages/gapic-generator/tests/unit/generator/test_loader.py diff --git a/packages/gapic-generator/docs/reference/generator.rst b/packages/gapic-generator/docs/reference/generator.rst index cf441d517a89..eb2192fb2c05 100644 --- a/packages/gapic-generator/docs/reference/generator.rst +++ b/packages/gapic-generator/docs/reference/generator.rst @@ -5,6 +5,3 @@ generator .. automodule:: gapic.generator.generator :members: - -.. automodule:: gapic.generator.loader - :members: diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index 471fc42e12f6..e0895f4a8695 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -62,7 +62,7 @@ def generate( # Translate into a protobuf CodeGeneratorResponse; this reads the # individual templates and renders them. # If there are issues, error out appropriately. - res = generator.Generator(api_schema, templates=templates).get_response() + res = generator.Generator(templates).get_response(api_schema) # Output the serialized response. 
output.write(res.SerializeToString()) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 76a90d0433d4..a87c50beddc1 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -15,7 +15,7 @@ import collections import os import re -from typing import Any, Iterable, Mapping, Sequence, Tuple +from typing import Mapping, Sequence import jinja2 @@ -23,29 +23,22 @@ from gapic import utils from gapic.generator import formatter -from gapic.generator import loader from gapic.schema import api class Generator: """A protoc code generator for client libraries. - This class receives a :class:`~.api.API`, a representation of the API - schema, and provides an interface for getting a - :class:`~.plugin_pb2.CodeGeneratorResponse` (which it does through - rendering templates). + This class provides an interface for getting a + :class:`~.plugin_pb2.CodeGeneratorResponse` for an :class:`~api.API` + schema object (which it does through rendering templates). Args: - api_schema (~.API): An API schema object, which is sent to every - template as the ``api`` variable. templates (str): Optional. Path to the templates to be rendered. If this is not provided, the templates included with this application are used. """ - def __init__(self, api_schema: api.API, *, - templates: str = None) -> None: - self._api = api_schema - + def __init__(self, templates: str = None) -> None: # If explicit templates were not provided, use our default. if not templates: templates = os.path.join( @@ -55,7 +48,7 @@ def __init__(self, api_schema: api.API, *, # Create the jinja environment with which to render templates. 
self._env = jinja2.Environment( - loader=loader.TemplateLoader(searchpath=templates), + loader=jinja2.FileSystemLoader(searchpath=templates), undefined=jinja2.StrictUndefined, ) @@ -64,88 +57,129 @@ def __init__(self, api_schema: api.API, *, self._env.filters['snake_case'] = utils.to_snake_case self._env.filters['wrap'] = utils.wrap - def get_response(self) -> CodeGeneratorResponse: + def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. This is a complete response to be written to (usually) stdout, and thus read by ``protoc``. + Args: + api_schema (~api.API): An API schema object. + Returns: ~.CodeGeneratorResponse: A response describing appropriate files and contents. See ``plugin.proto``. """ output_files = collections.OrderedDict() - # Some templates are rendered once per API client library. - # These are generally boilerplate packaging and metadata files. - output_files.update( - self._render_templates(self._env.loader.api_templates), - ) - - # Some templates are rendered once per proto (and API may have - # one or more protos). - for proto in self._api.protos.values(): - output_files.update(self._render_templates( - self._env.loader.proto_templates, - additional_context={'proto': proto}, - )) - - # Some templates are rendered once per service (an API may have - # one or more services). - for service in self._api.services.values(): - output_files.update(self._render_templates( - self._env.loader.service_templates, - additional_context={'service': service}, + # Iterate over each template and add the appropriate output files + # based on that template. + for template_name in self._env.loader.list_templates(): + # Sanity check: Skip "private" templates. + filename = template_name.split('/')[-1] + if filename.startswith('_') and filename != '__init__.py.j2': + continue + + # Append to the output files dictionary. 
+ output_files.update(self._render_template(template_name, + api_schema=api_schema, )) # Return the CodeGeneratorResponse output. return CodeGeneratorResponse(file=[i for i in output_files.values()]) - def _render_templates( + def _render_template( self, - templates: Iterable[str], *, - additional_context: Mapping[str, Any] = None, + template_name: str, *, + api_schema: api.API, ) -> Sequence[CodeGeneratorResponse.File]: """Render the requested templates. Args: - templates (Iterable[str]): The set of templates to be rendered. - It is expected that these come from the methods on - :class:`~.loader.TemplateLoader`, and they should be - able to be set to the :meth:`jinja2.Environment.get_template` + template_name (str): The template to be rendered. + It is expected that these come from + :class:`jinja2.FileSystemLoader`, and they should be + able to be sent to the :meth:`jinja2.Environment.get_template` method. - additional_context (Mapping[str, Any]): Additional variables - to be sent to the templates. The ``api`` variable - is always available. + api_schema (~.api.API): An API schema object. Returns: Sequence[~.CodeGeneratorResponse.File]: A sequence of File objects for inclusion in the final response. """ answer = collections.OrderedDict() - context = additional_context or {} - - # Iterate over the provided templates and generate a File object - # for each. - for template_name in templates: - # Generate the File object. - fn = self._get_filename(template_name, context=context) - answer[fn] = CodeGeneratorResponse.File( - content=formatter.fix_whitespace( - self._env.get_template(template_name).render( - api=self._api, - **context - ), - ), - name=fn, - ) - - # Done; return the File objects based on these templates. + skip_subpackages = False + + # Sanity check: Rendering per service and per proto would be a + # combinatorial explosion and is almost certainly not what anyone + # ever wants. Error colorfully on it. 
+ if '$service' in template_name and '$proto' in template_name: + raise ValueError('Template files may live under a $proto or ' + '$service directory, but not both.') + + # If this template should be rendered for subpackages, process it + # for all subpackages and set the strict flag (restricting what + # services and protos we pull from for the remainder of the method). + if '$sub' in template_name: + for subpackage in api_schema.subpackages.values(): + answer.update(self._render_template(template_name, + api_schema=subpackage, + )) + skip_subpackages = True + + # If this template should be rendered once per proto, iterate over + # all protos to be rendered + if '$proto' in template_name: + for proto in api_schema.protos.values(): + if (skip_subpackages and proto.meta.address.subpackage != + api_schema.subpackage_view): + continue + answer.update(self._get_file(template_name, + api_schema=api_schema, + proto=proto + )) + return answer + + # If this template should be rendered once per service, iterate + # over all services to be rendered. + if '$service' in template_name: + for service in api_schema.services.values(): + if (skip_subpackages and service.meta.address.subpackage != + api_schema.subpackage_view): + continue + answer.update(self._get_file(template_name, + api_schema=api_schema, + service=service, + )) + return answer + + # This file is not iterating over anything else; return back + # the one applicable file. 
+ answer.update(self._get_file(template_name, api_schema=api_schema)) return answer + def _get_file(self, template_name: str, *, + api_schema=api.API, + **context: Mapping): + """Render a template to a protobuf plugin File object.""" + fn = self._get_filename(template_name, + api_schema=api_schema, + context=context, + ) + return {fn: CodeGeneratorResponse.File( + content=formatter.fix_whitespace( + self._env.get_template(template_name).render( + api=api_schema, + **context + ), + ), + name=fn, + )} + def _get_filename( self, template_name: str, *, + api_schema: api.API, context: dict = None, ) -> str: """Return the appropriate output filename for this template. @@ -162,6 +196,7 @@ def _get_filename( Args: template_name (str): The filename of the template, from the filesystem, relative to ``templates/``. + api_schema (~.api.API): An API schema object. context (Mapping): Additional context being sent to the template. Returns: @@ -172,14 +207,16 @@ def _get_filename( # Replace the $namespace variable. filename = filename.replace( '$namespace', - os.path.sep.join([i.lower() for i in self._api.naming.namespace]), + os.path.sep.join([i.lower() for i in api_schema.naming.namespace]), ).lstrip(os.path.sep) - # Replace the $name and $version variables. + # Replace the $name, $version, and $sub variables. filename = filename.replace('$name_$version', - self._api.naming.versioned_module_name) - filename = filename.replace('$version', self._api.naming.version) - filename = filename.replace('$name', self._api.naming.module_name) + api_schema.naming.versioned_module_name) + filename = filename.replace('$version', api_schema.naming.version) + filename = filename.replace('$name', api_schema.naming.module_name) + filename = filename.replace('$sub', + '/'.join(api_schema.subpackage_view)) # Replace the $service variable if applicable. 
if context and 'service' in context: @@ -194,9 +231,6 @@ def _get_filename( filename = filename.replace( '$proto', context['proto'].module_name, - ).replace( - '$sub', - '/'.join(context['proto'].meta.address.subpackage), ) # Paths may have empty path segments if components are empty diff --git a/packages/gapic-generator/gapic/generator/loader.py b/packages/gapic-generator/gapic/generator/loader.py deleted file mode 100644 index 46f6e56b96fa..000000000000 --- a/packages/gapic-generator/gapic/generator/loader.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import typing - -import jinja2 - -from gapic.utils import cached_property - - -class TemplateLoader(jinja2.FileSystemLoader): - """A jinja2 template loader that tracks what is left to be loaded. - - This class behaves identically to :class:`jinja2.FileSystemLoader` - but provides methods to return templates segmented by type. - - There are two types of templates: templates that describe the API as a - whole (and for which the template is rendered once per API), and templates - describing a service (which are rendered once per service in the API). - """ - @cached_property - def api_templates(self) -> typing.Set[str]: - """Return the (public) templates tied to the API as a whole. - - All templates in the ``templates/`` directory are included except: - - * Templates corresponding to services (in a ``$service/`` - subdirectory) are excluded. 
See :meth:`service_templates`. - * Templates beginning with ``_`` are excluded. - - When these templates are rendered, they are expected to be sent - one and only one variable: an :class:`~.API` object spelled ``api``. - - Returns: - Set[str]: A set of templates. - """ - # Start with the full list of templates, excluding private ones, - # but exclude templates from other methods on this loader. - return set( - [t for t in self.list_templates() if not self.is_private(t)] - ).difference(self.service_templates).difference(self.proto_templates) - - @cached_property - def service_templates(self): - """Return the templates specific to each service. - - This corresponds to all of the templates with ``$service`` - in the filename or path. - - When these templates are rendered, they are expected to be sent - two variables: an :class:`~.API` object spelled ``api``, and the - :class:`~.wrappers.Service` object being iterated over, spelled - ``service``. These templates are rendered once per service, with - a distinct ``service`` variable each time. - - Returns: - Set[str]: A list of service templates. - """ - return set( - [t for t in self.list_templates() if '$service' in t] - ) - - @cached_property - def proto_templates(self): - """Return the templates specific to each proto. - - This corresponds to all of the templates with ``$proto`` - in the filename or path. - - When these templates are rendered, they are expected to be sent - two variables: an :class:`~.API` object spelled ``api``, and the - :class:`~.wrappers.Proto` object being iterated over, spelled - ``proto``. These templates are rendered once per proto, with - a distinct ``proto`` variable each time. - - Returns: - Set[str]: A list of proto templates. 
- """ - return set( - [t for t in self.list_templates() if '$proto' in t] - ) - - def is_private(self, path): - """Return True if ``path`` is a private template, False otherwise.""" - filename = path.split('/')[-1] - return filename != '__init__.py.j2' and filename.startswith('_') diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 7ea5b4ae3278..211c9c950242 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -17,8 +17,9 @@ import jinja2 +import pytest + from google.protobuf import descriptor_pb2 -from google.protobuf.compiler import plugin_pb2 from gapic.generator import generator from gapic.schema import api @@ -26,193 +27,156 @@ from gapic.schema import wrappers -def test_constructor(): - # Create a generator. - g = generator.Generator(api_schema=make_api()) - assert isinstance(g._api, api.API) - - # Assert we have a Jinja environment also, with the expected filters. - # This is internal implementation baseball, but this is the best place - # to establish this and templates will depend on it. - assert isinstance(g._env, jinja2.Environment) - assert 'snake_case' in g._env.filters - assert 'wrap' in g._env.filters - - def test_custom_template_directory(): # Create a generator. - g = generator.Generator(api_schema=make_api(), templates='/templates/') + g = generator.Generator(templates='/templates/') # Assert that the Jinja loader will pull from the correct location. assert g._env.loader.searchpath == ['/templates/'] def test_get_response(): - # Create a generator with mock data. - # - # We want to ensure that templates are rendered for each service, - # which we prove by sending two services. 
- file_pb2 = descriptor_pb2.FileDescriptorProto( - name='bacon.proto', - package='foo.bar.v1', - service=[descriptor_pb2.ServiceDescriptorProto(name='SpamService'), - descriptor_pb2.ServiceDescriptorProto(name='EggsService')], - ) - api_schema = make_api(make_proto(file_pb2)) - g = generator.Generator(api_schema=api_schema) - - # Mock all the rendering methods. - with mock.patch.object(g, '_render_templates') as _render_templates: - _render_templates.return_value = { - 'template_file': plugin_pb2.CodeGeneratorResponse.File( - name='template_file', - content='This was a template.', - ), - } - - # Okay, now run the `get_response` method. - response = g.get_response() - - # First and foremost, we care that we got a valid response - # object back (albeit not so much what is in it). - assert isinstance(response, plugin_pb2.CodeGeneratorResponse) - - # Next, determine that the general API templates and service - # templates were both called; the method should be called - # once per service, once per proto, plus one for the API as a whole. - assert _render_templates.call_count == sum([ - 1, # for the API as a whole - len(api_schema.services), - len(api_schema.protos), - ]) - - # The service templates should have been called with the - # filename transformation and the additional `service` variable. - for call in _render_templates.mock_calls: - _, args, kwargs = call - if args[0] != g._env.loader.service_templates: - continue - service = kwargs['additional_context']['service'] - assert isinstance(service, wrappers.Service) - - -def test_get_response_skipped_proto(): - # Create a generator with mock data. - # - # We want to ensure that templates are rendered for each service, - # which we prove by sending two services. - file_pb2 = descriptor_pb2.FileDescriptorProto( - name='bacon.proto', - package='foo.bar.v1', - ) - api_schema = make_api(make_proto(file_pb2, file_to_generate=False)) - g = generator.Generator(api_schema=api_schema) - - # Mock all the rendering methods. 
- with mock.patch.object(g, '_render_templates') as _render_templates: - _render_templates.return_value = { - 'template_file': plugin_pb2.CodeGeneratorResponse.File( - name='template_file', - content='This was a template.', - ), - } - - # Okay, now run the `get_response` method. - g.get_response() - - # Since there are no protos and no services, only the rollup, API-wide - # call should have happened. - assert _render_templates.call_count == 1 - - -def test_render_templates(): - g = generator.Generator(api_schema=make_api()) - - # Determine the templates to be rendered. - templates = ('foo.j2', 'bar.j2') - with mock.patch.object(jinja2.Environment, 'get_template') as get_template: - get_template.side_effect = lambda t: jinja2.Template( - f'Hello, I am `{t}`.', - ) - - # Render the templates. - files = g._render_templates(templates) - - # Test that we get back the expected content for each template. - assert len(files) == 2 - assert files['foo'].name == 'foo' - assert files['bar'].name == 'bar' - assert files['foo'].content == 'Hello, I am `foo.j2`.\n' - assert files['bar'].content == 'Hello, I am `bar.j2`.\n' - - -def test_render_templates_duplicate(): - g = generator.Generator(api_schema=make_api()) - - # Determine the templates to be rendered. - # In the case of duplication, we want the last one encountered to win. - templates = ('foo.j2', 'foo.j2') - with mock.patch.object(jinja2.Environment, 'get_template') as get_template: - get_template.side_effect = ( - jinja2.Template(f'Hello, I am the first.'), - jinja2.Template(f'Hello, I am the second.'), - ) - - # Render the templates. - files = g._render_templates(templates) - - # Test that we get back the expected content for each template. - assert len(files) == 1 - assert files['foo'].name == 'foo' - assert files['foo'].content == 'Hello, I am the second.\n' - - -def test_render_templates_additional_context(): - g = generator.Generator(api_schema=make_api()) - - # Determine the templates to be rendered. 
- templates = ('foo.j2',) - with mock.patch.object(jinja2.Environment, 'get_template') as get_template: - get_template.return_value = jinja2.Template('A {{ thing }}!') - - # Render the templates. - files = g._render_templates(templates, additional_context={ - 'thing': 'bird', - }) - - # Test that we get back the expected content for each template. - assert len(files) == 1 - assert files['foo'].name == 'foo' - assert files['foo'].content == 'A bird!\n' + g = generator.Generator() + with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = ['foo/bar/baz.py.j2'] + with mock.patch.object(jinja2.Environment, 'get_template') as gt: + gt.return_value = jinja2.Template('I am a template result.') + cgr = g.get_response(api_schema=make_api()) + lt.assert_called_once() + gt.assert_called_once() + assert len(cgr.file) == 1 + assert cgr.file[0].name == 'foo/bar/baz.py' + assert cgr.file[0].content == 'I am a template result.\n' + + +def test_get_response_ignores_private_files(): + g = generator.Generator() + with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = ['foo/bar/baz.py.j2', 'foo/bar/_base.py.j2'] + with mock.patch.object(jinja2.Environment, 'get_template') as gt: + gt.return_value = jinja2.Template('I am a template result.') + cgr = g.get_response(api_schema=make_api()) + lt.assert_called_once() + gt.assert_called_once() + assert len(cgr.file) == 1 + assert cgr.file[0].name == 'foo/bar/baz.py' + assert cgr.file[0].content == 'I am a template result.\n' + + +def test_get_response_fails_invalid_file_paths(): + g = generator.Generator() + with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = ['foo/bar/$service/$proto/baz.py.j2'] + with pytest.raises(ValueError) as ex: + g.get_response(api_schema=make_api()) + assert '$proto' in str(ex) and '$service' in str(ex) + + +def test_get_response_enumerates_services(): + g = generator.Generator() + with 
mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = ['foo/$service/baz.py.j2'] + with mock.patch.object(jinja2.Environment, 'get_template') as gt: + gt.return_value = jinja2.Template('Service: {{ service.name }}') + cgr = g.get_response(api_schema=make_api(make_proto( + descriptor_pb2.FileDescriptorProto(service=[ + descriptor_pb2.ServiceDescriptorProto(name='Spam'), + descriptor_pb2.ServiceDescriptorProto(name='EggsService'), + ]), + ))) + assert len(cgr.file) == 2 + assert {i.name for i in cgr.file} == { + 'foo/spam/baz.py', + 'foo/eggs_service/baz.py', + } + + +def test_get_response_enumerates_proto(): + g = generator.Generator() + with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = ['foo/$proto.py.j2'] + with mock.patch.object(jinja2.Environment, 'get_template') as gt: + gt.return_value = jinja2.Template('Proto: {{ proto.module_name }}') + cgr = g.get_response(api_schema=make_api( + make_proto(descriptor_pb2.FileDescriptorProto(name='a.proto')), + make_proto(descriptor_pb2.FileDescriptorProto(name='b.proto')), + )) + assert len(cgr.file) == 2 + assert {i.name for i in cgr.file} == {'foo/a.py', 'foo/b.py'} + + +def test_get_response_divides_subpackages(): + g = generator.Generator() + api_schema = api.API.build([ + descriptor_pb2.FileDescriptorProto( + name='top.proto', + package='foo.v1', + service=[descriptor_pb2.ServiceDescriptorProto(name='Top')], + ), + descriptor_pb2.FileDescriptorProto( + name='a/spam/ham.proto', + package='foo.v1.spam', + service=[descriptor_pb2.ServiceDescriptorProto(name='Bacon')], + ), + descriptor_pb2.FileDescriptorProto( + name='a/eggs/yolk.proto', + package='foo.v1.eggs', + service=[descriptor_pb2.ServiceDescriptorProto(name='Scramble')], + ), + ], package='foo.v1') + with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = [ + 'foo/$sub/types/$proto.py.j2', + 'foo/$sub/services/$service.py.j2', + ] + with 
mock.patch.object(jinja2.Environment, 'get_template') as gt: + gt.return_value = jinja2.Template(""" + {{- '' }}Subpackage: {{ '.'.join(api.subpackage_view) }} + """.strip()) + cgr = g.get_response(api_schema=api_schema) + assert len(cgr.file) == 6 + assert {i.name for i in cgr.file} == { + 'foo/types/top.py', + 'foo/services/top.py', + 'foo/spam/types/ham.py', + 'foo/spam/services/bacon.py', + 'foo/eggs/types/yolk.py', + 'foo/eggs/services/scramble.py', + } def test_get_filename(): - g = generator.Generator(api_schema=make_api( - naming=make_naming(namespace=(), name='Spam', version='v2'), - )) + g = generator.Generator() template_name = '$namespace/$name_$version/foo.py.j2' - assert g._get_filename(template_name) == 'spam_v2/foo.py' + assert g._get_filename(template_name, + api_schema=make_api( + naming=make_naming(namespace=(), name='Spam', version='v2'), + ) + ) == 'spam_v2/foo.py' def test_get_filename_with_namespace(): - g = generator.Generator(api_schema=make_api( - naming=make_naming( - name='Spam', - namespace=('Ham', 'Bacon'), - version='v2', - ), - )) + g = generator.Generator() template_name = '$namespace/$name_$version/foo.py.j2' - assert g._get_filename(template_name) == 'ham/bacon/spam_v2/foo.py' + assert g._get_filename(template_name, + api_schema=make_api( + naming=make_naming( + name='Spam', + namespace=('Ham', 'Bacon'), + version='v2', + ), + ), + ) == 'ham/bacon/spam_v2/foo.py' def test_get_filename_with_service(): - g = generator.Generator(api_schema=make_api( - naming=make_naming(namespace=(), name='Spam', version='v2'), - )) + g = generator.Generator() template_name = '$name/$service/foo.py.j2' assert g._get_filename( template_name, + api_schema=make_api( + naming=make_naming(namespace=(), name='Spam', version='v2'), + ), context={ 'service': wrappers.Service( methods=[], @@ -232,9 +196,10 @@ def test_get_filename_with_proto(): naming=make_naming(namespace=(), name='Spam', version='v2'), ) - g = generator.Generator(api_schema=api) + g = 
generator.Generator() assert g._get_filename( '$name/types/$proto.py.j2', + api_schema=api, context={'proto': api.protos['bacon.proto']}, ) == 'spam/types/bacon.py' @@ -253,11 +218,13 @@ def test_get_filename_with_proto_and_sub(): api = make_api( make_proto(file_pb2, naming=naming), naming=naming, + subpackage_view=('baz',), ) - g = generator.Generator(api_schema=api) + g = generator.Generator() assert g._get_filename( '$name/types/$sub/$proto.py.j2', + api_schema=api, context={'proto': api.protos['bacon.proto']}, ) == 'bar/types/baz/bacon.py' @@ -274,10 +241,11 @@ def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, ).proto -def make_api(*protos, naming: naming.Naming = None) -> api.API: +def make_api(*protos, naming: naming.Naming = None, **kwargs) -> api.API: return api.API( naming=naming or make_naming(), all_protos={i.name: i for i in protos}, + **kwargs ) diff --git a/packages/gapic-generator/tests/unit/generator/test_loader.py b/packages/gapic-generator/tests/unit/generator/test_loader.py deleted file mode 100644 index ec392de23f60..000000000000 --- a/packages/gapic-generator/tests/unit/generator/test_loader.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock - -from gapic.generator.loader import TemplateLoader - - -def test_service_templates(): - loader = TemplateLoader(searchpath='<<< IRRELEVANT >>>') - with mock.patch.object(loader, 'list_templates') as list_templates: - list_templates.return_value = [ - '_base.j2', 'foo.j2', 'bar.j2', - '$service/spam.j2', '$service/eggs.j2', '$service/py/spameggs.j2', - ] - assert loader.service_templates == { - '$service/spam.j2', '$service/eggs.j2', '$service/py/spameggs.j2', - } - - -def test_api_templates(): - loader = TemplateLoader(searchpath='<<< IRRELEVANT >>>') - with mock.patch.object(loader, 'list_templates') as list_templates: - list_templates.return_value = [ - '_base.j2', 'foo.j2', 'bar.j2', - '$service/spam.j2', '$service/eggs.j2', '$service/py/spameggs.j2', - ] - assert loader.api_templates == {'foo.j2', 'bar.j2'} From ab48284dbe78fcd3137e5715b89288460b1520d3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 29 Dec 2018 15:01:59 -0800 Subject: [PATCH 0073/1339] [feat] Import API types into the types module. This commit imports all the individual API's types into the types module, so the user can just import that module and access everything from there. It honors subpackages, and subpackages are retained (the outer types module imports the subpackages, which in turn import the modules within them). 
--- .../$name_$version/types/$sub/__init__.py.j2 | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 new file mode 100644 index 000000000000..0829a110c5ee --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 @@ -0,0 +1,31 @@ +{% extends "_base.py.j2" %} + +{% block content %} +{# Import subpackages. -#} +{% for subpackage in api.subpackages.keys() -%} +from . import {{ subpackage }} +{% endfor -%} + +{# Import messages from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. +-#} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} +{% for message in proto.messages.values()|sort(attribute='name') -%} +from .{{ proto.module_name }} import {{ message.name }} +{% endfor %}{% endfor %} + + +__all__ = ( + {%- for subpackage in api.subpackages.keys() %} + '{{ subpackage }}', + {%- endfor %} + {%- for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view %} + {%- for message in proto.messages.values()|sort(attribute='name') %} + '{{ message.name }}', + {%- endfor %}{% endfor %} +) +{%- endblock %} From e44497a41a57695a8576259326b8bf7a3093122a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 29 Dec 2018 21:25:42 -0800 Subject: [PATCH 0074/1339] [chore] Version number bump to 0.3.0 --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py 
b/packages/gapic-generator/setup.py index 473f78e0ebb8..5ec77247c586 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.2.0', + version='0.3.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 67ba84f76a1669cfc0251c4cffda3a5cf97639f9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sun, 30 Dec 2018 13:43:27 -0800 Subject: [PATCH 0075/1339] [fix] Address bad import against nested messages. (#83) This commit fixes a spurious import that tried to import nested messages directly from module scope. --- .../$namespace/$name_$version/types/$sub/__init__.py.j2 | 4 ++-- packages/gapic-generator/setup.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 index 0829a110c5ee..b0372068d4c1 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 @@ -13,7 +13,7 @@ from . 
import {{ subpackage }} -#} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values()|sort(attribute='name') -%} +{% for message in proto.top.messages.values()|sort(attribute='name') -%} from .{{ proto.module_name }} import {{ message.name }} {% endfor %}{% endfor %} @@ -24,7 +24,7 @@ __all__ = ( {%- endfor %} {%- for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.messages.values()|sort(attribute='name') %} + {%- for message in proto.top.messages.values()|sort(attribute='name') %} '{{ message.name }}', {%- endfor %}{% endfor %} ) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5ec77247c586..8eaa8a40fd65 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.3.0', + version='0.3.1', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 75ecd3a0a72b0dd7515b8c206ca8171a729fc653 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 31 Dec 2018 19:39:28 -0800 Subject: [PATCH 0076/1339] [chore] Bump Showcase to 0.0.9. 
(#69) --- packages/gapic-generator/.circleci/config.yml | 2 +- packages/gapic-generator/nox.py | 4 ++-- packages/gapic-generator/tests/system/conftest.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 41e1ee09096f..6a30a49157a2 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -93,7 +93,7 @@ jobs: showcase: docker: - image: python:3.7-slim - - image: gcr.io/gapic-showcase/gapic-showcase:0.0.7 + - image: gcr.io/gapic-showcase/gapic-showcase:0.0.9 steps: - checkout - run: diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index 15bbed0ad9c6..bc4ccfb7ddda 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -57,7 +57,7 @@ def showcase(session): # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: - showcase_version = '0.0.7' + showcase_version = '0.0.9' # Download the Showcase descriptor. session.run( @@ -72,7 +72,7 @@ def showcase(session): session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1alpha2/echo.proto', + 'google/showcase/v1alpha3/echo.proto', ) # Install the library. 
diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 5fd0fdc831fa..6df91c603373 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -15,7 +15,7 @@ import pytest from google.showcase import Echo -from google.showcase_v1alpha2.services.echo.transports.grpc import ( +from google.showcase_v1alpha3.services.echo.transports.grpc import ( EchoGrpcTransport, ) From d8af4b7c18aa3ec22b56c78ec31cd4e87b838ac7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 1 Jan 2019 11:34:54 -0800 Subject: [PATCH 0077/1339] [refactor] Change library structure, import everything in root module. (#85) This commit changes the structure of the resulting libraries to be more consistent when subpackages are in use: everything (both services and messages) goes under the subpackage: ``` $name_$version/ $sub/ services/ types/ ``` Imports are changed so that subpackages are imported directly into the root module, and all services and messages are imported directly into either the root module (if they are not in a subpackage) or the subpackage module (if they are). The new structure is simpler (everything works the same way) and safer (no possibility of naming collisions on services). 
--- .../docs/getting-started/_verifying.rst | 2 +- .../gapic-generator/gapic/schema/metadata.py | 3 +- .../templates/$namespace/$name/__init__.py.j2 | 39 ++++++++++++++++--- .../{types => }/$sub/__init__.py.j2 | 23 ++++++++--- .../services/$service/__init__.py.j2 | 0 .../{ => $sub}/services/$service/client.py.j2 | 2 +- .../$service/transports/__init__.py.j2 | 0 .../services/$service/transports/base.py.j2 | 0 .../services/$service/transports/grpc.py.j2 | 0 .../services/$service/transports/http.py.j2 | 0 .../{types/$sub => $sub/types}/$proto.py.j2 | 4 +- .../{types/$sub => $sub/types}/_enum.py.j2 | 0 .../{types/$sub => $sub/types}/_message.py.j2 | 4 +- .../$namespace/$name_$version/__init__.py.j2 | 12 ------ 14 files changed, 59 insertions(+), 30 deletions(-) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{types => }/$sub/__init__.py.j2 (57%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => $sub}/services/$service/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => $sub}/services/$service/client.py.j2 (98%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => $sub}/services/$service/transports/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => $sub}/services/$service/transports/base.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => $sub}/services/$service/transports/grpc.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{ => $sub}/services/$service/transports/http.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{types/$sub => $sub/types}/$proto.py.j2 (85%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{types/$sub => $sub/types}/_enum.py.j2 (100%) rename packages/gapic-generator/gapic/templates/$namespace/$name_$version/{types/$sub => 
$sub/types}/_message.py.j2 (89%) delete mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index e25eafa15912..12b90bb518e7 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -39,7 +39,7 @@ Here is a test script: response = ia.batch_annotate_images({ 'requests': [{ 'features': [{ - 'type': vision.types.image_annotator.Feature.Type.LABEL_DETECTION, + 'type': vision.Feature.Type.LABEL_DETECTION, }], 'image': {'source': { 'image_uri': 'https://s3.amazonaws.com/cdn0.michiganbulb.com' diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index ec5c788680be..e13e34a9684a 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -117,8 +117,7 @@ def python_import(self) -> imp.Import: return imp.Import( package=self.api_naming.module_namespace + ( self.api_naming.versioned_module_name, - 'types', - ) + self.subpackage, + ) + self.subpackage + ('types',), module=self.module, alias=self.module_alias, ) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index e398fae49bbf..c8965b7996ad 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -1,15 +1,44 @@ {% extends '_base.py.j2' %} {% block content %} -from ..{{ api.naming.versioned_module_name }} import types -{% for service in api.services.values()|sort(attribute='name') -%} +{# Import subpackages. 
-#} +{% for subpackage in api.subpackages.keys() -%} +from ..{{ api.naming.versioned_module_name }} import {{ subpackage }} +{% endfor -%} + +{# Import services for this package. -#} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }} import {{ service.name }} -{% endfor %} +{% endfor -%} + +{# Import messages from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. +-#} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} +{% for message in proto.top.messages.values()|sort(attribute='name') -%} +from ..{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +{% endfor %}{% endfor %} +{# Define __all__. + This requires the full set of imported names, so we iterate over + them again. 
+-#} __all__ = ( - {%- for service in api.services.values()|sort(attribute='name') %} + {%- for subpackage in api.subpackages.keys() %} + '{{ subpackage }}', + {%- endfor %} + {%- for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view %} '{{ service.name }}', {%- endfor %} - 'types', + {%- for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view %} + {%- for message in proto.top.messages.values()|sort(attribute='name') %} + '{{ message.name }}', + {%- endfor %}{% endfor %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 similarity index 57% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 index b0372068d4c1..00b097838854 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 @@ -1,11 +1,17 @@ -{% extends "_base.py.j2" %} +{% extends '_base.py.j2' %} {% block content %} -{# Import subpackages. -#} +{# Import subpackages. -#} {% for subpackage in api.subpackages.keys() -%} from . import {{ subpackage }} {% endfor -%} +{# Import services for this package. -#} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +from .services.{{ service.name|snake_case }} import {{ service.name }} +{% endfor -%} + {# Import messages from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within @@ -14,18 +20,25 @@ from . 
import {{ subpackage }} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} {% for message in proto.top.messages.values()|sort(attribute='name') -%} -from .{{ proto.module_name }} import {{ message.name }} +from .types.{{ proto.module_name }} import {{ message.name }} {% endfor %}{% endfor %} - +{# Define __all__. + This requires the full set of imported names, so we iterate over + them again. +-#} __all__ = ( {%- for subpackage in api.subpackages.keys() %} '{{ subpackage }}', {%- endfor %} + {%- for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.name }}', + {%- endfor %} {%- for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} {%- for message in proto.top.messages.values()|sort(attribute='name') %} '{{ message.name }}', {%- endfor %}{% endfor %} ) -{%- endblock %} +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 similarity index 98% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 85e9f3eebf70..1e5a14eecfe1 100644 --- 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -11,7 +11,7 @@ from google.auth import credentials {% for import_ in service.python_modules -%} {{ import_ }} {% endfor %} -from ...utils import dispatch +from ...{{ '.' * api.subpackage_view|length }}utils import dispatch from .transports import get_transport_class from .transports import {{ service.name }}Transport diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/base.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/grpc.py.j2 rename to 
packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/services/$service/transports/http.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 similarity index 85% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index ba4215d9f070..2f296e7c2765 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -24,10 +24,10 @@ __all__ = ( ) {% for enum in proto.top.enums.values() -%} - {% include '$namespace/$name_$version/types/$sub/_enum.py.j2' with context %} + {% include '$namespace/$name_$version/$sub/types/_enum.py.j2' with context %} {% endfor %} {% for message in proto.top.messages.values() -%} - {% include "$namespace/$name_$version/types/$sub/_message.py.j2" with context %} + {% include "$namespace/$name_$version/$sub/types/_message.py.j2" with context %} {% endfor %} {% endwith %}{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 similarity index 100% rename 
from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_enum.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 similarity index 89% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 rename to packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index a8a9e51103f2..04aa01161fa5 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/types/$sub/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -2,13 +2,13 @@ class {{ message.name }}({{ p }}.Message): """{{ message.meta.doc|rst(indent=4) }}""" {# Iterate over nested enums. -#} {% for enum in message.nested_enums.values() %}{% filter indent -%} - {% include '$namespace/$name_$version/types/$sub/_enum.py.j2' %} + {% include '$namespace/$name_$version/$sub/types/_enum.py.j2' %} {% endfilter %}{% endfor -%} {#- Iterate over nested messages. 
-#} {% for submessage in message.nested_messages.values() -%} {% with message = submessage %}{% filter indent %} - {%- include '$namespace/$name_$version/types/$sub/_message.py.j2' %} + {%- include '$namespace/$name_$version/$sub/types/_message.py.j2' %} {% endfilter %}{% endwith %} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 deleted file mode 100644 index 230645e45f33..000000000000 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/__init__.py.j2 +++ /dev/null @@ -1,12 +0,0 @@ -{% extends '_base.py.j2' %} - -{% block content %} -{% for service in api.services.values()|sort(attribute='name') -%} -from .services.{{ service.name|snake_case }} import {{ service.name }} -{% endfor %} -__all__ = ( - {%- for service in api.services.values()|sort(attribute='name') %} - '{{ service.name }}', - {%- endfor %} -) -{% endblock %} From cecefdaf40bd49b47d31d3d8aec51b06bdeda02a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 1 Jan 2019 19:17:17 -0800 Subject: [PATCH 0078/1339] [refactor] Use proto-plus 0.3.0. This moves from `Meta` inner classes to a property on the module, which is an overall improvement (not repeating the same thing over and over, no weird `__all__` magic). 
--- .../$name_$version/$sub/types/$proto.py.j2 | 20 ++++++++++++++----- .../$name_$version/$sub/types/_message.py.j2 | 4 ---- .../gapic/templates/setup.py.j2 | 2 +- 3 files changed, 16 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index 2f296e7c2765..c9592e9b2dec 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -14,15 +14,22 @@ import proto{% if p != 'proto' %} as {{ p }}{% endif -%} {{ import_ }} {% endfor %} -__all__ = ( +__protobuf__ = {{ p }}.module( + package='{{ '.'.join(proto.meta.address.package) }}', + {% if api.naming.proto_package != '.'.join(proto.meta.address.package) -%} + marshal='{{ api.naming.proto_package }}', + {% endif -%} + manifest={ {%- for enum in proto.top.enums.values() %} - '{{ enum.name }}', - {%- endfor -%} + '{{ enum.name }}', + {%- endfor %} {%- for message in proto.top.messages.values() %} - '{{ message.name }}', + '{{ message.name }}', {%- endfor %} + }, ) + {% for enum in proto.top.enums.values() -%} {% include '$namespace/$name_$version/$sub/types/_enum.py.j2' with context %} {% endfor %} @@ -30,4 +37,7 @@ __all__ = ( {% for message in proto.top.messages.values() -%} {% include "$namespace/$name_$version/$sub/types/_message.py.j2" with context %} {% endfor %} -{% endwith %}{% endblock %} +{% endwith %} + +__all__ = tuple(sorted(__protobuf__.manifest)) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index 04aa01161fa5..9103d84de53e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 
+++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -20,8 +20,4 @@ class {{ message.name }}({{ p }}.Message): {% endif %}) """{{ field.meta.doc|rst(indent=4) }}""" {% endfor %} - - class Meta: - package = '{{ message.ident.proto_package }}' - marshal = '{{ api.naming.warehouse_package_name }}' {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 4d005b9e676a..355ac6bfc087 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -22,7 +22,7 @@ setuptools.setup( 'google-api-core >= 1.3.0, < 2.0.0dev', 'googleapis-common-protos >= 1.6.0b6', 'grpcio >= 1.10.0', - 'proto-plus >= 0.2.1', + 'proto-plus >= 0.3.0', ), classifiers=[ 'Development Status :: 3 - Alpha', From e0ac5b0c1b4a15885d9f96d71055d06fab7adb54 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 1 Jan 2019 19:24:19 -0800 Subject: [PATCH 0079/1339] [chore] Version bump to 0.4.0 --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8eaa8a40fd65..e40257bef4f0 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.3.1', + version='0.4.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 2177ea88e8ab5cf06605cd465d10be5e56669e47 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 2 Jan 2019 14:55:46 -0800 Subject: [PATCH 0080/1339] [feat] Make `get_transport_class` more convenient. (#87) This adds the `get_transport_class` method to clients, which require significantly less plumbing to import. 
--- .../$sub/services/$service/client.py.j2 | 24 +++++++++++++-- .../$service/transports/__init__.py.j2 | 29 ++----------------- 2 files changed, 25 insertions(+), 28 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 1e5a14eecfe1..7d11db0247c6 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -12,7 +12,7 @@ from google.auth import credentials {{ import_ }} {% endfor %} from ...{{ '.' * api.subpackage_view|length }}utils import dispatch -from .transports import get_transport_class +from .transports import _transport_registry from .transports import {{ service.name }}Transport @@ -46,7 +46,7 @@ class {{ service.name }}: 'provide its credentials directly.') self._transport = transport else: - Transport = get_transport_class(transport) + Transport = self.get_transport_class(transport) self._transport = Transport(credentials=credentials, host=host) {% for method in service.methods.values() -%} @@ -156,6 +156,26 @@ class {{ service.name }}: {% endfor %} {% endfor -%} + @classmethod + def get_transport_class(cls, + label: str = None) -> {{ service.name }}Transport: + """Return an appropriate transport class. + + Args: + label (str): The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + Type[{{ service.name }}Transport]: The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return _transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+ return next(iter(_transport_registry.values())) + @property def client_info(self) -> gapic_v1.client_info.ClientInfo: """Return information about this client (for metrics). diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 index 1c77c8605280..1a17f003021d 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 @@ -8,45 +8,22 @@ from .base import {{ service.name }}Transport from .http import {{ service.name }}HttpTransport # Compile a registry of transports. -_registry = collections.OrderedDict() +_transport_registry = collections.OrderedDict() # gRPC is not guaranteed to be available, because `grpcio` may or may not # be installed. If it is available, however, it should be the "first in" # (default). try: from .grpc import {{ service.name }}GrpcTransport - _registry['grpc'] = {{ service.name }}GrpcTransport + _transport_registry['grpc'] = {{ service.name }}GrpcTransport except ImportError: pass # Always provide an HTTP/1.1 transport. -_registry['http'] = {{ service.name }}HttpTransport - - -# Provide a function for the client to get the appropriate transport. -def get_transport_class( - label: str = None, - ) -> typing.Type[{{ service.name }}Transport]: - """Return an appropriate transport class. - - Args: - label (str): The name of the desired transport. If none is provided, - then the first transport in the registry is used. - - Returns: - Type[{{ service.name }}Transport]: The transport class to use. - """ - # If a specific transport is requested, return that one. 
- if label: - return _registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(_registry.values())) +_transport_registry['http'] = {{ service.name }}HttpTransport __all__ = ( '{{ service.name }}Transport', - 'get_transport_class', ) {% endblock %} From 7abf579c74bdaf2b5701d3d0faa62219d1bcd913 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 2 Jan 2019 16:39:42 -0800 Subject: [PATCH 0081/1339] [feat] Multiple template sources. This commit allows for multiple sources for templates, which are searched in order. --- packages/gapic-generator/Dockerfile | 5 +- packages/gapic-generator/docker-entrypoint.sh | 39 +++++++++ .../docs/reference/generator.rst | 3 + packages/gapic-generator/docs/templates.rst | 77 +++++++++++++++-- .../gapic-generator/gapic/cli/generate.py | 13 ++- .../gapic/generator/generator.py | 13 +-- .../gapic/generator/options.py | 82 +++++++++++++++++++ .../tests/unit/generator/test_generator.py | 30 ++++--- .../tests/unit/generator/test_options.py | 48 +++++++++++ 9 files changed, 273 insertions(+), 37 deletions(-) create mode 100755 packages/gapic-generator/docker-entrypoint.sh create mode 100644 packages/gapic-generator/gapic/generator/options.py create mode 100644 packages/gapic-generator/tests/unit/generator/test_options.py diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index 5af539d320d1..750c79a1c115 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -17,7 +17,4 @@ ADD . /usr/src/gapic-generator-python/ RUN pip install /usr/src/gapic-generator-python # Define the generator as an entry point. 
-ENTRYPOINT protoc --proto_path=/protos/ --proto_path=/in/ \ - --python_gapic_out=/out/ \ - --python_gapic_opt=$PLUGIN_OPTIONS \ - `find /in/ -name *.proto` +ENTRYPOINT ["/usr/src/gapic-generator-python/docker-entrypoint.sh"] diff --git a/packages/gapic-generator/docker-entrypoint.sh b/packages/gapic-generator/docker-entrypoint.sh new file mode 100755 index 000000000000..23ce48be093d --- /dev/null +++ b/packages/gapic-generator/docker-entrypoint.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +PLUGIN_OPTIONS="" + +# Parse out options. +while [ -n "$1" ]; do + case "$1" in + --python-gapic-templates ) + PLUGIN_OPTIONS="$PLUGIN_OPTIONS,python-gapic-templates=$2" + shift 2 + ;; + -- ) + shift + break + ;; + * ) + # Ignore anything we do not recognize. + shift + ;; + esac +done + +protoc --proto_path=/protos/ --proto_path=/in/ \ + --python_gapic_out=/out/ \ + --python_gapic_opt=${PLUGIN_OPTIONS:1} \ + `find /in/ -name *.proto` diff --git a/packages/gapic-generator/docs/reference/generator.rst b/packages/gapic-generator/docs/reference/generator.rst index eb2192fb2c05..088a09232674 100644 --- a/packages/gapic-generator/docs/reference/generator.rst +++ b/packages/gapic-generator/docs/reference/generator.rst @@ -5,3 +5,6 @@ generator .. automodule:: gapic.generator.generator :members: + +.. 
automodule:: gapic.generator.options + :members: diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index acd13d35ac28..451b828748ac 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -1,5 +1,5 @@ Templates ---------- +========= This page provides a description of templates: how to write them, what variables they receive, and so on and so forth. @@ -9,7 +9,7 @@ based on protocol buffers by only editing templates (or authoring new ones), with no requirement to alter the primary codebase itself. Jinja -~~~~~ +----- All templates are implemented in `Jinja`_, Armin Ronacher's excellent templating library for Python. This document assumes that you are already @@ -18,12 +18,18 @@ cover that here. Locating Templates -~~~~~~~~~~~~~~~~~~ +------------------ Templates are included in output simply on the basis that they exist. **There is no master list of templates**; it is assumed that every template should be rendered (unless its name begins with a single underscore). +.. note:: + + Files beginning with an underscore (``_``) are not rendered by default. + This is to allow them to be used with ``extends`` and ``include``. + However, ``__init__.py.j2`` is rendered. + The name of the output file is based on the name of the template, with the following string replacements applied: @@ -49,7 +55,7 @@ the following string replacements applied: and the underscore are dropped. Context (Variables) -~~~~~~~~~~~~~~~~~~~ +------------------- Every template receives one variable, spelled ``api``. It is the :class:`~.schema.api.API` object that was pieced together in the parsing step. @@ -72,7 +78,7 @@ API structure is being iterated over: currently being iterated over. Filters -~~~~~~~ +------- Additionally, templates receive a limited number of filters useful for writing properly formatted templates. 
@@ -89,4 +95,65 @@ These are: relatively easy to take an arbitrary string and make it wrap to 79 characters appropriately. +Custom templates +---------------- + +It is possible to provide your own templates. + +To do so, you need a folder with Jinja templates. Each template must have +a ``.j2`` extension (which will be stripped by this software when writing +the final file; see above). Additionally, when you provide your own templates, +the filename substitutions described above still occur. + +Building Locally +~~~~~~~~~~~~~~~~ + +To specify templates, you need to provide a ``--python_gapic_opt`` argument +to ``protoc``, with a key-value pair that looks like: + + --python_gapic_opt="python-gapic-templates=/path/to/templates" + +It is *also* possible to specify more than one directory for templates +(in which case they are searched in order); to do this, provide the argument +multiple times: + + --python_gapic_opt="python-gapic-templates=/path/to/templates" + --python_gapic_opt="python-gapic-templates=/other/path" + +If you provide your own templates, the default templates are no longer +consulted. If you want to add your own templates on top of the default ones +provided by this library, use the special `DEFAULT` string: + + --python_gapic_opt="python-gapic-templates=/path/to/templates" + --python_gapic_opt="python-gapic-templates=DEFAULT" + +Building with Docker +~~~~~~~~~~~~~~~~~~~~ + +When building with Docker, you instead provide the ``--python-gapic-templates`` +argument after the ``docker run`` command: + +.. 
code-block:: shell + + $ docker run \ + --mount type=bind,source=google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly \ + --mount type=bind,source=dest/,destination=/out/ \ + --mount type=bind,source=/path/to/templates,destination=/templates/,readonly \ + --rm \ + --user $UID \ + gcr.io/gapic-images/gapic-generator-python \ + --python-gapic-templates /templates/ \ + --python-gapic-templates DEFAULT + +As before, to provide more than one location for templates, specify the +argument more than once. + +.. warning:: + + If you are using custom templates with Docker, be sure to also mount + the directory with the templates into the Docker image; otherwise + the generator will not be able to read that directory. When specifying + the ``--python-gapic-templates`` argument, it is the path *inside* + the Docker image that matters! + .. _Jinja: http://jinja.pocoo.org/docs/2.10/ diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index e0895f4a8695..0cd1f7df1b09 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -22,6 +22,7 @@ from gapic import generator from gapic.schema import api +from gapic.generator import options @click.command() @@ -32,19 +33,17 @@ @click.option('--output', type=click.File('wb'), default=sys.stdout.buffer, help='Where to output the `CodeGeneratorResponse`. ' 'Defaults to stdout.') -@click.option('--templates', type=click.Path(exists=True), default=None, - help='Which templates to use to generate a library. ' - 'Defaults to the templates included in gapic-generator, ' - 'which generate client libraries for Python 3.4 and up.') def generate( request: typing.BinaryIO, - output: typing.BinaryIO, - templates: str = None) -> None: + output: typing.BinaryIO) -> None: """Generate a full API client description.""" # Load the protobuf CodeGeneratorRequest. 
req = plugin_pb2.CodeGeneratorRequest.FromString(request.read()) + # Pull apart arguments in the request. + opts = options.Options.build(req.parameter) + # Determine the appropriate package. # This generator uses a slightly different mechanism for determining # which files to generate; it tracks at package level rather than file @@ -62,7 +61,7 @@ def generate( # Translate into a protobuf CodeGeneratorResponse; this reads the # individual templates and renders them. # If there are issues, error out appropriately. - res = generator.Generator(templates).get_response(api_schema) + res = generator.Generator(opts).get_response(api_schema) # Output the serialized response. output.write(res.SerializeToString()) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index a87c50beddc1..ff745d92d6e2 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -23,6 +23,7 @@ from gapic import utils from gapic.generator import formatter +from gapic.generator import options from gapic.schema import api @@ -34,21 +35,15 @@ class Generator: schema object (which it does through rendering templates). Args: + opts (~.options.Options): An options instance. templates (str): Optional. Path to the templates to be rendered. If this is not provided, the templates included with this application are used. """ - def __init__(self, templates: str = None) -> None: - # If explicit templates were not provided, use our default. - if not templates: - templates = os.path.join( - os.path.realpath(os.path.dirname(__file__)), - '..', 'templates', - ) - + def __init__(self, opts: options.Options) -> None: # Create the jinja environment with which to render templates. 
self._env = jinja2.Environment( - loader=jinja2.FileSystemLoader(searchpath=templates), + loader=jinja2.FileSystemLoader(searchpath=opts.templates), undefined=jinja2.StrictUndefined, ) diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py new file mode 100644 index 000000000000..810780df0e33 --- /dev/null +++ b/packages/gapic-generator/gapic/generator/options.py @@ -0,0 +1,82 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Tuple +import dataclasses +import os +import warnings + + +@dataclasses.dataclass(frozen=True) +class Options: + """A representation of CLI options passed through protoc. + + To maximize interoperability with other languages, we are permissive + on unrecognized arguments (essentially, we throw them away, but we do + warn if it looks like it was meant for us). + """ + templates: Tuple[str] + + @classmethod + def build(cls, opt_string: str) -> 'Options': + """Build an Options instance based on a protoc opt string. + + Args: + opt_string (str): A string, as passed from the protoc interface + (through ``--python_gapic_opt``). If multiple options are + passed, then protoc joins the values with ``,``. + By convention, we use ``key=value`` strings for such + options, with an absent value defaulting to ``True``. + + Returns: + ~.Options: The Options instance. 
+ """ + # Parse out every option beginning with `python-gapic` + opts = {} + for opt in opt_string.split(','): + # Parse out the key and value. + value = True + if '=' in opt: + opt, value = opt.split('=') + + # Throw away options not meant for us. + if not opt.startswith('python-gapic-'): + continue + + # Set the option. + # Just assume everything is a list at this point, and the + # final instantiation step can de-list-ify where appropriate. + opts.setdefault(opt, []) + opts[opt].append(value) + + # If templates are specified, one of the specified directories + # may be our default; perform that replacement. + templates = opts.pop('python-gapic-templates', ['DEFAULT']) + while 'DEFAULT' in templates: + templates[templates.index('DEFAULT')] = os.path.realpath( + os.path.join(os.path.dirname(__file__), '..', 'templates'), + ) + + # Build the options instance. + answer = Options( + templates=tuple([os.path.expanduser(i) for i in templates]), + ) + + # If there are any options remaining, then we failed to recognize + # them -- complain. + for key in opts.keys(): + warnings.warn(f'Unrecognized option: `{key}`.') + + # Done; return the built options. + return answer diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 211c9c950242..ca16e8ea6b6a 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -22,6 +22,7 @@ from google.protobuf import descriptor_pb2 from gapic.generator import generator +from gapic.generator import options from gapic.schema import api from gapic.schema import naming from gapic.schema import wrappers @@ -29,14 +30,15 @@ def test_custom_template_directory(): # Create a generator. 
- g = generator.Generator(templates='/templates/') + opts = options.Options.build('python-gapic-templates=/templates/') + g = generator.Generator(opts) # Assert that the Jinja loader will pull from the correct location. assert g._env.loader.searchpath == ['/templates/'] def test_get_response(): - g = generator.Generator() + g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = ['foo/bar/baz.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -50,7 +52,7 @@ def test_get_response(): def test_get_response_ignores_private_files(): - g = generator.Generator() + g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = ['foo/bar/baz.py.j2', 'foo/bar/_base.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -64,7 +66,7 @@ def test_get_response_ignores_private_files(): def test_get_response_fails_invalid_file_paths(): - g = generator.Generator() + g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = ['foo/bar/$service/$proto/baz.py.j2'] with pytest.raises(ValueError) as ex: @@ -73,7 +75,7 @@ def test_get_response_fails_invalid_file_paths(): def test_get_response_enumerates_services(): - g = generator.Generator() + g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = ['foo/$service/baz.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -92,7 +94,7 @@ def test_get_response_enumerates_services(): def test_get_response_enumerates_proto(): - g = generator.Generator() + g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = ['foo/$proto.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -106,7 +108,7 @@ def test_get_response_enumerates_proto(): def test_get_response_divides_subpackages(): - g = 
generator.Generator() + g = make_generator() api_schema = api.API.build([ descriptor_pb2.FileDescriptorProto( name='top.proto', @@ -146,7 +148,7 @@ def test_get_response_divides_subpackages(): def test_get_filename(): - g = generator.Generator() + g = make_generator() template_name = '$namespace/$name_$version/foo.py.j2' assert g._get_filename(template_name, api_schema=make_api( @@ -156,7 +158,7 @@ def test_get_filename(): def test_get_filename_with_namespace(): - g = generator.Generator() + g = make_generator() template_name = '$namespace/$name_$version/foo.py.j2' assert g._get_filename(template_name, api_schema=make_api( @@ -170,7 +172,7 @@ def test_get_filename_with_namespace(): def test_get_filename_with_service(): - g = generator.Generator() + g = make_generator() template_name = '$name/$service/foo.py.j2' assert g._get_filename( template_name, @@ -196,7 +198,7 @@ def test_get_filename_with_proto(): naming=make_naming(namespace=(), name='Spam', version='v2'), ) - g = generator.Generator() + g = make_generator() assert g._get_filename( '$name/types/$proto.py.j2', api_schema=api, @@ -221,7 +223,7 @@ def test_get_filename_with_proto_and_sub(): subpackage_view=('baz',), ) - g = generator.Generator() + g = make_generator() assert g._get_filename( '$name/types/$sub/$proto.py.j2', api_schema=api, @@ -229,6 +231,10 @@ def test_get_filename_with_proto_and_sub(): ) == 'bar/types/baz/bacon.py' +def make_generator(opts_str: str = '') -> generator.Generator: + return generator.Generator(options.Options.build(opts_str)) + + def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, file_to_generate: bool = True, prior_protos: Mapping = None, naming: naming.Naming = None, diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py new file mode 100644 index 000000000000..342cd4837a40 --- /dev/null +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -0,0 +1,48 @@ +# Copyright 
2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock +import warnings + +from gapic.generator import options + + +def test_options_empty(): + opts = options.Options.build('') + assert len(opts.templates) == 1 + assert opts.templates[0].endswith('gapic/templates') + + +def test_options_replace_templates(): + opts = options.Options.build('python-gapic-templates=/foo/') + assert len(opts.templates) == 1 + assert opts.templates[0] == '/foo/' + + +def test_options_unrecognized(): + with mock.patch.object(warnings, 'warn') as warn: + options.Options.build('python-gapic-abc=xyz') + warn.assert_called_once_with('Unrecognized option: `python-gapic-abc`.') + + +def test_flags_unrecognized(): + with mock.patch.object(warnings, 'warn') as warn: + options.Options.build('python-gapic-abc') + warn.assert_called_once_with('Unrecognized option: `python-gapic-abc`.') + + +def test_options_unrecognized_likely_typo(): + with mock.patch.object(warnings, 'warn') as warn: + options.Options.build('go-gapic-abc=xyz') + assert len(warn.mock_calls) == 0 From 1144868c567ec724c296ef3eb40f94faf5c1f936 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 3 Jan 2019 18:58:32 -0800 Subject: [PATCH 0082/1339] [chore] Version bump to 0.5.0 --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e40257bef4f0..66f95ab34871 100644 --- 
a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.4.0', + version='0.5.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 34e1f89b278839a90f716f7a229e01262e209e76 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 16 Jan 2019 13:54:03 -0800 Subject: [PATCH 0083/1339] Improve docstrings on proto messages. (#89) Change the output of proto messages to include the attribute descriptions in the overall class. This is a bit clearer and also improves the output in Sphinx. --- .../$sub/services/$service/client.py.j2 | 10 ++++---- .../$name_$version/$sub/types/_enum.py.j2 | 2 +- .../$name_$version/$sub/types/_message.py.j2 | 25 +++++++++++++------ packages/gapic-generator/gapic/utils/rst.py | 18 ++++++++++--- .../tests/unit/utils/test_rst.py | 14 +++++++++++ 5 files changed, 51 insertions(+), 18 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 7d11db0247c6..3c852050ddd7 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -2,7 +2,7 @@ {% block content %} import pkg_resources -from typing import Mapping, Optional, Sequence, Tuple, Union +from typing import Mapping, Optional, Sequence, Tuple, Type, Union from google.api_core import gapic_v1 from google.api_core import retry @@ -62,7 +62,7 @@ class {{ service.name }}: """{{ method.meta.doc|rst(width=72, indent=8) }} Args: - request ({{ method.input.ident.sphinx }}): + request (:class:`{{ method.input.ident.sphinx }}`): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} retry 
(~.retry.Retry): Designation of what errors, if any, @@ -158,15 +158,15 @@ class {{ service.name }}: @classmethod def get_transport_class(cls, - label: str = None) -> {{ service.name }}Transport: + label: str = None) -> Type[{{ service.name }}Transport]: """Return an appropriate transport class. Args: - label (str): The name of the desired transport. If none is + label: The name of the desired transport. If none is provided, then the first transport in the registry is used. Returns: - Type[{{ service.name }}Transport]: The transport class to use. + The transport class to use. """ # If a specific transport is requested, return that one. if label: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 index e4c9f98eda3e..35356c93a9fc 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 @@ -1,5 +1,5 @@ class {{ enum.name }}({{ e }}.IntEnum): - """{{ enum.meta.doc|rst(width=72, indent=4) }}""" + """{{ enum.meta.doc|rst(indent=4) }}""" {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index 9103d84de53e..07984cc0d4fd 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -1,11 +1,21 @@ class {{ message.name }}({{ p }}.Message): - """{{ message.meta.doc|rst(indent=4) }}""" + """{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} + + Attributes: + {%- for field in 
message.fields.values() %} + {{ field.name }} ({{ field.ident.sphinx }}): + {{ field.meta.doc|rst(indent=12, nl=False) }} + {%- endfor %} + {% endif -%} + """ {# Iterate over nested enums. -#} - {% for enum in message.nested_enums.values() %}{% filter indent -%} - {% include '$namespace/$name_$version/$sub/types/_enum.py.j2' %} - {% endfilter %}{% endfor -%} + {% for enum in message.nested_enums.values() -%} + {% filter indent %} + {%- include '$namespace/$name_$version/$sub/types/_enum.py.j2' %} + {% endfilter %} + {% endfor -%} - {#- Iterate over nested messages. -#} + {# Iterate over nested messages. -#} {% for submessage in message.nested_messages.values() -%} {% with message = submessage %}{% filter indent %} {%- include '$namespace/$name_$version/$sub/types/_message.py.j2' %} @@ -13,11 +23,10 @@ class {{ message.name }}({{ p }}.Message): {% endfor -%} {# Iterate over fields. -#} - {%- for field in message.fields.values() %} + {% for field in message.fields.values() -%} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field({{ p }}.{{ field.proto_type }}, number={{ field.number }} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) - """{{ field.meta.doc|rst(indent=4) }}""" - {% endfor %} + {% endfor -%} {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index 68026cbf107e..6ae886519e0d 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -19,12 +19,18 @@ from gapic.utils.lines import wrap -def rst(text, width=72, indent=0, source_format='commonmark'): +def rst(text: str, width: int = 72, indent: int = 0, nl: bool = None, + source_format: str = 'commonmark'): """Convert the given text to ReStructured Text. Args: text (str): The text to convert. width (int): The number of columns. 
+ indent (int): The number of columns to indent each line of text + (except the first). + nl (bool): Whether to append a trailing newline. + Defaults to appending a newline if the result is more than + one line long. source_format (str): The source format. This is ``commonmark`` by default, which is what is used by convention in protocol buffers. @@ -36,12 +42,16 @@ def rst(text, width=72, indent=0, source_format='commonmark'): # (This makes code generation significantly faster; calling out to pandoc # is by far the most expensive thing we do.) if not re.search(r'[|*`_[\]]', text): - answer = wrap(text, width=width, indent=indent, offset=indent + 3) + answer = wrap(text, + indent=indent, + offset=indent + 3, + width=width - indent, + ) else: # Convert from CommonMark to ReStructured Text. answer = pypandoc.convert_text(text, 'rst', format=source_format, - extra_args=['--columns=%d' % width], + extra_args=['--columns=%d' % (width - indent)], ).strip().replace('\n', f"\n{' ' * indent}") # Add a newline to the end of the document if any line breaks are @@ -49,7 +59,7 @@ def rst(text, width=72, indent=0, source_format='commonmark'): # # This causes the closing """ to be on the subsequent line only when # appropriate. - if '\n' in answer: + if nl or ('\n' in answer and nl is None): answer += '\n' + ' ' * indent # If the text ends in a double-quote, append a period. 
diff --git a/packages/gapic-generator/tests/unit/utils/test_rst.py b/packages/gapic-generator/tests/unit/utils/test_rst.py index 876768293fa1..f99894429999 100644 --- a/packages/gapic-generator/tests/unit/utils/test_rst.py +++ b/packages/gapic-generator/tests/unit/utils/test_rst.py @@ -41,6 +41,20 @@ def test_rst_add_newline(): assert convert_text.call_count == 0 +def test_rst_force_add_newline(): + with mock.patch.object(pypandoc, 'convert_text') as convert_text: + s = 'The hail in Wales' + assert utils.rst(s, nl=True) == s + '\n' + assert convert_text.call_count == 0 + + +def test_rst_disable_add_newline(): + with mock.patch.object(pypandoc, 'convert_text') as convert_text: + s = 'The hail in Wales\nfalls mainly on the snails.' + assert utils.rst(s, nl=False) == s + assert convert_text.call_count == 0 + + def test_rst_pad_close_quote(): with mock.patch.object(pypandoc, 'convert_text') as convert_text: s = 'A value, as in "foo"' From f8bec5c4c364b2e87c40237fa1c39d841a0dfdc1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Jan 2019 14:03:09 -0800 Subject: [PATCH 0084/1339] [refactor] Remove the `proto.top` attribute. (#90) There is essentially never a point in template writing where it is *not* desirable to use top, so just use it all the time. Refactors the original `messages` and `enums` properties to `all_messages` and `all_enums`. Fixes #84. 
--- packages/gapic-generator/gapic/schema/api.py | 115 ++++++++---------- .../templates/$namespace/$name/__init__.py.j2 | 4 +- .../$name_$version/$sub/__init__.py.j2 | 4 +- .../$name_$version/$sub/types/$proto.py.j2 | 8 +- .../tests/unit/schema/test_api.py | 14 +-- 5 files changed, 69 insertions(+), 76 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 8f0ace6cfcae..0aaa67c8589b 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -39,8 +39,8 @@ class Proto: file_pb2: descriptor_pb2.FileDescriptorProto services: Mapping[str, wrappers.Service] - messages: Mapping[str, wrappers.MessageType] - enums: Mapping[str, wrappers.EnumType] + all_messages: Mapping[str, wrappers.MessageType] + all_enums: Mapping[str, wrappers.EnumType] file_to_generate: bool meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, @@ -72,6 +72,22 @@ def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, prior_protos=prior_protos or {}, ).proto + @cached_property + def enums(self) -> Mapping[str, wrappers.EnumType]: + """Return top-level enums on the proto.""" + return collections.OrderedDict([ + (k, v) for k, v in self.all_enums.items() + if not v.meta.address.parent + ]) + + @cached_property + def messages(self) -> Mapping[str, wrappers.MessageType]: + """Return top-level messages on the proto.""" + return collections.OrderedDict([ + (k, v) for k, v in self.all_messages.items() + if not v.meta.address.parent + ]) + @property def module_name(self) -> str: """Return the appropriate module name for this service. @@ -90,15 +106,15 @@ def names(self) -> Set[str]: used for imports. """ # Add names of all enums, messages, and fields. 
- answer = {e.name for e in self.enums.values()} - for message in self.messages.values(): + answer = {e.name for e in self.all_enums.values()} + for message in self.all_messages.values(): answer = answer.union({f.name for f in message.fields.values()}) answer.add(message.name) # Identify any import module names where the same module name is used # from distinct packages. modules = {} - for t in chain(*[m.field_types for m in self.messages.values()]): + for t in chain(*[m.field_types for m in self.all_messages.values()]): modules.setdefault(t.ident.module, set()) modules[t.ident.module].add(t.ident.package) for module_name, packages in modules.items(): @@ -132,29 +148,6 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: # Done; return the sorted sequence. return tuple(sorted(list(answer))) - @cached_property - def top(self) -> 'Proto': - """Return a proto shim which is only aware of top-level objects. - - This is useful in a situation where a template wishes to iterate - over only those messages and enums that are at the top level of the - file. - """ - return type(self)( - file_pb2=self.file_pb2, - services=self.services, - messages=collections.OrderedDict([ - (k, v) for k, v in self.messages.items() - if not v.meta.address.parent - ]), - enums=collections.OrderedDict([ - (k, v) for k, v in self.enums.items() - if not v.meta.address.parent - ]), - file_to_generate=False, - meta=self.meta, - ) - def disambiguate(self, string: str) -> str: """Return a disambiguated string for the context of this proto. 
@@ -222,14 +215,14 @@ def build(cls, def enums(self) -> Mapping[str, wrappers.EnumType]: """Return a map of all enums available in the API.""" return collections.ChainMap({}, - *[p.enums for p in self.protos.values()], + *[p.all_enums for p in self.protos.values()], ) @cached_property def messages(self) -> Mapping[str, wrappers.MessageType]: """Return a map of all messages available in the API.""" return collections.ChainMap({}, - *[p.messages for p in self.protos.values()], + *[p.all_messages for p in self.protos.values()], ) @cached_property @@ -298,9 +291,9 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, file_to_generate: bool, naming: api_naming.Naming, prior_protos: Mapping[str, Proto] = None): - self.messages = {} - self.enums = {} - self.services = {} + self.proto_messages = {} + self.proto_enums = {} + self.proto_services = {} self.file_descriptor = file_descriptor self.file_to_generate = file_to_generate self.prior_protos = prior_protos or {} @@ -348,12 +341,12 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # In this situation, we would not have come across the message yet, # and the field would have its original textual reference to the # message (`type_name`) but not its resolved message wrapper. - for message in self.messages.values(): + for message in self.proto_messages.values(): for field in message.fields.values(): if field.type_name and not any((field.message, field.enum)): object.__setattr__( field, 'message', - self.messages[field.type_name.lstrip('.')], + self.proto_messages[field.type_name.lstrip('.')], ) # Only generate the service if this is a target file to be generated. @@ -372,11 +365,11 @@ def proto(self) -> Proto: # This has everything but is ignorant of naming collisions in the # ultimate file that will be written. 
naive = Proto( - enums=self.enums, + all_enums=self.proto_enums, + all_messages=self.proto_messages, file_pb2=self.file_descriptor, file_to_generate=self.file_to_generate, - messages=self.messages, - services=self.services, + services=self.proto_services, meta=metadata.Metadata( address=self.address, ), @@ -391,13 +384,13 @@ def proto(self) -> Proto: # Note: The services bind to themselves, because services get their # own output files. return dataclasses.replace(naive, - enums=collections.OrderedDict([ + all_enums=collections.OrderedDict([ (k, v.with_context(collisions=naive.names)) - for k, v in naive.enums.items() + for k, v in naive.all_enums.items() ]), - messages=collections.OrderedDict([ + all_messages=collections.OrderedDict([ (k, v.with_context(collisions=naive.names)) - for k, v in naive.messages.items() + for k, v in naive.all_messages.items() ]), services=collections.OrderedDict([ (k, v.with_context(collisions=v.names)) @@ -407,15 +400,15 @@ def proto(self) -> Proto: ) @cached_property - def all_enums(self) -> Mapping[str, wrappers.EnumType]: - return collections.ChainMap({}, self.enums, - *[p.enums for p in self.prior_protos.values()], + def api_enums(self) -> Mapping[str, wrappers.EnumType]: + return collections.ChainMap({}, self.proto_enums, + *[p.all_enums for p in self.prior_protos.values()], ) @cached_property - def all_messages(self) -> Mapping[str, wrappers.MessageType]: - return collections.ChainMap({}, self.messages, - *[p.messages for p in self.prior_protos.values()], + def api_messages(self) -> Mapping[str, wrappers.MessageType]: + return collections.ChainMap({}, self.proto_messages, + *[p.all_messages for p in self.prior_protos.values()], ) def _get_operation_type(self, @@ -493,7 +486,7 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], # naming rules to trust that they will never collide. 
# # Note: If this field is a recursive reference to its own message, - # then the message will not be in `all_messages` yet (because the + # then the message will not be in `api_messages` yet (because the # message wrapper is not yet created, because it needs this object # first) and this will be None. This case is addressed in the # `_load_message` method. @@ -501,8 +494,8 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], for field_pb, i in zip(field_pbs, range(0, sys.maxsize)): answer[field_pb.name] = wrappers.Field( field_pb=field_pb, - enum=self.all_enums.get(field_pb.type_name.lstrip('.')), - message=self.all_messages.get(field_pb.type_name.lstrip('.')), + enum=self.api_enums.get(field_pb.type_name.lstrip('.')), + message=self.api_messages.get(field_pb.type_name.lstrip('.')), meta=metadata.Metadata( address=address.child(field_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), @@ -536,7 +529,7 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # If the output type is google.longrunning.Operation, we use # a specialized object in its place. - output_type = self.all_messages[meth_pb.output_type.lstrip('.')] + output_type = self.api_messages[meth_pb.output_type.lstrip('.')] if meth_pb.output_type.endswith('google.longrunning.Operation'): if not lro.response_type or not lro.metadata_type: raise TypeError( @@ -545,17 +538,17 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], 'metadata type.', ) output_type = self._get_operation_type( - response_type=self.all_messages[ + response_type=self.api_messages[ address.resolve(lro.response_type) ], - metadata_type=self.all_messages.get( + metadata_type=self.api_messages.get( address.resolve(lro.metadata_type), ), ) # Create the method wrapper object. 
answer[meth_pb.name] = wrappers.Method( - input=self.all_messages[meth_pb.input_type.lstrip('.')], + input=self.api_messages[meth_pb.input_type.lstrip('.')], method_pb=meth_pb, meta=metadata.Metadata( address=address.child(meth_pb.name, path + (i,)), @@ -610,7 +603,7 @@ def _load_message(self, )) # Create a message correspoding to this descriptor. - self.messages[address.proto] = wrappers.MessageType( + self.proto_messages[address.proto] = wrappers.MessageType( fields=fields, message_pb=message_pb, nested_enums=nested_enums, @@ -620,7 +613,7 @@ def _load_message(self, documentation=self.docs.get(path, self.EMPTY), ), ) - return self.messages[address.proto] + return self.proto_messages[address.proto] def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, @@ -642,7 +635,7 @@ def _load_enum(self, )) # Load the enum itself. - self.enums[address.proto] = wrappers.EnumType( + self.proto_enums[address.proto] = wrappers.EnumType( enum_pb=enum, meta=metadata.Metadata( address=address, @@ -650,7 +643,7 @@ def _load_enum(self, ), values=values, ) - return self.enums[address.proto] + return self.proto_enums[address.proto] def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, @@ -668,7 +661,7 @@ def _load_service(self, ) # Load the comments for the service itself. 
- self.services[address.proto] = wrappers.Service( + self.proto_services[address.proto] = wrappers.Service( meta=metadata.Metadata( address=address, documentation=self.docs.get(path, self.EMPTY), @@ -676,4 +669,4 @@ def _load_service(self, methods=methods, service_pb=service, ) - return self.services[address.proto] + return self.proto_services[address.proto] diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index c8965b7996ad..53c1d431c79c 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -19,7 +19,7 @@ from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_cas -#} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.top.messages.values()|sort(attribute='name') -%} +{% for message in proto.messages.values()|sort(attribute='name') -%} from ..{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} {% endfor %}{% endfor %} @@ -37,7 +37,7 @@ __all__ = ( {%- endfor %} {%- for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.top.messages.values()|sort(attribute='name') %} + {%- for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', {%- endfor %}{% endfor %} ) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 index 00b097838854..ab1c0fb99fe2 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 @@ -19,7 +19,7 @@ from .services.{{ service.name|snake_case }} import {{ service.name }} -#} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.top.messages.values()|sort(attribute='name') -%} +{% for message in proto.messages.values()|sort(attribute='name') -%} from .types.{{ proto.module_name }} import {{ message.name }} {% endfor %}{% endfor %} @@ -37,7 +37,7 @@ __all__ = ( {%- endfor %} {%- for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.top.messages.values()|sort(attribute='name') %} + {%- for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', {%- endfor %}{% endfor %} ) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index c9592e9b2dec..fa69d3476920 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -20,21 +20,21 @@ __protobuf__ = {{ p }}.module( marshal='{{ api.naming.proto_package }}', {% endif -%} manifest={ - {%- for enum in proto.top.enums.values() %} + {%- for enum in proto.enums.values() %} '{{ enum.name }}', {%- endfor %} - {%- for message in proto.top.messages.values() %} + {%- for message in proto.messages.values() %} '{{ message.name }}', {%- endfor %} }, ) -{% for enum in proto.top.enums.values() -%} +{% for enum in proto.enums.values() -%} {% include '$namespace/$name_$version/$sub/types/_enum.py.j2' with context %} {% endfor %} -{% for message in proto.top.messages.values() -%} +{% for message in proto.messages.values() -%} {% 
include "$namespace/$name_$version/$sub/types/_message.py.j2" with context %} {% endfor %} {% endwith %} diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 8f5fe07e8d38..be2c1b80fe7f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -332,14 +332,14 @@ def test_messages_nested(): bar = 'google.example.v3.Foo.Bar' # Get the message. - assert len(proto.messages) == 2 - assert proto.messages[foo].name == 'Foo' - assert proto.messages[bar].name == 'Bar' + assert len(proto.all_messages) == 2 + assert proto.all_messages[foo].name == 'Foo' + assert proto.all_messages[bar].name == 'Bar' - # Assert that the `top` shim only shows top-level messages. - assert len(proto.top.messages) == 1 - assert proto.top.messages[foo] is proto.messages[foo] - assert bar not in proto.top.messages + # Assert that the `messages` property only shows top-level messages. + assert len(proto.messages) == 1 + assert proto.messages[foo] is proto.messages[foo] + assert bar not in proto.messages def test_services(): From 3d807c271b3150ff2b0c5665d3f832164e8c411e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 29 Jan 2019 06:59:06 -0800 Subject: [PATCH 0085/1339] [chore] Remove Showcase check in nox. (#91) It does not detect running Showcase in Docker. 
--- packages/gapic-generator/nox.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/nox.py index bc4ccfb7ddda..1e57c80f8467 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/nox.py @@ -49,7 +49,6 @@ def showcase(session): session.log('Note: Showcase must be running for these tests to work.') session.log('See https://github.com/googleapis/gapic-showcase') session.log('-' * 70) - session.run('lsof', '-i', '4tcp:7469', '-sTCP:LISTEN', silent=True) # Install pytest and gapic-generator-python session.install('pytest') From 3ce03b58f578733297a08f788b0f0246b00cbaf9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 20 Feb 2019 20:45:44 -0800 Subject: [PATCH 0086/1339] [refactor] New annotations (#92) This updates gapic-generator-python to use the current annotations. --- packages/gapic-generator/.circleci/config.yml | 10 ++--- packages/gapic-generator/Dockerfile | 4 +- packages/gapic-generator/gapic/schema/api.py | 4 +- .../gapic-generator/gapic/schema/naming.py | 40 +++++++++---------- .../gapic-generator/gapic/schema/wrappers.py | 31 +++++++------- .../gapic/templates/setup.py.j2 | 2 +- .../gapic-generator/{nox.py => noxfile.py} | 2 +- packages/gapic-generator/setup.py | 2 +- .../tests/unit/generator/test_generator.py | 1 - .../tests/unit/schema/test_api.py | 8 ++-- .../tests/unit/schema/test_naming.py | 21 +++++----- .../tests/unit/schema/wrappers/test_field.py | 6 ++- .../tests/unit/schema/wrappers/test_method.py | 13 +++--- .../unit/schema/wrappers/test_service.py | 16 ++++---- .../unit/schema/wrappers/test_signature.py | 10 ++--- 15 files changed, 81 insertions(+), 89 deletions(-) rename packages/gapic-generator/{nox.py => noxfile.py} (98%) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 6a30a49157a2..0fb392bc0ae2 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ 
b/packages/gapic-generator/.circleci/config.yml @@ -59,7 +59,7 @@ jobs: - run: name: Install nox and codecov. command: | - pip install --pre nox-automation + pip install nox pip install codecov - run: name: Run unit tests. @@ -81,7 +81,7 @@ jobs: - run: name: Install nox and codecov. command: | - pip install --pre nox-automation + pip install nox pip install codecov - run: name: Run unit tests. @@ -93,7 +93,7 @@ jobs: showcase: docker: - image: python:3.7-slim - - image: gcr.io/gapic-showcase/gapic-showcase:0.0.9 + - image: gcr.io/gapic-showcase/gapic-showcase:0.0.12 steps: - checkout - run: @@ -103,7 +103,7 @@ jobs: apt-get install -y curl pandoc unzip - run: name: Install nox. - command: pip install --pre nox-automation + command: pip install nox - run: name: Install protoc 3.6.1. command: | @@ -122,7 +122,7 @@ jobs: - checkout - run: name: Install nox. - command: pip install --pre nox-automation + command: pip install nox - run: name: Build the documentation. command: nox -s docs diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index 750c79a1c115..b7a54001024a 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -7,8 +7,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* # Add protoc and our common protos. -COPY --from=gcr.io/gapic-images/api-common-protos:latest /usr/local/bin/protoc /usr/local/bin/protoc -COPY --from=gcr.io/gapic-images/api-common-protos:latest /protos/ /protos/ +COPY --from=gcr.io/gapic-images/api-common-protos:beta /usr/local/bin/protoc /usr/local/bin/protoc +COPY --from=gcr.io/gapic-images/api-common-protos:beta /protos/ /protos/ # Add our code to the Docker image. ADD . 
/usr/src/gapic-generator-python/ diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 0aaa67c8589b..7c8b7d36711b 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -23,7 +23,7 @@ from itertools import chain from typing import Callable, List, Mapping, Sequence, Set, Tuple -from google.api import annotations_pb2 +from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import metadata @@ -525,7 +525,7 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # Iterate over the methods and collect them into a dictionary. answer = collections.OrderedDict() for meth_pb, i in zip(methods, range(0, sys.maxsize)): - lro = meth_pb.options.Extensions[annotations_pb2.operation] + lro = meth_pb.options.Extensions[operations_pb2.operation_info] # If the output type is google.longrunning.Operation, we use # a specialized object in its place. 
diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 5bcfebe9fbe0..f8d924edd63b 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -17,7 +17,7 @@ import re from typing import Iterable, Sequence, Tuple -from google.api import annotations_pb2 +from google.api import client_pb2 from google.protobuf import descriptor_pb2 from gapic import utils @@ -37,9 +37,12 @@ class Naming: namespace: Tuple[str] = dataclasses.field(default_factory=tuple) version: str = '' product_name: str = '' - product_url: str = '' proto_package: str = '' + def __post_init__(self): + if not self.product_name: + self.__dict__['product_name'] = self.name + @classmethod def build(cls, *file_descriptors: Iterable[descriptor_pb2.FileDescriptorProto] @@ -118,38 +121,33 @@ def build(cls, # # This creates a naming class non-empty metadata annotation and # uses Python's set logic to de-duplicate. There should only be one. - metadata_info = set() + explicit_pkgs = set() for fd in file_descriptors: - meta = fd.options.Extensions[annotations_pb2.metadata] + pkg = fd.options.Extensions[client_pb2.client_package] naming = cls( - name=meta.package_name or meta.product_name, - namespace=tuple(meta.package_namespace), - product_name=meta.product_name, - product_url=meta.product_uri, - version='', + name=pkg.title or pkg.product_title, + namespace=tuple(pkg.namespace), + version=pkg.version, ) if naming: - metadata_info.add(naming) + explicit_pkgs.add(naming) # Sanity check: Ensure that any google.api.metadata provisions were # consistent. 
- if len(metadata_info) > 1: + if len(explicit_pkgs) > 1: raise ValueError( - 'If the google.api.metadata annotation is provided in more ' - 'than one file, it must be consistent.', + 'If the google.api.client_package annotation is provided in ' + 'more than one file, it must be consistent.', ) # Merge the package naming information and the metadata naming # information, with the latter being preferred. # Return a Naming object which effectively merges them. - answer = package_info - if len(metadata_info): - for k, v in dataclasses.asdict(metadata_info.pop()).items(): - # Sanity check: We only want to overwrite anything if the - # new value is truthy. - if v: - answer = dataclasses.replace(answer, **{k: v}) - return answer + if len(explicit_pkgs): + return dataclasses.replace(package_info, + **dataclasses.asdict(explicit_pkgs.pop()), + ) + return package_info def __bool__(self): """Return True if any of the fields are truthy, False otherwise.""" diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 3da9d6cbee41..177df3851e8a 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -34,7 +34,8 @@ from typing import Iterable, List, Mapping, Sequence, Set, Tuple, Union from google.api import annotations_pb2 -from google.api import signature_pb2 +from google.api import client_pb2 +from google.api import field_behavior_pb2 from google.protobuf import descriptor_pb2 from gapic import utils @@ -92,7 +93,8 @@ def required(self) -> bool: Returns: bool: Whether this field is required. 
""" - return bool(self.options.Extensions[annotations_pb2.required]) + return (field_behavior_pb2.FieldBehavior.Value('REQUIRED') in + self.options.Extensions[field_behavior_pb2.field_behavior]) @utils.cached_property def type(self) -> Union['MessageType', 'EnumType', 'PythonType']: @@ -434,28 +436,22 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: return tuple(answer) @utils.cached_property - def signatures(self) -> Tuple[signature_pb2.MethodSignature]: + def signatures(self) -> 'MethodSignatures': """Return the signature defined for this method.""" - sig_pb2 = self.options.Extensions[annotations_pb2.method_signature] - - # Sanity check: If there are no signatures (which should be by far - # the common case), just abort now. - if len(sig_pb2.fields) == 0: - return MethodSignatures(all=()) + signatures = self.options.Extensions[client_pb2.method_signature] # Signatures are annotated with an `additional_signatures` key that # allows for specifying additional signatures. This is an uncommon # case but we still want to deal with it. answer = [] - for sig in (sig_pb2,) + tuple(sig_pb2.additional_signatures): + for sig in signatures: # Build a MethodSignature object with the appropriate name # and fields. The fields are field objects, retrieved from # the method's `input` message. answer.append(MethodSignature( - name=sig.function_name if sig.function_name else self.name, fields=collections.OrderedDict([ (f.split('.')[-1], self.input.get_field(*f.split('.'))) - for f in sig.fields + for f in sig.split(',') ]), )) @@ -478,7 +474,6 @@ def with_context(self, *, collisions: Set[str]) -> 'Method': @dataclasses.dataclass(frozen=True) class MethodSignature: - name: str fields: Mapping[str, Field] @utils.cached_property @@ -551,8 +546,8 @@ def host(self) -> str: Returns: str: The hostname, with no protocol and no trailing ``/``. 
""" - if self.options.Extensions[annotations_pb2.default_host]: - return self.options.Extensions[annotations_pb2.default_host] + if self.options.Extensions[client_pb2.default_host]: + return self.options.Extensions[client_pb2.default_host] return None @property @@ -562,8 +557,10 @@ def oauth_scopes(self) -> Sequence[str]: Returns: Sequence[str]: A sequence of OAuth scopes. """ - oauth = self.options.Extensions[annotations_pb2.oauth] - return tuple(oauth.scopes) + # Return the OAuth scopes, split on comma. + return tuple([i.strip() for i in + self.options.Extensions[client_pb2.oauth_scopes].split(',') + if i]) @property def module_name(self) -> str: diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 355ac6bfc087..c1696fdb4567 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -20,7 +20,7 @@ setuptools.setup( include_package_data=True, install_requires=( 'google-api-core >= 1.3.0, < 2.0.0dev', - 'googleapis-common-protos >= 1.6.0b6', + 'googleapis-common-protos >= 1.6.0b7', 'grpcio >= 1.10.0', 'proto-plus >= 0.3.0', ), diff --git a/packages/gapic-generator/nox.py b/packages/gapic-generator/noxfile.py similarity index 98% rename from packages/gapic-generator/nox.py rename to packages/gapic-generator/noxfile.py index 1e57c80f8467..53ba3fb3c956 100644 --- a/packages/gapic-generator/nox.py +++ b/packages/gapic-generator/noxfile.py @@ -56,7 +56,7 @@ def showcase(session): # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: - showcase_version = '0.0.9' + showcase_version = '0.0.12' # Download the Showcase descriptor. 
session.run( diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 66f95ab34871..a872393686dc 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -42,7 +42,7 @@ include_package_data=True, install_requires=( 'click >= 6.7', - 'googleapis-common-protos >= 1.6.0b6', + 'googleapis-common-protos >= 1.6.0b7', 'jinja2 >= 2.10', 'protobuf >= 3.5.1', 'pypandoc >= 1.4', diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index ca16e8ea6b6a..aef23eddde26 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -260,5 +260,4 @@ def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') kwargs.setdefault('product_name', 'Hatstand') - kwargs.setdefault('product_url', 'https://cloud.google.com/hatstand/') return naming.Naming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index be2c1b80fe7f..e1a355da373b 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -17,8 +17,7 @@ import pytest -from google.api import annotations_pb2 -from google.api import longrunning_pb2 +from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import api @@ -461,8 +460,8 @@ def test_lro(): input_type='google.example.v3.AsyncDoThingRequest', output_type='google.longrunning.Operation', ) - method_pb2.options.Extensions[annotations_pb2.operation].MergeFrom( - longrunning_pb2.OperationData( + method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( + operations_pb2.OperationInfo( response_type='google.example.v3.AsyncDoThingResponse', 
metadata_type='google.example.v3.AsyncDoThingMetadata', ), @@ -606,5 +605,4 @@ def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') kwargs.setdefault('product_name', 'Hatstand') - kwargs.setdefault('product_url', 'https://cloud.google.com/hatstand/') return naming.Naming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index ad64c912e7c7..f3ad0e09ecac 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -14,8 +14,7 @@ import pytest -from google.api import annotations_pb2 -from google.api import metadata_pb2 +from google.api import client_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import naming @@ -116,8 +115,12 @@ def test_build_with_annotations(): name='spanner.proto', package='google.spanner.v1', ) - proto.options.Extensions[annotations_pb2.metadata].MergeFrom( - metadata_pb2.Metadata(package_namespace=['Google', 'Cloud']), + proto.options.Extensions[client_pb2.client_package].MergeFrom( + client_pb2.Package( + namespace=['Google', 'Cloud'], + title='Spanner', + version='v1', + ), ) n = naming.Naming.build(proto) assert n.name == 'Spanner' @@ -146,8 +149,8 @@ def test_inconsistent_metadata_error(): name='spanner.proto', package='google.spanner.v1', ) - proto1.options.Extensions[annotations_pb2.metadata].MergeFrom( - metadata_pb2.Metadata(package_namespace=['Google', 'Cloud']), + proto1.options.Extensions[client_pb2.client_package].MergeFrom( + client_pb2.Package(namespace=['Google', 'Cloud']), ) # Set up the second proto. 
@@ -156,9 +159,8 @@ def test_inconsistent_metadata_error(): name='spanner2.proto', package='google.spanner.v1', ) - proto2.options.Extensions[annotations_pb2.metadata].MergeFrom( - metadata_pb2.Metadata(package_namespace=['Google', 'Cloud'], - package_name='Spanner'), + proto2.options.Extensions[client_pb2.client_package].MergeFrom( + client_pb2.Package(title='Spanner', namespace=['Google', 'Cloud']), ) # This should error. Even though the data in the metadata is consistent, @@ -193,5 +195,4 @@ def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') kwargs.setdefault('product_name', 'Hatstand') - kwargs.setdefault('product_url', 'https://cloud.google.com/hatstand/') return naming.Naming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 777450cfa2e0..f60f64227a4b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -14,7 +14,7 @@ import pytest -from google.api import annotations_pb2 +from google.api import field_behavior_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import wrappers @@ -83,7 +83,9 @@ def test_not_repeated(): def test_required(): field = make_field() - field.options.Extensions[annotations_pb2.required] = True + field.options.Extensions[field_behavior_pb2.field_behavior].append( + field_behavior_pb2.FieldBehavior.Value('REQUIRED') + ) assert field.required diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 7d8f983c5c64..5aa16a601d9c 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -16,8 +16,8 @@ from typing import Sequence from google.api 
import annotations_pb2 +from google.api import client_pb2 from google.api import http_pb2 -from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import metadata @@ -47,9 +47,10 @@ def test_method_signature(): # Edit the underlying method pb2 post-hoc to add the appropriate annotation # (google.api.signature). - method.options.Extensions[annotations_pb2.method_signature].MergeFrom( - signature_pb2.MethodSignature(fields=['int_field', 'float_field']) - ) + method.options.Extensions[client_pb2.method_signature].append(','.join(( + 'int_field', + 'float_field', + ))) # We should get back just those two fields as part of the signature. assert len(method.signatures) == 1 @@ -73,8 +74,8 @@ def test_method_signature_nested(): # Edit the underlying method pb2 post-hoc to add the appropriate annotation # (google.api.signature). - method.options.Extensions[annotations_pb2.method_signature].MergeFrom( - signature_pb2.MethodSignature(fields=['inner.int_field']) + method.options.Extensions[client_pb2.method_signature].append( + 'inner.int_field', ) # We should get back just those two fields as part of the signature. diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 158ef4b6a15f..3da253d0c330 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -15,8 +15,8 @@ import typing from google.api import annotations_pb2 +from google.api import client_pb2 from google.api import http_pb2 -from google.api import signature_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import imp @@ -103,7 +103,7 @@ def test_service_python_modules_signature(): type_name='a.b.c.v2.D', ), ), - method_signature=signature_pb2.MethodSignature(fields=['secs', 'd']), + method_signature='secs,d', ) # type=5 is int, so nothing is added. 
assert service.python_modules == ( @@ -137,8 +137,8 @@ def make_service(name: str = 'Placeholder', host: str = '', # appropriate. service_pb = descriptor_pb2.ServiceDescriptorProto(name=name) if host: - service_pb.options.Extensions[annotations_pb2.default_host] = host - service_pb.options.Extensions[annotations_pb2.oauth].scopes.extend(scopes) + service_pb.options.Extensions[client_pb2.default_host] = host + service_pb.options.Extensions[client_pb2.oauth_scopes] = ','.join(scopes) # Return a service object to test. return wrappers.Service( @@ -151,7 +151,7 @@ def make_service(name: str = 'Placeholder', host: str = '', # tests difficult to understand and maintain. def make_service_with_method_options(*, http_rule: http_pb2.HttpRule = None, - method_signature: signature_pb2.MethodSignature = None, + method_signature: str = '', in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = () ) -> wrappers.Service: # Declare a method with options enabled for long-running operations and @@ -184,7 +184,7 @@ def get_method(name: str, lro_metadata_type: str = '', *, in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), http_rule: http_pb2.HttpRule = None, - method_signature: signature_pb2.MethodSignature = None, + method_signature: str = '', ) -> wrappers.Method: input_ = get_message(in_type, fields=in_fields) output = get_message(out_type) @@ -204,8 +204,8 @@ def get_method(name: str, ext_key = annotations_pb2.http method_pb.options.Extensions[ext_key].MergeFrom(http_rule) if method_signature: - ext_key = annotations_pb2.method_signature - method_pb.options.Extensions[ext_key].MergeFrom(method_signature) + ext_key = client_pb2.method_signature + method_pb.options.Extensions[ext_key].append(method_signature) return wrappers.Method( method_pb=method_pb, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py index 0bb40affd445..e9a22b0b6882 100644 --- 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py @@ -25,7 +25,7 @@ def test_signature_dispatch_field(): ('foo', make_field(name='foo', type=T.Value('TYPE_INT32'))), ('bar', make_field(name='bar', type=T.Value('TYPE_BOOL'))), )) - signature = wrappers.MethodSignature(name='spam', fields=fields) + signature = wrappers.MethodSignature(fields=fields) assert signature.dispatch_field == fields['foo'] @@ -36,10 +36,8 @@ def test_signatures_magic_methods(): ('bar', make_field(name='bar', type=T.Value('TYPE_BOOL'))), )) signatures = wrappers.MethodSignatures(all=( - wrappers.MethodSignature(name='spam', fields=fields), - wrappers.MethodSignature(name='eggs', fields={ - 'foo': fields['foo'], - }), + wrappers.MethodSignature(fields=fields), + wrappers.MethodSignature(fields={'foo': fields['foo']}), )) assert len(signatures) == 2 assert tuple([i for i in signatures]) == signatures.all @@ -64,11 +62,9 @@ def test_signatures_single_dispatch(): ) signatures = wrappers.MethodSignatures(all=( wrappers.MethodSignature( - name='spam', fields=collections.OrderedDict(fields), ), wrappers.MethodSignature( - name='eggs', fields=collections.OrderedDict(reversed(fields)), ), )) From 209813d4e31e36d417d9d6d5c155fdcec2ee0b92 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Fri, 22 Feb 2019 13:27:23 -0800 Subject: [PATCH 0087/1339] [chore] Update showcase image in CI (#94) --- packages/gapic-generator/.circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 0fb392bc0ae2..3a8430cd8d9e 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -93,7 +93,7 @@ jobs: showcase: docker: - image: python:3.7-slim - - image: gcr.io/gapic-showcase/gapic-showcase:0.0.12 + - image: gcr.io/gapic-images/gapic-showcase:0.0.12 
steps: - checkout - run: From 28ff78fe3f41d87f3114e5aea5e2796430c3c290 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 22 Feb 2019 13:30:21 -0800 Subject: [PATCH 0088/1339] [chore] Version number bump to 0.6.0. (#93) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a872393686dc..97206ec20e07 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.5.0', + version='0.6.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 1f6e671eb772be4e7a655439ec9589ae306e07a3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 23 Feb 2019 14:25:57 -0800 Subject: [PATCH 0089/1339] [chore] Explicitly declare external commands in noxfile. (#95) --- packages/gapic-generator/noxfile.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 53ba3fb3c956..a5968282a690 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -64,6 +64,7 @@ def showcase(session): f'download/v{showcase_version}/' f'gapic-showcase-{showcase_version}.desc', '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + external=True, silent=True, ) @@ -72,6 +73,7 @@ def showcase(session): f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', 'google/showcase/v1alpha3/echo.proto', + external=True, ) # Install the library. From 5e00207d70adc95c0b223e4ac4247eeae21f2fd3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 23 Feb 2019 17:17:09 -0800 Subject: [PATCH 0090/1339] [fix] Docstrings with generated content should be raw. (#96) This fixes an issue where comments with backslashes can cause a syntax error. 
--- .../$name_$version/$sub/services/$service/client.py.j2 | 4 ++-- .../$sub/services/$service/transports/grpc.py.j2 | 2 +- .../$sub/services/$service/transports/http.py.j2 | 2 +- .../$namespace/$name_$version/$sub/types/_enum.py.j2 | 2 +- .../$namespace/$name_$version/$sub/types/_message.py.j2 | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 3c852050ddd7..d89465f2d92e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -59,7 +59,7 @@ class {{ service.name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - """{{ method.meta.doc|rst(width=72, indent=8) }} + r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: request (:class:`{{ method.input.ident.sphinx }}`): @@ -126,7 +126,7 @@ class {{ service.name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - """{{ method.meta.doc|rst(width=72, indent=8) }} + r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: {%- for field in signature.fields.values() %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 index 3261dbda18c4..eb39f7c09f8e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 @@ -102,7 +102,7 @@ class {{ service.name 
}}GrpcTransport({{ service.name }}Transport): def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: - """Return a callable for the {{- ' ' -}} + r"""Return a callable for the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} {{- ' ' -}} method over gRPC. diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 index a9ec4512216f..fd51bf498d0a 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 @@ -71,7 +71,7 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): request: {{ method.input.ident }}, *, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - """Call the {{- ' ' -}} + r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} {{- ' ' -}} method over HTTP. 
diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 index 35356c93a9fc..3f1a8a27d28f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 @@ -1,5 +1,5 @@ class {{ enum.name }}({{ e }}.IntEnum): - """{{ enum.meta.doc|rst(indent=4) }}""" + r"""{{ enum.meta.doc|rst(indent=4) }}""" {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index 07984cc0d4fd..f00123d6d9b4 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -1,5 +1,5 @@ class {{ message.name }}({{ p }}.Message): - """{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} + r"""{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} Attributes: {%- for field in message.fields.values() %} From 158919f411bd2ace86cd24ea5cc1c755405a0a9b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 23 Feb 2019 17:18:35 -0800 Subject: [PATCH 0091/1339] [test] Add explicit proto object and dict unary tests. 
(#97) --- .../gapic-generator/tests/system/test_grpc_streams.py | 1 + .../gapic-generator/tests/system/test_grpc_unary.py | 11 ++++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index 7a1c70b15dc2..07b2e4302625 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + def test_unary_stream(echo): content = 'The hail in Wales falls mainly on the snails.' responses = echo.expand({ diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index a6ab9e4d5982..c42765aa66f2 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -17,8 +17,17 @@ from google.api_core import exceptions from google.rpc import code_pb2 +from google import showcase -def test_unary(echo): + +def test_unary_with_request_object(echo): + response = echo.echo(showcase.EchoRequest( + content='The hail in Wales falls mainly on the snails.', + )) + assert response.content == 'The hail in Wales falls mainly on the snails.' + + +def test_unary_with_dict(echo): response = echo.echo({ 'content': 'The hail in Wales falls mainly on the snails.', }) From fbc091fef8200e7c9e6bdbc6d4af68d4249588a2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 23 Feb 2019 17:20:54 -0800 Subject: [PATCH 0092/1339] [feat] RPCs that return Empty should return None. (#100) This adds explicit support for void methods. If an RPC returns a `google.protobuf.Empty` object, we return `None` instead (and suppress any return value documentation). 
--- .../gapic-generator/gapic/schema/metadata.py | 5 +++++ .../gapic-generator/gapic/schema/wrappers.py | 9 ++++++++- .../$sub/services/$service/client.py.j2 | 17 ++++++++++++++--- .../services/$service/transports/grpc.py.j2 | 9 +++++---- .../services/$service/transports/http.py.j2 | 14 +++++++++++--- .../tests/unit/schema/test_metadata.py | 9 +++++++++ .../tests/unit/schema/wrappers/test_method.py | 12 ++++++++++++ 7 files changed, 64 insertions(+), 11 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index e13e34a9684a..a2203c5a2a01 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -58,6 +58,11 @@ def __str__(self) -> str: Because we import modules as a whole, rather than individual members from modules, this is consistently `module.Name`. """ + # Edge case: For google.protobuf.Empty, use None instead. + if self.proto == 'google.protobuf.Empty': + return 'None' + + # Most (but not all) types are in a module. if self.module: # If collisions are registered and conflict with our module, # use the module alias instead. diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 177df3851e8a..4a3017dfce0b 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -415,7 +415,9 @@ def grpc_stub_type(self) -> str: def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: """Return types referenced by this method.""" # Begin with the input (request) and output (response) messages. - answer = [self.input, self.output] + answer = [self.input] + if not self.void: + answer.append(self.output) # If this method has flattening that is honored, add its # composite types. @@ -458,6 +460,11 @@ def signatures(self) -> 'MethodSignatures': # Done; return a tuple of signatures. 
return MethodSignatures(all=tuple(answer)) + @property + def void(self) -> bool: + """Return True if this method has no return value, False otherwise.""" + return self.output.ident.proto == 'google.protobuf.Empty' + def with_context(self, *, collisions: Set[str]) -> 'Method': """Return a derivative of this method with the provided context. diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index d89465f2d92e..fee8395ddb57 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -70,10 +70,12 @@ class {{ service.name }}: timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + {%- if not method.void %} Returns: {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} + {%- endif %} """ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -97,8 +99,12 @@ class {{ service.name }}: {%- endif %} # Send the request. - response = rpc(request, retry=retry, - timeout=timeout, metadata=metadata) + {% if not method.void %}response = {% endif %}rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) {%- if method.output.lro_response is defined %} # Wrap the response in an operation future @@ -111,9 +117,11 @@ class {{ service.name }}: {%- endif %} ) {%- endif %} + {%- if not method.void %} # Done; return the response. return response + {%- endif %} {% for signature in method.signatures.single_dispatch -%} @{{ method.name|snake_case }}.register @@ -138,10 +146,12 @@ class {{ service.name }}: timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent alont with the request as metadata. + {%- if not method.void %} Returns: {{ method.output.ident.sphinx }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} + {%- endif %} """ return self.{{ method.name|snake_case }}( {{ method.input.ident }}( @@ -158,7 +168,8 @@ class {{ service.name }}: @classmethod def get_transport_class(cls, - label: str = None) -> Type[{{ service.name }}Transport]: + label: str = None, + ) -> Type[{{ service.name }}Transport]: """Return an appropriate transport class. Args: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 index eb39f7c09f8e..76fcb4a702d2 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 @@ -111,8 +111,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): Returns: Callable[[~.{{ method.input.name }}], - ~.{{ method.output.name }}]: - {{ method.output.meta.doc|rst(width=72, indent=16) }} + {% if method.void %}None{% else %}~.{{ method.output.name }}{% endif %}]: + A function that, when called, will call the underlying RPC + on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. 
@@ -121,8 +122,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.ident }}.serialize, - response_deserializer={{ method.output.ident }}.deserialize, + request_serializer={{ method.input.ident }}.{% if method.input.ident.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, + response_deserializer={{ method.output.ident }}.{% if method.input.ident.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 index fd51bf498d0a..6fb9c79f117c 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 @@ -81,17 +81,23 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): The request object. {{ method.input.meta.doc|rst(width=72, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be - sent alont with the request as metadata. + sent along with the request as metadata. + {%- if not method.void %} Returns: ~.{{ method.output.ident }}: {{ method.output.meta.doc|rst(width=72, indent=16) }} + {%- endif %} """ # Serialize the input. + {%- if method.input.ident.module.endswith('_pb2') %} data = request.SerializeToString() + {%- else %} + data = {{ method.input.ident }}.serialize(request) + {%- endif %} # Send the request. 
- response = self._session.post( + {% if not method.void %}response = {% endif %}self._session.post( 'https://{host}/$rpc/{package}.{service}/{method}'.format( host=self._host, method='{{ method.name }}', @@ -103,11 +109,13 @@ class {{ service.name }}HttpTransport({{ service.name }}Transport): 'content-type': 'application/x-protobuf', }, ) + {%- if not method.void %} # Return the response. - return {{ method.output.ident }}.FromString( + return {{ method.output.ident }}.{% if method.input.ident.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}( response.content, ) + {%- endif %} {%- endfor %} diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 76744e142a32..b42b17d7fc8e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -40,6 +40,15 @@ def test_address_str_parent(): assert str(addr) == 'baz.spam.eggs.Bacon' +def test_address_str_empty(): + addr = metadata.Address( + package=('google', 'protobuf'), + module='empty_pb2', + name='Empty', + ) + assert str(addr) == 'None' + + def test_address_proto(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.proto == 'foo.bar.Bacon' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 5aa16a601d9c..1e262b1331fa 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -88,6 +88,18 @@ def test_method_no_signature(): assert len(make_method('Ping').signatures) == 0 +def test_method_void(): + empty = make_message(name='Empty', package='google.protobuf') + method = make_method('Meh', output_message=empty) + assert method.void + + +def test_method_not_void(): + not_empty = 
make_message(name='OutputMessage', package='foo.bar.v1') + method = make_method('Meh', output_message=not_empty) + assert not method.void + + def test_method_field_headers_none(): method = make_method('DoSomething') assert isinstance(method.field_headers, collections.Sequence) From b6cd16d0f08b97c90514b0fa8d9e5de5380c7e20 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 23 Feb 2019 17:22:23 -0800 Subject: [PATCH 0093/1339] [chore] Silence W504 on flake8. (#99) PEP 8 explicitly encourages line breaks after binary operators as a means of breaking up long if statements. --- packages/gapic-generator/.flake8 | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/gapic-generator/.flake8 b/packages/gapic-generator/.flake8 index 092a2c36d96b..673525b82a04 100644 --- a/packages/gapic-generator/.flake8 +++ b/packages/gapic-generator/.flake8 @@ -6,3 +6,7 @@ ignore = # Line over-indented for visual indent. # This works poorly with type annotations in method declarations. E126, E128, E131 + # Line break after binary operator. + # This catches line breaks after "and" / "or" as a means of breaking up + # long if statements, which PEP 8 explicitly encourages. + W504 From 5132a65840b28f4f639fe8ce6cd8e4a2e1f0a0fe Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 23 Feb 2019 18:16:15 -0800 Subject: [PATCH 0094/1339] [test] Test CRUD operations. (#98) Add Showcase tests for resource CRUD operations. 
--- .../$sub/services/$service/client.py.j2 | 3 ++ .../services/$service/transports/grpc.py.j2 | 7 +++++ packages/gapic-generator/noxfile.py | 1 + .../gapic-generator/tests/system/conftest.py | 14 ++++++--- .../tests/system/test_resource_crud.py | 30 +++++++++++++++++++ 5 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_resource_crud.py diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index fee8395ddb57..f3d4e484676a 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -77,6 +77,9 @@ class {{ service.name }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} {%- endif %} """ + # If we got a dictionary, coerce it. + request = {{ method.input.ident }}(request) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method.wrap_method( diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 index 76fcb4a702d2..00e7aa0bdc03 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 @@ -8,6 +8,9 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 {%- endif %} from google.auth import credentials +{%- if service.methods.values()|selectattr('void')|list|length %} +from google.protobuf import empty_pb2 +{%- endif %} import grpc @@ -123,7 +126,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, + {% if method.void -%} + response_deserializer=empty_pb2.Empty.FromString, + {% else -%} response_deserializer={{ method.output.ident }}.{% if method.input.ident.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, + {% endif -%} ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index a5968282a690..5b8ef4b444d9 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -73,6 +73,7 @@ def showcase(session): f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', 'google/showcase/v1alpha3/echo.proto', + 'google/showcase/v1alpha3/identity.proto', external=True, 
) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 6df91c603373..423d037619ef 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -15,16 +15,22 @@ import pytest from google.showcase import Echo -from google.showcase_v1alpha3.services.echo.transports.grpc import ( - EchoGrpcTransport, -) +from google.showcase import Identity import grpc @pytest.fixture def echo(): - transport = EchoGrpcTransport( + transport = Echo.get_transport_class('grpc')( channel=grpc.insecure_channel('localhost:7469'), ) return Echo(transport=transport) + + +@pytest.fixture +def identity(): + transport = Identity.get_transport_class('grpc')( + channel=grpc.insecure_channel('localhost:7469'), + ) + return Identity(transport=transport) diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py new file mode 100644 index 000000000000..9ca60b678bf2 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -0,0 +1,30 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def test_crud_with_request(identity): + count = len(identity.list_users({}).users) + user = identity.create_user({'user': { + 'display_name': 'Guido van Rossum', + 'email': 'guido@guido.fake', + }}) + try: + assert user.display_name == 'Guido van Rossum' + assert user.email == 'guido@guido.fake' + assert len(identity.list_users({}).users) == count + 1 + assert identity.get_user({ + 'name': user.name, + }).display_name == 'Guido van Rossum' + finally: + identity.delete_user({'name': user.name}) From 0273687c227073e5bc542f49aa277348eac77cf7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 25 Feb 2019 10:54:06 -0800 Subject: [PATCH 0095/1339] [fix] Fix LRO support, add Showcase test. (#101) This commit fixes long-standing issues with LRO and adds a Showcase test for them. --- .../gapic/templates/setup.py.j2 | 4 +-- packages/gapic-generator/setup.py | 2 +- .../tests/system/test_grpc_lro.py | 29 +++++++++++++++++++ 3 files changed, 32 insertions(+), 3 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_grpc_lro.py diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index c1696fdb4567..f5b3617af128 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -19,8 +19,8 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core >= 1.3.0, < 2.0.0dev', - 'googleapis-common-protos >= 1.6.0b7', + 'google-api-core >= 1.8.0, < 2.0.0dev', + 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 0.3.0', ), diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 97206ec20e07..c5682aee1248 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -42,7 +42,7 @@ include_package_data=True, install_requires=( 'click >= 6.7', - 'googleapis-common-protos >= 1.6.0b7', + 
'googleapis-common-protos >= 1.6.0b8', 'jinja2 >= 2.10', 'protobuf >= 3.5.1', 'pypandoc >= 1.4', diff --git a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py new file mode 100644 index 000000000000..ba61336ccf78 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -0,0 +1,29 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import datetime, timedelta, timezone + +from google import showcase_v1alpha3 + + +def test_lro(echo): + wait_request = { + 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), + 'success': {'content': 'The hail in Wales falls mainly ' + 'on the snails...eventually.'}, + } + future = echo.wait(wait_request) + response = future.result() + assert isinstance(response, showcase_v1alpha3.WaitResponse) + assert response.content.endswith('the snails...eventually.') From f833eab794c462c9c5c39e134c30cf3bd2b00910 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 26 Feb 2019 18:44:48 -0800 Subject: [PATCH 0096/1339] [feat/fix/refactor] Method signature flattening. (#102) This changes method signature flattening significantly. * One method, no more dispatcher. * Keyword arguments now required rather than prohibited. * Nesting actually works correctly. 
--- .../gapic-generator/gapic/schema/wrappers.py | 110 +++++------------- .../$sub/services/$service/client.py.j2 | 77 +++++------- .../$namespace/$name_$version/utils.py.j2 | 22 ---- .../tests/system/test_resource_crud.py | 17 +++ .../tests/unit/schema/wrappers/test_method.py | 78 ++++--------- .../unit/schema/wrappers/test_service.py | 20 +++- .../unit/schema/wrappers/test_signature.py | 83 ------------- 7 files changed, 116 insertions(+), 291 deletions(-) delete mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/utils.py.j2 delete mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 4a3017dfce0b..23bd0752c458 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -31,7 +31,7 @@ import dataclasses import re from itertools import chain -from typing import Iterable, List, Mapping, Sequence, Set, Tuple, Union +from typing import List, Mapping, Sequence, Set, Union from google.api import annotations_pb2 from google.api import client_pb2 @@ -403,6 +403,31 @@ def field_headers(self) -> Sequence[str]: return tuple(re.findall(r'\{([a-z][\w\d_.]+)=', http.get)) return () + @utils.cached_property + def flattened_fields(self) -> Mapping[str, Field]: + """Return the signature defined for this method.""" + answer = collections.OrderedDict() + signatures = self.options.Extensions[client_pb2.method_signature] + + # Iterate over each signature and add the appropriate fields. + for sig in signatures: + # Get all of the individual fields. + fields = collections.OrderedDict([ + (f, self.input.get_field(*f.split('.'))) + for f in sig.split(',') + ]) + + # Sanity check: If any fields contain a message, we ignore the + # entire signature. + if any([i.message for i in fields.values()]): + continue + + # Add the fields to the answer. 
+ answer.update(fields) + + # Done; return the flattened fields + return answer + @property def grpc_stub_type(self) -> str: """Return the type of gRPC stub to use.""" @@ -424,8 +449,9 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: # # This entails adding the module for any field on the signature # unless the field is a primitive. - for sig in self.signatures.single_dispatch: - answer += sig.composite_types + for field in self.flattened_fields.values(): + if field.message or field.enum: + answer.append(field.type) # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. @@ -437,29 +463,6 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: # Done; return the answer. return tuple(answer) - @utils.cached_property - def signatures(self) -> 'MethodSignatures': - """Return the signature defined for this method.""" - signatures = self.options.Extensions[client_pb2.method_signature] - - # Signatures are annotated with an `additional_signatures` key that - # allows for specifying additional signatures. This is an uncommon - # case but we still want to deal with it. - answer = [] - for sig in signatures: - # Build a MethodSignature object with the appropriate name - # and fields. The fields are field objects, retrieved from - # the method's `input` message. - answer.append(MethodSignature( - fields=collections.OrderedDict([ - (f.split('.')[-1], self.input.get_field(*f.split('.'))) - for f in sig.split(',') - ]), - )) - - # Done; return a tuple of signatures. 
- return MethodSignatures(all=tuple(answer)) - @property def void(self) -> bool: """Return True if this method has no return value, False otherwise.""" @@ -479,61 +482,6 @@ def with_context(self, *, collisions: Set[str]) -> 'Method': ) -@dataclasses.dataclass(frozen=True) -class MethodSignature: - fields: Mapping[str, Field] - - @utils.cached_property - def dispatch_field(self) -> Union[MessageType, EnumType, PythonType]: - """Return the first field. - - This is what is used for `functools.singledispatch`.""" - return next(iter(self.fields.values())) - - @utils.cached_property - def composite_types(self) -> Sequence[Union[MessageType, EnumType]]: - """Return all composite types used in this signature.""" - answer = [] - for field in self.fields.values(): - if field.message or field.enum: - answer.append(field.type) - return answer - - -@dataclasses.dataclass(frozen=True) -class MethodSignatures: - all: Tuple[MethodSignature] - - def __getitem__(self, key: Union[int, slice]) -> MethodSignature: - return self.all[key] - - def __iter__(self) -> Iterable[MethodSignature]: - return iter(self.all) - - def __len__(self) -> int: - return len(self.all) - - @utils.cached_property - def single_dispatch(self) -> Tuple[MethodSignature]: - """Return a tuple of signatures, grouped and deduped by dispatch type. - - In the Python 3 templates, we only honor at most one method - signature per initial argument type, and only for primitives. - - This method groups and deduplicates signatures and sends back only - the signatures that the template actually wants. - - Returns: - Tuple[MethodSignature]: Method signatures to be used with - "single dispatch" routing. 
- """ - answer = collections.OrderedDict() - for sig in [i for i in self.all - if isinstance(i.dispatch_field.type, PythonType)]: - answer.setdefault(sig.dispatch_field.ident, sig) - return tuple(answer.values()) - - @dataclasses.dataclass(frozen=True) class Service: """Description of a service (defined with the ``service`` keyword).""" diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index f3d4e484676a..efa16d81622f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -11,7 +11,6 @@ from google.auth import credentials {% for import_ in service.python_modules -%} {{ import_ }} {% endfor %} -from ...{{ '.' * api.subpackage_view|length }}utils import dispatch from .transports import _transport_registry from .transports import {{ service.name }}Transport @@ -50,11 +49,11 @@ class {{ service.name }}: self._transport = Transport(credentials=credentials, host=host) {% for method in service.methods.values() -%} - {%- if method.signatures.single_dispatch -%} - @dispatch - {% endif -%} def {{ method.name|snake_case }}(self, - request: {{ method.input.ident }}, *, + request: {{ method.input.ident }} = None, *, + {% for field in method.flattened_fields.values() -%} + {{ field.name }}: {{ field.ident }} = None, + {% endfor -%} retry: retry.Retry = None, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -65,6 +64,13 @@ class {{ service.name }}: request (:class:`{{ method.input.ident.sphinx }}`): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {% for key, field in method.flattened_fields.items() -%} + {{ field.name }} (:class:`{{ field.ident.sphinx }}`): + {{ 
field.meta.doc|rst(width=72, indent=16, nl=False) }} + This corresponds to the ``{{ key }}`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + {% endfor -%} retry (~.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -77,8 +83,24 @@ class {{ service.name }}: {{ method.output.meta.doc|wrap(width=72, indent=16) }} {%- endif %} """ - # If we got a dictionary, coerce it. + {%- if method.flattened_fields %} + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + raise TypeError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + {%- endif %} + + # Create or coerce a protobuf request object. + {% if method.flattened_fields -%} + # If we have keyword arguments corresponding to fields on the + # request, apply these. + {% endif -%} request = {{ method.input.ident }}(request) + {%- for key, field in method.flattened_fields.items() %} + if {{ field.name }} is not None: + request.{{ key }} = {{ field.name }} + {%- endfor %} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -125,48 +147,7 @@ class {{ service.name }}: # Done; return the response. 
return response {%- endif %} - - {% for signature in method.signatures.single_dispatch -%} - @{{ method.name|snake_case }}.register - def _{{ method.name|snake_case }}_with_{{ signature.dispatch_field.name|snake_case }}(self, - {%- for field in signature.fields.values() %} - {{ field.name }}: {{ field.ident }}{% if loop.index0 > 0 and not field.required %} = None{% endif %}, - {%- endfor %} - *, - retry: retry.Retry = None, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: - r"""{{ method.meta.doc|rst(width=72, indent=8) }} - - Args: - {%- for field in signature.fields.values() %} - {{ field.name }} ({{ field.ident.sphinx }}): - {{ field.meta.doc|wrap(width=72, indent=16) }} - {%- endfor %} - retry (~.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent alont with the request as metadata. - {%- if not method.void %} - - Returns: - {{ method.output.ident.sphinx }}: - {{ method.output.meta.doc|wrap(width=72, indent=16) }} - {%- endif %} - """ - return self.{{ method.name|snake_case }}( - {{ method.input.ident }}( - {%- for field in signature.fields.values() %} - {{ field.name }}={{ field.name }}, - {%- endfor %} - ), - retry=retry, - timeout=timeout, - metadata=metadata, - ) - {% endfor %} + {{ '\n' }} {% endfor -%} @classmethod diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/utils.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/utils.py.j2 deleted file mode 100644 index 9c80ecf69647..000000000000 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/utils.py.j2 +++ /dev/null @@ -1,22 +0,0 @@ -{% extends '_base.py.j2' %} - -{% block content %} -import functools - - -def dispatch(func): - base_dispatcher = functools.singledispatch(func) - - # Define a wrapper function that works off args[1] instead of 
args[0]. - # This is needed because we are overloading *methods*, and their first - # argument is always `self`. - @functools.wraps(base_dispatcher) - def wrapper(*args, **kwargs): - return base_dispatcher.dispatch(args[1].__class__)(*args, **kwargs) - - # The register function is not changed, so let singledispatch do the work. - wrapper.register = base_dispatcher.register - - # Done; return the decorated method. - return wrapper -{% endblock %} diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 9ca60b678bf2..67a8d50e9273 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -28,3 +28,20 @@ def test_crud_with_request(identity): }).display_name == 'Guido van Rossum' finally: identity.delete_user({'name': user.name}) + + +def test_crud_flattened(identity): + count = len(identity.list_users({}).users) + user = identity.create_user( + display_name='Monty Python', + email='monty@python.org', + ) + try: + assert user.display_name == 'Monty Python' + assert user.email == 'monty@python.org' + assert len(identity.list_users({}).users) == count + 1 + assert identity.get_user({ + 'name': user.name, + }).display_name == 'Monty Python' + finally: + identity.delete_user({'name': user.name}) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 1e262b1331fa..15305ee9c7d3 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -34,60 +34,6 @@ def test_method_types(): assert method.output.name == 'Output' -def test_method_signature(): - # Set up a meaningful input message. 
- input_msg = make_message(name='Input', fields=( - make_field('int_field', type=5), - make_field('bool_field', type=8), - make_field('float_field', type=2), - )) - - # Create the method. - method = make_method('SendStuff', input_message=input_msg) - - # Edit the underlying method pb2 post-hoc to add the appropriate annotation - # (google.api.signature). - method.options.Extensions[client_pb2.method_signature].append(','.join(( - 'int_field', - 'float_field', - ))) - - # We should get back just those two fields as part of the signature. - assert len(method.signatures) == 1 - signature = method.signatures[0] - assert tuple(signature.fields.keys()) == ('int_field', 'float_field') - - -def test_method_signature_nested(): - # Set up a meaningful input message. - inner_msg = make_message(name='Inner', fields=( - make_field('int_field', type=5), - make_field('bool_field', type=8), - make_field('float_field', type=2), - )) - outer_msg = make_message(name='Outer', fields=( - make_field('inner', type=9, message=inner_msg), - )) - - # Create the method. - method = make_method('SendStuff', input_message=outer_msg) - - # Edit the underlying method pb2 post-hoc to add the appropriate annotation - # (google.api.signature). - method.options.Extensions[client_pb2.method_signature].append( - 'inner.int_field', - ) - - # We should get back just those two fields as part of the signature. 
- assert len(method.signatures) == 1 - signature = method.signatures[0] - assert tuple(signature.fields.keys()) == ('int_field',) - - -def test_method_no_signature(): - assert len(make_method('Ping').signatures) == 0 - - def test_method_void(): empty = make_message(name='Empty', package='google.protobuf') method = make_method('Meh', output_message=empty) @@ -131,11 +77,30 @@ def test_method_stream_stream(): assert method.grpc_stub_type == 'stream_stream' +def test_method_flattened_fields(): + a = make_field('a', type=5) # int + b = make_field('b', type=5) + input_msg = make_message('Z', fields=(a, b)) + method = make_method('F', input_message=input_msg, signatures=('a,b',)) + assert len(method.flattened_fields) == 2 + assert 'a' in method.flattened_fields + assert 'b' in method.flattened_fields + + +def test_method_ignored_flattened_fields(): + a = make_field('a', type=5) + b = make_field('b', type=11, message=make_message('Eggs')) + input_msg = make_message('Z', fields=(a, b)) + method = make_method('F', input_message=input_msg, signatures=('a,b',)) + assert len(method.flattened_fields) == 0 + + def make_method( name: str, input_message: wrappers.MessageType = None, output_message: wrappers.MessageType = None, package: str = 'foo.bar.v1', module: str = 'baz', http_rule: http_pb2.HttpRule = None, + signatures: Sequence[str] = (), **kwargs) -> wrappers.Method: # Use default input and output messages if they are not provided. input_message = input_message or make_message('MethodInput') @@ -154,6 +119,11 @@ def make_method( ext_key = annotations_pb2.http method_pb.options.Extensions[ext_key].MergeFrom(http_rule) + # If there are signatures, include them. + for sig in signatures: + ext_key = client_pb2.method_signature + method_pb.options.Extensions[ext_key].append(sig) + # Instantiate the wrapper class. 
return wrappers.Method( method_pb=method_pb, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 3da253d0c330..1dd6c05eef55 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -99,7 +99,7 @@ def test_service_python_modules_signature(): descriptor_pb2.FieldDescriptorProto(name='secs', type=5), descriptor_pb2.FieldDescriptorProto( name='d', - type=11, # message + type=14, # enum type_name='a.b.c.v2.D', ), ), @@ -159,7 +159,7 @@ def make_service_with_method_options(*, method = get_method( 'DoBigThing', 'foo.bar.ThingRequest', - 'google.longrunning.operations.Operation', + 'google.longrunning.operations_pb2.Operation', lro_response_type='foo.baz.ThingResponse', lro_metadata_type='foo.qux.ThingMetadata', in_fields=in_fields, @@ -232,7 +232,7 @@ def get_message(dot_path: str, *, return wrappers.MessageType( fields={i.name: wrappers.Field( field_pb=i, - message=get_message(i.type_name) if i.type_name else None, + enum=get_enum(i.type_name) if i.type_name else None, ) for i in fields}, nested_messages={}, nested_enums={}, @@ -243,3 +243,17 @@ def get_message(dot_path: str, *, module=module, )), ) + + +def get_enum(dot_path: str) -> wrappers.EnumType: + pieces = dot_path.split('.') + pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] + return wrappers.EnumType( + enum_pb=descriptor_pb2.EnumDescriptorProto(name=name), + meta=metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(pkg), + module=module, + )), + values=[], + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py deleted file mode 100644 index e9a22b0b6882..000000000000 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_signature.py +++ /dev/null @@ -1,83 +0,0 @@ -# 
Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import collections - -from google.protobuf import descriptor_pb2 - -from gapic.schema import wrappers - - -def test_signature_dispatch_field(): - T = descriptor_pb2.FieldDescriptorProto.Type - fields = collections.OrderedDict(( - ('foo', make_field(name='foo', type=T.Value('TYPE_INT32'))), - ('bar', make_field(name='bar', type=T.Value('TYPE_BOOL'))), - )) - signature = wrappers.MethodSignature(fields=fields) - assert signature.dispatch_field == fields['foo'] - - -def test_signatures_magic_methods(): - T = descriptor_pb2.FieldDescriptorProto.Type - fields = collections.OrderedDict(( - ('foo', make_field(name='foo', type=T.Value('TYPE_INT32'))), - ('bar', make_field(name='bar', type=T.Value('TYPE_BOOL'))), - )) - signatures = wrappers.MethodSignatures(all=( - wrappers.MethodSignature(fields=fields), - wrappers.MethodSignature(fields={'foo': fields['foo']}), - )) - assert len(signatures) == 2 - assert tuple([i for i in signatures]) == signatures.all - assert signatures[0] == signatures.all[0] - - -def test_signatures_single_dispatch(): - T = descriptor_pb2.FieldDescriptorProto.Type - fields = ( - ('foo', make_field( - message=wrappers.MessageType( - fields={}, - message_pb=descriptor_pb2.DescriptorProto(name='Bacon'), - nested_enums={}, - nested_messages={}, - ), - name='bar', - type=T.Value('TYPE_MESSAGE'), - type_name='bogus.Message', - )), - ('bar', make_field(name='foo', 
type=T.Value('TYPE_INT32'))), - ) - signatures = wrappers.MethodSignatures(all=( - wrappers.MethodSignature( - fields=collections.OrderedDict(fields), - ), - wrappers.MethodSignature( - fields=collections.OrderedDict(reversed(fields)), - ), - )) - assert len(signatures) == 2 - assert len(signatures.single_dispatch) == 1 - assert signatures.single_dispatch[0] == signatures[1] - - -def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: - kwargs.setdefault('name', 'my_field') - kwargs.setdefault('number', 1) - kwargs.setdefault('type', - descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_BOOL'), - ) - field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) - return wrappers.Field(field_pb=field_pb, message=message, enum=enum) From e653523d3189b4f61516c78c715f365dfd0da8e9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 1 Mar 2019 21:05:06 -0800 Subject: [PATCH 0097/1339] [chore] Bump Showcase to 0.0.13 (#106) --- packages/gapic-generator/.circleci/config.yml | 2 +- packages/gapic-generator/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 3a8430cd8d9e..8ef7ec94d3eb 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -93,7 +93,7 @@ jobs: showcase: docker: - image: python:3.7-slim - - image: gcr.io/gapic-images/gapic-showcase:0.0.12 + - image: gcr.io/gapic-images/gapic-showcase:0.0.13 steps: - checkout - run: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 5b8ef4b444d9..4b2014c792e3 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -56,7 +56,7 @@ def showcase(session): # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: - showcase_version = '0.0.12' + showcase_version = '0.0.13' # Download the Showcase descriptor. 
session.run( From 872ccd831ea7031d780059e39ac1d14286619152 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 1 Mar 2019 21:06:41 -0800 Subject: [PATCH 0098/1339] [refactor] Refactors the client info invocation. (#104) It now instantiates once per service, not once per request, and gets the `gapic/-` portion of the metadata sent. --- .../$sub/services/$service/client.py.j2 | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index efa16d81622f..05061200f92d 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -108,7 +108,7 @@ class {{ service.name }}: self._transport.{{ method.name|snake_case }}, default_retry=None, # FIXME default_timeout=None, # FIXME - client_info=self.client_info, + client_info=_client_info, ) {%- if method.field_headers %} @@ -171,15 +171,15 @@ class {{ service.name }}: # in the dictionary). return next(iter(_transport_registry.values())) - @property - def client_info(self) -> gapic_v1.client_info.ClientInfo: - """Return information about this client (for metrics). - Returns: - client_info.ClientInfo: An object with metadata about this - client library. 
- """ - return gapic_v1.client_info.ClientInfo() +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + '{{ api.naming.warehouse_package_name }}', + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() __all__ = ( From fa87a34bb7f89d519d8843857d81d6868955a99a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 1 Mar 2019 23:38:46 -0800 Subject: [PATCH 0099/1339] [feat] Retries. (#103) This commit adds automatic retries for both idempotent and non-idempotent requests. --- .../gapic-generator/gapic/schema/wrappers.py | 10 +++ .../$sub/services/$service/client.py.j2 | 19 +++-- .../tests/system/test_retry.py | 69 +++++++++++++++++++ .../tests/unit/schema/wrappers/test_method.py | 23 ++++++- 4 files changed, 113 insertions(+), 8 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_retry.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 23bd0752c458..692d73d2d81c 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -436,6 +436,16 @@ def grpc_stub_type(self) -> str: server='stream' if self.server_streaming else 'unary', ) + @utils.cached_property + def idempotent(self) -> bool: + """Return True if we know this method is idempotent, False otherwise. + + Note: We are intentionally conservative here. It is far less bad + to falsely believe an idempotent method is non-idempotent than + the converse. 
+ """ + return bool(self.options.Extensions[annotations_pb2.http].get) + @utils.cached_property def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: """Return types referenced by this method.""" diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 05061200f92d..3f2b9afe5ffe 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -4,8 +4,9 @@ import pkg_resources from typing import Mapping, Optional, Sequence, Tuple, Type, Union +from google.api_core import exceptions from google.api_core import gapic_v1 -from google.api_core import retry +from google.api_core import retry as retries from google.auth import credentials {% for import_ in service.python_modules -%} @@ -54,7 +55,7 @@ class {{ service.name }}: {% for field in method.flattened_fields.values() -%} {{ field.name }}: {{ field.ident }} = None, {% endfor -%} - retry: retry.Retry = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: @@ -71,7 +72,7 @@ class {{ service.name }}: on the ``request`` instance; if ``request`` is provided, this should not be set. {% endfor -%} - retry (~.retry.Retry): Designation of what errors, if any, + retry (~.retries.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -106,8 +107,16 @@ class {{ service.name }}: # and friendly error handling. 
rpc = gapic_v1.method.wrap_method( self._transport.{{ method.name|snake_case }}, - default_retry=None, # FIXME - default_timeout=None, # FIXME + default_retry=retries.Retry(predicate=retries.if_exception_type( + {%- if method.idempotent %} + exceptions.Aborted, + {%- endif %} + exceptions.ServiceUnavailable, + {%- if method.idempotent %} + exceptions.Unknown, + {%- endif %} + )), + default_timeout=None, client_info=_client_info, ) {%- if method.field_headers %} diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py new file mode 100644 index 000000000000..0cb50eb49332 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -0,0 +1,69 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +from unittest import mock + +import pytest + +from google import showcase_v1alpha3 +from google.api_core import exceptions +from google.rpc import code_pb2 + + +def test_retry_nonidempotent(echo): + # Define our error and OK responses. + err = exceptions.ServiceUnavailable(message='whups') + ok = showcase_v1alpha3.EchoResponse(content='foo') + server = mock.Mock(side_effect=(err, err, ok)) + + # Mock the transport to send back the error responses followed by a + # success response. 
+ transport = type(echo).get_transport_class() + with mock.patch.object(transport, 'echo', + new_callable=mock.PropertyMock(return_value=server)): + with mock.patch.object(time, 'sleep'): + response = echo.echo({'content': 'bar'}) + assert response.content == 'foo' + assert server.call_count == 3 + + +def test_retry_idempotent(identity): + # Define our error and OK responses. + err409 = exceptions.Aborted(message='derp de derp') + err503 = exceptions.ServiceUnavailable(message='whups') + errwtf = exceptions.Unknown(message='huh?') + ok = showcase_v1alpha3.User(name='users/0', display_name='Guido') + server = mock.Mock(side_effect=(err409, err503, errwtf, ok)) + + # Mock the transport to send back the error responses followed by a + # success response. + transport = type(identity).get_transport_class() + with mock.patch.object(transport, 'get_user', + new_callable=mock.PropertyMock(return_value=server)): + with mock.patch.object(time, 'sleep'): + response = identity.get_user({'name': 'users/0'}) + assert response.name == 'users/0' + assert response.display_name == 'Guido' + assert server.call_count == 4 + + +def test_retry_bubble(echo): + with pytest.raises(exceptions.DeadlineExceeded): + echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), + 'message': 'This took longer than you said it should.', + }, + }) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 15305ee9c7d3..6b36ba798f50 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -51,10 +51,27 @@ def test_method_field_headers_none(): assert isinstance(method.field_headers, collections.Sequence) -def test_service_field_headers_present(): +def test_method_field_headers_present(): http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') - service = make_method('DoSomething', 
http_rule=http_rule) - assert service.field_headers == ('parent',) + method = make_method('DoSomething', http_rule=http_rule) + assert method.field_headers == ('parent',) + + +def test_method_idempotent_yes(): + http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') + method = make_method('DoSomething', http_rule=http_rule) + assert method.idempotent is True + + +def test_method_idempotent_no(): + http_rule = http_pb2.HttpRule(post='/v1/{parent=projects/*}/topics') + method = make_method('DoSomething', http_rule=http_rule) + assert method.idempotent is False + + +def test_method_idempotent_no_http_rule(): + method = make_method('DoSomething') + assert method.idempotent is False def test_method_unary_unary(): From 658c064e925107fc498fb28084e3d565c0056cc5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 1 Mar 2019 23:42:46 -0800 Subject: [PATCH 0100/1339] [chore] Bump to 0.7.0, beta. (#105) As best as I can tell, this is feature complete (every "must" in the spec is fulfilled). I still want to add more tests and particularly generated unit tests, but I feel confident bumping this to beta at this point. 
--- packages/gapic-generator/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index c5682aee1248..14839f02ec88 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.6.0', + version='0.7.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', @@ -51,7 +51,7 @@ ':python_version<"3.7"': ('dataclasses >= 0.4',), }, classifiers=( - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', From 88f4d82a817decc016e970c25467a28bf93c4d6a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 1 Mar 2019 23:46:04 -0800 Subject: [PATCH 0101/1339] [chore] Bump badge in README to beta. --- packages/gapic-generator/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index 00db5e5917dd..545ae1e8c134 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -34,7 +34,7 @@ Disclaimer This is not an official Google product. -.. |release level| image:: https://img.shields.io/badge/release%20level-alpha-orange.svg?style=flat +.. |release level| image:: https://img.shields.io/badge/release%20level-beta-yellow.svg?style=flat :target: https://cloud.google.com/terms/launch-stages .. |docs| image:: https://readthedocs.org/projects/gapic-generator-python/badge/?version=latest :target: https://gapic-generator-python.readthedocs.io/ From 7f5f088102d9c02b1b607cde70404abdaafa52f9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 6 Mar 2019 17:07:38 -0800 Subject: [PATCH 0102/1339] [fix] Fix import error if a proto has only nested enums. 
(#107) --- .../templates/$namespace/$name_$version/$sub/types/$proto.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index fa69d3476920..b9f3ab91a2c6 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -2,7 +2,7 @@ {% block content -%} {% with e = proto.disambiguate('enum'), p = proto.disambiguate('proto') %} -{% if proto.enums|length -%} +{% if proto.all_enums|length -%} import enum{% if e != 'enum' %} as {{ e }}{% endif %} {% endif -%} From 0dbac2e9766e467ee59e974b8ea51f21f97918ba Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 6 Mar 2019 17:08:59 -0800 Subject: [PATCH 0103/1339] [docs] Fix issues from crwilcox@ friction log. (#108) --- .../docs/getting-started/_verifying.rst | 2 +- .../gapic-generator/docs/getting-started/docker.rst | 13 ++++++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index 12b90bb518e7..a22cb7c57091 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -15,7 +15,7 @@ Next, install the library: .. code-block:: shell - $ cd /dest/ + $ cd dest/ $ pip install --editable . Now it is time to play with it! 
diff --git a/packages/gapic-generator/docs/getting-started/docker.rst b/packages/gapic-generator/docs/getting-started/docker.rst index 9e1dd355923b..1513f3237200 100644 --- a/packages/gapic-generator/docs/getting-started/docker.rst +++ b/packages/gapic-generator/docs/getting-started/docker.rst @@ -86,14 +86,21 @@ The output directory must also be writable. hard-coded; they can not be altered where they appear in the command below. -Perform that step with ``docker run``: +Docker requires the output directory to pre-exist; create a directory where +you want the generated code to go: + +.. code-block:: sh + + $ mkdir dest/ + +Perform the actual code generation step with ``docker run``: .. code-block:: shell # This is assumed to be run from the `googleapis` project root. $ docker run \ - --mount type=bind,source=google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly \ - --mount type=bind,source=dest/,destination=/out/ \ + --mount type=bind,source=$(pwd)/google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly \ + --mount type=bind,source=$(pwd)/dest/,destination=/out/ \ --rm \ --user $UID \ gcr.io/gapic-images/gapic-generator-python From 6f494dffdb677d1471c2fb2e955999862a18365e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 6 Mar 2019 17:11:31 -0800 Subject: [PATCH 0104/1339] [chore] Version number bump to 0.7.1 (#109) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 14839f02ec88..979c6c57ae4a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.7.0', + version='0.7.1', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 5e10bdd1e5497b38f34c00c737b8e776aa46fe1c Mon Sep 17 00:00:00 2001 From: Jaffrey Joy Date: Tue, 26 Mar 2019 22:29:51 +0530 Subject: [PATCH 
0105/1339] [docs] Fix image resource in verifying library docs (#110) The example script for verifying the library [over here][1] has a link to a resource whose access is denied which results in the following error on running: > "The URL does not appear to be accessible by us. Please double check > or download the content and pass it in." The new link added gives a proper response from the service on running. Output can be found here: https://pastebin.com/U0XGFcrt [1]: https://gapic-generator-python.readthedocs.io/en/0.7.1/getting-started/local.html#verifying-the-library --- packages/gapic-generator/docs/getting-started/_verifying.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index a22cb7c57091..37dcc482bb74 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -42,8 +42,8 @@ Here is a test script: 'type': vision.Feature.Type.LABEL_DETECTION, }], 'image': {'source': { - 'image_uri': 'https://s3.amazonaws.com/cdn0.michiganbulb.com' - '/images/350/66623.jpg', + 'image_uri': 'https://images.pexels.com/photos/67636' + '/rose-blue-flower-rose-blooms-67636.jpeg', }}, }], }) From 96606dec3f9bf8a35ac2c1936df966d7d01b60a8 Mon Sep 17 00:00:00 2001 From: Anup Kumar Panwar <1anuppanwar@gmail.com> Date: Sat, 30 Mar 2019 03:00:29 +0530 Subject: [PATCH 0106/1339] [docs] HTTPS for git clone (#112) --- packages/gapic-generator/docs/api-configuration.rst | 2 +- packages/gapic-generator/docs/getting-started/_example.rst | 2 +- packages/gapic-generator/docs/getting-started/local.rst | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/docs/api-configuration.rst b/packages/gapic-generator/docs/api-configuration.rst index dd8615b1eaeb..33d95462b95f 100644 --- 
a/packages/gapic-generator/docs/api-configuration.rst +++ b/packages/gapic-generator/docs/api-configuration.rst @@ -47,7 +47,7 @@ Your best bet is to likely clone this repository: .. code-block:: shell - $ git clone git@github.com:googleapis/api-common-protos.git + $ git clone https://github.com/googleapis/api-common-protos.git $ cd api-common-protos $ git checkout --track -b input-contract origin/input-contract diff --git a/packages/gapic-generator/docs/getting-started/_example.rst b/packages/gapic-generator/docs/getting-started/_example.rst index 1bed548dcc2c..e769caa341db 100644 --- a/packages/gapic-generator/docs/getting-started/_example.rst +++ b/packages/gapic-generator/docs/getting-started/_example.rst @@ -7,7 +7,7 @@ a special branch: .. code-block:: shell - $ git clone git@github.com:googleapis/googleapis.git + $ git clone https://github.com/googleapis/googleapis.git $ cd googleapis $ git checkout --track -b input-contract origin/input-contract $ cd .. diff --git a/packages/gapic-generator/docs/getting-started/local.rst b/packages/gapic-generator/docs/getting-started/local.rst index 61107150ba33..c3978fc55cc9 100644 --- a/packages/gapic-generator/docs/getting-started/local.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -88,7 +88,7 @@ As for this library itself, the recommended installation approach is # Due to its experimental state, this tool is not published to a # package manager; you should clone it. # (You can pip install it from GitHub, not not if you want to tinker.) - git clone git@github.com:googleapis/gapic-generator-python.git + git clone https://github.com/googleapis/gapic-generator-python.git cd gapic-generator-python/ # Install the tool. This will handle the virtualenv for you, and @@ -123,7 +123,7 @@ which define certain client-specific annotations. These are in the .. 
code-block:: shell - $ git clone git@github.com:googleapis/api-common-protos.git + $ git clone https://github.com/googleapis/api-common-protos.git $ cd api-common-protos $ git checkout --track -b input-contract origin/input-contract $ cd .. From 2b9c02fa0d2dc0a9723b18f6377cd88d522c8620 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 30 Mar 2019 11:18:38 -0700 Subject: [PATCH 0107/1339] [refactor] Remove HTTP/1.1 (gRPC-fallback) support. (#114) We do not yet have a credible testing strategy for gRPC fallback support in generated clients. We will reintroduce this feature once we are able to test it. --- .../$service/transports/__init__.py.j2 | 18 +-- .../services/$service/transports/http.py.j2 | 125 ------------------ 2 files changed, 4 insertions(+), 139 deletions(-) delete mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 index 1a17f003021d..7ffc5cb98c90 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 @@ -2,28 +2,18 @@ {% block content %} import collections -import typing from .base import {{ service.name }}Transport -from .http import {{ service.name }}HttpTransport +from .grpc import {{ service.name }}GrpcTransport + # Compile a registry of transports. _transport_registry = collections.OrderedDict() - -# gRPC is not guaranteed to be available, because `grpcio` may or may not -# be installed. If it is available, however, it should be the "first in" -# (default). 
-try: - from .grpc import {{ service.name }}GrpcTransport - _transport_registry['grpc'] = {{ service.name }}GrpcTransport -except ImportError: - pass - -# Always provide an HTTP/1.1 transport. -_transport_registry['http'] = {{ service.name }}HttpTransport +_transport_registry['grpc'] = {{ service.name }}GrpcTransport __all__ = ( '{{ service.name }}Transport', + '{{ service.name }}GrpcTransport', ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 deleted file mode 100644 index 6fb9c79f117c..000000000000 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/http.py.j2 +++ /dev/null @@ -1,125 +0,0 @@ -{% extends '_base.py.j2' %} - -{% block content %} -from typing import Sequence, Tuple -{% if service.has_lro %} -from google.api_core import operations_v1 -{%- endif %} -from google.auth import credentials -from google.auth.transport.requests import AuthorizedSession - -{% for import_ in service.python_modules -%} -{{ import_ }} -{% endfor %} -from .base import {{ service.name }}Transport - - -class {{ service.name }}HttpTransport({{ service.name }}Transport): - """HTTP backend transport for {{ service.name }}. - - {{ service.meta.doc|rst(width=72, indent=4) }} - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire, but uses HTTP/1.1. - """ - def __init__(self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, - ) -> None: - """Instantiate the transport. - - Args: - host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. 
- credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - """ - super().__init__(host=host, credentials=credentials) - self._session = AuthorizedSession(self._credentials) - {%- if service.has_lro %} - - @property - def operations_client(self) -> operations_v1.OperationsClient: - """Create the client designed to process long-running operations. - - This property caches on the instance; repeated calls return the same - client. - """ - # Sanity check: Only create a new client if we do not already have one. - if 'operations_client' not in self.__dict__: - from google.api_core import grpc_helpers - self.__dict__['operations_client'] = operations_v1.OperationsClient( - grpc_helpers.create_channel( - self._host, - credentials=self._credentials, - scopes=self.AUTH_SCOPES, - ) - ) - - # Return the client from cache. - return self.__dict__['operations_client'] - {%- endif %} - {%- for method in service.methods.values() %} - - def {{ method.name|snake_case }}(self, - request: {{ method.input.ident }}, *, - metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: - r"""Call the {{- ' ' -}} - {{ (method.name|snake_case).replace('_',' ')|wrap( - width=70, offset=45, indent=8) }} - {{- ' ' -}} method over HTTP. - - Args: - request (~.{{ method.input.ident }}): - The request object. - {{ method.input.meta.doc|rst(width=72, indent=16) }} - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - {%- if not method.void %} - - Returns: - ~.{{ method.output.ident }}: - {{ method.output.meta.doc|rst(width=72, indent=16) }} - {%- endif %} - """ - # Serialize the input. 
- {%- if method.input.ident.module.endswith('_pb2') %} - data = request.SerializeToString() - {%- else %} - data = {{ method.input.ident }}.serialize(request) - {%- endif %} - - # Send the request. - {% if not method.void %}response = {% endif %}self._session.post( - 'https://{host}/$rpc/{package}.{service}/{method}'.format( - host=self._host, - method='{{ method.name }}', - package='{{ '.'.join(method.meta.address.package) }}', - service='{{ service.name }}', - ), - data=data, - headers={ - 'content-type': 'application/x-protobuf', - }, - ) - {%- if not method.void %} - - # Return the response. - return {{ method.output.ident }}.{% if method.input.ident.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}( - response.content, - ) - {%- endif %} - {%- endfor %} - - -__all__ = ( - '{{ service.name }}HttpTransport', -) -{% endblock %} From a12f51ed9518cab4dfe3a2f8d4c975597c6a273b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 2 Apr 2019 09:56:54 -0700 Subject: [PATCH 0108/1339] [chore] Bump Showcase to 0.0.16 (#116) --- packages/gapic-generator/.circleci/config.yml | 2 +- packages/gapic-generator/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 8ef7ec94d3eb..910e16955238 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -93,7 +93,7 @@ jobs: showcase: docker: - image: python:3.7-slim - - image: gcr.io/gapic-images/gapic-showcase:0.0.13 + - image: gcr.io/gapic-images/gapic-showcase:0.0.16 steps: - checkout - run: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 4b2014c792e3..bd49f5e198ad 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -56,7 +56,7 @@ def showcase(session): # Install a client library for Showcase. 
with tempfile.TemporaryDirectory() as tmp_dir: - showcase_version = '0.0.13' + showcase_version = '0.0.16' # Download the Showcase descriptor. session.run( From 2fb253573085f8cd395d04eae3faeee14eacc7b7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 2 Apr 2019 09:58:33 -0700 Subject: [PATCH 0109/1339] [chore] Update to protoc 3.7.1 (#117) --- packages/gapic-generator/.circleci/config.yml | 6 +++--- packages/gapic-generator/setup.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 910e16955238..acc7411aaff2 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -105,12 +105,12 @@ jobs: name: Install nox. command: pip install nox - run: - name: Install protoc 3.6.1. + name: Install protoc 3.7.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.6.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip cd /usr/src/protoc/ - unzip protoc-3.6.1.zip + unzip protoc-3.7.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Run showcase tests. diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 979c6c57ae4a..101652b56e9d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -44,7 +44,7 @@ 'click >= 6.7', 'googleapis-common-protos >= 1.6.0b8', 'jinja2 >= 2.10', - 'protobuf >= 3.5.1', + 'protobuf >= 3.7.1', 'pypandoc >= 1.4', ), extras_require={ From be2b4c68e13c6e49200939d564c7f086bae1b0b6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 2 Apr 2019 10:01:34 -0700 Subject: [PATCH 0110/1339] [feat] Unit tests. 
(#115) This commit adds full unit tests (with full coverage) to the output from the generator. --- packages/gapic-generator/.circleci/config.yml | 202 ++++++++----- packages/gapic-generator/.coveragerc | 2 + .../gapic-generator/gapic/schema/metadata.py | 3 +- .../gapic-generator/gapic/schema/wrappers.py | 50 +++- .../$sub/services/$service/client.py.j2 | 12 +- .../services/$service/transports/base.py.j2 | 3 +- .../gapic/templates/.coveragerc.j2 | 16 + .../gapic/templates/noxfile.py.j2 | 25 ++ .../$name_$version/$sub/test_$service.py.j2 | 278 ++++++++++++++++++ packages/gapic-generator/noxfile.py | 48 ++- .../tests/unit/schema/wrappers/test_field.py | 130 ++++++-- 11 files changed, 653 insertions(+), 116 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/.coveragerc.j2 create mode 100644 packages/gapic-generator/gapic/templates/noxfile.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index acc7411aaff2..6535167f48ab 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -12,21 +12,33 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ - - showcase: + - showcase-unit-3.6: + requires: + - unit-3.6 + - unit-3.7 + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - showcase-unit-3.7: requires: - unit-3.6 - unit-3.7 filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase: + requires: + - showcase-unit-3.6 + - showcase-unit-3.7 + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - docs: filters: tags: only: /^\d+\.\d+\.\d+$/ - publish_package: requires: - - unit-3.6 - - unit-3.7 - showcase - docs filters: @@ -36,8 +48,6 @@ workflows: only: /^\d+\.\d+\.\d+$/ - publish_image: requires: - - unit-3.6 - - unit-3.7 - showcase - docs filters: @@ -46,50 +56,70 @@ workflows: tags: only: /^\d+\.\d+\.\d+$/ jobs: - unit-3.6: + docs: docker: - 
image: python:3.6-slim steps: - checkout - run: - name: Install pandoc - command: | - apt-get update - apt-get install -y pandoc + name: Install nox. + command: pip install nox - run: - name: Install nox and codecov. + name: Build the documentation. + command: nox -s docs + publish_image: + docker: + - image: docker + steps: + - checkout + - setup_remote_docker + - run: + name: Build Docker image. + command: docker build . -t gcr.io/gapic-images/gapic-generator-python:latest + - run: + name: Download curl + command: apk add --no-cache curl + - run: + name: Download the GCR credential helper. command: | - pip install nox - pip install codecov + curl -fsSL https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v1.5.0/docker-credential-gcr_linux_amd64-1.5.0.tar.gz \ + | tar xz --to-stdout ./docker-credential-gcr \ + > /usr/bin/docker-credential-gcr && chmod a+x /usr/bin/docker-credential-gcr - run: - name: Run unit tests. - command: nox -s unit-3.6 + name: Set up authentication to Google Container Registry. + command: | + echo ${GCLOUD_SERVICE_KEY} > ${GOOGLE_APPLICATION_CREDENTIALS} + docker-credential-gcr configure-docker - run: - name: Submit coverage data to codecov. - command: codecov - when: always - unit-3.7: + name: Tag the Docker image and push it to Google Container Registry. + command: | + if [ -n "$CIRCLE_TAG" ]; then + export MAJOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $1; }'` + export MINOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $2; }'` + export PATCH=`echo $CIRCLE_TAG | awk -F '.' 
'{ print $3; }'` + docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH + docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR + docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR + docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH + docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR + docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR + fi + docker push gcr.io/gapic-images/gapic-generator-python:latest + publish_package: docker: - image: python:3.7-slim steps: - checkout - run: - name: Install pandoc - command: | - apt-get update - apt-get install -y pandoc - - run: - name: Install nox and codecov. + name: Decrypt the PyPI key. command: | - pip install nox - pip install codecov - - run: - name: Run unit tests. - command: nox -s unit-3.7 + openssl aes-256-cbc -d \ + -in .circleci/.pypirc.enc \ + -out ~/.pypirc \ + -k "${PYPIRC_ENCRYPTION_KEY}" - run: - name: Submit coverage data to codecov. - command: codecov - when: always + name: Publish to PyPI. + command: python setup.py sdist upload showcase: docker: - image: python:3.7-slim @@ -115,67 +145,95 @@ jobs: - run: name: Run showcase tests. command: nox -s showcase - docs: + showcase-unit-3.6: docker: - image: python:3.6-slim steps: - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. 
command: pip install nox - run: - name: Build the documentation. - command: nox -s docs - publish_package: + name: Run unit tests. + command: nox -s showcase_unit-3.6 + showcase-unit-3.7: docker: - image: python:3.7-slim steps: - checkout - run: - name: Decrypt the PyPI key. + name: Install system dependencies. command: | - openssl aes-256-cbc -d \ - -in .circleci/.pypirc.enc \ - -out ~/.pypirc \ - -k "${PYPIRC_ENCRYPTION_KEY}" + apt-get update + apt-get install -y curl pandoc unzip - run: - name: Publish to PyPI. - command: python setup.py sdist upload - publish_image: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit-3.7 + unit-3.6: docker: - - image: docker + - image: python:3.6-slim steps: - checkout - - setup_remote_docker - - run: - name: Build Docker image. - command: docker build . -t gcr.io/gapic-images/gapic-generator-python:latest - run: - name: Download curl - command: apk add --no-cache curl + name: Install pandoc + command: | + apt-get update + apt-get install -y pandoc - run: - name: Download the GCR credential helper. + name: Install nox and codecov. command: | - curl -fsSL https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v1.5.0/docker-credential-gcr_linux_amd64-1.5.0.tar.gz \ - | tar xz --to-stdout ./docker-credential-gcr \ - > /usr/bin/docker-credential-gcr && chmod a+x /usr/bin/docker-credential-gcr + pip install nox + pip install codecov - run: - name: Set up authentication to Google Container Registry. + name: Run unit tests. + command: nox -s unit-3.6 + - run: + name: Submit coverage data to codecov. 
+ command: codecov + when: always + unit-3.7: + docker: + - image: python:3.7-slim + steps: + - checkout + - run: + name: Install pandoc command: | - echo ${GCLOUD_SERVICE_KEY} > ${GOOGLE_APPLICATION_CREDENTIALS} - docker-credential-gcr configure-docker + apt-get update + apt-get install -y pandoc - run: - name: Tag the Docker image and push it to Google Container Registry. + name: Install nox and codecov. command: | - if [ -n "$CIRCLE_TAG" ]; then - export MAJOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $1; }'` - export MINOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $2; }'` - export PATCH=`echo $CIRCLE_TAG | awk -F '.' '{ print $3; }'` - docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH - docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR - docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR - docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH - docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR - docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR - fi - docker push gcr.io/gapic-images/gapic-generator-python:latest + pip install nox + pip install codecov + - run: + name: Run unit tests. + command: nox -s unit-3.7 + - run: + name: Submit coverage data to codecov. + command: codecov + when: always diff --git a/packages/gapic-generator/.coveragerc b/packages/gapic-generator/.coveragerc index 950f01c104a6..008cfe8d3280 100644 --- a/packages/gapic-generator/.coveragerc +++ b/packages/gapic-generator/.coveragerc @@ -10,5 +10,7 @@ show_missing = True exclude_lines = # Re-enable the standard pragma pragma: NO COVER + # Enable an English version. + Impossible; skip coverage checks. 
# Ignore debug-only repr def __repr__ diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index a2203c5a2a01..fc62d98421ff 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -73,7 +73,8 @@ def __str__(self) -> str: # Return the dot-separated Python identifier. return '.'.join((module_name,) + self.parent + (self.name,)) - # Return the Python identifier for this module-less identifier. + # This type does not have a module (most common for PythonType). + # Return the Python identifier. return '.'.join(self.parent + (self.name,)) @property diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 692d73d2d81c..de39e2b001b0 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -69,6 +69,48 @@ def is_primitive(self) -> bool: """Return True if the field is a primitive, False otherwise.""" return isinstance(self.type, PythonType) + @utils.cached_property + def mock_value(self) -> str: + """Return a repr of a valid, usually truthy mock value.""" + # For primitives, send a truthy value computed from the + # field name. + answer = 'None' + if isinstance(self.type, PythonType): + if self.type.python_type == bool: + answer = 'True' + elif self.type.python_type == str: + answer = f"'{self.name}_value'" + elif self.type.python_type == bytes: + answer = f"b'{self.name}_blob'" + elif self.type.python_type == int: + answer = f'{sum([ord(i) for i in self.name])}' + elif self.type.python_type == float: + answer = f'0.{sum([ord(i) for i in self.name])}' + else: # Impossible; skip coverage checks. + raise TypeError('Unrecognized PythonType. This should ' + 'never happen; please file an issue.') + + # If this is an enum, select the first truthy value (or the zero + # value if nothing else exists). 
+ if isinstance(self.type, EnumType): + # Note: The slightly-goofy [:2][-1] lets us gracefully fall + # back to index 0 if there is only one element. + mock_value = self.type.values[:2][-1] + answer = f'{self.type.ident}.{mock_value.name}' + + # If this is another message, set one value on the message. + if isinstance(self.type, MessageType) and len(self.type.fields): + sub = next(iter(self.type.fields.values())) + answer = f'{self.type.ident}({sub.name}={sub.mock_value})' + + # If this is a repeated field, then the mock answer should + # be a list. + if self.repeated: + answer = f'[{answer}]' + + # Done; return the mock value. + return answer + @property def proto_type(self) -> str: """Return the proto type constant to be used in templates.""" @@ -446,6 +488,11 @@ def idempotent(self) -> bool: """ return bool(self.options.Extensions[annotations_pb2.http].get) + @property + def lro(self) -> bool: + """Return True if this is an LRO method, False otherwise.""" + return getattr(self.output, 'lro_response', None) + @utils.cached_property def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: """Return types referenced by this method.""" @@ -581,8 +628,7 @@ def python_modules(self) -> Sequence[imp.Import]: @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" - return any([getattr(m.output, 'lro_response', None) - for m in self.methods.values()]) + return any([m.lro for m in self.methods.values()]) def with_context(self, *, collisions: Set[str]) -> 'Service': """Return a derivative of this service with the provided context. 
diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 3f2b9afe5ffe..a8fbc4bdb7aa 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -87,9 +87,9 @@ class {{ service.name }}: {%- if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): - raise TypeError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') {%- endif %} # Create or coerce a protobuf request object. @@ -124,11 +124,11 @@ class {{ service.name }}: # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata({ + gapic_v1.routing_header.to_grpc_metadata(( {%- for field_header in method.field_headers %} - '{{ field_header }}': request.{{ field_header }}, + ('{{ field_header }}', request.{{ field_header }}), {%- endfor %} - }), + )), ) {%- endif %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 index 74542794967d..4d514fbd06e7 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 @@ -53,14 +53,13 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): self._credentials = credentials {%- if service.has_lro %} - @abc.abstractproperty + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" raise NotImplementedError {%- endif %} {%- for method in service.methods.values() %} - @abc.abstractmethod def {{ method.name|snake_case }}( self, request: {{ method.input.ident }}, diff --git a/packages/gapic-generator/gapic/templates/.coveragerc.j2 b/packages/gapic-generator/gapic/templates/.coveragerc.j2 new file mode 100644 index 000000000000..4d5b53abf4a8 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/.coveragerc.j2 @@ -0,0 +1,16 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. 
This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 new file mode 100644 index 000000000000..c774cc4e0a82 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -0,0 +1,25 @@ +{% extends "_base.py.j2" %} + +{% block content %} +import os + +import nox + + +@nox.session(python=['3.6', '3.7']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', '{{ api.naming.versioned_module_name }}'), + ) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 new file mode 100644 index 000000000000..e429bb22db69 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -0,0 +1,278 @@ +{% extends "_base.py.j2" %} + +{% block content %} +from unittest import mock + +import grpc + +import pytest + +{# Import the service itself as well as every proto module that it imports. 
-#} +from google import auth +from google.auth import credentials +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.name }} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports +{% if service.has_lro -%} +from google.api_core import future +from google.api_core import operations_v1 +from google.longrunning import operations_pb2 +{% endif -%} +{% for import_ in service.python_modules -%} +{{ import_ }} +{% endfor %} + + +{% for method in service.methods.values() -%} +def test_{{ method.name|snake_case }}(transport: str = 'grpc'): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = {{ method.input.ident }}() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif -%} + response = client.{{ method.name|snake_case }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ {% if method.void -%} + assert response is None + {% elif method.lro -%} + assert isinstance(response, future.Future) + {% elif method.server_streaming -%} + for message in response: + assert isinstance(message, {{ method.output.ident }}) + {% else -%} + assert isinstance(response, {{ method.output.ident }}) + {% endif %} + +{% if method.field_headers %} +def test_{{ method.name|snake_case }}_field_headers(): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}( + {%- for field_header in method.field_headers %} + {{ field_header }}='{{ field_header }}/value', + {%- endfor %} + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + call.return_value = {{ method.output.ident }}() + response = client.{{ method.name|snake_case }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + '{% for field_header in method.field_headers -%} + {{ field_header }}={{ field_header }}/value + {%- if not loop.last %}&{% endif -%} + {%- endfor %}', + ) in kw['metadata'] +{% endif %} {#- method.field_headers #} + +{% if method.flattened_fields %} +def test_{{ method.name|snake_case }}_flattened(): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = client.{{ method.name|snake_case }}( + {%- for key, field in method.flattened_fields.items() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + {% for key, field in method.flattened_fields.items() -%} + assert args[0].{{ key }} == {{ field.mock_value }} + {% endfor %} + + +def test_{{ method.name|snake_case }}_flattened_error(): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for key, field in method.flattened_fields.items() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) +{% endif %} {#- method.flattened_fields #} + +{% endfor -%} {#- method in methods #} + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = {{ service.name }}(transport=transport) + assert client._transport is transport + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client._transport, + transports.{{ service.name }}GrpcTransport, + ) + + +def test_{{ service.name|snake_case }}_base_transport(): + # Instantiate the base transport. + transport = transports.{{ service.name }}Transport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + {% for method in service.methods.values() -%} + '{{ method.name|snake_case }}', + {% endfor -%} + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + {% if service.has_lro -%} + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + {% endif %} + + +def test_{{ service.name|snake_case }}_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, 'default') as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + client = {{ service.name }}() + adc.assert_called_once_with(scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + )) + + +def test_{{ service.name|snake_case }}_host_no_port(): + {% with host = (service.host|default('localhost')).split(':')[0] -%} + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + host='{{ host }}', + transport='grpc', + ) + assert client._transport._host == '{{ host }}:443' + {% endwith %} + + +def test_{{ service.name|snake_case }}_host_with_port(): + {% with host = (service.host|default('localhost')).split(':')[0] -%} + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + host='{{ host }}:8000', + transport='grpc', + ) + assert client._transport._host == '{{ host }}:8000' + {% endwith %} + + +def test_{{ service.name|snake_case }}_grpc_transport_channel(): + channel = grpc.insecure_channel('http://localhost/') + transport = transports.{{ service.name }}GrpcTransport( + channel=channel, + ) + assert transport.grpc_channel is channel + + +{% if service.has_lro -%} +def test_{{ service.name|snake_case }}_grpc_lro_client(): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client._transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + +{% endif -%} +{% endblock %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index bd49f5e198ad..cf78abec9fee 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -19,6 +19,9 @@ import nox +showcase_version = '0.0.16' + + @nox.session(python=['3.6', '3.7']) def unit(session): """Run the unit test suite.""" @@ -56,8 +59,6 @@ def showcase(session): # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: - showcase_version = '0.0.16' - # Download the Showcase descriptor. session.run( 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' @@ -83,6 +84,49 @@ def showcase(session): session.run('py.test', '--quiet', os.path.join('tests', 'system')) +@nox.session(python=['3.6', '3.7']) +def showcase_unit(session): + """Run the generated unit tests against the Showcase library.""" + + # Install pytest and gapic-generator-python + session.install('coverage', 'pytest', 'pytest-cov') + session.install('.') + + # Install a client library for Showcase. + with tempfile.TemporaryDirectory() as tmp_dir: + + # Download the Showcase descriptor. + session.run( + 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' + f'download/v{showcase_version}/' + f'gapic-showcase-{showcase_version}.desc', + '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + external=True, + silent=True, + ) + + # Write out a client library for Showcase. + session.run('protoc', + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1alpha3/echo.proto', + 'google/showcase/v1alpha3/identity.proto', + 'google/showcase/v1alpha3/messaging.proto', + 'google/showcase/v1alpha3/testing.proto', + external=True, + ) + + # Install the library. + session.install(tmp_dir) + + # Run the tests. 
+ session.run( + 'py.test', + '--quiet', + os.path.join(tmp_dir, 'tests', 'unit'), + ) + + @nox.session(python='3.6') def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index f60f64227a4b..ffea8f4ed1a2 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -12,36 +12,35 @@ # See the License for the specific language governing permissions and # limitations under the License. +import collections + import pytest from google.api import field_behavior_pb2 from google.protobuf import descriptor_pb2 +from gapic.schema import metadata from gapic.schema import wrappers def test_field_properties(): - Type = descriptor_pb2.FieldDescriptorProto.Type - field = make_field(name='my_field', number=1, type=Type.Value('TYPE_BOOL')) + field = make_field(name='my_field', number=1, type='TYPE_BOOL') assert field.name == 'my_field' assert field.number == 1 assert field.type.python_type == bool def test_field_is_primitive(): - Type = descriptor_pb2.FieldDescriptorProto.Type - primitive_field = make_field(type=Type.Value('TYPE_INT32')) + primitive_field = make_field(type='TYPE_INT32') assert primitive_field.is_primitive def test_field_proto_type(): - Type = descriptor_pb2.FieldDescriptorProto.Type - primitive_field = make_field(type=Type.Value('TYPE_INT32')) + primitive_field = make_field(type='TYPE_INT32') assert primitive_field.proto_type == 'INT32' def test_field_not_primitive(): - Type = descriptor_pb2.FieldDescriptorProto.Type message = wrappers.MessageType( fields={}, nested_messages={}, @@ -49,7 +48,7 @@ def test_field_not_primitive(): message_pb=descriptor_pb2.DescriptorProto(), ) non_primitive_field = make_field( - type=Type.Value('TYPE_MESSAGE'), + type='TYPE_MESSAGE', type_name='bogus.Message', message=message, ) @@ -57,15 +56,13 @@ def 
test_field_not_primitive(): def test_ident(): - Type = descriptor_pb2.FieldDescriptorProto.Type - field = make_field(type=Type.Value('TYPE_BOOL')) + field = make_field(type='TYPE_BOOL') assert str(field.ident) == 'bool' def test_ident_repeated(): - Type = descriptor_pb2.FieldDescriptorProto.Type REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') - field = make_field(type=Type.Value('TYPE_BOOL'), label=REP) + field = make_field(type='TYPE_BOOL', label=REP) assert str(field.ident) == 'Sequence[bool]' @@ -95,29 +92,25 @@ def test_not_required(): def test_ident_sphinx(): - Type = descriptor_pb2.FieldDescriptorProto.Type - field = make_field(type=Type.Value('TYPE_BOOL')) + field = make_field(type='TYPE_BOOL') assert field.ident.sphinx == 'bool' def test_ident_sphinx_repeated(): - Type = descriptor_pb2.FieldDescriptorProto.Type REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') - field = make_field(type=Type.Value('TYPE_BOOL'), label=REP) + field = make_field(type='TYPE_BOOL', label=REP) assert field.ident.sphinx == 'Sequence[bool]' def test_type_primitives(): - T = descriptor_pb2.FieldDescriptorProto.Type - assert make_field(type=T.Value('TYPE_FLOAT')).type.python_type == float - assert make_field(type=T.Value('TYPE_INT64')).type.python_type == int - assert make_field(type=T.Value('TYPE_BOOL')).type.python_type == bool - assert make_field(type=T.Value('TYPE_STRING')).type.python_type == str - assert make_field(type=T.Value('TYPE_BYTES')).type.python_type == bytes + assert make_field(type='TYPE_FLOAT').type.python_type == float + assert make_field(type='TYPE_INT64').type.python_type == int + assert make_field(type='TYPE_BOOL').type.python_type == bool + assert make_field(type='TYPE_STRING').type.python_type == str + assert make_field(type='TYPE_BYTES').type.python_type == bytes def test_type_message(): - T = descriptor_pb2.FieldDescriptorProto.Type message = wrappers.MessageType( fields={}, nested_messages={}, @@ -125,7 +118,7 @@ 
def test_type_message(): message_pb=descriptor_pb2.DescriptorProto(), ) field = make_field( - type=T.Value('TYPE_MESSAGE'), + type='TYPE_MESSAGE', type_name='bogus.Message', message=message, ) @@ -133,13 +126,12 @@ def test_type_message(): def test_type_enum(): - T = descriptor_pb2.FieldDescriptorProto.Type enum = wrappers.EnumType( values={}, enum_pb=descriptor_pb2.EnumDescriptorProto(), ) field = make_field( - type=T.Value('TYPE_ENUM'), + type='TYPE_ENUM', type_name='bogus.Enumerable', enum=enum, ) @@ -147,16 +139,92 @@ def test_type_enum(): def test_type_invalid(): - T = descriptor_pb2.FieldDescriptorProto.Type with pytest.raises(TypeError): - make_field(type=T.Value('TYPE_GROUP')).type + make_field(type='TYPE_GROUP').type + + +def test_mock_value_int(): + field = make_field(name='foo_bar', type='TYPE_INT32') + assert field.mock_value == '728' + + +def test_mock_value_float(): + field = make_field(name='foo_bar', type='TYPE_DOUBLE') + assert field.mock_value == '0.728' + + +def test_mock_value_bool(): + field = make_field(name='foo_bar', type='TYPE_BOOL') + assert field.mock_value == 'True' + + +def test_mock_value_str(): + field = make_field(name='foo_bar', type='TYPE_STRING') + assert field.mock_value == "'foo_bar_value'" + + +def test_mock_value_bytes(): + field = make_field(name='foo_bar', type='TYPE_BYTES') + assert field.mock_value == "b'foo_bar_blob'" + + +def test_mock_value_repeated(): + field = make_field(name='foo_bar', type='TYPE_STRING', label=3) + assert field.mock_value == "['foo_bar_value']" + + +def test_mock_value_enum(): + values = [ + descriptor_pb2.EnumValueDescriptorProto(name='UNSPECIFIED', number=0), + descriptor_pb2.EnumValueDescriptorProto(name='SPECIFIED', number=1), + ] + enum = wrappers.EnumType( + values=[wrappers.EnumValueType(enum_value_pb=i) for i in values], + enum_pb=descriptor_pb2.EnumDescriptorProto(value=values), + meta=metadata.Metadata(address=metadata.Address( + module='bogus', + name='Enumerable', + )), + ) + field = 
make_field( + type='TYPE_ENUM', + type_name='bogus.Enumerable', + enum=enum, + ) + assert field.mock_value == 'bogus.Enumerable.SPECIFIED' + + +def test_mock_value_message(): + subfields = collections.OrderedDict(( + ('foo', make_field(name='foo', type='TYPE_INT32')), + ('bar', make_field(name='bar', type='TYPE_STRING')) + )) + message = wrappers.MessageType( + fields=subfields, + message_pb=descriptor_pb2.DescriptorProto(name='Message', field=[ + i.field_pb for i in subfields.values() + ]), + meta=metadata.Metadata(address=metadata.Address( + module='bogus', + name='Message', + )), + nested_enums={}, + nested_messages={}, + ) + field = make_field( + type='TYPE_MESSAGE', + type_name='bogus.Message', + message=message, + ) + assert field.mock_value == 'bogus.Message(foo=324)' def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: + T = descriptor_pb2.FieldDescriptorProto.Type kwargs.setdefault('name', 'my_field') kwargs.setdefault('number', 1) - kwargs.setdefault('type', - descriptor_pb2.FieldDescriptorProto.Type.Value('TYPE_BOOL'), - ) + kwargs.setdefault('type', T.Value('TYPE_BOOL')) + if isinstance(kwargs['type'], str): + kwargs['type'] = T.Value(kwargs['type']) field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) return wrappers.Field(field_pb=field_pb, message=message, enum=enum) From ed8eabd63cb2e2109077a860cc007dac1aaa277c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 2 Apr 2019 10:09:20 -0700 Subject: [PATCH 0111/1339] [chore] Bump version to 0.8.0 (#118) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 101652b56e9d..727bafb8d6b6 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.7.1', + version='0.8.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 
44eea1f9fc32b439b730a500aac546cdcdf56607 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 16 May 2019 16:12:57 -0400 Subject: [PATCH 0112/1339] [fix] Fix an incorrect __all__. (#119) --- packages/gapic-generator/gapic/schema/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/__init__.py b/packages/gapic-generator/gapic/schema/__init__.py index f3d2d01ef198..56a1998909e0 100644 --- a/packages/gapic-generator/gapic/schema/__init__.py +++ b/packages/gapic-generator/gapic/schema/__init__.py @@ -26,7 +26,7 @@ __all__ = ( - 'api', + 'API', 'metadata', 'wrappers', ) From 80ba8fbe012c182ad74bbb39135424dea2debe4c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 17 May 2019 09:13:42 -0400 Subject: [PATCH 0113/1339] [fix] Fix unit test generation for services with no default host. (#120) --- .../tests/unit/$name_$version/$sub/test_$service.py.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 index e429bb22db69..e9137fa458ff 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -228,7 +228,7 @@ def test_{{ service.name|snake_case }}_auth_adc(): def test_{{ service.name|snake_case }}_host_no_port(): - {% with host = (service.host|default('localhost')).split(':')[0] -%} + {% with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.name }}( credentials=credentials.AnonymousCredentials(), host='{{ host }}', @@ -239,7 +239,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): def test_{{ service.name|snake_case }}_host_with_port(): - {% with host = (service.host|default('localhost')).split(':')[0] -%} + {% 
with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.name }}( credentials=credentials.AnonymousCredentials(), host='{{ host }}:8000', From 59d293418dc39af47aa20837a1c98abfa0227ca2 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Mon, 20 May 2019 13:12:22 -0500 Subject: [PATCH 0114/1339] [chore] Update Dockerfile api-common-proto version (#123) --- packages/gapic-generator/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index b7a54001024a..4b30a6ba257e 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -7,8 +7,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* # Add protoc and our common protos. -COPY --from=gcr.io/gapic-images/api-common-protos:beta /usr/local/bin/protoc /usr/local/bin/protoc -COPY --from=gcr.io/gapic-images/api-common-protos:beta /protos/ /protos/ +COPY --from=gcr.io/gapic-images/api-common-protos:0.1.0 /usr/local/bin/protoc /usr/local/bin/protoc +COPY --from=gcr.io/gapic-images/api-common-protos:0.1.0 /protos/ /protos/ # Add our code to the Docker image. ADD . /usr/src/gapic-generator-python/ From 84420e6612a2eb5385faa403ae987781a7bb128c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 22 May 2019 21:59:49 -0400 Subject: [PATCH 0115/1339] [feat] Add automatic pagination support. (#121) This commit adds support for automatic pagination. Paginated methods now return a "Pager" class that wraps the underlying response, but adds an `__iter__` method which returns a generator that yields results until no more results remain. 
--- packages/gapic-generator/docs/templates.rst | 3 + .../gapic/generator/generator.py | 16 +- packages/gapic-generator/gapic/schema/api.py | 44 +--- .../gapic-generator/gapic/schema/metadata.py | 14 +- .../gapic-generator/gapic/schema/wrappers.py | 241 ++++++++++-------- .../$sub/services/$service/client.py.j2 | 39 ++- .../$sub/services/$service/pagers.py.j2 | 77 ++++++ .../services/$service/transports/base.py.j2 | 9 +- .../services/$service/transports/grpc.py.j2 | 22 +- .../gapic/templates/.coveragerc.j2 | 2 + .../$name_$version/$sub/test_$service.py.j2 | 68 ++++- .../gapic-generator/gapic/utils/__init__.py | 6 + packages/gapic-generator/gapic/utils/code.py | 23 ++ packages/gapic-generator/gapic/utils/doc.py | 25 ++ packages/gapic-generator/gapic/utils/lines.py | 23 ++ packages/gapic-generator/noxfile.py | 7 +- .../tests/system/test_pagination.py | 26 ++ .../tests/unit/generator/test_generator.py | 12 + .../tests/unit/schema/test_metadata.py | 9 - .../tests/unit/schema/wrappers/test_method.py | 67 +++++ .../unit/schema/wrappers/test_operation.py | 47 ---- .../tests/unit/schema/wrappers/test_python.py | 45 ++++ .../unit/schema/wrappers/test_service.py | 31 ++- .../tests/unit/utils/test_code.py | 36 +++ .../tests/unit/utils/test_lines.py | 37 +++ 25 files changed, 686 insertions(+), 243 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 create mode 100644 packages/gapic-generator/gapic/utils/code.py create mode 100644 packages/gapic-generator/gapic/utils/doc.py create mode 100644 packages/gapic-generator/tests/system/test_pagination.py delete mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_python.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_code.py diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst 
index 451b828748ac..702160551a9a 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -88,6 +88,9 @@ These are: * ``rst`` (:meth:`~.utils.rst.rst`): Converts a string to ReStructured Text. If the string appears not to be formatted (contains no obvious Markdown syntax characters), then this method forwards to ``wrap``. +* ``sort_lines`` (:meth:`~.utils.lines.sort_lines`): Sorts lines of text, + optionally de-duplicating if there are duplicates. This works best with + the Jinja ``{% filter sort_lines %}`` style syntax. * ``snake_case`` (:meth:`~.utils.case.to_snake_case`): Converts a string in any sane case system to snake case. * ``wrap`` (:meth:`~.utils.lines.wrap`): Wraps arbitrary text. Keyword diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index ff745d92d6e2..e8696b4de094 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -50,6 +50,7 @@ def __init__(self, opts: options.Options) -> None: # Add filters which templates require. self._env.filters['rst'] = utils.rst self._env.filters['snake_case'] = utils.to_snake_case + self._env.filters['sort_lines'] = utils.sort_lines self._env.filters['wrap'] = utils.wrap def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: @@ -157,11 +158,14 @@ def _get_file(self, template_name: str, *, api_schema=api.API, **context: Mapping): """Render a template to a protobuf plugin File object.""" + # Determine the target filename. fn = self._get_filename(template_name, api_schema=api_schema, context=context, ) - return {fn: CodeGeneratorResponse.File( + + # Render the file contents. 
+ cgr_file = CodeGeneratorResponse.File( content=formatter.fix_whitespace( self._env.get_template(template_name).render( api=api_schema, @@ -169,7 +173,15 @@ def _get_file(self, template_name: str, *, ), ), name=fn, - )} + ) + + # Sanity check: Do not render empty files. + if utils.empty(cgr_file.content) and not fn.endswith('__init__.py'): + return {} + + # Return the filename and content in a length-1 dictionary + # (because we track output files overall in a dictionary). + return {fn: cgr_file} def _get_filename( self, diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 7c8b7d36711b..082bfe15262c 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -411,27 +411,6 @@ def api_messages(self) -> Mapping[str, wrappers.MessageType]: *[p.all_messages for p in self.prior_protos.values()], ) - def _get_operation_type(self, - response_type: wrappers.Method, - metadata_type: wrappers.Method = None, - ) -> wrappers.PythonType: - """Return a wrapper around Operation that designates the end result. - - Args: - response_type (~.wrappers.Method): The response type that - the Operation ultimately uses. - metadata_type (~.wrappers.Method): The metadata type that - the Operation ultimately uses, if any. - - Returns: - ~.wrappers.OperationType: An OperationType object, which is - sent down to templates, and aware of the LRO types used. - """ - return wrappers.OperationType( - lro_response=response_type, - lro_metadata=metadata_type, - ) - def _load_children(self, children: Sequence, loader: Callable, *, address: metadata.Address, path: Tuple[int]) -> Mapping: """Return wrapped versions of arbitrary children from a Descriptor. @@ -525,36 +504,37 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], # Iterate over the methods and collect them into a dictionary. 
answer = collections.OrderedDict() for meth_pb, i in zip(methods, range(0, sys.maxsize)): - lro = meth_pb.options.Extensions[operations_pb2.operation_info] + lro = None # If the output type is google.longrunning.Operation, we use # a specialized object in its place. - output_type = self.api_messages[meth_pb.output_type.lstrip('.')] if meth_pb.output_type.endswith('google.longrunning.Operation'): - if not lro.response_type or not lro.metadata_type: + op = meth_pb.options.Extensions[operations_pb2.operation_info] + if not op.response_type or not op.metadata_type: raise TypeError( f'rpc {meth_pb.name} returns a google.longrunning.' 'Operation, but is missing a response type or ' 'metadata type.', ) - output_type = self._get_operation_type( - response_type=self.api_messages[ - address.resolve(lro.response_type) - ], - metadata_type=self.api_messages.get( - address.resolve(lro.metadata_type), - ), + lro = wrappers.OperationInfo( + response_type=self.api_messages[address.resolve( + op.response_type, + )], + metadata_type=self.api_messages[address.resolve( + op.metadata_type, + )], ) # Create the method wrapper object. answer[meth_pb.name] = wrappers.Method( input=self.api_messages[meth_pb.input_type.lstrip('.')], + lro=lro, method_pb=meth_pb, meta=metadata.Metadata( address=address.child(meth_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), ), - output=output_type, + output=self.api_messages[meth_pb.output_type.lstrip('.')], ) # Done; return the answer. diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index fc62d98421ff..e49037d08456 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -58,10 +58,6 @@ def __str__(self) -> str: Because we import modules as a whole, rather than individual members from modules, this is consistently `module.Name`. """ - # Edge case: For google.protobuf.Empty, use None instead. 
- if self.proto == 'google.protobuf.Empty': - return 'None' - # Most (but not all) types are in a module. if self.module: # If collisions are registered and conflict with our module, @@ -108,8 +104,14 @@ def proto_package(self) -> str: @cached_property def python_import(self) -> imp.Import: """Return the Python import for this type.""" - # If there is no naming object, this is a special case for operation. - # FIXME(#34): OperationType does not work well. Fix or expunge it. + # If there is no naming object, then this is a special case for + # Python types. + # + # FIXME: This does not attempt to do an isinstance check on PythonType + # to avoid a circular dependency. + # That part is fine, but a check for the absence of `api_naming` is + # less than ideal; the condition works, but it is a weak correlation + # that may not hold up over time. if not self.api_naming: return imp.Import( package=self.package, diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index de39e2b001b0..7fe5e1abfc39 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -31,7 +31,7 @@ import dataclasses import re from itertools import chain -from typing import List, Mapping, Sequence, Set, Union +from typing import List, Mapping, Optional, Sequence, Set, Union from google.api import annotations_pb2 from google.api import client_pb2 @@ -39,7 +39,6 @@ from google.protobuf import descriptor_pb2 from gapic import utils -from gapic.schema import imp from gapic.schema import metadata @@ -67,7 +66,7 @@ def ident(self) -> metadata.FieldIdentifier: @property def is_primitive(self) -> bool: """Return True if the field is a primitive, False otherwise.""" - return isinstance(self.type, PythonType) + return isinstance(self.type, PrimitiveType) @utils.cached_property def mock_value(self) -> str: @@ -75,7 +74,7 @@ def mock_value(self) -> str: # For primitives, send a truthy value 
computed from the # field name. answer = 'None' - if isinstance(self.type, PythonType): + if isinstance(self.type, PrimitiveType): if self.type.python_type == bool: answer = 'True' elif self.type.python_type == str: @@ -87,7 +86,7 @@ def mock_value(self) -> str: elif self.type.python_type == float: answer = f'0.{sum([ord(i) for i in self.name])}' else: # Impossible; skip coverage checks. - raise TypeError('Unrecognized PythonType. This should ' + raise TypeError('Unrecognized PrimitiveType. This should ' 'never happen; please file an issue.') # If this is an enum, select the first truthy value (or the zero @@ -139,7 +138,7 @@ def required(self) -> bool: self.options.Extensions[field_behavior_pb2.field_behavior]) @utils.cached_property - def type(self) -> Union['MessageType', 'EnumType', 'PythonType']: + def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: """Return the type of this field.""" # If this is a message or enum, return the appropriate thing. if self.type_name and self.message: @@ -158,15 +157,15 @@ def type(self) -> Union['MessageType', 'EnumType', 'PythonType']: # 10, 11, and 14 are intentionally missing. They correspond to # group (unused), message (covered above), and enum (covered above). if self.field_pb.type in (1, 2): - return PythonType(python_type=float) + return PrimitiveType.build(float) if self.field_pb.type in (3, 4, 5, 6, 7, 13, 15, 16, 17, 18): - return PythonType(python_type=int) + return PrimitiveType.build(int) if self.field_pb.type == 8: - return PythonType(python_type=bool) + return PrimitiveType.build(bool) if self.field_pb.type == 9: - return PythonType(python_type=str) + return PrimitiveType.build(str) if self.field_pb.type == 12: - return PythonType(python_type=bytes) + return PrimitiveType.build(bytes) # This should never happen. raise TypeError('Unrecognized protobuf type. 
This code should ' @@ -349,79 +348,60 @@ class PythonType: :meth:`Field.type` can return an object and the caller can be confident that a ``name`` property will be present. """ - python_type: type + meta: metadata.Metadata - @property - def name(self) -> str: - return self.python_type.__name__ + def __eq__(self, other): + return self.meta == other.meta + + def __ne__(self, other): + return not self == other @utils.cached_property def ident(self) -> metadata.Address: - """Return the identifier to be used in templates. + """Return the identifier to be used in templates.""" + return self.meta.address - Primitives have no import, and no module to reference, so this - is simply the name of the class (e.g. "int", "str"). - """ - return metadata.Address(name=self.name) + @property + def name(self) -> str: + return self.ident.name @dataclasses.dataclass(frozen=True) -class OperationType: - """Wrapper class for :class:`~.operations.Operation`. +class PrimitiveType(PythonType): + """A representation of a Python primitive type.""" + python_type: type - This exists for interface consistency, so Operations can be used - alongside :class:`~.MessageType` instances. - """ - lro_response: MessageType - lro_metadata: MessageType = None + @classmethod + def build(cls, primitive_type: type): + """Return a PrimitiveType object for the given Python primitive type. - @property - def ident(self) -> metadata.Address: - return self.meta.address + Args: + primitive_type (cls): A Python primitive type, such as + :class:`int` or :class:`str`. Despite not being a type, + ``None`` is also accepted here. 
- @utils.cached_property - def meta(self) -> metadata.Metadata: - """Return a Metadata object.""" - return metadata.Metadata( - address=metadata.Address( - name='Operation', - module='operation', - package=('google', 'api_core'), - collisions=self.lro_response.meta.address.collisions, - ), - documentation=descriptor_pb2.SourceCodeInfo.Location( - leading_comments='An object representing a long-running ' - 'operation. \n\n' - 'The result type for the operation will be ' - ':class:`{ident}`: {doc}'.format( - doc=self.lro_response.meta.doc, - ident=self.lro_response.ident.sphinx, - ), - ), - ) + Returns: + ~.PrimitiveType: The instantiated PrimitiveType object. + """ + # Primitives have no import, and no module to reference, so the + # address just uses the name of the class (e.g. "int", "str"). + return cls(meta=metadata.Metadata(address=metadata.Address( + name='None' if primitive_type is None else primitive_type.__name__, + )), python_type=primitive_type) - @property - def name(self) -> str: - """Return the class name.""" - # This is always "Operation", because it is always a reference to - # `google.api_core.operation.Operation`. - # - # This is hard-coded rather than subclassing PythonType (above) so - # that this generator is not forced to take an entire dependency - # on google.api_core just to get these strings. - return 'Operation' + def __eq__(self, other): + # If we are sent the actual Python type (not the PrimitiveType object), + # claim to be equal to that. + if not hasattr(other, 'meta'): + return self.python_type is other + return super().__eq__(other) - def with_context(self, *, collisions: Set[str]) -> 'OperationType': - """Return a derivative of this operation with the provided context. - This method is used to address naming collisions. The returned - ``OperationType`` object aliases module names to avoid naming - collisions in the file being written. 
- """ - return dataclasses.replace(self, - lro_response=self.lro_response.with_context(collisions=collisions), - lro_metadata=self.lro_metadata.with_context(collisions=collisions), - ) +@dataclasses.dataclass(frozen=True) +class OperationInfo: + """Representation of long-running operation info.""" + response_type: MessageType + metadata_type: MessageType @dataclasses.dataclass(frozen=True) @@ -430,6 +410,7 @@ class Method: method_pb: descriptor_pb2.MethodDescriptorProto input: MessageType output: MessageType + lro: OperationInfo = dataclasses.field(default=None) meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) @@ -437,6 +418,64 @@ class Method: def __getattr__(self, name): return getattr(self.method_pb, name) + @utils.cached_property + def client_output(self): + """Return the output from the client layer. + + This takes into account transformations made by the outer GAPIC + client to transform the output from the transport. + + Returns: + Union[~.MessageType, ~.PythonType]: + A description of the return type. + """ + # Void messages ultimately return None. + if self.void: + return PrimitiveType.build(None) + + # If this method is an LRO, return a PythonType instance representing + # that. + if self.lro: + return PythonType(meta=metadata.Metadata( + address=metadata.Address( + name='Operation', + module='operation', + package=('google', 'api_core'), + collisions=self.lro.response_type.ident.collisions, + ), + documentation=utils.doc( + 'An object representing a long-running operation. \n\n' + 'The result type for the operation will be ' + ':class:`{ident}`: {doc}'.format( + doc=self.lro.response_type.meta.doc, + ident=self.lro.response_type.ident.sphinx, + ), + ), + )) + + # If this method is paginated, return that method's pager class. 
+ if self.paged_result_field: + return PythonType(meta=metadata.Metadata( + address=metadata.Address( + name=f'{self.name}Pager', + package=self.ident.api_naming.module_namespace + ( + self.ident.api_naming.versioned_module_name, + 'services', + utils.to_snake_case(self.ident.parent[-1]), + ), + module='pagers', + collisions=self.input.ident.collisions, + ), + documentation=utils.doc( + f'{self.output.meta.doc}\n\n' + 'Iterating over this object will yield results and ' + 'resolve additional pages automatically.', + ), + )) + + # Return the usual output. + return self.output + @property def field_headers(self) -> Sequence[str]: """Return the field headers defined for this method.""" @@ -489,9 +528,29 @@ def idempotent(self) -> bool: return bool(self.options.Extensions[annotations_pb2.http].get) @property - def lro(self) -> bool: - """Return True if this is an LRO method, False otherwise.""" - return getattr(self.output, 'lro_response', None) + def ident(self) -> metadata.Address: + """Return the identifier data to be used in templates.""" + return self.meta.address + + @utils.cached_property + def paged_result_field(self) -> Optional[Field]: + """Return the response pagination field if the method is paginated.""" + # If the request field lacks any of the expected pagination fields, + # then the method is not paginated. + for page_field in ((self.input, int, 'page_size'), + (self.input, str, 'page_token'), + (self.output, str, 'next_page_token')): + field = page_field[0].fields.get(page_field[2], None) + if not field or field.type != page_field[1]: + return None + + # Return the first repeated field. + for field in self.output.fields.values(): + if field.repeated and field.message: + return field + + # We found no repeated fields. Return None. 
+ return None @utils.cached_property def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: @@ -499,7 +558,7 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: # Begin with the input (request) and output (response) messages. answer = [self.input] if not self.void: - answer.append(self.output) + answer.append(self.client_output) # If this method has flattening that is honored, add its # composite types. @@ -512,10 +571,9 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. - if getattr(self.output, 'lro_response', None): - answer.append(self.output.lro_response) - if getattr(self.output, 'lro_metadata', None): - answer.append(self.output.lro_metadata) + if self.lro: + answer.append(self.lro.response_type) + answer.append(self.lro.metadata_type) # Done; return the answer. return tuple(answer) @@ -551,6 +609,11 @@ class Service: def __getattr__(self, name): return getattr(self.service_pb, name) + @property + def has_lro(self) -> bool: + """Return whether the service has a long-running method.""" + return any([m.lro for m in self.methods.values()]) + @property def host(self) -> str: """Return the hostname for this service, if specified. @@ -608,28 +671,6 @@ def names(self) -> Set[str]: # Done; return the answer. return frozenset(answer) - @utils.cached_property - def python_modules(self) -> Sequence[imp.Import]: - """Return a sequence of Python modules, for import. - - The results of this method are in alphabetical order (by package, - then module), and do not contain duplicates. - - Returns: - Sequence[~.imp.Import]: The package and module, intended for - use in templates. 
- """ - answer = set() - for method in self.methods.values(): - for t in method.ref_types: - answer.add(t.ident.python_import) - return tuple(sorted(list(answer))) - - @property - def has_lro(self) -> bool: - """Return whether the service has a long-running method.""" - return any([m.lro for m in self.methods.values()]) - def with_context(self, *, collisions: Set[str]) -> 'Service': """Return a derivative of this service with the provided context. diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index a8fbc4bdb7aa..3390ddf5a42e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -9,9 +9,13 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials -{% for import_ in service.python_modules -%} -{{ import_ }} -{% endfor %} +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{% for ref_type in method.ref_types -%} +{{ ref_type.ident.python_import }} +{% endfor -%} +{% endfor -%} +{% endfilter %} from .transports import _transport_registry from .transports import {{ service.name }}Transport @@ -58,7 +62,7 @@ class {{ service.name }}: retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: + ) -> {{ method.client_output.ident }}: r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: @@ -80,18 +84,18 @@ class {{ service.name }}: {%- if not method.void %} Returns: - {{ method.output.ident.sphinx }}: - {{ method.output.meta.doc|wrap(width=72, indent=16) }} + {{ method.client_output.ident.sphinx }}: + {{ method.client_output.meta.doc|wrap(width=72, indent=16) 
}} {%- endif %} """ - {%- if method.flattened_fields %} + {% if method.flattened_fields -%} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - {%- endif %} + {% endif -%} # Create or coerce a protobuf request object. {% if method.flattened_fields -%} # If we have keyword arguments corresponding to fields on the @@ -139,16 +143,23 @@ class {{ service.name }}: timeout=timeout, metadata=metadata, ) - {%- if method.output.lro_response is defined %} + {%- if method.lro %} - # Wrap the response in an operation future + # Wrap the response in an operation future. response = operation.from_gapic( response, self._transport.operations_client, - {{ method.output.lro_response.ident }}, - {%- if method.output.lro_metadata %} - metadata_type={{ method.output.lro_metadata.ident }}, - {%- endif %} + {{ method.lro.response_type.ident }}, + metadata_type={{ method.lro.metadata_type.ident }}, + ) + {%- elif method.paged_result_field %} + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = {{ method.client_output.ident }}( + method=rpc, + request=request, + response=response, ) {%- endif %} {%- if not method.void %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 new file mode 100644 index 000000000000..df6a48fb9787 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 @@ -0,0 +1,77 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% for method in service.methods.values() | selectattr('paged_result_field') -%} +{% if loop.first -%} +{# This lives within the loop in order to ensure that this template + is empty if there are no paged methods. + -#} +import copy +from typing import Any, Callable, Iterable + +{% filter sort_lines -%} +{% for method in service.methods.values() | selectattr('paged_result_field') -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor %} +{% endfilter -%} +{% endif %} + +class {{ method.name }}Pager: + """A pager for iterating through ``{{ method.name|snake_case }}`` requests. + + This class thinly wraps an initial + :class:`{{ method.output.ident.sphinx }}` object, and + provides an ``__iter__`` method to iterate through its + ``{{ method.paged_result_field.name }}`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``{{ method.name }}`` requests and continue to iterate + through the ``{{ method.paged_result_field.name }}`` field on the + corresponding responses. + + All the usual :class:`{{ method.output.ident.sphinx }}` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[[{{ method.input.ident }}], + {{ method.output.ident }}], + request: {{ method.input.ident }}, + response: {{ method.output.ident }}): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`{{ method.input.ident.sphinx }}`): + The initial request object. + response (:class:`{{ method.output.ident.sphinx }}`): + The initial response object. + """ + self._method = method + self._request = {{ method.input.ident }}(request) + self._response = response + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: + while True: + # Iterate through the results on this response. + for result in self._response.{{ method.paged_result_field.name }}: + yield result + + # Sanity check: Is this the last page? If so, we are done. + if not self._response.next_page_token: + break + + # Get the next page. 
+ self._request.page_token = self._response.next_page_token + self._response = self._method(self._request) + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + +{% endfor %} +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 index 4d514fbd06e7..a3380cdc481e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 @@ -10,9 +10,12 @@ from google.api_core import operations_v1 {%- endif %} from google.auth import credentials -{% for import_ in service.python_modules -%} -{{ import_ }} -{% endfor %} +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor -%} +{% endfilter %} class {{ service.name }}Transport(metaclass=abc.ABCMeta): """Abstract transport class for {{ service.name }}.""" diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 index 00e7aa0bdc03..3d90c774327d 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 @@ -8,15 +8,15 @@ from google.api_core import grpc_helpers from google.api_core import operations_v1 {%- endif %} from google.auth import credentials -{%- if service.methods.values()|selectattr('void')|list|length %} -from google.protobuf import 
empty_pb2 -{%- endif %} import grpc -{% for import_ in service.python_modules -%} -{{ import_ }} -{% endfor %} +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor -%} +{% endfilter %} from .base import {{ service.name }}Transport @@ -114,7 +114,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): Returns: Callable[[~.{{ method.input.name }}], - {% if method.void %}None{% else %}~.{{ method.output.name }}{% endif %}]: + ~.{{ method.output.name }}]: A function that, when called, will call the underlying RPC on the server. """ @@ -125,12 +125,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if '{{ method.name|snake_case }}' not in self._stubs: self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', - request_serializer={{ method.input.ident }}.{% if method.input.ident.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, - {% if method.void -%} - response_deserializer=empty_pb2.Empty.FromString, - {% else -%} - response_deserializer={{ method.output.ident }}.{% if method.input.ident.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, - {% endif -%} + request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, + response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/.coveragerc.j2 b/packages/gapic-generator/gapic/templates/.coveragerc.j2 index 4d5b53abf4a8..f2ac95dda9d7 100644 --- a/packages/gapic-generator/gapic/templates/.coveragerc.j2 +++ 
b/packages/gapic-generator/gapic/templates/.coveragerc.j2 @@ -4,6 +4,8 @@ branch = True [report] fail_under = 100 show_missing = True +omit = + {{ api.naming.module_namespace|join("/") }}/{{ api.naming.module_name }}/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 index e9137fa458ff..031c7b912ea0 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -8,6 +8,7 @@ import grpc import pytest {# Import the service itself as well as every proto module that it imports. -#} +{% filter sort_lines -%} from google import auth from google.auth import credentials from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.name }} @@ -17,9 +18,12 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} -{% for import_ in service.python_modules -%} -{{ import_ }} -{% endfor %} +{% for method in service.methods.values() -%} +{% for ref_type in method.ref_types -%} +{{ ref_type.ident.python_import }} +{% endfor -%} +{% endfor -%} +{% endfilter %} {% for method in service.methods.values() -%} @@ -45,7 +49,11 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% elif method.server_streaming -%} call.return_value = iter([{{ method.output.ident }}()]) {% else -%} - call.return_value = {{ method.output.ident }}() + call.return_value = {{ method.output.ident }}( + {%- for field in method.output.fields.values() | rejectattr('message') %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) {% 
endif -%} response = client.{{ method.name|snake_case }}(request) @@ -63,7 +71,10 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): for message in response: assert isinstance(message, {{ method.output.ident }}) {% else -%} - assert isinstance(response, {{ method.output.ident }}) + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method.output.fields.values() | rejectattr('message') -%} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endfor %} {% endif %} {% if method.field_headers %} @@ -156,6 +167,53 @@ def test_{{ method.name|snake_case }}_flattened_error(): ) {% endif %} {#- method.flattened_fields #} + +{% if method.paged_result_field %} +def test_{{ method.name|snake_case }}_pager(): + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + ), + RuntimeError, + ) + results = [i for i in client.{{ method.name|snake_case }}( + request={}, + )] + assert len(results) == 6 + assert all([isinstance(i, {{ method.paged_result_field.message.ident }}) + for i in results]) +{% endif %} {#- method.paged_response_field #} + {% endfor -%} {#- method in methods #} def test_credentials_transport_error(): diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 38610c0ab838..1e9c95d00a0a 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -14,15 +14,21 @@ from gapic.utils.cache import cached_property from gapic.utils.case import to_snake_case +from gapic.utils.code import empty +from gapic.utils.doc import doc from gapic.utils.filename import to_valid_filename from gapic.utils.filename import to_valid_module_name +from gapic.utils.lines import sort_lines from gapic.utils.lines import wrap from gapic.utils.rst import rst __all__ = ( 'cached_property', + 'doc', + 'empty', 'rst', + 'sort_lines', 'to_snake_case', 'to_valid_filename', 'to_valid_module_name', diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py new file mode 100644 index 
000000000000..7e0df14fc6b6 --- /dev/null +++ b/packages/gapic-generator/gapic/utils/code.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def empty(content: str) -> bool: + """Return True if this file has no Python statements, False otherwise. + + Args: + content (str): A string containing Python code (or a lack thereof). + """ + return not any([i.lstrip() and not i.lstrip().startswith('#') + for i in content.split('\n')]) diff --git a/packages/gapic-generator/gapic/utils/doc.py b/packages/gapic-generator/gapic/utils/doc.py new file mode 100644 index 000000000000..3c98a024d53d --- /dev/null +++ b/packages/gapic-generator/gapic/utils/doc.py @@ -0,0 +1,25 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.protobuf import descriptor_pb2 + + +def doc(text: str) -> descriptor_pb2.SourceCodeInfo.Location: + """Return a Location object with the given documentation. 
+
+    This convenience method instantiates a protobuf location object,
+    which is expected by the Metadata class, and allows for classes
+    not based on protobuf locations to easily conform to the interface.
+    """
+    return descriptor_pb2.SourceCodeInfo.Location(leading_comments=text)
diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py
index 8ea2a581af4b..8d26e1bafccb 100644
--- a/packages/gapic-generator/gapic/utils/lines.py
+++ b/packages/gapic-generator/gapic/utils/lines.py
@@ -15,6 +15,29 @@
 import textwrap
 
 
+def sort_lines(text: str, dedupe: bool = True) -> str:
+    """Sort the individual lines of a block of text.
+
+    Args:
+        dedupe (bool): Remove duplicate lines with the same text.
+            Useful for dealing with import statements in templates.
+    """
+    # Preserve leading or trailing newlines.
+    leading = '\n' if text.startswith('\n') else ''
+    trailing = '\n' if text.endswith('\n') else ''
+
+    # Split the text into individual lines, throwing away any empty lines.
+    lines = [i for i in text.strip().split('\n') if i.strip()]
+
+    # De-duplicate the lines if requested.
+    if dedupe:
+        lines = list(set(lines))
+
+    # Return the final string.
+    answer = '\n'.join(sorted(lines))
+    return f'{leading}{answer}{trailing}'
+
+
 def wrap(text: str, width: int, *,
          offset: int = None, indent: int = 0) -> str:
     """Wrap the given string to the given width.
diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py
index cf78abec9fee..3f53f01b8043 100644
--- a/packages/gapic-generator/noxfile.py
+++ b/packages/gapic-generator/noxfile.py
@@ -117,13 +117,16 @@ def showcase_unit(session):
     )
 
     # Install the library.
-    session.install(tmp_dir)
+    session.chdir(tmp_dir)
+    session.install('-e', tmp_dir)
 
     # Run the tests.
session.run( 'py.test', '--quiet', - os.path.join(tmp_dir, 'tests', 'unit'), + '--cov=google', + '--cov-report=term', + os.path.join('tests', 'unit'), ) diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py new file mode 100644 index 000000000000..a0f316a6dd17 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -0,0 +1,26 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google import showcase + + +def test_pagination(echo): + text = 'The rain in Wales falls mainly on the snails.' 
+ results = [i for i in echo.paged_expand({ + 'content': text, + 'page_size': 3, + })] + assert len(results) == 9 + assert results == [showcase.EchoResponse(content=i) + for i in text.split(' ')] diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index aef23eddde26..a7d1dbc1ae18 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -51,6 +51,18 @@ def test_get_response(): assert cgr.file[0].content == 'I am a template result.\n' +def test_get_response_ignores_empty_files(): + g = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + lt.return_value = ['foo/bar/baz.py.j2'] + with mock.patch.object(jinja2.Environment, 'get_template') as gt: + gt.return_value = jinja2.Template('# Meaningless comment') + cgr = g.get_response(api_schema=make_api()) + lt.assert_called_once() + gt.assert_called_once() + assert len(cgr.file) == 0 + + def test_get_response_ignores_private_files(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index b42b17d7fc8e..76744e142a32 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -40,15 +40,6 @@ def test_address_str_parent(): assert str(addr) == 'baz.spam.eggs.Bacon' -def test_address_str_empty(): - addr = metadata.Address( - package=('google', 'protobuf'), - module='empty_pb2', - name='Empty', - ) - assert str(addr) == 'None' - - def test_address_proto(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.proto == 'foo.bar.Bacon' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 6b36ba798f50..3a05c32ff39e 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -46,6 +46,72 @@ def test_method_not_void(): assert not method.void +def test_method_client_output(): + output = make_message(name='Input', module='baz') + method = make_method('DoStuff', output_message=output) + assert method.client_output is method.output + + +def test_method_client_output_empty(): + empty = make_message(name='Empty', package='google.protobuf') + method = make_method('Meh', output_message=empty) + assert method.client_output == wrappers.PrimitiveType.build(None) + + +def test_method_client_output_paged(): + paged = make_field(name='foos', message=make_message('Foo'), repeated=True) + input_msg = make_message(name='ListFoosRequest', fields=( + make_field(name='parent', type=9), # str + make_field(name='page_size', type=5), # int + make_field(name='page_token', type=9), # str + )) + output_msg = make_message(name='ListFoosResponse', fields=( + paged, + make_field(name='next_page_token', type=9), # str + )) + method = make_method('ListFoos', + input_message=input_msg, + output_message=output_msg, + ) + assert method.paged_result_field == paged + assert method.client_output.ident.name == 'ListFoosPager' + + +def test_method_paged_result_field_not_first(): + paged = make_field(name='foos', message=make_message('Foo'), repeated=True) + input_msg = make_message(name='ListFoosRequest', fields=( + make_field(name='parent', type=9), # str + make_field(name='page_size', type=5), # int + make_field(name='page_token', type=9), # str + )) + output_msg = make_message(name='ListFoosResponse', fields=( + make_field(name='next_page_token', type=9), # str + paged, + )) + method = make_method('ListFoos', + input_message=input_msg, + output_message=output_msg, + ) + assert method.paged_result_field == paged + + 
+def test_method_paged_result_field_no_page_field(): + input_msg = make_message(name='ListFoosRequest', fields=( + make_field(name='parent', type=9), # str + make_field(name='page_size', type=5), # int + make_field(name='page_token', type=9), # str + )) + output_msg = make_message(name='ListFoosResponse', fields=( + make_field(name='foos', message=make_message('Foo'), repeated=False), + make_field(name='next_page_token', type=9), # str + )) + method = make_method('ListFoos', + input_message=input_msg, + output_message=output_msg, + ) + assert method.paged_result_field is None + + def test_method_field_headers_none(): method = make_method('DoSomething') assert isinstance(method.field_headers, collections.Sequence) @@ -150,6 +216,7 @@ def make_method( name=name, package=package, module=module, + parent=(f'{name}Service',), )), ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py deleted file mode 100644 index 7f3a79d1a8b4..000000000000 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_operation.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from google.protobuf import descriptor_pb2 - -from gapic.schema import metadata -from gapic.schema import wrappers - - -def test_operation(): - lro_response = wrappers.MessageType( - fields={}, - nested_messages={}, - nested_enums={}, - message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'), - ) - operation = wrappers.OperationType(lro_response=lro_response) - assert operation.name == 'Operation' - assert str(operation.ident) == 'operation.Operation' - assert operation.ident.sphinx == '~.operation.Operation' - - -def test_operation_meta(): - lro_response = wrappers.MessageType( - fields={}, - nested_messages={}, - nested_enums={}, - message_pb=descriptor_pb2.DescriptorProto(name='LroResponse'), - meta=metadata.Metadata(address=metadata.Address( - name='LroResponse', - module='foo', - )), - ) - operation = wrappers.OperationType(lro_response=lro_response) - assert 'representing a long-running operation' in operation.meta.doc - assert ':class:`~.foo.LroResponse`' in operation.meta.doc diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py new file mode 100644 index 000000000000..7e5082aedca7 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py @@ -0,0 +1,45 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import copy + +from gapic.schema import metadata +from gapic.schema import wrappers + + +def test_python_eq(): + meta = metadata.Metadata(address=metadata.Address( + name='Foo', module='bar', package=('google', 'api'), + )) + assert wrappers.PythonType(meta=meta) == wrappers.PythonType( + meta=copy.copy(meta), + ) + assert wrappers.PythonType(meta=metadata.Metadata( + address=metadata.Address(name='Baz', module='bar', package=()), + )) != wrappers.PythonType(meta=meta) + + +def test_primitive_eq(): + assert wrappers.PrimitiveType.build(None) == None # noqa: E711 + assert wrappers.PrimitiveType.build(int) == int + assert wrappers.PrimitiveType.build(str) == str + assert wrappers.PrimitiveType.build(bytes) == bytes + assert wrappers.PrimitiveType.build(str) != bytes + assert wrappers.PrimitiveType.build(int) == wrappers.PrimitiveType.build( + int, + ) + + +def test_primitive_name(): + assert wrappers.PrimitiveType.build(int).name == 'int' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 1dd6c05eef55..1b8d4fb07394 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -74,28 +74,34 @@ def test_service_python_modules(): get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), )) - assert service.python_modules == ( + imports = set() + for m in service.methods.values(): + imports = imports.union({i.ident.python_import for i in m.ref_types}) + assert imports == { imp.Import(package=('a', 'b', 'v1'), module='c'), imp.Import(package=('foo',), module='bacon'), imp.Import(package=('foo',), module='bar'), imp.Import(package=('foo',), module='baz'), imp.Import(package=('x', 'y', 'v1'), module='z'), - ) + } def test_service_python_modules_lro(): service = 
make_service_with_method_options() - assert service.python_modules == ( + method = service.methods['DoBigThing'] + imports = {i.ident.python_import for i in method.ref_types} + assert imports == { imp.Import(package=('foo',), module='bar'), imp.Import(package=('foo',), module='baz'), imp.Import(package=('foo',), module='qux'), imp.Import(package=('google', 'api_core'), module='operation'), - ) + } def test_service_python_modules_signature(): service = make_service_with_method_options( in_fields=( + # type=5 is int, so nothing is added. descriptor_pb2.FieldDescriptorProto(name='secs', type=5), descriptor_pb2.FieldDescriptorProto( name='d', @@ -105,14 +111,17 @@ def test_service_python_modules_signature(): ), method_signature='secs,d', ) - # type=5 is int, so nothing is added. - assert service.python_modules == ( + + # Ensure that the service will have the expected imports. + method = service.methods['DoBigThing'] + imports = {i.ident.python_import for i in method.ref_types} + assert imports == { imp.Import(package=('a', 'b', 'c'), module='v2'), imp.Import(package=('foo',), module='bar'), imp.Import(package=('foo',), module='baz'), imp.Import(package=('foo',), module='qux'), imp.Import(package=('google', 'api_core'), module='operation'), - ) + } def test_service_no_lro(): @@ -188,6 +197,7 @@ def get_method(name: str, ) -> wrappers.Method: input_ = get_message(in_type, fields=in_fields) output = get_message(out_type) + lro = None # Define a method descriptor. Set the field headers if appropriate. 
method_pb = descriptor_pb2.MethodDescriptorProto( @@ -196,9 +206,9 @@ def get_method(name: str, output_type=output.ident.proto, ) if lro_response_type: - output = wrappers.OperationType( - lro_response=get_message(lro_response_type), - lro_metadata=get_message(lro_metadata_type), + lro = wrappers.OperationInfo( + response_type=get_message(lro_response_type), + metadata_type=get_message(lro_metadata_type), ) if http_rule: ext_key = annotations_pb2.http @@ -211,6 +221,7 @@ def get_method(name: str, method_pb=method_pb, input=input_, output=output, + lro=lro, ) diff --git a/packages/gapic-generator/tests/unit/utils/test_code.py b/packages/gapic-generator/tests/unit/utils/test_code.py new file mode 100644 index 000000000000..1069443f7b94 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_code.py @@ -0,0 +1,36 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from gapic.utils import code + + +def test_empty_empty(): + assert code.empty('') + + +def test_empty_comments(): + assert code.empty('# The rain in Wales...\n# falls mainly...') + + +def test_empty_whitespace(): + assert code.empty(' ') + + +def test_empty_whitespace_comments(): + assert code.empty(' # The rain in Wales...') + + +def test_empty_code(): + assert not code.empty('import this') diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 85e96b5d4204..69218cc64bf3 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -15,6 +15,43 @@ from gapic.utils import lines +def test_sort_lines(): + assert lines.sort_lines( + 'import foo\nimport bar', + ) == 'import bar\nimport foo' + + +def test_sort_lines_keeps_leading_newline(): + assert lines.sort_lines( + '\nimport foo\nimport bar', + ) == '\nimport bar\nimport foo' + + +def test_sort_lines_keeps_trailing_newline(): + assert lines.sort_lines( + 'import foo\nimport bar\n', + ) == 'import bar\nimport foo\n' + + +def test_sort_lines_eliminates_blank_lines(): + assert lines.sort_lines( + 'import foo\n\n\nimport bar', + ) == 'import bar\nimport foo' + + +def test_sort_lines_dedupe(): + assert lines.sort_lines( + 'import foo\nimport bar\nimport foo', + ) == 'import bar\nimport foo' + + +def test_sort_lines_no_dedupe(): + assert lines.sort_lines( + 'import foo\nimport bar\nimport foo', + dedupe=False, + ) == 'import bar\nimport foo\nimport foo' + + def test_wrap_noop(): assert lines.wrap('foo bar baz', width=80) == 'foo bar baz' From c16088bc4ecff6e92c2c93ec50aa590033a6f7b1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 May 2019 10:30:03 -0500 Subject: [PATCH 0116/1339] [chore] Remove package metadata (#125) --- .../gapic-generator/gapic/schema/naming.py | 32 ------------- packages/gapic-generator/setup.py | 2 +- 
.../tests/unit/schema/test_naming.py | 46 ------------------- 3 files changed, 1 insertion(+), 79 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index f8d924edd63b..af09805a5989 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -17,7 +17,6 @@ import re from typing import Iterable, Sequence, Tuple -from google.api import client_pb2 from google.protobuf import descriptor_pb2 from gapic import utils @@ -116,37 +115,6 @@ def build(cls, raise ValueError('All protos must have the same proto package ' 'up to and including the version.') - # Iterate over the metadata annotations and collect the package - # information from there. - # - # This creates a naming class non-empty metadata annotation and - # uses Python's set logic to de-duplicate. There should only be one. - explicit_pkgs = set() - for fd in file_descriptors: - pkg = fd.options.Extensions[client_pb2.client_package] - naming = cls( - name=pkg.title or pkg.product_title, - namespace=tuple(pkg.namespace), - version=pkg.version, - ) - if naming: - explicit_pkgs.add(naming) - - # Sanity check: Ensure that any google.api.metadata provisions were - # consistent. - if len(explicit_pkgs) > 1: - raise ValueError( - 'If the google.api.client_package annotation is provided in ' - 'more than one file, it must be consistent.', - ) - - # Merge the package naming information and the metadata naming - # information, with the latter being preferred. - # Return a Naming object which effectively merges them. 
- if len(explicit_pkgs): - return dataclasses.replace(package_info, - **dataclasses.asdict(explicit_pkgs.pop()), - ) return package_info def __bool__(self): diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 727bafb8d6b6..2e9897284969 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -42,7 +42,7 @@ include_package_data=True, install_requires=( 'click >= 6.7', - 'googleapis-common-protos >= 1.6.0b8', + 'googleapis-common-protos >= 1.6.0', 'jinja2 >= 2.10', 'protobuf >= 3.7.1', 'pypandoc >= 1.4', diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index f3ad0e09ecac..763358d12b18 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -14,7 +14,6 @@ import pytest -from google.api import client_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import naming @@ -110,25 +109,6 @@ def test_build_no_annotations_no_version(): assert n.version == '' -def test_build_with_annotations(): - proto = descriptor_pb2.FileDescriptorProto( - name='spanner.proto', - package='google.spanner.v1', - ) - proto.options.Extensions[client_pb2.client_package].MergeFrom( - client_pb2.Package( - namespace=['Google', 'Cloud'], - title='Spanner', - version='v1', - ), - ) - n = naming.Naming.build(proto) - assert n.name == 'Spanner' - assert n.namespace == ('Google', 'Cloud') - assert n.version == 'v1' - assert n.product_name == 'Spanner' - - def test_build_no_namespace(): protos = ( descriptor_pb2.FileDescriptorProto( @@ -143,32 +123,6 @@ def test_build_no_namespace(): assert n.product_name == 'Foo' -def test_inconsistent_metadata_error(): - # Set up the first proto. 
- proto1 = descriptor_pb2.FileDescriptorProto( - name='spanner.proto', - package='google.spanner.v1', - ) - proto1.options.Extensions[client_pb2.client_package].MergeFrom( - client_pb2.Package(namespace=['Google', 'Cloud']), - ) - - # Set up the second proto. - # Note that - proto2 = descriptor_pb2.FileDescriptorProto( - name='spanner2.proto', - package='google.spanner.v1', - ) - proto2.options.Extensions[client_pb2.client_package].MergeFrom( - client_pb2.Package(title='Spanner', namespace=['Google', 'Cloud']), - ) - - # This should error. Even though the data in the metadata is consistent, - # it is expected to exactly match, and it does not. - with pytest.raises(ValueError): - naming.Naming.build(proto1, proto2) - - def test_inconsistent_package_error(): proto1 = descriptor_pb2.FileDescriptorProto(package='google.spanner.v1') proto2 = descriptor_pb2.FileDescriptorProto(package='spanner.v1') From aac7a60bc1f15c0cdd47a63ba0e9358568da6742 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 May 2019 14:45:26 -0500 Subject: [PATCH 0117/1339] [chore] Bump version to 0.9.0. (#124) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2e9897284969..d9acf3267f47 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.8.0', + version='0.9.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 48e82cec1b30aabd23ac74fa1385e888433bf5b5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sun, 26 May 2019 10:26:30 -0700 Subject: [PATCH 0118/1339] [feat] Add a CLI option to set the Python module (#126) This allows us to translate between the actual proto package and the desired Python module when automatic inference does the wrong thing (e.g. think `google.spanner.v1` -> `google.cloud.spanner_v1`). 
--- .../gapic-generator/gapic/cli/generate.py | 2 +- .../gapic/generator/options.py | 18 +++-- packages/gapic-generator/gapic/schema/api.py | 7 +- .../gapic-generator/gapic/schema/naming.py | 23 ++++++- .../tests/unit/schema/test_naming.py | 67 +++++++++++++++++++ 5 files changed, 107 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index 0cd1f7df1b09..db69367e5ddc 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -56,7 +56,7 @@ def generate( # Build the API model object. # This object is a frozen representation of the whole API, and is sent # to each template in the rendering step. - api_schema = api.API.build(req.proto_file, package=package) + api_schema = api.API.build(req.proto_file, opts=opts, package=package) # Translate into a protobuf CodeGeneratorResponse; this reads the # individual templates and renders them. diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 810780df0e33..dcf00dfe0684 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -26,7 +26,9 @@ class Options: on unrecognized arguments (essentially, we throw them away, but we do warn if it looks like it was meant for us). """ - templates: Tuple[str] + templates: Tuple[str] = dataclasses.field(default=('DEFAULT',)) + namespace: Tuple[str] = dataclasses.field(default=()) + name: str = '' @classmethod def build(cls, opt_string: str) -> 'Options': @@ -54,15 +56,17 @@ def build(cls, opt_string: str) -> 'Options': if not opt.startswith('python-gapic-'): continue - # Set the option. + # Set the option, using a key with the "python-gapic-" prefix + # stripped. + # # Just assume everything is a list at this point, and the # final instantiation step can de-list-ify where appropriate. 
- opts.setdefault(opt, []) - opts[opt].append(value) + opts.setdefault(opt[len('python-gapic-'):], []) + opts[opt[len('python-gapic-'):]].append(value) # If templates are specified, one of the specified directories # may be our default; perform that replacement. - templates = opts.pop('python-gapic-templates', ['DEFAULT']) + templates = opts.pop('templates', ['DEFAULT']) while 'DEFAULT' in templates: templates[templates.index('DEFAULT')] = os.path.realpath( os.path.join(os.path.dirname(__file__), '..', 'templates'), @@ -70,13 +74,15 @@ def build(cls, opt_string: str) -> 'Options': # Build the options instance. answer = Options( + name=opts.pop('name', ['']).pop(), + namespace=tuple(opts.pop('namespace', [])), templates=tuple([os.path.expanduser(i) for i in templates]), ) # If there are any options remaining, then we failed to recognize # them -- complain. for key in opts.keys(): - warnings.warn(f'Unrecognized option: `{key}`.') + warnings.warn(f'Unrecognized option: `python-gapic-{key}`.') # Done; return the built options. return answer diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 082bfe15262c..51b05b8e4fbc 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -26,6 +26,7 @@ from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 +from gapic.generator import options from gapic.schema import metadata from gapic.schema import wrappers from gapic.schema import naming as api_naming @@ -179,7 +180,8 @@ class API: @classmethod def build(cls, file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], - package: str = '') -> 'API': + package: str = '', + opts: options.Options = options.Options()) -> 'API': """Build the internal API schema based on the request. Args: @@ -190,12 +192,13 @@ def build(cls, code should be explicitly generated (including subpackages). 
Protos with packages outside this list are considered imports rather than explicit targets. + opts (~.options.Options): CLI options passed to the generator. """ # Save information about the overall naming for this API. naming = api_naming.Naming.build(*filter( lambda fd: fd.package.startswith(package), file_descriptors, - )) + ), opts=opts) # Iterate over each FileDescriptorProto and fill out a Proto # object describing it, and save these to the instance. diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index af09805a5989..18b55bdaa813 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -20,6 +20,7 @@ from google.protobuf import descriptor_pb2 from gapic import utils +from gapic.generator import options @dataclasses.dataclass(frozen=True) @@ -44,7 +45,8 @@ def __post_init__(self): @classmethod def build(cls, - *file_descriptors: Iterable[descriptor_pb2.FileDescriptorProto] + *file_descriptors: Iterable[descriptor_pb2.FileDescriptorProto], + opts: options.Options = options.Options(), ) -> 'Naming': """Return a full Naming instance based on these file descriptors. @@ -115,6 +117,25 @@ def build(cls, raise ValueError('All protos must have the same proto package ' 'up to and including the version.') + # If a naming information was provided on the CLI, override the naming + # value. + # + # We are liberal about what formats we take on the CLI; it will + # likely make sense to many users to use dot-separated namespaces and + # snake case, so handle that and do the right thing. + if opts.name: + package_info = dataclasses.replace(package_info, name=' '.join([ + i.capitalize() for i in opts.name.replace('_', ' ').split(' ') + ])) + if opts.namespace: + package_info = dataclasses.replace(package_info, namespace=tuple([ + # The join-and-split on "." here causes us to expand out + # dot notation that we may have been sent; e.g. 
a one-tuple + # with ('x.y',) will become a two-tuple: ('x', 'y') + i.capitalize() for i in '.'.join(opts.namespace).split('.') + ])) + + # Done; return the naming information. return package_info def __bool__(self): diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 763358d12b18..4fcbf609fe1e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -16,6 +16,7 @@ from google.protobuf import descriptor_pb2 +from gapic.generator import options from gapic.schema import naming @@ -144,6 +145,72 @@ def test_subpackages(): assert n.version == 'v0' +def test_cli_override_name(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.cloud.videointelligence.v1') + n = naming.Naming.build(proto1, + opts=options.Options(name='Video Intelligence'), + ) + assert n.namespace == ('Google', 'Cloud') + assert n.name == 'Video Intelligence' + assert n.version == 'v1' + + +def test_cli_override_name_underscores(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.cloud.videointelligence.v1') + n = naming.Naming.build(proto1, + opts=options.Options(name='video_intelligence'), + ) + assert n.namespace == ('Google', 'Cloud') + assert n.name == 'Video Intelligence' + assert n.version == 'v1' + + +def test_cli_override_namespace(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.spanner.v1') + n = naming.Naming.build(proto1, + opts=options.Options(namespace=('google', 'cloud')), + ) + assert n.namespace == ('Google', 'Cloud') + assert n.name == 'Spanner' + assert n.version == 'v1' + + +def test_cli_override_namespace_dotted(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.spanner.v1') + n = naming.Naming.build(proto1, + opts=options.Options(namespace=('google.cloud',)), + ) + assert 
n.namespace == ('Google', 'Cloud') + assert n.name == 'Spanner' + assert n.version == 'v1' + + +def test_cli_override_name_and_namespace(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.translation.v2') + n = naming.Naming.build(proto1, + opts=options.Options(namespace=('google', 'cloud'), name='translate'), + ) + assert n.namespace == ('Google', 'Cloud') + assert n.name == 'Translate' + assert n.version == 'v2' + + +def test_cli_override_name_and_namespace_versionless(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.translation') + n = naming.Naming.build(proto1, + opts=options.Options(namespace=('google', 'cloud'), name='translate'), + ) + assert n.namespace == ('Google', 'Cloud') + assert n.name == 'Translate' + assert not n.version + + def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('name', 'Hatstand') kwargs.setdefault('namespace', ('Google', 'Cloud')) From b6637cee6a57e2a48a0e41899b67fb2621d80c60 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 28 May 2019 09:22:18 -0700 Subject: [PATCH 0119/1339] [chore] Update to Showcase latest (0.2.0). (#128) This commit updates to the most recent version of Showcase, and goes from v1alpha3 to v1beta1. 
--- packages/gapic-generator/.circleci/config.yml | 2 +- packages/gapic-generator/noxfile.py | 14 +++++++------- .../gapic-generator/tests/system/test_grpc_lro.py | 4 ++-- .../gapic-generator/tests/system/test_retry.py | 6 +++--- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 6535167f48ab..8a215e20e413 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -123,7 +123,7 @@ jobs: showcase: docker: - image: python:3.7-slim - - image: gcr.io/gapic-images/gapic-showcase:0.0.16 + - image: gcr.io/gapic-images/gapic-showcase:0.2.0 steps: - checkout - run: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 3f53f01b8043..bb1cbf59269e 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -19,7 +19,7 @@ import nox -showcase_version = '0.0.16' +showcase_version = '0.2.0' @nox.session(python=['3.6', '3.7']) @@ -73,8 +73,8 @@ def showcase(session): session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1alpha3/echo.proto', - 'google/showcase/v1alpha3/identity.proto', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', external=True, ) @@ -109,10 +109,10 @@ def showcase_unit(session): session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1alpha3/echo.proto', - 'google/showcase/v1alpha3/identity.proto', - 'google/showcase/v1alpha3/messaging.proto', - 'google/showcase/v1alpha3/testing.proto', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + 'google/showcase/v1beta1/messaging.proto', + 'google/showcase/v1beta1/testing.proto', external=True, ) diff --git 
a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py index ba61336ccf78..c0e07f9a5d7f 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -14,7 +14,7 @@ from datetime import datetime, timedelta, timezone -from google import showcase_v1alpha3 +from google import showcase_v1beta1 def test_lro(echo): @@ -25,5 +25,5 @@ def test_lro(echo): } future = echo.wait(wait_request) response = future.result() - assert isinstance(response, showcase_v1alpha3.WaitResponse) + assert isinstance(response, showcase_v1beta1.WaitResponse) assert response.content.endswith('the snails...eventually.') diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 0cb50eb49332..5acb3e3df42a 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -17,7 +17,7 @@ import pytest -from google import showcase_v1alpha3 +from google import showcase_v1beta1 from google.api_core import exceptions from google.rpc import code_pb2 @@ -25,7 +25,7 @@ def test_retry_nonidempotent(echo): # Define our error and OK responses. 
err = exceptions.ServiceUnavailable(message='whups') - ok = showcase_v1alpha3.EchoResponse(content='foo') + ok = showcase_v1beta1.EchoResponse(content='foo') server = mock.Mock(side_effect=(err, err, ok)) # Mock the transport to send back the error responses followed by a @@ -44,7 +44,7 @@ def test_retry_idempotent(identity): err409 = exceptions.Aborted(message='derp de derp') err503 = exceptions.ServiceUnavailable(message='whups') errwtf = exceptions.Unknown(message='huh?') - ok = showcase_v1alpha3.User(name='users/0', display_name='Guido') + ok = showcase_v1beta1.User(name='users/0', display_name='Guido') server = mock.Mock(side_effect=(err409, err503, errwtf, ok)) # Mock the transport to send back the error responses followed by a From 63bd5b1e96ea7bed73f3a7e4ef7fe088706fddf5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 28 May 2019 09:40:51 -0700 Subject: [PATCH 0120/1339] [feat] Improve enums. (#127) This uses proto-plus enums, which add enum support to the marshal and cause enums (rather than ints) to be returned, and add support for accepting strings. 
--- .../$namespace/$name_$version/$sub/types/$proto.py.j2 | 8 ++------ .../$namespace/$name_$version/$sub/types/_enum.py.j2 | 2 +- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- 3 files changed, 4 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index b9f3ab91a2c6..b9fc360b9a1d 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -1,12 +1,8 @@ {% extends "_base.py.j2" %} {% block content -%} -{% with e = proto.disambiguate('enum'), p = proto.disambiguate('proto') %} -{% if proto.all_enums|length -%} -import enum{% if e != 'enum' %} as {{ e }}{% endif %} - -{% endif -%} -{% if proto.messages|length -%} +{% with p = proto.disambiguate('proto') %} +{% if proto.messages|length or proto.all_enums|length -%} import proto{% if p != 'proto' %} as {{ p }}{% endif -%} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 index 3f1a8a27d28f..c9f4cb0c4f0c 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 @@ -1,4 +1,4 @@ -class {{ enum.name }}({{ e }}.IntEnum): +class {{ enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index f5b3617af128..dc1448bc3eef 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -22,7 +22,7 @@ setuptools.setup( 'google-api-core >= 1.8.0, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', - 'proto-plus >= 0.3.0', + 'proto-plus >= 0.4.0', ), classifiers=[ 'Development Status :: 3 - Alpha', From 6c84edb5e31b9017889d93b6a53249c10c8c44c6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 28 May 2019 09:54:34 -0700 Subject: [PATCH 0121/1339] [fix] Do not silently drop `--python-gapic-` options. (#129) This is a follow-up to #126, where I added a CLI option, but neglected to update the Docker entrypoint script to actually pass it on to the generator, making the option useless when using Docker (the common case for now). This updates the script to be more robust, and check the prefix rather than an index of every known option. --- packages/gapic-generator/docker-entrypoint.sh | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/docker-entrypoint.sh b/packages/gapic-generator/docker-entrypoint.sh index 23ce48be093d..6cef2957a529 100755 --- a/packages/gapic-generator/docker-entrypoint.sh +++ b/packages/gapic-generator/docker-entrypoint.sh @@ -18,17 +18,23 @@ PLUGIN_OPTIONS="" # Parse out options. while [ -n "$1" ]; do case "$1" in - --python-gapic-templates ) - PLUGIN_OPTIONS="$PLUGIN_OPTIONS,python-gapic-templates=$2" - shift 2 - ;; -- ) shift break ;; * ) - # Ignore anything we do not recognize. - shift + # If this switch begins with "--python-gapic-" or "--gapic-", then it is + # meant for us. + if [[ $1 == --python-gapic-* ]]; then + PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" + shift 2 + else if [[ $1 == --gapic-* ]]; then + PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" + shift 2 + else + # Ignore anything we do not recognize. 
+ shift + fi ;; esac done From 72b885743caaed557a66a805fe431cad9cba7ccc Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 May 2019 16:58:52 -0700 Subject: [PATCH 0122/1339] [feat] Type-check the generator. (#130) This adds type-checking to the generator, and fixes a lot of erroneous or incomplete type annotations. This does _not_ add type-checking to the generated code (that will come next). --- packages/gapic-generator/.circleci/config.yml | 19 ++++++- packages/gapic-generator/.gitignore | 3 ++ packages/gapic-generator/gapic/cli/dump.py | 4 +- .../gapic/generator/generator.py | 10 ++-- .../gapic/generator/options.py | 10 ++-- packages/gapic-generator/gapic/schema/api.py | 48 ++++++++++-------- packages/gapic-generator/gapic/schema/imp.py | 2 +- .../gapic-generator/gapic/schema/metadata.py | 18 +++---- .../gapic-generator/gapic/schema/naming.py | 15 +++--- .../gapic-generator/gapic/schema/wrappers.py | 50 +++++++++++-------- packages/gapic-generator/gapic/utils/rst.py | 2 +- packages/gapic-generator/mypy.ini | 2 + packages/gapic-generator/noxfile.py | 11 +++- packages/gapic-generator/setup.py | 2 +- .../unit/schema/wrappers/test_message.py | 14 ++++++ .../unit/schema/wrappers/test_service.py | 2 +- 16 files changed, 133 insertions(+), 79 deletions(-) create mode 100644 packages/gapic-generator/mypy.ini diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 8a215e20e413..a1a1fd801adb 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -28,6 +28,8 @@ workflows: only: /^\d+\.\d+\.\d+$/ - showcase: requires: + - docs + - mypy - showcase-unit-3.6 - showcase-unit-3.7 filters: @@ -37,10 +39,13 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - mypy: + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - publish_package: requires: - showcase - - docs filters: branches: ignore: /.*/ @@ -49,7 +54,6 @@ workflows: - publish_image: requires: - 
showcase - - docs filters: branches: ignore: /.*/ @@ -67,6 +71,17 @@ jobs: - run: name: Build the documentation. command: nox -s docs + mypy: + docker: + - image: python:3.7-slim + steps: + - checkout + - run: + name: Install nox. + command: pip install nox + - run: + name: Check type annotations. + command: nox -s mypy publish_image: docker: - image: docker diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore index a9fe758f615a..ef4e4c2d30e2 100644 --- a/packages/gapic-generator/.gitignore +++ b/packages/gapic-generator/.gitignore @@ -56,3 +56,6 @@ coverage.xml # Make sure a generated file isn't accidentally committed. pylintrc pylintrc.test + +# Mypy +.mypy_cache diff --git a/packages/gapic-generator/gapic/cli/dump.py b/packages/gapic-generator/gapic/cli/dump.py index 31390e52fcf2..7862750a764f 100644 --- a/packages/gapic-generator/gapic/cli/dump.py +++ b/packages/gapic-generator/gapic/cli/dump.py @@ -18,8 +18,6 @@ import click -from google.protobuf.compiler import plugin_pb2 - @click.command() @click.option('--request', type=click.File('rb'), default=sys.stdin.buffer, @@ -41,6 +39,6 @@ def dump(request: typing.BinaryIO) -> None: click.secho( 'Request dumped to `request.desc`. ' 'This script will now exit 1 to satisfy protoc.', - file=sys.stderr, color='green', + file=sys.stderr, fg='green', ) sys.exit(1) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index e8696b4de094..634716715097 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import collections +from collections import OrderedDict +from typing import Dict, Mapping import os import re -from typing import Mapping, Sequence import jinja2 @@ -66,7 +66,7 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: ~.CodeGeneratorResponse: A response describing appropriate files and contents. See ``plugin.proto``. """ - output_files = collections.OrderedDict() + output_files: Dict[str, CodeGeneratorResponse.File] = OrderedDict() # Iterate over each template and add the appropriate output files # based on that template. @@ -88,7 +88,7 @@ def _render_template( self, template_name: str, *, api_schema: api.API, - ) -> Sequence[CodeGeneratorResponse.File]: + ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. Args: @@ -103,7 +103,7 @@ def _render_template( Sequence[~.CodeGeneratorResponse.File]: A sequence of File objects for inclusion in the final response. """ - answer = collections.OrderedDict() + answer: Dict[str, CodeGeneratorResponse.File] = OrderedDict() skip_subpackages = False # Sanity check: Rendering per service and per proto would be a diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index dcf00dfe0684..658e80f3ed0c 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Tuple +from typing import Dict, List, Tuple import dataclasses import os import warnings @@ -26,8 +26,8 @@ class Options: on unrecognized arguments (essentially, we throw them away, but we do warn if it looks like it was meant for us). """ - templates: Tuple[str] = dataclasses.field(default=('DEFAULT',)) - namespace: Tuple[str] = dataclasses.field(default=()) + templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) + namespace: Tuple[str, ...] 
= dataclasses.field(default=()) name: str = '' @classmethod @@ -45,10 +45,10 @@ def build(cls, opt_string: str) -> 'Options': ~.Options: The Options instance. """ # Parse out every option beginning with `python-gapic` - opts = {} + opts: Dict[str, List[str]] = {} for opt in opt_string.split(','): # Parse out the key and value. - value = True + value = 'true' if '=' in opt: opt, value = opt.split('=') diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 51b05b8e4fbc..7fb1af6acd14 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -21,9 +21,9 @@ import dataclasses import sys from itertools import chain -from typing import Callable, List, Mapping, Sequence, Set, Tuple +from typing import Callable, Dict, FrozenSet, Mapping, Sequence, Set, Tuple -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 from gapic.generator import options @@ -100,21 +100,21 @@ def module_name(self) -> str: return to_snake_case(self.name.split('/')[-1][:-len('.proto')]) @cached_property - def names(self) -> Set[str]: + def names(self) -> FrozenSet[str]: """Return a set of names used by this proto. This is used for detecting naming collisions in the module names used for imports. """ # Add names of all enums, messages, and fields. - answer = {e.name for e in self.all_enums.values()} + answer: Set[str] = {e.name for e in self.all_enums.values()} for message in self.all_messages.values(): answer = answer.union({f.name for f in message.fields.values()}) answer.add(message.name) # Identify any import module names where the same module name is used # from distinct packages. 
- modules = {} + modules: Dict[str, Set[str]] = {} for t in chain(*[m.field_types for m in self.all_messages.values()]): modules.setdefault(t.ident.module, set()) modules[t.ident.module].add(t.ident.package) @@ -175,7 +175,7 @@ class API: """ naming: api_naming.Naming all_protos: Mapping[str, Proto] - subpackage_view: Tuple[str] = dataclasses.field(default_factory=tuple) + subpackage_view: Tuple[str, ...] = dataclasses.field(default_factory=tuple) @classmethod def build(cls, @@ -202,7 +202,7 @@ def build(cls, # Iterate over each FileDescriptorProto and fill out a Proto # object describing it, and save these to the instance. - protos = {} + protos: Dict[str, Proto] = {} for fd in file_descriptors: protos[fd.name] = _ProtoBuilder( file_descriptor=fd, @@ -256,7 +256,7 @@ def subpackages(self) -> Mapping[str, 'API']: Each value in the mapping is another API object, but the ``protos`` property only shows protos belonging to the subpackage. """ - answer = collections.OrderedDict() + answer: Dict[str, API] = collections.OrderedDict() # Get the actual subpackages we have. # @@ -294,9 +294,9 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, file_to_generate: bool, naming: api_naming.Naming, prior_protos: Mapping[str, Proto] = None): - self.proto_messages = {} - self.proto_enums = {} - self.proto_services = {} + self.proto_messages: Dict[str, wrappers.MessageType] = {} + self.proto_enums: Dict[str, wrappers.EnumType] = {} + self.proto_services: Dict[str, wrappers.Service] = {} self.file_descriptor = file_descriptor self.file_to_generate = file_to_generate self.prior_protos = prior_protos or {} @@ -307,7 +307,8 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # the "path", which is a sequence of integers described in more # detail below; this code simply shifts from a list to a dict, # with tuples of paths as the dictionary keys. 
- self.docs = {} + self.docs: Dict[Tuple[int, ...], + descriptor_pb2.SourceCodeInfo.Location] = {} for location in file_descriptor.source_code_info.location: self.docs[tuple(location.path)] = location @@ -414,8 +415,9 @@ def api_messages(self) -> Mapping[str, wrappers.MessageType]: *[p.all_messages for p in self.prior_protos.values()], ) - def _load_children(self, children: Sequence, loader: Callable, *, - address: metadata.Address, path: Tuple[int]) -> Mapping: + def _load_children(self, + children: Sequence, loader: Callable, *, + address: metadata.Address, path: Tuple[int, ...]) -> Mapping: """Return wrapped versions of arbitrary children from a Descriptor. Args: @@ -445,9 +447,10 @@ def _load_children(self, children: Sequence, loader: Callable, *, answer[wrapped.name] = wrapped return answer - def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], - address: metadata.Address, path: Tuple[int], - ) -> Mapping[str, wrappers.Field]: + def _get_fields(self, + field_pbs: Sequence[descriptor_pb2.FieldDescriptorProto], + address: metadata.Address, path: Tuple[int, ...], + ) -> Dict[str, wrappers.Field]: """Return a dictionary of wrapped fields for the given message. Args: @@ -472,7 +475,7 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], # message wrapper is not yet created, because it needs this object # first) and this will be None. This case is addressed in the # `_load_message` method. - answer = collections.OrderedDict() + answer: Dict[str, wrappers.Field] = collections.OrderedDict() for field_pb, i in zip(field_pbs, range(0, sys.maxsize)): answer[field_pb.name] = wrappers.Field( field_pb=field_pb, @@ -487,9 +490,10 @@ def _get_fields(self, field_pbs: List[descriptor_pb2.FieldDescriptorProto], # Done; return the answer. 
return answer - def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], - address: metadata.Address, path: Tuple[int], - ) -> Mapping[str, wrappers.Method]: + def _get_methods(self, + methods: Sequence[descriptor_pb2.MethodDescriptorProto], + address: metadata.Address, path: Tuple[int, ...], + ) -> Mapping[str, wrappers.Method]: """Return a dictionary of wrapped methods for the given service. Args: @@ -505,7 +509,7 @@ def _get_methods(self, methods: List[descriptor_pb2.MethodDescriptorProto], :class:`~.wrappers.Method` objects. """ # Iterate over the methods and collect them into a dictionary. - answer = collections.OrderedDict() + answer: Dict[str, wrappers.Method] = collections.OrderedDict() for meth_pb, i in zip(methods, range(0, sys.maxsize)): lro = None diff --git a/packages/gapic-generator/gapic/schema/imp.py b/packages/gapic-generator/gapic/schema/imp.py index 562cdfebed99..c7a2c6182301 100644 --- a/packages/gapic-generator/gapic/schema/imp.py +++ b/packages/gapic-generator/gapic/schema/imp.py @@ -18,7 +18,7 @@ @dataclasses.dataclass(frozen=True, order=True) class Import: - package: Tuple[str] + package: Tuple[str, ...] module: str alias: str = '' diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index e49037d08456..e8ad6991a1d2 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -27,7 +27,7 @@ """ import dataclasses -from typing import Tuple, Set +from typing import FrozenSet, Tuple from google.protobuf import descriptor_pb2 @@ -40,13 +40,13 @@ class Address: name: str = '' module: str = '' - module_path: Tuple[int] = dataclasses.field(default_factory=tuple) - package: Tuple[str] = dataclasses.field(default_factory=tuple) - parent: Tuple[str] = dataclasses.field(default_factory=tuple) + module_path: Tuple[int, ...] = dataclasses.field(default_factory=tuple) + package: Tuple[str, ...] 
= dataclasses.field(default_factory=tuple) + parent: Tuple[str, ...] = dataclasses.field(default_factory=tuple) api_naming: naming.Naming = dataclasses.field( default_factory=naming.Naming, ) - collisions: Set[str] = dataclasses.field(default_factory=frozenset) + collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) def __eq__(self, other) -> bool: return all([getattr(self, i) == getattr(other, i) for i @@ -145,13 +145,13 @@ def sphinx(self) -> str: return self.name @property - def subpackage(self) -> Tuple[str]: + def subpackage(self) -> Tuple[str, ...]: """Return the subpackage below the versioned module name, if any.""" return tuple( self.package[len(self.api_naming.proto_package.split('.')):] ) - def child(self, child_name: str, path: Tuple[int]) -> 'Address': + def child(self, child_name: str, path: Tuple[int, ...]) -> 'Address': """Return a new child of the current Address. Args: @@ -236,7 +236,7 @@ def resolve(self, selector: str) -> str: return f'{".".join(self.package)}.{selector}' return selector - def with_context(self, *, collisions: Set[str]) -> 'Address': + def with_context(self, *, collisions: FrozenSet[str]) -> 'Address': """Return a derivative of this address with the provided context. This method is used to address naming collisions. The returned @@ -271,7 +271,7 @@ def doc(self): return '\n\n'.join(self.documentation.leading_detached_comments) return '' - def with_context(self, *, collisions: Set[str]) -> 'Metadata': + def with_context(self, *, collisions: FrozenSet[str]) -> 'Metadata': """Return a derivative of this metadata with the provided context. This method is used to address naming collisions. 
The returned diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 18b55bdaa813..7ccfb7783288 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -15,7 +15,7 @@ import dataclasses import os import re -from typing import Iterable, Sequence, Tuple +from typing import cast, List, Match, Sequence, Tuple from google.protobuf import descriptor_pb2 @@ -34,7 +34,7 @@ class Naming: (as ``api.naming``). """ name: str = '' - namespace: Tuple[str] = dataclasses.field(default_factory=tuple) + namespace: Tuple[str, ...] = dataclasses.field(default_factory=tuple) version: str = '' product_name: str = '' proto_package: str = '' @@ -45,7 +45,7 @@ def __post_init__(self): @classmethod def build(cls, - *file_descriptors: Iterable[descriptor_pb2.FileDescriptorProto], + *file_descriptors: descriptor_pb2.FileDescriptorProto, opts: options.Options = options.Options(), ) -> 'Naming': """Return a full Naming instance based on these file descriptors. 
@@ -100,7 +100,8 @@ def build(cls, pattern += version # Okay, do the match - match = re.search(pattern=pattern, string=root_package).groupdict() + match = cast(Match, + re.search(pattern=pattern, string=root_package)).groupdict() match['namespace'] = match['namespace'] or '' package_info = cls( name=match['name'].capitalize(), @@ -155,14 +156,14 @@ def module_name(self) -> str: return utils.to_valid_module_name(self.name) @property - def module_namespace(self) -> Sequence[str]: + def module_namespace(self) -> Tuple[str, ...]: """Return the appropriate Python module namespace as a tuple.""" return tuple(utils.to_valid_module_name(i) for i in self.namespace) @property - def namespace_packages(self) -> Tuple[str]: + def namespace_packages(self) -> Tuple[str, ...]: """Return the appropriate Python namespace packages.""" - answer = [] + answer: List[str] = [] for cursor in [i.lower() for i in self.namespace]: answer.append(f'{answer[-1]}.{cursor}' if answer else cursor) return tuple(answer) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 7fe5e1abfc39..4c45d21b2904 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -31,9 +31,10 @@ import dataclasses import re from itertools import chain -from typing import List, Mapping, Optional, Sequence, Set, Union +from typing import (cast, Dict, FrozenSet, List, Mapping, Optional, + Sequence, Set, Union) -from google.api import annotations_pb2 +from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 from google.protobuf import descriptor_pb2 @@ -46,8 +47,8 @@ class Field: """Description of a field.""" field_pb: descriptor_pb2.FieldDescriptorProto - message: 'MessageType' = None - enum: 'EnumType' = None + message: Optional['MessageType'] = None + enum: Optional['EnumType'] = None meta: metadata.Metadata = 
dataclasses.field( default_factory=metadata.Metadata, ) @@ -113,9 +114,9 @@ def mock_value(self) -> str: @property def proto_type(self) -> str: """Return the proto type constant to be used in templates.""" - return descriptor_pb2.FieldDescriptorProto.Type.Name( + return cast(str, descriptor_pb2.FieldDescriptorProto.Type.Name( self.field_pb.type, - )[len('TYPE_'):] + ))[len('TYPE_'):] @property def repeated(self) -> bool: @@ -125,7 +126,7 @@ def repeated(self) -> bool: bool: Whether this field is repeated. """ return self.label == \ - descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + descriptor_pb2.FieldDescriptorProto.Label.Value(b'LABEL_REPEATED') @property def required(self) -> bool: @@ -171,7 +172,7 @@ def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: raise TypeError('Unrecognized protobuf type. This code should ' 'not be reachable; please file a bug.') - def with_context(self, *, collisions: Set[str]) -> 'Field': + def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': """Return a derivative of this field with the provided context. This method is used to address naming collisions. The returned @@ -217,8 +218,8 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address - def get_field(self, *field_path: Sequence[str], - collisions: Set[str] = frozenset()) -> Field: + def get_field(self, *field_path: str, + collisions: FrozenSet[str] = frozenset()) -> Field: """Return a field arbitrarily deep in this message's structure. This method recursively traverses the message tree to return the @@ -264,12 +265,19 @@ def get_field(self, *field_path: Sequence[str], 'in the fields list in a position other than the end.', ) + # Sanity check: If this cursor has no message, there is a problem. 
+ if not cursor.message: + raise KeyError( + f'Field {".".join(field_path)} could not be resolved from ' + f'{cursor.name}.', + ) + # Recursion case: Pass the remainder of the path to the sub-field's # message. return cursor.message.get_field(*field_path[1:], collisions=collisions) def with_context(self, *, - collisions: Set[str], + collisions: FrozenSet[str], skip_fields: bool = False, ) -> 'MessageType': """Return a derivative of this message with the provided context. @@ -328,7 +336,7 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address - def with_context(self, *, collisions: Set[str]) -> 'EnumType': + def with_context(self, *, collisions: FrozenSet[str]) -> 'EnumType': """Return a derivative of this enum with the provided context. This method is used to address naming collisions. The returned @@ -369,10 +377,10 @@ def name(self) -> str: @dataclasses.dataclass(frozen=True) class PrimitiveType(PythonType): """A representation of a Python primitive type.""" - python_type: type + python_type: Optional[type] @classmethod - def build(cls, primitive_type: type): + def build(cls, primitive_type: Optional[type]): """Return a PrimitiveType object for the given Python primitive type. 
Args: @@ -410,7 +418,7 @@ class Method: method_pb: descriptor_pb2.MethodDescriptorProto input: MessageType output: MessageType - lro: OperationInfo = dataclasses.field(default=None) + lro: Optional[OperationInfo] = dataclasses.field(default=None) meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) @@ -487,7 +495,7 @@ def field_headers(self) -> Sequence[str]: @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: """Return the signature defined for this method.""" - answer = collections.OrderedDict() + answer: Dict[str, Field] = collections.OrderedDict() signatures = self.options.Extensions[client_pb2.method_signature] # Iterate over each signature and add the appropriate fields. @@ -583,7 +591,7 @@ def void(self) -> bool: """Return True if this method has no return value, False otherwise.""" return self.output.ident.proto == 'google.protobuf.Empty' - def with_context(self, *, collisions: Set[str]) -> 'Method': + def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': """Return a derivative of this method with the provided context. This method is used to address naming collisions. The returned @@ -623,7 +631,7 @@ def host(self) -> str: """ if self.options.Extensions[client_pb2.default_host]: return self.options.Extensions[client_pb2.default_host] - return None + return '' @property def oauth_scopes(self) -> Sequence[str]: @@ -647,7 +655,7 @@ def module_name(self) -> str: return utils.to_snake_case(self.name) @utils.cached_property - def names(self) -> Set[str]: + def names(self) -> FrozenSet[str]: """Return a set of names used in this service. This is used for detecting naming collisions in the module names @@ -660,7 +668,7 @@ def names(self) -> Set[str]: # Identify any import module names where the same module name is used # from distinct packages. 
- modules = {} + modules: Dict[str, Set[str]] = {} for t in chain(*[m.ref_types for m in self.methods.values()]): modules.setdefault(t.ident.module, set()) modules[t.ident.module].add(t.ident.package) @@ -671,7 +679,7 @@ def names(self) -> Set[str]: # Done; return the answer. return frozenset(answer) - def with_context(self, *, collisions: Set[str]) -> 'Service': + def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """Return a derivative of this service with the provided context. This method is used to address naming collisions. The returned diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index 6ae886519e0d..9b845c960ae5 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -14,7 +14,7 @@ import re -import pypandoc +import pypandoc # type: ignore from gapic.utils.lines import wrap diff --git a/packages/gapic-generator/mypy.ini b/packages/gapic-generator/mypy.ini new file mode 100644 index 000000000000..b3d7f6e18c49 --- /dev/null +++ b/packages/gapic-generator/mypy.ini @@ -0,0 +1,2 @@ +[mypy] +python_version = 3.6 diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index bb1cbf59269e..82cf4c781eba 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -16,7 +16,7 @@ import os import tempfile -import nox +import nox # type: ignore showcase_version = '0.2.0' @@ -141,3 +141,12 @@ def docs(session): session.run('rm', '-rf', 'docs/_build/') session.run('sphinx-build', '-W', '-b', 'html', '-d', 'docs/_build/doctrees', 'docs/', 'docs/_build/html/') + + +@nox.session(python='3.7') +def mypy(session): + """Perform typecheck analysis.""" + + session.install('mypy') + session.install('.') + session.run('mypy', 'gapic') diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d9acf3267f47..86e273e590b5 100644 --- a/packages/gapic-generator/setup.py 
+++ b/packages/gapic-generator/setup.py @@ -15,7 +15,7 @@ import io import os -from setuptools import find_packages, setup +from setuptools import find_packages, setup # type: ignore PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 7ef9e4610f0d..6f8fd0336228 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -94,6 +94,20 @@ def test_get_field_recursive(): assert outer.get_field('inner', 'one') == inner_fields[1] +def test_get_field_nested_not_found_error(): + # Create the inner message. + inner_field = make_field('zero') + inner = make_message('Inner', fields=(inner_field,), package='foo.v1') + + # Create the outer message, which contains an Inner as a field. + outer_field = make_field('inner', message=inner) + outer = make_message('Outer', fields=(outer_field,)) + + # Assert that a recusive retrieval fails. + with pytest.raises(KeyError): + assert outer.get_field('inner', 'zero', 'beyond') + + def test_get_field_nonterminal_repeated_error(): # Create the inner message. 
inner_fields = (make_field('zero'), make_field('one')) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 1b8d4fb07394..b5eb04b139fa 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -36,7 +36,7 @@ def test_service_host(): def test_service_no_host(): service = make_service() - assert service.host is None + assert not service.host def test_service_scopes(): From dc243880c9c2e6c0bdebaf3bb89c66c25bf7f518 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 4 Jun 2019 02:21:33 -0400 Subject: [PATCH 0123/1339] [feat] Add mypy checks to the generated output. (#131) Many things can not be typechecked yet due to Python 2 compatibility. We will come back to those as we move everything to Python 3.5+. We also do not yet have exported type-checking in proto-plus; I will make a subsequent PR to remove the `#type: ignore` from that import once proto-plus type checking is fully in place and exported. 
--- packages/gapic-generator/.circleci/config.yml | 31 ++++++++ .../gapic/generator/generator.py | 3 +- packages/gapic-generator/gapic/schema/imp.py | 2 + .../templates/$namespace/$name/__init__.py.j2 | 9 ++- .../templates/$namespace/$name/py.typed.j2 | 2 + .../$sub/services/$service/client.py.j2 | 73 +++++++++++-------- .../$service/transports/__init__.py.j2 | 5 +- .../services/$service/transports/base.py.j2 | 14 ++-- .../services/$service/transports/grpc.py.j2 | 12 +-- .../$sub/services/__init__.py.j2 | 1 + .../$name_$version/$sub/types/$proto.py.j2 | 2 +- .../$namespace/$name_$version/py.typed.j2 | 2 + .../gapic/templates/mypy.ini.j2 | 3 + .../gapic/templates/noxfile.py.j2 | 17 ++++- .../gapic/templates/setup.py.j2 | 5 +- packages/gapic-generator/noxfile.py | 39 ++++++++++ .../tests/unit/schema/test_imp.py | 5 ++ 17 files changed, 169 insertions(+), 56 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name/py.typed.j2 create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/$namespace/$name_$version/py.typed.j2 create mode 100644 packages/gapic-generator/gapic/templates/mypy.ini.j2 diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index a1a1fd801adb..d94a883d5709 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -26,12 +26,19 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase-mypy: + requires: + - mypy + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - showcase: requires: - docs - mypy - showcase-unit-3.6 - showcase-unit-3.7 + - showcase-mypy filters: tags: only: /^\d+\.\d+\.\d+$/ @@ -208,6 +215,30 @@ jobs: - run: name: Run unit tests. 
command: nox -s showcase_unit-3.7 + showcase-mypy: + docker: + - image: python:3.7-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Typecheck the generated output. + command: nox -s showcase_mypy unit-3.6: docker: - image: python:3.6-slim diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 634716715097..f51d4cb09edc 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -176,7 +176,8 @@ def _get_file(self, template_name: str, *, ) # Sanity check: Do not render empty files. 
- if utils.empty(cgr_file.content) and not fn.endswith('__init__.py'): + if (utils.empty(cgr_file.content) and + not fn.endswith(('py.typed', '__init__.py'))): return {} # Return the filename and content in a length-1 dictionary diff --git a/packages/gapic-generator/gapic/schema/imp.py b/packages/gapic-generator/gapic/schema/imp.py index c7a2c6182301..eb5d8ee83007 100644 --- a/packages/gapic-generator/gapic/schema/imp.py +++ b/packages/gapic-generator/gapic/schema/imp.py @@ -31,4 +31,6 @@ def __str__(self) -> str: answer = f"from {'.'.join(self.package)} {answer}" if self.alias: answer += f' as {self.alias}' + if self.module.endswith('_pb2'): + answer += ' # type: ignore' return answer diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index 53c1d431c79c..279cb2b464f9 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -3,13 +3,15 @@ {% block content %} {# Import subpackages. -#} {% for subpackage in api.subpackages.keys() -%} -from ..{{ api.naming.versioned_module_name }} import {{ subpackage }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }} import {{ subpackage }} {% endfor -%} {# Import services for this package. -#} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view -%} -from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }} import {{ service.name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.name }} {% endfor -%} {# Import messages from each proto. 
@@ -20,7 +22,8 @@ from ..{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_cas {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} {% for message in proto.messages.values()|sort(attribute='name') -%} -from ..{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} {% endfor %}{% endfor %} {# Define __all__. diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/py.typed.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/py.typed.j2 new file mode 100644 index 000000000000..58fdb544c225 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/py.typed.j2 @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The {{ api.naming.warehouse_package_name }} package uses inline types. 
diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 3390ddf5a42e..0db9080a5f82 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -1,13 +1,14 @@ {% extends '_base.py.j2' %} {% block content %} +from collections import OrderedDict +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources -from typing import Mapping, Optional, Sequence, Tuple, Type, Union -from google.api_core import exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} @@ -16,11 +17,42 @@ from google.auth import credentials {% endfor -%} {% endfor -%} {% endfilter %} -from .transports import _transport_registry -from .transports import {{ service.name }}Transport +from .transports.base import {{ service.name }}Transport +from .transports.grpc import {{ service.name }}GrpcTransport -class {{ service.name }}: +class {{ service.name }}Meta(type): + """Metaclass for the {{ service.name }} client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] + _transport_registry['grpc'] = {{ service.name }}GrpcTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[{{ service.name }}Transport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class {{ service.name }}(metaclass={{ service.name }}Meta): """{{ service.meta.doc|rst(width=72, indent=4) }}""" def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, @@ -50,7 +82,7 @@ class {{ service.name }}: 'provide its credentials directly.') self._transport = transport else: - Transport = self.get_transport_class(transport) + Transport = type(self).get_transport_class(transport) self._transport = Transport(credentials=credentials, host=host) {% for method in service.methods.values() -%} @@ -168,28 +200,7 @@ class {{ service.name }}: return response {%- endif %} {{ '\n' }} - {% endfor -%} - - @classmethod - def get_transport_class(cls, - label: str = None, - ) -> Type[{{ service.name }}Transport]: - """Return an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return _transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). 
- return next(iter(_transport_registry.values())) + {% endfor %} try: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 index 7ffc5cb98c90..470cde5d1969 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 @@ -1,14 +1,15 @@ {% extends '_base.py.j2' %} {% block content %} -import collections +from collections import OrderedDict +from typing import Dict, Type from .base import {{ service.name }}Transport from .grpc import {{ service.name }}GrpcTransport # Compile a registry of transports. -_transport_registry = collections.OrderedDict() +_transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] _transport_registry['grpc'] = {{ service.name }}GrpcTransport diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 index a3380cdc481e..694e0a16645f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 @@ -6,9 +6,9 @@ import typing from google import auth {%- if service.has_lro %} -from google.api_core import operations_v1 +from google.api_core import operations_v1 # type: ignore {%- endif %} -from google.auth import credentials +from google.auth import credentials # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} @@ -20,7 +20,7 @@ from google.auth import 
credentials class {{ service.name }}Transport(metaclass=abc.ABCMeta): """Abstract transport class for {{ service.name }}.""" - AUTH_SCOPES: typing.Tuple[str] = ( + AUTH_SCOPES = ( {%- for scope in service.oauth_scopes %} '{{ scope }}', {%- endfor %} @@ -63,10 +63,10 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {%- endif %} {%- for method in service.methods.values() %} - def {{ method.name|snake_case }}( - self, - request: {{ method.input.ident }}, - ) -> {{ method.output.ident }}: + @property + def {{ method.name|snake_case }}(self) -> typing.Callable[ + [{{ method.input.ident }}], + {{ method.output.ident }}]: raise NotImplementedError {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 index 3d90c774327d..429c3de58db0 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 @@ -1,15 +1,15 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Callable, Sequence, Tuple +from typing import Callable, Dict, Sequence, Tuple -from google.api_core import grpc_helpers +from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} -from google.api_core import operations_v1 +from google.api_core import operations_v1 # type: ignore {%- endif %} -from google.auth import credentials +from google.auth import credentials # type: ignore -import grpc +import grpc # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} @@ -57,7 +57,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Run the base constructor. 
super().__init__(host=host, credentials=credentials) - self._stubs = {} + self._stubs = {} # type: Dict[str, Callable] # If a channel was explicitly provided, set it. if channel: diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/__init__.py.j2 new file mode 100644 index 000000000000..9cee1e99950c --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/__init__.py.j2 @@ -0,0 +1 @@ +{% extends '_base.py.j2' %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index b9fc360b9a1d..97d61cba2124 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -3,7 +3,7 @@ {% block content -%} {% with p = proto.disambiguate('proto') %} {% if proto.messages|length or proto.all_enums|length -%} -import proto{% if p != 'proto' %} as {{ p }}{% endif -%} +import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} {% for import_ in proto.python_modules -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/py.typed.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/py.typed.j2 new file mode 100644 index 000000000000..58fdb544c225 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/py.typed.j2 @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The {{ api.naming.warehouse_package_name }} package uses inline types. 
diff --git a/packages/gapic-generator/gapic/templates/mypy.ini.j2 b/packages/gapic-generator/gapic/templates/mypy.ini.j2 new file mode 100644 index 000000000000..f23e6b533aad --- /dev/null +++ b/packages/gapic-generator/gapic/templates/mypy.ini.j2 @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.5 +namespace_packages = True diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index c774cc4e0a82..71f99a414481 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -3,7 +3,7 @@ {% block content %} import os -import nox +import nox # type: ignore @nox.session(python=['3.6', '3.7']) @@ -22,4 +22,19 @@ def unit(session): '--cov-report=html', os.path.join('tests', 'unit', '{{ api.naming.versioned_module_name }}'), ) + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy') + session.install('.') + session.run( + 'mypy', + {%- if api.naming.module_namespace %} + '{{ api.naming.module_namespace[0] }}', + {%- else %} + '{{ api.naming.versioned_module_name }}', + {%- endif %} + ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index dc1448bc3eef..a30c82394c83 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -1,10 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -import io -import os - -import setuptools +import setuptools # type: ignore setuptools.setup( diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 82cf4c781eba..047a989e09ba 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -130,6 +130,45 @@ def showcase_unit(session): ) +@nox.session(python='3.7') +def showcase_mypy(session): + """Perform typecheck analysis on the 
generated Showcase library.""" + + # Install pytest and gapic-generator-python + session.install('mypy') + session.install('.') + + # Install a client library for Showcase. + with tempfile.TemporaryDirectory() as tmp_dir: + # Download the Showcase descriptor. + session.run( + 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' + f'download/v{showcase_version}/' + f'gapic-showcase-{showcase_version}.desc', + '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + external=True, + silent=True, + ) + + # Write out a client library for Showcase. + session.run('protoc', + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + 'google/showcase/v1beta1/messaging.proto', + 'google/showcase/v1beta1/testing.proto', + external=True, + ) + + # Install the library. + session.chdir(tmp_dir) + session.install('-e', tmp_dir) + + # Run the tests. + session.run('mypy', 'google') + + @nox.session(python='3.6') def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/tests/unit/schema/test_imp.py b/packages/gapic-generator/tests/unit/schema/test_imp.py index a196c64120ab..c27e4b876901 100644 --- a/packages/gapic-generator/tests/unit/schema/test_imp.py +++ b/packages/gapic-generator/tests/unit/schema/test_imp.py @@ -30,6 +30,11 @@ def test_str_alias(): assert str(i) == 'from foo.bar import baz as bacon' +def test_str_untyped_pb2(): + i = imp.Import(package=('foo', 'bar'), module='baz_pb2', alias='bacon') + assert str(i) == 'from foo.bar import baz_pb2 as bacon # type: ignore' + + def test_str_eq(): i1 = imp.Import(package=('foo', 'bar'), module='baz') i2 = imp.Import(package=('foo', 'bar'), module='baz') From 52e8c7638ab7bfbbf03ed36fafba9648b576f85c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 12 Jun 2019 20:18:43 -0700 Subject: [PATCH 0124/1339] [docs] Update the docs to reflect greater project maturity. 
(#132) The docs still reference experimental configuration and temporary branches, but we have progressed past that. --- .../docs/api-configuration.rst | 177 ------------------ .../docs/getting-started/_example.rst | 11 +- .../docs/getting-started/_usage_intro.rst | 6 +- .../docs/getting-started/_verifying.rst | 2 +- packages/gapic-generator/docs/index.rst | 13 +- 5 files changed, 13 insertions(+), 196 deletions(-) delete mode 100644 packages/gapic-generator/docs/api-configuration.rst diff --git a/packages/gapic-generator/docs/api-configuration.rst b/packages/gapic-generator/docs/api-configuration.rst deleted file mode 100644 index 33d95462b95f..000000000000 --- a/packages/gapic-generator/docs/api-configuration.rst +++ /dev/null @@ -1,177 +0,0 @@ -.. _api-configuration: - -API Configuration ------------------ - -This code generator relies on some configuration not specified in many -published protocol buffers. - -.. warning:: - - In fact, this is intended to serve as a reference implementation for - proposed configuration, so as of this writing it is not published anywhere, - and is subject to change. - -This plugin *will* successfully publish a library on a valid protobuf API -even without any additional information set, but may require some -post-processing work by a human in this case before the resulting client -library will work. - -Look for values enclosed by ``<<<`` and ``>>>`` to quickly spot these. -As of this writing, these are the ``SERVICE_ADDRESS`` and ``OAUTH_SCOPES`` -constants defined in ``base.py`` files. - -Reading further assumes you are at least nominally familiar with protocol -buffers and their syntax. You may not be familiar with `options`_ yet; it is -recommended to read up on them before continuing. (As a note, no need to -learn about creating custom options; being able to set options that are -already defined is sufficient.) - -.. 
_options: https://developers.google.com/protocol-buffers/docs/proto3#options - - -Annotations and Imports -~~~~~~~~~~~~~~~~~~~~~~~ - -As mentioned above, this tool uses a interface specification that is -currently experimental. - -When specifying an annotation, your proto will need to import the file -where the annotation is defined. If you try to use an annotation without -importing the dependency proto, then ``protoc`` will give you an error. - -These protos live on the ``input-contract`` branch in the -`api-common-protos`_ repository. - -Your best bet is to likely clone this repository: - -.. code-block:: shell - - $ git clone https://github.com/googleapis/api-common-protos.git - $ cd api-common-protos - $ git checkout --track -b input-contract origin/input-contract - -Once this is done, you will need to specify the root of this repository -on disk as a ``--proto_path`` whenever invoking ``protoc``. - -.. _api-common-protos: https://github.com/googleapis/api-common-protos/tree/input-contract - - - -API Client Information -~~~~~~~~~~~~~~~~~~~~~~ - -The most important piece of information this plugin requires is information -about the client library itself: what should it be called, what is its proper -namespace, and so on. - -This is rolled up into a structure called ``Metadata``, and the annotation -is defined in `google/api/metadata.proto`_. - -The option may be defined as a full structure at the top level of the proto -file. It is recommended that this be declared other under ``option`` -directives, and above services or messages. - -This annotation is optional, and you may not need it. The generator will -infer a proper name, namespace and version from the ``package`` statement: - -.. 
code-block:: protobuf - - // This will come out to be: - // package namespace: ['Acme', 'Manufacturing'] - // package name: 'Anvils' - // version: 'v1' - package acme.manufacturing.anvils.v1; - -If the inferred package name is wrong for some reason, then the annotation -is important. - -.. code-block:: protobuf - - package acme.anvils.v1; - - // The namespace provided here will take precedence over the - // inferred one. - option (google.api.metadata) = { - "package_namespace": ["Acme", "Manufacturing"] - }; - -.. note:: - - The ``google.api.metadata`` annotation can be used to specify a - namespace or name, but the version *must* be specified in the proto package. - - -Service Information -~~~~~~~~~~~~~~~~~~~ - -In order to properly connect to an API, the client library needs to know -where the API service is running, as well as what (if anything) else is -required in order to properly connect. - -This plugin understands two options for this, which are also defined in -`google/api/metadata.proto`_. Rather than being options on -top level files, however, these are both options on `services`_. If an API -defines more than one service, these options do *not* need to match between -them. - -The first option is the **host** where the service can be reached: - -.. code-block:: protobuf - - service AnvilService { - option (google.api.default_host) = "anvils.acme.com" - } - -The second option is any oauth scopes which are needed. Google's auth -libraries (such as `google-auth`_ in Python, which code generated by -this plugin uses) expect that credentials declare what scopes they believe -they need, and the auth libraries do the right thing in the situation where -authorization is needed, access has been revoked, and so on. - -.. code-block:: protobuf - - service AnvilService { - option (google.api.oauth) = { - scopes: ["https://anvils.acme.com/auth/browse-anvils", - "https://anvils.acme.com/auth/drop-anvils"] - }; - } - -.. 
_services: https://developers.google.com/protocol-buffers/docs/proto3#services -.. _google/api/metadata.proto: https://github.com/googleapis/api-common-protos/blob/input-contract/google/api/metadata.proto -.. _google-auth: https://github.com/GoogleCloudPlatform/google-auth-library-python - - -Long-Running Operations -~~~~~~~~~~~~~~~~~~~~~~~ - -Occasionally, API requests may take a long time. In this case, APIs may -run a task in the background and provide the client with a token to -retrieve the result later. - -The ``google.longrunning.Operation`` message is intended for this purpose. -It is defined in `google/longrunning/operations.proto`_ and can be used -as the return type of an RPC. - -However, when doing this, the ultimate return type is lost. Therefore, -it is important to annotate the return type (and metadata type, if applicable) -so that client libraries are able to deserialize the message. - -.. code-block:: protobuf - - import "google/api/annotations.proto"; - - package acme.anvils.v1; - - service AnvilService { - rpc DeliverAnvil(DeliverAnvilRequest) - returns (google.longrunning.Operation) { - option (google.api.operation) = { - response_type: "acme.anvils.v1.DeliverAnvilResponse" - metadata_type: "acme.anvils.v1.DeliverAnvilMetadata" - }; - } - } - -.. _google/longrunning/operations.proto: https://github.com/googleapis/api-common-protos/blob/input-contract/google/longrunning/operations.proto diff --git a/packages/gapic-generator/docs/getting-started/_example.rst b/packages/gapic-generator/docs/getting-started/_example.rst index e769caa341db..9b1863bcc11a 100644 --- a/packages/gapic-generator/docs/getting-started/_example.rst +++ b/packages/gapic-generator/docs/getting-started/_example.rst @@ -2,19 +2,16 @@ If you want to experiment with an already-existing API, one example is available. (Reminder that this is still considered experimental, so apologies for this part being a bit strange.) 
-You need to clone the `googleapis`_ repository from GitHub, and change to -a special branch: +You need to clone the `googleapis`_ repository from GitHub: .. code-block:: shell $ git clone https://github.com/googleapis/googleapis.git - $ cd googleapis - $ git checkout --track -b input-contract origin/input-contract - $ cd .. -The API available as an example (thus far) is the `Google Cloud Vision`_ API, +It is possible to generate libraries for most (possibly all) APIs described +here. The API we use as an example is the `Google Cloud Vision`_ API, available in the ``google/cloud/vision/v1/`` subdirectory. This will be used for the remainder of the examples on this page. -.. _googleapis: https://github.com/googleapis/googleapis/tree/input-contract +.. _googleapis: https://github.com/googleapis/googleapis .. _Google Cloud Vision: https://cloud.google.com/vision/ diff --git a/packages/gapic-generator/docs/getting-started/_usage_intro.rst b/packages/gapic-generator/docs/getting-started/_usage_intro.rst index cad09dc6d3d3..70fba8f7ff2f 100644 --- a/packages/gapic-generator/docs/getting-started/_usage_intro.rst +++ b/packages/gapic-generator/docs/getting-started/_usage_intro.rst @@ -1,6 +1,6 @@ To use this plugin, you will need an API which is specified using protocol buffers. Additionally, this plugin makes some assumptions at the -margins according to `Google API design conventions`_, so following those -conventions is recommended. +margins according to Google API design conventions as described in +`AIPs`_, so following those conventions is recommended. -.. _Google API design conventions: https://cloud.google.com/apis/design/ +.. 
_AIPs: https://aip.dev diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index 37dcc482bb74..bc33ed8303c2 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -33,7 +33,7 @@ Here is a test script: # # If you wish, you can send `transport='grpc'` or `transport='http'` # to change which underlying transport layer is being used. - ia = vision.ImageAnnotator() + ia = vision.ImageAnnotator(host='vision.googleapis.com') # Send the request to the server and get the response. response = ia.batch_annotate_images({ diff --git a/packages/gapic-generator/docs/index.rst b/packages/gapic-generator/docs/index.rst index 9f05fc87f7d2..50bfcd5807e6 100644 --- a/packages/gapic-generator/docs/index.rst +++ b/packages/gapic-generator/docs/index.rst @@ -3,23 +3,20 @@ API Client Generator for Python A generator for protocol buffer described APIs for and in Python 3. -This program accepts an API specified in `protocol buffers`_ and generates +This tool is a client library generator that implements the +`client library generators`_ specification. + +It accepts an API specified in `protocol buffers`_ and generates a client library, which can be used to interact with that API. It is implemented as a plugin to ``protoc``, the protocol buffer compiler. -.. warning:: - - This tool is a proof of concept and is being iterated on rapidly. - Feedback is welcome, but please do not try to use this in some kind of - system where stability is an expectation. - .. _protocol buffers: https://developers.google.com/protocol-buffers/ +.. _client library generators: https://aip.dev/client-libraries .. 
toctree:: :maxdepth: 2 getting-started/index - api-configuration process templates status From f83ffe32317cb4de58937e1900b7ba952a59fda3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 13 Jun 2019 09:06:03 -0700 Subject: [PATCH 0125/1339] [chore] Version bumber bump to 0.10.0. (#133) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 86e273e590b5..f7f78241abbe 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.9.0', + version='0.10.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From df5f73f0868ba2ff3f177c433cbc034887333213 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 1 Jul 2019 14:10:13 -0700 Subject: [PATCH 0126/1339] Initial commit for samplegen and associated unit tests (#134) * Initial commit for samplegen and associated unit tests Add initial code (and unit tests) for validating and mutating parsed sampleconfig. Validates 'request' segments; transforms mutates 'request' to ease generation of client method parameters. Validates 'response' segments. Validation includes checking for improper use of reserved words as variable names, missing or unexpected keywords in blocks, and general malformation of the sampleconfig. Add initial jinja2 template macros and associated unit tests for sample generation. Generates 'request' setup block using values, input parameters, and file content. Generates 'response' handling blocks, using comment blocks, print statements, assignment statements, collection loops, and mapped loops. Loops are potentially recursive. Generates 'main' block and invocation. Many caveats apply: these are listed in the code with TODO. 
--- .../gapic/generator/generator.py | 36 +- .../gapic/samplegen/__init__.py | 21 + .../gapic/samplegen/samplegen.py | 412 ++++++++++++++++ .../gapic-generator/gapic/samplegen/utils.py | 43 ++ .../templates/examples/feature_fragments.j2 | 134 ++++++ .../tests/unit/generator/test_generator.py | 43 +- .../tests/unit/samplegen/test_samplegen.py | 454 ++++++++++++++++++ .../tests/unit/samplegen/test_template.py | 336 +++++++++++++ 8 files changed, 1442 insertions(+), 37 deletions(-) create mode 100644 packages/gapic-generator/gapic/samplegen/__init__.py create mode 100644 packages/gapic-generator/gapic/samplegen/samplegen.py create mode 100644 packages/gapic-generator/gapic/samplegen/utils.py create mode 100644 packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 create mode 100644 packages/gapic-generator/tests/unit/samplegen/test_samplegen.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/test_template.py diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index f51d4cb09edc..e8ce3a300ae8 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -40,11 +40,13 @@ class Generator: rendered. If this is not provided, the templates included with this application are used. """ + def __init__(self, opts: options.Options) -> None: # Create the jinja environment with which to render templates. self._env = jinja2.Environment( loader=jinja2.FileSystemLoader(searchpath=opts.templates), undefined=jinja2.StrictUndefined, + extensions=["jinja2.ext.do"], ) # Add filters which templates require. @@ -78,8 +80,8 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: # Append to the output files dictionary. output_files.update(self._render_template(template_name, - api_schema=api_schema, - )) + api_schema=api_schema, + )) # Return the CodeGeneratorResponse output. 
return CodeGeneratorResponse(file=[i for i in output_files.values()]) @@ -88,7 +90,7 @@ def _render_template( self, template_name: str, *, api_schema: api.API, - ) -> Dict[str, CodeGeneratorResponse.File]: + ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. Args: @@ -119,8 +121,8 @@ def _render_template( if '$sub' in template_name: for subpackage in api_schema.subpackages.values(): answer.update(self._render_template(template_name, - api_schema=subpackage, - )) + api_schema=subpackage, + )) skip_subpackages = True # If this template should be rendered once per proto, iterate over @@ -131,9 +133,9 @@ def _render_template( api_schema.subpackage_view): continue answer.update(self._get_file(template_name, - api_schema=api_schema, - proto=proto - )) + api_schema=api_schema, + proto=proto + )) return answer # If this template should be rendered once per service, iterate @@ -144,9 +146,9 @@ def _render_template( api_schema.subpackage_view): continue answer.update(self._get_file(template_name, - api_schema=api_schema, - service=service, - )) + api_schema=api_schema, + service=service, + )) return answer # This file is not iterating over anything else; return back @@ -155,14 +157,14 @@ def _render_template( return answer def _get_file(self, template_name: str, *, - api_schema=api.API, - **context: Mapping): + api_schema=api.API, + **context: Mapping): """Render a template to a protobuf plugin File object.""" # Determine the target filename. fn = self._get_filename(template_name, - api_schema=api_schema, - context=context, - ) + api_schema=api_schema, + context=context, + ) # Render the file contents. cgr_file = CodeGeneratorResponse.File( @@ -189,7 +191,7 @@ def _get_filename( template_name: str, *, api_schema: api.API, context: dict = None, - ) -> str: + ) -> str: """Return the appropriate output filename for this template. 
This entails running the template name through a series of diff --git a/packages/gapic-generator/gapic/samplegen/__init__.py b/packages/gapic-generator/gapic/samplegen/__init__.py new file mode 100644 index 000000000000..a793a79c5fef --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen/__init__.py @@ -0,0 +1,21 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gapic.samplegen import samplegen +from gapic.samplegen import utils + +__all__ = ( + 'samplegen', + 'utils', +) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py new file mode 100644 index 000000000000..11f1ea0de1a9 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -0,0 +1,412 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import itertools +import keyword +import re +import gapic.samplegen.utils + +from collections import (defaultdict, namedtuple) +from typing import (List, Mapping, Set) + +# Outstanding issues: +# * Neither license nor copyright are defined +# * In real sample configs, many variables are +# defined with an _implicit_ $resp variable. + +MIN_SCHEMA_VERSION = (1, 2, 0) + +VALID_CONFIG_TYPE = 'com.google.api.codegen.SampleConfigProto' + +# There are a couple of other names that should be reserved +# for sample variable assignments, e.g. 'client', but there are diminishing returns: +# the sample config author isn't a pathological adversary, they're a normal person +# who may make mistakes occasionally. +RESERVED_WORDS = frozenset(itertools.chain(keyword.kwlist, dir(__builtins__))) + +TEMPLATE_NAME = 'samplegen_template.j2' + +TransformedRequest = namedtuple("TransformedRequest", ["base", "body"]) + + +class SampleError(Exception): + pass + + +class ReservedVariableName(SampleError): + pass + + +class SchemaVersion(SampleError): + pass + + +class RpcMethodNotFound(SampleError): + pass + + +class InvalidConfigType(SampleError): + pass + + +class InvalidStatement(SampleError): + pass + + +class BadLoop(SampleError): + pass + + +class MismatchedFormatSpecifier(SampleError): + pass + + +class UndefinedVariableReference(SampleError): + pass + + +class RedefinedVariable(SampleError): + pass + + +class BadAssignment(SampleError): + pass + + +class InconsistentRequestName(SampleError): + pass + + +class InvalidRequestSetup(SampleError): + pass + + +class Validator: + """Class that validates samples. + + Contains methods that validate different segments of a sample and maintains + state that's relevant to validation across different segments. + """ + + COLL_KWORD = "collection" + VAR_KWORD = "variable" + MAP_KWORD = "map" + KEY_KWORD = "key" + VAL_KWORD = "value" + BODY_KWORD = "body" + + def __init__(self): + # The response ($resp) variable is special and guaranteed to exist. 
+ self.var_defs_: Set[str] = {"$resp"} + + # TODO: this will eventually need the method name and the proto file + # so that it can do the correct value transformation for enums. + def validate_and_transform_request(self, + request: List[Mapping[str, str]]) -> List[TransformedRequest]: + """Validates and transforms the "request" block from a sample config. + + In the initial request, each dict has a "field" key that maps to a dotted + variable name, e.g. clam.shell. + + The only required keys in each dict are "field" and value". + Optional keys are "input_parameter" and "value_is_file". + All values in the initial request are strings + except for the value for "value_is_file", which is a bool. + + The topmost dict of the return value has two keys: "base" and "body", + where "base" maps to a variable name, and "body" maps to a list of variable + assignment definitions. The only difference in the bottommost dicts + are that "field" maps only to the second part of a dotted variable name. + Other key/value combinations in the dict are unmodified for the time being. + + Note: gRPC API methods only take one parameter (ignoring client side streaming). + The reason that GAPIC client library API methods may take multiple parameters + is a workaround to provide idiomatic protobuf support within python. + The different 'bases' are really attributes for the singular request parameter. + + TODO: properly handle subfields, indexing, and so forth. + + TODO: Conduct module lookup and expansion for protobuf enums. + Requires proto/method/message descriptors. + TODO: Permit single level field/oneof requst parameters. + Requires proto/method/message descriptors. + TODO: Add/transform to list repeated element fields. + Requires proto/method/message descriptors. + + E.g. 
[{"field": "clam.shell", "value": "10 kg", "input_parameter": "shell"}, + {"field": "clam.pearls", "value": "3"}, + {"field": "squid.mantle", "value": "100 kg"}] + -> + [TransformedRequest("clam", + [{"field": "shell", "value": "10 kg", "input_parameter": "shell"}, + {"field": "pearls", "value": "3"}]), + TransformedRequest("squid", [{"field": "mantle", "value": "100 kg"}])] + + The transformation makes it easier to set up request parameters in jinja + because it doesn't have to engage in prefix detection, validation, + or aggregation logic. + + + Args: + request (list[dict{str:str}]): The request body from the sample config + + Returns: + list[dict{str:(str|list[dict{str:str}])}]: The transformed request block. + + Raises: + RedefinedVariable: If an "input_parameter" attempts to redefine a + previously defined variable. + ReservedVariableName: If an "input_parameter" value or a "base" value + is a reserved word. + InvalidRequestSetup: If a dict in the request lacks a "field" key + or the corresponding value is malformed. + """ + base_param_to_attrs: Mapping[str, + List[Mapping[str, str]]] = defaultdict(list) + for field_assignment in request: + field_assignment_copy = dict(field_assignment) + input_param = field_assignment_copy.get("input_parameter") + if input_param: + self._handle_lvalue(input_param) + + field = field_assignment_copy.get("field") + if not field: + raise InvalidRequestSetup( + "No field attribute found in request setup assignment: {}".format( + field_assignment_copy)) + + # TODO: properly handle top level fields + # E.g. + # + # -field: edition + # comment: The edition of the series. 
+ # value: '123' + # input_parameter: edition + m = re.match(r"^([a-zA-Z]\w*)\.([a-zA-Z]\w*)$", field) + if not m: + raise InvalidRequestSetup( + "Malformed request attribute description: {}".format(field)) + + base, attr = m.groups() + if base in RESERVED_WORDS: + raise ReservedVariableName( + "Tried to define '{}', which is a reserved name".format(base)) + + field_assignment_copy["field"] = attr + base_param_to_attrs[base].append(field_assignment_copy) + + return [TransformedRequest(base, body) + for base, body in base_param_to_attrs.items()] + + def validate_response(self, response): + """Validates a "response" block from a sample config. + + A full description of the response block is outside the scope of this code; + refer to the samplegen documentation. + + + Dispatches statements to sub-validators. + + Args: + response: list[dict{str:?}]: The structured data representing + the sample's response. + + Returns: + bool: True if response is valid. + + Raises: + InvalidStatement: If an unexpected key is found in a statement dict + or a statement dict has more than or less than one key. + """ + + for statement in response: + if len(statement) != 1: + raise InvalidStatement( + "Invalid statement: {}".format(statement)) + + keyword, body = next(iter(statement.items())) + validater = self.STATEMENT_DISPATCH_TABLE.get(keyword) + if not validater: + raise InvalidStatement("Invalid statement keyword: {}" + .format(keyword)) + + validater(self, body) + + def _handle_lvalue(self, lval): + """Conducts safety checks on an lvalue and adds it to the lexical scope. + + Raises: + ReservedVariableName: If an attempted lvalue is a reserved keyword. + """ + if lval in RESERVED_WORDS: + raise ReservedVariableName( + "Tried to define a variable with reserved name: {}".format(lval)) + + # Even though it's valid python to reassign variables to any rvalue, + # the samplegen spec prohibits this. 
+ if lval in self.var_defs_: + raise RedefinedVariable( + "Tried to redefine variable: {}".format(lval)) + + self.var_defs_.add(lval) + + def _validate_format(self, body: List[str]): + """Validates a format string and corresponding arguments. + + The number of format tokens in the string must equal the + number of arguments, and each argument must be a defined variable. + + TODO: the attributes of the variable must correspond to attributes + of the variable's type. + + Raises: + MismatchedFormatSpecifier: If the number of format string segments ("%s") in + a "print" or "comment" block does not equal the + size number of strings in the block minus 1. + """ + fmt_str = body[0] + num_prints = fmt_str.count("%s") + if num_prints != len(body) - 1: + raise MismatchedFormatSpecifier( + "Expected {} expresssions in format string but received {}" + .format(num_prints, len(body) - 1)) + + for expression in body[1:]: + var = expression.split(".")[0] + if var not in self.var_defs_: + raise UndefinedVariableReference("Reference to undefined variable: {}" + .format(var)) + + def _validate_define(self, body: str): + """"Validates 'define' statements. + + Adds the defined lvalue to the lexical scope. + Other statements can reference it. + + Raises: + BadAssignment: If a "define" statement is badly formed lexically. + UndefinedVariableReference: If an attempted rvalue base is a previously + undeclared variable. + """ + # TODO: Need to validate the attributes of the response + # based on the method return type. + # TODO: Need to check the defined variables + # if the rhs references a non-response variable. + # TODO: Need to rework the regex to allow for subfields, + # indexing, and so forth. + # + # Note: really checking for safety would be equivalent to + # re-implementing the python interpreter. 
+ m = re.match(r"^([a-zA-Z]\w*)=([^=]+)$", body) + if not m: + raise BadAssignment("Bad assignment statement: {}".format(body)) + + lval, rval = m.groups() + self._handle_lvalue(lval) + + rval_base = rval.split(".")[0] + if not rval_base in self.var_defs_: + raise UndefinedVariableReference("Reference to undefined variable: {}" + .format(rval_base)) + + def _validate_loop(self, body): + """Validates loop headers and statement bodies. + + Checks for correctly defined loop constructs, + either 'collection' loops with a collection and iteration variable, + or 'map' loops with a map and at least one of 'key' or 'value'. + Loops also have a 'body', which contains statments that may + use the variables from the header. + + The body statements are validated recursively. + The iteration variable(s) is/are added to the lexical scope + before validating the statements in the loop body. + + Raises: + UndefinedVariableReference: If an attempted rvalue base is a previously + undeclared variable. + BadLoop: If a "loop" segments has unexpected keywords + or keyword combinatations. + + """ + segments = set(body.keys()) + map_args = {self.MAP_KWORD, self.BODY_KWORD} + + # Even though it's valid python to use a variable outside of the lexical + # scope it was defined in, + # + # i.e. + # for m in molluscs: + # handle(m) + # print("Last mollusc: {}".format(m)) + # + # is allowed, the samplegen spec requires that errors are raised + # if strict lexical scoping is violated. + previous_defs = set(self.var_defs_) + + if {self.COLL_KWORD, self.VAR_KWORD, self.BODY_KWORD} == segments: + collection_name = body[self.COLL_KWORD].split(".")[0] + # TODO: Once proto info is being passed in, validate the + # [1:] in the collection name. 
+ # TODO: resolve the implicit $resp dilemma + # if collection_name.startswith("."): + # collection_name = "$resp" + collection_name + if collection_name not in self.var_defs_: + raise UndefinedVariableReference("Reference to undefined variable: {}" + .format(collection_name)) + + var = body[self.VAR_KWORD] + self._handle_lvalue(var) + + elif map_args <= segments: + segments -= map_args + segments -= {self.KEY_KWORD, self.VAL_KWORD} + if segments: + raise BadLoop("Unexpected keywords in loop statement: {}" + .format(segments)) + + map_name_base = body[self.MAP_KWORD].split(".")[0] + if map_name_base not in self.var_defs_: + raise UndefinedVariableReference("Reference to undefined variable: {}" + .format(map_name_base)) + + key = body.get(self.KEY_KWORD) + if key: + self._handle_lvalue(key) + + val = body.get(self.VAL_KWORD) + if val: + self._handle_lvalue(val) + + if not (key or val): + raise BadLoop( + "Need at least one of 'key' or 'value' in a map loop") + + else: + raise BadLoop("Unexpected loop form: {}".format(segments)) + + self.validate_response(body[self.BODY_KWORD]) + # Restore the previous lexical scope. + # This is stricter than python scope rules + # because the samplegen spec mandates it. + self.var_defs_ = previous_defs + + # Add new statement keywords to this table. + # TODO: add write_file validator and entry (and tests). + STATEMENT_DISPATCH_TABLE = { + "define": _validate_define, + "print": _validate_format, + "comment": _validate_format, + "loop": _validate_loop, + } diff --git a/packages/gapic-generator/gapic/samplegen/utils.py b/packages/gapic-generator/gapic/samplegen/utils.py new file mode 100644 index 000000000000..4df003a89e75 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen/utils.py @@ -0,0 +1,43 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Module containing miscellaneous utilities +that will eventually move somewhere else (probably).""" + +from enum import Enum, auto + + +class CallingForm(Enum): + Request = auto() + RequestPaged = auto() + LongRunningRequestAsync = auto() + RequestStreamingClient = auto() + RequestStreamingServer = auto() + RequestStreamingBidi = auto() + RequestPagedAll = auto() + LongRunningPromise = auto() + + @classmethod + def method_default(cls, m): + if m.lro: + return cls.LongRunningRequestAsync + if m.paged_result_field: + return cls.RequestPagedAll + if m.client_streaming: + return (cls.RequestStreamingBidi if m.server_streaming else + cls.RequestStreamingClient) + if m.server_streaming: + return cls.RequestStreamingServer + + return cls.Request diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 new file mode 100644 index 000000000000..679812d809e7 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -0,0 +1,134 @@ +{# + # Copyright (C) 2019 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ # See the License for the specific language governing permissions and + # limitations under the License. + #} + +{# +A careful reader may comment that there is duplication of effort +between the python verification step and the dispatch/rendering here. +There is a little, but not enough for it to be important because +1) Other python artifacts (client libraries, unit tests, and so forth) + are generated using templates, so doing the same for generated samples is consistent. +2) Using jinja for anything requiring real logic or data structures is a bad idea. +#} + +{# response handling macros #} + +{% macro renderPrint(elts) %} + {# First elment is a format string, remaining elements are the format string parameters #} + {# Validating that the number of format params equals #} + {# the number of remaining params is handled by real python code #} + {% with fmtStr = ('\"' + elts[0] + '\"') |replace("%s", "{}") %} +print({{ ([fmtStr] + elts[1:])|join(', ') }}) + {% endwith -%} +{% endmacro %} + +{% macro renderComment(elts) %} + {# First elment is a format string, remaining elements are the format string parameters #} + {# Validating that the number of format params equals #} + {# the number of remaining params is handled by real python code #} + {% with fmtStr = elts[0] %} +# {{ fmtStr|format(*elts[1:]) }} + {% endwith %} +{% endmacro %} + +{% macro renderDefine(statement) %} +{# Python code already verified the form, no need to check #} +{% with lvalue, rvalue = statement.split("=") %} +{{ lvalue }} = {{ rvalue }} +{% endwith %} +{% endmacro %} + +{% macro renderCollectionLoop(statement) %} +for {{ statement.variable }} in {{ statement.collection }}: + {{ dispatchStatement(statement.body) -}} +{% endmacro %} + +{% macro renderMapLoop(statement) %} + {# At least one of key and value exist; validated in python #} +{% if "key" not in statement %} +for {{ statement.value }} in {{ statement.map }}.values(): +{% elif "value" not in statement %} +for {{ statement.key }} in 
{{ statement.map }}.keys(): +{% else %} +for {{statement.key }}, {{ statement.value }} in {{ statement.map }}.items(): +{% endif %} + {{ dispatchStatement(statement.body) -}} +{% endmacro %} + +{% macro dispatchStatement(statement) %} +{# Each statement is a dict with a single key/value pair #} +{% if "print" in statement %} +{{ renderPrint(statement["print"]) -}} +{% elif "comment" in statement %} +{{ renderComment(statement["comment"]) -}} +{% elif "loop" in statement %} + {% with loop = statement["loop"] %} + {% if "collection" in loop %} +{{ renderCollectionLoop(loop) -}} + {% else %} +{{ renderMapLoop(loop) -}} + {% endif %} + {% endwith %} +{% endif %} +{% endmacro %} + +{% macro renderRequestAttr(baseName, attr) %} +{# Note: python code will have manipulated the value #} +{# to be the correct enum from the right module, if necessary. #} +{# Python is also responsible for verifying that each input parameter is unique,#} +{# no parameter is a reserved keyword #} + {% if "input_parameter" in attr %} +# {{ attr.input_parameter }} = "{{ attr.value }}" + {% if "value_is_file" in attr and attr.value_is_file %} +with open({{ attr.input_parameter }}, "rb") as f: + {{ baseName }}["{{ attr.field }}"] = f.read() + {% else %} +{{ baseName }}["{{ attr.field }}"] = {{ attr.input_parameter }} + {% endif %} + {% else %} +{{ baseName }}["{{ attr.field }}"] = {{ attr.value }} + {% endif %} +{% endmacro %} + +{% macro renderRequest(request) %} + {% for parameterBlock in request %} +{{ parameterBlock.base }} = {} + {% for attr in parameterBlock.body %} +{{ renderRequestAttr(parameterBlock.base, attr) }} + {% endfor %} + {% endfor %} +{% endmacro %} + +{% macro renderMainBlock(methodName, requestBlock) %} +def main(): + import argparse + + parser = argparse.ArgumentParser() +{% with arg_list = [] %} +{% for attr in requestBlock if "input_parameter" in attr %} + parser.add_argument("--{{ attr.input_parameter }}", + type=str, + default="{{ attr.value }}") +{% do 
arg_list.append("args." + attr.input_parameter) -%} +{% endfor %} + args = parser.parse_args() + + sample_{{ methodName|snake_case }}({{ arg_list|join(", ") }}) + + +if __name__ == "__main__": + main() +{% endwith %} +{% endmacro %} diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index a7d1dbc1ae18..96482ea95d88 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -83,7 +83,9 @@ def test_get_response_fails_invalid_file_paths(): lt.return_value = ['foo/bar/$service/$proto/baz.py.j2'] with pytest.raises(ValueError) as ex: g.get_response(api_schema=make_api()) - assert '$proto' in str(ex) and '$service' in str(ex) + + ex_str = str(ex.value) + assert '$proto' in ex_str and '$service' in ex_str def test_get_response_enumerates_services(): @@ -163,24 +165,25 @@ def test_get_filename(): g = make_generator() template_name = '$namespace/$name_$version/foo.py.j2' assert g._get_filename(template_name, - api_schema=make_api( - naming=make_naming(namespace=(), name='Spam', version='v2'), - ) - ) == 'spam_v2/foo.py' + api_schema=make_api( + naming=make_naming( + namespace=(), name='Spam', version='v2'), + ) + ) == 'spam_v2/foo.py' def test_get_filename_with_namespace(): g = make_generator() template_name = '$namespace/$name_$version/foo.py.j2' assert g._get_filename(template_name, - api_schema=make_api( - naming=make_naming( - name='Spam', - namespace=('Ham', 'Bacon'), - version='v2', - ), - ), - ) == 'ham/bacon/spam_v2/foo.py' + api_schema=make_api( + naming=make_naming( + name='Spam', + namespace=('Ham', 'Bacon'), + version='v2', + ), + ), + ) == 'ham/bacon/spam_v2/foo.py' def test_get_filename_with_service(): @@ -248,15 +251,15 @@ def make_generator(opts_str: str = '') -> generator.Generator: def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool = True, 
prior_protos: Mapping = None, - naming: naming.Naming = None, - ) -> api.Proto: + file_to_generate: bool = True, prior_protos: Mapping = None, + naming: naming.Naming = None, + ) -> api.Proto: prior_protos = prior_protos or {} return api._ProtoBuilder(file_pb, - file_to_generate=file_to_generate, - naming=naming or make_naming(), - prior_protos=prior_protos, - ).proto + file_to_generate=file_to_generate, + naming=naming or make_naming(), + prior_protos=prior_protos, + ).proto def make_api(*protos, naming: naming.Naming = None, **kwargs) -> api.API: diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py new file mode 100644 index 000000000000..0e78e76e1fbf --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -0,0 +1,454 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from collections import namedtuple + +import gapic.samplegen.samplegen as samplegen +import gapic.samplegen.utils as utils + +# validate_response tests + + +def test_define(): + define = {"define": "squid=$resp"} + samplegen.Validator().validate_response([define]) + + +def test_define_undefined_var(): + define = {"define": "squid=humboldt"} + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response([define]) + + +def test_define_reserved_varname(): + define = {"define": "class=$resp"} + with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_response([define]) + + +def test_define_add_var(): + samplegen.Validator().validate_response([{"define": "squid=$resp"}, + {"define": "name=squid.name"}]) + + +def test_define_bad_form(): + define = {"define": "mollusc=$resp.squid=$resp.clam"} + with pytest.raises(samplegen.BadAssignment): + samplegen.Validator().validate_response([define]) + + +def test_define_redefinition(): + statements = [{"define": "molluscs=$resp.molluscs"}, + {"define": "molluscs=$resp.molluscs"}] + with pytest.raises(samplegen.RedefinedVariable): + samplegen.Validator().validate_response(statements) + + +def test_define_input_param(): + validator = samplegen.Validator() + validator.validate_and_transform_request([{"field": "squid.mantle_length", + "value": "100 cm", + "input_parameter": "mantle_length"}]) + validator.validate_response([{"define": "length=mantle_length"}]) + + +def test_define_input_param_redefinition(): + validator = samplegen.Validator() + validator.validate_and_transform_request([{"field": "squid.mantle_length", + "value": "100 cm", + "input_parameter": "mantle_length"}]) + with pytest.raises(samplegen.RedefinedVariable): + validator.validate_response( + [{"define": "mantle_length=mantle_length"}]) + + +def test_print_basic(): + print_statement = {"print": ["This is a squid"]} + samplegen.Validator().validate_response([print_statement]) + + +def 
test_print_fmt_str(): + print_statement = {"print": ["This is a squid named %s", "$resp.name"]} + samplegen.Validator().validate_response([print_statement]) + + +def test_print_fmt_mismatch(): + print_statement = {"print": ["This is a squid named %s"]} + with pytest.raises(samplegen.MismatchedFormatSpecifier): + samplegen.Validator().validate_response([print_statement]) + + +def test_print_fmt_mismatch2(): + print_statement = {"print": ["This is a squid", "$resp.name"]} + with pytest.raises(samplegen.MismatchedFormatSpecifier): + samplegen.Validator().validate_response([print_statement]) + + +def test_print_undefined_var(): + print_statement = {"print": ["This mollusc is a %s", "mollusc.type"]} + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response([print_statement]) + + +def test_comment(): + comment = {"comment": ["This is a mollusc"]} + samplegen.Validator().validate_response([comment]) + + +def test_comment_fmt_str(): + comment = {"comment": ["This is a mollusc of class %s", "$resp.class"]} + samplegen.Validator().validate_response([comment]) + + +def test_comment_fmt_undefined_var(): + comment = {"comment": ["This is a mollusc of class %s", "cephalopod"]} + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response([comment]) + + +def test_comment_fmt_mismatch(): + comment = {"comment": ["This is a mollusc of class %s"]} + with pytest.raises(samplegen.MismatchedFormatSpecifier): + samplegen.Validator().validate_response([comment]) + + +def test_comment_fmt_mismatch2(): + comment = {"comment": ["This is a mollusc of class ", "$resp.class"]} + with pytest.raises(samplegen.MismatchedFormatSpecifier): + samplegen.Validator().validate_response([comment]) + + +def test_loop_collection(): + loop = {"loop": {"collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": + ["Mollusc of class: %s", "m.class"]}]}} + samplegen.Validator().validate_response([loop]) + + +def 
test_loop_collection_redefinition(): + statements = [{"define": "m=$resp.molluscs"}, + {"loop": {"collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["Mollusc of class: %s", + "m.class"]}]}}] + with pytest.raises(samplegen.RedefinedVariable): + samplegen.Validator().validate_response(statements) + + +def test_loop_undefined_collection(): + loop = {"loop": {"collection": "squid", + "variable": "s", + "body": [{"print": + ["Squid: %s", "s"]}]}} + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response([loop]) + + +def test_loop_collection_extra_kword(): + loop = {"loop": {"collection": "$resp.molluscs", + "squid": "$resp.squids", + "variable": "m", + "body": [{"print": + ["Mollusc of class: %s", "m.class"]}]}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_collection_missing_kword(): + loop = {"loop": {"collection": "$resp.molluscs", + "body": [{"print": + ["Mollusc of class: %s", "m.class"]}]}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_collection_missing_kword2(): + loop = {"loop": {"collection": "$resp.molluscs", + "body": [{"print": + ["Mollusc: %s", "m.class"]}]}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_collection_missing_kword3(): + loop = {"loop": {"collection": "$resp.molluscs", + "variable": "r"}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_collection_reserved_loop_var(): + loop = {"loop": {"collection": "$resp.molluscs", + "variable": "class", + "body": [{"print": + ["Mollusc: %s", "class.name"]}]}} + with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map(): + loop = {"loop": {"map": "$resp.molluscs", + "key": "cls", + "value": "mollusc", + "body": [{"print": ["A 
%s is a %s", "mollusc", "cls"]}]}} + samplegen.Validator().validate_response([loop]) + + +def test_collection_loop_lexical_scope_variable(): + statements = [{"loop": {"collection": "$resp.molluscs", + "variable": "m", + "body": [{"define": "squid=m"}]}}, + {"define": "cephalopod=m"}] + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response(statements) + + +def test_collection_loop_lexical_scope_inline(): + statements = [{"loop": {"collection": "$resp.molluscs", + "variable": "m", + "body": [{"define": "squid=m"}]}}, + {"define": "cephalopod=squid"}] + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response(statements) + + +def test_map_loop_lexical_scope_key(): + statements = [{"loop": {"map": "$resp.molluscs", + "key": "cls", + "value": "order", + "body": [{"define": "tmp=cls"}]}}, + {"define": "last_cls=cls"}] + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response(statements) + + +def test_map_loop_lexical_scope_value(): + statements = [{"loop": {"map": "$resp.molluscs", + "key": "cls", + "value": "order", + "body": [{"define": "tmp=order"}]}}, + {"define": "last_order=order"}] + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response(statements) + + +def test_map_loop_lexical_scope_inline(): + statements = [{"loop": {"map": "$resp.molluscs", + "key": "cls", + "value": "order", + "body": [{"define": "tmp=order"}]}}, + {"define": "last_order=tmp"}] + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response(statements) + + +def test_loop_map_reserved_key(): + loop = {"loop": {"map": "$resp.molluscs", + "key": "class", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "class"]}]}} + with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_reserved_val(): + 
loop = {"loop": {"map": "$resp.molluscs", + "key": "m", + "value": "class", + "body": [{"print": ["A %s is a %s", "m", "class"]}]}} + with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_undefined(): + loop = {"loop": {"map": "molluscs", + "key": "name", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "name"]}]}} + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_no_key(): + loop = {"loop": {"map": "$resp.molluscs", + "value": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}]}} + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_no_value(): + loop = {"loop": {"map": "$resp.molluscs", + "key": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}]}} + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_no_key_or_value(): + loop = {"loop": {"map": "$resp.molluscs", + "body": [{"print": ["Dead loop"]}]}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_no_map(): + loop = {"loop": {"key": "name", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "name"]}]}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_no_body(): + loop = {"loop": {"map": "$resp.molluscs", + "key": "name", + "value": "mollusc"}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_extra_kword(): + loop = {"loop": {"map": "$resp.molluscs", + "key": "name", + "value": "mollusc", + "phylum": "$resp.phylum", + "body": [{"print": ["A %s is a %s", "mollusc", "name"]}]}} + with pytest.raises(samplegen.BadLoop): + samplegen.Validator().validate_response([loop]) + + +def test_loop_map_redefined_key(): + statements = [{"define": "mollusc=$resp.molluscs"}, + {"loop": 
{"map": "$resp.molluscs", + "key": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}]}}] + with pytest.raises(samplegen.RedefinedVariable): + samplegen.Validator().validate_response(statements) + + +def test_loop_map_redefined_value(): + statements = [{"define": "mollusc=$resp.molluscs"}, + {"loop": {"map": "$resp.molluscs", + "value": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}]}}] + with pytest.raises(samplegen.RedefinedVariable): + samplegen.Validator().validate_response(statements) + + +def test_invalid_statement(): + statement = {"print": ["Name"], "comment": ["Value"]} + with pytest.raises(samplegen.InvalidStatement): + samplegen.Validator().validate_response([statement]) + + +def test_invalid_statement2(): + statement = {"squidify": ["Statement body"]} + with pytest.raises(samplegen.InvalidStatement): + samplegen.Validator().validate_response([statement]) + + +# validate_and_transform_request tests +def test_validate_request_basic(): + assert samplegen.Validator().validate_and_transform_request( + [{"field": "squid.mantle_length", + "value": "100 cm"}, + {"field": "squid.mantle_mass", + "value": "10 kg"}]) == [ + samplegen.TransformedRequest("squid", + [{"field": "mantle_length", + "value": "100 cm"}, + {"field": "mantle_mass", + "value": "10 kg"}])] + + +def test_validate_request_no_field_parameter(): + with pytest.raises(samplegen.InvalidRequestSetup): + samplegen.Validator().validate_and_transform_request( + [{"squid": "humboldt"}]) + + +def test_validate_request_malformed_field_attr(): + with pytest.raises(samplegen.InvalidRequestSetup): + samplegen.Validator().validate_and_transform_request( + [{"field": "squid"}]) + + +def test_validate_request_multiple_arguments(): + assert samplegen.Validator().validate_and_transform_request( + [{"field": "squid.mantle_length", + "value": "100 cm", + "value_is_file": True}, + {"field": "clam.shell_mass", + "value": "100 kg", + "comment": "Clams can be large"}]) == [ + 
samplegen.TransformedRequest("squid", + [{"field": "mantle_length", + "value": "100 cm", + "value_is_file": True}]), + samplegen.TransformedRequest("clam", + [{"field": "shell_mass", + "value": "100 kg", + "comment": "Clams can be large"}])] + + +def test_validate_request_reserved_request_name(): + with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_and_transform_request( + [{"field": "class.order", "value": "coleoidea"}]) + + +def test_validate_request_duplicate_input_param(): + with pytest.raises(samplegen.RedefinedVariable): + samplegen.Validator().validate_and_transform_request( + [{"field": "squid.mantle_mass", + "value": "10 kg", + "input_parameter": "mantle_mass"}, + {"field": "clam.mantle_mass", + "value": "1 kg", + "input_parameter": "mantle_mass"}]) + + +def test_validate_request_reserved_input_param(): + with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_and_transform_request( + [{"field": "mollusc.class", + "value": "cephalopoda", + "input_parameter": "class"}]) + + +def test_validate_request_calling_form(): + DummyMethod = namedtuple("DummyMethod", + ["lro", + "paged_result_field", + "client_streaming", + "server_streaming"]) + + assert utils.CallingForm.method_default(DummyMethod( + True, False, False, False)) == utils.CallingForm.LongRunningRequestAsync + + assert utils.CallingForm.method_default(DummyMethod( + False, True, False, False)) == utils.CallingForm.RequestPagedAll + + assert utils.CallingForm.method_default(DummyMethod( + False, False, True, False)) == utils.CallingForm.RequestStreamingClient + + assert utils.CallingForm.method_default(DummyMethod( + False, False, False, True)) == utils.CallingForm.RequestStreamingServer + + assert utils.CallingForm.method_default(DummyMethod( + False, False, False, False)) == utils.CallingForm.Request + + assert utils.CallingForm.method_default(DummyMethod( + False, False, True, True)) == utils.CallingForm.RequestStreamingBidi diff --git 
a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py new file mode 100644 index 000000000000..5fd84dc33a20 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -0,0 +1,336 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import jinja2 +import os.path as path +import gapic.samplegen.samplegen as samplegen +import gapic.utils as utils + +from textwrap import dedent + + +def check_template(template_fragment, expected_output): + # Making a new environment for every unit test seems wasteful, + # but the obvious alternative (make env an instance attribute + # and passing a FunctionLoader whose load function returns + # a constantly reassigned string attribute) isn't any faster + # and is less clear. 
+ env = jinja2.Environment( + loader=jinja2.ChoiceLoader( + [jinja2.FileSystemLoader( + searchpath=path.realpath(path.join(path.dirname(__file__), + "..", "..", "..", + "gapic", "templates", "examples"))), + jinja2.DictLoader( + {"template_fragment": dedent(template_fragment)}), + ]), + + undefined=jinja2.StrictUndefined, + extensions=["jinja2.ext.do"], + trim_blocks=True, lstrip_blocks=True + ) + + env.filters['snake_case'] = utils.to_snake_case + + template = env.get_template("template_fragment") + text = template.render() + assert text == dedent(expected_output) + + +def test_render_attr_value(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderRequestAttr("mollusc", + {"field": "order", + "value": "Molluscs.Cephalopoda.Coleoidea"}) }} + ''', + '\nmollusc["order"] = Molluscs.Cephalopoda.Coleoidea\n' + ) + + +def test_render_attr_input_parameter(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderRequestAttr("squid", {"field": "species", + "value": "Humboldt", + "input_parameter": "species"}) }} + ''', + '\n# species = "Humboldt"\nsquid["species"] = species\n') + + +def test_render_attr_file(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderRequestAttr("classify_mollusc_request", + {"field": "mollusc_video", + "value": "path/to/mollusc/video.mkv", + "input_parameter" : "mollusc_video_path", + "value_is_file": True}) }} + ''', + ''' + # mollusc_video_path = "path/to/mollusc/video.mkv" + with open(mollusc_video_path, "rb") as f: + classify_mollusc_request["mollusc_video"] = f.read() + ''') + + +def test_render_request_basic(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderRequest([{"base": "cephalopod", + "body": [{"field": "mantle_mass", + "value": "10 kg", + "input_parameter": "cephalopod_mass"}, + {"field": "photo", + "value": "path/to/cephalopod/photo.jpg", + "input_parameter": "photo_path", + 
"value_is_file": True}, + {"field": "order", + "value": "Molluscs.Cephalopoda.Coleoidea"}, ]}, + {"base": "gastropod", + "body": [{"field": "mantle_mass", + "value": "1 kg", + "input_parameter": "gastropod_mass"}, + {"field": "order", + "value": "Molluscs.Gastropoda.Pulmonata"}, + {"field": "movie", + "value": "path/to/gastropod/movie.mkv", + "input_parameter": "movie_path", + "value_is_file": True}]}, ]) }} + ''', + ''' + cephalopod = {} + # cephalopod_mass = "10 kg" + cephalopod["mantle_mass"] = cephalopod_mass + + # photo_path = "path/to/cephalopod/photo.jpg" + with open(photo_path, "rb") as f: + cephalopod["photo"] = f.read() + + cephalopod["order"] = Molluscs.Cephalopoda.Coleoidea + + gastropod = {} + # gastropod_mass = "1 kg" + gastropod["mantle_mass"] = gastropod_mass + + gastropod["order"] = Molluscs.Gastropoda.Pulmonata + + # movie_path = "path/to/gastropod/movie.mkv" + with open(movie_path, "rb") as f: + gastropod["movie"] = f.read() + + ''' + ) + + +def test_render_print(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderPrint(["Mollusc"]) }} + ''', + '\nprint("Mollusc")\n' + ) + + +def test_render_print_args(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderPrint(["Molluscs: %s, %s, %s", "squid", "clam", "whelk"]) }} + ''', + '\nprint("Molluscs: {}, {}, {}", squid, clam, whelk)\n' + ) + + +def test_render_comment(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderComment(["Mollusc"]) }} + ''', + '\n# Mollusc\n' + ) + + +def test_render_comment_args(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderComment(["Molluscs: %s, %s, %s", "squid", "clam", "whelk"]) }} + ''', + '\n# Molluscs: squid, clam, whelk\n' + ) + + +def test_define(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderDefine("squid=humboldt") }} + ''', + '\nsquid = humboldt\n' + ) + + +def 
test_dispatch_print(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatchStatement({"print" : ["Squid"] }) }} +''', + '\nprint("Squid")\n' + ) + + +def test_dispatch_comment(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatchStatement({"comment" : ["Squid"] }) }} + ''', + '\n# Squid\n' + ) + + +def test_collection_loop(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderCollectionLoop({"collection": "molluscs", + "variable": "m", + "body": {"print": ["Mollusc: %s", "m"]}}) }}''', + ''' + for m in molluscs: + print("Mollusc: {}", m) + ''' + ) + + +def test_dispatch_collection_loop(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatchStatement({"loop": {"collection": "molluscs", + "variable": "m", + "body": {"print": ["Mollusc: %s", "m"]}}}) }}''', + ''' + for m in molluscs: + print("Mollusc: {}", m) + ''' + ) + + +def test_map_loop(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMapLoop({"map": "molluscs", + "key":"cls", + "value":"example", + "body": {"print": ["A %s is a %s", "example", "cls"] }}) + }}''', + ''' + for cls, example in molluscs.items(): + print("A {} is a {}", example, cls) + ''' + ) + + +def test_map_loop_no_key(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMapLoop({"map": "molluscs", + "value":"example", + "body": {"print": ["A %s is a mollusc", "example"] }}) + }}''', + ''' + for example in molluscs.values(): + print("A {} is a mollusc", example) + ''' + ) + + +def test_map_loop_no_value(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMapLoop({"map": "molluscs", + "key":"cls", + "body": {"print": ["A %s is a mollusc", "cls"] }}) + }}''', + ''' + for cls in molluscs.keys(): + print("A {} is a mollusc", cls) + ''' + ) + + +def test_dispatch_map_loop(): + 
check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatchStatement({"loop":{"map": "molluscs", + "key":"cls", + "value":"example", + "body": { + "print": ["A %s is a %s", "example", "cls"] }}}) + }} + ''', + ''' + for cls, example in molluscs.items(): + print("A {} is a {}", example, cls) + ''' + ) + + +def test_main_block(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMainBlock("ListMolluscs", [{"field": "list_molluscs.order", + "value": "coleoidea", + "input_parameter": "order"}, + {"field ": "list_molluscs.mass", + "value": "60kg", + "input_parameter": "mass"}, + {"field": "list_molluscs.zone", + "value": "MESOPELAGIC"},]) }} + ''', + ''' + def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--order", + type=str, + default="coleoidea") + parser.add_argument("--mass", + type=str, + default="60kg") + args = parser.parse_args() + + sample_list_molluscs(args.order, args.mass) + + + if __name__ == "__main__": + main() + ''' + ) From 28f463da105eff93ecb30fad10eba03b7c4c783e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 3 Jul 2019 11:30:58 -0700 Subject: [PATCH 0127/1339] Small fix to docker-entrypoint.sh (#136) --- packages/gapic-generator/docker-entrypoint.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/docker-entrypoint.sh b/packages/gapic-generator/docker-entrypoint.sh index 6cef2957a529..a44d59eaa6d0 100755 --- a/packages/gapic-generator/docker-entrypoint.sh +++ b/packages/gapic-generator/docker-entrypoint.sh @@ -28,7 +28,7 @@ while [ -n "$1" ]; do if [[ $1 == --python-gapic-* ]]; then PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" shift 2 - else if [[ $1 == --gapic-* ]]; then + elif [[ $1 == --gapic-* ]]; then PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" shift 2 else From 74180b13ff7dcc23a7dcfa63f2a327670ed86704 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 11 Jul 2019 11:29:22 -0700 Subject: [PATCH 
0128/1339] Initial commit for generating an integrated sample (#137) Adds basic functionality and tests for end-to-end sample generation. Includes tests for comprehensive sample generation Conduct translation from special variable '$resp' to 'response' in template rendering Includes macros to set up method invocation, adds code to include license and copyright information, Note: sample generation is still ignorant of method and message signatures, types, attributes, and so forth. More full-featured semantic analysis will be conducted in subsequent PRs. Includes minor tweaks to various names, tests, and macros. Add code and associated tests for validating and rendering 'write_file' directives --- .../gapic/generator/generator.py | 9 +- .../gapic/samplegen/samplegen.py | 135 +++++++-- .../gapic-generator/gapic/samplegen/utils.py | 4 +- packages/gapic-generator/gapic/schema/api.py | 122 ++++---- .../templates/examples/feature_fragments.j2 | 122 +++++++- .../gapic/templates/examples/sample.py.j2 | 47 ++++ .../gapic-generator/gapic/utils/__init__.py | 2 + packages/gapic-generator/gapic/utils/code.py | 29 ++ .../tests/unit/samplegen/test_integration.py | 135 +++++++++ .../tests/unit/samplegen/test_samplegen.py | 183 ++++++++---- .../tests/unit/samplegen/test_template.py | 261 ++++++++++++++++-- 11 files changed, 877 insertions(+), 172 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/examples/sample.py.j2 create mode 100644 packages/gapic-generator/tests/unit/samplegen/test_integration.py diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index e8ce3a300ae8..b10a4d814140 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -24,6 +24,7 @@ from gapic import utils from gapic.generator import formatter from gapic.generator import options +from gapic.samplegen import samplegen from gapic.schema import api 
@@ -54,6 +55,7 @@ def __init__(self, opts: options.Options) -> None: self._env.filters['snake_case'] = utils.to_snake_case self._env.filters['sort_lines'] = utils.sort_lines self._env.filters['wrap'] = utils.wrap + self._env.filters['coerce_response_name'] = samplegen.coerce_response_name def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. @@ -70,9 +72,14 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: """ output_files: Dict[str, CodeGeneratorResponse.File] = OrderedDict() + # TODO: handle sample_templates specially, generate samples. + sample_templates, client_templates = utils.partition( + lambda fname: os.path.basename(fname) == samplegen.TEMPLATE_NAME, + self._env.loader.list_templates()) + # Iterate over each template and add the appropriate output files # based on that template. - for template_name in self._env.loader.list_templates(): + for template_name in client_templates: # Sanity check: Skip "private" templates. filename = template_name.split('/')[-1] if filename.startswith('_') and filename != '__init__.py.j2': diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 11f1ea0de1a9..6da90d8bcb8b 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -13,15 +13,17 @@ # limitations under the License. import itertools +import jinja2 import keyword import re -import gapic.samplegen.utils + +from gapic.samplegen import utils from collections import (defaultdict, namedtuple) -from typing import (List, Mapping, Set) +from textwrap import dedent +from typing import (Dict, List, Mapping, Set, Tuple) # Outstanding issues: -# * Neither license nor copyright are defined # * In real sample configs, many variables are # defined with an _implicit_ $resp variable. 
@@ -29,13 +31,23 @@ VALID_CONFIG_TYPE = 'com.google.api.codegen.SampleConfigProto' -# There are a couple of other names that should be reserved -# for sample variable assignments, e.g. 'client', but there are diminishing returns: -# the sample config author isn't a pathological adversary, they're a normal person -# who may make mistakes occasionally. -RESERVED_WORDS = frozenset(itertools.chain(keyword.kwlist, dir(__builtins__))) +# TODO: read in copyright and license from files. +FILE_HEADER: Dict[str, str] = {"copyright": "TODO: add a copyright", + "license": "TODO: add a license"} + +RESERVED_WORDS = frozenset( + itertools.chain(keyword.kwlist, + dir(__builtins__), + {"client", + "f", # parameter used in file I/O statements + "operation", # temporary used in LROs + "page", # used in paginated responses + "page_result", # used in paginated responses + "response", # basic 'response' + "stream", # used in server and bidi streaming + })) -TEMPLATE_NAME = 'samplegen_template.j2' +TEMPLATE_NAME = "sample.py.j2" TransformedRequest = namedtuple("TransformedRequest", ["base", "body"]) @@ -48,15 +60,15 @@ class ReservedVariableName(SampleError): pass -class SchemaVersion(SampleError): +class RpcMethodNotFound(SampleError): pass -class RpcMethodNotFound(SampleError): +class UnknownService(SampleError): pass -class InvalidConfigType(SampleError): +class InvalidConfig(SampleError): pass @@ -92,6 +104,20 @@ class InvalidRequestSetup(SampleError): pass +def coerce_response_name(s: str) -> str: + # In the sample config, the "$resp" keyword is used to refer to the + # item of interest as received by the corresponding calling form. + # For a 'regular', i.e. unary, synchronous, non-long-running method, + # it's the return value; for a server-streaming method, it's the iteration + # variable in the for loop that iterates over the return value, and for + # a long running promise, the user calls result on the method return value to + # resolve the future. 
+ # + # The sample schema uses '$resp' as the special variable, + # but in the samples the 'response' variable is used instead. + return s.replace("$resp", "response") + + class Validator: """Class that validates samples. @@ -113,6 +139,7 @@ def __init__(self): # TODO: this will eventually need the method name and the proto file # so that it can do the correct value transformation for enums. def validate_and_transform_request(self, + calling_form: utils.CallingForm, request: List[Mapping[str, str]]) -> List[TransformedRequest]: """Validates and transforms the "request" block from a sample config. @@ -130,7 +157,7 @@ def validate_and_transform_request(self, are that "field" maps only to the second part of a dotted variable name. Other key/value combinations in the dict are unmodified for the time being. - Note: gRPC API methods only take one parameter (ignoring client side streaming). + Note: gRPC API methods only take one parameter (ignoring client-side streaming). The reason that GAPIC client library API methods may take multiple parameters is a workaround to provide idiomatic protobuf support within python. The different 'bases' are really attributes for the singular request parameter. 
@@ -174,6 +201,7 @@ def validate_and_transform_request(self, """ base_param_to_attrs: Mapping[str, List[Mapping[str, str]]] = defaultdict(list) + for field_assignment in request: field_assignment_copy = dict(field_assignment) input_param = field_assignment_copy.get("input_parameter") @@ -206,6 +234,12 @@ def validate_and_transform_request(self, field_assignment_copy["field"] = attr base_param_to_attrs[base].append(field_assignment_copy) + if (calling_form in {utils.CallingForm.RequestStreamingClient, + utils.CallingForm.RequestStreamingBidi} and + len(base_param_to_attrs) > 1): + raise InvalidRequestSetup(("There can be at most 1 base request in a sample" + " for a method with client side streaming")) + return [TransformedRequest(base, body) for base, body in base_param_to_attrs.items()] @@ -222,9 +256,6 @@ def validate_response(self, response): response: list[dict{str:?}]: The structured data representing the sample's response. - Returns: - bool: True if response is valid. - Raises: InvalidStatement: If an unexpected key is found in a statement dict or a statement dict has more than or less than one key. @@ -268,7 +299,7 @@ def _validate_format(self, body: List[str]): number of arguments, and each argument must be a defined variable. TODO: the attributes of the variable must correspond to attributes - of the variable's type. + of the variable's type. Raises: MismatchedFormatSpecifier: If the number of format string segments ("%s") in @@ -320,6 +351,40 @@ def _validate_define(self, body: str): raise UndefinedVariableReference("Reference to undefined variable: {}" .format(rval_base)) + def _validate_write_file(self, body): + """Validate 'write_file' statements. + + The body of a 'write_file' statement is a two-element dict + with known keys: 'filename' and 'contents'. + 'filename' maps to a list of strings which constitute a format string + and variable-based rvalues defining the fields. + 'contents' maps to a single variable-based rvalue. 
+ + Raises: + MismatchedFormatSpecifier: If the filename formatstring is badly formed. + UndefinedVariableReference: If any of the formatstring variables + or the file contents variable are undefined. + InvalidStatement: If either 'filename' or 'contents' are absent keys. + """ + + fname_fmt = body.get("filename") + if not fname_fmt: + raise InvalidStatement( + "Missing key in 'write_file' statement: 'filename'") + + self._validate_format(fname_fmt) + + contents_var = body.get("contents") + if not contents_var: + raise InvalidStatement( + "Missing key in 'write_file' statement: 'contents'") + + # TODO: check the rest of the elements for valid subfield attribute + base = contents_var.split(".")[0] + if base not in self.var_defs_: + raise UndefinedVariableReference("Reference to undefined variable: {}" + .format(base)) + def _validate_loop(self, body): """Validates loop headers and statement bodies. @@ -408,5 +473,41 @@ def _validate_loop(self, body): "define": _validate_define, "print": _validate_format, "comment": _validate_format, + "write_file": _validate_write_file, "loop": _validate_loop, } + + +def generate_sample(sample, + env: jinja2.environment.Environment, + api_schema) -> Tuple[str, jinja2.environment.TemplateStream]: + sample_template = env.get_template(TEMPLATE_NAME) + + service_name = sample["service"] + service = api_schema.services.get(service_name) + if not service: + raise UnknownService("Unknown service: {}", service_name) + + rpc_name = sample["rpc"] + rpc = service.methods.get(rpc_name) + if not rpc: + raise RpcMethodNotFound( + "Could not find rpc in service {}: {}".format(service_name, rpc_name)) + + calling_form = utils.CallingForm.method_default(rpc) + + v = Validator() + sample["request"] = v.validate_and_transform_request(calling_form, + sample["request"]) + v.validate_response(sample["response"]) + + sample_id = sample["id"] + sample_fpath = sample_id + str(calling_form) + ".py" + + sample["package_name"] = 
api_schema.naming.warehouse_package_name + + return sample_fpath, sample_template.stream(fileHeader=FILE_HEADER, + sample=sample, + imports=[], + callingForm=calling_form, + callingFormEnum=utils.CallingForm) diff --git a/packages/gapic-generator/gapic/samplegen/utils.py b/packages/gapic-generator/gapic/samplegen/utils.py index 4df003a89e75..959dd4472ae0 100644 --- a/packages/gapic-generator/gapic/samplegen/utils.py +++ b/packages/gapic-generator/gapic/samplegen/utils.py @@ -26,12 +26,12 @@ class CallingForm(Enum): RequestStreamingServer = auto() RequestStreamingBidi = auto() RequestPagedAll = auto() - LongRunningPromise = auto() + LongRunningRequestPromise = auto() @classmethod def method_default(cls, m): if m.lro: - return cls.LongRunningRequestAsync + return cls.LongRunningRequestPromise if m.paged_result_field: return cls.RequestPagedAll if m.client_streaming: diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 7fb1af6acd14..0aae0638f393 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -52,9 +52,9 @@ def __getattr__(self, name: str): @classmethod def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool, naming: api_naming.Naming, - prior_protos: Mapping[str, 'Proto'] = None, - ) -> 'Proto': + file_to_generate: bool, naming: api_naming.Naming, + prior_protos: Mapping[str, 'Proto'] = None, + ) -> 'Proto': """Build and return a Proto instance. Args: @@ -68,10 +68,10 @@ def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, These are needed to look up messages in imported protos. 
""" return _ProtoBuilder(file_descriptor, - file_to_generate=file_to_generate, - naming=naming, - prior_protos=prior_protos or {}, - ).proto + file_to_generate=file_to_generate, + naming=naming, + prior_protos=prior_protos or {}, + ).proto @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: @@ -179,9 +179,9 @@ class API: @classmethod def build(cls, - file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], - package: str = '', - opts: options.Options = options.Options()) -> 'API': + file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], + package: str = '', + opts: options.Options = options.Options()) -> 'API': """Build the internal API schema based on the request. Args: @@ -218,15 +218,15 @@ def build(cls, def enums(self) -> Mapping[str, wrappers.EnumType]: """Return a map of all enums available in the API.""" return collections.ChainMap({}, - *[p.all_enums for p in self.protos.values()], - ) + *[p.all_enums for p in self.protos.values()], + ) @cached_property def messages(self) -> Mapping[str, wrappers.MessageType]: """Return a map of all messages available in the API.""" return collections.ChainMap({}, - *[p.all_messages for p in self.protos.values()], - ) + *[p.all_messages for p in self.protos.values()], + ) @cached_property def protos(self) -> Mapping[str, Proto]: @@ -246,8 +246,8 @@ def protos(self) -> Mapping[str, Proto]: def services(self) -> Mapping[str, wrappers.Service]: """Return a map of all services available in the API.""" return collections.ChainMap({}, - *[p.services for p in self.protos.values()], - ) + *[p.services for p in self.protos.values()], + ) @cached_property def subpackages(self) -> Mapping[str, 'API']: @@ -265,12 +265,13 @@ def subpackages(self) -> Mapping[str, 'API']: # derivative API objects returned here. 
level = len(self.subpackage_view) for subpkg_name in sorted({p.meta.address.subpackage[0] - for p in self.protos.values() - if len(p.meta.address.subpackage) > level and - p.meta.address.subpackage[:level] == self.subpackage_view}): + for p in self.protos.values() + if len(p.meta.address.subpackage) > level and + p.meta.address.subpackage[:level] == self.subpackage_view}): answer[subpkg_name] = dataclasses.replace(self, - subpackage_view=self.subpackage_view + (subpkg_name,), - ) + subpackage_view=self.subpackage_view + + (subpkg_name,), + ) return answer @@ -308,7 +309,7 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # detail below; this code simply shifts from a list to a dict, # with tuples of paths as the dictionary keys. self.docs: Dict[Tuple[int, ...], - descriptor_pb2.SourceCodeInfo.Location] = {} + descriptor_pb2.SourceCodeInfo.Location] = {} for location in file_descriptor.source_code_info.location: self.docs[tuple(location.path)] = location @@ -388,36 +389,37 @@ def proto(self) -> Proto: # Note: The services bind to themselves, because services get their # own output files. 
return dataclasses.replace(naive, - all_enums=collections.OrderedDict([ - (k, v.with_context(collisions=naive.names)) - for k, v in naive.all_enums.items() - ]), - all_messages=collections.OrderedDict([ - (k, v.with_context(collisions=naive.names)) - for k, v in naive.all_messages.items() - ]), - services=collections.OrderedDict([ - (k, v.with_context(collisions=v.names)) - for k, v in naive.services.items() - ]), - meta=naive.meta.with_context(collisions=naive.names), - ) + all_enums=collections.OrderedDict([ + (k, v.with_context(collisions=naive.names)) + for k, v in naive.all_enums.items() + ]), + all_messages=collections.OrderedDict([ + (k, v.with_context(collisions=naive.names)) + for k, v in naive.all_messages.items() + ]), + services=collections.OrderedDict([ + (k, v.with_context(collisions=v.names)) + for k, v in naive.services.items() + ]), + meta=naive.meta.with_context( + collisions=naive.names), + ) @cached_property def api_enums(self) -> Mapping[str, wrappers.EnumType]: return collections.ChainMap({}, self.proto_enums, - *[p.all_enums for p in self.prior_protos.values()], - ) + *[p.all_enums for p in self.prior_protos.values()], + ) @cached_property def api_messages(self) -> Mapping[str, wrappers.MessageType]: return collections.ChainMap({}, self.proto_messages, - *[p.all_messages for p in self.prior_protos.values()], - ) + *[p.all_messages for p in self.prior_protos.values()], + ) def _load_children(self, - children: Sequence, loader: Callable, *, - address: metadata.Address, path: Tuple[int, ...]) -> Mapping: + children: Sequence, loader: Callable, *, + address: metadata.Address, path: Tuple[int, ...]) -> Mapping: """Return wrapped versions of arbitrary children from a Descriptor. 
Args: @@ -448,9 +450,9 @@ def _load_children(self, return answer def _get_fields(self, - field_pbs: Sequence[descriptor_pb2.FieldDescriptorProto], - address: metadata.Address, path: Tuple[int, ...], - ) -> Dict[str, wrappers.Field]: + field_pbs: Sequence[descriptor_pb2.FieldDescriptorProto], + address: metadata.Address, path: Tuple[int, ...], + ) -> Dict[str, wrappers.Field]: """Return a dictionary of wrapped fields for the given message. Args: @@ -491,9 +493,9 @@ def _get_fields(self, return answer def _get_methods(self, - methods: Sequence[descriptor_pb2.MethodDescriptorProto], - address: metadata.Address, path: Tuple[int, ...], - ) -> Mapping[str, wrappers.Method]: + methods: Sequence[descriptor_pb2.MethodDescriptorProto], + address: metadata.Address, path: Tuple[int, ...], + ) -> Mapping[str, wrappers.Method]: """Return a dictionary of wrapped methods for the given service. Args: @@ -548,10 +550,10 @@ def _get_methods(self, return answer def _load_message(self, - message_pb: descriptor_pb2.DescriptorProto, - address: metadata.Address, - path: Tuple[int], - ) -> wrappers.MessageType: + message_pb: descriptor_pb2.DescriptorProto, + address: metadata.Address, + path: Tuple[int], + ) -> wrappers.MessageType: """Load message descriptions from DescriptorProtos.""" address = address.child(message_pb.name, path) @@ -603,10 +605,10 @@ def _load_message(self, return self.proto_messages[address.proto] def _load_enum(self, - enum: descriptor_pb2.EnumDescriptorProto, - address: metadata.Address, - path: Tuple[int], - ) -> wrappers.EnumType: + enum: descriptor_pb2.EnumDescriptorProto, + address: metadata.Address, + path: Tuple[int], + ) -> wrappers.EnumType: """Load enum descriptions from EnumDescriptorProtos.""" address = address.child(enum.name, path) @@ -633,10 +635,10 @@ def _load_enum(self, return self.proto_enums[address.proto] def _load_service(self, - service: descriptor_pb2.ServiceDescriptorProto, - address: metadata.Address, - path: Tuple[int], - ) -> 
wrappers.Service: + service: descriptor_pb2.ServiceDescriptorProto, + address: metadata.Address, + path: Tuple[int], + ) -> wrappers.Service: """Load comments for a service and its methods.""" address = address.child(service.name, path) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 679812d809e7..6889ff451398 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -25,13 +25,46 @@ There is a little, but not enough for it to be important because {# response handling macros #} +{% macro sample_header(fileHeader, sample, callingForm) %} +{% for line in fileHeader["copyright"].split("\n") %} +# {{ line }} +{% endfor %} +{% for line in fileHeader["license"].split("\n") %} +# {{ line }} +{% endfor %} +# +# DO NOT EDIT! This is a generated sample ("{{ callingForm }}", "{{ sample.id }}") +# +# To install the latest published package dependency, execute the following: +# pip3 install {{ sample.package_name }} +{% endmacro %} + +{% macro print_string_formatting(string_list) %} +{% if string_list|length == 1 %} +"{{ string_list[0]|replace("%s", "{}") }}" +{% else %} +"{{ string_list[0]|replace("%s", "{}") }}".format({{ string_list[1:]|map("coerce_response_name")|join(", ") }}) +{% endif %} +{% endmacro %} + +{% macro printInputParams(requests) %} +{% with input_parameters = [] %} +{% for request in requests %} + {% for element in request.body %} + {% if "input_parameter" in element %} + {% do input_parameters.append(element["input_parameter"]) %} + {% endif %} + {% endfor %} +{% endfor %} +{{ input_parameters|join(", ") -}} +{% endwith %} +{% endmacro %} + {% macro renderPrint(elts) %} {# First elment is a format string, remaining elements are the format string parameters #} {# Validating that the number of format params equals #} {# the number of 
remaining params is handled by real python code #} - {% with fmtStr = ('\"' + elts[0] + '\"') |replace("%s", "{}") %} -print({{ ([fmtStr] + elts[1:])|join(', ') }}) - {% endwith -%} +print({{ print_string_formatting(elts)|trim }}) {% endmacro %} {% macro renderComment(elts) %} @@ -39,34 +72,44 @@ print({{ ([fmtStr] + elts[1:])|join(', ') }}) {# Validating that the number of format params equals #} {# the number of remaining params is handled by real python code #} {% with fmtStr = elts[0] %} -# {{ fmtStr|format(*elts[1:]) }} - {% endwith %} + {% with params = elts[1:]|map("coerce_response_name")|list %} +# {{ fmtStr|format(*params) }} +{% endwith %} +{% endwith %} {% endmacro %} {% macro renderDefine(statement) %} {# Python code already verified the form, no need to check #} {% with lvalue, rvalue = statement.split("=") %} -{{ lvalue }} = {{ rvalue }} +{{ lvalue }} = {{ rvalue|coerce_response_name }} {% endwith %} {% endmacro %} {% macro renderCollectionLoop(statement) %} -for {{ statement.variable }} in {{ statement.collection }}: +for {{ statement.variable }} in {{ statement.collection|coerce_response_name }}: {{ dispatchStatement(statement.body) -}} {% endmacro %} {% macro renderMapLoop(statement) %} {# At least one of key and value exist; validated in python #} {% if "key" not in statement %} -for {{ statement.value }} in {{ statement.map }}.values(): +for {{ statement.value }} in {{ statement.map|coerce_response_name }}.values(): {% elif "value" not in statement %} -for {{ statement.key }} in {{ statement.map }}.keys(): +for {{ statement.key }} in {{ statement.map|coerce_response_name }}.keys(): {% else %} -for {{statement.key }}, {{ statement.value }} in {{ statement.map }}.items(): +for {{statement.key }}, {{ statement.value }} in {{ statement.map|coerce_response_name }}.items(): {% endif %} {{ dispatchStatement(statement.body) -}} {% endmacro %} +{% macro render_write_file(statement) %} + {% with contents_rval = statement["contents"]|coerce_response_name %} 
+with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: + f.write({{ contents_rval }}) + + {% endwith %} +{% endmacro %} + {% macro dispatchStatement(statement) %} {# Each statement is a dict with a single key/value pair #} {% if "print" in statement %} @@ -81,6 +124,8 @@ for {{statement.key }}, {{ statement.value }} in {{ statement.map }}.items(): {{ renderMapLoop(loop) -}} {% endif %} {% endwith %} +{% elif "write_file" in statement %} +{{ render_write_file(statement["write_file"]) -}} {% endif %} {% endmacro %} @@ -111,6 +156,61 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endfor %} {% endmacro %} +{% macro renderMethodCall(sample, callingForm, callingFormEnum) %} +{# Note: this doesn't deal with enums or unions #} +{% if callingForm not in [callingFormEnum.RequestStreamingBidi, +callingFormEnum.RequestStreamingClient] %} +client.{{ sample.rpc|snake_case }}({{ sample.request|map(attribute="base")|join(", ") }}) +{% else %} +{# TODO: set up client streaming once some questions are answered #} +client.{{ sample.rpc|snake_case }}([{{ sample.request|map(attribute="base")|join("") }}]) +{% endif %} +{% endmacro %} + +{# Setting up the method invocation is the responsibility of the caller: #} +{# it's just easier to set up client side streaming and other things from outside this macro. 
#} +{% macro renderCallingForm(methodInvocationText, callingForm, callingFormEnum, responseStatements ) %} +{% if callingForm == callingFormEnum.Request %} +response = {{ methodInvocationText }} +{% for statement in responseStatements %} +{{ dispatchStatement(statement ) }} +{% endfor %} +{% elif callingForm == callingFormEnum.RequestPagedAll %} +page_result = {{ methodInvocationText }} +for response in page_result: + {% for statement in responseStatements %} + {{ dispatchStatement(statement ) }} + {% endfor %} +{% elif callingForm == callingFormEnum.RequestPaged %} +page_result = {{ methodInvocationText }} +for page in page_result.pages(): + for response in page: + {% for statement in responseStatements %} + {{ dispatchStatement(statement ) }} + {% endfor %} +{% elif callingForm in [callingFormEnum.RequestStreamingServer, + callingFormEnum.RequestStreamingBidi] %} +stream = {{ methodInvocationText }} +for response in stream: + {% for statement in responseStatements %} + {{ dispatchStatement(statement ) }} + {% endfor %} +{% elif callingForm == callingFormEnum.LongRunningRequestPromise %} +operation = {{ methodInvocationText }} + +print("Waiting for operation to complete...") + +response = operation.result() +{% for statement in responseStatements %} +{{ dispatchStatement(statement ) }} +{% endfor %} +{% endif %} +{% endmacro %} + +{% macro renderMethodName(methodName) %} +{{ methodName|snake_case -}} +{% endmacro %} + {% macro renderMainBlock(methodName, requestBlock) %} def main(): import argparse @@ -125,7 +225,7 @@ def main(): {% endfor %} args = parser.parse_args() - sample_{{ methodName|snake_case }}({{ arg_list|join(", ") }}) + sample_{{ renderMethodName(methodName) }}({{ arg_list|join(", ") }}) if __name__ == "__main__": diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 new file mode 100644 index 000000000000..36acf4209486 --- /dev/null +++ 
b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -0,0 +1,47 @@ +{# + # Copyright (C) 2019 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +#} +{# Input parameters: sample #} +{# fileHeader#} +{# imports #} +{# callingForm #} +{# callingFormEnum #} +{# Note: this sample template is WILDLY INACCURATE AND INCOMPLETE #} +{# It does not correctly enums, unions, top level attributes, or various other things #} +{% import "feature_fragments.j2" as frags %} +{{ frags.sample_header(fileHeader, sample, callingForm) }} + +# [START {{ sample.id }}] +{# python code is responsible for all transformations: all we do here is render #} +{% for importStatement in imports %} +{{ importStatement }} +{% endfor %} + +{# also need calling form #} +def sample_{{ frags.renderMethodName(sample.rpc) }}({{ frags.printInputParams(sample.request) }}): + """{{ sample.description }}""" + + client = {{ sample.service.split(".")[-3:-1]| + map("lower")| + join("_") }}.{{ sample.service.split(".")[-1] }}Client() + + {{ frags.renderRequest(sample.request)|indent }} +{% with methodCall = frags.renderMethodCall(sample, callingForm, callingFormEnum) %} + {{ frags.renderCallingForm(methodCall, callingForm, callingFormEnum, sample.response)|indent -}} +{% endwith %} + +# [END {{ sample.id }}] + +{{ frags.renderMainBlock(sample.rpc, sample.request) }} diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 
1e9c95d00a0a..70603228b4b6 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -15,6 +15,7 @@ from gapic.utils.cache import cached_property from gapic.utils.case import to_snake_case from gapic.utils.code import empty +from gapic.utils.code import partition from gapic.utils.doc import doc from gapic.utils.filename import to_valid_filename from gapic.utils.filename import to_valid_module_name @@ -27,6 +28,7 @@ 'cached_property', 'doc', 'empty', + 'partition', 'rst', 'sort_lines', 'to_snake_case', diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 7e0df14fc6b6..1a09d8f4a13b 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import (Any, Callable, Iterable, List, Tuple, TypeVar) + def empty(content: str) -> bool: """Return True if this file has no Python statements, False otherwise. @@ -21,3 +23,30 @@ def empty(content: str) -> bool: """ return not any([i.lstrip() and not i.lstrip().startswith('#') for i in content.split('\n')]) + + +T = TypeVar('T') + + +def partition(predicate: Callable[[T], bool], + iterator: Iterable[T]) -> Tuple[List[T], List[T]]: + """Partitions an iterable into two lists based on a predicate + + Args: + predicate (Callable[[T], bool]) : A callable predicate on a single argument + of whatever type is in iterator. + iterator (Iterable(T)): An iterable on any type. + + + Returns: + Tuple[List[T], List[T]]: The contents of iterator partitioned into two lists. + The first list contains the "true" elements + and the second contains the "false" elements. 
+ """ + results: Tuple[List[T], List[T]] = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py new file mode 100644 index 000000000000..0059e0bccf96 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -0,0 +1,135 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import jinja2 +import os.path as path +import pytest + +import gapic.samplegen.samplegen as samplegen +import gapic.utils as utils + +from collections import namedtuple +from textwrap import dedent + +# Injected dummy test types +dummy_method_fields = ["lro", + "paged_result_field", + "client_streaming", + "server_streaming"] +DummyMethod = namedtuple("DummyMethod", + dummy_method_fields) +DummyMethod.__new__.__defaults__ = (False,)*len(dummy_method_fields) + +DummyService = namedtuple("DummyService", ["methods"]) + +DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming"]) + +DummyNaming = namedtuple("DummyNaming", ["warehouse_package_name"]) + + +env = jinja2.Environment( + loader=jinja2.FileSystemLoader( + searchpath=path.realpath(path.join(path.dirname(__file__), + "..", "..", "..", + "gapic", "templates", "examples"))), + undefined=jinja2.StrictUndefined, + extensions=["jinja2.ext.do"], + trim_blocks=True, lstrip_blocks=True +) +env.filters['snake_case'] = utils.to_snake_case +env.filters['coerce_response_name'] = samplegen.coerce_response_name + + +def test_generate_sample_basic(): + # Note: the sample integration tests are needfully large + # and difficult to eyeball parse. They are intended to be integration tests + # that catch errors in behavior that is emergent from combining smaller features + # or in features that are sufficiently small and trivial that it doesn't make sense + # to have standalone tests. 
+ schema = DummyApiSchema( + {"animalia.mollusca.v1.Mollusc": DummyService( + {"Classify": DummyMethod()})}, + DummyNaming("molluscs-v1-mollusc")) + + sample = {"service": "animalia.mollusca.v1.Mollusc", + "rpc": "Classify", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [{"field": "classify_request.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True}], + "response": [{"print": ["Mollusc is a %s", "$resp.taxonomy"]}]} + + fpath, template_stream = samplegen.generate_sample(sample, env, schema) + sample_str = "".join(iter(template_stream)) + + assert sample_str == '''# TODO: add a copyright +# TODO: add a license +# +# DO NOT EDIT! This is a generated sample ("CallingForm.Request", "mollusc_classify_sync") +# +# To install the latest published package dependency, execute the following: +# pip3 install molluscs-v1-mollusc + + +# [START mollusc_classify_sync] + +def sample_classify(video): + """Determine the full taxonomy of input mollusc""" + + client = mollusca_v1.MolluscClient() + + classify_request = {} + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_request["video"] = f.read() + + + response = client.classify(classify_request) + + print("Mollusc is a {}".format(response.taxonomy)) + + +# [END mollusc_classify_sync] + +def main(): + import argparse + + parser = argparse.ArgumentParser() + args = parser.parse_args() + + sample_classify() + + +if __name__ == "__main__": + main() +''' + + +def test_generate_sample_service_not_found(): + schema = DummyApiSchema({}, DummyNaming("pkg_name")) + sample = {"service": "Mollusc"} + + with pytest.raises(samplegen.UnknownService): + samplegen.generate_sample(sample, env, schema) + + +def test_generate_sample_rpc_not_found(): + schema = DummyApiSchema( + {"Mollusc": DummyService({})}, DummyNaming("pkg_name")) + sample = {"service": "Mollusc", "rpc": "Classify"} + + with 
pytest.raises(samplegen.RpcMethodNotFound): + samplegen.generate_sample(sample, env, schema) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 0e78e76e1fbf..542eaa1803d7 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -16,7 +16,8 @@ from collections import namedtuple import gapic.samplegen.samplegen as samplegen -import gapic.samplegen.utils as utils + +from gapic.samplegen import utils # validate_response tests @@ -58,7 +59,8 @@ def test_define_redefinition(): def test_define_input_param(): validator = samplegen.Validator() - validator.validate_and_transform_request([{"field": "squid.mantle_length", + validator.validate_and_transform_request(utils.CallingForm.Request, + [{"field": "squid.mantle_length", "value": "100 cm", "input_parameter": "mantle_length"}]) validator.validate_response([{"define": "length=mantle_length"}]) @@ -66,7 +68,8 @@ def test_define_input_param(): def test_define_input_param_redefinition(): validator = samplegen.Validator() - validator.validate_and_transform_request([{"field": "squid.mantle_length", + validator.validate_and_transform_request(utils.CallingForm.Request, + [{"field": "squid.mantle_length", "value": "100 cm", "input_parameter": "mantle_length"}]) with pytest.raises(samplegen.RedefinedVariable): @@ -175,21 +178,6 @@ def test_loop_collection_missing_kword(): samplegen.Validator().validate_response([loop]) -def test_loop_collection_missing_kword2(): - loop = {"loop": {"collection": "$resp.molluscs", - "body": [{"print": - ["Mollusc: %s", "m.class"]}]}} - with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) - - -def test_loop_collection_missing_kword3(): - loop = {"loop": {"collection": "$resp.molluscs", - "variable": "r"}} - with pytest.raises(samplegen.BadLoop): - 
samplegen.Validator().validate_response([loop]) - - def test_loop_collection_reserved_loop_var(): loop = {"loop": {"collection": "$resp.molluscs", "variable": "class", @@ -347,6 +335,47 @@ def test_loop_map_redefined_value(): samplegen.Validator().validate_response(statements) +def test_validate_write_file(): + samplegen.Validator().validate_response( + [{"write_file": {"filename": ["specimen-%s", "$resp.species"], + "contents": "$resp.photo"}}]) + + +def test_validate_write_file_fname_fmt(): + with pytest.raises(samplegen.MismatchedFormatSpecifier): + samplegen.Validator().validate_response( + [{"write_file": {"filename": ["specimen-%s"], + "contents": "$resp.photo"}}]) + + +def test_validate_write_file_fname_bad_var(): + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response( + [{"write_file": {"filename": ["specimen-%s", "squid.species"], + "contents": "$resp.photo"}}]) + + +def test_validate_write_file_missing_fname(): + with pytest.raises(samplegen.InvalidStatement): + samplegen.Validator().validate_response( + [{"write_file": {"contents": "$resp.photo"}}] + ) + + +def test_validate_write_file_missing_contents(): + with pytest.raises(samplegen.InvalidStatement): + samplegen.Validator().validate_response( + [{"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] + ) + + +def test_validate_write_file_bad_contents_var(): + with pytest.raises(samplegen.UndefinedVariableReference): + samplegen.Validator().validate_response( + [{"write_file": {"filename": ["specimen-%s", "$resp.species"], + "contents": "squid.photo"}}]) + + def test_invalid_statement(): statement = {"print": ["Name"], "comment": ["Value"]} with pytest.raises(samplegen.InvalidStatement): @@ -361,42 +390,42 @@ def test_invalid_statement2(): # validate_and_transform_request tests def test_validate_request_basic(): - assert samplegen.Validator().validate_and_transform_request( - [{"field": "squid.mantle_length", - "value": "100 cm"}, - {"field": 
"squid.mantle_mass", - "value": "10 kg"}]) == [ - samplegen.TransformedRequest("squid", - [{"field": "mantle_length", - "value": "100 cm"}, - {"field": "mantle_mass", - "value": "10 kg"}])] + assert samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"field": "squid.mantle_length", + "value": "100 cm"}, + {"field": "squid.mantle_mass", + "value": "10 kg"}]) == [ + samplegen.TransformedRequest("squid", + [{"field": "mantle_length", + "value": "100 cm"}, + {"field": "mantle_mass", + "value": "10 kg"}])] def test_validate_request_no_field_parameter(): with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator().validate_and_transform_request( - [{"squid": "humboldt"}]) + samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"squid": "humboldt"}]) def test_validate_request_malformed_field_attr(): with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator().validate_and_transform_request( - [{"field": "squid"}]) + samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"field": "squid"}]) def test_validate_request_multiple_arguments(): - assert samplegen.Validator().validate_and_transform_request( - [{"field": "squid.mantle_length", - "value": "100 cm", - "value_is_file": True}, - {"field": "clam.shell_mass", - "value": "100 kg", - "comment": "Clams can be large"}]) == [ - samplegen.TransformedRequest("squid", - [{"field": "mantle_length", - "value": "100 cm", - "value_is_file": True}]), + assert samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"field": "squid.mantle_length", + "value": "100 cm", + "value_is_file": True}, + {"field": "clam.shell_mass", + "value": "100 kg", + "comment": "Clams can be large"}]) == [ + samplegen.TransformedRequest("squid", + [{"field": "mantle_length", + "value": "100 cm", + "value_is_file": True}]), samplegen.TransformedRequest("clam", [{"field": "shell_mass", "value": "100 kg", @@ 
-405,27 +434,63 @@ def test_validate_request_multiple_arguments(): def test_validate_request_reserved_request_name(): with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_and_transform_request( - [{"field": "class.order", "value": "coleoidea"}]) + samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"field": "class.order", "value": "coleoidea"}]) def test_validate_request_duplicate_input_param(): with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator().validate_and_transform_request( - [{"field": "squid.mantle_mass", - "value": "10 kg", - "input_parameter": "mantle_mass"}, - {"field": "clam.mantle_mass", - "value": "1 kg", - "input_parameter": "mantle_mass"}]) + samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"field": "squid.mantle_mass", + "value": "10 kg", + "input_parameter": "mantle_mass"}, + {"field": "clam.mantle_mass", + "value": "1 kg", + "input_parameter": "mantle_mass"}]) def test_validate_request_reserved_input_param(): with pytest.raises(samplegen.ReservedVariableName): + samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, + [{"field": "mollusc.class", + "value": "cephalopoda", + "input_parameter": "class"}]) + + +def test_single_request_client_streaming(): + # Each API client method really only takes one parameter: + # either a single protobuf message or an iterable of protobuf messages. + # With unary request methods, python lets us describe attributes as positional + # and keyword parameters, which simplifies request construction. + # The 'base' in the transformed request refers to an attribute, and the + # 'field's refer to sub-attributes. + # Client streaming and bidirectional streaming methods can't use this notation, + # and generate an exception if there is more than one 'base'. 
+ with pytest.raises(samplegen.InvalidRequestSetup): samplegen.Validator().validate_and_transform_request( - [{"field": "mollusc.class", - "value": "cephalopoda", - "input_parameter": "class"}]) + utils.CallingForm.RequestStreamingClient, + [{"field": "cephalopod.order", + "value": "cephalopoda"}, + {"field": "gastropod.order", + "value": "pulmonata"}]) + + +def test_single_request_bidi_streaming(): + # Each API client method really only takes one parameter: + # either a single protobuf message or an iterable of protobuf messages. + # With unary request methods, python lets us describe attributes as positional + # and keyword parameters, which simplifies request construction. + # The 'base' in the transformed request refers to an attribute, and the + # 'field's refer to sub-attributes. + # Client streaming and bidirectional streaming methods can't use this notation, + # and generate an exception if there is more than one 'base'. + with pytest.raises(samplegen.InvalidRequestSetup): + samplegen.Validator().validate_and_transform_request( + utils.CallingForm.RequestStreamingBidi, + [{"field": "cephalopod.order", + "value": "cephalopoda"}, + {"field": "gastropod.order", + "value": "pulmonata"}]) def test_validate_request_calling_form(): @@ -436,7 +501,7 @@ def test_validate_request_calling_form(): "server_streaming"]) assert utils.CallingForm.method_default(DummyMethod( - True, False, False, False)) == utils.CallingForm.LongRunningRequestAsync + True, False, False, False)) == utils.CallingForm.LongRunningRequestPromise assert utils.CallingForm.method_default(DummyMethod( False, True, False, False)) == utils.CallingForm.RequestPagedAll @@ -452,3 +517,9 @@ def test_validate_request_calling_form(): assert utils.CallingForm.method_default(DummyMethod( False, False, True, True)) == utils.CallingForm.RequestStreamingBidi + + +def test_coerce_response_name(): + # Don't really need a test, but it shuts up code coverage. 
+ assert samplegen.coerce_response_name("$resp.squid") == "response.squid" + assert samplegen.coerce_response_name("mollusc.squid") == "mollusc.squid" diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 5fd84dc33a20..e32c5c27a40b 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -18,10 +18,11 @@ import gapic.samplegen.samplegen as samplegen import gapic.utils as utils +from gapic.samplegen.utils import CallingForm from textwrap import dedent -def check_template(template_fragment, expected_output): +def check_template(template_fragment, expected_output, **kwargs): # Making a new environment for every unit test seems wasteful, # but the obvious alternative (make env an instance attribute # and passing a FunctionLoader whose load function returns @@ -43,9 +44,10 @@ def check_template(template_fragment, expected_output): ) env.filters['snake_case'] = utils.to_snake_case + env.filters['coerce_response_name'] = samplegen.coerce_response_name template = env.get_template("template_fragment") - text = template.render() + text = template.render(**kwargs) assert text == dedent(expected_output) @@ -57,7 +59,9 @@ def test_render_attr_value(): {"field": "order", "value": "Molluscs.Cephalopoda.Coleoidea"}) }} ''', - '\nmollusc["order"] = Molluscs.Cephalopoda.Coleoidea\n' + ''' + mollusc["order"] = Molluscs.Cephalopoda.Coleoidea + ''' ) @@ -69,7 +73,10 @@ def test_render_attr_input_parameter(): "value": "Humboldt", "input_parameter": "species"}) }} ''', - '\n# species = "Humboldt"\nsquid["species"] = species\n') + ''' + # species = "Humboldt" + squid["species"] = species + ''') def test_render_attr_file(): @@ -135,7 +142,7 @@ def test_render_request_basic(): with open(movie_path, "rb") as f: gastropod["movie"] = f.read() - ''' + ''' ) @@ -153,9 +160,9 @@ def test_render_print_args(): 
check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderPrint(["Molluscs: %s, %s, %s", "squid", "clam", "whelk"]) }} + {{ frags.renderPrint(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} ''', - '\nprint("Molluscs: {}, {}, {}", squid, clam, whelk)\n' + '\nprint("$resp {} {}".format(response.squids, response.clams))\n' ) @@ -173,9 +180,9 @@ def test_render_comment_args(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderComment(["Molluscs: %s, %s, %s", "squid", "clam", "whelk"]) }} + {{ frags.renderComment(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} ''', - '\n# Molluscs: squid, clam, whelk\n' + '\n# $resp response.squids response.clams\n' ) @@ -189,6 +196,16 @@ def test_define(): ) +def test_define_resp(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderDefine("squid=$resp.squid") }} + ''', + '\nsquid = response.squid\n' + ) + + def test_dispatch_print(): check_template( ''' @@ -209,16 +226,50 @@ def test_dispatch_comment(): ) +def test_write_file(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_write_file({"filename": ["specimen-%s", + "$resp.species"], + "contents": "$resp.photo"}) }} + ''', + ''' + with open("specimen-{}".format(response.species), "wb") as f: + f.write(response.photo) + + ''' + ) + + +def test_dispatch_write_file(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatchStatement({"write_file": + {"filename": ["specimen-%s", + "$resp.species"], + "contents": "$resp.photo"}})}} + ''', + ''' + with open("specimen-{}".format(response.species), "wb") as f: + f.write(response.photo) + + ''' + ) + + def test_collection_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderCollectionLoop({"collection": "molluscs", - "variable": "m", - "body": {"print": ["Mollusc: %s", "m"]}}) }}''', + {{ frags.renderCollectionLoop({"collection": 
"$resp.molluscs", + "variable": "m", + "body": {"print": ["Mollusc: %s", "m"]}})}} + ''', ''' - for m in molluscs: - print("Mollusc: {}", m) + for m in response.molluscs: + print("Mollusc: {}".format(m)) ''' ) @@ -232,7 +283,7 @@ def test_dispatch_collection_loop(): "body": {"print": ["Mollusc: %s", "m"]}}}) }}''', ''' for m in molluscs: - print("Mollusc: {}", m) + print("Mollusc: {}".format(m)) ''' ) @@ -241,14 +292,14 @@ def test_map_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMapLoop({"map": "molluscs", + {{ frags.renderMapLoop({"map": "$resp.molluscs", "key":"cls", "value":"example", "body": {"print": ["A %s is a %s", "example", "cls"] }}) }}''', ''' - for cls, example in molluscs.items(): - print("A {} is a {}", example, cls) + for cls, example in response.molluscs.items(): + print("A {} is a {}".format(example, cls)) ''' ) @@ -257,13 +308,13 @@ def test_map_loop_no_key(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMapLoop({"map": "molluscs", + {{ frags.renderMapLoop({"map": "$resp.molluscs", "value":"example", "body": {"print": ["A %s is a mollusc", "example"] }}) }}''', ''' - for example in molluscs.values(): - print("A {} is a mollusc", example) + for example in response.molluscs.values(): + print("A {} is a mollusc".format(example)) ''' ) @@ -272,13 +323,13 @@ def test_map_loop_no_value(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMapLoop({"map": "molluscs", + {{ frags.renderMapLoop({"map": "$resp.molluscs", "key":"cls", "body": {"print": ["A %s is a mollusc", "cls"] }}) }}''', ''' - for cls in molluscs.keys(): - print("A {} is a mollusc", cls) + for cls in response.molluscs.keys(): + print("A {} is a mollusc".format(cls)) ''' ) @@ -296,8 +347,168 @@ def test_dispatch_map_loop(): ''', ''' for cls, example in molluscs.items(): - print("A {} is a {}", example, cls) + print("A {} is a {}".format(example, cls)) + ''' + ) + + +def 
test_print_input_params(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.printInputParams([{"base": "squid", + "body": [{"field": "mass", + "value": "10 kg", + "input_parameter": "mass"}, + {"field": "length", + "value": "20 m", + "input_parameter": "length"}]}, + {"base": "clam", + "body": [{"field": "diameter", + "value": "10 cm"}]}, + {"base": "whelk", + "body": [{"field": "color", + "value": "red", + "input_parameter": "color"}]}, + ]) }} + ''', + "\nmass, length, color" + ) + + +CALLING_FORM_TEMPLATE_TEST_STR = ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderCallingForm("TEST_INVOCATION_TXT", callingForm, + callingFormEnum, + [{"print": ["Test print statement"]}]) }} + ''' + + +def test_render_calling_form_request(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + response = TEST_INVOCATION_TXT + print("Test print statement") + + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.Request) + + +def test_render_calling_form_paged_all(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + page_result = TEST_INVOCATION_TXT + for response in page_result: + print("Test print statement") + + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.RequestPagedAll) + + +def test_render_calling_form_paged(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + page_result = TEST_INVOCATION_TXT + for page in page_result.pages(): + for response in page: + print("Test print statement") + + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.RequestPaged) + + +def test_render_calling_form_streaming_server(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + stream = TEST_INVOCATION_TXT + for response in stream: + print("Test print statement") + + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.RequestStreamingServer) + + +def test_render_calling_form_streaming_bidi(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + stream = TEST_INVOCATION_TXT + 
for response in stream: + print("Test print statement") + + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.RequestStreamingBidi) + + +def test_render_calling_form_longrunning(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + operation = TEST_INVOCATION_TXT + + print("Waiting for operation to complete...") + + response = operation.result() + print("Test print statement") + + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.LongRunningRequestPromise) + + +def test_render_method_call_basic(): + # The callingForm and callingFormEnum parameters are dummies, + # which we can get away with because of duck typing in the template. + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMethodCall({"rpc": "CategorizeMollusc", "request": [{"base": "video"}, + {"base": "audio"}, + {"base": "guess"}]}, + callingForm, callingFormEnum) }} + ''', + ''' + client.categorize_mollusc(video, audio, guess) + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.Request + ) + + +def test_render_method_call_bidi(): + # The callingForm and callingFormEnum parameters are dummies, + # which we can get away with because of duck typing in the template. + check_template( ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMethodCall({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, + callingForm, callingFormEnum) }} + ''', + ''' + client.categorize_mollusc([video]) + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.RequestStreamingBidi + ) + + +def test_render_method_call_client(): + # The callingForm and callingFormEnum parameters are dummies, + # which we can get away with because of duck typing in the template. 
+ check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.renderMethodCall({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, + callingForm, callingFormEnum) }} + ''', + ''' + client.categorize_mollusc([video]) + ''', + callingFormEnum=CallingForm, + callingForm=CallingForm.RequestStreamingClient ) From fe1c923cbd9ed4190f8043bf8e7f74f18472e5fc Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 11 Jul 2019 12:33:57 -0700 Subject: [PATCH 0129/1339] Variable naming cleanup (#145) --- .../gapic/samplegen/samplegen.py | 6 +- .../templates/examples/feature_fragments.j2 | 113 ++++++------- .../gapic/templates/examples/sample.py.j2 | 16 +- .../tests/unit/samplegen/test_template.py | 153 ++++++++++-------- 4 files changed, 154 insertions(+), 134 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 6da90d8bcb8b..ea83c9ea30d8 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -506,8 +506,8 @@ def generate_sample(sample, sample["package_name"] = api_schema.naming.warehouse_package_name - return sample_fpath, sample_template.stream(fileHeader=FILE_HEADER, + return sample_fpath, sample_template.stream(file_header=FILE_HEADER, sample=sample, imports=[], - callingForm=calling_form, - callingFormEnum=utils.CallingForm) + calling_form=calling_form, + calling_form_enum=utils.CallingForm) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 6889ff451398..9360ff124778 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -25,15 +25,15 @@ There is a little, but not enough for it to be important because {# response handling macros #} -{% macro 
sample_header(fileHeader, sample, callingForm) %} -{% for line in fileHeader["copyright"].split("\n") %} +{% macro sample_header(file_header, sample, calling_form) %} +{% for line in file_header["copyright"].split("\n") %} # {{ line }} {% endfor %} -{% for line in fileHeader["license"].split("\n") %} +{% for line in file_header["license"].split("\n") %} # {{ line }} {% endfor %} # -# DO NOT EDIT! This is a generated sample ("{{ callingForm }}", "{{ sample.id }}") +# DO NOT EDIT! This is a generated sample ("{{ calling_form }}", "{{ sample.id }}") # # To install the latest published package dependency, execute the following: # pip3 install {{ sample.package_name }} @@ -47,7 +47,7 @@ There is a little, but not enough for it to be important because {% endif %} {% endmacro %} -{% macro printInputParams(requests) %} +{% macro print_input_params(requests) %} {% with input_parameters = [] %} {% for request in requests %} {% for element in request.body %} @@ -60,14 +60,14 @@ There is a little, but not enough for it to be important because {% endwith %} {% endmacro %} -{% macro renderPrint(elts) %} +{% macro render_print(elts) %} {# First elment is a format string, remaining elements are the format string parameters #} {# Validating that the number of format params equals #} {# the number of remaining params is handled by real python code #} print({{ print_string_formatting(elts)|trim }}) {% endmacro %} -{% macro renderComment(elts) %} +{% macro render_comment(elts) %} {# First elment is a format string, remaining elements are the format string parameters #} {# Validating that the number of format params equals #} {# the number of remaining params is handled by real python code #} @@ -78,19 +78,19 @@ print({{ print_string_formatting(elts)|trim }}) {% endwith %} {% endmacro %} -{% macro renderDefine(statement) %} +{% macro render_define(statement) %} {# Python code already verified the form, no need to check #} {% with lvalue, rvalue = statement.split("=") %} {{ lvalue }} = 
{{ rvalue|coerce_response_name }} {% endwith %} {% endmacro %} -{% macro renderCollectionLoop(statement) %} +{% macro render_collection_loop(statement) %} for {{ statement.variable }} in {{ statement.collection|coerce_response_name }}: - {{ dispatchStatement(statement.body) -}} + {{ dispatch_statement(statement.body) -}} {% endmacro %} -{% macro renderMapLoop(statement) %} +{% macro render_map_loop(statement) %} {# At least one of key and value exist; validated in python #} {% if "key" not in statement %} for {{ statement.value }} in {{ statement.map|coerce_response_name }}.values(): @@ -99,7 +99,7 @@ for {{ statement.key }} in {{ statement.map|coerce_response_name }}.keys(): {% else %} for {{statement.key }}, {{ statement.value }} in {{ statement.map|coerce_response_name }}.items(): {% endif %} - {{ dispatchStatement(statement.body) -}} + {{ dispatch_statement(statement.body) -}} {% endmacro %} {% macro render_write_file(statement) %} @@ -110,18 +110,18 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {% endwith %} {% endmacro %} -{% macro dispatchStatement(statement) %} +{% macro dispatch_statement(statement) %} {# Each statement is a dict with a single key/value pair #} {% if "print" in statement %} -{{ renderPrint(statement["print"]) -}} +{{ render_print(statement["print"]) -}} {% elif "comment" in statement %} -{{ renderComment(statement["comment"]) -}} +{{ render_comment(statement["comment"]) -}} {% elif "loop" in statement %} {% with loop = statement["loop"] %} {% if "collection" in loop %} -{{ renderCollectionLoop(loop) -}} +{{ render_collection_loop(loop) -}} {% else %} -{{ renderMapLoop(loop) -}} +{{ render_map_loop(loop) -}} {% endif %} {% endwith %} {% elif "write_file" in statement %} @@ -129,7 +129,7 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {% endif %} {% endmacro %} -{% macro renderRequestAttr(baseName, attr) %} +{% macro render_request_attr(base_name, attr) %} {# Note: 
python code will have manipulated the value #} {# to be the correct enum from the right module, if necessary. #} {# Python is also responsible for verifying that each input parameter is unique,#} @@ -138,28 +138,28 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: # {{ attr.input_parameter }} = "{{ attr.value }}" {% if "value_is_file" in attr and attr.value_is_file %} with open({{ attr.input_parameter }}, "rb") as f: - {{ baseName }}["{{ attr.field }}"] = f.read() + {{ base_name }}["{{ attr.field }}"] = f.read() {% else %} -{{ baseName }}["{{ attr.field }}"] = {{ attr.input_parameter }} +{{ base_name }}["{{ attr.field }}"] = {{ attr.input_parameter }} {% endif %} {% else %} -{{ baseName }}["{{ attr.field }}"] = {{ attr.value }} +{{ base_name }}["{{ attr.field }}"] = {{ attr.value }} {% endif %} {% endmacro %} -{% macro renderRequest(request) %} - {% for parameterBlock in request %} -{{ parameterBlock.base }} = {} - {% for attr in parameterBlock.body %} -{{ renderRequestAttr(parameterBlock.base, attr) }} +{% macro render_request(request) %} + {% for parameter_block in request %} +{{ parameter_block.base }} = {} + {% for attr in parameter_block.body %} +{{ render_request_attr(parameter_block.base, attr) }} {% endfor %} {% endfor %} {% endmacro %} -{% macro renderMethodCall(sample, callingForm, callingFormEnum) %} +{% macro render_method_call(sample, calling_form, calling_form_enum) %} {# Note: this doesn't deal with enums or unions #} -{% if callingForm not in [callingFormEnum.RequestStreamingBidi, -callingFormEnum.RequestStreamingClient] %} +{% if calling_form not in [calling_form_enum.RequestStreamingBidi, +calling_form_enum.RequestStreamingClient] %} client.{{ sample.rpc|snake_case }}({{ sample.request|map(attribute="base")|join(", ") }}) {% else %} {# TODO: set up client streaming once some questions are answered #} @@ -169,55 +169,55 @@ client.{{ sample.rpc|snake_case }}([{{ sample.request|map(attribute="base")|join {# Setting 
up the method invocation is the responsibility of the caller: #} {# it's just easier to set up client side streaming and other things from outside this macro. #} -{% macro renderCallingForm(methodInvocationText, callingForm, callingFormEnum, responseStatements ) %} -{% if callingForm == callingFormEnum.Request %} -response = {{ methodInvocationText }} -{% for statement in responseStatements %} -{{ dispatchStatement(statement ) }} +{% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, response_statements ) %} +{% if calling_form == calling_form_enum.Request %} +response = {{ method_invocation_text }} +{% for statement in response_statements %} +{{ dispatch_statement(statement ) }} {% endfor %} -{% elif callingForm == callingFormEnum.RequestPagedAll %} -page_result = {{ methodInvocationText }} +{% elif calling_form == calling_form_enum.RequestPagedAll %} +page_result = {{ method_invocation_text }} for response in page_result: - {% for statement in responseStatements %} - {{ dispatchStatement(statement ) }} + {% for statement in response_statements %} + {{ dispatch_statement(statement ) }} {% endfor %} -{% elif callingForm == callingFormEnum.RequestPaged %} -page_result = {{ methodInvocationText }} +{% elif calling_form == calling_form_enum.RequestPaged %} +page_result = {{ method_invocation_text }} for page in page_result.pages(): for response in page: - {% for statement in responseStatements %} - {{ dispatchStatement(statement ) }} + {% for statement in response_statements %} + {{ dispatch_statement(statement ) }} {% endfor %} -{% elif callingForm in [callingFormEnum.RequestStreamingServer, - callingFormEnum.RequestStreamingBidi] %} -stream = {{ methodInvocationText }} +{% elif calling_form in [calling_form_enum.RequestStreamingServer, + calling_form_enum.RequestStreamingBidi] %} +stream = {{ method_invocation_text }} for response in stream: - {% for statement in responseStatements %} - {{ dispatchStatement(statement ) }} + {% for 
statement in response_statements %} + {{ dispatch_statement(statement ) }} {% endfor %} -{% elif callingForm == callingFormEnum.LongRunningRequestPromise %} -operation = {{ methodInvocationText }} +{% elif calling_form == calling_form_enum.LongRunningRequestPromise %} +operation = {{ method_invocation_text }} print("Waiting for operation to complete...") response = operation.result() -{% for statement in responseStatements %} -{{ dispatchStatement(statement ) }} +{% for statement in response_statements %} +{{ dispatch_statement(statement ) }} {% endfor %} {% endif %} {% endmacro %} -{% macro renderMethodName(methodName) %} -{{ methodName|snake_case -}} +{% macro render_method_name(method_name) %} +{{ method_name|snake_case -}} {% endmacro %} -{% macro renderMainBlock(methodName, requestBlock) %} +{% macro render_main_block(method_name, request_block) %} def main(): import argparse parser = argparse.ArgumentParser() {% with arg_list = [] %} -{% for attr in requestBlock if "input_parameter" in attr %} +{% for attr in request_block if "input_parameter" in attr %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default="{{ attr.value }}") @@ -225,10 +225,11 @@ def main(): {% endfor %} args = parser.parse_args() - sample_{{ renderMethodName(methodName) }}({{ arg_list|join(", ") }}) + sample_{{ render_method_name(method_name) }}({{ arg_list|join(", ") }}) if __name__ == "__main__": main() {% endwith %} {% endmacro %} +
diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 36acf4209486..b5ab7cd5758e 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -21,27 +21,27 @@ {# Note: this sample template is WILDLY INACCURATE AND INCOMPLETE #} {# It does not correctly enums, unions, top level attributes, or various other things #} {% import "feature_fragments.j2" as frags %} -{{ frags.sample_header(fileHeader, sample, callingForm) }} +{{ frags.sample_header(file_header, sample, calling_form) }} # [START {{ sample.id }}] {# python code is responsible for all transformations: all we do here is render #} -{% for importStatement in imports %} -{{ importStatement }} +{% for import_statement in imports %} +{{ import_statement }} {% endfor %} {# also need calling form #} -def sample_{{ frags.renderMethodName(sample.rpc) }}({{ frags.printInputParams(sample.request) }}): +def sample_{{ frags.render_method_name(sample.rpc) }}({{ frags.print_input_params(sample.request) }}): """{{ sample.description }}""" client = {{ sample.service.split(".")[-3:-1]| map("lower")| join("_") }}.{{ sample.service.split(".")[-1] }}Client() - {{ frags.renderRequest(sample.request)|indent }} -{% with methodCall = frags.renderMethodCall(sample, callingForm, callingFormEnum) %} - {{ frags.renderCallingForm(methodCall, callingForm, callingFormEnum, sample.response)|indent -}} + {{ frags.render_request(sample.request)|indent }} +{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response)|indent -}} {% endwith %} # [END {{ sample.id }}] -{{ frags.renderMainBlock(sample.rpc, sample.request) }} +{{ frags.render_main_block(sample.rpc, sample.request) }} diff --git 
a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index e32c5c27a40b..63d19c95e258 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -55,7 +55,7 @@ def test_render_attr_value(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderRequestAttr("mollusc", + {{ frags.render_request_attr("mollusc", {"field": "order", "value": "Molluscs.Cephalopoda.Coleoidea"}) }} ''', @@ -69,7 +69,7 @@ def test_render_attr_input_parameter(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderRequestAttr("squid", {"field": "species", + {{ frags.render_request_attr("squid", {"field": "species", "value": "Humboldt", "input_parameter": "species"}) }} ''', @@ -82,17 +82,17 @@ def test_render_attr_input_parameter(): def test_render_attr_file(): check_template( ''' - {% import "feature_fragments.j2" as frags %} - {{ frags.renderRequestAttr("classify_mollusc_request", - {"field": "mollusc_video", - "value": "path/to/mollusc/video.mkv", - "input_parameter" : "mollusc_video_path", - "value_is_file": True}) }} + {% import "feature_fragments.j2" as frags %} + {{ frags.render_request_attr("classify_mollusc_request", + {"field": "mollusc_video", + "value": "path/to/mollusc/video.mkv", + "input_parameter" : "mollusc_video_path", + "value_is_file": True}) }} ''', ''' - # mollusc_video_path = "path/to/mollusc/video.mkv" - with open(mollusc_video_path, "rb") as f: - classify_mollusc_request["mollusc_video"] = f.read() + # mollusc_video_path = "path/to/mollusc/video.mkv" + with open(mollusc_video_path, "rb") as f: + classify_mollusc_request["mollusc_video"] = f.read() ''') @@ -100,7 +100,7 @@ def test_render_request_basic(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderRequest([{"base": "cephalopod", + {{ 
frags.render_request([{"base": "cephalopod", "body": [{"field": "mantle_mass", "value": "10 kg", "input_parameter": "cephalopod_mass"}, @@ -150,9 +150,11 @@ def test_render_print(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderPrint(["Mollusc"]) }} + {{ frags.render_print(["Mollusc"]) }} ''', - '\nprint("Mollusc")\n' + ''' + print("Mollusc") + ''' ) @@ -160,9 +162,11 @@ def test_render_print_args(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderPrint(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} + {{ frags.render_print(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} ''', - '\nprint("$resp {} {}".format(response.squids, response.clams))\n' + ''' + print("$resp {} {}".format(response.squids, response.clams)) + ''' ) @@ -170,9 +174,11 @@ def test_render_comment(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderComment(["Mollusc"]) }} + {{ frags.render_comment(["Mollusc"]) }} ''', - '\n# Mollusc\n' + ''' + # Mollusc + ''' ) @@ -180,9 +186,11 @@ def test_render_comment_args(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderComment(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} + {{ frags.render_comment(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} ''', - '\n# $resp response.squids response.clams\n' + ''' + # $resp response.squids response.clams + ''' ) @@ -190,9 +198,11 @@ def test_define(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderDefine("squid=humboldt") }} + {{ frags.render_define("squid=humboldt") }} ''', - '\nsquid = humboldt\n' + ''' + squid = humboldt + ''' ) @@ -200,9 +210,11 @@ def test_define_resp(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderDefine("squid=$resp.squid") }} + {{ frags.render_define("squid=$resp.squid") }} ''', - '\nsquid = response.squid\n' + ''' + squid = response.squid + ''' ) @@ -210,9 +222,11 @@ def 
test_dispatch_print(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatchStatement({"print" : ["Squid"] }) }} -''', - '\nprint("Squid")\n' + {{ frags.dispatch_statement({"print" : ["Squid"] }) }} + ''', + ''' + print("Squid") + ''' ) @@ -220,9 +234,11 @@ def test_dispatch_comment(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatchStatement({"comment" : ["Squid"] }) }} + {{ frags.dispatch_statement({"comment" : ["Squid"] }) }} ''', - '\n# Squid\n' + ''' + # Squid + ''' ) @@ -246,7 +262,7 @@ def test_dispatch_write_file(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatchStatement({"write_file": + {{ frags.dispatch_statement({"write_file": {"filename": ["specimen-%s", "$resp.species"], "contents": "$resp.photo"}})}} @@ -263,7 +279,7 @@ def test_collection_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderCollectionLoop({"collection": "$resp.molluscs", + {{ frags.render_collection_loop({"collection": "$resp.molluscs", "variable": "m", "body": {"print": ["Mollusc: %s", "m"]}})}} ''', @@ -278,7 +294,7 @@ def test_dispatch_collection_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatchStatement({"loop": {"collection": "molluscs", + {{ frags.dispatch_statement({"loop": {"collection": "molluscs", "variable": "m", "body": {"print": ["Mollusc: %s", "m"]}}}) }}''', ''' @@ -292,7 +308,7 @@ def test_map_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMapLoop({"map": "$resp.molluscs", + {{ frags.render_map_loop({"map": "$resp.molluscs", "key":"cls", "value":"example", "body": {"print": ["A %s is a %s", "example", "cls"] }}) @@ -308,10 +324,11 @@ def test_map_loop_no_key(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMapLoop({"map": "$resp.molluscs", + {{ frags.render_map_loop({"map": "$resp.molluscs", 
"value":"example", "body": {"print": ["A %s is a mollusc", "example"] }}) - }}''', + }} + ''', ''' for example in response.molluscs.values(): print("A {} is a mollusc".format(example)) @@ -323,10 +340,11 @@ def test_map_loop_no_value(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMapLoop({"map": "$resp.molluscs", + {{ frags.render_map_loop({"map": "$resp.molluscs", "key":"cls", "body": {"print": ["A %s is a mollusc", "cls"] }}) - }}''', + }} + ''', ''' for cls in response.molluscs.keys(): print("A {} is a mollusc".format(cls)) @@ -338,7 +356,7 @@ def test_dispatch_map_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatchStatement({"loop":{"map": "molluscs", + {{ frags.dispatch_statement({"loop":{"map": "molluscs", "key":"cls", "value":"example", "body": { @@ -356,7 +374,7 @@ def test_print_input_params(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.printInputParams([{"base": "squid", + {{ frags.print_input_params([{"base": "squid", "body": [{"field": "mass", "value": "10 kg", "input_parameter": "mass"}, @@ -372,14 +390,15 @@ def test_print_input_params(): "input_parameter": "color"}]}, ]) }} ''', - "\nmass, length, color" + ''' + mass, length, color''' ) CALLING_FORM_TEMPLATE_TEST_STR = ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderCallingForm("TEST_INVOCATION_TXT", callingForm, - callingFormEnum, + {{ frags.render_calling_form("TEST_INVOCATION_TXT", calling_form, + calling_form_enum, [{"print": ["Test print statement"]}]) }} ''' @@ -391,8 +410,8 @@ def test_render_calling_form_request(): print("Test print statement") ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.Request) + calling_form_enum=CallingForm, + calling_form=CallingForm.Request) def test_render_calling_form_paged_all(): @@ -403,8 +422,8 @@ def test_render_calling_form_paged_all(): print("Test print statement") ''', - callingFormEnum=CallingForm, - 
callingForm=CallingForm.RequestPagedAll) + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPagedAll) def test_render_calling_form_paged(): @@ -416,8 +435,8 @@ def test_render_calling_form_paged(): print("Test print statement") ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.RequestPaged) + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPaged) def test_render_calling_form_streaming_server(): @@ -428,8 +447,8 @@ def test_render_calling_form_streaming_server(): print("Test print statement") ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.RequestStreamingServer) + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingServer) def test_render_calling_form_streaming_bidi(): @@ -440,8 +459,8 @@ def test_render_calling_form_streaming_bidi(): print("Test print statement") ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.RequestStreamingBidi) + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi) def test_render_calling_form_longrunning(): @@ -455,8 +474,8 @@ def test_render_calling_form_longrunning(): print("Test print statement") ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.LongRunningRequestPromise) + calling_form_enum=CallingForm, + calling_form=CallingForm.LongRunningRequestPromise) def test_render_method_call_basic(): @@ -465,16 +484,16 @@ def test_render_method_call_basic(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMethodCall({"rpc": "CategorizeMollusc", "request": [{"base": "video"}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": [{"base": "video"}, {"base": "audio"}, {"base": "guess"}]}, - callingForm, callingFormEnum) }} + calling_form, calling_form_enum) }} ''', ''' client.categorize_mollusc(video, audio, guess) ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.Request + calling_form_enum=CallingForm, + calling_form=CallingForm.Request ) @@ -484,14 
+503,14 @@ def test_render_method_call_bidi(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMethodCall({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, - callingForm, callingFormEnum) }} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, + calling_form, calling_form_enum) }} ''', ''' client.categorize_mollusc([video]) ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.RequestStreamingBidi + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi ) @@ -501,14 +520,14 @@ def test_render_method_call_client(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMethodCall({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, - callingForm, callingFormEnum) }} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, + calling_form, calling_form_enum) }} ''', ''' client.categorize_mollusc([video]) ''', - callingFormEnum=CallingForm, - callingForm=CallingForm.RequestStreamingClient + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingClient ) @@ -516,7 +535,7 @@ def test_main_block(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.renderMainBlock("ListMolluscs", [{"field": "list_molluscs.order", + {{ frags.render_main_block("ListMolluscs", [{"field": "list_molluscs.order", "value": "coleoidea", "input_parameter": "order"}, {"field ": "list_molluscs.mass", From d037423ccf4a507747f4a3ae40e517db07e76cf8 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 16 Jul 2019 09:32:24 -0700 Subject: [PATCH 0130/1339] Add an autopep8 CI task and run autopep8 on all py files (#146) --- packages/gapic-generator/.circleci/config.yml | 27 +++++++++++++++++++ .../gapic-generator/gapic/schema/metadata.py | 2 +- .../gapic-generator/gapic/schema/naming.py | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 8 +++--- 
packages/gapic-generator/gapic/utils/lines.py | 2 +- packages/gapic-generator/gapic/utils/rst.py | 2 +- .../tests/unit/samplegen/test_integration.py | 2 +- .../tests/unit/schema/test_api.py | 6 ++--- .../tests/unit/schema/test_naming.py | 12 ++++----- .../tests/unit/schema/wrappers/test_enums.py | 2 +- .../unit/schema/wrappers/test_message.py | 4 +-- .../tests/unit/schema/wrappers/test_method.py | 8 +++--- .../unit/schema/wrappers/test_service.py | 4 +-- .../tests/unit/utils/test_lines.py | 2 +- 14 files changed, 55 insertions(+), 28 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index d94a883d5709..0ae82ff953e4 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -4,6 +4,10 @@ workflows: version: 2 tests: jobs: + - style-check: + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - unit-3.6: filters: tags: @@ -53,6 +57,7 @@ workflows: - publish_package: requires: - showcase + - style-check filters: branches: ignore: /.*/ @@ -61,6 +66,7 @@ workflows: - publish_image: requires: - showcase + - style-check filters: branches: ignore: /.*/ @@ -283,3 +289,24 @@ jobs: name: Submit coverage data to codecov. command: codecov when: always + style-check: + docker: + - image: python:3.6-alpine + steps: + - checkout + - run: + name: Install git + command: | + apk add git + - run: + name: Install autopep8 + command: | + pip install autopep8 + - run: + name: Format files + command: | + find gapic tests -name "*.py" | xargs autopep8 --in-place + - run: + name: Check diff + command: | + git diff --ignore-submodules=all --color --exit-code . 
diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index e8ad6991a1d2..c6fabd181d59 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -280,7 +280,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Metadata': """ return dataclasses.replace(self, address=self.address.with_context(collisions=collisions), - ) + ) @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 7ccfb7783288..d2bf329a7116 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -47,7 +47,7 @@ def __post_init__(self): def build(cls, *file_descriptors: descriptor_pb2.FileDescriptorProto, opts: options.Options = options.Options(), - ) -> 'Naming': + ) -> 'Naming': """Return a full Naming instance based on these file descriptors. This is pieced together from the proto package names as well as the diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 4c45d21b2904..5e8b0eabf84d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -185,7 +185,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': skip_fields=True, ) if self.message else None, enum=self.enum.with_context(collisions=collisions) - if self.enum else None, + if self.enum else None, meta=self.meta.with_context(collisions=collisions), ) @@ -279,7 +279,7 @@ def get_field(self, *field_path: str, def with_context(self, *, collisions: FrozenSet[str], skip_fields: bool = False, - ) -> 'MessageType': + ) -> 'MessageType': """Return a derivative of this message with the provided context. This method is used to address naming collisions. 
The returned @@ -345,7 +345,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'EnumType': """ return dataclasses.replace(self, meta=self.meta.with_context(collisions=collisions), - ) + ) @dataclasses.dataclass(frozen=True) @@ -602,7 +602,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': input=self.input.with_context(collisions=collisions), output=self.output.with_context(collisions=collisions), meta=self.meta.with_context(collisions=collisions), - ) + ) @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 8d26e1bafccb..680b583e58cf 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -80,7 +80,7 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: initial = textwrap.wrap(first, break_long_words=False, width=width - offset, - ) + ) # Strip the first \n from the text so it is not misidentified as an # intentionally short line below. text = text.replace('\n', ' ', 1) diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index 9b845c960ae5..cd47e82f7eba 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -46,7 +46,7 @@ def rst(text: str, width: int = 72, indent: int = 0, nl: bool = None, indent=indent, offset=indent + 3, width=width - indent, - ) + ) else: # Convert from CommonMark to ReStructured Text. 
answer = pypandoc.convert_text(text, 'rst', diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 0059e0bccf96..8366c8c34e6e 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -29,7 +29,7 @@ "server_streaming"] DummyMethod = namedtuple("DummyMethod", dummy_method_fields) -DummyMethod.__new__.__defaults__ = (False,)*len(dummy_method_fields) +DummyMethod.__new__.__defaults__ = (False,) * len(dummy_method_fields) DummyService = namedtuple("DummyService", ["methods"]) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index e1a355da373b..6cd9dbe9791e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -196,7 +196,7 @@ def test_proto_builder_constructor(): pb = api._ProtoBuilder(fdp, file_to_generate=True, naming=make_naming(), - ) + ) # There should be three total calls to load the different types # of children. 
@@ -569,7 +569,7 @@ def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, enums: Sequence[descriptor_pb2.EnumDescriptorProto] = (), services: Sequence[descriptor_pb2.ServiceDescriptorProto] = (), locations: Sequence[descriptor_pb2.SourceCodeInfo.Location] = (), - ) -> descriptor_pb2.FileDescriptorProto: + ) -> descriptor_pb2.FileDescriptorProto: return descriptor_pb2.FileDescriptorProto( name=name, package=package, @@ -591,7 +591,7 @@ def make_message_pb2( def make_field_pb2(name: str, number: int, type: int = 11, # 11 == message type_name: str = None, - ) -> descriptor_pb2.FieldDescriptorProto: + ) -> descriptor_pb2.FieldDescriptorProto: return descriptor_pb2.FieldDescriptorProto( name=name, number=number, diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 4fcbf609fe1e..0463fb2704f4 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -150,7 +150,7 @@ def test_cli_override_name(): proto1 = FileDesc(package='google.cloud.videointelligence.v1') n = naming.Naming.build(proto1, opts=options.Options(name='Video Intelligence'), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Video Intelligence' assert n.version == 'v1' @@ -161,7 +161,7 @@ def test_cli_override_name_underscores(): proto1 = FileDesc(package='google.cloud.videointelligence.v1') n = naming.Naming.build(proto1, opts=options.Options(name='video_intelligence'), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Video Intelligence' assert n.version == 'v1' @@ -172,7 +172,7 @@ def test_cli_override_namespace(): proto1 = FileDesc(package='google.spanner.v1') n = naming.Naming.build(proto1, opts=options.Options(namespace=('google', 'cloud')), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Spanner' assert n.version == 'v1' @@ -183,7 +183,7 @@ def 
test_cli_override_namespace_dotted(): proto1 = FileDesc(package='google.spanner.v1') n = naming.Naming.build(proto1, opts=options.Options(namespace=('google.cloud',)), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Spanner' assert n.version == 'v1' @@ -194,7 +194,7 @@ def test_cli_override_name_and_namespace(): proto1 = FileDesc(package='google.translation.v2') n = naming.Naming.build(proto1, opts=options.Options(namespace=('google', 'cloud'), name='translate'), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Translate' assert n.version == 'v2' @@ -205,7 +205,7 @@ def test_cli_override_name_and_namespace_versionless(): proto1 = FileDesc(package='google.translation') n = naming.Naming.build(proto1, opts=options.Options(namespace=('google', 'cloud'), name='translate'), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Translate' assert not n.version diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 60339beaac8b..37d0aeacd586 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -42,7 +42,7 @@ def test_enum_ident(): def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', values: Tuple[str, int] = (), meta: metadata.Metadata = None, - ) -> wrappers.EnumType: + ) -> wrappers.EnumType: enum_value_pbs = [ descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) for i in values diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 6f8fd0336228..14c7d001bec0 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -126,7 +126,7 @@ def test_get_field_nonterminal_repeated_error(): def 
make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, - ) -> wrappers.MessageType: + ) -> wrappers.MessageType: message_pb = descriptor_pb2.DescriptorProto( name=name, field=[i.field_pb for i in fields], @@ -167,7 +167,7 @@ def make_field(name: str, repeated: bool = False, def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', values: Tuple[str, int] = (), meta: metadata.Metadata = None, - ) -> wrappers.EnumType: + ) -> wrappers.EnumType: enum_value_pbs = [ descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) for i in values diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 3a05c32ff39e..4b39b669cb68 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -72,7 +72,7 @@ def test_method_client_output_paged(): method = make_method('ListFoos', input_message=input_msg, output_message=output_msg, - ) + ) assert method.paged_result_field == paged assert method.client_output.ident.name == 'ListFoosPager' @@ -91,7 +91,7 @@ def test_method_paged_result_field_not_first(): method = make_method('ListFoos', input_message=input_msg, output_message=output_msg, - ) + ) assert method.paged_result_field == paged @@ -108,7 +108,7 @@ def test_method_paged_result_field_no_page_field(): method = make_method('ListFoos', input_message=input_msg, output_message=output_msg, - ) + ) assert method.paged_result_field is None @@ -223,7 +223,7 @@ def make_method( def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', fields: Sequence[wrappers.Field] = (), - ) -> wrappers.MessageType: + ) -> wrappers.MessageType: message_pb = descriptor_pb2.DescriptorProto( name=name, field=[i.field_pb for i in fields], diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index b5eb04b139fa..80ad6f8952ca 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -194,7 +194,7 @@ def get_method(name: str, in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), http_rule: http_pb2.HttpRule = None, method_signature: str = '', - ) -> wrappers.Method: + ) -> wrappers.Method: input_ = get_message(in_type, fields=in_fields) output = get_message(out_type) lro = None @@ -227,7 +227,7 @@ def get_method(name: str, def get_message(dot_path: str, *, fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), - ) -> wrappers.MessageType: + ) -> wrappers.MessageType: # Pass explicit None through (for lro_metadata). if dot_path is None: return None diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 69218cc64bf3..65df6f1c36b6 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -71,7 +71,7 @@ def test_wrap_strips(): def test_wrap_subsequent_offset(): assert lines.wrap('foo bar baz', width=5, offset=0, indent=2, - ) == 'foo\n bar\n baz' + ) == 'foo\n bar\n baz' def test_wrap_initial_offset(): From ba0223292d88dff44509858dee497eeb434925d6 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 18 Jul 2019 10:49:21 -0700 Subject: [PATCH 0131/1339] Test and impl for generating samplegen manifest structure (#148) Generate the samplegen manifest describing the relationship between generated sample files and the environment in which they should be run. Used for sample tester. Create some classes that engage in custom, domain specific yaml rendering. Render the sample manifest in test. 
--- packages/gapic-generator/.coveragerc | 3 + .../gapic/samplegen/__init__.py | 2 + .../gapic/samplegen/samplegen.py | 235 +++++++++++++----- .../gapic-generator/gapic/samplegen/yaml.py | 101 ++++++++ packages/gapic-generator/setup.py | 1 + .../tests/unit/samplegen/test_integration.py | 7 +- .../tests/unit/samplegen/test_samplegen.py | 122 +++++++++ 7 files changed, 405 insertions(+), 66 deletions(-) create mode 100644 packages/gapic-generator/gapic/samplegen/yaml.py diff --git a/packages/gapic-generator/.coveragerc b/packages/gapic-generator/.coveragerc index 008cfe8d3280..70d8fb79ce87 100644 --- a/packages/gapic-generator/.coveragerc +++ b/packages/gapic-generator/.coveragerc @@ -3,6 +3,7 @@ branch = True omit = gapic/cli/*.py *_pb2.py + # Abstract methods play hob with coverage [report] fail_under = 100 @@ -14,3 +15,5 @@ exclude_lines = Impossible; skip coverage checks. # Ignore debug-only repr def __repr__ + # Abstract methods by definition are not invoked + @abstractmethod \ No newline at end of file diff --git a/packages/gapic-generator/gapic/samplegen/__init__.py b/packages/gapic-generator/gapic/samplegen/__init__.py index a793a79c5fef..15da4b948082 100644 --- a/packages/gapic-generator/gapic/samplegen/__init__.py +++ b/packages/gapic-generator/gapic/samplegen/__init__.py @@ -14,8 +14,10 @@ from gapic.samplegen import samplegen from gapic.samplegen import utils +from gapic.samplegen import yaml __all__ = ( 'samplegen', 'utils', + 'yaml', ) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index ea83c9ea30d8..3f7feb4cf141 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -16,12 +16,13 @@ import jinja2 import keyword import re +import time -from gapic.samplegen import utils +from gapic.samplegen import utils, yaml -from collections import (defaultdict, namedtuple) -from textwrap import dedent -from typing 
import (Dict, List, Mapping, Set, Tuple) + +from collections import defaultdict, namedtuple +from typing import Dict, List, Mapping, Optional, Set, Tuple # Outstanding issues: # * In real sample configs, many variables are @@ -29,26 +30,33 @@ MIN_SCHEMA_VERSION = (1, 2, 0) -VALID_CONFIG_TYPE = 'com.google.api.codegen.SampleConfigProto' +VALID_CONFIG_TYPE = "com.google.api.codegen.SampleConfigProto" # TODO: read in copyright and license from files. -FILE_HEADER: Dict[str, str] = {"copyright": "TODO: add a copyright", - "license": "TODO: add a license"} +FILE_HEADER: Dict[str, str] = { + "copyright": "TODO: add a copyright", + "license": "TODO: add a license", +} RESERVED_WORDS = frozenset( - itertools.chain(keyword.kwlist, - dir(__builtins__), - {"client", - "f", # parameter used in file I/O statements - "operation", # temporary used in LROs - "page", # used in paginated responses - "page_result", # used in paginated responses - "response", # basic 'response' - "stream", # used in server and bidi streaming - })) + itertools.chain( + keyword.kwlist, + dir(__builtins__), + { + "client", + "f", # parameter used in file I/O statements + "operation", # temporary used in LROs + "page", # used in paginated responses + "page_result", # used in paginated responses + "response", # basic 'response' + "stream", # used in server and bidi streaming + }, + ) +) TEMPLATE_NAME = "sample.py.j2" + TransformedRequest = namedtuple("TransformedRequest", ["base", "body"]) @@ -138,9 +146,9 @@ def __init__(self): # TODO: this will eventually need the method name and the proto file # so that it can do the correct value transformation for enums. 
- def validate_and_transform_request(self, - calling_form: utils.CallingForm, - request: List[Mapping[str, str]]) -> List[TransformedRequest]: + def validate_and_transform_request( + self, calling_form: utils.CallingForm, request: List[Mapping[str, str]] + ) -> List[TransformedRequest]: """Validates and transforms the "request" block from a sample config. In the initial request, each dict has a "field" key that maps to a dotted @@ -212,7 +220,9 @@ def validate_and_transform_request(self, if not field: raise InvalidRequestSetup( "No field attribute found in request setup assignment: {}".format( - field_assignment_copy)) + field_assignment_copy + ) + ) # TODO: properly handle top level fields # E.g. @@ -224,24 +234,37 @@ def validate_and_transform_request(self, m = re.match(r"^([a-zA-Z]\w*)\.([a-zA-Z]\w*)$", field) if not m: raise InvalidRequestSetup( - "Malformed request attribute description: {}".format(field)) + "Malformed request attribute description: {}".format(field) + ) base, attr = m.groups() if base in RESERVED_WORDS: raise ReservedVariableName( - "Tried to define '{}', which is a reserved name".format(base)) + "Tried to define '{}', which is a reserved name".format( + base) + ) field_assignment_copy["field"] = attr base_param_to_attrs[base].append(field_assignment_copy) - if (calling_form in {utils.CallingForm.RequestStreamingClient, - utils.CallingForm.RequestStreamingBidi} and - len(base_param_to_attrs) > 1): - raise InvalidRequestSetup(("There can be at most 1 base request in a sample" - " for a method with client side streaming")) - - return [TransformedRequest(base, body) - for base, body in base_param_to_attrs.items()] + if ( + calling_form + in { + utils.CallingForm.RequestStreamingClient, + utils.CallingForm.RequestStreamingBidi, + } + and len(base_param_to_attrs) > 1 + ): + raise InvalidRequestSetup( + ( + "There can be at most 1 base request in a sample" + " for a method with client side streaming" + ) + ) + + return [ + TransformedRequest(base, 
body) for base, body in base_param_to_attrs.items() + ] def validate_response(self, response): """Validates a "response" block from a sample config. @@ -253,8 +276,8 @@ def validate_response(self, response): Dispatches statements to sub-validators. Args: - response: list[dict{str:?}]: The structured data representing - the sample's response. + response: list[dict{str:Any}]: The structured data representing + the sample's response. Raises: InvalidStatement: If an unexpected key is found in a statement dict @@ -269,8 +292,8 @@ def validate_response(self, response): keyword, body = next(iter(statement.items())) validater = self.STATEMENT_DISPATCH_TABLE.get(keyword) if not validater: - raise InvalidStatement("Invalid statement keyword: {}" - .format(keyword)) + raise InvalidStatement( + "Invalid statement keyword: {}".format(keyword)) validater(self, body) @@ -282,7 +305,9 @@ def _handle_lvalue(self, lval): """ if lval in RESERVED_WORDS: raise ReservedVariableName( - "Tried to define a variable with reserved name: {}".format(lval)) + "Tried to define a variable with reserved name: {}".format( + lval) + ) # Even though it's valid python to reassign variables to any rvalue, # the samplegen spec prohibits this. @@ -310,14 +335,17 @@ def _validate_format(self, body: List[str]): num_prints = fmt_str.count("%s") if num_prints != len(body) - 1: raise MismatchedFormatSpecifier( - "Expected {} expresssions in format string but received {}" - .format(num_prints, len(body) - 1)) + "Expected {} expresssions in format string but received {}".format( + num_prints, len(body) - 1 + ) + ) for expression in body[1:]: var = expression.split(".")[0] if var not in self.var_defs_: - raise UndefinedVariableReference("Reference to undefined variable: {}" - .format(var)) + raise UndefinedVariableReference( + "Reference to undefined variable: {}".format(var) + ) def _validate_define(self, body: str): """"Validates 'define' statements. 
@@ -348,8 +376,9 @@ def _validate_define(self, body: str): rval_base = rval.split(".")[0] if not rval_base in self.var_defs_: - raise UndefinedVariableReference("Reference to undefined variable: {}" - .format(rval_base)) + raise UndefinedVariableReference( + "Reference to undefined variable: {}".format(rval_base) + ) def _validate_write_file(self, body): """Validate 'write_file' statements. @@ -382,8 +411,9 @@ def _validate_write_file(self, body): # TODO: check the rest of the elements for valid subfield attribute base = contents_var.split(".")[0] if base not in self.var_defs_: - raise UndefinedVariableReference("Reference to undefined variable: {}" - .format(base)) + raise UndefinedVariableReference( + "Reference to undefined variable: {}".format(base) + ) def _validate_loop(self, body): """Validates loop headers and statement bodies. @@ -428,8 +458,10 @@ def _validate_loop(self, body): # if collection_name.startswith("."): # collection_name = "$resp" + collection_name if collection_name not in self.var_defs_: - raise UndefinedVariableReference("Reference to undefined variable: {}" - .format(collection_name)) + raise UndefinedVariableReference( + "Reference to undefined variable: {}".format( + collection_name) + ) var = body[self.VAR_KWORD] self._handle_lvalue(var) @@ -438,13 +470,16 @@ def _validate_loop(self, body): segments -= map_args segments -= {self.KEY_KWORD, self.VAL_KWORD} if segments: - raise BadLoop("Unexpected keywords in loop statement: {}" - .format(segments)) + raise BadLoop( + "Unexpected keywords in loop statement: {}".format( + segments) + ) map_name_base = body[self.MAP_KWORD].split(".")[0] if map_name_base not in self.var_defs_: - raise UndefinedVariableReference("Reference to undefined variable: {}" - .format(map_name_base)) + raise UndefinedVariableReference( + "Reference to undefined variable: {}".format(map_name_base) + ) key = body.get(self.KEY_KWORD) if key: @@ -478,9 +513,9 @@ def _validate_loop(self, body): } -def 
generate_sample(sample, - env: jinja2.environment.Environment, - api_schema) -> Tuple[str, jinja2.environment.TemplateStream]: +def generate_sample( + sample, id_is_unique: bool, env: jinja2.environment.Environment, api_schema +) -> Tuple[str, jinja2.environment.TemplateStream]: sample_template = env.get_template(TEMPLATE_NAME) service_name = sample["service"] @@ -492,22 +527,96 @@ def generate_sample(sample, rpc = service.methods.get(rpc_name) if not rpc: raise RpcMethodNotFound( - "Could not find rpc in service {}: {}".format(service_name, rpc_name)) + "Could not find rpc in service {}: {}".format( + service_name, rpc_name) + ) calling_form = utils.CallingForm.method_default(rpc) v = Validator() - sample["request"] = v.validate_and_transform_request(calling_form, - sample["request"]) + sample["request"] = v.validate_and_transform_request( + calling_form, sample["request"] + ) v.validate_response(sample["response"]) - sample_id = sample["id"] - sample_fpath = sample_id + str(calling_form) + ".py" + sample_fpath = ( + sample["id"] + (str(calling_form) if not id_is_unique else "") + ".py" + ) sample["package_name"] = api_schema.naming.warehouse_package_name - return sample_fpath, sample_template.stream(file_header=FILE_HEADER, - sample=sample, - imports=[], - calling_form=calling_form, - calling_form_enum=utils.CallingForm) + return ( + sample_fpath, + sample_template.stream( + file_header=FILE_HEADER, + sample=sample, + imports=[], + calling_form=calling_form, + calling_form_enum=utils.CallingForm, + ), + ) + + +def generate_manifest(fpaths_and_samples, api_schema, *, manifest_time: int = None): + """Generate a samplegen manifest for use by sampletest + + Args: + fpaths_and_samples (Iterable[Tuple[str, Mapping[str, Any]]]): + The file paths and samples to be listed in the manifest + + api_schema (~.api.API): An API schema object. + manifest_time (int): Optional. An override for the timestamp in the name of the manifest filename. + Primarily used for testing. 
+ + Returns: + Tuple[str, Dict[str,Any]]: The filename of the manifest and the manifest data as a dictionary. + + """ + all_info = [ + yaml.KeyVal("type", "manifest/samples"), + yaml.KeyVal("schema_version", "3"), + yaml.Map( + name="python", + anchor_name="python", + elements=[ + yaml.KeyVal("environment", "python"), + yaml.KeyVal("bin", "python3"), + # TODO: make this the real sample base directory + yaml.KeyVal("base_path", "sample/base/directory"), + yaml.KeyVal("invocation", "'{bin} {path} @args'"), + ], + ), + yaml.Collection( + name="samples", + elements=[ + [ + yaml.Anchor("python"), + yaml.KeyVal("sample", sample["id"]), + yaml.KeyVal("path", "'{base_path}/%s'" % fpath), + yaml.KeyVal("region_tag", sample.get("region_tag", "")), + ] + for fpath, sample in fpaths_and_samples + ], + ), + ] + + dt = time.gmtime(manifest_time) + manifest_fname_template = ( + "{api}.{version}.python." + "{year:04d}{month:02d}{day:02d}." + "{hour:02d}{minute:02d}{second:02d}." + "manifest.yaml" + ) + + manifest_fname = manifest_fname_template.format( + api=api_schema.naming.name, + version=api_schema.naming.version, + year=dt.tm_year, + month=dt.tm_mon, + day=dt.tm_mday, + hour=dt.tm_hour, + minute=dt.tm_min, + second=dt.tm_sec, + ) + + return manifest_fname, all_info diff --git a/packages/gapic-generator/gapic/samplegen/yaml.py b/packages/gapic-generator/gapic/samplegen/yaml.py new file mode 100644 index 000000000000..4959889fb90a --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen/yaml.py @@ -0,0 +1,101 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses + +from abc import abstractmethod, ABC +from textwrap import indent +from typing import List, Optional + +"""Module containing classes used for dramatically simplified yaml rendering. + +The yaml used for generating the samplegen manifest is simple and self contained, +The classes and rendering logic in this module are highly domain specific: +it is not advised for general use. +""" + + +class Element(ABC): + """Abstract element that can be rendered.""" + INDENT_SPACES: int = 2 + + @abstractmethod + def render(self, spaces: int = 0) -> str: + return "" + + +@dataclasses.dataclass(frozen=True) +class KeyVal(Element): + """A single key/value entry.""" + key: str + val: str + + def render(self, spaces: int = 0) -> str: + whitespace = " " * spaces + return f"{whitespace}{self.key}: {self.val}" + + +@dataclasses.dataclass() +class Collection(Element): + """An ordered list of subobjects.""" + name: str + elements: List[List[Element]] + + def render(self, spaces: int = 0) -> str: + # This gives us output like + # - cephalopod: squid + # bivalve: clam + # gastropod: whelk + # + # instead of + # - cephalopod: squid + # bivalve: clam + # gastropod: whelk + whitespace = " " * spaces + return f"{self.name}:\n" + "\n".join( + indent( + "-" + + "\n".join(e.render(spaces=spaces + self.INDENT_SPACES) for e in l)[ + 1: + ], + " " * (spaces), + ) + for l in self.elements + ) + + +@dataclasses.dataclass() +class Anchor(Element): + """An anchor to a map.""" + target: str + + def render(self, spaces: int = 0) -> str: + whitespace = " " * spaces + return 
f"{whitespace}<<: *{self.target}" + + +@dataclasses.dataclass() +class Map(Element): + """A named collection with a list of attributes.""" + name: str + anchor_name: Optional[str] + elements: List[Element] + + def render(self, spaces: int = 0): + maybe_anchor = (" &" + self.anchor_name) if self.anchor_name else "" + element_str = "\n".join( + e.render(spaces=spaces + self.INDENT_SPACES) for e in self.elements + ) + whitespace = " " * spaces + return f"{whitespace}{self.name}:{maybe_anchor}\n{element_str}" diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f7f78241abbe..e86d76154b5e 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -46,6 +46,7 @@ 'jinja2 >= 2.10', 'protobuf >= 3.7.1', 'pypandoc >= 1.4', + 'PyYAML >= 5.1.1', ), extras_require={ ':python_version<"3.7"': ('dataclasses >= 0.4',), diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 8366c8c34e6e..8a71d1f64522 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -72,7 +72,8 @@ def test_generate_sample_basic(): "value_is_file": True}], "response": [{"print": ["Mollusc is a %s", "$resp.taxonomy"]}]} - fpath, template_stream = samplegen.generate_sample(sample, env, schema) + fpath, template_stream = samplegen.generate_sample( + sample, True, env, schema) sample_str = "".join(iter(template_stream)) assert sample_str == '''# TODO: add a copyright @@ -123,7 +124,7 @@ def test_generate_sample_service_not_found(): sample = {"service": "Mollusc"} with pytest.raises(samplegen.UnknownService): - samplegen.generate_sample(sample, env, schema) + samplegen.generate_sample(sample, True, env, schema) def test_generate_sample_rpc_not_found(): @@ -132,4 +133,4 @@ def test_generate_sample_rpc_not_found(): sample = {"service": "Mollusc", "rpc": 
"Classify"} with pytest.raises(samplegen.RpcMethodNotFound): - samplegen.generate_sample(sample, env, schema) + samplegen.generate_sample(sample, True, env, schema) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 542eaa1803d7..cd73432bd3cd 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -13,9 +13,13 @@ # limitations under the License. import pytest +import yaml from collections import namedtuple +from textwrap import dedent import gapic.samplegen.samplegen as samplegen +import gapic.samplegen.yaml as gapic_yaml + from gapic.samplegen import utils @@ -523,3 +527,121 @@ def test_coerce_response_name(): # Don't really need a test, but it shuts up code coverage. assert samplegen.coerce_response_name("$resp.squid") == "response.squid" assert samplegen.coerce_response_name("mollusc.squid") == "mollusc.squid" + + +def test_generate_manifest(): + DummyNaming = namedtuple("DummyNaming", ["name", "version"]) + DummyApiSchema = namedtuple("DummyApiSchema", ["naming"]) + + fpath_to_dummy_sample = { + "squid_fpath.py": {"id": "squid_sample"}, + "clam_fpath.py": {"id": "clam_sample", + "region_tag": "giant_clam_sample"}, + } + + fname, info = samplegen.generate_manifest( + fpath_to_dummy_sample.items(), + DummyApiSchema(DummyNaming("Mollusc", "v1")), + # Empirically derived number such that the + # corresponding time_struct tests the zero + # padding in the returned filename. 
+ manifest_time=4486525628 + ) + + assert fname == "Mollusc.v1.python.21120304.090708.manifest.yaml" + + expected_info = [ + gapic_yaml.KeyVal("type", "manifest/samples"), + gapic_yaml.KeyVal("schema_version", "3"), + gapic_yaml.Map(name="python", + anchor_name="python", + elements=[ + gapic_yaml.KeyVal( + "environment", "python"), + gapic_yaml.KeyVal( + "bin", "python3"), + gapic_yaml.KeyVal( + "base_path", "sample/base/directory"), + gapic_yaml.KeyVal( + "invocation", "'{bin} {path} @args'"), + ]), + gapic_yaml.Collection(name="samples", + elements=[ + [ + gapic_yaml.Anchor( + "python"), + gapic_yaml.KeyVal( + "sample", "squid_sample"), + gapic_yaml.KeyVal( + "path", "'{base_path}/squid_fpath.py'"), + gapic_yaml.KeyVal( + "region_tag", ""), + ], + [ + gapic_yaml.Anchor("python"), + gapic_yaml.KeyVal( + "sample", "clam_sample"), + gapic_yaml.KeyVal( + "path", "'{base_path}/clam_fpath.py'"), + gapic_yaml.KeyVal( + "region_tag", "giant_clam_sample") + ], + ]) + ] + + assert info == expected_info + + expected_rendering = dedent(""" + type: manifest/samples + schema_version: 3 + python: &python + environment: python + bin: python3 + base_path: sample/base/directory + invocation: '{bin} {path} @args' + samples: + - <<: *python + sample: squid_sample + path: '{base_path}/squid_fpath.py' + region_tag: + - <<: *python + sample: clam_sample + path: '{base_path}/clam_fpath.py' + region_tag: giant_clam_sample""".lstrip("\n")) + + rendered_yaml = "\n".join(e.render() for e in info) + assert rendered_yaml == expected_rendering + + expected_parsed_manifest = { + "type": "manifest/samples", + "schema_version": 3, + "python": { + "environment": "python", + "bin": "python3", + "base_path": "sample/base/directory", + "invocation": "{bin} {path} @args", + }, + "samples": [ + { + "environment": "python", + "bin": "python3", + "base_path": "sample/base/directory", + "invocation": "{bin} {path} @args", + "sample": "squid_sample", + "path": "{base_path}/squid_fpath.py", + "region_tag": 
None, + }, + { + "environment": "python", + "bin": "python3", + "base_path": "sample/base/directory", + "invocation": "{bin} {path} @args", + "sample": "clam_sample", + "path": "{base_path}/clam_fpath.py", + "region_tag": "giant_clam_sample", + }, + ], + } + + parsed_manifest = yaml.safe_load(rendered_yaml) + assert parsed_manifest == expected_parsed_manifest From 0eaf2d863a1c8921938c4e47ba4fa6345378009b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 19 Jul 2019 10:29:12 -0700 Subject: [PATCH 0132/1339] [fix] Remove
from `feature_fragments.j2`. (#154) It is obviously not valid Python. Also, its presence causes the file to be generated even if there are no samples. --- .../gapic/templates/examples/feature_fragments.j2 | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 9360ff124778..bdf5b79c4772 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -232,4 +232,3 @@ if __name__ == "__main__": main() {% endwith %} {% endmacro %} -
From 5485cc5363da27687707509ddef918972354a6dc Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 19 Jul 2019 10:33:34 -0700 Subject: [PATCH 0133/1339] Bump the version number to 11 (#149) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e86d76154b5e..2df73866cb36 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.10.0', + version='0.11.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 21691d91d1800ded68831cd4bdbd444d9c7790a5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 19 Jul 2019 10:41:37 -0700 Subject: [PATCH 0134/1339] [fix] Pager modules must always import the message being paged. (#153) This commit fixes an issue where a `pagers.py` file would sometimes be missing an import (it would always import the request and response message, but not necessarily the message being paged over). Fixes #150. 
--- .../$name_$version/$sub/services/$service/pagers.py.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 index df6a48fb9787..aac5949228a4 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 @@ -13,6 +13,7 @@ from typing import Any, Callable, Iterable {% for method in service.methods.values() | selectattr('paged_result_field') -%} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} +{{ method.paged_result_field.message.ident.python_import }} {% endfor %} {% endfilter -%} {% endif %} From b2bcd4415c019a2416580f93d8739a0c0242a594 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 19 Jul 2019 10:44:39 -0700 Subject: [PATCH 0135/1339] [fix] Alphabetically sort lines in a few more spots. (#155) --- .../$namespace/$name_$version/$sub/__init__.py.j2 | 8 +++++++- .../$namespace/$name_$version/$sub/types/$proto.py.j2 | 5 ++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 index ab1c0fb99fe2..f5534412abce 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 @@ -7,27 +7,32 @@ from . import {{ subpackage }} {% endfor -%} {# Import services for this package. 
-#} +{% filter sort_lines -%} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view -%} from .services.{{ service.name|snake_case }} import {{ service.name }} {% endfor -%} +{% endfilter -%} {# Import messages from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. -#} +{% filter sort_lines -%} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} {% for message in proto.messages.values()|sort(attribute='name') -%} from .types.{{ proto.module_name }} import {{ message.name }} -{% endfor %}{% endfor %} +{% endfor %}{% endfor -%} +{% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. -#} __all__ = ( + {%- filter sort_lines %} {%- for subpackage in api.subpackages.keys() %} '{{ subpackage }}', {%- endfor %} @@ -40,5 +45,6 @@ __all__ = ( {%- for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', {%- endfor %}{% endfor %} + {%- endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 index 97d61cba2124..eb42eb1f0d54 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 @@ -6,9 +6,12 @@ import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} +{% filter sort_lines -%} {% for import_ in proto.python_modules -%} {{ import_ }} -{% endfor %} +{% endfor -%} +{% endfilter %} + __protobuf__ = {{ p }}.module( package='{{ '.'.join(proto.meta.address.package) }}', From 
3f3f51236a36956da4bdf2d76df78dc804a1c16b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 22 Jul 2019 10:35:22 -0700 Subject: [PATCH 0136/1339] Add a 'validate_expression' method (and tests) to Validator (#151) * Add a 'validate_expression' method (and tests) to Validator It validates that a dotted attribute expression, e.g. "squid.clam.whelk", references a defined variable, and that each attribute in the chain is valid, i.e. 'squid' is a defined variable, and its type has a 'clam' attribute, whose type has a 'whelk' attribute. Also, do initial attribute checking All "attribute expressions", e.g. mollusc.cephalopod.squid, are checked to make sure that the chain of attributes is valid. Validity includes: *) Only the final attribute may be repeated *) The type of an attribute at position n in the chain has a field with the name of the attribute at position n+1. Defines check that repeated fields can only come at the end of an expression. Loop statements now check that the collection is a repeated field. 
--- packages/gapic-generator/.coveragerc | 4 +- .../gapic/samplegen/samplegen.py | 178 +++- .../tests/unit/samplegen/common_types.py | 46 + .../tests/unit/samplegen/test_integration.py | 18 +- .../tests/unit/samplegen/test_manifest.py | 135 +++ .../tests/unit/samplegen/test_samplegen.py | 960 +++++++++++------- 6 files changed, 943 insertions(+), 398 deletions(-) create mode 100644 packages/gapic-generator/tests/unit/samplegen/common_types.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/test_manifest.py diff --git a/packages/gapic-generator/.coveragerc b/packages/gapic-generator/.coveragerc index 70d8fb79ce87..e6d2ce8b2774 100644 --- a/packages/gapic-generator/.coveragerc +++ b/packages/gapic-generator/.coveragerc @@ -3,7 +3,6 @@ branch = True omit = gapic/cli/*.py *_pb2.py - # Abstract methods play hob with coverage [report] fail_under = 100 @@ -16,4 +15,5 @@ exclude_lines = # Ignore debug-only repr def __repr__ # Abstract methods by definition are not invoked - @abstractmethod \ No newline at end of file + @abstractmethod + \ No newline at end of file diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 3f7feb4cf141..952992b2758e 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -19,10 +19,11 @@ import time from gapic.samplegen import utils, yaml +from gapic.schema import (api, wrappers) - -from collections import defaultdict, namedtuple -from typing import Dict, List, Mapping, Optional, Set, Tuple +from collections import (defaultdict, namedtuple, ChainMap as chainmap) +from textwrap import dedent +from typing import (ChainMap, Dict, List, Mapping, Optional, Set, Tuple) # Outstanding issues: # * In real sample configs, many variables are @@ -96,6 +97,10 @@ class UndefinedVariableReference(SampleError): pass +class BadAttributeLookup(SampleError): + pass + + class 
RedefinedVariable(SampleError): pass @@ -127,7 +132,7 @@ def coerce_response_name(s: str) -> str: class Validator: - """Class that validates samples. + """Class that validates a sample. Contains methods that validate different segments of a sample and maintains state that's relevant to validation across different segments. @@ -140,15 +145,38 @@ class Validator: VAL_KWORD = "value" BODY_KWORD = "body" - def __init__(self): + def __init__(self, method: wrappers.Method): # The response ($resp) variable is special and guaranteed to exist. - self.var_defs_: Set[str] = {"$resp"} + # TODO: name lookup also involes type checking + response_type = method.output + if method.paged_result_field: + response_type = method.paged_result_field + elif method.lro: + response_type = method.lro.response_type + + # This is a shameless hack to work around the design of wrappers.Field + MockField = namedtuple("MockField", ["message", "repeated"]) + + # TODO: pass var_defs_ around during response verification + # instead of assigning/restoring. + self.var_defs_: ChainMap[str, wrappers.Field] = chainmap( + # When validating expressions we need to store the Field, + # not just the message type, because there are additional data we need: + # whether a name refers to a repeated value (or a map), + # and whether it's an enum or a message or a primitive type. + # The method call response isn't a field, so construct an artificial + # field that wraps the response. + { # type: ignore + "$resp": MockField(response_type, False) + } + ) + + def var_field(self, var_name: str) -> Optional[wrappers.Field]: + return self.var_defs_.get(var_name) - # TODO: this will eventually need the method name and the proto file - # so that it can do the correct value transformation for enums. 
- def validate_and_transform_request( - self, calling_form: utils.CallingForm, request: List[Mapping[str, str]] - ) -> List[TransformedRequest]: + def validate_and_transform_request(self, + calling_form: utils.CallingForm, + request: List[Mapping[str, str]]) -> List[TransformedRequest]: """Validates and transforms the "request" block from a sample config. In the initial request, each dict has a "field" key that maps to a dotted @@ -214,7 +242,12 @@ def validate_and_transform_request( field_assignment_copy = dict(field_assignment) input_param = field_assignment_copy.get("input_parameter") if input_param: - self._handle_lvalue(input_param) + # We use str as the input type because + # validate_expression just needs to know + # that the input_parameter isn't a MessageType of any kind. + # TODO: write a test about that. + # TODO: handle enums + self._handle_lvalue(input_param, str) field = field_assignment_copy.get("field") if not field: @@ -272,7 +305,6 @@ def validate_response(self, response): A full description of the response block is outside the scope of this code; refer to the samplegen documentation. - Dispatches statements to sub-validators. Args: @@ -297,7 +329,72 @@ def validate_response(self, response): validater(self, body) - def _handle_lvalue(self, lval): + def validate_expression(self, exp: str) -> wrappers.Field: + """Validate an attribute chain expression. + + Given a lookup expression, e.g. squid.clam.whelk, + recursively validate that each base has an attr with the name of the + next lookup lower down and that no attributes, besides possibly the final, + are repeated fields. + + Args: + expr: str: The attribute expression. + + Raises: + UndefinedVariableReference: If the root of the expression is not + a previously defined lvalue. + BadAttributeLookup: If an attribute other than the final is repeated OR + if an attribute in the chain is not a field of its parent. + + Returns: + wrappers.Field: The final field in the chain. 
+ """ + # TODO: handle mapping attributes, i.e. {} + # TODO: Add resource name handling, i.e. % + indexed_exp_re = re.compile( + r"^(?P\$?\w+)(?:\[(?P\d+)\])?$") + + toks = exp.split(".") + match = indexed_exp_re.match(toks[0]) + if not match: + raise BadAttributeLookup( + f"Badly formatted attribute expression: {exp}") + + base_tok, previous_was_indexed = (match.groupdict()["attr_name"], + bool(match.groupdict()["index"])) + base = self.var_field(base_tok) + if not base: + raise UndefinedVariableReference( + "Reference to undefined variable: {}".format(base_tok)) + if previous_was_indexed and not base.repeated: + raise BadAttributeLookup( + "Cannot index non-repeated attribute: {}".format(base_tok)) + + for tok in toks[1:]: + match = indexed_exp_re.match(tok) + if not match: + raise BadAttributeLookup( + f"Badly formatted attribute expression: {tok}") + + attr_name, lookup_token = match.groups() + if base.repeated and not previous_was_indexed: + raise BadAttributeLookup( + "Cannot access attributes through repeated field: {}".format(attr_name)) + if previous_was_indexed and not base.repeated: + raise BadAttributeLookup( + "Cannot index non-repeated attribute: {}".format(attr_name)) + + # TODO: handle enums, primitives, and so forth. + attr = base.message.fields.get(attr_name) # type: ignore + if not attr: + raise BadAttributeLookup( + "No such attribute in type '{}': {}".format(base, attr_name)) + + base, previous_was_indexed = attr, bool(lookup_token) + + return base + + def _handle_lvalue(self, lval: str, type_=None): """Conducts safety checks on an lvalue and adds it to the lexical scope. Raises: @@ -315,7 +412,7 @@ def _handle_lvalue(self, lval): raise RedefinedVariable( "Tried to redefine variable: {}".format(lval)) - self.var_defs_.add(lval) + self.var_defs_[lval] = type_ def _validate_format(self, body: List[str]): """Validates a format string and corresponding arguments. 
@@ -330,6 +427,8 @@ def _validate_format(self, body: List[str]): MismatchedFormatSpecifier: If the number of format string segments ("%s") in a "print" or "comment" block does not equal the size number of strings in the block minus 1. + UndefinedVariableReference: If the base lvalue in an expression chain + is not a previously defined lvalue. """ fmt_str = body[0] num_prints = fmt_str.count("%s") @@ -358,8 +457,6 @@ def _validate_define(self, body: str): UndefinedVariableReference: If an attempted rvalue base is a previously undeclared variable. """ - # TODO: Need to validate the attributes of the response - # based on the method return type. # TODO: Need to check the defined variables # if the rhs references a non-response variable. # TODO: Need to rework the regex to allow for subfields, @@ -372,13 +469,9 @@ def _validate_define(self, body: str): raise BadAssignment("Bad assignment statement: {}".format(body)) lval, rval = m.groups() - self._handle_lvalue(lval) - rval_base = rval.split(".")[0] - if not rval_base in self.var_defs_: - raise UndefinedVariableReference( - "Reference to undefined variable: {}".format(rval_base) - ) + rval_type = self.validate_expression(rval) + self._handle_lvalue(lval, rval_type) def _validate_write_file(self, body): """Validate 'write_file' statements. @@ -448,20 +541,20 @@ def _validate_loop(self, body): # # is allowed, the samplegen spec requires that errors are raised # if strict lexical scoping is violated. - previous_defs = set(self.var_defs_) + self.var_defs_ = self.var_defs_.new_child() if {self.COLL_KWORD, self.VAR_KWORD, self.BODY_KWORD} == segments: - collection_name = body[self.COLL_KWORD].split(".")[0] - # TODO: Once proto info is being passed in, validate the - # [1:] in the collection name. 
+ tokens = body[self.COLL_KWORD].split(".") + # TODO: resolve the implicit $resp dilemma # if collection_name.startswith("."): # collection_name = "$resp" + collection_name - if collection_name not in self.var_defs_: - raise UndefinedVariableReference( - "Reference to undefined variable: {}".format( - collection_name) - ) + collection_field = self.validate_expression( + body[self.COLL_KWORD]) + + if not collection_field.repeated: + raise BadLoop( + "Tried to use a non-repeated field as a collection: {}".format(tokens[-1])) var = body[self.VAR_KWORD] self._handle_lvalue(var) @@ -500,10 +593,9 @@ def _validate_loop(self, body): # Restore the previous lexical scope. # This is stricter than python scope rules # because the samplegen spec mandates it. - self.var_defs_ = previous_defs + self.var_defs_ = self.var_defs_.parents # Add new statement keywords to this table. - # TODO: add write_file validator and entry (and tests). STATEMENT_DISPATCH_TABLE = { "define": _validate_define, "print": _validate_format, @@ -513,9 +605,11 @@ def _validate_loop(self, body): } -def generate_sample( - sample, id_is_unique: bool, env: jinja2.environment.Environment, api_schema -) -> Tuple[str, jinja2.environment.TemplateStream]: +def generate_sample(sample, + id_is_unique: bool, + env: jinja2.environment.Environment, + api_schema: api.API) -> Tuple[str, jinja2.environment.TemplateStream]: + sample_template = env.get_template(TEMPLATE_NAME) service_name = sample["service"] @@ -533,14 +627,14 @@ def generate_sample( calling_form = utils.CallingForm.method_default(rpc) - v = Validator() - sample["request"] = v.validate_and_transform_request( - calling_form, sample["request"] - ) + v = Validator(rpc) + sample["request"] = v.validate_and_transform_request(calling_form, + sample["request"]) v.validate_response(sample["response"]) sample_fpath = ( - sample["id"] + (str(calling_form) if not id_is_unique else "") + ".py" + sample["id"] + (str(calling_form) + if not id_is_unique else "") + ".py" ) 
sample["package_name"] = api_schema.naming.warehouse_package_name diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py new file mode 100644 index 000000000000..45c5a21c7f49 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -0,0 +1,46 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import namedtuple + +# Injected dummy test types + +DummyMethod = namedtuple( + "DummyMethod", + [ + "input", + "output", + "lro", + "paged_result_field", + "client_streaming", + "server_streaming", + ], +) + +DummyMethod.__new__.__defaults__ = (False,) * len(DummyMethod._fields) + +DummyMessage = namedtuple("DummyMessage", ["fields", "type"]) +DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) + +DummyField = namedtuple("DummyField", ["message", "repeated"]) +DummyField.__new__.__defaults__ = (False,) * len(DummyField._fields) + +DummyService = namedtuple("DummyService", ["methods"]) + +DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming"]) +DummyApiSchema.__new__.__defaults__ = (False,) * len(DummyApiSchema._fields) + +DummyNaming = namedtuple( + "DummyNaming", ["warehouse_package_name", "name", "version"]) +DummyNaming.__new__.__defaults__ = (False,) * len(DummyNaming._fields) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py 
b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 8a71d1f64522..8a3fce410b93 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -19,24 +19,12 @@ import gapic.samplegen.samplegen as samplegen import gapic.utils as utils +from common_types import (DummyMethod, DummyService, + DummyApiSchema, DummyNaming) + from collections import namedtuple from textwrap import dedent -# Injected dummy test types -dummy_method_fields = ["lro", - "paged_result_field", - "client_streaming", - "server_streaming"] -DummyMethod = namedtuple("DummyMethod", - dummy_method_fields) -DummyMethod.__new__.__defaults__ = (False,) * len(dummy_method_fields) - -DummyService = namedtuple("DummyService", ["methods"]) - -DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming"]) - -DummyNaming = namedtuple("DummyNaming", ["warehouse_package_name"]) - env = jinja2.Environment( loader=jinja2.FileSystemLoader( diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py new file mode 100644 index 000000000000..1d841d073b5a --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -0,0 +1,135 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import yaml +from textwrap import dedent + +import gapic.samplegen.yaml as gapic_yaml +import gapic.samplegen.samplegen as samplegen +from common_types import DummyApiSchema, DummyNaming + + +def test_generate_manifest(): + fpath_to_dummy_sample = { + "squid_fpath.py": {"id": "squid_sample"}, + "clam_fpath.py": {"id": "clam_sample", + "region_tag": "giant_clam_sample"}, + } + + fname, info = samplegen.generate_manifest( + fpath_to_dummy_sample.items(), + DummyApiSchema(naming=DummyNaming(name="Mollusc", version="v1")), + # Empirically derived number such that the + # corresponding time_struct tests the zero + # padding in the returned filename. + manifest_time=4486525628 + ) + + assert fname == "Mollusc.v1.python.21120304.090708.manifest.yaml" + + expected_info = [ + gapic_yaml.KeyVal("type", "manifest/samples"), + gapic_yaml.KeyVal("schema_version", "3"), + gapic_yaml.Map(name="python", + anchor_name="python", + elements=[ + gapic_yaml.KeyVal( + "environment", "python"), + gapic_yaml.KeyVal( + "bin", "python3"), + gapic_yaml.KeyVal( + "base_path", "sample/base/directory"), + gapic_yaml.KeyVal( + "invocation", "'{bin} {path} @args'"), + ]), + gapic_yaml.Collection(name="samples", + elements=[ + [ + gapic_yaml.Anchor( + "python"), + gapic_yaml.KeyVal( + "sample", "squid_sample"), + gapic_yaml.KeyVal( + "path", "'{base_path}/squid_fpath.py'"), + gapic_yaml.KeyVal( + "region_tag", ""), + ], + [ + gapic_yaml.Anchor("python"), + gapic_yaml.KeyVal( + "sample", "clam_sample"), + gapic_yaml.KeyVal( + "path", "'{base_path}/clam_fpath.py'"), + gapic_yaml.KeyVal( + "region_tag", "giant_clam_sample") + ], + ]) + ] + + assert info == expected_info + + expected_rendering = dedent(""" + type: manifest/samples + schema_version: 3 + python: &python + environment: python + bin: python3 + base_path: sample/base/directory + invocation: '{bin} {path} @args' + samples: + - <<: *python + sample: squid_sample + path: '{base_path}/squid_fpath.py' + region_tag: + - <<: *python + sample: 
clam_sample + path: '{base_path}/clam_fpath.py' + region_tag: giant_clam_sample""".lstrip("\n")) + + rendered_yaml = "\n".join(e.render() for e in info) + assert rendered_yaml == expected_rendering + + expected_parsed_manifest = { + "type": "manifest/samples", + "schema_version": 3, + "python": { + "environment": "python", + "bin": "python3", + "base_path": "sample/base/directory", + "invocation": "{bin} {path} @args", + }, + "samples": [ + { + "environment": "python", + "bin": "python3", + "base_path": "sample/base/directory", + "invocation": "{bin} {path} @args", + "sample": "squid_sample", + "path": "{base_path}/squid_fpath.py", + "region_tag": None, + }, + { + "environment": "python", + "bin": "python3", + "base_path": "sample/base/directory", + "invocation": "{bin} {path} @args", + "sample": "clam_sample", + "path": "{base_path}/clam_fpath.py", + "region_tag": "giant_clam_sample", + }, + ], + } + + parsed_manifest = yaml.safe_load(rendered_yaml) + assert parsed_manifest == expected_parsed_manifest diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index cd73432bd3cd..b6bf22f3ffdc 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -12,70 +12,117 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest import yaml +import itertools +import pytest + +from typing import TypeVar from collections import namedtuple -from textwrap import dedent -import gapic.samplegen.samplegen as samplegen +import gapic.schema.wrappers as wrappers import gapic.samplegen.yaml as gapic_yaml +import gapic.samplegen.samplegen as samplegen - +from common_types import DummyField, DummyMessage, DummyMethod from gapic.samplegen import utils + +def message_factory(exp: str, repeated_iter=itertools.repeat(False)) -> DummyMessage: + # This mimics the structure of MessageType in the wrappers module: + # A MessageType has a map from field names to Fields, + # and a Field has an (optional) MessageType. + # The 'exp' parameter is a dotted attribute expression + # used to describe the field and type hierarchy, + # e.g. "mollusc.cephalopod.coleoid" + toks = exp.split(".") + messages = [DummyMessage({}, tok.upper() + "_TYPE") for tok in toks] + for base, field, attr_name, repeated_field in zip( + messages, messages[1:], toks[1:], repeated_iter + ): + base.fields[attr_name] = DummyField(field, repeated=repeated_field) + + return messages[0] + + # validate_response tests def test_define(): define = {"define": "squid=$resp"} - samplegen.Validator().validate_response([define]) + + samplegen.Validator( + DummyMethod(output=message_factory("mollusc")) + ).validate_response([define]) def test_define_undefined_var(): define = {"define": "squid=humboldt"} with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response([define]) + samplegen.Validator( + DummyMethod(output=message_factory("mollusc")) + ).validate_response([define]) def test_define_reserved_varname(): define = {"define": "class=$resp"} with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_response([define]) + samplegen.Validator( + DummyMethod(output=message_factory("mollusc")) + ).validate_response([define]) def test_define_add_var(): - 
samplegen.Validator().validate_response([{"define": "squid=$resp"}, - {"define": "name=squid.name"}]) + samplegen.Validator( + DummyMethod(output=message_factory("mollusc.name")) + ).validate_response([{"define": "squid=$resp"}, {"define": "name=squid.name"}]) def test_define_bad_form(): define = {"define": "mollusc=$resp.squid=$resp.clam"} with pytest.raises(samplegen.BadAssignment): - samplegen.Validator().validate_response([define]) + samplegen.Validator( + DummyMethod(output=message_factory("mollusc")) + ).validate_response([define]) def test_define_redefinition(): - statements = [{"define": "molluscs=$resp.molluscs"}, - {"define": "molluscs=$resp.molluscs"}] + statements = [ + {"define": "molluscs=$resp.molluscs"}, + {"define": "molluscs=$resp.molluscs"}, + ] with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator().validate_response(statements) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.molluscs", [True])) + ).validate_response(statements) def test_define_input_param(): - validator = samplegen.Validator() - validator.validate_and_transform_request(utils.CallingForm.Request, - [{"field": "squid.mantle_length", - "value": "100 cm", - "input_parameter": "mantle_length"}]) + validator = samplegen.Validator(DummyMethod()) + validator.validate_and_transform_request( + utils.CallingForm.Request, + [ + { + "field": "squid.mantle_length", + "value": "100 cm", + "input_parameter": "mantle_length", + } + ], + ) validator.validate_response([{"define": "length=mantle_length"}]) def test_define_input_param_redefinition(): - validator = samplegen.Validator() - validator.validate_and_transform_request(utils.CallingForm.Request, - [{"field": "squid.mantle_length", - "value": "100 cm", - "input_parameter": "mantle_length"}]) + validator = samplegen.Validator(DummyMethod()) + validator.validate_and_transform_request( + utils.CallingForm.Request, + [ + { + "field": "squid.mantle_length", + "value": "100 cm", + "input_parameter": 
"mantle_length", + } + ], + ) with pytest.raises(samplegen.RedefinedVariable): validator.validate_response( [{"define": "mantle_length=mantle_length"}]) @@ -83,382 +130,552 @@ def test_define_input_param_redefinition(): def test_print_basic(): print_statement = {"print": ["This is a squid"]} - samplegen.Validator().validate_response([print_statement]) + samplegen.Validator(DummyMethod()).validate_response([print_statement]) def test_print_fmt_str(): print_statement = {"print": ["This is a squid named %s", "$resp.name"]} - samplegen.Validator().validate_response([print_statement]) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.name")) + ).validate_response([print_statement]) def test_print_fmt_mismatch(): print_statement = {"print": ["This is a squid named %s"]} with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator().validate_response([print_statement]) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.name")) + ).validate_response([print_statement]) def test_print_fmt_mismatch2(): print_statement = {"print": ["This is a squid", "$resp.name"]} with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator().validate_response([print_statement]) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.name")) + ).validate_response([print_statement]) def test_print_undefined_var(): print_statement = {"print": ["This mollusc is a %s", "mollusc.type"]} with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response([print_statement]) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.type")) + ).validate_response([print_statement]) def test_comment(): comment = {"comment": ["This is a mollusc"]} - samplegen.Validator().validate_response([comment]) + samplegen.Validator(DummyMethod()).validate_response([comment]) def test_comment_fmt_str(): comment = {"comment": ["This is a mollusc of class %s", "$resp.class"]} - 
samplegen.Validator().validate_response([comment]) + samplegen.Validator(DummyMethod()).validate_response([comment]) def test_comment_fmt_undefined_var(): comment = {"comment": ["This is a mollusc of class %s", "cephalopod"]} with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response([comment]) + samplegen.Validator(DummyMethod()).validate_response([comment]) def test_comment_fmt_mismatch(): comment = {"comment": ["This is a mollusc of class %s"]} with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator().validate_response([comment]) + samplegen.Validator(DummyMethod()).validate_response([comment]) def test_comment_fmt_mismatch2(): comment = {"comment": ["This is a mollusc of class ", "$resp.class"]} with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator().validate_response([comment]) + samplegen.Validator(DummyMethod()).validate_response([comment]) def test_loop_collection(): - loop = {"loop": {"collection": "$resp.molluscs", - "variable": "m", - "body": [{"print": - ["Mollusc of class: %s", "m.class"]}]}} - samplegen.Validator().validate_response([loop]) + loop = { + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["Mollusc of class: %s", "m.class"]}], + } + } + samplegen.Validator( + DummyMethod(output=message_factory("$resp.molluscs", [True])) + ).validate_response([loop]) def test_loop_collection_redefinition(): - statements = [{"define": "m=$resp.molluscs"}, - {"loop": {"collection": "$resp.molluscs", - "variable": "m", - "body": [{"print": ["Mollusc of class: %s", - "m.class"]}]}}] + statements = [ + {"define": "m=$resp.molluscs"}, + { + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["Mollusc of class: %s", "m.class"]}], + } + }, + ] with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator().validate_response(statements) + samplegen.Validator( + 
DummyMethod(output=message_factory("$resp.molluscs", [True])) + ).validate_response(statements) def test_loop_undefined_collection(): - loop = {"loop": {"collection": "squid", - "variable": "s", - "body": [{"print": - ["Squid: %s", "s"]}]}} + loop = { + "loop": { + "collection": "squid", + "variable": "s", + "body": [{"print": ["Squid: %s", "s"]}], + } + } with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_collection_extra_kword(): - loop = {"loop": {"collection": "$resp.molluscs", - "squid": "$resp.squids", - "variable": "m", - "body": [{"print": - ["Mollusc of class: %s", "m.class"]}]}} + loop = { + "loop": { + "collection": "$resp.molluscs", + "squid": "$resp.squids", + "variable": "m", + "body": [{"print": ["Mollusc of class: %s", "m.class"]}], + } + } with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_collection_missing_kword(): - loop = {"loop": {"collection": "$resp.molluscs", - "body": [{"print": - ["Mollusc of class: %s", "m.class"]}]}} + loop = { + "loop": { + "collection": "$resp.molluscs", + "body": [{"print": ["Mollusc of class: %s", "m.class"]}], + } + } with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_collection_reserved_loop_var(): - loop = {"loop": {"collection": "$resp.molluscs", - "variable": "class", - "body": [{"print": - ["Mollusc: %s", "class.name"]}]}} + loop = { + "loop": { + "collection": "$resp.molluscs", + "variable": "class", + "body": [{"print": ["Mollusc: %s", "class.name"]}], + } + } with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_response([loop]) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.molluscs", [True])) + 
).validate_response([loop]) def test_loop_map(): - loop = {"loop": {"map": "$resp.molluscs", - "key": "cls", - "value": "mollusc", - "body": [{"print": ["A %s is a %s", "mollusc", "cls"]}]}} - samplegen.Validator().validate_response([loop]) + loop = { + "loop": { + "map": "$resp.molluscs", + "key": "cls", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "cls"]}], + } + } + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_collection_loop_lexical_scope_variable(): - statements = [{"loop": {"collection": "$resp.molluscs", - "variable": "m", - "body": [{"define": "squid=m"}]}}, - {"define": "cephalopod=m"}] + statements = [ + { + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"define": "squid=m"}], + } + }, + {"define": "cephalopod=m"}, + ] with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response(statements) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.molluscs", [True])) + ).validate_response(statements) def test_collection_loop_lexical_scope_inline(): - statements = [{"loop": {"collection": "$resp.molluscs", - "variable": "m", - "body": [{"define": "squid=m"}]}}, - {"define": "cephalopod=squid"}] + statements = [ + { + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"define": "squid=m"}], + } + }, + {"define": "cephalopod=squid"}, + ] with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response(statements) + samplegen.Validator( + DummyMethod(output=message_factory("$resp.molluscs", [True])) + ).validate_response(statements) def test_map_loop_lexical_scope_key(): - statements = [{"loop": {"map": "$resp.molluscs", - "key": "cls", - "value": "order", - "body": [{"define": "tmp=cls"}]}}, - {"define": "last_cls=cls"}] + statements = [ + { + "loop": { + "map": "$resp.molluscs", + "key": "cls", + "value": "order", + "body": [{"define": "tmp=cls"}], + } + }, + {"define": 
"last_cls=cls"}, + ] with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response(statements) + samplegen.Validator(DummyMethod()).validate_response(statements) def test_map_loop_lexical_scope_value(): - statements = [{"loop": {"map": "$resp.molluscs", - "key": "cls", - "value": "order", - "body": [{"define": "tmp=order"}]}}, - {"define": "last_order=order"}] + statements = [ + { + "loop": { + "map": "$resp.molluscs", + "key": "cls", + "value": "order", + "body": [{"define": "tmp=order"}], + } + }, + {"define": "last_order=order"}, + ] with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response(statements) + samplegen.Validator(DummyMethod()).validate_response(statements) def test_map_loop_lexical_scope_inline(): - statements = [{"loop": {"map": "$resp.molluscs", - "key": "cls", - "value": "order", - "body": [{"define": "tmp=order"}]}}, - {"define": "last_order=tmp"}] + statements = [ + { + "loop": { + "map": "$resp.molluscs", + "key": "cls", + "value": "order", + "body": [{"define": "tmp=order"}], + } + }, + {"define": "last_order=tmp"}, + ] with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response(statements) + samplegen.Validator(DummyMethod()).validate_response(statements) def test_loop_map_reserved_key(): - loop = {"loop": {"map": "$resp.molluscs", - "key": "class", - "value": "mollusc", - "body": [{"print": ["A %s is a %s", "mollusc", "class"]}]}} + loop = { + "loop": { + "map": "$resp.molluscs", + "key": "class", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "class"]}], + } + } with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_reserved_val(): - loop = {"loop": {"map": "$resp.molluscs", - "key": "m", - "value": "class", - "body": [{"print": ["A %s is a %s", "m", "class"]}]}} + loop = { 
+ "loop": { + "map": "$resp.molluscs", + "key": "m", + "value": "class", + "body": [{"print": ["A %s is a %s", "m", "class"]}], + } + } with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_undefined(): - loop = {"loop": {"map": "molluscs", - "key": "name", - "value": "mollusc", - "body": [{"print": ["A %s is a %s", "mollusc", "name"]}]}} + loop = { + "loop": { + "map": "molluscs", + "key": "name", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], + } + } with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_no_key(): - loop = {"loop": {"map": "$resp.molluscs", - "value": "mollusc", - "body": [{"print": ["Mollusc: %s", "mollusc"]}]}} - samplegen.Validator().validate_response([loop]) + loop = { + "loop": { + "map": "$resp.molluscs", + "value": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}], + } + } + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_no_value(): - loop = {"loop": {"map": "$resp.molluscs", - "key": "mollusc", - "body": [{"print": ["Mollusc: %s", "mollusc"]}]}} - samplegen.Validator().validate_response([loop]) + loop = { + "loop": { + "map": "$resp.molluscs", + "key": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}], + } + } + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_no_key_or_value(): loop = {"loop": {"map": "$resp.molluscs", "body": [{"print": ["Dead loop"]}]}} with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_no_map(): - loop = {"loop": {"key": "name", - "value": "mollusc", - "body": [{"print": ["A %s is a %s", "mollusc", "name"]}]}} + loop = { + 
"loop": { + "key": "name", + "value": "mollusc", + "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], + } + } with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_no_body(): - loop = {"loop": {"map": "$resp.molluscs", - "key": "name", - "value": "mollusc"}} + loop = {"loop": {"map": "$resp.molluscs", "key": "name", "value": "mollusc"}} with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_extra_kword(): - loop = {"loop": {"map": "$resp.molluscs", - "key": "name", - "value": "mollusc", - "phylum": "$resp.phylum", - "body": [{"print": ["A %s is a %s", "mollusc", "name"]}]}} + loop = { + "loop": { + "map": "$resp.molluscs", + "key": "name", + "value": "mollusc", + "phylum": "$resp.phylum", + "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], + } + } with pytest.raises(samplegen.BadLoop): - samplegen.Validator().validate_response([loop]) + samplegen.Validator(DummyMethod()).validate_response([loop]) def test_loop_map_redefined_key(): - statements = [{"define": "mollusc=$resp.molluscs"}, - {"loop": {"map": "$resp.molluscs", - "key": "mollusc", - "body": [{"print": ["Mollusc: %s", "mollusc"]}]}}] + statements = [ + {"define": "mollusc=$resp.molluscs"}, + { + "loop": { + "map": "$resp.molluscs", + "key": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}], + } + }, + ] with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator().validate_response(statements) + samplegen.Validator( + DummyMethod(output=message_factory("mollusc.molluscs")) + ).validate_response(statements) def test_loop_map_redefined_value(): - statements = [{"define": "mollusc=$resp.molluscs"}, - {"loop": {"map": "$resp.molluscs", - "value": "mollusc", - "body": [{"print": ["Mollusc: %s", "mollusc"]}]}}] + statements = [ + {"define": 
"mollusc=$resp.molluscs"}, + { + "loop": { + "map": "$resp.molluscs", + "value": "mollusc", + "body": [{"print": ["Mollusc: %s", "mollusc"]}], + } + }, + ] with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator().validate_response(statements) + samplegen.Validator( + DummyMethod(output=message_factory("mollusc.molluscs")) + ).validate_response(statements) def test_validate_write_file(): - samplegen.Validator().validate_response( - [{"write_file": {"filename": ["specimen-%s", "$resp.species"], - "contents": "$resp.photo"}}]) + samplegen.Validator(DummyMethod()).validate_response( + [ + { + "write_file": { + "filename": ["specimen-%s", "$resp.species"], + "contents": "$resp.photo", + } + } + ] + ) def test_validate_write_file_fname_fmt(): with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator().validate_response( - [{"write_file": {"filename": ["specimen-%s"], - "contents": "$resp.photo"}}]) + samplegen.Validator(DummyMethod()).validate_response( + [{"write_file": {"filename": ["specimen-%s"], "contents": "$resp.photo"}}] + ) def test_validate_write_file_fname_bad_var(): with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response( - [{"write_file": {"filename": ["specimen-%s", "squid.species"], - "contents": "$resp.photo"}}]) + samplegen.Validator(DummyMethod()).validate_response( + [ + { + "write_file": { + "filename": ["specimen-%s", "squid.species"], + "contents": "$resp.photo", + } + } + ] + ) def test_validate_write_file_missing_fname(): with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator().validate_response( + samplegen.Validator(DummyMethod()).validate_response( [{"write_file": {"contents": "$resp.photo"}}] ) def test_validate_write_file_missing_contents(): with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator().validate_response( + samplegen.Validator(DummyMethod()).validate_response( [{"write_file": {"filename": ["specimen-%s", 
"$resp.species"]}}] ) def test_validate_write_file_bad_contents_var(): with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator().validate_response( - [{"write_file": {"filename": ["specimen-%s", "$resp.species"], - "contents": "squid.photo"}}]) + samplegen.Validator(DummyMethod()).validate_response( + [ + { + "write_file": { + "filename": ["specimen-%s", "$resp.species"], + "contents": "squid.photo", + } + } + ] + ) def test_invalid_statement(): statement = {"print": ["Name"], "comment": ["Value"]} with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator().validate_response([statement]) + samplegen.Validator(DummyMethod()).validate_response([statement]) def test_invalid_statement2(): statement = {"squidify": ["Statement body"]} with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator().validate_response([statement]) + samplegen.Validator(DummyMethod()).validate_response([statement]) # validate_and_transform_request tests def test_validate_request_basic(): - assert samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"field": "squid.mantle_length", - "value": "100 cm"}, - {"field": "squid.mantle_mass", - "value": "10 kg"}]) == [ - samplegen.TransformedRequest("squid", - [{"field": "mantle_length", - "value": "100 cm"}, - {"field": "mantle_mass", - "value": "10 kg"}])] + assert samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, + [ + {"field": "squid.mantle_length", "value": "100 cm"}, + {"field": "squid.mantle_mass", "value": "10 kg"}, + ], + ) == [ + samplegen.TransformedRequest( + "squid", + [ + {"field": "mantle_length", "value": "100 cm"}, + {"field": "mantle_mass", "value": "10 kg"}, + ], + ) + ] def test_validate_request_no_field_parameter(): with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"squid": "humboldt"}]) + 
samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, [{"squid": "humboldt"}] + ) def test_validate_request_malformed_field_attr(): with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"field": "squid"}]) + samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, [{"field": "squid"}] + ) def test_validate_request_multiple_arguments(): - assert samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"field": "squid.mantle_length", - "value": "100 cm", - "value_is_file": True}, - {"field": "clam.shell_mass", - "value": "100 kg", - "comment": "Clams can be large"}]) == [ - samplegen.TransformedRequest("squid", - [{"field": "mantle_length", - "value": "100 cm", - "value_is_file": True}]), - samplegen.TransformedRequest("clam", - [{"field": "shell_mass", - "value": "100 kg", - "comment": "Clams can be large"}])] + assert samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, + [ + {"field": "squid.mantle_length", + "value": "100 cm", "value_is_file": True}, + { + "field": "clam.shell_mass", + "value": "100 kg", + "comment": "Clams can be large", + }, + ], + ) == [ + samplegen.TransformedRequest( + "squid", + [{"field": "mantle_length", "value": "100 cm", "value_is_file": True}], + ), + samplegen.TransformedRequest( + "clam", + [ + { + "field": "shell_mass", + "value": "100 kg", + "comment": "Clams can be large", + } + ], + ), + ] def test_validate_request_reserved_request_name(): with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"field": "class.order", "value": "coleoidea"}]) + samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, [ + {"field": "class.order", "value": "coleoidea"}] + ) def 
test_validate_request_duplicate_input_param(): with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"field": "squid.mantle_mass", - "value": "10 kg", - "input_parameter": "mantle_mass"}, - {"field": "clam.mantle_mass", - "value": "1 kg", - "input_parameter": "mantle_mass"}]) + samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, + [ + { + "field": "squid.mantle_mass", + "value": "10 kg", + "input_parameter": "mantle_mass", + }, + { + "field": "clam.mantle_mass", + "value": "1 kg", + "input_parameter": "mantle_mass", + }, + ], + ) def test_validate_request_reserved_input_param(): with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator().validate_and_transform_request(utils.CallingForm.Request, - [{"field": "mollusc.class", - "value": "cephalopoda", - "input_parameter": "class"}]) + samplegen.Validator(DummyMethod()).validate_and_transform_request( + utils.CallingForm.Request, + [ + { + "field": "mollusc.class", + "value": "cephalopoda", + "input_parameter": "class", + } + ], + ) def test_single_request_client_streaming(): @@ -471,12 +688,13 @@ def test_single_request_client_streaming(): # Client streaming and bidirectional streaming methods can't use this notation, # and generate an exception if there is more than one 'base'. 
with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator().validate_and_transform_request( + samplegen.Validator(DummyMethod()).validate_and_transform_request( utils.CallingForm.RequestStreamingClient, - [{"field": "cephalopod.order", - "value": "cephalopoda"}, - {"field": "gastropod.order", - "value": "pulmonata"}]) + [ + {"field": "cephalopod.order", "value": "cephalopoda"}, + {"field": "gastropod.order", "value": "pulmonata"}, + ], + ) def test_single_request_bidi_streaming(): @@ -489,38 +707,45 @@ def test_single_request_bidi_streaming(): # Client streaming and bidirectional streaming methods can't use this notation, # and generate an exception if there is more than one 'base'. with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator().validate_and_transform_request( + samplegen.Validator(DummyMethod()).validate_and_transform_request( utils.CallingForm.RequestStreamingBidi, - [{"field": "cephalopod.order", - "value": "cephalopoda"}, - {"field": "gastropod.order", - "value": "pulmonata"}]) + [ + {"field": "cephalopod.order", "value": "cephalopoda"}, + {"field": "gastropod.order", "value": "pulmonata"}, + ], + ) def test_validate_request_calling_form(): - DummyMethod = namedtuple("DummyMethod", - ["lro", - "paged_result_field", - "client_streaming", - "server_streaming"]) - - assert utils.CallingForm.method_default(DummyMethod( - True, False, False, False)) == utils.CallingForm.LongRunningRequestPromise + assert ( + utils.CallingForm.method_default(DummyMethod(lro=True)) + == utils.CallingForm.LongRunningRequestPromise + ) - assert utils.CallingForm.method_default(DummyMethod( - False, True, False, False)) == utils.CallingForm.RequestPagedAll + assert ( + utils.CallingForm.method_default(DummyMethod(paged_result_field=True)) + == utils.CallingForm.RequestPagedAll + ) - assert utils.CallingForm.method_default(DummyMethod( - False, False, True, False)) == utils.CallingForm.RequestStreamingClient + assert ( + 
utils.CallingForm.method_default(DummyMethod(client_streaming=True)) + == utils.CallingForm.RequestStreamingClient + ) - assert utils.CallingForm.method_default(DummyMethod( - False, False, False, True)) == utils.CallingForm.RequestStreamingServer + assert ( + utils.CallingForm.method_default(DummyMethod(server_streaming=True)) + == utils.CallingForm.RequestStreamingServer + ) - assert utils.CallingForm.method_default(DummyMethod( - False, False, False, False)) == utils.CallingForm.Request + assert utils.CallingForm.method_default( + DummyMethod()) == utils.CallingForm.Request - assert utils.CallingForm.method_default(DummyMethod( - False, False, True, True)) == utils.CallingForm.RequestStreamingBidi + assert ( + utils.CallingForm.method_default( + DummyMethod(client_streaming=True, server_streaming=True) + ) + == utils.CallingForm.RequestStreamingBidi + ) def test_coerce_response_name(): @@ -529,119 +754,176 @@ def test_coerce_response_name(): assert samplegen.coerce_response_name("mollusc.squid") == "mollusc.squid" -def test_generate_manifest(): - DummyNaming = namedtuple("DummyNaming", ["name", "version"]) - DummyApiSchema = namedtuple("DummyApiSchema", ["naming"]) +def test_regular_response_type(): + OutputType = TypeVar("OutputType") + method = DummyMethod(output=OutputType) - fpath_to_dummy_sample = { - "squid_fpath.py": {"id": "squid_sample"}, - "clam_fpath.py": {"id": "clam_sample", - "region_tag": "giant_clam_sample"}, - } + v = samplegen.Validator(method) + assert v.var_field("$resp").message == OutputType + + +def test_paged_response_type(): + OutputType = TypeVar("OutputType") + PagedType = TypeVar("PagedType") + method = DummyMethod(output=OutputType, paged_result_field=PagedType) - fname, info = samplegen.generate_manifest( - fpath_to_dummy_sample.items(), - DummyApiSchema(DummyNaming("Mollusc", "v1")), - # Empirically derived number such that the - # corresponding time_struct tests the zero - # padding in the returned filename. 
- manifest_time=4486525628 + v = samplegen.Validator(method) + assert v.var_field("$resp").message == PagedType + + +def test_lro_response_type(): + OutputType = TypeVar("OutputType") + LroType = TypeVar("LroType") + method = DummyMethod( + output=OutputType, lro=namedtuple( + "operation", ["response_type"])(LroType) ) - assert fname == "Mollusc.v1.python.21120304.090708.manifest.yaml" - - expected_info = [ - gapic_yaml.KeyVal("type", "manifest/samples"), - gapic_yaml.KeyVal("schema_version", "3"), - gapic_yaml.Map(name="python", - anchor_name="python", - elements=[ - gapic_yaml.KeyVal( - "environment", "python"), - gapic_yaml.KeyVal( - "bin", "python3"), - gapic_yaml.KeyVal( - "base_path", "sample/base/directory"), - gapic_yaml.KeyVal( - "invocation", "'{bin} {path} @args'"), - ]), - gapic_yaml.Collection(name="samples", - elements=[ - [ - gapic_yaml.Anchor( - "python"), - gapic_yaml.KeyVal( - "sample", "squid_sample"), - gapic_yaml.KeyVal( - "path", "'{base_path}/squid_fpath.py'"), - gapic_yaml.KeyVal( - "region_tag", ""), - ], - [ - gapic_yaml.Anchor("python"), - gapic_yaml.KeyVal( - "sample", "clam_sample"), - gapic_yaml.KeyVal( - "path", "'{base_path}/clam_fpath.py'"), - gapic_yaml.KeyVal( - "region_tag", "giant_clam_sample") - ], - ]) - ] + v = samplegen.Validator(method) + assert v.var_field("$resp").message == LroType - assert info == expected_info - - expected_rendering = dedent(""" - type: manifest/samples - schema_version: 3 - python: &python - environment: python - bin: python3 - base_path: sample/base/directory - invocation: '{bin} {path} @args' - samples: - - <<: *python - sample: squid_sample - path: '{base_path}/squid_fpath.py' - region_tag: - - <<: *python - sample: clam_sample - path: '{base_path}/clam_fpath.py' - region_tag: giant_clam_sample""".lstrip("\n")) - - rendered_yaml = "\n".join(e.render() for e in info) - assert rendered_yaml == expected_rendering - - expected_parsed_manifest = { - "type": "manifest/samples", - "schema_version": 3, - 
"python": { - "environment": "python", - "bin": "python3", - "base_path": "sample/base/directory", - "invocation": "{bin} {path} @args", - }, - "samples": [ - { - "environment": "python", - "bin": "python3", - "base_path": "sample/base/directory", - "invocation": "{bin} {path} @args", - "sample": "squid_sample", - "path": "{base_path}/squid_fpath.py", - "region_tag": None, - }, + +def test_validate_expression(): + exp = "$resp.coleoidea.octopodiformes.octopus" + OutputType = message_factory(exp) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + + exp_type = v.validate_expression(exp) + assert exp_type.message.type == "OCTOPUS_TYPE" + + +def test_validate_expression_undefined_base(): + exp = "$resp.coleoidea.octopodiformes.octopus" + OutputType = message_factory(exp) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + + with pytest.raises(samplegen.UndefinedVariableReference): + v.validate_expression("mollusc") + + +def test_validate_expression_no_such_attr(): + OutputType = message_factory("$resp.coleoidea") + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression("$resp.nautiloidea") + + +def test_validate_expression_predefined(): + exp = "$resp.coleoidea.octopodiformes.octopus" + OutputType = message_factory(exp) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_response([{"define": "nautilus=$resp.nautiloidea"}]) + + +def test_validate_expression_repeated_attrs(): + # This is a little tricky: there's an attribute hierarchy + # of response/coleoidea/octopodiformes, but coleoidea is a repeated field, + # so accessing $resp.coleoidea.octopodiformes doesn't make any sense. 
+ exp = "$resp.coleoidea.octopodiformes" + OutputType = message_factory(exp, [True, False]) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_response( + [{"define": "octopus=$resp.coleoidea.octopodiformes"}]) + + +def test_validate_expression_collection(): + exp = "$resp.molluscs" + OutputType = message_factory(exp, [True]) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + v.validate_response( + [ { - "environment": "python", - "bin": "python3", - "base_path": "sample/base/directory", - "invocation": "{bin} {path} @args", - "sample": "clam_sample", - "path": "{base_path}/clam_fpath.py", - "region_tag": "giant_clam_sample", - }, - ], + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["%s", "m"]}], + } + } + ] + ) + + +def test_validate_expression_collection_error(): + exp = "$resp.molluscs.mollusc" + OutputType = message_factory(exp) + method = DummyMethod(output=OutputType) + + statement = { + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["%s", "m"]}], + } } - parsed_manifest = yaml.safe_load(rendered_yaml) - assert parsed_manifest == expected_parsed_manifest + v = samplegen.Validator(method) + + # Because 'molluscs' isn't repeated + with pytest.raises(samplegen.BadLoop): + v.validate_response([statement]) + + +def test_validate_expression_repeated_lookup(): + exp = "$resp.molluscs.mantle" + OutputType = message_factory(exp, [True, False]) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + v.validate_expression("$resp.molluscs[0].mantle") + + +def test_validate_expression_repeated_lookup_invalid(): + exp = "$resp.molluscs.mantle" + OutputType = message_factory(exp) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression("$resp.molluscs[0].mantle") + + 
+def test_validate_expression_base_attr_is_repeated(): + exp = "$resp.molluscs.mantle" + OutputType = message_factory(exp, [True, False]) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + v.validate_response([{"define": "molluscs=$resp.molluscs"}]) + v.validate_expression("molluscs[0].mantle") + + +def test_validate_expresssion_lookup_unrepeated_base(): + exp = "$resp.molluscs" + OutputType = message_factory(exp) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_response([{"define": "m=$resp[0]"}]) + + +def test_validate_expression_malformed_base(): + # Note the mistype + exp = "r$esp.mollusc" + OutputType = message_factory(exp) + method = DummyMethod(OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression(exp) + + +def test_validate_expression_malformed_attr(): + # Note the mistype + exp = "$resp.mollu$c" + OutputType = message_factory(exp) + method = DummyMethod(OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression(exp) From cf85f3ff4992784f716f86f9b961bf647a00ade6 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 22 Jul 2019 11:19:04 -0700 Subject: [PATCH 0137/1339] Fix render_main_block macro and tests (#157) Properly parse the input parameters using argparse and feed them into the sample function. 
--- .../templates/examples/feature_fragments.j2 | 4 +++- .../tests/unit/samplegen/test_integration.py | 5 ++++- .../tests/unit/samplegen/test_template.py | 21 +++++++++++-------- 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index bdf5b79c4772..d1e3c36f2821 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -217,11 +217,13 @@ def main(): parser = argparse.ArgumentParser() {% with arg_list = [] %} -{% for attr in request_block if "input_parameter" in attr %} +{% for request in request_block|map(attribute="body") -%} +{% for attr in request if "input_parameter" in attr %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default="{{ attr.value }}") {% do arg_list.append("args." + attr.input_parameter) -%} +{% endfor %} {% endfor %} args = parser.parse_args() diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 8a3fce410b93..0c2ba14e5f3a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -97,9 +97,12 @@ def main(): import argparse parser = argparse.ArgumentParser() + parser.add_argument("--video", + type=str, + default="path/to/mollusc/video.mkv") args = parser.parse_args() - sample_classify() + sample_classify(args.video) if __name__ == "__main__": diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 63d19c95e258..d5985e9cde73 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -535,14 +535,7 @@ def 
test_main_block(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_main_block("ListMolluscs", [{"field": "list_molluscs.order", - "value": "coleoidea", - "input_parameter": "order"}, - {"field ": "list_molluscs.mass", - "value": "60kg", - "input_parameter": "mass"}, - {"field": "list_molluscs.zone", - "value": "MESOPELAGIC"},]) }} + {{ frags.render_main_block("ListMolluscs", request) }} ''', ''' def main(): @@ -562,5 +555,15 @@ def main(): if __name__ == "__main__": main() - ''' + ''', + request=[ + samplegen.TransformedRequest("input_params", [{"field": "list_molluscs.order", + "value": "coleoidea", + "input_parameter": "order"}, + {"field ": "list_molluscs.mass", + "value": "60kg", + "input_parameter": "mass"}]), + samplegen.TransformedRequest("enum_param", [{"field": "list_molluscs.zone", + "value": "MESOPELAGIC"}]) + ] ) From 10c03147425212c042353bd71837f3ddd0235901 Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Fri, 26 Jul 2019 02:24:07 +0300 Subject: [PATCH 0138/1339] Add renovate.json (#159) --- packages/gapic-generator/renovate.json | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 packages/gapic-generator/renovate.json diff --git a/packages/gapic-generator/renovate.json b/packages/gapic-generator/renovate.json new file mode 100644 index 000000000000..f45d8f110c30 --- /dev/null +++ b/packages/gapic-generator/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base" + ] +} From d290dfe9bf29ccdbbf84d65df856680f2463af35 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 26 Jul 2019 11:37:48 -0700 Subject: [PATCH 0139/1339] Bug fixes and review integration from previous PRs (#158) Iterate dispatch when rendering loop bodies. Dispatch "define", add a dispatch render define test and do some whitespace changing for loop rendering tests. Tweak whitespace and formatting for the integration test. The 'Anchor' yaml class is renamed 'Alias'. 
--- .../gapic/samplegen/samplegen.py | 5 +- .../gapic-generator/gapic/samplegen/yaml.py | 2 +- .../templates/examples/feature_fragments.j2 | 55 +++++++++-------- .../gapic/templates/examples/sample.py.j2 | 2 +- .../tests/unit/samplegen/test_integration.py | 5 +- .../tests/unit/samplegen/test_manifest.py | 4 +- .../tests/unit/samplegen/test_samplegen.py | 8 +++ .../tests/unit/samplegen/test_template.py | 61 +++++++++++++++---- 8 files changed, 97 insertions(+), 45 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 952992b2758e..a526f6cb2b91 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -334,8 +334,7 @@ def validate_expression(self, exp: str) -> wrappers.Field: Given a lookup expression, e.g. squid.clam.whelk, recursively validate that each base has an attr with the name of the - next lookup lower down and that no attributes, besides possibly the final, - are repeated fields. + next lookup lower down and repeated attributes are indexed. Args: expr: str: The attribute expression. 
@@ -684,7 +683,7 @@ def generate_manifest(fpaths_and_samples, api_schema, *, manifest_time: int = No name="samples", elements=[ [ - yaml.Anchor("python"), + yaml.Alias("python"), yaml.KeyVal("sample", sample["id"]), yaml.KeyVal("path", "'{base_path}/%s'" % fpath), yaml.KeyVal("region_tag", sample.get("region_tag", "")), diff --git a/packages/gapic-generator/gapic/samplegen/yaml.py b/packages/gapic-generator/gapic/samplegen/yaml.py index 4959889fb90a..642dddbffe6e 100644 --- a/packages/gapic-generator/gapic/samplegen/yaml.py +++ b/packages/gapic-generator/gapic/samplegen/yaml.py @@ -76,7 +76,7 @@ def render(self, spaces: int = 0) -> str: @dataclasses.dataclass() -class Anchor(Element): +class Alias(Element): """An anchor to a map.""" target: str diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index d1e3c36f2821..d0d015ce08da 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -56,14 +56,14 @@ There is a little, but not enough for it to be important because {% endif %} {% endfor %} {% endfor %} -{{ input_parameters|join(", ") -}} +{{ input_parameters|join(", ") }} {% endwith %} {% endmacro %} +{# First elment is a format string, remaining elements are the format string parameters #} +{# Validating that the number of format params equals #} +{# the number of remaining params is handled by real python code #} {% macro render_print(elts) %} - {# First elment is a format string, remaining elements are the format string parameters #} - {# Validating that the number of format params equals #} - {# the number of remaining params is handled by real python code #} print({{ print_string_formatting(elts)|trim }}) {% endmacro %} @@ -87,7 +87,9 @@ print({{ print_string_formatting(elts)|trim }}) {% macro render_collection_loop(statement) %} for {{ 
statement.variable }} in {{ statement.collection|coerce_response_name }}: - {{ dispatch_statement(statement.body) -}} + {% for s in statement.body %} + {{ dispatch_statement(s) }} + {% endfor %} {% endmacro %} {% macro render_map_loop(statement) %} @@ -99,33 +101,36 @@ for {{ statement.key }} in {{ statement.map|coerce_response_name }}.keys(): {% else %} for {{statement.key }}, {{ statement.value }} in {{ statement.map|coerce_response_name }}.items(): {% endif %} - {{ dispatch_statement(statement.body) -}} +{% for s in statement.body %} + {{ dispatch_statement(s) }} +{% endfor %} {% endmacro %} {% macro render_write_file(statement) %} {% with contents_rval = statement["contents"]|coerce_response_name %} with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: f.write({{ contents_rval }}) - {% endwith %} {% endmacro %} {% macro dispatch_statement(statement) %} {# Each statement is a dict with a single key/value pair #} -{% if "print" in statement %} -{{ render_print(statement["print"]) -}} -{% elif "comment" in statement %} -{{ render_comment(statement["comment"]) -}} -{% elif "loop" in statement %} - {% with loop = statement["loop"] %} - {% if "collection" in loop %} -{{ render_collection_loop(loop) -}} - {% else %} -{{ render_map_loop(loop) -}} - {% endif %} - {% endwith %} -{% elif "write_file" in statement %} -{{ render_write_file(statement["write_file"]) -}} +{% if "print" in statement -%} +{{ render_print(statement["print"]) }} +{% elif "define" in statement -%} +{{ render_define(statement["define"]) }} +{% elif "comment" in statement -%} +{{ render_comment(statement["comment"]) }} +{% elif "loop" in statement -%} + {% with loop = statement["loop"] -%} + {% if "collection" in loop -%} +{{ render_collection_loop(loop) }} + {% else -%} +{{ render_map_loop(loop) }} + {% endif -%} + {% endwith -%} +{% elif "write_file" in statement -%} +{{ render_write_file(statement["write_file"]) }} {% endif %} {% endmacro %} @@ -208,7 +213,7 @@ response 
= operation.result() {% endmacro %} {% macro render_method_name(method_name) %} -{{ method_name|snake_case -}} +{{ method_name|snake_case }} {% endmacro %} {% macro render_main_block(method_name, request_block) %} @@ -217,17 +222,17 @@ def main(): parser = argparse.ArgumentParser() {% with arg_list = [] %} -{% for request in request_block|map(attribute="body") -%} +{% for request in request_block|map(attribute="body") %} {% for attr in request if "input_parameter" in attr %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default="{{ attr.value }}") -{% do arg_list.append("args." + attr.input_parameter) -%} +{% do arg_list.append("args." + attr.input_parameter) %} {% endfor %} {% endfor %} args = parser.parse_args() - sample_{{ render_method_name(method_name) }}({{ arg_list|join(", ") }}) + sample_{{ render_method_name(method_name)|trim }}({{ arg_list|join(", ") }}) if __name__ == "__main__": diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index b5ab7cd5758e..1ddfde983823 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -30,7 +30,7 @@ {% endfor %} {# also need calling form #} -def sample_{{ frags.render_method_name(sample.rpc) }}({{ frags.print_input_params(sample.request) }}): +def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input_params(sample.request)|trim -}}): """{{ sample.description }}""" client = {{ sample.service.split(".")[-3:-1]| diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 0c2ba14e5f3a..c2e480c94cb6 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -64,7 +64,7 @@ def test_generate_sample_basic(): sample, True, env, 
schema) sample_str = "".join(iter(template_stream)) - assert sample_str == '''# TODO: add a copyright + expected_str = '''# TODO: add a copyright # TODO: add a license # # DO NOT EDIT! This is a generated sample ("CallingForm.Request", "mollusc_classify_sync") @@ -91,6 +91,7 @@ def sample_classify(video): print("Mollusc is a {}".format(response.taxonomy)) + # [END mollusc_classify_sync] def main(): @@ -109,6 +110,8 @@ def main(): main() ''' + assert sample_str == expected_str + def test_generate_sample_service_not_found(): schema = DummyApiSchema({}, DummyNaming("pkg_name")) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 1d841d073b5a..893d5f95847f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -56,7 +56,7 @@ def test_generate_manifest(): gapic_yaml.Collection(name="samples", elements=[ [ - gapic_yaml.Anchor( + gapic_yaml.Alias( "python"), gapic_yaml.KeyVal( "sample", "squid_sample"), @@ -66,7 +66,7 @@ def test_generate_manifest(): "region_tag", ""), ], [ - gapic_yaml.Anchor("python"), + gapic_yaml.Alias("python"), gapic_yaml.KeyVal( "sample", "clam_sample"), gapic_yaml.KeyVal( diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index b6bf22f3ffdc..fa0a88f6e56a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -882,6 +882,14 @@ def test_validate_expression_repeated_lookup(): v.validate_expression("$resp.molluscs[0].mantle") +def test_validate_expression_repeated_lookup_nested(): + exp = "$resp.molluscs.tentacles.club" + OutputType = message_factory(exp, [True, True, False]) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + 
v.validate_expression("$resp.molluscs[0].tentacles[0].club") + + def test_validate_expression_repeated_lookup_invalid(): exp = "$resp.molluscs.mantle" OutputType = message_factory(exp) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index d5985e9cde73..968fcc3062cd 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -48,7 +48,9 @@ def check_template(template_fragment, expected_output, **kwargs): template = env.get_template("template_fragment") text = template.render(**kwargs) - assert text == dedent(expected_output) + expected_output = dedent(expected_output) + + assert text == expected_output def test_render_attr_value(): @@ -226,6 +228,20 @@ def test_dispatch_print(): ''', ''' print("Squid") + + ''' + ) + + +def test_dispatch_define(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatch_statement({"define": "squid=humboldt"})}} + ''', + ''' + squid = humboldt + ''' ) @@ -238,6 +254,7 @@ def test_dispatch_comment(): ''', ''' # Squid + ''' ) @@ -253,7 +270,6 @@ def test_write_file(): ''' with open("specimen-{}".format(response.species), "wb") as f: f.write(response.photo) - ''' ) @@ -281,11 +297,13 @@ def test_collection_loop(): {% import "feature_fragments.j2" as frags %} {{ frags.render_collection_loop({"collection": "$resp.molluscs", "variable": "m", - "body": {"print": ["Mollusc: %s", "m"]}})}} + "body": [{"print": ["Mollusc: %s", "m"]}]})}} ''', ''' for m in response.molluscs: print("Mollusc: {}".format(m)) + + ''' ) @@ -296,10 +314,13 @@ def test_dispatch_collection_loop(): {% import "feature_fragments.j2" as frags %} {{ frags.dispatch_statement({"loop": {"collection": "molluscs", "variable": "m", - "body": {"print": ["Mollusc: %s", "m"]}}}) }}''', + "body": [{"print": ["Mollusc: %s", "m"]}]}}) }}''', ''' for m in molluscs: 
print("Mollusc: {}".format(m)) + + + ''' ) @@ -311,11 +332,13 @@ def test_map_loop(): {{ frags.render_map_loop({"map": "$resp.molluscs", "key":"cls", "value":"example", - "body": {"print": ["A %s is a %s", "example", "cls"] }}) + "body": [{"print": ["A %s is a %s", "example", "cls"] }]}) }}''', ''' for cls, example in response.molluscs.items(): print("A {} is a {}".format(example, cls)) + + ''' ) @@ -326,12 +349,14 @@ def test_map_loop_no_key(): {% import "feature_fragments.j2" as frags %} {{ frags.render_map_loop({"map": "$resp.molluscs", "value":"example", - "body": {"print": ["A %s is a mollusc", "example"] }}) + "body": [{"print": ["A %s is a mollusc", "example"] }]}) }} ''', ''' for example in response.molluscs.values(): print("A {} is a mollusc".format(example)) + + ''' ) @@ -342,12 +367,14 @@ def test_map_loop_no_value(): {% import "feature_fragments.j2" as frags %} {{ frags.render_map_loop({"map": "$resp.molluscs", "key":"cls", - "body": {"print": ["A %s is a mollusc", "cls"] }}) + "body": [{"print": ["A %s is a mollusc", "cls"] }]}) }} ''', ''' for cls in response.molluscs.keys(): print("A {} is a mollusc".format(cls)) + + ''' ) @@ -359,13 +386,16 @@ def test_dispatch_map_loop(): {{ frags.dispatch_statement({"loop":{"map": "molluscs", "key":"cls", "value":"example", - "body": { - "print": ["A %s is a %s", "example", "cls"] }}}) + "body": [{ + "print": ["A %s is a %s", "example", "cls"] }]}}) }} ''', ''' for cls, example in molluscs.items(): print("A {} is a {}".format(example, cls)) + + + ''' ) @@ -391,7 +421,8 @@ def test_print_input_params(): ]) }} ''', ''' - mass, length, color''' + mass, length, color + ''' ) @@ -409,6 +440,7 @@ def test_render_calling_form_request(): response = TEST_INVOCATION_TXT print("Test print statement") + ''', calling_form_enum=CallingForm, calling_form=CallingForm.Request) @@ -420,7 +452,8 @@ def test_render_calling_form_paged_all(): page_result = TEST_INVOCATION_TXT for response in page_result: print("Test print statement") - 
+ + ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestPagedAll) @@ -433,7 +466,8 @@ def test_render_calling_form_paged(): for page in page_result.pages(): for response in page: print("Test print statement") - + + ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestPaged) @@ -446,6 +480,7 @@ def test_render_calling_form_streaming_server(): for response in stream: print("Test print statement") + ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingServer) @@ -458,6 +493,7 @@ def test_render_calling_form_streaming_bidi(): for response in stream: print("Test print statement") + ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingBidi) @@ -473,6 +509,7 @@ def test_render_calling_form_longrunning(): response = operation.result() print("Test print statement") + ''', calling_form_enum=CallingForm, calling_form=CallingForm.LongRunningRequestPromise) From 95d09f166039b6fe1b684ddedb268f1ba2ac93c1 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 26 Jul 2019 11:50:53 -0700 Subject: [PATCH 0140/1339] Disable renovate updating the docker images (#163) The docker images as they are now give us test coverage for both python 3.6 and 3.7; the default behavior for renovate would be to update the images to the latest version, which defeats the point of backwards compatibility testing. 
--- packages/gapic-generator/renovate.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/renovate.json b/packages/gapic-generator/renovate.json index f45d8f110c30..33c6788ce6d8 100644 --- a/packages/gapic-generator/renovate.json +++ b/packages/gapic-generator/renovate.json @@ -1,5 +1,6 @@ { "extends": [ - "config:base" + "config:base", + "docker:disable" ] } From a73ac6d2a4fa840b44438867dde90af549bd76f3 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 26 Jul 2019 13:18:05 -0700 Subject: [PATCH 0141/1339] Test request setup against method request type (#162) Fields in the request are checked against the fields in the message type. Also includes changes for properly handling and rendering top level fields, plus associated tests. Also includes broad, shallow, standardizing formatting cleanup to samplegen unit tests --- .../gapic/samplegen/samplegen.py | 248 +++++--- .../templates/examples/feature_fragments.j2 | 56 +- .../gapic/templates/examples/sample.py.j2 | 2 +- packages/gapic-generator/noxfile.py | 47 +- .../tests/unit/samplegen/common_types.py | 48 +- .../tests/unit/samplegen/test_integration.py | 5 +- .../tests/unit/samplegen/test_samplegen.py | 530 +++++++++++------- .../tests/unit/samplegen/test_template.py | 238 +++++--- 8 files changed, 781 insertions(+), 393 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index a526f6cb2b91..e88350acc3b8 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import dataclasses import itertools import jinja2 import keyword @@ -23,7 +24,7 @@ from collections import (defaultdict, namedtuple, ChainMap as chainmap) from textwrap import dedent -from typing import (ChainMap, Dict, List, Mapping, Optional, Set, Tuple) +from typing import (ChainMap, Dict, List, Mapping, Optional, Set, Tuple, Union) # Outstanding issues: # * In real sample configs, many variables are @@ -58,7 +59,50 @@ TEMPLATE_NAME = "sample.py.j2" -TransformedRequest = namedtuple("TransformedRequest", ["base", "body"]) +@dataclasses.dataclass(frozen=True) +class AttributeRequestSetup: + """A single request-field setup description. + + If 'field' is not set, this is a top level attribute, in which case the 'base' + parameter of the owning TransformedRequest is the attribute name. + + A True 'value_is_file' indicates that 'value' is a file path, + and that the value of the attribute is the contents of that file. + + A non-empty 'input_parameter' indicates a formal parameter to the sample function + that contains the value for the attribute. + + """ + value: str + field: Optional[str] = None + value_is_file: bool = False + input_parameter: Optional[str] = None + comment: Optional[str] = None + + +@dataclasses.dataclass(frozen=True) +class TransformedRequest: + """Class representing a single field in a method call. + + A request block, as read in from the sample config, is a list of dicts that + describe field setup for the API method request. + + Fields with subfields are treated as dictionaries, with subfields as keys + and passed, read, or hardcoded subfield values as the mapped values. + These field dictionaries are passed into the client method as positional arguments. + + Fields _without_ subfields, aka top-level-fields, are passed into the method call + as keyword parameters, with their associated values assigned directly. + + A TransformedRequest describes a subfield of the API request. 
+ It is either a top level request, in which case the 'single' attribute is filled, + or it has assigned-to subfields, in which case 'body' lists assignment setups. + + The Optional[single]/Optional[body] is workaround for not having tagged unions. + """ + base: str + single: Optional[AttributeRequestSetup] + body: Optional[List[AttributeRequestSetup]] class SampleError(Exception): @@ -117,6 +161,10 @@ class InvalidRequestSetup(SampleError): pass +class InvalidEnumVariant(SampleError): + pass + + def coerce_response_name(s: str) -> str: # In the sample config, the "$resp" keyword is used to refer to the # item of interest as received by the corresponding calling form. @@ -148,6 +196,7 @@ class Validator: def __init__(self, method: wrappers.Method): # The response ($resp) variable is special and guaranteed to exist. # TODO: name lookup also involes type checking + self.request_type_ = method.input response_type = method.output if method.paged_result_field: response_type = method.paged_result_field @@ -183,15 +232,16 @@ def validate_and_transform_request(self, variable name, e.g. clam.shell. The only required keys in each dict are "field" and value". - Optional keys are "input_parameter" and "value_is_file". - All values in the initial request are strings - except for the value for "value_is_file", which is a bool. + Optional keys are "input_parameter", "value_is_file". and "comment". + All values in the initial request are strings except for the value + for "value_is_file", which is a bool. - The topmost dict of the return value has two keys: "base" and "body", - where "base" maps to a variable name, and "body" maps to a list of variable - assignment definitions. The only difference in the bottommost dicts - are that "field" maps only to the second part of a dotted variable name. - Other key/value combinations in the dict are unmodified for the time being. 
+ The TransformedRequest structure of the return value has three fields: + "base", "body", and "single", where "base" maps to the top level attribute name, + "body" maps to a list of subfield assignment definitions, and "single" + maps to a singleton attribute assignment structure with no "field" value. + The "field" attribute in the requests in a "body" list have their prefix stripped; + the request in a "single" attribute has no "field" attribute. Note: gRPC API methods only take one parameter (ignoring client-side streaming). The reason that GAPIC client library API methods may take multiple parameters @@ -199,22 +249,28 @@ def validate_and_transform_request(self, The different 'bases' are really attributes for the singular request parameter. TODO: properly handle subfields, indexing, and so forth. - - TODO: Conduct module lookup and expansion for protobuf enums. - Requires proto/method/message descriptors. - TODO: Permit single level field/oneof requst parameters. - Requires proto/method/message descriptors. TODO: Add/transform to list repeated element fields. Requires proto/method/message descriptors. E.g. 
[{"field": "clam.shell", "value": "10 kg", "input_parameter": "shell"}, {"field": "clam.pearls", "value": "3"}, - {"field": "squid.mantle", "value": "100 kg"}] + {"field": "squid.mantle", "value": "100 kg"}, + {"field": "whelk", "value": "speckled"}] -> - [TransformedRequest("clam", - [{"field": "shell", "value": "10 kg", "input_parameter": "shell"}, - {"field": "pearls", "value": "3"}]), - TransformedRequest("squid", [{"field": "mantle", "value": "100 kg"}])] + [TransformedRequest( + base="clam", + body=[AttributeRequestSetup(field="shell", + value="10 kg", + input_parameter="shell"), + AttributeRequestSetup(field="pearls", value="3")], + single=None), + TransformedRequest(base="squid", + body=[AttributeRequestSetup(field="mantle", + value="100 kg")], + single=None), + TransformedRequest(base="whelk", + body=None, + single=AttributeRequestSetup(value="speckled))] The transformation makes it easier to set up request parameters in jinja because it doesn't have to engage in prefix detection, validation, @@ -225,78 +281,104 @@ def validate_and_transform_request(self, request (list[dict{str:str}]): The request body from the sample config Returns: - list[dict{str:(str|list[dict{str:str}])}]: The transformed request block. + List[TransformedRequest]: The transformed request block. Raises: - RedefinedVariable: If an "input_parameter" attempts to redefine a - previously defined variable. - ReservedVariableName: If an "input_parameter" value or a "base" value - is a reserved word. - InvalidRequestSetup: If a dict in the request lacks a "field" key - or the corresponding value is malformed. + InvalidRequestSetup: If a dict in the request lacks a "field" key, + a "value" key, if there is an unexpected keyword, + or if more than one base parameter is given for + a client-side streaming calling form. + BadAttributeLookup: If a request field refers to a non-existent field + in the request message type. 
+ """ - base_param_to_attrs: Mapping[str, - List[Mapping[str, str]]] = defaultdict(list) - - for field_assignment in request: - field_assignment_copy = dict(field_assignment) - input_param = field_assignment_copy.get("input_parameter") - if input_param: - # We use str as the input type because - # validate_expression just needs to know - # that the input_parameter isn't a MessageType of any kind. - # TODO: write a test about that. - # TODO: handle enums - self._handle_lvalue(input_param, str) - - field = field_assignment_copy.get("field") - if not field: - raise InvalidRequestSetup( - "No field attribute found in request setup assignment: {}".format( - field_assignment_copy - ) - ) + base_param_to_attrs: Dict[str, + List[AttributeRequestSetup]] = defaultdict(list) - # TODO: properly handle top level fields - # E.g. - # - # -field: edition - # comment: The edition of the series. - # value: '123' - # input_parameter: edition - m = re.match(r"^([a-zA-Z]\w*)\.([a-zA-Z]\w*)$", field) - if not m: + for r in request: + duplicate = dict(r) + val = duplicate.get("value") + if not val: raise InvalidRequestSetup( - "Malformed request attribute description: {}".format(field) - ) - - base, attr = m.groups() - if base in RESERVED_WORDS: - raise ReservedVariableName( - "Tried to define '{}', which is a reserved name".format( - base) - ) - - field_assignment_copy["field"] = attr - base_param_to_attrs[base].append(field_assignment_copy) + "Missing keyword in request entry: 'value'") - if ( - calling_form - in { - utils.CallingForm.RequestStreamingClient, - utils.CallingForm.RequestStreamingBidi, - } - and len(base_param_to_attrs) > 1 - ): + field = duplicate.get("field") + if not field: + raise InvalidRequestSetup( + "Missing keyword in request entry: 'field'") + + spurious_keywords = set(duplicate.keys()) - {"value", + "field", + "value_is_file", + "input_parameter", + "comment"} + if spurious_keywords: + raise InvalidRequestSetup( + "Spurious keyword(s) in request entry: 
{}".format( + ", ".join(f"'{kword}'" for kword in spurious_keywords))) + + input_parameter = duplicate.get("input_parameter") + if input_parameter: + self._handle_lvalue(input_parameter, str) + + attr_chain = field.split(".") + base = self.request_type_ + for attr_name in attr_chain: + attr = base.fields.get(attr_name) + if not attr: + raise BadAttributeLookup( + "Method request type {} has no attribute: '{}'".format( + self.request_type_.type, attr_name)) + + if attr.message: + base = attr.message + + else: + raise TypeError + + # TODO: uncomment this when handling enums + # if attr.enum: + # # A little bit hacky, but 'values' is a list, and this is the easiest + # # way to verify that the value is a valid enum variant. + # witness = next((e.name for e in attr.enum.values if e.name == val), None) + # if not witness: + # raise InvalidEnumVariant + # # Python code can set protobuf enums from strings. + # # This is preferable to adding the necessary import statement + # # and requires less munging of the assigned value + # duplicate["value"] = f"'{witness}'" + + # TODO: what if there's more stuff in the chain? + if len(attr_chain) > 1: + duplicate["field"] = ".".join(attr_chain[1:]) + else: + # Because of the way top level attrs get rendered, + # there can't be duplicates. + # This is admittedly a bit of a hack. + if attr_chain[0] in base_param_to_attrs: + raise InvalidRequestSetup( + "Duplicated top level field in request block: '{}'".format( + attr_chain[0])) + del duplicate["field"] + + # Mypy isn't smart enough to handle dictionary unpacking, + # so disable it for the AttributeRequestSetup ctor call. 
+ base_param_to_attrs[attr_chain[0]].append( + AttributeRequestSetup(**duplicate)) # type: ignore + + client_streaming_forms = { + utils.CallingForm.RequestStreamingClient, + utils.CallingForm.RequestStreamingBidi, + } + + if len(base_param_to_attrs) > 1 and calling_form in client_streaming_forms: raise InvalidRequestSetup( - ( - "There can be at most 1 base request in a sample" - " for a method with client side streaming" - ) - ) + "Too many base parameters for client side streaming form") return [ - TransformedRequest(base, body) for base, body in base_param_to_attrs.items() + (TransformedRequest(base=key, body=val, single=None) if val[0].field + else TransformedRequest(base=key, body=None, single=val[0])) + for key, val in base_param_to_attrs.items() ] def validate_response(self, response): diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index d0d015ce08da..0f46a962e8c3 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -49,14 +49,16 @@ There is a little, but not enough for it to be important because {% macro print_input_params(requests) %} {% with input_parameters = [] %} -{% for request in requests %} - {% for element in request.body %} - {% if "input_parameter" in element %} - {% do input_parameters.append(element["input_parameter"]) %} + {% for request in requests %} + {% if request.body %} + {% for element in request.body if element.input_parameter %} + {% do input_parameters.append(element.input_parameter) %} + {% endfor %} + {% elif request.single and request.single.input_parameter %} + {% do input_parameters.append(request.single.input_parameter) %} {% endif %} {% endfor %} -{% endfor %} -{{ input_parameters|join(", ") }} +{{ input_parameters|join(", ") -}} {% endwith %} {% endmacro %} @@ -139,9 +141,9 @@ with open({{ 
print_string_formatting(statement["filename"])|trim }}, "wb") as f: {# to be the correct enum from the right module, if necessary. #} {# Python is also responsible for verifying that each input parameter is unique,#} {# no parameter is a reserved keyword #} - {% if "input_parameter" in attr %} + {% if attr.input_parameter %} # {{ attr.input_parameter }} = "{{ attr.value }}" - {% if "value_is_file" in attr and attr.value_is_file %} + {% if attr.value_is_file %} with open({{ attr.input_parameter }}, "rb") as f: {{ base_name }}["{{ attr.field }}"] = f.read() {% else %} @@ -152,8 +154,8 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endif %} {% endmacro %} -{% macro render_request(request) %} - {% for parameter_block in request %} +{% macro render_request_setup(request) %} + {% for parameter_block in request if parameter_block.body %} {{ parameter_block.base }} = {} {% for attr in parameter_block.body %} {{ render_request_attr(parameter_block.base, attr) }} @@ -161,14 +163,27 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endfor %} {% endmacro %} +{% macro render_request_params(request) %} + {# Provide the top level parameters last and as keyword params #} + {% with params = [] -%} + {% for r in request if r.body -%} + {% do params.append(r.base) -%} + {% endfor -%} + {% for r in request if r.single -%} + {% do params.append("%s=%s"|format(r.base, r.single.value)) -%} + {% endfor -%} +{{ params|join(", ") -}} + {% endwith -%} +{% endmacro %} + {% macro render_method_call(sample, calling_form, calling_form_enum) %} {# Note: this doesn't deal with enums or unions #} -{% if calling_form not in [calling_form_enum.RequestStreamingBidi, -calling_form_enum.RequestStreamingClient] %} -client.{{ sample.rpc|snake_case }}({{ sample.request|map(attribute="base")|join(", ") }}) +{% if calling_form in [calling_form_enum.RequestStreamingBidi, + calling_form_enum.RequestStreamingClient] %} +client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request) 
}}]) {% else %} {# TODO: set up client streaming once some questions are answered #} -client.{{ sample.rpc|snake_case }}([{{ sample.request|map(attribute="base")|join("") }}]) +client.{{ sample.rpc|snake_case }}({{ render_request_params(sample.request) }}) {% endif %} {% endmacro %} @@ -207,7 +222,7 @@ print("Waiting for operation to complete...") response = operation.result() {% for statement in response_statements %} -{{ dispatch_statement(statement ) }} +{{ dispatch_statement(statement) }} {% endfor %} {% endif %} {% endmacro %} @@ -222,13 +237,18 @@ def main(): parser = argparse.ArgumentParser() {% with arg_list = [] %} -{% for request in request_block|map(attribute="body") %} -{% for attr in request if "input_parameter" in attr %} + {% for request in request_block if request.body -%} + {% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default="{{ attr.value }}") -{% do arg_list.append("args." + attr.input_parameter) %} +{% do arg_list.append("args." 
+ attr.input_parameter) -%} +{% endfor -%} {% endfor %} +{% for request in request_block if request.single and request.single.input_parameter -%} + parser.add_argument("-- {{ request.single.input_parameter }}", + type=str, + default="{{ request.single.value }}") {% endfor %} args = parser.parse_args() diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 1ddfde983823..dff95941df5a 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -37,7 +37,7 @@ def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input map("lower")| join("_") }}.{{ sample.service.split(".")[-1] }}Client() - {{ frags.render_request(sample.request)|indent }} + {{ frags.render_request_setup(sample.request)|indent }} {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response)|indent -}} {% endwith %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 047a989e09ba..d129145b6bd8 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -31,7 +31,8 @@ def unit(session): session.run( 'py.test', - '--quiet', + '-vv', + # '--quiet', '--cov=gapic', '--cov-config=.coveragerc', '--cov-report=term', @@ -71,12 +72,12 @@ def showcase(session): # Write out a client library for Showcase. 
session.run('protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - external=True, - ) + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + external=True, + ) # Install the library. session.install(tmp_dir) @@ -107,14 +108,14 @@ def showcase_unit(session): # Write out a client library for Showcase. session.run('protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - 'google/showcase/v1beta1/messaging.proto', - 'google/showcase/v1beta1/testing.proto', - external=True, - ) + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + 'google/showcase/v1beta1/messaging.proto', + 'google/showcase/v1beta1/testing.proto', + external=True, + ) # Install the library. session.chdir(tmp_dir) @@ -152,14 +153,14 @@ def showcase_mypy(session): # Write out a client library for Showcase. session.run('protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - 'google/showcase/v1beta1/messaging.proto', - 'google/showcase/v1beta1/testing.proto', - external=True, - ) + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + 'google/showcase/v1beta1/messaging.proto', + 'google/showcase/v1beta1/testing.proto', + external=True, + ) # Install the library. 
session.chdir(tmp_dir) diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index 45c5a21c7f49..0dc220638b08 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -12,7 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import itertools + from collections import namedtuple +from typing import(Iterable, Optional) + +from gapic.schema import wrappers # Injected dummy test types @@ -33,7 +38,7 @@ DummyMessage = namedtuple("DummyMessage", ["fields", "type"]) DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) -DummyField = namedtuple("DummyField", ["message", "repeated"]) +DummyField = namedtuple("DummyField", ["message", "enum", "repeated"]) DummyField.__new__.__defaults__ = (False,) * len(DummyField._fields) DummyService = namedtuple("DummyService", ["methods"]) @@ -44,3 +49,44 @@ DummyNaming = namedtuple( "DummyNaming", ["warehouse_package_name", "name", "version"]) DummyNaming.__new__.__defaults__ = (False,) * len(DummyNaming._fields) + + +def message_factory(exp: str, + repeated_iter=itertools.repeat(False), + enum: Optional[wrappers.EnumType] = None) -> DummyMessage: + # This mimics the structure of MessageType in the wrappers module: + # A MessageType has a map from field names to Fields, + # and a Field has an (optional) MessageType. + # The 'exp' parameter is a dotted attribute expression + # used to describe the field and type hierarchy, + # e.g. 
"mollusc.cephalopod.coleoid" + toks = exp.split(".") + messages = [DummyMessage({}, tok.upper() + "_TYPE") for tok in toks] + if enum: + messages[-1] = enum + + for base, field, attr_name, repeated_field in zip( + messages, messages[1:], toks[1:], repeated_iter + ): + base.fields[attr_name] = (DummyField(message=field, repeated=repeated_field) + if isinstance(field, DummyMessage) + else DummyField(enum=field)) + + return messages[0] + + +def enum_factory(name: str, variants: Iterable[str]) -> wrappers.EnumType: + enum_pb = descriptor_pb2.EnumDescriptorProto( + name=name, + value=tuple( + descriptor_pb2.EnumValueDescriptorProto(name=v, number=i) + for i, v in enumerate(variants) + ) + ) + + enum = wrappers.EnumType( + enum_pb=enum_pb, + values=[wrappers.EnumValueType(enum_value_pb=v) for v in enum_pb.value] + ) + + return enum diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index c2e480c94cb6..95398c076353 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -20,7 +20,7 @@ import gapic.utils as utils from common_types import (DummyMethod, DummyService, - DummyApiSchema, DummyNaming) + DummyApiSchema, DummyNaming, message_factory, enum_factory) from collections import namedtuple from textwrap import dedent @@ -47,7 +47,8 @@ def test_generate_sample_basic(): # to have standalone tests. 
schema = DummyApiSchema( {"animalia.mollusca.v1.Mollusc": DummyService( - {"Classify": DummyMethod()})}, + {"Classify": DummyMethod( + input=message_factory("mollusc.classify_request.video"))})}, DummyNaming("molluscs-v1-mollusc")) sample = {"service": "animalia.mollusca.v1.Mollusc", diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index fa0a88f6e56a..82341fac7076 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -13,76 +13,56 @@ # limitations under the License. import yaml -import itertools import pytest -from typing import TypeVar +from typing import (Iterable, TypeVar) from collections import namedtuple +from google.protobuf import descriptor_pb2 import gapic.schema.wrappers as wrappers import gapic.samplegen.yaml as gapic_yaml import gapic.samplegen.samplegen as samplegen -from common_types import DummyField, DummyMessage, DummyMethod +from common_types import (DummyField, DummyMessage, + DummyMethod, message_factory) from gapic.samplegen import utils -def message_factory(exp: str, repeated_iter=itertools.repeat(False)) -> DummyMessage: - # This mimics the structure of MessageType in the wrappers module: - # A MessageType has a map from field names to Fields, - # and a Field has an (optional) MessageType. - # The 'exp' parameter is a dotted attribute expression - # used to describe the field and type hierarchy, - # e.g. 
"mollusc.cephalopod.coleoid" - toks = exp.split(".") - messages = [DummyMessage({}, tok.upper() + "_TYPE") for tok in toks] - for base, field, attr_name, repeated_field in zip( - messages, messages[1:], toks[1:], repeated_iter - ): - base.fields[attr_name] = DummyField(field, repeated=repeated_field) - - return messages[0] - - # validate_response tests def test_define(): define = {"define": "squid=$resp"} - - samplegen.Validator( - DummyMethod(output=message_factory("mollusc")) - ).validate_response([define]) + v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) + v.validate_response([define]) def test_define_undefined_var(): define = {"define": "squid=humboldt"} + v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator( - DummyMethod(output=message_factory("mollusc")) - ).validate_response([define]) + v.validate_response([define]) def test_define_reserved_varname(): define = {"define": "class=$resp"} + v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator( - DummyMethod(output=message_factory("mollusc")) - ).validate_response([define]) + v.validate_response([define]) def test_define_add_var(): - samplegen.Validator( - DummyMethod(output=message_factory("mollusc.name")) - ).validate_response([{"define": "squid=$resp"}, {"define": "name=squid.name"}]) + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc.name"))) + v.validate_response([{"define": "squid=$resp"}, + {"define": "name=squid.name"}]) def test_define_bad_form(): define = {"define": "mollusc=$resp.squid=$resp.clam"} + v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) with pytest.raises(samplegen.BadAssignment): - samplegen.Validator( - DummyMethod(output=message_factory("mollusc")) - ).validate_response([define]) + v.validate_response([define]) def 
test_define_redefinition(): @@ -90,15 +70,16 @@ def test_define_redefinition(): {"define": "molluscs=$resp.molluscs"}, {"define": "molluscs=$resp.molluscs"}, ] + v = samplegen.Validator(DummyMethod(output=message_factory("$resp.molluscs", + repeated_iter=[True]))) with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", [True])) - ).validate_response(statements) + v.validate_response(statements) def test_define_input_param(): - validator = samplegen.Validator(DummyMethod()) - validator.validate_and_transform_request( + v = samplegen.Validator( + DummyMethod(input=message_factory("mollusc.squid.mantle_length"))) + v.validate_and_transform_request( utils.CallingForm.Request, [ { @@ -108,12 +89,13 @@ def test_define_input_param(): } ], ) - validator.validate_response([{"define": "length=mantle_length"}]) + v.validate_response([{"define": "length=mantle_length"}]) def test_define_input_param_redefinition(): - validator = samplegen.Validator(DummyMethod()) - validator.validate_and_transform_request( + v = samplegen.Validator(DummyMethod( + input=message_factory("mollusc.squid.mantle_length"))) + v.validate_and_transform_request( utils.CallingForm.Request, [ { @@ -124,7 +106,7 @@ def test_define_input_param_redefinition(): ], ) with pytest.raises(samplegen.RedefinedVariable): - validator.validate_response( + v.validate_response( [{"define": "mantle_length=mantle_length"}]) @@ -135,33 +117,29 @@ def test_print_basic(): def test_print_fmt_str(): print_statement = {"print": ["This is a squid named %s", "$resp.name"]} - samplegen.Validator( - DummyMethod(output=message_factory("$resp.name")) - ).validate_response([print_statement]) + v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) + v.validate_response([print_statement]) def test_print_fmt_mismatch(): print_statement = {"print": ["This is a squid named %s"]} + v = 
samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.name")) - ).validate_response([print_statement]) + v.validate_response([print_statement]) def test_print_fmt_mismatch2(): print_statement = {"print": ["This is a squid", "$resp.name"]} + v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.name")) - ).validate_response([print_statement]) + v.validate_response([print_statement]) def test_print_undefined_var(): print_statement = {"print": ["This mollusc is a %s", "mollusc.type"]} + v = samplegen.Validator(DummyMethod(output=message_factory("$resp.type"))) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.type")) - ).validate_response([print_statement]) + v.validate_response([print_statement]) def test_comment(): @@ -176,20 +154,23 @@ def test_comment_fmt_str(): def test_comment_fmt_undefined_var(): comment = {"comment": ["This is a mollusc of class %s", "cephalopod"]} + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response([comment]) + v.validate_response([comment]) def test_comment_fmt_mismatch(): comment = {"comment": ["This is a mollusc of class %s"]} + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator(DummyMethod()).validate_response([comment]) + v.validate_response([comment]) def test_comment_fmt_mismatch2(): comment = {"comment": ["This is a mollusc of class ", "$resp.class"]} + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.MismatchedFormatSpecifier): - samplegen.Validator(DummyMethod()).validate_response([comment]) + 
v.validate_response([comment]) def test_loop_collection(): @@ -200,9 +181,9 @@ def test_loop_collection(): "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } - samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", [True])) - ).validate_response([loop]) + v = samplegen.Validator(DummyMethod(output=message_factory( + "$resp.molluscs", repeated_iter=[True]))) + v.validate_response([loop]) def test_loop_collection_redefinition(): @@ -216,10 +197,10 @@ def test_loop_collection_redefinition(): } }, ] + v = samplegen.Validator( + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True]))) with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", [True])) - ).validate_response(statements) + v.validate_response(statements) def test_loop_undefined_collection(): @@ -230,8 +211,9 @@ def test_loop_undefined_collection(): "body": [{"print": ["Squid: %s", "s"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_collection_extra_kword(): @@ -243,8 +225,9 @@ def test_loop_collection_extra_kword(): "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.BadLoop): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_collection_missing_kword(): @@ -254,8 +237,9 @@ def test_loop_collection_missing_kword(): "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.BadLoop): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_collection_reserved_loop_var(): @@ -266,10 +250,10 @@ def test_loop_collection_reserved_loop_var(): "body": [{"print": ["Mollusc: %s", 
"class.name"]}], } } + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.molluscs", repeated_iter=[True]))) with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", [True])) - ).validate_response([loop]) + v.validate_response([loop]) def test_loop_map(): @@ -295,10 +279,10 @@ def test_collection_loop_lexical_scope_variable(): }, {"define": "cephalopod=m"}, ] + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.molluscs", repeated_iter=[True]))) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", [True])) - ).validate_response(statements) + v.validate_response(statements) def test_collection_loop_lexical_scope_inline(): @@ -312,10 +296,10 @@ def test_collection_loop_lexical_scope_inline(): }, {"define": "cephalopod=squid"}, ] + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.molluscs", repeated_iter=[True]))) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", [True])) - ).validate_response(statements) + v.validate_response(statements) def test_map_loop_lexical_scope_key(): @@ -330,8 +314,9 @@ def test_map_loop_lexical_scope_key(): }, {"define": "last_cls=cls"}, ] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response(statements) + v.validate_response(statements) def test_map_loop_lexical_scope_value(): @@ -346,8 +331,9 @@ def test_map_loop_lexical_scope_value(): }, {"define": "last_order=order"}, ] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response(statements) + v.validate_response(statements) def test_map_loop_lexical_scope_inline(): @@ -362,8 +348,9 @@ def 
test_map_loop_lexical_scope_inline(): }, {"define": "last_order=tmp"}, ] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response(statements) + v.validate_response(statements) def test_loop_map_reserved_key(): @@ -375,8 +362,9 @@ def test_loop_map_reserved_key(): "body": [{"print": ["A %s is a %s", "mollusc", "class"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_reserved_val(): @@ -388,8 +376,9 @@ def test_loop_map_reserved_val(): "body": [{"print": ["A %s is a %s", "m", "class"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_undefined(): @@ -401,8 +390,9 @@ def test_loop_map_undefined(): "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_no_key(): @@ -430,8 +420,9 @@ def test_loop_map_no_value(): def test_loop_map_no_key_or_value(): loop = {"loop": {"map": "$resp.molluscs", "body": [{"print": ["Dead loop"]}]}} + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.BadLoop): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_no_map(): @@ -442,14 +433,16 @@ def test_loop_map_no_map(): "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.BadLoop): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_no_body(): loop = 
{"loop": {"map": "$resp.molluscs", "key": "name", "value": "mollusc"}} + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.BadLoop): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_extra_kword(): @@ -462,8 +455,9 @@ def test_loop_map_extra_kword(): "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], } } + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.BadLoop): - samplegen.Validator(DummyMethod()).validate_response([loop]) + v.validate_response([loop]) def test_loop_map_redefined_key(): @@ -477,10 +471,10 @@ def test_loop_map_redefined_key(): } }, ] + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc.molluscs"))) with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator( - DummyMethod(output=message_factory("mollusc.molluscs")) - ).validate_response(statements) + v.validate_response(statements) def test_loop_map_redefined_value(): @@ -494,160 +488,291 @@ def test_loop_map_redefined_value(): } }, ] + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc.molluscs"))) with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator( - DummyMethod(output=message_factory("mollusc.molluscs")) - ).validate_response(statements) + v.validate_response(statements) def test_validate_write_file(): - samplegen.Validator(DummyMethod()).validate_response( - [ - { - "write_file": { - "filename": ["specimen-%s", "$resp.species"], - "contents": "$resp.photo", - } + statements = [ + { + "write_file": { + "filename": ["specimen-%s", "$resp.species"], + "contents": "$resp.photo", } - ] - ) + } + ] + samplegen.Validator(DummyMethod()).validate_response(statements) def test_validate_write_file_fname_fmt(): + statements = [{"write_file": + {"filename": ["specimen-%s"], "contents": "$resp.photo"}}] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.MismatchedFormatSpecifier): - 
samplegen.Validator(DummyMethod()).validate_response( - [{"write_file": {"filename": ["specimen-%s"], "contents": "$resp.photo"}}] - ) + v.validate_response(statements) def test_validate_write_file_fname_bad_var(): + statements = [{ + "write_file": { + "filename": ["specimen-%s", "squid.species"], + "contents": "$resp.photo", + } + }] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response( - [ - { - "write_file": { - "filename": ["specimen-%s", "squid.species"], - "contents": "$resp.photo", - } - } - ] - ) + v.validate_response(statements) def test_validate_write_file_missing_fname(): + statements = [{"write_file": {"contents": "$resp.photo"}}] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator(DummyMethod()).validate_response( - [{"write_file": {"contents": "$resp.photo"}}] - ) + v.validate_response(statements) def test_validate_write_file_missing_contents(): + statements = [{"write_file": {"filename": ["specimen-%s", + "$resp.species"]}}] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator(DummyMethod()).validate_response( - [{"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] - ) + v.validate_response(statements) def test_validate_write_file_bad_contents_var(): + statements = [{ + "write_file": { + "filename": ["specimen-%s", "$resp.species"], + "contents": "squid.photo", + } + }] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.UndefinedVariableReference): - samplegen.Validator(DummyMethod()).validate_response( - [ - { - "write_file": { - "filename": ["specimen-%s", "$resp.species"], - "contents": "squid.photo", - } - } - ] - ) + v.validate_response(statements) def test_invalid_statement(): - statement = {"print": ["Name"], "comment": ["Value"]} + statements = [{"print": ["Name"], "comment": ["Value"]}] + v = 
samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator(DummyMethod()).validate_response([statement]) + v.validate_response(statements) def test_invalid_statement2(): - statement = {"squidify": ["Statement body"]} + statements = [{"squidify": ["Statement body"]}] + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.InvalidStatement): - samplegen.Validator(DummyMethod()).validate_response([statement]) + v.validate_response(statements) # validate_and_transform_request tests def test_validate_request_basic(): - assert samplegen.Validator(DummyMethod()).validate_and_transform_request( + input_type = DummyMessage( + fields={ + "squid": DummyField( + message=DummyMessage( + fields={ + "mantle_length": DummyField( + message=DummyMessage(type="LENGTH_TYPE")), + "mantle_mass": DummyField( + message=DummyMessage(type="MASS_TYPE"))}, + type="SQUID_TYPE" + ) + ) + }, + type="REQUEST_TYPE" + ) + + v = samplegen.Validator(DummyMethod(input=input_type)) + actual = v.validate_and_transform_request( utils.CallingForm.Request, [ {"field": "squid.mantle_length", "value": "100 cm"}, {"field": "squid.mantle_mass", "value": "10 kg"}, ], - ) == [ - samplegen.TransformedRequest( - "squid", - [ - {"field": "mantle_length", "value": "100 cm"}, - {"field": "mantle_mass", "value": "10 kg"}, - ], + ) + expected = [samplegen.TransformedRequest( + base="squid", + body=[ + samplegen.AttributeRequestSetup(field="mantle_length", + value="100 cm"), + samplegen.AttributeRequestSetup(field="mantle_mass", + value="10 kg"), + ], + single=None + )] + + assert actual == expected + + +def test_validate_request_no_field_parameter(): + # May need to remeove this test because it doesn't necessarily make sense any more. 
+ v = samplegen.Validator(DummyMethod()) + with pytest.raises(samplegen.InvalidRequestSetup): + v.validate_and_transform_request( + utils.CallingForm.Request, [{"squid": "humboldt", + "value": "teuthida"}] + ) + + +def test_validate_request_no_such_attribute(): + v = samplegen.Validator(DummyMethod( + input=message_factory("mollusc.squid.mantle"))) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "clam.shell", "value": "20"}] ) + + +def test_validate_request_top_level_field(): + v = samplegen.Validator(DummyMethod( + input=message_factory("mollusc.squid"))) + actual = v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "squid", "value": "humboldt"}] + ) + + expected = [ + samplegen.TransformedRequest(base="squid", + body=None, + single=samplegen.AttributeRequestSetup( + value="humboldt" + )) ] + assert actual == expected -def test_validate_request_no_field_parameter(): + +def test_validate_request_missing_keyword(kword="field"): + v = samplegen.Validator(DummyMethod( + input=message_factory("mollusc.squid"))) + with pytest.raises(samplegen.InvalidRequestSetup): + v.validate_and_transform_request( + utils.CallingForm.Request, + [{kword: "squid"}] + ) + + +def test_validate_request_missing_value(): + test_validate_request_missing_keyword(kword="value") + + +def test_validate_request_spurious_kword(): + v = samplegen.Validator( + DummyMethod(input=message_factory("mollusc.squid"))) with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator(DummyMethod()).validate_and_transform_request( - utils.CallingForm.Request, [{"squid": "humboldt"}] + v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "mollusc.squid", "value": "humboldt", "order": "teuthida"}] + ) + + +def test_validate_request_unknown_field_type(): + v = samplegen.Validator(DummyMethod( + input=DummyMessage(fields={"squid": DummyField()}))) + with 
pytest.raises(TypeError): + v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "squid", "value": "humboldt"}] ) -def test_validate_request_malformed_field_attr(): +def test_validate_request_duplicate_top_level_fields(): + v = samplegen.Validator(DummyMethod( + input=message_factory("mollusc.squid"))) with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator(DummyMethod()).validate_and_transform_request( - utils.CallingForm.Request, [{"field": "squid"}] + v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "squid", "value": "humboldt"}, + {"field": "squid", "value": "bobtail"}] ) def test_validate_request_multiple_arguments(): - assert samplegen.Validator(DummyMethod()).validate_and_transform_request( + input_type = DummyMessage( + fields={ + "squid": DummyField( + message=DummyMessage( + fields={"mantle_length": DummyField( + message=DummyMessage(type="LENGTH_TYPE"))}, + type="SQUID_TYPE" + ) + ), + "clam": DummyField( + message=DummyMessage( + fields={"shell_mass": DummyField( + message=DummyMessage(type="MASS_TYPE"))}, + type="CLAM_TYPE" + ) + ), + }, + type="REQUEST_TYPE" + ) + + v = samplegen.Validator(DummyMethod(input=input_type)) + actual = v.validate_and_transform_request( utils.CallingForm.Request, [ - {"field": "squid.mantle_length", - "value": "100 cm", "value_is_file": True}, + { + "field": "squid.mantle_length", + "value": "100 cm", "value_is_file": True + }, { "field": "clam.shell_mass", "value": "100 kg", "comment": "Clams can be large", }, ], - ) == [ + ) + expected = [ samplegen.TransformedRequest( - "squid", - [{"field": "mantle_length", "value": "100 cm", "value_is_file": True}], + base="squid", + body=[samplegen.AttributeRequestSetup( + field="mantle_length", + value="100 cm", + value_is_file=True)], + single=None ), samplegen.TransformedRequest( - "clam", - [ - { - "field": "shell_mass", - "value": "100 kg", - "comment": "Clams can be large", - } - ], + base="clam", + 
body=[samplegen.AttributeRequestSetup( + field="shell_mass", + value="100 kg", + comment="Clams can be large")], + single=None ), ] - -def test_validate_request_reserved_request_name(): - with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator(DummyMethod()).validate_and_transform_request( - utils.CallingForm.Request, [ - {"field": "class.order", "value": "coleoidea"}] - ) + assert actual == expected def test_validate_request_duplicate_input_param(): + input_type = DummyMessage( + fields={ + "squid": DummyField( + message=DummyMessage( + fields={"mantle_mass": DummyField( + message=DummyMessage(type="MASS_TYPE"))}, + type="SQUID_TYPE" + ) + ), + "clam": DummyField( + message=DummyMessage( + fields={"mantle_mass": DummyField( + message=DummyMessage(type="MASS_TYPE"))}, + type="CLAM_TYPE" + ) + ), + }, + type="REQUEST_TYPE" + ) + + v = samplegen.Validator(DummyMethod(input=input_type)) with pytest.raises(samplegen.RedefinedVariable): - samplegen.Validator(DummyMethod()).validate_and_transform_request( + v.validate_and_transform_request( utils.CallingForm.Request, [ { @@ -665,8 +790,9 @@ def test_validate_request_duplicate_input_param(): def test_validate_request_reserved_input_param(): + v = samplegen.Validator(DummyMethod()) with pytest.raises(samplegen.ReservedVariableName): - samplegen.Validator(DummyMethod()).validate_and_transform_request( + v.validate_and_transform_request( utils.CallingForm.Request, [ { @@ -678,7 +804,8 @@ def test_validate_request_reserved_input_param(): ) -def test_single_request_client_streaming(): +def test_single_request_client_streaming( + calling_form=utils.CallingForm.RequestStreamingClient): # Each API client method really only takes one parameter: # either a single protobuf message or an iterable of protobuf messages. # With unary request methods, python lets us describe attributes as positional @@ -687,8 +814,34 @@ def test_single_request_client_streaming(): # 'field's refer to sub-attributes. 
# Client streaming and bidirectional streaming methods can't use this notation, # and generate an exception if there is more than one 'base'. + input_type = DummyMessage( + fields={ + "cephalopod": DummyField( + message=DummyMessage( + fields={ + "order": DummyField( + message=DummyMessage(type="ORDER_TYPE") + ) + }, + type="CEPHALOPOD_TYPE" + ) + ), + "gastropod": DummyField( + message=DummyMessage( + fields={ + "order": DummyField( + message=DummyMessage(type="ORDER_TYPE") + ) + }, + type="GASTROPOD_TYPE" + ) + ) + }, + type="MOLLUSC_TYPE" + ) + v = samplegen.Validator(DummyMethod(input=input_type)) with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator(DummyMethod()).validate_and_transform_request( + v.validate_and_transform_request( utils.CallingForm.RequestStreamingClient, [ {"field": "cephalopod.order", "value": "cephalopoda"}, @@ -698,22 +851,8 @@ def test_single_request_client_streaming(): def test_single_request_bidi_streaming(): - # Each API client method really only takes one parameter: - # either a single protobuf message or an iterable of protobuf messages. - # With unary request methods, python lets us describe attributes as positional - # and keyword parameters, which simplifies request construction. - # The 'base' in the transformed request refers to an attribute, and the - # 'field's refer to sub-attributes. - # Client streaming and bidirectional streaming methods can't use this notation, - # and generate an exception if there is more than one 'base'. 
- with pytest.raises(samplegen.InvalidRequestSetup): - samplegen.Validator(DummyMethod()).validate_and_transform_request( - utils.CallingForm.RequestStreamingBidi, - [ - {"field": "cephalopod.order", "value": "cephalopoda"}, - {"field": "gastropod.order", "value": "pulmonata"}, - ], - ) + test_single_request_client_streaming( + utils.CallingForm.RequestStreamingBidi) def test_validate_request_calling_form(): @@ -813,6 +952,7 @@ def test_validate_expression_no_such_attr(): def test_validate_expression_predefined(): + # TODO: can't remember what this test does exp = "$resp.coleoidea.octopodiformes.octopus" OutputType = message_factory(exp) method = DummyMethod(output=OutputType) @@ -827,7 +967,7 @@ def test_validate_expression_repeated_attrs(): # of response/coleoidea/octopodiformes, but coleoidea is a repeated field, # so accessing $resp.coleoidea.octopodiformes doesn't make any sense. exp = "$resp.coleoidea.octopodiformes" - OutputType = message_factory(exp, [True, False]) + OutputType = message_factory(exp, repeated_iter=[True, False]) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) @@ -838,7 +978,7 @@ def test_validate_expression_repeated_attrs(): def test_validate_expression_collection(): exp = "$resp.molluscs" - OutputType = message_factory(exp, [True]) + OutputType = message_factory(exp, repeated_iter=[True]) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) v.validate_response( @@ -876,7 +1016,7 @@ def test_validate_expression_collection_error(): def test_validate_expression_repeated_lookup(): exp = "$resp.molluscs.mantle" - OutputType = message_factory(exp, [True, False]) + OutputType = message_factory(exp, repeated_iter=[True, False]) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) v.validate_expression("$resp.molluscs[0].mantle") @@ -901,7 +1041,7 @@ def test_validate_expression_repeated_lookup_invalid(): def test_validate_expression_base_attr_is_repeated(): exp = "$resp.molluscs.mantle" 
- OutputType = message_factory(exp, [True, False]) + OutputType = message_factory(exp, repeated_iter=[True, False]) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) v.validate_response([{"define": "molluscs=$resp.molluscs"}]) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 968fcc3062cd..c2f5bd7f44f2 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -28,6 +28,7 @@ def check_template(template_fragment, expected_output, **kwargs): # and passing a FunctionLoader whose load function returns # a constantly reassigned string attribute) isn't any faster # and is less clear. + expected_output = dedent(expected_output) env = jinja2.Environment( loader=jinja2.ChoiceLoader( [jinja2.FileSystemLoader( @@ -57,13 +58,15 @@ def test_render_attr_value(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request_attr("mollusc", - {"field": "order", - "value": "Molluscs.Cephalopoda.Coleoidea"}) }} + {{ frags.render_request_attr("mollusc", request) }} ''', ''' mollusc["order"] = Molluscs.Cephalopoda.Coleoidea - ''' + ''', + request=samplegen.AttributeRequestSetup( + field="order", + value="Molluscs.Cephalopoda.Coleoidea" + ) ) @@ -71,57 +74,40 @@ def test_render_attr_input_parameter(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request_attr("squid", {"field": "species", - "value": "Humboldt", - "input_parameter": "species"}) }} + {{ frags.render_request_attr("squid", request) }} ''', ''' # species = "Humboldt" squid["species"] = species - ''') + ''', + request=samplegen.AttributeRequestSetup(field="species", + value="Humboldt", + input_parameter="species")) def test_render_attr_file(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ 
frags.render_request_attr("classify_mollusc_request", - {"field": "mollusc_video", - "value": "path/to/mollusc/video.mkv", - "input_parameter" : "mollusc_video_path", - "value_is_file": True}) }} + {{ frags.render_request_attr("classify_mollusc_request", request) }} ''', ''' # mollusc_video_path = "path/to/mollusc/video.mkv" with open(mollusc_video_path, "rb") as f: classify_mollusc_request["mollusc_video"] = f.read() - ''') + ''', + request=samplegen.AttributeRequestSetup(field="mollusc_video", + value="path/to/mollusc/video.mkv", + input_parameter="mollusc_video_path", + value_is_file=True) + ) def test_render_request_basic(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request([{"base": "cephalopod", - "body": [{"field": "mantle_mass", - "value": "10 kg", - "input_parameter": "cephalopod_mass"}, - {"field": "photo", - "value": "path/to/cephalopod/photo.jpg", - "input_parameter": "photo_path", - "value_is_file": True}, - {"field": "order", - "value": "Molluscs.Cephalopoda.Coleoidea"}, ]}, - {"base": "gastropod", - "body": [{"field": "mantle_mass", - "value": "1 kg", - "input_parameter": "gastropod_mass"}, - {"field": "order", - "value": "Molluscs.Gastropoda.Pulmonata"}, - {"field": "movie", - "value": "path/to/gastropod/movie.mkv", - "input_parameter": "movie_path", - "value_is_file": True}]}, ]) }} + {{ frags.render_request_setup(request) }} ''', ''' cephalopod = {} @@ -144,7 +130,45 @@ def test_render_request_basic(): with open(movie_path, "rb") as f: gastropod["movie"] = f.read() - ''' + ''', + request=[samplegen.TransformedRequest(base="cephalopod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="10 kg", + input_parameter="cephalopod_mass" + ), + samplegen.AttributeRequestSetup( + field="photo", + value="path/to/cephalopod/photo.jpg", + input_parameter="photo_path", + value_is_file=True + ), + samplegen.AttributeRequestSetup( + field="order", + value="Molluscs.Cephalopoda.Coleoidea"), + ], + 
single=None), + samplegen.TransformedRequest(base="gastropod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="1 kg", + input_parameter="gastropod_mass" + ), + samplegen.AttributeRequestSetup( + field="order", + value="Molluscs.Gastropoda.Pulmonata" + ), + samplegen.AttributeRequestSetup( + field="movie", + value="path/to/gastropod/movie.mkv", + input_parameter="movie_path", + value_is_file=True + ) + ], + single=None), + ] ) @@ -404,25 +428,38 @@ def test_print_input_params(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.print_input_params([{"base": "squid", - "body": [{"field": "mass", - "value": "10 kg", - "input_parameter": "mass"}, - {"field": "length", - "value": "20 m", - "input_parameter": "length"}]}, - {"base": "clam", - "body": [{"field": "diameter", - "value": "10 cm"}]}, - {"base": "whelk", - "body": [{"field": "color", - "value": "red", - "input_parameter": "color"}]}, - ]) }} + {{ frags.print_input_params(request) }} + ''', ''' mass, length, color - ''' + ''', + request=[samplegen.TransformedRequest(base="squid", + body=[ + samplegen.AttributeRequestSetup( + field="mass", + value="10 kg", + input_parameter="mass" + ), + samplegen.AttributeRequestSetup( + field="length", + value="20 m", + input_parameter="length" + ) + ], + single=None), + samplegen.TransformedRequest(base="diameter", + single=samplegen.AttributeRequestSetup( + value="10 cm" + ), + body=None), + samplegen.TransformedRequest(base="color", + single=samplegen.AttributeRequestSetup( + value="red", + input_parameter="color" + ), + body=None), + ] ) @@ -516,58 +553,106 @@ def test_render_calling_form_longrunning(): def test_render_method_call_basic(): - # The callingForm and callingFormEnum parameters are dummies, - # which we can get away with because of duck typing in the template. 
check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": [{"base": "video"}, - {"base": "audio"}, - {"base": "guess"}]}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum) }} ''', ''' client.categorize_mollusc(video, audio, guess) ''', + request=[samplegen.TransformedRequest(base="video", + body=True, + single=None), + samplegen.TransformedRequest(base="audio", + body=True, + single=None), + samplegen.TransformedRequest(base="guess", + body=True, + single=None)], calling_form_enum=CallingForm, calling_form=CallingForm.Request ) def test_render_method_call_bidi(): - # The callingForm and callingFormEnum parameters are dummies, - # which we can get away with because of duck typing in the template. check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum) }} ''', ''' client.categorize_mollusc([video]) ''', + request=[samplegen.TransformedRequest(base="video", + body=True, + single=None)], calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingBidi ) def test_render_method_call_client(): - # The callingForm and callingFormEnum parameters are dummies, - # which we can get away with because of duck typing in the template. 
check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": [{"base": "video"}]}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum) }} ''', ''' client.categorize_mollusc([video]) ''', + request=[samplegen.TransformedRequest(base="video", + body=True, + single=None)], calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingClient ) +def test_render_request_params(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_request_params(request) }} + + ''', + ''' + mollusc, length_meters=16, order='TEUTHIDA' + ''', + request=[ + samplegen.TransformedRequest( + base="length_meters", + body=None, + single=samplegen.AttributeRequestSetup(value="16") + ), + samplegen.TransformedRequest( + base="mollusc", + body=[ + samplegen.AttributeRequestSetup( + field="video", + value="path/to/video.mkv" + ), + samplegen.AttributeRequestSetup( + field="audio", + value="path/to/audio.ogg" + ) + ], + single=None + ), + samplegen.TransformedRequest( + base="order", + body=None, + single=samplegen.AttributeRequestSetup( + value="'TEUTHIDA'" + ) + ) + ] + ) + + def test_main_block(): check_template( ''' @@ -594,13 +679,26 @@ def main(): main() ''', request=[ - samplegen.TransformedRequest("input_params", [{"field": "list_molluscs.order", - "value": "coleoidea", - "input_parameter": "order"}, - {"field ": "list_molluscs.mass", - "value": "60kg", - "input_parameter": "mass"}]), - samplegen.TransformedRequest("enum_param", [{"field": "list_molluscs.zone", - "value": "MESOPELAGIC"}]) + samplegen.TransformedRequest(base="input_params", + body=[ + samplegen.AttributeRequestSetup( + field="list_molluscs.order", + value="coleoidea", + input_parameter="order" + ), + samplegen.AttributeRequestSetup( + field="list_molluscs.mass", + value="60kg", + input_parameter="mass") + ], + single=None), + 
samplegen.TransformedRequest(base="enum_param", + body=[ + samplegen.AttributeRequestSetup( + field="list_molluscs.zone", + value="MESOPELAGIC" + ) + ], + single=None) ] ) From 48a9eef92622a7730a99828fabac4bd08aea2678 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 30 Jul 2019 10:26:15 -0700 Subject: [PATCH 0142/1339] Add tests and implementation for enums in requests (#165) Request setups can set enum fields via strings. Includes tests for invalid enum variants and attempting to set non-existent fields of the enum itself. --- .../gapic/samplegen/samplegen.py | 34 ++++++----- .../tests/unit/samplegen/common_types.py | 2 + .../tests/unit/samplegen/test_samplegen.py | 60 ++++++++++++++++++- 3 files changed, 80 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index e88350acc3b8..7e22be15cc94 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -323,7 +323,7 @@ def validate_and_transform_request(self, attr_chain = field.split(".") base = self.request_type_ - for attr_name in attr_chain: + for i, attr_name in enumerate(attr_chain): attr = base.fields.get(attr_name) if not attr: raise BadAttributeLookup( @@ -332,23 +332,27 @@ def validate_and_transform_request(self, if attr.message: base = attr.message - + elif attr.enum: + # A little bit hacky, but 'values' is a list, and this is the easiest + # way to verify that the value is a valid enum variant. + witness = any(e.name == val for e in attr.enum.values) + if not witness: + raise InvalidEnumVariant( + "Invalid variant for enum {}: '{}'".format(attr, val)) + # Python code can set protobuf enums from strings. 
+ # This is preferable to adding the necessary import statement + # and requires less munging of the assigned value + duplicate["value"] = f"'{val}'" + break else: raise TypeError - # TODO: uncomment this when handling enums - # if attr.enum: - # # A little bit hacky, but 'values' is a list, and this is the easiest - # # way to verify that the value is a valid enum variant. - # witness = next((e.name for e in attr.enum.values if e.name == val), None) - # if not witness: - # raise InvalidEnumVariant - # # Python code can set protobuf enums from strings. - # # This is preferable to adding the necessary import statement - # # and requires less munging of the assigned value - # duplicate["value"] = f"'{witness}'" - - # TODO: what if there's more stuff in the chain? + if i != len(attr_chain) - 1: + # We broke out of the loop after processing an enum. + extra_attrs = ".".join(attr_chain[i:]) + raise InvalidEnumVariant( + f"Attempted to reference attributes of enum value: '{extra_attrs}'") + if len(attr_chain) > 1: duplicate["field"] = ".".join(attr_chain[1:]) else: diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index 0dc220638b08..76b139adb419 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -17,6 +17,8 @@ from collections import namedtuple from typing import(Iterable, Optional) +from google.protobuf import descriptor_pb2 + from gapic.schema import wrappers # Injected dummy test types diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 82341fac7076..e3db6f2b6b3b 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -24,7 +24,7 @@ import gapic.samplegen.samplegen as samplegen from common_types 
import (DummyField, DummyMessage, - DummyMethod, message_factory) + DummyMethod, message_factory, enum_factory) from gapic.samplegen import utils @@ -1075,3 +1075,61 @@ def test_validate_expression_malformed_attr(): v = samplegen.Validator(method) with pytest.raises(samplegen.BadAttributeLookup): v.validate_expression(exp) + + +def test_validate_request_enum(): + enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) + request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) + + v = samplegen.Validator(DummyMethod(input=request_type)) + actual = v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "cephalopod.subclass", "value": "COLEOIDEA"}] + ) + expected = [samplegen.TransformedRequest( + "cephalopod", + body=[samplegen.AttributeRequestSetup(field="subclass", + value="'COLEOIDEA'")], + single=None)] + assert actual == expected + + +def test_validate_request_enum_top_level(): + enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) + request_type = message_factory("mollusc.subclass", enum=enum) + + v = samplegen.Validator(DummyMethod(input=request_type)) + actual = v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "subclass", "value": "COLEOIDEA"}] + ) + expected = [samplegen.TransformedRequest( + "subclass", + single=samplegen.AttributeRequestSetup(value="'COLEOIDEA'"), + body=None)] + assert actual == expected + + +def test_validate_request_enum_invalid_value(): + enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) + request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) + v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), + input=request_type)) + with pytest.raises(samplegen.InvalidEnumVariant): + v.validate_and_transform_request( + utils.CallingForm.Request, + # Heterodonta are bivalves, not cephalopods + [{"field": "cephalopod.subclass", "value": "HETERODONTA"}] + ) + + +def 
test_validate_request_enum_not_last_attr(): + enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) + request_type = message_factory("mollusc.subclass", enum=enum) + v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), + input=request_type)) + with pytest.raises(samplegen.InvalidEnumVariant): + v.validate_and_transform_request( + utils.CallingForm.Request, + [{"field": "subclass.order", "value": "COLEOIDEA"}] + ) From 46156ee8bea1cc375c4fd2f7cb3b465a04d46bc2 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 30 Jul 2019 10:37:34 -0700 Subject: [PATCH 0143/1339] Whitespace cleanup and correct rendering of nested loops (#164) Collection and map loops can now be nested arbitrarily deep and be rendered with the proper level of indentation throughout. --- .../templates/examples/feature_fragments.j2 | 30 +-- packages/gapic-generator/noxfile.py | 1 - .../tests/unit/samplegen/test_integration.py | 2 - .../tests/unit/samplegen/test_template.py | 222 +++++++++++++----- 4 files changed, 173 insertions(+), 82 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 0f46a962e8c3..85e5c8d9e494 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -90,7 +90,7 @@ print({{ print_string_formatting(elts)|trim }}) {% macro render_collection_loop(statement) %} for {{ statement.variable }} in {{ statement.collection|coerce_response_name }}: {% for s in statement.body %} - {{ dispatch_statement(s) }} +{{ dispatch_statement(s, 4) }} {% endfor %} {% endmacro %} @@ -104,7 +104,7 @@ for {{ statement.key }} in {{ statement.map|coerce_response_name }}.keys(): for {{statement.key }}, {{ statement.value }} in {{ statement.map|coerce_response_name }}.items(): {% endif %} {% for s in statement.body %} - {{ 
dispatch_statement(s) }} +{{ dispatch_statement(s, 4) }} {% endfor %} {% endmacro %} @@ -115,24 +115,24 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {% endwith %} {% endmacro %} -{% macro dispatch_statement(statement) %} +{% macro dispatch_statement(statement, indentation=0) %} {# Each statement is a dict with a single key/value pair #} {% if "print" in statement -%} -{{ render_print(statement["print"]) }} +{{ render_print(statement["print"])|indent(width=indentation, first=True) }} {% elif "define" in statement -%} -{{ render_define(statement["define"]) }} +{{ render_define(statement["define"])|indent(width=indentation, first=True) }} {% elif "comment" in statement -%} -{{ render_comment(statement["comment"]) }} +{{ render_comment(statement["comment"])|indent(width=indentation, first=True) }} {% elif "loop" in statement -%} {% with loop = statement["loop"] -%} {% if "collection" in loop -%} -{{ render_collection_loop(loop) }} +{{ render_collection_loop(loop)|indent(width=indentation, first=True) }} {% else -%} -{{ render_map_loop(loop) }} +{{ render_map_loop(loop)|indent(width=indentation, first=True) }} {% endif -%} {% endwith -%} {% elif "write_file" in statement -%} -{{ render_write_file(statement["write_file"]) }} +{{ render_write_file(statement["write_file"])|indent(indentation, first=True) }} {% endif %} {% endmacro %} @@ -193,27 +193,27 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params(sample.request) }}) {% if calling_form == calling_form_enum.Request %} response = {{ method_invocation_text }} {% for statement in response_statements %} -{{ dispatch_statement(statement ) }} +{{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.RequestPagedAll %} page_result = {{ method_invocation_text }} for response in page_result: {% for statement in response_statements %} - {{ dispatch_statement(statement ) }} + {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif 
calling_form == calling_form_enum.RequestPaged %} -page_result = {{ method_invocation_text }} +page_result = {{ method_invocation_text}} for page in page_result.pages(): for response in page: {% for statement in response_statements %} - {{ dispatch_statement(statement ) }} + {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form in [calling_form_enum.RequestStreamingServer, calling_form_enum.RequestStreamingBidi] %} stream = {{ method_invocation_text }} for response in stream: {% for statement in response_statements %} - {{ dispatch_statement(statement ) }} + {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.LongRunningRequestPromise %} operation = {{ method_invocation_text }} @@ -222,7 +222,7 @@ print("Waiting for operation to complete...") response = operation.result() {% for statement in response_statements %} -{{ dispatch_statement(statement) }} +{{ dispatch_statement(statement)|trim }} {% endfor %} {% endif %} {% endmacro %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index d129145b6bd8..cff226f044ac 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -32,7 +32,6 @@ def unit(session): session.run( 'py.test', '-vv', - # '--quiet', '--cov=gapic', '--cov-config=.coveragerc', '--cov-report=term', diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 95398c076353..d3b8b012a980 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -91,8 +91,6 @@ def sample_classify(video): print("Mollusc is a {}".format(response.taxonomy)) - - # [END mollusc_classify_sync] def main(): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 
c2f5bd7f44f2..974fb8236138 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -113,23 +113,23 @@ def test_render_request_basic(): cephalopod = {} # cephalopod_mass = "10 kg" cephalopod["mantle_mass"] = cephalopod_mass - + # photo_path = "path/to/cephalopod/photo.jpg" with open(photo_path, "rb") as f: cephalopod["photo"] = f.read() - + cephalopod["order"] = Molluscs.Cephalopoda.Coleoidea - + gastropod = {} # gastropod_mass = "1 kg" gastropod["mantle_mass"] = gastropod_mass - + gastropod["order"] = Molluscs.Gastropoda.Pulmonata - + # movie_path = "path/to/gastropod/movie.mkv" with open(movie_path, "rb") as f: gastropod["movie"] = f.read() - + ''', request=[samplegen.TransformedRequest(base="cephalopod", body=[ @@ -252,7 +252,7 @@ def test_dispatch_print(): ''', ''' print("Squid") - + ''' ) @@ -265,7 +265,7 @@ def test_dispatch_define(): ''', ''' squid = humboldt - + ''' ) @@ -278,7 +278,7 @@ def test_dispatch_comment(): ''', ''' # Squid - + ''' ) @@ -302,7 +302,7 @@ def test_dispatch_write_file(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatch_statement({"write_file": + {{ frags.dispatch_statement({"write_file": {"filename": ["specimen-%s", "$resp.species"], "contents": "$resp.photo"}})}} @@ -310,7 +310,7 @@ def test_dispatch_write_file(): ''' with open("specimen-{}".format(response.species), "wb") as f: f.write(response.photo) - + ''' ) @@ -319,16 +319,17 @@ def test_collection_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_collection_loop({"collection": "$resp.molluscs", - "variable": "m", - "body": [{"print": ["Mollusc: %s", "m"]}]})}} + {{ frags.render_collection_loop(collection) }} ''', ''' for m in response.molluscs: print("Mollusc: {}".format(m)) - ''' + ''', + collection={"collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["Mollusc: %s", "m"]}]} ) @@ -336,16 
+337,17 @@ def test_dispatch_collection_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatch_statement({"loop": {"collection": "molluscs", - "variable": "m", - "body": [{"print": ["Mollusc: %s", "m"]}]}}) }}''', + {{ frags.dispatch_statement(statement) }}''', ''' for m in molluscs: print("Mollusc: {}".format(m)) - - - - ''' + + + + ''', + statement={"loop": {"collection": "molluscs", + "variable": "m", + "body": [{"print": ["Mollusc: %s", "m"]}]}} ) @@ -353,17 +355,18 @@ def test_map_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_map_loop({"map": "$resp.molluscs", - "key":"cls", - "value":"example", - "body": [{"print": ["A %s is a %s", "example", "cls"] }]}) + {{ frags.render_map_loop(map_loop) }}''', ''' for cls, example in response.molluscs.items(): print("A {} is a {}".format(example, cls)) - - - ''' + + + ''', + map_loop={"map": "$resp.molluscs", + "key": "cls", + "value": "example", + "body": [{"print": ["A %s is a %s", "example", "cls"]}]} ) @@ -371,17 +374,18 @@ def test_map_loop_no_key(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_map_loop({"map": "$resp.molluscs", - "value":"example", - "body": [{"print": ["A %s is a mollusc", "example"] }]}) + {{ frags.render_map_loop(map_loop) }} ''', ''' for example in response.molluscs.values(): print("A {} is a mollusc".format(example)) - - - ''' + + + ''', + map_loop={"map": "$resp.molluscs", + "value": "example", + "body": [{"print": ["A %s is a mollusc", "example"]}]} ) @@ -389,17 +393,18 @@ def test_map_loop_no_value(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_map_loop({"map": "$resp.molluscs", - "key":"cls", - "body": [{"print": ["A %s is a mollusc", "cls"] }]}) + {{ frags.render_map_loop(map_loop) }} ''', ''' for cls in response.molluscs.keys(): print("A {} is a mollusc".format(cls)) - - - ''' + + + ''', + map_loop={"map": "$resp.molluscs", + 
"key": "cls", + "body": [{"print": ["A %s is a mollusc", "cls"]}]} ) @@ -407,20 +412,121 @@ def test_dispatch_map_loop(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.dispatch_statement({"loop":{"map": "molluscs", - "key":"cls", - "value":"example", - "body": [{ - "print": ["A %s is a %s", "example", "cls"] }]}}) - }} + {{ frags.dispatch_statement(statement) }} ''', ''' for cls, example in molluscs.items(): print("A {} is a {}".format(example, cls)) + + + + ''', + statement={"loop": {"map": "molluscs", + "key": "cls", + "value": "example", + "body": [{"print": ["A %s is a %s", "example", "cls"]}]}} + ) + + +def test_render_nested_loop_collection(): + # Note: the vast quantity of extraneous tailing whitespace is an artifact of the + # recursive dispatch and indentation. + # The calling form macros are responsible for trimming it out. + statement = { + "loop": { + "collection": "$resp.molluscs", + "variable": "m", + "body": [ + { + "loop": { + "collection": "m.tentacles", + "variable": "t", + "body": [ + { + "loop": { + "collection": "t.suckers", + "variable": "s", + "body": [{"print": ["Sucker: %s", "s"]}], + } + } + ], + } + } + ], + } + } + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatch_statement(statement) }} + """, + """ + for m in response.molluscs: + for t in m.tentacles: + for s in t.suckers: + print("Sucker: {}".format(s)) + + - ''' + + + """, + statement=statement + ) + + +def test_render_nested_loop_map(): + # Note: the vast quantity of extraneous tailing whitespace is an artifact of the + # recursive dispatch and indentation. + # The calling form macros are responsible for trimming it out. 
+ statement = { + "loop": { + "map": "$resp.molluscs", + "key": "klass", + "value": "orders", + "body": [ + { + "loop": { + "map": "orders", + "key": "order", + "value": "families", + "body": [ + { + "loop": { + "map": "families", + "key": "family", + "value": "ex", + "body": [{"print": ["Example: %s", "ex"]}] + } + } + ] + } + } + ] + } + } + + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.dispatch_statement(statement) }} + """, + """ + for klass, orders in response.molluscs.items(): + for order, families in orders.items(): + for family, ex in families.items(): + print("Example: {}".format(ex)) + + + + + + + + """, + statement=statement ) @@ -476,8 +582,6 @@ def test_render_calling_form_request(): ''' response = TEST_INVOCATION_TXT print("Test print statement") - - ''', calling_form_enum=CallingForm, calling_form=CallingForm.Request) @@ -489,8 +593,6 @@ def test_render_calling_form_paged_all(): page_result = TEST_INVOCATION_TXT for response in page_result: print("Test print statement") - - ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestPagedAll) @@ -503,9 +605,7 @@ def test_render_calling_form_paged(): for page in page_result.pages(): for response in page: print("Test print statement") - - - ''', + ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestPaged) @@ -516,8 +616,6 @@ def test_render_calling_form_streaming_server(): stream = TEST_INVOCATION_TXT for response in stream: print("Test print statement") - - ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingServer) @@ -529,8 +627,6 @@ def test_render_calling_form_streaming_bidi(): stream = TEST_INVOCATION_TXT for response in stream: print("Test print statement") - - ''', calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingBidi) @@ -540,13 +636,11 @@ def test_render_calling_form_longrunning(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' operation = TEST_INVOCATION_TXT - + print("Waiting for 
operation to complete...") - + response = operation.result() print("Test print statement") - - ''', calling_form_enum=CallingForm, calling_form=CallingForm.LongRunningRequestPromise) @@ -617,7 +711,7 @@ def test_render_request_params(): ''' {% import "feature_fragments.j2" as frags %} {{ frags.render_request_params(request) }} - + ''', ''' mollusc, length_meters=16, order='TEUTHIDA' From 8ebdbe319a170cab144c89441c03fae7c9c1e4c8 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 1 Aug 2019 16:14:19 -0700 Subject: [PATCH 0144/1339] All response variable/attribute expressions are validated (#166) All statements and expressions that reference the method response are validated, e.g. {"print": ["%s", "$resp.cephalopods[0].mantle"]}, will be checked to make sure that the response message type has a repeated "cephalopods" field, and that that type has a "mantle" field. Includes a major refactor of Validator.validate_expression --- .../gapic/samplegen/samplegen.py | 177 +++--- .../tests/unit/samplegen/common_types.py | 5 +- .../tests/unit/samplegen/test_integration.py | 14 +- .../tests/unit/samplegen/test_samplegen.py | 502 ++++++++++++++++-- 4 files changed, 574 insertions(+), 124 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 7e22be15cc94..36bbde7dc5c1 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -26,6 +26,8 @@ from textwrap import dedent from typing import (ChainMap, Dict, List, Mapping, Optional, Set, Tuple, Union) +from google.protobuf import descriptor_pb2 + # Outstanding issues: # * In real sample configs, many variables are # defined with an _implicit_ $resp variable. 
@@ -319,7 +321,8 @@ def validate_and_transform_request(self, input_parameter = duplicate.get("input_parameter") if input_parameter: - self._handle_lvalue(input_parameter, str) + self._handle_lvalue(input_parameter, wrappers.Field( + field_pb=descriptor_pb2.FieldDescriptorProto())) attr_chain = field.split(".") base = self.request_type_ @@ -434,52 +437,81 @@ def validate_expression(self, exp: str) -> wrappers.Field: Returns: wrappers.Field: The final field in the chain. """ - # TODO: handle mapping attributes, i.e. {} # TODO: Add resource name handling, i.e. % - indexed_exp_re = re.compile( - r"^(?P\$?\w+)(?:\[(?P\d+)\])?$") - - toks = exp.split(".") - match = indexed_exp_re.match(toks[0]) - if not match: - raise BadAttributeLookup( - f"Badly formatted attribute expression: {exp}") - - base_tok, previous_was_indexed = (match.groupdict()["attr_name"], - bool(match.groupdict()["index"])) - base = self.var_field(base_tok) - if not base: - raise UndefinedVariableReference( - "Reference to undefined variable: {}".format(base_tok)) - if previous_was_indexed and not base.repeated: - raise BadAttributeLookup( - "Cannot index non-repeated attribute: {}".format(base_tok)) - - for tok in toks[1:]: - match = indexed_exp_re.match(tok) + chain_link_re = re.compile( + r""" + (?P\$?\w+)(?:\[(?P\d+)\]|\{["'](?P[^"']+)["']\})?$ + """.strip()) + + def validate_recursively(expression, scope, depth=0): + first_dot = expression.find(".") + base = expression[:first_dot] if first_dot > 0 else expression + match = chain_link_re.match(base) if not match: raise BadAttributeLookup( - f"Badly formatted attribute expression: {tok}") + f"Badly formed attribute expression: {expression}") + + name, idxed, mapped = (match.groupdict()["attr_name"], + bool(match.groupdict()["index"]), + bool(match.groupdict()["key"])) + field = scope.get(name) + if not field: + exception_class = (BadAttributeLookup if depth else + UndefinedVariableReference) + raise exception_class(f"No such variable or attribute: 
{name}") - attr_name, lookup_token = match.groups() - if base.repeated and not previous_was_indexed: + # Invalid input + if (idxed or mapped) and not field.repeated: raise BadAttributeLookup( - "Cannot access attributes through repeated field: {}".format(attr_name)) - if previous_was_indexed and not base.repeated: + f"Collection lookup on non-repeated field: {base}") + + # Can only ignore indexing or mapping in an indexed (or mapped) field + # if it is the terminal point in the expression. + if field.repeated and not (idxed or mapped) and first_dot != -1: raise BadAttributeLookup( - "Cannot index non-repeated attribute: {}".format(attr_name)) + ("Accessing attribute on a non-terminal collection without" + f"indexing into the collection: {base}") + ) + + message = field.message + scope = dict(message.fields) if message else {} + # Can only map message types, not enums + if mapped: + # See https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto#L496 + # for a better understanding of how map attributes are handled in protobuf + if not message or not message.options.map_field: + raise BadAttributeLookup( + f"Badly formed mapped field: {base}") + + value_field = message.fields.get("value") + if not value_field: + raise BadAttributeLookup( + f"Mapped attribute has no value field: {base}") + + value_message = value_field.message + if not value_message: + raise BadAttributeLookup( + f"Mapped value field is not a message: {base}") - # TODO: handle enums, primitives, and so forth. - attr = base.message.fields.get(attr_name) # type: ignore - if not attr: + if first_dot != -1: + scope = value_message.fields + + # Terminus of the expression. + if first_dot == -1: + return field + + # Enums and primitives are only allowed at the tail of an expression. 
+ if not message: raise BadAttributeLookup( - "No such attribute in type '{}': {}".format(base, attr_name)) + f"Non-terminal attribute is not a message: {base}") - base, previous_was_indexed = attr, bool(lookup_token) + return validate_recursively(expression[first_dot + 1:], + scope, + depth + 1) - return base + return validate_recursively(exp, self.var_defs_) - def _handle_lvalue(self, lval: str, type_=None): + def _handle_lvalue(self, lval: str, type_: wrappers.Field): """Conducts safety checks on an lvalue and adds it to the lexical scope. Raises: @@ -505,9 +537,6 @@ def _validate_format(self, body: List[str]): The number of format tokens in the string must equal the number of arguments, and each argument must be a defined variable. - TODO: the attributes of the variable must correspond to attributes - of the variable's type. - Raises: MismatchedFormatSpecifier: If the number of format string segments ("%s") in a "print" or "comment" block does not equal the @@ -525,11 +554,7 @@ def _validate_format(self, body: List[str]): ) for expression in body[1:]: - var = expression.split(".")[0] - if var not in self.var_defs_: - raise UndefinedVariableReference( - "Reference to undefined variable: {}".format(var) - ) + self.validate_expression(expression) def _validate_define(self, body: str): """"Validates 'define' statements. @@ -542,11 +567,6 @@ def _validate_define(self, body: str): UndefinedVariableReference: If an attempted rvalue base is a previously undeclared variable. """ - # TODO: Need to check the defined variables - # if the rhs references a non-response variable. - # TODO: Need to rework the regex to allow for subfields, - # indexing, and so forth. - # # Note: really checking for safety would be equivalent to # re-implementing the python interpreter. 
m = re.match(r"^([a-zA-Z]\w*)=([^=]+)$", body) @@ -586,14 +606,18 @@ def _validate_write_file(self, body): raise InvalidStatement( "Missing key in 'write_file' statement: 'contents'") - # TODO: check the rest of the elements for valid subfield attribute - base = contents_var.split(".")[0] - if base not in self.var_defs_: - raise UndefinedVariableReference( - "Reference to undefined variable: {}".format(base) - ) + self.validate_expression(contents_var) - def _validate_loop(self, body): + @dataclasses.dataclass(frozen=True) + class LoopParameterField(wrappers.Field): + # This class is a hack for assigning the iteration variable in a collection loop. + # In protobuf, the concept of collection is manifested as a repeated + # field of message type T. Therefore, in order to assign the correct type + # to a loop iteration parameter, we copy the field that is the collection + # but remove 'repeated'. + repeated: bool = False + + def _validate_loop(self, loop): """Validates loop headers and statement bodies. Checks for correctly defined loop constructs, @@ -613,7 +637,7 @@ def _validate_loop(self, body): or keyword combinatations. 
""" - segments = set(body.keys()) + segments = set(loop.keys()) map_args = {self.MAP_KWORD, self.BODY_KWORD} # Even though it's valid python to use a variable outside of the lexical @@ -629,20 +653,31 @@ def _validate_loop(self, body): self.var_defs_ = self.var_defs_.new_child() if {self.COLL_KWORD, self.VAR_KWORD, self.BODY_KWORD} == segments: - tokens = body[self.COLL_KWORD].split(".") + tokens = loop[self.COLL_KWORD].split(".") # TODO: resolve the implicit $resp dilemma # if collection_name.startswith("."): # collection_name = "$resp" + collection_name collection_field = self.validate_expression( - body[self.COLL_KWORD]) + loop[self.COLL_KWORD]) if not collection_field.repeated: raise BadLoop( - "Tried to use a non-repeated field as a collection: {}".format(tokens[-1])) - - var = body[self.VAR_KWORD] - self._handle_lvalue(var) + "Tried to use a non-repeated field as a collection: {}".format( + tokens[-1])) + + var = loop[self.VAR_KWORD] + # The collection_field is repeated, + # but the iteration parameter should not be. 
+ self._handle_lvalue( + var, + self.LoopParameterField( + field_pb=collection_field.field_pb, + message=collection_field.message, + enum=collection_field.enum, + meta=collection_field.meta + ) + ) elif map_args <= segments: segments -= map_args @@ -653,19 +688,15 @@ def _validate_loop(self, body): segments) ) - map_name_base = body[self.MAP_KWORD].split(".")[0] - if map_name_base not in self.var_defs_: - raise UndefinedVariableReference( - "Reference to undefined variable: {}".format(map_name_base) - ) + map_field = self.validate_expression(loop[self.MAP_KWORD]) - key = body.get(self.KEY_KWORD) + key = loop.get(self.KEY_KWORD) if key: - self._handle_lvalue(key) + self._handle_lvalue(key, map_field.message.fields["key"]) - val = body.get(self.VAL_KWORD) + val = loop.get(self.VAL_KWORD) if val: - self._handle_lvalue(val) + self._handle_lvalue(val, map_field.message.fields["value"]) if not (key or val): raise BadLoop( @@ -674,7 +705,7 @@ def _validate_loop(self, body): else: raise BadLoop("Unexpected loop form: {}".format(segments)) - self.validate_response(body[self.BODY_KWORD]) + self.validate_response(loop[self.BODY_KWORD]) # Restore the previous lexical scope. # This is stricter than python scope rules # because the samplegen spec mandates it. 
diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index 76b139adb419..e4ae881a4198 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -37,10 +37,11 @@ DummyMethod.__new__.__defaults__ = (False,) * len(DummyMethod._fields) -DummyMessage = namedtuple("DummyMessage", ["fields", "type"]) +DummyMessage = namedtuple("DummyMessage", ["fields", "type", "options"]) DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) -DummyField = namedtuple("DummyField", ["message", "enum", "repeated"]) +DummyField = namedtuple("DummyField", + ["message", "enum", "repeated", "field_pb", "meta"]) DummyField.__new__.__defaults__ = (False,) * len(DummyField._fields) DummyService = namedtuple("DummyService", ["methods"]) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index d3b8b012a980..87f088be542a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -46,9 +46,17 @@ def test_generate_sample_basic(): # or in features that are sufficiently small and trivial that it doesn't make sense # to have standalone tests. 
schema = DummyApiSchema( - {"animalia.mollusca.v1.Mollusc": DummyService( - {"Classify": DummyMethod( - input=message_factory("mollusc.classify_request.video"))})}, + { + "animalia.mollusca.v1.Mollusc": DummyService( + { + "Classify": DummyMethod( + input=message_factory( + "mollusc.classify_request.video"), + output=message_factory("$resp.taxonomy") + ) + } + ) + }, DummyNaming("molluscs-v1-mollusc")) sample = {"service": "animalia.mollusca.v1.Mollusc", diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index e3db6f2b6b3b..4711691fa8bd 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -15,7 +15,7 @@ import yaml import pytest -from typing import (Iterable, TypeVar) +from typing import (TypeVar) from collections import namedtuple from google.protobuf import descriptor_pb2 @@ -148,8 +148,9 @@ def test_comment(): def test_comment_fmt_str(): - comment = {"comment": ["This is a mollusc of class %s", "$resp.class"]} - samplegen.Validator(DummyMethod()).validate_response([comment]) + comment = {"comment": ["This is a mollusc of class %s", "$resp.klass"]} + v = samplegen.Validator(DummyMethod(output=message_factory("$resp.klass"))) + v.validate_response([comment]) def test_comment_fmt_undefined_var(): @@ -181,8 +182,9 @@ def test_loop_collection(): "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } - v = samplegen.Validator(DummyMethod(output=message_factory( - "$resp.molluscs", repeated_iter=[True]))) + OutputType = message_factory( + "$resp.molluscs.class", repeated_iter=[True, False]) + v = samplegen.Validator(DummyMethod(output=OutputType)) v.validate_response([loop]) @@ -265,7 +267,29 @@ def test_loop_map(): "body": [{"print": ["A %s is a %s", "mollusc", "cls"]}], } } - samplegen.Validator(DummyMethod()).validate_response([loop]) + OutputType = DummyMessage( + fields={ 
+ "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="MOLLUSC_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + v = samplegen.Validator(DummyMethod(output=OutputType)) + v.validate_response([loop]) def test_collection_loop_lexical_scope_variable(): @@ -308,13 +332,37 @@ def test_map_loop_lexical_scope_key(): "loop": { "map": "$resp.molluscs", "key": "cls", - "value": "order", + "value": "mollusc", "body": [{"define": "tmp=cls"}], } }, + # 'cls' is outside the visible lexical scope according to strict + # samplegen rules, even though it is valid python. {"define": "last_cls=cls"}, ] - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="MOLLUSC_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.UndefinedVariableReference): v.validate_response(statements) @@ -325,13 +373,37 @@ def test_map_loop_lexical_scope_value(): "loop": { "map": "$resp.molluscs", "key": "cls", - "value": "order", - "body": [{"define": "tmp=order"}], + "value": "mollusc", + "body": [{"define": "tmp=mollusc"}], } }, - {"define": "last_order=order"}, + # 'mollusc' is outside the visible lexical scope according to strict + # samplegen rules, even though it is valid python. 
+ {"define": "last_mollusc=mollusc"}, ] - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="MOLLUSC_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.UndefinedVariableReference): v.validate_response(statements) @@ -342,13 +414,36 @@ def test_map_loop_lexical_scope_inline(): "loop": { "map": "$resp.molluscs", "key": "cls", - "value": "order", - "body": [{"define": "tmp=order"}], + "value": "mollusc", + "body": [{"define": "tmp=mollusc"}], } }, - {"define": "last_order=tmp"}, + # 'tmp' is outside the visible lexical scope according to strict + # samplegen rules, even though it is valid python. + {"define": "last_mollusc=tmp"}, ] - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="MOLLUSC_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.UndefinedVariableReference): v.validate_response(statements) @@ -357,12 +452,35 @@ def test_loop_map_reserved_key(): loop = { "loop": { "map": "$resp.molluscs", + # Can't use 'class' since it's a reserved keyword "key": "class", "value": "mollusc", "body": [{"print": ["A %s is a %s", "mollusc", "class"]}], } } - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": 
DummyField( + message=DummyMessage( + fields={}, + type="MOLLUSC_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.ReservedVariableName): v.validate_response([loop]) @@ -372,11 +490,34 @@ def test_loop_map_reserved_val(): "loop": { "map": "$resp.molluscs", "key": "m", + # Can't use 'class' since it's a reserved keyword "value": "class", "body": [{"print": ["A %s is a %s", "m", "class"]}], } } - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="CLASS_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.ReservedVariableName): v.validate_response([loop]) @@ -403,7 +544,30 @@ def test_loop_map_no_key(): "body": [{"print": ["Mollusc: %s", "mollusc"]}], } } - samplegen.Validator(DummyMethod()).validate_response([loop]) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="CLASS_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) + v.validate_response([loop]) def test_loop_map_no_value(): @@ -414,13 +578,59 @@ def test_loop_map_no_value(): "body": [{"print": ["Mollusc: %s", "mollusc"]}], } } - samplegen.Validator(DummyMethod()).validate_response([loop]) + OutputType = DummyMessage( + fields={ + 
"molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="CLASS_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) + v.validate_response([loop]) def test_loop_map_no_key_or_value(): loop = {"loop": {"map": "$resp.molluscs", + # Need at least one of 'key' or 'value' "body": [{"print": ["Dead loop"]}]}} - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="CLASS_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.BadLoop): v.validate_response([loop]) @@ -466,13 +676,35 @@ def test_loop_map_redefined_key(): { "loop": { "map": "$resp.molluscs", + # Can't redefine mollusc, which was defined one statement above. 
"key": "mollusc", "body": [{"print": ["Mollusc: %s", "mollusc"]}], } }, ] - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc.molluscs"))) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="CLASS_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.RedefinedVariable): v.validate_response(statements) @@ -483,13 +715,35 @@ def test_loop_map_redefined_value(): { "loop": { "map": "$resp.molluscs", + # Can't redefine 'mollusc', which was defined one statement above. "value": "mollusc", "body": [{"print": ["Mollusc: %s", "mollusc"]}], } }, ] - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc.molluscs"))) + OutputType = DummyMessage( + fields={ + "molluscs": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={}, + type="CLASS_TYPE" + ) + ) + }, + type="MOLLUSCS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True) + ), + repeated=True + ), + }, + type="RESPONSE_TYPE" + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.RedefinedVariable): v.validate_response(statements) @@ -503,7 +757,14 @@ def test_validate_write_file(): } } ] - samplegen.Validator(DummyMethod()).validate_response(statements) + OutputType = DummyMessage( + fields={ + "species": DummyField(message=DummyMessage(fields={})), + "photo": DummyField(message=DummyMessage(fields={})) + } + ) + v = samplegen.Validator(DummyMethod(output=OutputType)) + v.validate_response(statements) def test_validate_write_file_fname_fmt(): @@ -528,7 +789,13 @@ def test_validate_write_file_fname_bad_var(): def 
test_validate_write_file_missing_fname(): statements = [{"write_file": {"contents": "$resp.photo"}}] - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "filename": DummyField(message=DummyMessage(fields={})), + "photo": DummyField(message=DummyMessage(fields={})) + } + ) + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.InvalidStatement): v.validate_response(statements) @@ -536,7 +803,14 @@ def test_validate_write_file_missing_fname(): def test_validate_write_file_missing_contents(): statements = [{"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "species": DummyField(message=DummyMessage(fields={})), + "photo": DummyField(message=DummyMessage(fields={})) + } + ) + + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.InvalidStatement): v.validate_response(statements) @@ -548,7 +822,13 @@ def test_validate_write_file_bad_contents_var(): "contents": "squid.photo", } }] - v = samplegen.Validator(DummyMethod()) + OutputType = DummyMessage( + fields={ + "species": DummyField(message=DummyMessage(fields={})), + "photo": DummyField(message=DummyMessage(fields={})) + } + ) + v = samplegen.Validator(DummyMethod(output=OutputType)) with pytest.raises(samplegen.UndefinedVariableReference): v.validate_response(statements) @@ -596,11 +876,11 @@ def test_validate_request_basic(): expected = [samplegen.TransformedRequest( base="squid", body=[ - samplegen.AttributeRequestSetup(field="mantle_length", - value="100 cm"), - samplegen.AttributeRequestSetup(field="mantle_mass", - value="10 kg"), - ], + samplegen.AttributeRequestSetup(field="mantle_length", + value="100 cm"), + samplegen.AttributeRequestSetup(field="mantle_mass", + value="10 kg"), + ], single=None )] @@ -951,18 +1231,7 @@ def test_validate_expression_no_such_attr(): v.validate_expression("$resp.nautiloidea") -def 
test_validate_expression_predefined(): - # TODO: can't remember what this test does - exp = "$resp.coleoidea.octopodiformes.octopus" - OutputType = message_factory(exp) - method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) - - with pytest.raises(samplegen.BadAttributeLookup): - v.validate_response([{"define": "nautilus=$resp.nautiloidea"}]) - - -def test_validate_expression_repeated_attrs(): +def test_validate_expression_non_indexed_non_terminal_repeated(): # This is a little tricky: there's an attribute hierarchy # of response/coleoidea/octopodiformes, but coleoidea is a repeated field, # so accessing $resp.coleoidea.octopodiformes doesn't make any sense. @@ -1048,6 +1317,147 @@ def test_validate_expression_base_attr_is_repeated(): v.validate_expression("molluscs[0].mantle") +def test_validate_expression_map_lookup(): + # See https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto#L475 + # for details on how mapped attributes get transformed by the protoc compiler. 
+ OutputType = DummyMessage( + fields={ + "cephalopods": DummyField( + message=DummyMessage( + fields={ + # real type is most likely str in real protos + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={ + "mantle": DummyField( + message=DummyMessage(type="MANTLE_TYPE", + fields={}), + ) + }, + type="CEPHALOPOD_TYPE" + ) + ), + }, + type="CEPHALOPODS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True)), + repeated=True, + ) + }, + type="MOLLUSC_TYPE" + ) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + v.validate_expression('$resp.cephalopods{"squid"}.mantle') + + +def test_validate_expression_map_lookup_terminal_lookup(): + OutputType = DummyMessage( + fields={ + "cephalopods": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={ + "mantle": DummyField( + message=DummyMessage(type="MANTLE_TYPE", + fields={}), + ) + }, + type="CEPHALOPOD_TYPE" + ) + ), + }, + type="CEPHALOPODS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True)), + repeated=True, + ) + }, + type="MOLLUSC_TYPE" + ) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + v.validate_expression('$resp.cephalopods{"squid"}') + + +def test_validate_expression_mapped_no_map_field(): + OutputType = DummyMessage( + fields={ + "cephalopods": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + "value": DummyField( + message=DummyMessage( + fields={ + "mantle": DummyField( + message=DummyMessage(type="MANTLE_TYPE", + fields={}), + ) + }, + type="CEPHALOPOD_TYPE" + ) + )}, + type="CEPHALOPODS_TYPE", + # The map_field attribute in the options indicates whether + # a message type is 'really' a map or just looks like one. 
+ options=namedtuple("MessageOptions", ["map_field"])(False)), + repeated=True, + ) + }, + type="MOLLUSC_TYPE" + ) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression('$resp.cephalopods{"squid"}.mantle') + + +def test_validate_expression_mapped_no_value(): + OutputType = DummyMessage( + fields={ + "cephalopods": DummyField( + message=DummyMessage( + # Maps need 'key' AND 'value' attributes. + fields={"key": DummyField()}, + type="CEPHALOPODS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True)), + repeated=True, + ) + }, + type="MOLLUSC_TYPE" + ) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression('$resp.cephalopods{"squid"}.mantle') + + +def test_validate_expression_mapped_no_message(): + OutputType = DummyMessage( + fields={ + "cephalopods": DummyField( + message=DummyMessage( + fields={ + "key": DummyField(), + # The value field needs a message. 
+ "value": DummyField(), + }, + type="CEPHALOPODS_TYPE", + options=namedtuple("MessageOptions", ["map_field"])(True)), + repeated=True, + ) + }, + type="MOLLUSC_TYPE" + ) + method = DummyMethod(output=OutputType) + v = samplegen.Validator(method) + with pytest.raises(samplegen.BadAttributeLookup): + v.validate_expression('$resp.cephalopods{"squid"}.mantle') + + def test_validate_expresssion_lookup_unrepeated_base(): exp = "$resp.molluscs" OutputType = message_factory(exp) From 3d5111783a9e21f1a346cb8b84fd7fe04b14eb00 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 8 Aug 2019 16:31:41 -0700 Subject: [PATCH 0145/1339] Tweak docstring references to auth and api_core (#174) --- .../$name_$version/$sub/services/$service/client.py.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 0db9080a5f82..96af9aa3fd2f 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -64,7 +64,7 @@ class {{ service.name }}(metaclass={{ service.name }}Meta): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{- ' ' }}The hostname to connect to. - credentials (Optional[google.auth.credentials.Credential]): The + credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -108,7 +108,7 @@ class {{ service.name }}(metaclass={{ service.name }}Meta): on the ``request`` instance; if ``request`` is provided, this should not be set. 
{% endfor -%} - retry (~.retries.Retry): Designation of what errors, if any, + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be From 4694e546df5e2e531d518c350cb8386b84f60040 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 9 Aug 2019 16:27:15 -0700 Subject: [PATCH 0146/1339] Remove trailing comma after keyword only param (#175) --- .../$name_$version/$sub/services/$service/client.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 96af9aa3fd2f..47df25b55d37 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -57,7 +57,7 @@ class {{ service.name }}(metaclass={{ service.name }}Meta): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, - transport: Union[str, {{ service.name }}Transport] = None, + transport: Union[str, {{ service.name }}Transport] = None ) -> None: """Instantiate the {{ (service.name|snake_case).replace('_', ' ') }}. From c32e7dccb00bbacd44cf881e2ffb3bd014ecf499 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 12 Aug 2019 10:12:30 -0700 Subject: [PATCH 0147/1339] [fix] Remove some unused imports. (#177) This removes five imports from generated code that are consistently unused. 
--- .../$name_$version/$sub/services/$service/client.py.j2 | 2 +- .../$name_$version/$sub/services/$service/pagers.py.j2 | 1 - .../$name_$version/$sub/services/$service/transports/grpc.py.j2 | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 47df25b55d37..c22e8c41137c 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -2,7 +2,7 @@ {% block content %} from collections import OrderedDict -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import exceptions # type: ignore diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 index aac5949228a4..4efcdc0b465e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 @@ -6,7 +6,6 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. 
-#} -import copy from typing import Any, Callable, Iterable {% filter sort_lines -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 index 429c3de58db0..317664c58ce5 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 @@ -1,7 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Callable, Dict, Sequence, Tuple +from typing import Callable, Dict from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} From c2fe482fce3fbe9f3654e22845b3a9728f2c0393 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 12 Aug 2019 13:01:04 -0700 Subject: [PATCH 0148/1339] [fix] Convert return docstring to RST. (#176) It turns out the return type docstring was being wrapped but not RST converted, which could cause some weird issues when rendering. Fixes #170. 
--- .../$name_$version/$sub/services/$service/client.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index c22e8c41137c..b788fb4dfc58 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -117,7 +117,7 @@ class {{ service.name }}(metaclass={{ service.name }}Meta): Returns: {{ method.client_output.ident.sphinx }}: - {{ method.client_output.meta.doc|wrap(width=72, indent=16) }} + {{ method.client_output.meta.doc|rst(width=72, indent=16) }} {%- endif %} """ {% if method.flattened_fields -%} From 14da46e81ba1df0a5d024c9bdee27aef648db1a9 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 14 Aug 2019 18:24:38 -0700 Subject: [PATCH 0149/1339] Test and impl for end-to-end sample and manifest generation (#167) * Test and impl for end-to-end sample and manifest generation The sample outdir and paths to sample config dirs and files are read in as options; all the samples are generated, as is the manifest. Samplegen utility code is moved into a separate module to bypass circular imports. Includes various other minor cleanups and tweaks. WORK TO DO: * Provide configuration knobs for the environment and environment name in the generated sample manifest. * Provide configuration knobs for the name of the sample template. 
--- .../gapic/generator/generator.py | 113 +++++++- .../gapic/generator/options.py | 42 ++- .../gapic/samplegen/__init__.py | 4 - .../gapic/samplegen/samplegen.py | 252 +++++++----------- .../gapic/samplegen_utils/__init__.py | 23 ++ .../utils.py => samplegen_utils/types.py} | 67 ++++- .../gapic/samplegen_utils/utils.py | 107 ++++++++ .../{samplegen => samplegen_utils}/yaml.py | 16 +- .../gapic-generator/gapic/schema/naming.py | 8 +- packages/gapic-generator/gapic/utils/code.py | 2 +- packages/gapic-generator/noxfile.py | 2 +- packages/gapic-generator/setup.py | 3 + .../tests/unit/generator/test_generator.py | 210 ++++++++++++++- .../tests/unit/generator/test_options.py | 8 + .../tests/unit/samplegen/test_integration.py | 218 ++++++++++++++- .../tests/unit/samplegen/test_manifest.py | 28 +- .../tests/unit/samplegen/test_samplegen.py | 187 ++++++------- .../tests/unit/samplegen/test_template.py | 2 +- 18 files changed, 987 insertions(+), 305 deletions(-) create mode 100644 packages/gapic-generator/gapic/samplegen_utils/__init__.py rename packages/gapic-generator/gapic/{samplegen/utils.py => samplegen_utils/types.py} (60%) create mode 100644 packages/gapic-generator/gapic/samplegen_utils/utils.py rename packages/gapic-generator/gapic/{samplegen => samplegen_utils}/yaml.py (89%) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index b10a4d814140..9c70f5ababd2 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -12,20 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import OrderedDict -from typing import Dict, Mapping -import os -import re - import jinja2 - -from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse - -from gapic import utils -from gapic.generator import formatter -from gapic.generator import options +import yaml +import re +import os +from typing import (Any, DefaultDict, Dict, Mapping, List) +from hashlib import sha256 +from collections import (OrderedDict, defaultdict) +from gapic.samplegen_utils.utils import is_valid_sample_cfg +from gapic.samplegen_utils.types import InvalidConfig from gapic.samplegen import samplegen +from gapic.generator import options +from gapic.generator import formatter from gapic.schema import api +from gapic import utils +from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse class Generator: @@ -57,6 +58,8 @@ def __init__(self, opts: options.Options) -> None: self._env.filters['wrap'] = utils.wrap self._env.filters['coerce_response_name'] = samplegen.coerce_response_name + self._sample_configs = opts.sample_configs + def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. @@ -72,13 +75,15 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: """ output_files: Dict[str, CodeGeneratorResponse.File] = OrderedDict() - # TODO: handle sample_templates specially, generate samples. sample_templates, client_templates = utils.partition( lambda fname: os.path.basename(fname) == samplegen.TEMPLATE_NAME, self._env.loader.list_templates()) # Iterate over each template and add the appropriate output files # based on that template. + # Sample templates work differently: there's (usually) only one, + # and instead of iterating over it/them, we iterate over samples + # and plug those into the template. for template_name in client_templates: # Sanity check: Skip "private" templates. 
filename = template_name.split('/')[-1] @@ -90,9 +95,93 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: api_schema=api_schema, )) + output_files.update(self._generate_samples_and_manifest(api_schema)) + # Return the CodeGeneratorResponse output. return CodeGeneratorResponse(file=[i for i in output_files.values()]) + def _generate_samples_and_manifest( + self, + api_schema: api.API + ) -> Dict[str, CodeGeneratorResponse.File]: + """Generate samples and samplegen manifest for the API. + + Arguments: + api_schema (api.API): The schema for the API to which the samples belong. + + Returns: + Dict[str, CodeGeneratorResponse.File]: A dict mapping filepath to rendered file. + """ + id_to_samples: DefaultDict[str, List[Any]] = defaultdict(list) + for config_fpath in self._sample_configs: + with open(config_fpath) as f: + configs = yaml.safe_load_all(f.read()) + + spec_generator = ( + spec + for cfg in configs if is_valid_sample_cfg(cfg) + for spec in cfg.get("samples", []) + ) + + for spec in spec_generator: + # Every sample requires an ID, preferably provided by the + # samplegen config author. + # If no ID is provided, fall back to the region tag. + # If there's no region tag, generate a unique ID. + # + # Ideally the sample author should pick a descriptive, unique ID, + # but this may be impractical and can be error-prone. + sample_id = (spec.get("id") + or spec.get("region_tag") + or sha256(str(spec).encode('utf8')).hexdigest()[:8]) + + spec["id"] = sample_id + id_to_samples[sample_id].append(spec) + + # Interpolate the special variables in the sample_out_dir template. + out_dir = "samples" + fpath_to_spec_and_rendered = {} + for samples in id_to_samples.values(): + for spec in samples: + id_is_unique = len(samples) == 1 + # The ID is used to generate the file name and by sample tester + # to link filenames to invoked samples. It must be globally unique. 
+ if not id_is_unique: + spec_hash = sha256( + str(spec).encode('utf8')).hexdigest()[:8] + spec["id"] += f"_{spec_hash}" + + sample = samplegen.generate_sample(spec, self._env, api_schema) + + fpath = spec["id"] + ".py" + fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = (spec, + sample) + + output_files = { + fname: CodeGeneratorResponse.File( + content=formatter.fix_whitespace(sample), + name=fname + ) + for fname, (_, sample) in fpath_to_spec_and_rendered.items() + } + + # Only generate a manifest if we generated samples. + if output_files: + manifest_fname, manifest_doc = samplegen.generate_manifest( + ((fname, spec) + for fname, (spec, _) in fpath_to_spec_and_rendered.items()), + out_dir, + api_schema + ) + + manifest_fname = os.path.join(out_dir, manifest_fname) + output_files[manifest_fname] = CodeGeneratorResponse.File( + content=manifest_doc.render(), + name=manifest_fname + ) + + return output_files + def _render_template( self, template_name: str, *, diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 658e80f3ed0c..dfc5e8b08ade 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -12,11 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict, List, Tuple +from collections import defaultdict +from typing import DefaultDict, List, Tuple + import dataclasses import os import warnings +from gapic.samplegen_utils import (types, utils as samplegen_utils) + @dataclasses.dataclass(frozen=True) class Options: @@ -28,8 +32,13 @@ class Options: """ templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) namespace: Tuple[str, ...] = dataclasses.field(default=()) + sample_configs: Tuple[str, ...] 
= dataclasses.field(default=()) name: str = '' + # Class constants + SAMPLES_OPT: str = 'samples' + PYTHON_GAPIC_PREFIX: str = 'python-gapic-' + @classmethod def build(cls, opt_string: str) -> 'Options': """Build an Options instance based on a protoc opt string. @@ -43,17 +52,25 @@ def build(cls, opt_string: str) -> 'Options': Returns: ~.Options: The Options instance. + + Raises: + gapic.samplegen_utils.types.InvalidConfig: + If paths to files or directories that should contain sample + configs are passed and no valid sample config is found. """ # Parse out every option beginning with `python-gapic` - opts: Dict[str, List[str]] = {} + opts: DefaultDict[str, List[str]] = defaultdict(list) for opt in opt_string.split(','): # Parse out the key and value. value = 'true' if '=' in opt: opt, value = opt.split('=') - # Throw away options not meant for us. - if not opt.startswith('python-gapic-'): + if opt == cls.SAMPLES_OPT: + opts[cls.SAMPLES_OPT].append(value) + + # Throw away other options not meant for us. + if not opt.startswith(cls.PYTHON_GAPIC_PREFIX): continue # Set the option, using a key with the "python-gapic-" prefix @@ -61,8 +78,7 @@ def build(cls, opt_string: str) -> 'Options': # # Just assume everything is a list at this point, and the # final instantiation step can de-list-ify where appropriate. - opts.setdefault(opt[len('python-gapic-'):], []) - opts[opt[len('python-gapic-'):]].append(value) + opts[opt[len(cls.PYTHON_GAPIC_PREFIX):]].append(value) # If templates are specified, one of the specified directories # may be our default; perform that replacement. @@ -73,12 +89,24 @@ def build(cls, opt_string: str) -> 'Options': ) # Build the options instance. 
+ sample_paths = opts.pop(cls.SAMPLES_OPT, []) answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), - templates=tuple([os.path.expanduser(i) for i in templates]), + templates=tuple(os.path.expanduser(i) for i in templates), + sample_configs=tuple( + cfg_path + for s in sample_paths + for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) + ), ) + if sample_paths and not answer.sample_configs: + raise types.InvalidConfig( + ("No valid sample config found in any of the following: " + "{}".format(", ".join(sample_paths))) + ) + # If there are any options remaining, then we failed to recognize # them -- complain. for key in opts.keys(): diff --git a/packages/gapic-generator/gapic/samplegen/__init__.py b/packages/gapic-generator/gapic/samplegen/__init__.py index 15da4b948082..520de6ba6980 100644 --- a/packages/gapic-generator/gapic/samplegen/__init__.py +++ b/packages/gapic-generator/gapic/samplegen/__init__.py @@ -13,11 +13,7 @@ # limitations under the License. 
from gapic.samplegen import samplegen -from gapic.samplegen import utils -from gapic.samplegen import yaml __all__ = ( 'samplegen', - 'utils', - 'yaml', ) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 36bbde7dc5c1..099987453b50 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -16,15 +16,15 @@ import itertools import jinja2 import keyword +import os import re import time -from gapic.samplegen import utils, yaml +from gapic.samplegen_utils import (types, yaml) from gapic.schema import (api, wrappers) from collections import (defaultdict, namedtuple, ChainMap as chainmap) -from textwrap import dedent -from typing import (ChainMap, Dict, List, Mapping, Optional, Set, Tuple, Union) +from typing import (ChainMap, Dict, List, Mapping, Optional, Tuple) from google.protobuf import descriptor_pb2 @@ -32,9 +32,6 @@ # * In real sample configs, many variables are # defined with an _implicit_ $resp variable. -MIN_SCHEMA_VERSION = (1, 2, 0) - -VALID_CONFIG_TYPE = "com.google.api.codegen.SampleConfigProto" # TODO: read in copyright and license from files. FILE_HEADER: Dict[str, str] = { @@ -58,6 +55,8 @@ ) ) +# TODO: configure the base template name so that +# e.g. other languages can use the same machinery. 
TEMPLATE_NAME = "sample.py.j2" @@ -107,66 +106,6 @@ class TransformedRequest: body: Optional[List[AttributeRequestSetup]] -class SampleError(Exception): - pass - - -class ReservedVariableName(SampleError): - pass - - -class RpcMethodNotFound(SampleError): - pass - - -class UnknownService(SampleError): - pass - - -class InvalidConfig(SampleError): - pass - - -class InvalidStatement(SampleError): - pass - - -class BadLoop(SampleError): - pass - - -class MismatchedFormatSpecifier(SampleError): - pass - - -class UndefinedVariableReference(SampleError): - pass - - -class BadAttributeLookup(SampleError): - pass - - -class RedefinedVariable(SampleError): - pass - - -class BadAssignment(SampleError): - pass - - -class InconsistentRequestName(SampleError): - pass - - -class InvalidRequestSetup(SampleError): - pass - - -class InvalidEnumVariant(SampleError): - pass - - def coerce_response_name(s: str) -> str: # In the sample config, the "$resp" keyword is used to refer to the # item of interest as received by the corresponding calling form. @@ -197,7 +136,6 @@ class Validator: def __init__(self, method: wrappers.Method): # The response ($resp) variable is special and guaranteed to exist. - # TODO: name lookup also involes type checking self.request_type_ = method.input response_type = method.output if method.paged_result_field: @@ -226,7 +164,7 @@ def var_field(self, var_name: str) -> Optional[wrappers.Field]: return self.var_defs_.get(var_name) def validate_and_transform_request(self, - calling_form: utils.CallingForm, + calling_form: types.CallingForm, request: List[Mapping[str, str]]) -> List[TransformedRequest]: """Validates and transforms the "request" block from a sample config. 
@@ -301,12 +239,12 @@ def validate_and_transform_request(self, duplicate = dict(r) val = duplicate.get("value") if not val: - raise InvalidRequestSetup( + raise types.InvalidRequestSetup( "Missing keyword in request entry: 'value'") field = duplicate.get("field") if not field: - raise InvalidRequestSetup( + raise types.InvalidRequestSetup( "Missing keyword in request entry: 'field'") spurious_keywords = set(duplicate.keys()) - {"value", @@ -315,7 +253,7 @@ def validate_and_transform_request(self, "input_parameter", "comment"} if spurious_keywords: - raise InvalidRequestSetup( + raise types.InvalidRequestSetup( "Spurious keyword(s) in request entry: {}".format( ", ".join(f"'{kword}'" for kword in spurious_keywords))) @@ -329,7 +267,7 @@ def validate_and_transform_request(self, for i, attr_name in enumerate(attr_chain): attr = base.fields.get(attr_name) if not attr: - raise BadAttributeLookup( + raise types.BadAttributeLookup( "Method request type {} has no attribute: '{}'".format( self.request_type_.type, attr_name)) @@ -340,7 +278,7 @@ def validate_and_transform_request(self, # way to verify that the value is a valid enum variant. witness = any(e.name == val for e in attr.enum.values) if not witness: - raise InvalidEnumVariant( + raise types.InvalidEnumVariant( "Invalid variant for enum {}: '{}'".format(attr, val)) # Python code can set protobuf enums from strings. # This is preferable to adding the necessary import statement @@ -353,7 +291,7 @@ def validate_and_transform_request(self, if i != len(attr_chain) - 1: # We broke out of the loop after processing an enum. extra_attrs = ".".join(attr_chain[i:]) - raise InvalidEnumVariant( + raise types.InvalidEnumVariant( f"Attempted to reference attributes of enum value: '{extra_attrs}'") if len(attr_chain) > 1: @@ -363,7 +301,7 @@ def validate_and_transform_request(self, # there can't be duplicates. # This is admittedly a bit of a hack. 
if attr_chain[0] in base_param_to_attrs: - raise InvalidRequestSetup( + raise types.InvalidRequestSetup( "Duplicated top level field in request block: '{}'".format( attr_chain[0])) del duplicate["field"] @@ -374,12 +312,12 @@ def validate_and_transform_request(self, AttributeRequestSetup(**duplicate)) # type: ignore client_streaming_forms = { - utils.CallingForm.RequestStreamingClient, - utils.CallingForm.RequestStreamingBidi, + types.CallingForm.RequestStreamingClient, + types.CallingForm.RequestStreamingBidi, } if len(base_param_to_attrs) > 1 and calling_form in client_streaming_forms: - raise InvalidRequestSetup( + raise types.InvalidRequestSetup( "Too many base parameters for client side streaming form") return [ @@ -407,13 +345,13 @@ def validate_response(self, response): for statement in response: if len(statement) != 1: - raise InvalidStatement( + raise types.InvalidStatement( "Invalid statement: {}".format(statement)) keyword, body = next(iter(statement.items())) validater = self.STATEMENT_DISPATCH_TABLE.get(keyword) if not validater: - raise InvalidStatement( + raise types.InvalidStatement( "Invalid statement keyword: {}".format(keyword)) validater(self, body) @@ -448,7 +386,7 @@ def validate_recursively(expression, scope, depth=0): base = expression[:first_dot] if first_dot > 0 else expression match = chain_link_re.match(base) if not match: - raise BadAttributeLookup( + raise types.BadAttributeLookup( f"Badly formed attribute expression: {expression}") name, idxed, mapped = (match.groupdict()["attr_name"], @@ -456,19 +394,19 @@ def validate_recursively(expression, scope, depth=0): bool(match.groupdict()["key"])) field = scope.get(name) if not field: - exception_class = (BadAttributeLookup if depth else - UndefinedVariableReference) + exception_class = (types.BadAttributeLookup if depth else + types.UndefinedVariableReference) raise exception_class(f"No such variable or attribute: {name}") # Invalid input if (idxed or mapped) and not field.repeated: - 
raise BadAttributeLookup( + raise types.BadAttributeLookup( f"Collection lookup on non-repeated field: {base}") # Can only ignore indexing or mapping in an indexed (or mapped) field # if it is the terminal point in the expression. if field.repeated and not (idxed or mapped) and first_dot != -1: - raise BadAttributeLookup( + raise types.BadAttributeLookup( ("Accessing attribute on a non-terminal collection without" f"indexing into the collection: {base}") ) @@ -480,17 +418,17 @@ def validate_recursively(expression, scope, depth=0): # See https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto#L496 # for a better understanding of how map attributes are handled in protobuf if not message or not message.options.map_field: - raise BadAttributeLookup( + raise types.BadAttributeLookup( f"Badly formed mapped field: {base}") value_field = message.fields.get("value") if not value_field: - raise BadAttributeLookup( + raise types.BadAttributeLookup( f"Mapped attribute has no value field: {base}") value_message = value_field.message if not value_message: - raise BadAttributeLookup( + raise types.BadAttributeLookup( f"Mapped value field is not a message: {base}") if first_dot != -1: @@ -502,7 +440,7 @@ def validate_recursively(expression, scope, depth=0): # Enums and primitives are only allowed at the tail of an expression. if not message: - raise BadAttributeLookup( + raise types.BadAttributeLookup( f"Non-terminal attribute is not a message: {base}") return validate_recursively(expression[first_dot + 1:], @@ -518,7 +456,7 @@ def _handle_lvalue(self, lval: str, type_: wrappers.Field): ReservedVariableName: If an attempted lvalue is a reserved keyword. 
""" if lval in RESERVED_WORDS: - raise ReservedVariableName( + raise types.ReservedVariableName( "Tried to define a variable with reserved name: {}".format( lval) ) @@ -526,7 +464,7 @@ def _handle_lvalue(self, lval: str, type_: wrappers.Field): # Even though it's valid python to reassign variables to any rvalue, # the samplegen spec prohibits this. if lval in self.var_defs_: - raise RedefinedVariable( + raise types.RedefinedVariable( "Tried to redefine variable: {}".format(lval)) self.var_defs_[lval] = type_ @@ -547,7 +485,7 @@ def _validate_format(self, body: List[str]): fmt_str = body[0] num_prints = fmt_str.count("%s") if num_prints != len(body) - 1: - raise MismatchedFormatSpecifier( + raise types.MismatchedFormatSpecifier( "Expected {} expresssions in format string but received {}".format( num_prints, len(body) - 1 ) @@ -571,7 +509,7 @@ def _validate_define(self, body: str): # re-implementing the python interpreter. m = re.match(r"^([a-zA-Z]\w*)=([^=]+)$", body) if not m: - raise BadAssignment("Bad assignment statement: {}".format(body)) + raise types.BadAssignment(f"Bad assignment statement: {body}") lval, rval = m.groups() @@ -596,14 +534,14 @@ def _validate_write_file(self, body): fname_fmt = body.get("filename") if not fname_fmt: - raise InvalidStatement( + raise types.InvalidStatement( "Missing key in 'write_file' statement: 'filename'") self._validate_format(fname_fmt) contents_var = body.get("contents") if not contents_var: - raise InvalidStatement( + raise types.InvalidStatement( "Missing key in 'write_file' statement: 'contents'") self.validate_expression(contents_var) @@ -662,7 +600,7 @@ def _validate_loop(self, loop): loop[self.COLL_KWORD]) if not collection_field.repeated: - raise BadLoop( + raise types.BadLoop( "Tried to use a non-repeated field as a collection: {}".format( tokens[-1])) @@ -683,7 +621,7 @@ def _validate_loop(self, loop): segments -= map_args segments -= {self.KEY_KWORD, self.VAL_KWORD} if segments: - raise BadLoop( + raise 
types.BadLoop( "Unexpected keywords in loop statement: {}".format( segments) ) @@ -699,11 +637,11 @@ def _validate_loop(self, loop): self._handle_lvalue(val, map_field.message.fields["value"]) if not (key or val): - raise BadLoop( + raise types.BadLoop( "Need at least one of 'key' or 'value' in a map loop") else: - raise BadLoop("Unexpected loop form: {}".format(segments)) + raise types.BadLoop("Unexpected loop form: {}".format(segments)) self.validate_response(loop[self.BODY_KWORD]) # Restore the previous lexical scope. @@ -722,95 +660,111 @@ def _validate_loop(self, loop): def generate_sample(sample, - id_is_unique: bool, env: jinja2.environment.Environment, - api_schema: api.API) -> Tuple[str, jinja2.environment.TemplateStream]: + api_schema: api.API) -> str: + """Generate a standalone, runnable sample. + Rendering and writing the rendered output is left for the caller. + + Args: + sample (Any): A definition for a single sample generated from parsed yaml. + env (jinja2.environment.Environment): The jinja environment used to generate + the filled template for the sample. + api_schema (api.API): The schema that defines the API to which the sample belongs. + + Returns: + str: The rendered sample. 
+ """ sample_template = env.get_template(TEMPLATE_NAME) service_name = sample["service"] service = api_schema.services.get(service_name) if not service: - raise UnknownService("Unknown service: {}", service_name) + raise types.UnknownService("Unknown service: {}", service_name) rpc_name = sample["rpc"] rpc = service.methods.get(rpc_name) if not rpc: - raise RpcMethodNotFound( + raise types.RpcMethodNotFound( "Could not find rpc in service {}: {}".format( service_name, rpc_name) ) - calling_form = utils.CallingForm.method_default(rpc) + calling_form = types.CallingForm.method_default(rpc) v = Validator(rpc) sample["request"] = v.validate_and_transform_request(calling_form, sample["request"]) v.validate_response(sample["response"]) - sample_fpath = ( - sample["id"] + (str(calling_form) - if not id_is_unique else "") + ".py" - ) - sample["package_name"] = api_schema.naming.warehouse_package_name - return ( - sample_fpath, - sample_template.stream( - file_header=FILE_HEADER, - sample=sample, - imports=[], - calling_form=calling_form, - calling_form_enum=utils.CallingForm, - ), + return sample_template.render( + file_header=FILE_HEADER, + sample=sample, + imports=[], + calling_form=calling_form, + calling_form_enum=types.CallingForm, ) -def generate_manifest(fpaths_and_samples, api_schema, *, manifest_time: int = None): +def generate_manifest( + fpaths_and_samples, + base_path: str, + api_schema, + *, + manifest_time: int = None +) -> Tuple[str, yaml.Doc]: """Generate a samplegen manifest for use by sampletest Args: fpaths_and_samples (Iterable[Tuple[str, Mapping[str, Any]]]): The file paths and samples to be listed in the manifest - + base_path (str): The base directory where the samples are generated. api_schema (~.api.API): An API schema object. manifest_time (int): Optional. An override for the timestamp in the name of the manifest filename. Primarily used for testing. 
Returns: - Tuple[str, Dict[str,Any]]: The filename of the manifest and the manifest data as a dictionary. + Tuple[str, yaml.Doc]: The filename of the manifest and the manifest data as a dictionary. """ - all_info = [ - yaml.KeyVal("type", "manifest/samples"), - yaml.KeyVal("schema_version", "3"), - yaml.Map( - name="python", - anchor_name="python", - elements=[ - yaml.KeyVal("environment", "python"), - yaml.KeyVal("bin", "python3"), - # TODO: make this the real sample base directory - yaml.KeyVal("base_path", "sample/base/directory"), - yaml.KeyVal("invocation", "'{bin} {path} @args'"), - ], - ), - yaml.Collection( - name="samples", - elements=[ - [ - yaml.Alias("python"), - yaml.KeyVal("sample", sample["id"]), - yaml.KeyVal("path", "'{base_path}/%s'" % fpath), - yaml.KeyVal("region_tag", sample.get("region_tag", "")), - ] - for fpath, sample in fpaths_and_samples - ], - ), - ] + doc = yaml.Doc( + [ + yaml.KeyVal("type", "manifest/samples"), + yaml.KeyVal("schema_version", "3"), + # TODO: make the environment configurable to allow other languages + # to use the same basic machinery. + yaml.Map( + name="python", + anchor_name="python", + elements=[ + yaml.KeyVal("environment", "python"), + yaml.KeyVal("bin", "python3"), + yaml.KeyVal("base_path", base_path), + yaml.KeyVal("invocation", "'{bin} {path} @args'"), + ], + ), + yaml.Collection( + name="samples", + elements=[ + [ + yaml.Alias("python"), + yaml.KeyVal("sample", sample["id"]), + yaml.KeyVal("path", + "'{base_path}/%s'" % os.path.relpath(fpath, + base_path)), + yaml.KeyVal("region_tag", + sample.get("region_tag", "")), + ] + for fpath, sample in fpaths_and_samples + ], + ), + ] + ) dt = time.gmtime(manifest_time) + # TODO: allow other language configuration manifest_fname_template = ( "{api}.{version}.python." "{year:04d}{month:02d}{day:02d}." 
@@ -829,4 +783,4 @@ def generate_manifest(fpaths_and_samples, api_schema, *, manifest_time: int = No second=dt.tm_sec, ) - return manifest_fname, all_info + return manifest_fname, doc diff --git a/packages/gapic-generator/gapic/samplegen_utils/__init__.py b/packages/gapic-generator/gapic/samplegen_utils/__init__.py new file mode 100644 index 000000000000..423d58541e74 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen_utils/__init__.py @@ -0,0 +1,23 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import gapic.samplegen_utils.types +import gapic.samplegen_utils.utils +import gapic.samplegen_utils.yaml + +__all__ = ( + 'types', + 'utils', + 'yaml', +) diff --git a/packages/gapic-generator/gapic/samplegen/utils.py b/packages/gapic-generator/gapic/samplegen_utils/types.py similarity index 60% rename from packages/gapic-generator/gapic/samplegen/utils.py rename to packages/gapic-generator/gapic/samplegen_utils/types.py index 959dd4472ae0..fad0a1503616 100644 --- a/packages/gapic-generator/gapic/samplegen/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -12,10 +12,68 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Module containing miscellaneous utilities -that will eventually move somewhere else (probably).""" - from enum import Enum, auto +from gapic.utils import to_snake_case + + +class SampleError(Exception): + pass + + +class ReservedVariableName(SampleError): + pass + + +class RpcMethodNotFound(SampleError): + pass + + +class UnknownService(SampleError): + pass + + +class InvalidConfig(SampleError): + pass + + +class InvalidStatement(SampleError): + pass + + +class BadLoop(SampleError): + pass + + +class MismatchedFormatSpecifier(SampleError): + pass + + +class UndefinedVariableReference(SampleError): + pass + + +class BadAttributeLookup(SampleError): + pass + + +class RedefinedVariable(SampleError): + pass + + +class BadAssignment(SampleError): + pass + + +class InconsistentRequestName(SampleError): + pass + + +class InvalidRequestSetup(SampleError): + pass + + +class InvalidEnumVariant(SampleError): + pass class CallingForm(Enum): @@ -41,3 +99,6 @@ def method_default(cls, m): return cls.RequestStreamingServer return cls.Request + + def __str__(self): + return to_snake_case(super().__str__().split(".")[-1]) diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py new file mode 100644 index 000000000000..a8a3daaf3783 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -0,0 +1,107 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Module containing miscellaneous utilities +that will eventually move somewhere else (probably).""" + +import os +import yaml + +from typing import (Generator, Tuple) + +from gapic.samplegen_utils import types + + +MIN_SCHEMA_VERSION = (1, 2, 0) + +VALID_CONFIG_TYPE = "com.google.api.codegen.SampleConfigProto" + + +def is_valid_sample_cfg( + doc, + min_version: Tuple[int, int, int] = MIN_SCHEMA_VERSION, + config_type: str = VALID_CONFIG_TYPE, +) -> bool: + """Predicate that takes a parsed yaml doc checks if it is a valid sampel config. + + Arguments: + doc (Any): The yaml document to be assessed + min_version (Tuple[int, int, int]): (optional) The minimum valid version for + the sample config. Uses semantic version (major, minor, bugfix). + config_type (str): (optional) The valid type of the document. + + Returns: + bool: True if doc is a valid sample config document. + + """ + def parse_version(version_str: str) -> Tuple[int, ...]: + return tuple(int(tok) for tok in version_str.split(".")) + + version_token = "schema_version" + return bool( + # Yaml may return a dict, a list, or a str + isinstance(doc, dict) + and doc.get("type") == VALID_CONFIG_TYPE + and parse_version(doc.get(version_token, "")) >= MIN_SCHEMA_VERSION + and doc.get("samples") + ) + + +def generate_all_sample_fpaths(path: str) -> Generator[str, None, None]: + """Given file or directory path, yield all valid sample config fpaths recursively. + + Arguments: + path (str): The file or directory path to check + for valid samplegen config files. + Directories are checked recursively. + + Raises: + types.InvalidConfig: If 'path' is an invalid sampleconfig file + or 'path' is not a file or directory. + + Returns: + Generator[str, None, None]: All valid samplegen config files + starting at 'path'. + """ + + # If a user passes in a directory to search for sample configs, + # it is required to ignore any non-sample-config files so as to avoid + # being unhelpfully strict. 
+ # Directly named files, however, should generate an error, because silently + # ignoring them is less helpful than failing loudly. + if os.path.isfile(path): + if not path.endswith('.yaml'): + raise types.InvalidConfig(f"Not a yaml file: {path}") + + with open(path) as f: + if not any(is_valid_sample_cfg(doc) + for doc in yaml.safe_load_all(f.read())): + raise types.InvalidConfig( + f"No valid sample config in file: {path}") + + yield path + + elif os.path.isdir(path): + yaml_file_generator = (os.path.join(dirpath, fname) + for dirpath, _, fnames in os.walk(path) + for fname in fnames if fname.endswith(".yaml")) + + for fullpath in yaml_file_generator: + with open(fullpath) as f: + if any(is_valid_sample_cfg(doc) + for doc in yaml.safe_load_all(f.read())): + yield fullpath + + else: + raise types.InvalidConfig(f"No such file or directory: {path}") diff --git a/packages/gapic-generator/gapic/samplegen/yaml.py b/packages/gapic-generator/gapic/samplegen_utils/yaml.py similarity index 89% rename from packages/gapic-generator/gapic/samplegen/yaml.py rename to packages/gapic-generator/gapic/samplegen_utils/yaml.py index 642dddbffe6e..fac783110505 100644 --- a/packages/gapic-generator/gapic/samplegen/yaml.py +++ b/packages/gapic-generator/gapic/samplegen_utils/yaml.py @@ -46,7 +46,7 @@ def render(self, spaces: int = 0) -> str: return f"{whitespace}{self.key}: {self.val}" -@dataclasses.dataclass() +@dataclasses.dataclass(frozen=True) class Collection(Element): """An ordered list of subobjects.""" name: str @@ -62,7 +62,6 @@ def render(self, spaces: int = 0) -> str: # - cephalopod: squid # bivalve: clam # gastropod: whelk - whitespace = " " * spaces return f"{self.name}:\n" + "\n".join( indent( "-" @@ -75,7 +74,7 @@ def render(self, spaces: int = 0) -> str: ) -@dataclasses.dataclass() +@dataclasses.dataclass(frozen=True) class Alias(Element): """An anchor to a map.""" target: str @@ -85,7 +84,7 @@ def render(self, spaces: int = 0) -> str: return f"{whitespace}<<: 
*{self.target}" -@dataclasses.dataclass() +@dataclasses.dataclass(frozen=True) class Map(Element): """A named collection with a list of attributes.""" name: str @@ -99,3 +98,12 @@ def render(self, spaces: int = 0): ) whitespace = " " * spaces return f"{whitespace}{self.name}:{maybe_anchor}\n{element_str}" + + +@dataclasses.dataclass(frozen=True) +class Doc(Element): + """A yaml document""" + elements: List[Element] + + def render(self): + return "---\n{}".format("\n".join(e.render() for e in self.elements)) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index d2bf329a7116..c0d1e8f87d30 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -15,7 +15,7 @@ import dataclasses import os import re -from typing import cast, List, Match, Sequence, Tuple +from typing import cast, List, Match, Tuple from google.protobuf import descriptor_pb2 @@ -45,8 +45,8 @@ def __post_init__(self): @classmethod def build(cls, - *file_descriptors: descriptor_pb2.FileDescriptorProto, - opts: options.Options = options.Options(), + *file_descriptors: descriptor_pb2.FileDescriptorProto, + opts: options.Options = options.Options(), ) -> 'Naming': """Return a full Naming instance based on these file descriptors. @@ -101,7 +101,7 @@ def build(cls, # Okay, do the match match = cast(Match, - re.search(pattern=pattern, string=root_package)).groupdict() + re.search(pattern=pattern, string=root_package)).groupdict() match['namespace'] = match['namespace'] or '' package_info = cls( name=match['name'].capitalize(), diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 1a09d8f4a13b..27458a999cb5 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import (Any, Callable, Iterable, List, Tuple, TypeVar) +from typing import (Callable, Iterable, List, Tuple, TypeVar) def empty(content: str) -> bool: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index cff226f044ac..7d3b1620f1a9 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -26,7 +26,7 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov') + session.install('coverage', 'pytest', 'pytest-cov', 'pyfakefs') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2df73866cb36..d6306bc343b2 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -51,6 +51,9 @@ extras_require={ ':python_version<"3.7"': ('dataclasses >= 0.4',), }, + tests_require=( + 'pyfakefs >= 3.6', + ), classifiers=( 'Development Status :: 4 - Beta', 'Environment :: Console', diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 96482ea95d88..846f159fdb9f 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -12,17 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from textwrap import dedent from typing import Mapping from unittest import mock import jinja2 - import pytest from google.protobuf import descriptor_pb2 +from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse from gapic.generator import generator from gapic.generator import options +from gapic.samplegen_utils import yaml from gapic.schema import api from gapic.schema import naming from gapic.schema import wrappers @@ -246,6 +248,212 @@ def test_get_filename_with_proto_and_sub(): ) == 'bar/types/baz/bacon.py' +def test_parse_sample_paths(fs): + for fpath in [ + 'sample.yaml', + 'sampledir/sample.yaml', + 'other_sampledir/sample.yaml', + ]: + fs.create_file( + fpath, + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) + ) + + opts = options.Options.build( + ("samples=sample.yaml," + "samples=sampledir/," + "samples=other_sampledir")) + + expected_configs = ( + 'sample.yaml', + 'sampledir/sample.yaml', + 'other_sampledir/sample.yaml', + ) + + assert opts.sample_configs == expected_configs + + +@mock.patch( + 'gapic.samplegen.samplegen.generate_sample', + return_value='', +) +@mock.patch( + 'time.gmtime', +) +def test_samplegen_config_to_output_files(mock_gmtime, mock_generate_sample, fs): + # These time values are nothing special, + # they just need to be deterministic. 
+ returner = mock.MagicMock() + returner.tm_year = 2112 + returner.tm_mon = 6 + returner.tm_mday = 1 + returner.tm_hour = 13 + returner.tm_min = 13 + returner.tm_sec = 13 + mock_gmtime.return_value = returner + + fs.create_file( + 'samples.yaml', + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - id: squid_sample + region_tag: humboldt_tag + rpc: get_squid_streaming + - region_tag: clam_sample + rpc: get_clam + ''' + ) + ) + + mock_generate_sample + + g = generator.Generator( + options.Options.build( + 'samples=samples.yaml', + ) + ) + api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) + actual_response = g.get_response(api_schema) + expected_response = CodeGeneratorResponse( + file=[ + CodeGeneratorResponse.File( + name="samples/squid_sample.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/clam_sample.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/Mollusc.v6.python.21120601.131313.manifest.yaml", + content=dedent("""\ + --- + type: manifest/samples + schema_version: 3 + python: &python + environment: python + bin: python3 + base_path: samples + invocation: '{bin} {path} @args' + samples: + - <<: *python + sample: squid_sample + path: '{base_path}/squid_sample.py' + region_tag: humboldt_tag + - <<: *python + sample: clam_sample + path: '{base_path}/clam_sample.py' + region_tag: clam_sample + """.rstrip()), + ) + ] + ) + + assert actual_response == expected_response + + +@mock.patch( + 'gapic.samplegen.samplegen.generate_sample', + return_value='', +) +@mock.patch( + 'time.gmtime', +) +def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): + # These time values are nothing special, + # they just need to be deterministic. 
+ returner = mock.MagicMock() + returner.tm_year = 2112 + returner.tm_mon = 6 + returner.tm_mday = 1 + returner.tm_hour = 13 + returner.tm_min = 13 + returner.tm_sec = 13 + mock_gmtime.return_value = returner + + # Note: The first two samples will have the same nominal ID, the first by + # explicit naming and the second by falling back to the region_tag. + # The third has no id of any kind, so the generator is required to make a + # unique ID for it. + fs.create_file( + 'samples.yaml', + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - id: squid_sample + region_tag: humboldt_tag + rpc: get_squid_streaming + # Note that this region tag collides with the id of the previous sample. + - region_tag: squid_sample + rpc: get_squid_streaming + # No id or region tag. + - rpc: get_squid_streaming + ''' + ) + ) + g = generator.Generator(options.Options.build('samples=samples.yaml')) + api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) + actual_response = g.get_response(api_schema) + expected_response = CodeGeneratorResponse( + file=[ + CodeGeneratorResponse.File( + name="samples/squid_sample_91a465c6.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/squid_sample_c8014108.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/157884ee.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/Mollusc.v6.python.21120601.131313.manifest.yaml", + content=dedent("""\ + --- + type: manifest/samples + schema_version: 3 + python: &python + environment: python + bin: python3 + base_path: samples + invocation: '{bin} {path} @args' + samples: + - <<: *python + sample: squid_sample_91a465c6 + path: '{base_path}/squid_sample_91a465c6.py' + region_tag: humboldt_tag + - <<: *python + sample: squid_sample_c8014108 + path: '{base_path}/squid_sample_c8014108.py' + region_tag: squid_sample + - <<: *python + sample: 157884ee + path: 
'{base_path}/157884ee.py' + region_tag: """) + ), + ] + ) + + assert actual_response == expected_response + + def make_generator(opts_str: str = '') -> generator.Generator: return generator.Generator(options.Options.build(opts_str)) diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 342cd4837a40..6cb1de1f67e1 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest from unittest import mock import warnings from gapic.generator import options +from gapic.samplegen_utils import types def test_options_empty(): @@ -46,3 +48,9 @@ def test_options_unrecognized_likely_typo(): with mock.patch.object(warnings, 'warn') as warn: options.Options.build('go-gapic-abc=xyz') assert len(warn.mock_calls) == 0 + + +def test_options_no_valid_sample_config(fs): + fs.create_file("sampledir/not_a_config.yaml") + with pytest.raises(types.InvalidConfig): + options.Options.build("samples=sampledir/") diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 87f088be542a..3f55973aa3bf 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -16,9 +16,11 @@ import os.path as path import pytest -import gapic.samplegen.samplegen as samplegen import gapic.utils as utils +from gapic.samplegen import samplegen +from gapic.samplegen_utils import (types, utils as gapic_utils) + from common_types import (DummyMethod, DummyService, DummyApiSchema, DummyNaming, message_factory, enum_factory) @@ -69,20 +71,20 @@ def test_generate_sample_basic(): "value_is_file": True}], "response": [{"print": ["Mollusc 
is a %s", "$resp.taxonomy"]}]} - fpath, template_stream = samplegen.generate_sample( - sample, True, env, schema) - sample_str = "".join(iter(template_stream)) + sample_str = samplegen.generate_sample( + sample, env, schema) + sample_id = ("mollusc_classify_sync") expected_str = '''# TODO: add a copyright # TODO: add a license # -# DO NOT EDIT! This is a generated sample ("CallingForm.Request", "mollusc_classify_sync") +# DO NOT EDIT! This is a generated sample ("request", "%s") # # To install the latest published package dependency, execute the following: # pip3 install molluscs-v1-mollusc -# [START mollusc_classify_sync] +# [START %s] def sample_classify(video): """Determine the full taxonomy of input mollusc""" @@ -99,7 +101,7 @@ def sample_classify(video): print("Mollusc is a {}".format(response.taxonomy)) -# [END mollusc_classify_sync] +# [END %s] def main(): import argparse @@ -115,7 +117,7 @@ def main(): if __name__ == "__main__": main() -''' +''' % (sample_id, sample_id, sample_id) assert sample_str == expected_str @@ -124,8 +126,8 @@ def test_generate_sample_service_not_found(): schema = DummyApiSchema({}, DummyNaming("pkg_name")) sample = {"service": "Mollusc"} - with pytest.raises(samplegen.UnknownService): - samplegen.generate_sample(sample, True, env, schema) + with pytest.raises(types.UnknownService): + samplegen.generate_sample(sample, env, schema) def test_generate_sample_rpc_not_found(): @@ -133,5 +135,197 @@ def test_generate_sample_rpc_not_found(): {"Mollusc": DummyService({})}, DummyNaming("pkg_name")) sample = {"service": "Mollusc", "rpc": "Classify"} - with pytest.raises(samplegen.RpcMethodNotFound): - samplegen.generate_sample(sample, True, env, schema) + with pytest.raises(types.RpcMethodNotFound): + list(samplegen.generate_sample(sample, env, schema)) + + +def test_generate_sample_config_fpaths(fs): + expected_path = 'cfgs/sample_config.yaml' + fs.create_file( + expected_path, + contents=dedent( + ''' + --- + type: 
com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) + ) + actual_paths = list(gapic_utils.generate_all_sample_fpaths(expected_path)) + + assert actual_paths == [expected_path] + + +def test_generate_sample_config_fpaths_directories(fs): + good_contents = dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) + # We need some invalid configs in the directory as well to verify that + # they don't cause spurious failures. + bad_contents = 'bad contents' + directory = 'sampleconfig' + for p in [ + "config_1.yaml", + "config_2.yaml", + "config_notes.txt", + "subdir/config_3.yaml", + "subdir/config_4.yaml", + "subdir/nested/config_5.yaml", + ]: + fs.create_file(path.join(directory, p), contents=good_contents) + + for p in [ + "bad_config_1.yaml", + "subdir/bad_config_2.yaml", + "subdir/nested/bad_config_3.yaml", + ]: + fs.create_file(path.join(directory, p), contents=bad_contents) + + expected_paths = [ + "sampleconfig/config_1.yaml", + "sampleconfig/config_2.yaml", + "sampleconfig/subdir/config_3.yaml", + "sampleconfig/subdir/config_4.yaml", + "sampleconfig/subdir/nested/config_5.yaml", + ] + + actual_paths = sorted(gapic_utils.generate_all_sample_fpaths(directory)) + + assert actual_paths == expected_paths + + +def test_generate_sample_config_fpaths_directories_no_configs(fs): + directory = 'sampleconfig' + for f in ['a.yaml', 'b.yaml']: + fs.create_file(path.join(directory, f)) + + actual_paths = list(gapic_utils.generate_all_sample_fpaths(directory)) + + assert not actual_paths + + +def test_generate_sample_config_fpaths_not_yaml(fs): + expected_path = 'cfgs/sample_config.not_yaml' + fs.create_file(expected_path) + + with pytest.raises(types.InvalidConfig): + list(gapic_utils.generate_all_sample_fpaths(expected_path)) + + +def 
test_generate_sample_config_fpaths_bad_contents( + fs, + # Note the typo: SampleConfigPronto + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigPronto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) +): + expected_path = 'cfgs/sample_config.yaml' + fs.create_file(expected_path, contents=contents) + + with pytest.raises(types.InvalidConfig): + list(gapic_utils.generate_all_sample_fpaths(expected_path)) + + +def test_generate_sample_config_fpaths_bad_contents_old(fs): + test_generate_sample_config_fpaths_bad_contents( + fs, + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.1.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) + ) + + +def test_generate_sample_config_fpaths_bad_contents_no_samples(fs): + test_generate_sample_config_fpaths_bad_contents( + fs, + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + ''' + ) + ) + + +def test_generate_sample_config_partial_config(fs): + expected_path = 'sample.yaml' + fs.create_file( + expected_path, + # Note the typo: SampleConfigPronto + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigPronto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + --- + # Note: this one IS a valid config + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) + ) + expected_paths = [expected_path] + + actual_paths = list(gapic_utils.generate_all_sample_fpaths(expected_path)) + + assert actual_paths == expected_paths + + +def test_generate_sample_config_partial_config_directory(fs): + directory = 'samples' + fpath = path.join(directory, 'sample.yaml') + fs.create_file( + fpath, + # Note the typo in the first sample: SampleConfigPronto + contents=dedent( + ''' + --- + # Note: this one is NOT a 
valid config + type: com.google.api.codegen.SampleConfigPronto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + --- + # Note: this one IS a valid config + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' + ) + ) + expected_paths = [fpath] + + actual_paths = list(gapic_utils.generate_all_sample_fpaths(directory)) + + assert actual_paths == expected_paths + + +def test_generate_sample_config_fpaths_no_such_file(fs): + with pytest.raises(types.InvalidConfig): + list(gapic_utils.generate_all_sample_fpaths('cfgs/sample_config.yaml')) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 893d5f95847f..ccc8e043a740 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -15,20 +15,21 @@ import yaml from textwrap import dedent -import gapic.samplegen.yaml as gapic_yaml +import gapic.samplegen_utils.yaml as gapic_yaml import gapic.samplegen.samplegen as samplegen from common_types import DummyApiSchema, DummyNaming def test_generate_manifest(): fpath_to_dummy_sample = { - "squid_fpath.py": {"id": "squid_sample"}, - "clam_fpath.py": {"id": "clam_sample", - "region_tag": "giant_clam_sample"}, + "samples/squid_fpath.py": {"id": "squid_sample"}, + "samples/clam_fpath.py": {"id": "clam_sample", + "region_tag": "giant_clam_sample"}, } fname, info = samplegen.generate_manifest( fpath_to_dummy_sample.items(), + "samples/", DummyApiSchema(naming=DummyNaming(name="Mollusc", version="v1")), # Empirically derived number such that the # corresponding time_struct tests the zero @@ -38,7 +39,7 @@ def test_generate_manifest(): assert fname == "Mollusc.v1.python.21120304.090708.manifest.yaml" - expected_info = [ + doc = gapic_yaml.Doc([ gapic_yaml.KeyVal("type", 
"manifest/samples"), gapic_yaml.KeyVal("schema_version", "3"), gapic_yaml.Map(name="python", @@ -49,7 +50,7 @@ def test_generate_manifest(): gapic_yaml.KeyVal( "bin", "python3"), gapic_yaml.KeyVal( - "base_path", "sample/base/directory"), + "base_path", "samples/"), gapic_yaml.KeyVal( "invocation", "'{bin} {path} @args'"), ]), @@ -75,17 +76,18 @@ def test_generate_manifest(): "region_tag", "giant_clam_sample") ], ]) - ] + ]) - assert info == expected_info + assert info == doc expected_rendering = dedent(""" + --- type: manifest/samples schema_version: 3 python: &python environment: python bin: python3 - base_path: sample/base/directory + base_path: samples/ invocation: '{bin} {path} @args' samples: - <<: *python @@ -97,7 +99,7 @@ def test_generate_manifest(): path: '{base_path}/clam_fpath.py' region_tag: giant_clam_sample""".lstrip("\n")) - rendered_yaml = "\n".join(e.render() for e in info) + rendered_yaml = doc.render() assert rendered_yaml == expected_rendering expected_parsed_manifest = { @@ -106,14 +108,14 @@ def test_generate_manifest(): "python": { "environment": "python", "bin": "python3", - "base_path": "sample/base/directory", + "base_path": "samples/", "invocation": "{bin} {path} @args", }, "samples": [ { "environment": "python", "bin": "python3", - "base_path": "sample/base/directory", + "base_path": "samples/", "invocation": "{bin} {path} @args", "sample": "squid_sample", "path": "{base_path}/squid_fpath.py", @@ -122,7 +124,7 @@ def test_generate_manifest(): { "environment": "python", "bin": "python3", - "base_path": "sample/base/directory", + "base_path": "samples/", "invocation": "{bin} {path} @args", "sample": "clam_sample", "path": "{base_path}/clam_fpath.py", diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 4711691fa8bd..b3efc4fa3583 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ 
b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -19,13 +19,14 @@ from collections import namedtuple from google.protobuf import descriptor_pb2 -import gapic.schema.wrappers as wrappers -import gapic.samplegen.yaml as gapic_yaml import gapic.samplegen.samplegen as samplegen +import gapic.samplegen_utils.types as types +import gapic.samplegen_utils.yaml as gapic_yaml +import gapic.schema.wrappers as wrappers from common_types import (DummyField, DummyMessage, DummyMethod, message_factory, enum_factory) -from gapic.samplegen import utils +from gapic.samplegen_utils import utils # validate_response tests @@ -40,14 +41,14 @@ def test_define(): def test_define_undefined_var(): define = {"define": "squid=humboldt"} v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response([define]) def test_define_reserved_varname(): define = {"define": "class=$resp"} v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) - with pytest.raises(samplegen.ReservedVariableName): + with pytest.raises(types.ReservedVariableName): v.validate_response([define]) @@ -61,7 +62,7 @@ def test_define_add_var(): def test_define_bad_form(): define = {"define": "mollusc=$resp.squid=$resp.clam"} v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) - with pytest.raises(samplegen.BadAssignment): + with pytest.raises(types.BadAssignment): v.validate_response([define]) @@ -72,7 +73,7 @@ def test_define_redefinition(): ] v = samplegen.Validator(DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True]))) - with pytest.raises(samplegen.RedefinedVariable): + with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -80,7 +81,7 @@ def test_define_input_param(): v = samplegen.Validator( DummyMethod(input=message_factory("mollusc.squid.mantle_length"))) 
v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [ { "field": "squid.mantle_length", @@ -96,7 +97,7 @@ def test_define_input_param_redefinition(): v = samplegen.Validator(DummyMethod( input=message_factory("mollusc.squid.mantle_length"))) v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [ { "field": "squid.mantle_length", @@ -105,7 +106,7 @@ def test_define_input_param_redefinition(): } ], ) - with pytest.raises(samplegen.RedefinedVariable): + with pytest.raises(types.RedefinedVariable): v.validate_response( [{"define": "mantle_length=mantle_length"}]) @@ -124,21 +125,21 @@ def test_print_fmt_str(): def test_print_fmt_mismatch(): print_statement = {"print": ["This is a squid named %s"]} v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) - with pytest.raises(samplegen.MismatchedFormatSpecifier): + with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([print_statement]) def test_print_fmt_mismatch2(): print_statement = {"print": ["This is a squid", "$resp.name"]} v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) - with pytest.raises(samplegen.MismatchedFormatSpecifier): + with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([print_statement]) def test_print_undefined_var(): print_statement = {"print": ["This mollusc is a %s", "mollusc.type"]} v = samplegen.Validator(DummyMethod(output=message_factory("$resp.type"))) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response([print_statement]) @@ -156,21 +157,21 @@ def test_comment_fmt_str(): def test_comment_fmt_undefined_var(): comment = {"comment": ["This is a mollusc of class %s", "cephalopod"]} v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): 
v.validate_response([comment]) def test_comment_fmt_mismatch(): comment = {"comment": ["This is a mollusc of class %s"]} v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.MismatchedFormatSpecifier): + with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([comment]) def test_comment_fmt_mismatch2(): comment = {"comment": ["This is a mollusc of class ", "$resp.class"]} v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.MismatchedFormatSpecifier): + with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([comment]) @@ -201,7 +202,7 @@ def test_loop_collection_redefinition(): ] v = samplegen.Validator( DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True]))) - with pytest.raises(samplegen.RedefinedVariable): + with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -214,7 +215,7 @@ def test_loop_undefined_collection(): } } v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response([loop]) @@ -228,7 +229,7 @@ def test_loop_collection_extra_kword(): } } v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([loop]) @@ -240,7 +241,7 @@ def test_loop_collection_missing_kword(): } } v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([loop]) @@ -254,7 +255,7 @@ def test_loop_collection_reserved_loop_var(): } v = samplegen.Validator(DummyMethod( output=message_factory("$resp.molluscs", repeated_iter=[True]))) - with pytest.raises(samplegen.ReservedVariableName): + with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) @@ -305,7 +306,7 @@ def test_collection_loop_lexical_scope_variable(): ] v = samplegen.Validator(DummyMethod( 
output=message_factory("$resp.molluscs", repeated_iter=[True]))) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -322,7 +323,7 @@ def test_collection_loop_lexical_scope_inline(): ] v = samplegen.Validator(DummyMethod( output=message_factory("$resp.molluscs", repeated_iter=[True]))) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -363,7 +364,7 @@ def test_map_loop_lexical_scope_key(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -404,7 +405,7 @@ def test_map_loop_lexical_scope_value(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -444,7 +445,7 @@ def test_map_loop_lexical_scope_inline(): type="RESPONSE_TYPE" ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -481,7 +482,7 @@ def test_loop_map_reserved_key(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.ReservedVariableName): + with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) @@ -518,7 +519,7 @@ def test_loop_map_reserved_val(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.ReservedVariableName): + with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) @@ -532,7 +533,7 @@ def test_loop_map_undefined(): } } v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.UndefinedVariableReference): + 
with pytest.raises(types.UndefinedVariableReference): v.validate_response([loop]) @@ -631,7 +632,7 @@ def test_loop_map_no_key_or_value(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([loop]) @@ -644,14 +645,14 @@ def test_loop_map_no_map(): } } v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([loop]) def test_loop_map_no_body(): loop = {"loop": {"map": "$resp.molluscs", "key": "name", "value": "mollusc"}} v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([loop]) @@ -666,7 +667,7 @@ def test_loop_map_extra_kword(): } } v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([loop]) @@ -705,7 +706,7 @@ def test_loop_map_redefined_key(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.RedefinedVariable): + with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -744,7 +745,7 @@ def test_loop_map_redefined_value(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.RedefinedVariable): + with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -771,7 +772,7 @@ def test_validate_write_file_fname_fmt(): statements = [{"write_file": {"filename": ["specimen-%s"], "contents": "$resp.photo"}}] v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.MismatchedFormatSpecifier): + with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response(statements) @@ -783,7 +784,7 @@ def test_validate_write_file_fname_bad_var(): } }] v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.UndefinedVariableReference): + with 
pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -796,7 +797,7 @@ def test_validate_write_file_missing_fname(): } ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.InvalidStatement): + with pytest.raises(types.InvalidStatement): v.validate_response(statements) @@ -811,7 +812,7 @@ def test_validate_write_file_missing_contents(): ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.InvalidStatement): + with pytest.raises(types.InvalidStatement): v.validate_response(statements) @@ -829,21 +830,21 @@ def test_validate_write_file_bad_contents_var(): } ) v = samplegen.Validator(DummyMethod(output=OutputType)) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) def test_invalid_statement(): statements = [{"print": ["Name"], "comment": ["Value"]}] v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.InvalidStatement): + with pytest.raises(types.InvalidStatement): v.validate_response(statements) def test_invalid_statement2(): statements = [{"squidify": ["Statement body"]}] v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.InvalidStatement): + with pytest.raises(types.InvalidStatement): v.validate_response(statements) @@ -867,7 +868,7 @@ def test_validate_request_basic(): v = samplegen.Validator(DummyMethod(input=input_type)) actual = v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [ {"field": "squid.mantle_length", "value": "100 cm"}, {"field": "squid.mantle_mass", "value": "10 kg"}, @@ -890,9 +891,9 @@ def test_validate_request_basic(): def test_validate_request_no_field_parameter(): # May need to remeove this test because it doesn't necessarily make sense any more. 
v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.InvalidRequestSetup): + with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - utils.CallingForm.Request, [{"squid": "humboldt", + types.CallingForm.Request, [{"squid": "humboldt", "value": "teuthida"}] ) @@ -900,9 +901,9 @@ def test_validate_request_no_field_parameter(): def test_validate_request_no_such_attribute(): v = samplegen.Validator(DummyMethod( input=message_factory("mollusc.squid.mantle"))) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "clam.shell", "value": "20"}] ) @@ -911,7 +912,7 @@ def test_validate_request_top_level_field(): v = samplegen.Validator(DummyMethod( input=message_factory("mollusc.squid"))) actual = v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "squid", "value": "humboldt"}] ) @@ -929,9 +930,9 @@ def test_validate_request_top_level_field(): def test_validate_request_missing_keyword(kword="field"): v = samplegen.Validator(DummyMethod( input=message_factory("mollusc.squid"))) - with pytest.raises(samplegen.InvalidRequestSetup): + with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{kword: "squid"}] ) @@ -943,9 +944,9 @@ def test_validate_request_missing_value(): def test_validate_request_spurious_kword(): v = samplegen.Validator( DummyMethod(input=message_factory("mollusc.squid"))) - with pytest.raises(samplegen.InvalidRequestSetup): + with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "mollusc.squid", "value": "humboldt", "order": "teuthida"}] ) @@ -955,7 +956,7 @@ def test_validate_request_unknown_field_type(): input=DummyMessage(fields={"squid": 
DummyField()}))) with pytest.raises(TypeError): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "squid", "value": "humboldt"}] ) @@ -963,9 +964,9 @@ def test_validate_request_unknown_field_type(): def test_validate_request_duplicate_top_level_fields(): v = samplegen.Validator(DummyMethod( input=message_factory("mollusc.squid"))) - with pytest.raises(samplegen.InvalidRequestSetup): + with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "squid", "value": "humboldt"}, {"field": "squid", "value": "bobtail"}] ) @@ -994,7 +995,7 @@ def test_validate_request_multiple_arguments(): v = samplegen.Validator(DummyMethod(input=input_type)) actual = v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [ { "field": "squid.mantle_length", @@ -1051,9 +1052,9 @@ def test_validate_request_duplicate_input_param(): ) v = samplegen.Validator(DummyMethod(input=input_type)) - with pytest.raises(samplegen.RedefinedVariable): + with pytest.raises(types.RedefinedVariable): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [ { "field": "squid.mantle_mass", @@ -1071,9 +1072,9 @@ def test_validate_request_duplicate_input_param(): def test_validate_request_reserved_input_param(): v = samplegen.Validator(DummyMethod()) - with pytest.raises(samplegen.ReservedVariableName): + with pytest.raises(types.ReservedVariableName): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [ { "field": "mollusc.class", @@ -1085,7 +1086,7 @@ def test_validate_request_reserved_input_param(): def test_single_request_client_streaming( - calling_form=utils.CallingForm.RequestStreamingClient): + calling_form=types.CallingForm.RequestStreamingClient): # Each API client method really only takes one parameter: # either a single protobuf message or an 
iterable of protobuf messages. # With unary request methods, python lets us describe attributes as positional @@ -1120,9 +1121,9 @@ def test_single_request_client_streaming( type="MOLLUSC_TYPE" ) v = samplegen.Validator(DummyMethod(input=input_type)) - with pytest.raises(samplegen.InvalidRequestSetup): + with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - utils.CallingForm.RequestStreamingClient, + types.CallingForm.RequestStreamingClient, [ {"field": "cephalopod.order", "value": "cephalopoda"}, {"field": "gastropod.order", "value": "pulmonata"}, @@ -1132,38 +1133,38 @@ def test_single_request_client_streaming( def test_single_request_bidi_streaming(): test_single_request_client_streaming( - utils.CallingForm.RequestStreamingBidi) + types.CallingForm.RequestStreamingBidi) def test_validate_request_calling_form(): assert ( - utils.CallingForm.method_default(DummyMethod(lro=True)) - == utils.CallingForm.LongRunningRequestPromise + types.CallingForm.method_default(DummyMethod(lro=True)) + == types.CallingForm.LongRunningRequestPromise ) assert ( - utils.CallingForm.method_default(DummyMethod(paged_result_field=True)) - == utils.CallingForm.RequestPagedAll + types.CallingForm.method_default(DummyMethod(paged_result_field=True)) + == types.CallingForm.RequestPagedAll ) assert ( - utils.CallingForm.method_default(DummyMethod(client_streaming=True)) - == utils.CallingForm.RequestStreamingClient + types.CallingForm.method_default(DummyMethod(client_streaming=True)) + == types.CallingForm.RequestStreamingClient ) assert ( - utils.CallingForm.method_default(DummyMethod(server_streaming=True)) - == utils.CallingForm.RequestStreamingServer + types.CallingForm.method_default(DummyMethod(server_streaming=True)) + == types.CallingForm.RequestStreamingServer ) - assert utils.CallingForm.method_default( - DummyMethod()) == utils.CallingForm.Request + assert types.CallingForm.method_default( + DummyMethod()) == types.CallingForm.Request assert ( - 
utils.CallingForm.method_default( + types.CallingForm.method_default( DummyMethod(client_streaming=True, server_streaming=True) ) - == utils.CallingForm.RequestStreamingBidi + == types.CallingForm.RequestStreamingBidi ) @@ -1218,7 +1219,7 @@ def test_validate_expression_undefined_base(): method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.UndefinedVariableReference): + with pytest.raises(types.UndefinedVariableReference): v.validate_expression("mollusc") @@ -1227,7 +1228,7 @@ def test_validate_expression_no_such_attr(): method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_expression("$resp.nautiloidea") @@ -1240,7 +1241,7 @@ def test_validate_expression_non_indexed_non_terminal_repeated(): method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_response( [{"define": "octopus=$resp.coleoidea.octopodiformes"}]) @@ -1279,7 +1280,7 @@ def test_validate_expression_collection_error(): v = samplegen.Validator(method) # Because 'molluscs' isn't repeated - with pytest.raises(samplegen.BadLoop): + with pytest.raises(types.BadLoop): v.validate_response([statement]) @@ -1304,7 +1305,7 @@ def test_validate_expression_repeated_lookup_invalid(): OutputType = message_factory(exp) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_expression("$resp.molluscs[0].mantle") @@ -1411,7 +1412,7 @@ def test_validate_expression_mapped_no_map_field(): ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): 
v.validate_expression('$resp.cephalopods{"squid"}.mantle') @@ -1431,7 +1432,7 @@ def test_validate_expression_mapped_no_value(): ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_expression('$resp.cephalopods{"squid"}.mantle') @@ -1454,7 +1455,7 @@ def test_validate_expression_mapped_no_message(): ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_expression('$resp.cephalopods{"squid"}.mantle') @@ -1463,7 +1464,7 @@ def test_validate_expresssion_lookup_unrepeated_base(): OutputType = message_factory(exp) method = DummyMethod(output=OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_response([{"define": "m=$resp[0]"}]) @@ -1473,7 +1474,7 @@ def test_validate_expression_malformed_base(): OutputType = message_factory(exp) method = DummyMethod(OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_expression(exp) @@ -1483,7 +1484,7 @@ def test_validate_expression_malformed_attr(): OutputType = message_factory(exp) method = DummyMethod(OutputType) v = samplegen.Validator(method) - with pytest.raises(samplegen.BadAttributeLookup): + with pytest.raises(types.BadAttributeLookup): v.validate_expression(exp) @@ -1493,7 +1494,7 @@ def test_validate_request_enum(): v = samplegen.Validator(DummyMethod(input=request_type)) actual = v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "cephalopod.subclass", "value": "COLEOIDEA"}] ) expected = [samplegen.TransformedRequest( @@ -1510,7 +1511,7 @@ def test_validate_request_enum_top_level(): v = 
samplegen.Validator(DummyMethod(input=request_type)) actual = v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "subclass", "value": "COLEOIDEA"}] ) expected = [samplegen.TransformedRequest( @@ -1525,9 +1526,9 @@ def test_validate_request_enum_invalid_value(): request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), input=request_type)) - with pytest.raises(samplegen.InvalidEnumVariant): + with pytest.raises(types.InvalidEnumVariant): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, # Heterodonta are bivalves, not cephalopods [{"field": "cephalopod.subclass", "value": "HETERODONTA"}] ) @@ -1538,8 +1539,8 @@ def test_validate_request_enum_not_last_attr(): request_type = message_factory("mollusc.subclass", enum=enum) v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), input=request_type)) - with pytest.raises(samplegen.InvalidEnumVariant): + with pytest.raises(types.InvalidEnumVariant): v.validate_and_transform_request( - utils.CallingForm.Request, + types.CallingForm.Request, [{"field": "subclass.order", "value": "COLEOIDEA"}] ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 974fb8236138..52b317ddc39d 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -18,7 +18,7 @@ import gapic.samplegen.samplegen as samplegen import gapic.utils as utils -from gapic.samplegen.utils import CallingForm +from gapic.samplegen_utils.types import CallingForm from textwrap import dedent From 6c3b3cd1fb2a172d1973fc63023fdeed81170f29 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 15 Aug 2019 13:48:03 -0700 Subject: [PATCH 0150/1339] Minor cleanups for generated sample manifest 
(#181) Manifest does not render empty region tags Sample manifest ends in a newline --- .../gapic/samplegen/samplegen.py | 11 +++-- .../gapic/samplegen_utils/yaml.py | 16 ++++++-- .../tests/unit/generator/test_generator.py | 4 +- .../tests/unit/samplegen/test_manifest.py | 41 +++++++++---------- 4 files changed, 42 insertions(+), 30 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 099987453b50..4db76325b9f0 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -729,6 +729,7 @@ def generate_manifest( Tuple[str, yaml.Doc]: The filename of the manifest and the manifest data as a dictionary. """ + doc = yaml.Doc( [ yaml.KeyVal("type", "manifest/samples"), @@ -748,14 +749,18 @@ def generate_manifest( yaml.Collection( name="samples", elements=[ - [ + [ # type: ignore + # Mypy doesn't correctly intuit the type of the + # "region_tag" conditional expression. 
yaml.Alias("python"), yaml.KeyVal("sample", sample["id"]), yaml.KeyVal("path", "'{base_path}/%s'" % os.path.relpath(fpath, base_path)), - yaml.KeyVal("region_tag", - sample.get("region_tag", "")), + (yaml.KeyVal("region_tag", sample["region_tag"]) + if "region_tag" in sample else + yaml.Null), + ] for fpath, sample in fpaths_and_samples ], diff --git a/packages/gapic-generator/gapic/samplegen_utils/yaml.py b/packages/gapic-generator/gapic/samplegen_utils/yaml.py index fac783110505..87e505a1d682 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/yaml.py +++ b/packages/gapic-generator/gapic/samplegen_utils/yaml.py @@ -35,6 +35,12 @@ def render(self, spaces: int = 0) -> str: return "" +@dataclasses.dataclass(frozen=True) +class Null(Element): + def render(self, spaces: int = 0) -> str: + return "" + + @dataclasses.dataclass(frozen=True) class KeyVal(Element): """A single key/value entry.""" @@ -65,9 +71,11 @@ def render(self, spaces: int = 0) -> str: return f"{self.name}:\n" + "\n".join( indent( "-" - + "\n".join(e.render(spaces=spaces + self.INDENT_SPACES) for e in l)[ - 1: - ], + + "\n".join( + r + for r in (e.render(spaces + self.INDENT_SPACES) for e in l) + if r + )[1:], " " * (spaces), ) for l in self.elements @@ -106,4 +114,4 @@ class Doc(Element): elements: List[Element] def render(self): - return "---\n{}".format("\n".join(e.render() for e in self.elements)) + return "---\n{}\n".format("\n".join(e.render() for e in self.elements)) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 846f159fdb9f..b38e3a51954f 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -356,7 +356,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, mock_generate_sample, fs) sample: clam_sample path: '{base_path}/clam_sample.py' region_tag: clam_sample - """.rstrip()), + """), ) 
] ) @@ -446,7 +446,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): - <<: *python sample: 157884ee path: '{base_path}/157884ee.py' - region_tag: """) + """) ), ] ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index ccc8e043a740..13a735c5f522 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -63,8 +63,7 @@ def test_generate_manifest(): "sample", "squid_sample"), gapic_yaml.KeyVal( "path", "'{base_path}/squid_fpath.py'"), - gapic_yaml.KeyVal( - "region_tag", ""), + gapic_yaml.Null, ], [ gapic_yaml.Alias("python"), @@ -80,24 +79,25 @@ def test_generate_manifest(): assert info == doc - expected_rendering = dedent(""" - --- - type: manifest/samples - schema_version: 3 - python: &python - environment: python - bin: python3 - base_path: samples/ - invocation: '{bin} {path} @args' - samples: - - <<: *python - sample: squid_sample - path: '{base_path}/squid_fpath.py' - region_tag: - - <<: *python - sample: clam_sample - path: '{base_path}/clam_fpath.py' - region_tag: giant_clam_sample""".lstrip("\n")) + expected_rendering = dedent( + """\ + --- + type: manifest/samples + schema_version: 3 + python: &python + environment: python + bin: python3 + base_path: samples/ + invocation: '{bin} {path} @args' + samples: + - <<: *python + sample: squid_sample + path: '{base_path}/squid_fpath.py' + - <<: *python + sample: clam_sample + path: '{base_path}/clam_fpath.py' + region_tag: giant_clam_sample + """) rendered_yaml = doc.render() assert rendered_yaml == expected_rendering @@ -119,7 +119,6 @@ def test_generate_manifest(): "invocation": "{bin} {path} @args", "sample": "squid_sample", "path": "{base_path}/squid_fpath.py", - "region_tag": None, }, { "environment": "python", From a3884d425ef97fae96f3096dad4652df7534e682 Mon Sep 17 00:00:00 2001 From: Dov 
Shlachter Date: Tue, 20 Aug 2019 13:22:35 -0700 Subject: [PATCH 0151/1339] Initial part of abstracting out python specifics from sample generation (#182) Initial movement of manifest generation to a new file Lift out python specifics Move some unrelated code to utility file --- .../gapic/generator/generator.py | 14 +- .../gapic/samplegen/__init__.py | 2 + .../gapic/samplegen/manifest.py | 125 +++++++++++++++++ .../gapic/samplegen/samplegen.py | 126 +++--------------- .../gapic/samplegen_utils/types.py | 4 + .../gapic/samplegen_utils/utils.py | 14 ++ .../gapic/samplegen_utils/yaml.py | 12 ++ .../tests/unit/samplegen/test_integration.py | 2 +- .../tests/unit/samplegen/test_manifest.py | 25 ++-- .../tests/unit/samplegen/test_samplegen.py | 24 +++- .../tests/unit/samplegen/test_template.py | 3 +- 11 files changed, 227 insertions(+), 124 deletions(-) create mode 100644 packages/gapic-generator/gapic/samplegen/manifest.py diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 9c70f5ababd2..796021c06633 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -19,9 +19,10 @@ from typing import (Any, DefaultDict, Dict, Mapping, List) from hashlib import sha256 from collections import (OrderedDict, defaultdict) -from gapic.samplegen_utils.utils import is_valid_sample_cfg +from gapic.samplegen_utils.utils import ( + coerce_response_name, is_valid_sample_cfg) from gapic.samplegen_utils.types import InvalidConfig -from gapic.samplegen import samplegen +from gapic.samplegen import (manifest, samplegen) from gapic.generator import options from gapic.generator import formatter from gapic.schema import api @@ -56,7 +57,7 @@ def __init__(self, opts: options.Options) -> None: self._env.filters['snake_case'] = utils.to_snake_case self._env.filters['sort_lines'] = utils.sort_lines self._env.filters['wrap'] = utils.wrap - 
self._env.filters['coerce_response_name'] = samplegen.coerce_response_name + self._env.filters['coerce_response_name'] = coerce_response_name self._sample_configs = opts.sample_configs @@ -76,7 +77,8 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: output_files: Dict[str, CodeGeneratorResponse.File] = OrderedDict() sample_templates, client_templates = utils.partition( - lambda fname: os.path.basename(fname) == samplegen.TEMPLATE_NAME, + lambda fname: os.path.basename( + fname) == samplegen.DEFAULT_TEMPLATE_NAME, self._env.loader.list_templates()) # Iterate over each template and add the appropriate output files @@ -138,7 +140,6 @@ def _generate_samples_and_manifest( spec["id"] = sample_id id_to_samples[sample_id].append(spec) - # Interpolate the special variables in the sample_out_dir template. out_dir = "samples" fpath_to_spec_and_rendered = {} for samples in id_to_samples.values(): @@ -167,10 +168,9 @@ def _generate_samples_and_manifest( # Only generate a manifest if we generated samples. if output_files: - manifest_fname, manifest_doc = samplegen.generate_manifest( + manifest_fname, manifest_doc = manifest.generate( ((fname, spec) for fname, (spec, _) in fpath_to_spec_and_rendered.items()), - out_dir, api_schema ) diff --git a/packages/gapic-generator/gapic/samplegen/__init__.py b/packages/gapic-generator/gapic/samplegen/__init__.py index 520de6ba6980..43c58c727f60 100644 --- a/packages/gapic-generator/gapic/samplegen/__init__.py +++ b/packages/gapic-generator/gapic/samplegen/__init__.py @@ -13,7 +13,9 @@ # limitations under the License. 
from gapic.samplegen import samplegen +from gapic.samplegen import manifest __all__ = ( + 'manifest', 'samplegen', ) diff --git a/packages/gapic-generator/gapic/samplegen/manifest.py b/packages/gapic-generator/gapic/samplegen/manifest.py new file mode 100644 index 000000000000..871e82737226 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen/manifest.py @@ -0,0 +1,125 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import time +from typing import Tuple + +from gapic.samplegen_utils import (types, yaml) + +BASE_PATH_KEY = "base_path" +DEFAULT_SAMPLE_DIR = "samples" + +# The default environment for executing python samples. +# Custom environments must adhere to the following pattern: +# they must be a yaml.Map with a defined anchor_name field, +# and 'environment', 'base_path', and 'invocation' keys must be present. +# The 'invocation' key must map to an interpolable commandline +# that will invoke the given sample. 
+PYTHON3_ENVIRONMENT = yaml.Map( + name="python", + anchor_name="python", + elements=[ + yaml.KeyVal("environment", "python"), + yaml.KeyVal("bin", "python3"), + yaml.KeyVal(BASE_PATH_KEY, DEFAULT_SAMPLE_DIR), + yaml.KeyVal("invocation", "'{bin} {path} @args'"), + ], +) + + +def generate( + fpaths_and_samples, + api_schema, + *, + environment: yaml.Map = PYTHON3_ENVIRONMENT, + manifest_time: int = None +) -> Tuple[str, yaml.Doc]: + """Generate a samplegen manifest for use by sampletest + + Args: + fpaths_and_samples (Iterable[Tuple[str, Mapping[str, Any]]]): + The file paths and samples to be listed in the manifest + api_schema (~.api.API): An API schema object. + environment (yaml.Map): Optional custom sample execution environment. + Set this if the samples are being generated for + a custom language. + manifest_time (int): Optional. An override for the timestamp in the name of the manifest filename. + Primarily used for testing. + + Returns: + Tuple[str, yaml.Doc]: The filename of the manifest and the manifest data as a dictionary. + + Raises: + types.InvalidSampleFpath: If any of the paths in fpaths_and_samples do not + begin with the base_path from the environment. + + """ + base_path = environment.get(BASE_PATH_KEY, DEFAULT_SAMPLE_DIR) + + def transform_path(fpath): + fpath = os.path.normpath(fpath) + if not fpath.startswith(base_path): + raise types.InvalidSampleFpath( + f"Sample fpath does not start with '{base_path}': {fpath}") + + return "'{base_path}/%s'" % os.path.relpath(fpath, base_path) + + doc = yaml.Doc( + [ + yaml.KeyVal("type", "manifest/samples"), + yaml.KeyVal("schema_version", "3"), + environment, + yaml.Collection( + name="samples", + elements=[ + [ # type: ignore + # Mypy doesn't correctly intuit the type of the + # "region_tag" conditional expression. 
+ yaml.Alias(environment.anchor_name or ""), + yaml.KeyVal("sample", sample["id"]), + yaml.KeyVal( + "path", transform_path(fpath) + ), + (yaml.KeyVal("region_tag", sample["region_tag"]) + if "region_tag" in sample else + yaml.Null), + ] + for fpath, sample in fpaths_and_samples + ], + ), + ] + ) + + dt = time.gmtime(manifest_time) + manifest_fname_template = ( + "{api}.{version}.{language}." + "{year:04d}{month:02d}{day:02d}." + "{hour:02d}{minute:02d}{second:02d}." + "manifest.yaml" + ) + + manifest_fname = manifest_fname_template.format( + api=api_schema.naming.name, + version=api_schema.naming.version, + language=environment.name, + year=dt.tm_year, + month=dt.tm_mon, + day=dt.tm_mday, + hour=dt.tm_hour, + minute=dt.tm_min, + second=dt.tm_sec, + ) + + return manifest_fname, doc diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 4db76325b9f0..2a23e9758085 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -20,7 +20,7 @@ import re import time -from gapic.samplegen_utils import (types, yaml) +from gapic.samplegen_utils import types from gapic.schema import (api, wrappers) from collections import (defaultdict, namedtuple, ChainMap as chainmap) @@ -55,9 +55,7 @@ ) ) -# TODO: configure the base template name so that -# e.g. other languages can use the same machinery. -TEMPLATE_NAME = "sample.py.j2" +DEFAULT_TEMPLATE_NAME = "sample.py.j2" @dataclasses.dataclass(frozen=True) @@ -106,20 +104,6 @@ class TransformedRequest: body: Optional[List[AttributeRequestSetup]] -def coerce_response_name(s: str) -> str: - # In the sample config, the "$resp" keyword is used to refer to the - # item of interest as received by the corresponding calling form. - # For a 'regular', i.e. 
unary, synchronous, non-long-running method, - # it's the return value; for a server-streaming method, it's the iteration - # variable in the for loop that iterates over the return value, and for - # a long running promise, the user calls result on the method return value to - # resolve the future. - # - # The sample schema uses '$resp' as the special variable, - # but in the samples the 'response' variable is used instead. - return s.replace("$resp", "response") - - class Validator: """Class that validates a sample. @@ -160,6 +144,17 @@ def __init__(self, method: wrappers.Method): } ) + @staticmethod + def preprocess_sample(sample, api_schema): + """Modify a sample to set default or missing fields. + + Args: + sample (Any): A definition for a single sample generated from parsed yaml. + api_schema (api.API): The schema that defines the API to which the sample belongs. + """ + sample["package_name"] = api_schema.naming.warehouse_package_name + sample.setdefault("response", [{"print": ["%s", "$resp"]}]) + def var_field(self, var_name: str) -> Optional[wrappers.Field]: return self.var_defs_.get(var_name) @@ -661,7 +656,8 @@ def _validate_loop(self, loop): def generate_sample(sample, env: jinja2.environment.Environment, - api_schema: api.API) -> str: + api_schema: api.API, + template_name: str = DEFAULT_TEMPLATE_NAME) -> str: """Generate a standalone, runnable sample. Rendering and writing the rendered output is left for the caller. @@ -671,11 +667,13 @@ def generate_sample(sample, env (jinja2.environment.Environment): The jinja environment used to generate the filled template for the sample. api_schema (api.API): The schema that defines the API to which the sample belongs. + template_name (str): An optional override for the name of the template + used to generate the sample. Returns: str: The rendered sample. 
""" - sample_template = env.get_template(TEMPLATE_NAME) + sample_template = env.get_template(template_name) service_name = sample["service"] service = api_schema.services.get(service_name) @@ -693,12 +691,13 @@ def generate_sample(sample, calling_form = types.CallingForm.method_default(rpc) v = Validator(rpc) + # Tweak some small aspects of the sample to set sane defaults for optional + # fields, add fields that are required for the template, and so forth. + v.preprocess_sample(sample, api_schema) sample["request"] = v.validate_and_transform_request(calling_form, sample["request"]) v.validate_response(sample["response"]) - sample["package_name"] = api_schema.naming.warehouse_package_name - return sample_template.render( file_header=FILE_HEADER, sample=sample, @@ -706,86 +705,3 @@ def generate_sample(sample, calling_form=calling_form, calling_form_enum=types.CallingForm, ) - - -def generate_manifest( - fpaths_and_samples, - base_path: str, - api_schema, - *, - manifest_time: int = None -) -> Tuple[str, yaml.Doc]: - """Generate a samplegen manifest for use by sampletest - - Args: - fpaths_and_samples (Iterable[Tuple[str, Mapping[str, Any]]]): - The file paths and samples to be listed in the manifest - base_path (str): The base directory where the samples are generated. - api_schema (~.api.API): An API schema object. - manifest_time (int): Optional. An override for the timestamp in the name of the manifest filename. - Primarily used for testing. - - Returns: - Tuple[str, yaml.Doc]: The filename of the manifest and the manifest data as a dictionary. - - """ - - doc = yaml.Doc( - [ - yaml.KeyVal("type", "manifest/samples"), - yaml.KeyVal("schema_version", "3"), - # TODO: make the environment configurable to allow other languages - # to use the same basic machinery. 
- yaml.Map( - name="python", - anchor_name="python", - elements=[ - yaml.KeyVal("environment", "python"), - yaml.KeyVal("bin", "python3"), - yaml.KeyVal("base_path", base_path), - yaml.KeyVal("invocation", "'{bin} {path} @args'"), - ], - ), - yaml.Collection( - name="samples", - elements=[ - [ # type: ignore - # Mypy doesn't correctly intuit the type of the - # "region_tag" conditional expression. - yaml.Alias("python"), - yaml.KeyVal("sample", sample["id"]), - yaml.KeyVal("path", - "'{base_path}/%s'" % os.path.relpath(fpath, - base_path)), - (yaml.KeyVal("region_tag", sample["region_tag"]) - if "region_tag" in sample else - yaml.Null), - - ] - for fpath, sample in fpaths_and_samples - ], - ), - ] - ) - - dt = time.gmtime(manifest_time) - # TODO: allow other language configuration - manifest_fname_template = ( - "{api}.{version}.python." - "{year:04d}{month:02d}{day:02d}." - "{hour:02d}{minute:02d}{second:02d}." - "manifest.yaml" - ) - - manifest_fname = manifest_fname_template.format( - api=api_schema.naming.name, - version=api_schema.naming.version, - year=dt.tm_year, - month=dt.tm_mon, - day=dt.tm_mday, - hour=dt.tm_hour, - minute=dt.tm_min, - second=dt.tm_sec, - ) - - return manifest_fname, doc diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index fad0a1503616..5926e32d52f8 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -76,6 +76,10 @@ class InvalidEnumVariant(SampleError): pass +class InvalidSampleFpath(SampleError): + pass + + class CallingForm(Enum): Request = auto() RequestPaged = auto() diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index a8a3daaf3783..9bc4d9e43730 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -28,6 +28,20 @@ 
VALID_CONFIG_TYPE = "com.google.api.codegen.SampleConfigProto" +def coerce_response_name(s: str) -> str: + # In the sample config, the "$resp" keyword is used to refer to the + # item of interest as received by the corresponding calling form. + # For a 'regular', i.e. unary, synchronous, non-long-running method, + # it's the return value; for a server-streaming method, it's the iteration + # variable in the for loop that iterates over the return value, and for + # a long running promise, the user calls result on the method return value to + # resolve the future. + # + # The sample schema uses '$resp' as the special variable, + # but in the samples the 'response' variable is used instead. + return s.replace("$resp", "response") + + def is_valid_sample_cfg( doc, min_version: Tuple[int, int, int] = MIN_SCHEMA_VERSION, diff --git a/packages/gapic-generator/gapic/samplegen_utils/yaml.py b/packages/gapic-generator/gapic/samplegen_utils/yaml.py index 87e505a1d682..b1251e31a949 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/yaml.py +++ b/packages/gapic-generator/gapic/samplegen_utils/yaml.py @@ -107,6 +107,18 @@ def render(self, spaces: int = 0): whitespace = " " * spaces return f"{whitespace}{self.name}:{maybe_anchor}\n{element_str}" + def get(self, key, default=None): + # Use iter([]) instead of a generator expression due to a bug in pytest. + # See https://github.com/pytest-dev/pytest-cov/issues/310 for details. 
+ return next( + iter( + [e.val # type: ignore + for e in self.elements + if e.key == key] # type: ignore + ), + default + ) + @dataclasses.dataclass(frozen=True) class Doc(Element): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 3f55973aa3bf..1ac2ba18469a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -38,7 +38,7 @@ trim_blocks=True, lstrip_blocks=True ) env.filters['snake_case'] = utils.to_snake_case -env.filters['coerce_response_name'] = samplegen.coerce_response_name +env.filters['coerce_response_name'] = gapic_utils.coerce_response_name def test_generate_sample_basic(): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 13a735c5f522..17d78ad29740 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest import yaml from textwrap import dedent import gapic.samplegen_utils.yaml as gapic_yaml -import gapic.samplegen.samplegen as samplegen +import gapic.samplegen_utils.types as types +import gapic.samplegen.manifest as manifest from common_types import DummyApiSchema, DummyNaming @@ -27,9 +29,8 @@ def test_generate_manifest(): "region_tag": "giant_clam_sample"}, } - fname, info = samplegen.generate_manifest( + fname, info = manifest.generate( fpath_to_dummy_sample.items(), - "samples/", DummyApiSchema(naming=DummyNaming(name="Mollusc", version="v1")), # Empirically derived number such that the # corresponding time_struct tests the zero @@ -50,7 +51,7 @@ def test_generate_manifest(): gapic_yaml.KeyVal( "bin", "python3"), gapic_yaml.KeyVal( - "base_path", "samples/"), + "base_path", "samples"), gapic_yaml.KeyVal( "invocation", "'{bin} {path} @args'"), ]), @@ -87,7 +88,7 @@ def test_generate_manifest(): python: &python environment: python bin: python3 - base_path: samples/ + base_path: samples invocation: '{bin} {path} @args' samples: - <<: *python @@ -108,14 +109,14 @@ def test_generate_manifest(): "python": { "environment": "python", "bin": "python3", - "base_path": "samples/", + "base_path": "samples", "invocation": "{bin} {path} @args", }, "samples": [ { "environment": "python", "bin": "python3", - "base_path": "samples/", + "base_path": "samples", "invocation": "{bin} {path} @args", "sample": "squid_sample", "path": "{base_path}/squid_fpath.py", @@ -123,7 +124,7 @@ def test_generate_manifest(): { "environment": "python", "bin": "python3", - "base_path": "samples/", + "base_path": "samples", "invocation": "{bin} {path} @args", "sample": "clam_sample", "path": "{base_path}/clam_fpath.py", @@ -134,3 +135,11 @@ def test_generate_manifest(): parsed_manifest = yaml.safe_load(rendered_yaml) assert parsed_manifest == expected_parsed_manifest + + +def test_generate_manifest_relative_path_sanity(): + with pytest.raises(types.InvalidSampleFpath): + 
manifest.generate( + {"molluscs/squid.py": {"id": "squid_sample"}}.items(), + DummyApiSchema(naming=DummyNaming(name="Mollusc", version="v1")) + ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index b3efc4fa3583..cb617e7116e1 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -22,6 +22,7 @@ import gapic.samplegen.samplegen as samplegen import gapic.samplegen_utils.types as types import gapic.samplegen_utils.yaml as gapic_yaml +from gapic.schema import (api, naming) import gapic.schema.wrappers as wrappers from common_types import (DummyField, DummyMessage, @@ -77,6 +78,25 @@ def test_define_redefinition(): v.validate_response(statements) +def test_preprocess_sample(): + # Verify that the default response is added. + sample = {} + api_schema = api.API( + naming.Naming( + namespace=("mollusc", "cephalopod", "teuthida") + ), + all_protos={}, + ) + + samplegen.Validator.preprocess_sample(sample, api_schema) + + response = sample.get("response") + assert response == [{"print": ["%s", "$resp"]}] + + package_name = sample.get("package_name") + assert package_name == "mollusc-cephalopod-teuthida-" + + def test_define_input_param(): v = samplegen.Validator( DummyMethod(input=message_factory("mollusc.squid.mantle_length"))) @@ -1170,8 +1190,8 @@ def test_validate_request_calling_form(): def test_coerce_response_name(): # Don't really need a test, but it shuts up code coverage. 
- assert samplegen.coerce_response_name("$resp.squid") == "response.squid" - assert samplegen.coerce_response_name("mollusc.squid") == "mollusc.squid" + assert utils.coerce_response_name("$resp.squid") == "response.squid" + assert utils.coerce_response_name("mollusc.squid") == "mollusc.squid" def test_regular_response_type(): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 52b317ddc39d..827b2d9630ec 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -16,6 +16,7 @@ import jinja2 import os.path as path import gapic.samplegen.samplegen as samplegen +import gapic.samplegen_utils.utils as sample_utils import gapic.utils as utils from gapic.samplegen_utils.types import CallingForm @@ -45,7 +46,7 @@ def check_template(template_fragment, expected_output, **kwargs): ) env.filters['snake_case'] = utils.to_snake_case - env.filters['coerce_response_name'] = samplegen.coerce_response_name + env.filters['coerce_response_name'] = sample_utils.coerce_response_name template = env.get_template("template_fragment") text = template.render(**kwargs) From f83c7d7d99fe24ce5dd1b53e004f464c87fc1fc1 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 20 Aug 2019 15:22:28 -0700 Subject: [PATCH 0152/1339] Bump version to 0.12.0 (#156) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d6306bc343b2..500b4043307d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.11.0', + version='0.12.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 02101a70f5a8e37c42dbdc7edd5a295d6487e166 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: 
Wed, 21 Aug 2019 13:36:45 -0700 Subject: [PATCH 0153/1339] Initial sample generation documentation (#184) --- packages/gapic-generator/docker-entrypoint.sh | 4 ++++ .../docs/getting-started/_samplegen.rst | 14 ++++++++++++++ .../docs/getting-started/docker.rst | 16 ++++++++++++++++ .../docs/getting-started/local.rst | 14 ++++++++++++++ 4 files changed, 48 insertions(+) create mode 100644 packages/gapic-generator/docs/getting-started/_samplegen.rst diff --git a/packages/gapic-generator/docker-entrypoint.sh b/packages/gapic-generator/docker-entrypoint.sh index a44d59eaa6d0..b6737d7c8a71 100755 --- a/packages/gapic-generator/docker-entrypoint.sh +++ b/packages/gapic-generator/docker-entrypoint.sh @@ -31,6 +31,10 @@ while [ -n "$1" ]; do elif [[ $1 == --gapic-* ]]; then PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" shift 2 + elif [[ $1 == --samples* ]]; then + # --samples is a special option that all generators should recognize. + PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" + shift 2 else # Ignore anything we do not recognize. shift diff --git a/packages/gapic-generator/docs/getting-started/_samplegen.rst b/packages/gapic-generator/docs/getting-started/_samplegen.rst new file mode 100644 index 000000000000..996a9954df17 --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/_samplegen.rst @@ -0,0 +1,14 @@ +Generating Samples +~~~~~~~~~~~~~~~~~~ + +In addition to generating client libraries, the generator can also create standalone executable code samples. + +The user can specify individual sample config files or can pass paths to directories that contain sample configs. Directories are searched recursively, and any file that is not a sample config is ignored. + +.. + TODO: provide documentation links when they are present + +A full description of the sample config, generated manifest, and generated samples is outside the scope of this documentation. +We will provide links to such documentation when it is ready. 
+ +Samples and manifests are always generated in a 'samples' subdir of the destination directory. diff --git a/packages/gapic-generator/docs/getting-started/docker.rst b/packages/gapic-generator/docs/getting-started/docker.rst index 1513f3237200..e7600974c4b0 100644 --- a/packages/gapic-generator/docs/getting-started/docker.rst +++ b/packages/gapic-generator/docs/getting-started/docker.rst @@ -119,5 +119,21 @@ Perform the actual code generation step with ``docker run``: structure present in the imports of the proto files must be preserved beneath this for compilation to succeed. +.. include:: _samplegen.rst + +.. code-block:: shell + + # Multiple sample paths or directories can be passed simultaneously by duplicating + # the 'samples' option. + # If no 'samples' option is passed, the generator does not generate a manifest. + $ docker run \ + --mount type=bind,source=$(pwd)/path/to/proto/dir,destination=/in/path/to/proto,readonly \ + --mount type=bind,source=$(pwd)/dest/,destination=/out/ \ + --rm \ + --user $UID \ + gcr.io/gapic-images/gapic-generator-python \ + --samples path/to/sample/config.yaml \ + --samples path/to/sample/dir/ + .. include:: _verifying.rst diff --git a/packages/gapic-generator/docs/getting-started/local.rst b/packages/gapic-generator/docs/getting-started/local.rst index c3978fc55cc9..0d2cc8beba01 100644 --- a/packages/gapic-generator/docs/getting-started/local.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -153,4 +153,18 @@ This plugin is invoked under the hood via. the ``--python_gapic_out`` switch. where it expects to find protos, and *order matters*. In this case, the common protos must come first, and then the path to the API being built. +.. include:: _samplegen.rst + +.. code-block:: shell + + # Multiple sample paths or directories can be passed simultaneously by duplicating + # the 'samples' option. Options are comma delimited. + # If no 'samples' option is passed, the generator does not generate a manifest. 
+ $ protoc path/to/api/protos/*.proto \ + --proto_path=../api-common-protos/ \ + --proto_path=. \ + --python_gapic_opt="samples=sample_config.yaml,samples=sample_dir/" \ + --python_gapic_out=/dest/ + + .. include:: _verifying.rst From 2b7cd3392a85892afaf51620af4901459ec1f36d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 22 Aug 2019 13:17:46 -0700 Subject: [PATCH 0154/1339] Bump version to 13 (#187) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 500b4043307d..f24e6f51cd5f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.12.0', + version='0.13.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 7b7979f35d9ded491f9d65bbea069480c2702fad Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 29 Aug 2019 11:17:51 -0700 Subject: [PATCH 0155/1339] Remove support for passing directories as --sample args (#192) --- .../gapic/generator/options.py | 7 +- .../gapic/samplegen_utils/utils.py | 16 +--- .../tests/unit/generator/test_generator.py | 42 ++++------ .../tests/unit/samplegen/test_integration.py | 84 ------------------- 4 files changed, 19 insertions(+), 130 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index dfc5e8b08ade..31669c698a7b 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -101,11 +101,8 @@ def build(cls, opt_string: str) -> 'Options': ), ) - if sample_paths and not answer.sample_configs: - raise types.InvalidConfig( - ("No valid sample config found in any of the following: " - "{}".format(", ".join(sample_paths))) - ) + # Note: if we ever need to recursively check directories for sample configs, + # check that at 
least _one_ config is read in. # If there are any options remaining, then we failed to recognize # them -- complain. diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index 9bc4d9e43730..c3ea9ed44f82 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -105,17 +105,7 @@ def generate_all_sample_fpaths(path: str) -> Generator[str, None, None]: f"No valid sample config in file: {path}") yield path - - elif os.path.isdir(path): - yaml_file_generator = (os.path.join(dirpath, fname) - for dirpath, _, fnames in os.walk(path) - for fname in fnames if fname.endswith(".yaml")) - - for fullpath in yaml_file_generator: - with open(fullpath) as f: - if any(is_valid_sample_cfg(doc) - for doc in yaml.safe_load_all(f.read())): - yield fullpath - + # Note: if we ever need to recursively check directories for sample configs, + # add an "elif os.path.isdir(path)" yielding from os.walk right here. 
else: - raise types.InvalidConfig(f"No such file or directory: {path}") + raise types.InvalidConfig(f"No such file: {path}") diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index b38e3a51954f..b771cd165831 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -24,7 +24,7 @@ from gapic.generator import generator from gapic.generator import options -from gapic.samplegen_utils import yaml +from gapic.samplegen_utils import (types, yaml) from gapic.schema import api from gapic.schema import naming from gapic.schema import wrappers @@ -249,36 +249,22 @@ def test_get_filename_with_proto_and_sub(): def test_parse_sample_paths(fs): - for fpath in [ - 'sample.yaml', - 'sampledir/sample.yaml', - 'other_sampledir/sample.yaml', - ]: - fs.create_file( - fpath, - contents=dedent( - ''' - --- - type: com.google.api.codegen.SampleConfigProto - schema_version: 1.2.0 - samples: - - service: google.cloud.language.v1.LanguageService - ''' - ) + fpath = 'sampledir/sample.yaml' + fs.create_file( + fpath, + contents=dedent( + ''' + --- + type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + ''' ) - - opts = options.Options.build( - ("samples=sample.yaml," - "samples=sampledir/," - "samples=other_sampledir")) - - expected_configs = ( - 'sample.yaml', - 'sampledir/sample.yaml', - 'other_sampledir/sample.yaml', ) - assert opts.sample_configs == expected_configs + with pytest.raises(types.InvalidConfig): + options.Options.build("samples=sampledir/,") @mock.patch( diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 1ac2ba18469a..c0784efa5034 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ 
b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -158,60 +158,6 @@ def test_generate_sample_config_fpaths(fs): assert actual_paths == [expected_path] -def test_generate_sample_config_fpaths_directories(fs): - good_contents = dedent( - ''' - --- - type: com.google.api.codegen.SampleConfigProto - schema_version: 1.2.0 - samples: - - service: google.cloud.language.v1.LanguageService - ''' - ) - # We need some invalid configs in the directory as well to verify that - # they don't cause spurious failures. - bad_contents = 'bad contents' - directory = 'sampleconfig' - for p in [ - "config_1.yaml", - "config_2.yaml", - "config_notes.txt", - "subdir/config_3.yaml", - "subdir/config_4.yaml", - "subdir/nested/config_5.yaml", - ]: - fs.create_file(path.join(directory, p), contents=good_contents) - - for p in [ - "bad_config_1.yaml", - "subdir/bad_config_2.yaml", - "subdir/nested/bad_config_3.yaml", - ]: - fs.create_file(path.join(directory, p), contents=bad_contents) - - expected_paths = [ - "sampleconfig/config_1.yaml", - "sampleconfig/config_2.yaml", - "sampleconfig/subdir/config_3.yaml", - "sampleconfig/subdir/config_4.yaml", - "sampleconfig/subdir/nested/config_5.yaml", - ] - - actual_paths = sorted(gapic_utils.generate_all_sample_fpaths(directory)) - - assert actual_paths == expected_paths - - -def test_generate_sample_config_fpaths_directories_no_configs(fs): - directory = 'sampleconfig' - for f in ['a.yaml', 'b.yaml']: - fs.create_file(path.join(directory, f)) - - actual_paths = list(gapic_utils.generate_all_sample_fpaths(directory)) - - assert not actual_paths - - def test_generate_sample_config_fpaths_not_yaml(fs): expected_path = 'cfgs/sample_config.not_yaml' fs.create_file(expected_path) @@ -296,36 +242,6 @@ def test_generate_sample_config_partial_config(fs): assert actual_paths == expected_paths -def test_generate_sample_config_partial_config_directory(fs): - directory = 'samples' - fpath = path.join(directory, 'sample.yaml') - 
fs.create_file( - fpath, - # Note the typo in the first sample: SampleConfigPronto - contents=dedent( - ''' - --- - # Note: this one is NOT a valid config - type: com.google.api.codegen.SampleConfigPronto - schema_version: 1.2.0 - samples: - - service: google.cloud.language.v1.LanguageService - --- - # Note: this one IS a valid config - type: com.google.api.codegen.SampleConfigProto - schema_version: 1.2.0 - samples: - - service: google.cloud.language.v1.LanguageService - ''' - ) - ) - expected_paths = [fpath] - - actual_paths = list(gapic_utils.generate_all_sample_fpaths(directory)) - - assert actual_paths == expected_paths - - def test_generate_sample_config_fpaths_no_such_file(fs): with pytest.raises(types.InvalidConfig): list(gapic_utils.generate_all_sample_fpaths('cfgs/sample_config.yaml')) From cc149b44980274e9e8d37e95651eb11c8b36b15f Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 5 Sep 2019 11:03:56 -0700 Subject: [PATCH 0156/1339] Fix and add regression test for #199 (#202) Argument strings were getting double quoted. 
--- .../templates/examples/feature_fragments.j2 | 8 +-- .../tests/unit/samplegen/test_integration.py | 55 ++++++++++++++----- .../tests/unit/samplegen/test_template.py | 32 +++++------ 3 files changed, 62 insertions(+), 33 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 85e5c8d9e494..2ca76d40e2ef 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -142,7 +142,7 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {# Python is also responsible for verifying that each input parameter is unique,#} {# no parameter is a reserved keyword #} {% if attr.input_parameter %} -# {{ attr.input_parameter }} = "{{ attr.value }}" +# {{ attr.input_parameter }} = {{ attr.value }} {% if attr.value_is_file %} with open({{ attr.input_parameter }}, "rb") as f: {{ base_name }}["{{ attr.field }}"] = f.read() @@ -241,14 +241,14 @@ def main(): {% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", type=str, - default="{{ attr.value }}") + default={{ attr.value }}) {% do arg_list.append("args." 
+ attr.input_parameter) -%} {% endfor -%} {% endfor %} {% for request in request_block if request.single and request.single.input_parameter -%} - parser.add_argument("-- {{ request.single.input_parameter }}", + parser.add_argument("--{{ request.single.input_parameter }}", type=str, - default="{{ request.single.value }}") + default={{ request.single.value }}) {% endfor %} args = parser.parse_args() diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index c0784efa5034..538cd46c505f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -21,8 +21,8 @@ from gapic.samplegen import samplegen from gapic.samplegen_utils import (types, utils as gapic_utils) -from common_types import (DummyMethod, DummyService, - DummyApiSchema, DummyNaming, message_factory, enum_factory) +from common_types import (DummyField, DummyMessage, DummyMethod, DummyService, + DummyApiSchema, DummyNaming, enum_factory, message_factory) from collections import namedtuple from textwrap import dedent @@ -47,28 +47,51 @@ def test_generate_sample_basic(): # that catch errors in behavior that is emergent from combining smaller features # or in features that are sufficiently small and trivial that it doesn't make sense # to have standalone tests. 
+ input_type = DummyMessage( + type="REQUEST TYPE", + fields={ + "classify_request": DummyField( + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ) + ) + } + ) schema = DummyApiSchema( { "animalia.mollusca.v1.Mollusc": DummyService( { "Classify": DummyMethod( - input=message_factory( - "mollusc.classify_request.video"), + input=input_type, output=message_factory("$resp.taxonomy") ) } ) }, - DummyNaming("molluscs-v1-mollusc")) + DummyNaming("molluscs-v1-mollusc") + ) sample = {"service": "animalia.mollusca.v1.Mollusc", "rpc": "Classify", "id": "mollusc_classify_sync", "description": "Determine the full taxonomy of input mollusc", - "request": [{"field": "classify_request.video", - "value": "path/to/mollusc/video.mkv", - "input_parameter": "video", - "value_is_file": True}], + "request": [ + {"field": "classify_request.video", + "value": "'path/to/mollusc/video.mkv'", + "input_parameter": "video", + "value_is_file": True}, + {"field": "classify_request.location_annotation", + "value": "'New Zealand'", + "input_parameter": "location"} + ], "response": [{"print": ["Mollusc is a %s", "$resp.taxonomy"]}]} sample_str = samplegen.generate_sample( @@ -86,16 +109,19 @@ def test_generate_sample_basic(): # [START %s] -def sample_classify(video): +def sample_classify(video, location): """Determine the full taxonomy of input mollusc""" client = mollusca_v1.MolluscClient() classify_request = {} - # video = "path/to/mollusc/video.mkv" + # video = 'path/to/mollusc/video.mkv' with open(video, "rb") as f: classify_request["video"] = f.read() + # location = 'New Zealand' + classify_request["location_annotation"] = location + response = client.classify(classify_request) @@ -109,10 +135,13 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument("--video", type=str, - 
default="path/to/mollusc/video.mkv") + default='path/to/mollusc/video.mkv') + parser.add_argument("--location", + type=str, + default='New Zealand') args = parser.parse_args() - sample_classify(args.video) + sample_classify(args.video, args.location) if __name__ == "__main__": diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 827b2d9630ec..3b38c8f0d113 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -78,11 +78,11 @@ def test_render_attr_input_parameter(): {{ frags.render_request_attr("squid", request) }} ''', ''' - # species = "Humboldt" + # species = 'Humboldt' squid["species"] = species ''', request=samplegen.AttributeRequestSetup(field="species", - value="Humboldt", + value="'Humboldt'", input_parameter="species")) @@ -93,12 +93,12 @@ def test_render_attr_file(): {{ frags.render_request_attr("classify_mollusc_request", request) }} ''', ''' - # mollusc_video_path = "path/to/mollusc/video.mkv" + # mollusc_video_path = 'path/to/mollusc/video.mkv' with open(mollusc_video_path, "rb") as f: classify_mollusc_request["mollusc_video"] = f.read() ''', request=samplegen.AttributeRequestSetup(field="mollusc_video", - value="path/to/mollusc/video.mkv", + value="'path/to/mollusc/video.mkv'", input_parameter="mollusc_video_path", value_is_file=True) ) @@ -112,22 +112,22 @@ def test_render_request_basic(): ''', ''' cephalopod = {} - # cephalopod_mass = "10 kg" + # cephalopod_mass = '10 kg' cephalopod["mantle_mass"] = cephalopod_mass - # photo_path = "path/to/cephalopod/photo.jpg" + # photo_path = 'path/to/cephalopod/photo.jpg' with open(photo_path, "rb") as f: cephalopod["photo"] = f.read() cephalopod["order"] = Molluscs.Cephalopoda.Coleoidea gastropod = {} - # gastropod_mass = "1 kg" + # gastropod_mass = '1 kg' gastropod["mantle_mass"] = gastropod_mass gastropod["order"] = 
Molluscs.Gastropoda.Pulmonata - # movie_path = "path/to/gastropod/movie.mkv" + # movie_path = 'path/to/gastropod/movie.mkv' with open(movie_path, "rb") as f: gastropod["movie"] = f.read() @@ -136,12 +136,12 @@ def test_render_request_basic(): body=[ samplegen.AttributeRequestSetup( field="mantle_mass", - value="10 kg", + value="'10 kg'", input_parameter="cephalopod_mass" ), samplegen.AttributeRequestSetup( field="photo", - value="path/to/cephalopod/photo.jpg", + value="'path/to/cephalopod/photo.jpg'", input_parameter="photo_path", value_is_file=True ), @@ -154,7 +154,7 @@ def test_render_request_basic(): body=[ samplegen.AttributeRequestSetup( field="mantle_mass", - value="1 kg", + value="'1 kg'", input_parameter="gastropod_mass" ), samplegen.AttributeRequestSetup( @@ -163,7 +163,7 @@ def test_render_request_basic(): ), samplegen.AttributeRequestSetup( field="movie", - value="path/to/gastropod/movie.mkv", + value="'path/to/gastropod/movie.mkv'", input_parameter="movie_path", value_is_file=True ) @@ -761,10 +761,10 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument("--order", type=str, - default="coleoidea") + default='coleoidea') parser.add_argument("--mass", type=str, - default="60kg") + default='60kg') args = parser.parse_args() sample_list_molluscs(args.order, args.mass) @@ -778,12 +778,12 @@ def main(): body=[ samplegen.AttributeRequestSetup( field="list_molluscs.order", - value="coleoidea", + value="'coleoidea'", input_parameter="order" ), samplegen.AttributeRequestSetup( field="list_molluscs.mass", - value="60kg", + value="'60kg'", input_parameter="mass") ], single=None), From ec01321068520780420acd539708b10dcc41f0a1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 9 Sep 2019 10:58:54 -0700 Subject: [PATCH 0157/1339] [fix] Generate Map fields properly. (#203) This PR fixes a bug whereby maps got generated as their underlying entry messages and then a repeated field. 
--- .../gapic-generator/gapic/schema/wrappers.py | 10 ++++++++++ .../$name_$version/$sub/types/_message.py.j2 | 6 +++++- .../tests/unit/schema/wrappers/test_message.py | 17 +++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 5e8b0eabf84d..848193d21acb 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -69,6 +69,11 @@ def is_primitive(self) -> bool: """Return True if the field is a primitive, False otherwise.""" return isinstance(self.type, PrimitiveType) + @property + def map(self) -> bool: + """Return True if this field is a map, False otherwise.""" + return bool(self.repeated and self.message and self.message.map) + @utils.cached_property def mock_value(self) -> str: """Return a repr of a valid, usually truthy mock value.""" @@ -213,6 +218,11 @@ def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: answer.append(field.type) return tuple(answer) + @property + def map(self) -> bool: + """Return True if the given message is a map, False otherwise.""" + return self.message_pb.options.map_entry + @property def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index f00123d6d9b4..df068621b36e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -17,14 +17,18 @@ class {{ message.name }}({{ p }}.Message): {# Iterate over nested messages. 
-#} {% for submessage in message.nested_messages.values() -%} + {% if not submessage.map -%} {% with message = submessage %}{% filter indent %} {%- include '$namespace/$name_$version/$sub/types/_message.py.j2' %} {% endfilter %}{% endwith %} + {% endif %} {% endfor -%} {# Iterate over fields. -#} {% for field in message.fields.values() -%} - {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field({{ p }}.{{ field.proto_type }}, number={{ field.number }} + {{ field.name }} = {{ p }}.{% if field.map %}Map{% elif field.repeated %}Repeated{% endif %}Field( + {%- if field.map %}{{ p }}.{{ field.message.fields['key'].proto_type }}, {% endif %} + {{- p }}.{{ field.proto_type }}, number={{ field.number }} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 14c7d001bec0..f8d514b9ceab 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -124,12 +124,29 @@ def test_get_field_nonterminal_repeated_error(): assert outer.get_field('inner', 'one') == inner_fields[1] +def test_field_map(): + # Create an Entry message. 
+ entry_msg = make_message( + name='FooEntry', + fields=( + make_field(name='key', type=9), + make_field(name='value', type=9), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + entry_field = make_field('foos', message=entry_msg, repeated=True) + assert entry_msg.map + assert entry_field.map + + def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, + options: descriptor_pb2.MethodOptions = None, ) -> wrappers.MessageType: message_pb = descriptor_pb2.DescriptorProto( name=name, field=[i.field_pb for i in fields], + options=options, ) return wrappers.MessageType( message_pb=message_pb, From 5f6155a9311f044b3e8e53a7d54f374be6c409b8 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 9 Sep 2019 11:42:52 -0700 Subject: [PATCH 0158/1339] [feat] Export enums from the top-level API namespace. (#204) Fixes #185. --- .../$name_$version/$sub/__init__.py.j2 | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 index f5534412abce..61b21978ea50 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 @@ -14,17 +14,21 @@ from .services.{{ service.name|snake_case }} import {{ service.name }} {% endfor -%} {% endfilter -%} -{# Import messages from each proto. +{# Import messages and enums from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. 
-#} {% filter sort_lines -%} -{% for proto in api.protos.values()|sort(attribute='module_name') +{% for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values()|sort(attribute='name') -%} +{% for message in proto.messages.values() -%} from .types.{{ proto.module_name }} import {{ message.name }} -{% endfor %}{% endfor -%} +{% endfor -%} +{% for enum in proto.enums.values() -%} +from .types.{{ proto.module_name }} import {{ enum.name }} +{% endfor -%} +{% endfor -%} {% endfilter %} {# Define __all__. @@ -36,15 +40,19 @@ __all__ = ( {%- for subpackage in api.subpackages.keys() %} '{{ subpackage }}', {%- endfor %} - {%- for service in api.services.values()|sort(attribute='name') + {%- for service in api.services.values() if service.meta.address.subpackage == api.subpackage_view %} '{{ service.name }}', {%- endfor %} - {%- for proto in api.protos.values()|sort(attribute='module_name') + {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.messages.values()|sort(attribute='name') %} + {%- for message in proto.messages.values() %} '{{ message.name }}', - {%- endfor %}{% endfor %} + {%- endfor %} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}', + {%- endfor %} + {%- endfor %} {%- endfilter %} ) {% endblock %} From 676264701fb74a2326a767df720b4055b9fcf5e4 Mon Sep 17 00:00:00 2001 From: Kent Wang Date: Tue, 1 Oct 2019 06:24:52 +0800 Subject: [PATCH 0159/1339] Fix map field (#206) --- .../$name_$version/$sub/types/_message.py.j2 | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index df068621b36e..c9e464e117e0 100644 --- 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -26,11 +26,20 @@ class {{ message.name }}({{ p }}.Message): {# Iterate over fields. -#} {% for field in message.fields.values() -%} - {{ field.name }} = {{ p }}.{% if field.map %}Map{% elif field.repeated %}Repeated{% endif %}Field( - {%- if field.map %}{{ p }}.{{ field.message.fields['key'].proto_type }}, {% endif %} + {% if field.map -%} + {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] -%} + {{ field.name }} = {{ p }}.MapField( + {{- p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }} + {%- if value_field.enum or value_field.message %}, + {{ value_field.proto_type.lower() }}={{ value_field.type.ident.rel(message.ident) }}, + {% endif %}) + {% endwith -%} + {% else -%} + {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( {{- p }}.{{ field.proto_type }}, number={{ field.number }} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) + {% endif -%} {% endfor -%} {{ '\n\n' }} From 3703e7fa8b5808c6c0f4004eed88d4b5a292314b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 1 Oct 2019 15:34:06 -0700 Subject: [PATCH 0160/1339] Correctly find the sample template when run via protoc (#198) Add a sample_template parameter to sample generation Adjust tests to pass the parameter --- .../gapic/generator/generator.py | 16 +++-- .../gapic/samplegen/manifest.py | 4 +- .../gapic/samplegen/samplegen.py | 37 ++++++------ .../templates/examples/feature_fragments.j2 | 14 ++--- .../gapic/templates/examples/sample.py.j2 | 10 ++-- .../tests/unit/generator/test_generator.py | 55 ++++++++++++++---- .../tests/unit/samplegen/test_integration.py | 58 +++++++++++++------ 7 files changed, 130 insertions(+), 64 
deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 796021c06633..b58271e09091 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -97,14 +97,18 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: api_schema=api_schema, )) - output_files.update(self._generate_samples_and_manifest(api_schema)) + output_files.update(self._generate_samples_and_manifest( + api_schema, + self._env.get_template(sample_templates[0]), + )) # Return the CodeGeneratorResponse output. return CodeGeneratorResponse(file=[i for i in output_files.values()]) def _generate_samples_and_manifest( - self, - api_schema: api.API + self, + api_schema: api.API, + sample_template: jinja2.Template, ) -> Dict[str, CodeGeneratorResponse.File]: """Generate samples and samplegen manifest for the API. @@ -152,7 +156,11 @@ def _generate_samples_and_manifest( str(spec).encode('utf8')).hexdigest()[:8] spec["id"] += f"_{spec_hash}" - sample = samplegen.generate_sample(spec, self._env, api_schema) + sample = samplegen.generate_sample( + spec, + api_schema, + sample_template, + ) fpath = spec["id"] + ".py" fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = (spec, diff --git a/packages/gapic-generator/gapic/samplegen/manifest.py b/packages/gapic-generator/gapic/samplegen/manifest.py index 871e82737226..fcd29609c1d6 100644 --- a/packages/gapic-generator/gapic/samplegen/manifest.py +++ b/packages/gapic-generator/gapic/samplegen/manifest.py @@ -84,7 +84,7 @@ def transform_path(fpath): yaml.Collection( name="samples", elements=[ - [ # type: ignore + [ # Mypy doesn't correctly intuit the type of the # "region_tag" conditional expression. 
yaml.Alias(environment.anchor_name or ""), @@ -92,7 +92,7 @@ def transform_path(fpath): yaml.KeyVal( "path", transform_path(fpath) ), - (yaml.KeyVal("region_tag", sample["region_tag"]) + (yaml.KeyVal("region_tag", sample["region_tag"]) # type: ignore if "region_tag" in sample else yaml.Null), ] diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 2a23e9758085..f07a3a484d9e 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -21,7 +21,7 @@ import time from gapic.samplegen_utils import types -from gapic.schema import (api, wrappers) +from gapic.schema import wrappers from collections import (defaultdict, namedtuple, ChainMap as chainmap) from typing import (ChainMap, Dict, List, Mapping, Optional, Tuple) @@ -139,8 +139,8 @@ def __init__(self, method: wrappers.Method): # and whether it's an enum or a message or a primitive type. # The method call response isn't a field, so construct an artificial # field that wraps the response. - { # type: ignore - "$resp": MockField(response_type, False) + { + "$resp": MockField(response_type, False) # type: ignore } ) @@ -481,8 +481,10 @@ def _validate_format(self, body: List[str]): num_prints = fmt_str.count("%s") if num_prints != len(body) - 1: raise types.MismatchedFormatSpecifier( - "Expected {} expresssions in format string but received {}".format( - num_prints, len(body) - 1 + "Expected {} expresssions in format string '{}' but found {}".format( + num_prints, + fmt_str, + len(body) - 1 ) ) @@ -502,7 +504,7 @@ def _validate_define(self, body: str): """ # Note: really checking for safety would be equivalent to # re-implementing the python interpreter. 
- m = re.match(r"^([a-zA-Z]\w*)=([^=]+)$", body) + m = re.match(r"^([a-zA-Z_]\w*) *= *([^=]+)$", body) if not m: raise types.BadAssignment(f"Bad assignment statement: {body}") @@ -654,27 +656,23 @@ def _validate_loop(self, loop): } -def generate_sample(sample, - env: jinja2.environment.Environment, - api_schema: api.API, - template_name: str = DEFAULT_TEMPLATE_NAME) -> str: +def generate_sample( + sample, + api_schema, + sample_template: jinja2.Template +) -> str: """Generate a standalone, runnable sample. Rendering and writing the rendered output is left for the caller. Args: sample (Any): A definition for a single sample generated from parsed yaml. - env (jinja2.environment.Environment): The jinja environment used to generate - the filled template for the sample. api_schema (api.API): The schema that defines the API to which the sample belongs. - template_name (str): An optional override for the name of the template - used to generate the sample. + sample_template (jinja2.Template): The template representing a generic sample. Returns: str: The rendered sample. 
""" - sample_template = env.get_template(template_name) - service_name = sample["service"] service = api_schema.services.get(service_name) if not service: @@ -701,7 +699,12 @@ def generate_sample(sample, return sample_template.render( file_header=FILE_HEADER, sample=sample, - imports=[], + imports=[ + "from google import auth", + "from google.auth import credentials", + ], calling_form=calling_form, calling_form_enum=types.CallingForm, + api=api_schema, + service=service, ) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 2ca76d40e2ef..047ef711553a 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -191,18 +191,18 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params(sample.request) }}) {# it's just easier to set up client side streaming and other things from outside this macro. 
#} {% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, response_statements ) %} {% if calling_form == calling_form_enum.Request %} -response = {{ method_invocation_text }} +response = {{ method_invocation_text|trim }} {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.RequestPagedAll %} -page_result = {{ method_invocation_text }} +page_result = {{ method_invocation_text|trim }} for response in page_result: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.RequestPaged %} -page_result = {{ method_invocation_text}} +page_result = {{ method_invocation_text|trim }} for page in page_result.pages(): for response in page: {% for statement in response_statements %} @@ -210,13 +210,13 @@ for page in page_result.pages(): {% endfor %} {% elif calling_form in [calling_form_enum.RequestStreamingServer, calling_form_enum.RequestStreamingBidi] %} -stream = {{ method_invocation_text }} +stream = {{ method_invocation_text|trim }} for response in stream: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.LongRunningRequestPromise %} -operation = {{ method_invocation_text }} +operation = {{ method_invocation_text|trim }} print("Waiting for operation to complete...") @@ -237,8 +237,8 @@ def main(): parser = argparse.ArgumentParser() {% with arg_list = [] %} - {% for request in request_block if request.body -%} - {% for attr in request.body if attr.input_parameter %} +{% for request in request_block if request.body -%} +{% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default={{ attr.value }}) diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 
b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index dff95941df5a..e0585289214b 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -20,7 +20,7 @@ {# callingFormEnum #} {# Note: this sample template is WILDLY INACCURATE AND INCOMPLETE #} {# It does not correctly enums, unions, top level attributes, or various other things #} -{% import "feature_fragments.j2" as frags %} +{% import "examples/feature_fragments.j2" as frags %} {{ frags.sample_header(file_header, sample, calling_form) }} # [START {{ sample.id }}] @@ -28,14 +28,16 @@ {% for import_statement in imports %} {{ import_statement }} {% endfor %} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.name }} {# also need calling form #} def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input_params(sample.request)|trim -}}): """{{ sample.description }}""" - client = {{ sample.service.split(".")[-3:-1]| - map("lower")| - join("_") }}.{{ sample.service.split(".")[-1] }}Client() + client = {{ service.name }}( + credentials=credentials.AnonymousCredentials(), + transport="grpc", + ) {{ frags.render_request_setup(sample.request)|indent }} {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index b771cd165831..5a8729ecf5e1 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -42,12 +42,15 @@ def test_custom_template_directory(): def test_get_response(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = 
['foo/bar/baz.py.j2'] + lt.return_value = ['foo/bar/baz.py.j2', 'molluscs/squid/sample.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('I am a template result.') cgr = g.get_response(api_schema=make_api()) lt.assert_called_once() - gt.assert_called_once() + gt.assert_has_calls([ + mock.call('foo/bar/baz.py.j2'), + mock.call('molluscs/squid/sample.py.j2') + ]) assert len(cgr.file) == 1 assert cgr.file[0].name == 'foo/bar/baz.py' assert cgr.file[0].content == 'I am a template result.\n' @@ -56,24 +59,34 @@ def test_get_response(): def test_get_response_ignores_empty_files(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/bar/baz.py.j2'] + lt.return_value = ['foo/bar/baz.py.j2', 'molluscs/squid/sample.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('# Meaningless comment') cgr = g.get_response(api_schema=make_api()) lt.assert_called_once() - gt.assert_called_once() + gt.assert_has_calls([ + mock.call('foo/bar/baz.py.j2'), + mock.call('molluscs/squid/sample.py.j2') + ]) assert len(cgr.file) == 0 def test_get_response_ignores_private_files(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/bar/baz.py.j2', 'foo/bar/_base.py.j2'] + lt.return_value = [ + 'foo/bar/baz.py.j2', + 'foo/bar/_base.py.j2', + 'molluscs/squid/sample.py.j2', + ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('I am a template result.') cgr = g.get_response(api_schema=make_api()) lt.assert_called_once() - gt.assert_called_once() + gt.assert_has_calls([ + mock.call('foo/bar/baz.py.j2'), + mock.call('molluscs/squid/sample.py.j2') + ]) assert len(cgr.file) == 1 assert cgr.file[0].name == 'foo/bar/baz.py' assert cgr.file[0].content == 'I am a template result.\n' @@ -82,7 +95,10 @@ def 
test_get_response_ignores_private_files(): def test_get_response_fails_invalid_file_paths(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/bar/$service/$proto/baz.py.j2'] + lt.return_value = [ + 'foo/bar/$service/$proto/baz.py.j2', + 'molluscs/squid/sample.py.j2', + ] with pytest.raises(ValueError) as ex: g.get_response(api_schema=make_api()) @@ -93,7 +109,10 @@ def test_get_response_fails_invalid_file_paths(): def test_get_response_enumerates_services(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/$service/baz.py.j2'] + lt.return_value = [ + 'foo/$service/baz.py.j2', + 'molluscs/squid/sample.py.j2', + ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('Service: {{ service.name }}') cgr = g.get_response(api_schema=make_api(make_proto( @@ -112,7 +131,10 @@ def test_get_response_enumerates_services(): def test_get_response_enumerates_proto(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/$proto.py.j2'] + lt.return_value = [ + 'foo/$proto.py.j2', + 'molluscs/squid/sample.py.j2', + ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('Proto: {{ proto.module_name }}') cgr = g.get_response(api_schema=make_api( @@ -146,6 +168,7 @@ def test_get_response_divides_subpackages(): lt.return_value = [ 'foo/$sub/types/$proto.py.j2', 'foo/$sub/services/$service.py.j2', + 'molluscs/squid/sample.py.j2', ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template(""" @@ -274,7 +297,11 @@ def test_parse_sample_paths(fs): @mock.patch( 'time.gmtime', ) -def test_samplegen_config_to_output_files(mock_gmtime, mock_generate_sample, fs): +def test_samplegen_config_to_output_files( + mock_gmtime, + mock_generate_sample, + fs, +): # 
These time values are nothing special, # they just need to be deterministic. returner = mock.MagicMock() @@ -303,13 +330,14 @@ def test_samplegen_config_to_output_files(mock_gmtime, mock_generate_sample, fs) ) ) - mock_generate_sample - g = generator.Generator( options.Options.build( 'samples=samples.yaml', ) ) + # Need to have the sample template visible to the generator. + g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) actual_response = g.get_response(api_schema) expected_response = CodeGeneratorResponse( @@ -393,6 +421,9 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): ) ) g = generator.Generator(options.Options.build('samples=samples.yaml')) + # Need to have the sample template visible to the generator. + g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) actual_response = g.get_response(api_schema) expected_response = CodeGeneratorResponse( diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 538cd46c505f..8efc46810863 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -20,6 +20,7 @@ from gapic.samplegen import samplegen from gapic.samplegen_utils import (types, utils as gapic_utils) +from gapic.schema import (naming, wrappers) from common_types import (DummyField, DummyMessage, DummyMethod, DummyService, DummyApiSchema, DummyNaming, enum_factory, message_factory) @@ -32,7 +33,8 @@ loader=jinja2.FileSystemLoader( searchpath=path.realpath(path.join(path.dirname(__file__), "..", "..", "..", - "gapic", "templates", "examples"))), + "gapic", "templates") + )), undefined=jinja2.StrictUndefined, extensions=["jinja2.ext.do"], trim_blocks=True, lstrip_blocks=True @@ -65,18 
+67,22 @@ def test_generate_sample_basic(): ) } ) - schema = DummyApiSchema( - { - "animalia.mollusca.v1.Mollusc": DummyService( - { - "Classify": DummyMethod( - input=input_type, - output=message_factory("$resp.taxonomy") - ) - } + + api_naming = naming.Naming( + name="MolluscClient", namespace=("molluscs", "v1")) + service = wrappers.Service( + service_pb=namedtuple('service_pb', ['name'])('MolluscClient'), + methods={ + "Classify": DummyMethod( + input=input_type, + output=message_factory("$resp.taxonomy") ) - }, - DummyNaming("molluscs-v1-mollusc") + } + ) + + schema = DummyApiSchema( + services={"animalia.mollusca.v1.Mollusc": service}, + naming=api_naming, ) sample = {"service": "animalia.mollusca.v1.Mollusc", @@ -95,7 +101,10 @@ def test_generate_sample_basic(): "response": [{"print": ["Mollusc is a %s", "$resp.taxonomy"]}]} sample_str = samplegen.generate_sample( - sample, env, schema) + sample, + schema, + env.get_template('examples/sample.py.j2') + ) sample_id = ("mollusc_classify_sync") expected_str = '''# TODO: add a copyright @@ -104,15 +113,21 @@ def test_generate_sample_basic(): # DO NOT EDIT! 
This is a generated sample ("request", "%s") # # To install the latest published package dependency, execute the following: -# pip3 install molluscs-v1-mollusc +# pip3 install molluscs-v1-molluscclient # [START %s] +from google import auth +from google.auth import credentials +from molluscs.v1.molluscclient.services.mollusc_client import MolluscClient def sample_classify(video, location): """Determine the full taxonomy of input mollusc""" - client = mollusca_v1.MolluscClient() + client = MolluscClient( + credentials=credentials.AnonymousCredentials(), + transport="grpc", + ) classify_request = {} # video = 'path/to/mollusc/video.mkv' @@ -124,7 +139,6 @@ def sample_classify(video, location): response = client.classify(classify_request) - print("Mollusc is a {}".format(response.taxonomy)) # [END %s] @@ -156,7 +170,11 @@ def test_generate_sample_service_not_found(): sample = {"service": "Mollusc"} with pytest.raises(types.UnknownService): - samplegen.generate_sample(sample, env, schema) + samplegen.generate_sample( + sample, + schema, + env.get_template('examples/sample.py.j2'), + ) def test_generate_sample_rpc_not_found(): @@ -165,7 +183,11 @@ def test_generate_sample_rpc_not_found(): sample = {"service": "Mollusc", "rpc": "Classify"} with pytest.raises(types.RpcMethodNotFound): - list(samplegen.generate_sample(sample, env, schema)) + list(samplegen.generate_sample( + sample, + schema, + env.get_template('examples/sample.py.j2')), + ) def test_generate_sample_config_fpaths(fs): From be39a9203c51b8a4710c19db77bae80c35427fda Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 2 Oct 2019 13:22:32 -0700 Subject: [PATCH 0161/1339] Duplicate samples generate an error (#207) Note: distinct samples with duplicate IDs (provided or derived) are disambigutated. Duplicate samples, i.e. those that are the result of a copy/paste, are the only ones that generate an error. 
Fix for #201 --- .../gapic/generator/generator.py | 29 ++++++++++------ .../gapic/samplegen_utils/types.py | 4 +++ .../gapic/samplegen_utils/utils.py | 2 +- .../tests/unit/generator/test_generator.py | 34 +++++++++++++++++-- .../tests/unit/samplegen/test_integration.py | 1 + 5 files changed, 56 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index b58271e09091..bc3dd09e428d 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -21,7 +21,7 @@ from collections import (OrderedDict, defaultdict) from gapic.samplegen_utils.utils import ( coerce_response_name, is_valid_sample_cfg) -from gapic.samplegen_utils.types import InvalidConfig +from gapic.samplegen_utils.types import (InvalidConfig, DuplicateSample) from gapic.samplegen import (manifest, samplegen) from gapic.generator import options from gapic.generator import formatter @@ -118,7 +118,12 @@ def _generate_samples_and_manifest( Returns: Dict[str, CodeGeneratorResponse.File]: A dict mapping filepath to rendered file. """ - id_to_samples: DefaultDict[str, List[Any]] = defaultdict(list) + # The two-layer data structure lets us do two things: + # * detect duplicate samples, which is an error + # * detect distinct samples with the same ID, which are disambiguated + id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict( + dict) + for config_fpath in self._sample_configs: with open(config_fpath) as f: configs = yaml.safe_load_all(f.read()) @@ -137,23 +142,27 @@ def _generate_samples_and_manifest( # # Ideally the sample author should pick a descriptive, unique ID, # but this may be impractical and can be error-prone. 
+ spec_hash = sha256(str(spec).encode('utf8')).hexdigest()[:8] sample_id = (spec.get("id") or spec.get("region_tag") - or sha256(str(spec).encode('utf8')).hexdigest()[:8]) - + or spec_hash) spec["id"] = sample_id - id_to_samples[sample_id].append(spec) + + hash_to_spec = id_to_hash_to_spec[sample_id] + if spec_hash in hash_to_spec: + raise DuplicateSample( + f"Duplicate samplegen spec found: {spec}") + + hash_to_spec[spec_hash] = spec out_dir = "samples" fpath_to_spec_and_rendered = {} - for samples in id_to_samples.values(): - for spec in samples: - id_is_unique = len(samples) == 1 + for hash_to_spec in id_to_hash_to_spec.values(): + for spec_hash, spec in hash_to_spec.items(): + id_is_unique = len(hash_to_spec) == 1 # The ID is used to generate the file name and by sample tester # to link filenames to invoked samples. It must be globally unique. if not id_is_unique: - spec_hash = sha256( - str(spec).encode('utf8')).hexdigest()[:8] spec["id"] += f"_{spec_hash}" sample = samplegen.generate_sample( diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index 5926e32d52f8..80d85516af52 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -80,6 +80,10 @@ class InvalidSampleFpath(SampleError): pass +class DuplicateSample(SampleError): + pass + + class CallingForm(Enum): Request = auto() RequestPaged = auto() diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index c3ea9ed44f82..6ce31c4001eb 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -67,7 +67,7 @@ def parse_version(version_str: str) -> Tuple[int, ...]: # Yaml may return a dict, a list, or a str isinstance(doc, dict) and doc.get("type") == VALID_CONFIG_TYPE - and parse_version(doc.get(version_token, "")) 
>= MIN_SCHEMA_VERSION + and parse_version(doc.get(version_token, "")) >= min_version and doc.get("samples") ) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 5a8729ecf5e1..660bdd611029 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -433,7 +433,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): content="\n", ), CodeGeneratorResponse.File( - name="samples/squid_sample_c8014108.py", + name="samples/squid_sample_55051b38.py", content="\n", ), CodeGeneratorResponse.File( @@ -457,8 +457,8 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): path: '{base_path}/squid_sample_91a465c6.py' region_tag: humboldt_tag - <<: *python - sample: squid_sample_c8014108 - path: '{base_path}/squid_sample_c8014108.py' + sample: squid_sample_55051b38 + path: '{base_path}/squid_sample_55051b38.py' region_tag: squid_sample - <<: *python sample: 157884ee @@ -471,6 +471,34 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): assert actual_response == expected_response +def test_generator_duplicate_samples(fs): + config_fpath = "samples.yaml" + fs.create_file( + config_fpath, + contents=dedent( + ''' + # Note: the samples are duplicates. 
+ type: com.google.api.codegen.SampleConfigProto + schema_version: 1.2.0 + samples: + - id: squid_sample + region_tag: humboldt_tag + rpc: get_squid + - id: squid_sample + region_tag: humboldt_tag + rpc: get_squid + ''' + ) + ) + + generator = make_generator('samples=samples.yaml') + generator._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) + + with pytest.raises(types.DuplicateSample): + generator.get_response(api_schema=api_schema) + + def make_generator(opts_str: str = '') -> generator.Generator: return generator.Generator(options.Options.build(opts_str)) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 8efc46810863..6770b303cc86 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -273,6 +273,7 @@ def test_generate_sample_config_partial_config(fs): contents=dedent( ''' --- + # Note: not a valid config because of the type. type: com.google.api.codegen.SampleConfigPronto schema_version: 1.2.0 samples: From 57b706b12ab2a4c047ac1a35460de118eab8a51b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 10 Oct 2019 14:12:40 -0700 Subject: [PATCH 0162/1339] Add tests and impl for request messages with primitive fields (#214) Fields that are strings, bools, ints, or floats are permitted. Mismatch between the type of the assigned value and type of the field is NOT handled by samplegen yet. 
Includes a fix for #211 --- .../gapic/samplegen/samplegen.py | 12 +- .../gapic/samplegen_utils/types.py | 4 + .../templates/examples/feature_fragments.j2 | 6 +- .../tests/unit/samplegen/common_types.py | 8 +- .../tests/unit/samplegen/test_samplegen.py | 103 +++++++++++++++++- 5 files changed, 119 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index f07a3a484d9e..2dde7e0947cc 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -280,14 +280,18 @@ def validate_and_transform_request(self, # and requires less munging of the assigned value duplicate["value"] = f"'{val}'" break + elif attr.is_primitive: + # Only valid if this is the last attribute in the chain. + break else: - raise TypeError + raise TypeError( + f"Could not handle attribute '{attr_name}' of type: {attr.type}") if i != len(attr_chain) - 1: - # We broke out of the loop after processing an enum. + # We broke out of the loop after processing an enum or a primitive. 
extra_attrs = ".".join(attr_chain[i:]) - raise types.InvalidEnumVariant( - f"Attempted to reference attributes of enum value: '{extra_attrs}'") + raise types.NonTerminalPrimitiveOrEnum( + f"Attempted to reference attributes of enum value or primitive type: '{extra_attrs}'") if len(attr_chain) > 1: duplicate["field"] = ".".join(attr_chain[1:]) diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index 80d85516af52..3da7eb611952 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -76,6 +76,10 @@ class InvalidEnumVariant(SampleError): pass +class NonTerminalPrimitiveOrEnum(SampleError): + pass + + class InvalidSampleFpath(SampleError): pass diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 047ef711553a..6e4b83b41270 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -236,7 +236,7 @@ def main(): import argparse parser = argparse.ArgumentParser() -{% with arg_list = [] %} +{% with arg_list = [] -%} {% for request in request_block if request.body -%} {% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", @@ -244,8 +244,8 @@ def main(): default={{ attr.value }}) {% do arg_list.append("args." 
+ attr.input_parameter) -%} {% endfor -%} -{% endfor %} -{% for request in request_block if request.single and request.single.input_parameter -%} +{% endfor -%} +{% for request in request_block if request.single and request.single.input_parameter %} parser.add_argument("--{{ request.single.input_parameter }}", type=str, default={{ request.single.value }}) diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index e4ae881a4198..a6c1f0571010 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -41,7 +41,13 @@ DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) DummyField = namedtuple("DummyField", - ["message", "enum", "repeated", "field_pb", "meta"]) + ["message", + "enum", + "repeated", + "field_pb", + "meta", + "is_primitive", + "type"]) DummyField.__new__.__defaults__ = (False,) * len(DummyField._fields) DummyService = namedtuple("DummyService", ["methods"]) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index cb617e7116e1..cb2b86d6c060 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -15,14 +15,14 @@ import yaml import pytest -from typing import (TypeVar) -from collections import namedtuple +from typing import (TypeVar, Sequence) +from collections import (OrderedDict, namedtuple) from google.protobuf import descriptor_pb2 import gapic.samplegen.samplegen as samplegen import gapic.samplegen_utils.types as types import gapic.samplegen_utils.yaml as gapic_yaml -from gapic.schema import (api, naming) +from gapic.schema import (api, metadata, naming) import gapic.schema.wrappers as wrappers from common_types import (DummyField, DummyMessage, @@ -1555,12 +1555,103 @@ def 
test_validate_request_enum_invalid_value(): def test_validate_request_enum_not_last_attr(): - enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) - request_type = message_factory("mollusc.subclass", enum=enum) + # enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) + # field = make_field(name="subclass", enum=enum) + request_type = make_message( + name="mollusc", + fields=[ + make_field( + name="subclass", + enum=enum_factory( + "subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"] + ) + ) + ] + ) + + # request_type = message_factory("mollusc.subclass", enum=enum) v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), input=request_type)) - with pytest.raises(types.InvalidEnumVariant): + with pytest.raises(types.NonTerminalPrimitiveOrEnum): v.validate_and_transform_request( types.CallingForm.Request, [{"field": "subclass.order", "value": "COLEOIDEA"}] ) + + +def test_validate_request_primitive_field(): + field = make_field(name="species", type="TYPE_STRING") + request_type = make_message(name="request", fields=[field]) + + request = [{"field": "species", "value": "Architeuthis dux"}] + v = samplegen.Validator( + DummyMethod( + output=message_factory("mollusc_result"), + input=request_type + ) + ) + + actual = v.validate_and_transform_request(types.CallingForm.Request, + request) + expected = [ + samplegen.TransformedRequest( + base="species", + body=None, + single=samplegen.AttributeRequestSetup( + value="Architeuthis dux" + ) + ) + ] + + assert actual == expected + + +def test_validate_request_non_terminal_primitive_field(): + field = make_field(name="species", type="TYPE_STRING") + request_type = make_message(name="request", fields=[field]) + + request = [{"field": "species.nomenclature", "value": "Architeuthis dux"}] + v = samplegen.Validator( + DummyMethod( + output=message_factory("mollusc_result"), + input=request_type + ) + ) + + with pytest.raises(types.NonTerminalPrimitiveOrEnum): + 
v.validate_and_transform_request(types.CallingForm.Request, + request) + + +def make_message(name: str, package: str = 'animalia.mollusca.v1', module: str = 'cephalopoda', + fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, + options: descriptor_pb2.MethodOptions = None, + ) -> wrappers.MessageType: + message_pb = descriptor_pb2.DescriptorProto( + name=name, + field=[i.field_pb for i in fields], + options=options, + ) + return wrappers.MessageType( + message_pb=message_pb, + fields=OrderedDict((i.name, i) for i in fields), + nested_messages={}, + nested_enums={}, + meta=meta or metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(package.split('.')), + module=module, + )), + ) + + +# Borrowed from test_field.py +def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: + T = descriptor_pb2.FieldDescriptorProto.Type + kwargs.setdefault('name', 'my_field') + kwargs.setdefault('number', 1) + kwargs.setdefault('type', T.Value('TYPE_BOOL')) + if isinstance(kwargs['type'], str): + kwargs['type'] = T.Value(kwargs['type']) + field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) + return wrappers.Field(field_pb=field_pb, message=message, enum=enum) From 0e1149bffd2d3af9fea9c181465d0d68a92b27f0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 16 Oct 2019 14:51:27 -0700 Subject: [PATCH 0163/1339] [fix] Be tolerant of whitespace in `google.api.method_signature`. (#219) --- packages/gapic-generator/gapic/schema/wrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 848193d21acb..51dd4870d728 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -512,7 +512,7 @@ def flattened_fields(self) -> Mapping[str, Field]: for sig in signatures: # Get all of the individual fields. 
fields = collections.OrderedDict([ - (f, self.input.get_field(*f.split('.'))) + (f.strip(), self.input.get_field(*f.strip().split('.'))) for f in sig.split(',') ]) From 61edbd623234158f335c8f086e394d57453d0d3f Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 17 Oct 2019 11:03:44 -0700 Subject: [PATCH 0164/1339] Fix for samplegen bugs discovered from the test harness (#220) Implement partial fix for #216 quotes not handled correctly FIx for #216 : lower case (snake) manifest file name --- .../gapic/samplegen/manifest.py | 3 ++- .../gapic/samplegen/samplegen.py | 16 +++++++++--- .../templates/examples/feature_fragments.j2 | 16 ++++++------ .../tests/unit/generator/test_generator.py | 4 +-- .../tests/unit/samplegen/test_integration.py | 16 ++++++------ .../tests/unit/samplegen/test_manifest.py | 2 +- .../tests/unit/samplegen/test_samplegen.py | 26 ++++++++++++------- 7 files changed, 49 insertions(+), 34 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/manifest.py b/packages/gapic-generator/gapic/samplegen/manifest.py index fcd29609c1d6..b56b0159f227 100644 --- a/packages/gapic-generator/gapic/samplegen/manifest.py +++ b/packages/gapic-generator/gapic/samplegen/manifest.py @@ -17,6 +17,7 @@ from typing import Tuple from gapic.samplegen_utils import (types, yaml) +from gapic.utils import case BASE_PATH_KEY = "base_path" DEFAULT_SAMPLE_DIR = "samples" @@ -111,7 +112,7 @@ def transform_path(fpath): ) manifest_fname = manifest_fname_template.format( - api=api_schema.naming.name, + api=case.to_snake_case(api_schema.naming.name), version=api_schema.naming.version, language=environment.name, year=dt.tm_year, diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 2dde7e0947cc..7f5aecb4e8d3 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -15,6 +15,7 @@ import dataclasses import itertools import 
jinja2 +import json import keyword import os import re @@ -275,10 +276,6 @@ def validate_and_transform_request(self, if not witness: raise types.InvalidEnumVariant( "Invalid variant for enum {}: '{}'".format(attr, val)) - # Python code can set protobuf enums from strings. - # This is preferable to adding the necessary import statement - # and requires less munging of the assigned value - duplicate["value"] = f"'{val}'" break elif attr.is_primitive: # Only valid if this is the last attribute in the chain. @@ -305,6 +302,17 @@ def validate_and_transform_request(self, attr_chain[0])) del duplicate["field"] + if isinstance(duplicate["value"], str): + # Passing value through json is a safe and simple way of + # making sure strings are properly wrapped and quotes escaped. + # This statement both wraps enums in quotes and escapes quotes + # in string values passed as parameters. + # + # Python code can set protobuf enums from strings. + # This is preferable to adding the necessary import statement + # and requires less munging of the assigned value + duplicate["value"] = json.dumps(duplicate["value"]) + # Mypy isn't smart enough to handle dictionary unpacking, # so disable it for the AttributeRequestSetup ctor call. 
base_param_to_attrs[attr_chain[0]].append( diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 6e4b83b41270..d39f3f1ed08f 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -41,9 +41,9 @@ There is a little, but not enough for it to be important because {% macro print_string_formatting(string_list) %} {% if string_list|length == 1 %} -"{{ string_list[0]|replace("%s", "{}") }}" +"{{ string_list[0]|replace("%s", "{}")|replace('\"', '\\\"') }}" {% else %} -"{{ string_list[0]|replace("%s", "{}") }}".format({{ string_list[1:]|map("coerce_response_name")|join(", ") }}) +"{{ string_list[0]|replace("%s", "{}")|replace('\"', '\\\"') }}".format({{ string_list[1:]|map("coerce_response_name")|join(", ") }}) {% endif %} {% endmacro %} @@ -177,14 +177,14 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endmacro %} {% macro render_method_call(sample, calling_form, calling_form_enum) %} -{# Note: this doesn't deal with enums or unions #} + {# Note: this doesn't deal with enums or unions #} {% if calling_form in [calling_form_enum.RequestStreamingBidi, - calling_form_enum.RequestStreamingClient] %} -client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request) }}]) -{% else %} + calling_form_enum.RequestStreamingClient] -%} +client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request)|trim -}}]) +{% else -%} {# TODO: set up client streaming once some questions are answered #} -client.{{ sample.rpc|snake_case }}({{ render_request_params(sample.request) }}) -{% endif %} +client.{{ sample.rpc|snake_case }}({{ render_request_params(sample.request)|trim -}}) +{% endif -%} {% endmacro %} {# Setting up the method invocation is the responsibility of the caller: #} diff --git 
a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 660bdd611029..72093c1c95d0 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -351,7 +351,7 @@ def test_samplegen_config_to_output_files( content="\n", ), CodeGeneratorResponse.File( - name="samples/Mollusc.v6.python.21120601.131313.manifest.yaml", + name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", content=dedent("""\ --- type: manifest/samples @@ -441,7 +441,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): content="\n", ), CodeGeneratorResponse.File( - name="samples/Mollusc.v6.python.21120601.131313.manifest.yaml", + name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", content=dedent("""\ --- type: manifest/samples diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 6770b303cc86..5ae53db0b87e 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -91,14 +91,14 @@ def test_generate_sample_basic(): "description": "Determine the full taxonomy of input mollusc", "request": [ {"field": "classify_request.video", - "value": "'path/to/mollusc/video.mkv'", + "value": "path/to/mollusc/video.mkv", "input_parameter": "video", "value_is_file": True}, {"field": "classify_request.location_annotation", - "value": "'New Zealand'", + "value": "New Zealand", "input_parameter": "location"} ], - "response": [{"print": ["Mollusc is a %s", "$resp.taxonomy"]}]} + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} sample_str = samplegen.generate_sample( sample, @@ -130,16 +130,16 @@ def sample_classify(video, location): ) classify_request = {} - # video = 'path/to/mollusc/video.mkv' + # 
video = "path/to/mollusc/video.mkv" with open(video, "rb") as f: classify_request["video"] = f.read() - # location = 'New Zealand' + # location = "New Zealand" classify_request["location_annotation"] = location response = client.classify(classify_request) - print("Mollusc is a {}".format(response.taxonomy)) + print("Mollusc is a \\"{}\\"".format(response.taxonomy)) # [END %s] @@ -149,10 +149,10 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument("--video", type=str, - default='path/to/mollusc/video.mkv') + default="path/to/mollusc/video.mkv") parser.add_argument("--location", type=str, - default='New Zealand') + default="New Zealand") args = parser.parse_args() sample_classify(args.video, args.location) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 17d78ad29740..d1eb23e78706 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -38,7 +38,7 @@ def test_generate_manifest(): manifest_time=4486525628 ) - assert fname == "Mollusc.v1.python.21120304.090708.manifest.yaml" + assert fname == "mollusc.v1.python.21120304.090708.manifest.yaml" doc = gapic_yaml.Doc([ gapic_yaml.KeyVal("type", "manifest/samples"), diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index cb2b86d6c060..c243298aad74 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -878,7 +878,10 @@ def test_validate_request_basic(): "mantle_length": DummyField( message=DummyMessage(type="LENGTH_TYPE")), "mantle_mass": DummyField( - message=DummyMessage(type="MASS_TYPE"))}, + message=DummyMessage(type="MASS_TYPE")), + "num_tentacles": DummyField( + message=DummyMessage(type="MASS_TYPE")) + }, type="SQUID_TYPE" ) ) @@ 
-890,17 +893,20 @@ def test_validate_request_basic(): actual = v.validate_and_transform_request( types.CallingForm.Request, [ - {"field": "squid.mantle_length", "value": "100 cm"}, + {"field": "squid.mantle_length", "value": '100 "cm'}, {"field": "squid.mantle_mass", "value": "10 kg"}, + {"field": "squid.num_tentacles", "value": 10}, ], ) expected = [samplegen.TransformedRequest( base="squid", body=[ samplegen.AttributeRequestSetup(field="mantle_length", - value="100 cm"), + value='"100 \\"cm"'), samplegen.AttributeRequestSetup(field="mantle_mass", - value="10 kg"), + value='"10 kg"'), + samplegen.AttributeRequestSetup(field="num_tentacles", + value=10) ], single=None )] @@ -940,7 +946,7 @@ def test_validate_request_top_level_field(): samplegen.TransformedRequest(base="squid", body=None, single=samplegen.AttributeRequestSetup( - value="humboldt" + value='"humboldt"' )) ] @@ -1033,7 +1039,7 @@ def test_validate_request_multiple_arguments(): base="squid", body=[samplegen.AttributeRequestSetup( field="mantle_length", - value="100 cm", + value='"100 cm"', value_is_file=True)], single=None ), @@ -1041,7 +1047,7 @@ def test_validate_request_multiple_arguments(): base="clam", body=[samplegen.AttributeRequestSetup( field="shell_mass", - value="100 kg", + value='"100 kg"', comment="Clams can be large")], single=None ), @@ -1520,7 +1526,7 @@ def test_validate_request_enum(): expected = [samplegen.TransformedRequest( "cephalopod", body=[samplegen.AttributeRequestSetup(field="subclass", - value="'COLEOIDEA'")], + value='"COLEOIDEA"')], single=None)] assert actual == expected @@ -1536,7 +1542,7 @@ def test_validate_request_enum_top_level(): ) expected = [samplegen.TransformedRequest( "subclass", - single=samplegen.AttributeRequestSetup(value="'COLEOIDEA'"), + single=samplegen.AttributeRequestSetup(value='"COLEOIDEA"'), body=None)] assert actual == expected @@ -1598,7 +1604,7 @@ def test_validate_request_primitive_field(): base="species", body=None, 
single=samplegen.AttributeRequestSetup( - value="Architeuthis dux" + value='"Architeuthis dux"' ) ) ] From 7b2e911b106037cdbdeb25771d6ab9108ed2609b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 22 Oct 2019 16:21:03 -0700 Subject: [PATCH 0165/1339] =?UTF-8?q?Test=20and=20impl=20for=20resource-na?= =?UTF-8?q?me=20requests=20(logic=20side)=20=E2=80=A6=20(#208)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Contains logic and tests for validating and transforming resource-name based requests. Also contains test and impl for the template rendering. End-to-end testing is forthcoming. Addresses #194 --- .../gapic/samplegen/samplegen.py | 327 +++++++++++++----- .../gapic/samplegen_utils/types.py | 12 + .../templates/examples/feature_fragments.j2 | 18 +- packages/gapic-generator/setup.py | 1 + .../tests/unit/samplegen/common_types.py | 3 +- .../tests/unit/samplegen/test_samplegen.py | 177 +++++++++- .../tests/unit/samplegen/test_template.py | 30 ++ 7 files changed, 476 insertions(+), 92 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 7f5aecb4e8d3..adbecef03d53 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -27,6 +27,8 @@ from collections import (defaultdict, namedtuple, ChainMap as chainmap) from typing import (ChainMap, Dict, List, Mapping, Optional, Tuple) +# There is no library stub file for this module, so ignore it. 
+from google.api import resource_pb2 # type: ignore from google.protobuf import descriptor_pb2 # Outstanding issues: @@ -103,6 +105,98 @@ class TransformedRequest: base: str single: Optional[AttributeRequestSetup] body: Optional[List[AttributeRequestSetup]] + pattern: Optional[str] = None + + # Resource patterns look something like + # kingdom/{kingdom}/phylum/{phylum}/class/{class} + RESOURCE_RE = re.compile(r"\{([^}/]+)\}") + + @classmethod + def build(cls, request_type: wrappers.MessageType, api_schema, base: str, + attrs: List[AttributeRequestSetup], is_resource_request: bool): + """Build a TransformedRequest based on parsed input. + + Acts as a factory to hide complicated logic for resource-based requests. + + Args: + request_type (wrappers.MessageType): The method's request message type. + api_schema (api.API): The API schema (used for looking up other messages) + base (str): the name of the base field being set. + attrs List[str]: All the attributes (or fields) being set + for the base field. + is_resource_request (bool): Indicates whether the request describes a + constructed resource name path. + + Returns: + TransformedRequest + + Raises: + NoSuchResource: If the base parameter field for a resource-name + request statement lists a resource_type for which + there is no message with the same resource type. + NoSuchResourcePattern: If all the request setup statements for a + resource name parameter do not combine to + match a valid path pattern for that resource. + """ + + # Attrs is guaranteed to be non-empty because of the construction of + # the base_param_to_attrs map in validate_and_transform_request. + # Each non-error lookup results in an append to the corresponding attrs + # list, and then the key/val pairs are passed into this factory. + if not attrs[0].field: + return cls(base=base, body=None, single=attrs[0]) + elif not is_resource_request: + return cls(base=base, body=attrs, single=None) + else: + # This is the tricky one. 
+ # We need to determine whether the field is describing a valid resource, + # and if so, what its corresponding message type is. + # Then we need to find the pattern with parameters + # that exactly matches the attrs, if one exists. + # + # It's a precondition that the base field is + # a valid field of the request message type. + resource_typestr = (request_type. + fields[base]. + options. + Extensions[resource_pb2.resource_reference]. + type) + + resource_message_descriptor = next( + (msg.options.Extensions[resource_pb2.resource] + for msg in api_schema.messages.values() + if msg.options.Extensions[resource_pb2.resource].type == resource_typestr), + None + ) + if not resource_message_descriptor: + raise types.NoSuchResource( + f"No message exists for resource: {resource_typestr}") + + # The field is only ever empty for singleton attributes. + attr_names: List[str] = [a.field for a in attrs] # type: ignore + + # A single resource may be found under multiple paths and have many patterns. + # We want to find an _exact_ match, if one exists. + pattern = next((p + for p in resource_message_descriptor.pattern + if cls.RESOURCE_RE.findall(p) == attr_names), None) + if not pattern: + attr_name_str = ", ".join(attr_names) + raise types.NoSuchResourcePattern( + f"Resource {resource_typestr} has no pattern with params: {attr_name_str}") + + return cls(base=base, body=attrs, single=None, pattern=pattern) + + +@dataclasses.dataclass +class RequestEntry: + """Throwaway data type used in validating and transforming requests. + + Deliberatly NOT frozen: is_resource_request is mutable on purpose.""" + + is_resource_request: bool = False + attrs: List[AttributeRequestSetup] = dataclasses.field( + default_factory=list) class Validator: @@ -119,8 +213,21 @@ class Validator: VAL_KWORD = "value" BODY_KWORD = "body" - def __init__(self, method: wrappers.Method): - # The response ($resp) variable is special and guaranteed to exist. 
+ # This regex matches each variable or attribute in the following example + # expression and indicates whether the lookup is indexed, mapped, or neither + # + # cephalopoda.coleoidea[0].suborder{"incirrina"} + EXPRESSION_ATTR_RE = re.compile( + r""" + (?P\$?\w+)(?:\[(?P\d+)\]|\{["'](?P[^"']+)["']\})?$ + """.strip()) + + VALID_REQUEST_KWORDS = frozenset( + ("value", "field", "value_is_file", "input_parameter", "comment")) + + # TODO(dovs): make the schema a required param. + def __init__(self, method: wrappers.Method, api_schema=None): + # The response ($resp) variable is special and guaranteed to exist. self.request_type_ = method.input response_type = method.output if method.paged_result_field: @@ -128,6 +235,8 @@ def __init__(self, method: wrappers.Method): elif method.lro: response_type = method.lro.response_type + self.api_schema_ = api_schema + # This is a shameless hack to work around the design of wrappers.Field MockField = namedtuple("MockField", ["message", "repeated"]) @@ -159,6 +268,80 @@ def preprocess_sample(sample, api_schema): def var_field(self, var_name: str) -> Optional[wrappers.Field]: return self.var_defs_.get(var_name) + def _normal_request_setup(self, base_param_to_attrs, val, request, field): + """validates and transforms non-resource-based request entries. + + Private method, lifted out to make validate_and_transform_request cleaner. + + Args: + base_param_to_attrs ({str:RequestEntry}): + val (str): The value to which the terminal field will be set + (only used if the terminus is an enum) + request (str:str): The request dictionary read in from the config. + field (str): The value of the "field" parameter in the request entry. 
+ + Returns: + Tuple[str, AttributeRequestSetup] + """ + base = self.request_type_ + attr_chain = field.split(".") + for i, attr_name in enumerate(attr_chain): + attr = base.fields.get(attr_name) + if not attr: + raise types.BadAttributeLookup( + "Method request type {} has no attribute: '{}'".format( + self.request_type_, attr_name)) + + if attr.message: + base = attr.message + elif attr.enum: + # A little bit hacky, but 'values' is a list, and this is the easiest + # way to verify that the value is a valid enum variant. + witness = any(e.name == val for e in attr.enum.values) + if not witness: + raise types.InvalidEnumVariant( + "Invalid variant for enum {}: '{}'".format(attr, val)) + break + elif attr.is_primitive: + # Only valid if this is the last attribute in the chain. + break + else: + raise TypeError( + f"Could not handle attribute '{attr_name}' of type: {attr.type}") + + if i != len(attr_chain) - 1: + # We broke out of the loop after processing an enum or a primitive. + extra_attrs = ".".join(attr_chain[i:]) + raise types.NonTerminalPrimitiveOrEnum( + f"Attempted to reference attributes of enum value or primitive type: '{extra_attrs}'") + + if len(attr_chain) > 1: + request["field"] = ".".join(attr_chain[1:]) + else: + # Because of the way top level attrs get rendered, + # there can't be duplicates. + # This is admittedly a bit of a hack. + if attr_chain[0] in base_param_to_attrs: + raise types.InvalidRequestSetup( + "Duplicated top level field in request block: '{}'".format( + attr_chain[0])) + del request["field"] + + if isinstance(request["value"], str): + # Passing value through json is a safe and simple way of + # making sure strings are properly wrapped and quotes escaped. + # This statement both wraps enums in quotes and escapes quotes + # in string values passed as parameters. + # + # Python code can set protobuf enums from strings. 
+ # This is preferable to adding the necessary import statement + # and requires less munging of the assigned value + request["value"] = json.dumps(request["value"]) + + # Mypy isn't smart enough to handle dictionary unpacking, + # so disable it for the AttributeRequestSetup ctor call. + return attr_chain[0], AttributeRequestSetup(**request) # type: ignore + def validate_and_transform_request(self, calling_form: types.CallingForm, request: List[Mapping[str, str]]) -> List[TransformedRequest]: @@ -172,10 +355,13 @@ def validate_and_transform_request(self, All values in the initial request are strings except for the value for "value_is_file", which is a bool. - The TransformedRequest structure of the return value has three fields: - "base", "body", and "single", where "base" maps to the top level attribute name, - "body" maps to a list of subfield assignment definitions, and "single" - maps to a singleton attribute assignment structure with no "field" value. + The TransformedRequest structure of the return value has four fields: + "base", "body", "single", and "pattern", + where "base" maps to the top level attribute name, + "body" maps to a list of subfield assignment definitions, "single" + maps to a singleton attribute assignment structure with no "field" value, + and "pattern" is a resource name pattern string if the request describes + resource name construction. The "field" attribute in the requests in a "body" list have their prefix stripped; the request in a "single" attribute has no "field" attribute. @@ -226,97 +412,67 @@ def validate_and_transform_request(self, a client-side streaming calling form. BadAttributeLookup: If a request field refers to a non-existent field in the request message type. + ResourceRequestMismatch: If a request attempts to describe both + attribute manipulation and resource name + construction. 
""" base_param_to_attrs: Dict[str, - List[AttributeRequestSetup]] = defaultdict(list) - + RequestEntry] = defaultdict(RequestEntry) for r in request: - duplicate = dict(r) - val = duplicate.get("value") + r_dup = dict(r) + val = r_dup.get("value") if not val: raise types.InvalidRequestSetup( "Missing keyword in request entry: 'value'") - field = duplicate.get("field") + field = r_dup.get("field") if not field: raise types.InvalidRequestSetup( "Missing keyword in request entry: 'field'") - spurious_keywords = set(duplicate.keys()) - {"value", - "field", - "value_is_file", - "input_parameter", - "comment"} - if spurious_keywords: + spurious_kwords = set(r_dup.keys()) - self.VALID_REQUEST_KWORDS + if spurious_kwords: raise types.InvalidRequestSetup( "Spurious keyword(s) in request entry: {}".format( - ", ".join(f"'{kword}'" for kword in spurious_keywords))) + ", ".join(f"'{kword}'" for kword in spurious_kwords))) - input_parameter = duplicate.get("input_parameter") + input_parameter = r_dup.get("input_parameter") if input_parameter: self._handle_lvalue(input_parameter, wrappers.Field( field_pb=descriptor_pb2.FieldDescriptorProto())) - attr_chain = field.split(".") - base = self.request_type_ - for i, attr_name in enumerate(attr_chain): - attr = base.fields.get(attr_name) - if not attr: + # The percentage sign is used for setting up resource based requests + percent_idx = field.find('%') + if percent_idx == -1: + base_param, attr = self._normal_request_setup( + base_param_to_attrs, val, r_dup, field) + + request_entry = base_param_to_attrs.get(base_param) + if request_entry and request_entry.is_resource_request: + raise types.ResourceRequestMismatch( + f"Request setup mismatch for base: {base_param}") + + base_param_to_attrs[base_param].attrs.append(attr) + else: + # It's a resource based request. 
+ base_param, resource_attr = (field[:percent_idx], + field[percent_idx + 1:]) + request_entry = base_param_to_attrs.get(base_param) + if request_entry and not request_entry.is_resource_request: + raise types.ResourceRequestMismatch( + f"Request setup mismatch for base: {base_param}") + + if not self.request_type_.fields.get(base_param): raise types.BadAttributeLookup( "Method request type {} has no attribute: '{}'".format( - self.request_type_.type, attr_name)) - - if attr.message: - base = attr.message - elif attr.enum: - # A little bit hacky, but 'values' is a list, and this is the easiest - # way to verify that the value is a valid enum variant. - witness = any(e.name == val for e in attr.enum.values) - if not witness: - raise types.InvalidEnumVariant( - "Invalid variant for enum {}: '{}'".format(attr, val)) - break - elif attr.is_primitive: - # Only valid if this is the last attribute in the chain. - break - else: - raise TypeError( - f"Could not handle attribute '{attr_name}' of type: {attr.type}") - - if i != len(attr_chain) - 1: - # We broke out of the loop after processing an enum or a primitive. - extra_attrs = ".".join(attr_chain[i:]) - raise types.NonTerminalPrimitiveOrEnum( - f"Attempted to reference attributes of enum value or primitive type: '{extra_attrs}'") - - if len(attr_chain) > 1: - duplicate["field"] = ".".join(attr_chain[1:]) - else: - # Because of the way top level attrs get rendered, - # there can't be duplicates. - # This is admittedly a bit of a hack. - if attr_chain[0] in base_param_to_attrs: - raise types.InvalidRequestSetup( - "Duplicated top level field in request block: '{}'".format( - attr_chain[0])) - del duplicate["field"] - - if isinstance(duplicate["value"], str): - # Passing value through json is a safe and simple way of - # making sure strings are properly wrapped and quotes escaped. - # This statement both wraps enums in quotes and escapes quotes - # in string values passed as parameters. 
- # - # Python code can set protobuf enums from strings. - # This is preferable to adding the necessary import statement - # and requires less munging of the assigned value - duplicate["value"] = json.dumps(duplicate["value"]) - - # Mypy isn't smart enough to handle dictionary unpacking, - # so disable it for the AttributeRequestSetup ctor call. - base_param_to_attrs[attr_chain[0]].append( - AttributeRequestSetup(**duplicate)) # type: ignore + self.request_type_, base_param)) + + r_dup["field"] = resource_attr + request_entry = base_param_to_attrs[base_param] + request_entry.is_resource_request = True + request_entry.attrs.append( + AttributeRequestSetup(**r_dup)) # type: ignore client_streaming_forms = { types.CallingForm.RequestStreamingClient, @@ -328,8 +484,13 @@ def validate_and_transform_request(self, "Too many base parameters for client side streaming form") return [ - (TransformedRequest(base=key, body=val, single=None) if val[0].field - else TransformedRequest(base=key, body=None, single=val[0])) + TransformedRequest.build( + self.request_type_, + self.api_schema_, + key, + val.attrs, + val.is_resource_request + ) for key, val in base_param_to_attrs.items() ] @@ -382,16 +543,10 @@ def validate_expression(self, exp: str) -> wrappers.Field: Returns: wrappers.Field: The final field in the chain. """ - # TODO: Add resource name handling, i.e. % - chain_link_re = re.compile( - r""" - (?P\$?\w+)(?:\[(?P\d+)\]|\{["'](?P[^"']+)["']\})?$ - """.strip()) - def validate_recursively(expression, scope, depth=0): first_dot = expression.find(".") base = expression[:first_dot] if first_dot > 0 else expression - match = chain_link_re.match(base) + match = self.EXPRESSION_ATTR_RE.match(base) if not match: raise types.BadAttributeLookup( f"Badly formed attribute expression: {expression}") @@ -675,7 +830,7 @@ def generate_sample( ) -> str: """Generate a standalone, runnable sample. - Rendering and writing the rendered output is left for the caller. 
+ Writing the rendered output is left for the caller. Args: sample (Any): A definition for a single sample generated from parsed yaml. diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index 3da7eb611952..dfd89c80985c 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -88,6 +88,18 @@ class DuplicateSample(SampleError): pass +class ResourceRequestMismatch(SampleError): + pass + + +class NoSuchResource(SampleError): + pass + + +class NoSuchResourcePattern(SampleError): + pass + + class CallingForm(Enum): Request = auto() RequestPaged = auto() diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index d39f3f1ed08f..425908855d8c 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -155,12 +155,22 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endmacro %} {% macro render_request_setup(request) %} - {% for parameter_block in request if parameter_block.body %} +{% for parameter_block in request if parameter_block.body %} +{% if parameter_block.pattern -%} +{# This is a resource-name patterned lookup parameter #} +{% with formals = [] -%} +{% for attr in parameter_block.body -%} +{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) -%} +{% endfor -%} +{{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) +{% endwith -%} +{% else -%} {{ parameter_block.base }} = {} - {% for attr in parameter_block.body %} +{% for attr in parameter_block.body %} {{ render_request_attr(parameter_block.base, attr) }} - {% endfor %} - {% endfor %} +{% endfor %} +{% endif -%} +{% endfor %} {% endmacro %} {% macro render_request_params(request) %} 
diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f24e6f51cd5f..1918922d95b6 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -44,6 +44,7 @@ 'click >= 6.7', 'googleapis-common-protos >= 1.6.0', 'jinja2 >= 2.10', + 'google_api >= 0.1.12', 'protobuf >= 3.7.1', 'pypandoc >= 1.4', 'PyYAML >= 5.1.1', diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index a6c1f0571010..e2b542b2c981 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -52,7 +52,8 @@ DummyService = namedtuple("DummyService", ["methods"]) -DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming"]) +DummyApiSchema = namedtuple("DummyApiSchema", + ["services", "naming", "messages"]) DummyApiSchema.__new__.__defaults__ = (False,) * len(DummyApiSchema._fields) DummyNaming = namedtuple( diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index c243298aad74..d14b01fdbeb1 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -17,6 +17,7 @@ from typing import (TypeVar, Sequence) from collections import (OrderedDict, namedtuple) +from google.api import resource_pb2 from google.protobuf import descriptor_pb2 import gapic.samplegen.samplegen as samplegen @@ -25,7 +26,7 @@ from gapic.schema import (api, metadata, naming) import gapic.schema.wrappers as wrappers -from common_types import (DummyField, DummyMessage, +from common_types import (DummyApiSchema, DummyField, DummyMessage, DummyMethod, message_factory, enum_factory) from gapic.samplegen_utils import utils @@ -1585,6 +1586,69 @@ def test_validate_request_enum_not_last_attr(): ) +def 
test_validate_request_resource_name(): + request = [ + {"field": "taxon%kingdom", "value": "animalia"}, + {"field": "taxon%phylum", "value": "mollusca", "input_parameter": "phylum"} + ] + + resource_type = "taxonomy.google.com/Linnaean" + taxon_field = make_field(name="taxon") + rr = taxon_field.options.Extensions[resource_pb2.resource_reference] + rr.type = resource_type + request_descriptor = make_message(name="Request", fields=[taxon_field]) + + # Strictly speaking, 'phylum' is the resource, but it's not what we're + # manipulating to let samplegen know it's the resource. + phylum_options = descriptor_pb2.MessageOptions() + resource = phylum_options.Extensions[resource_pb2.resource] + resource.type = resource_type + resource.pattern.append("kingdom/{kingdom}/phylum/{phylum}") + phylum_descriptor = make_message(name="Phylum", options=phylum_options) + + method = DummyMethod(input=request_descriptor) + # We don't actually care about the key, + # but the 'messages' property is a mapping type, + # and the implementation code expects this. 
+ api_schema = DummyApiSchema( + messages={ + k: v + for k, v in enumerate([ + request_descriptor, + phylum_descriptor, + ]) + } + ) + + v = samplegen.Validator(method=method, api_schema=api_schema) + + actual = v.validate_and_transform_request( + types.CallingForm.Request, + request + ) + + expected = [ + samplegen.TransformedRequest( + base="taxon", + pattern="kingdom/{kingdom}/phylum/{phylum}", + single=None, + body=[ + samplegen.AttributeRequestSetup( + field="kingdom", + value="animalia", + ), + samplegen.AttributeRequestSetup( + field="phylum", + value="mollusca", + input_parameter="phylum", + ), + ] + ) + ] + + assert actual == expected + + def test_validate_request_primitive_field(): field = make_field(name="species", type="TYPE_STRING") request_type = make_message(name="request", fields=[field]) @@ -1612,6 +1676,117 @@ def test_validate_request_primitive_field(): assert actual == expected +def test_validate_request_resource_name_mixed(request=None): + # Note the mixing of resource name and non-resource name request field + request = request or [ + {"field": "taxon%kingdom", "value": "animalia"}, + {"field": "taxon.domain", "value": "eukarya"}, + ] + v = samplegen.Validator( + method=DummyMethod( + input=make_message( + name="taxonomy", + fields=[ + make_field( + name="taxon", + message=make_message( + name="Taxon", + fields=[ + make_field( + name="domain", + message=make_message(name="Domain") + ) + ] + ) + ) + ] + ), + ), + api_schema=None + ) + + with pytest.raises(types.ResourceRequestMismatch): + v.validate_and_transform_request( + types.CallingForm.Request, + request + ) + + +def test_validate_request_resource_name_mixed_reversed(): + # Again, note the mixed use of . 
and % + request = [ + {"field": "taxon.domain", "value": "eukarya"}, + {"field": "taxon%kingdom", "value": "animalia"}, + ] + test_validate_request_resource_name_mixed(request) + + +def test_validate_request_no_such_attr(): + request = [ + {"field": "taxon%kingdom", "value": "animalia"} + ] + method = DummyMethod(input=make_message(name="Request")) + v = samplegen.Validator(method=method) + + with pytest.raises(types.BadAttributeLookup): + v.validate_and_transform_request(types.CallingForm.Request, request) + + +def test_validate_request_no_such_resource(): + request = [ + {"field": "taxon%kingdom", "value": "animalia"} + ] + resource_type = "taxonomy.google.com/Linnaean" + taxon_field = make_field(name="taxon") + rr = taxon_field.options.Extensions[resource_pb2.resource_reference] + rr.type = resource_type + request_descriptor = make_message(name="Request", fields=[taxon_field]) + + method = DummyMethod(input=request_descriptor) + api_schema = DummyApiSchema( + messages={k: v for k, v in enumerate([request_descriptor])} + ) + + v = samplegen.Validator(method=method, api_schema=api_schema) + with pytest.raises(types.NoSuchResource): + v.validate_and_transform_request(types.CallingForm.Request, request) + + +def test_validate_request_no_such_pattern(): + request = [ + # Note that there's only the one attribute, 'phylum', and that the only + # pattern expects both 'kingdom' and 'phylum'. 
+ {"field": "taxon%phylum", "value": "mollusca", "input_parameter": "phylum"} + ] + + resource_type = "taxonomy.google.com/Linnaean" + taxon_field = make_field(name="taxon") + rr = taxon_field.options.Extensions[resource_pb2.resource_reference] + rr.type = resource_type + request_descriptor = make_message(name="Request", fields=[taxon_field]) + + phylum_options = descriptor_pb2.MessageOptions() + resource = phylum_options.Extensions[resource_pb2.resource] + resource.type = resource_type + resource.pattern.append("kingdom/{kingdom}/phylum/{phylum}") + phylum_descriptor = make_message(name="Phylum", options=phylum_options) + + method = DummyMethod(input=request_descriptor) + api_schema = DummyApiSchema( + messages={ + k: v + for k, v in enumerate([ + request_descriptor, + phylum_descriptor, + ]) + } + ) + + v = samplegen.Validator(method=method, api_schema=api_schema) + with pytest.raises(types.NoSuchResourcePattern): + v.validate_and_transform_request(types.CallingForm.Request, request) + + def test_validate_request_non_terminal_primitive_field(): field = make_field(name="species", type="TYPE_STRING") request_type = make_message(name="request", fields=[field]) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 3b38c8f0d113..d4383d4bb753 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -173,6 +173,36 @@ def test_render_request_basic(): ) +def test_render_request_resource_name(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_request_setup(request) }} + ''', + ''' + taxon = "kingdom/{kingdom}/phylum/{phylum}".format(kingdom="animalia", phylum=mollusca) + ''', + request=[ + samplegen.TransformedRequest( + base="taxon", + single=None, + body=[ + samplegen.AttributeRequestSetup( + field="kingdom", + value='"animalia"', + ), + 
samplegen.AttributeRequestSetup( + field="phylum", + value="mollusca", + input_parameter="mollusca", + ) + ], + pattern="kingdom/{kingdom}/phylum/{phylum}" + ), + ] + ) + + def test_render_print(): check_template( ''' From 011659ad24068b6e5044172198708a4ed13bf995 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 24 Oct 2019 13:40:13 -0700 Subject: [PATCH 0166/1339] Change generated client class name to have 'Client' at the end (#223) Fix for #221 : Client class name changes from monolith gapic --- .../gapic-generator/gapic/schema/wrappers.py | 10 +++++-- .../templates/$namespace/$name/__init__.py.j2 | 4 +-- .../$name_$version/$sub/__init__.py.j2 | 4 +-- .../$sub/services/$service/__init__.py.j2 | 4 +-- .../$sub/services/$service/client.py.j2 | 8 +++--- .../gapic/templates/examples/sample.py.j2 | 4 +-- .../$name_$version/$sub/test_$service.py.j2 | 26 +++++++++---------- .../gapic-generator/tests/system/conftest.py | 12 ++++----- .../tests/unit/samplegen/test_integration.py | 6 ++--- .../unit/schema/wrappers/test_service.py | 9 +++++-- 10 files changed, 49 insertions(+), 38 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 51dd4870d728..5895150f3d9e 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -627,6 +627,11 @@ class Service: def __getattr__(self, name): return getattr(self.service_pb, name) + @property + def client_name(self) -> str: + """Returns the name of the generated client class""" + return self.name + "Client" + @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" @@ -672,8 +677,9 @@ def names(self) -> FrozenSet[str]: used for imports. """ # Put together a set of the service and method names. 
- answer = {self.name}.union( - {utils.to_snake_case(i.name) for i in self.methods.values()} + answer = {self.name, self.client_name} + answer.update( + utils.to_snake_case(i.name) for i in self.methods.values() ) # Identify any import module names where the same module name is used diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 index 279cb2b464f9..547f8d5eac16 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 @@ -11,7 +11,7 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view -%} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.name }} + {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} {% endfor -%} {# Import messages from each proto. 
@@ -36,7 +36,7 @@ __all__ = ( {%- endfor %} {%- for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.name }}', + '{{ service.client_name }}', {%- endfor %} {%- for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 index 61b21978ea50..df685b243801 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 @@ -10,7 +10,7 @@ from . import {{ subpackage }} {% filter sort_lines -%} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view -%} -from .services.{{ service.name|snake_case }} import {{ service.name }} +from .services.{{ service.name|snake_case }} import {{ service.client_name }} {% endfor -%} {% endfilter -%} @@ -42,7 +42,7 @@ __all__ = ( {%- endfor %} {%- for service in api.services.values() if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.name }}', + '{{ service.client_name }}', {%- endfor %} {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 index b6cea2a62e7b..f9f07d44df9a 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 @@ -1,9 +1,9 @@ {% extends '_base.py.j2' 
%} {% block content %} -from .client import {{ service.name }} +from .client import {{ service.client_name }} __all__ = ( - '{{ service.name }}', + '{{ service.client_name }}', ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index b788fb4dfc58..97ec9376a179 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -21,7 +21,7 @@ from .transports.base import {{ service.name }}Transport from .transports.grpc import {{ service.name }}GrpcTransport -class {{ service.name }}Meta(type): +class {{ service.client_name }}Meta(type): """Metaclass for the {{ service.name }} client. This provides class-level methods for building and retrieving @@ -52,14 +52,14 @@ class {{ service.name }}Meta(type): return next(iter(cls._transport_registry.values())) -class {{ service.name }}(metaclass={{ service.name }}Meta): +class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """{{ service.meta.doc|rst(width=72, indent=4) }}""" def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None ) -> None: - """Instantiate the {{ (service.name|snake_case).replace('_', ' ') }}. + """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. 
Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): @@ -214,6 +214,6 @@ except pkg_resources.DistributionNotFound: __all__ = ( - '{{ service.name }}', + '{{ service.client_name }}', ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index e0585289214b..86f28b1449b3 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -28,13 +28,13 @@ {% for import_statement in imports %} {{ import_statement }} {% endfor %} -from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.name }} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} {# also need calling form #} def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input_params(sample.request)|trim -}}): """{{ sample.description }}""" - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport="grpc", ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 index 031c7b912ea0..ce9203007fad 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -11,7 +11,7 @@ import pytest {% filter sort_lines -%} from google import auth from google.auth import credentials -from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + 
service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.name }} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports {% if service.has_lro -%} from google.api_core import future @@ -28,7 +28,7 @@ from google.longrunning import operations_pb2 {% for method in service.methods.values() -%} def test_{{ method.name|snake_case }}(transport: str = 'grpc'): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, ) @@ -79,7 +79,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% if method.field_headers %} def test_{{ method.name|snake_case }}_field_headers(): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), ) @@ -116,7 +116,7 @@ def test_{{ method.name|snake_case }}_field_headers(): {% if method.flattened_fields %} def test_{{ method.name|snake_case }}_flattened(): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), ) @@ -152,7 +152,7 @@ def test_{{ method.name|snake_case }}_flattened(): def test_{{ method.name|snake_case }}_flattened_error(): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), ) @@ -170,7 +170,7 @@ def test_{{ method.name|snake_case }}_flattened_error(): {% if method.paged_result_field %} def test_{{ method.name|snake_case }}_pager(): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials, ) @@ -222,7 +222,7 @@ def 
test_credentials_transport_error(): credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, ) @@ -233,13 +233,13 @@ def test_transport_instance(): transport = transports.{{ service.name }}GrpcTransport( credentials=credentials.AnonymousCredentials(), ) - client = {{ service.name }}(transport=transport) + client = {{ service.client_name }}(transport=transport) assert client._transport is transport def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), ) assert isinstance( @@ -277,7 +277,7 @@ def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, 'default') as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - client = {{ service.name }}() + client = {{ service.client_name }}() adc.assert_called_once_with(scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', @@ -287,7 +287,7 @@ def test_{{ service.name|snake_case }}_auth_adc(): def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), host='{{ host }}', transport='grpc', @@ -298,7 +298,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): def test_{{ service.name|snake_case }}_host_with_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), host='{{ host }}:8000', transport='grpc', @@ -317,7 +317,7 @@ def test_{{ service.name|snake_case 
}}_grpc_transport_channel(): {% if service.has_lro -%} def test_{{ service.name|snake_case }}_grpc_lro_client(): - client = {{ service.name }}( + client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport='grpc', ) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 423d037619ef..f7b6ebb897cb 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -14,23 +14,23 @@ import pytest -from google.showcase import Echo -from google.showcase import Identity +from google.showcase import EchoClient +from google.showcase import IdentityClient import grpc @pytest.fixture def echo(): - transport = Echo.get_transport_class('grpc')( + transport = EchoClient.get_transport_class('grpc')( channel=grpc.insecure_channel('localhost:7469'), ) - return Echo(transport=transport) + return EchoClient(transport=transport) @pytest.fixture def identity(): - transport = Identity.get_transport_class('grpc')( + transport = IdentityClient.get_transport_class('grpc')( channel=grpc.insecure_channel('localhost:7469'), ) - return Identity(transport=transport) + return IdentityClient(transport=transport) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 5ae53db0b87e..e0be078d0eae 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -71,7 +71,7 @@ def test_generate_sample_basic(): api_naming = naming.Naming( name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( - service_pb=namedtuple('service_pb', ['name'])('MolluscClient'), + service_pb=namedtuple('service_pb', ['name'])('MolluscService'), methods={ "Classify": DummyMethod( input=input_type, @@ -119,12 +119,12 @@ def test_generate_sample_basic(): # [START %s] 
from google import auth from google.auth import credentials -from molluscs.v1.molluscclient.services.mollusc_client import MolluscClient +from molluscs.v1.molluscclient.services.mollusc_service import MolluscServiceClient def sample_classify(video, location): """Determine the full taxonomy of input mollusc""" - client = MolluscClient( + client = MolluscServiceClient( credentials=credentials.AnonymousCredentials(), transport="grpc", ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 80ad6f8952ca..dc7ea0ecd954 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -27,6 +27,7 @@ def test_service_properties(): service = make_service(name='ThingDoer') assert service.name == 'ThingDoer' + assert service.client_name == 'ThingDoerClient' def test_service_host(): @@ -51,7 +52,9 @@ def test_service_names(): get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), )) - assert service.names == {'ThingDoer', 'do_thing', 'jump', 'yawn'} + expected_names = {'ThingDoer', 'ThingDoerClient', + 'do_thing', 'jump', 'yawn'} + assert service.names == expected_names def test_service_name_colliding_modules(): @@ -60,7 +63,9 @@ def test_service_name_colliding_modules(): get_method('Jump', 'bacon.bar.JumpRequest', 'bacon.bar.JumpResponse'), get_method('Yawn', 'a.b.v1.c.YawnRequest', 'a.b.v1.c.YawnResponse'), )) - assert service.names == {'ThingDoer', 'do_thing', 'jump', 'yawn', 'bar'} + expected_names = {'ThingDoer', 'ThingDoerClient', + 'do_thing', 'jump', 'yawn', 'bar'} + assert service.names == expected_names def test_service_no_scopes(): From be6ad16a3ddd502840e0f27fb54f7e3f9209ca44 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 28 Oct 2019 09:36:23 -0700 Subject: [PATCH 0167/1339] Fix 
for #224 (#225) The valid config type of a sample is now com.google.api.codegen.samplegen.v1p2.SampleConfigProto --- .../gapic-generator/gapic/samplegen_utils/utils.py | 2 +- .../tests/unit/generator/test_generator.py | 8 ++++---- .../tests/unit/samplegen/test_integration.py | 10 +++++----- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index 6ce31c4001eb..a0d9892e9b3c 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -25,7 +25,7 @@ MIN_SCHEMA_VERSION = (1, 2, 0) -VALID_CONFIG_TYPE = "com.google.api.codegen.SampleConfigProto" +VALID_CONFIG_TYPE = "com.google.api.codegen.samplegen.v1p2.SampleConfigProto" def coerce_response_name(s: str) -> str: diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 72093c1c95d0..32d48315edcf 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -278,7 +278,7 @@ def test_parse_sample_paths(fs): contents=dedent( ''' --- - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService @@ -318,7 +318,7 @@ def test_samplegen_config_to_output_files( contents=dedent( ''' --- - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - id: squid_sample @@ -406,7 +406,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): contents=dedent( ''' --- - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - id: squid_sample 
@@ -478,7 +478,7 @@ def test_generator_duplicate_samples(fs): contents=dedent( ''' # Note: the samples are duplicates. - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - id: squid_sample diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index e0be078d0eae..71c602feb270 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -197,7 +197,7 @@ def test_generate_sample_config_fpaths(fs): contents=dedent( ''' --- - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService @@ -243,7 +243,7 @@ def test_generate_sample_config_fpaths_bad_contents_old(fs): contents=dedent( ''' --- - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.1.0 samples: - service: google.cloud.language.v1.LanguageService @@ -258,7 +258,7 @@ def test_generate_sample_config_fpaths_bad_contents_no_samples(fs): contents=dedent( ''' --- - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 ''' ) @@ -274,13 +274,13 @@ def test_generate_sample_config_partial_config(fs): ''' --- # Note: not a valid config because of the type. 
- type: com.google.api.codegen.SampleConfigPronto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigPronto schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService --- # Note: this one IS a valid config - type: com.google.api.codegen.SampleConfigProto + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService From a354abc7559da149d192f688c15bc04dd859469b Mon Sep 17 00:00:00 2001 From: Alexander Fenster Date: Mon, 28 Oct 2019 15:52:00 -0700 Subject: [PATCH 0168/1339] [fix] Better path handling in gapic.sh (#226) --- packages/gapic-generator/gapic.sh | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic.sh b/packages/gapic-generator/gapic.sh index bf207a943106..17f5c563a392 100755 --- a/packages/gapic-generator/gapic.sh +++ b/packages/gapic-generator/gapic.sh @@ -64,8 +64,8 @@ if [ -z "$IMAGE" ] || [ -z "$IN" ] || [ -z "$OUT" ]; then fi # Ensure that the input directory exists (and is a directory). -if ! [ -d $IN ]; then - >&2 echo "Directory does not exist: $IN" +if ! [ -d "${PROTO_PATH}/$IN" ]; then + >&2 echo "Directory does not exist: ${PROTO_PATH}/$IN" exit 2 fi @@ -85,10 +85,19 @@ if [ "$(ls -A $OUT )"]; then >&2 echo "Warning: Output directory is not empty." fi +# Convert IN and OUT to absolute paths for Docker +CWD=`pwd` +cd ${PROTO_PATH}/$IN +ABS_IN=`pwd` +cd $CWD +cd $OUT +ABS_OUT=`pwd` +cd $CWD + # Generate the client library. 
docker run \ - --mount type=bind,source=${PROTO_PATH}/${IN},destination=/in/${IN},readonly \ - --mount type=bind,source=$OUT,destination=/out \ + --mount type=bind,source=${ABS_IN},destination=/in/${IN},readonly \ + --mount type=bind,source=${ABS_OUT},destination=/out \ --rm \ --user $UID \ $IMAGE \ From de2cf9e6b6d2e95c83ae64588aff94c906a17c9a Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 30 Oct 2019 16:39:28 -0700 Subject: [PATCH 0169/1339] Add a license to the generated gapic and samples (#231) Fix for #229 and #230 Manual tests show the expected license is generated in the client library files. --- .../gapic-generator/gapic/samplegen/samplegen.py | 7 ------- .../gapic-generator/gapic/templates/_base.py.j2 | 3 +++ .../gapic-generator/gapic/templates/_license.j2 | 14 ++++++++++++++ .../templates/examples/feature_fragments.j2 | 10 ++-------- .../gapic/templates/examples/sample.py.j2 | 11 +++++------ .../tests/unit/samplegen/test_integration.py | 16 ++++++++++++++-- 6 files changed, 38 insertions(+), 23 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/_license.j2 diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index adbecef03d53..10262b21e7d6 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -36,12 +36,6 @@ # defined with an _implicit_ $resp variable. -# TODO: read in copyright and license from files. 
-FILE_HEADER: Dict[str, str] = { - "copyright": "TODO: add a copyright", - "license": "TODO: add a license", -} - RESERVED_WORDS = frozenset( itertools.chain( keyword.kwlist, @@ -864,7 +858,6 @@ def generate_sample( v.validate_response(sample["response"]) return sample_template.render( - file_header=FILE_HEADER, sample=sample, imports=[ "from google import auth", diff --git a/packages/gapic-generator/gapic/templates/_base.py.j2 b/packages/gapic-generator/gapic/templates/_base.py.j2 index 5a9ea62f8bc1..133cf7aa5811 100644 --- a/packages/gapic-generator/gapic/templates/_base.py.j2 +++ b/packages/gapic-generator/gapic/templates/_base.py.j2 @@ -1,3 +1,6 @@ # -*- coding: utf-8 -*- +{% block license %} +{% include "_license.j2" %} +{% endblock %} {%- block content %} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/_license.j2 b/packages/gapic-generator/gapic/templates/_license.j2 new file mode 100644 index 000000000000..3264db7db75f --- /dev/null +++ b/packages/gapic-generator/gapic/templates/_license.j2 @@ -0,0 +1,14 @@ +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 425908855d8c..3f0ad569016b 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -25,14 +25,8 @@ There is a little, but not enough for it to be important because {# response handling macros #} -{% macro sample_header(file_header, sample, calling_form) %} -{% for line in file_header["copyright"].split("\n") %} -# {{ line }} -{% endfor %} -{% for line in file_header["license"].split("\n") %} -# {{ line }} -{% endfor %} -# +{% macro sample_header(sample, calling_form) %} + # DO NOT EDIT! This is a generated sample ("{{ calling_form }}", "{{ sample.id }}") # # To install the latest published package dependency, execute the following: diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 86f28b1449b3..cab331de12d2 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -13,15 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
#} +{% extends "_base.py.j2" %} + +{% block content %} {# Input parameters: sample #} -{# fileHeader#} -{# imports #} {# callingForm #} -{# callingFormEnum #} -{# Note: this sample template is WILDLY INACCURATE AND INCOMPLETE #} -{# It does not correctly enums, unions, top level attributes, or various other things #} {% import "examples/feature_fragments.j2" as frags %} -{{ frags.sample_header(file_header, sample, calling_form) }} +{{ frags.sample_header(sample, calling_form) }} # [START {{ sample.id }}] {# python code is responsible for all transformations: all we do here is render #} @@ -47,3 +45,4 @@ def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input # [END {{ sample.id }}] {{ frags.render_main_block(sample.rpc, sample.request) }} +{%- endblock %} diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 71c602feb270..f82470e898c9 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -107,8 +107,20 @@ def test_generate_sample_basic(): ) sample_id = ("mollusc_classify_sync") - expected_str = '''# TODO: add a copyright -# TODO: add a license + expected_str = '''# -*- coding: utf-8 -*- +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # DO NOT EDIT! 
This is a generated sample ("request", "%s") # From 1b0b7ca09b18fd7c382987ff45edc87b72c22d2a Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 1 Nov 2019 10:01:32 -0700 Subject: [PATCH 0170/1339] Remove a spurious and cryptic dependency I can't remember adding (#233) --- packages/gapic-generator/setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1918922d95b6..f24e6f51cd5f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -44,7 +44,6 @@ 'click >= 6.7', 'googleapis-common-protos >= 1.6.0', 'jinja2 >= 2.10', - 'google_api >= 0.1.12', 'protobuf >= 3.7.1', 'pypandoc >= 1.4', 'PyYAML >= 5.1.1', From e55f865684bade972ad205d2794e38a513ad1e31 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 1 Nov 2019 13:43:22 -0700 Subject: [PATCH 0171/1339] [feat] Retries (#234) This PR ostensibly implements reading a gRPC service config and populating the appropriate retry behavior. Testing requires additions to the showcase repository and CI rules to build with a service config. This is currently unimplemented We removed the default retry behavior, so the tests correctly no longer work. These tests should be reinstated when Showcase has a gRPC service config. 
--- .../gapic/generator/generator.py | 4 +- .../gapic/generator/options.py | 29 ++++--- packages/gapic-generator/gapic/schema/api.py | 78 +++++++++++++++++-- .../gapic-generator/gapic/schema/wrappers.py | 13 ++++ .../$sub/services/$service/client.py.j2 | 25 +++--- packages/gapic-generator/setup.py | 2 + .../tests/system/test_retry.py | 41 ---------- .../tests/unit/schema/test_api.py | 72 +++++++++++++++-- 8 files changed, 185 insertions(+), 79 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index bc3dd09e428d..98dcb7507af5 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -16,12 +16,12 @@ import yaml import re import os -from typing import (Any, DefaultDict, Dict, Mapping, List) +from typing import (Any, DefaultDict, Dict, Mapping) from hashlib import sha256 from collections import (OrderedDict, defaultdict) from gapic.samplegen_utils.utils import ( coerce_response_name, is_valid_sample_cfg) -from gapic.samplegen_utils.types import (InvalidConfig, DuplicateSample) +from gapic.samplegen_utils.types import DuplicateSample from gapic.samplegen import (manifest, samplegen) from gapic.generator import options from gapic.generator import formatter diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 31669c698a7b..2ed3d827e1bc 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -13,13 +13,14 @@ # limitations under the License. 
from collections import defaultdict -from typing import DefaultDict, List, Tuple +from typing import Any, DefaultDict, Dict, List, Optional, Tuple import dataclasses +import json import os import warnings -from gapic.samplegen_utils import (types, utils as samplegen_utils) +from gapic.samplegen_utils import utils as samplegen_utils @dataclasses.dataclass(frozen=True) @@ -30,14 +31,16 @@ class Options: on unrecognized arguments (essentially, we throw them away, but we do warn if it looks like it was meant for us). """ - templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) + name: str = '' namespace: Tuple[str, ...] = dataclasses.field(default=()) + retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) - name: str = '' + templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) # Class constants - SAMPLES_OPT: str = 'samples' PYTHON_GAPIC_PREFIX: str = 'python-gapic-' + RETRY_OPT: str = 'retry-config' + SAMPLES_OPT: str = 'samples' @classmethod def build(cls, opt_string: str) -> 'Options': @@ -55,8 +58,8 @@ def build(cls, opt_string: str) -> 'Options': Raises: gapic.samplegen_utils.types.InvalidConfig: - If paths to files or directories that should contain sample - configs are passed and no valid sample config is found. + If paths to files or directories that should contain sample + configs are passed and no valid sample config is found. """ # Parse out every option beginning with `python-gapic` opts: DefaultDict[str, List[str]] = defaultdict(list) @@ -66,8 +69,9 @@ def build(cls, opt_string: str) -> 'Options': if '=' in opt: opt, value = opt.split('=') - if opt == cls.SAMPLES_OPT: - opts[cls.SAMPLES_OPT].append(value) + # Save known, expected keys. + if opt in (cls.RETRY_OPT, cls.SAMPLES_OPT): + opts[opt].append(value) # Throw away other options not meant for us. 
if not opt.startswith(cls.PYTHON_GAPIC_PREFIX): @@ -93,16 +97,17 @@ def build(cls, opt_string: str) -> 'Options': answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), - templates=tuple(os.path.expanduser(i) for i in templates), + retry=json.loads(str(opts.pop(cls.RETRY_OPT, 'null'))), sample_configs=tuple( cfg_path for s in sample_paths for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), + templates=tuple(os.path.expanduser(i) for i in templates), ) - # Note: if we ever need to recursively check directories for sample configs, - # check that at least _one_ config is read in. + # Note: if we ever need to recursively check directories for sample + # configs, check that at least _one_ config is read in. # If there are any options remaining, then we failed to recognize # them -- complain. diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 0aae0638f393..3eba0fcbb4ab 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -23,9 +23,12 @@ from itertools import chain from typing import Callable, Dict, FrozenSet, Mapping, Sequence, Set, Tuple +from google.api_core import exceptions # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 +import grpc # type: ignore + from gapic.generator import options from gapic.schema import metadata from gapic.schema import wrappers @@ -208,6 +211,7 @@ def build(cls, file_descriptor=fd, file_to_generate=fd.package.startswith(package), naming=naming, + opts=opts, prior_protos=protos, ).proto @@ -294,6 +298,7 @@ class _ProtoBuilder: def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, file_to_generate: bool, naming: api_naming.Naming, + opts: options.Options = options.Options(), prior_protos: Mapping[str, Proto] = None): self.proto_messages: Dict[str, wrappers.MessageType] = {} 
self.proto_enums: Dict[str, wrappers.EnumType] = {} @@ -301,6 +306,7 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, self.file_descriptor = file_descriptor self.file_to_generate = file_to_generate self.prior_protos = prior_protos or {} + self.opts = opts # Iterate over the documentation and place it into a dictionary. # @@ -494,15 +500,15 @@ def _get_fields(self, def _get_methods(self, methods: Sequence[descriptor_pb2.MethodDescriptorProto], - address: metadata.Address, path: Tuple[int, ...], + service_address: metadata.Address, path: Tuple[int, ...], ) -> Mapping[str, wrappers.Method]: """Return a dictionary of wrapped methods for the given service. Args: methods (Sequence[~.descriptor_pb2.MethodDescriptorProto]): A sequence of protobuf method objects. - address (~.metadata.Address): An address object denoting the - location of these methods. + service_address (~.metadata.Address): An address object for the + service, denoting the location of these methods. path (Tuple[int]): The source location path thus far, as understood by ``SourceCodeInfo.Location``. @@ -526,24 +532,78 @@ def _get_methods(self, 'metadata type.', ) lro = wrappers.OperationInfo( - response_type=self.api_messages[address.resolve( + response_type=self.api_messages[service_address.resolve( op.response_type, )], - metadata_type=self.api_messages[address.resolve( + metadata_type=self.api_messages[service_address.resolve( op.metadata_type, )], ) + # If we got a gRPC service config, get the appropriate retry + # and timeout information from it. + retry = None + timeout = None + + # This object should be a dictionary that conforms to the + # gRPC service config proto: + # Repo: https://github.com/grpc/grpc-proto/ + # Filename: grpc/service_config/service_config.proto + # + # We only care about a small piece, so we are just leaving + # it as a dictionary and parsing accordingly. 
+ if self.opts.retry: + # The gRPC service config uses a repeated `name` field + # with a particular format, which we match against. + # This defines the expected selector for *this* method. + selector = { + 'service': '{package}.{service_name}'.format( + package='.'.join(service_address.package), + service_name=service_address.name, + ), + 'method': meth_pb.name, + } + + # Find the method config that applies to us, if any. + mc = next((i for i in self.opts.retry.get('methodConfig', []) + if selector in i.get('name')), None) + if mc: + # Set the timeout according to this method config. + if mc.get('timeout'): + timeout = self._to_float(mc['timeout']) + + # Set the retry according to this method config. + if 'retryPolicy' in mc: + r = mc['retryPolicy'] + retry = wrappers.RetryInfo( + max_attempts=r.get('maxAttempts', 0), + initial_backoff=self._to_float( + r.get('initialBackoff', '0s'), + ), + max_backoff=self._to_float( + r.get('maxBackoff', '0s'), + ), + backoff_multiplier=r.get('backoffMultiplier', 0.0), + retryable_exceptions=frozenset( + exceptions.exception_class_for_grpc_status( + getattr(grpc.StatusCode, code), + ) + for code in r.get('retryableStatusCodes', []) + ), + ) + # Create the method wrapper object. answer[meth_pb.name] = wrappers.Method( input=self.api_messages[meth_pb.input_type.lstrip('.')], lro=lro, method_pb=meth_pb, meta=metadata.Metadata( - address=address.child(meth_pb.name, path + (i,)), + address=service_address.child(meth_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), ), output=self.api_messages[meth_pb.output_type.lstrip('.')], + retry=retry, + timeout=timeout, ) # Done; return the answer. @@ -645,7 +705,7 @@ def _load_service(self, # Put together a dictionary of the service's methods. 
methods = self._get_methods( service.method, - address=address, + service_address=address, path=path + (2,), ) @@ -659,3 +719,7 @@ def _load_service(self, service_pb=service, ) return self.proto_services[address.proto] + + def _to_float(self, s: str) -> float: + """Convert a protobuf duration string (e.g. `"30s"`) to float.""" + return int(s[:-1]) / 1e9 if s.endswith('n') else float(s[:-1]) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 5895150f3d9e..79dca4609507 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -37,6 +37,7 @@ from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 +from google.api_core import exceptions # type: ignore from google.protobuf import descriptor_pb2 from gapic import utils @@ -422,6 +423,16 @@ class OperationInfo: metadata_type: MessageType +@dataclasses.dataclass(frozen=True) +class RetryInfo: + """Representation of the method's retry behavior.""" + max_attempts: int + initial_backoff: float + max_backoff: float + backoff_multiplier: float + retryable_exceptions: FrozenSet[exceptions.GoogleAPICallError] + + @dataclasses.dataclass(frozen=True) class Method: """Description of a method (defined with the ``rpc`` keyword).""" @@ -429,6 +440,8 @@ class Method: input: MessageType output: MessageType lro: Optional[OperationInfo] = dataclasses.field(default=None) + retry: Optional[RetryInfo] = dataclasses.field(default=None) + timeout: Optional[float] = None meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 97ec9376a179..d21758be58c2 100644 --- 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -143,16 +143,21 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # and friendly error handling. rpc = gapic_v1.method.wrap_method( self._transport.{{ method.name|snake_case }}, - default_retry=retries.Retry(predicate=retries.if_exception_type( - {%- if method.idempotent %} - exceptions.Aborted, - {%- endif %} - exceptions.ServiceUnavailable, - {%- if method.idempotent %} - exceptions.Unknown, - {%- endif %} - )), - default_timeout=None, + {%- if method.retry %} + default_retry=retries.Retry( + {%- if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} + {%- if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} + {%- if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + predicate=retries.if_exception_type( + {%- filter sort_lines %} + {%- for ex in method.retry.retryable_exceptions %} + exceptions.{{ ex.__name__ }}, + {%- endfor %} + {%- endfilter %} + ), + ), + {%- endif %} + default_timeout={{ method.timeout }}, client_info=_client_info, ) {%- if method.field_headers %} diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f24e6f51cd5f..35c39e826d91 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -42,7 +42,9 @@ include_package_data=True, install_requires=( 'click >= 6.7', + 'google-api-core >= 1.14.3', 'googleapis-common-protos >= 1.6.0', + 'grpcio >= 1.24.3', 'jinja2 >= 2.10', 'protobuf >= 3.7.1', 'pypandoc >= 1.4', diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 5acb3e3df42a..bf0284294925 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ 
b/packages/gapic-generator/tests/system/test_retry.py @@ -12,53 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -import time -from unittest import mock - import pytest -from google import showcase_v1beta1 from google.api_core import exceptions from google.rpc import code_pb2 -def test_retry_nonidempotent(echo): - # Define our error and OK responses. - err = exceptions.ServiceUnavailable(message='whups') - ok = showcase_v1beta1.EchoResponse(content='foo') - server = mock.Mock(side_effect=(err, err, ok)) - - # Mock the transport to send back the error responses followed by a - # success response. - transport = type(echo).get_transport_class() - with mock.patch.object(transport, 'echo', - new_callable=mock.PropertyMock(return_value=server)): - with mock.patch.object(time, 'sleep'): - response = echo.echo({'content': 'bar'}) - assert response.content == 'foo' - assert server.call_count == 3 - - -def test_retry_idempotent(identity): - # Define our error and OK responses. - err409 = exceptions.Aborted(message='derp de derp') - err503 = exceptions.ServiceUnavailable(message='whups') - errwtf = exceptions.Unknown(message='huh?') - ok = showcase_v1beta1.User(name='users/0', display_name='Guido') - server = mock.Mock(side_effect=(err409, err503, errwtf, ok)) - - # Mock the transport to send back the error responses followed by a - # success response. 
- transport = type(identity).get_transport_class() - with mock.patch.object(transport, 'get_user', - new_callable=mock.PropertyMock(return_value=server)): - with mock.patch.object(time, 'sleep'): - response = identity.get_user({'name': 'users/0'}) - assert response.name == 'users/0' - assert response.display_name == 'Guido' - assert server.call_count == 4 - - def test_retry_bubble(echo): with pytest.raises(exceptions.DeadlineExceeded): echo.echo({ diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 6cd9dbe9791e..1aaa02fd588f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -17,9 +17,11 @@ import pytest +from google.api_core import exceptions from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 +from gapic.generator import options from gapic.schema import api from gapic.schema import imp from gapic.schema import naming @@ -344,6 +346,25 @@ def test_messages_nested(): def test_services(): L = descriptor_pb2.SourceCodeInfo.Location + # Make a silly helper method to not repeat some of the structure. + def _n(method_name: str): + return { + 'service': 'google.example.v2.FooService', + 'method': method_name, + } + + # Set up retry information. + opts = options.Options(retry={'methodConfig': [ + {'name': [_n('TimeoutableGetFoo')], 'timeout': '30s'}, + {'name': [_n('RetryableGetFoo')], 'retryPolicy': { + 'maxAttempts': 3, + 'initialBackoff': '%dn' % 1e6, + 'maxBackoff': '60s', + 'backoffMultiplier': 1.5, + 'retryableStatusCodes': ['UNAVAILABLE', 'ABORTED'], + }}, + ]}) + # Set up messages for our RPC. request_message_pb = make_message_pb2(name='GetFooRequest', fields=(make_field_pb2(name='name', type=9, number=1),) @@ -353,11 +374,23 @@ def test_services(): # Set up the service with an RPC. 
service_pb = descriptor_pb2.ServiceDescriptorProto( name='FooService', - method=(descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v2.GetFooRequest', - output_type='google.example.v2.GetFooResponse', - ),), + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v2.GetFooRequest', + output_type='google.example.v2.GetFooResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='TimeoutableGetFoo', + input_type='google.example.v2.GetFooRequest', + output_type='google.example.v2.GetFooResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='RetryableGetFoo', + input_type='google.example.v2.GetFooRequest', + output_type='google.example.v2.GetFooResponse', + ), + ), ) # Fake-document our fake stuff. @@ -370,6 +403,7 @@ def test_services(): # Finally, set up the file that encompasses these. fdp = make_file_pb2( + name='test.proto', package='google.example.v2', messages=(request_message_pb, response_message_pb), services=(service_pb,), @@ -377,14 +411,18 @@ def test_services(): ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) + proto = api.API.build( + [fdp], + 'google.example.v2', + opts=opts, + ).protos['test.proto'] # Establish that our data looks correct. assert len(proto.services) == 1 assert len(proto.messages) == 2 service = proto.services['google.example.v2.FooService'] assert service.meta.doc == 'This is the FooService service.' - assert len(service.methods) == 1 + assert len(service.methods) == 3 method = service.methods['GetFoo'] assert method.meta.doc == 'This is the GetFoo method.' assert isinstance(method.input, wrappers.MessageType) @@ -393,6 +431,26 @@ def test_services(): assert method.input.meta.doc == 'This is the GetFooRequest message.' assert method.output.name == 'GetFooResponse' assert method.output.meta.doc == 'This is the GetFooResponse message.' 
+ assert not method.timeout + assert not method.retry + + # Establish that the retry information on a timeout-able method also + # looks correct. + timeout_method = service.methods['TimeoutableGetFoo'] + assert timeout_method.timeout == pytest.approx(30.0) + assert not timeout_method.retry + + # Establish that the retry information on the retryable method also + # looks correct. + retry_method = service.methods['RetryableGetFoo'] + assert retry_method.timeout is None + assert retry_method.retry.max_attempts == 3 + assert retry_method.retry.initial_backoff == pytest.approx(0.001) + assert retry_method.retry.backoff_multiplier == pytest.approx(1.5) + assert retry_method.retry.max_backoff == pytest.approx(60.0) + assert retry_method.retry.retryable_exceptions == { + exceptions.ServiceUnavailable, exceptions.Aborted, + } def test_prior_protos(): From d324ff2739ca7acbf9ec6541f9b88c7d4c869776 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 8 Nov 2019 10:58:47 -0800 Subject: [PATCH 0172/1339] Raw page (#238) Implementation for #237 --- .../$sub/services/$service/pagers.py.j2 | 20 ++++---- .../$name_$version/$sub/types/_message.py.j2 | 6 +++ .../$name_$version/$sub/test_$service.py.j2 | 49 ++++++++++++++++++- .../tests/system/test_pagination.py | 22 ++++++++- 4 files changed, 83 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 index 4efcdc0b465e..0e7ef018a775 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 @@ -56,19 +56,17 @@ class {{ method.name }}Pager: def __getattr__(self, name: str) -> Any: return getattr(self._response, name) - def __iter__(self) -> {{ method.paged_result_field.ident | 
replace('Sequence', 'Iterable') }}: - while True: - # Iterate through the results on this response. - for result in self._response.{{ method.paged_result_field.name }}: - yield result - - # Sanity check: Is this the last page? If so, we are done. - if not self._response.next_page_token: - break - - # Get the next page. + @property + def pages(self) -> Iterable[{{ method.output.ident }}]: + yield self._response + while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request) + yield self._response + + def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: + for page in self.pages: + yield from page.{{ method.paged_result_field.name }} def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 index c9e464e117e0..3afab165ad87 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 @@ -24,6 +24,12 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {% endfor -%} + {% if "next_page_token" in message.fields.values()|map(attribute='name') %} + @property + def raw_page(self): + return self + {% endif %} + {# Iterate over fields. 
-#} {% for field in message.fields.values() -%} {% if field.map -%} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 index ce9203007fad..8804d86ea414 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -211,8 +211,53 @@ def test_{{ method.name|snake_case }}_pager(): )] assert len(results) == 6 assert all([isinstance(i, {{ method.paged_result_field.message.ident }}) - for i in results]) -{% endif %} {#- method.paged_response_field #} + for i in results]) + +def test_{{ method.name|snake_case }}_pages(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + ), + RuntimeError, + ) + pages = list(client.{{ method.name|snake_case }}(request={}).pages) + for page, token in zip(pages, ['abc','def','ghi', '']): + assert page.raw_page.next_page_token == token +{% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} +def test_{{ method.name|snake_case }}_raw_page_lro(): + response = {{ method.lro.response_type.ident }}() + assert response.raw_page is response +{% endif %} {#- method.paged_result_field #} {% endfor -%} {#- method in methods #} diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index a0f316a6dd17..781614cad466 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -16,7 +16,7 @@ def test_pagination(echo): - text = 'The rain in Wales falls mainly on the snails.' + text = 'The hail in Wales falls mainly on the snails.' 
results = [i for i in echo.paged_expand({ 'content': text, 'page_size': 3, @@ -24,3 +24,23 @@ def test_pagination(echo): assert len(results) == 9 assert results == [showcase.EchoResponse(content=i) for i in text.split(' ')] + + +def test_pagination_pages(echo): + text = "The hail in Wales falls mainly on the snails." + page_results = list(echo.paged_expand({ + 'content': text, + 'page_size': 3, + }).pages) + + assert len(page_results) == 3 + assert not page_results[-1].next_page_token + + # The monolithic surface uses a wrapper type that needs an explicit property + # for a 'raw_page': we need to duplicate that interface, even though the + # architecture is different. + assert page_results[0].raw_page is page_results[0] + + results = [r for p in page_results for r in p.responses] + assert results == [showcase.EchoResponse(content=i) + for i in text.split(' ')] From cf9863bfe2bc65314670b1fd49d30dc265c133d9 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 11 Nov 2019 10:09:41 -0800 Subject: [PATCH 0173/1339] Test and impl for only generating standalone samples if specified (#240) Some languages generate in-code samples, and some sample configs should only be generated in-code. The Python microgenerator currently does not support in-code samples, so it must filter out sample configs that are in-code only. 
--- .../gapic/generator/generator.py | 5 + .../tests/unit/generator/test_generator.py | 116 ++++++++++++++++++ 2 files changed, 121 insertions(+) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 98dcb7507af5..26749ce5c4b7 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -124,6 +124,7 @@ def _generate_samples_and_manifest( id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict( dict) + STANDALONE_TYPE = "standalone" for config_fpath in self._sample_configs: with open(config_fpath) as f: configs = yaml.safe_load_all(f.read()) @@ -132,6 +133,10 @@ def _generate_samples_and_manifest( spec for cfg in configs if is_valid_sample_cfg(cfg) for spec in cfg.get("samples", []) + # If unspecified, assume a sample config describes a standalone. + # If sample_types are specified, standalone samples must be + # explicitly enabled. + if STANDALONE_TYPE in spec.get("sample_type", [STANDALONE_TYPE]) ) for spec in spec_generator: diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 32d48315edcf..dfa566e41ab0 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -499,6 +499,122 @@ def test_generator_duplicate_samples(fs): generator.get_response(api_schema=api_schema) +@mock.patch( + 'gapic.samplegen.samplegen.generate_sample', + return_value='' +) +@mock.patch( + 'time.gmtime', +) +def test_dont_generate_in_code_samples( + mock_gmtime, + mock_generate_sample, + fs +): + # These time values are nothing special, + # they just need to be deterministic. 
+ returner = mock.MagicMock() + returner.tm_year = 2112 + returner.tm_mon = 6 + returner.tm_mday = 1 + returner.tm_hour = 13 + returner.tm_min = 13 + returner.tm_sec = 13 + mock_gmtime.return_value = returner + + config_fpath = "samples.yaml" + fs.create_file( + config_fpath, + contents=dedent( + ''' + type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto + schema_version: 1.2.0 + samples: + - id: squid_sample + rpc: IdentifyMollusc + service: Mollusc.v1.Mollusc + sample_type: + - standalone + - incode/SQUID + - id: clam_sample + rpc: IdentifyMollusc + service: Mollusc.v1.Mollusc + sample_type: + - incode/CLAM + - id: whelk_sample + rpc: IdentifyMollusc + service: Mollusc.v1.Mollusc + sample_type: + - standalone + - id: octopus_sample + rpc: IdentifyMollusc + service: Mollusc.v1.Mollusc + ''' + ) + ) + + generator = make_generator(f'samples={config_fpath}') + generator._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + api_schema = make_api( + make_proto( + descriptor_pb2.FileDescriptorProto( + name='mollusc.proto', + package='Mollusc.v1', + service=[descriptor_pb2.ServiceDescriptorProto( + name='Mollusc')], + ), + ), + naming=naming.Naming(name='Mollusc', version='v6'), + ) + + # Note that we do NOT expect a clam sample. + # There are four tests going on: + # 1) Just an explicit standalone sample type. + # 2) Multiple sample types, one of which is standalone. + # 3) Explicit sample types but NO standalone sample type. + # 4) Implicit standalone sample type. 
+ expected = CodeGeneratorResponse( + file=[ + CodeGeneratorResponse.File( + name="samples/squid_sample.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/whelk_sample.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/octopus_sample.py", + content="\n", + ), + CodeGeneratorResponse.File( + name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", + content=dedent(""" --- + type: manifest/samples + schema_version: 3 + python: &python + environment: python + bin: python3 + base_path: samples + invocation: \'{bin} {path} @args\' + samples: + - <<: *python + sample: squid_sample + path: \'{base_path}/squid_sample.py\' + - <<: *python + sample: whelk_sample + path: \'{base_path}/whelk_sample.py\' + - <<: *python + sample: octopus_sample + path: \'{base_path}/octopus_sample.py\' + """) + ) + ] + ) + actual = generator.get_response(api_schema=api_schema) + assert actual == expected + + def make_generator(opts_str: str = '') -> generator.Generator: return generator.Generator(options.Options.build(opts_str)) From 5fc0635f995487c46a9c0edca71d7108ebe4994a Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 14 Nov 2019 12:46:05 -0800 Subject: [PATCH 0174/1339] Fixes for reading service config (#239) Test and impl for correctly reading and parsing a service config. Note: there is currently no end to end test for guaranteeing that the values from the config wind up modifying the relevant methods. This test is WIP. 
--- .../gapic/generator/options.py | 9 ++- .../$sub/services/$service/client.py.j2 | 6 +- .../tests/unit/generator/test_options.py | 61 +++++++++++++++++++ 3 files changed, 72 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 2ed3d827e1bc..c95ac62a5e8e 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -92,12 +92,19 @@ def build(cls, opt_string: str) -> 'Options': os.path.join(os.path.dirname(__file__), '..', 'templates'), ) + retry_cfg = None + retry_paths = opts.pop(cls.RETRY_OPT, None) + if retry_paths: + # Just use the last config specified. + with open(retry_paths[-1]) as f: + retry_cfg = json.load(f) + # Build the options instance. sample_paths = opts.pop(cls.SAMPLES_OPT, []) answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), - retry=json.loads(str(opts.pop(cls.RETRY_OPT, 'null'))), + retry=retry_cfg, sample_configs=tuple( cfg_path for s in sample_paths diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index d21758be58c2..82f7c81350a9 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -145,9 +145,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._transport.{{ method.name|snake_case }}, {%- if method.retry %} default_retry=retries.Retry( - {%- if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} - {%- if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} - {%- if method.retry.backoff_multiplier 
%}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} + {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} + {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( {%- filter sort_lines %} {%- for ex in method.retry.retryable_exceptions %} diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 6cb1de1f67e1..0320e1013b80 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -54,3 +54,64 @@ def test_options_no_valid_sample_config(fs): fs.create_file("sampledir/not_a_config.yaml") with pytest.raises(types.InvalidConfig): options.Options.build("samples=sampledir/") + + +def test_options_service_config(fs): + opts = options.Options.build("") + assert opts.retry is None + + # Default of None is okay, verify build can read a config. + service_config_fpath = "service_config.json" + fs.create_file(service_config_fpath, contents="""{ + "methodConfig": [ + { + "name": [ + { + "service": "animalia.mollusca.v1beta1.Cephalopod", + "method": "IdentifySquid" + } + ], + "retryPolicy": { + "maxAttempts": 5, + "maxBackoff": "3s", + "initialBackoff": "0.2s", + "backoffMultiplier": 2, + "retryableStatusCodes": [ + "UNAVAILABLE", + "UNKNOWN" + ] + }, + "timeout": "5s" + } + ] + }""") + + opt_string = f"retry-config={service_config_fpath}" + opts = options.Options.build(opt_string) + + # Verify the config was read in correctly. 
+ expected_cfg = { + "methodConfig": [ + { + "name": [ + { + "service": "animalia.mollusca.v1beta1.Cephalopod", + "method": "IdentifySquid", + } + ], + "retryPolicy": { + "maxAttempts": 5, + "maxBackoff": "3s", + "initialBackoff": "0.2s", + "backoffMultiplier": 2, + "retryableStatusCodes": + [ + "UNAVAILABLE", + "UNKNOWN" + ] + }, + "timeout":"5s" + } + ] + } + assert opts.retry == expected_cfg From 4cd8f5bf9f74eaf33228e0aa111fb6a393ee8bac Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 14 Nov 2019 13:36:27 -0800 Subject: [PATCH 0175/1339] Adopt legacy method flattening semantics (#245) Tests and implementation for #241 The python microgenerator transition consensus is to minimize breaking changes for the duration of the transition and revisit any breaking UI/UX improvements after the fact. As a result, the legacy flattening interface must be restored. Inspection of the monolith parameter transformer and template reveal that the legacy interface only flattens the fields of the request object one level deep; required fields are used first as positional parameters, followed by optional fields used as optional parameters. Includes changes to the client template, client unit test template, and new features and associated tests in the generator logic. The relevant source lines in the generator are https://github.com/googleapis/gapic-generator/blob/bcedba65bf930d3e35530fe5360f1c6f24d27abc/src/main/resources/com/google/api/codegen/py/main.snip#L321 and https://github.com/googleapis/gapic-generator/blob/bcedba65bf930d3e35530fe5360f1c6f24d27abc/src/main/java/com/google/api/codegen/transformer/py/PythonApiMethodParamTransformer.java#L43 . 
--- .../gapic-generator/gapic/schema/wrappers.py | 58 +++++-- .../$sub/services/$service/client.py.j2 | 27 ++-- .../$name_$version/$sub/test_$service.py.j2 | 150 +++++++++--------- .../tests/system/test_grpc_lro.py | 12 +- .../tests/system/test_grpc_streams.py | 6 +- .../tests/system/test_grpc_unary.py | 33 ++-- .../tests/system/test_pagination.py | 16 +- .../tests/system/test_resource_crud.py | 36 ++--- .../tests/system/test_retry.py | 6 +- .../tests/unit/schema/wrappers/test_method.py | 66 +++++++- .../unit/schema/wrappers/test_service.py | 35 ++-- 11 files changed, 262 insertions(+), 183 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 79dca4609507..c0addec593eb 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -32,7 +32,7 @@ import re from itertools import chain from typing import (cast, Dict, FrozenSet, List, Mapping, Optional, - Sequence, Set, Union) + Sequence, Set, Union) from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 @@ -185,7 +185,8 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': ``Field`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, + return dataclasses.replace( + self, message=self.message.with_context( collisions=collisions, skip_fields=True, @@ -230,7 +231,7 @@ def ident(self) -> metadata.Address: return self.meta.address def get_field(self, *field_path: str, - collisions: FrozenSet[str] = frozenset()) -> Field: + collisions: FrozenSet[str] = frozenset()) -> Field: """Return a field arbitrarily deep in this message's structure. 
This method recursively traverses the message tree to return the @@ -288,8 +289,8 @@ def get_field(self, *field_path: str, return cursor.message.get_field(*field_path[1:], collisions=collisions) def with_context(self, *, - collisions: FrozenSet[str], - skip_fields: bool = False, + collisions: FrozenSet[str], + skip_fields: bool = False, ) -> 'MessageType': """Return a derivative of this message with the provided context. @@ -301,7 +302,8 @@ def with_context(self, *, underlying fields. This provides for an "exit" in the case of circular references. """ - return dataclasses.replace(self, + return dataclasses.replace( + self, fields=collections.OrderedDict([ (k, v.with_context(collisions=collisions)) for k, v in self.fields.items() @@ -354,9 +356,10 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'EnumType': ``EnumType`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, + return dataclasses.replace( + self, meta=self.meta.with_context(collisions=collisions), - ) + ) @dataclasses.dataclass(frozen=True) @@ -540,6 +543,15 @@ def flattened_fields(self) -> Mapping[str, Field]: # Done; return the flattened fields return answer + @utils.cached_property + def legacy_flattened_fields(self) -> Mapping[str, Field]: + """Return the legacy flattening interface: top level fields only, + required fields first""" + required, optional = utils.partition(lambda f: f.required, + self.input.fields.values()) + return collections.OrderedDict((f.name, f) + for f in chain(required, optional)) + @property def grpc_stub_type(self) -> str: """Return the type of gRPC stub to use.""" @@ -583,8 +595,7 @@ def paged_result_field(self) -> Optional[Field]: # We found no repeated fields. Return None. 
return None - @utils.cached_property - def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: + def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, EnumType]]: """Return types referenced by this method.""" # Begin with the input (request) and output (response) messages. answer = [self.input] @@ -596,7 +607,8 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: # # This entails adding the module for any field on the signature # unless the field is a primitive. - for field in self.flattened_fields.values(): + flattening = self.legacy_flattened_fields if use_legacy else self.flattened_fields + for field in flattening.values(): if field.message or field.enum: answer.append(field.type) @@ -609,6 +621,14 @@ def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: # Done; return the answer. return tuple(answer) + @utils.cached_property + def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: + return self._ref_types() + + @utils.cached_property + def ref_types_legacy(self) -> Sequence[Union[MessageType, EnumType]]: + return self._ref_types(use_legacy=True) + @property def void(self) -> bool: """Return True if this method has no return value, False otherwise.""" @@ -621,11 +641,12 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': ``Method`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, + return dataclasses.replace( + self, input=self.input.with_context(collisions=collisions), output=self.output.with_context(collisions=collisions), meta=self.meta.with_context(collisions=collisions), - ) + ) @dataclasses.dataclass(frozen=True) @@ -669,9 +690,11 @@ def oauth_scopes(self) -> Sequence[str]: Sequence[str]: A sequence of OAuth scopes. """ # Return the OAuth scopes, split on comma. 
- return tuple([i.strip() for i in - self.options.Extensions[client_pb2.oauth_scopes].split(',') - if i]) + return tuple( + i.strip() + for i in self.options.Extensions[client_pb2.oauth_scopes].split(',') + if i + ) @property def module_name(self) -> str: @@ -715,7 +738,8 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': ``Service`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, + return dataclasses.replace( + self, methods=collections.OrderedDict([ (k, v.with_context(collisions=collisions)) for k, v in self.methods.items() diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 82f7c81350a9..47f3edcf3e73 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -12,7 +12,7 @@ from google.auth import credentials # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} -{% for ref_type in method.ref_types -%} +{% for ref_type in method.ref_types_legacy -%} {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} @@ -87,10 +87,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, - request: {{ method.input.ident }} = None, *, - {% for field in method.flattened_fields.values() -%} + {% for field in method.legacy_flattened_fields.values() -%} + {% if field.required -%} + {{ field.name }}: {{ field.ident }}, + {% else -%} {{ field.name }}: {{ field.ident }} = None, + {% endif -%} {% endfor -%} + *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, 
str]] = (), @@ -120,23 +124,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ method.client_output.meta.doc|rst(width=72, indent=16) }} {%- endif %} """ - {% if method.flattened_fields -%} - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - {% endif -%} # Create or coerce a protobuf request object. - {% if method.flattened_fields -%} + {% if method.legacy_flattened_fields -%} # If we have keyword arguments corresponding to fields on the # request, apply these. {% endif -%} - request = {{ method.input.ident }}(request) - {%- for key, field in method.flattened_fields.items() %} - if {{ field.name }} is not None: - request.{{ key }} = {{ field.name }} + request = {{ method.input.ident }}() + {%- for field in method.legacy_flattened_fields.values() %} + request.{{ field.name }} = {{ field.name }} {%- endfor %} # Wrap the RPC method; this adds retry and timeout information, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 index 8804d86ea414..e3a5dd20244f 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -33,10 +33,6 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = {{ method.input.ident }}() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client._transport.{{ method.name|snake_case }}), @@ -55,12 +51,18 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {%- endfor %} ) {% endif -%} - response = client.{{ method.name|snake_case }}(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + response = client.{{ method.name|snake_case }}( + {% for field in method.legacy_flattened_fields.values() -%} + {{ field.name }}=None, + {% endfor -%} + ) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == {{ method.input.ident }}() # Establish that the response is the type that we expect. {% if method.void -%} @@ -83,24 +85,27 @@ def test_{{ method.name|snake_case }}_field_headers(): credentials=credentials.AnonymousCredentials(), ) - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}( - {%- for field_header in method.field_headers %} - {{ field_header }}='{{ field_header }}/value', - {%- endfor %} - ) - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client._transport.{{ method.name|snake_case }}), '__call__') as call: call.return_value = {{ method.output.ident }}() - response = client.{{ method.name|snake_case }}(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + response = client.{{ method.name|snake_case }}( + {%- for field_header in method.field_headers %} + {{ field_header }}='{{ field_header }}/value', + {%- endfor %} + ) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + request = {{ method.input.ident }}( + {%- for field_header in method.field_headers %} + {{ field_header }}='{{ field_header }}/value', + {%- endfor %} + ) assert args[0] == request # Establish that the field header was sent. @@ -114,58 +119,61 @@ def test_{{ method.name|snake_case }}_field_headers(): ) in kw['metadata'] {% endif %} {#- method.field_headers #} -{% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_flattened(): - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. - {% if method.void -%} - call.return_value = None - {% elif method.lro -%} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} - call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} - call.return_value = {{ method.output.ident }}() - {% endif %} - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.{{ method.name|snake_case }}( - {%- for key, field in method.flattened_fields.items() %} - {{ field.name }}={{ field.mock_value }}, - {%- endfor %} - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%} - assert args[0].{{ key }} == {{ field.mock_value }} - {% endfor %} - - -def test_{{ method.name|snake_case }}_flattened_error(): - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.{{ method.name|snake_case }}( - {{ method.input.ident }}(), - {%- for key, field in method.flattened_fields.items() %} - {{ field.name }}={{ field.mock_value }}, - {%- endfor %} - ) -{% endif %} {#- method.flattened_fields #} +{# NOTE: the backwards compatibility requirements of legacy flattening make these tests no longer relevant #} +{# If the legacy flattening is ever abandoned, these tests or something like them will be needed again. #} +{# #} +{# {% if method.flattened_fields %} #} +{# def test_{{ method.name|snake_case }}_flattened(): #} +{# client = {{ service.client_name }}( #} +{# credentials=credentials.AnonymousCredentials(), #} +{# ) #} + +{# # Mock the actual call within the gRPC stub, and fake the request. #} +{# with mock.patch.object( #} +{# type(client._transport.{{ method.name|snake_case }}), #} +{# '__call__') as call: #} +{# # Designate an appropriate return value for the call. #} +{# {% if method.void -%} #} +{# call.return_value = None #} +{# {% elif method.lro -%} #} +{# call.return_value = operations_pb2.Operation(name='operations/op') #} +{# {% elif method.server_streaming -%} #} +{# call.return_value = iter([{{ method.output.ident }}()]) #} +{# {% else -%} #} +{# call.return_value = {{ method.output.ident }}() #} +{# {% endif %} #} +{# # Call the method with a truthy value for each flattened field, #} +{# # using the keyword arguments to the method. #} +{# response = client.{{ method.name|snake_case }}( #} +{# {%- for key, field in method.flattened_fields.items() %} #} +{# {{ field.name }}={{ field.mock_value }}, #} +{# {%- endfor %} #} +{# ) #} + +{# # Establish that the underlying call was made with the expected #} +{# # request object values. 
#} +{# assert len(call.mock_calls) == 1 #} +{# _, args, _ = call.mock_calls[0] #} +{# {% for key, field in method.flattened_fields.items() -%} #} +{# assert args[0].{{ key }} == {{ field.mock_value }} #} +{# {% endfor %} #} + + +{# def test_{{ method.name|snake_case }}_flattened_error(): #} +{# client = {{ service.client_name }}( #} +{# credentials=credentials.AnonymousCredentials(), #} +{# ) #} + +{# # Attempting to call a method with both a request object and flattened #} +{# # fields is an error. #} +{# with pytest.raises(ValueError): #} +{# client.{{ method.name|snake_case }}( #} +{# {{ method.input.ident }}(), #} +{# {%- for key, field in method.flattened_fields.items() %} #} +{# {{ field.name }}={{ field.mock_value }}, #} +{# {%- endfor %} #} +{# ) #} +{# {% endif %} {\#- method.flattened_fields #\} #} {% if method.paged_result_field %} @@ -206,9 +214,7 @@ def test_{{ method.name|snake_case }}_pager(): ), RuntimeError, ) - results = [i for i in client.{{ method.name|snake_case }}( - request={}, - )] + results = [i for i in client.{{ method.name|snake_case }}({% for field in method.legacy_flattened_fields.values() if field.required -%}None,{% endfor -%})] assert len(results) == 6 assert all([isinstance(i, {{ method.paged_result_field.message.ident }}) for i in results]) @@ -250,7 +256,7 @@ def test_{{ method.name|snake_case }}_pages(): ), RuntimeError, ) - pages = list(client.{{ method.name|snake_case }}(request={}).pages) + pages = list(client.{{ method.name|snake_case }}({% for field in method.legacy_flattened_fields.values() if field.required -%}None,{% endfor -%}).pages) for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} @@ -308,7 +314,7 @@ def test_{{ service.name|snake_case }}_base_transport(): ) for method in methods: with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + 
getattr(transport, method)() {% if service.has_lro -%} # Additionally, the LRO client (a property) should diff --git a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py index c0e07f9a5d7f..392dbd1b98b7 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -18,12 +18,12 @@ def test_lro(echo): - wait_request = { - 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), - 'success': {'content': 'The hail in Wales falls mainly ' - 'on the snails...eventually.'}, - } - future = echo.wait(wait_request) + future = echo.wait( + end_time=datetime.now(tz=timezone.utc) + timedelta(seconds=1), + success={ + 'content': 'The hail in Wales falls mainly on the snails...eventually.' + } + ) response = future.result() assert isinstance(response, showcase_v1beta1.WaitResponse) assert response.content.endswith('the snails...eventually.') diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index 07b2e4302625..f22ba0f055f8 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -15,9 +15,9 @@ def test_unary_stream(echo): content = 'The hail in Wales falls mainly on the snails.' - responses = echo.expand({ - 'content': content, - }) + responses = echo.expand( + content=content, + ) # Consume the response and ensure it matches what we expect. 
# with pytest.raises(exceptions.NotFound) as exc: diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index c42765aa66f2..8ac723e93d84 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -20,27 +20,28 @@ from google import showcase -def test_unary_with_request_object(echo): - response = echo.echo(showcase.EchoRequest( - content='The hail in Wales falls mainly on the snails.', - )) - assert response.content == 'The hail in Wales falls mainly on the snails.' +def test_unary(echo): + content = 'The hail in Wales falls mainly on the snails.' + response = echo.echo( + content=content, + ) + assert response.content == content -def test_unary_with_dict(echo): - response = echo.echo({ - 'content': 'The hail in Wales falls mainly on the snails.', - }) - assert response.content == 'The hail in Wales falls mainly on the snails.' +def test_unary_positional(echo): + content = 'The hail in Wales falls mainly on the snails.' + response = echo.echo(content,) + assert response.content == content def test_unary_error(echo): + message = 'Bad things! Bad things!' with pytest.raises(exceptions.InvalidArgument) as exc: - echo.echo({ - 'error': { + echo.echo( + error={ 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), - 'message': 'Bad things! Bad things!', + 'message': message, }, - }) - assert exc.value.code == 400 - assert exc.value.message == 'Bad things! Bad things!' 
+ ) + assert exc.value.code == 400 + assert exc.value.message == message diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index 781614cad466..807cbb6f5aa7 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -17,10 +17,10 @@ def test_pagination(echo): text = 'The hail in Wales falls mainly on the snails.' - results = [i for i in echo.paged_expand({ - 'content': text, - 'page_size': 3, - })] + results = [i for i in echo.paged_expand( + content=text, + page_size=3, + )] assert len(results) == 9 assert results == [showcase.EchoResponse(content=i) for i in text.split(' ')] @@ -28,10 +28,10 @@ def test_pagination(echo): def test_pagination_pages(echo): text = "The hail in Wales falls mainly on the snails." - page_results = list(echo.paged_expand({ - 'content': text, - 'page_size': 3, - }).pages) + page_results = list(echo.paged_expand( + content=text, + page_size=3, + ).pages) assert len(page_results) == 3 assert not page_results[-1].next_page_token diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 67a8d50e9273..5f7f7d106017 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -14,34 +14,32 @@ def test_crud_with_request(identity): - count = len(identity.list_users({}).users) - user = identity.create_user({'user': { + count = len(identity.list_users().users) + user = identity.create_user(user={ 'display_name': 'Guido van Rossum', 'email': 'guido@guido.fake', - }}) + }) try: assert user.display_name == 'Guido van Rossum' assert user.email == 'guido@guido.fake' - assert len(identity.list_users({}).users) == count + 1 - assert identity.get_user({ - 'name': user.name, - }).display_name == 'Guido van Rossum' + assert 
len(identity.list_users().users) == count + 1 + assert identity.get_user( + name=user.name + ).display_name == 'Guido van Rossum' finally: - identity.delete_user({'name': user.name}) + identity.delete_user(name=user.name) -def test_crud_flattened(identity): - count = len(identity.list_users({}).users) - user = identity.create_user( - display_name='Monty Python', - email='monty@python.org', - ) +def test_crud_positional(identity): + count = len(identity.list_users().users) + user = identity.create_user({ + 'display_name': 'Monty Python', + 'email': 'monty@python.org', + }) try: assert user.display_name == 'Monty Python' assert user.email == 'monty@python.org' - assert len(identity.list_users({}).users) == count + 1 - assert identity.get_user({ - 'name': user.name, - }).display_name == 'Monty Python' + assert len(identity.list_users().users) == count + 1 + assert identity.get_user(user.name).display_name == 'Monty Python' finally: - identity.delete_user({'name': user.name}) + identity.delete_user(user.name) diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index bf0284294925..588ae1170513 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -20,9 +20,9 @@ def test_retry_bubble(echo): with pytest.raises(exceptions.DeadlineExceeded): - echo.echo({ - 'error': { + echo.echo( + error={ 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), 'message': 'This took longer than you said it should.', }, - }) + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 4b39b669cb68..0195a90886aa 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 from google.api import client_pb2 +from 
google.api import field_behavior_pb2 from google.api import http_pb2 from google.protobuf import descriptor_pb2 @@ -70,8 +71,8 @@ def test_method_client_output_paged(): make_field(name='next_page_token', type=9), # str )) method = make_method('ListFoos', - input_message=input_msg, - output_message=output_msg, + input_message=input_msg, + output_message=output_msg, ) assert method.paged_result_field == paged assert method.client_output.ident.name == 'ListFoosPager' @@ -89,8 +90,8 @@ def test_method_paged_result_field_not_first(): paged, )) method = make_method('ListFoos', - input_message=input_msg, - output_message=output_msg, + input_message=input_msg, + output_message=output_msg, ) assert method.paged_result_field == paged @@ -106,8 +107,8 @@ def test_method_paged_result_field_no_page_field(): make_field(name='next_page_token', type=9), # str )) method = make_method('ListFoos', - input_message=input_msg, - output_message=output_msg, + input_message=input_msg, + output_message=output_msg, ) assert method.paged_result_field is None @@ -178,6 +179,57 @@ def test_method_ignored_flattened_fields(): assert len(method.flattened_fields) == 0 +def test_method_legacy_flattened_fields(): + required_options = descriptor_pb2.FieldOptions() + required_options.Extensions[field_behavior_pb2.field_behavior].append( + field_behavior_pb2.FieldBehavior.Value("REQUIRED")) + + # Cephalopods are required. + squid = make_field(name="squid", options=required_options) + octopus = make_field( + name="octopus", + message=make_message( + name="Octopus", + fields=[make_field(name="mass", options=required_options)] + ), + options=required_options) + + # Bivalves are optional. + clam = make_field(name="clam") + oyster = make_field( + name="oyster", + message=make_message( + name="Oyster", + fields=[make_field(name="has_pearl")] + ) + ) + + # Interleave required and optional fields to make sure + # that, in the legacy flattening, required fields are always first. 
+ request = make_message("request", fields=[squid, clam, octopus, oyster]) + + method = make_method( + name="CreateMolluscs", + input_message=request, + # Signatures should be ignored. + signatures=[ + "squid,octopus.mass", + "squid,octopus,oyster.has_pearl" + ] + ) + + # Use an ordered dict because ordering is important: + # required fields should come first. + expected = collections.OrderedDict([ + ("squid", squid), + ("octopus", octopus), + ("clam", clam), + ("oyster", oyster) + ]) + + assert method.legacy_flattened_fields == expected + + def make_method( name: str, input_message: wrappers.MessageType = None, output_message: wrappers.MessageType = None, @@ -222,7 +274,7 @@ def make_method( def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - fields: Sequence[wrappers.Field] = (), + fields: Sequence[wrappers.Field] = (), ) -> wrappers.MessageType: message_pb = descriptor_pb2.DescriptorProto( name=name, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index dc7ea0ecd954..da7d418eb331 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -79,9 +79,11 @@ def test_service_python_modules(): get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), )) - imports = set() - for m in service.methods.values(): - imports = imports.union({i.ident.python_import for i in m.ref_types}) + imports = { + i.ident.python_import + for m in service.methods.values() + for i in m.ref_types_legacy + } assert imports == { imp.Import(package=('a', 'b', 'v1'), module='c'), imp.Import(package=('foo',), module='bacon'), @@ -163,11 +165,12 @@ def make_service(name: str = 'Placeholder', host: str = '', # FIXME (lukesneeringer): This test method is convoluted and it makes these # tests 
difficult to understand and maintain. -def make_service_with_method_options(*, - http_rule: http_pb2.HttpRule = None, - method_signature: str = '', - in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = () - ) -> wrappers.Service: +def make_service_with_method_options( + *, + http_rule: http_pb2.HttpRule = None, + method_signature: str = '', + in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = () +) -> wrappers.Service: # Declare a method with options enabled for long-running operations and # field headers. method = get_method( @@ -192,13 +195,13 @@ def make_service_with_method_options(*, def get_method(name: str, - in_type: str, - out_type: str, - lro_response_type: str = '', - lro_metadata_type: str = '', *, - in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), - http_rule: http_pb2.HttpRule = None, - method_signature: str = '', + in_type: str, + out_type: str, + lro_response_type: str = '', + lro_metadata_type: str = '', *, + in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), + http_rule: http_pb2.HttpRule = None, + method_signature: str = '', ) -> wrappers.Method: input_ = get_message(in_type, fields=in_fields) output = get_message(out_type) @@ -231,7 +234,7 @@ def get_method(name: str, def get_message(dot_path: str, *, - fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), + fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), ) -> wrappers.MessageType: # Pass explicit None through (for lro_metadata). 
if dot_path is None: From dba27a951d175fe16a46135caab4171ad3f98693 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 15 Nov 2019 14:25:20 -0800 Subject: [PATCH 0176/1339] Generated test and impl for client factory function (#246) --- .../$sub/services/$service/client.py.j2 | 26 ++++++++++++++++++- .../$name_$version/$sub/test_$service.py.j2 | 12 +++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 index 47f3edcf3e73..58bead3c462e 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 @@ -9,7 +9,8 @@ from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore - +from google.oauth2 import service_account # type: ignore + {% filter sort_lines -%} {% for method in service.methods.values() -%} {% for ref_type in method.ref_types_legacy -%} @@ -54,6 +55,29 @@ class {{ service.client_name }}Meta(type): class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """{{ service.meta.doc|rst(width=72, indent=4) }}""" + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 index e3a5dd20244f..466f20155a51 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 @@ -11,6 +11,7 @@ import pytest {% filter sort_lines -%} from google import auth from google.auth import credentials +from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports {% if service.has_lro -%} @@ -26,6 +27,17 @@ from google.longrunning import operations_pb2 {% endfilter %} +def test_{{ service.client_name|snake_case }}_from_service_account_file(): + creds = credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = {{ service.client_name }}.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + {% for method in service.methods.values() -%} 
def test_{{ method.name|snake_case }}(transport: str = 'grpc'): client = {{ service.client_name }}( From 49b8d9ebe10f155a2cc606a80c1f39e3b9cbe728 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 18 Nov 2019 09:42:40 -0800 Subject: [PATCH 0177/1339] Replace every $ in all template file paths with % (#247) Unix utilities and editors attempt to expand any name beginning with $ as if it were a variable; if the name does not reference a previously defined variable, it is given a default (empty) value. This is profoundly irritating when trying to edit template files in a command-line environment. As a fix, the naming convention is changed to use % for template filename/filepath interpolation. --- .../gapic/generator/generator.py | 42 +++++++++---------- .../$name => %namespace/%name}/__init__.py.j2 | 0 .../$name => %namespace/%name}/py.typed.j2 | 0 .../%name_%version/%sub}/__init__.py.j2 | 0 .../%sub/services/%service}/__init__.py.j2 | 0 .../%sub/services/%service}/client.py.j2 | 0 .../%sub/services/%service}/pagers.py.j2 | 0 .../%service}/transports/__init__.py.j2 | 0 .../services/%service}/transports/base.py.j2 | 0 .../services/%service}/transports/grpc.py.j2 | 0 .../%sub}/services/__init__.py.j2 | 0 .../%name_%version/%sub/types/%proto.py.j2} | 4 +- .../%name_%version/%sub}/types/_enum.py.j2 | 0 .../%name_%version/%sub}/types/_message.py.j2 | 4 +- .../%name_%version}/py.typed.j2 | 0 .../%sub/test_%service.py.j2} | 0 .../gapic-generator/gapic/utils/filename.py | 2 +- .../tests/unit/generator/test_generator.py | 22 +++++----- .../tests/unit/utils/test_filename.py | 4 +- 19 files changed, 39 insertions(+), 39 deletions(-) rename packages/gapic-generator/gapic/templates/{$namespace/$name => %namespace/%name}/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name => %namespace/%name}/py.typed.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub => %namespace/%name_%version/%sub}/__init__.py.j2 
(100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/services/$service => %namespace/%name_%version/%sub/services/%service}/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/services/$service => %namespace/%name_%version/%sub/services/%service}/client.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/services/$service => %namespace/%name_%version/%sub/services/%service}/pagers.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/services/$service => %namespace/%name_%version/%sub/services/%service}/transports/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/services/$service => %namespace/%name_%version/%sub/services/%service}/transports/base.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/services/$service => %namespace/%name_%version/%sub/services/%service}/transports/grpc.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub => %namespace/%name_%version/%sub}/services/__init__.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub/types/$proto.py.j2 => %namespace/%name_%version/%sub/types/%proto.py.j2} (88%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub => %namespace/%name_%version/%sub}/types/_enum.py.j2 (100%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version/$sub => %namespace/%name_%version/%sub}/types/_message.py.j2 (93%) rename packages/gapic-generator/gapic/templates/{$namespace/$name_$version => %namespace/%name_%version}/py.typed.j2 (100%) rename packages/gapic-generator/gapic/templates/tests/unit/{$name_$version/$sub/test_$service.py.j2 => %name_%version/%sub/test_%service.py.j2} (100%) diff --git a/packages/gapic-generator/gapic/generator/generator.py 
b/packages/gapic-generator/gapic/generator/generator.py index 26749ce5c4b7..2bdfe677b710 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -229,14 +229,14 @@ def _render_template( # Sanity check: Rendering per service and per proto would be a # combinatorial explosion and is almost certainly not what anyone # ever wants. Error colorfully on it. - if '$service' in template_name and '$proto' in template_name: - raise ValueError('Template files may live under a $proto or ' - '$service directory, but not both.') + if '%service' in template_name and '%proto' in template_name: + raise ValueError('Template files may live under a %proto or ' + '%service directory, but not both.') # If this template should be rendered for subpackages, process it # for all subpackages and set the strict flag (restricting what # services and protos we pull from for the remainder of the method). - if '$sub' in template_name: + if '%sub' in template_name: for subpackage in api_schema.subpackages.values(): answer.update(self._render_template(template_name, api_schema=subpackage, @@ -245,7 +245,7 @@ def _render_template( # If this template should be rendered once per proto, iterate over # all protos to be rendered - if '$proto' in template_name: + if '%proto' in template_name: for proto in api_schema.protos.values(): if (skip_subpackages and proto.meta.address.subpackage != api_schema.subpackage_view): @@ -258,7 +258,7 @@ def _render_template( # If this template should be rendered once per service, iterate # over all services to be rendered. - if '$service' in template_name: + if '%service' in template_name: for service in api_schema.services.values(): if (skip_subpackages and service.meta.address.subpackage != api_schema.subpackage_view): @@ -313,12 +313,12 @@ def _get_filename( """Return the appropriate output filename for this template. 
This entails running the template name through a series of - replacements to replace the "filename variables" (``$name``, - ``$service``, etc.). + replacements to replace the "filename variables" (``%name``, + ``%service``, etc.). Additionally, any of these variables may be substituted with an empty value, and we should do the right thing in this case. - (The exception to this is ``$service``, which is guaranteed to be + (The exception to this is ``%service``, which is guaranteed to be set if it is needed.) Args: @@ -332,37 +332,37 @@ def _get_filename( """ filename = template_name[:-len('.j2')] - # Replace the $namespace variable. + # Replace the %namespace variable. filename = filename.replace( - '$namespace', + '%namespace', os.path.sep.join([i.lower() for i in api_schema.naming.namespace]), ).lstrip(os.path.sep) - # Replace the $name, $version, and $sub variables. - filename = filename.replace('$name_$version', + # Replace the %name, %version, and %sub variables. + filename = filename.replace('%name_%version', api_schema.naming.versioned_module_name) - filename = filename.replace('$version', api_schema.naming.version) - filename = filename.replace('$name', api_schema.naming.module_name) - filename = filename.replace('$sub', + filename = filename.replace('%version', api_schema.naming.version) + filename = filename.replace('%name', api_schema.naming.module_name) + filename = filename.replace('%sub', '/'.join(api_schema.subpackage_view)) - # Replace the $service variable if applicable. + # Replace the %service variable if applicable. if context and 'service' in context: filename = filename.replace( - '$service', + '%service', context['service'].module_name, ) - # Replace the $proto variable if appliable. + # Replace the %proto variable if appliable. # In the cases of protos, we also honor subpackages. 
if context and 'proto' in context: filename = filename.replace( - '$proto', + '%proto', context['proto'].module_name, ) # Paths may have empty path segments if components are empty - # (e.g. no $version); handle this. + # (e.g. no %version); handle this. filename = re.sub(r'/+', '/', filename) # Done, return the filename. diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name/py.typed.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/py.typed.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name/py.typed.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name/py.typed.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 diff --git 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/client.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/pagers.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/base.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 diff --git 
a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/$service/transports/grpc.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/__init__.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/services/__init__.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 similarity index 88% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 index eb42eb1f0d54..9fd9353481a4 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/$proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 @@ -30,11 +30,11 @@ __protobuf__ = {{ p }}.module( {% for enum in proto.enums.values() -%} - {% include '$namespace/$name_$version/$sub/types/_enum.py.j2' with context %} + {% include '%namespace/%name_%version/%sub/types/_enum.py.j2' with context %} {% endfor %} {% for message in proto.messages.values() -%} - {% include "$namespace/$name_$version/$sub/types/_message.py.j2" with context %} + {% 
include "%namespace/%name_%version/%sub/types/_message.py.j2" with context %} {% endfor %} {% endwith %} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_enum.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 similarity index 93% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 3afab165ad87..e9586108c043 100644 --- a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/$sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -11,7 +11,7 @@ class {{ message.name }}({{ p }}.Message): {# Iterate over nested enums. 
-#} {% for enum in message.nested_enums.values() -%} {% filter indent %} - {%- include '$namespace/$name_$version/$sub/types/_enum.py.j2' %} + {%- include '%namespace/%name_%version/%sub/types/_enum.py.j2' %} {% endfilter %} {% endfor -%} @@ -19,7 +19,7 @@ class {{ message.name }}({{ p }}.Message): {% for submessage in message.nested_messages.values() -%} {% if not submessage.map -%} {% with message = submessage %}{% filter indent %} - {%- include '$namespace/$name_$version/$sub/types/_message.py.j2' %} + {%- include '%namespace/%name_%version/%sub/types/_message.py.j2' %} {% endfilter %}{% endwith %} {% endif %} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/$namespace/$name_$version/py.typed.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/py.typed.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/$namespace/$name_$version/py.typed.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name_%version/py.typed.j2 diff --git a/packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/tests/unit/$name_$version/$sub/test_$service.py.j2 rename to packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 diff --git a/packages/gapic-generator/gapic/utils/filename.py b/packages/gapic-generator/gapic/utils/filename.py index 9901d0745f1d..f0c61c14e7e0 100644 --- a/packages/gapic-generator/gapic/utils/filename.py +++ b/packages/gapic-generator/gapic/utils/filename.py @@ -28,7 +28,7 @@ def to_valid_filename(filename: str) -> str: Returns: str: A valid filename. 
""" - return re.sub(r'[^a-z0-9.%_-]+', '-', filename.lower()) + return re.sub(r'[^a-z0-9.$_-]+', '-', filename.lower()) def to_valid_module_name(module_name: str) -> str: diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index dfa566e41ab0..56e50635c365 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -96,21 +96,21 @@ def test_get_response_fails_invalid_file_paths(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = [ - 'foo/bar/$service/$proto/baz.py.j2', + 'foo/bar/%service/%proto/baz.py.j2', 'molluscs/squid/sample.py.j2', ] with pytest.raises(ValueError) as ex: g.get_response(api_schema=make_api()) ex_str = str(ex.value) - assert '$proto' in ex_str and '$service' in ex_str + assert '%proto' in ex_str and '%service' in ex_str def test_get_response_enumerates_services(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = [ - 'foo/$service/baz.py.j2', + 'foo/%service/baz.py.j2', 'molluscs/squid/sample.py.j2', ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -132,7 +132,7 @@ def test_get_response_enumerates_proto(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = [ - 'foo/$proto.py.j2', + 'foo/%proto.py.j2', 'molluscs/squid/sample.py.j2', ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -166,8 +166,8 @@ def test_get_response_divides_subpackages(): ], package='foo.v1') with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: lt.return_value = [ - 'foo/$sub/types/$proto.py.j2', - 'foo/$sub/services/$service.py.j2', + 'foo/%sub/types/%proto.py.j2', + 'foo/%sub/services/%service.py.j2', 'molluscs/squid/sample.py.j2', ] with 
mock.patch.object(jinja2.Environment, 'get_template') as gt: @@ -188,7 +188,7 @@ def test_get_response_divides_subpackages(): def test_get_filename(): g = make_generator() - template_name = '$namespace/$name_$version/foo.py.j2' + template_name = '%namespace/%name_%version/foo.py.j2' assert g._get_filename(template_name, api_schema=make_api( naming=make_naming( @@ -199,7 +199,7 @@ def test_get_filename(): def test_get_filename_with_namespace(): g = make_generator() - template_name = '$namespace/$name_$version/foo.py.j2' + template_name = '%namespace/%name_%version/foo.py.j2' assert g._get_filename(template_name, api_schema=make_api( naming=make_naming( @@ -213,7 +213,7 @@ def test_get_filename_with_namespace(): def test_get_filename_with_service(): g = make_generator() - template_name = '$name/$service/foo.py.j2' + template_name = '%name/%service/foo.py.j2' assert g._get_filename( template_name, api_schema=make_api( @@ -240,7 +240,7 @@ def test_get_filename_with_proto(): g = make_generator() assert g._get_filename( - '$name/types/$proto.py.j2', + '%name/types/%proto.py.j2', api_schema=api, context={'proto': api.protos['bacon.proto']}, ) == 'spam/types/bacon.py' @@ -265,7 +265,7 @@ def test_get_filename_with_proto_and_sub(): g = make_generator() assert g._get_filename( - '$name/types/$sub/$proto.py.j2', + '%name/types/%sub/%proto.py.j2', api_schema=api, context={'proto': api.protos['bacon.proto']}, ) == 'bar/types/baz/bacon.py' diff --git a/packages/gapic-generator/tests/unit/utils/test_filename.py b/packages/gapic-generator/tests/unit/utils/test_filename.py index 2e88dc2abc41..ad18869b0b57 100644 --- a/packages/gapic-generator/tests/unit/utils/test_filename.py +++ b/packages/gapic-generator/tests/unit/utils/test_filename.py @@ -18,12 +18,12 @@ def test_to_valid_filename(): assert filename.to_valid_filename('foo bar.py') == 'foo-bar.py' assert filename.to_valid_filename('FOO') == 'foo' - assert filename.to_valid_filename('nom$&nom@nom.py') == 'nom-nom-nom.py' + 
assert filename.to_valid_filename('nom%&nom@nom.py') == 'nom-nom-nom.py' assert filename.to_valid_filename('num_bear.py') == 'num_bear.py' def test_to_valid_module_name(): assert filename.to_valid_module_name('foo bar.py') == 'foo_bar.py' assert filename.to_valid_module_name('FOO') == 'foo' - assert filename.to_valid_module_name('nom$&nom.py') == 'nom_nom.py' + assert filename.to_valid_module_name('nom%&nom.py') == 'nom_nom.py' assert filename.to_valid_module_name('num_bear.py') == 'num_bear.py' From 914a4413f4222088b7ac68a3299c3c3cb3d97107 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 19 Nov 2019 09:22:57 -0800 Subject: [PATCH 0178/1339] Generated test and impl for client options (#248) In order to preserve the interface of legacy client construction, client constructors take a 'google.api_core.client_options.ClientOptions' parameter. Currently, this option only customizes the api endpoint, but other customizations may be enabled. Includes generated unit tests. Implementation for #227 --- .../%sub/services/%service/client.py.j2 | 28 ++++++++++------- .../%name_%version/%sub/test_%service.py.j2 | 30 +++++++++++++++++-- 2 files changed, 46 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 58bead3c462e..e4419df4cfb7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -5,11 +5,12 @@ from collections import OrderedDict from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import 
credentials # type: ignore -from google.oauth2 import service_account # type: ignore +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} @@ -56,6 +57,8 @@ class {{ service.client_name }}Meta(type): class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """{{ service.meta.doc|rst(width=72, indent=4) }}""" + DEFAULT_OPTIONS = ClientOptions.ClientOptions({% if service.host %}api_endpoint='{{ service.host }}'{% endif %}) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -79,15 +82,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, - transport: Union[str, {{ service.name }}Transport] = None + transport: Union[str, {{ service.name }}Transport] = None, + client_options: ClientOptions = DEFAULT_OPTIONS, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. Args: - host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -96,7 +97,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. 
+ client_options (ClientOptions): Custom options for the client. """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. @@ -107,7 +112,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._transport = transport else: Transport = type(self).get_transport_class(transport) - self._transport = Transport(credentials=credentials, host=host) + self._transport = Transport( + credentials=credentials, + host=client_options.api_endpoint{% if service.host %} or '{{ service.host }}'{% endif %}, + ) {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 466f20155a51..5eaa1b2ee5df 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -14,6 +14,7 @@ from google.auth import credentials from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports +from google.api_core import client_options {% if service.has_lro -%} from google.api_core import future from google.api_core import operations_v1 @@ -37,6 +38,31 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): client = {{ 
service.client_name }}.from_service_account_json("dummy/file/path.json") assert client._transport._credentials == creds + {% if service.host %}assert client._transport._host == '{{ service.host }}'{% endif %} + + +def test_{{ service.client_name|snake_case }}_client_options(): + # Check the default options have their expected values. + {% if service.host %}assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == '{{ service.host }}'{% endif %} + + # Check that options can be customized. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: + transport = gtc.return_value = mock.MagicMock() + client = {{ service.client_name }}( + client_options=options + ) + transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + + +def test_{{ service.client_name|snake_case }}_client_options_from_dict(): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: + transport = gtc.return_value = mock.MagicMock() + client = {{ service.client_name }}( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + {% for method in service.methods.values() -%} def test_{{ method.name|snake_case }}(transport: str = 'grpc'): @@ -352,7 +378,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), - host='{{ host }}', + client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), transport='grpc', ) 
assert client._transport._host == '{{ host }}:443' @@ -363,7 +389,7 @@ def test_{{ service.name|snake_case }}_host_with_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), - host='{{ host }}:8000', + client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), transport='grpc', ) assert client._transport._host == '{{ host }}:8000' From ca6fcb166e9f654331eb82432d359cefd92b4c66 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 2 Dec 2019 11:23:36 -0800 Subject: [PATCH 0179/1339] Add testing for python3.8 (#251) --- packages/gapic-generator/.circleci/config.yml | 69 +++++++++++++++++-- .../gapic-generator/gapic/schema/wrappers.py | 9 +-- packages/gapic-generator/noxfile.py | 10 +-- .../tests/unit/schema/wrappers/test_method.py | 2 +- 4 files changed, 76 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 0ae82ff953e4..7aee34b5c056 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -16,10 +16,15 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - unit-3.8: + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - showcase-unit-3.6: requires: - unit-3.6 - unit-3.7 + - unit-3.8 filters: tags: only: /^\d+\.\d+\.\d+$/ @@ -27,6 +32,15 @@ workflows: requires: - unit-3.6 - unit-3.7 + - unit-3.8 + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - showcase-unit-3.8: + requires: + - unit-3.6 + - unit-3.7 + - unit-3.8 filters: tags: only: /^\d+\.\d+\.\d+$/ @@ -42,6 +56,7 @@ workflows: - mypy - showcase-unit-3.6 - showcase-unit-3.7 + - showcase-unit-3.8 - showcase-mypy filters: tags: @@ -86,7 +101,7 @@ jobs: command: nox -s docs mypy: docker: - - image: python:3.7-slim + - image: python:3.8-slim steps: - checkout - run: @@ -135,7 +150,7 @@ jobs: docker push 
gcr.io/gapic-images/gapic-generator-python:latest publish_package: docker: - - image: python:3.7-slim + - image: python:3.8-slim steps: - checkout - run: @@ -150,7 +165,7 @@ jobs: command: python setup.py sdist upload showcase: docker: - - image: python:3.7-slim + - image: python:3.8-slim - image: gcr.io/gapic-images/gapic-showcase:0.2.0 steps: - checkout @@ -221,9 +236,33 @@ jobs: - run: name: Run unit tests. command: nox -s showcase_unit-3.7 + showcase-unit-3.8: + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit-3.8 showcase-mypy: docker: - - image: python:3.7-slim + - image: python:3.8-slim steps: - checkout - run: @@ -289,6 +328,28 @@ jobs: name: Submit coverage data to codecov. command: codecov when: always + unit-3.8: + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install pandoc + command: | + apt-get update + apt-get install -y pandoc + - run: + name: Install nox and codecov. + command: | + pip install nox + pip install codecov + - run: + name: Run unit tests. + command: nox -s unit-3.8 + - run: + name: Submit coverage data to codecov. 
+ command: codecov + when: always style-check: docker: - image: python:3.6-alpine diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index c0addec593eb..42f7ba8fab82 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -34,11 +34,11 @@ from typing import (cast, Dict, FrozenSet, List, Mapping, Optional, Sequence, Set, Union) -from google.api import annotations_pb2 # type: ignore +from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 -from google.api_core import exceptions # type: ignore -from google.protobuf import descriptor_pb2 +from google.api_core import exceptions # type: ignore +from google.protobuf import descriptor_pb2 # type: ignore from gapic import utils from gapic.schema import metadata @@ -132,7 +132,8 @@ def repeated(self) -> bool: bool: Whether this field is repeated. 
""" return self.label == \ - descriptor_pb2.FieldDescriptorProto.Label.Value(b'LABEL_REPEATED') + descriptor_pb2.FieldDescriptorProto.Label.Value( + 'LABEL_REPEATED') # type: ignore @property def required(self) -> bool: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 7d3b1620f1a9..7ddf550507bb 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -22,7 +22,7 @@ showcase_version = '0.2.0' -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8']) def unit(session): """Run the unit test suite.""" @@ -40,7 +40,7 @@ def unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.8') def showcase(session): """Run the Showcase test suite.""" @@ -84,7 +84,7 @@ def showcase(session): session.run('py.test', '--quiet', os.path.join('tests', 'system')) -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8']) def showcase_unit(session): """Run the generated unit tests against the Showcase library.""" @@ -130,7 +130,7 @@ def showcase_unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.8') def showcase_mypy(session): """Perform typecheck analysis on the generated Showcase library.""" @@ -182,7 +182,7 @@ def docs(session): 'docs/_build/doctrees', 'docs/', 'docs/_build/html/') -@nox.session(python='3.7') +@nox.session(python=['3.7', '3.8']) def mypy(session): """Perform typecheck analysis.""" diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 0195a90886aa..67b0369ab9fe 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -115,7 +115,7 @@ def test_method_paged_result_field_no_page_field(): def test_method_field_headers_none(): method = make_method('DoSomething') - assert isinstance(method.field_headers, collections.Sequence) 
+ assert isinstance(method.field_headers, collections.abc.Sequence) def test_method_field_headers_present(): From 725a6aa7e9856e02c954baeb61a67f894bf8eb00 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 2 Dec 2019 11:30:23 -0800 Subject: [PATCH 0180/1339] Update showcase dependency (#252) --- packages/gapic-generator/.circleci/config.yml | 2 +- packages/gapic-generator/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 7aee34b5c056..0da43b33429e 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -166,7 +166,7 @@ jobs: showcase: docker: - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.2.0 + - image: gcr.io/gapic-images/gapic-showcase:0.6.1 steps: - checkout - run: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 7ddf550507bb..ab604a6a3c4c 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -19,7 +19,7 @@ import nox # type: ignore -showcase_version = '0.2.0' +showcase_version = '0.6.1' @nox.session(python=['3.6', '3.7', '3.8']) From 29fd85dac92f1b86a62f6b545b450c41f155469e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 5 Dec 2019 14:47:51 -0800 Subject: [PATCH 0181/1339] Tests and impl for resource path helper methods (#253) Includes generated unit tests, plumbing code unit tests, and system tests for determining whether a message is a resource, providing an accessor for one and only one path if it is a resource, and a service accessor for all message fields that are resources (only one level deep at the moment). 
--- .../gapic-generator/gapic/schema/wrappers.py | 34 +++++ .../%sub/services/%service/client.py.j2 | 7 + .../%name_%version/%sub/test_%service.py.j2 | 13 ++ .../tests/system/test_resource_crud.py | 7 + .../unit/schema/wrappers/test_message.py | 22 ++- .../unit/schema/wrappers/test_service.py | 136 ++++++++++++++++++ 6 files changed, 216 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 42f7ba8fab82..600e2233751b 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -37,6 +37,7 @@ from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 +from google.api import resource_pb2 from google.api_core import exceptions # type: ignore from google.protobuf import descriptor_pb2 # type: ignore @@ -212,6 +213,9 @@ class MessageType: def __getattr__(self, name): return getattr(self.message_pb, name) + def __hash__(self): + return hash(self.name) + @utils.cached_property def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: """Return all composite fields used in this proto's messages.""" @@ -231,6 +235,25 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address + @property + def resource_path(self) -> Optional[str]: + """If this message describes a resource, return the path to the resource. 
+ If there are multiple paths, returns the first one.""" + return next( + iter(self.options.Extensions[resource_pb2.resource].pattern), + None + ) + + @property + def resource_type(self) -> Optional[str]: + resource = self.options.Extensions[resource_pb2.resource] + return resource.type[resource.type.find('/') + 1:] if resource else None + + @property + def resource_path_args(self) -> Sequence[str]: + path_arg_re = re.compile(r'\{([a-zA-Z0-9_-]+)\}') + return path_arg_re.findall(self.resource_path or '') + def get_field(self, *field_path: str, collisions: FrozenSet[str] = frozenset()) -> Field: """Return a field arbitrarily deep in this message's structure. @@ -732,6 +755,17 @@ def names(self) -> FrozenSet[str]: # Done; return the answer. return frozenset(answer) + @utils.cached_property + def resource_messages(self) -> FrozenSet[MessageType]: + """Returns all the resource message types used in all + request fields in the service.""" + return frozenset( + field.message + for method in self.methods.values() + for field in method.input.fields.values() + if field.message and field.message.resource_path + ) + def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """Return a derivative of this service with the provided context. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index e4419df4cfb7..35257d89452a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -81,6 +81,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): from_service_account_json = from_service_account_file + {% for message in service.resource_messages -%} + @staticmethod + def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: + """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" + return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + {% endfor %} + def __init__(self, *, credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 5eaa1b2ee5df..97e12493fdf1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -422,4 +422,17 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): assert transport.operations_client is transport.operations_client {% endif -%} + +{% for message in service.resource_messages -%} +{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle") -%} +def test_{{ message.name|snake_case }}_path(): + {% for arg 
in message.resource_path_args -%} + {{ arg }} = "{{ molluscs.next() }}" + {% endfor %} + expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + actual = {{ service.client_name }}.{{ message.name|snake_case }}_path({{message.resource_path_args|join(", ") }}) + assert expected == actual + +{% endwith -%} +{% endfor -%} {% endblock %} diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 5f7f7d106017..9ef70dccf034 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -43,3 +43,10 @@ def test_crud_positional(identity): assert identity.get_user(user.name).display_name == 'Monty Python' finally: identity.delete_user(user.name) + + +def test_path_methods(identity): + expected = "users/bdfl" + actual = identity.user_path("bdfl") + + assert expected == actual diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index f8d514b9ceab..3e1de39e6d8f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -17,6 +17,7 @@ import pytest +from google.api import resource_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import metadata @@ -124,6 +125,21 @@ def test_get_field_nonterminal_repeated_error(): assert outer.get_field('inner', 'one') == inner_fields[1] +def test_resource_path(): + options = descriptor_pb2.MessageOptions() + resource = options.Extensions[resource_pb2.resource] + resource.pattern.append( + "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}") + resource.pattern.append( + "kingdoms/{kingdom}/divisions/{division}/classes/{klass}") + resource.type = "taxonomy.biology.com/Class" + message = make_message('Squid', 
options=options) + + assert message.resource_path == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" + assert message.resource_path_args == ["kingdom", "phylum", "klass"] + assert message.resource_type == "Class" + + def test_field_map(): # Create an Entry message. entry_msg = make_message( @@ -140,8 +156,8 @@ def test_field_map(): def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, - options: descriptor_pb2.MethodOptions = None, + fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, + options: descriptor_pb2.MethodOptions = None, ) -> wrappers.MessageType: message_pb = descriptor_pb2.DescriptorProto( name=name, @@ -183,7 +199,7 @@ def make_field(name: str, repeated: bool = False, def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - values: Tuple[str, int] = (), meta: metadata.Metadata = None, + values: Tuple[str, int] = (), meta: metadata.Metadata = None, ) -> wrappers.EnumType: enum_value_pbs = [ descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index da7d418eb331..7e1907a7ebe5 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -12,11 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import typing from google.api import annotations_pb2 from google.api import client_pb2 from google.api import http_pb2 +from google.api import resource_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import imp @@ -146,6 +148,52 @@ def test_module_name(): assert service.module_name == 'my_service' +def test_resource_messages(): + # Resources + squid_options = descriptor_pb2.MessageOptions() + squid_options.Extensions[resource_pb2.resource].pattern.append( + "squid/{squid}") + squid_message = make_message("Squid", options=squid_options) + clam_options = descriptor_pb2.MessageOptions() + clam_options.Extensions[resource_pb2.resource].pattern.append( + "clam/{clam}") + clam_message = make_message("Clam", options=clam_options) + whelk_options = descriptor_pb2.MessageOptions() + whelk_options.Extensions[resource_pb2.resource].pattern.append( + "whelk/{whelk}") + whelk_message = make_message("Whelk", options=whelk_options) + + # Not resources + octopus_message = make_message("Octopus") + oyster_message = make_message("Oyster") + nudibranch_message = make_message("Nudibranch") + + service = make_service( + 'Molluscs', + methods=( + make_method( + f"Get{message.name}", + input_message=make_message( + f"{message.name}Request", + fields=[make_field(message.name, message=message)] + ) + ) + for message in ( + squid_message, + clam_message, + whelk_message, + octopus_message, + oyster_message, + nudibranch_message + ) + ) + ) + + expected = {squid_message, clam_message, whelk_message} + actual = service.resource_messages + assert expected == actual + + def make_service(name: str = 'Placeholder', host: str = '', methods: typing.Tuple[wrappers.Method] = (), scopes: typing.Tuple[str] = ()) -> wrappers.Service: @@ -248,6 +296,7 @@ def get_message(dot_path: str, *, # path is just google.protobuf.DescriptorProto). 
pieces = dot_path.split('.') pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] + return wrappers.MessageType( fields={i.name: wrappers.Field( field_pb=i, @@ -264,6 +313,93 @@ def get_message(dot_path: str, *, ) +def make_method( + name: str, input_message: wrappers.MessageType = None, + output_message: wrappers.MessageType = None, + package: str = 'foo.bar.v1', module: str = 'baz', + http_rule: http_pb2.HttpRule = None, + signatures: typing.Sequence[str] = (), + **kwargs) -> wrappers.Method: + # Use default input and output messages if they are not provided. + input_message = input_message or make_message('MethodInput') + output_message = output_message or make_message('MethodOutput') + + # Create the method pb2. + method_pb = descriptor_pb2.MethodDescriptorProto( + name=name, + input_type=str(input_message.meta.address), + output_type=str(output_message.meta.address), + **kwargs + ) + + # If there is an HTTP rule, process it. + if http_rule: + ext_key = annotations_pb2.http + method_pb.options.Extensions[ext_key].MergeFrom(http_rule) + + # If there are signatures, include them. + for sig in signatures: + ext_key = client_pb2.method_signature + method_pb.options.Extensions[ext_key].append(sig) + + # Instantiate the wrapper class. 
+ return wrappers.Method( + method_pb=method_pb, + input=input_message, + output=output_message, + meta=metadata.Metadata(address=metadata.Address( + name=name, + package=package, + module=module, + parent=(f'{name}Service',), + )), + ) + + +def make_field(name: str, repeated: bool = False, + message: wrappers.MessageType = None, + enum: wrappers.EnumType = None, + meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: + if message: + kwargs['type_name'] = str(message.meta.address) + if enum: + kwargs['type_name'] = str(enum.meta.address) + field_pb = descriptor_pb2.FieldDescriptorProto( + name=name, + label=3 if repeated else 1, + **kwargs + ) + return wrappers.Field( + enum=enum, + field_pb=field_pb, + message=message, + meta=meta or metadata.Metadata(), + ) + + +def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', + fields: typing.Sequence[wrappers.Field] = (), + meta: metadata.Metadata = None, + options: descriptor_pb2.MethodOptions = None, + ) -> wrappers.MessageType: + message_pb = descriptor_pb2.DescriptorProto( + name=name, + field=[i.field_pb for i in fields], + options=options, + ) + return wrappers.MessageType( + message_pb=message_pb, + fields=collections.OrderedDict((i.name, i) for i in fields), + nested_messages={}, + nested_enums={}, + meta=meta or metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(package.split('.')), + module=module, + )), + ) + + def get_enum(dot_path: str) -> wrappers.EnumType: pieces = dot_path.split('.') pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] From 1dcacd0d2f91e3bdc11c1f843cdd2c34a8475325 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 6 Dec 2019 14:56:42 -0800 Subject: [PATCH 0182/1339] Re-add original flattening semantics (#256) Not a strict readition: flattened fields can be non-primitives. 
--- .../gapic-generator/gapic/schema/wrappers.py | 22 +-- .../%sub/services/%service/client.py.j2 | 24 +-- .../%name_%version/%sub/test_%service.py.j2 | 158 +++++++++--------- .../tests/system/test_grpc_lro.py | 8 +- .../tests/system/test_grpc_streams.py | 6 +- .../tests/system/test_grpc_unary.py | 26 +-- .../tests/system/test_pagination.py | 16 +- .../tests/system/test_resource_crud.py | 26 +-- .../tests/system/test_retry.py | 6 +- .../tests/unit/schema/wrappers/test_method.py | 4 +- 10 files changed, 141 insertions(+), 155 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 600e2233751b..dece167763eb 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -545,26 +545,14 @@ def field_headers(self) -> Sequence[str]: @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: """Return the signature defined for this method.""" - answer: Dict[str, Field] = collections.OrderedDict() signatures = self.options.Extensions[client_pb2.method_signature] - # Iterate over each signature and add the appropriate fields. - for sig in signatures: - # Get all of the individual fields. - fields = collections.OrderedDict([ - (f.strip(), self.input.get_field(*f.strip().split('.'))) - for f in sig.split(',') - ]) - - # Sanity check: If any fields contain a message, we ignore the - # entire signature. - if any([i.message for i in fields.values()]): - continue - - # Add the fields to the answer. 
- answer.update(fields) + answer: Dict[str, Field] = collections.OrderedDict( + (f.strip(), self.input.get_field(*f.strip().split('.'))) + for sig in signatures + for f in sig.split(',') + ) - # Done; return the flattened fields return answer @utils.cached_property diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 35257d89452a..5448d9384c0d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -126,14 +126,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, - {% for field in method.legacy_flattened_fields.values() -%} - {% if field.required -%} - {{ field.name }}: {{ field.ident }}, - {% else -%} + request: {{ method.input.ident }} = None, + *, + {% for field in method.flattened_fields.values() -%} {{ field.name }}: {{ field.ident }} = None, - {% endif -%} {% endfor -%} - *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -164,13 +161,20 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- endif %} """ # Create or coerce a protobuf request object. - {% if method.legacy_flattened_fields -%} + {% if method.flattened_fields -%} + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + # If we have keyword arguments corresponding to fields on the # request, apply these. {% endif -%} - request = {{ method.input.ident }}() - {%- for field in method.legacy_flattened_fields.values() %} - request.{{ field.name }} = {{ field.name }} + request = {{ method.input.ident }}(request) + {%- for key, field in method.flattened_fields.items() %} + if {{ field.name }} is not None: + request.{{ key }} = {{ field.name }} {%- endfor %} # Wrap the RPC method; this adds retry and timeout information, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 97e12493fdf1..04bc2a5e858e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -71,6 +71,10 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = {{ method.input.ident }}() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client._transport.{{ method.name|snake_case }}), @@ -89,18 +93,12 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {%- endfor %} ) {% endif -%} - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- response = client.{{ method.name|snake_case }}( - {% for field in method.legacy_flattened_fields.values() -%} - {{ field.name }}=None, - {% endfor -%} - ) - + response = client.{{ method.name|snake_case }}(request) + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == {{ method.input.ident }}() + assert args[0] == request # Establish that the response is the type that we expect. {% if method.void -%} @@ -121,6 +119,14 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}( + {%- for field_header in method.field_headers %} + {{ field_header }}='{{ field_header }}/value', + {%- endfor %} ) # Mock the actual call within the gRPC stub, and fake the request. @@ -128,22 +134,11 @@ def test_{{ method.name|snake_case }}_field_headers(): type(client._transport.{{ method.name|snake_case }}), '__call__') as call: call.return_value = {{ method.output.ident }}() - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - response = client.{{ method.name|snake_case }}( - {%- for field_header in method.field_headers %} - {{ field_header }}='{{ field_header }}/value', - {%- endfor %} - ) - + response = client.{{ method.name|snake_case }}(request) + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = {{ method.input.ident }}( - {%- for field_header in method.field_headers %} - {{ field_header }}='{{ field_header }}/value', - {%- endfor %} - ) assert args[0] == request # Establish that the field header was sent. 
@@ -155,63 +150,60 @@ def test_{{ method.name|snake_case }}_field_headers(): {%- if not loop.last %}&{% endif -%} {%- endfor %}', ) in kw['metadata'] -{% endif %} {#- method.field_headers #} - -{# NOTE: the backwards compatibility requirements of legacy flattening make these tests no longer relevant #} -{# If the legacy flattening is ever abandoned, these tests or something like them will be needed again. #} -{# #} -{# {% if method.flattened_fields %} #} -{# def test_{{ method.name|snake_case }}_flattened(): #} -{# client = {{ service.client_name }}( #} -{# credentials=credentials.AnonymousCredentials(), #} -{# ) #} - -{# # Mock the actual call within the gRPC stub, and fake the request. #} -{# with mock.patch.object( #} -{# type(client._transport.{{ method.name|snake_case }}), #} -{# '__call__') as call: #} -{# # Designate an appropriate return value for the call. #} -{# {% if method.void -%} #} -{# call.return_value = None #} -{# {% elif method.lro -%} #} -{# call.return_value = operations_pb2.Operation(name='operations/op') #} -{# {% elif method.server_streaming -%} #} -{# call.return_value = iter([{{ method.output.ident }}()]) #} -{# {% else -%} #} -{# call.return_value = {{ method.output.ident }}() #} -{# {% endif %} #} -{# # Call the method with a truthy value for each flattened field, #} -{# # using the keyword arguments to the method. #} -{# response = client.{{ method.name|snake_case }}( #} -{# {%- for key, field in method.flattened_fields.items() %} #} -{# {{ field.name }}={{ field.mock_value }}, #} -{# {%- endfor %} #} -{# ) #} - -{# # Establish that the underlying call was made with the expected #} -{# # request object values. 
#} -{# assert len(call.mock_calls) == 1 #} -{# _, args, _ = call.mock_calls[0] #} -{# {% for key, field in method.flattened_fields.items() -%} #} -{# assert args[0].{{ key }} == {{ field.mock_value }} #} -{# {% endfor %} #} - - -{# def test_{{ method.name|snake_case }}_flattened_error(): #} -{# client = {{ service.client_name }}( #} -{# credentials=credentials.AnonymousCredentials(), #} -{# ) #} - -{# # Attempting to call a method with both a request object and flattened #} -{# # fields is an error. #} -{# with pytest.raises(ValueError): #} -{# client.{{ method.name|snake_case }}( #} -{# {{ method.input.ident }}(), #} -{# {%- for key, field in method.flattened_fields.items() %} #} -{# {{ field.name }}={{ field.mock_value }}, #} -{# {%- endfor %} #} -{# ) #} -{# {% endif %} {\#- method.flattened_fields #\} #} +{% endif %} + +{% if method.flattened_fields %} +def test_{{ method.name|snake_case }}_flattened(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = client.{{ method.name|snake_case }}( + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + {% for key, field in method.flattened_fields.items() -%} + assert args[0].{{ key }} == {{ field.mock_value }} + {% endfor %} + + +def test_{{ method.name|snake_case }}_flattened_error(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) +{% endif %} {% if method.paged_result_field %} @@ -252,7 +244,9 @@ def test_{{ method.name|snake_case }}_pager(): ), RuntimeError, ) - results = [i for i in client.{{ method.name|snake_case }}({% for field in method.legacy_flattened_fields.values() if field.required -%}None,{% endfor -%})] + results = [i for i in client.{{ method.name|snake_case }}( + request={}, + )] assert len(results) == 6 assert all([isinstance(i, {{ method.paged_result_field.message.ident }}) for i in results]) @@ -294,7 +288,7 @@ def test_{{ method.name|snake_case }}_pages(): ), RuntimeError, ) - pages = list(client.{{ method.name|snake_case }}({% for field in method.legacy_flattened_fields.values() if field.required -%}None,{% endfor -%}).pages) + pages = list(client.{{ method.name|snake_case }}(request={}).pages) for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} @@ -352,7 +346,7 @@ def test_{{ service.name|snake_case }}_base_transport(): ) for method in methods: with pytest.raises(NotImplementedError): - getattr(transport, method)() + getattr(transport, method)(request=object()) {% if service.has_lro -%} # Additionally, the LRO client (a property) should diff --git 
a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py index 392dbd1b98b7..617163a0db1d 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -18,11 +18,11 @@ def test_lro(echo): - future = echo.wait( - end_time=datetime.now(tz=timezone.utc) + timedelta(seconds=1), - success={ + future = echo.wait({ + 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), + 'success': { 'content': 'The hail in Wales falls mainly on the snails...eventually.' - } + }} ) response = future.result() assert isinstance(response, showcase_v1beta1.WaitResponse) diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index f22ba0f055f8..07b2e4302625 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -15,9 +15,9 @@ def test_unary_stream(echo): content = 'The hail in Wales falls mainly on the snails.' - responses = echo.expand( - content=content, - ) + responses = echo.expand({ + 'content': content, + }) # Consume the response and ensure it matches what we expect. # with pytest.raises(exceptions.NotFound) as exc: diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index 8ac723e93d84..f8735a3a31df 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -20,28 +20,28 @@ from google import showcase -def test_unary(echo): - content = 'The hail in Wales falls mainly on the snails.' 
- response = echo.echo( - content=content, - ) - assert response.content == content +def test_unary_with_request_object(echo): + response = echo.echo(showcase.EchoRequest( + content='The hail in Wales falls mainly on the snails.', + )) + assert response.content == 'The hail in Wales falls mainly on the snails.' -def test_unary_positional(echo): - content = 'The hail in Wales falls mainly on the snails.' - response = echo.echo(content,) - assert response.content == content +def test_unary_with_dict(echo): + response = echo.echo({ + 'content': 'The hail in Wales falls mainly on the snails.', + }) + assert response.content == 'The hail in Wales falls mainly on the snails.' def test_unary_error(echo): message = 'Bad things! Bad things!' with pytest.raises(exceptions.InvalidArgument) as exc: - echo.echo( - error={ + echo.echo({ + 'error': { 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), 'message': message, }, - ) + }) assert exc.value.code == 400 assert exc.value.message == message diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index 807cbb6f5aa7..781614cad466 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -17,10 +17,10 @@ def test_pagination(echo): text = 'The hail in Wales falls mainly on the snails.' - results = [i for i in echo.paged_expand( - content=text, - page_size=3, - )] + results = [i for i in echo.paged_expand({ + 'content': text, + 'page_size': 3, + })] assert len(results) == 9 assert results == [showcase.EchoResponse(content=i) for i in text.split(' ')] @@ -28,10 +28,10 @@ def test_pagination(echo): def test_pagination_pages(echo): text = "The hail in Wales falls mainly on the snails." 
- page_results = list(echo.paged_expand( - content=text, - page_size=3, - ).pages) + page_results = list(echo.paged_expand({ + 'content': text, + 'page_size': 3, + }).pages) assert len(page_results) == 3 assert not page_results[-1].next_page_token diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 9ef70dccf034..597b936ccaf9 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -15,34 +15,34 @@ def test_crud_with_request(identity): count = len(identity.list_users().users) - user = identity.create_user(user={ + user = identity.create_user(request={'user': { 'display_name': 'Guido van Rossum', 'email': 'guido@guido.fake', - }) + }}) try: assert user.display_name == 'Guido van Rossum' assert user.email == 'guido@guido.fake' assert len(identity.list_users().users) == count + 1 - assert identity.get_user( - name=user.name - ).display_name == 'Guido van Rossum' + assert identity.get_user({ + 'name': user.name + }).display_name == 'Guido van Rossum' finally: - identity.delete_user(name=user.name) + identity.delete_user({'name': user.name}) -def test_crud_positional(identity): +def test_crud_flattened(identity): count = len(identity.list_users().users) - user = identity.create_user({ - 'display_name': 'Monty Python', - 'email': 'monty@python.org', - }) + user = identity.create_user( + display_name='Monty Python', + email='monty@python.org', + ) try: assert user.display_name == 'Monty Python' assert user.email == 'monty@python.org' assert len(identity.list_users().users) == count + 1 - assert identity.get_user(user.name).display_name == 'Monty Python' + assert identity.get_user(name=user.name).display_name == 'Monty Python' finally: - identity.delete_user(user.name) + identity.delete_user(name=user.name) def test_path_methods(identity): diff --git a/packages/gapic-generator/tests/system/test_retry.py 
b/packages/gapic-generator/tests/system/test_retry.py index 588ae1170513..bf0284294925 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -20,9 +20,9 @@ def test_retry_bubble(echo): with pytest.raises(exceptions.DeadlineExceeded): - echo.echo( - error={ + echo.echo({ + 'error': { 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), 'message': 'This took longer than you said it should.', }, - ) + }) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 67b0369ab9fe..2a296ee88826 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -171,12 +171,12 @@ def test_method_flattened_fields(): assert 'b' in method.flattened_fields -def test_method_ignored_flattened_fields(): +def test_method_include_flattened_message_fields(): a = make_field('a', type=5) b = make_field('b', type=11, message=make_message('Eggs')) input_msg = make_message('Z', fields=(a, b)) method = make_method('F', input_message=input_msg, signatures=('a,b',)) - assert len(method.flattened_fields) == 0 + assert len(method.flattened_fields) == 2 def test_method_legacy_flattened_fields(): From 6f7944a1c3ccb63469a37e76f7c99b1aadd59fe0 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 9 Dec 2019 19:11:50 -0500 Subject: [PATCH 0183/1339] Add 3.8 classifier to setup.py (#257) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index a30c82394c83..9b94e991f3ca 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -28,6 +28,7 @@ setuptools.setup( 'Programming 
Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], From 21d3c8fb8734309a781203890d9b08bf8879941f Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 9 Dec 2019 16:17:03 -0800 Subject: [PATCH 0184/1339] Fix for #169: unused imports (#254) Fix the unused import of api_core.operation and protobuf.empty_pb2 in generated unit tests --- .../tests/unit/%name_%version/%sub/test_%service.py.j2 | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 04bc2a5e858e..0be731c98cc7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -21,7 +21,9 @@ from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} {% for method in service.methods.values() -%} -{% for ref_type in method.ref_types -%} +{% for ref_type in method.ref_types + if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') + or ref_type.ident.python_import.package == ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') -%} {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} From 7483cff3b70a54dc5d4dc4646770a562cb406ddf Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 10 Dec 2019 13:23:29 -0800 Subject: [PATCH 0185/1339] Add template for a flattening conversion script (#258) The generated method flattening semantics change between the the original monolithic GAPIC generator and this GAPIC generator (aka the Python 
Microgenerator). User code that calls methods from GAPICs generated by the monolith will need to be converted to use the new flattening semantics. This change describes a template for an automated per-service conversion script. Cases the conversion script will NOT handle correctly include * Dynamic method dispatch * Method calls via an alias or as a free function * Star arg expansion in a method call (*args or **kwargs) Plain, normal, vanilla calls are the target for conversion. --- .../scripts/fixup_%service_keywords.py.j2 | 125 ++++++++++++++++++ .../gapic/templates/setup.py.j2 | 10 ++ 2 files changed, 135 insertions(+) create mode 100644 packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 new file mode 100644 index 000000000000..83d4c35d6965 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 @@ -0,0 +1,125 @@ +{% extends '_base.py.j2' %} +{% block content %} +import argparse +import os +import libcst as cst +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class {{ service.client_name }}CallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + {% for method in service.methods.values() -%} + '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), + {% endfor -%} + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> 
cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + {# Inline comments and formatting are currently stripped out. #} + {# My current attempts at preserving comments and formatting #} + {# keep the comments, but the formatting is run through a log #} + {# chipper, and an extra comma gets added, which causes a #} + {# parse error. #} + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + dirs: Sequence[str], + *, + transformer={{ service.client_name }}CallTransformer(), +): + pyfile_gen = (os.path.join(root, f) + for d in dirs + for root, _, files in os.walk(d) + for f in files if os.path.splitext(f)[1] == ".py") + + for fpath in pyfile_gen: + with open(fpath, 'r+') as f: + src = f.read() + tree = cst.parse_module(src) + updated = tree.visit(transformer) + f.seek(0) + f.truncate() + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the {{ service.name }} client library. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. + + Be sure to back up your source files before running this tool and to compare the diffs. 
+""") + parser.add_argument( + '-d', + metavar='dir', + dest='dirs', + action='append', + help='a directory to walk for python files to fix up' + ) + args = parser.parse_args() + fix_files(args.dirs or ['.']) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 9b94e991f3ca..5a18c4e914bd 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -21,6 +21,16 @@ setuptools.setup( 'grpcio >= 1.10.0', 'proto-plus >= 0.4.0', ), + setup_requires=[ + 'libcst >= 0.2.5', + ], + scripts=[ + {% for proto in api.all_protos.values() -%} + {% for service in proto.services.values() -%} + 'scripts/fixup_{{ service.module_name }}_keywords.py', + {% endfor -%} + {% endfor -%} + ], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', From 169381dbff5cc59eb8fa65924adbe5329f27359d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 10 Dec 2019 14:44:03 -0800 Subject: [PATCH 0186/1339] Update version in setup.py to 0.14.0 (#259) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 35c39e826d91..2b79f040c514 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.13.0', + version='0.14.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 5b2a9cab384c12c4d34b41508872d2199ae54bde Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 8 Jan 2020 13:53:05 -0800 Subject: [PATCH 0187/1339] Tests and impl for samplegen flattening awareness of unary methods (#261) Request setup segments that are flattenable (the fields are a subset of the flattenable fields of the method) are flattened. 
Requests that do not meet this requirement are not flattened. --- .../gapic/samplegen/samplegen.py | 45 ++- .../templates/examples/feature_fragments.j2 | 44 ++- .../gapic/templates/examples/sample.py.j2 | 10 +- .../tests/unit/samplegen/common_types.py | 26 +- .../tests/unit/samplegen/test_integration.py | 160 ++++++++- .../tests/unit/samplegen/test_samplegen.py | 157 ++++---- .../tests/unit/samplegen/test_template.py | 336 ++++++++++++------ 7 files changed, 560 insertions(+), 218 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 10262b21e7d6..df89f1e53e5e 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -21,11 +21,13 @@ import re import time +from gapic import utils + from gapic.samplegen_utils import types from gapic.schema import wrappers from collections import (defaultdict, namedtuple, ChainMap as chainmap) -from typing import (ChainMap, Dict, List, Mapping, Optional, Tuple) +from typing import (ChainMap, Dict, FrozenSet, List, Mapping, Optional, Tuple) # There is no library stub file for this module, so ignore it. from google.api import resource_pb2 # type: ignore @@ -193,6 +195,12 @@ class RequestEntry: default_factory=list) +@dataclasses.dataclass(frozen=True) +class FullRequest: + request_list: List[TransformedRequest] + flattenable: bool = False + + class Validator: """Class that validates a sample. @@ -222,6 +230,7 @@ class Validator: # TODO(dovs): make the schema a required param. def __init__(self, method: wrappers.Method, api_schema=None): # The response ($resp) variable is special and guaranteed to exist. 
+ self.method = method self.request_type_ = method.input response_type = method.output if method.paged_result_field: @@ -259,6 +268,12 @@ def preprocess_sample(sample, api_schema): sample["package_name"] = api_schema.naming.warehouse_package_name sample.setdefault("response", [{"print": ["%s", "$resp"]}]) + @utils.cached_property + def flattenable_fields(self) -> FrozenSet[str]: + return frozenset( + field.name for field in self.method.flattened_fields.values() + ) + def var_field(self, var_name: str) -> Optional[wrappers.Field]: return self.var_defs_.get(var_name) @@ -338,7 +353,7 @@ def _normal_request_setup(self, base_param_to_attrs, val, request, field): def validate_and_transform_request(self, calling_form: types.CallingForm, - request: List[Mapping[str, str]]) -> List[TransformedRequest]: + request: List[Mapping[str, str]]) -> FullRequest: """Validates and transforms the "request" block from a sample config. In the initial request, each dict has a "field" key that maps to a dotted @@ -477,16 +492,22 @@ def validate_and_transform_request(self, raise types.InvalidRequestSetup( "Too many base parameters for client side streaming form") - return [ - TransformedRequest.build( - self.request_type_, - self.api_schema_, - key, - val.attrs, - val.is_resource_request - ) - for key, val in base_param_to_attrs.items() - ] + # We can only flatten a collection of request parameters if they're a + # subset of the flattened fields of the method. + flattenable = self.flattenable_fields >= set(base_param_to_attrs) + return FullRequest( + request_list=[ + TransformedRequest.build( + self.request_type_, + self.api_schema_, + key, + val.attrs, + val.is_resource_request + ) + for key, val in base_param_to_attrs.items() + ], + flattenable=flattenable + ) def validate_response(self, response): """Validates a "response" block from a sample config. 
diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 3f0ad569016b..6de840fe92bc 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -41,9 +41,9 @@ There is a little, but not enough for it to be important because {% endif %} {% endmacro %} -{% macro print_input_params(requests) %} +{% macro print_input_params(full_request) %} {% with input_parameters = [] %} - {% for request in requests %} + {% for request in full_request.request_list %} {% if request.body %} {% for element in request.body if element.input_parameter %} {% do input_parameters.append(element.input_parameter) %} @@ -148,8 +148,8 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endif %} {% endmacro %} -{% macro render_request_setup(request) %} -{% for parameter_block in request if parameter_block.body %} +{% macro render_request_setup(full_request) %} +{% for parameter_block in full_request.request_list if parameter_block.body %} {% if parameter_block.pattern -%} {# This is a resource-name patterned lookup parameter #} {% with formals = [] -%} @@ -158,13 +158,20 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endfor -%} {{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) {% endwith -%} -{% else -%} +{% else -%} {# End resource name construction #} {{ parameter_block.base }} = {} {% for attr in parameter_block.body %} {{ render_request_attr(parameter_block.base, attr) }} {% endfor %} {% endif -%} {% endfor %} +{% if not full_request.flattenable -%} +request = { +{% for parameter in full_request.request_list %} + '{{ parameter.base }}': {{ parameter.base if parameter.body else parameter.single }}, +{% endfor -%} +} +{% endif -%} {% endmacro %} {% macro render_request_params(request) %} @@ -180,14 +187,29 @@ with open({{ 
attr.input_parameter }}, "rb") as f: {% endwith -%} {% endmacro %} +{% macro render_request_params_unary(request) %} + {# Provide the top level parameters last and as keyword params #} + {% if request.flattenable -%} + {% with params = [] -%} + {% for r in request.request_list -%} + {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) -%} + {% endfor -%} +{{ params|join(", ") -}} + {% endwith -%} + {% else -%} +request=request + {% endif -%} +{% endmacro %} + + {% macro render_method_call(sample, calling_form, calling_form_enum) %} {# Note: this doesn't deal with enums or unions #} {% if calling_form in [calling_form_enum.RequestStreamingBidi, calling_form_enum.RequestStreamingClient] -%} -client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request)|trim -}}]) -{% else -%} +client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim -}}]) +{% else -%} {# TODO: deal with flattening #} {# TODO: set up client streaming once some questions are answered #} -client.{{ sample.rpc|snake_case }}({{ render_request_params(sample.request)|trim -}}) +client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim -}}) {% endif -%} {% endmacro %} @@ -235,13 +257,13 @@ response = operation.result() {{ method_name|snake_case }} {% endmacro %} -{% macro render_main_block(method_name, request_block) %} +{% macro render_main_block(method_name, full_request) %} def main(): import argparse parser = argparse.ArgumentParser() {% with arg_list = [] -%} -{% for request in request_block if request.body -%} +{% for request in full_request.request_list if request.body -%} {% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", type=str, @@ -249,7 +271,7 @@ def main(): {% do arg_list.append("args." 
+ attr.input_parameter) -%} {% endfor -%} {% endfor -%} -{% for request in request_block if request.single and request.single.input_parameter %} +{% for request in full_request.request_list if request.single and request.single.input_parameter %} parser.add_argument("--{{ request.single.input_parameter }}", type=str, default={{ request.single.value }}) diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index cab331de12d2..f054e2f2f0d4 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -37,12 +37,12 @@ def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input transport="grpc", ) - {{ frags.render_request_setup(sample.request)|indent }} -{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} - {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response)|indent -}} -{% endwith %} + {{ frags.render_request_setup(sample.request)|indent }} +{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent -}} +{% endwith %} # [END {{ sample.id }}] -{{ frags.render_main_block(sample.rpc, sample.request) }} +{{ frags.render_main_block(sample.rpc, sample.request) }} {%- endblock %} diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index e2b542b2c981..e07350192317 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -12,10 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import dataclasses import itertools from collections import namedtuple -from typing import(Iterable, Optional) +from typing import(Any, Dict, Iterable, Optional) from google.protobuf import descriptor_pb2 @@ -23,19 +24,17 @@ # Injected dummy test types -DummyMethod = namedtuple( - "DummyMethod", - [ - "input", - "output", - "lro", - "paged_result_field", - "client_streaming", - "server_streaming", - ], -) -DummyMethod.__new__.__defaults__ = (False,) * len(DummyMethod._fields) +@dataclasses.dataclass(frozen=True) +class DummyMethod: + input: bool = False + output: bool = False + lro: bool = False + paged_result_field: bool = False + client_streaming: bool = False + server_streaming: bool = False + flattened_fields: Dict[str, Any] = dataclasses.field(default_factory=dict) + DummyMessage = namedtuple("DummyMessage", ["fields", "type", "options"]) DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) @@ -43,6 +42,7 @@ DummyField = namedtuple("DummyField", ["message", "enum", + "name", "repeated", "field_pb", "meta", diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index f82470e898c9..8e022e7f6199 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -52,7 +52,7 @@ def test_generate_sample_basic(): input_type = DummyMessage( type="REQUEST TYPE", fields={ - "classify_request": DummyField( + "classify_target": DummyField( message=DummyMessage( type="CLASSIFY TYPE", fields={ @@ -75,7 +75,10 @@ def test_generate_sample_basic(): methods={ "Classify": DummyMethod( input=input_type, - output=message_factory("$resp.taxonomy") + output=message_factory("$resp.taxonomy"), + flattened_fields={ + "classify_target": DummyField(name="classify_target") + } ) } ) @@ -90,11 +93,11 @@ def test_generate_sample_basic(): "id": "mollusc_classify_sync", "description": "Determine 
the full taxonomy of input mollusc", "request": [ - {"field": "classify_request.video", + {"field": "classify_target.video", "value": "path/to/mollusc/video.mkv", "input_parameter": "video", "value_is_file": True}, - {"field": "classify_request.location_annotation", + {"field": "classify_target.location_annotation", "value": "New Zealand", "input_parameter": "location"} ], @@ -141,17 +144,158 @@ def sample_classify(video, location): transport="grpc", ) - classify_request = {} + classify_target = {} # video = "path/to/mollusc/video.mkv" with open(video, "rb") as f: - classify_request["video"] = f.read() + classify_target["video"] = f.read() # location = "New Zealand" - classify_request["location_annotation"] = location + classify_target["location_annotation"] = location + + + response = client.classify(classify_target=classify_target) + print("Mollusc is a \\"{}\\"".format(response.taxonomy)) + + +# [END %s] + +def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--video", + type=str, + default="path/to/mollusc/video.mkv") + parser.add_argument("--location", + type=str, + default="New Zealand") + args = parser.parse_args() + + sample_classify(args.video, args.location) + + +if __name__ == "__main__": + main() +''' % (sample_id, sample_id, sample_id) + + assert sample_str == expected_str + + +def test_generate_sample_basic_unflattenable(): + # Note: the sample integration tests are needfully large + # and difficult to eyeball parse. They are intended to be integration tests + # that catch errors in behavior that is emergent from combining smaller features + # or in features that are sufficiently small and trivial that it doesn't make sense + # to have standalone tests. 
+ input_type = DummyMessage( + type="REQUEST TYPE", + fields={ + "classify_target": DummyField( + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ) + ) + } + ) + + api_naming = naming.Naming( + name="MolluscClient", namespace=("molluscs", "v1")) + service = wrappers.Service( + service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + methods={ + "Classify": DummyMethod( + input=input_type, + output=message_factory("$resp.taxonomy"), + ) + } + ) + + schema = DummyApiSchema( + services={"animalia.mollusca.v1.Mollusc": service}, + naming=api_naming, + ) + + sample = {"service": "animalia.mollusca.v1.Mollusc", + "rpc": "Classify", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + {"field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True}, + {"field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location"} + ], + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} - response = client.classify(classify_request) + sample_str = samplegen.generate_sample( + sample, + schema, + env.get_template('examples/sample.py.j2') + ) + + sample_id = ("mollusc_classify_sync") + expected_str = '''# -*- coding: utf-8 -*- +# Copyright (C) 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# DO NOT EDIT! This is a generated sample ("request", "%s") +# +# To install the latest published package dependency, execute the following: +# pip3 install molluscs-v1-molluscclient + + +# [START %s] +from google import auth +from google.auth import credentials +from molluscs.v1.molluscclient.services.mollusc_service import MolluscServiceClient + +def sample_classify(video, location): + """Determine the full taxonomy of input mollusc""" + + client = MolluscServiceClient( + credentials=credentials.AnonymousCredentials(), + transport="grpc", + ) + + classify_target = {} + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target["video"] = f.read() + + # location = "New Zealand" + classify_target["location_annotation"] = location + + request = { + 'classify_target': classify_target, + } + + + response = client.classify(request=request) print("Mollusc is a \\"{}\\"".format(response.taxonomy)) + # [END %s] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index d14b01fdbeb1..8717bf1d7794 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -899,18 +899,22 @@ def test_validate_request_basic(): {"field": "squid.num_tentacles", "value": 10}, ], ) - expected = [samplegen.TransformedRequest( - base="squid", - body=[ - samplegen.AttributeRequestSetup(field="mantle_length", - value='"100 \\"cm"'), - samplegen.AttributeRequestSetup(field="mantle_mass", - value='"10 kg"'), - samplegen.AttributeRequestSetup(field="num_tentacles", - value=10) - ], - single=None - )] + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="squid", + body=[ + samplegen.AttributeRequestSetup(field="mantle_length", + value='"100 \\"cm"'), + 
samplegen.AttributeRequestSetup(field="mantle_mass", + value='"10 kg"'), + samplegen.AttributeRequestSetup(field="num_tentacles", + value=10) + ], + single=None + ) + ] + ) assert actual == expected @@ -943,13 +947,15 @@ def test_validate_request_top_level_field(): [{"field": "squid", "value": "humboldt"}] ) - expected = [ - samplegen.TransformedRequest(base="squid", - body=None, - single=samplegen.AttributeRequestSetup( - value='"humboldt"' - )) - ] + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="squid", + body=None, + single=samplegen.AttributeRequestSetup( + value='"humboldt"' + )) + ] + ) assert actual == expected @@ -1035,24 +1041,26 @@ def test_validate_request_multiple_arguments(): }, ], ) - expected = [ - samplegen.TransformedRequest( - base="squid", - body=[samplegen.AttributeRequestSetup( - field="mantle_length", - value='"100 cm"', - value_is_file=True)], - single=None - ), - samplegen.TransformedRequest( - base="clam", - body=[samplegen.AttributeRequestSetup( - field="shell_mass", - value='"100 kg"', - comment="Clams can be large")], - single=None - ), - ] + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="squid", + body=[samplegen.AttributeRequestSetup( + field="mantle_length", + value='"100 cm"', + value_is_file=True)], + single=None + ), + samplegen.TransformedRequest( + base="clam", + body=[samplegen.AttributeRequestSetup( + field="shell_mass", + value='"100 kg"', + comment="Clams can be large")], + single=None + ), + ] + ) assert actual == expected @@ -1524,11 +1532,16 @@ def test_validate_request_enum(): types.CallingForm.Request, [{"field": "cephalopod.subclass", "value": "COLEOIDEA"}] ) - expected = [samplegen.TransformedRequest( - "cephalopod", - body=[samplegen.AttributeRequestSetup(field="subclass", - value='"COLEOIDEA"')], - single=None)] + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + "cephalopod", + 
body=[samplegen.AttributeRequestSetup(field="subclass", + value='"COLEOIDEA"')], + single=None + ) + ] + ) assert actual == expected @@ -1541,10 +1554,10 @@ def test_validate_request_enum_top_level(): types.CallingForm.Request, [{"field": "subclass", "value": "COLEOIDEA"}] ) - expected = [samplegen.TransformedRequest( + expected = samplegen.FullRequest(request_list=[samplegen.TransformedRequest( "subclass", single=samplegen.AttributeRequestSetup(value='"COLEOIDEA"'), - body=None)] + body=None)]) assert actual == expected @@ -1627,24 +1640,26 @@ def test_validate_request_resource_name(): request ) - expected = [ - samplegen.TransformedRequest( - base="taxon", - pattern="kingdom/{kingdom}/phylum/{phylum}", - single=None, - body=[ - samplegen.AttributeRequestSetup( - field="kingdom", - value="animalia", - ), - samplegen.AttributeRequestSetup( - field="phylum", - value="mollusca", - input_parameter="phylum", - ), - ] - ) - ] + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="taxon", + pattern="kingdom/{kingdom}/phylum/{phylum}", + single=None, + body=[ + samplegen.AttributeRequestSetup( + field="kingdom", + value="animalia", + ), + samplegen.AttributeRequestSetup( + field="phylum", + value="mollusca", + input_parameter="phylum", + ), + ] + ) + ] + ) assert actual == expected @@ -1663,15 +1678,17 @@ def test_validate_request_primitive_field(): actual = v.validate_and_transform_request(types.CallingForm.Request, request) - expected = [ - samplegen.TransformedRequest( - base="species", - body=None, - single=samplegen.AttributeRequestSetup( - value='"Architeuthis dux"' + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="species", + body=None, + single=samplegen.AttributeRequestSetup( + value='"Architeuthis dux"' + ) ) - ) - ] + ] + ) assert actual == expected diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py 
b/packages/gapic-generator/tests/unit/samplegen/test_template.py index d4383d4bb753..bd3e539891fe 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -42,7 +42,8 @@ def check_template(template_fragment, expected_output, **kwargs): undefined=jinja2.StrictUndefined, extensions=["jinja2.ext.do"], - trim_blocks=True, lstrip_blocks=True + trim_blocks=True, + lstrip_blocks=True ) env.filters['snake_case'] = utils.to_snake_case @@ -132,44 +133,128 @@ def test_render_request_basic(): gastropod["movie"] = f.read() ''', - request=[samplegen.TransformedRequest(base="cephalopod", - body=[ + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="cephalopod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="'10 kg'", + input_parameter="cephalopod_mass" + ), + samplegen.AttributeRequestSetup( + field="photo", + value="'path/to/cephalopod/photo.jpg'", + input_parameter="photo_path", + value_is_file=True + ), + samplegen.AttributeRequestSetup( + field="order", + value="Molluscs.Cephalopoda.Coleoidea"), + ], + single=None), + samplegen.TransformedRequest(base="gastropod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="'1 kg'", + input_parameter="gastropod_mass" + ), + samplegen.AttributeRequestSetup( + field="order", + value="Molluscs.Gastropoda.Pulmonata" + ), + samplegen.AttributeRequestSetup( + field="movie", + value="'path/to/gastropod/movie.mkv'", + input_parameter="movie_path", + value_is_file=True + ) + ], + single=None), + ], + flattenable=True, + ) + ) + + +def test_render_request_unflattened(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_request_setup(request) }} + ''', + ''' + cephalopod = {} + # cephalopod_mass = '10 kg' + cephalopod["mantle_mass"] = cephalopod_mass + + # photo_path = 'path/to/cephalopod/photo.jpg' + with open(photo_path, "rb") as 
f: + cephalopod["photo"] = f.read() + + cephalopod["order"] = Molluscs.Cephalopoda.Coleoidea + + gastropod = {} + # gastropod_mass = '1 kg' + gastropod["mantle_mass"] = gastropod_mass + + gastropod["order"] = Molluscs.Gastropoda.Pulmonata + + # movie_path = 'path/to/gastropod/movie.mkv' + with open(movie_path, "rb") as f: + gastropod["movie"] = f.read() + + request = { + 'cephalopod': cephalopod, + 'gastropod': gastropod, + 'bivalve': "humboldt", + } + ''', + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="cephalopod", + body=[ samplegen.AttributeRequestSetup( field="mantle_mass", value="'10 kg'", input_parameter="cephalopod_mass" ), - samplegen.AttributeRequestSetup( + samplegen.AttributeRequestSetup( field="photo", value="'path/to/cephalopod/photo.jpg'", input_parameter="photo_path", value_is_file=True ), - samplegen.AttributeRequestSetup( + samplegen.AttributeRequestSetup( field="order", value="Molluscs.Cephalopoda.Coleoidea"), - ], - single=None), - samplegen.TransformedRequest(base="gastropod", - body=[ + ], + single=None), + samplegen.TransformedRequest(base="gastropod", + body=[ samplegen.AttributeRequestSetup( field="mantle_mass", value="'1 kg'", input_parameter="gastropod_mass" ), - samplegen.AttributeRequestSetup( + samplegen.AttributeRequestSetup( field="order", value="Molluscs.Gastropoda.Pulmonata" ), - samplegen.AttributeRequestSetup( + samplegen.AttributeRequestSetup( field="movie", value="'path/to/gastropod/movie.mkv'", input_parameter="movie_path", value_is_file=True ) - ], - single=None), - ] + ], + single=None), + samplegen.TransformedRequest(base="bivalve", + body=None, + single='"humboldt"'), + ] + ) ) @@ -182,24 +267,27 @@ def test_render_request_resource_name(): ''' taxon = "kingdom/{kingdom}/phylum/{phylum}".format(kingdom="animalia", phylum=mollusca) ''', - request=[ - samplegen.TransformedRequest( - base="taxon", - single=None, - body=[ - samplegen.AttributeRequestSetup( - field="kingdom", - 
value='"animalia"', - ), - samplegen.AttributeRequestSetup( - field="phylum", - value="mollusca", - input_parameter="mollusca", - ) - ], - pattern="kingdom/{kingdom}/phylum/{phylum}" - ), - ] + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="taxon", + single=None, + body=[ + samplegen.AttributeRequestSetup( + field="kingdom", + value='"animalia"', + ), + samplegen.AttributeRequestSetup( + field="phylum", + value="mollusca", + input_parameter="mollusca", + ) + ], + pattern="kingdom/{kingdom}/phylum/{phylum}" + ), + ], + flattenable=True + ) ) @@ -355,8 +443,8 @@ def test_collection_loop(): ''' for m in response.molluscs: print("Mollusc: {}".format(m)) - - + + ''', collection={"collection": "$resp.molluscs", "variable": "m", @@ -571,32 +659,35 @@ def test_print_input_params(): ''' mass, length, color ''', - request=[samplegen.TransformedRequest(base="squid", - body=[ - samplegen.AttributeRequestSetup( - field="mass", - value="10 kg", - input_parameter="mass" - ), - samplegen.AttributeRequestSetup( - field="length", - value="20 m", - input_parameter="length" - ) - ], - single=None), - samplegen.TransformedRequest(base="diameter", - single=samplegen.AttributeRequestSetup( - value="10 cm" - ), - body=None), - samplegen.TransformedRequest(base="color", - single=samplegen.AttributeRequestSetup( - value="red", - input_parameter="color" - ), - body=None), - ] + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="squid", + body=[ + samplegen.AttributeRequestSetup( + field="mass", + value="10 kg", + input_parameter="mass" + ), + samplegen.AttributeRequestSetup( + field="length", + value="20 m", + input_parameter="length" + ) + ], + single=None), + samplegen.TransformedRequest(base="diameter", + single=samplegen.AttributeRequestSetup( + value="10 cm" + ), + body=None), + samplegen.TransformedRequest(base="color", + single=samplegen.AttributeRequestSetup( + value="red", + input_parameter="color" + ), + 
body=None), + ] + ) ) @@ -685,17 +776,50 @@ def test_render_method_call_basic(): calling_form, calling_form_enum) }} ''', ''' - client.categorize_mollusc(video, audio, guess) + client.categorize_mollusc(request=request) ''', - request=[samplegen.TransformedRequest(base="video", - body=True, - single=None), - samplegen.TransformedRequest(base="audio", - body=True, - single=None), - samplegen.TransformedRequest(base="guess", - body=True, - single=None)], + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", + body=True, + single=None), + samplegen.TransformedRequest(base="audio", + body=True, + single=None), + samplegen.TransformedRequest(base="guess", + body=True, + single=None) + ], + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.Request + ) + + +def test_render_method_call_basic_flattenable(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + calling_form, calling_form_enum) }} + ''', + ''' + client.categorize_mollusc(video=video, audio=audio, guess=guess) + ''', + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", + body=True, + single=None), + samplegen.TransformedRequest(base="audio", + body=True, + single=None), + samplegen.TransformedRequest(base="guess", + body=True, + single=None) + ], + flattenable=True, + ), calling_form_enum=CallingForm, calling_form=CallingForm.Request ) @@ -711,9 +835,15 @@ def test_render_method_call_bidi(): ''' client.categorize_mollusc([video]) ''', - request=[samplegen.TransformedRequest(base="video", - body=True, - single=None)], + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="video", + body=True, + single=None + ) + ] + ), calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingBidi ) @@ -729,9 +859,15 @@ def test_render_method_call_client(): ''' 
client.categorize_mollusc([video]) ''', - request=[samplegen.TransformedRequest(base="video", - body=True, - single=None)], + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="video", + body=True, + single=None + ) + ] + ), calling_form_enum=CallingForm, calling_form=CallingForm.RequestStreamingClient ) @@ -803,27 +939,29 @@ def main(): if __name__ == "__main__": main() ''', - request=[ - samplegen.TransformedRequest(base="input_params", - body=[ - samplegen.AttributeRequestSetup( - field="list_molluscs.order", - value="'coleoidea'", - input_parameter="order" - ), - samplegen.AttributeRequestSetup( - field="list_molluscs.mass", - value="'60kg'", - input_parameter="mass") - ], - single=None), - samplegen.TransformedRequest(base="enum_param", - body=[ - samplegen.AttributeRequestSetup( - field="list_molluscs.zone", - value="MESOPELAGIC" - ) - ], - single=None) - ] + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="input_params", + body=[ + samplegen.AttributeRequestSetup( + field="list_molluscs.order", + value="'coleoidea'", + input_parameter="order" + ), + samplegen.AttributeRequestSetup( + field="list_molluscs.mass", + value="'60kg'", + input_parameter="mass") + ], + single=None), + samplegen.TransformedRequest(base="enum_param", + body=[ + samplegen.AttributeRequestSetup( + field="list_molluscs.zone", + value="MESOPELAGIC" + ) + ], + single=None) + ] + ) ) From 0ef933557e0808da526921d6b9ce61056293d70b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 21 Jan 2020 14:46:26 -0800 Subject: [PATCH 0188/1339] Add an __init__.py file to generated unit test dir as fix for #263 (#270) --- packages/gapic-generator/gapic/samplegen/samplegen.py | 2 +- .../gapic/templates/tests/unit/%name_%version/%sub/__init__.py | 0 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/__init__.py diff --git 
a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index df89f1e53e5e..051419d7db80 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -229,7 +229,7 @@ class Validator: # TODO(dovs): make the schema a required param. def __init__(self, method: wrappers.Method, api_schema=None): - # The response ($resp) variable is special and guaranteed to exist. + # The response ($resp) variable is special and guaranteed to exist. self.method = method self.request_type_ = method.input response_type = method.output diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/__init__.py b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 From 96eba77e4c4ac5d3f3d08941a665cd79d47fc96d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 21 Jan 2020 15:37:33 -0800 Subject: [PATCH 0189/1339] Allow passing in individual files to the conversion script (#269) --- .../scripts/fixup_%service_keywords.py.j2 | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 index 83d4c35d6965..94e1a5883820 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 @@ -3,6 +3,7 @@ import argparse import os import libcst as cst +from itertools import chain from typing import (Any, Callable, Dict, List, Sequence, Tuple) @@ -79,13 +80,16 @@ class {{ service.client_name }}CallTransformer(cst.CSTTransformer): def fix_files( dirs: Sequence[str], + files: Sequence[str], *, transformer={{ service.client_name }}CallTransformer(), ): - pyfile_gen = 
(os.path.join(root, f) - for d in dirs - for root, _, files in os.walk(d) - for f in files if os.path.splitext(f)[1] == ".py") + pyfile_gen = chain( + (os.path.join(root, f) + for d in dirs + for root, _, files in os.walk(d) + for f in files if os.path.splitext(f)[1] == ".py"), + files) for fpath in pyfile_gen: with open(fpath, 'r+') as f: @@ -113,13 +117,21 @@ Note: This tool operates at a best-effort level at converting positional Be sure to back up your source files before running this tool and to compare the diffs. """) - parser.add_argument( + group = parser.add_mutually_exclusive_group(required=True) + group.add_argument( '-d', metavar='dir', dest='dirs', action='append', - help='a directory to walk for python files to fix up' + help='a directory to walk for python files to fix up', + ) + group.add_argument( + '-f', + metavar='file', + dest='files', + action='append', + help='a file to fix up via un-flattening', ) args = parser.parse_args() - fix_files(args.dirs or ['.']) + fix_files(args.dirs or [], args.files or []) {% endblock %} From 51b686132226e14f54c9fa23edbeb831302d6e51 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 22 Jan 2020 13:48:23 -0800 Subject: [PATCH 0190/1339] Add enums to the default __init__.py (#271) Fix for #265 --- .../templates/%namespace/%name/__init__.py.j2 | 50 +++++++++++++------ packages/gapic-generator/gapic/utils/lines.py | 9 ++-- 2 files changed, 39 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 547f8d5eac16..7dce5bc781d3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -2,6 +2,7 @@ {% block content %} {# Import subpackages. 
-#} +{% filter sort_lines -%} {% for subpackage in api.subpackages.keys() -%} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} {{ api.naming.versioned_module_name }} import {{ subpackage }} @@ -14,34 +15,51 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} {% endfor -%} -{# Import messages from each proto. +{# Import messages and enums from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. -#} +{# Import messages from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. + -#} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values()|sort(attribute='name') -%} + {% for message in proto.messages.values()|sort(attribute='name') -%} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} -{% endfor %}{% endfor %} - +{% endfor -%} +{% for enum in proto.enums.values()|sort(attribute='name') -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +{% endfor %}{% endfor -%} +{% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
-#} __all__ = ( - {%- for subpackage in api.subpackages.keys() %} - '{{ subpackage }}', - {%- endfor %} - {%- for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name }}', - {%- endfor %} - {%- for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.messages.values()|sort(attribute='name') %} - '{{ message.name }}', - {%- endfor %}{% endfor %} +{%- filter indent %} +{% filter sort_lines -%} +{% for subpackage in api.subpackages.keys() -%} +'{{ subpackage }}', +{% endfor -%} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +'{{ service.client_name }}', +{% endfor -%} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} +{% for message in proto.messages.values()|sort(attribute='name') -%} +'{{ message.name }}', +{% endfor -%} +{% for enum in proto.enums.values()|sort(attribute='name') + if proto.meta.address.subpackage == api.subpackage_view -%} +'{{ enum.name }}', +{% endfor -%}{% endfor -%} +{% endfilter -%} +{% endfilter -%} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 680b583e58cf..64d32a31faef 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -13,6 +13,7 @@ # limitations under the License. import textwrap +from typing import Iterable def sort_lines(text: str, dedupe: bool = True) -> str: @@ -27,11 +28,11 @@ def sort_lines(text: str, dedupe: bool = True) -> str: trailing = '\n' if text.endswith('\n') else '' # Split the text into individual lines, throwing away any empty lines. 
- lines = [i for i in text.strip().split('\n') if i.strip()] + lines: Iterable[str] = (i for i in text.strip().split('\n') if i.strip()) # De-duplicate the lines if requested. if dedupe: - lines = list(set(lines)) + lines = set(lines) # Return the final string. answer = '\n'.join(sorted(lines)) @@ -78,8 +79,8 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: first = text.split('\n')[0] + '\n' if len(first) > width - offset: initial = textwrap.wrap(first, - break_long_words=False, - width=width - offset, + break_long_words=False, + width=width - offset, ) # Strip the first \n from the text so it is not misidentified as an # intentionally short line below. From 4786136bec0eea1e42952df744d627f14c42c19d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 24 Jan 2020 12:14:52 -0800 Subject: [PATCH 0191/1339] Autogenerated documentation (#272) Intended fix for #228 --- .../docs/%name_%version/services.rst.j2 | 6 + .../docs/%name_%version/types.rst.j2 | 5 + .../gapic/templates/docs/conf.py.j2 | 363 ++++++++++++++++++ .../gapic/templates/docs/index.rst.j2 | 6 + 4 files changed, 380 insertions(+) create mode 100644 packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 create mode 100644 packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 create mode 100644 packages/gapic-generator/gapic/templates/docs/conf.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/docs/index.rst.j2 diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 new file mode 100644 index 000000000000..a4c02ad53197 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 @@ -0,0 +1,6 @@ +Client for {{ api.naming.long_name }} API +{{ '=' * (14 + api.naming.long_name|length) }} + +.. 
automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }} + :members: + :inherited-members: diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 new file mode 100644 index 000000000000..30f93dd8e3e4 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 @@ -0,0 +1,5 @@ +Types for {{ api.naming.long_name }} API +{{ '=' * (13 + api.naming.long_name|length) }} + +.. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types + :members: diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 new file mode 100644 index 000000000000..1e827b37d96c --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -0,0 +1,363 @@ +{% extends '_base.py.j2' %} + +{% block content %} +# +# {{ api.naming.warehouse_package_name }} documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"{{ api.naming.warehouse_package_name }}" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "{{ api.naming.namespace|join(' ') }} Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. 
If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "{{ api.naming.warehouse_package_name }}-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "{{ api.naming.warehouse_package_name }}.tex", + u"{{ api.naming.warehouse_package_name }} Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "{{ api.naming.warehouse_package_name }}", + u"{{ api.naming.long_name }} Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "{{ api.naming.warehouse_package_name }}", + u"{{ api.naming.warehouse_package_name }} Documentation", + author, + "{{ api.naming.warehouse_package_name }}", + "GAPIC library for {{ api.naming.long_name }} API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. 
+# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 new file mode 100644 index 000000000000..1a4ece01a37d --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 @@ -0,0 +1,6 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + {{ api.naming.versioned_module_name }}/services + {{ api.naming.versioned_module_name }}/types From a77b7d6a571b58dbd602a2ae7ef8989d30c1fac4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 28 Jan 2020 14:03:21 -0800 Subject: [PATCH 0192/1339] Provide 'types' and 'enums' submodule view (#273) This is a minor hack to preserve the module interface from the monocode: types and enums for a service live in distinct submodules. Includes tests for enums --- .../%namespace/%name_%version/%sub/enums.py.j2 | 13 +++++++++++++ .../%name_%version/%sub/types/__init__.py.j2 | 13 +++++++++++++ .../unit/%name_%version/%sub/test_%service.py.j2 | 12 ++++++++++++ 3 files changed, 38 insertions(+) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 new file mode 100644 index 000000000000..567e8fe1c591 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 @@ -0,0 +1,13 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% for p in api.protos.values() if p.file_to_generate and p.enums -%} +from .types.{{ p.module_name }} import ({% for e in p.enums.values() %}{{ e.name }}, {% endfor %}) +{% endfor %} + +__all__ = ( +{%- for p in api.protos.values() if p.file_to_generate %}{% for e in p.enums.values() %} + '{{ e.name }}', +{%- endfor %}{% endfor %} +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 new file mode 100644 index 000000000000..7b464a970568 --- /dev/null +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -0,0 +1,13 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% for p in api.protos.values() if p.file_to_generate and p.messages -%} +from .{{ p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) +{% endfor %} + +__all__ = ( +{%- for p in api.protos.values() if p.file_to_generate %}{% for m in p.messages.values() %} + '{{ m.name }}', +{%- endfor %}{% endfor %} +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 0be731c98cc7..45f0b35cb88b 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -20,6 +20,9 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} +{% if api.protos.values()|selectattr('enums') -%} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import enums +{% endif -%} {% for method in service.methods.values() -%} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') @@ -431,4 +434,13 @@ def test_{{ message.name|snake_case }}_path(): {% endwith -%} {% endfor -%} + +{% with enums = [] -%}{% for p in api.protos.values() if p.file_to_generate -%}{% for e in p.enums.values() if e.ident.module == service.meta.address.module -%}{% do enums.append(e) %} +{% if enums -%} +def test_enum_path(): +{%- for e in enums %} + assert enums.{{ e.name }} == {{ e.ident.module }}.{{ e.name }} +{%- endfor %} +{% endif -%} +{% endfor -%}{% endfor -%}{% 
endwith -%} {% endblock %} From 866323bf0ef2cba5cd741a48da016010a6115d03 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 28 Jan 2020 14:35:29 -0800 Subject: [PATCH 0193/1339] Bump version to 0.15.0 (#275) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2b79f040c514..1c2a2c859f92 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.14.0', + version='0.15.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From c5ea6666785e9b9d50031a0f59b4c5dfe5777be9 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 28 Jan 2020 15:42:56 -0800 Subject: [PATCH 0194/1339] Add a default-no confirmation prompt to the conversion script (#277) --- .../templates/scripts/fixup_%service_keywords.py.j2 | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 index 94e1a5883820..bf23aeff1462 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 @@ -133,5 +133,13 @@ Note: This tool operates at a best-effort level at converting positional help='a file to fix up via un-flattening', ) args = parser.parse_args() - fix_files(args.dirs or [], args.files or []) + print( + """It is strongly, strongly recommended that you commit outstanding changes and +back up your source tree before running this conversion script. +Please take a moment to do that now if you haven't already. +""" + ) + resp = input("Really attempt to convert sources? 
yes/[no]: ") + if resp == "yes": + fix_files(args.dirs or [], args.files or []) {% endblock %} From c104e7652ce9993a53833f680d9c7260b2b32369 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 30 Jan 2020 14:50:27 -0800 Subject: [PATCH 0195/1339] Lazy import feature (#278) APIs with a very large number of services may be prohibitive to import and cause excessive delay at startup time. For Python >= v3.7, there is now a generator option that allows lazy imports of services, transports, enums, and types. Lazy loading is currently limited to the api and versioned api modules. Lazily loaded modules and classes include the service modules, types, enums, clients, transports, wrapped protobuf messages, and wrapped protobuf enums. Lazily loading lower-level modules all the way down is a work in progress. Showcase system tests use lazy imports. **Note:** This feature is a work in progress. It requires explicit activation by passing '--python_gapic_opt=lazy-import' to the generator. Rough edges exist; consider yourself warned. 
--- .../gapic-generator/gapic/cli/generate.py | 2 +- .../gapic/generator/generator.py | 29 ++++-- .../gapic/generator/options.py | 17 ++-- .../gapic-generator/gapic/schema/wrappers.py | 8 ++ .../templates/%namespace/%name/__init__.py.j2 | 97 ++++++++++++++++++- .../%name_%version/%sub/__init__.py.j2 | 96 ++++++++++++++++++ packages/gapic-generator/noxfile.py | 1 + .../tests/unit/generator/test_generator.py | 62 +++++++++--- .../tests/unit/generator/test_options.py | 6 ++ .../unit/schema/wrappers/test_service.py | 2 + 10 files changed, 290 insertions(+), 30 deletions(-) diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index db69367e5ddc..f72066057ace 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -61,7 +61,7 @@ def generate( # Translate into a protobuf CodeGeneratorResponse; this reads the # individual templates and renders them. # If there are issues, error out appropriately. - res = generator.Generator(opts).get_response(api_schema) + res = generator.Generator(opts).get_response(api_schema, opts) # Output the serialized response. output.write(res.SerializeToString()) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 2bdfe677b710..2913dab8bc58 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -61,7 +61,11 @@ def __init__(self, opts: options.Options) -> None: self._sample_configs = opts.sample_configs - def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: + def get_response( + self, + api_schema: api.API, + opts: options.Options + ) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. 
This is a complete response to be written to (usually) stdout, and @@ -69,6 +73,7 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: Args: api_schema (~api.API): An API schema object. + opts (~.options.Options): An options instance. Returns: ~.CodeGeneratorResponse: A response describing appropriate @@ -93,9 +98,13 @@ def get_response(self, api_schema: api.API) -> CodeGeneratorResponse: continue # Append to the output files dictionary. - output_files.update(self._render_template(template_name, - api_schema=api_schema, - )) + output_files.update( + self._render_template( + template_name, + api_schema=api_schema, + opts=opts + ) + ) output_files.update(self._generate_samples_and_manifest( api_schema, @@ -208,6 +217,7 @@ def _render_template( self, template_name: str, *, api_schema: api.API, + opts: options.Options, ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. @@ -240,6 +250,7 @@ def _render_template( for subpackage in api_schema.subpackages.values(): answer.update(self._render_template(template_name, api_schema=subpackage, + opts=opts )) skip_subpackages = True @@ -252,7 +263,8 @@ def _render_template( continue answer.update(self._get_file(template_name, api_schema=api_schema, - proto=proto + proto=proto, + opts=opts )) return answer @@ -266,15 +278,19 @@ def _render_template( answer.update(self._get_file(template_name, api_schema=api_schema, service=service, + opts=opts, )) return answer # This file is not iterating over anything else; return back # the one applicable file. 
- answer.update(self._get_file(template_name, api_schema=api_schema)) + answer.update( + self._get_file(template_name, api_schema=api_schema, opts=opts) + ) return answer def _get_file(self, template_name: str, *, + opts: options.Options, api_schema=api.API, **context: Mapping): """Render a template to a protobuf plugin File object.""" @@ -289,6 +305,7 @@ def _get_file(self, template_name: str, *, content=formatter.fix_whitespace( self._env.get_template(template_name).render( api=api_schema, + opts=opts, **context ), ), diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index c95ac62a5e8e..414e6a5c5687 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -13,7 +13,7 @@ # limitations under the License. from collections import defaultdict -from typing import Any, DefaultDict, Dict, List, Optional, Tuple +from typing import Any, DefaultDict, Dict, FrozenSet, List, Optional, Tuple import dataclasses import json @@ -36,11 +36,15 @@ class Options: retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) + lazy_import: bool = False # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' - RETRY_OPT: str = 'retry-config' - SAMPLES_OPT: str = 'samples' + OPT_FLAGS: FrozenSet[str] = frozenset(( + 'retry-config', # takes a path + 'samples', # output dir + 'lazy-import', # requires >= 3.7 + )) @classmethod def build(cls, opt_string: str) -> 'Options': @@ -70,7 +74,7 @@ def build(cls, opt_string: str) -> 'Options': opt, value = opt.split('=') # Save known, expected keys. - if opt in (cls.RETRY_OPT, cls.SAMPLES_OPT): + if opt in cls.OPT_FLAGS: opts[opt].append(value) # Throw away other options not meant for us. 
@@ -93,14 +97,14 @@ def build(cls, opt_string: str) -> 'Options': ) retry_cfg = None - retry_paths = opts.pop(cls.RETRY_OPT, None) + retry_paths = opts.pop('retry-config', None) if retry_paths: # Just use the last config specified. with open(retry_paths[-1]) as f: retry_cfg = json.load(f) # Build the options instance. - sample_paths = opts.pop(cls.SAMPLES_OPT, []) + sample_paths = opts.pop('samples', []) answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), @@ -111,6 +115,7 @@ def build(cls, opt_string: str) -> 'Options': for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), templates=tuple(os.path.expanduser(i) for i in templates), + lazy_import=bool(opts.pop('lazy-import', False)) ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index dece167763eb..72978d26cab9 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -678,6 +678,14 @@ def client_name(self) -> str: """Returns the name of the generated client class""" return self.name + "Client" + @property + def transport_name(self): + return self.name + "Transport" + + @property + def grpc_transport_name(self): + return self.name + "GrpcTransport" + @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 7dce5bc781d3..5cdccea984d9 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -1,6 +1,100 @@ {% extends '_base.py.j2' %} - {% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import re +import sys + +from 
itertools import chain + +def to_snake_case(s: str) -> str: + s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) + s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) + + # Numbers are a weird case; the goal is to spot when they _start_ + # some kind of name or acronym (e.g. 2FA, 3M). + # + # Find cases of a number preceded by a lower-case letter _and_ + # followed by at least two capital letters or a single capital and + # end of string. + s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) + s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) + + return s.lower() + + +def from_snake_case(s): + _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') + return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') + +_lazy_name_to_package_map = { + 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', + 'enums': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', + {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', + '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', + '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.grpc', + {%- endfor %} {# Need to do types and 
enums #} +} + +_lazy_type_to_package_map = { +{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %} +{%- for enum in proto.enums.values() %} + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', +{%- endfor %}{%- endfor %} +} + + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted( + chain( + (from_snake_case(k) for k in _lazy_name_to_package_map), + _lazy_type_to_package_map, + ) + ) + return all_names + elif name.endswith('Transport'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) + globals()[name] = klass + return klass + elif name.endswith('Client'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + enums = __getattr__('enums') + klass = type( + name, + (sub_mod_class,), + {'__doc__': sub_mod_class.__doc__, 'enums': enums} + ) + globals()[name] = klass + return klass + elif name in _lazy_name_to_package_map: + module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') + globals()[name] = module + return module + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown sub-module {name!r}.') + + +def __dir__(): + return 
globals().get('__all__') or __getattr__('__all__') +{% else -%} {# do not use lazy import #} {# Import subpackages. -#} {% filter sort_lines -%} {% for subpackage in api.subpackages.keys() -%} @@ -62,4 +156,5 @@ __all__ = ( {% endfilter -%} {% endfilter -%} ) +{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index df685b243801..aad42c5f7699 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -1,6 +1,101 @@ {% extends '_base.py.j2' %} {% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import re +import sys + +from itertools import chain + +def to_snake_case(s: str) -> str: + s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) + s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) + + # Numbers are a weird case; the goal is to spot when they _start_ + # some kind of name or acronym (e.g. 2FA, 3M). + # + # Find cases of a number preceded by a lower-case letter _and_ + # followed by at least two capital letters or a single capital and + # end of string. 
+ s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) + s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) + + return s.lower() + + +def from_snake_case(s): + _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') + return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') + +_lazy_name_to_package_map = { + 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', + 'enums': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', + {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', + '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', + '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.grpc', + {%- endfor %} {# Need to do types and enums #} +} + +_lazy_type_to_package_map = { +{%- filter sort_lines %} +{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %} +{%- for enum in 
proto.enums.values() %} + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', +{%- endfor %}{%- endfor %}{%- endfilter %} +} + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted( + chain( + (from_snake_case(k) for k in _lazy_name_to_package_map), + _lazy_type_to_package_map, + ) + ) + return all_names + elif name.endswith('Transport'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) + globals()[name] = klass + return klass + elif name.endswith('Client'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + enums = __getattr__('enums') + klass = type( + name, + (sub_mod_class,), + {'__doc__': sub_mod_class.__doc__, 'enums': enums} + ) + globals()[name] = klass + return klass + elif name in _lazy_name_to_package_map: + module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') + globals()[name] = module + return module + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown sub-module {name!r}.') + + +def __dir__(): + return globals().get('__all__') or __getattr__('__all__') +{% else -%} {# do not use lazy import #} {# Import subpackages. -#} {% for subpackage in api.subpackages.keys() -%} from . 
import {{ subpackage }} @@ -55,4 +150,5 @@ __all__ = ( {%- endfor %} {%- endfilter %} ) +{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index ab604a6a3c4c..cdd71f2709d9 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -73,6 +73,7 @@ def showcase(session): session.run('protoc', f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', + '--python_gapic_opt=lazy-import,', 'google/showcase/v1beta1/echo.proto', 'google/showcase/v1beta1/identity.proto', external=True, diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 56e50635c365..2fe230073a56 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -45,7 +45,10 @@ def test_get_response(): lt.return_value = ['foo/bar/baz.py.j2', 'molluscs/squid/sample.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('I am a template result.') - cgr = g.get_response(api_schema=make_api()) + cgr = g.get_response( + api_schema=make_api(), + opts=options.Options.build('') + ) lt.assert_called_once() gt.assert_has_calls([ mock.call('foo/bar/baz.py.j2'), @@ -62,7 +65,10 @@ def test_get_response_ignores_empty_files(): lt.return_value = ['foo/bar/baz.py.j2', 'molluscs/squid/sample.py.j2'] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('# Meaningless comment') - cgr = g.get_response(api_schema=make_api()) + cgr = g.get_response( + api_schema=make_api(), + opts=options.Options.build('') + ) lt.assert_called_once() gt.assert_has_calls([ mock.call('foo/bar/baz.py.j2'), @@ -81,7 +87,10 @@ def test_get_response_ignores_private_files(): ] with mock.patch.object(jinja2.Environment, 
'get_template') as gt: gt.return_value = jinja2.Template('I am a template result.') - cgr = g.get_response(api_schema=make_api()) + cgr = g.get_response( + api_schema=make_api(), + opts=options.Options.build('') + ) lt.assert_called_once() gt.assert_has_calls([ mock.call('foo/bar/baz.py.j2'), @@ -100,7 +109,10 @@ def test_get_response_fails_invalid_file_paths(): 'molluscs/squid/sample.py.j2', ] with pytest.raises(ValueError) as ex: - g.get_response(api_schema=make_api()) + g.get_response( + api_schema=make_api(), + opts=options.Options.build('') + ) ex_str = str(ex.value) assert '%proto' in ex_str and '%service' in ex_str @@ -115,12 +127,15 @@ def test_get_response_enumerates_services(): ] with mock.patch.object(jinja2.Environment, 'get_template') as gt: gt.return_value = jinja2.Template('Service: {{ service.name }}') - cgr = g.get_response(api_schema=make_api(make_proto( - descriptor_pb2.FileDescriptorProto(service=[ - descriptor_pb2.ServiceDescriptorProto(name='Spam'), - descriptor_pb2.ServiceDescriptorProto(name='EggsService'), - ]), - ))) + cgr = g.get_response( + api_schema=make_api(make_proto( + descriptor_pb2.FileDescriptorProto(service=[ + descriptor_pb2.ServiceDescriptorProto(name='Spam'), + descriptor_pb2.ServiceDescriptorProto( + name='EggsService' + ), + ]), + )), opts=options.Options.build('')) assert len(cgr.file) == 2 assert {i.name for i in cgr.file} == { 'foo/spam/baz.py', @@ -140,7 +155,7 @@ def test_get_response_enumerates_proto(): cgr = g.get_response(api_schema=make_api( make_proto(descriptor_pb2.FileDescriptorProto(name='a.proto')), make_proto(descriptor_pb2.FileDescriptorProto(name='b.proto')), - )) + ), opts=options.Options.build('')) assert len(cgr.file) == 2 assert {i.name for i in cgr.file} == {'foo/a.py', 'foo/b.py'} @@ -174,7 +189,10 @@ def test_get_response_divides_subpackages(): gt.return_value = jinja2.Template(""" {{- '' }}Subpackage: {{ '.'.join(api.subpackage_view) }} """.strip()) - cgr = g.get_response(api_schema=api_schema) + 
cgr = g.get_response( + api_schema=api_schema, + opts=options.Options.build('') + ) assert len(cgr.file) == 6 assert {i.name for i in cgr.file} == { 'foo/types/top.py', @@ -339,7 +357,10 @@ def test_samplegen_config_to_output_files( g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) - actual_response = g.get_response(api_schema) + actual_response = g.get_response( + api_schema, + opts=options.Options.build('') + ) expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( @@ -425,7 +446,10 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) - actual_response = g.get_response(api_schema) + actual_response = g.get_response( + api_schema, + opts=options.Options.build('') + ) expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( @@ -496,7 +520,10 @@ def test_generator_duplicate_samples(fs): api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) with pytest.raises(types.DuplicateSample): - generator.get_response(api_schema=api_schema) + generator.get_response( + api_schema=api_schema, + opts=options.Options.build('') + ) @mock.patch( @@ -611,7 +638,10 @@ def test_dont_generate_in_code_samples( ) ] ) - actual = generator.get_response(api_schema=api_schema) + actual = generator.get_response( + api_schema=api_schema, + opts=options.Options.build('') + ) assert actual == expected diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 0320e1013b80..9a8ade2be397 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -24,6 +24,7 @@ def test_options_empty(): opts = options.Options.build('') assert 
len(opts.templates) == 1 assert opts.templates[0].endswith('gapic/templates') + assert not opts.lazy_import def test_options_replace_templates(): @@ -115,3 +116,8 @@ def test_options_service_config(fs): ] } assert opts.retry == expected_cfg + + +def test_options_lazy_import(): + opts = options.Options.build('lazy-import') + assert opts.lazy_import diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 7e1907a7ebe5..dfe9f5b989b8 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -30,6 +30,8 @@ def test_service_properties(): service = make_service(name='ThingDoer') assert service.name == 'ThingDoer' assert service.client_name == 'ThingDoerClient' + assert service.transport_name == 'ThingDoerTransport' + assert service.grpc_transport_name == 'ThingDoerGrpcTransport' def test_service_host(): From d5df42cd51658bf9907ddaa7913c7bb2e6249471 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Fri, 31 Jan 2020 17:14:47 -0800 Subject: [PATCH 0196/1339] fix function signature for streaming rpc (#279) * fix function signature for streaming rpc generated code should take request iterator for client streaming, and return response iterator for server streaming --- packages/gapic-generator/.gitignore | 3 ++ .../%sub/services/%service/client.py.j2 | 29 ++++++++++- .../%name_%version/%sub/test_%service.py.j2 | 13 ++++- .../tests/system/test_grpc_streams.py | 50 +++++++++++++++++++ 4 files changed, 92 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore index ef4e4c2d30e2..df28dce99d3c 100644 --- a/packages/gapic-generator/.gitignore +++ b/packages/gapic-generator/.gitignore @@ -59,3 +59,6 @@ pylintrc.test # Mypy .mypy_cache + +# pyenv +.python-version diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 5448d9384c0d..3463d23573f0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -2,7 +2,7 @@ {% block content %} from collections import OrderedDict -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Iterable, Iterator, Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -11,7 +11,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore - + {% filter sort_lines -%} {% for method in service.methods.values() -%} {% for ref_type in method.ref_types_legacy -%} @@ -126,18 +126,28 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, + {%- if not method.client_streaming %} request: {{ method.input.ident }} = None, *, {% for field in method.flattened_fields.values() -%} {{ field.name }}: {{ field.ident }} = None, {% endfor -%} + {%- else %} + requests: Iterator[{{ method.input.ident }}] = None, + *, + {% endif -%} retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), + {%- if not method.server_streaming %} ) -> {{ method.client_output.ident }}: + {%- else %} + ) -> Iterable[{{ method.client_output.ident }}]: + {%- endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: + {%- if not method.client_streaming %} request (:class:`{{ 
method.input.ident.sphinx }}`): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} @@ -148,6 +158,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): on the ``request`` instance; if ``request`` is provided, this should not be set. {% endfor -%} + {%- else %} + requests (Iterator[`{{ method.input.ident.sphinx }}`]): + The request object iterator.{{ ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {%- endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -156,10 +171,15 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- if not method.void %} Returns: + {%- if not method.server_streaming %} {{ method.client_output.ident.sphinx }}: + {%- else %} + Iterable[{{ method.client_output.ident.sphinx }}]: + {%- endif %} {{ method.client_output.meta.doc|rst(width=72, indent=16) }} {%- endif %} """ + {%- if not method.client_streaming %} # Create or coerce a protobuf request object. {% if method.flattened_fields -%} # Sanity check: If we got a request object, we should *not* have @@ -176,6 +196,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} + {%- endif %} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -213,7 +234,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Send the request. 
{% if not method.void %}response = {% endif %}rpc( + {%- if not method.client_streaming %} request, + {%- else %} + requests, + {%- endif %} retry=retry, timeout=timeout, metadata=metadata, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 45f0b35cb88b..5343a7c0dc4d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -79,6 +79,9 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -98,12 +101,20 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {%- endfor %} ) {% endif -%} + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} response = client.{{ method.name|snake_case }}(request) - + {% endif %} + # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} assert args[0] == request + {% endif %} # Establish that the response is the type that we expect. 
{% if method.void -%} diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index 07b2e4302625..5ea52f46e565 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from google import showcase + def test_unary_stream(echo): content = 'The hail in Wales falls mainly on the snails.' @@ -24,3 +26,51 @@ def test_unary_stream(echo): for ground_truth, response in zip(content.split(' '), responses): assert response.content == ground_truth assert ground_truth == 'snails.' + + # TODO. Check responses.trailing_metadata() content once gapic-showcase + # server returns non-empty trailing metadata. + assert len(responses.trailing_metadata()) == 0 + + +def test_stream_unary(echo): + requests = [] + requests.append(showcase.EchoRequest(content="hello")) + requests.append(showcase.EchoRequest(content="world!")) + response = echo.collect(iter(requests)) + assert response.content == 'hello world!' + + +def test_stream_unary_passing_dict(echo): + requests = [{'content': 'hello'}, {'content': 'world!'}] + response = echo.collect(iter(requests)) + assert response.content == 'hello world!' + + +def test_stream_stream(echo): + requests = [] + requests.append(showcase.EchoRequest(content="hello")) + requests.append(showcase.EchoRequest(content="world!")) + responses = echo.chat(iter(requests)) + + contents = [] + for response in responses: + contents.append(response.content) + assert contents == ['hello', 'world!'] + + # TODO. Check responses.trailing_metadata() content once gapic-showcase + # server returns non-empty trailing metadata. 
+ assert len(responses.trailing_metadata()) == 0 + + +def test_stream_stream_passing_dict(echo): + requests = [{'content': 'hello'}, {'content': 'world!'}] + responses = echo.chat(iter(requests)) + + contents = [] + for response in responses: + contents.append(response.content) + assert contents == ['hello', 'world!'] + + # TODO. Check responses.trailing_metadata() content once gapic-showcase + # server returns non-empty trailing metadata. + assert len(responses.trailing_metadata()) == 0 From 16dfd4fbcec276f4f275b540abc76d32e379f253 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 6 Feb 2020 14:27:47 -0800 Subject: [PATCH 0197/1339] Add tests for server streaming methods #280 (#281) --- .../tests/system/test_grpc_streams.py | 21 ++++++++----------- 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index 5ea52f46e565..d0879d6a8986 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -15,11 +15,14 @@ from google import showcase +metadata = (("showcase-trailer", "hello world"),) + + def test_unary_stream(echo): content = 'The hail in Wales falls mainly on the snails.' responses = echo.expand({ 'content': content, - }) + }, metadata=metadata) # Consume the response and ensure it matches what we expect. # with pytest.raises(exceptions.NotFound) as exc: @@ -27,9 +30,7 @@ def test_unary_stream(echo): assert response.content == ground_truth assert ground_truth == 'snails.' - # TODO. Check responses.trailing_metadata() content once gapic-showcase - # server returns non-empty trailing metadata. 
- assert len(responses.trailing_metadata()) == 0 + assert responses.trailing_metadata() == metadata def test_stream_unary(echo): @@ -50,27 +51,23 @@ def test_stream_stream(echo): requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) - responses = echo.chat(iter(requests)) + responses = echo.chat(iter(requests), metadata=metadata) contents = [] for response in responses: contents.append(response.content) assert contents == ['hello', 'world!'] - # TODO. Check responses.trailing_metadata() content once gapic-showcase - # server returns non-empty trailing metadata. - assert len(responses.trailing_metadata()) == 0 + assert responses.trailing_metadata() == metadata def test_stream_stream_passing_dict(echo): requests = [{'content': 'hello'}, {'content': 'world!'}] - responses = echo.chat(iter(requests)) + responses = echo.chat(iter(requests), metadata=metadata) contents = [] for response in responses: contents.append(response.content) assert contents == ['hello', 'world!'] - # TODO. Check responses.trailing_metadata() content once gapic-showcase - # server returns non-empty trailing metadata. - assert len(responses.trailing_metadata()) == 0 + assert responses.trailing_metadata() == metadata From f7071e4ac1e1fe1ab65f89d7123619a6f62dc759 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 6 Feb 2020 14:35:21 -0800 Subject: [PATCH 0198/1339] Add a factory for grpc_channels that takes kwargs (#289) The grpc_channel underlying wrapped object takes additional constructor params via keyword args. This change adds a per-client factory function that passes kwords down the stack. 
--- .../services/%service/transports/grpc.py.j2 | 28 +++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 317664c58ce5..bd3b074e7414 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -63,6 +63,31 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if channel: self._grpc_channel = channel + @classmethod + def create_channel(cls, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + """ + return grpc_helpers.create_channel( + host, + credentials=credentials, + scopes=cls.AUTH_SCOPES, + **kwargs + ) + @property def grpc_channel(self) -> grpc.Channel: """Create the channel designed to connect to this service. @@ -73,10 +98,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Sanity check: Only create a new channel if we do not already # have one. 
if not hasattr(self, '_grpc_channel'): - self._grpc_channel = grpc_helpers.create_channel( + self._grpc_channel = self.create_channel( self._host, credentials=self._credentials, - scopes=self.AUTH_SCOPES, ) # Return the channel from cache. From 517c45b0114bdaee4a413b3a7f6aa0010ecc2dc4 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Fri, 7 Feb 2020 12:58:10 -0800 Subject: [PATCH 0199/1339] add test cases for interceptor (#282) add test cases for interceptor --- .../gapic-generator/tests/system/conftest.py | 52 +++++++++++++++++++ .../system/test_grpc_interceptor_streams.py | 46 ++++++++++++++++ 2 files changed, 98 insertions(+) create mode 100644 packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index f7b6ebb897cb..5aa782c94ca7 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import pytest from google.showcase import EchoClient @@ -34,3 +35,54 @@ def identity(): channel=grpc.insecure_channel('localhost:7469'), ) return IdentityClient(transport=transport) + + +class MetadataClientInterceptor(grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor): + + def __init__(self, key, value): + self._key = key + self._value = value + + def _add_metadata(self, client_call_details): + if client_call_details.metadata is not None: + client_call_details.metadata.append((self._key, self._value,)) + + def intercept_unary_unary(self, continuation, client_call_details, request): + self._add_metadata(client_call_details) + response = continuation(client_call_details, request) + return response + + def intercept_unary_stream(self, continuation, client_call_details, + request): + self._add_metadata(client_call_details) + response_it = continuation(client_call_details, request) + return response_it + + def intercept_stream_unary(self, continuation, client_call_details, + request_iterator): + self._add_metadata(client_call_details) + response = continuation(client_call_details, request_iterator) + return response + + def intercept_stream_stream(self, continuation, client_call_details, + request_iterator): + self._add_metadata(client_call_details) + response_it = continuation(client_call_details, request_iterator) + return response_it + + +@pytest.fixture +def intercepted_echo(): + # The interceptor adds 'showcase-trailer' client metadata. Showcase server + # echos any metadata with key 'showcase-trailer', so the same metadata + # should appear as trailing metadata in the response. 
+ interceptor = MetadataClientInterceptor('showcase-trailer', 'intercepted') + channel = grpc.insecure_channel('localhost:7469') + intercept_channel = grpc.intercept_channel(channel, interceptor) + transport = EchoClient.get_transport_class('grpc')( + channel=intercept_channel, + ) + return EchoClient(transport=transport) diff --git a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py new file mode 100644 index 000000000000..4b40b7611fca --- /dev/null +++ b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py @@ -0,0 +1,46 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google import showcase + + +# intercetped_metadata will be added by the interceptor automatically, and +# showcase server will echo it (since it has key 'showcase-trailer') as trailing +# metadata. +intercepted_metadata = (('showcase-trailer', 'intercepted'),) + + +def test_unary_stream(intercepted_echo): + content = 'The hail in Wales falls mainly on the snails.' + responses = intercepted_echo.expand({ + 'content': content, + }) + + for ground_truth, response in zip(content.split(' '), responses): + assert response.content == ground_truth + assert ground_truth == 'snails.' 
+ + assert responses.trailing_metadata() == intercepted_metadata + + +def test_stream_stream(intercepted_echo): + requests = [] + requests.append(showcase.EchoRequest(content="hello")) + requests.append(showcase.EchoRequest(content="world!")) + responses = intercepted_echo.chat(iter(requests)) + + contents = [response.content for response in responses] + assert contents == ['hello', 'world!'] + + assert responses.trailing_metadata() == intercepted_metadata From a3a462a2e8fda6ebeb33461e4b18cd003ed5cee0 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 11 Feb 2020 11:34:51 -0800 Subject: [PATCH 0200/1339] Change the behavior of the fixup script (#291) Only script parameters are a single input dir and a single output dir Fixups are no longer in-place: they are written to the corresponding path in the output dir. The files in the input dir are unmodified. Fixup script works on all services in an api concurrently instead of one script per service. It has been renamed accordingly. --- ...ce_keywords.py.j2 => fixup_keywords.py.j2} | 121 +++++++++++------- .../gapic/templates/setup.py.j2 | 6 +- 2 files changed, 75 insertions(+), 52 deletions(-) rename packages/gapic-generator/gapic/templates/scripts/{fixup_%service_keywords.py.j2 => fixup_keywords.py.j2} (58%) diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 similarity index 58% rename from packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 rename to packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 index bf23aeff1462..41c76034cc37 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%service_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 @@ -3,7 +3,8 @@ import argparse import os import libcst as cst -from itertools import chain +import pathlib +import sys from typing import (Any, Callable, Dict, 
List, Sequence, Tuple) @@ -21,14 +22,18 @@ def partition( return results[1], results[0] -class {{ service.client_name }}CallTransformer(cst.CSTTransformer): +class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - + {% with all_methods = [] -%} + {% for service in api.services.values() %}{% for method in service.methods.values() -%} + {% do all_methods.append(method) -%} + {% endfor %}{% endfor -%} METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - {% for method in service.methods.values() -%} + {% for method in all_methods|sort(attribute='name')|unique(attribute='name') -%} '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), - {% endfor -%} + {% endfor -%} } + {% endwith %} def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: try: @@ -59,11 +64,11 @@ class {{ service.client_name }}CallTransformer(cst.CSTTransformer): value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - {# Inline comments and formatting are currently stripped out. #} - {# My current attempts at preverving comments and formatting #} - {# keep the comments, but the formatting is run through a log #} - {# chipper, and an extra comma gets added, which causes a #} - {# parse error. #} + {# Inline comments and formatting are currently stripped out. -#} + {# My current attempts at preverving comments and formatting -#} + {# keep the comments, but the formatting is run through a log -#} + {# chipper, and an extra comma gets added, which causes a -#} + {# parse error. 
-#} cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that @@ -79,31 +84,45 @@ class {{ service.client_name }}CallTransformer(cst.CSTTransformer): def fix_files( - dirs: Sequence[str], - files: Sequence[str], + in_dir: pathlib.Path, + out_dir: pathlib.Path, *, - transformer={{ service.client_name }}CallTransformer(), + transformer={{ api.naming.module_name }}CallTransformer(), ): - pyfile_gen = chain( - (os.path.join(root, f) - for d in dirs - for root, _, files in os.walk(d) - for f in files if os.path.splitext(f)[1] == ".py"), - files) + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) for fpath in pyfile_gen: - with open(fpath, 'r+') as f: + with open(fpath, 'r') as f: src = f.read() - tree = cst.parse_module(src) - updated = tree.visit(transformer) - f.seek(0) - f.truncate() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: f.write(updated.code) if __name__ == '__main__': parser = argparse.ArgumentParser( - description="""Fix up source that uses the {{ service.name }} client library. + description="""Fix up source that uses the {{ api.naming.module_name }} client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. Note: This tool operates at a best-effort level at converting positional parameters in client method calls to keyword based parameters. 
@@ -114,32 +133,40 @@ Note: This tool operates at a best-effort level at converting positional These all constitute false negatives. The tool will also detect false positives when an API method shares a name with another method. - - Be sure to back up your source files before running this tool and to compare the diffs. """) - group = parser.add_mutually_exclusive_group(required=True) - group.add_argument( + parser.add_argument( '-d', - metavar='dir', - dest='dirs', - action='append', - help='a directory to walk for python files to fix up', + dest='input_dir', + help='the input directory to walk for python files to fix up', ) - group.add_argument( - '-f', - metavar='file', - dest='files', - action='append', + parser.add_argument( + '-o', + dest='output_dir', help='a file to fix up via un-flattening', ) args = parser.parse_args() - print( - """It is strongly, strongly recommended that you commit outstanding changes and -back up your source tree before running this conversion script. -Please take a moment to do that now if you haven't already. -""" - ) - resp = input("Really attempt to convert sources? 
yes/[no]: ") - if resp == "yes": - fix_files(args.dirs or [], args.files or []) + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 5a18c4e914bd..bf58c02c5cba 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -25,11 +25,7 @@ setuptools.setup( 'libcst >= 0.2.5', ], scripts=[ - {% for proto in api.all_protos.values() -%} - {% for service in proto.services.values() -%} - 'scripts/fixup_{{ service.module_name }}_keywords.py', - {% endfor -%} - {% endfor -%} + 'scripts/fixup_keywords.py', ], classifiers=[ 'Development Status :: 3 - Alpha', From e36cde04a0187de7d19bee1aa7c6ddad4550d232 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 11 Feb 2020 13:27:54 -0800 Subject: [PATCH 0201/1339] Add python_requires to generated setup.py (#294) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index bf58c02c5cba..163011bdd26c 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -21,6 +21,7 @@ setuptools.setup( 'grpcio >= 1.10.0', 'proto-plus >= 0.4.0', ), + python_requires='>={% if opts.lazy_import %}3.7{% else %}3.5{% endif 
%}',{# Lazy import requires module-level getattr #} setup_requires=[ 'libcst >= 0.2.5', ], From 3568ed55e325db5c36ddc3718cf7df425b8bcb4c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 14 Feb 2020 15:00:28 -0800 Subject: [PATCH 0202/1339] Remove the special 'enums' submodule (#300) The creation of this submodule view was based on a misunderstanding of a desired feature: in the API in question, enums are already segregated into their own submodule, the name of which conflicts with `enums.py` --- .../gapic/templates/%namespace/%name/__init__.py.j2 | 6 ++---- .../%namespace/%name_%version/%sub/__init__.py.j2 | 8 +++----- .../%namespace/%name_%version/%sub/enums.py.j2 | 13 ------------- .../unit/%name_%version/%sub/test_%service.py.j2 | 11 ----------- 4 files changed, 5 insertions(+), 33 deletions(-) delete mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 5cdccea984d9..1ea3128c5776 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -33,7 +33,6 @@ if sys.version_info < (3, 7): _lazy_name_to_package_map = { 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', - 'enums': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ 
service.name|snake_case }}.client', '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', @@ -46,7 +45,7 @@ _lazy_type_to_package_map = { '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', {%- endfor %} {%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', {%- endfor %}{%- endfor %} } @@ -70,11 +69,10 @@ def __getattr__(name): # Requires Python >= 3.7 elif name.endswith('Client'): module = __getattr__(to_snake_case(name)) sub_mod_class = getattr(module, name) - enums = __getattr__('enums') klass = type( name, (sub_mod_class,), - {'__doc__': sub_mod_class.__doc__, 'enums': enums} + {'__doc__': sub_mod_class.__doc__} ) globals()[name] = klass return klass diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index aad42c5f7699..aa497667fe55 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -34,12 +34,11 @@ if sys.version_info < (3, 7): _lazy_name_to_package_map = { 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', - 'enums': '{% if api.naming.module_namespace %}{{ 
api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.grpc', - {%- endfor %} {# Need to do types and enums #} + {%- endfor %} } _lazy_type_to_package_map = { @@ -48,7 +47,7 @@ _lazy_type_to_package_map = { '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', {%- endfor %} {%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.enums', + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', {%- endfor %}{%- endfor %}{%- endfilter %} } @@ -71,11 +70,10 @@ def __getattr__(name): # Requires Python >= 3.7 elif name.endswith('Client'): module = __getattr__(to_snake_case(name)) sub_mod_class = getattr(module, name) - enums = __getattr__('enums') klass = type( name, (sub_mod_class,), - {'__doc__': sub_mod_class.__doc__, 'enums': enums} + {'__doc__': sub_mod_class.__doc__} ) 
globals()[name] = klass return klass diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 deleted file mode 100644 index 567e8fe1c591..000000000000 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/enums.py.j2 +++ /dev/null @@ -1,13 +0,0 @@ -{% extends '_base.py.j2' %} - -{% block content %} -{% for p in api.protos.values() if p.file_to_generate and p.enums -%} -from .types.{{ p.module_name }} import ({% for e in p.enums.values() %}{{ e.name }}, {% endfor %}) -{% endfor %} - -__all__ = ( -{%- for p in api.protos.values() if p.file_to_generate %}{% for e in p.enums.values() %} - '{{ e.name }}', -{%- endfor %}{% endfor %} -) -{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 5343a7c0dc4d..de9ebf7fe592 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -20,9 +20,6 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} -{% if api.protos.values()|selectattr('enums') -%} -from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import enums -{% endif -%} {% for method in service.methods.values() -%} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') @@ -446,12 +443,4 @@ def test_{{ message.name|snake_case }}_path(): {% endwith -%} {% endfor -%} -{% with enums = [] -%}{% for p in api.protos.values() if p.file_to_generate -%}{% for e in 
p.enums.values() if e.ident.module == service.meta.address.module -%}{% do enums.append(e) %} -{% if enums -%} -def test_enum_path(): -{%- for e in enums %} - assert enums.{{ e.name }} == {{ e.ident.module }}.{{ e.name }} -{%- endfor %} -{% endif -%} -{% endfor -%}{% endfor -%}{% endwith -%} {% endblock %} From 32ce306a425514a0684d93b0237fc342c89787cb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 18 Feb 2020 09:25:44 -0800 Subject: [PATCH 0203/1339] Make '-d' and '-o' required arguments in fixup script (#301) This results in a slightly more helpful message being displayed if someone tries to run `fixup_keywords.py` with no additional context. --- .../gapic/templates/scripts/fixup_keywords.py.j2 | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 index 41c76034cc37..99681ed99225 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 @@ -136,13 +136,17 @@ Note: This tool operates at a best-effort level at converting positional """) parser.add_argument( '-d', + '--input-directory', + required=True, dest='input_dir', help='the input directory to walk for python files to fix up', ) parser.add_argument( '-o', + '--output-directory', + required=True, dest='output_dir', - help='a file to fix up via un-flattening', + help='the directory to output files fixed via un-flattening', ) args = parser.parse_args() input_dir = pathlib.Path(args.input_dir) From 14dfa7f27116d151c18b3c5cc03378a04312c083 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 18 Feb 2020 16:29:16 -0800 Subject: [PATCH 0204/1339] Test and impl for nested messages generate import statements (#302) Nested message definitions may have fields that are types defined in other modules. 
These modules need to be imported into the module that defines the nested message def. Ancillary changes, useful for debugging, development, and testing: * generate.py can be used standalone without being invoked by protoc * Specific testfiles and tests can be passed to pytest via the noxfile --- .../gapic-generator/gapic/cli/generate.py | 4 + packages/gapic-generator/gapic/schema/api.py | 17 +-- packages/gapic-generator/noxfile.py | 8 +- .../tests/unit/schema/test_api.py | 104 +++++++++++++++--- 4 files changed, 105 insertions(+), 28 deletions(-) diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index f72066057ace..8c5a98925ba3 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -65,3 +65,7 @@ def generate( # Output the serialized response. output.write(res.SerializeToString()) + + +if __name__ == "__main__": + generate() diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 3eba0fcbb4ab..112a07d40f7a 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -87,10 +87,10 @@ def enums(self) -> Mapping[str, wrappers.EnumType]: @cached_property def messages(self) -> Mapping[str, wrappers.MessageType]: """Return top-level messages on the proto.""" - return collections.OrderedDict([ + return collections.OrderedDict( (k, v) for k, v in self.all_messages.items() if not v.meta.address.parent - ]) + ) @property def module_name(self) -> str: @@ -140,17 +140,18 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: for use in a ``from package import module`` type of statement. """ - answer = set() self_reference = self.meta.address.python_import - for t in chain(*[m.field_types for m in self.messages.values()]): - # Add the appropriate Python import for the field. 
+ + answer = { + t.ident.python_import + for m in self.all_messages.values() # Sanity check: We do make sure that we are not trying to have # a module import itself. - if t.ident.python_import != self_reference: - answer.add(t.ident.python_import) + for t in m.field_types if t.ident.python_import != self_reference + } # Done; return the sorted sequence. - return tuple(sorted(list(answer))) + return tuple(sorted(answer)) def disambiguate(self, string: str) -> str: """Return a disambiguated string for the context of this proto. diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index cdd71f2709d9..8b2f2aa73795 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -36,7 +36,7 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit'), + *(session.posargs or [os.path.join('tests', 'unit')]), ) @@ -82,7 +82,9 @@ def showcase(session): # Install the library. session.install(tmp_dir) - session.run('py.test', '--quiet', os.path.join('tests', 'system')) + session.run( + 'py.test', '--quiet', *(session.posargs or [os.path.join('tests', 'system')]) + ) @nox.session(python=['3.6', '3.7', '3.8']) @@ -127,7 +129,7 @@ def showcase_unit(session): '--quiet', '--cov=google', '--cov-report=term', - os.path.join('tests', 'unit'), + *(session.posargs or [os.path.join('tests', 'unit')]), ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 1aaa02fd588f..ba5729d9a48f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -196,8 +196,8 @@ def test_proto_builder_constructor(): # Test the load function. 
with mock.patch.object(api._ProtoBuilder, '_load_children') as lc: pb = api._ProtoBuilder(fdp, - file_to_generate=True, - naming=make_naming(), + file_to_generate=True, + naming=make_naming(), ) # There should be three total calls to load the different types @@ -222,8 +222,8 @@ def test_proto_builder_constructor(): def test_not_target_file(): """Establish that services are not ignored for untargeted protos.""" - message_pb = make_message_pb2(name='Foo', - fields=(make_field_pb2(name='bar', type=3, number=1),) + message_pb = make_message_pb2( + name='Foo', fields=(make_field_pb2(name='bar', type=3, number=1),) ) service_pb = descriptor_pb2.ServiceDescriptorProto() fdp = make_file_pb2(messages=(message_pb,), services=(service_pb,)) @@ -239,8 +239,8 @@ def test_not_target_file(): def test_messages(): L = descriptor_pb2.SourceCodeInfo.Location - message_pb = make_message_pb2(name='Foo', - fields=(make_field_pb2(name='bar', type=3, number=1),) + message_pb = make_message_pb2( + name='Foo', fields=(make_field_pb2(name='bar', type=3, number=1),) ) locations = ( L(path=(4, 0), leading_comments='This is the Foo message.'), @@ -271,7 +271,7 @@ def test_messages_reverse_declaration_order(): make_message_pb2(name='Foo', fields=( make_field_pb2(name='bar', number=1, type_name='.google.example.v3.Bar'), - ), + ), ), make_message_pb2(name='Bar'), ) @@ -296,7 +296,7 @@ def test_messages_recursive(): make_message_pb2(name='Foo', fields=( make_field_pb2(name='foo', number=1, type_name='.google.example.v3.Foo'), - ), + ), ), ) fdp = make_file_pb2( @@ -343,6 +343,76 @@ def test_messages_nested(): assert bar not in proto.messages +def test_python_modules_nested(): + fd = ( + make_file_pb2( + name='dep.proto', + package='google.dep', + messages=(make_message_pb2(name='ImportedMessage', fields=()),), + ), + make_file_pb2( + name='common.proto', + package='google.example.v1.common', + messages=(make_message_pb2(name='Bar'),), + ), + make_file_pb2( + name='foo.proto', + 
package='google.example.v1', + messages=( + make_message_pb2( + name='GetFooRequest', + fields=( + make_field_pb2(name='primitive', number=2, type=1), + make_field_pb2( + name='foo', + number=3, + type=1, + type_name='.google.example.v1.GetFooRequest.Foo', + ), + ), + nested_type=( + make_message_pb2( + name='Foo', + fields=( + make_field_pb2( + name='imported_message', + number=1, + type_name='.google.dep.ImportedMessage'), + ), + ), + ), + ), + make_message_pb2( + name='GetFooResponse', + fields=( + make_field_pb2( + name='foo', + number=1, + type_name='.google.example.v1.GetFooRequest.Foo', + ), + ), + ), + ), + services=(descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + ), + ),), + ), + ) + + api_schema = api.API.build(fd, package='google.example.v1') + + assert api_schema.protos['foo.proto'].python_modules == ( + imp.Import(package=('google', 'dep'), module='dep_pb2'), + ) + + def test_services(): L = descriptor_pb2.SourceCodeInfo.Location @@ -366,8 +436,8 @@ def _n(method_name: str): ]}) # Set up messages for our RPC. 
- request_message_pb = make_message_pb2(name='GetFooRequest', - fields=(make_field_pb2(name='name', type=9, number=1),) + request_message_pb = make_message_pb2( + name='GetFooRequest', fields=(make_field_pb2(name='name', type=9, number=1),) ) response_message_pb = make_message_pb2(name='GetFooResponse', fields=()) @@ -623,10 +693,10 @@ def test_enums(): def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, - messages: Sequence[descriptor_pb2.DescriptorProto] = (), - enums: Sequence[descriptor_pb2.EnumDescriptorProto] = (), - services: Sequence[descriptor_pb2.ServiceDescriptorProto] = (), - locations: Sequence[descriptor_pb2.SourceCodeInfo.Location] = (), + messages: Sequence[descriptor_pb2.DescriptorProto] = (), + enums: Sequence[descriptor_pb2.EnumDescriptorProto] = (), + services: Sequence[descriptor_pb2.ServiceDescriptorProto] = (), + locations: Sequence[descriptor_pb2.SourceCodeInfo.Location] = (), ) -> descriptor_pb2.FileDescriptorProto: return descriptor_pb2.FileDescriptorProto( name=name, @@ -642,13 +712,13 @@ def make_message_pb2( name: str, fields: tuple = (), **kwargs - ) -> descriptor_pb2.DescriptorProto: +) -> descriptor_pb2.DescriptorProto: return descriptor_pb2.DescriptorProto(name=name, field=fields, **kwargs) def make_field_pb2(name: str, number: int, - type: int = 11, # 11 == message - type_name: str = None, + type: int = 11, # 11 == message + type_name: str = None, ) -> descriptor_pb2.FieldDescriptorProto: return descriptor_pb2.FieldDescriptorProto( name=name, From 0d9a903e5dc4a4b2de474941d85a7c4cb8b9c1d1 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 19 Feb 2020 14:23:53 -0800 Subject: [PATCH 0205/1339] Add generated unit test coverage for lazy import, part 1 (#303) The Python 3.8 showcase unit tests (and all 'real' generated unit tests when using the lazy import feature) include a test to verify that the logic used for lazy import is covered. 
If the classes and submodules are available it is assumed that they are the correct ones; asserting that there is no mismatch between lazy import path and corresponding object is outside the current scope of these tests. Lazy import is currently only available for the top level of a module and its corresponding types submodule. Other submodules will be converted to use lazy imports as part of subsequent commits. --- .../%name_%version/%sub/__init__.py.j2 | 9 +-- .../%name_%version/%sub/types/__init__.py.j2 | 42 ++++++++++- .../%name_%version/%sub/test_%service.py.j2 | 69 +++++++++++++++++++ packages/gapic-generator/noxfile.py | 25 ++++--- 4 files changed, 131 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index aa497667fe55..372ce3e2507e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -30,14 +30,14 @@ def from_snake_case(s): if sys.version_info < (3, 7): - raise ImportError('This module requires Python 3.7 or later.') + raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER _lazy_name_to_package_map = { 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', - '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ 
api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', - '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.grpc', + '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.base', + '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.grpc', {%- endfor %} } @@ -56,8 +56,9 @@ def __getattr__(name): # Requires Python >= 3.7 if name == '__all__': all_names = globals()['__all__'] = sorted( chain( - (from_snake_case(k) for k in _lazy_name_to_package_map), + (from_snake_case(k) for k in _lazy_name_to_package_map if k != 'types'), _lazy_type_to_package_map, + ['types'], ) ) return all_names diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index 7b464a970568..8b341523337a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -1,8 +1,47 @@ {% extends '_base.py.j2' %} {% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import sys + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER + + +_lazy_type_to_package_map = { + {%- filter sort_lines %} +{%- for proto in api.protos.values() if proto.meta.address.subpackage == 
api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %} +{%- for enum in proto.enums.values() %} + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %}{%- endfor %}{%- endfilter %} +} + + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted(_lazy_type_to_package_map) + return all_names + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown sub-module {name!r}.') + + +def __dir__(): + return globals().get('__all__') or __getattr__('__all__') + +{% else -%} {% for p in api.protos.values() if p.file_to_generate and p.messages -%} -from .{{ p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) +from .{{p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) {% endfor %} __all__ = ( @@ -10,4 +49,5 @@ __all__ = ( '{{ m.name }}', {%- endfor %}{% endfor %} ) +{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index de9ebf7fe592..a6bf89ffbc6a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -312,6 +312,75 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% endfor -%} {#- method in methods #} +{% if opts.lazy_import -%} {# lazy import #} +def test_module_level_imports(): + # Use the other transport import path so that code gets tested. + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name }}GrpcTransport + transport = {{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.client_name }} + client = {{ service.client_name }}(transport=transport) + assert client._transport is transport + + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name|snake_case }}_grpc_transport + transport2 = {{ service.name|snake_case }}_grpc_transport.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + + client2 = {{ service.client_name }}(transport=transport2) + assert client2._transport is transport2 + + {% with type_name = cycler(*service.methods.values()).next().input.name -%} + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name }} + type_ = {{ type_name }}() + + try: + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name|lower }}_squidification + except (AttributeError, ImportError) as e: + pass + else: + assert False + {% endwith -%} + + import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} as mod + all_names = dir(mod) + 
expected_names = sorted([ + 'types', + {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name }}', + '{{ service.transport_name }}', + '{{ service.grpc_transport_name }}', + {%- endfor %} + {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor %} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}' + {% endfor %}{%- endfor %} + ]) + assert all_names == expected_names + + {% with type_name = cycler(*service.methods.values()).next().input.name -%} + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types import {{ type_name }} + type_ = {{ type_name }}() + {% endwith -%} + + import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types as types + all_types = dir(types) + expected_types = sorted([ + {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor %} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}', + {% endfor %}{%- endfor %} + ]) + assert all_types == expected_types + +{% endif -%} {# lazy import #} + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.{{ service.name }}GrpcTransport( diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 8b2f2aa73795..d3d84243b916 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -109,15 +109,22 @@ def showcase_unit(session): ) # Write out a client library for Showcase. 
- session.run('protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - 'google/showcase/v1beta1/messaging.proto', - 'google/showcase/v1beta1/testing.proto', - external=True, - ) + args = [ + 'protoc', + f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + 'google/showcase/v1beta1/messaging.proto', + 'google/showcase/v1beta1/testing.proto', + ] + if session.python == '3.8': + args.append('--python_gapic_opt=lazy-import') + + session.run( + *args, + external=True, + ) # Install the library. session.chdir(tmp_dir) From d29455b4f3f938b3bd9cbe49613705190792bbf2 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 19 Feb 2020 16:53:22 -0800 Subject: [PATCH 0206/1339] Pagers are properly imported when they live in subpackages (#307) --- packages/gapic-generator/gapic/schema/wrappers.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 72978d26cab9..dc3fd59c38c0 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -516,8 +516,7 @@ def client_output(self): return PythonType(meta=metadata.Metadata( address=metadata.Address( name=f'{self.name}Pager', - package=self.ident.api_naming.module_namespace + ( - self.ident.api_naming.versioned_module_name, + package=self.ident.api_naming.module_namespace + (self.ident.api_naming.versioned_module_name,) + self.ident.subpackage + ( 'services', utils.to_snake_case(self.ident.parent[-1]), ), From 81ec8d3dc9ea3f619792ab981eb7bf82a1b80eb1 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 21 Feb 2020 15:24:47 -0800 Subject: [PATCH 0207/1339] Bump the version (#309) Bump generator 
version to 0.16 and required python version of the generated surface to 3.6 --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- packages/gapic-generator/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 163011bdd26c..e3c5c7a225d3 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -21,7 +21,7 @@ setuptools.setup( 'grpcio >= 1.10.0', 'proto-plus >= 0.4.0', ), - python_requires='>={% if opts.lazy_import %}3.7{% else %}3.5{% endif %}',{# Lazy import requires module-level getattr #} + python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy import requires module-level getattr #} setup_requires=[ 'libcst >= 0.2.5', ], diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1c2a2c859f92..223bf4c450e3 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.15.0', + version='0.16.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 76ff1ac9616413ef5b1a242f10b5986e4f883889 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 21 Feb 2020 16:34:38 -0800 Subject: [PATCH 0208/1339] Fix the autogenerated from_service_account_file test (#310) If no port number is provided in the service host, include the default of 443 when checking the transport's host. Includes a minor fix such that a method's output type's fields can yield import statements for the generated unit tests. Includes minor refactors undertaken during debugging to improve code clarity. 
--- .../gapic-generator/gapic/schema/wrappers.py | 42 ++++++++++++------- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 2 +- 3 files changed, 29 insertions(+), 17 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index dc3fd59c38c0..e26cea802607 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -219,11 +219,13 @@ def __hash__(self): @utils.cached_property def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: """Return all composite fields used in this proto's messages.""" - answer = [] - for field in self.fields.values(): - if field.message or field.enum: - answer.append(field.type) - return tuple(answer) + answer = tuple( + field.type + for field in self.fields.values() + if field.message or field.enum + ) + + return answer @property def map(self) -> bool: @@ -411,6 +413,10 @@ def ident(self) -> metadata.Address: def name(self) -> str: return self.ident.name + @property + def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: + return tuple() + @dataclasses.dataclass(frozen=True) class PrimitiveType(PythonType): @@ -612,6 +618,7 @@ def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, En answer = [self.input] if not self.void: answer.append(self.client_output) + answer.extend(self.client_output.field_types) # If this method has flattening that is honored, add its # composite types. @@ -619,9 +626,11 @@ def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, En # This entails adding the module for any field on the signature # unless the field is a primitive. 
flattening = self.legacy_flattened_fields if use_legacy else self.flattened_fields - for field in flattening.values(): - if field.message or field.enum: - answer.append(field.type) + answer.extend( + field.type + for field in flattening.values() + if field.message or field.enum + ) # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. @@ -739,13 +748,16 @@ def names(self) -> FrozenSet[str]: # Identify any import module names where the same module name is used # from distinct packages. - modules: Dict[str, Set[str]] = {} - for t in chain(*[m.ref_types for m in self.methods.values()]): - modules.setdefault(t.ident.module, set()) - modules[t.ident.module].add(t.ident.package) - for module_name, packages in modules.items(): - if len(packages) > 1: - answer.add(module_name) + modules: Dict[str, Set[str]] = collections.defaultdict(set) + for m in self.methods.values(): + for t in m.ref_types: + modules[t.ident.module].add(t.ident.package) + + answer.update( + module_name + for module_name, packages in modules.items() + if len(packages) > 1 + ) # Done; return the answer. 
return frozenset(answer) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 3463d23573f0..9244268b3fe1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -14,7 +14,7 @@ from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} -{% for ref_type in method.ref_types_legacy -%} +{% for ref_type in method.ref_types -%} {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index a6bf89ffbc6a..7a09e66ddbb0 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -40,7 +40,7 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") assert client._transport._credentials == creds - {% if service.host %}assert client._transport._host == '{{ service.host }}'{% endif %} + {% if service.host %}assert client._transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} def test_{{ service.client_name|snake_case }}_client_options(): From 13bf65b39ddde8bd206d1b3a692308239933df53 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 21 Feb 2020 17:23:59 -0800 Subject: [PATCH 0209/1339] Handle empty signature more elegantly (#312) --- 
packages/gapic-generator/gapic/schema/wrappers.py | 3 ++- .../tests/unit/schema/wrappers/test_method.py | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index e26cea802607..f207e1a1dcae 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -555,7 +555,8 @@ def flattened_fields(self) -> Mapping[str, Field]: answer: Dict[str, Field] = collections.OrderedDict( (f.strip(), self.input.get_field(*f.strip().split('.'))) for sig in signatures - for f in sig.split(',') + # Special case for an empty signature check + for f in sig.split(',') if f ) return answer diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 2a296ee88826..75c1c9588e78 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -171,6 +171,14 @@ def test_method_flattened_fields(): assert 'b' in method.flattened_fields +def test_method_flattened_fields_empty_sig(): + a = make_field('a', type=5) # int + b = make_field('b', type=5) + input_msg = make_message('Z', fields=(a, b)) + method = make_method('F', input_message=input_msg, signatures=('',)) + assert len(method.flattened_fields) == 0 + + def test_method_include_flattened_message_fields(): a = make_field('a', type=5) b = make_field('b', type=11, message=make_message('Eggs')) From c7fe9a0c6f106a5b07f7b8eca99599a08320d81f Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 21 Feb 2020 17:35:27 -0800 Subject: [PATCH 0210/1339] Bump version (#313) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 223bf4c450e3..240ad7764dea 100644 --- 
a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.16.0', + version='0.17.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From 3f0fdbd6bb07fda434d17d7116a21c02fd56f1ea Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 24 Feb 2020 14:51:18 -0800 Subject: [PATCH 0211/1339] Remove 3.5 classifier from setup.py (#315) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index e3c5c7a225d3..7ed4c9db81a4 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -32,7 +32,6 @@ setuptools.setup( 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', From 765f5995b636c9921079271daa122a8cc27b1978 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 25 Feb 2020 17:31:22 -0800 Subject: [PATCH 0212/1339] Cross package request messages are constructed specially (#317) A cross package request is almost certainly not a proto-plus wrapped type, which puts certain restrictions on its construction and manipulation due to constraints in the Python protobuf API. It is bad practice but neither forbidden nor impossible to write a method whose request message definitionlives in a different package. A recurring example is IAM Policy Requests. This change detects when a method's request lives in a different package and constructs it either via keyword expansion (a dict was passed in) or with no ctor params. 
Also, in the above scenario, fields are not eligible for flattening if they point to non-primitive types. Generated unit tests for services that use out-of-package requests include from-dict construction. Adds the grpc-google-iam-v1 requirement as a special case in generated setup.py --- packages/gapic-generator/gapic/schema/api.py | 7 +++++ .../gapic-generator/gapic/schema/wrappers.py | 20 +++++++++++-- .../%sub/services/%service/client.py.j2 | 22 ++++++++++++-- .../gapic/templates/setup.py.j2 | 3 ++ .../%name_%version/%sub/test_%service.py.j2 | 29 ++++++++++++++++++ .../tests/unit/schema/test_api.py | 3 ++ .../tests/unit/schema/wrappers/test_method.py | 30 +++++++++++++++++-- .../unit/schema/wrappers/test_service.py | 1 + 8 files changed, 107 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 112a07d40f7a..66d0b16649c3 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -279,6 +279,13 @@ def subpackages(self) -> Mapping[str, 'API']: ) return answer + def requires_package(self, pkg: Tuple[str, ...]) -> bool: + return any( + message.ident.package == pkg + for proto in self.all_protos.values() + for message in proto.all_messages.values() + ) + class _ProtoBuilder: """A "builder class" for Proto objects. 
diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index f207e1a1dcae..b38b4f1f74be 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -551,12 +551,26 @@ def field_headers(self) -> Sequence[str]: def flattened_fields(self) -> Mapping[str, Field]: """Return the signature defined for this method.""" signatures = self.options.Extensions[client_pb2.method_signature] + cross_pkg_request = self.input.ident.package != self.ident.package + + def filter_fields(sig): + for f in sig.split(','): + if not f: + # Special case for an empty signature + continue + name = f.strip() + field = self.input.get_field(*name.split('.')) + if cross_pkg_request and not field.is_primitive: + # This is not a proto-plus wrapped message type, + # and setting a non-primitive field directly is verboten. + continue + + yield name, field answer: Dict[str, Field] = collections.OrderedDict( - (f.strip(), self.input.get_field(*f.strip().split('.'))) + name_and_field for sig in signatures - # Special case for an empty signature check - for f in sig.split(',') if f + for name_and_field in filter_fields(sig) ) return answer diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 9244268b3fe1..800b069800c1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -188,14 +188,30 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # If we have keyword arguments corresponding to fields on the - # 
request, apply these. {% endif -%} + {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = {{ method.input.ident }}(**request) + elif not request: + request = {{ method.input.ident }}() + {%- else %} request = {{ method.input.ident }}(request) - {%- for key, field in method.flattened_fields.items() %} + {% endif %} {# different request package #} + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} + {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} + {# They can be _extended_, however -#} + {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }}: + request.{{ key }}.extend({{ field.name }}) + {%- endfor %} {%- endif %} # Wrap the RPC method; this adds retry and timeout information, diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 7ed4c9db81a4..9d408a23dd2e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -20,6 +20,9 @@ setuptools.setup( 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 0.4.0', + {%- if api.requires_package(('google', 'iam', 'v1')) %} + 'grpc-google-iam-v1', + {%- endif %} ), python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy import requires module-level getattr #} setup_requires=[ diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 7a09e66ddbb0..b151c0418306 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -165,6 +165,35 @@ def test_{{ method.name|snake_case }}_field_headers(): ) in kw['metadata'] {% endif %} +{% if method.ident.package != method.input.ident.package %} +def test_{{ method.name|snake_case }}_from_dict(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + response = client.{{ method.name|snake_case }}(request={ + {%- for field in method.input.fields.values() %} + '{{ field.name }}': {{ field.mock_value }}, + {%- endfor %} + } + ) + call.assert_called() + +{% endif %} + {% if method.flattened_fields %} def test_{{ method.name|snake_case }}_flattened(): client = {{ service.client_name }}( diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index ba5729d9a48f..dd62ddd7fa0f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -87,6 +87,9 @@ def test_api_build(): imp.Import(package=('google', 'dep'), 
module='dep_pb2'), ) + assert api_schema.requires_package(('google', 'example', 'v1')) + assert not api_schema.requires_package(('elgoog', 'example', 'v1')) + # Establish that the subpackages work. assert 'common' in api_schema.subpackages sub = api_schema.subpackages['common'] diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 75c1c9588e78..e708b75b2c5c 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -179,9 +179,35 @@ def test_method_flattened_fields_empty_sig(): assert len(method.flattened_fields) == 0 +def test_method_flattened_fields_different_package_non_primitive(): + # This test verifies that method flattening handles a special case where: + # * the method's request message type lives in a different package and + # * a field in the method_signature is a non-primitive. + # + # If the message is defined in a different package it is not guaranteed to + # be a proto-plus wrapped type, which puts restrictions on assigning + # directly to its fields, which complicates request construction. + # The easiest solution in this case is to just prohibit these fields + # in the method flattening. 
+ message = make_message('Mantle', + package="mollusc.cephalopod.v1", module="squid") + mantle = make_field('mantle', type=11, type_name='Mantle', + message=message, meta=message.meta) + arms_count = make_field('arms_count', type=5, meta=message.meta) + input_message = make_message( + 'Squid', fields=(mantle, arms_count), + package=".".join(message.meta.address.package), + module=message.meta.address.module + ) + method = make_method('PutSquid', input_message=input_message, + package="remote.package.v1", module="module", signatures=("mantle,arms_count",)) + assert set(method.flattened_fields) == {'arms_count'} + + def test_method_include_flattened_message_fields(): a = make_field('a', type=5) - b = make_field('b', type=11, message=make_message('Eggs')) + b = make_field('b', type=11, type_name='Eggs', + message=make_message('Eggs')) input_msg = make_message('Z', fields=(a, b)) method = make_method('F', input_message=input_msg, signatures=('a,b',)) assert len(method.flattened_fields) == 2 @@ -274,7 +300,7 @@ def make_method( output=output_message, meta=metadata.Metadata(address=metadata.Address( name=name, - package=package, + package=tuple(package.split('.')), module=module, parent=(f'{name}Service',), )), diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index dfe9f5b989b8..504ed7c28f30 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -280,6 +280,7 @@ def get_method(name: str, input=input_, output=output, lro=lro, + meta=input_.meta, ) From c4c296d81995ee8a68ed460fa3e587eff18c8b7c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 26 Feb 2020 15:38:22 -0800 Subject: [PATCH 0213/1339] Gapic tweaks (#319) Minor surface tweaks from issues uncovered while generating GAPICs. 
--- .../%name_%version/%sub/services/%service/client.py.j2 | 1 + .../tests/unit/%name_%version/%sub/test_%service.py.j2 | 10 +++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 800b069800c1..1b6baadc5d1e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -86,6 +86,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + {% endfor %} def __init__(self, *, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index b151c0418306..a224950c6873 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -4,7 +4,7 @@ from unittest import mock import grpc - +import math import pytest {# Import the service itself as well as every proto module that it imports. 
-#} @@ -124,7 +124,11 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% else -%} assert isinstance(response, {{ method.client_output.ident }}) {% for field in method.output.fields.values() | rejectattr('message') -%} + {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% else -%} assert response.{{ field.name }} == {{ field.mock_value }} + {% endif -%} {% endfor %} {% endif %} @@ -530,12 +534,12 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): {% for message in service.resource_messages -%} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle") -%} -def test_{{ message.name|snake_case }}_path(): +def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args -%} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) - actual = {{ service.client_name }}.{{ message.name|snake_case }}_path({{message.resource_path_args|join(", ") }}) + actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) assert expected == actual {% endwith -%} From 3d42650f740a710123eccc6009b58da1324de3ba Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 26 Feb 2020 15:50:56 -0800 Subject: [PATCH 0214/1339] Version bump (#320) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 240ad7764dea..6bd1b97e87cb 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.17.0', + version='0.18.0', license='Apache 2.0', author='Luke Sneeringer', 
author_email='lukesneeringer@google.com', From 002fa63e4fefd2a532ac094b36844e098863130e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 27 Feb 2020 10:32:00 -0800 Subject: [PATCH 0215/1339] fix syntax errors in docs templates (#321) --- .../gapic/templates/docs/%name_%version/services.rst.j2 | 2 +- .../gapic/templates/docs/%name_%version/types.rst.j2 | 2 +- packages/gapic-generator/gapic/templates/docs/index.rst.j2 | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 index a4c02ad53197..350f120eceb8 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 @@ -1,5 +1,5 @@ Client for {{ api.naming.long_name }} API -{{ '=' * (14 + api.naming.long_name|length) }} +{{ '=' * (15 + api.naming.long_name|length) }} .. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }} :members: diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 index 30f93dd8e3e4..ce3c8882c8cd 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 @@ -1,5 +1,5 @@ Types for {{ api.naming.long_name }} API -{{ '=' * (13 + api.naming.long_name|length) }} +{{ '=' * (14 + api.naming.long_name|length) }} .. 
automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: diff --git a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 index 1a4ece01a37d..c8dea9adbca6 100644 --- a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 @@ -2,5 +2,6 @@ API Reference ------------- .. toctree:: :maxdepth: 2 + {{ api.naming.versioned_module_name }}/services {{ api.naming.versioned_module_name }}/types From 6eb7e15fded9a0785f218c693577334e6e0cfa8d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 27 Feb 2020 13:55:07 -0800 Subject: [PATCH 0216/1339] Disambiguate Python-keyword-named modules (#326) E.g. import.proto turns into import_.py, not import.py This allows the module to be imported via the normal import mechanisms. --- packages/gapic-generator/gapic/schema/api.py | 18 ++++++++- .../tests/unit/schema/test_api.py | 37 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 66d0b16649c3..b26060d438f2 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -19,9 +19,11 @@ import collections import dataclasses +import keyword +import os import sys from itertools import chain -from typing import Callable, Dict, FrozenSet, Mapping, Sequence, Set, Tuple +from typing import Callable, Container, Dict, FrozenSet, Mapping, Sequence, Set, Tuple from google.api_core import exceptions # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -204,10 +206,24 @@ def build(cls, file_descriptors, ), opts=opts) + def disambiguate_keyword_fname( + full_path: str, + visited_names: Container[str]) -> str: + path, fname = os.path.split(full_path) + name, ext = os.path.splitext(fname) + if name in 
keyword.kwlist or full_path in visited_names: + name += "_" + full_path = os.path.join(path, name + ext) + if full_path in visited_names: + return disambiguate_keyword_fname(full_path, visited_names) + + return full_path + # Iterate over each FileDescriptorProto and fill out a Proto # object describing it, and save these to the instance. protos: Dict[str, Proto] = {} for fd in file_descriptors: + fd.name = disambiguate_keyword_fname(fd.name, protos) protos[fd.name] = _ProtoBuilder( file_descriptor=fd, file_to_generate=fd.package.startswith(package), diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index dd62ddd7fa0f..f2d2eba10bb3 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -142,6 +142,43 @@ def test_proto_names(): assert proto.disambiguate('foo') == '_foo' +def test_proto_keyword_fname(): + # Protos with filenames that happen to be python keywords + # cannot be directly imported. + # Check that the file names are unspecialized when building the API object. + fd = ( + make_file_pb2( + name='import.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='ImportRequest', fields=()),), + ), + make_file_pb2( + name='import_.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='ImportUnderRequest', fields=()),), + ), + make_file_pb2( + name='class_.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='ClassUnderRequest', fields=()),), + ), + make_file_pb2( + name='class.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='ClassRequest', fields=()),), + ) + ) + + # We can't create new collisions, so check that renames cascade. 
+ api_schema = api.API.build(fd, package='google.keywords.v1') + assert set(api_schema.protos.keys()) == { + 'import_.proto', + 'import__.proto', + 'class_.proto', + 'class__.proto', + } + + def test_proto_names_import_collision(): # Put together a couple of minimal protos. fd = ( From 738b02d591e349e995f00280e513605809d0b404 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 27 Feb 2020 16:09:41 -0800 Subject: [PATCH 0217/1339] Numerous small cleanups (#328) Add paginated result to a method's referred-to types Remove empty request construction for cross package requests with no flattened fields --- .../gapic-generator/gapic/schema/wrappers.py | 5 +++ .../%sub/services/%service/client.py.j2 | 6 ++-- .../%name_%version/%sub/test_%service.py.j2 | 4 +-- .../tests/unit/schema/wrappers/test_method.py | 34 +++++++++++++++++++ 4 files changed, 45 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index b38b4f1f74be..7711da6d2796 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -653,6 +653,11 @@ def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, En answer.append(self.lro.response_type) answer.append(self.lro.metadata_type) + # If this message paginates its responses, it is possible + # that the individual result messages reside in a different module. + if self.paged_result_field: + answer.append(self.paged_result_field.message) + # Done; return the answer.
return tuple(answer) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 1b6baadc5d1e..b40de65d240e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -195,16 +195,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # so it must be constructed via keyword expansion. if isinstance(request, dict): request = {{ method.input.ident }}(**request) + {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} elif not request: request = {{ method.input.ident }}() + {% endif -%}{# Cross-package req and flattened fields #} {%- else %} request = {{ method.input.ident }}(request) {% endif %} {# different request package #} - # If we have keyword arguments corresponding to fields on the - # request, apply these. {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index a224950c6873..5a52107bf4d9 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -294,8 +294,8 @@ def test_{{ method.name|snake_case }}_pager(): request={}, )] assert len(results) == 6 - assert all([isinstance(i, {{ method.paged_result_field.message.ident }}) - for i in results]) + assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) + for i in results) def test_{{ method.name|snake_case }}_pages(): client = {{ service.client_name }}( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index e708b75b2c5c..6003b51a0bf7 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -113,6 +113,40 @@ def test_method_paged_result_field_no_page_field(): assert method.paged_result_field is None +def test_method_paged_result_ref_types(): + input_msg = make_message( + name='ListSquidsRequest', + fields=( + make_field(name='parent', type=9), # str + make_field(name='page_size', type=5), # int + make_field(name='page_token', type=9), # str + ), + module='squid', + ) + mollusc_msg = make_message('Mollusc', module='mollusc') + output_msg = make_message( + name='ListMolluscsResponse', + fields=( + make_field(name='molluscs', message=mollusc_msg, repeated=True), + make_field(name='next_page_token', type=9) + ), + module='mollusc' + ) + method = make_method( + 'ListSquids', + input_message=input_msg, + output_message=output_msg, + 
module='squid' + ) + + ref_type_names = {t.name for t in method.ref_types} + assert ref_type_names == { + 'ListSquidsRequest', + 'ListSquidsPager', + 'Mollusc', + } + + def test_method_field_headers_none(): method = make_method('DoSomething') assert isinstance(method.field_headers, collections.abc.Sequence) From 5d8e90b15ff6d570ff26fd946117442ddacbe6f8 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 28 Feb 2020 12:54:13 -0800 Subject: [PATCH 0218/1339] Minor cleanup (#329) Remove unused imports in generated client.py and vars in generated test_%service.py --- .../gapic-generator/gapic/schema/wrappers.py | 8 ++++++ .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 6 +++-- .../unit/schema/wrappers/test_service.py | 26 +++++++++++++++++++ 4 files changed, 39 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 7711da6d2796..8a45d6747d28 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -793,6 +793,14 @@ def resource_messages(self) -> FrozenSet[MessageType]: if field.message and field.message.resource_path ) + @utils.cached_property + def any_client_streaming(self) -> bool: + return any(m.client_streaming for m in self.methods.values()) + + @utils.cached_property + def any_server_streaming(self) -> bool: + return any(m.server_streaming for m in self.methods.values()) + def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """Return a derivative of this service with the provided context. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index b40de65d240e..980bc476958d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -2,7 +2,7 @@ {% block content %} from collections import OrderedDict -from typing import Dict, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 5a52107bf4d9..3916e795d96e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -126,6 +126,8 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% for field in method.output.fields.values() | rejectattr('message') -%} {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} {% else -%} assert response.{{ field.name }} == {{ field.mock_value }} {% endif -%} @@ -151,7 +153,7 @@ def test_{{ method.name|snake_case }}_field_headers(): type(client._transport.{{ method.name|snake_case }}), '__call__') as call: 
call.return_value = {{ method.output.ident }}() - response = client.{{ method.name|snake_case }}(request) + client.{{ method.name|snake_case }}(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -475,7 +477,7 @@ def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, 'default') as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - client = {{ service.client_name }}() + {{ service.client_name }}() adc.assert_called_once_with(scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 504ed7c28f30..a03716a6b345 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -13,6 +13,7 @@ # limitations under the License. 
import collections +import itertools import typing from google.api import annotations_pb2 @@ -196,6 +197,31 @@ def test_resource_messages(): assert expected == actual +def test_service_any_streaming(): + for client, server in itertools.product((True, False), (True, False)): + service = make_service( + f'ClientStream{client}:ServerStream{server}', + methods=( + ( + make_method( + f"GetMollusc", + input_message=make_message( + "GetMolluscRequest", + ), + output_message=make_message( + "GetMolluscResponse", + ), + client_streaming=client, + server_streaming=server, + ), + ) + ) + ) + + assert service.any_client_streaming == client + assert service.any_server_streaming == server + + def make_service(name: str = 'Placeholder', host: str = '', methods: typing.Tuple[wrappers.Method] = (), scopes: typing.Tuple[str] = ()) -> wrappers.Service: From eb9c9056407f78c3d38b3b7ba59e3ee2ec68837c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 2 Mar 2020 12:40:32 -0800 Subject: [PATCH 0219/1339] Unimported LRO type annotation fix (#330) Create wrapped Protos in two stages: 1) Load messages and enums 2) Load services and methods This allows methods to reference _all_ messages in the entire API surface without having to rely on explicit imports. This is a workaround for a common case of #318 where an LRO response or metadata type is referenced as a string in the method annotation but is not a visible, imported type. 
--- packages/gapic-generator/gapic/schema/api.py | 286 ++++++++++++------ .../tests/unit/schema/test_api.py | 76 +++++ 2 files changed, 263 insertions(+), 99 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index b26060d438f2..b5b2b43bd64f 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -23,7 +23,7 @@ import os import sys from itertools import chain -from typing import Callable, Container, Dict, FrozenSet, Mapping, Sequence, Set, Tuple +from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple from google.api_core import exceptions # type: ignore from google.longrunning import operations_pb2 # type: ignore @@ -56,10 +56,13 @@ def __getattr__(self, name: str): return getattr(self.file_pb2, name) @classmethod - def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool, naming: api_naming.Naming, - prior_protos: Mapping[str, 'Proto'] = None, - ) -> 'Proto': + def build( + cls, file_descriptor: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool, naming: api_naming.Naming, + opts: options.Options = options.Options(), + prior_protos: Mapping[str, 'Proto'] = None, + load_services: bool = True + ) -> 'Proto': """Build and return a Proto instance. Args: @@ -71,12 +74,18 @@ def build(cls, file_descriptor: descriptor_pb2.FileDescriptorProto, with the API. prior_protos (~.Proto): Previous, already processed protos. These are needed to look up messages in imported protos. + load_services (bool): Toggle whether the proto file should + load its services. Not doing so enables a two-pass fix for + LRO response and metadata types in certain situations. 
""" - return _ProtoBuilder(file_descriptor, - file_to_generate=file_to_generate, - naming=naming, - prior_protos=prior_protos or {}, - ).proto + return _ProtoBuilder( + file_descriptor, + file_to_generate=file_to_generate, + naming=naming, + opts=opts, + prior_protos=prior_protos or {}, + load_services=load_services + ).proto @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: @@ -184,10 +193,13 @@ class API: subpackage_view: Tuple[str, ...] = dataclasses.field(default_factory=tuple) @classmethod - def build(cls, - file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], - package: str = '', - opts: options.Options = options.Options()) -> 'API': + def build( + cls, + file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], + package: str = '', + opts: options.Options = options.Options(), + prior_protos: Mapping[str, 'Proto'] = None, + ) -> 'API': """Build the internal API schema based on the request. Args: @@ -199,6 +211,9 @@ def build(cls, Protos with packages outside this list are considered imports rather than explicit targets. opts (~.options.Options): CLI options passed to the generator. + prior_protos (~.Proto): Previous, already processed protos. + These are needed to look up messages in imported protos. + Primarily used for testing. """ # Save information about the overall naming for this API. naming = api_naming.Naming.build(*filter( @@ -221,16 +236,43 @@ def disambiguate_keyword_fname( # Iterate over each FileDescriptorProto and fill out a Proto # object describing it, and save these to the instance. - protos: Dict[str, Proto] = {} + # + # The first pass gathers messages and enums but NOT services or methods. + # This is a workaround for a limitation in protobuf annotations for + # long running operations: the annotations are strings that reference + # message types but do not require a proto import. 
+ # This hack attempts to address a common case where API authors, + # not wishing to generate an 'unused import' warning, + # don't import the proto file defining the real response or metadata + # type into the proto file that defines an LRO. + # We just load all the APIs types first and then + # load the services and methods with the full scope of types. + pre_protos: Dict[str, Proto] = dict(prior_protos or {}) for fd in file_descriptors: - fd.name = disambiguate_keyword_fname(fd.name, protos) - protos[fd.name] = _ProtoBuilder( + fd.name = disambiguate_keyword_fname(fd.name, pre_protos) + pre_protos[fd.name] = Proto.build( file_descriptor=fd, file_to_generate=fd.package.startswith(package), naming=naming, opts=opts, - prior_protos=protos, - ).proto + prior_protos=pre_protos, + # Ugly, ugly hack. + load_services=False, + ) + + # Second pass uses all the messages and enums defined in the entire API. + # This allows LRO returning methods to see all the types in the API, + # bypassing the above missing import problem. + protos: Dict[str, Proto] = { + name: Proto.build( + file_descriptor=proto.file_pb2, + file_to_generate=proto.file_to_generate, + naming=naming, + opts=opts, + prior_protos=pre_protos, + ) + for name, proto in pre_protos.items() + } # Done; return the API. 
return cls(naming=naming, all_protos=protos) @@ -319,11 +361,15 @@ class _ProtoBuilder: """ EMPTY = descriptor_pb2.SourceCodeInfo.Location() - def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool, - naming: api_naming.Naming, - opts: options.Options = options.Options(), - prior_protos: Mapping[str, Proto] = None): + def __init__( + self, + file_descriptor: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool, + naming: api_naming.Naming, + opts: options.Options = options.Options(), + prior_protos: Mapping[str, Proto] = None, + load_services: bool = True + ): self.proto_messages: Dict[str, wrappers.MessageType] = {} self.proto_enums: Dict[str, wrappers.EnumType] = {} self.proto_services: Dict[str, wrappers.Service] = {} @@ -388,7 +434,7 @@ def __init__(self, file_descriptor: descriptor_pb2.FileDescriptorProto, # This prevents us from generating common services (e.g. LRO) when # they are being used as an import just to get types declared in the # same files. - if file_to_generate: + if file_to_generate and load_services: self._load_children(file_descriptor.service, self._load_service, address=self.address, path=(6,)) # TODO(lukesneeringer): oneofs are on path 7. @@ -522,6 +568,116 @@ def _get_fields(self, # Done; return the answer. return answer + def _get_retry_and_timeout( + self, + service_address: metadata.Address, + meth_pb: descriptor_pb2.MethodDescriptorProto + ) -> Tuple[Optional[wrappers.RetryInfo], Optional[float]]: + """Returns the retry and timeout configuration of a method if it exists. + + Args: + service_address (~.metadata.Address): An address object for the + service, denoting the location of these methods. + meth_pb (~.descriptor_pb2.MethodDescriptorProto): A + protobuf method objects. + + Returns: + Tuple[Optional[~.wrappers.RetryInfo], Optional[float]]: The retry + and timeout information for the method if it exists. 
+ """ + + # If we got a gRPC service config, get the appropriate retry + # and timeout information from it. + retry = None + timeout = None + + # This object should be a dictionary that conforms to the + # gRPC service config proto: + # Repo: https://github.com/grpc/grpc-proto/ + # Filename: grpc/service_config/service_config.proto + # + # We only care about a small piece, so we are just leaving + # it as a dictionary and parsing accordingly. + if self.opts.retry: + # The gRPC service config uses a repeated `name` field + # with a particular format, which we match against. + # This defines the expected selector for *this* method. + selector = { + 'service': '{package}.{service_name}'.format( + package='.'.join(service_address.package), + service_name=service_address.name, + ), + 'method': meth_pb.name, + } + + # Find the method config that applies to us, if any. + mc = next((c for c in self.opts.retry.get('methodConfig', []) + if selector in c.get('name')), None) + if mc: + # Set the timeout according to this method config. + if mc.get('timeout'): + timeout = self._to_float(mc['timeout']) + + # Set the retry according to this method config. + if 'retryPolicy' in mc: + r = mc['retryPolicy'] + retry = wrappers.RetryInfo( + max_attempts=r.get('maxAttempts', 0), + initial_backoff=self._to_float( + r.get('initialBackoff', '0s'), + ), + max_backoff=self._to_float(r.get('maxBackoff', '0s')), + backoff_multiplier=r.get('backoffMultiplier', 0.0), + retryable_exceptions=frozenset( + exceptions.exception_class_for_grpc_status( + getattr(grpc.StatusCode, code), + ) + for code in r.get('retryableStatusCodes', []) + ), + ) + + return retry, timeout + + def _maybe_get_lro( + self, + service_address: metadata.Address, + meth_pb: descriptor_pb2.MethodDescriptorProto + ) -> Optional[wrappers.OperationInfo]: + """Determines whether a method is a Long Running Operation (aka LRO) + and, if it is, return an OperationInfo that includes the response + and metadata types. 
+ + Args: + service_address (~.metadata.Address): An address object for the + service, denoting the location of these methods. + meth_pb (~.descriptor_pb2.MethodDescriptorProto): A + protobuf method objects. + + Returns: + Optional[~.wrappers.OperationInfo]: The info for the long-running + operation, if the passed method is an LRO. + """ + lro = None + + # If the output type is google.longrunning.Operation, we use + # a specialized object in its place. + if meth_pb.output_type.endswith('google.longrunning.Operation'): + op = meth_pb.options.Extensions[operations_pb2.operation_info] + if not op.response_type or not op.metadata_type: + raise TypeError( + f'rpc {meth_pb.name} returns a google.longrunning.' + 'Operation, but is missing a response type or ' + 'metadata type.', + ) + response_key = service_address.resolve(op.response_type) + metadata_key = service_address.resolve(op.metadata_type) + lro = wrappers.OperationInfo( + response_type=self.api_messages[response_key], + metadata_type=self.api_messages[metadata_key], + ) + + return lro + def _get_methods(self, methods: Sequence[descriptor_pb2.MethodDescriptorProto], service_address: metadata.Address, path: Tuple[int, ...], @@ -542,84 +698,16 @@ def _get_methods(self, """ # Iterate over the methods and collect them into a dictionary. answer: Dict[str, wrappers.Method] = collections.OrderedDict() - for meth_pb, i in zip(methods, range(0, sys.maxsize)): - lro = None - - # If the output type is google.longrunning.Operation, we use - # a specialized object in its place. - if meth_pb.output_type.endswith('google.longrunning.Operation'): - op = meth_pb.options.Extensions[operations_pb2.operation_info] - if not op.response_type or not op.metadata_type: - raise TypeError( - f'rpc {meth_pb.name} returns a google.longrunning.' 
- 'Operation, but is missing a response type or ' - 'metadata type.', - ) - lro = wrappers.OperationInfo( - response_type=self.api_messages[service_address.resolve( - op.response_type, - )], - metadata_type=self.api_messages[service_address.resolve( - op.metadata_type, - )], - ) - - # If we got a gRPC service config, get the appropriate retry - # and timeout information from it. - retry = None - timeout = None - - # This object should be a dictionary that conforms to the - # gRPC service config proto: - # Repo: https://github.com/grpc/grpc-proto/ - # Filename: grpc/service_config/service_config.proto - # - # We only care about a small piece, so we are just leaving - # it as a dictionary and parsing accordingly. - if self.opts.retry: - # The gRPC service config uses a repeated `name` field - # with a particular format, which we match against. - # This defines the expected selector for *this* method. - selector = { - 'service': '{package}.{service_name}'.format( - package='.'.join(service_address.package), - service_name=service_address.name, - ), - 'method': meth_pb.name, - } - - # Find the method config that applies to us, if any. - mc = next((i for i in self.opts.retry.get('methodConfig', []) - if selector in i.get('name')), None) - if mc: - # Set the timeout according to this method config. - if mc.get('timeout'): - timeout = self._to_float(mc['timeout']) - - # Set the retry according to this method config. 
- if 'retryPolicy' in mc: - r = mc['retryPolicy'] - retry = wrappers.RetryInfo( - max_attempts=r.get('maxAttempts', 0), - initial_backoff=self._to_float( - r.get('initialBackoff', '0s'), - ), - max_backoff=self._to_float( - r.get('maxBackoff', '0s'), - ), - backoff_multiplier=r.get('backoffMultiplier', 0.0), - retryable_exceptions=frozenset( - exceptions.exception_class_for_grpc_status( - getattr(grpc.StatusCode, code), - ) - for code in r.get('retryableStatusCodes', []) - ), - ) + for i, meth_pb in enumerate(methods): + retry, timeout = self._get_retry_and_timeout( + service_address, + meth_pb + ) # Create the method wrapper object. answer[meth_pb.name] = wrappers.Method( input=self.api_messages[meth_pb.input_type.lstrip('.')], - lro=lro, + lro=self._maybe_get_lro(service_address, meth_pb), method_pb=meth_pb, meta=metadata.Metadata( address=service_address.child(meth_pb.name, path + (i,)), diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index f2d2eba10bb3..44b5d491d01f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -705,6 +705,82 @@ def test_lro_missing_annotation(): }, naming=make_naming()) +def test_cross_file_lro(): + # Protobuf annotations for longrunning operations use strings to name types. + # As far as the protobuf compiler is concerned they don't reference the + # _types_ at all, so the corresponding proto file that owns the types + # does not need to be imported. + # This creates a potential issue when building rich structures around + # LRO returning methods. This test is intended to verify that the issue + # is handled correctly. 
+ + # Set up a prior proto that mimics google/protobuf/empty.proto + lro_proto = api.Proto.build(make_file_pb2( + name='operations.proto', package='google.longrunning', + messages=(make_message_pb2(name='Operation'),), + ), file_to_generate=False, naming=make_naming()) + + # Set up a method with LRO annotations. + method_pb2 = descriptor_pb2.MethodDescriptorProto( + name='AsyncDoThing', + input_type='google.example.v3.AsyncDoThingRequest', + output_type='google.longrunning.Operation', + ) + method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( + operations_pb2.OperationInfo( + response_type='google.example.v3.AsyncDoThingResponse', + metadata_type='google.example.v3.AsyncDoThingMetadata', + ), + ) + + # Set up the service with an RPC. + service_file = make_file_pb2( + name='service_file.proto', + package='google.example.v3', + messages=( + make_message_pb2(name='AsyncDoThingRequest', fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='LongRunningService', + method=(method_pb2,), + ), + ) + ) + + # Set up the messages, including the annotated ones. + # This file is distinct and is not explicitly imported + # into the file that defines the service. + messages_file = make_file_pb2( + name='messages_file.proto', + package='google.example.v3', + messages=( + make_message_pb2(name='AsyncDoThingResponse', fields=()), + make_message_pb2(name='AsyncDoThingMetadata', fields=()), + ), + ) + + api_schema = api.API.build( + file_descriptors=( + service_file, + messages_file, + ), + package='google.example.v3', + prior_protos={'google/longrunning/operations.proto': lro_proto, }, + ) + + method = ( + api_schema. + all_protos['service_file.proto']. + services['google.example.v3.LongRunningService']. 
+ methods['AsyncDoThing'] + ) + + assert method.lro + assert method.lro.response_type.name == 'AsyncDoThingResponse' + assert method.lro.metadata_type.name == 'AsyncDoThingMetadata' + + def test_enums(): L = descriptor_pb2.SourceCodeInfo.Location enum_pb = descriptor_pb2.EnumDescriptorProto(name='Silly', value=( From 6d5566ec1b55ef6bcc18bbfdd4246aa62f38e7db Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 3 Mar 2020 16:48:07 -0800 Subject: [PATCH 0220/1339] Flattening and LRO module conflicts (#332) Flattened method fields generate names in client methods. Barring a central authority, any source of names generates the possibility of name collisions and requires disambiguation. Before this fix, flattened fields could generate name collisions with imported modules. This commit adds field names to the context with which names are disambiguated and subjects LRO operation info structures to the same collision avoidance logic as other rich data types. Unblocks autogenerated unit tests for Datalabeling API. --- packages/gapic-generator/gapic/schema/api.py | 55 ++++----- .../gapic-generator/gapic/schema/metadata.py | 23 ++-- .../gapic-generator/gapic/schema/wrappers.py | 49 ++++++-- .../tests/unit/schema/test_api.py | 105 ++++++++++++++++++ 4 files changed, 187 insertions(+), 45 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index b5b2b43bd64f..20bd45459017 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -123,18 +123,21 @@ def names(self) -> FrozenSet[str]: # Add names of all enums, messages, and fields. 
answer: Set[str] = {e.name for e in self.all_enums.values()} for message in self.all_messages.values(): - answer = answer.union({f.name for f in message.fields.values()}) + answer.update(f.name for f in message.fields.values()) answer.add(message.name) # Identify any import module names where the same module name is used # from distinct packages. - modules: Dict[str, Set[str]] = {} - for t in chain(*[m.field_types for m in self.all_messages.values()]): - modules.setdefault(t.ident.module, set()) - modules[t.ident.module].add(t.ident.package) - for module_name, packages in modules.items(): - if len(packages) > 1: - answer.add(module_name) + modules: Dict[str, Set[str]] = collections.defaultdict(set) + for m in self.all_messages.values(): + for t in m.field_types: + modules[t.ident.module].add(t.ident.package) + + answer.update( + module_name + for module_name, packages in modules.items() + if len(packages) > 1 + ) # Return the set of collision names. return frozenset(answer) @@ -462,24 +465,24 @@ def proto(self) -> Proto: return naive # Return a context-aware proto object. - # Note: The services bind to themselves, because services get their - # own output files. 
- return dataclasses.replace(naive, - all_enums=collections.OrderedDict([ - (k, v.with_context(collisions=naive.names)) - for k, v in naive.all_enums.items() - ]), - all_messages=collections.OrderedDict([ - (k, v.with_context(collisions=naive.names)) - for k, v in naive.all_messages.items() - ]), - services=collections.OrderedDict([ - (k, v.with_context(collisions=v.names)) - for k, v in naive.services.items() - ]), - meta=naive.meta.with_context( - collisions=naive.names), - ) + return dataclasses.replace( + naive, + all_enums=collections.OrderedDict( + (k, v.with_context(collisions=naive.names)) + for k, v in naive.all_enums.items() + ), + all_messages=collections.OrderedDict( + (k, v.with_context(collisions=naive.names)) + for k, v in naive.all_messages.items() + ), + services=collections.OrderedDict( + # Note: services bind to themselves because services get their + # own output files. + (k, v.with_context(collisions=v.names)) + for k, v in naive.services.items() + ), + meta=naive.meta.with_context(collisions=naive.names), + ) @cached_property def api_enums(self) -> Mapping[str, wrappers.EnumType]: diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index c6fabd181d59..a7e61407701d 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -84,11 +84,16 @@ def module_alias(self) -> str: to users (albeit looking auto-generated). """ if self.module in self.collisions: - return '_'.join(( - ''.join([i[0] for i in self.package - if i != self.api_naming.version]), - self.module, - )) + return '_'.join( + ( + ''.join( + i[0] + for i in self.package + if i != self.api_naming.version + ), + self.module, + ) + ) return '' @property @@ -161,7 +166,8 @@ def child(self, child_name: str, path: Tuple[int, ...]) -> 'Address': Returns: ~.Address: The new address object. 
""" - return dataclasses.replace(self, + return dataclasses.replace( + self, module_path=self.module_path + path, name=child_name, parent=self.parent + (self.name,) if self.name else self.parent, @@ -278,9 +284,10 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Metadata': ``Address`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, + return dataclasses.replace( + self, address=self.address.with_context(collisions=collisions), - ) + ) @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 8a45d6747d28..ed95295212d2 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -330,18 +330,19 @@ def with_context(self, *, """ return dataclasses.replace( self, - fields=collections.OrderedDict([ + fields=collections.OrderedDict( (k, v.with_context(collisions=collisions)) for k, v in self.fields.items() - ]) if not skip_fields else self.fields, - nested_enums=collections.OrderedDict([ + ) if not skip_fields else self.fields, + nested_enums=collections.OrderedDict( (k, v.with_context(collisions=collisions)) for k, v in self.nested_enums.items() - ]), - nested_messages=collections.OrderedDict([(k, v.with_context( - collisions=collisions, - skip_fields=skip_fields, - )) for k, v in self.nested_messages.items()]), + ), + nested_messages=collections.OrderedDict( + (k, v.with_context( + collisions=collisions, + skip_fields=skip_fields,)) + for k, v in self.nested_messages.items()), meta=self.meta.with_context(collisions=collisions), ) @@ -455,6 +456,23 @@ class OperationInfo: response_type: MessageType metadata_type: MessageType + def with_context(self, *, collisions: FrozenSet[str]) -> 'OperationInfo': + """Return a derivative of this OperationInfo with the provided context. + + This method is used to address naming collisions. 
The returned + ``OperationInfo`` object aliases module names to avoid naming collisions + in the file being written. + """ + return dataclasses.replace( + self, + response_type=self.response_type.with_context( + collisions=collisions + ), + metadata_type=self.metadata_type.with_context( + collisions=collisions + ), + ) + @dataclasses.dataclass(frozen=True) class RetryInfo: @@ -681,8 +699,13 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': ``Method`` object aliases module names to avoid naming collisions in the file being written. """ + maybe_lro = self.lro.with_context( + collisions=collisions + ) if self.lro else None + return dataclasses.replace( self, + lro=maybe_lro, input=self.input.with_context(collisions=collisions), output=self.output.with_context(collisions=collisions), meta=self.meta.with_context(collisions=collisions), @@ -810,9 +833,13 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """ return dataclasses.replace( self, - methods=collections.OrderedDict([ - (k, v.with_context(collisions=collisions)) + methods=collections.OrderedDict( + (k, v.with_context( + # A methodd's flattened fields create additional names + # that may conflict with module imports. 
+ collisions=collisions | frozenset(v.flattened_fields.keys())) + ) for k, v in self.methods.items() - ]), + ), meta=self.meta.with_context(collisions=collisions), ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 44b5d491d01f..43b10a277ef2 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -17,6 +17,7 @@ import pytest +from google.api import client_pb2 from google.api_core import exceptions from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 @@ -219,6 +220,110 @@ def test_proto_names_import_collision(): 'other_message', 'primitive', 'spam'} +def test_proto_names_import_collision_flattening(): + lro_proto = api.Proto.build(make_file_pb2( + name='operations.proto', package='google.longrunning', + messages=(make_message_pb2(name='Operation'),), + ), file_to_generate=False, naming=make_naming()) + + fd = ( + make_file_pb2( + name='mollusc.proto', + package='google.animalia.mollusca', + messages=( + make_message_pb2(name='Mollusc',), + make_message_pb2(name='MolluscResponse',), + make_message_pb2(name='MolluscMetadata',), + ), + ), + make_file_pb2( + name='squid.proto', + package='google.animalia.mollusca', + messages=( + make_message_pb2( + name='IdentifySquidRequest', + fields=( + make_field_pb2( + name='mollusc', + number=1, + type_name='.google.animalia.mollusca.Mollusc' + ), + ), + ), + make_message_pb2( + name='IdentifySquidResponse', + fields=(), + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='SquidIdentificationService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='IdentifyMollusc', + input_type='google.animalia.mollusca.IdentifySquidRequest', + output_type='google.longrunning.Operation', + ), + ), + ), + ), + ), + ) + + method_options = fd[1].service[0].method[0].options + # Notice that a signature field collides with the 
name of an imported module + method_options.Extensions[client_pb2.method_signature].append('mollusc') + method_options.Extensions[operations_pb2.operation_info].MergeFrom( + operations_pb2.OperationInfo( + response_type='google.animalia.mollusca.MolluscResponse', + metadata_type='google.animalia.mollusca.MolluscMetadata', + ) + ) + api_schema = api.API.build( + fd, + package='google.animalia.mollusca', + prior_protos={ + 'google/longrunning/operations.proto': lro_proto, + } + ) + + actual_imports = { + ref_type.ident.python_import + for service in api_schema.services.values() + for method in service.methods.values() + for ref_type in method.ref_types + } + + expected_imports = { + imp.Import( + package=('google', 'animalia', 'mollusca', 'types'), + module='mollusc', + alias='gam_mollusc', + ), + imp.Import( + package=('google', 'animalia', 'mollusca', 'types'), + module='squid', + ), + imp.Import(package=('google', 'api_core'), module='operation',), + } + + assert expected_imports == actual_imports + + method = ( + api_schema + .services['google.animalia.mollusca.SquidIdentificationService'] + .methods['IdentifyMollusc'] + ) + + actual_response_import = method.lro.response_type.ident.python_import + expected_response_import = imp.Import( + package=('google', 'animalia', 'mollusca', 'types'), + module='mollusc', + alias='gam_mollusc', + ) + assert actual_response_import == expected_response_import + + def test_proto_builder_constructor(): sentinel_message = descriptor_pb2.DescriptorProto() sentinel_enum = descriptor_pb2.EnumDescriptorProto() From 1e5e81e12f351a4734a8df07a67fce127e02fb94 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 3 Mar 2020 16:53:50 -0800 Subject: [PATCH 0221/1339] Version bump to 0.19.0 (#333) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 6bd1b97e87cb..c5204a5ad83f 100644 --- 
a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.18.0', + version='0.19.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From b9a0a61d7005003459cdb51c4bbf968db7477eeb Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 4 Mar 2020 13:03:40 -0800 Subject: [PATCH 0222/1339] Out of order enums (#334) Message fields can reference enum types before they're defined in a proto file. Even if all enums are loaded before all messages, messages can define nested proto types, which means that the order message types are read in can determine whether field's type exists before it is referenced. The workaround for this is to do a two pass type resolution: load top level enums and top level messages (which may recursively add additional types), then resolve any field in any message whose type is empty. This cl extends existing logic for out of order message definitions to include out of order enums. --- packages/gapic-generator/gapic/schema/api.py | 25 +++++-- .../tests/unit/schema/test_api.py | 68 +++++++++++++++++++ 2 files changed, 86 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 20bd45459017..6a987b3b5028 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -425,13 +425,24 @@ def __init__( # In this situation, we would not have come across the message yet, # and the field would have its original textual reference to the # message (`type_name`) but not its resolved message wrapper. 
- for message in self.proto_messages.values(): - for field in message.fields.values(): - if field.type_name and not any((field.message, field.enum)): - object.__setattr__( - field, 'message', - self.proto_messages[field.type_name.lstrip('.')], - ) + orphan_field_gen = ( + (field.type_name.lstrip('.'), field) + for message in self.proto_messages.values() + for field in message.fields.values() + if field.type_name and not (field.message or field.enum) + ) + for key, field in orphan_field_gen: + maybe_msg_type = self.proto_messages.get(key) + maybe_enum_type = self.proto_enums.get(key) + if maybe_msg_type: + object.__setattr__(field, 'message', maybe_msg_type) + elif maybe_enum_type: + object.__setattr__(field, 'enum', maybe_enum_type) + else: + raise TypeError( + f"Unknown type referenced in " + "{self.file_descriptor.name}: '{key}'" + ) # Only generate the service if this is a target file to be generated. # This prevents us from generating common services (e.g. LRO) when diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 43b10a277ef2..cd4f6d1504f8 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -488,6 +488,74 @@ def test_messages_nested(): assert bar not in proto.messages +def test_out_of_order_enums(): + # Enums can be referenced as field types before they + # are defined in the proto file. + # This happens when they're a nested type within a message. 
+ messages = ( + make_message_pb2( + name='Squid', + fields=( + make_field_pb2( + name='base_color', + type_name='google.mollusca.Chromatophore.Color', + number=1, + ), + ), + ), + make_message_pb2( + name='Chromatophore', + enum_type=( + descriptor_pb2.EnumDescriptorProto(name='Color', value=()), + ), + ) + ) + fd = ( + make_file_pb2( + name='squid.proto', + package='google.mollusca', + messages=messages, + services=( + descriptor_pb2.ServiceDescriptorProto( + name='SquidService', + ), + ), + ), + ) + api_schema = api.API.build(fd, package='google.mollusca') + field_type = ( + api_schema + .messages['google.mollusca.Squid'] + .fields['base_color'] + .type + ) + enum_type = api_schema.enums['google.mollusca.Chromatophore.Color'] + assert field_type == enum_type + + +def test_undefined_type(): + fd = ( + make_file_pb2( + name='mollusc.proto', + package='google.mollusca', + messages=( + make_message_pb2( + name='Mollusc', + fields=( + make_field_pb2( + name='class', + type_name='google.mollusca.Class', + number=1, + ), + ) + ), + ), + ), + ) + with pytest.raises(TypeError): + api.API.build(fd, package='google.mollusca') + + def test_python_modules_nested(): fd = ( make_file_pb2( From 4d0e5e7b3ddc34226c6f6bad623db12e50828d0e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 4 Mar 2020 18:36:06 -0800 Subject: [PATCH 0223/1339] Add a test and impl for map field mock value (#335) Protobuf map fields are special: under the hood they are implemnted as a sequence of generated type with two fields: 'key', whose type is the map key type, and 'value', whose type is the map value type. The user almost never wants to know about this implementation detail, and the python proto surface allows python dictionaries as rvalues when assigning to a mapped field. This change uses dict literals in generated unit tests where flattened parameters may refer to mapped fields. 
--- .../gapic-generator/gapic/schema/wrappers.py | 15 +++++-- .../%sub/services/%service/client.py.j2 | 4 +- .../tests/unit/schema/wrappers/test_field.py | 43 ++++++++++++++++++- 3 files changed, 56 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index ed95295212d2..9fcc0a77ccc8 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -110,9 +110,16 @@ def mock_value(self) -> str: sub = next(iter(self.type.fields.values())) answer = f'{self.type.ident}({sub.name}={sub.mock_value})' - # If this is a repeated field, then the mock answer should - # be a list. - if self.repeated: + if self.map: + # Maps are a special case beacuse they're represented internally as + # a list of a generated type with two fields: 'key' and 'value'. + answer = '{{{}: {}}}'.format( + self.type.fields["key"].mock_value, + self.type.fields["value"].mock_value, + ) + elif self.repeated: + # If this is a repeated field, then the mock answer should + # be a list. answer = f'[{answer}]' # Done; return the mock value. 
@@ -568,7 +575,6 @@ def field_headers(self) -> Sequence[str]: @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: """Return the signature defined for this method.""" - signatures = self.options.Extensions[client_pb2.method_signature] cross_pkg_request = self.input.ident.package != self.ident.package def filter_fields(sig): @@ -585,6 +591,7 @@ def filter_fields(sig): yield name, field + signatures = self.options.Extensions[client_pb2.method_signature] answer: Dict[str, Field] = collections.OrderedDict( name_and_field for sig in signatures diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 980bc476958d..735f70084764 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -204,9 +204,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} {# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + {% if method.flattened_fields -%} # If we have keyword arguments corresponding to fields on the # request, apply these. 
+ {% endif -%} + {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index ffea8f4ed1a2..8ddc74ebf4d9 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -173,6 +173,25 @@ def test_mock_value_repeated(): assert field.mock_value == "['foo_bar_value']" +def test_mock_value_map(): + entry_msg = make_message( + name='SquidEntry', + fields=( + make_field(name='key', type='TYPE_STRING'), + make_field(name='value', type='TYPE_STRING'), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + field = make_field( + name='squids', + type_name='mollusc.SquidEntry', + message=entry_msg, + label=3, + type='TYPE_MESSAGE', + ) + assert field.mock_value == "{'key_value': 'value_value'}" + + def test_mock_value_enum(): values = [ descriptor_pb2.EnumValueDescriptorProto(name='UNSPECIFIED', number=0), @@ -227,4 +246,26 @@ def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: if isinstance(kwargs['type'], str): kwargs['type'] = T.Value(kwargs['type']) field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) - return wrappers.Field(field_pb=field_pb, message=message, enum=enum) + field = wrappers.Field(field_pb=field_pb, message=message, enum=enum) + return field + + +def make_message( + name, package='foo.bar.v1', module='baz', fields=(), meta=None, options=None +) -> wrappers.MessageType: + message_pb = descriptor_pb2.DescriptorProto( + name=name, + field=[i.field_pb for i in fields], + options=options, + ) + return wrappers.MessageType( + message_pb=message_pb, + fields=collections.OrderedDict((i.name, i) for i in fields), + nested_messages={}, + 
nested_enums={}, + meta=meta or metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(package.split('.')), + module=module, + )), + ) From 1fa6bb3c550e0165491255d6415fe63bbec2a6b1 Mon Sep 17 00:00:00 2001 From: Dan O'Meara Date: Mon, 9 Mar 2020 10:34:18 -0700 Subject: [PATCH 0224/1339] fix: correctly instantiates client (#338) fixes #337 --- packages/gapic-generator/docs/getting-started/_verifying.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index bc33ed8303c2..50fd3a676b84 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -33,7 +33,7 @@ Here is a test script: # # If you wish, you can send `transport='grpc'` or `transport='http'` # to change which underlying transport layer is being used. - ia = vision.ImageAnnotator(host='vision.googleapis.com') + ia = vision.ImageAnnotatorClient() # Send the request to the server and get the response. response = ia.batch_annotate_images({ From a6451bb109b5009dd67b125f6897d2fa777e38d6 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 12 Mar 2020 11:43:24 -0700 Subject: [PATCH 0225/1339] Update header copyright year to 2020 (#339) Bumps the year to 2020 in the templates. Also tweaked the formatting slightly (copied from go/copyright). 
--- packages/gapic-generator/gapic/templates/_license.j2 | 2 +- .../gapic-generator/tests/unit/samplegen/test_integration.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/_license.j2 b/packages/gapic-generator/gapic/templates/_license.j2 index 3264db7db75f..03ddf2e6ea0f 100644 --- a/packages/gapic-generator/gapic/templates/_license.j2 +++ b/packages/gapic-generator/gapic/templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright (C) 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 8e022e7f6199..502902dc09d9 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -111,7 +111,7 @@ def test_generate_sample_basic(): sample_id = ("mollusc_classify_sync") expected_str = '''# -*- coding: utf-8 -*- -# Copyright (C) 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -247,7 +247,7 @@ def test_generate_sample_basic_unflattenable(): sample_id = ("mollusc_classify_sync") expected_str = '''# -*- coding: utf-8 -*- -# Copyright (C) 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 9d2c5a5145ddbc9303546367907c82f6d9092c4b Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 16 Mar 2020 11:31:23 -0700 Subject: [PATCH 0226/1339] feat: Initial draft of GAPIC Bazel Extensions gapic-generator-python (#342) Initial draft of GAPIC Bazel Extensions gapic-generator-python --- packages/gapic-generator/BUILD.bazel | 20 +++++++++ packages/gapic-generator/WORKSPACE | 35 ++++++++++++++++ packages/gapic-generator/repositories.bzl | 41 +++++++++++++++++++ packages/gapic-generator/requirements.txt | 8 ++++ .../rules_python_gapic/BUILD.bazel | 0 .../rules_python_gapic/py_gapic.bzl | 31 ++++++++++++++ 6 files changed, 135 insertions(+) create mode 100644 packages/gapic-generator/BUILD.bazel create mode 100644 packages/gapic-generator/WORKSPACE create mode 100644 packages/gapic-generator/repositories.bzl create mode 100644 packages/gapic-generator/requirements.txt create mode 100644 packages/gapic-generator/rules_python_gapic/BUILD.bazel create mode 100644 packages/gapic-generator/rules_python_gapic/py_gapic.bzl diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel new file mode 100644 index 000000000000..21f0bf88f2f2 --- /dev/null +++ b/packages/gapic-generator/BUILD.bazel @@ -0,0 +1,20 @@ +load("@gapic_generator_python_pip_deps//:requirements.bzl", "requirement") + +py_binary( + name = "gapic_plugin", + srcs = glob(["gapic/**/*.py"]), + data = glob(["gapic/**/*.j2"]), + main = "gapic/cli/generate.py", + visibility = ["//visibility:public"], + deps = [ + "@com_google_protobuf//:protobuf_python", + requirement("click"), + requirement("google-api-core"), + requirement("googleapis-common-protos"), + requirement("grpcio"), + requirement("jinja2"), + requirement("pypandoc"), + requirement("PyYAML"), + ], + python_version = "PY3", +) \ No newline at end of file diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE new file mode 100644 index 
000000000000..57a357e7b982 --- /dev/null +++ b/packages/gapic-generator/WORKSPACE @@ -0,0 +1,35 @@ +workspace(name = "gapic_generator_python") + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") + +# +# Import rules_python +# +http_archive( + name = "rules_python", + strip_prefix = "rules_python-748aa53d7701e71101dfd15d800e100f6ff8e5d1", + url = "https://github.com/bazelbuild/rules_python/archive/748aa53d7701e71101dfd15d800e100f6ff8e5d1.zip", +) + +load("@rules_python//python:repositories.bzl", "py_repositories") + +py_repositories() + +load("@rules_python//python:pip.bzl", "pip_repositories") + +pip_repositories() + +# +# Import gapic-generator-python specific dependencies +# +load("//:repositories.bzl", "gapic_generator_python") + +gapic_generator_python() + +load("@gapic_generator_python_pip_deps//:requirements.bzl", "pip_install") + +pip_install() + +load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") + +protobuf_deps() diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl new file mode 100644 index 000000000000..ed39838d4f50 --- /dev/null +++ b/packages/gapic-generator/repositories.bzl @@ -0,0 +1,41 @@ +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +load("@rules_python//python:pip.bzl", "pip_import") + +def gapic_generator_python(): + _maybe( + pip_import, + name = "gapic_generator_python_pip_deps", + python_interpreter = "python3", + requirements = "@gapic_generator_python//:requirements.txt", + ) + + _protobuf_version = "3.11.2" + _protobuf_version_in_link = "v%s" % _protobuf_version + _maybe( + http_archive, + name = "com_google_protobuf", + urls = ["https://github.com/protocolbuffers/protobuf/archive/%s.zip" % _protobuf_version_in_link], + strip_prefix = "protobuf-%s" % _protobuf_version, + ) + + _maybe( + http_archive, + name = "bazel_skylib", + strip_prefix = "bazel-skylib-2169ae1c374aab4a09aa90e65efe1a3aad4e279b", + urls = 
["https://github.com/bazelbuild/bazel-skylib/archive/2169ae1c374aab4a09aa90e65efe1a3aad4e279b.tar.gz"], + ) + + _maybe( + http_archive, + name = "com_google_api_codegen", + strip_prefix = "gapic-generator-b32c73219d617f90de70bfa6ff0ea0b0dd638dfe", + urls = ["https://github.com/googleapis/gapic-generator/archive/b32c73219d617f90de70bfa6ff0ea0b0dd638dfe.zip"], + ) + +def _maybe(repo_rule, name, strip_repo_prefix = "", **kwargs): + if not name.startswith(strip_repo_prefix): + return + repo_name = name[len(strip_repo_prefix):] + if repo_name in native.existing_rules(): + return + repo_rule(name = repo_name, **kwargs) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt new file mode 100644 index 000000000000..9cdcdd7574e7 --- /dev/null +++ b/packages/gapic-generator/requirements.txt @@ -0,0 +1,8 @@ +click >= 6.7 +google-api-core >= 1.14.3 +googleapis-common-protos >= 1.6.0 +grpcio >= 1.24.3 +jinja2 >= 2.10 +protobuf >= 3.7.1 +pypandoc >= 1.4 +PyYAML >= 5.1.1 diff --git a/packages/gapic-generator/rules_python_gapic/BUILD.bazel b/packages/gapic-generator/rules_python_gapic/BUILD.bazel new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl new file mode 100644 index 000000000000..50d792b82bc9 --- /dev/null +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -0,0 +1,31 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +load("@com_google_api_codegen//rules_gapic:gapic.bzl", "proto_custom_library") + +def py_gapic_library(name, srcs, **kwargs): + # srcjar_target_name = "%s_srcjar" % name + srcjar_target_name = name + srcjar_output_suffix = ".srcjar" + + proto_custom_library( + name = srcjar_target_name, + deps = srcs, + plugin = Label("@gapic_generator_python//:gapic_plugin"), + plugin_args = [], + plugin_file_args = {}, + output_type = "python_gapic", + output_suffix = srcjar_output_suffix, + **kwargs + ) From cd3a7323dc961afcb0c9cdf5ff996b3224de6ade Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 17 Mar 2020 19:31:32 +0100 Subject: [PATCH 0227/1339] chore(deps): pin dependencies (#343) --- packages/gapic-generator/requirements.txt | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 9cdcdd7574e7..33c114970ce6 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,8 +1,8 @@ -click >= 6.7 -google-api-core >= 1.14.3 -googleapis-common-protos >= 1.6.0 -grpcio >= 1.24.3 -jinja2 >= 2.10 -protobuf >= 3.7.1 -pypandoc >= 1.4 -PyYAML >= 5.1.1 +click==7.1.1 +google-api-core==1.16.0 +googleapis-common-protos==1.51.0 +grpcio==1.27.2 +jinja2==2.11.1 +protobuf==3.11.3 +pypandoc==1.4 +PyYAML==5.3 From 6cf8c4273bc24b805d3f1a48405103ef637e6c6d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 17 Mar 2020 13:47:42 -0700 Subject: [PATCH 0228/1339] Turn naming.Naming into an abstract class (#341) * Turn naming.Naming into an abstract class The new style client libraries have a directory/module structure of the form: apiName: - __init__.py apiName_version: - __init__.py Certain client libraries need to preserve a legacy interface with a directory structure of the form apiName: - version This change 
abstracts out the Naming class to make make the change feasible. --- packages/gapic-generator/.coveragerc | 1 + .../gapic/generator/options.py | 5 +- .../gapic-generator/gapic/schema/metadata.py | 2 +- .../gapic-generator/gapic/schema/naming.py | 65 +++++++++++++------ .../tests/unit/generator/test_generator.py | 16 +++-- .../tests/unit/generator/test_options.py | 6 ++ .../tests/unit/samplegen/test_integration.py | 4 +- .../tests/unit/samplegen/test_samplegen.py | 2 +- .../tests/unit/schema/test_api.py | 2 +- .../tests/unit/schema/test_metadata.py | 8 +-- .../tests/unit/schema/test_naming.py | 46 +++++++++---- 11 files changed, 109 insertions(+), 48 deletions(-) diff --git a/packages/gapic-generator/.coveragerc b/packages/gapic-generator/.coveragerc index e6d2ce8b2774..0c119f04d491 100644 --- a/packages/gapic-generator/.coveragerc +++ b/packages/gapic-generator/.coveragerc @@ -16,4 +16,5 @@ exclude_lines = def __repr__ # Abstract methods by definition are not invoked @abstractmethod + @abc.abstractmethod \ No newline at end of file diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 414e6a5c5687..2b98b88a868b 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -37,10 +37,12 @@ class Options: sample_configs: Tuple[str, ...] = dataclasses.field(default=()) templates: Tuple[str, ...] 
= dataclasses.field(default=('DEFAULT',)) lazy_import: bool = False + old_naming: bool = False # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' OPT_FLAGS: FrozenSet[str] = frozenset(( + 'old-naming', # TODO(dovs): Come up with a better comment 'retry-config', # takes a path 'samples', # output dir 'lazy-import', # requires >= 3.7 @@ -115,7 +117,8 @@ def build(cls, opt_string: str) -> 'Options': for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), templates=tuple(os.path.expanduser(i) for i in templates), - lazy_import=bool(opts.pop('lazy-import', False)) + lazy_import=bool(opts.pop('lazy-import', False)), + old_naming=bool(opts.pop('old-naming', False)), ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index a7e61407701d..d5dbf88c6d59 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -44,7 +44,7 @@ class Address: package: Tuple[str, ...] = dataclasses.field(default_factory=tuple) parent: Tuple[str, ...] = dataclasses.field(default_factory=tuple) api_naming: naming.Naming = dataclasses.field( - default_factory=naming.Naming, + default_factory=naming.NewNaming, ) collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index c0d1e8f87d30..b1a1f6671048 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import abc import dataclasses import os import re @@ -22,15 +23,16 @@ from gapic import utils from gapic.generator import options - -@dataclasses.dataclass(frozen=True) -class Naming: +# See https://github.com/python/mypy/issues/5374 for details on the mypy false +# positive. +@dataclasses.dataclass(frozen=True) # type: ignore +class Naming(abc.ABC): """Naming data for an API. This class contains the naming nomenclature used for this API within templates. - An instance of this object is made available to every template + An concrete child of this object is made available to every template (as ``api.naming``). """ name: str = '' @@ -43,11 +45,11 @@ def __post_init__(self): if not self.product_name: self.__dict__['product_name'] = self.name - @classmethod - def build(cls, - *file_descriptors: descriptor_pb2.FileDescriptorProto, - opts: options.Options = options.Options(), - ) -> 'Naming': + @staticmethod + def build( + *file_descriptors: descriptor_pb2.FileDescriptorProto, + opts: options.Options = options.Options(), + ) -> 'Naming': """Return a full Naming instance based on these file descriptors. This is pieced together from the proto package names as well as the @@ -103,10 +105,12 @@ def build(cls, match = cast(Match, re.search(pattern=pattern, string=root_package)).groupdict() match['namespace'] = match['namespace'] or '' - package_info = cls( + klass = OldNaming if opts.old_naming else NewNaming + package_info = klass( name=match['name'].capitalize(), - namespace=tuple([i.capitalize() - for i in match['namespace'].split('.') if i]), + namespace=tuple( + i.capitalize() for i in match['namespace'].split('.') if i + ), product_name=match['name'].capitalize(), proto_package=root_package, version=match.get('version', ''), @@ -125,16 +129,16 @@ def build(cls, # likely make sense to many users to use dot-separated namespaces and # snake case, so handle that and do the right thing. 
if opts.name: - package_info = dataclasses.replace(package_info, name=' '.join([ + package_info = dataclasses.replace(package_info, name=' '.join(( i.capitalize() for i in opts.name.replace('_', ' ').split(' ') - ])) + ))) if opts.namespace: - package_info = dataclasses.replace(package_info, namespace=tuple([ + package_info = dataclasses.replace(package_info, namespace=tuple( # The join-and-split on "." here causes us to expand out # dot notation that we may have been sent; e.g. a one-tuple # with ('x.y',) will become a two-tuple: ('x', 'y') i.capitalize() for i in '.'.join(opts.namespace).split('.') - ])) + )) # Done; return the naming information. return package_info @@ -142,7 +146,7 @@ def build(cls, def __bool__(self): """Return True if any of the fields are truthy, False otherwise.""" return any( - [getattr(self, i.name) for i in dataclasses.fields(self)], + (getattr(self, i.name) for i in dataclasses.fields(self)), ) @property @@ -164,19 +168,18 @@ def module_namespace(self) -> Tuple[str, ...]: def namespace_packages(self) -> Tuple[str, ...]: """Return the appropriate Python namespace packages.""" answer: List[str] = [] - for cursor in [i.lower() for i in self.namespace]: + for cursor in (i.lower() for i in self.namespace): answer.append(f'{answer[-1]}.{cursor}' if answer else cursor) return tuple(answer) @property + @abc.abstractmethod def versioned_module_name(self) -> str: """Return the versiond module name (e.g. ``apiname_v1``). If there is no version, this is the same as ``module_name``. """ - if self.version: - return f'{self.module_name}_{self.version}' - return self.module_name + raise NotImplementedError @property def warehouse_package_name(self) -> str: @@ -186,3 +189,23 @@ def warehouse_package_name(self) -> str: # proper package name. answer = list(self.namespace) + self.name.split(' ') return '-'.join(answer).lower() + + +class NewNaming(Naming): + @property + def versioned_module_name(self) -> str: + """Return the versiond module name (e.g. 
``apiname_v1``). + + If there is no version, this is the same as ``module_name``. + """ + return self.module_name + (f'_{self.version}' if self.version else '') + + +class OldNaming(Naming): + @property + def versioned_module_name(self) -> str: + """Return the versiond module name (e.g. ``apiname_v1``). + + If there is no version, this is the same as ``module_name``. + """ + return self.module_name + (f'.{self.version}' if self.version else '') diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 2fe230073a56..f676e23e9592 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -356,7 +356,9 @@ def test_samplegen_config_to_output_files( # Need to have the sample template visible to the generator. g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) - api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) + api_schema = make_api( + naming=naming.NewNaming(name='Mollusc', version='v6') + ) actual_response = g.get_response( api_schema, opts=options.Options.build('') @@ -445,7 +447,9 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): # Need to have the sample template visible to the generator. 
g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) - api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) + api_schema = make_api( + naming=naming.NewNaming(name='Mollusc', version='v6') + ) actual_response = g.get_response( api_schema, opts=options.Options.build('') @@ -517,7 +521,9 @@ def test_generator_duplicate_samples(fs): generator = make_generator('samples=samples.yaml') generator._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) - api_schema = make_api(naming=naming.Naming(name='Mollusc', version='v6')) + api_schema = make_api( + naming=naming.NewNaming(name='Mollusc', version='v6') + ) with pytest.raises(types.DuplicateSample): generator.get_response( @@ -591,7 +597,7 @@ def test_dont_generate_in_code_samples( name='Mollusc')], ), ), - naming=naming.Naming(name='Mollusc', version='v6'), + naming=naming.NewNaming(name='Mollusc', version='v6'), ) # Note that we do NOT expect a clam sample. @@ -674,4 +680,4 @@ def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') kwargs.setdefault('product_name', 'Hatstand') - return naming.Naming(**kwargs) + return naming.NewNaming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 9a8ade2be397..0d8aff6c9a70 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -25,6 +25,7 @@ def test_options_empty(): assert len(opts.templates) == 1 assert opts.templates[0].endswith('gapic/templates') assert not opts.lazy_import + assert not opts.old_naming def test_options_replace_templates(): @@ -121,3 +122,8 @@ def test_options_service_config(fs): def test_options_lazy_import(): opts = options.Options.build('lazy-import') assert opts.lazy_import + + +def test_options_old_naming(): + opts = options.Options.build('old-naming') + assert 
opts.old_naming diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 502902dc09d9..ca6e1a44f0a7 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -68,7 +68,7 @@ def test_generate_sample_basic(): } ) - api_naming = naming.Naming( + api_naming = naming.NewNaming( name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( service_pb=namedtuple('service_pb', ['name'])('MolluscService'), @@ -207,7 +207,7 @@ def test_generate_sample_basic_unflattenable(): } ) - api_naming = naming.Naming( + api_naming = naming.NewNaming( name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( service_pb=namedtuple('service_pb', ['name'])('MolluscService'), diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 8717bf1d7794..82e4bee1e28f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -83,7 +83,7 @@ def test_preprocess_sample(): # Verify that the default response is added. 
sample = {} api_schema = api.API( - naming.Naming( + naming.NewNaming( namespace=("mollusc", "cephalopod", "teuthida") ), all_protos={}, diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index cd4f6d1504f8..b50d0b4a9024 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -1022,4 +1022,4 @@ def make_naming(**kwargs) -> naming.Naming: kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') kwargs.setdefault('product_name', 'Hatstand') - return naming.Naming(**kwargs) + return naming.NewNaming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 76744e142a32..a16dc7668429 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -135,7 +135,7 @@ def test_address_resolve(): def test_address_subpackage(): addr = metadata.Address( package=('foo', 'bar', 'baz', 'v1', 'spam', 'eggs'), - api_naming=naming.Naming(proto_package='foo.bar.baz.v1'), + api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1'), ) assert addr.subpackage == ('spam', 'eggs') @@ -143,7 +143,7 @@ def test_address_subpackage(): def test_address_subpackage_no_version(): addr = metadata.Address( package=('foo', 'bar', 'baz', 'spam', 'eggs'), - api_naming=naming.Naming(proto_package='foo.bar.baz'), + api_naming=naming.NewNaming(proto_package='foo.bar.baz'), ) assert addr.subpackage == ('spam', 'eggs') @@ -151,7 +151,7 @@ def test_address_subpackage_no_version(): def test_address_subpackage_empty(): addr = metadata.Address( package=('foo', 'bar', 'baz', 'v1'), - api_naming=naming.Naming(proto_package='foo.bar.baz.v1'), + api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1'), ) assert addr.subpackage == () @@ -188,7 +188,7 @@ def 
make_doc_meta( leading: str = '', trailing: str = '', detached: typing.List[str] = [], - ) -> descriptor_pb2.SourceCodeInfo.Location: +) -> descriptor_pb2.SourceCodeInfo.Location: return metadata.Metadata( documentation=descriptor_pb2.SourceCodeInfo.Location( leading_comments=leading, diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 0463fb2704f4..5b80fa32cc5d 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -149,7 +149,7 @@ def test_cli_override_name(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.cloud.videointelligence.v1') n = naming.Naming.build(proto1, - opts=options.Options(name='Video Intelligence'), + opts=options.Options(name='Video Intelligence'), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Video Intelligence' @@ -160,7 +160,7 @@ def test_cli_override_name_underscores(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.cloud.videointelligence.v1') n = naming.Naming.build(proto1, - opts=options.Options(name='video_intelligence'), + opts=options.Options(name='video_intelligence'), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Video Intelligence' @@ -170,9 +170,10 @@ def test_cli_override_name_underscores(): def test_cli_override_namespace(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.spanner.v1') - n = naming.Naming.build(proto1, + n = naming.Naming.build( + proto1, opts=options.Options(namespace=('google', 'cloud')), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Spanner' assert n.version == 'v1' @@ -182,7 +183,7 @@ def test_cli_override_namespace_dotted(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.spanner.v1') n = naming.Naming.build(proto1, - 
opts=options.Options(namespace=('google.cloud',)), + opts=options.Options(namespace=('google.cloud',)), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Spanner' @@ -192,9 +193,12 @@ def test_cli_override_namespace_dotted(): def test_cli_override_name_and_namespace(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.translation.v2') - n = naming.Naming.build(proto1, - opts=options.Options(namespace=('google', 'cloud'), name='translate'), - ) + n = naming.Naming.build( + proto1, + opts=options.Options( + namespace=('google', 'cloud'), name='translate' + ), + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Translate' assert n.version == 'v2' @@ -203,17 +207,35 @@ def test_cli_override_name_and_namespace(): def test_cli_override_name_and_namespace_versionless(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.translation') - n = naming.Naming.build(proto1, + n = naming.Naming.build( + proto1, opts=options.Options(namespace=('google', 'cloud'), name='translate'), - ) + ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Translate' assert not n.version -def make_naming(**kwargs) -> naming.Naming: +def test_build_factory(): + proto = descriptor_pb2.FileDescriptorProto( + package='google.mollusc.v1alpha1' + ) + old = naming.Naming.build( + proto, + opts=options.Options(old_naming=True) + ) + assert old.versioned_module_name == 'mollusc.v1alpha1' + + new = naming.Naming.build( + proto, + opts=options.Options() + ) + assert new.versioned_module_name == 'mollusc_v1alpha1' + + +def make_naming(klass=naming.NewNaming, **kwargs) -> naming.Naming: kwargs.setdefault('name', 'Hatstand') kwargs.setdefault('namespace', ('Google', 'Cloud')) kwargs.setdefault('version', 'v1') kwargs.setdefault('product_name', 'Hatstand') - return naming.Naming(**kwargs) + return klass(**kwargs) From be254cab682a3f2595a5a680acaa3b4fbb7e93cd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 
19 Mar 2020 22:48:59 +0100 Subject: [PATCH 0229/1339] chore(deps): update dependency pyyaml to v5.3.1 (#344) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 33c114970ce6..2419b0864fed 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -5,4 +5,4 @@ grpcio==1.27.2 jinja2==2.11.1 protobuf==3.11.3 pypandoc==1.4 -PyYAML==5.3 +PyYAML==5.3.1 From fdef285b84cead4d355b60c632f694ecc4e0a194 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 19 Mar 2020 15:31:27 -0700 Subject: [PATCH 0230/1339] Move unit test wrapper generation into a test utils module (#345) This is a maintenance change to move all the unit test utility code that generates schema objects into a test utility module. All the schema tests use the test utils module --- .../gapic-generator/gapic/schema/wrappers.py | 23 +- .../gapic-generator/test_utils/test_utils.py | 352 ++++++++++++++++++ .../tests/unit/schema/test_api.py | 51 +-- .../tests/unit/schema/test_metadata.py | 17 +- .../tests/unit/schema/test_naming.py | 10 +- .../tests/unit/schema/wrappers/test_enums.py | 25 +- .../tests/unit/schema/wrappers/test_field.py | 38 +- .../unit/schema/wrappers/test_message.py | 72 +--- .../tests/unit/schema/wrappers/test_method.py | 87 +---- .../unit/schema/wrappers/test_service.py | 237 +----------- 10 files changed, 397 insertions(+), 515 deletions(-) create mode 100644 packages/gapic-generator/test_utils/test_utils.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 9fcc0a77ccc8..c177b09bf1a9 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -184,8 +184,8 @@ def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: return PrimitiveType.build(bytes) # This 
should never happen. - raise TypeError('Unrecognized protobuf type. This code should ' - 'not be reachable; please file a bug.') + raise TypeError(f'Unrecognized protobuf type: {self.field_pb.type}. ' + 'This code should not be reachable; please file a bug.') def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': """Return a derivative of this field with the provided context. @@ -652,7 +652,8 @@ def paged_result_field(self) -> Optional[Field]: # We found no repeated fields. Return None. return None - def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, EnumType]]: + @utils.cached_property + def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: """Return types referenced by this method.""" # Begin with the input (request) and output (response) messages. answer = [self.input] @@ -660,15 +661,9 @@ def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, En answer.append(self.client_output) answer.extend(self.client_output.field_types) - # If this method has flattening that is honored, add its - # composite types. - # - # This entails adding the module for any field on the signature - # unless the field is a primitive. - flattening = self.legacy_flattened_fields if use_legacy else self.flattened_fields answer.extend( field.type - for field in flattening.values() + for field in self.flattened_fields.values() if field.message or field.enum ) @@ -686,14 +681,6 @@ def _ref_types(self, use_legacy: bool = False) -> Sequence[Union[MessageType, En # Done; return the answer. 
return tuple(answer) - @utils.cached_property - def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: - return self._ref_types() - - @utils.cached_property - def ref_types_legacy(self) -> Sequence[Union[MessageType, EnumType]]: - return self._ref_types(use_legacy=True) - @property def void(self) -> bool: """Return True if this method has no return value, False otherwise.""" diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py new file mode 100644 index 000000000000..32c99fe82e35 --- /dev/null +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -0,0 +1,352 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import typing + +from gapic.schema import metadata +from gapic.schema import naming +from gapic.schema import wrappers +from google.api import annotations_pb2 +from google.api import client_pb2 +from google.api import http_pb2 +from google.protobuf import descriptor_pb2 as desc + + +def make_service(name: str = 'Placeholder', host: str = '', + methods: typing.Tuple[wrappers.Method] = (), + scopes: typing.Tuple[str] = ()) -> wrappers.Service: + # Define a service descriptor, and set a host and oauth scopes if + # appropriate. 
+ service_pb = desc.ServiceDescriptorProto(name=name) + if host: + service_pb.options.Extensions[client_pb2.default_host] = host + service_pb.options.Extensions[client_pb2.oauth_scopes] = ','.join(scopes) + + # Return a service object to test. + return wrappers.Service( + service_pb=service_pb, + methods={m.name: m for m in methods}, + ) + + +# FIXME (lukesneeringer): This test method is convoluted and it makes these +# tests difficult to understand and maintain. +def make_service_with_method_options( + *, + http_rule: http_pb2.HttpRule = None, + method_signature: str = '', + in_fields: typing.Tuple[desc.FieldDescriptorProto] = () +) -> wrappers.Service: + # Declare a method with options enabled for long-running operations and + # field headers. + method = get_method( + 'DoBigThing', + 'foo.bar.ThingRequest', + 'google.longrunning.operations_pb2.Operation', + lro_response_type='foo.baz.ThingResponse', + lro_metadata_type='foo.qux.ThingMetadata', + in_fields=in_fields, + http_rule=http_rule, + method_signature=method_signature, + ) + + # Define a service descriptor. + service_pb = desc.ServiceDescriptorProto(name='ThingDoer') + + # Return a service object to test. + return wrappers.Service( + service_pb=service_pb, + methods={method.name: method}, + ) + + +def get_method(name: str, + in_type: str, + out_type: str, + lro_response_type: str = '', + lro_metadata_type: str = '', *, + in_fields: typing.Tuple[desc.FieldDescriptorProto] = (), + http_rule: http_pb2.HttpRule = None, + method_signature: str = '', + ) -> wrappers.Method: + input_ = get_message(in_type, fields=in_fields) + output = get_message(out_type) + lro = None + + # Define a method descriptor. Set the field headers if appropriate. 
+ method_pb = desc.MethodDescriptorProto( + name=name, + input_type=input_.ident.proto, + output_type=output.ident.proto, + ) + if lro_response_type: + lro = wrappers.OperationInfo( + response_type=get_message(lro_response_type), + metadata_type=get_message(lro_metadata_type), + ) + if http_rule: + ext_key = annotations_pb2.http + method_pb.options.Extensions[ext_key].MergeFrom(http_rule) + if method_signature: + ext_key = client_pb2.method_signature + method_pb.options.Extensions[ext_key].append(method_signature) + + return wrappers.Method( + method_pb=method_pb, + input=input_, + output=output, + lro=lro, + meta=input_.meta, + ) + + +def get_message(dot_path: str, *, + fields: typing.Tuple[desc.FieldDescriptorProto] = (), + ) -> wrappers.MessageType: + # Pass explicit None through (for lro_metadata). + if dot_path is None: + return None + + # Note: The `dot_path` here is distinct from the canonical proto path + # because it includes the module, which the proto path does not. + # + # So, if trying to test the DescriptorProto message here, the path + # would be google.protobuf.descriptor.DescriptorProto (whereas the proto + # path is just google.protobuf.DescriptorProto). 
+ pieces = dot_path.split('.') + pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] + + return wrappers.MessageType( + fields={i.name: wrappers.Field( + field_pb=i, + enum=get_enum(i.type_name) if i.type_name else None, + ) for i in fields}, + nested_messages={}, + nested_enums={}, + message_pb=desc.DescriptorProto(name=name, field=fields), + meta=metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(pkg), + module=module, + )), + ) + + +def make_method( + name: str, input_message: wrappers.MessageType = None, + output_message: wrappers.MessageType = None, + package: typing.Union[typing.Tuple[str], str] = 'foo.bar.v1', + module: str = 'baz', + http_rule: http_pb2.HttpRule = None, + signatures: typing.Sequence[str] = (), + **kwargs) -> wrappers.Method: + # Use default input and output messages if they are not provided. + input_message = input_message or make_message('MethodInput') + output_message = output_message or make_message('MethodOutput') + + # Create the method pb2. + method_pb = desc.MethodDescriptorProto( + name=name, + input_type=str(input_message.meta.address), + output_type=str(output_message.meta.address), + **kwargs + ) + + # If there is an HTTP rule, process it. + if http_rule: + ext_key = annotations_pb2.http + method_pb.options.Extensions[ext_key].MergeFrom(http_rule) + + # If there are signatures, include them. + for sig in signatures: + ext_key = client_pb2.method_signature + method_pb.options.Extensions[ext_key].append(sig) + + if isinstance(package, str): + package = tuple(package.split('.')) + + # Instantiate the wrapper class. 
+ return wrappers.Method( + method_pb=method_pb, + input=input_message, + output=output_message, + meta=metadata.Metadata(address=metadata.Address( + name=name, + package=package, + module=module, + parent=(f'{name}Service',), + )), + ) + + +def make_field( + name: str = 'my_field', + number: int = 1, + repeated: bool = False, + message: wrappers.MessageType = None, + enum: wrappers.EnumType = None, + meta: metadata.Metadata = None, + **kwargs +) -> wrappers.Field: + T = desc.FieldDescriptorProto.Type + + if message: + kwargs.setdefault('type_name', str(message.meta.address)) + kwargs['type'] = 'TYPE_MESSAGE' + elif enum: + kwargs.setdefault('type_name', str(enum.meta.address)) + kwargs['type'] = 'TYPE_ENUM' + else: + kwargs.setdefault('type', T.Value('TYPE_BOOL')) + + if isinstance(kwargs['type'], str): + kwargs['type'] = T.Value(kwargs['type']) + + label = kwargs.pop('label', 3 if repeated else 1) + field_pb = desc.FieldDescriptorProto( + name=name, + label=label, + number=number, + **kwargs + ) + return wrappers.Field( + field_pb=field_pb, + enum=enum, + message=message, + meta=meta or metadata.Metadata(), + ) + + +def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', + fields: typing.Sequence[wrappers.Field] = (), + meta: metadata.Metadata = None, + options: desc.MethodOptions = None, + ) -> wrappers.MessageType: + message_pb = desc.DescriptorProto( + name=name, + field=[i.field_pb for i in fields], + options=options, + ) + return wrappers.MessageType( + message_pb=message_pb, + fields=collections.OrderedDict((i.name, i) for i in fields), + nested_messages={}, + nested_enums={}, + meta=meta or metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(package.split('.')), + module=module, + )), + ) + + +def get_enum(dot_path: str) -> wrappers.EnumType: + pieces = dot_path.split('.') + pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] + return wrappers.EnumType( + enum_pb=desc.EnumDescriptorProto(name=name), + 
meta=metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(pkg), + module=module, + )), + values=[], + ) + + +def make_enum( + name: str, + package: str = 'foo.bar.v1', + module: str = 'baz', + values: typing.Tuple[str, int] = (), + meta: metadata.Metadata = None, +) -> wrappers.EnumType: + enum_value_pbs = [ + desc.EnumValueDescriptorProto(name=i[0], number=i[1]) + for i in values + ] + enum_pb = desc.EnumDescriptorProto( + name=name, + value=enum_value_pbs, + ) + return wrappers.EnumType( + enum_pb=enum_pb, + values=[wrappers.EnumValueType(enum_value_pb=evpb) + for evpb in enum_value_pbs], + meta=meta or metadata.Metadata(address=metadata.Address( + name=name, + package=tuple(package.split('.')), + module=module, + )), + ) + + +def make_naming(**kwargs) -> naming.Naming: + kwargs.setdefault('name', 'Hatstand') + kwargs.setdefault('namespace', ('Google', 'Cloud')) + kwargs.setdefault('version', 'v1') + kwargs.setdefault('product_name', 'Hatstand') + return naming.NewNaming(**kwargs) + + +def make_message_pb2( + name: str, + fields: tuple = (), + **kwargs +) -> desc.DescriptorProto: + return desc.DescriptorProto(name=name, field=fields, **kwargs) + + +def make_field_pb2(name: str, number: int, + type: int = 11, # 11 == message + type_name: str = None, + ) -> desc.FieldDescriptorProto: + return desc.FieldDescriptorProto( + name=name, + number=number, + type=type, + type_name=type_name, + ) + + +def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, + messages: typing.Sequence[desc.DescriptorProto] = (), + enums: typing.Sequence[desc.EnumDescriptorProto] = (), + services: typing.Sequence[desc.ServiceDescriptorProto] = (), + locations: typing.Sequence[desc.SourceCodeInfo.Location] = (), + ) -> desc.FileDescriptorProto: + return desc.FileDescriptorProto( + name=name, + package=package, + message_type=messages, + enum_type=enums, + service=services, + source_code_info=desc.SourceCodeInfo(location=locations), + ) + + +def 
make_doc_meta( + *, + leading: str = '', + trailing: str = '', + detached: typing.List[str] = [], +) -> desc.SourceCodeInfo.Location: + return metadata.Metadata( + documentation=desc.SourceCodeInfo.Location( + leading_comments=leading, + trailing_comments=trailing, + leading_detached_comments=detached, + ), + ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index b50d0b4a9024..eccae782cb80 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -28,6 +28,13 @@ from gapic.schema import naming from gapic.schema import wrappers +from test_utils.test_utils import ( + make_field_pb2, + make_file_pb2, + make_message_pb2, + make_naming, +) + def test_api_build(): # Put together a couple of minimal protos. @@ -979,47 +986,3 @@ def test_enums(): assert enum.values[1].meta.doc == 'This is the one value.' assert enum.values[2].name == 'THREE' assert enum.values[2].meta.doc == '' - - -def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, - messages: Sequence[descriptor_pb2.DescriptorProto] = (), - enums: Sequence[descriptor_pb2.EnumDescriptorProto] = (), - services: Sequence[descriptor_pb2.ServiceDescriptorProto] = (), - locations: Sequence[descriptor_pb2.SourceCodeInfo.Location] = (), - ) -> descriptor_pb2.FileDescriptorProto: - return descriptor_pb2.FileDescriptorProto( - name=name, - package=package, - message_type=messages, - enum_type=enums, - service=services, - source_code_info=descriptor_pb2.SourceCodeInfo(location=locations), - ) - - -def make_message_pb2( - name: str, - fields: tuple = (), - **kwargs -) -> descriptor_pb2.DescriptorProto: - return descriptor_pb2.DescriptorProto(name=name, field=fields, **kwargs) - - -def make_field_pb2(name: str, number: int, - type: int = 11, # 11 == message - type_name: str = None, - ) -> descriptor_pb2.FieldDescriptorProto: - return 
descriptor_pb2.FieldDescriptorProto( - name=name, - number=number, - type=type, - type_name=type_name, - ) - - -def make_naming(**kwargs) -> naming.Naming: - kwargs.setdefault('name', 'Hatstand') - kwargs.setdefault('namespace', ('Google', 'Cloud')) - kwargs.setdefault('version', 'v1') - kwargs.setdefault('product_name', 'Hatstand') - return naming.NewNaming(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index a16dc7668429..a693e295b877 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -16,6 +16,8 @@ from google.protobuf import descriptor_pb2 +from test_utils.test_utils import make_doc_meta + from gapic.schema import metadata from gapic.schema import naming @@ -181,18 +183,3 @@ def test_doc_trailing_trumps_detached(): def test_doc_detached_joined(): meta = make_doc_meta(detached=['foo', 'bar']) assert meta.doc == 'foo\n\nbar' - - -def make_doc_meta( - *, - leading: str = '', - trailing: str = '', - detached: typing.List[str] = [], -) -> descriptor_pb2.SourceCodeInfo.Location: - return metadata.Metadata( - documentation=descriptor_pb2.SourceCodeInfo.Location( - leading_comments=leading, - trailing_comments=trailing, - leading_detached_comments=detached, - ), - ) diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 5b80fa32cc5d..8418f3730051 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -19,6 +19,8 @@ from gapic.generator import options from gapic.schema import naming +from test_utils.test_utils import make_naming + def test_long_name(): n = make_naming(name='Genie', namespace=['Agrabah', 'Lamp']) @@ -231,11 +233,3 @@ def test_build_factory(): opts=options.Options() ) assert new.versioned_module_name == 
'mollusc_v1alpha1' - - -def make_naming(klass=naming.NewNaming, **kwargs) -> naming.Naming: - kwargs.setdefault('name', 'Hatstand') - kwargs.setdefault('namespace', ('Google', 'Cloud')) - kwargs.setdefault('version', 'v1') - kwargs.setdefault('product_name', 'Hatstand') - return klass(**kwargs) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 37d0aeacd586..0602a09e9c0f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -19,6 +19,8 @@ from gapic.schema import metadata from gapic.schema import wrappers +from test_utils.test_utils import make_enum + def test_enum_properties(): enum_type = make_enum(name='Color') @@ -38,26 +40,3 @@ def test_enum_ident(): message = make_enum('Baz', package='foo.v1', module='bar') assert str(message.ident) == 'bar.Baz' assert message.ident.sphinx == '~.bar.Baz' - - -def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - values: Tuple[str, int] = (), meta: metadata.Metadata = None, - ) -> wrappers.EnumType: - enum_value_pbs = [ - descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) - for i in values - ] - enum_pb = descriptor_pb2.EnumDescriptorProto( - name=name, - value=enum_value_pbs, - ) - return wrappers.EnumType( - enum_pb=enum_pb, - values=[wrappers.EnumValueType(enum_value_pb=evpb) - for evpb in enum_value_pbs], - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), - ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 8ddc74ebf4d9..7104c735bec5 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -22,6 +22,11 @@ from 
gapic.schema import metadata from gapic.schema import wrappers +from test_utils.test_utils import ( + make_field, + make_message, +) + def test_field_properties(): field = make_field(name='my_field', number=1, type='TYPE_BOOL') @@ -236,36 +241,3 @@ def test_mock_value_message(): message=message, ) assert field.mock_value == 'bogus.Message(foo=324)' - - -def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: - T = descriptor_pb2.FieldDescriptorProto.Type - kwargs.setdefault('name', 'my_field') - kwargs.setdefault('number', 1) - kwargs.setdefault('type', T.Value('TYPE_BOOL')) - if isinstance(kwargs['type'], str): - kwargs['type'] = T.Value(kwargs['type']) - field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) - field = wrappers.Field(field_pb=field_pb, message=message, enum=enum) - return field - - -def make_message( - name, package='foo.bar.v1', module='baz', fields=(), meta=None, options=None -) -> wrappers.MessageType: - message_pb = descriptor_pb2.DescriptorProto( - name=name, - field=[i.field_pb for i in fields], - options=options, - ) - return wrappers.MessageType( - message_pb=message_pb, - fields=collections.OrderedDict((i.name, i) for i in fields), - nested_messages={}, - nested_enums={}, - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), - ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 3e1de39e6d8f..9aab430ddba7 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -23,6 +23,12 @@ from gapic.schema import metadata from gapic.schema import wrappers +from test_utils.test_utils import ( + make_enum, + make_field, + make_message, +) + def test_message_properties(): message = make_message('MyMessage') @@ -153,69 +159,3 @@ def test_field_map(): 
entry_field = make_field('foos', message=entry_msg, repeated=True) assert entry_msg.map assert entry_field.map - - -def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, - options: descriptor_pb2.MethodOptions = None, - ) -> wrappers.MessageType: - message_pb = descriptor_pb2.DescriptorProto( - name=name, - field=[i.field_pb for i in fields], - options=options, - ) - return wrappers.MessageType( - message_pb=message_pb, - fields=collections.OrderedDict((i.name, i) for i in fields), - nested_messages={}, - nested_enums={}, - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), - ) - - -def make_field(name: str, repeated: bool = False, - message: wrappers.MessageType = None, - enum: wrappers.EnumType = None, - meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: - if message: - kwargs['type_name'] = str(message.meta.address) - if enum: - kwargs['type_name'] = str(enum.meta.address) - field_pb = descriptor_pb2.FieldDescriptorProto( - name=name, - label=3 if repeated else 1, - **kwargs - ) - return wrappers.Field( - enum=enum, - field_pb=field_pb, - message=message, - meta=meta or metadata.Metadata(), - ) - - -def make_enum(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - values: Tuple[str, int] = (), meta: metadata.Metadata = None, - ) -> wrappers.EnumType: - enum_value_pbs = [ - descriptor_pb2.EnumValueDescriptorProto(name=i[0], number=i[1]) - for i in values - ] - enum_pb = descriptor_pb2.EnumDescriptorProto( - name=name, - value=enum_value_pbs, - ) - return wrappers.EnumType( - enum_pb=enum_pb, - values=[wrappers.EnumValueType(enum_value_pb=evpb) - for evpb in enum_value_pbs], - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), - ) diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 6003b51a0bf7..7086b73747b0 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -15,8 +15,6 @@ import collections from typing import Sequence -from google.api import annotations_pb2 -from google.api import client_pb2 from google.api import field_behavior_pb2 from google.api import http_pb2 from google.protobuf import descriptor_pb2 @@ -24,6 +22,12 @@ from gapic.schema import metadata from gapic.schema import wrappers +from test_utils.test_utils import ( + make_field, + make_message, + make_method, +) + def test_method_types(): input_msg = make_message(name='Input', module='baz') @@ -296,82 +300,3 @@ def test_method_legacy_flattened_fields(): ]) assert method.legacy_flattened_fields == expected - - -def make_method( - name: str, input_message: wrappers.MessageType = None, - output_message: wrappers.MessageType = None, - package: str = 'foo.bar.v1', module: str = 'baz', - http_rule: http_pb2.HttpRule = None, - signatures: Sequence[str] = (), - **kwargs) -> wrappers.Method: - # Use default input and output messages if they are not provided. - input_message = input_message or make_message('MethodInput') - output_message = output_message or make_message('MethodOutput') - - # Create the method pb2. - method_pb = descriptor_pb2.MethodDescriptorProto( - name=name, - input_type=str(input_message.meta.address), - output_type=str(output_message.meta.address), - **kwargs - ) - - # If there is an HTTP rule, process it. - if http_rule: - ext_key = annotations_pb2.http - method_pb.options.Extensions[ext_key].MergeFrom(http_rule) - - # If there are signatures, include them. - for sig in signatures: - ext_key = client_pb2.method_signature - method_pb.options.Extensions[ext_key].append(sig) - - # Instantiate the wrapper class. 
- return wrappers.Method( - method_pb=method_pb, - input=input_message, - output=output_message, - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - parent=(f'{name}Service',), - )), - ) - - -def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - fields: Sequence[wrappers.Field] = (), - ) -> wrappers.MessageType: - message_pb = descriptor_pb2.DescriptorProto( - name=name, - field=[i.field_pb for i in fields], - ) - return wrappers.MessageType( - message_pb=message_pb, - nested_messages={}, - nested_enums={}, - fields=collections.OrderedDict((i.name, i) for i in fields), - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), - ) - - -def make_field(name: str, repeated: bool = False, - meta: metadata.Metadata = None, - message: wrappers.MessageType = None, - **kwargs) -> wrappers.Method: - field_pb = descriptor_pb2.FieldDescriptorProto( - name=name, - label=3 if repeated else 1, - **kwargs - ) - return wrappers.Field( - field_pb=field_pb, - message=message, - meta=meta or metadata.Metadata(), - ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index a03716a6b345..0890109c0aba 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -16,15 +16,19 @@ import itertools import typing -from google.api import annotations_pb2 -from google.api import client_pb2 -from google.api import http_pb2 from google.api import resource_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import imp -from gapic.schema import metadata -from gapic.schema import wrappers + +from test_utils.test_utils import ( + get_method, + make_field, + make_message, + make_method, + make_service, + make_service_with_method_options, 
+) def test_service_properties(): @@ -87,7 +91,7 @@ def test_service_python_modules(): imports = { i.ident.python_import for m in service.methods.values() - for i in m.ref_types_legacy + for i in m.ref_types } assert imports == { imp.Import(package=('a', 'b', 'v1'), module='c'), @@ -220,224 +224,3 @@ def test_service_any_streaming(): assert service.any_client_streaming == client assert service.any_server_streaming == server - - -def make_service(name: str = 'Placeholder', host: str = '', - methods: typing.Tuple[wrappers.Method] = (), - scopes: typing.Tuple[str] = ()) -> wrappers.Service: - # Define a service descriptor, and set a host and oauth scopes if - # appropriate. - service_pb = descriptor_pb2.ServiceDescriptorProto(name=name) - if host: - service_pb.options.Extensions[client_pb2.default_host] = host - service_pb.options.Extensions[client_pb2.oauth_scopes] = ','.join(scopes) - - # Return a service object to test. - return wrappers.Service( - service_pb=service_pb, - methods={m.name: m for m in methods}, - ) - - -# FIXME (lukesneeringer): This test method is convoluted and it makes these -# tests difficult to understand and maintain. -def make_service_with_method_options( - *, - http_rule: http_pb2.HttpRule = None, - method_signature: str = '', - in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = () -) -> wrappers.Service: - # Declare a method with options enabled for long-running operations and - # field headers. - method = get_method( - 'DoBigThing', - 'foo.bar.ThingRequest', - 'google.longrunning.operations_pb2.Operation', - lro_response_type='foo.baz.ThingResponse', - lro_metadata_type='foo.qux.ThingMetadata', - in_fields=in_fields, - http_rule=http_rule, - method_signature=method_signature, - ) - - # Define a service descriptor. - service_pb = descriptor_pb2.ServiceDescriptorProto(name='ThingDoer') - - # Return a service object to test. 
- return wrappers.Service( - service_pb=service_pb, - methods={method.name: method}, - ) - - -def get_method(name: str, - in_type: str, - out_type: str, - lro_response_type: str = '', - lro_metadata_type: str = '', *, - in_fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), - http_rule: http_pb2.HttpRule = None, - method_signature: str = '', - ) -> wrappers.Method: - input_ = get_message(in_type, fields=in_fields) - output = get_message(out_type) - lro = None - - # Define a method descriptor. Set the field headers if appropriate. - method_pb = descriptor_pb2.MethodDescriptorProto( - name=name, - input_type=input_.ident.proto, - output_type=output.ident.proto, - ) - if lro_response_type: - lro = wrappers.OperationInfo( - response_type=get_message(lro_response_type), - metadata_type=get_message(lro_metadata_type), - ) - if http_rule: - ext_key = annotations_pb2.http - method_pb.options.Extensions[ext_key].MergeFrom(http_rule) - if method_signature: - ext_key = client_pb2.method_signature - method_pb.options.Extensions[ext_key].append(method_signature) - - return wrappers.Method( - method_pb=method_pb, - input=input_, - output=output, - lro=lro, - meta=input_.meta, - ) - - -def get_message(dot_path: str, *, - fields: typing.Tuple[descriptor_pb2.FieldDescriptorProto] = (), - ) -> wrappers.MessageType: - # Pass explicit None through (for lro_metadata). - if dot_path is None: - return None - - # Note: The `dot_path` here is distinct from the canonical proto path - # because it includes the module, which the proto path does not. - # - # So, if trying to test the DescriptorProto message here, the path - # would be google.protobuf.descriptor.DescriptorProto (whereas the proto - # path is just google.protobuf.DescriptorProto). 
- pieces = dot_path.split('.') - pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] - - return wrappers.MessageType( - fields={i.name: wrappers.Field( - field_pb=i, - enum=get_enum(i.type_name) if i.type_name else None, - ) for i in fields}, - nested_messages={}, - nested_enums={}, - message_pb=descriptor_pb2.DescriptorProto(name=name, field=fields), - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(pkg), - module=module, - )), - ) - - -def make_method( - name: str, input_message: wrappers.MessageType = None, - output_message: wrappers.MessageType = None, - package: str = 'foo.bar.v1', module: str = 'baz', - http_rule: http_pb2.HttpRule = None, - signatures: typing.Sequence[str] = (), - **kwargs) -> wrappers.Method: - # Use default input and output messages if they are not provided. - input_message = input_message or make_message('MethodInput') - output_message = output_message or make_message('MethodOutput') - - # Create the method pb2. - method_pb = descriptor_pb2.MethodDescriptorProto( - name=name, - input_type=str(input_message.meta.address), - output_type=str(output_message.meta.address), - **kwargs - ) - - # If there is an HTTP rule, process it. - if http_rule: - ext_key = annotations_pb2.http - method_pb.options.Extensions[ext_key].MergeFrom(http_rule) - - # If there are signatures, include them. - for sig in signatures: - ext_key = client_pb2.method_signature - method_pb.options.Extensions[ext_key].append(sig) - - # Instantiate the wrapper class. 
- return wrappers.Method( - method_pb=method_pb, - input=input_message, - output=output_message, - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=package, - module=module, - parent=(f'{name}Service',), - )), - ) - - -def make_field(name: str, repeated: bool = False, - message: wrappers.MessageType = None, - enum: wrappers.EnumType = None, - meta: metadata.Metadata = None, **kwargs) -> wrappers.Method: - if message: - kwargs['type_name'] = str(message.meta.address) - if enum: - kwargs['type_name'] = str(enum.meta.address) - field_pb = descriptor_pb2.FieldDescriptorProto( - name=name, - label=3 if repeated else 1, - **kwargs - ) - return wrappers.Field( - enum=enum, - field_pb=field_pb, - message=message, - meta=meta or metadata.Metadata(), - ) - - -def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - fields: typing.Sequence[wrappers.Field] = (), - meta: metadata.Metadata = None, - options: descriptor_pb2.MethodOptions = None, - ) -> wrappers.MessageType: - message_pb = descriptor_pb2.DescriptorProto( - name=name, - field=[i.field_pb for i in fields], - options=options, - ) - return wrappers.MessageType( - message_pb=message_pb, - fields=collections.OrderedDict((i.name, i) for i in fields), - nested_messages={}, - nested_enums={}, - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), - ) - - -def get_enum(dot_path: str) -> wrappers.EnumType: - pieces = dot_path.split('.') - pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] - return wrappers.EnumType( - enum_pb=descriptor_pb2.EnumDescriptorProto(name=name), - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(pkg), - module=module, - )), - values=[], - ) From 7457c371eaaa6e1a4f5ec254e9e2cfefb9e3d224 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 23 Mar 2020 10:43:01 -0700 Subject: [PATCH 0231/1339] Fix for flattened unit tests with deeply nested 
request messages (#346) Expands the unit-test visible method ref types to include _all_ recursive types. Do NOT use this deep nesting in the client submodule. The client ONLY needs to import, for each method, the request type, response type, and the types of any flattened fields. --- packages/gapic-generator/gapic/schema/api.py | 9 ++-- .../gapic-generator/gapic/schema/wrappers.py | 45 +++++++++++++---- .../%sub/services/%service/client.py.j2 | 2 +- .../gapic-generator/gapic/utils/__init__.py | 2 + .../gapic/utils/reserved_names.py | 18 +++++++ .../gapic-generator/test_utils/test_utils.py | 19 +++++--- .../unit/schema/wrappers/test_message.py | 37 +++++++++++++- .../tests/unit/schema/wrappers/test_method.py | 48 +++++++++++++++++++ 8 files changed, 157 insertions(+), 23 deletions(-) create mode 100644 packages/gapic-generator/gapic/utils/reserved_names.py diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 6a987b3b5028..734ccf337744 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -19,10 +19,8 @@ import collections import dataclasses -import keyword import os import sys -from itertools import chain from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple from google.api_core import exceptions # type: ignore @@ -37,6 +35,7 @@ from gapic.schema import naming as api_naming from gapic.utils import cached_property from gapic.utils import to_snake_case +from gapic.utils import RESERVED_NAMES @dataclasses.dataclass(frozen=True) @@ -130,13 +129,13 @@ def names(self) -> FrozenSet[str]: # from distinct packages. 
modules: Dict[str, Set[str]] = collections.defaultdict(set) for m in self.all_messages.values(): - for t in m.field_types: + for t in m.recursive_field_types: modules[t.ident.module].add(t.ident.package) answer.update( module_name for module_name, packages in modules.items() - if len(packages) > 1 + if len(packages) > 1 or module_name in RESERVED_NAMES ) # Return the set of collision names. @@ -229,7 +228,7 @@ def disambiguate_keyword_fname( visited_names: Container[str]) -> str: path, fname = os.path.split(full_path) name, ext = os.path.splitext(fname) - if name in keyword.kwlist or full_path in visited_names: + if name in RESERVED_NAMES or full_path in visited_names: name += "_" full_path = os.path.join(path, name + ext) if full_path in visited_names: diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index c177b09bf1a9..b7f415b0835c 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -31,7 +31,7 @@ import dataclasses import re from itertools import chain -from typing import (cast, Dict, FrozenSet, List, Mapping, Optional, +from typing import (cast, Dict, FrozenSet, Iterable, List, Mapping, Optional, Sequence, Set, Union) from google.api import annotations_pb2 # type: ignore @@ -225,7 +225,6 @@ def __hash__(self): @utils.cached_property def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: - """Return all composite fields used in this proto's messages.""" answer = tuple( field.type for field in self.fields.values() @@ -234,6 +233,23 @@ def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: return answer + @utils.cached_property + def recursive_field_types(self) -> Sequence[ + Union['MessageType', 'EnumType'] + ]: + """Return all composite fields used in this proto's messages.""" + types: List[Union['MessageType', 'EnumType']] = [] + stack = [iter(self.fields.values())] + while stack: + fields_iter = 
stack.pop() + for field in fields_iter: + if field.message and field.type not in types: + stack.append(iter(field.message.fields.values())) + if not field.is_primitive: + types.append(field.type) + + return tuple(types) + @property def map(self) -> bool: """Return True if the given message is a map, False otherwise.""" @@ -654,19 +670,30 @@ def paged_result_field(self) -> Optional[Field]: @utils.cached_property def ref_types(self) -> Sequence[Union[MessageType, EnumType]]: + return self._ref_types(True) + + @utils.cached_property + def flat_ref_types(self) -> Sequence[Union[MessageType, EnumType]]: + return self._ref_types(False) + + def _ref_types(self, recursive: bool) -> Sequence[Union[MessageType, EnumType]]: """Return types referenced by this method.""" # Begin with the input (request) and output (response) messages. - answer = [self.input] + answer: List[Union[MessageType, EnumType]] = [self.input] + types: Iterable[Union[MessageType, EnumType]] = ( + self.input.recursive_field_types if recursive + else ( + f.type + for f in self.flattened_fields.values() + if f.message or f.enum + ) + ) + answer.extend(types) + if not self.void: answer.append(self.client_output) answer.extend(self.client_output.field_types) - answer.extend( - field.type - for field in self.flattened_fields.values() - if field.message or field.enum - ) - # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. 
if self.lro: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 735f70084764..1f821b8af91d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -14,7 +14,7 @@ from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} -{% for ref_type in method.ref_types -%} +{% for ref_type in method.flat_ref_types -%} {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 70603228b4b6..315c575e2b06 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -21,6 +21,7 @@ from gapic.utils.filename import to_valid_module_name from gapic.utils.lines import sort_lines from gapic.utils.lines import wrap +from gapic.utils.reserved_names import RESERVED_NAMES from gapic.utils.rst import rst @@ -29,6 +30,7 @@ 'doc', 'empty', 'partition', + 'RESERVED_NAMES', 'rst', 'sort_lines', 'to_snake_case', diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py new file mode 100644 index 000000000000..b146ce08e56f --- /dev/null +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -0,0 +1,18 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import keyword + + +RESERVED_NAMES = frozenset(keyword.kwlist) diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 32c99fe82e35..ffe6bae078e4 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -145,13 +145,15 @@ def get_message(dot_path: str, *, def make_method( - name: str, input_message: wrappers.MessageType = None, + name: str, + input_message: wrappers.MessageType = None, output_message: wrappers.MessageType = None, package: typing.Union[typing.Tuple[str], str] = 'foo.bar.v1', module: str = 'baz', http_rule: http_pb2.HttpRule = None, signatures: typing.Sequence[str] = (), - **kwargs) -> wrappers.Method: + **kwargs +) -> wrappers.Method: # Use default input and output messages if they are not provided. 
input_message = input_message or make_message('MethodInput') output_message = output_message or make_message('MethodOutput') @@ -229,11 +231,14 @@ def make_field( ) -def make_message(name: str, package: str = 'foo.bar.v1', module: str = 'baz', - fields: typing.Sequence[wrappers.Field] = (), - meta: metadata.Metadata = None, - options: desc.MethodOptions = None, - ) -> wrappers.MessageType: +def make_message( + name: str, + package: str = 'foo.bar.v1', + module: str = 'baz', + fields: typing.Sequence[wrappers.Field] = (), + meta: metadata.Metadata = None, + options: desc.MethodOptions = None, +) -> wrappers.MessageType: message_pb = desc.DescriptorProto( name=name, field=[i.field_pb for i in fields], diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 9aab430ddba7..99b0751e0530 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -70,7 +70,15 @@ def test_get_field(): def test_field_types(): # Create the inner message. - inner_msg = make_message('InnerMessage', fields=()) + inner_msg = make_message( + 'InnerMessage', + fields=( + make_field( + 'hidden_message', + message=make_message('HiddenMessage'), + ), + ) + ) inner_enum = make_enum('InnerEnum') # Create the outer message, which contains an Inner as a field. 
@@ -87,6 +95,33 @@ def test_field_types(): assert inner_enum in outer.field_types +def test_field_types_recursive(): + enumeration = make_enum('Enumeration') + innest_msg = make_message( + 'InnestMessage', + fields=( + make_field('enumeration', enum=enumeration), + ) + ) + inner_msg = make_message( + 'InnerMessage', + fields=( + make_field('innest_message', message=innest_msg), + ) + ) + topmost_msg = make_message( + 'TopmostMessage', + fields=( + make_field('inner_message', message=inner_msg), + make_field('uninteresting') + ) + ) + + actual = {t.name for t in topmost_msg.recursive_field_types} + expected = {t.name for t in (enumeration, innest_msg, inner_msg)} + assert actual == expected + + def test_get_field_recursive(): # Create the inner message. inner_fields = (make_field('zero'), make_field('one')) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 7086b73747b0..c2ed32c33d03 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -23,6 +23,7 @@ from gapic.schema import wrappers from test_utils.test_utils import ( + make_enum, make_field, make_message, make_method, @@ -151,6 +152,53 @@ def test_method_paged_result_ref_types(): } +def test_flattened_ref_types(): + method = make_method( + 'IdentifyMollusc', + input_message=make_message( + 'IdentifyMolluscRequest', + fields=( + make_field( + 'cephalopod', + message=make_message( + 'Cephalopod', + fields=( + make_field('mass_kg', type='TYPE_INT32'), + make_field( + 'squid', + number=2, + message=make_message('Squid'), + ), + make_field( + 'clam', + number=3, + message=make_message('Clam'), + ), + ), + ), + ), + make_field( + 'stratum', + enum=make_enum( + 'Stratum', + ) + ), + ), + ), + signatures=('cephalopod.squid,stratum',), + output_message=make_message('Mollusc'), + ) + + expected_flat_ref_type_names = { + 
'IdentifyMolluscRequest', + 'Squid', + 'Stratum', + 'Mollusc', + } + actual_flat_ref_type_names = {t.name for t in method.flat_ref_types} + assert expected_flat_ref_type_names == actual_flat_ref_type_names + + def test_method_field_headers_none(): method = make_method('DoSomething') assert isinstance(method.field_headers, collections.abc.Sequence) From 391889446799e9843fcc01f4ae8325d4de2164ae Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 23 Mar 2020 16:15:08 -0700 Subject: [PATCH 0232/1339] Minor optimization of recursive_field_types (#347) --- packages/gapic-generator/gapic/cli/generate.py | 1 - packages/gapic-generator/gapic/schema/metadata.py | 13 +++++++++++++ packages/gapic-generator/gapic/schema/wrappers.py | 12 +++++++++--- 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index 8c5a98925ba3..9ca2f8217467 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -37,7 +37,6 @@ def generate( request: typing.BinaryIO, output: typing.BinaryIO) -> None: """Generate a full API client description.""" - # Load the protobuf CodeGeneratorRequest. req = plugin_pb2.CodeGeneratorRequest.FromString(request.read()) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index d5dbf88c6d59..349d7f13f9ad 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -52,6 +52,19 @@ def __eq__(self, other) -> bool: return all([getattr(self, i) == getattr(other, i) for i in ('name', 'module', 'module_path', 'package', 'parent')]) + def __hash__(self): + # Do NOT include collisions; they are not relevant. 
+ return hash( + ( + self.name, + self.module, + self.module_path, + self.package, + self.parent, + self.api_naming, + ) + ) + def __str__(self) -> str: """Return the Python identifier for this type. diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index b7f415b0835c..61d762322659 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -221,7 +221,8 @@ def __getattr__(self, name): return getattr(self.message_pb, name) def __hash__(self): - return hash(self.name) + # Identity is sufficiently unambiguous. + return hash(self.ident) @utils.cached_property def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: @@ -238,7 +239,8 @@ def recursive_field_types(self) -> Sequence[ Union['MessageType', 'EnumType'] ]: """Return all composite fields used in this proto's messages.""" - types: List[Union['MessageType', 'EnumType']] = [] + types: Set[Union['MessageType', 'EnumType']] = set() + stack = [iter(self.fields.values())] while stack: fields_iter = stack.pop() @@ -246,7 +248,7 @@ def recursive_field_types(self) -> Sequence[ if field.message and field.type not in types: stack.append(iter(field.message.fields.values())) if not field.is_primitive: - types.append(field.type) + types.add(field.type) return tuple(types) @@ -391,6 +393,10 @@ class EnumType: default_factory=metadata.Metadata, ) + def __hash__(self): + # Identity is sufficiently unambiguous. + return hash(self.ident) + def __getattr__(self, name): return getattr(self.enum_pb, name) From 0e4ede25176531b4588a7b794da74bdc8ae08299 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 24 Mar 2020 12:16:19 -0700 Subject: [PATCH 0233/1339] Test and impl for recursive and inclusive resource path helper funcs (#348) More thoroughly look for resource messages: recursively look through input messages for resources and also check the input messages themselves. 
--- .../gapic-generator/gapic/schema/wrappers.py | 20 +++- .../unit/schema/wrappers/test_service.py | 92 +++++++++++++------ 2 files changed, 79 insertions(+), 33 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 61d762322659..625274710970 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -400,6 +400,13 @@ def __hash__(self): def __getattr__(self, name): return getattr(self.enum_pb, name) + @property + def resource_path(self) -> Optional[str]: + # This is a minor duck-typing workaround for the resource_messages + # property in the Service class: we need to check fields recursively + # to see if they're resources, and recursive_field_types includes enums + return None + @property def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" @@ -836,11 +843,18 @@ def names(self) -> FrozenSet[str]: def resource_messages(self) -> FrozenSet[MessageType]: """Returns all the resource message types used in all request fields in the service.""" + def gen_resources(message): + if message.resource_path: + yield message + + for type_ in message.recursive_field_types: + if type_.resource_path: + yield type_ + return frozenset( - field.message + resource_msg for method in self.methods.values() - for field in method.input.fields.values() - if field.message and field.message.resource_path + for resource_msg in gen_resources(method.input) ) @utils.cached_property diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 0890109c0aba..e78a477b77ee 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -23,6 +23,7 @@ from test_utils.test_utils import ( get_method, + make_enum, make_field, 
make_message, make_method, @@ -156,47 +157,78 @@ def test_module_name(): def test_resource_messages(): - # Resources - squid_options = descriptor_pb2.MessageOptions() - squid_options.Extensions[resource_pb2.resource].pattern.append( - "squid/{squid}") - squid_message = make_message("Squid", options=squid_options) - clam_options = descriptor_pb2.MessageOptions() - clam_options.Extensions[resource_pb2.resource].pattern.append( - "clam/{clam}") - clam_message = make_message("Clam", options=clam_options) - whelk_options = descriptor_pb2.MessageOptions() - whelk_options.Extensions[resource_pb2.resource].pattern.append( - "whelk/{whelk}") - whelk_message = make_message("Whelk", options=whelk_options) - - # Not resources - octopus_message = make_message("Octopus") - oyster_message = make_message("Oyster") - nudibranch_message = make_message("Nudibranch") + # Resources are labeled via an options extension + def make_resource_opts(*args): + opts = descriptor_pb2.MessageOptions() + opts.Extensions[resource_pb2.resource].pattern.append( + "/".join("{{{arg}}}/{arg}" for arg in args) + ) + return opts + + # Regular, top level resource + squid_resource = make_message("Squid", options=make_resource_opts("squid")) + squid_request = make_message( + "CreateSquid", + fields=( + make_field('squid', message=squid_resource), + ), + ) + + # Nested resource + squamosa_message = make_message( + "Squamosa", + options=make_resource_opts("clam", "squamosa"), + ) + clam_resource = make_message( + "Clam", + options=make_resource_opts("clam"), + fields=( + make_field('squamosa', message=squamosa_message), + ), + ) + clam_request = make_message( + 'CreateClam', + fields=( + make_field('clam', message=clam_resource), + # Red herring, not resources :) + make_field('zone', 2, enum=make_enum('Zone')), + make_field('pearls', 3, True, message=make_message('Pearl')), + ), + ) + + # Some special APIs have request messages that _are_ resources. 
+ whelk_resource = make_message("Whelk", options=make_resource_opts("whelk")) + + # Not a resource + octopus_request = make_message( + "CreateOctopus", + fields=( + make_field('Octopus', message=make_message('Octopus')), + ), + ) service = make_service( 'Molluscs', methods=( make_method( - f"Get{message.name}", - input_message=make_message( - f"{message.name}Request", - fields=[make_field(message.name, message=message)] - ) + f"{message.name}", + input_message=message, ) for message in ( - squid_message, - clam_message, - whelk_message, - octopus_message, - oyster_message, - nudibranch_message + squid_request, + clam_request, + whelk_resource, + octopus_request, ) ) ) - expected = {squid_message, clam_message, whelk_message} + expected = { + squid_resource, + clam_resource, + whelk_resource, + squamosa_message, + } actual = service.resource_messages assert expected == actual From a24be588f8c4cba7ff52e0c8a855b7ea50e1e9d8 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 24 Mar 2020 14:19:35 -0700 Subject: [PATCH 0234/1339] Version bump (#349) Bump version to 0.20.0 --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index c5204a5ad83f..8dd28982e926 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.19.0', + version='0.20.0', license='Apache 2.0', author='Luke Sneeringer', author_email='lukesneeringer@google.com', From ad7a0a9bd8b3ace020521decbefdd1f19ac9abb7 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 27 Mar 2020 11:27:53 -0700 Subject: [PATCH 0235/1339] Ammend ownership information (#351) Edit the author and contact information to reflect the current maintainer Remove the disclaimer --- packages/gapic-generator/README.rst | 6 ------ packages/gapic-generator/setup.py | 4 ++-- 2 files changed, 2 insertions(+), 8 deletions(-) diff 
--git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index 545ae1e8c134..b574be800266 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -28,12 +28,6 @@ Documentation .. _documentation: https://gapic-generator-python.readthedocs.io/ -Disclaimer ----------- - -This is not an official Google product. - - .. |release level| image:: https://img.shields.io/badge/release%20level-beta-yellow.svg?style=flat :target: https://cloud.google.com/terms/launch-stages .. |docs| image:: https://readthedocs.org/projects/gapic-generator-python/badge/?version=latest diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8dd28982e926..63ad642577a3 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -27,8 +27,8 @@ name='gapic-generator', version='0.20.0', license='Apache 2.0', - author='Luke Sneeringer', - author_email='lukesneeringer@google.com', + author='Dov Shlachter', + author_email='dovs@google.com', url='https://github.com/googleapis/gapic-generator-python.git', packages=find_packages(exclude=['docs', 'tests']), description='Python client library generator for APIs defined by protocol' From b4b16b5c4dfd44595b211c64a43af0503f3a3de4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 30 Mar 2020 15:36:45 -0700 Subject: [PATCH 0236/1339] Remove 'proof-of-concept' (#352) --- packages/gapic-generator/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index b574be800266..5a2a087b60c6 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -5,7 +5,7 @@ API Client Generator for Python A generator for protocol buffer described APIs for and in Python 3. 
-This is a proof-of-concept generator for API client libraries for APIs +This is a generator for API client libraries for APIs specified by `protocol buffers`_, such as those inside Google. It takes a protocol buffer (with particular annotations) and uses it to generate a client library. From c82168ab921a334188fa3e17da945a7ac60046ef Mon Sep 17 00:00:00 2001 From: Lidi Zheng Date: Wed, 1 Apr 2020 16:49:24 -0700 Subject: [PATCH 0237/1339] A minor fix for the type annotation (#353) --- packages/gapic-generator/gapic/schema/wrappers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 625274710970..09744ac3d9cd 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -32,7 +32,7 @@ import re from itertools import chain from typing import (cast, Dict, FrozenSet, Iterable, List, Mapping, Optional, - Sequence, Set, Union) + Sequence, Set, Tuple, Union) from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 @@ -606,7 +606,7 @@ def flattened_fields(self) -> Mapping[str, Field]: """Return the signature defined for this method.""" cross_pkg_request = self.input.ident.package != self.ident.package - def filter_fields(sig): + def filter_fields(sig: str) -> Iterable[Tuple[str, Field]]: for f in sig.split(','): if not f: # Special case for an empty signature From d52dfa9ae6b3a4b6e5a79b23978b46340ebc6bdf Mon Sep 17 00:00:00 2001 From: Lidi Zheng Date: Wed, 1 Apr 2020 16:57:20 -0700 Subject: [PATCH 0238/1339] Bump mypy syntax version for generated code (#354) --- packages/gapic-generator/gapic/templates/mypy.ini.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/mypy.ini.j2 b/packages/gapic-generator/gapic/templates/mypy.ini.j2 index f23e6b533aad..4505b485436b 100644 --- 
a/packages/gapic-generator/gapic/templates/mypy.ini.j2 +++ b/packages/gapic-generator/gapic/templates/mypy.ini.j2 @@ -1,3 +1,3 @@ [mypy] -python_version = 3.5 +python_version = 3.6 namespace_packages = True From 37a814ffc3e117e9da07dcc12560ae325f63b74e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 3 Apr 2020 09:50:09 -0700 Subject: [PATCH 0239/1339] Dual templates (#357) Add a parallel set of templates, used primarily for generation of https://github.com/googleads/google-ads-python/ Parameterize nox tests to take template path configuration Run pytest tests in parallel from nox --- packages/gapic-generator/.circleci/config.yml | 162 ++++++ .../%name/%version/%sub/__init__.py.j2 | 153 +++++ .../%sub/services/%service/__init__.py.j2 | 9 + .../%sub/services/%service/client.py.j2 | 308 ++++++++++ .../%sub/services/%service/pagers.py.j2 | 75 +++ .../%service/transports/__init__.py.j2 | 20 + .../services/%service/transports/base.py.j2 | 77 +++ .../services/%service/transports/grpc.py.j2 | 162 ++++++ .../%version/%sub/services/__init__.py.j2 | 1 + .../%name/%version/%sub/types/%proto.py.j2 | 42 ++ .../%name/%version/%sub/types/__init__.py.j2 | 53 ++ .../%name/%version/%sub/types/_enum.py.j2 | 6 + .../%name/%version/%sub/types/_message.py.j2 | 51 ++ .../%namespace/%name/__init__.py.j2 | 158 +++++ .../%namespace/%name/py.typed.j2 | 2 + .../gapic/ads-templates/.coveragerc.j2 | 18 + .../gapic/ads-templates/MANIFEST.in.j2 | 2 + .../gapic/ads-templates/_base.py.j2 | 6 + .../gapic/ads-templates/_license.j2 | 14 + .../docs/%name_%version/services.rst.j2 | 6 + .../docs/%name_%version/types.rst.j2 | 5 + .../gapic/ads-templates/docs/conf.py.j2 | 363 ++++++++++++ .../gapic/ads-templates/docs/index.rst.j2 | 7 + .../examples/feature_fragments.j2 | 287 +++++++++ .../gapic/ads-templates/examples/sample.py.j2 | 48 ++ .../gapic/ads-templates/mypy.ini.j2 | 3 + .../gapic/ads-templates/noxfile.py.j2 | 40 ++ .../scripts/fixup_keywords.py.j2 | 176 ++++++ 
.../gapic/ads-templates/setup.py.j2 | 46 ++ .../unit/%name_%version/%sub/__init__.py | 0 .../%name_%version/%sub/test_%service.py.j2 | 550 ++++++++++++++++++ .../gapic/generator/options.py | 14 +- packages/gapic-generator/noxfile.py | 126 ++-- 33 files changed, 2947 insertions(+), 43 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/py.typed.j2 create mode 100644 
packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/MANIFEST.in.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/_base.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/_license.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/docs/index.rst.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/scripts/fixup_keywords.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/setup.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/__init__.py create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 0da43b33429e..b6891e367037 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -44,12 +44,42 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase-unit-alternative-templates-3.6: + requires: + - unit-3.6 + - unit-3.7 + - unit-3.8 + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - showcase-unit-alternative-templates-3.7: + requires: + - unit-3.6 + - unit-3.7 + - unit-3.8 + filters: + tags: + only: /^\d+\.\d+\.\d+$/ + - 
showcase-unit-alternative-templates-3.8: + requires: + - unit-3.6 + - unit-3.7 + - unit-3.8 + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - showcase-mypy: requires: - mypy filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase-mypy-alternative-templates: + requires: + - mypy + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - showcase: requires: - docs @@ -61,6 +91,17 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase-alternative-templates: + requires: + - docs + - mypy + - showcase-unit-alternative-templates-3.6 + - showcase-unit-alternative-templates-3.7 + - showcase-unit-alternative-templates-3.8 + - showcase-mypy-alternative-templates + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - docs: filters: tags: @@ -188,6 +229,31 @@ jobs: - run: name: Run showcase tests. command: nox -s showcase + showcase-alternative-templates: + docker: + - image: python:3.8-slim + - image: gcr.io/gapic-images/gapic-showcase:0.6.1 + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install nox. + command: pip install nox + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Run showcase tests. + command: nox -s showcase_alternative_templates showcase-unit-3.6: docker: - image: python:3.6-slim @@ -260,6 +326,78 @@ jobs: - run: name: Run unit tests. command: nox -s showcase_unit-3.8 + showcase-unit-alternative-templates-3.6: + docker: + - image: python:3.6-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. 
+ command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit_alternative_templates-3.6 + showcase-unit-alternative-templates-3.7: + docker: + - image: python:3.7-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit_alternative_templates-3.7 + showcase-unit-alternative-templates-3.8: + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit_alternative_templates-3.8 showcase-mypy: docker: - image: python:3.8-slim @@ -284,6 +422,30 @@ jobs: - run: name: Typecheck the generated output. 
command: nox -s showcase_mypy + showcase-mypy-alternative-templates: + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Typecheck the generated output. + command: nox -s showcase_mypy_alternative_templates unit-3.6: docker: - image: python:3.6-slim diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 new file mode 100644 index 000000000000..372ce3e2507e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 @@ -0,0 +1,153 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import re +import sys + +from itertools import chain + +def to_snake_case(s: str) -> str: + s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) + s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) + + # Numbers are a weird case; the goal is to spot when they _start_ + # some kind of name or acronym (e.g. 2FA, 3M). + # + # Find cases of a number preceded by a lower-case letter _and_ + # followed by at least two capital letters or a single capital and + # end of string. 
+ s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) + s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) + + return s.lower() + + +def from_snake_case(s): + _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') + return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER + +_lazy_name_to_package_map = { + 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', + {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', + '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.base', + '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.grpc', + {%- endfor %} +} + +_lazy_type_to_package_map = { +{%- filter sort_lines %} +{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %} +{%- for enum in proto.enums.values() %} + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ 
api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %}{%- endfor %}{%- endfilter %} +} + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted( + chain( + (from_snake_case(k) for k in _lazy_name_to_package_map if k != 'types'), + _lazy_type_to_package_map, + ['types'], + ) + ) + return all_names + elif name.endswith('Transport'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) + globals()[name] = klass + return klass + elif name.endswith('Client'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + klass = type( + name, + (sub_mod_class,), + {'__doc__': sub_mod_class.__doc__} + ) + globals()[name] = klass + return klass + elif name in _lazy_name_to_package_map: + module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') + globals()[name] = module + return module + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown sub-module {name!r}.') + + +def __dir__(): + return globals().get('__all__') or __getattr__('__all__') +{% else -%} {# do not use lazy import #} +{# Import subpackages. -#} +{% for subpackage in api.subpackages.keys() -%} +from . import {{ subpackage }} +{% endfor -%} + +{# Import services for this package. 
-#} +{% filter sort_lines -%} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +from .services.{{ service.name|snake_case }} import {{ service.client_name }} +{% endfor -%} +{% endfilter -%} + +{# Import messages and enums from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. +-#} +{% filter sort_lines -%} +{% for proto in api.protos.values() + if proto.meta.address.subpackage == api.subpackage_view -%} +{% for message in proto.messages.values() -%} +from .types.{{ proto.module_name }} import {{ message.name }} +{% endfor -%} +{% for enum in proto.enums.values() -%} +from .types.{{ proto.module_name }} import {{ enum.name }} +{% endfor -%} +{% endfor -%} +{% endfilter %} + +{# Define __all__. + This requires the full set of imported names, so we iterate over + them again. +-#} +__all__ = ( + {%- filter sort_lines %} + {%- for subpackage in api.subpackages.keys() %} + '{{ subpackage }}', + {%- endfor %} + {%- for service in api.services.values() + if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name }}', + {%- endfor %} + {%- for proto in api.protos.values() + if proto.meta.address.subpackage == api.subpackage_view %} + {%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor %} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}', + {%- endfor %} + {%- endfor %} + {%- endfilter %} +) +{% endif -%} {# lazy import #} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 new file mode 100644 index 000000000000..f9f07d44df9a --- /dev/null +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 @@ -0,0 +1,9 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from .client import {{ service.client_name }} + +__all__ = ( + '{{ service.client_name }}', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 new file mode 100644 index 000000000000..1f821b8af91d --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -0,0 +1,308 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from collections import OrderedDict +from typing import Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{% for ref_type in method.flat_ref_types -%} +{{ ref_type.ident.python_import }} +{% endfor -%} +{% endfor -%} +{% endfilter %} +from .transports.base import {{ service.name }}Transport +from .transports.grpc import {{ service.name }}GrpcTransport + + +class {{ service.client_name }}Meta(type): + """Metaclass for the {{ service.name }} client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] + _transport_registry['grpc'] = {{ service.name }}GrpcTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[{{ service.name }}Transport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): + """{{ service.meta.doc|rst(width=72, indent=4) }}""" + + DEFAULT_OPTIONS = ClientOptions.ClientOptions({% if service.host %}api_endpoint='{{ service.host }}'{% endif %}) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + + {% for message in service.resource_messages -%} + @staticmethod + def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: + """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" + return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + + {% endfor %} + + def __init__(self, *, + credentials: credentials.Credentials = None, + transport: Union[str, {{ service.name }}Transport] = None, + client_options: ClientOptions = DEFAULT_OPTIONS, + ) -> None: + """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.{{ service.name }}Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ if isinstance(transport, {{ service.name }}Transport): + if credentials: + raise ValueError('When providing a transport instance, ' + 'provide its credentials directly.') + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + host=client_options.api_endpoint{% if service.host %} or '{{ service.host }}'{% endif %}, + ) + + {% for method in service.methods.values() -%} + def {{ method.name|snake_case }}(self, + {%- if not method.client_streaming %} + request: {{ method.input.ident }} = None, + *, + {% for field in method.flattened_fields.values() -%} + {{ field.name }}: {{ field.ident }} = None, + {% endfor -%} + {%- else %} + requests: Iterator[{{ method.input.ident }}] = None, + *, + {% endif -%} + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + {%- if not method.server_streaming %} + ) -> {{ method.client_output.ident }}: + {%- else %} + ) -> Iterable[{{ method.client_output.ident }}]: + {%- endif %} + r"""{{ method.meta.doc|rst(width=72, indent=8) }} + + Args: + {%- if not method.client_streaming %} + request (:class:`{{ method.input.ident.sphinx }}`): + The request object.{{ ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {% for key, field in method.flattened_fields.items() -%} + {{ field.name }} (:class:`{{ field.ident.sphinx }}`): + {{ field.meta.doc|rst(width=72, indent=16, nl=False) }} + This corresponds to the ``{{ key }}`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + {% endfor -%} + {%- else %} + requests (Iterator[`{{ method.input.ident.sphinx }}`]): + The request object iterator.{{ ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {%- endif %} + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + {%- if not method.void %} + + Returns: + {%- if not method.server_streaming %} + {{ method.client_output.ident.sphinx }}: + {%- else %} + Iterable[{{ method.client_output.ident.sphinx }}]: + {%- endif %} + {{ method.client_output.meta.doc|rst(width=72, indent=16) }} + {%- endif %} + """ + {%- if not method.client_streaming %} + # Create or coerce a protobuf request object. + {% if method.flattened_fields -%} + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + {% endif -%} + {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = {{ method.input.ident }}(**request) + {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} + elif not request: + request = {{ method.input.ident }}() + {% endif -%}{# Cross-package req and flattened fields #} + {%- else %} + request = {{ method.input.ident }}(request) + {% endif %} {# different request package #} + + {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} + {% if method.flattened_fields -%} + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ {% endif -%} + {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }} is not None: + request.{{ key }} = {{ field.name }} + {%- endfor %} + {# They can be _extended_, however -#} + {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }}: + request.{{ key }}.extend({{ field.name }}) + {%- endfor %} + {%- endif %} + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.{{ method.name|snake_case }}, + {%- if method.retry %} + default_retry=retries.Retry( + {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} + {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} + {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + predicate=retries.if_exception_type( + {%- filter sort_lines %} + {%- for ex in method.retry.retryable_exceptions %} + exceptions.{{ ex.__name__ }}, + {%- endfor %} + {%- endfilter %} + ), + ), + {%- endif %} + default_timeout={{ method.timeout }}, + client_info=_client_info, + ) + {%- if method.field_headers %} + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {%- for field_header in method.field_headers %} + ('{{ field_header }}', request.{{ field_header }}), + {%- endfor %} + )), + ) + {%- endif %} + + # Send the request. + {% if not method.void %}response = {% endif %}rpc( + {%- if not method.client_streaming %} + request, + {%- else %} + requests, + {%- endif %} + retry=retry, + timeout=timeout, + metadata=metadata, + ) + {%- if method.lro %} + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + {{ method.lro.response_type.ident }}, + metadata_type={{ method.lro.metadata_type.ident }}, + ) + {%- elif method.paged_result_field %} + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = {{ method.client_output.ident }}( + method=rpc, + request=request, + response=response, + ) + {%- endif %} + {%- if not method.void %} + + # Done; return the response. + return response + {%- endif %} + {{ '\n' }} + {% endfor %} + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + '{{ api.naming.warehouse_package_name }}', + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + '{{ service.client_name }}', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 new file mode 100644 index 000000000000..0e7ef018a775 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -0,0 +1,75 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% for method in service.methods.values() | selectattr('paged_result_field') -%} +{% if loop.first -%} +{# This lives within the loop in order to ensure that this template + is empty if there are no paged methods. 
+ -#} +from typing import Any, Callable, Iterable + +{% filter sort_lines -%} +{% for method in service.methods.values() | selectattr('paged_result_field') -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{{ method.paged_result_field.message.ident.python_import }} +{% endfor %} +{% endfilter -%} +{% endif %} + +class {{ method.name }}Pager: + """A pager for iterating through ``{{ method.name|snake_case }}`` requests. + + This class thinly wraps an initial + :class:`{{ method.output.ident.sphinx }}` object, and + provides an ``__iter__`` method to iterate through its + ``{{ method.paged_result_field.name }}`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``{{ method.name }}`` requests and continue to iterate + through the ``{{ method.paged_result_field.name }}`` field on the + corresponding responses. + + All the usual :class:`{{ method.output.ident.sphinx }}` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[[{{ method.input.ident }}], + {{ method.output.ident }}], + request: {{ method.input.ident }}, + response: {{ method.output.ident }}): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`{{ method.input.ident.sphinx }}`): + The initial request object. + response (:class:`{{ method.output.ident.sphinx }}`): + The initial response object. 
+ """ + self._method = method + self._request = {{ method.input.ident }}(request) + self._response = response + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[{{ method.output.ident }}]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request) + yield self._response + + def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: + for page in self.pages: + yield from page.{{ method.paged_result_field.name }} + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + +{% endfor %} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 new file mode 100644 index 000000000000..470cde5d1969 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 @@ -0,0 +1,20 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from collections import OrderedDict +from typing import Dict, Type + +from .base import {{ service.name }}Transport +from .grpc import {{ service.name }}GrpcTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] +_transport_registry['grpc'] = {{ service.name }}GrpcTransport + + +__all__ = ( + '{{ service.name }}Transport', + '{{ service.name }}GrpcTransport', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 new file mode 100644 index 000000000000..694e0a16645f --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -0,0 +1,77 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import abc +import typing + +from google import auth +{%- if service.has_lro %} +from google.api_core import operations_v1 # type: ignore +{%- endif %} +from google.auth import credentials # type: ignore + +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor -%} +{% endfilter %} + +class {{ service.name }}Transport(metaclass=abc.ABCMeta): + """Abstract transport class for {{ service.name }}.""" + + AUTH_SCOPES = ( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ) + + def __init__( + self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + ) -> None: + """Instantiate the transport. + + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + """ + # Save the hostname. 
Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + + # Save the credentials. + self._credentials = credentials + {%- if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Return the client designed to process long-running operations.""" + raise NotImplementedError + {%- endif %} + {%- for method in service.methods.values() %} + + @property + def {{ method.name|snake_case }}(self) -> typing.Callable[ + [{{ method.input.ident }}], + {{ method.output.ident }}]: + raise NotImplementedError + {%- endfor %} + + +__all__ = ( + '{{ service.name }}Transport', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 new file mode 100644 index 000000000000..bd3b074e7414 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -0,0 +1,162 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from typing import Callable, Dict + +from google.api_core import grpc_helpers # type: ignore +{%- if service.has_lro %} +from google.api_core import operations_v1 # type: ignore +{%- endif %} +from google.auth import credentials # type: ignore + +import grpc # type: ignore + +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor -%} +{% endfilter %} +from .base import {{ service.name }}Transport + + +class {{ service.name }}GrpcTransport({{ service.name }}Transport): + """gRPC backend transport for {{ service.name }}. 
+ + {{ service.meta.doc|rst(width=72, indent=4) }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + channel: grpc.Channel = None) -> None: + """Instantiate the transport. + + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + """ + # Sanity check: Ensure that channel and credentials are not both + # provided. + if channel: + credentials = False + + # Run the base constructor. + super().__init__(host=host, credentials=credentials) + self._stubs = {} # type: Dict[str, Callable] + + # If a channel was explicitly provided, set it. + if channel: + self._grpc_channel = channel + + @classmethod + def create_channel(cls, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service.
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + """ + return grpc_helpers.create_channel( + host, + credentials=credentials, + scopes=cls.AUTH_SCOPES, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, '_grpc_channel'): + self._grpc_channel = self.create_channel( + self._host, + credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + {%- if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if 'operations_client' not in self.__dict__: + self.__dict__['operations_client'] = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self.__dict__['operations_client'] + {%- endif %} + {%- for method in service.methods.values() %} + + @property + def {{ method.name|snake_case }}(self) -> Callable[ + [{{ method.input.ident }}], + {{ method.output.ident }}]: + r"""Return a callable for the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=40, indent=8) }} + {{- ' ' -}} method over gRPC. + + {{ method.meta.doc|rst(width=72, indent=8) }} + + Returns: + Callable[[~.{{ method.input.name }}], + ~.{{ method.output.name }}]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '{{ method.name|snake_case }}' not in self._stubs: + self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, + response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, + ) + return self._stubs['{{ method.name|snake_case }}'] + {%- endfor %} + + +__all__ = ( + '{{ service.name }}GrpcTransport', +) +{%- endblock -%} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/__init__.py.j2 new file mode 100644 index 000000000000..9cee1e99950c --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/__init__.py.j2 @@ -0,0 +1 @@ +{% extends '_base.py.j2' %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 new file mode 100644 index 000000000000..48ef010d4983 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 @@ -0,0 +1,42 @@ +{% extends "_base.py.j2" %} + +{% block content -%} +{% with p = proto.disambiguate('proto') %} +{% if proto.messages|length or proto.all_enums|length -%} +import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore +{% endif %} + +{% filter sort_lines -%} +{% 
for import_ in proto.python_modules -%} +{{ import_ }} +{% endfor -%} +{% endfilter %} + + +__protobuf__ = {{ p }}.module( + package='{{ '.'.join(proto.meta.address.package) }}', + {% if api.naming.proto_package != '.'.join(proto.meta.address.package) -%} + marshal='{{ api.naming.proto_package }}', + {% endif -%} + manifest={ + {%- for enum in proto.enums.values() %} + '{{ enum.name }}', + {%- endfor %} + {%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor %} + }, +) + + +{% for enum in proto.enums.values() -%} + {% include '%namespace/%name/%version/%sub/types/_enum.py.j2' with context %} +{% endfor %} + +{% for message in proto.messages.values() -%} + {% include '%namespace/%name/%version/%sub/types/_message.py.j2' with context %} +{% endfor %} +{% endwith %} + +__all__ = tuple(sorted(__protobuf__.manifest)) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 new file mode 100644 index 000000000000..8b341523337a --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 @@ -0,0 +1,53 @@ +{% extends '_base.py.j2' %} + +{% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import sys + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER + + +_lazy_type_to_package_map = { + {%- filter sort_lines %} +{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %} +{%- for enum in proto.enums.values() %} + '{{ enum.name }}': '{% if 
api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %}{%- endfor %}{%- endfilter %} +} + + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted(_lazy_type_to_package_map) + return all_names + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown sub-module {name!r}.') + + +def __dir__(): + return globals().get('__all__') or __getattr__('__all__') + +{% else -%} +{% for p in api.protos.values() if p.file_to_generate and p.messages -%} +from .{{p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) +{% endfor %} + +__all__ = ( +{%- for p in api.protos.values() if p.file_to_generate %}{% for m in p.messages.values() %} + '{{ m.name }}', +{%- endfor %}{% endfor %} +) +{% endif -%} {# lazy import #} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 new file mode 100644 index 000000000000..c9f4cb0c4f0c --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 @@ -0,0 +1,6 @@ +class {{ enum.name }}({{ p }}.Enum): + r"""{{ enum.meta.doc|rst(indent=4) }}""" + {% for enum_value in enum.values -%} + {{ enum_value.name }} = {{ enum_value.number }} + {% endfor -%} +{{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 new file mode 100644 index 000000000000..a8119827b834 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -0,0 +1,51 @@ +class {{ message.name }}({{ p }}.Message): + r"""{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} + + Attributes: + {%- for field in message.fields.values() %} + {{ field.name }} ({{ field.ident.sphinx }}): + {{ field.meta.doc|rst(indent=12, nl=False) }} + {%- endfor %} + {% endif -%} + """ + {# Iterate over nested enums. -#} + {% for enum in message.nested_enums.values() -%} + {% filter indent %} + {%- include '%namespace/%name/%version/%sub/types/_enum.py.j2' %} + {% endfilter %} + {% endfor -%} + + {# Iterate over nested messages. -#} + {% for submessage in message.nested_messages.values() -%} + {% if not submessage.map -%} + {% with message = submessage %}{% filter indent %} + {%- include '%namespace/%name/%version/%sub/types/_message.py.j2' %} + {% endfilter %}{% endwith %} + {% endif %} + {% endfor -%} + + {% if "next_page_token" in message.fields.values()|map(attribute='name') %} + @property + def raw_page(self): + return self + {% endif %} + + {# Iterate over fields. 
-#} + {% for field in message.fields.values() -%} + {% if field.map -%} + {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] -%} + {{ field.name }} = {{ p }}.MapField( + {{- p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }} + {%- if value_field.enum or value_field.message %}, + {{ value_field.proto_type.lower() }}={{ value_field.type.ident.rel(message.ident) }}, + {% endif %}) + {% endwith -%} + {% else -%} + {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( + {{- p }}.{{ field.proto_type }}, number={{ field.number }} + {%- if field.enum or field.message %}, + {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, + {% endif %}) + {% endif -%} + {% endfor -%} +{{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 new file mode 100644 index 000000000000..1ea3128c5776 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -0,0 +1,158 @@ +{% extends '_base.py.j2' %} +{% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import re +import sys + +from itertools import chain + +def to_snake_case(s: str) -> str: + s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) + s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) + + # Numbers are a weird case; the goal is to spot when they _start_ + # some kind of name or acronym (e.g. 2FA, 3M). + # + # Find cases of a number preceded by a lower-case letter _and_ + # followed by at least two capital letters or a single capital and + # end of string. 
+ s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) + s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) + + return s.lower() + + +def from_snake_case(s): + _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') + return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') + +_lazy_name_to_package_map = { + 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', + {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', + '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', + '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.grpc', + {%- endfor %} {# Need to do types and enums #} +} + +_lazy_type_to_package_map = { +{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', +{%- endfor %} +{%- for enum in proto.enums.values() %} + '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ 
proto.module_name }}', +{%- endfor %}{%- endfor %} +} + + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted( + chain( + (from_snake_case(k) for k in _lazy_name_to_package_map), + _lazy_type_to_package_map, + ) + ) + return all_names + elif name.endswith('Transport'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) + globals()[name] = klass + return klass + elif name.endswith('Client'): + module = __getattr__(to_snake_case(name)) + sub_mod_class = getattr(module, name) + klass = type( + name, + (sub_mod_class,), + {'__doc__': sub_mod_class.__doc__} + ) + globals()[name] = klass + return klass + elif name in _lazy_name_to_package_map: + module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') + globals()[name] = module + return module + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown sub-module {name!r}.') + + +def __dir__(): + return globals().get('__all__') or __getattr__('__all__') +{% else -%} {# do not use lazy import #} +{# Import subpackages. -#} +{% filter sort_lines -%} +{% for subpackage in api.subpackages.keys() -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }} import {{ subpackage }} +{% endfor -%} + +{# Import services for this package. 
-#} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +{% endfor -%} + +{# Import messages and enums from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. +-#} +{# Import messages from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. + -#} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} + {% for message in proto.messages.values()|sort(attribute='name') -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +{% endfor -%} +{% for enum in proto.enums.values()|sort(attribute='name') -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +{% endfor %}{% endfor -%} +{% endfilter %} +{# Define __all__. + This requires the full set of imported names, so we iterate over + them again. 
+-#} +__all__ = ( +{%- filter indent %} +{% filter sort_lines -%} +{% for subpackage in api.subpackages.keys() -%} +'{{ subpackage }}', +{% endfor -%} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +'{{ service.client_name }}', +{% endfor -%} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} +{% for message in proto.messages.values()|sort(attribute='name') -%} +'{{ message.name }}', +{% endfor -%} +{% for enum in proto.enums.values()|sort(attribute='name') + if proto.meta.address.subpackage == api.subpackage_view -%} +'{{ enum.name }}', +{% endfor -%}{% endfor -%} +{% endfilter -%} +{% endfilter -%} +) +{% endif -%} {# lazy import #} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/py.typed.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/py.typed.j2 new file mode 100644 index 000000000000..58fdb544c225 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/py.typed.j2 @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The {{ api.naming.warehouse_package_name }} package uses inline types. diff --git a/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 b/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 new file mode 100644 index 000000000000..f2ac95dda9d7 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 @@ -0,0 +1,18 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +omit = + {{ api.naming.module_namespace|join("/") }}/{{ api.naming.module_name }}/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. 
This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/gapic/ads-templates/MANIFEST.in.j2 b/packages/gapic-generator/gapic/ads-templates/MANIFEST.in.j2 new file mode 100644 index 000000000000..b7239d5404a9 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/MANIFEST.in.j2 @@ -0,0 +1,2 @@ +recursive-include {{ '/'.join(api.naming.module_namespace + (api.naming.module_name,)) }} *.py +recursive-include {{ '/'.join(api.naming.module_namespace + (api.naming.versioned_module_name,)) }} *.py diff --git a/packages/gapic-generator/gapic/ads-templates/_base.py.j2 b/packages/gapic-generator/gapic/ads-templates/_base.py.j2 new file mode 100644 index 000000000000..133cf7aa5811 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/_base.py.j2 @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +{% block license %} +{% include "_license.j2" %} +{% endblock %} +{%- block content %} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/_license.j2 b/packages/gapic-generator/gapic/ads-templates/_license.j2 new file mode 100644 index 000000000000..03ddf2e6ea0f --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/_license.j2 @@ -0,0 +1,14 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 new file mode 100644 index 000000000000..350f120eceb8 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 @@ -0,0 +1,6 @@ +Client for {{ api.naming.long_name }} API +{{ '=' * (15 + api.naming.long_name|length) }} + +.. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }} + :members: + :inherited-members: diff --git a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 new file mode 100644 index 000000000000..ce3c8882c8cd --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 @@ -0,0 +1,5 @@ +Types for {{ api.naming.long_name }} API +{{ '=' * (14 + api.naming.long_name|length) }} + +.. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types + :members: diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 new file mode 100644 index 000000000000..1e827b37d96c --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -0,0 +1,363 @@ +{% extends '_base.py.j2' %} + +{% block content %} +# +# {{ api.naming.warehouse_package_name }} documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"{{ api.naming.warehouse_package_name }}"
+copyright = u"2020, Google, LLC"
+author = u"Google APIs"  # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "{{ api.naming.namespace|join(' ') }} Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "{{ api.naming.warehouse_package_name }}-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "{{ api.naming.warehouse_package_name }}.tex", + u"{{ api.naming.warehouse_package_name }} Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "{{ api.naming.warehouse_package_name }}", + u"{{ api.naming.long_name }} Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "{{ api.naming.warehouse_package_name }}", + u"{{ api.naming.warehouse_package_name }} Documentation", + author, + "{{ api.naming.warehouse_package_name }}", + "GAPIC library for {{ api.naming.long_name }} API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/docs/index.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/index.rst.j2 new file mode 100644 index 000000000000..c8dea9adbca6 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/docs/index.rst.j2 @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + {{ api.naming.versioned_module_name }}/services + {{ api.naming.versioned_module_name }}/types diff --git a/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 new file mode 100644 index 000000000000..6de840fe92bc --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 @@ -0,0 +1,287 @@ +{# + # Copyright (C) 2019 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + #} + +{# +A careful reader may comment that there is duplication of effort +between the python verification step and the dispatch/rendering here. +There is a little, but not enough for it to be important because +1) Other python artifacts (client libraries, unit tests, and so forth) + are generated using templates, so doing the same for generated samples is consistent. +2) Using jinja for anything requiring real logic or data structures is a bad idea. +#} + +{# response handling macros #} + +{% macro sample_header(sample, calling_form) %} + +# DO NOT EDIT! This is a generated sample ("{{ calling_form }}", "{{ sample.id }}") +# +# To install the latest published package dependency, execute the following: +# pip3 install {{ sample.package_name }} +{% endmacro %} + +{% macro print_string_formatting(string_list) %} +{% if string_list|length == 1 %} +"{{ string_list[0]|replace("%s", "{}")|replace('\"', '\\\"') }}" +{% else %} +"{{ string_list[0]|replace("%s", "{}")|replace('\"', '\\\"') }}".format({{ string_list[1:]|map("coerce_response_name")|join(", ") }}) +{% endif %} +{% endmacro %} + +{% macro print_input_params(full_request) %} +{% with input_parameters = [] %} + {% for request in full_request.request_list %} + {% if request.body %} + {% for element in request.body if element.input_parameter %} + {% do input_parameters.append(element.input_parameter) %} + {% endfor %} + {% elif request.single and request.single.input_parameter %} + {% do input_parameters.append(request.single.input_parameter) %} + {% endif %} + {% endfor %} +{{ 
input_parameters|join(", ") -}}
+{% endwith %}
+{% endmacro %}
+
+{# First element is a format string, remaining elements are the format string parameters #}
+{# Validating that the number of format params equals #}
+{# the number of remaining params is handled by real python code #}
+{% macro render_print(elts) %}
+print({{ print_string_formatting(elts)|trim }})
+{% endmacro %}
+
+{% macro render_comment(elts) %}
+ {# First element is a format string, remaining elements are the format string parameters #}
+ {# Validating that the number of format params equals #}
+ {# the number of remaining params is handled by real python code #}
+ {% with fmtStr = elts[0] %}
+ {% with params = elts[1:]|map("coerce_response_name")|list %}
+# {{ fmtStr|format(*params) }}
+{% endwith %}
+{% endwith %}
+{% endmacro %}
+
+{% macro render_define(statement) %}
+{# Python code already verified the form, no need to check #}
+{% with lvalue, rvalue = statement.split("=") %}
+{{ lvalue }} = {{ rvalue|coerce_response_name }}
+{% endwith %}
+{% endmacro %}
+
+{% macro render_collection_loop(statement) %}
+for {{ statement.variable }} in {{ statement.collection|coerce_response_name }}:
+ {% for s in statement.body %}
+{{ dispatch_statement(s, 4) }}
+ {% endfor %}
+{% endmacro %}
+
+{% macro render_map_loop(statement) %}
+ {# At least one of key and value exist; validated in python #}
+{% if "key" not in statement %}
+for {{ statement.value }} in {{ statement.map|coerce_response_name }}.values():
+{% elif "value" not in statement %}
+for {{ statement.key }} in {{ statement.map|coerce_response_name }}.keys():
+{% else %}
+for {{statement.key }}, {{ statement.value }} in {{ statement.map|coerce_response_name }}.items():
+{% endif %}
+{% for s in statement.body %}
+{{ dispatch_statement(s, 4) }}
+{% endfor %}
+{% endmacro %}
+
+{% macro render_write_file(statement) %}
+ {% with contents_rval = statement["contents"]|coerce_response_name %}
+with open({{ 
print_string_formatting(statement["filename"])|trim }}, "wb") as f: + f.write({{ contents_rval }}) + {% endwith %} +{% endmacro %} + +{% macro dispatch_statement(statement, indentation=0) %} +{# Each statement is a dict with a single key/value pair #} +{% if "print" in statement -%} +{{ render_print(statement["print"])|indent(width=indentation, first=True) }} +{% elif "define" in statement -%} +{{ render_define(statement["define"])|indent(width=indentation, first=True) }} +{% elif "comment" in statement -%} +{{ render_comment(statement["comment"])|indent(width=indentation, first=True) }} +{% elif "loop" in statement -%} + {% with loop = statement["loop"] -%} + {% if "collection" in loop -%} +{{ render_collection_loop(loop)|indent(width=indentation, first=True) }} + {% else -%} +{{ render_map_loop(loop)|indent(width=indentation, first=True) }} + {% endif -%} + {% endwith -%} +{% elif "write_file" in statement -%} +{{ render_write_file(statement["write_file"])|indent(indentation, first=True) }} +{% endif %} +{% endmacro %} + +{% macro render_request_attr(base_name, attr) %} +{# Note: python code will have manipulated the value #} +{# to be the correct enum from the right module, if necessary. 
#} +{# Python is also responsible for verifying that each input parameter is unique,#} +{# no parameter is a reserved keyword #} + {% if attr.input_parameter %} +# {{ attr.input_parameter }} = {{ attr.value }} + {% if attr.value_is_file %} +with open({{ attr.input_parameter }}, "rb") as f: + {{ base_name }}["{{ attr.field }}"] = f.read() + {% else %} +{{ base_name }}["{{ attr.field }}"] = {{ attr.input_parameter }} + {% endif %} + {% else %} +{{ base_name }}["{{ attr.field }}"] = {{ attr.value }} + {% endif %} +{% endmacro %} + +{% macro render_request_setup(full_request) %} +{% for parameter_block in full_request.request_list if parameter_block.body %} +{% if parameter_block.pattern -%} +{# This is a resource-name patterned lookup parameter #} +{% with formals = [] -%} +{% for attr in parameter_block.body -%} +{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) -%} +{% endfor -%} +{{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) +{% endwith -%} +{% else -%} {# End resource name construction #} +{{ parameter_block.base }} = {} +{% for attr in parameter_block.body %} +{{ render_request_attr(parameter_block.base, attr) }} +{% endfor %} +{% endif -%} +{% endfor %} +{% if not full_request.flattenable -%} +request = { +{% for parameter in full_request.request_list %} + '{{ parameter.base }}': {{ parameter.base if parameter.body else parameter.single }}, +{% endfor -%} +} +{% endif -%} +{% endmacro %} + +{% macro render_request_params(request) %} + {# Provide the top level parameters last and as keyword params #} + {% with params = [] -%} + {% for r in request if r.body -%} + {% do params.append(r.base) -%} + {% endfor -%} + {% for r in request if r.single -%} + {% do params.append("%s=%s"|format(r.base, r.single.value)) -%} + {% endfor -%} +{{ params|join(", ") -}} + {% endwith -%} +{% endmacro %} + +{% macro render_request_params_unary(request) %} + {# Provide the top level parameters last and 
as keyword params #} + {% if request.flattenable -%} + {% with params = [] -%} + {% for r in request.request_list -%} + {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) -%} + {% endfor -%} +{{ params|join(", ") -}} + {% endwith -%} + {% else -%} +request=request + {% endif -%} +{% endmacro %} + + +{% macro render_method_call(sample, calling_form, calling_form_enum) %} + {# Note: this doesn't deal with enums or unions #} +{% if calling_form in [calling_form_enum.RequestStreamingBidi, + calling_form_enum.RequestStreamingClient] -%} +client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim -}}]) +{% else -%} {# TODO: deal with flattening #} +{# TODO: set up client streaming once some questions are answered #} +client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim -}}) +{% endif -%} +{% endmacro %} + +{# Setting up the method invocation is the responsibility of the caller: #} +{# it's just easier to set up client side streaming and other things from outside this macro. 
#} +{% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, response_statements ) %} +{% if calling_form == calling_form_enum.Request %} +response = {{ method_invocation_text|trim }} +{% for statement in response_statements %} +{{ dispatch_statement(statement)|trim }} +{% endfor %} +{% elif calling_form == calling_form_enum.RequestPagedAll %} +page_result = {{ method_invocation_text|trim }} +for response in page_result: + {% for statement in response_statements %} + {{ dispatch_statement(statement)|trim }} + {% endfor %} +{% elif calling_form == calling_form_enum.RequestPaged %} +page_result = {{ method_invocation_text|trim }} +for page in page_result.pages(): + for response in page: + {% for statement in response_statements %} + {{ dispatch_statement(statement)|trim }} + {% endfor %} +{% elif calling_form in [calling_form_enum.RequestStreamingServer, + calling_form_enum.RequestStreamingBidi] %} +stream = {{ method_invocation_text|trim }} +for response in stream: + {% for statement in response_statements %} + {{ dispatch_statement(statement)|trim }} + {% endfor %} +{% elif calling_form == calling_form_enum.LongRunningRequestPromise %} +operation = {{ method_invocation_text|trim }} + +print("Waiting for operation to complete...") + +response = operation.result() +{% for statement in response_statements %} +{{ dispatch_statement(statement)|trim }} +{% endfor %} +{% endif %} +{% endmacro %} + +{% macro render_method_name(method_name) %} +{{ method_name|snake_case }} +{% endmacro %} + +{% macro render_main_block(method_name, full_request) %} +def main(): + import argparse + + parser = argparse.ArgumentParser() +{% with arg_list = [] -%} +{% for request in full_request.request_list if request.body -%} +{% for attr in request.body if attr.input_parameter %} + parser.add_argument("--{{ attr.input_parameter }}", + type=str, + default={{ attr.value }}) +{% do arg_list.append("args." 
+ attr.input_parameter) -%} +{% endfor -%} +{% endfor -%} +{% for request in full_request.request_list if request.single and request.single.input_parameter %} + parser.add_argument("--{{ request.single.input_parameter }}", + type=str, + default={{ request.single.value }}) +{% endfor %} + args = parser.parse_args() + + sample_{{ render_method_name(method_name)|trim }}({{ arg_list|join(", ") }}) + + +if __name__ == "__main__": + main() +{% endwith %} +{% endmacro %} diff --git a/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 new file mode 100644 index 000000000000..f054e2f2f0d4 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 @@ -0,0 +1,48 @@ +{# + # Copyright (C) 2019 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+#} +{% extends "_base.py.j2" %} + +{% block content %} +{# Input parameters: sample #} +{# callingForm #} +{% import "examples/feature_fragments.j2" as frags %} +{{ frags.sample_header(sample, calling_form) }} + +# [START {{ sample.id }}] +{# python code is responsible for all transformations: all we do here is render #} +{% for import_statement in imports %} +{{ import_statement }} +{% endfor %} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} + +{# also need calling form #} +def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input_params(sample.request)|trim -}}): + """{{ sample.description }}""" + + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + transport="grpc", + ) + + {{ frags.render_request_setup(sample.request)|indent }} +{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent -}} +{% endwith %} + +# [END {{ sample.id }}] + +{{ frags.render_main_block(sample.rpc, sample.request) }} +{%- endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 b/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 new file mode 100644 index 000000000000..71f99a414481 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -0,0 +1,40 @@ +{% extends "_base.py.j2" %} + +{% block content %} +import os + +import nox # type: ignore + + 
+@nox.session(python=['3.6', '3.7']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', '{{ api.naming.versioned_module_name }}'), + ) + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy') + session.install('.') + session.run( + 'mypy', + {%- if api.naming.module_namespace %} + '{{ api.naming.module_namespace[0] }}', + {%- else %} + '{{ api.naming.versioned_module_name }}', + {%- endif %} + ) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_keywords.py.j2 b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_keywords.py.j2 new file mode 100644 index 000000000000..99681ed99225 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_keywords.py.j2 @@ -0,0 +1,176 @@ +{% extends '_base.py.j2' %} +{% block content %} +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + {% with all_methods = [] -%} + {% for service in api.services.values() %}{% for method in service.methods.values() -%} + {% do all_methods.append(method) -%} + {% endfor %}{% endfor -%} + METHOD_TO_PARAMS: Dict[str, 
Tuple[str]] = { + {% for method in all_methods|sort(attribute='name')|unique(attribute='name') -%} + '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), + {% endfor -%} + } + {% endwith %} + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + {# Inline comments and formatting are currently stripped out. -#} + {# My current attempts at preverving comments and formatting -#} + {# keep the comments, but the formatting is run through a log -#} + {# chipper, and an extra comma gets added, which causes a -#} + {# parse error. -#} + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer={{ api.naming.module_name }}CallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the {{ api.naming.module_name }} client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 new file mode 100644 index 000000000000..9d408a23dd2e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -0,0 +1,46 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import setuptools # type: ignore + + +setuptools.setup( + name='{{ api.naming.warehouse_package_name }}', + version='0.0.1', + {% if api.naming.namespace -%} + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages={{ api.naming.namespace_packages }}, + {% else -%} + packages=setuptools.find_packages(), + {% endif -%} + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core >= 1.8.0, < 2.0.0dev', + 'googleapis-common-protos >= 1.5.8', + 'grpcio >= 1.10.0', + 'proto-plus >= 0.4.0', + {%- if api.requires_package(('google', 'iam', 'v1')) %} + 'grpc-google-iam-v1', + {%- endif %} + ), + python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy 
import requires module-level getattr #} + setup_requires=[ + 'libcst >= 0.2.5', + ], + scripts=[ + 'scripts/fixup_keywords.py', + ], + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/__init__.py b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 new file mode 100644 index 000000000000..3916e795d96e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -0,0 +1,550 @@ +{% extends "_base.py.j2" %} + +{% block content %} +from unittest import mock + +import grpc +import math +import pytest + +{# Import the service itself as well as every proto module that it imports. 
-#} +{% filter sort_lines -%} +from google import auth +from google.auth import credentials +from google.oauth2 import service_account +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports +from google.api_core import client_options +{% if service.has_lro -%} +from google.api_core import future +from google.api_core import operations_v1 +from google.longrunning import operations_pb2 +{% endif -%} +{% for method in service.methods.values() -%} +{% for ref_type in method.ref_types + if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') + or ref_type.ident.python_import.package == ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') -%} +{{ ref_type.ident.python_import }} +{% endfor -%} +{% endfor -%} +{% endfilter %} + + +def test_{{ service.client_name|snake_case }}_from_service_account_file(): + creds = credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = {{ service.client_name }}.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + {% if service.host %}assert client._transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + + +def test_{{ service.client_name|snake_case }}_client_options(): + # Check the default options have their expected values. 
+ {% if service.host %}assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == '{{ service.host }}'{% endif %} + + # Check that options can be customized. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: + transport = gtc.return_value = mock.MagicMock() + client = {{ service.client_name }}( + client_options=options + ) + transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + + +def test_{{ service.client_name|snake_case }}_client_options_from_dict(): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: + transport = gtc.return_value = mock.MagicMock() + client = {{ service.client_name }}( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + + +{% for method in service.methods.values() -%} +def test_{{ method.name|snake_case }}(transport: str = 'grpc'): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}( + {%- for field in method.output.fields.values() | rejectattr('message') %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + {% endif -%} + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + response = client.{{ method.name|snake_case }}(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == request + {% endif %} + + # Establish that the response is the type that we expect. + {% if method.void -%} + assert response is None + {% elif method.lro -%} + assert isinstance(response, future.Future) + {% elif method.server_streaming -%} + for message in response: + assert isinstance(message, {{ method.output.ident }}) + {% else -%} + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method.output.fields.values() | rejectattr('message') -%} + {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else -%} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif -%} + {% endfor %} + {% endif %} + +{% if method.field_headers %} +def test_{{ method.name|snake_case }}_field_headers(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI 
should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}( + {%- for field_header in method.field_headers %} + {{ field_header }}='{{ field_header }}/value', + {%- endfor %} + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + call.return_value = {{ method.output.ident }}() + client.{{ method.name|snake_case }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + '{% for field_header in method.field_headers -%} + {{ field_header }}={{ field_header }}/value + {%- if not loop.last %}&{% endif -%} + {%- endfor %}', + ) in kw['metadata'] +{% endif %} + +{% if method.ident.package != method.input.ident.package %} +def test_{{ method.name|snake_case }}_from_dict(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + response = client.{{ method.name|snake_case }}(request={ + {%- for field in method.input.fields.values() %} + '{{ field.name }}': {{ field.mock_value }}, + {%- endfor %} + } + ) + call.assert_called() + +{% endif %} + +{% if method.flattened_fields %} +def test_{{ method.name|snake_case }}_flattened(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = client.{{ method.name|snake_case }}( + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + {% for key, field in method.flattened_fields.items() -%} + assert args[0].{{ key }} == {{ field.mock_value }} + {% endfor %} + + +def test_{{ method.name|snake_case }}_flattened_error(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) +{% endif %} + + +{% if method.paged_result_field %} +def test_{{ method.name|snake_case }}_pager(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + ), + RuntimeError, + ) + results = [i for i in client.{{ method.name|snake_case }}( + request={}, + )] + assert len(results) == 6 + assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) + for i in results) + +def test_{{ method.name|snake_case }}_pages(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + ), + RuntimeError, + ) + pages = list(client.{{ method.name|snake_case }}(request={}).pages) + for page, token in zip(pages, ['abc','def','ghi', '']): + assert page.raw_page.next_page_token == token +{% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} +def test_{{ method.name|snake_case }}_raw_page_lro(): + response = {{ method.lro.response_type.ident }}() + assert response.raw_page is response +{% endif %} {#- method.paged_result_field #} + +{% endfor -%} {#- method in methods #} + +{% if opts.lazy_import -%} {# lazy import #} +def test_module_level_imports(): + # Use the other transport import path so that code gets tested. 
+ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name }}GrpcTransport + transport = {{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.client_name }} + client = {{ service.client_name }}(transport=transport) + assert client._transport is transport + + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name|snake_case }}_grpc_transport + transport2 = {{ service.name|snake_case }}_grpc_transport.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + + client2 = {{ service.client_name }}(transport=transport2) + assert client2._transport is transport2 + + {% with type_name = cycler(*service.methods.values()).next().input.name -%} + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name }} + type_ = {{ type_name }}() + + try: + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name|lower }}_squidification + except (AttributeError, ImportError) as e: + pass + else: + assert False + {% endwith -%} + + import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} as mod + all_names = dir(mod) + expected_names = sorted([ + 'types', + {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} + '{{ service.client_name }}', + '{{ service.transport_name }}', + '{{ service.grpc_transport_name }}', + {%- endfor %} + {%- for proto in api.protos.values() if 
proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor %} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}' + {% endfor %}{%- endfor %} + ]) + assert all_names == expected_names + + {% with type_name = cycler(*service.methods.values()).next().input.name -%} + from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types import {{ type_name }} + type_ = {{ type_name }}() + {% endwith -%} + + import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types as types + all_types = dir(types) + expected_types = sorted([ + {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor %} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}', + {% endfor %}{%- endfor %} + ]) + assert all_types == expected_types + +{% endif -%} {# lazy import #} + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = {{ service.client_name }}(transport=transport) + assert client._transport is transport + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + assert isinstance( + client._transport, + transports.{{ service.name }}GrpcTransport, + ) + + +def test_{{ service.name|snake_case }}_base_transport(): + # Instantiate the base transport. + transport = transports.{{ service.name }}Transport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + {% for method in service.methods.values() -%} + '{{ method.name|snake_case }}', + {% endfor -%} + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + {% if service.has_lro -%} + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + {% endif %} + + +def test_{{ service.name|snake_case }}_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, 'default') as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + {{ service.client_name }}() + adc.assert_called_once_with(scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + )) + + +def test_{{ service.name|snake_case }}_host_no_port(): + {% with host = (service.host|default('localhost', true)).split(':')[0] -%} + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), + transport='grpc', + ) + assert client._transport._host == '{{ host }}:443' + {% endwith %} + + +def test_{{ service.name|snake_case }}_host_with_port(): + {% with host = (service.host|default('localhost', true)).split(':')[0] -%} + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), + transport='grpc', + ) + assert client._transport._host == '{{ host }}:8000' + {% endwith %} + + +def test_{{ service.name|snake_case }}_grpc_transport_channel(): + channel = grpc.insecure_channel('http://localhost/') + transport = transports.{{ service.name }}GrpcTransport( + channel=channel, + ) + assert transport.grpc_channel is channel + + +{% if service.has_lro -%} +def test_{{ service.name|snake_case }}_grpc_lro_client(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client._transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + +{% endif -%} + +{% for message in service.resource_messages -%} +{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle") -%} +def test_{{ message.resource_type|snake_case }}_path(): + {% for arg in message.resource_path_args -%} + {{ arg }} = "{{ molluscs.next() }}" + {% endfor %} + expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) + assert expected == actual + +{% endwith -%} +{% endfor -%} + +{% endblock %} diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 2b98b88a868b..ef7c9a93de71 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -92,11 +92,15 @@ def build(cls, opt_string: str) -> 'Options': # If templates are specified, one of the specified directories # may be our default; perform that replacement. 
- templates = opts.pop('templates', ['DEFAULT']) - while 'DEFAULT' in templates: - templates[templates.index('DEFAULT')] = os.path.realpath( - os.path.join(os.path.dirname(__file__), '..', 'templates'), - ) + default_token = 'DEFAULT' + templates = opts.pop('templates', [default_token]) + default_path = os.path.realpath( + os.path.join(os.path.dirname(__file__), '..', 'templates'), + ) + templates = [ + (default_path if path == default_token else path) + for path in templates + ] retry_cfg = None retry_paths = opts.pop('retry-config', None) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index d3d84243b916..cfee241ad299 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -15,9 +15,11 @@ from __future__ import absolute_import import os import tempfile - +import typing import nox # type: ignore +from os import path + showcase_version = '0.6.1' @@ -26,22 +28,33 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'pyfakefs') + session.install( + 'coverage', + 'pytest', + 'pytest-cov', + 'pytest-xdist', + 'pyfakefs', + ) session.install('-e', '.') session.run( 'py.test', '-vv', + '-n=auto', '--cov=gapic', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - *(session.posargs or [os.path.join('tests', 'unit')]), + *(session.posargs or [path.join('tests', 'unit')]), ) @nox.session(python='3.8') -def showcase(session): +def showcase( + session, + templates='DEFAULT', + other_opts: typing.Iterable[str] = (), +): """Run the Showcase test suite.""" # Try to make it clear if Showcase is not running, so that @@ -64,35 +77,48 @@ def showcase(session): 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' f'download/v{showcase_version}/' f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + '-L', '--output', path.join(tmp_dir, 'showcase.desc'), external=True, 
silent=True, ) # Write out a client library for Showcase. - session.run('protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - '--python_gapic_opt=lazy-import,', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - external=True, - ) + template_opt = f'python-gapic-templates={templates}' + opts = f'--python_gapic_opt={template_opt}' + opts += ','.join(other_opts + ('lazy-import',)) + session.run( + 'protoc', + f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + external=True, + ) # Install the library. session.install(tmp_dir) session.run( - 'py.test', '--quiet', *(session.posargs or [os.path.join('tests', 'system')]) + 'py.test', '--quiet', *(session.posargs or [path.join('tests', 'system')]) ) +@nox.session(python='3.8') +def showcase_alternative_templates(session): + templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') + showcase(session, templates=templates, other_opts=('old-naming',)) + + @nox.session(python=['3.6', '3.7', '3.8']) -def showcase_unit(session): +def showcase_unit( + session, + templates='DEFAULT', + other_opts: typing.Iterable[str] = (), +): """Run the generated unit tests against the Showcase library.""" # Install pytest and gapic-generator-python - session.install('coverage', 'pytest', 'pytest-cov') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-xdist',) session.install('.') # Install a client library for Showcase. @@ -103,26 +129,30 @@ def showcase_unit(session): 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' f'download/v{showcase_version}/' f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + '-L', '--output', path.join(tmp_dir, 'showcase.desc'), external=True, silent=True, ) # Write out a client library for Showcase. 
- args = [ + opts = [ + f'python-gapic-templates={templates}', + ] + opts.extend(other_opts) + if session.python == '3.8': + opts.append('lazy-import') + + opt_str = f'--python_gapic_opt={",".join(opts)},' + + session.run( 'protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', + f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', f'--python_gapic_out={tmp_dir}', + opt_str, 'google/showcase/v1beta1/echo.proto', 'google/showcase/v1beta1/identity.proto', 'google/showcase/v1beta1/messaging.proto', 'google/showcase/v1beta1/testing.proto', - ] - if session.python == '3.8': - args.append('--python_gapic_opt=lazy-import') - - session.run( - *args, external=True, ) @@ -133,15 +163,25 @@ def showcase_unit(session): # Run the tests. session.run( 'py.test', + '-n=auto', '--quiet', '--cov=google', '--cov-report=term', - *(session.posargs or [os.path.join('tests', 'unit')]), + *(session.posargs or [path.join('tests', 'unit')]), ) +@nox.session(python=['3.6', '3.7', '3.8']) +def showcase_unit_alternative_templates(session): + templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') + showcase_unit(session, templates=templates, other_opts=('old-naming',)) + + @nox.session(python='3.8') -def showcase_mypy(session): +def showcase_mypy( + session, templates='DEFAULT', + other_opts: typing.Iterable[str] = (), +): """Perform typecheck analysis on the generated Showcase library.""" # Install pytest and gapic-generator-python @@ -155,21 +195,25 @@ def showcase_mypy(session): 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' f'download/v{showcase_version}/' f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', os.path.join(tmp_dir, 'showcase.desc'), + '-L', '--output', path.join(tmp_dir, 'showcase.desc'), external=True, silent=True, ) - # Write out a client library for Showcase. 
- session.run('protoc', - f'--descriptor_set_in={tmp_dir}{os.path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - 'google/showcase/v1beta1/messaging.proto', - 'google/showcase/v1beta1/testing.proto', - external=True, - ) + template_opt = f'python-gapic-templates={templates}' + gapic_opts = f'--python_gapic_opt={template_opt},' + gapic_opts += ','.join(other_opts) + session.run( + 'protoc', + f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + gapic_opts, + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + 'google/showcase/v1beta1/messaging.proto', + 'google/showcase/v1beta1/testing.proto', + external=True, + ) # Install the library. session.chdir(tmp_dir) @@ -179,6 +223,12 @@ def showcase_mypy(session): session.run('mypy', 'google') +@nox.session(python='3.8') +def showcase_mypy_alternative_templates(session): + templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') + showcase_mypy(session, templates=templates, other_opts=('old-naming',)) + + @nox.session(python='3.6') def docs(session): """Build the docs.""" From b95dceffec1b9cdd35adf249b56091ad6f6d4299 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Apr 2020 23:40:01 +0200 Subject: [PATCH 0240/1339] Update dependency grpcio to v1.28.1 (#358) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2419b0864fed..bd7aaf9a2fd2 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==7.1.1 google-api-core==1.16.0 googleapis-common-protos==1.51.0 -grpcio==1.27.2 +grpcio==1.28.1 jinja2==2.11.1 protobuf==3.11.3 pypandoc==1.4 From da0510a03387b3f63ada2191b1518838b1aaeb21 Mon Sep 17 00:00:00 2001 From: 
arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 8 Apr 2020 14:54:23 -0700 Subject: [PATCH 0241/1339] Add mTLS support to generator (#359) Add preliminary support for mTLS to the generated surface. mTLS provides mutual authentication between a client and a service using certificates. Unless an alternative or custom endpoint is provided, the client surface assumes that the mTLS endpoint for a service is the same as the non-mtls variant with the 'mtls' moniker prepended to the domain. Client certificates can be passed explicitly or yielded via a callback. --- packages/gapic-generator/.circleci/config.yml | 14 +- .../%sub/services/%service/client.py.j2 | 75 ++++- .../services/%service/transports/grpc.py.j2 | 50 +++- .../gapic/templates/setup.py.j2 | 1 + .../%name_%version/%sub/test_%service.py.j2 | 267 ++++++++++++++---- packages/gapic-generator/noxfile.py | 4 + 6 files changed, 332 insertions(+), 79 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index b6891e367037..1bb51589691b 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -253,7 +253,7 @@ jobs: ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Run showcase tests. - command: nox -s showcase_alternative_templates + command: nox -s showcase_alternative_templates showcase-unit-3.6: docker: - image: python:3.6-slim @@ -263,7 +263,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip git - run: name: Install protoc 3.7.1. command: | @@ -287,7 +287,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip git - run: name: Install protoc 3.7.1. command: | @@ -311,7 +311,7 @@ jobs: name: Install system dependencies. 
command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip git - run: name: Install protoc 3.7.1. command: | @@ -335,7 +335,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip git - run: name: Install protoc 3.7.1. command: | @@ -359,7 +359,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip git - run: name: Install protoc 3.7.1. command: | @@ -383,7 +383,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip git - run: name: Install protoc 3.7.1. command: | diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 1f821b8af91d..f0b7d05381b0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -2,7 +2,8 @@ {% block content %} from collections import OrderedDict -from typing import Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +import re +from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -57,7 +58,40 @@ class {{ service.client_name }}Meta(type): class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """{{ service.meta.doc|rst(width=72, indent=4) }}""" - 
DEFAULT_OPTIONS = ClientOptions.ClientOptions({% if service.host %}api_endpoint='{{ service.host }}'{% endif %}) + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + DEFAULT_OPTIONS = ClientOptions.ClientOptions(api_endpoint=DEFAULT_ENDPOINT) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -106,23 +140,56 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport to use. If set to None, a transport is chosen automatically. client_options (ClientOptions): Custom options for the client. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. + (2) If ``transport`` argument is None, ``client_options`` can be + used to create a mutual TLS transport. If ``api_endpoint`` is + provided and different from the default endpoint, or the + ``client_cert_source`` property is provided, mutual TLS + transport will be created if client SSL credentials are found. 
+ Client SSL credentials are obtained from ``client_cert_source`` + or application default SSL credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. """ if isinstance(client_options, dict): client_options = ClientOptions.from_dict(client_options) + # Set default api endpoint if not set. + if client_options.api_endpoint is None: + client_options.api_endpoint = self.DEFAULT_ENDPOINT + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, {{ service.name }}Transport): + # transport is a {{ service.name }}Transport instance. if credentials: raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') self._transport = transport - else: + elif transport is not None or ( + client_options.api_endpoint == self.DEFAULT_ENDPOINT + and client_options.client_cert_source is None + ): + # Don't trigger mTLS. Transport = type(self).get_transport_class(transport) self._transport = Transport( + credentials=credentials, host=client_options.api_endpoint + ) + else: + # Trigger mTLS. If the user overrides endpoint, use it as the mTLS + # endpoint, otherwise use the default mTLS endpoint. 
+ option_endpoint = client_options.api_endpoint + api_mtls_endpoint = self.DEFAULT_MTLS_ENDPOINT if option_endpoint == self.DEFAULT_ENDPOINT else option_endpoint + + self._transport = {{ service.name }}GrpcTransport( credentials=credentials, - host=client_options.api_endpoint{% if service.host %} or '{{ service.host }}'{% endif %}, + host=client_options.api_endpoint, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=client_options.client_cert_source, ) {% for method in service.methods.values() -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index bd3b074e7414..eb47dbdc52ed 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,13 +1,15 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Callable, Dict +from typing import Callable, Dict, Tuple from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + import grpc # type: ignore @@ -35,7 +37,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, - channel: grpc.Channel = None) -> None: + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: """Instantiate the transport. 
Args: @@ -49,19 +53,51 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. """ - # Sanity check: Ensure that channel and credentials are not both - # provided. if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = grpc_helpers.create_channel( + host, + credentials=credentials, + ssl_credentials=ssl_credentials, + scopes=self.AUTH_SCOPES, + ) + + # Run the base constructor. super().__init__(host=host, credentials=credentials) self._stubs = {} # type: Dict[str, Callable] - # If a channel was explicitly provided, set it. 
- if channel: - self._grpc_channel = channel @classmethod def create_channel(cls, diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 9d408a23dd2e..91bff5f82b5c 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,6 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( + 'google-auth >= 1.13.1', 'google-api-core >= 1.8.0, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 3916e795d96e..e55cc999091e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -15,6 +15,7 @@ from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from google.api_core import client_options +from google.api_core import grpc_helpers {% if service.has_lro -%} from google.api_core import future from google.api_core import operations_v1 @@ -30,6 +31,25 @@ from google.longrunning import operations_pb2 {% endfilter %} +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = 
"example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert {{ service.client_name }}._get_default_mtls_endpoint(None) == None + assert {{ service.client_name }}._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + def test_{{ service.client_name|snake_case }}_from_service_account_file(): creds = credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: @@ -45,25 +65,65 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): def test_{{ service.client_name|snake_case }}_client_options(): # Check the default options have their expected values. - {% if service.host %}assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == '{{ service.host }}'{% endif %} + assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == {{ service.client_name }}.DEFAULT_ENDPOINT - # Check that options can be customized. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + # Check that if channel is provided we won't create a new one. 
+ with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials() + ) + client = {{ service.client_name }}(transport=transport) + gtc.assert_not_called() + + # Check mTLS is not triggered with empty client options. + options = client_options.ClientOptions() with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: transport = gtc.return_value = mock.MagicMock() - client = {{ service.client_name }}( - client_options=options + client = {{ service.client_name }}(client_options=options) + transport.assert_called_once_with( + credentials=None, + host=client.DEFAULT_ENDPOINT, ) - transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + # Check mTLS is triggered with api endpoint override. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + credentials=None, + host="squid.clam.whelk", + ) + + # Check mTLS is triggered if client_cert_source is provided. 
+ options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) def test_{{ service.client_name|snake_case }}_client_options_from_dict(): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - transport = gtc.return_value = mock.MagicMock() + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None client = {{ service.client_name }}( client_options={'api_endpoint': 'squid.clam.whelk'} ) - transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + grpc_transport.assert_called_once_with( + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + credentials=None, + host="squid.clam.whelk", + ) {% for method in service.methods.values() -%} @@ -154,7 +214,7 @@ def test_{{ method.name|snake_case }}_field_headers(): '__call__') as call: call.return_value = {{ method.output.ident }}() client.{{ method.name|snake_case }}(request) - + # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] @@ -200,57 +260,57 @@ def test_{{ method.name|snake_case }}_from_dict(): {% endif %} -{% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_flattened(): - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. - {% if method.void -%} - call.return_value = None - {% elif method.lro -%} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} - call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} - call.return_value = {{ method.output.ident }}() - {% endif %} - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.{{ method.name|snake_case }}( - {%- for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {%- endfor %} - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] +{% if method.flattened_fields %} +def test_{{ method.name|snake_case }}_flattened(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = client.{{ method.name|snake_case }}( + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() -%} - assert args[0].{{ key }} == {{ field.mock_value }} - {% endfor %} - - -def test_{{ method.name|snake_case }}_flattened_error(): - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.{{ method.name|snake_case }}( - {{ method.input.ident }}(), - {%- for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {%- endfor %} - ) + assert args[0].{{ key }} == {{ field.mock_value }} + {% endfor %} + + +def test_{{ method.name|snake_case }}_flattened_error(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) {% endif %} @@ -509,10 +569,95 @@ def test_{{ service.name|snake_case }}_host_with_port(): def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.insecure_channel('http://localhost/') + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() transport = transports.{{ service.name }}GrpcTransport( + host="squid.clam.whelk", channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.{{ service.name }}GrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, ) - assert transport.grpc_channel is channel + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.{{ service.name }}GrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ) + assert transport.grpc_channel == mock_grpc_channel {% if service.has_lro -%} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index cfee241ad299..691e88163d8c 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -118,6 +118,10 @@ def showcase_unit( """Run the generated unit tests against the Showcase library.""" # Install pytest and gapic-generator-python + session.install( + "-e", + "git+https://github.com/googleapis/python-api-core.git@ca6c41cf460e505e6b228263170927270626222a#egg=google-api-core", + ) session.install('coverage', 'pytest', 'pytest-cov', 'pytest-xdist',) session.install('.') From 6240d44bd0c9ff0d81e918272c493728e4b0c08c Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Fri, 10 Apr 2020 15:14:30 -0700 Subject: [PATCH 0242/1339] fix: fix mTLS logic (#374) Previous PR triggers mTLS if client_options.api_endpoint is different from the default one, in this PR, we change the logic, mTLS is triggered if client_options.api_endpoint is provided --- .../%sub/services/%service/client.py.j2 | 48 +++++++++++-------- .../%name_%version/%sub/test_%service.py.j2 | 26 +++++++--- 2 files changed, 46 insertions(+), 28 deletions(-) diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index f0b7d05381b0..5c5fd5157bce 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -91,7 +91,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) - DEFAULT_OPTIONS = ClientOptions.ClientOptions(api_endpoint=DEFAULT_ENDPOINT) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -126,7 +125,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, - client_options: ClientOptions = DEFAULT_OPTIONS, + client_options: ClientOptions = None, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -143,12 +142,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. (2) If ``transport`` argument is None, ``client_options`` can be - used to create a mutual TLS transport. If ``api_endpoint`` is - provided and different from the default endpoint, or the - ``client_cert_source`` property is provided, mutual TLS - transport will be created if client SSL credentials are found. - Client SSL credentials are obtained from ``client_cert_source`` - or application default SSL credentials. + used to create a mutual TLS transport. 
If ``client_cert_source`` + is provided, mutual TLS transport will be created with the given + ``api_endpoint`` or the default mTLS endpoint, and the client + SSL credentials obtained from ``client_cert_source``. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -157,10 +154,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if isinstance(client_options, dict): client_options = ClientOptions.from_dict(client_options) - # Set default api endpoint if not set. - if client_options.api_endpoint is None: - client_options.api_endpoint = self.DEFAULT_ENDPOINT - # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. @@ -170,24 +163,37 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') self._transport = transport - elif transport is not None or ( - client_options.api_endpoint == self.DEFAULT_ENDPOINT + elif client_options is None or ( + client_options.api_endpoint == None and client_options.client_cert_source is None ): - # Don't trigger mTLS. + # Don't trigger mTLS if we get an empty ClientOptions. Transport = type(self).get_transport_class(transport) self._transport = Transport( - credentials=credentials, host=client_options.api_endpoint + credentials=credentials, host=self.DEFAULT_ENDPOINT ) else: - # Trigger mTLS. If the user overrides endpoint, use it as the mTLS - # endpoint, otherwise use the default mTLS endpoint. - option_endpoint = client_options.api_endpoint - api_mtls_endpoint = self.DEFAULT_MTLS_ENDPOINT if option_endpoint == self.DEFAULT_ENDPOINT else option_endpoint + # We have a non-empty ClientOptions. If client_cert_source is + # provided, trigger mTLS with user provided endpoint or the default + # mTLS endpoint. 
+ if client_options.client_cert_source: + api_mtls_endpoint = ( + client_options.api_endpoint + if client_options.api_endpoint + else self.DEFAULT_MTLS_ENDPOINT + ) + else: + api_mtls_endpoint = None + + api_endpoint = ( + client_options.api_endpoint + if client_options.api_endpoint + else self.DEFAULT_ENDPOINT + ) self._transport = {{ service.name }}GrpcTransport( credentials=credentials, - host=client_options.api_endpoint, + host=api_endpoint, api_mtls_endpoint=api_mtls_endpoint, client_cert_source=client_options.client_cert_source, ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index e55cc999091e..4e3e89a32966 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -64,10 +64,6 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): def test_{{ service.client_name|snake_case }}_client_options(): - # Check the default options have their expected values. - assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} - assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == {{ service.client_name }}.DEFAULT_ENDPOINT - # Check that if channel is provided we won't create a new one. with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: transport = transports.{{ service.name }}GrpcTransport( @@ -86,13 +82,14 @@ def test_{{ service.client_name|snake_case }}_client_options(): host=client.DEFAULT_ENDPOINT, ) - # Check mTLS is triggered with api endpoint override. 
+ # Check mTLS is not triggered if api_endpoint is provided but + # client_cert_source is None. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", + api_mtls_endpoint=None, client_cert_source=None, credentials=None, host="squid.clam.whelk", @@ -112,6 +109,21 @@ def test_{{ service.client_name|snake_case }}_client_options(): host=client.DEFAULT_ENDPOINT, ) + # Check mTLS is triggered if api_endpoint and client_cert_source are provided. + options = client_options.ClientOptions( + api_endpoint="squid.clam.whelk", + client_cert_source=client_cert_source_callback + ) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=client_cert_source_callback, + credentials=None, + host="squid.clam.whelk", + ) + def test_{{ service.client_name|snake_case }}_client_options_from_dict(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -119,7 +131,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): 
client_options={'api_endpoint': 'squid.clam.whelk'} ) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", + api_mtls_endpoint=None, client_cert_source=None, credentials=None, host="squid.clam.whelk", From 1ae3223a50e56f3f9d02567dafda7b5503a1f877 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Apr 2020 23:24:36 +0200 Subject: [PATCH 0243/1339] chore(deps): update dependency jinja2 to v2.11.2 (#387) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index bd7aaf9a2fd2..694147c01f19 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,7 +2,7 @@ click==7.1.1 google-api-core==1.16.0 googleapis-common-protos==1.51.0 grpcio==1.28.1 -jinja2==2.11.1 +jinja2==2.11.2 protobuf==3.11.3 pypandoc==1.4 PyYAML==5.3.1 From 3695be4355d74ead9f9cd598011fd632bc140b9f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Apr 2020 23:45:09 +0200 Subject: [PATCH 0244/1339] chore(deps): update dependency pypandoc to v1.5 (#386) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 694147c01f19..e2211633a1c7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,5 +4,5 @@ googleapis-common-protos==1.51.0 grpcio==1.28.1 jinja2==2.11.2 protobuf==3.11.3 -pypandoc==1.4 +pypandoc==1.5 PyYAML==5.3.1 From aa26989aa0ee5e8fb37e55f7a8579ef07ec3e947 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Mon, 13 Apr 2020 15:02:41 -0700 Subject: [PATCH 0245/1339] feat: add mTLS to ads template (#384) Co-authored-by: Dov Shlachter --- 
.../%sub/services/%service/client.py.j2 | 85 +++++- .../services/%service/transports/grpc.py.j2 | 52 +++- .../%name_%version/%sub/test_%service.py.j2 | 282 ++++++++++++++---- 3 files changed, 343 insertions(+), 76 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 1f821b8af91d..7f5cf6b73f15 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -2,7 +2,8 @@ {% block content %} from collections import OrderedDict -from typing import Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +import re +from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -57,7 +58,39 @@ class {{ service.client_name }}Meta(type): class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """{{ service.meta.doc|rst(width=72, indent=4) }}""" - DEFAULT_OPTIONS = ClientOptions.ClientOptions({% if service.host %}api_endpoint='{{ service.host }}'{% endif %}) + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -92,7 +125,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, - client_options: ClientOptions = DEFAULT_OPTIONS, + client_options: ClientOptions = None, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -106,6 +139,17 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport to use. If set to None, a transport is chosen automatically. client_options (ClientOptions): Custom options for the client. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. + (2) If ``transport`` argument is None, ``client_options`` can be + used to create a mutual TLS transport. If ``client_cert_source`` + is provided, mutual TLS transport will be created with the given + ``api_endpoint`` or the default mTLS endpoint, and the client + SSL credentials obtained from ``client_cert_source``. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
""" if isinstance(client_options, dict): client_options = ClientOptions.from_dict(client_options) @@ -114,16 +158,45 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, {{ service.name }}Transport): + # transport is a {{ service.name }}Transport instance. if credentials: raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') self._transport = transport - else: + elif client_options is None or ( + client_options.api_endpoint == None + and client_options.client_cert_source is None + ): + # Don't trigger mTLS if we get an empty ClientOptions. Transport = type(self).get_transport_class(transport) self._transport = Transport( - credentials=credentials, - host=client_options.api_endpoint{% if service.host %} or '{{ service.host }}'{% endif %}, + credentials=credentials, host=self.DEFAULT_ENDPOINT ) + else: + # We have a non-empty ClientOptions. If client_cert_source is + # provided, trigger mTLS with user provided endpoint or the default + # mTLS endpoint. 
+ if client_options.client_cert_source: + api_mtls_endpoint = ( + client_options.api_endpoint + if client_options.api_endpoint + else self.DEFAULT_MTLS_ENDPOINT + ) + else: + api_mtls_endpoint = None + + api_endpoint = ( + client_options.api_endpoint + if client_options.api_endpoint + else self.DEFAULT_ENDPOINT + ) + + self._transport = {{ service.name }}GrpcTransport( + credentials=credentials, + host=api_endpoint, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=client_options.client_cert_source, + ) {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index bd3b074e7414..c42770f9a0f1 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,13 +1,15 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Callable, Dict +from typing import Callable, Dict, Tuple from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + import grpc # type: ignore @@ -35,7 +37,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, - channel: grpc.Channel = None) -> None: + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: """Instantiate the 
transport. Args: @@ -49,19 +53,51 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. """ - # Sanity check: Ensure that channel and credentials are not both - # provided. if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = grpc_helpers.create_channel( + host, + credentials=credentials, + ssl_credentials=ssl_credentials, + scopes=self.AUTH_SCOPES, + ) + # Run the base constructor. super().__init__(host=host, credentials=credentials) - self._stubs = {} # type: Dict[str, Callable] + self._stubs = {} # type: Dict[str, Callable] - # If a channel was explicitly provided, set it. 
- if channel: - self._grpc_channel = channel @classmethod def create_channel(cls, diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 3916e795d96e..32e66ba8ad1c 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -15,6 +15,7 @@ from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from google.api_core import client_options +from google.api_core import grpc_helpers {% if service.has_lro -%} from google.api_core import future from google.api_core import operations_v1 @@ -30,6 +31,25 @@ from google.longrunning import operations_pb2 {% endfilter %} +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert {{ service.client_name }}._get_default_mtls_endpoint(None) == None + assert {{ service.client_name }}._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert {{ 
service.client_name }}._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + def test_{{ service.client_name|snake_case }}_from_service_account_file(): creds = credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: @@ -44,26 +64,79 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): def test_{{ service.client_name|snake_case }}_client_options(): - # Check the default options have their expected values. - {% if service.host %}assert {{ service.client_name }}.DEFAULT_OPTIONS.api_endpoint == '{{ service.host }}'{% endif %} + # Check that if channel is provided we won't create a new one. + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials() + ) + client = {{ service.client_name }}(transport=transport) + gtc.assert_not_called() - # Check that options can be customized. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + # Check mTLS is not triggered with empty client options. 
+ options = client_options.ClientOptions() with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: transport = gtc.return_value = mock.MagicMock() - client = {{ service.client_name }}( - client_options=options + client = {{ service.client_name }}(client_options=options) + transport.assert_called_once_with( + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) + + # Check mTLS is not triggered if api_endpoint is provided but + # client_cert_source is None. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint=None, + client_cert_source=None, + credentials=None, + host="squid.clam.whelk", + ) + + # Check mTLS is triggered if client_cert_source is provided. + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) + + # Check mTLS is triggered if api_endpoint and client_cert_source are provided. 
+ options = client_options.ClientOptions( + api_endpoint="squid.clam.whelk", + client_cert_source=client_cert_source_callback + ) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}(client_options=options) + grpc_transport.assert_called_once_with( + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=client_cert_source_callback, + credentials=None, + host="squid.clam.whelk", ) - transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") def test_{{ service.client_name|snake_case }}_client_options_from_dict(): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - transport = gtc.return_value = mock.MagicMock() + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None client = {{ service.client_name }}( client_options={'api_endpoint': 'squid.clam.whelk'} ) - transport.assert_called_once_with(credentials=None, host="squid.clam.whelk") + grpc_transport.assert_called_once_with( + api_mtls_endpoint=None, + client_cert_source=None, + credentials=None, + host="squid.clam.whelk", + ) {% for method in service.methods.values() -%} @@ -154,7 +227,7 @@ def test_{{ method.name|snake_case }}_field_headers(): '__call__') as call: call.return_value = {{ method.output.ident }}() client.{{ method.name|snake_case }}(request) - + # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] @@ -200,57 +273,57 @@ def test_{{ method.name|snake_case }}_from_dict(): {% endif %} -{% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_flattened(): - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. - {% if method.void -%} - call.return_value = None - {% elif method.lro -%} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} - call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} - call.return_value = {{ method.output.ident }}() - {% endif %} - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = client.{{ method.name|snake_case }}( - {%- for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {%- endfor %} - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] +{% if method.flattened_fields %} +def test_{{ method.name|snake_case }}_flattened(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = client.{{ method.name|snake_case }}( + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() -%} - assert args[0].{{ key }} == {{ field.mock_value }} - {% endfor %} - - -def test_{{ method.name|snake_case }}_flattened_error(): - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.{{ method.name|snake_case }}( - {{ method.input.ident }}(), - {%- for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {%- endfor %} - ) + assert args[0].{{ key }} == {{ field.mock_value }} + {% endfor %} + + +def test_{{ method.name|snake_case }}_flattened_error(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) {% endif %} @@ -509,10 +582,95 @@ def test_{{ service.name|snake_case }}_host_with_port(): def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.insecure_channel('http://localhost/') + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() transport = transports.{{ service.name }}GrpcTransport( + host="squid.clam.whelk", channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.{{ service.name }}GrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" ) - assert transport.grpc_channel is channel + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.{{ service.name }}GrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ) + assert transport.grpc_channel == mock_grpc_channel {% if service.has_lro -%} From c2eb14cd0cbbb1d50e9924855c945b10625a33b4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 13 Apr 2020 15:46:12 -0700 Subject: [PATCH 0246/1339] Test and impl for stripping whitespace off option tokens (#389) Shell scripts that invoke the generator may construct the gapic generator options via formatted string concatenation and may contain escaped newlines. Proper parsing includes stripping whitespace after tokenizing from a comma-delimited string. --- packages/gapic-generator/gapic/generator/options.py | 1 + .../tests/unit/generator/test_options.py | 12 ++++++++++++ 2 files changed, 13 insertions(+) diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index ef7c9a93de71..0c823fa84df0 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -70,6 +70,7 @@ def build(cls, opt_string: str) -> 'Options': # Parse out every option beginning with `python-gapic` opts: DefaultDict[str, List[str]] = defaultdict(list) for opt in opt_string.split(','): + opt = opt.strip() # Parse out the key and value. 
value = 'true' if '=' in opt: diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 0d8aff6c9a70..78af30dfeea8 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -52,6 +52,18 @@ def test_options_unrecognized_likely_typo(): assert len(warn.mock_calls) == 0 +def test_options_trim_whitespace(): + # When writing shell scripts, users may construct options strings with + # whitespace that needs to be trimmed after tokenizing. + opts = options.Options.build( + ''' + python-gapic-templates=/squid/clam/whelk , + python-gapic-name=mollusca , + ''') + assert opts.templates[0] == '/squid/clam/whelk' + assert opts.name == 'mollusca' + + def test_options_no_valid_sample_config(fs): fs.create_file("sampledir/not_a_config.yaml") with pytest.raises(types.InvalidConfig): From b66aaf466b9e026199e2e9482efd2ed8cd2a52cc Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 15 Apr 2020 11:47:16 -0700 Subject: [PATCH 0247/1339] fix: update dependencies (#393) --- packages/gapic-generator/.circleci/config.yml | 12 ++++++------ .../gapic-generator/gapic/ads-templates/setup.py.j2 | 2 +- packages/gapic-generator/gapic/templates/setup.py.j2 | 4 ++-- packages/gapic-generator/noxfile.py | 4 ---- packages/gapic-generator/requirements.txt | 2 +- packages/gapic-generator/setup.py | 2 +- 6 files changed, 11 insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 1bb51589691b..adb3b67267a1 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -263,7 +263,7 @@ jobs: name: Install system dependencies. 
command: | apt-get update - apt-get install -y curl pandoc unzip git + apt-get install -y curl pandoc unzip - run: name: Install protoc 3.7.1. command: | @@ -287,7 +287,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip git + apt-get install -y curl pandoc unzip - run: name: Install protoc 3.7.1. command: | @@ -311,7 +311,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip git + apt-get install -y curl pandoc unzip - run: name: Install protoc 3.7.1. command: | @@ -335,7 +335,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip git + apt-get install -y curl pandoc unzip - run: name: Install protoc 3.7.1. command: | @@ -359,7 +359,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip git + apt-get install -y curl pandoc unzip - run: name: Install protoc 3.7.1. command: | @@ -383,7 +383,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip git + apt-get install -y curl pandoc unzip - run: name: Install protoc 3.7.1. 
command: | diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 9d408a23dd2e..78cc078c17c4 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -16,7 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core >= 1.8.0, < 2.0.0dev', + 'google-api-core >= 1.17.0, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 0.4.0', diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 91bff5f82b5c..19251b6f6dcf 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,8 +16,8 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-auth >= 1.13.1', - 'google-api-core >= 1.8.0, < 2.0.0dev', + 'google-auth >= 1.14.0', + 'google-api-core >= 1.17.0, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 0.4.0', diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 691e88163d8c..cfee241ad299 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -118,10 +118,6 @@ def showcase_unit( """Run the generated unit tests against the Showcase library.""" # Install pytest and gapic-generator-python - session.install( - "-e", - "git+https://github.com/googleapis/python-api-core.git@ca6c41cf460e505e6b228263170927270626222a#egg=google-api-core", - ) session.install('coverage', 'pytest', 'pytest-cov', 'pytest-xdist',) session.install('.') diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e2211633a1c7..baaa5dd77375 100644 --- a/packages/gapic-generator/requirements.txt +++ 
b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.1 -google-api-core==1.16.0 +google-api-core==1.17.0 googleapis-common-protos==1.51.0 grpcio==1.28.1 jinja2==2.11.2 diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 63ad642577a3..11864f1ca944 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -42,7 +42,7 @@ include_package_data=True, install_requires=( 'click >= 6.7', - 'google-api-core >= 1.14.3', + 'google-api-core >= 1.17.0', 'googleapis-common-protos >= 1.6.0', 'grpcio >= 1.24.3', 'jinja2 >= 2.10', From 9d16ecf08fe70bdf1aa10326647ce828b8b62a76 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 15 Apr 2020 11:59:48 -0700 Subject: [PATCH 0248/1339] Test and impl for resource path parsing methods in generated clients (#391) Given a fully qualified resource path, it is sometimes desirable to parse out the component segments. This change adds generated methods to do this parsing to gapic client classes, accompanying generated unit tests, logic in the wrapper schema to support this feature, and generator unit tests for the schema logic. 
--- .../%sub/services/%service/client.py.j2 | 6 +++ .../%name_%version/%sub/test_%service.py.j2 | 15 ++++++- .../gapic-generator/gapic/schema/wrappers.py | 32 ++++++++++++++- .../%sub/services/%service/client.py.j2 | 6 +++ .../%name_%version/%sub/test_%service.py.j2 | 27 ++++++++---- .../unit/schema/wrappers/test_message.py | 41 +++++++++++++++++++ 6 files changed, 117 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 7f5cf6b73f15..ff7e2c4d484e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -120,6 +120,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + + @staticmethod + def parse_{{ message.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: + """Parse a {{ message.resource_type|snake_case }} path into its component segments.""" + m = re.match(r"{{ message.path_regex_str }}", path) + return m.groupdict() if m else {} {% endfor %} def __init__(self, *, diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 32e66ba8ad1c..5cd0019b5925 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -693,7 +693,7 @@ def test_{{ 
service.name|snake_case }}_grpc_lro_client(): {% endif -%} {% for message in service.resource_messages -%} -{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle") -%} +{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args -%} {{ arg }} = "{{ molluscs.next() }}" @@ -702,6 +702,19 @@ def test_{{ message.resource_type|snake_case }}_path(): actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) assert expected == actual + +def test_parse_{{ message.resource_type|snake_case }}_path(): + expected = { + {% for arg in message.resource_path_args -%} + "{{ arg }}": "{{ molluscs.next() }}", + {% endfor %} + } + path = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path(**expected) + + # Check that the path construction is reversible. 
+ actual = {{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path(path) + assert expected == actual + {% endwith -%} {% endfor -%} diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 09744ac3d9cd..1c0ae4d85a7a 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -209,6 +209,10 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': @dataclasses.dataclass(frozen=True) class MessageType: """Description of a message (defined with the ``message`` keyword).""" + # Class attributes + PATH_ARG_RE = re.compile(r'\{([a-zA-Z0-9_-]+)\}') + + # Instance attributes message_pb: descriptor_pb2.DescriptorProto fields: Mapping[str, Field] nested_enums: Mapping[str, 'EnumType'] @@ -278,8 +282,32 @@ def resource_type(self) -> Optional[str]: @property def resource_path_args(self) -> Sequence[str]: - path_arg_re = re.compile(r'\{([a-zA-Z0-9_-]+)\}') - return path_arg_re.findall(self.resource_path or '') + return self.PATH_ARG_RE.findall(self.resource_path or '') + + @utils.cached_property + def path_regex_str(self) -> str: + # The indirection here is a little confusing: + # we're using the resource path template as the base of a regex, + # with each resource ID segment being captured by a regex. + # E.g., the path schema + # kingdoms/{kingdom}/phyla/{phylum} + # becomes the regex + # ^kingdoms/(?P.+?)/phyla/(?P.+?)$ + parsing_regex_str = ( + "^" + + self.PATH_ARG_RE.sub( + # We can't just use (?P[^/]+) because segments may be + # separated by delimiters other than '/'. + # Multiple delimiter characters within one schema are allowed, + # e.g. 
+ # as/{a}-{b}/cs/{c}%{d}_{e} + # This is discouraged but permitted by AIP4231 + lambda m: "(?P<{name}>.+?)".format(name=m.groups()[0]), + self.resource_path or '' + ) + + "$" + ) + return parsing_regex_str def get_field(self, *field_path: str, collisions: FrozenSet[str] = frozenset()) -> Field: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 5c5fd5157bce..4099fe5b10de 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -120,6 +120,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + + @staticmethod + def parse_{{ message.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: + """Parse a {{ message.resource_type|snake_case }} path into its component segments.""" + m = re.match(r"{{ message.path_regex_str }}", path) + return m.groupdict() if m else {} {% endfor %} def __init__(self, *, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 4e3e89a32966..aac51fe7910d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -692,14 +692,27 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): {% endif -%} {% for message in service.resource_messages -%} -{% with molluscs = cycler("squid", 
"clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle") -%} +{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} def test_{{ message.resource_type|snake_case }}_path(): - {% for arg in message.resource_path_args -%} - {{ arg }} = "{{ molluscs.next() }}" - {% endfor %} - expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) - actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) - assert expected == actual + {% for arg in message.resource_path_args -%} + {{ arg }} = "{{ molluscs.next() }}" + {% endfor %} + expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) + assert expected == actual + + +def test_parse_{{ message.resource_type|snake_case }}_path(): + expected = { + {% for arg in message.resource_path_args -%} + "{{ arg }}": "{{ molluscs.next() }}", + {% endfor %} + } + path = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path(**expected) + + # Check that the path construction is reversible. + actual = {{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path(path) + assert expected == actual {% endwith -%} {% endfor -%} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 99b0751e0530..7ae95d02990f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -13,6 +13,7 @@ # limitations under the License. 
import collections +import re from typing import Sequence, Tuple import pytest @@ -181,6 +182,46 @@ def test_resource_path(): assert message.resource_type == "Class" +def test_parse_resource_path(): + options = descriptor_pb2.MessageOptions() + resource = options.Extensions[resource_pb2.resource] + resource.pattern.append( + "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" + ) + resource.type = "taxonomy.biology.com/Klass" + message = make_message('Klass', options=options) + + # Plausible resource ID path + path = "kingdoms/animalia/phyla/mollusca/classes/cephalopoda" + expected = { + 'kingdom': 'animalia', + 'phylum': 'mollusca', + 'klass': 'cephalopoda', + } + actual = re.match(message.path_regex_str, path).groupdict() + + assert expected == actual + + options2 = descriptor_pb2.MessageOptions() + resource2 = options2.Extensions[resource_pb2.resource] + resource2.pattern.append( + "kingdoms-{kingdom}_{phylum}#classes%{klass}" + ) + resource2.type = "taxonomy.biology.com/Klass" + message2 = make_message('Klass', options=options2) + + # Plausible resource ID path from a non-standard schema + path2 = "kingdoms-Animalia/_Mollusca~#classes%Cephalopoda" + expected2 = { + 'kingdom': 'Animalia/', + 'phylum': 'Mollusca~', + 'klass': 'Cephalopoda', + } + actual2 = re.match(message2.path_regex_str, path2).groupdict() + + assert expected2 == actual2 + + def test_field_map(): # Create an Entry message. 
entry_msg = make_message( From f602735a7cfc0eb58fe4ae29d5a8a0ac4c4ba053 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 16 Apr 2020 16:12:14 -0700 Subject: [PATCH 0249/1339] fix: add system tests and cert/key (#394) * fix: add mtls system tests and testing cert/key * update --- packages/gapic-generator/.circleci/config.yml | 108 +++++++++++++++- packages/gapic-generator/noxfile.py | 62 +++++++++ packages/gapic-generator/tests/cert/mtls.crt | 21 ++++ packages/gapic-generator/tests/cert/mtls.key | 28 +++++ .../gapic-generator/tests/system/conftest.py | 118 ++++++++++++++---- 5 files changed, 309 insertions(+), 28 deletions(-) create mode 100644 packages/gapic-generator/tests/cert/mtls.crt create mode 100644 packages/gapic-generator/tests/cert/mtls.key diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index adb3b67267a1..debcecea558b 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -91,6 +91,17 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase-mtls: + requires: + - docs + - mypy + - showcase-unit-3.6 + - showcase-unit-3.7 + - showcase-unit-3.8 + - showcase-mypy + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - showcase-alternative-templates: requires: - docs @@ -102,6 +113,17 @@ workflows: filters: tags: only: /^\d+\.\d+\.\d+$/ + - showcase-mtls-alternative-templates: + requires: + - docs + - mypy + - showcase-unit-alternative-templates-3.6 + - showcase-unit-alternative-templates-3.7 + - showcase-unit-alternative-templates-3.8 + - showcase-mypy-alternative-templates + filters: + tags: + only: /^\d+\.\d+\.\d+$/ - docs: filters: tags: @@ -207,7 +229,7 @@ jobs: showcase: docker: - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.6.1 + - image: gcr.io/gapic-images/gapic-showcase:0.8.1 steps: - checkout - run: @@ -229,10 +251,51 @@ jobs: - run: name: Run 
showcase tests. command: nox -s showcase + showcase-mtls: + working_directory: /tmp/workspace + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install nox. + command: pip install nox + - run: + name: Install protoc 3.7.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Run showcase tests. + command: | + mkdir gapic_showcase + cd gapic_showcase + curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.8.1/gapic-showcase-0.8.1-linux-amd64.tar.gz | tar xz + ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & + showcase_pid=$! + + cleanup() { + echo "kill showcase server" + kill $showcase_pid + # Wait for the process to die, but don't report error from the kill. + wait $showcase_pid || exit $exit_code + } + trap cleanup EXIT + + cd .. + nox -s showcase_mtls showcase-alternative-templates: docker: - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.6.1 + - image: gcr.io/gapic-images/gapic-showcase:0.8.1 steps: - checkout - run: @@ -254,6 +317,47 @@ jobs: - run: name: Run showcase tests. command: nox -s showcase_alternative_templates + showcase-mtls-alternative-templates: + working_directory: /tmp/workspace + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install nox. + command: pip install nox + - run: + name: Install protoc 3.7.1. 
+ command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + cd /usr/src/protoc/ + unzip protoc-3.7.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Run showcase tests. + command: | + mkdir gapic_showcase + cd gapic_showcase + curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.8.1/gapic-showcase-0.8.1-linux-amd64.tar.gz | tar xz + ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & + showcase_pid=$! + + cleanup() { + echo "kill showcase server" + kill $showcase_pid + # Wait for the process to die, but don't report error from the kill. + wait $showcase_pid || exit $exit_code + } + trap cleanup EXIT + + cd .. + nox -s showcase_mtls_alternative_templates showcase-unit-3.6: docker: - image: python:3.6-slim diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index cfee241ad299..9a6ed566feda 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -67,6 +67,7 @@ def showcase( session.log('-' * 70) # Install pytest and gapic-generator-python + session.install('mock') session.install('pytest') session.install('-e', '.') @@ -103,12 +104,73 @@ def showcase( ) +@nox.session(python='3.8') +def showcase_mtls( + session, + templates='DEFAULT', + other_opts: typing.Iterable[str] = (), +): + """Run the Showcase mtls test suite.""" + + # Try to make it clear if Showcase is not running, so that + # people do not end up with tons of difficult-to-debug failures over + # an obvious problem. 
+ if not os.environ.get('CIRCLECI'): + session.log('-' * 70) + session.log('Note: Showcase must be running for these tests to work.') + session.log('See https://github.com/googleapis/gapic-showcase') + session.log('-' * 70) + + # Install pytest and gapic-generator-python + session.install('mock') + session.install('pytest') + session.install('-e', '.') + + # Install a client library for Showcase. + with tempfile.TemporaryDirectory() as tmp_dir: + # Download the Showcase descriptor. + session.run( + 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' + f'download/v{showcase_version}/' + f'gapic-showcase-{showcase_version}.desc', + '-L', '--output', path.join(tmp_dir, 'showcase.desc'), + external=True, + silent=True, + ) + + # Write out a client library for Showcase. + template_opt = f'python-gapic-templates={templates}' + opts = f'--python_gapic_opt={template_opt}' + opts += ','.join(other_opts + ('lazy-import',)) + session.run( + 'protoc', + f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', + f'--python_gapic_out={tmp_dir}', + 'google/showcase/v1beta1/echo.proto', + 'google/showcase/v1beta1/identity.proto', + external=True, + ) + + # Install the library. 
+ session.install(tmp_dir) + + session.run( + 'py.test', '--quiet', '--mtls', *(session.posargs or [path.join('tests', 'system')]) + ) + + @nox.session(python='3.8') def showcase_alternative_templates(session): templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') showcase(session, templates=templates, other_opts=('old-naming',)) +@nox.session(python='3.8') +def showcase_mtls_alternative_templates(session): + templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') + showcase_mtls(session, templates=templates, other_opts=('old-naming',)) + + @nox.session(python=['3.6', '3.7', '3.8']) def showcase_unit( session, diff --git a/packages/gapic-generator/tests/cert/mtls.crt b/packages/gapic-generator/tests/cert/mtls.crt new file mode 100644 index 000000000000..f59c43474caa --- /dev/null +++ b/packages/gapic-generator/tests/cert/mtls.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbDCCAlSgAwIBAgIJALV2ZblaPmp2MA0GCSqGSIb3DQEBCwUAMEoxCzAJBgNV +BAYTAlVTMRMwEQYDVQQIDApTb21lLVN0YXRlMRIwEAYDVQQKDAlsb2NhbGhvc3Qx +EjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0yMDA0MTUwNjE2NDRaGA8zMDE5MDgxNzA2 +MTY0NFowSjELMAkGA1UEBhMCVVMxEzARBgNVBAgMClNvbWUtU3RhdGUxEjAQBgNV +BAoMCWxvY2FsaG9zdDESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEAxOcBZ3f679mn90KA7RzBTr8zwKqcI/7OcJ2GooZh +JvZpD/M6TqhopIgf29O082QrJZLo29lSyVtufTalmg9U4lNDFPAm/BvX7ydaHSdN +FZzn1BInhqvtBXMOy1nGegr4QtgdFSlShuhT8Lo3XxMERP+/Nhyv8wPEy+MTxym3 +WxbJPPhsmQQ42gIgRyqWHVbj6vpCRHp7l81Kh+/wcbC+C/5ARw0vgPIDAAk9iWBU +TJS1q0ghUZyITeafw6fMVqgMAT7vM2WZzfOeOsLunm3t4DQCsJxFrvKQkgi3loXa +MueqepuF0UZIChg/o4k6ecJ2qxD7ad04UsvX1pRBvKKvNQIDAQABo1MwUTAdBgNV +HQ4EFgQUwqm+cCEtQM+Vu05zLforb4IssBswHwYDVR0jBBgwFoAUwqm+cCEtQM+V +u05zLforb4IssBswDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEA +oqO8ZN92cWNB0TAd9WyPfGz1szn2pRWgOMomEMkry4ESGhOKivrY5CcyMZddfh2a +qmbB0i/pw6/YUVHuhVN369xB/L5pi5UJC+nqdA8p2zSuRidH7cUIxhXTCU6wr8H/ +dZV/tYXmvyRoB7tHh3Jzy1/BhowvCWkBNfAGuFRGb+nlJ2i3Nu9bej32ql4U3zPF 
+TuOtYH4hSlGa1jBjFp8XM1RiwSA4EkZ79J8Vb0h8IFeMPxobAUiBPLfU+jbmmC90 +aaZI2IhjUUkfUvatLL8brGeo9KdzepaXhQj62OUOyz1ZmAox3TPZNOXgv8+9d8hG +q5TMYoc9yklgNpo+VPtbug== +-----END CERTIFICATE----- diff --git a/packages/gapic-generator/tests/cert/mtls.key b/packages/gapic-generator/tests/cert/mtls.key new file mode 100644 index 000000000000..a4f01089dfd5 --- /dev/null +++ b/packages/gapic-generator/tests/cert/mtls.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDE5wFnd/rv2af3 +QoDtHMFOvzPAqpwj/s5wnYaihmEm9mkP8zpOqGikiB/b07TzZCslkujb2VLJW259 +NqWaD1TiU0MU8Cb8G9fvJ1odJ00VnOfUEieGq+0Fcw7LWcZ6CvhC2B0VKVKG6FPw +ujdfEwRE/782HK/zA8TL4xPHKbdbFsk8+GyZBDjaAiBHKpYdVuPq+kJEenuXzUqH +7/BxsL4L/kBHDS+A8gMACT2JYFRMlLWrSCFRnIhN5p/Dp8xWqAwBPu8zZZnN8546 +wu6ebe3gNAKwnEWu8pCSCLeWhdoy56p6m4XRRkgKGD+jiTp5wnarEPtp3ThSy9fW +lEG8oq81AgMBAAECggEAQma4xYDjogEfsLW/rra0xe6a8E1YzJbAXZ/x6Fsy5iXQ +9m0K673FVD8Hp2V0r2PHXSt21bUrQvZPg3BrVlH3ST/U7nmyW/Cz2FXIAO7hAvng +AFeC9tqB2wWbJp6G3V9Xq4sf+6PszcwJirPxumE6Xl50yDXSbDyIIE3avJ5n1BJ6 +S9RrjABVzIQ21/7mjgt8kkz4n7bOuHHkYH24D/NgTjcec/OXU/zcQlHDb3a0MRsG +GjYAWVRM5mg/BJ7Tq0zkibrubWv+Ns2fU9lj3FNVSMpuCqFidwkMcVgreOsdTFLo +GnoZCsHRTsSZNOs5RkFvEKcCzpyAjeNC5aOqpJOYIQKBgQDw0hJe9LhZu2TB0uqr +E1X4a5UdrMolSBSx6hWpSPpSZR4TfVv1nvMG8joiQBt+JJRLUqiMMjLVPvMeM7I7 +XtzgVCcSetIQoyXWOmSBi1aT8m8BJpyWTzTLimiENtPwgFZLOFkiBNuXVjI9cWD6 +9iLHQ766WrBusFJ8VtRpNUghGQKBgQDRUEK24/KFa84PfTTZjJGcJMj9qkNkarPr +KmD1+e9BmRkt/d/ApX6vtQMVS4mfGoQr989VYGkDbQorYo4RMT1fAW/A6tG94nf+ +okUzhWFZZP+kXPtIR11Du9R0XbUbehvp5L72A8ZBLEuR+83N6Ywv3DHcuudYMi1c +1Q2TypF2fQKBgHtzdS9wTEDTq6cgtGPWma+rltmLhmTuOo2p7kjFvG0YoP5kuQuE +3Bine92q920G225xhS25Xg2rI35MCgYdbyUgfpcelykoOXFEpdky0MMu+HCilosH +N41P+pwsbyFO6O3OiQLDcr511XNh99Eu9E0qEU/+xYs4oFBVQrZcNgmhAoGBALqW +58IN1gYmIh0gqlo8uxkMmbe3bjg3/odm0eS0lxpNFmsvY6ViYlrT7Bmxqs8QXj6r +vEIJndOWAnjGdIrS8DifGTZKngq9teZiVXomLq/4HwQwdzjplTRqXmwVlPsXkYah +ibHZj4RNrlhGtiIXTgbkLfbtDopKwLF+o4naDG4pAoGBAMFWBV5EDu6V9lSKUojA 
+/V1PmbVU5qcaEpE6N6d3M0rk6u486JGwgzn9mcSBktqKJmYXIZwVHJYbJc/v8HNt +rjHp7WkDjQF05QQm7hWjPAN8RXRSbVDUQ9kG/uN6gTbjeH0qqmlFfdBoE26wO97N +Q5o2l+4C3QlHrO5ifRFvh3hX +-----END PRIVATE KEY----- diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 5aa782c94ca7..a3108c436d22 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -13,76 +13,142 @@ # limitations under the License. import collections +import mock +import os import pytest +import google.api_core.client_options as ClientOptions +from google import showcase +from google.auth import credentials from google.showcase import EchoClient from google.showcase import IdentityClient import grpc -@pytest.fixture -def echo(): - transport = EchoClient.get_transport_class('grpc')( - channel=grpc.insecure_channel('localhost:7469'), +dir = os.path.dirname(__file__) +with open(os.path.join(dir, "../cert/mtls.crt"), "rb") as fh: + cert = fh.read() +with open(os.path.join(dir, "../cert/mtls.key"), "rb") as fh: + key = fh.read() + +ssl_credentials = grpc.ssl_channel_credentials( + root_certificates=cert, certificate_chain=cert, private_key=key +) + + +def callback(): + return cert, key + + +client_options = ClientOptions.ClientOptions() +client_options.client_cert_source = callback + + +def pytest_addoption(parser): + parser.addoption( + "--mtls", action="store_true", help="Run system test with mutual TLS channel" ) - return EchoClient(transport=transport) @pytest.fixture -def identity(): - transport = IdentityClient.get_transport_class('grpc')( - channel=grpc.insecure_channel('localhost:7469'), - ) - return IdentityClient(transport=transport) +def use_mtls(request): + return request.config.getoption("--mtls") -class MetadataClientInterceptor(grpc.UnaryUnaryClientInterceptor, - grpc.UnaryStreamClientInterceptor, - grpc.StreamUnaryClientInterceptor, - grpc.StreamStreamClientInterceptor): 
+@pytest.fixture +def echo(use_mtls): + if use_mtls: + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: + mock_ssl_cred.return_value = ssl_credentials + client = EchoClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options, + ) + mock_ssl_cred.assert_called_once_with( + certificate_chain=cert, private_key=key + ) + return client + else: + transport = EchoClient.get_transport_class("grpc")( + channel=grpc.insecure_channel("localhost:7469") + ) + return EchoClient(transport=transport) + +@pytest.fixture +def identity(use_mtls): + if use_mtls: + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: + mock_ssl_cred.return_value = ssl_credentials + client = IdentityClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options, + ) + mock_ssl_cred.assert_called_once_with( + certificate_chain=cert, private_key=key + ) + return client + else: + transport = IdentityClient.get_transport_class("grpc")( + channel=grpc.insecure_channel("localhost:7469") + ) + return IdentityClient(transport=transport) + + +class MetadataClientInterceptor( + grpc.UnaryUnaryClientInterceptor, + grpc.UnaryStreamClientInterceptor, + grpc.StreamUnaryClientInterceptor, + grpc.StreamStreamClientInterceptor, +): def __init__(self, key, value): self._key = key self._value = value def _add_metadata(self, client_call_details): if client_call_details.metadata is not None: - client_call_details.metadata.append((self._key, self._value,)) + client_call_details.metadata.append((self._key, self._value)) def intercept_unary_unary(self, continuation, client_call_details, request): self._add_metadata(client_call_details) response = continuation(client_call_details, request) return response - def intercept_unary_stream(self, continuation, client_call_details, - request): + def intercept_unary_stream(self, continuation, client_call_details, request): self._add_metadata(client_call_details) 
response_it = continuation(client_call_details, request) return response_it - def intercept_stream_unary(self, continuation, client_call_details, - request_iterator): + def intercept_stream_unary( + self, continuation, client_call_details, request_iterator + ): self._add_metadata(client_call_details) response = continuation(client_call_details, request_iterator) return response - def intercept_stream_stream(self, continuation, client_call_details, - request_iterator): + def intercept_stream_stream( + self, continuation, client_call_details, request_iterator + ): self._add_metadata(client_call_details) response_it = continuation(client_call_details, request_iterator) return response_it @pytest.fixture -def intercepted_echo(): +def intercepted_echo(use_mtls): # The interceptor adds 'showcase-trailer' client metadata. Showcase server # echos any metadata with key 'showcase-trailer', so the same metadata # should appear as trailing metadata in the response. - interceptor = MetadataClientInterceptor('showcase-trailer', 'intercepted') - channel = grpc.insecure_channel('localhost:7469') + interceptor = MetadataClientInterceptor("showcase-trailer", "intercepted") + host = "localhost:7469" + channel = ( + grpc.secure_channel(host, ssl_credentials) + if use_mtls + else grpc.insecure_channel(host) + ) intercept_channel = grpc.intercept_channel(channel, interceptor) - transport = EchoClient.get_transport_class('grpc')( - channel=intercept_channel, + transport = EchoClient.get_transport_class("grpc")( + channel=intercept_channel ) return EchoClient(transport=transport) From 10140f7323c8e5b7fe99dc37fa637d2b96405464 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 16 Apr 2020 16:28:48 -0700 Subject: [PATCH 0250/1339] release 0.21.0 (#395) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 
11864f1ca944..0ed5e29be920 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.20.0', + version='0.21.0', license='Apache 2.0', author='Dov Shlachter', author_email='dovs@google.com', From aca1dcc533be41096507d4999398cff75a2bdc62 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Fri, 17 Apr 2020 15:12:15 -0700 Subject: [PATCH 0251/1339] fix: lint issue (#396) --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../tests/unit/%name_%version/%sub/test_%service.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../tests/unit/%name_%version/%sub/test_%service.py.j2 | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index ff7e2c4d484e..12e92de7d199 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -170,7 +170,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): 'provide its credentials directly.') self._transport = transport elif client_options is None or ( - client_options.api_endpoint == None + client_options.api_endpoint is None and client_options.client_cert_source is None ): # Don't trigger mTLS if we get an empty ClientOptions. 
diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 5cd0019b5925..acbc8c17302d 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -42,7 +42,7 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert {{ service.client_name }}._get_default_mtls_endpoint(None) == None + assert {{ service.client_name }}._get_default_mtls_endpoint(None) is None assert {{ service.client_name }}._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint assert {{ service.client_name }}._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint assert {{ service.client_name }}._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 4099fe5b10de..a3c9d5b9b318 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -170,7 +170,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): 'provide its credentials directly.') self._transport = transport elif client_options is None or ( - client_options.api_endpoint == None + client_options.api_endpoint is None and client_options.client_cert_source is None ): # Don't trigger mTLS if we get an empty ClientOptions. 
diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index aac51fe7910d..3a1aa0e35089 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -42,7 +42,7 @@ def test__get_default_mtls_endpoint(): sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert {{ service.client_name }}._get_default_mtls_endpoint(None) == None + assert {{ service.client_name }}._get_default_mtls_endpoint(None) is None assert {{ service.client_name }}._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint assert {{ service.client_name }}._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint assert {{ service.client_name }}._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint From 757f44bbafa58f42714bcce7598bb6916a6e3759 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Mon, 20 Apr 2020 09:09:23 -0700 Subject: [PATCH 0252/1339] release 0.21.1 (#397) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0ed5e29be920..782c8f6fc6c3 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.21.0', + version='0.21.1', license='Apache 2.0', author='Dov Shlachter', author_email='dovs@google.com', From ecee115d56c91448335fb76bd03104c37898f1a3 Mon Sep 17 00:00:00 2001 From: Adam Ohren Date: Mon, 20 Apr 2020 17:37:49 +0100 Subject: [PATCH 0253/1339] Add markupsafe dependency (#398) Co-authored-by: Adam Ohren Co-authored-by: Dov Shlachter --- 
packages/gapic-generator/BUILD.bazel | 3 ++- packages/gapic-generator/requirements.txt | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 21f0bf88f2f2..cf7a87fbf5ef 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -13,8 +13,9 @@ py_binary( requirement("googleapis-common-protos"), requirement("grpcio"), requirement("jinja2"), + requirement("MarkupSafe"), requirement("pypandoc"), requirement("PyYAML"), ], python_version = "PY3", -) \ No newline at end of file +) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index baaa5dd77375..77abdddb2fee 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,6 +3,7 @@ google-api-core==1.17.0 googleapis-common-protos==1.51.0 grpcio==1.28.1 jinja2==2.11.2 +MarkupSafe==1.1.1 protobuf==3.11.3 pypandoc==1.5 PyYAML==5.3.1 From 47b11500b5c8dd42da65941f7d89310c1a71c0f8 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 22 Apr 2020 10:44:23 -0700 Subject: [PATCH 0254/1339] Fix for lazy import feature in alternative templates (#399) This change places lazy imports only at the top levels of the generated GAPIC. The resulting surface is easier to test and maintains operational simplicity. 
--- .../%name/%version/%sub/__init__.py.j2 | 95 --------------- .../%name/%version/%sub/types/__init__.py.j2 | 52 -------- .../%namespace/%name/%version/__init__.py.j2 | 112 ++++++++++++++++++ .../%namespace/%name/__init__.py.j2 | 80 +++---------- .../gapic/ads-templates/.coveragerc.j2 | 3 + .../%name_%version/%sub/test_%service.py.j2 | 68 ----------- .../%name_%version/test_module_import.py.j2 | 79 ++++++++++++ .../gapic/generator/generator.py | 1 - packages/gapic-generator/gapic/schema/api.py | 18 +++ .../gapic-generator/test_utils/test_utils.py | 13 ++ .../tests/unit/schema/test_api.py | 52 ++++++++ 11 files changed, 294 insertions(+), 279 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/test_module_import.py.j2 diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 index 372ce3e2507e..df685b243801 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 @@ -1,100 +1,6 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} -import importlib -import re -import sys - -from itertools import chain - -def to_snake_case(s: str) -> str: - s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) - s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) - - # Numbers are a weird case; the goal is to spot when they _start_ - # some kind of name or acronym (e.g. 2FA, 3M). - # - # Find cases of a number preceded by a lower-case letter _and_ - # followed by at least two capital letters or a single capital and - # end of string. 
- s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) - - return s.lower() - - -def from_snake_case(s): - _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') - return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) - - -if sys.version_info < (3, 7): - raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER - -_lazy_name_to_package_map = { - 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', - {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', - '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.base', - '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.grpc', - {%- endfor %} -} - -_lazy_type_to_package_map = { -{%- filter sort_lines %} -{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %} -{%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ 
api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %}{%- endfor %}{%- endfilter %} -} - -# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ -def __getattr__(name): # Requires Python >= 3.7 - if name == '__all__': - all_names = globals()['__all__'] = sorted( - chain( - (from_snake_case(k) for k in _lazy_name_to_package_map if k != 'types'), - _lazy_type_to_package_map, - ['types'], - ) - ) - return all_names - elif name.endswith('Transport'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) - globals()[name] = klass - return klass - elif name.endswith('Client'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type( - name, - (sub_mod_class,), - {'__doc__': sub_mod_class.__doc__} - ) - globals()[name] = klass - return klass - elif name in _lazy_name_to_package_map: - module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') - globals()[name] = module - return module - elif name in _lazy_type_to_package_map: - module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') - klass = getattr(module, name) - {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} - globals()[name] = klass - return klass - else: - raise AttributeError(f'unknown sub-module {name!r}.') - - -def __dir__(): - return globals().get('__all__') or __getattr__('__all__') -{% else -%} {# do not use lazy import #} {# Import subpackages. -#} {% for subpackage in api.subpackages.keys() -%} from . 
import {{ subpackage }} @@ -149,5 +55,4 @@ __all__ = ( {%- endfor %} {%- endfilter %} ) -{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 index 8b341523337a..9cee1e99950c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/__init__.py.j2 @@ -1,53 +1 @@ {% extends '_base.py.j2' %} - -{% block content %} -{% if opts.lazy_import -%} {# lazy import #} -import importlib -import sys - - -if sys.version_info < (3, 7): - raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER - - -_lazy_type_to_package_map = { - {%- filter sort_lines %} -{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %} -{%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %}{%- endfor %}{%- endfilter %} -} - - -# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ -def __getattr__(name): # Requires Python >= 3.7 - if name == '__all__': - all_names = globals()['__all__'] = sorted(_lazy_type_to_package_map) - return all_names - elif name in _lazy_type_to_package_map: - module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') - klass = getattr(module, name) - {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} - 
globals()[name] = klass - return klass - else: - raise AttributeError(f'unknown sub-module {name!r}.') - - -def __dir__(): - return globals().get('__all__') or __getattr__('__all__') - -{% else -%} -{% for p in api.protos.values() if p.file_to_generate and p.messages -%} -from .{{p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) -{% endfor %} - -__all__ = ( -{%- for p in api.protos.values() if p.file_to_generate %}{% for m in p.messages.values() %} - '{{ m.name }}', -{%- endfor %}{% endfor %} -) -{% endif -%} {# lazy import #} -{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 new file mode 100644 index 000000000000..749a408c4235 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -0,0 +1,112 @@ +{% extends '_base.py.j2' %} +{% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import importlib +import sys + + +if sys.version_info < (3, 7): + raise ImportError('This module requires Python 3.7 or later.') + + +_lazy_type_to_package_map = { + # Message types +{%- for message in api.top_level_messages.values() %} + '{{ message.name }}': '{{ message.ident.package|join('.') }}.types.{{ message.ident.module }}', +{%- endfor %} + + # Enum types +{%- for enum in api.top_level_enums.values() %} + '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', +{%- endfor %} + + # Client classes and transports +{%- for service in api.services.values() %} + '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', + '{{ service.transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', + '{{ service.grpc_transport_name }}': '{{ service.meta.address.package|join('.') 
}}.services.{{ service.meta.address.module }}.transports', +{%- endfor %} +} + + +# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ +def __getattr__(name): # Requires Python >= 3.7 + if name == '__all__': + all_names = globals()['__all__'] = sorted(_lazy_type_to_package_map) + return all_names + elif name in _lazy_type_to_package_map: + module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') + klass = getattr(module, name) + {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} + globals()[name] = klass + return klass + else: + raise AttributeError(f'unknown type {name!r}.') + + +def __dir__(): + return globals().get('__all__') or __getattr__('__all__') +{% else -%} {# do not use lazy import #} +{# Import subpackages. -#} +{% filter sort_lines -%} +{% for subpackage in api.subpackages.keys() -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }} import {{ subpackage }} +{% endfor -%} + +{# Import services for this package. -#} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +{% endfor -%} + +{# Import messages and enums from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. +-#} +{# Import messages from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. 
+ -#} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} + {% for message in proto.messages.values()|sort(attribute='name') -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +{% endfor -%} +{% for enum in proto.enums.values()|sort(attribute='name') -%} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +{% endfor %}{% endfor -%} +{% endfilter %} +{# Define __all__. + This requires the full set of imported names, so we iterate over + them again. +-#} +__all__ = ( +{%- filter indent %} +{% filter sort_lines -%} +{% for subpackage in api.subpackages.keys() -%} +'{{ subpackage }}', +{% endfor -%} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view -%} +'{{ service.client_name }}', +{% endfor -%} +{% for proto in api.protos.values()|sort(attribute='module_name') + if proto.meta.address.subpackage == api.subpackage_view -%} +{% for message in proto.messages.values()|sort(attribute='name') -%} +'{{ message.name }}', +{% endfor -%} +{% for enum in proto.enums.values()|sort(attribute='name') + if proto.meta.address.subpackage == api.subpackage_view -%} +'{{ enum.name }}', +{% endfor -%}{% endfor -%} +{% endfilter -%} +{% endfilter -%} +) +{% endif -%} {# lazy import #} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 index 1ea3128c5776..749a408c4235 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -2,84 +2,38 @@ {% block content %} {% if opts.lazy_import -%} {# lazy import #} import importlib -import re import sys -from itertools import chain - -def to_snake_case(s: str) -> str: - s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) - s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) - - # Numbers are a weird case; the goal is to spot when they _start_ - # some kind of name or acronym (e.g. 2FA, 3M). - # - # Find cases of a number preceded by a lower-case letter _and_ - # followed by at least two capital letters or a single capital and - # end of string. - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) - - return s.lower() - - -def from_snake_case(s): - _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') - return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) - if sys.version_info < (3, 7): raise ImportError('This module requires Python 3.7 or later.') -_lazy_name_to_package_map = { - 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', - {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', - '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', - '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name 
}}.services.transports.grpc', - {%- endfor %} {# Need to do types and enums #} -} _lazy_type_to_package_map = { -{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', + # Message types +{%- for message in api.top_level_messages.values() %} + '{{ message.name }}': '{{ message.ident.package|join('.') }}.types.{{ message.ident.module }}', +{%- endfor %} + + # Enum types +{%- for enum in api.top_level_enums.values() %} + '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', +{%- endfor %} + + # Client classes and transports +{%- for service in api.services.values() %} + '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', + '{{ service.transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', + '{{ service.grpc_transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', {%- endfor %} -{%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %}{%- endfor %} } # Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ def __getattr__(name): # Requires Python >= 3.7 if name == '__all__': - all_names = globals()['__all__'] = sorted( - chain( - (from_snake_case(k) for k in _lazy_name_to_package_map), - _lazy_type_to_package_map, - ) - ) + all_names = globals()['__all__'] = sorted(_lazy_type_to_package_map) return all_names - elif name.endswith('Transport'): - module = 
__getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) - globals()[name] = klass - return klass - elif name.endswith('Client'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type( - name, - (sub_mod_class,), - {'__doc__': sub_mod_class.__doc__} - ) - globals()[name] = klass - return klass - elif name in _lazy_name_to_package_map: - module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') - globals()[name] = module - return module elif name in _lazy_type_to_package_map: module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') klass = getattr(module, name) @@ -87,7 +41,7 @@ def __getattr__(name): # Requires Python >= 3.7 globals()[name] = klass return klass else: - raise AttributeError(f'unknown sub-module {name!r}.') + raise AttributeError(f'unknown type {name!r}.') def __dir__(): diff --git a/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 b/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 index f2ac95dda9d7..9d0b8441dff7 100644 --- a/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 +++ b/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 @@ -16,3 +16,6 @@ exclude_lines = # generates the code and tries to run it without pip installing. This # makes it virtually impossible to test properly. except pkg_resources.DistributionNotFound + # This is used to indicate a python version mismatch, + # which is not easily tested in unit tests. 
+ raise ImportError diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index acbc8c17302d..5e2e5e287086 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -420,74 +420,6 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% endfor -%} {#- method in methods #} -{% if opts.lazy_import -%} {# lazy import #} -def test_module_level_imports(): - # Use the other transport import path so that code gets tested. - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name }}GrpcTransport - transport = {{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.client_name }} - client = {{ service.client_name }}(transport=transport) - assert client._transport is transport - - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name|snake_case }}_grpc_transport - transport2 = {{ service.name|snake_case }}_grpc_transport.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - - client2 = {{ service.client_name }}(transport=transport2) - assert client2._transport is transport2 - - {% with type_name = cycler(*service.methods.values()).next().input.name -%} - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name }} - type_ = {{ type_name }}() - - try: - from {{ (api.naming.module_namespace + 
(api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name|lower }}_squidification - except (AttributeError, ImportError) as e: - pass - else: - assert False - {% endwith -%} - - import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} as mod - all_names = dir(mod) - expected_names = sorted([ - 'types', - {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name }}', - '{{ service.transport_name }}', - '{{ service.grpc_transport_name }}', - {%- endfor %} - {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}', - {%- endfor %} - {%- for enum in proto.enums.values() %} - '{{ enum.name }}' - {% endfor %}{%- endfor %} - ]) - assert all_names == expected_names - - {% with type_name = cycler(*service.methods.values()).next().input.name -%} - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types import {{ type_name }} - type_ = {{ type_name }}() - {% endwith -%} - - import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types as types - all_types = dir(types) - expected_types = sorted([ - {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}', - {%- endfor %} - {%- for enum in proto.enums.values() %} - '{{ enum.name }}', - {% endfor %}{%- endfor %} - ]) - assert all_types == expected_types - -{% endif -%} {# lazy import #} def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/test_module_import.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/test_module_import.py.j2 new file mode 100644 index 000000000000..158545d6c4d1 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/test_module_import.py.j2 @@ -0,0 +1,79 @@ +{% extends "_base.py.j2" %} +{% block content %} +{% if opts.lazy_import -%} {# lazy import #} +import pytest + + +def test_module_level_imports(): + expected_names = [] + + # Message types + {% for message in api.top_level_messages.values() %} + from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.module_name }} import {{ message.name }} + expected_names.append({{ message.name }}.__name__) + {%- endfor %} + + {% if api.top_level_enums %}# Enum types{% endif %} + {%- for enum in api.top_level_enums.values() %} + from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.module_name }} import {{ enum.name }} + expected_names.append({{ enum.name }}.__name__) + {%- endfor %} + + # Client and transport classes + {%- for service in api.services.values() %} + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.client_name }} + expected_names.append({{ service.client_name}}.__name__) + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.transport_name }} + expected_names.append({{ service.transport_name }}.__name__) + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.grpc_transport_name }} + expected_names.append({{ service.grpc_transport_name }}.__name__) + {%- endfor %} + + expected_names.sort() + from {{ api.naming.module_namespace|join('.') }} import {{ api.naming.module_name }} + actual_names = dir({{ api.naming.module_name }}) + assert expected_names == actual_names + + + # Verify the logic for handling non-existant names + with 
pytest.raises(ImportError): + from {{ api.naming.module_namespace|join('.' )}}.{{ api.naming.module_name }} import GiantSquid + + +def test_versionsed_module_level_imports(): + expected_names = [] + + # Message types + {% for message in api.top_level_messages.values() %} + from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.versioned_module_name }} import {{ message.name }} + expected_names.append({{ message.name }}.__name__) + {%- endfor %} + + {% if api.top_level_enums %}# Enum types{% endif %} + {%- for enum in api.top_level_enums.values() %} + from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.versioned_module_name }} import {{ enum.name }} + expected_names.append({{ enum.name }}.__name__) + {%- endfor %} + + # Client and transport classes + {%- for service in api.services.values() %} + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.client_name }} + expected_names.append({{ service.client_name}}.__name__) + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.transport_name }} + expected_names.append({{ service.transport_name }}.__name__) + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.grpc_transport_name }} + expected_names.append({{ service.grpc_transport_name }}.__name__) + {%- endfor %} + + expected_names.sort() + from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.module_name }} import {{ api.naming.version }} + actual_names = dir({{ api.naming.version }}) + assert expected_names == actual_names + + # Verify the logic for handling non-existant names + with pytest.raises(ImportError): + from {{ api.naming.module_namespace|join('.' 
)}}.{{ api.naming.versioned_module_name }} import GiantSquid + + +{% endif -%} {# lazy import #} +{% endblock %} diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 2913dab8bc58..a1bf4bba99a9 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -80,7 +80,6 @@ def get_response( files and contents. See ``plugin.proto``. """ output_files: Dict[str, CodeGeneratorResponse.File] = OrderedDict() - sample_templates, client_templates = utils.partition( lambda fname: os.path.basename( fname) == samplegen.DEFAULT_TEMPLATE_NAME, diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 734ccf337744..cc9b9cf1f05d 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -293,6 +293,24 @@ def messages(self) -> Mapping[str, wrappers.MessageType]: *[p.all_messages for p in self.protos.values()], ) + @cached_property + def top_level_messages(self) -> Mapping[str, wrappers.MessageType]: + """Return a map of all messages that are NOT nested.""" + return { + k: v + for p in self.protos.values() + for k, v in p.messages.items() + } + + @cached_property + def top_level_enums(self) -> Mapping[str, wrappers.EnumType]: + """Return a map of all messages that are NOT nested.""" + return { + k: v + for p in self.protos.values() + for k, v in p.enums.items() + } + @cached_property def protos(self) -> Mapping[str, Proto]: """Return a map of all protos specific to this API. 
diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index ffe6bae078e4..697d08e8dfc8 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -306,6 +306,19 @@ def make_naming(**kwargs) -> naming.Naming: return naming.NewNaming(**kwargs) +def make_enum_pb2( + name: str, + *values: typing.Sequence[str], + **kwargs +) -> desc.EnumDescriptorProto: + enum_value_pbs = [ + desc.EnumValueDescriptorProto(name=n, number=i) + for i, n in enumerate(values) + ] + enum_pb = desc.EnumDescriptorProto(name=name, value=enum_value_pbs, **kwargs) + return enum_pb + + def make_message_pb2( name: str, fields: tuple = (), diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index eccae782cb80..b519b0353a69 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -29,6 +29,7 @@ from gapic.schema import wrappers from test_utils.test_utils import ( + make_enum_pb2, make_field_pb2, make_file_pb2, make_message_pb2, @@ -106,6 +107,57 @@ def test_api_build(): assert 'google.example.v1.Foo' not in sub.messages +def test_top_level_messages(): + message_pbs = ( + make_message_pb2(name='Mollusc', nested_type=( + make_message_pb2(name='Squid'), + )), + ) + fds = ( + make_file_pb2( + messages=message_pbs, + package='google.example.v3', + ), + ) + api_schema = api.API.build(fds, package='google.example.v3') + actual = [m.name for m in api_schema.top_level_messages.values()] + expected = ['Mollusc'] + assert expected == actual + + +def test_top_level_enum(): + # Test that a nested enum works properly. 
+ message_pbs = ( + make_message_pb2(name='Coleoidea', enum_type=( + make_enum_pb2( + 'Superorder', + 'Decapodiformes', + 'Octopodiformes', + 'Palaeoteuthomorpha', + ), + )), + ) + enum_pbs = ( + make_enum_pb2( + 'Order', + 'Gastropoda', + 'Bivalvia', + 'Cephalopoda', + ), + ) + fds = ( + make_file_pb2( + messages=message_pbs, + enums=enum_pbs, + package='google.example.v3', + ), + ) + api_schema = api.API.build(fds, package='google.example.v3') + actual = [e.name for e in api_schema.top_level_enums.values()] + expected = ['Order'] + assert expected == actual + + def test_proto_build(): fdp = descriptor_pb2.FileDescriptorProto( name='my_proto_file.proto', From 83ed7fc8a52cefa5c6f382bc6e5baf4f06897114 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 28 Apr 2020 22:17:01 +0200 Subject: [PATCH 0255/1339] Update dependency click to v7.1.2 (#403) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 77abdddb2fee..ed6e2ab8e4df 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==7.1.1 +click==7.1.2 google-api-core==1.17.0 googleapis-common-protos==1.51.0 grpcio==1.28.1 From e005362e449c1c63bbecd51b12c5538c7cffcd4b Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 4 May 2020 14:52:53 -0700 Subject: [PATCH 0256/1339] feat: Make GAPIC Bazel rules production ready (#402) Shoud fix https://github.com/googleapis/gapic-generator-python/issues/400 and https://github.com/googleapis/gapic-generator-python/issues/390, plus a bunch of other not-yet-opened issues. This includes: 1) Fix long initial load time (5+ min). This was caused by python_rules buildling `grpcio` dependency from sources in one core (which was super slow). 
Switched to using bazel-native `"@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio"` target instead, which is not only much faster, but is also already used in googleapis, so there is no additional cost for reusing it in microgenerator rules. 2) Properly handle `pandoc` dependency (platform-sepcific version of pandoc is properly pulled by bazel itself using toolchains). 3) Add simplistic version of the `py_gapic_assembly_pkg` rule, to make output of microgenerator compatible with `GAPICBazel` class in synthtool. 4) Add `plugin_args` argument for python_gapic_library rule to pass custom argumetns to the plugin (similar to PHP rules). 5) Add compatibility with `python3.6` runtime (otherwise `python3.7` is minimum because of dependency on `dataclasses` module). Python 3.6 compatibility can be enabled by adding `--define=gapic_gen_python=3.6` command line argument to `bazel build` command. 6) Add support for Python runtimes installed with `pyenv`. To tell bazel using Python3 installed via pyenv add `--extra_toolchains=@gapic_generator_python//:pyenv3_toolchain` argument to `bazel build` command. 
--- packages/gapic-generator/BUILD.bazel | 62 ++++++++++++++-- packages/gapic-generator/WORKSPACE | 39 +++++++++-- .../gapic/cli/generate_with_pandoc.py | 9 +++ .../gapic_generator_python.bzl | 62 ++++++++++++++++ packages/gapic-generator/pyenv3wrapper.sh | 4 ++ packages/gapic-generator/repositories.bzl | 36 ++++++++++ packages/gapic-generator/requirements.txt | 2 +- .../rules_python_gapic/py_gapic.bzl | 4 +- .../rules_python_gapic/py_gapic_pkg.bzl | 70 +++++++++++++++++++ 9 files changed, 276 insertions(+), 12 deletions(-) create mode 100644 packages/gapic-generator/gapic/cli/generate_with_pandoc.py create mode 100644 packages/gapic-generator/gapic_generator_python.bzl create mode 100755 packages/gapic-generator/pyenv3wrapper.sh create mode 100644 packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index cf7a87fbf5ef..4d6451aa92ca 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -1,21 +1,73 @@ +load("//:gapic_generator_python.bzl", "pandoc_binary", "pandoc_toolchain") load("@gapic_generator_python_pip_deps//:requirements.bzl", "requirement") +load("@bazel_tools//tools/python:toolchain.bzl", "py_runtime_pair") + +toolchain_type( + name = "pandoc_toolchain_type", + visibility = ["//visibility:public"], +) + +pandoc_toolchain( + exec_compatible_with = [ + "@bazel_tools//platforms:linux", + "@bazel_tools//platforms:x86_64", + ], + platform = "linux", +) + +pandoc_toolchain( + exec_compatible_with = [ + "@bazel_tools//platforms:osx", + "@bazel_tools//platforms:x86_64", + ], + platform = "macOS", +) + +pandoc_binary( + name = "pandoc_binary", +) + +config_setting( + name = "gapic_gen_python_3_6", + values = {"define": "gapic_gen_python=3.6"}, +) + +py_runtime( + name = "pyenv3_runtime", + interpreter = ":pyenv3wrapper.sh", + python_version="PY3", +) + +py_runtime_pair( + name = "pyenv3_runtime_pair", + py3_runtime = 
":pyenv3_runtime", +) + +toolchain( + name = "pyenv3_toolchain", + toolchain = ":pyenv3_runtime_pair", + toolchain_type = "@bazel_tools//tools/python:toolchain_type", +) py_binary( name = "gapic_plugin", srcs = glob(["gapic/**/*.py"]), - data = glob(["gapic/**/*.j2"]), - main = "gapic/cli/generate.py", + data = [":pandoc_binary"] + glob(["gapic/**/*.j2"]), + main = "gapic/cli/generate_with_pandoc.py", + python_version = "PY3", visibility = ["//visibility:public"], deps = [ "@com_google_protobuf//:protobuf_python", + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", requirement("click"), requirement("google-api-core"), requirement("googleapis-common-protos"), - requirement("grpcio"), requirement("jinja2"), requirement("MarkupSafe"), requirement("pypandoc"), requirement("PyYAML"), - ], - python_version = "PY3", + ] + select({ + ":gapic_gen_python_3_6": [requirement("dataclasses")], + "//conditions:default": [], + }), ) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 57a357e7b982..b0cee19c28c1 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -2,9 +2,11 @@ workspace(name = "gapic_generator_python") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") -# -# Import rules_python -# +http_archive( + name = "bazel_skylib", + urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz"], +) + http_archive( name = "rules_python", strip_prefix = "rules_python-748aa53d7701e71101dfd15d800e100f6ff8e5d1", @@ -22,10 +24,15 @@ pip_repositories() # # Import gapic-generator-python specific dependencies # -load("//:repositories.bzl", "gapic_generator_python") +load("//:repositories.bzl", + "gapic_generator_python", + "gapic_generator_register_toolchains" +) gapic_generator_python() +gapic_generator_register_toolchains() + load("@gapic_generator_python_pip_deps//:requirements.bzl", "pip_install") pip_install() @@ -33,3 +40,27 @@ 
pip_install() load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") protobuf_deps() + +# +# Import grpc as a native bazel dependency. This avoids duplication and also +# speeds up loading phase a lot (otherwise python_rules will be building grpcio +# from sources in a single-core speed, which takes around 5 minutes on a regular +# workstation) +# +load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") + +grpc_deps() + +load("@upb//bazel:repository_defs.bzl", "bazel_version_repository") + +bazel_version_repository( + name = "bazel_version", +) + +load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") + +apple_rules_dependencies() + +load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies") + +apple_support_dependencies() diff --git a/packages/gapic-generator/gapic/cli/generate_with_pandoc.py b/packages/gapic-generator/gapic/cli/generate_with_pandoc.py new file mode 100644 index 000000000000..264d1c8b0b3c --- /dev/null +++ b/packages/gapic-generator/gapic/cli/generate_with_pandoc.py @@ -0,0 +1,9 @@ +import os + +from gapic.cli import generate + +if __name__ == '__main__': + os.environ['PYPANDOC_PANDOC'] = os.path.join( + os.path.abspath(__file__).rsplit("gapic", 1)[0], "pandoc") + os.environ['LC_ALL'] = 'C.UTF-8' + generate.generate() diff --git a/packages/gapic-generator/gapic_generator_python.bzl b/packages/gapic-generator/gapic_generator_python.bzl new file mode 100644 index 000000000000..c76f51a125aa --- /dev/null +++ b/packages/gapic-generator/gapic_generator_python.bzl @@ -0,0 +1,62 @@ +def _pandoc_binary_impl(ctx): + toolchain = ctx.toolchains["@gapic_generator_python//:pandoc_toolchain_type"] + output = ctx.actions.declare_file(ctx.attr.binary_name) + + script = """ + cp {input} {output} + chmod +x {output} + """.format( + input = toolchain.pandoc.files.to_list()[0].path, + output = output.path, + ) + ctx.actions.run_shell( + command = script, + inputs = toolchain.pandoc.files, + 
outputs = [output], + ) + return [DefaultInfo(files = depset(direct = [output]), executable = output)] + +pandoc_binary = rule( + attrs = { + "binary_name": attr.string(default = "pandoc") + }, + executable = True, + toolchains = ["@gapic_generator_python//:pandoc_toolchain_type"], + implementation = _pandoc_binary_impl, +) + +# +# Toolchains +# +def _pandoc_toolchain_info_impl(ctx): + return [ + platform_common.ToolchainInfo( + pandoc = ctx.attr.pandoc, + ), + ] + +_pandoc_toolchain_info = rule( + attrs = { + "pandoc": attr.label( + allow_single_file = True, + cfg = "host", + executable = True, + ), + }, + implementation = _pandoc_toolchain_info_impl, +) + +def pandoc_toolchain(platform, exec_compatible_with): + toolchain_info_name = "pandoc_toolchain_info_%s" % platform + _pandoc_toolchain_info( + name = toolchain_info_name, + pandoc = "@pandoc_%s//:pandoc" % platform, + visibility = ["//visibility:public"], + ) + + native.toolchain( + name = "pandoc_toolchain_%s" % platform, + exec_compatible_with = exec_compatible_with, + toolchain = toolchain_info_name, + toolchain_type = ":pandoc_toolchain_type", + ) diff --git a/packages/gapic-generator/pyenv3wrapper.sh b/packages/gapic-generator/pyenv3wrapper.sh new file mode 100755 index 000000000000..54176219f731 --- /dev/null +++ b/packages/gapic-generator/pyenv3wrapper.sh @@ -0,0 +1,4 @@ +#!/bin/sh + +HOME_DIR=$(getent passwd "$(whoami)" | cut -d: -f6) +exec "$HOME_DIR/.pyenv/shims/python3" "$@" diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index ed39838d4f50..a31c6440cfb8 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -1,6 +1,13 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") load("@rules_python//python:pip.bzl", "pip_import") +_PANDOC_BUILD_FILE = """ +filegroup( + name = "pandoc", + srcs = ["bin/pandoc"], + visibility = ["//visibility:public"], +)""" + def gapic_generator_python(): 
_maybe( pip_import, @@ -25,6 +32,29 @@ def gapic_generator_python(): urls = ["https://github.com/bazelbuild/bazel-skylib/archive/2169ae1c374aab4a09aa90e65efe1a3aad4e279b.tar.gz"], ) + _maybe( + http_archive, + name = "com_github_grpc_grpc", + strip_prefix = "grpc-8347f4753568b5b66e49111c60ae2841278d3f33", # this is 1.25.0 with fixes + urls = ["https://github.com/grpc/grpc/archive/8347f4753568b5b66e49111c60ae2841278d3f33.zip"], + ) + + _maybe( + http_archive, + name = "pandoc_linux", + build_file_content = _PANDOC_BUILD_FILE, + strip_prefix = "pandoc-2.2.1", + url = "https://github.com/jgm/pandoc/releases/download/2.2.1/pandoc-2.2.1-linux.tar.gz", + ) + + _maybe( + http_archive, + name = "pandoc_macOS", + build_file_content = _PANDOC_BUILD_FILE, + strip_prefix = "pandoc-2.2.1", + url = "https://github.com/jgm/pandoc/releases/download/2.2.1/pandoc-2.2.1-macOS.zip", + ) + _maybe( http_archive, name = "com_google_api_codegen", @@ -32,6 +62,12 @@ def gapic_generator_python(): urls = ["https://github.com/googleapis/gapic-generator/archive/b32c73219d617f90de70bfa6ff0ea0b0dd638dfe.zip"], ) +def gapic_generator_register_toolchains(): + native.register_toolchains( + "@gapic_generator_python//:pandoc_toolchain_linux", + "@gapic_generator_python//:pandoc_toolchain_macOS", + ) + def _maybe(repo_rule, name, strip_repo_prefix = "", **kwargs): if not name.startswith(strip_repo_prefix): return diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ed6e2ab8e4df..58e650047815 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,9 +1,9 @@ click==7.1.2 google-api-core==1.17.0 googleapis-common-protos==1.51.0 -grpcio==1.28.1 jinja2==2.11.2 MarkupSafe==1.1.1 protobuf==3.11.3 pypandoc==1.5 PyYAML==5.3.1 +dataclasses==0.6 \ No newline at end of file diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 
50d792b82bc9..f132e32dcee4 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -14,7 +14,7 @@ load("@com_google_api_codegen//rules_gapic:gapic.bzl", "proto_custom_library") -def py_gapic_library(name, srcs, **kwargs): +def py_gapic_library(name, srcs, plugin_args = [], **kwargs): # srcjar_target_name = "%s_srcjar" % name srcjar_target_name = name srcjar_output_suffix = ".srcjar" @@ -23,7 +23,7 @@ def py_gapic_library(name, srcs, **kwargs): name = srcjar_target_name, deps = srcs, plugin = Label("@gapic_generator_python//:gapic_plugin"), - plugin_args = [], + plugin_args = plugin_args, plugin_file_args = {}, output_type = "python_gapic", output_suffix = srcjar_output_suffix, diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl new file mode 100644 index 000000000000..ec80c87a84f2 --- /dev/null +++ b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl @@ -0,0 +1,70 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +load("@com_google_api_codegen//rules_gapic:gapic_pkg.bzl", "construct_package_dir_paths") + +def _py_gapic_src_pkg_impl(ctx): + srcjar_srcs = [] + for dep in ctx.attr.deps: + for f in dep.files.to_list(): + if f.extension in ("srcjar", "jar", "zip"): + srcjar_srcs.append(f) + + paths = construct_package_dir_paths(ctx.attr.package_dir, ctx.outputs.pkg, ctx.label.name) + + script = """ + mkdir -p {package_dir_path} + for srcjar_src in {srcjar_srcs}; do + unzip -q -o $srcjar_src -d {package_dir_path} + done + cd {package_dir_path}/.. + tar -zchpf {package_dir}/{package_dir}.tar.gz {package_dir} + cd - + mv {package_dir_path}/{package_dir}.tar.gz {pkg} + rm -rf {package_dir_path} + """.format( + srcjar_srcs = " ".join(["'%s'" % f.path for f in srcjar_srcs]), + package_dir_path = paths.package_dir_path, + package_dir = paths.package_dir, + pkg = ctx.outputs.pkg.path, + package_dir_expr = paths.package_dir_expr, + ) + + ctx.actions.run_shell( + inputs = srcjar_srcs, + command = script, + outputs = [ctx.outputs.pkg], + ) + +_py_gapic_src_pkg = rule( + attrs = { + "deps": attr.label_list(allow_files = True, mandatory = True), + "package_dir": attr.string(mandatory = True), + }, + outputs = {"pkg": "%{name}.tar.gz"}, + implementation = _py_gapic_src_pkg_impl, +) + +def py_gapic_assembly_pkg(name, deps, assembly_name = None, **kwargs): + package_dir = name + if assembly_name: + package_dir = "%s-%s" % (assembly_name, name) + _py_gapic_src_pkg( + name = name, + deps = deps, + package_dir = package_dir, + **kwargs + ) + + From bd7fdecf76be4be0003f5a04e210ffb60c9bf702 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 5 May 2020 10:45:40 -0700 Subject: [PATCH 0257/1339] Version bump to 0.22.0 (#408) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 782c8f6fc6c3..150f356ce422 100644 --- a/packages/gapic-generator/setup.py +++ 
b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.21.1', + version='0.22.0', license='Apache 2.0', author='Dov Shlachter', author_email='dovs@google.com', From 61cddb4e0d52c274dfcd3cbcf8212a56ed3f8c9c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 6 May 2020 00:57:40 +0200 Subject: [PATCH 0258/1339] chore(deps): update dependency dataclasses to v0.7 (#407) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 58e650047815..f5df6c1bf238 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,4 +6,4 @@ MarkupSafe==1.1.1 protobuf==3.11.3 pypandoc==1.5 PyYAML==5.3.1 -dataclasses==0.6 \ No newline at end of file +dataclasses==0.7 \ No newline at end of file From fd2df04fe6f2773f83f67efc46584292f125d684 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 6 May 2020 15:58:53 -0700 Subject: [PATCH 0259/1339] Template directories passed to options are turned into absolute paths (#409) --- .../gapic/generator/options.py | 23 ++++++++++++------- .../tests/unit/generator/test_generator.py | 2 +- .../tests/unit/generator/test_options.py | 10 +++++++- 3 files changed, 25 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 0c823fa84df0..40052d01c546 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -13,6 +13,7 @@ # limitations under the License. from collections import defaultdict +from os import path from typing import Any, DefaultDict, Dict, FrozenSet, List, Optional, Tuple import dataclasses @@ -95,13 +96,19 @@ def build(cls, opt_string: str) -> 'Options': # may be our default; perform that replacement. 
default_token = 'DEFAULT' templates = opts.pop('templates', [default_token]) - default_path = os.path.realpath( - os.path.join(os.path.dirname(__file__), '..', 'templates'), - ) - templates = [ - (default_path if path == default_token else path) - for path in templates - ] + pwd = path.join(path.dirname(__file__), '..') + default_path = path.realpath(path.join(pwd, 'templates')) + + def tweak_path(p): + if p == default_token: + return default_path + + if path.isabs(p): + return path.normpath(p) + + return path.normpath(path.join(pwd, p)) + + templates = [tweak_path(p) for p in templates] retry_cfg = None retry_paths = opts.pop('retry-config', None) @@ -121,7 +128,7 @@ def build(cls, opt_string: str) -> 'Options': for s in sample_paths for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), - templates=tuple(os.path.expanduser(i) for i in templates), + templates=tuple(path.expanduser(i) for i in templates), lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), ) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index f676e23e9592..69cfc665f38e 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -36,7 +36,7 @@ def test_custom_template_directory(): g = generator.Generator(opts) # Assert that the Jinja loader will pull from the correct location. 
- assert g._env.loader.searchpath == ['/templates/'] + assert g._env.loader.searchpath == ['/templates'] def test_get_response(): diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 78af30dfeea8..b7d6cc2082e8 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import pytest from unittest import mock import warnings @@ -31,7 +32,14 @@ def test_options_empty(): def test_options_replace_templates(): opts = options.Options.build('python-gapic-templates=/foo/') assert len(opts.templates) == 1 - assert opts.templates[0] == '/foo/' + assert opts.templates[0] == '/foo' + + +def test_options_relative_templates(): + opts = options.Options.build('python-gapic-templates=../../squid/clam') + + expected = (os.path.abspath('../squid/clam'),) + assert opts.templates == expected def test_options_unrecognized(): From 71d809f74ba977f9d166df0e8f2cca96bcd7d670 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 8 May 2020 16:32:26 -0700 Subject: [PATCH 0260/1339] Use latest version of api-common-protos (#412) Should address #405. --- packages/gapic-generator/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index 4b30a6ba257e..750c79a1c115 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -7,8 +7,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* # Add protoc and our common protos. 
-COPY --from=gcr.io/gapic-images/api-common-protos:0.1.0 /usr/local/bin/protoc /usr/local/bin/protoc -COPY --from=gcr.io/gapic-images/api-common-protos:0.1.0 /protos/ /protos/ +COPY --from=gcr.io/gapic-images/api-common-protos:latest /usr/local/bin/protoc /usr/local/bin/protoc +COPY --from=gcr.io/gapic-images/api-common-protos:latest /protos/ /protos/ # Add our code to the Docker image. ADD . /usr/src/gapic-generator-python/ From 35db7a2a9213d3d32baa6f5319d5f9982c1ffb22 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 8 May 2020 16:55:26 -0700 Subject: [PATCH 0261/1339] feat: Add opt_args to py_gapic_library (#411) This is to suport _opt command line args for protoc with a . See https://github.com/googleapis/gapic-generator-python/issues/390 for more details. This PR depends on https://github.com/googleapis/gapic-generator/pull/3191 Example of usage: ```bzl py_gapic_library( name = "documentai_py_gapic", srcs = [":documentai_proto"], opt_args = [ "old-naming", "lazy-import", "python-gapic-name=documentai", "python-gapic-templates=ads-templates", ], ) ``` --- packages/gapic-generator/repositories.bzl | 4 ++-- packages/gapic-generator/requirements.txt | 2 +- packages/gapic-generator/rules_python_gapic/py_gapic.bzl | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index a31c6440cfb8..9af2d302de9f 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -58,8 +58,8 @@ def gapic_generator_python(): _maybe( http_archive, name = "com_google_api_codegen", - strip_prefix = "gapic-generator-b32c73219d617f90de70bfa6ff0ea0b0dd638dfe", - urls = ["https://github.com/googleapis/gapic-generator/archive/b32c73219d617f90de70bfa6ff0ea0b0dd638dfe.zip"], + strip_prefix = "gapic-generator-03abac35ec0716c6f426ffc1532f9a62f1c9e6a2", + urls = 
["https://github.com/googleapis/gapic-generator/archive/03abac35ec0716c6f426ffc1532f9a62f1c9e6a2.zip"], ) def gapic_generator_register_toolchains(): diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f5df6c1bf238..58e650047815 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,4 +6,4 @@ MarkupSafe==1.1.1 protobuf==3.11.3 pypandoc==1.5 PyYAML==5.3.1 -dataclasses==0.7 \ No newline at end of file +dataclasses==0.6 \ No newline at end of file diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index f132e32dcee4..55c23223cc94 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -14,7 +14,7 @@ load("@com_google_api_codegen//rules_gapic:gapic.bzl", "proto_custom_library") -def py_gapic_library(name, srcs, plugin_args = [], **kwargs): +def py_gapic_library(name, srcs, plugin_args = [], opt_args = [], **kwargs): # srcjar_target_name = "%s_srcjar" % name srcjar_target_name = name srcjar_output_suffix = ".srcjar" @@ -25,6 +25,7 @@ def py_gapic_library(name, srcs, plugin_args = [], **kwargs): plugin = Label("@gapic_generator_python//:gapic_plugin"), plugin_args = plugin_args, plugin_file_args = {}, + opt_args = opt_args, output_type = "python_gapic", output_suffix = srcjar_output_suffix, **kwargs From e51f9552baa479a08cea27c7408ca722b0cd2da6 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 13 May 2020 10:44:11 -0700 Subject: [PATCH 0262/1339] fix: fix style-check error (#416) --- packages/gapic-generator/gapic/schema/naming.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index b1a1f6671048..699b524ab5f0 100644 --- 
a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -25,6 +25,8 @@ # See https://github.com/python/mypy/issues/5374 for details on the mypy false # positive. + + @dataclasses.dataclass(frozen=True) # type: ignore class Naming(abc.ABC): """Naming data for an API. From 1a720c70945d4095bec459b8afcc7797b4b5e7c3 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 13 May 2020 11:14:41 -0700 Subject: [PATCH 0263/1339] Bump version to 0.23.0 (#417) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 150f356ce422..9a571b054eab 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name='gapic-generator', - version='0.22.0', + version='0.23.0', license='Apache 2.0', author='Dov Shlachter', author_email='dovs@google.com', From 50090d2211f229007ab0a1a3a595259752b58b2a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 21 May 2020 23:15:09 +0200 Subject: [PATCH 0264/1339] chore(deps): update dependency protobuf to v3.12.1 (#419) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 58e650047815..e2c781334b66 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.17.0 googleapis-common-protos==1.51.0 jinja2==2.11.2 MarkupSafe==1.1.1 -protobuf==3.11.3 +protobuf==3.12.1 pypandoc==1.5 PyYAML==5.3.1 dataclasses==0.6 \ No newline at end of file From 9ddce341edaea1df4cc411f7681a65895e79d81c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 26 May 2020 14:11:28 -0700 Subject: [PATCH 0265/1339] fix: consolidate dependencies in setup.py (#422) --- 
packages/gapic-generator/gapic/templates/setup.py.j2 | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 19251b6f6dcf..4fd9de216de1 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,10 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-auth >= 1.14.0', - 'google-api-core >= 1.17.0, < 2.0.0dev', - 'googleapis-common-protos >= 1.5.8', - 'grpcio >= 1.10.0', + 'google-api-core[grpc] >= 1.17.0, < 2.0.0dev', 'proto-plus >= 0.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', From 55078f57609ae998717255f3442320e98250b068 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 27 May 2020 14:30:54 -0700 Subject: [PATCH 0266/1339] Move `libcst` to `install_requires` (#423) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use of `setup_requires` is discouraged by [setuptools](https://setuptools.readthedocs.io/en/latest/setuptools.html?highlight=setup_requires#options) A string or list of strings specifying what other distributions need to be present in order for the setup script to run. setuptools will attempt to obtain these (even going so far as to download them using EasyInstall) before processing the rest of the setup script or commands. This argument is needed if you are using distutils extensions as part of your build process; for example, extensions that process setup() arguments and turn them into EGG-INFO metadata files. (Note: projects listed in setup_requires will NOT be automatically installed on the system where the setup script is being run. They are simply downloaded to the ./.eggs directory if they’re not locally available already. 
If you want them to be installed, as well as being available when the setup script is run, you should add them to install_requires and setup_requires.) `libcst` is also technically not needed by the setup script, so adding it as an `install_requires` feels more appropriate. --- packages/gapic-generator/gapic/templates/setup.py.j2 | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 4fd9de216de1..e400754b1124 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -17,15 +17,13 @@ setuptools.setup( include_package_data=True, install_requires=( 'google-api-core[grpc] >= 1.17.0, < 2.0.0dev', + 'libcst >= 0.2.5', 'proto-plus >= 0.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {%- endif %} ), python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy import requires module-level getattr #} - setup_requires=[ - 'libcst >= 0.2.5', - ], scripts=[ 'scripts/fixup_keywords.py', ], From 5d4ed2b672596365cea5cf9d1d37655468ecdeed Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 27 May 2020 23:37:26 +0200 Subject: [PATCH 0267/1339] chore(deps): update dependency protobuf to v3.12.2 (#424) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e2c781334b66..418f6c7cc626 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.17.0 googleapis-common-protos==1.51.0 jinja2==2.11.2 MarkupSafe==1.1.1 -protobuf==3.12.1 +protobuf==3.12.2 pypandoc==1.5 PyYAML==5.3.1 dataclasses==0.6 \ No newline at end of file From 9c92094d7cf8d09ef33e980f875e52f0b2fca46a Mon Sep 
17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 27 May 2020 14:55:22 -0700 Subject: [PATCH 0268/1339] feat: add GOOGLE_API_USE_MTLS support (#420) Co-authored-by: Dov Shlachter --- .../%sub/services/%service/client.py.j2 | 73 ++++++----- .../services/%service/transports/grpc.py.j2 | 8 +- .../%name_%version/%sub/test_%service.py.j2 | 118 ++++++++++++----- .../%sub/services/%service/client.py.j2 | 73 ++++++----- .../services/%service/transports/grpc.py.j2 | 6 +- .../%name_%version/%sub/test_%service.py.j2 | 119 +++++++++++++----- 6 files changed, 272 insertions(+), 125 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 12e92de7d199..af3a3b76d514 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -2,6 +2,7 @@ {% block content %} from collections import OrderedDict +import os import re from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources @@ -11,6 +12,8 @@ from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} @@ -144,21 +147,47 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, ~.{{ 
service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. + client_options (ClientOptions): Custom options for the client. It + won't take effect unless ``transport`` is None. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. - (2) If ``transport`` argument is None, ``client_options`` can be - used to create a mutual TLS transport. If ``client_cert_source`` - is provided, mutual TLS transport will be created with the given - ``api_endpoint`` or the default mTLS endpoint, and the client - SSL credentials obtained from ``client_cert_source``. + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "Always" (always use the default mTLS endpoint), "Never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "Auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" if isinstance(client_options, dict): client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if transport is None and client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "Never") + if use_mtls_env == "Never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "Always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "Auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if has_client_cert_source else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: Never, Auto, Always" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -169,38 +198,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') self._transport = transport - elif client_options is None or ( - client_options.api_endpoint is None - and client_options.client_cert_source is None - ): - # Don't trigger mTLS if we get an empty ClientOptions. + elif isinstance(transport, str): Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, host=self.DEFAULT_ENDPOINT ) else: - # We have a non-empty ClientOptions. If client_cert_source is - # provided, trigger mTLS with user provided endpoint or the default - # mTLS endpoint. 
- if client_options.client_cert_source: - api_mtls_endpoint = ( - client_options.api_endpoint - if client_options.api_endpoint - else self.DEFAULT_MTLS_ENDPOINT - ) - else: - api_mtls_endpoint = None - - api_endpoint = ( - client_options.api_endpoint - if client_options.api_endpoint - else self.DEFAULT_ENDPOINT - ) - self._transport = {{ service.name }}GrpcTransport( credentials=credentials, - host=api_endpoint, - api_mtls_endpoint=api_mtls_endpoint, + host=client_options.api_endpoint, + api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, ) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index c42770f9a0f1..1632b7762110 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -7,6 +7,7 @@ from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -63,7 +64,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): is None. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" if channel: @@ -76,6 +77,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): elif api_mtls_endpoint: host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -96,7 +100,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Run the base constructor. super().__init__(host=host, credentials=credentials) - self._stubs = {} # type: Dict[str, Callable] + self._stubs = {} # type: Dict[str, Callable] @classmethod diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 5e2e5e287086..1e2043b62110 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} +import os from unittest import mock import grpc @@ -11,6 +12,7 @@ import pytest {% filter sort_lines -%} from google import auth from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports @@ -63,6 +65,14 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): {% if service.host %}assert client._transport._host == '{{ 
service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} +def test_{{ service.client_name|snake_case }}_get_transport_class(): + transport = {{ service.client_name }}.get_transport_class() + assert transport == transports.{{ service.name }}GrpcTransport + + transport = {{ service.client_name }}.get_transport_class("grpc") + assert transport == transports.{{ service.name }}GrpcTransport + + def test_{{ service.client_name|snake_case }}_client_options(): # Check that if channel is provided we won't create a new one. with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: @@ -72,58 +82,99 @@ def test_{{ service.client_name|snake_case }}_client_options(): client = {{ service.client_name }}(transport=transport) gtc.assert_not_called() - # Check mTLS is not triggered with empty client options. - options = client_options.ClientOptions() + # Check that if channel is provided via str we will create a new one. with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - transport = gtc.return_value = mock.MagicMock() - client = {{ service.client_name }}(client_options=options) - transport.assert_called_once_with( - credentials=None, - host=client.DEFAULT_ENDPOINT, - ) + client = {{ service.client_name }}(transport="grpc") + gtc.assert_called() - # Check mTLS is not triggered if api_endpoint is provided but - # client_cert_source is None. + # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint=None, + api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, credentials=None, host="squid.clam.whelk", ) - # Check mTLS is triggered if client_cert_source is provided. - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "Never". + os.environ["GOOGLE_API_USE_MTLS"] = "Never" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) + client = {{ service.client_name }}() grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, credentials=None, host=client.DEFAULT_ENDPOINT, ) - # Check mTLS is triggered if api_endpoint and client_cert_source are provided. - options = client_options.ClientOptions( - api_endpoint="squid.clam.whelk", - client_cert_source=client_cert_source_callback - ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "Always". 
+ os.environ["GOOGLE_API_USE_MTLS"] = "Always" + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_MTLS_ENDPOINT, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "Auto", and client_cert_source is provided. + os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, credentials=None, - host="squid.clam.whelk", + host=client.DEFAULT_MTLS_ENDPOINT, ) + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "Auto", and default_client_cert_source is provided. 
+ os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_MTLS_ENDPOINT, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "Auto", but client_cert_source and default_client_cert_source are None. + os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" + with pytest.raises(MutualTLSChannelError): + client = {{ service.client_name }}() + + del os.environ["GOOGLE_API_USE_MTLS"] + def test_{{ service.client_name|snake_case }}_client_options_from_dict(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: @@ -132,7 +183,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_options={'api_endpoint': 'squid.clam.whelk'} ) grpc_transport.assert_called_once_with( - api_mtls_endpoint=None, + api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, credentials=None, host="squid.clam.whelk", @@ -490,12 +541,24 @@ def test_{{ service.name|snake_case }}_auth_adc(): )) +def test_{{ service.name|snake_case }}_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, 'default') as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk") + adc.assert_called_once_with(scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + )) + + def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), - transport='grpc', ) assert client._transport._host == '{{ host }}:443' {% endwith %} @@ -506,7 +569,6 @@ def test_{{ service.name|snake_case }}_host_with_port(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), - transport='grpc', ) assert client._transport._host == '{{ host }}:8000' {% endwith %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index a3c9d5b9b318..1915722a0cd2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -2,6 +2,7 @@ {% block content %} from collections import OrderedDict +import os import re from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources @@ -11,6 +12,8 @@ from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials 
# type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} @@ -144,21 +147,47 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. + client_options (ClientOptions): Custom options for the client. It + won't take effect unless ``transport`` is None. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. - (2) If ``transport`` argument is None, ``client_options`` can be - used to create a mutual TLS transport. If ``client_cert_source`` - is provided, mutual TLS transport will be created with the given - ``api_endpoint`` or the default mTLS endpoint, and the client - SSL credentials obtained from ``client_cert_source``. + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "Always" (always use the default mTLS endpoint), "Never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "Auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" if isinstance(client_options, dict): client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if transport is None and client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "Never") + if use_mtls_env == "Never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "Always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "Auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if has_client_cert_source else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: Never, Auto, Always" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport @@ -169,38 +198,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') self._transport = transport - elif client_options is None or ( - client_options.api_endpoint is None - and client_options.client_cert_source is None - ): - # Don't trigger mTLS if we get an empty ClientOptions. + elif isinstance(transport, str): Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, host=self.DEFAULT_ENDPOINT ) else: - # We have a non-empty ClientOptions. If client_cert_source is - # provided, trigger mTLS with user provided endpoint or the default - # mTLS endpoint. 
- if client_options.client_cert_source: - api_mtls_endpoint = ( - client_options.api_endpoint - if client_options.api_endpoint - else self.DEFAULT_MTLS_ENDPOINT - ) - else: - api_mtls_endpoint = None - - api_endpoint = ( - client_options.api_endpoint - if client_options.api_endpoint - else self.DEFAULT_ENDPOINT - ) - self._transport = {{ service.name }}GrpcTransport( credentials=credentials, - host=api_endpoint, - api_mtls_endpoint=api_mtls_endpoint, + host=client_options.api_endpoint, + api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index eb47dbdc52ed..1632b7762110 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -7,6 +7,7 @@ from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -63,7 +64,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): is None. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" if channel: @@ -76,6 +77,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): elif api_mtls_endpoint: host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 3a1aa0e35089..a613c6753f7b 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} +import os from unittest import mock import grpc @@ -11,6 +12,7 @@ import pytest {% filter sort_lines -%} from google import auth from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports @@ -63,6 +65,14 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): {% if service.host %}assert client._transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} +def test_{{ service.client_name|snake_case }}_get_transport_class(): + transport = {{ service.client_name }}.get_transport_class() + assert transport == transports.{{ service.name }}GrpcTransport + + transport = {{ 
service.client_name }}.get_transport_class("grpc") + assert transport == transports.{{ service.name }}GrpcTransport + + def test_{{ service.client_name|snake_case }}_client_options(): # Check that if channel is provided we won't create a new one. with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: @@ -72,58 +82,100 @@ def test_{{ service.client_name|snake_case }}_client_options(): client = {{ service.client_name }}(transport=transport) gtc.assert_not_called() - # Check mTLS is not triggered with empty client options. - options = client_options.ClientOptions() + # Check that if channel is provided via str we will create a new one. with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - transport = gtc.return_value = mock.MagicMock() - client = {{ service.client_name }}(client_options=options) - transport.assert_called_once_with( - credentials=None, - host=client.DEFAULT_ENDPOINT, - ) + client = {{ service.client_name }}(transport="grpc") + gtc.assert_called() - # Check mTLS is not triggered if api_endpoint is provided but - # client_cert_source is None. + # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint=None, + api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, credentials=None, host="squid.clam.whelk", ) - # Check mTLS is triggered if client_cert_source is provided. - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "Never". + os.environ["GOOGLE_API_USE_MTLS"] = "Never" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) + client = {{ service.client_name }}() grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, credentials=None, host=client.DEFAULT_ENDPOINT, ) - # Check mTLS is triggered if api_endpoint and client_cert_source are provided. - options = client_options.ClientOptions( - api_endpoint="squid.clam.whelk", - client_cert_source=client_cert_source_callback - ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "Always". 
+ os.environ["GOOGLE_API_USE_MTLS"] = "Always" + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_MTLS_ENDPOINT, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "Auto", and client_cert_source is provided. + os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, credentials=None, - host="squid.clam.whelk", + host=client.DEFAULT_MTLS_ENDPOINT, ) + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "Auto", and default_client_cert_source is provided. 
+ os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_MTLS_ENDPOINT, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "Auto", but client_cert_source and default_client_cert_source are None. + os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" + with pytest.raises(MutualTLSChannelError): + client = {{ service.client_name }}() + + del os.environ["GOOGLE_API_USE_MTLS"] + + def test_{{ service.client_name|snake_case }}_client_options_from_dict(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -131,7 +183,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_options={'api_endpoint': 'squid.clam.whelk'} ) grpc_transport.assert_called_once_with( - api_mtls_endpoint=None, + api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, credentials=None, host="squid.clam.whelk", @@ -557,12 +609,24 @@ def test_{{ service.name|snake_case }}_auth_adc(): )) +def test_{{ service.name|snake_case }}_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, 'default') as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk") + adc.assert_called_once_with(scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + )) + + def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), - transport='grpc', ) assert client._transport._host == '{{ host }}:443' {% endwith %} @@ -573,7 +637,6 @@ def test_{{ service.name|snake_case }}_host_with_port(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), - transport='grpc', ) assert client._transport._host == '{{ host }}:8000' {% endwith %} From 52505fd5c54ad870e5b716d71d07cf3a31b776ac Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 28 May 2020 15:09:07 -0700 Subject: [PATCH 0269/1339] Remove unused variable source (#428) --- .../tests/unit/%name_%version/%sub/test_%service.py.j2 | 2 +- .../tests/unit/%name_%version/%sub/test_%service.py.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 1e2043b62110..da6795d58fe7 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -346,7 +346,7 @@ def test_{{ method.name|snake_case }}_flattened(): {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword 
arguments to the method. - response = client.{{ method.name|snake_case }}( + client.{{ method.name|snake_case }}( {%- for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index a613c6753f7b..7b29eeaf2a28 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -346,7 +346,7 @@ def test_{{ method.name|snake_case }}_flattened(): {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = client.{{ method.name|snake_case }}( + client.{{ method.name|snake_case }}( {%- for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, {%- endfor %} From 66a648fa7a479d72a35de678ee7b6a3e7fb418cc Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Wed, 3 Jun 2020 10:32:21 -0700 Subject: [PATCH 0270/1339] chore: Update documentation to include use via Bazel rule (#436) See https://github.com/googleapis/gapic-generator-python/issues/404 for more details. 
--- .../docs/getting-started/_verifying.rst | 4 +- .../docs/getting-started/bazel.rst | 116 ++++++++++++++++++ .../docs/getting-started/index.rst | 1 + 3 files changed, 119 insertions(+), 2 deletions(-) create mode 100644 packages/gapic-generator/docs/getting-started/bazel.rst diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index 50fd3a676b84..3b25c4537ead 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -1,8 +1,8 @@ Verifying the Library --------------------- -Once you have compiled a client library, whether using a Docker image or -a local installation, it is time for the fun part: actually running it! +Once you have compiled a client library, whether using a Docker image, +local installation or bazel, it is time for the fun part: actually running it! Create a virtual environment for the library: diff --git a/packages/gapic-generator/docs/getting-started/bazel.rst b/packages/gapic-generator/docs/getting-started/bazel.rst new file mode 100644 index 000000000000..428b43ed957c --- /dev/null +++ b/packages/gapic-generator/docs/getting-started/bazel.rst @@ -0,0 +1,116 @@ +.. _getting-started/bazel: + +Bazel Build +=========== + +This generator can be called from `Bazel`_, which is a recommended way of using +it inside a continuous integration build or any other automated pipeline. + +.. _Bazel: https://www.bazel.build/ + +Installing +---------- + +Bazel +~~~~~~ +You will need Bazel version 3.0+. Please check the Bazel `website`_ for the +available installation options. + +Bazel is distributed in a form of a single binary, so one of the easiest ways to +install it is simply downloading the binary and making it executable: + +.. code-block:: shell + + curl -L https://github.com/bazelbuild/bazel/releases/download/3.2.0/bazel-3.2.0-linux-x86_64 -o bazel + chmod +x bazel + +.. 
_website: https://docs.bazel.build/versions/3.2.0/install-ubuntu.html + +Python and Dependencies +~~~~~~~~~~~~~~~~~~~~~~~ +Bazel build is mostly hermetic, with a few exceptions for Python generator. +Specifically it expects Python 3.7+ with the python dev packages to be installed. + +On Linux, to install those, simply run: + +.. code-block:: shell + + sudo apt-get install \ + python-dev \ + python3-dev + +Usage +----- + +.. include:: _usage_intro.rst + +Example +~~~~~~~ + +To generate a client library with Bazel you will need a Bazel workspace. An +example of such workspace would be `googleapis`_. It is already integrated with +this generator in its `WORKSPACE`_ file. + +You need to clone the `googleapis`_ repository from GitHub: + +.. code-block:: shell + + $ git clone https://github.com/googleapis/googleapis.git + +The API we use as an example is the `Document AI`_ API, +available in the ``google/cloud/documentai/v1beta2/`` subdirectory. + +.. _googleapis: https://github.com/googleapis +.. _WORKSPACE: https://github.com/googleapis/googleapis/blob/master/WORKSPACE#L220 +.. _Document AI: https://cloud.google.com/solutions/document-ai + +Creating the Targets +~~~~~~~~~~~~~~~~~~~~ + +To build something with Bazel you need to create the corresponding targets in +your ``BUILD.bazel`` file. You can use the Python section of the Document AI +`BUIDL.bazel`_ file as an example: + +.. code-block:: python + + load( + "@gapic_generator_python//rules_python_gapic:py_gapic.bzl", + "py_gapic_library" + ) + + load( + "@gapic_generator_python//rules_python_gapic:py_gapic_pkg.bzl", + "py_gapic_assembly_pkg" + ) + + py_gapic_library( + name = "documentai_py_gapic", + srcs = [":documentai_proto"], + ) + + py_gapic_assembly_pkg( + name = "documentai-v1beta2-py", + deps = [ + ":documentai_py_gapic", + ], + ) +.. 
_BUIDL.bazel: https://github.com/googleapis/googleapis/blob/master/google/cloud/documentai/v1beta2/BUILD.bazel + +Compiling an API +~~~~~~~~~~~~~~~~ +To generate the client library simply run the bazel command from the repository +root, specifying the py_gapic_assembly_pkg target name as the argument: + +.. code-block:: shell + + bazel build //google/cloud/documentai/v1beta2:documentai-v1beta2-py + +This will generate a `tar.gz` archive with the generated library packaged in it. +To unpack it in `dest` location simply run the following command from the Bazel +workspace root: + +.. code-block:: shell + + tar -xzpf bazel-bin/google/cloud/documentai/v1beta2/documentai-v1beta2-py.tar.gz -C dest + +.. include:: _verifying.rst diff --git a/packages/gapic-generator/docs/getting-started/index.rst b/packages/gapic-generator/docs/getting-started/index.rst index f3d5e3ff6462..fa2ebf02231e 100644 --- a/packages/gapic-generator/docs/getting-started/index.rst +++ b/packages/gapic-generator/docs/getting-started/index.rst @@ -24,3 +24,4 @@ and this approach is fully supported. 
docker local + bazel From d7f356390847f3fb0b2d7004526d6fd14e8ca064 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 3 Jun 2020 10:34:08 -0700 Subject: [PATCH 0271/1339] Add version to documentation RSTs (#435) docs: add version name to RSTs and list services --- .../ads-templates/docs/%name_%version/services.rst.j2 | 8 +++++--- .../gapic/ads-templates/docs/%name_%version/types.rst.j2 | 4 ++-- .../gapic/templates/docs/%name_%version/services.rst.j2 | 8 +++++--- .../gapic/templates/docs/%name_%version/types.rst.j2 | 4 ++-- 4 files changed, 14 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 index 350f120eceb8..b0f05d693191 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 @@ -1,6 +1,8 @@ -Client for {{ api.naming.long_name }} API -{{ '=' * (15 + api.naming.long_name|length) }} +Services for {{ api.naming.long_name }} {{ api.naming.version }} API +{{ '=' * (18 + api.naming.long_name|length + api.naming.version|length) }} -.. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }} +{% for service in api.services.values()|sort(attribute='name') -%} +.. 
automodule:: {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} :members: :inherited-members: +{% endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 index ce3c8882c8cd..640641f2117d 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 @@ -1,5 +1,5 @@ -Types for {{ api.naming.long_name }} API -{{ '=' * (14 + api.naming.long_name|length) }} +Types for {{ api.naming.long_name }} {{ api.naming.version }} API +{{ '=' * (15 + api.naming.long_name|length + api.naming.version|length) }} .. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 index 350f120eceb8..b0f05d693191 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 @@ -1,6 +1,8 @@ -Client for {{ api.naming.long_name }} API -{{ '=' * (15 + api.naming.long_name|length) }} +Services for {{ api.naming.long_name }} {{ api.naming.version }} API +{{ '=' * (18 + api.naming.long_name|length + api.naming.version|length) }} -.. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }} +{% for service in api.services.values()|sort(attribute='name') -%} +.. 
automodule:: {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} :members: :inherited-members: +{% endfor %} diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 index ce3c8882c8cd..640641f2117d 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 @@ -1,5 +1,5 @@ -Types for {{ api.naming.long_name }} API -{{ '=' * (14 + api.naming.long_name|length) }} +Types for {{ api.naming.long_name }} {{ api.naming.version }} API +{{ '=' * (15 + api.naming.long_name|length + api.naming.version|length) }} .. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: From 47228fd7646561f24a9e71fdd6fba94a0145ce03 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 3 Jun 2020 15:33:28 -0700 Subject: [PATCH 0272/1339] Integration tests for nonslash resource paths (#427) Add integration tests based on showcase's non-slash separated resource paths. Note: I was having editor formatting problems and eventually resorted to using 'apply black on save'. This is to blame for all the spurious string and formatting changes. Note: Even though the changes in this commit imply support for proto3 optional fields, such support is not yet implemented. The 'support' is a workaround to allow the most recent showcase spec to compile. 
--- packages/gapic-generator/.circleci/config.yml | 72 +-- .../gapic/generator/generator.py | 234 ++++--- .../templates/%namespace/%name/__init__.py.j2 | 94 --- .../%name_%version/%sub/__init__.py.j2 | 95 --- .../%name_%version/%sub/types/__init__.py.j2 | 44 +- .../%name_%version/%sub/test_%service.py.j2 | 69 -- packages/gapic-generator/noxfile.py | 292 +++++---- packages/gapic-generator/setup.py | 64 +- .../gapic-generator/tests/system/conftest.py | 47 +- .../tests/system/test_resource_crud.py | 54 +- .../tests/unit/generator/test_generator.py | 592 +++++++++--------- 11 files changed, 691 insertions(+), 966 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index debcecea558b..99c47b680627 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -241,12 +241,12 @@ jobs: name: Install nox. command: pip install nox - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Run showcase tests. @@ -266,12 +266,12 @@ jobs: name: Install nox. command: pip install nox - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. 
command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Run showcase tests. @@ -307,12 +307,12 @@ jobs: name: Install nox. command: pip install nox - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Run showcase tests. @@ -332,12 +332,12 @@ jobs: name: Install nox. command: pip install nox - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Run showcase tests. @@ -369,12 +369,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. 
command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -393,12 +393,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -417,12 +417,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -441,12 +441,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. 
command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -465,12 +465,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -489,12 +489,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -513,12 +513,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. 
command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. @@ -537,12 +537,12 @@ jobs: apt-get update apt-get install -y curl pandoc unzip - run: - name: Install protoc 3.7.1. + name: Install protoc 3.12.1. command: | mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.7.1/protoc-3.7.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.7.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip cd /usr/src/protoc/ - unzip protoc-3.7.1.zip + unzip protoc-3.12.1.zip ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - run: name: Install nox. 
diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index a1bf4bba99a9..8cc380e10550 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -16,13 +16,12 @@ import yaml import re import os -from typing import (Any, DefaultDict, Dict, Mapping) +from typing import Any, DefaultDict, Dict, Mapping from hashlib import sha256 -from collections import (OrderedDict, defaultdict) -from gapic.samplegen_utils.utils import ( - coerce_response_name, is_valid_sample_cfg) +from collections import OrderedDict, defaultdict +from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg from gapic.samplegen_utils.types import DuplicateSample -from gapic.samplegen import (manifest, samplegen) +from gapic.samplegen import manifest, samplegen from gapic.generator import options from gapic.generator import formatter from gapic.schema import api @@ -45,7 +44,7 @@ class Generator: """ def __init__(self, opts: options.Options) -> None: - # Create the jinja environment with which to render templates. + # Create the jinja environment with which to render templates. self._env = jinja2.Environment( loader=jinja2.FileSystemLoader(searchpath=opts.templates), undefined=jinja2.StrictUndefined, @@ -53,18 +52,16 @@ def __init__(self, opts: options.Options) -> None: ) # Add filters which templates require. 
- self._env.filters['rst'] = utils.rst - self._env.filters['snake_case'] = utils.to_snake_case - self._env.filters['sort_lines'] = utils.sort_lines - self._env.filters['wrap'] = utils.wrap - self._env.filters['coerce_response_name'] = coerce_response_name + self._env.filters["rst"] = utils.rst + self._env.filters["snake_case"] = utils.to_snake_case + self._env.filters["sort_lines"] = utils.sort_lines + self._env.filters["wrap"] = utils.wrap + self._env.filters["coerce_response_name"] = coerce_response_name self._sample_configs = opts.sample_configs def get_response( - self, - api_schema: api.API, - opts: options.Options + self, api_schema: api.API, opts: options.Options ) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. @@ -83,7 +80,8 @@ def get_response( sample_templates, client_templates = utils.partition( lambda fname: os.path.basename( fname) == samplegen.DEFAULT_TEMPLATE_NAME, - self._env.loader.list_templates()) + self._env.loader.list_templates(), + ) # Iterate over each template and add the appropriate output files # based on that template. @@ -92,31 +90,30 @@ def get_response( # and plug those into the template. for template_name in client_templates: # Sanity check: Skip "private" templates. - filename = template_name.split('/')[-1] - if filename.startswith('_') and filename != '__init__.py.j2': + filename = template_name.split("/")[-1] + if filename.startswith("_") and filename != "__init__.py.j2": continue # Append to the output files dictionary. output_files.update( self._render_template( - template_name, - api_schema=api_schema, - opts=opts - ) + template_name, api_schema=api_schema, opts=opts) ) - output_files.update(self._generate_samples_and_manifest( - api_schema, - self._env.get_template(sample_templates[0]), - )) + output_files.update( + self._generate_samples_and_manifest( + api_schema, self._env.get_template(sample_templates[0]), + ) + ) # Return the CodeGeneratorResponse output. 
- return CodeGeneratorResponse(file=[i for i in output_files.values()]) + res = CodeGeneratorResponse( + file=[i for i in output_files.values()]) # type: ignore + res.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL # type: ignore + return res def _generate_samples_and_manifest( - self, - api_schema: api.API, - sample_template: jinja2.Template, + self, api_schema: api.API, sample_template: jinja2.Template, ) -> Dict[str, CodeGeneratorResponse.File]: """Generate samples and samplegen manifest for the API. @@ -129,8 +126,8 @@ def _generate_samples_and_manifest( # The two-layer data structure lets us do two things: # * detect duplicate samples, which is an error # * detect distinct samples with the same ID, which are disambiguated - id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict( - dict) + id_to_hash_to_spec: DefaultDict[str, + Dict[str, Any]] = defaultdict(dict) STANDALONE_TYPE = "standalone" for config_fpath in self._sample_configs: @@ -139,7 +136,8 @@ def _generate_samples_and_manifest( spec_generator = ( spec - for cfg in configs if is_valid_sample_cfg(cfg) + for cfg in configs + if is_valid_sample_cfg(cfg) for spec in cfg.get("samples", []) # If unspecified, assume a sample config describes a standalone. # If sample_types are specified, standalone samples must be @@ -155,10 +153,9 @@ def _generate_samples_and_manifest( # # Ideally the sample author should pick a descriptive, unique ID, # but this may be impractical and can be error-prone. 
- spec_hash = sha256(str(spec).encode('utf8')).hexdigest()[:8] - sample_id = (spec.get("id") - or spec.get("region_tag") - or spec_hash) + spec_hash = sha256(str(spec).encode("utf8")).hexdigest()[:8] + sample_id = spec.get("id") or spec.get( + "region_tag") or spec_hash spec["id"] = sample_id hash_to_spec = id_to_hash_to_spec[sample_id] @@ -179,19 +176,17 @@ def _generate_samples_and_manifest( spec["id"] += f"_{spec_hash}" sample = samplegen.generate_sample( - spec, - api_schema, - sample_template, - ) + spec, api_schema, sample_template,) fpath = spec["id"] + ".py" - fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = (spec, - sample) + fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = ( + spec, + sample, + ) output_files = { fname: CodeGeneratorResponse.File( - content=formatter.fix_whitespace(sample), - name=fname + content=formatter.fix_whitespace(sample), name=fname ) for fname, (_, sample) in fpath_to_spec_and_rendered.items() } @@ -199,24 +194,22 @@ def _generate_samples_and_manifest( # Only generate a manifest if we generated samples. if output_files: manifest_fname, manifest_doc = manifest.generate( - ((fname, spec) - for fname, (spec, _) in fpath_to_spec_and_rendered.items()), - api_schema + ( + (fname, spec) + for fname, (spec, _) in fpath_to_spec_and_rendered.items() + ), + api_schema, ) manifest_fname = os.path.join(out_dir, manifest_fname) output_files[manifest_fname] = CodeGeneratorResponse.File( - content=manifest_doc.render(), - name=manifest_fname + content=manifest_doc.render(), name=manifest_fname ) return output_files def _render_template( - self, - template_name: str, *, - api_schema: api.API, - opts: options.Options, + self, template_name: str, *, api_schema: api.API, opts: options.Options, ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. 
@@ -238,82 +231,95 @@ def _render_template( # Sanity check: Rendering per service and per proto would be a # combinatorial explosion and is almost certainly not what anyone # ever wants. Error colorfully on it. - if '%service' in template_name and '%proto' in template_name: - raise ValueError('Template files may live under a %proto or ' - '%service directory, but not both.') + if "%service" in template_name and "%proto" in template_name: + raise ValueError( + "Template files may live under a %proto or " + "%service directory, but not both." + ) # If this template should be rendered for subpackages, process it # for all subpackages and set the strict flag (restricting what # services and protos we pull from for the remainder of the method). - if '%sub' in template_name: + if "%sub" in template_name: for subpackage in api_schema.subpackages.values(): - answer.update(self._render_template(template_name, - api_schema=subpackage, - opts=opts - )) - skip_subpackages = True + answer.update( + self._render_template( + template_name, api_schema=subpackage, opts=opts + ) + ) + skip_subpackages = True # If this template should be rendered once per proto, iterate over # all protos to be rendered - if '%proto' in template_name: + if "%proto" in template_name: for proto in api_schema.protos.values(): - if (skip_subpackages and proto.meta.address.subpackage != - api_schema.subpackage_view): + if ( + skip_subpackages + and proto.meta.address.subpackage != api_schema.subpackage_view + ): continue - answer.update(self._get_file(template_name, - api_schema=api_schema, - proto=proto, - opts=opts - )) + + answer.update( + self._get_file( + template_name, api_schema=api_schema, proto=proto, opts=opts + ) + ) + return answer # If this template should be rendered once per service, iterate # over all services to be rendered. 
- if '%service' in template_name: + if "%service" in template_name: for service in api_schema.services.values(): - if (skip_subpackages and service.meta.address.subpackage != - api_schema.subpackage_view): + if ( + skip_subpackages + and service.meta.address.subpackage != api_schema.subpackage_view + ): continue - answer.update(self._get_file(template_name, - api_schema=api_schema, - service=service, - opts=opts, - )) + + answer.update( + self._get_file( + template_name, + api_schema=api_schema, + service=service, + opts=opts, + ) + ) return answer # This file is not iterating over anything else; return back # the one applicable file. - answer.update( - self._get_file(template_name, api_schema=api_schema, opts=opts) - ) + answer.update(self._get_file( + template_name, api_schema=api_schema, opts=opts)) return answer - def _get_file(self, template_name: str, *, - opts: options.Options, - api_schema=api.API, - **context: Mapping): + def _get_file( + self, + template_name: str, + *, + opts: options.Options, + api_schema=api.API, + **context: Mapping, + ): """Render a template to a protobuf plugin File object.""" # Determine the target filename. - fn = self._get_filename(template_name, - api_schema=api_schema, - context=context, - ) + fn = self._get_filename( + template_name, api_schema=api_schema, context=context,) # Render the file contents. cgr_file = CodeGeneratorResponse.File( content=formatter.fix_whitespace( self._env.get_template(template_name).render( - api=api_schema, - opts=opts, - **context + api=api_schema, opts=opts, **context ), ), name=fn, ) # Sanity check: Do not render empty files. 
- if (utils.empty(cgr_file.content) and - not fn.endswith(('py.typed', '__init__.py'))): + if utils.empty(cgr_file.content) and not fn.endswith( + ("py.typed", "__init__.py") + ): return {} # Return the filename and content in a length-1 dictionary @@ -321,10 +327,7 @@ def _get_file(self, template_name: str, *, return {fn: cgr_file} def _get_filename( - self, - template_name: str, *, - api_schema: api.API, - context: dict = None, + self, template_name: str, *, api_schema: api.API, context: dict = None, ) -> str: """Return the appropriate output filename for this template. @@ -346,45 +349,40 @@ def _get_filename( Returns: str: The appropriate output filename. """ - filename = template_name[:-len('.j2')] + filename = template_name[: -len(".j2")] # Replace the %namespace variable. filename = filename.replace( - '%namespace', + "%namespace", os.path.sep.join([i.lower() for i in api_schema.naming.namespace]), ).lstrip(os.path.sep) # Replace the %name, %version, and %sub variables. - filename = filename.replace('%name_%version', - api_schema.naming.versioned_module_name) - filename = filename.replace('%version', api_schema.naming.version) - filename = filename.replace('%name', api_schema.naming.module_name) - filename = filename.replace('%sub', - '/'.join(api_schema.subpackage_view)) + filename = filename.replace( + "%name_%version", api_schema.naming.versioned_module_name + ) + filename = filename.replace("%version", api_schema.naming.version) + filename = filename.replace("%name", api_schema.naming.module_name) + filename = filename.replace( + "%sub", "/".join(api_schema.subpackage_view)) # Replace the %service variable if applicable. - if context and 'service' in context: + if context and "service" in context: filename = filename.replace( - '%service', - context['service'].module_name, - ) + "%service", context["service"].module_name,) # Replace the %proto variable if appliable. # In the cases of protos, we also honor subpackages. 
- if context and 'proto' in context: + if context and "proto" in context: filename = filename.replace( - '%proto', - context['proto'].module_name, - ) + "%proto", context["proto"].module_name,) # Paths may have empty path segments if components are empty # (e.g. no %version); handle this. - filename = re.sub(r'/+', '/', filename) + filename = re.sub(r"/+", "/", filename) # Done, return the filename. return filename -__all__ = ( - 'Generator', -) +__all__ = ("Generator",) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 1ea3128c5776..15f4a17e44b7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -1,98 +1,5 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} -import importlib -import re -import sys - -from itertools import chain - -def to_snake_case(s: str) -> str: - s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) - s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) - - # Numbers are a weird case; the goal is to spot when they _start_ - # some kind of name or acronym (e.g. 2FA, 3M). - # - # Find cases of a number preceded by a lower-case letter _and_ - # followed by at least two capital letters or a single capital and - # end of string. 
- s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) - - return s.lower() - - -def from_snake_case(s): - _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') - return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) - - -if sys.version_info < (3, 7): - raise ImportError('This module requires Python 3.7 or later.') - -_lazy_name_to_package_map = { - 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', - {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', - '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.base', - '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.transports.grpc', - {%- endfor %} {# Need to do types and enums #} -} - -_lazy_type_to_package_map = { -{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %} -{%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ 
proto.module_name }}', -{%- endfor %}{%- endfor %} -} - - -# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ -def __getattr__(name): # Requires Python >= 3.7 - if name == '__all__': - all_names = globals()['__all__'] = sorted( - chain( - (from_snake_case(k) for k in _lazy_name_to_package_map), - _lazy_type_to_package_map, - ) - ) - return all_names - elif name.endswith('Transport'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) - globals()[name] = klass - return klass - elif name.endswith('Client'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type( - name, - (sub_mod_class,), - {'__doc__': sub_mod_class.__doc__} - ) - globals()[name] = klass - return klass - elif name in _lazy_name_to_package_map: - module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') - globals()[name] = module - return module - elif name in _lazy_type_to_package_map: - module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') - klass = getattr(module, name) - {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} - globals()[name] = klass - return klass - else: - raise AttributeError(f'unknown sub-module {name!r}.') - - -def __dir__(): - return globals().get('__all__') or __getattr__('__all__') -{% else -%} {# do not use lazy import #} {# Import subpackages. 
-#} {% filter sort_lines -%} {% for subpackage in api.subpackages.keys() -%} @@ -154,5 +61,4 @@ __all__ = ( {% endfilter -%} {% endfilter -%} ) -{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 372ce3e2507e..df685b243801 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -1,100 +1,6 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} -import importlib -import re -import sys - -from itertools import chain - -def to_snake_case(s: str) -> str: - s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) - s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) - - # Numbers are a weird case; the goal is to spot when they _start_ - # some kind of name or acronym (e.g. 2FA, 3M). - # - # Find cases of a number preceded by a lower-case letter _and_ - # followed by at least two capital letters or a single capital and - # end of string. 
- s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) - - return s.lower() - - -def from_snake_case(s): - _CHARS_TO_UPCASE_RE = re.compile(r'(?:_|^)([a-z])') - return _CHARS_TO_UPCASE_RE.sub(lambda m: m.group().replace('_', '').upper(), s) - - -if sys.version_info < (3, 7): - raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER - -_lazy_name_to_package_map = { - 'types': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types', - {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client', - '{{ service.transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.base', - '{{ service.grpc_transport_name|snake_case }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.transports.grpc', - {%- endfor %} -} - -_lazy_type_to_package_map = { -{%- filter sort_lines %} -{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}':'{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %} -{%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ 
api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %}{%- endfor %}{%- endfilter %} -} - -# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ -def __getattr__(name): # Requires Python >= 3.7 - if name == '__all__': - all_names = globals()['__all__'] = sorted( - chain( - (from_snake_case(k) for k in _lazy_name_to_package_map if k != 'types'), - _lazy_type_to_package_map, - ['types'], - ) - ) - return all_names - elif name.endswith('Transport'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type(name, (sub_mod_class,), {'__doc__': sub_mod_class.__doc__}) - globals()[name] = klass - return klass - elif name.endswith('Client'): - module = __getattr__(to_snake_case(name)) - sub_mod_class = getattr(module, name) - klass = type( - name, - (sub_mod_class,), - {'__doc__': sub_mod_class.__doc__} - ) - globals()[name] = klass - return klass - elif name in _lazy_name_to_package_map: - module = importlib.import_module(f'{_lazy_name_to_package_map[name]}') - globals()[name] = module - return module - elif name in _lazy_type_to_package_map: - module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') - klass = getattr(module, name) - {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} - globals()[name] = klass - return klass - else: - raise AttributeError(f'unknown sub-module {name!r}.') - - -def __dir__(): - return globals().get('__all__') or __getattr__('__all__') -{% else -%} {# do not use lazy import #} {# Import subpackages. -#} {% for subpackage in api.subpackages.keys() -%} from . 
import {{ subpackage }} @@ -149,5 +55,4 @@ __all__ = ( {%- endfor %} {%- endfilter %} ) -{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index 8b341523337a..4ebdec8677e5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -1,53 +1,13 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} -import importlib -import sys - - -if sys.version_info < (3, 7): - raise ImportError('This module requires Python 3.7 or later.') # pragma: NO COVER - - -_lazy_type_to_package_map = { - {%- filter sort_lines %} -{%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %} -{%- for enum in proto.enums.values() %} - '{{ enum.name }}': '{% if api.naming.module_namespace %}{{ api.naming.module_namespace|join(".") }}.{% endif -%}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }}', -{%- endfor %}{%- endfor %}{%- endfilter %} -} - - -# Background on how this behaves: https://www.python.org/dev/peps/pep-0562/ -def __getattr__(name): # Requires Python >= 3.7 - if name == '__all__': - all_names = globals()['__all__'] = sorted(_lazy_type_to_package_map) - return all_names - elif name in _lazy_type_to_package_map: - module = importlib.import_module(f'{_lazy_type_to_package_map[name]}') - klass = getattr(module, name) - {# new_klass = type(name, (klass,), {'__doc__': klass.__doc__}) #} - globals()[name] = 
klass - return klass - else: - raise AttributeError(f'unknown sub-module {name!r}.') - - -def __dir__(): - return globals().get('__all__') or __getattr__('__all__') - -{% else -%} {% for p in api.protos.values() if p.file_to_generate and p.messages -%} from .{{p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) {% endfor %} __all__ = ( -{%- for p in api.protos.values() if p.file_to_generate %}{% for m in p.messages.values() %} + {%- for p in api.protos.values() if p.file_to_generate %}{% for m in p.messages.values() %} '{{ m.name }}', -{%- endfor %}{% endfor %} + {%- endfor %}{% endfor %} ) -{% endif -%} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 7b29eeaf2a28..4a57aca16314 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -471,75 +471,6 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% endfor -%} {#- method in methods #} -{% if opts.lazy_import -%} {# lazy import #} -def test_module_level_imports(): - # Use the other transport import path so that code gets tested. 
- from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name }}GrpcTransport - transport = {{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.client_name }} - client = {{ service.client_name }}(transport=transport) - assert client._transport is transport - - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ service.name|snake_case }}_grpc_transport - transport2 = {{ service.name|snake_case }}_grpc_transport.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), - ) - - client2 = {{ service.client_name }}(transport=transport2) - assert client2._transport is transport2 - - {% with type_name = cycler(*service.methods.values()).next().input.name -%} - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name }} - type_ = {{ type_name }}() - - try: - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} import {{ type_name|lower }}_squidification - except (AttributeError, ImportError) as e: - pass - else: - assert False - {% endwith -%} - - import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }} as mod - all_names = dir(mod) - expected_names = sorted([ - 'types', - {%- for service in api.services.values()|sort(attribute='name')|unique(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} - '{{ service.client_name }}', - '{{ service.transport_name }}', - '{{ service.grpc_transport_name }}', - {%- endfor %} - {%- for proto in api.protos.values() if 
proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}', - {%- endfor %} - {%- for enum in proto.enums.values() %} - '{{ enum.name }}' - {% endfor %}{%- endfor %} - ]) - assert all_names == expected_names - - {% with type_name = cycler(*service.methods.values()).next().input.name -%} - from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types import {{ type_name }} - type_ = {{ type_name }}() - {% endwith -%} - - import {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.types as types - all_types = dir(types) - expected_types = sorted([ - {%- for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %}{%- for message in proto.messages.values() %} - '{{ message.name }}', - {%- endfor %} - {%- for enum in proto.enums.values() %} - '{{ enum.name }}', - {% endfor %}{%- endfor %} - ]) - assert all_types == expected_types - -{% endif -%} {# lazy import #} - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.{{ service.name }}GrpcTransport( diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 9a6ed566feda..83d38f6042f6 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -21,78 +21,77 @@ from os import path -showcase_version = '0.6.1' +showcase_version = "0.11.0" -@nox.session(python=['3.6', '3.7', '3.8']) +@nox.session(python=["3.6", "3.7", "3.8"]) def unit(session): """Run the unit test suite.""" session.install( - 'coverage', - 'pytest', - 'pytest-cov', - 'pytest-xdist', - 'pyfakefs', + "coverage", "pytest", "pytest-cov", "pytest-xdist", "pyfakefs", ) - session.install('-e', '.') + session.install("-e", ".") session.run( - 'py.test', - '-vv', - '-n=auto', - '--cov=gapic', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - *(session.posargs or [path.join('tests', 'unit')]), + "py.test", + "-vv", + "-n=auto", + "--cov=gapic", + "--cov-config=.coveragerc", + "--cov-report=term", + "--cov-report=html", + *(session.posargs or [path.join("tests", "unit")]), ) -@nox.session(python='3.8') +@nox.session(python="3.8") def showcase( - session, - templates='DEFAULT', - other_opts: typing.Iterable[str] = (), + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): """Run the Showcase test suite.""" # Try to make it clear if Showcase is not running, so that # people do not end up with tons of difficult-to-debug failures over # an obvious problem. 
- if not os.environ.get('CIRCLECI'): - session.log('-' * 70) - session.log('Note: Showcase must be running for these tests to work.') - session.log('See https://github.com/googleapis/gapic-showcase') - session.log('-' * 70) + if not os.environ.get("CIRCLECI"): + session.log("-" * 70) + session.log("Note: Showcase must be running for these tests to work.") + session.log("See https://github.com/googleapis/gapic-showcase") + session.log("-" * 70) # Install pytest and gapic-generator-python - session.install('mock') - session.install('pytest') - session.install('-e', '.') + session.install("mock") + session.install("pytest") + session.install("-e", ".") # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: # Download the Showcase descriptor. session.run( - 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' - f'download/v{showcase_version}/' - f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', path.join(tmp_dir, 'showcase.desc'), + "curl", + "https://github.com/googleapis/gapic-showcase/releases/" + f"download/v{showcase_version}/" + f"gapic-showcase-{showcase_version}.desc", + "-L", + "--output", + path.join(tmp_dir, "showcase.desc"), external=True, silent=True, ) # Write out a client library for Showcase. 
- template_opt = f'python-gapic-templates={templates}' - opts = f'--python_gapic_opt={template_opt}' - opts += ','.join(other_opts + ('lazy-import',)) + template_opt = f"python-gapic-templates={templates}" + opts = f"--python_gapic_opt={template_opt}" + opts += ",".join(other_opts + ("lazy-import",)) session.run( - 'protoc', - f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', + "protoc", + "--experimental_allow_proto3_optional", + f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", + f"--python_gapic_out={tmp_dir}", + "google/showcase/v1beta1/echo.proto", + "google/showcase/v1beta1/identity.proto", + "google/showcase/v1beta1/messaging.proto", external=True, ) @@ -100,54 +99,57 @@ def showcase( session.install(tmp_dir) session.run( - 'py.test', '--quiet', *(session.posargs or [path.join('tests', 'system')]) + "py.test", "--quiet", *(session.posargs or [path.join("tests", "system")]) ) -@nox.session(python='3.8') +@nox.session(python="3.8") def showcase_mtls( - session, - templates='DEFAULT', - other_opts: typing.Iterable[str] = (), + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): """Run the Showcase mtls test suite.""" # Try to make it clear if Showcase is not running, so that # people do not end up with tons of difficult-to-debug failures over # an obvious problem. 
- if not os.environ.get('CIRCLECI'): - session.log('-' * 70) - session.log('Note: Showcase must be running for these tests to work.') - session.log('See https://github.com/googleapis/gapic-showcase') - session.log('-' * 70) + if not os.environ.get("CIRCLECI"): + session.log("-" * 70) + session.log("Note: Showcase must be running for these tests to work.") + session.log("See https://github.com/googleapis/gapic-showcase") + session.log("-" * 70) # Install pytest and gapic-generator-python - session.install('mock') - session.install('pytest') - session.install('-e', '.') + session.install("mock") + session.install("pytest") + session.install("-e", ".") # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: # Download the Showcase descriptor. session.run( - 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' - f'download/v{showcase_version}/' - f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', path.join(tmp_dir, 'showcase.desc'), + "curl", + "https://github.com/googleapis/gapic-showcase/releases/" + f"download/v{showcase_version}/" + f"gapic-showcase-{showcase_version}.desc", + "-L", + "--output", + path.join(tmp_dir, "showcase.desc"), external=True, silent=True, ) # Write out a client library for Showcase. 
- template_opt = f'python-gapic-templates={templates}' - opts = f'--python_gapic_opt={template_opt}' - opts += ','.join(other_opts + ('lazy-import',)) + template_opt = f"python-gapic-templates={templates}" + opts = f"--python_gapic_opt={template_opt}" + opts += ",".join(other_opts + ("lazy-import",)) session.run( - 'protoc', - f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', + "protoc", + "--experimental_allow_proto3_optional", + f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", + f"--python_gapic_out={tmp_dir}", + "google/showcase/v1beta1/echo.proto", + "google/showcase/v1beta1/identity.proto", + "google/showcase/v1beta1/messaging.proto", external=True, ) @@ -155,159 +157,177 @@ def showcase_mtls( session.install(tmp_dir) session.run( - 'py.test', '--quiet', '--mtls', *(session.posargs or [path.join('tests', 'system')]) + "py.test", + "--quiet", + "--mtls", + *(session.posargs or [path.join("tests", "system")]), ) -@nox.session(python='3.8') +@nox.session(python="3.8") def showcase_alternative_templates(session): - templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') - showcase(session, templates=templates, other_opts=('old-naming',)) + templates = path.join(path.dirname(__file__), "gapic", "ads-templates") + showcase(session, templates=templates, other_opts=("old-naming",)) -@nox.session(python='3.8') +@nox.session(python="3.8") def showcase_mtls_alternative_templates(session): - templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') - showcase_mtls(session, templates=templates, other_opts=('old-naming',)) + templates = path.join(path.dirname(__file__), "gapic", "ads-templates") + showcase_mtls(session, templates=templates, other_opts=("old-naming",)) -@nox.session(python=['3.6', '3.7', '3.8']) +@nox.session(python=["3.6", "3.7", "3.8"]) def showcase_unit( - session, - templates='DEFAULT', - 
other_opts: typing.Iterable[str] = (), + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): """Run the generated unit tests against the Showcase library.""" # Install pytest and gapic-generator-python - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-xdist',) - session.install('.') + session.install( + "coverage", "pytest", "pytest-cov", "pytest-xdist", + ) + session.install(".") # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: # Download the Showcase descriptor. session.run( - 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' - f'download/v{showcase_version}/' - f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', path.join(tmp_dir, 'showcase.desc'), + "curl", + "https://github.com/googleapis/gapic-showcase/releases/" + f"download/v{showcase_version}/" + f"gapic-showcase-{showcase_version}.desc", + "-L", + "--output", + path.join(tmp_dir, "showcase.desc"), external=True, silent=True, ) # Write out a client library for Showcase. opts = [ - f'python-gapic-templates={templates}', + f"python-gapic-templates={templates}", ] opts.extend(other_opts) - if session.python == '3.8': - opts.append('lazy-import') + if session.python == "3.8": + opts.append("lazy-import") opt_str = f'--python_gapic_opt={",".join(opts)},' session.run( - 'protoc', - f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', + "protoc", + "--experimental_allow_proto3_optional", + f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", + f"--python_gapic_out={tmp_dir}", opt_str, - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - 'google/showcase/v1beta1/messaging.proto', - 'google/showcase/v1beta1/testing.proto', + "google/showcase/v1beta1/echo.proto", + "google/showcase/v1beta1/identity.proto", + "google/showcase/v1beta1/messaging.proto", + "google/showcase/v1beta1/testing.proto", external=True, ) # Install the library. 
session.chdir(tmp_dir) - session.install('-e', tmp_dir) + session.install("-e", tmp_dir) # Run the tests. session.run( - 'py.test', - '-n=auto', - '--quiet', - '--cov=google', - '--cov-report=term', - *(session.posargs or [path.join('tests', 'unit')]), + "py.test", + "-n=auto", + "--quiet", + "--cov=google", + "--cov-report=term", + *(session.posargs or [path.join("tests", "unit")]), ) -@nox.session(python=['3.6', '3.7', '3.8']) +@nox.session(python=["3.6", "3.7", "3.8"]) def showcase_unit_alternative_templates(session): - templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') - showcase_unit(session, templates=templates, other_opts=('old-naming',)) + templates = path.join(path.dirname(__file__), "gapic", "ads-templates") + showcase_unit(session, templates=templates, other_opts=("old-naming",)) -@nox.session(python='3.8') +@nox.session(python="3.8") def showcase_mypy( - session, templates='DEFAULT', - other_opts: typing.Iterable[str] = (), + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): """Perform typecheck analysis on the generated Showcase library.""" # Install pytest and gapic-generator-python - session.install('mypy') - session.install('.') + session.install("mypy") + session.install(".") # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: # Download the Showcase descriptor. session.run( - 'curl', 'https://github.com/googleapis/gapic-showcase/releases/' - f'download/v{showcase_version}/' - f'gapic-showcase-{showcase_version}.desc', - '-L', '--output', path.join(tmp_dir, 'showcase.desc'), + "curl", + "https://github.com/googleapis/gapic-showcase/releases/" + f"download/v{showcase_version}/" + f"gapic-showcase-{showcase_version}.desc", + "-L", + "--output", + path.join(tmp_dir, "showcase.desc"), external=True, silent=True, ) # Write out a client library for Showcase. 
- template_opt = f'python-gapic-templates={templates}' - gapic_opts = f'--python_gapic_opt={template_opt},' - gapic_opts += ','.join(other_opts) + template_opt = f"python-gapic-templates={templates}" + gapic_opts = f"--python_gapic_opt={template_opt}," + gapic_opts += ",".join(other_opts) session.run( - 'protoc', - f'--descriptor_set_in={tmp_dir}{path.sep}showcase.desc', - f'--python_gapic_out={tmp_dir}', + "protoc", + "--experimental_allow_proto3_optional", + f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", + f"--python_gapic_out={tmp_dir}", gapic_opts, - 'google/showcase/v1beta1/echo.proto', - 'google/showcase/v1beta1/identity.proto', - 'google/showcase/v1beta1/messaging.proto', - 'google/showcase/v1beta1/testing.proto', + "google/showcase/v1beta1/echo.proto", + "google/showcase/v1beta1/identity.proto", + "google/showcase/v1beta1/messaging.proto", + "google/showcase/v1beta1/testing.proto", external=True, ) # Install the library. session.chdir(tmp_dir) - session.install('-e', tmp_dir) + session.install("-e", tmp_dir) # Run the tests. - session.run('mypy', 'google') + session.run("mypy", "google") -@nox.session(python='3.8') +@nox.session(python="3.8") def showcase_mypy_alternative_templates(session): - templates = path.join(path.dirname(__file__), 'gapic', 'ads-templates') - showcase_mypy(session, templates=templates, other_opts=('old-naming',)) + templates = path.join(path.dirname(__file__), "gapic", "ads-templates") + showcase_mypy(session, templates=templates, other_opts=("old-naming",)) -@nox.session(python='3.6') +@nox.session(python="3.6") def docs(session): """Build the docs.""" - session.install('sphinx < 1.8', 'sphinx_rtd_theme') - session.install('.') + session.install("sphinx < 1.8", "sphinx_rtd_theme") + session.install(".") # Build the docs! 
- session.run('rm', '-rf', 'docs/_build/') - session.run('sphinx-build', '-W', '-b', 'html', '-d', - 'docs/_build/doctrees', 'docs/', 'docs/_build/html/') + session.run("rm", "-rf", "docs/_build/") + session.run( + "sphinx-build", + "-W", + "-b", + "html", + "-d", + "docs/_build/doctrees", + "docs/", + "docs/_build/html/", + ) -@nox.session(python=['3.7', '3.8']) +@nox.session(python=["3.7", "3.8"]) def mypy(session): """Perform typecheck analysis.""" - session.install('mypy') - session.install('.') - session.run('mypy', 'gapic') + session.install("mypy") + session.install(".") + session.run("mypy", "gapic") diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9a571b054eab..54bf1a856315 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,52 +20,48 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -with io.open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: +with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() setup( - name='gapic-generator', - version='0.23.0', - license='Apache 2.0', - author='Dov Shlachter', - author_email='dovs@google.com', - url='https://github.com/googleapis/gapic-generator-python.git', - packages=find_packages(exclude=['docs', 'tests']), - description='Python client library generator for APIs defined by protocol' - 'buffers', + name="gapic-generator", + version="0.23.0", + license="Apache 2.0", + author="Dov Shlachter", + author_email="dovs@google.com", + url="https://github.com/googleapis/gapic-generator-python.git", + packages=find_packages(exclude=["docs", "tests"]), + description="Python client library generator for APIs defined by protocol" + "buffers", long_description=README, entry_points="""[console_scripts] protoc-gen-dump=gapic.cli.dump:dump protoc-gen-python_gapic=gapic.cli.generate:generate """, - platforms='Posix; MacOS X', + platforms="Posix; MacOS X", include_package_data=True, 
install_requires=( - 'click >= 6.7', - 'google-api-core >= 1.17.0', - 'googleapis-common-protos >= 1.6.0', - 'grpcio >= 1.24.3', - 'jinja2 >= 2.10', - 'protobuf >= 3.7.1', - 'pypandoc >= 1.4', - 'PyYAML >= 5.1.1', - ), - extras_require={ - ':python_version<"3.7"': ('dataclasses >= 0.4',), - }, - tests_require=( - 'pyfakefs >= 3.6', + "click >= 6.7", + "google-api-core >= 1.17.0", + "googleapis-common-protos >= 1.6.0", + "grpcio >= 1.24.3", + "jinja2 >= 2.10", + "protobuf >= 3.12.0", + "pypandoc >= 1.4", + "PyYAML >= 5.1.1", ), + extras_require={':python_version<"3.7"': ("dataclasses >= 0.4",),}, + tests_require=("pyfakefs >= 3.6",), classifiers=( - 'Development Status :: 4 - Beta', - 'Environment :: Console', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: POSIX', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Software Development :: Code Generators', - 'Topic :: Software Development :: Libraries :: Python Modules', + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Operating System :: POSIX", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Software Development :: Code Generators", + "Topic :: Software Development :: Libraries :: Python Modules", ), zip_safe=False, ) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index a3108c436d22..ed21d45d5476 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -22,6 +22,7 @@ from google.auth import credentials from google.showcase import EchoClient from google.showcase import IdentityClient +from google.showcase import MessagingClient import grpc @@ -51,17 +52,11 @@ def pytest_addoption(parser): ) -@pytest.fixture -def 
use_mtls(request): - return request.config.getoption("--mtls") - - -@pytest.fixture -def echo(use_mtls): +def construct_client(client_class, use_mtls): if use_mtls: with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: mock_ssl_cred.return_value = ssl_credentials - client = EchoClient( + client = client_class( credentials=credentials.AnonymousCredentials(), client_options=client_options, ) @@ -70,30 +65,30 @@ def echo(use_mtls): ) return client else: - transport = EchoClient.get_transport_class("grpc")( + transport = client_class.get_transport_class("grpc")( channel=grpc.insecure_channel("localhost:7469") ) - return EchoClient(transport=transport) + return client_class(transport=transport) + + +@pytest.fixture +def use_mtls(request): + return request.config.getoption("--mtls") + + +@pytest.fixture +def echo(use_mtls): + return construct_client(EchoClient, use_mtls) @pytest.fixture def identity(use_mtls): - if use_mtls: - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: - mock_ssl_cred.return_value = ssl_credentials - client = IdentityClient( - credentials=credentials.AnonymousCredentials(), - client_options=client_options, - ) - mock_ssl_cred.assert_called_once_with( - certificate_chain=cert, private_key=key - ) - return client - else: - transport = IdentityClient.get_transport_class("grpc")( - channel=grpc.insecure_channel("localhost:7469") - ) - return IdentityClient(transport=transport) + return construct_client(IdentityClient, use_mtls) + + +@pytest.fixture +def messaging(use_mtls): + return construct_client(MessagingClient, use_mtls) class MetadataClientInterceptor( diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 597b936ccaf9..7d32c37e6f57 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -15,32 +15,30 @@ def 
test_crud_with_request(identity): count = len(identity.list_users().users) - user = identity.create_user(request={'user': { - 'display_name': 'Guido van Rossum', - 'email': 'guido@guido.fake', - }}) + user = identity.create_user( + request={ + "user": {"display_name": "Guido van Rossum", "email": "guido@guido.fake", } + } + ) try: - assert user.display_name == 'Guido van Rossum' - assert user.email == 'guido@guido.fake' + assert user.display_name == "Guido van Rossum" + assert user.email == "guido@guido.fake" assert len(identity.list_users().users) == count + 1 - assert identity.get_user({ - 'name': user.name - }).display_name == 'Guido van Rossum' + assert identity.get_user( + {"name": user.name}).display_name == "Guido van Rossum" finally: - identity.delete_user({'name': user.name}) + identity.delete_user({"name": user.name}) def test_crud_flattened(identity): count = len(identity.list_users().users) user = identity.create_user( - display_name='Monty Python', - email='monty@python.org', - ) + display_name="Monty Python", email="monty@python.org", ) try: - assert user.display_name == 'Monty Python' - assert user.email == 'monty@python.org' + assert user.display_name == "Monty Python" + assert user.email == "monty@python.org" assert len(identity.list_users().users) == count + 1 - assert identity.get_user(name=user.name).display_name == 'Monty Python' + assert identity.get_user(name=user.name).display_name == "Monty Python" finally: identity.delete_user(name=user.name) @@ -50,3 +48,27 @@ def test_path_methods(identity): actual = identity.user_path("bdfl") assert expected == actual + + +def test_nonslash_resource(messaging): + expected = "users/bdfl/profile/blurbs/legacy/apocalyptic~city" + actual = messaging.blurb_path("bdfl", "apocalyptic", "city") + + assert expected == actual + + +def test_path_parsing(messaging): + expected = {"room_id": "tiki"} + actual = messaging.parse_room_path(messaging.room_path("tiki")) + + assert expected == actual + + expected = { + 
"user_id": "bdfl", + "legacy_user_id": "apocalyptic", + "blurb_id": "city", + } + actual = messaging.parse_blurb_path( + messaging.blurb_path("bdfl", "apocalyptic", "city") + ) + assert expected == actual diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 69cfc665f38e..ede00ba51a2b 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -24,7 +24,7 @@ from gapic.generator import generator from gapic.generator import options -from gapic.samplegen_utils import (types, yaml) +from gapic.samplegen_utils import types, yaml from gapic.schema import api from gapic.schema import naming from gapic.schema import wrappers @@ -32,276 +32,298 @@ def test_custom_template_directory(): # Create a generator. - opts = options.Options.build('python-gapic-templates=/templates/') + opts = options.Options.build("python-gapic-templates=/templates/") g = generator.Generator(opts) # Assert that the Jinja loader will pull from the correct location. 
- assert g._env.loader.searchpath == ['/templates'] + assert g._env.loader.searchpath == ["/templates"] def test_get_response(): g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/bar/baz.py.j2', 'molluscs/squid/sample.py.j2'] - with mock.patch.object(jinja2.Environment, 'get_template') as gt: - gt.return_value = jinja2.Template('I am a template result.') - cgr = g.get_response( - api_schema=make_api(), - opts=options.Options.build('') + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: + lt.return_value = ["foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2"] + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template("I am a template result.") + cgr = g.get_response(api_schema=make_api(), + opts=options.Options.build("")) + lt.assert_called_once() + gt.assert_has_calls( + [ + mock.call("foo/bar/baz.py.j2"), + mock.call("molluscs/squid/sample.py.j2"), + ] ) - lt.assert_called_once() - gt.assert_has_calls([ - mock.call('foo/bar/baz.py.j2'), - mock.call('molluscs/squid/sample.py.j2') - ]) - assert len(cgr.file) == 1 - assert cgr.file[0].name == 'foo/bar/baz.py' - assert cgr.file[0].content == 'I am a template result.\n' + assert len(cgr.file) == 1 + assert cgr.file[0].name == "foo/bar/baz.py" + assert cgr.file[0].content == "I am a template result.\n" def test_get_response_ignores_empty_files(): g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: - lt.return_value = ['foo/bar/baz.py.j2', 'molluscs/squid/sample.py.j2'] - with mock.patch.object(jinja2.Environment, 'get_template') as gt: - gt.return_value = jinja2.Template('# Meaningless comment') - cgr = g.get_response( - api_schema=make_api(), - opts=options.Options.build('') + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: + lt.return_value = ["foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2"] + with 
mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template("# Meaningless comment") + cgr = g.get_response(api_schema=make_api(), + opts=options.Options.build("")) + lt.assert_called_once() + gt.assert_has_calls( + [ + mock.call("foo/bar/baz.py.j2"), + mock.call("molluscs/squid/sample.py.j2"), + ] ) - lt.assert_called_once() - gt.assert_has_calls([ - mock.call('foo/bar/baz.py.j2'), - mock.call('molluscs/squid/sample.py.j2') - ]) - assert len(cgr.file) == 0 + assert len(cgr.file) == 0 def test_get_response_ignores_private_files(): g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ - 'foo/bar/baz.py.j2', - 'foo/bar/_base.py.j2', - 'molluscs/squid/sample.py.j2', + "foo/bar/baz.py.j2", + "foo/bar/_base.py.j2", + "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, 'get_template') as gt: - gt.return_value = jinja2.Template('I am a template result.') - cgr = g.get_response( - api_schema=make_api(), - opts=options.Options.build('') + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template("I am a template result.") + cgr = g.get_response(api_schema=make_api(), + opts=options.Options.build("")) + lt.assert_called_once() + gt.assert_has_calls( + [ + mock.call("foo/bar/baz.py.j2"), + mock.call("molluscs/squid/sample.py.j2"), + ] ) - lt.assert_called_once() - gt.assert_has_calls([ - mock.call('foo/bar/baz.py.j2'), - mock.call('molluscs/squid/sample.py.j2') - ]) - assert len(cgr.file) == 1 - assert cgr.file[0].name == 'foo/bar/baz.py' - assert cgr.file[0].content == 'I am a template result.\n' + assert len(cgr.file) == 1 + assert cgr.file[0].name == "foo/bar/baz.py" + assert cgr.file[0].content == "I am a template result.\n" def test_get_response_fails_invalid_file_paths(): g = make_generator() - with 
mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ - 'foo/bar/%service/%proto/baz.py.j2', - 'molluscs/squid/sample.py.j2', + "foo/bar/%service/%proto/baz.py.j2", + "molluscs/squid/sample.py.j2", ] with pytest.raises(ValueError) as ex: - g.get_response( - api_schema=make_api(), - opts=options.Options.build('') - ) + g.get_response(api_schema=make_api(), + opts=options.Options.build("")) ex_str = str(ex.value) - assert '%proto' in ex_str and '%service' in ex_str + assert "%proto" in ex_str and "%service" in ex_str def test_get_response_enumerates_services(): g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ - 'foo/%service/baz.py.j2', - 'molluscs/squid/sample.py.j2', + "foo/%service/baz.py.j2", + "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, 'get_template') as gt: - gt.return_value = jinja2.Template('Service: {{ service.name }}') + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template("Service: {{ service.name }}") cgr = g.get_response( - api_schema=make_api(make_proto( - descriptor_pb2.FileDescriptorProto(service=[ - descriptor_pb2.ServiceDescriptorProto(name='Spam'), - descriptor_pb2.ServiceDescriptorProto( - name='EggsService' + api_schema=make_api( + make_proto( + descriptor_pb2.FileDescriptorProto( + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Spam"), + descriptor_pb2.ServiceDescriptorProto( + name="EggsService" + ), + ] ), - ]), - )), opts=options.Options.build('')) - assert len(cgr.file) == 2 - assert {i.name for i in cgr.file} == { - 'foo/spam/baz.py', - 'foo/eggs_service/baz.py', - } + ) + ), + opts=options.Options.build(""), + ) + assert len(cgr.file) == 2 + assert {i.name for i in cgr.file} == { + 
"foo/spam/baz.py", + "foo/eggs_service/baz.py", + } def test_get_response_enumerates_proto(): g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ - 'foo/%proto.py.j2', - 'molluscs/squid/sample.py.j2', + "foo/%proto.py.j2", + "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, 'get_template') as gt: - gt.return_value = jinja2.Template('Proto: {{ proto.module_name }}') - cgr = g.get_response(api_schema=make_api( - make_proto(descriptor_pb2.FileDescriptorProto(name='a.proto')), - make_proto(descriptor_pb2.FileDescriptorProto(name='b.proto')), - ), opts=options.Options.build('')) - assert len(cgr.file) == 2 - assert {i.name for i in cgr.file} == {'foo/a.py', 'foo/b.py'} + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template("Proto: {{ proto.module_name }}") + cgr = g.get_response( + api_schema=make_api( + make_proto( + descriptor_pb2.FileDescriptorProto(name="a.proto")), + make_proto( + descriptor_pb2.FileDescriptorProto(name="b.proto")), + ), + opts=options.Options.build(""), + ) + assert len(cgr.file) == 2 + assert {i.name for i in cgr.file} == {"foo/a.py", "foo/b.py"} def test_get_response_divides_subpackages(): g = make_generator() - api_schema = api.API.build([ - descriptor_pb2.FileDescriptorProto( - name='top.proto', - package='foo.v1', - service=[descriptor_pb2.ServiceDescriptorProto(name='Top')], - ), - descriptor_pb2.FileDescriptorProto( - name='a/spam/ham.proto', - package='foo.v1.spam', - service=[descriptor_pb2.ServiceDescriptorProto(name='Bacon')], - ), - descriptor_pb2.FileDescriptorProto( - name='a/eggs/yolk.proto', - package='foo.v1.eggs', - service=[descriptor_pb2.ServiceDescriptorProto(name='Scramble')], - ), - ], package='foo.v1') - with mock.patch.object(jinja2.FileSystemLoader, 'list_templates') as lt: + api_schema = api.API.build( + [ + 
descriptor_pb2.FileDescriptorProto( + name="top.proto", + package="foo.v1", + service=[descriptor_pb2.ServiceDescriptorProto(name="Top")], + ), + descriptor_pb2.FileDescriptorProto( + name="a/spam/ham.proto", + package="foo.v1.spam", + service=[descriptor_pb2.ServiceDescriptorProto(name="Bacon")], + ), + descriptor_pb2.FileDescriptorProto( + name="a/eggs/yolk.proto", + package="foo.v1.eggs", + service=[descriptor_pb2.ServiceDescriptorProto( + name="Scramble")], + ), + ], + package="foo.v1", + ) + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ - 'foo/%sub/types/%proto.py.j2', - 'foo/%sub/services/%service.py.j2', - 'molluscs/squid/sample.py.j2', + "foo/%sub/types/%proto.py.j2", + "foo/%sub/services/%service.py.j2", + "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, 'get_template') as gt: - gt.return_value = jinja2.Template(""" + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template( + """ {{- '' }}Subpackage: {{ '.'.join(api.subpackage_view) }} - """.strip()) - cgr = g.get_response( - api_schema=api_schema, - opts=options.Options.build('') + """.strip() ) - assert len(cgr.file) == 6 - assert {i.name for i in cgr.file} == { - 'foo/types/top.py', - 'foo/services/top.py', - 'foo/spam/types/ham.py', - 'foo/spam/services/bacon.py', - 'foo/eggs/types/yolk.py', - 'foo/eggs/services/scramble.py', - } + cgr = g.get_response(api_schema=api_schema, + opts=options.Options.build("")) + assert len(cgr.file) == 6 + assert {i.name for i in cgr.file} == { + "foo/types/top.py", + "foo/services/top.py", + "foo/spam/types/ham.py", + "foo/spam/services/bacon.py", + "foo/eggs/types/yolk.py", + "foo/eggs/services/scramble.py", + } def test_get_filename(): g = make_generator() - template_name = '%namespace/%name_%version/foo.py.j2' - assert g._get_filename(template_name, - api_schema=make_api( - naming=make_naming( - namespace=(), name='Spam', version='v2'), - ) - ) == 
'spam_v2/foo.py' + template_name = "%namespace/%name_%version/foo.py.j2" + assert ( + g._get_filename( + template_name, + api_schema=make_api( + naming=make_naming(namespace=(), name="Spam", version="v2"), + ), + ) + == "spam_v2/foo.py" + ) def test_get_filename_with_namespace(): g = make_generator() - template_name = '%namespace/%name_%version/foo.py.j2' - assert g._get_filename(template_name, - api_schema=make_api( - naming=make_naming( - name='Spam', - namespace=('Ham', 'Bacon'), - version='v2', - ), - ), - ) == 'ham/bacon/spam_v2/foo.py' + template_name = "%namespace/%name_%version/foo.py.j2" + assert ( + g._get_filename( + template_name, + api_schema=make_api( + naming=make_naming( + name="Spam", namespace=("Ham", "Bacon"), version="v2", + ), + ), + ) + == "ham/bacon/spam_v2/foo.py" + ) def test_get_filename_with_service(): g = make_generator() - template_name = '%name/%service/foo.py.j2' - assert g._get_filename( - template_name, - api_schema=make_api( - naming=make_naming(namespace=(), name='Spam', version='v2'), - ), - context={ - 'service': wrappers.Service( - methods=[], - service_pb=descriptor_pb2.ServiceDescriptorProto(name='Eggs'), + template_name = "%name/%service/foo.py.j2" + assert ( + g._get_filename( + template_name, + api_schema=make_api( + naming=make_naming(namespace=(), name="Spam", version="v2"), ), - } - ) == 'spam/eggs/foo.py' + context={ + "service": wrappers.Service( + methods=[], + service_pb=descriptor_pb2.ServiceDescriptorProto( + name="Eggs"), + ), + }, + ) + == "spam/eggs/foo.py" + ) def test_get_filename_with_proto(): file_pb2 = descriptor_pb2.FileDescriptorProto( - name='bacon.proto', - package='foo.bar.v1', + name="bacon.proto", package="foo.bar.v1", ) api = make_api( make_proto(file_pb2), - naming=make_naming(namespace=(), name='Spam', version='v2'), + naming=make_naming(namespace=(), name="Spam", version="v2"), ) g = make_generator() - assert g._get_filename( - '%name/types/%proto.py.j2', - api_schema=api, - context={'proto': 
api.protos['bacon.proto']}, - ) == 'spam/types/bacon.py' + assert ( + g._get_filename( + "%name/types/%proto.py.j2", + api_schema=api, + context={"proto": api.protos["bacon.proto"]}, + ) + == "spam/types/bacon.py" + ) def test_get_filename_with_proto_and_sub(): file_pb2 = descriptor_pb2.FileDescriptorProto( - name='bacon.proto', - package='foo.bar.v2.baz', + name="bacon.proto", package="foo.bar.v2.baz", ) naming = make_naming( - namespace=('Foo',), - name='Bar', - proto_package='foo.bar.v2', - version='v2', + namespace=("Foo",), name="Bar", proto_package="foo.bar.v2", version="v2", ) api = make_api( - make_proto(file_pb2, naming=naming), - naming=naming, - subpackage_view=('baz',), + make_proto(file_pb2, naming=naming), naming=naming, subpackage_view=("baz",), ) g = make_generator() - assert g._get_filename( - '%name/types/%sub/%proto.py.j2', - api_schema=api, - context={'proto': api.protos['bacon.proto']}, - ) == 'bar/types/baz/bacon.py' + assert ( + g._get_filename( + "%name/types/%sub/%proto.py.j2", + api_schema=api, + context={"proto": api.protos["bacon.proto"]}, + ) + == "bar/types/baz/bacon.py" + ) def test_parse_sample_paths(fs): - fpath = 'sampledir/sample.yaml' + fpath = "sampledir/sample.yaml" fs.create_file( fpath, contents=dedent( - ''' + """ --- type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService - ''' - ) + """ + ), ) with pytest.raises(types.InvalidConfig): @@ -309,16 +331,11 @@ def test_parse_sample_paths(fs): @mock.patch( - 'gapic.samplegen.samplegen.generate_sample', - return_value='', -) -@mock.patch( - 'time.gmtime', + "gapic.samplegen.samplegen.generate_sample", return_value="", ) +@mock.patch("time.gmtime",) def test_samplegen_config_to_output_files( - mock_gmtime, - mock_generate_sample, - fs, + mock_gmtime, mock_generate_sample, fs, ): # These time values are nothing special, # they just need to be deterministic. 
@@ -332,9 +349,9 @@ def test_samplegen_config_to_output_files( mock_gmtime.return_value = returner fs.create_file( - 'samples.yaml', + "samples.yaml", contents=dedent( - ''' + """ --- type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 @@ -344,38 +361,28 @@ def test_samplegen_config_to_output_files( rpc: get_squid_streaming - region_tag: clam_sample rpc: get_clam - ''' - ) + """ + ), ) - g = generator.Generator( - options.Options.build( - 'samples=samples.yaml', - ) - ) + g = generator.Generator(options.Options.build("samples=samples.yaml",)) # Need to have the sample template visible to the generator. - g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api( - naming=naming.NewNaming(name='Mollusc', version='v6') - ) + api_schema = make_api(naming=naming.NewNaming( + name="Mollusc", version="v6")) actual_response = g.get_response( - api_schema, - opts=options.Options.build('') - ) + api_schema, opts=options.Options.build("")) expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( - name="samples/squid_sample.py", - content="\n", - ), + name="samples/squid_sample.py", content="\n",), CodeGeneratorResponse.File( - name="samples/clam_sample.py", - content="\n", - ), + name="samples/clam_sample.py", content="\n",), CodeGeneratorResponse.File( name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", - content=dedent("""\ + content=dedent( + """\ --- type: manifest/samples schema_version: 3 @@ -393,21 +400,22 @@ def test_samplegen_config_to_output_files( sample: clam_sample path: '{base_path}/clam_sample.py' region_tag: clam_sample - """), - ) + """ + ), + ), ] ) + expected_response.supported_features |= ( + CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + ) assert actual_response == expected_response @mock.patch( - 'gapic.samplegen.samplegen.generate_sample', - return_value='', -) -@mock.patch( - 'time.gmtime', + 
"gapic.samplegen.samplegen.generate_sample", return_value="", ) +@mock.patch("time.gmtime",) def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): # These time values are nothing special, # they just need to be deterministic. @@ -425,9 +433,9 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): # The third has no id of any kind, so the generator is required to make a # unique ID for it. fs.create_file( - 'samples.yaml', + "samples.yaml", contents=dedent( - ''' + """ --- type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 @@ -440,37 +448,31 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): rpc: get_squid_streaming # No id or region tag. - rpc: get_squid_streaming - ''' - ) + """ + ), ) - g = generator.Generator(options.Options.build('samples=samples.yaml')) + g = generator.Generator(options.Options.build("samples=samples.yaml")) # Need to have the sample template visible to the generator. 
- g._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api( - naming=naming.NewNaming(name='Mollusc', version='v6') - ) - actual_response = g.get_response( - api_schema, - opts=options.Options.build('') - ) + api_schema = make_api(naming=naming.NewNaming( + name="Mollusc", version="v6")) + actual_response = g.get_response(api_schema, + opts=options.Options.build("")) expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( - name="samples/squid_sample_91a465c6.py", - content="\n", - ), - CodeGeneratorResponse.File( - name="samples/squid_sample_55051b38.py", - content="\n", + name="samples/squid_sample_91a465c6.py", content="\n", ), CodeGeneratorResponse.File( - name="samples/157884ee.py", - content="\n", + name="samples/squid_sample_55051b38.py", content="\n", ), + CodeGeneratorResponse.File(name="samples/157884ee.py", + content="\n",), CodeGeneratorResponse.File( name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", - content=dedent("""\ + content=dedent( + """\ --- type: manifest/samples schema_version: 3 @@ -491,10 +493,14 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): - <<: *python sample: 157884ee path: '{base_path}/157884ee.py' - """) + """ + ), ), ] ) + expected_response.supported_features |= ( + CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + ) assert actual_response == expected_response @@ -504,7 +510,7 @@ def test_generator_duplicate_samples(fs): fs.create_file( config_fpath, contents=dedent( - ''' + """ # Note: the samples are duplicates. 
type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 @@ -515,35 +521,23 @@ def test_generator_duplicate_samples(fs): - id: squid_sample region_tag: humboldt_tag rpc: get_squid - ''' - ) + """ + ), ) - generator = make_generator('samples=samples.yaml') - generator._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) - api_schema = make_api( - naming=naming.NewNaming(name='Mollusc', version='v6') - ) + generator = make_generator("samples=samples.yaml") + generator._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) + api_schema = make_api(naming=naming.NewNaming( + name="Mollusc", version="v6")) with pytest.raises(types.DuplicateSample): - generator.get_response( - api_schema=api_schema, - opts=options.Options.build('') - ) + generator.get_response(api_schema=api_schema, + opts=options.Options.build("")) -@mock.patch( - 'gapic.samplegen.samplegen.generate_sample', - return_value='' -) -@mock.patch( - 'time.gmtime', -) -def test_dont_generate_in_code_samples( - mock_gmtime, - mock_generate_sample, - fs -): +@mock.patch("gapic.samplegen.samplegen.generate_sample", return_value="") +@mock.patch("time.gmtime",) +def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): # These time values are nothing special, # they just need to be deterministic. 
returner = mock.MagicMock() @@ -559,7 +553,7 @@ def test_dont_generate_in_code_samples( fs.create_file( config_fpath, contents=dedent( - ''' + """ type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: @@ -582,22 +576,22 @@ def test_dont_generate_in_code_samples( - id: octopus_sample rpc: IdentifyMollusc service: Mollusc.v1.Mollusc - ''' - ) + """ + ), ) - generator = make_generator(f'samples={config_fpath}') - generator._env.loader = jinja2.DictLoader({'sample.py.j2': ''}) + generator = make_generator(f"samples={config_fpath}") + generator._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = make_api( make_proto( descriptor_pb2.FileDescriptorProto( - name='mollusc.proto', - package='Mollusc.v1', + name="mollusc.proto", + package="Mollusc.v1", service=[descriptor_pb2.ServiceDescriptorProto( - name='Mollusc')], + name="Mollusc")], ), ), - naming=naming.NewNaming(name='Mollusc', version='v6'), + naming=naming.NewNaming(name="Mollusc", version="v6"), ) # Note that we do NOT expect a clam sample. 
@@ -609,20 +603,15 @@ def test_dont_generate_in_code_samples( expected = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( - name="samples/squid_sample.py", - content="\n", - ), + name="samples/squid_sample.py", content="\n",), CodeGeneratorResponse.File( - name="samples/whelk_sample.py", - content="\n", - ), + name="samples/whelk_sample.py", content="\n",), CodeGeneratorResponse.File( - name="samples/octopus_sample.py", - content="\n", - ), + name="samples/octopus_sample.py", content="\n",), CodeGeneratorResponse.File( name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", - content=dedent(""" --- + content=dedent( + """ --- type: manifest/samples schema_version: 3 python: &python @@ -640,44 +629,47 @@ def test_dont_generate_in_code_samples( - <<: *python sample: octopus_sample path: \'{base_path}/octopus_sample.py\' - """) - ) + """ + ), + ), ] ) + expected.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + actual = generator.get_response( - api_schema=api_schema, - opts=options.Options.build('') + api_schema=api_schema, opts=options.Options.build("") ) assert actual == expected -def make_generator(opts_str: str = '') -> generator.Generator: +def make_generator(opts_str: str = "") -> generator.Generator: return generator.Generator(options.Options.build(opts_str)) -def make_proto(file_pb: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool = True, prior_protos: Mapping = None, - naming: naming.Naming = None, - ) -> api.Proto: +def make_proto( + file_pb: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool = True, + prior_protos: Mapping = None, + naming: naming.Naming = None, +) -> api.Proto: prior_protos = prior_protos or {} - return api._ProtoBuilder(file_pb, - file_to_generate=file_to_generate, - naming=naming or make_naming(), - prior_protos=prior_protos, - ).proto + return api._ProtoBuilder( + file_pb, + file_to_generate=file_to_generate, + naming=naming or make_naming(), + 
prior_protos=prior_protos, + ).proto def make_api(*protos, naming: naming.Naming = None, **kwargs) -> api.API: return api.API( - naming=naming or make_naming(), - all_protos={i.name: i for i in protos}, - **kwargs + naming=naming or make_naming(), all_protos={i.name: i for i in protos}, **kwargs ) def make_naming(**kwargs) -> naming.Naming: - kwargs.setdefault('name', 'Hatstand') - kwargs.setdefault('namespace', ('Google', 'Cloud')) - kwargs.setdefault('version', 'v1') - kwargs.setdefault('product_name', 'Hatstand') + kwargs.setdefault("name", "Hatstand") + kwargs.setdefault("namespace", ("Google", "Cloud")) + kwargs.setdefault("version", "v1") + kwargs.setdefault("product_name", "Hatstand") return naming.NewNaming(**kwargs) From adb3793423f0fe13639f70604af3573a14b5c5d9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Jun 2020 18:49:06 +0200 Subject: [PATCH 0273/1339] Update dependency dataclasses to v0.7 (#439) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 418f6c7cc626..e4aac62ad688 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,4 +6,4 @@ MarkupSafe==1.1.1 protobuf==3.12.2 pypandoc==1.5 PyYAML==5.3.1 -dataclasses==0.6 \ No newline at end of file +dataclasses==0.7 \ No newline at end of file From 9ef3eccdc9eac0faba44120cf16343359e0aa98c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Jun 2020 18:50:18 +0200 Subject: [PATCH 0274/1339] Update dependency googleapis-common-protos to v1.52.0 (#440) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e4aac62ad688..a74616ed1d51 100644 --- a/packages/gapic-generator/requirements.txt +++ 
b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==7.1.2 google-api-core==1.17.0 -googleapis-common-protos==1.51.0 +googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 protobuf==3.12.2 From 2c39024135d81eb27b786eb2d0a713707cb6202e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 4 Jun 2020 09:56:02 -0700 Subject: [PATCH 0275/1339] Stabilize resource name funcs (#438) Fix for #429 --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index af3a3b76d514..fa339e671fd6 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -117,7 +117,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): from_service_account_json = from_service_account_file - {% for message in service.resource_messages -%} + {% for message in service.resource_messages|sort(attribute="resource_type") -%} @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 1915722a0cd2..3481b82c1ea8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -117,7 +117,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): from_service_account_json = from_service_account_file - {% for message in service.resource_messages -%} + {% for message in service.resource_messages|sort(attribute="resource_type") -%} @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" From f033c69e1307f62d9ecdbe5db0bdc20ed9742fe3 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 4 Jun 2020 14:01:34 -0700 Subject: [PATCH 0276/1339] docs: use api-common-protos master branch (#442) --- packages/gapic-generator/docs/getting-started/local.rst | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started/local.rst b/packages/gapic-generator/docs/getting-started/local.rst index 0d2cc8beba01..5251dccad8f7 100644 --- a/packages/gapic-generator/docs/getting-started/local.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -117,16 +117,13 @@ Example .. include:: _example.rst -You will also need the common protos, currently in experimental status, -which define certain client-specific annotations. These are in the -`api-common-protos`_ repository. Clone this from GitHub also: +You will also need the common protos, which define certain client-specific annotations. +These are in the `api-common-protos`_ repository. +Clone this from GitHub also: .. code-block:: shell $ git clone https://github.com/googleapis/api-common-protos.git - $ cd api-common-protos - $ git checkout --track -b input-contract origin/input-contract - $ cd .. .. 
_api-common-protos: https://github.com/googleapis/api-common-protos/tree/input-contract From 59aa43b9cd8df7ac75bdd61bb592a671fca08031 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Jun 2020 23:02:07 +0200 Subject: [PATCH 0277/1339] Update dependency google-api-core to v1.18.0 (#441) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index a74616ed1d51..d81ddeebb955 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.17.0 +google-api-core==1.18.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 54f5fefa0198053353061ae89f569a669ec4ff72 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Jun 2020 18:51:57 +0200 Subject: [PATCH 0278/1339] chore(deps): update dependency google-api-core to v1.19.0 (#444) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d81ddeebb955..50183243c549 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.18.0 +google-api-core==1.19.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From ee3b04c6e68bbf4f3c577f73ff4c74b4d6ee06c6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 8 Jun 2020 22:48:25 +0200 Subject: [PATCH 0279/1339] chore(deps): update dependency google-api-core to v1.19.1 (#446) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 50183243c549..8648132545d8 100644 --- 
a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.19.0 +google-api-core==1.19.1 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From de415aa3f982c64b40c68d33080387cd75ec15e9 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 9 Jun 2020 15:00:04 -0700 Subject: [PATCH 0280/1339] fix: add field headers for other http verbs (#443) Closes #401 --- .../%sub/services/%service/client.py.j2 | 2 ++ .../%name_%version/%sub/test_%service.py.j2 | 35 +++++++++++-------- .../gapic-generator/gapic/schema/wrappers.py | 16 +++++++-- .../%sub/services/%service/client.py.j2 | 2 ++ .../%name_%version/%sub/test_%service.py.j2 | 22 ++++++++---- .../tests/unit/schema/wrappers/test_method.py | 15 ++++++-- 6 files changed, 65 insertions(+), 27 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index fa339e671fd6..6e001f3e2c82 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -333,7 +333,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( {%- for field_header in method.field_headers %} + {%- if not method.client_streaming %} ('{{ field_header }}', request.{{ field_header }}), + {%- endif %} {%- endfor %} )), ) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index da6795d58fe7..c5a9561bfcfb 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -258,25 +258,33 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% endfor %} {% endif %} -{% if method.field_headers %} +{% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), - ) + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}( - {%- for field_header in method.field_headers %} - {{ field_header }}='{{ field_header }}/value', - {%- endfor %} - ) + request = {{ method.input.ident }}() + + {%- for field_header in method.field_headers %} + request.{{ field_header }} = '{{ field_header }}/value' + {%- endfor %} # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client._transport.{{ method.name|snake_case }}), '__call__') as call: + {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} call.return_value = {{ method.output.ident }}() + {% endif %} client.{{ method.name|snake_case }}(request) # Establish that the underlying gRPC stub method was called. @@ -471,7 +479,6 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% endfor -%} {#- method in methods #} - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.{{ service.name }}GrpcTransport( @@ -689,12 +696,12 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): {% for message in service.resource_messages -%} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} def test_{{ message.resource_type|snake_case }}_path(): - {% for arg in message.resource_path_args -%} - {{ arg }} = "{{ molluscs.next() }}" - {% endfor %} - expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) - actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) - assert expected == actual + {% for arg in message.resource_path_args -%} + {{ arg }} = "{{ molluscs.next() }}" + {% endfor %} + expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) + assert expected == actual def test_parse_{{ message.resource_type|snake_case }}_path(): diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 1c0ae4d85a7a..f2c0d5effc7d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -625,9 +625,19 @@ def client_output(self): def field_headers(self) -> Sequence[str]: """Return the field headers defined for this method.""" http = self.options.Extensions[annotations_pb2.http] - if http.get: - return tuple(re.findall(r'\{([a-z][\w\d_.]+)=', http.get)) - return () + + pattern = re.compile(r'\{([a-z][\w\d_.]+)=') + + potential_verbs = [ + http.get, + http.put, + http.post, + http.delete, + http.patch, + http.custom.path, + ] + + return next((tuple(pattern.findall(verb)) for verb in 
potential_verbs if verb), ()) @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 3481b82c1ea8..0d765b55338b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -333,7 +333,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( {%- for field_header in method.field_headers %} + {%- if not method.client_streaming %} ('{{ field_header }}', request.{{ field_header }}), + {%- endif %} {%- endfor %} )), ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index 4a57aca16314..c5a9561bfcfb 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -258,25 +258,33 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% endfor %} {% endif %} -{% if method.field_headers %} +{% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), - ) + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = {{ method.input.ident }}( - {%- for field_header in method.field_headers %} - {{ field_header }}='{{ field_header }}/value', - {%- endfor %} - ) + request = {{ method.input.ident }}() + + {%- for field_header in method.field_headers %} + request.{{ field_header }} = '{{ field_header }}/value' + {%- endfor %} # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client._transport.{{ method.name|snake_case }}), '__call__') as call: + {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} call.return_value = {{ method.output.ident }}() + {% endif %} client.{{ method.name|snake_case }}(request) # Establish that the underlying gRPC stub method was called. diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index c2ed32c33d03..f1ec092a070d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -205,9 +205,18 @@ def test_method_field_headers_none(): def test_method_field_headers_present(): - http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') - method = make_method('DoSomething', http_rule=http_rule) - assert method.field_headers == ('parent',) + verbs = [ + 'get', + 'put', + 'post', + 'delete', + 'patch', + ] + + for v in verbs: + rule = http_pb2.HttpRule(**{v: '/v1/{parent=projects/*}/topics'}) + method = make_method('DoSomething', http_rule=rule) + assert method.field_headers == ('parent',) def test_method_idempotent_yes(): From db815796f4d36f6d116cb991f5352f23b4a4ac2d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 10 Jun 2020 00:11:24 +0200 Subject: [PATCH 0281/1339] chore(deps): update 
dependency google-api-core to v1.20.0 (#447) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8648132545d8..8dddbf62cf09 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.19.1 +google-api-core==1.20.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From e15350ce1109db07572bdd447a0336e623d3052d Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 10 Jun 2020 10:48:26 -0700 Subject: [PATCH 0282/1339] fix: update GOOGLE_API_USE_MTLS values (#449) Go client uses lower case, so change the values to lower case. --- .../%sub/services/%service/client.py.j2 | 12 +++++------ .../%name_%version/%sub/test_%service.py.j2 | 20 +++++++++---------- .../%sub/services/%service/client.py.j2 | 12 +++++------ .../%name_%version/%sub/test_%service.py.j2 | 20 +++++++++---------- 4 files changed, 32 insertions(+), 32 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 6e001f3e2c82..559fc72c167f 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -152,9 +152,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS environment variable can also be used to override the endpoint: - "Always" (always use the default mTLS endpoint), "Never" (always + "always" (always use the default mTLS endpoint), "never" (always use the default regular endpoint, this is the default value for - the environment variable) and "Auto" (auto switch to the default + the environment variable) and "auto" (auto switch to the default mTLS endpoint if client SSL credentials is present). However, the ``api_endpoint`` property takes precedence if provided. (2) The ``client_cert_source`` property is used to provide client @@ -171,12 +171,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = ClientOptions.ClientOptions() if transport is None and client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "Never") - if use_mtls_env == "Never": + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": client_options.api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "Always": + elif use_mtls_env == "always": client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "Auto": + elif use_mtls_env == "auto": has_client_cert_source = ( client_options.client_cert_source is not None or mtls.has_default_client_cert_source() diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index c5a9561bfcfb..79d420da8224 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -100,8 +100,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "Never". 
- os.environ["GOOGLE_API_USE_MTLS"] = "Never" + # "never". + os.environ["GOOGLE_API_USE_MTLS"] = "never" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() @@ -113,8 +113,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "Always". - os.environ["GOOGLE_API_USE_MTLS"] = "Always" + # "always". + os.environ["GOOGLE_API_USE_MTLS"] = "always" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() @@ -126,8 +126,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "Auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + # "auto", and client_cert_source is provided. + os.environ["GOOGLE_API_USE_MTLS"] = "auto" options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -140,8 +140,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "Auto", and default_client_cert_source is provided. 
- os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + # "auto", and default_client_cert_source is provided. + os.environ["GOOGLE_API_USE_MTLS"] = "auto" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): grpc_transport.return_value = None @@ -154,8 +154,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "Auto", but client_cert_source and default_client_cert_source are None. - os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + # "auto", but client_cert_source and default_client_cert_source are None. + os.environ["GOOGLE_API_USE_MTLS"] = "auto" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): grpc_transport.return_value = None diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0d765b55338b..16c3359d00a7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -152,9 +152,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS environment variable can also be used to override the endpoint: - "Always" (always use the default mTLS endpoint), "Never" (always + "always" (always use the default mTLS endpoint), "never" (always use the default regular endpoint, this is the default value for - the environment variable) and "Auto" (auto switch to the default + the environment variable) and "auto" (auto switch to the default mTLS endpoint if client SSL credentials is present). However, the ``api_endpoint`` property takes precedence if provided. (2) The ``client_cert_source`` property is used to provide client @@ -171,12 +171,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = ClientOptions.ClientOptions() if transport is None and client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "Never") - if use_mtls_env == "Never": + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": client_options.api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "Always": + elif use_mtls_env == "always": client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "Auto": + elif use_mtls_env == "auto": has_client_cert_source = ( client_options.client_cert_source is not None or mtls.has_default_client_cert_source() diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 index c5a9561bfcfb..79d420da8224 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 @@ -100,8 +100,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "Never". 
- os.environ["GOOGLE_API_USE_MTLS"] = "Never" + # "never". + os.environ["GOOGLE_API_USE_MTLS"] = "never" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() @@ -113,8 +113,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "Always". - os.environ["GOOGLE_API_USE_MTLS"] = "Always" + # "always". + os.environ["GOOGLE_API_USE_MTLS"] = "always" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() @@ -126,8 +126,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "Auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + # "auto", and client_cert_source is provided. + os.environ["GOOGLE_API_USE_MTLS"] = "auto" options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -140,8 +140,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "Auto", and default_client_cert_source is provided. 
- os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + # "auto", and default_client_cert_source is provided. + os.environ["GOOGLE_API_USE_MTLS"] = "auto" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): grpc_transport.return_value = None @@ -154,8 +154,8 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "Auto", but client_cert_source and default_client_cert_source are None. - os.environ["GOOGLE_API_USE_MTLS"] = "Auto" + # "auto", but client_cert_source and default_client_cert_source are None. + os.environ["GOOGLE_API_USE_MTLS"] = "auto" with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): grpc_transport.return_value = None From ea3d1099c13740318800e8a6557fc31588a34078 Mon Sep 17 00:00:00 2001 From: Ben Karl <5302342+BenRKarl@users.noreply.github.com> Date: Wed, 10 Jun 2020 13:55:49 -0400 Subject: [PATCH 0283/1339] Remove unecessary imports from submodule init files in Ads templates. 
(#450) Co-authored-by: Ben Karl Co-authored-by: Dov Shlachter --- .../%name/%version/%sub/__init__.py.j2 | 29 ------------------- 1 file changed, 29 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 index df685b243801..d8485fc47653 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 @@ -1,35 +1,6 @@ {% extends '_base.py.j2' %} {% block content %} -{# Import subpackages. -#} -{% for subpackage in api.subpackages.keys() -%} -from . import {{ subpackage }} -{% endfor -%} - -{# Import services for this package. -#} -{% filter sort_lines -%} -{% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} -from .services.{{ service.name|snake_case }} import {{ service.client_name }} -{% endfor -%} -{% endfilter -%} - -{# Import messages and enums from each proto. - It is safe to import all of the messages into the same namespace here, - because protocol buffers itself enforces selector uniqueness within - a proto package. --#} -{% filter sort_lines -%} -{% for proto in api.protos.values() - if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values() -%} -from .types.{{ proto.module_name }} import {{ message.name }} -{% endfor -%} -{% for enum in proto.enums.values() -%} -from .types.{{ proto.module_name }} import {{ enum.name }} -{% endfor -%} -{% endfor -%} -{% endfilter %} {# Define __all__. 
This requires the full set of imported names, so we iterate over From 8449601a8c45e4c2c00c7bfc5d5e3d32afb8bd8c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 10 Jun 2020 13:40:38 -0700 Subject: [PATCH 0284/1339] Release version bump (#451) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 54bf1a856315..80e32a5c0c24 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,7 +25,7 @@ setup( name="gapic-generator", - version="0.23.0", + version="0.24.0", license="Apache 2.0", author="Dov Shlachter", author_email="dovs@google.com", From 5d5b3dfdd0cad6859b594187c0c6d76252bd329e Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 11 Jun 2020 14:20:08 -0700 Subject: [PATCH 0285/1339] fix: update GOOGLE_API_USE_MTLS value (#453) update the values in error message --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 559fc72c167f..9688ca75016a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -186,7 +186,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: Never, Auto, Always" + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 16c3359d00a7..6e25957f638c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -186,7 +186,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: Never, Auto, Always" + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" ) # Save or instantiate the transport. From 63c95c6a9c2d04a8d162da8a3fc85be95fac75f6 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 12 Jun 2020 16:18:16 -0700 Subject: [PATCH 0286/1339] chore: add release-please (#452) Release-As: 0.24.1 --- packages/gapic-generator/.github/release-please.yml | 1 + packages/gapic-generator/setup.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/.github/release-please.yml diff --git a/packages/gapic-generator/.github/release-please.yml b/packages/gapic-generator/.github/release-please.yml new file mode 100644 index 000000000000..4507ad0598a5 --- /dev/null +++ b/packages/gapic-generator/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: python diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 80e32a5c0c24..f5ac3aad290d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,12 +20,14 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) +version = "0.24.0" + with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() setup( name="gapic-generator", - 
version="0.24.0", + version=version, license="Apache 2.0", author="Dov Shlachter", author_email="dovs@google.com", From d2d0cde1d6ca602525f63155ddc042f4cd55775b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 12 Jun 2020 16:29:52 -0700 Subject: [PATCH 0287/1339] chore: release 0.24.1 (#455) * created CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/CHANGELOG.md diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md new file mode 100644 index 000000000000..0a66332cb7a3 --- /dev/null +++ b/packages/gapic-generator/CHANGELOG.md @@ -0,0 +1,8 @@ +# Changelog + +### [0.24.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.24.0...v0.24.1) (2020-06-12) + + +### Bug Fixes + +* update GOOGLE_API_USE_MTLS value ([#453](https://www.github.com/googleapis/gapic-generator-python/issues/453)) ([7449ad5](https://www.github.com/googleapis/gapic-generator-python/commit/7449ad5aad4a1fbbf9ca3796e097512fc80991e3)) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f5ac3aad290d..a20ffbe0a922 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.24.0" +version = "0.24.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From c1025201251e6b916221d07bcf765bf5117318b8 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 12 Jun 2020 17:45:12 -0700 Subject: [PATCH 0288/1339] fix: generated unit tests live in the 'tests/gapic' subdir (#456) Some GAPICs have a manual layer and corresponding 
handwritten unit tests. These handwritten surfaces are subject to linting, but the autogenerated portions of the surface should not be. Moving generated unit tests to their own subdirectory makes it easier to remove them from linting rules. implementation for #454 --- .../tests/unit/{ => gapic}/%name_%version/%sub/__init__.py | 0 .../unit/{ => gapic}/%name_%version/%sub/test_%service.py.j2 | 0 .../unit/{ => gapic}/%name_%version/test_module_import.py.j2 | 0 .../tests/unit/{ => gapic}/%name_%version/%sub/__init__.py | 0 .../unit/{ => gapic}/%name_%version/%sub/test_%service.py.j2 | 0 5 files changed, 0 insertions(+), 0 deletions(-) rename packages/gapic-generator/gapic/ads-templates/tests/unit/{ => gapic}/%name_%version/%sub/__init__.py (100%) rename packages/gapic-generator/gapic/ads-templates/tests/unit/{ => gapic}/%name_%version/%sub/test_%service.py.j2 (100%) rename packages/gapic-generator/gapic/ads-templates/tests/unit/{ => gapic}/%name_%version/test_module_import.py.j2 (100%) rename packages/gapic-generator/gapic/templates/tests/unit/{ => gapic}/%name_%version/%sub/__init__.py (100%) rename packages/gapic-generator/gapic/templates/tests/unit/{ => gapic}/%name_%version/%sub/test_%service.py.j2 (100%) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/__init__.py b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py similarity index 100% rename from packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/__init__.py rename to packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/%sub/test_%service.py.j2 rename 
to packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/test_module_import.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/ads-templates/tests/unit/%name_%version/test_module_import.py.j2 rename to packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/__init__.py b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py similarity index 100% rename from packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/__init__.py rename to packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py diff --git a/packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/tests/unit/%name_%version/%sub/test_%service.py.j2 rename to packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 From ccaa049ef451bc8a190cd729140d41a4a7258b32 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 12 Jun 2020 17:59:47 -0700 Subject: [PATCH 0289/1339] chore: release 0.24.2 (#457) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md 
b/packages/gapic-generator/CHANGELOG.md index 0a66332cb7a3..25d0b428adb6 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.24.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.24.1...v0.24.2) (2020-06-13) + + +### Bug Fixes + +* generated unit tests live in the 'tests/gapic' subdir ([#456](https://www.github.com/googleapis/gapic-generator-python/issues/456)) ([1ed7c9d](https://www.github.com/googleapis/gapic-generator-python/commit/1ed7c9d6fe9595c390387d72113d741ebf28538d)), closes [#454](https://www.github.com/googleapis/gapic-generator-python/issues/454) + ### [0.24.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.24.0...v0.24.1) (2020-06-12) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a20ffbe0a922..938b67136927 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.24.1" +version = "0.24.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From a62dc8f869b0b95d94750fc47f885e66605951a6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Jun 2020 23:37:12 +0200 Subject: [PATCH 0290/1339] chore(deps): update dependency google-api-core to v1.20.1 (#463) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8dddbf62cf09..8adddf856d71 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.20.0 +google-api-core==1.20.1 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 1e9781a0ae4ffe755e3957e6e460a3e8e102d9b8 Mon Sep 17 00:00:00 2001 From: Lidi Zheng Date: Tue, 16 
Jun 2020 17:07:50 -0700 Subject: [PATCH 0291/1339] feat: provide AsyncIO support for generated code (#365) --- .../gapic-generator/gapic/schema/wrappers.py | 26 +- .../templates/%namespace/%name/__init__.py.j2 | 3 + .../%sub/services/%service/__init__.py.j2 | 2 + .../%sub/services/%service/async_client.py.j2 | 271 ++++++++++ .../%sub/services/%service/client.py.j2 | 18 +- .../%sub/services/%service/pagers.py.j2 | 61 ++- .../%service/transports/__init__.py.j2 | 3 + .../services/%service/transports/base.py.j2 | 12 +- .../services/%service/transports/grpc.py.j2 | 18 +- .../%service/transports/grpc_asyncio.py.j2 | 207 ++++++++ .../gapic/templates/noxfile.py.j2 | 2 +- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 498 ++++++++++++++++-- packages/gapic-generator/noxfile.py | 6 +- .../gapic-generator/tests/system/conftest.py | 54 +- .../tests/system/test_grpc_lro.py | 14 + .../tests/system/test_grpc_streams.py | 132 +++++ .../tests/system/test_grpc_unary.py | 31 ++ .../tests/system/test_pagination.py | 39 ++ .../tests/system/test_resource_crud.py | 44 ++ .../tests/system/test_retry.py | 11 + .../tests/unit/schema/test_api.py | 1 + .../tests/unit/schema/wrappers/test_method.py | 7 + .../unit/schema/wrappers/test_service.py | 8 +- 24 files changed, 1397 insertions(+), 73 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index f2c0d5effc7d..5a632fcc02ef 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -566,6 +566,13 @@ def __getattr__(self, name): @utils.cached_property def client_output(self): + return 
self._client_output(enable_asyncio=False) + + @utils.cached_property + def client_output_async(self): + return self._client_output(enable_asyncio=True) + + def _client_output(self, enable_asyncio: bool): """Return the output from the client layer. This takes into account transformations made by the outer GAPIC @@ -584,8 +591,8 @@ def client_output(self): if self.lro: return PythonType(meta=metadata.Metadata( address=metadata.Address( - name='Operation', - module='operation', + name='AsyncOperation' if enable_asyncio else 'Operation', + module='operation_async' if enable_asyncio else 'operation', package=('google', 'api_core'), collisions=self.lro.response_type.ident.collisions, ), @@ -603,7 +610,7 @@ def client_output(self): if self.paged_result_field: return PythonType(meta=metadata.Metadata( address=metadata.Address( - name=f'{self.name}Pager', + name=f'{self.name}AsyncPager' if enable_asyncio else f'{self.name}Pager', package=self.ident.api_naming.module_namespace + (self.ident.api_naming.versioned_module_name,) + self.ident.subpackage + ( 'services', utils.to_snake_case(self.ident.parent[-1]), @@ -744,6 +751,8 @@ def _ref_types(self, recursive: bool) -> Sequence[Union[MessageType, EnumType]]: if not self.void: answer.append(self.client_output) answer.extend(self.client_output.field_types) + answer.append(self.client_output_async) + answer.extend(self.client_output_async.field_types) # If this method has LRO, it is possible (albeit unlikely) that # the LRO messages reside in a different module. 
@@ -801,6 +810,11 @@ def client_name(self) -> str: """Returns the name of the generated client class""" return self.name + "Client" + @property + def async_client_name(self) -> str: + """Returns the name of the generated AsyncIO client class""" + return self.name + "AsyncClient" + @property def transport_name(self): return self.name + "Transport" @@ -809,6 +823,10 @@ def transport_name(self): def grpc_transport_name(self): return self.name + "GrpcTransport" + @property + def grpc_asyncio_transport_name(self): + return self.name + "GrpcAsyncIOTransport" + @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" @@ -856,7 +874,7 @@ def names(self) -> FrozenSet[str]: used for imports. """ # Put together a set of the service and method names. - answer = {self.name, self.client_name} + answer = {self.name, self.client_name, self.async_client_name} answer.update( utils.to_snake_case(i.name) for i in self.methods.values() ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 15f4a17e44b7..d777dc86e374 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -12,6 +12,8 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' if service.meta.address.subpackage == api.subpackage_view -%} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.async_client import {{ service.async_client_name }} {% endfor -%} {# Import messages and enums from each proto. 
@@ -48,6 +50,7 @@ __all__ = ( {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view -%} '{{ service.client_name }}', +'{{ service.async_client_name }}', {% endfor -%} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 index f9f07d44df9a..c99b2a5f91e5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 @@ -2,8 +2,10 @@ {% block content %} from .client import {{ service.client_name }} +from .async_client import {{ service.async_client_name }} __all__ = ( '{{ service.client_name }}', + '{{ service.async_client_name }}', ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 new file mode 100644 index 000000000000..fb501fe2bcda --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -0,0 +1,271 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from collections import OrderedDict +import functools +import re +from typing import Dict, {% if service.any_server_streaming %}AsyncIterable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import 
gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{% for ref_type in method.flat_ref_types -%} +{{ ref_type.ident.python_import }} +{% endfor -%} +{% endfor -%} +{% endfilter %} +from .transports.base import {{ service.name }}Transport +from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} +from .client import {{ service.client_name }} + + +class {{ service.async_client_name }}: + """{{ service.meta.doc|rst(width=72, indent=4) }}""" + + _client: {{ service.client_name }} + + DEFAULT_ENDPOINT = {{ service.client_name }}.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + + {% for message in service.resource_messages -%} + {{ message.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.{{ message.resource_type|snake_case }}_path) + + {% endfor %} + + from_service_account_file = {{ service.client_name }}.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) + + def __init__(self, *, + credentials: credentials.Credentials = None, + transport: Union[str, {{ service.name }}Transport] = 'grpc_asyncio', + client_options: ClientOptions = None, + ) -> None: + """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.{{ service.name }}Transport]): The + transport to use. 
If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + {# NOTE(lidiz) Not using kwargs since we want the docstring and types. 
#} + self._client = {{ service.client_name }}( + credentials=credentials, + transport=transport, + client_options=client_options, + ) + + {% for method in service.methods.values() -%} + {% if not method.server_streaming %}async {% endif -%}def {{ method.name|snake_case }}(self, + {%- if not method.client_streaming %} + request: {{ method.input.ident }} = None, + *, + {% for field in method.flattened_fields.values() -%} + {{ field.name }}: {{ field.ident }} = None, + {% endfor -%} + {%- else %} + requests: AsyncIterator[{{ method.input.ident }}] = None, + *, + {% endif -%} + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + {%- if not method.server_streaming %} + ) -> {{ method.client_output_async.ident }}: + {%- else %} + ) -> AsyncIterable[{{ method.client_output_async.ident }}]: + {%- endif %} + r"""{{ method.meta.doc|rst(width=72, indent=8) }} + + Args: + {%- if not method.client_streaming %} + request (:class:`{{ method.input.ident.sphinx }}`): + The request object.{{ ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {% for key, field in method.flattened_fields.items() -%} + {{ field.name }} (:class:`{{ field.ident.sphinx }}`): + {{ field.meta.doc|rst(width=72, indent=16, nl=False) }} + This corresponds to the ``{{ key }}`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + {% endfor -%} + {%- else %} + requests (AsyncIterator[`{{ method.input.ident.sphinx }}`]): + The request object AsyncIterator.{{ ' ' -}} + {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {%- endif %} + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ {%- if not method.void %} + + Returns: + {%- if not method.server_streaming %} + {{ method.client_output_async.ident.sphinx }}: + {%- else %} + AsyncIterable[{{ method.client_output_async.ident.sphinx }}]: + {%- endif %} + {{ method.client_output_async.meta.doc|rst(width=72, indent=16) }} + {%- endif %} + """ + {%- if not method.client_streaming %} + # Create or coerce a protobuf request object. + {% if method.flattened_fields -%} + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + {% endif -%} + {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = {{ method.input.ident }}(**request) + {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} + elif not request: + request = {{ method.input.ident }}() + {% endif -%}{# Cross-package req and flattened fields #} + {%- else %} + request = {{ method.input.ident }}(request) + {% endif %} {# different request package #} + + {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} + {% if method.flattened_fields -%} + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ {% endif -%} + {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }} is not None: + request.{{ key }} = {{ field.name }} + {%- endfor %} + {# They can be _extended_, however -#} + {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }}: + request.{{ key }}.extend({{ field.name }}) + {%- endfor %} + {%- endif %} + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.{{ method.name|snake_case }}, + {%- if method.retry %} + default_retry=retries.Retry( + {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} + {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} + {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + predicate=retries.if_exception_type( + {%- filter sort_lines %} + {%- for ex in method.retry.retryable_exceptions %} + exceptions.{{ ex.__name__ }}, + {%- endfor %} + {%- endfilter %} + ), + ), + {%- endif %} + default_timeout={{ method.timeout }}, + client_info=_client_info, + ) + {%- if method.field_headers %} + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {%- for field_header in method.field_headers %} + {%- if not method.client_streaming %} + ('{{ field_header }}', request.{{ field_header }}), + {%- endif %} + {%- endfor %} + )), + ) + {%- endif %} + + # Send the request. 
+ {% if not method.void %}response = {% endif %} + {%- if not method.server_streaming %}await {% endif %}rpc( + {%- if not method.client_streaming %} + request, + {%- else %} + requests, + {%- endif %} + retry=retry, + timeout=timeout, + metadata=metadata, + ) + {%- if method.lro %} + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + {{ method.lro.response_type.ident }}, + metadata_type={{ method.lro.metadata_type.ident }}, + ) + {%- elif method.paged_result_field %} + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = {{ method.client_output_async.ident }}( + method=rpc, + request=request, + response=response, + ) + {%- endif %} + {%- if not method.void %} + + # Done; return the response. + return response + {%- endif %} + {{ '\n' }} + {% endfor %} + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + '{{ api.naming.warehouse_package_name }}', + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + '{{ service.async_client_name }}', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 6e25957f638c..48efc9de0b61 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -24,7 +24,8 @@ from google.oauth2 import service_account # type: ignore {% endfor -%} {% endfilter %} from .transports.base import {{ service.name }}Transport -from .transports.grpc import {{ service.name }}GrpcTransport +from .transports.grpc import 
{{ service.grpc_transport_name }} +from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} class {{ service.client_name }}Meta(type): @@ -35,11 +36,12 @@ class {{ service.client_name }}Meta(type): objects. """ _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] - _transport_registry['grpc'] = {{ service.name }}GrpcTransport + _transport_registry['grpc'] = {{ service.grpc_transport_name }} + _transport_registry['grpc_asyncio'] = {{ service.grpc_asyncio_transport_name }} def get_transport_class(cls, label: str = None, - ) -> Type[{{ service.name }}Transport]: + ) -> Type[{{ service.name }}Transport]: """Return an appropriate transport class. Args: @@ -148,7 +150,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport to use. If set to None, a transport is chosen automatically. client_options (ClientOptions): Custom options for the client. It - won't take effect unless ``transport`` is None. + won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS environment variable can also be used to override the endpoint: @@ -170,7 +172,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if client_options is None: client_options = ClientOptions.ClientOptions() - if transport is None and client_options.api_endpoint is None: + if client_options.api_endpoint is None: use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") if use_mtls_env == "never": client_options.api_endpoint = self.DEFAULT_ENDPOINT @@ -198,13 +200,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') self._transport = transport - elif isinstance(transport, str): + else: Transport = type(self).get_transport_class(transport) self._transport = Transport( - credentials=credentials, host=self.DEFAULT_ENDPOINT - ) - else: - self._transport = {{ service.name }}GrpcTransport( credentials=credentials, host=client_options.api_endpoint, api_mtls_endpoint=client_options.api_endpoint, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 0e7ef018a775..5c069b68fdde 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -6,7 +6,7 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. 
-#} -from typing import Any, Callable, Iterable +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable {% filter sort_lines -%} {% for method in service.methods.values() | selectattr('paged_result_field') -%} @@ -71,5 +71,64 @@ class {{ method.name }}Pager: def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + +class {{ method.name }}AsyncPager: + """A pager for iterating through ``{{ method.name|snake_case }}`` requests. + + This class thinly wraps an initial + :class:`{{ method.output.ident.sphinx }}` object, and + provides an ``__aiter__`` method to iterate through its + ``{{ method.paged_result_field.name }}`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``{{ method.name }}`` requests and continue to iterate + through the ``{{ method.paged_result_field.name }}`` field on the + corresponding responses. + + All the usual :class:`{{ method.output.ident.sphinx }}` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[[{{ method.input.ident }}], + Awaitable[{{ method.output.ident }}]], + request: {{ method.input.ident }}, + response: {{ method.output.ident }}): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`{{ method.input.ident.sphinx }}`): + The initial request object. + response (:class:`{{ method.output.ident.sphinx }}`): + The initial response object. 
+ """ + self._method = method + self._request = {{ method.input.ident }}(request) + self._response = response + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[{{ method.output.ident }}]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request) + yield self._response + + def __aiter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'AsyncIterable') }}: + async def async_generator(): + async for page in self.pages: + for response in page.{{ method.paged_result_field.name }}: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + {% endfor %} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index 470cde5d1969..fa97f46164cb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -6,15 +6,18 @@ from typing import Dict, Type from .base import {{ service.name }}Transport from .grpc import {{ service.name }}GrpcTransport +from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] _transport_registry['grpc'] = {{ service.name }}GrpcTransport +_transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport __all__ = ( '{{ service.name }}Transport', '{{ service.name }}GrpcTransport', + '{{ service.name }}GrpcAsyncIOTransport', ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 694e0a16645f..6eaf9994598c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -17,7 +17,7 @@ from google.auth import credentials # type: ignore {% endfor -%} {% endfilter %} -class {{ service.name }}Transport(metaclass=abc.ABCMeta): +class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" AUTH_SCOPES = ( @@ -30,6 +30,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + **kwargs, ) -> None: """Instantiate the transport. 
@@ -59,15 +60,18 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" - raise NotImplementedError + raise NotImplementedError() {%- endif %} {%- for method in service.methods.values() %} @property def {{ method.name|snake_case }}(self) -> typing.Callable[ [{{ method.input.ident }}], - {{ method.output.ident }}]: - raise NotImplementedError + typing.Union[ + {{ method.output.ident }}, + typing.Awaitable[{{ method.output.ident }}] + ]]: + raise NotImplementedError() {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 1632b7762110..7288972b8c71 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,7 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Callable, Dict, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} @@ -35,6 +35,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] + def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, @@ -64,8 +66,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): is None. Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. """ if channel: # Sanity check: Ensure that channel and credentials are not both @@ -91,7 +93,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ssl_credentials = SslCredentials().ssl_credentials # create a new channel. The provided one is ignored. - self._grpc_channel = grpc_helpers.create_channel( + self._grpc_channel = type(self).create_channel( host, credentials=credentials, ssl_credentials=ssl_credentials, @@ -102,11 +104,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): super().__init__(host=host, credentials=credentials) self._stubs = {} # type: Dict[str, Callable] - @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + scopes: Optional[Sequence[str]] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -116,15 +118,19 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. 
""" + scopes = scopes or cls.AUTH_SCOPES return grpc_helpers.create_channel( host, credentials=credentials, - scopes=cls.AUTH_SCOPES, + scopes=scopes, **kwargs ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 new file mode 100644 index 000000000000..53fd1c718888 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -0,0 +1,207 @@ +{% extends '_base.py.j2' %} + +{% block content %} +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers_async # type: ignore +{%- if service.has_lro %} +from google.api_core import operations_v1 # type: ignore +{%- endif %} +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor -%} +{% endfilter %} +from .base import {{ service.name }}Transport +from .grpc import {{ service.name }}GrpcTransport + + +class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): + """gRPC AsyncIO backend transport for {{ service.name }}. + + {{ service.meta.doc|rst(width=72, indent=4) }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + scopes: Optional[Sequence[str]] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + scopes=scopes, + **kwargs + ) + + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: + """Instantiate the transport. + + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + ssl_credentials=ssl_credentials, + scopes=self.AUTH_SCOPES, + ) + + # Run the base constructor. + super().__init__(host=host, credentials=credentials) + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. 
+ if not hasattr(self, '_grpc_channel'): + self._grpc_channel = self.create_channel( + self._host, + credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + {%- if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if 'operations_client' not in self.__dict__: + self.__dict__['operations_client'] = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self.__dict__['operations_client'] + {%- endif %} + {%- for method in service.methods.values() %} + + @property + def {{ method.name|snake_case }}(self) -> Callable[ + [{{ method.input.ident }}], + Awaitable[{{ method.output.ident }}]]: + r"""Return a callable for the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=40, indent=8) }} + {{- ' ' -}} method over gRPC. + + {{ method.meta.doc|rst(width=72, indent=8) }} + + Returns: + Callable[[~.{{ method.input.name }}], + Awaitable[~.{{ method.output.name }}]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '{{ method.name|snake_case }}' not in self._stubs: + self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', + request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, + response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, + ) + return self._stubs['{{ method.name|snake_case }}'] + {%- endfor %} + + +__all__ = ( + '{{ service.name }}GrpcAsyncIOTransport', +) +{%- endblock -%} diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 71f99a414481..d31a325e2f0c 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -10,7 +10,7 @@ import nox # type: ignore def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov') + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index e400754b1124..ccfb661812f0 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,7 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.17.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.17.2, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 0.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 79d420da8224..550ca1bac4cb 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -2,9 +2,10 @@ {% block content %} import os -from unittest import mock +import mock import grpc +from grpc.experimental import aio import math import pytest @@ -15,9 +16,11 @@ from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.async_client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from google.api_core import client_options from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async {% if service.has_lro -%} from google.api_core import future from google.api_core import operations_v1 @@ -52,14 +55,15 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi -def test_{{ service.client_name|snake_case }}_from_service_account_file(): +@pytest.mark.parametrize("client_class", [{{ service.client_name }}, {{ service.async_client_name }}]) +def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 
'from_service_account_file') as factory: factory.return_value = creds - client = {{ service.client_name }}.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json") assert client._transport._credentials == creds - client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json") assert client._transport._credentials == creds {% if service.host %}assert client._transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} @@ -73,26 +77,30 @@ def test_{{ service.client_name|snake_case }}_get_transport_class(): assert transport == transports.{{ service.name }}GrpcTransport -def test_{{ service.client_name|snake_case }}_client_options(): +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") +]) +def test_{{ service.client_name|snake_case }}_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - transport = transports.{{ service.name }}GrpcTransport( + with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: + transport = transport_class( credentials=credentials.AnonymousCredentials() ) - client = {{ service.client_name }}(transport=transport) + client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. 
- with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - client = {{ service.client_name }}(transport="grpc") + with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) - grpc_transport.assert_called_once_with( + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, credentials=None, @@ -102,10 +110,10 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". 
os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, credentials=None, @@ -115,10 +123,10 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, credentials=None, @@ -129,10 +137,10 @@ def test_{{ service.client_name|snake_case }}_client_options(): # "auto", and client_cert_source is provided. 
os.environ["GOOGLE_API_USE_MTLS"] = "auto" options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) - grpc_transport.assert_called_once_with( + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, credentials=None, @@ -142,11 +150,11 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", and default_client_cert_source is provided. 
os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch.object(transport_class, '__init__') as patched: with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( + patched.return_value = None + client = client_class() + patched.assert_called_once_with( api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, credentials=None, @@ -156,11 +164,11 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is # "auto", but client_cert_source and default_client_cert_source are None. os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch.object(transport_class, '__init__') as patched: with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( + patched.return_value = None + client = client_class() + patched.assert_called_once_with( api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, credentials=None, @@ -171,7 +179,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): # unsupported value. 
os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" with pytest.raises(MutualTLSChannelError): - client = {{ service.client_name }}() + client = client_class() del os.environ["GOOGLE_API_USE_MTLS"] @@ -258,6 +266,89 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% endfor %} {% endif %} + +@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio'): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + {% elif not method.client_streaming and method.server_streaming -%} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% elif method.client_streaming and method.server_streaming -%} + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% else -%} + call.return_value ={{' '}} + {%- if not method.client_streaming and not method.server_streaming -%} + grpc_helpers_async.FakeUnaryUnaryCall + {%- else -%} + grpc_helpers_async.FakeStreamUnaryCall + {%- endif -%}({{ method.output.ident }}( + {%- for field in method.output.fields.values() | rejectattr('message') %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + )) + {% endif -%} + {% if method.client_streaming and method.server_streaming %} + response = await client.{{ method.name|snake_case }}(iter(requests)) + {% elif method.client_streaming and not method.server_streaming %} + response = await (await client.{{ method.name|snake_case }}(iter(requests))) + {% else %} + response = await client.{{ method.name|snake_case }}(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == request + {% endif %} + + # Establish that the response is the type that we expect. 
+ {% if method.void -%} + assert response is None + {% elif method.lro -%} + assert isinstance(response, future.Future) + {% elif method.server_streaming -%} + message = await response.read() + assert isinstance(message, {{ method.output.ident }}) + {% else -%} + assert isinstance(response, {{ method.client_output_async.ident }}) + {% for field in method.output.fields.values() | rejectattr('message') -%} + {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else -%} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif -%} + {% endfor %} + {% endif %} + + {% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( @@ -301,6 +392,52 @@ def test_{{ method.name|snake_case }}_field_headers(): {%- if not loop.last %}&{% endif -%} {%- endfor %}', ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}() + + {%- for field_header in method.field_headers %} + request.{{ field_header }} = '{{ field_header }}/value' + {%- endfor %} + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.{{ method.name|snake_case }}), + '__call__') as call: + {% if method.void -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + {% elif method.server_streaming -%} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% else -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall({{ method.output.ident }}()) + {% endif %} + await client.{{ method.name|snake_case }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + '{% for field_header in method.field_headers -%} + {{ field_header }}={{ field_header }}/value + {%- if not loop.last %}&{% endif -%} + {%- endfor %}', + ) in kw['metadata'] {% endif %} {% if method.ident.package != method.input.ident.package %} @@ -383,6 +520,80 @@ def test_{{ method.name|snake_case }}_flattened_error(): {{ field.name }}={{ field.mock_value }}, {%- endfor %} ) + + +@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_flattened_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.{{ method.name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void -%} + call.return_value = None + {% elif method.lro -%} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming -%} + call.return_value = iter([{{ method.output.ident }}()]) + {% else -%} + call.return_value = {{ method.output.ident }}() + {% endif %} + + + {% if method.void -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro -%} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + {% elif not method.client_streaming and method.server_streaming -%} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + {% elif method.client_streaming and method.server_streaming -%} + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + {% else -%} + call.return_value ={{' '}} + {%- if not method.client_streaming and not method.server_streaming -%} + grpc_helpers_async.FakeUnaryUnaryCall + {%- else -%} + grpc_helpers_async.FakeStreamUnaryCall + {%- endif -%}({{ method.output.ident }}()) + {% endif -%} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.{{ method.name|snake_case }}( + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + {% for key, field in method.flattened_fields.items() -%} + assert args[0].{{ key }} == {{ field.mock_value }} + {% endfor %} + + +@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_flattened_error_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) {% endif %} @@ -471,6 +682,98 @@ def test_{{ method.name|snake_case }}_pages(): pages = list(client.{{ method.name|snake_case }}(request={}).pages) for page, token in zip(pages, ['abc','def','ghi', '']): assert page.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_async_pager(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.{{ method.name|snake_case }}), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + ), + RuntimeError, + ) + async_pager = await client.{{ method.name|snake_case }}(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) + for i in responses) + 
+@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_async_pages(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.{{ method.name|snake_case }}), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.message.ident }}(), + ], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.{{ method.name|snake_case }}(request={})).pages: + pages.append(page) + for page, token in zip(pages, ['abc','def','ghi', '']): + assert page.raw_page.next_page_token == token {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} def test_{{ method.name|snake_case }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() @@ -500,6 +803,21 @@ def test_transport_instance(): assert client._transport is transport +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.{{ service.grpc_asyncio_transport_name }}( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = {{ service.client_name }}( @@ -598,6 +916,23 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel(): assert not callback.called +def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel('http://localhost/') + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.{{ service.name }}GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + @mock.patch("grpc.ssl_channel_credentials", autospec=True) @mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_cert_source( @@ -635,6 +970,43 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_c assert transport.grpc_channel == mock_grpc_channel +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.{{ service.name }}GrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ) + assert transport.grpc_channel == mock_grpc_channel + + @pytest.mark.parametrize( "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] ) @@ -674,6 +1046,45 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.{{ service.name }}GrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + ssl_credentials=mock_ssl_cred, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ) + assert transport.grpc_channel == mock_grpc_channel + + {% if service.has_lro -%} def test_{{ service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( @@ -691,6 +1102,23 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client + +def test_{{ service.name|snake_case }}_grpc_lro_async_client(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client._client._transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + {% endif -%} {% for message in service.resource_messages -%} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 83d38f6042f6..6b4c3f14e666 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -63,6 +63,7 @@ def showcase( # Install pytest and gapic-generator-python session.install("mock") session.install("pytest") + session.install("pytest-asyncio") session.install("-e", ".") # Install a client library for Showcase. @@ -121,6 +122,7 @@ def showcase_mtls( # Install pytest and gapic-generator-python session.install("mock") session.install("pytest") + session.install("pytest-asyncio") session.install("-e", ".") # Install a client library for Showcase. @@ -182,15 +184,13 @@ def showcase_unit( ): """Run the generated unit tests against the Showcase library.""" - # Install pytest and gapic-generator-python session.install( - "coverage", "pytest", "pytest-cov", "pytest-xdist", + "coverage", "pytest", "pytest-cov", "pytest-xdist", 'asyncmock', 'pytest-asyncio' ) session.install(".") # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: - # Download the Showcase descriptor. 
session.run( "curl", diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index ed21d45d5476..b549eeb6b99f 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -16,15 +16,26 @@ import mock import os import pytest +import asyncio import google.api_core.client_options as ClientOptions -from google import showcase from google.auth import credentials -from google.showcase import EchoClient -from google.showcase import IdentityClient +from google.showcase import EchoClient, EchoAsyncClient +from google.showcase import IdentityClient, IdentityAsyncClient from google.showcase import MessagingClient import grpc +from grpc.experimental import aio + + +# NOTE(lidiz) We must override the default event_loop fixture from +# pytest-asyncio. pytest fixture frees resources once there isn't any reference +# to it. So, the event loop might close before tests finish. In the +# customized version, we don't close the event loop.
+@pytest.fixture +def event_loop(): + loop = asyncio.get_event_loop() + return loop dir = os.path.dirname(__file__) @@ -52,7 +63,10 @@ def pytest_addoption(parser): ) -def construct_client(client_class, use_mtls): +def construct_client(client_class, + use_mtls, + transport="grpc", + channel_creator=grpc.insecure_channel): if use_mtls: with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: mock_ssl_cred.return_value = ssl_credentials @@ -65,8 +79,8 @@ def construct_client(client_class, use_mtls): ) return client else: - transport = client_class.get_transport_class("grpc")( - channel=grpc.insecure_channel("localhost:7469") + transport = client_class.get_transport_class(transport)( + channel=channel_creator("localhost:7469") ) return client_class(transport=transport) @@ -81,6 +95,34 @@ def echo(use_mtls): return construct_client(EchoClient, use_mtls) +@pytest.fixture +def async_echo(use_mtls, event_loop): + return construct_client( + EchoAsyncClient, + use_mtls, + transport="grpc_asyncio", + channel_creator=aio.insecure_channel + ) + + +@pytest.fixture +def identity(): + transport = IdentityClient.get_transport_class('grpc')( + channel=grpc.insecure_channel('localhost:7469'), + ) + return IdentityClient(transport=transport) + + +@pytest.fixture +def async_identity(use_mtls, event_loop): + return construct_client( + IdentityAsyncClient, + use_mtls, + transport="grpc_asyncio", + channel_creator=aio.insecure_channel + ) + + @pytest.fixture def identity(use_mtls): return construct_client(IdentityClient, use_mtls) diff --git a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py index 617163a0db1d..a4578a168ade 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest from datetime import datetime, timedelta, timezone from google import showcase_v1beta1 @@ -27,3 +28,16 @@ def test_lro(echo): response = future.result() assert isinstance(response, showcase_v1beta1.WaitResponse) assert response.content.endswith('the snails...eventually.') + + +@pytest.mark.asyncio +async def test_lro_async(async_echo): + future = await async_echo.wait({ + 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), + 'success': { + 'content': 'The hail in Wales falls mainly on the snails...eventually.' + }} + ) + response = await future.result() + assert isinstance(response, showcase_v1beta1.WaitResponse) + assert response.content.endswith('the snails...eventually.') diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index d0879d6a8986..f77e819986c2 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -12,6 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging +import pytest +import asyncio +import threading from google import showcase @@ -71,3 +75,131 @@ def test_stream_stream_passing_dict(echo): assert contents == ['hello', 'world!'] assert responses.trailing_metadata() == metadata + + +@pytest.mark.asyncio +async def test_async_unary_stream_reader(async_echo): + content = 'The hail in Wales falls mainly on the snails.' + call = await async_echo.expand({ + 'content': content, + }, metadata=metadata) + + # Consume the response and ensure it matches what we expect. + # with pytest.raises(exceptions.NotFound) as exc: + for ground_truth in content.split(' '): + response = await call.read() + assert response.content == ground_truth + assert ground_truth == 'snails.' 
+ + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + +@pytest.mark.asyncio +async def test_async_unary_stream_async_generator(async_echo): + content = 'The hail in Wales falls mainly on the snails.' + call = await async_echo.expand({ + 'content': content, + }, metadata=metadata) + + # Consume the response and ensure it matches what we expect. + # with pytest.raises(exceptions.NotFound) as exc: + tokens = iter(content.split(' ')) + async for response in call: + ground_truth = next(tokens) + assert response.content == ground_truth + assert ground_truth == 'snails.' + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + +@pytest.mark.asyncio +async def test_async_stream_unary_iterable(async_echo): + requests = [] + requests.append(showcase.EchoRequest(content="hello")) + requests.append(showcase.EchoRequest(content="world!")) + + call = await async_echo.collect(requests) + response = await call + assert response.content == 'hello world!' + + +@pytest.mark.asyncio +async def test_async_stream_unary_async_generator(async_echo): + + async def async_generator(): + yield showcase.EchoRequest(content="hello") + yield showcase.EchoRequest(content="world!") + + call = await async_echo.collect(async_generator()) + response = await call + assert response.content == 'hello world!' + + +@pytest.mark.asyncio +async def test_async_stream_unary_writer(async_echo): + call = await async_echo.collect() + await call.write(showcase.EchoRequest(content="hello")) + await call.write(showcase.EchoRequest(content="world!")) + await call.done_writing() + + response = await call + assert response.content == 'hello world!' + + +@pytest.mark.asyncio +async def test_async_stream_unary_passing_dict(async_echo): + requests = [{'content': 'hello'}, {'content': 'world!'}] + call = await async_echo.collect(iter(requests)) + response = await call + assert response.content == 'hello world!' 
+ + +@pytest.mark.asyncio +async def test_async_stream_stream_reader_writier(async_echo): + call = await async_echo.chat(metadata=metadata) + await call.write(showcase.EchoRequest(content="hello")) + await call.write(showcase.EchoRequest(content="world!")) + await call.done_writing() + + contents = [ + (await call.read()).content, + (await call.read()).content + ] + assert contents == ['hello', 'world!'] + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + +@pytest.mark.asyncio +async def test_async_stream_stream_async_generator(async_echo): + + async def async_generator(): + yield showcase.EchoRequest(content="hello") + yield showcase.EchoRequest(content="world!") + + call = await async_echo.chat(async_generator(), metadata=metadata) + + contents = [] + async for response in call: + contents.append(response.content) + assert contents == ['hello', 'world!'] + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + +@pytest.mark.asyncio +async def test_async_stream_stream_passing_dict(async_echo): + requests = [{'content': 'hello'}, {'content': 'world!'}] + call = await async_echo.chat(iter(requests), metadata=metadata) + + contents = [] + async for response in call: + contents.append(response.content) + assert contents == ['hello', 'world!'] + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index f8735a3a31df..c1694d975e63 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -13,6 +13,7 @@ # limitations under the License. 
import pytest +import asyncio from google.api_core import exceptions from google.rpc import code_pb2 @@ -45,3 +46,33 @@ def test_unary_error(echo): }) assert exc.value.code == 400 assert exc.value.message == message + + +@pytest.mark.asyncio +async def test_async_unary_with_request_object(async_echo): + response = await async_echo.echo(showcase.EchoRequest( + content='The hail in Wales falls mainly on the snails.', + ), timeout=1) + assert response.content == 'The hail in Wales falls mainly on the snails.' + + +@pytest.mark.asyncio +async def test_async_unary_with_dict(async_echo): + response = await async_echo.echo({ + 'content': 'The hail in Wales falls mainly on the snails.', + }) + assert response.content == 'The hail in Wales falls mainly on the snails.' + + +@pytest.mark.asyncio +async def test_async_unary_error(async_echo): + message = 'Bad things! Bad things!' + with pytest.raises(exceptions.InvalidArgument) as exc: + await async_echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), + 'message': message, + }, + }) + assert exc.value.code == 400 + assert exc.value.message == message diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index 781614cad466..8f53a6c01d15 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest from google import showcase @@ -44,3 +45,41 @@ def test_pagination_pages(echo): results = [r for p in page_results for r in p.responses] assert results == [showcase.EchoResponse(content=i) for i in text.split(' ')] + + +@pytest.mark.asyncio +async def test_pagination_async(async_echo): + text = 'The hail in Wales falls mainly on the snails.' 
+ results = [] + async for i in await async_echo.paged_expand({ + 'content': text, + 'page_size': 3, + }): + results.append(i) + + assert len(results) == 9 + assert results == [showcase.EchoResponse(content=i) + for i in text.split(' ')] + + +@pytest.mark.asyncio +async def test_pagination_pages_async(async_echo): + text = "The hail in Wales falls mainly on the snails." + page_results = [] + async for page in (await async_echo.paged_expand({ + 'content': text, + 'page_size': 3, + })).pages: + page_results.append(page) + + assert len(page_results) == 3 + assert not page_results[-1].next_page_token + + # The monolithic surface uses a wrapper type that needs an explicit property + # for a 'raw_page': we need to duplicate that interface, even though the + # architecture is different. + assert page_results[0].raw_page is page_results[0] + + results = [r for p in page_results for r in p.responses] + assert results == [showcase.EchoResponse(content=i) + for i in text.split(' ')] diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 7d32c37e6f57..5372da4b6bc8 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + def test_crud_with_request(identity): count = len(identity.list_users().users) @@ -72,3 +74,45 @@ def test_path_parsing(messaging): messaging.blurb_path("bdfl", "apocalyptic", "city") ) assert expected == actual + + +@pytest.mark.asyncio +async def test_crud_with_request_async(async_identity): + pager = await async_identity.list_users() + count = len(pager.users) + user = await async_identity.create_user(request={'user': { + 'display_name': 'Guido van Rossum', + 'email': 'guido@guido.fake', + }}) + try: + assert user.display_name == 'Guido van Rossum' + assert user.email == 'guido@guido.fake' + pager = (await async_identity.list_users()) + assert len(pager.users) == count + 1 + assert (await async_identity.get_user({ + 'name': user.name + })).display_name == 'Guido van Rossum' + finally: + await async_identity.delete_user({'name': user.name}) + + +@pytest.mark.asyncio +async def test_crud_flattened_async(async_identity): + count = len((await async_identity.list_users()).users) + user = await async_identity.create_user( + display_name='Monty Python', + email='monty@python.org', + ) + try: + assert user.display_name == 'Monty Python' + assert user.email == 'monty@python.org' + assert len((await async_identity.list_users()).users) == count + 1 + assert (await async_identity.get_user(name=user.name)).display_name == 'Monty Python' + finally: + await async_identity.delete_user(name=user.name) + + +def test_path_methods_async(async_identity): + expected = "users/bdfl" + actual = async_identity.user_path("bdfl") + assert expected == actual diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index bf0284294925..0bc70f9f8e8f 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -26,3 +26,14 @@ def test_retry_bubble(echo): 'message': 'This took longer than you said it should.', }, }) + + +@pytest.mark.asyncio +async def 
test_retry_bubble_async(async_echo): + with pytest.raises(exceptions.DeadlineExceeded): + await async_echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), + 'message': 'This took longer than you said it should.', + }, + }) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index b519b0353a69..8dc1760cd892 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -364,6 +364,7 @@ def test_proto_names_import_collision_flattening(): module='squid', ), imp.Import(package=('google', 'api_core'), module='operation',), + imp.Import(package=('google', 'api_core'), module='operation_async',), } assert expected_imports == actual_imports diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index f1ec092a070d..c0102402c267 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -83,6 +83,12 @@ def test_method_client_output_paged(): assert method.client_output.ident.name == 'ListFoosPager' +def test_method_client_output_async_empty(): + empty = make_message(name='Empty', package='google.protobuf') + method = make_method('Meh', output_message=empty) + assert method.client_output_async == wrappers.PrimitiveType.build(None) + + def test_method_paged_result_field_not_first(): paged = make_field(name='foos', message=make_message('Foo'), repeated=True) input_msg = make_message(name='ListFoosRequest', fields=( @@ -148,6 +154,7 @@ def test_method_paged_result_ref_types(): assert ref_type_names == { 'ListSquidsRequest', 'ListSquidsPager', + 'ListSquidsAsyncPager', 'Mollusc', } diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py 
index e78a477b77ee..86d1aa2e977d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -36,8 +36,10 @@ def test_service_properties(): service = make_service(name='ThingDoer') assert service.name == 'ThingDoer' assert service.client_name == 'ThingDoerClient' + assert service.async_client_name == 'ThingDoerAsyncClient' assert service.transport_name == 'ThingDoerTransport' assert service.grpc_transport_name == 'ThingDoerGrpcTransport' + assert service.grpc_asyncio_transport_name == 'ThingDoerGrpcAsyncIOTransport' def test_service_host(): @@ -62,7 +64,7 @@ def test_service_names(): get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), )) - expected_names = {'ThingDoer', 'ThingDoerClient', + expected_names = {'ThingDoer', 'ThingDoerClient', 'ThingDoerAsyncClient', 'do_thing', 'jump', 'yawn'} assert service.names == expected_names @@ -73,7 +75,7 @@ def test_service_name_colliding_modules(): get_method('Jump', 'bacon.bar.JumpRequest', 'bacon.bar.JumpResponse'), get_method('Yawn', 'a.b.v1.c.YawnRequest', 'a.b.v1.c.YawnResponse'), )) - expected_names = {'ThingDoer', 'ThingDoerClient', + expected_names = {'ThingDoer', 'ThingDoerClient', 'ThingDoerAsyncClient', 'do_thing', 'jump', 'yawn', 'bar'} assert service.names == expected_names @@ -112,6 +114,7 @@ def test_service_python_modules_lro(): imp.Import(package=('foo',), module='baz'), imp.Import(package=('foo',), module='qux'), imp.Import(package=('google', 'api_core'), module='operation'), + imp.Import(package=('google', 'api_core'), module='operation_async'), } @@ -138,6 +141,7 @@ def test_service_python_modules_signature(): imp.Import(package=('foo',), module='baz'), imp.Import(package=('foo',), module='qux'), imp.Import(package=('google', 'api_core'), module='operation'), + imp.Import(package=('google', 'api_core'), 
module='operation_async'), } From e38ccd0bdf978d1a8d36106e8b1742bd6858ecc7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 17 Jun 2020 12:31:46 -0700 Subject: [PATCH 0292/1339] chore: release 0.25.0 (#465) --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 25d0b428adb6..6e942b1459db 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.25.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.24.2...v0.25.0) (2020-06-17) + + +### Features + +* provide AsyncIO support for generated code ([#365](https://www.github.com/googleapis/gapic-generator-python/issues/365)) ([305ed34](https://www.github.com/googleapis/gapic-generator-python/commit/305ed34cfc1607c990f2f88b27f53358da25c366)) + ### [0.24.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.24.1...v0.24.2) (2020-06-13) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 938b67136927..262edf48315f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.24.2" +version = "0.25.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From fd19e1fc0544948f820bde5076b231f7f9b09394 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 18 Jun 2020 14:13:48 -0700 Subject: [PATCH 0293/1339] build: adjust regex for tags (#468) Release-Please tags release in the format `v1.2.3`. 
--- packages/gapic-generator/.circleci/config.yml | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 99c47b680627..3cf25f022017 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -7,19 +7,19 @@ workflows: - style-check: filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - unit-3.6: filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - unit-3.7: filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - unit-3.8: filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-3.6: requires: - unit-3.6 @@ -27,7 +27,7 @@ workflows: - unit-3.8 filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-3.7: requires: - unit-3.6 @@ -35,7 +35,7 @@ workflows: - unit-3.8 filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-3.8: requires: - unit-3.6 @@ -43,7 +43,7 @@ workflows: - unit-3.8 filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.6: requires: - unit-3.6 @@ -51,7 +51,7 @@ workflows: - unit-3.8 filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.7: requires: - unit-3.6 @@ -59,7 +59,7 @@ workflows: - unit-3.8 filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.8: requires: - unit-3.6 @@ -67,19 +67,19 @@ workflows: - unit-3.8 filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-mypy: requires: - mypy filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-mypy-alternative-templates: requires: - mypy filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase: requires: - docs @@ -90,7 +90,7 @@ workflows: - showcase-mypy 
filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-mtls: requires: - docs @@ -101,7 +101,7 @@ workflows: - showcase-mypy filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-alternative-templates: requires: - docs @@ -112,7 +112,7 @@ workflows: - showcase-mypy-alternative-templates filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - showcase-mtls-alternative-templates: requires: - docs @@ -123,15 +123,15 @@ workflows: - showcase-mypy-alternative-templates filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - docs: filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - mypy: filters: tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - publish_package: requires: - showcase @@ -140,7 +140,7 @@ workflows: branches: ignore: /.*/ tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ - publish_image: requires: - showcase @@ -149,7 +149,7 @@ workflows: branches: ignore: /.*/ tags: - only: /^\d+\.\d+\.\d+$/ + only: /^v\d+\.\d+\.\d+$/ jobs: docs: docker: From 90eb5112c6e53df38e7075617a3b78e617e22f66 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 18 Jun 2020 14:37:40 -0700 Subject: [PATCH 0294/1339] refactor: move showcase library setup into shared function (#467) --- packages/gapic-generator/noxfile.py | 182 ++++++---------------------- 1 file changed, 38 insertions(+), 144 deletions(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 6b4c3f14e666..e497d0efd8cf 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -18,10 +18,12 @@ import typing import nox # type: ignore +from contextlib import contextmanager from os import path showcase_version = "0.11.0" +ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") @nox.session(python=["3.6", "3.7", "3.8"]) @@ -45,11 +47,11 @@ def unit(session): ) 
-@nox.session(python="3.8") -def showcase( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), +@contextmanager +def showcase_library( + session, templates="DEFAULT", other_opts: typing.Iterable[str] = () ): - """Run the Showcase test suite.""" + """Install the generated library into the session for showcase tests.""" # Try to make it clear if Showcase is not running, so that # people do not end up with tons of difficult-to-debug failures over @@ -60,10 +62,7 @@ def showcase( session.log("See https://github.com/googleapis/gapic-showcase") session.log("-" * 70) - # Install pytest and gapic-generator-python - session.install("mock") - session.install("pytest") - session.install("pytest-asyncio") + # Install gapic-generator-python session.install("-e", ".") # Install a client library for Showcase. @@ -99,71 +98,36 @@ def showcase( # Install the library. session.install(tmp_dir) - session.run( - "py.test", "--quiet", *(session.posargs or [path.join("tests", "system")]) - ) + yield tmp_dir @nox.session(python="3.8") -def showcase_mtls( +def showcase( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): - """Run the Showcase mtls test suite.""" - - # Try to make it clear if Showcase is not running, so that - # people do not end up with tons of difficult-to-debug failures over - # an obvious problem. - if not os.environ.get("CIRCLECI"): - session.log("-" * 70) - session.log("Note: Showcase must be running for these tests to work.") - session.log("See https://github.com/googleapis/gapic-showcase") - session.log("-" * 70) - - # Install pytest and gapic-generator-python - session.install("mock") - session.install("pytest") - session.install("pytest-asyncio") - session.install("-e", ".") + """Run the Showcase test suite.""" - # Install a client library for Showcase. - with tempfile.TemporaryDirectory() as tmp_dir: - # Download the Showcase descriptor. 
+ with showcase_library(session, templates=templates, other_opts=other_opts): + session.install("mock", "pytest", "pytest-asyncio") session.run( - "curl", - "https://github.com/googleapis/gapic-showcase/releases/" - f"download/v{showcase_version}/" - f"gapic-showcase-{showcase_version}.desc", - "-L", - "--output", - path.join(tmp_dir, "showcase.desc"), - external=True, - silent=True, + "py.test", "--quiet", *(session.posargs or [path.join("tests", "system")]) ) - # Write out a client library for Showcase. - template_opt = f"python-gapic-templates={templates}" - opts = f"--python_gapic_opt={template_opt}" - opts += ",".join(other_opts + ("lazy-import",)) - session.run( - "protoc", - "--experimental_allow_proto3_optional", - f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", - f"--python_gapic_out={tmp_dir}", - "google/showcase/v1beta1/echo.proto", - "google/showcase/v1beta1/identity.proto", - "google/showcase/v1beta1/messaging.proto", - external=True, - ) - # Install the library. - session.install(tmp_dir) +@nox.session(python="3.8") +def showcase_mtls( + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), +): + """Run the Showcase mtls test suite.""" - session.run( - "py.test", - "--quiet", - "--mtls", - *(session.posargs or [path.join("tests", "system")]), - ) + with showcase_library(session, templates=templates, other_opts=other_opts): + session.install("mock", "pytest", "pytest-asyncio") + session.run( + "py.test", + "--quiet", + "--mtls", + *(session.posargs or [path.join("tests", "system")]), + ) @nox.session(python="3.8") @@ -185,51 +149,16 @@ def showcase_unit( """Run the generated unit tests against the Showcase library.""" session.install( - "coverage", "pytest", "pytest-cov", "pytest-xdist", 'asyncmock', 'pytest-asyncio' + "coverage", + "pytest", + "pytest-cov", + "pytest-xdist", + "asyncmock", + "pytest-asyncio", ) - session.install(".") - - # Install a client library for Showcase. 
- with tempfile.TemporaryDirectory() as tmp_dir: - # Download the Showcase descriptor. - session.run( - "curl", - "https://github.com/googleapis/gapic-showcase/releases/" - f"download/v{showcase_version}/" - f"gapic-showcase-{showcase_version}.desc", - "-L", - "--output", - path.join(tmp_dir, "showcase.desc"), - external=True, - silent=True, - ) - # Write out a client library for Showcase. - opts = [ - f"python-gapic-templates={templates}", - ] - opts.extend(other_opts) - if session.python == "3.8": - opts.append("lazy-import") - - opt_str = f'--python_gapic_opt={",".join(opts)},' - - session.run( - "protoc", - "--experimental_allow_proto3_optional", - f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", - f"--python_gapic_out={tmp_dir}", - opt_str, - "google/showcase/v1beta1/echo.proto", - "google/showcase/v1beta1/identity.proto", - "google/showcase/v1beta1/messaging.proto", - "google/showcase/v1beta1/testing.proto", - external=True, - ) - - # Install the library. - session.chdir(tmp_dir) - session.install("-e", tmp_dir) + with showcase_library(session, templates=templates, other_opts=other_opts) as lib: + session.chdir(lib) # Run the tests. session.run( @@ -244,8 +173,7 @@ def showcase_unit( @nox.session(python=["3.6", "3.7", "3.8"]) def showcase_unit_alternative_templates(session): - templates = path.join(path.dirname(__file__), "gapic", "ads-templates") - showcase_unit(session, templates=templates, other_opts=("old-naming",)) + showcase_unit(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) @nox.session(python="3.8") @@ -256,42 +184,9 @@ def showcase_mypy( # Install pytest and gapic-generator-python session.install("mypy") - session.install(".") - # Install a client library for Showcase. - with tempfile.TemporaryDirectory() as tmp_dir: - # Download the Showcase descriptor. 
- session.run( - "curl", - "https://github.com/googleapis/gapic-showcase/releases/" - f"download/v{showcase_version}/" - f"gapic-showcase-{showcase_version}.desc", - "-L", - "--output", - path.join(tmp_dir, "showcase.desc"), - external=True, - silent=True, - ) - # Write out a client library for Showcase. - template_opt = f"python-gapic-templates={templates}" - gapic_opts = f"--python_gapic_opt={template_opt}," - gapic_opts += ",".join(other_opts) - session.run( - "protoc", - "--experimental_allow_proto3_optional", - f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", - f"--python_gapic_out={tmp_dir}", - gapic_opts, - "google/showcase/v1beta1/echo.proto", - "google/showcase/v1beta1/identity.proto", - "google/showcase/v1beta1/messaging.proto", - "google/showcase/v1beta1/testing.proto", - external=True, - ) - - # Install the library. - session.chdir(tmp_dir) - session.install("-e", tmp_dir) + with showcase_library(session, templates=templates, other_opts=other_opts) as lib: + session.chdir(lib) # Run the tests. 
session.run("mypy", "google") @@ -299,8 +194,7 @@ def showcase_mypy( @nox.session(python="3.8") def showcase_mypy_alternative_templates(session): - templates = path.join(path.dirname(__file__), "gapic", "ads-templates") - showcase_mypy(session, templates=templates, other_opts=("old-naming",)) + showcase_mypy(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) @nox.session(python="3.6") From 991de1f58d04561971475d92b3e6eb32c9c10b53 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Jun 2020 22:22:50 +0200 Subject: [PATCH 0295/1339] chore(deps): update dependency google-api-core to v1.21.0 (#473) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8adddf856d71..c6f1e41e2324 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.20.1 +google-api-core==1.21.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From e95f44a05d7d367ff37188c2ce94dce1d89eafea Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 22 Jun 2020 18:02:14 -0700 Subject: [PATCH 0296/1339] fix: only require dataclases if python<3.7 (#475) --- packages/gapic-generator/requirements.txt | 2 +- packages/gapic-generator/setup.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index c6f1e41e2324..5ba300c78b64 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,4 +6,4 @@ MarkupSafe==1.1.1 protobuf==3.12.2 pypandoc==1.5 PyYAML==5.3.1 -dataclasses==0.7 \ No newline at end of file +dataclasses==0.6; python_version < '3.7' diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 
262edf48315f..ba38e947e563 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -51,9 +51,11 @@ "protobuf >= 3.12.0", "pypandoc >= 1.4", "PyYAML >= 5.1.1", + "dataclasses < 0.7; python_version < '3.7'" ), extras_require={':python_version<"3.7"': ("dataclasses >= 0.4",),}, tests_require=("pyfakefs >= 3.6",), + python_requires=">=3.6", classifiers=( "Development Status :: 4 - Beta", "Environment :: Console", @@ -62,6 +64,7 @@ "Operating System :: POSIX", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", ), From 63b2e41df6892161988e35c49700649418218b02 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 22 Jun 2020 18:09:48 -0700 Subject: [PATCH 0297/1339] chore: release 0.25.1 (#477) --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 6e942b1459db..fc9a36cab65c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.25.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.25.0...v0.25.1) (2020-06-23) + + +### Bug Fixes + +* only require dataclases if python<3.7 ([#475](https://www.github.com/googleapis/gapic-generator-python/issues/475)) ([9597695](https://www.github.com/googleapis/gapic-generator-python/commit/959769518ea47df383b23b6e48c5da148f69029e)) + ## [0.25.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.24.2...v0.25.0) (2020-06-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ba38e947e563..64a2f50164f0 100644 --- 
a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.25.0" +version = "0.25.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 5eaafd947ffb8c7cd7e29d00ed4598da10eab281 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 23 Jun 2020 19:08:13 +0200 Subject: [PATCH 0298/1339] chore(deps): update dependency dataclasses to <0.8 (#478) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 64a2f50164f0..3fd71479cf44 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -51,7 +51,7 @@ "protobuf >= 3.12.0", "pypandoc >= 1.4", "PyYAML >= 5.1.1", - "dataclasses < 0.7; python_version < '3.7'" + "dataclasses<0.8; python_version < '3.7'" ), extras_require={':python_version<"3.7"': ("dataclasses >= 0.4",),}, tests_require=("pyfakefs >= 3.6",), From ae0e45b427e7b8a1c6f9b6ec81aee409f6fc2103 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 23 Jun 2020 10:26:17 -0700 Subject: [PATCH 0299/1339] fix: always use dataclasses 0.6 (#481) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 5ba300c78b64..afd9f7aac606 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,4 +6,4 @@ MarkupSafe==1.1.1 protobuf==3.12.2 pypandoc==1.5 PyYAML==5.3.1 -dataclasses==0.6; python_version < '3.7' +dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From d1cdcb0bdec4e37e7dae99429b79118085d707c0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: 
Tue, 23 Jun 2020 10:40:00 -0700 Subject: [PATCH 0300/1339] chore: release 0.25.2 (#482) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index fc9a36cab65c..6176375ed6a1 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.25.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.25.1...v0.25.2) (2020-06-23) + + +### Bug Fixes + +* always use dataclasses 0.6 ([#481](https://www.github.com/googleapis/gapic-generator-python/issues/481)) ([066d04e](https://www.github.com/googleapis/gapic-generator-python/commit/066d04e7d53301024106f244280502f16af46b79)) + ### [0.25.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.25.0...v0.25.1) (2020-06-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3fd71479cf44..1659462c2f60 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.25.1" +version = "0.25.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 748ee32e436cc0bdb6dc3151a2208013da674454 Mon Sep 17 00:00:00 2001 From: Lidi Zheng Date: Tue, 30 Jun 2020 12:19:58 -0700 Subject: [PATCH 0301/1339] fix: Temporarily define a fixed testing event loop (#493) --- packages/gapic-generator/tests/system/conftest.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index b549eeb6b99f..3a89ba763ba9 100644 --- 
a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -27,15 +27,18 @@ import grpc from grpc.experimental import aio +_test_event_loop = asyncio.new_event_loop() # NOTE(lidiz) We must override the default event_loop fixture from # pytest-asyncio. pytest fixture frees resources once there isn't any reference # to it. So, the event loop might close before tests finishes. In the # customized version, we don't close the event loop. + + @pytest.fixture def event_loop(): - loop = asyncio.get_event_loop() - return loop + asyncio.set_event_loop(_test_event_loop) + return asyncio.get_event_loop() dir = os.path.dirname(__file__) From 0fbf52d3614025fc50005f4c5c492fdc882f7fad Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 30 Jun 2020 12:44:19 -0700 Subject: [PATCH 0302/1339] chorse: update showcase version in circleci (#492) Bump to use 0.11.0 --- packages/gapic-generator/.circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 3cf25f022017..df3c7356dcc3 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -229,7 +229,7 @@ jobs: showcase: docker: - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.8.1 + - image: gcr.io/gapic-images/gapic-showcase:0.11.0 steps: - checkout - run: @@ -278,7 +278,7 @@ jobs: command: | mkdir gapic_showcase cd gapic_showcase - curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.8.1/gapic-showcase-0.8.1-linux-amd64.tar.gz | tar xz + curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.11.0/gapic-showcase-0.11.0-linux-amd64.tar.gz | tar xz ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & showcase_pid=$! 
@@ -295,7 +295,7 @@ jobs: showcase-alternative-templates: docker: - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.8.1 + - image: gcr.io/gapic-images/gapic-showcase:0.11.0 steps: - checkout - run: @@ -344,7 +344,7 @@ jobs: command: | mkdir gapic_showcase cd gapic_showcase - curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.8.1/gapic-showcase-0.8.1-linux-amd64.tar.gz | tar xz + curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.11.0/gapic-showcase-0.11.0-linux-amd64.tar.gz | tar xz ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & showcase_pid=$! From cc88af483eb58488f97c473bd4338871d1ea03be Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 30 Jun 2020 13:19:40 -0700 Subject: [PATCH 0303/1339] fix: add name and version info to fixup script name (#490) --- ...fixup_keywords.py.j2 => fixup_%name_%version_keywords.py.j2} | 0 packages/gapic-generator/gapic/ads-templates/setup.py.j2 | 2 +- ...fixup_keywords.py.j2 => fixup_%name_%version_keywords.py.j2} | 0 packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- 4 files changed, 2 insertions(+), 2 deletions(-) rename packages/gapic-generator/gapic/ads-templates/scripts/{fixup_keywords.py.j2 => fixup_%name_%version_keywords.py.j2} (100%) rename packages/gapic-generator/gapic/templates/scripts/{fixup_keywords.py.j2 => fixup_%name_%version_keywords.py.j2} (100%) diff --git a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_keywords.py.j2 b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/ads-templates/scripts/fixup_keywords.py.j2 rename to packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 
b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 78cc078c17c4..5556158e7039 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -29,7 +29,7 @@ setuptools.setup( 'libcst >= 0.2.5', ], scripts=[ - 'scripts/fixup_keywords.py', + 'scripts/fixup_{{ api.naming.versioned_module_name }}_keywords.py', ], classifiers=[ 'Development Status :: 3 - Alpha', diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/scripts/fixup_keywords.py.j2 rename to packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index ccfb661812f0..c008080ee057 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -25,7 +25,7 @@ setuptools.setup( ), python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy import requires module-level getattr #} scripts=[ - 'scripts/fixup_keywords.py', + 'scripts/fixup_{{ api.naming.versioned_module_name }}_keywords.py', ], classifiers=[ 'Development Status :: 3 - Alpha', From 7bf88dfe2abe71fbb12dee51fc0f789cb1b4c7bb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 30 Jun 2020 13:26:54 -0700 Subject: [PATCH 0304/1339] feat: add `credentials_file` and `scopes` via `client_options` (#461) --- .../%sub/services/%service/client.py.j2 | 9 +- .../services/%service/transports/base.py.j2 | 18 ++- .../services/%service/transports/grpc.py.j2 | 33 +++- .../%service/transports/grpc_asyncio.py.j2 | 28 +++- .../%name_%version/%sub/test_%service.py.j2 | 144 +++++++++++++++--- 5 files changed, 203 
insertions(+), 29 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 48efc9de0b61..9b92f6e6fc60 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -196,15 +196,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # instance provides an extensibility point for unusual situations. if isinstance(transport, {{ service.name }}Transport): # transport is a {{ service.name }}Transport instance. - if credentials: + if credentials or client_options.credentials_file: raise ValueError('When providing a transport instance, ' 'provide its credentials directly.') + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) self._transport = transport else: Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, + credentials_file=client_options.credentials_file, host=client_options.api_endpoint, + scopes=client_options.scopes, api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 6eaf9994598c..8d03088780de 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -5,6 +5,7 @@ import abc import typing from google import auth +from google.api_core import exceptions # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} @@ -30,7 +31,9 @@ class {{ service.name }}Transport(abc.ABC): self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, - **kwargs, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + **kwargs, ) -> None: """Instantiate the transport. @@ -42,6 +45,10 @@ class {{ service.name }}Transport(abc.ABC): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ':' not in host: @@ -50,8 +57,13 @@ class {{ service.name }}Transport(abc.ABC): # If no credentials are provided, then determine the appropriate # defaults. - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file(credentials_file, scopes=scopes) + elif credentials is None: + credentials, _ = auth.default(scopes=scopes) # Save the credentials. self._credentials = credentials diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 7288972b8c71..245602bf50a2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -40,6 +40,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: @@ -54,6 +56,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If @@ -66,8 +73,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): is None. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. """ if channel: # Sanity check: Ensure that channel and credentials are not both @@ -96,18 +105,26 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._grpc_channel = type(self).create_channel( host, credentials=credentials, + credentials_file=credentials_file, ssl_credentials=ssl_credentials, - scopes=self.AUTH_SCOPES, + scopes=scopes or self.AUTH_SCOPES, ) # Run the base constructor. - super().__init__(host=host, credentials=credentials) + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES + ) + self._stubs = {} # type: Dict[str, Callable] @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + credentials_file: str = None, scopes: Optional[Sequence[str]] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. @@ -118,6 +135,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -125,11 +145,16 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): channel creation. Returns: grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. """ scopes = scopes or cls.AUTH_SCOPES return grpc_helpers.create_channel( host, credentials=credentials, + credentials_file=credentials_file, scopes=scopes, **kwargs ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 53fd1c718888..af182c11b840 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -43,6 +43,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, **kwargs) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. @@ -53,6 +54,9 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials identify this application to the service. 
If none are specified, the client will attempt to ascertain the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -65,6 +69,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return grpc_helpers_async.create_channel( host, credentials=credentials, + credentials_file=credentials_file, scopes=scopes, **kwargs ) @@ -72,6 +77,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: @@ -86,6 +93,12 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. 
If @@ -98,8 +111,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): is None. Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. """ if channel: # Sanity check: Ensure that channel and credentials are not both @@ -125,12 +140,19 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): self._grpc_channel = type(self).create_channel( host, credentials=credentials, + credentials_file=credentials_file, ssl_credentials=ssl_credentials, - scopes=self.AUTH_SCOPES, + scopes=scopes or self.AUTH_SCOPES, ) # Run the base constructor. - super().__init__(host=host, credentials=credentials) + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES + ) + self._stubs = {} @property diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 550ca1bac4cb..ecfe6af704a7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -19,6 +19,7 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.async_client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from 
google.api_core import client_options +from google.api_core import exceptions from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async {% if service.has_lro -%} @@ -101,10 +102,12 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, credentials=None, + credentials_file=None, host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -114,10 +117,12 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.return_value = None client = client_class() patched.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, credentials=None, + credentials_file=None, host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -127,10 +132,12 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.return_value = None client = client_class() patched.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, credentials=None, + credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -141,10 +148,13 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.return_value = None client = client_class(client_options=options) patched.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - 
client_cert_source=client_cert_source_callback, credentials=None, + credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -155,10 +165,12 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.return_value = None client = client_class() patched.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, credentials=None, + credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -169,10 +181,12 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.return_value = None client = client_class() patched.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, credentials=None, + credentials_file=None, host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -184,6 +198,50 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans del os.environ["GOOGLE_API_USE_MTLS"] +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") +]) +def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class, transport_class, transport_name): + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="localhost:7469", + scopes=["1", "2"], + api_mtls_endpoint="localhost:7469", + client_cert_source=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") +]) +def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case api_endpoint is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host="localhost:7469", + scopes=None, + api_mtls_endpoint="localhost:7469", + client_cert_source=None, + ) + + def test_{{ service.client_name|snake_case }}_client_options_from_dict(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -191,10 +249,12 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_options={'api_endpoint': 'squid.clam.whelk'} ) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, credentials=None, + credentials_file=None, host="squid.clam.whelk", + scopes=None, + 
api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, ) @@ -793,6 +853,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide a credentials file and a transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.client_name }}( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.client_name }}( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + def test_transport_instance(): # A client may be instantiated with a custom transport instance. @@ -829,6 +910,15 @@ def test_transport_grpc_default(): ) +def test_{{ service.name|snake_case }}_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.{{ service.name }}Transport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + def test_{{ service.name|snake_case }}_base_transport(): # Instantiate the base transport. 
transport = transports.{{ service.name }}Transport( @@ -854,6 +944,20 @@ def test_{{ service.name|snake_case }}_base_transport(): {% endif %} +def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(auth, 'load_credentials_from_file') as load_creds: + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.{{ service.name }}Transport( + credentials_file="credentials.json", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + )) + + def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, 'default') as adc: @@ -960,12 +1064,13 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_c grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, - ssl_credentials=mock_ssl_cred, + credentials_file=None, scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', {%- endfor %} ), + ssl_credentials=mock_ssl_cred, ) assert transport.grpc_channel == mock_grpc_channel @@ -997,12 +1102,13 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_ grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, - ssl_credentials=mock_ssl_cred, + credentials_file=None, scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', {%- endfor %} ), + ssl_credentials=mock_ssl_cred, ) assert transport.grpc_channel == mock_grpc_channel @@ -1036,12 +1142,13 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, - ssl_credentials=mock_ssl_cred, + credentials_file=None, scopes=( {%- for scope in service.oauth_scopes %} 
'{{ scope }}', {%- endfor %} ), + ssl_credentials=mock_ssl_cred, ) assert transport.grpc_channel == mock_grpc_channel @@ -1075,12 +1182,13 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_ grpc_create_channel.assert_called_once_with( "mtls.squid.clam.whelk:443", credentials=mock_cred, - ssl_credentials=mock_ssl_cred, + credentials_file=None, scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', {%- endfor %} ), + ssl_credentials=mock_ssl_cred, ) assert transport.grpc_channel == mock_grpc_channel From 0ce98c0e2ebf3db9dceee24c44bf62c5f7121107 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 30 Jun 2020 17:11:09 -0700 Subject: [PATCH 0305/1339] chore: release 0.26.0 (#495) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 6176375ed6a1..f16e2cfe2a8d 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.26.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.25.2...v0.26.0) (2020-06-30) + + +### Features + +* add `credentials_file` and `scopes` via `client_options` ([#461](https://www.github.com/googleapis/gapic-generator-python/issues/461)) ([b5e1b1e](https://www.github.com/googleapis/gapic-generator-python/commit/b5e1b1e8991159dc176da889e9bdf12e3eebdb1e)) + + +### Bug Fixes + +* add name and version info to fixup script name ([#490](https://www.github.com/googleapis/gapic-generator-python/issues/490)) ([16fe7e7](https://www.github.com/googleapis/gapic-generator-python/commit/16fe7e7885b7e17bf16b4f1f8f8844b9f5d0bdfe)) +* 
Temporarily define a fixed testing event loop ([#493](https://www.github.com/googleapis/gapic-generator-python/issues/493)) ([2d22d91](https://www.github.com/googleapis/gapic-generator-python/commit/2d22d919bc8c08e03f501ff2f23152b761467c80)) + ### [0.25.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.25.1...v0.25.2) (2020-06-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1659462c2f60..1f835d350a08 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.25.2" +version = "0.26.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From ca46a5b479a688cb8e489813838b00612bf49c71 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 7 Jul 2020 15:29:39 -0700 Subject: [PATCH 0306/1339] fix: pass metadata to pagers (#470) Closes #469 --- .../gapic-generator/gapic/schema/wrappers.py | 5 +++ .../%sub/services/%service/async_client.py.j2 | 1 + .../%sub/services/%service/client.py.j2 | 1 + .../%sub/services/%service/pagers.py.j2 | 26 +++++++++----- .../%name_%version/%sub/test_%service.py.j2 | 24 +++++++++++-- .../unit/schema/wrappers/test_service.py | 35 +++++++++++++++++++ 6 files changed, 80 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 5a632fcc02ef..6f7e041f16ab 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -832,6 +832,11 @@ def has_lro(self) -> bool: """Return whether the service has a long-running method.""" return any([m.lro for m in self.methods.values()]) + @property + def has_pagers(self) -> bool: + """Return whether the service has paged methods.""" + return any(m.paged_result_field for m in self.methods.values()) + 
@property def host(self) -> str: """Return the hostname for this service, if specified. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index fb501fe2bcda..36a34471f86e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -244,6 +244,7 @@ class {{ service.async_client_name }}: method=rpc, request=request, response=response, + metadata=metadata, ) {%- endif %} {%- if not method.void %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 9b92f6e6fc60..c34babd7631b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -374,6 +374,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): method=rpc, request=request, response=response, + metadata=metadata, ) {%- endif %} {%- if not method.void %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 5c069b68fdde..cc7bc56100ff 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -6,7 +6,7 @@ {# This lives within the loop in order to ensure that this 
template is empty if there are no paged methods. -#} -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple {% filter sort_lines -%} {% for method in service.methods.values() | selectattr('paged_result_field') -%} @@ -35,10 +35,11 @@ class {{ method.name }}Pager: the most recent response is retained, and thus used for attribute lookup. """ def __init__(self, - method: Callable[[{{ method.input.ident }}], - {{ method.output.ident }}], + method: Callable[..., {{ method.output.ident }}], request: {{ method.input.ident }}, - response: {{ method.output.ident }}): + response: {{ method.output.ident }}, + *, + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. Args: @@ -48,10 +49,13 @@ class {{ method.name }}Pager: The initial request object. response (:class:`{{ method.output.ident.sphinx }}`): The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. """ self._method = method self._request = {{ method.input.ident }}(request) self._response = response + self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @@ -61,7 +65,7 @@ class {{ method.name }}Pager: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request) + self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: @@ -90,10 +94,11 @@ class {{ method.name }}AsyncPager: the most recent response is retained, and thus used for attribute lookup. 
""" def __init__(self, - method: Callable[[{{ method.input.ident }}], - Awaitable[{{ method.output.ident }}]], + method: Callable[..., Awaitable[{{ method.output.ident }}]], request: {{ method.input.ident }}, - response: {{ method.output.ident }}): + response: {{ method.output.ident }}, + *, + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. Args: @@ -103,10 +108,13 @@ class {{ method.name }}AsyncPager: The initial request object. response (:class:`{{ method.output.ident.sphinx }}`): The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. """ self._method = method self._request = {{ method.input.ident }}(request) self._response = response + self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @@ -116,7 +124,7 @@ class {{ method.name }}AsyncPager: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request) + self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'AsyncIterable') }}: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ecfe6af704a7..c21846a4ac4e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -27,6 +27,9 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} +{% if service.has_pagers -%} +from google.api_core import gapic_v1 +{% endif -%} {% for method in 
service.methods.values() -%} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') @@ -695,9 +698,24 @@ def test_{{ method.name|snake_case }}_pager(): ), RuntimeError, ) - results = [i for i in client.{{ method.name|snake_case }}( - request={}, - )] + + metadata = () + {% if method.field_headers -%} + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {%- for field_header in method.field_headers %} + {%- if not method.client_streaming %} + ('{{ field_header }}', ''), + {%- endif %} + {%- endfor %} + )), + ) + {% endif -%} + pager = client.{{ method.name|snake_case }}(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) for i in results) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 86d1aa2e977d..8502617b5d3e 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -260,3 +260,38 @@ def test_service_any_streaming(): assert service.any_client_streaming == client assert service.any_server_streaming == server + + +def test_has_pagers(): + paged = make_field(name='foos', message=make_message('Foo'), repeated=True) + input_msg = make_message( + name='ListFoosRequest', + fields=( + make_field(name='parent', type=9), # str + make_field(name='page_size', type=5), # int + make_field(name='page_token', type=9), # str + ), + ) + output_msg = make_message( + name='ListFoosResponse', + fields=( + paged, + make_field(name='next_page_token', type=9), # str + ), + ) + method = make_method( + 'ListFoos', + input_message=input_msg, + output_message=output_msg, + ) + + service = make_service(name="Fooer", 
methods=(method,),) + assert service.has_pagers + + other_service = make_service( + name="Unfooer", + methods=( + get_method("Unfoo", "foo.bar.UnfooReq", "foo.bar.UnFooResp"), + ), + ) + assert not other_service.has_pagers From a10685a9a803887910a07a7f5f3b6adc8b73a691 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Jul 2020 15:43:27 -0700 Subject: [PATCH 0307/1339] chore: release 0.26.1 (#499) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f16e2cfe2a8d..b01b9fa57ac6 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.26.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.0...v0.26.1) (2020-07-07) + + +### Bug Fixes + +* pass metadata to pagers ([#470](https://www.github.com/googleapis/gapic-generator-python/issues/470)) ([c43c6d9](https://www.github.com/googleapis/gapic-generator-python/commit/c43c6d943fa99f202014bf4bba795df25d314a63)), closes [#469](https://www.github.com/googleapis/gapic-generator-python/issues/469) + ## [0.26.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.25.2...v0.26.0) (2020-06-30) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1f835d350a08..a0c0c1d6014c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.0" +version = "0.26.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 
5c6131a6bb1a52115b68bb5a367283448ee042e8 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 7 Jul 2020 15:47:16 -0700 Subject: [PATCH 0308/1339] fix: add oneof fields to generated protoplus init (#485) Fixes: #484 --- .../%name/%version/%sub/types/_message.py.j2 | 1 + packages/gapic-generator/gapic/schema/api.py | 51 +++++++++++++++++-- .../gapic-generator/gapic/schema/wrappers.py | 11 ++++ .../%name_%version/%sub/types/_message.py.j2 | 11 ++-- .../gapic/templates/noxfile.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 13 +++-- .../gapic-generator/gapic/utils/__init__.py | 2 + packages/gapic-generator/gapic/utils/code.py | 15 +++++- .../gapic-generator/test_utils/test_utils.py | 13 ++++- .../tests/unit/schema/test_api.py | 40 +++++++++++++++ .../tests/unit/schema/wrappers/test_field.py | 7 +++ .../tests/unit/schema/wrappers/test_oneof.py | 35 +++++++++++++ .../tests/unit/utils/test_code.py | 9 ++++ 13 files changed, 193 insertions(+), 17 deletions(-) create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index a8119827b834..15bf3ea4ab5e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -43,6 +43,7 @@ class {{ message.name }}({{ p }}.Message): {% else -%} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( {{- p }}.{{ field.proto_type }}, number={{ field.number }} + {% if field.oneof %}, oneof='{{ field.oneof }}'{% endif %} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) diff --git a/packages/gapic-generator/gapic/schema/api.py 
b/packages/gapic-generator/gapic/schema/api.py index cc9b9cf1f05d..df3e1daa8e8e 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -34,6 +34,7 @@ from gapic.schema import wrappers from gapic.schema import naming as api_naming from gapic.utils import cached_property +from gapic.utils import nth from gapic.utils import to_snake_case from gapic.utils import RESERVED_NAMES @@ -556,14 +557,42 @@ def _load_children(self, answer[wrapped.name] = wrapped return answer + def _get_oneofs(self, + oneof_pbs: Sequence[descriptor_pb2.OneofDescriptorProto], + address: metadata.Address, path: Tuple[int, ...], + ) -> Dict[str, wrappers.Oneof]: + """Return a dictionary of wrapped oneofs for the given message. + + Args: + oneof_fields (Sequence[~.descriptor_pb2.OneofDescriptorProto]): A + sequence of protobuf field objects. + address (~.metadata.Address): An address object denoting the + location of these oneofs. + path (Tuple[int]): The source location path thus far, as + understood by ``SourceCodeInfo.Location``. + + Returns: + Mapping[str, ~.wrappers.Oneof]: A ordered mapping of + :class:`~.wrappers.Oneof` objects. + """ + # Iterate over the oneofs and collect them into a dictionary. + answer = collections.OrderedDict( + (oneof_pb.name, wrappers.Oneof(oneof_pb=oneof_pb)) + for i, oneof_pb in enumerate(oneof_pbs) + ) + + # Done; return the answer. + return answer + def _get_fields(self, field_pbs: Sequence[descriptor_pb2.FieldDescriptorProto], address: metadata.Address, path: Tuple[int, ...], + oneofs: Optional[Dict[str, wrappers.Oneof]] = None ) -> Dict[str, wrappers.Field]: """Return a dictionary of wrapped fields for the given message. Args: - fields (Sequence[~.descriptor_pb2.FieldDescriptorProto]): A + field_pbs (Sequence[~.descriptor_pb2.FieldDescriptorProto]): A sequence of protobuf field objects. address (~.metadata.Address): An address object denoting the location of these fields. 
@@ -585,7 +614,13 @@ def _get_fields(self, # first) and this will be None. This case is addressed in the # `_load_message` method. answer: Dict[str, wrappers.Field] = collections.OrderedDict() - for field_pb, i in zip(field_pbs, range(0, sys.maxsize)): + for i, field_pb in enumerate(field_pbs): + is_oneof = oneofs and field_pb.oneof_index > 0 + oneof_name = nth( + (oneofs or {}).keys(), + field_pb.oneof_index + ) if is_oneof else None + answer[field_pb.name] = wrappers.Field( field_pb=field_pb, enum=self.api_enums.get(field_pb.type_name.lstrip('.')), @@ -594,6 +629,7 @@ def _get_fields(self, address=address.child(field_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), ), + oneof=oneof_name, ) # Done; return the answer. @@ -779,19 +815,25 @@ def _load_message(self, loader=self._load_message, path=path + (3,), ) - # self._load_children(message.oneof_decl, loader=self._load_field, - # address=nested_addr, info=info.get(8, {})) + + oneofs = self._get_oneofs( + message_pb.oneof_decl, + address=address, + path=path + (7,), + ) # Create a dictionary of all the fields for this message. fields = self._get_fields( message_pb.field, address=address, path=path + (2,), + oneofs=oneofs, ) fields.update(self._get_fields( message_pb.extension, address=address, path=path + (6,), + oneofs=oneofs, )) # Create a message correspoding to this descriptor. 
@@ -804,6 +846,7 @@ def _load_message(self, address=address, documentation=self.docs.get(path, self.EMPTY), ), + oneofs=oneofs, ) return self.proto_messages[address.proto] diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 6f7e041f16ab..106162037867 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -54,6 +54,7 @@ class Field: meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) + oneof: Optional[str] = None def __getattr__(self, name): return getattr(self.field_pb, name) @@ -206,6 +207,15 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': ) +@dataclasses.dataclass(frozen=True) +class Oneof: + """Description of a field.""" + oneof_pb: descriptor_pb2.OneofDescriptorProto + + def __getattr__(self, name): + return getattr(self.oneof_pb, name) + + @dataclasses.dataclass(frozen=True) class MessageType: """Description of a message (defined with the ``message`` keyword).""" @@ -220,6 +230,7 @@ class MessageType: meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) + oneofs: Optional[Mapping[str, 'Oneof']] = None def __getattr__(self, name): return getattr(self.message_pb, name) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index e9586108c043..5a1eb5fcc62f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -38,14 +38,15 @@ class {{ message.name }}({{ p }}.Message): {{- p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }} {%- if value_field.enum or value_field.message %}, {{ value_field.proto_type.lower() }}={{ 
value_field.type.ident.rel(message.ident) }}, - {% endif %}) + {% endif %}) {# enum or message#} {% endwith -%} - {% else -%} + {% else -%} {# field.map #} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( {{- p }}.{{ field.proto_type }}, number={{ field.number }} + {% if field.oneof %}, oneof='{{ field.oneof }}'{% endif %} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, - {% endif %}) - {% endif -%} - {% endfor -%} + {% endif %}) {# enum or message #} + {% endif -%} {# field.map #} + {% endfor -%} {# for field in message.fields.values#} {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index d31a325e2f0c..5fde488f006d 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -20,7 +20,7 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', '{{ api.naming.versioned_module_name }}'), + os.path.join('tests', 'unit',) ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c21846a4ac4e..f561d927e13c 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -288,9 +288,9 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): call.return_value = iter([{{ method.output.ident }}()]) {% else -%} call.return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message') %} + {%- for field in method.output.fields.values() | rejectattr('message')%}{% if not (field.oneof and not 
field.proto3_optional) %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endif %}{%- endfor %} ) {% endif -%} {% if method.client_streaming %} @@ -318,7 +318,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): assert isinstance(message, {{ method.output.ident }}) {% else -%} assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%} + {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not (field.oneof and not field.proto3_optional) %} {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} @@ -326,6 +326,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% else -%} assert response.{{ field.name }} == {{ field.mock_value }} {% endif -%} + {% endif -%} {# end oneof/optional #} {% endfor %} {% endif %} @@ -368,8 +369,9 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {%- else -%} grpc_helpers_async.FakeStreamUnaryCall {%- endif -%}({{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message') %} + {%- for field in method.output.fields.values() | rejectattr('message') %}{% if not (field.oneof and not field.proto3_optional) %} {{ field.name }}={{ field.mock_value }}, + {%- endif %} {%- endfor %} )) {% endif -%} @@ -400,7 +402,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio assert isinstance(message, {{ method.output.ident }}) {% else -%} assert isinstance(response, {{ method.client_output_async.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%} + {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not (field.oneof and not field.proto3_optional) %} {% if field.field_pb.type in [1, 2] 
-%} {# Use approx eq for floats -#} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} @@ -408,6 +410,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {% else -%} assert response.{{ field.name }} == {{ field.mock_value }} {% endif -%} + {% endif -%} {# oneof/optional #} {% endfor %} {% endif %} diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 315c575e2b06..905fcbdec216 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -15,6 +15,7 @@ from gapic.utils.cache import cached_property from gapic.utils.case import to_snake_case from gapic.utils.code import empty +from gapic.utils.code import nth from gapic.utils.code import partition from gapic.utils.doc import doc from gapic.utils.filename import to_valid_filename @@ -29,6 +30,7 @@ 'cached_property', 'doc', 'empty', + 'nth', 'partition', 'RESERVED_NAMES', 'rst', diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 27458a999cb5..15f327983c85 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import (Callable, Iterable, List, Tuple, TypeVar) +from typing import (Callable, Iterable, List, Optional, Tuple, TypeVar) +import itertools def empty(content: str) -> bool: @@ -50,3 +51,15 @@ def partition(predicate: Callable[[T], bool], # Returns trueList, falseList return results[1], results[0] + + +def nth(iterable: Iterable[T], n: int, default: Optional[T] = None) -> Optional[T]: + """Return the nth element of an iterable or a default value. + + Args: + iterable (Iterable(T)): An iterable of any type.
+ n (int): The 'index' of the element to retrieve. + default (Optional(T)): An optional default element if the iterable has + fewer than n elements. + """ + return next(itertools.islice(iterable, n, None), default) diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 697d08e8dfc8..89fd7351429c 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -200,6 +200,7 @@ def make_field( message: wrappers.MessageType = None, enum: wrappers.EnumType = None, meta: metadata.Metadata = None, + oneof: str = None, **kwargs ) -> wrappers.Field: T = desc.FieldDescriptorProto.Type @@ -223,11 +224,13 @@ def make_field( number=number, **kwargs ) + return wrappers.Field( field_pb=field_pb, enum=enum, message=message, meta=meta or metadata.Metadata(), + oneof=oneof, ) @@ -322,20 +325,28 @@ def make_enum_pb2( def make_message_pb2( name: str, fields: tuple = (), + oneof_decl: tuple = (), **kwargs ) -> desc.DescriptorProto: - return desc.DescriptorProto(name=name, field=fields, **kwargs) + return desc.DescriptorProto(name=name, field=fields, oneof_decl=oneof_decl, **kwargs) def make_field_pb2(name: str, number: int, type: int = 11, # 11 == message type_name: str = None, + oneof_index: int = None ) -> desc.FieldDescriptorProto: return desc.FieldDescriptorProto( name=name, number=number, type=type, type_name=type_name, + oneof_index=oneof_index, + ) + +def make_oneof_pb2(name: str) -> desc.OneofDescriptorProto: + return desc.OneofDescriptorProto( + name=name, ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 8dc1760cd892..b3f023054c49 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -34,6 +34,7 @@ make_file_pb2, make_message_pb2, make_naming, + make_oneof_pb2, ) @@ -239,6 +240,45 @@ def
test_proto_keyword_fname(): } +def test_proto_oneof(): + # Put together a couple of minimal protos. + fd = ( + make_file_pb2( + name='dep.proto', + package='google.dep', + messages=(make_message_pb2(name='ImportedMessage', fields=()),), + ), + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2( + name='Bar', + fields=( + make_field_pb2(name='imported_message', number=1, + type_name='.google.dep.ImportedMessage', + oneof_index=0), + make_field_pb2( + name='primitive', number=2, type=1, oneof_index=0), + ), + oneof_decl=( + make_oneof_pb2(name="value_type"), + ) + ) + ) + ) + ) + + # Create an API with those protos. + api_schema = api.API.build(fd, package='google.example.v1') + proto = api_schema.protos['foo.proto'] + assert proto.names == {'imported_message', 'Bar', 'primitive', 'Foo'} + oneofs = proto.messages["google.example.v1.Bar"].oneofs + assert len(oneofs) == 1 + assert "value_type" in oneofs.keys() + + def test_proto_names_import_collision(): # Put together a couple of minimal protos. 
fd = ( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 7104c735bec5..3cdcaf9bcfe2 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -153,6 +153,13 @@ def test_mock_value_int(): assert field.mock_value == '728' +def test_oneof(): + REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + + field = make_field(oneof="oneof_name") + assert field.oneof == "oneof_name" + + def test_mock_value_float(): field = make_field(name='foo_bar', type='TYPE_DOUBLE') assert field.mock_value == '0.728' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py new file mode 100644 index 000000000000..90fe2546cec6 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_oneof.py @@ -0,0 +1,35 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import collections + +import pytest + +from google.api import field_behavior_pb2 +from google.protobuf import descriptor_pb2 + +from gapic.schema import metadata +from gapic.schema import wrappers + +from test_utils.test_utils import ( + make_oneof_pb2, +) + + +def test_wrapped_oneof(): + oneof_pb = make_oneof_pb2("oneof_name") + wrapped = wrappers.Oneof(oneof_pb=oneof_pb) + + assert wrapped.oneof_pb == oneof_pb + assert wrapped.name == oneof_pb.name diff --git a/packages/gapic-generator/tests/unit/utils/test_code.py b/packages/gapic-generator/tests/unit/utils/test_code.py index 1069443f7b94..5f18679d6f2b 100644 --- a/packages/gapic-generator/tests/unit/utils/test_code.py +++ b/packages/gapic-generator/tests/unit/utils/test_code.py @@ -34,3 +34,12 @@ def test_empty_whitespace_comments(): def test_empty_code(): assert not code.empty('import this') + + +def test_nth(): + # list + assert code.nth([i * i for i in range(20)], 4) == 16 + # generator + assert code.nth((i * i for i in range(20)), 4) == 16 + # default + assert code.nth((i * i for i in range(20)), 30, 2112) == 2112 From de80ebf630684db5fdf9edd185d2070c8fe09049 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Jul 2020 15:53:57 -0700 Subject: [PATCH 0309/1339] chore: release 0.26.2 (#501) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b01b9fa57ac6..a0fdcf55e580 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.26.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.1...v0.26.2) (2020-07-07) + + +### Bug 
Fixes + +* add oneof fields to generated protoplus init ([#485](https://www.github.com/googleapis/gapic-generator-python/issues/485)) ([be5a847](https://www.github.com/googleapis/gapic-generator-python/commit/be5a847aeff6687679f7bca46308362d588f5c77)), closes [#484](https://www.github.com/googleapis/gapic-generator-python/issues/484) + ### [0.26.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.0...v0.26.1) (2020-07-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a0c0c1d6014c..f4850e7d453d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.1" +version = "0.26.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 2b235680e395f6952c20c59f9ae85d8b671131cb Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Tue, 7 Jul 2020 18:08:52 -0700 Subject: [PATCH 0310/1339] fix: fix wrong unit test (#502) --- .../gapic/%name_%version/%sub/test_%service.py.j2 | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f561d927e13c..4f30579121ce 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -206,7 +206,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") ]) def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class, transport_class, 
transport_name): - # Check the case api_endpoint is provided. + # Check the case scopes are provided. options = client_options.ClientOptions( scopes=["1", "2"], ) @@ -216,9 +216,9 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class patched.assert_called_once_with( credentials=None, credentials_file=None, - host="localhost:7469", + host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint="localhost:7469", + api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, ) @@ -228,7 +228,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") ]) def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case api_endpoint is provided. + # Check the case credentials file is provided. options = client_options.ClientOptions( credentials_file="credentials.json" ) @@ -238,9 +238,9 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host="localhost:7469", + host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint="localhost:7469", + api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, ) @@ -893,7 +893,7 @@ def test_credentials_transport_error(): client_options={"scopes": ["1", "2"]}, transport=transport, ) - + def test_transport_instance(): From 15fc9a0ebc606fb5f713468078f563fd8c043455 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Jul 2020 18:53:34 -0700 Subject: [PATCH 0311/1339] chore: release 0.26.3 (#503) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 
+++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a0fdcf55e580..60324921e240 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.26.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.2...v0.26.3) (2020-07-08) + + +### Bug Fixes + +* fix wrong unit test ([#502](https://www.github.com/googleapis/gapic-generator-python/issues/502)) ([c95bd45](https://www.github.com/googleapis/gapic-generator-python/commit/c95bd45506df7973758b9e1249586597d8214985)) + ### [0.26.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.1...v0.26.2) (2020-07-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f4850e7d453d..8320afa26a1f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.2" +version = "0.26.3" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 2aabec5556fa08cd750d197f81458eebcb8b8c3d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 9 Jul 2020 17:32:50 -0700 Subject: [PATCH 0312/1339] fix: tweak oneof detection (#505) Oneof detection and assignment to fields is tricky. This patch fixes detection of oneof fields, fixes uses in generated clients and tweaks generated tests to use them correctly. 
--- packages/gapic-generator/gapic/schema/api.py | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 22 ++++++++ .../%name_%version/%sub/test_%service.py.j2 | 27 +++++++-- .../unit/schema/wrappers/test_message.py | 23 ++++++++ .../tests/unit/schema/wrappers/test_method.py | 56 +++++++++++++++++++ 5 files changed, 124 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index df3e1daa8e8e..3c79d8f7cd7a 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -615,7 +615,7 @@ def _get_fields(self, # `_load_message` method. answer: Dict[str, wrappers.Field] = collections.OrderedDict() for i, field_pb in enumerate(field_pbs): - is_oneof = oneofs and field_pb.oneof_index > 0 + is_oneof = oneofs and field_pb.HasField('oneof_index') oneof_name = nth( (oneofs or {}).keys(), field_pb.oneof_index diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 106162037867..bbeeec679bc9 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -239,6 +239,15 @@ def __hash__(self): # Identity is sufficiently unambiguous. return hash(self.ident) + def oneof_fields(self, include_optional=False): + oneof_fields = collections.defaultdict(list) + for field in self.fields.values(): + # Only include proto3 optional oneofs if explicitly looked for. 
+ if field.oneof and not field.proto3_optional or include_optional: + oneof_fields[field.oneof].append(field) + + return oneof_fields + @utils.cached_property def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: answer = tuple( @@ -583,6 +592,15 @@ def client_output(self): def client_output_async(self): return self._client_output(enable_asyncio=True) + def flattened_oneof_fields(self, include_optional=False): + oneof_fields = collections.defaultdict(list) + for field in self.flattened_fields.values(): + # Only include proto3 optional oneofs if explicitly looked for. + if field.oneof and not field.proto3_optional or include_optional: + oneof_fields[field.oneof].append(field) + + return oneof_fields + def _client_output(self, enable_asyncio: bool): """Return the output from the client layer. @@ -685,6 +703,10 @@ def filter_fields(sig: str) -> Iterable[Tuple[str, Field]]: return answer + @utils.cached_property + def flattened_field_to_key(self): + return {field.name: key for key, field in self.flattened_fields.items()} + @utils.cached_property def legacy_flattened_fields(self) -> Mapping[str, Field]: """Return the legacy flattening interface: top level fields only, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4f30579121ce..322094d2266a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -288,9 +288,15 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): call.return_value = iter([{{ method.output.ident }}()]) {% else -%} call.return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message')%}{% if not (field.oneof and not field.proto3_optional) %} + {%- for 
field in method.output.fields.values() | rejectattr('message')%}{% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{%- endfor %} + {#- This is a hack to only pick one field #} + {%- for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {%- endwith %} + {%- endfor %} ) {% endif -%} {% if method.client_streaming %} @@ -567,9 +573,15 @@ def test_{{ method.name|snake_case }}_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%} + {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} assert args[0].{{ key }} == {{ field.mock_value }} - {% endfor %} + {% endif %}{% endfor %} + {%- for oneofs in method.flattened_oneof_fields().values() %} + {%- with field = oneofs[-1] %} + assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} + {%- endwith %} + {%- endfor %} + def test_{{ method.name|snake_case }}_flattened_error(): @@ -640,9 +652,14 @@ async def test_{{ method.name|snake_case }}_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%} + {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} assert args[0].{{ key }} == {{ field.mock_value }} - {% endfor %} + {% endif %}{% endfor %} + {%- for oneofs in method.flattened_oneof_fields().values() %} + {%- with field = oneofs[-1] %} + assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} + {%- endwith %} + {%- endfor %} @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 7ae95d02990f..7d8cca169aab 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -235,3 +235,26 @@ def test_field_map(): entry_field = make_field('foos', message=entry_msg, repeated=True) assert entry_msg.map assert entry_field.map + + +def test_oneof_fields(): + mass_kg = make_field(name="mass_kg", oneof="mass", type=5) + mass_lbs = make_field(name="mass_lbs", oneof="mass", type=5) + length_m = make_field(name="length_m", oneof="length", type=5) + length_f = make_field(name="length_f", oneof="length", type=5) + color = make_field(name="color", type=5) + request = make_message( + name="CreateMolluscReuqest", + fields=( + mass_kg, + mass_lbs, + length_m, + length_f, + color, + ), + ) + actual_oneofs = request.oneof_fields() + expected_oneofs = { + "mass": [mass_kg, mass_lbs], + "length": [length_m, length_f], + } diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index c0102402c267..f10bb078cd09 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -364,3 +364,59 
@@ def test_method_legacy_flattened_fields(): ]) assert method.legacy_flattened_fields == expected + + +def test_flattened_oneof_fields(): + mass_kg = make_field(name="mass_kg", oneof="mass", type=5) + mass_lbs = make_field(name="mass_lbs", oneof="mass", type=5) + + length_m = make_field(name="length_m", oneof="length", type=5) + length_f = make_field(name="length_f", oneof="length", type=5) + + color = make_field(name="color", type=5) + mantle = make_field( + name="mantle", + message=make_message( + name="Mantle", + fields=( + make_field(name="color", type=5), + mass_kg, + mass_lbs, + ), + ), + ) + request = make_message( + name="CreateMolluscReuqest", + fields=( + length_m, + length_f, + color, + mantle, + ), + ) + method = make_method( + name="CreateMollusc", + input_message=request, + signatures=[ + "length_m,", + "length_f,", + "mantle.mass_kg,", + "mantle.mass_lbs,", + "color", + ] + ) + + expected = {"mass": [mass_kg, mass_lbs], "length": [length_m, length_f]} + actual = method.flattened_oneof_fields() + assert expected == actual + + # Check this method too becasue the setup is a lot of work. + expected = { + "color": "color", + "length_m": "length_m", + "length_f": "length_f", + "mass_kg": "mantle.mass_kg", + "mass_lbs": "mantle.mass_lbs", + } + actual = method.flattened_field_to_key + assert expected == actual From a33041c907de177420187f5beaa15bdde1a15adf Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 9 Jul 2020 17:40:10 -0700 Subject: [PATCH 0313/1339] fix: require min google-api-core version of 1.21.0 (#506) This is the version required for credentials_file and scopes support via google-api-core. 
(#461) Co-authored-by: Dov Shlachter --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index c008080ee057..e710b15202d0 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,7 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.17.2, < 2.0.0dev', + 'google-api-core[grpc] >= 1.21.0, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 0.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} From 4900b3c97960bb2950c9cfc72746d70f89c54dce Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Jul 2020 17:47:18 -0700 Subject: [PATCH 0314/1339] chore: release 0.26.4 (#507) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 60324921e240..4e9033b68f6c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.26.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.3...v0.26.4) (2020-07-10) + + +### Bug Fixes + +* require min google-api-core version of 1.21.0 ([#506](https://www.github.com/googleapis/gapic-generator-python/issues/506)) ([bf787bd](https://www.github.com/googleapis/gapic-generator-python/commit/bf787bd36198288d6a40e45e44e43f0098cfec7c)), closes [#461](https://www.github.com/googleapis/gapic-generator-python/issues/461) +* tweak 
oneof detection ([#505](https://www.github.com/googleapis/gapic-generator-python/issues/505)) ([1632e25](https://www.github.com/googleapis/gapic-generator-python/commit/1632e250cfc01a17ccad128c3e065008b334473a)) + ### [0.26.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.2...v0.26.3) (2020-07-08) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8320afa26a1f..bd95faae7555 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.3" +version = "0.26.4" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From e353e61f84f3e54f213a90eca400be2ea59d2dd4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 9 Jul 2020 18:12:04 -0700 Subject: [PATCH 0315/1339] Tweak ads tests (#509) --- .../%name_%version/%sub/test_%service.py.j2 | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 79d420da8224..4e3456305514 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -217,9 +217,15 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): call.return_value = iter([{{ method.output.ident }}()]) {% else -%} call.return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message') %} + {%- for field in method.output.fields.values() | rejectattr('message') %}{%- for field in method.output.fields.values() | rejectattr('message')%}{% if not field.oneof or field.proto3_optional %} {{ field.name 
}}={{ field.mock_value }}, {%- endfor %} + {#- This is a hack to only pick one field #} + {%- for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {%- endwith %} + {%- endfor %} ) {% endif -%} {% if method.client_streaming %} @@ -364,9 +370,14 @@ def test_{{ method.name|snake_case }}_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%} + {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} assert args[0].{{ key }} == {{ field.mock_value }} - {% endfor %} + {% endif %}{% endfor %} + {%- for oneofs in method.flattened_oneof_fields().values() %} + {%- with field = oneofs[-1] %} + assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} + {%- endwith %} + {%- endfor %} def test_{{ method.name|snake_case }}_flattened_error(): From 3713a4ba13f21e466009ea2c90bed10f63197cbb Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 10 Jul 2020 10:30:19 -0700 Subject: [PATCH 0316/1339] fix: convert datetime back to proto for unit tests (#511) --- .../gapic/%name_%version/%sub/test_%service.py.j2 | 7 +++++++ .../gapic/%name_%version/%sub/test_%service.py.j2 | 13 +++++++++++++ 2 files changed, 20 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4e3456305514..7a83fd1122ed 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -7,6 +7,7 @@ from unittest import mock import grpc import math import pytest 
+from proto.marshal.rules.dates import DurationRule, TimestampRule {# Import the service itself as well as every proto module that it imports. -#} {% filter sort_lines -%} @@ -371,7 +372,13 @@ def test_{{ method.name|snake_case }}_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' -%} + assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.ident|string() == 'duration.Duration' -%} + assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% else -%} assert args[0].{{ key }} == {{ field.mock_value }} + {% endif %} {% endif %}{% endfor %} {%- for oneofs in method.flattened_oneof_fields().values() %} {%- with field = oneofs[-1] %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 322094d2266a..232d6c47337d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -8,6 +8,7 @@ import grpc from grpc.experimental import aio import math import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule {# Import the service itself as well as every proto module that it imports. 
-#} {% filter sort_lines -%} @@ -574,7 +575,13 @@ def test_{{ method.name|snake_case }}_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' -%} + assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.ident|string() == 'duration.Duration' -%} + assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% else -%} assert args[0].{{ key }} == {{ field.mock_value }} + {% endif %} {% endif %}{% endfor %} {%- for oneofs in method.flattened_oneof_fields().values() %} {%- with field = oneofs[-1] %} @@ -653,7 +660,13 @@ async def test_{{ method.name|snake_case }}_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' -%} + assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.ident|string() == 'duration.Duration' -%} + assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% else -%} assert args[0].{{ key }} == {{ field.mock_value }} + {% endif %} {% endif %}{% endfor %} {%- for oneofs in method.flattened_oneof_fields().values() %} {%- with field = oneofs[-1] %} From ddcea4b25841d7e93d4549157a7435d2f4ae740e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 10 Jul 2020 11:11:03 -0700 Subject: [PATCH 0317/1339] Cross patch #470 to ads templates (#510) --- .../%sub/services/%service/client.py.j2 | 1 + .../%sub/services/%service/pagers.py.j2 | 13 ++++++---- .../%name_%version/%sub/test_%service.py.j2 | 24 ++++++++++++++++--- 3 files changed, 30 insertions(+), 8 deletions(-) diff --git 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 9688ca75016a..1d14fbd64281 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -369,6 +369,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): method=rpc, request=request, response=response, + metadata=metadata, ) {%- endif %} {%- if not method.void %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index 0e7ef018a775..da4dc875815b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -6,7 +6,7 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. -#} -from typing import Any, Callable, Iterable +from typing import Any, Callable, Iterable, Sequence, Tuple {% filter sort_lines -%} {% for method in service.methods.values() | selectattr('paged_result_field') -%} @@ -35,10 +35,10 @@ class {{ method.name }}Pager: the most recent response is retained, and thus used for attribute lookup. """ def __init__(self, - method: Callable[[{{ method.input.ident }}], - {{ method.output.ident }}], + method: Callable[..., {{ method.output.ident }}], request: {{ method.input.ident }}, - response: {{ method.output.ident }}): + response: {{ method.output.ident }}, + metadata: Sequence[Tuple[str, str]] = ())): """Instantiate the pager. 
Args: @@ -48,10 +48,13 @@ class {{ method.name }}Pager: The initial request object. response (:class:`{{ method.output.ident.sphinx }}`): The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. """ self._method = method self._request = {{ method.input.ident }}(request) self._response = response + self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @@ -61,7 +64,7 @@ class {{ method.name }}Pager: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request) + self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 7a83fd1122ed..e4583db132fa 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -24,6 +24,9 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} +{% if service.has_pagers -%} +from google.api_core import gapic_v1 +{% endif -%} {% for method in service.methods.values() -%} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') @@ -442,9 +445,24 @@ def test_{{ method.name|snake_case }}_pager(): ), RuntimeError, ) - results = [i for i in client.{{ method.name|snake_case }}( - request={}, - )] + + metadata = () + {% if 
method.field_headers -%} + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {%- for field_header in method.field_headers %} + {%- if not method.client_streaming %} + ('{{ field_header }}', ''), + {%- endif %} + {%- endfor %} + )), + ) + {% endif -%} + pager = client.{{ method.name|snake_case }}(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] assert len(results) == 6 assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) for i in results) From f8095efbc718b6ccc9472bddf466a2dabc4a432d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 10 Jul 2020 12:01:54 -0700 Subject: [PATCH 0318/1339] Disambiguate fields whose names are reserved python words (#514) --- packages/gapic-generator/gapic/schema/api.py | 3 ++- packages/gapic-generator/gapic/schema/wrappers.py | 6 ++++++ .../tests/unit/schema/wrappers/test_field.py | 12 ++++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 3c79d8f7cd7a..db91c461594a 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -621,7 +621,7 @@ def _get_fields(self, field_pb.oneof_index ) if is_oneof else None - answer[field_pb.name] = wrappers.Field( + field = wrappers.Field( field_pb=field_pb, enum=self.api_enums.get(field_pb.type_name.lstrip('.')), message=self.api_messages.get(field_pb.type_name.lstrip('.')), @@ -631,6 +631,7 @@ def _get_fields(self, ), oneof=oneof_name, ) + answer[field.name] = field # Done; return the answer. 
return answer diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index bbeeec679bc9..49b8efda695f 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -59,6 +59,12 @@ class Field: def __getattr__(self, name): return getattr(self.field_pb, name) + @property + def name(self) -> str: + """Used to prevent collisions with python keywords""" + name = self.field_pb.name + return name + "_" if name in utils.RESERVED_NAMES else name + @utils.cached_property def ident(self) -> metadata.FieldIdentifier: """Return the identifier to be used in templates.""" diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 3cdcaf9bcfe2..bba280b8f511 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -248,3 +248,15 @@ def test_mock_value_message(): message=message, ) assert field.mock_value == 'bogus.Message(foo=324)' + + +def test_field_name_kword_disambiguation(): + from_field = make_field( + name="from", + ) + assert from_field.name == "from_" + + frum_field = make_field( + name="frum", + ) + assert frum_field.name == "frum" From e3d8bcf6d201fc53c76e2ee814bbdaf4e97fecec Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 10 Jul 2020 12:17:43 -0700 Subject: [PATCH 0319/1339] chore: release 0.26.5 (#515) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 
4e9033b68f6c..d1c924eaa5c2 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.26.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.4...v0.26.5) (2020-07-10) + + +### Bug Fixes + +* convert datetime back to proto for unit tests ([#511](https://www.github.com/googleapis/gapic-generator-python/issues/511)) ([e1c787d](https://www.github.com/googleapis/gapic-generator-python/commit/e1c787d3b6fe09dc0b4e00f07a7bd77fb5f1e6a3)) + ### [0.26.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.3...v0.26.4) (2020-07-10) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index bd95faae7555..ffba7dca499f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.4" +version = "0.26.5" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From e1b00a44f4affceb7e43189d7031ded9eb482496 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 10 Jul 2020 16:07:43 -0700 Subject: [PATCH 0320/1339] fix: primitive repeated fields are now correctly auto paginated (#517) --- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../tests/unit/schema/wrappers/test_method.py | 23 +++++++++++++++++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 49b8efda695f..24178af08a14 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -759,7 +759,7 @@ def paged_result_field(self) -> Optional[Field]: # Return the first repeated field. for field in self.output.fields.values(): - if field.repeated and field.message: + if field.repeated: return field # We found no repeated fields. Return None. 
diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index f10bb078cd09..8b551df560e7 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -206,6 +206,29 @@ def test_flattened_ref_types(): assert expected_flat_ref_type_names == actual_flat_ref_type_names +def test_method_paged_result_primitive(): + paged = make_field(name='squids', type=9, repeated=True) + input_msg = make_message( + name='ListSquidsRequest', + fields=( + make_field(name='parent', type=9), # str + make_field(name='page_size', type=5), # int + make_field(name='page_token', type=9), # str + ), + ) + output_msg = make_message(name='ListFoosResponse', fields=( + paged, + make_field(name='next_page_token', type=9), # str + )) + method = make_method( + 'ListSquids', + input_message=input_msg, + output_message=output_msg, + ) + assert method.paged_result_field == paged + assert method.client_output.ident.name == 'ListSquidsPager' + + def test_method_field_headers_none(): method = make_method('DoSomething') assert isinstance(method.field_headers, collections.abc.Sequence) From 78f4611632e2e54b884e902b9fe409a4761be01c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 10 Jul 2020 16:12:01 -0700 Subject: [PATCH 0321/1339] chore: release 0.26.6 (#518) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d1c924eaa5c2..144846d1c692 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.26.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.5...v0.26.6) (2020-07-10) + + +### Bug Fixes + +* primitive repeated fields are now correctly auto paginated ([#517](https://www.github.com/googleapis/gapic-generator-python/issues/517)) ([61a2cc0](https://www.github.com/googleapis/gapic-generator-python/commit/61a2cc0d4c08064d442fd4d7aa4b1b9e56158eaa)) + ### [0.26.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.4...v0.26.5) (2020-07-10) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ffba7dca499f..cac6481b7083 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.5" +version = "0.26.6" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 9ddfe1c39d5d66c53292da468cacd8d476b0826e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 13 Jul 2020 12:27:19 -0700 Subject: [PATCH 0322/1339] feat: support for proto3 optional fields (#519) Fields marked as 'optional' in proto files are given an 'optional=True' parameter in their constructor. Bumps required version of proto-plus in gapic surface to 1.1.0 Includes minor cleanups in template code that refers to oneofs. 
--- .../%name/%version/%sub/services/%service/pagers.py.j2 | 2 +- .../%namespace/%name/%version/%sub/types/_message.py.j2 | 3 +-- packages/gapic-generator/gapic/ads-templates/setup.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 7 ++++--- .../%namespace/%name_%version/%sub/types/_message.py.j2 | 3 +-- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 6 +++--- 7 files changed, 12 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index da4dc875815b..cdc469227940 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -38,7 +38,7 @@ class {{ method.name }}Pager: method: Callable[..., {{ method.output.ident }}], request: {{ method.input.ident }}, response: {{ method.output.ident }}, - metadata: Sequence[Tuple[str, str]] = ())): + metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. 
Args: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 15bf3ea4ab5e..435b576dbef9 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -42,8 +42,7 @@ class {{ message.name }}({{ p }}.Message): {% endwith -%} {% else -%} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( - {{- p }}.{{ field.proto_type }}, number={{ field.number }} - {% if field.oneof %}, oneof='{{ field.oneof }}'{% endif %} + {{- p }}.{{ field.proto_type }}, number={{ field.number }}{% if field.proto3_optional %}, optional=True{% elif field.oneof %}, oneof='{{ field.oneof }}'{% endif %} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 5556158e7039..1dfcf0caf215 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -19,7 +19,7 @@ setuptools.setup( 'google-api-core >= 1.17.0, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', - 'proto-plus >= 0.4.0', + 'proto-plus >= 1.1.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {%- endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e4583db132fa..6091d5a3f716 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -221,9 +221,9 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): call.return_value = iter([{{ method.output.ident }}()]) {% else -%} call.return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message') %}{%- for field in method.output.fields.values() | rejectattr('message')%}{% if not field.oneof or field.proto3_optional %} + {%- for field in method.output.fields.values() | rejectattr('message')%}{% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endif %}{%- endfor %} {#- This is a hack to only pick one field #} {%- for oneof_fields in method.output.oneof_fields().values() %} {% with field = oneof_fields[0] %} @@ -257,7 +257,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): assert isinstance(message, {{ method.output.ident }}) {% else -%} assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%} + {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} @@ -265,6 +265,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% else -%} assert response.{{ field.name }} == {{ field.mock_value }} {% endif -%} + {% endif -%} {# end oneof/optional #} {% endfor %} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 5a1eb5fcc62f..8524d638efec 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -42,8 +42,7 @@ class {{ message.name }}({{ p }}.Message): {% endwith -%} {% else -%} {# field.map #} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( - {{- p }}.{{ field.proto_type }}, number={{ field.number }} - {% if field.oneof %}, oneof='{{ field.oneof }}'{% endif %} + {{- p }}.{{ field.proto_type }}, number={{ field.number }}{% if field.proto3_optional %}, optional=True{% elif field.oneof %}, oneof='{{ field.oneof }}'{% endif %} {%- if field.enum or field.message %}, {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, {% endif %}) {# enum or message #} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index e710b15202d0..1b9a43d1d9cd 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -18,7 +18,7 @@ setuptools.setup( install_requires=( 'google-api-core[grpc] >= 1.21.0, < 2.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 0.4.0', + 'proto-plus >= 1.1.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {%- endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 232d6c47337d..e8c5bd067a34 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -325,7 +325,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): assert isinstance(message, {{ method.output.ident }}) {% else -%} assert isinstance(response, {{ 
method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not (field.oneof and not field.proto3_optional) %} + {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} @@ -376,7 +376,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {%- else -%} grpc_helpers_async.FakeStreamUnaryCall {%- endif -%}({{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message') %}{% if not (field.oneof and not field.proto3_optional) %} + {%- for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {%- endif %} {%- endfor %} @@ -409,7 +409,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio assert isinstance(message, {{ method.output.ident }}) {% else -%} assert isinstance(response, {{ method.client_output_async.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not (field.oneof and not field.proto3_optional) %} + {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} From 4542b6988027db4542e6ce8df5c88fb103ca5f26 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 13 Jul 2020 12:32:32 -0700 Subject: [PATCH 0323/1339] chore: release 0.27.0 (#520) * updated 
CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 144846d1c692..a29751d70db4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.27.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.6...v0.27.0) (2020-07-13) + + +### Features + +* support for proto3 optional fields ([#519](https://www.github.com/googleapis/gapic-generator-python/issues/519)) ([1aa729c](https://www.github.com/googleapis/gapic-generator-python/commit/1aa729cc8d2f7f0de25c8348fdbf9d6dd96f5847)) + ### [0.26.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.5...v0.26.6) (2020-07-10) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index cac6481b7083..1764dc1e5392 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.26.6" +version = "0.27.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 8f0ab5ed367bf13476595c852821450b34104de7 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 15 Jul 2020 16:03:41 -0700 Subject: [PATCH 0324/1339] fix: paged code and templates are no longer message centric (#527) --- .../%sub/services/%service/pagers.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 26 +++++----- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../%sub/services/%service/pagers.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 52 +++++++++---------- 5 files changed, 42 insertions(+), 42 deletions(-) diff --git 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index cdc469227940..73115c59acef 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -12,7 +12,7 @@ from typing import Any, Callable, Iterable, Sequence, Tuple {% for method in service.methods.values() | selectattr('paged_result_field') -%} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{{ method.paged_result_field.message.ident.python_import }} +{% if not method.paged_result_field.is_primitive %}{{ method.paged_result_field.message.ident.python_import }}{% endif %} {% endfor %} {% endfilter -%} {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6091d5a3f716..f4c17b6fb548 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -422,9 +422,9 @@ def test_{{ method.name|snake_case }}_pager(): call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='abc', ), @@ -434,14 +434,14 @@ def test_{{ method.name|snake_case }}_pager(): ), {{ method.output.ident 
}}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='ghi', ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], ), RuntimeError, @@ -465,7 +465,7 @@ def test_{{ method.name|snake_case }}_pager(): results = [i for i in pager] assert len(results) == 6 - assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) def test_{{ method.name|snake_case }}_pages(): @@ -481,9 +481,9 @@ def test_{{ method.name|snake_case }}_pages(): call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='abc', ), @@ -493,14 +493,14 @@ def test_{{ method.name|snake_case }}_pages(): ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='ghi', ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], ), RuntimeError, diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 24178af08a14..54f5364fb739 100644 --- 
a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -801,7 +801,7 @@ def _ref_types(self, recursive: bool) -> Sequence[Union[MessageType, EnumType]]: # If this message paginates its responses, it is possible # that the individual result messages reside in a different module. - if self.paged_result_field: + if self.paged_result_field and self.paged_result_field.message: answer.append(self.paged_result_field.message) # Done; return the answer. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index cc7bc56100ff..2f9598d2e3e2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -12,7 +12,7 @@ from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, {% for method in service.methods.values() | selectattr('paged_result_field') -%} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{{ method.paged_result_field.message.ident.python_import }} +{% if not method.paged_result_field.is_primitive %}{{ method.paged_result_field.message.ident.python_import }}{% endif %} {% endfor %} {% endfilter -%} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e8c5bd067a34..3b63c2db3dae 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -707,9 +707,9 @@ def test_{{ method.name|snake_case }}_pager(): 
call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='abc', ), @@ -719,14 +719,14 @@ def test_{{ method.name|snake_case }}_pager(): ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='ghi', ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], ), RuntimeError, @@ -750,7 +750,7 @@ def test_{{ method.name|snake_case }}_pager(): results = [i for i in pager] assert len(results) == 6 - assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) def test_{{ method.name|snake_case }}_pages(): @@ -766,9 +766,9 @@ def test_{{ method.name|snake_case }}_pages(): call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='abc', ), @@ -778,14 +778,14 @@ def test_{{ method.name|snake_case }}_pages(): ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), + {{ 
method.paged_result_field.type.ident }}(), ], next_page_token='ghi', ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], ), RuntimeError, @@ -808,9 +808,9 @@ async def test_{{ method.name|snake_case }}_async_pager(): call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='abc', ), @@ -820,14 +820,14 @@ async def test_{{ method.name|snake_case }}_async_pager(): ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='ghi', ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], ), RuntimeError, @@ -839,7 +839,7 @@ async def test_{{ method.name|snake_case }}_async_pager(): responses.append(response) assert len(responses) == 6 - assert all(isinstance(i, {{ method.paged_result_field.message.ident }}) + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in responses) @pytest.mark.asyncio @@ -856,9 +856,9 @@ async def test_{{ method.name|snake_case }}_async_pages(): call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), - 
{{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='abc', ), @@ -868,14 +868,14 @@ async def test_{{ method.name|snake_case }}_async_pages(): ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], next_page_token='ghi', ), {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.message.ident }}(), - {{ method.paged_result_field.message.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), ], ), RuntimeError, From d393b648a5a3984f30b0bfc47541d64aaddd41e6 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 16 Jul 2020 10:57:35 -0700 Subject: [PATCH 0325/1339] feat: add retry config passed to bazel rule (#526) --- .../rules_python_gapic/py_gapic.bzl | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 55c23223cc94..34360b9dffa4 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -14,17 +14,28 @@ load("@com_google_api_codegen//rules_gapic:gapic.bzl", "proto_custom_library") -def py_gapic_library(name, srcs, plugin_args = [], opt_args = [], **kwargs): +def py_gapic_library( + name, + srcs, + grpc_service_config = None, + plugin_args = [], + opt_args = [], + **kwargs): # srcjar_target_name = "%s_srcjar" % name srcjar_target_name = name srcjar_output_suffix = ".srcjar" + file_args = {} + if grpc_service_config: + file_args[grpc_service_config] = "retry-config" + + proto_ proto_custom_library( name = srcjar_target_name, deps = srcs, plugin = 
Label("@gapic_generator_python//:gapic_plugin"), plugin_args = plugin_args, - plugin_file_args = {}, + plugin_file_args = file_args, opt_args = opt_args, output_type = "python_gapic", output_suffix = srcjar_output_suffix, From b80d87fa2bed9679d392823a92406ce0ec190244 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 16 Jul 2020 11:25:09 -0700 Subject: [PATCH 0326/1339] chore: release 0.28.0 (#529) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a29751d70db4..3019eb614982 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.28.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.27.0...v0.28.0) (2020-07-16) + + +### Features + +* add retry config passed to bazel rule ([#526](https://www.github.com/googleapis/gapic-generator-python/issues/526)) ([9e96151](https://www.github.com/googleapis/gapic-generator-python/commit/9e96151d702786912fcf033f7535efad8ae754ee)) + + +### Bug Fixes + +* paged code and templates are no longer message centric ([#527](https://www.github.com/googleapis/gapic-generator-python/issues/527)) ([00ba77c](https://www.github.com/googleapis/gapic-generator-python/commit/00ba77c3d27ef9a0b8742db3660983b80a68c672)) + ## [0.27.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.6...v0.27.0) (2020-07-13) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1764dc1e5392..d6cf0c728fb5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT 
= os.path.abspath(os.path.dirname(__file__)) -version = "0.27.0" +version = "0.28.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From cf08769fc2621ba9ff0ca49c8a5d3e26d0b31c4c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 16 Jul 2020 14:08:01 -0700 Subject: [PATCH 0327/1339] fix: remove typo from py_gapic.bzl (#532) --- packages/gapic-generator/rules_python_gapic/py_gapic.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 34360b9dffa4..7c6f938f5230 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -29,7 +29,6 @@ def py_gapic_library( if grpc_service_config: file_args[grpc_service_config] = "retry-config" - proto_ proto_custom_library( name = srcjar_target_name, deps = srcs, From f274d491e805d00b7a6aaaea9c0ccf499dea621c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 16 Jul 2020 19:47:37 -0700 Subject: [PATCH 0328/1339] chore: release 0.28.1 (#533) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3019eb614982..3f44027c222a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.28.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.28.0...v0.28.1) (2020-07-16) + + +### Bug Fixes + +* remove typo from py_gapic.bzl 
([#532](https://www.github.com/googleapis/gapic-generator-python/issues/532)) ([2975c2d](https://www.github.com/googleapis/gapic-generator-python/commit/2975c2d76e08b5ee5324730707707d9dd6ced8ae)) + ## [0.28.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.27.0...v0.28.0) (2020-07-16) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d6cf0c728fb5..df27bd1fa79d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.28.0" +version = "0.28.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 045137495d5040175d91f93f40a366444c59afe9 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 21 Jul 2020 22:29:39 -0700 Subject: [PATCH 0329/1339] fix: make # after alpha/beta optional (#540) --- packages/gapic-generator/gapic/schema/naming.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 699b524ab5f0..21a075b95d94 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -99,7 +99,7 @@ def build( # This code may look counter-intuitive (why not use ? to make it # optional), but the engine's greediness routine will decide that # the version is the name, which is not what we want. 
- version = r'\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta)[0-9]+)?)' + version = r'\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta)[0-9]*)?)' if re.search(version, root_package): pattern += version From b607c1f827f4e94d60e7b956222ee0ddc2e49fb3 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 22 Jul 2020 10:12:22 -0700 Subject: [PATCH 0330/1339] feat: support quota project override via client options (#496) * feat: support quota project override via client options * chore(deps): bump api-core version * Update setup.py.j2 --- .../%sub/services/%service/client.py.j2 | 1 + .../services/%service/transports/base.py.j2 | 12 ++++- .../services/%service/transports/grpc.py.j2 | 15 ++++-- .../%service/transports/grpc_asyncio.py.j2 | 14 ++++- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 52 +++++++++++++++---- packages/gapic-generator/requirements.txt | 2 +- 7 files changed, 78 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index c34babd7631b..4fc0102ddf20 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -214,6 +214,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): scopes=client_options.scopes, api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, ) {% for method in service.methods.values() -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 8d03088780de..3e5836c76d41 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -33,6 +33,7 @@ class {{ service.name }}Transport(abc.ABC): credentials: credentials.Credentials = None, credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -49,6 +50,8 @@ class {{ service.name }}Transport(abc.ABC): be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -61,9 +64,14 @@ class {{ service.name }}Transport(abc.ABC): raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file(credentials_file, scopes=scopes) + credentials, _ = auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id + ) + elif credentials is None: - credentials, _ = auth.default(scopes=scopes) + credentials, _ = auth.default(scopes=scopes, quota_project_id=quota_project_id) # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 245602bf50a2..d5fb0818bdab 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -44,7 +44,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None) -> None: """Instantiate the transport. Args: @@ -71,6 +72,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -89,7 +92,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) # Create SSL credentials with client_cert_source or application # default SSL credentials. 
@@ -108,6 +111,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) # Run the base constructor. @@ -115,7 +119,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) self._stubs = {} # type: Dict[str, Callable] @@ -126,6 +131,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -141,6 +147,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. 
Returns: @@ -156,6 +164,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials=credentials, credentials_file=credentials_file, scopes=scopes, + quota_project_id=quota_project_id, **kwargs ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index af182c11b840..700f1e746279 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -45,6 +45,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, **kwargs) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: @@ -60,6 +61,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. 
Returns: @@ -71,6 +74,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials=credentials, credentials_file=credentials_file, scopes=scopes, + quota_project_id=quota_project_id, **kwargs ) @@ -81,7 +85,9 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: """Instantiate the transport. Args: @@ -109,6 +115,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -143,6 +151,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) # Run the base constructor. 
@@ -150,7 +159,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) self._stubs = {} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 1b9a43d1d9cd..e163e98389db 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,7 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.21.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.22.0, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.1.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3b63c2db3dae..753e33541386 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -112,6 +112,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans scopes=None, api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -127,6 +128,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans scopes=None, api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -142,6 +144,7 @@ def test_{{ service.client_name|snake_case 
}}_client_options(client_class, trans scopes=None, api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -158,6 +161,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans scopes=None, api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, + quota_project_id=None, ) @@ -175,6 +179,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans scopes=None, api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -191,15 +196,29 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans scopes=None, api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has # unsupported value. 
- os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" - with pytest.raises(MutualTLSChannelError): - client = client_class() + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() - del os.environ["GOOGLE_API_USE_MTLS"] + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -221,6 +240,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class scopes=["1", "2"], api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) @@ -243,6 +263,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl scopes=None, api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, + quota_project_id=None, ) @@ -259,6 +280,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): scopes=None, api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, + quota_project_id=None, ) @@ -1001,12 +1023,15 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with("credentials.json", scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', {%- endfor %} - )) + ), + quota_project_id="octopus", + ) def test_{{ 
service.name|snake_case }}_auth_adc(): @@ -1017,8 +1042,9 @@ def test_{{ service.name|snake_case }}_auth_adc(): adc.assert_called_once_with(scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} - )) + {%- endfor %}), + quota_project_id=None, + ) def test_{{ service.name|snake_case }}_transport_auth_adc(): @@ -1026,13 +1052,13 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(): # ADC credentials. with mock.patch.object(auth, 'default') as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk") + transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk", quota_project_id="octopus") adc.assert_called_once_with(scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} - )) - + {%- endfor %}), + quota_project_id="octopus", + ) def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} @@ -1122,6 +1148,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_c {%- endfor %} ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -1160,6 +1187,7 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_ {%- endfor %} ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -1200,6 +1228,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( {%- endfor %} ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel @@ -1240,6 +1269,7 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_ {%- endfor %} ), ssl_credentials=mock_ssl_cred, + quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel diff --git a/packages/gapic-generator/requirements.txt 
b/packages/gapic-generator/requirements.txt index afd9f7aac606..0c6e8cabedd3 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.21.0 +google-api-core==1.22.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 3b5366a9c3d411185609a8262ca84777d31dea66 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 22 Jul 2020 10:48:39 -0700 Subject: [PATCH 0331/1339] feat: add iam methods to templates (#545) * feat: add iam methods to templates * fix: fix typo * chore: fix formatting * fix: fix ref to option * chore(deps): add iam dependency * chore: add circleci * chore: take 2 * chore: only test with 3.8 Co-authored-by: Dov Shlachter --- packages/gapic-generator/.circleci/config.yml | 32 ++ .../gapic/generator/options.py | 3 + .../%sub/services/%service/async_client.py.j2 | 270 +++++++++++ .../%sub/services/%service/client.py.j2 | 270 +++++++++++ .../services/%service/transports/base.py.j2 | 36 ++ .../services/%service/transports/grpc.py.j2 | 84 ++++ .../%service/transports/grpc_asyncio.py.j2 | 85 ++++ .../fixup_%name_%version_keywords.py.j2 | 5 + .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 431 ++++++++++++++++++ packages/gapic-generator/noxfile.py | 5 + .../tests/unit/generator/test_options.py | 5 + 12 files changed, 1227 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index df3c7356dcc3..23646670cbc1 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -68,6 +68,14 @@ workflows: filters: tags: only: /^v\d+\.\d+\.\d+$/ + - showcase-unit-add-iam-methods: + requires: + - unit-3.6 + - unit-3.7 + - unit-3.8 + filters: + tags: + only: /^v\d+\.\d+\.\d+$/ - showcase-mypy: requires: - mypy @@ -502,6 +510,30 @@ jobs: - run: name: Run 
unit tests. command: nox -s showcase_unit_alternative_templates-3.8 + showcase-unit-add-iam-methods: + docker: + - image: python:3.8-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip + - run: + name: Install protoc 3.12.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit_add_iam_methods-3.8 showcase-mypy: docker: - image: python:3.8-slim diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/generator/options.py index 40052d01c546..c3a1ef322e6e 100644 --- a/packages/gapic-generator/gapic/generator/options.py +++ b/packages/gapic-generator/gapic/generator/options.py @@ -39,6 +39,7 @@ class Options: templates: Tuple[str, ...] 
= dataclasses.field(default=('DEFAULT',)) lazy_import: bool = False old_naming: bool = False + add_iam_methods: bool = False # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' @@ -47,6 +48,7 @@ class Options: 'retry-config', # takes a path 'samples', # output dir 'lazy-import', # requires >= 3.7 + 'add-iam-methods', # microgenerator implementation for `reroute_to_grpc_interface` )) @classmethod @@ -131,6 +133,7 @@ def tweak_path(p): templates=tuple(path.expanduser(i) for i in templates), lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), + add_iam_methods=bool(opts.pop('add-iam-methods', False)), ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 36a34471f86e..5447e1f9e618 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -20,6 +20,10 @@ from google.oauth2 import service_account # type: ignore {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} @@ -256,6 +260,272 @@ class {{ service.async_client_name }}: {% endfor %} + {% if opts.add_iam_methods %} + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: 
Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + try: _client_info = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 4fc0102ddf20..b1d1898e25fc 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -22,6 +22,10 @@ from google.oauth2 import service_account # type: ignore {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport from .transports.grpc import {{ service.grpc_transport_name }} @@ -386,6 +390,272 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ '\n' }} {% endfor %} + {% if opts.add_iam_methods %} + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + try: _client_info = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 3e5836c76d41..57fa07609d6d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -16,6 +16,10 @@ from google.auth import credentials # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} {% endfilter %} class {{ service.name }}Transport(abc.ABC): @@ -95,6 +99,38 @@ class {{ service.name }}Transport(abc.ABC): {%- endfor %} + {% if opts.add_iam_methods %} + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + {% endif %} + __all__ = ( '{{ service.name }}Transport', ) diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index d5fb0818bdab..2c3ce4ba35b0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -19,6 +19,10 @@ import grpc # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport @@ -235,6 +239,86 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} + {% if opts.add_iam_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + {% endif %} __all__ = ( '{{ service.name }}GrpcTransport', diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 700f1e746279..dbf71b29621d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -18,6 +18,10 @@ from grpc.experimental import aio # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport from .grpc import {{ service.name }}GrpcTransport @@ -232,6 +236,87 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs['{{ method.name|snake_case }}'] {%- endfor %} + {% if opts.add_iam_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. 
Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. 
If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + {% endif %} __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 index 99681ed99225..6688025184e4 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -32,6 +32,11 @@ class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): {% for method in all_methods|sort(attribute='name')|unique(attribute='name') -%} '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), {% endfor -%} + {% if opts.add_iam_methods %} + 'get_iam_policy': ('resource', 'options', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + {% endif %} } {% endwith %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index e163e98389db..c22b22449401 
100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -19,7 +19,7 @@ setuptools.setup( 'google-api-core[grpc] >= 1.22.0, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.1.0', - {%- if api.requires_package(('google', 'iam', 'v1')) %} + {%- if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} 'grpc-google-iam-v1', {%- endif %} ), diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 753e33541386..a00581380fb5 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -38,6 +38,11 @@ from google.api_core import gapic_v1 {{ ref_type.ident.python_import }} {% endfor -%} {% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} {% endfilter %} @@ -1004,6 +1009,11 @@ def test_{{ service.name|snake_case }}_base_transport(): {% for method in service.methods.values() -%} '{{ method.name|snake_case }}', {% endfor -%} + {%- if opts.add_iam_methods -%} + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + {% endif -%} ) for method in methods: with pytest.raises(NotImplementedError): @@ -1336,4 +1346,425 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): {% endwith -%} {% endfor -%} +{% if opts.add_iam_methods %} +def test_set_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is 
concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() +{% endif %} + {% endblock %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index e497d0efd8cf..abaef7fa0c8f 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -176,6 +176,11 @@ def showcase_unit_alternative_templates(session): showcase_unit(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) +@nox.session(python=["3.8"]) +def showcase_unit_add_iam_methods(session): + showcase_unit(session, other_opts=("add-iam-methods",)) + + @nox.session(python="3.8") def showcase_mypy( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index b7d6cc2082e8..e4bac805eedc 100644 --- 
a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -147,3 +147,8 @@ def test_options_lazy_import(): def test_options_old_naming(): opts = options.Options.build('old-naming') assert opts.old_naming + + +def test_options_add_iam_methods(): + opts = options.Options.build('add-iam-methods') + assert opts.add_iam_methods From 8ad8f62634006900de3c685dfa2beb12eaef5e6e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 22 Jul 2020 10:54:36 -0700 Subject: [PATCH 0332/1339] chore: release 0.29.0 (#547) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3f44027c222a..d82f72ba8a0a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.29.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.28.1...v0.29.0) (2020-07-22) + + +### Features + +* add iam methods to templates ([#545](https://www.github.com/googleapis/gapic-generator-python/issues/545)) ([3f42c3c](https://www.github.com/googleapis/gapic-generator-python/commit/3f42c3cf8aae432a9bda0953fbabd7f0c8d774de)) +* support quota project override via client options ([#496](https://www.github.com/googleapis/gapic-generator-python/issues/496)) ([bbc6b36](https://www.github.com/googleapis/gapic-generator-python/commit/bbc6b367f50526312e8320f0fc668ef88f230dbd)) + + +### Bug Fixes + +* make # after alpha/beta optional ([#540](https://www.github.com/googleapis/gapic-generator-python/issues/540)) 
([f86a47b](https://www.github.com/googleapis/gapic-generator-python/commit/f86a47b6431e374ae1797061511b49fe6bf22daf)) + ### [0.28.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.28.0...v0.28.1) (2020-07-16) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index df27bd1fa79d..04047b6e6e70 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.28.1" +version = "0.29.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 14c416d9a4007e05bf8e28293c38f8d3ac27a34f Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 22 Jul 2020 17:29:10 -0700 Subject: [PATCH 0333/1339] fix: use context manager for mtls env var (#548) --- .../%name_%version/%sub/test_%service.py.j2 | 104 ++++++++-------- .../%name_%version/%sub/test_%service.py.j2 | 116 +++++++++--------- 2 files changed, 109 insertions(+), 111 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f4c17b6fb548..574759dfbdce 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -105,49 +105,21 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". 
- os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - credentials=None, - host=client.DEFAULT_ENDPOINT, - ) + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". - os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - credentials=None, - host=client.DEFAULT_MTLS_ENDPOINT, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. 
- os.environ["GOOGLE_API_USE_MTLS"] = "auto" - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) - grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - credentials=None, - host=client.DEFAULT_MTLS_ENDPOINT, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() grpc_transport.assert_called_once_with( @@ -158,26 +130,52 @@ def test_{{ service.client_name|snake_case }}_client_options(): ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None - client = {{ service.client_name }}() + client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, credentials=None, - host=client.DEFAULT_ENDPOINT, + host=client.DEFAULT_MTLS_ENDPOINT, ) + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_MTLS_ENDPOINT, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + credentials=None, + host=client.DEFAULT_ENDPOINT, + ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has # unsupported value. 
- os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" - with pytest.raises(MutualTLSChannelError): - client = {{ service.client_name }}() - - del os.environ["GOOGLE_API_USE_MTLS"] + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = {{ service.client_name }}() def test_{{ service.client_name|snake_case }}_client_options_from_dict(): diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a00581380fb5..95d86404ce38 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -122,59 +122,24 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "never". - os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - ) + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is # "always". 
- os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -188,22 +153,57 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, quota_project_id=None, + ) + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): From d2a0dd6bfa4dc7164e166c374ec6ec665aec81cd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 22 Jul 2020 17:34:36 -0700 Subject: [PATCH 0334/1339] chore: release 0.29.1 (#549) --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d82f72ba8a0a..f7605562d979 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.29.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.0...v0.29.1) (2020-07-23) + + +### Bug Fixes + +* use context manager for mtls env var ([#548](https://www.github.com/googleapis/gapic-generator-python/issues/548)) ([d19e180](https://www.github.com/googleapis/gapic-generator-python/commit/d19e1808df9cd2884ae7a449977a479b4829bc1d)) + ## [0.29.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.28.1...v0.29.0) (2020-07-22) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 04047b6e6e70..50361085b653 100644 --- a/packages/gapic-generator/setup.py +++ 
b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.29.0" +version = "0.29.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 173ad9012bf1aeda639eabb68d152579fb0a85be Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 23 Jul 2020 12:24:25 -0700 Subject: [PATCH 0335/1339] fix: rename __init__.py to __init__.py.j2 (#550) Otherwise this file is excluded from generation. Partially addresses #437 --- .../templates/tests/unit/gapic/%name_%version/%sub/__init__.py | 0 .../tests/unit/gapic/%name_%version/%sub/__init__.py.j2 | 1 + 2 files changed, 1 insertion(+) delete mode 100644 packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 new file mode 100644 index 000000000000..d3f5a12faa99 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 @@ -0,0 +1 @@ + From 24576bac2c15fd4c9e7b1bce72a97d385948cbc2 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 23 Jul 2020 13:40:12 -0700 Subject: [PATCH 0336/1339] chore: distinguish default/mtls endpoint for tests (#551) * chore: distinguish default/mtls endpoint for tests * remove async client on ads template --- 
.../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 8 ++++++++ .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 9 +++++++++ 2 files changed, 17 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 574759dfbdce..5f8c444448a4 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -41,6 +41,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -77,6 +84,7 @@ def test_{{ service.client_name|snake_case }}_get_transport_class(): assert transport == transports.{{ service.name }}GrpcTransport +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) def test_{{ service.client_name|snake_case }}_client_options(): # Check that if channel is provided we won't create a new one. 
with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 95d86404ce38..c112d3ef58cf 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -50,6 +50,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -91,6 +98,8 @@ def test_{{ service.client_name|snake_case }}_get_transport_class(): ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") ]) +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) def test_{{ service.client_name|snake_case }}_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: From 835d82e419fc1b2e4866ad0655aeb8eea6bb4edb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 23 Jul 2020 17:21:42 -0700 Subject: [PATCH 0337/1339] chore: release 0.29.2 (#552) * updated CHANGELOG.md [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Dov Shlachter --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f7605562d979..8b0fd7abd3ea 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.29.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.1...v0.29.2) (2020-07-23) + + +### Bug Fixes + +* rename __init__.py to __init__.py.j2 ([#550](https://www.github.com/googleapis/gapic-generator-python/issues/550)) ([71a7062](https://www.github.com/googleapis/gapic-generator-python/commit/71a7062b918136b916cc5bfc7dbdf64f870edf6a)), closes [#437](https://www.github.com/googleapis/gapic-generator-python/issues/437) + ### [0.29.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.0...v0.29.1) (2020-07-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 50361085b653..9253804626ab 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.29.1" +version = "0.29.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From e7941071a5374aea633a06bb7ee671ae389ab43a Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 27 Jul 2020 10:23:09 -0700 
Subject: [PATCH 0338/1339] feat: precache wrapped rpcs (#553) During transport construction, cache the wrapped methods that the client will eventually use when invoking rpcs. This has a ~7.4% time impact in synthetic benchmarks. --- .../%sub/services/%service/client.py.j2 | 31 +------------ .../services/%service/transports/base.py.j2 | 43 +++++++++++++++++++ .../services/%service/transports/grpc.py.j2 | 3 +- .../gapic/ads-templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 10 +++-- .../%sub/services/%service/client.py.j2 | 21 +-------- .../services/%service/transports/base.py.j2 | 43 +++++++++++++++++++ .../services/%service/transports/grpc.py.j2 | 4 +- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 11 +++-- 10 files changed, 109 insertions(+), 61 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 1d14fbd64281..ffb77b3ce4c5 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -211,6 +211,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_cert_source=client_options.client_cert_source, ) + {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, {%- if not method.client_streaming %} @@ -307,25 +308,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.{{ method.name|snake_case }}, - {%- if method.retry %} - default_retry=retries.Retry( - {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} - {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} - {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} - predicate=retries.if_exception_type( - {%- filter sort_lines %} - {%- for ex in method.retry.retryable_exceptions %} - exceptions.{{ ex.__name__ }}, - {%- endfor %} - {%- endfilter %} - ), - ), - {%- endif %} - default_timeout={{ method.timeout }}, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] {%- if method.field_headers %} # Certain fields should be provided within the metadata header; @@ -381,16 +364,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %} -try: - _client_info = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - '{{ api.naming.warehouse_package_name }}', - ).version, - ) -except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() - - __all__ = ( '{{ service.client_name }}', ) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 694e0a16645f..977f2ec0b8a7 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -3,8 +3,10 @@ {% block content %} import abc import typing +import pkg_resources from google import auth +from google.api_core import gapic_v1 # type: ignore {%- 
if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} @@ -17,6 +19,16 @@ from google.auth import credentials # type: ignore {% endfor -%} {% endfilter %} +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + '{{ api.naming.warehouse_package_name }}', + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + class {{ service.name }}Transport(metaclass=abc.ABCMeta): """Abstract transport class for {{ service.name }}.""" @@ -54,6 +66,37 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): # Save the credentials. self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precomputed wrapped methods + self._wrapped_methods = { + {% for method in service.methods.values() -%} + self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method.name|snake_case }}, + {%- if method.retry %} + default_retry=retries.Retry( + {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} + {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} + {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + predicate=retries.if_exception_type( + {%- filter sort_lines %} + {%- for ex in method.retry.retryable_exceptions %} + exceptions.{{ ex.__name__ }}, + {%- endfor %} + {%- endfilter %} + ), + ), + {%- endif %} + default_timeout={{ method.timeout }}, + client_info=_client_info, + ), + {% endfor %} {# precomputed wrappers loop #} + } + + {%- if service.has_lro %} @property diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 1632b7762110..01c77c8f231d 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -98,9 +98,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes=self.AUTH_SCOPES, ) + self._stubs = {} # type: Dict[str, Callable] + # Run the base constructor. super().__init__(host=host, credentials=credentials) - self._stubs = {} # type: Dict[str, Callable] @classmethod diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 1dfcf0caf215..70a168c254db 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -19,7 +19,7 @@ setuptools.setup( 'google-api-core >= 1.17.0, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', - 'proto-plus >= 1.1.0', + 'proto-plus >= 1.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {%- endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5f8c444448a4..80d9f4321451 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -187,7 +187,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): def test_{{ service.client_name|snake_case }}_client_options_from_dict(): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + 
service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}( client_options={'api_endpoint': 'squid.clam.whelk'} @@ -556,9 +556,11 @@ def test_transport_grpc_default(): def test_{{ service.name|snake_case }}_base_transport(): # Instantiate the base transport. - transport = transports.{{ service.name }}Transport( - credentials=credentials.AnonymousCredentials(), - ) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.{{ service.name }}Transport( + credentials=credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index b1d1898e25fc..e85a025ada39 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -221,6 +221,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): quota_project_id=client_options.quota_project_id, ) + {% for method in service.methods.values() -%} def {{ method.name|snake_case }}(self, {%- if not method.client_streaming %} @@ -317,25 +318,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.{{ method.name|snake_case }}, - {%- if method.retry %} - default_retry=retries.Retry( - {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} - {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} - {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} - predicate=retries.if_exception_type( - {%- filter sort_lines %} - {%- for ex in method.retry.retryable_exceptions %} - exceptions.{{ ex.__name__ }}, - {%- endfor %} - {%- endfilter %} - ), - ), - {%- endif %} - default_timeout={{ method.timeout }}, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] {%- if method.field_headers %} # Certain fields should be provided within the metadata header; diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 57fa07609d6d..2cfb2d6e6074 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -3,9 +3,11 @@ {% block content %} import abc import typing +import pkg_resources from google import auth from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} @@ -22,6 +24,15 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + '{{ api.naming.warehouse_package_name }}', + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" @@ -79,6 +90,38 @@ class {{ service.name }}Transport(abc.ABC): # Save the credentials. self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + {% for method in service.methods.values() -%} + self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method.name|snake_case }}, + {%- if method.retry %} + default_retry=retries.Retry( + {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} + {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} + {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + predicate=retries.if_exception_type( + {%- filter sort_lines %} + {%- for ex in method.retry.retryable_exceptions %} + exceptions.{{ ex.__name__ }}, + {%- endfor %} + {%- endfilter %} + ), + ), + {%- endif %} + default_timeout={{ method.timeout }}, + client_info=_client_info, + ), + {% endfor %} {# precomputed wrappers loop #} + } + + {%- if service.has_lro %} @property diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 2c3ce4ba35b0..f4a81c31f1c3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -118,6 +118,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): quota_project_id=quota_project_id, ) + self._stubs = {} # type: Dict[str, Callable] + # Run the base constructor. 
super().__init__( host=host, @@ -127,8 +129,6 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): quota_project_id=quota_project_id, ) - self._stubs = {} # type: Dict[str, Callable] - @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index c22b22449401..b784adbdc02e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -18,7 +18,7 @@ setuptools.setup( install_requires=( 'google-api-core[grpc] >= 1.22.0, < 2.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.1.0', + 'proto-plus >= 1.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} 'grpc-google-iam-v1', {%- endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c112d3ef58cf..f07153ff3d38 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1008,9 +1008,11 @@ def test_{{ service.name|snake_case }}_base_transport_error(): def test_{{ service.name|snake_case }}_base_transport(): # Instantiate the base transport. 
- transport = transports.{{ service.name }}Transport( - credentials=credentials.AnonymousCredentials(), - ) + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.{{ service.name }}Transport( + credentials=credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. @@ -1038,7 +1040,8 @@ def test_{{ service.name|snake_case }}_base_transport(): def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, 'load_credentials_from_file') as load_creds: + with mock.patch.object(auth, 'load_credentials_from_file') as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", From 68b1c77d19b42ab079895ca94434e836b0a57145 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Jul 2020 10:33:44 -0700 Subject: [PATCH 0339/1339] chore: release 0.30.0 (#554) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md 
b/packages/gapic-generator/CHANGELOG.md index 8b0fd7abd3ea..8c07b9edc250 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.30.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.2...v0.30.0) (2020-07-27) + + +### Features + +* precache wrapped rpcs ([#553](https://www.github.com/googleapis/gapic-generator-python/issues/553)) ([2f2fb5d](https://www.github.com/googleapis/gapic-generator-python/commit/2f2fb5d3d9472a79c80be6d052129d07d2bbb835)) + ### [0.29.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.1...v0.29.2) (2020-07-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9253804626ab..bb8fd00a19c8 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.29.2" +version = "0.30.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 7213059caab2dbd4041890c210ca0c2091c71675 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 28 Jul 2020 12:47:34 -0700 Subject: [PATCH 0340/1339] fix: add google.api_core.retry import to base.py (#555) --- .../%name/%version/%sub/services/%service/transports/base.py.j2 | 1 + .../%name_%version/%sub/services/%service/transports/base.py.j2 | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 977f2ec0b8a7..ba39b27abe93 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -7,6 +7,7 @@ import pkg_resources from google import auth from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 2cfb2d6e6074..dbc64639d7f2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -8,6 +8,7 @@ import pkg_resources from google import auth from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} From ff5d971c40654cecd9945f73357a6c25d71adcd1 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 28 Jul 2020 14:57:41 -0700 Subject: [PATCH 0341/1339] feat: bypass request copying in method calls (#557) If a proto-plus message is passed in, do not copy it. 
--- .../%sub/services/%service/client.py.j2 | 36 +++++++++-------- .../%name_%version/%sub/test_%service.py.j2 | 11 +++-- .../%sub/services/%service/client.py.j2 | 40 +++++++++++-------- .../%name_%version/%sub/test_%service.py.j2 | 10 +++-- 4 files changed, 58 insertions(+), 39 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index ffb77b3ce4c5..6fd1f7262199 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -287,24 +287,28 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}() {% endif -%}{# Cross-package req and flattened fields #} {%- else %} - request = {{ method.input.ident }}(request) + # Minor optimization to avoid making a copy if the user passes + # in a {{ method.input.ident }}. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, {{ method.input.ident }}): + request = {{ method.input.ident }}(request) {% endif %} {# different request package #} - {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields -%} - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- {% endif -%} - {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} - if {{ field.name }} is not None: - request.{{ key }} = {{ field.name }} - {%- endfor %} - {# They can be _extended_, however -#} - {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} - if {{ field.name }}: - request.{{ key }}.extend({{ field.name }}) - {%- endfor %} - {%- endif %} + {% if method.flattened_fields -%} + # If we have keyword arguments corresponding to fields on the + # request, apply these. + {% endif -%} + {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }} is not None: + request.{{ key }} = {{ field.name }} + {%- endfor %} + {# They can be _extended_, however -#} + {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }}: + request.{{ key }}.extend({{ field.name }}) + {%- endfor %} + {%- endif %} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 80d9f4321451..fc5a596d6fa9 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -201,7 +201,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): {% for method in service.methods.values() -%} -def test_{{ method.name|snake_case }}(transport: str = 'grpc'): +def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -209,7 +209,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = {{ method.input.ident }}() + request = request_type() {% if method.client_streaming %} requests = [request] {% endif %} @@ -250,7 +250,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% if method.client_streaming %} assert next(args[0]) == request {% else %} - assert args[0] == request + assert args[0] == {{ method.input.ident }}() {% endif %} # Establish that the response is the type that we expect. 
@@ -275,6 +275,11 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% endfor %} {% endif %} + +def test_{{ method.name|snake_case }}_from_dict(): + test_{{ method.name|snake_case }}(request_type=dict) + + {% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index e85a025ada39..9f4ba159083f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -282,7 +282,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.flattened_fields -%} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + has_flattened_params = any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]) + if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -297,24 +298,29 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}() {% endif -%}{# Cross-package req and flattened fields #} {%- else %} - request = {{ method.input.ident }}(request) + # Minor optimization to avoid making a copy if the user passes + # in a {{ method.input.ident }}. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, {{ method.input.ident }}): + request = {{ method.input.ident }}(request) {% endif %} {# different request package #} - {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields -%} - # If we have keyword arguments corresponding to fields on the - # request, apply these. - {% endif -%} - {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} - if {{ field.name }} is not None: - request.{{ key }} = {{ field.name }} - {%- endfor %} - {# They can be _extended_, however -#} - {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} - if {{ field.name }}: - request.{{ key }}.extend({{ field.name }}) - {%- endfor %} - {%- endif %} + {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} + {% if method.flattened_fields -%} + # If we have keyword arguments corresponding to fields on the + # request, apply these. + {% endif -%} + {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }} is not None: + request.{{ key }} = {{ field.name }} + {%- endfor %} + {# They can be _extended_, however -#} + {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + if {{ field.name }}: + request.{{ key }}.extend({{ field.name }}) + {%- endfor %} + {%- endif %} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f07153ff3d38..2fc3923b2acc 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -299,7 +299,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): {% for method in service.methods.values() -%} -def test_{{ method.name|snake_case }}(transport: str = 'grpc'): +def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -307,7 +307,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = {{ method.input.ident }}() + request = request_type() {% if method.client_streaming %} requests = [request] {% endif %} @@ -348,7 +348,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% if method.client_streaming %} assert next(args[0]) == request {% else %} - assert args[0] == request + assert args[0] == {{ method.input.ident }}() {% endif %} # Establish that the response is the type that we expect. 
@@ -374,6 +374,10 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc'): {% endif %} +def test_{{ method.name|snake_case }}_from_dict(): + test_{{ method.name|snake_case }}(request_type=dict) + + @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio'): client = {{ service.async_client_name }}( From 669e2fe647cf58ca75d178ec24653574cc7a40fc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 28 Jul 2020 15:03:31 -0700 Subject: [PATCH 0342/1339] chore: release 0.31.0 (#558) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8c07b9edc250..4eb6c1ff9393 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.31.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.30.0...v0.31.0) (2020-07-28) + + +### Features + +* bypass request copying in method calls ([#557](https://www.github.com/googleapis/gapic-generator-python/issues/557)) ([3a23143](https://www.github.com/googleapis/gapic-generator-python/commit/3a2314318de229a3353c984a8cb2766ae95cc968)) + + +### Bug Fixes + +* add google.api_core.retry import to base.py ([#555](https://www.github.com/googleapis/gapic-generator-python/issues/555)) ([1d08e60](https://www.github.com/googleapis/gapic-generator-python/commit/1d08e60cea4c5b3fa2555a4952161b0115d686f2)) + ## [0.30.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.2...v0.30.0) (2020-07-27) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 
bb8fd00a19c8..72147a100985 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.30.0" +version = "0.31.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From f11ab07b589c6107d1b22bd1d9906cd523362e6b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 30 Jul 2020 01:10:44 +0200 Subject: [PATCH 0343/1339] chore(deps): update dependency protobuf to v3.12.4 (#560) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0c6e8cabedd3..bd3ab29521b8 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.22.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 -protobuf==3.12.2 +protobuf==3.12.4 pypandoc==1.5 PyYAML==5.3.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From d828e36819541139676b6fce393ef894b4721d6b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 17 Aug 2020 17:47:00 +0200 Subject: [PATCH 0344/1339] chore(deps): update dependency google-api-core to v1.22.1 (#567) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index bd3ab29521b8..a7526cf9c096 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.22.0 +google-api-core==1.22.1 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 60433dc64963244cce1ad7dfb520dc600c701563 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 17 Aug 2020 09:40:25 -0700 Subject: [PATCH 0345/1339] fix: install gcc by 
hand (#571) Preliminary fix for broken unit tests in protobuf dependency update --- packages/gapic-generator/.circleci/config.yml | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 23646670cbc1..86717ba169d7 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -244,7 +244,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install nox. command: pip install nox @@ -269,7 +269,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install nox. command: pip install nox @@ -310,7 +310,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install nox. command: pip install nox @@ -335,7 +335,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install nox. command: pip install nox @@ -375,7 +375,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -399,7 +399,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -423,7 +423,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. 
command: | @@ -447,7 +447,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -471,7 +471,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -495,7 +495,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -519,7 +519,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -543,7 +543,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -567,7 +567,7 @@ jobs: name: Install system dependencies. command: | apt-get update - apt-get install -y curl pandoc unzip + apt-get install -y curl pandoc unzip gcc - run: name: Install protoc 3.12.1. command: | @@ -591,7 +591,7 @@ jobs: name: Install pandoc command: | apt-get update - apt-get install -y pandoc + apt-get install -y pandoc gcc - run: name: Install nox and codecov. command: | @@ -613,7 +613,7 @@ jobs: name: Install pandoc command: | apt-get update - apt-get install -y pandoc + apt-get install -y pandoc gcc - run: name: Install nox and codecov. command: | @@ -635,7 +635,7 @@ jobs: name: Install pandoc command: | apt-get update - apt-get install -y pandoc + apt-get install -y pandoc gcc - run: name: Install nox and codecov. 
command: | From e1d54922b82c0f0f3b01103125bbc3426a811666 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 17 Aug 2020 18:46:22 +0200 Subject: [PATCH 0346/1339] chore(deps): update dependency protobuf to v3.13.0 (#570) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index a7526cf9c096..09be3b3756c7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.22.1 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 -protobuf==3.12.4 +protobuf==3.13.0 pypandoc==1.5 PyYAML==5.3.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 929d6c85b761142b2b49bf4f53a74a3e189f47ea Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 17 Aug 2020 10:04:41 -0700 Subject: [PATCH 0347/1339] chore: release 0.31.1 (#572) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Dov Shlachter --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4eb6c1ff9393..5aa15e869ea4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.31.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.31.0...v0.31.1) (2020-08-17) + + +### Bug Fixes + +* install gcc by hand ([#571](https://www.github.com/googleapis/gapic-generator-python/issues/571)) 
([e224a03](https://www.github.com/googleapis/gapic-generator-python/commit/e224a0365a2d3ed20d69cf4d1298a3f022f8da76)) + ## [0.31.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.30.0...v0.31.0) (2020-07-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 72147a100985..53733a04eda4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.31.0" +version = "0.31.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 01b9fa2e8257fb21498260c3216861415e48c547 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 17 Aug 2020 15:43:03 -0700 Subject: [PATCH 0348/1339] feat: allow user-provided client info (#573) Fix for googleapis/python-kms#37, #566, and similar. --- .../%sub/services/%service/client.py.j2 | 9 ++++- .../services/%service/transports/base.py.j2 | 16 ++++++--- .../services/%service/transports/grpc.py.j2 | 18 ++++++++-- .../%name_%version/%sub/test_%service.py.j2 | 28 ++++++++++++++-- .../%sub/services/%service/async_client.py.j2 | 17 ++++++---- .../%sub/services/%service/client.py.j2 | 21 ++++++++---- .../services/%service/transports/base.py.j2 | 16 ++++++--- .../services/%service/transports/grpc.py.j2 | 13 ++++++-- .../%service/transports/grpc_asyncio.py.j2 | 10 +++++- .../%name_%version/%sub/test_%service.py.j2 | 33 +++++++++++++++++-- 10 files changed, 145 insertions(+), 36 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 6fd1f7262199..4c5a58740574 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -23,7 +23,7 @@ from google.oauth2 import service_account # type: ignore {% endfor -%} {% endfor -%} {% endfilter %} -from .transports.base import {{ service.name }}Transport +from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc import {{ service.name }}GrpcTransport @@ -135,6 +135,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -160,6 +161,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -209,6 +215,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): host=client_options.api_endpoint, api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, + client_info=client_info, ) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index ba39b27abe93..32e6c11f9fb2 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -21,13 +21,13 @@ from google.auth import credentials # type: ignore {% endfilter %} try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( '{{ api.naming.warehouse_package_name }}', ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class {{ service.name }}Transport(metaclass=abc.ABCMeta): @@ -43,6 +43,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -54,6 +55,11 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -69,9 +75,9 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precomputed wrapped methods self._wrapped_methods = { {% for method in service.methods.values() -%} @@ -92,7 +98,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): ), {%- endif %} default_timeout={{ method.timeout }}, - client_info=_client_info, + client_info=client_info, ), {% endfor %} {# precomputed wrappers loop #} } diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 01c77c8f231d..b25817b70b84 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -7,6 +7,7 @@ from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -20,7 +21,7 @@ import grpc # type: 
ignore {{ method.output.ident.python_import }} {% endfor -%} {% endfilter %} -from .base import {{ service.name }}Transport +from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO class {{ service.name }}GrpcTransport({{ service.name }}Transport): @@ -40,7 +41,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials: credentials.Credentials = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None) -> None: + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiate the transport. Args: @@ -62,6 +65,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -101,7 +109,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._stubs = {} # type: Dict[str, Callable] # Run the base constructor. 
- super().__init__(host=host, credentials=credentials) + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + ) @classmethod diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index fc5a596d6fa9..e9d47ac8aeb0 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -24,9 +24,7 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} -{% if service.has_pagers -%} from google.api_core import gapic_v1 -{% endif -%} {% for method in service.methods.values() -%} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') @@ -109,6 +107,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): client_cert_source=None, credentials=None, host="squid.clam.whelk", + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -122,6 +121,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): client_cert_source=None, credentials=None, host=client.DEFAULT_ENDPOINT, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -135,6 +135,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): client_cert_source=None, credentials=None, host=client.DEFAULT_MTLS_ENDPOINT, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -149,6 +150,7 @@ def test_{{ service.client_name|snake_case 
}}_client_options(): client_cert_source=client_cert_source_callback, credentials=None, host=client.DEFAULT_MTLS_ENDPOINT, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -163,6 +165,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): client_cert_source=None, credentials=None, host=client.DEFAULT_MTLS_ENDPOINT, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -177,6 +180,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): client_cert_source=None, credentials=None, host=client.DEFAULT_ENDPOINT, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -197,6 +201,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_cert_source=None, credentials=None, host="squid.clam.whelk", + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -769,4 +774,23 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): {% endwith -%} {% endfor -%} +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: + transport_class = {{ service.client_name }}.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 5447e1f9e618..d6882d95f4d2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -25,7 +25,7 @@ from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} -from .transports.base import {{ service.name }}Transport +from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .client import {{ service.client_name }} @@ -52,6 +52,7 @@ class {{ service.async_client_name }}: credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = 'grpc_asyncio', client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. 
@@ -87,6 +88,8 @@ class {{ service.async_client_name }}: credentials=credentials, transport=transport, client_options=client_options, + client_info=client_info, + ) {% for method in service.methods.values() -%} @@ -202,7 +205,7 @@ class {{ service.async_client_name }}: ), {%- endif %} default_timeout={{ method.timeout }}, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) {%- if method.field_headers %} @@ -352,7 +355,7 @@ class {{ service.async_client_name }}: rpc = gapic_v1.method_async.wrap_method( self._client._transport.set_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -459,7 +462,7 @@ class {{ service.async_client_name }}: rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -510,7 +513,7 @@ class {{ service.async_client_name }}: rpc = gapic_v1.method_async.wrap_method( self._client._transport.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -527,13 +530,13 @@ class {{ service.async_client_name }}: {% endif %} try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( '{{ api.naming.warehouse_package_name }}', ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 
9f4ba159083f..0aef4812bc4c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -27,7 +27,7 @@ from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} -from .transports.base import {{ service.name }}Transport +from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc import {{ service.grpc_transport_name }} from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} @@ -141,6 +141,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials: credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = None, client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -166,7 +167,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (2) The ``client_cert_source`` property is used to provide client SSL credentials for mutual TLS transport. If not provided, the default SSL credentials will be used if present. - + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
@@ -219,6 +225,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): api_mtls_endpoint=client_options.api_endpoint, client_cert_source=client_options.client_cert_source, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) @@ -471,7 +478,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): rpc = gapic_v1.method.wrap_method( self._transport.set_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -578,7 +585,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): rpc = gapic_v1.method.wrap_method( self._transport.get_iam_policy, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -629,7 +636,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): rpc = gapic_v1.method.wrap_method( self._transport.test_iam_permissions, default_timeout=None, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -647,13 +654,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( '{{ api.naming.warehouse_package_name }}', ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index dbc64639d7f2..3ee35f8724cb 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -26,13 +26,13 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore {% endfilter %} try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( '{{ api.naming.warehouse_package_name }}', ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" @@ -50,6 +50,7 @@ class {{ service.name }}Transport(abc.ABC): credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -68,6 +69,11 @@ class {{ service.name }}Transport(abc.ABC): scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -93,10 +99,10 @@ class {{ service.name }}Transport(abc.ABC): self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. 
- self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { {% for method in service.methods.values() -%} @@ -117,7 +123,7 @@ class {{ service.name }}Transport(abc.ABC): ), {%- endif %} default_timeout={{ method.timeout }}, - client_info=_client_info, + client_info=client_info, ), {% endfor %} {# precomputed wrappers loop #} } diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index f4a81c31f1c3..0952a1a7ca5b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -7,6 +7,7 @@ from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -24,7 +25,7 @@ from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} -from .base import {{ service.name }}Transport +from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO class {{ service.name }}GrpcTransport({{ service.name }}Transport): @@ -49,7 +50,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None) -> 
None: + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: """Instantiate the transport. Args: @@ -78,6 +81,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -127,6 +135,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index dbf71b29621d..7015b31cfbc4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -3,6 +3,7 @@ {% block content %} from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore @@ -23,7 +24,7 @@ from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} -from .base import {{ 
service.name }}Transport +from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .grpc import {{ service.name }}GrpcTransport @@ -91,6 +92,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -121,6 +123,11 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): is None. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -165,6 +172,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 2fc3923b2acc..ff14d7766d18 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -28,9 +28,7 @@ from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% endif -%} -{% if service.has_pagers -%} from google.api_core import gapic_v1 -{% endif -%} {% for method in service.methods.values() -%} 
{% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') @@ -127,6 +125,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -143,6 +142,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is @@ -159,6 +159,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -176,7 +177,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=client_cert_source_callback, quota_project_id=None, - + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -194,6 +195,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is @@ -211,6 +213,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + 
client_info=transports.base.DEFAULT_CLIENT_INFO, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has @@ -232,6 +235,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -255,6 +259,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -278,6 +283,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl api_mtls_endpoint=client.DEFAULT_ENDPOINT, client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -295,6 +301,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): api_mtls_endpoint="squid.clam.whelk", client_cert_source=None, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -1362,6 +1369,26 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): {% endwith -%} {% endfor -%} + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: + transport_class = {{ service.client_name }}.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + {% if opts.add_iam_methods %} def test_set_iam_policy(transport: str = "grpc"): client = {{ 
service.client_name }}( From 50689a5b1c6e4fb75807518c9880a39feb9125c3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 17 Aug 2020 16:11:13 -0700 Subject: [PATCH 0349/1339] chore: release 0.32.0 (#574) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5aa15e869ea4..0354048bc5c0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.32.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.31.1...v0.32.0) (2020-08-17) + + +### Features + +* allow user-provided client info ([#573](https://www.github.com/googleapis/gapic-generator-python/issues/573)) ([b2e5274](https://www.github.com/googleapis/gapic-generator-python/commit/b2e52746c7ce4b983482fb776224b30767978c79)), closes [googleapis/python-kms#37](https://www.github.com/googleapis/python-kms/issues/37) [#566](https://www.github.com/googleapis/gapic-generator-python/issues/566) + ### [0.31.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.31.0...v0.31.1) (2020-08-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 53733a04eda4..b6899141deb1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.31.1" +version = "0.32.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From a1c3cb1097338972079fe8c190c7fe657855dd23 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim 
<8822365+busunkim96@users.noreply.github.com> Date: Wed, 19 Aug 2020 11:08:24 -0700 Subject: [PATCH 0350/1339] fix: rename local var page in generated tests (#577) --- .../gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../gapic/%name_%version/%sub/test_%service.py.j2 | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e9d47ac8aeb0..4396dbce98a7 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -522,8 +522,8 @@ def test_{{ method.name|snake_case }}_pages(): RuntimeError, ) pages = list(client.{{ method.name|snake_case }}(request={}).pages) - for page, token in zip(pages, ['abc','def','ghi', '']): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} def test_{{ method.name|snake_case }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ff14d7766d18..f8d6af0bdbd5 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -838,8 +838,8 @@ def test_{{ method.name|snake_case }}_pages(): RuntimeError, ) pages = list(client.{{ method.name|snake_case }}(request={}).pages) - for page, 
token in zip(pages, ['abc','def','ghi', '']): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_async_pager(): @@ -928,10 +928,10 @@ async def test_{{ method.name|snake_case }}_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.{{ method.name|snake_case }}(request={})).pages: - pages.append(page) - for page, token in zip(pages, ['abc','def','ghi', '']): - assert page.raw_page.next_page_token == token + async for page_ in (await client.{{ method.name|snake_case }}(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} def test_{{ method.name|snake_case }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() From b45bbd8f3dee78f848bead4e778be60a46a3513e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 19 Aug 2020 11:17:29 -0700 Subject: [PATCH 0351/1339] chore: release 0.32.1 (#578) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0354048bc5c0..3e0494ded8e3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.32.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.0...v0.32.1) (2020-08-19) + + +### Bug Fixes + +* rename local var page in generated tests 
([#577](https://www.github.com/googleapis/gapic-generator-python/issues/577)) ([075f9e8](https://www.github.com/googleapis/gapic-generator-python/commit/075f9e8d50b02ffb5f2f042b84f27a9f634636e2)) + ## [0.32.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.31.1...v0.32.0) (2020-08-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index b6899141deb1..7f6d1132c874 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.32.0" +version = "0.32.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 408c7b69f391bfc9ccc2ca766eae1555ab3087b1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 20 Aug 2020 17:12:41 -0400 Subject: [PATCH 0352/1339] fix: add 'type: ignore' comment for 'google.auth' (#579) --- .../%name_%version/%sub/services/%service/transports/base.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 3ee35f8724cb..22ed87a1db11 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -5,7 +5,7 @@ import abc import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore From 91ff85f091e0ca4e335fef742eac6886306745fb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 24 Aug 2020 09:02:37 -0600 Subject: [PATCH 0353/1339] chore: release 0.32.2 (#580) --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3e0494ded8e3..5a8300c29448 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.32.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.1...v0.32.2) (2020-08-20) + + +### Bug Fixes + +* add 'type: ignore' comment for 'google.auth' ([#579](https://www.github.com/googleapis/gapic-generator-python/issues/579)) ([af17501](https://www.github.com/googleapis/gapic-generator-python/commit/af17501d258c7c37fc1081fcad5fe18f7629f4c3)) + ### [0.32.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.0...v0.32.1) (2020-08-19) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 7f6d1132c874..3d1d723b4d5c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.32.1" +version = "0.32.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 72b69fcbcc74b0d2aefe6a0a2b075ac0ce83fded Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 28 Aug 2020 10:08:04 -0700 Subject: [PATCH 0354/1339] fix: stabilize the order of resource helper methods and (#582) --- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- .../%name_%version/%sub/services/%service/async_client.py.j2 | 4 ++-- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git 
a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4396dbce98a7..9f1a0e6b4f02 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -748,7 +748,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): {% endif -%} -{% for message in service.resource_messages -%} +{% for message in service.resource_messages|sort(attribute="resource_type") -%} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index d6882d95f4d2..5d874c0d19e6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -38,9 +38,9 @@ class {{ service.async_client_name }}: DEFAULT_ENDPOINT = {{ service.client_name }}.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT - {% for message in service.resource_messages -%} + {% for message in service.resource_messages|sort(attribute="resource_type") -%} {{ message.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.{{ message.resource_type|snake_case }}_path) - + parse_{{ message.resource_type|snake_case}}_path = staticmethod({{ service.client_name 
}}.parse_{{ message.resource_type|snake_case }}_path) {% endfor %} from_service_account_file = {{ service.client_name }}.from_service_account_file diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f8d6af0bdbd5..8543eae2ab49 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1343,7 +1343,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_async_client(): {% endif -%} -{% for message in service.resource_messages -%} +{% for message in service.resource_messages|sort(attribute="resource_type") -%} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args -%} From 6818525b712580b7e149e51a870f58e25b3849f0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 28 Aug 2020 10:24:18 -0700 Subject: [PATCH 0355/1339] chore: release 0.32.3 (#583) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5a8300c29448..c2cc9e27dfea 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.32.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.2...v0.32.3) 
(2020-08-28) + + +### Bug Fixes + +* stabilize the order of resource helper methods and ([#582](https://www.github.com/googleapis/gapic-generator-python/issues/582)) ([7d2adde](https://www.github.com/googleapis/gapic-generator-python/commit/7d2adde3a1ae81ac88ced822d6dfdfb26ffbfdf0)) + ### [0.32.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.1...v0.32.2) (2020-08-20) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3d1d723b4d5c..9b9604e4fc90 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.32.2" +version = "0.32.3" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 14e3800242f182089a2928f3b34ecf0c509dec06 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 28 Aug 2020 10:36:39 -0700 Subject: [PATCH 0356/1339] chore: remove spurious circleci dependencies to speed up CI (#584) --- packages/gapic-generator/.circleci/config.yml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 86717ba169d7..17eed24bd111 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -23,23 +23,17 @@ workflows: - showcase-unit-3.6: requires: - unit-3.6 - - unit-3.7 - - unit-3.8 filters: tags: only: /^v\d+\.\d+\.\d+$/ - showcase-unit-3.7: requires: - - unit-3.6 - unit-3.7 - - unit-3.8 filters: tags: only: /^v\d+\.\d+\.\d+$/ - showcase-unit-3.8: requires: - - unit-3.6 - - unit-3.7 - unit-3.8 filters: tags: @@ -47,31 +41,23 @@ workflows: - showcase-unit-alternative-templates-3.6: requires: - unit-3.6 - - unit-3.7 - - unit-3.8 filters: tags: only: /^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.7: requires: - - unit-3.6 - unit-3.7 - - unit-3.8 filters: tags: only: 
/^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.8: requires: - - unit-3.6 - - unit-3.7 - unit-3.8 filters: tags: only: /^v\d+\.\d+\.\d+$/ - showcase-unit-add-iam-methods: requires: - - unit-3.6 - - unit-3.7 - unit-3.8 filters: tags: From 550307e4fde1d148a81a6f05d6145d8dfb39c732 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 3 Sep 2020 13:51:59 -0700 Subject: [PATCH 0357/1339] fix: rendering mock values for recursive messages no longer crashes (#587) Protobuf allows recursive message types, i.e. messages whose fields are of the same type as the message itself. message Foo { Foo foo = 1; // Degenerate case } A real world example is bigquery.v2.data:RowFilter These recursive types cause a problem when trying to render mock values for unit tests because there's no inherent limit on when to stop rendering nested values. The solution in this commit is an artifical cap on the depth of recursion in rendering mock values. --- .../gapic-generator/gapic/schema/wrappers.py | 27 +++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 54f5364fb739..7447607170d7 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -56,6 +56,9 @@ class Field: ) oneof: Optional[str] = None + # Arbitrary cap set via heuristic rule of thumb. + MAX_MOCK_DEPTH: int = 20 + def __getattr__(self, name): return getattr(self.field_pb, name) @@ -85,6 +88,17 @@ def map(self) -> bool: @utils.cached_property def mock_value(self) -> str: + depth = 0 + stack = [self] + answer = "{}" + while stack: + expr = stack.pop() + answer = answer.format(expr.inner_mock(stack, depth)) + depth += 1 + + return answer + + def inner_mock(self, stack, depth): """Return a repr of a valid, usually truthy mock value.""" # For primitives, send a truthy value computed from the # field name. 
@@ -113,9 +127,18 @@ def mock_value(self) -> str: answer = f'{self.type.ident}.{mock_value.name}' # If this is another message, set one value on the message. - if isinstance(self.type, MessageType) and len(self.type.fields): + if ( + not self.map # Maps are handled separately + and isinstance(self.type, MessageType) + and len(self.type.fields) + # Nested message types need to terminate eventually + and depth < self.MAX_MOCK_DEPTH + ): sub = next(iter(self.type.fields.values())) - answer = f'{self.type.ident}({sub.name}={sub.mock_value})' + stack.append(sub) + # Don't do the recursive rendering here, just set up + # where the nested value should go with the double {}. + answer = f'{self.type.ident}({sub.name}={{}})' if self.map: # Maps are a special case beacuse they're represented internally as From b7112b719875159fe9d7da0595f895752b0ee4db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 3 Sep 2020 13:59:14 -0700 Subject: [PATCH 0358/1339] chore: release 0.32.4 (#588) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c2cc9e27dfea..b16c910e0cd0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.32.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.3...v0.32.4) (2020-09-03) + + +### Bug Fixes + +* rendering mock values for recursive messages no longer crashes ([#587](https://www.github.com/googleapis/gapic-generator-python/issues/587)) ([c2a83e5](https://www.github.com/googleapis/gapic-generator-python/commit/c2a83e561bf46b4af21e9008c7d67a1c609d7d06)) + 
### [0.32.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.2...v0.32.3) (2020-08-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9b9604e4fc90..48380f858190 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.32.3" +version = "0.32.4" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 47f7688affcd5dd0260a0a97768a88509c99110e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Sep 2020 20:27:49 +0200 Subject: [PATCH 0359/1339] chore(deps): update dependency google-api-core to v1.22.2 (#590) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 09be3b3756c7..ed7f530ffa85 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.22.1 +google-api-core==1.22.2 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 873020f7eb03f63a195936abc0443f4942699d71 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 10 Sep 2020 12:36:38 -0700 Subject: [PATCH 0360/1339] feat: support mtls env variables (#589) Implement the two mtls env variables mentioned in https://google.aip.dev/auth/4114 New behavior summary: (1) GOOGLE_API_USE_CLIENT_CERTIFICATE env variable: Values: "true": use client cert if exists "false" (default): never use client cert, even if it exists or it is explicitly provided by user (2) GOOGLE_API_USE_MTLS_ENDPOINT env variable: Values: "never": use regular endpoint "always": use mtls endpoint "auto" (default): auto switch to mtls endpoint, if client cert exists and we are allowed to use it (controlled by 
GOOGLE_API_USE_CLIENT_CERTIFICATE) --- .../%sub/services/%service/client.py.j2 | 66 ++- .../services/%service/transports/grpc.py.j2 | 30 +- .../gapic/ads-templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 238 +++++----- .../%sub/services/%service/async_client.py.j2 | 19 +- .../%sub/services/%service/client.py.j2 | 66 ++- .../services/%service/transports/grpc.py.j2 | 42 +- .../%service/transports/grpc_asyncio.py.j2 | 45 +- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 429 ++++++++---------- .../gapic-generator/tests/system/conftest.py | 21 +- 11 files changed, 471 insertions(+), 489 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 4c5a58740574..89322d3e372b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -2,6 +2,7 @@ {% block content %} from collections import OrderedDict +from distutils import util import os import re from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union @@ -13,6 +14,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -151,16 +153,19 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): 
client_options (ClientOptions): Custom options for the client. It won't take effect unless ``transport`` is None. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -175,24 +180,40 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = ClientOptions.from_dict(client_options) if client_options is None: client_options = ClientOptions.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None - if transport is None and client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if has_client_cert_source else self.DEFAULT_ENDPOINT - ) + api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -212,9 +233,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): else: self._transport = {{ service.name }}GrpcTransport( credentials=credentials, - host=client_options.api_endpoint, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + host=api_endpoint, + ssl_channel_credentials=ssl_credentials, client_info=client_info, ) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index b25817b70b84..7fbf53e78c6b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -10,7 +10,6 @@ from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -40,8 +39,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -57,14 +55,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. 
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -82,27 +74,17 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # If a channel was explicitly provided, set it. self._grpc_channel = channel - elif api_mtls_endpoint: - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + else: + host = host if ":" in host else host + ":443" if credentials is None: credentials, _ = auth.default(scopes=self.AUTH_SCOPES) - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
self._grpc_channel = grpc_helpers.create_channel( host, credentials=credentials, - ssl_credentials=ssl_credentials, + ssl_credentials=ssl_channel_credentials, scopes=self.AUTH_SCOPES, ) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 70a168c254db..6587ebd21ec1 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -16,7 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core >= 1.17.0, < 2.0.0dev', + 'google-api-core >= 1.22.2, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 1.4.0', diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9f1a0e6b4f02..43a8e02814b4 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -103,91 +103,122 @@ def test_{{ service.client_name|snake_case }}_client_options(): grpc_transport.return_value = None client = {{ service.client_name }}(client_options=options) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, credentials=None, host="squid.clam.whelk", client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT + # is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, credentials=None, host=client.DEFAULT_ENDPOINT, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}() grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, credentials=None, host=client.DEFAULT_MTLS_ENDPOINT, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) - grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - credentials=None, - host=client.DEFAULT_MTLS_ENDPOINT, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = {{ service.client_name }}() - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - credentials=None, - host=client.DEFAULT_MTLS_ENDPOINT, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +@pytest.mark.parametrize("use_client_cert_env", ["true", "false"]) +def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + ssl_channel_creds = mock.Mock() + with mock.patch('grpc.ssl_channel_credentials', return_value=ssl_channel_creds): grpc_transport.return_value = None - client = {{ service.client_name }}() + client = {{ service.client_name }}(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + grpc_transport.assert_called_once_with( - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, credentials=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = {{ service.client_name }}() + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: + with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ssl_credentials_mock.return_value + + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + ssl_channel_credentials=expected_ssl_channel_creds, + credentials=None, + host=expected_host, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: + is_mtls_mock.return_value = False + grpc_transport.return_value = None + client = {{ service.client_name }}() + grpc_transport.assert_called_once_with( + ssl_channel_credentials=None, + credentials=None, + host=client.DEFAULT_ENDPOINT, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) def test_{{ service.client_name|snake_case }}_client_options_from_dict(): @@ -197,8 +228,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_options={'api_endpoint': 'squid.clam.whelk'} ) grpc_transport.assert_called_once_with( - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, credentials=None, host="squid.clam.whelk", client_info=transports.base.DEFAULT_CLIENT_INFO, @@ -564,6 +594,14 @@ def test_transport_grpc_default(): ) +def test_transport_adc(): + # Test default credentials are used if not provided. + with mock.patch.object(auth, 'default') as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.{{ service.name }}Transport() + adc.assert_called_once() + + def test_{{ service.name|snake_case }}_base_transport(): # Instantiate the base transport. 
with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as Transport: @@ -591,6 +629,15 @@ def test_{{ service.name|snake_case }}_base_transport(): {% endif %} +def test_{{ service.name|snake_case }}_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, 'default') as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.{{ service.name }}Transport() + adc.assert_called_once() + + def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, 'default') as adc: @@ -639,94 +686,13 @@ def test_{{ service.name|snake_case }}_host_with_port(): def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.insecure_channel('http://localhost/') - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
transport = transports.{{ service.name }}GrpcTransport( host="squid.clam.whelk", channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.{{ service.name }}GrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - ssl_credentials=mock_ssl_cred, - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %} - ), - ) - assert transport.grpc_channel == mock_grpc_channel - - -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. 
- mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.{{ service.name }}GrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - ssl_credentials=mock_ssl_cred, - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %} - ), - ) - assert transport.grpc_channel == mock_grpc_channel {% if service.has_lro -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 5d874c0d19e6..8dfeb1a0bd6a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -68,16 +68,19 @@ class {{ service.async_client_name }}: client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0aef4812bc4c..dbd5adb52247 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -2,6 +2,7 @@ {% block content %} from collections import OrderedDict +from distutils import util import os import re from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union @@ -13,6 +14,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -157,16 +159,19 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -181,24 +186,40 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = ClientOptions.from_dict(client_options) if client_options is None: client_options = ClientOptions.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if has_client_cert_source else self.DEFAULT_ENDPOINT - ) + api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -220,10 +241,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 0952a1a7ca5b..4283b12ad6c8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -12,7 +13,6 @@ from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore {% filter sort_lines -%} @@ -50,6 +50,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -71,14 +72,16 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ignored if ``channel`` is provided. 
channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -101,6 +104,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: + warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: @@ -125,6 +130,21 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -188,14 +208,6 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, '_grpc_channel'): - self._grpc_channel = self.create_channel( - self._host, - credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel {%- if service.has_lro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 7015b31cfbc4..aae858bf79b7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore @@ -8,6 +9,7 @@ from google.api_core import grpc_helpers_async # type: ignore {%- if service.has_lro %} from google.api_core import operations_v1 # type: ignore {%- endif %} +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -91,6 +93,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): channel: aio.Channel = None, api_mtls_endpoint: str = 
None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -113,14 +116,16 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -143,8 +148,13 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # If a channel was explicitly provided, set it. 
self._grpc_channel = channel elif api_mtls_endpoint: + warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -164,6 +174,21 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -184,14 +209,6 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, '_grpc_channel'): - self._grpc_channel = self.create_channel( - self._host, - credentials=self._credentials, - ) - # Return the channel from cache. 
return self._grpc_channel {%- if service.has_lro %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index b784adbdc02e..e2d1ad659790 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,7 +16,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.22.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.22.2, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.4.0', {%- if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 8543eae2ab49..4d5b93415152 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -122,15 +122,14 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class() @@ -139,15 +138,14 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class() @@ -156,71 +154,21 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -232,12 +180,99 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "true"), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "true"), + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "false"), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "false") +]) +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + ssl_channel_creds = mock.Mock() + with mock.patch('grpc.ssl_channel_credentials', return_value=ssl_channel_creds): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: + with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ssl_credentials_mock.return_value + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), @@ -256,13 +291,11 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - @pytest.mark.parametrize("client_class,transport_class,transport_name", [ ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") @@ -280,8 +313,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -298,8 +330,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - 
api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -997,6 +1028,18 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize("transport_class", [ + transports.{{ service.grpc_transport_name }}, + transports.{{ service.grpc_asyncio_transport_name }} +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, 'default') as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = {{ service.client_name }}( @@ -1067,6 +1110,15 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): ) +def test_{{ service.name|snake_case }}_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, 'default') as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.{{ service.name }}Transport() + adc.assert_called_once() + + def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, 'default') as adc: @@ -1116,195 +1168,104 @@ def test_{{ service.name|snake_case }}_host_with_port(): def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.insecure_channel('http://localhost/') - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. 
- callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.{{ service.name }}GrpcTransport( host="squid.clam.whelk", channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): channel = aio.insecure_channel('http://localhost/') - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.{{ service.name }}GrpcAsyncIOTransport( host="squid.clam.whelk", channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.{{ service.name }}GrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %} - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred +@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) +def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_source( + transport_class ): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.{{ service.name }}GrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %} - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert 
transport.grpc_channel == mock_grpc_channel -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_{{ service.name|snake_case }}_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint +@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) +def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( + transport_class ): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.{{ service.name }}GrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %} - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] -) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and 
client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - # Mock google.auth.transport.grpc.SslCredentials class. - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.{{ service.name }}GrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %} - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel {% if service.has_lro -%} diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 3a89ba763ba9..db3742aa04a4 100644 --- 
a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -71,16 +71,17 @@ def construct_client(client_class, transport="grpc", channel_creator=grpc.insecure_channel): if use_mtls: - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: - mock_ssl_cred.return_value = ssl_credentials - client = client_class( - credentials=credentials.AnonymousCredentials(), - client_options=client_options, - ) - mock_ssl_cred.assert_called_once_with( - certificate_chain=cert, private_key=key - ) - return client + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: + mock_ssl_cred.return_value = ssl_credentials + client = client_class( + credentials=credentials.AnonymousCredentials(), + client_options=client_options, + ) + mock_ssl_cred.assert_called_once_with( + certificate_chain=cert, private_key=key + ) + return client else: transport = client_class.get_transport_class(transport)( channel=channel_creator("localhost:7469") From 4d0115032bdb34beb0463a5496026aad27e89aa2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 10 Sep 2020 14:08:08 -0700 Subject: [PATCH 0361/1339] chore: release 0.33.0 (#591) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b16c910e0cd0..196cc1e7cce0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.33.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.4...v0.33.0) (2020-09-10) + + +### Features + +* support mtls env 
variables ([#589](https://www.github.com/googleapis/gapic-generator-python/issues/589)) ([b19026d](https://www.github.com/googleapis/gapic-generator-python/commit/b19026d9cca26ebd1cd0c3e73f738c4d1870d987)) + ### [0.32.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.3...v0.32.4) (2020-09-03) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 48380f858190..9cc3c25e86ca 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.32.4" +version = "0.33.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 449483062485d8510fcd0647bef8d5a1f6063236 Mon Sep 17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Tue, 15 Sep 2020 13:37:02 -0400 Subject: [PATCH 0362/1339] fix: Fix client template type hints (#593) Minor fix and cosmetic changes in client.py.j2 template --- .../%sub/services/%service/client.py.j2 | 38 +++++++++---------- .../%sub/services/%service/client.py.j2 | 34 ++++++++--------- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 89322d3e372b..8de258d60bab 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -5,18 +5,18 @@ from collections import OrderedDict from distutils import util import os import re -from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, 
Union +from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} @@ -134,9 +134,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %} def __init__(self, *, - credentials: credentials.Credentials = None, - transport: Union[str, {{ service.name }}Transport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, {{ service.name }}Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the {{ 
(service.client_name|snake_case).replace('_', ' ') }}. @@ -150,8 +150,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect unless ``transport`` is None. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -171,19 +171,19 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. - + Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - + ssl_credentials = None is_mtls = False if use_client_cert: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index dbd5adb52247..b598ac2526bf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -5,18 +5,18 @@ from collections import OrderedDict from distutils import util import os import re -from typing import Callable, Dict, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from 
google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore {% filter sort_lines -%} {% for method in service.methods.values() -%} @@ -140,9 +140,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %} def __init__(self, *, - credentials: credentials.Credentials = None, - transport: Union[str, {{ service.name }}Transport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, {{ service.name }}Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -156,8 +156,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -183,9 +183,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): creation failed for any reason. 
""" if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) From 4b017c22a61d33c3f18393098b8cbb654c9b7d09 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Sep 2020 10:57:57 -0700 Subject: [PATCH 0363/1339] chore: release 0.33.1 (#594) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 196cc1e7cce0..3950060f5437 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.0...v0.33.1) (2020-09-15) + + +### Bug Fixes + +* Fix client template type hints ([#593](https://www.github.com/googleapis/gapic-generator-python/issues/593)) ([93f34e8](https://www.github.com/googleapis/gapic-generator-python/commit/93f34e8a2a351a24a49424c1722baec2893dc764)) + ## [0.33.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.4...v0.33.0) (2020-09-10) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9cc3c25e86ca..8cfe3e6cc599 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.0" +version = "0.33.1" with 
io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From f3fe615355bebc7a2528f318e28d252d2ec80e43 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Sep 2020 15:55:24 -0400 Subject: [PATCH 0364/1339] fix: ignore types for imports generated from 'google.api_core' (#597) Closes #596. --- packages/gapic-generator/gapic/schema/imp.py | 2 +- packages/gapic-generator/tests/unit/schema/test_imp.py | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/imp.py b/packages/gapic-generator/gapic/schema/imp.py index eb5d8ee83007..ba53a91af18b 100644 --- a/packages/gapic-generator/gapic/schema/imp.py +++ b/packages/gapic-generator/gapic/schema/imp.py @@ -31,6 +31,6 @@ def __str__(self) -> str: answer = f"from {'.'.join(self.package)} {answer}" if self.alias: answer += f' as {self.alias}' - if self.module.endswith('_pb2'): + if self.module.endswith('_pb2') or 'api_core' in self.package: answer += ' # type: ignore' return answer diff --git a/packages/gapic-generator/tests/unit/schema/test_imp.py b/packages/gapic-generator/tests/unit/schema/test_imp.py index c27e4b876901..cb42b81d930f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_imp.py +++ b/packages/gapic-generator/tests/unit/schema/test_imp.py @@ -35,6 +35,11 @@ def test_str_untyped_pb2(): assert str(i) == 'from foo.bar import baz_pb2 as bacon # type: ignore' +def test_str_untyped_api_core(): + i = imp.Import(package=('foo', 'api_core'), module='baz', alias='bacon') + assert str(i) == 'from foo.api_core import baz as bacon # type: ignore' + + def test_str_eq(): i1 = imp.Import(package=('foo', 'bar'), module='baz') i2 = imp.Import(package=('foo', 'bar'), module='baz') From 2035703cd1110415ebc7f900bb9ba7737d569dbe Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Sep 2020 16:20:00 -0400 Subject: [PATCH 0365/1339] chore: release 0.33.2 (#598) 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3950060f5437..e13e55ff1602 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.1...v0.33.2) (2020-09-15) + + +### Bug Fixes + +* ignore types for imports generated from 'google.api_core' ([#597](https://www.github.com/googleapis/gapic-generator-python/issues/597)) ([8440e09](https://www.github.com/googleapis/gapic-generator-python/commit/8440e09855d399d647b62238a9697e04ea4d0d41)), closes [#596](https://www.github.com/googleapis/gapic-generator-python/issues/596) + ### [0.33.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.0...v0.33.1) (2020-09-15) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8cfe3e6cc599..36644091135a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.1" +version = "0.33.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 5cd2cfe2af245e77f02544201c25900bd6786a9f Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 15 Sep 2020 16:01:48 -0700 Subject: [PATCH 0366/1339] fix: module names can no longer collide with keywords or builtins (#595) E.g. 
protobuf/any.proto will be imported as from google.protobuf import any_pb2 as gp_any # Was previously 'as any' --- packages/gapic-generator/gapic/schema/api.py | 3 ++- .../gapic-generator/gapic/schema/metadata.py | 4 ++++ .../gapic/utils/reserved_names.py | 13 +++++++++- .../tests/unit/schema/test_metadata.py | 24 +++++++++++++++++-- 4 files changed, 40 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index db91c461594a..e303db8d8892 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -19,6 +19,7 @@ import collections import dataclasses +import keyword import os import sys from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple @@ -229,7 +230,7 @@ def disambiguate_keyword_fname( visited_names: Container[str]) -> str: path, fname = os.path.split(full_path) name, ext = os.path.splitext(fname) - if name in RESERVED_NAMES or full_path in visited_names: + if name in keyword.kwlist or full_path in visited_names: name += "_" full_path = os.path.join(path, name + ext) if full_path in visited_names: diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 349d7f13f9ad..9459bb5ef11e 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -34,6 +34,7 @@ from gapic.schema import imp from gapic.schema import naming from gapic.utils import cached_property +from gapic.utils import RESERVED_NAMES @dataclasses.dataclass(frozen=True) @@ -48,6 +49,9 @@ class Address: ) collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) + def __post_init__(self): + super().__setattr__("collisions", self.collisions | RESERVED_NAMES) + def __eq__(self, other) -> bool: return all([getattr(self, i) == getattr(other, i) for i in ('name', 'module', 'module_path', 'package', 
'parent')]) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index b146ce08e56f..866a867663f4 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -12,7 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. +import builtins +import itertools import keyword -RESERVED_NAMES = frozenset(keyword.kwlist) +# The filter and map builtins are a historical artifact; +# they are not used in modern, idiomatic python, +# nor are they used in the gapic surface. +# They are too useful to reserve. +RESERVED_NAMES = frozenset( + itertools.chain( + keyword.kwlist, + set(dir(builtins)) - {"filter", "map"}, + ) +) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index a693e295b877..62cd957cbdab 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -20,6 +20,7 @@ from gapic.schema import metadata from gapic.schema import naming +from gapic.utils import RESERVED_NAMES def test_address_str(): @@ -160,9 +161,28 @@ def test_address_subpackage_empty(): def test_metadata_with_context(): meta = metadata.Metadata() - assert meta.with_context( + collisions = meta.with_context( collisions={'foo', 'bar'}, - ).address.collisions == {'foo', 'bar'} + ).address.collisions - RESERVED_NAMES + assert collisions == {'foo', 'bar'} + + +def test_address_name_builtin_keyword(): + addr_builtin = metadata.Address( + name="Any", + module="any", + package=("google", "protobuf"), + api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), + ) + assert addr_builtin.module_alias == "gp_any" + + addr_kword = metadata.Address( + name="Class", + module="class", + package=("google", "protobuf"), + 
api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), + ) + assert addr_kword.module_alias == "gp_class" def test_doc_nothing(): From 57b231840cfd8f2578b32022cf2e28d65799f152 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Sep 2020 16:06:25 -0700 Subject: [PATCH 0367/1339] chore: release 0.33.3 (#599) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index e13e55ff1602..b31f5515c2fb 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.2...v0.33.3) (2020-09-15) + + +### Bug Fixes + +* module names can no longer collide with keywords or builtins ([#595](https://www.github.com/googleapis/gapic-generator-python/issues/595)) ([960d550](https://www.github.com/googleapis/gapic-generator-python/commit/960d550c4a8fd09b052cce785d76243a5d4525d7)) + ### [0.33.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.1...v0.33.2) (2020-09-15) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 36644091135a..ab32647cd9a0 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.2" +version = "0.33.3" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 210cdb3a6fe041c5b78e1feed3cf16b5ff75596b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 17 Sep 2020 09:06:38 -0700 Subject: [PATCH 0368/1339] fix: 'id' should not be a reserved name (#602) A number of 
python builtins collide with flattened fields from certain API methods. More common names, especially ones that conflict with builtins that aren't used by the surface, are explicitly allowed. --- packages/gapic-generator/gapic/utils/reserved_names.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 866a867663f4..14958fc52fbc 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -24,6 +24,6 @@ RESERVED_NAMES = frozenset( itertools.chain( keyword.kwlist, - set(dir(builtins)) - {"filter", "map"}, + set(dir(builtins)) - {"filter", "map", "id"}, ) ) From 0bc6dc05554350d91289f60a49f0f0084e8f315b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 17 Sep 2020 09:26:49 -0700 Subject: [PATCH 0369/1339] chore: release 0.33.4 (#603) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b31f5515c2fb..a844d1d5e807 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.3...v0.33.4) (2020-09-17) + + +### Bug Fixes + +* 'id' should not be a reserved name ([#602](https://www.github.com/googleapis/gapic-generator-python/issues/602)) ([c43c574](https://www.github.com/googleapis/gapic-generator-python/commit/c43c5740db099be19c5f6e52b3a917a631003411)) + ### [0.33.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.2...v0.33.3) (2020-09-15) diff --git 
a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ab32647cd9a0..5a3ccbc1ab95 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.3" +version = "0.33.4" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From c61400f6ce95f635674227126191baed6616c1f9 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Mon, 21 Sep 2020 16:42:51 -0700 Subject: [PATCH 0370/1339] chore: add CODEOWNERS and repo sync config (#607) --- packages/gapic-generator/.github/CODEOWNERS | 7 ++++ .../.github/sync-repo-settings.yaml | 38 +++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 packages/gapic-generator/.github/CODEOWNERS create mode 100644 packages/gapic-generator/.github/sync-repo-settings.yaml diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS new file mode 100644 index 000000000000..aaabff6ec140 --- /dev/null +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -0,0 +1,7 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. 
+ +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + +* @googleapis/actools @googleapis/yoshi-python diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml new file mode 100644 index 000000000000..d0e56dcae893 --- /dev/null +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -0,0 +1,38 @@ +rebaseMergeAllowed: true +squashMergeAllowed: true +mergeCommitAllowed: false +branchProtectionRules: +- pattern: master + isAdminEnforced: true + requiredStatusCheckContexts: + - 'ci/circleci: docs' + - 'ci/circleci: mypy' + - 'ci/circleci: showcase' + - 'ci/circleci: showcase-alternative-templates' + - 'ci/circleci: showcase-mtls' + - 'ci/circleci: showcase-mtls-alternative-templates' + - 'ci/circleci: showcase-mypy' + - 'ci/circleci: showcase-mypy-alternative-templates' + - 'ci/circleci: showcase-unit-3.6' + - 'ci/circleci: showcase-unit-3.7' + - 'ci/circleci: showcase-unit-3.8' + - 'ci/circleci: showcase-unit-add-iam-methods' + - 'ci/circleci: showcase-unit-alternative-templates-3.6' + - 'ci/circleci: showcase-unit-alternative-templates-3.7' + - 'ci/circleci: showcase-unit-alternative-templates-3.8' + - 'ci/circleci: style-check' + - 'ci/circleci: unit-3.6' + - 'ci/circleci: unit-3.7' + - 'ci/circleci: unit-3.8' + - 'cla/google' + - 'codecov/patch' + - 'codecov/project' + - 'conventionalcommits.org' + requiredApprovingReviewCount: 1 + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true +permissionRules: + - team: actools + permission: admin + - team: yoshi-python + permission: push From 804861bca2fd5c4be78d9410d272cca9107aee13 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 22 Sep 2020 12:49:57 -0700 Subject: [PATCH 0371/1339] fix: remove 'property' from reserved names (#613) The Python 'property' builtin name should not be reserved. 
--- packages/gapic-generator/gapic/utils/reserved_names.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 14958fc52fbc..3d1e9b445907 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -17,13 +17,11 @@ import keyword -# The filter and map builtins are a historical artifact; -# they are not used in modern, idiomatic python, -# nor are they used in the gapic surface. -# They are too useful to reserve. +# The exceptions to builtins are frequent and useful. +# They are explicitly allowed message, module, and field names. RESERVED_NAMES = frozenset( itertools.chain( keyword.kwlist, - set(dir(builtins)) - {"filter", "map", "id"}, + set(dir(builtins)) - {"filter", "map", "id", "property"}, ) ) From 3994cb8f036c85a42b0067503ed6ccc356f99b59 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 22 Sep 2020 12:55:11 -0700 Subject: [PATCH 0372/1339] chore: release 0.33.5 (#614) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a844d1d5e807..c89b8a9b00f9 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.4...v0.33.5) (2020-09-22) + + +### Bug Fixes + +* remove 'property' from reserved names ([#613](https://www.github.com/googleapis/gapic-generator-python/issues/613)) 
([8338a51](https://www.github.com/googleapis/gapic-generator-python/commit/8338a51a81f5f5b8ebacf68c8e46d3e1804d3f8b)) + ### [0.33.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.3...v0.33.4) (2020-09-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5a3ccbc1ab95..6d4509a45702 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.4" +version = "0.33.5" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From eb958327bcc43b0c30d75e62a0230fd06b80a718 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 22 Sep 2020 14:20:57 -0700 Subject: [PATCH 0373/1339] fix: operation module is properly aliased if necessary (#615) Some APIs define their own module named 'operation' that naively clashes with google.api_core.operation. Both modules are imported with a disambiguating alias, but the alias was not always referenced for the api_core submodule. This change fixes that issue. 
Fix for #610 --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 8de258d60bab..2acc770d9160 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -369,7 +369,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- if method.lro %} # Wrap the response in an operation future. - response = operation.from_gapic( + response = {{ method.client_output.ident.module_alias or method.client_output.ident.module }}.from_gapic( response, self._transport.operations_client, {{ method.lro.response_type.ident }}, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 8dfeb1a0bd6a..7368bf6943f4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -240,7 +240,7 @@ class {{ service.async_client_name }}: {%- if method.lro %} # Wrap the response in an operation future. 
- response = operation_async.from_gapic( + response = {{ method.client_output_async.ident.module_alias or method.client_output_async.ident.module }}.from_gapic( response, self._client._transport.operations_client, {{ method.lro.response_type.ident }}, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index b598ac2526bf..b75384836178 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -381,7 +381,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- if method.lro %} # Wrap the response in an operation future. - response = operation.from_gapic( + response = {{ method.client_output.ident.module_alias or method.client_output.ident.module }}.from_gapic( response, self._transport.operations_client, {{ method.lro.response_type.ident }}, From b3cdc0f119f5cf05bd32e5d1f58b3e7f9cf25e1e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 22 Sep 2020 14:26:11 -0700 Subject: [PATCH 0374/1339] chore: release 0.33.6 (#616) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c89b8a9b00f9..86729871c6a3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.5...v0.33.6) (2020-09-22) + + +### Bug Fixes + +* operation module 
is properly aliased if necessary ([#615](https://www.github.com/googleapis/gapic-generator-python/issues/615)) ([8f92fd9](https://www.github.com/googleapis/gapic-generator-python/commit/8f92fd9999286ef3f916119be78dbeb838a15550)), closes [#610](https://www.github.com/googleapis/gapic-generator-python/issues/610) + ### [0.33.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.4...v0.33.5) (2020-09-22) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 6d4509a45702..4ce942227c64 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.5" +version = "0.33.6" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 232436f8e650bf86285029472858c7f321f292b5 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 24 Sep 2020 11:32:03 -0700 Subject: [PATCH 0375/1339] fix: retriable exceptions are deterministically ordered in GAPICs (#619) Certain code patterns in the generated surface are created by iteration over data collections. This iteration order should be deterministic in order to prevent spurious deltas in the generated surface. 
--- .../%version/%sub/services/%service/transports/base.py.j2 | 4 +--- .../%name_%version/%sub/services/%service/async_client.py.j2 | 4 +--- .../%sub/services/%service/transports/base.py.j2 | 4 +--- 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 32e6c11f9fb2..2053e9fe4faf 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -89,11 +89,9 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- filter sort_lines %} - {%- for ex in method.retry.retryable_exceptions %} + {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, {%- endfor %} - {%- endfilter %} ), ), {%- endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 7368bf6943f4..95a250479f21 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -199,11 +199,9 @@ class {{ service.async_client_name }}: {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if 
method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- filter sort_lines %} - {%- for ex in method.retry.retryable_exceptions %} + {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, {%- endfor %} - {%- endfilter %} ), ), {%- endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 22ed87a1db11..08b5c4b20b83 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -114,11 +114,9 @@ class {{ service.name }}Transport(abc.ABC): {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- filter sort_lines %} - {%- for ex in method.retry.retryable_exceptions %} + {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, {%- endfor %} - {%- endfilter %} ), ), {%- endif %} From d1c74fe69ea32a8f494b591f90b32db06290f0df Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 24 Sep 2020 13:00:07 -0700 Subject: [PATCH 0376/1339] chore: release 0.33.7 (#620) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 
86729871c6a3..c0d0e37dba95 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.6...v0.33.7) (2020-09-24) + + +### Bug Fixes + +* retriable exceptions are deterministically ordered in GAPICs ([#619](https://www.github.com/googleapis/gapic-generator-python/issues/619)) ([f7b1164](https://www.github.com/googleapis/gapic-generator-python/commit/f7b11640b74d8c64747b33783976d6e0ab9c61c4)) + ### [0.33.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.5...v0.33.6) (2020-09-22) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 4ce942227c64..f5e1d36cdd75 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.6" +version = "0.33.7" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 79e56156649fc4dcb1507dbfcb201096b8a69c9e Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Fri, 25 Sep 2020 14:41:30 -0700 Subject: [PATCH 0377/1339] fix: handle repeated fields in method signatures (#445) Co-authored-by: Dov Shlachter --- .../%name_%version/%sub/services/%service/client.py.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index b75384836178..ccc4aa85f083 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -338,12 +338,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # 
If we have keyword arguments corresponding to fields on the # request, apply these. {% endif -%} - {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + {%- for key, field in method.flattened_fields.items() if not(field.repeated or method.input.ident.package != method.ident.package) %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} {# They can be _extended_, however -#} - {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + {%- for key, field in method.flattened_fields.items() if field.repeated %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) {%- endfor %} From 1983e524cd13e355ea526f052a5e0b36a84042cd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 25 Sep 2020 16:07:34 -0700 Subject: [PATCH 0378/1339] chore: release 0.33.8 (#621) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c0d0e37dba95..98fd68593dd4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.33.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.7...v0.33.8) (2020-09-25) + + +### Bug Fixes + +* handle repeated fields in method signatures ([#445](https://www.github.com/googleapis/gapic-generator-python/issues/445)) ([3aae799](https://www.github.com/googleapis/gapic-generator-python/commit/3aae799f62a1f5d3b0506d919cc6080ee417f14b)) + ### [0.33.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.6...v0.33.7) 
(2020-09-24) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f5e1d36cdd75..d659a5e48845 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.7" +version = "0.33.8" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 29639a2654a1347a51e2158696c3d1f855711246 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 29 Sep 2020 09:49:27 -0700 Subject: [PATCH 0379/1339] feat: add support for common resource paths (#622) Google Cloud defines a small set of common resources that do not belong to specific APIs or message types. All generated service clients now contain helper methods that allow construction and parsing of these paths. See https://github.com/googleapis/googleapis/blob/master/google/cloud/common_resources.proto for the list of common resources for Google Cloud. --- .../%sub/services/%service/client.py.j2 | 13 +++++ .../%name_%version/%sub/test_%service.py.j2 | 27 +++++++++- .../gapic-generator/gapic/schema/wrappers.py | 51 ++++++++++++++++++- .../%sub/services/%service/async_client.py.j2 | 4 ++ .../%sub/services/%service/client.py.j2 | 15 +++++- .../%name_%version/%sub/test_%service.py.j2 | 27 +++++++++- .../unit/schema/wrappers/test_service.py | 41 +++++++++++++++ 7 files changed, 171 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 2acc770d9160..db181fabb8c8 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -132,6 +132,19 @@ class {{ service.client_name 
}}(metaclass={{ service.client_name }}Meta): m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} {% endfor %} + {% for resource_msg in service.common_resources|sort(attribute="type_name") -%} + @staticmethod + def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: + """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" + return "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + + @staticmethod + def parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: + """Parse a {{ resource_msg.message_type.resource_type|snake_case }} path into its component segments.""" + m = re.match(r"{{ resource_msg.message_type.path_regex_str }}", path) + return m.groupdict() if m else {} + + {% endfor %} {# common resources #} def __init__(self, *, credentials: Optional[credentials.Credentials] = None, diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 43a8e02814b4..a342db6f1cb8 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -714,8 +714,8 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): {% endif -%} -{% for message in service.resource_messages|sort(attribute="resource_type") -%} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} +{% for message in 
service.resource_messages|sort(attribute="resource_type") -%} def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args -%} {{ arg }} = "{{ molluscs.next() }}" @@ -737,8 +737,31 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): actual = {{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path(path) assert expected == actual -{% endwith -%} {% endfor -%} +{% for resource_msg in service.common_resources -%} +def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): + {% for arg in resource_msg.message_type.resource_path_args -%} + {{ arg }} = "{{ molluscs.next() }}" + {% endfor %} + expected = "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + actual = {{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path({{ resource_msg.message_type.resource_path_args|join(", ") }}) + assert expected == actual + + +def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): + expected = { + {% for arg in resource_msg.message_type.resource_path_args -%} + "{{ arg }}": "{{ molluscs.next() }}", + {% endfor %} + } + path = {{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path(**expected) + + # Check that the path construction is reversible. 
+ actual = {{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path) + assert expected == actual + +{% endfor -%} {# common resources#} +{% endwith -%} {# cycler #} def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 7447607170d7..68ec4e2316ff 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -31,8 +31,8 @@ import dataclasses import re from itertools import chain -from typing import (cast, Dict, FrozenSet, Iterable, List, Mapping, Optional, - Sequence, Set, Tuple, Union) +from typing import (cast, Dict, FrozenSet, Iterable, List, Mapping, + ClassVar, Optional, Sequence, Set, Tuple, Union) from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 @@ -855,6 +855,26 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': ) +@dataclasses.dataclass(frozen=True) +class CommonResource: + type_name: str + pattern: str + + @utils.cached_property + def message_type(self): + message_pb = descriptor_pb2.DescriptorProto() + res_pb = message_pb.options.Extensions[resource_pb2.resource] + res_pb.type = self.type_name + res_pb.pattern.append(self.pattern) + + return MessageType( + message_pb=message_pb, + fields={}, + nested_enums={}, + nested_messages={}, + ) + + @dataclasses.dataclass(frozen=True) class Service: """Description of a service (defined with the ``service`` keyword).""" @@ -864,6 +884,33 @@ class Service: default_factory=metadata.Metadata, ) + common_resources: ClassVar[Sequence[CommonResource]] = dataclasses.field( + default=( + CommonResource( + "cloudresourcemanager.googleapis.com/Project", + "projects/{project}", + ), + CommonResource( + "cloudresourcemanager.googleapis.com/Organization", + "organizations/{organization}", + ), + CommonResource( 
+ "cloudresourcemanager.googleapis.com/Folder", + "folders/{folder}", + ), + CommonResource( + "cloudbilling.googleapis.com/BillingAccount", + "billingAccounts/{billing_account}", + ), + CommonResource( + "locations.googleapis.com/Location", + "projects/{project}/locations/{location}", + ), + ), + init=False, + compare=False, + ) + def __getattr__(self, name): return getattr(self.service_pb, name) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 95a250479f21..772643b448f6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -42,6 +42,10 @@ class {{ service.async_client_name }}: {{ message.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.{{ message.resource_type|snake_case }}_path) parse_{{ message.resource_type|snake_case}}_path = staticmethod({{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path) {% endfor %} + {% for resource_msg in service.common_resources %} + common_{{ resource_msg.message_type.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path) + parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path) + {% endfor %} from_service_account_file = {{ service.client_name }}.from_service_account_file from_service_account_json = from_service_account_file diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index ccc4aa85f083..d70979251441 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -137,7 +137,20 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Parse a {{ message.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} - {% endfor %} + {% endfor %} {# resources #} + {% for resource_msg in service.common_resources|sort(attribute="type_name") -%} + @staticmethod + def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: + """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" + return "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + + @staticmethod + def parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: + """Parse a {{ resource_msg.message_type.resource_type|snake_case }} path into its component segments.""" + m = re.match(r"{{ resource_msg.message_type.path_regex_str }}", path) + return m.groupdict() if m else {} + + {% endfor %} {# common resources #} def __init__(self, *, credentials: Optional[credentials.Credentials] = None, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4d5b93415152..c74d49c1024e 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1304,8 +1304,8 @@ def test_{{ service.name|snake_case }}_grpc_lro_async_client(): {% endif -%} -{% for message in service.resource_messages|sort(attribute="resource_type") -%} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} +{% for message in service.resource_messages|sort(attribute="resource_type") -%} def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args -%} {{ arg }} = "{{ molluscs.next() }}" @@ -1327,8 +1327,31 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): actual = {{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path(path) assert expected == actual -{% endwith -%} {% endfor -%} +{% for resource_msg in service.common_resources -%} +def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): + {% for arg in resource_msg.message_type.resource_path_args -%} + {{ arg }} = "{{ molluscs.next() }}" + {% endfor %} + expected = "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + actual = {{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path({{ resource_msg.message_type.resource_path_args|join(", ") }}) + assert expected == actual + + +def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): + expected = { + {% for arg in resource_msg.message_type.resource_path_args -%} + "{{ arg }}": "{{ molluscs.next() }}", + {% endfor %} + } + path = {{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path(**expected) + + # Check that the path construction is 
reversible. + actual = {{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path) + assert expected == actual + +{% endfor -%} {# common resources#} +{% endwith -%} {# cycler #} def test_client_withDEFAULT_CLIENT_INFO(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 8502617b5d3e..d733d1fa2d4c 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -20,6 +20,7 @@ from google.protobuf import descriptor_pb2 from gapic.schema import imp +from gapic.schema.wrappers import CommonResource from test_utils.test_utils import ( get_method, @@ -295,3 +296,43 @@ def test_has_pagers(): ), ) assert not other_service.has_pagers + + +def test_default_common_resources(): + service = make_service(name="MolluscMaker") + + assert service.common_resources == ( + CommonResource( + "cloudresourcemanager.googleapis.com/Project", + "projects/{project}", + ), + CommonResource( + "cloudresourcemanager.googleapis.com/Organization", + "organizations/{organization}", + ), + CommonResource( + "cloudresourcemanager.googleapis.com/Folder", + "folders/{folder}", + ), + CommonResource( + "cloudbilling.googleapis.com/BillingAccount", + "billingAccounts/{billing_account}", + ), + CommonResource( + "locations.googleapis.com/Location", + "projects/{project}/locations/{location}", + ), + ) + + +def test_common_resource_patterns(): + species = CommonResource( + "nomenclature.linnaen.com/Species", + "families/{family}/genera/{genus}/species/{species}", + ) + species_msg = species.message_type + + assert species_msg.resource_path == "families/{family}/genera/{genus}/species/{species}" + assert species_msg.resource_type == "Species" + assert species_msg.resource_path_args == ["family", "genus", "species"] + assert species_msg.path_regex_str == 
'^families/(?P<family>.+?)/genera/(?P<genus>.+?)/species/(?P<species>.+?)$' From 76405a40281701f10c7e62ac6836bea5fbec7d6e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 29 Sep 2020 10:21:12 -0700 Subject: [PATCH 0380/1339] chore: release 0.34.0 (#623) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 98fd68593dd4..59771e6f6ddc 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.34.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.8...v0.34.0) (2020-09-29) + + +### Features + +* add support for common resource paths ([#622](https://www.github.com/googleapis/gapic-generator-python/issues/622)) ([15a7fde](https://www.github.com/googleapis/gapic-generator-python/commit/15a7fdeb966cb64a742b6305d2c71dd3d485d0f9)) + ### [0.33.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.7...v0.33.8) (2020-09-25) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d659a5e48845..02a1ce51853d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.33.8" +version = "0.34.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 447ab18539b17f9e0de8a265c369a8b50c423756 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 30 Sep 2020 13:16:18 -0600 Subject: [PATCH 0381/1339] fix: fix typo attribue -> attribute (#627) Closes #626 --- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 772643b448f6..14be05325ddf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -203,7 +203,7 @@ class {{ service.async_client_name }}: {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- for ex in method.retry.retryable_exceptions|sort(attribue='__name__') %} + {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, {%- endfor %} ), From 8eb82615729830a902a053c7f5646278a41ecc9c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Sep 2020 13:24:30 -0600 Subject: [PATCH 0382/1339] chore: release 0.34.1 (#628) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 59771e6f6ddc..17ddf887123f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.34.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.0...v0.34.1) (2020-09-30) + + +### Bug Fixes + +* fix typo attribue -> attribute 
([#627](https://www.github.com/googleapis/gapic-generator-python/issues/627)) ([729146f](https://www.github.com/googleapis/gapic-generator-python/commit/729146fd53edf1e4ae4d3c9a90640a7520b1ba9d)), closes [#626](https://www.github.com/googleapis/gapic-generator-python/issues/626) + ## [0.34.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.8...v0.34.0) (2020-09-29) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 02a1ce51853d..51a9941de7ff 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.34.0" +version = "0.34.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 78c034932c9a8f8a27278036871ad5acd3263c57 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 30 Sep 2020 14:04:17 -0700 Subject: [PATCH 0383/1339] fix: resource messages in method response types generate helpers (#629) Some resource messages are only referenced in method responses, either directly (the method returns a resource) or indirectly (the resource is a field for some other message). These response-resources now generate helper methods in client classes. Contains a minor formatting fix in the generated output, a minor fix in an error message, and a tweak to nox.py to aid interactive debugging. 
--- packages/gapic-generator/gapic/schema/api.py | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 11 +++- .../%sub/services/%service/client.py.j2 | 1 + packages/gapic-generator/noxfile.py | 19 ++++--- .../unit/schema/wrappers/test_service.py | 57 ++++++++++++++++--- 5 files changed, 71 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index e303db8d8892..a0c878517ced 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -460,7 +460,7 @@ def __init__( else: raise TypeError( f"Unknown type referenced in " - "{self.file_descriptor.name}: '{key}'" + f"{self.file_descriptor.name}: '{key}'" ) # Only generate the service if this is a target file to be generated. diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 68ec4e2316ff..8fc4b8d71298 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1012,7 +1012,7 @@ def names(self) -> FrozenSet[str]: @utils.cached_property def resource_messages(self) -> FrozenSet[MessageType]: """Returns all the resource message types used in all - request fields in the service.""" + request and response fields in the service.""" def gen_resources(message): if message.resource_path: yield message @@ -1022,9 +1022,14 @@ def gen_resources(message): yield type_ return frozenset( - resource_msg + msg for method in self.methods.values() - for resource_msg in gen_resources(method.input) + for msg in chain( + gen_resources(method.input), + gen_resources( + method.lro.response_type if method.lro else method.output + ), + ) ) @utils.cached_property diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 
d70979251441..67cb2856d62f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -137,6 +137,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Parse a {{ message.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} + {% endfor %} {# resources #} {% for resource_msg in service.common_resources|sort(attribute="type_name") -%} @staticmethod diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index abaef7fa0c8f..47880e3d1c50 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -37,13 +37,18 @@ def unit(session): session.run( "py.test", - "-vv", - "-n=auto", - "--cov=gapic", - "--cov-config=.coveragerc", - "--cov-report=term", - "--cov-report=html", - *(session.posargs or [path.join("tests", "unit")]), + *( + session.posargs + or [ + "-vv", + "-n=auto", + "--cov=gapic", + "--cov-config=.coveragerc", + "--cov-report=term", + "--cov-report=html", + path.join("tests", "unit"), + ] + ), ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index d733d1fa2d4c..28c7cbe5e775 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -32,6 +32,24 @@ make_service_with_method_options, ) +################################ +# Helper Functions +################################ + + +def make_resource_opts(*args): + # Resources are labeled via an options extension + opts = descriptor_pb2.MessageOptions() + opts.Extensions[resource_pb2.resource].pattern.append( + "/".join("{{{arg}}}/{arg}" for arg in args) + ) + return 
opts + + +################################ +# End Helper Functions +################################ + def test_service_properties(): service = make_service(name='ThingDoer') @@ -162,14 +180,6 @@ def test_module_name(): def test_resource_messages(): - # Resources are labeled via an options extension - def make_resource_opts(*args): - opts = descriptor_pb2.MessageOptions() - opts.Extensions[resource_pb2.resource].pattern.append( - "/".join("{{{arg}}}/{arg}" for arg in args) - ) - return opts - # Regular, top level resource squid_resource = make_message("Squid", options=make_resource_opts("squid")) squid_request = make_message( @@ -336,3 +346,34 @@ def test_common_resource_patterns(): assert species_msg.resource_type == "Species" assert species_msg.resource_path_args == ["family", "genus", "species"] assert species_msg.path_regex_str == '^families/(?P<family>.+?)/genera/(?P<genus>.+?)/species/(?P<species>.+?)$' + + +def test_resource_response(): + # Top level response resource + squid_resource = make_message("Squid", options=make_resource_opts("squid")) + squid_request = make_message("CreateSquidRequest") + + # Nested response resource + clam_resource = make_message("Clam", options=make_resource_opts("clam")) + clam_response = make_message( + "CreateClamResponse", + fields=( + make_field('clam', message=clam_resource), + ), + ) + clam_request = make_message("CreateClamRequest") + + mollusc_service = make_service( + "MolluscService", + methods=( + make_method(f"{request.name}", request, response) + for request, response in ( + (squid_request, squid_resource), + (clam_request, clam_response), + ) + ), + ) + + expected = {squid_resource, clam_resource} + actual = mollusc_service.resource_messages + assert expected == actual From fa1d51b40f87873324ae0d8d4ec6881ff1c3309e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Sep 2020 14:28:38 -0700 Subject: [PATCH 0384/1339] chore: release 0.34.2 (#630) Co-authored-by:
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 17ddf887123f..a4ee51ff147f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.34.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.1...v0.34.2) (2020-09-30) + + +### Bug Fixes + +* resource messages in method response types generate helpers ([#629](https://www.github.com/googleapis/gapic-generator-python/issues/629)) ([52bfd6d](https://www.github.com/googleapis/gapic-generator-python/commit/52bfd6d5d5821b33e78e6b9867a3be2865cdbc74)) + ### [0.34.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.0...v0.34.1) (2020-09-30) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 51a9941de7ff..7d6c9685ae6c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.34.1" +version = "0.34.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From a4c3726c6d9315c5864a6189ff90daa5e26fd89e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 5 Oct 2020 17:32:43 +0200 Subject: [PATCH 0385/1339] chore(deps): update dependency google-api-core to v1.22.3 (#634) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ed7f530ffa85..11484480502c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.22.2 
+google-api-core==1.22.3 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 3a9bfaed90d2178f2f74ba9b334b85846fd7b98e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Oct 2020 00:10:59 +0200 Subject: [PATCH 0386/1339] chore(deps): update dependency google-api-core to v1.22.4 (#636) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 11484480502c..2422d03832ce 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.22.3 +google-api-core==1.22.4 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From cfad9ec665b4d42fbdbb6ce5a69d39d9ff3ac2ed Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 7 Oct 2020 11:16:49 -0700 Subject: [PATCH 0387/1339] refactor: move generator.options into utils module (#639) This removes a circular import that made targeted unit testing difficult. 
--- .../docs/reference/generator.rst | 3 -- .../gapic-generator/gapic/cli/generate.py | 4 +-- .../gapic/generator/generator.py | 10 +++--- packages/gapic-generator/gapic/schema/api.py | 8 ++--- .../gapic-generator/gapic/schema/naming.py | 4 +-- .../gapic-generator/gapic/utils/__init__.py | 2 ++ .../gapic/{generator => utils}/options.py | 0 .../tests/unit/generator/test_generator.py | 34 +++++++++---------- .../tests/unit/generator/test_options.py | 28 +++++++-------- .../tests/unit/schema/test_api.py | 4 +-- .../tests/unit/schema/test_naming.py | 18 +++++----- 11 files changed, 57 insertions(+), 58 deletions(-) rename packages/gapic-generator/gapic/{generator => utils}/options.py (100%) diff --git a/packages/gapic-generator/docs/reference/generator.rst b/packages/gapic-generator/docs/reference/generator.rst index 088a09232674..eb2192fb2c05 100644 --- a/packages/gapic-generator/docs/reference/generator.rst +++ b/packages/gapic-generator/docs/reference/generator.rst @@ -5,6 +5,3 @@ generator .. automodule:: gapic.generator.generator :members: - -.. automodule:: gapic.generator.options - :members: diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index 9ca2f8217467..b0e50ccf2e32 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -22,7 +22,7 @@ from gapic import generator from gapic.schema import api -from gapic.generator import options +from gapic.utils import Options @click.command() @@ -41,7 +41,7 @@ def generate( req = plugin_pb2.CodeGeneratorRequest.FromString(request.read()) # Pull apart arguments in the request. - opts = options.Options.build(req.parameter) + opts = Options.build(req.parameter) # Determine the appropriate package. 
# This generator uses a slightly different mechanism for determining diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 8cc380e10550..5bd8a4f3c928 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -22,10 +22,10 @@ from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg from gapic.samplegen_utils.types import DuplicateSample from gapic.samplegen import manifest, samplegen -from gapic.generator import options from gapic.generator import formatter from gapic.schema import api from gapic import utils +from gapic.utils import Options from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse @@ -43,7 +43,7 @@ class Generator: this application are used. """ - def __init__(self, opts: options.Options) -> None: + def __init__(self, opts: Options) -> None: # Create the jinja environment with which to render templates. self._env = jinja2.Environment( loader=jinja2.FileSystemLoader(searchpath=opts.templates), @@ -61,7 +61,7 @@ def __init__(self, opts: options.Options) -> None: self._sample_configs = opts.sample_configs def get_response( - self, api_schema: api.API, opts: options.Options + self, api_schema: api.API, opts: Options ) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. @@ -209,7 +209,7 @@ def _generate_samples_and_manifest( return output_files def _render_template( - self, template_name: str, *, api_schema: api.API, opts: options.Options, + self, template_name: str, *, api_schema: api.API, opts: Options, ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. 
@@ -297,7 +297,7 @@ def _get_file( self, template_name: str, *, - opts: options.Options, + opts: Options, api_schema=api.API, **context: Mapping, ): diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index a0c878517ced..92c2b741c0b8 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -30,12 +30,12 @@ import grpc # type: ignore -from gapic.generator import options from gapic.schema import metadata from gapic.schema import wrappers from gapic.schema import naming as api_naming from gapic.utils import cached_property from gapic.utils import nth +from gapic.utils import Options from gapic.utils import to_snake_case from gapic.utils import RESERVED_NAMES @@ -60,7 +60,7 @@ def __getattr__(self, name: str): def build( cls, file_descriptor: descriptor_pb2.FileDescriptorProto, file_to_generate: bool, naming: api_naming.Naming, - opts: options.Options = options.Options(), + opts: Options = Options(), prior_protos: Mapping[str, 'Proto'] = None, load_services: bool = True ) -> 'Proto': @@ -201,7 +201,7 @@ def build( cls, file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], package: str = '', - opts: options.Options = options.Options(), + opts: Options = Options(), prior_protos: Mapping[str, 'Proto'] = None, ) -> 'API': """Build the internal API schema based on the request. 
@@ -388,7 +388,7 @@ def __init__( file_descriptor: descriptor_pb2.FileDescriptorProto, file_to_generate: bool, naming: api_naming.Naming, - opts: options.Options = options.Options(), + opts: Options = Options(), prior_protos: Mapping[str, Proto] = None, load_services: bool = True ): diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 21a075b95d94..c591ad59cca3 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -21,7 +21,7 @@ from google.protobuf import descriptor_pb2 from gapic import utils -from gapic.generator import options +from gapic.utils import Options # See https://github.com/python/mypy/issues/5374 for details on the mypy false # positive. @@ -50,7 +50,7 @@ def __post_init__(self): @staticmethod def build( *file_descriptors: descriptor_pb2.FileDescriptorProto, - opts: options.Options = options.Options(), + opts: Options = Options(), ) -> 'Naming': """Return a full Naming instance based on these file descriptors. 
diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 905fcbdec216..9719a8f7a2c7 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -22,6 +22,7 @@ from gapic.utils.filename import to_valid_module_name from gapic.utils.lines import sort_lines from gapic.utils.lines import wrap +from gapic.utils.options import Options from gapic.utils.reserved_names import RESERVED_NAMES from gapic.utils.rst import rst @@ -31,6 +32,7 @@ 'doc', 'empty', 'nth', + 'Options', 'partition', 'RESERVED_NAMES', 'rst', diff --git a/packages/gapic-generator/gapic/generator/options.py b/packages/gapic-generator/gapic/utils/options.py similarity index 100% rename from packages/gapic-generator/gapic/generator/options.py rename to packages/gapic-generator/gapic/utils/options.py diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index ede00ba51a2b..6a120bbe1df9 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -23,16 +23,16 @@ from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse from gapic.generator import generator -from gapic.generator import options from gapic.samplegen_utils import types, yaml from gapic.schema import api from gapic.schema import naming from gapic.schema import wrappers +from gapic.utils import Options def test_custom_template_directory(): # Create a generator. - opts = options.Options.build("python-gapic-templates=/templates/") + opts = Options.build("python-gapic-templates=/templates/") g = generator.Generator(opts) # Assert that the Jinja loader will pull from the correct location. 
@@ -46,7 +46,7 @@ def test_get_response(): with mock.patch.object(jinja2.Environment, "get_template") as gt: gt.return_value = jinja2.Template("I am a template result.") cgr = g.get_response(api_schema=make_api(), - opts=options.Options.build("")) + opts=Options.build("")) lt.assert_called_once() gt.assert_has_calls( [ @@ -66,7 +66,7 @@ def test_get_response_ignores_empty_files(): with mock.patch.object(jinja2.Environment, "get_template") as gt: gt.return_value = jinja2.Template("# Meaningless comment") cgr = g.get_response(api_schema=make_api(), - opts=options.Options.build("")) + opts=Options.build("")) lt.assert_called_once() gt.assert_has_calls( [ @@ -88,7 +88,7 @@ def test_get_response_ignores_private_files(): with mock.patch.object(jinja2.Environment, "get_template") as gt: gt.return_value = jinja2.Template("I am a template result.") cgr = g.get_response(api_schema=make_api(), - opts=options.Options.build("")) + opts=Options.build("")) lt.assert_called_once() gt.assert_has_calls( [ @@ -110,7 +110,7 @@ def test_get_response_fails_invalid_file_paths(): ] with pytest.raises(ValueError) as ex: g.get_response(api_schema=make_api(), - opts=options.Options.build("")) + opts=Options.build("")) ex_str = str(ex.value) assert "%proto" in ex_str and "%service" in ex_str @@ -139,7 +139,7 @@ def test_get_response_enumerates_services(): ), ) ), - opts=options.Options.build(""), + opts=Options.build(""), ) assert len(cgr.file) == 2 assert {i.name for i in cgr.file} == { @@ -164,7 +164,7 @@ def test_get_response_enumerates_proto(): make_proto( descriptor_pb2.FileDescriptorProto(name="b.proto")), ), - opts=options.Options.build(""), + opts=Options.build(""), ) assert len(cgr.file) == 2 assert {i.name for i in cgr.file} == {"foo/a.py", "foo/b.py"} @@ -206,7 +206,7 @@ def test_get_response_divides_subpackages(): """.strip() ) cgr = g.get_response(api_schema=api_schema, - opts=options.Options.build("")) + opts=Options.build("")) assert len(cgr.file) == 6 assert {i.name for i in 
cgr.file} == { "foo/types/top.py", @@ -327,7 +327,7 @@ def test_parse_sample_paths(fs): ) with pytest.raises(types.InvalidConfig): - options.Options.build("samples=sampledir/,") + Options.build("samples=sampledir/,") @mock.patch( @@ -365,14 +365,14 @@ def test_samplegen_config_to_output_files( ), ) - g = generator.Generator(options.Options.build("samples=samples.yaml",)) + g = generator.Generator(Options.build("samples=samples.yaml",)) # Need to have the sample template visible to the generator. g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = make_api(naming=naming.NewNaming( name="Mollusc", version="v6")) actual_response = g.get_response( - api_schema, opts=options.Options.build("")) + api_schema, opts=Options.build("")) expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( @@ -451,14 +451,14 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): """ ), ) - g = generator.Generator(options.Options.build("samples=samples.yaml")) + g = generator.Generator(Options.build("samples=samples.yaml")) # Need to have the sample template visible to the generator. 
g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = make_api(naming=naming.NewNaming( name="Mollusc", version="v6")) actual_response = g.get_response(api_schema, - opts=options.Options.build("")) + opts=Options.build("")) expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( @@ -532,7 +532,7 @@ def test_generator_duplicate_samples(fs): with pytest.raises(types.DuplicateSample): generator.get_response(api_schema=api_schema, - opts=options.Options.build("")) + opts=Options.build("")) @mock.patch("gapic.samplegen.samplegen.generate_sample", return_value="") @@ -637,13 +637,13 @@ def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): expected.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL actual = generator.get_response( - api_schema=api_schema, opts=options.Options.build("") + api_schema=api_schema, opts=Options.build("") ) assert actual == expected def make_generator(opts_str: str = "") -> generator.Generator: - return generator.Generator(options.Options.build(opts_str)) + return generator.Generator(Options.build(opts_str)) def make_proto( diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index e4bac805eedc..5235c2e45393 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -17,12 +17,12 @@ from unittest import mock import warnings -from gapic.generator import options from gapic.samplegen_utils import types +from gapic.utils import Options def test_options_empty(): - opts = options.Options.build('') + opts = Options.build('') assert len(opts.templates) == 1 assert opts.templates[0].endswith('gapic/templates') assert not opts.lazy_import @@ -30,13 +30,13 @@ def test_options_empty(): def test_options_replace_templates(): - opts = options.Options.build('python-gapic-templates=/foo/') + opts = 
Options.build('python-gapic-templates=/foo/') assert len(opts.templates) == 1 assert opts.templates[0] == '/foo' def test_options_relative_templates(): - opts = options.Options.build('python-gapic-templates=../../squid/clam') + opts = Options.build('python-gapic-templates=../../squid/clam') expected = (os.path.abspath('../squid/clam'),) assert opts.templates == expected @@ -44,26 +44,26 @@ def test_options_relative_templates(): def test_options_unrecognized(): with mock.patch.object(warnings, 'warn') as warn: - options.Options.build('python-gapic-abc=xyz') + Options.build('python-gapic-abc=xyz') warn.assert_called_once_with('Unrecognized option: `python-gapic-abc`.') def test_flags_unrecognized(): with mock.patch.object(warnings, 'warn') as warn: - options.Options.build('python-gapic-abc') + Options.build('python-gapic-abc') warn.assert_called_once_with('Unrecognized option: `python-gapic-abc`.') def test_options_unrecognized_likely_typo(): with mock.patch.object(warnings, 'warn') as warn: - options.Options.build('go-gapic-abc=xyz') + Options.build('go-gapic-abc=xyz') assert len(warn.mock_calls) == 0 def test_options_trim_whitespace(): # When writing shell scripts, users may construct options strings with # whitespace that needs to be trimmed after tokenizing. - opts = options.Options.build( + opts = Options.build( ''' python-gapic-templates=/squid/clam/whelk , python-gapic-name=mollusca , @@ -75,11 +75,11 @@ def test_options_trim_whitespace(): def test_options_no_valid_sample_config(fs): fs.create_file("sampledir/not_a_config.yaml") with pytest.raises(types.InvalidConfig): - options.Options.build("samples=sampledir/") + Options.build("samples=sampledir/") def test_options_service_config(fs): - opts = options.Options.build("") + opts = Options.build("") assert opts.retry is None # Default of None is okay, verify build can read a config. 
@@ -109,7 +109,7 @@ def test_options_service_config(fs): }""") opt_string = f"retry-config={service_config_fpath}" - opts = options.Options.build(opt_string) + opts = Options.build(opt_string) # Verify the config was read in correctly. expected_cfg = { @@ -140,15 +140,15 @@ def test_options_service_config(fs): def test_options_lazy_import(): - opts = options.Options.build('lazy-import') + opts = Options.build('lazy-import') assert opts.lazy_import def test_options_old_naming(): - opts = options.Options.build('old-naming') + opts = Options.build('old-naming') assert opts.old_naming def test_options_add_iam_methods(): - opts = options.Options.build('add-iam-methods') + opts = Options.build('add-iam-methods') assert opts.add_iam_methods diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index b3f023054c49..fbe82a8f9b9b 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -22,11 +22,11 @@ from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 -from gapic.generator import options from gapic.schema import api from gapic.schema import imp from gapic.schema import naming from gapic.schema import wrappers +from gapic.utils import Options from test_utils.test_utils import ( make_enum_pb2, @@ -737,7 +737,7 @@ def _n(method_name: str): } # Set up retry information. 
- opts = options.Options(retry={'methodConfig': [ + opts = Options(retry={'methodConfig': [ {'name': [_n('TimeoutableGetFoo')], 'timeout': '30s'}, {'name': [_n('RetryableGetFoo')], 'retryPolicy': { 'maxAttempts': 3, diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 8418f3730051..ec1e0dad6460 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -16,8 +16,8 @@ from google.protobuf import descriptor_pb2 -from gapic.generator import options from gapic.schema import naming +from gapic.utils import Options from test_utils.test_utils import make_naming @@ -151,7 +151,7 @@ def test_cli_override_name(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.cloud.videointelligence.v1') n = naming.Naming.build(proto1, - opts=options.Options(name='Video Intelligence'), + opts=Options(name='Video Intelligence'), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Video Intelligence' @@ -162,7 +162,7 @@ def test_cli_override_name_underscores(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.cloud.videointelligence.v1') n = naming.Naming.build(proto1, - opts=options.Options(name='video_intelligence'), + opts=Options(name='video_intelligence'), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Video Intelligence' @@ -174,7 +174,7 @@ def test_cli_override_namespace(): proto1 = FileDesc(package='google.spanner.v1') n = naming.Naming.build( proto1, - opts=options.Options(namespace=('google', 'cloud')), + opts=Options(namespace=('google', 'cloud')), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Spanner' @@ -185,7 +185,7 @@ def test_cli_override_namespace_dotted(): FileDesc = descriptor_pb2.FileDescriptorProto proto1 = FileDesc(package='google.spanner.v1') n = naming.Naming.build(proto1, - 
opts=options.Options(namespace=('google.cloud',)), + opts=Options(namespace=('google.cloud',)), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Spanner' @@ -197,7 +197,7 @@ def test_cli_override_name_and_namespace(): proto1 = FileDesc(package='google.translation.v2') n = naming.Naming.build( proto1, - opts=options.Options( + opts=Options( namespace=('google', 'cloud'), name='translate' ), ) @@ -211,7 +211,7 @@ def test_cli_override_name_and_namespace_versionless(): proto1 = FileDesc(package='google.translation') n = naming.Naming.build( proto1, - opts=options.Options(namespace=('google', 'cloud'), name='translate'), + opts=Options(namespace=('google', 'cloud'), name='translate'), ) assert n.namespace == ('Google', 'Cloud') assert n.name == 'Translate' @@ -224,12 +224,12 @@ def test_build_factory(): ) old = naming.Naming.build( proto, - opts=options.Options(old_naming=True) + opts=Options(old_naming=True) ) assert old.versioned_module_name == 'mollusc.v1alpha1' new = naming.Naming.build( proto, - opts=options.Options() + opts=Options() ) assert new.versioned_module_name == 'mollusc_v1alpha1' From 5f8171497b26c492744175f5696c6a2d083c4249 Mon Sep 17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Thu, 8 Oct 2020 17:04:45 -0400 Subject: [PATCH 0388/1339] fix: fix types on server and bidi streaming callables (#641) The return type is an Awaitable that produces an AsyncIterator. 
--- .../%name_%version/%sub/services/%service/async_client.py.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 14be05325ddf..bdc7ce4d0d7f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -4,7 +4,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, {% if service.any_server_streaming %}AsyncIterable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -117,7 +117,7 @@ class {{ service.async_client_name }}: {%- if not method.server_streaming %} ) -> {{ method.client_output_async.ident }}: {%- else %} - ) -> AsyncIterable[{{ method.client_output_async.ident }}]: + ) -> Awaitable[AsyncIterable[{{ method.client_output_async.ident }}]]: {%- endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} From c3a948fbaca163f90c688cfcfb107e5e1756676b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 8 Oct 2020 16:09:23 -0700 Subject: [PATCH 0389/1339] chore: release 0.34.3 (#643) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 
deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a4ee51ff147f..e3ad3d4eae68 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.34.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.2...v0.34.3) (2020-10-08) + + +### Bug Fixes + +* fix types on server and bidi streaming callables ([#641](https://www.github.com/googleapis/gapic-generator-python/issues/641)) ([d92c202](https://www.github.com/googleapis/gapic-generator-python/commit/d92c2029398c969ebf2a68a5bf77c5eb4fff7b31)) + ### [0.34.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.1...v0.34.2) (2020-09-30) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 7d6c9685ae6c..bd032f056cf0 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.34.2" +version = "0.34.3" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 110ba149e99dc38071378e8e66bb66a179c30429 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 9 Oct 2020 13:24:23 -0700 Subject: [PATCH 0390/1339] ci: unit-test tasks install git (#646) The python:3.*-slim images have stopped including git, so it must be added manually. --- packages/gapic-generator/.circleci/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 17eed24bd111..5d00282c835c 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -577,7 +577,7 @@ jobs: name: Install pandoc command: | apt-get update - apt-get install -y pandoc gcc + apt-get install -y pandoc gcc git - run: name: Install nox and codecov. 
command: | @@ -599,7 +599,7 @@ jobs: name: Install pandoc command: | apt-get update - apt-get install -y pandoc gcc + apt-get install -y pandoc gcc git - run: name: Install nox and codecov. command: | @@ -621,7 +621,7 @@ jobs: name: Install pandoc command: | apt-get update - apt-get install -y pandoc gcc + apt-get install -y pandoc gcc git - run: name: Install nox and codecov. command: | From e4de9b19ec8f2fc4c168a8d532acfa848e6b19e6 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 9 Oct 2020 13:32:24 -0700 Subject: [PATCH 0391/1339] fix: expose transport property for clients (#645) Sometimes it's useful to get a reference to the transport for a client object. Closes #640 --- .../%sub/services/%service/client.py.j2 | 25 ++++-- .../%name_%version/%sub/test_%service.py.j2 | 38 ++++----- .../%sub/services/%service/client.py.j2 | 25 ++++-- .../%name_%version/%sub/test_%service.py.j2 | 84 +++++++++---------- 4 files changed, 95 insertions(+), 77 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index db181fabb8c8..098f37426b66 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -118,6 +118,15 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): from_service_account_json = from_service_account_file + @property + def transport(self) -> {{ service.name }}Transport: + """Return the transport used by the client instance. + + Returns: + {{ service.name }}Transport: The transport used by the client instance. 
+ """ + return self._transport + {% for message in service.resource_messages|sort(attribute="resource_type") -%} @staticmethod @@ -143,7 +152,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Parse a {{ resource_msg.message_type.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ resource_msg.message_type.path_regex_str }}", path) return m.groupdict() if m else {} - + {% endfor %} {# common resources #} def __init__(self, *, @@ -179,12 +188,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. - + Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. @@ -193,10 +202,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() - + # Create SSL credentials for mutual TLS if needed. 
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - + ssl_credentials = None is_mtls = False if use_client_cert: diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a342db6f1cb8..0c327942d68a 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -66,12 +66,12 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = {{ service.client_name }}.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - {% if service.host %}assert client._transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} def test_{{ service.client_name|snake_case }}_get_transport_class(): @@ -170,7 +170,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env) else: expected_ssl_channel_creds = ssl_channel_creds expected_host = client.DEFAULT_MTLS_ENDPOINT - + grpc_transport.assert_called_once_with( ssl_channel_credentials=expected_ssl_channel_creds, credentials=None, @@ -182,9 +182,9 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env) # 
GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: - with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: + with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: if use_client_cert_env == "false": is_mtls_mock.return_value = False ssl_credentials_mock.return_value = None @@ -195,7 +195,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env) ssl_credentials_mock.return_value = mock.Mock() expected_host = client.DEFAULT_MTLS_ENDPOINT expected_ssl_channel_creds = ssl_credentials_mock.return_value - + grpc_transport.return_value = None client = {{ service.client_name }}() grpc_transport.assert_called_once_with( @@ -208,7 +208,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env) # Check the case client_cert_source and ADC client cert are not provided. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: is_mtls_mock.return_value = False grpc_transport.return_value = None @@ -251,7 +251,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -331,7 +331,7 @@ def test_{{ method.name|snake_case }}_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: {% if method.void -%} call.return_value = None @@ -367,7 +367,7 @@ def test_{{ method.name|snake_case }}_from_dict(): ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -397,7 +397,7 @@ def test_{{ method.name|snake_case }}_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -462,7 +462,7 @@ def test_{{ method.name|snake_case }}_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -521,7 +521,7 @@ def test_{{ method.name|snake_case }}_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -580,7 +580,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = {{ service.client_name }}(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_grpc_default(): @@ -589,7 +589,7 @@ def test_transport_grpc_default(): credentials=credentials.AnonymousCredentials(), ) assert isinstance( - client._transport, + client.transport, transports.{{ service.name }}GrpcTransport, ) @@ -669,7 +669,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), ) - assert client._transport._host == '{{ host }}:443' + assert client.transport._host == '{{ host }}:443' {% endwith %} @@ -679,7 +679,7 @@ def test_{{ service.name|snake_case }}_host_with_port(): credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), ) - assert client._transport._host == '{{ host }}:8000' + 
assert client.transport._host == '{{ host }}:8000' {% endwith %} @@ -701,7 +701,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): credentials=credentials.AnonymousCredentials(), transport='grpc', ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 67cb2856d62f..aaa3075838cb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -124,6 +124,15 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): from_service_account_json = from_service_account_file + @property + def transport(self) -> {{ service.name }}Transport: + """Return the transport used by the client instance. + + Returns: + {{ service.name }}Transport: The transport used by the client instance. + """ + return self._transport + {% for message in service.resource_messages|sort(attribute="resource_type") -%} @staticmethod @@ -150,7 +159,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Parse a {{ resource_msg.message_type.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ resource_msg.message_type.path_regex_str }}", path) return m.groupdict() if m else {} - + {% endfor %} {# common resources #} def __init__(self, *, @@ -186,12 +195,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. - + Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. @@ -200,10 +209,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() - + # Create SSL credentials for mutual TLS if needed. use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - + ssl_credentials = None is_mtls = False if use_client_cert: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c74d49c1024e..c998864eefd9 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -76,12 +76,12 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(client_c with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = 
client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - {% if service.host %}assert client._transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} def test_{{ service.client_name|snake_case }}_get_transport_class(): @@ -164,7 +164,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): client = client_class() - + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): @@ -214,7 +214,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp else: expected_ssl_channel_creds = ssl_channel_creds expected_host = client.DEFAULT_MTLS_ENDPOINT - + patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,9 +229,9 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: - with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: + with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: if use_client_cert_env == "false": is_mtls_mock.return_value = False ssl_credentials_mock.return_value = None @@ -242,7 +242,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp ssl_credentials_mock.return_value = mock.Mock() expected_host = client.DEFAULT_MTLS_ENDPOINT expected_ssl_channel_creds = ssl_credentials_mock.return_value - + patched.return_value = None client = client_class() patched.assert_called_once_with( @@ -254,11 +254,11 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - + # Check the case client_cert_source and ADC client cert are not provided. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): + with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: is_mtls_mock.return_value = False patched.return_value = None @@ -352,7 +352,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -432,7 +432,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.{{ method.name|snake_case }}), + type(client._client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -516,7 +516,7 @@ def test_{{ method.name|snake_case }}_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: {% if method.void -%} call.return_value = None @@ -561,7 +561,7 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.{{ method.name|snake_case }}), + type(client._client.transport.{{ method.name|snake_case }}), '__call__') as call: {% if method.void -%} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -598,7 +598,7 @@ def test_{{ method.name|snake_case }}_from_dict(): ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -628,7 +628,7 @@ def test_{{ method.name|snake_case }}_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -693,7 +693,7 @@ async def test_{{ method.name|snake_case }}_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.{{ method.name|snake_case }}), + type(client._client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -779,7 +779,7 @@ def test_{{ method.name|snake_case }}_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -838,7 +838,7 @@ def test_{{ method.name|snake_case }}_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -880,7 +880,7 @@ async def test_{{ method.name|snake_case }}_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.{{ method.name|snake_case }}), + type(client._client.transport.{{ method.name|snake_case }}), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -928,7 +928,7 @@ async def test_{{ method.name|snake_case }}_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.{{ method.name|snake_case }}), + type(client._client.transport.{{ method.name|snake_case }}), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -1010,7 +1010,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = {{ service.client_name }}(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -1046,7 +1046,7 @@ def test_transport_grpc_default(): credentials=credentials.AnonymousCredentials(), ) assert isinstance( - client._transport, + client.transport, transports.{{ service.name }}GrpcTransport, ) @@ -1151,7 +1151,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), ) - assert client._transport._host == '{{ host }}:443' + assert client.transport._host == '{{ host }}:443' {% endwith %} @@ -1161,7 +1161,7 @@ def test_{{ service.name|snake_case }}_host_with_port(): credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), ) - assert client._transport._host == '{{ host }}:8000' + assert client.transport._host == '{{ host }}:8000' {% endwith %} @@ -1274,7 +1274,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): credentials=credentials.AnonymousCredentials(), transport='grpc', ) - transport = client._transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance( @@ -1291,7 +1291,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_async_client(): credentials=credentials.AnonymousCredentials(), transport='grpc_asyncio', ) - transport = client._client._transport + transport = client._client.transport # Ensure that we have a api-core operations client. assert isinstance( @@ -1384,7 +1384,7 @@ def test_set_iam_policy(transport: str = "grpc"): request = iam_policy.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -1416,7 +1416,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" + type(client._client.transport.set_iam_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1450,7 +1450,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -1478,7 +1478,7 @@ async def test_set_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" + type(client._client.transport.set_iam_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) @@ -1499,7 +1499,7 @@ def test_set_iam_policy_from_dict(): credentials=credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -1522,7 +1522,7 @@ def test_get_iam_policy(transport: str = "grpc"): request = iam_policy.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -1554,7 +1554,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" + type(client._client.transport.get_iam_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1588,7 +1588,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -1616,7 +1616,7 @@ async def test_get_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" + type(client._client.transport.get_iam_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) @@ -1637,7 +1637,7 @@ def test_get_iam_policy_from_dict(): credentials=credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -1661,7 +1661,7 @@ def test_test_iam_permissions(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -1694,7 +1694,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client._client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1727,7 +1727,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -1756,7 +1756,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client._client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -1780,7 +1780,7 @@ def test_test_iam_permissions_from_dict(): ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse() From 2833d718a48280d86791fd1e21d32a2da13080cd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 9 Oct 2020 13:43:13 -0700 Subject: [PATCH 0392/1339] chore: release 0.34.4 (#647) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index e3ad3d4eae68..b58757cbbd3d 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.34.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.3...v0.34.4) (2020-10-09) + + +### Bug Fixes + +* expose transport property for clients ([#645](https://www.github.com/googleapis/gapic-generator-python/issues/645)) ([13cddda](https://www.github.com/googleapis/gapic-generator-python/commit/13cddda0623bd4d24ae7973752b1be0eaa40523a)), closes [#640](https://www.github.com/googleapis/gapic-generator-python/issues/640) + ### [0.34.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.2...v0.34.3) (2020-10-08) diff --git 
a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index bd032f056cf0..10b88097ca9b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.34.3" +version = "0.34.4" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From c105329a79738069c5bc54ff82e12db78c41a9f4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 9 Oct 2020 14:00:53 -0700 Subject: [PATCH 0393/1339] feat: file_level and indirectly used resources generate helper methods (#642) * feat: file_level and indirectly used resources generate helper methods File level resources are defined as options for the proto file, not for a message type. Indirectly used resources are resources backed by a message type, but the message type is not a field type referenced by a service. E.g. message Squid { option (google.api.resource) = { type: "animalia.mollusca.com/Squid" pattern: "zones/{zone}/squids/{squid}" }; } message CreateSquidRequest{ string name = 1 [ (google.api.resource_reference) = { type: "animalia.mollusca.com/Squid" } ]; } message CreateSquidResponse{} Both file level and indirectly used resources generate helper methods in service clients that need them. 
Closes #637 --- packages/gapic-generator/gapic/schema/api.py | 69 +++++++-- .../gapic-generator/gapic/schema/wrappers.py | 39 ++++- .../gapic-generator/test_utils/test_utils.py | 18 ++- .../tests/unit/generator/test_generator.py | 1 + .../tests/unit/samplegen/test_integration.py | 6 +- .../tests/unit/schema/test_api.py | 135 ++++++++++++++++++ 6 files changed, 249 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 92c2b741c0b8..35a59dcee025 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -19,12 +19,15 @@ import collections import dataclasses +import itertools import keyword import os import sys from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple +from types import MappingProxyType from google.api_core import exceptions # type: ignore +from google.api import resource_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 @@ -58,11 +61,14 @@ def __getattr__(self, name: str): @classmethod def build( - cls, file_descriptor: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool, naming: api_naming.Naming, - opts: Options = Options(), - prior_protos: Mapping[str, 'Proto'] = None, - load_services: bool = True + cls, + file_descriptor: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool, + naming: api_naming.Naming, + opts: Options = Options(), + prior_protos: Mapping[str, 'Proto'] = None, + load_services: bool = True, + all_resources: Optional[Mapping[str, wrappers.MessageType]] = None, ) -> 'Proto': """Build and return a Proto instance. 
@@ -85,7 +91,8 @@ def build( naming=naming, opts=opts, prior_protos=prior_protos or {}, - load_services=load_services + load_services=load_services, + all_resources=all_resources or {}, ).proto @cached_property @@ -104,6 +111,24 @@ def messages(self) -> Mapping[str, wrappers.MessageType]: if not v.meta.address.parent ) + @cached_property + def resource_messages(self) -> Mapping[str, wrappers.MessageType]: + """Return the file level resources of the proto.""" + file_resource_messages = ( + (res.type, wrappers.CommonResource.build(res).message_type) + for res in self.file_pb2.options.Extensions[resource_pb2.resource_definition] + ) + resource_messages = ( + (msg.options.Extensions[resource_pb2.resource].type, msg) + for msg in self.messages.values() + if msg.options.Extensions[resource_pb2.resource].type + ) + return collections.OrderedDict( + itertools.chain( + file_resource_messages, resource_messages, + ) + ) + @property def module_name(self) -> str: """Return the appropriate module name for this service. @@ -264,6 +289,13 @@ def disambiguate_keyword_fname( load_services=False, ) + # A file descriptor's file-level resources are NOT visible to any importers. + # The only way to make referenced resources visible is to aggregate them at + # the API level and then pass that around. + all_file_resources = collections.ChainMap( + *(proto.resource_messages for proto in pre_protos.values()) + ) + # Second pass uses all the messages and enums defined in the entire API. # This allows LRO returning methods to see all the types in the API, # bypassing the above missing import problem. 
@@ -274,6 +306,7 @@ def disambiguate_keyword_fname( naming=naming, opts=opts, prior_protos=pre_protos, + all_resources=MappingProxyType(all_file_resources), ) for name, proto in pre_protos.items() } @@ -390,7 +423,8 @@ def __init__( naming: api_naming.Naming, opts: Options = Options(), prior_protos: Mapping[str, Proto] = None, - load_services: bool = True + load_services: bool = True, + all_resources: Optional[Mapping[str, wrappers.MessageType]] = None, ): self.proto_messages: Dict[str, wrappers.MessageType] = {} self.proto_enums: Dict[str, wrappers.EnumType] = {} @@ -432,9 +466,11 @@ def __init__( # below is because `repeated DescriptorProto message_type = 4;` in # descriptor.proto itself). self._load_children(file_descriptor.enum_type, self._load_enum, - address=self.address, path=(5,)) + address=self.address, path=(5,), + resources=all_resources or {}) self._load_children(file_descriptor.message_type, self._load_message, - address=self.address, path=(4,)) + address=self.address, path=(4,), + resources=all_resources or {}) # Edge case: Protocol buffers is not particularly picky about # ordering, and it is possible that a message will have had a field @@ -469,7 +505,8 @@ def __init__( # same files. if file_to_generate and load_services: self._load_children(file_descriptor.service, self._load_service, - address=self.address, path=(6,)) + address=self.address, path=(6,), + resources=all_resources or {}) # TODO(lukesneeringer): oneofs are on path 7. @property @@ -528,7 +565,8 @@ def api_messages(self) -> Mapping[str, wrappers.MessageType]: def _load_children(self, children: Sequence, loader: Callable, *, - address: metadata.Address, path: Tuple[int, ...]) -> Mapping: + address: metadata.Address, path: Tuple[int, ...], + resources: Mapping[str, wrappers.MessageType]) -> Mapping: """Return wrapped versions of arbitrary children from a Descriptor. Args: @@ -554,7 +592,8 @@ def _load_children(self, # applicable loader function on each. 
answer = {} for child, i in zip(children, range(0, sys.maxsize)): - wrapped = loader(child, address=address, path=path + (i,)) + wrapped = loader(child, address=address, path=path + (i,), + resources=resources) answer[wrapped.name] = wrapped return answer @@ -794,6 +833,7 @@ def _load_message(self, message_pb: descriptor_pb2.DescriptorProto, address: metadata.Address, path: Tuple[int], + resources: Mapping[str, wrappers.MessageType], ) -> wrappers.MessageType: """Load message descriptions from DescriptorProtos.""" address = address.child(message_pb.name, path) @@ -810,12 +850,14 @@ def _load_message(self, address=address, loader=self._load_enum, path=path + (4,), + resources=resources, ) nested_messages = self._load_children( message_pb.nested_type, address=address, loader=self._load_message, path=path + (3,), + resources=resources, ) oneofs = self._get_oneofs( @@ -856,6 +898,7 @@ def _load_enum(self, enum: descriptor_pb2.EnumDescriptorProto, address: metadata.Address, path: Tuple[int], + resources: Mapping[str, wrappers.MessageType], ) -> wrappers.EnumType: """Load enum descriptions from EnumDescriptorProtos.""" address = address.child(enum.name, path) @@ -886,6 +929,7 @@ def _load_service(self, service: descriptor_pb2.ServiceDescriptorProto, address: metadata.Address, path: Tuple[int], + resources: Mapping[str, wrappers.MessageType], ) -> wrappers.Service: """Load comments for a service and its methods.""" address = address.child(service.name, path) @@ -905,6 +949,7 @@ def _load_service(self, ), methods=methods, service_pb=service, + visible_resources=resources, ) return self.proto_services[address.proto] diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 8fc4b8d71298..5e49ceefa4ce 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -33,7 +33,6 @@ from itertools import chain from typing import (cast, Dict, FrozenSet, 
Iterable, List, Mapping, ClassVar, Optional, Sequence, Set, Tuple, Union) - from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 @@ -62,6 +61,12 @@ class Field: def __getattr__(self, name): return getattr(self.field_pb, name) + def __hash__(self): + # The only sense in which it is meaningful to say a field is equal to + # another field is if they are the same, i.e. they live in the same + # message type under the same moniker, i.e. they have the same id. + return id(self) + @property def name(self) -> str: """Used to prevent collisions with python keywords""" @@ -305,6 +310,15 @@ def recursive_field_types(self) -> Sequence[ return tuple(types) + @utils.cached_property + def recursive_fields(self) -> FrozenSet[Field]: + return frozenset(chain( + self.fields.values(), + (field + for t in self.recursive_field_types if isinstance(t, MessageType) + for field in t.fields.values()), + )) + @property def map(self) -> bool: """Return True if the given message is a map, False otherwise.""" @@ -860,6 +874,13 @@ class CommonResource: type_name: str pattern: str + @classmethod + def build(cls, resource: resource_pb2.ResourceDescriptor): + return cls( + type_name=resource.type, + pattern=next(iter(resource.pattern)) + ) + @utils.cached_property def message_type(self): message_pb = descriptor_pb2.DescriptorProto() @@ -880,6 +901,10 @@ class Service: """Description of a service (defined with the ``service`` keyword).""" service_pb: descriptor_pb2.ServiceDescriptorProto methods: Mapping[str, Method] + # N.B.: visible_resources is intended to be a read-only view + # whose backing store is owned by the API. + # This is represented by a types.MappingProxyType instance. 
+ visible_resources: Mapping[str, MessageType] meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) @@ -1021,6 +1046,14 @@ def gen_resources(message): if type_.resource_path: yield type_ + def gen_indirect_resources_used(message): + for field in message.recursive_fields: + resource = field.options.Extensions[ + resource_pb2.resource_reference] + resource_type = resource.type or resource.child_type + if resource_type: + yield self.visible_resources[resource_type] + return frozenset( msg for method in self.methods.values() @@ -1029,6 +1062,10 @@ def gen_resources(message): gen_resources( method.lro.response_type if method.lro else method.output ), + gen_indirect_resources_used(method.input), + gen_indirect_resources_used( + method.lro.response_type if method.lro else method.output + ), ) ) diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 89fd7351429c..beab26518f5c 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -24,9 +24,16 @@ from google.protobuf import descriptor_pb2 as desc -def make_service(name: str = 'Placeholder', host: str = '', - methods: typing.Tuple[wrappers.Method] = (), - scopes: typing.Tuple[str] = ()) -> wrappers.Service: +def make_service( + name: str = "Placeholder", + host: str = "", + methods: typing.Tuple[wrappers.Method] = (), + scopes: typing.Tuple[str] = (), + visible_resources: typing.Optional[ + typing.Mapping[str, wrappers.CommonResource] + ] = None, +) -> wrappers.Service: + visible_resources = visible_resources or {} # Define a service descriptor, and set a host and oauth scopes if # appropriate. 
service_pb = desc.ServiceDescriptorProto(name=name) @@ -38,6 +45,7 @@ def make_service(name: str = 'Placeholder', host: str = '', return wrappers.Service( service_pb=service_pb, methods={m.name: m for m in methods}, + visible_resources=visible_resources, ) @@ -47,7 +55,8 @@ def make_service_with_method_options( *, http_rule: http_pb2.HttpRule = None, method_signature: str = '', - in_fields: typing.Tuple[desc.FieldDescriptorProto] = () + in_fields: typing.Tuple[desc.FieldDescriptorProto] = (), + visible_resources: typing.Optional[typing.Mapping[str, wrappers.CommonResource]] = None, ) -> wrappers.Service: # Declare a method with options enabled for long-running operations and # field headers. @@ -69,6 +78,7 @@ def make_service_with_method_options( return wrappers.Service( service_pb=service_pb, methods={method.name: method}, + visible_resources=visible_resources or {}, ) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 6a120bbe1df9..a258c294cfb6 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -262,6 +262,7 @@ def test_get_filename_with_service(): methods=[], service_pb=descriptor_pb2.ServiceDescriptorProto( name="Eggs"), + visible_resources={}, ), }, ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index ca6e1a44f0a7..b652f1359ed9 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -80,7 +80,8 @@ def test_generate_sample_basic(): "classify_target": DummyField(name="classify_target") } ) - } + }, + visible_resources={}, ) schema = DummyApiSchema( @@ -216,7 +217,8 @@ def test_generate_sample_basic_unflattenable(): input=input_type, 
output=message_factory("$resp.taxonomy"), ) - } + }, + visible_resources={}, ) schema = DummyApiSchema( diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index fbe82a8f9b9b..c52c2f32686f 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -12,12 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +import collections + from typing import Sequence from unittest import mock import pytest from google.api import client_pb2 +from google.api import resource_pb2 from google.api_core import exceptions from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 @@ -1079,3 +1082,135 @@ def test_enums(): assert enum.values[1].meta.doc == 'This is the one value.' assert enum.values[2].name == 'THREE' assert enum.values[2].meta.doc == '' + + +def test_file_level_resources(): + fdp = make_file_pb2( + name="nomenclature.proto", + package="nomenclature.linneaen.v1", + messages=( + make_message_pb2( + name="CreateSpeciesRequest", + fields=( + make_field_pb2(name='species', number=1, type=9), + ), + ), + make_message_pb2( + name="CreateSpeciesResponse", + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="SpeciesService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="CreateSpecies", + input_type="nomenclature.linneaen.v1.CreateSpeciesRequest", + output_type="nomenclature.linneaen.v1.CreateSpeciesResponse", + ), + ), + ), + ), + ) + res_pb2 = fdp.options.Extensions[resource_pb2.resource_definition] + definitions = [ + ("nomenclature.linnaen.com/Species", + "families/{family}/genera/{genus}/species/{species}"), + ("nomenclature.linnaen.com/Phylum", + "kingdoms/{kingdom}/phyla/{phylum}"), + ] + for type_, pattern in definitions: + resource_definition = res_pb2.add() + resource_definition.type = type_ + 
resource_definition.pattern.append(pattern) + + species_field = fdp.message_type[0].field[0] + resource_reference = species_field.options.Extensions[resource_pb2.resource_reference] + resource_reference.type = "nomenclature.linnaen.com/Species" + + api_schema = api.API.build([fdp], package='nomenclature.linneaen.v1') + actual = api_schema.protos['nomenclature.proto'].resource_messages + expected = collections.OrderedDict(( + ("nomenclature.linnaen.com/Species", + wrappers.CommonResource( + type_name="nomenclature.linnaen.com/Species", + pattern="families/{family}/genera/{genus}/species/{species}" + ).message_type), + ("nomenclature.linnaen.com/Phylum", + wrappers.CommonResource( + type_name="nomenclature.linnaen.com/Phylum", + pattern="kingdoms/{kingdom}/phyla/{phylum}" + ).message_type), + )) + + assert actual == expected + + # The proto file _owns_ the file level resources, but the service needs to + # see them too because the client class owns all the helper methods. + service = api_schema.services["nomenclature.linneaen.v1.SpeciesService"] + actual = service.visible_resources + assert actual == expected + + # The service doesn't own any method that owns a message that references + # Phylum, so the service doesn't count it among its resource messages. 
+ expected.pop("nomenclature.linnaen.com/Phylum") + expected = frozenset(expected.values()) + actual = service.resource_messages + + assert actual == expected + + +def test_resources_referenced_but_not_typed(reference_attr="type"): + fdp = make_file_pb2( + name="nomenclature.proto", + package="nomenclature.linneaen.v1", + messages=( + make_message_pb2( + name="Species", + ), + make_message_pb2( + name="CreateSpeciesRequest", + fields=( + make_field_pb2(name='species', number=1, type=9), + ), + ), + make_message_pb2( + name="CreateSpeciesResponse", + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="SpeciesService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="CreateSpecies", + input_type="nomenclature.linneaen.v1.CreateSpeciesRequest", + output_type="nomenclature.linneaen.v1.CreateSpeciesResponse", + ), + ), + ), + ), + ) + + # Set up the resource + species_resource_opts = fdp.message_type[0].options.Extensions[resource_pb2.resource] + species_resource_opts.type = "nomenclature.linnaen.com/Species" + species_resource_opts.pattern.append( + "families/{family}/genera/{genus}/species/{species}") + + # Set up the reference + name_resource_opts = fdp.message_type[1].field[0].options.Extensions[resource_pb2.resource_reference] + if reference_attr == "type": + name_resource_opts.type = species_resource_opts.type + else: + name_resource_opts.child_type = species_resource_opts.type + + api_schema = api.API.build([fdp], package="nomenclature.linneaen.v1") + expected = {api_schema.messages["nomenclature.linneaen.v1.Species"]} + actual = api_schema.services["nomenclature.linneaen.v1.SpeciesService"].resource_messages + + assert actual == expected + + +def test_resources_referenced_but_not_typed_child_type(): + test_resources_referenced_but_not_typed("child_type") From 8e5e109805d4e98f42bd65b85498479686a2ef2a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 9 Oct 2020 
14:08:20 -0700 Subject: [PATCH 0394/1339] chore: release 0.35.0 (#649) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b58757cbbd3d..c0043e468c08 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.35.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.4...v0.35.0) (2020-10-09) + + +### Features + +* file_level and indirectly used resources generate helper methods ([#642](https://www.github.com/googleapis/gapic-generator-python/issues/642)) ([42e224c](https://www.github.com/googleapis/gapic-generator-python/commit/42e224cb100f6e2aa9370bc6a5179d62979b5c4d)), closes [#637](https://www.github.com/googleapis/gapic-generator-python/issues/637) + ### [0.34.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.3...v0.34.4) (2020-10-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 10b88097ca9b..2d16b41aab94 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.34.4" +version = "0.35.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 9653551117b46c11097e7401b6d37cf12275f320 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 9 Oct 2020 16:04:44 -0700 Subject: [PATCH 0395/1339] fix: the common resources are not targets for lookup (#650) The five common resources of Google Cloud are file-level options that are rendered separately from the rest of the APIs resources. 
--- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 21 +++++++++++-------- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 2 +- .../unit/schema/wrappers/test_service.py | 14 ++++++------- 7 files changed, 24 insertions(+), 21 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 098f37426b66..67d5de4680ca 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -141,7 +141,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} {% endfor %} - {% for resource_msg in service.common_resources|sort(attribute="type_name") -%} + {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} @staticmethod def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 0c327942d68a..55768e7cd70e 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -738,7 +738,7 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): assert expected == actual {% endfor -%} -{% for resource_msg in service.common_resources -%} +{% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): {% for arg in resource_msg.message_type.resource_path_args -%} {{ arg }} = "{{ molluscs.next() }}" diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 5e49ceefa4ce..e68b397ee7bc 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -909,29 +909,29 @@ class Service: default_factory=metadata.Metadata, ) - common_resources: ClassVar[Sequence[CommonResource]] = dataclasses.field( - default=( - CommonResource( + common_resources: ClassVar[Mapping[str, CommonResource]] = dataclasses.field( + default={ + "cloudresourcemanager.googleapis.com/Project": CommonResource( "cloudresourcemanager.googleapis.com/Project", "projects/{project}", ), - CommonResource( + "cloudresourcemanager.googleapis.com/Organization": CommonResource( "cloudresourcemanager.googleapis.com/Organization", "organizations/{organization}", ), - CommonResource( + "cloudresourcemanager.googleapis.com/Folder": CommonResource( "cloudresourcemanager.googleapis.com/Folder", "folders/{folder}", ), - CommonResource( + "cloudbilling.googleapis.com/BillingAccount": CommonResource( "cloudbilling.googleapis.com/BillingAccount", "billingAccounts/{billing_account}", ), - CommonResource( + "locations.googleapis.com/Location": CommonResource( "locations.googleapis.com/Location", "projects/{project}/locations/{location}", ), - ), + }, init=False, compare=False, ) @@ -1051,7 +1051,10 @@ def gen_indirect_resources_used(message): resource = 
field.options.Extensions[ resource_pb2.resource_reference] resource_type = resource.type or resource.child_type - if resource_type: + # The common resources are defined (and rendered) explicitly + # by separate logic, and the resource definitions are never + # visible in any of the APIs file descriptor protos. + if resource_type and resource_type not in self.common_resources: yield self.visible_resources[resource_type] return frozenset( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index bdc7ce4d0d7f..275c7c9e8b77 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -42,7 +42,7 @@ class {{ service.async_client_name }}: {{ message.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.{{ message.resource_type|snake_case }}_path) parse_{{ message.resource_type|snake_case}}_path = staticmethod({{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path) {% endfor %} - {% for resource_msg in service.common_resources %} + {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} common_{{ resource_msg.message_type.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path) parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path) {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index aaa3075838cb..c8040284d540 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -148,7 +148,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return m.groupdict() if m else {} {% endfor %} {# resources #} - {% for resource_msg in service.common_resources|sort(attribute="type_name") -%} + {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} @staticmethod def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c998864eefd9..f9365635d74f 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1328,7 +1328,7 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): assert expected == actual {% endfor -%} -{% for resource_msg in service.common_resources -%} +{% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): {% for arg in resource_msg.message_type.resource_path_args -%} {{ arg }} = "{{ molluscs.next() }}" diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 28c7cbe5e775..d939493328ab 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -311,28 +311,28 @@ def test_has_pagers(): def test_default_common_resources(): service = make_service(name="MolluscMaker") - assert service.common_resources == ( - CommonResource( + assert service.common_resources == { + "cloudresourcemanager.googleapis.com/Project": CommonResource( "cloudresourcemanager.googleapis.com/Project", "projects/{project}", ), - CommonResource( + "cloudresourcemanager.googleapis.com/Organization": CommonResource( "cloudresourcemanager.googleapis.com/Organization", "organizations/{organization}", ), - CommonResource( + "cloudresourcemanager.googleapis.com/Folder": CommonResource( "cloudresourcemanager.googleapis.com/Folder", "folders/{folder}", ), - CommonResource( + "cloudbilling.googleapis.com/BillingAccount": CommonResource( "cloudbilling.googleapis.com/BillingAccount", "billingAccounts/{billing_account}", ), - CommonResource( + "locations.googleapis.com/Location": CommonResource( "locations.googleapis.com/Location", "projects/{project}/locations/{location}", ), - ) + } def test_common_resource_patterns(): From 29f90b5bb38ea98ed95ba86ddb25900476d5a1e9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 12 Oct 2020 08:41:03 -0700 Subject: [PATCH 0396/1339] chore: release 0.35.1 (#651) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c0043e468c08..495a5a7fda63 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.0...v0.35.1) (2020-10-09) + + +### Bug Fixes + +* the common resources are not targets for lookup ([#650](https://www.github.com/googleapis/gapic-generator-python/issues/650)) ([8e1b384](https://www.github.com/googleapis/gapic-generator-python/commit/8e1b384e812ef519c421c8c288d5118961d8b4cf)) + ## [0.35.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.4...v0.35.0) (2020-10-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2d16b41aab94..8218bcb51685 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.0" +version = "0.35.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 6ede78761d7b9e7311c22864e3ba491bc078da66 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 13 Oct 2020 10:49:40 -0700 Subject: [PATCH 0397/1339] fix: modules referenced in MapField message type are properly aliased (#654) This was noticed when attempting to generate Bigtable Admin in a message definition: an imported module is given an alias to prevent collision with a field name. When the module is referenced to describe the type of a singleton field it is properly disambiguated. When used to describe the type of a MapField it is _not_ disambiguated. Fix for that. 
Closes #618 --- .../gapic-generator/gapic/schema/metadata.py | 14 ++++ .../gapic-generator/gapic/schema/wrappers.py | 26 +++++-- .../tests/unit/schema/test_api.py | 73 +++++++++++++++++++ 3 files changed, 108 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 9459bb5ef11e..b801bb7603bf 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -90,6 +90,20 @@ def __str__(self) -> str: # Return the Python identifier. return '.'.join(self.parent + (self.name,)) + def __repr__(self) -> str: + return "({})".format( + ", ".join( + ( + self.name, + self.module, + str(self.module_path), + str(self.package), + str(self.parent), + str(self.api_naming), + ) + ) + ) + @property def module_alias(self) -> str: """Return an appropriate module alias if necessary. diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index e68b397ee7bc..1b0db83e5f59 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -222,7 +222,12 @@ def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: raise TypeError(f'Unrecognized protobuf type: {self.field_pb.type}. ' 'This code should not be reachable; please file a bug.') - def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': + def with_context( + self, + *, + collisions: FrozenSet[str], + visited_messages: FrozenSet["MessageType"], + ) -> 'Field': """Return a derivative of this field with the provided context. This method is used to address naming collisions. 
The returned @@ -233,7 +238,8 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Field': self, message=self.message.with_context( collisions=collisions, - skip_fields=True, + skip_fields=self.message in visited_messages, + visited_messages=visited_messages, ) if self.message else None, enum=self.enum.with_context(collisions=collisions) if self.enum else None, @@ -406,7 +412,10 @@ def get_field(self, *field_path: str, # Base case: If this is the last field in the path, return it outright. if len(field_path) == 1: - return cursor.with_context(collisions=collisions) + return cursor.with_context( + collisions=collisions, + visited_messages=frozenset({self}), + ) # Sanity check: If cursor is a repeated field, then raise an exception. # Repeated fields are only permitted in the terminal position. @@ -433,6 +442,7 @@ def get_field(self, *field_path: str, def with_context(self, *, collisions: FrozenSet[str], skip_fields: bool = False, + visited_messages: FrozenSet["MessageType"] = frozenset(), ) -> 'MessageType': """Return a derivative of this message with the provided context. @@ -444,10 +454,14 @@ def with_context(self, *, underlying fields. This provides for an "exit" in the case of circular references. 
""" + visited_messages = visited_messages | {self} return dataclasses.replace( self, fields=collections.OrderedDict( - (k, v.with_context(collisions=collisions)) + (k, v.with_context( + collisions=collisions, + visited_messages=visited_messages + )) for k, v in self.fields.items() ) if not skip_fields else self.fields, nested_enums=collections.OrderedDict( @@ -457,7 +471,9 @@ def with_context(self, *, nested_messages=collections.OrderedDict( (k, v.with_context( collisions=collisions, - skip_fields=skip_fields,)) + skip_fields=skip_fields, + visited_messages=visited_messages, + )) for k, v in self.nested_messages.items()), meta=self.meta.with_context(collisions=collisions), ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index c52c2f32686f..e91a310ee0d6 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -1214,3 +1214,76 @@ def test_resources_referenced_but_not_typed(reference_attr="type"): def test_resources_referenced_but_not_typed_child_type(): test_resources_referenced_but_not_typed("child_type") + + +def test_map_field_name_disambiguation(): + squid_file_pb = descriptor_pb2.FileDescriptorProto( + name="mollusc.proto", + package="animalia.mollusca.v2", + message_type=( + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ), + ) + method_types_file_pb = descriptor_pb2.FileDescriptorProto( + name="mollusc_service.proto", + package="animalia.mollusca.v2", + message_type=( + descriptor_pb2.DescriptorProto( + name="CreateMolluscRequest", + field=( + descriptor_pb2.FieldDescriptorProto( + name="mollusc", + type="TYPE_MESSAGE", + type_name=".animalia.mollusca.v2.Mollusc", + number=1, + ), + descriptor_pb2.FieldDescriptorProto( + name="molluscs_map", + type="TYPE_MESSAGE", + number=2, + type_name=".animalia.mollusca.v2.CreateMolluscRequest.MolluscsMapEntry", + label="LABEL_REPEATED", + ), + ), + 
nested_type=( + descriptor_pb2.DescriptorProto( + name="MolluscsMapEntry", + field=( + descriptor_pb2.FieldDescriptorProto( + name="key", + type="TYPE_STRING", + number=1, + ), + descriptor_pb2.FieldDescriptorProto( + name="value", + type="TYPE_MESSAGE", + number=2, + # We use the same type for the map value as for + # the singleton above to better highlight the + # problem raised in + # https://github.com/googleapis/gapic-generator-python/issues/618. + # The module _is_ disambiguated for singleton + # fields but NOT for map fields. + type_name=".animalia.mollusca.v2.Mollusc" + ), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ), + ), + ), + ), + ) + my_api = api.API.build( + file_descriptors=[squid_file_pb, method_types_file_pb], + ) + create = my_api.messages['animalia.mollusca.v2.CreateMolluscRequest'] + mollusc = create.fields['mollusc'] + molluscs_map = create.fields['molluscs_map'] + mollusc_ident = str(mollusc.type.ident) + mollusc_map_ident = str(molluscs_map.message.fields['value'].type.ident) + + # The same module used in the same place should have the same import alias. + # Because there's a "mollusc" name used, the import should be disambiguated. 
+ assert mollusc_ident == mollusc_map_ident == "am_mollusc.Mollusc" From 9e63f9797a5509c30943f988631b86c2f1e6df48 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 13 Oct 2020 10:56:24 -0700 Subject: [PATCH 0398/1339] chore: release 0.35.2 (#655) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 495a5a7fda63..c0950657e27a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.1...v0.35.2) (2020-10-13) + + +### Bug Fixes + +* modules referenced in MapField message type are properly aliased ([#654](https://www.github.com/googleapis/gapic-generator-python/issues/654)) ([2c79349](https://www.github.com/googleapis/gapic-generator-python/commit/2c79349e7b89435bc45e499885f7b12ac0bc2d9f)), closes [#618](https://www.github.com/googleapis/gapic-generator-python/issues/618) + ### [0.35.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.0...v0.35.1) (2020-10-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8218bcb51685..ad3daf6cdc8d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.1" +version = "0.35.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From a97d262096643e2e4024defbe6aec738cd31bdcb Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 13 Oct 2020 13:11:22 -0700 Subject: [PATCH 0399/1339] docs: remove references to pipsi (#656) 
Multiple people have experienced difficulty with pipsi. To that end, documentation recommending its use is now rewritten to recommend just using pyenv. Adds small comment expansion aroud bazel rules. --- .../docs/getting-started/index.rst | 10 ++++++++-- .../docs/getting-started/local.rst | 18 +++++++++++------- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started/index.rst b/packages/gapic-generator/docs/getting-started/index.rst index fa2ebf02231e..ec9149a582b7 100644 --- a/packages/gapic-generator/docs/getting-started/index.rst +++ b/packages/gapic-generator/docs/getting-started/index.rst @@ -7,7 +7,8 @@ protocol buffers do. Because dependency management and such can be a significant undertaking, we offer a Docker image and interface which requires you only to have Docker -installed and provide the protos for your API. +installed and provide the protos for your API. Alternatively, the generator is +also invocable via bazel rules. It is also possible to install the tool locally and run it through ``protoc``, and this approach is fully supported. @@ -15,7 +16,12 @@ and this approach is fully supported. .. note:: The Docker approach is recommended for users new to this ecosystem, or - those which do not have a robust Python environment available. + those which do not have a robust Python environment available. If you want + to experiment with generating client libraries but do not want to make + changes to the generator itself, try the Docker image first. + + The bazel approach is recommended for established pipelines. It is more + lightweight than the Docker image but may take some more effort to set up. .. 
_protocol buffers: https://developers.google.com/protocol-buffers/ diff --git a/packages/gapic-generator/docs/getting-started/local.rst b/packages/gapic-generator/docs/getting-started/local.rst index 5251dccad8f7..9c2a989fb84b 100644 --- a/packages/gapic-generator/docs/getting-started/local.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -80,9 +80,6 @@ Python 3.7, so you will need that installed. (Most Linux distributions ship with earlier versions.) Use `pyenv`_ to get Python 3.7 installed in a friendly way. -As for this library itself, the recommended installation approach is -`pipsi`_. - .. code-block:: shell # Due to its experimental state, this tool is not published to a @@ -91,11 +88,19 @@ As for this library itself, the recommended installation approach is git clone https://github.com/googleapis/gapic-generator-python.git cd gapic-generator-python/ - # Install the tool. This will handle the virtualenv for you, and + # Install a version of python that is supported by the microgenerator. + # We use 3.8.6 as an example. + # You may need to install additional packages in order to + # build python from source. + # Setting a 'global' python is convenient for development but may interfere + # with other system activities. Adjust as your environment requires. + pyenv install 3.8.6 && pyenv global 3.8.6 + + # Install the tool. This will handle the virtualenv for you, and # make an appropriately-aliased executable. # The `--editable` flag is only necessary if you want to work on the # tool (as opposed to just use it). - pipsi install --editable --python=`which python3.7` . + python -m pip install --editable . To ensure the tool is installed properly: @@ -105,7 +110,6 @@ To ensure the tool is installed properly: /path/to/protoc-gen-python_gapic .. _pyenv: https://github.com/pyenv/pyenv -.. _pipsi: https://github.com/mitsuhiko/pipsi Usage ----- @@ -151,7 +155,7 @@ This plugin is invoked under the hood via. the ``--python_gapic_out`` switch. 
the common protos must come first, and then the path to the API being built. .. include:: _samplegen.rst - + .. code-block:: shell # Multiple sample paths or directories can be passed simultaneously by duplicating From 524e47ebef34b8fe398ff4b08e9a9fa7198e9564 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 13 Oct 2020 13:15:42 -0700 Subject: [PATCH 0400/1339] chore: release 0.35.3 (#658) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c0950657e27a..22cc6e7bf670 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.2...v0.35.3) (2020-10-13) + + +### Documentation + +* remove references to pipsi ([#656](https://www.github.com/googleapis/gapic-generator-python/issues/656)) ([39c612b](https://www.github.com/googleapis/gapic-generator-python/commit/39c612b545bc93c7c738a78f074672ee66365efb)) + ### [0.35.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.1...v0.35.2) (2020-10-13) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ad3daf6cdc8d..f3a1830fd045 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.2" +version = "0.35.3" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 4bf1711b02763c178d18c7026e7b2b8b6366dde9 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Thu, 15 Oct 2020 10:06:03 -0700 Subject: [PATCH 0401/1339] chore: 
codeowners replace actools w/actools-python (#662) Narrows down CODEOWNER `actools` to `actools-python`. --- packages/gapic-generator/.github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS index aaabff6ec140..0546e1d81418 100644 --- a/packages/gapic-generator/.github/CODEOWNERS +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -4,4 +4,4 @@ # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -* @googleapis/actools @googleapis/yoshi-python +* @googleapis/actools-python @googleapis/yoshi-python From 78d29796794e74d49245d469301c70bcb9649d22 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 16 Oct 2020 14:20:45 -0700 Subject: [PATCH 0402/1339] chore: remove 3.6 tests for alternative templates (#666) Features in the Ads templates require Python 3.7 or later. --- packages/gapic-generator/.circleci/config.yml | 32 ------------------- .../.github/sync-repo-settings.yaml | 1 - packages/gapic-generator/noxfile.py | 2 +- 3 files changed, 1 insertion(+), 34 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 5d00282c835c..9ae961fcee1b 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -38,12 +38,6 @@ workflows: filters: tags: only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-alternative-templates-3.6: - requires: - - unit-3.6 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.7: requires: - unit-3.7 @@ -100,7 +94,6 @@ workflows: requires: - docs - mypy - - showcase-unit-alternative-templates-3.6 - showcase-unit-alternative-templates-3.7 - showcase-unit-alternative-templates-3.8 - showcase-mypy-alternative-templates @@ -111,7 +104,6 @@ workflows: requires: - docs - mypy - - 
showcase-unit-alternative-templates-3.6 - showcase-unit-alternative-templates-3.7 - showcase-unit-alternative-templates-3.8 - showcase-mypy-alternative-templates @@ -424,30 +416,6 @@ jobs: - run: name: Run unit tests. command: nox -s showcase_unit-3.8 - showcase-unit-alternative-templates-3.6: - docker: - - image: python:3.6-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit_alternative_templates-3.6 showcase-unit-alternative-templates-3.7: docker: - image: python:3.7-slim diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index d0e56dcae893..86677eec40f9 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -17,7 +17,6 @@ branchProtectionRules: - 'ci/circleci: showcase-unit-3.7' - 'ci/circleci: showcase-unit-3.8' - 'ci/circleci: showcase-unit-add-iam-methods' - - 'ci/circleci: showcase-unit-alternative-templates-3.6' - 'ci/circleci: showcase-unit-alternative-templates-3.7' - 'ci/circleci: showcase-unit-alternative-templates-3.8' - 'ci/circleci: style-check' diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 47880e3d1c50..bcab1b7e8211 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -176,7 +176,7 @@ def showcase_unit( ) -@nox.session(python=["3.6", "3.7", "3.8"]) 
+@nox.session(python=["3.7", "3.8"]) def showcase_unit_alternative_templates(session): showcase_unit(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) From cfa6c0e136b0c585e645afe3555459f87efa0eac Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 16 Oct 2020 14:21:32 -0700 Subject: [PATCH 0403/1339] fix: minor typo in ads template (#664) CircleCI machinery now invokes the alternative (Ads) templates for the showcase_alternative_templates_* tests. Includes numerous fixes and additions to the Ads grpc transport, client class, and unit test templates. The showcase system tests now selectively enable async tests via an environment variable. The async client code has not yet been added to the Ads templates, and the corresponding system tests have been disabled for alternative templates. --- .../services/%service/transports/base.py.j2 | 10 +- .../services/%service/transports/grpc.py.j2 | 82 ++++-- .../gapic/ads-templates/noxfile.py.j2 | 2 +- .../gapic/ads-templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 102 ++++++- .../%sub/services/%service/async_client.py.j2 | 10 + .../services/%service/transports/grpc.py.j2 | 20 +- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 88 +++++- packages/gapic-generator/noxfile.py | 48 +++- .../gapic-generator/tests/system/conftest.py | 69 ++--- .../tests/system/test_grpc_lro.py | 30 ++- .../tests/system/test_grpc_streams.py | 250 +++++++++--------- .../tests/system/test_grpc_unary.py | 54 ++-- .../tests/system/test_pagination.py | 74 +++--- .../tests/system/test_resource_crud.py | 82 +++--- .../tests/system/test_retry.py | 22 +- 17 files changed, 590 insertions(+), 357 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 2053e9fe4faf..f25ba96a1ae5 
100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -55,10 +55,10 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
@@ -89,7 +89,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__) %} + {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, {%- endfor %} ), diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 7fbf53e78c6b..6995549378df 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,7 +1,8 @@ {% extends '_base.py.j2' %} {% block content %} -from typing import Callable, Dict, Tuple +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} @@ -10,7 +11,7 @@ from google.api_core import operations_v1 # type: ignore from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore - +from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -38,8 +39,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], 
Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -53,14 +59,29 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing your own client library. Raises: @@ -74,6 +95,33 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # If a channel was explicitly provided, set it. self._grpc_channel = channel + elif api_mtls_endpoint: + warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) + + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" + + if credentials is None: + credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) else: host = host if ":" in host else host + ":443" @@ -81,7 +129,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials, _ = auth.default(scopes=self.AUTH_SCOPES) # create a new channel. The provided one is ignored. - self._grpc_channel = grpc_helpers.create_channel( + self._grpc_channel = type(self).create_channel( host, credentials=credentials, ssl_credentials=ssl_channel_credentials, @@ -102,6 +150,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: credentials.Credentials = None, + scopes: Optional[Sequence[str]] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. 
Args: @@ -111,6 +160,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: @@ -119,26 +171,14 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return grpc_helpers.create_channel( host, credentials=credentials, - scopes=cls.AUTH_SCOPES, + scopes=scopes or cls.AUTH_SCOPES, **kwargs ) @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, '_grpc_channel'): - self._grpc_channel = self.create_channel( - self._host, - credentials=self._credentials, - ) - - # Return the channel from cache. 
return self._grpc_channel {%- if service.has_lro %} diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 71f99a414481..4760bc548bac 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -20,7 +20,7 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', '{{ api.naming.versioned_module_name }}'), + os.path.join('tests', 'unit', 'gapic', '{{ api.naming.versioned_module_name }}'), ) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 6587ebd21ec1..92ae4ea7afc8 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -24,7 +24,7 @@ setuptools.setup( 'grpc-google-iam-v1', {%- endif %} ), - python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy import requires module-level getattr #} + python_requires='>=3.7',{# Lazy import requires module-level getattr #} setup_requires=[ 'libcst >= 0.2.5', ], diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 55768e7cd70e..810a6f2e927e 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -144,7 +144,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = {{ service.client_name }}() @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) @@ -222,7 +222,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env) def test_{{ service.client_name|snake_case }}_client_options_from_dict(): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as grpc_transport: + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None client = {{ service.client_name }}( client_options={'api_endpoint': 'squid.clam.whelk'} @@ -583,6 +583,15 @@ def test_transport_instance(): assert client.transport is transport +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = {{ service.client_name }}( @@ -593,18 +602,20 @@ def test_transport_grpc_default(): transports.{{ service.name }}GrpcTransport, ) - -def test_transport_adc(): +@pytest.mark.parametrize("transport_class", [ + transports.{{ service.grpc_transport_name }}, +]) +def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
with mock.patch.object(auth, 'default') as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.{{ service.name }}Transport() + transport_class() adc.assert_called_once() def test_{{ service.name|snake_case }}_base_transport(): # Instantiate the base transport. - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as Transport: + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: Transport.return_value = None transport = transports.{{ service.name }}Transport( credentials=credentials.AnonymousCredentials(), @@ -695,6 +706,85 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel(): assert transport._host == "squid.clam.whelk:443" +@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}]) +def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + 
grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }},]) +def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + {% if service.has_lro -%} def test_{{ service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 275c7c9e8b77..0f2e88700fda 100644 
--- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -50,6 +50,16 @@ class {{ service.async_client_name }}: from_service_account_file = {{ service.client_name }}.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> {{ service.name }}Transport: + """Return the transport used by the client instance. + + Returns: + {{ service.name }}Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) def __init__(self, *, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 4283b12ad6c8..47eaffeb19a4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -76,7 +76,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
@@ -84,10 +84,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -105,7 +105,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._grpc_channel = channel elif api_mtls_endpoint: warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) - + host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: @@ -203,12 +203,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. 
return self._grpc_channel {%- if service.has_lro %} @@ -339,7 +335,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] - {% endif %} + {% endif %} __all__ = ( '{{ service.name }}GrpcTransport', diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index e2d1ad659790..fdfbf7a63701 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -23,7 +23,7 @@ setuptools.setup( 'grpc-google-iam-v1', {%- endif %} ), - python_requires='>={% if opts.lazy_import %}3.7{% else %}3.6{% endif %}',{# Lazy import requires module-level getattr #} + python_requires='>=3.6', scripts=[ 'scripts/fixup_{{ api.naming.versioned_module_name }}_keywords.py', ], diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f9365635d74f..359b548a0b67 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -432,7 +432,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -561,7 +561,7 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: {% if method.void -%} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -693,7 +693,7 @@ async def test_{{ method.name|snake_case }}_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void -%} @@ -880,7 +880,7 @@ async def test_{{ method.name|snake_case }}_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -928,7 +928,7 @@ async def test_{{ method.name|snake_case }}_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.name|snake_case }}), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1291,7 +1291,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_async_client(): credentials=credentials.AnonymousCredentials(), transport='grpc_asyncio', ) - transport = client._client.transport + transport = client.transport # Ensure that we have a api-core operations client. assert isinstance( @@ -1416,7 +1416,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client.transport.set_iam_policy), "__call__" + type(client.transport.set_iam_policy), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1478,7 +1478,7 @@ async def test_set_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.set_iam_policy), "__call__" + type(client.transport.set_iam_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) @@ -1512,6 +1512,27 @@ def test_set_iam_policy_from_dict(): call.assert_called() +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + def test_get_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -1554,7 +1575,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.get_iam_policy), "__call__" + type(client.transport.get_iam_policy), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1616,7 +1637,7 @@ async def test_get_iam_policy_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.get_iam_policy), "__call__" + type(client.transport.get_iam_policy), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) @@ -1649,6 +1670,26 @@ def test_get_iam_policy_from_dict(): ) call.assert_called() +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + def test_test_iam_permissions(transport: str = "grpc"): client = {{ service.client_name }}( @@ -1694,7 +1735,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client.transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1756,7 +1797,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client.transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -1792,6 +1833,29 @@ def test_test_iam_permissions_from_dict(): } ) call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + {% endif %} {% endblock %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index bcab1b7e8211..ba283480e158 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -87,16 +87,20 @@ def showcase_library( # Write out a client library for Showcase. 
template_opt = f"python-gapic-templates={templates}" - opts = f"--python_gapic_opt={template_opt}" - opts += ",".join(other_opts + ("lazy-import",)) - session.run( - "protoc", - "--experimental_allow_proto3_optional", + opts = "--python_gapic_opt=" + opts += ",".join(other_opts + (f"{template_opt}",)) + cmd_tup = ( + f"protoc", + f"--experimental_allow_proto3_optional", f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", + opts, f"--python_gapic_out={tmp_dir}", - "google/showcase/v1beta1/echo.proto", - "google/showcase/v1beta1/identity.proto", - "google/showcase/v1beta1/messaging.proto", + f"google/showcase/v1beta1/echo.proto", + f"google/showcase/v1beta1/identity.proto", + f"google/showcase/v1beta1/messaging.proto", + ) + session.run( + *cmd_tup, external=True, ) @@ -108,20 +112,27 @@ def showcase_library( @nox.session(python="3.8") def showcase( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), + env: typing.Optional[typing.Dict[str, str]] = None, ): """Run the Showcase test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): session.install("mock", "pytest", "pytest-asyncio") session.run( - "py.test", "--quiet", *(session.posargs or [path.join("tests", "system")]) + "py.test", "--quiet", *(session.posargs or [path.join("tests", "system")]), + env=env, ) @nox.session(python="3.8") def showcase_mtls( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), + env: typing.Optional[typing.Dict[str, str]] = None, ): """Run the Showcase mtls test suite.""" @@ -132,19 +143,30 @@ def showcase_mtls( "--quiet", "--mtls", *(session.posargs or [path.join("tests", "system")]), + env=env, ) @nox.session(python="3.8") def showcase_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") - showcase(session, 
templates=templates, other_opts=("old-naming",)) + showcase( + session, + templates=templates, + other_opts=("old-naming",), + env={"GAPIC_PYTHON_ASYNC": "False"}, + ) @nox.session(python="3.8") def showcase_mtls_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") - showcase_mtls(session, templates=templates, other_opts=("old-naming",)) + showcase_mtls( + session, + templates=templates, + other_opts=("old-naming",), + env={"GAPIC_PYTHON_ASYNC": "False"}, + ) @nox.session(python=["3.6", "3.7", "3.8"]) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index db3742aa04a4..e01f596b2d96 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -13,32 +13,53 @@ # limitations under the License. import collections +import distutils +import grpc import mock import os import pytest -import asyncio import google.api_core.client_options as ClientOptions from google.auth import credentials -from google.showcase import EchoClient, EchoAsyncClient -from google.showcase import IdentityClient, IdentityAsyncClient +from google.showcase import EchoClient +from google.showcase import IdentityClient from google.showcase import MessagingClient -import grpc -from grpc.experimental import aio +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + from grpc.experimental import aio + import asyncio + from google.showcase import EchoAsyncClient + from google.showcase import IdentityAsyncClient + + @pytest.fixture + def async_echo(use_mtls, event_loop): + return construct_client( + EchoAsyncClient, + use_mtls, + transport="grpc_asyncio", + channel_creator=aio.insecure_channel + ) -_test_event_loop = asyncio.new_event_loop() + @pytest.fixture + def async_identity(use_mtls, event_loop): + return construct_client( + IdentityAsyncClient, + use_mtls, + transport="grpc_asyncio", + 
channel_creator=aio.insecure_channel + ) -# NOTE(lidiz) We must override the default event_loop fixture from -# pytest-asyncio. pytest fixture frees resources once there isn't any reference -# to it. So, the event loop might close before tests finishes. In the -# customized version, we don't close the event loop. + _test_event_loop = asyncio.new_event_loop() + # NOTE(lidiz) We must override the default event_loop fixture from + # pytest-asyncio. pytest fixture frees resources once there isn't any reference + # to it. So, the event loop might close before tests finishes. In the + # customized version, we don't close the event loop. -@pytest.fixture -def event_loop(): - asyncio.set_event_loop(_test_event_loop) - return asyncio.get_event_loop() + @pytest.fixture + def event_loop(): + asyncio.set_event_loop(_test_event_loop) + return asyncio.get_event_loop() dir = os.path.dirname(__file__) @@ -99,16 +120,6 @@ def echo(use_mtls): return construct_client(EchoClient, use_mtls) -@pytest.fixture -def async_echo(use_mtls, event_loop): - return construct_client( - EchoAsyncClient, - use_mtls, - transport="grpc_asyncio", - channel_creator=aio.insecure_channel - ) - - @pytest.fixture def identity(): transport = IdentityClient.get_transport_class('grpc')( @@ -117,16 +128,6 @@ def identity(): return IdentityClient(transport=transport) -@pytest.fixture -def async_identity(use_mtls, event_loop): - return construct_client( - IdentityAsyncClient, - use_mtls, - transport="grpc_asyncio", - channel_creator=aio.insecure_channel - ) - - @pytest.fixture def identity(use_mtls): return construct_client(IdentityClient, use_mtls) diff --git a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py index a4578a168ade..6dbd3af3960f 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -12,10 +12,12 @@ # See the License for the specific language governing 
permissions and # limitations under the License. +import distutils +import os import pytest from datetime import datetime, timedelta, timezone -from google import showcase_v1beta1 +from google import showcase def test_lro(echo): @@ -26,18 +28,20 @@ def test_lro(echo): }} ) response = future.result() - assert isinstance(response, showcase_v1beta1.WaitResponse) + assert isinstance(response, showcase.WaitResponse) assert response.content.endswith('the snails...eventually.') -@pytest.mark.asyncio -async def test_lro_async(async_echo): - future = await async_echo.wait({ - 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), - 'success': { - 'content': 'The hail in Wales falls mainly on the snails...eventually.' - }} - ) - response = await future.result() - assert isinstance(response, showcase_v1beta1.WaitResponse) - assert response.content.endswith('the snails...eventually.') +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + + @pytest.mark.asyncio + async def test_lro_async(async_echo): + future = await async_echo.wait({ + 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), + 'success': { + 'content': 'The hail in Wales falls mainly on the snails...eventually.' + }} + ) + response = await future.result() + assert isinstance(response, showcase.WaitResponse) + assert response.content.endswith('the snails...eventually.') diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index f77e819986c2..f9da07948c10 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import distutils import logging +import os import pytest -import asyncio import threading from google import showcase @@ -77,129 +78,124 @@ def test_stream_stream_passing_dict(echo): assert responses.trailing_metadata() == metadata -@pytest.mark.asyncio -async def test_async_unary_stream_reader(async_echo): - content = 'The hail in Wales falls mainly on the snails.' - call = await async_echo.expand({ - 'content': content, - }, metadata=metadata) - - # Consume the response and ensure it matches what we expect. - # with pytest.raises(exceptions.NotFound) as exc: - for ground_truth in content.split(' '): - response = await call.read() - assert response.content == ground_truth - assert ground_truth == 'snails.' - - trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata - - -@pytest.mark.asyncio -async def test_async_unary_stream_async_generator(async_echo): - content = 'The hail in Wales falls mainly on the snails.' - call = await async_echo.expand({ - 'content': content, - }, metadata=metadata) - - # Consume the response and ensure it matches what we expect. - # with pytest.raises(exceptions.NotFound) as exc: - tokens = iter(content.split(' ')) - async for response in call: - ground_truth = next(tokens) - assert response.content == ground_truth - assert ground_truth == 'snails.' - - trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata - - -@pytest.mark.asyncio -async def test_async_stream_unary_iterable(async_echo): - requests = [] - requests.append(showcase.EchoRequest(content="hello")) - requests.append(showcase.EchoRequest(content="world!")) - - call = await async_echo.collect(requests) - response = await call - assert response.content == 'hello world!' 
- - -@pytest.mark.asyncio -async def test_async_stream_unary_async_generator(async_echo): - - async def async_generator(): - yield showcase.EchoRequest(content="hello") - yield showcase.EchoRequest(content="world!") - - call = await async_echo.collect(async_generator()) - response = await call - assert response.content == 'hello world!' - - -@pytest.mark.asyncio -async def test_async_stream_unary_writer(async_echo): - call = await async_echo.collect() - await call.write(showcase.EchoRequest(content="hello")) - await call.write(showcase.EchoRequest(content="world!")) - await call.done_writing() - - response = await call - assert response.content == 'hello world!' - - -@pytest.mark.asyncio -async def test_async_stream_unary_passing_dict(async_echo): - requests = [{'content': 'hello'}, {'content': 'world!'}] - call = await async_echo.collect(iter(requests)) - response = await call - assert response.content == 'hello world!' - - -@pytest.mark.asyncio -async def test_async_stream_stream_reader_writier(async_echo): - call = await async_echo.chat(metadata=metadata) - await call.write(showcase.EchoRequest(content="hello")) - await call.write(showcase.EchoRequest(content="world!")) - await call.done_writing() - - contents = [ - (await call.read()).content, - (await call.read()).content - ] - assert contents == ['hello', 'world!'] - - trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata - - -@pytest.mark.asyncio -async def test_async_stream_stream_async_generator(async_echo): - - async def async_generator(): - yield showcase.EchoRequest(content="hello") - yield showcase.EchoRequest(content="world!") - - call = await async_echo.chat(async_generator(), metadata=metadata) - - contents = [] - async for response in call: - contents.append(response.content) - assert contents == ['hello', 'world!'] - - trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata - - -@pytest.mark.asyncio -async def 
test_async_stream_stream_passing_dict(async_echo): - requests = [{'content': 'hello'}, {'content': 'world!'}] - call = await async_echo.chat(iter(requests), metadata=metadata) - - contents = [] - async for response in call: - contents.append(response.content) - assert contents == ['hello', 'world!'] - - trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + import asyncio + + @pytest.mark.asyncio + async def test_async_unary_stream_reader(async_echo): + content = 'The hail in Wales falls mainly on the snails.' + call = await async_echo.expand({ + 'content': content, + }, metadata=metadata) + + # Consume the response and ensure it matches what we expect. + # with pytest.raises(exceptions.NotFound) as exc: + for ground_truth in content.split(' '): + response = await call.read() + assert response.content == ground_truth + assert ground_truth == 'snails.' + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + @pytest.mark.asyncio + async def test_async_unary_stream_async_generator(async_echo): + content = 'The hail in Wales falls mainly on the snails.' + call = await async_echo.expand({ + 'content': content, + }, metadata=metadata) + + # Consume the response and ensure it matches what we expect. + # with pytest.raises(exceptions.NotFound) as exc: + tokens = iter(content.split(' ')) + async for response in call: + ground_truth = next(tokens) + assert response.content == ground_truth + assert ground_truth == 'snails.' 
+ + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + @pytest.mark.asyncio + async def test_async_stream_unary_iterable(async_echo): + requests = [] + requests.append(showcase.EchoRequest(content="hello")) + requests.append(showcase.EchoRequest(content="world!")) + + call = await async_echo.collect(requests) + response = await call + assert response.content == 'hello world!' + + @pytest.mark.asyncio + async def test_async_stream_unary_async_generator(async_echo): + + async def async_generator(): + yield showcase.EchoRequest(content="hello") + yield showcase.EchoRequest(content="world!") + + call = await async_echo.collect(async_generator()) + response = await call + assert response.content == 'hello world!' + + @pytest.mark.asyncio + async def test_async_stream_unary_writer(async_echo): + call = await async_echo.collect() + await call.write(showcase.EchoRequest(content="hello")) + await call.write(showcase.EchoRequest(content="world!")) + await call.done_writing() + + response = await call + assert response.content == 'hello world!' + + @pytest.mark.asyncio + async def test_async_stream_unary_passing_dict(async_echo): + requests = [{'content': 'hello'}, {'content': 'world!'}] + call = await async_echo.collect(iter(requests)) + response = await call + assert response.content == 'hello world!' 
+ + @pytest.mark.asyncio + async def test_async_stream_stream_reader_writier(async_echo): + call = await async_echo.chat(metadata=metadata) + await call.write(showcase.EchoRequest(content="hello")) + await call.write(showcase.EchoRequest(content="world!")) + await call.done_writing() + + contents = [ + (await call.read()).content, + (await call.read()).content + ] + assert contents == ['hello', 'world!'] + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + @pytest.mark.asyncio + async def test_async_stream_stream_async_generator(async_echo): + + async def async_generator(): + yield showcase.EchoRequest(content="hello") + yield showcase.EchoRequest(content="world!") + + call = await async_echo.chat(async_generator(), metadata=metadata) + + contents = [] + async for response in call: + contents.append(response.content) + assert contents == ['hello', 'world!'] + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata + + @pytest.mark.asyncio + async def test_async_stream_stream_passing_dict(async_echo): + requests = [{'content': 'hello'}, {'content': 'world!'}] + call = await async_echo.chat(iter(requests), metadata=metadata) + + contents = [] + async for response in call: + contents.append(response.content) + assert contents == ['hello', 'world!'] + + trailing_metadata = await call.trailing_metadata() + assert trailing_metadata == metadata diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index c1694d975e63..f8ca2f65276b 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import distutils +import os import pytest -import asyncio from google.api_core import exceptions from google.rpc import code_pb2 @@ -48,31 +49,32 @@ def test_unary_error(echo): assert exc.value.message == message -@pytest.mark.asyncio -async def test_async_unary_with_request_object(async_echo): - response = await async_echo.echo(showcase.EchoRequest( - content='The hail in Wales falls mainly on the snails.', - ), timeout=1) - assert response.content == 'The hail in Wales falls mainly on the snails.' - - -@pytest.mark.asyncio -async def test_async_unary_with_dict(async_echo): - response = await async_echo.echo({ - 'content': 'The hail in Wales falls mainly on the snails.', - }) - assert response.content == 'The hail in Wales falls mainly on the snails.' +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + import asyncio + @pytest.mark.asyncio + async def test_async_unary_with_request_object(async_echo): + response = await async_echo.echo(showcase.EchoRequest( + content='The hail in Wales falls mainly on the snails.', + ), timeout=1) + assert response.content == 'The hail in Wales falls mainly on the snails.' -@pytest.mark.asyncio -async def test_async_unary_error(async_echo): - message = 'Bad things! Bad things!' - with pytest.raises(exceptions.InvalidArgument) as exc: - await async_echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), - 'message': message, - }, + @pytest.mark.asyncio + async def test_async_unary_with_dict(async_echo): + response = await async_echo.echo({ + 'content': 'The hail in Wales falls mainly on the snails.', }) - assert exc.value.code == 400 - assert exc.value.message == message + assert response.content == 'The hail in Wales falls mainly on the snails.' + + @pytest.mark.asyncio + async def test_async_unary_error(async_echo): + message = 'Bad things! Bad things!' 
+ with pytest.raises(exceptions.InvalidArgument) as exc: + await async_echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), + 'message': message, + }, + }) + assert exc.value.code == 400 + assert exc.value.message == message diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index 8f53a6c01d15..eb195ea12896 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import distutils +import os import pytest from google import showcase @@ -47,39 +49,39 @@ def test_pagination_pages(echo): for i in text.split(' ')] -@pytest.mark.asyncio -async def test_pagination_async(async_echo): - text = 'The hail in Wales falls mainly on the snails.' - results = [] - async for i in await async_echo.paged_expand({ - 'content': text, - 'page_size': 3, - }): - results.append(i) - - assert len(results) == 9 - assert results == [showcase.EchoResponse(content=i) - for i in text.split(' ')] - - -@pytest.mark.asyncio -async def test_pagination_pages_async(async_echo): - text = "The hail in Wales falls mainly on the snails." - page_results = [] - async for page in (await async_echo.paged_expand({ - 'content': text, - 'page_size': 3, - })).pages: - page_results.append(page) - - assert len(page_results) == 3 - assert not page_results[-1].next_page_token - - # The monolithic surface uses a wrapper type that needs an explicit property - # for a 'raw_page': we need to duplicate that interface, even though the - # architecture is different. 
- assert page_results[0].raw_page is page_results[0] - - results = [r for p in page_results for r in p.responses] - assert results == [showcase.EchoResponse(content=i) - for i in text.split(' ')] +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + @pytest.mark.asyncio + async def test_pagination_async(async_echo): + text = 'The hail in Wales falls mainly on the snails.' + results = [] + async for i in await async_echo.paged_expand({ + 'content': text, + 'page_size': 3, + }): + results.append(i) + + assert len(results) == 9 + assert results == [showcase.EchoResponse(content=i) + for i in text.split(' ')] + + @pytest.mark.asyncio + async def test_pagination_pages_async(async_echo): + text = "The hail in Wales falls mainly on the snails." + page_results = [] + async for page in (await async_echo.paged_expand({ + 'content': text, + 'page_size': 3, + })).pages: + page_results.append(page) + + assert len(page_results) == 3 + assert not page_results[-1].next_page_token + + # The monolithic surface uses a wrapper type that needs an explicit property + # for a 'raw_page': we need to duplicate that interface, even though the + # architecture is different. + assert page_results[0].raw_page is page_results[0] + + results = [r for p in page_results for r in p.responses] + assert results == [showcase.EchoResponse(content=i) + for i in text.split(' ')] diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 5372da4b6bc8..85bafe561af4 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import distutils +import os import pytest @@ -76,43 +78,43 @@ def test_path_parsing(messaging): assert expected == actual -@pytest.mark.asyncio -async def test_crud_with_request_async(async_identity): - pager = await async_identity.list_users() - count = len(pager.users) - user = await async_identity.create_user(request={'user': { - 'display_name': 'Guido van Rossum', - 'email': 'guido@guido.fake', - }}) - try: - assert user.display_name == 'Guido van Rossum' - assert user.email == 'guido@guido.fake' - pager = (await async_identity.list_users()) - assert len(pager.users) == count + 1 - assert (await async_identity.get_user({ - 'name': user.name - })).display_name == 'Guido van Rossum' - finally: - await async_identity.delete_user({'name': user.name}) - - -@pytest.mark.asyncio -async def test_crud_flattened_async(async_identity): - count = len((await async_identity.list_users()).users) - user = await async_identity.create_user( - display_name='Monty Python', - email='monty@python.org', - ) - try: - assert user.display_name == 'Monty Python' - assert user.email == 'monty@python.org' - assert len((await async_identity.list_users()).users) == count + 1 - assert (await async_identity.get_user(name=user.name)).display_name == 'Monty Python' - finally: - await async_identity.delete_user(name=user.name) - - -def test_path_methods_async(async_identity): - expected = "users/bdfl" - actual = async_identity.user_path("bdfl") - assert expected == actual +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + + @pytest.mark.asyncio + async def test_crud_with_request_async(async_identity): + pager = await async_identity.list_users() + count = len(pager.users) + user = await async_identity.create_user(request={'user': { + 'display_name': 'Guido van Rossum', + 'email': 'guido@guido.fake', + }}) + try: + assert user.display_name == 'Guido van Rossum' + assert user.email == 'guido@guido.fake' + pager = (await async_identity.list_users()) + assert 
len(pager.users) == count + 1 + assert (await async_identity.get_user({ + 'name': user.name + })).display_name == 'Guido van Rossum' + finally: + await async_identity.delete_user({'name': user.name}) + + @pytest.mark.asyncio + async def test_crud_flattened_async(async_identity): + count = len((await async_identity.list_users()).users) + user = await async_identity.create_user( + display_name='Monty Python', + email='monty@python.org', + ) + try: + assert user.display_name == 'Monty Python' + assert user.email == 'monty@python.org' + assert len((await async_identity.list_users()).users) == count + 1 + assert (await async_identity.get_user(name=user.name)).display_name == 'Monty Python' + finally: + await async_identity.delete_user(name=user.name) + + def test_path_methods_async(async_identity): + expected = "users/bdfl" + actual = async_identity.user_path("bdfl") + assert expected == actual diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 0bc70f9f8e8f..97e0c60beb10 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import distutils +import os import pytest from google.api_core import exceptions @@ -28,12 +30,14 @@ def test_retry_bubble(echo): }) -@pytest.mark.asyncio -async def test_retry_bubble_async(async_echo): - with pytest.raises(exceptions.DeadlineExceeded): - await async_echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), - 'message': 'This took longer than you said it should.', - }, - }) +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + + @pytest.mark.asyncio + async def test_retry_bubble_async(async_echo): + with pytest.raises(exceptions.DeadlineExceeded): + await async_echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), + 'message': 'This took longer than you said it should.', + }, + }) From 364c7c383f75e3d3b60e1ce2c848f3848e24bb6e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 16 Oct 2020 14:27:55 -0700 Subject: [PATCH 0404/1339] chore: release 0.35.4 (#667) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 22cc6e7bf670..0ed83d030514 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.3...v0.35.4) (2020-10-16) + + +### Bug Fixes + +* minor typo in ads template ([#664](https://www.github.com/googleapis/gapic-generator-python/issues/664)) ([816f965](https://www.github.com/googleapis/gapic-generator-python/commit/816f965c8560bf65d8043bd67672c660a2b1300b)) + ### [0.35.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.2...v0.35.3) (2020-10-13) diff --git 
a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f3a1830fd045..987b54e02c6a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.3" +version = "0.35.4" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 2eb36f9e4a94c5b6a4ae769e3bdabc999e0cf577 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 19 Oct 2020 14:47:22 -0700 Subject: [PATCH 0405/1339] fix: numerous small fixes to allow bigtable-admin (#660) Includes: * tweaked logic around defining recursive message types * more sophisticated logic for generating unit tests using recursive message types * flattened map-y fields are handled properly * fixed a corner case where a method has a third-party request object and flattened fields --- .../%sub/services/%service/client.py.j2 | 14 ++++++--- .../%name_%version/%sub/test_%service.py.j2 | 4 +++ .../gapic-generator/gapic/schema/metadata.py | 15 ++++++---- .../gapic-generator/gapic/schema/wrappers.py | 13 ++++---- .../%sub/services/%service/async_client.py.j2 | 19 ++++++++---- .../%sub/services/%service/client.py.j2 | 16 ++++++---- .../%name_%version/%sub/test_%service.py.j2 | 17 ++++++++--- .../tests/unit/schema/test_metadata.py | 2 +- .../tests/unit/schema/wrappers/test_field.py | 30 +++++++++++++++++++ 9 files changed, 96 insertions(+), 34 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 67d5de4680ca..5f12de323c9d 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -333,7 +333,7 @@ class 
{{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}(**request) {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} elif not request: - request = {{ method.input.ident }}() + request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) {% endif -%}{# Cross-package req and flattened fields #} {%- else %} # Minor optimization to avoid making a copy if the user passes @@ -344,7 +344,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}(request) {% endif %} {# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields -%} + {% if method.flattened_fields and method.input.ident.package == method.ident.package -%} # If we have keyword arguments corresponding to fields on the # request, apply these. 
{% endif -%} @@ -352,8 +352,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} - {# They can be _extended_, however -#} - {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + {# Map-y fields can be _updated_, however #} + {%- for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} + + if {{ field.name }}: + request.{{ key }}.update({{ field.name }}) + {%- endfor %} + {# And list-y fields can be _extended_ -#} + {%- for key, field in method.flattened_fields.items() if field.repeated and not field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) {%- endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 810a6f2e927e..14687174cdb0 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -297,6 +297,10 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m for message in response: assert isinstance(message, {{ method.output.ident }}) {% else -%} + {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {# Cheeser assertion to force code coverage for bad paginated methods #} + assert response.raw_page is response + {% endif %} assert isinstance(response, {{ method.client_output.ident }}) {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} 
{% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index b801bb7603bf..4e78119f9116 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -242,12 +242,15 @@ def rel(self, address: 'Address') -> str: # It is possible that a field references a message that has # not yet been declared. If so, send its name enclosed in quotes # (a string) instead. - if self.module_path > address.module_path or self == address: - return f"'{'.'.join(self.parent + (self.name,))}'" - - # This is a message in the same module, already declared. - # Send its name. - return '.'.join(self.parent + (self.name,)) + # + # Note: this is a conservative construction; it generates a stringy + # identifier all the time when it may be possible to use a regular + # module lookup. + # On the other hand, there's no reason _not_ to use a stringy + # identifier. It is guaranteed to work all the time because + # it bumps name resolution until a time when all types in a module + # are guaranteed to be fully defined. + return f"'{'.'.join(self.parent + (self.name,))}'" # Return the usual `module.Name`. return str(self) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 1b0db83e5f59..62c36270bf32 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -55,9 +55,6 @@ class Field: ) oneof: Optional[str] = None - # Arbitrary cap set via heuristic rule of thumb. 
- MAX_MOCK_DEPTH: int = 20 - def __getattr__(self, name): return getattr(self.field_pb, name) @@ -93,17 +90,16 @@ def map(self) -> bool: @utils.cached_property def mock_value(self) -> str: - depth = 0 + visited_fields: Set["Field"] = set() stack = [self] answer = "{}" while stack: expr = stack.pop() - answer = answer.format(expr.inner_mock(stack, depth)) - depth += 1 + answer = answer.format(expr.inner_mock(stack, visited_fields)) return answer - def inner_mock(self, stack, depth): + def inner_mock(self, stack, visited_fields): """Return a repr of a valid, usually truthy mock value.""" # For primitives, send a truthy value computed from the # field name. @@ -137,10 +133,11 @@ def inner_mock(self, stack, depth): and isinstance(self.type, MessageType) and len(self.type.fields) # Nested message types need to terminate eventually - and depth < self.MAX_MOCK_DEPTH + and self not in visited_fields ): sub = next(iter(self.type.fields.values())) stack.append(sub) + visited_fields.add(self) # Don't do the recursive rendering here, just set up # where the nested value should go with the double {}. answer = f'{self.type.ident}({sub.name}={{}})' diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 0f2e88700fda..9d5150d86980 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -169,7 +169,8 @@ class {{ service.async_client_name }}: {% if method.flattened_fields -%} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + has_flattened_params = any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]) + if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -181,23 +182,29 @@ class {{ service.async_client_name }}: request = {{ method.input.ident }}(**request) {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} elif not request: - request = {{ method.input.ident }}() + request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) {% endif -%}{# Cross-package req and flattened fields #} {%- else %} request = {{ method.input.ident }}(request) {% endif %} {# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields -%} + {% if method.flattened_fields and method.input.ident.package == method.ident.package -%} # If we have keyword arguments corresponding to fields on the # request, apply these. 
{% endif -%} - {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + {%- for key, field in method.flattened_fields.items() if not field.repeated and method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} - {# They can be _extended_, however -#} - {%- for key, field in method.flattened_fields.items() if (field.repeated and method.input.ident.package != method.ident.package) %} + {# Map-y fields can be _updated_, however #} + {%- for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} + + if {{ field.name }}: + request.{{ key }}.update({{ field.name }}) + {%- endfor %} + {# And list-y fields can be _extended_ -#} + {%- for key, field in method.flattened_fields.items() if field.repeated and not field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index c8040284d540..c3093aa1cf7e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -345,7 +345,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}(**request) {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} elif not request: - request = {{ method.input.ident }}() + request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name 
}}, {% endfor %}{% endif %}) {% endif -%}{# Cross-package req and flattened fields #} {%- else %} # Minor optimization to avoid making a copy if the user passes @@ -357,16 +357,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} {# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields -%} + {% if method.flattened_fields and method.input.ident.package == method.ident.package -%} # If we have keyword arguments corresponding to fields on the # request, apply these. {% endif -%} - {%- for key, field in method.flattened_fields.items() if not(field.repeated or method.input.ident.package != method.ident.package) %} + {%- for key, field in method.flattened_fields.items() if not field.repeated and method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} - {# They can be _extended_, however -#} - {%- for key, field in method.flattened_fields.items() if field.repeated %} + {# Map-y fields can be _updated_, however #} + {%- for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} + + if {{ field.name }}: + request.{{ key }}.update({{ field.name }}) + {%- endfor %} + {# And list-y fields can be _extended_ -#} + {%- for key, field in method.flattened_fields.items() if field.repeated and not field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) {%- endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 359b548a0b67..dd4fd637ecf1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -398,6 +398,10 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m for message in response: assert isinstance(message, {{ method.output.ident }}) {% else -%} + {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {# Cheeser assertion to force code coverage for bad paginated methods #} + assert response.raw_page is response + {% endif %} assert isinstance(response, {{ method.client_output.ident }}) {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} @@ -417,7 +421,7 @@ def test_{{ method.name|snake_case }}_from_dict(): @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio'): +async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): client = {{ service.async_client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -425,7 +429,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = {{ method.input.ident }}() + request = request_type() {% if method.client_streaming %} requests = [request] {% endif %} @@ -474,7 +478,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {% if method.client_streaming %} assert next(args[0]) == request {% else %} - assert args[0] == request + assert args[0] == {{ method.input.ident }}() {% endif %} # Establish that the response is the type that we expect. 
@@ -500,6 +504,11 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {% endif %} +@pytest.mark.asyncio +async def test_{{ method.name|snake_case }}_async_from_dict(): + await test_{{ method.name|snake_case }}_async(request_type=dict) + + {% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( @@ -592,7 +601,7 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): {% endif %} {% if method.ident.package != method.input.ident.package %} -def test_{{ method.name|snake_case }}_from_dict(): +def test_{{ method.name|snake_case }}_from_dict_foreign(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), ) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 62cd957cbdab..4be166bc1b5e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -70,7 +70,7 @@ def test_address_rel(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.rel( metadata.Address(package=('foo', 'bar'), module='baz'), - ) == 'Bacon' + ) == "'Bacon'" def test_address_rel_other(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index bba280b8f511..99f2edc9f72d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -19,6 +19,7 @@ from google.api import field_behavior_pb2 from google.protobuf import descriptor_pb2 +from gapic.schema import api from gapic.schema import metadata from gapic.schema import wrappers @@ -250,6 +251,35 @@ def test_mock_value_message(): assert field.mock_value == 'bogus.Message(foo=324)' +def 
test_mock_value_recursive(): + # The elaborate setup is an unfortunate requirement. + file_pb = descriptor_pb2.FileDescriptorProto( + name="turtle.proto", + package="animalia.chordata.v2", + message_type=( + descriptor_pb2.DescriptorProto( + # It's turtles all the way down ;) + name="Turtle", + field=( + descriptor_pb2.FieldDescriptorProto( + name="turtle", + type="TYPE_MESSAGE", + type_name=".animalia.chordata.v2.Turtle", + number=1, + ), + ), + ), + ), + ) + my_api = api.API.build([file_pb], package="animalia.chordata.v2") + turtle_field = my_api.messages["animalia.chordata.v2.Turtle"].fields["turtle"] + + # If not handled properly, this will run forever and eventually OOM. + actual = turtle_field.mock_value + expected = "ac_turtle.Turtle(turtle=ac_turtle.Turtle(turtle=turtle.Turtle(turtle=None)))" + assert actual == expected + + def test_field_name_kword_disambiguation(): from_field = make_field( name="from", From 3be57e3cb3be7329db1a8f7d044e58d3e1185b10 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 19 Oct 2020 21:58:04 +0000 Subject: [PATCH 0406/1339] chore: release 0.35.5 (#673) :robot: I have created a release \*beep\* \*boop\* --- ### [0.35.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.4...v0.35.5) (2020-10-19) ### Bug Fixes * numerous small fixes to allow bigtable-admin ([#660](https://www.github.com/googleapis/gapic-generator-python/issues/660)) ([09692c4](https://www.github.com/googleapis/gapic-generator-python/commit/09692c4e889ccde3b0ca31a5e8476c1679804beb)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0ed83d030514..f2a68fc5435c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.4...v0.35.5) (2020-10-19) + + +### Bug Fixes + +* numerous small fixes to allow bigtable-admin ([#660](https://www.github.com/googleapis/gapic-generator-python/issues/660)) ([09692c4](https://www.github.com/googleapis/gapic-generator-python/commit/09692c4e889ccde3b0ca31a5e8476c1679804beb)) + ### [0.35.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.3...v0.35.4) (2020-10-16) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 987b54e02c6a..64908e332c2b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.4" +version = "0.35.5" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 7f3f9c19ab3199ec16226f4c2236e109d6d00272 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 20 Oct 2020 21:42:03 +0200 Subject: [PATCH 0407/1339] chore(deps): update dependency google-api-core to v1.23.0 (#674) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | minor | `==1.22.4` -> `==1.23.0` | --- ### Release Notes
googleapis/python-api-core ### [`v1.23.0`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1230-httpswwwgithubcomgoogleapispython-api-corecomparev1224v1230-2020-10-16) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.22.4...v1.23.0) ##### Features - **api-core:** pass retry from result() to done() ([#​9](https://www.github.com/googleapis/python-api-core/issues/9)) ([6623b31](https://www.github.com/googleapis/python-api-core/commit/6623b31a2040b834be808d711fa397dc428f1837)) ##### Bug Fixes - map LRO errors to library exception types ([#​86](https://www.github.com/googleapis/python-api-core/issues/86)) ([a855339](https://www.github.com/googleapis/python-api-core/commit/a85533903c57be4809fe76435e298409e0903931)), closes [#​15](https://www.github.com/googleapis/python-api-core/issues/15) - harden install to use full paths, and windows separators on windows ([#​88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34)) - update out-of-date comment in exceptions.py ([#​93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265)) ##### [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05) ##### Bug Fixes - use version.py instead of pkg_resources.get_distribution ([#​80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47)) ##### [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02) ##### Bug Fixes - **deps:** require six >= 1.13.0 ([#​78](https://www.github.com/googleapis/python-api-core/issues/78)) 
([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31) ##### [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03) ##### Bug Fixes - only add quota project id if supported ([#​75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62)) ##### [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12) ##### Documentation - fix spelling errors for amount in retry ([#​69](https://www.github.com/googleapis/python-api-core/issues/69)) ([7bb713d](https://www.github.com/googleapis/python-api-core/commit/7bb713d13b1fe3cca58263f5e499136a84abc456))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2422d03832ce..fb7cad03af9e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.22.4 +google-api-core==1.23.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From cbcd7c3ff1ec2e30c089deccf76fee20db3ab68e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 20 Oct 2020 15:18:05 -0700 Subject: [PATCH 0408/1339] fix: unknown resources do not cause a generator crash (#675) Some resource references do not map to actual known resource types, e.g. from the Logging API string destination = 3 [ (google.api.resource_reference) = { type: "*" } ]; It's easiest to just ignore invalid resource types. 
--- .../gapic-generator/gapic/schema/wrappers.py | 11 +++++---- .../unit/schema/wrappers/test_service.py | 24 +++++++++++++++++++ 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 62c36270bf32..7bad6adb3ba2 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1064,11 +1064,12 @@ def gen_indirect_resources_used(message): resource = field.options.Extensions[ resource_pb2.resource_reference] resource_type = resource.type or resource.child_type - # The common resources are defined (and rendered) explicitly - # by separate logic, and the resource definitions are never - # visible in any of the APIs file descriptor protos. - if resource_type and resource_type not in self.common_resources: - yield self.visible_resources[resource_type] + # The resource may not be visible if the resource type is one of + # the common_resources (see the class var in class definition) + # or if it's something unhelpful like '*'. + resource = self.visible_resources.get(resource_type) + if resource: + yield resource return frozenset( msg diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index d939493328ab..c4c8d9b83804 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -248,6 +248,30 @@ def test_resource_messages(): assert expected == actual +def test_service_unknown_resource_reference(): + # This happens occasionally. 
+ opts = descriptor_pb2.FieldOptions() + res_ref = opts.Extensions[resource_pb2.resource_reference] + res_ref.type = "*" + squid_request = make_message( + "CreateSquid", + fields=( + make_field("parent", type="TYPE_STRING", options=opts,), + ), + ) + squid_service = make_service( + "SquidService", + methods=( + make_method( + "CreateSquid", + input_message=squid_request, + ), + ), + ) + + assert not squid_service.resource_messages + + def test_service_any_streaming(): for client, server in itertools.product((True, False), (True, False)): service = make_service( From e6831fe81b99c9bf1ba9e09c263c05ef2b36f9a2 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Oct 2020 22:24:05 +0000 Subject: [PATCH 0409/1339] chore: release 0.35.6 (#676) :robot: I have created a release \*beep\* \*boop\* --- ### [0.35.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.5...v0.35.6) (2020-10-20) ### Bug Fixes * unknown resources do not cause a generator crash ([#675](https://www.github.com/googleapis/gapic-generator-python/issues/675)) ([2d23d7d](https://www.github.com/googleapis/gapic-generator-python/commit/2d23d7d202099ccf145c01aeb9a03ae46b4e1b00)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f2a68fc5435c..083773979909 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.5...v0.35.6) (2020-10-20) + + +### Bug Fixes + +* unknown resources do not cause a generator crash ([#675](https://www.github.com/googleapis/gapic-generator-python/issues/675)) ([2d23d7d](https://www.github.com/googleapis/gapic-generator-python/commit/2d23d7d202099ccf145c01aeb9a03ae46b4e1b00)) + ### [0.35.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.4...v0.35.5) (2020-10-19) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 64908e332c2b..e727b5dde165 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.5" +version = "0.35.6" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From f6bc1f11b4bbdf58aa47241e6bb2bbb45c16cc4a Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Tue, 20 Oct 2020 17:02:13 -0700 Subject: [PATCH 0410/1339] fix: expose ssl credentials from transport (#677) Expose ssl credentials from transport. This is used to fix pubsub client [mtls issue](https://github.com/googleapis/python-pubsub/issues/224). Pubsub client creates its own transport so mtls is completely missing. The solution would be taking the ssl credentials from the auto-generated client's transport and passing it when the handwritten client creates the transport. 
--- .../%version/%sub/services/%service/transports/grpc.py.j2 | 4 ++++ .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 ++ .../%sub/services/%service/transports/grpc.py.j2 | 4 ++++ .../%sub/services/%service/transports/grpc_asyncio.py.j2 | 4 ++++ .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 3 +++ 5 files changed, 17 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 6995549378df..6438991b57a0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -88,6 +88,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -95,6 +97,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) @@ -122,6 +125,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 14687174cdb0..5dc0f26babe1 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -708,6 +708,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}]) @@ -749,6 +750,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }},]) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 47eaffeb19a4..e2c68c483d0e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -96,6 +96,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -103,6 +105,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) @@ -130,6 +133,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index aae858bf79b7..6399f1f1cd93 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -140,6 +140,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
@@ -147,6 +149,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) @@ -174,6 +177,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index dd4fd637ecf1..5882e95f785b 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1184,6 +1184,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): @@ -1196,6 +1197,7 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) @@ -1237,6 +1239,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred 
@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) From 1aaf75a2bbd5ac370ba70f684cd703b0d875a13d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Oct 2020 22:20:18 -0700 Subject: [PATCH 0411/1339] chore: release 0.35.7 (#678) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 083773979909..b8ff722558fa 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.6...v0.35.7) (2020-10-21) + + +### Bug Fixes + +* expose ssl credentials from transport ([#677](https://www.github.com/googleapis/gapic-generator-python/issues/677)) ([da0ee3e](https://www.github.com/googleapis/gapic-generator-python/commit/da0ee3eab4f80bf3d70fa5e06a2dcef7e1d4d22e)) + ### [0.35.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.5...v0.35.6) (2020-10-20) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e727b5dde165..f39769ddf9ba 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.6" +version = "0.35.7" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 64de514e2a5e016dbe5f7c8d37de31ec8b4fbac6 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 21 Oct 2020 11:02:03 -0700 Subject: [PATCH 0412/1339] docs: generated message types reference proto-plus (#680) Multiple 
issues have been filed indicating that json marshalling has been broken and the fix is not obvious. This will hopefully be ameliorated by indicating the inheritance from proto.Message, with a link to the appropriate documentation. --- .../ads-templates/scripts/fixup_%name_%version_keywords.py.j2 | 1 + .../gapic/templates/docs/%name_%version/types.rst.j2 | 1 + packages/gapic-generator/gapic/templates/docs/conf.py.j2 | 1 + .../gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 | 1 + packages/gapic-generator/noxfile.py | 2 +- 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 index 99681ed99225..9c4afb35c87a 100644 --- a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 {% extends '_base.py.j2' %} {% block content %} import argparse diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 index 640641f2117d..a77df003f7bd 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 @@ -3,3 +3,4 @@ Types for {{ api.naming.long_name }} {{ api.naming.version }} API .. 
automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: + :show-inheritance: diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index 1e827b37d96c..423c3ad08813 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -346,6 +346,7 @@ intersphinx_mapping = { "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), "grpc": ("https://grpc.io/grpc/python/", None), "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), } diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 index 6688025184e4..d040ba812676 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -1,3 +1,4 @@ +#! 
/usr/bin/env python3 {% extends '_base.py.j2' %} {% block content %} import argparse diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index ba283480e158..9c2462b815b5 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -229,7 +229,7 @@ def showcase_mypy_alternative_templates(session): showcase_mypy(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) -@nox.session(python="3.6") +@nox.session(python="3.8") def docs(session): """Build the docs.""" From 29d7e7c0f0e2108f081ea6163f86ef88322b568b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 21 Oct 2020 18:12:04 +0000 Subject: [PATCH 0413/1339] chore: release 0.35.8 (#681) :robot: I have created a release \*beep\* \*boop\* --- ### [0.35.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.7...v0.35.8) (2020-10-21) ### Documentation * generated message types reference proto-plus ([#680](https://www.github.com/googleapis/gapic-generator-python/issues/680)) ([23327b2](https://www.github.com/googleapis/gapic-generator-python/commit/23327b275fb5a3fefe6c47cb15b9d9ecb02aac1f)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b8ff722558fa..5f88fda9a831 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.7...v0.35.8) (2020-10-21) + + +### Documentation + +* generated message types reference proto-plus ([#680](https://www.github.com/googleapis/gapic-generator-python/issues/680)) ([23327b2](https://www.github.com/googleapis/gapic-generator-python/commit/23327b275fb5a3fefe6c47cb15b9d9ecb02aac1f)) + ### [0.35.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.6...v0.35.7) (2020-10-21) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f39769ddf9ba..4d4945d34bad 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.7" +version = "0.35.8" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From c44c63c1f66ef5c9a23877f9a5927a03266ef8bc Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 21 Oct 2020 13:44:10 -0700 Subject: [PATCH 0414/1339] chore: add test support for Python 3.9 (#682) Adds nox and CI targets for 3.9 --- packages/gapic-generator/.circleci/config.yml | 90 +++++++++++++++++++ packages/gapic-generator/noxfile.py | 8 +- 2 files changed, 94 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml index 9ae961fcee1b..77375976fc54 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -20,6 +20,10 @@ workflows: filters: tags: only: 
/^v\d+\.\d+\.\d+$/ + - unit-3.9: + filters: + tags: + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-3.6: requires: - unit-3.6 @@ -38,6 +42,12 @@ workflows: filters: tags: only: /^v\d+\.\d+\.\d+$/ + - showcase-unit-3.9: + requires: + - unit-3.9 + filters: + tags: + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-alternative-templates-3.7: requires: - unit-3.7 @@ -50,6 +60,12 @@ workflows: filters: tags: only: /^v\d+\.\d+\.\d+$/ + - showcase-unit-alternative-templates-3.9: + requires: + - unit-3.9 + filters: + tags: + only: /^v\d+\.\d+\.\d+$/ - showcase-unit-add-iam-methods: requires: - unit-3.8 @@ -75,6 +91,7 @@ workflows: - showcase-unit-3.6 - showcase-unit-3.7 - showcase-unit-3.8 + - showcase-unit-3.9 - showcase-mypy filters: tags: @@ -86,6 +103,7 @@ workflows: - showcase-unit-3.6 - showcase-unit-3.7 - showcase-unit-3.8 + - showcase-unit-3.9 - showcase-mypy filters: tags: @@ -96,6 +114,7 @@ workflows: - mypy - showcase-unit-alternative-templates-3.7 - showcase-unit-alternative-templates-3.8 + - showcase-unit-alternative-templates-3.9 - showcase-mypy-alternative-templates filters: tags: @@ -106,6 +125,7 @@ workflows: - mypy - showcase-unit-alternative-templates-3.7 - showcase-unit-alternative-templates-3.8 + - showcase-unit-alternative-templates-3.9 - showcase-mypy-alternative-templates filters: tags: @@ -416,6 +436,30 @@ jobs: - run: name: Run unit tests. command: nox -s showcase_unit-3.8 + showcase-unit-3.9: + docker: + - image: python:3.9-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip gcc + - run: + name: Install protoc 3.12.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. 
+ command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit-3.9 showcase-unit-alternative-templates-3.7: docker: - image: python:3.7-slim @@ -464,6 +508,30 @@ jobs: - run: name: Run unit tests. command: nox -s showcase_unit_alternative_templates-3.8 + showcase-unit-alternative-templates-3.9: + docker: + - image: python:3.9-slim + steps: + - checkout + - run: + name: Install system dependencies. + command: | + apt-get update + apt-get install -y curl pandoc unzip gcc + - run: + name: Install protoc 3.12.1. + command: | + mkdir -p /usr/src/protoc/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - run: + name: Install nox. + command: pip install nox + - run: + name: Run unit tests. + command: nox -s showcase_unit_alternative_templates-3.9 showcase-unit-add-iam-methods: docker: - image: python:3.8-slim @@ -602,6 +670,28 @@ jobs: name: Submit coverage data to codecov. command: codecov when: always + unit-3.9: + docker: + - image: python:3.9-slim + steps: + - checkout + - run: + name: Install pandoc + command: | + apt-get update + apt-get install -y pandoc gcc git + - run: + name: Install nox and codecov. + command: | + pip install nox + pip install codecov + - run: + name: Run unit tests. + command: nox -s unit-3.9 + - run: + name: Submit coverage data to codecov. 
+ command: codecov + when: always style-check: docker: - image: python:3.6-alpine diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 9c2462b815b5..37a2be048a12 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -26,7 +26,7 @@ ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") -@nox.session(python=["3.6", "3.7", "3.8"]) +@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) def unit(session): """Run the unit test suite.""" @@ -169,7 +169,7 @@ def showcase_mtls_alternative_templates(session): ) -@nox.session(python=["3.6", "3.7", "3.8"]) +@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) def showcase_unit( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): @@ -198,7 +198,7 @@ def showcase_unit( ) -@nox.session(python=["3.7", "3.8"]) +@nox.session(python=["3.7", "3.8", "3.9"]) def showcase_unit_alternative_templates(session): showcase_unit(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) @@ -250,7 +250,7 @@ def docs(session): ) -@nox.session(python=["3.7", "3.8"]) +@nox.session(python=["3.7", "3.8", "3.9"]) def mypy(session): """Perform typecheck analysis.""" From dceb2cba2703fd41f1fc48922373955f1bbc1865 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Thu, 22 Oct 2020 16:20:12 -0700 Subject: [PATCH 0415/1339] chore: update actools-python perm in sync settings (#683) --- packages/gapic-generator/.github/sync-repo-settings.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 86677eec40f9..b3fa3d000780 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -35,3 +35,5 @@ permissionRules: permission: admin - team: yoshi-python permission: push + - team: actools-python + permission: push From a10bdfd62cc5049b123593e453e9643de335b286 
Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 27 Oct 2020 10:26:08 -0700 Subject: [PATCH 0416/1339] perf: collisions don't contain reserved names by default (#684) The 'collisions' set does NOT contain RESERVED_NAMES; they are combined at runtime when needed. For a large real-world API, this results in an order of magnitude reduction in memory usage. I'm not joking: Google Ads v5 uses 2.45 GB peak before this change and 223 MB after. Also contains changes to add `__slots__` attributes to Metadata and Address. These are ancillary, optional, and open to negotiation. In the above scenario, they reduce memory usage from 223 MB to 177 MB. If other people feel that the reduction in readability does not warrant the reduction in memory usage I am absolutely open to dropping that particular commit. Includes other minor memory usage optimizations that collectively shave about 5 MB. --- packages/gapic-generator/gapic/schema/metadata.py | 7 ++----- packages/gapic-generator/gapic/schema/wrappers.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 4e78119f9116..cd96ab86c35b 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -49,9 +49,6 @@ class Address: ) collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) - def __post_init__(self): - super().__setattr__("collisions", self.collisions | RESERVED_NAMES) - def __eq__(self, other) -> bool: return all([getattr(self, i) == getattr(other, i) for i in ('name', 'module', 'module_path', 'package', 'parent')]) @@ -114,7 +111,7 @@ def module_alias(self) -> str: while still providing names that are fundamentally readable to users (albeit looking auto-generated).
""" - if self.module in self.collisions: + if self.module in self.collisions | RESERVED_NAMES: return '_'.join( ( ''.join( @@ -283,7 +280,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Address': ``Address`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, collisions=frozenset(collisions)) + return dataclasses.replace(self, collisions=collisions) @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 7bad6adb3ba2..b5ae6fd0e90f 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -314,13 +314,19 @@ def recursive_field_types(self) -> Sequence[ return tuple(types) @utils.cached_property - def recursive_fields(self) -> FrozenSet[Field]: - return frozenset(chain( + def recursive_resource_fields(self) -> FrozenSet[Field]: + all_fields = chain( self.fields.values(), (field for t in self.recursive_field_types if isinstance(t, MessageType) for field in t.fields.values()), - )) + ) + return frozenset( + f + for f in all_fields + if (f.options.Extensions[resource_pb2.resource_reference].type or + f.options.Extensions[resource_pb2.resource_reference].child_type) + ) @property def map(self) -> bool: @@ -1060,7 +1066,7 @@ def gen_resources(message): yield type_ def gen_indirect_resources_used(message): - for field in message.recursive_fields: + for field in message.recursive_resource_fields: resource = field.options.Extensions[ resource_pb2.resource_reference] resource_type = resource.type or resource.child_type From f575ef54016c4a3ab973939ce9440c08e20c289f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 27 Oct 2020 17:32:07 +0000 Subject: [PATCH 0417/1339] chore: release 0.35.9 (#686) :robot: I have created a release \*beep\* \*boop\* --- ### 
[0.35.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.8...v0.35.9) (2020-10-27) ### Performance Improvements * collisions don't contain reserved names by default ([#684](https://www.github.com/googleapis/gapic-generator-python/issues/684)) ([2ec6ea6](https://www.github.com/googleapis/gapic-generator-python/commit/2ec6ea6835256c0d7b252e035cf4eac1ff442647)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5f88fda9a831..410936c34a43 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.8...v0.35.9) (2020-10-27) + + +### Performance Improvements + +* collisions don't contain reserved names by default ([#684](https://www.github.com/googleapis/gapic-generator-python/issues/684)) ([2ec6ea6](https://www.github.com/googleapis/gapic-generator-python/commit/2ec6ea6835256c0d7b252e035cf4eac1ff442647)) + ### [0.35.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.7...v0.35.8) (2020-10-21) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 4d4945d34bad..c00816cedb2a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.8" +version = "0.35.9" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 50ef2a42c9e370af72678ebaea3d1de122db276f Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 9 Nov 2020 10:08:44 -0800 Subject: [PATCH 0418/1339] docs: fix a few typos (#690) --- 
packages/gapic-generator/docs/getting-started/bazel.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started/bazel.rst b/packages/gapic-generator/docs/getting-started/bazel.rst index 428b43ed957c..1e67bfa045c4 100644 --- a/packages/gapic-generator/docs/getting-started/bazel.rst +++ b/packages/gapic-generator/docs/getting-started/bazel.rst @@ -67,9 +67,9 @@ available in the ``google/cloud/documentai/v1beta2/`` subdirectory. Creating the Targets ~~~~~~~~~~~~~~~~~~~~ -To build somethign with bazel you need to create the corresponding tagets in -your ``BUIDL.bazel`` file. You can use the Python section of the Document AI -`BUIDL.bazel`_ file as an example: +To build something with bazel you need to create the corresponding targets in +your ``BUILD.bazel`` file. You can use the Python section of the Document AI +`BUILD.bazel`_ file as an example: .. code-block:: python @@ -94,7 +94,7 @@ your ``BUIDL.bazel`` file. You can use the Python section of the Document AI ":documentai_py_gapic", ], ) -.. _BUIDL.bazel: https://github.com/googleapis/googleapis/blob/master/google/cloud/documentai/v1beta2/BUILD.bazel +.. 
_BUILD.bazel: https://github.com/googleapis/googleapis/blob/master/google/cloud/documentai/v1beta2/BUILD.bazel Compiling an API ~~~~~~~~~~~~~~~~ From fe4ae46b02ef57a2b8d7dc297e59e534e482c2ae Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Nov 2020 10:25:21 -0800 Subject: [PATCH 0419/1339] chore: release 0.35.10 (#691) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 410936c34a43..77ea9b7ad9a0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.35.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.9...v0.35.10) (2020-11-09) + + +### Documentation + +* fix a few typos ([#690](https://www.github.com/googleapis/gapic-generator-python/issues/690)) ([2716838](https://www.github.com/googleapis/gapic-generator-python/commit/2716838fb739c9350eee2c95b5cf207c4d83423d)) + ### [0.35.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.8...v0.35.9) (2020-10-27) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index c00816cedb2a..16fef93d78c5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.9" +version = "0.35.10" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From d3c00ba439c3dffafa76c00b4f25be588e61b3e6 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 12 Nov 2020 11:41:12 -0700 Subject: [PATCH 0420/1339] fix: add enums to types/__init__.py (#695) 
--- .../%name_%version/%sub/types/__init__.py.j2 | 21 +++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index 4ebdec8677e5..7b25899bc5fe 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -1,13 +1,22 @@ {% extends '_base.py.j2' %} {% block content %} -{% for p in api.protos.values() if p.file_to_generate and p.messages -%} -from .{{p.module_name }} import ({% for m in p.messages.values() %}{{ m.name }}, {% endfor %}) -{% endfor %} +{%- for proto in api.protos.values() if proto.file_to_generate and proto.messages %} +from .{{proto.module_name }} import ( + {%- for message in proto.messages.values() %} + {{message.name }}, {% endfor %} + {%- for enum in proto.enums.values() %} + {{ enum.name }}, {% endfor %} +){% endfor %} __all__ = ( - {%- for p in api.protos.values() if p.file_to_generate %}{% for m in p.messages.values() %} - '{{ m.name }}', - {%- endfor %}{% endfor %} + {%- for proto in api.protos.values() if proto.file_to_generate %} + {%- for message in proto.messages.values() %} + '{{ message.name }}', + {%- endfor -%} + {%- for enum in proto.enums.values() %} + '{{ enum.name }}', + {%- endfor -%} + {%- endfor %} ) {% endblock %} From 0bc42817dae179347b0291eae432f414beb220c1 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Thu, 12 Nov 2020 10:50:10 -0800 Subject: [PATCH 0421/1339] fix: update protobuf version [gapic-generator-python] (#696) --- packages/gapic-generator/repositories.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 9af2d302de9f..03e0e1cb64b7 100644 --- 
a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -16,7 +16,7 @@ def gapic_generator_python(): requirements = "@gapic_generator_python//:requirements.txt", ) - _protobuf_version = "3.11.2" + _protobuf_version = "3.13.0" _protobuf_version_in_link = "v%s" % _protobuf_version _maybe( http_archive, From 959f7e91d0fe9eb78091d1b609f0f316d9b232ff Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 12 Nov 2020 18:56:19 +0000 Subject: [PATCH 0422/1339] chore: release 0.35.11 (#697) :robot: I have created a release \*beep\* \*boop\* --- ### [0.35.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.10...v0.35.11) (2020-11-12) ### Bug Fixes * add enums to types/__init__.py ([#695](https://www.github.com/googleapis/gapic-generator-python/issues/695)) ([e1d4a4a](https://www.github.com/googleapis/gapic-generator-python/commit/e1d4a4ae768a631f6e6dc28f2acfde8be8dc4a8f)) * update protobuf version [gapic-generator-python] ([#696](https://www.github.com/googleapis/gapic-generator-python/issues/696)) ([ea3e519](https://www.github.com/googleapis/gapic-generator-python/commit/ea3e5198862881f5b142638df6ea604654f81f82)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 77ea9b7ad9a0..c9553df4bf3d 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.35.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.10...v0.35.11) (2020-11-12) + + +### Bug Fixes + +* add enums to types/__init__.py ([#695](https://www.github.com/googleapis/gapic-generator-python/issues/695)) ([e1d4a4a](https://www.github.com/googleapis/gapic-generator-python/commit/e1d4a4ae768a631f6e6dc28f2acfde8be8dc4a8f)) +* update protobuf version [gapic-generator-python] ([#696](https://www.github.com/googleapis/gapic-generator-python/issues/696)) ([ea3e519](https://www.github.com/googleapis/gapic-generator-python/commit/ea3e5198862881f5b142638df6ea604654f81f82)) + ### [0.35.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.9...v0.35.10) (2020-11-09) From cec83bd5392de8e98e1d777477f6176e8bdc50ed Mon Sep 17 00:00:00 2001 From: yon-mg <71726126+yon-mg@users.noreply.github.com> Date: Fri, 13 Nov 2020 22:59:54 -0800 Subject: [PATCH 0423/1339] feat: add rest transport generation for clients with optional transport flag (#688) * feat: add rest transport generation for clients * feat: add rest transport generation for clients * feat: add transport flag * refactor: moved template logic outside * fix: small fixes in transport option logic * test: added unit test for transport flag * test: add unit test for http option method * test: add unit test for http option method branch * fix: fix import paths * fix: style check issues * fix: more style check issues * fix: addressing pr reviews * fix: typo in test_method * fix: style check fixes --- .../%namespace/%name/%version/__init__.py.j2 | 1 + .../%namespace/%name/__init__.py.j2 | 1 + .../gapic/generator/generator.py | 12 +- 
.../gapic-generator/gapic/schema/wrappers.py | 36 ++++ .../%sub/services/%service/async_client.py.j2 | 1 + .../%sub/services/%service/client.py.j2 | 10 + .../%service/transports/__init__.py.j2 | 17 +- .../services/%service/transports/grpc.py.j2 | 11 +- .../%service/transports/grpc_asyncio.py.j2 | 9 +- .../services/%service/transports/rest.py.j2 | 175 ++++++++++++++++++ .../gapic-generator/gapic/utils/options.py | 7 + .../tests/unit/generator/test_generator.py | 37 ++++ .../tests/unit/schema/wrappers/test_method.py | 30 +++ .../unit/schema/wrappers/test_service.py | 1 + 14 files changed, 338 insertions(+), 10 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index 749a408c4235..aa12751852a4 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -20,6 +20,7 @@ _lazy_type_to_package_map = { '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', {%- endfor %} + {# TODO(yon-mg): add rest transport service once I know what this is #} # Client classes and transports {%- for service in api.services.values() %} '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 index 749a408c4235..aa12751852a4 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -20,6 +20,7 @@ 
_lazy_type_to_package_map = { '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', {%- endfor %} + {# TODO(yon-mg): add rest transport service once I know what this is #} # Client classes and transports {%- for service in api.services.values() %} '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 5bd8a4f3c928..45b204ce833e 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -272,8 +272,11 @@ def _render_template( if "%service" in template_name: for service in api_schema.services.values(): if ( - skip_subpackages - and service.meta.address.subpackage != api_schema.subpackage_view + (skip_subpackages + and service.meta.address.subpackage != api_schema.subpackage_view) + or + ('transport' in template_name + and not self._is_desired_transport(template_name, opts)) ): continue @@ -293,6 +296,11 @@ def _render_template( template_name, api_schema=api_schema, opts=opts)) return answer + def _is_desired_transport(self, template_name: str, opts: Options) -> bool: + """Returns true if template name contains a desired transport""" + desired_transports = ['__init__', 'base'] + opts.transport + return any(transport in template_name for transport in desired_transports) + def _get_file( self, template_name: str, diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index b5ae6fd0e90f..664f240ffaab 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -719,6 +719,8 @@ def _client_output(self, enable_asyncio: bool): # Return the usual output. return self.output + # TODO(yon-mg): remove or rewrite: don't think it performs as intended + # e.g. 
doesn't work with basic case of gRPC transcoding @property def field_headers(self) -> Sequence[str]: """Return the field headers defined for this method.""" @@ -737,6 +739,35 @@ def field_headers(self) -> Sequence[str]: return next((tuple(pattern.findall(verb)) for verb in potential_verbs if verb), ()) + @property + def http_opt(self) -> Optional[Dict[str, str]]: + """Return the http option for this method. + + e.g. {'verb': 'post' + 'url': '/some/path' + 'body': '*'} + + """ + http: List[Tuple[descriptor_pb2.FieldDescriptorProto, str]] + http = self.options.Extensions[annotations_pb2.http].ListFields() + + if len(http) < 1: + return None + + http_method = http[0] + answer: Dict[str, str] = { + 'verb': http_method[0].name, + 'url': http_method[1], + } + if len(http) > 1: + body_spec = http[1] + answer[body_spec[0].name] = body_spec[1] + + # TODO(yon-mg): handle nested fields & fields past body i.e. 'additional bindings' + # TODO(yon-mg): enums for http verbs? + return answer + + # TODO(yon-mg): refactor as there may be more than one method signature @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: """Return the signature defined for this method.""" @@ -786,6 +817,7 @@ def grpc_stub_type(self) -> str: server='stream' if self.server_streaming else 'unary', ) + # TODO(yon-mg): figure out why idempotent is reliant on http annotation @utils.cached_property def idempotent(self) -> bool: """Return True if we know this method is idempotent, False otherwise. 
@@ -980,6 +1012,10 @@ def grpc_transport_name(self): def grpc_asyncio_transport_name(self): return self.name + "GrpcAsyncIOTransport" + @property + def rest_transport_name(self): + return self.name + "RestTransport" + @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 9d5150d86980..c6320144efcf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -30,6 +30,7 @@ from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .client import {{ service.client_name }} +{# TODO(yon-mg): handle rest transport async client interaction #} class {{ service.async_client_name }}: """{{ service.meta.doc|rst(width=72, indent=4) }}""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index c3093aa1cf7e..6ec3d5d879d5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -30,8 +30,13 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO +{%- if 'grpc' in opts.transport %} from .transports.grpc import {{ service.grpc_transport_name }} from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} +{%- endif %} +{%- if 'rest' in 
opts.transport %} +from .transports.rest import {{ service.name }}RestTransport +{%- endif %} class {{ service.client_name }}Meta(type): @@ -42,8 +47,13 @@ class {{ service.client_name }}Meta(type): objects. """ _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] + {%- if 'grpc' in opts.transport %} _transport_registry['grpc'] = {{ service.grpc_transport_name }} _transport_registry['grpc_asyncio'] = {{ service.grpc_asyncio_transport_name }} + {%- endif %} + {%- if 'rest' in opts.transport %} + _transport_registry['rest'] = {{ service.name }}RestTransport + {%- endif %} def get_transport_class(cls, label: str = None, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index fa97f46164cb..bd7981387f22 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -5,19 +5,34 @@ from collections import OrderedDict from typing import Dict, Type from .base import {{ service.name }}Transport +{%- if 'grpc' in opts.transport %} from .grpc import {{ service.name }}GrpcTransport from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport +{%- endif %} +{%- if 'rest' in opts.transport %} +from .rest import {{ service.name }}RestTransport +{%- endif %} + # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] +{%- if 'grpc' in opts.transport %} _transport_registry['grpc'] = {{ service.name }}GrpcTransport _transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport - +{%- endif %} +{%- if 'rest' in opts.transport %} +_transport_registry['rest'] = {{ service.name }}RestTransport +{%- endif %} __all__ = ( '{{ service.name }}Transport', + {%- if 'grpc' in opts.transport %} '{{ service.name }}GrpcTransport', '{{ service.name }}GrpcAsyncIOTransport', + {%- endif %} + {%- if 'rest' in opts.transport %} + '{{ service.name }}RestTransport', + {%- endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index e2c68c483d0e..3d1f5ca9b2c9 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -151,6 +151,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ) self._stubs = {} # type: Dict[str, Callable] + {%- if service.has_lro %} + self._operations_client = None + {%- endif %} # Run the base constructor. super().__init__( @@ -172,7 +175,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -220,13 +223,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): client. 
""" # Sanity check: Only create a new client if we do not already have one. - if 'operations_client' not in self.__dict__: - self.__dict__['operations_client'] = operations_v1.OperationsClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( self.grpc_channel ) # Return the client from cache. - return self.__dict__['operations_client'] + return self._operations_client {%- endif %} {%- for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 6399f1f1cd93..5ea70311624a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -205,6 +205,9 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ) self._stubs = {} + {%- if service.has_lro %} + self._operations_client = None + {%- endif %} @property def grpc_channel(self) -> aio.Channel: @@ -225,13 +228,13 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): client. """ # Sanity check: Only create a new client if we do not already have one. - if 'operations_client' not in self.__dict__: - self.__dict__['operations_client'] = operations_v1.OperationsAsyncClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel ) # Return the client from cache. 
- return self.__dict__['operations_client'] + return self._operations_client {%- endif %} {%- for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 new file mode 100644 index 000000000000..d26856dd2c9d --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -0,0 +1,175 @@ +{% extends '_base.py.j2' %} + +{% block content %} +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +{% if service.has_lro %} +from google.api_core import operations_v1 +{%- endif %} +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.auth.transport.requests import AuthorizedSession + +{# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} +{% filter sort_lines -%} +{% for method in service.methods.values() -%} +{{ method.input.ident.python_import }} +{{ method.output.ident.python_import }} +{% endfor -%} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +{% endif %} +{% endfilter %} + +from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO + + +class {{ service.name }}RestTransport({{ service.name }}Transport): + """REST backend transport for {{ service.name }}. + + {{ service.meta.doc|rst(width=72, indent=4) }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + {# TODO(yon-mg): handle mtls stuff if that's relevant for rest transport #} + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{- ' ' }}The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ """ + super().__init__(host=host, credentials=credentials) + self._session = AuthorizedSession(self._credentials) + {%- if service.has_lro %} + self._operations_client = None + {%- endif %} + + {%- if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + from google.api_core import grpc_helpers + self._operations_client = operations_v1.OperationsClient( + grpc_helpers.create_channel( + self._host, + credentials=self._credentials, + scopes=self.AUTH_SCOPES, + ) + ) + + # Return the client from cache. + return self._operations_client + {%- endif %} + {%- for method in service.methods.values() %} + {%- if method.http_opt %} + + def {{ method.name|snake_case }}(self, + request: {{ method.input.ident }}, *, + metadata: Sequence[Tuple[str, str]] = (), + ) -> {{ method.output.ident }}: + r"""Call the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request (~.{{ method.input.ident }}): + The request object. + {{ method.input.meta.doc|rst(width=72, indent=16) }} + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ {%- if not method.void %} + + Returns: + ~.{{ method.output.ident }}: + {{ method.output.meta.doc|rst(width=72, indent=16) }} + {%- endif %} + """ + + {%- if 'body' in method.http_opt.keys() %} + # Jsonify the input + data = {{ method.output.ident }}.to_json( + {%- if method.http_opt['body'] == '*' %} + request + {%- else %} + request.body + {%- endif %} + ) + {%- endif %} + + {# TODO(yon-mg): Write helper method for handling grpc transcoding url #} + # TODO(yon-mg): need to handle grpc transcoding and parse url correctly + # current impl assumes simpler version of grpc transcoding + # Send the request + url = 'https://{host}{{ method.http_opt['url'] }}'.format( + host=self._host, + {%- for field in method.input.fields.keys() %} + {{ field }}=request.{{ field }}, + {%- endfor %} + ) + {% if not method.void %}response = {% endif %}self._session.{{ method.http_opt['verb'] }}( + url, + {%- if 'body' in method.http_opt.keys() %} + json=data, + {%- endif %} + ) + {%- if not method.void %} + + # Return the response + return {{ method.output.ident }}.from_json(response.content) + {%- endif %} + {%- endif %} + {%- endfor %} + + +__all__ = ( + '{{ service.name }}RestTransport', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index c3a1ef322e6e..d99e34c63139 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -40,6 +40,8 @@ class Options: lazy_import: bool = False old_naming: bool = False add_iam_methods: bool = False + # TODO(yon-mg): should there be an enum for transport type? + transport: List[str] = dataclasses.field(default_factory=lambda: []) # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' @@ -49,6 +51,8 @@ class Options: 'samples', # output dir 'lazy-import', # requires >= 3.7 'add-iam-methods', # microgenerator implementation for `reroute_to_grpc_interface` + # transport type(s) delineated by '+' (i.e. 
grpc, rest, custom.[something], etc?) + 'transport', )) @classmethod @@ -121,6 +125,7 @@ def tweak_path(p): # Build the options instance. sample_paths = opts.pop('samples', []) + answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), @@ -134,6 +139,8 @@ def tweak_path(p): lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), add_iam_methods=bool(opts.pop('add-iam-methods', False)), + # transport should include desired transports delimited by '+', e.g. transport='grpc+rest' + transport=opts.pop('transport', ['grpc'])[0].split('+') ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index a258c294cfb6..97793e4433f3 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -116,6 +116,43 @@ def test_get_response_fails_invalid_file_paths(): assert "%proto" in ex_str and "%service" in ex_str +def test_get_response_ignores_unwanted_transports(): + g = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: + lt.return_value = [ + "foo/%service/transports/river.py.j2", + "foo/%service/transports/car.py.j2", + "foo/%service/transports/grpc.py.j2", + "foo/%service/transports/__init__.py.j2", + "foo/%service/transports/base.py.j2", + "mollusks/squid/sample.py.j2", + ] + + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template("Service: {{ service.name }}") + cgr = g.get_response( + api_schema=make_api( + make_proto( + descriptor_pb2.FileDescriptorProto( + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="SomeService"), + ] + ), + ) + ), + opts=Options.build("transport=river+car") + ) + + assert len(cgr.file) == 4 + assert {i.name for i in cgr.file} == 
{ + "foo/some_service/transports/river.py", + "foo/some_service/transports/car.py", + "foo/some_service/transports/__init__.py", + "foo/some_service/transports/base.py", + } + + def test_get_response_enumerates_services(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 8b551df560e7..f6db3c044bdc 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -249,6 +249,36 @@ def test_method_field_headers_present(): assert method.field_headers == ('parent',) +def test_method_http_opt(): + http_rule = http_pb2.HttpRule( + post='/v1/{parent=projects/*}/topics', + body='*' + ) + method = make_method('DoSomething', http_rule=http_rule) + assert method.http_opt == { + 'verb': 'post', + 'url': '/v1/{parent=projects/*}/topics', + 'body': '*' + } +# TODO(yon-mg) to test: grpc transcoding, +# correct handling of path/query params +# correct handling of body & additional binding + + +def test_method_http_opt_no_body(): + http_rule = http_pb2.HttpRule(post='/v1/{parent=projects/*}/topics') + method = make_method('DoSomething', http_rule=http_rule) + assert method.http_opt == { + 'verb': 'post', + 'url': '/v1/{parent=projects/*}/topics' + } + + +def test_method_http_opt_no_http_rule(): + method = make_method('DoSomething') + assert method.http_opt == None + + def test_method_idempotent_yes(): http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') method = make_method('DoSomething', http_rule=http_rule) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index c4c8d9b83804..ef14e27a6ff3 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -59,6 +59,7 @@ def test_service_properties(): assert service.transport_name == 'ThingDoerTransport' assert service.grpc_transport_name == 'ThingDoerGrpcTransport' assert service.grpc_asyncio_transport_name == 'ThingDoerGrpcAsyncIOTransport' + assert service.rest_transport_name == 'ThingDoerRestTransport' def test_service_host(): From 5bbcdc6d1e5dbe1b67d8b2655fc47e05f9da5ef9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 16 Nov 2020 19:46:03 +0000 Subject: [PATCH 0424/1339] chore: release 0.36.0 (#700) :robot: I have created a release \*beep\* \*boop\* --- ## [0.36.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.11...v0.36.0) (2020-11-14) ### Features * add rest transport generation for clients with optional transport flag ([#688](https://www.github.com/googleapis/gapic-generator-python/issues/688)) ([af59c2c](https://www.github.com/googleapis/gapic-generator-python/commit/af59c2c3c3d6b7e1f626c3fbc2c03f99ca31b4a4)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c9553df4bf3d..fa92081f5158 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.36.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.11...v0.36.0) (2020-11-14) + + +### Features + +* add rest transport generation for clients with optional transport flag ([#688](https://www.github.com/googleapis/gapic-generator-python/issues/688)) ([af59c2c](https://www.github.com/googleapis/gapic-generator-python/commit/af59c2c3c3d6b7e1f626c3fbc2c03f99ca31b4a4)) + ### [0.35.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.10...v0.35.11) (2020-11-12) From b7d6e8b21a8c30e68c00ced6a680c40516035b86 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 16 Nov 2020 21:04:03 +0100 Subject: [PATCH 0425/1339] chore(deps): update dependency protobuf to v3.14.0 (#699) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | minor | `==3.13.0` -> `==3.14.0` | --- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index fb7cad03af9e..d357cf067657 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.23.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 -protobuf==3.13.0 +protobuf==3.14.0 pypandoc==1.5 PyYAML==5.3.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 6369857ebec88d45cc769f88c30b9663bab5812f Mon Sep 17 00:00:00 2001 From: yon-mg <71726126+yon-mg@users.noreply.github.com> Date: Mon, 7 Dec 2020 20:29:38 -0800 Subject: [PATCH 0426/1339] feat: add proper handling of query/path/body parameters for rest transport (#702) * feat: add proper handling of query/path/body parameters for rest transport * fix: typing errors * Update case.py * fix: minor changes adding a test, refactor and style check * fix: camel_case bug with constant case * fix: to_camel_case to produce lower camel case instead of PascalCase where relevant * fix: addressing pr comments * fix: adding appropriate todos, addressing comments * fix: dataclass dependency issue * Update wrappers.py Co-authored-by: Dov Shlachter --- packages/gapic-generator/.circleci/config.yml | 1 + .../gapic/generator/generator.py | 1 + .../gapic-generator/gapic/schema/wrappers.py | 25 +++++++++ .../services/%service/transports/rest.py.j2 | 54 ++++++++++++++----- .../gapic-generator/gapic/utils/__init__.py | 2 + packages/gapic-generator/gapic/utils/case.py | 16 ++++++ packages/gapic-generator/noxfile.py | 6 +++ packages/gapic-generator/setup.py | 4 +- .../tests/unit/schema/wrappers/test_method.py | 51 ++++++++++++++++++ .../tests/unit/utils/test_case.py | 16 ++++++ 10 files changed, 161 insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/.circleci/config.yml 
b/packages/gapic-generator/.circleci/config.yml index 77375976fc54..6fef7704cf50 100644 --- a/packages/gapic-generator/.circleci/config.yml +++ b/packages/gapic-generator/.circleci/config.yml @@ -364,6 +364,7 @@ jobs: cd .. nox -s showcase_mtls_alternative_templates + # TODO(yon-mg): add compute unit tests showcase-unit-3.6: docker: - image: python:3.6-slim diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 45b204ce833e..6a3446cbf878 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -54,6 +54,7 @@ def __init__(self, opts: Options) -> None: # Add filters which templates require. self._env.filters["rst"] = utils.rst self._env.filters["snake_case"] = utils.to_snake_case + self._env.filters["camel_case"] = utils.to_camel_case self._env.filters["sort_lines"] = utils.sort_lines self._env.filters["wrap"] = utils.wrap self._env.filters["coerce_response_name"] = coerce_response_name diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 664f240ffaab..00f22d6da264 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -767,6 +767,31 @@ def http_opt(self) -> Optional[Dict[str, str]]: # TODO(yon-mg): enums for http verbs? 
return answer + @property + def path_params(self) -> Sequence[str]: + """Return the path parameters found in the http annotation path template""" + # TODO(yon-mg): fully implement grpc transcoding (currently only handles basic case) + if self.http_opt is None: + return [] + + pattern = r'\{(\w+)\}' + return re.findall(pattern, self.http_opt['url']) + + @property + def query_params(self) -> Set[str]: + """Return query parameters for API call as determined by http annotation and grpc transcoding""" + # TODO(yon-mg): fully implement grpc transcoding (currently only handles basic case) + # TODO(yon-mg): remove this method and move logic to generated client + if self.http_opt is None: + return set() + + params = set(self.path_params) + body = self.http_opt.get('body') + if body: + params.add(body) + + return set(self.input.fields) - params + # TODO(yon-mg): refactor as there may be more than one method signature @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index d26856dd2c9d..a25f66e2a7a5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -133,31 +133,59 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {%- endif %} """ - {%- if 'body' in method.http_opt.keys() %} - # Jsonify the input - data = {{ method.output.ident }}.to_json( - {%- if method.http_opt['body'] == '*' %} + {# TODO(yon-mg): refactor when implementing grpc transcoding + - parse request pb & assign body, path params + - shove leftovers into query params + - make sure dotted nested fields preserved + - format url and send the 
request + #} + {%- if 'body' in method.http_opt %} + # Jsonify the request body + {%- if method.http_opt['body'] != '*' %} + body = {{ method.input.fields[method.http_opt['body']].type.ident }}.to_json( + request.{{ method.http_opt['body'] }}, + including_default_value_fields=False + ) + {%- else %} + body = {{ method.input.ident }}.to_json( request - {%- else %} - request.body - {%- endif %} ) {%- endif %} + {%- endif %} {# TODO(yon-mg): Write helper method for handling grpc transcoding url #} # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes simpler version of grpc transcoding - # Send the request + # current impl assumes basic case of grpc transcoding url = 'https://{host}{{ method.http_opt['url'] }}'.format( host=self._host, - {%- for field in method.input.fields.keys() %} + {%- for field in method.path_params %} {{ field }}=request.{{ field }}, {%- endfor %} ) + + {# TODO(yon-mg): move all query param logic out of wrappers into here to handle + nested fields correctly (can't just use set of top level fields + #} + # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields + # not required for GCE + query_params = { + {%- for field in method.query_params %} + '{{ field|camel_case }}': request.{{ field }}, + {%- endfor %} + } + # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here + # discards default values + # TODO(yon-mg): add test for proper url encoded strings + query_params = ((k, v) for k, v in query_params.items() if v) + for i, (param_name, param_value) in enumerate(query_params): + q = '?' 
if i == 0 else '&' + url += "{q}{name}={value}".format(q=q, name=param_name, value=param_value.replace(' ', '+')) + + # Send the request {% if not method.void %}response = {% endif %}self._session.{{ method.http_opt['verb'] }}( - url, - {%- if 'body' in method.http_opt.keys() %} - json=data, + url + {%- if 'body' in method.http_opt %}, + json=body, {%- endif %} ) {%- if not method.void %} diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 9719a8f7a2c7..9729591c3c4b 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -14,6 +14,7 @@ from gapic.utils.cache import cached_property from gapic.utils.case import to_snake_case +from gapic.utils.case import to_camel_case from gapic.utils.code import empty from gapic.utils.code import nth from gapic.utils.code import partition @@ -38,6 +39,7 @@ 'rst', 'sort_lines', 'to_snake_case', + 'to_camel_case', 'to_valid_filename', 'to_valid_module_name', 'wrap', diff --git a/packages/gapic-generator/gapic/utils/case.py b/packages/gapic-generator/gapic/utils/case.py index a7552e2abab3..f58aa4adc6e0 100644 --- a/packages/gapic-generator/gapic/utils/case.py +++ b/packages/gapic-generator/gapic/utils/case.py @@ -45,3 +45,19 @@ def to_snake_case(s: str) -> str: # Done; return the camel-cased string. return s.lower() + + +def to_camel_case(s: str) -> str: + '''Convert any string to camel case. + + This is provided to templates as the ``camel_case`` filter. + + Args: + s (str): The input string, provided in any sane case system + + Returns: + str: The string in lower camel case. 
+ ''' + + items = re.split(r'[_-]', to_snake_case(s)) + return items[0].lower() + "".join(x.capitalize() for x in items[1:]) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 37a2be048a12..a50376efe1fe 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -52,6 +52,10 @@ def unit(session): ) +# TODO(yon-mg): -add compute context manager that includes rest transport +# -add compute unit tests +# (to test against temporarily while rest transport is incomplete) +# (to be removed once all features are complete) @contextmanager def showcase_library( session, templates="DEFAULT", other_opts: typing.Iterable[str] = () @@ -87,6 +91,8 @@ def showcase_library( # Write out a client library for Showcase. template_opt = f"python-gapic-templates={templates}" + # TODO(yon-mg): add "transports=grpc+rest" when all rest features required for + # Showcase are implemented i.e. (grpc transcoding, LROs, etc) opts = "--python_gapic_opt=" opts += ",".join(other_opts + (f"{template_opt}",)) cmd_tup = ( diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 16fef93d78c5..409dea8b65f4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -51,9 +51,9 @@ "protobuf >= 3.12.0", "pypandoc >= 1.4", "PyYAML >= 5.1.1", - "dataclasses<0.8; python_version < '3.7'" + "dataclasses < 0.8; python_version < '3.7'" ), - extras_require={':python_version<"3.7"': ("dataclasses >= 0.4",),}, + extras_require={':python_version<"3.7"': ("dataclasses >= 0.4, < 0.8",),}, tests_require=("pyfakefs >= 3.6",), python_requires=">=3.6", classifiers=( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index f6db3c044bdc..bcaeb6880093 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -279,6 +279,57 @@ def test_method_http_opt_no_http_rule(): assert method.http_opt == None +def test_method_path_params(): + # tests only the basic case of grpc transcoding + http_rule = http_pb2.HttpRule(post='/v1/{project}/topics') + method = make_method('DoSomething', http_rule=http_rule) + assert method.path_params == ['project'] + + +def test_method_path_params_no_http_rule(): + method = make_method('DoSomething') + assert method.path_params == [] + + +def test_method_query_params(): + # tests only the basic case of grpc transcoding + http_rule = http_pb2.HttpRule( + post='/v1/{project}/topics', + body='address' + ) + input_message = make_message( + 'MethodInput', + fields=( + make_field('region'), + make_field('project'), + make_field('address') + ) + ) + method = make_method('DoSomething', http_rule=http_rule, + input_message=input_message) + assert method.query_params == {'region'} + + +def test_method_query_params_no_body(): + # tests only the basic case of grpc transcoding + http_rule = http_pb2.HttpRule(post='/v1/{project}/topics') + input_message = make_message( + 'MethodInput', + fields=( + make_field('region'), + make_field('project'), + ) + ) + method = make_method('DoSomething', http_rule=http_rule, + input_message=input_message) + assert method.query_params == {'region'} + + +def test_method_query_params_no_http_rule(): + method = make_method('DoSomething') + assert method.query_params == set() + + def test_method_idempotent_yes(): http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') method = make_method('DoSomething', http_rule=http_rule) diff --git a/packages/gapic-generator/tests/unit/utils/test_case.py b/packages/gapic-generator/tests/unit/utils/test_case.py index 93b86ea76350..83406ca43e1a 100644 --- a/packages/gapic-generator/tests/unit/utils/test_case.py +++ b/packages/gapic-generator/tests/unit/utils/test_case.py @@ -25,3 +25,19 @@ def 
test_camel_to_snake(): def test_constant_to_snake(): assert case.to_snake_case('CONSTANT_CASE_THING') == 'constant_case_thing' + + +def test_pascal_to_camel(): + assert case.to_camel_case('PascalCaseThing') == 'pascalCaseThing' + + +def test_snake_to_camel(): + assert case.to_camel_case('snake_case_thing') == 'snakeCaseThing' + + +def test_constant_to_camel(): + assert case.to_camel_case('CONSTANT_CASE_THING') == 'constantCaseThing' + + +def test_kebab_to_camel(): + assert case.to_camel_case('kebab-case-thing') == 'kebabCaseThing' From 30b8428e8acf60c21c01d99c590f14f15b87745c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 8 Dec 2020 20:04:06 +0000 Subject: [PATCH 0427/1339] chore: release 0.37.0 (#703) :robot: I have created a release \*beep\* \*boop\* --- ## [0.37.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.36.0...v0.37.0) (2020-12-08) ### Features * add proper handling of query/path/body parameters for rest transport ([#702](https://www.github.com/googleapis/gapic-generator-python/issues/702)) ([6b2de5d](https://www.github.com/googleapis/gapic-generator-python/commit/6b2de5dd9fbf15e6b0a42b428b01eb03f1a3820a)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index fa92081f5158..8300716937b8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.37.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.36.0...v0.37.0) (2020-12-08) + + +### Features + +* add proper handling of query/path/body parameters for rest transport ([#702](https://www.github.com/googleapis/gapic-generator-python/issues/702)) ([6b2de5d](https://www.github.com/googleapis/gapic-generator-python/commit/6b2de5dd9fbf15e6b0a42b428b01eb03f1a3820a)) + ## [0.36.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.11...v0.36.0) (2020-11-14) From 2d2d32c969dc5af8c7129ea4ac6730dcfdd17db2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 10 Dec 2020 13:03:47 -0700 Subject: [PATCH 0428/1339] fix: remove client recv msg limit (#704) --- .../%version/%sub/services/%service/transports/grpc.py.j2 | 8 ++++++++ .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/grpc.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/grpc_asyncio.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/rest.py.j2 | 4 ++++ .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 8 ++++++++ 6 files changed, 44 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 6438991b57a0..c8e721b9cc86 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -124,6 +124,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -138,6 +142,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials=credentials, ssl_credentials=ssl_channel_credentials, scopes=self.AUTH_SCOPES, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5dc0f26babe1..b1b9ec2679d5 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -748,6 +748,10 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -787,6 +791,10 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 3d1f5ca9b2c9..0bcd1fa64fd4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -132,6 +132,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -148,6 +152,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 5ea70311624a..83013d83ee15 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -176,6 +176,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), 
+ ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -192,6 +196,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index a25f66e2a7a5..ad7e4051b9d7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -101,6 +101,10 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): self._host, credentials=self._credentials, scopes=self.AUTH_SCOPES, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5882e95f785b..18108233b8bf 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1237,6 +1237,10 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert 
transport._ssl_channel_credentials == mock_ssl_cred @@ -1276,6 +1280,10 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel From 9c070b18c948fe7870d5cd9f54c940fd8bea0e1b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 10 Dec 2020 13:28:38 -0700 Subject: [PATCH 0429/1339] chore: release 0.37.1 (#707) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8300716937b8..46b3a1971449 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.37.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.0...v0.37.1) (2020-12-10) + + +### Bug Fixes + +* remove client recv msg limit ([#704](https://www.github.com/googleapis/gapic-generator-python/issues/704)) ([80147ce](https://www.github.com/googleapis/gapic-generator-python/commit/80147ce177ce435dcb1b611181e80dc35f915293)) + ## [0.37.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.36.0...v0.37.0) (2020-12-08) From 199fb7bda8b920208d639f28daec96144039ef26 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 15 Dec 2020 00:16:07 +0100 Subject: [PATCH 0430/1339] chore(deps): update dependency google-api-core to v1.24.0 (#709) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | minor | 
`==1.23.0` -> `==1.24.0` | --- ### Release Notes
googleapis/python-api-core ### [`v1.24.0`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1240-httpswwwgithubcomgoogleapispython-api-corecomparev1230v1240-2020-12-14) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.23.0...v1.24.0) ##### Features - add support for Python 3.9, drop support for Python 3.5 ([#​111](https://www.github.com/googleapis/python-api-core/issues/111)) ([fdbed0f](https://www.github.com/googleapis/python-api-core/commit/fdbed0f0cbae8de21c73338a6817f8aa79cef4c9)), closes [#​110](https://www.github.com/googleapis/python-api-core/issues/110) ##### Documentation - explain how to create credentials from dict ([#​109](https://www.github.com/googleapis/python-api-core/issues/109)) ([5dce6d6](https://www.github.com/googleapis/python-api-core/commit/5dce6d61e7324a415c1b3ceaeec1ce1b5f1ea189))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d357cf067657..f3bc85e08190 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.23.0 +google-api-core==1.24.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 860f8a98f8e86a512f58c933f090f97dcfaf336d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 16 Dec 2020 13:20:03 -0500 Subject: [PATCH 0431/1339] feat: add 'from_service_account_info' factory to clients (#706) Closes #705 --- .../%version/%sub/services/%service/client.py.j2 | 16 ++++++++++++++++ .../%name_%version/%sub/test_%service.py.j2 | 11 +++++++++++ .../%sub/services/%service/async_client.py.j2 | 1 + .../%sub/services/%service/client.py.j2 | 16 ++++++++++++++++ .../%name_%version/%sub/test_%service.py.j2 | 11 +++++++++++ 5 files changed, 55 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 5f12de323c9d..e3d0016a7a24 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -97,6 +97,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index b1b9ec2679d5..a3888503e65c 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -61,6 +61,17 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test_{{ service.client_name|snake_case }}_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = {{ service.client_name }}.from_service_account_info(info) + 
assert client.transport._credentials == creds + + {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + + def test_{{ service.client_name|snake_case }}_from_service_account_file(): creds = credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index c6320144efcf..6354418f7ff9 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -48,6 +48,7 @@ class {{ service.async_client_name }}: parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path) {% endfor %} + from_service_account_info = {{ service.client_name }}.from_service_account_info from_service_account_file = {{ service.client_name }}.from_service_account_file from_service_account_json = from_service_account_file diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 6ec3d5d879d5..6d703fb2de76 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -113,6 +113,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DEFAULT_ENDPOINT ) + @classmethod + def 
from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 18108233b8bf..570e997a6f4c 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -70,6 +70,17 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test_{{ service.client_name|snake_case }}_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = {{ service.client_name }}.from_service_account_info(info) + assert client.transport._credentials == creds + + {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + + @pytest.mark.parametrize("client_class", [{{ service.client_name }}, {{ service.async_client_name }}]) def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): 
creds = credentials.AnonymousCredentials() From 43bea54e037a72c096b8f7bef1211e07304d21f3 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 16 Dec 2020 11:48:04 -0700 Subject: [PATCH 0432/1339] build: use rules_python 0.1.0 (#708) https://github.com/bazelbuild/rules_python/releases/tag/0.1.0 --- packages/gapic-generator/WORKSPACE | 4 ++-- packages/gapic-generator/repositories.bzl | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index b0cee19c28c1..673c802f9962 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -9,8 +9,8 @@ http_archive( http_archive( name = "rules_python", - strip_prefix = "rules_python-748aa53d7701e71101dfd15d800e100f6ff8e5d1", - url = "https://github.com/bazelbuild/rules_python/archive/748aa53d7701e71101dfd15d800e100f6ff8e5d1.zip", + strip_prefix = "rules_python-0.1.0", + url = "https://github.com/bazelbuild/rules_python/archive/0.1.0.tar.gz", ) load("@rules_python//python:repositories.bzl", "py_repositories") diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 03e0e1cb64b7..38ad41907e2d 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -1,5 +1,5 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") -load("@rules_python//python:pip.bzl", "pip_import") +load("@rules_python//python:pip.bzl", "pip_install") _PANDOC_BUILD_FILE = """ filegroup( @@ -10,9 +10,8 @@ filegroup( def gapic_generator_python(): _maybe( - pip_import, + pip_install, name = "gapic_generator_python_pip_deps", - python_interpreter = "python3", requirements = "@gapic_generator_python//:requirements.txt", ) From d62aeefd5e7ab6f35339aac26b07475a70f5e74c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Dec 2020 19:54:05 +0100 Subject: [PATCH 0433/1339] chore(deps): 
update dependency google-api-core to v1.24.1 (#711) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | patch | `==1.24.0` -> `==1.24.1` | --- ### Release Notes
googleapis/python-api-core ### [`v1.24.1`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1241-httpswwwgithubcomgoogleapispython-api-corecomparev1240v1241-2020-12-16) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f3bc85e08190..c435becd3389 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.24.0 +google-api-core==1.24.1 googleapis-common-protos==1.52.0 jinja2==2.11.2 MarkupSafe==1.1.1 From 7b767f9f6abdcd6afa56a575d89b509f6be62abe Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 16 Dec 2020 19:00:05 +0000 Subject: [PATCH 0434/1339] chore: release 0.38.0 (#710) :robot: I have created a release \*beep\* \*boop\* --- ## [0.38.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.1...v0.38.0) (2020-12-16) ### Features * add 'from_service_account_info' factory to clients ([#706](https://www.github.com/googleapis/gapic-generator-python/issues/706)) ([94d5f0c](https://www.github.com/googleapis/gapic-generator-python/commit/94d5f0c11b8041cbae8e4a89bb504d6c6e200a95)), closes [#705](https://www.github.com/googleapis/gapic-generator-python/issues/705) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 46b3a1971449..0c28ecdb738b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.38.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.1...v0.38.0) (2020-12-16) + + +### Features + +* add 'from_service_account_info' factory to clients ([#706](https://www.github.com/googleapis/gapic-generator-python/issues/706)) ([94d5f0c](https://www.github.com/googleapis/gapic-generator-python/commit/94d5f0c11b8041cbae8e4a89bb504d6c6e200a95)), closes [#705](https://www.github.com/googleapis/gapic-generator-python/issues/705) + ### [0.37.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.0...v0.37.1) (2020-12-10) From 9f9e6b56644c522490f7253d930134a8a428deba Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 22 Dec 2020 10:51:18 -0700 Subject: [PATCH 0435/1339] fix: fix sphinx identifiers (#714) Cross-references like `~.ImageAnnotatorClient` don't always work correctly with sphinx. This PR changes the `sphinx()` method to always produce a full path like `google.cloud.vision_v1.ImageAnnotatorClient`. Also some other smaller changes: - Generate a separate `.rst` page for each service, which improves readability for APIs that have (1) a lot of services or (2) a lot of methods in a service. `services.rst` acts as an index page instead. - Add pagers to the generated docs - Use `undoc-members` to list enum attributes in generated docs (fixes #625) - Add newlines after bulleted lists by removing `nl=False`. Fixes #604 - Add a 'docs' session to the templated `noxfile.py` so folks using the self-service model can have generated docs. - Fix reference to LRO result type in `Returns:` - Fix `{@api.name}` reference in the `from_service_account..`. 
methods to reference the client type instead - Remove `:class:` notation when specifying types for attributes (sphinx doesn't need it to create a link) --- packages/gapic-generator/.gitignore | 3 +++ .../%sub/services/%service/client.py.j2 | 10 ++++----- .../docs/%name_%version/types.rst.j2 | 1 + .../ads-templates/docs/_static/custom.css | 3 +++ .../gapic/ads-templates/docs/conf.py.j2 | 2 +- .../gapic-generator/gapic/schema/metadata.py | 19 +++++++++++++--- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../%sub/services/%service/async_client.py.j2 | 4 ++-- .../%sub/services/%service/client.py.j2 | 18 +++++++-------- .../%sub/services/%service/pagers.py.j2 | 8 +++---- .../docs/%name_%version/%service.rst.j2 | 12 ++++++++++ .../docs/%name_%version/services.rst.j2 | 10 ++++----- .../docs/%name_%version/types.rst.j2 | 1 + .../gapic/templates/docs/_static/custom.css | 3 +++ .../gapic/templates/docs/conf.py.j2 | 3 ++- .../gapic/templates/noxfile.py.j2 | 22 +++++++++++++++++++ .../tests/unit/schema/wrappers/test_enums.py | 2 +- .../unit/schema/wrappers/test_message.py | 21 ++++++++++++++++-- 18 files changed, 110 insertions(+), 34 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css create mode 100644 packages/gapic-generator/gapic/templates/docs/%name_%version/%service.rst.j2 create mode 100644 packages/gapic-generator/gapic/templates/docs/_static/custom.css diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore index df28dce99d3c..5b68f2ed5833 100644 --- a/packages/gapic-generator/.gitignore +++ b/packages/gapic-generator/.gitignore @@ -45,6 +45,9 @@ htmlcov # JetBrains .idea +# VS Code +.vscode + # Built documentation docs/_build docs/_build_doc2dash diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 
e3d0016a7a24..4d2f0abebb48 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -107,7 +107,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + {{ service.client_name }}: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -125,7 +125,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + {{ service.client_name }}: The constructed client. """ credentials = service_account.Credentials.from_service_account_file( filename) @@ -188,7 +188,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -306,7 +306,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() -%} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): - {{ field.meta.doc|rst(width=72, indent=16, nl=False) }} + {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the ``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -329,7 +329,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- else %} Iterable[{{ method.client_output.ident.sphinx }}]: {%- endif %} - {{ method.client_output.meta.doc|rst(width=72, indent=16) }} + {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} {%- endif %} """ {%- if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 index 640641f2117d..a46bd12b224d 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/types.rst.j2 @@ -3,3 +3,4 @@ Types for {{ api.naming.long_name }} {{ api.naming.version }} API .. 
automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: + :undoc-members: diff --git a/packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css b/packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css new file mode 100644 index 000000000000..c4e78bac6354 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 index 1e827b37d96c..1d60ece902b8 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -165,7 +165,7 @@ html_theme_options = { # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. 
These files are copied diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index cd96ab86c35b..0276fe098974 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -173,9 +173,22 @@ def python_import(self) -> imp.Import: @property def sphinx(self) -> str: """Return the Sphinx identifier for this type.""" - if self.module: - return f'~.{self}' - return self.name + + if not self.api_naming: + if self.package: + return '.'.join(self.package + (self.module, self.name)) + else: + return str(self) + + # Check if this is a generated type + # Use the original module name rather than the module_alias + if self.proto_package.startswith(self.api_naming.proto_package): + return '.'.join(self.api_naming.module_namespace + ( + self.api_naming.versioned_module_name, + ) + self.subpackage + ('types',) + self.parent + (self.name, )) + + # Anything left is a standard _pb2 type + return f'{self.proto_package}.{self.module}_pb2.{self.name}' @property def subpackage(self) -> Tuple[str, ...]: diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 00f22d6da264..6020a2606186 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -690,7 +690,7 @@ def _client_output(self, enable_asyncio: bool): documentation=utils.doc( 'An object representing a long-running operation. 
\n\n' 'The result type for the operation will be ' - ':class:`{ident}`: {doc}'.format( + ':class:`{ident}` {doc}'.format( doc=self.lro.response_type.meta.doc, ident=self.lro.response_type.ident.sphinx, ), diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 6354418f7ff9..f6e380d0679a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -140,7 +140,7 @@ class {{ service.async_client_name }}: {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() -%} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): - {{ field.meta.doc|rst(width=72, indent=16, nl=False) }} + {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the ``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -163,7 +163,7 @@ class {{ service.async_client_name }}: {%- else %} AsyncIterable[{{ method.client_output_async.ident.sphinx }}]: {%- endif %} - {{ method.client_output_async.meta.doc|rst(width=72, indent=16) }} + {{ method.client_output_async.meta.doc|rst(width=72, indent=16, source_format='rst') }} {%- endif %} """ {%- if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 6d703fb2de76..21b0c9f20166 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -123,7 +123,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + {{ service.client_name }}: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -141,7 +141,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + {{ service.client_name }}: The constructed client. """ credentials = service_account.Credentials.from_service_account_file( filename) @@ -202,10 +202,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.{{ service.name }}Transport]): The + transport (Union[str, {{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. 
- client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -322,18 +322,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Args: {%- if not method.client_streaming %} - request (:class:`{{ method.input.ident.sphinx }}`): + request ({{ method.input.ident.sphinx }}): The request object.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() -%} - {{ field.name }} (:class:`{{ field.ident.sphinx }}`): - {{ field.meta.doc|rst(width=72, indent=16, nl=False) }} + {{ field.name }} ({{ field.ident.sphinx }}): + {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the ``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
{% endfor -%} {%- else %} - requests (Iterator[`{{ method.input.ident.sphinx }}`]): + requests (Iterator[{{ method.input.ident.sphinx }}]): The request object iterator.{{ ' ' -}} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {%- endif %} @@ -350,7 +350,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- else %} Iterable[{{ method.client_output.ident.sphinx }}]: {%- endif %} - {{ method.client_output.meta.doc|rst(width=72, indent=16) }} + {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} {%- endif %} """ {%- if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 2f9598d2e3e2..ea08466ba0ae 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -45,9 +45,9 @@ class {{ method.name }}Pager: Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`{{ method.input.ident.sphinx }}`): + request ({{ method.input.ident.sphinx }}): The initial request object. - response (:class:`{{ method.output.ident.sphinx }}`): + response ({{ method.output.ident.sphinx }}): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -104,9 +104,9 @@ class {{ method.name }}AsyncPager: Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`{{ method.input.ident.sphinx }}`): + request ({{ method.input.ident.sphinx }}): The initial request object. 
- response (:class:`{{ method.output.ident.sphinx }}`): + response ({{ method.output.ident.sphinx }}): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/%service.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/%service.rst.j2 new file mode 100644 index 000000000000..6a54833b5a16 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/%service.rst.j2 @@ -0,0 +1,12 @@ +{{ service.name }} +{{ '-' * (18 + service.name|length) }} + +.. automodule:: {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} + :members: + :inherited-members: + +{% if service.has_pagers %} +.. automodule:: {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.pagers + :members: + :inherited-members: +{% endif %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 index b0f05d693191..98ba64f60f91 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 @@ -1,8 +1,8 @@ Services for {{ api.naming.long_name }} {{ api.naming.version }} API {{ '=' * (18 + api.naming.long_name|length + api.naming.version|length) }} +.. toctree:: + :maxdepth: 2 -{% for service in api.services.values()|sort(attribute='name') -%} -.. 
automodule:: {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} - :members: - :inherited-members: -{% endfor %} + {% for service in api.services.values()|sort(attribute='name') -%} + {{ service.name|snake_case }} + {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 index a77df003f7bd..2f453dd8cd4d 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 @@ -3,4 +3,5 @@ Types for {{ api.naming.long_name }} {{ api.naming.version }} API .. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: + :undoc-members: :show-inheritance: diff --git a/packages/gapic-generator/gapic/templates/docs/_static/custom.css b/packages/gapic-generator/gapic/templates/docs/_static/custom.css new file mode 100644 index 000000000000..c4e78bac6354 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index 423c3ad08813..d4f1b215a691 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -165,7 +165,7 @@ html_theme_options = { # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
-# html_static_path = [] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -347,6 +347,7 @@ intersphinx_mapping = { "grpc": ("https://grpc.io/grpc/python/", None), "requests": ("http://requests.kennethreitz.org/en/stable/", None), "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 5fde488f006d..65397ea71c21 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -2,6 +2,7 @@ {% block content %} import os +import shutil import nox # type: ignore @@ -37,4 +38,25 @@ def mypy(session): '{{ api.naming.versioned_module_name }}', {%- endif %} ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) {% endblock %} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 0602a09e9c0f..3debb5603b83 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -39,4 +39,4 @@ def test_enum_value_properties(): def test_enum_ident(): message = make_enum('Baz', package='foo.v1', module='bar') assert 
str(message.ident) == 'bar.Baz' - assert message.ident.sphinx == '~.bar.Baz' + assert message.ident.sphinx == 'foo.v1.bar.Baz' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 7d8cca169aab..4a7905d1b294 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -21,6 +21,7 @@ from google.api import resource_pb2 from google.protobuf import descriptor_pb2 +from gapic.schema import naming from gapic.schema import metadata from gapic.schema import wrappers @@ -50,7 +51,7 @@ def test_message_docstring(): def test_message_ident(): message = make_message('Baz', package='foo.v1', module='bar') assert str(message.ident) == 'bar.Baz' - assert message.ident.sphinx == '~.bar.Baz' + assert message.ident.sphinx == 'foo.v1.bar.Baz' def test_message_ident_collisions(): @@ -58,7 +59,23 @@ def test_message_ident_collisions(): collisions={'bar'}, ) assert str(message.ident) == 'fv_bar.Baz' - assert message.ident.sphinx == '~.fv_bar.Baz' + assert message.ident.sphinx == 'foo.v1.bar.Baz' + + +def test_message_pb2_sphinx_ident(): + meta = metadata.Metadata( + address=metadata.Address( + name='Timestamp', + package=('google', 'protobuf'), + module='timestamp', + api_naming=naming.NewNaming( + proto_package="foo.bar" + ) + ) + ) + message = make_message("Timestamp", package='google.protobuf', + module='timestamp', meta=meta) + assert message.ident.sphinx == 'google.protobuf.timestamp_pb2.Timestamp' def test_get_field(): From 386214008d2582d7850ff4ae9c23eeb834500177 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 22 Dec 2020 11:08:03 -0700 Subject: [PATCH 0436/1339] feat: allow warehouse name to be customized (#717) Allow warehouse name (package name in `setup.py`) to be customized via a CLI option. 
This is a pretty common reason for a `synth.py` regex replace: - One repo has more than one API (e.g., Bigtable and Bigtable Admin) but the package name should always be `google-cloud-bigtable` - We want an extra `-` in the repo name and package name to make it easier to read and type. (`google-cloud-binaryauthorization` -> `google-cloud-binary-authorization`) - Package name constructed from the namespace doesn't match the `google-cloud-{API}` convention (`google-cloud-devtools-containeranalysis` -> `google-cloud-containeranalysis`) Fixes #605 --- packages/gapic-generator/gapic/schema/naming.py | 13 ++++++++++--- packages/gapic-generator/gapic/utils/options.py | 4 ++++ .../tests/unit/generator/test_options.py | 5 +++++ .../tests/unit/schema/test_naming.py | 10 ++++++++++ 4 files changed, 29 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index c591ad59cca3..3f49a18a2e48 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -42,6 +42,7 @@ class Naming(abc.ABC): version: str = '' product_name: str = '' proto_package: str = '' + _warehouse_package_name: str = '' def __post_init__(self): if not self.product_name: @@ -141,6 +142,10 @@ def build( # with ('x.y',) will become a two-tuple: ('x', 'y') i.capitalize() for i in '.'.join(opts.namespace).split('.') )) + if opts.warehouse_package_name: + package_info = dataclasses.replace(package_info, + _warehouse_package_name=opts.warehouse_package_name + ) # Done; return the naming information. return package_info @@ -186,9 +191,11 @@ def versioned_module_name(self) -> str: @property def warehouse_package_name(self) -> str: """Return the appropriate Python package name for Warehouse.""" - - # Piece the name and namespace together to come up with the - # proper package name. 
+ # If a custom name has been set, use it + if self._warehouse_package_name: + return self._warehouse_package_name + # Otherwise piece the name and namespace together to come + # up with the proper package name. answer = list(self.namespace) + self.name.split(' ') return '-'.join(answer).lower() diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index d99e34c63139..b8e79a060139 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -34,6 +34,7 @@ class Options: """ name: str = '' namespace: Tuple[str, ...] = dataclasses.field(default=()) + warehouse_package_name: str = '' retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) @@ -53,6 +54,7 @@ class Options: 'add-iam-methods', # microgenerator implementation for `reroute_to_grpc_interface` # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) 
'transport', + 'warehouse-package-name' # change the package name on PyPI )) @classmethod @@ -129,6 +131,8 @@ def tweak_path(p): answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), + warehouse_package_name=opts.pop( + 'warehouse-package-name', ['']).pop(), retry=retry_cfg, sample_configs=tuple( cfg_path diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 5235c2e45393..60d365a84c52 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -152,3 +152,8 @@ def test_options_old_naming(): def test_options_add_iam_methods(): opts = Options.build('add-iam-methods') assert opts.add_iam_methods + + +def test_options_warehouse_package_name(): + opts = Options.build('warehouse-package-name') + assert opts.warehouse_package_name diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index ec1e0dad6460..c0487b7d2620 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -218,6 +218,16 @@ def test_cli_override_name_and_namespace_versionless(): assert not n.version +def test_cli_override_warehouse_package_name(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.translation') + n = naming.Naming.build( + proto1, + opts=Options(warehouse_package_name='google-cloud-foo'), + ) + assert n.warehouse_package_name == "google-cloud-foo" + + def test_build_factory(): proto = descriptor_pb2.FileDescriptorProto( package='google.mollusc.v1alpha1' From ee6e8ebd5ca3fcf94da2fadd6791c5862e797d84 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 22 Dec 2020 18:14:03 +0000 Subject: [PATCH 0437/1339] chore: release 
0.39.0 (#719) :robot: I have created a release \*beep\* \*boop\* --- ## [0.39.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.38.0...v0.39.0) (2020-12-22) ### Features * allow warehouse name to be customized ([#717](https://www.github.com/googleapis/gapic-generator-python/issues/717)) ([7c185e8](https://www.github.com/googleapis/gapic-generator-python/commit/7c185e87cb4252b1f99ed121515814595f9492c4)), closes [#605](https://www.github.com/googleapis/gapic-generator-python/issues/605) ### Bug Fixes * fix sphinx identifiers ([#714](https://www.github.com/googleapis/gapic-generator-python/issues/714)) ([39be474](https://www.github.com/googleapis/gapic-generator-python/commit/39be474b4419dfa521ef51927fd36dbf257d68e3)), closes [#625](https://www.github.com/googleapis/gapic-generator-python/issues/625) [#604](https://www.github.com/googleapis/gapic-generator-python/issues/604) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0c28ecdb738b..395709717935 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.39.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.38.0...v0.39.0) (2020-12-22) + + +### Features + +* allow warehouse name to be customized ([#717](https://www.github.com/googleapis/gapic-generator-python/issues/717)) ([7c185e8](https://www.github.com/googleapis/gapic-generator-python/commit/7c185e87cb4252b1f99ed121515814595f9492c4)), closes [#605](https://www.github.com/googleapis/gapic-generator-python/issues/605) + + +### Bug Fixes + +* fix sphinx identifiers ([#714](https://www.github.com/googleapis/gapic-generator-python/issues/714)) 
([39be474](https://www.github.com/googleapis/gapic-generator-python/commit/39be474b4419dfa521ef51927fd36dbf257d68e3)), closes [#625](https://www.github.com/googleapis/gapic-generator-python/issues/625) [#604](https://www.github.com/googleapis/gapic-generator-python/issues/604) + ## [0.38.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.1...v0.38.0) (2020-12-16) From f2c5c9b51f417f2ba8085794d6313bd1e71d397a Mon Sep 17 00:00:00 2001 From: yon-mg <71726126+yon-mg@users.noreply.github.com> Date: Wed, 30 Dec 2020 16:07:20 -0600 Subject: [PATCH 0438/1339] fix: updating testing, rest-only generation, & minor bug-fixes (#716) * fix: updating testing, rest-only generation, & minor bug-fixes * test: test async client generation * fix: fixed reserved keyword bug, fixed bugs in gapic tests * fix: reverted bug causing change to , refactored template tests * fix: return type mismatch * fix: reserved keyword issue in * fix: replace bad regex checks with checks against field_pb type * Update gapic/templates/noxfile.py.j2 Co-authored-by: Dov Shlachter Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .../gapic/generator/generator.py | 8 + .../gapic-generator/gapic/schema/wrappers.py | 5 +- .../templates/%namespace/%name/__init__.py.j2 | 4 + .../%sub/services/%service/__init__.py.j2 | 4 + .../services/%service/transports/rest.py.j2 | 16 +- .../gapic/templates/noxfile.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 228 ++++++++++++++++-- .../gapic-generator/gapic/utils/__init__.py | 4 + packages/gapic-generator/gapic/utils/case.py | 4 +- .../gapic-generator/gapic/utils/checks.py | 33 +++ .../tests/unit/generator/test_generator.py | 44 +++- .../tests/unit/utils/test_checks.py | 34 +++ 12 files changed, 342 insertions(+), 46 deletions(-) create mode 100644 packages/gapic-generator/gapic/utils/checks.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_checks.py diff --git 
a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 6a3446cbf878..d6eb3aca9dd4 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -59,6 +59,10 @@ def __init__(self, opts: Options) -> None: self._env.filters["wrap"] = utils.wrap self._env.filters["coerce_response_name"] = coerce_response_name + # Add tests to determine type of expressions stored in strings + self._env.tests["str_field_pb"] = utils.is_str_field_pb + self._env.tests["msg_field_pb"] = utils.is_msg_field_pb + self._sample_configs = opts.sample_configs def get_response( @@ -278,6 +282,10 @@ def _render_template( or ('transport' in template_name and not self._is_desired_transport(template_name, opts)) + or + # TODO(yon-mg) - remove when rest async implementation resolved + # temporarily stop async client gen while rest async is unkown + ('async' in template_name and 'grpc' not in opts.transport) ): continue diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 6020a2606186..eefe0cdc7e16 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -411,7 +411,9 @@ def get_field(self, *field_path: str, collisions = collisions or self.meta.address.collisions # Get the first field in the path. - cursor = self.fields[field_path[0]] + first_field = field_path[0] + cursor = self.fields[first_field + + ('_' if first_field in utils.RESERVED_NAMES else '')] # Base case: If this is the last field in the path, return it outright. 
if len(field_path) == 1: @@ -805,6 +807,7 @@ def filter_fields(sig: str) -> Iterable[Tuple[str, Field]]: continue name = f.strip() field = self.input.get_field(*name.split('.')) + name += '_' if field.field_pb.name in utils.RESERVED_NAMES else '' if cross_pkg_request and not field.is_primitive: # This is not a proto-plus wrapped message type, # and setting a non-primitive field directly is verboten. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index d777dc86e374..7ffe67b3fe3c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -12,8 +12,10 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' if service.meta.address.subpackage == api.subpackage_view -%} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +{%- if 'grpc' in opts.transport %} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.async_client import {{ service.async_client_name }} +{%- endif %} {% endfor -%} {# Import messages and enums from each proto. 
@@ -50,7 +52,9 @@ __all__ = ( {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view -%} '{{ service.client_name }}', + {%- if 'grpc' in opts.transport %} '{{ service.async_client_name }}', + {%- endif %} {% endfor -%} {% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 index c99b2a5f91e5..e0112041c3a0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 @@ -2,10 +2,14 @@ {% block content %} from .client import {{ service.client_name }} +{%- if 'grpc' in opts.transport %} from .async_client import {{ service.async_client_name }} +{%- endif %} __all__ = ( '{{ service.client_name }}', + {%- if 'grpc' in opts.transport %} '{{ service.async_client_name }}', + {%- endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index ad7e4051b9d7..338bfcbaff76 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -78,7 +78,13 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): Generally, you only need to set this if you're developing your own client library. 
""" - super().__init__(host=host, credentials=credentials) + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + ) self._session = AuthorizedSession(self._credentials) {%- if service.has_lro %} self._operations_client = None @@ -163,7 +169,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): url = 'https://{host}{{ method.http_opt['url'] }}'.format( host=self._host, {%- for field in method.path_params %} - {{ field }}=request.{{ field }}, + {{ field }}=request.{{ method.input.get_field(field).name }}, {%- endfor %} ) @@ -180,10 +186,8 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here # discards default values # TODO(yon-mg): add test for proper url encoded strings - query_params = ((k, v) for k, v in query_params.items() if v) - for i, (param_name, param_value) in enumerate(query_params): - q = '?' 
if i == 0 else '&' - url += "{q}{name}={value}".format(q=q, name=param_name, value=param_value.replace(' ', '+')) + query_params = ['{k}={v}'.format(k=k, v=v) for k, v in query_params.items() if v] + url += '?{}'.format('&'.join(query_params)).replace(' ', '+') # Send the request {% if not method.void %}response = {% endif %}self._session.{{ method.http_opt['verb'] }}( diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 65397ea71c21..ee97ea01cb34 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -7,7 +7,7 @@ import shutil import nox # type: ignore -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) def unit(session): """Run the unit test suite.""" @@ -21,7 +21,7 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit',) + os.path.join('tests', 'unit', ''.join(session.posargs)) ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 570e997a6f4c..f912c479a301 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -10,6 +10,11 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule +{%- if 'rest' in opts.transport %} +from requests import Response +from requests.sessions import Session +{%- endif %} + {# Import the service itself as well as every proto module that it imports. 
-#} {% filter sort_lines -%} from google import auth @@ -17,7 +22,9 @@ from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} +{%- if 'grpc' in opts.transport %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.async_client_name }} +{%- endif %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from google.api_core import client_options from google.api_core import exceptions @@ -81,7 +88,12 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(): {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} -@pytest.mark.parametrize("client_class", [{{ service.client_name }}, {{ service.async_client_name }}]) +@pytest.mark.parametrize("client_class", [ + {{ service.client_name }}, + {%- if 'grpc' in opts.transport %} + {{ service.async_client_name }}, + {%- endif %} +]) def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: @@ -97,18 +109,29 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(client_c def test_{{ service.client_name|snake_case }}_get_transport_class(): transport = {{ service.client_name }}.get_transport_class() - assert transport == transports.{{ service.name }}GrpcTransport + available_transports = [ + {%- for transport_name in opts.transport 
%} + transports.{{ service.name }}{{ transport_name.capitalize() }}Transport, + {%- endfor %} + ] + assert transport in available_transports - transport = {{ service.client_name }}.get_transport_class("grpc") - assert transport == transports.{{ service.name }}GrpcTransport + transport = {{ service.client_name }}.get_transport_class("{{ opts.transport[0] }}") + assert transport == transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport @pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {%- if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), - ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), + {%- elif 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + {%- endif %} ]) @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +{%- if 'grpc' in opts.transport %} @mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +{%- endif %} def test_{{ service.client_name|snake_case }}_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: @@ -197,13 +220,20 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "true"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "true"), ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "false"), - ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "false") + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "false"), + {% elif 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "true"), + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "false"), + {%- endif %} ]) @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +{%- if 'grpc' in opts.transport %} @mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +{%- endif %} @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -286,8 +316,12 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp @pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {%- if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), - ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), + {%- elif 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + {%- endif %} ]) def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -308,8 +342,12 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {%- if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), - ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio") + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), + {%- elif 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + {%- endif %} ]) def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name): # Check the case credentials file is provided. 
@@ -328,6 +366,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) +{%- if 'grpc' in opts.transport %} def test_{{ service.client_name|snake_case }}_client_options_from_dict(): @@ -345,9 +384,10 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) +{%- endif %} -{% for method in service.methods.values() -%} +{% for method in service.methods.values() if 'grpc' in opts.transport -%} def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), @@ -991,9 +1031,148 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% endfor -%} {#- method in methods #} +{% for method in service.methods.values() if 'rest' in opts.transport -%} +def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void -%} + return_value = None + {% elif method.lro -%} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming -%} + return_value = iter([{{ method.output.ident }}()]) + {% else -%} + return_value = {{ method.output.ident }}( + {%- for field in method.output.fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + {% endif -%} + + # Wrap the value into a proper Response obj + json_return_value = {{ method.output.ident }}.to_json(return_value) + response_value = Response() + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + response = client.{{ method.name|snake_case }}(request) + {% endif %} + + {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {# Cheeser assertion to force code coverage for bad paginated methods #} + assert response.raw_page is response + {% endif %} + + # Establish that the response is the type that we expect. 
+ {% if method.void -%} + assert response is None + {% else %} + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method.output.fields.values() -%} + {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else -%} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif -%} + {% endfor %} + {% endif %} + + +def test_{{ method.name|snake_case }}_rest_from_dict(): + test_{{ method.name|snake_case }}_rest(request_type=dict) + + +def test_{{ method.name|snake_case }}_rest_flattened(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + {% if method.void -%} + return_value = None + {% elif method.lro -%} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming -%} + return_value = iter([{{ method.output.ident }}()]) + {% else -%} + return_value = {{ method.output.ident }}() + {% endif %} + + # Wrap the value into a proper Response obj + json_return_value = {{ method.output.ident }}.to_json(return_value) + response_value = Response() + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ {%- for field in method.flattened_fields.values() if field.field_pb is msg_field_pb %} + {{ field.name }} = {{ field.mock_value }} + {% endfor %} + client.{{ method.name|snake_case }}( + {%- for field in method.flattened_fields.values() %} + {% if field.field_pb is msg_field_pb %}{{ field.name }}={{ field.name }},{% else %}{{ field.name }}={{ field.mock_value }},{% endif %} + {%- endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, http_call, http_params = req.mock_calls[0] + body = http_params.get('json') + {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' -%} + assert TimestampRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} + {% elif field.ident|string() == 'duration.Duration' -%} + assert DurationRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} + {% else -%} + assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False) + {%- elif field.field_pb is str_field_pb %}{{ field.mock_value }} + {%- else %}str({{ field.mock_value }}) + {%- endif %} in http_call[1] + str(body) + {% endif %} + {% endif %}{% endfor %} + + +def test_{{ method.name|snake_case }}_rest_flattened_error(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.{{ method.name|snake_case }}( + {{ method.input.ident }}(), + {%- for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {%- endfor %} + ) + + +{% endfor -%} def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
- transport = transports.{{ service.name }}GrpcTransport( + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): @@ -1003,7 +1182,7 @@ def test_credentials_transport_error(): ) # It is an error to provide a credentials file and a transport instance. - transport = transports.{{ service.name }}GrpcTransport( + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): @@ -1013,7 +1192,7 @@ def test_credentials_transport_error(): ) # It is an error to provide scopes and a transport instance. - transport = transports.{{ service.name }}GrpcTransport( + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( credentials=credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): @@ -1023,16 +1202,15 @@ def test_credentials_transport_error(): ) - def test_transport_instance(): # A client may be instantiated with a custom transport instance. - transport = transports.{{ service.name }}GrpcTransport( + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( credentials=credentials.AnonymousCredentials(), ) client = {{ service.client_name }}(transport=transport) assert client.transport is transport - +{% if 'grpc' in opts.transport %} def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.{{ service.name }}GrpcTransport( @@ -1046,11 +1224,15 @@ def test_transport_get_channel(): ) channel = transport.grpc_channel assert channel - +{% endif %} @pytest.mark.parametrize("transport_class", [ + {%- if 'grpc' in opts.transport %} transports.{{ service.grpc_transport_name }}, - transports.{{ service.grpc_asyncio_transport_name }} + transports.{{ service.grpc_asyncio_transport_name }}, + {%- elif 'rest' in opts.transport %} + transports.{{ service.rest_transport_name }}, + {%- endif %} ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1059,7 +1241,7 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() - +{% if 'grpc' in opts.transport %} def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = {{ service.client_name }}( @@ -1069,7 +1251,7 @@ def test_transport_grpc_default(): client.transport, transports.{{ service.name }}GrpcTransport, ) - +{% endif %} def test_{{ service.name|snake_case }}_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error @@ -1151,7 +1333,7 @@ def test_{{ service.name|snake_case }}_auth_adc(): quota_project_id=None, ) - +{% if 'grpc' in opts.transport %} def test_{{ service.name|snake_case }}_transport_auth_adc(): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -1164,6 +1346,7 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(): {%- endfor %}), quota_project_id="octopus", ) +{% endif %} def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} @@ -1184,7 +1367,7 @@ def test_{{ service.name|snake_case }}_host_with_port(): assert client.transport._host == '{{ host }}:8000' {% endwith %} - +{% if 'grpc' in opts.transport %} def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.insecure_channel('http://localhost/') @@ -1334,6 +1517,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client {% endif -%} +{% endif %} {# if grpc in opts #} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} {% for message in service.resource_messages|sort(attribute="resource_type") -%} @@ -1404,7 +1588,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): prep.assert_called_once_with(client_info) -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods and 'grpc' in opts.transport %} def test_set_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), transport=transport, diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 9729591c3c4b..98d31c283fc6 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -15,6 +15,8 @@ from gapic.utils.cache import cached_property from gapic.utils.case import to_snake_case from gapic.utils.case import to_camel_case +from gapic.utils.checks import is_msg_field_pb +from gapic.utils.checks import is_str_field_pb from gapic.utils.code import empty from gapic.utils.code import nth from gapic.utils.code import partition @@ -32,6 +34,8 @@ 
'cached_property', 'doc', 'empty', + 'is_msg_field_pb', + 'is_str_field_pb', 'nth', 'Options', 'partition', diff --git a/packages/gapic-generator/gapic/utils/case.py b/packages/gapic-generator/gapic/utils/case.py index f58aa4adc6e0..635d2945c5bc 100644 --- a/packages/gapic-generator/gapic/utils/case.py +++ b/packages/gapic-generator/gapic/utils/case.py @@ -21,7 +21,7 @@ def to_snake_case(s: str) -> str: This is provided to templates as the ``snake_case`` filter. Args: - s (str): The input string, provided in any sane case system. + s (str): The input string, provided in any sane case system without spaces. Returns: str: The string in snake case (and all lower-cased). @@ -53,7 +53,7 @@ def to_camel_case(s: str) -> str: This is provided to templates as the ``camel_case`` filter. Args: - s (str): The input string, provided in any sane case system + s (str): The input string, provided in any sane case system without spaces. Returns: str: The string in lower camel case. diff --git a/packages/gapic-generator/gapic/utils/checks.py b/packages/gapic-generator/gapic/utils/checks.py new file mode 100644 index 000000000000..a4f7ec7445bf --- /dev/null +++ b/packages/gapic-generator/gapic/utils/checks.py @@ -0,0 +1,33 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.protobuf.descriptor_pb2 import FieldDescriptorProto + + +def is_str_field_pb(field_pb: FieldDescriptorProto) -> bool: + """Determine if field_pb is of type string. 
+ + Args: + field (Field): The input field as a FieldDescriptorProto + """ + return field_pb.type == FieldDescriptorProto.TYPE_STRING + + +def is_msg_field_pb(field_pb: FieldDescriptorProto) -> bool: + """Determine if field_pb is of type Message. + + Args: + field (Field): The input field as a FieldDescriptorProto. + """ + return field_pb.type == FieldDescriptorProto.TYPE_MESSAGE diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 97793e4433f3..3f66033d42f8 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -116,7 +116,7 @@ def test_get_response_fails_invalid_file_paths(): assert "%proto" in ex_str and "%service" in ex_str -def test_get_response_ignores_unwanted_transports(): +def test_get_response_ignores_unwanted_transports_and_clients(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ @@ -125,31 +125,49 @@ def test_get_response_ignores_unwanted_transports(): "foo/%service/transports/grpc.py.j2", "foo/%service/transports/__init__.py.j2", "foo/%service/transports/base.py.j2", + "foo/%service/async_client.py.j2", + "foo/%service/client.py.j2", "mollusks/squid/sample.py.j2", ] with mock.patch.object(jinja2.Environment, "get_template") as gt: gt.return_value = jinja2.Template("Service: {{ service.name }}") + api_schema = make_api( + make_proto( + descriptor_pb2.FileDescriptorProto( + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="SomeService"), + ] + ), + ) + ) + cgr = g.get_response( - api_schema=make_api( - make_proto( - descriptor_pb2.FileDescriptorProto( - service=[ - descriptor_pb2.ServiceDescriptorProto( - name="SomeService"), - ] - ), - ) - ), + api_schema=api_schema, opts=Options.build("transport=river+car") ) - - assert len(cgr.file) == 4 + assert len(cgr.file) == 5 assert {i.name for i in 
cgr.file} == { "foo/some_service/transports/river.py", "foo/some_service/transports/car.py", "foo/some_service/transports/__init__.py", "foo/some_service/transports/base.py", + # Only generate async client with grpc transport + "foo/some_service/client.py", + } + + cgr = g.get_response( + api_schema=api_schema, + opts=Options.build("transport=grpc") + ) + assert len(cgr.file) == 5 + assert {i.name for i in cgr.file} == { + "foo/some_service/transports/grpc.py", + "foo/some_service/transports/__init__.py", + "foo/some_service/transports/base.py", + "foo/some_service/client.py", + "foo/some_service/async_client.py", } diff --git a/packages/gapic-generator/tests/unit/utils/test_checks.py b/packages/gapic-generator/tests/unit/utils/test_checks.py new file mode 100644 index 000000000000..32d5b33b493b --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_checks.py @@ -0,0 +1,34 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from gapic.utils import checks +from test_utils.test_utils import make_field, make_message + + +def test_is_str_field_pb(): + msg_field = make_field('msg_field', message=make_message('test_msg')) + str_field = make_field('str_field', type=9) + int_field = make_field('int_field', type=5) + assert not checks.is_str_field_pb(msg_field.field_pb) + assert checks.is_str_field_pb(str_field.field_pb) + assert not checks.is_str_field_pb(int_field.field_pb) + + +def test_is_msg_field_pb(): + msg_field = make_field('msg_field', message=make_message('test_msg')) + str_field = make_field('str_field', type=9) + int_field = make_field('int_field', type=5) + assert checks.is_msg_field_pb(msg_field.field_pb) + assert not checks.is_msg_field_pb(str_field.field_pb) + assert not checks.is_msg_field_pb(int_field.field_pb) From 5b2972639cdd5f3681065cc9e0637aa0d46c0ace Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 4 Jan 2021 09:58:58 -0800 Subject: [PATCH 0439/1339] fix: fix missing .coveragerc and the broken bazel build (#723) --- packages/gapic-generator/BUILD.bazel | 2 +- packages/gapic-generator/WORKSPACE | 15 ++++----------- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 4d6451aa92ca..0ec34987b0c7 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -52,7 +52,7 @@ toolchain( py_binary( name = "gapic_plugin", srcs = glob(["gapic/**/*.py"]), - data = [":pandoc_binary"] + glob(["gapic/**/*.j2"]), + data = [":pandoc_binary"] + glob(["gapic/**/*.j2", "gapic/**/.*.j2"]), main = "gapic/cli/generate_with_pandoc.py", python_version = "PY3", visibility = ["//visibility:public"], diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 673c802f9962..d6fa214f2b1b 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ 
-9,14 +9,10 @@ http_archive( http_archive( name = "rules_python", - strip_prefix = "rules_python-0.1.0, + strip_prefix = "rules_python-0.1.0", url = "https://github.com/bazelbuild/rules_python/archive/0.1.0.tar.gz", ) -load("@rules_python//python:repositories.bzl", "py_repositories") - -py_repositories() - load("@rules_python//python:pip.bzl", "pip_repositories") pip_repositories() @@ -24,19 +20,16 @@ pip_repositories() # # Import gapic-generator-python specific dependencies # -load("//:repositories.bzl", +load( + "//:repositories.bzl", "gapic_generator_python", - "gapic_generator_register_toolchains" + "gapic_generator_register_toolchains", ) gapic_generator_python() gapic_generator_register_toolchains() -load("@gapic_generator_python_pip_deps//:requirements.bzl", "pip_install") - -pip_install() - load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") protobuf_deps() From 51f2671a344242df59c83a4268d00b6c95584159 Mon Sep 17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Tue, 5 Jan 2021 15:10:34 -0500 Subject: [PATCH 0440/1339] fix: Update gapic-generator-python to gracefully handle internal google inconsistencies (#721) Release-As: 0.39.1 --- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f912c479a301..6affa40bc8fa 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1369,7 +1369,7 @@ def test_{{ service.name|snake_case }}_host_with_port(): {% if 'grpc' in opts.transport %} def test_{{ service.name|snake_case }}_grpc_transport_channel(): - channel = 
grpc.insecure_channel('http://localhost/') + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.{{ service.name }}GrpcTransport( @@ -1382,7 +1382,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel(): def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel('http://localhost/') + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.{{ service.name }}GrpcAsyncIOTransport( @@ -1399,7 +1399,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s transport_class ): with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1450,7 +1450,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() From 032599fdcc9ff4cd673651589b218dbbf5aabf2e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 5 Jan 2021 13:19:08 -0700 Subject: [PATCH 0441/1339] chore: release 0.39.1 (#724) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ 1 file 
changed, 9 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 395709717935..06c040bc2423 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +### [0.39.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.39.0...v0.39.1) (2021-01-05) + + +### Bug Fixes + +* fix missing .coveragerc and the broken bazel build ([#723](https://www.github.com/googleapis/gapic-generator-python/issues/723)) ([7f8235f](https://www.github.com/googleapis/gapic-generator-python/commit/7f8235f6dfbd309a879895701aeb5e73c6425483)) +* Update gapic-generator-python to gracefully handle internal google inconsistencies ([#721](https://www.github.com/googleapis/gapic-generator-python/issues/721)) ([b984295](https://www.github.com/googleapis/gapic-generator-python/commit/b9842952433924a1d8de4ef9cc3ea9e7fa91c01a)) +* updating testing, rest-only generation, & minor bug-fixes ([#716](https://www.github.com/googleapis/gapic-generator-python/issues/716)) ([56c31de](https://www.github.com/googleapis/gapic-generator-python/commit/56c31de4a9f661e3d69b52e19c9a28dddfe9d7dc)) + ## [0.39.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.38.0...v0.39.0) (2020-12-22) From 758c3862284abb633180a90ab2ac7bfa997517dc Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 15 Jan 2021 01:10:13 +0100 Subject: [PATCH 0442/1339] chore(deps): update dependency google-api-core to v1.25.0 (#733) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index c435becd3389..636b26f190d1 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.24.1 +google-api-core==1.25.0 googleapis-common-protos==1.52.0 jinja2==2.11.2 
MarkupSafe==1.1.1 From 5ba617125f09f7727beeae2bbbeb4c6cd6d479cf Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Tue, 19 Jan 2021 14:29:26 -0800 Subject: [PATCH 0443/1339] feat: add mtls feature to rest transport (#731) * feat: add mtls support to rest transport * update * update * update --- .../%sub/services/%service/client.py.j2 | 16 +- .../services/%service/transports/grpc.py.j2 | 20 +- .../%service/transports/grpc_asyncio.py.j2 | 20 +- .../services/%service/transports/rest.py.j2 | 9 +- .../%name_%version/%sub/test_%service.py.j2 | 171 ++++++++++++------ 5 files changed, 157 insertions(+), 79 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 21b0c9f20166..ada75471b0ec 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -239,21 +239,15 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Create SSL credentials for mutual TLS if needed. 
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = mtls.default_client_cert_source() if is_mtls else None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -292,7 +286,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 0bcd1fa64fd4..b1d1d18917f8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -51,6 +51,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, 
bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -82,6 +83,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -98,6 +103,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
@@ -107,8 +117,6 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: @@ -144,12 +152,18 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if credentials is None: credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 83013d83ee15..94da8db76a93 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -94,6 +94,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, 
bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -126,6 +127,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -141,6 +146,11 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): and ``credentials_file`` are passed. """ self._ssl_channel_credentials = ssl_channel_credentials + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: # Sanity check: Ensure that channel and credentials are not both @@ -151,8 +161,6 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: @@ -188,12 +196,18 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if credentials is None: credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + 
self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 338bfcbaff76..3446a06e90d5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -48,7 +48,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): credentials: credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -68,8 +68,9 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -89,6 +90,8 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {%- if service.has_lro %} self._operations_client = None {%- endif %} + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) {%- if service.has_lro %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6affa40bc8fa..860625b493ac 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -156,7 +156,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -172,7 +172,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -188,7 +188,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -214,7 +214,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - 
ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -244,76 +244,67 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: - ssl_channel_creds = mock.Mock() - with mock.patch('grpc.ssl_channel_credentials', return_value=ssl_channel_creds): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): - with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: - with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ssl_credentials_mock.return_value - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): - with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ {%- if 'grpc' in opts.transport %} @@ -336,7 +327,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -362,7 +353,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, 
scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -380,7 +371,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -1348,6 +1339,64 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(): ) {% endif %} +{% if 'grpc' in opts.transport %} +@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) +def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) +{% endif %} + +{% if 'rest' in opts.transport %} +def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtls(): + cred = credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.{{ service.rest_transport_name }} ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) +{% endif %} + def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] -%} client = {{ service.client_name }}( @@ -1394,6 +1443,8 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_source( transport_class @@ -1440,6 +1491,8 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( transport_class From e85dcc55e30a438fe820250a2d5c48975fa005a5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 19 Jan 2021 14:34:00 -0800 Subject: [PATCH 0444/1339] chore: release 0.40.0 (#736) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 06c040bc2423..1db9c2ab4d5f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.40.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.39.1...v0.40.0) (2021-01-19) + + +### Features + +* add mtls feature to rest transport ([#731](https://www.github.com/googleapis/gapic-generator-python/issues/731)) ([524dbab](https://www.github.com/googleapis/gapic-generator-python/commit/524dbab16d248198ca10a08ecede4600fd36cefc)) + ### [0.39.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.39.0...v0.39.1) (2021-01-05) From 207814f61fd8d56f77b160cb0cf06ae4ad78ca2b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 20 Jan 2021 00:38:07 +0100 Subject: [PATCH 0445/1339] chore(deps): update dependency pyyaml to v5.4 (#735) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [PyYAML](https://pyyaml.org/) ([source](https://togithub.com/yaml/pyyaml)) | minor | `==5.3.1` -> `==5.4` | --- ### Release Notes
yaml/pyyaml ### [`v5.4`](https://togithub.com/yaml/pyyaml/compare/5.3.1...5.4) [Compare Source](https://togithub.com/yaml/pyyaml/compare/5.3.1...5.4)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 636b26f190d1..ba548cf6843c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -5,5 +5,5 @@ jinja2==2.11.2 MarkupSafe==1.1.1 protobuf==3.14.0 pypandoc==1.5 -PyYAML==5.3.1 +PyYAML==5.4 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 996cd52c26f6820b8749ae7926d431bdc93f47b8 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 20 Jan 2021 13:54:49 -0800 Subject: [PATCH 0446/1339] fix: raise for rest transport http error (#738) --- .../%sub/services/%service/transports/rest.py.j2 | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 3446a06e90d5..7d9862941d17 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -199,6 +199,9 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): json=body, {%- endif %} ) + + # Raise requests.exceptions.HTTPError if the status code is >= 400 + response.raise_for_status() {%- if not method.void %} # Return the response From dd21e380d1943ec8aafb3722b61a26b1f3afa09e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 20 Jan 2021 14:06:56 -0800 Subject: [PATCH 0447/1339] chore: release 0.40.1 (#739) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1db9c2ab4d5f..22175aa7fdaf 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.0...v0.40.1) (2021-01-20) + + +### Bug Fixes + +* raise for rest transport http error ([#738](https://www.github.com/googleapis/gapic-generator-python/issues/738)) ([7d24f3d](https://www.github.com/googleapis/gapic-generator-python/commit/7d24f3d81499ad714e57c7c9562b842c09e49d20)) + ## [0.40.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.39.1...v0.40.0) (2021-01-19) From d353d677e631358ab98a46242fae4b703d7e10ad Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 20 Jan 2021 17:26:27 -0800 Subject: [PATCH 0448/1339] fix: fix rest transport unit test template (#741) --- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 860625b493ac..e1c42f89a2d1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1056,6 +1056,7 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type # Wrap the value into a proper Response obj json_return_value = {{ method.output.ident }}.to_json(return_value) response_value = Response() + response_value.status_code = 200 response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} @@ -1111,6 +1112,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(): # Wrap the value into a proper Response obj json_return_value = {{ method.output.ident }}.to_json(return_value) response_value = Response() + response_value.status_code = 200 response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value From f43d49c9f24cb577f2fe37ffa1432994ba8987e6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 21 Jan 2021 01:32:04 +0000 Subject: [PATCH 0449/1339] chore: release 0.40.2 (#745) :robot: I have created a release \*beep\* \*boop\* --- ### [0.40.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.1...v0.40.2) (2021-01-21) ### Bug Fixes * fix rest transport unit test template ([#741](https://www.github.com/googleapis/gapic-generator-python/issues/741)) ([54b9806](https://www.github.com/googleapis/gapic-generator-python/commit/54b98060f881c8f0424c7e146488d3adc19fec7a)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 22175aa7fdaf..f127e33a2d23 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.1...v0.40.2) (2021-01-21) + + +### Bug Fixes + +* fix rest transport unit test template ([#741](https://www.github.com/googleapis/gapic-generator-python/issues/741)) ([54b9806](https://www.github.com/googleapis/gapic-generator-python/commit/54b98060f881c8f0424c7e146488d3adc19fec7a)) + ### [0.40.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.0...v0.40.1) (2021-01-20) From 75d3ef647e234d9dc405062324ac83856a517fee Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 21 Jan 2021 09:29:59 -0700 Subject: [PATCH 0450/1339] fix: stabilize order of query_params (#742) --- .../%name_%version/%sub/services/%service/transports/rest.py.j2 | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 7d9862941d17..54ec5ca92ec9 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -182,9 +182,11 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields # not required for GCE query_params = { + {% filter 
sort_lines -%} {%- for field in method.query_params %} '{{ field|camel_case }}': request.{{ field }}, {%- endfor %} + {% endfilter -%} } # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here # discards default values From f2219e64348d639c57b093250d46e8ce77d2a3b8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 26 Jan 2021 21:22:05 +0000 Subject: [PATCH 0451/1339] chore: release 0.40.3 (#747) :robot: I have created a release \*beep\* \*boop\* --- ### [0.40.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.2...v0.40.3) (2021-01-21) ### Bug Fixes * stabilize order of query_params ([#742](https://www.github.com/googleapis/gapic-generator-python/issues/742)) ([2835ddb](https://www.github.com/googleapis/gapic-generator-python/commit/2835ddbe62b520e2e4c84f02810b1ac936c9cbb9)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f127e33a2d23..f19d635297c5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.2...v0.40.3) (2021-01-21) + + +### Bug Fixes + +* stabilize order of query_params ([#742](https://www.github.com/googleapis/gapic-generator-python/issues/742)) ([2835ddb](https://www.github.com/googleapis/gapic-generator-python/commit/2835ddbe62b520e2e4c84f02810b1ac936c9cbb9)) + ### [0.40.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.1...v0.40.2) (2021-01-21) From 8cefab6a6741f3ecae764c823c73d917ea76dc7a Mon Sep 17 00:00:00 2001 From: yon-mg <71726126+yon-mg@users.noreply.github.com> Date: Thu, 28 Jan 2021 14:03:33 -0800 Subject: [PATCH 0452/1339] fix: mypy 0.800 update errors (#754) --- .../tests/unit/gapic/%name_%version/%sub/__init__.py | 0 packages/gapic-generator/gapic/templates/noxfile.py.j2 | 1 + packages/gapic-generator/noxfile.py | 2 +- 3 files changed, 2 insertions(+), 1 deletion(-) delete mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index ee97ea01cb34..b6225d867da3 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -32,6 +32,7 @@ def mypy(session): session.install('.') session.run( 'mypy', + 
and make sure they **do not** have `namespace_packages` in their `setup.py` file (an artifact from the legacy `pkg_resources-style` packages)
a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -57,8 +57,8 @@ py_binary( python_version = "PY3", visibility = ["//visibility:public"], deps = [ - "@com_google_protobuf//:protobuf_python", "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + requirement("protobuf"), requirement("click"), requirement("google-api-core"), requirement("googleapis-common-protos"), diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index d6fa214f2b1b..9475af113bbd 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -44,11 +44,9 @@ load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") grpc_deps() -load("@upb//bazel:repository_defs.bzl", "bazel_version_repository") +load("@com_github_grpc_grpc//bazel:grpc_extra_deps.bzl", "grpc_extra_deps") -bazel_version_repository( - name = "bazel_version", -) +grpc_extra_deps() load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 38ad41907e2d..c6af87d4bc3e 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -15,7 +15,7 @@ def gapic_generator_python(): requirements = "@gapic_generator_python//:requirements.txt", ) - _protobuf_version = "3.13.0" + _protobuf_version = "3.14.0" _protobuf_version_in_link = "v%s" % _protobuf_version _maybe( http_archive, From 3af2f51ba3eb49230bbffd1980c01bd8a4d2a462 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 29 Jan 2021 08:38:27 -0700 Subject: [PATCH 0454/1339] chore: release 0.40.4 (#755) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md 
This pretty much implements the long-standing feature request for rules_python https://github.com/bazelbuild/bazel/issues/4939, but only in scope of gapic-generator-python.
* Format with autopep8 --- .../gapic-generator/gapic/cli/generate_with_pandoc.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/cli/generate_with_pandoc.py b/packages/gapic-generator/gapic/cli/generate_with_pandoc.py index 264d1c8b0b3c..4a31f76292dd 100644 --- a/packages/gapic-generator/gapic/cli/generate_with_pandoc.py +++ b/packages/gapic-generator/gapic/cli/generate_with_pandoc.py @@ -1,9 +1,14 @@ import os - -from gapic.cli import generate +import sys if __name__ == '__main__': os.environ['PYPANDOC_PANDOC'] = os.path.join( os.path.abspath(__file__).rsplit("gapic", 1)[0], "pandoc") os.environ['LC_ALL'] = 'C.UTF-8' - generate.generate() + os.environ['PYTHONNOUSERSITE'] = 'True' + + entry_point_script = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "generate.py") + args = [sys.executable, entry_point_script] + sys.argv[1:] + + os.execv(args[0], args) From 9d9d3e65bd9f51bbf07d98efe0b8b62593b93c89 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 1 Feb 2021 12:23:39 -0700 Subject: [PATCH 0456/1339] chore: release 0.40.5 (#759) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index bb26bfcb9f5b..df99e6f750a2 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.4...v0.40.5) (2021-02-01) + + +### Bug Fixes + +* Fix namespace packages conflict issue ([#757](https://www.github.com/googleapis/gapic-generator-python/issues/757)) ([8035662](https://www.github.com/googleapis/gapic-generator-python/commit/8035662bdcfbdffd1c294c5d28479733358407ca)) + ### 
[0.40.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.3...v0.40.4) (2021-01-28) From 62d98146b672f359336e9215eaf5e12f9552578b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 1 Feb 2021 20:50:03 +0100 Subject: [PATCH 0457/1339] chore(deps): update dependency jinja2 to v2.11.3 (#758) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [jinja2](https://palletsprojects.com/p/jinja/) ([source](https://togithub.com/pallets/jinja)) | `==2.11.2` -> `==2.11.3` | [![age](https://badges.renovateapi.com/packages/pypi/jinja2/2.11.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/jinja2/2.11.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/jinja2/2.11.3/compatibility-slim/2.11.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/jinja2/2.11.3/confidence-slim/2.11.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pallets/jinja ### [`v2.11.3`](https://togithub.com/pallets/jinja/blob/master/CHANGES.rst#Version-2113) [Compare Source](https://togithub.com/pallets/jinja/compare/2.11.2...2.11.3) Released 2021-01-31 - Improve the speed of the `urlize` filter by reducing regex backtracking. Email matching requires a word character at the start of the domain part, and only word characters in the TLD. :pr:`1343`
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ba548cf6843c..7784cde09bdd 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==7.1.2 google-api-core==1.25.0 googleapis-common-protos==1.52.0 -jinja2==2.11.2 +jinja2==2.11.3 MarkupSafe==1.1.1 protobuf==3.14.0 pypandoc==1.5 From dc2597fdd8839dd03df96a599d3b3a84d5d7b784 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 2 Feb 2021 09:46:05 -0800 Subject: [PATCH 0458/1339] fix: remove duplicate assignment of certain flattened, repeated fields (#760) Fix for #756. Under certain circumstances, flattened, repeated fields could be duplicated during request construction. 
--- .../%sub/services/%service/client.py.j2 | 28 +++++++++---------- .../gapic/ads-templates/noxfile.py.j2 | 4 +-- .../gapic/ads-templates/setup.py.j2 | 1 - .../%sub/services/%service/client.py.j2 | 18 ++++++------ 4 files changed, 25 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 4d2f0abebb48..2f36e5058b70 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -341,9 +341,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - {% endif -%} + {% endif -%} {# method.flattened_fields #} {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} - # The request isn't a proto-plus wrapped type, + # The request isn't a proto-plus wrapped type. # so it must be constructed via keyword expansion. if isinstance(request, dict): request = {{ method.input.ident }}(**request) @@ -351,7 +351,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): elif not request: request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) {% endif -%}{# Cross-package req and flattened fields #} - {%- else %} + {%- else %} {# Request is in _our_ package #} # Minor optimization to avoid making a copy if the user passes # in a {{ method.input.ident }}. 
# There's no risk of modifying the input as we've already verified @@ -364,22 +364,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # If we have keyword arguments corresponding to fields on the # request, apply these. {% endif -%} - {%- for key, field in method.flattened_fields.items() if not(field.repeated and method.input.ident.package != method.ident.package) %} + {%- for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} {# Map-y fields can be _updated_, however #} - {%- for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} - + {%- for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} + {%- if field.map %} {# map implies repeated, but repeated does NOT imply map#} if {{ field.name }}: request.{{ key }}.update({{ field.name }}) - {%- endfor %} + {%- else %} {# And list-y fields can be _extended_ -#} - {%- for key, field in method.flattened_fields.items() if field.repeated and not field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) - {%- endfor %} - {%- endif %} + {%- endif %} {# field.map #} + {%- endfor %} {# key, field in method.flattened_fields.items() #} + {%- endif %} {# method.client_streaming #} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -397,7 +397,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {%- endfor %} )), ) - {%- endif %} + {%- endif %} {# method.field_headers #} # Send the request. 
{% if not method.void %}response = {% endif %}rpc( @@ -405,7 +405,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request, {%- else %} requests, - {%- endif %} + {%- endif %} {# method.client_streaming #} retry=retry, timeout=timeout, metadata=metadata, @@ -429,12 +429,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): response=response, metadata=metadata, ) - {%- endif %} + {%- endif %} {# method.lro #} {%- if not method.void %} # Done; return the response. return response - {%- endif %} + {%- endif %} {# method.void #} {{ '\n' }} {% endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 4760bc548bac..36dcd2b2e1ec 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -6,7 +6,7 @@ import os import nox # type: ignore -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.7', '3.8']) def unit(session): """Run the unit test suite.""" @@ -24,7 +24,7 @@ def unit(session): ) -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.7', '3.8']) def mypy(session): """Run the type checker.""" session.install('mypy') diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 92ae4ea7afc8..18f06803ddc6 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -35,7 +35,6 @@ setuptools.setup( 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Topic :: Internet', diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index ada75471b0ec..d7073af7d05e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -244,7 +244,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if use_client_cert: if client_options.client_cert_source: is_mtls = True - client_cert_source_func = client_options.client_cert_source + client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() client_cert_source_func = mtls.default_client_cert_source() if is_mtls else None @@ -381,22 +381,22 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # If we have keyword arguments corresponding to fields on the # request, apply these. {% endif -%} - {%- for key, field in method.flattened_fields.items() if not field.repeated and method.input.ident.package == method.ident.package %} + {%- for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} {%- endfor %} - {# Map-y fields can be _updated_, however #} - {%- for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} - + {# Map-y fields can be _updated_, however #} + {%- for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} + {%- if field.map %} {# map implies repeated, but repeated does NOT imply map#} if {{ field.name }}: request.{{ key }}.update({{ field.name }}) - {%- endfor %} + {%- else %} {# And list-y fields can be _extended_ -#} - {%- for key, field in method.flattened_fields.items() if field.repeated and not 
field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) - {%- endfor %} - {%- endif %} + {%- endif %} {# field.map #} + {%- endfor %} {# method.flattened_fields.items() #} + {%- endif %} {# method.client_streaming #} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. From 4ab1632f88b43fc7583cc0e3957c57e56bb4bb96 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Feb 2021 18:06:04 +0000 Subject: [PATCH 0459/1339] chore: release 0.40.6 (#762) :robot: I have created a release \*beep\* \*boop\* --- ### [0.40.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.5...v0.40.6) (2021-02-02) ### Bug Fixes * remove duplicate assignment of certain flattened, repeated fields ([#760](https://www.github.com/googleapis/gapic-generator-python/issues/760)) ([cdbc221](https://www.github.com/googleapis/gapic-generator-python/commit/cdbc22130a176e733c529f60a6b8b1d224e82e89)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index df99e6f750a2..acd91750a911 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.5...v0.40.6) (2021-02-02) + + +### Bug Fixes + +* remove duplicate assignment of certain flattened, repeated fields ([#760](https://www.github.com/googleapis/gapic-generator-python/issues/760)) ([cdbc221](https://www.github.com/googleapis/gapic-generator-python/commit/cdbc22130a176e733c529f60a6b8b1d224e82e89)) + ### [0.40.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.4...v0.40.5) (2021-02-01) From 5dee8ce82c1b6e0fbc8fc3d53d5b106f8b0bd8cd Mon Sep 17 00:00:00 2001 From: yon-mg <71726126+yon-mg@users.noreply.github.com> Date: Wed, 3 Feb 2021 08:10:03 -0800 Subject: [PATCH 0460/1339] fix: don't use integer for enums in json encoding (#761) --- .../%sub/services/%service/transports/rest.py.j2 | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 54ec5ca92ec9..7e84b78a996c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -157,11 +157,13 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {%- if method.http_opt['body'] != '*' %} body = {{ method.input.fields[method.http_opt['body']].type.ident }}.to_json( request.{{ method.http_opt['body'] }}, - 
including_default_value_fields=False + including_default_value_fields=False, + use_integers_for_enums=False ) {%- else %} body = {{ method.input.ident }}.to_json( - request + request, + use_integers_for_enums=False ) {%- endif %} {%- endif %} From c152ce8876eaa921a1ed7595cf0a16e67ab2d899 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 3 Feb 2021 11:01:58 -0800 Subject: [PATCH 0461/1339] chore: release 0.40.7 (#764) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index acd91750a911..39136258c0f7 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.6...v0.40.7) (2021-02-03) + + +### Bug Fixes + +* don't use integer for enums in json encoding ([#761](https://www.github.com/googleapis/gapic-generator-python/issues/761)) ([6d37a73](https://www.github.com/googleapis/gapic-generator-python/commit/6d37a7388995b90428ee6293bcce5d48cd9a48f8)) + ### [0.40.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.5...v0.40.6) (2021-02-02) From aa7a3a0af8357d7934f611abec1de59928fd2928 Mon Sep 17 00:00:00 2001 From: yon-mg <71726126+yon-mg@users.noreply.github.com> Date: Thu, 4 Feb 2021 11:42:11 -0800 Subject: [PATCH 0462/1339] fix: update paging implementation to handle unconventional pagination (#750) * fix: update paging implementation to handle unconventional pagination * fix: typing errors, mypy cli update * fix: mypy cli flag * fix: delete __init__.py, remove -p mypy flag * fix: clearing up statements, tests, minor bug in filter usage * fix: wrong generated type hints --- .../gapic-generator/gapic/schema/wrappers.py | 
17 ++- .../%sub/services/%service/pagers.py.j2 | 14 +- .../services/%service/transports/rest.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 123 +++++++++++++++++- .../tests/unit/schema/wrappers/test_method.py | 50 +++++-- 5 files changed, 189 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index eefe0cdc7e16..812630720b90 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -866,13 +866,22 @@ def paged_result_field(self) -> Optional[Field]: """Return the response pagination field if the method is paginated.""" # If the request field lacks any of the expected pagination fields, # then the method is not paginated. - for page_field in ((self.input, int, 'page_size'), - (self.input, str, 'page_token'), + + # The request must have page_token and next_page_token as they keep track of pages + for source, source_type, name in ((self.input, str, 'page_token'), (self.output, str, 'next_page_token')): - field = page_field[0].fields.get(page_field[2], None) - if not field or field.type != page_field[1]: + field = source.fields.get(name, None) + if not field or field.type != source_type: return None + # The request must have max_results or page_size + page_fields = (self.input.fields.get('max_results', None), + self.input.fields.get('page_size', None)) + page_field_size = next( + (field for field in page_fields if field), None) + if not page_field_size or page_field_size.type != int: + return None + # Return the first repeated field. 
for field in self.output.fields.values(): if field.repeated: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index ea08466ba0ae..ca3cc8d40e02 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -6,7 +6,7 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. -#} -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional {% filter sort_lines -%} {% for method in service.methods.values() | selectattr('paged_result_field') -%} @@ -68,14 +68,25 @@ class {{ method.name }}Pager: self._response = self._method(self._request, metadata=self._metadata) yield self._response + {% if method.paged_result_field.map %} + def __iter__(self) -> Iterable[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: + for page in self.pages: + yield from page.{{ method.paged_result_field.name}}.items() + + def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: + return self._response.items.get(key) + {% else %} def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: for page in self.pages: yield from page.{{ method.paged_result_field.name }} + {% endif %} def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) +{# TODO(yon-mg): remove on rest async transport impl #} +{% if 'grpc' in opts.transport %} class {{ method.name }}AsyncPager: """A pager for iterating through ``{{ method.name|snake_case }}`` requests. 
@@ -138,5 +149,6 @@ class {{ method.name }}AsyncPager: def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) +{% endif %} {% endfor %} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 7e84b78a996c..060e5d0744e1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -184,11 +184,9 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields # not required for GCE query_params = { - {% filter sort_lines -%} - {%- for field in method.query_params %} + {%- for field in method.query_params | sort%} '{{ field|camel_case }}': request.{{ field }}, {%- endfor %} - {% endfilter -%} } # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here # discards default values diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e1c42f89a2d1..59611cfd3307 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1020,7 +1020,7 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): assert response.raw_page is response {% endif %} {#- method.paged_result_field #} -{% endfor -%} {#- method in methods #} +{% endfor -%} {#- method in methods for grpc #} {% for method in service.methods.values() if 'rest' in 
opts.transport -%} def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): @@ -1162,7 +1162,126 @@ def test_{{ method.name|snake_case }}_rest_flattened_error(): ) -{% endfor -%} +{% if method.paged_result_field %} +def test_{{ method.name|snake_case }}_pager(): + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Set the response as a series of pages + {% if method.paged_result_field.map%} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + ) + {% else %} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ 
method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + ) + {% endif %} + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple({{ method.output.ident }}.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + metadata = () + {% if method.field_headers -%} + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {%- for field_header in method.field_headers %} + {%- if not method.client_streaming %} + ('{{ field_header }}', ''), + {%- endif %} + {%- endfor %} + )), + ) + {% endif -%} + pager = client.{{ method.name|snake_case }}(request={}) + + assert pager._metadata == metadata + + {% if method.paged_result_field.map %} + assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) + assert pager.get('h') is None + {% endif %} + + results = list(pager) + assert len(results) == 6 + {% if method.paged_result_field.map %} + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) + for i in results) + {% endif %} + + pages = list(client.{{ method.name|snake_case }}(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +{% endif %} {# paged methods #} 
+{% endfor -%} {#- method in methods for rest #} def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index bcaeb6880093..2162effbbbad 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -66,19 +66,38 @@ def test_method_client_output_empty(): def test_method_client_output_paged(): paged = make_field(name='foos', message=make_message('Foo'), repeated=True) + parent = make_field(name='parent', type=9) # str + page_size = make_field(name='page_size', type=5) # int + page_token = make_field(name='page_token', type=9) # str + input_msg = make_message(name='ListFoosRequest', fields=( - make_field(name='parent', type=9), # str - make_field(name='page_size', type=5), # int - make_field(name='page_token', type=9), # str + parent, + page_size, + page_token, )) output_msg = make_message(name='ListFoosResponse', fields=( paged, make_field(name='next_page_token', type=9), # str )) - method = make_method('ListFoos', - input_message=input_msg, - output_message=output_msg, - ) + method = make_method( + 'ListFoos', + input_message=input_msg, + output_message=output_msg, + ) + assert method.paged_result_field == paged + assert method.client_output.ident.name == 'ListFoosPager' + + max_results = make_field(name='max_results', type=5) # int + input_msg = make_message(name='ListFoosRequest', fields=( + parent, + max_results, + page_token, + )) + method = make_method( + 'ListFoos', + input_message=input_msg, + output_message=output_msg, + ) assert method.paged_result_field == paged assert method.client_output.ident.name == 'ListFoosPager' @@ -123,6 +142,19 @@ def 
test_method_paged_result_field_no_page_field(): ) assert method.paged_result_field is None + method = make_method( + name='Foo', + input_message=make_message( + name='FooRequest', + fields=(make_field(name='page_token', type=9),) # str + ), + output_message=make_message( + name='FooResponse', + fields=(make_field(name='next_page_token', type=9),) # str + ) + ) + assert method.paged_result_field is None + def test_method_paged_result_ref_types(): input_msg = make_message( @@ -139,7 +171,7 @@ def test_method_paged_result_ref_types(): name='ListMolluscsResponse', fields=( make_field(name='molluscs', message=mollusc_msg, repeated=True), - make_field(name='next_page_token', type=9) + make_field(name='next_page_token', type=9) # str ), module='mollusc' ) @@ -207,7 +239,7 @@ def test_flattened_ref_types(): def test_method_paged_result_primitive(): - paged = make_field(name='squids', type=9, repeated=True) + paged = make_field(name='squids', type=9, repeated=True) # str input_msg = make_message( name='ListSquidsRequest', fields=( From 2d30ec51c90c0a99b767f80e3f9cd1ab97c6413b Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Thu, 4 Feb 2021 17:17:38 -0800 Subject: [PATCH 0463/1339] fix: body encoding for rest transport (#768) Basically just replace `json` argument with `data` Apparently, the `json` parameter in requests.Session.request() method does not expect JSON string, but expects python dictionary instead, which is not intuitive and does not even match the documentation of the method: https://github.com/psf/requests/blob/master/requests/sessions.py#L483. At the same time in the Quickstart, it is explicitly said that `json` parameter was added in version `2.4.2` and expects python `dict`, while `data` argument can process raw encoded json string. 
--- .../%name_%version/%sub/services/%service/transports/rest.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 060e5d0744e1..1341571e484c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -198,7 +198,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {% if not method.void %}response = {% endif %}self._session.{{ method.http_opt['verb'] }}( url {%- if 'body' in method.http_opt %}, - json=body, + data=body, {%- endif %} ) From fd7f377d905353e51e16daf21f65bfe70c1fc591 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 4 Feb 2021 18:27:37 -0700 Subject: [PATCH 0464/1339] chore: release 0.40.8 (#767) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 39136258c0f7..3830c72f0fd8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.40.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.7...v0.40.8) (2021-02-05) + + +### Bug Fixes + +* body encoding for rest transport ([#768](https://www.github.com/googleapis/gapic-generator-python/issues/768)) ([cc55a18](https://www.github.com/googleapis/gapic-generator-python/commit/cc55a182b878d78f92aba259c067d47ab1d01e5b)) +* update paging implementation to handle unconventional 
pagination ([#750](https://www.github.com/googleapis/gapic-generator-python/issues/750)) ([eaac3e6](https://www.github.com/googleapis/gapic-generator-python/commit/eaac3e69d366b610ae7551d94d4f546819e24bc2)) + ### [0.40.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.6...v0.40.7) (2021-02-03) From e1597eaa59dc28785730b709deba53091dcec402 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 9 Feb 2021 11:25:22 -0800 Subject: [PATCH 0465/1339] chore: Create tests.yml (#766) Initial conversion to Github Actions for CI --- .../.github/sync-repo-settings.yaml | 29 +- .../.github/workflows/tests.yaml | 321 ++++++++++++++++++ 2 files changed, 332 insertions(+), 18 deletions(-) create mode 100644 packages/gapic-generator/.github/workflows/tests.yaml diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index b3fa3d000780..00d9c8e1f60d 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -5,28 +5,21 @@ branchProtectionRules: - pattern: master isAdminEnforced: true requiredStatusCheckContexts: - - 'ci/circleci: docs' - - 'ci/circleci: mypy' - - 'ci/circleci: showcase' - - 'ci/circleci: showcase-alternative-templates' - - 'ci/circleci: showcase-mtls' - - 'ci/circleci: showcase-mtls-alternative-templates' - - 'ci/circleci: showcase-mypy' - - 'ci/circleci: showcase-mypy-alternative-templates' - - 'ci/circleci: showcase-unit-3.6' - - 'ci/circleci: showcase-unit-3.7' - - 'ci/circleci: showcase-unit-3.8' - - 'ci/circleci: showcase-unit-add-iam-methods' - - 'ci/circleci: showcase-unit-alternative-templates-3.7' - - 'ci/circleci: showcase-unit-alternative-templates-3.8' - - 'ci/circleci: style-check' - - 'ci/circleci: unit-3.6' - - 'ci/circleci: unit-3.7' - - 'ci/circleci: unit-3.8' - 'cla/google' - 'codecov/patch' - 'codecov/project' - 'conventionalcommits.org' + - 'docs' + - 'mypy' + - 'showcase' + 
- 'showcase-mtls' + - 'showcase-unit' + - 'showcase-unit-alternative-templates' + - 'showcase-unit-add-iam-methods' + - 'showcase-mypy' + - 'showcase-mypy-alternative-templates' + - 'unit' + - 'style-check' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true requiresStrictStatusChecks: true diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml new file mode 100644 index 000000000000..5e9f06347974 --- /dev/null +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -0,0 +1,321 @@ +name: Tests + +# Controls when the action will run. +on: + pull_request: + push: + branches: [ $default-branch ] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +jobs: + docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install nox. + run: python -m pip install nox + - name: Build the documentation. + run: nox -s docs + mypy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install nox. + run: python -m pip install nox + - name: Check type annotations. + run: nox -s mypy + # publish_image: + # runs-on: ubuntu-latest + # container: docker + # steps: + # - uses: actions/checkout@v2 + # - setup_remote_docker + # - name: Build Docker image. + # run: docker build . -t gcr.io/gapic-images/gapic-generator-python:latest + # - name: Download curl + # run: apk add --no-cache curl + # - name: Download the GCR credential helper. 
+ # run: | + # curl -fsSL https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v1.5.0/docker-credential-gcr_linux_amd64-1.5.0.tar.gz \ + # | tar xz --to-stdout ./docker-credential-gcr \ + # > /usr/bin/docker-credential-gcr && chmod a+x /usr/bin/docker-credential-gcr + # - name: Set up authentication to Google Container Registry. + # run: | + # echo ${GCLOUD_SERVICE_KEY} > ${GOOGLE_APPLICATION_CREDENTIALS} + # docker-credential-gcr configure-docker + # - name: Tag the Docker image and push it to Google Container Registry. + # run: | + # if [ -n "$CIRCLE_TAG" ]; then + # export MAJOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $1; }'` + # export MINOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $2; }'` + # export PATCH=`echo $CIRCLE_TAG | awk -F '.' '{ print $3; }'` + # docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH + # docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR + # docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR + # docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH + # docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR + # docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR + # fi + # docker push gcr.io/gapic-images/gapic-generator-python:latest + showcase: + strategy: + matrix: + target: [showcase, showcase_alternative_templates] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install system dependencies. 
+ run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install showcase + run: | + sudo mkdir -p /usr/src/showcase + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/googleapis/gapic-showcase/releases/download/v0.12.0/gapic-showcase-0.12.0-linux-amd64.tar.gz --output /usr/src/showcase/showcase-0.12.0-linux-amd64.tar.gz + cd /usr/src/showcase/ + tar -xf showcase-* + ./gapic-showcase run & + cd - + - name: Install nox. + run: python -m pip install nox + - name: Install protoc 3.12.1. + run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - name: Run showcase tests. + run: nox -s ${{ matrix.target }} + showcase-mtls: + strategy: + matrix: + target: [showcase_mtls, showcase_mtls_alternative_templates] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Setup temp directory + run: | + sudo mkdir -p /tmp/workspace/tests/cert/ + sudo chown -R ${USER} /tmp/workspace/ + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Copy mtls files + run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install nox. + run: python -m pip install nox + - name: Install protoc 3.12.1. + run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + cd - + - name: Run showcase tests. 
+ run: | + sudo mkdir gapic_showcase + sudo chown ${USER} gapic_showcase + cd gapic_showcase + curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.11.0/gapic-showcase-0.11.0-linux-amd64.tar.gz | tar xz + ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & + showcase_pid=$! + + cleanup() { + echo "kill showcase server" + kill $showcase_pid + # Wait for the process to die, but don't report error from the kill. + wait $showcase_pid || exit $exit_code + } + trap cleanup EXIT + + cd .. + nox -s ${{ matrix.target }} + # TODO(yon-mg): add compute unit tests + showcase-unit: + strategy: + matrix: + python: [3.6, 3.7, 3.8, 3.9] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install protoc 3.12.1. + run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - name: Install nox. + run: python -m pip install nox + - name: Run unit tests. + run: nox -s showcase_unit-${{ matrix.python }} + showcase-unit-alternative-templates: + strategy: + matrix: + python: [3.7, 3.8, 3.9] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install protoc 3.12.1. 
+ run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - name: Install nox. + run: python -m pip install nox + - name: Run unit tests. + run: nox -s showcase_unit_alternative_templates-${{ matrix.python }} + showcase-unit-add-iam-methods: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install protoc 3.12.1. + run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - name: Install nox. + run: python -m pip install nox + - name: Run unit tests. + run: nox -s showcase_unit_add_iam_methods-3.8 + showcase-mypy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install protoc 3.12.1. + run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - name: Install nox. 
+ run: python -m pip install nox + - name: Typecheck the generated output. + run: nox -s showcase_mypy + showcase-mypy-alternative-templates: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install protoc 3.12.1. + run: | + sudo mkdir -p /usr/src/protoc/ + sudo chown -R ${USER} /usr/src/ + curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + cd /usr/src/protoc/ + unzip protoc-3.12.1.zip + sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + - name: Install nox. + run: python -m pip install nox + - name: Typecheck the generated output. + run: nox -s showcase_mypy_alternative_templates + unit: + strategy: + matrix: + python: [3.6, 3.7, 3.8, 3.9] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install pandoc + run: | + sudo apt-get update + sudo apt-get install -y pandoc gcc git + - name: Install nox and codecov. + run: | + python -m pip install nox + python -m pip install codecov + - name: Run unit tests. + run: nox -s unit-${{ matrix.python }} + - name: Submit coverage data to codecov. 
+ run: codecov + if: always() + style-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install autopep8 + run: | + python -m pip install autopep8 + - name: Check diff + run: | + find gapic tests -name "*.py" | xargs autopep8 --in-place --exit-code From 42c1d64562ca4e5777a513fbd47adab89ade6512 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Feb 2021 23:01:19 +0100 Subject: [PATCH 0466/1339] chore(deps): update dependency google-api-core to v1.26.0 (#751) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7784cde09bdd..3a8d486b614c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.25.0 +google-api-core==1.26.0 googleapis-common-protos==1.52.0 jinja2==2.11.3 MarkupSafe==1.1.1 From a3ddb98a08c914bad36271ed2560b3a410ea910d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 9 Feb 2021 15:36:07 -0700 Subject: [PATCH 0467/1339] chore: add template README and tweak setup.py (#728) - Add a license header to the empty `__init__.py` in the unit test directory. (OSPO's tool flags this as missing a license header.) - Tweak the setup.py so there are fewer manual adjustments needed during initial repo setup of a Cloud library. - Add a generic README with instructions on how to install the library locally. This is for the folks using the self-service approach. (A README for a published library needs more content like links to PyPI, links to documentation, etc.).
--- .../gapic/templates/README.rst.j2 | 50 +++++++++++++++++++ .../gapic/templates/setup.py.j2 | 17 +++++-- .../gapic/%name_%version/%sub/__init__.py.j2 | 1 + 3 files changed, 63 insertions(+), 5 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/README.rst.j2 diff --git a/packages/gapic-generator/gapic/templates/README.rst.j2 b/packages/gapic-generator/gapic/templates/README.rst.j2 new file mode 100644 index 000000000000..c443a634060f --- /dev/null +++ b/packages/gapic-generator/gapic/templates/README.rst.j2 @@ -0,0 +1,50 @@ +Python Client for {{ api.naming.long_name }} API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the {{ api.naming.long_name }} API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library + diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index fdfbf7a63701..94af4ae760c3 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -1,17 +1,27 @@ {% extends '_base.py.j2' %} {% block content %} +import io +import os import setuptools # type: ignore +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() setuptools.setup( name='{{ api.naming.warehouse_package_name }}', - version='0.0.1', + version=version, + long_description=readme, {% if api.naming.namespace -%} packages=setuptools.PEP420PackageFinder.find(), namespace_packages={{ api.naming.namespace_packages }}, {% else -%} - packages=setuptools.find_packages(), + packages=setuptools.PEP420PackageFinder.find(), {% endif -%} platforms='Posix; MacOS X; Windows', include_package_data=True, @@ -24,9 +34,6 @@ setuptools.setup( {%- endif %} ), python_requires='>=3.6', - scripts=[ - 'scripts/fixup_{{ api.naming.versioned_module_name }}_keywords.py', - ], classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 index d3f5a12faa99..34200f2eca9e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 @@ -1 +1,2 @@ +{% extends '_base.py.j2' %} \ No newline at end of file From fd47322b3e7fcb8bd2927a7430e6d1fe27495f85 Mon Sep 
17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Feb 2021 23:44:04 +0100 Subject: [PATCH 0468/1339] chore(deps): update dependency pyyaml to v5.4.1 (#740) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [PyYAML](https://pyyaml.org/) ([source](https://togithub.com/yaml/pyyaml)) | `==5.4` -> `==5.4.1` | [![age](https://badges.renovateapi.com/packages/pypi/PyYAML/5.4.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/PyYAML/5.4.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/PyYAML/5.4.1/compatibility-slim/5.4)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/PyYAML/5.4.1/confidence-slim/5.4)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
yaml/pyyaml ### [`v5.4.1`](https://togithub.com/yaml/pyyaml/compare/5.4...5.4.1) [Compare Source](https://togithub.com/yaml/pyyaml/compare/5.4...5.4.1)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3a8d486b614c..6fa2a4873960 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -5,5 +5,5 @@ jinja2==2.11.3 MarkupSafe==1.1.1 protobuf==3.14.0 pypandoc==1.5 -PyYAML==5.4 +PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 196b94dfc0cdda6ec8e61e5eed96cc32584ca5f1 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 9 Feb 2021 17:08:26 -0800 Subject: [PATCH 0469/1339] fix: fix rest transport tests (#772) This includes a fix for the `data` vs `json` parameter issue and the enum serialization (string instead of number) issue.
--- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 59611cfd3307..e03331c680f6 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1131,14 +1131,14 @@ def test_{{ method.name|snake_case }}_rest_flattened(): # request object values. assert len(req.mock_calls) == 1 _, http_call, http_params = req.mock_calls[0] - body = http_params.get('json') + body = http_params.get('data') {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} {% if field.ident|string() == 'timestamp.Timestamp' -%} assert TimestampRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} {% elif field.ident|string() == 'duration.Duration' -%} assert DurationRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} {% else -%} - assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False) + assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False, use_integers_for_enums=False) {%- elif field.field_pb is str_field_pb %}{{ field.mock_value }} {%- else %}str({{ field.mock_value }}) {%- endif %} in http_call[1] + str(body) From 254d40f575646f65543cfd3d9833259edd9bb6c6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 Feb 2021 18:13:57 -0700 Subject: [PATCH 0470/1339] chore: release 0.40.9 (#773) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3830c72f0fd8..5c92f6855ebd 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.8...v0.40.9) (2021-02-10) + + +### Bug Fixes + +* fix rest transport tests ([#772](https://www.github.com/googleapis/gapic-generator-python/issues/772)) ([ce110a3](https://www.github.com/googleapis/gapic-generator-python/commit/ce110a35894aa1a838649f9782294b3b8446be5c)) + ### [0.40.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.7...v0.40.8) (2021-02-05) From b5f3ee246f3598327ec10d04f9a596aea089474b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 17 Feb 2021 11:38:56 -0800 Subject: [PATCH 0471/1339] chore: cancel preexisting workflow runs, refactor workflow file (#775) Pushing to an open PR should (hopefully) cancel preexisting actions. Remove dead CI requirements. 
--- packages/gapic-generator/.github/CODEOWNERS | 3 +- .../.github/sync-repo-settings.yaml | 24 +++-- .../.github/workflows/tests.yaml | 96 +++++++++---------- 3 files changed, 63 insertions(+), 60 deletions(-) diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS index 0546e1d81418..f5c036fe95a1 100644 --- a/packages/gapic-generator/.github/CODEOWNERS +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -4,4 +4,5 @@ # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -* @googleapis/actools-python @googleapis/yoshi-python +* @googleapis/actools-python @googleapis/yoshi-python @lukesneeringer +*.yaml @googleapis/actools @googleapis/yoshi-python @googleapis/actools-python @lukesneeringer diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 00d9c8e1f60d..30ca3f5c8fe2 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -11,15 +11,25 @@ branchProtectionRules: - 'conventionalcommits.org' - 'docs' - 'mypy' - - 'showcase' - - 'showcase-mtls' - - 'showcase-unit' - - 'showcase-unit-alternative-templates' - - 'showcase-unit-add-iam-methods' + - 'showcase (showcase)' + - 'showcase (showcase_alternative_templates)' + - 'showcase-mtls (showcase_mtls)' + - 'showcase-mtls (showcase_mtls_alternative_templates)' - 'showcase-mypy' - - 'showcase-mypy-alternative-templates' - - 'unit' + - 'showcase-mypy (_alternative_templates)' + - 'showcase-unit (3.6)' + - 'showcase-unit (3.7)' + - 'showcase-unit (3.7, _alternative_templates)' + - 'showcase-unit (3.8)' + - 'showcase-unit (3.8, _alternative_templates)' + - 'showcase-unit (3.9)' + - 'showcase-unit (3.9, _alternative_templates)' + - 'showcase-unit-add-iam-methods' - 'style-check' + - 'unit (3.6)' + - 'unit (3.7)' + - 'unit (3.8)' + - 'unit 
(3.9)' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true requiresStrictStatusChecks: true diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 5e9f06347974..dae6d0bd54f8 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -13,6 +13,10 @@ jobs: docs: runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -25,6 +29,10 @@ jobs: mypy: runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -73,6 +81,10 @@ jobs: target: [showcase, showcase_alternative_templates] runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -109,6 +121,10 @@ jobs: target: [showcase_mtls, showcase_mtls_alternative_templates] runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Setup temp directory run: | @@ -159,35 +175,16 @@ jobs: strategy: matrix: python: [3.6, 3.7, 3.8, 3.9] + variant: ['', _alternative_templates] + exclude: + - python: 3.6 + variant: _alternative_templates runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v2 + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 with: - python-version: ${{ matrix.python }} - - name: Install system dependencies. 
- run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.12.1. - run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - name: Install nox. - run: python -m pip install nox - - name: Run unit tests. - run: nox -s showcase_unit-${{ matrix.python }} - showcase-unit-alternative-templates: - strategy: - matrix: - python: [3.7, 3.8, 3.9] - runs-on: ubuntu-latest - steps: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v2 @@ -208,10 +205,14 @@ jobs: - name: Install nox. run: python -m pip install nox - name: Run unit tests. - run: nox -s showcase_unit_alternative_templates-${{ matrix.python }} + run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -232,34 +233,17 @@ jobs: - name: Install nox. run: python -m pip install nox - name: Run unit tests. - run: nox -s showcase_unit_add_iam_methods-3.8 + run: nox -s showcase_unit_add_iam_methods showcase-mypy: runs-on: ubuntu-latest + strategy: + matrix: + variant: ['', _alternative_templates] steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 with: - python-version: 3.8 - - name: Install system dependencies. - run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.12.1. 
- run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - name: Install nox. - run: python -m pip install nox - - name: Typecheck the generated output. - run: nox -s showcase_mypy - showcase-mypy-alternative-templates: - runs-on: ubuntu-latest - steps: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -280,13 +264,17 @@ jobs: - name: Install nox. run: python -m pip install nox - name: Typecheck the generated output. - run: nox -s showcase_mypy_alternative_templates + run: nox -s showcase_mypy${{ matrix.variant }} unit: strategy: matrix: python: [3.6, 3.7, 3.8, 3.9] runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v2 @@ -308,6 +296,10 @@ jobs: style-check: runs-on: ubuntu-latest steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 From 78d6d63c98d9ee9c8dda992c4d9b757a1d19608d Mon Sep 17 00:00:00 2001 From: Ilango Rajagopal Date: Thu, 18 Feb 2021 03:04:47 +0530 Subject: [PATCH 0472/1339] fix: from_service_account constructors for async clients (#779) Currently, creating async clients with constructor works fine: from google.cloud import vision client = vision.ImageAnnotatorAsyncClient() print(type(client)) prints: Using from_service_account_* methods on the other hand return sync client: client = 
vision.ImageAnnotatorAsyncClient.from_service_account_file("service_account.json") print(type(client)) This gives `<class 'google.cloud.vision_v1.ImageAnnotatorClient'>` (repr reconstructed — the original angle-bracketed output was stripped during extraction), which is just a normal synchronous client. This happens because these functions are linked to sync clients' methods. Now it's fixed to call the same method, but with the proper class. --- .../%sub/services/%service/async_client.py.j2 | 32 +++++++++++++++++-- .../%name_%version/%sub/test_%service.py.j2 | 13 ++++++-- 2 files changed, 41 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index f6e380d0679a..c0e2c9a6ac7b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -48,8 +48,36 @@ class {{ service.async_client_name }}: parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path) {% endfor %} - from_service_account_info = {{ service.client_name }}.from_service_account_info - from_service_account_file = {{ service.client_name }}.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {{ service.async_client_name }}: The constructed client. 
+ """ + return {{ service.client_name }}.from_service_account_info.__func__({{ service.async_client_name }}, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {{ service.async_client_name }}: The constructed client. + """ + return {{ service.client_name }}.from_service_account_file.__func__({{ service.async_client_name }}, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e03331c680f6..14a7f03f7580 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -77,13 +77,20 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi -def test_{{ service.client_name|snake_case }}_from_service_account_info(): +@pytest.mark.parametrize("client_class", [ + {{ service.client_name }}, + {%- if 'grpc' in opts.transport %} + {{ service.async_client_name }}, + {%- endif %} +]) +def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = {{ service.client_name }}.from_service_account_info(info) + 
client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} @@ -100,9 +107,11 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(client_c factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} From dd86e411de2a50d004ec31dcc05f0a2543e01a61 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Wed, 17 Feb 2021 13:52:05 -0800 Subject: [PATCH 0473/1339] fix: ignore unknown fields returned from server for REST (#777) Otherwise the client may throw an exception if the server is newer than the client (i.e. some of the returned messages have new fields). 
A tracking bug: https://github.com/googleapis/gapic-generator-python/issues/780 --- .../%sub/services/%service/transports/rest.py.j2 | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 1341571e484c..bb6acbadd64f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -207,7 +207,10 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {%- if not method.void %} # Return the response - return {{ method.output.ident }}.from_json(response.content) + return {{ method.output.ident }}.from_json( + response.content, + ignore_unknown_fields=True + ) {%- endif %} {%- endif %} {%- endfor %} From e614182a4d39bc0538be899ee334e6aabb8e1dc6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 18 Feb 2021 00:06:04 +0000 Subject: [PATCH 0474/1339] chore: release 0.40.10 (#783) :robot: I have created a release \*beep\* \*boop\* --- ### [0.40.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.9...v0.40.10) (2021-02-17) ### Bug Fixes * ignore unknown fields returned from server for REST ([#777](https://www.github.com/googleapis/gapic-generator-python/issues/777)) ([a70b078](https://www.github.com/googleapis/gapic-generator-python/commit/a70b0787f7e3d40642a4f68574f0cc493cc4e054)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5c92f6855ebd..79dcfdedde3b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.9...v0.40.10) (2021-02-17) + + +### Bug Fixes + +* ignore unknown fields returned from server for REST ([#777](https://www.github.com/googleapis/gapic-generator-python/issues/777)) ([a70b078](https://www.github.com/googleapis/gapic-generator-python/commit/a70b0787f7e3d40642a4f68574f0cc493cc4e054)) + ### [0.40.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.8...v0.40.9) (2021-02-10) From eed8e4b82bff770cbb18daea7f9b45d3a3cb8c66 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Feb 2021 01:56:05 +0100 Subject: [PATCH 0475/1339] chore(deps): update dependency protobuf to v3.15.0 (#784) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.14.0` -> `==3.15.0` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.0/compatibility-slim/3.14.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.0/confidence-slim/3.14.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Renovate configuration :date: **Schedule**: At any 
time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 6fa2a4873960..d524edfca156 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.0 googleapis-common-protos==1.52.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.14.0 +protobuf==3.15.0 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 925fafb7dcea257d3b5e6c51f7c686e3de251a06 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 24 Feb 2021 14:11:41 -0800 Subject: [PATCH 0476/1339] fix: remove duplicate field entries (#786) Fix for #778 --- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 30 +++++++++++++++++-- 2 files changed, 28 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index d7073af7d05e..5ab88e6fc961 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -365,7 +365,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}(**request) {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} elif not request: - request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) + request = {{ method.input.ident }}() {% endif -%}{# Cross-package req and flattened fields #} {%- else %} # Minor optimization to avoid making a copy if the user passes diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 14a7f03f7580..f1d8685850fb 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -471,6 +471,30 @@ def test_{{ method.name|snake_case }}_from_dict(): test_{{ method.name|snake_case }}(request_type=dict) +{% if not method.client_streaming -%} +def test_{{ method.name|snake_case }}_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = {{ service.client_name }}( + credentials=credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.name|snake_case }}), + '__call__') as call: + client.{{ method.name|snake_case }}() + call.assert_called() + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == {{ method.input.ident }}() + {% endif %} +{% endif -%} + + @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): client = {{ service.async_client_name }}( @@ -1276,7 +1300,7 @@ def test_{{ method.name|snake_case }}_pager(): for result in results: assert isinstance(result, tuple) assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) - + assert pager.get('a') is None assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) {% else %} @@ -1288,7 +1312,7 @@ def test_{{ method.name|snake_case }}_pager(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token - + {% endif %} {# paged methods #} {% endfor -%} {#- method in methods for rest #} def test_credentials_transport_error(): @@ -1500,7 +1524,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtl ("grpc.max_receive_message_length", -1), ], ) - + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls # is used. 
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): From 82197f0e3509a8f0e8fe2e0d6228dab76f7c3fa3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 24 Feb 2021 23:23:19 +0100 Subject: [PATCH 0477/1339] chore(deps): update dependency protobuf to v3.15.2 (#785) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d524edfca156..27dca81d583c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.0 googleapis-common-protos==1.52.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.0 +protobuf==3.15.2 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From d095482e5ffc75f77ea4d4005499cbe685a1aa84 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 24 Feb 2021 15:39:48 -0800 Subject: [PATCH 0478/1339] chore: release 0.40.11 (#787) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Dov Shlachter --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 79dcfdedde3b..0c41f21b5aba 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.10...v0.40.11) (2021-02-24) + + +### Bug Fixes + +* remove duplicate field entries ([#786](https://www.github.com/googleapis/gapic-generator-python/issues/786)) ([9f4dfa4](https://www.github.com/googleapis/gapic-generator-python/commit/9f4dfa46cb6a67081563ce096452fedd9e35051d)) + ### 
[0.40.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.9...v0.40.10) (2021-02-17) From 90ff882cfee54fce213b6fa7e5e0e767af3b254e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 26 Feb 2021 20:12:06 +0100 Subject: [PATCH 0479/1339] chore(deps): update dependency protobuf to v3.15.3 (#790) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.15.2` -> `==3.15.3` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.3/compatibility-slim/3.15.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.3/confidence-slim/3.15.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 27dca81d583c..cb667cea43c0 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.0 googleapis-common-protos==1.52.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.2 +protobuf==3.15.3 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 429ab8b32353b204ab2e8cfc2eeaa3bd82acb1a2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 26 Feb 2021 12:16:04 -0700 Subject: [PATCH 0480/1339] fix: exclude 'input' from reserved names list (#788) The motivation for this is to prevent a breaking change in a library that has already been made GA: texttospeech https://github.com/googleapis/python-texttospeech/pull/99. The TTS protos have a field named [`input`](https://github.com/googleapis/googleapis/blob/eabe7c0fde64b1451df6ea171b2009238b0df07c/google/cloud/texttospeech/v1/cloud_tts.proto#L134-L143). Dialogflow CX beta appears to also have a field named `input`, but those changes have not yet been published and it is a pre-GA API. 
https://github.com/search?l=Python&p=1&q=org%3Agoogleapis+input_&type=Code --- packages/gapic-generator/gapic/utils/reserved_names.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 3d1e9b445907..9bf1c9a91403 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -22,6 +22,6 @@ RESERVED_NAMES = frozenset( itertools.chain( keyword.kwlist, - set(dir(builtins)) - {"filter", "map", "id", "property"}, + set(dir(builtins)) - {"filter", "map", "id", "input", "property"}, ) ) From 04e25a590f4bdd6d1d843a19e8695b82db534a5d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 27 Feb 2021 00:02:05 +0100 Subject: [PATCH 0481/1339] chore(deps): update dependency googleapis-common-protos to v1.53.0 (#789) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [googleapis-common-protos](https://togithub.com/googleapis/python-api-common-protos) | `==1.52.0` -> `==1.53.0` | [![age](https://badges.renovateapi.com/packages/pypi/googleapis-common-protos/1.53.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/googleapis-common-protos/1.53.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/googleapis-common-protos/1.53.0/compatibility-slim/1.52.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/googleapis-common-protos/1.53.0/confidence-slim/1.52.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-common-protos ### [`v1.53.0`](https://togithub.com/googleapis/python-api-common-protos/blob/master/CHANGELOG.md#​1530-httpswwwgithubcomgoogleapispython-api-common-protoscomparev1520v1530-2021-02-25) [Compare Source](https://togithub.com/googleapis/python-api-common-protos/compare/v1.52.0...v1.53.0) ##### Features - add `google.api.ResourceDescriptor.Style` ([4ce679c](https://www.github.com/googleapis/python-api-common-protos/commit/4ce679cd49771946bf781108e92e07cdf04a61eb)) - add API method signatures to longrunning operations ([8de7ae2](https://www.github.com/googleapis/python-api-common-protos/commit/8de7ae28dfe5dd4d0cb99dd3b89a8f1e614bbe6d)) - add gapic_metadata_pb2 ([#​38](https://www.github.com/googleapis/python-api-common-protos/issues/38)) ([8de7ae2](https://www.github.com/googleapis/python-api-common-protos/commit/8de7ae28dfe5dd4d0cb99dd3b89a8f1e614bbe6d)) - add UNORDERED_LIST to field options ([8de7ae2](https://www.github.com/googleapis/python-api-common-protos/commit/8de7ae28dfe5dd4d0cb99dd3b89a8f1e614bbe6d)) - add WaitOperation method to longrunning operations ([8de7ae2](https://www.github.com/googleapis/python-api-common-protos/commit/8de7ae28dfe5dd4d0cb99dd3b89a8f1e614bbe6d)) - require python >=3.6 and ([#​31](https://www.github.com/googleapis/python-api-common-protos/issues/31)) ([4ce679c](https://www.github.com/googleapis/python-api-common-protos/commit/4ce679cd49771946bf781108e92e07cdf04a61eb)) ##### Bug Fixes - add `create_key` to FieldDescriptors ([4ce679c](https://www.github.com/googleapis/python-api-common-protos/commit/4ce679cd49771946bf781108e92e07cdf04a61eb)) - Generate gRPC files for long-running operations ([#​13](https://www.github.com/googleapis/python-api-common-protos/issues/13)) ([a9ce288](https://www.github.com/googleapis/python-api-common-protos/commit/a9ce28840ddfec712da5b296f43e6c3131840db4)) ##### Documentation - add link to PyPI ([#​10](https://www.github.com/googleapis/python-api-common-protos/issues/10)) 
([3f79402](https://www.github.com/googleapis/python-api-common-protos/commit/3f7940226b0e22aef31b82c8dc2196aa25b48a3f))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index cb667cea43c0..8a1c30d5142f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==7.1.2 google-api-core==1.26.0 -googleapis-common-protos==1.52.0 +googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 protobuf==3.15.3 From 9099da0bcb8d6166da2d742b090453f5e1bf9b48 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 26 Feb 2021 23:12:06 +0000 Subject: [PATCH 0482/1339] chore: release 0.40.12 (#793) :robot: I have created a release \*beep\* \*boop\* --- ### [0.40.12](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.11...v0.40.12) (2021-02-26) ### Bug Fixes * exclude 'input' from reserved names list ([#788](https://www.github.com/googleapis/gapic-generator-python/issues/788)) ([da2ff71](https://www.github.com/googleapis/gapic-generator-python/commit/da2ff717b82357359baeeafad9a3e48a70e194cb)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0c41f21b5aba..1574ca4f28d8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.40.12](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.11...v0.40.12) (2021-02-26) + + +### Bug Fixes + +* exclude 'input' from reserved names list ([#788](https://www.github.com/googleapis/gapic-generator-python/issues/788)) ([da2ff71](https://www.github.com/googleapis/gapic-generator-python/commit/da2ff717b82357359baeeafad9a3e48a70e194cb)) + ### [0.40.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.10...v0.40.11) (2021-02-24) From 14e621d8be11c56f0176e7ae3162dc3c39b95b13 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 26 Feb 2021 15:22:51 -0800 Subject: [PATCH 0483/1339] chore: publish to PyPI on release (#792) Publish to PyPI on release Minor tweak to 'style-check' CI task Co-authored-by: gcf-merge-on-green[bot] <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> --- .../.github/workflows/pypi-upload.yaml | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 packages/gapic-generator/.github/workflows/pypi-upload.yaml diff --git a/packages/gapic-generator/.github/workflows/pypi-upload.yaml b/packages/gapic-generator/.github/workflows/pypi-upload.yaml new file mode 100644 index 000000000000..60e1bd3c82e5 --- /dev/null +++ b/packages/gapic-generator/.github/workflows/pypi-upload.yaml @@ -0,0 +1,25 @@ +name: Upload Python Package to PyPI + +on: + release: + types: [created] + +jobs: + publish: + runs-on: ubuntu-latest + environment: PyPI + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.x' 
+ - name: Install dependencies + run: python -m pip install twine wheel + - name: Package and upload module + env: + TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + python setup.py sdist bdist_wheel + twine upload dist/* From c9fa1a9893e46cbcfa9a5e325fc93ee21463a7f6 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 2 Mar 2021 14:59:27 -0800 Subject: [PATCH 0484/1339] feat: add gapic metadata file (#781) The GAPIC metadata file is used to track code, samples, and test coverage for every RPC and library method. --- packages/gapic-generator/.github/CODEOWNERS | 4 +- .../.github/workflows/tests.yaml | 2 +- .../%name/%version/gapic_metadata.json.j2 | 1 + .../gapic-generator/gapic/cli/generate.py | 9 +- packages/gapic-generator/gapic/schema/api.py | 41 ++++ .../%name_%version/gapic_metadata.json.j2 | 1 + packages/gapic-generator/setup.py | 2 +- .../tests/unit/schema/test_api.py | 227 +++++++++++++++++- 8 files changed, 277 insertions(+), 10 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/gapic_metadata.json.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS index f5c036fe95a1..5bc27edbcc57 100644 --- a/packages/gapic-generator/.github/CODEOWNERS +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -4,5 +4,5 @@ # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -* @googleapis/actools-python @googleapis/yoshi-python @lukesneeringer -*.yaml @googleapis/actools @googleapis/yoshi-python @googleapis/actools-python @lukesneeringer +* @googleapis/actools-python @googleapis/yoshi-python +*.yaml @googleapis/actools @googleapis/yoshi-python @googleapis/actools-python diff --git 
a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index dae6d0bd54f8..74fb502ec402 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -310,4 +310,4 @@ jobs: python -m pip install autopep8 - name: Check diff run: | - find gapic tests -name "*.py" | xargs autopep8 --in-place --exit-code + find gapic tests -name "*.py" | xargs autopep8 --diff --exit-code diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/gapic_metadata.json.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/gapic_metadata.json.j2 new file mode 100644 index 000000000000..edd79fda8aa0 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/gapic_metadata.json.j2 @@ -0,0 +1 @@ +{# {{ api.gapic_metadata_json(opts) }} #} {# TODO(dovs): This is temporarily commented out pending the addition of a flag #} diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index b0e50ccf2e32..414170a21380 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -47,10 +47,11 @@ def generate( # This generator uses a slightly different mechanism for determining # which files to generate; it tracks at package level rather than file # level. - package = os.path.commonprefix([i.package for i in filter( - lambda p: p.name in req.file_to_generate, - req.proto_file, - )]).rstrip('.') + package = os.path.commonprefix([ + p.package + for p in req.proto_file + if p.name in req.file_to_generate + ]).rstrip('.') # Build the API model object. 
# This object is a frozen representation of the whole API, and is sent diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 35a59dcee025..39f0c4904544 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -28,8 +28,10 @@ from google.api_core import exceptions # type: ignore from google.api import resource_pb2 # type: ignore +from google.gapic.metadata import gapic_metadata_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 +from google.protobuf.json_format import MessageToJson import grpc # type: ignore @@ -392,6 +394,45 @@ def subpackages(self) -> Mapping[str, 'API']: ) return answer + def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: + gm = gapic_metadata_pb2.GapicMetadata( + schema="1.0", + comment="This file maps proto services/RPCs to the corresponding library clients/methods", + language="python", + proto_package=self.naming.proto_package, + library_package=".".join( + self.naming.module_namespace + + (self.naming.versioned_module_name,) + ), + ) + + for service in sorted(self.services.values(), key=lambda s: s.name): + service_desc = gm.services.get_or_create(service.name) + + # At least one of "grpc" or "rest" is guaranteed to be present because + # of the way that Options instances are created. + # This assumes the options are generated by the class method factory. 
+ transports = [] + if "grpc" in options.transport: + transports.append(("grpc", service.client_name)) + transports.append(("grpcAsync", service.async_client_name)) + + if "rest" in options.transport: + transports.append(("rest", service.client_name)) + + methods = sorted(service.methods.values(), key=lambda m: m.name) + for tprt, client_name in transports: + transport = service_desc.clients.get_or_create(tprt) + transport.library_client = client_name + for method in methods: + method_desc = transport.rpcs.get_or_create(method.name) + method_desc.methods.append(to_snake_case(method.name)) + + return gm + + def gapic_metadata_json(self, options: Options) -> str: + return MessageToJson(self.gapic_metadata(options), sort_keys=True) + def requires_package(self, pkg: Tuple[str, ...]) -> bool: return any( message.ident.package == pkg diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 new file mode 100644 index 000000000000..edd79fda8aa0 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 @@ -0,0 +1 @@ +{# {{ api.gapic_metadata_json(opts) }} #} {# TODO(dovs): This is temporarily commented out pending the addition of a flag #} diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 409dea8b65f4..69bb01d55f31 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -45,7 +45,7 @@ install_requires=( "click >= 6.7", "google-api-core >= 1.17.0", - "googleapis-common-protos >= 1.6.0", + "googleapis-common-protos >= 1.53.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", "protobuf >= 3.12.0", diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index e91a310ee0d6..1eaa8a57aeb2 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ 
b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -22,8 +22,10 @@ from google.api import client_pb2 from google.api import resource_pb2 from google.api_core import exceptions +from google.gapic.metadata import gapic_metadata_pb2 from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 +from google.protobuf.json_format import MessageToJson from gapic.schema import api from gapic.schema import imp @@ -260,8 +262,8 @@ def test_proto_oneof(): name='Bar', fields=( make_field_pb2(name='imported_message', number=1, - type_name='.google.dep.ImportedMessage', - oneof_index=0), + type_name='.google.dep.ImportedMessage', + oneof_index=0), make_field_pb2( name='primitive', number=2, type=1, oneof_index=0), ), @@ -1287,3 +1289,224 @@ def test_map_field_name_disambiguation(): # The same module used in the same place should have the same import alias. # Because there's a "mollusc" name used, the import should be disambiguated. assert mollusc_ident == mollusc_map_ident == "am_mollusc.Mollusc" + + +def test_gapic_metadata(): + api_schema = api.API.build( + file_descriptors=[ + descriptor_pb2.FileDescriptorProto( + name="cephalopod.proto", + package="animalia.mollusca.v1", + message_type=[ + descriptor_pb2.DescriptorProto( + name="MolluscRequest", + ), + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ], + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Squid", + method=[ + descriptor_pb2.MethodDescriptorProto( + name="Ramshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + name="Humboldt", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + name="Giant", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + descriptor_pb2.ServiceDescriptorProto( + name="Octopus", + 
method=[ + descriptor_pb2.MethodDescriptorProto( + name="GiantPacific", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + name="BlueSpot", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ] + ), + ], + ) + ] + ) + + opts = Options.build("transport=grpc") + expected = gapic_metadata_pb2.GapicMetadata( + schema="1.0", + comment="This file maps proto services/RPCs to the corresponding library clients/methods", + language="python", + proto_package="animalia.mollusca.v1", + library_package="animalia.mollusca_v1", + services={ + "Octopus": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + }, + ), + "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusAsyncClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + }, + ), + } + ), + "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="SquidClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + }, + ), + "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="SquidAsyncClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), + "Humboldt": 
gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + }, + ), + } + ), + } + ) + actual = api_schema.gapic_metadata(opts) + assert expected == actual + expected = MessageToJson(expected, sort_keys=True) + actual = api_schema.gapic_metadata_json(opts) + assert expected == actual + + opts = Options.build("transport=rest") + expected = gapic_metadata_pb2.GapicMetadata( + schema="1.0", + comment="This file maps proto services/RPCs to the corresponding library clients/methods", + language="python", + proto_package="animalia.mollusca.v1", + library_package="animalia.mollusca_v1", + services={ + "Octopus": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + }, + ) + } + ), + "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="SquidClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + }, + ), + + } + ), + } + ) + actual = api_schema.gapic_metadata(opts) + assert expected == actual + expected = MessageToJson(expected, sort_keys=True) + actual = api_schema.gapic_metadata_json(opts) + assert expected == actual + + opts = Options.build("transport=rest+grpc") + expected = gapic_metadata_pb2.GapicMetadata( + schema="1.0", + comment="This file maps proto services/RPCs to the corresponding library clients/methods", + language="python", + proto_package="animalia.mollusca.v1", + 
library_package="animalia.mollusca_v1", + services={ + "Octopus": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + }, + ), + "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusAsyncClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + }, + ), + "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + }, + ) + } + ), + "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="SquidClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + }, + ), + "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="SquidAsyncClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + }, + ), + "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="SquidClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), + "Humboldt": 
gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + }, + ), + + } + ), + } + ) + + actual = api_schema.gapic_metadata(opts) + assert expected == actual + expected = MessageToJson(expected, sort_keys=True) + actual = api_schema.gapic_metadata_json(opts) + assert expected == actual From 56ffd5f1fbaed4ec697f7fc8cb00499a6767e1d1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Mar 2021 23:02:03 +0000 Subject: [PATCH 0485/1339] chore: release 0.41.0 (#796) :robot: I have created a release \*beep\* \*boop\* --- ## [0.41.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.12...v0.41.0) (2021-03-02) ### Features * add gapic metadata file ([#781](https://www.github.com/googleapis/gapic-generator-python/issues/781)) ([5dd8fcc](https://www.github.com/googleapis/gapic-generator-python/commit/5dd8fccf6b4da57edef0347beb07102634daa992)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1574ca4f28d8..1a2f945396fe 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.41.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.12...v0.41.0) (2021-03-02) + + +### Features + +* add gapic metadata file ([#781](https://www.github.com/googleapis/gapic-generator-python/issues/781)) ([5dd8fcc](https://www.github.com/googleapis/gapic-generator-python/commit/5dd8fccf6b4da57edef0347beb07102634daa992)) + ### [0.40.12](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.11...v0.40.12) (2021-02-26) From ecf931018ac7ebc291f1d329bf0d7ebfe7506c74 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 3 Mar 2021 15:44:06 -0800 Subject: [PATCH 0486/1339] feat: add flag for gapic metadata (#795) As per the design doc, gapic metadata should not be generated by default, and should be gated by the 'metadata' flag. When invoking the generator via protoc, toggle the metadata flag like so: --python_gapic_opt="metadata" Subsequent Bazel integration is WIP. 
--- .../gapic/generator/generator.py | 15 ++++--- .../gapic-generator/gapic/utils/options.py | 9 +++-- .../tests/unit/generator/test_generator.py | 16 ++++++++ .../tests/unit/generator/test_options.py | 40 ++++++++++--------- 4 files changed, 54 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index d6eb3aca9dd4..2a63028a4467 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -105,11 +105,12 @@ def get_response( template_name, api_schema=api_schema, opts=opts) ) - output_files.update( - self._generate_samples_and_manifest( - api_schema, self._env.get_template(sample_templates[0]), - ) - ) + sample_output = self._generate_samples_and_manifest( + api_schema, + self._env.get_template(sample_templates[0]), + ) if sample_templates else {} + + output_files.update(sample_output) # Return the CodeGeneratorResponse output. res = CodeGeneratorResponse( @@ -233,6 +234,10 @@ def _render_template( answer: Dict[str, CodeGeneratorResponse.File] = OrderedDict() skip_subpackages = False + # Very, very special case. This flag exists to gate this one file. + if not opts.metadata and template_name.endswith("gapic_metadata.json.j2"): + return answer + # Sanity check: Rendering per service and per proto would be a # combinatorial explosion and is almost certainly not what anyone # ever wants. Error colorfully on it. diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index b8e79a060139..826c7734bc3b 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -41,20 +41,22 @@ class Options: lazy_import: bool = False old_naming: bool = False add_iam_methods: bool = False + metadata: bool = False # TODO(yon-mg): should there be an enum for transport type? 
transport: List[str] = dataclasses.field(default_factory=lambda: []) # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' OPT_FLAGS: FrozenSet[str] = frozenset(( + 'add-iam-methods', # microgenerator implementation for `reroute_to_grpc_interface` + 'lazy-import', # requires >= 3.7 + 'metadata', # generate GAPIC metadata JSON file 'old-naming', # TODO(dovs): Come up with a better comment 'retry-config', # takes a path 'samples', # output dir - 'lazy-import', # requires >= 3.7 - 'add-iam-methods', # microgenerator implementation for `reroute_to_grpc_interface` # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) 'transport', - 'warehouse-package-name' # change the package name on PyPI + 'warehouse-package-name', # change the package name on PyPI )) @classmethod @@ -143,6 +145,7 @@ def tweak_path(p): lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), add_iam_methods=bool(opts.pop('add-iam-methods', False)), + metadata=bool(opts.pop('metadata', False)), # transport should include desired transports delimited by '+', e.g. 
transport='grpc+rest' transport=opts.pop('transport', ['grpc'])[0].split('+') ) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 3f66033d42f8..7c6d8500767f 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -116,6 +116,22 @@ def test_get_response_fails_invalid_file_paths(): assert "%proto" in ex_str and "%service" in ex_str +def test_get_response_ignore_gapic_metadata(): + g = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: + lt.return_value = ["gapic/gapic_metadata.json.j2"] + with mock.patch.object(jinja2.Environment, "get_template") as gt: + gt.return_value = jinja2.Template( + "This is not something we want to see") + res = g.get_response( + api_schema=make_api(), + opts=Options.build(""), + ) + + # We don't expect any files because opts.metadata is not set. 
+ assert res.file == CodeGeneratorResponse().file + + def test_get_response_ignores_unwanted_transports_and_clients(): g = make_generator() with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 60d365a84c52..d5bd11f64e17 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -14,6 +14,7 @@ import os import pytest +import re from unittest import mock import warnings @@ -139,21 +140,24 @@ def test_options_service_config(fs): assert opts.retry == expected_cfg -def test_options_lazy_import(): - opts = Options.build('lazy-import') - assert opts.lazy_import - - -def test_options_old_naming(): - opts = Options.build('old-naming') - assert opts.old_naming - - -def test_options_add_iam_methods(): - opts = Options.build('add-iam-methods') - assert opts.add_iam_methods - - -def test_options_warehouse_package_name(): - opts = Options.build('warehouse-package-name') - assert opts.warehouse_package_name +def test_options_bool_flags(): + # All these options are default False. + # If new options violate this assumption, + # this test may need to be tweaked. + # New options should follow the dash-case/snake_case convention. 
+ opt_str_to_attr_name = { + name: re.sub(r"-", "_", name) + for name in + ["lazy-import", + "old-naming", + "add-iam-methods", + "metadata", + "warehouse-package-name", + ]} + + for opt, attr in opt_str_to_attr_name.items(): + options = Options.build("") + assert not getattr(options, attr) + + options = Options.build(opt) + assert getattr(options, attr) From a5708a26fc4a3d99ed3fe94772e7f5628e096f1d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 3 Mar 2021 15:52:14 -0800 Subject: [PATCH 0487/1339] chore: release 0.42.0 (#798) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1a2f945396fe..7ddad094d545 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.42.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.41.0...v0.42.0) (2021-03-03) + + +### Features + +* add flag for gapic metadata ([#795](https://www.github.com/googleapis/gapic-generator-python/issues/795)) ([9cd7664](https://www.github.com/googleapis/gapic-generator-python/commit/9cd7664141835edcd8970629d9cf3abe4b7fd7c4)) + ## [0.41.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.12...v0.41.0) (2021-03-02) From def4cac18e71d87e2ebff0e656631a892435b389 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Mar 2021 19:42:11 +0100 Subject: [PATCH 0488/1339] chore(deps): update dependency protobuf to v3.15.4 (#799) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | 
[protobuf](https://developers.google.com/protocol-buffers/) | `==3.15.3` -> `==3.15.4` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.4/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.4/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.4/compatibility-slim/3.15.3)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.4/confidence-slim/3.15.3)](https://docs.renovatebot.com/merge-confidence/) | --- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8a1c30d5142f..b7a611e7acb6 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.0 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.3 +protobuf==3.15.4 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From a5b5e7b8036fea5fdd401a5df47f86c78217bced Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Mar 2021 19:50:11 +0100 Subject: [PATCH 0489/1339] chore(deps): update dependency google-api-core to v1.26.1 (#800) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==1.26.0` -> `==1.26.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.1/compatibility-slim/1.26.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.1/confidence-slim/1.26.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v1.26.1`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1261-httpswwwgithubcomgoogleapispython-api-corecomparev1260v1261-2021-02-12) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b7a611e7acb6..04771e44ebdc 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.26.0 +google-api-core==1.26.1 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 From 71c13e063dffd915b25ac73be5e90bdec7c79e60 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 4 Mar 2021 12:06:09 -0800 Subject: [PATCH 0490/1339] fix: corner case fix for empty request generated test (#801) Fix for #791 --- .../%name_%version/%sub/services/%service/client.py.j2 | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 5ab88e6fc961..ded93da10bad 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -359,14 +359,13 @@ class {{ 
service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif -%} {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = {{ method.input.ident }}(**request) - {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} elif not request: + # Null request, just make one. request = {{ method.input.ident }}() - {% endif -%}{# Cross-package req and flattened fields #} {%- else %} # Minor optimization to avoid making a copy if the user passes # in a {{ method.input.ident }}. From ade7f8ebb5df74077cecfc7b3be2263f70c3443d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 4 Mar 2021 21:08:07 +0000 Subject: [PATCH 0491/1339] chore: release 0.42.1 (#802) :robot: I have created a release \*beep\* \*boop\* --- ### [0.42.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.0...v0.42.1) (2021-03-04) ### Bug Fixes * corner case fix for empty request generated test ([#801](https://www.github.com/googleapis/gapic-generator-python/issues/801)) ([039dc71](https://www.github.com/googleapis/gapic-generator-python/commit/039dc713fed291142058741e1138da5a4bec542f)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 7ddad094d545..0d56ff9fd60b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.42.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.0...v0.42.1) (2021-03-04) + + +### Bug Fixes + +* corner case fix for empty request generated test ([#801](https://www.github.com/googleapis/gapic-generator-python/issues/801)) ([039dc71](https://www.github.com/googleapis/gapic-generator-python/commit/039dc713fed291142058741e1138da5a4bec542f)) + ## [0.42.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.41.0...v0.42.0) (2021-03-03) From 36920cf947d20677355c81b249248643e9e038c4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 4 Mar 2021 13:40:07 -0800 Subject: [PATCH 0492/1339] fix: s/grpcAsync/grpc-async for gapic metadata (#803) As discussed in the design doc, the agreed-upon naming convention for the asynchronous variants in gapic metadata uses kebab-case instead of Pascal case. 
--- packages/gapic-generator/gapic/schema/api.py | 2 +- packages/gapic-generator/tests/unit/schema/test_api.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 39f0c4904544..8af8933e8d29 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -415,7 +415,7 @@ def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: transports = [] if "grpc" in options.transport: transports.append(("grpc", service.client_name)) - transports.append(("grpcAsync", service.async_client_name)) + transports.append(("grpc-async", service.async_client_name)) if "rest" in options.transport: transports.append(("rest", service.client_name)) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 1eaa8a57aeb2..de705d88dfe5 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -1363,7 +1363,7 @@ def test_gapic_metadata(): "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), }, ), - "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusAsyncClient", rpcs={ "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), @@ -1382,7 +1382,7 @@ def test_gapic_metadata(): "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), }, ), - "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidAsyncClient", rpcs={ "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), @@ -1457,7 +1457,7 @@ def test_gapic_metadata(): "GiantPacific": 
gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), }, ), - "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusAsyncClient", rpcs={ "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), @@ -1483,7 +1483,7 @@ def test_gapic_metadata(): "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), }, ), - "grpcAsync": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidAsyncClient", rpcs={ "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), From 06c20bd5922114bb55ca2c6863ecb71ac378a8fd Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 5 Mar 2021 12:28:02 -0800 Subject: [PATCH 0493/1339] chore: __init__.py file output is now more deterministic (#806) Sort everything that is iteratively produced, e.g. message and enum classes provided. 
--- .../%namespace/%name/%version/__init__.py.j2 | 12 ++++++------ .../ads-templates/%namespace/%name/__init__.py.j2 | 14 +++++++------- .../%namespace/%name_%version/%sub/__init__.py.j2 | 2 +- .../%name_%version/%sub/types/__init__.py.j2 | 12 ++++++------ 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index aa12751852a4..3889fa1545dd 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -11,18 +11,18 @@ if sys.version_info < (3, 7): _lazy_type_to_package_map = { # Message types -{%- for message in api.top_level_messages.values() %} +{%- for _, message in api.top_level_messages|dictsort %} '{{ message.name }}': '{{ message.ident.package|join('.') }}.types.{{ message.ident.module }}', {%- endfor %} - + # Enum types -{%- for enum in api.top_level_enums.values() %} +{%- for _, enum in api.top_level_enums|dictsort %} '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', {%- endfor %} - + {# TODO(yon-mg): add rest transport service once I know what this is #} - # Client classes and transports -{%- for service in api.services.values() %} + # Client classes and transports +{%- for _, service in api.services|dictsort %} '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', '{{ service.transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', '{{ service.grpc_transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 index aa12751852a4..322d19bcc7e1 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -11,18 +11,18 @@ if sys.version_info < (3, 7): _lazy_type_to_package_map = { # Message types -{%- for message in api.top_level_messages.values() %} +{%- for _, message in api.top_level_messages|dictsort %} '{{ message.name }}': '{{ message.ident.package|join('.') }}.types.{{ message.ident.module }}', {%- endfor %} - + # Enum types -{%- for enum in api.top_level_enums.values() %} +{%- for _, enum in api.top_level_enums|dictsort %} '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', {%- endfor %} - + {# TODO(yon-mg): add rest transport service once I know what this is #} - # Client classes and transports -{%- for service in api.services.values() %} + # Client classes and transports +{%- for _, service in api.services|dictsort %} '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', '{{ service.transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', '{{ service.grpc_transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', @@ -90,7 +90,7 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' 
__all__ = ( {%- filter indent %} {% filter sort_lines -%} -{% for subpackage in api.subpackages.keys() -%} +{% for subpackage, _ in api.subpackages|dictsort -%} '{{ subpackage }}', {% endfor -%} {% for service in api.services.values()|sort(attribute='name') diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index df685b243801..217cb20e58a6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -2,7 +2,7 @@ {% block content %} {# Import subpackages. -#} -{% for subpackage in api.subpackages.keys() -%} +{% for subpackage, _ in api.subpackages|dictsort -%} from . import {{ subpackage }} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index 7b25899bc5fe..6f331c3ad97a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -1,20 +1,20 @@ {% extends '_base.py.j2' %} {% block content %} -{%- for proto in api.protos.values() if proto.file_to_generate and proto.messages %} +{%- for _, proto in api.protos|dictsort if proto.file_to_generate and proto.messages %} from .{{proto.module_name }} import ( - {%- for message in proto.messages.values() %} + {%- for _, message in proto.messages|dictsort %} {{message.name }}, {% endfor %} - {%- for enum in proto.enums.values() %} + {%- for _, enum in proto.enums|dictsort %} {{ enum.name }}, {% endfor %} ){% endfor %} __all__ = ( - {%- for proto in api.protos.values() if proto.file_to_generate %} - {%- for message in proto.messages.values() %} + {%- 
for _, proto in api.protos|dictsort if proto.file_to_generate %} + {%- for _, message in proto.messages|dictsort %} '{{ message.name }}', {%- endfor -%} - {%- for enum in proto.enums.values() %} + {%- for _, enum in proto.enums|dictsort %} '{{ enum.name }}', {%- endfor -%} {%- endfor %} From 294ad658e855d148e7a229f6671123b8f71a32b4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Mar 2021 22:00:07 +0100 Subject: [PATCH 0494/1339] chore(deps): update dependency protobuf to v3.15.5 (#804) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.15.4` -> `==3.15.5` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.5/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.5/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.5/compatibility-slim/3.15.4)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.15.5/confidence-slim/3.15.4)](https://docs.renovatebot.com/merge-confidence/) | --- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). 
View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 04771e44ebdc..17d86ccef8c4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.1 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.4 +protobuf==3.15.5 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 3268ba76e09a08f3c18072e3924c3a8bc119f4f3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 5 Mar 2021 21:04:02 +0000 Subject: [PATCH 0495/1339] chore: release 0.42.2 (#807) :robot: I have created a release \*beep\* \*boop\* --- ### [0.42.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.1...v0.42.2) (2021-03-05) ### Bug Fixes * s/grpcAsync/grpc-async for gapic metadata ([#803](https://www.github.com/googleapis/gapic-generator-python/issues/803)) ([96f7864](https://www.github.com/googleapis/gapic-generator-python/commit/96f78640d90cf50c6b525924d14c6afe31874be6)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0d56ff9fd60b..4f6ca5c78458 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.42.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.1...v0.42.2) (2021-03-05) + + +### Bug Fixes + +* s/grpcAsync/grpc-async for gapic metadata ([#803](https://www.github.com/googleapis/gapic-generator-python/issues/803)) ([96f7864](https://www.github.com/googleapis/gapic-generator-python/commit/96f78640d90cf50c6b525924d14c6afe31874be6)) + ### [0.42.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.0...v0.42.1) (2021-03-04) From e26e5e801144fc2c1b02c981a34564efc2cd7bc5 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 10 Mar 2021 14:48:49 -0700 Subject: [PATCH 0496/1339] refactor: remove duplication in grpc transport init (#808) * refactor: remove duplication in grpc transport init * refactor: let base transport set final host, creds, scopes * refactor: remove scopes or self.AUTH_SCOPES in grpc * docs: explain self._prep_wrapped_messages --- .../services/%service/transports/base.py.j2 | 10 +- .../services/%service/transports/grpc.py.j2 | 104 ++++++++---------- .../%service/transports/grpc_asyncio.py.j2 | 101 +++++++---------- .../services/%service/transports/rest.py.j2 | 1 + 4 files changed, 90 insertions(+), 126 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 08b5c4b20b83..c2db7df705ba 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -80,6 +80,9 @@ class {{ service.name }}Transport(abc.ABC): host += ':443' self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -88,19 +91,16 @@ class {{ service.name }}Transport(abc.ABC): if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( credentials_file, - scopes=scopes, + scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default(scopes=scopes, quota_project_id=quota_project_id) + credentials, _ = auth.default(scopes=self._scopes, quota_project_id=quota_project_id) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index b1d1d18917f8..f03d09f6720a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -101,7 +101,12 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + {%- if service.has_lro %} + self._operations_client = None + {%- endif %} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -109,62 +114,50 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" - - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials + else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) - - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
+ else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -172,20 +165,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ], ) - self._stubs = {} # type: Dict[str, Callable] - {%- if service.has_lro %} - self._operations_client = None - {%- endif %} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) @classmethod def create_channel(cls, @@ -197,7 +179,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. 
If diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 94da8db76a93..614802d1d73c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -56,7 +56,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): **kwargs) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -145,7 +145,12 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + {%- if service.has_lro %} + self._operations_client = None + {%- endif %} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -153,62 +158,50 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" - - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -216,20 +209,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - {%- if service.has_lro %} - self._operations_client = None - {%- endif %} + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index bb6acbadd64f..f368273d1619 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -92,6 +92,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {%- endif %} if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._prep_wrapped_messages(client_info) {%- if service.has_lro %} From e3a46d8f7a4f5b29febf9e38d73a49cd66abeecf Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 10 Mar 2021 17:14:41 -0800 Subject: [PATCH 0497/1339] feat: update templates to permit enum aliases (#809) Certain APIs like RecommendationEngine use multiple enum variant monikers to reference the same value. Achieving this in protos requires explicit support from proto-plus and the generated surface. Hand testing indicates compliance. Bump the min proto-plus version for generated clients. 
--- .../%name/%version/%sub/types/_enum.py.j2 | 3 +++ .../gapic/ads-templates/setup.py.j2 | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 17 +++++++++++++++-- .../%name_%version/%sub/types/_enum.py.j2 | 3 +++ .../gapic/templates/setup.py.j2 | 2 +- .../gapic-generator/test_utils/test_utils.py | 2 ++ .../tests/unit/schema/wrappers/test_enums.py | 18 +++++++++++++++--- 7 files changed, 40 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 index c9f4cb0c4f0c..73994a158c5d 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 @@ -1,5 +1,8 @@ class {{ enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" + {% if enum.enum_pb.HasField("options") -%} + _pb_options = {{ enum.options_dict }} + {% endif -%} {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} {% endfor -%} diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 18f06803ddc6..111ce8fb4009 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -19,7 +19,7 @@ setuptools.setup( 'google-api-core >= 1.22.2, < 2.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', - 'proto-plus >= 1.4.0', + 'proto-plus >= 1.15.0', {%- if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {%- endif %} diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 812630720b90..f6ae04ea3ef6 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py 
@@ -39,6 +39,7 @@ from google.api import resource_pb2 from google.api_core import exceptions # type: ignore from google.protobuf import descriptor_pb2 # type: ignore +from google.protobuf.json_format import MessageToDict # type: ignore from gapic import utils from gapic.schema import metadata @@ -413,7 +414,7 @@ def get_field(self, *field_path: str, # Get the first field in the path. first_field = field_path[0] cursor = self.fields[first_field + - ('_' if first_field in utils.RESERVED_NAMES else '')] + ('_' if first_field in utils.RESERVED_NAMES else '')] # Base case: If this is the last field in the path, return it outright. if len(field_path) == 1: @@ -536,6 +537,18 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'EnumType': meta=self.meta.with_context(collisions=collisions), ) + @property + def options_dict(self) -> Dict: + """Return the EnumOptions (if present) as a dict. + + This is a hack to support a pythonic structure representation for + the generator templates. + """ + return MessageToDict( + self.enum_pb.options, + preserving_proto_field_name=True + ) + @dataclasses.dataclass(frozen=True) class PythonType: @@ -869,7 +882,7 @@ def paged_result_field(self) -> Optional[Field]: # The request must have page_token and next_page_token as they keep track of pages for source, source_type, name in ((self.input, str, 'page_token'), - (self.output, str, 'next_page_token')): + (self.output, str, 'next_page_token')): field = source.fields.get(name, None) if not field or field.type != source_type: return None diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 index c9f4cb0c4f0c..73994a158c5d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 @@ -1,5 +1,8 @@ class {{ 
enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" + {% if enum.enum_pb.HasField("options") -%} + _pb_options = {{ enum.options_dict }} + {% endif -%} {% for enum_value in enum.values -%} {{ enum_value.name }} = {{ enum_value.number }} {% endfor -%} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 94af4ae760c3..f7ed0a9923a5 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -28,7 +28,7 @@ setuptools.setup( install_requires=( 'google-api-core[grpc] >= 1.22.2, < 2.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.4.0', + 'proto-plus >= 1.15.0', {%- if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} 'grpc-google-iam-v1', {%- endif %} diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index beab26518f5c..2aafab454a3b 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -290,6 +290,7 @@ def make_enum( module: str = 'baz', values: typing.Tuple[str, int] = (), meta: metadata.Metadata = None, + options: desc.EnumOptions = None, ) -> wrappers.EnumType: enum_value_pbs = [ desc.EnumValueDescriptorProto(name=i[0], number=i[1]) @@ -298,6 +299,7 @@ def make_enum( enum_pb = desc.EnumDescriptorProto( name=name, value=enum_value_pbs, + options=options, ) return wrappers.EnumType( enum_pb=enum_pb, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 3debb5603b83..2eeb9c043ff2 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -37,6 +37,18 @@ def test_enum_value_properties(): def test_enum_ident(): - message = make_enum('Baz', package='foo.v1', module='bar') - 
assert str(message.ident) == 'bar.Baz' - assert message.ident.sphinx == 'foo.v1.bar.Baz' + enum = make_enum('Baz', package='foo.v1', module='bar') + assert str(enum.ident) == 'bar.Baz' + assert enum.ident.sphinx == 'foo.v1.bar.Baz' + + +def test_enum_options_dict(): + cephalopod = make_enum("Cephalopod", package="animalia.v1", + module="mollusca", options={"allow_alias": True}) + assert isinstance(cephalopod.enum_pb.options, descriptor_pb2.EnumOptions) + assert cephalopod.options_dict == {"allow_alias": True} + + bivalve = make_enum("Bivalve", package="animalia.v1", + module="mollusca") + assert isinstance(bivalve.enum_pb.options, descriptor_pb2.EnumOptions) + assert bivalve.options_dict == {} From c6f3b459015f7e967373d2e12d6c6108725bf2bf Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 11 Mar 2021 14:59:31 -0800 Subject: [PATCH 0498/1339] feat: add bazel support for gapic metadata (#811) --- .../gapic-generator/rules_python_gapic/py_gapic.bzl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 7c6f938f5230..bcb55fdb9301 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -18,13 +18,20 @@ def py_gapic_library( name, srcs, grpc_service_config = None, - plugin_args = [], - opt_args = [], + plugin_args = None, + opt_args = None, + metadata = False, **kwargs): # srcjar_target_name = "%s_srcjar" % name srcjar_target_name = name srcjar_output_suffix = ".srcjar" + plugin_args = plugin_args or [] + opt_args = opt_args or [] + + if metadata: + plugin_args.append("metadata") + file_args = {} if grpc_service_config: file_args[grpc_service_config] = "retry-config" From a24960008f924f0f1091459ed710a8801d863076 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Mar 2021 00:44:54 +0100 Subject: [PATCH 0499/1339] chore(deps): 
update dependency protobuf to v3.15.6 (#812) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 17d86ccef8c4..2770b54084ba 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.1 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.5 +protobuf==3.15.6 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From a9b21c102afeea82ea5e36ba9b205b4cdf1937b5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 11 Mar 2021 23:56:02 +0000 Subject: [PATCH 0500/1339] chore: release 0.43.0 (#810) :robot: I have created a release \*beep\* \*boop\* --- ## [0.43.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.2...v0.43.0) (2021-03-11) ### Features * add bazel support for gapic metadata ([#811](https://www.github.com/googleapis/gapic-generator-python/issues/811)) ([7ced24a](https://www.github.com/googleapis/gapic-generator-python/commit/7ced24a0b20cb6505587b946c03b1b038eef4b4a)) * update templates to permit enum aliases ([#809](https://www.github.com/googleapis/gapic-generator-python/issues/809)) ([2e7ea11](https://www.github.com/googleapis/gapic-generator-python/commit/2e7ea11f80210459106f9780e5f013e2a0381d29)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4f6ca5c78458..8488f7aa5ca8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## [0.43.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.2...v0.43.0) (2021-03-11) + + +### Features + +* add bazel support for gapic metadata ([#811](https://www.github.com/googleapis/gapic-generator-python/issues/811)) ([7ced24a](https://www.github.com/googleapis/gapic-generator-python/commit/7ced24a0b20cb6505587b946c03b1b038eef4b4a)) +* update templates to permit enum aliases ([#809](https://www.github.com/googleapis/gapic-generator-python/issues/809)) ([2e7ea11](https://www.github.com/googleapis/gapic-generator-python/commit/2e7ea11f80210459106f9780e5f013e2a0381d29)) + ### [0.42.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.1...v0.42.2) (2021-03-05) From a9c21a356af11a8a233a496c249c57c0f5cb165b Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 19 Mar 2021 16:13:48 -0600 Subject: [PATCH 0501/1339] chore: remove trailing whitespace (#816) --- .../%sub/services/%service/transports/base.py.j2 | 14 +++++++------- .../%sub/services/%service/transports/grpc.py.j2 | 6 +++--- .../%service/transports/grpc_asyncio.py.j2 | 16 ++++++++-------- .../%sub/services/%service/transports/rest.py.j2 | 14 +++++++------- 4 files changed, 25 insertions(+), 25 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index c2db7df705ba..d450f9a2876e 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -51,7 +51,7 @@ class {{ service.name }}Transport(abc.ABC): scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - **kwargs, + **kwargs, ) -> None: """Instantiate the transport. @@ -69,10 +69,10 @@ class {{ service.name }}Transport(abc.ABC): scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -87,7 +87,7 @@ class {{ service.name }}Transport(abc.ABC): # defaults. 
if credentials and credentials_file: raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - + if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( credentials_file, @@ -177,7 +177,7 @@ class {{ service.name }}Transport(abc.ABC): ], ]: raise NotImplementedError() - {% endif %} + {% endif %} __all__ = ( '{{ service.name }}Transport', diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index f03d09f6720a..7d5494119025 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -119,7 +119,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - + else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -133,14 +133,14 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials - + else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() self._ssl_channel_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) - + # The base transport sets the host, credentials and scopes super().__init__( host=host, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 614802d1d73c..21d9311c4ecb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -121,7 +121,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. @@ -133,10 +133,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -151,7 +151,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): {%- if service.has_lro %} self._operations_client = None {%- endif %} - + if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) if client_cert_source: @@ -177,7 +177,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ) else: self._ssl_channel_credentials = SslCredentials().ssl_credentials - + else: if client_cert_source_for_mtls and not ssl_channel_credentials: cert, key = client_cert_source_for_mtls() @@ -351,7 +351,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] - {% endif %} + {% endif %} __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index f368273d1619..9b2e1ff52f86 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -62,7 +62,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): 
credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - + credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. @@ -73,10 +73,10 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Run the base constructor @@ -95,7 +95,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): self._prep_wrapped_messages(client_info) {%- if service.has_lro %} - + @property def operations_client(self) -> operations_v1.OperationsClient: """Create the client designed to process long-running operations. 
@@ -147,7 +147,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {%- endif %} """ - {# TODO(yon-mg): refactor when implementing grpc transcoding + {# TODO(yon-mg): refactor when implementing grpc transcoding - parse request pb & assign body, path params - shove leftovers into query params - make sure dotted nested fields preserved From a8968a3814cca90bb4fdd145d3576c49e323786f Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 19 Mar 2021 23:20:51 +0100 Subject: [PATCH 0502/1339] fix: use correct retry deadline in publisher methods (#814) Add Retry timeout to ads and async template Co-authored-by: Dov Shlachter --- .../%name/%version/%sub/services/%service/transports/base.py.j2 | 1 + .../%name_%version/%sub/services/%service/async_client.py.j2 | 1 + .../%name_%version/%sub/services/%service/transports/base.py.j2 | 1 + 3 files changed, 3 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index f25ba96a1ae5..29f8c2df9aa0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -93,6 +93,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): exceptions.{{ ex.__name__ }}, {%- endfor %} ), + deadline={{ method.timeout }}, ), {%- endif %} default_timeout={{ method.timeout }}, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index c0e2c9a6ac7b..6fd4e482ecfc 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -254,6 +254,7 @@ class {{ service.async_client_name }}: exceptions.{{ ex.__name__ }}, {%- endfor %} ), + deadline={{ method.timeout }}, ), {%- endif %} default_timeout={{ method.timeout }}, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index d450f9a2876e..fabd1769f723 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -118,6 +118,7 @@ class {{ service.name }}Transport(abc.ABC): exceptions.{{ ex.__name__ }}, {%- endfor %} ), + deadline={{ method.timeout }}, ), {%- endif %} default_timeout={{ method.timeout }}, From 5ca09997e516b5f29ac82445a9cf3b0fce3a9434 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 19 Mar 2021 22:24:02 +0000 Subject: [PATCH 0503/1339] chore: release 0.43.1 (#817) :robot: I have created a release \*beep\* \*boop\* --- ### [0.43.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.0...v0.43.1) (2021-03-19) ### Bug Fixes * use correct retry deadline in publisher methods ([#814](https://www.github.com/googleapis/gapic-generator-python/issues/814)) ([92a2cfc](https://www.github.com/googleapis/gapic-generator-python/commit/92a2cfc47b24c4b1a041d5bbb944d69a67a962a2)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8488f7aa5ca8..ea06972a0018 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.43.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.0...v0.43.1) (2021-03-19) + + +### Bug Fixes + +* use correct retry deadline in publisher methods ([#814](https://www.github.com/googleapis/gapic-generator-python/issues/814)) ([92a2cfc](https://www.github.com/googleapis/gapic-generator-python/commit/92a2cfc47b24c4b1a041d5bbb944d69a67a962a2)) + ## [0.43.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.2...v0.43.0) (2021-03-11) From e01022a4c4a88305cb4e6efc13237232eff8b2d4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 23 Mar 2021 18:08:32 -0400 Subject: [PATCH 0504/1339] fix: Update module alias to resolve naming conflict (#820) In the current design, the first character of each package is used to create an alias. I'd like to append the first character before underscores in the package name to further reduce conflicts. Previously google.appengine_admin would have an alias prefix of ga. With this change, the alias prefix will be gaa. 
Fixes: #819 --- packages/gapic-generator/gapic/schema/metadata.py | 9 ++++++--- .../gapic-generator/tests/unit/schema/test_metadata.py | 8 ++++++++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 0276fe098974..0b1f6df2d50a 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -115,9 +115,12 @@ def module_alias(self) -> str: return '_'.join( ( ''.join( - i[0] - for i in self.package - if i != self.api_naming.version + [ + partial_name[0] + for i in self.package + for partial_name in i.split("_") + if i != self.api_naming.version + ] ), self.module, ) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 4be166bc1b5e..693beffa9bbe 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -184,6 +184,14 @@ def test_address_name_builtin_keyword(): ) assert addr_kword.module_alias == "gp_class" + addr_kword = metadata.Address( + name="Class", + module="class", + package=("google", "appengine_admin"), + api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), + ) + assert addr_kword.module_alias == "gaa_class" + def test_doc_nothing(): meta = metadata.Metadata() From 1668190530611f7eef437dc588d02ddb2704925f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 24 Mar 2021 00:08:06 +0100 Subject: [PATCH 0505/1339] chore(deps): update dependency google-api-core to v1.26.2 (#822) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==1.26.1` -> `==1.26.2` | 
[![age](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.2/compatibility-slim/1.26.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/1.26.2/confidence-slim/1.26.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v1.26.2`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1262-httpswwwgithubcomgoogleapispython-api-corecomparev1261v1262-2021-03-23) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2770b54084ba..4c5eb59b9741 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.26.1 +google-api-core==1.26.2 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 From 7433e623d800d619782175a51df0b26364f6ae94 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 24 Mar 2021 10:20:02 -0700 Subject: [PATCH 0506/1339] fix: add certain raw imports to RESERVED_NAMES (#824) The current example is 'auth', which is imported directly by both transports and unit tests. This name conflicts with any calculated dependency import whose name happens to be 'auth'. Fix involves adding 'auth' and other direct, raw imports in templates to RESERVED_NAMES. 
--- packages/gapic-generator/gapic/utils/reserved_names.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 9bf1c9a91403..9104015d5c15 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -21,7 +21,15 @@ # They are explicitly allowed message, module, and field names. RESERVED_NAMES = frozenset( itertools.chain( + # We CANNOT make exceptions for keywords. keyword.kwlist, + # We make SOME exceptions for certain names that collide with builtins. set(dir(builtins)) - {"filter", "map", "id", "input", "property"}, + # This is a hand-maintained list of modules that are directly imported + # in templates, i.e. they are not added as dependencies to any type, + # the raw text is just there in the template. + # More can be added as collisions are discovered. + # See issue #819 for additional info. 
+ {"auth", "credentials", "exceptions", "future", "options", "policy", "math"} ) ) From a206187d5cf59888d47ff5f3322fd75f5e2d9260 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 24 Mar 2021 17:30:03 +0000 Subject: [PATCH 0507/1339] chore: release 0.43.2 (#823) :robot: I have created a release \*beep\* \*boop\* --- ### [0.43.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.1...v0.43.2) (2021-03-24) ### Bug Fixes * add certain raw imports to RESERVED_NAMES ([#824](https://www.github.com/googleapis/gapic-generator-python/issues/824)) ([04bd8aa](https://www.github.com/googleapis/gapic-generator-python/commit/04bd8aaf0fc2c2c0615105cab39dc33266b66775)) * Update module alias to resolve naming conflict ([#820](https://www.github.com/googleapis/gapic-generator-python/issues/820)) ([f5e9f36](https://www.github.com/googleapis/gapic-generator-python/commit/f5e9f367ec6a72b4272f559a93f6fbb3d7e54b8b)), closes [#819](https://www.github.com/googleapis/gapic-generator-python/issues/819) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ea06972a0018..5f4b3c3fae4f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.43.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.1...v0.43.2) (2021-03-24) + + +### Bug Fixes + +* add certain raw imports to RESERVED_NAMES ([#824](https://www.github.com/googleapis/gapic-generator-python/issues/824)) ([04bd8aa](https://www.github.com/googleapis/gapic-generator-python/commit/04bd8aaf0fc2c2c0615105cab39dc33266b66775)) +* Update module alias to resolve naming conflict ([#820](https://www.github.com/googleapis/gapic-generator-python/issues/820)) ([f5e9f36](https://www.github.com/googleapis/gapic-generator-python/commit/f5e9f367ec6a72b4272f559a93f6fbb3d7e54b8b)), closes [#819](https://www.github.com/googleapis/gapic-generator-python/issues/819) + ### [0.43.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.0...v0.43.1) (2021-03-19) From 2845a207ba701b525110f3f6cc91282b39ba6794 Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Thu, 25 Mar 2021 23:06:04 +0000 Subject: [PATCH 0508/1339] chore: add a Code of Conduct (#821) add a code of conduct --- packages/gapic-generator/CODE_OF_CONDUCT.md | 43 +++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 packages/gapic-generator/CODE_OF_CONDUCT.md diff --git a/packages/gapic-generator/CODE_OF_CONDUCT.md b/packages/gapic-generator/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..46b2a08ea6d1 --- /dev/null +++ b/packages/gapic-generator/CODE_OF_CONDUCT.md @@ -0,0 +1,43 @@ +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming 
community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. + +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. 
+ +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) From 4764f8a9463c2d2a09c387030203c6f82246a65c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 30 Mar 2021 17:41:29 +0200 Subject: [PATCH 0509/1339] chore(deps): update dependency google-api-core to v1.26.3 (#829) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4c5eb59b9741..472f2d6b3bce 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==7.1.2 -google-api-core==1.26.2 +google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 From af8da44d0b1297243da02dc753b499d41617f61d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 2 Apr 2021 07:50:25 -0700 Subject: [PATCH 0510/1339] fixup: change the default state of the metadata bazel flag (#832) --- packages/gapic-generator/rules_python_gapic/py_gapic.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index bcb55fdb9301..e0f0500638a3 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -20,7 +20,7 @@ def py_gapic_library( grpc_service_config = None, plugin_args = None, opt_args = None, - metadata = False, + metadata = True, **kwargs): # srcjar_target_name = "%s_srcjar" % name srcjar_target_name = name From d2065f2ef2fd5afe496a1da98d67f2005aa07e07 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 13 Apr 2021 00:06:14 +0200 Subject: [PATCH 0511/1339] chore(deps): update dependency protobuf to 
v3.15.8 (#833) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 472f2d6b3bce..fd1efaac3ad0 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.6 +protobuf==3.15.8 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From ecb858b7b49f2f50e3d4d8923316a177f03e253e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 12 Apr 2021 16:28:07 -0600 Subject: [PATCH 0512/1339] fix: sort subpackages in %namespace/%name/__init__.py (#836) Small follow up to https://github.com/googleapis/gapic-generator-python/pull/806 --- .../gapic/templates/%namespace/%name/__init__.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 7ffe67b3fe3c..ae721dedf041 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -46,7 +46,7 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' 
__all__ = ( {%- filter indent %} {% filter sort_lines -%} -{% for subpackage in api.subpackages.keys() -%} +{% for subpackage, _ in api.subpackages|dictsort -%} '{{ subpackage }}', {% endfor -%} {% for service in api.services.values()|sort(attribute='name') From dac27525c944f9c8a2bcfd5f6b1de2d06fecb3c4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 13 Apr 2021 09:31:36 -0700 Subject: [PATCH 0513/1339] chore: release 0.43.3 (#839) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5f4b3c3fae4f..0fd8cc191a4a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.43.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.2...v0.43.3) (2021-04-12) + + +### Bug Fixes + +* sort subpackages in %namespace/%name/__init__.py ([#836](https://www.github.com/googleapis/gapic-generator-python/issues/836)) ([90cf882](https://www.github.com/googleapis/gapic-generator-python/commit/90cf882b20f430499f692e6b9b23497b3555e928)) + ### [0.43.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.1...v0.43.2) (2021-03-24) From 79b4a01f203071dc7adeebac5528dae72d228856 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Thu, 15 Apr 2021 13:42:01 -0400 Subject: [PATCH 0514/1339] chore: prevent normalization of semver versioning (#841) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 69bb01d55f31..eea88a26ded6 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -27,7 +27,7 @@ setup( 
name="gapic-generator", - version=version, + version=setuptools.sic(version), license="Apache 2.0", author="Dov Shlachter", author_email="dovs@google.com", From 63366ca2c6322a99b56971f5c96ea7b8033a09e5 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 21 Apr 2021 12:55:19 -0600 Subject: [PATCH 0515/1339] feat: support self-signed JWT flow for service accounts (#774) See [RFC (internal only)](https://docs.google.com/document/d/1SNCVTmW6Rtr__u-_V7nsT9PhSzjj1z0P9fAD3YUgRoc/edit#) and https://aip.dev/auth/4111. Support the self-signed JWT flow for service accounts by passing `default_scopes` and `default_host` in calls to the auth library and `create_channel`. This depends on features exposed in the following PRs: https://github.com/googleapis/python-api-core/pull/134, https://github.com/googleapis/google-auth-library-python/pull/665. It may be easier to look at https://github.com/googleapis/python-translate/pull/107/files for a diff on a real library. This change is written so that the library is (temporarily) compatible with older `google-api-core` and `google-auth` versions. Because of this it not possible to reach 100% coverage on a single unit test run. `pytest` runs twice in two of the `nox` sessions. Miscellaneous changes: - sprinkled in `__init__.py` files in subdirs of the `test/` directory, as otherwise pytest-cov seems to fail to collect coverage properly in some instances. 
- new dependency on `packaging` for Version comparison https://pypi.org/project/packaging/ Co-authored-by: Brent Shaffer --- .../services/%service/transports/base.py.j2 | 96 +++++-- .../services/%service/transports/grpc.py.j2 | 8 +- .../%service/transports/grpc_asyncio.py.j2 | 10 +- .../services/%service/transports/rest.py.j2 | 9 +- .../gapic/templates/.coveragerc.j2 | 1 - .../gapic/templates/noxfile.py.j2 | 62 +++++ .../gapic/templates/setup.py.j2 | 3 +- .../gapic/templates/tests/__init__.py.j2 | 2 + .../gapic/templates/tests/unit/__init__.py.j2 | 2 + .../%name_%version/%sub/test_%service.py.j2 | 245 ++++++++++++++++-- .../templates/tests/unit/gapic/__init__.py.j2 | 2 + packages/gapic-generator/noxfile.py | 80 ++++-- 12 files changed, 458 insertions(+), 62 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/tests/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/gapic/__init__.py.j2 diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index fabd1769f723..f5d9ee5dc658 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -2,10 +2,12 @@ {% block content %} import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources from google import auth # type: ignore +import google.api_core # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -34,6 +36,18 
@@ try: except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + + class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" @@ -43,13 +57,15 @@ class {{ service.name }}Transport(abc.ABC): {%- endfor %} ) + DEFAULT_HOST: str = {% if service.host %}'{{ service.host }}'{% else %}{{ '' }}{% endif %} + def __init__( self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + host: str = DEFAULT_HOST, credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: @@ -66,7 +82,7 @@ class {{ service.name }}Transport(abc.ABC): credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,6 +96,8 @@ class {{ service.name }}Transport(abc.ABC): host += ':443' self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. 
self._scopes = scopes or self.AUTH_SCOPES @@ -91,17 +109,59 @@ class {{ service.name }}Transport(abc.ABC): if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( credentials_file, - scopes=self._scopes, + **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default(scopes=self._scopes, quota_project_id=quota_project_id) + credentials, _ = auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Save the credentials. self._credentials = credentials + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + 
self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs + + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -138,11 +198,11 @@ class {{ service.name }}Transport(abc.ABC): {%- for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> typing.Callable[ + def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], - typing.Union[ + Union[ {{ method.output.ident }}, - typing.Awaitable[{{ method.output.ident }}] + Awaitable[{{ method.output.ident }}] ]]: raise NotImplementedError() {%- endfor %} @@ -152,29 +212,29 @@ class {{ service.name }}Transport(abc.ABC): @property def set_iam_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + Union[policy.Policy, Awaitable[policy.Policy]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ + ) -> Callable[ [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + Union[policy.Policy, Awaitable[policy.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ + ) -> Callable[ [iam_policy.TestIamPermissionsRequest], - typing.Union[ + Union[ iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + Awaitable[iam_policy.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 7d5494119025..e7df35a1df2c 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -2,7 +2,7 @@ {% block content %} import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore {%- if service.has_lro %} @@ -202,13 +202,15 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 21d9311c4ecb..accb46fc91d3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -2,7 +2,7 @@ {% block content %} import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore @@ -12,6 +12,7 @@ from google.api_core import operations_v1 # type: ignore from google import auth # type: ignore from 
google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -75,13 +76,15 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): Returns: aio.Channel: A gRPC AsyncIO channel object. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs ) @@ -163,7 +166,6 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 9b2e1ff52f86..4f30997ce46d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -81,12 +81,14 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object super().__init__( host=host, credentials=credentials, client_info=client_info, ) - self._session = AuthorizedSession(self._credentials) + self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) {%- if service.has_lro %} self._operations_client = None {%- endif %} @@ -106,11 +108,14 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # Sanity check: Only create a new client if we do not already have one. if self._operations_client is None: from google.api_core import grpc_helpers + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(self._host, self._scopes) + self._operations_client = operations_v1.OperationsClient( grpc_helpers.create_channel( self._host, credentials=self._credentials, - scopes=self.AUTH_SCOPES, + **self_signed_jwt_kwargs, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), diff --git a/packages/gapic-generator/gapic/templates/.coveragerc.j2 b/packages/gapic-generator/gapic/templates/.coveragerc.j2 index f2ac95dda9d7..6e2f585cbd56 100644 --- a/packages/gapic-generator/gapic/templates/.coveragerc.j2 +++ b/packages/gapic-generator/gapic/templates/.coveragerc.j2 @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = {{ api.naming.module_namespace|join("/") }}/{{ api.naming.module_name }}/__init__.py diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index b6225d867da3..0b3e167a7301 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -2,10 +2,28 @@ {% block content %} import os +import pathlib import shutil +import subprocess +import sys + import nox # type: ignore +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = 
CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] @nox.session(python=['3.6', '3.7', '3.8', '3.9']) def unit(session): @@ -25,6 +43,18 @@ def unit(session): ) +@nox.session(python='3.7') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + @nox.session(python=['3.6', '3.7']) def mypy(session): """Run the type checker.""" @@ -40,6 +70,38 @@ def mypy(session): {%- endif %} ) + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + @nox.session(python='3.6') def docs(session): """Build the docs for this library.""" diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index f7ed0a9923a5..637c4fa97d28 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -29,8 +29,9 @@ setuptools.setup( 
'google-api-core[grpc] >= 1.22.2, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', + 'packaging >= 14.3', {%- if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} - 'grpc-google-iam-v1', + 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', {%- endif %} ), python_requires='>=3.6', diff --git a/packages/gapic-generator/gapic/templates/tests/__init__.py.j2 b/packages/gapic-generator/gapic/templates/tests/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/tests/unit/__init__.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/unit/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f1d8685850fb..f7ae145b74a6 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -3,6 +3,7 @@ {% block content %} import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -26,6 +27,8 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.async_client_name }} {%- endif %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + 
service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _GOOGLE_AUTH_VERSION +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _API_CORE_VERSION from google.api_core import client_options from google.api_core import exceptions from google.api_core import grpc_helpers @@ -51,6 +54,28 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore {% endfilter %} +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -1439,16 +1464,39 @@ def test_{{ service.name|snake_case }}_base_transport(): {% endif %} +@requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case 
}}_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, 'load_credentials_from_file') as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + with mock.patch.object(auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.{{ service.name }}Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %} + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_{{ service.name|snake_case }}_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", quota_project_id="octopus", ) - 
load_creds.assert_called_once_with("credentials.json", scopes=( + load_creds.assert_called_once_with("credentials.json", + scopes=( {%- for scope in service.oauth_scopes %} '{{ scope }}', {%- endfor %} @@ -1459,38 +1507,205 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): def test_{{ service.name|snake_case }}_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, 'default') as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + with mock.patch.object(auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None adc.return_value = (credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, 'default') as adc: + with mock.patch.object(auth, 'default', autospec=True) as adc: adc.return_value = (credentials.AnonymousCredentials(), None) {{ service.client_name }}() - adc.assert_called_once_with(scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %}), + adc.assert_called_once_with( + scopes=None, + default_scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), quota_project_id=None, ) + +@requires_google_auth_lt_1_25_0 +def test_{{ service.name|snake_case }}_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, 'default', autospec=True) as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + {{ service.client_name }}() + adc.assert_called_once_with( + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), + quota_project_id=None, + ) + + {% if 'grpc' in opts.transport %} -def test_{{ service.name|snake_case }}_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.{{ service.name }}GrpcTransport, + transports.{{ service.name }}GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, 'default') as adc: + with mock.patch.object(auth, 'default', autospec=True) as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk", quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %}), + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.{{ service.name }}GrpcTransport, + transports.{{ service.name }}GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_{{ service.name|snake_case }}_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default", autospec=True) as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with( + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.{{ service.name }}GrpcTransport, grpc_helpers), + (transports.{{ service.name }}GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +@requires_api_core_gte_1_26_0 +def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + {% with host = (service.host|default('localhost', true)) -%} + create_channel.assert_called_with( + "{{ host }}", + credentials=creds, + credentials_file=None, quota_project_id="octopus", + default_scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), + scopes=["1", "2"], + default_host="{{ host }}", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) + {% endwith %} + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.{{ service.name }}GrpcTransport, grpc_helpers), + (transports.{{ service.name }}GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +@requires_api_core_lt_1_26_0 +def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + {% with host = (service.host|default('localhost', true)) -%} + create_channel.assert_called_with( + "{{ host }}", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.{{ service.name }}GrpcTransport, grpc_helpers), + (transports.{{ service.name }}GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +@requires_api_core_lt_1_26_0 +def test_{{ service.name|snake_case }}_transport_create_channel_user_scopes(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = credentials.AnonymousCredentials() + adc.return_value = (creds, None) + {% with host = (service.host|default('localhost', true)) -%} + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "{{ host }}", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} + {% endif %} {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/__init__.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 7dbe33ebc366..e17277a48761 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -175,12 +175,7 @@ def showcase_mtls_alternative_templates(session): ) -@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) -def showcase_unit( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), -): - """Run the generated unit tests against the Showcase library.""" - +def run_showcase_unit_tests(session, fail_under=100): session.install( "coverage", "pytest", @@ -190,28 +185,77 @@ def showcase_unit( "pytest-asyncio", ) + # Run the tests. 
+ session.run( + "py.test", + "-n=auto", + "--quiet", + "--cov=google", + "--cov-append", + f"--cov-fail-under={str(fail_under)}", + *(session.posargs or [path.join("tests", "unit")]), + ) + + +@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) +def showcase_unit( + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), +): + """Run the generated unit tests against the Showcase library.""" + with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) - - # Run the tests. - session.run( - "py.test", - "-n=auto", - "--quiet", - "--cov=google", - "--cov-report=term", - *(session.posargs or [path.join("tests", "unit")]), - ) + + # Unit tests are run twice with different dependencies to exercise + # all code paths. + # TODO(busunkim): remove when default templates require google-auth>=1.25.0 + + # 1. Run tests at lower bound of dependencies + session.install("nox") + session.run("nox", "-s", "update_lower_bounds") + session.install(".", "--force-reinstall", "-c", "constraints.txt") + # Some code paths require an older version of google-auth. + # google-auth is a transitive dependency so it isn't in the + # lower bound constraints file produced above. + session.install("google-auth==1.21.1") + run_showcase_unit_tests(session, fail_under=0) + + # 2. 
Run the tests again with latest version of dependencies + session.install(".", "--upgrade", "--force-reinstall") + # This time aggregate coverage should reach 100% + run_showcase_unit_tests(session, fail_under=100) @nox.session(python=["3.7", "3.8", "3.9"]) def showcase_unit_alternative_templates(session): - showcase_unit(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) + with showcase_library(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) as lib: + session.chdir(lib) + run_showcase_unit_tests(session) @nox.session(python=["3.8"]) def showcase_unit_add_iam_methods(session): - showcase_unit(session, other_opts=("add-iam-methods",)) + with showcase_library(session, other_opts=("add-iam-methods",)) as lib: + session.chdir(lib) + + # Unit tests are run twice with different dependencies to exercise + # all code paths. + # TODO(busunkim): remove when default templates require google-auth>=1.25.0 + + # 1. Run tests at lower bound of dependencies + session.install("nox") + session.run("nox", "-s", "update_lower_bounds") + session.install(".", "--force-reinstall", "-c", "constraints.txt") + # Some code paths require an older version of google-auth. + # google-auth is a transitive dependency so it isn't in the + # lower bound constraints file produced above. + session.install("google-auth==1.21.1") + run_showcase_unit_tests(session, fail_under=0) + + # 2. 
Run the tests again with latest version of dependencies + session.install(".", "--upgrade", "--force-reinstall") + # This time aggregate coverage should reach 100% + run_showcase_unit_tests(session, fail_under=100) @nox.session(python="3.8") From bc72466235463c7f3aefefa2dbd51bf045be03ba Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 22 Apr 2021 09:45:21 -0600 Subject: [PATCH 0516/1339] chore: use trim_blocks and lstrip_blocks for jinja templates (#838) --- .../%name/%version/%sub/__init__.py.j2 | 25 +- .../%sub/services/%service/__init__.py.j2 | 1 + .../%sub/services/%service/client.py.j2 | 122 ++--- .../%sub/services/%service/pagers.py.j2 | 15 +- .../%service/transports/__init__.py.j2 | 1 + .../services/%service/transports/base.py.j2 | 35 +- .../services/%service/transports/grpc.py.j2 | 27 +- .../%name/%version/%sub/types/%proto.py.j2 | 29 +- .../%name/%version/%sub/types/_enum.py.j2 | 8 +- .../%name/%version/%sub/types/_message.py.j2 | 60 ++- .../%namespace/%name/%version/__init__.py.j2 | 73 +-- .../%namespace/%name/__init__.py.j2 | 89 ++-- .../gapic/ads-templates/_base.py.j2 | 2 +- .../docs/%name_%version/services.rst.j2 | 2 +- .../gapic/ads-templates/docs/conf.py.j2 | 1 + .../examples/feature_fragments.j2 | 113 +++-- .../gapic/ads-templates/examples/sample.py.j2 | 15 +- .../gapic/ads-templates/noxfile.py.j2 | 7 +- .../fixup_%name_%version_keywords.py.j2 | 13 +- .../gapic/ads-templates/setup.py.j2 | 11 +- .../%name_%version/%sub/test_%service.py.j2 | 193 ++++---- .../%name_%version/test_module_import.py.j2 | 31 +- .../gapic/generator/generator.py | 2 + .../templates/%namespace/%name/__init__.py.j2 | 82 +-- .../%name_%version/%sub/__init__.py.j2 | 51 +- .../%sub/services/%service/__init__.py.j2 | 9 +- .../%sub/services/%service/async_client.py.j2 | 133 ++--- .../%sub/services/%service/client.py.j2 | 140 +++--- .../%sub/services/%service/pagers.py.j2 | 18 +- .../%service/transports/__init__.py.j2 | 25 +- 
.../services/%service/transports/base.py.j2 | 35 +- .../services/%service/transports/grpc.py.j2 | 31 +- .../%service/transports/grpc_asyncio.py.j2 | 31 +- .../services/%service/transports/rest.py.j2 | 61 +-- .../%name_%version/%sub/types/%proto.py.j2 | 27 +- .../%name_%version/%sub/types/__init__.py.j2 | 28 +- .../%name_%version/%sub/types/_enum.py.j2 | 8 +- .../%name_%version/%sub/types/_message.py.j2 | 60 ++- .../gapic/templates/docs/conf.py.j2 | 1 + .../templates/examples/feature_fragments.j2 | 116 +++-- .../gapic/templates/examples/sample.py.j2 | 15 +- .../gapic/templates/noxfile.py.j2 | 1 + .../fixup_%name_%version_keywords.py.j2 | 13 +- .../gapic/templates/setup.py.j2 | 7 +- .../%name_%version/%sub/test_%service.py.j2 | 467 +++++++++--------- .../tests/unit/samplegen/test_integration.py | 10 +- 46 files changed, 1159 insertions(+), 1085 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 index d8485fc47653..1e1ddd49f0a4 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 @@ -2,28 +2,29 @@ {% block content %} + {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
-#} __all__ = ( - {%- filter sort_lines %} - {%- for subpackage in api.subpackages.keys() %} + {% filter sort_lines %} + {% for subpackage in api.subpackages.keys() %} '{{ subpackage }}', - {%- endfor %} - {%- for service in api.services.values() + {% endfor %} + {% for service in api.services.values() if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', - {%- endfor %} - {%- for proto in api.protos.values() + {% endfor %} + {% for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.messages.values() %} + {% for message in proto.messages.values() %} '{{ message.name }}', - {%- endfor %} - {%- for enum in proto.enums.values() %} + {% endfor %} + {% for enum in proto.enums.values() %} '{{ enum.name }}', - {%- endfor %} - {%- endfor %} - {%- endfilter %} + {% endfor %} + {% endfor %} + {% endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 index f9f07d44df9a..cfa88069e8ca 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/__init__.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + from .client import {{ service.client_name }} __all__ = ( diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 2f36e5058b70..aba4ca28dcc7 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + from collections import OrderedDict from distutils import util import os @@ -18,12 +19,12 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} -{% for ref_type in method.flat_ref_types -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} +{% for ref_type in method.flat_ref_types %} {{ ref_type.ident.python_import }} -{% endfor -%} -{% endfor -%} +{% endfor %} +{% endfor %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc import {{ service.name }}GrpcTransport @@ -93,6 +94,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) @@ -144,7 +146,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return self._transport - {% for message in service.resource_messages|sort(attribute="resource_type") -%} + {% for message in service.resource_messages|sort(attribute="resource_type") %} @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" @@ -157,7 +159,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} {% endfor %} - {% for 
resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} + {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} @staticmethod def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" @@ -277,81 +279,81 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) - {% for method in service.methods.values() -%} + {% for method in service.methods.values() %} def {{ method.name|snake_case }}(self, - {%- if not method.client_streaming %} + {% if not method.client_streaming %} request: {{ method.input.ident }} = None, *, - {% for field in method.flattened_fields.values() -%} + {% for field in method.flattened_fields.values() %} {{ field.name }}: {{ field.ident }} = None, - {% endfor -%} - {%- else %} + {% endfor %} + {% else %} requests: Iterator[{{ method.input.ident }}] = None, *, - {% endif -%} + {% endif %} retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - {%- if not method.server_streaming %} + {% if not method.server_streaming %} ) -> {{ method.client_output.ident }}: - {%- else %} + {% else %} ) -> Iterable[{{ method.client_output.ident }}]: - {%- endif %} + {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: - {%- if not method.client_streaming %} + {% if not method.client_streaming %} request (:class:`{{ method.input.ident.sphinx }}`): - The request object.{{ ' ' -}} + The request object.{{ ' ' }} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {% for key, field in method.flattened_fields.items() -%} + {% for key, field in method.flattened_fields.items() %} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the 
``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - {% endfor -%} - {%- else %} + {% endfor %} + {% else %} requests (Iterator[`{{ method.input.ident.sphinx }}`]): - The request object iterator.{{ ' ' -}} + The request object iterator.{{ ' ' }} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {%- endif %} + {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - {%- if not method.void %} + {% if not method.void %} Returns: - {%- if not method.server_streaming %} + {% if not method.server_streaming %} {{ method.client_output.ident.sphinx }}: - {%- else %} + {% else %} Iterable[{{ method.client_output.ident.sphinx }}]: - {%- endif %} + {% endif %} {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} - {%- endif %} + {% endif %} """ - {%- if not method.client_streaming %} + {% if not method.client_streaming %} # Create or coerce a protobuf request object. - {% if method.flattened_fields -%} + {% if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - {% endif -%} {# method.flattened_fields #} - {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} + {% endif %} {# method.flattened_fields #} + {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} # The request isn't a proto-plus wrapped type. 
# so it must be constructed via keyword expansion. if isinstance(request, dict): request = {{ method.input.ident }}(**request) - {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} + {% if method.flattened_fields %}{# Cross-package req and flattened fields #} elif not request: request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) - {% endif -%}{# Cross-package req and flattened fields #} - {%- else %} {# Request is in _our_ package #} + {% endif %}{# Cross-package req and flattened fields #} + {% else %} {# Request is in _our_ package #} # Minor optimization to avoid making a copy if the user passes # in a {{ method.input.ident }}. # There's no risk of modifying the input as we've already verified @@ -360,57 +362,57 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request = {{ method.input.ident }}(request) {% endif %} {# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields and method.input.ident.package == method.ident.package -%} + {% if method.flattened_fields and method.input.ident.package == method.ident.package %} # If we have keyword arguments corresponding to fields on the # request, apply these. 
- {% endif -%} - {%- for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} + {% endif %} + {% for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} - {%- endfor %} + {% endfor %} {# Map-y fields can be _updated_, however #} - {%- for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} - {%- if field.map %} {# map implies repeated, but repeated does NOT imply map#} + {% for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} + {% if field.map %} {# map implies repeated, but repeated does NOT imply map#} if {{ field.name }}: request.{{ key }}.update({{ field.name }}) - {%- else %} + {% else %} {# And list-y fields can be _extended_ -#} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) - {%- endif %} {# field.map #} - {%- endfor %} {# key, field in method.flattened_fields.items() #} - {%- endif %} {# method.client_streaming #} + {% endif %} {# field.map #} + {% endfor %} {# key, field in method.flattened_fields.items() #} + {% endif %} {# method.client_streaming #} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] - {%- if method.field_headers %} + {% if method.field_headers %} # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - {%- for field_header in method.field_headers %} - {%- if not method.client_streaming %} + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} ('{{ field_header }}', request.{{ field_header }}), - {%- endif %} - {%- endfor %} + {% endif %} + {% endfor %} )), ) - {%- endif %} {# method.field_headers #} + {% endif %} {# method.field_headers #} # Send the request. - {% if not method.void %}response = {% endif %}rpc( - {%- if not method.client_streaming %} + {%+ if not method.void %}response = {% endif %}rpc( + {% if not method.client_streaming %} request, - {%- else %} + {% else %} requests, - {%- endif %} {# method.client_streaming #} + {% endif %} {# method.client_streaming #} retry=retry, timeout=timeout, metadata=metadata, ) - {%- if method.lro %} + {% if method.lro %} # Wrap the response in an operation future. response = {{ method.client_output.ident.module_alias or method.client_output.ident.module }}.from_gapic( @@ -419,7 +421,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ method.lro.response_type.ident }}, metadata_type={{ method.lro.metadata_type.ident }}, ) - {%- elif method.paged_result_field %} + {% elif method.paged_result_field %} # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. @@ -429,12 +431,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): response=response, metadata=metadata, ) - {%- endif %} {# method.lro #} - {%- if not method.void %} + {% endif %} {# method.lro #} + {% if not method.void %} # Done; return the response. 
return response - {%- endif %} {# method.void #} + {% endif %} {# method.void #} {{ '\n' }} {% endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index 73115c59acef..26075b4e61e5 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -1,20 +1,23 @@ {% extends '_base.py.j2' %} {% block content %} -{% for method in service.methods.values() | selectattr('paged_result_field') -%} -{% if loop.first -%} + +{% for method in service.methods.values() | selectattr('paged_result_field') %} +{% if loop.first %} {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. -#} from typing import Any, Callable, Iterable, Sequence, Tuple -{% filter sort_lines -%} -{% for method in service.methods.values() | selectattr('paged_result_field') -%} +{% filter sort_lines %} +{% for method in service.methods.values() | selectattr('paged_result_field') %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% if not method.paged_result_field.is_primitive %}{{ method.paged_result_field.message.ident.python_import }}{% endif %} +{% if not method.paged_result_field.is_primitive %} +{{ method.paged_result_field.message.ident.python_import }} +{% endif %} {% endfor %} -{% endfilter -%} +{% endfilter %} {% endif %} class {{ method.name }}Pager: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 index 470cde5d1969..d175b9a97ad5 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + from collections import OrderedDict from typing import Dict, Type diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 29f8c2df9aa0..a6a0458a332f 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + import abc import typing import pkg_resources @@ -8,16 +9,16 @@ import pkg_resources from google import auth from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -{%- if service.has_lro %} +{% if service.has_lro %} from google.api_core import operations_v1 # type: ignore -{%- endif %} +{% endif %} from google.auth import credentials # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% endfor -%} +{% endfor %} {% endfilter %} try: @@ -34,9 +35,9 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): """Abstract transport class for {{ service.name }}.""" AUTH_SCOPES = ( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ) def __init__( @@ -49,7 +50,7 @@ class {{ service.name 
}}Transport(metaclass=abc.ABCMeta): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -80,22 +81,22 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): def _prep_wrapped_messages(self, client_info): # Precomputed wrapped methods self._wrapped_methods = { - {% for method in service.methods.values() -%} + {% for method in service.methods.values() %} self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( self.{{ method.name|snake_case }}, - {%- if method.retry %} + {% if method.retry %} default_retry=retries.Retry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, - {%- endfor %} + {% endfor %} ), deadline={{ method.timeout }}, ), - {%- endif %} + {% endif %} default_timeout={{ method.timeout }}, client_info=client_info, ), @@ -103,21 +104,21 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): } - {%- if service.has_lro %} + {% if service.has_lro %} @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" raise NotImplementedError - {%- endif %} - {%- for method in service.methods.values() %} + {% endif %} + {% for method in service.methods.values() %} @property def {{ method.name|snake_case }}(self) -> typing.Callable[ [{{ 
method.input.ident }}], {{ method.output.ident }}]: raise NotImplementedError - {%- endfor %} + {% endfor %} __all__ = ( diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index c8e721b9cc86..a167ae640e74 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,13 +1,14 @@ {% extends '_base.py.j2' %} {% block content %} + import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore -{%- if service.has_lro %} +{% if service.has_lro %} from google.api_core import operations_v1 # type: ignore -{%- endif %} +{% endif %} from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore @@ -15,11 +16,11 @@ from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% endfor -%} +{% endfor %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -52,7 +53,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -192,7 +193,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """Return the channel designed to connect to this service. """ return self._grpc_channel - {%- if service.has_lro %} + {% if service.has_lro %} @property def operations_client(self) -> operations_v1.OperationsClient: @@ -209,17 +210,17 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Return the client from cache. return self.__dict__['operations_client'] - {%- endif %} - {%- for method in service.methods.values() %} + {% endif %} + {% for method in service.methods.values() %} @property def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: - r"""Return a callable for the {{- ' ' -}} + r"""Return a callable for the {{ ' ' }} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} - {{- ' ' -}} method over gRPC. + {{ ' ' }} method over gRPC. 
{{ method.meta.doc|rst(width=72, indent=8) }} @@ -240,10 +241,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) return self._stubs['{{ method.name|snake_case }}'] - {%- endfor %} + {% endfor %} __all__ = ( '{{ service.name }}GrpcTransport', ) -{%- endblock -%} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 index 48ef010d4983..c200027ac27a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 @@ -1,40 +1,41 @@ {% extends "_base.py.j2" %} -{% block content -%} +{% block content %} + {% with p = proto.disambiguate('proto') %} -{% if proto.messages|length or proto.all_enums|length -%} +{% if proto.messages|length or proto.all_enums|length %} import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} -{% filter sort_lines -%} -{% for import_ in proto.python_modules -%} +{% filter sort_lines %} +{% for import_ in proto.python_modules %} {{ import_ }} -{% endfor -%} +{% endfor %} {% endfilter %} __protobuf__ = {{ p }}.module( package='{{ '.'.join(proto.meta.address.package) }}', - {% if api.naming.proto_package != '.'.join(proto.meta.address.package) -%} + {% if api.naming.proto_package != '.'.join(proto.meta.address.package) %} marshal='{{ api.naming.proto_package }}', - {% endif -%} + {% endif %} manifest={ - {%- for enum in proto.enums.values() %} + {% for enum in proto.enums.values() %} '{{ enum.name }}', - {%- endfor %} - {%- for message in proto.messages.values() %} + {% endfor %} + {% for message in proto.messages.values() %} '{{ 
message.name }}', - {%- endfor %} + {% endfor %} }, ) -{% for enum in proto.enums.values() -%} +{% for enum in proto.enums.values() %} {% include '%namespace/%name/%version/%sub/types/_enum.py.j2' with context %} {% endfor %} -{% for message in proto.messages.values() -%} - {% include '%namespace/%name/%version/%sub/types/_message.py.j2' with context %} +{% for message in proto.messages.values() %} + {% include '%namespace/%name/%version/%sub/types/_message.py.j2' with context %} {% endfor %} {% endwith %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 index 73994a158c5d..8921af307062 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 @@ -1,9 +1,9 @@ class {{ enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" - {% if enum.enum_pb.HasField("options") -%} + {% if enum.enum_pb.HasField("options") %} _pb_options = {{ enum.options_dict }} - {% endif -%} - {% for enum_value in enum.values -%} + {% endif %} + {% for enum_value in enum.values %} {{ enum_value.name }} = {{ enum_value.number }} - {% endfor -%} + {% endfor %} {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 435b576dbef9..5e07a55f2bc9 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -2,27 +2,27 @@ class {{ message.name }}({{ p }}.Message): r"""{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} Attributes: - {%- for field in 
message.fields.values() %} + {% for field in message.fields.values() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(indent=12, nl=False) }} - {%- endfor %} - {% endif -%} + {% endfor %} + {% endif %} """ {# Iterate over nested enums. -#} - {% for enum in message.nested_enums.values() -%} - {% filter indent %} - {%- include '%namespace/%name/%version/%sub/types/_enum.py.j2' %} + {% for enum in message.nested_enums.values() %} + {% filter indent(first=True) %} + {% include '%namespace/%name/%version/%sub/types/_enum.py.j2' %} {% endfilter %} - {% endfor -%} + {% endfor %} {# Iterate over nested messages. -#} - {% for submessage in message.nested_messages.values() -%} - {% if not submessage.map -%} - {% with message = submessage %}{% filter indent %} - {%- include '%namespace/%name/%version/%sub/types/_message.py.j2' %} + {% for submessage in message.nested_messages.values() %} + {% if not submessage.map %} + {% with message = submessage %}{% filter indent(first=True) %} + {% include '%namespace/%name/%version/%sub/types/_message.py.j2' %} {% endfilter %}{% endwith %} {% endif %} - {% endfor -%} + {% endfor %} {% if "next_page_token" in message.fields.values()|map(attribute='name') %} @property @@ -31,21 +31,31 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {# Iterate over fields. 
-#} - {% for field in message.fields.values() -%} - {% if field.map -%} - {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] -%} + {% for field in message.fields.values() %} + {% if field.map %} + {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] %} {{ field.name }} = {{ p }}.MapField( - {{- p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }} - {%- if value_field.enum or value_field.message %}, + {{ p }}.{{ key_field.proto_type }}, + {{ p }}.{{ value_field.proto_type }}, + number={{ field.number }}, + {% if value_field.enum or value_field.message %} {{ value_field.proto_type.lower() }}={{ value_field.type.ident.rel(message.ident) }}, - {% endif %}) - {% endwith -%} - {% else -%} + {% endif %} + ) + {% endwith %} + {% else %} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( - {{- p }}.{{ field.proto_type }}, number={{ field.number }}{% if field.proto3_optional %}, optional=True{% elif field.oneof %}, oneof='{{ field.oneof }}'{% endif %} - {%- if field.enum or field.message %}, + {{ p }}.{{ field.proto_type }}, + number={{ field.number }}, + {% if field.proto3_optional %} + optional=True, + {% elif field.oneof %} + oneof='{{ field.oneof }}', + {% endif %} + {% if field.enum or field.message %} {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, - {% endif %}) - {% endif -%} - {% endfor -%} + {% endif %} + ) + {% endif %} + {% endfor %} {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index 3889fa1545dd..933e55dc3451 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' 
%} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} + +{% if opts.lazy_import %} {# lazy import #} import importlib import sys @@ -11,22 +12,22 @@ if sys.version_info < (3, 7): _lazy_type_to_package_map = { # Message types -{%- for _, message in api.top_level_messages|dictsort %} +{% for _, message in api.top_level_messages|dictsort %} '{{ message.name }}': '{{ message.ident.package|join('.') }}.types.{{ message.ident.module }}', -{%- endfor %} +{% endfor %} # Enum types -{%- for _, enum in api.top_level_enums|dictsort %} +{% for _, enum in api.top_level_enums|dictsort %} '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', -{%- endfor %} +{% endfor %} {# TODO(yon-mg): add rest transport service once I know what this is #} # Client classes and transports -{%- for _, service in api.services|dictsort %} +{% for _, service in api.services|dictsort %} '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', '{{ service.transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', '{{ service.grpc_transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', -{%- endfor %} +{% endfor %} } @@ -47,20 +48,20 @@ def __getattr__(name): # Requires Python >= 3.7 def __dir__(): return globals().get('__all__') or __getattr__('__all__') -{% else -%} {# do not use lazy import #} +{% else %} {# do not use lazy import #} {# Import subpackages. 
-#} -{% filter sort_lines -%} -{% for subpackage in api.subpackages.keys() -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} +{% filter sort_lines %} +{% for subpackage in api.subpackages.keys() %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{ api.naming.versioned_module_name }} import {{ subpackage }} -{% endfor -%} +{% endfor %} {# Import services for this package. -#} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + if service.meta.address.subpackage == api.subpackage_view %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} -{% endfor -%} +{% endfor %} {# Import messages and enums from each proto. It is safe to import all of the messages into the same namespace here, @@ -73,41 +74,41 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' a proto package. 
-#} {% for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view -%} - {% for message in proto.messages.values()|sort(attribute='name') -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} + if proto.meta.address.subpackage == api.subpackage_view %} + {% for message in proto.messages.values()|sort(attribute='name') %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} -{% endfor -%} -{% for enum in proto.enums.values()|sort(attribute='name') -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} +{% endfor %} +{% for enum in proto.enums.values()|sort(attribute='name') %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} -{% endfor %}{% endfor -%} +{% endfor %}{% endfor %} {% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
-#} __all__ = ( -{%- filter indent %} -{% filter sort_lines -%} -{% for subpackage in api.subpackages.keys() -%} +{% filter indent %} +{% filter sort_lines %} +{% for subpackage in api.subpackages.keys() %} '{{ subpackage }}', -{% endfor -%} +{% endfor %} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} + if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', -{% endfor -%} +{% endfor %} {% for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values()|sort(attribute='name') -%} + if proto.meta.address.subpackage == api.subpackage_view %} +{% for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', -{% endfor -%} +{% endfor %} {% for enum in proto.enums.values()|sort(attribute='name') - if proto.meta.address.subpackage == api.subpackage_view -%} + if proto.meta.address.subpackage == api.subpackage_view %} '{{ enum.name }}', -{% endfor -%}{% endfor -%} -{% endfilter -%} -{% endfilter -%} +{% endfor %}{% endfor %} +{% endfilter %} +{% endfilter %} ) -{% endif -%} {# lazy import #} +{% endif %} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 index 322d19bcc7e1..8ccc6a5747d1 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} + +{% if opts.lazy_import %} {# lazy import #} import importlib import sys @@ -11,22 +12,20 @@ if sys.version_info < (3, 7): _lazy_type_to_package_map = { # Message types -{%- for _, message in api.top_level_messages|dictsort 
%} +{% for _, message in api.top_level_messages|dictsort %} '{{ message.name }}': '{{ message.ident.package|join('.') }}.types.{{ message.ident.module }}', -{%- endfor %} - +{% endfor %} # Enum types -{%- for _, enum in api.top_level_enums|dictsort %} +{% for _, enum in api.top_level_enums|dictsort %} '{{ enum.name }}': '{{ enum.ident.package|join('.') }}.types.{{enum.ident.module }}', -{%- endfor %} - +{% endfor %} {# TODO(yon-mg): add rest transport service once I know what this is #} # Client classes and transports -{%- for _, service in api.services|dictsort %} +{% for _, service in api.services|dictsort %} '{{ service.client_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}', '{{ service.transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', '{{ service.grpc_transport_name }}': '{{ service.meta.address.package|join('.') }}.services.{{ service.meta.address.module }}.transports', -{%- endfor %} +{% endfor %} } @@ -47,67 +46,67 @@ def __getattr__(name): # Requires Python >= 3.7 def __dir__(): return globals().get('__all__') or __getattr__('__all__') -{% else -%} {# do not use lazy import #} +{% else %} {# do not use lazy import #} {# Import subpackages. -#} -{% filter sort_lines -%} -{% for subpackage in api.subpackages.keys() -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }} import {{ subpackage }} -{% endfor -%} +{% filter sort_lines %} +{% for subpackage in api.subpackages.keys() %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }} import {{ subpackage }} +{% endfor %} {# Import services for this package. 
-#} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} -{% endfor -%} + if service.meta.address.subpackage == api.subpackage_view %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +{% endfor %} {# Import messages and enums from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. --#} +#} {# Import messages from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. 
- -#} +#} {% for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view -%} - {% for message in proto.messages.values()|sort(attribute='name') -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} -{% endfor -%} -{% for enum in proto.enums.values()|sort(attribute='name') -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} -{% endfor %}{% endfor -%} + if proto.meta.address.subpackage == api.subpackage_view %} + {% for message in proto.messages.values()|sort(attribute='name') %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +{% endfor %} +{% for enum in proto.enums.values()|sort(attribute='name') %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +{% endfor %}{% endfor %} {% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
--#} +#} __all__ = ( -{%- filter indent %} -{% filter sort_lines -%} -{% for subpackage, _ in api.subpackages|dictsort -%} +{% filter indent %} +{% filter sort_lines %} +{% for subpackage, _ in api.subpackages|dictsort %} '{{ subpackage }}', -{% endfor -%} +{% endfor %} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} + if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', -{% endfor -%} +{% endfor %} {% for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values()|sort(attribute='name') -%} + if proto.meta.address.subpackage == api.subpackage_view %} +{% for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', -{% endfor -%} +{% endfor %} {% for enum in proto.enums.values()|sort(attribute='name') - if proto.meta.address.subpackage == api.subpackage_view -%} + if proto.meta.address.subpackage == api.subpackage_view %} '{{ enum.name }}', -{% endfor -%}{% endfor -%} -{% endfilter -%} -{% endfilter -%} +{% endfor %}{% endfor %} +{% endfilter %} +{% endfilter %} ) -{% endif -%} {# lazy import #} +{% endif %} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/_base.py.j2 b/packages/gapic-generator/gapic/ads-templates/_base.py.j2 index 133cf7aa5811..35d3c9100f30 100644 --- a/packages/gapic-generator/gapic/ads-templates/_base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/_base.py.j2 @@ -2,5 +2,5 @@ {% block license %} {% include "_license.j2" %} {% endblock %} -{%- block content %} +{% block content %} {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 index b0f05d693191..6ba664e217ce 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/%name_%version/services.rst.j2 @@ -1,7 +1,7 @@ Services for {{ api.naming.long_name }} {{ api.naming.version }} API {{ '=' * (18 + api.naming.long_name|length + api.naming.version|length) }} -{% for service in api.services.values()|sort(attribute='name') -%} +{% for service in api.services.values()|sort(attribute='name') %} .. automodule:: {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} :members: :inherited-members: diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 index 1d60ece902b8..18475542c4b1 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + # # {{ api.naming.warehouse_package_name }} documentation build configuration file # diff --git a/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 index 6de840fe92bc..7c420761d94c 100644 --- a/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/ads-templates/examples/feature_fragments.j2 @@ -52,7 +52,7 @@ There is a little, but not enough for it to be important because {% do input_parameters.append(request.single.input_parameter) %} {% endif %} {% endfor %} -{{ input_parameters|join(", ") -}} +{{ input_parameters|join(", ") }} {% endwith %} {% endmacro %} @@ -89,7 +89,7 @@ for {{ statement.variable }} in {{ statement.collection|coerce_response_name }}: {% endmacro %} {% macro render_map_loop(statement) %} - {# At least one of key and value exist; validated in python #} +{# At 
least one of key and value exist; validated in python #} {% if "key" not in statement %} for {{ statement.value }} in {{ statement.map|coerce_response_name }}.values(): {% elif "value" not in statement %} @@ -111,21 +111,21 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {% macro dispatch_statement(statement, indentation=0) %} {# Each statement is a dict with a single key/value pair #} -{% if "print" in statement -%} +{% if "print" in statement %} {{ render_print(statement["print"])|indent(width=indentation, first=True) }} -{% elif "define" in statement -%} +{% elif "define" in statement %} {{ render_define(statement["define"])|indent(width=indentation, first=True) }} -{% elif "comment" in statement -%} +{% elif "comment" in statement %} {{ render_comment(statement["comment"])|indent(width=indentation, first=True) }} -{% elif "loop" in statement -%} - {% with loop = statement["loop"] -%} - {% if "collection" in loop -%} +{% elif "loop" in statement %} + {% with loop = statement["loop"] %} + {% if "collection" in loop %} {{ render_collection_loop(loop)|indent(width=indentation, first=True) }} - {% else -%} + {% else %} {{ render_map_loop(loop)|indent(width=indentation, first=True) }} - {% endif -%} - {% endwith -%} -{% elif "write_file" in statement -%} + {% endif %} + {% endwith %} +{% elif "write_file" in statement %} {{ render_write_file(statement["write_file"])|indent(indentation, first=True) }} {% endif %} {% endmacro %} @@ -135,9 +135,9 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {# to be the correct enum from the right module, if necessary. 
#} {# Python is also responsible for verifying that each input parameter is unique,#} {# no parameter is a reserved keyword #} - {% if attr.input_parameter %} +{% if attr.input_parameter %} # {{ attr.input_parameter }} = {{ attr.value }} - {% if attr.value_is_file %} +{% if attr.value_is_file %} with open({{ attr.input_parameter }}, "rb") as f: {{ base_name }}["{{ attr.field }}"] = f.read() {% else %} @@ -150,67 +150,66 @@ with open({{ attr.input_parameter }}, "rb") as f: {% macro render_request_setup(full_request) %} {% for parameter_block in full_request.request_list if parameter_block.body %} -{% if parameter_block.pattern -%} +{% if parameter_block.pattern %} {# This is a resource-name patterned lookup parameter #} -{% with formals = [] -%} -{% for attr in parameter_block.body -%} -{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) -%} -{% endfor -%} +{% with formals = [] %} +{% for attr in parameter_block.body %} +{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) %} +{% endfor %} {{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) -{% endwith -%} -{% else -%} {# End resource name construction #} +{% endwith %} +{% else %}{# End resource name construction #} {{ parameter_block.base }} = {} {% for attr in parameter_block.body %} {{ render_request_attr(parameter_block.base, attr) }} {% endfor %} -{% endif -%} +{% endif %} {% endfor %} -{% if not full_request.flattenable -%} +{% if not full_request.flattenable %} request = { {% for parameter in full_request.request_list %} '{{ parameter.base }}': {{ parameter.base if parameter.body else parameter.single }}, -{% endfor -%} -} -{% endif -%} +{% endfor %}} +{% endif %} {% endmacro %} {% macro render_request_params(request) %} - {# Provide the top level parameters last and as keyword params #} - {% with params = [] -%} - {% for r in request if r.body -%} - {% do params.append(r.base) -%} - {% endfor -%} - {% 
for r in request if r.single -%} - {% do params.append("%s=%s"|format(r.base, r.single.value)) -%} - {% endfor -%} -{{ params|join(", ") -}} - {% endwith -%} +{# Provide the top level parameters last and as keyword params #} + {% with params = [] %} + {% for r in request if r.body %} + {% do params.append(r.base) %} + {% endfor %} + {% for r in request if r.single %} + {% do params.append("%s=%s"|format(r.base, r.single.value)) %} + {% endfor %} +{{ params|join(", ") }} + {% endwith %} {% endmacro %} {% macro render_request_params_unary(request) %} - {# Provide the top level parameters last and as keyword params #} - {% if request.flattenable -%} - {% with params = [] -%} - {% for r in request.request_list -%} - {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) -%} - {% endfor -%} -{{ params|join(", ") -}} - {% endwith -%} - {% else -%} +{# Provide the top level parameters last and as keyword params #} + {% if request.flattenable %} + {% with params = [] %} + {% for r in request.request_list %} + {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) %} + {% endfor %} +{{ params|join(", ") }} + {% endwith %} + {% else %} request=request - {% endif -%} + {% endif %} {% endmacro %} {% macro render_method_call(sample, calling_form, calling_form_enum) %} - {# Note: this doesn't deal with enums or unions #} +{# Note: this doesn't deal with enums or unions #} {% if calling_form in [calling_form_enum.RequestStreamingBidi, - calling_form_enum.RequestStreamingClient] -%} -client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim -}}]) -{% else -%} {# TODO: deal with flattening #} + calling_form_enum.RequestStreamingClient] %} +client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim }}]) +{% else %}{# TODO: deal with flattening #} {# TODO: set up client streaming once some questions are answered #} -client.{{ sample.rpc|snake_case }}({{ 
render_request_params_unary(sample.request)|trim -}}) -{% endif -%} +client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim }}) +{% endif %} {% endmacro %} {# Setting up the method invocation is the responsibility of the caller: #} @@ -262,15 +261,15 @@ def main(): import argparse parser = argparse.ArgumentParser() -{% with arg_list = [] -%} -{% for request in full_request.request_list if request.body -%} +{% with arg_list = [] %} +{% for request in full_request.request_list if request.body %} {% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default={{ attr.value }}) -{% do arg_list.append("args." + attr.input_parameter) -%} -{% endfor -%} -{% endfor -%} +{% do arg_list.append("args." + attr.input_parameter) %} +{% endfor %} +{% endfor %} {% for request in full_request.request_list if request.single and request.single.input_parameter %} parser.add_argument("--{{ request.single.input_parameter }}", type=str, diff --git a/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 index f054e2f2f0d4..05ff8e1ee20c 100644 --- a/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/examples/sample.py.j2 @@ -16,6 +16,7 @@ {% extends "_base.py.j2" %} {% block content %} + {# Input parameters: sample #} {# callingForm #} {% import "examples/feature_fragments.j2" as frags %} @@ -29,7 +30,7 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} {# also need calling form #} -def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input_params(sample.request)|trim -}}): +def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): 
"""{{ sample.description }}""" client = {{ service.client_name }}( @@ -37,12 +38,12 @@ def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input transport="grpc", ) - {{ frags.render_request_setup(sample.request)|indent }} -{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} - {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent -}} -{% endwith %} + {{ frags.render_request_setup(sample.request) }} +{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent }} +{% endwith %} # [END {{ sample.id }}] -{{ frags.render_main_block(sample.rpc, sample.request) }} -{%- endblock %} +{{ frags.render_main_block(sample.rpc, sample.request) }} +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 36dcd2b2e1ec..484c437e5c35 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} + import os import nox # type: ignore @@ -31,10 +32,10 @@ def mypy(session): session.install('.') session.run( 'mypy', - {%- if api.naming.module_namespace %} + {% if api.naming.module_namespace %} '{{ api.naming.module_namespace[0] }}', - {%- else %} + {% else %} '{{ api.naming.versioned_module_name }}', - {%- endif %} + {% endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 index 9c4afb35c87a..1bd2ec79c3e0 100644 --- a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -1,6 +1,7 @@ #! /usr/bin/env python3 {% extends '_base.py.j2' %} {% block content %} + import argparse import os import libcst as cst @@ -25,14 +26,14 @@ def partition( class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - {% with all_methods = [] -%} - {% for service in api.services.values() %}{% for method in service.methods.values() -%} - {% do all_methods.append(method) -%} - {% endfor %}{% endfor -%} + {% with all_methods = [] %} + {% for service in api.services.values() %}{% for method in service.methods.values() %} + {% do all_methods.append(method) %} + {% endfor %}{% endfor %} METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - {% for method in all_methods|sort(attribute='name')|unique(attribute='name') -%} + {% for method in all_methods|sort(attribute='name')|unique(attribute='name') %} '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), - {% endfor -%} + {% endfor %} } {% endwith %} diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 111ce8fb4009..559bc570014b 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -1,18 +1,19 @@ {% extends '_base.py.j2' %} {% block content %} + import setuptools # type: ignore setuptools.setup( name='{{ api.naming.warehouse_package_name }}', version='0.0.1', - {% if api.naming.namespace -%} + {% if api.naming.namespace %} packages=setuptools.PEP420PackageFinder.find(), namespace_packages={{ api.naming.namespace_packages }}, - {% else -%} + {% else %} packages=setuptools.find_packages(), - {% endif -%} + {% endif %} platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( @@ -20,9 +21,9 @@ 
setuptools.setup( 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 1.15.0', - {%- if api.requires_package(('google', 'iam', 'v1')) %} + {% if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', - {%- endif %} + {% endif %} ), python_requires='>=3.7',{# Lazy import requires module-level getattr #} setup_requires=[ diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a3888503e65c..e507bb573afc 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} + import os from unittest import mock @@ -10,7 +11,7 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule {# Import the service itself as well as every proto module that it imports. 
-#} -{% filter sort_lines -%} +{% filter sort_lines %} from google import auth from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError @@ -19,19 +20,19 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from google.api_core import client_options from google.api_core import grpc_helpers -{% if service.has_lro -%} +{% if service.has_lro %} from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 -{% endif -%} +{% endif %} from google.api_core import gapic_v1 -{% for method in service.methods.values() -%} +{% for method in service.methods.values() %} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') - or ref_type.ident.python_import.package == ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') -%} + or ref_type.ident.python_import.package == ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') %} {{ ref_type.ident.python_import }} -{% endfor -%} -{% endfor -%} +{% endfor %} +{% endfor %} {% endfilter %} @@ -69,7 +70,9 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(): client = {{ service.client_name }}.from_service_account_info(info) assert client.transport._credentials == creds - {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + {% if service.host %} + assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% endif %} def test_{{ service.client_name|snake_case }}_from_service_account_file(): @@ -82,7 +85,9 @@ def test_{{ 
service.client_name|snake_case }}_from_service_account_file(): client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds - {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + {% if service.host %} + assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% endif %} def test_{{ service.client_name|snake_case }}_get_transport_class(): @@ -246,7 +251,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): ) -{% for method in service.methods.values() -%} +{% for method in service.methods.values() %} def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), @@ -265,25 +270,26 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
- {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message')%}{% if not field.oneof or field.proto3_optional %} + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, - {% endif %}{%- endfor %} - {#- This is a hack to only pick one field #} - {%- for oneof_fields in method.output.oneof_fields().values() %} + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} {% with field = oneof_fields[0] %} {{ field.name }}={{ field.mock_value }}, - {%- endwith %} - {%- endfor %} + {% endwith %} + {% endfor %} ) - {% endif -%} + {% endif %} {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) {% else %} @@ -300,28 +306,29 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m {% endif %} # Establish that the response is the type that we expect. 
- {% if method.void -%} + {% if method.void %} assert response is None - {% elif method.lro -%} + {% elif method.lro %} assert isinstance(response, future.Future) - {% elif method.server_streaming -%} + {% elif method.server_streaming %} for message in response: assert isinstance(message, {{ method.output.ident }}) - {% else -%} + {% else %} {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response {% endif %} assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + {% for field in method.output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} - {% else -%} + {% else %} assert response.{{ field.name }} == {{ field.mock_value }} - {% endif -%} - {% endif -%} {# end oneof/optional #} + {% endif %} + {% endif %}{# end oneof/optional #} {% endfor %} {% endif %} @@ -340,21 +347,21 @@ def test_{{ method.name|snake_case }}_field_headers(): # a field header. Set these to a non-empty value. request = {{ method.input.ident }}() - {%- for field_header in method.field_headers %} + {% for field_header in method.field_headers %} request.{{ field_header }} = '{{ field_header }}/value' - {%- endfor %} + {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.{{ method.name|snake_case }}), '__call__') as call: - {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} client.{{ method.name|snake_case }}(request) @@ -370,8 +377,8 @@ def test_{{ method.name|snake_case }}_field_headers(): 'x-goog-request-params', '{% for field_header in method.field_headers -%} {{ field_header }}={{ field_header }}/value - {%- if not loop.last %}&{% endif -%} - {%- endfor %}', + {%- if not loop.last %}&{% endif %} + {%- endfor -%}', ) in kw['metadata'] {% endif %} @@ -385,19 +392,19 @@ def test_{{ method.name|snake_case }}_from_dict(): type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. - {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} response = client.{{ method.name|snake_case }}(request={ - {%- for field in method.input.fields.values() %} + {% for field in method.input.fields.values() %} '{{ field.name }}': {{ field.mock_value }}, - {%- endfor %} + {% endfor %} } ) call.assert_called() @@ -415,41 +422,41 @@ def test_{{ method.name|snake_case }}_flattened(): type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
- {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.{{ method.name|snake_case }}( - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' -%} + {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' %} assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' -%} + {% elif field.ident|string() == 'duration.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% else -%} + {% else %} assert args[0].{{ key }} == {{ field.mock_value }} {% endif %} {% endif %}{% endfor %} - {%- for oneofs in method.flattened_oneof_fields().values() %} - {%- with field = oneofs[-1] %} + {% for oneofs in method.flattened_oneof_fields().values() %} + {% with field = oneofs[-1] %} assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} - {%- endwith %} - {%- endfor %} + {% endwith %} + {% endfor %} def test_{{ method.name|snake_case 
}}_flattened_error(): @@ -462,9 +469,9 @@ def test_{{ method.name|snake_case }}_flattened_error(): with pytest.raises(ValueError): client.{{ method.name|snake_case }}( {{ method.input.ident }}(), - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) {% endif %} @@ -509,17 +516,17 @@ def test_{{ method.name|snake_case }}_pager(): ) metadata = () - {% if method.field_headers -%} + {% if method.field_headers %} metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - {%- for field_header in method.field_headers %} - {%- if not method.client_streaming %} + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} ('{{ field_header }}', ''), - {%- endif %} - {%- endfor %} + {% endif %} + {% endfor %} )), ) - {% endif -%} + {% endif %} pager = client.{{ method.name|snake_case }}(request={}) assert pager._metadata == metadata @@ -575,7 +582,7 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): assert response.raw_page is response {% endif %} {#- method.paged_result_field #} -{% endfor -%} {#- method in methods #} +{% endfor %} {#- method in methods #} def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. @@ -639,15 +646,15 @@ def test_{{ service.name|snake_case }}_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - {% for method in service.methods.values() -%} + {% for method in service.methods.values() %} '{{ method.name|snake_case }}', - {% endfor -%} + {% endfor %} ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) - {% if service.has_lro -%} + {% if service.has_lro %} # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): @@ -670,9 +677,9 @@ def test_{{ service.name|snake_case }}_auth_adc(): adc.return_value = (credentials.AnonymousCredentials(), None) {{ service.client_name }}() adc.assert_called_once_with(scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} )) @@ -683,14 +690,14 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(): adc.return_value = (credentials.AnonymousCredentials(), None) transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk") adc.assert_called_once_with(scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} )) def test_{{ service.name|snake_case }}_host_no_port(): - {% with host = (service.host|default('localhost', true)).split(':')[0] -%} + {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), @@ -700,7 +707,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): def test_{{ service.name|snake_case }}_host_with_port(): - {% with host = (service.host|default('localhost', true)).split(':')[0] -%} + {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), @@ -753,9 
+760,9 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s credentials=cred, credentials_file=None, scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ), ssl_credentials=mock_ssl_cred, quota_project_id=None, @@ -796,9 +803,9 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( credentials=mock_cred, credentials_file=None, scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ), ssl_credentials=mock_ssl_cred, quota_project_id=None, @@ -810,7 +817,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel -{% if service.has_lro -%} +{% if service.has_lro %} def test_{{ service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), @@ -827,12 +834,12 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client -{% endif -%} +{% endif %} -{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} -{% for message in service.resource_messages|sort(attribute="resource_type") -%} +{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") %} +{% for message in service.resource_messages|sort(attribute="resource_type") %} def test_{{ message.resource_type|snake_case }}_path(): - {% for arg in message.resource_path_args -%} + {% for arg in message.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @@ -842,7 +849,7 @@ def test_{{ message.resource_type|snake_case }}_path(): def test_parse_{{ message.resource_type|snake_case }}_path(): expected = { - {% for arg in message.resource_path_args -%} + {% for arg in message.resource_path_args %} "{{ arg }}": "{{ molluscs.next() }}", {% endfor %} } @@ -852,10 +859,10 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): actual = {{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path(path) assert expected == actual -{% endfor -%} -{% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} +{% endfor %} +{% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): - {% for arg in resource_msg.message_type.resource_path_args -%} + {% for arg in resource_msg.message_type.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} expected = "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg 
}}={{ arg }}, {% endfor %}) @@ -865,7 +872,7 @@ def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): expected = { - {% for arg in resource_msg.message_type.resource_path_args -%} + {% for arg in resource_msg.message_type.resource_path_args %} "{{ arg }}": "{{ molluscs.next() }}", {% endfor %} } @@ -875,8 +882,8 @@ def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_p actual = {{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path) assert expected == actual -{% endfor -%} {# common resources#} -{% endwith -%} {# cycler #} +{% endfor %} {# common resources#} +{% endwith %} {# cycler #} def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 index 158545d6c4d1..2ed725a826b3 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 @@ -1,8 +1,9 @@ {% extends "_base.py.j2" %} {% block content %} -{% if opts.lazy_import -%} {# lazy import #} + +{% if opts.lazy_import %} {# lazy import #} import pytest - + def test_module_level_imports(): expected_names = [] @@ -11,23 +12,23 @@ def test_module_level_imports(): {% for message in api.top_level_messages.values() %} from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.module_name }} import {{ message.name }} expected_names.append({{ message.name }}.__name__) - {%- endfor %} + {% endfor %} {% if api.top_level_enums %}# Enum types{% endif %} - {%- for enum in api.top_level_enums.values() %} + {% for enum in 
api.top_level_enums.values() %} from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.module_name }} import {{ enum.name }} expected_names.append({{ enum.name }}.__name__) - {%- endfor %} + {% endfor %} # Client and transport classes - {%- for service in api.services.values() %} + {% for service in api.services.values() %} from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.client_name }} expected_names.append({{ service.client_name}}.__name__) from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.transport_name }} expected_names.append({{ service.transport_name }}.__name__) from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.grpc_transport_name }} expected_names.append({{ service.grpc_transport_name }}.__name__) - {%- endfor %} + {% endfor %} expected_names.sort() from {{ api.naming.module_namespace|join('.') }} import {{ api.naming.module_name }} @@ -47,23 +48,23 @@ def test_versionsed_module_level_imports(): {% for message in api.top_level_messages.values() %} from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.versioned_module_name }} import {{ message.name }} expected_names.append({{ message.name }}.__name__) - {%- endfor %} + {% endfor %} {% if api.top_level_enums %}# Enum types{% endif %} - {%- for enum in api.top_level_enums.values() %} + {% for enum in api.top_level_enums.values() %} from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.versioned_module_name }} import {{ enum.name }} expected_names.append({{ enum.name }}.__name__) - {%- endfor %} + {% endfor %} # Client and transport classes - {%- for service in api.services.values() %} + {% for service in api.services.values() %} from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.client_name }} expected_names.append({{ service.client_name}}.__name__) from {{ 
api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.transport_name }} expected_names.append({{ service.transport_name }}.__name__) from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.grpc_transport_name }} expected_names.append({{ service.grpc_transport_name }}.__name__) - {%- endfor %} + {% endfor %} expected_names.sort() from {{ api.naming.module_namespace|join('.') }}.{{ api.naming.module_name }} import {{ api.naming.version }} @@ -73,7 +74,7 @@ def test_versionsed_module_level_imports(): # Verify the logic for handling non-existant names with pytest.raises(ImportError): from {{ api.naming.module_namespace|join('.' )}}.{{ api.naming.versioned_module_name }} import GiantSquid - - -{% endif -%} {# lazy import #} + + +{% endif %} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 2a63028a4467..461bb84bcbd2 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -49,6 +49,8 @@ def __init__(self, opts: Options) -> None: loader=jinja2.FileSystemLoader(searchpath=opts.templates), undefined=jinja2.StrictUndefined, extensions=["jinja2.ext.do"], + trim_blocks=True, + lstrip_blocks=True, ) # Add filters which templates require. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index ae721dedf041..f706229c89a7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -1,71 +1,73 @@ {% extends '_base.py.j2' %} {% block content %} + {# Import subpackages. 
-#} -{% filter sort_lines -%} -{% for subpackage in api.subpackages.keys() -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }} import {{ subpackage }} -{% endfor -%} +{% filter sort_lines %} +{% for subpackage in api.subpackages.keys() %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }} import {{ subpackage }} +{% endfor %} -{# Import services for this package. -#} +{# Import services for this package. #} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} -{%- if 'grpc' in opts.transport %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.async_client import {{ service.async_client_name }} -{%- endif %} -{% endfor -%} + if service.meta.address.subpackage == api.subpackage_view %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +{% if 'grpc' in opts.transport %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.async_client import {{ service.async_client_name }} +{% endif %} +{% endfor %} {# Import messages and enums from each proto. 
It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. --#} +#} {# Import messages from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. - -#} + #} {% for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view -%} - {% for message in proto.messages.values()|sort(attribute='name') -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} -{% endfor -%} -{% for enum in proto.enums.values()|sort(attribute='name') -%} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif -%} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} -{% endfor %}{% endfor -%} + if proto.meta.address.subpackage == api.subpackage_view %} + {% for message in proto.messages.values()|sort(attribute='name') %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +{% endfor %} +{% for enum in proto.enums.values()|sort(attribute='name') %} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} + {{- api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +{% endfor %}{% endfor %} {% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
--#} +#} + __all__ = ( {%- filter indent %} -{% filter sort_lines -%} -{% for subpackage, _ in api.subpackages|dictsort -%} +{% filter sort_lines %} +{% for subpackage, _ in api.subpackages|dictsort %} '{{ subpackage }}', -{% endfor -%} +{% endfor %} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} + if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} '{{ service.async_client_name }}', - {%- endif %} -{% endfor -%} + {% endif %} +{% endfor %} {% for proto in api.protos.values()|sort(attribute='module_name') - if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values()|sort(attribute='name') -%} + if proto.meta.address.subpackage == api.subpackage_view %} +{% for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', -{% endfor -%} +{% endfor %} {% for enum in proto.enums.values()|sort(attribute='name') - if proto.meta.address.subpackage == api.subpackage_view -%} + if proto.meta.address.subpackage == api.subpackage_view %} '{{ enum.name }}', -{% endfor -%}{% endfor -%} -{% endfilter -%} -{% endfilter -%} +{% endfor %}{% endfor %} +{% endfilter %} +{% endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 217cb20e58a6..9cbfff88b209 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -1,34 +1,35 @@ {% extends '_base.py.j2' %} {% block content %} + {# Import subpackages. -#} -{% for subpackage, _ in api.subpackages|dictsort -%} +{% for subpackage, _ in api.subpackages|dictsort %} from . 
import {{ subpackage }} -{% endfor -%} +{% endfor %} {# Import services for this package. -#} -{% filter sort_lines -%} +{% filter sort_lines %} {% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view -%} + if service.meta.address.subpackage == api.subpackage_view %} from .services.{{ service.name|snake_case }} import {{ service.client_name }} -{% endfor -%} -{% endfilter -%} +{% endfor %} +{% endfilter %} {# Import messages and enums from each proto. It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. -#} -{% filter sort_lines -%} +{% filter sort_lines %} {% for proto in api.protos.values() - if proto.meta.address.subpackage == api.subpackage_view -%} -{% for message in proto.messages.values() -%} + if proto.meta.address.subpackage == api.subpackage_view %} +{% for message in proto.messages.values() %} from .types.{{ proto.module_name }} import {{ message.name }} -{% endfor -%} -{% for enum in proto.enums.values() -%} +{% endfor %} +{% for enum in proto.enums.values() %} from .types.{{ proto.module_name }} import {{ enum.name }} -{% endfor -%} -{% endfor -%} +{% endfor %} +{% endfor %} {% endfilter %} {# Define __all__. @@ -36,23 +37,23 @@ from .types.{{ proto.module_name }} import {{ enum.name }} them again. 
-#} __all__ = ( - {%- filter sort_lines %} - {%- for subpackage in api.subpackages.keys() %} + {% filter sort_lines %} + {% for subpackage in api.subpackages.keys() %} '{{ subpackage }}', - {%- endfor %} - {%- for service in api.services.values() + {% endfor %} + {% for service in api.services.values() if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', - {%- endfor %} - {%- for proto in api.protos.values() + {% endfor %} + {% for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view %} - {%- for message in proto.messages.values() %} + {% for message in proto.messages.values() %} '{{ message.name }}', - {%- endfor %} - {%- for enum in proto.enums.values() %} + {% endfor %} + {% for enum in proto.enums.values() %} '{{ enum.name }}', - {%- endfor %} - {%- endfor %} - {%- endfilter %} + {% endfor %} + {% endfor %} + {% endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 index e0112041c3a0..7f0b61b14498 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/__init__.py.j2 @@ -1,15 +1,16 @@ {% extends '_base.py.j2' %} {% block content %} + from .client import {{ service.client_name }} -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} from .async_client import {{ service.async_client_name }} -{%- endif %} +{% endif %} __all__ = ( '{{ service.client_name }}', - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} '{{ service.async_client_name }}', - {%- endif %} + {% endif %} ) {% endblock %} diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 6fd4e482ecfc..78a2a4ce8dfc 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + from collections import OrderedDict import functools import re @@ -14,12 +15,12 @@ from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} -{% for ref_type in method.flat_ref_types -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} +{% for ref_type in method.flat_ref_types %} {{ ref_type.ident.python_import }} -{% endfor -%} -{% endfor -%} +{% endfor %} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore @@ -39,7 +40,7 @@ class {{ service.async_client_name }}: DEFAULT_ENDPOINT = {{ service.client_name }}.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT - {% for message in service.resource_messages|sort(attribute="resource_type") -%} + {% for message in service.resource_messages|sort(attribute="resource_type") %} {{ message.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.{{ message.resource_type|snake_case }}_path) parse_{{ message.resource_type|snake_case}}_path = staticmethod({{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path) {% endfor %} @@ -139,64 +140,64 @@ class {{ service.async_client_name }}: ) 
- {% for method in service.methods.values() -%} - {% if not method.server_streaming %}async {% endif -%}def {{ method.name|snake_case }}(self, - {%- if not method.client_streaming %} + {% for method in service.methods.values() %} + {%+ if not method.server_streaming %}async {% endif %}def {{ method.name|snake_case }}(self, + {% if not method.client_streaming %} request: {{ method.input.ident }} = None, *, - {% for field in method.flattened_fields.values() -%} + {% for field in method.flattened_fields.values() %} {{ field.name }}: {{ field.ident }} = None, - {% endfor -%} - {%- else %} + {% endfor %} + {% else %} requests: AsyncIterator[{{ method.input.ident }}] = None, *, - {% endif -%} + {% endif %} retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - {%- if not method.server_streaming %} + {% if not method.server_streaming %} ) -> {{ method.client_output_async.ident }}: - {%- else %} + {% else %} ) -> Awaitable[AsyncIterable[{{ method.client_output_async.ident }}]]: - {%- endif %} + {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: - {%- if not method.client_streaming %} + {% if not method.client_streaming %} request (:class:`{{ method.input.ident.sphinx }}`): - The request object.{{ ' ' -}} + The request object.{{ ' ' }} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {% for key, field in method.flattened_fields.items() -%} + {% for key, field in method.flattened_fields.items() %} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the ``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- {% endfor -%} - {%- else %} + {% endfor %} + {% else %} requests (AsyncIterator[`{{ method.input.ident.sphinx }}`]): - The request object AsyncIterator.{{ ' ' -}} + The request object AsyncIterator.{{ ' ' }} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {%- endif %} + {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - {%- if not method.void %} + {% if not method.void %} Returns: - {%- if not method.server_streaming %} + {% if not method.server_streaming %} {{ method.client_output_async.ident.sphinx }}: - {%- else %} + {% else %} AsyncIterable[{{ method.client_output_async.ident.sphinx }}]: - {%- endif %} + {% endif %} {{ method.client_output_async.meta.doc|rst(width=72, indent=16, source_format='rst') }} - {%- endif %} + {% endif %} """ - {%- if not method.client_streaming %} + {% if not method.client_streaming %} # Create or coerce a protobuf request object. - {% if method.flattened_fields -%} + {% if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]) @@ -204,90 +205,90 @@ class {{ service.async_client_name }}: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - {% endif -%} - {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} + {% endif %} + {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
if isinstance(request, dict): request = {{ method.input.ident }}(**request) - {% if method.flattened_fields -%}{# Cross-package req and flattened fields #} + {% if method.flattened_fields %}{# Cross-package req and flattened fields #} elif not request: request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) - {% endif -%}{# Cross-package req and flattened fields #} - {%- else %} + {% endif %}{# Cross-package req and flattened fields #} + {% else %} request = {{ method.input.ident }}(request) {% endif %} {# different request package #} - {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields and method.input.ident.package == method.ident.package -%} + {# Vanilla python protobuf wrapper types cannot _set_ repeated fields #} + {% if method.flattened_fields and method.input.ident.package == method.ident.package %} # If we have keyword arguments corresponding to fields on the # request, apply these. 
- {% endif -%} - {%- for key, field in method.flattened_fields.items() if not field.repeated and method.input.ident.package == method.ident.package %} + {% endif %} + {% for key, field in method.flattened_fields.items() if not field.repeated and method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} - {%- endfor %} + {% endfor %} {# Map-y fields can be _updated_, however #} - {%- for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} + {% for key, field in method.flattened_fields.items() if field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.update({{ field.name }}) - {%- endfor %} - {# And list-y fields can be _extended_ -#} - {%- for key, field in method.flattened_fields.items() if field.repeated and not field.map and method.input.ident.package == method.ident.package %} + {% endfor %} + {# And list-y fields can be _extended_ #} + {% for key, field in method.flattened_fields.items() if field.repeated and not field.map and method.input.ident.package == method.ident.package %} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) - {%- endfor %} - {%- endif %} + {% endfor %} + {% endif %} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.{{ method.name|snake_case }}, - {%- if method.retry %} + {% if method.retry %} default_retry=retries.Retry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, - {%- endfor %} + {% endfor %} ), deadline={{ method.timeout }}, ), - {%- endif %} + {% endif %} default_timeout={{ method.timeout }}, client_info=DEFAULT_CLIENT_INFO, ) - {%- if method.field_headers %} + {% if method.field_headers %} # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - {%- for field_header in method.field_headers %} - {%- if not method.client_streaming %} + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} ('{{ field_header }}', request.{{ field_header }}), - {%- endif %} - {%- endfor %} + {% endif %} + {% endfor %} )), ) - {%- endif %} + {% endif %} # Send the request. - {% if not method.void %}response = {% endif %} - {%- if not method.server_streaming %}await {% endif %}rpc( - {%- if not method.client_streaming %} + {%+ if not method.void %}response = {% endif %} + {% if not method.server_streaming %}await {% endif %}rpc( + {% if not method.client_streaming %} request, - {%- else %} + {% else %} requests, - {%- endif %} + {% endif %} retry=retry, timeout=timeout, metadata=metadata, ) - {%- if method.lro %} + {% if method.lro %} # Wrap the response in an operation future. 
response = {{ method.client_output_async.ident.module_alias or method.client_output_async.ident.module }}.from_gapic( @@ -296,7 +297,7 @@ class {{ service.async_client_name }}: {{ method.lro.response_type.ident }}, metadata_type={{ method.lro.metadata_type.ident }}, ) - {%- elif method.paged_result_field %} + {% elif method.paged_result_field %} # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. @@ -306,12 +307,12 @@ class {{ service.async_client_name }}: response=response, metadata=metadata, ) - {%- endif %} - {%- if not method.void %} + {% endif %} + {% if not method.void %} # Done; return the response. return response - {%- endif %} + {% endif %} {{ '\n' }} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index ded93da10bad..ae69e60a50d0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + from collections import OrderedDict from distutils import util import os @@ -18,25 +19,25 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} -{% for ref_type in method.flat_ref_types -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} +{% for ref_type in method.flat_ref_types %} {{ ref_type.ident.python_import }} -{% endfor -%} -{% endfor -%} +{% endfor %} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: 
ignore from google.iam.v1 import policy_pb2 as policy # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} from .transports.grpc import {{ service.grpc_transport_name }} from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} -{%- endif %} -{%- if 'rest' in opts.transport %} +{% endif %} +{% if 'rest' in opts.transport %} from .transports.rest import {{ service.name }}RestTransport -{%- endif %} +{% endif %} class {{ service.client_name }}Meta(type): @@ -47,13 +48,13 @@ class {{ service.client_name }}Meta(type): objects. """ _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} _transport_registry['grpc'] = {{ service.grpc_transport_name }} _transport_registry['grpc_asyncio'] = {{ service.grpc_asyncio_transport_name }} - {%- endif %} - {%- if 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} _transport_registry['rest'] = {{ service.name }}RestTransport - {%- endif %} + {% endif %} def get_transport_class(cls, label: str = None, @@ -109,6 +110,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") DEFAULT_ENDPOINT = {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) @@ -160,7 +162,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return self._transport - {% for message in service.resource_messages|sort(attribute="resource_type") -%} + {% for message in service.resource_messages|sort(attribute="resource_type") %} @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor 
%}) -> str: """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" @@ -173,8 +175,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} - {% endfor %} {# resources #} - {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} + {% endfor %}{# resources #} + {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} @staticmethod def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" @@ -186,7 +188,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): m = re.match(r"{{ resource_msg.message_type.path_regex_str }}", path) return m.groupdict() if m else {} - {% endfor %} {# common resources #} + {% endfor %}{# common resources #} def __init__(self, *, credentials: Optional[credentials.Credentials] = None, @@ -292,64 +294,64 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) - {% for method in service.methods.values() -%} + {% for method in service.methods.values() %} def {{ method.name|snake_case }}(self, - {%- if not method.client_streaming %} + {% if not method.client_streaming %} request: {{ method.input.ident }} = None, *, - {% for field in method.flattened_fields.values() -%} + {% for field in method.flattened_fields.values() %} {{ field.name }}: {{ field.ident }} = None, - {% endfor -%} - {%- else %} + {% endfor %} + {% else %} requests: Iterator[{{ method.input.ident }}] = None, *, - {% endif -%} + {% endif %} retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - {%- if not method.server_streaming %} + {% if not method.server_streaming %} ) -> {{ 
method.client_output.ident }}: - {%- else %} + {% else %} ) -> Iterable[{{ method.client_output.ident }}]: - {%- endif %} + {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} Args: - {%- if not method.client_streaming %} + {% if not method.client_streaming %} request ({{ method.input.ident.sphinx }}): - The request object.{{ ' ' -}} + The request object.{{ ' ' }} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {% for key, field in method.flattened_fields.items() -%} + {% for key, field in method.flattened_fields.items() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the ``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - {% endfor -%} - {%- else %} + {% endfor %} + {% else %} requests (Iterator[{{ method.input.ident.sphinx }}]): - The request object iterator.{{ ' ' -}} + The request object iterator.{{ ' ' }} {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {%- endif %} + {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. - {%- if not method.void %} + {% if not method.void %} Returns: - {%- if not method.server_streaming %} + {% if not method.server_streaming %} {{ method.client_output.ident.sphinx }}: - {%- else %} + {% else %} Iterable[{{ method.client_output.ident.sphinx }}]: - {%- endif %} + {% endif %} {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} - {%- endif %} + {% endif %} """ - {%- if not method.client_streaming %} + {% if not method.client_streaming %} # Create or coerce a protobuf request object. 
- {% if method.flattened_fields -%} + {% if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]) @@ -357,8 +359,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - {% endif -%} - {% if method.input.ident.package != method.ident.package -%} {# request lives in a different package, so there is no proto wrapper #} + {% endif %} + {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. @@ -366,7 +368,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): elif not request: # Null request, just make one. request = {{ method.input.ident }}() - {%- else %} + {% else %} # Minor optimization to avoid making a copy if the user passes # in a {{ method.input.ident }}. # There's no risk of modifying the input as we've already verified @@ -376,57 +378,57 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} {# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields and method.input.ident.package == method.ident.package -%} + {% if method.flattened_fields and method.input.ident.package == method.ident.package %} # If we have keyword arguments corresponding to fields on the # request, apply these. 
- {% endif -%} - {%- for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} + {% endif %} + {% for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: request.{{ key }} = {{ field.name }} - {%- endfor %} + {% endfor %} {# Map-y fields can be _updated_, however #} - {%- for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} - {%- if field.map %} {# map implies repeated, but repeated does NOT imply map#} + {% for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} + {% if field.map %} {# map implies repeated, but repeated does NOT imply map#} if {{ field.name }}: request.{{ key }}.update({{ field.name }}) - {%- else %} - {# And list-y fields can be _extended_ -#} + {% else %} + {# And list-y fields can be _extended_ #} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) - {%- endif %} {# field.map #} - {%- endfor %} {# method.flattened_fields.items() #} - {%- endif %} {# method.client_streaming #} + {% endif %} {# field.map #} + {% endfor %} {# method.flattened_fields.items() #} + {% endif %} {# method.client_streaming #} # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] - {%- if method.field_headers %} + {% if method.field_headers %} # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - {%- for field_header in method.field_headers %} - {%- if not method.client_streaming %} + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} ('{{ field_header }}', request.{{ field_header }}), - {%- endif %} - {%- endfor %} + {% endif %} + {% endfor %} )), ) - {%- endif %} + {% endif %} # Send the request. - {% if not method.void %}response = {% endif %}rpc( - {%- if not method.client_streaming %} + {%+ if not method.void %}response = {% endif %}rpc( + {% if not method.client_streaming %} request, - {%- else %} + {% else %} requests, - {%- endif %} + {% endif %} retry=retry, timeout=timeout, metadata=metadata, ) - {%- if method.lro %} + {% if method.lro %} # Wrap the response in an operation future. response = {{ method.client_output.ident.module_alias or method.client_output.ident.module }}.from_gapic( @@ -435,7 +437,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ method.lro.response_type.ident }}, metadata_type={{ method.lro.metadata_type.ident }}, ) - {%- elif method.paged_result_field %} + {% elif method.paged_result_field %} # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. @@ -445,12 +447,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): response=response, metadata=metadata, ) - {%- endif %} - {%- if not method.void %} + {% endif %} + {% if not method.void %} # Done; return the response. 
return response - {%- endif %} + {% endif %} {{ '\n' }} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index ca3cc8d40e02..53ce39b6e2aa 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -1,22 +1,26 @@ {% extends '_base.py.j2' %} {% block content %} -{% for method in service.methods.values() | selectattr('paged_result_field') -%} -{% if loop.first -%} + +{% for method in service.methods.values() | selectattr('paged_result_field') %} +{% if loop.first %} {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. - -#} + #} from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional -{% filter sort_lines -%} -{% for method in service.methods.values() | selectattr('paged_result_field') -%} +{% filter sort_lines %} +{% for method in service.methods.values() | selectattr('paged_result_field') %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% if not method.paged_result_field.is_primitive %}{{ method.paged_result_field.message.ident.python_import }}{% endif %} +{% if not method.paged_result_field.is_primitive %} +{{ method.paged_result_field.message.ident.python_import }} +{% endif %} {% endfor %} -{% endfilter -%} +{% endfilter %} {% endif %} + class {{ method.name }}Pager: """A pager for iterating through ``{{ method.name|snake_case }}`` requests. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index bd7981387f22..107e2bd4e872 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -1,38 +1,39 @@ {% extends '_base.py.j2' %} {% block content %} + from collections import OrderedDict from typing import Dict, Type from .base import {{ service.name }}Transport -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} from .grpc import {{ service.name }}GrpcTransport from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport -{%- endif %} -{%- if 'rest' in opts.transport %} +{% endif %} +{% if 'rest' in opts.transport %} from .rest import {{ service.name }}RestTransport -{%- endif %} +{% endif %} # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} _transport_registry['grpc'] = {{ service.name }}GrpcTransport _transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport -{%- endif %} -{%- if 'rest' in opts.transport %} +{% endif %} +{% if 'rest' in opts.transport %} _transport_registry['rest'] = {{ service.name }}RestTransport -{%- endif %} +{% endif %} __all__ = ( '{{ service.name }}Transport', - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} '{{ service.name }}GrpcTransport', '{{ service.name }}GrpcAsyncIOTransport', - {%- endif %} - {%- if 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} '{{ service.name }}RestTransport', - {%- endif %} + {% endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index f5d9ee5dc658..fe1549742ccf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import packaging.version @@ -11,16 +12,16 @@ import google.api_core # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -{%- if service.has_lro %} +{% if service.has_lro %} from google.api_core import operations_v1 # type: ignore -{%- endif %} +{% endif %} from google.auth import credentials # type: ignore -{% filter sort_lines -%} -{% for 
method in service.methods.values() -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% endfor -%} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore @@ -52,9 +53,9 @@ class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" AUTH_SCOPES = ( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ) DEFAULT_HOST: str = {% if service.host %}'{{ service.host }}'{% else %}{{ '' }}{% endif %} @@ -73,7 +74,7 @@ class {{ service.name }}Transport(abc.ABC): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -165,22 +166,22 @@ class {{ service.name }}Transport(abc.ABC): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { - {% for method in service.methods.values() -%} + {% for method in service.methods.values() %} self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( self.{{ method.name|snake_case }}, - {%- if method.retry %} + {% if method.retry %} default_retry=retries.Retry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {%- for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} exceptions.{{ ex.__name__ }}, - {%- endfor %} + {% endfor %} ), deadline={{ method.timeout }}, ), - {%- endif %} + {% endif %} default_timeout={{ method.timeout }}, client_info=client_info, ), @@ -188,14 +189,14 @@ class {{ service.name }}Transport(abc.ABC): } - {%- if service.has_lro %} + {% if service.has_lro %} @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" raise NotImplementedError() - {%- endif %} - {%- for method in service.methods.values() %} + {% endif %} + {% for method in service.methods.values() %} @property def {{ method.name|snake_case }}(self) -> Callable[ @@ -205,7 +206,7 @@ class {{ service.name }}Transport(abc.ABC): Awaitable[{{ method.output.ident }}] ]]: raise NotImplementedError() - {%- endfor %} + {% endfor %} {% if opts.add_iam_methods %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index e7df35a1df2c..a0bb3de73a19 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,13 +1,14 @@ {% extends '_base.py.j2' %} {% block content %} + import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore -{%- if service.has_lro %} +{% if service.has_lro %} from google.api_core import operations_v1 # type: ignore -{%- endif %} +{% endif %} from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore @@ -15,11 +16,11 @@ from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% endfor -%} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore @@ -59,7 +60,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none @@ -104,9 +105,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - {%- if service.has_lro %} + {% if service.has_lro %} self._operations_client = None - {%- endif %} + {% endif %} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -219,7 +220,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """Return the channel designed to connect to this service. """ return self._grpc_channel - {%- if service.has_lro %} + {% if service.has_lro %} @property def operations_client(self) -> operations_v1.OperationsClient: @@ -236,17 +237,17 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Return the client from cache. return self._operations_client - {%- endif %} - {%- for method in service.methods.values() %} + {% endif %} + {% for method in service.methods.values() %} @property def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: - r"""Return a callable for the {{- ' ' -}} + r"""Return a callable for the {{ ' ' }} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} - {{- ' ' -}} method over gRPC. + {{ ' ' }} method over gRPC. 
{{ method.meta.doc|rst(width=72, indent=8) }} @@ -267,7 +268,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) return self._stubs['{{ method.name|snake_case }}'] - {%- endfor %} + {% endfor %} {% if opts.add_iam_methods %} @property @@ -353,4 +354,4 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): __all__ = ( '{{ service.name }}GrpcTransport', ) -{%- endblock -%} +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index accb46fc91d3..06c28cbb25e8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -1,14 +1,15 @@ {% extends '_base.py.j2' %} {% block content %} + import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -{%- if service.has_lro %} +{% if service.has_lro %} from google.api_core import operations_v1 # type: ignore -{%- endif %} +{% endif %} from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -17,11 +18,11 @@ import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore -{% filter sort_lines -%} -{% for method in service.methods.values() -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} {{ 
method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% endfor -%} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore @@ -105,7 +106,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -151,9 +152,9 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - {%- if service.has_lro %} + {% if service.has_lro %} self._operations_client = None - {%- endif %} + {% endif %} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -223,7 +224,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): """ # Return the channel from cache. return self._grpc_channel - {%- if service.has_lro %} + {% if service.has_lro %} @property def operations_client(self) -> operations_v1.OperationsAsyncClient: @@ -240,17 +241,17 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # Return the client from cache. 
return self._operations_client - {%- endif %} - {%- for method in service.methods.values() %} + {% endif %} + {% for method in service.methods.values() %} @property def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], Awaitable[{{ method.output.ident }}]]: - r"""Return a callable for the {{- ' ' -}} + r"""Return a callable for the {{ ' ' }} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=40, indent=8) }} - {{- ' ' -}} method over gRPC. + {{ ' ' }} method over gRPC. {{ method.meta.doc|rst(width=72, indent=8) }} @@ -271,7 +272,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) return self._stubs['{{ method.name|snake_case }}'] - {%- endfor %} + {% endfor %} {% if opts.add_iam_methods %} @property @@ -358,4 +359,4 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', ) -{%- endblock -%} +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 4f30997ce46d..5614a6b4a89f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -1,12 +1,13 @@ {% extends '_base.py.j2' %} {% block content %} + import warnings from typing import Callable, Dict, Optional, Sequence, Tuple {% if service.has_lro %} from google.api_core import operations_v1 -{%- endif %} +{% endif %} from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import 
credentials # type: ignore @@ -17,11 +18,11 @@ import grpc # type: ignore from google.auth.transport.requests import AuthorizedSession {# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} -{% filter sort_lines -%} -{% for method in service.methods.values() -%} +{% filter sort_lines %} +{% for method in service.methods.values() %} {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} -{% endfor -%} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore @@ -56,7 +57,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{- ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -82,7 +83,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object + # credentials object super().__init__( host=host, credentials=credentials, @@ -91,12 +92,12 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) {%- if service.has_lro %} self._operations_client = None - {%- endif %} + {% endif %} if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - {%- if service.has_lro %} + {% if service.has_lro %} @property def operations_client(self) -> operations_v1.OperationsClient: @@ -125,18 +126,18 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # Return the client from cache. return self._operations_client - {%- endif %} - {%- for method in service.methods.values() %} - {%- if method.http_opt %} + {% endif %} + {% for method in service.methods.values() %} + {% if method.http_opt %} def {{ method.name|snake_case }}(self, request: {{ method.input.ident }}, *, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - r"""Call the {{- ' ' -}} + r"""Call the {{ ' ' }} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} - {{- ' ' -}} method over HTTP. + {{ ' ' }} method over HTTP. Args: request (~.{{ method.input.ident }}): @@ -144,12 +145,12 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {{ method.input.meta.doc|rst(width=72, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
- {%- if not method.void %} + {% if not method.void %} Returns: ~.{{ method.output.ident }}: {{ method.output.meta.doc|rst(width=72, indent=16) }} - {%- endif %} + {% endif %} """ {# TODO(yon-mg): refactor when implementing grpc transcoding @@ -158,30 +159,30 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): - make sure dotted nested fields preserved - format url and send the request #} - {%- if 'body' in method.http_opt %} + {% if 'body' in method.http_opt %} # Jsonify the request body - {%- if method.http_opt['body'] != '*' %} + {% if method.http_opt['body'] != '*' %} body = {{ method.input.fields[method.http_opt['body']].type.ident }}.to_json( request.{{ method.http_opt['body'] }}, including_default_value_fields=False, use_integers_for_enums=False ) - {%- else %} + {% else %} body = {{ method.input.ident }}.to_json( request, use_integers_for_enums=False ) - {%- endif %} - {%- endif %} + {% endif %} + {% endif %} {# TODO(yon-mg): Write helper method for handling grpc transcoding url #} # TODO(yon-mg): need to handle grpc transcoding and parse url correctly # current impl assumes basic case of grpc transcoding url = 'https://{host}{{ method.http_opt['url'] }}'.format( host=self._host, - {%- for field in method.path_params %} + {% for field in method.path_params %} {{ field }}=request.{{ method.input.get_field(field).name }}, - {%- endfor %} + {% endfor %} ) {# TODO(yon-mg): move all query param logic out of wrappers into here to handle @@ -190,9 +191,9 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields # not required for GCE query_params = { - {%- for field in method.query_params | sort%} + {% for field in method.query_params | sort%} '{{ field|camel_case }}': request.{{ field }}, - {%- endfor %} + {% endfor %} } # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here # discards default values @@ -203,23 
+204,23 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # Send the request {% if not method.void %}response = {% endif %}self._session.{{ method.http_opt['verb'] }}( url - {%- if 'body' in method.http_opt %}, + {% if 'body' in method.http_opt %}, data=body, - {%- endif %} + {% endif %} ) # Raise requests.exceptions.HTTPError if the status code is >= 400 response.raise_for_status() - {%- if not method.void %} + {% if not method.void %} # Return the response return {{ method.output.ident }}.from_json( response.content, ignore_unknown_fields=True ) - {%- endif %} - {%- endif %} - {%- endfor %} + {% endif %} + {% endif %} + {% endfor %} __all__ = ( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 index 9fd9353481a4..f4e4be617d69 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 @@ -1,39 +1,40 @@ {% extends "_base.py.j2" %} -{% block content -%} +{% block content %} + {% with p = proto.disambiguate('proto') %} -{% if proto.messages|length or proto.all_enums|length -%} +{% if proto.messages|length or proto.all_enums|length %} import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} -{% filter sort_lines -%} -{% for import_ in proto.python_modules -%} +{% filter sort_lines %} +{% for import_ in proto.python_modules %} {{ import_ }} -{% endfor -%} +{% endfor %} {% endfilter %} __protobuf__ = {{ p }}.module( package='{{ '.'.join(proto.meta.address.package) }}', - {% if api.naming.proto_package != '.'.join(proto.meta.address.package) -%} + {% if api.naming.proto_package != '.'.join(proto.meta.address.package) %} marshal='{{ api.naming.proto_package }}', - {% endif -%} + {% endif %} manifest={ - {%- for enum in proto.enums.values() %} 
+ {% for enum in proto.enums.values() %} '{{ enum.name }}', - {%- endfor %} - {%- for message in proto.messages.values() %} + {% endfor %} + {% for message in proto.messages.values() %} '{{ message.name }}', - {%- endfor %} + {% endfor %} }, ) -{% for enum in proto.enums.values() -%} +{% for enum in proto.enums.values() %} {% include '%namespace/%name_%version/%sub/types/_enum.py.j2' with context %} {% endfor %} -{% for message in proto.messages.values() -%} +{% for message in proto.messages.values() %} {% include "%namespace/%name_%version/%sub/types/_message.py.j2" with context %} {% endfor %} {% endwith %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index 6f331c3ad97a..d5c74e888424 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -1,22 +1,26 @@ {% extends '_base.py.j2' %} {% block content %} -{%- for _, proto in api.protos|dictsort if proto.file_to_generate and proto.messages %} + +{% for _, proto in api.protos|dictsort if proto.file_to_generate and proto.messages %} from .{{proto.module_name }} import ( - {%- for _, message in proto.messages|dictsort %} - {{message.name }}, {% endfor %} - {%- for _, enum in proto.enums|dictsort %} - {{ enum.name }}, {% endfor %} -){% endfor %} + {% for _, message in proto.messages|dictsort %} + {{message.name }}, + {% endfor %} + {% for _, enum in proto.enums|dictsort %} + {{ enum.name }}, + {% endfor %} +) +{% endfor %} __all__ = ( - {%- for _, proto in api.protos|dictsort if proto.file_to_generate %} - {%- for _, message in proto.messages|dictsort %} + {% for _, proto in api.protos|dictsort if proto.file_to_generate %} + {% for _, message in proto.messages|dictsort %} '{{ message.name }}', - {%- endfor -%} - {%- for 
_, enum in proto.enums|dictsort %} + {% endfor %} + {% for _, enum in proto.enums|dictsort %} '{{ enum.name }}', - {%- endfor -%} - {%- endfor %} + {% endfor %} + {% endfor %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 index 73994a158c5d..8921af307062 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 @@ -1,9 +1,9 @@ class {{ enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" - {% if enum.enum_pb.HasField("options") -%} + {% if enum.enum_pb.HasField("options") %} _pb_options = {{ enum.options_dict }} - {% endif -%} - {% for enum_value in enum.values -%} + {% endif %} + {% for enum_value in enum.values %} {{ enum_value.name }} = {{ enum_value.number }} - {% endfor -%} + {% endfor %} {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 8524d638efec..73ee46cf2487 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -2,27 +2,27 @@ class {{ message.name }}({{ p }}.Message): r"""{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} Attributes: - {%- for field in message.fields.values() %} + {% for field in message.fields.values() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(indent=12, nl=False) }} - {%- endfor %} - {% endif -%} + {% endfor %} + {% endif %} """ {# Iterate over nested enums. 
-#} - {% for enum in message.nested_enums.values() -%} - {% filter indent %} - {%- include '%namespace/%name_%version/%sub/types/_enum.py.j2' %} + {% for enum in message.nested_enums.values() %} + {% filter indent(first=True) %} + {% include '%namespace/%name_%version/%sub/types/_enum.py.j2' %} {% endfilter %} - {% endfor -%} + {% endfor %} {# Iterate over nested messages. -#} - {% for submessage in message.nested_messages.values() -%} - {% if not submessage.map -%} - {% with message = submessage %}{% filter indent %} - {%- include '%namespace/%name_%version/%sub/types/_message.py.j2' %} + {% for submessage in message.nested_messages.values() %} + {% if not submessage.map %} + {% with message = submessage %}{% filter indent(first=True) %} + {% include '%namespace/%name_%version/%sub/types/_message.py.j2' %} {% endfilter %}{% endwith %} {% endif %} - {% endfor -%} + {% endfor %} {% if "next_page_token" in message.fields.values()|map(attribute='name') %} @property @@ -31,21 +31,31 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {# Iterate over fields. 
-#} - {% for field in message.fields.values() -%} - {% if field.map -%} - {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] -%} + {% for field in message.fields.values() %} + {% if field.map %} + {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] %} {{ field.name }} = {{ p }}.MapField( - {{- p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }} - {%- if value_field.enum or value_field.message %}, + {{ p }}.{{ key_field.proto_type }}, + {{ p }}.{{ value_field.proto_type }}, + number={{ field.number }} + {% if value_field.enum or value_field.message %} {{ value_field.proto_type.lower() }}={{ value_field.type.ident.rel(message.ident) }}, - {% endif %}) {# enum or message#} - {% endwith -%} - {% else -%} {# field.map #} + {% endif %}{# enum or message#} + ) + {% endwith %} + {% else %}{# field.map #} {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( - {{- p }}.{{ field.proto_type }}, number={{ field.number }}{% if field.proto3_optional %}, optional=True{% elif field.oneof %}, oneof='{{ field.oneof }}'{% endif %} - {%- if field.enum or field.message %}, + {{ p }}.{{ field.proto_type }}, + number={{ field.number }}, + {% if field.proto3_optional %} + optional=True, + {% elif field.oneof %} + oneof='{{ field.oneof }}', + {% endif %} + {% if field.enum or field.message %} {{ field.proto_type.lower() }}={{ field.type.ident.rel(message.ident) }}, - {% endif %}) {# enum or message #} - {% endif -%} {# field.map #} - {% endfor -%} {# for field in message.fields.values#} + {% endif %}{# enum or message #} + ) + {% endif %}{# field.map #} + {% endfor %}{# for field in message.fields.values#} {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index d4f1b215a691..7987f050301b 100644 --- 
a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + # # {{ api.naming.warehouse_package_name }} documentation build configuration file # diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 6de840fe92bc..4702d596b694 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -26,7 +26,6 @@ There is a little, but not enough for it to be important because {# response handling macros #} {% macro sample_header(sample, calling_form) %} - # DO NOT EDIT! This is a generated sample ("{{ calling_form }}", "{{ sample.id }}") # # To install the latest published package dependency, execute the following: @@ -111,21 +110,21 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {% macro dispatch_statement(statement, indentation=0) %} {# Each statement is a dict with a single key/value pair #} -{% if "print" in statement -%} +{% if "print" in statement %} {{ render_print(statement["print"])|indent(width=indentation, first=True) }} -{% elif "define" in statement -%} +{% elif "define" in statement %} {{ render_define(statement["define"])|indent(width=indentation, first=True) }} -{% elif "comment" in statement -%} +{% elif "comment" in statement %} {{ render_comment(statement["comment"])|indent(width=indentation, first=True) }} -{% elif "loop" in statement -%} - {% with loop = statement["loop"] -%} - {% if "collection" in loop -%} +{% elif "loop" in statement %} + {% with loop = statement["loop"] %} + {% if "collection" in loop %} {{ render_collection_loop(loop)|indent(width=indentation, first=True) }} - {% else -%} + {% else %} {{ render_map_loop(loop)|indent(width=indentation, first=True) }} - {% endif -%} 
- {% endwith -%} -{% elif "write_file" in statement -%} + {% endif %} + {% endwith %} +{% elif "write_file" in statement %} {{ render_write_file(statement["write_file"])|indent(indentation, first=True) }} {% endif %} {% endmacro %} @@ -135,82 +134,81 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {# to be the correct enum from the right module, if necessary. #} {# Python is also responsible for verifying that each input parameter is unique,#} {# no parameter is a reserved keyword #} - {% if attr.input_parameter %} +{% if attr.input_parameter %} # {{ attr.input_parameter }} = {{ attr.value }} - {% if attr.value_is_file %} +{% if attr.value_is_file %} with open({{ attr.input_parameter }}, "rb") as f: {{ base_name }}["{{ attr.field }}"] = f.read() - {% else %} +{% else %} {{ base_name }}["{{ attr.field }}"] = {{ attr.input_parameter }} - {% endif %} - {% else %} +{% endif %} +{% else %} {{ base_name }}["{{ attr.field }}"] = {{ attr.value }} - {% endif %} +{% endif %} {% endmacro %} {% macro render_request_setup(full_request) %} {% for parameter_block in full_request.request_list if parameter_block.body %} -{% if parameter_block.pattern -%} +{% if parameter_block.pattern %} {# This is a resource-name patterned lookup parameter #} -{% with formals = [] -%} -{% for attr in parameter_block.body -%} -{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) -%} -{% endfor -%} +{% with formals = [] %} +{% for attr in parameter_block.body %} +{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) %} +{% endfor %} {{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) -{% endwith -%} -{% else -%} {# End resource name construction #} +{% endwith %} +{% else %}{# End resource name construction #} {{ parameter_block.base }} = {} {% for attr in parameter_block.body %} {{ render_request_attr(parameter_block.base, attr) }} {% endfor %} -{% endif -%} 
+{% endif %} {% endfor %} -{% if not full_request.flattenable -%} +{% if not full_request.flattenable %} request = { {% for parameter in full_request.request_list %} '{{ parameter.base }}': {{ parameter.base if parameter.body else parameter.single }}, -{% endfor -%} -} -{% endif -%} +{% endfor %}} +{% endif %} {% endmacro %} {% macro render_request_params(request) %} - {# Provide the top level parameters last and as keyword params #} - {% with params = [] -%} - {% for r in request if r.body -%} - {% do params.append(r.base) -%} - {% endfor -%} - {% for r in request if r.single -%} - {% do params.append("%s=%s"|format(r.base, r.single.value)) -%} - {% endfor -%} +{# Provide the top level parameters last and as keyword params #} + {% with params = [] %} + {% for r in request if r.body %} + {% do params.append(r.base) %} + {% endfor %} + {% for r in request if r.single %} + {% do params.append("%s=%s"|format(r.base, r.single.value)) %} + {% endfor %} {{ params|join(", ") -}} - {% endwith -%} + {% endwith %} {% endmacro %} {% macro render_request_params_unary(request) %} - {# Provide the top level parameters last and as keyword params #} - {% if request.flattenable -%} - {% with params = [] -%} - {% for r in request.request_list -%} - {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) -%} - {% endfor -%} -{{ params|join(", ") -}} - {% endwith -%} - {% else -%} +{# Provide the top level parameters last and as keyword params #} + {% if request.flattenable %} + {% with params = [] %} + {% for r in request.request_list %} + {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) %} + {% endfor %} +{{ params|join(", ") }} + {% endwith %} + {% else %} request=request - {% endif -%} + {% endif %} {% endmacro %} {% macro render_method_call(sample, calling_form, calling_form_enum) %} - {# Note: this doesn't deal with enums or unions #} +{# Note: this doesn't deal with enums or unions #} {% if calling_form in 
[calling_form_enum.RequestStreamingBidi, - calling_form_enum.RequestStreamingClient] -%} -client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim -}}]) -{% else -%} {# TODO: deal with flattening #} + calling_form_enum.RequestStreamingClient] %} +client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim }}]) +{% else %}{# TODO: deal with flattening #} {# TODO: set up client streaming once some questions are answered #} -client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim -}}) -{% endif -%} +client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim }}) +{% endif %} {% endmacro %} {# Setting up the method invocation is the responsibility of the caller: #} @@ -262,15 +260,15 @@ def main(): import argparse parser = argparse.ArgumentParser() -{% with arg_list = [] -%} -{% for request in full_request.request_list if request.body -%} +{% with arg_list = [] %} +{% for request in full_request.request_list if request.body %} {% for attr in request.body if attr.input_parameter %} parser.add_argument("--{{ attr.input_parameter }}", type=str, default={{ attr.value }}) -{% do arg_list.append("args." + attr.input_parameter) -%} -{% endfor -%} -{% endfor -%} +{% do arg_list.append("args." 
+ attr.input_parameter) %} +{% endfor %} +{% endfor %} {% for request in full_request.request_list if request.single and request.single.input_parameter %} parser.add_argument("--{{ request.single.input_parameter }}", type=str, diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index f054e2f2f0d4..4cdb81e47ca1 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -16,6 +16,7 @@ {% extends "_base.py.j2" %} {% block content %} + {# Input parameters: sample #} {# callingForm #} {% import "examples/feature_fragments.j2" as frags %} @@ -29,7 +30,7 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} {# also need calling form #} -def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input_params(sample.request)|trim -}}): +def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): """{{ sample.description }}""" client = {{ service.client_name }}( @@ -37,12 +38,12 @@ def sample_{{ frags.render_method_name(sample.rpc)|trim -}}({{ frags.print_input transport="grpc", ) - {{ frags.render_request_setup(sample.request)|indent }} -{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} - {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent -}} -{% endwith %} + {{ frags.render_request_setup(sample.request)|indent }} +{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent }} +{% endwith %} # [END {{ sample.id }}] -{{ frags.render_main_block(sample.rpc, sample.request) 
}} -{%- endblock %} +{{ frags.render_main_block(sample.rpc, sample.request) -}} +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 0b3e167a7301..87b5ef2a008f 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} + import os import pathlib import shutil diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 index d040ba812676..32cc47c49ace 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -1,6 +1,7 @@ #! /usr/bin/env python3 {% extends '_base.py.j2' %} {% block content %} + import argparse import os import libcst as cst @@ -25,14 +26,14 @@ def partition( class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - {% with all_methods = [] -%} - {% for service in api.services.values() %}{% for method in service.methods.values() -%} - {% do all_methods.append(method) -%} - {% endfor %}{% endfor -%} + {% with all_methods = [] %} + {% for service in api.services.values() %}{% for method in service.methods.values() %} + {% do all_methods.append(method) %} + {% endfor %}{% endfor %} METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - {% for method in all_methods|sort(attribute='name')|unique(attribute='name') -%} + {% for method in all_methods|sort(attribute='name')|unique(attribute='name') %} '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), - {% endfor -%} + {% endfor %} {% if opts.add_iam_methods %} 'get_iam_policy': ('resource', 
'options', ), 'set_iam_policy': ('resource', 'policy', ), diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 637c4fa97d28..9c97573fc00f 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} + import io import os import setuptools # type: ignore @@ -17,12 +18,12 @@ setuptools.setup( name='{{ api.naming.warehouse_package_name }}', version=version, long_description=readme, - {% if api.naming.namespace -%} + {% if api.naming.namespace %} packages=setuptools.PEP420PackageFinder.find(), namespace_packages={{ api.naming.namespace_packages }}, - {% else -%} + {% else %} packages=setuptools.PEP420PackageFinder.find(), - {% endif -%} + {% endif %} platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f7ae145b74a6..4df54e2d7873 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} + import os import mock import packaging.version @@ -11,21 +12,21 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -{%- if 'rest' in opts.transport %} +{% if 'rest' in opts.transport %} from requests import Response from requests.sessions import Session -{%- endif %} +{% endif %} -{# Import the service itself as well as every proto module that it imports. -#} -{% filter sort_lines -%} +{# Import the service itself as well as every proto module that it imports. 
#} +{% filter sort_lines %} from google import auth from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.async_client_name }} -{%- endif %} +{% endif %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _GOOGLE_AUTH_VERSION from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _API_CORE_VERSION @@ -33,19 +34,19 @@ from google.api_core import client_options from google.api_core import exceptions from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -{% if service.has_lro -%} +{% if service.has_lro %} from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 -{% endif -%} +{% endif %} from google.api_core import gapic_v1 -{% for method in service.methods.values() -%} +{% for method in service.methods.values() %} {% for ref_type in method.ref_types if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') - or ref_type.ident.python_import.package 
== ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') -%} + or ref_type.ident.python_import.package == ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') %} {{ ref_type.ident.python_import }} -{% endfor -%} -{% endfor -%} +{% endfor %} +{% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore from google.iam.v1 import options_pb2 as options # type: ignore @@ -104,9 +105,9 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [ {{ service.client_name }}, - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} {{ service.async_client_name }}, - {%- endif %} + {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() @@ -117,14 +118,16 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(client_c assert client.transport._credentials == creds assert isinstance(client, client_class) - {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + {% if service.host %} + assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% endif %} @pytest.mark.parametrize("client_class", [ {{ service.client_name }}, - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} {{ service.async_client_name }}, - {%- endif %} + {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -138,15 +141,17 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(client_c assert client.transport._credentials == creds assert isinstance(client, client_class) - {% if service.host %}assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}'{% endif %} + {% 
if service.host %} + assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% endif %} def test_{{ service.client_name|snake_case }}_get_transport_class(): transport = {{ service.client_name }}.get_transport_class() available_transports = [ - {%- for transport_name in opts.transport %} + {% for transport_name in opts.transport %} transports.{{ service.name }}{{ transport_name.capitalize() }}Transport, - {%- endfor %} + {% endfor %} ] assert transport in available_transports @@ -155,17 +160,17 @@ def test_{{ service.client_name|snake_case }}_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {%- elif 'rest' in opts.transport %} + {% elif 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), - {%- endif %} + {% endif %} ]) @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} @mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) -{%- endif %} +{% endif %} def test_{{ service.client_name|snake_case }}_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: @@ -262,12 +267,12 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans {% elif 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "true"), ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "false"), - {%- endif %} + {% endif %} ]) @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} @mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) -{%- endif %} +{% endif %} @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -341,12 +346,12 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp @pytest.mark.parametrize("client_class,transport_class,transport_name", [ - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {%- elif 'rest' in opts.transport %} + {% elif 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), - {%- endif %} + {% endif %} ]) def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. 
@@ -367,12 +372,12 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {%- elif 'rest' in opts.transport %} + {% elif 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), - {%- endif %} + {% endif %} ]) def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name): # Check the case credentials file is provided. @@ -391,7 +396,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) -{%- if 'grpc' in opts.transport %} +{% if 'grpc' in opts.transport %} def test_{{ service.client_name|snake_case }}_client_options_from_dict(): @@ -409,10 +414,10 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) -{%- endif %} +{% endif %} -{% for method in service.methods.values() if 'grpc' in opts.transport -%} +{% for method in service.methods.values() if 'grpc' in opts.transport %} def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), @@ -431,25 +436,26 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
- {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message')%}{% if not field.oneof or field.proto3_optional %} + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, - {% endif %}{%- endfor %} - {#- This is a hack to only pick one field #} - {%- for oneof_fields in method.output.oneof_fields().values() %} + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} {% with field = oneof_fields[0] %} {{ field.name }}={{ field.mock_value }}, - {%- endwith %} - {%- endfor %} + {% endwith %} + {% endfor %} ) - {% endif -%} + {% endif %} {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) {% else %} @@ -466,28 +472,29 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m {% endif %} # Establish that the response is the type that we expect. 
- {% if method.void -%} + {% if method.void %} assert response is None - {% elif method.lro -%} + {% elif method.lro %} assert isinstance(response, future.Future) - {% elif method.server_streaming -%} + {% elif method.server_streaming %} for message in response: assert isinstance(message, {{ method.output.ident }}) - {% else -%} + {% else %} {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response {% endif %} assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + {% for field in method.output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} - {% else -%} + {% else %} assert response.{{ field.name }} == {{ field.mock_value }} - {% endif -%} - {% endif -%} {# end oneof/optional #} + {% endif %} + {% endif %}{# end oneof/optional #} {% endfor %} {% endif %} @@ -496,7 +503,7 @@ def test_{{ method.name|snake_case }}_from_dict(): test_{{ method.name|snake_case }}(request_type=dict) -{% if not method.client_streaming -%} +{% if not method.client_streaming %} def test_{{ method.name|snake_case }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -517,7 +524,7 @@ def test_{{ method.name|snake_case }}_empty_call(): {% else %} assert args[0] == {{ method.input.ident }}() {% endif %} -{% endif -%} +{% endif %} @pytest.mark.asyncio @@ -539,31 +546,31 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. - {% if method.void -%} + {% if method.void %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro -%} + {% elif method.lro %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - {% elif not method.client_streaming and method.server_streaming -%} + {% elif not method.client_streaming and method.server_streaming %} call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% elif method.client_streaming and method.server_streaming -%} + {% elif method.client_streaming and method.server_streaming %} call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% else -%} - call.return_value ={{' '}} + {% else %} + call.return_value ={{ '' }} {%- if not method.client_streaming and not method.server_streaming -%} grpc_helpers_async.FakeUnaryUnaryCall {%- else -%} grpc_helpers_async.FakeStreamUnaryCall {%- endif -%}({{ method.output.ident }}( - {%- for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} + {% for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, - {%- endif %} - {%- endfor %} + {% endif %} + {% endfor %} )) - {% endif -%} + {% endif %} {% if method.client_streaming and method.server_streaming %} response = 
await client.{{ method.name|snake_case }}(iter(requests)) {% elif method.client_streaming and not method.server_streaming %} @@ -582,24 +589,25 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {% endif %} # Establish that the response is the type that we expect. - {% if method.void -%} + {% if method.void %} assert response is None - {% elif method.lro -%} + {% elif method.lro %} assert isinstance(response, future.Future) - {% elif method.server_streaming -%} + {% elif method.server_streaming %} message = await response.read() assert isinstance(message, {{ method.output.ident }}) - {% else -%} + {% else %} assert isinstance(response, {{ method.client_output_async.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') -%}{% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + {% for field in method.output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} - {% else -%} + {% else %} assert response.{{ field.name }} == {{ field.mock_value }} - {% endif -%} - {% endif -%} {# oneof/optional #} + {% endif %} + {% endif %}{# oneof/optional #} {% endfor %} {% endif %} @@ -619,21 +627,21 @@ def test_{{ method.name|snake_case }}_field_headers(): # a field header. Set these to a non-empty value. 
request = {{ method.input.ident }}() - {%- for field_header in method.field_headers %} + {% for field_header in method.field_headers %} request.{{ field_header }} = '{{ field_header }}/value' - {%- endfor %} + {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.{{ method.name|snake_case }}), '__call__') as call: - {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} client.{{ method.name|snake_case }}(request) @@ -649,8 +657,8 @@ def test_{{ method.name|snake_case }}_field_headers(): 'x-goog-request-params', '{% for field_header in method.field_headers -%} {{ field_header }}={{ field_header }}/value - {%- if not loop.last %}&{% endif -%} - {%- endfor %}', + {%- if not loop.last %}&{% endif %} + {%- endfor -%}', ) in kw['metadata'] @@ -664,22 +672,22 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): # a field header. Set these to a non-empty value. request = {{ method.input.ident }}() - {%- for field_header in method.field_headers %} + {% for field_header in method.field_headers %} request.{{ field_header }} = '{{ field_header }}/value' - {%- endfor %} + {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.{{ method.name|snake_case }}), '__call__') as call: - {% if method.void -%} + {% if method.void %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro -%} + {% elif method.lro %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall({{ method.output.ident }}()) {% endif %} await client.{{ method.name|snake_case }}(request) @@ -695,8 +703,8 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): 'x-goog-request-params', '{% for field_header in method.field_headers -%} {{ field_header }}={{ field_header }}/value - {%- if not loop.last %}&{% endif -%} - {%- endfor %}', + {%- if not loop.last %}&{% endif %} + {%- endfor -%}', ) in kw['metadata'] {% endif %} @@ -710,19 +718,19 @@ def test_{{ method.name|snake_case }}_from_dict_foreign(): type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
- {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} response = client.{{ method.name|snake_case }}(request={ - {%- for field in method.input.fields.values() %} + {% for field in method.input.fields.values() %} '{{ field.name }}': {{ field.mock_value }}, - {%- endfor %} + {% endfor %} } ) call.assert_called() @@ -740,41 +748,41 @@ def test_{{ method.name|snake_case }}_flattened(): type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. - {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.{{ method.name|snake_case }}( - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' -%} + {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' %} assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' -%} + {% elif field.ident|string() == 'duration.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% else -%} + {% else %} assert args[0].{{ key }} == {{ field.mock_value }} {% endif %} {% endif %}{% endfor %} - {%- for oneofs in method.flattened_oneof_fields().values() %} - {%- with field = oneofs[-1] %} + {% for oneofs in method.flattened_oneof_fields().values() %} + {% with field = oneofs[-1] %} assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} - {%- endwith %} - {%- endfor %} + {% endwith %} + {% endfor %} @@ -788,9 +796,9 @@ def test_{{ method.name|snake_case }}_flattened_error(): with pytest.raises(ValueError): client.{{ method.name|snake_case }}( {{ method.input.ident }}(), - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) @@ -805,61 +813,61 @@ async def test_{{ method.name|snake_case }}_flattened_async(): type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
- {% if method.void -%} + {% if method.void %} call.return_value = None - {% elif method.lro -%} + {% elif method.lro %} call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - {% if method.void -%} + {% if method.void %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro -%} + {% elif method.lro %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - {% elif not method.client_streaming and method.server_streaming -%} + {% elif not method.client_streaming and method.server_streaming %} call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - {% elif method.client_streaming and method.server_streaming -%} + {% elif method.client_streaming and method.server_streaming %} call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - {% else -%} - call.return_value ={{' '}} + {% else %} + call.return_value = {{ '' }} {%- if not method.client_streaming and not method.server_streaming -%} grpc_helpers_async.FakeUnaryUnaryCall {%- else -%} grpc_helpers_async.FakeStreamUnaryCall {%- endif -%}({{ method.output.ident }}()) - {% endif -%} + {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.{{ method.name|snake_case }}( - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' -%} + {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' %} assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' -%} + {% elif field.ident|string() == 'duration.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% else -%} + {% else %} assert args[0].{{ key }} == {{ field.mock_value }} {% endif %} {% endif %}{% endfor %} - {%- for oneofs in method.flattened_oneof_fields().values() %} - {%- with field = oneofs[-1] %} + {% for oneofs in method.flattened_oneof_fields().values() %} + {% with field = oneofs[-1] %} assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} - {%- endwith %} - {%- endfor %} + {% endwith %} + {% endfor %} @pytest.mark.asyncio @@ -873,9 +881,9 @@ async def test_{{ method.name|snake_case }}_flattened_error_async(): with pytest.raises(ValueError): await client.{{ method.name|snake_case }}( {{ method.input.ident }}(), - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) {% endif %} @@ -920,17 +928,17 @@ def test_{{ method.name|snake_case }}_pager(): ) metadata = () - {% if method.field_headers -%} + {% if method.field_headers %} metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - {%- for field_header in method.field_headers %} - {%- if not method.client_streaming %} + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} ('{{ field_header }}', ''), - {%- endif %} - {%- endfor %} + {% endif 
%} + {% endfor %} )), ) - {% endif -%} + {% endif %} pager = client.{{ method.name|snake_case }}(request={}) assert pager._metadata == metadata @@ -1076,11 +1084,11 @@ async def test_{{ method.name|snake_case }}_async_pages(): def test_{{ method.name|snake_case }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() assert response.raw_page is response -{% endif %} {#- method.paged_result_field #} +{% endif %} {# method.paged_result_field #} -{% endfor -%} {#- method in methods for grpc #} +{% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport -%} +{% for method in service.methods.values() if 'rest' in opts.transport %} def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), @@ -1097,19 +1105,19 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. 
- {% if method.void -%} + {% if method.void %} return_value = None - {% elif method.lro -%} + {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} return_value = {{ method.output.ident }}( - {%- for field in method.output.fields.values() %} + {% for field in method.output.fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) - {% endif -%} + {% endif %} # Wrap the value into a proper Response obj json_return_value = {{ method.output.ident }}.to_json(return_value) @@ -1129,18 +1137,18 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type {% endif %} # Establish that the response is the type that we expect. - {% if method.void -%} + {% if method.void %} assert response is None {% else %} assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() -%} - {% if field.field_pb.type in [1, 2] -%} {# Use approx eq for floats -#} + {% for field in method.output.fields.values() %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 -%} {# Use 'is' for bools #} + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} - {% else -%} + {% else %} assert response.{{ field.name }} == {{ field.mock_value }} - {% endif -%} + {% endif %} {% endfor %} {% endif %} @@ -1157,13 +1165,13 @@ def test_{{ method.name|snake_case }}_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # Designate an appropriate value for the returned response. 
- {% if method.void -%} + {% if method.void %} return_value = None - {% elif method.lro -%} + {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming -%} + {% elif method.server_streaming %} return_value = iter([{{ method.output.ident }}()]) - {% else -%} + {% else %} return_value = {{ method.output.ident }}() {% endif %} @@ -1176,13 +1184,13 @@ def test_{{ method.name|snake_case }}_rest_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - {%- for field in method.flattened_fields.values() if field.field_pb is msg_field_pb %} + {% for field in method.flattened_fields.values() if field.field_pb is msg_field_pb %} {{ field.name }} = {{ field.mock_value }} {% endfor %} client.{{ method.name|snake_case }}( - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {% if field.field_pb is msg_field_pb %}{{ field.name }}={{ field.name }},{% else %}{{ field.name }}={{ field.mock_value }},{% endif %} - {%- endfor %} + {% endfor %} ) # Establish that the underlying call was made with the expected @@ -1190,16 +1198,16 @@ def test_{{ method.name|snake_case }}_rest_flattened(): assert len(req.mock_calls) == 1 _, http_call, http_params = req.mock_calls[0] body = http_params.get('data') - {% for key, field in method.flattened_fields.items() -%}{%- if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' -%} + {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp.Timestamp' %} assert TimestampRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' -%} + {% elif field.ident|string() == 'duration.Duration' %} assert DurationRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} - {% else -%} 
+ {% else %} assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False, use_integers_for_enums=False) - {%- elif field.field_pb is str_field_pb %}{{ field.mock_value }} - {%- else %}str({{ field.mock_value }}) - {%- endif %} in http_call[1] + str(body) + {% elif field.field_pb is str_field_pb %}{{ field.mock_value }} + {% else %}str({{ field.mock_value }}) + {% endif %} in http_call[1] + str(body) {% endif %} {% endif %}{% endfor %} @@ -1214,9 +1222,9 @@ def test_{{ method.name|snake_case }}_rest_flattened_error(): with pytest.raises(ValueError): client.{{ method.name|snake_case }}( {{ method.input.ident }}(), - {%- for field in method.flattened_fields.values() %} + {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, - {%- endfor %} + {% endfor %} ) @@ -1296,17 +1304,17 @@ def test_{{ method.name|snake_case }}_pager(): req.side_effect = return_values metadata = () - {% if method.field_headers -%} + {% if method.field_headers %} metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - {%- for field_header in method.field_headers %} - {%- if not method.client_streaming %} + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} ('{{ field_header }}', ''), - {%- endif %} - {%- endfor %} + {% endif %} + {% endfor %} )), ) - {% endif -%} + {% endif %} pager = client.{{ method.name|snake_case }}(request={}) assert pager._metadata == metadata @@ -1338,8 +1346,8 @@ def test_{{ method.name|snake_case }}_pager(): assert page_.raw_page.next_page_token == token -{% endif %} {# paged methods #} -{% endfor -%} {#- method in methods for rest #} +{% endif %}{# paged methods #} +{% endfor %}{# method in methods for rest #} def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( @@ -1397,12 +1405,12 @@ def test_transport_get_channel(): {% endif %} @pytest.mark.parametrize("transport_class", [ - {%- if 'grpc' in opts.transport %} + {% if 'grpc' in opts.transport %} transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}, - {%- elif 'rest' in opts.transport %} + {% elif 'rest' in opts.transport %} transports.{{ service.rest_transport_name }}, - {%- endif %} + {% endif %} ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1443,20 +1451,20 @@ def test_{{ service.name|snake_case }}_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. methods = ( - {% for method in service.methods.values() -%} + {% for method in service.methods.values() %} '{{ method.name|snake_case }}', - {% endfor -%} - {%- if opts.add_iam_methods -%} + {% endfor %} + {% if opts.add_iam_methods %} 'set_iam_policy', 'get_iam_policy', 'test_iam_permissions', - {% endif -%} + {% endif %} ) for method in methods: with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) - {% if service.has_lro -%} + {% if service.has_lro %} # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): @@ -1495,11 +1503,10 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file_old_ credentials_file="credentials.json", quota_project_id="octopus", ) - load_creds.assert_called_once_with("credentials.json", - scopes=( - {%- for scope in service.oauth_scopes %} + load_creds.assert_called_once_with("credentials.json", scopes=( + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ), quota_project_id="octopus", ) @@ -1523,9 +1530,10 @@ def test_{{ service.name|snake_case }}_auth_adc(): adc.assert_called_once_with( scopes=None, 
default_scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %}), + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + quota_project_id=None, ) @@ -1584,11 +1592,10 @@ def test_{{ service.name|snake_case }}_transport_auth_adc_old_google_auth(transp with mock.patch.object(auth, "default", autospec=True) as adc: adc.return_value = (credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %}), + adc.assert_called_once_with(scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), quota_project_id="octopus", ) @@ -1614,7 +1621,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, scopes=["1", "2"] ) - {% with host = (service.host|default('localhost', true)) -%} + {% with host = (service.host|default('localhost', true)) %} create_channel.assert_called_with( "{{ host }}", credentials=creds, @@ -1653,7 +1660,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(tra adc.return_value = (creds, None) transport_class(quota_project_id="octopus") - {% with host = (service.host|default('localhost', true)) -%} + {% with host = (service.host|default('localhost', true)) %} create_channel.assert_called_with( "{{ host }}", credentials=creds, @@ -1688,7 +1695,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel_user_scopes(tran ) as create_channel: creds = credentials.AnonymousCredentials() adc.return_value = (creds, None) - {% with host = (service.host|default('localhost', true)) -%} + {% with host = (service.host|default('localhost', true)) %} transport_class(quota_project_id="octopus", scopes=["1", "2"]) @@ -1728,9 +1735,9 @@ def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtl credentials=cred, credentials_file=None, scopes=( - {%- for scope in 
service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ), ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, @@ -1767,7 +1774,7 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl {% endif %} def test_{{ service.name|snake_case }}_host_no_port(): - {% with host = (service.host|default('localhost', true)).split(':')[0] -%} + {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), @@ -1777,7 +1784,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): def test_{{ service.name|snake_case }}_host_with_port(): - {% with host = (service.host|default('localhost', true)).split(':')[0] -%} + {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), @@ -1845,9 +1852,9 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s credentials=cred, credentials_file=None, scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ), ssl_credentials=mock_ssl_cred, quota_project_id=None, @@ -1890,9 +1897,9 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( credentials=mock_cred, credentials_file=None, scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} + {% endfor %} ), ssl_credentials=mock_ssl_cred, quota_project_id=None, @@ -1904,7 +1911,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel -{% if service.has_lro -%} +{% if service.has_lro %} def test_{{ 
service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( credentials=credentials.AnonymousCredentials(), @@ -1938,13 +1945,13 @@ def test_{{ service.name|snake_case }}_grpc_lro_async_client(): # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client -{% endif -%} +{% endif %} {% endif %} {# if grpc in opts #} -{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") -%} -{% for message in service.resource_messages|sort(attribute="resource_type") -%} +{% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") %} +{% for message in service.resource_messages|sort(attribute="resource_type") %} def test_{{ message.resource_type|snake_case }}_path(): - {% for arg in message.resource_path_args -%} + {% for arg in message.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @@ -1954,7 +1961,7 @@ def test_{{ message.resource_type|snake_case }}_path(): def test_parse_{{ message.resource_type|snake_case }}_path(): expected = { - {% for arg in message.resource_path_args -%} + {% for arg in message.resource_path_args %} "{{ arg }}": "{{ molluscs.next() }}", {% endfor %} } @@ -1964,10 +1971,10 @@ def test_parse_{{ message.resource_type|snake_case }}_path(): actual = {{ service.client_name }}.parse_{{ message.resource_type|snake_case }}_path(path) assert expected == actual -{% endfor -%} -{% for resource_msg in service.common_resources.values()|sort(attribute="type_name") -%} +{% endfor %} +{% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} def test_common_{{ 
resource_msg.message_type.resource_type|snake_case }}_path(): - {% for arg in resource_msg.message_type.resource_path_args -%} + {% for arg in resource_msg.message_type.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} expected = "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @@ -1977,7 +1984,7 @@ def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): expected = { - {% for arg in resource_msg.message_type.resource_path_args -%} + {% for arg in resource_msg.message_type.resource_path_args %} "{{ arg }}": "{{ molluscs.next() }}", {% endfor %} } @@ -1987,8 +1994,8 @@ def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_p actual = {{ service.client_name }}.parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(path) assert expected == actual -{% endfor -%} {# common resources#} -{% endwith -%} {# cycler #} +{% endfor %} {# common resources#} +{% endwith %} {# cycler #} def test_client_withDEFAULT_CLIENT_INFO(): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index b652f1359ed9..0f1d98de7461 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -153,11 +153,10 @@ def sample_classify(video, location): # location = "New Zealand" classify_target["location_annotation"] = location - - + response = client.classify(classify_target=classify_target) print("Mollusc is a \\"{}\\"".format(response.taxonomy)) - + # [END %s] @@ -293,11 +292,10 @@ def sample_classify(video, location): request = { 'classify_target': classify_target, } - - + response = client.classify(request=request) print("Mollusc is a 
\\"{}\\"".format(response.taxonomy)) - + # [END %s] From ba98b827ad7614ac9dc2590e04d4972739095458 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 23 Apr 2021 13:20:24 -0600 Subject: [PATCH 0517/1339] fix: enable GAPIC metadata generation (#843) --- .../templates/%namespace/%name_%version/gapic_metadata.json.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 index edd79fda8aa0..99ae71e021b3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_metadata.json.j2 @@ -1 +1 @@ -{# {{ api.gapic_metadata_json(opts) }} #} {# TODO(dovs): This is temporarily commented out pending the addition of a flag #} + {{ api.gapic_metadata_json(opts) }} From 450cffa5f58554ae3152b1f80714dd49dfa400db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 23 Apr 2021 19:32:03 +0000 Subject: [PATCH 0518/1339] chore: release 0.44.0 (#842) :robot: I have created a release \*beep\* \*boop\* --- ## [0.44.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.3...v0.44.0) (2021-04-23) ### Features * support self-signed JWT flow for service accounts ([#774](https://www.github.com/googleapis/gapic-generator-python/issues/774)) ([89d6f35](https://www.github.com/googleapis/gapic-generator-python/commit/89d6f35c54b0a9b81c9b5f580d2e9eb87352ed93)) ### Bug Fixes * enable GAPIC metadata generation ([#843](https://www.github.com/googleapis/gapic-generator-python/issues/843)) ([697816c](https://www.github.com/googleapis/gapic-generator-python/commit/697816ce7d5b201d6ced85fadd89f9140da67b37)) --- This PR was generated with [Release 
Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0fd8cc191a4a..b010547a463f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.44.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.3...v0.44.0) (2021-04-23) + + +### Features + +* support self-signed JWT flow for service accounts ([#774](https://www.github.com/googleapis/gapic-generator-python/issues/774)) ([89d6f35](https://www.github.com/googleapis/gapic-generator-python/commit/89d6f35c54b0a9b81c9b5f580d2e9eb87352ed93)) + + +### Bug Fixes + +* enable GAPIC metadata generation ([#843](https://www.github.com/googleapis/gapic-generator-python/issues/843)) ([697816c](https://www.github.com/googleapis/gapic-generator-python/commit/697816ce7d5b201d6ced85fadd89f9140da67b37)) + ### [0.43.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.2...v0.43.3) (2021-04-12) From 856ae2cfad73f5f3219a69a24d1ee0b8f3fadab6 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 23 Apr 2021 17:46:54 -0400 Subject: [PATCH 0519/1339] chore(revert): revert preventing normalization (#844) --- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index eea88a26ded6..69bb01d55f31 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -27,7 +27,7 @@ setup( name="gapic-generator", - version=setuptools.sic(version), + version=version, license="Apache 2.0", author="Dov Shlachter", author_email="dovs@google.com", From dcb0d7e33714369d5c4e8fbf057ac237413be788 Mon Sep 
17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Tue, 27 Apr 2021 09:46:53 -0700 Subject: [PATCH 0520/1339] chore: add SECURITY.md (#847) Co-authored-by: google-cloud-policy-bot[bot] <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> --- packages/gapic-generator/SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/gapic-generator/SECURITY.md diff --git a/packages/gapic-generator/SECURITY.md b/packages/gapic-generator/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/gapic-generator/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
From c23077cba56db98bb4babc28870605b5081785de Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 27 Apr 2021 16:52:10 -0700 Subject: [PATCH 0521/1339] cleanup: sort output in generated __init__.py files better (#846) --- .../%name/%version/%sub/__init__.py.j2 | 10 ++++----- .../%namespace/%name/%version/__init__.py.j2 | 8 ++----- .../%namespace/%name/__init__.py.j2 | 6 +---- .../gapic-generator/gapic/schema/metadata.py | 6 ++--- .../templates/%namespace/%name/__init__.py.j2 | 6 +---- .../%name_%version/%sub/__init__.py.j2 | 22 +++++++------------ 6 files changed, 19 insertions(+), 39 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 index 1e1ddd49f0a4..e6a09c63fbc0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 @@ -8,23 +8,21 @@ them again. 
-#} __all__ = ( - {% filter sort_lines %} - {% for subpackage in api.subpackages.keys() %} + {% for subpackage in api.subpackages|dictsort %} '{{ subpackage }}', {% endfor %} - {% for service in api.services.values() + {% for service in api.services.values()|sort(attribute='client_name') if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', {% endfor %} - {% for proto in api.protos.values() + {% for proto in api.protos.values()|sort(attribute='name') if proto.meta.address.subpackage == api.subpackage_view %} {% for message in proto.messages.values() %} '{{ message.name }}', {% endfor %} - {% for enum in proto.enums.values() %} + {% for enum in proto.enums.values()|sort(attribute='name') %} '{{ enum.name }}', {% endfor %} {% endfor %} - {% endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index 933e55dc3451..1a7dd2732abc 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -50,8 +50,7 @@ def __dir__(): return globals().get('__all__') or __getattr__('__all__') {% else %} {# do not use lazy import #} {# Import subpackages. -#} -{% filter sort_lines %} -{% for subpackage in api.subpackages.keys() %} +{% for subpackage in api.subpackages|dictsort %} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{ api.naming.versioned_module_name }} import {{ subpackage }} {% endfor %} @@ -83,15 +82,13 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' 
from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} {% endfor %}{% endfor %} -{% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. -#} __all__ = ( {% filter indent %} -{% filter sort_lines %} -{% for subpackage in api.subpackages.keys() %} +{% for subpackage in api.subpackages|dictsort %} '{{ subpackage }}', {% endfor %} {% for service in api.services.values()|sort(attribute='name') @@ -108,7 +105,6 @@ __all__ = ( '{{ enum.name }}', {% endfor %}{% endfor %} {% endfilter %} -{% endfilter %} ) {% endif %} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 index 8ccc6a5747d1..319b6073e010 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -48,8 +48,7 @@ def __dir__(): return globals().get('__all__') or __getattr__('__all__') {% else %} {# do not use lazy import #} {# Import subpackages. -#} -{% filter sort_lines %} -{% for subpackage in api.subpackages.keys() %} +{% for subpackage in api.subpackages|dictsort %} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{- api.naming.versioned_module_name }} import {{ subpackage }} {% endfor %} @@ -81,14 +80,12 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{- api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} {% endfor %}{% endfor %} -{% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
#} __all__ = ( {% filter indent %} -{% filter sort_lines %} {% for subpackage, _ in api.subpackages|dictsort %} '{{ subpackage }}', {% endfor %} @@ -106,7 +103,6 @@ __all__ = ( '{{ enum.name }}', {% endfor %}{% endfor %} {% endfilter %} -{% endfilter %} ) {% endif %} {# lazy import #} {% endblock %} diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 0b1f6df2d50a..b1ae02228ca1 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -50,8 +50,8 @@ class Address: collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) def __eq__(self, other) -> bool: - return all([getattr(self, i) == getattr(other, i) for i - in ('name', 'module', 'module_path', 'package', 'parent')]) + return all(getattr(self, i) == getattr(other, i) + for i in ('name', 'module', 'module_path', 'package', 'parent')) def __hash__(self): # Do NOT include collisions; they are not relevant. @@ -188,7 +188,7 @@ def sphinx(self) -> str: if self.proto_package.startswith(self.api_naming.proto_package): return '.'.join(self.api_naming.module_namespace + ( self.api_naming.versioned_module_name, - ) + self.subpackage + ('types',) + self.parent + (self.name, )) + ) + self.subpackage + ('types',) + self.parent + (self.name, )) # Anything left is a standard _pb2 type return f'{self.proto_package}.{self.module}_pb2.{self.name}' diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index f706229c89a7..73b871932ade 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -2,8 +2,7 @@ {% block content %} {# Import subpackages. 
-#} -{% filter sort_lines %} -{% for subpackage in api.subpackages.keys() %} +{% for subpackage in api.subpackages|dictsort %} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{- api.naming.versioned_module_name }} import {{ subpackage }} {% endfor %} @@ -39,7 +38,6 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} {{- api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} {% endfor %}{% endfor %} -{% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. @@ -47,7 +45,6 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' __all__ = ( {%- filter indent %} -{% filter sort_lines %} {% for subpackage, _ in api.subpackages|dictsort %} '{{ subpackage }}', {% endfor %} @@ -68,6 +65,5 @@ __all__ = ( '{{ enum.name }}', {% endfor %}{% endfor %} {% endfilter %} -{% endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 9cbfff88b209..be52e37f29b8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -8,52 +8,46 @@ from . import {{ subpackage }} {% endfor %} {# Import services for this package. -#} -{% filter sort_lines %} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} from .services.{{ service.name|snake_case }} import {{ service.client_name }} {% endfor %} -{% endfilter %} {# Import messages and enums from each proto. 
It is safe to import all of the messages into the same namespace here, because protocol buffers itself enforces selector uniqueness within a proto package. -#} -{% filter sort_lines %} -{% for proto in api.protos.values() +{% for proto in api.protos.values()|sort(attribute='name') if proto.meta.address.subpackage == api.subpackage_view %} -{% for message in proto.messages.values() %} +{% for message in proto.messages.values()|sort(attribute='name') %} from .types.{{ proto.module_name }} import {{ message.name }} {% endfor %} -{% for enum in proto.enums.values() %} +{% for enum in proto.enums.values()|sort(attribute='name') %} from .types.{{ proto.module_name }} import {{ enum.name }} {% endfor %} {% endfor %} -{% endfilter %} {# Define __all__. This requires the full set of imported names, so we iterate over them again. -#} __all__ = ( - {% filter sort_lines %} - {% for subpackage in api.subpackages.keys() %} + {% for subpackage in api.subpackages|dictsort %} '{{ subpackage }}', {% endfor %} - {% for service in api.services.values() + {% for service in api.services.values()|sort(attribute='client_name') if service.meta.address.subpackage == api.subpackage_view %} '{{ service.client_name }}', {% endfor %} - {% for proto in api.protos.values() + {% for proto in api.protos.values()|sort(attribute='name') if proto.meta.address.subpackage == api.subpackage_view %} - {% for message in proto.messages.values() %} + {% for message in proto.messages.values()|sort(attribute='name') %} '{{ message.name }}', {% endfor %} - {% for enum in proto.enums.values() %} + {% for enum in proto.enums.values()|sort(attribute='name') %} '{{ enum.name }}', {% endfor %} {% endfor %} - {% endfilter %} ) {% endblock %} From cb4bf2019c00199c06ec5f1d435dfa9203106394 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 28 Apr 2021 08:54:03 -0600 Subject: [PATCH 0522/1339] fix: fix syntax errors and failing unit tests (#849) Fixes #848 --- 
.../%version/%sub/services/%service/client.py.j2 | 6 +++--- .../%sub/services/%service/transports/grpc.py.j2 | 8 ++++---- .../%sub/services/%service/async_client.py.j2 | 4 ++-- .../%sub/services/%service/client.py.j2 | 8 ++++---- .../%sub/services/%service/transports/grpc.py.j2 | 8 ++++---- .../%service/transports/grpc_asyncio.py.j2 | 8 ++++---- .../%sub/services/%service/transports/rest.py.j2 | 4 ++-- .../%name_%version/%sub/types/_message.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 16 +++++++--------- 9 files changed, 31 insertions(+), 33 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index aba4ca28dcc7..9e48007d26a4 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -305,7 +305,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if not method.client_streaming %} request (:class:`{{ method.input.ident.sphinx }}`): The request object.{{ ' ' }} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): {{ field.meta.doc|rst(width=72, indent=16) }} @@ -316,7 +316,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% else %} requests (Iterator[`{{ method.input.ident.sphinx }}`]): The request object iterator.{{ ' ' }} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should 
be retried. @@ -344,7 +344,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): 'the individual field arguments should be set.') {% endif %} {# method.flattened_fields #} - {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} + {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #} # The request isn't a proto-plus wrapped type. # so it must be constructed via keyword expansion. if isinstance(request, dict): diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index a167ae640e74..381d003569ee 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -217,10 +217,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: - r"""Return a callable for the {{ ' ' }} - {{ (method.name|snake_case).replace('_',' ')|wrap( - width=70, offset=40, indent=8) }} - {{ ' ' }} method over gRPC. + r"""Return a callable for the{{ ' ' }} + {{- (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=40, indent=8) -}} + {{ ' ' }}method over gRPC. 
{{ method.meta.doc|rst(width=72, indent=8) }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 78a2a4ce8dfc..ec0eee683184 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -166,7 +166,7 @@ class {{ service.async_client_name }}: {% if not method.client_streaming %} request (:class:`{{ method.input.ident.sphinx }}`): The request object.{{ ' ' }} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): {{ field.meta.doc|rst(width=72, indent=16) }} @@ -177,7 +177,7 @@ class {{ service.async_client_name }}: {% else %} requests (AsyncIterator[`{{ method.input.ident.sphinx }}`]): The request object AsyncIterator.{{ ' ' }} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index ae69e60a50d0..36b651309b02 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -320,7 +320,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if not method.client_streaming %} request ({{ method.input.ident.sphinx }}): The request object.{{ ' ' }} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(width=72, indent=16) }} @@ -331,7 +331,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% else %} requests (Iterator[{{ method.input.ident.sphinx }}]): The request object iterator.{{ ' ' }} - {{ method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -360,7 +360,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): 'the individual field arguments should be set.') {% endif %} - {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} + {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #} if isinstance(request, dict): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
@@ -375,7 +375,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # there are no flattened fields. if not isinstance(request, {{ method.input.ident }}): request = {{ method.input.ident }}(request) - {% endif %} {# different request package #} + {% endif %}{# different request package #} {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} {% if method.flattened_fields and method.input.ident.package == method.ident.package %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index a0bb3de73a19..6f0866a988db 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -244,10 +244,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: - r"""Return a callable for the {{ ' ' }} - {{ (method.name|snake_case).replace('_',' ')|wrap( - width=70, offset=40, indent=8) }} - {{ ' ' }} method over gRPC. + r"""Return a callable for the{{ ' ' }} + {{- (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=40, indent=8) -}} + {{ ' ' }}method over gRPC. 
{{ method.meta.doc|rst(width=72, indent=8) }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 06c28cbb25e8..a518ec94b2ef 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -248,10 +248,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], Awaitable[{{ method.output.ident }}]]: - r"""Return a callable for the {{ ' ' }} - {{ (method.name|snake_case).replace('_',' ')|wrap( - width=70, offset=40, indent=8) }} - {{ ' ' }} method over gRPC. + r"""Return a callable for the{{ ' ' }} + {{- (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=40, indent=8) -}} + {{ ' ' }}method over gRPC. {{ method.meta.doc|rst(width=72, indent=8) }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 5614a6b4a89f..afe9985600ff 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -141,8 +141,8 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): Args: request (~.{{ method.input.ident }}): - The request object. 
- {{ method.input.meta.doc|rst(width=72, indent=16) }} + The request object.{{ ' ' }} + {{- method.input.meta.doc|rst(width=72, indent=16) }} metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. {% if not method.void %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 73ee46cf2487..890b2cd80dcd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -37,7 +37,7 @@ class {{ message.name }}({{ p }}.Message): {{ field.name }} = {{ p }}.MapField( {{ p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, - number={{ field.number }} + number={{ field.number }}, {% if value_field.enum or value_field.message %} {{ value_field.proto_type.lower() }}={{ value_field.type.ident.rel(message.ident) }}, {% endif %}{# enum or message#} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4df54e2d7873..8c2042f9bcd8 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1485,10 +1485,9 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): load_creds.assert_called_once_with("credentials.json", scopes=None, default_scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %} - ), + {% endfor %}), quota_project_id="octopus", ) @@ -1533,7 +1532,6 @@ def test_{{ service.name|snake_case }}_auth_adc(): {% for 
scope in service.oauth_scopes %} '{{ scope }}', {% endfor %}), - quota_project_id=None, ) @@ -1623,14 +1621,14 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, {% with host = (service.host|default('localhost', true)) %} create_channel.assert_called_with( - "{{ host }}", + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", credentials=creds, credentials_file=None, quota_project_id="octopus", default_scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %}), + {% endfor %}), scopes=["1", "2"], default_host="{{ host }}", ssl_credentials=None, @@ -1667,9 +1665,9 @@ def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(tra credentials_file=None, quota_project_id="octopus", scopes=( - {%- for scope in service.oauth_scopes %} + {% for scope in service.oauth_scopes %} '{{ scope }}', - {%- endfor %}), + {% endfor %}), ssl_credentials=None, options=[ ("grpc.max_send_message_length", -1), From ba1f07a2db7d1f1e17b1ba9758f5f1c96aba6f0d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 Apr 2021 08:57:07 -0600 Subject: [PATCH 0523/1339] chore: release 0.44.1 (#850) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b010547a463f..aba41e82d66b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.44.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.0...v0.44.1) (2021-04-28) + + +### Bug Fixes + +* fix syntax errors and failing unit tests ([#849](https://www.github.com/googleapis/gapic-generator-python/issues/849)) 
([9046261](https://www.github.com/googleapis/gapic-generator-python/commit/90462617e3e2b90eb8684210b6a70e890bdc0d96)), closes [#848](https://www.github.com/googleapis/gapic-generator-python/issues/848) + ## [0.44.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.3...v0.44.0) (2021-04-23) From 274c8bd36e1262347a4029c45bbae7a5dc174186 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 30 Apr 2021 14:12:55 -0600 Subject: [PATCH 0524/1339] fix: remove auth, policy, and options from the reserved names list (#851) Fixes #835. Breakdown by name that was originally added in #824 - `auth`: `from google import auth` -> `import google.auth` - `credentials`: `from google.auth import credentials` -> `from google.auth import credentials as ga_credentials` - `exceptions`: `from google.api_core import exceptions` -> `from google.api_core import exceptions as core_exceptions` - `future`: skipped, as it is only used in the [generated tests](https://github.com/googleapis/gapic-generator-python/search?q=%22import+future%22) and has a low chance of colliding - `options` `from google.iam.v1 import options_pb2 as options` -> `from google.iam.v1 import options_pb2` - `policy` `from google.iam.v1 import policy_pb2 as policy` -> `from google.iam.v1 import policy_pb2` - `math` skipped as it is only used in [generated tests](https://github.com/googleapis/gapic-generator-python/search?q=%22import+math%22) For `options` and `policy` there is a small change to `gapic/schema/metadata.py` to not alias `_pb2` types --- .../%sub/services/%service/client.py.j2 | 6 +- .../services/%service/transports/base.py.j2 | 8 +- .../services/%service/transports/grpc.py.j2 | 12 +- .../%name_%version/%sub/test_%service.py.j2 | 66 ++--- .../gapic-generator/gapic/schema/metadata.py | 10 +- .../%sub/services/%service/async_client.py.j2 | 40 +-- .../%sub/services/%service/client.py.j2 | 40 +-- .../services/%service/transports/base.py.j2 | 36 +-- 
.../services/%service/transports/grpc.py.j2 | 30 +- .../%service/transports/grpc_asyncio.py.j2 | 31 +-- .../services/%service/transports/rest.py.j2 | 9 +- .../%name_%version/%sub/test_%service.py.j2 | 256 +++++++++--------- .../gapic/utils/reserved_names.py | 6 - .../tests/unit/schema/test_metadata.py | 6 + 14 files changed, 280 insertions(+), 276 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 9e48007d26a4..3e0221671207 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -10,10 +10,10 @@ from typing import Callable, Dict, Optional, {% if service.any_server_streaming import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -174,7 +174,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %} {# common resources #} def __init__(self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, {{ service.name }}Transport, None] = None, client_options: 
Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index a6a0458a332f..d630c8f02336 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -6,13 +6,13 @@ import abc import typing import pkg_resources -from google import auth +import google.auth # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore {% if service.has_lro %} from google.api_core import operations_v1 # type: ignore {% endif %} -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} @@ -43,7 +43,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): def __init__( self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -70,7 +70,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): # If no credentials are provided, then determine the appropriate # defaults. if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES) # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 381d003569ee..af6b11ad89e3 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -10,8 +10,8 @@ from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore {% endif %} from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -39,7 +39,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """ def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -105,7 +105,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) # Create SSL credentials with client_cert_source or application # default SSL credentials. 
@@ -135,7 +135,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): host = host if ":" in host else host + ":443" if credentials is None: - credentials, _ = auth.default(scopes=self.AUTH_SCOPES) + credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES) # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( @@ -162,7 +162,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, scopes: Optional[Sequence[str]] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e507bb573afc..159552914d22 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -12,8 +12,8 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule {# Import the service itself as well as every proto module that it imports. 
-#} {% filter sort_lines %} -from google import auth -from google.auth import credentials +import google.auth +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} @@ -63,7 +63,7 @@ def test__get_default_mtls_endpoint(): def test_{{ service.client_name|snake_case }}_from_service_account_info(): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -76,7 +76,7 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(): def test_{{ service.client_name|snake_case }}_from_service_account_file(): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = {{ service.client_name }}.from_service_account_file("dummy/file/path.json") @@ -103,7 +103,7 @@ def test_{{ service.client_name|snake_case }}_client_options(): # Check that if channel is provided we won't create a new one. 
with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: transport = transports.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials() ) client = {{ service.client_name }}(transport=transport) gtc.assert_not_called() @@ -254,7 +254,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): {% for method in service.methods.values() %} def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -340,7 +340,7 @@ def test_{{ method.name|snake_case }}_from_dict(): {% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -385,7 +385,7 @@ def test_{{ method.name|snake_case }}_field_headers(): {% if method.ident.package != method.input.ident.package %} def test_{{ method.name|snake_case }}_from_dict(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -414,7 +414,7 @@ def test_{{ method.name|snake_case }}_from_dict(): {% if method.flattened_fields %} def test_{{ method.name|snake_case }}_flattened(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -461,7 +461,7 @@ def test_{{ method.name|snake_case }}_flattened(): def test_{{ method.name|snake_case }}_flattened_error(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -479,7 +479,7 @@ def test_{{ method.name|snake_case }}_flattened_error(): {% if method.paged_result_field %} def test_{{ method.name|snake_case }}_pager(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -538,7 +538,7 @@ def test_{{ method.name|snake_case }}_pager(): def test_{{ method.name|snake_case }}_pages(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -587,11 +587,11 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -599,7 +599,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = {{ service.client_name }}(transport=transport) assert client.transport is transport @@ -608,7 +608,7 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -617,7 +617,7 @@ def test_transport_get_channel(): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -629,8 +629,8 @@ def test_transport_grpc_default(): ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(auth, 'default') as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -640,7 +640,7 @@ def test_{{ service.name|snake_case }}_base_transport(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: Transport.return_value = None transport = transports.{{ service.name }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -664,17 +664,17 @@ def test_{{ service.name|snake_case }}_base_transport(): def test_{{ service.name|snake_case }}_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, 'default') as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, 'default') as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport() adc.assert_called_once() def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, 'default') as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) {{ service.client_name }}() adc.assert_called_once_with(scopes=( {% for scope in service.oauth_scopes %} @@ -686,8 +686,8 @@ def test_{{ service.name|snake_case }}_auth_adc(): def test_{{ service.name|snake_case }}_transport_auth_adc(): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, 'default') as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk") adc.assert_called_once_with(scopes=( {% for scope in service.oauth_scopes %} @@ -699,7 +699,7 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(): def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), ) assert client.transport._host == '{{ host }}:443' @@ -709,7 +709,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): def test_{{ service.name|snake_case }}_host_with_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), ) assert client.transport._host == '{{ host }}:8000' @@ -741,9 +741,9 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, 'default') as adc: + with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -820,7 +820,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( {% if service.has_lro %} def test_{{ service.name|snake_case 
}}_grpc_lro_client(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -890,7 +890,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -898,7 +898,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: transport_class = {{ service.client_name }}.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index b1ae02228ca1..b63921762fa2 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -74,9 +74,16 @@ def __str__(self) -> str: """ # Most (but not all) types are in a module. if self.module: + module_name = self.module + + # This module is from a different proto package + # Most commonly happens for a common proto + # https://pypi.org/project/googleapis-common-protos/ + if not self.proto_package.startswith(self.api_naming.proto_package): + module_name = f'{self.module}_pb2' + # If collisions are registered and conflict with our module, # use the module alias instead. 
- module_name = self.module if self.module_alias: module_name = self.module_alias @@ -170,7 +177,6 @@ def python_import(self) -> imp.Import: return imp.Import( package=self.package, module=f'{self.module}_pb2', - alias=self.module_alias if self.module_alias else self.module, ) @property diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index ec0eee683184..47ccc596a89c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -9,10 +9,10 @@ from typing import Dict, {% if service.any_server_streaming %}AsyncIterable, Awa import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore {% filter sort_lines %} @@ -22,8 +22,8 @@ from google.oauth2 import service_account # type: ignore {% endfor %} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -94,7 +94,7 @@ class {{ service.async_client_name }}: get_transport_class = 
functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) def __init__(self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, {{ service.name }}Transport] = 'grpc_asyncio', client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, @@ -320,16 +320,16 @@ class {{ service.async_client_name }}: {% if opts.add_iam_methods %} async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. Replaces any existing policy. Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -338,7 +338,7 @@ class {{ service.async_client_name }}: metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -402,7 +402,7 @@ class {{ service.async_client_name }}: # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -426,17 +426,17 @@ class {{ service.async_client_name }}: async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. Returns an empty policy if the function exists and does not have a policy set. Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -445,7 +445,7 @@ class {{ service.async_client_name }}: metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -509,7 +509,7 @@ class {{ service.async_client_name }}: # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -533,17 +533,17 @@ class {{ service.async_client_name }}: async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control policy for a function. If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -552,7 +552,7 @@ class {{ service.async_client_name }}: metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -560,7 +560,7 @@ class {{ service.async_client_name }}: # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 36b651309b02..45efbc3cc166 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -10,10 +10,10 @@ from typing import Callable, Dict, Optional, {% if service.any_server_streaming import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -26,8 +26,8 @@ from google.oauth2 import service_account # type: ignore {% endfor %} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -191,7 +191,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %}{# common resources #} def __init__(self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: 
Union[str, {{ service.name }}Transport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, @@ -459,16 +459,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if opts.add_iam_methods %} def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. Replaces any existing policy. Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -477,7 +477,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -541,7 +541,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -565,17 +565,17 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. Returns an empty policy if the function exists and does not have a policy set. Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -584,7 +584,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -648,7 +648,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -672,17 +672,17 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control policy for a function. If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -691,7 +691,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -699,7 +699,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index fe1549742ccf..819448a18ba5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -7,15 +7,15 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import packaging.version import pkg_resources -from google import auth # type: ignore +import google.auth # type: ignore import google.api_core # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore {% if service.has_lro %} from google.api_core import operations_v1 # type: ignore {% endif %} -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} @@ -23,8 +23,8 @@ from google.auth import credentials # type: ignore {{ method.output.ident.python_import }} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} @@ -39,7 +39,7 @@ except pkg_resources.DistributionNotFound: try: # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = auth.__version__ + _GOOGLE_AUTH_VERSION = google.auth.__version__ except AttributeError: try: # try pkg_resources if it is available 
_GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version @@ -63,7 +63,7 @@ class {{ service.name }}Transport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -105,17 +105,17 @@ class {{ service.name }}Transport(abc.ABC): # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( + credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Save the credentials. 
self._credentials = credentials @@ -176,7 +176,7 @@ class {{ service.name }}Transport(abc.ABC): {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} - exceptions.{{ ex.__name__ }}, + core_exceptions.{{ ex.__name__ }}, {% endfor %} ), deadline={{ method.timeout }}, @@ -214,8 +214,8 @@ class {{ service.name }}Transport(abc.ABC): def set_iam_policy( self, ) -> Callable[ - [iam_policy.SetIamPolicyRequest], - Union[policy.Policy, Awaitable[policy.Policy]], + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @@ -223,8 +223,8 @@ class {{ service.name }}Transport(abc.ABC): def get_iam_policy( self, ) -> Callable[ - [iam_policy.GetIamPolicyRequest], - Union[policy.Policy, Awaitable[policy.Policy]], + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @@ -232,10 +232,10 @@ class {{ service.name }}Transport(abc.ABC): def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], + [iam_policy_pb2.TestIamPermissionsRequest], Union[ - iam_policy.TestIamPermissionsResponse, - Awaitable[iam_policy.TestIamPermissionsResponse], + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 6f0866a988db..c266473ace29 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -10,8 +10,8 @@ from google.api_core import grpc_helpers # type: ignore from google.api_core import operations_v1 # type: ignore {% endif %} from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -22,8 +22,8 @@ import grpc # type: ignore {{ method.output.ident.python_import }} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -45,7 +45,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -173,7 +173,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -274,7 +274,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @property def set_iam_policy( self, - ) -> 
Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. @@ -291,15 +291,15 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. 
Returns an empty policy if the function exists and does @@ -317,8 +317,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -326,7 +326,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -345,8 +345,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index a518ec94b2ef..5c02730a1958 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -10,8 +10,7 @@ from google.api_core import grpc_helpers_async # type: ignore {% if service.has_lro %} from google.api_core import operations_v1 # type: ignore {% endif %} -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import packaging.version @@ -24,8 +23,8 @@ from grpc.experimental import aio # type: ignore {{ method.output.ident.python_import }} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -51,7 +50,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -91,7 +90,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -278,7 +277,7 @@ class 
{{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. @@ -295,15 +294,15 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. 
Returns an empty policy if the function exists and does @@ -321,8 +320,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -330,8 +329,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -350,8 +349,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index afe9985600ff..dbb21bfa72c7 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -9,8 +9,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import operations_v1 {% endif %} from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -24,8 +23,8 @@ from google.auth.transport.requests import AuthorizedSession {{ method.output.ident.python_import }} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} @@ -46,7 +45,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {# TODO(yon-mg): handle mtls stuff if that's relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 8c2042f9bcd8..08a68d39bc80 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -19,8 +19,8 @@ from requests.sessions import Session {# Import the service itself as well as every proto module that it imports. #} {% filter sort_lines %} -from google import auth -from google.auth import credentials +import google.auth +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} @@ -31,7 +31,7 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _GOOGLE_AUTH_VERSION from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _API_CORE_VERSION from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async {% if service.has_lro %} @@ -48,9 +48,9 @@ from google.api_core import gapic_v1 {% endfor %} {% endfor %} {% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} @@ -110,7 +110,7 @@ def 
test__get_default_mtls_endpoint(): {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -130,7 +130,7 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(client_c {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") @@ -175,7 +175,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans # Check that if channel is provided we won't create a new one. with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: transport = transport_class( - credentials=credentials.AnonymousCredentials() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -420,7 +420,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): {% for method in service.methods.values() if 'grpc' in opts.transport %} def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -508,7 +508,7 @@ def test_{{ method.name|snake_case }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -530,7 +530,7 @@ def test_{{ method.name|snake_case }}_empty_call(): @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -620,7 +620,7 @@ async def test_{{ method.name|snake_case }}_async_from_dict(): {% if method.field_headers and not method.client_streaming %} def test_{{ method.name|snake_case }}_field_headers(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -665,7 +665,7 @@ def test_{{ method.name|snake_case }}_field_headers(): @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_field_headers_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -711,7 +711,7 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): {% if method.ident.package != method.input.ident.package %} def test_{{ method.name|snake_case }}_from_dict_foreign(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -740,7 +740,7 @@ def test_{{ method.name|snake_case }}_from_dict_foreign(): {% if method.flattened_fields %} def test_{{ method.name|snake_case }}_flattened(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -788,7 +788,7 @@ def test_{{ method.name|snake_case }}_flattened(): def test_{{ method.name|snake_case }}_flattened_error(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -805,7 +805,7 @@ def test_{{ method.name|snake_case }}_flattened_error(): @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_flattened_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -873,7 +873,7 @@ async def test_{{ method.name|snake_case }}_flattened_async(): @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_flattened_error_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -891,7 +891,7 @@ async def test_{{ method.name|snake_case }}_flattened_error_async(): {% if method.paged_result_field %} def test_{{ method.name|snake_case }}_pager(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -950,7 +950,7 @@ def test_{{ method.name|snake_case }}_pager(): def test_{{ method.name|snake_case }}_pages(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -992,7 +992,7 @@ def test_{{ method.name|snake_case }}_pages(): @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_async_pager(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1040,7 +1040,7 @@ async def test_{{ method.name|snake_case }}_async_pager(): @pytest.mark.asyncio async def test_{{ method.name|snake_case }}_async_pages(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1091,7 +1091,7 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% for method in service.methods.values() if 'rest' in opts.transport %} def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1159,7 +1159,7 @@ def test_{{ method.name|snake_case }}_rest_from_dict(): def test_{{ method.name|snake_case }}_rest_flattened(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. 
@@ -1214,7 +1214,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(): def test_{{ method.name|snake_case }}_rest_flattened_error(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1231,7 +1231,7 @@ def test_{{ method.name|snake_case }}_rest_flattened_error(): {% if method.paged_result_field %} def test_{{ method.name|snake_case }}_pager(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. @@ -1351,17 +1351,17 @@ def test_{{ method.name|snake_case }}_pager(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( @@ -1371,7 +1371,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( @@ -1383,7 +1383,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = {{ service.client_name }}(transport=transport) assert client.transport is transport @@ -1392,13 +1392,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}GrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.{{ service.grpc_asyncio_transport_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1414,8 +1414,8 @@ def test_transport_get_channel(): ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, 'default') as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -1423,7 +1423,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -1433,9 +1433,9 @@ def test_transport_grpc_default(): def test_{{ service.name|snake_case }}_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.{{ service.name }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -1445,7 +1445,7 @@ def test_{{ service.name|snake_case }}_base_transport(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: Transport.return_value = None transport = transports.{{ service.name }}Transport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1475,9 +1475,9 @@ def test_{{ service.name|snake_case }}_base_transport(): @requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + 
(api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -1495,9 +1495,9 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): @requires_google_auth_lt_1_25_0 def test_{{ service.name|snake_case }}_base_transport_with_credentials_file_old_google_auth(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -1513,9 +1513,9 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file_old_ def test_{{ service.name|snake_case }}_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file 
are None. - with mock.patch.object(auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport() adc.assert_called_once() @@ -1523,8 +1523,8 @@ def test_{{ service.name|snake_case }}_base_transport_with_adc(): @requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, 'default', autospec=True) as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) {{ service.client_name }}() adc.assert_called_once_with( scopes=None, @@ -1539,8 +1539,8 @@ def test_{{ service.name|snake_case }}_auth_adc(): @requires_google_auth_lt_1_25_0 def test_{{ service.name|snake_case }}_auth_adc_old_google_auth(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, 'default', autospec=True) as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) {{ service.client_name }}() adc.assert_called_once_with( scopes=( @@ -1563,8 +1563,8 @@ def test_{{ service.name|snake_case }}_auth_adc_old_google_auth(): def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, 'default', autospec=True) as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -1587,8 +1587,8 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): def test_{{ service.name|snake_case }}_transport_auth_adc_old_google_auth(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default", autospec=True) as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus") adc.assert_called_once_with(scopes=( {% for scope in service.oauth_scopes %} @@ -1609,10 +1609,10 @@ def test_{{ service.name|snake_case }}_transport_auth_adc_old_google_auth(transp def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default", autospec=True) as adc, mock.patch.object( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -1651,10 +1651,10 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default", autospec=True) as adc, mock.patch.object( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class(quota_project_id="octopus") @@ -1688,10 +1688,10 @@ def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(tra def test_{{ service.name|snake_case }}_transport_create_channel_user_scopes(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default", autospec=True) as adc, mock.patch.object( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) {% with host = (service.host|default('localhost', true)) %} @@ -1718,7 +1718,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel_user_scopes(tran def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1762,7 +1762,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtl {% if 'rest' in opts.transport %} def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtls(): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.{{ service.rest_transport_name }} ( credentials=cred, @@ -1774,7 +1774,7 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), ) assert client.transport._host == '{{ host }}:443' @@ -1784,7 +1784,7 @@ def test_{{ service.name|snake_case }}_host_no_port(): def test_{{ service.name|snake_case }}_host_with_port(): {% with 
host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), ) assert client.transport._host == '{{ host }}:8000' @@ -1831,9 +1831,9 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, 'default') as adc: + with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -1912,7 +1912,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( {% if service.has_lro %} def test_{{ service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -1929,7 +1929,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): def test_{{ service.name|snake_case }}_grpc_lro_async_client(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc_asyncio', ) transport = client.transport @@ -2001,7 +2001,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2009,7 +2009,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): with 
mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: transport_class = {{ service.client_name }}.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2018,17 +2018,17 @@ def test_client_withDEFAULT_CLIENT_INFO(): {% if opts.add_iam_methods and 'grpc' in opts.transport %} def test_set_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -2039,7 +2039,7 @@ def test_set_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2049,12 +2049,12 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2062,7 +2062,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -2074,7 +2074,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2083,17 +2083,17 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) @@ -2110,19 +2110,19 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.set_iam_policy), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) @@ -2138,17 +2138,17 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -2157,19 +2157,19 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy() + policy_pb2.Policy() ) response = await client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -2177,17 +2177,17 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -2198,7 +2198,7 @@ def test_get_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2208,12 +2208,12 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2221,7 +2221,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -2233,7 +2233,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2242,17 +2242,17 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) @@ -2269,19 +2269,19 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.get_iam_policy), "__call__" ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) @@ -2297,17 +2297,17 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -2315,19 +2315,19 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy() + policy_pb2.Policy() ) response = await client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -2335,19 +2335,19 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) @@ -2360,7 +2360,7 @@ def test_test_iam_permissions(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -2368,12 +2368,12 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2381,7 +2381,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) ) response = await client.test_iam_permissions(request) @@ -2393,26 +2393,26 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] def test_test_iam_permissions_field_headers(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) @@ -2429,12 +2429,12 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2442,7 +2442,7 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) await client.test_iam_permissions(request) @@ -2459,14 +2459,14 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ @@ -2479,7 +2479,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2487,7 +2487,7 @@ async def test_test_iam_permissions_from_dict_async(): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) response = await client.test_iam_permissions( diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 9104015d5c15..cf87e839925f 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -25,11 +25,5 @@ keyword.kwlist, # We make SOME exceptions for certain names that collide with builtins. set(dir(builtins)) - {"filter", "map", "id", "input", "property"}, - # This is a hand-maintained list of modules that are directly imported - # in templates, i.e. they are not added as dependencies to any type, - # the raw text is just there in the template. - # More can be added as collisions are discovered. - # See issue #819 for additional info. 
- {"auth", "credentials", "exceptions", "future", "options", "policy", "math"} ) ) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 693beffa9bbe..477fcf5ad155 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -43,6 +43,12 @@ def test_address_str_parent(): assert str(addr) == 'baz.spam.eggs.Bacon' +def test_address_str_different_proto_package(): + addr = metadata.Address(package=('google', 'iam', 'v1'), module='options', name='GetPolicyOptions', + api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1')) + assert str(addr) == 'options_pb2.GetPolicyOptions' + + def test_address_proto(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.proto == 'foo.bar.Bacon' From 0c425047e2eaa2981a3deb031a886edea832b450 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 30 Apr 2021 14:57:40 -0700 Subject: [PATCH 0525/1339] chore: release 0.44.2 (#854) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index aba41e82d66b..e67a9b9bb34e 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.44.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.1...v0.44.2) (2021-04-30) + + +### Bug Fixes + +* remove auth, policy, and options from the reserved names list ([#851](https://www.github.com/googleapis/gapic-generator-python/issues/851)) ([d3f31a0](https://www.github.com/googleapis/gapic-generator-python/commit/d3f31a0d33411b3248871ddbe51135e83b699a73)) + ### 
[0.44.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.0...v0.44.1) (2021-04-28) From f00f8043426e7e3979db13f502c692b885584f7e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 3 May 2021 10:09:16 -0700 Subject: [PATCH 0526/1339] perf: reduce unnecessary copies, optimize Address comparison (#855) Includes various other performance optimizations and minor whitespace fixes. Local testing cuts down Ads generation time to ~1m (and prevents OOM) and DialogflowCX from ~10 minutes to ~5 minutes. --- .../%name/%version/%sub/__init__.py.j2 | 26 +++++------ .../%namespace/%name/%version/__init__.py.j2 | 2 +- .../%namespace/%name/__init__.py.j2 | 2 +- .../%name_%version/test_module_import.py.j2 | 2 +- .../gapic/generator/generator.py | 2 +- .../gapic-generator/gapic/schema/metadata.py | 41 ++++++++++++------ .../gapic-generator/gapic/schema/wrappers.py | 43 ++++++++++--------- .../%name_%version/%sub/__init__.py.j2 | 26 +++++------ .../tests/unit/schema/test_metadata.py | 2 +- .../unit/schema/wrappers/test_message.py | 2 +- 10 files changed, 84 insertions(+), 64 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 index e6a09c63fbc0..929dbe8317b9 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/__init__.py.j2 @@ -8,21 +8,23 @@ them again. 
-#} __all__ = ( - {% for subpackage in api.subpackages|dictsort %} + {% filter sort_lines -%} + {% for subpackage in api.subpackages -%} '{{ subpackage }}', - {% endfor %} - {% for service in api.services.values()|sort(attribute='client_name') - if service.meta.address.subpackage == api.subpackage_view %} + {% endfor -%} + {% for service in api.services.values() + if service.meta.address.subpackage == api.subpackage_view -%} '{{ service.client_name }}', - {% endfor %} - {% for proto in api.protos.values()|sort(attribute='name') - if proto.meta.address.subpackage == api.subpackage_view %} - {% for message in proto.messages.values() %} + {% endfor -%} + {% for proto in api.protos.values() + if proto.meta.address.subpackage == api.subpackage_view -%} + {% for message in proto.messages.values() -%} '{{ message.name }}', - {% endfor %} - {% for enum in proto.enums.values()|sort(attribute='name') %} + {% endfor -%} + {% for enum in proto.enums.values() -%} '{{ enum.name }}', - {% endfor %} - {% endfor %} + {% endfor -%} + {% endfor -%} + {% endfilter -%} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index 1a7dd2732abc..8ec595555f13 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -1,7 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import %} {# lazy import #} +{% if opts.lazy_import -%} {# lazy import #} import importlib import sys diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 index 319b6073e010..abdafe27172a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/__init__.py.j2 @@ -1,7 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} -{% if opts.lazy_import %} {# lazy import #} +{% if opts.lazy_import -%} {# lazy import #} import importlib import sys diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 index 2ed725a826b3..47da788b4502 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 @@ -1,7 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} -{% if opts.lazy_import %} {# lazy import #} +{% if opts.lazy_import -%} {# lazy import #} import pytest diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 461bb84bcbd2..7c081b4722b9 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -378,7 +378,7 @@ def _get_filename( # Replace the %namespace variable. filename = filename.replace( "%namespace", - os.path.sep.join([i.lower() for i in api_schema.naming.namespace]), + os.path.sep.join(i.lower() for i in api_schema.naming.namespace), ).lstrip(os.path.sep) # Replace the %name, %version, and %sub variables. diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index b63921762fa2..e14190d00763 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -36,22 +36,30 @@ from gapic.utils import cached_property from gapic.utils import RESERVED_NAMES +# This class is a minor hack to optimize Address's __eq__ method. 
+ @dataclasses.dataclass(frozen=True) -class Address: +class BaseAddress: name: str = '' module: str = '' module_path: Tuple[int, ...] = dataclasses.field(default_factory=tuple) package: Tuple[str, ...] = dataclasses.field(default_factory=tuple) parent: Tuple[str, ...] = dataclasses.field(default_factory=tuple) + + +@dataclasses.dataclass(frozen=True) +class Address(BaseAddress): api_naming: naming.Naming = dataclasses.field( default_factory=naming.NewNaming, ) collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) def __eq__(self, other) -> bool: - return all(getattr(self, i) == getattr(other, i) - for i in ('name', 'module', 'module_path', 'package', 'parent')) + # We don't want to use api_naming or collisions to determine equality, + # so defer to the parent class's eq method. + # This is an fairly important optimization for large APIs. + return super().__eq__(other) def __hash__(self): # Do NOT include collisions; they are not relevant. @@ -94,7 +102,8 @@ def __str__(self) -> str: # Return the Python identifier. return '.'.join(self.parent + (self.name,)) - def __repr__(self) -> str: + @cached_property + def __cached_string_repr(self): return "({})".format( ", ".join( ( @@ -108,6 +117,9 @@ def __repr__(self) -> str: ) ) + def __repr__(self) -> str: + return self.__cached_string_repr + @property def module_alias(self) -> str: """Return an appropriate module alias if necessary. @@ -118,16 +130,15 @@ def module_alias(self) -> str: while still providing names that are fundamentally readable to users (albeit looking auto-generated). """ - if self.module in self.collisions | RESERVED_NAMES: + # This is a minor optimization to prevent constructing a temporary set. 
+ if self.module in self.collisions or self.module in RESERVED_NAMES: return '_'.join( ( ''.join( - [ - partial_name[0] - for i in self.package - for partial_name in i.split("_") - if i != self.api_naming.version - ] + partial_name[0] + for i in self.package + for partial_name in i.split("_") + if i != self.api_naming.version ), self.module, ) @@ -302,7 +313,11 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Address': ``Address`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace(self, collisions=collisions) + return ( + dataclasses.replace(self, collisions=collisions) + if collisions and collisions != self.collisions + else self + ) @dataclasses.dataclass(frozen=True) @@ -340,7 +355,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Metadata': return dataclasses.replace( self, address=self.address.with_context(collisions=collisions), - ) + ) if collisions and collisions != self.address.collisions else self @dataclasses.dataclass(frozen=True) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index f6ae04ea3ef6..442528b73696 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -463,24 +463,24 @@ def with_context(self, *, visited_messages = visited_messages | {self} return dataclasses.replace( self, - fields=collections.OrderedDict( - (k, v.with_context( + fields={ + k: v.with_context( collisions=collisions, visited_messages=visited_messages - )) - for k, v in self.fields.items() - ) if not skip_fields else self.fields, - nested_enums=collections.OrderedDict( - (k, v.with_context(collisions=collisions)) + ) for k, v in self.fields.items() + } if not skip_fields else self.fields, + nested_enums={ + k: v.with_context(collisions=collisions) for k, v in self.nested_enums.items() - ), - nested_messages=collections.OrderedDict( - (k, v.with_context( + }, 
+ nested_messages={ + k: v.with_context( collisions=collisions, skip_fields=skip_fields, visited_messages=visited_messages, - )) - for k, v in self.nested_messages.items()), + ) + for k, v in self.nested_messages.items() + }, meta=self.meta.with_context(collisions=collisions), ) @@ -535,7 +535,7 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'EnumType': return dataclasses.replace( self, meta=self.meta.with_context(collisions=collisions), - ) + ) if collisions else self @property def options_dict(self) -> Dict: @@ -957,9 +957,11 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': ``Method`` object aliases module names to avoid naming collisions in the file being written. """ - maybe_lro = self.lro.with_context( - collisions=collisions - ) if self.lro else None + maybe_lro = None + if self.lro: + maybe_lro = self.lro.with_context( + collisions=collisions + ) if collisions else self.lro return dataclasses.replace( self, @@ -1195,13 +1197,12 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """ return dataclasses.replace( self, - methods=collections.OrderedDict( - (k, v.with_context( - # A methodd's flattened fields create additional names + methods={ + k: v.with_context( + # A method's flattened fields create additional names # that may conflict with module imports. 
collisions=collisions | frozenset(v.flattened_fields.keys())) - ) for k, v in self.methods.items() - ), + }, meta=self.meta.with_context(collisions=collisions), ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index be52e37f29b8..6c5b948a2da5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -33,21 +33,23 @@ from .types.{{ proto.module_name }} import {{ enum.name }} them again. -#} __all__ = ( - {% for subpackage in api.subpackages|dictsort %} + {% filter sort_lines -%} + {% for subpackage in api.subpackages -%} '{{ subpackage }}', - {% endfor %} - {% for service in api.services.values()|sort(attribute='client_name') - if service.meta.address.subpackage == api.subpackage_view %} + {% endfor -%} + {% for service in api.services.values() + if service.meta.address.subpackage == api.subpackage_view -%} '{{ service.client_name }}', - {% endfor %} - {% for proto in api.protos.values()|sort(attribute='name') - if proto.meta.address.subpackage == api.subpackage_view %} - {% for message in proto.messages.values()|sort(attribute='name') %} + {% endfor -%} + {% for proto in api.protos.values() + if proto.meta.address.subpackage == api.subpackage_view -%} + {% for message in proto.messages.values()|sort(attribute='name') -%} '{{ message.name }}', - {% endfor %} - {% for enum in proto.enums.values()|sort(attribute='name') %} + {% endfor -%} + {% for enum in proto.enums.values() -%} '{{ enum.name }}', - {% endfor %} - {% endfor %} + {% endfor -%} + {% endfor -%} + {% endfilter %} ) {% endblock %} diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 477fcf5ad155..c778000c7c3d 100644 --- 
a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -33,7 +33,7 @@ def test_address_str_with_context(): package=('foo', 'bar'), module='baz', name='Bacon', - ).with_context(collisions={'baz'}) + ).with_context(collisions=frozenset({'baz'})) assert str(addr) == 'fb_baz.Bacon' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 4a7905d1b294..3de69d3e5498 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -56,7 +56,7 @@ def test_message_ident(): def test_message_ident_collisions(): message = make_message('Baz', package='foo.v1', module='bar').with_context( - collisions={'bar'}, + collisions=frozenset({'bar'}), ) assert str(message.ident) == 'fv_bar.Baz' assert message.ident.sphinx == 'foo.v1.bar.Baz' From ce73e99096e471b44a044e6d1583ee84a63e9910 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 May 2021 11:24:00 -0600 Subject: [PATCH 0527/1339] chore: release 0.44.3 (#857) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index e67a9b9bb34e..0f3fb2aaed16 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.44.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.2...v0.44.3) (2021-05-03) + + +### Performance Improvements + +* reduce unnecessary copies, optimize Address comparison ([#855](https://www.github.com/googleapis/gapic-generator-python/issues/855)) 
([e843540](https://www.github.com/googleapis/gapic-generator-python/commit/e8435400257707458e83424019c9b1a16fac9a99)) + ### [0.44.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.1...v0.44.2) (2021-04-30) From 81194313494da35a00a456a6098614e08f25b5c2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 3 May 2021 16:49:51 -0600 Subject: [PATCH 0528/1339] feat: add autogenerated snippets (#845) This PR targets iteration 1 and 2 specified in the [Snippet Gen Design](go/snippet-gen-design): Full canonical coverage of simple requests, paginated, LRO, server streaming, and Bidi streaming with empty request objects. Snippet generation is hidden behind a new option `autogen-snippets`. After discussion with folks on different language teams on snippetgen, I decided using "golden" snippet files would be easier than following the unit testing strategy used to check the library surface. I also believe goldens will be be easier for review for other Python DPEs. Other notes: - I've commented out the existing metadata generation code and tests. The new metadata format is still under discussion. - Async samples are excluded as the existing samplegen infrastructure was written pre-async. I will add the async samples in the next PR. 
Co-authored-by: Dov Shlachter --- .../gapic-generator/.github/snippet-bot.yml | 3 + .../.github/workflows/tests.yaml | 20 ++ packages/gapic-generator/.gitignore | 4 + .../gapic/generator/generator.py | 123 +++---- .../gapic/samplegen/samplegen.py | 309 ++++++++++++------ .../gapic/samplegen_utils/types.py | 3 - .../gapic/samplegen_utils/utils.py | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 13 + .../gapic/templates/_base.py.j2 | 2 +- .../docs/%name_%version/services.rst.j2 | 2 +- .../templates/examples/feature_fragments.j2 | 43 +-- .../gapic/templates/examples/sample.py.j2 | 23 +- .../gapic/templates/noxfile.py.j2 | 6 +- .../gapic-generator/gapic/utils/options.py | 3 + packages/gapic-generator/noxfile.py | 48 ++- ...ollusca_v1_snippets_list_resources_grpc.py | 45 +++ ..._v1_snippets_method_bidi_streaming_grpc.py | 45 +++ ..._v1_snippets_method_lro_signatures_grpc.py | 48 +++ ...a_v1_snippets_method_one_signature_grpc.py | 46 +++ ...1_snippets_method_server_streaming_grpc.py | 45 +++ .../tests/snippetgen/snippets.proto | 106 ++++++ .../tests/snippetgen/test_snippetgen.py | 81 +++++ .../tests/unit/generator/test_generator.py | 200 +++++++----- .../tests/unit/samplegen/common_types.py | 14 +- .../samplegen/golden_snippets/sample_basic.py | 55 ++++ .../sample_basic_unflattenable.py | 55 ++++ .../tests/unit/samplegen/test_integration.py | 164 +--------- .../tests/unit/samplegen/test_samplegen.py | 123 ++++++- .../tests/unit/samplegen/test_template.py | 52 +-- 29 files changed, 1217 insertions(+), 466 deletions(-) create mode 100644 packages/gapic-generator/.github/snippet-bot.yml create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py create mode 100644 
packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py create mode 100644 packages/gapic-generator/tests/snippetgen/snippets.proto create mode 100644 packages/gapic-generator/tests/snippetgen/test_snippetgen.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py diff --git a/packages/gapic-generator/.github/snippet-bot.yml b/packages/gapic-generator/.github/snippet-bot.yml new file mode 100644 index 000000000000..77ce8f8255e5 --- /dev/null +++ b/packages/gapic-generator/.github/snippet-bot.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/master/packages/snippet-bot +ignoreFiles: + - "**/*.py" diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 74fb502ec402..92ff88118547 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -265,6 +265,26 @@ jobs: run: python -m pip install nox - name: Typecheck the generated output. run: nox -s showcase_mypy${{ matrix.variant }} + snippetgen: + runs-on: ubuntu-latest + steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install system dependencies. + run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install nox. 
+ run: python -m pip install nox + - name: Check autogenerated snippets. + run: nox -s snippetgen unit: strategy: matrix: diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore index 5b68f2ed5833..2cead4ed7d47 100644 --- a/packages/gapic-generator/.gitignore +++ b/packages/gapic-generator/.gitignore @@ -65,3 +65,7 @@ pylintrc.test # pyenv .python-version + +# Test dependencies and output +api-common-protos +tests/snippetgen/.test_output diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 7c081b4722b9..362753ff2d17 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -14,8 +14,10 @@ import jinja2 import yaml +import itertools import re import os +import typing from typing import Any, DefaultDict, Dict, Mapping from hashlib import sha256 from collections import OrderedDict, defaultdict @@ -107,12 +109,12 @@ def get_response( template_name, api_schema=api_schema, opts=opts) ) - sample_output = self._generate_samples_and_manifest( - api_schema, - self._env.get_template(sample_templates[0]), - ) if sample_templates else {} - - output_files.update(sample_output) + if sample_templates: + sample_output = self._generate_samples_and_manifest( + api_schema, self._env.get_template(sample_templates[0]), + opts=opts, + ) + output_files.update(sample_output) # Return the CodeGeneratorResponse output. res = CodeGeneratorResponse( @@ -121,12 +123,13 @@ def get_response( return res def _generate_samples_and_manifest( - self, api_schema: api.API, sample_template: jinja2.Template, - ) -> Dict[str, CodeGeneratorResponse.File]: + self, api_schema: api.API, sample_template: jinja2.Template, *, opts: Options) -> Dict: """Generate samples and samplegen manifest for the API. Arguments: api_schema (api.API): The schema for the API to which the samples belong. 
+ sample_template (jinja2.Template): The template to use to generate samples. + opts (Options): Additional generator options. Returns: Dict[str, CodeGeneratorResponse.File]: A dict mapping filepath to rendered file. @@ -137,56 +140,50 @@ def _generate_samples_and_manifest( id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict(dict) - STANDALONE_TYPE = "standalone" - for config_fpath in self._sample_configs: - with open(config_fpath) as f: - configs = yaml.safe_load_all(f.read()) - - spec_generator = ( - spec - for cfg in configs - if is_valid_sample_cfg(cfg) - for spec in cfg.get("samples", []) - # If unspecified, assume a sample config describes a standalone. - # If sample_types are specified, standalone samples must be - # explicitly enabled. - if STANDALONE_TYPE in spec.get("sample_type", [STANDALONE_TYPE]) - ) + # Autogenerated sample specs + autogen_specs: typing.List[typing.Dict[str, Any]] = [] + if opts.autogen_snippets: + autogen_specs = list( + samplegen.generate_sample_specs(api_schema, opts=opts)) + + # Also process any handwritten sample specs + handwritten_specs = samplegen.parse_handwritten_specs( + self._sample_configs) + + sample_specs = autogen_specs + list(handwritten_specs) + + for spec in sample_specs: + # Every sample requires an ID. This may be provided + # by a samplegen config author. + # If no ID is provided, fall back to the region tag. + # + # Ideally the sample author should pick a descriptive, unique ID, + # but this may be impractical and can be error-prone. + spec_hash = sha256(str(spec).encode("utf8")).hexdigest()[:8] + sample_id = spec.get("id") or spec.get("region_tag") or spec_hash + spec["id"] = sample_id - for spec in spec_generator: - # Every sample requires an ID, preferably provided by the - # samplegen config author. - # If no ID is provided, fall back to the region tag. - # If there's no region tag, generate a unique ID. 
- # - # Ideally the sample author should pick a descriptive, unique ID, - # but this may be impractical and can be error-prone. - spec_hash = sha256(str(spec).encode("utf8")).hexdigest()[:8] - sample_id = spec.get("id") or spec.get( - "region_tag") or spec_hash - spec["id"] = sample_id - - hash_to_spec = id_to_hash_to_spec[sample_id] - if spec_hash in hash_to_spec: - raise DuplicateSample( - f"Duplicate samplegen spec found: {spec}") - - hash_to_spec[spec_hash] = spec - - out_dir = "samples" + hash_to_spec = id_to_hash_to_spec[sample_id] + + if spec_hash in hash_to_spec: + raise DuplicateSample( + f"Duplicate samplegen spec found: {spec}") + + hash_to_spec[spec_hash] = spec + + out_dir = "samples/generated_samples" fpath_to_spec_and_rendered = {} for hash_to_spec in id_to_hash_to_spec.values(): for spec_hash, spec in hash_to_spec.items(): id_is_unique = len(hash_to_spec) == 1 - # The ID is used to generate the file name and by sample tester - # to link filenames to invoked samples. It must be globally unique. + # The ID is used to generate the file name. It must be globally unique. if not id_is_unique: spec["id"] += f"_{spec_hash}" sample = samplegen.generate_sample( spec, api_schema, sample_template,) - fpath = spec["id"] + ".py" + fpath = utils.to_snake_case(spec["id"]) + ".py" fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = ( spec, sample, @@ -199,20 +196,24 @@ def _generate_samples_and_manifest( for fname, (_, sample) in fpath_to_spec_and_rendered.items() } - # Only generate a manifest if we generated samples. - if output_files: - manifest_fname, manifest_doc = manifest.generate( - ( - (fname, spec) - for fname, (spec, _) in fpath_to_spec_and_rendered.items() - ), - api_schema, - ) - - manifest_fname = os.path.join(out_dir, manifest_fname) - output_files[manifest_fname] = CodeGeneratorResponse.File( - content=manifest_doc.render(), name=manifest_fname - ) + # TODO(busunkim): Re-enable manifest generation once metadata + # format has been formalized. 
+ # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue + # + # if output_files: + + # manifest_fname, manifest_doc = manifest.generate( + # ( + # (fname, spec) + # for fname, (spec, _) in fpath_to_spec_and_rendered.items() + # ), + # api_schema, + # ) + + # manifest_fname = os.path.join(out_dir, manifest_fname) + # output_files[manifest_fname] = CodeGeneratorResponse.File( + # content=manifest_doc.render(), name=manifest_fname + # ) return output_files diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 051419d7db80..9cd4987d590d 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -20,14 +20,17 @@ import os import re import time +import yaml from gapic import utils from gapic.samplegen_utils import types +from gapic.samplegen_utils.utils import is_valid_sample_cfg +from gapic.schema import api from gapic.schema import wrappers -from collections import (defaultdict, namedtuple, ChainMap as chainmap) -from typing import (ChainMap, Dict, FrozenSet, List, Mapping, Optional, Tuple) +from collections import defaultdict, namedtuple, ChainMap as chainmap +from typing import Any, ChainMap, Dict, FrozenSet, Generator, List, Mapping, Optional, Tuple, Sequence # There is no library stub file for this module, so ignore it. from google.api import resource_pb2 # type: ignore @@ -71,6 +74,7 @@ class AttributeRequestSetup: that contains the value for the attribute. """ + value: str field: Optional[str] = None value_is_file: bool = False @@ -98,6 +102,7 @@ class TransformedRequest: The Optional[single]/Optional[body] is workaround for not having tagged unions. 
""" + base: str single: Optional[AttributeRequestSetup] body: Optional[List[AttributeRequestSetup]] @@ -108,8 +113,14 @@ class TransformedRequest: RESOURCE_RE = re.compile(r"\{([^}/]+)\}") @classmethod - def build(cls, request_type: wrappers.MessageType, api_schema, base: str, - attrs: List[AttributeRequestSetup], is_resource_request: bool): + def build( + cls, + request_type: wrappers.MessageType, + api_schema, + base: str, + attrs: List[AttributeRequestSetup], + is_resource_request: bool, + ): """Build a TransformedRequest based on parsed input. Acts as a factory to hide complicated logic for resource-based requests. @@ -152,34 +163,44 @@ def build(cls, request_type: wrappers.MessageType, api_schema, base: str, # # It's a precondition that the base field is # a valid field of the request message type. - resource_typestr = (request_type. - fields[base]. - options. - Extensions[resource_pb2.resource_reference]. - type) + resource_typestr = ( + request_type.fields[base] + .options.Extensions[resource_pb2.resource_reference] + .type + ) resource_message_descriptor = next( - (msg.options.Extensions[resource_pb2.resource] - for msg in api_schema.messages.values() - if msg.options.Extensions[resource_pb2.resource].type == resource_typestr), - None + ( + msg.options.Extensions[resource_pb2.resource] + for msg in api_schema.messages.values() + if msg.options.Extensions[resource_pb2.resource].type + == resource_typestr + ), + None, ) if not resource_message_descriptor: raise types.NoSuchResource( - f"No message exists for resource: {resource_typestr}") + f"No message exists for resource: {resource_typestr}" + ) # The field is only ever empty for singleton attributes. attr_names: List[str] = [a.field for a in attrs] # type: ignore # A single resource may be found under multiple paths and have many patterns. # We want to find an _exact_ match, if one exists. 
- pattern = next((p - for p in resource_message_descriptor.pattern - if cls.RESOURCE_RE.findall(p) == attr_names), None) + pattern = next( + ( + p + for p in resource_message_descriptor.pattern + if cls.RESOURCE_RE.findall(p) == attr_names + ), + None, + ) if not pattern: attr_name_str = ", ".join(attr_names) raise types.NoSuchResourcePattern( - f"Resource {resource_typestr} has no pattern with params: {attr_name_str}") + f"Resource {resource_typestr} has no pattern with params: {attr_name_str}" + ) return cls(base=base, body=attrs, single=None, pattern=pattern) @@ -222,10 +243,12 @@ class Validator: EXPRESSION_ATTR_RE = re.compile( r""" (?P\$?\w+)(?:\[(?P\d+)\]|\{["'](?P[^"']+)["']\})?$ - """.strip()) + """.strip() + ) VALID_REQUEST_KWORDS = frozenset( - ("value", "field", "value_is_file", "input_parameter", "comment")) + ("value", "field", "value_is_file", "input_parameter", "comment") + ) # TODO(dovs): make the schema a required param. def __init__(self, method: wrappers.Method, api_schema=None): @@ -234,7 +257,7 @@ def __init__(self, method: wrappers.Method, api_schema=None): self.request_type_ = method.input response_type = method.output if method.paged_result_field: - response_type = method.paged_result_field + response_type = method.paged_result_field.message elif method.lro: response_type = method.lro.response_type @@ -258,21 +281,30 @@ def __init__(self, method: wrappers.Method, api_schema=None): ) @staticmethod - def preprocess_sample(sample, api_schema): + def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): """Modify a sample to set default or missing fields. Args: sample (Any): A definition for a single sample generated from parsed yaml. api_schema (api.API): The schema that defines the API to which the sample belongs. + rpc (wrappers.Method): The rpc method used in the sample. 
""" sample["package_name"] = api_schema.naming.warehouse_package_name - sample.setdefault("response", [{"print": ["%s", "$resp"]}]) + sample["module_name"] = api_schema.naming.versioned_module_name + sample["module_namespace"] = api_schema.naming.module_namespace + + sample["client_name"] = api_schema.services[sample["service"]].client_name + # the type of the request object passed to the rpc e.g, `ListRequest` + sample["request_type"] = rpc.input.ident.name + + # If no response was specified in the config + # Add reasonable defaults depending on the type of the sample + if not rpc.void: + sample.setdefault("response", [{"print": ["%s", "$resp"]}]) @utils.cached_property def flattenable_fields(self) -> FrozenSet[str]: - return frozenset( - field.name for field in self.method.flattened_fields.values() - ) + return frozenset(field.name for field in self.method.flattened_fields.values()) def var_field(self, var_name: str) -> Optional[wrappers.Field]: return self.var_defs_.get(var_name) @@ -299,7 +331,9 @@ def _normal_request_setup(self, base_param_to_attrs, val, request, field): if not attr: raise types.BadAttributeLookup( "Method request type {} has no attribute: '{}'".format( - self.request_type_, attr_name)) + self.request_type_, attr_name + ) + ) if attr.message: base = attr.message @@ -309,20 +343,23 @@ def _normal_request_setup(self, base_param_to_attrs, val, request, field): witness = any(e.name == val for e in attr.enum.values) if not witness: raise types.InvalidEnumVariant( - "Invalid variant for enum {}: '{}'".format(attr, val)) + "Invalid variant for enum {}: '{}'".format(attr, val) + ) break elif attr.is_primitive: # Only valid if this is the last attribute in the chain. break else: raise TypeError( - f"Could not handle attribute '{attr_name}' of type: {attr.type}") + f"Could not handle attribute '{attr_name}' of type: {attr.type}" + ) if i != len(attr_chain) - 1: # We broke out of the loop after processing an enum or a primitive. 
extra_attrs = ".".join(attr_chain[i:]) raise types.NonTerminalPrimitiveOrEnum( - f"Attempted to reference attributes of enum value or primitive type: '{extra_attrs}'") + f"Attempted to reference attributes of enum value or primitive type: '{extra_attrs}'" + ) if len(attr_chain) > 1: request["field"] = ".".join(attr_chain[1:]) @@ -333,7 +370,9 @@ def _normal_request_setup(self, base_param_to_attrs, val, request, field): if attr_chain[0] in base_param_to_attrs: raise types.InvalidRequestSetup( "Duplicated top level field in request block: '{}'".format( - attr_chain[0])) + attr_chain[0] + ) + ) del request["field"] if isinstance(request["value"], str): @@ -351,9 +390,9 @@ def _normal_request_setup(self, base_param_to_attrs, val, request, field): # so disable it for the AttributeRequestSetup ctor call. return attr_chain[0], AttributeRequestSetup(**request) # type: ignore - def validate_and_transform_request(self, - calling_form: types.CallingForm, - request: List[Mapping[str, str]]) -> FullRequest: + def validate_and_transform_request( + self, calling_form: types.CallingForm, request: List[Mapping[str, str]] + ) -> FullRequest: """Validates and transforms the "request" block from a sample config. 
In the initial request, each dict has a "field" key that maps to a dotted @@ -427,61 +466,76 @@ def validate_and_transform_request(self, """ base_param_to_attrs: Dict[str, - RequestEntry] = defaultdict(RequestEntry) + RequestEntry] = defaultdict(RequestEntry) for r in request: r_dup = dict(r) val = r_dup.get("value") if not val: raise types.InvalidRequestSetup( - "Missing keyword in request entry: 'value'") + "Missing keyword in request entry: 'value'" + ) field = r_dup.get("field") if not field: raise types.InvalidRequestSetup( - "Missing keyword in request entry: 'field'") + "Missing keyword in request entry: 'field'" + ) spurious_kwords = set(r_dup.keys()) - self.VALID_REQUEST_KWORDS if spurious_kwords: raise types.InvalidRequestSetup( "Spurious keyword(s) in request entry: {}".format( - ", ".join(f"'{kword}'" for kword in spurious_kwords))) + ", ".join(f"'{kword}'" for kword in spurious_kwords) + ) + ) input_parameter = r_dup.get("input_parameter") if input_parameter: - self._handle_lvalue(input_parameter, wrappers.Field( - field_pb=descriptor_pb2.FieldDescriptorProto())) + self._handle_lvalue( + input_parameter, + wrappers.Field( + field_pb=descriptor_pb2.FieldDescriptorProto()), + ) # The percentage sign is used for setting up resource based requests - percent_idx = field.find('%') + percent_idx = field.find("%") if percent_idx == -1: base_param, attr = self._normal_request_setup( - base_param_to_attrs, val, r_dup, field) + base_param_to_attrs, val, r_dup, field + ) request_entry = base_param_to_attrs.get(base_param) if request_entry and request_entry.is_resource_request: raise types.ResourceRequestMismatch( - f"Request setup mismatch for base: {base_param}") + f"Request setup mismatch for base: {base_param}" + ) base_param_to_attrs[base_param].attrs.append(attr) else: # It's a resource based request. 
- base_param, resource_attr = (field[:percent_idx], - field[percent_idx + 1:]) + base_param, resource_attr = ( + field[:percent_idx], + field[percent_idx + 1:], + ) request_entry = base_param_to_attrs.get(base_param) if request_entry and not request_entry.is_resource_request: raise types.ResourceRequestMismatch( - f"Request setup mismatch for base: {base_param}") + f"Request setup mismatch for base: {base_param}" + ) if not self.request_type_.fields.get(base_param): raise types.BadAttributeLookup( "Method request type {} has no attribute: '{}'".format( - self.request_type_, base_param)) + self.request_type_, base_param + ) + ) r_dup["field"] = resource_attr request_entry = base_param_to_attrs[base_param] request_entry.is_resource_request = True request_entry.attrs.append( - AttributeRequestSetup(**r_dup)) # type: ignore + AttributeRequestSetup(**r_dup) # type: ignore + ) client_streaming_forms = { types.CallingForm.RequestStreamingClient, @@ -490,7 +544,8 @@ def validate_and_transform_request(self, if len(base_param_to_attrs) > 1 and calling_form in client_streaming_forms: raise types.InvalidRequestSetup( - "Too many base parameters for client side streaming form") + "Too many base parameters for client side streaming form" + ) # We can only flatten a collection of request parameters if they're a # subset of the flattened fields of the method. 
@@ -502,11 +557,11 @@ def validate_and_transform_request(self, self.api_schema_, key, val.attrs, - val.is_resource_request + val.is_resource_request, ) for key, val in base_param_to_attrs.items() ], - flattenable=flattenable + flattenable=False, ) def validate_response(self, response): @@ -535,7 +590,8 @@ def validate_response(self, response): validater = self.STATEMENT_DISPATCH_TABLE.get(keyword) if not validater: raise types.InvalidStatement( - "Invalid statement keyword: {}".format(keyword)) + "Invalid statement keyword: {}".format(keyword) + ) validater(self, body) @@ -558,34 +614,45 @@ def validate_expression(self, exp: str) -> wrappers.Field: Returns: wrappers.Field: The final field in the chain. """ + def validate_recursively(expression, scope, depth=0): first_dot = expression.find(".") base = expression[:first_dot] if first_dot > 0 else expression match = self.EXPRESSION_ATTR_RE.match(base) if not match: raise types.BadAttributeLookup( - f"Badly formed attribute expression: {expression}") + f"Badly formed attribute expression: {expression}" + ) - name, idxed, mapped = (match.groupdict()["attr_name"], - bool(match.groupdict()["index"]), - bool(match.groupdict()["key"])) + name, idxed, mapped = ( + match.groupdict()["attr_name"], + bool(match.groupdict()["index"]), + bool(match.groupdict()["key"]), + ) field = scope.get(name) + if not field: - exception_class = (types.BadAttributeLookup if depth else - types.UndefinedVariableReference) + exception_class = ( + types.BadAttributeLookup + if depth + else types.UndefinedVariableReference + ) raise exception_class(f"No such variable or attribute: {name}") # Invalid input if (idxed or mapped) and not field.repeated: raise types.BadAttributeLookup( - f"Collection lookup on non-repeated field: {base}") + f"Collection lookup on non-repeated field: {base}" + ) # Can only ignore indexing or mapping in an indexed (or mapped) field # if it is the terminal point in the expression. 
if field.repeated and not (idxed or mapped) and first_dot != -1: raise types.BadAttributeLookup( - ("Accessing attribute on a non-terminal collection without" - f"indexing into the collection: {base}") + ( + "Accessing attribute on a non-terminal collection without" + f"indexing into the collection: {base}" + ) ) message = field.message @@ -601,12 +668,14 @@ def validate_recursively(expression, scope, depth=0): value_field = message.fields.get("value") if not value_field: raise types.BadAttributeLookup( - f"Mapped attribute has no value field: {base}") + f"Mapped attribute has no value field: {base}" + ) value_message = value_field.message if not value_message: raise types.BadAttributeLookup( - f"Mapped value field is not a message: {base}") + f"Mapped value field is not a message: {base}" + ) if first_dot != -1: scope = value_message.fields @@ -618,11 +687,10 @@ def validate_recursively(expression, scope, depth=0): # Enums and primitives are only allowed at the tail of an expression. if not message: raise types.BadAttributeLookup( - f"Non-terminal attribute is not a message: {base}") + f"Non-terminal attribute is not a message: {base}" + ) - return validate_recursively(expression[first_dot + 1:], - scope, - depth + 1) + return validate_recursively(expression[first_dot + 1:], scope, depth + 1) return validate_recursively(exp, self.var_defs_) @@ -664,9 +732,7 @@ def _validate_format(self, body: List[str]): if num_prints != len(body) - 1: raise types.MismatchedFormatSpecifier( "Expected {} expresssions in format string '{}' but found {}".format( - num_prints, - fmt_str, - len(body) - 1 + num_prints, fmt_str, len(body) - 1 ) ) @@ -714,14 +780,16 @@ def _validate_write_file(self, body): fname_fmt = body.get("filename") if not fname_fmt: raise types.InvalidStatement( - "Missing key in 'write_file' statement: 'filename'") + "Missing key in 'write_file' statement: 'filename'" + ) self._validate_format(fname_fmt) contents_var = body.get("contents") if not contents_var: 
raise types.InvalidStatement( - "Missing key in 'write_file' statement: 'contents'") + "Missing key in 'write_file' statement: 'contents'" + ) self.validate_expression(contents_var) @@ -775,13 +843,14 @@ def _validate_loop(self, loop): # TODO: resolve the implicit $resp dilemma # if collection_name.startswith("."): # collection_name = "$resp" + collection_name - collection_field = self.validate_expression( - loop[self.COLL_KWORD]) + collection_field = self.validate_expression(loop[self.COLL_KWORD]) if not collection_field.repeated: raise types.BadLoop( "Tried to use a non-repeated field as a collection: {}".format( - tokens[-1])) + tokens[-1] + ) + ) var = loop[self.VAR_KWORD] # The collection_field is repeated, @@ -792,8 +861,8 @@ def _validate_loop(self, loop): field_pb=collection_field.field_pb, message=collection_field.message, enum=collection_field.enum, - meta=collection_field.meta - ) + meta=collection_field.meta, + ), ) elif map_args <= segments: @@ -817,7 +886,8 @@ def _validate_loop(self, loop): if not (key or val): raise types.BadLoop( - "Need at least one of 'key' or 'value' in a map loop") + "Need at least one of 'key' or 'value' in a map loop" + ) else: raise types.BadLoop("Unexpected loop form: {}".format(segments)) @@ -838,17 +908,70 @@ def _validate_loop(self, loop): } -def generate_sample( - sample, - api_schema, - sample_template: jinja2.Template -) -> str: +def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str, Any], None, None]: + """Parse a handwritten sample spec""" + + STANDALONE_TYPE = "standalone" + + for config_fpath in sample_configs: + with open(config_fpath) as f: + configs = yaml.safe_load_all(f.read()) + + for cfg in configs: + valid = is_valid_sample_cfg(cfg) + if not valid: + raise types.InvalidConfig( + "Sample config is invalid", valid) + for spec in cfg.get("samples", []): + # If unspecified, assume a sample config describes a standalone. 
+ # If sample_types are specified, standalone samples must be + # explicitly enabled. + if STANDALONE_TYPE in spec.get("sample_type", [STANDALONE_TYPE]): + yield spec + + +def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, Any], None, None]: + """Given an API, generate basic sample specs for each method. + + Args: + api_schema (api.API): The schema that defines the API. + + Yields: + Dict[str, Any]: A sample spec. + """ + + gapic_metadata = api_schema.gapic_metadata(opts) + + for service_name, service in gapic_metadata.services.items(): + api_short_name = api_schema.services[f"{api_schema.naming.proto_package}.{service_name}"].shortname + for transport_type, client in service.clients.items(): + if transport_type == "grpc-async": + # TODO(busunkim): Enable generation of async samples + continue + for rpc_name, method_list in client.rpcs.items(): + # Region Tag Format: + # [{START|END} ${apishortname}_generated_${api}_${apiVersion}_${serviceName}_${rpcName}_{sync|async}_${overloadDisambiguation}] + region_tag = f"{api_short_name}_generated_{api_schema.naming.versioned_module_name}_{service_name}_{rpc_name}_{transport_type}" + spec = { + "sample_type": "standalone", + "rpc": rpc_name, + "request": [], + # response is populated in `preprocess_sample` + "service": f"{api_schema.naming.proto_package}.{service_name}", + "region_tag": region_tag, + "description": f"Snippet for {utils.to_snake_case(rpc_name)}" + } + + yield spec + + +def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str: """Generate a standalone, runnable sample. Writing the rendered output is left for the caller. Args: - sample (Any): A definition for a single sample generated from parsed yaml. + sample (Any): A definition for a single sample. api_schema (api.API): The schema that defines the API to which the sample belongs. sample_template (jinja2.Template): The template representing a generic sample. 
@@ -871,21 +994,19 @@ def generate_sample( calling_form = types.CallingForm.method_default(rpc) v = Validator(rpc) - # Tweak some small aspects of the sample to set sane defaults for optional + # Tweak some small aspects of the sample to set defaults for optional # fields, add fields that are required for the template, and so forth. - v.preprocess_sample(sample, api_schema) - sample["request"] = v.validate_and_transform_request(calling_form, - sample["request"]) + v.preprocess_sample(sample, api_schema, rpc) + sample["request"] = v.validate_and_transform_request( + calling_form, sample["request"] + ) v.validate_response(sample["response"]) return sample_template.render( sample=sample, - imports=[ - "from google import auth", - "from google.auth import credentials", - ], + imports=[], calling_form=calling_form, calling_form_enum=types.CallingForm, - api=api_schema, - service=service, + trim_blocks=True, + lstrip_blocks=True, ) diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index dfd89c80985c..48a086f953ab 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -123,6 +123,3 @@ def method_default(cls, m): return cls.RequestStreamingServer return cls.Request - - def __str__(self): - return to_snake_case(super().__str__().split(".")[-1]) diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index a0d9892e9b3c..7cf0a14a39c9 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -47,7 +47,7 @@ def is_valid_sample_cfg( min_version: Tuple[int, int, int] = MIN_SCHEMA_VERSION, config_type: str = VALID_CONFIG_TYPE, ) -> bool: - """Predicate that takes a parsed yaml doc checks if it is a valid sampel config. 
+ """Predicate that takes a parsed yaml doc checks if it is a valid sample config. Arguments: doc (Any): The yaml document to be assessed diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 442528b73696..962407aa9cb0 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1089,6 +1089,19 @@ def host(self) -> str: return self.options.Extensions[client_pb2.default_host] return '' + @property + def shortname(self) -> str: + """Return the API short name. DRIFT uses this to identify + APIs. + + Returns: + str: The api shortname. + """ + # Get the shortname from the host + # Real APIs are expected to have format: + # "{api_shortname}.googleapis.com" + return self.host.split(".")[0] + @property def oauth_scopes(self) -> Sequence[str]: """Return a sequence of oauth scopes, if applicable. diff --git a/packages/gapic-generator/gapic/templates/_base.py.j2 b/packages/gapic-generator/gapic/templates/_base.py.j2 index 133cf7aa5811..35d3c9100f30 100644 --- a/packages/gapic-generator/gapic/templates/_base.py.j2 +++ b/packages/gapic-generator/gapic/templates/_base.py.j2 @@ -2,5 +2,5 @@ {% block license %} {% include "_license.j2" %} {% endblock %} -{%- block content %} +{% block content %} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 index 98ba64f60f91..442a48cab691 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 @@ -3,6 +3,6 @@ Services for {{ api.naming.long_name }} {{ api.naming.version }} API .. 
toctree:: :maxdepth: 2 - {% for service in api.services.values()|sort(attribute='name') -%} + {% for service in api.services.values()|sort(attribute='name') %} {{ service.name|snake_case }} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 4702d596b694..2959157a3069 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -13,23 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. #} - -{# -A careful reader may comment that there is duplication of effort -between the python verification step and the dispatch/rendering here. -There is a little, but not enough for it to be important because -1) Other python artifacts (client libraries, unit tests, and so forth) - are generated using templates, so doing the same for generated samples is consistent. -2) Using jinja for anything requiring real logic or data structures is a bad idea. -#} - {# response handling macros #} {% macro sample_header(sample, calling_form) %} -# DO NOT EDIT! This is a generated sample ("{{ calling_form }}", "{{ sample.id }}") +# Generated code. DO NOT EDIT! # +# Snippet for {{ sample.rpc }} +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ # To install the latest published package dependency, execute the following: -# pip3 install {{ sample.package_name }} +# python3 -m pip install {{ sample.package_name }} {% endmacro %} {% macro print_string_formatting(string_list) %} @@ -147,7 +141,14 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endif %} {% endmacro %} -{% macro render_request_setup(full_request) %} + +{% macro render_client_setup(module_name, client_name) %} +# Create a client +client = {{ module_name }}.{{ client_name }}() +{% endmacro %}} + +{% macro render_request_setup(full_request, module_name, request_type) %} +# Initialize request argument(s) {% for parameter_block in full_request.request_list if parameter_block.body %} {% if parameter_block.pattern %} {# This is a resource-name patterned lookup parameter #} @@ -165,15 +166,16 @@ with open({{ attr.input_parameter }}, "rb") as f: {% endif %} {% endfor %} {% if not full_request.flattenable %} -request = { +request = {{ module_name }}.{{ request_type }}( {% for parameter in full_request.request_list %} - '{{ parameter.base }}': {{ parameter.base if parameter.body else parameter.single }}, -{% endfor %}} + {{ parameter.base }}={{ parameter.base if parameter.body else parameter.single }}, +{% endfor %} +) {% endif %} {% endmacro %} {% macro render_request_params(request) %} -{# Provide the top level parameters last and as keyword params #} + {# Provide the top level parameters last and as keyword params #} {% with params = [] %} {% for r in request if r.body %} {% do params.append(r.base) %} @@ -186,13 +188,13 @@ request = { {% endmacro %} {% macro render_request_params_unary(request) %} -{# Provide the top level parameters last and as keyword params #} + {# Provide the top level parameters last and as keyword params #} {% if request.flattenable %} {% with params = [] %} {% for r in request.request_list %} {% do params.append("%s=%s"|format(r.base, r.single.value if r.single else r.base)) %} {% endfor %} -{{ params|join(", ") }} 
+{{ params|join(", ") -}} {% endwith %} {% else %} request=request @@ -214,8 +216,11 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request {# Setting up the method invocation is the responsibility of the caller: #} {# it's just easier to set up client side streaming and other things from outside this macro. #} {% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, response_statements ) %} +# Make the request {% if calling_form == calling_form_enum.Request %} response = {{ method_invocation_text|trim }} + +# Handle response {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 4cdb81e47ca1..79614d71a399 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -12,7 +12,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-#} + #} {% extends "_base.py.j2" %} {% block content %} @@ -27,23 +27,20 @@ {% for import_statement in imports %} {{ import_statement }} {% endfor %} -from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} +from {{ sample.module_namespace|join(".") }} import {{ sample.module_name }} + {# also need calling form #} def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): """{{ sample.description }}""" - client = {{ service.client_name }}( - credentials=credentials.AnonymousCredentials(), - transport="grpc", - ) - - {{ frags.render_request_setup(sample.request)|indent }} -{% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} - {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response, )|indent }} -{% endwith %} + {{ frags.render_client_setup(sample.module_name, sample.client_name)|indent }} + {{ frags.render_request_setup(sample.request, sample.module_name, sample.request_type)|indent }} + {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response)|indent -}} + {% endwith %} # [END {{ sample.id }}] - -{{ frags.render_main_block(sample.rpc, sample.request) -}} +{# TODO: Enable main block (or decide to remove main block from python sample) #} +{# {{ frags.render_main_block(sample.rpc, sample.request) }} #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 87b5ef2a008f..c2d08a480764 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -64,11 +64,11 @@ def mypy(session): session.run( 'mypy', 
'--explicit-package-bases', - {%- if api.naming.module_namespace %} + {% if api.naming.module_namespace %} '{{ api.naming.module_namespace[0] }}', - {%- else %} + {% else %} '{{ api.naming.versioned_module_name }}', - {%- endif %} + {% endif %} ) diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index 826c7734bc3b..d7bbe2473df6 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -37,6 +37,7 @@ class Options: warehouse_package_name: str = '' retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) + autogen_snippets: bool = False templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) lazy_import: bool = False old_naming: bool = False @@ -54,6 +55,7 @@ class Options: 'old-naming', # TODO(dovs): Come up with a better comment 'retry-config', # takes a path 'samples', # output dir + 'autogen-snippets', # produce auto-generated snippets # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) 'transport', 'warehouse-package-name', # change the package name on PyPI @@ -141,6 +143,7 @@ def tweak_path(p): for s in sample_paths for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), + autogen_snippets=bool(opts.pop("autogen-snippets", False)), templates=tuple(path.expanduser(i) for i in templates), lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index e17277a48761..0924ef59998a 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -13,13 +13,16 @@ # limitations under the License. 
from __future__ import absolute_import +from pathlib import Path import os +import sys import tempfile import typing import nox # type: ignore from contextlib import contextmanager from os import path +import shutil showcase_version = "0.11.0" @@ -74,6 +77,9 @@ def showcase_library( # Install gapic-generator-python session.install("-e", ".") + # Install grpcio-tools for protoc + session.install("grpcio-tools") + # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: # Download the Showcase descriptor. @@ -96,7 +102,9 @@ def showcase_library( opts = "--python_gapic_opt=" opts += ",".join(other_opts + (f"{template_opt}",)) cmd_tup = ( - f"protoc", + "python", + "-m", + "grpc_tools.protoc", f"--experimental_allow_proto3_optional", f"--descriptor_set_in={tmp_dir}{path.sep}showcase.desc", opts, @@ -205,11 +213,11 @@ def showcase_unit( with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) - + # Unit tests are run twice with different dependencies to exercise # all code paths. # TODO(busunkim): remove when default templates require google-auth>=1.25.0 - + # 1. Run tests at lower bound of dependencies session.install("nox") session.run("nox", "-s", "update_lower_bounds") @@ -217,7 +225,7 @@ def showcase_unit( # Some code paths require an older version of google-auth. # google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. - session.install("google-auth==1.21.1") + session.install("google-auth==1.21.1") run_showcase_unit_tests(session, fail_under=0) # 2. Run the tests again with latest version of dependencies @@ -241,7 +249,7 @@ def showcase_unit_add_iam_methods(session): # Unit tests are run twice with different dependencies to exercise # all code paths. # TODO(busunkim): remove when default templates require google-auth>=1.25.0 - + # 1. 
Run tests at lower bound of dependencies session.install("nox") session.run("nox", "-s", "update_lower_bounds") @@ -249,7 +257,7 @@ def showcase_unit_add_iam_methods(session): # Some code paths require an older version of google-auth. # google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. - session.install("google-auth==1.21.1") + session.install("google-auth==1.21.1") run_showcase_unit_tests(session, fail_under=0) # 2. Run the tests again with latest version of dependencies @@ -279,6 +287,34 @@ def showcase_mypy_alternative_templates(session): showcase_mypy(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) +@nox.session(python="3.8") +def snippetgen(session): + # Clone googleapis/api-common-protos which are referenced by the snippet + # protos + api_common_protos = "api-common-protos" + try: + session.run("git", "-C", api_common_protos, "pull", external=True) + except nox.command.CommandFailed: + session.run( + "git", + "clone", + "--single-branch", + f"https://github.com/googleapis/{api_common_protos}", + external=True, + ) + + # Install gapic-generator-python + session.install("-e", ".") + + session.install("grpcio-tools", "mock", "pytest", "pytest-asyncio") + + session.run( + "py.test", + "--quiet", + "tests/snippetgen" + ) + + @nox.session(python="3.8") def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py new file mode 100644 index 000000000000..1ea032b5d9f1 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_ListResources_grpc] +from animalia import mollusca_v1 + + +def sample_list_resources(): + """Snippet for list_resources""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.ListResourcesRequest( + ) + + # Make the request + page_result = client.list_resources(request=request) + for response in page_result: + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_ListResources_grpc] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py new file mode 100644 index 000000000000..1c9be7560f7d --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodBidiStreaming +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_grpc] +from animalia import mollusca_v1 + + +def sample_method_bidi_streaming(): + """Snippet for method_bidi_streaming""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + stream = client.method_bidi_streaming([]) + for response in stream: + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_grpc] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py new file mode 100644 index 000000000000..50974d82b3bc --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodLroSignatures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_grpc] +from animalia import mollusca_v1 + + +def sample_method_lro_signatures(): + """Snippet for method_lro_signatures""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + operation = client.method_lro_signatures(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_grpc] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py new file mode 100644 index 000000000000..9c6192b43fb1 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodOneSignature +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_grpc] +from animalia import mollusca_v1 + + +def sample_method_one_signature(): + """Snippet for method_one_signature""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + response = client.method_one_signature(request=request) + + # Handle response + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_grpc] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py new file mode 100644 index 000000000000..13913a0ed391 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodServerStreaming +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_grpc] +from animalia import mollusca_v1 + + +def sample_method_server_streaming(): + """Snippet for method_server_streaming""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + stream = client.method_server_streaming(request=request) + for response in stream: + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_grpc] diff --git a/packages/gapic-generator/tests/snippetgen/snippets.proto b/packages/gapic-generator/tests/snippetgen/snippets.proto new file mode 100644 index 000000000000..6aaa404bcf9d --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/snippets.proto @@ -0,0 +1,106 @@ +// -*- coding: utf-8 -*- +// Copyright 2021 Google LLC + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at + +// http://www.apache.org/licenses/LICENSE-2.0 + +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +syntax = "proto3"; + +package animalia.mollusca.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/wrappers.proto"; + +service Snippets { + option (google.api.default_host) = "mollusca.example.com"; + + rpc MethodOneSignature(SignatureRequest) returns(Response) { + option (google.api.method_signature) = "a_string,an_int,a_bool"; + } + + rpc MethodLroSignatures(SignatureRequest) returns(google.longrunning.Operation) { + option (google.api.method_signature) = "a_string,an_int,a_bool"; + option (google.longrunning.operation_info) = { + response_type: "LroResponse" + metadata_type: "LroMetadata" + }; + } + + rpc ListResources(ListResourcesRequest) returns (ListResourcesResponse) { + option (google.api.http) = { + get: "/v1/{parent=items/*}/resources" + }; + option (google.api.method_signature) = "parent"; + } + + rpc MethodServerStreaming(SignatureRequest) returns(stream Response) { + option (google.api.method_signature) = "a_string,a_bool"; + option (google.api.method_signature) = ""; + } + + rpc MethodBidiStreaming(stream SignatureRequest) returns (stream Response); +} + +message ListResourcesRequest { + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "snippets.example.com/Resource" + }]; + + int32 page_size = 2; + string page_token = 3; +} + +message ListResourcesResponse { + repeated Resource resources = 
1;
+  string next_page_token = 2;
+}
+
+message ParentResource {
+  option (google.api.resource) = {
+    type: "snippets.example.com/ParentResource"
+    pattern: "items/{item_id}"
+  };
+  string name = 1;
+}
+
+message Resource {
+  option (google.api.resource) = {
+    type: "snippets.example.com/Resource"
+    pattern: "items/{item_id}/parts/{part_id}"
+  };
+  string name = 1;
+}
+
+
+message SignatureRequest {
+  string a_string = 1;
+  int32 an_int = 2;
+  bool a_bool = 3;
+  map<int32, string> map_int_string = 4;
+}
+
+message Response {
+}
+
+message LroResponse {
+}
+
+message LroMetadata {
+}
+
diff --git a/packages/gapic-generator/tests/snippetgen/test_snippetgen.py b/packages/gapic-generator/tests/snippetgen/test_snippetgen.py
new file mode 100644
index 000000000000..389e7c5334c5
--- /dev/null
+++ b/packages/gapic-generator/tests/snippetgen/test_snippetgen.py
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +from pathlib import Path +import shutil +import subprocess +import sys +import tempfile + +import pytest + + +CURRENT_DIRECTORY = Path(__file__).parent.absolute() +REPO_ROOT = CURRENT_DIRECTORY.parent.parent + +GOLDEN_SNIPPETS = CURRENT_DIRECTORY / "goldens" +GENERATED_SNIPPETS = CURRENT_DIRECTORY / ".test_output" + + +def setup_module(module): + """Run protoc on modules and copy the output samples into .test_output""" + + # Delete any existing content in .test_output + # We intentionally preserve this directory between test runs to make + # it easier to inspect generated samples. + shutil.rmtree(GENERATED_SNIPPETS, ignore_errors=True) + + protos = {str(p) for p in CURRENT_DIRECTORY.glob("*.proto")} + api_common_protos = Path(REPO_ROOT / "api-common-protos").absolute() + + with tempfile.TemporaryDirectory() as tmp_dir: + # Write out a client library and samples + subprocess.check_output( + [ + "python", + "-m", + "grpc_tools.protoc", + f"--experimental_allow_proto3_optional", + "--python_gapic_opt=autogen-snippets", + f"--proto_path={CURRENT_DIRECTORY}", + f"--proto_path={api_common_protos}", + f"--python_gapic_out={tmp_dir}", + *protos, + ] + ) + + # We only care about the auto-generated samples + generated_samples = Path(tmp_dir) / "samples" / "generated_samples" + + shutil.copytree(generated_samples, GENERATED_SNIPPETS) + + +def test_files_exist(): + # The golden directory and .test_output directory + # should have exactly the same number of entries + golden_files = {p.name for p in GOLDEN_SNIPPETS.glob("*.py")} + test_output_files = {p.name for p in GENERATED_SNIPPETS.glob("*.py")} + + assert golden_files == test_output_files + + +def test_goldens(): + # Loop through the goldens directory and assert that each file + # exists in output directory and has the same code. 
+ golden_files = GOLDEN_SNIPPETS.glob("*.py") + for golden in golden_files: + output_file = GENERATED_SNIPPETS / golden.name + assert output_file.exists() + assert golden.read_text() == output_file.read_text() diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 7c6d8500767f..d068250e9729 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -448,33 +448,36 @@ def test_samplegen_config_to_output_files( expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( - name="samples/squid_sample.py", content="\n",), + name="samples/generated_samples/squid_sample.py", content="\n",), CodeGeneratorResponse.File( - name="samples/clam_sample.py", content="\n",), - CodeGeneratorResponse.File( - name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", - content=dedent( - """\ - --- - type: manifest/samples - schema_version: 3 - python: &python - environment: python - bin: python3 - base_path: samples - invocation: '{bin} {path} @args' - samples: - - <<: *python - sample: squid_sample - path: '{base_path}/squid_sample.py' - region_tag: humboldt_tag - - <<: *python - sample: clam_sample - path: '{base_path}/clam_sample.py' - region_tag: clam_sample - """ - ), - ), + name="samples/generated_samples/clam_sample.py", content="\n",), + # TODO(busunkim): Re-enable manifest generation once metadata + # format has been formalized. 
+ # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue + # CodeGeneratorResponse.File( + # name="samples/generated_samples/mollusc.v6.python.21120601.131313.manifest.yaml", + # content=dedent( + # """\ + # --- + # type: manifest/samples + # schema_version: 3 + # python: &python + # environment: python + # bin: python3 + # base_path: samples + # invocation: '{bin} {path} @args' + # samples: + # - <<: *python + # sample: squid_sample + # path: '{base_path}/squid_sample.py' + # region_tag: humboldt_tag + # - <<: *python + # sample: clam_sample + # path: '{base_path}/clam_sample.py' + # region_tag: clam_sample + # """ + # ), + # ), ] ) expected_response.supported_features |= ( @@ -484,6 +487,31 @@ def test_samplegen_config_to_output_files( assert actual_response == expected_response +@mock.patch( + "gapic.samplegen.samplegen.generate_sample_specs", return_value=[] +) +@mock.patch( + "gapic.samplegen.samplegen.generate_sample", return_value="", +) +def test_generate_autogen_samples(mock_generate_sample, mock_generate_specs): + opts = Options.build("autogen-snippets") + g = generator.Generator(opts) + # Need to have the sample template visible to the generator. 
+ g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) + + api_schema = make_api(naming=naming.NewNaming( + name="Mollusc", version="v6")) + + actual_response = g.get_response(api_schema, opts=opts) + + # Just check that generate_sample_specs was called + # Correctness of the spec is tested in samplegen unit tests + mock_generate_specs.assert_called_once_with( + api_schema, + opts=opts + ) + + @mock.patch( "gapic.samplegen.samplegen.generate_sample", return_value="", ) @@ -534,40 +562,43 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): expected_response = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( - name="samples/squid_sample_91a465c6.py", content="\n", + name="samples/generated_samples/squid_sample_91a465c6.py", content="\n", ), CodeGeneratorResponse.File( - name="samples/squid_sample_55051b38.py", content="\n", + name="samples/generated_samples/squid_sample_55051b38.py", content="\n", ), - CodeGeneratorResponse.File(name="samples/157884ee.py", + CodeGeneratorResponse.File(name="samples/generated_samples/157884ee.py", content="\n",), - CodeGeneratorResponse.File( - name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", - content=dedent( - """\ - --- - type: manifest/samples - schema_version: 3 - python: &python - environment: python - bin: python3 - base_path: samples - invocation: '{bin} {path} @args' - samples: - - <<: *python - sample: squid_sample_91a465c6 - path: '{base_path}/squid_sample_91a465c6.py' - region_tag: humboldt_tag - - <<: *python - sample: squid_sample_55051b38 - path: '{base_path}/squid_sample_55051b38.py' - region_tag: squid_sample - - <<: *python - sample: 157884ee - path: '{base_path}/157884ee.py' - """ - ), - ), + # TODO(busunkim): Re-enable manifest generation once metadata + # format has been formalized. 
+ # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue + # CodeGeneratorResponse.File( + # name="samples/generated_samples/mollusc.v6.python.21120601.131313.manifest.yaml", + # content=dedent( + # """\ + # --- + # type: manifest/samples + # schema_version: 3 + # python: &python + # environment: python + # bin: python3 + # base_path: samples + # invocation: '{bin} {path} @args' + # samples: + # - <<: *python + # sample: squid_sample_91a465c6 + # path: '{base_path}/squid_sample_91a465c6.py' + # region_tag: humboldt_tag + # - <<: *python + # sample: squid_sample_55051b38 + # path: '{base_path}/squid_sample_55051b38.py' + # region_tag: squid_sample + # - <<: *python + # sample: 157884ee + # path: '{base_path}/157884ee.py' + # """ + # ), + # ), ] ) expected_response.supported_features |= ( @@ -675,35 +706,38 @@ def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): expected = CodeGeneratorResponse( file=[ CodeGeneratorResponse.File( - name="samples/squid_sample.py", content="\n",), - CodeGeneratorResponse.File( - name="samples/whelk_sample.py", content="\n",), + name="samples/generated_samples/squid_sample.py", content="\n",), CodeGeneratorResponse.File( - name="samples/octopus_sample.py", content="\n",), + name="samples/generated_samples/whelk_sample.py", content="\n",), CodeGeneratorResponse.File( - name="samples/mollusc.v6.python.21120601.131313.manifest.yaml", - content=dedent( - """ --- - type: manifest/samples - schema_version: 3 - python: &python - environment: python - bin: python3 - base_path: samples - invocation: \'{bin} {path} @args\' - samples: - - <<: *python - sample: squid_sample - path: \'{base_path}/squid_sample.py\' - - <<: *python - sample: whelk_sample - path: \'{base_path}/whelk_sample.py\' - - <<: *python - sample: octopus_sample - path: \'{base_path}/octopus_sample.py\' - """ - ), - ), + name="samples/generated_samples/octopus_sample.py", content="\n",), + # 
TODO(busunkim): Re-enable manifest generation once metadata + # format has been formalized. + # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue + # CodeGeneratorResponse.File( + # name="samples/generated_samples/mollusc.v6.python.21120601.131313.manifest.yaml", + # content=dedent( + # """ --- + # type: manifest/samples + # schema_version: 3 + # python: &python + # environment: python + # bin: python3 + # base_path: samples + # invocation: \'{bin} {path} @args\' + # samples: + # - <<: *python + # sample: squid_sample + # path: \'{base_path}/squid_sample.py\' + # - <<: *python + # sample: whelk_sample + # path: \'{base_path}/whelk_sample.py\' + # - <<: *python + # sample: octopus_sample + # path: \'{base_path}/octopus_sample.py\' + # """ + # ), + # ), ] ) expected.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index e07350192317..0e9c8129d143 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -30,13 +30,17 @@ class DummyMethod: input: bool = False output: bool = False lro: bool = False + void: bool = False paged_result_field: bool = False client_streaming: bool = False server_streaming: bool = False flattened_fields: Dict[str, Any] = dataclasses.field(default_factory=dict) -DummyMessage = namedtuple("DummyMessage", ["fields", "type", "options"]) +DummyIdent = namedtuple("DummyIdent", ["name"]) + +DummyMessage = namedtuple( + "DummyMessage", ["fields", "type", "options", "ident"]) DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) DummyField = namedtuple("DummyField", @@ -50,20 +54,21 @@ class DummyMethod: "type"]) DummyField.__new__.__defaults__ = (False,) * len(DummyField._fields) -DummyService = namedtuple("DummyService", 
["methods"]) +DummyService = namedtuple("DummyService", ["methods", "client_name"]) DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming", "messages"]) DummyApiSchema.__new__.__defaults__ = (False,) * len(DummyApiSchema._fields) DummyNaming = namedtuple( - "DummyNaming", ["warehouse_package_name", "name", "version"]) + "DummyNaming", ["warehouse_package_name", "name", "version", "versioned_module_name", "module_namespace"]) DummyNaming.__new__.__defaults__ = (False,) * len(DummyNaming._fields) def message_factory(exp: str, repeated_iter=itertools.repeat(False), - enum: Optional[wrappers.EnumType] = None) -> DummyMessage: + enum: Optional[wrappers.EnumType] = None, + ) -> DummyMessage: # This mimics the structure of MessageType in the wrappers module: # A MessageType has a map from field names to Fields, # and a Field has an (optional) MessageType. @@ -81,7 +86,6 @@ def message_factory(exp: str, base.fields[attr_name] = (DummyField(message=field, repeated=repeated_field) if isinstance(field, DummyMessage) else DummyField(enum=field)) - return messages[0] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py new file mode 100644 index 000000000000..5310595732e3 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Classify +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install molluscs-v1-molluscclient + + +# [START mollusc_classify_sync] +from molluscs.v1 import molluscclient + + +def sample_classify(video, location): + """Determine the full taxonomy of input mollusc""" + + # Create a client + client = molluscclient.MolluscServiceClient() + + # Initialize request argument(s) + classify_target = {} + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target["video"] = f.read() + + # location = "New Zealand" + classify_target["location_annotation"] = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = client.classify(request=request) + + # Handle response + print("Mollusc is a \"{}\"".format(response.taxonomy)) + +# [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py new file mode 100644 index 000000000000..5310595732e3 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Classify +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install molluscs-v1-molluscclient + + +# [START mollusc_classify_sync] +from molluscs.v1 import molluscclient + + +def sample_classify(video, location): + """Determine the full taxonomy of input mollusc""" + + # Create a client + client = molluscclient.MolluscServiceClient() + + # Initialize request argument(s) + classify_target = {} + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target["video"] = f.read() + + # location = "New Zealand" + classify_target["location_annotation"] = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = client.classify(request=request) + + # Handle response + print("Mollusc is a \"{}\"".format(response.taxonomy)) + +# [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 0f1d98de7461..c3a2cb7d2cde 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -15,6 +15,7 @@ import jinja2 import os.path as path import pytest +from pathlib import Path import gapic.utils as utils @@ -22,7 +23,8 
@@ from gapic.samplegen_utils import (types, utils as gapic_utils) from gapic.schema import (naming, wrappers) -from common_types import (DummyField, DummyMessage, DummyMethod, DummyService, +from tests.unit.samplegen.common_types import (DummyField, DummyMessage, + DummyMethod, DummyService, DummyIdent, DummyApiSchema, DummyNaming, enum_factory, message_factory) from collections import namedtuple @@ -43,12 +45,19 @@ env.filters['coerce_response_name'] = gapic_utils.coerce_response_name +def golden_snippet(filename: str) -> str: + """Load the golden snippet with the name provided""" + snippet_path = Path(__file__).parent / "golden_snippets" / filename + return snippet_path.read_text() + + def test_generate_sample_basic(): # Note: the sample integration tests are needfully large # and difficult to eyeball parse. They are intended to be integration tests # that catch errors in behavior that is emergent from combining smaller features # or in features that are sufficiently small and trivial that it doesn't make sense # to have standalone tests. + input_type = DummyMessage( type="REQUEST TYPE", fields={ @@ -65,7 +74,8 @@ def test_generate_sample_basic(): }, ) ) - } + }, + ident=DummyIdent(name="molluscs.v1.ClassifyRequest") ) api_naming = naming.NewNaming( @@ -110,76 +120,7 @@ def test_generate_sample_basic(): env.get_template('examples/sample.py.j2') ) - sample_id = ("mollusc_classify_sync") - expected_str = '''# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# DO NOT EDIT! This is a generated sample ("request", "%s") -# -# To install the latest published package dependency, execute the following: -# pip3 install molluscs-v1-molluscclient - - -# [START %s] -from google import auth -from google.auth import credentials -from molluscs.v1.molluscclient.services.mollusc_service import MolluscServiceClient - -def sample_classify(video, location): - """Determine the full taxonomy of input mollusc""" - - client = MolluscServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", - ) - - classify_target = {} - # video = "path/to/mollusc/video.mkv" - with open(video, "rb") as f: - classify_target["video"] = f.read() - - # location = "New Zealand" - classify_target["location_annotation"] = location - - - response = client.classify(classify_target=classify_target) - print("Mollusc is a \\"{}\\"".format(response.taxonomy)) - - -# [END %s] - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--video", - type=str, - default="path/to/mollusc/video.mkv") - parser.add_argument("--location", - type=str, - default="New Zealand") - args = parser.parse_args() - - sample_classify(args.video, args.location) - - -if __name__ == "__main__": - main() -''' % (sample_id, sample_id, sample_id) - - assert sample_str == expected_str + assert sample_str == golden_snippet("sample_basic.py") def test_generate_sample_basic_unflattenable(): @@ -204,7 +145,8 @@ def test_generate_sample_basic_unflattenable(): }, ) ) - } + }, + ident=DummyIdent(name="molluscs.v1.ClassifyRequest") ) api_naming = naming.NewNaming( @@ -246,79 +188,7 @@ def test_generate_sample_basic_unflattenable(): env.get_template('examples/sample.py.j2') ) - sample_id = ("mollusc_classify_sync") - expected_str = '''# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 
(the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# DO NOT EDIT! This is a generated sample ("request", "%s") -# -# To install the latest published package dependency, execute the following: -# pip3 install molluscs-v1-molluscclient - - -# [START %s] -from google import auth -from google.auth import credentials -from molluscs.v1.molluscclient.services.mollusc_service import MolluscServiceClient - -def sample_classify(video, location): - """Determine the full taxonomy of input mollusc""" - - client = MolluscServiceClient( - credentials=credentials.AnonymousCredentials(), - transport="grpc", - ) - - classify_target = {} - # video = "path/to/mollusc/video.mkv" - with open(video, "rb") as f: - classify_target["video"] = f.read() - - # location = "New Zealand" - classify_target["location_annotation"] = location - - request = { - 'classify_target': classify_target, - } - - response = client.classify(request=request) - print("Mollusc is a \\"{}\\"".format(response.taxonomy)) - - -# [END %s] - -def main(): - import argparse - - parser = argparse.ArgumentParser() - parser.add_argument("--video", - type=str, - default="path/to/mollusc/video.mkv") - parser.add_argument("--location", - type=str, - default="New Zealand") - args = parser.parse_args() - - sample_classify(args.video, args.location) - - -if __name__ == "__main__": - main() -''' % (sample_id, sample_id, sample_id) - - assert sample_str == expected_str + assert sample_str == golden_snippet("sample_basic_unflattenable.py") def test_generate_sample_service_not_found(): @@ 
-335,7 +205,7 @@ def test_generate_sample_service_not_found(): def test_generate_sample_rpc_not_found(): schema = DummyApiSchema( - {"Mollusc": DummyService({})}, DummyNaming("pkg_name")) + {"Mollusc": DummyService(methods={}, client_name="ClassifyClient")}, DummyNaming("pkg_name")) sample = {"service": "Mollusc", "rpc": "Classify"} with pytest.raises(types.RpcMethodNotFound): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 82e4bee1e28f..32ef4411f103 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -15,8 +15,10 @@ import yaml import pytest +from textwrap import dedent from typing import (TypeVar, Sequence) from collections import (OrderedDict, namedtuple) +from google.api import client_pb2 from google.api import resource_pb2 from google.protobuf import descriptor_pb2 @@ -25,9 +27,10 @@ import gapic.samplegen_utils.yaml as gapic_yaml from gapic.schema import (api, metadata, naming) import gapic.schema.wrappers as wrappers +from gapic.utils import Options -from common_types import (DummyApiSchema, DummyField, DummyMessage, - DummyMethod, message_factory, enum_factory) +from common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, + DummyService, DummyMethod, message_factory, enum_factory) from gapic.samplegen_utils import utils @@ -81,15 +84,18 @@ def test_define_redefinition(): def test_preprocess_sample(): # Verify that the default response is added. 
- sample = {} - api_schema = api.API( - naming.NewNaming( - namespace=("mollusc", "cephalopod", "teuthida") - ), - all_protos={}, + sample = {"service": "Mollusc", "rpc": "Classify"} + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient")}, + naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), ) - samplegen.Validator.preprocess_sample(sample, api_schema) + rpc = DummyMethod(input=DummyMessage( + ident=DummyIdent(name="ClassifyRequest"))) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) response = sample.get("response") assert response == [{"print": ["%s", "$resp"]}] @@ -97,6 +103,36 @@ def test_preprocess_sample(): package_name = sample.get("package_name") assert package_name == "mollusc-cephalopod-teuthida-" + module_name = sample.get("module_name") + assert module_name == "teuthida_v1" + + module_namespace = sample.get("module_namespace") + assert module_namespace == "mollusc.cephalopod" + + client_name = sample.get("client_name") + assert client_name == "MolluscClient" + + request_type = sample.get("request_type") + assert request_type == "ClassifyRequest" + + +def test_preprocess_sample_void_method(): + # Verify no response is added for a void method + sample = {"service": "Mollusc", "rpc": "Classify"} + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient")}, + naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + ) + + rpc = DummyMethod(void=True, input=DummyMessage( + ident=DummyIdent(name="ClassifyRequest"))) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + + assert "response" not in sample + def test_define_input_param(): v = samplegen.Validator( @@ -1220,7 +1256,8 @@ def test_regular_response_type(): def 
test_paged_response_type(): OutputType = TypeVar("OutputType") PagedType = TypeVar("PagedType") - method = DummyMethod(output=OutputType, paged_result_field=PagedType) + PagedField = DummyField(message=PagedType) + method = DummyMethod(output=OutputType, paged_result_field=PagedField) v = samplegen.Validator(method) assert v.var_field("$resp").message == PagedType @@ -1821,6 +1858,72 @@ def test_validate_request_non_terminal_primitive_field(): request) +def test_parse_invalid_handwritten_spec(fs): + fpath = "sampledir/sample.yaml" + fs.create_file( + fpath, + # spec is missing type + contents=dedent( + """ + --- + schema_version: 1.2.0 + samples: + - service: google.cloud.language.v1.LanguageService + """ + ), + ) + + with pytest.raises(types.InvalidConfig): + list(samplegen.parse_handwritten_specs(sample_configs=[fpath])) + + +def test_generate_sample_spec_basic(): + service_options = descriptor_pb2.ServiceOptions() + service_options.Extensions[client_pb2.default_host] = "example.googleapis.com" + + api_schema = api.API.build( + file_descriptors=[ + descriptor_pb2.FileDescriptorProto( + name="cephalopod.proto", + package="animalia.mollusca.v1", + message_type=[ + descriptor_pb2.DescriptorProto( + name="MolluscRequest", + ), + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ], + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Squid", + options=service_options, + method=[ + descriptor_pb2.MethodDescriptorProto( + name="Ramshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + ], + ) + ] + ) + opts = Options.build("transport=grpc") + specs = list(samplegen.generate_sample_specs(api_schema, opts=opts)) + assert len(specs) == 1 + + assert specs[0] == { + "sample_type": "standalone", + "rpc": "Ramshorn", + "request": [], + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_grpc", + "description": "Snippet for ramshorn" + } + + def 
make_message(name: str, package: str = 'animalia.mollusca.v1', module: str = 'cephalopoda', fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, options: descriptor_pb2.MethodOptions = None, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index bd3e539891fe..0eabe9e4f092 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -21,6 +21,7 @@ from gapic.samplegen_utils.types import CallingForm from textwrap import dedent +from tests.unit.samplegen import common_types def check_template(template_fragment, expected_output, **kwargs): @@ -112,6 +113,7 @@ def test_render_request_basic(): {{ frags.render_request_setup(request) }} ''', ''' + # Initialize request argument(s) cephalopod = {} # cephalopod_mass = '10 kg' cephalopod["mantle_mass"] = cephalopod_mass @@ -182,9 +184,10 @@ def test_render_request_unflattened(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request_setup(request) }} + {{ frags.render_request_setup(request, "mollusca", "CreateMolluscRequest") }} ''', ''' + # Initialize request argument(s) cephalopod = {} # cephalopod_mass = '10 kg' cephalopod["mantle_mass"] = cephalopod_mass @@ -205,11 +208,11 @@ def test_render_request_unflattened(): with open(movie_path, "rb") as f: gastropod["movie"] = f.read() - request = { - 'cephalopod': cephalopod, - 'gastropod': gastropod, - 'bivalve': "humboldt", - } + request = mollusca.CreateMolluscRequest( + cephalopod=cephalopod, + gastropod=gastropod, + bivalve="humboldt", + ) ''', request=samplegen.FullRequest( request_list=[ @@ -254,7 +257,9 @@ def test_render_request_unflattened(): body=None, single='"humboldt"'), ] - ) + ), + api=common_types.DummyApiSchema(), + ) @@ -265,6 +270,7 @@ def test_render_request_resource_name(): {{ frags.render_request_setup(request) }} ''', ''' + # 
Initialize request argument(s) taxon = "kingdom/{kingdom}/phylum/{phylum}".format(kingdom="animalia", phylum=mollusca) ''', request=samplegen.FullRequest( @@ -287,7 +293,7 @@ def test_render_request_resource_name(): ), ], flattenable=True - ) + ), ) @@ -538,7 +544,7 @@ def test_dispatch_map_loop(): print("A {} is a {}".format(example, cls)) - + ''', statement={"loop": {"map": "molluscs", "key": "cls", @@ -586,11 +592,11 @@ def test_render_nested_loop_collection(): print("Sucker: {}".format(s)) - - - - - + + + + + """, statement=statement ) @@ -639,11 +645,11 @@ def test_render_nested_loop_map(): print("Example: {}".format(ex)) - - - - - + + + + + """, statement=statement ) @@ -702,7 +708,10 @@ def test_print_input_params(): def test_render_calling_form_request(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' + # Make the request response = TEST_INVOCATION_TXT + + # Handle response print("Test print statement") ''', calling_form_enum=CallingForm, @@ -712,6 +721,7 @@ def test_render_calling_form_request(): def test_render_calling_form_paged_all(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' + # Make the request page_result = TEST_INVOCATION_TXT for response in page_result: print("Test print statement") @@ -723,6 +733,7 @@ def test_render_calling_form_paged_all(): def test_render_calling_form_paged(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' + # Make the request page_result = TEST_INVOCATION_TXT for page in page_result.pages(): for response in page: @@ -735,6 +746,7 @@ def test_render_calling_form_paged(): def test_render_calling_form_streaming_server(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' + # Make the request stream = TEST_INVOCATION_TXT for response in stream: print("Test print statement") @@ -746,6 +758,7 @@ def test_render_calling_form_streaming_server(): def test_render_calling_form_streaming_bidi(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' + # Make the request stream = TEST_INVOCATION_TXT for response in stream: 
print("Test print statement") @@ -757,6 +770,7 @@ def test_render_calling_form_streaming_bidi(): def test_render_calling_form_longrunning(): check_template(CALLING_FORM_TEMPLATE_TEST_STR, ''' + # Make the request operation = TEST_INVOCATION_TXT print("Waiting for operation to complete...") From 68c6d33f3f4998dd82cf06a8c77d3e7833db872f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 May 2021 15:54:22 -0700 Subject: [PATCH 0529/1339] chore: release 0.45.0 (#858) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 0f3fb2aaed16..c2820c176ae0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.45.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.3...v0.45.0) (2021-05-03) + + +### Features + +* add autogenerated snippets ([#845](https://www.github.com/googleapis/gapic-generator-python/issues/845)) ([abdf5ec](https://www.github.com/googleapis/gapic-generator-python/commit/abdf5ec00261e5500dbdd190c23b0b2b05836799)) + ### [0.44.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.2...v0.44.3) (2021-05-03) From 4cf3bdd751b648885e1321a4ad8174ac436066fd Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 4 May 2021 09:36:34 -0600 Subject: [PATCH 0530/1339] fix: add async client to %name_%version/__init__.py (#859) --- .../templates/%namespace/%name_%version/%sub/__init__.py.j2 | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 
6c5b948a2da5..8f2f3b7b68a6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -11,6 +11,9 @@ from . import {{ subpackage }} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} from .services.{{ service.name|snake_case }} import {{ service.client_name }} +{% if 'grpc' in opts.transport %} +from .services.{{ service.name|snake_case }} import {{ service.async_client_name }} +{% endif %} {% endfor %} {# Import messages and enums from each proto. From 40e9acabca87ae94fe4faeb1b089f475bac711e0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 4 May 2021 15:40:04 +0000 Subject: [PATCH 0531/1339] chore: release 0.45.1 (#860) :robot: I have created a release \*beep\* \*boop\* --- ### [0.45.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.0...v0.45.1) (2021-05-04) ### Bug Fixes * add async client to %name_%version/__init__.py ([#859](https://www.github.com/googleapis/gapic-generator-python/issues/859)) ([391fdb8](https://www.github.com/googleapis/gapic-generator-python/commit/391fdb84b13c5628c21d81ad311c689da8971f6a)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c2820c176ae0..d24fbea20c19 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.45.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.0...v0.45.1) (2021-05-04) + + +### Bug Fixes + +* add async client to %name_%version/__init__.py ([#859](https://www.github.com/googleapis/gapic-generator-python/issues/859)) ([391fdb8](https://www.github.com/googleapis/gapic-generator-python/commit/391fdb84b13c5628c21d81ad311c689da8971f6a)) + ## [0.45.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.3...v0.45.0) (2021-05-03) From d9e78751751cb6986bc97df368aad794f18f06d8 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 6 May 2021 11:53:11 -0600 Subject: [PATCH 0532/1339] fix: remove extra space before_pb_options (#863) * fix: add async client to * fix: remove extra space before_pb_options --- .../%namespace/%name/%version/%sub/types/_enum.py.j2 | 2 +- .../templates/%namespace/%name_%version/%sub/types/_enum.py.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 index 8921af307062..cf82d19ea163 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_enum.py.j2 @@ -1,7 +1,7 @@ class {{ enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" {% if enum.enum_pb.HasField("options") %} - _pb_options = {{ enum.options_dict }} + _pb_options = {{ enum.options_dict }} {% endif %} {% 
for enum_value in enum.values %} {{ enum_value.name }} = {{ enum_value.number }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 index 8921af307062..cf82d19ea163 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 @@ -1,7 +1,7 @@ class {{ enum.name }}({{ p }}.Enum): r"""{{ enum.meta.doc|rst(indent=4) }}""" {% if enum.enum_pb.HasField("options") %} - _pb_options = {{ enum.options_dict }} + _pb_options = {{ enum.options_dict }} {% endif %} {% for enum_value in enum.values %} {{ enum_value.name }} = {{ enum_value.number }} From aea009b6bcc56f23e596cadebfa438f4e828d9f7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 6 May 2021 12:37:13 -0600 Subject: [PATCH 0533/1339] chore: release 0.45.2 (#864) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d24fbea20c19..792ed10740dd 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.45.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.1...v0.45.2) (2021-05-06) + + +### Bug Fixes + +* remove extra space before_pb_options ([#863](https://www.github.com/googleapis/gapic-generator-python/issues/863)) ([f0532e7](https://www.github.com/googleapis/gapic-generator-python/commit/f0532e7a88479aeb805c1509239008bdd19e9d85)) + ### [0.45.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.0...v0.45.1) (2021-05-04) From 
2fd23fe486892333f97cb370e316db6e0fc31914 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 7 May 2021 09:48:28 -0700 Subject: [PATCH 0534/1339] feat: Support field presence for query parameters in REST clients (#866) Also fix witespace issues in rest transport ands tests. Also update bazel-specific dependencies. --- .../services/%service/transports/rest.py.j2 | 27 +++++++++++-------- .../%name_%version/%sub/test_%service.py.j2 | 15 +++++++---- packages/gapic-generator/repositories.bzl | 6 ++--- 3 files changed, 29 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index dbb21bfa72c7..e3622630a216 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -89,7 +89,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): client_info=client_info, ) self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) - {%- if service.has_lro %} + {% if service.has_lro %} self._operations_client = None {% endif %} if client_cert_source_for_mtls: @@ -133,10 +133,10 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): request: {{ method.input.ident }}, *, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: - r"""Call the {{ ' ' }} + r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} - {{ ' ' }} method over HTTP. + {{- ' ' -}} method over HTTP. 
Args: request (~.{{ method.input.ident }}): @@ -189,11 +189,16 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): #} # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields # not required for GCE - query_params = { - {% for field in method.query_params | sort%} - '{{ field|camel_case }}': request.{{ field }}, - {% endfor %} - } + query_params = {} + {% for field in method.query_params | sort%} + {% if method.input.fields[field].proto3_optional %} + if {{ method.input.ident }}.{{ field }} in request: + query_params['{{ field|camel_case }}'] = request.{{ field }} + {% else %} + query_params['{{ field|camel_case }}'] = request.{{ field }} + {% endif %} + {% endfor %} + # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here # discards default values # TODO(yon-mg): add test for proper url encoded strings @@ -201,9 +206,9 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): url += '?{}'.format('&'.join(query_params)).replace(' ', '+') # Send the request - {% if not method.void %}response = {% endif %}self._session.{{ method.http_opt['verb'] }}( - url - {% if 'body' in method.http_opt %}, + response = self._session.{{ method.http_opt['verb'] }}( + url, + {% if 'body' in method.http_opt %} data=body, {% endif %} ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 08a68d39bc80..3227b7b34025 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1189,7 +1189,11 @@ def test_{{ method.name|snake_case }}_rest_flattened(): {% endfor %} client.{{ method.name|snake_case }}( {% for field in method.flattened_fields.values() %} - {% if field.field_pb is 
msg_field_pb %}{{ field.name }}={{ field.name }},{% else %}{{ field.name }}={{ field.mock_value }},{% endif %} + {% if field.field_pb is msg_field_pb %} + {{ field.name }}={{ field.name }}, + {% else %} + {{ field.name }}={{ field.mock_value }}, + {% endif %} {% endfor %} ) @@ -1198,16 +1202,17 @@ def test_{{ method.name|snake_case }}_rest_flattened(): assert len(req.mock_calls) == 1 _, http_call, http_params = req.mock_calls[0] body = http_params.get('data') - {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% for key, field in method.flattened_fields.items() %} + {% if not field.oneof or field.proto3_optional %} {% if field.ident|string() == 'timestamp.Timestamp' %} assert TimestampRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} {% elif field.ident|string() == 'duration.Duration' %} assert DurationRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} {% else %} assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False, use_integers_for_enums=False) - {% elif field.field_pb is str_field_pb %}{{ field.mock_value }} - {% else %}str({{ field.mock_value }}) - {% endif %} in http_call[1] + str(body) + {%- elif field.field_pb is str_field_pb %}{{ field.mock_value }} + {%- else %}str({{ field.mock_value }}) + {%- endif %} in http_call[1] + str(body) {% endif %} {% endif %}{% endfor %} diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index c6af87d4bc3e..0e4de1db1aad 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -15,7 +15,7 @@ def gapic_generator_python(): requirements = "@gapic_generator_python//:requirements.txt", ) - _protobuf_version = "3.14.0" + _protobuf_version = "3.15.8" _protobuf_version_in_link = "v%s" % _protobuf_version _maybe( http_archive, @@ -34,8 +34,8 @@ def gapic_generator_python(): _maybe( 
http_archive, name = "com_github_grpc_grpc", - strip_prefix = "grpc-8347f4753568b5b66e49111c60ae2841278d3f33", # this is 1.25.0 with fixes - urls = ["https://github.com/grpc/grpc/archive/8347f4753568b5b66e49111c60ae2841278d3f33.zip"], + strip_prefix = "grpc-1.36.4", + urls = ["https://github.com/grpc/grpc/archive/v1.36.4.zip"], ) _maybe( From ea5065803f9cffc97c33b808f42b1609a6ac5475 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 7 May 2021 14:40:33 -0700 Subject: [PATCH 0535/1339] fix: Check for default value presence for non-optional fields in REST (#868) --- .../%sub/services/%service/transports/rest.py.j2 | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index e3622630a216..24af43d76331 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -195,14 +195,15 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): if {{ method.input.ident }}.{{ field }} in request: query_params['{{ field|camel_case }}'] = request.{{ field }} {% else %} - query_params['{{ field|camel_case }}'] = request.{{ field }} + if request.{{ field }}: + query_params['{{ field|camel_case }}'] = request.{{ field }} {% endif %} {% endfor %} # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here # discards default values # TODO(yon-mg): add test for proper url encoded strings - query_params = ['{k}={v}'.format(k=k, v=v) for k, v in query_params.items() if v] + query_params = ['{k}={v}'.format(k=k, v=v) for k, v in query_params.items()] url += 
'?{}'.format('&'.join(query_params)).replace(' ', '+') # Send the request From d5d93d4c7a47b8f48c2614473ed06beec8c11ed1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 7 May 2021 15:52:31 -0600 Subject: [PATCH 0536/1339] chore: release 0.46.0 (#867) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 792ed10740dd..226047f4ecba 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.46.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.2...v0.46.0) (2021-05-07) + + +### Features + +* Support field presence for query parameters in REST clients ([#866](https://www.github.com/googleapis/gapic-generator-python/issues/866)) ([5339db1](https://www.github.com/googleapis/gapic-generator-python/commit/5339db1308326d91a05a34d38e31cf91b79a9225)) + + +### Bug Fixes + +* Check for default value presence for non-optional fields in REST ([#868](https://www.github.com/googleapis/gapic-generator-python/issues/868)) ([5748001](https://www.github.com/googleapis/gapic-generator-python/commit/57480019c3e77c6b3a85bdaf8441334170b318e8)) + ### [0.45.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.1...v0.45.2) (2021-05-06) From cd5d2f930f00affaa82344bf6284fab2cbc93a67 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 7 May 2021 16:40:03 -0600 Subject: [PATCH 0537/1339] fix: also add the async client to __all__ (#869) Follow up to #859 Closes #815 Also delete a stray } in feature_fragments which was resulting in almost empty files getting generated in clients. 
https://github.com/googleapis/googleapis-gen/blob/master/google/appengine/v1/google-cloud-appengine-v1-py/examples/feature_fragments --- .../templates/%namespace/%name_%version/%sub/__init__.py.j2 | 3 +++ .../gapic/templates/examples/feature_fragments.j2 | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 8f2f3b7b68a6..176cfd7f5d84 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -43,6 +43,9 @@ __all__ = ( {% for service in api.services.values() if service.meta.address.subpackage == api.subpackage_view -%} '{{ service.client_name }}', + {% if 'grpc' in opts.transport %} + '{{ service.async_client_name }}', + {% endif %} {% endfor -%} {% for proto in api.protos.values() if proto.meta.address.subpackage == api.subpackage_view -%} diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 2959157a3069..c6dc93d99c93 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -145,7 +145,7 @@ with open({{ attr.input_parameter }}, "rb") as f: {% macro render_client_setup(module_name, client_name) %} # Create a client client = {{ module_name }}.{{ client_name }}() -{% endmacro %}} +{% endmacro %} {% macro render_request_setup(full_request, module_name, request_type) %} # Initialize request argument(s) From 1135113962b5bf74d12922fb28fd67631e5a3efe Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 7 May 2021 15:48:03 -0700 Subject: [PATCH 0538/1339] chore: 
release 0.46.1 (#870) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 226047f4ecba..00fde0c8d832 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.46.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.0...v0.46.1) (2021-05-07) + + +### Bug Fixes + +* also add the async client to __all__ ([#869](https://www.github.com/googleapis/gapic-generator-python/issues/869)) ([09c90fa](https://www.github.com/googleapis/gapic-generator-python/commit/09c90fa48515cb7da1d0ebf1d93a0d49fc6448e8)) + ## [0.46.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.2...v0.46.0) (2021-05-07) From 78bdd2131b4cba4fd53088feed8053cbccbf1cef Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 8 May 2021 00:52:02 +0200 Subject: [PATCH 0539/1339] chore(deps): update dependency protobuf to v3.16.0 (#865) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.15.8` -> `==3.16.0` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.16.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.16.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.16.0/compatibility-slim/3.15.8)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.16.0/confidence-slim/3.15.8)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index fd1efaac3ad0..43ad012986d4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==2.11.3 MarkupSafe==1.1.1 -protobuf==3.15.8 +protobuf==3.16.0 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From b4a69aa8d45f328a3b1dd1609d3a91b59a3f55fd Mon Sep 17 00:00:00 2001 From: klmilam <38926902+klmilam@users.noreply.github.com> Date: Tue, 11 May 2021 13:11:16 -0400 Subject: [PATCH 0540/1339] style: update Python client library templates to align with the Python Style Guide (#874) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The template conflicts with the Python Style Guide in a couple of areas: "Be consistent with your choice of string quote character within a file. Pick ' or " and stick with it. It is okay to use the other quote character on a string to avoid the need to \ escape within the string."
Lambda functions: "Okay to use them for one-liners. If the code inside the lambda function is longer than 60-80 chars, it's probably better to define it as a regular nested function." "Maximum line length is 80 characters." Function docstrings including descriptive-style phrasing and newlines. This will improve the style of the auto-generated Python client library; however, the changes are not exhaustive because the code is still auto-generated. It does not change any functionality. --- .../%sub/services/%service/async_client.py.j2 | 46 ++++--- .../%sub/services/%service/client.py.j2 | 121 +++++++++++------- .../%sub/services/%service/pagers.py.j2 | 2 +- 3 files changed, 100 insertions(+), 69 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 47ccc596a89c..f5d60d8e606f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -1,4 +1,4 @@ -{% extends '_base.py.j2' %} +{% extends "_base.py.j2" %} {% block content %} @@ -51,7 +51,8 @@ class {{ service.async_client_name }}: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -66,7 +67,7 @@ class {{ service.async_client_name }}: @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -83,7 +84,7 @@ class {{ service.async_client_name }}: @property def transport(self) -> {{ service.name }}Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: {{ service.name }}Transport: The transport used by the client instance. @@ -95,11 +96,11 @@ class {{ service.async_client_name }}: def __init__(self, *, credentials: ga_credentials.Credentials = None, - transport: Union[str, {{ service.name }}Transport] = 'grpc_asyncio', + transport: Union[str, {{ service.name }}Transport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. + """Instantiates the {{ (service.client_name|snake_case).replace("_", " ") }}. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -165,7 +166,7 @@ class {{ service.async_client_name }}: Args: {% if not method.client_streaming %} request (:class:`{{ method.input.ident.sphinx }}`): - The request object.{{ ' ' }} + The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): @@ -176,7 +177,7 @@ class {{ service.async_client_name }}: {% endfor %} {% else %} requests (AsyncIterator[`{{ method.input.ident.sphinx }}`]): - The request object AsyncIterator.{{ ' ' }} + The request object AsyncIterator.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -200,10 +201,10 @@ class {{ service.async_client_name }}: {% if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]) + has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") {% endif %} {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} @@ -251,7 +252,7 @@ class {{ service.async_client_name }}: {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( - {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + {% for ex in method.retry.retryable_exceptions|sort(attribute="__name__") %} exceptions.{{ ex.__name__ }}, {% endfor %} ), @@ -269,7 +270,7 @@ class {{ service.async_client_name }}: gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ('{{ field_header }}', request.{{ field_header }}), + ("{{ field_header }}", request.{{ field_header }}), {% endif %} {% endfor %} )), @@ -313,7 +314,7 @@ class {{ service.async_client_name }}: # Done; return the response. return response {% endif %} - {{ '\n' }} + {{ "\n" }} {% endfor %} @@ -326,8 +327,10 @@ class {{ service.async_client_name }}: timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. 
+ Args: request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` @@ -433,8 +436,10 @@ class {{ service.async_client_name }}: metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does not have a policy set. + Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` @@ -540,8 +545,11 @@ class {{ service.async_client_name }}: metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will + policy for a function. + + If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. + Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. 
Request message for @@ -586,7 +594,7 @@ class {{ service.async_client_name }}: try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - '{{ api.naming.warehouse_package_name }}', + "{{ api.naming.warehouse_package_name }}", ).version, ) except pkg_resources.DistributionNotFound: @@ -594,6 +602,6 @@ except pkg_resources.DistributionNotFound: __all__ = ( - '{{ service.async_client_name }}', + "{{ service.async_client_name }}", ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 45efbc3cc166..281913acd3f0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -48,18 +48,18 @@ class {{ service.client_name }}Meta(type): objects. """ _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] - {% if 'grpc' in opts.transport %} - _transport_registry['grpc'] = {{ service.grpc_transport_name }} - _transport_registry['grpc_asyncio'] = {{ service.grpc_asyncio_transport_name }} + {% if "grpc" in opts.transport %} + _transport_registry["grpc"] = {{ service.grpc_transport_name }} + _transport_registry["grpc_asyncio"] = {{ service.grpc_asyncio_transport_name }} {% endif %} - {% if 'rest' in opts.transport %} - _transport_registry['rest'] = {{ service.name }}RestTransport + {% if "rest" in opts.transport %} + _transport_registry["rest"] = {{ service.name }}RestTransport {% endif %} def get_transport_class(cls, label: str = None, ) -> Type[{{ service.name }}Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -82,7 +82,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -109,7 +110,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - DEFAULT_ENDPOINT = {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + DEFAULT_ENDPOINT = {% if service.host %}"{{ service.host }}"{% else %}None{% endif %} DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT @@ -117,7 +118,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -134,7 +136,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -147,17 +149,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ credentials = service_account.Credentials.from_service_account_file( filename) - kwargs['credentials'] = credentials + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @property def transport(self) -> {{ service.name }}Transport: - """Return the transport used by the client instance. 
+ """Returns the transport used by the client instance. Returns: - {{ service.name }}Transport: The transport used by the client instance. + {{ service.name }}Transport: The transport used by the client + instance. """ return self._transport @@ -165,13 +168,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for message in service.resource_messages|sort(attribute="resource_type") %} @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: - """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" + """Returns a fully-qualified {{ message.resource_type|snake_case }} string.""" return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @staticmethod def parse_{{ message.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: - """Parse a {{ message.resource_type|snake_case }} path into its component segments.""" + """Parses a {{ message.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} @@ -179,7 +182,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} @staticmethod def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: - """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" + """Returns a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" return "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @staticmethod @@ -196,7 +199,7 @@ class {{ service.client_name 
}}(metaclass={{ service.client_name }}Meta): client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. + """Instantiates the {{ (service.client_name|snake_case).replace('_', ' ') }}. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -249,7 +252,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = mtls.default_client_cert_source() if is_mtls else None + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -261,10 +267,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -273,12 +283,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if isinstance(transport, {{ service.name }}Transport): # transport is a {{ service.name }}Transport instance. 
if credentials or client_options.credentials_file: - raise ValueError('When providing a transport instance, ' - 'provide its credentials directly.') + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -319,7 +329,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Args: {% if not method.client_streaming %} request ({{ method.input.ident.sphinx }}): - The request object.{{ ' ' }} + The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} ({{ field.ident.sphinx }}): @@ -330,7 +340,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %} {% else %} requests (Iterator[{{ method.input.ident.sphinx }}]): - The request object iterator.{{ ' ' }} + The request object iterator.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -346,7 +356,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% else %} Iterable[{{ method.client_output.ident.sphinx }}]: {% endif %} - {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} + {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format="rst") }} {% endif %} """ {% if not method.client_streaming %} @@ -354,7 +364,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]) + has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -410,7 +420,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ('{{ field_header }}', request.{{ field_header }}), + ("{{ field_header }}", request.{{ field_header }}), {% endif %} {% endfor %} )), @@ -453,7 +463,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Done; return the response. return response {% endif %} - {{ '\n' }} + {{ "\n" }} {% endfor %} {% if opts.add_iam_methods %} @@ -465,8 +475,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` @@ -554,11 +566,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -572,14 +586,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. + + Returns an empty policy if the function exists and does not have a + policy set. + Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -661,11 +677,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response @@ -678,15 +696,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. 
If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -712,11 +733,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. 
return response @@ -726,7 +749,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( - '{{ api.naming.warehouse_package_name }}', + "{{ api.naming.warehouse_package_name }}", ).version, ) except pkg_resources.DistributionNotFound: @@ -734,6 +757,6 @@ except pkg_resources.DistributionNotFound: __all__ = ( - '{{ service.client_name }}', + "{{ service.client_name }}", ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 53ce39b6e2aa..badc77a8c17b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -114,7 +114,7 @@ class {{ method.name }}AsyncPager: response: {{ method.output.ident }}, *, metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and From 65797ea14768b55fc655573314b6e5f8ab9ba53c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 12 May 2021 11:52:02 -0700 Subject: [PATCH 0541/1339] chore: satiate mypy (#880) --- packages/gapic-generator/gapic/generator/generator.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 362753ff2d17..3014719c17f3 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -89,7 +89,7 @@ def get_response( sample_templates, client_templates = utils.partition( lambda fname: os.path.basename( fname) == samplegen.DEFAULT_TEMPLATE_NAME, - self._env.loader.list_templates(), + self._env.loader.list_templates(), # type: ignore ) # Iterate over each template and add the appropriate output files @@ -113,7 +113,7 @@ def get_response( sample_output = self._generate_samples_and_manifest( api_schema, self._env.get_template(sample_templates[0]), opts=opts, - ) + ) output_files.update(sample_output) # Return the CodeGeneratorResponse output. 
@@ -286,10 +286,10 @@ def _render_template( for service in api_schema.services.values(): if ( (skip_subpackages - and service.meta.address.subpackage != api_schema.subpackage_view) + and service.meta.address.subpackage != api_schema.subpackage_view) or ('transport' in template_name - and not self._is_desired_transport(template_name, opts)) + and not self._is_desired_transport(template_name, opts)) or # TODO(yon-mg) - remove when rest async implementation resolved # temporarily stop async client gen while rest async is unkown From 6c12b5d4d0aa07a8627af789afc5ef4a96565d9a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 12 May 2021 21:08:02 +0200 Subject: [PATCH 0542/1339] chore(deps): update dependency markupsafe to v2 (#879) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [MarkupSafe](https://palletsprojects.com/p/markupsafe/) ([changelog](https://markupsafe.palletsprojects.com/changes/)) | `==1.1.1` -> `==2.0.0` | [![age](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.0/compatibility-slim/1.1.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.0/confidence-slim/1.1.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. 
â™»ï¸ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 43ad012986d4..b18aa46ec029 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,7 +2,7 @@ click==7.1.2 google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==2.11.3 -MarkupSafe==1.1.1 +MarkupSafe==2.0.0 protobuf==3.16.0 pypandoc==1.5 PyYAML==5.4.1 From 1ebbc4b163436944fac702e035e5f2ef243f7a1a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 12 May 2021 21:28:02 +0200 Subject: [PATCH 0543/1339] chore(deps): update dependency jinja2 to v3 (#878) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [jinja2](https://palletsprojects.com/p/jinja/) ([changelog](https://jinja.palletsprojects.com/changes/)) | `==2.11.3` -> `==3.0.0` | [![age](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.0/compatibility-slim/2.11.3)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.0/confidence-slim/2.11.3)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™»ï¸ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b18aa46ec029..5e740f28ef44 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==7.1.2 google-api-core==1.26.3 googleapis-common-protos==1.53.0 -jinja2==2.11.3 +jinja2==3.0.0 MarkupSafe==2.0.0 protobuf==3.16.0 pypandoc==1.5 From 31a8af5c5cd2169415f05e4c761bc4b7bca28883 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 12 May 2021 22:00:07 +0200 Subject: [PATCH 0544/1339] chore(deps): update dependency click to v8 (#877) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [click](https://palletsprojects.com/p/click/) 
([changelog](https://click.palletsprojects.com/changes/)) | `==7.1.2` -> `==8.0.0` | [![age](https://badges.renovateapi.com/packages/pypi/click/8.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/click/8.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/click/8.0.0/compatibility-slim/7.1.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/click/8.0.0/confidence-slim/7.1.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™»ï¸ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 5e740f28ef44..ee93fbbd146a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==7.1.2 +click==8.0.0 google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==3.0.0 From 5afb22eced5872ca664a5e216ac4d44b8003ea4d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 12 May 2021 14:06:02 -0600 Subject: [PATCH 0545/1339] fix: fix incorrectly referenced exceptions, add missing port to tests (#873) Fixes #872 --- .../%version/%sub/services/%service/transports/base.py.j2 | 2 +- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2 +- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index d630c8f02336..82ab1464387b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -91,7 +91,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} - exceptions.{{ ex.__name__ }}, + core_exceptions.{{ ex.__name__ }}, {% endfor %} ), deadline={{ method.timeout }}, diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index f5d60d8e606f..3c5988073cd2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -253,7 +253,7 @@ class {{ service.async_client_name }}: {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} predicate=retries.if_exception_type( {% for ex in method.retry.retryable_exceptions|sort(attribute="__name__") %} - exceptions.{{ ex.__name__ }}, + core_exceptions.{{ ex.__name__ }}, {% endfor %} ), deadline={{ method.timeout }}, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3227b7b34025..f6ef2fba9bf3 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1665,7 +1665,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(tra {% with host = (service.host|default('localhost', true)) %} create_channel.assert_called_with( - "{{ host }}", + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", credentials=creds, credentials_file=None, quota_project_id="octopus", @@ -1703,7 +1703,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel_user_scopes(tran transport_class(quota_project_id="octopus", scopes=["1", "2"]) create_channel.assert_called_with( - "{{ host }}", + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", credentials=creds, 
credentials_file=None, quota_project_id="octopus", From 88adb7d9decba887e8131cbaaca5cc7a946019d7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 12 May 2021 14:30:30 -0600 Subject: [PATCH 0546/1339] chore: release 0.46.2 (#881) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 00fde0c8d832..74f499bd5e0b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.46.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.1...v0.46.2) (2021-05-12) + + +### Bug Fixes + +* fix incorrectly referenced exceptions, add missing port to tests ([#873](https://www.github.com/googleapis/gapic-generator-python/issues/873)) ([40078c4](https://www.github.com/googleapis/gapic-generator-python/commit/40078c46b21a0dfa489d4cd80ed7d95bb542f3c3)), closes [#872](https://www.github.com/googleapis/gapic-generator-python/issues/872) + ### [0.46.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.0...v0.46.1) (2021-05-07) From f8b0faec9c0f45092464bccd315c18038f628980 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 12 May 2021 17:42:02 -0600 Subject: [PATCH 0547/1339] fix: consistently use _pb2 identifier (#883) Small change to fix bug noticed in https://github.com/googleapis/python-dialogflow/pull/288. 
The import was `from google.rpc import status_pb2` but code expected `gr_status` --- packages/gapic-generator/gapic/schema/metadata.py | 10 +++++----- .../tests/unit/schema/test_metadata.py | 11 +++++++++++ 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index e14190d00763..91e690577755 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -84,17 +84,17 @@ def __str__(self) -> str: if self.module: module_name = self.module + # If collisions are registered and conflict with our module, + # use the module alias instead. + if self.module_alias: + module_name = self.module_alias + # This module is from a different proto package # Most commonly happens for a common proto # https://pypi.org/project/googleapis-common-protos/ if not self.proto_package.startswith(self.api_naming.proto_package): module_name = f'{self.module}_pb2' - # If collisions are registered and conflict with our module, - # use the module alias instead. - if self.module_alias: - module_name = self.module_alias - # Return the dot-separated Python identifier. 
return '.'.join((module_name,) + self.parent + (self.name,)) diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index c778000c7c3d..179361e039f4 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -49,6 +49,17 @@ def test_address_str_different_proto_package(): assert str(addr) == 'options_pb2.GetPolicyOptions' +def test_address_str_different_proto_package_with_collision(): + addr = metadata.Address( + package=('google', 'rpc'), + module='status', + name='Status', + api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1') + ).with_context(collisions=frozenset({'status'})) + # the module alias should be ignored for _pb2 types + assert str(addr) == 'status_pb2.Status' + + def test_address_proto(): addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') assert addr.proto == 'foo.bar.Bacon' From 12db9f219b764ea3d2f350127878259652e1b233 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 13 May 2021 08:44:09 -0600 Subject: [PATCH 0548/1339] chore: release 0.46.3 (#884) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 74f499bd5e0b..4703233c0bba 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.46.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.2...v0.46.3) (2021-05-12) + + +### Bug Fixes + +* consistently use _pb2 identifier ([#883](https://www.github.com/googleapis/gapic-generator-python/issues/883)) 
([d789c84](https://www.github.com/googleapis/gapic-generator-python/commit/d789c84d0d686bdb2d88179041b4c04cc32a3e66)) + ### [0.46.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.1...v0.46.2) (2021-05-12) From 40fe09246b625b9bfcac76dac1f170812f936996 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Thu, 13 May 2021 09:34:08 -0700 Subject: [PATCH 0549/1339] feat: support protobuf method deprecation option [gapic-generator-python] (#875) --- .../%name/%version/%sub/services/%service/client.py.j2 | 3 +++ packages/gapic-generator/gapic/schema/wrappers.py | 6 ++++++ .../%name_%version/%sub/services/%service/client.py.j2 | 7 +++++-- packages/gapic-generator/test_utils/test_utils.py | 4 ++++ .../tests/unit/schema/wrappers/test_method.py | 5 +++++ 5 files changed, 23 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 3e0221671207..bb9425bbfa68 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -334,6 +334,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} {% endif %} """ + {% if method.is_deprecated %} + warnings.warn("{{ method.name|snake_case }} is deprecated", warnings.DeprecationWarning) + {% endif %} {% if not method.client_streaming %} # Create or coerce a protobuf request object. 
{% if method.flattened_fields %} diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 962407aa9cb0..249ca5b5d4e0 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -734,8 +734,14 @@ def _client_output(self, enable_asyncio: bool): # Return the usual output. return self.output + @property + def is_deprecated(self) -> bool: + """Returns true if the method is deprecated, false otherwise.""" + return descriptor_pb2.MethodOptions.HasField(self.options, 'deprecated') + # TODO(yon-mg): remove or rewrite: don't think it performs as intended # e.g. doesn't work with basic case of gRPC transcoding + @property def field_headers(self) -> Sequence[str]: """Return the field headers defined for this method.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 281913acd3f0..9db0f092d18b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -359,6 +359,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format="rst") }} {% endif %} """ + {% if method.is_deprecated %} + warnings.warn("{{ method.name|snake_case }} is deprecated", warnings.DeprecationWarning) + {% endif %} {% if not method.client_streaming %} # Create or coerce a protobuf request object. {% if method.flattened_fields %} @@ -476,9 +479,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. 
- + Replaces any existing policy. - + Args: request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 2aafab454a3b..a499606f49f7 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -162,6 +162,7 @@ def make_method( module: str = 'baz', http_rule: http_pb2.HttpRule = None, signatures: typing.Sequence[str] = (), + is_deprecated: bool = False, **kwargs ) -> wrappers.Method: # Use default input and output messages if they are not provided. @@ -189,6 +190,9 @@ def make_method( if isinstance(package, str): package = tuple(package.split('.')) + if is_deprecated: + method_pb.options.deprecated = True + # Instantiate the wrapper class. return wrappers.Method( method_pb=method_pb, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 2162effbbbad..c13a9afb28f5 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -52,6 +52,11 @@ def test_method_not_void(): assert not method.void +def test_method_deprecated(): + method = make_method('DeprecatedMethod', is_deprecated=True) + assert method.is_deprecated + + def test_method_client_output(): output = make_message(name='Input', module='baz') method = make_method('DoStuff', output_message=output) From 89200410414bf75ca23278bfe7816a8ea57a13ad Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 14 May 2021 10:49:33 -0700 Subject: [PATCH 0550/1339] chore: release 0.47.0 (#886) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file 
changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4703233c0bba..bb8b5d1b97f6 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.47.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.3...v0.47.0) (2021-05-13) + + +### Features + +* support protobuf method deprecation option [gapic-generator-python] ([#875](https://www.github.com/googleapis/gapic-generator-python/issues/875)) ([5a5a839](https://www.github.com/googleapis/gapic-generator-python/commit/5a5a839b99d78ec5a5c52452e57c289b55ad1db5)) + ### [0.46.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.2...v0.46.3) (2021-05-12) From b38d95e6aa44a4a89f92d4e797dde2695ea371de Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 14 May 2021 19:54:05 +0200 Subject: [PATCH 0551/1339] chore(deps): update dependency protobuf to v3.17.0 (#885) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.16.0` -> `==3.17.0` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.0/compatibility-slim/3.16.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.0/confidence-slim/3.16.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### 
Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™»ï¸ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ee93fbbd146a..8e9db4cb022c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==3.0.0 MarkupSafe==2.0.0 -protobuf==3.16.0 +protobuf==3.17.0 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From b1424f3b5b717cb501ecdefa9de419830a2b9351 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 May 2021 19:58:37 +0200 Subject: [PATCH 0552/1339] chore(deps): update dependency jinja2 to v3.0.1 (#892) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8e9db4cb022c..2231ff15fafb 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==8.0.0 google-api-core==1.26.3 googleapis-common-protos==1.53.0 -jinja2==3.0.0 +jinja2==3.0.1 MarkupSafe==2.0.0 protobuf==3.17.0 pypandoc==1.5 From 3577a1775385827745cf71db699df53945495817 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 
May 2021 20:02:16 +0200 Subject: [PATCH 0553/1339] chore(deps): update dependency markupsafe to v2.0.1 (#890) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [MarkupSafe](https://palletsprojects.com/p/markupsafe/) ([changelog](https://markupsafe.palletsprojects.com/changes/)) | `==2.0.0` -> `==2.0.1` | [![age](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.1/compatibility-slim/2.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/MarkupSafe/2.0.1/confidence-slim/2.0.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™»ï¸ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2231ff15fafb..84ffd42ff025 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,7 +2,7 @@ click==8.0.0 google-api-core==1.26.3 googleapis-common-protos==1.53.0 jinja2==3.0.1 -MarkupSafe==2.0.0 +MarkupSafe==2.0.1 protobuf==3.17.0 pypandoc==1.5 PyYAML==5.4.1 From 787a76fc0df234575aa95b6bf3e0bf034c82b24d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 May 2021 20:08:07 +0200 Subject: [PATCH 0554/1339] chore(deps): update dependency google-api-core to v1.27.0 (#889) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==1.26.3` -> `==1.27.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-core/1.27.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/1.27.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/1.27.0/compatibility-slim/1.26.3)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/1.27.0/confidence-slim/1.26.3)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v1.27.0`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1270-httpswwwgithubcomgoogleapispython-api-corecomparev1263v1270-2021-05-18) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.26.3...v1.27.0) ##### Features - Add support for `rest/` token in `x-goog-api-client` header ([#​189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124)) - retry google.auth TransportError and requests ConnectionError ([#​178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334)) ##### [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25) ##### Bug Fixes - skip empty policy bindings in `len()` and `iter()` ([#​159](https://www.github.com/googleapis/python-api-core/issues/159)) ([9eaa786](https://www.github.com/googleapis/python-api-core/commit/9eaa7868164a7e98792de24d2be97f79fba22322)) ##### Documentation - update python contributing guide ([#​147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac)) ##### [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23) ##### Bug Fixes - save empty IAM policy bindings ([#​155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f)) ##### [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12) ##### Bug Fixes - add operation name to x-goog-request-params in async client ([#​137](https://www.github.com/googleapis/python-api-core/issues/137)) 
([7271b23](https://www.github.com/googleapis/python-api-core/commit/7271b23afddb032e49e957525704d0cd5bfa4c65))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™»ï¸ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 84ffd42ff025..7f10d067224d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.0 -google-api-core==1.26.3 +google-api-core==1.27.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From 16ca92f2687e6ba4d0937c8f5da020134809e0ee Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 19 May 2021 15:05:17 -0600 Subject: [PATCH 0555/1339] fix: remove support for google-api-core<1.26.0 (#893) --- .../services/%service/transports/base.py.j2 | 26 +----- .../services/%service/transports/grpc.py.j2 | 6 +- .../%service/transports/grpc_asyncio.py.j2 | 6 +- .../services/%service/transports/rest.py.j2 | 6 +- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 89 +------------------ 6 files changed, 15 insertions(+), 120 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 819448a18ba5..0570b930932e 
100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -46,8 +46,6 @@ except AttributeError: except pkg_resources.DistributionNotFound: # pragma: NO COVER _GOOGLE_AUTH_VERSION = None -_API_CORE_VERSION = google.api_core.__version__ - class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" @@ -121,10 +119,9 @@ class {{ service.name }}Transport(abc.ABC): self._credentials = credentials - # TODO(busunkim): These two class methods are in the base transport + # TODO(busunkim): This method is in the base transport # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-api-core - # and google-auth are increased. + # should be deleted once the minimum required versions of google-auth is increased. 
# TODO: Remove this function once google-auth >= 1.25.0 is required @classmethod @@ -143,25 +140,6 @@ class {{ service.name }}Transport(abc.ABC): return scopes_kwargs - # TODO: Remove this function once google-api-core >= 1.26.0 is required - @classmethod - def _get_self_signed_jwt_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Union[Optional[Sequence[str]], str]]: - """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" - - self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} - - if _API_CORE_VERSION and ( - packaging.version.parse(_API_CORE_VERSION) - >= packaging.version.parse("1.26.0") - ): - self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES - self_signed_jwt_kwargs["scopes"] = scopes - self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST - else: - self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES - - return self_signed_jwt_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index c266473ace29..3d9fbf9820dd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -204,14 +204,14 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): and ``credentials_file`` are passed. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 5c02730a1958..0b72ecfe9e8c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -77,14 +77,14 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): aio.Channel: A gRPC AsyncIO channel object. 
""" - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) - return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 24af43d76331..bf3cec5efc2a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -109,13 +109,13 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): if self._operations_client is None: from google.api_core import grpc_helpers - self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(self._host, self._scopes) - self._operations_client = operations_v1.OperationsClient( grpc_helpers.create_channel( self._host, credentials=self._credentials, - **self_signed_jwt_kwargs, + default_scopes=cls.AUTH_SCOPES, + scopes=self._scopes, + default_host=cls.DEFAULT_HOST, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 9c97573fc00f..07d9535c6a12 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -27,7 +27,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.22.2, < 2.0.0dev', + 'google-api-core[grpc] >= 1.26.0, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 
'packaging >= 14.3', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f6ef2fba9bf3..5e436e33bf46 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -29,7 +29,6 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser {% endif %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _GOOGLE_AUTH_VERSION -from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _API_CORE_VERSION from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers @@ -55,8 +54,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endfilter %} -# TODO(busunkim): Once google-api-core >= 1.26.0 is required: -# - Delete all the api-core and auth "less than" test cases +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases # - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
requires_google_auth_lt_1_25_0 = pytest.mark.skipif( packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), @@ -67,16 +67,6 @@ requires_google_auth_gte_1_25_0 = pytest.mark.skipif( reason="This test requires google-auth >= 1.25.0", ) -requires_api_core_lt_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), - reason="This test requires google-api-core < 1.26.0", -) - -requires_api_core_gte_1_26_0 = pytest.mark.skipif( - packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), - reason="This test requires google-api-core >= 1.26.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -1610,7 +1600,6 @@ def test_{{ service.name|snake_case }}_transport_auth_adc_old_google_auth(transp (transports.{{ service.name }}GrpcAsyncIOTransport, grpc_helpers_async) ], ) -@requires_api_core_gte_1_26_0 def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, grpc_helpers): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -1644,78 +1633,6 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, ) {% endwith %} - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.{{ service.name }}GrpcTransport, grpc_helpers), - (transports.{{ service.name }}GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -@requires_api_core_lt_1_26_0 -def test_{{ service.name|snake_case }}_transport_create_channel_old_api_core(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus") - - {% with host = (service.host|default('localhost', true)) %} - create_channel.assert_called_with( - "{{ host }}{% if ":" not in service.host %}:443{% endif %}", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %}), - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - {% endwith %} - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.{{ service.name }}GrpcTransport, grpc_helpers), - (transports.{{ service.name }}GrpcAsyncIOTransport, grpc_helpers_async) - ], -) -@requires_api_core_lt_1_26_0 -def test_{{ service.name|snake_case }}_transport_create_channel_user_scopes(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - {% with host = (service.host|default('localhost', true)) %} - - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "{{ host }}{% if ":" not in service.host %}:443{% endif %}", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - scopes=["1", "2"], - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - {% endwith %} - {% endif %} {% if 'grpc' in opts.transport %} From a21dd6ba2f6a670dcc251230da7dfd5e7d45967d Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Wed, 19 May 2021 15:34:48 -0700 Subject: [PATCH 0556/1339] feat: Raise GoogleAPICallError on REST response errors (#891) While testing against Showcase, I found that server errors resulted in a large exception stack dump that did not include the body of the erroring response. This PR shortens the stack dump and prints out the request body. 
--- .../%sub/services/%service/transports/rest.py.j2 | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index bf3cec5efc2a..afb296aeaf86 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -9,8 +9,9 @@ from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import operations_v1 {% endif %} from google.api_core import gapic_v1 # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -214,8 +215,10 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {% endif %} ) - # Raise requests.exceptions.HTTPError if the status code is >= 400 - response.raise_for_status() + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) {% if not method.void %} # Return the response From ef9931aad9fb6afd4a863bc31d3fd7abc57e421f Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 20 May 2021 08:52:03 -0600 Subject: [PATCH 0557/1339] fix: fix datetime comparison unit tests (#898) Fixes #897 The string ident changed to have an `_pb2` in #851. 
If there's a more robust way to do this comparison please let me know. --- .../gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../gapic/%name_%version/%sub/test_%service.py.j2 | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 159552914d22..8d9af17ae691 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -444,9 +444,9 @@ def test_{{ method.name|snake_case }}_flattened(): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' %} + {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' %} + {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} assert args[0].{{ key }} == {{ field.mock_value }} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5e436e33bf46..b3c179d29014 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -760,9 +760,9 @@ def test_{{ method.name|snake_case }}_flattened(): assert len(call.mock_calls) == 1 _, args, _ = 
call.mock_calls[0] {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' %} + {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' %} + {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} assert args[0].{{ key }} == {{ field.mock_value }} @@ -845,9 +845,9 @@ async def test_{{ method.name|snake_case }}_flattened_async(): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' %} + {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' %} + {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} assert args[0].{{ key }} == {{ field.mock_value }} @@ -1194,9 +1194,9 @@ def test_{{ method.name|snake_case }}_rest_flattened(): body = http_params.get('data') {% for key, field in method.flattened_fields.items() %} {% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp.Timestamp' %} + {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} assert TimestampRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration.Duration' %} + {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} {% else %} assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, 
including_default_value_fields=False, use_integers_for_enums=False) From b2c3aefe03af6dd36080e5f9b4f91dae3ce8aa4c Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Thu, 20 May 2021 15:56:35 -0700 Subject: [PATCH 0558/1339] feat: Add `x-goog-api-client` header to rest clients (#888) Also make api-core 1.27.0 minimum requirement --- .../%sub/services/%service/transports/base.py.j2 | 9 +++++++++ .../%sub/services/%service/transports/rest.py.j2 | 3 +++ packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 0570b930932e..728961464d48 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -6,6 +6,9 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import packaging.version import pkg_resources +{% if 'rest' in opts.transport %} +from requests import __version__ as requests_version +{% endif %} import google.auth # type: ignore import google.api_core # type: ignore @@ -33,6 +36,12 @@ try: gapic_version=pkg_resources.get_distribution( '{{ api.naming.warehouse_package_name }}', ).version, + {% if 'grpc' not in opts.transport %} + grpc_version=None, + {% endif %} + {% if 'rest' in opts.transport %} + rest_version=requests_version, + {% endif %} ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index afb296aeaf86..2961172ace9b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -208,8 +208,11 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): url += '?{}'.format('&'.join(query_params)).replace(' ', '+') # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' response = self._session.{{ method.http_opt['verb'] }}( url, + headers=headers, {% if 'body' in method.http_opt %} data=body, {% endif %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 07d9535c6a12..2dc389be2c57 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -27,7 +27,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.26.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 'packaging >= 14.3', From c4b0f4cc559dbe6402447ab024188ae8d5af525a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 21 May 2021 03:14:09 +0200 Subject: [PATCH 0559/1339] chore(deps): update dependency google-api-core to v1.28.0 (#903) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==1.27.0` -> `==1.28.0` | 
[![age](https://badges.renovateapi.com/packages/pypi/google-api-core/1.28.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/1.28.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/1.28.0/compatibility-slim/1.27.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/1.28.0/confidence-slim/1.27.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v1.28.0`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1280-httpswwwgithubcomgoogleapispython-api-corecomparev1270v1280-2021-05-20) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.27.0...v1.28.0) ##### Bug Fixes - require google-auth>=1.25.0 ([#​190](https://www.github.com/googleapis/python-api-core/issues/190)) ([155da5e](https://www.github.com/googleapis/python-api-core/commit/155da5e18cc2fdcfa57de6f956b7d078e79cd4b7)) ##### Miscellaneous Chores - release 1.28.0 ([#​192](https://www.github.com/googleapis/python-api-core/issues/192)) ([11b5da4](https://www.github.com/googleapis/python-api-core/commit/11b5da426a842541ca2b861d3387fc312b3f5b60))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7f10d067224d..f97a6dc6b35f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.0 -google-api-core==1.27.0 +google-api-core==1.28.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From f9e5b5812dd71537f4324583f1625a975610fdd4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 21 May 2021 03:56:07 +0200 Subject: [PATCH 0560/1339] chore(deps): update dependency click to v8.0.1 (#896) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [click](https://palletsprojects.com/p/click/) ([changelog](https://click.palletsprojects.com/changes/)) | `==8.0.0` -> `==8.0.1` | [![age](https://badges.renovateapi.com/packages/pypi/click/8.0.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![adoption](https://badges.renovateapi.com/packages/pypi/click/8.0.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/click/8.0.1/compatibility-slim/8.0.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/click/8.0.1/confidence-slim/8.0.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻️ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f97a6dc6b35f..500661cb424e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.0.0 +click==8.0.1 google-api-core==1.28.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 From dc2c01631ff07d4434893424822d1a10048bdec6 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Mon, 24 May 2021 12:49:40 -0700 Subject: [PATCH 0561/1339] docs: Add DEVELOPMENT.md (#876) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- packages/gapic-generator/DEVELOPMENT.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/gapic-generator/DEVELOPMENT.md diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md new file mode 100644 index 000000000000..bcd648ab6cfb --- /dev/null +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -0,0 +1,7 @@ +# Development + +- Install dependencies with `pip install .` +- See all nox sessions with `nox -l` +- Execute unit tests by running one of the sessions prefixed with `unit-` + - Example: `nox -s unit-3.8` +- Lint sources by running `autopep8`. 
From fc089799d01e131320c8134623d7a9d3d14b4a50 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Thu, 27 May 2021 12:43:40 -0700 Subject: [PATCH 0562/1339] feat(tests): Add integration test framework, goldens for 4 APIs [gapic-generator-python] (#905) * feat(tests): Add integration test framework, goldens for 4 APIs * fix: exclude generated sources from style checks * fix: add integration tests to CI * fix: split out integration tests * fix: bazel install * fix: Use 3.8 only for integration tests * fix: add integration test steps to docs --- .../.github/workflows/tests.yaml | 30 +- packages/gapic-generator/DEVELOPMENT.md | 4 + packages/gapic-generator/WORKSPACE | 8 + packages/gapic-generator/repositories.bzl | 15 + .../rules_python_gapic/py_gapic.bzl | 4 +- .../rules_python_gapic/py_gapic_pkg.bzl | 4 +- .../rules_python_gapic/test/BUILD.bazel | 0 .../test/integration_test.bzl | 152 + .../tests/integration/BUILD.bazel | 78 + .../cloudasset_grpc_service_config.json | 95 + .../integration/goldens/asset/.coveragerc | 17 + .../integration/goldens/asset/BUILD.bazel | 12 + .../integration/goldens/asset/MANIFEST.in | 2 + .../integration/goldens/asset/README.rst | 49 + .../asset/docs/asset_v1/asset_service.rst | 10 + .../goldens/asset/docs/asset_v1/services.rst | 6 + .../goldens/asset/docs/asset_v1/types.rst | 7 + .../integration/goldens/asset/docs/conf.py | 376 + .../integration/goldens/asset/docs/index.rst | 7 + .../asset/google/cloud/asset/__init__.py | 99 + .../goldens/asset/google/cloud/asset/py.typed | 2 + .../asset/google/cloud/asset_v1/__init__.py | 100 + .../google/cloud/asset_v1/gapic_metadata.json | 133 + .../asset/google/cloud/asset_v1/py.typed | 2 + .../cloud/asset_v1/services/__init__.py | 15 + .../services/asset_service/__init__.py | 22 + .../services/asset_service/async_client.py | 1180 +++ .../asset_v1/services/asset_service/client.py | 1332 ++++ .../asset_v1/services/asset_service/pagers.py | 263 + .../asset_service/transports/__init__.py | 33 + 
.../services/asset_service/transports/base.py | 356 + .../services/asset_service/transports/grpc.py | 567 ++ .../asset_service/transports/grpc_asyncio.py | 571 ++ .../google/cloud/asset_v1/types/__init__.py | 98 + .../cloud/asset_v1/types/asset_service.py | 1452 ++++ .../google/cloud/asset_v1/types/assets.py | 867 +++ .../tests/integration/goldens/asset/mypy.ini | 3 + .../integration/goldens/asset/noxfile.py | 132 + .../asset/scripts/fixup_asset_v1_keywords.py | 186 + .../tests/integration/goldens/asset/setup.py | 53 + .../goldens/asset/tests/__init__.py | 16 + .../goldens/asset/tests/unit/__init__.py | 16 + .../asset/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/asset_v1/__init__.py | 16 + .../unit/gapic/asset_v1/test_asset_service.py | 3612 +++++++++ .../goldens/credentials/.coveragerc | 17 + .../goldens/credentials/BUILD.bazel | 12 + .../goldens/credentials/MANIFEST.in | 2 + .../goldens/credentials/README.rst | 49 + .../goldens/credentials/docs/conf.py | 376 + .../docs/credentials_v1/iam_credentials.rst | 6 + .../docs/credentials_v1/services.rst | 6 + .../credentials/docs/credentials_v1/types.rst | 7 + .../goldens/credentials/docs/index.rst | 7 + .../google/iam/credentials/__init__.py | 39 + .../google/iam/credentials/py.typed | 2 + .../google/iam/credentials_v1/__init__.py | 40 + .../iam/credentials_v1/gapic_metadata.json | 63 + .../google/iam/credentials_v1/py.typed | 2 + .../iam/credentials_v1/services/__init__.py | 15 + .../services/iam_credentials/__init__.py | 22 + .../services/iam_credentials/async_client.py | 663 ++ .../services/iam_credentials/client.py | 822 +++ .../iam_credentials/transports/__init__.py | 33 + .../iam_credentials/transports/base.py | 230 + .../iam_credentials/transports/grpc.py | 339 + .../transports/grpc_asyncio.py | 343 + .../iam/credentials_v1/types/__init__.py | 36 + .../google/iam/credentials_v1/types/common.py | 299 + .../credentials_v1/types/iamcredentials.py | 25 + .../integration/goldens/credentials/mypy.ini | 3 + 
.../goldens/credentials/noxfile.py | 132 + .../scripts/fixup_credentials_v1_keywords.py | 179 + .../integration/goldens/credentials/setup.py | 53 + .../goldens/credentials/tests/__init__.py | 16 + .../credentials/tests/unit/__init__.py | 16 + .../credentials/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/credentials_v1/__init__.py | 16 + .../credentials_v1/test_iam_credentials.py | 1910 +++++ .../integration/goldens/logging/.coveragerc | 17 + .../integration/goldens/logging/BUILD.bazel | 12 + .../integration/goldens/logging/MANIFEST.in | 2 + .../integration/goldens/logging/README.rst | 49 + .../integration/goldens/logging/docs/conf.py | 376 + .../goldens/logging/docs/index.rst | 7 + .../docs/logging_v2/config_service_v2.rst | 10 + .../docs/logging_v2/logging_service_v2.rst | 10 + .../docs/logging_v2/metrics_service_v2.rst | 10 + .../logging/docs/logging_v2/services.rst | 8 + .../goldens/logging/docs/logging_v2/types.rst | 7 + .../logging/google/cloud/logging/__init__.py | 143 + .../logging/google/cloud/logging/py.typed | 2 + .../google/cloud/logging_v2/__init__.py | 144 + .../cloud/logging_v2/gapic_metadata.json | 391 + .../logging/google/cloud/logging_v2/py.typed | 2 + .../cloud/logging_v2/services/__init__.py | 15 + .../services/config_service_v2/__init__.py | 22 + .../config_service_v2/async_client.py | 2016 ++++++ .../services/config_service_v2/client.py | 2194 ++++++ .../services/config_service_v2/pagers.py | 506 ++ .../config_service_v2/transports/__init__.py | 33 + .../config_service_v2/transports/base.py | 528 ++ .../config_service_v2/transports/grpc.py | 874 +++ .../transports/grpc_asyncio.py | 878 +++ .../services/logging_service_v2/__init__.py | 22 + .../logging_service_v2/async_client.py | 781 ++ .../services/logging_service_v2/client.py | 916 +++ .../services/logging_service_v2/pagers.py | 386 + .../logging_service_v2/transports/__init__.py | 33 + .../logging_service_v2/transports/base.py | 283 + .../logging_service_v2/transports/grpc.py | 398 + 
.../transports/grpc_asyncio.py | 402 + .../services/metrics_service_v2/__init__.py | 22 + .../metrics_service_v2/async_client.py | 640 ++ .../services/metrics_service_v2/client.py | 795 ++ .../services/metrics_service_v2/pagers.py | 140 + .../metrics_service_v2/transports/__init__.py | 33 + .../metrics_service_v2/transports/base.py | 253 + .../metrics_service_v2/transports/grpc.py | 353 + .../transports/grpc_asyncio.py | 357 + .../google/cloud/logging_v2/types/__init__.py | 138 + .../cloud/logging_v2/types/log_entry.py | 321 + .../google/cloud/logging_v2/types/logging.py | 573 ++ .../cloud/logging_v2/types/logging_config.py | 1457 ++++ .../cloud/logging_v2/types/logging_metrics.py | 371 + .../integration/goldens/logging/mypy.ini | 3 + .../integration/goldens/logging/noxfile.py | 132 + .../scripts/fixup_logging_v2_keywords.py | 209 + .../integration/goldens/logging/setup.py | 53 + .../goldens/logging/tests/__init__.py | 16 + .../goldens/logging/tests/unit/__init__.py | 16 + .../logging/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/logging_v2/__init__.py | 16 + .../logging_v2/test_config_service_v2.py | 6436 +++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 2486 +++++++ .../logging_v2/test_metrics_service_v2.py | 2351 ++++++ .../integration/goldens/redis/.coveragerc | 17 + .../integration/goldens/redis/BUILD.bazel | 12 + .../integration/goldens/redis/MANIFEST.in | 2 + .../integration/goldens/redis/README.rst | 49 + .../integration/goldens/redis/docs/conf.py | 376 + .../integration/goldens/redis/docs/index.rst | 7 + .../redis/docs/redis_v1/cloud_redis.rst | 10 + .../goldens/redis/docs/redis_v1/services.rst | 6 + .../goldens/redis/docs/redis_v1/types.rst | 7 + .../redis/google/cloud/redis/__init__.py | 59 + .../goldens/redis/google/cloud/redis/py.typed | 2 + .../redis/google/cloud/redis_v1/__init__.py | 60 + .../google/cloud/redis_v1/gapic_metadata.json | 113 + .../redis/google/cloud/redis_v1/py.typed | 2 + .../cloud/redis_v1/services/__init__.py 
| 15 + .../redis_v1/services/cloud_redis/__init__.py | 22 + .../services/cloud_redis/async_client.py | 1097 +++ .../redis_v1/services/cloud_redis/client.py | 1284 ++++ .../redis_v1/services/cloud_redis/pagers.py | 140 + .../cloud_redis/transports/__init__.py | 33 + .../services/cloud_redis/transports/base.py | 279 + .../services/cloud_redis/transports/grpc.py | 538 ++ .../cloud_redis/transports/grpc_asyncio.py | 542 ++ .../google/cloud/redis_v1/types/__init__.py | 56 + .../cloud/redis_v1/types/cloud_redis.py | 708 ++ .../tests/integration/goldens/redis/mypy.ini | 3 + .../integration/goldens/redis/noxfile.py | 132 + .../redis/scripts/fixup_redis_v1_keywords.py | 184 + .../tests/integration/goldens/redis/setup.py | 53 + .../goldens/redis/tests/__init__.py | 16 + .../goldens/redis/tests/unit/__init__.py | 16 + .../redis/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/redis_v1/__init__.py | 16 + .../unit/gapic/redis_v1/test_cloud_redis.py | 3326 +++++++++ .../iamcredentials_grpc_service_config.json | 35 + .../logging_grpc_service_config.json | 162 + .../redis_grpc_service_config.json | 49 + 173 files changed, 55916 insertions(+), 6 deletions(-) create mode 100644 packages/gapic-generator/rules_python_gapic/test/BUILD.bazel create mode 100644 packages/gapic-generator/rules_python_gapic/test/integration_test.bzl create mode 100644 packages/gapic-generator/tests/integration/BUILD.bazel create mode 100755 packages/gapic-generator/tests/integration/cloudasset_grpc_service_config.json create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/.coveragerc create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/BUILD.bazel create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/README.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst create mode 100644 
packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py create mode 
100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/mypy.ini create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/noxfile.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/setup.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/BUILD.bazel create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/README.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst create mode 
100644 packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/setup.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/.coveragerc create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/BUILD.bazel create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/README.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/mypy.ini create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/noxfile.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/setup.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/.coveragerc create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/BUILD.bazel create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/README.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed create mode 100644 
packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/mypy.ini create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/noxfile.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/setup.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py create mode 100755 packages/gapic-generator/tests/integration/iamcredentials_grpc_service_config.json create mode 100755 packages/gapic-generator/tests/integration/logging_grpc_service_config.json create mode 100755 packages/gapic-generator/tests/integration/redis_grpc_service_config.json diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 92ff88118547..623dfc04c4a2 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -313,6 +313,34 @@ jobs: - name: Submit coverage data to codecov. run: codecov if: always() + integration: + runs-on: ubuntu-latest + steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install system dependencies. 
+ run: | + sudo apt-get update + sudo apt-get install -y curl pandoc unzip gcc + - name: Install Bazel + run: | + wget -q "https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/$BAZEL_BINARY" + wget -q "https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/$BAZEL_BINARY.sha256" + sha256sum -c "$BAZEL_BINARY.sha256" + sudo dpkg -i "$BAZEL_BINARY" + env: + BAZEL_VERSION: 3.5.0 + BAZEL_BINARY: bazel_3.5.0-linux-x86_64.deb + - name: Integration Tests + run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis + style-check: runs-on: ubuntu-latest steps: @@ -330,4 +358,4 @@ jobs: python -m pip install autopep8 - name: Check diff run: | - find gapic tests -name "*.py" | xargs autopep8 --diff --exit-code + find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index bcd648ab6cfb..25d000c58ba2 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -5,3 +5,7 @@ - Execute unit tests by running one of the sessions prefixed with `unit-` - Example: `nox -s unit-3.8` - Lint sources by running `autopep8`. + +## Integration Tests +- Running tests: `bazel test tests/integration:asset`. See the full list of targets in `tests/integration/BUILD.bazel`. 
+- Updating golden files: `bazel run tests/integration:asset_update` diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 9475af113bbd..65b9832e7ee5 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -55,3 +55,11 @@ apple_rules_dependencies() load("@build_bazel_apple_support//lib:repositories.bzl", "apple_support_dependencies") apple_support_dependencies() + +load("@com_google_googleapis//:repository_rules.bzl", "switched_rules_by_language") + +switched_rules_by_language( + name = "com_google_googleapis_imports", + gapic = True, + grpc = True, +) diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 0e4de1db1aad..524670034810 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -61,6 +61,21 @@ def gapic_generator_python(): urls = ["https://github.com/googleapis/gapic-generator/archive/03abac35ec0716c6f426ffc1532f9a62f1c9e6a2.zip"], ) + _rules_gapic_version = "0.5.3" + _maybe( + http_archive, + name = "rules_gapic", + strip_prefix = "rules_gapic-%s" % _rules_gapic_version, + urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version], + ) + + _maybe( + http_archive, + name = "com_google_googleapis", + strip_prefix = "googleapis-51fe6432d4076a4c101f561967df4bf1f27818e1", + urls = ["https://github.com/googleapis/googleapis/archive/51fe6432d4076a4c101f561967df4bf1f27818e1.zip"], + ) + def gapic_generator_register_toolchains(): native.register_toolchains( "@gapic_generator_python//:pandoc_toolchain_linux", diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index e0f0500638a3..c9965902d158 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -12,7 +12,7 @@ # See the License for the specific language 
governing permissions and # limitations under the License. -load("@com_google_api_codegen//rules_gapic:gapic.bzl", "proto_custom_library") +load("@rules_gapic//:gapic.bzl", "proto_custom_library") def py_gapic_library( name, @@ -34,7 +34,7 @@ def py_gapic_library( file_args = {} if grpc_service_config: - file_args[grpc_service_config] = "retry-config" + file_args[grpc_service_config] = "retry-config" proto_custom_library( name = srcjar_target_name, diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl index ec80c87a84f2..a590a3aa7484 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -load("@com_google_api_codegen//rules_gapic:gapic_pkg.bzl", "construct_package_dir_paths") +load("@rules_gapic//:gapic_pkg.bzl", "construct_package_dir_paths") def _py_gapic_src_pkg_impl(ctx): srcjar_srcs = [] @@ -66,5 +66,3 @@ def py_gapic_assembly_pkg(name, deps, assembly_name = None, **kwargs): package_dir = package_dir, **kwargs ) - - diff --git a/packages/gapic-generator/rules_python_gapic/test/BUILD.bazel b/packages/gapic-generator/rules_python_gapic/test/BUILD.bazel new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl b/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl new file mode 100644 index 000000000000..ef064a323430 --- /dev/null +++ b/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl @@ -0,0 +1,152 @@ +def _diff_integration_goldens_impl(ctx): + # Extract the Python source files from the generated 3 srcjars from API bazel target, + # and put them in the temporary folder `codegen_tmp`. 
+ # Compare the `codegen_tmp` with the goldens folder e.g `tests/integration/goldens/redis` + # and save the differences in output file `diff_output.txt`. + + diff_output = ctx.outputs.diff_output + check_diff_script = ctx.outputs.check_diff_script + gapic_library = ctx.attr.gapic_library + srcs = ctx.files.srcs + api_name = ctx.attr.name + + script = """ + mkdir codegen_tmp + unzip {input_srcs} -d codegen_tmp + diff -r codegen_tmp $PWD/tests/integration/goldens/{api_name} > {diff_output} + exit 0 # Avoid a build failure. + """.format( + diff_output = diff_output.path, + input_srcs = gapic_library[DefaultInfo].files.to_list()[0].path, + api_name = api_name, + ) + ctx.actions.run_shell( + inputs = srcs + [ + gapic_library[DefaultInfo].files.to_list()[0], + ], + outputs = [diff_output], + command = script, + ) + + # Check the generated diff_output file, if it is empty, that means there is no difference + # between generated source code and goldens files, test should pass. If it is not empty, then + # test will fail by exiting 1. + + check_diff_script_content = """ + # This will not print diff_output to the console unless `--test_output=all` option + # is enabled, it only emits the comparison results to the test.log. + # We could not copy the diff_output.txt to the test.log ($XML_OUTPUT_FILE) because that + # file is not existing at the moment. It is generated once test is finished. 
 + cat $PWD/tests/integration/{api_name}_diff_output.txt + if [ -s $PWD/tests/integration/{api_name}_diff_output.txt ] + then + exit 1 + fi + """.format( + api_name = api_name, + ) + + ctx.actions.write( + output = check_diff_script, + content = check_diff_script_content, + ) + runfiles = ctx.runfiles(files = [ctx.outputs.diff_output]) + return [DefaultInfo(executable = check_diff_script, runfiles = runfiles)] + +diff_integration_goldens_test = rule( + attrs = { + "gapic_library": attr.label(), + "srcs": attr.label_list( + allow_files = True, + mandatory = True, + ), + }, + outputs = { + "diff_output": "%{name}_diff_output.txt", + "check_diff_script": "%{name}_check_diff_script.sh", + }, + implementation = _diff_integration_goldens_impl, + test = True, +) + +def integration_test(name, target, data): + # Bazel target `py_gapic_library` will generate 1 source jar that holds the + # Gapic_library's python sources. + diff_integration_goldens_test( + name = name, + gapic_library = target, + srcs = data, + ) + +def _overwrite_golden_impl(ctx): + # Extract the Python source files from the generated srcjar from the API bazel target, + # and put them in the temporary folder `codegen_tmp`, zip as `goldens_output_zip`. + # Overwrite the goldens folder e.g `tests/integration/goldens/redis` with the + # code generation in `goldens_output_zip`. + + gapic_library = ctx.attr.gapic_library + srcs = ctx.files.srcs + + # Convert the name of bazel rules e.g. `redis_update` to `redis` + # because we will need to overwrite the goldens files in `redis` folder. + api_name = "_".join(ctx.attr.name.split("_")[:-1]) + goldens_output_zip = ctx.outputs.goldens_output_zip + + script = """ + mkdir codegen_tmp + unzip {input_srcs} -d codegen_tmp + cd codegen_tmp + zip -r ../{goldens_output_zip} . 
 + """.format( + goldens_output_zip = goldens_output_zip.path, + input_srcs = gapic_library[DefaultInfo].files.to_list()[0].path, + ) + + ctx.actions.run_shell( + inputs = srcs + [ + gapic_library[DefaultInfo].files.to_list()[0], + ], + outputs = [goldens_output_zip], + command = script, + ) + + # Overwrite the goldens. + golden_update_script_content = """ + cd ${{BUILD_WORKSPACE_DIRECTORY}} + # Filename pattern-based removal is needed to preserve the BUILD.bazel file. + find tests/integration/goldens/{api_name}/ -name \\*.py -type f -delete + find tests/integration/goldens/{api_name}/ -name \\*.json -type f -delete + unzip -ao {goldens_output_zip} -d tests/integration/goldens/{api_name} + """.format( + goldens_output_zip = goldens_output_zip.path, + api_name = api_name, + ) + ctx.actions.write( + output = ctx.outputs.golden_update_script, + content = golden_update_script_content, + is_executable = True, + ) + return [DefaultInfo(executable = ctx.outputs.golden_update_script)] + +overwrite_golden = rule( + attrs = { + "gapic_library": attr.label(), + "srcs": attr.label_list( + allow_files = True, + mandatory = True, + ), + }, + outputs = { + "goldens_output_zip": "%{name}.zip", + "golden_update_script": "%{name}.sh", + }, + executable = True, + implementation = _overwrite_golden_impl, +) + +def golden_update(name, target, data): + overwrite_golden( + name = name, + gapic_library = target, + srcs = data, + ) diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel new file mode 100644 index 000000000000..d816f50c840b --- /dev/null +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -0,0 +1,78 @@ +load( + "@gapic_generator_python//rules_python_gapic:py_gapic.bzl", + "py_gapic_library", +) +load( + "@gapic_generator_python//rules_python_gapic:py_gapic_pkg.bzl", + "py_gapic_assembly_pkg", +) +load( + "@gapic_generator_python//rules_python_gapic/test:integration_test.bzl", + "golden_update", + 
"integration_test", +) +load("@rules_proto//proto:defs.bzl", "proto_library") + +package(default_visibility = ["//visibility:public"]) + +#################################################### +# Integration Test Rules +# +# Usage: +# Run tests: bazel test tests/integration:asset +# Update goldens: bazel run tests/integration:asset_update +#################################################### + +INTEGRATION_TEST_LIBRARIES = [ + "asset", # Basic case. + "credentials", # Check that the capital name edge case is handled. + "logging", # Java package remapping in gapic.yaml. + "redis", # Has a gapic.yaml. +] + +[integration_test( + name = lib_name, + data = ["//tests/integration/goldens/%s:goldens_files" % lib_name], + target = ":%s_py_gapic" % lib_name, +) for lib_name in INTEGRATION_TEST_LIBRARIES] + +[golden_update( + name = "%s_update" % lib_name, + data = ["//tests/integration/goldens/%s:goldens_files" % lib_name], + target = ":%s_py_gapic" % lib_name, +) for lib_name in INTEGRATION_TEST_LIBRARIES] + +#################################################### +# API Library Rules +#################################################### + +# Asset. +py_gapic_library( + name = "asset_py_gapic", + srcs = ["@com_google_googleapis//google/cloud/asset/v1:asset_proto"], + grpc_service_config = "cloudasset_grpc_service_config.json", +) + +# Credentials. +py_gapic_library( + name = "credentials_py_gapic", + srcs = ["@com_google_googleapis//google/iam/credentials/v1:credentials_proto"], + grpc_service_config = "iamcredentials_grpc_service_config.json", +) + +# Logging. 
+py_gapic_library( + name = "logging_py_gapic", + srcs = ["@com_google_googleapis//google/logging/v2:logging_proto"], + grpc_service_config = "logging_grpc_service_config.json", + opt_args = [ + "python-gapic-namespace=google.cloud", + "python-gapic-name=logging", + ], +) + +py_gapic_library( + name = "redis_py_gapic", + srcs = ["@com_google_googleapis//google/cloud/redis/v1:redis_proto"], + grpc_service_config = "redis_grpc_service_config.json", +) diff --git a/packages/gapic-generator/tests/integration/cloudasset_grpc_service_config.json b/packages/gapic-generator/tests/integration/cloudasset_grpc_service_config.json new file mode 100755 index 000000000000..48db7c339208 --- /dev/null +++ b/packages/gapic-generator/tests/integration/cloudasset_grpc_service_config.json @@ -0,0 +1,95 @@ +{ + "methodConfig": [ + { + "name": [ + { + "service": "google.cloud.asset.v1.AssetService", + "method": "ExportAssets" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "CreateFeed" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "UpdateFeed" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "AnalyzeIamPolicyLongrunning" + } + ], + "timeout": "60s" + }, + { + "name": [ + { + "service": "google.cloud.asset.v1.AssetService", + "method": "BatchGetAssetsHistory" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "GetFeed" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "ListFeeds" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "DeleteFeed" + } + ], + "timeout": "60s", + "retryPolicy": { + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ] + } + }, + { + "name": [ + { + "service": "google.cloud.asset.v1.AssetService", + "method": "SearchAllResources" + }, + { + "service": "google.cloud.asset.v1.AssetService", + "method": "SearchAllIamPolicies" + } + ], + 
"timeout": "15s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ] + } + }, + { + "name": [ + { + "service": "google.cloud.asset.v1.AssetService", + "method": "AnalyzeIamPolicy" + } + ], + "timeout": "300s", + "retryPolicy": { + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "UNAVAILABLE" + ] + } + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc new file mode 100644 index 000000000000..3425850c049e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/asset/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/asset/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/asset/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in new file mode 100644 index 000000000000..5c97e27612a3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/asset *.py +recursive-include google/cloud/asset_v1 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/asset/README.rst b/packages/gapic-generator/tests/integration/goldens/asset/README.rst new file mode 100644 index 000000000000..110d4086abb7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Asset API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Asset API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst new file mode 100644 index 000000000000..b2f80a4bd4ba --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst @@ -0,0 +1,10 @@ +AssetService +------------------------------ + +.. automodule:: google.cloud.asset_v1.services.asset_service + :members: + :inherited-members: + +.. automodule:: google.cloud.asset_v1.services.asset_service.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst new file mode 100644 index 000000000000..a5ddb91fe4be --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Asset v1 API +====================================== +.. 
toctree:: + :maxdepth: 2 + + asset_service diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst new file mode 100644 index 000000000000..c75a1efdeabc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Asset v1 API +=================================== + +.. automodule:: google.cloud.asset_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py new file mode 100644 index 000000000000..3aa26721fecd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-asset documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. 
+ +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-asset" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. 
+release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. 
For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. 
+# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-asset-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-asset.tex", + u"google-cloud-asset Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "google-cloud-asset", + u"Google Cloud Asset Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-asset", + u"google-cloud-asset Documentation", + author, + "google-cloud-asset", + "GAPIC library for Google Cloud Asset API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst new file mode 100644 index 000000000000..fee6608ede43 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + asset_v1/services + asset_v1/types diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py new file mode 100644 index 000000000000..d823276527f8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient +from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient + +from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyResponse +from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryRequest +from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryResponse +from google.cloud.asset_v1.types.asset_service import BigQueryDestination +from google.cloud.asset_v1.types.asset_service import CreateFeedRequest +from google.cloud.asset_v1.types.asset_service import DeleteFeedRequest +from google.cloud.asset_v1.types.asset_service import ExportAssetsRequest +from google.cloud.asset_v1.types.asset_service import ExportAssetsResponse +from google.cloud.asset_v1.types.asset_service import Feed +from google.cloud.asset_v1.types.asset_service import FeedOutputConfig +from google.cloud.asset_v1.types.asset_service import GcsDestination +from google.cloud.asset_v1.types.asset_service import GcsOutputResult +from google.cloud.asset_v1.types.asset_service import GetFeedRequest +from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig +from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery +from google.cloud.asset_v1.types.asset_service import ListFeedsRequest +from google.cloud.asset_v1.types.asset_service import ListFeedsResponse +from google.cloud.asset_v1.types.asset_service import OutputConfig +from google.cloud.asset_v1.types.asset_service import OutputResult +from google.cloud.asset_v1.types.asset_service import PartitionSpec +from google.cloud.asset_v1.types.asset_service import PubsubDestination 
+from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesRequest +from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesResponse +from google.cloud.asset_v1.types.asset_service import SearchAllResourcesRequest +from google.cloud.asset_v1.types.asset_service import SearchAllResourcesResponse +from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest +from google.cloud.asset_v1.types.asset_service import ContentType +from google.cloud.asset_v1.types.assets import Asset +from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult +from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState +from google.cloud.asset_v1.types.assets import IamPolicySearchResult +from google.cloud.asset_v1.types.assets import Resource +from google.cloud.asset_v1.types.assets import ResourceSearchResult +from google.cloud.asset_v1.types.assets import TemporalAsset +from google.cloud.asset_v1.types.assets import TimeWindow + +__all__ = ('AssetServiceClient', + 'AssetServiceAsyncClient', + 'AnalyzeIamPolicyLongrunningRequest', + 'AnalyzeIamPolicyLongrunningResponse', + 'AnalyzeIamPolicyRequest', + 'AnalyzeIamPolicyResponse', + 'BatchGetAssetsHistoryRequest', + 'BatchGetAssetsHistoryResponse', + 'BigQueryDestination', + 'CreateFeedRequest', + 'DeleteFeedRequest', + 'ExportAssetsRequest', + 'ExportAssetsResponse', + 'Feed', + 'FeedOutputConfig', + 'GcsDestination', + 'GcsOutputResult', + 'GetFeedRequest', + 'IamPolicyAnalysisOutputConfig', + 'IamPolicyAnalysisQuery', + 'ListFeedsRequest', + 'ListFeedsResponse', + 'OutputConfig', + 'OutputResult', + 'PartitionSpec', + 'PubsubDestination', + 'SearchAllIamPoliciesRequest', + 'SearchAllIamPoliciesResponse', + 'SearchAllResourcesRequest', + 'SearchAllResourcesResponse', + 'UpdateFeedRequest', + 'ContentType', + 'Asset', + 'IamPolicyAnalysisResult', + 'IamPolicyAnalysisState', + 'IamPolicySearchResult', + 'Resource', + 'ResourceSearchResult', + 'TemporalAsset', + 'TimeWindow', 
+) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed new file mode 100644 index 000000000000..3dbb09a39130 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-asset package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py new file mode 100644 index 000000000000..3988eaad259f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.asset_service import AssetServiceClient +from .services.asset_service import AssetServiceAsyncClient + +from .types.asset_service import AnalyzeIamPolicyLongrunningRequest +from .types.asset_service import AnalyzeIamPolicyLongrunningResponse +from .types.asset_service import AnalyzeIamPolicyRequest +from .types.asset_service import AnalyzeIamPolicyResponse +from .types.asset_service import BatchGetAssetsHistoryRequest +from .types.asset_service import BatchGetAssetsHistoryResponse +from .types.asset_service import BigQueryDestination +from .types.asset_service import CreateFeedRequest +from .types.asset_service import DeleteFeedRequest +from .types.asset_service import ExportAssetsRequest +from .types.asset_service import ExportAssetsResponse +from .types.asset_service import Feed +from .types.asset_service import FeedOutputConfig +from .types.asset_service import GcsDestination +from .types.asset_service import GcsOutputResult +from .types.asset_service import GetFeedRequest +from .types.asset_service import IamPolicyAnalysisOutputConfig +from .types.asset_service import IamPolicyAnalysisQuery +from .types.asset_service import ListFeedsRequest +from .types.asset_service import ListFeedsResponse +from .types.asset_service import OutputConfig +from .types.asset_service import OutputResult +from .types.asset_service import PartitionSpec +from .types.asset_service import PubsubDestination +from .types.asset_service import SearchAllIamPoliciesRequest +from .types.asset_service import SearchAllIamPoliciesResponse +from .types.asset_service import SearchAllResourcesRequest +from .types.asset_service import SearchAllResourcesResponse +from .types.asset_service import UpdateFeedRequest +from .types.asset_service import ContentType +from .types.assets import Asset +from .types.assets import IamPolicyAnalysisResult +from .types.assets import IamPolicyAnalysisState +from .types.assets import IamPolicySearchResult +from .types.assets import Resource +from 
.types.assets import ResourceSearchResult +from .types.assets import TemporalAsset +from .types.assets import TimeWindow + +__all__ = ( + 'AssetServiceAsyncClient', +'AnalyzeIamPolicyLongrunningRequest', +'AnalyzeIamPolicyLongrunningResponse', +'AnalyzeIamPolicyRequest', +'AnalyzeIamPolicyResponse', +'Asset', +'AssetServiceClient', +'BatchGetAssetsHistoryRequest', +'BatchGetAssetsHistoryResponse', +'BigQueryDestination', +'ContentType', +'CreateFeedRequest', +'DeleteFeedRequest', +'ExportAssetsRequest', +'ExportAssetsResponse', +'Feed', +'FeedOutputConfig', +'GcsDestination', +'GcsOutputResult', +'GetFeedRequest', +'IamPolicyAnalysisOutputConfig', +'IamPolicyAnalysisQuery', +'IamPolicyAnalysisResult', +'IamPolicyAnalysisState', +'IamPolicySearchResult', +'ListFeedsRequest', +'ListFeedsResponse', +'OutputConfig', +'OutputResult', +'PartitionSpec', +'PubsubDestination', +'Resource', +'ResourceSearchResult', +'SearchAllIamPoliciesRequest', +'SearchAllIamPoliciesResponse', +'SearchAllResourcesRequest', +'SearchAllResourcesResponse', +'TemporalAsset', +'TimeWindow', +'UpdateFeedRequest', +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json new file mode 100644 index 000000000000..a80eb281c4a9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json @@ -0,0 +1,133 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.asset_v1", + "protoPackage": "google.cloud.asset.v1", + "schema": "1.0", + "services": { + "AssetService": { + "clients": { + "grpc": { + "libraryClient": "AssetServiceClient", + "rpcs": { + "AnalyzeIamPolicy": { + "methods": [ + "analyze_iam_policy" + ] + }, + "AnalyzeIamPolicyLongrunning": { + "methods": [ + 
"analyze_iam_policy_longrunning" + ] + }, + "BatchGetAssetsHistory": { + "methods": [ + "batch_get_assets_history" + ] + }, + "CreateFeed": { + "methods": [ + "create_feed" + ] + }, + "DeleteFeed": { + "methods": [ + "delete_feed" + ] + }, + "ExportAssets": { + "methods": [ + "export_assets" + ] + }, + "GetFeed": { + "methods": [ + "get_feed" + ] + }, + "ListFeeds": { + "methods": [ + "list_feeds" + ] + }, + "SearchAllIamPolicies": { + "methods": [ + "search_all_iam_policies" + ] + }, + "SearchAllResources": { + "methods": [ + "search_all_resources" + ] + }, + "UpdateFeed": { + "methods": [ + "update_feed" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AssetServiceAsyncClient", + "rpcs": { + "AnalyzeIamPolicy": { + "methods": [ + "analyze_iam_policy" + ] + }, + "AnalyzeIamPolicyLongrunning": { + "methods": [ + "analyze_iam_policy_longrunning" + ] + }, + "BatchGetAssetsHistory": { + "methods": [ + "batch_get_assets_history" + ] + }, + "CreateFeed": { + "methods": [ + "create_feed" + ] + }, + "DeleteFeed": { + "methods": [ + "delete_feed" + ] + }, + "ExportAssets": { + "methods": [ + "export_assets" + ] + }, + "GetFeed": { + "methods": [ + "get_feed" + ] + }, + "ListFeeds": { + "methods": [ + "list_feeds" + ] + }, + "SearchAllIamPolicies": { + "methods": [ + "search_all_iam_policies" + ] + }, + "SearchAllResources": { + "methods": [ + "search_all_resources" + ] + }, + "UpdateFeed": { + "methods": [ + "update_feed" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed new file mode 100644 index 000000000000..3dbb09a39130 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-asset package uses inline types. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py new file mode 100644 index 000000000000..357f952048fc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import AssetServiceClient +from .async_client import AssetServiceAsyncClient + +__all__ = ( + 'AssetServiceClient', + 'AssetServiceAsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py new file mode 100644 index 000000000000..099a951d1737 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -0,0 +1,1180 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.types import asset_service +from google.cloud.asset_v1.types import assets +from google.type import expr_pb2 # type: ignore +from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport +from .client import AssetServiceClient + + +class AssetServiceAsyncClient: + """Asset service definition.""" + + _client: AssetServiceClient + + DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT + + asset_path = staticmethod(AssetServiceClient.asset_path) + parse_asset_path = staticmethod(AssetServiceClient.parse_asset_path) + feed_path = staticmethod(AssetServiceClient.feed_path) + parse_feed_path = staticmethod(AssetServiceClient.parse_feed_path) + common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(AssetServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(AssetServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(AssetServiceClient.common_organization_path) + 
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            AssetServiceAsyncClient: The constructed client.
        """
        # Delegate to the synchronous client's classmethod, but unwrap it via
        # ``__func__`` so we can pass AssetServiceAsyncClient explicitly as
        # the class argument; the constructed client is therefore the async
        # variant, not an AssetServiceClient.
        return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs)  # type: ignore

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            AssetServiceAsyncClient: The constructed client.
        """
        # Same ``__func__`` rebinding trick as from_service_account_info:
        # reuse the sync implementation while constructing the async class.
        return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs)  # type: ignore

    # Backwards-compatible alias kept for parity with the sync client.
    from_service_account_json = from_service_account_file
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(AssetServiceClient).get_transport_class, type(AssetServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, AssetServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the asset service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.AssetServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = AssetServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def export_assets(self, + request: asset_service.ExportAssetsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports assets with time and resource types to a given Cloud + Storage location/BigQuery table. For Cloud Storage location + destinations, the output format is newline-delimited JSON. Each + line represents a + [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in + the JSON format; for BigQuery table destinations, the output + table stores the fields in asset proto as columns. This API + implements the + [google.longrunning.Operation][google.longrunning.Operation] API + , which allows you to keep track of the export. We recommend + intervals of at least 2 seconds with exponential retry to poll + the export operation result. For regular-size resource parent, + the export operation usually finishes within 5 minutes. + + Args: + request (:class:`google.cloud.asset_v1.types.ExportAssetsRequest`): + The request object. Export asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.asset_v1.types.ExportAssetsResponse` The export asset response. 
This message is returned by the + [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] + method in the returned + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field. + + """ + # Create or coerce a protobuf request object. + request = asset_service.ExportAssetsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_assets, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + asset_service.ExportAssetsResponse, + metadata_type=asset_service.ExportAssetsRequest, + ) + + # Done; return the response. + return response + + async def batch_get_assets_history(self, + request: asset_service.BatchGetAssetsHistoryRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: + r"""Batch gets the update history of assets that overlap a time + window. For IAM_POLICY content, this API outputs history when + the asset and its attached IAM POLICY both exist. This can + create gaps in the output history. Otherwise, this API outputs + history with asset in both non-delete or deleted status. If a + specified asset does not exist, this API returns an + INVALID_ARGUMENT error. 
+ + Args: + request (:class:`google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest`): + The request object. Batch get assets history request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: + Batch get assets history response. + """ + # Create or coerce a protobuf request object. + request = asset_service.BatchGetAssetsHistoryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_get_assets_history, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_feed(self, + request: asset_service.CreateFeedRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.Feed: + r"""Creates a feed in a parent + project/folder/organization to listen to its asset + updates. + + Args: + request (:class:`google.cloud.asset_v1.types.CreateFeedRequest`): + The request object. Create asset feed request. + parent (:class:`str`): + Required. 
The name of the + project/folder/organization where this + feed should be created in. It can only + be an organization number (such as + "organizations/123"), a folder number + (such as "folders/123"), a project ID + (such as "projects/my-project-id")", or + a project number (such as + "projects/12345"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.CreateFeedRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_feed, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_feed(self, + request: asset_service.GetFeedRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.Feed: + r"""Gets details about an asset feed. + + Args: + request (:class:`google.cloud.asset_v1.types.GetFeedRequest`): + The request object. Get asset feed request. + name (:class:`str`): + Required. The name of the Feed and it must be in the + format of: projects/project_number/feeds/feed_id + folders/folder_number/feeds/feed_id + organizations/organization_number/feeds/feed_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
    async def list_feeds(self,
            request: asset_service.ListFeedsRequest = None,
            *,
            parent: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> asset_service.ListFeedsResponse:
        r"""Lists all asset feeds in a parent
        project/folder/organization.

        Args:
            request (:class:`google.cloud.asset_v1.types.ListFeedsRequest`):
                The request object. List asset feeds request.
            parent (:class:`str`):
                Required. The parent
                project/folder/organization whose feeds
                are to be listed. It can only be using
                project/folder/organization number (such
                as "folders/12345")", or a project ID
                (such as "projects/my-project-id").

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.asset_v1.types.ListFeedsResponse:
                List asset feeds response.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = asset_service.ListFeedsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. ListFeeds is retried by default on
        # DeadlineExceeded / ServiceUnavailable with exponential backoff
        # (0.1s initial, 1.3x multiplier, 60s max) and a 60s deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_feeds,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing metadata derived from ``request.parent``).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def delete_feed(self,
            request: asset_service.DeleteFeedRequest = None,
            *,
            name: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> None:
        r"""Deletes an asset feed.

        Args:
            request (:class:`google.cloud.asset_v1.types.DeleteFeedRequest`):
                The request object.
            name (:class:`str`):
                Required. The name of the feed and it must be in the
                format of: projects/project_number/feeds/feed_id
                folders/folder_number/feeds/feed_id
                organizations/organization_number/feeds/feed_id

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = asset_service.DeleteFeedRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. DeleteFeed is retried by default on
        # DeadlineExceeded / ServiceUnavailable (exponential backoff: 0.1s
        # initial, 1.3x multiplier, 60s max) within a 60s deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_feed,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing metadata derived from ``request.name``).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request. The RPC has no meaningful response (Empty),
        # so nothing is returned.
        await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
    async def search_all_resources(self,
            request: asset_service.SearchAllResourcesRequest = None,
            *,
            scope: str = None,
            query: str = None,
            asset_types: Sequence[str] = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.SearchAllResourcesAsyncPager:
        r"""Searches all Cloud resources within the specified scope, such as
        a project, folder, or organization. The caller must be granted
        the ``cloudasset.assets.searchAllResources`` permission on the
        desired scope, otherwise the request will be rejected.

        Args:
            request (:class:`google.cloud.asset_v1.types.SearchAllResourcesRequest`):
                The request object. Search all resources request.
            scope (:class:`str`):
                Required. A scope can be a project, a folder, or an
                organization. The search is limited to the resources
                within the ``scope``. Allowed values are:
                ``projects/{PROJECT_ID}``, ``projects/{PROJECT_NUMBER}``,
                ``folders/{FOLDER_NUMBER}``, and
                ``organizations/{ORGANIZATION_NUMBER}``.

                This corresponds to the ``scope`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            query (:class:`str`):
                Optional. The query statement. If not specified or empty,
                all resources within the specified ``scope`` are searched.
                Supports field-scoped terms (``name:Important``,
                ``location:us-west*``, ``labels.env:prod``), bare terms, and
                combinations such as
                ``Important location:(us-west1 OR global)``.

                This corresponds to the ``query`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            asset_types (:class:`Sequence[str]`):
                Optional. A list of asset types that this request searches
                for. If empty, it will search all the searchable asset
                types.

                This corresponds to the ``asset_types`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager:
                Search all resources response.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([scope, query, asset_types])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = asset_service.SearchAllResourcesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these. Note that an empty asset_types sequence is
        # deliberately *not* applied (truthiness check, not ``is not None``).
        if scope is not None:
            request.scope = scope
        if query is not None:
            request.query = query
        if asset_types:
            request.asset_types.extend(asset_types)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. Retried on DeadlineExceeded /
        # ServiceUnavailable (0.1s initial, 1.3x multiplier, 60s max
        # per-attempt backoff) within a 15s overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.search_all_resources,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=15.0,
            ),
            default_timeout=15.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing metadata derived from ``request.scope``).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("scope", request.scope),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method. The pager keeps the wrapped RPC
        # so it can fetch subsequent pages on demand.
        response = pagers.SearchAllResourcesAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response
within the ``scope``. The caller must be granted the
+ ``cloudasset.assets.searchAllIamPolicies``
+ permission on the desired scope.
+ + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: + Search all IAM policies response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.SearchAllIamPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_all_iam_policies, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=15.0, + ), + default_timeout=15.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchAllIamPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_iam_policy(self, + request: asset_service.AnalyzeIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: + r"""Analyzes IAM policies to answer which identities have + what accesses on which resources. + + Args: + request (:class:`google.cloud.asset_v1.types.AnalyzeIamPolicyRequest`): + The request object. A request message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: + A response message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + + """ + # Create or coerce a protobuf request object. + request = asset_service.AnalyzeIamPolicyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_iam_policy, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("analysis_query.scope", request.analysis_query.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_iam_policy_longrunning(self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Analyzes IAM policies asynchronously to answer which identities + have what accesses on which resources, and writes the analysis + results to a Google Cloud Storage or a BigQuery destination. For + Cloud Storage destination, the output format is the JSON format + that represents a + [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse]. + This method implements the + [google.longrunning.Operation][google.longrunning.Operation], + which allows you to track the operation status. We recommend + intervals of at least 2 seconds with exponential backoff retry + to poll the operation result. The metadata contains the request + to help callers to map responses to requests. + + Args: + request (:class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest`): + The request object. A request message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` + A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + + """ + # Create or coerce a protobuf request object. + request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_iam_policy_longrunning, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("analysis_query.scope", request.analysis_query.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + asset_service.AnalyzeIamPolicyLongrunningResponse, + metadata_type=asset_service.AnalyzeIamPolicyLongrunningRequest, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-asset", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "AssetServiceAsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py new file mode 100644 index 000000000000..1b86808fbd38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -0,0 +1,1332 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.types import asset_service +from google.cloud.asset_v1.types import assets +from google.type import expr_pb2 # type: ignore +from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import AssetServiceGrpcTransport +from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport + + +class AssetServiceClientMeta(type): + """Metaclass for the AssetService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] + _transport_registry["grpc"] = AssetServiceGrpcTransport + _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[AssetServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AssetServiceClient(metaclass=AssetServiceClientMeta): + """Asset service definition.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "cloudasset.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssetServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AssetServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AssetServiceTransport: + """Returns the transport used by the client instance. + + Returns: + AssetServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def asset_path() -> str: + """Returns a fully-qualified asset string.""" + return "*".format() + + @staticmethod + def parse_asset_path(path: str) -> Dict[str,str]: + """Parses a asset path into its component segments.""" + m = re.match(r"^*$", path) + return m.groupdict() if m else {} + + @staticmethod + def feed_path(project: str,feed: str,) -> str: + """Returns a fully-qualified feed string.""" + return "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) + + @staticmethod + def parse_feed_path(path: str) -> Dict[str,str]: + """Parses a feed path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/feeds/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def 
parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: 
Union[str, AssetServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the asset service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, AssetServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, AssetServiceTransport): + # transport is a AssetServiceTransport instance. 
+ if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def export_assets(self, + request: asset_service.ExportAssetsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Exports assets with time and resource types to a given Cloud + Storage location/BigQuery table. For Cloud Storage location + destinations, the output format is newline-delimited JSON. Each + line represents a + [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in + the JSON format; for BigQuery table destinations, the output + table stores the fields in asset proto as columns. This API + implements the + [google.longrunning.Operation][google.longrunning.Operation] API + , which allows you to keep track of the export. We recommend + intervals of at least 2 seconds with exponential retry to poll + the export operation result. For regular-size resource parent, + the export operation usually finishes within 5 minutes. + + Args: + request (google.cloud.asset_v1.types.ExportAssetsRequest): + The request object. Export asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.asset_v1.types.ExportAssetsResponse` The export asset response. This message is returned by the + [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation] + method in the returned + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.ExportAssetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.ExportAssetsRequest): + request = asset_service.ExportAssetsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + asset_service.ExportAssetsResponse, + metadata_type=asset_service.ExportAssetsRequest, + ) + + # Done; return the response. 
+ return response + + def batch_get_assets_history(self, + request: asset_service.BatchGetAssetsHistoryRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.BatchGetAssetsHistoryResponse: + r"""Batch gets the update history of assets that overlap a time + window. For IAM_POLICY content, this API outputs history when + the asset and its attached IAM POLICY both exist. This can + create gaps in the output history. Otherwise, this API outputs + history with asset in both non-delete or deleted status. If a + specified asset does not exist, this API returns an + INVALID_ARGUMENT error. + + Args: + request (google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest): + The request object. Batch get assets history request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: + Batch get assets history response. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.BatchGetAssetsHistoryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.BatchGetAssetsHistoryRequest): + request = asset_service.BatchGetAssetsHistoryRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_get_assets_history] + + # Certain fields should be provided within the metadata header; + # add these here. 
(such as "folders/123"), a project ID
+ (such as "projects/my-project-id"), or
+ a project number (such as
+ "projects/12345").
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.CreateFeedRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.CreateFeedRequest): + request = asset_service.CreateFeedRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_feed] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_feed(self, + request: asset_service.GetFeedRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.Feed: + r"""Gets details about an asset feed. + + Args: + request (google.cloud.asset_v1.types.GetFeedRequest): + The request object. Get asset feed request. + name (str): + Required. The name of the Feed and it must be in the + format of: projects/project_number/feeds/feed_id + folders/folder_number/feeds/feed_id + organizations/organization_number/feeds/feed_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.GetFeedRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.GetFeedRequest): + request = asset_service.GetFeedRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_feed] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_feeds(self, + request: asset_service.ListFeedsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.ListFeedsResponse: + r"""Lists all asset feeds in a parent + project/folder/organization. + + Args: + request (google.cloud.asset_v1.types.ListFeedsRequest): + The request object. List asset feeds request. + parent (str): + Required. The parent + project/folder/organization whose feeds + are to be listed. It can only be using + project/folder/organization number (such + as "folders/12345")", or a project ID + (such as "projects/my-project-id"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.ListFeedsResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.ListFeedsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.ListFeedsRequest): + request = asset_service.ListFeedsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_feeds] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_feed(self, + request: asset_service.UpdateFeedRequest = None, + *, + feed: asset_service.Feed = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.Feed: + r"""Updates an asset feed configuration. + + Args: + request (google.cloud.asset_v1.types.UpdateFeedRequest): + The request object. Update asset feed request. + feed (google.cloud.asset_v1.types.Feed): + Required. The new values of feed details. It must match + an existing feed and the field ``name`` must be in the + format of: projects/project_number/feeds/feed_id or + folders/folder_number/feeds/feed_id or + organizations/organization_number/feeds/feed_id. + + This corresponds to the ``feed`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([feed]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.UpdateFeedRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.UpdateFeedRequest): + request = asset_service.UpdateFeedRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if feed is not None: + request.feed = feed + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_feed] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("feed.name", request.feed.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_feed(self, + request: asset_service.DeleteFeedRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an asset feed. + + Args: + request (google.cloud.asset_v1.types.DeleteFeedRequest): + The request object. + name (str): + Required. 
The name of the feed and it must be in the + format of: projects/project_number/feeds/feed_id + folders/folder_number/feeds/feed_id + organizations/organization_number/feeds/feed_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.DeleteFeedRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.DeleteFeedRequest): + request = asset_service.DeleteFeedRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_feed] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def search_all_resources(self, + request: asset_service.SearchAllResourcesRequest = None, + *, + scope: str = None, + query: str = None, + asset_types: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchAllResourcesPager: + r"""Searches all Cloud resources within the specified scope, such as + a project, folder, or organization. The caller must be granted + the ``cloudasset.assets.searchAllResources`` permission on the + desired scope, otherwise the request will be rejected. + + Args: + request (google.cloud.asset_v1.types.SearchAllResourcesRequest): + The request object. Search all resources request. + scope (str): + Required. A scope can be a project, a folder, or an + organization. The search is limited to the resources + within the ``scope``. The caller must be granted the + ```cloudasset.assets.searchAllResources`` `__ + permission on the desired scope. + + The allowed values are: + + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Optional. The query statement. See `how to construct a + query `__ + for more information. If not specified or empty, it will + search all the resources within the specified ``scope``. + Note that the query string is compared against each + Cloud IAM policy binding, including its members, roles, + and Cloud IAM conditions. The returned Cloud IAM + policies will only contain the bindings that match your + query. To learn more about the IAM policy structure, see + `IAM policy + doc `__. 
+ + Examples: + + - ``name:Important`` to find Cloud resources whose name + contains "Important" as a word. + - ``displayName:Impor*`` to find Cloud resources whose + display name contains "Impor" as a prefix. + - ``description:*por*`` to find Cloud resources whose + description contains "por" as a substring. + - ``location:us-west*`` to find Cloud resources whose + location is prefixed with "us-west". + - ``labels:prod`` to find Cloud resources whose labels + contain "prod" as a key or value. + - ``labels.env:prod`` to find Cloud resources that have + a label "env" and its value is "prod". + - ``labels.env:*`` to find Cloud resources that have a + label "env". + - ``Important`` to find Cloud resources that contain + "Important" as a word in any of the searchable + fields. + - ``Impor*`` to find Cloud resources that contain + "Impor" as a prefix in any of the searchable fields. + - ``*por*`` to find Cloud resources that contain "por" + as a substring in any of the searchable fields. + - ``Important location:(us-west1 OR global)`` to find + Cloud resources that contain "Important" as a word in + any of the searchable fields and are also located in + the "us-west1" region or the "global" location. + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + asset_types (Sequence[str]): + Optional. A list of asset types that this request + searches for. If empty, it will search all the + `searchable asset + types `__. + + This corresponds to the ``asset_types`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager: + Search all resources response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query, asset_types]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.SearchAllResourcesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.SearchAllResourcesRequest): + request = asset_service.SearchAllResourcesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + if asset_types is not None: + request.asset_types = asset_types + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_all_resources] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchAllResourcesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def search_all_iam_policies(self, + request: asset_service.SearchAllIamPoliciesRequest = None, + *, + scope: str = None, + query: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.SearchAllIamPoliciesPager: + r"""Searches all IAM policies within the specified scope, such as a + project, folder, or organization. The caller must be granted the + ``cloudasset.assets.searchAllIamPolicies`` permission on the + desired scope, otherwise the request will be rejected. + + Args: + request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest): + The request object. Search all IAM policies request. + scope (str): + Required. A scope can be a project, a folder, or an + organization. The search is limited to the IAM policies + within the ``scope``. The caller must be granted the + ```cloudasset.assets.searchAllIamPolicies`` `__ + permission on the desired scope. + + The allowed values are: + + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + query (str): + Optional. The query statement. See `how to construct a + query `__ + for more information. If not specified or empty, it will + search all the IAM policies within the specified + ``scope``. + + Examples: + + - ``policy:amy@gmail.com`` to find IAM policy bindings + that specify user "amy@gmail.com". + - ``policy:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``policy.role.permissions:storage.buckets.update`` to + find IAM policy bindings that specify a role + containing "storage.buckets.update" permission. 
Note + that if callers don't have ``iam.roles.get`` access + to a role's included permissions, policy bindings + that specify this role will be dropped from the + search results. + - ``resource:organizations/123456`` to find IAM policy + bindings that are set on "organizations/123456". + - ``Important`` to find IAM policy bindings that + contain "Important" as a word in any of the + searchable fields (except for the included + permissions). + - ``*por*`` to find IAM policy bindings that contain + "por" as a substring in any of the searchable fields + (except for the included permissions). + - ``resource:(instance1 OR instance2) policy:amy`` to + find IAM policy bindings that are set on resources + "instance1" or "instance2" and also specify user + "amy". + + This corresponds to the ``query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager: + Search all IAM policies response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, query]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.SearchAllIamPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): + request = asset_service.SearchAllIamPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if query is not None: + request.query = query + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_all_iam_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchAllIamPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_iam_policy(self, + request: asset_service.AnalyzeIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.AnalyzeIamPolicyResponse: + r"""Analyzes IAM policies to answer which identities have + what accesses on which resources. + + Args: + request (google.cloud.asset_v1.types.AnalyzeIamPolicyRequest): + The request object. A request message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: + A response message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeIamPolicyRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeIamPolicyRequest): + request = asset_service.AnalyzeIamPolicyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("analysis_query.scope", request.analysis_query.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_iam_policy_longrunning(self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Analyzes IAM policies asynchronously to answer which identities + have what accesses on which resources, and writes the analysis + results to a Google Cloud Storage or a BigQuery destination. For + Cloud Storage destination, the output format is the JSON format + that represents a + [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse]. + This method implements the + [google.longrunning.Operation][google.longrunning.Operation], + which allows you to track the operation status. 
We recommend + intervals of at least 2 seconds with exponential backoff retry + to poll the operation result. The metadata contains the request + to help callers to map responses to requests. + + Args: + request (google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest): + The request object. A request message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` + A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeIamPolicyLongrunningRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeIamPolicyLongrunningRequest): + request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("analysis_query.scope", request.analysis_query.scope), + )), + ) + + # Send the request. 
# Build the default client info advertised in request headers. Report the
# installed google-cloud-asset distribution version when available; fall back
# to an unversioned ClientInfo when the package metadata cannot be found
# (e.g. when running from source without an installed distribution).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-asset",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


# Public surface of this module.
__all__ = (
    "AssetServiceClient",
)
class SearchAllResourcesPager:
    """A pager for iterating through ``search_all_resources`` requests.

    Thinly wraps an initial
    :class:`google.cloud.asset_v1.types.SearchAllResourcesResponse` and
    provides an ``__iter__`` method that walks the ``results`` field,
    issuing additional ``SearchAllResources`` requests while a
    ``next_page_token`` is present.

    All the usual response attributes are available on the pager; if
    multiple requests are made, only the most recent response is retained
    and used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., asset_service.SearchAllResourcesResponse],
            request: asset_service.SearchAllResourcesRequest,
            response: asset_service.SearchAllResourcesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.asset_v1.types.SearchAllResourcesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.SearchAllResourcesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with each follow-up request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation does not leak to callers.
        self._request = asset_service.SearchAllResourcesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[asset_service.SearchAllResourcesResponse]:
        # Yield the initial page, then fetch until the token runs out.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[assets.ResourceSearchResult]:
        return (result for page in self.pages for result in page.results)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
class SearchAllResourcesAsyncPager:
    """A pager for iterating through ``search_all_resources`` requests.

    Thinly wraps an initial
    :class:`google.cloud.asset_v1.types.SearchAllResourcesResponse` and
    provides an ``__aiter__`` method that walks the ``results`` field,
    awaiting additional ``SearchAllResources`` requests while a
    ``next_page_token`` is present.

    All the usual response attributes are available on the pager; if
    multiple requests are made, only the most recent response is retained
    and used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[asset_service.SearchAllResourcesResponse]],
            request: asset_service.SearchAllResourcesRequest,
            response: asset_service.SearchAllResourcesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.asset_v1.types.SearchAllResourcesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.SearchAllResourcesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with each follow-up request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation does not leak to callers.
        self._request = asset_service.SearchAllResourcesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[asset_service.SearchAllResourcesResponse]:
        # Yield the initial page, then await fetches until the token runs out.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterable[assets.ResourceSearchResult]:
        async def _flatten():
            async for page in self.pages:
                for result in page.results:
                    yield result

        return _flatten()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
class SearchAllIamPoliciesPager:
    """A pager for iterating through ``search_all_iam_policies`` requests.

    Thinly wraps an initial
    :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` and
    provides an ``__iter__`` method that walks the ``results`` field,
    issuing additional ``SearchAllIamPolicies`` requests while a
    ``next_page_token`` is present.

    All the usual response attributes are available on the pager; if
    multiple requests are made, only the most recent response is retained
    and used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., asset_service.SearchAllIamPoliciesResponse],
            request: asset_service.SearchAllIamPoliciesRequest,
            response: asset_service.SearchAllIamPoliciesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with each follow-up request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation does not leak to callers.
        self._request = asset_service.SearchAllIamPoliciesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Proxy unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[asset_service.SearchAllIamPoliciesResponse]:
        # Yield the initial page, then fetch until the token runs out.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[assets.IamPolicySearchResult]:
        return (result for page in self.pages for result in page.results)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
+ """ + def __init__(self, + method: Callable[..., asset_service.SearchAllIamPoliciesResponse], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest): + The initial request object. + response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.SearchAllIamPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[asset_service.SearchAllIamPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[assets.IamPolicySearchResult]: + for page in self.pages: + yield from page.results + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class SearchAllIamPoliciesAsyncPager: + """A pager for iterating through ``search_all_iam_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``results`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchAllIamPolicies`` requests and continue to iterate + through the ``results`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.asset_v1.types.SearchAllIamPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[asset_service.SearchAllIamPoliciesResponse]], + request: asset_service.SearchAllIamPoliciesRequest, + response: asset_service.SearchAllIamPoliciesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest): + The initial request object. + response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.SearchAllIamPoliciesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[asset_service.SearchAllIamPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[assets.IamPolicySearchResult]: + async def async_generator(): + async for page in self.pages: + for response in page.results: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py
new file mode 100644
index 000000000000..2c12069f8d49
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import AssetServiceTransport
+from .grpc import AssetServiceGrpcTransport
+from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+# NOTE(review): presumably looked up by name when a caller requests a
+# transport (e.g. ``transport='grpc'``) — confirm against the client module.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[AssetServiceTransport]]
+_transport_registry['grpc'] = AssetServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport
+
+__all__ = (
+    'AssetServiceTransport',
+    'AssetServiceGrpcTransport',
+    'AssetServiceGrpcAsyncIOTransport',
+)
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py
new file mode 100644
index 000000000000..132b35963d36
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py
@@ -0,0 +1,356 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
+import pkg_resources
+
+import google.auth  # type: ignore
+import google.api_core  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.api_core import operations_v1  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+
+from google.cloud.asset_v1.types import asset_service
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+
+# Client-info falls back to a bare ClientInfo when the distribution is not
+# installed (e.g. running from a source checkout).
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            'google-cloud-asset',
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+try:
+    # google.auth.__version__ was added in 1.26.0
+    _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+    try:  # try pkg_resources if it is available
+        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
+        _GOOGLE_AUTH_VERSION = None
+
+
+class AssetServiceTransport(abc.ABC):
+    """Abstract transport class for AssetService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'cloudasset.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes or self.AUTH_SCOPES
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. These functions
+    # should be deleted once the minimum required versions of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+        scopes_kwargs = {}
+
+        # google-auth >= 1.25.0 understands default_scopes; older versions
+        # only accept a single scopes argument.
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        # Keys are the transport's raw RPC callables (the abstract properties
+        # below); values add default retry/timeout policy and user-agent info.
+        self._wrapped_methods = {
+            self.export_assets: gapic_v1.method.wrap_method(
+                self.export_assets,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.batch_get_assets_history: gapic_v1.method.wrap_method(
+                self.batch_get_assets_history,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.create_feed: gapic_v1.method.wrap_method(
+                self.create_feed,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_feed: gapic_v1.method.wrap_method(
+                self.get_feed,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.list_feeds: gapic_v1.method.wrap_method(
+                self.list_feeds,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.update_feed: gapic_v1.method.wrap_method(
+                self.update_feed,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.delete_feed: gapic_v1.method.wrap_method(
+                self.delete_feed,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.search_all_resources: gapic_v1.method.wrap_method(
+                self.search_all_resources,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=15.0,
+                ),
+                default_timeout=15.0,
+                client_info=client_info,
+            ),
+            self.search_all_iam_policies: gapic_v1.method.wrap_method(
+                self.search_all_iam_policies,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.DeadlineExceeded,
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=15.0,
+                ),
+                default_timeout=15.0,
+                client_info=client_info,
+            ),
+            self.analyze_iam_policy: gapic_v1.method.wrap_method(
+                self.analyze_iam_policy,
+                default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                    core_exceptions.ServiceUnavailable,
+                ),
+                deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.analyze_iam_policy_longrunning: gapic_v1.method.wrap_method(
+                self.analyze_iam_policy_longrunning,
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+        }
+
+    @property
+    def operations_client(self) -> operations_v1.OperationsClient:
+        """Return the client designed to process long-running operations."""
+        raise NotImplementedError()
+
+    # Each abstract RPC property below is typed Union[sync, Awaitable] so the
+    # same base serves both the gRPC and the gRPC-asyncio transports.
+    @property
+    def export_assets(self) -> Callable[
+            [asset_service.ExportAssetsRequest],
+            Union[
+                operations_pb2.Operation,
+                Awaitable[operations_pb2.Operation]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def batch_get_assets_history(self) -> Callable[
+            [asset_service.BatchGetAssetsHistoryRequest],
+            Union[
+                asset_service.BatchGetAssetsHistoryResponse,
+                Awaitable[asset_service.BatchGetAssetsHistoryResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def create_feed(self) -> Callable[
+            [asset_service.CreateFeedRequest],
+            Union[
+                asset_service.Feed,
+                Awaitable[asset_service.Feed]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_feed(self) -> Callable[
+            [asset_service.GetFeedRequest],
+            Union[
+                asset_service.Feed,
+                Awaitable[asset_service.Feed]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_feeds(self) -> Callable[
+            [asset_service.ListFeedsRequest],
+            Union[
+                asset_service.ListFeedsResponse,
+                Awaitable[asset_service.ListFeedsResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def update_feed(self) -> Callable[
+            [asset_service.UpdateFeedRequest],
+            Union[
+                asset_service.Feed,
+                Awaitable[asset_service.Feed]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_feed(self) -> Callable[
+            [asset_service.DeleteFeedRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def search_all_resources(self) -> Callable[
+            [asset_service.SearchAllResourcesRequest],
+            Union[
+                asset_service.SearchAllResourcesResponse,
+                Awaitable[asset_service.SearchAllResourcesResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def search_all_iam_policies(self) -> Callable[
+            [asset_service.SearchAllIamPoliciesRequest],
+            Union[
+                asset_service.SearchAllIamPoliciesResponse,
+                Awaitable[asset_service.SearchAllIamPoliciesResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def analyze_iam_policy(self) -> Callable[
+            [asset_service.AnalyzeIamPolicyRequest],
+            Union[
+                asset_service.AnalyzeIamPolicyResponse,
+                Awaitable[asset_service.AnalyzeIamPolicyResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def analyze_iam_policy_longrunning(self) -> Callable[
+            [asset_service.AnalyzeIamPolicyLongrunningRequest],
+            Union[
+                operations_pb2.Operation,
+                Awaitable[operations_pb2.Operation]
+            ]]:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'AssetServiceTransport',
+)
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py
new file mode 100644
index 000000000000..aa6b2cc1e456
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py
@@ -0,0 +1,567 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.asset_v1.types import asset_service +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO + + +class AssetServiceGrpcTransport(AssetServiceTransport): + """gRPC backend transport for AssetService. + + Asset service definition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'cloudasset.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'cloudasset.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def export_assets(self) -> Callable[ + [asset_service.ExportAssetsRequest], + operations_pb2.Operation]: + r"""Return a callable for the export assets method over gRPC. + + Exports assets with time and resource types to a given Cloud + Storage location/BigQuery table. For Cloud Storage location + destinations, the output format is newline-delimited JSON. Each + line represents a + [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in + the JSON format; for BigQuery table destinations, the output + table stores the fields in asset proto as columns. This API + implements the + [google.longrunning.Operation][google.longrunning.Operation] API + , which allows you to keep track of the export. We recommend + intervals of at least 2 seconds with exponential retry to poll + the export operation result. For regular-size resource parent, + the export operation usually finishes within 5 minutes. + + Returns: + Callable[[~.ExportAssetsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'export_assets' not in self._stubs: + self._stubs['export_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ExportAssets', + request_serializer=asset_service.ExportAssetsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['export_assets'] + + @property + def batch_get_assets_history(self) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + asset_service.BatchGetAssetsHistoryResponse]: + r"""Return a callable for the batch get assets history method over gRPC. + + Batch gets the update history of assets that overlap a time + window. For IAM_POLICY content, this API outputs history when + the asset and its attached IAM POLICY both exist. This can + create gaps in the output history. Otherwise, this API outputs + history with asset in both non-delete or deleted status. If a + specified asset does not exist, this API returns an + INVALID_ARGUMENT error. + + Returns: + Callable[[~.BatchGetAssetsHistoryRequest], + ~.BatchGetAssetsHistoryResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_get_assets_history' not in self._stubs: + self._stubs['batch_get_assets_history'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, + response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, + ) + return self._stubs['batch_get_assets_history'] + + @property + def create_feed(self) -> Callable[ + [asset_service.CreateFeedRequest], + asset_service.Feed]: + r"""Return a callable for the create feed method over gRPC. + + Creates a feed in a parent + project/folder/organization to listen to its asset + updates. 
+ + Returns: + Callable[[~.CreateFeedRequest], + ~.Feed]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_feed' not in self._stubs: + self._stubs['create_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/CreateFeed', + request_serializer=asset_service.CreateFeedRequest.serialize, + response_deserializer=asset_service.Feed.deserialize, + ) + return self._stubs['create_feed'] + + @property + def get_feed(self) -> Callable[ + [asset_service.GetFeedRequest], + asset_service.Feed]: + r"""Return a callable for the get feed method over gRPC. + + Gets details about an asset feed. + + Returns: + Callable[[~.GetFeedRequest], + ~.Feed]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_feed' not in self._stubs: + self._stubs['get_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/GetFeed', + request_serializer=asset_service.GetFeedRequest.serialize, + response_deserializer=asset_service.Feed.deserialize, + ) + return self._stubs['get_feed'] + + @property + def list_feeds(self) -> Callable[ + [asset_service.ListFeedsRequest], + asset_service.ListFeedsResponse]: + r"""Return a callable for the list feeds method over gRPC. + + Lists all asset feeds in a parent + project/folder/organization. + + Returns: + Callable[[~.ListFeedsRequest], + ~.ListFeedsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_feeds' not in self._stubs: + self._stubs['list_feeds'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListFeeds', + request_serializer=asset_service.ListFeedsRequest.serialize, + response_deserializer=asset_service.ListFeedsResponse.deserialize, + ) + return self._stubs['list_feeds'] + + @property + def update_feed(self) -> Callable[ + [asset_service.UpdateFeedRequest], + asset_service.Feed]: + r"""Return a callable for the update feed method over gRPC. + + Updates an asset feed configuration. + + Returns: + Callable[[~.UpdateFeedRequest], + ~.Feed]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_feed' not in self._stubs: + self._stubs['update_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/UpdateFeed', + request_serializer=asset_service.UpdateFeedRequest.serialize, + response_deserializer=asset_service.Feed.deserialize, + ) + return self._stubs['update_feed'] + + @property + def delete_feed(self) -> Callable[ + [asset_service.DeleteFeedRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete feed method over gRPC. + + Deletes an asset feed. + + Returns: + Callable[[~.DeleteFeedRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_feed' not in self._stubs: + self._stubs['delete_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/DeleteFeed', + request_serializer=asset_service.DeleteFeedRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_feed'] + + @property + def search_all_resources(self) -> Callable[ + [asset_service.SearchAllResourcesRequest], + asset_service.SearchAllResourcesResponse]: + r"""Return a callable for the search all resources method over gRPC. + + Searches all Cloud resources within the specified scope, such as + a project, folder, or organization. The caller must be granted + the ``cloudasset.assets.searchAllResources`` permission on the + desired scope, otherwise the request will be rejected. + + Returns: + Callable[[~.SearchAllResourcesRequest], + ~.SearchAllResourcesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_all_resources' not in self._stubs: + self._stubs['search_all_resources'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/SearchAllResources', + request_serializer=asset_service.SearchAllResourcesRequest.serialize, + response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, + ) + return self._stubs['search_all_resources'] + + @property + def search_all_iam_policies(self) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + asset_service.SearchAllIamPoliciesResponse]: + r"""Return a callable for the search all iam policies method over gRPC. + + Searches all IAM policies within the specified scope, such as a + project, folder, or organization. 
The caller must be granted the + ``cloudasset.assets.searchAllIamPolicies`` permission on the + desired scope, otherwise the request will be rejected. + + Returns: + Callable[[~.SearchAllIamPoliciesRequest], + ~.SearchAllIamPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_all_iam_policies' not in self._stubs: + self._stubs['search_all_iam_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, + response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, + ) + return self._stubs['search_all_iam_policies'] + + @property + def analyze_iam_policy(self) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + asset_service.AnalyzeIamPolicyResponse]: + r"""Return a callable for the analyze iam policy method over gRPC. + + Analyzes IAM policies to answer which identities have + what accesses on which resources. + + Returns: + Callable[[~.AnalyzeIamPolicyRequest], + ~.AnalyzeIamPolicyResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_iam_policy' not in self._stubs: + self._stubs['analyze_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, + response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, + ) + return self._stubs['analyze_iam_policy'] + + @property + def analyze_iam_policy_longrunning(self) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + operations_pb2.Operation]: + r"""Return a callable for the analyze iam policy longrunning method over gRPC. + + Analyzes IAM policies asynchronously to answer which identities + have what accesses on which resources, and writes the analysis + results to a Google Cloud Storage or a BigQuery destination. For + Cloud Storage destination, the output format is the JSON format + that represents a + [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse]. + This method implements the + [google.longrunning.Operation][google.longrunning.Operation], + which allows you to track the operation status. We recommend + intervals of at least 2 seconds with exponential backoff retry + to poll the operation result. The metadata contains the request + to help callers to map responses to requests. + + Returns: + Callable[[~.AnalyzeIamPolicyLongrunningRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_iam_policy_longrunning' not in self._stubs: + self._stubs['analyze_iam_policy_longrunning'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', + request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['analyze_iam_policy_longrunning'] + + +__all__ = ( + 'AssetServiceGrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b488dc4cc030 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -0,0 +1,571 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.asset_v1.types import asset_service +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import AssetServiceGrpcTransport + + +class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): + """gRPC AsyncIO backend transport for AssetService. + + Asset service definition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'cloudasset.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
``client_cert_source`` or application default SSL credentials.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def export_assets(self) -> Callable[ + [asset_service.ExportAssetsRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export assets method over gRPC. + + Exports assets with time and resource types to a given Cloud + Storage location/BigQuery table. For Cloud Storage location + destinations, the output format is newline-delimited JSON. Each + line represents a + [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in + the JSON format; for BigQuery table destinations, the output + table stores the fields in asset proto as columns. This API + implements the + [google.longrunning.Operation][google.longrunning.Operation] API + , which allows you to keep track of the export. We recommend + intervals of at least 2 seconds with exponential retry to poll + the export operation result. For regular-size resource parent, + the export operation usually finishes within 5 minutes. + + Returns: + Callable[[~.ExportAssetsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'export_assets' not in self._stubs: + self._stubs['export_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ExportAssets', + request_serializer=asset_service.ExportAssetsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['export_assets'] + + @property + def batch_get_assets_history(self) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + Awaitable[asset_service.BatchGetAssetsHistoryResponse]]: + r"""Return a callable for the batch get assets history method over gRPC. + + Batch gets the update history of assets that overlap a time + window. For IAM_POLICY content, this API outputs history when + the asset and its attached IAM POLICY both exist. This can + create gaps in the output history. Otherwise, this API outputs + history with asset in both non-delete or deleted status. If a + specified asset does not exist, this API returns an + INVALID_ARGUMENT error. + + Returns: + Callable[[~.BatchGetAssetsHistoryRequest], + Awaitable[~.BatchGetAssetsHistoryResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_get_assets_history' not in self._stubs: + self._stubs['batch_get_assets_history'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', + request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, + response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, + ) + return self._stubs['batch_get_assets_history'] + + @property + def create_feed(self) -> Callable[ + [asset_service.CreateFeedRequest], + Awaitable[asset_service.Feed]]: + r"""Return a callable for the create feed method over gRPC. + + Creates a feed in a parent + project/folder/organization to listen to its asset + updates. + + Returns: + Callable[[~.CreateFeedRequest], + Awaitable[~.Feed]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_feed' not in self._stubs: + self._stubs['create_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/CreateFeed', + request_serializer=asset_service.CreateFeedRequest.serialize, + response_deserializer=asset_service.Feed.deserialize, + ) + return self._stubs['create_feed'] + + @property + def get_feed(self) -> Callable[ + [asset_service.GetFeedRequest], + Awaitable[asset_service.Feed]]: + r"""Return a callable for the get feed method over gRPC. + + Gets details about an asset feed. + + Returns: + Callable[[~.GetFeedRequest], + Awaitable[~.Feed]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_feed' not in self._stubs: + self._stubs['get_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/GetFeed', + request_serializer=asset_service.GetFeedRequest.serialize, + response_deserializer=asset_service.Feed.deserialize, + ) + return self._stubs['get_feed'] + + @property + def list_feeds(self) -> Callable[ + [asset_service.ListFeedsRequest], + Awaitable[asset_service.ListFeedsResponse]]: + r"""Return a callable for the list feeds method over gRPC. + + Lists all asset feeds in a parent + project/folder/organization. + + Returns: + Callable[[~.ListFeedsRequest], + Awaitable[~.ListFeedsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_feeds' not in self._stubs: + self._stubs['list_feeds'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListFeeds', + request_serializer=asset_service.ListFeedsRequest.serialize, + response_deserializer=asset_service.ListFeedsResponse.deserialize, + ) + return self._stubs['list_feeds'] + + @property + def update_feed(self) -> Callable[ + [asset_service.UpdateFeedRequest], + Awaitable[asset_service.Feed]]: + r"""Return a callable for the update feed method over gRPC. + + Updates an asset feed configuration. + + Returns: + Callable[[~.UpdateFeedRequest], + Awaitable[~.Feed]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_feed' not in self._stubs: + self._stubs['update_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/UpdateFeed', + request_serializer=asset_service.UpdateFeedRequest.serialize, + response_deserializer=asset_service.Feed.deserialize, + ) + return self._stubs['update_feed'] + + @property + def delete_feed(self) -> Callable[ + [asset_service.DeleteFeedRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete feed method over gRPC. + + Deletes an asset feed. + + Returns: + Callable[[~.DeleteFeedRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_feed' not in self._stubs: + self._stubs['delete_feed'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/DeleteFeed', + request_serializer=asset_service.DeleteFeedRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_feed'] + + @property + def search_all_resources(self) -> Callable[ + [asset_service.SearchAllResourcesRequest], + Awaitable[asset_service.SearchAllResourcesResponse]]: + r"""Return a callable for the search all resources method over gRPC. + + Searches all Cloud resources within the specified scope, such as + a project, folder, or organization. The caller must be granted + the ``cloudasset.assets.searchAllResources`` permission on the + desired scope, otherwise the request will be rejected. + + Returns: + Callable[[~.SearchAllResourcesRequest], + Awaitable[~.SearchAllResourcesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_all_resources' not in self._stubs: + self._stubs['search_all_resources'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/SearchAllResources', + request_serializer=asset_service.SearchAllResourcesRequest.serialize, + response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, + ) + return self._stubs['search_all_resources'] + + @property + def search_all_iam_policies(self) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + Awaitable[asset_service.SearchAllIamPoliciesResponse]]: + r"""Return a callable for the search all iam policies method over gRPC. + + Searches all IAM policies within the specified scope, such as a + project, folder, or organization. The caller must be granted the + ``cloudasset.assets.searchAllIamPolicies`` permission on the + desired scope, otherwise the request will be rejected. + + Returns: + Callable[[~.SearchAllIamPoliciesRequest], + Awaitable[~.SearchAllIamPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'search_all_iam_policies' not in self._stubs: + self._stubs['search_all_iam_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', + request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, + response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, + ) + return self._stubs['search_all_iam_policies'] + + @property + def analyze_iam_policy(self) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + Awaitable[asset_service.AnalyzeIamPolicyResponse]]: + r"""Return a callable for the analyze iam policy method over gRPC. 
+ + Analyzes IAM policies to answer which identities have + what accesses on which resources. + + Returns: + Callable[[~.AnalyzeIamPolicyRequest], + Awaitable[~.AnalyzeIamPolicyResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_iam_policy' not in self._stubs: + self._stubs['analyze_iam_policy'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', + request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, + response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, + ) + return self._stubs['analyze_iam_policy'] + + @property + def analyze_iam_policy_longrunning(self) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the analyze iam policy longrunning method over gRPC. + + Analyzes IAM policies asynchronously to answer which identities + have what accesses on which resources, and writes the analysis + results to a Google Cloud Storage or a BigQuery destination. For + Cloud Storage destination, the output format is the JSON format + that represents a + [AnalyzeIamPolicyResponse][google.cloud.asset.v1.AnalyzeIamPolicyResponse]. + This method implements the + [google.longrunning.Operation][google.longrunning.Operation], + which allows you to track the operation status. We recommend + intervals of at least 2 seconds with exponential backoff retry + to poll the operation result. The metadata contains the request + to help callers to map responses to requests. + + Returns: + Callable[[~.AnalyzeIamPolicyLongrunningRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_iam_policy_longrunning' not in self._stubs: + self._stubs['analyze_iam_policy_longrunning'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', + request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['analyze_iam_policy_longrunning'] + + +__all__ = ( + 'AssetServiceGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py new file mode 100644 index 000000000000..02a737df6f5e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .asset_service import ( + AnalyzeIamPolicyLongrunningRequest, + AnalyzeIamPolicyLongrunningResponse, + AnalyzeIamPolicyRequest, + AnalyzeIamPolicyResponse, + BatchGetAssetsHistoryRequest, + BatchGetAssetsHistoryResponse, + BigQueryDestination, + CreateFeedRequest, + DeleteFeedRequest, + ExportAssetsRequest, + ExportAssetsResponse, + Feed, + FeedOutputConfig, + GcsDestination, + GcsOutputResult, + GetFeedRequest, + IamPolicyAnalysisOutputConfig, + IamPolicyAnalysisQuery, + ListFeedsRequest, + ListFeedsResponse, + OutputConfig, + OutputResult, + PartitionSpec, + PubsubDestination, + SearchAllIamPoliciesRequest, + SearchAllIamPoliciesResponse, + SearchAllResourcesRequest, + SearchAllResourcesResponse, + UpdateFeedRequest, + ContentType, +) +from .assets import ( + Asset, + IamPolicyAnalysisResult, + IamPolicyAnalysisState, + IamPolicySearchResult, + Resource, + ResourceSearchResult, + TemporalAsset, + TimeWindow, +) + +__all__ = ( + 'AnalyzeIamPolicyLongrunningRequest', + 'AnalyzeIamPolicyLongrunningResponse', + 'AnalyzeIamPolicyRequest', + 'AnalyzeIamPolicyResponse', + 'BatchGetAssetsHistoryRequest', + 'BatchGetAssetsHistoryResponse', + 'BigQueryDestination', + 'CreateFeedRequest', + 'DeleteFeedRequest', + 'ExportAssetsRequest', + 'ExportAssetsResponse', + 'Feed', + 'FeedOutputConfig', + 'GcsDestination', + 'GcsOutputResult', + 'GetFeedRequest', + 'IamPolicyAnalysisOutputConfig', + 'IamPolicyAnalysisQuery', + 'ListFeedsRequest', + 'ListFeedsResponse', + 'OutputConfig', + 'OutputResult', + 'PartitionSpec', + 'PubsubDestination', + 'SearchAllIamPoliciesRequest', + 'SearchAllIamPoliciesResponse', + 'SearchAllResourcesRequest', + 'SearchAllResourcesResponse', + 'UpdateFeedRequest', + 'ContentType', + 'Asset', + 'IamPolicyAnalysisResult', + 'IamPolicyAnalysisState', + 'IamPolicySearchResult', + 'Resource', + 'ResourceSearchResult', + 'TemporalAsset', + 'TimeWindow', +) diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py new file mode 100644 index 000000000000..3bc57820106c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -0,0 +1,1452 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
# NOTE(review): generated golden fixture (GAPIC generator integration
# tests). The code shape is machine-produced; comments here are review
# notes only and will be discarded on regeneration.
import proto  # type: ignore

from google.cloud.asset_v1.types import assets as gca_assets
from google.protobuf import duration_pb2  # type: ignore
from google.protobuf import field_mask_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
from google.type import expr_pb2  # type: ignore


# Registers all messages/enums below under proto package
# 'google.cloud.asset.v1' so proto-plus can resolve the string-based
# message references used in field definitions (e.g. message='OutputConfig').
__protobuf__ = proto.module(
    package='google.cloud.asset.v1',
    manifest={
        'ContentType',
        'ExportAssetsRequest',
        'ExportAssetsResponse',
        'BatchGetAssetsHistoryRequest',
        'BatchGetAssetsHistoryResponse',
        'CreateFeedRequest',
        'GetFeedRequest',
        'ListFeedsRequest',
        'ListFeedsResponse',
        'UpdateFeedRequest',
        'DeleteFeedRequest',
        'OutputConfig',
        'OutputResult',
        'GcsOutputResult',
        'GcsDestination',
        'BigQueryDestination',
        'PartitionSpec',
        'PubsubDestination',
        'FeedOutputConfig',
        'Feed',
        'SearchAllResourcesRequest',
        'SearchAllResourcesResponse',
        'SearchAllIamPoliciesRequest',
        'SearchAllIamPoliciesResponse',
        'IamPolicyAnalysisQuery',
        'AnalyzeIamPolicyRequest',
        'AnalyzeIamPolicyResponse',
        'IamPolicyAnalysisOutputConfig',
        'AnalyzeIamPolicyLongrunningRequest',
        'AnalyzeIamPolicyLongrunningResponse',
    },
)


class ContentType(proto.Enum):
    r"""Asset content type."""
    CONTENT_TYPE_UNSPECIFIED = 0
    RESOURCE = 1
    IAM_POLICY = 2
    # Value 3 is intentionally absent — presumably reserved/retired in the
    # upstream asset_service.proto; confirm against the proto before reuse.
    ORG_POLICY = 4
    ACCESS_POLICY = 5
    OS_INVENTORY = 6


class ExportAssetsRequest(proto.Message):
    r"""Export asset request.
    Attributes:
        parent (str):
            Required. The relative name of the root
            asset. This can only be an organization number
            (such as "organizations/123"), a project ID
            (such as "projects/my-project-id"), or a project
            number (such as "projects/12345"), or a folder
            number (such as "folders/123").
        read_time (google.protobuf.timestamp_pb2.Timestamp):
            Timestamp to take an asset snapshot. This can
            only be set to a timestamp between the current
            time and the current time minus 35 days
            (inclusive). If not specified, the current time
            will be used. Due to delays in resource data
            collection and indexing, there is a volatile
            window during which running the same query may
            get different results.
        asset_types (Sequence[str]):
            A list of asset types to take a snapshot for. For example:
            "compute.googleapis.com/Disk".

            Regular expressions are also supported. For example:

            - "compute.googleapis.com.*" snapshots resources whose
              asset type starts with "compute.googleapis.com".
            - ".*Instance" snapshots resources whose asset type ends
              with "Instance".
            - ".*Instance.*" snapshots resources whose asset type
              contains "Instance".

            See `RE2 `__ for
            all supported regular expression syntax. If the regular
            expression does not match any supported asset type, an
            INVALID_ARGUMENT error will be returned.

            If specified, only matching assets will be returned,
            otherwise, it will snapshot all asset types. See
            `Introduction to Cloud Asset
            Inventory `__
            for all supported asset types.
        content_type (google.cloud.asset_v1.types.ContentType):
            Asset content type. If not specified, no
            content but the asset name will be returned.
        output_config (google.cloud.asset_v1.types.OutputConfig):
            Required. Output configuration indicating
            where the results will be output to.
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    read_time = proto.Field(
        proto.MESSAGE,
        number=2,
        message=timestamp_pb2.Timestamp,
    )
    asset_types = proto.RepeatedField(
        proto.STRING,
        number=3,
    )
    content_type = proto.Field(
        proto.ENUM,
        number=4,
        enum='ContentType',
    )
    output_config = proto.Field(
        proto.MESSAGE,
        number=5,
        message='OutputConfig',
    )


class ExportAssetsResponse(proto.Message):
    r"""The export asset response.
    This message is returned by the
    [google.longrunning.Operations.GetOperation][google.longrunning.Operations.GetOperation]
    method in the returned
    [google.longrunning.Operation.response][google.longrunning.Operation.response]
    field.

    Attributes:
        read_time (google.protobuf.timestamp_pb2.Timestamp):
            Time the snapshot was taken.
        output_config (google.cloud.asset_v1.types.OutputConfig):
            Output configuration indicating where the
            results were output to.
        output_result (google.cloud.asset_v1.types.OutputResult):
            Output result indicating where the assets were exported to.
            For example, a set of actual Google Cloud Storage object
            uris where the assets are exported to. The uris can be
            different from what [output_config] has specified, as the
            service will split the output object into multiple ones once
            it exceeds a single Google Cloud Storage object limit.
    """

    read_time = proto.Field(
        proto.MESSAGE,
        number=1,
        message=timestamp_pb2.Timestamp,
    )
    output_config = proto.Field(
        proto.MESSAGE,
        number=2,
        message='OutputConfig',
    )
    output_result = proto.Field(
        proto.MESSAGE,
        number=3,
        message='OutputResult',
    )


class BatchGetAssetsHistoryRequest(proto.Message):
    r"""Batch get assets history request.
    Attributes:
        parent (str):
            Required. The relative name of the root
            asset. It can only be an organization number
            (such as "organizations/123"), a project ID
            (such as "projects/my-project-id"), or a
            project number (such as "projects/12345").
        asset_names (Sequence[str]):
            A list of the full names of the assets. See:
            https://cloud.google.com/asset-inventory/docs/resource-name-format
            Example:

            ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``.

            The request becomes a no-op if the asset name list is empty,
            and the max size of the asset name list is 100 in one
            request.
        content_type (google.cloud.asset_v1.types.ContentType):
            Optional. The content type.
        read_time_window (google.cloud.asset_v1.types.TimeWindow):
            Optional. The time window for the asset history. Both
            start_time and end_time are optional and if set, it must be
            after the current time minus 35 days. If end_time is not
            set, it is default to current timestamp. If start_time is
            not set, the snapshot of the assets at end_time will be
            returned. The returned results contain all temporal assets
            whose time window overlap with read_time_window.
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    asset_names = proto.RepeatedField(
        proto.STRING,
        number=2,
    )
    content_type = proto.Field(
        proto.ENUM,
        number=3,
        enum='ContentType',
    )
    read_time_window = proto.Field(
        proto.MESSAGE,
        number=4,
        message=gca_assets.TimeWindow,
    )


class BatchGetAssetsHistoryResponse(proto.Message):
    r"""Batch get assets history response.
    Attributes:
        assets (Sequence[google.cloud.asset_v1.types.TemporalAsset]):
            A list of assets with valid time windows.
    """

    assets = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message=gca_assets.TemporalAsset,
    )


class CreateFeedRequest(proto.Message):
    r"""Create asset feed request.
    Attributes:
        parent (str):
            Required. The name of the
            project/folder/organization where this feed
            should be created in. It can only be an
            organization number (such as
            "organizations/123"), a folder number (such as
            "folders/123"), a project ID (such as
            "projects/my-project-id"), or a project number
            (such as "projects/12345").
        feed_id (str):
            Required. This is the client-assigned asset
            feed identifier and it needs to be unique under
            a specific parent project/folder/organization.
        feed (google.cloud.asset_v1.types.Feed):
            Required. The feed details. The field ``name`` must be empty
            and it will be generated in the format of:
            projects/project_number/feeds/feed_id
            folders/folder_number/feeds/feed_id
            organizations/organization_number/feeds/feed_id
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )
    feed_id = proto.Field(
        proto.STRING,
        number=2,
    )
    feed = proto.Field(
        proto.MESSAGE,
        number=3,
        message='Feed',
    )


class GetFeedRequest(proto.Message):
    r"""Get asset feed request.
    Attributes:
        name (str):
            Required. The name of the Feed and it must be in the format
            of: projects/project_number/feeds/feed_id
            folders/folder_number/feeds/feed_id
            organizations/organization_number/feeds/feed_id
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )


class ListFeedsRequest(proto.Message):
    r"""List asset feeds request.
    Attributes:
        parent (str):
            Required. The parent
            project/folder/organization whose feeds are to
            be listed. It can only be using
            project/folder/organization number (such as
            "folders/12345"), or a project ID (such as
            "projects/my-project-id").
    """

    parent = proto.Field(
        proto.STRING,
        number=1,
    )


class ListFeedsResponse(proto.Message):
    r"""
    Attributes:
        feeds (Sequence[google.cloud.asset_v1.types.Feed]):
            A list of feeds.
    """

    feeds = proto.RepeatedField(
        proto.MESSAGE,
        number=1,
        message='Feed',
    )


class UpdateFeedRequest(proto.Message):
    r"""Update asset feed request.
    Attributes:
        feed (google.cloud.asset_v1.types.Feed):
            Required. The new values of feed details. It must match an
            existing feed and the field ``name`` must be in the format
            of: projects/project_number/feeds/feed_id or
            folders/folder_number/feeds/feed_id or
            organizations/organization_number/feeds/feed_id.
        update_mask (google.protobuf.field_mask_pb2.FieldMask):
            Required. Only updates the ``feed`` fields indicated by this
            mask. The field mask must not be empty, and it must not
            contain fields that are immutable or only set by the server.
    """

    feed = proto.Field(
        proto.MESSAGE,
        number=1,
        message='Feed',
    )
    update_mask = proto.Field(
        proto.MESSAGE,
        number=2,
        message=field_mask_pb2.FieldMask,
    )


class DeleteFeedRequest(proto.Message):
    r"""
    Attributes:
        name (str):
            Required. The name of the feed and it must be in the format
            of: projects/project_number/feeds/feed_id
            folders/folder_number/feeds/feed_id
            organizations/organization_number/feeds/feed_id
    """

    name = proto.Field(
        proto.STRING,
        number=1,
    )


class OutputConfig(proto.Message):
    r"""Output configuration for export assets destination.
    Attributes:
        gcs_destination (google.cloud.asset_v1.types.GcsDestination):
            Destination on Cloud Storage.
        bigquery_destination (google.cloud.asset_v1.types.BigQueryDestination):
            Destination on BigQuery. The output table
            stores the fields in asset proto as columns in
            BigQuery.
    """

    # The two destinations are mutually exclusive members of the
    # 'destination' oneof — setting one clears the other.
    gcs_destination = proto.Field(
        proto.MESSAGE,
        number=1,
        oneof='destination',
        message='GcsDestination',
    )
    bigquery_destination = proto.Field(
        proto.MESSAGE,
        number=2,
        oneof='destination',
        message='BigQueryDestination',
    )


class OutputResult(proto.Message):
    r"""Output result of export assets.
    Attributes:
        gcs_result (google.cloud.asset_v1.types.GcsOutputResult):
            Export result on Cloud Storage.
    """

    gcs_result = proto.Field(
        proto.MESSAGE,
        number=1,
        oneof='result',
        message='GcsOutputResult',
    )


class GcsOutputResult(proto.Message):
    r"""A Cloud Storage output result.
    Attributes:
        uris (Sequence[str]):
            List of uris of the Cloud Storage objects. Example:
            "gs://bucket_name/object_name".
    """

    uris = proto.RepeatedField(
        proto.STRING,
        number=1,
    )


class GcsDestination(proto.Message):
    r"""A Cloud Storage location.
    Attributes:
        uri (str):
            The uri of the Cloud Storage object.
It's the same uri that + is used by gsutil. Example: "gs://bucket_name/object_name". + See `Viewing and Editing Object + Metadata `__ + for more information. + uri_prefix (str): + The uri prefix of all generated Cloud Storage objects. + Example: "gs://bucket_name/object_name_prefix". Each object + uri is in format: "gs://bucket_name/object_name_prefix// and + only contains assets for that type. starts from 0. Example: + "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" + is the first shard of output objects containing all + compute.googleapis.com/Disk assets. An INVALID_ARGUMENT + error will be returned if file with the same name + "gs://bucket_name/object_name_prefix" already exists. + """ + + uri = proto.Field( + proto.STRING, + number=1, + oneof='object_uri', + ) + uri_prefix = proto.Field( + proto.STRING, + number=2, + oneof='object_uri', + ) + + +class BigQueryDestination(proto.Message): + r"""A BigQuery destination for exporting assets to. + Attributes: + dataset (str): + Required. The BigQuery dataset in format + "projects/projectId/datasets/datasetId", to which the + snapshot result should be exported. If this dataset does not + exist, the export call returns an INVALID_ARGUMENT error. + table (str): + Required. The BigQuery table to which the + snapshot result should be written. If this table + does not exist, a new table with the given name + will be created. + force (bool): + If the destination table already exists and this flag is + ``TRUE``, the table will be overwritten by the contents of + assets snapshot. If the flag is ``FALSE`` or unset and the + destination table already exists, the export call returns an + INVALID_ARGUMEMT error. + partition_spec (google.cloud.asset_v1.types.PartitionSpec): + [partition_spec] determines whether to export to partitioned + table(s) and how to partition the data. 
+ + If [partition_spec] is unset or + [partition_spec.partition_key] is unset or + ``PARTITION_KEY_UNSPECIFIED``, the snapshot results will be + exported to non-partitioned table(s). [force] will decide + whether to overwrite existing table(s). + + If [partition_spec] is specified. First, the snapshot + results will be written to partitioned table(s) with two + additional timestamp columns, readTime and requestTime, one + of which will be the partition key. Secondly, in the case + when any destination table already exists, it will first try + to update existing table's schema as necessary by appending + additional columns. Then, if [force] is ``TRUE``, the + corresponding partition will be overwritten by the snapshot + results (data in different partitions will remain intact); + if [force] is unset or ``FALSE``, it will append the data. + An error will be returned if the schema update or data + appension fails. + separate_tables_per_asset_type (bool): + If this flag is ``TRUE``, the snapshot results will be + written to one or multiple tables, each of which contains + results of one asset type. The [force] and [partition_spec] + fields will apply to each of them. + + Field [table] will be concatenated with "*" and the asset + type names (see + https://cloud.google.com/asset-inventory/docs/supported-asset-types + for supported asset types) to construct per-asset-type table + names, in which all non-alphanumeric characters like "." and + "/" will be substituted by "*". Example: if field [table] is + "mytable" and snapshot results contain + "storage.googleapis.com/Bucket" assets, the corresponding + table name will be "mytable_storage_googleapis_com_Bucket". + If any of these tables does not exist, a new table with the + concatenated name will be created. 
+ + When [content_type] in the ExportAssetsRequest is + ``RESOURCE``, the schema of each table will include + RECORD-type columns mapped to the nested fields in the + Asset.resource.data field of that asset type (up to the 15 + nested level BigQuery supports + (https://cloud.google.com/bigquery/docs/nested-repeated#limitations)). + The fields in >15 nested levels will be stored in JSON + format string as a child column of its parent RECORD column. + + If error occurs when exporting to any table, the whole + export call will return an error but the export results that + already succeed will persist. Example: if exporting to + table_type_A succeeds when exporting to table_type_B fails + during one export call, the results in table_type_A will + persist and there will not be partial results persisting in + a table. + """ + + dataset = proto.Field( + proto.STRING, + number=1, + ) + table = proto.Field( + proto.STRING, + number=2, + ) + force = proto.Field( + proto.BOOL, + number=3, + ) + partition_spec = proto.Field( + proto.MESSAGE, + number=4, + message='PartitionSpec', + ) + separate_tables_per_asset_type = proto.Field( + proto.BOOL, + number=5, + ) + + +class PartitionSpec(proto.Message): + r"""Specifications of BigQuery partitioned table as export + destination. + + Attributes: + partition_key (google.cloud.asset_v1.types.PartitionSpec.PartitionKey): + The partition key for BigQuery partitioned + table. + """ + class PartitionKey(proto.Enum): + r"""This enum is used to determine the partition key column when + exporting assets to BigQuery partitioned table(s). Note that, if the + partition key is a timestamp column, the actual partition is based + on its date value (expressed in UTC. see details in + https://cloud.google.com/bigquery/docs/partitioned-tables#date_timestamp_partitioned_tables). 
+ """ + PARTITION_KEY_UNSPECIFIED = 0 + READ_TIME = 1 + REQUEST_TIME = 2 + + partition_key = proto.Field( + proto.ENUM, + number=1, + enum=PartitionKey, + ) + + +class PubsubDestination(proto.Message): + r"""A Pub/Sub destination. + Attributes: + topic (str): + The name of the Pub/Sub topic to publish to. Example: + ``projects/PROJECT_ID/topics/TOPIC_ID``. + """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + + +class FeedOutputConfig(proto.Message): + r"""Output configuration for asset feed destination. + Attributes: + pubsub_destination (google.cloud.asset_v1.types.PubsubDestination): + Destination on Pub/Sub. + """ + + pubsub_destination = proto.Field( + proto.MESSAGE, + number=1, + oneof='destination', + message='PubsubDestination', + ) + + +class Feed(proto.Message): + r"""An asset feed used to export asset updates to a destinations. + An asset feed filter controls what updates are exported. The + asset feed must be created within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. + + Attributes: + name (str): + Required. The format will be + projects/{project_number}/feeds/{client-assigned_feed_identifier} + or + folders/{folder_number}/feeds/{client-assigned_feed_identifier} + or + organizations/{organization_number}/feeds/{client-assigned_feed_identifier} + + The client-assigned feed identifier must be unique within + the parent project/folder/organization. + asset_names (Sequence[str]): + A list of the full names of the assets to receive updates. + You must specify either or both of asset_names and + asset_types. Only asset updates matching specified + asset_names or asset_types are exported to the feed. + Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. + See `Resource + Names `__ + for more info. + asset_types (Sequence[str]): + A list of types of the assets to receive updates. You must + specify either or both of asset_names and asset_types. 
Only + asset updates matching specified asset_names or asset_types + are exported to the feed. Example: + ``"compute.googleapis.com/Disk"`` + + See `this + topic `__ + for a list of all supported asset types. + content_type (google.cloud.asset_v1.types.ContentType): + Asset content type. If not specified, no + content but the asset name and type will be + returned. + feed_output_config (google.cloud.asset_v1.types.FeedOutputConfig): + Required. Feed output configuration defining + where the asset updates are published to. + condition (google.type.expr_pb2.Expr): + A condition which determines whether an asset update should + be published. If specified, an asset will be returned only + when the expression evaluates to true. When set, + ``expression`` field in the ``Expr`` must be a valid [CEL + expression] (https://github.com/google/cel-spec) on a + TemporalAsset with name ``temporal_asset``. Example: a Feed + with expression ("temporal_asset.deleted == true") will only + publish Asset deletions. Other fields of ``Expr`` are + optional. + + See our `user + guide `__ + for detailed instructions. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + asset_names = proto.RepeatedField( + proto.STRING, + number=2, + ) + asset_types = proto.RepeatedField( + proto.STRING, + number=3, + ) + content_type = proto.Field( + proto.ENUM, + number=4, + enum='ContentType', + ) + feed_output_config = proto.Field( + proto.MESSAGE, + number=5, + message='FeedOutputConfig', + ) + condition = proto.Field( + proto.MESSAGE, + number=6, + message=expr_pb2.Expr, + ) + + +class SearchAllResourcesRequest(proto.Message): + r"""Search all resources request. + Attributes: + scope (str): + Required. A scope can be a project, a folder, or an + organization. The search is limited to the resources within + the ``scope``. The caller must be granted the + ```cloudasset.assets.searchAllResources`` `__ + permission on the desired scope. 
+ + The allowed values are: + + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + query (str): + Optional. The query statement. See `how to construct a + query `__ + for more information. If not specified or empty, it will + search all the resources within the specified ``scope``. + Note that the query string is compared against each Cloud + IAM policy binding, including its members, roles, and Cloud + IAM conditions. The returned Cloud IAM policies will only + contain the bindings that match your query. To learn more + about the IAM policy structure, see `IAM policy + doc `__. + + Examples: + + - ``name:Important`` to find Cloud resources whose name + contains "Important" as a word. + - ``displayName:Impor*`` to find Cloud resources whose + display name contains "Impor" as a prefix. + - ``description:*por*`` to find Cloud resources whose + description contains "por" as a substring. + - ``location:us-west*`` to find Cloud resources whose + location is prefixed with "us-west". + - ``labels:prod`` to find Cloud resources whose labels + contain "prod" as a key or value. + - ``labels.env:prod`` to find Cloud resources that have a + label "env" and its value is "prod". + - ``labels.env:*`` to find Cloud resources that have a + label "env". + - ``Important`` to find Cloud resources that contain + "Important" as a word in any of the searchable fields. + - ``Impor*`` to find Cloud resources that contain "Impor" + as a prefix in any of the searchable fields. + - ``*por*`` to find Cloud resources that contain "por" as a + substring in any of the searchable fields. + - ``Important location:(us-west1 OR global)`` to find Cloud + resources that contain "Important" as a word in any of + the searchable fields and are also located in the + "us-west1" region or the "global" location. 
+ asset_types (Sequence[str]): + Optional. A list of asset types that this request searches + for. If empty, it will search all the `searchable asset + types `__. + page_size (int): + Optional. The page size for search result pagination. Page + size is capped at 500 even if a larger value is given. If + set to zero, server will pick an appropriate default. + Returned results may be fewer than requested. When this + happens, there could be more results as long as + ``next_page_token`` is returned. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``page_token`` must be the value of ``next_page_token`` from + the previous response. The values of all other method + parameters, must be identical to those in the previous call. + order_by (str): + Optional. A comma separated list of fields specifying the + sorting order of the results. The default order is + ascending. Add " DESC" after the field name to indicate + descending order. Redundant space characters are ignored. + Example: "location DESC, name". Only string fields in the + response are sortable, including ``name``, ``displayName``, + ``description``, ``location``. All the other fields such as + repeated fields (e.g., ``networkTags``), map fields (e.g., + ``labels``) and struct fields (e.g., + ``additionalAttributes``) are not supported. + """ + + scope = proto.Field( + proto.STRING, + number=1, + ) + query = proto.Field( + proto.STRING, + number=2, + ) + asset_types = proto.RepeatedField( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) + order_by = proto.Field( + proto.STRING, + number=6, + ) + + +class SearchAllResourcesResponse(proto.Message): + r"""Search all resources response. + Attributes: + results (Sequence[google.cloud.asset_v1.types.ResourceSearchResult]): + A list of Resources that match the search + query. 
It contains the resource standard + metadata information. + next_page_token (str): + If there are more results than those appearing in this + response, then ``next_page_token`` is included. To get the + next set of results, call this method again using the value + of ``next_page_token`` as ``page_token``. + """ + + @property + def raw_page(self): + return self + + results = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_assets.ResourceSearchResult, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchAllIamPoliciesRequest(proto.Message): + r"""Search all IAM policies request. + Attributes: + scope (str): + Required. A scope can be a project, a folder, or an + organization. The search is limited to the IAM policies + within the ``scope``. The caller must be granted the + ```cloudasset.assets.searchAllIamPolicies`` `__ + permission on the desired scope. + + The allowed values are: + + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + query (str): + Optional. The query statement. See `how to construct a + query `__ + for more information. If not specified or empty, it will + search all the IAM policies within the specified ``scope``. + + Examples: + + - ``policy:amy@gmail.com`` to find IAM policy bindings that + specify user "amy@gmail.com". + - ``policy:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``policy.role.permissions:storage.buckets.update`` to + find IAM policy bindings that specify a role containing + "storage.buckets.update" permission. Note that if callers + don't have ``iam.roles.get`` access to a role's included + permissions, policy bindings that specify this role will + be dropped from the search results. 
+ - ``resource:organizations/123456`` to find IAM policy + bindings that are set on "organizations/123456". + - ``Important`` to find IAM policy bindings that contain + "Important" as a word in any of the searchable fields + (except for the included permissions). + - ``*por*`` to find IAM policy bindings that contain "por" + as a substring in any of the searchable fields (except + for the included permissions). + - ``resource:(instance1 OR instance2) policy:amy`` to find + IAM policy bindings that are set on resources "instance1" + or "instance2" and also specify user "amy". + page_size (int): + Optional. The page size for search result pagination. Page + size is capped at 500 even if a larger value is given. If + set to zero, server will pick an appropriate default. + Returned results may be fewer than requested. When this + happens, there could be more results as long as + ``next_page_token`` is returned. + page_token (str): + Optional. If present, retrieve the next batch of results + from the preceding call to this method. ``page_token`` must + be the value of ``next_page_token`` from the previous + response. The values of all other method parameters must be + identical to those in the previous call. + """ + + scope = proto.Field( + proto.STRING, + number=1, + ) + query = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) + + +class SearchAllIamPoliciesResponse(proto.Message): + r"""Search all IAM policies response. + Attributes: + results (Sequence[google.cloud.asset_v1.types.IamPolicySearchResult]): + A list of IamPolicy that match the search + query. Related information such as the + associated resource is returned along with the + policy. + next_page_token (str): + Set if there are more results than those appearing in this + response; to get the next set of results, call this method + again, using this value as the ``page_token``. 
+ """ + + @property + def raw_page(self): + return self + + results = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gca_assets.IamPolicySearchResult, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class IamPolicyAnalysisQuery(proto.Message): + r"""IAM policy analysis query message. + Attributes: + scope (str): + Required. The relative name of the root asset. Only + resources and IAM policies within the scope will be + analyzed. + + This can only be an organization number (such as + "organizations/123"), a folder number (such as + "folders/123"), a project ID (such as + "projects/my-project-id"), or a project number (such as + "projects/12345"). + + To know how to get organization id, visit + `here `__. + + To know how to get folder or project id, visit + `here `__. + resource_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.ResourceSelector): + Optional. Specifies a resource for analysis. + identity_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.IdentitySelector): + Optional. Specifies an identity for analysis. + access_selector (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.AccessSelector): + Optional. Specifies roles or permissions for + analysis. This is optional. + options (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.Options): + Optional. The query options. + """ + + class ResourceSelector(proto.Message): + r"""Specifies the resource to analyze for access policies, which + may be set directly on the resource, or on ancestors such as + organizations, folders or projects. + + Attributes: + full_resource_name (str): + Required. The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of a resource of `supported resource + types `__. 
+ """ + + full_resource_name = proto.Field( + proto.STRING, + number=1, + ) + + class IdentitySelector(proto.Message): + r"""Specifies an identity for which to determine resource access, + based on roles assigned either directly to them or to the groups + they belong to, directly or indirectly. + + Attributes: + identity (str): + Required. The identity appear in the form of members in `IAM + policy + binding `__. + + The examples of supported forms are: + "user:mike@example.com", "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com". + + Notice that wildcard characters (such as \* and ?) are not + supported. You must give a specific identity. + """ + + identity = proto.Field( + proto.STRING, + number=1, + ) + + class AccessSelector(proto.Message): + r"""Specifies roles and/or permissions to analyze, to determine + both the identities possessing them and the resources they + control. If multiple values are specified, results will include + roles or permissions matching any of them. The total number of + roles and permissions should be equal or less than 10. + + Attributes: + roles (Sequence[str]): + Optional. The roles to appear in result. + permissions (Sequence[str]): + Optional. The permissions to appear in + result. + """ + + roles = proto.RepeatedField( + proto.STRING, + number=1, + ) + permissions = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class Options(proto.Message): + r"""Contains query options. + Attributes: + expand_groups (bool): + Optional. If true, the identities section of the result will + expand any Google groups appearing in an IAM policy binding. + + If + [IamPolicyAnalysisQuery.identity_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.identity_selector] + is specified, the identity in the result will be determined + by the selector, and this flag is not allowed to set. + + Default is false. + expand_roles (bool): + Optional. 
If true, the access section of result will expand + any roles appearing in IAM policy bindings to include their + permissions. + + If + [IamPolicyAnalysisQuery.access_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.access_selector] + is specified, the access section of the result will be + determined by the selector, and this flag is not allowed to + set. + + Default is false. + expand_resources (bool): + Optional. If true and + [IamPolicyAnalysisQuery.resource_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.resource_selector] + is not specified, the resource section of the result will + expand any resource attached to an IAM policy to include + resources lower in the resource hierarchy. + + For example, if the request analyzes for which resources + user A has permission P, and the results include an IAM + policy with P on a GCP folder, the results will also include + resources in that folder with permission P. + + If true and + [IamPolicyAnalysisQuery.resource_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.resource_selector] + is specified, the resource section of the result will expand + the specified resource to include resources lower in the + resource hierarchy. Only project or lower resources are + supported. Folder and organization resource cannot be used + together with this option. + + For example, if the request analyzes for which users have + permission P on a GCP project with this option enabled, the + results will include all users who have permission P on that + project or any lower resource. + + Default is false. + output_resource_edges (bool): + Optional. If true, the result will output + resource edges, starting from the policy + attached resource, to any expanded resources. + Default is false. + output_group_edges (bool): + Optional. If true, the result will output + group identity edges, starting from the + binding's group members, to any expanded + identities. Default is false. 
+ analyze_service_account_impersonation (bool): + Optional. If true, the response will include access analysis + from identities to resources via service account + impersonation. This is a very expensive operation, because + many derived queries will be executed. We highly recommend + you use + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] + rpc instead. + + For example, if the request analyzes for which resources + user A has permission P, and there's an IAM policy states + user A has iam.serviceAccounts.getAccessToken permission to + a service account SA, and there's another IAM policy states + service account SA has permission P to a GCP folder F, then + user A potentially has access to the GCP folder F. And those + advanced analysis results will be included in + [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. + + Another example, if the request analyzes for who has + permission P to a GCP folder F, and there's an IAM policy + states user A has iam.serviceAccounts.actAs permission to a + service account SA, and there's another IAM policy states + service account SA has permission P to the GCP folder F, + then user A potentially has access to the GCP folder F. And + those advanced analysis results will be included in + [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. + + Default is false. 
+ """ + + expand_groups = proto.Field( + proto.BOOL, + number=1, + ) + expand_roles = proto.Field( + proto.BOOL, + number=2, + ) + expand_resources = proto.Field( + proto.BOOL, + number=3, + ) + output_resource_edges = proto.Field( + proto.BOOL, + number=4, + ) + output_group_edges = proto.Field( + proto.BOOL, + number=5, + ) + analyze_service_account_impersonation = proto.Field( + proto.BOOL, + number=6, + ) + + scope = proto.Field( + proto.STRING, + number=1, + ) + resource_selector = proto.Field( + proto.MESSAGE, + number=2, + message=ResourceSelector, + ) + identity_selector = proto.Field( + proto.MESSAGE, + number=3, + message=IdentitySelector, + ) + access_selector = proto.Field( + proto.MESSAGE, + number=4, + message=AccessSelector, + ) + options = proto.Field( + proto.MESSAGE, + number=5, + message=Options, + ) + + +class AnalyzeIamPolicyRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + + Attributes: + analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): + Required. The request query. + execution_timeout (google.protobuf.duration_pb2.Duration): + Optional. Amount of time executable has to complete. See + JSON representation of + `Duration `__. + + If this field is set with a value less than the RPC + deadline, and the execution of your query hasn't finished in + the specified execution timeout, you will get a response + with partial result. Otherwise, your query's execution will + continue until the RPC deadline. If it's not finished until + then, you will get a DEADLINE_EXCEEDED error. + + Default is empty. 
+ """ + + analysis_query = proto.Field( + proto.MESSAGE, + number=1, + message='IamPolicyAnalysisQuery', + ) + execution_timeout = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class AnalyzeIamPolicyResponse(proto.Message): + r"""A response message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + + Attributes: + main_analysis (google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis): + The main analysis that matches the original + request. + service_account_impersonation_analysis (Sequence[google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis]): + The service account impersonation analysis if + [AnalyzeIamPolicyRequest.analyze_service_account_impersonation][] + is enabled. + fully_explored (bool): + Represents whether all entries in the + [main_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.main_analysis] + and + [service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis] + have been fully explored to answer the query in the request. + """ + + class IamPolicyAnalysis(proto.Message): + r"""An analysis message to group the query and results. + Attributes: + analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): + The analysis query. + analysis_results (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult]): + A list of + [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult] + that matches the analysis query, or empty if no result is + found. + fully_explored (bool): + Represents whether all entries in the + [analysis_results][google.cloud.asset.v1.AnalyzeIamPolicyResponse.IamPolicyAnalysis.analysis_results] + have been fully explored to answer the query. + non_critical_errors (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisState]): + A list of non-critical errors happened during + the query handling. 
+ """ + + analysis_query = proto.Field( + proto.MESSAGE, + number=1, + message='IamPolicyAnalysisQuery', + ) + analysis_results = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gca_assets.IamPolicyAnalysisResult, + ) + fully_explored = proto.Field( + proto.BOOL, + number=3, + ) + non_critical_errors = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=gca_assets.IamPolicyAnalysisState, + ) + + main_analysis = proto.Field( + proto.MESSAGE, + number=1, + message=IamPolicyAnalysis, + ) + service_account_impersonation_analysis = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=IamPolicyAnalysis, + ) + fully_explored = proto.Field( + proto.BOOL, + number=3, + ) + + +class IamPolicyAnalysisOutputConfig(proto.Message): + r"""Output configuration for export IAM policy analysis + destination. + + Attributes: + gcs_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.GcsDestination): + Destination on Cloud Storage. + bigquery_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination): + Destination on BigQuery. + """ + + class GcsDestination(proto.Message): + r"""A Cloud Storage location. + Attributes: + uri (str): + Required. The uri of the Cloud Storage object. It's the same + uri that is used by gsutil. For example: + "gs://bucket_name/object_name". See [Quickstart: Using the + gsutil tool] + (https://cloud.google.com/storage/docs/quickstart-gsutil) + for examples. + """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + + class BigQueryDestination(proto.Message): + r"""A BigQuery destination. + Attributes: + dataset (str): + Required. The BigQuery dataset in format + "projects/projectId/datasets/datasetId", to which the + analysis results should be exported. If this dataset does + not exist, the export call will return an INVALID_ARGUMENT + error. + table_prefix (str): + Required. The prefix of the BigQuery tables to which the + analysis results will be written. 
Tables will be created + based on this table_prefix if not exist: + + - _analysis table will contain export + operation's metadata. + - _analysis_result will contain all the + [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult]. + When [partition_key] is specified, both tables will be + partitioned based on the [partition_key]. + partition_key (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey): + The partition key for BigQuery partitioned + table. + write_disposition (str): + Optional. Specifies the action that occurs if the + destination table or partition already exists. The following + values are supported: + + - WRITE_TRUNCATE: If the table or partition already exists, + BigQuery overwrites the entire table or all the + partitions data. + - WRITE_APPEND: If the table or partition already exists, + BigQuery appends the data to the table or the latest + partition. + - WRITE_EMPTY: If the table already exists and contains + data, an error is returned. + + The default value is WRITE_APPEND. Each action is atomic and + only occurs if BigQuery is able to complete the job + successfully. Details are at + https://cloud.google.com/bigquery/docs/loading-data-local#appending_to_or_overwriting_a_table_using_a_local_file. + """ + class PartitionKey(proto.Enum): + r"""This enum determines the partition key column for the + bigquery tables. Partitioning can improve query performance and + reduce query cost by filtering partitions. Refer to + https://cloud.google.com/bigquery/docs/partitioned-tables for + details. 
+ """ + PARTITION_KEY_UNSPECIFIED = 0 + REQUEST_TIME = 1 + + dataset = proto.Field( + proto.STRING, + number=1, + ) + table_prefix = proto.Field( + proto.STRING, + number=2, + ) + partition_key = proto.Field( + proto.ENUM, + number=3, + enum='IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey', + ) + write_disposition = proto.Field( + proto.STRING, + number=4, + ) + + gcs_destination = proto.Field( + proto.MESSAGE, + number=1, + oneof='destination', + message=GcsDestination, + ) + bigquery_destination = proto.Field( + proto.MESSAGE, + number=2, + oneof='destination', + message=BigQueryDestination, + ) + + +class AnalyzeIamPolicyLongrunningRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + + Attributes: + analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): + Required. The request query. + output_config (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig): + Required. Output configuration indicating + where the results will be output to. + """ + + analysis_query = proto.Field( + proto.MESSAGE, + number=1, + message='IamPolicyAnalysisQuery', + ) + output_config = proto.Field( + proto.MESSAGE, + number=2, + message='IamPolicyAnalysisOutputConfig', + ) + + +class AnalyzeIamPolicyLongrunningResponse(proto.Message): + r"""A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. 
+ """ + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py new file mode 100644 index 000000000000..6c4611e717b7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -0,0 +1,867 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore +from google.cloud.osconfig.v1 import inventory_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import access_level_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import access_policy_pb2 # type: ignore +from google.identity.accesscontextmanager.v1 import service_perimeter_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.asset.v1', + manifest={ + 'TemporalAsset', + 'TimeWindow', + 'Asset', + 'Resource', + 'ResourceSearchResult', + 'IamPolicySearchResult', + 'IamPolicyAnalysisState', + 'IamPolicyAnalysisResult', + }, +) + + +class TemporalAsset(proto.Message): + r"""An asset in Google Cloud and its temporal metadata, including + the time window when it was observed and its status during that + window. + + Attributes: + window (google.cloud.asset_v1.types.TimeWindow): + The time window when the asset data and state + was observed. + deleted (bool): + Whether the asset has been deleted or not. + asset (google.cloud.asset_v1.types.Asset): + An asset in Google Cloud. + prior_asset_state (google.cloud.asset_v1.types.TemporalAsset.PriorAssetState): + State of prior_asset. + prior_asset (google.cloud.asset_v1.types.Asset): + Prior copy of the asset. Populated if prior_asset_state is + PRESENT. Currently this is only set for responses in + Real-Time Feed. 
+ """ + class PriorAssetState(proto.Enum): + r"""State of prior asset.""" + PRIOR_ASSET_STATE_UNSPECIFIED = 0 + PRESENT = 1 + INVALID = 2 + DOES_NOT_EXIST = 3 + DELETED = 4 + + window = proto.Field( + proto.MESSAGE, + number=1, + message='TimeWindow', + ) + deleted = proto.Field( + proto.BOOL, + number=2, + ) + asset = proto.Field( + proto.MESSAGE, + number=3, + message='Asset', + ) + prior_asset_state = proto.Field( + proto.ENUM, + number=4, + enum=PriorAssetState, + ) + prior_asset = proto.Field( + proto.MESSAGE, + number=5, + message='Asset', + ) + + +class TimeWindow(proto.Message): + r"""A time window specified by its ``start_time`` and ``end_time``. + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Start time of the time window (exclusive). + end_time (google.protobuf.timestamp_pb2.Timestamp): + End time of the time window (inclusive). If + not specified, the current timestamp is used + instead. + """ + + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + +class Asset(proto.Message): + r"""An asset in Google Cloud. An asset can be any resource in the Google + Cloud `resource + hierarchy `__, + a resource outside the Google Cloud resource hierarchy (such as + Google Kubernetes Engine clusters and objects), or a policy (e.g. + Cloud IAM policy). See `Supported asset + types `__ + for more information. + + Attributes: + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update timestamp of an asset. update_time is + updated when create/update/delete operation is performed. + name (str): + The full name of the asset. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` + + See `Resource + names `__ + for more information. + asset_type (str): + The type of the asset. 
Example: + ``compute.googleapis.com/Disk`` + + See `Supported asset + types `__ + for more information. + resource (google.cloud.asset_v1.types.Resource): + A representation of the resource. + iam_policy (google.iam.v1.policy_pb2.Policy): + A representation of the Cloud IAM policy set on a Google + Cloud resource. There can be a maximum of one Cloud IAM + policy set on any given resource. In addition, Cloud IAM + policies inherit their granted access scope from any + policies set on parent resources in the resource hierarchy. + Therefore, the effectively policy is the union of both the + policy set on this resource and each policy set on all of + the resource's ancestry resource levels in the hierarchy. + See `this + topic `__ + for more information. + org_policy (Sequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): + A representation of an `organization + policy `__. + There can be more than one organization policy with + different constraints set on a given resource. + access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): + Please also refer to the `access policy user + guide `__. + access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): + Please also refer to the `access level user + guide `__. + service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): + Please also refer to the `service perimeter user + guide `__. + os_inventory (google.cloud.osconfig.v1.inventory_pb2.Inventory): + A representation of runtime OS Inventory information. See + `this + topic `__ + for more information. + ancestors (Sequence[str]): + The ancestry path of an asset in Google Cloud `resource + hierarchy `__, + represented as a list of relative resource names. An + ancestry path starts with the closest ancestor in the + hierarchy and ends at root. If the asset is a project, + folder, or organization, the ancestry path starts from the + asset itself. 
+ + Example: + ``["projects/123456789", "folders/5432", "organizations/1234"]`` + """ + + update_time = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + name = proto.Field( + proto.STRING, + number=1, + ) + asset_type = proto.Field( + proto.STRING, + number=2, + ) + resource = proto.Field( + proto.MESSAGE, + number=3, + message='Resource', + ) + iam_policy = proto.Field( + proto.MESSAGE, + number=4, + message=policy_pb2.Policy, + ) + org_policy = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=orgpolicy_pb2.Policy, + ) + access_policy = proto.Field( + proto.MESSAGE, + number=7, + oneof='access_context_policy', + message=access_policy_pb2.AccessPolicy, + ) + access_level = proto.Field( + proto.MESSAGE, + number=8, + oneof='access_context_policy', + message=access_level_pb2.AccessLevel, + ) + service_perimeter = proto.Field( + proto.MESSAGE, + number=9, + oneof='access_context_policy', + message=service_perimeter_pb2.ServicePerimeter, + ) + os_inventory = proto.Field( + proto.MESSAGE, + number=12, + message=inventory_pb2.Inventory, + ) + ancestors = proto.RepeatedField( + proto.STRING, + number=10, + ) + + +class Resource(proto.Message): + r"""A representation of a Google Cloud resource. + Attributes: + version (str): + The API version. Example: ``v1`` + discovery_document_uri (str): + The URL of the discovery document containing the resource's + JSON schema. Example: + ``https://www.googleapis.com/discovery/v1/apis/compute/v1/rest`` + + This value is unspecified for resources that do not have an + API based on a discovery document, such as Cloud Bigtable. + discovery_name (str): + The JSON schema name listed in the discovery document. + Example: ``Project`` + + This value is unspecified for resources that do not have an + API based on a discovery document, such as Cloud Bigtable. + resource_url (str): + The REST URL for accessing the resource. An HTTP ``GET`` + request using this URL returns the resource itself. 
Example: + ``https://cloudresourcemanager.googleapis.com/v1/projects/my-project-123`` + + This value is unspecified for resources without a REST API. + parent (str): + The full name of the immediate parent of this resource. See + `Resource + Names `__ + for more information. + + For Google Cloud assets, this value is the parent resource + defined in the `Cloud IAM policy + hierarchy `__. + Example: + ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` + + For third-party assets, this field may be set differently. + data (google.protobuf.struct_pb2.Struct): + The content of the resource, in which some + sensitive fields are removed and may not be + present. + location (str): + The location of the resource in Google Cloud, + such as its zone and region. For more + information, see + https://cloud.google.com/about/locations/. + """ + + version = proto.Field( + proto.STRING, + number=1, + ) + discovery_document_uri = proto.Field( + proto.STRING, + number=2, + ) + discovery_name = proto.Field( + proto.STRING, + number=3, + ) + resource_url = proto.Field( + proto.STRING, + number=4, + ) + parent = proto.Field( + proto.STRING, + number=5, + ) + data = proto.Field( + proto.MESSAGE, + number=6, + message=struct_pb2.Struct, + ) + location = proto.Field( + proto.STRING, + number=8, + ) + + +class ResourceSearchResult(proto.Message): + r"""A result of Resource Search, containing information of a + cloud resource. + + Attributes: + name (str): + The full resource name of this resource. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. + See `Cloud Asset Inventory Resource Name + Format `__ + for more information. + + To search against the ``name``: + + - use a field query. Example: ``name:instance1`` + - use a free text query. Example: ``instance1`` + asset_type (str): + The type of this resource. Example: + ``compute.googleapis.com/Disk``. 
+ + To search against the ``asset_type``: + + - specify the ``asset_type`` field in your search request. + project (str): + The project that this resource belongs to, in the form of + projects/{PROJECT_NUMBER}. + + To search against the ``project``: + + - specify the ``scope`` field as this project in your + search request. + display_name (str): + The display name of this resource. + + To search against the ``display_name``: + + - use a field query. Example: ``displayName:"My Instance"`` + - use a free text query. Example: ``"My Instance"`` + description (str): + One or more paragraphs of text description of this resource. + Maximum length could be up to 1M bytes. + + To search against the ``description``: + + - use a field query. Example: + ``description:"*important instance*"`` + - use a free text query. Example: + ``"*important instance*"`` + location (str): + Location can be ``global``, regional like ``us-east1``, or + zonal like ``us-west1-b``. + + To search against the ``location``: + + - use a field query. Example: ``location:us-west*`` + - use a free text query. Example: ``us-west*`` + labels (Sequence[google.cloud.asset_v1.types.ResourceSearchResult.LabelsEntry]): + Labels associated with this resource. See `Labelling and + grouping GCP + resources `__ + for more information. + + To search against the ``labels``: + + - use a field query: + + - query on any label's key or value. Example: + ``labels:prod`` + - query by a given label. Example: ``labels.env:prod`` + - query by a given label's existence. Example: + ``labels.env:*`` + + - use a free text query. Example: ``prod`` + network_tags (Sequence[str]): + Network tags associated with this resource. Like labels, + network tags are a type of annotations used to group GCP + resources. See `Labelling GCP + resources `__ + for more information. + + To search against the ``network_tags``: + + - use a field query. Example: ``networkTags:internal`` + - use a free text query. 
Example: ``internal`` + additional_attributes (google.protobuf.struct_pb2.Struct): + The additional searchable attributes of this resource. The + attributes may vary from one resource type to another. + Examples: ``projectId`` for Project, ``dnsName`` for DNS + ManagedZone. This field contains a subset of the resource + metadata fields that are returned by the List or Get APIs + provided by the corresponding GCP service (e.g., Compute + Engine). see `API references and supported searchable + attributes `__ + for more information. + + You can search values of these fields through free text + search. However, you should not consume the field + programically as the field names and values may change as + the GCP service updates to a new incompatible API version. + + To search against the ``additional_attributes``: + + - use a free text query to match the attributes values. + Example: to search + ``additional_attributes = { dnsName: "foobar" }``, you + can issue a query ``foobar``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + asset_type = proto.Field( + proto.STRING, + number=2, + ) + project = proto.Field( + proto.STRING, + number=3, + ) + display_name = proto.Field( + proto.STRING, + number=4, + ) + description = proto.Field( + proto.STRING, + number=5, + ) + location = proto.Field( + proto.STRING, + number=6, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + network_tags = proto.RepeatedField( + proto.STRING, + number=8, + ) + additional_attributes = proto.Field( + proto.MESSAGE, + number=9, + message=struct_pb2.Struct, + ) + + +class IamPolicySearchResult(proto.Message): + r"""A result of IAM Policy search, containing information of an + IAM policy. + + Attributes: + resource (str): + The full resource name of the resource associated with this + IAM policy. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. 
+ See `Cloud Asset Inventory Resource Name + Format `__ + for more information. + + To search against the ``resource``: + + - use a field query. Example: + ``resource:organizations/123`` + project (str): + The project that the associated GCP resource belongs to, in + the form of projects/{PROJECT_NUMBER}. If an IAM policy is + set on a resource (like VM instance, Cloud Storage bucket), + the project field will indicate the project that contains + the resource. If an IAM policy is set on a folder or + orgnization, the project field will be empty. + + To search against the ``project``: + + - specify the ``scope`` field as this project in your + search request. + policy (google.iam.v1.policy_pb2.Policy): + The IAM policy directly set on the given resource. Note that + the original IAM policy can contain multiple bindings. This + only contains the bindings that match the given query. For + queries that don't contain a constrain on policies (e.g., an + empty query), this contains all the bindings. + + To search against the ``policy`` bindings: + + - use a field query: + + - query by the policy contained members. Example: + ``policy:amy@gmail.com`` + - query by the policy contained roles. Example: + ``policy:roles/compute.admin`` + - query by the policy contained roles' included + permissions. Example: + ``policy.role.permissions:compute.instances.create`` + explanation (google.cloud.asset_v1.types.IamPolicySearchResult.Explanation): + Explanation about the IAM policy search + result. It contains additional information to + explain why the search result matches the query. + """ + + class Explanation(proto.Message): + r"""Explanation about the IAM policy search result. + Attributes: + matched_permissions (Sequence[google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.MatchedPermissionsEntry]): + The map from roles to their included permissions that match + the permission query (i.e., a query containing + ``policy.role.permissions:``). 
Example: if query + ``policy.role.permissions:compute.disk.get`` matches a + policy binding that contains owner role, the + matched_permissions will be + ``{"roles/owner": ["compute.disk.get"]}``. The roles can + also be found in the returned ``policy`` bindings. Note that + the map is populated only for requests with permission + queries. + """ + + class Permissions(proto.Message): + r"""IAM permissions + Attributes: + permissions (Sequence[str]): + A list of permissions. A sample permission string: + ``compute.disk.get``. + """ + + permissions = proto.RepeatedField( + proto.STRING, + number=1, + ) + + matched_permissions = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message='IamPolicySearchResult.Explanation.Permissions', + ) + + resource = proto.Field( + proto.STRING, + number=1, + ) + project = proto.Field( + proto.STRING, + number=2, + ) + policy = proto.Field( + proto.MESSAGE, + number=3, + message=policy_pb2.Policy, + ) + explanation = proto.Field( + proto.MESSAGE, + number=4, + message=Explanation, + ) + + +class IamPolicyAnalysisState(proto.Message): + r"""Represents the detailed state of an entity under analysis, + such as a resource, an identity or an access. + + Attributes: + code (google.rpc.code_pb2.Code): + The Google standard error code that best describes the + state. For example: + + - OK means the analysis on this entity has been + successfully finished; + - PERMISSION_DENIED means an access denied error is + encountered; + - DEADLINE_EXCEEDED means the analysis on this entity + hasn't been started in time; + cause (str): + The human-readable description of the cause + of failure. + """ + + code = proto.Field( + proto.ENUM, + number=1, + enum=code_pb2.Code, + ) + cause = proto.Field( + proto.STRING, + number=2, + ) + + +class IamPolicyAnalysisResult(proto.Message): + r"""IAM Policy analysis result, consisting of one IAM policy + binding and derived access control lists. 
+ + Attributes: + attached_resource_full_name (str): + The `full resource + name `__ + of the resource to which the + [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] + policy attaches. + iam_binding (google.iam.v1.policy_pb2.Binding): + The Cloud IAM policy binding under analysis. + access_control_lists (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.AccessControlList]): + The access control lists derived from the + [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] + that match or potentially match resource and access + selectors specified in the request. + identity_list (google.cloud.asset_v1.types.IamPolicyAnalysisResult.IdentityList): + The identity list derived from members of the + [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] + that match or potentially match identity selector specified + in the request. + fully_explored (bool): + Represents whether all analyses on the + [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] + have successfully finished. + """ + + class Resource(proto.Message): + r"""A Google Cloud resource under analysis. + Attributes: + full_resource_name (str): + The `full resource + name `__ + analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): + The analysis state of this resource. + """ + + full_resource_name = proto.Field( + proto.STRING, + number=1, + ) + analysis_state = proto.Field( + proto.MESSAGE, + number=2, + message='IamPolicyAnalysisState', + ) + + class Access(proto.Message): + r"""An IAM role or permission under analysis. + Attributes: + role (str): + The role. + permission (str): + The permission. + analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): + The analysis state of this access. 
+ """ + + role = proto.Field( + proto.STRING, + number=1, + oneof='oneof_access', + ) + permission = proto.Field( + proto.STRING, + number=2, + oneof='oneof_access', + ) + analysis_state = proto.Field( + proto.MESSAGE, + number=3, + message='IamPolicyAnalysisState', + ) + + class Identity(proto.Message): + r"""An identity under analysis. + Attributes: + name (str): + The identity name in any form of members appear in `IAM + policy + binding `__, + such as: + + - user:foo@google.com + - group:group1@google.com + - serviceAccount:s1@prj1.iam.gserviceaccount.com + - projectOwner:some_project_id + - domain:google.com + - allUsers + - etc. + analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): + The analysis state of this identity. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + analysis_state = proto.Field( + proto.MESSAGE, + number=2, + message='IamPolicyAnalysisState', + ) + + class Edge(proto.Message): + r"""A directional edge. + Attributes: + source_node (str): + The source node of the edge. For example, it + could be a full resource name for a resource + node or an email of an identity. + target_node (str): + The target node of the edge. For example, it + could be a full resource name for a resource + node or an email of an identity. + """ + + source_node = proto.Field( + proto.STRING, + number=1, + ) + target_node = proto.Field( + proto.STRING, + number=2, + ) + + class AccessControlList(proto.Message): + r"""An access control list, derived from the above IAM policy binding, + which contains a set of resources and accesses. May include one item + from each set to compose an access control entry. + + NOTICE that there could be multiple access control lists for one IAM + policy binding. The access control lists are created based on + resource and access combinations. 
+ + For example, assume we have the following cases in one IAM policy + binding: + + - Permission P1 and P2 apply to resource R1 and R2; + - Permission P3 applies to resource R2 and R3; + + This will result in the following access control lists: + + - AccessControlList 1: [R1, R2], [P1, P2] + - AccessControlList 2: [R2, R3], [P3] + + Attributes: + resources (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Resource]): + The resources that match one of the following conditions: + + - The resource_selector, if it is specified in request; + - Otherwise, resources reachable from the policy attached + resource. + accesses (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Access]): + The accesses that match one of the following conditions: + + - The access_selector, if it is specified in request; + - Otherwise, access specifiers reachable from the policy + binding's role. + resource_edges (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): + Resource edges of the graph starting from the policy + attached resource to any descendant resources. The + [Edge.source_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.source_node] + contains the full resource name of a parent resource and + [Edge.target_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.target_node] + contains the full resource name of a child resource. This + field is present only if the output_resource_edges option is + enabled in request. + """ + + resources = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='IamPolicyAnalysisResult.Resource', + ) + accesses = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='IamPolicyAnalysisResult.Access', + ) + resource_edges = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='IamPolicyAnalysisResult.Edge', + ) + + class IdentityList(proto.Message): + r"""The identities and group edges. 
+ Attributes: + identities (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Identity]): + Only the identities that match one of the following + conditions will be presented: + + - The identity_selector, if it is specified in request; + - Otherwise, identities reachable from the policy binding's + members. + group_edges (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): + Group identity edges of the graph starting from the + binding's group members to any node of the + [identities][google.cloud.asset.v1.IamPolicyAnalysisResult.IdentityList.identities]. + The + [Edge.source_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.source_node] + contains a group, such as ``group:parent@google.com``. The + [Edge.target_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.target_node] + contains a member of the group, such as + ``group:child@google.com`` or ``user:foo@google.com``. This + field is present only if the output_group_edges option is + enabled in request. 
+ """ + + identities = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='IamPolicyAnalysisResult.Identity', + ) + group_edges = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='IamPolicyAnalysisResult.Edge', + ) + + attached_resource_full_name = proto.Field( + proto.STRING, + number=1, + ) + iam_binding = proto.Field( + proto.MESSAGE, + number=2, + message=policy_pb2.Binding, + ) + access_control_lists = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=AccessControlList, + ) + identity_list = proto.Field( + proto.MESSAGE, + number=4, + message=IdentityList, + ) + fully_explored = proto.Field( + proto.BOOL, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini b/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py new file mode 100644 index 000000000000..e4734091267c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] + +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/asset_v1/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python='3.7') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py new file mode 100644 index 000000000000..b0cdcf3f4a56 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -0,0 +1,186 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class assetCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'analyze_iam_policy': ('analysis_query', 'execution_timeout', ), + 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', ), + 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', ), + 'create_feed': ('parent', 'feed_id', 'feed', ), + 'delete_feed': ('name', ), + 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', ), + 'get_feed': ('name', ), + 'list_feeds': ('parent', ), + 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', ), + 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), + 'update_feed': ('feed', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: 
cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=assetCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. 
+ tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the asset client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py new file mode 100644 index 000000000000..1aece31bd3f4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-asset', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py new file mode 100644 index 000000000000..400ed9a8ef86 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -0,0 +1,3612 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient +from google.cloud.asset_v1.services.asset_service import AssetServiceClient +from google.cloud.asset_v1.services.asset_service import pagers +from google.cloud.asset_v1.services.asset_service import transports +from google.cloud.asset_v1.services.asset_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.asset_v1.types import asset_service +from google.cloud.asset_v1.types import assets +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import expr_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively 
+# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert AssetServiceClient._get_default_mtls_endpoint(None) is None + assert AssetServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert AssetServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert AssetServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + AssetServiceClient, + AssetServiceAsyncClient, +]) +def test_asset_service_client_from_service_account_info(client_class): + 
creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudasset.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + AssetServiceClient, + AssetServiceAsyncClient, +]) +def test_asset_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'cloudasset.googleapis.com:443' + + +def test_asset_service_client_get_transport_class(): + transport = AssetServiceClient.get_transport_class() + available_transports = [ + transports.AssetServiceGrpcTransport, + ] + assert transport in available_transports + + transport = AssetServiceClient.get_transport_class("grpc") + assert transport == transports.AssetServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +def 
test_asset_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "true"), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_asset_service_client_client_options_from_dict(): + with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = AssetServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_export_assets(transport: str = 'grpc', 
request_type=asset_service.ExportAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_assets_from_dict(): + test_export_assets(request_type=dict) + + +def test_export_assets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + client.export_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest() + + +@pytest.mark.asyncio +async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_assets_async_from_dict(): + await test_export_assets_async(request_type=dict) + + +def test_export_assets_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ExportAssetsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_export_assets_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ExportAssetsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_batch_get_assets_history(transport: str = 'grpc', request_type=asset_service.BatchGetAssetsHistoryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.BatchGetAssetsHistoryResponse( + ) + response = client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + + +def test_batch_get_assets_history_from_dict(): + test_batch_get_assets_history(request_type=dict) + + +def test_batch_get_assets_history_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + client.batch_get_assets_history() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + + +@pytest.mark.asyncio +async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( + )) + response = await client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + + +@pytest.mark.asyncio +async def test_batch_get_assets_history_async_from_dict(): + await test_batch_get_assets_history_async(request_type=dict) + + +def test_batch_get_assets_history_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.BatchGetAssetsHistoryRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + call.return_value = asset_service.BatchGetAssetsHistoryResponse() + client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_get_assets_history_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.BatchGetAssetsHistoryRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse()) + await client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_feed(transport: str = 'grpc', request_type=asset_service.CreateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + ) + response = client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +def test_create_feed_from_dict(): + test_create_feed(request_type=dict) + + +def test_create_feed_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + client.create_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest() + + +@pytest.mark.asyncio +async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + )) + response = await client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +@pytest.mark.asyncio +async def test_create_feed_async_from_dict(): + await test_create_feed_async(request_type=dict) + + +def test_create_feed_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.CreateFeedRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_feed_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.CreateFeedRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) + await client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_feed_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_feed( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_create_feed_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_feed( + asset_service.CreateFeedRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_create_feed_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_feed( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_create_feed_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_feed( + asset_service.CreateFeedRequest(), + parent='parent_value', + ) + + +def test_get_feed(transport: str = 'grpc', request_type=asset_service.GetFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + ) + response = client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +def test_get_feed_from_dict(): + test_get_feed(request_type=dict) + + +def test_get_feed_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + client.get_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest() + + +@pytest.mark.asyncio +async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + )) + response = await client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +@pytest.mark.asyncio +async def test_get_feed_async_from_dict(): + await test_get_feed_async(request_type=dict) + + +def test_get_feed_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = asset_service.GetFeedRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_feed_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.GetFeedRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) + await client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_feed_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = asset_service.Feed() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_feed( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_feed_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_feed( + asset_service.GetFeedRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_feed_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_feed( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_feed_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_feed( + asset_service.GetFeedRequest(), + name='name_value', + ) + + +def test_list_feeds(transport: str = 'grpc', request_type=asset_service.ListFeedsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListFeedsResponse( + ) + response = client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.ListFeedsResponse) + + +def test_list_feeds_from_dict(): + test_list_feeds(request_type=dict) + + +def test_list_feeds_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + client.list_feeds() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest() + + +@pytest.mark.asyncio +async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( + )) + response = await client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.ListFeedsResponse) + + +@pytest.mark.asyncio +async def test_list_feeds_async_from_dict(): + await test_list_feeds_async(request_type=dict) + + +def test_list_feeds_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListFeedsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + call.return_value = asset_service.ListFeedsResponse() + client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_feeds_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListFeedsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) + await client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_feeds_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListFeedsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_feeds( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_feeds_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_feeds( + asset_service.ListFeedsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_feeds_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListFeedsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_feeds( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_feeds_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_feeds( + asset_service.ListFeedsRequest(), + parent='parent_value', + ) + + +def test_update_feed(transport: str = 'grpc', request_type=asset_service.UpdateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + ) + response = client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +def test_update_feed_from_dict(): + test_update_feed(request_type=dict) + + +def test_update_feed_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + client.update_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest() + + +@pytest.mark.asyncio +async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + )) + response = await client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +@pytest.mark.asyncio +async def test_update_feed_async_from_dict(): + await test_update_feed_async(request_type=dict) + + +def test_update_feed_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = asset_service.UpdateFeedRequest() + + request.feed.name = 'feed.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'feed.name=feed.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_feed_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.UpdateFeedRequest() + + request.feed.name = 'feed.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) + await client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'feed.name=feed.name/value', + ) in kw['metadata'] + + +def test_update_feed_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_feed( + feed=asset_service.Feed(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].feed == asset_service.Feed(name='name_value') + + +def test_update_feed_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_feed( + asset_service.UpdateFeedRequest(), + feed=asset_service.Feed(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_feed_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.Feed() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_feed( + feed=asset_service.Feed(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].feed == asset_service.Feed(name='name_value') + + +@pytest.mark.asyncio +async def test_update_feed_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_feed( + asset_service.UpdateFeedRequest(), + feed=asset_service.Feed(name='name_value'), + ) + + +def test_delete_feed(transport: str = 'grpc', request_type=asset_service.DeleteFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_feed_from_dict(): + test_delete_feed(request_type=dict) + + +def test_delete_feed_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + client.delete_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest() + + +@pytest.mark.asyncio +async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_feed_async_from_dict(): + await test_delete_feed_async(request_type=dict) + + +def test_delete_feed_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.DeleteFeedRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + call.return_value = None + client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_feed_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.DeleteFeedRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_feed_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_feed( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_feed_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_feed( + asset_service.DeleteFeedRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_feed_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_feed( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_feed_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_feed( + asset_service.DeleteFeedRequest(), + name='name_value', + ) + + +def test_search_all_resources(transport: str = 'grpc', request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + ) + response = client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllResourcesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_resources_from_dict(): + test_search_all_resources(request_type=dict) + + +def test_search_all_resources_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + client.search_all_resources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest() + + +@pytest.mark.asyncio +async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + )) + response = await client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllResourcesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_search_all_resources_async_from_dict(): + await test_search_all_resources_async(request_type=dict) + + +def test_search_all_resources_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = asset_service.SearchAllResourcesRequest() + + request.scope = 'scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + call.return_value = asset_service.SearchAllResourcesResponse() + client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_search_all_resources_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.SearchAllResourcesRequest() + + request.scope = 'scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) + await client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope/value', + ) in kw['metadata'] + + +def test_search_all_resources_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SearchAllResourcesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_all_resources( + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].scope == 'scope_value' + assert args[0].query == 'query_value' + assert args[0].asset_types == ['asset_types_value'] + + +def test_search_all_resources_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_resources( + asset_service.SearchAllResourcesRequest(), + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + +@pytest.mark.asyncio +async def test_search_all_resources_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SearchAllResourcesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.search_all_resources( + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].scope == 'scope_value' + assert args[0].query == 'query_value' + assert args[0].asset_types == ['asset_types_value'] + + +@pytest.mark.asyncio +async def test_search_all_resources_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.search_all_resources( + asset_service.SearchAllResourcesRequest(), + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + +def test_search_all_resources_pager(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.search_all_resources(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, assets.ResourceSearchResult) + for i in results) + +def test_search_all_resources_pages(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search_all_resources(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_all_resources_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_all_resources(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, assets.ResourceSearchResult) + for i in responses) + +@pytest.mark.asyncio +async def test_search_all_resources_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.search_all_resources(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_search_all_iam_policies(transport: str = 'grpc', request_type=asset_service.SearchAllIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + ) + response = client.search_all_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllIamPoliciesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchAllIamPoliciesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+def test_search_all_iam_policies_from_dict():
+ test_search_all_iam_policies(request_type=dict)
+
+
+def test_search_all_iam_policies_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssetServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_all_iam_policies),
+ '__call__') as call:
+ client.search_all_iam_policies()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == asset_service.SearchAllIamPoliciesRequest()
+
+
+@pytest.mark.asyncio
+async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest):
+ client = AssetServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_all_iam_policies),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.search_all_iam_policies(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == asset_service.SearchAllIamPoliciesRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_search_all_iam_policies_async_from_dict(): + await test_search_all_iam_policies_async(request_type=dict) + + +def test_search_all_iam_policies_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.SearchAllIamPoliciesRequest() + + request.scope = 'scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + call.return_value = asset_service.SearchAllIamPoliciesResponse() + client.search_all_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_search_all_iam_policies_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.SearchAllIamPoliciesRequest() + + request.scope = 'scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse()) + await client.search_all_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope/value', + ) in kw['metadata'] + + +def test_search_all_iam_policies_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SearchAllIamPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_all_iam_policies( + scope='scope_value', + query='query_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].scope == 'scope_value' + assert args[0].query == 'query_value' + + +def test_search_all_iam_policies_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.search_all_iam_policies(
+ asset_service.SearchAllIamPoliciesRequest(),
+ scope='scope_value',
+ query='query_value',
+ )
+
+
+@pytest.mark.asyncio
+async def test_search_all_iam_policies_flattened_async():
+ client = AssetServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_all_iam_policies),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.search_all_iam_policies(
+ scope='scope_value',
+ query='query_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].scope == 'scope_value'
+ assert args[0].query == 'query_value'
+
+
+@pytest.mark.asyncio
+async def test_search_all_iam_policies_flattened_error_async():
+ client = AssetServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.search_all_iam_policies(
+ asset_service.SearchAllIamPoliciesRequest(),
+ scope='scope_value',
+ query='query_value',
+ )
+
+
+def test_search_all_iam_policies_pager():
+ client = AssetServiceClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.search_all_iam_policies),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.search_all_iam_policies(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, assets.IamPolicySearchResult) + for i in results) + +def test_search_all_iam_policies_pages(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + RuntimeError, + ) + pages = list(client.search_all_iam_policies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_search_all_iam_policies_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_all_iam_policies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, assets.IamPolicySearchResult) + for i in responses) + +@pytest.mark.asyncio +async def test_search_all_iam_policies_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.search_all_iam_policies(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_analyze_iam_policy(transport: str = 'grpc', request_type=asset_service.AnalyzeIamPolicyRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + response = client.analyze_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.AnalyzeIamPolicyResponse)
+ assert response.fully_explored is True
+
+
+def test_analyze_iam_policy_from_dict():
+ test_analyze_iam_policy(request_type=dict)
+
+
+def test_analyze_iam_policy_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssetServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_iam_policy),
+ '__call__') as call:
+ client.analyze_iam_policy()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == asset_service.AnalyzeIamPolicyRequest()
+
+
+@pytest.mark.asyncio
+async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest):
+ client = AssetServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.analyze_iam_policy),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse(
+ fully_explored=True,
+ ))
+ response = await client.analyze_iam_policy(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == asset_service.AnalyzeIamPolicyRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True + + +@pytest.mark.asyncio +async def test_analyze_iam_policy_async_from_dict(): + await test_analyze_iam_policy_async(request_type=dict) + + +def test_analyze_iam_policy_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeIamPolicyRequest() + + request.analysis_query.scope = 'analysis_query.scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + call.return_value = asset_service.AnalyzeIamPolicyResponse() + client.analyze_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'analysis_query.scope=analysis_query.scope/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_iam_policy_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeIamPolicyRequest() + + request.analysis_query.scope = 'analysis_query.scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse()) + await client.analyze_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'analysis_query.scope=analysis_query.scope/value', + ) in kw['metadata'] + + +def test_analyze_iam_policy_longrunning(transport: str = 'grpc', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_analyze_iam_policy_longrunning_from_dict(): + test_analyze_iam_policy_longrunning(request_type=dict) + + +def test_analyze_iam_policy_longrunning_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + client.analyze_iam_policy_longrunning() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + + +@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_async_from_dict(): + await test_analyze_iam_policy_longrunning_async(request_type=dict) + + +def test_analyze_iam_policy_longrunning_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + + request.analysis_query.scope = 'analysis_query.scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'analysis_query.scope=analysis_query.scope/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + + request.analysis_query.scope = 'analysis_query.scope/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'analysis_query.scope=analysis_query.scope/value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AssetServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AssetServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AssetServiceGrpcTransport, + ) + +def test_asset_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AssetServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_asset_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.AssetServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'export_assets', + 'batch_get_assets_history', + 'create_feed', + 'get_feed', + 'list_feeds', + 'update_feed', + 'delete_feed', + 'search_all_resources', + 'search_all_iam_policies', + 'analyze_iam_policy', + 'analyze_iam_policy_longrunning', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +@requires_google_auth_gte_1_25_0 +def test_asset_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssetServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_asset_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssetServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_asset_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AssetServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_asset_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AssetServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_asset_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AssetServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_asset_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_asset_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AssetServiceGrpcTransport, grpc_helpers), + (transports.AssetServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_asset_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "cloudasset.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="cloudasset.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) +def test_asset_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_asset_service_host_no_port(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), + ) + assert client.transport._host == 'cloudasset.googleapis.com:443' + + +def test_asset_service_host_with_port(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), + ) + assert client.transport._host == 'cloudasset.googleapis.com:8000' + +def test_asset_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AssetServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_asset_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AssetServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) +def test_asset_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, 
+ credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) +def test_asset_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_asset_service_grpc_lro_client(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations 
client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_asset_service_grpc_lro_async_client(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_asset_path(): + expected = "*".format() + actual = AssetServiceClient.asset_path() + assert expected == actual + + +def test_parse_asset_path(): + expected = { + } + path = AssetServiceClient.asset_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_asset_path(path) + assert expected == actual + +def test_feed_path(): + project = "squid" + feed = "clam" + expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) + actual = AssetServiceClient.feed_path(project, feed) + assert expected == actual + + +def test_parse_feed_path(): + expected = { + "project": "whelk", + "feed": "octopus", + } + path = AssetServiceClient.feed_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_feed_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = AssetServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = AssetServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = AssetServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = AssetServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = AssetServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = AssetServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = AssetServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = AssetServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = AssetServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = AssetServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = AssetServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc new file mode 100644 index 000000000000..9fd3c4f8b3e4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/iam/credentials/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/credentials/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in new file mode 100644 index 000000000000..a17a81a99b35 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/iam/credentials *.py +recursive-include google/iam/credentials_v1 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/README.rst b/packages/gapic-generator/tests/integration/goldens/credentials/README.rst new file mode 100644 index 000000000000..b4de94145075 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Iam Credentials API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Iam Credentials API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py new file mode 100644 index 000000000000..8f9d83a8bfc4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-iam-credentials documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. 
+# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. 
+project = u"google-iam-credentials" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Iam Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-iam-credentials-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-iam-credentials.tex", + u"google-iam-credentials Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "google-iam-credentials", + u"Google Iam Credentials Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-iam-credentials", + u"google-iam-credentials Documentation", + author, + "google-iam-credentials", + "GAPIC library for Google Iam Credentials API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst new file mode 100644 index 000000000000..8b94f41ca39c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst @@ -0,0 +1,6 @@ +IAMCredentials +-------------------------------- + +.. 
automodule:: google.iam.credentials_v1.services.iam_credentials + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst new file mode 100644 index 000000000000..c47ca8150e25 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Iam Credentials v1 API +========================================== +.. toctree:: + :maxdepth: 2 + + iam_credentials diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst new file mode 100644 index 000000000000..97befa67ef15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Iam Credentials v1 API +======================================= + +.. automodule:: google.iam.credentials_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst new file mode 100644 index 000000000000..3e271990d6f9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + credentials_v1/services + credentials_v1/types diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py new file mode 100644 index 000000000000..1bfd4c8c0975 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.iam.credentials_v1.services.iam_credentials.client import IAMCredentialsClient +from google.iam.credentials_v1.services.iam_credentials.async_client import IAMCredentialsAsyncClient + +from google.iam.credentials_v1.types.common import GenerateAccessTokenRequest +from google.iam.credentials_v1.types.common import GenerateAccessTokenResponse +from google.iam.credentials_v1.types.common import GenerateIdTokenRequest +from google.iam.credentials_v1.types.common import GenerateIdTokenResponse +from google.iam.credentials_v1.types.common import SignBlobRequest +from google.iam.credentials_v1.types.common import SignBlobResponse +from google.iam.credentials_v1.types.common import SignJwtRequest +from google.iam.credentials_v1.types.common import SignJwtResponse + +__all__ = ('IAMCredentialsClient', + 'IAMCredentialsAsyncClient', + 'GenerateAccessTokenRequest', + 'GenerateAccessTokenResponse', + 'GenerateIdTokenRequest', + 'GenerateIdTokenResponse', + 'SignBlobRequest', + 'SignBlobResponse', + 'SignJwtRequest', + 'SignJwtResponse', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed new file mode 100644 index 000000000000..4d9bf557d038 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-iam-credentials package uses inline types. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py new file mode 100644 index 000000000000..02be17e16c05 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.iam_credentials import IAMCredentialsClient +from .services.iam_credentials import IAMCredentialsAsyncClient + +from .types.common import GenerateAccessTokenRequest +from .types.common import GenerateAccessTokenResponse +from .types.common import GenerateIdTokenRequest +from .types.common import GenerateIdTokenResponse +from .types.common import SignBlobRequest +from .types.common import SignBlobResponse +from .types.common import SignJwtRequest +from .types.common import SignJwtResponse + +__all__ = ( + 'IAMCredentialsAsyncClient', +'GenerateAccessTokenRequest', +'GenerateAccessTokenResponse', +'GenerateIdTokenRequest', +'GenerateIdTokenResponse', +'IAMCredentialsClient', +'SignBlobRequest', +'SignBlobResponse', +'SignJwtRequest', +'SignJwtResponse', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json new file mode 100644 index 000000000000..82b1d8ae9f6d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json @@ -0,0 +1,63 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.iam.credentials_v1", + "protoPackage": "google.iam.credentials.v1", + "schema": "1.0", + "services": { + "IAMCredentials": { + "clients": { + "grpc": { + "libraryClient": "IAMCredentialsClient", + "rpcs": { + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GenerateIdToken": { + "methods": [ + "generate_id_token" + ] + }, + "SignBlob": { + "methods": [ + "sign_blob" + ] + }, + "SignJwt": { + "methods": [ + "sign_jwt" + ] + } + } + }, + "grpc-async": { + "libraryClient": "IAMCredentialsAsyncClient", + "rpcs": { + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + 
"GenerateIdToken": { + "methods": [ + "generate_id_token" + ] + }, + "SignBlob": { + "methods": [ + "sign_blob" + ] + }, + "SignJwt": { + "methods": [ + "sign_jwt" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed new file mode 100644 index 000000000000..4d9bf557d038 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-iam-credentials package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py new file mode 100644 index 000000000000..9cd541f4dbe7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import IAMCredentialsClient +from .async_client import IAMCredentialsAsyncClient + +__all__ = ( + 'IAMCredentialsClient', + 'IAMCredentialsAsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py new file mode 100644 index 000000000000..a6390ec743fb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -0,0 +1,663 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.credentials_v1.types import common +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport +from .client import IAMCredentialsClient + + +class IAMCredentialsAsyncClient: + """A service account is a special type of Google account that + belongs to your application or a virtual machine (VM), instead + of to an individual end user. Your application assumes the + identity of the service account to call Google APIs, so that the + users aren't directly involved. + + Service account credentials are used to temporarily assume the + identity of the service account. Supported credential types + include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- + signed JSON Web Tokens (JWTs), and more. 
+ """ + + _client: IAMCredentialsClient + + DEFAULT_ENDPOINT = IAMCredentialsClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + + service_account_path = staticmethod(IAMCredentialsClient.service_account_path) + parse_service_account_path = staticmethod(IAMCredentialsClient.parse_service_account_path) + common_billing_account_path = staticmethod(IAMCredentialsClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(IAMCredentialsClient.parse_common_billing_account_path) + common_folder_path = staticmethod(IAMCredentialsClient.common_folder_path) + parse_common_folder_path = staticmethod(IAMCredentialsClient.parse_common_folder_path) + common_organization_path = staticmethod(IAMCredentialsClient.common_organization_path) + parse_common_organization_path = staticmethod(IAMCredentialsClient.parse_common_organization_path) + common_project_path = staticmethod(IAMCredentialsClient.common_project_path) + parse_common_project_path = staticmethod(IAMCredentialsClient.parse_common_project_path) + common_location_path = staticmethod(IAMCredentialsClient.common_location_path) + parse_common_location_path = staticmethod(IAMCredentialsClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IAMCredentialsAsyncClient: The constructed client. + """ + return IAMCredentialsClient.from_service_account_info.__func__(IAMCredentialsAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IAMCredentialsAsyncClient: The constructed client. + """ + return IAMCredentialsClient.from_service_account_file.__func__(IAMCredentialsAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> IAMCredentialsTransport: + """Returns the transport used by the client instance. + + Returns: + IAMCredentialsTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(IAMCredentialsClient).get_transport_class, type(IAMCredentialsClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, IAMCredentialsTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the iam credentials client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.IAMCredentialsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = IAMCredentialsClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def generate_access_token(self, + request: common.GenerateAccessTokenRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + scope: Sequence[str] = None, + lifetime: duration_pb2.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.GenerateAccessTokenResponse: + r"""Generates an OAuth 2.0 access token for a service + account. + + Args: + request (:class:`google.iam.credentials_v1.types.GenerateAccessTokenRequest`): + The request object. + name (:class:`str`): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (:class:`Sequence[str]`): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + scope (:class:`Sequence[str]`): + Required. Code to identify the scopes + to be included in the OAuth 2.0 access + token. See + https://developers.google.com/identity/protocols/googlescopes + for more information. + At least one value required. + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lifetime (:class:`google.protobuf.duration_pb2.Duration`): + The desired lifetime duration of the + access token in seconds. Must be set to + a value less than or equal to 3600 (1 + hour). If a value is not specified, the + token's lifetime will be set to a + default value of one hour. + + This corresponds to the ``lifetime`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.iam.credentials_v1.types.GenerateAccessTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, scope, lifetime]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = common.GenerateAccessTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if lifetime is not None: + request.lifetime = lifetime + if delegates: + request.delegates.extend(delegates) + if scope: + request.scope.extend(scope) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_access_token, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def generate_id_token(self, + request: common.GenerateIdTokenRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + audience: str = None, + include_email: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.GenerateIdTokenResponse: + r"""Generates an OpenID Connect ID token for a service + account. + + Args: + request (:class:`google.iam.credentials_v1.types.GenerateIdTokenRequest`): + The request object. + name (:class:`str`): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (:class:`Sequence[str]`): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience (:class:`str`): + Required. The audience for the token, + such as the API or account that this + token grants access to. + + This corresponds to the ``audience`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ include_email (:class:`bool`): + Include the service account email in the token. If set + to ``true``, the token will contain ``email`` and + ``email_verified`` claims. + + This corresponds to the ``include_email`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.GenerateIdTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, audience, include_email]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = common.GenerateIdTokenRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if audience is not None: + request.audience = audience + if include_email is not None: + request.include_email = include_email + if delegates: + request.delegates.extend(delegates) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.generate_id_token, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def sign_blob(self, + request: common.SignBlobRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + payload: bytes = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.SignBlobResponse: + r"""Signs a blob using a service account's system-managed + private key. + + Args: + request (:class:`google.iam.credentials_v1.types.SignBlobRequest`): + The request object. + name (:class:`str`): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (:class:`Sequence[str]`): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + payload (:class:`bytes`): + Required. 
The bytes to sign. + This corresponds to the ``payload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.SignBlobResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, payload]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = common.SignBlobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if payload is not None: + request.payload = payload + if delegates: + request.delegates.extend(delegates) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.sign_blob, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def sign_jwt(self, + request: common.SignJwtRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + payload: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.SignJwtResponse: + r"""Signs a JWT using a service account's system-managed + private key. + + Args: + request (:class:`google.iam.credentials_v1.types.SignJwtRequest`): + The request object. + name (:class:`str`): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (:class:`Sequence[str]`): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + payload (:class:`str`): + Required. The JWT payload to sign: a + JSON object that contains a JWT Claims + Set. + + This corresponds to the ``payload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.SignJwtResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, payload]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = common.SignJwtRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if payload is not None: + request.payload = payload + if delegates: + request.delegates.extend(delegates) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.sign_jwt, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-iam-credentials", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "IAMCredentialsAsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py new file mode 100644 index 000000000000..8fe89f514f8d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -0,0 +1,822 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.credentials_v1.types import common +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import IAMCredentialsGrpcTransport +from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport + + +class IAMCredentialsClientMeta(type): + """Metaclass for the IAMCredentials client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] + _transport_registry["grpc"] = IAMCredentialsGrpcTransport + _transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[IAMCredentialsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class IAMCredentialsClient(metaclass=IAMCredentialsClientMeta): + """A service account is a special type of Google account that + belongs to your application or a virtual machine (VM), instead + of to an individual end user. Your application assumes the + identity of the service account to call Google APIs, so that the + users aren't directly involved. + + Service account credentials are used to temporarily assume the + identity of the service account. Supported credential types + include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- + signed JSON Web Tokens (JWTs), and more. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "iamcredentials.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IAMCredentialsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + IAMCredentialsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> IAMCredentialsTransport: + """Returns the transport used by the client instance. + + Returns: + IAMCredentialsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def service_account_path(project: str,service_account: str,) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str,str]: + """Parses a service_account path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, IAMCredentialsTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the iam credentials client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, IAMCredentialsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, IAMCredentialsTransport): + # transport is a IAMCredentialsTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def generate_access_token(self, + request: common.GenerateAccessTokenRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + scope: Sequence[str] = None, + lifetime: duration_pb2.Duration = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.GenerateAccessTokenResponse: + r"""Generates an OAuth 2.0 access token for a service + account. 
+ + Args: + request (google.iam.credentials_v1.types.GenerateAccessTokenRequest): + The request object. + name (str): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + scope (Sequence[str]): + Required. Code to identify the scopes + to be included in the OAuth 2.0 access + token. See + https://developers.google.com/identity/protocols/googlescopes + for more information. + At least one value required. + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + lifetime (google.protobuf.duration_pb2.Duration): + The desired lifetime duration of the + access token in seconds. Must be set to + a value less than or equal to 3600 (1 + hour). If a value is not specified, the + token's lifetime will be set to a + default value of one hour. 
+ + This corresponds to the ``lifetime`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.GenerateAccessTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, scope, lifetime]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a common.GenerateAccessTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, common.GenerateAccessTokenRequest): + request = common.GenerateAccessTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if delegates is not None: + request.delegates = delegates + if scope is not None: + request.scope = scope + if lifetime is not None: + request.lifetime = lifetime + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.generate_access_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def generate_id_token(self, + request: common.GenerateIdTokenRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + audience: str = None, + include_email: bool = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.GenerateIdTokenResponse: + r"""Generates an OpenID Connect ID token for a service + account. + + Args: + request (google.iam.credentials_v1.types.GenerateIdTokenRequest): + The request object. + name (str): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + audience (str): + Required. The audience for the token, + such as the API or account that this + token grants access to. 
+ + This corresponds to the ``audience`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + include_email (bool): + Include the service account email in the token. If set + to ``true``, the token will contain ``email`` and + ``email_verified`` claims. + + This corresponds to the ``include_email`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.GenerateIdTokenResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, audience, include_email]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a common.GenerateIdTokenRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, common.GenerateIdTokenRequest): + request = common.GenerateIdTokenRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if delegates is not None: + request.delegates = delegates + if audience is not None: + request.audience = audience + if include_email is not None: + request.include_email = include_email + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.generate_id_token] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def sign_blob(self, + request: common.SignBlobRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + payload: bytes = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.SignBlobResponse: + r"""Signs a blob using a service account's system-managed + private key. + + Args: + request (google.iam.credentials_v1.types.SignBlobRequest): + The request object. + name (str): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. 
+ + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + payload (bytes): + Required. The bytes to sign. + This corresponds to the ``payload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.SignBlobResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, payload]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a common.SignBlobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, common.SignBlobRequest): + request = common.SignBlobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if delegates is not None: + request.delegates = delegates + if payload is not None: + request.payload = payload + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.sign_blob] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def sign_jwt(self, + request: common.SignJwtRequest = None, + *, + name: str = None, + delegates: Sequence[str] = None, + payload: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.SignJwtResponse: + r"""Signs a JWT using a service account's system-managed + private key. + + Args: + request (google.iam.credentials_v1.types.SignJwtRequest): + The request object. + name (str): + Required. The resource name of the service account for + which the credentials are requested, in the following + format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. + Each service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its + next service account in the chain. The last service + account in the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the + service account that is specified in the ``name`` field + of the request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it + with a project ID is invalid. + + This corresponds to the ``delegates`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + payload (str): + Required. The JWT payload to sign: a + JSON object that contains a JWT Claims + Set. + + This corresponds to the ``payload`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.iam.credentials_v1.types.SignJwtResponse: + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, delegates, payload]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a common.SignJwtRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, common.SignJwtRequest): + request = common.SignJwtRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if delegates is not None: + request.delegates = delegates + if payload is not None: + request.payload = payload + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.sign_jwt] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-iam-credentials", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "IAMCredentialsClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py new file mode 100644 index 000000000000..d4e5cd93f4c0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import IAMCredentialsTransport +from .grpc import IAMCredentialsGrpcTransport +from .grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] +_transport_registry['grpc'] = IAMCredentialsGrpcTransport +_transport_registry['grpc_asyncio'] = IAMCredentialsGrpcAsyncIOTransport + +__all__ = ( + 'IAMCredentialsTransport', + 'IAMCredentialsGrpcTransport', + 'IAMCredentialsGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py new file mode 100644 index 000000000000..a0f053c02a25 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore + +from google.iam.credentials_v1.types import common + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-iam-credentials', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class IAMCredentialsTransport(abc.ABC): + """Abstract transport class for IAMCredentials.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'iamcredentials.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.generate_access_token: gapic_v1.method.wrap_method( + self.generate_access_token, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.generate_id_token: gapic_v1.method.wrap_method( + self.generate_id_token, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.sign_blob: gapic_v1.method.wrap_method( + self.sign_blob, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.sign_jwt: gapic_v1.method.wrap_method( + self.sign_jwt, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def generate_access_token(self) -> Callable[ + [common.GenerateAccessTokenRequest], + Union[ + common.GenerateAccessTokenResponse, + Awaitable[common.GenerateAccessTokenResponse] + ]]: + raise NotImplementedError() + + @property + def generate_id_token(self) -> Callable[ + [common.GenerateIdTokenRequest], + Union[ + common.GenerateIdTokenResponse, + Awaitable[common.GenerateIdTokenResponse] + ]]: + raise NotImplementedError() + + @property + def sign_blob(self) -> Callable[ + [common.SignBlobRequest], + Union[ + common.SignBlobResponse, + Awaitable[common.SignBlobResponse] + ]]: + raise NotImplementedError() + + @property + def sign_jwt(self) -> Callable[ + [common.SignJwtRequest], + Union[ + common.SignJwtResponse, + Awaitable[common.SignJwtResponse] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'IAMCredentialsTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py new file mode 100644 index 000000000000..64e38cb3ee9c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.iam.credentials_v1.types import common +from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO + + +class IAMCredentialsGrpcTransport(IAMCredentialsTransport): + """gRPC backend transport for IAMCredentials. + + A service account is a special type of Google account that + belongs to your application or a virtual machine (VM), instead + of to an individual end user. Your application assumes the + identity of the service account to call Google APIs, so that the + users aren't directly involved. + + Service account credentials are used to temporarily assume the + identity of the service account. Supported credential types + include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- + signed JSON Web Tokens (JWTs), and more. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'iamcredentials.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'iamcredentials.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def generate_access_token(self) -> Callable[ + [common.GenerateAccessTokenRequest], + common.GenerateAccessTokenResponse]: + r"""Return a callable for the generate access token method over gRPC. + + Generates an OAuth 2.0 access token for a service + account. + + Returns: + Callable[[~.GenerateAccessTokenRequest], + ~.GenerateAccessTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'generate_access_token' not in self._stubs: + self._stubs['generate_access_token'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', + request_serializer=common.GenerateAccessTokenRequest.serialize, + response_deserializer=common.GenerateAccessTokenResponse.deserialize, + ) + return self._stubs['generate_access_token'] + + @property + def generate_id_token(self) -> Callable[ + [common.GenerateIdTokenRequest], + common.GenerateIdTokenResponse]: + r"""Return a callable for the generate id token method over gRPC. + + Generates an OpenID Connect ID token for a service + account. + + Returns: + Callable[[~.GenerateIdTokenRequest], + ~.GenerateIdTokenResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_id_token' not in self._stubs: + self._stubs['generate_id_token'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', + request_serializer=common.GenerateIdTokenRequest.serialize, + response_deserializer=common.GenerateIdTokenResponse.deserialize, + ) + return self._stubs['generate_id_token'] + + @property + def sign_blob(self) -> Callable[ + [common.SignBlobRequest], + common.SignBlobResponse]: + r"""Return a callable for the sign blob method over gRPC. + + Signs a blob using a service account's system-managed + private key. + + Returns: + Callable[[~.SignBlobRequest], + ~.SignBlobResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'sign_blob' not in self._stubs: + self._stubs['sign_blob'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/SignBlob', + request_serializer=common.SignBlobRequest.serialize, + response_deserializer=common.SignBlobResponse.deserialize, + ) + return self._stubs['sign_blob'] + + @property + def sign_jwt(self) -> Callable[ + [common.SignJwtRequest], + common.SignJwtResponse]: + r"""Return a callable for the sign jwt method over gRPC. + + Signs a JWT using a service account's system-managed + private key. + + Returns: + Callable[[~.SignJwtRequest], + ~.SignJwtResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'sign_jwt' not in self._stubs: + self._stubs['sign_jwt'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/SignJwt', + request_serializer=common.SignJwtRequest.serialize, + response_deserializer=common.SignJwtResponse.deserialize, + ) + return self._stubs['sign_jwt'] + + +__all__ = ( + 'IAMCredentialsGrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py new file mode 100644 index 000000000000..b1748ed0b386 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.credentials_v1.types import common +from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from .grpc import IAMCredentialsGrpcTransport + + +class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport): + """gRPC AsyncIO backend transport for IAMCredentials. + + A service account is a special type of Google account that + belongs to your application or a virtual machine (VM), instead + of to an individual end user. Your application assumes the + identity of the service account to call Google APIs, so that the + users aren't directly involved. + + Service account credentials are used to temporarily assume the + identity of the service account. Supported credential types + include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- + signed JSON Web Tokens (JWTs), and more. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'iamcredentials.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'iamcredentials.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def generate_access_token(self) -> Callable[ + [common.GenerateAccessTokenRequest], + Awaitable[common.GenerateAccessTokenResponse]]: + r"""Return a callable for the generate access token method over gRPC. + + Generates an OAuth 2.0 access token for a service + account. 
+ + Returns: + Callable[[~.GenerateAccessTokenRequest], + Awaitable[~.GenerateAccessTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_access_token' not in self._stubs: + self._stubs['generate_access_token'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', + request_serializer=common.GenerateAccessTokenRequest.serialize, + response_deserializer=common.GenerateAccessTokenResponse.deserialize, + ) + return self._stubs['generate_access_token'] + + @property + def generate_id_token(self) -> Callable[ + [common.GenerateIdTokenRequest], + Awaitable[common.GenerateIdTokenResponse]]: + r"""Return a callable for the generate id token method over gRPC. + + Generates an OpenID Connect ID token for a service + account. + + Returns: + Callable[[~.GenerateIdTokenRequest], + Awaitable[~.GenerateIdTokenResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'generate_id_token' not in self._stubs: + self._stubs['generate_id_token'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', + request_serializer=common.GenerateIdTokenRequest.serialize, + response_deserializer=common.GenerateIdTokenResponse.deserialize, + ) + return self._stubs['generate_id_token'] + + @property + def sign_blob(self) -> Callable[ + [common.SignBlobRequest], + Awaitable[common.SignBlobResponse]]: + r"""Return a callable for the sign blob method over gRPC. + + Signs a blob using a service account's system-managed + private key. 
+ + Returns: + Callable[[~.SignBlobRequest], + Awaitable[~.SignBlobResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'sign_blob' not in self._stubs: + self._stubs['sign_blob'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/SignBlob', + request_serializer=common.SignBlobRequest.serialize, + response_deserializer=common.SignBlobResponse.deserialize, + ) + return self._stubs['sign_blob'] + + @property + def sign_jwt(self) -> Callable[ + [common.SignJwtRequest], + Awaitable[common.SignJwtResponse]]: + r"""Return a callable for the sign jwt method over gRPC. + + Signs a JWT using a service account's system-managed + private key. + + Returns: + Callable[[~.SignJwtRequest], + Awaitable[~.SignJwtResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'sign_jwt' not in self._stubs: + self._stubs['sign_jwt'] = self.grpc_channel.unary_unary( + '/google.iam.credentials.v1.IAMCredentials/SignJwt', + request_serializer=common.SignJwtRequest.serialize, + response_deserializer=common.SignJwtResponse.deserialize, + ) + return self._stubs['sign_jwt'] + + +__all__ = ( + 'IAMCredentialsGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py new file mode 100644 index 000000000000..40f194ee4a4b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .common import ( + GenerateAccessTokenRequest, + GenerateAccessTokenResponse, + GenerateIdTokenRequest, + GenerateIdTokenResponse, + SignBlobRequest, + SignBlobResponse, + SignJwtRequest, + SignJwtResponse, +) + +__all__ = ( + 'GenerateAccessTokenRequest', + 'GenerateAccessTokenResponse', + 'GenerateIdTokenRequest', + 'GenerateIdTokenResponse', + 'SignBlobRequest', + 'SignBlobResponse', + 'SignJwtRequest', + 'SignJwtResponse', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py new file mode 100644 index 000000000000..17bb1ec37111 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -0,0 +1,299 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.iam.credentials.v1', + manifest={ + 'GenerateAccessTokenRequest', + 'GenerateAccessTokenResponse', + 'SignBlobRequest', + 'SignBlobResponse', + 'SignJwtRequest', + 'SignJwtResponse', + 'GenerateIdTokenRequest', + 'GenerateIdTokenResponse', + }, +) + + +class GenerateAccessTokenRequest(proto.Message): + r""" + Attributes: + name (str): + Required. The resource name of the service account for which + the credentials are requested, in the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. Each + service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its next + service account in the chain. The last service account in + the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the service + account that is specified in the ``name`` field of the + request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + scope (Sequence[str]): + Required. Code to identify the scopes to be + included in the OAuth 2.0 access token. See + https://developers.google.com/identity/protocols/googlescopes + for more information. + At least one value required. + lifetime (google.protobuf.duration_pb2.Duration): + The desired lifetime duration of the access + token in seconds. Must be set to a value less + than or equal to 3600 (1 hour). If a value is + not specified, the token's lifetime will be set + to a default value of one hour. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + delegates = proto.RepeatedField( + proto.STRING, + number=2, + ) + scope = proto.RepeatedField( + proto.STRING, + number=4, + ) + lifetime = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + + +class GenerateAccessTokenResponse(proto.Message): + r""" + Attributes: + access_token (str): + The OAuth 2.0 access token. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Token expiration time. + The expiration time is always set. + """ + + access_token = proto.Field( + proto.STRING, + number=1, + ) + expire_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class SignBlobRequest(proto.Message): + r""" + Attributes: + name (str): + Required. The resource name of the service account for which + the credentials are requested, in the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. Each + service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its next + service account in the chain. The last service account in + the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the service + account that is specified in the ``name`` field of the + request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + payload (bytes): + Required. The bytes to sign. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + delegates = proto.RepeatedField( + proto.STRING, + number=3, + ) + payload = proto.Field( + proto.BYTES, + number=5, + ) + + +class SignBlobResponse(proto.Message): + r""" + Attributes: + key_id (str): + The ID of the key used to sign the blob. + signed_blob (bytes): + The signed blob. + """ + + key_id = proto.Field( + proto.STRING, + number=1, + ) + signed_blob = proto.Field( + proto.BYTES, + number=4, + ) + + +class SignJwtRequest(proto.Message): + r""" + Attributes: + name (str): + Required. The resource name of the service account for which + the credentials are requested, in the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. Each + service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its next + service account in the chain. The last service account in + the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the service + account that is specified in the ``name`` field of the + request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + payload (str): + Required. The JWT payload to sign: a JSON + object that contains a JWT Claims Set. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + delegates = proto.RepeatedField( + proto.STRING, + number=3, + ) + payload = proto.Field( + proto.STRING, + number=5, + ) + + +class SignJwtResponse(proto.Message): + r""" + Attributes: + key_id (str): + The ID of the key used to sign the JWT. + signed_jwt (str): + The signed JWT. 
+ """ + + key_id = proto.Field( + proto.STRING, + number=1, + ) + signed_jwt = proto.Field( + proto.STRING, + number=2, + ) + + +class GenerateIdTokenRequest(proto.Message): + r""" + Attributes: + name (str): + Required. The resource name of the service account for which + the credentials are requested, in the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + delegates (Sequence[str]): + The sequence of service accounts in a delegation chain. Each + service account must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on its next + service account in the chain. The last service account in + the chain must be granted the + ``roles/iam.serviceAccountTokenCreator`` role on the service + account that is specified in the ``name`` field of the + request. + + The delegates must have the following format: + ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. + The ``-`` wildcard character is required; replacing it with + a project ID is invalid. + audience (str): + Required. The audience for the token, such as + the API or account that this token grants access + to. + include_email (bool): + Include the service account email in the token. If set to + ``true``, the token will contain ``email`` and + ``email_verified`` claims. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + delegates = proto.RepeatedField( + proto.STRING, + number=2, + ) + audience = proto.Field( + proto.STRING, + number=3, + ) + include_email = proto.Field( + proto.BOOL, + number=4, + ) + + +class GenerateIdTokenResponse(proto.Message): + r""" + Attributes: + token (str): + The OpenId Connect ID token. 
+ """ + + token = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py new file mode 100644 index 000000000000..14f0e8ae5fb8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import proto  # type: ignore
+
+__protobuf__ = proto.module(
+    package='google.iam.credentials.v1',
+    manifest={
+    },
+)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini b/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini
new file mode 100644
index 000000000000..4505b485436b
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py
new file mode 100644
index 000000000000..7d27f4d101a7
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8").strip()
+
+
+nox.options.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+]
+
+@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/iam/credentials_v1/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python='3.7')
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py new file mode 100644 index 000000000000..f9e01419c920 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py @@ 
-0,0 +1,179 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class credentialsCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'generate_access_token': ('name', 'scope', 'delegates', 'lifetime', ), + 'generate_id_token': ('name', 'audience', 'delegates', 'include_email', ), + 'sign_blob': ('name', 'payload', 'delegates', ), + 'sign_jwt': ('name', 'payload', 'delegates', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. 
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=credentialsCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the credentials client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py new file mode 100644 index 000000000000..fe435284e2cc --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-iam-credentials', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.iam'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py new file mode 
100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py new file mode 100644 index 000000000000..681a98bfb2f9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -0,0 +1,1910 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsAsyncClient +from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsClient +from google.iam.credentials_v1.services.iam_credentials import transports +from google.iam.credentials_v1.services.iam_credentials.transports.base import _GOOGLE_AUTH_VERSION +from google.iam.credentials_v1.types import common +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert IAMCredentialsClient._get_default_mtls_endpoint(None) is None + assert IAMCredentialsClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert IAMCredentialsClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert IAMCredentialsClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert IAMCredentialsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert IAMCredentialsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + IAMCredentialsClient, + IAMCredentialsAsyncClient, +]) +def test_iam_credentials_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'iamcredentials.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + IAMCredentialsClient, + IAMCredentialsAsyncClient, +]) +def test_iam_credentials_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'iamcredentials.googleapis.com:443' + + +def test_iam_credentials_client_get_transport_class(): + transport = IAMCredentialsClient.get_transport_class() + available_transports = [ + transports.IAMCredentialsGrpcTransport, + ] + assert transport in available_transports + + transport = IAMCredentialsClient.get_transport_class("grpc") + assert transport == transports.IAMCredentialsGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +def test_iam_credentials_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(IAMCredentialsClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(IAMCredentialsClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "true"), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "false"), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_iam_credentials_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_iam_credentials_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_iam_credentials_client_client_options_from_dict(): + with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = IAMCredentialsClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_generate_access_token(transport: str = 'grpc', request_type=common.GenerateAccessTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.GenerateAccessTokenResponse( + access_token='access_token_value', + ) + response = client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateAccessTokenResponse) + assert response.access_token == 'access_token_value' + + +def test_generate_access_token_from_dict(): + test_generate_access_token(request_type=dict) + + +def test_generate_access_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + client.generate_access_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( + access_token='access_token_value', + )) + response = await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateAccessTokenResponse) + assert response.access_token == 'access_token_value' + + +@pytest.mark.asyncio +async def test_generate_access_token_async_from_dict(): + await test_generate_access_token_async(request_type=dict) + + +def test_generate_access_token_field_headers(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.GenerateAccessTokenRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + call.return_value = common.GenerateAccessTokenResponse() + client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_generate_access_token_field_headers_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.GenerateAccessTokenRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse()) + await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_generate_access_token_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.GenerateAccessTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_access_token( + name='name_value', + delegates=['delegates_value'], + scope=['scope_value'], + lifetime=duration_pb2.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].scope == ['scope_value'] + assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) + + +def test_generate_access_token_flattened_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.generate_access_token( + common.GenerateAccessTokenRequest(), + name='name_value', + delegates=['delegates_value'], + scope=['scope_value'], + lifetime=duration_pb2.Duration(seconds=751), + ) + + +@pytest.mark.asyncio +async def test_generate_access_token_flattened_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.GenerateAccessTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_access_token( + name='name_value', + delegates=['delegates_value'], + scope=['scope_value'], + lifetime=duration_pb2.Duration(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].scope == ['scope_value'] + assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) + + +@pytest.mark.asyncio +async def test_generate_access_token_flattened_error_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.generate_access_token( + common.GenerateAccessTokenRequest(), + name='name_value', + delegates=['delegates_value'], + scope=['scope_value'], + lifetime=duration_pb2.Duration(seconds=751), + ) + + +def test_generate_id_token(transport: str = 'grpc', request_type=common.GenerateIdTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.GenerateIdTokenResponse( + token='token_value', + ) + response = client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateIdTokenResponse) + assert response.token == 'token_value' + + +def test_generate_id_token_from_dict(): + test_generate_id_token(request_type=dict) + + +def test_generate_id_token_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + client.generate_id_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest() + + +@pytest.mark.asyncio +async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( + token='token_value', + )) + response = await client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateIdTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.asyncio +async def test_generate_id_token_async_from_dict(): + await test_generate_id_token_async(request_type=dict) + + +def test_generate_id_token_field_headers(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.GenerateIdTokenRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + call.return_value = common.GenerateIdTokenResponse() + client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_generate_id_token_field_headers_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.GenerateIdTokenRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse()) + await client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_generate_id_token_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = common.GenerateIdTokenResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.generate_id_token( + name='name_value', + delegates=['delegates_value'], + audience='audience_value', + include_email=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].audience == 'audience_value' + assert args[0].include_email == True + + +def test_generate_id_token_flattened_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_id_token( + common.GenerateIdTokenRequest(), + name='name_value', + delegates=['delegates_value'], + audience='audience_value', + include_email=True, + ) + + +@pytest.mark.asyncio +async def test_generate_id_token_flattened_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.GenerateIdTokenResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.generate_id_token( + name='name_value', + delegates=['delegates_value'], + audience='audience_value', + include_email=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].audience == 'audience_value' + assert args[0].include_email == True + + +@pytest.mark.asyncio +async def test_generate_id_token_flattened_error_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.generate_id_token( + common.GenerateIdTokenRequest(), + name='name_value', + delegates=['delegates_value'], + audience='audience_value', + include_email=True, + ) + + +def test_sign_blob(transport: str = 'grpc', request_type=common.SignBlobRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + ) + response = client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.SignBlobResponse) + assert response.key_id == 'key_id_value' + assert response.signed_blob == b'signed_blob_blob' + + +def test_sign_blob_from_dict(): + test_sign_blob(request_type=dict) + + +def test_sign_blob_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + client.sign_blob() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest() + + +@pytest.mark.asyncio +async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + )) + response = await client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.SignBlobResponse) + assert response.key_id == 'key_id_value' + assert response.signed_blob == b'signed_blob_blob' + + +@pytest.mark.asyncio +async def test_sign_blob_async_from_dict(): + await test_sign_blob_async(request_type=dict) + + +def test_sign_blob_field_headers(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.SignBlobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + call.return_value = common.SignBlobResponse() + client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_sign_blob_field_headers_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.SignBlobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse()) + await client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_sign_blob_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.SignBlobResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.sign_blob( + name='name_value', + delegates=['delegates_value'], + payload=b'payload_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].payload == b'payload_blob' + + +def test_sign_blob_flattened_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.sign_blob( + common.SignBlobRequest(), + name='name_value', + delegates=['delegates_value'], + payload=b'payload_blob', + ) + + +@pytest.mark.asyncio +async def test_sign_blob_flattened_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = common.SignBlobResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.sign_blob( + name='name_value', + delegates=['delegates_value'], + payload=b'payload_blob', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].payload == b'payload_blob' + + +@pytest.mark.asyncio +async def test_sign_blob_flattened_error_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.sign_blob( + common.SignBlobRequest(), + name='name_value', + delegates=['delegates_value'], + payload=b'payload_blob', + ) + + +def test_sign_jwt(transport: str = 'grpc', request_type=common.SignJwtRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + ) + response = client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, common.SignJwtResponse) + assert response.key_id == 'key_id_value' + assert response.signed_jwt == 'signed_jwt_value' + + +def test_sign_jwt_from_dict(): + test_sign_jwt(request_type=dict) + + +def test_sign_jwt_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + client.sign_jwt() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest() + + +@pytest.mark.asyncio +async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + )) + response = await client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, common.SignJwtResponse) + assert response.key_id == 'key_id_value' + assert response.signed_jwt == 'signed_jwt_value' + + +@pytest.mark.asyncio +async def test_sign_jwt_async_from_dict(): + await test_sign_jwt_async(request_type=dict) + + +def test_sign_jwt_field_headers(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.SignJwtRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + call.return_value = common.SignJwtResponse() + client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_sign_jwt_field_headers_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = common.SignJwtRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse()) + await client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_sign_jwt_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = common.SignJwtResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.sign_jwt( + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].payload == 'payload_value' + + +def test_sign_jwt_flattened_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.sign_jwt( + common.SignJwtRequest(), + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + + +@pytest.mark.asyncio +async def test_sign_jwt_flattened_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = common.SignJwtResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.sign_jwt( + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].delegates == ['delegates_value'] + assert args[0].payload == 'payload_value' + + +@pytest.mark.asyncio +async def test_sign_jwt_flattened_error_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.sign_jwt( + common.SignJwtRequest(), + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = IAMCredentialsClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.IAMCredentialsGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.IAMCredentialsGrpcTransport, + ) + +def test_iam_credentials_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.IAMCredentialsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_iam_credentials_base_transport(): + # Instantiate the base transport. + with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.IAMCredentialsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'generate_access_token', + 'generate_id_token', + 'sign_blob', + 'sign_jwt', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_iam_credentials_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IAMCredentialsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + 
+@requires_google_auth_lt_1_25_0 +def test_iam_credentials_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IAMCredentialsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_iam_credentials_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.IAMCredentialsTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_iam_credentials_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + IAMCredentialsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_iam_credentials_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + IAMCredentialsClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_iam_credentials_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_iam_credentials_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.IAMCredentialsGrpcTransport, grpc_helpers), + (transports.IAMCredentialsGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "iamcredentials.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="iamcredentials.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) +def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_iam_credentials_host_no_port(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com'), + ) + assert client.transport._host == 'iamcredentials.googleapis.com:443' + + +def test_iam_credentials_host_with_port(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com:8000'), + ) + assert client.transport._host == 'iamcredentials.googleapis.com:8000' + +def test_iam_credentials_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.IAMCredentialsGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_iam_credentials_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.IAMCredentialsGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) +def test_iam_credentials_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + 
credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) +def test_iam_credentials_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + 
actual = IAMCredentialsClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = IAMCredentialsClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = IAMCredentialsClient.parse_service_account_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = IAMCredentialsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = IAMCredentialsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = IAMCredentialsClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = IAMCredentialsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = IAMCredentialsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = IAMCredentialsClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = IAMCredentialsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = IAMCredentialsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = IAMCredentialsClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = IAMCredentialsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = IAMCredentialsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = IAMCredentialsClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = IAMCredentialsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = IAMCredentialsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = IAMCredentialsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: + transport_class = IAMCredentialsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc new file mode 100644 index 000000000000..b38d22e21fd1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/logging/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/logging/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/logging/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in new file mode 100644 index 000000000000..f8c276f2cce8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/logging *.py +recursive-include google/cloud/logging_v2 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/logging/README.rst b/packages/gapic-generator/tests/integration/goldens/logging/README.rst new file mode 100644 index 000000000000..56aa7d0a8ad9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Logging API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Logging API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py
new file mode 100644
index 000000000000..eb6783779012
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-logging documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex

+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-logging" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format.
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder.
+htmlhelp_basename = "google-cloud-logging-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-logging.tex", + u"google-cloud-logging Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "google-cloud-logging", + u"Google Cloud Logging Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-logging", + u"google-cloud-logging Documentation", + author, + "google-cloud-logging", + "GAPIC library for Google Cloud Logging API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst new file mode 100644 index 000000000000..6a4859643f45 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + logging_v2/services + logging_v2/types diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst new file mode 100644 index 000000000000..f7c0a7701de1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst @@ -0,0 +1,10 @@ +ConfigServiceV2 +--------------------------------- + +.. 
automodule:: google.cloud.logging_v2.services.config_service_v2 + :members: + :inherited-members: + +.. automodule:: google.cloud.logging_v2.services.config_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst new file mode 100644 index 000000000000..f41c0c89b78c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst @@ -0,0 +1,10 @@ +LoggingServiceV2 +---------------------------------- + +.. automodule:: google.cloud.logging_v2.services.logging_service_v2 + :members: + :inherited-members: + +.. automodule:: google.cloud.logging_v2.services.logging_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst new file mode 100644 index 000000000000..fd4d9bc7d9ba --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst @@ -0,0 +1,10 @@ +MetricsServiceV2 +---------------------------------- + +.. automodule:: google.cloud.logging_v2.services.metrics_service_v2 + :members: + :inherited-members: + +.. 
automodule:: google.cloud.logging_v2.services.metrics_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst new file mode 100644 index 000000000000..d7a0471b13c3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst @@ -0,0 +1,8 @@ +Services for Google Cloud Logging v2 API +======================================== +.. toctree:: + :maxdepth: 2 + + config_service_v2 + logging_service_v2 + metrics_service_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst new file mode 100644 index 000000000000..843c0dc370d4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Logging v2 API +===================================== + +.. automodule:: google.cloud.logging_v2.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py new file mode 100644 index 000000000000..16e3d0cc06cf --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.logging_v2.services.config_service_v2.client import ConfigServiceV2Client +from google.cloud.logging_v2.services.config_service_v2.async_client import ConfigServiceV2AsyncClient +from google.cloud.logging_v2.services.logging_service_v2.client import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2.async_client import LoggingServiceV2AsyncClient +from google.cloud.logging_v2.services.metrics_service_v2.client import MetricsServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2.async_client import MetricsServiceV2AsyncClient + +from google.cloud.logging_v2.types.log_entry import LogEntry +from google.cloud.logging_v2.types.log_entry import LogEntryOperation +from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation +from google.cloud.logging_v2.types.logging import DeleteLogRequest +from google.cloud.logging_v2.types.logging import ListLogEntriesRequest +from google.cloud.logging_v2.types.logging import ListLogEntriesResponse +from google.cloud.logging_v2.types.logging import ListLogsRequest +from google.cloud.logging_v2.types.logging import ListLogsResponse +from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsRequest +from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsResponse +from google.cloud.logging_v2.types.logging import TailLogEntriesRequest +from google.cloud.logging_v2.types.logging import TailLogEntriesResponse +from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors +from 
google.cloud.logging_v2.types.logging import WriteLogEntriesRequest +from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse +from google.cloud.logging_v2.types.logging_config import BigQueryOptions +from google.cloud.logging_v2.types.logging_config import CmekSettings +from google.cloud.logging_v2.types.logging_config import CreateBucketRequest +from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest +from google.cloud.logging_v2.types.logging_config import CreateSinkRequest +from google.cloud.logging_v2.types.logging_config import CreateViewRequest +from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest +from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest +from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest +from google.cloud.logging_v2.types.logging_config import DeleteViewRequest +from google.cloud.logging_v2.types.logging_config import GetBucketRequest +from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest +from google.cloud.logging_v2.types.logging_config import GetExclusionRequest +from google.cloud.logging_v2.types.logging_config import GetSinkRequest +from google.cloud.logging_v2.types.logging_config import GetViewRequest +from google.cloud.logging_v2.types.logging_config import ListBucketsRequest +from google.cloud.logging_v2.types.logging_config import ListBucketsResponse +from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest +from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse +from google.cloud.logging_v2.types.logging_config import ListSinksRequest +from google.cloud.logging_v2.types.logging_config import ListSinksResponse +from google.cloud.logging_v2.types.logging_config import ListViewsRequest +from google.cloud.logging_v2.types.logging_config import ListViewsResponse +from google.cloud.logging_v2.types.logging_config import LogBucket +from 
google.cloud.logging_v2.types.logging_config import LogExclusion +from google.cloud.logging_v2.types.logging_config import LogSink +from google.cloud.logging_v2.types.logging_config import LogView +from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest +from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest +from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest +from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest +from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest +from google.cloud.logging_v2.types.logging_config import UpdateViewRequest +from google.cloud.logging_v2.types.logging_config import LifecycleState +from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsRequest +from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsResponse +from google.cloud.logging_v2.types.logging_metrics import LogMetric +from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest + +__all__ = ('ConfigServiceV2Client', + 'ConfigServiceV2AsyncClient', + 'LoggingServiceV2Client', + 'LoggingServiceV2AsyncClient', + 'MetricsServiceV2Client', + 'MetricsServiceV2AsyncClient', + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'DeleteLogRequest', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'BigQueryOptions', + 'CmekSettings', + 'CreateBucketRequest', + 
'CreateExclusionRequest', + 'CreateSinkRequest', + 'CreateViewRequest', + 'DeleteBucketRequest', + 'DeleteExclusionRequest', + 'DeleteSinkRequest', + 'DeleteViewRequest', + 'GetBucketRequest', + 'GetCmekSettingsRequest', + 'GetExclusionRequest', + 'GetSinkRequest', + 'GetViewRequest', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'ListSinksRequest', + 'ListSinksResponse', + 'ListViewsRequest', + 'ListViewsResponse', + 'LogBucket', + 'LogExclusion', + 'LogSink', + 'LogView', + 'UndeleteBucketRequest', + 'UpdateBucketRequest', + 'UpdateCmekSettingsRequest', + 'UpdateExclusionRequest', + 'UpdateSinkRequest', + 'UpdateViewRequest', + 'LifecycleState', + 'CreateLogMetricRequest', + 'DeleteLogMetricRequest', + 'GetLogMetricRequest', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'LogMetric', + 'UpdateLogMetricRequest', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed new file mode 100644 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py new file mode 100644 index 000000000000..1dc1e1eac254 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -0,0 +1,144 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.config_service_v2 import ConfigServiceV2Client +from .services.config_service_v2 import ConfigServiceV2AsyncClient +from .services.logging_service_v2 import LoggingServiceV2Client +from .services.logging_service_v2 import LoggingServiceV2AsyncClient +from .services.metrics_service_v2 import MetricsServiceV2Client +from .services.metrics_service_v2 import MetricsServiceV2AsyncClient + +from .types.log_entry import LogEntry +from .types.log_entry import LogEntryOperation +from .types.log_entry import LogEntrySourceLocation +from .types.logging import DeleteLogRequest +from .types.logging import ListLogEntriesRequest +from .types.logging import ListLogEntriesResponse +from .types.logging import ListLogsRequest +from .types.logging import ListLogsResponse +from .types.logging import ListMonitoredResourceDescriptorsRequest +from .types.logging import ListMonitoredResourceDescriptorsResponse +from .types.logging import TailLogEntriesRequest +from .types.logging import TailLogEntriesResponse +from .types.logging import WriteLogEntriesPartialErrors +from .types.logging import WriteLogEntriesRequest +from .types.logging import WriteLogEntriesResponse +from .types.logging_config import BigQueryOptions +from .types.logging_config import CmekSettings +from .types.logging_config import CreateBucketRequest +from .types.logging_config import CreateExclusionRequest +from .types.logging_config import CreateSinkRequest +from .types.logging_config import CreateViewRequest +from .types.logging_config import DeleteBucketRequest +from .types.logging_config 
import DeleteExclusionRequest +from .types.logging_config import DeleteSinkRequest +from .types.logging_config import DeleteViewRequest +from .types.logging_config import GetBucketRequest +from .types.logging_config import GetCmekSettingsRequest +from .types.logging_config import GetExclusionRequest +from .types.logging_config import GetSinkRequest +from .types.logging_config import GetViewRequest +from .types.logging_config import ListBucketsRequest +from .types.logging_config import ListBucketsResponse +from .types.logging_config import ListExclusionsRequest +from .types.logging_config import ListExclusionsResponse +from .types.logging_config import ListSinksRequest +from .types.logging_config import ListSinksResponse +from .types.logging_config import ListViewsRequest +from .types.logging_config import ListViewsResponse +from .types.logging_config import LogBucket +from .types.logging_config import LogExclusion +from .types.logging_config import LogSink +from .types.logging_config import LogView +from .types.logging_config import UndeleteBucketRequest +from .types.logging_config import UpdateBucketRequest +from .types.logging_config import UpdateCmekSettingsRequest +from .types.logging_config import UpdateExclusionRequest +from .types.logging_config import UpdateSinkRequest +from .types.logging_config import UpdateViewRequest +from .types.logging_config import LifecycleState +from .types.logging_metrics import CreateLogMetricRequest +from .types.logging_metrics import DeleteLogMetricRequest +from .types.logging_metrics import GetLogMetricRequest +from .types.logging_metrics import ListLogMetricsRequest +from .types.logging_metrics import ListLogMetricsResponse +from .types.logging_metrics import LogMetric +from .types.logging_metrics import UpdateLogMetricRequest + +__all__ = ( + 'ConfigServiceV2AsyncClient', + 'LoggingServiceV2AsyncClient', + 'MetricsServiceV2AsyncClient', +'BigQueryOptions', +'CmekSettings', +'ConfigServiceV2Client', +'CreateBucketRequest', 
+'CreateExclusionRequest', +'CreateLogMetricRequest', +'CreateSinkRequest', +'CreateViewRequest', +'DeleteBucketRequest', +'DeleteExclusionRequest', +'DeleteLogMetricRequest', +'DeleteLogRequest', +'DeleteSinkRequest', +'DeleteViewRequest', +'GetBucketRequest', +'GetCmekSettingsRequest', +'GetExclusionRequest', +'GetLogMetricRequest', +'GetSinkRequest', +'GetViewRequest', +'LifecycleState', +'ListBucketsRequest', +'ListBucketsResponse', +'ListExclusionsRequest', +'ListExclusionsResponse', +'ListLogEntriesRequest', +'ListLogEntriesResponse', +'ListLogMetricsRequest', +'ListLogMetricsResponse', +'ListLogsRequest', +'ListLogsResponse', +'ListMonitoredResourceDescriptorsRequest', +'ListMonitoredResourceDescriptorsResponse', +'ListSinksRequest', +'ListSinksResponse', +'ListViewsRequest', +'ListViewsResponse', +'LogBucket', +'LogEntry', +'LogEntryOperation', +'LogEntrySourceLocation', +'LogExclusion', +'LogMetric', +'LogSink', +'LogView', +'LoggingServiceV2Client', +'MetricsServiceV2Client', +'TailLogEntriesRequest', +'TailLogEntriesResponse', +'UndeleteBucketRequest', +'UpdateBucketRequest', +'UpdateCmekSettingsRequest', +'UpdateExclusionRequest', +'UpdateLogMetricRequest', +'UpdateSinkRequest', +'UpdateViewRequest', +'WriteLogEntriesPartialErrors', +'WriteLogEntriesRequest', +'WriteLogEntriesResponse', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json new file mode 100644 index 000000000000..da4eefd477fc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json @@ -0,0 +1,391 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.logging_v2", + "protoPackage": "google.logging.v2", + "schema": "1.0", + "services": { + 
"ConfigServiceV2": { + "clients": { + "grpc": { + "libraryClient": "ConfigServiceV2Client", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + }, + "grpc-async": { + "libraryClient": "ConfigServiceV2AsyncClient", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + 
"methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } + } + } + }, + "LoggingServiceV2": { + "clients": { + "grpc": { + "libraryClient": "LoggingServiceV2Client", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + }, + "grpc-async": { + "libraryClient": "LoggingServiceV2AsyncClient", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + 
"WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + } + } + }, + "MetricsServiceV2": { + "clients": { + "grpc": { + "libraryClient": "MetricsServiceV2Client", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + }, + "grpc-async": { + "libraryClient": "MetricsServiceV2AsyncClient", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed new file mode 100644 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py new file mode 100644 index 000000000000..2b27a12e93f8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ConfigServiceV2Client +from .async_client import ConfigServiceV2AsyncClient + +__all__ = ( + 'ConfigServiceV2Client', + 'ConfigServiceV2AsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py new file mode 100644 index 000000000000..82e84aab817c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -0,0 +1,2016 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from .client import ConfigServiceV2Client + + +class ConfigServiceV2AsyncClient: + """Service for configuring sinks used to route log entries.""" + + _client: ConfigServiceV2Client + + DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + + cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) + parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) + log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) + parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) + log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) + parse_log_exclusion_path = staticmethod(ConfigServiceV2Client.parse_log_exclusion_path) + log_sink_path = staticmethod(ConfigServiceV2Client.log_sink_path) + parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) + log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) + parse_log_view_path = 
staticmethod(ConfigServiceV2Client.parse_log_view_path) + common_billing_account_path = staticmethod(ConfigServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(ConfigServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(ConfigServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(ConfigServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(ConfigServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(ConfigServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(ConfigServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(ConfigServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(ConfigServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2AsyncClient: The constructed client. + """ + return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2AsyncClient: The constructed client. 
+ """ + return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the config service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.ConfigServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = ConfigServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_buckets(self, + request: logging_config.ListBucketsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBucketsAsyncPager: + r"""Lists buckets. + + Args: + request (:class:`google.cloud.logging_v2.types.ListBucketsRequest`): + The request object. The parameters to `ListBuckets`. + parent (:class:`str`): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: + The response from ListBuckets. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListBucketsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_buckets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBucketsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_bucket(self, + request: logging_config.GetBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Gets a bucket. 
+ + Args: + request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): + The request object. The parameters to `GetBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_bucket(self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): + The request object. The parameters to `CreateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_bucket(self, + request: logging_config.UpdateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Updates a bucket. This method replaces the following fields in + the existing bucket with values from the new bucket: + ``retention_period`` + + If the retention period is decreased and the bucket is locked, + FAILED_PRECONDITION will be returned. + + If the bucket has a LifecycleState of DELETE_REQUESTED, + FAILED_PRECONDITION will be returned. + + A buckets region may not be modified after it is created. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`): + The request object. The parameters to `UpdateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. 
+ """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_bucket(self, + request: logging_config.DeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): + The request object. The parameters to `DeleteBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def undelete_bucket(self, + request: logging_config.UndeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): + The request object. The parameters to `UndeleteBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.undelete_bucket, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_views(self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsAsyncPager: + r"""Lists views on a bucket. 
+ + Args: + request (:class:`google.cloud.logging_v2.types.ListViewsRequest`): + The request object. The parameters to `ListViews`. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListViewsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_views, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListViewsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_view(self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (:class:`google.cloud.logging_v2.types.GetViewRequest`): + The request object. The parameters to `GetView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_view(self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): + The request object. The parameters to `CreateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_view(self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. 
+ + Args: + request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): + The request object. The parameters to `UpdateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_view(self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): + The request object. The parameters to `DeleteView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_view, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_sinks(self, + request: logging_config.ListSinksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSinksAsyncPager: + r"""Lists sinks. + + Args: + request (:class:`google.cloud.logging_v2.types.ListSinksRequest`): + The request object. The parameters to `ListSinks`. + parent (:class:`str`): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: + Result returned from ListSinks. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListSinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_sinks, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSinksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_sink(self, + request: logging_config.GetSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. 
+ + Args: + request (:class:`google.cloud.logging_v2.types.GetSinkRequest`): + The request object. The parameters to `GetSink`. + sink_name (:class:`str`): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_sink(self, + request: logging_config.CreateSinkRequest = None, + *, + parent: str = None, + sink: logging_config.LogSink = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateSinkRequest`): + The request object. The parameters to `CreateSink`. + parent (:class:`str`): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`google.cloud.logging_v2.types.LogSink`): + Required. 
The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.CreateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_sink, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_sink(self, + request: logging_config.UpdateSinkRequest = None, + *, + sink_name: str = None, + sink: logging_config.LogSink = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateSinkRequest`): + The request object. The parameters to `UpdateSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`google.cloud.logging_v2.types.LogSink`): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. 
+ + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren At some point in the + future, behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_sink(self, + request: logging_config.DeleteSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteSinkRequest`): + The request object. The parameters to `DeleteSink`. + sink_name (:class:`str`): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.DeleteSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_exclusions(self, + request: logging_config.ListExclusionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsAsyncPager: + r"""Lists all the exclusions in a parent resource. + + Args: + request (:class:`google.cloud.logging_v2.types.ListExclusionsRequest`): + The request object. The parameters to `ListExclusions`. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListExclusionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_exclusions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExclusionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_exclusion(self, + request: logging_config.GetExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion. + + Args: + request (:class:`google.cloud.logging_v2.types.GetExclusionRequest`): + The request object. The parameters to `GetExclusion`. + name (:class:`str`): + Required. 
The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_exclusion(self, + request: logging_config.CreateExclusionRequest = None, + *, + parent: str = None, + exclusion: logging_config.LogExclusion = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Args: + request (:class:`google.cloud.logging_v2.types.CreateExclusionRequest`): + The request object. The parameters to `CreateExclusion`. + parent (:class:`str`): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. 
The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_exclusion(self, + request: logging_config.UpdateExclusionRequest = None, + *, + name: str = None, + exclusion: logging_config.LogExclusion = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing + exclusion. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateExclusionRequest`): + The request object. The parameters to `UpdateExclusion`. + name (:class:`str`): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_exclusion(self, + request: logging_config.DeleteExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteExclusionRequest`): + The request object. The parameters to `DeleteExclusion`. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError("If the `request` argument is set, then none of "
+ "the individual field arguments should be set.")
+
+ request = logging_config.DeleteExclusionRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_exclusion,
+ default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
+ core_exceptions.DeadlineExceeded,
+ core_exceptions.InternalServerError,
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=60.0,
+ ),
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("name", request.name),
+ )),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ async def get_cmek_settings(self,
+ request: logging_config.GetCmekSettingsRequest = None,
+ *,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> logging_config.CmekSettings:
+ r"""Gets the Logs Router CMEK settings for the given resource.
+
+ Note: CMEK for the Logs Router can currently only be configured
+ for GCP organizations. Once configured, it applies to all
+ projects and folders in the GCP organization.
+
+ See `Enabling CMEK for Logs
+ Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+ for more information.
+ + Args: + request (:class:`google.cloud.logging_v2.types.GetCmekSettingsRequest`): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed- + encryption) for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_cmek_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_cmek_settings(self, + request: logging_config.UpdateCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Args: + request (:class:`google.cloud.logging_v2.types.UpdateCmekSettingsRequest`): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed- + encryption) for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. 
+ + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_cmek_settings, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "ConfigServiceV2AsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py new file mode 100644 index 000000000000..67658652a61d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -0,0 +1,2194 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import ConfigServiceV2GrpcTransport +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport + + +class ConfigServiceV2ClientMeta(type): + """Metaclass for the ConfigServiceV2 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] + _transport_registry["grpc"] = ConfigServiceV2GrpcTransport + _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[ConfigServiceV2Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ConfigServiceV2Client(metaclass=ConfigServiceV2ClientMeta): + """Service for configuring sinks used to route log entries.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ConfigServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ConfigServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def cmek_settings_path(project: str,) -> str: + """Returns a fully-qualified cmek_settings string.""" + return "projects/{project}/cmekSettings".format(project=project, ) + + @staticmethod + def parse_cmek_settings_path(path: str) -> Dict[str,str]: + """Parses a cmek_settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_bucket_path(project: str,location: str,bucket: str,) -> str: + """Returns a fully-qualified log_bucket string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + + @staticmethod + def parse_log_bucket_path(path: str) -> Dict[str,str]: + """Parses a log_bucket path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_exclusion_path(project: str,exclusion: str,) -> str: + """Returns a fully-qualified log_exclusion string.""" + return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + + @staticmethod + def parse_log_exclusion_path(path: str) -> Dict[str,str]: + """Parses a log_exclusion path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_sink_path(project: str,sink: str,) -> str: + """Returns a fully-qualified log_sink string.""" + return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + + @staticmethod + def parse_log_sink_path(path: str) -> Dict[str,str]: + """Parses a log_sink path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: + """Returns a 
fully-qualified log_view string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + + @staticmethod + def parse_log_view_path(path: str) -> Dict[str,str]: + """Parses a log_view path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + 
"""Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, ConfigServiceV2Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the config service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ConfigServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, ConfigServiceV2Transport): + # transport is a ConfigServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_buckets(self, + request: logging_config.ListBucketsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListBucketsPager: + r"""Lists buckets. + + Args: + request (google.cloud.logging_v2.types.ListBucketsRequest): + The request object. The parameters to `ListBuckets`. + parent (str): + Required. 
The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: + The response from ListBuckets. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListBucketsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_buckets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListBucketsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_bucket(self, + request: logging_config.GetBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Gets a bucket. + + Args: + request (google.cloud.logging_v2.types.GetBucketRequest): + The request object. The parameters to `GetBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_bucket(self, + request: logging_config.CreateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Args: + request (google.cloud.logging_v2.types.CreateBucketRequest): + The request object. The parameters to `CreateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_bucket(self, + request: logging_config.UpdateBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogBucket: + r"""Updates a bucket. This method replaces the following fields in + the existing bucket with values from the new bucket: + ``retention_period`` + + If the retention period is decreased and the bucket is locked, + FAILED_PRECONDITION will be returned. + + If the bucket has a LifecycleState of DELETE_REQUESTED, + FAILED_PRECONDITION will be returned. + + A buckets region may not be modified after it is created. + + Args: + request (google.cloud.logging_v2.types.UpdateBucketRequest): + The request object. The parameters to `UpdateBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository of logs. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_bucket(self, + request: logging_config.DeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Args: + request (google.cloud.logging_v2.types.DeleteBucketRequest): + The request object. The parameters to `DeleteBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def undelete_bucket(self, + request: logging_config.UndeleteBucketRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Undeletes a bucket. A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Args: + request (google.cloud.logging_v2.types.UndeleteBucketRequest): + The request object. The parameters to `UndeleteBucket`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UndeleteBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_views(self, + request: logging_config.ListViewsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListViewsPager: + r"""Lists views on a bucket. + + Args: + request (google.cloud.logging_v2.types.ListViewsRequest): + The request object. The parameters to `ListViews`. + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: + The response from ListViews. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListViewsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_views] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListViewsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_view(self, + request: logging_config.GetViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Gets a view. + + Args: + request (google.cloud.logging_v2.types.GetViewRequest): + The request object. The parameters to `GetView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetViewRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_view(self, + request: logging_config.CreateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Args: + request (google.cloud.logging_v2.types.CreateViewRequest): + The request object. The parameters to `CreateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_view(self, + request: logging_config.UpdateViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogView: + r"""Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. + + Args: + request (google.cloud.logging_v2.types.UpdateViewRequest): + The request object. The parameters to `UpdateView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over logs in a + bucket. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_view(self, + request: logging_config.DeleteViewRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a view from a bucket. + + Args: + request (google.cloud.logging_v2.types.DeleteViewRequest): + The request object. The parameters to `DeleteView`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteViewRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_sinks(self, + request: logging_config.ListSinksRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSinksPager: + r"""Lists sinks. + + Args: + request (google.cloud.logging_v2.types.ListSinksRequest): + The request object. The parameters to `ListSinks`. + parent (str): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: + Result returned from ListSinks. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListSinksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_sinks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSinksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_sink(self, + request: logging_config.GetSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + Args: + request (google.cloud.logging_v2.types.GetSinkRequest): + The request object. The parameters to `GetSink`. + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_sink(self, + request: logging_config.CreateSinkRequest = None, + *, + parent: str = None, + sink: logging_config.LogSink = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Args: + request (google.cloud.logging_v2.types.CreateSinkRequest): + The request object. The parameters to `CreateSink`. + parent (str): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (google.cloud.logging_v2.types.LogSink): + Required. The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. 
+ The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_sink(self, + request: logging_config.UpdateSinkRequest = None, + *, + sink_name: str = None, + sink: logging_config.LogSink = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. 
This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Args: + request (google.cloud.logging_v2.types.UpdateSinkRequest): + The request object. The parameters to `UpdateSink`. + sink_name (str): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (google.cloud.logging_v2.types.LogSink): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren. At some point in the + future, this behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``.
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_sink(self, + request: logging_config.DeleteSinkRequest = None, + *, + sink_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Args: + request (google.cloud.logging_v2.types.DeleteSinkRequest): + The request object. The parameters to `DeleteSink`. + sink_name (str): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteSinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def list_exclusions(self, + request: logging_config.ListExclusionsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsPager: + r"""Lists all the exclusions in a parent resource. + + Args: + request (google.cloud.logging_v2.types.ListExclusionsRequest): + The request object. The parameters to `ListExclusions`. + parent (str): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListExclusionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListExclusionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_exclusion(self, + request: logging_config.GetExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion. + + Args: + request (google.cloud.logging_v2.types.GetExclusionRequest): + The request object. The parameters to `GetExclusion`. + name (str): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_exclusion(self, + request: logging_config.CreateExclusionRequest = None, + *, + parent: str = None, + exclusion: logging_config.LogExclusion = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Args: + request (google.cloud.logging_v2.types.CreateExclusionRequest): + The request object. The parameters to `CreateExclusion`. 
+ parent (str): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_exclusion(self, + request: logging_config.UpdateExclusionRequest = None, + *, + name: str = None, + exclusion: logging_config.LogExclusion = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing + exclusion. + + Args: + request (google.cloud.logging_v2.types.UpdateExclusionRequest): + The request object. The parameters to `UpdateExclusion`. + name (str): + Required. 
The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. 
Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_exclusion(self, + request: logging_config.DeleteExclusionRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion. + + Args: + request (google.cloud.logging_v2.types.DeleteExclusionRequest): + The request object. The parameters to `DeleteExclusion`. + name (str): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + Example: + ``"projects/my-project-id/exclusions/my-exclusion-id"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.DeleteExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_cmek_settings(self, + request: logging_config.GetCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Args: + request (google.cloud.logging_v2.types.GetCmekSettingsRequest): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed- + encryption) for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata.
+ + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetCmekSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_cmek_settings(self, + request: logging_config.UpdateCmekSettingsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. 
+ + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__ + for more information. + + Args: + request (google.cloud.logging_v2.types.UpdateCmekSettingsRequest): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + See [Enabling CMEK for Logs + Router](https://cloud.google.com/logging/docs/routing/managed- + encryption) for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See [Enabling CMEK for Logs + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateCmekSettingsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "ConfigServiceV2Client", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py new file mode 100644 index 000000000000..11dce2ab7d58 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -0,0 +1,506 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.logging_v2.types import logging_config + + +class ListBucketsPager: + """A pager for iterating through ``list_buckets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``buckets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListBuckets`` requests and continue to iterate + through the ``buckets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListBucketsResponse], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListBucketsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListBucketsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListBucketsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogBucket]: + for page in self.pages: + yield from page.buckets + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListBucketsAsyncPager: + """A pager for iterating through ``list_buckets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListBucketsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``buckets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListBuckets`` requests and continue to iterate + through the ``buckets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListBucketsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListBucketsResponse]], + request: logging_config.ListBucketsRequest, + response: logging_config.ListBucketsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListBucketsRequest): + The initial request object. 
+ response (google.cloud.logging_v2.types.ListBucketsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListBucketsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListBucketsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogBucket]: + async def async_generator(): + async for page in self.pages: + for response in page.buckets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListViewsPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., logging_config.ListViewsResponse], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListViewsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListViewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogView]: + for page in self.pages: + yield from page.views + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListViewsAsyncPager: + """A pager for iterating through ``list_views`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``views`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListViews`` requests and continue to iterate + through the ``views`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListViewsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListViewsResponse]], + request: logging_config.ListViewsRequest, + response: logging_config.ListViewsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListViewsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListViewsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListViewsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + async def async_generator(): + async for page in self.pages: + for response in page.views: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSinksPager: + """A pager for iterating through ``list_sinks`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``sinks`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListSinks`` requests and continue to iterate + through the ``sinks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListSinksResponse], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListSinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListSinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListSinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListSinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogSink]: + for page in self.pages: + yield from page.sinks + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSinksAsyncPager: + """A pager for iterating through ``list_sinks`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``sinks`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSinks`` requests and continue to iterate + through the ``sinks`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListSinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListSinksResponse]], + request: logging_config.ListSinksRequest, + response: logging_config.ListSinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListSinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListSinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = logging_config.ListSinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListSinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogSink]: + async def async_generator(): + async for page in self.pages: + for response in page.sinks: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListExclusionsPager: + """A pager for iterating through ``list_exclusions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListExclusionsResponse], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.logging_v2.types.ListExclusionsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListExclusionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[logging_config.LogExclusion]: + for page in self.pages: + yield from page.exclusions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListExclusionsAsyncPager: + """A pager for iterating through ``list_exclusions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListExclusionsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListExclusionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[logging_config.LogExclusion]: + async def async_generator(): + async for page in self.pages: + for response in page.exclusions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py new file mode 100644 index 000000000000..6e18c331ff70 --- /dev/null +++ 
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import ConfigServiceV2Transport
from .grpc import ConfigServiceV2GrpcTransport
from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport


# Registry mapping transport names to their implementing classes.
_transport_registry: Dict[str, Type[ConfigServiceV2Transport]] = OrderedDict(
    (
        ('grpc', ConfigServiceV2GrpcTransport),
        ('grpc_asyncio', ConfigServiceV2GrpcAsyncIOTransport),
    ),
)

__all__ = (
    'ConfigServiceV2Transport',
    'ConfigServiceV2GrpcTransport',
    'ConfigServiceV2GrpcAsyncIOTransport',
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources

import google.auth  # type: ignore
import google.api_core  # type: ignore
from google.api_core import exceptions as core_exceptions  # type: ignore
from google.api_core import gapic_v1  # type: ignore
from google.api_core import retry as retries  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore

from google.cloud.logging_v2.types import logging_config
from google.protobuf import empty_pb2  # type: ignore

# Report the installed library version in the user agent; fall back to an
# unversioned ClientInfo when the distribution is not installed.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-logging',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# google.auth.__version__ was added in 1.26.0; probe older installations
# through pkg_resources instead.
try:
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None


class ConfigServiceV2Transport(abc.ABC):
    """Abstract transport class for ConfigServiceV2."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/cloud-platform.read-only',
        'https://www.googleapis.com/auth/logging.admin',
        'https://www.googleapis.com/auth/logging.read',
    )

    DEFAULT_HOST: str = 'logging.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        # Default to port 443 (HTTPS) when the caller supplied a bare hostname.
        self._host = host if ':' in host else host + ':443'

        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        # Keep the effective scopes for later channel creation.
        self._scopes = scopes or self.AUTH_SCOPES

        # Resolve credentials: explicit object, credentials file, or
        # application default credentials — in that order of precedence.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id)

        self._credentials = credentials

    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required versions of google-auth is increased.
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_buckets: gapic_v1.method.wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: gapic_v1.method.wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: gapic_v1.method.wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: gapic_v1.method.wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: gapic_v1.method.wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method.wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.list_views: gapic_v1.method.wrap_method( + self.list_views, + default_timeout=None, + client_info=client_info, + ), + self.get_view: gapic_v1.method.wrap_method( + self.get_view, + default_timeout=None, + client_info=client_info, + ), + self.create_view: gapic_v1.method.wrap_method( + self.create_view, + default_timeout=None, + client_info=client_info, + ), + self.update_view: gapic_v1.method.wrap_method( + self.update_view, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_view: gapic_v1.method.wrap_method( + self.delete_view, + default_timeout=None, + client_info=client_info, + ), + self.list_sinks: gapic_v1.method.wrap_method( + self.list_sinks, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: gapic_v1.method.wrap_method( + self.get_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: gapic_v1.method.wrap_method( + self.create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self.update_sink: gapic_v1.method.wrap_method( + self.update_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: gapic_v1.method.wrap_method( + self.delete_sink, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_exclusions: gapic_v1.method.wrap_method( + self.list_exclusions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: gapic_v1.method.wrap_method( + self.get_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: gapic_v1.method.wrap_method( + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.update_exclusion: gapic_v1.method.wrap_method( + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.delete_exclusion: gapic_v1.method.wrap_method( + self.delete_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: gapic_v1.method.wrap_method( + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_cmek_settings: gapic_v1.method.wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + } + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + Union[ + logging_config.ListBucketsResponse, + Awaitable[logging_config.ListBucketsResponse] + ]]: + raise NotImplementedError() + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ 
+ logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + Union[ + logging_config.ListViewsResponse, + Awaitable[logging_config.ListViewsResponse] + ]]: + raise NotImplementedError() + + @property + def get_view(self) -> Callable[ + [logging_config.GetViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + Union[ + logging_config.ListSinksResponse, + Awaitable[logging_config.ListSinksResponse] + ]]: + raise NotImplementedError() + + @property + def get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + Union[ + logging_config.LogSink, + 
Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + Union[ + logging_config.ListExclusionsResponse, + Awaitable[logging_config.ListExclusionsResponse] + ]]: + raise NotImplementedError() + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ]]: + raise NotImplementedError() + + @property + def update_cmek_settings(self) -> Callable[ + 
[logging_config.UpdateCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'ConfigServiceV2Transport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py new file mode 100644 index 000000000000..94f628450fc2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -0,0 +1,874 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import grpc_helpers  # type: ignore
from google.api_core import gapic_v1  # type: ignore
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore

from google.cloud.logging_v2.types import logging_config
from google.protobuf import empty_pb2  # type: ignore
from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO


class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport):
    """gRPC backend transport for ConfigServiceV2.

    Service for configuring sinks used to route log entries.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    _stubs: Dict[str, Callable]

    def __init__(self, *,
            host: str = 'logging.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: str = None,
            scopes: Sequence[str] = None,
            channel: grpc.Channel = None,
            api_mtls_endpoint: str = None,
            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
            ssl_channel_credentials: grpc.ChannelCredentials = None,
            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # An explicit channel takes precedence; ignore credentials.
            credentials = False
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        elif api_mtls_endpoint:
            # Deprecated mTLS flow: the mTLS endpoint overrides ``host``, and
            # SSL credentials come from client_cert_source or the application
            # default SSL credentials.
            host = api_mtls_endpoint
            if client_cert_source:
                cert, key = client_cert_source()
                self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
            else:
                self._ssl_channel_credentials = SslCredentials().ssl_credentials
        elif client_cert_source_for_mtls and not ssl_channel_credentials:
            cert, key = client_cert_source_for_mtls()
            self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                certificate_chain=cert, private_key=key
            )

        # The base transport resolves the host, credentials and scopes.
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                credentials=self._credentials,
                credentials_file=credentials_file,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages; requires self._grpc_channel to exist.
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
            host: str = 'logging.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: str = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """The channel designed to connect to this service."""
        return self._grpc_channel

    def _get_stub(self, name: str, path: str,
                  request_serializer, response_deserializer) -> Callable:
        """Return the stub for ``path``, creating and caching it on first use.

        gRPC handles serialization and deserialization; we only need to
        supply the per-RPC (de)serializer functions.
        """
        if name not in self._stubs:
            self._stubs[name] = self.grpc_channel.unary_unary(
                path,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]

    @property
    def list_buckets(self) -> Callable[
            [logging_config.ListBucketsRequest],
            logging_config.ListBucketsResponse]:
        r"""Return a callable for the list buckets method over gRPC.

        Lists buckets.
        """
        return self._get_stub(
            'list_buckets',
            '/google.logging.v2.ConfigServiceV2/ListBuckets',
            logging_config.ListBucketsRequest.serialize,
            logging_config.ListBucketsResponse.deserialize,
        )

    @property
    def get_bucket(self) -> Callable[
            [logging_config.GetBucketRequest],
            logging_config.LogBucket]:
        r"""Return a callable for the get bucket method over gRPC.

        Gets a bucket.
        """
        return self._get_stub(
            'get_bucket',
            '/google.logging.v2.ConfigServiceV2/GetBucket',
            logging_config.GetBucketRequest.serialize,
            logging_config.LogBucket.deserialize,
        )

    @property
    def create_bucket(self) -> Callable[
            [logging_config.CreateBucketRequest],
            logging_config.LogBucket]:
        r"""Return a callable for the create bucket method over gRPC.

        Creates a bucket that can be used to store log entries. Once a
        bucket has been created, the region cannot be changed.
        """
        return self._get_stub(
            'create_bucket',
            '/google.logging.v2.ConfigServiceV2/CreateBucket',
            logging_config.CreateBucketRequest.serialize,
            logging_config.LogBucket.deserialize,
        )

    @property
    def update_bucket(self) -> Callable[
            [logging_config.UpdateBucketRequest],
            logging_config.LogBucket]:
        r"""Return a callable for the update bucket method over gRPC.

        Updates a bucket. This method replaces the following fields in
        the existing bucket with values from the new bucket:
        ``retention_period``

        If the retention period is decreased and the bucket is locked,
        FAILED_PRECONDITION will be returned. If the bucket has a
        LifecycleState of DELETE_REQUESTED, FAILED_PRECONDITION will be
        returned. A bucket's region may not be modified after creation.
        """
        return self._get_stub(
            'update_bucket',
            '/google.logging.v2.ConfigServiceV2/UpdateBucket',
            logging_config.UpdateBucketRequest.serialize,
            logging_config.LogBucket.deserialize,
        )

    @property
    def delete_bucket(self) -> Callable[
            [logging_config.DeleteBucketRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the delete bucket method over gRPC.

        Deletes a bucket. Moves the bucket to the DELETE_REQUESTED
        state. After 7 days, the bucket will be purged and all logs in
        the bucket will be permanently deleted.
        """
        return self._get_stub(
            'delete_bucket',
            '/google.logging.v2.ConfigServiceV2/DeleteBucket',
            logging_config.DeleteBucketRequest.serialize,
            empty_pb2.Empty.FromString,
        )

    @property
    def undelete_bucket(self) -> Callable[
            [logging_config.UndeleteBucketRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the undelete bucket method over gRPC.

        Undeletes a bucket. A deleted bucket may be undeleted within
        the grace period of 7 days.
        """
        return self._get_stub(
            'undelete_bucket',
            '/google.logging.v2.ConfigServiceV2/UndeleteBucket',
            logging_config.UndeleteBucketRequest.serialize,
            empty_pb2.Empty.FromString,
        )

    @property
    def list_views(self) -> Callable[
            [logging_config.ListViewsRequest],
            logging_config.ListViewsResponse]:
        r"""Return a callable for the list views method over gRPC.

        Lists views on a bucket.
        """
        return self._get_stub(
            'list_views',
            '/google.logging.v2.ConfigServiceV2/ListViews',
            logging_config.ListViewsRequest.serialize,
            logging_config.ListViewsResponse.deserialize,
        )

    @property
    def get_view(self) -> Callable[
            [logging_config.GetViewRequest],
            logging_config.LogView]:
        r"""Return a callable for the get view method over gRPC.

        Gets a view.
        """
        return self._get_stub(
            'get_view',
            '/google.logging.v2.ConfigServiceV2/GetView',
            logging_config.GetViewRequest.serialize,
            logging_config.LogView.deserialize,
        )

    @property
    def create_view(self) -> Callable[
            [logging_config.CreateViewRequest],
            logging_config.LogView]:
        r"""Return a callable for the create view method over gRPC.

        Creates a view over logs in a bucket. A bucket may contain a
        maximum of 50 views.
        """
        return self._get_stub(
            'create_view',
            '/google.logging.v2.ConfigServiceV2/CreateView',
            logging_config.CreateViewRequest.serialize,
            logging_config.LogView.deserialize,
        )

    @property
    def update_view(self) -> Callable[
            [logging_config.UpdateViewRequest],
            logging_config.LogView]:
        r"""Return a callable for the update view method over gRPC.

        Updates a view. This method replaces the following fields in
        the existing view with values from the new view: ``filter``.
        """
        return self._get_stub(
            'update_view',
            '/google.logging.v2.ConfigServiceV2/UpdateView',
            logging_config.UpdateViewRequest.serialize,
            logging_config.LogView.deserialize,
        )

    @property
    def delete_view(self) -> Callable[
            [logging_config.DeleteViewRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the delete view method over gRPC.

        Deletes a view from a bucket.
        """
        return self._get_stub(
            'delete_view',
            '/google.logging.v2.ConfigServiceV2/DeleteView',
            logging_config.DeleteViewRequest.serialize,
            empty_pb2.Empty.FromString,
        )

    @property
    def list_sinks(self) -> Callable[
            [logging_config.ListSinksRequest],
            logging_config.ListSinksResponse]:
        r"""Return a callable for the list sinks method over gRPC.

        Lists sinks.
        """
        return self._get_stub(
            'list_sinks',
            '/google.logging.v2.ConfigServiceV2/ListSinks',
            logging_config.ListSinksRequest.serialize,
            logging_config.ListSinksResponse.deserialize,
        )

    @property
    def get_sink(self) -> Callable[
            [logging_config.GetSinkRequest],
            logging_config.LogSink]:
        r"""Return a callable for the get sink method over gRPC.

        Gets a sink.
        """
        return self._get_stub(
            'get_sink',
            '/google.logging.v2.ConfigServiceV2/GetSink',
            logging_config.GetSinkRequest.serialize,
            logging_config.LogSink.deserialize,
        )

    @property
    def create_sink(self) -> Callable[
            [logging_config.CreateSinkRequest],
            logging_config.LogSink]:
        r"""Return a callable for the create sink method over gRPC.

        Creates a sink that exports specified log entries to a
        destination. The export of newly-ingested log entries begins
        immediately, unless the sink's ``writer_identity`` is not
        permitted to write to the destination. A sink can export log
        entries only from the resource owning the sink.

        Returns:
            Callable[[~.CreateSinkRequest],
                    ~.LogSink]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
+ if 'create_sink' not in self._stubs: + self._stubs['create_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['create_sink'] + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_sink' not in self._stubs: + self._stubs['update_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['update_sink'] + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_sink' not in self._stubs: + self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_sink'] + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + logging_config.ListExclusionsResponse]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions in a parent resource. + + Returns: + Callable[[~.ListExclusionsRequest], + ~.ListExclusionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_exclusions' not in self._stubs: + self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs['list_exclusions'] + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion. + + Returns: + Callable[[~.GetExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_exclusion' not in self._stubs: + self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['get_exclusion'] + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Returns: + Callable[[~.CreateExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_exclusion' not in self._stubs: + self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['create_exclusion'] + + @property + def update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the update exclusion method over gRPC. + + Changes one or more properties of an existing + exclusion. + + Returns: + Callable[[~.UpdateExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_exclusion' not in self._stubs: + self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + request_serializer=logging_config.UpdateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['update_exclusion'] + + @property + def delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete exclusion method over gRPC. + + Deletes an exclusion. + + Returns: + Callable[[~.DeleteExclusionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_exclusion' not in self._stubs: + self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + request_serializer=logging_config.DeleteExclusionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_exclusion'] + + @property + def get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + logging_config.CmekSettings]: + r"""Return a callable for the get cmek settings method over gRPC. + + Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Returns: + Callable[[~.GetCmekSettingsRequest], + ~.CmekSettings]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_cmek_settings' not in self._stubs: + self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + request_serializer=logging_config.GetCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['get_cmek_settings'] + + @property + def update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + logging_config.CmekSettings]: + r"""Return a callable for the update cmek settings method over gRPC. + + Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Returns: + Callable[[~.UpdateCmekSettingsRequest], + ~.CmekSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_cmek_settings' not in self._stubs: + self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['update_cmek_settings'] + + +__all__ = ( + 'ConfigServiceV2GrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..498d4c1dbbc4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,878 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging_config +from google.protobuf import empty_pb2 # type: ignore +from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import ConfigServiceV2GrpcTransport + + +class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): + """gRPC AsyncIO backend transport for ConfigServiceV2. + + Service for configuring sinks used to route log entries. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + Awaitable[logging_config.ListBucketsResponse]]: + r"""Return a callable for the list buckets method over gRPC. + + Lists buckets. + + Returns: + Callable[[~.ListBucketsRequest], + Awaitable[~.ListBucketsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_buckets' not in self._stubs: + self._stubs['list_buckets'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListBuckets', + request_serializer=logging_config.ListBucketsRequest.serialize, + response_deserializer=logging_config.ListBucketsResponse.deserialize, + ) + return self._stubs['list_buckets'] + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the get bucket method over gRPC. + + Gets a bucket. + + Returns: + Callable[[~.GetBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_bucket' not in self._stubs: + self._stubs['get_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetBucket', + request_serializer=logging_config.GetBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['get_bucket'] + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a bucket that can be used to store log + entries. Once a bucket has been created, the region + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket' not in self._stubs: + self._stubs['create_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucket', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['create_bucket'] + + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the update bucket method over gRPC. + + Updates a bucket. This method replaces the following fields in + the existing bucket with values from the new bucket: + ``retention_period`` + + If the retention period is decreased and the bucket is locked, + FAILED_PRECONDITION will be returned. + + If the bucket has a LifecycleState of DELETE_REQUESTED, + FAILED_PRECONDITION will be returned. + + A buckets region may not be modified after it is created. 
+ + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket' not in self._stubs: + self._stubs['update_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucket', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['update_bucket'] + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a bucket. Moves the bucket to the DELETE_REQUESTED + state. After 7 days, the bucket will be purged and all logs in + the bucket will be permanently deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_bucket' not in self._stubs: + self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteBucket', + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_bucket'] + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a bucket. 
A bucket that has been deleted + may be undeleted within the grace period of 7 days. + + Returns: + Callable[[~.UndeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_bucket' not in self._stubs: + self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['undelete_bucket'] + + @property + def list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + Awaitable[logging_config.ListViewsResponse]]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a bucket. + + Returns: + Callable[[~.ListViewsRequest], + Awaitable[~.ListViewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_views' not in self._stubs: + self._stubs['list_views'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListViews', + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs['list_views'] + + @property + def get_view(self) -> Callable[ + [logging_config.GetViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the get view method over gRPC. + + Gets a view. 
+ + Returns: + Callable[[~.GetViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_view' not in self._stubs: + self._stubs['get_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetView', + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['get_view'] + + @property + def create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over logs in a bucket. A bucket may + contain a maximum of 50 views. + + Returns: + Callable[[~.CreateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_view' not in self._stubs: + self._stubs['create_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateView', + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['create_view'] + + @property + def update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the update view method over gRPC. + + Updates a view. This method replaces the following fields in the + existing view with values from the new view: ``filter``. 
+ + Returns: + Callable[[~.UpdateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_view' not in self._stubs: + self._stubs['update_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateView', + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['update_view'] + + @property + def delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view from a bucket. + + Returns: + Callable[[~.DeleteViewRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_view' not in self._stubs: + self._stubs['delete_view'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteView', + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_view'] + + @property + def list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + Awaitable[logging_config.ListSinksResponse]]: + r"""Return a callable for the list sinks method over gRPC. + + Lists sinks. + + Returns: + Callable[[~.ListSinksRequest], + Awaitable[~.ListSinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_sinks' not in self._stubs: + self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', + request_serializer=logging_config.ListSinksRequest.serialize, + response_deserializer=logging_config.ListSinksResponse.deserialize, + ) + return self._stubs['list_sinks'] + + @property + def get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the get sink method over gRPC. + + Gets a sink. + + Returns: + Callable[[~.GetSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_sink' not in self._stubs: + self._stubs['get_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', + request_serializer=logging_config.GetSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['get_sink'] + + @property + def create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the create sink method over gRPC. + + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Returns: + Callable[[~.CreateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_sink' not in self._stubs: + self._stubs['create_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['create_sink'] + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_sink' not in self._stubs: + self._stubs['update_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['update_sink'] + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_sink' not in self._stubs: + self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_sink'] + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + Awaitable[logging_config.ListExclusionsResponse]]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions in a parent resource. + + Returns: + Callable[[~.ListExclusionsRequest], + Awaitable[~.ListExclusionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_exclusions' not in self._stubs: + self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs['list_exclusions'] + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion. + + Returns: + Callable[[~.GetExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_exclusion' not in self._stubs: + self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['get_exclusion'] + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in a specified parent + resource. Only log entries belonging to that resource + can be excluded. You can have up to 10 exclusions in a + resource. + + Returns: + Callable[[~.CreateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_exclusion' not in self._stubs: + self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['create_exclusion'] + + @property + def update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the update exclusion method over gRPC. + + Changes one or more properties of an existing + exclusion. + + Returns: + Callable[[~.UpdateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_exclusion' not in self._stubs: + self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', + request_serializer=logging_config.UpdateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['update_exclusion'] + + @property + def delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete exclusion method over gRPC. + + Deletes an exclusion. + + Returns: + Callable[[~.DeleteExclusionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_exclusion' not in self._stubs: + self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', + request_serializer=logging_config.DeleteExclusionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_exclusion'] + + @property + def get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Awaitable[logging_config.CmekSettings]]: + r"""Return a callable for the get cmek settings method over gRPC. + + Gets the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. 
+ + Returns: + Callable[[~.GetCmekSettingsRequest], + Awaitable[~.CmekSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_cmek_settings' not in self._stubs: + self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetCmekSettings', + request_serializer=logging_config.GetCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['get_cmek_settings'] + + @property + def update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Awaitable[logging_config.CmekSettings]]: + r"""Return a callable for the update cmek settings method over gRPC. + + Updates the Logs Router CMEK settings for the given resource. + + Note: CMEK for the Logs Router can currently only be configured + for GCP organizations. Once configured, it applies to all + projects and folders in the GCP organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Returns: + Callable[[~.UpdateCmekSettingsRequest], + Awaitable[~.CmekSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_cmek_settings' not in self._stubs: + self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', + request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['update_cmek_settings'] + + +__all__ = ( + 'ConfigServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py new file mode 100644 index 000000000000..ed08d1888503 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import LoggingServiceV2Client +from .async_client import LoggingServiceV2AsyncClient + +__all__ = ( + 'LoggingServiceV2Client', + 'LoggingServiceV2AsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py new file mode 100644 index 000000000000..dd9cbb78dd9a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -0,0 +1,781 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from .client import LoggingServiceV2Client + + +class LoggingServiceV2AsyncClient: + """Service for ingesting and querying logs.""" + + _client: LoggingServiceV2Client + + DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + + log_path = staticmethod(LoggingServiceV2Client.log_path) + parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) + common_billing_account_path = staticmethod(LoggingServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LoggingServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(LoggingServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(LoggingServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(LoggingServiceV2Client.parse_common_organization_path) + common_project_path = 
staticmethod(LoggingServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(LoggingServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(LoggingServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. + """ + return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. + """ + return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LoggingServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + LoggingServiceV2Transport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the logging service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.LoggingServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LoggingServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def delete_log(self, + request: logging.DeleteLogRequest = None, + *, + log_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Args: + request (:class:`google.cloud.logging_v2.types.DeleteLogRequest`): + The request object. The parameters to DeleteLog. + log_name (:class:`str`): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.DeleteLogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_log, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("log_name", request.log_name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def write_log_entries(self, + request: logging.WriteLogEntriesRequest = None, + *, + log_name: str = None, + resource: monitored_resource_pb2.MonitoredResource = None, + labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + entries: Sequence[log_entry.LogEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. 
This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Args: + request (:class:`google.cloud.logging_v2.types.WriteLogEntriesRequest`): + The request object. The parameters to WriteLogEntries. + log_name (:class:`str`): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`google.api.monitored_resource_pb2.MonitoredResource`): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): + Optional. 
Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (:class:`Sequence[google.cloud.logging_v2.types.LogEntry]`): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ + in the past or more than 24 hours in the future will not + be available when calling ``entries.list``. However, + those log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ + for calls to ``entries.write``, you should try to + include several log entries in this list, rather than + calling this method for each individual log entry. + + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.WriteLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + + if labels: + request.labels.update(labels) + if entries: + request.entries.extend(entries) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.write_log_entries, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_log_entries(self, + request: logging.ListLogEntriesRequest = None, + *, + resource_names: Sequence[str] = None, + filter: str = None, + order_by: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogEntriesAsyncPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Args: + request (:class:`google.cloud.logging_v2.types.ListLogEntriesRequest`): + The request object. The parameters to `ListLogEntries`. + resource_names (:class:`Sequence[str]`): + Required. Names of one or more parent resources from + which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + Projects listed in the ``project_ids`` field are added + to this list. + + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. A filter that chooses which log entries to + return. See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources + listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will + cause the filter to return no results. 
The maximum + length of the filter is 20000 characters. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (:class:`str`): + Optional. How the results should be sorted. Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. + + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: + Result returned from ListLogEntries. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.ListLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + if resource_names: + request.resource_names.extend(resource_names) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_log_entries, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogEntriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_monitored_resource_descriptors(self, + request: logging.ListMonitoredResourceDescriptorsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + Args: + request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): + The request object. The parameters to + ListMonitoredResourceDescriptors + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_monitored_resource_descriptors, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_logs(self, + request: logging.ListLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogsAsyncPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Args: + request (:class:`google.cloud.logging_v2.types.ListLogsRequest`): + The request object. The parameters to ListLogs. + parent (:class:`str`): + Required. 
The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: + Result returned from ListLogs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging.ListLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_logs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def tail_log_entries(self, + requests: AsyncIterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): + The request object AsyncIterator. The parameters to `TailLogEntries`. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.tail_log_entries, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "LoggingServiceV2AsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py new file mode 100644 index 000000000000..8c16313ba974 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -0,0 +1,916 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import LoggingServiceV2GrpcTransport +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +class LoggingServiceV2ClientMeta(type): + """Metaclass for the LoggingServiceV2 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] + _transport_registry["grpc"] = LoggingServiceV2GrpcTransport + _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[LoggingServiceV2Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+ + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta): + """Service for ingesting and querying logs.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2Client: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LoggingServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + LoggingServiceV2Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def log_path(project: str,log: str,) -> str: + """Returns a fully-qualified log string.""" + return "projects/{project}/logs/{log}".format(project=project, log=log, ) + + @staticmethod + def parse_log_path(path: str) -> Dict[str,str]: + """Parses a log path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/logs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project 
path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, LoggingServiceV2Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the logging service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, LoggingServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, LoggingServiceV2Transport): + # transport is a LoggingServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def delete_log(self, + request: logging.DeleteLogRequest = None, + *, + log_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. 
Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Args: + request (google.cloud.logging_v2.types.DeleteLogRequest): + The request object. The parameters to DeleteLog. + log_name (str): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.DeleteLogRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_log] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("log_name", request.log_name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def write_log_entries(self, + request: logging.WriteLogEntriesRequest = None, + *, + log_name: str = None, + resource: monitored_resource_pb2.MonitoredResource = None, + labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + entries: Sequence[log_entry.LogEntry] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Args: + request (google.cloud.logging_v2.types.WriteLogEntriesRequest): + The request object. The parameters to WriteLogEntries. + log_name (str): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. 
For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. 
+ + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ + in the past or more than 24 hours in the future will not + be available when calling ``entries.list``. However, + those log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ + for calls to ``entries.write``, you should try to + include several log entries in this list, rather than + calling this method for each individual log entry. + + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.WriteLogEntriesRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + if labels is not None: + request.labels = labels + if entries is not None: + request.entries = entries + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_log_entries(self, + request: logging.ListLogEntriesRequest = None, + *, + resource_names: Sequence[str] = None, + filter: str = None, + order_by: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogEntriesPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Args: + request (google.cloud.logging_v2.types.ListLogEntriesRequest): + The request object. The parameters to `ListLogEntries`. + resource_names (Sequence[str]): + Required. 
Names of one or more parent resources from + which to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + Projects listed in the ``project_ids`` field are added + to this list. + + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Optional. A filter that chooses which log entries to + return. See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources + listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will + cause the filter to return no results. The maximum + length of the filter is 20000 characters. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (str): + Optional. How the results should be sorted. Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. + + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: + Result returned from ListLogEntries. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListLogEntriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if resource_names is not None: + request.resource_names = resource_names + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListLogEntriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_monitored_resource_descriptors(self, + request: logging.ListMonitoredResourceDescriptorsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + Args: + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + The request object. The parameters to + ListMonitoredResourceDescriptors + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListMonitoredResourceDescriptorsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_logs(self, + request: logging.ListLogsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogsPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Args: + request (google.cloud.logging_v2.types.ListLogsRequest): + The request object. The parameters to ListLogs. + parent (str): + Required. The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: + Result returned from ListLogs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging.ListLogsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_logs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def tail_log_entries(self, + requests: Iterator[logging.TailLogEntriesRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Args: + requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): + The request object iterator. The parameters to `TailLogEntries`. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "LoggingServiceV2Client", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py new file mode 100644 index 000000000000..9b94311d2e33 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -0,0 +1,386 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
class ListLogEntriesPager:
    """Synchronous pager over ``list_log_entries`` results.

    Wraps an initial
    :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` and
    exposes ``__iter__`` over its ``entries`` field, issuing follow-up
    ``ListLogEntries`` requests whenever the current response carries a
    ``next_page_token``.

    Attribute access falls through to the most recent response, so all the
    usual response attributes remain available on the pager; only the
    latest response is retained.
    """
    def __init__(self,
            method: Callable[..., logging.ListLogEntriesResponse],
            request: logging.ListLogEntriesRequest,
            response: logging.ListLogEntriesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.logging_v2.types.ListLogEntriesRequest):
                The initial request object.
            response (google.cloud.logging_v2.types.ListLogEntriesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation never touches the
        # caller's object.
        self._request = logging.ListLogEntriesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[logging.ListLogEntriesResponse]:
        # Yield the page already in hand, then keep fetching while the
        # server reports more results.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[log_entry.LogEntry]:
        return (entry for page in self.pages for entry in page.entries)

    def __repr__(self) -> str:
        return f'{type(self).__name__}<{self._response!r}>'
+ request (google.cloud.logging_v2.types.ListLogEntriesRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogEntriesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[log_entry.LogEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + for page in self.pages: + yield from page.resource_descriptors + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsAsyncPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resource_descriptors`` field. 
class ListMonitoredResourceDescriptorsAsyncPager:
    """Asynchronous pager over ``list_monitored_resource_descriptors`` results.

    Wraps an initial
    :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse`
    and exposes ``__aiter__`` over its ``resource_descriptors`` field,
    issuing follow-up ``ListMonitoredResourceDescriptors`` requests
    whenever the current response carries a ``next_page_token``.

    Attribute access falls through to the most recent response, so all the
    usual response attributes remain available on the pager; only the
    latest response is retained.
    """
    def __init__(self,
            method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]],
            request: logging.ListMonitoredResourceDescriptorsRequest,
            response: logging.ListMonitoredResourceDescriptorsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest):
                The initial request object.
            response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation never touches the
        # caller's object.
        self._request = logging.ListMonitoredResourceDescriptorsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[logging.ListMonitoredResourceDescriptorsResponse]:
        # Yield the page already in hand, then keep fetching while the
        # server reports more results.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]:
        async def _flatten():
            async for page in self.pages:
                for descriptor in page.resource_descriptors:
                    yield descriptor

        return _flatten()

    def __repr__(self) -> str:
        return f'{type(self).__name__}<{self._response!r}>'
+ request (google.cloud.logging_v2.types.ListLogsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.log_names + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogsAsyncPager: + """A pager for iterating through ``list_logs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``log_names`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.log_names: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py new file mode 100644 index 000000000000..46e9a1fcbf4c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LoggingServiceV2Transport +from .grpc import LoggingServiceV2GrpcTransport +from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] +_transport_registry['grpc'] = LoggingServiceV2GrpcTransport +_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport + +__all__ = ( + 'LoggingServiceV2Transport', + 'LoggingServiceV2GrpcTransport', + 'LoggingServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py new file mode 100644 index 000000000000..419242eb550a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -0,0 +1,283 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-logging',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package not installed (e.g. running from source); fall back to an
    # anonymous client info.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:  # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None


class LoggingServiceV2Transport(abc.ABC):
    """Abstract transport class for LoggingServiceV2.

    Concrete subclasses (gRPC, gRPC-asyncio) provide the actual RPC
    callables via the abstract properties below; this base class owns
    credential resolution and the retry/timeout wrapping of each method.
    """

    # OAuth scopes requested by default when none are supplied.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/cloud-platform.read-only',
        'https://www.googleapis.com/auth/logging.admin',
        'https://www.googleapis.com/auth/logging.read',
        'https://www.googleapis.com/auth/logging.write',
    )

    DEFAULT_HOST: str = 'logging.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for
                billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are supplied.
        """
        # Default to port 443 (HTTPS) when given a bare hostname.
        self._host = host if ':' in host else host + ':443'

        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        # Remember the effective scopes.
        self._scopes = scopes or self.AUTH_SCOPES

        # Resolve credentials: explicit object, credentials file, or
        # application-default, in that order of precedence.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs, quota_project_id=quota_project_id)

        self._credentials = credentials

    # TODO(busunkim): This method lives on the base transport to avoid
    # duplication across transport classes; delete it once the minimum
    # required google-auth version is >= 1.25.0.
    @classmethod
    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
        new_enough = _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        )
        if new_enough:
            # Newer google-auth distinguishes caller scopes from defaults.
            return {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        return {"scopes": scopes or cls.AUTH_SCOPES}

    def _prep_wrapped_messages(self, client_info):
        """Precompute retry/timeout-wrapped versions of every RPC."""
        def _retry_policy(deadline):
            # Every RPC retries the same trio of transient errors with
            # identical backoff; only the overall deadline differs.
            return retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.InternalServerError,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=deadline,
            )

        def _wrap(method, deadline):
            # default_timeout matches the retry deadline for each method.
            return gapic_v1.method.wrap_method(
                method,
                default_retry=_retry_policy(deadline),
                default_timeout=deadline,
                client_info=client_info,
            )

        self._wrapped_methods = {
            self.delete_log: _wrap(self.delete_log, 60.0),
            self.write_log_entries: _wrap(self.write_log_entries, 60.0),
            self.list_log_entries: _wrap(self.list_log_entries, 60.0),
            self.list_monitored_resource_descriptors: _wrap(
                self.list_monitored_resource_descriptors, 60.0),
            self.list_logs: _wrap(self.list_logs, 60.0),
            # Streaming tail gets a much longer (1 hour) deadline.
            self.tail_log_entries: _wrap(self.tail_log_entries, 3600.0),
        }

    @property
    def delete_log(self) -> Callable[
            [logging.DeleteLogRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def write_log_entries(self) -> Callable[
            [logging.WriteLogEntriesRequest],
            Union[logging.WriteLogEntriesResponse,
                  Awaitable[logging.WriteLogEntriesResponse]]]:
        raise NotImplementedError()

    @property
    def list_log_entries(self) -> Callable[
            [logging.ListLogEntriesRequest],
            Union[logging.ListLogEntriesResponse,
                  Awaitable[logging.ListLogEntriesResponse]]]:
        raise NotImplementedError()

    @property
    def list_monitored_resource_descriptors(self) -> Callable[
            [logging.ListMonitoredResourceDescriptorsRequest],
            Union[logging.ListMonitoredResourceDescriptorsResponse,
                  Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]]:
        raise NotImplementedError()

    @property
    def list_logs(self) -> Callable[
            [logging.ListLogsRequest],
            Union[logging.ListLogsResponse,
                  Awaitable[logging.ListLogsResponse]]]:
        raise NotImplementedError()

    @property
    def tail_log_entries(self) -> Callable[
            [logging.TailLogEntriesRequest],
            Union[logging.TailLogEntriesResponse,
                  Awaitable[logging.TailLogEntriesResponse]]]:
        raise NotImplementedError()


__all__ = (
    'LoggingServiceV2Transport',
)
mode 100644 index 000000000000..a8011ec63491 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -0,0 +1,398 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 # type: ignore +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO + + +class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): + """gRPC backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Returns: + Callable[[~.DeleteLogRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_log' not in self._stubs: + self._stubs['delete_log'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log'] + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + logging.WriteLogEntriesResponse]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Returns: + Callable[[~.WriteLogEntriesRequest], + ~.WriteLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write_log_entries' not in self._stubs: + self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=logging.WriteLogEntriesRequest.serialize, + response_deserializer=logging.WriteLogEntriesResponse.deserialize, + ) + return self._stubs['write_log_entries'] + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + logging.ListLogEntriesResponse]: + r"""Return a callable for the list log entries method over gRPC. + + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. 
+ + Returns: + Callable[[~.ListLogEntriesRequest], + ~.ListLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_entries' not in self._stubs: + self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=logging.ListLogEntriesRequest.serialize, + response_deserializer=logging.ListLogEntriesResponse.deserialize, + ) + return self._stubs['list_log_entries'] + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + logging.ListMonitoredResourceDescriptorsResponse]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists the descriptors for monitored resource types + used by Logging. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + ~.ListMonitoredResourceDescriptorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + logging.ListLogsResponse]: + r"""Return a callable for the list logs method over gRPC. + + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Returns: + Callable[[~.ListLogsRequest], + ~.ListLogsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_logs' not in self._stubs: + self._stubs['list_logs'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs['list_logs'] + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + logging.TailLogEntriesResponse]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + ~.TailLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'tail_log_entries' not in self._stubs: + self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + '/google.logging.v2.LoggingServiceV2/TailLogEntries', + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs['tail_log_entries'] + + +__all__ = ( + 'LoggingServiceV2GrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..72b4fbf64e70 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,402 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 # type: ignore +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import LoggingServiceV2GrpcTransport + + +class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): + """gRPC AsyncIO backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log. The log + reappears if it receives new entries. Log entries + written shortly before the delete operation might not be + deleted. Entries received after the delete operation + with a timestamp before the operation will be deleted. + + Returns: + Callable[[~.DeleteLogRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_log' not in self._stubs: + self._stubs['delete_log'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log'] + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + Awaitable[logging.WriteLogEntriesResponse]]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. 
A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Returns: + Callable[[~.WriteLogEntriesRequest], + Awaitable[~.WriteLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write_log_entries' not in self._stubs: + self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=logging.WriteLogEntriesRequest.serialize, + response_deserializer=logging.WriteLogEntriesResponse.deserialize, + ) + return self._stubs['write_log_entries'] + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + Awaitable[logging.ListLogEntriesResponse]]: + r"""Return a callable for the list log entries method over gRPC. + + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Returns: + Callable[[~.ListLogEntriesRequest], + Awaitable[~.ListLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_log_entries' not in self._stubs: + self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=logging.ListLogEntriesRequest.serialize, + response_deserializer=logging.ListLogEntriesResponse.deserialize, + ) + return self._stubs['list_log_entries'] + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists the descriptors for monitored resource types + used by Logging. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + Awaitable[~.ListMonitoredResourceDescriptorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + Awaitable[logging.ListLogsResponse]]: + r"""Return a callable for the list logs method over gRPC. + + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. 
+ + Returns: + Callable[[~.ListLogsRequest], + Awaitable[~.ListLogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_logs' not in self._stubs: + self._stubs['list_logs'] = self.grpc_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs['list_logs'] + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + Awaitable[logging.TailLogEntriesResponse]]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + Awaitable[~.TailLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'tail_log_entries' not in self._stubs: + self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + '/google.logging.v2.LoggingServiceV2/TailLogEntries', + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs['tail_log_entries'] + + +__all__ = ( + 'LoggingServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py new file mode 100644 index 000000000000..1b5d1805cdcd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# Public surface of the metrics_service_v2 service package: re-export the
# synchronous and asyncio client classes so callers can import them from
# the package root.
from .client import MetricsServiceV2Client
from .async_client import MetricsServiceV2AsyncClient

__all__ = (
    'MetricsServiceV2Client',
    'MetricsServiceV2AsyncClient',
)
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from .client import MetricsServiceV2Client + + +class MetricsServiceV2AsyncClient: + """Service for configuring logs-based metrics.""" + + _client: MetricsServiceV2Client + + DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + + log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) + parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) + common_billing_account_path = staticmethod(MetricsServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(MetricsServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(MetricsServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(MetricsServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(MetricsServiceV2Client.common_organization_path) + parse_common_organization_path = 
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            MetricsServiceV2AsyncClient: The constructed client.
        """
        # Delegate to the sync client's classmethod, but rebind it via
        # __func__ so that `cls` is the *async* class and the constructed
        # instance is a MetricsServiceV2AsyncClient.
        return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs)  # type: ignore

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            MetricsServiceV2AsyncClient: The constructed client.
        """
        # Same __func__ rebinding trick as from_service_account_info above:
        # reuse the sync implementation with the async class as `cls`.
        return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs)  # type: ignore

    # Alias kept for backwards compatibility with older client surfaces.
    from_service_account_json = from_service_account_file
+ """ + return self._client.transport + + get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.MetricsServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
    async def list_log_metrics(self,
            request: logging_metrics.ListLogMetricsRequest = None,
            *,
            parent: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListLogMetricsAsyncPager:
        r"""Lists logs-based metrics.

        Args:
            request (:class:`google.cloud.logging_v2.types.ListLogMetricsRequest`):
                The request object. The parameters to ListLogMetrics.
            parent (:class:`str`):
                Required. The name of the project containing the
                metrics:

                ::

                    "projects/[PROJECT_ID]"

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager:
                Result returned from ListLogMetrics.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = logging_metrics.ListLogMetricsRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. Transient server errors
        # (DeadlineExceeded, InternalServerError, ServiceUnavailable) are
        # retried with exponential backoff up to a 60s overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_log_metrics,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.InternalServerError,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing header so the backend can route by parent).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.ListLogMetricsAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ metric_name (:class:`str`): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_metrics.GetLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
    async def create_log_metric(self,
            request: logging_metrics.CreateLogMetricRequest = None,
            *,
            parent: str = None,
            metric: logging_metrics.LogMetric = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> logging_metrics.LogMetric:
        r"""Creates a logs-based metric.

        Args:
            request (:class:`google.cloud.logging_v2.types.CreateLogMetricRequest`):
                The request object. The parameters to CreateLogMetric.
            parent (:class:`str`):
                Required. The resource name of the project in which to
                create the metric:

                ::

                    "projects/[PROJECT_ID]"

                The new metric must be provided in the request.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            metric (:class:`google.cloud.logging_v2.types.LogMetric`):
                Required. The new logs-based metric,
                which must not have an identifier that
                already exists.

                This corresponds to the ``metric`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.logging_v2.types.LogMetric:
                Describes a logs-based metric. The
                value of the metric is the number of log
                entries that match a logs filter in a
                given time interval.
                Logs-based metrics can also be used to
                extract values from logs and create a
                distribution of the values. The
                distribution records the statistics of
                the extracted values along with an
                optional histogram of the values as
                specified by the bucket options.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, metric])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = logging_metrics.CreateLogMetricRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if metric is not None:
            request.metric = metric

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. Note: no default_retry is configured
        # for this call — only a default timeout — so failed creations are
        # not automatically retried.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_log_metric,
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing header so the backend can route by parent).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def update_log_metric(self,
            request: logging_metrics.UpdateLogMetricRequest = None,
            *,
            metric_name: str = None,
            metric: logging_metrics.LogMetric = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> logging_metrics.LogMetric:
        r"""Creates or updates a logs-based metric.

        Args:
            request (:class:`google.cloud.logging_v2.types.UpdateLogMetricRequest`):
                The request object. The parameters to UpdateLogMetric.
            metric_name (:class:`str`):
                Required. The resource name of the metric to update:

                ::

                    "projects/[PROJECT_ID]/metrics/[METRIC_ID]"

                The updated metric must be provided in the request and
                its ``name`` field must be the same as ``[METRIC_ID]``
                If the metric does not exist in ``[PROJECT_ID]``, then a
                new metric is created.

                This corresponds to the ``metric_name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            metric (:class:`google.cloud.logging_v2.types.LogMetric`):
                Required. The updated metric.
                This corresponds to the ``metric`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.logging_v2.types.LogMetric:
                Describes a logs-based metric. The
                value of the metric is the number of log
                entries that match a logs filter in a
                given time interval.
                Logs-based metrics can also be used to
                extract values from logs and create a
                distribution of the values. The
                distribution records the statistics of
                the extracted values along with an
                optional histogram of the values as
                specified by the bucket options.

        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([metric_name, metric])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = logging_metrics.UpdateLogMetricRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if metric_name is not None:
            request.metric_name = metric_name
        if metric is not None:
            request.metric = metric

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. Transient server errors are retried
        # with exponential backoff up to a 60s overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_log_metric,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.InternalServerError,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing header keyed on the metric name).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("metric_name", request.metric_name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    async def delete_log_metric(self,
            request: logging_metrics.DeleteLogMetricRequest = None,
            *,
            metric_name: str = None,
            retry: retries.Retry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> None:
        r"""Deletes a logs-based metric.

        Args:
            request (:class:`google.cloud.logging_v2.types.DeleteLogMetricRequest`):
                The request object. The parameters to DeleteLogMetric.
            metric_name (:class:`str`):
                Required. The resource name of the metric to delete:

                ::

                    "projects/[PROJECT_ID]/metrics/[METRIC_ID]"

                This corresponds to the ``metric_name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([metric_name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = logging_metrics.DeleteLogMetricRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if metric_name is not None:
            request.metric_name = metric_name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling. Transient server errors are retried
        # with exponential backoff up to a 60s overall deadline.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_log_metric,
            default_retry=retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.InternalServerError,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            ),
            default_timeout=60.0,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here (routing header keyed on the metric name).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("metric_name", request.metric_name),
            )),
        )

        # Send the request. The RPC returns Empty, so nothing is returned
        # to the caller.
        await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )
# Module-level client info sent as part of the user-agent on every request.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-logging",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package not installed (e.g. running from a source checkout); fall back
    # to a ClientInfo without a gapic_version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = (
    "MetricsServiceV2AsyncClient",
)
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import MetricsServiceV2GrpcTransport +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport + + +class MetricsServiceV2ClientMeta(type): + """Metaclass for the MetricsServiceV2 client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] + _transport_registry["grpc"] = MetricsServiceV2GrpcTransport + _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[MetricsServiceV2Transport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class MetricsServiceV2Client(metaclass=MetricsServiceV2ClientMeta): + """Service for configuring logs-based metrics.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2Client: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + MetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsServiceV2Transport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def log_metric_path(project: str,metric: str,) -> str: + """Returns a fully-qualified log_metric string.""" + return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + + @staticmethod + def parse_log_metric_path(path: str) -> Dict[str,str]: + """Parses a log_metric path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def 
parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, MetricsServiceV2Transport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the metrics service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, MetricsServiceV2Transport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, MetricsServiceV2Transport): + # transport is a MetricsServiceV2Transport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_log_metrics(self, + request: logging_metrics.ListLogMetricsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLogMetricsPager: + r"""Lists logs-based metrics. + + Args: + request (google.cloud.logging_v2.types.ListLogMetricsRequest): + The request object. The parameters to ListLogMetrics. 
+ parent (str): + Required. The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: + Result returned from ListLogMetrics. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.ListLogMetricsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogMetricsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_log_metric(self, + request: logging_metrics.GetLogMetricRequest = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Gets a logs-based metric. + + Args: + request (google.cloud.logging_v2.types.GetLogMetricRequest): + The request object. The parameters to GetLogMetric. + metric_name (str): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.GetLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_log_metric(self, + request: logging_metrics.CreateLogMetricRequest = None, + *, + parent: str = None, + metric: logging_metrics.LogMetric = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates a logs-based metric. + + Args: + request (google.cloud.logging_v2.types.CreateLogMetricRequest): + The request object. The parameters to CreateLogMetric. + parent (str): + Required. 
The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The new logs-based metric, + which must not have an identifier that + already exists. + + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metric]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.CreateLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_log_metric(self, + request: logging_metrics.UpdateLogMetricRequest = None, + *, + metric_name: str = None, + metric: logging_metrics.LogMetric = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates or updates a logs-based metric. + + Args: + request (google.cloud.logging_v2.types.UpdateLogMetricRequest): + The request object. The parameters to UpdateLogMetric. + metric_name (str): + Required. The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and + it's ``name`` field must be the same as ``[METRIC_ID]`` + If the metric does not exist in ``[PROJECT_ID]``, then a + new metric is created. + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The updated metric. 
+ This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name, metric]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.UpdateLogMetricRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_log_metric(self, + request: logging_metrics.DeleteLogMetricRequest = None, + *, + metric_name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a logs-based metric. + + Args: + request (google.cloud.logging_v2.types.DeleteLogMetricRequest): + The request object. The parameters to DeleteLogMetric. + metric_name (str): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_metrics.DeleteLogMetricRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-logging", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "MetricsServiceV2Client", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py new file mode 100644 index 000000000000..f6bf04e4f968 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional

from google.cloud.logging_v2.types import logging_metrics


class ListLogMetricsPager:
    """A pager for iterating through ``list_log_metrics`` requests.

    Wraps an initial
    :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` and exposes
    an ``__iter__`` method that walks the ``metrics`` field, transparently
    issuing further ``ListLogMetrics`` calls while a ``next_page_token`` is
    present on the most recent response.

    All attributes of :class:`google.cloud.logging_v2.types.ListLogMetricsResponse`
    are available on the pager; attribute lookups are delegated to the most
    recently fetched response.
    """
    def __init__(self,
            method: Callable[..., logging_metrics.ListLogMetricsResponse],
            request: logging_metrics.ListLogMetricsRequest,
            response: logging_metrics.ListLogMetricsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.logging_v2.types.ListLogMetricsRequest):
                The initial request object.
            response (google.cloud.logging_v2.types.ListLogMetricsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Coerce so page_token can be mutated safely on subsequent fetches.
        self._request = logging_metrics.ListLogMetricsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response object.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[logging_metrics.ListLogMetricsResponse]:
        """Yield each page of results, fetching the next page on demand."""
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[logging_metrics.LogMetric]:
        """Yield individual ``LogMetric`` items across every page."""
        for page in self.pages:
            yield from page.metrics

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class ListLogMetricsAsyncPager:
    """An async pager for iterating through ``list_log_metrics`` requests.

    Wraps an initial
    :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` and exposes
    an ``__aiter__`` method that walks the ``metrics`` field, transparently
    awaiting further ``ListLogMetrics`` calls while a ``next_page_token`` is
    present on the most recent response.

    All attributes of :class:`google.cloud.logging_v2.types.ListLogMetricsResponse`
    are available on the pager; attribute lookups are delegated to the most
    recently fetched response.
    """
    def __init__(self,
            method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]],
            request: logging_metrics.ListLogMetricsRequest,
            response: logging_metrics.ListLogMetricsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.logging_v2.types.ListLogMetricsRequest):
                The initial request object.
            response (google.cloud.logging_v2.types.ListLogMetricsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Coerce so page_token can be mutated safely on subsequent fetches.
        self._request = logging_metrics.ListLogMetricsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Unknown attributes fall through to the latest response object.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[logging_metrics.ListLogMetricsResponse]:
        """Yield each page of results, awaiting the next page on demand."""
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterable[logging_metrics.LogMetric]:
        """Return an async iterator over individual ``LogMetric`` items."""
        async def flatten():
            # Walk pages lazily; surface each metric one at a time.
            async for page in self.pages:
                for item in page.metrics:
                    yield item

        return flatten()

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)

# ---------------------------------------------------------------------------
# new file: tests/integration/goldens/logging/google/cloud/logging_v2/
#           services/metrics_service_v2/transports/__init__.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import MetricsServiceV2Transport
from .grpc import MetricsServiceV2GrpcTransport
from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport


# Registry mapping transport names (as passed to the client's ``transport``
# argument) to their implementing classes; insertion order is preserved so
# 'grpc' remains the default lookup.
_transport_registry: Dict[str, Type[MetricsServiceV2Transport]] = OrderedDict([
    ('grpc', MetricsServiceV2GrpcTransport),
    ('grpc_asyncio', MetricsServiceV2GrpcAsyncIOTransport),
])

__all__ = (
    'MetricsServiceV2Transport',
    'MetricsServiceV2GrpcTransport',
    'MetricsServiceV2GrpcAsyncIOTransport',
)

# ---------------------------------------------------------------------------
# new file: tests/integration/goldens/logging/google/cloud/logging_v2/
#           services/metrics_service_v2/transports/base.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources

import google.auth  # type: ignore
import google.api_core  # type: ignore
from google.api_core import exceptions as core_exceptions  # type: ignore
from google.api_core import gapic_v1  # type: ignore
from google.api_core import retry as retries  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore

from google.cloud.logging_v2.types import logging_metrics
from google.protobuf import empty_pb2  # type: ignore

# Default client info: advertise the installed library version if the
# distribution is present; otherwise fall back to an unversioned info object.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-logging',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# Determine the google-auth version so scope kwargs can be adapted below.
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:  # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None


class MetricsServiceV2Transport(abc.ABC):
    """Abstract transport class for MetricsServiceV2."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/cloud-platform.read-only',
        'https://www.googleapis.com/auth/logging.admin',
        'https://www.googleapis.com/auth/logging.read',
        'https://www.googleapis.com/auth/logging.write',
    )

    DEFAULT_HOST: str = 'logging.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        # Save the scopes.
        self._scopes = scopes or self.AUTH_SCOPES

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )

        # Save the credentials.
        self._credentials = credentials

    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required versions of google-auth is increased.

    # TODO: Remove this function once google-auth >= 1.25.0 is required
    @classmethod
    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
        # google-auth >= 1.25.0 accepts default_scopes separately from scopes.
        modern_auth = _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        )
        if modern_auth:
            return {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        return {"scopes": scopes or cls.AUTH_SCOPES}

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        def _idempotent_retry() -> retries.Retry:
            # Retry policy shared by the read/idempotent RPCs; a fresh Retry
            # instance is built per method, matching the generated layout.
            return retries.Retry(
                initial=0.1,
                maximum=60.0,
                multiplier=1.3,
                predicate=retries.if_exception_type(
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.InternalServerError,
                    core_exceptions.ServiceUnavailable,
                ),
                deadline=60.0,
            )

        self._wrapped_methods = {
            self.list_log_metrics: gapic_v1.method.wrap_method(
                self.list_log_metrics,
                default_retry=_idempotent_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_log_metric: gapic_v1.method.wrap_method(
                self.get_log_metric,
                default_retry=_idempotent_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            # CreateLogMetric is not idempotent, so it gets no default retry.
            self.create_log_metric: gapic_v1.method.wrap_method(
                self.create_log_metric,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_log_metric: gapic_v1.method.wrap_method(
                self.update_log_metric,
                default_retry=_idempotent_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_log_metric: gapic_v1.method.wrap_method(
                self.delete_log_metric,
                default_retry=_idempotent_retry(),
                default_timeout=60.0,
                client_info=client_info,
            ),
        }

    @property
    def list_log_metrics(self) -> Callable[
            [logging_metrics.ListLogMetricsRequest],
            Union[
                logging_metrics.ListLogMetricsResponse,
                Awaitable[logging_metrics.ListLogMetricsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_log_metric(self) -> Callable[
            [logging_metrics.GetLogMetricRequest],
            Union[
                logging_metrics.LogMetric,
                Awaitable[logging_metrics.LogMetric]
            ]]:
        raise NotImplementedError()

    @property
    def create_log_metric(self) -> Callable[
            [logging_metrics.CreateLogMetricRequest],
            Union[
                logging_metrics.LogMetric,
                Awaitable[logging_metrics.LogMetric]
            ]]:
        raise NotImplementedError()

    @property
    def update_log_metric(self) -> Callable[
            [logging_metrics.UpdateLogMetricRequest],
            Union[
                logging_metrics.LogMetric,
                Awaitable[logging_metrics.LogMetric]
            ]]:
        raise NotImplementedError()

    @property
    def delete_log_metric(self) -> Callable[
            [logging_metrics.DeleteLogMetricRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()


__all__ = (
    'MetricsServiceV2Transport',
)

# ---------------------------------------------------------------------------
# new file: tests/integration/goldens/logging/google/cloud/logging_v2/
#           services/metrics_service_v2/transports/grpc.py
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import empty_pb2 # type: ignore +from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO + + +class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): + """gRPC backend transport for MetricsServiceV2. + + Service for configuring logs-based metrics. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # ``False`` (rather than ``None``) makes the base transport skip
            # the ADC lookup entirely — a provided channel already carries
            # its own credentials.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            # Deprecated mTLS path: the endpoint override implies a mutual
            # TLS channel built from ``client_cert_source`` (if given) or
            # application-default SSL credentials.
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                # Modern mTLS path; explicit ssl_channel_credentials wins
                # over the callback.
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                credentials=self._credentials,
                credentials_file=credentials_file,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                # -1 removes gRPC's default 4 MiB message-size caps in both
                # directions.
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. 
This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
                       host: str = 'logging.googleapis.com',
                       credentials: ga_credentials.Credentials = None,
                       credentials_file: str = None,
                       scopes: Optional[Sequence[str]] = None,
                       quota_project_id: Optional[str] = None,
                       **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        # Delegate channel construction (and credential/scope resolution) to
        # api-core, supplying this service's default scopes and host.
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
+ """ + return self._grpc_channel + + @property + def list_log_metrics(self) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + logging_metrics.ListLogMetricsResponse]: + r"""Return a callable for the list log metrics method over gRPC. + + Lists logs-based metrics. + + Returns: + Callable[[~.ListLogMetricsRequest], + ~.ListLogMetricsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_metrics' not in self._stubs: + self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=logging_metrics.ListLogMetricsRequest.serialize, + response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, + ) + return self._stubs['list_log_metrics'] + + @property + def get_log_metric(self) -> Callable[ + [logging_metrics.GetLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the get log metric method over gRPC. + + Gets a logs-based metric. + + Returns: + Callable[[~.GetLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_log_metric' not in self._stubs: + self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=logging_metrics.GetLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['get_log_metric'] + + @property + def create_log_metric(self) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the create log metric method over gRPC. + + Creates a logs-based metric. + + Returns: + Callable[[~.CreateLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_log_metric' not in self._stubs: + self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['create_log_metric'] + + @property + def update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_log_metric' not in self._stubs: + self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['update_log_metric'] + + @property + def delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_log_metric' not in self._stubs: + self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log_metric'] + + +__all__ = ( + 'MetricsServiceV2GrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py new file mode 100644 index 000000000000..a937e3b793fe --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,357 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import empty_pb2 # type: ignore +from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import MetricsServiceV2GrpcTransport + + +class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): + """gRPC AsyncIO backend transport for MetricsServiceV2. + + Service for configuring logs-based metrics. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def list_log_metrics(self) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Awaitable[logging_metrics.ListLogMetricsResponse]]: + r"""Return a callable for the list log metrics method over gRPC. + + Lists logs-based metrics. + + Returns: + Callable[[~.ListLogMetricsRequest], + Awaitable[~.ListLogMetricsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_metrics' not in self._stubs: + self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', + request_serializer=logging_metrics.ListLogMetricsRequest.serialize, + response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, + ) + return self._stubs['list_log_metrics'] + + @property + def get_log_metric(self) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the get log metric method over gRPC. + + Gets a logs-based metric. + + Returns: + Callable[[~.GetLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_log_metric' not in self._stubs: + self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', + request_serializer=logging_metrics.GetLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['get_log_metric'] + + @property + def create_log_metric(self) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the create log metric method over gRPC. + + Creates a logs-based metric. + + Returns: + Callable[[~.CreateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_log_metric' not in self._stubs: + self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['create_log_metric'] + + @property + def update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_log_metric' not in self._stubs: + self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['update_log_metric'] + + @property + def delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_log_metric' not in self._stubs: + self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log_metric'] + + +__all__ = ( + 'MetricsServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py new file mode 100644 index 000000000000..38c93c541801 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, +) +from .logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from .logging_config import ( + BigQueryOptions, + CmekSettings, + CreateBucketRequest, + CreateExclusionRequest, + CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + GetSinkRequest, + GetViewRequest, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LogBucket, + LogExclusion, + LogSink, + LogView, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSinkRequest, + UpdateViewRequest, + LifecycleState, +) +from .logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) + +__all__ = ( + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'DeleteLogRequest', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'BigQueryOptions', + 'CmekSettings', + 'CreateBucketRequest', + 'CreateExclusionRequest', + 'CreateSinkRequest', + 'CreateViewRequest', + 'DeleteBucketRequest', 
+ 'DeleteExclusionRequest', + 'DeleteSinkRequest', + 'DeleteViewRequest', + 'GetBucketRequest', + 'GetCmekSettingsRequest', + 'GetExclusionRequest', + 'GetSinkRequest', + 'GetViewRequest', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'ListSinksRequest', + 'ListSinksResponse', + 'ListViewsRequest', + 'ListViewsResponse', + 'LogBucket', + 'LogExclusion', + 'LogSink', + 'LogView', + 'UndeleteBucketRequest', + 'UpdateBucketRequest', + 'UpdateCmekSettingsRequest', + 'UpdateExclusionRequest', + 'UpdateSinkRequest', + 'UpdateViewRequest', + 'LifecycleState', + 'CreateLogMetricRequest', + 'DeleteLogMetricRequest', + 'GetLogMetricRequest', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'LogMetric', + 'UpdateLogMetricRequest', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py new file mode 100644 index 000000000000..45b1c8858763 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -0,0 +1,321 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + }, +) + + +class LogEntry(proto.Message): + r"""An individual entry in a log. + Attributes: + log_name (str): + Required. The resource name of the log to which this log + entry belongs: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + A project number may be used in place of PROJECT_ID. The + project number is translated to its corresponding PROJECT_ID + internally and the ``log_name`` field will contain + PROJECT_ID in queries and exports. + + ``[LOG_ID]`` must be URL-encoded within ``log_name``. + Example: + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can + only include the following characters: upper and lower case + alphanumeric characters, forward-slash, underscore, hyphen, + and period. + + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual but the forward-slash is removed. + Listing the log entry will not show the leading slash and + filtering for a log name with a leading slash will never + return any results. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Required. The monitored resource that + produced this log entry. 
+ Example: a log entry that reports a database + error would be associated with the monitored + resource designating the particular database + that reported the error. + proto_payload (google.protobuf.any_pb2.Any): + The log entry payload, represented as a + protocol buffer. Some Google Cloud Platform + services use this field for their log entry + payloads. + The following protocol buffer types are + supported; user-defined types are not supported: + + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog". + text_payload (str): + The log entry payload, represented as a + Unicode string (UTF-8). + json_payload (google.protobuf.struct_pb2.Struct): + The log entry payload, represented as a + structure that is expressed as a JSON object. + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age + and to enforce the logs retention period. If this field is + omitted in a new log entry, then Logging assigns it the + current time. Timestamps have nanosecond accuracy, but + trailing zeros in the fractional seconds might be omitted + when the timestamp is displayed. + + Incoming log entries must have timestamps that don't exceed + the `logs retention + period `__ + in the past, and that don't exceed 24 hours in the future. + Log entries outside those time boundaries aren't ingested by + Logging. + receive_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the log entry was + received by Logging. + severity (google.logging.type.log_severity_pb2.LogSeverity): + Optional. The severity of the log entry. The default value + is ``LogSeverity.DEFAULT``. + insert_id (str): + Optional. A unique identifier for the log entry. 
If you + provide a value, then Logging considers other log entries in + the same project, with the same ``timestamp``, and with the + same ``insert_id`` to be duplicates which are removed in a + single query result. However, there are no guarantees of + de-duplication in the export of logs. + + If the ``insert_id`` is omitted when writing a log entry, + the Logging API assigns its own unique identifier in this + field. + + In queries, the ``insert_id`` is also used to order log + entries that have the same ``log_name`` and ``timestamp`` + values. + http_request (google.logging.type.http_request_pb2.HttpRequest): + Optional. Information about the HTTP request + associated with this log entry, if applicable. + labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): + Optional. A set of user-defined (key, value) + data that provides additional information about + the log entry. + operation (google.cloud.logging_v2.types.LogEntryOperation): + Optional. Information about an operation + associated with the log entry, if applicable. + trace (str): + Optional. Resource name of the trace associated with the log + entry, if any. If it contains a relative resource name, the + name is assumed to be relative to + ``//tracing.googleapis.com``. Example: + ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + span_id (str): + Optional. The span ID within the trace associated with the + log entry. + + For Trace spans, this is the same format that the Trace API + v2 uses: a 16-character hexadecimal encoding of an 8-byte + array, such as ``000000000000004a``. + trace_sampled (bool): + Optional. The sampling decision of the trace associated with + the log entry. + + True means that the trace resource name in the ``trace`` + field was sampled for storage in a trace backend. False + means that the trace was not sampled for storage when this + log entry was written, or the sampling decision was unknown + at the time. 
A non-sampled ``trace`` value is still useful + as a request correlation identifier. The default is False. + source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): + Optional. Source code location information + associated with the log entry, if any. + """ + + log_name = proto.Field( + proto.STRING, + number=12, + ) + resource = proto.Field( + proto.MESSAGE, + number=8, + message=monitored_resource_pb2.MonitoredResource, + ) + proto_payload = proto.Field( + proto.MESSAGE, + number=2, + oneof='payload', + message=any_pb2.Any, + ) + text_payload = proto.Field( + proto.STRING, + number=3, + oneof='payload', + ) + json_payload = proto.Field( + proto.MESSAGE, + number=6, + oneof='payload', + message=struct_pb2.Struct, + ) + timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + receive_timestamp = proto.Field( + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + severity = proto.Field( + proto.ENUM, + number=10, + enum=log_severity_pb2.LogSeverity, + ) + insert_id = proto.Field( + proto.STRING, + number=4, + ) + http_request = proto.Field( + proto.MESSAGE, + number=7, + message=http_request_pb2.HttpRequest, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + operation = proto.Field( + proto.MESSAGE, + number=15, + message='LogEntryOperation', + ) + trace = proto.Field( + proto.STRING, + number=22, + ) + span_id = proto.Field( + proto.STRING, + number=27, + ) + trace_sampled = proto.Field( + proto.BOOL, + number=30, + ) + source_location = proto.Field( + proto.MESSAGE, + number=23, + message='LogEntrySourceLocation', + ) + + +class LogEntryOperation(proto.Message): + r"""Additional information about a potentially long-running + operation with which a log entry is associated. + + Attributes: + id (str): + Optional. An arbitrary operation identifier. + Log entries with the same identifier are assumed + to be part of the same operation. + producer (str): + Optional. 
An arbitrary producer identifier. The combination + of ``id`` and ``producer`` must be globally unique. Examples + for ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first (bool): + Optional. Set this to True if this is the + first log entry in the operation. + last (bool): + Optional. Set this to True if this is the + last log entry in the operation. + """ + + id = proto.Field( + proto.STRING, + number=1, + ) + producer = proto.Field( + proto.STRING, + number=2, + ) + first = proto.Field( + proto.BOOL, + number=3, + ) + last = proto.Field( + proto.BOOL, + number=4, + ) + + +class LogEntrySourceLocation(proto.Message): + r"""Additional information about the source code location that + produced the log entry. + + Attributes: + file (str): + Optional. Source file name. Depending on the + runtime environment, this might be a simple name + or a fully-qualified name. + line (int): + Optional. Line within the source file. + 1-based; 0 indicates no line number available. + function (str): + Optional. Human-readable name of the function or method + being invoked, with optional context such as the class or + package name. This information may be used in contexts such + as the logs viewer, where a file and line number are less + meaningful. The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). 
+ """ + + file = proto.Field( + proto.STRING, + number=1, + ) + line = proto.Field( + proto.INT64, + number=2, + ) + function = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py new file mode 100644 index 000000000000..cfae1781a75d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -0,0 +1,573 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'DeleteLogRequest', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + }, +) + + +class DeleteLogRequest(proto.Message): + r"""The parameters to DeleteLog. + Attributes: + log_name (str): + Required. The resource name of the log to delete: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + """ + + log_name = proto.Field( + proto.STRING, + number=1, + ) + + +class WriteLogEntriesRequest(proto.Message): + r"""The parameters to WriteLogEntries. + Attributes: + log_name (str): + Optional. A default log resource name that is assigned to + all log entries in ``entries`` that do not specify a value + for ``log_name``: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + ``[LOG_ID]`` must be URL-encoded. 
For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed on + each project, organization, billing account, or folder that + is receiving new log entries, whether the resource is + specified in ``logName`` or in an individual log entry. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + Optional. Default labels that are added to the ``labels`` + field of all log entries in ``entries``. If a log entry + already has a label with the same key as a label in this + parameter, then the log entry's label is not changed. See + [LogEntry][google.logging.v2.LogEntry]. + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + Required. The log entries to send to Logging. The order of + log entries in this list does not matter. Values supplied in + this method's ``log_name``, ``resource``, and ``labels`` + fields are copied into those log entries in this list that + do not include values for their corresponding fields. For + more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing in + log entries, then this method supplies the current time or a + unique identifier, respectively. The supplied values are + chosen so that, among the log entries that did not supply + their own values, the entries earlier in the list will sort + before the entries later in the list. See the + ``entries.list`` method. 
+ + Log entries with timestamps that are more than the `logs + retention + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those log + entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling this + method for each individual log entry. + partial_success (bool): + Optional. Whether valid entries should be written even if + some other entries fail due to INVALID_ARGUMENT or + PERMISSION_DENIED errors. If any entry is not written, then + the response status is the error associated with one of the + failed entries and the response includes error details keyed + by the entries' zero-based index in the ``entries.write`` + method. + dry_run (bool): + Optional. If true, the request should expect + normal response, but the entries won't be + persisted nor exported. Useful for checking + whether the logging API endpoints are working + properly before sending valuable data. + """ + + log_name = proto.Field( + proto.STRING, + number=1, + ) + resource = proto.Field( + proto.MESSAGE, + number=2, + message=monitored_resource_pb2.MonitoredResource, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + entries = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=log_entry.LogEntry, + ) + partial_success = proto.Field( + proto.BOOL, + number=5, + ) + dry_run = proto.Field( + proto.BOOL, + number=6, + ) + + +class WriteLogEntriesResponse(proto.Message): + r"""Result returned from WriteLogEntries. """ + + +class WriteLogEntriesPartialErrors(proto.Message): + r"""Error details for WriteLogEntries with partial success. 
+ Attributes: + log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + When ``WriteLogEntriesRequest.partial_success`` is true, + records the error status for entries that were not written + due to a permanent error, keyed by the entry's zero-based + index in ``WriteLogEntriesRequest.entries``. + + Failed requests for which no entries are written will not + include per-entry errors. + """ + + log_entry_errors = proto.MapField( + proto.INT32, + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + + +class ListLogEntriesRequest(proto.Message): + r"""The parameters to ``ListLogEntries``. + Attributes: + resource_names (Sequence[str]): + Required. Names of one or more parent resources from which + to retrieve log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + Projects listed in the ``project_ids`` field are added to + this list. + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Queries `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. Referencing a parent resource that is + not listed in ``resource_names`` will cause the filter to + return no results. The maximum length of the filter is 20000 + characters. + order_by (str): + Optional. How the results should be sorted. 
Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size (int): + Optional. The maximum number of results to return from this + request. Default is 50. If the value is negative or exceeds + 1000, the request is rejected. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``page_token`` must be the value of ``next_page_token`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + order_by = proto.Field( + proto.STRING, + number=3, + ) + page_size = proto.Field( + proto.INT32, + number=4, + ) + page_token = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLogEntriesResponse(proto.Message): + r"""Result returned from ``ListLogEntries``. + Attributes: + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that + more entries may exist. See ``nextPageToken`` for more + information. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. 
+ + If a value for ``next_page_token`` appears and the + ``entries`` field is empty, it means that the search found + no log entries so far but it did not have time to search all + the possible log entries. Retry the method with this value + for ``page_token`` to continue the search. Alternatively, + consider speeding up the search by changing your filter to + specify a single log name or resource type, or to narrow the + time range of the search. + """ + + @property + def raw_page(self): + return self + + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListMonitoredResourceDescriptorsRequest(proto.Message): + r"""The parameters to ListMonitoredResourceDescriptors + Attributes: + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + page_size = proto.Field( + proto.INT32, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListMonitoredResourceDescriptorsResponse(proto.Message): + r"""Result returned from ListMonitoredResourceDescriptors. + Attributes: + resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + A list of resource descriptors. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. 
+ """ + + @property + def raw_page(self): + return self + + resource_descriptors = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListLogsRequest(proto.Message): + r"""The parameters to ListLogs. + Attributes: + parent (str): + Required. The resource name that owns the logs: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + resource_names (Sequence[str]): + Optional. The resource name that owns the logs: + projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + + To support legacy queries, it could also be: + "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + resource_names = proto.RepeatedField( + proto.STRING, + number=8, + ) + + +class ListLogsResponse(proto.Message): + r"""Result returned from ListLogs. + Attributes: + log_names (Sequence[str]): + A list of log names. For example, + ``"projects/my-project/logs/syslog"`` or + ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + log_names = proto.RepeatedField( + proto.STRING, + number=3, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class TailLogEntriesRequest(proto.Message): + r"""The parameters to ``TailLogEntries``. + Attributes: + resource_names (Sequence[str]): + Required. Name of a parent resource from which to retrieve + log entries: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + May alternatively be one or more views: + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + filter (str): + Optional. A filter that chooses which log entries to return. + See `Advanced Logs + Filters `__. + Only log entries that match the filter are returned. An + empty filter matches all log entries in the resources listed + in ``resource_names``. 
Referencing a parent resource that is + not in ``resource_names`` will cause the filter to return no + results. The maximum length of the filter is 20000 + characters. + buffer_window (google.protobuf.duration_pb2.Duration): + Optional. The amount of time to buffer log + entries at the server before being returned to + prevent out of order results due to late + arriving log entries. Valid values are between + 0-60000 milliseconds. Defaults to 2000 + milliseconds. + """ + + resource_names = proto.RepeatedField( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + buffer_window = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class TailLogEntriesResponse(proto.Message): + r"""Result returned from ``TailLogEntries``. + Attributes: + entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + A list of log entries. Each response in the stream will + order entries with increasing values of + ``LogEntry.timestamp``. Ordering is not guaranteed between + separate responses. + suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): + If entries that otherwise would have been + included in the session were not sent back to + the client, counts of relevant entries omitted + from the session with the reason that they were + not included. There will be at most one of each + reason per response. The counts represent the + number of suppressed entries since the last + streamed response. + """ + + class SuppressionInfo(proto.Message): + r"""Information about entries that were omitted from the session. + Attributes: + reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): + The reason that entries were omitted from the + session. + suppressed_count (int): + A lower bound on the count of entries omitted due to + ``reason``. 
+ """ + class Reason(proto.Enum): + r"""An indicator of why entries were omitted.""" + REASON_UNSPECIFIED = 0 + RATE_LIMIT = 1 + NOT_CONSUMED = 2 + + reason = proto.Field( + proto.ENUM, + number=1, + enum='TailLogEntriesResponse.SuppressionInfo.Reason', + ) + suppressed_count = proto.Field( + proto.INT32, + number=2, + ) + + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + suppression_info = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=SuppressionInfo, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py new file mode 100644 index 000000000000..a4b7b2571d7a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -0,0 +1,1457 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LifecycleState', + 'LogBucket', + 'LogView', + 'LogSink', + 'BigQueryOptions', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'CreateBucketRequest', + 'UpdateBucketRequest', + 'GetBucketRequest', + 'DeleteBucketRequest', + 'UndeleteBucketRequest', + 'ListViewsRequest', + 'ListViewsResponse', + 'CreateViewRequest', + 'UpdateViewRequest', + 'GetViewRequest', + 'DeleteViewRequest', + 'ListSinksRequest', + 'ListSinksResponse', + 'GetSinkRequest', + 'CreateSinkRequest', + 'UpdateSinkRequest', + 'DeleteSinkRequest', + 'LogExclusion', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'GetExclusionRequest', + 'CreateExclusionRequest', + 'UpdateExclusionRequest', + 'DeleteExclusionRequest', + 'GetCmekSettingsRequest', + 'UpdateCmekSettingsRequest', + 'CmekSettings', + }, +) + + +class LifecycleState(proto.Enum): + r"""LogBucket lifecycle states.""" + LIFECYCLE_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + DELETE_REQUESTED = 2 + + +class LogBucket(proto.Message): + r"""Describes a repository of logs. + Attributes: + name (str): + The resource name of the bucket. For example: + "projects/my-project-id/locations/my-location/buckets/my-bucket-id + The supported locations are: "global" + + For the location of ``global`` it is unspecified where logs + are actually stored. Once a bucket has been created, the + location can not be changed. + description (str): + Describes this bucket. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + bucket. This is not set for any of the default + buckets. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + bucket. 
+ retention_days (int): + Logs will be retained by default for this + amount of time, after which they will + automatically be deleted. The minimum retention + period is 1 day. If this value is set to zero at + bucket creation time, the default time of 30 + days will be used. + locked (bool): + Whether the bucket has been locked. + The retention period on a locked bucket may not + be changed. Locked buckets may only be deleted + if they are empty. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The bucket lifecycle state. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + retention_days = proto.Field( + proto.INT32, + number=11, + ) + locked = proto.Field( + proto.BOOL, + number=9, + ) + lifecycle_state = proto.Field( + proto.ENUM, + number=12, + enum='LifecycleState', + ) + + +class LogView(proto.Message): + r"""Describes a view over logs in a bucket. + Attributes: + name (str): + The resource name of the view. + For example + "projects/my-project-id/locations/my- + location/buckets/my-bucket-id/views/my-view + description (str): + Describes this view. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + view. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + view. + filter (str): + Filter that restricts which log entries in a bucket are + visible in this view. Filters are restricted to be a logical + AND of ==/!= of any of the following: originating + project/folder/organization/billing account. 
resource type + log id Example: SOURCE("projects/myproject") AND + resource.type = "gce_instance" AND LOG_ID("stdout") + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=3, + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + filter = proto.Field( + proto.STRING, + number=7, + ) + + +class LogSink(proto.Message): + r"""Describes a sink used to export log entries to one of the + following destinations in any project: a Cloud Storage bucket, a + BigQuery dataset, or a Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. The sink must be + created within a project, organization, billing account, or + folder. + + Attributes: + name (str): + Required. The client-assigned sink identifier, unique within + the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include + only the following characters: upper and lower-case + alphanumeric characters, underscores, hyphens, and periods. + First character has to be alphanumeric. + destination (str): + Required. The export destination: + + :: + + "storage.googleapis.com/[GCS_BUCKET]" + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" + "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" + + The sink's ``writer_identity``, set when the sink is + created, must have permission to write to the destination or + else the log entries are not exported. For more information, + see `Exporting Logs with + Sinks `__. + filter (str): + Optional. An `advanced logs + filter `__. + The only exported log entries are those that are in the + resource owning the sink and that match the filter. For + example: + + :: + + logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + description (str): + Optional. 
A description of this sink. + The maximum length of the description is 8000 + characters. + disabled (bool): + Optional. If set to True, then this sink is + disabled and it does not export any log entries. + exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): + Optional. Log entries that match any of the exclusion + filters will not be exported. If a log entry is matched by + both ``filter`` and one of ``exclusion_filters`` it will not + be exported. + output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): + Deprecated. This field is unused. + writer_identity (str): + Output only. An IAM identity—a service account or + group—under which Logging writes the exported log entries to + the sink's destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. + + Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a + Resource `__. + Consult the destination service's documentation to determine + the appropriate IAM roles to assign to the identity. + include_children (bool): + Optional. This field applies only to sinks owned by + organizations and folders. If the field is false, the + default, only the logs owned by the sink's parent resource + are available for export. If the field is true, then logs + from all the projects, folders, and billing accounts + contained in the sink's parent resource are also available + for export. Whether a particular log entry from the children + is exported depends on the sink's filter expression. For + example, if this field is true, then the filter + ``resource.type=gce_instance`` would export all Compute + Engine VM instance log entries from all projects in the + sink's parent. 
To only export entries from certain child + projects, filter on the project part of the log name: + + :: + + logName:("projects/test-project1/" OR "projects/test-project2/") AND + resource.type=gce_instance + bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): + Optional. Options that affect sinks exporting + data to BigQuery. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + sink. + This field may not be present for older sinks. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + sink. + This field may not be present for older sinks. + """ + class VersionFormat(proto.Enum): + r"""Deprecated. This is unused.""" + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + destination = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + description = proto.Field( + proto.STRING, + number=18, + ) + disabled = proto.Field( + proto.BOOL, + number=19, + ) + exclusions = proto.RepeatedField( + proto.MESSAGE, + number=16, + message='LogExclusion', + ) + output_version_format = proto.Field( + proto.ENUM, + number=6, + enum=VersionFormat, + ) + writer_identity = proto.Field( + proto.STRING, + number=8, + ) + include_children = proto.Field( + proto.BOOL, + number=9, + ) + bigquery_options = proto.Field( + proto.MESSAGE, + number=12, + oneof='options', + message='BigQueryOptions', + ) + create_time = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + + +class BigQueryOptions(proto.Message): + r"""Options that change functionality of a sink exporting data to + BigQuery. + + Attributes: + use_partitioned_tables (bool): + Optional. Whether to use `BigQuery's partition + tables `__. 
+ By default, Logging creates dated tables based on the log + entries' timestamps, e.g. syslog_20170523. With partitioned + tables the date suffix is no longer present and `special + query + syntax `__ + has to be used instead. In both cases, tables are sharded + based on UTC timezone. + uses_timestamp_column_partitioning (bool): + Output only. True if new timestamp column based partitioning + is in use, false if legacy ingestion-time partitioning is in + use. All new sinks will have this field set true and will + use timestamp column based partitioning. If + use_partitioned_tables is false, this value has no meaning + and will be false. Legacy sinks using partitioned tables + will have this field set to false. + """ + + use_partitioned_tables = proto.Field( + proto.BOOL, + number=1, + ) + uses_timestamp_column_partitioning = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListBucketsRequest(proto.Message): + r"""The parameters to ``ListBuckets``. + Attributes: + parent (str): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListBucketsResponse(proto.Message): + r"""The response from ListBuckets. + Attributes: + buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): + A list of buckets. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + buckets = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogBucket', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateBucketRequest(proto.Message): + r"""The parameters to ``CreateBucket``. + Attributes: + parent (str): + Required. The resource in which to create the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + + Example: ``"projects/my-logging-project/locations/global"`` + bucket_id (str): + Required. A client-assigned identifier such as + ``"my-bucket"``. Identifiers are limited to 100 characters + and can include only letters, digits, underscores, hyphens, + and periods. + bucket (google.cloud.logging_v2.types.LogBucket): + Required. The new bucket. The region + specified in the new bucket must be compliant + with any Location Restriction Org Policy. The + name field in the bucket is ignored. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + bucket_id = proto.Field( + proto.STRING, + number=2, + ) + bucket = proto.Field( + proto.MESSAGE, + number=3, + message='LogBucket', + ) + + +class UpdateBucketRequest(proto.Message): + r"""The parameters to ``UpdateBucket``. + Attributes: + name (str): + Required. The full resource name of the bucket to update. 
+ + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + Also requires permission + "resourcemanager.projects.updateLiens" to set the locked + property + bucket (google.cloud.logging_v2.types.LogBucket): + Required. The updated bucket. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask that specifies the fields in ``bucket`` + that need an update. A bucket field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=retention_days``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + bucket = proto.Field( + proto.MESSAGE, + number=2, + message='LogBucket', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class GetBucketRequest(proto.Message): + r"""The parameters to ``GetBucket``. + Attributes: + name (str): + Required. The resource name of the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBucketRequest(proto.Message): + r"""The parameters to ``DeleteBucket``. + Attributes: + name (str): + Required. The full resource name of the bucket to delete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class UndeleteBucketRequest(proto.Message): + r"""The parameters to ``UndeleteBucket``. + Attributes: + name (str): + Required. The full resource name of the bucket to undelete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListViewsRequest(proto.Message): + r"""The parameters to ``ListViews``. + Attributes: + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. 
Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListViewsResponse(proto.Message): + r"""The response from ListViews. + Attributes: + views (Sequence[google.cloud.logging_v2.types.LogView]): + A list of views. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + views = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogView', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateViewRequest(proto.Message): + r"""The parameters to ``CreateView``. + Attributes: + parent (str): + Required. The bucket in which to create the view + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + Example: + ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + view_id (str): + Required. The id to use for this view. + view (google.cloud.logging_v2.types.LogView): + Required. The new view. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + view_id = proto.Field( + proto.STRING, + number=2, + ) + view = proto.Field( + proto.MESSAGE, + number=3, + message='LogView', + ) + + +class UpdateViewRequest(proto.Message): + r"""The parameters to ``UpdateView``. + Attributes: + name (str): + Required. The full resource name of the view to update + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. 
+ view (google.cloud.logging_v2.types.LogView): + Required. The updated view. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in ``view`` + that need an update. A field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.MESSAGE, + number=2, + message='LogView', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class GetViewRequest(proto.Message): + r"""The parameters to ``GetView``. + Attributes: + name (str): + Required. The resource name of the policy: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteViewRequest(proto.Message): + r"""The parameters to ``DeleteView``. + Attributes: + name (str): + Required. The full resource name of the view to delete: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + Example: + ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSinksRequest(proto.Message): + r"""The parameters to ``ListSinks``. + Attributes: + parent (str): + Required. The parent resource whose sinks are to be listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + page_token (str): + Optional. 
If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListSinksResponse(proto.Message): + r"""Result returned from ``ListSinks``. + Attributes: + sinks (Sequence[google.cloud.logging_v2.types.LogSink]): + A list of sinks. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + sinks = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogSink', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetSinkRequest(proto.Message): + r"""The parameters to ``GetSink``. + Attributes: + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + """ + + sink_name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSinkRequest(proto.Message): + r"""The parameters to ``CreateSink``. + Attributes: + parent (str): + Required. 
The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + Examples: ``"projects/my-logging-project"``, + ``"organizations/123456789"``. + sink (google.cloud.logging_v2.types.LogSink): + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity (bool): + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is + omitted or set to false, and if the sink's parent is a + project, then the value returned as ``writer_identity`` is + the same group or service account used by Logging before the + addition of writer identities to this API. The sink's + destination must be in the same project as the sink itself. + + If this field is set to true, or if the sink is owned by a + non-project resource such as an organization, then the value + of ``writer_identity`` will be a unique service account used + only for exports from the new sink. For more information, + see ``writer_identity`` in + [LogSink][google.logging.v2.LogSink]. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message='LogSink', + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateSinkRequest(proto.Message): + r"""The parameters to ``UpdateSink``. + Attributes: + sink_name (str): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + sink (google.cloud.logging_v2.types.LogSink): + Required. 
The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + unique_writer_identity (bool): + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: + + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + An empty updateMask is temporarily treated as using the + following mask for backwards compatibility purposes: + destination,filter,includeChildren At some point in the + future, behavior will be removed and specifying an empty + updateMask will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + Example: ``updateMask=filter``. + """ + + sink_name = proto.Field( + proto.STRING, + number=1, + ) + sink = proto.Field( + proto.MESSAGE, + number=2, + message='LogSink', + ) + unique_writer_identity = proto.Field( + proto.BOOL, + number=3, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSinkRequest(proto.Message): + r"""The parameters to ``DeleteSink``. + Attributes: + sink_name (str): + Required. 
The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + Example: ``"projects/my-project-id/sinks/my-sink-id"``. + """ + + sink_name = proto.Field( + proto.STRING, + number=1, + ) + + +class LogExclusion(proto.Message): + r"""Specifies a set of log entries that are not to be stored in + Logging. If your GCP resource receives a large volume of logs, + you can use exclusions to reduce your chargeable logs. + Exclusions are processed after log sinks, so you can export log + entries before they are excluded. Note that organization-level + and folder-level exclusions don't apply to child resources, and + that you can't exclude audit log entries. + + Attributes: + name (str): + Required. A client-assigned identifier, such as + ``"load-balancer-exclusion"``. Identifiers are limited to + 100 characters and can include only letters, digits, + underscores, hyphens, and periods. First character has to be + alphanumeric. + description (str): + Optional. A description of this exclusion. + filter (str): + Required. An `advanced logs + filter `__ + that matches the log entries to be excluded. By using the + `sample + function `__, + you can exclude less than 100% of the matching log entries. + For example, the following query matches 99% of low-severity + log entries from Google Cloud Storage buckets: + + ``"resource.type=gcs_bucket severity`__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve CMEK settings. + + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + Example: ``"organizations/12345/cmekSettings"``. 
+ + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP organization. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCmekSettingsRequest(proto.Message): + r"""The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the CMEK settings to update. + + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + Example: ``"organizations/12345/cmekSettings"``. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP organization. + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + Required. The CMEK settings to update. + + See `Enabling CMEK for Logs + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``cmek_settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. Output + only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + Example: ``"updateMask=kmsKeyName"`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + cmek_settings = proto.Field( + proto.MESSAGE, + number=2, + message='CmekSettings', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class CmekSettings(proto.Message): + r"""Describes the customer-managed encryption key (CMEK) settings + associated with a project, folder, organization, billing account, or + flexible resource. 
+ + Note: CMEK for the Logs Router can currently only be configured for + GCP organizations. Once configured, it applies to all projects and + folders in the GCP organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + Attributes: + name (str): + Output only. The resource name of the CMEK + settings. + kms_key_name (str): + The resource name for the configured Cloud KMS key. + + KMS key name format: + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` + + To enable CMEK for the Logs Router, set this field to a + valid ``kms_key_name`` for which the associated service + account has the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned + for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Logs Router, set this field to an + empty string. + + See `Enabling CMEK for Logs + Router `__ + for more information. + service_account_id (str): + Output only. The service account that will be used by the + Logs Router to access your Cloud KMS key. + + Before enabling CMEK for Logs Router, you must first assign + the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to + the service account that the Logs Router will use to access + your Cloud KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Logs + Router `__ + for more information. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name = proto.Field( + proto.STRING, + number=2, + ) + service_account_id = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py new file mode 100644 index 000000000000..252e43760b02 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -0,0 +1,371 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LogMetric', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'GetLogMetricRequest', + 'CreateLogMetricRequest', + 'UpdateLogMetricRequest', + 'DeleteLogMetricRequest', + }, +) + + +class LogMetric(proto.Message): + r"""Describes a logs-based metric. The value of the metric is the + number of log entries that match a logs filter in a given time + interval. 
+ Logs-based metrics can also be used to extract values from logs + and create a distribution of the values. The distribution + records the statistics of the extracted values along with an + optional histogram of the values as specified by the bucket + options. + + Attributes: + name (str): + Required. The client-assigned metric identifier. Examples: + ``"error_count"``, ``"nginx/requests"``. + + Metric identifiers are limited to 100 characters and can + include only the following characters: ``A-Z``, ``a-z``, + ``0-9``, and the special characters ``_-.,+!*',()%/``. The + forward-slash character (``/``) denotes a hierarchy of name + pieces, and it cannot be the first character of the name. + + The metric identifier in this field must not be + `URL-encoded `__. + However, when the metric identifier appears as the + ``[METRIC_ID]`` part of a ``metric_name`` API parameter, + then the metric identifier must be URL-encoded. Example: + ``"projects/my-project/metrics/nginx%2Frequests"``. + description (str): + Optional. A description of this metric, which + is used in documentation. The maximum length of + the description is 8000 characters. + filter (str): + Required. An `advanced logs + filter `__ + which is used to match log entries. Example: + + :: + + "resource.type=gae_app AND severity>=ERROR" + + The maximum length of the filter is 20000 characters. + metric_descriptor (google.api.metric_pb2.MetricDescriptor): + Optional. The metric descriptor associated with the + logs-based metric. If unspecified, it uses a default metric + descriptor with a DELTA metric kind, INT64 value type, with + no labels and a unit of "1". Such a metric counts the number + of log entries matching the ``filter`` expression. + + The ``name``, ``type``, and ``description`` fields in the + ``metric_descriptor`` are output only, and is constructed + using the ``name`` and ``description`` field in the + LogMetric. 
+ + To create a logs-based metric that records a distribution of + log values, a DELTA metric kind with a DISTRIBUTION value + type must be used along with a ``value_extractor`` + expression in the LogMetric. + + Each label in the metric descriptor must have a matching + label name as the key and an extractor expression as the + value in the ``label_extractors`` map. + + The ``metric_kind`` and ``value_type`` fields in the + ``metric_descriptor`` cannot be updated once initially + configured. New labels can be added in the + ``metric_descriptor``, but existing labels cannot be + modified except for their description. + value_extractor (str): + Optional. A ``value_extractor`` is required when using a + distribution logs-based metric to extract the values to + record from a log entry. Two functions are supported for + value extraction: ``EXTRACT(field)`` or + ``REGEXP_EXTRACT(field, regex)``. The argument are: + + 1. field: The name of the log entry field from which the + value is to be extracted. + 2. regex: A regular expression using the Google RE2 syntax + (https://github.com/google/re2/wiki/Syntax) with a single + capture group to extract data from the specified log + entry field. The value of the field is converted to a + string before applying the regex. It is an error to + specify a regex that does not include exactly one capture + group. + + The result of the extraction must be convertible to a double + type, as the distribution always records double values. If + either the extraction or the conversion to double fails, + then those values are not recorded in the distribution. + + Example: + ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` + label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]): + Optional. A map from a label key string to an extractor + expression which is used to extract data from a log entry + field and assign as the label value. 
Each label key + specified in the LabelDescriptor must have an associated + extractor expression in this map. The syntax of the + extractor expression is the same as for the + ``value_extractor`` field. + + The extracted value is converted to the type defined in the + label descriptor. If the either the extraction or the type + conversion fails, the label will have a default value. The + default value for a string label is an empty string, for an + integer label its 0, and for a boolean label its ``false``. + + Note that there are upper bounds on the maximum number of + labels and the number of active time series that are allowed + in a project. + bucket_options (google.api.distribution_pb2.BucketOptions): + Optional. The ``bucket_options`` are required when the + logs-based metric is using a DISTRIBUTION value type and it + describes the bucket boundaries used to create a histogram + of the extracted values. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + metric. + This field may not be present for older metrics. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + metric. + This field may not be present for older metrics. + version (google.cloud.logging_v2.types.LogMetric.ApiVersion): + Deprecated. The API version that created or + updated this metric. The v2 format is used by + default and cannot be changed. 
+ """ + class ApiVersion(proto.Enum): + r"""Logging API version.""" + V2 = 0 + V1 = 1 + + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + filter = proto.Field( + proto.STRING, + number=3, + ) + metric_descriptor = proto.Field( + proto.MESSAGE, + number=5, + message=metric_pb2.MetricDescriptor, + ) + value_extractor = proto.Field( + proto.STRING, + number=6, + ) + label_extractors = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + bucket_options = proto.Field( + proto.MESSAGE, + number=8, + message=distribution_pb2.Distribution.BucketOptions, + ) + create_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + version = proto.Field( + proto.ENUM, + number=4, + enum=ApiVersion, + ) + + +class ListLogMetricsRequest(proto.Message): + r"""The parameters to ListLogMetrics. + Attributes: + parent (str): + Required. The name of the project containing the metrics: + + :: + + "projects/[PROJECT_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLogMetricsResponse(proto.Message): + r"""Result returned from ListLogMetrics. 
+ Attributes: + metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): + A list of logs-based metrics. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + metrics = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogMetric', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLogMetricRequest(proto.Message): + r"""The parameters to GetLogMetric. + Attributes: + metric_name (str): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + """ + + metric_name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateLogMetricRequest(proto.Message): + r"""The parameters to CreateLogMetric. + Attributes: + parent (str): + Required. The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The new logs-based metric, which + must not have an identifier that already exists. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message='LogMetric', + ) + + +class UpdateLogMetricRequest(proto.Message): + r"""The parameters to UpdateLogMetric. + Attributes: + metric_name (str): + Required. The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and it's + ``name`` field must be the same as ``[METRIC_ID]`` If the + metric does not exist in ``[PROJECT_ID]``, then a new metric + is created. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The updated metric. 
+ """ + + metric_name = proto.Field( + proto.STRING, + number=1, + ) + metric = proto.Field( + proto.MESSAGE, + number=2, + message='LogMetric', + ) + + +class DeleteLogMetricRequest(proto.Message): + r"""The parameters to DeleteLogMetric. + Attributes: + metric_name (str): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". + """ + + metric_name = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini b/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py new file mode 100644 index 000000000000..4a78a7f99eaf --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] + +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/logging_v2/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python='3.7') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py new file mode 100644 index 000000000000..5a3ed2504c02 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -0,0 +1,209 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class loggingCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_bucket': ('parent', 'bucket_id', 'bucket', ), + 'create_exclusion': ('parent', 'exclusion', ), + 'create_log_metric': ('parent', 'metric', ), + 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), + 'create_view': ('parent', 'view_id', 'view', ), + 'delete_bucket': ('name', ), + 'delete_exclusion': ('name', ), + 'delete_log': ('log_name', ), + 'delete_log_metric': ('metric_name', ), + 'delete_sink': ('sink_name', ), + 'delete_view': ('name', ), + 'get_bucket': ('name', ), + 'get_cmek_settings': ('name', ), + 'get_exclusion': ('name', ), + 'get_log_metric': ('metric_name', ), + 'get_sink': ('sink_name', ), + 'get_view': ('name', ), + 'list_buckets': ('parent', 'page_token', 'page_size', ), + 'list_exclusions': ('parent', 
'page_token', 'page_size', ), + 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_log_metrics': ('parent', 'page_token', 'page_size', ), + 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), + 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), + 'list_sinks': ('parent', 'page_token', 'page_size', ), + 'list_views': ('parent', 'page_token', 'page_size', ), + 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), + 'undelete_bucket': ('name', ), + 'update_bucket': ('name', 'bucket', 'update_mask', ), + 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), + 'update_exclusion': ('name', 'exclusion', 'update_mask', ), + 'update_log_metric': ('metric_name', 'metric', ), + 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), + 'update_view': ('name', 'view', 'update_mask', ), + 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=loggingCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the logging client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py new file mode 100644 index 000000000000..32b47ac8f030 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-logging', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py new file mode 100644 index 000000000000..1127edd9023f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -0,0 +1,6436 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient +from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.services.config_service_v2 import transports +from google.cloud.logging_v2.services.config_service_v2.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.logging_v2.types import logging_config +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert ConfigServiceV2Client._get_default_mtls_endpoint(None) is None + assert ConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, +]) +def test_config_service_v2_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, +]) +def test_config_service_v2_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with 
def test_config_service_v2_client_get_transport_class():
    # With no argument, the default (gRPC) transport is among those offered.
    transport = ConfigServiceV2Client.get_transport_class()
    assert transport in [transports.ConfigServiceV2GrpcTransport]

    # Asking for "grpc" by name returns the gRPC transport class.
    transport = ConfigServiceV2Client.get_transport_class("grpc")
    assert transport == transports.ConfigServiceV2GrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"),
    (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client))
@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient))
def test_config_service_v2_client_client_options(client_class, transport_class, transport_name):
    def assert_transport_init(patched, host, quota_project_id=None):
        # Every scenario below constructs the transport with default
        # credentials; only the host and quota project vary.
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=host,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=quota_project_id,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )

    # A ready-made transport instance short-circuits transport resolution.
    with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # A transport *name* goes through get_transport_class.
    with mock.patch.object(ConfigServiceV2Client, "get_transport_class") as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # An explicit api_endpoint in the options wins.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        assert_transport_init(patched, "squid.clam.whelk")

    # GOOGLE_API_USE_MTLS_ENDPOINT=never -> plain endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            assert_transport_init(patched, client.DEFAULT_ENDPOINT)

    # GOOGLE_API_USE_MTLS_ENDPOINT=always -> mTLS endpoint.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class()
            assert_transport_init(patched, client.DEFAULT_MTLS_ENDPOINT)

    # Unsupported values for either environment variable raise.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class()

    # quota_project_id is forwarded to the transport.
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        assert_transport_init(patched, client.DEFAULT_ENDPOINT, quota_project_id="octopus")
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "true"),
    (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"),
    (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client))
@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    # Endpoint autoswitch behavior: with GOOGLE_API_USE_MTLS_ENDPOINT=auto the
    # client flips to the default mTLS endpoint iff
    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a client cert exists.
    cert_env = {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}

    # Case 1: client_cert_source supplied explicitly via client options.
    with mock.patch.dict(os.environ, cert_env):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Case 2: no explicit cert source, but ADC provides a default client cert.
    with mock.patch.dict(os.environ, cert_env):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # FIX: the expected endpoints were previously read off a
                    # `client` variable leaked from Case 1's scope (no client
                    # exists yet at this point in this block). Use the class
                    # attributes, which mock.patch.object patches identically.
                    if use_client_cert_env == "false":
                        expected_host = client_class.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client_class.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )

    # Case 3: neither an explicit nor an ADC cert -> plain endpoint, no cert.
    with mock.patch.dict(os.environ, cert_env):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class()
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"),
    (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name):
    # Scopes from client options are handed straight to the transport.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"),
    (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name):
    # A credentials file named in the options is forwarded verbatim.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )


def test_config_service_v2_client_client_options_from_dict():
    # Client options may be a plain dict rather than a ClientOptions object.
    with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = ConfigServiceV2Client(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )


def test_list_buckets(transport: str = "grpc", request_type=logging_config.ListBucketsRequest):
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional as far as the runtime is concerned, and
    # the API is mocked out, so an empty request suffices.
    request = request_type()

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.return_value = logging_config.ListBucketsResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_buckets(request)

        # Exactly one stub invocation, with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.ListBucketsRequest()

    # The client wraps the raw response in a pager.
    assert isinstance(response, pagers.ListBucketsPager)
    assert response.next_page_token == 'next_page_token_value'


def test_list_buckets_from_dict():
    # Dict requests are coerced into ListBucketsRequest; reuse the main test.
    test_list_buckets(request_type=dict)


def test_list_buckets_empty_call():
    # Coverage failsafe: totally empty calls (request == None and no
    # flattened fields) must still send the default request.
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        client.list_buckets()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.ListBucketsRequest()
@pytest.mark.asyncio
async def test_list_buckets_async(transport: str = "grpc_asyncio", request_type=logging_config.ListBucketsRequest):
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request suffices.
    request = request_type()

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging_config.ListBucketsResponse(
                next_page_token='next_page_token_value',
            )
        )
        response = await client.list_buckets(request)

        # The stub was invoked with the canonical request type.
        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.ListBucketsRequest()

    # The async client wraps the raw response in an async pager.
    assert isinstance(response, pagers.ListBucketsAsyncPager)
    assert response.next_page_token == 'next_page_token_value'


@pytest.mark.asyncio
async def test_list_buckets_async_from_dict():
    await test_list_buckets_async(request_type=dict)


def test_list_buckets_field_headers():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must be mirrored into the
    # x-goog-request-params header; set it to a non-empty value.
    request = logging_config.ListBucketsRequest()
    request.parent = 'parent/value'

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.return_value = logging_config.ListBucketsResponse()
        client.list_buckets(request)

        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    # The routing header carries the URI field.
    _, _, kwargs = rpc.mock_calls[0]
    assert ('x-goog-request-params', 'parent=parent/value') in kwargs['metadata']


@pytest.mark.asyncio
async def test_list_buckets_field_headers_async():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = logging_config.ListBucketsRequest()
    request.parent = 'parent/value'

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse())
        await client.list_buckets(request)

        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    _, _, kwargs = rpc.mock_calls[0]
    assert ('x-goog-request-params', 'parent=parent/value') in kwargs['metadata']


def test_list_buckets_flattened():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.return_value = logging_config.ListBucketsResponse()
        # A truthy value for each flattened field, passed as a keyword,
        # must land on the generated request object.
        client.list_buckets(
            parent='parent_value',
        )

        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == 'parent_value'
def test_list_buckets_flattened_error():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Supplying both a request object and flattened fields is ambiguous and
    # must raise.
    with pytest.raises(ValueError):
        client.list_buckets(
            logging_config.ListBucketsRequest(),
            parent='parent_value',
        )


@pytest.mark.asyncio
async def test_list_buckets_flattened_async():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        # FIX: a dead assignment of a plain (non-awaitable) response used to
        # precede this one; only the FakeUnaryUnaryCall value is meaningful.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse())
        # A truthy value for each flattened field, passed as a keyword,
        # must land on the generated request object.
        response = await client.list_buckets(
            parent='parent_value',
        )

        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0].parent == 'parent_value'


@pytest.mark.asyncio
async def test_list_buckets_flattened_error_async():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mixing a request object with flattened fields must raise.
    with pytest.raises(ValueError):
        await client.list_buckets(
            logging_config.ListBucketsRequest(),
            parent='parent_value',
        )


def _list_buckets_page_responses():
    # Four pages (3, 0, 1, 2 buckets) then RuntimeError to stop a runaway
    # pager; shared by the sync pager tests below.
    return (
        logging_config.ListBucketsResponse(
            buckets=[
                logging_config.LogBucket(),
                logging_config.LogBucket(),
                logging_config.LogBucket(),
            ],
            next_page_token='abc',
        ),
        logging_config.ListBucketsResponse(
            buckets=[],
            next_page_token='def',
        ),
        logging_config.ListBucketsResponse(
            buckets=[
                logging_config.LogBucket(),
            ],
            next_page_token='ghi',
        ),
        logging_config.ListBucketsResponse(
            buckets=[
                logging_config.LogBucket(),
                logging_config.LogBucket(),
            ],
        ),
        RuntimeError,
    )


def test_list_buckets_pager():
    # FIX: AnonymousCredentials was passed as a class (missing parentheses);
    # every other test in this module passes an instance.
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.side_effect = _list_buckets_page_responses()

        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_buckets(request={})
        assert pager._metadata == expected_metadata

        # Iterating the pager yields every bucket from every page.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(item, logging_config.LogBucket) for item in results)


def test_list_buckets_pages():
    # FIX: AnonymousCredentials instance, not the class (see pager test).
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_buckets), "__call__") as rpc:
        rpc.side_effect = _list_buckets_page_responses()
        pages = list(client.list_buckets(request={}).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_buckets_async_pager():
    # FIX: AnonymousCredentials was passed as a class (missing parentheses);
    # every other test in this module passes an instance.
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_buckets),
            "__call__", new_callable=mock.AsyncMock) as rpc:
        # Four pages (3, 0, 1, 2 buckets) then RuntimeError to stop a
        # runaway pager.
        rpc.side_effect = (
            logging_config.ListBucketsResponse(
                buckets=[
                    logging_config.LogBucket(),
                    logging_config.LogBucket(),
                    logging_config.LogBucket(),
                ],
                next_page_token='abc',
            ),
            logging_config.ListBucketsResponse(
                buckets=[],
                next_page_token='def',
            ),
            logging_config.ListBucketsResponse(
                buckets=[
                    logging_config.LogBucket(),
                ],
                next_page_token='ghi',
            ),
            logging_config.ListBucketsResponse(
                buckets=[
                    logging_config.LogBucket(),
                    logging_config.LogBucket(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_buckets(request={},)
        assert async_pager.next_page_token == 'abc'

        # Async iteration yields every bucket from every page.
        responses = [item async for item in async_pager]
        assert len(responses) == 6
        assert all(isinstance(item, logging_config.LogBucket) for item in responses)


@pytest.mark.asyncio
async def test_list_buckets_async_pages():
    # FIX: AnonymousCredentials instance, not the class (see async pager test).
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
            type(client.transport.list_buckets),
            "__call__", new_callable=mock.AsyncMock) as rpc:
        rpc.side_effect = (
            logging_config.ListBucketsResponse(
                buckets=[
                    logging_config.LogBucket(),
                    logging_config.LogBucket(),
                    logging_config.LogBucket(),
                ],
                next_page_token='abc',
            ),
            logging_config.ListBucketsResponse(
                buckets=[],
                next_page_token='def',
            ),
            logging_config.ListBucketsResponse(
                buckets=[
                    logging_config.LogBucket(),
                ],
                next_page_token='ghi',
            ),
            logging_config.ListBucketsResponse(
                buckets=[
                    logging_config.LogBucket(),
                    logging_config.LogBucket(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_buckets(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token


def test_get_bucket(transport: str = "grpc", request_type=logging_config.GetBucketRequest):
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request suffices.
    request = request_type()

    with mock.patch.object(type(client.transport.get_bucket), "__call__") as rpc:
        rpc.return_value = logging_config.LogBucket(
            name='name_value',
            description='description_value',
            retention_days=1512,
            locked=True,
            lifecycle_state=logging_config.LifecycleState.ACTIVE,
        )
        response = client.get_bucket(request)

        # Exactly one stub invocation, with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.GetBucketRequest()

    # Every field of the fake LogBucket surfaces on the response.
    assert isinstance(response, logging_config.LogBucket)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.retention_days == 1512
    assert response.locked is True
    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
def test_get_bucket_from_dict():
    # Dict requests are coerced into GetBucketRequest; reuse the main test.
    test_get_bucket(request_type=dict)


def test_get_bucket_empty_call():
    # Coverage failsafe: totally empty calls (request == None and no
    # flattened fields) must still send the default request.
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    with mock.patch.object(type(client.transport.get_bucket), "__call__") as rpc:
        client.get_bucket()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.GetBucketRequest()


@pytest.mark.asyncio
async def test_get_bucket_async(transport: str = "grpc_asyncio", request_type=logging_config.GetBucketRequest):
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request suffices.
    request = request_type()

    with mock.patch.object(type(client.transport.get_bucket), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging_config.LogBucket(
                name='name_value',
                description='description_value',
                retention_days=1512,
                locked=True,
                lifecycle_state=logging_config.LifecycleState.ACTIVE,
            )
        )
        response = await client.get_bucket(request)

        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.GetBucketRequest()

    # Every field of the fake LogBucket surfaces on the response.
    assert isinstance(response, logging_config.LogBucket)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.retention_days == 1512
    assert response.locked is True
    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE


@pytest.mark.asyncio
async def test_get_bucket_async_from_dict():
    await test_get_bucket_async(request_type=dict)


def test_get_bucket_field_headers():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must be mirrored into the
    # x-goog-request-params header; set it to a non-empty value.
    request = logging_config.GetBucketRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.get_bucket), "__call__") as rpc:
        rpc.return_value = logging_config.LogBucket()
        client.get_bucket(request)

        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    _, _, kwargs = rpc.mock_calls[0]
    assert ('x-goog-request-params', 'name=name/value') in kwargs['metadata']


@pytest.mark.asyncio
async def test_get_bucket_field_headers_async():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = logging_config.GetBucketRequest()
    request.name = 'name/value'

    with mock.patch.object(type(client.transport.get_bucket), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket())
        await client.get_bucket(request)

        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    _, _, kwargs = rpc.mock_calls[0]
    assert ('x-goog-request-params', 'name=name/value') in kwargs['metadata']
def test_create_bucket(transport: str = "grpc", request_type=logging_config.CreateBucketRequest):
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Proto3 fields are all optional and the API is mocked out, so an empty
    # request suffices.
    request = request_type()

    with mock.patch.object(type(client.transport.create_bucket), "__call__") as rpc:
        rpc.return_value = logging_config.LogBucket(
            name='name_value',
            description='description_value',
            retention_days=1512,
            locked=True,
            lifecycle_state=logging_config.LifecycleState.ACTIVE,
        )
        response = client.create_bucket(request)

        # Exactly one stub invocation, with the canonical request type.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.CreateBucketRequest()

    # Every field of the fake LogBucket surfaces on the response.
    assert isinstance(response, logging_config.LogBucket)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.retention_days == 1512
    assert response.locked is True
    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE


def test_create_bucket_from_dict():
    # Dict requests are coerced into CreateBucketRequest; reuse the main test.
    test_create_bucket(request_type=dict)


def test_create_bucket_empty_call():
    # Coverage failsafe: totally empty calls (request == None and no
    # flattened fields) must still send the default request.
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    with mock.patch.object(type(client.transport.create_bucket), "__call__") as rpc:
        client.create_bucket()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.CreateBucketRequest()


@pytest.mark.asyncio
async def test_create_bucket_async(transport: str = "grpc_asyncio", request_type=logging_config.CreateBucketRequest):
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    request = request_type()

    with mock.patch.object(type(client.transport.create_bucket), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging_config.LogBucket(
                name='name_value',
                description='description_value',
                retention_days=1512,
                locked=True,
                lifecycle_state=logging_config.LifecycleState.ACTIVE,
            )
        )
        response = await client.create_bucket(request)

        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == logging_config.CreateBucketRequest()

    assert isinstance(response, logging_config.LogBucket)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.retention_days == 1512
    assert response.locked is True
    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE


@pytest.mark.asyncio
async def test_create_bucket_async_from_dict():
    await test_create_bucket_async(request_type=dict)


def test_create_bucket_field_headers():
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI must be mirrored into the
    # x-goog-request-params header; set it to a non-empty value.
    request = logging_config.CreateBucketRequest()
    request.parent = 'parent/value'

    with mock.patch.object(type(client.transport.create_bucket), "__call__") as rpc:
        rpc.return_value = logging_config.LogBucket()
        client.create_bucket(request)

        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    _, _, kwargs = rpc.mock_calls[0]
    assert ('x-goog-request-params', 'parent=parent/value') in kwargs['metadata']


@pytest.mark.asyncio
async def test_create_bucket_field_headers_async():
    client = ConfigServiceV2AsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = logging_config.CreateBucketRequest()
    request.parent = 'parent/value'

    with mock.patch.object(type(client.transport.create_bucket), "__call__") as rpc:
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket())
        await client.create_bucket(request)

        assert len(rpc.mock_calls)
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request

    _, _, kwargs = rpc.mock_calls[0]
    assert ('x-goog-request-params', 'parent=parent/value') in kwargs['metadata']
+ with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_update_bucket(transport: str = 'grpc', request_type=logging_config.UpdateBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + response = client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+
+
+def test_update_bucket_from_dict():
+    test_update_bucket(request_type=dict)
+
+
+def test_update_bucket_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        client.update_bucket()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateBucketRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        ))
+        response = await client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +@pytest.mark.asyncio +async def test_update_bucket_async_from_dict(): + await test_update_bucket_async(request_type=dict) + + +def test_update_bucket_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_bucket(transport: str = 'grpc', request_type=logging_config.DeleteBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_bucket_from_dict(): + test_delete_bucket(request_type=dict) + + +def test_delete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + client.delete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + +@pytest.mark.asyncio +async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_bucket_async_from_dict(): + await test_delete_bucket_async(request_type=dict) + + +def test_delete_bucket_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + call.return_value = None + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_undelete_bucket(transport: str = 'grpc', request_type=logging_config.UndeleteBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_undelete_bucket_from_dict(): + test_undelete_bucket(request_type=dict) + + +def test_undelete_bucket_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + client.undelete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + +@pytest.mark.asyncio +async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_undelete_bucket_async_from_dict(): + await test_undelete_bucket_async(request_type=dict) + + +def test_undelete_bucket_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + call.return_value = None + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_undelete_bucket_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UndeleteBucketRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_views(transport: str = 'grpc', request_type=logging_config.ListViewsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_views_from_dict(): + test_list_views(request_type=dict) + + +def test_list_views_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        client.list_views()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListViewsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListViewsRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers.ListViewsAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_views_async_from_dict():
+    await test_list_views_async(request_type=dict)
+
+
+def test_list_views_field_headers():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+ request = logging_config.ListViewsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_views_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListViewsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_views_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = logging_config.ListViewsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_views(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_views_flattened_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_views(
+            logging_config.ListViewsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_views_flattened_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call. The async
+        # transport stub must return an awaitable, so the response is
+        # wrapped in FakeUnaryUnaryCall.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_views(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_views_flattened_error_async():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_views(
+            logging_config.ListViewsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_views_pager():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_views(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, logging_config.LogView)
+                   for i in results)
+
+def test_list_views_pages():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_views(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_views_async_pager():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+                next_page_token='abc',
+            ),
+            logging_config.ListViewsResponse(
+                views=[],
+                next_page_token='def',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                ],
+                next_page_token='ghi',
+            ),
+            logging_config.ListViewsResponse(
+                views=[
+                    logging_config.LogView(),
+                    logging_config.LogView(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_views(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, logging_config.LogView)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_views_async_pages():
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_views),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token='abc', + ), + logging_config.ListViewsResponse( + views=[], + next_page_token='def', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token='ghi', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_views(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_view(transport: str = 'grpc', request_type=logging_config.GetViewRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+def test_get_view_from_dict():
+    test_get_view(request_type=dict)
+
+
+def test_get_view_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_view),
+            '__call__') as call:
+        client.get_view()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetViewRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest):
+    client = ConfigServiceV2AsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        ))
+        response = await client.get_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.GetViewRequest()
+
+        # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test_get_view_async_from_dict(): + await test_get_view_async(request_type=dict) + + +def test_get_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_view(transport: str = 'grpc', request_type=logging_config.CreateViewRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +def test_create_view_from_dict(): + test_create_view(request_type=dict) + + +def test_create_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + client.create_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + + +@pytest.mark.asyncio +async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test_create_view_async_from_dict(): + await test_create_view_async(request_type=dict) + + +def test_create_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.CreateViewRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateViewRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_update_view(transport: str = 'grpc', request_type=logging_config.UpdateViewRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +def test_update_view_from_dict(): + test_update_view(request_type=dict) + + +def test_update_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + + +@pytest.mark.asyncio +async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test_update_view_async_from_dict(): + await test_update_view_async(request_type=dict) + + +def test_update_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_view(transport: str = 'grpc', request_type=logging_config.DeleteViewRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_view_from_dict(): + test_delete_view(request_type=dict) + + +def test_delete_view_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + +@pytest.mark.asyncio +async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_view_async_from_dict(): + await test_delete_view_async(request_type=dict) + + +def test_delete_view_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + call.return_value = None + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_view_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_list_sinks(transport: str = 'grpc', request_type=logging_config.ListSinksRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + ) + response = client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSinksPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_sinks_from_dict(): + test_list_sinks(request_type=dict) + + +def test_list_sinks_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + client.list_sinks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + + +@pytest.mark.asyncio +async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSinksAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_sinks_async_from_dict(): + await test_list_sinks_async(request_type=dict) + + +def test_list_sinks_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListSinksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_sinks_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListSinksRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_sinks_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_sinks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_sinks_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_sinks( + logging_config.ListSinksRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_sinks_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_sinks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_sinks_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_sinks( + logging_config.ListSinksRequest(), + parent='parent_value', + ) + + +def test_list_sinks_pager(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_sinks(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogSink) + for i in results) + +def test_list_sinks_pages(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = list(client.list_sinks(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_sinks_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_sinks(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogSink) + for i in responses) + +@pytest.mark.asyncio +async def test_list_sinks_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_sinks(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_sink(transport: str = 'grpc', request_type=logging_config.GetSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + response = client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_get_sink_from_dict(): + test_get_sink(request_type=dict) + + +def test_get_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + client.get_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + + +@pytest.mark.asyncio +async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + response = await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_get_sink_async_from_dict(): + await test_get_sink_async(request_type=dict) + + +def test_get_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +def test_get_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + + +def test_get_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_sink( + logging_config.GetSinkRequest(), + sink_name='sink_name_value', + ) + + +@pytest.mark.asyncio +async def test_get_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + + +@pytest.mark.asyncio +async def test_get_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_sink( + logging_config.GetSinkRequest(), + sink_name='sink_name_value', + ) + + +def test_create_sink(transport: str = 'grpc', request_type=logging_config.CreateSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + response = client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_create_sink_from_dict(): + test_create_sink(request_type=dict) + + +def test_create_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + client.create_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + + +@pytest.mark.asyncio +async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + response = await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_create_sink_async_from_dict(): + await test_create_sink_async(request_type=dict) + + +def test_create_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_sink( + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].sink == logging_config.LogSink(name='name_value') + + +def test_create_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_sink( + logging_config.CreateSinkRequest(), + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_sink( + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].sink == logging_config.LogSink(name='name_value') + + +@pytest.mark.asyncio +async def test_create_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_sink( + logging_config.CreateSinkRequest(), + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + +def test_update_sink(transport: str = 'grpc', request_type=logging_config.UpdateSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + response = client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_update_sink_from_dict(): + test_update_sink(request_type=dict) + + +def test_update_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + client.update_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest() + + +@pytest.mark.asyncio +async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + response = await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +@pytest.mark.asyncio +async def test_update_sink_async_from_dict(): + await test_update_sink_async(request_type=dict) + + +def test_update_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +def test_update_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_sink( + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + assert args[0].sink == logging_config.LogSink(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_sink( + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + assert args[0].sink == logging_config.LogSink(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_sink(transport: str = 'grpc', request_type=logging_config.DeleteSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_sink_from_dict(): + test_delete_sink(request_type=dict) + + +def test_delete_sink_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + client.delete_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + + +@pytest.mark.asyncio +async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_sink_async_from_dict(): + await test_delete_sink_async(request_type=dict) + + +def test_delete_sink_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + call.return_value = None + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_sink_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = 'sink_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name/value', + ) in kw['metadata'] + + +def test_delete_sink_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + + +def test_delete_sink_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_sink( + logging_config.DeleteSinkRequest(), + sink_name='sink_name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_sink_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].sink_name == 'sink_name_value' + + +@pytest.mark.asyncio +async def test_delete_sink_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_sink( + logging_config.DeleteSinkRequest(), + sink_name='sink_name_value', + ) + + +def test_list_exclusions(transport: str = 'grpc', request_type=logging_config.ListExclusionsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExclusionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_exclusions_from_dict(): + test_list_exclusions(request_type=dict) + + +def test_list_exclusions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + client.list_exclusions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + + +@pytest.mark.asyncio +async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListExclusionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_exclusions_async_from_dict(): + await test_list_exclusions_async(request_type=dict) + + +def test_list_exclusions_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.ListExclusionsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_exclusions_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListExclusionsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_exclusions_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_exclusions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_exclusions_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_exclusions_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_exclusions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_exclusions_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', + ) + + +def test_list_exclusions_pager(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_exclusions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in results) + +def test_list_exclusions_pages(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = list(client.list_exclusions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_exclusions_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_exclusions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in responses) + +@pytest.mark.asyncio +async def test_list_exclusions_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_exclusions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_exclusion(transport: str = 'grpc', request_type=logging_config.GetExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_get_exclusion_from_dict(): + test_get_exclusion(request_type=dict) + + +def test_get_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + client.get_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + +@pytest.mark.asyncio +async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_get_exclusion_async_from_dict(): + await test_get_exclusion_async(request_type=dict) + + +def test_get_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + + +def test_create_exclusion(transport: str = 'grpc', request_type=logging_config.CreateExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_create_exclusion_from_dict(): + test_create_exclusion(request_type=dict) + + +def test_create_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + client.create_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + +@pytest.mark.asyncio +async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_create_exclusion_async_from_dict(): + await test_create_exclusion_async(request_type=dict) + + +def test_create_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_exclusion( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + + +def test_create_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_exclusion( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + + +@pytest.mark.asyncio +async def test_create_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + +def test_update_exclusion(transport: str = 'grpc', request_type=logging_config.UpdateExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_update_exclusion_from_dict(): + test_update_exclusion(request_type=dict) + + +def test_update_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + +@pytest.mark.asyncio +async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test_update_exclusion_async_from_dict(): + await test_update_exclusion_async(request_type=dict) + + +def test_update_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_update_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_delete_exclusion(transport: str = 'grpc', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_exclusion_from_dict(): + test_delete_exclusion(request_type=dict) + + +def test_delete_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + +@pytest.mark.asyncio +async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) + + +def test_delete_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = None + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_exclusion_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) + + +def test_get_cmek_settings(transport: str = 'grpc', request_type=logging_config.GetCmekSettingsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + ) + response = client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +def test_get_cmek_settings_from_dict(): + test_get_cmek_settings(request_type=dict) + + +def test_get_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + client.get_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + )) + response = await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) + + +def test_get_cmek_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_update_cmek_settings(transport: str = 'grpc', request_type=logging_config.UpdateCmekSettingsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + ) + response = client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +def test_update_cmek_settings_from_dict(): + test_update_cmek_settings(request_type=dict) + + +def test_update_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + client.update_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + )) + response = await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +@pytest.mark.asyncio +async def test_update_cmek_settings_async_from_dict(): + await test_update_cmek_settings_async(request_type=dict) + + +def test_update_cmek_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConfigServiceV2GrpcTransport, + ) + +def test_config_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConfigServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_config_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.ConfigServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_buckets', + 'get_bucket', + 'create_bucket', + 'update_bucket', + 'delete_bucket', + 'undelete_bucket', + 'list_views', + 'get_view', + 'create_view', + 'update_view', + 'delete_view', + 'list_sinks', + 'get_sink', + 'create_sink', + 'update_sink', + 'delete_sink', + 'list_exclusions', + 'get_exclusion', + 'create_exclusion', + 'update_exclusion', + 'delete_exclusion', + 'get_cmek_settings', + 'update_cmek_settings', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_config_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + 
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + ), + quota_project_id="octopus", + ) + + +def test_config_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_config_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ConfigServiceV2Client() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_config_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + 
cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_config_service_v2_host_no_port(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + ) + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_config_service_v2_host_with_port(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + ) + assert client.transport._host == 'logging.googleapis.com:8000' + +def 
test_config_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConfigServiceV2GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_config_service_v2_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport])
def test_config_service_v2_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """Deprecated mTLS args must build SSL creds from the cert-source callback.

    Verifies that, when the deprecated ``api_mtls_endpoint`` +
    ``client_cert_source`` constructor arguments are used, the transport:
    emits a DeprecationWarning, resolves credentials via ADC, builds channel
    SSL credentials from the callback's cert/key pair, and opens the channel
    against the mTLS endpoint (port 443 appended).
    """
    # Patch grpc.ssl_channel_credentials so we can both supply a sentinel
    # credential object and assert what cert/key bytes it was built from.
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = ga_credentials.AnonymousCredentials()
            # The deprecated arguments are expected to trigger a warning;
            # ADC is patched so no real credential lookup happens.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, 'default') as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            # NOTE(review): assumes client_cert_source_callback (defined
            # earlier in this module) returns (b"cert bytes", b"key bytes")
            # — confirm against the module-level fixture.
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            # The channel must target the mTLS endpoint, carry the sentinel
            # SSL credentials, the service's default scopes, and unlimited
            # message-size options.
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=(
                    'https://www.googleapis.com/auth/cloud-platform',
                    'https://www.googleapis.com/auth/cloud-platform.read-only',
                    'https://www.googleapis.com/auth/logging.admin',
                    'https://www.googleapis.com/auth/logging.read',
                ),
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            # The transport must adopt the channel returned by create_channel
            # and retain the SSL credentials it built.
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cmek_settings_path(): + project = "squid" + expected = "projects/{project}/cmekSettings".format(project=project, ) + actual = ConfigServiceV2Client.cmek_settings_path(project) + assert expected == actual + + +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = ConfigServiceV2Client.cmek_settings_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual + +def test_log_bucket_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual + + +def test_parse_log_bucket_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "bucket": "mussel", + } + path = ConfigServiceV2Client.log_bucket_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual + +def test_log_exclusion_path(): + project = "winkle" + exclusion = "nautilus" + expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual + + +def test_parse_log_exclusion_path(): + expected = { + "project": "scallop", + "exclusion": "abalone", + } + path = ConfigServiceV2Client.log_exclusion_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual + +def test_log_sink_path(): + project = "squid" + sink = "clam" + expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + actual = ConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual + + +def test_parse_log_sink_path(): + expected = { + "project": "whelk", + "sink": "octopus", + } + path = ConfigServiceV2Client.log_sink_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual + +def test_log_view_path(): + project = "oyster" + location = "nudibranch" + bucket = "cuttlefish" + view = "mussel" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "bucket": "scallop", + "view": "abalone", + } + path = ConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = ConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ConfigServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = ConfigServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ConfigServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = ConfigServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ConfigServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = ConfigServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ConfigServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = ConfigServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ConfigServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = ConfigServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py new file mode 100644 index 000000000000..e0932066947e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -0,0 +1,2486 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import monitored_resource_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.services.logging_service_v2.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert LoggingServiceV2Client._get_default_mtls_endpoint(None) is None + assert LoggingServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert LoggingServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, +]) +def test_logging_service_v2_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, +]) +def test_logging_service_v2_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_logging_service_v2_client_get_transport_class(): + transport = LoggingServiceV2Client.get_transport_class() + available_transports = [ + transports.LoggingServiceV2GrpcTransport, + ] + assert transport in available_transports + + transport = LoggingServiceV2Client.get_transport_class("grpc") + assert transport == transports.LoggingServiceV2GrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +def test_logging_service_v2_client_client_options(client_class, 
transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "true"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(LoggingServiceV2Client, 
"DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_logging_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LoggingServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_delete_log(transport: str = 
'grpc', request_type=logging.DeleteLogRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_log_from_dict(): + test_delete_log(request_type=dict) + + +def test_delete_log_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + client.delete_log() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + +@pytest.mark.asyncio +async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_async_from_dict(): + await test_delete_log_async(request_type=dict) + + +def test_delete_log_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.DeleteLogRequest() + + request.log_name = 'log_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = None + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'log_name=log_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_log_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging.DeleteLogRequest() + + request.log_name = 'log_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'log_name=log_name/value', + ) in kw['metadata'] + + +def test_delete_log_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_log( + log_name='log_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].log_name == 'log_name_value' + + +def test_delete_log_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_log( + logging.DeleteLogRequest(), + log_name='log_name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_log_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_log( + log_name='log_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].log_name == 'log_name_value' + + +@pytest.mark.asyncio +async def test_delete_log_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_log( + logging.DeleteLogRequest(), + log_name='log_name_value', + ) + + +def test_write_log_entries(transport: str = 'grpc', request_type=logging.WriteLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging.WriteLogEntriesResponse( + ) + response = client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging.WriteLogEntriesResponse) + + +def test_write_log_entries_from_dict(): + test_write_log_entries(request_type=dict) + + +def test_write_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + client.write_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + + +@pytest.mark.asyncio +async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( + )) + response = await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging.WriteLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_write_log_entries_async_from_dict(): + await test_write_log_entries_async(request_type=dict) + + +def test_write_log_entries_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.write_log_entries( + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].log_name == 'log_name_value' + assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') + assert args[0].labels == {'key_value': 'value_value'} + assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] + + +def test_write_log_entries_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.write_log_entries( + logging.WriteLogEntriesRequest(), + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + +@pytest.mark.asyncio +async def test_write_log_entries_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.write_log_entries( + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].log_name == 'log_name_value' + assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') + assert args[0].labels == {'key_value': 'value_value'} + assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] + + +@pytest.mark.asyncio +async def test_write_log_entries_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.write_log_entries( + logging.WriteLogEntriesRequest(), + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + +def test_list_log_entries(transport: str = 'grpc', request_type=logging.ListLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_entries_from_dict(): + test_list_log_entries(request_type=dict) + + +def test_list_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + client.list_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + + +@pytest.mark.asyncio +async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogEntriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_log_entries_async_from_dict(): + await test_list_log_entries_async(request_type=dict) + + +def test_list_log_entries_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging.ListLogEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_log_entries( + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].resource_names == ['resource_names_value'] + assert args[0].filter == 'filter_value' + assert args[0].order_by == 'order_by_value' + + +def test_list_log_entries_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_log_entries( + logging.ListLogEntriesRequest(), + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + +@pytest.mark.asyncio +async def test_list_log_entries_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_log_entries( + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].resource_names == ['resource_names_value'] + assert args[0].filter == 'filter_value' + assert args[0].order_by == 'order_by_value' + + +@pytest.mark.asyncio +async def test_list_log_entries_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_log_entries( + logging.ListLogEntriesRequest(), + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + +def test_list_log_entries_pager(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_log_entries(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in results) + +def test_list_log_entries_pages(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_log_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_log_entries_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_log_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in responses) + +@pytest.mark.asyncio +async def test_list_log_entries_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_log_entries(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_type=logging.ListMonitoredResourceDescriptorsRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_monitored_resource_descriptors_from_dict(): + test_list_monitored_resource_descriptors(request_type=dict) + + +def test_list_monitored_resource_descriptors_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + client.list_monitored_resource_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_from_dict(): + await test_list_monitored_resource_descriptors_async(request_type=dict) + + +def test_list_monitored_resource_descriptors_pager(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + + metadata = () + pager = client.list_monitored_resource_descriptors(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results) + +def 
test_list_monitored_resource_descriptors_pages(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_monitored_resource_descriptors(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_monitored_resource_descriptors(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in responses) + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_list_logs(transport: str = 'grpc', request_type=logging.ListLogsRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + ) + response = client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogsPager) + assert response.log_names == ['log_names_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_list_logs_from_dict(): + test_list_logs(request_type=dict) + + +def test_list_logs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + client.list_logs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + + +@pytest.mark.asyncio +async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + )) + response = await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogsAsyncPager) + assert response.log_names == ['log_names_value'] + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_logs_async_from_dict(): + await test_list_logs_async(request_type=dict) + + +def test_list_logs_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.ListLogsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + call.return_value = logging.ListLogsResponse() + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_logs_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.ListLogsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_logs_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_logs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_logs_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_logs( + logging.ListLogsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_logs_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_logs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_logs_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_logs( + logging.ListLogsRequest(), + parent='parent_value', + ) + + +def test_list_logs_pager(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_logs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) + +def test_list_logs_pages(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_logs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_logs_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_logs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + +@pytest.mark.asyncio +async def test_list_logs_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_logs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_tail_log_entries(transport: str = 'grpc', request_type=logging.TailLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.tail_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([logging.TailLogEntriesResponse()]) + response = client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, logging.TailLogEntriesResponse) + + +def test_tail_log_entries_from_dict(): + test_tail_log_entries(request_type=dict) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.tail_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[logging.TailLogEntriesResponse()]) + response = await client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ message = await response.read() + assert isinstance(message, logging.TailLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_from_dict(): + await test_tail_log_entries_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LoggingServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LoggingServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, + ) + +def test_logging_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_logging_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'delete_log', + 'write_log_entries', + 'list_log_entries', + 'list_monitored_resource_descriptors', + 'list_logs', + 'tail_log_entries', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + 
load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + quota_project_id="octopus", + ) + + +def test_logging_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_logging_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, 
transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_logging_service_v2_host_no_port(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + ) + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_logging_service_v2_host_with_port(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + ) + assert client.transport._host == 'logging.googleapis.com:8000' + +def test_logging_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.LoggingServiceV2GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_logging_service_v2_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.LoggingServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_path(): + project = "squid" + log = "clam" + expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) + actual = LoggingServiceV2Client.log_path(project, log) + assert expected == 
actual + + +def test_parse_log_path(): + expected = { + "project": "whelk", + "log": "octopus", + } + path = LoggingServiceV2Client.log_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_log_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LoggingServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LoggingServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = LoggingServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LoggingServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LoggingServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LoggingServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LoggingServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = LoggingServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LoggingServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LoggingServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LoggingServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LoggingServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = LoggingServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py new file mode 100644 index 000000000000..04123a32febd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -0,0 +1,2351 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api import distribution_pb2 # type: ignore +from google.api import label_pb2 # type: ignore +from google.api import launch_stage_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2AsyncClient +from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.services.metrics_service_v2 import transports +from google.cloud.logging_v2.services.metrics_service_v2.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.logging_v2.types import logging_metrics +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert MetricsServiceV2Client._get_default_mtls_endpoint(None) is None + assert MetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, +]) +def test_metrics_service_v2_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, +]) +def test_metrics_service_v2_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_metrics_service_v2_client_get_transport_class(): + transport = MetricsServiceV2Client.get_transport_class() + available_transports = [ + transports.MetricsServiceV2GrpcTransport, + ] + assert transport in available_transports + + transport = MetricsServiceV2Client.get_transport_class("grpc") + assert transport == transports.MetricsServiceV2GrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +def test_metrics_service_v2_client_client_options(client_class, 
transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "true"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(MetricsServiceV2Client, 
"DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_metrics_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = MetricsServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_log_metrics(transport: str = 
'grpc', request_type=logging_metrics.ListLogMetricsRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogMetricsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_metrics_from_dict(): + test_list_log_metrics(request_type=dict) + + +def test_list_log_metrics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + client.list_log_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + + +@pytest.mark.asyncio +async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogMetricsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_log_metrics_async_from_dict(): + await test_list_log_metrics_async(request_type=dict) + + +def test_list_log_metrics_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_metrics.ListLogMetricsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_log_metrics_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.ListLogMetricsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_log_metrics_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_log_metrics( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_log_metrics_flattened_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_log_metrics( + logging_metrics.ListLogMetricsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_log_metrics_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_log_metrics( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_log_metrics_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_log_metrics( + logging_metrics.ListLogMetricsRequest(), + parent='parent_value', + ) + + +def test_list_log_metrics_pager(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_log_metrics(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, logging_metrics.LogMetric) + for i in results) + +def test_list_log_metrics_pages(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + pages = list(client.list_log_metrics(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_log_metrics_async_pager(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_log_metrics(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_metrics.LogMetric) + for i in responses) + +@pytest.mark.asyncio +async def test_list_log_metrics_async_pages(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_log_metrics(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_log_metric(transport: str = 'grpc', request_type=logging_metrics.GetLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + response = client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_get_log_metric_from_dict(): + test_get_log_metric(request_type=dict) + + +def test_get_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + client.get_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + + +@pytest.mark.asyncio +async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_get_log_metric_async_from_dict(): + await test_get_log_metric_async(request_type=dict) + + +def test_get_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +def test_get_log_metric_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + + +def test_get_log_metric_flattened_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_log_metric( + logging_metrics.GetLogMetricRequest(), + metric_name='metric_name_value', + ) + + +@pytest.mark.asyncio +async def test_get_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + + +@pytest.mark.asyncio +async def test_get_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_log_metric( + logging_metrics.GetLogMetricRequest(), + metric_name='metric_name_value', + ) + + +def test_create_log_metric(transport: str = 'grpc', request_type=logging_metrics.CreateLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + response = client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_create_log_metric_from_dict(): + test_create_log_metric(request_type=dict) + + +def test_create_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + client.create_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + + +@pytest.mark.asyncio +async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_create_log_metric_async_from_dict(): + await test_create_log_metric_async(request_type=dict) + + +def test_create_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.CreateLogMetricRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.CreateLogMetricRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_log_metric_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_log_metric( + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].metric == logging_metrics.LogMetric(name='name_value') + + +def test_create_log_metric_flattened_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_log_metric( + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].metric == logging_metrics.LogMetric(name='name_value') + + +@pytest.mark.asyncio +async def test_create_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +def test_update_log_metric(transport: str = 'grpc', request_type=logging_metrics.UpdateLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + response = client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_update_log_metric_from_dict(): + test_update_log_metric(request_type=dict) + + +def test_update_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + client.update_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + +@pytest.mark.asyncio +async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test_update_log_metric_async_from_dict(): + await test_update_log_metric_async(request_type=dict) + + +def test_update_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +def test_update_log_metric_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_log_metric( + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + assert args[0].metric == logging_metrics.LogMetric(name='name_value') + + +def test_update_log_metric_flattened_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_log_metric( + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + assert args[0].metric == logging_metrics.LogMetric(name='name_value') + + +@pytest.mark.asyncio +async def test_update_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +def test_delete_log_metric(transport: str = 'grpc', request_type=logging_metrics.DeleteLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_log_metric_from_dict(): + test_delete_log_metric(request_type=dict) + + +def test_delete_log_metric_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + client.delete_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + +@pytest.mark.asyncio +async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_metric_async_from_dict(): + await test_delete_log_metric_async(request_type=dict) + + +def test_delete_log_metric_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + call.return_value = None + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_log_metric_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + + request.metric_name = 'metric_name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name/value', + ) in kw['metadata'] + + +def test_delete_log_metric_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + + +def test_delete_log_metric_flattened_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), + metric_name='metric_name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_log_metric_flattened_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].metric_name == 'metric_name_value' + + +@pytest.mark.asyncio +async def test_delete_log_metric_flattened_error_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), + metric_name='metric_name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = MetricsServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, + ) + +def test_metrics_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metrics_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_log_metrics', + 'get_log_metric', + 'create_log_metric', + 'update_log_metric', + 'delete_log_metric', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 
'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + quota_project_id="octopus", + ) + + +def test_metrics_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + MetricsServiceV2Client() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_metrics_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, 
transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_metrics_service_v2_host_no_port(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + ) + assert client.transport._host == 'logging.googleapis.com:443' + + +def test_metrics_service_v2_host_with_port(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + ) + assert client.transport._host == 'logging.googleapis.com:8000' + +def test_metrics_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MetricsServiceV2GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_metrics_service_v2_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + 
("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_metric_path(): + project = "squid" + metric = "clam" + expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + actual = 
MetricsServiceV2Client.log_metric_path(project, metric) + assert expected == actual + + +def test_parse_log_metric_path(): + expected = { + "project": "whelk", + "metric": "octopus", + } + path = MetricsServiceV2Client.log_metric_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_log_metric_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = MetricsServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = MetricsServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = MetricsServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = MetricsServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = MetricsServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = MetricsServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricsServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = MetricsServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = MetricsServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = MetricsServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = MetricsServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = MetricsServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = MetricsServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = MetricsServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc new file mode 100644 index 000000000000..f77eadc824ae --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/redis/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/redis/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/redis/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in new file mode 100644 index 000000000000..5a95b2698cbb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/redis *.py +recursive-include google/cloud/redis_v1 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/redis/README.rst b/packages/gapic-generator/tests/integration/goldens/redis/README.rst new file mode 100644 index 000000000000..45c06d80c64a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Redis API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Redis API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install /path/to/library
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    python3 -m venv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install \path\to\library
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py
new file mode 100644
index 000000000000..a9b259f3561e
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# google-cloud-redis documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+import shlex
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath(".."))
+
+__version__ = "0.1.0"
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.coverage",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.todo",
+    "sphinx.ext.viewcode",
+]
+
+# autodoc/autosummary flags
+autoclass_content = "both"
+autodoc_default_flags = ["members"]
+autosummary_generate = True
+
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-redis" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-redis-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-redis.tex", + u"google-cloud-redis Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + master_doc, + "google-cloud-redis", + u"Google Cloud Redis Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-redis", + u"google-cloud-redis Documentation", + author, + "google-cloud-redis", + "GAPIC library for Google Cloud Redis API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst new file mode 100644 index 000000000000..f7ccd42cd0a6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + redis_v1/services + redis_v1/types diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst new file mode 100644 index 000000000000..0e3d7cfa809e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst @@ -0,0 +1,10 @@ +CloudRedis +---------------------------- + +.. automodule:: google.cloud.redis_v1.services.cloud_redis + :members: + :inherited-members: + +.. 
automodule:: google.cloud.redis_v1.services.cloud_redis.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst new file mode 100644 index 000000000000..dba59a371880 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Redis v1 API +====================================== +.. toctree:: + :maxdepth: 2 + + cloud_redis diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst new file mode 100644 index 000000000000..38a6d6595f8a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Redis v1 API +=================================== + +.. automodule:: google.cloud.redis_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py new file mode 100644 index 000000000000..40db9a635620 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.redis_v1.services.cloud_redis.client import CloudRedisClient +from google.cloud.redis_v1.services.cloud_redis.async_client import CloudRedisAsyncClient + +from google.cloud.redis_v1.types.cloud_redis import CreateInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import DeleteInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import ExportInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import FailoverInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import GcsDestination +from google.cloud.redis_v1.types.cloud_redis import GcsSource +from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import ImportInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import InputConfig +from google.cloud.redis_v1.types.cloud_redis import Instance +from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest +from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse +from google.cloud.redis_v1.types.cloud_redis import LocationMetadata +from google.cloud.redis_v1.types.cloud_redis import OperationMetadata +from google.cloud.redis_v1.types.cloud_redis import OutputConfig +from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import UpgradeInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import ZoneMetadata + +__all__ = ('CloudRedisClient', + 'CloudRedisAsyncClient', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'ExportInstanceRequest', + 'FailoverInstanceRequest', + 'GcsDestination', + 'GcsSource', + 'GetInstanceRequest', + 'ImportInstanceRequest', + 'InputConfig', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'LocationMetadata', + 'OperationMetadata', + 'OutputConfig', + 'UpdateInstanceRequest', + 
'UpgradeInstanceRequest', + 'ZoneMetadata', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed new file mode 100644 index 000000000000..960151ecda8b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-redis package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py new file mode 100644 index 000000000000..2ec655b66627 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.cloud_redis import CloudRedisClient +from .services.cloud_redis import CloudRedisAsyncClient + +from .types.cloud_redis import CreateInstanceRequest +from .types.cloud_redis import DeleteInstanceRequest +from .types.cloud_redis import ExportInstanceRequest +from .types.cloud_redis import FailoverInstanceRequest +from .types.cloud_redis import GcsDestination +from .types.cloud_redis import GcsSource +from .types.cloud_redis import GetInstanceRequest +from .types.cloud_redis import ImportInstanceRequest +from .types.cloud_redis import InputConfig +from .types.cloud_redis import Instance +from .types.cloud_redis import ListInstancesRequest +from .types.cloud_redis import ListInstancesResponse +from .types.cloud_redis import LocationMetadata +from .types.cloud_redis import OperationMetadata +from .types.cloud_redis import OutputConfig +from .types.cloud_redis import UpdateInstanceRequest +from .types.cloud_redis import UpgradeInstanceRequest +from .types.cloud_redis import ZoneMetadata + +__all__ = ( + 'CloudRedisAsyncClient', +'CloudRedisClient', +'CreateInstanceRequest', +'DeleteInstanceRequest', +'ExportInstanceRequest', +'FailoverInstanceRequest', +'GcsDestination', +'GcsSource', +'GetInstanceRequest', +'ImportInstanceRequest', +'InputConfig', +'Instance', +'ListInstancesRequest', +'ListInstancesResponse', +'LocationMetadata', +'OperationMetadata', +'OutputConfig', +'UpdateInstanceRequest', +'UpgradeInstanceRequest', +'ZoneMetadata', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json new file mode 100644 index 000000000000..038bb9952176 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json @@ -0,0 +1,113 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": 
"python", + "libraryPackage": "google.cloud.redis_v1", + "protoPackage": "google.cloud.redis.v1", + "schema": "1.0", + "services": { + "CloudRedis": { + "clients": { + "grpc": { + "libraryClient": "CloudRedisClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportInstance": { + "methods": [ + "export_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportInstance": { + "methods": [ + "import_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudRedisAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportInstance": { + "methods": [ + "export_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportInstance": { + "methods": [ + "import_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed new file mode 100644 index 000000000000..960151ecda8b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-redis package uses inline types. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py new file mode 100644 index 000000000000..900f778f73b9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudRedisClient +from .async_client import CloudRedisAsyncClient + +__all__ = ( + 'CloudRedisClient', + 'CloudRedisAsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py new file mode 100644 index 000000000000..512bab3f1268 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -0,0 +1,1097 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from .client import CloudRedisClient + + +class CloudRedisAsyncClient: + """Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + """ + + _client: 
CloudRedisClient + + DEFAULT_ENDPOINT = CloudRedisClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudRedisClient.DEFAULT_MTLS_ENDPOINT + + instance_path = staticmethod(CloudRedisClient.instance_path) + parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) + common_billing_account_path = staticmethod(CloudRedisClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudRedisClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudRedisClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudRedisClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudRedisClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudRedisClient.parse_common_organization_path) + common_project_path = staticmethod(CloudRedisClient.common_project_path) + parse_common_project_path = staticmethod(CloudRedisClient.parse_common_project_path) + common_location_path = staticmethod(CloudRedisClient.common_location_path) + parse_common_location_path = staticmethod(CloudRedisClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisAsyncClient: The constructed client. + """ + return CloudRedisClient.from_service_account_info.__func__(CloudRedisAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisAsyncClient: The constructed client. + """ + return CloudRedisClient.from_service_account_file.__func__(CloudRedisAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudRedisTransport: + """Returns the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(CloudRedisClient).get_transport_class, type(CloudRedisClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, CloudRedisTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud redis client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.CloudRedisTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = CloudRedisClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def list_instances(self, + request: cloud_redis.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists all Redis instances owned by a project in either the + specified location (region) or all locations. + + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + Args: + request (:class:`google.cloud.redis_v1.types.ListInstancesRequest`): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_instances, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_instance(self, + request: cloud_redis.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis.Instance: + r"""Gets the details of a specific Redis instance. + + Args: + request (:class:`google.cloud.redis_v1.types.GetInstanceRequest`): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.types.Instance: + A Google Cloud Redis instance. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_instance(self, + request: cloud_redis.CreateInstanceRequest = None, + *, + parent: str = None, + instance_id: str = None, + instance: cloud_redis.Instance = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Redis instance based on the specified tier and memory + size. + + By default, the instance is accessible from the project's + `default network `__. + + The creation is executed asynchronously and callers may check + the returned operation to track its progress. Once the operation + is completed the Redis instance will be fully functional. + Completed longrunning.Operation will contain the new instance + object in the response field. + + The returned operation is automatically deleted after a few + hours, so there is no need to call DeleteOperation. + + Args: + request (:class:`google.cloud.redis_v1.types.CreateInstanceRequest`): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. 
The logical name of the Redis instance in the + customer project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.redis_v1.types.Instance`): + Required. A Redis [Instance] resource + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance_id, instance]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_instance(self, + request: cloud_redis.UpdateInstanceRequest = None, + *, + update_mask: field_mask_pb2.FieldMask = None, + instance: cloud_redis.Instance = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + Args: + request (:class:`google.cloud.redis_v1.types.UpdateInstanceRequest`): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. At least one path + must be supplied in this field. 
The elements of the + repeated paths field may only include these fields from + [Instance][google.cloud.redis.v1.Instance]: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.redis_v1.types.Instance`): + Required. Update description. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, instance]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if update_mask is not None: + request.update_mask = update_mask + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def upgrade_instance(self, + request: cloud_redis.UpgradeInstanceRequest = None, + *, + name: str = None, + redis_version: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Upgrades Redis instance to the newer Redis version + specified in the request. + + Args: + request (:class:`google.cloud.redis_v1.types.UpgradeInstanceRequest`): + The request object. Request for + [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + redis_version (:class:`str`): + Required. Specifies the target + version of Redis software to upgrade to. + + This corresponds to the ``redis_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, redis_version]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.UpgradeInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if redis_version is not None: + request.redis_version = redis_version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.upgrade_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def import_instance(self, + request: cloud_redis.ImportInstanceRequest = None, + *, + name: str = None, + input_config: cloud_redis.InputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Import a Redis RDB snapshot file from Cloud Storage + into a Redis instance. + Redis may stop serving during this operation. Instance + state will be IMPORTING for entire operation. When + complete, the instance will contain only data from the + imported file. + + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Args: + request (:class:`google.cloud.redis_v1.types.ImportInstanceRequest`): + The request object. Request for + [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_config (:class:`google.cloud.redis_v1.types.InputConfig`): + Required. Specify data to be + imported. + + This corresponds to the ``input_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, input_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.ImportInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if input_config is not None: + request.input_config = input_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def export_instance(self, + request: cloud_redis.ExportInstanceRequest = None, + *, + name: str = None, + output_config: cloud_redis.OutputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Export Redis instance data into a Redis RDB format + file in Cloud Storage. + Redis will continue serving during this operation. 
+ The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Args: + request (:class:`google.cloud.redis_v1.types.ExportInstanceRequest`): + The request object. Request for + [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_config (:class:`google.cloud.redis_v1.types.OutputConfig`): + Required. Specify data to be + exported. + + This corresponds to the ``output_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, output_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.ExportInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if output_config is not None: + request.output_config = output_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.export_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def failover_instance(self, + request: cloud_redis.FailoverInstanceRequest = None, + *, + name: str = None, + data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Initiates a failover of the master node to current + replica node for a specific STANDARD tier Cloud + Memorystore for Redis instance. + + Args: + request (:class:`google.cloud.redis_v1.types.FailoverInstanceRequest`): + The request object. Request for + [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ data_protection_mode (:class:`google.cloud.redis_v1.types.FailoverInstanceRequest.DataProtectionMode`): + Optional. Available data protection modes that the user + can choose. If it's unspecified, data protection mode + will be LIMITED_DATA_LOSS by default. + + This corresponds to the ``data_protection_mode`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, data_protection_mode]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.FailoverInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if data_protection_mode is not None: + request.data_protection_mode = data_protection_mode + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.failover_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance(self, + request: cloud_redis.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + Args: + request (:class:`google.cloud.redis_v1.types.DeleteInstanceRequest`): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. 
A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_instance, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-redis", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudRedisAsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py new file mode 100644 index 000000000000..87892f6c857e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -0,0 +1,1284 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudRedisGrpcTransport +from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport + + +class CloudRedisClientMeta(type): + """Metaclass for the CloudRedis client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] + _transport_registry["grpc"] = CloudRedisGrpcTransport + _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[CloudRedisTransport]: + """Returns an appropriate transport class. 
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class CloudRedisClient(metaclass=CloudRedisClientMeta):
+    """Configures and manages Cloud Memorystore for Redis instances
+
+    Google Cloud Memorystore for Redis v1
+
+    The ``redis.googleapis.com`` service implements the Google Cloud
+    Memorystore for Redis API and defines the following resource model
+    for managing Redis instances:
+
+    -  The service works with a collection of cloud projects, named:
+       ``/projects/*``
+    -  Each project has a collection of available locations, named:
+       ``/locations/*``
+    -  Each location has a collection of Redis instances, named:
+       ``/instances/*``
+    -  As such, Redis instances are resources of the form:
+       ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}``
+
+    Note that location_id must be referring to a GCP ``region``; for
+    example:
+
+    -  ``projects/redpepper-1290/locations/us-central1/instances/my-redis``
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "redis.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudRedisTransport: + """Returns the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def instance_path(project: str,location: str,instance: str,) -> str:
+        """Returns a fully-qualified instance string."""
+        return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, )
+
+    @staticmethod
+    def parse_instance_path(path: str) -> Dict[str,str]:
+        """Parses a instance path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Union[str, CloudRedisTransport, None] = None,
+            client_options: Optional[client_options_lib.ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the cloud redis client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, CloudRedisTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, CloudRedisTransport): + # transport is a CloudRedisTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def list_instances(self, + request: cloud_redis.ListInstancesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists all Redis instances owned by a project in either the + specified location (region) or all locations. 
+ + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + Args: + request (google.cloud.redis_v1.types.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + parent (str): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.ListInstancesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloud_redis.ListInstancesRequest): + request = cloud_redis.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_instance(self, + request: cloud_redis.GetInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis.Instance: + r"""Gets the details of a specific Redis instance. + + Args: + request (google.cloud.redis_v1.types.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.types.Instance: + A Google Cloud Redis instance. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.GetInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.GetInstanceRequest): + request = cloud_redis.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_instance(self, + request: cloud_redis.CreateInstanceRequest = None, + *, + parent: str = None, + instance_id: str = None, + instance: cloud_redis.Instance = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a Redis instance based on the specified tier and memory + size. 
+
+        By default, the instance is accessible from the project's
+        `default network <https://cloud.google.com/vpc/docs/vpc>`__.
+
+        The creation is executed asynchronously and callers may check
+        the returned operation to track its progress. Once the operation
+        is completed the Redis instance will be fully functional.
+        Completed longrunning.Operation will contain the new instance
+        object in the response field.
+
+        The returned operation is automatically deleted after a few
+        hours, so there is no need to call DeleteOperation.
+
+        Args:
+            request (google.cloud.redis_v1.types.CreateInstanceRequest):
+                The request object. Request for
+                [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance].
+            parent (str):
+                Required. The resource name of the instance location
+                using the form:
+                ``projects/{project_id}/locations/{location_id}`` where
+                ``location_id`` refers to a GCP region.
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance_id (str):
+                Required. The logical name of the Redis instance in the
+                customer project with the following restrictions:
+
+                -  Must contain only lowercase letters, numbers, and
+                   hyphens.
+                -  Must start with a letter.
+                -  Must be between 1-40 characters.
+                -  Must end with a number or a letter.
+                -  Must be unique within the customer project / location
+
+                This corresponds to the ``instance_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            instance (google.cloud.redis_v1.types.Instance):
+                Required. A Redis [Instance] resource
+                This corresponds to the ``instance`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance_id, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.CreateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.CreateInstanceRequest): + request = cloud_redis.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_instance(self, + request: cloud_redis.UpdateInstanceRequest = None, + *, + update_mask: field_mask_pb2.FieldMask = None, + instance: cloud_redis.Instance = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + Args: + request (google.cloud.redis_v1.types.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. At least one path + must be supplied in this field. The elements of the + repeated paths field may only include these fields from + [Instance][google.cloud.redis.v1.Instance]: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.redis_v1.types.Instance): + Required. Update description. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.UpdateInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.UpdateInstanceRequest): + request = cloud_redis.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if update_mask is not None: + request.update_mask = update_mask + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def upgrade_instance(self, + request: cloud_redis.UpgradeInstanceRequest = None, + *, + name: str = None, + redis_version: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Upgrades Redis instance to the newer Redis version + specified in the request. + + Args: + request (google.cloud.redis_v1.types.UpgradeInstanceRequest): + The request object. Request for + [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + redis_version (str): + Required. Specifies the target + version of Redis software to upgrade to. + + This corresponds to the ``redis_version`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, redis_version]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.UpgradeInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.UpgradeInstanceRequest): + request = cloud_redis.UpgradeInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if redis_version is not None: + request.redis_version = redis_version + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.upgrade_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def import_instance(self, + request: cloud_redis.ImportInstanceRequest = None, + *, + name: str = None, + input_config: cloud_redis.InputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Import a Redis RDB snapshot file from Cloud Storage + into a Redis instance. + Redis may stop serving during this operation. 
Instance + state will be IMPORTING for entire operation. When + complete, the instance will contain only data from the + imported file. + + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Args: + request (google.cloud.redis_v1.types.ImportInstanceRequest): + The request object. Request for + [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + input_config (google.cloud.redis_v1.types.InputConfig): + Required. Specify data to be + imported. + + This corresponds to the ``input_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, input_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.ImportInstanceRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.ImportInstanceRequest): + request = cloud_redis.ImportInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if input_config is not None: + request.input_config = input_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def export_instance(self, + request: cloud_redis.ExportInstanceRequest = None, + *, + name: str = None, + output_config: cloud_redis.OutputConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Export Redis instance data into a Redis RDB format + file in Cloud Storage. + Redis will continue serving during this operation. + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Args: + request (google.cloud.redis_v1.types.ExportInstanceRequest): + The request object. Request for + [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. + name (str): + Required. 
Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + output_config (google.cloud.redis_v1.types.OutputConfig): + Required. Specify data to be + exported. + + This corresponds to the ``output_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, output_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.ExportInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.ExportInstanceRequest): + request = cloud_redis.ExportInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if output_config is not None: + request.output_config = output_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def failover_instance(self, + request: cloud_redis.FailoverInstanceRequest = None, + *, + name: str = None, + data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Initiates a failover of the master node to current + replica node for a specific STANDARD tier Cloud + Memorystore for Redis instance. + + Args: + request (google.cloud.redis_v1.types.FailoverInstanceRequest): + The request object. Request for + [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_protection_mode (google.cloud.redis_v1.types.FailoverInstanceRequest.DataProtectionMode): + Optional. 
Available data protection modes that the user + can choose. If it's unspecified, data protection mode + will be LIMITED_DATA_LOSS by default. + + This corresponds to the ``data_protection_mode`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A Google + Cloud Redis instance. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, data_protection_mode]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.FailoverInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.FailoverInstanceRequest): + request = cloud_redis.FailoverInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if data_protection_mode is not None: + request.data_protection_mode = data_protection_mode + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.failover_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance(self, + request: cloud_redis.DeleteInstanceRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + Args: + request (google.cloud.redis_v1.types.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + The JSON representation for Empty is empty JSON + object {}. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.DeleteInstanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.DeleteInstanceRequest): + request = cloud_redis.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-redis", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "CloudRedisClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py new file mode 100644 index 000000000000..ea1c2287e22e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.redis_v1.types import cloud_redis + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.redis_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. 
class ListInstancesPager:
    """Synchronous pager over ``list_instances`` results.

    This class thinly wraps an initial
    :class:`google.cloud.redis_v1.types.ListInstancesResponse` object and
    makes ``__iter__`` walk the ``instances`` field, transparently
    issuing further ``ListInstances`` requests while a
    ``next_page_token`` is present.

    All the usual :class:`google.cloud.redis_v1.types.ListInstancesResponse`
    attributes are available on the pager; if multiple requests are
    made, only the most recent response is retained for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., cloud_redis.ListInstancesResponse],
            request: cloud_redis.ListInstancesRequest,
            response: cloud_redis.ListInstancesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called,
                and which instantiated this pager.
            request (google.cloud.redis_v1.types.ListInstancesRequest):
                The initial request object.
            response (google.cloud.redis_v1.types.ListInstancesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should
                be sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation never touches the caller's object.
        self._request = cloud_redis.ListInstancesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[cloud_redis.ListInstancesResponse]:
        # Yield the current page, then fetch pages for as long as a
        # continuation token is present.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterable[cloud_redis.Instance]:
        # Flatten pages into a single stream of instances.
        return (item for page in self.pages for item in page.instances)

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class ListInstancesAsyncPager:
    """Asynchronous pager over ``list_instances`` results.

    This class thinly wraps an initial
    :class:`google.cloud.redis_v1.types.ListInstancesResponse` object and
    makes ``__aiter__`` walk the ``instances`` field, transparently
    issuing further ``ListInstances`` requests while a
    ``next_page_token`` is present.

    All the usual :class:`google.cloud.redis_v1.types.ListInstancesResponse`
    attributes are available on the pager; if multiple requests are
    made, only the most recent response is retained for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]],
            request: cloud_redis.ListInstancesRequest,
            response: cloud_redis.ListInstancesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called,
                and which instantiated this pager.
            request (google.cloud.redis_v1.types.ListInstancesRequest):
                The initial request object.
            response (google.cloud.redis_v1.types.ListInstancesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should
                be sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation never touches the caller's object.
        self._request = cloud_redis.ListInstancesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[cloud_redis.ListInstancesResponse]:
        # Yield the current page, then await pages for as long as a
        # continuation token is present.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterable[cloud_redis.Instance]:
        async def flatten():
            # Flatten async pages into a single async stream of instances.
            async for page in self.pages:
                for item in page.instances:
                    yield item

        return flatten()

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import CloudRedisTransport +from .grpc import CloudRedisGrpcTransport +from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] +_transport_registry['grpc'] = CloudRedisGrpcTransport +_transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport + +__all__ = ( + 'CloudRedisTransport', + 'CloudRedisGrpcTransport', + 'CloudRedisGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py new file mode 100644 index 000000000000..f0b0ba3137b9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources

import google.auth  # type: ignore
import google.api_core  # type: ignore
from google.api_core import exceptions as core_exceptions  # type: ignore
from google.api_core import gapic_v1  # type: ignore
from google.api_core import retry as retries  # type: ignore
from google.api_core import operations_v1  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore

from google.cloud.redis_v1.types import cloud_redis
from google.longrunning import operations_pb2  # type: ignore

# Build the default client info from the installed distribution's
# version; fall back to an unversioned ClientInfo when the package is
# not installed (e.g. when run from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-redis',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

# Determine the installed google-auth version; its API surface changed
# in 1.25.0/1.26.0 (see _get_scopes_kwargs below).
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try:  # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None


class CloudRedisTransport(abc.ABC):
    """Abstract transport class for CloudRedis."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'redis.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]):
                Authorization credentials to attach to requests; when
                omitted, credentials are ascertained from the
                environment. Mutually exclusive with
                ``credentials_file``.
            credentials_file (Optional[str]): A file loadable with
                :func:`google.auth.load_credentials_from_file`.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use
                for billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                Client info used to send a user-agent string along with
                API requests; only needed when developing your own
                client library.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are given.
        """
        # Default to HTTPS port 443 when the host carries no explicit port.
        if ':' not in host:
            host += ':443'
        self._host = host

        # Scope kwargs depend on the installed google-auth version.
        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)

        self._scopes = scopes or self.AUTH_SCOPES

        # Resolve credentials: explicit object, file, or environment default.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )

        self._credentials = credentials

    # TODO(busunkim): This method lives on the base transport to avoid
    # duplication across transport classes; delete once the minimum
    # required google-auth version is >= 1.25.0.
    @classmethod
    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
        new_auth = bool(
            _GOOGLE_AUTH_VERSION
            and packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        )
        if new_auth:
            # google-auth >= 1.25.0 understands default_scopes.
            return {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        return {"scopes": scopes or cls.AUTH_SCOPES}

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods; every RPC shares the same
        # wrapping (600s default timeout, no retry policy).
        self._wrapped_methods = {
            rpc: gapic_v1.method.wrap_method(
                rpc,
                default_timeout=600.0,
                client_info=client_info,
            )
            for rpc in (
                self.list_instances,
                self.get_instance,
                self.create_instance,
                self.update_instance,
                self.upgrade_instance,
                self.import_instance,
                self.export_instance,
                self.failover_instance,
                self.delete_instance,
            )
        }

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    # Each RPC below is an abstract property returning the concrete
    # transport's callable; sync transports return the response type,
    # async transports an Awaitable of it.

    @property
    def list_instances(self) -> Callable[
            [cloud_redis.ListInstancesRequest],
            Union[
                cloud_redis.ListInstancesResponse,
                Awaitable[cloud_redis.ListInstancesResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_instance(self) -> Callable[
            [cloud_redis.GetInstanceRequest],
            Union[
                cloud_redis.Instance,
                Awaitable[cloud_redis.Instance]
            ]]:
        raise NotImplementedError()

    @property
    def create_instance(self) -> Callable[
            [cloud_redis.CreateInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def update_instance(self) -> Callable[
            [cloud_redis.UpdateInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def upgrade_instance(self) -> Callable[
            [cloud_redis.UpgradeInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def import_instance(self) -> Callable[
            [cloud_redis.ImportInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def export_instance(self) -> Callable[
            [cloud_redis.ExportInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def failover_instance(self) -> Callable[
            [cloud_redis.FailoverInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def delete_instance(self) -> Callable[
            [cloud_redis.DeleteInstanceRequest],
            Union[
                operations_pb2.Operation,
                Awaitable[operations_pb2.Operation]
            ]]:
        raise NotImplementedError()
= ( + 'CloudRedisTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py new file mode 100644 index 000000000000..6130efc05e01 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -0,0 +1,538 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO + + +class CloudRedisGrpcTransport(CloudRedisTransport): + """gRPC backend transport for CloudRedis. 
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'redis.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists all Redis instances owned by a project in either the + specified location (region) or all locations. + + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ListInstances', + request_serializer=cloud_redis.ListInstancesRequest.serialize, + response_deserializer=cloud_redis.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + r"""Return a callable for the get instance method over gRPC. 
+ + Gets the details of a specific Redis instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance' not in self._stubs: + self._stubs['get_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstance', + request_serializer=cloud_redis.GetInstanceRequest.serialize, + response_deserializer=cloud_redis.Instance.deserialize, + ) + return self._stubs['get_instance'] + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Redis instance based on the specified tier and memory + size. + + By default, the instance is accessible from the project's + `default network `__. + + The creation is executed asynchronously and callers may check + the returned operation to track its progress. Once the operation + is completed the Redis instance will be fully functional. + Completed longrunning.Operation will contain the new instance + object in the response field. + + The returned operation is automatically deleted after a few + hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_instance' not in self._stubs: + self._stubs['create_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/CreateInstance', + request_serializer=cloud_redis.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance'] + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_instance' not in self._stubs: + self._stubs['update_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + request_serializer=cloud_redis.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance'] + + @property + def upgrade_instance(self) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the upgrade instance method over gRPC. + + Upgrades Redis instance to the newer Redis version + specified in the request. + + Returns: + Callable[[~.UpgradeInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'upgrade_instance' not in self._stubs: + self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', + request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['upgrade_instance'] + + @property + def import_instance(self) -> Callable[ + [cloud_redis.ImportInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the import instance method over gRPC. + + Import a Redis RDB snapshot file from Cloud Storage + into a Redis instance. + Redis may stop serving during this operation. Instance + state will be IMPORTING for entire operation. When + complete, the instance will contain only data from the + imported file. + + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.ImportInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_instance' not in self._stubs: + self._stubs['import_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ImportInstance', + request_serializer=cloud_redis.ImportInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_instance'] + + @property + def export_instance(self) -> Callable[ + [cloud_redis.ExportInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the export instance method over gRPC. + + Export Redis instance data into a Redis RDB format + file in Cloud Storage. 
+ Redis will continue serving during this operation. + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.ExportInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'export_instance' not in self._stubs: + self._stubs['export_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ExportInstance', + request_serializer=cloud_redis.ExportInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['export_instance'] + + @property + def failover_instance(self) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the failover instance method over gRPC. + + Initiates a failover of the master node to current + replica node for a specific STANDARD tier Cloud + Memorystore for Redis instance. + + Returns: + Callable[[~.FailoverInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'failover_instance' not in self._stubs: + self._stubs['failover_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/FailoverInstance', + request_serializer=cloud_redis.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['failover_instance'] + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance' not in self._stubs: + self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + request_serializer=cloud_redis.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_instance'] + + +__all__ = ( + 'CloudRedisGrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py new file mode 100644 index 000000000000..7e3bfc51f7fd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -0,0 +1,542 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import operations_v1 # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudRedisGrpcTransport + + +class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): + """gRPC AsyncIO backend transport for CloudRedis. 
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'redis.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed.
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Sanity check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + Awaitable[cloud_redis.ListInstancesResponse]]: + r"""Return a callable for the list instances method over gRPC. + + Lists all Redis instances owned by a project in either the + specified location (region) or all locations. + + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+        if 'list_instances' not in self._stubs:
+            self._stubs['list_instances'] = self.grpc_channel.unary_unary(
+                '/google.cloud.redis.v1.CloudRedis/ListInstances',
+                request_serializer=cloud_redis.ListInstancesRequest.serialize,
+                response_deserializer=cloud_redis.ListInstancesResponse.deserialize,
+            )
+        return self._stubs['list_instances']
+
+    @property
+    def get_instance(self) -> Callable[
+            [cloud_redis.GetInstanceRequest],
+            Awaitable[cloud_redis.Instance]]:
+        r"""Return a callable for the get instance method over gRPC.
+
+        Gets the details of a specific Redis instance.
+
+        Returns:
+            Callable[[~.GetInstanceRequest],
+                Awaitable[~.Instance]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_instance' not in self._stubs:
+            self._stubs['get_instance'] = self.grpc_channel.unary_unary(
+                '/google.cloud.redis.v1.CloudRedis/GetInstance',
+                request_serializer=cloud_redis.GetInstanceRequest.serialize,
+                response_deserializer=cloud_redis.Instance.deserialize,
+            )
+        return self._stubs['get_instance']
+
+    @property
+    def create_instance(self) -> Callable[
+            [cloud_redis.CreateInstanceRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the create instance method over gRPC.
+
+        Creates a Redis instance based on the specified tier and memory
+        size.
+
+        By default, the instance is accessible from the project's
+        `default network <https://cloud.google.com/vpc/docs/vpc>`__.
+
+        The creation is executed asynchronously and callers may check
+        the returned operation to track its progress. Once the operation
+        is completed the Redis instance will be fully functional.
+        Completed longrunning.Operation will contain the new instance
+        object in the response field.
+ + The returned operation is automatically deleted after a few + hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_instance' not in self._stubs: + self._stubs['create_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/CreateInstance', + request_serializer=cloud_redis.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance'] + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance method over gRPC. + + Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_instance' not in self._stubs: + self._stubs['update_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + request_serializer=cloud_redis.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance'] + + @property + def upgrade_instance(self) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the upgrade instance method over gRPC. + + Upgrades Redis instance to the newer Redis version + specified in the request. + + Returns: + Callable[[~.UpgradeInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'upgrade_instance' not in self._stubs: + self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', + request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['upgrade_instance'] + + @property + def import_instance(self) -> Callable[ + [cloud_redis.ImportInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the import instance method over gRPC. + + Import a Redis RDB snapshot file from Cloud Storage + into a Redis instance. + Redis may stop serving during this operation. Instance + state will be IMPORTING for entire operation. When + complete, the instance will contain only data from the + imported file. + + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. 
+ + Returns: + Callable[[~.ImportInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'import_instance' not in self._stubs: + self._stubs['import_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ImportInstance', + request_serializer=cloud_redis.ImportInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['import_instance'] + + @property + def export_instance(self) -> Callable[ + [cloud_redis.ExportInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the export instance method over gRPC. + + Export Redis instance data into a Redis RDB format + file in Cloud Storage. + Redis will continue serving during this operation. + The returned operation is automatically deleted after a + few hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.ExportInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'export_instance' not in self._stubs: + self._stubs['export_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ExportInstance', + request_serializer=cloud_redis.ExportInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['export_instance'] + + @property + def failover_instance(self) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the failover instance method over gRPC. + + Initiates a failover of the master node to current + replica node for a specific STANDARD tier Cloud + Memorystore for Redis instance. + + Returns: + Callable[[~.FailoverInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'failover_instance' not in self._stubs: + self._stubs['failover_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/FailoverInstance', + request_serializer=cloud_redis.FailoverInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['failover_instance'] + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance' not in self._stubs: + self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + request_serializer=cloud_redis.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_instance'] + + +__all__ = ( + 'CloudRedisGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py new file mode 100644 index 000000000000..1d86627eef24 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloud_redis import ( + CreateInstanceRequest, + DeleteInstanceRequest, + ExportInstanceRequest, + FailoverInstanceRequest, + GcsDestination, + GcsSource, + GetInstanceRequest, + ImportInstanceRequest, + InputConfig, + Instance, + ListInstancesRequest, + ListInstancesResponse, + LocationMetadata, + OperationMetadata, + OutputConfig, + UpdateInstanceRequest, + UpgradeInstanceRequest, + ZoneMetadata, +) + +__all__ = ( + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'ExportInstanceRequest', + 'FailoverInstanceRequest', + 'GcsDestination', + 'GcsSource', + 'GetInstanceRequest', + 'ImportInstanceRequest', + 'InputConfig', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'LocationMetadata', + 'OperationMetadata', + 'OutputConfig', + 'UpdateInstanceRequest', + 'UpgradeInstanceRequest', + 'ZoneMetadata', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py new file mode 100644 index 000000000000..9caecb067b31 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -0,0 +1,708 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.redis.v1', + manifest={ + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'GetInstanceRequest', + 'CreateInstanceRequest', + 'UpdateInstanceRequest', + 'UpgradeInstanceRequest', + 'DeleteInstanceRequest', + 'GcsSource', + 'InputConfig', + 'ImportInstanceRequest', + 'GcsDestination', + 'OutputConfig', + 'ExportInstanceRequest', + 'FailoverInstanceRequest', + 'OperationMetadata', + 'LocationMetadata', + 'ZoneMetadata', + }, +) + + +class Instance(proto.Message): + r"""A Google Cloud Redis instance. + Attributes: + name (str): + Required. Unique name of the resource in this scope + including project and location using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note: Redis instances are managed and addressed at regional + level so location_id here refers to a GCP region; however, + users may choose which specific zone (or collection of zones + for cross-zone instances) an instance should be provisioned + in. Refer to + [location_id][google.cloud.redis.v1.Instance.location_id] + and + [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] + fields for more details. + display_name (str): + An arbitrary and optional user-provided name + for the instance. + labels (Sequence[google.cloud.redis_v1.types.Instance.LabelsEntry]): + Resource labels to represent user provided + metadata + location_id (str): + Optional. The zone where the instance will be provisioned. + If not provided, the service will choose a zone for the + instance. For STANDARD_HA tier, instances will be created + across two zones for protection against zonal failures. 
If + [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] + is also provided, it must be different from + [location_id][google.cloud.redis.v1.Instance.location_id]. + alternative_location_id (str): + Optional. Only applicable to STANDARD_HA tier which protects + the instance against zonal failures by provisioning it + across two zones. If provided, it must be a different zone + from the one provided in + [location_id][google.cloud.redis.v1.Instance.location_id]. + redis_version (str): + Optional. The version of Redis software. If not provided, + latest supported version will be used. Currently, the + supported values are: + + - ``REDIS_3_2`` for Redis 3.2 compatibility + - ``REDIS_4_0`` for Redis 4.0 compatibility (default) + - ``REDIS_5_0`` for Redis 5.0 compatibility + reserved_ip_range (str): + Optional. The CIDR range of internal + addresses that are reserved for this instance. + If not provided, the service will choose an + unused /29 block, for example, 10.0.0.0/29 or + 192.168.0.0/29. Ranges must be unique and non- + overlapping with existing subnets in an + authorized network. + host (str): + Output only. Hostname or IP address of the + exposed Redis endpoint used by clients to + connect to the service. + port (int): + Output only. The port number of the exposed + Redis endpoint. + current_location_id (str): + Output only. The current zone where the Redis endpoint is + placed. For Basic Tier instances, this will always be the + same as the + [location_id][google.cloud.redis.v1.Instance.location_id] + provided by the user at creation time. For Standard Tier + instances, this can be either + [location_id][google.cloud.redis.v1.Instance.location_id] or + [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] + and can change after a failover event. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the instance was + created. 
+        state (google.cloud.redis_v1.types.Instance.State):
+            Output only. The current state of this
+            instance.
+        status_message (str):
+            Output only. Additional information about the
+            current status of this instance, if available.
+        redis_configs (Sequence[google.cloud.redis_v1.types.Instance.RedisConfigsEntry]):
+            Optional. Redis configuration parameters, according to
+            http://redis.io/topics/config. Currently, the only supported
+            parameters are:
+
+            Redis version 3.2 and newer:
+
+            - maxmemory-policy
+            - notify-keyspace-events
+
+            Redis version 4.0 and newer:
+
+            - activedefrag
+            - lfu-decay-time
+            - lfu-log-factor
+            - maxmemory-gb
+
+            Redis version 5.0 and newer:
+
+            - stream-node-max-bytes
+            - stream-node-max-entries
+        tier (google.cloud.redis_v1.types.Instance.Tier):
+            Required. The service tier of the instance.
+        memory_size_gb (int):
+            Required. Redis memory size in GiB.
+        authorized_network (str):
+            Optional. The full name of the Google Compute Engine
+            `network <https://cloud.google.com/vpc/docs/vpc>`__ to which
+            the instance is connected. If left unspecified, the
+            ``default`` network will be used.
+        persistence_iam_identity (str):
+            Output only. Cloud IAM identity used by import / export
+            operations to transfer data to/from Cloud Storage. Format is
+            "serviceAccount:<service_account_email>". The value may
+            change over time for a given instance so should be checked
+            before each import/export operation.
+        connect_mode (google.cloud.redis_v1.types.Instance.ConnectMode):
+            Optional. The network connect mode of the Redis instance. If
+            not provided, the connect mode defaults to DIRECT_PEERING.
+ """ + class State(proto.Enum): + r"""Represents the different states of a Redis instance.""" + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + UPDATING = 3 + DELETING = 4 + REPAIRING = 5 + MAINTENANCE = 6 + IMPORTING = 8 + FAILING_OVER = 9 + + class Tier(proto.Enum): + r"""Available service tiers to choose from""" + TIER_UNSPECIFIED = 0 + BASIC = 1 + STANDARD_HA = 3 + + class ConnectMode(proto.Enum): + r"""Available connection modes.""" + CONNECT_MODE_UNSPECIFIED = 0 + DIRECT_PEERING = 1 + PRIVATE_SERVICE_ACCESS = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + location_id = proto.Field( + proto.STRING, + number=4, + ) + alternative_location_id = proto.Field( + proto.STRING, + number=5, + ) + redis_version = proto.Field( + proto.STRING, + number=7, + ) + reserved_ip_range = proto.Field( + proto.STRING, + number=9, + ) + host = proto.Field( + proto.STRING, + number=10, + ) + port = proto.Field( + proto.INT32, + number=11, + ) + current_location_id = proto.Field( + proto.STRING, + number=12, + ) + create_time = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=14, + enum=State, + ) + status_message = proto.Field( + proto.STRING, + number=15, + ) + redis_configs = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + tier = proto.Field( + proto.ENUM, + number=17, + enum=Tier, + ) + memory_size_gb = proto.Field( + proto.INT32, + number=18, + ) + authorized_network = proto.Field( + proto.STRING, + number=20, + ) + persistence_iam_identity = proto.Field( + proto.STRING, + number=21, + ) + connect_mode = proto.Field( + proto.ENUM, + number=22, + enum=ConnectMode, + ) + + +class ListInstancesRequest(proto.Message): + r"""Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
+ + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + page_size (int): + The maximum number of items to return. + + If not specified, a default value of 1000 will be used by + the service. Regardless of the page_size value, the response + may include a partial list and a caller should only rely on + response's + [``next_page_token``][google.cloud.redis.v1.ListInstancesResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + The ``next_page_token`` value returned from a previous + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances] + request, if any. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInstancesResponse(proto.Message): + r"""Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Attributes: + instances (Sequence[google.cloud.redis_v1.types.Instance]): + A list of Redis instances in the project in the specified + location, or across all locations. + + If the ``location_id`` in the parent field of the request is + "-", all regions available to the project are queried, and + the results aggregated. If in such an aggregated query a + location is unavailable, a dummy Redis entry is included in + the response with the ``name`` field set to a value of the + form + ``projects/{project_id}/locations/{location_id}/instances/``- + and the ``status`` field set to ERROR and ``status_message`` + field set to "location not available for ListInstances". + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (Sequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + instances = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + instance_id (str): + Required. The logical name of the Redis instance in the + customer project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + instance (google.cloud.redis_v1.types.Instance): + Required. A Redis [Instance] resource + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + instance_id = proto.Field( + proto.STRING, + number=2, + ) + instance = proto.Field( + proto.MESSAGE, + number=3, + message='Instance', + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. At least one path must + be supplied in this field. 
The elements of the repeated + paths field may only include these fields from + [Instance][google.cloud.redis.v1.Instance]: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + instance (google.cloud.redis_v1.types.Instance): + Required. Update description. Only fields specified in + update_mask are updated. + """ + + update_mask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance = proto.Field( + proto.MESSAGE, + number=2, + message='Instance', + ) + + +class UpgradeInstanceRequest(proto.Message): + r"""Request for + [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + redis_version (str): + Required. Specifies the target version of + Redis software to upgrade to. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + redis_version = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class GcsSource(proto.Message): + r"""The Cloud Storage location for the input content + Attributes: + uri (str): + Required. Source data URI. (e.g. + 'gs://my_bucket/my_object'). + """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + + +class InputConfig(proto.Message): + r"""The input content + Attributes: + gcs_source (google.cloud.redis_v1.types.GcsSource): + Google Cloud Storage location where input + content is located. 
+ """ + + gcs_source = proto.Field( + proto.MESSAGE, + number=1, + oneof='source', + message='GcsSource', + ) + + +class ImportInstanceRequest(proto.Message): + r"""Request for + [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + input_config (google.cloud.redis_v1.types.InputConfig): + Required. Specify data to be imported. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + input_config = proto.Field( + proto.MESSAGE, + number=3, + message='InputConfig', + ) + + +class GcsDestination(proto.Message): + r"""The Cloud Storage location for the output content + Attributes: + uri (str): + Required. Data destination URI (e.g. + 'gs://my_bucket/my_object'). Existing files will be + overwritten. + """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + + +class OutputConfig(proto.Message): + r"""The output content + Attributes: + gcs_destination (google.cloud.redis_v1.types.GcsDestination): + Google Cloud Storage destination for output + content. + """ + + gcs_destination = proto.Field( + proto.MESSAGE, + number=1, + oneof='destination', + message='GcsDestination', + ) + + +class ExportInstanceRequest(proto.Message): + r"""Request for + [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + output_config (google.cloud.redis_v1.types.OutputConfig): + Required. Specify data to be exported. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + output_config = proto.Field( + proto.MESSAGE, + number=3, + message='OutputConfig', + ) + + +class FailoverInstanceRequest(proto.Message): + r"""Request for + [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + data_protection_mode (google.cloud.redis_v1.types.FailoverInstanceRequest.DataProtectionMode): + Optional. Available data protection modes that the user can + choose. If it's unspecified, data protection mode will be + LIMITED_DATA_LOSS by default. + """ + class DataProtectionMode(proto.Enum): + r"""Specifies different modes of operation in relation to the + data retention. + """ + DATA_PROTECTION_MODE_UNSPECIFIED = 0 + LIMITED_DATA_LOSS = 1 + FORCE_DATA_LOSS = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + data_protection_mode = proto.Field( + proto.ENUM, + number=2, + enum=DataProtectionMode, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the v1 metadata of the long-running operation. + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Creation timestamp. + end_time (google.protobuf.timestamp_pb2.Timestamp): + End timestamp. + target (str): + Operation target. + verb (str): + Operation verb. + status_detail (str): + Operation status details. + cancel_requested (bool): + Specifies if cancellation was requested for + the operation. + api_version (str): + API version. 
+ """ + + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target = proto.Field( + proto.STRING, + number=3, + ) + verb = proto.Field( + proto.STRING, + number=4, + ) + status_detail = proto.Field( + proto.STRING, + number=5, + ) + cancel_requested = proto.Field( + proto.BOOL, + number=6, + ) + api_version = proto.Field( + proto.STRING, + number=7, + ) + + +class LocationMetadata(proto.Message): + r"""This location metadata represents additional configuration options + for a given location where a Redis instance may be created. All + fields are output only. It is returned as content of the + ``google.cloud.location.Location.metadata`` field. + + Attributes: + available_zones (Sequence[google.cloud.redis_v1.types.LocationMetadata.AvailableZonesEntry]): + Output only. The set of available zones in the location. The + map is keyed by the lowercase ID of each zone, as defined by + GCE. These keys can be specified in ``location_id`` or + ``alternative_location_id`` fields when creating a Redis + instance. + """ + + available_zones = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message='ZoneMetadata', + ) + + +class ZoneMetadata(proto.Message): + r"""Defines specific information for a particular zone. Currently + empty and reserved for future use only. 
+ """ + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini b/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py new file mode 100644 index 000000000000..5b0d60db004a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# noxfile.py -- nox-based automation (tests, coverage, typing, docs) for the
# generated google-cloud-redis client.
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# ``setup.py --name`` prints the distribution name followed by a newline;
# strip it so the value can be passed verbatim as a CLI argument below.
PACKAGE_NAME = subprocess.check_output(
    [sys.executable, "setup.py", "--name"], encoding="utf-8"
).strip()


# Sessions run when ``nox`` is invoked with no ``-s`` argument.
# BUG FIX (two defects in the original):
#   * ``nox.sessions = [...]`` is a no-op; the supported configuration knob
#     is ``nox.options.sessions``.
#   * A missing comma after "check_lower_bounds" implicitly concatenated it
#     with "docs", yielding the bogus session name "check_lower_boundsdocs"
#     and silently dropping both sessions from the default run.
nox.options.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]


@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
    """Run the unit test suite."""

    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/redis_v1/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs))
    )


@nox.session(python='3.7')
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")


@nox.session(python=['3.6', '3.7'])
def mypy(session):
    """Run the type checker."""
    session.install('mypy')
    session.install('.')
    session.run(
        'mypy',
        '--explicit-package-bases',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session(python='3.6')
def docs(session):
    """Build the docs for this library."""

    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")

    # Start from a clean output tree so stale artifacts never leak into
    # the built docs.
    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)

import libcst as cst


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns ``(true_list, false_list)``: first the elements for which
    *predicate* is truthy, then the rest, each preserving input order.
    """
    results = ([], [])

    for i in iterator:
        # bool -> int index: falsy items land in results[0], truthy in
        # results[1].
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class redisCallTransformer(cst.CSTTransformer):
    # NOTE: the lowercase class name is preserved because the generator emits
    # it and ``fix_files`` references it in a default argument.

    # Control-plane keyword arguments that must stay outside the ``request``
    # dict.  Annotation fixed: these are variable-length string tuples, so
    # ``Tuple[str, ...]`` (``Tuple[str]`` means a 1-tuple).
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'create_instance': ('parent', 'instance_id', 'instance', ),
        'delete_instance': ('name', ),
        'export_instance': ('name', 'output_config', ),
        'failover_instance': ('name', 'data_protection_mode', ),
        'get_instance': ('name', ),
        'import_instance': ('name', 'input_config', ),
        'list_instances': ('parent', 'page_size', 'page_token', ),
        'update_instance': ('update_mask', 'instance', ),
        'upgrade_instance': ('name', 'redis_version', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a flattened client-method call into request-dict form."""
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Idiom fix: was ``not a.keyword.value in self.CTRL_PARAMS`` (E713).
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Any positional args beyond the method's declared parameters must be
        # control params passed positionally; turn them into keyword args.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    # The shared default instance is safe: the transformer keeps no
    # per-file state (class-level constants only).
    transformer=redisCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the redis client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Package manifest for the generated google-cloud-redis client library.
import io
import os

import setuptools  # type: ignore

version = '0.1.0'

# Use the long-form README next to this file as the PyPI landing-page text.
package_root = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(package_root, 'README.rst'), encoding='utf-8') as fh:
    readme = fh.read()

# Runtime requirements, with lower bounds mirrored in constraints.txt.
dependencies = (
    'google-api-core[grpc] >= 1.27.0, < 2.0.0dev',
    'libcst >= 0.2.5',
    'proto-plus >= 1.15.0',
    'packaging >= 14.3',
)

setuptools.setup(
    name='google-cloud-redis',
    version=version,
    long_description=readme,
    packages=setuptools.PEP420PackageFinder.find(),
    namespace_packages=('google', 'google.cloud'),
    platforms='Posix; MacOS X; Windows',
    include_package_data=True,
    install_requires=dependencies,
    python_requires='>=3.6',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    zip_safe=False,
)
a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py new file mode 100644 index 000000000000..b54a5fcc42cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py new file mode 100644 index 000000000000..ff39c8c468f2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -0,0 +1,3326 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient +from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.services.cloud_redis import transports +from google.cloud.redis_v1.services.cloud_redis.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudRedisClient._get_default_mtls_endpoint(None) is None + assert CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, + CloudRedisAsyncClient, +]) +def test_cloud_redis_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = 
{"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'redis.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, + CloudRedisAsyncClient, +]) +def test_cloud_redis_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'redis.googleapis.com:443' + + +def test_cloud_redis_client_get_transport_class(): + transport = CloudRedisClient.get_transport_class() + available_transports = [ + transports.CloudRedisGrpcTransport, + ] + assert transport in available_transports + + transport = CloudRedisClient.get_transport_class("grpc") + assert transport == transports.CloudRedisGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +def test_cloud_redis_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) 
+@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_redis_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_cloud_redis_client_client_options_from_dict(): + with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudRedisClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_list_instances(transport: str = 'grpc', request_type=cloud_redis.ListInstancesRequest): + 
client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_from_dict(): + test_list_instances(request_type=dict) + + +def test_list_instances_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest() + + +@pytest.mark.asyncio +async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + + +def test_list_instances_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = cloud_redis.ListInstancesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = cloud_redis.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ListInstancesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_instances_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_redis.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_instances_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_redis.ListInstancesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_instances( + cloud_redis.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_pager(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_instances(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, cloud_redis.Instance) + for i in results) + +def test_list_instances_pages(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_redis.Instance) + for i in responses) + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_instances(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_instance(transport: str = 'grpc', request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + + +def 
test_get_instance_from_dict(): + test_get_instance(request_type=dict) + + +def test_get_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest() + + +@pytest.mark.asyncio +async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + )) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + + +def test_get_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = cloud_redis.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + cloud_redis.GetInstanceRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_instance( + cloud_redis.GetInstanceRequest(), + name='name_value', + ) + + +def test_create_instance(transport: str = 'grpc', request_type=cloud_redis.CreateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_from_dict(): + test_create_instance(request_type=dict) + + +def test_create_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest() + + +@pytest.mark.asyncio +async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + + +def test_create_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.CreateInstanceRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.CreateInstanceRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].instance_id == 'instance_id_value' + assert args[0].instance == cloud_redis.Instance(name='name_value') + + +def test_create_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + cloud_redis.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_instance( + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].instance_id == 'instance_id_value' + assert args[0].instance == cloud_redis.Instance(name='name_value') + + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + cloud_redis.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + +def test_update_instance(transport: str = 'grpc', request_type=cloud_redis.UpdateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_update_instance_from_dict(): + test_update_instance(request_type=dict) + + +def test_update_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest() + + +@pytest.mark.asyncio +async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + + +def test_update_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.UpdateInstanceRequest() + + request.instance.name = 'instance.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=instance.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.UpdateInstanceRequest() + + request.instance.name = 'instance.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=instance.name/value', + ) in kw['metadata'] + + +def test_update_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + assert args[0].instance == cloud_redis.Instance(name='name_value') + + +def test_update_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + cloud_redis.UpdateInstanceRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_instance( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + assert args[0].instance == cloud_redis.Instance(name='name_value') + + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_instance( + cloud_redis.UpdateInstanceRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + +def test_upgrade_instance(transport: str = 'grpc', request_type=cloud_redis.UpgradeInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_upgrade_instance_from_dict(): + test_upgrade_instance(request_type=dict) + + +def test_upgrade_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + client.upgrade_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest() + + +@pytest.mark.asyncio +async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_upgrade_instance_async_from_dict(): + await test_upgrade_instance_async(request_type=dict) + + +def test_upgrade_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.UpgradeInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_upgrade_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.UpgradeInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_upgrade_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.upgrade_instance( + name='name_value', + redis_version='redis_version_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].redis_version == 'redis_version_value' + + +def test_upgrade_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.upgrade_instance( + cloud_redis.UpgradeInstanceRequest(), + name='name_value', + redis_version='redis_version_value', + ) + + +@pytest.mark.asyncio +async def test_upgrade_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.upgrade_instance( + name='name_value', + redis_version='redis_version_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].redis_version == 'redis_version_value' + + +@pytest.mark.asyncio +async def test_upgrade_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.upgrade_instance( + cloud_redis.UpgradeInstanceRequest(), + name='name_value', + redis_version='redis_version_value', + ) + + +def test_import_instance(transport: str = 'grpc', request_type=cloud_redis.ImportInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_instance_from_dict(): + test_import_instance(request_type=dict) + + +def test_import_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + client.import_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest() + + +@pytest.mark.asyncio +async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_instance_async_from_dict(): + await test_import_instance_async(request_type=dict) + + +def test_import_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ImportInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_import_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ImportInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_import_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.import_instance( + name='name_value', + input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].input_config == cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + + +def test_import_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.import_instance( + cloud_redis.ImportInstanceRequest(), + name='name_value', + input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + ) + + +@pytest.mark.asyncio +async def test_import_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.import_instance( + name='name_value', + input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].input_config == cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + + +@pytest.mark.asyncio +async def test_import_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.import_instance( + cloud_redis.ImportInstanceRequest(), + name='name_value', + input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + ) + + +def test_export_instance(transport: str = 'grpc', request_type=cloud_redis.ExportInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_export_instance_from_dict(): + test_export_instance(request_type=dict) + + +def test_export_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + client.export_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest() + + +@pytest.mark.asyncio +async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_instance_async_from_dict(): + await test_export_instance_async(request_type=dict) + + +def test_export_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ExportInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_export_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ExportInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_export_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_instance( + name='name_value', + output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].output_config == cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + + +def test_export_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_instance( + cloud_redis.ExportInstanceRequest(), + name='name_value', + output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + ) + + +@pytest.mark.asyncio +async def test_export_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.export_instance( + name='name_value', + output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].output_config == cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + + +@pytest.mark.asyncio +async def test_export_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_instance( + cloud_redis.ExportInstanceRequest(), + name='name_value', + output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + ) + + +def test_failover_instance(transport: str = 'grpc', request_type=cloud_redis.FailoverInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_failover_instance_from_dict(): + test_failover_instance(request_type=dict) + + +def test_failover_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + client.failover_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest() + + +@pytest.mark.asyncio +async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_failover_instance_async_from_dict(): + await test_failover_instance_async(request_type=dict) + + +def test_failover_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.FailoverInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_failover_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.FailoverInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_failover_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.failover_instance( + name='name_value', + data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].data_protection_mode == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + + +def test_failover_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.failover_instance( + cloud_redis.FailoverInstanceRequest(), + name='name_value', + data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, + ) + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.failover_instance( + name='name_value', + data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].data_protection_mode == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + + +@pytest.mark.asyncio +async def test_failover_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.failover_instance( + cloud_redis.FailoverInstanceRequest(), + name='name_value', + data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, + ) + + +def test_delete_instance(transport: str = 'grpc', request_type=cloud_redis.DeleteInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_from_dict(): + test_delete_instance(request_type=dict) + + +def test_delete_instance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest() + + +@pytest.mark.asyncio +async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + + +def test_delete_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.DeleteInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.DeleteInstanceRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_redis.DeleteInstanceRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + cloud_redis.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudRedisClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudRedisGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudRedisGrpcTransport, + ) + +def test_cloud_redis_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_redis_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_instances', + 'get_instance', + 'create_instance', + 'update_instance', + 'upgrade_instance', + 'import_instance', + 'export_instance', + 'failover_instance', + 'delete_instance', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + +@requires_google_auth_gte_1_25_0 +def test_cloud_redis_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_redis_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 
'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_cloud_redis_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_cloud_redis_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudRedisClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_cloud_redis_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudRedisClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_cloud_redis_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_cloud_redis_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudRedisGrpcTransport, grpc_helpers), + (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_cloud_redis_host_no_port(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), + ) + assert client.transport._host == 'redis.googleapis.com:443' + + +def test_cloud_redis_host_with_port(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + ) + assert client.transport._host == 'redis.googleapis.com:8000' + +def test_cloud_redis_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudRedisGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_redis_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudRedisGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_redis_grpc_lro_client(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_cloud_redis_grpc_lro_async_client(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + actual = CloudRedisClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = CloudRedisClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_instance_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudRedisClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudRedisClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudRedisClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudRedisClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudRedisClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudRedisClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = CloudRedisClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudRedisClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudRedisClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudRedisClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudRedisClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/packages/gapic-generator/tests/integration/iamcredentials_grpc_service_config.json b/packages/gapic-generator/tests/integration/iamcredentials_grpc_service_config.json new file mode 100755 index 000000000000..360bca92de11 --- /dev/null +++ b/packages/gapic-generator/tests/integration/iamcredentials_grpc_service_config.json @@ -0,0 +1,35 @@ +{ + "methodConfig": [ + { + "name": [ + { + "service": "google.iam.credentials.v1.IAMCredentials", + "method": "GenerateAccessToken" + }, + { + "service": "google.iam.credentials.v1.IAMCredentials", + "method": "GenerateIdToken" + }, + { + "service": "google.iam.credentials.v1.IAMCredentials", + 
"method": "SignBlob" + }, + { + "service": "google.iam.credentials.v1.IAMCredentials", + "method": "SignJwt" + } + ], + "timeout": "60s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "UNAVAILABLE", + "DEADLINE_EXCEEDED" + ] + } + } + ] +} diff --git a/packages/gapic-generator/tests/integration/logging_grpc_service_config.json b/packages/gapic-generator/tests/integration/logging_grpc_service_config.json new file mode 100755 index 000000000000..fc9c085b427f --- /dev/null +++ b/packages/gapic-generator/tests/integration/logging_grpc_service_config.json @@ -0,0 +1,162 @@ +{ + "methodConfig": [ + { + "name": [ + { + "service": "google.logging.v2.MetricsServiceV2", + "method": "CreateLogMetric" + } + ], + "timeout": "60s" + }, + { + "name": [ + { + "service": "google.logging.v2.LoggingServiceV2", + "method": "DeleteLog" + }, + { + "service": "google.logging.v2.LoggingServiceV2", + "method": "WriteLogEntries" + }, + { + "service": "google.logging.v2.LoggingServiceV2", + "method": "ListLogEntries" + }, + { + "service": "google.logging.v2.LoggingServiceV2", + "method": "ListMonitoredResourceDescriptors" + }, + { + "service": "google.logging.v2.LoggingServiceV2", + "method": "ListLogs" + } + ], + "timeout": "60s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "DEADLINE_EXCEEDED", + "INTERNAL", + "UNAVAILABLE" + ] + } + }, + { + "name": [ + { + "service": "google.logging.v2.LoggingServiceV2", + "method": "TailLogEntries" + } + ], + "timeout": "3600s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "DEADLINE_EXCEEDED", + "INTERNAL", + "UNAVAILABLE" + ] + } + }, + { + "name": [ + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "ListSinks" + }, + { + 
"service": "google.logging.v2.ConfigServiceV2", + "method": "GetSink" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "UpdateSink" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "DeleteSink" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "ListExclusions" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "GetExclusion" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "DeleteExclusion" + } + ], + "timeout": "60s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "DEADLINE_EXCEEDED", + "INTERNAL", + "UNAVAILABLE" + ] + } + }, + { + "name": [ + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "CreateSink" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "CreateExclusion" + }, + { + "service": "google.logging.v2.ConfigServiceV2", + "method": "UpdateExclusion" + } + ], + "timeout": "120s" + }, + { + "name": [ + { + "service": "google.logging.v2.MetricsServiceV2", + "method": "ListLogMetrics" + }, + { + "service": "google.logging.v2.MetricsServiceV2", + "method": "GetLogMetric" + }, + { + "service": "google.logging.v2.MetricsServiceV2", + "method": "UpdateLogMetric" + }, + { + "service": "google.logging.v2.MetricsServiceV2", + "method": "DeleteLogMetric" + } + ], + "timeout": "60s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "0.100s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": [ + "DEADLINE_EXCEEDED", + "INTERNAL", + "UNAVAILABLE" + ] + } + } + ] +} diff --git a/packages/gapic-generator/tests/integration/redis_grpc_service_config.json b/packages/gapic-generator/tests/integration/redis_grpc_service_config.json new file mode 100755 index 000000000000..77201fd2540e --- /dev/null +++ b/packages/gapic-generator/tests/integration/redis_grpc_service_config.json @@ -0,0 +1,49 @@ +{ + 
"methodConfig": [ + { + "name": [ + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "ListInstances" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "GetInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "GetInstanceAuthString" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "CreateInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "UpdateInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "UpgradeInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "ImportInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "ExportInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "FailoverInstance" + }, + { + "service": "google.cloud.redis.v1.CloudRedis", + "method": "DeleteInstance" + } + ], + "timeout": "600s" + } + ] +} From a1078b5ce481a7b911b3128f109a191fd007ab5b Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Thu, 27 May 2021 13:28:33 -0700 Subject: [PATCH 0563/1339] chore: Add Bazel files to .gitignore (#907) --- packages/gapic-generator/.gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/gapic-generator/.gitignore b/packages/gapic-generator/.gitignore index 2cead4ed7d47..2436ca5072fa 100644 --- a/packages/gapic-generator/.gitignore +++ b/packages/gapic-generator/.gitignore @@ -1,6 +1,9 @@ *.py[cod] *.sw[op] +# Bazel +bazel-* + # C extensions *.so From ec9c7e66436b3bc98da19c10fb1693ca13df95b4 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Thu, 27 May 2021 13:37:39 -0700 Subject: [PATCH 0564/1339] feat(dev): Add Git pre-commit hooks [gapic-generator-python] (#908) * feat(tests): Add integration test framework, goldens for 4 APIs * fix: exclude generated sources from style checks * fix: add integration tests to CI * fix: split out integration tests * fix: bazel install * fix: Use 3.8 only for integration tests * fix: add 
integration test steps to docs * feat(dev): Add Git pre-commit hooks * fix: use unit-3.9 instead * fix: add license header to pre-commit --- packages/gapic-generator/.githooks/pre-commit | 146 ++++++++++++++++++ packages/gapic-generator/DEVELOPMENT.md | 50 +++++- 2 files changed, 189 insertions(+), 7 deletions(-) create mode 100755 packages/gapic-generator/.githooks/pre-commit diff --git a/packages/gapic-generator/.githooks/pre-commit b/packages/gapic-generator/.githooks/pre-commit new file mode 100755 index 000000000000..580aaffbcaae --- /dev/null +++ b/packages/gapic-generator/.githooks/pre-commit @@ -0,0 +1,146 @@ +#!/bin/sh + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Pre-commit Git checks. +# Set up: +# ln -s .githooks/pre-commit .git/hooks/pre-commit + +# Constants. +BOLD="\e[1m" +UNSET="\e[0m" +WHITE="\e[97m" +RED="\e[91m" +BACK_MAGENTA="\e[45m" +BACK_BLUE="\e[44m" +BACK_RED="\e[41m" +BACK_GREEN="\e[42m" + +# Methods. +function echo_error { + ERR_MSG=$1 + HELP_MSG=$2 + echo -e "$BOLD $BACK_BLUE $WHITE Precommit:\t $BACK_RED Changes NOT committed. $UNSET" + echo -e "$BOLD $BACK_BLUE $WHITE Precommit:\t $BACK_RED $WHITE $ERR_MSG $BACK_BLUE $HELP_MSG $UNSET" +} + +function echo_status { + STATUS_MSG=$1 + echo -e "$BOLD $BACK_BLUE $WHITE Precommit:\t $STATUS_MSG $UNSET" +} + +function echo_success { + echo -e "$BOLD $BACK_BLUE $WHITE Precommit:\t $BACK_GREEN $WHITE SUCCESS. $UNSET All checks passed!" 
+} + +function header_check_preparation { + echo_status "Setting up license check environment" + export GOPATH=$(go env GOPATH) + if [ $? -ne 0 ]; + then + echo_status "Please install Go first, instructions can be found here: https://golang.org/doc/install." + else + export ENV_PATH=$(echo $PATH) + if [[ $ENV_PATH != *$GOPATH* ]]; + then + echo_status "GOPATH is not in the system path, adding it now." + export PATH=$GOPATH/bin:$PATH + fi + which addlicense + if [ $? -ne 0 ]; + then + echo_status "addlicense tool is not yet installed, downloading it now." + go get -u github.com/google/addlicense + fi + fi +} + +# Disk cache. +BAZEL_CACHE_DIR=/tmp/bazel_cache_gapic_generator_java +if [ ! -d $BAZEL_CACHE_DIR ] +then + mkdir $BAZEL_CACHE_DIR +fi + +# Check only the staged files. +NUM_TOTAL_FILES_CHANGED=$(git diff --cached --name-only | wc -l) +NUM_PYTHON_FILES_CHANGED=$(git diff --cached --name-only "*.java" | wc -l) +NUM_UNIT_GOLDEN_FILES_CHANGED=$(git diff --cached --name-only "src/test/*/*.golden" | wc -l) +NUM_INTEGRATION_GOLDEN_FILES_CHANGED=$(git diff --cached --name-only "tests/integration/goldens/*/*.golden" | wc -l) +NUM_INTEGRATION_BAZEL_FILES_CHANGED=$(git diff --cached --name-only "tests/integration/*/BUILD.bazel" | wc -l) +NUM_BAZEL_FILES_CHANGED=$(git diff --cached --name-only "*BUILD.bazel" | wc -l) + +if [ $NUM_TOTAL_FILES_CHANGED -le 0 ] +then + echo_error "No new files to commit." "" + exit 1 +fi + + +if [ -x /usr/lib/git-core/google_hook ]; then + /usr/lib/git-core/google_hook pre-commit "$@" +fi + +# Check Python format. +if [ $NUM_PYTHON_FILES_CHANGED -gt 0 ] +then + echo_status "Running Python linter..." + find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code + FORMAT_STATUS=$? + if [ $FORMAT_STATUS != 0 ] + then + echo_error "Linting failed." "Please try again after running autopep8 on the gapic/ and tests/ directories." + exit 1 + fi +fi + +# Check unit tests. 
+if [ $NUM_PYTHON_FILES_CHANGED -gt 0 ] || [ $NUM_UNIT_GOLDEN_FILES_CHANGED -gt 0 ] +then + echo_status "Checking unit tests..." + nox -s unit-3.9 + TEST_STATUS=$? + if [ $TEST_STATUS != 0 ] + then + echo_error "Tests failed." "Please fix them and try again." + exit 1 + fi +fi + +# Check integration tests. +if [ $NUM_PYTHON_FILES_CHANGED -gt 0 ] \ + || [ $NUM_INTEGRATION_GOLDEN_FILES_CHANGED -gt 0 ] \ + || [ $NUM_INTEGRATION_BAZEL_FILES_CHANGED -gt 0 ] +then + echo_status "Checking integration tests..." + bazel --batch test --disk_cache="$BAZEL_CACHE_DIR" //tests/integration/... + TEST_STATUS=$? + if [ $TEST_STATUS != 0 ] + then + echo_error "Tests failed." "Please fix them and try again." + exit 1 + fi +fi + +# Check and fix Bazel format. +if [ $NUM_BAZEL_FILES_CHANGED -gt 0 ] +then + for FILE in $(find ./ -name BUILD.bazel) + do + buildifier --lint=fix $FILE + done +fi + +echo_success diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index 25d000c58ba2..72f76166ef21 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -1,11 +1,47 @@ # Development -- Install dependencies with `pip install .` -- See all nox sessions with `nox -l` -- Execute unit tests by running one of the sessions prefixed with `unit-` - - Example: `nox -s unit-3.8` -- Lint sources by running `autopep8`. +## Setup + +1. Clone this repo. + +2. Copy the Git pre-commit hooks. This will automatically check the build, run + tests, and perform linting before each commit. (Symlinks don't seem to work, + but if you find a way, please add it here!) + + ```sh + cp .githooks/pre-commit .git/hooks/pre-commit + ``` + +3. Install dependencies with `pip install .` + +## Unit Tests + +Execute unit tests by running one of the sessions prefixed with `unit-`. + +- Example: `nox -s unit-3.8` +- See all Nox sessions with `nox -l`. + +## Formatting + +- Lint sources by running `autopep8`. The specific command is the following. 
+ + ``` + find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code + ``` ## Integration Tests -- Running tests: `bazel test tests/integration:asset`. See the full list of targets in `tests/integration/BUILD.bazel`. -- Updating golden files: `bazel run tests/integration:asset_update` + +- Run a single integration test for one API. This generates Python source code + with the microgenerator and compares them to the golden files in + `test/integration/goldens/asset`. + + ```sh + bazel test //test/integration:asset + ``` + +- Update goldens files. This overwrites the golden files in + `test/integration/goldens/asset`. + + ```sh + bazel run //test/integration:asset_update + ``` \ No newline at end of file From 93b5a9c57434b7edc86f5ab4ce128160f68abd9a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Jun 2021 10:56:57 -0700 Subject: [PATCH 0565/1339] chore: release 0.48.0 (#899) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index bb8b5d1b97f6..2b7aa5841fd5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [0.48.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.47.0...v0.48.0) (2021-05-27) + + +### Features + +* Add `x-goog-api-client` header to rest clients ([#888](https://www.github.com/googleapis/gapic-generator-python/issues/888)) ([2d1d3ae](https://www.github.com/googleapis/gapic-generator-python/commit/2d1d3ae135a75bbfff13df7703de5d0dad44695c)) +* **dev:** Add Git pre-commit hooks [gapic-generator-python] ([#908](https://www.github.com/googleapis/gapic-generator-python/issues/908)) 
([298db39](https://www.github.com/googleapis/gapic-generator-python/commit/298db39064e29de764537f25dc38f9e5ac301390)) +* Raise GoogleAPICallError on REST response errors ([#891](https://www.github.com/googleapis/gapic-generator-python/issues/891)) ([edb8c63](https://www.github.com/googleapis/gapic-generator-python/commit/edb8c63e8a331f5e08ea19202d8de42de7051299)) +* **tests:** Add integration test framework, goldens for 4 APIs [gapic-generator-python] ([#905](https://www.github.com/googleapis/gapic-generator-python/issues/905)) ([48db1e6](https://www.github.com/googleapis/gapic-generator-python/commit/48db1e644badc2180253e11d9a3d3657e8f9aeed)) + + +### Bug Fixes + +* fix datetime comparison unit tests ([#898](https://www.github.com/googleapis/gapic-generator-python/issues/898)) ([81932a2](https://www.github.com/googleapis/gapic-generator-python/commit/81932a2b71e6ca5f424ddc5c52933ad1d452583a)) +* remove support for google-api-core<1.26.0 ([#893](https://www.github.com/googleapis/gapic-generator-python/issues/893)) ([ce558ac](https://www.github.com/googleapis/gapic-generator-python/commit/ce558acef9ec9c9bcc54243cddb708ef168c05f0)) + + +### Documentation + +* Add DEVELOPMENT.md ([#876](https://www.github.com/googleapis/gapic-generator-python/issues/876)) ([592ec06](https://www.github.com/googleapis/gapic-generator-python/commit/592ec061d4eec35e35633c5a9e62cf1e598a8461)) + ## [0.47.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.3...v0.47.0) (2021-05-13) From df6eef3374ffd2726e2d12ec89ad5161c6feac67 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 3 Jun 2021 16:52:04 +0200 Subject: [PATCH 0566/1339] chore(deps): update dependency google-api-core to v1.29.0 (#910) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 500661cb424e..258b35d79039 100644 --- 
a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==1.28.0 +google-api-core==1.29.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From ff67883459c93f8c71c4e72e0072e35cc17b72cd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 3 Jun 2021 18:40:26 +0200 Subject: [PATCH 0567/1339] chore(deps): update dependency protobuf to v3.17.2 (#906) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.17.0` -> `==3.17.2` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.2/compatibility-slim/3.17.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.2/confidence-slim/3.17.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). 
View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 258b35d79039..503be35270c4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.29.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 -protobuf==3.17.0 +protobuf==3.17.2 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From e73d12d603db48d7c3a58cef1af8e493b6970624 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 9 Jun 2021 11:08:35 -0600 Subject: [PATCH 0568/1339] fix: samplegen always produces sample dicts with "response" (#914) - fix samplegen logic to always set a `sample[response]` - enable `autogen-snippets` for the golden integration tests that are run through Bazel - install some extra stub packages for mypy that are needed with the new release --- packages/gapic-generator/BUILD.bazel | 7 +- packages/gapic-generator/DEVELOPMENT.md | 22 +++++-- .../gapic/samplegen/samplegen.py | 3 + .../templates/examples/feature_fragments.j2 | 2 + .../gapic/templates/noxfile.py.j2 | 2 +- packages/gapic-generator/noxfile.py | 9 ++- .../tests/integration/BUILD.bazel | 10 +++ .../integration/goldens/asset/noxfile.py | 2 +- ...1_asset_service_analyze_iam_policy_grpc.py | 46 +++++++++++++ ...ice_analyze_iam_policy_longrunning_grpc.py | 48 ++++++++++++++ ...t_service_batch_get_assets_history_grpc.py | 46 +++++++++++++ ...asset_v1_asset_service_create_feed_grpc.py | 46 +++++++++++++ ...asset_v1_asset_service_delete_feed_grpc.py | 44 +++++++++++++ ...set_v1_asset_service_export_assets_grpc.py | 48 ++++++++++++++ ...ed_asset_v1_asset_service_get_feed_grpc.py | 46 +++++++++++++ 
..._asset_v1_asset_service_list_feeds_grpc.py | 46 +++++++++++++ ...et_service_search_all_iam_policies_grpc.py | 45 +++++++++++++ ...asset_service_search_all_resources_grpc.py | 45 +++++++++++++ ...asset_v1_asset_service_update_feed_grpc.py | 46 +++++++++++++ .../goldens/credentials/noxfile.py | 2 +- ..._credentials_generate_access_token_grpc.py | 46 +++++++++++++ ..._iam_credentials_generate_id_token_grpc.py | 46 +++++++++++++ ...tials_v1_iam_credentials_sign_blob_grpc.py | 46 +++++++++++++ ...ntials_v1_iam_credentials_sign_jwt_grpc.py | 46 +++++++++++++ .../integration/goldens/logging/noxfile.py | 2 +- ...v2_config_service_v2_create_bucket_grpc.py | 46 +++++++++++++ ...config_service_v2_create_exclusion_grpc.py | 46 +++++++++++++ ...g_v2_config_service_v2_create_sink_grpc.py | 46 +++++++++++++ ...g_v2_config_service_v2_create_view_grpc.py | 46 +++++++++++++ ...v2_config_service_v2_delete_bucket_grpc.py | 44 +++++++++++++ ...config_service_v2_delete_exclusion_grpc.py | 44 +++++++++++++ ...g_v2_config_service_v2_delete_sink_grpc.py | 44 +++++++++++++ ...g_v2_config_service_v2_delete_view_grpc.py | 44 +++++++++++++ ...ng_v2_config_service_v2_get_bucket_grpc.py | 46 +++++++++++++ ...onfig_service_v2_get_cmek_settings_grpc.py | 46 +++++++++++++ ...v2_config_service_v2_get_exclusion_grpc.py | 46 +++++++++++++ ...ging_v2_config_service_v2_get_sink_grpc.py | 46 +++++++++++++ ...ging_v2_config_service_v2_get_view_grpc.py | 46 +++++++++++++ ..._v2_config_service_v2_list_buckets_grpc.py | 45 +++++++++++++ ..._config_service_v2_list_exclusions_grpc.py | 45 +++++++++++++ ...ng_v2_config_service_v2_list_sinks_grpc.py | 45 +++++++++++++ ...ng_v2_config_service_v2_list_views_grpc.py | 45 +++++++++++++ ..._config_service_v2_undelete_bucket_grpc.py | 44 +++++++++++++ ...v2_config_service_v2_update_bucket_grpc.py | 46 +++++++++++++ ...ig_service_v2_update_cmek_settings_grpc.py | 46 +++++++++++++ ...config_service_v2_update_exclusion_grpc.py | 46 +++++++++++++ 
...g_v2_config_service_v2_update_sink_grpc.py | 46 +++++++++++++ ...g_v2_config_service_v2_update_view_grpc.py | 46 +++++++++++++ ...g_v2_logging_service_v2_delete_log_grpc.py | 44 +++++++++++++ ...ogging_service_v2_list_log_entries_grpc.py | 45 +++++++++++++ ...ng_v2_logging_service_v2_list_logs_grpc.py | 45 +++++++++++++ ...ist_monitored_resource_descriptors_grpc.py | 45 +++++++++++++ ...ogging_service_v2_tail_log_entries_grpc.py | 45 +++++++++++++ ...gging_service_v2_write_log_entries_grpc.py | 46 +++++++++++++ ...trics_service_v2_create_log_metric_grpc.py | 46 +++++++++++++ ...trics_service_v2_delete_log_metric_grpc.py | 44 +++++++++++++ ..._metrics_service_v2_get_log_metric_grpc.py | 46 +++++++++++++ ...etrics_service_v2_list_log_metrics_grpc.py | 45 +++++++++++++ ...trics_service_v2_update_log_metric_grpc.py | 46 +++++++++++++ .../integration/goldens/redis/noxfile.py | 2 +- ...dis_v1_cloud_redis_create_instance_grpc.py | 48 ++++++++++++++ ...dis_v1_cloud_redis_delete_instance_grpc.py | 48 ++++++++++++++ ...dis_v1_cloud_redis_export_instance_grpc.py | 48 ++++++++++++++ ...s_v1_cloud_redis_failover_instance_grpc.py | 48 ++++++++++++++ ..._redis_v1_cloud_redis_get_instance_grpc.py | 46 +++++++++++++ ...dis_v1_cloud_redis_import_instance_grpc.py | 48 ++++++++++++++ ...edis_v1_cloud_redis_list_instances_grpc.py | 45 +++++++++++++ ...dis_v1_cloud_redis_update_instance_grpc.py | 48 ++++++++++++++ ...is_v1_cloud_redis_upgrade_instance_grpc.py | 48 ++++++++++++++ .../sample_basic_void_method.py | 53 +++++++++++++++ .../tests/unit/samplegen/test_integration.py | 66 +++++++++++++++++++ .../tests/unit/samplegen/test_samplegen.py | 3 +- 72 files changed, 2826 insertions(+), 17 deletions(-) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py create 
mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 8fb9dd587b08..38ae745d0d3e 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -35,7 +35,7 @@ config_setting( py_runtime( name = "pyenv3_runtime", interpreter = ":pyenv3wrapper.sh", - python_version="PY3", + python_version = "PY3", ) py_runtime_pair( @@ -52,7 +52,10 @@ toolchain( py_binary( name = "gapic_plugin", srcs = glob(["gapic/**/*.py"]), - data = [":pandoc_binary"] + glob(["gapic/**/*.j2", "gapic/**/.*.j2"]), + data = [":pandoc_binary"] + glob([ + "gapic/**/*.j2", + "gapic/**/.*.j2", + ]), main = "gapic/cli/generate_with_pandoc.py", python_version = "PY3", visibility = ["//visibility:public"], diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index 72f76166ef21..16f04c7cc26b 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -33,15 +33,25 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. - Run a single integration test for one API. This generates Python source code with the microgenerator and compares them to the golden files in - `test/integration/goldens/asset`. + `tests/integration/goldens/asset`. ```sh - bazel test //test/integration:asset + bazel test //tests/integration:asset ``` -- Update goldens files. This overwrites the golden files in - `test/integration/goldens/asset`. +- Run integration tests for all APIs. ```sh - bazel run //test/integration:asset_update - ``` \ No newline at end of file + bazel test //tests/integration:all + ``` + +- Update all goldens files. This overwrites the golden files in + `tests/integration/goldens/`. 
+ + ```sh + bazel run //tests/integration:asset_update + bazel run //tests/integration:credentials_update + bazel run //tests/integration:logging_update + bazel run //tests/integration:redis_update + ``` + diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 9cd4987d590d..2db9d8c9dbbd 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -301,6 +301,8 @@ def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): # Add reasonable defaults depending on the type of the sample if not rpc.void: sample.setdefault("response", [{"print": ["%s", "$resp"]}]) + else: + sample.setdefault("response", []) @utils.cached_property def flattenable_fields(self) -> FrozenSet[str]: @@ -1000,6 +1002,7 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str sample["request"] = v.validate_and_transform_request( calling_form, sample["request"] ) + v.validate_response(sample["response"]) return sample_template.render( diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index c6dc93d99c93..7525c308f841 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -220,10 +220,12 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request {% if calling_form == calling_form_enum.Request %} response = {{ method_invocation_text|trim }} +{% if response_statements %} # Handle response {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} +{% endif %} {% elif calling_form == calling_form_enum.RequestPagedAll %} page_result = {{ method_invocation_text|trim }} for response in page_result: diff --git 
a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index c2d08a480764..d3e9b2ce928d 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -59,7 +59,7 @@ def cover(session): @nox.session(python=['3.6', '3.7']) def mypy(session): """Run the type checker.""" - session.install('mypy') + session.install('mypy', 'types-pkg_resources') session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 0924ef59998a..bf5e378d978a 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -273,7 +273,7 @@ def showcase_mypy( """Perform typecheck analysis on the generated Showcase library.""" # Install pytest and gapic-generator-python - session.install("mypy") + session.install("mypy", "types-pkg-resources") with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) @@ -340,6 +340,11 @@ def docs(session): def mypy(session): """Perform typecheck analysis.""" - session.install("mypy") + session.install( + "mypy", + "types-protobuf", + "types-PyYAML", + "types-dataclasses" + ) session.install(".") session.run("mypy", "gapic") diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index d816f50c840b..95538a05055d 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -51,6 +51,9 @@ py_gapic_library( name = "asset_py_gapic", srcs = ["@com_google_googleapis//google/cloud/asset/v1:asset_proto"], grpc_service_config = "cloudasset_grpc_service_config.json", + opt_args = [ + "autogen-snippets", + ], ) # Credentials. 
@@ -58,6 +61,9 @@ py_gapic_library( name = "credentials_py_gapic", srcs = ["@com_google_googleapis//google/iam/credentials/v1:credentials_proto"], grpc_service_config = "iamcredentials_grpc_service_config.json", + opt_args = [ + "autogen-snippets", + ], ) # Logging. @@ -68,6 +74,7 @@ py_gapic_library( opt_args = [ "python-gapic-namespace=google.cloud", "python-gapic-name=logging", + "autogen-snippets", ], ) @@ -75,4 +82,7 @@ py_gapic_library( name = "redis_py_gapic", srcs = ["@com_google_googleapis//google/cloud/redis/v1:redis_proto"], grpc_service_config = "redis_grpc_service_config.json", + opt_args = [ + "autogen-snippets", + ], ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index e4734091267c..15e45490321b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -70,7 +70,7 @@ def cover(session): @nox.session(python=['3.6', '3.7']) def mypy(session): """Run the type checker.""" - session.install('mypy') + session.install('mypy', 'types-pkg_resources') session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py new file mode 100644 index 000000000000..9bbac93d771c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_grpc] +from google.cloud import asset_v1 + + +def sample_analyze_iam_policy(): + """Snippet for analyze_iam_policy""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeIamPolicyRequest( + ) + + # Make the request + response = client.analyze_iam_policy(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py new file mode 100644 index 000000000000..e7a432399b98 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeIamPolicyLongrunning +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_grpc] +from google.cloud import asset_v1 + + +def sample_analyze_iam_policy_longrunning(): + """Snippet for analyze_iam_policy_longrunning""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeIamPolicyLongrunningRequest( + ) + + # Make the request + operation = client.analyze_iam_policy_longrunning(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py new file mode 100644 index 000000000000..4a691dd0994b --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetAssetsHistory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_grpc] +from google.cloud import asset_v1 + + +def sample_batch_get_assets_history(): + """Snippet for batch_get_assets_history""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetAssetsHistoryRequest( + ) + + # Make the request + response = client.batch_get_assets_history(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py new file mode 100644 index 000000000000..d4afa1d05b75 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_CreateFeed_grpc] +from google.cloud import asset_v1 + + +def sample_create_feed(): + """Snippet for create_feed""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.CreateFeedRequest( + ) + + # Make the request + response = client.create_feed(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_CreateFeed_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py new file mode 100644 index 000000000000..f654233fd9ec --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for DeleteFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_DeleteFeed_grpc] +from google.cloud import asset_v1 + + +def sample_delete_feed(): + """Snippet for delete_feed""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.DeleteFeedRequest( + ) + + # Make the request + response = client.delete_feed(request=request) + + +# [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py new file mode 100644 index 000000000000..ef0446666429 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ExportAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_ExportAssets_grpc] +from google.cloud import asset_v1 + + +def sample_export_assets(): + """Snippet for export_assets""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ExportAssetsRequest( + ) + + # Make the request + operation = client.export_assets(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_ExportAssets_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py new file mode 100644 index 000000000000..4c04e1cd575a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_GetFeed_grpc] +from google.cloud import asset_v1 + + +def sample_get_feed(): + """Snippet for get_feed""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.GetFeedRequest( + ) + + # Make the request + response = client.get_feed(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_GetFeed_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py new file mode 100644 index 000000000000..b5b759711eb5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFeeds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_ListFeeds_grpc] +from google.cloud import asset_v1 + + +def sample_list_feeds(): + """Snippet for list_feeds""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListFeedsRequest( + ) + + # Make the request + response = client.list_feeds(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_ListFeeds_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py new file mode 100644 index 000000000000..c3582046cadf --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAllIamPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_grpc] +from google.cloud import asset_v1 + + +def sample_search_all_iam_policies(): + """Snippet for search_all_iam_policies""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllIamPoliciesRequest( + ) + + # Make the request + page_result = client.search_all_iam_policies(request=request) + for response in page_result: + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py new file mode 100644 index 000000000000..9e0772693912 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAllResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_SearchAllResources_grpc] +from google.cloud import asset_v1 + + +def sample_search_all_resources(): + """Snippet for search_all_resources""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllResourcesRequest( + ) + + # Make the request + page_result = client.search_all_resources(request=request) + for response in page_result: + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py new file mode 100644 index 000000000000..e3835835459d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py @@ -0,0 
+1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_UpdateFeed_grpc] +from google.cloud import asset_v1 + + +def sample_update_feed(): + """Snippet for update_feed""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.UpdateFeedRequest( + ) + + # Make the request + response = client.update_feed(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 7d27f4d101a7..a72fb423c050 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -70,7 +70,7 @@ def cover(session): @nox.session(python=['3.6', '3.7']) def mypy(session): """Run the type checker.""" - session.install('mypy') + 
session.install('mypy', 'types-pkg_resources') session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py new file mode 100644 index 000000000000..b5c1028a86d1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAccessToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_grpc] +from google.iam import credentials_v1 + + +def sample_generate_access_token(): + """Snippet for generate_access_token""" + + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + request = credentials_v1.GenerateAccessTokenRequest( + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py new file mode 100644 index 000000000000..2dc2d87112f2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for GenerateIdToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_grpc] +from google.iam import credentials_v1 + + +def sample_generate_id_token(): + """Snippet for generate_id_token""" + + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + request = credentials_v1.GenerateIdTokenRequest( + ) + + # Make the request + response = client.generate_id_token(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py new file mode 100644 index 000000000000..6d013e74344e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SignBlob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_grpc] +from google.iam import credentials_v1 + + +def sample_sign_blob(): + """Snippet for sign_blob""" + + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + request = credentials_v1.SignBlobRequest( + ) + + # Make the request + response = client.sign_blob(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py new file mode 100644 index 000000000000..530251cf57b9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SignJwt +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_grpc] +from google.iam import credentials_v1 + + +def sample_sign_jwt(): + """Snippet for sign_jwt""" + + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + request = credentials_v1.SignJwtRequest( + ) + + # Make the request + response = client.sign_jwt(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 4a78a7f99eaf..10ed0a998e1e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -70,7 +70,7 @@ def cover(session): @nox.session(python=['3.6', '3.7']) def mypy(session): """Run the type checker.""" - session.install('mypy') + session.install('mypy', 'types-pkg_resources') session.install('.') session.run( 'mypy', diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py new file mode 100644 index 000000000000..c74aa5daa6b1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateBucket_grpc] +from google.cloud import logging_v2 + + +def sample_create_bucket(): + """Snippet for create_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py new file mode 100644 index 000000000000..e0f6f874ea36 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_grpc] +from google.cloud import logging_v2 + + +def sample_create_exclusion(): + """Snippet for create_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateExclusionRequest( + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py new file mode 100644 index 000000000000..1f32e69be9fd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateSink_grpc] +from google.cloud import logging_v2 + + +def sample_create_sink(): + """Snippet for create_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateSinkRequest( + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py new file mode 100644 index 000000000000..f1754665b4dd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateView_grpc] +from google.cloud import logging_v2 + + +def sample_create_view(): + """Snippet for create_view""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + ) + + # Make the request + response = client.create_view(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateView_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py new file mode 100644 index 000000000000..0f951bf249a0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_grpc] +from google.cloud import logging_v2 + + +def sample_delete_bucket(): + """Snippet for delete_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + ) + + # Make the request + response = client.delete_bucket(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py new file mode 100644 index 000000000000..3a45b831e124 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_grpc] +from google.cloud import logging_v2 + + +def sample_delete_exclusion(): + """Snippet for delete_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + ) + + # Make the request + response = client.delete_exclusion(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py new file mode 100644 index 000000000000..67df1f2f72b6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteSink_grpc] +from google.cloud import logging_v2 + + +def sample_delete_sink(): + """Snippet for delete_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + ) + + # Make the request + response = client.delete_sink(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py new file mode 100644 index 000000000000..d18f27c1bd80 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteView_grpc] +from google.cloud import logging_v2 + + +def sample_delete_view(): + """Snippet for delete_view""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + ) + + # Make the request + response = client.delete_view(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py new file mode 100644 index 000000000000..0294fab02619 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetBucket_grpc] +from google.cloud import logging_v2 + + +def sample_get_bucket(): + """Snippet for get_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py new file mode 100644 index 000000000000..cd2a59003303 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_grpc] +from google.cloud import logging_v2 + + +def sample_get_cmek_settings(): + """Snippet for get_cmek_settings""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py new file mode 100644 index 000000000000..c28b3ba3cd27 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- 
+# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetExclusion_grpc] +from google.cloud import logging_v2 + + +def sample_get_exclusion(): + """Snippet for get_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py new file mode 100644 index 000000000000..2a70061ee4ad --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py @@ 
-0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetSink_grpc] +from google.cloud import logging_v2 + + +def sample_get_sink(): + """Snippet for get_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetSink_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py new file mode 100644 index 000000000000..d736bc227d15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py 
@@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetView_grpc] +from google.cloud import logging_v2 + + +def sample_get_view(): + """Snippet for get_view""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + ) + + # Make the request + response = client.get_view(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetView_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py new file mode 100644 index 000000000000..9dd1ba5d9ee3 --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuckets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListBuckets_grpc] +from google.cloud import logging_v2 + + +def sample_list_buckets(): + """Snippet for list_buckets""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + ) + + # Make the request + page_result = client.list_buckets(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py new file 
mode 100644 index 000000000000..a6a9814d8fd3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExclusions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListExclusions_grpc] +from google.cloud import logging_v2 + + +def sample_list_exclusions(): + """Snippet for list_exclusions""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + ) + + # Make the request + page_result = client.list_exclusions(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py new file mode 100644 index 000000000000..af1edb26f0d1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListSinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListSinks_grpc] +from google.cloud import logging_v2 + + +def sample_list_sinks(): + """Snippet for list_sinks""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + ) + + # Make the request + page_result = client.list_sinks(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py new file mode 100644 index 000000000000..b436c1c780e0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for ListViews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListViews_grpc] +from google.cloud import logging_v2 + + +def sample_list_views(): + """Snippet for list_views""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + ) + + # Make the request + page_result = client.list_views(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListViews_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py new file mode 100644 index 000000000000..37f0c8a7883d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_grpc] +from google.cloud import logging_v2 + + +def sample_undelete_bucket(): + """Snippet for undelete_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + ) + + # Make the request + response = client.undelete_bucket(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py new file mode 100644 index 000000000000..ceb6d9f132a8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_grpc] +from google.cloud import logging_v2 + + +def sample_update_bucket(): + """Snippet for update_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py new file mode 100644 index 000000000000..1d476684f1e5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_grpc] +from google.cloud import logging_v2 + + +def sample_update_cmek_settings(): + """Snippet for update_cmek_settings""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py new file mode 100644 index 000000000000..7f115341b2b6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py @@ -0,0 +1,46 @@ +# 
-*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_grpc] +from google.cloud import logging_v2 + + +def sample_update_exclusion(): + """Snippet for update_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateExclusionRequest( + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py new file mode 100644 index 000000000000..aa43c457ae41 --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateSink_grpc] +from google.cloud import logging_v2 + + +def sample_update_sink(): + """Snippet for update_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSinkRequest( + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py new file mode 100644 index 
000000000000..982d897ac1b6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateView_grpc] +from google.cloud import logging_v2 + + +def sample_update_view(): + """Snippet for update_view""" + + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + ) + + # Make the request + response = client.update_view(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py new file mode 100644 index 000000000000..1e28c486eef4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_DeleteLog_grpc] +from google.cloud import logging_v2 + + +def sample_delete_log(): + """Snippet for delete_log""" + + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + ) + + # Make the request + response = client.delete_log(request=request) + + +# [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py new file mode 100644 index 000000000000..4d5d9ec2a71b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_grpc] +from google.cloud import logging_v2 + + +def sample_list_log_entries(): + """Snippet for list_log_entries""" + + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + ) + + # Make the request + page_result = client.list_log_entries(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py new file mode 100644 index 000000000000..082e37a62dec --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_ListLogs_grpc] +from google.cloud import logging_v2 + + +def sample_list_logs(): + """Snippet for list_logs""" + + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + ) + + # Make the request + page_result = client.list_logs(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py new file mode 100644 index 000000000000..ea3f250f8c54 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMonitoredResourceDescriptors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_grpc] +from google.cloud import logging_v2 + + +def sample_list_monitored_resource_descriptors(): + """Snippet for list_monitored_resource_descriptors""" + + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py new file mode 100644 index 000000000000..5b92819f1fdf --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_grpc] +from google.cloud import logging_v2 + + +def sample_tail_log_entries(): + """Snippet for tail_log_entries""" + + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + ) + + # Make the request + stream = client.tail_log_entries([request]) + for response in stream: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py new file mode 100644 index 000000000000..466eb64175aa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_grpc] +from google.cloud import logging_v2 + + +def sample_write_log_entries(): + """Snippet for write_log_entries""" + + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.WriteLogEntriesRequest( + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py new file mode 100644 index 000000000000..03abbe60d5c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_grpc] +from google.cloud import logging_v2 + + +def sample_create_log_metric(): + """Snippet for create_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateLogMetricRequest( + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py new file mode 100644 index 000000000000..d940d3619ed1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_grpc] +from google.cloud import logging_v2 + + +def sample_delete_log_metric(): + """Snippet for delete_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + ) + + # Make the request + response = client.delete_log_metric(request=request) + + +# [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py new file mode 100644 index 000000000000..9d14540d0dd4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_grpc] +from google.cloud import logging_v2 + + +def sample_get_log_metric(): + """Snippet for get_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py new file mode 100644 index 000000000000..5e6f0c09d040 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_grpc] +from google.cloud import logging_v2 + + +def sample_list_log_metrics(): + """Snippet for list_log_metrics""" + + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py new file mode 100644 index 000000000000..51f9396d3ad3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py @@ -0,0 +1,46 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_grpc] +from google.cloud import logging_v2 + + +def sample_update_log_metric(): + """Snippet for update_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateLogMetricRequest( + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 5b0d60db004a..93380724feb0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -70,7 +70,7 @@ def cover(session): @nox.session(python=['3.6', '3.7']) def mypy(session): """Run the type checker.""" - session.install('mypy') + 
session.install('mypy', 'types-pkg_resources') session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py new file mode 100644 index 000000000000..bf03c6c8d4b6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_CreateInstance_grpc] +from google.cloud import redis_v1 + + +def sample_create_instance(): + """Snippet for create_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.CreateInstanceRequest( + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_CreateInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py new file mode 100644 index 000000000000..76ee5fb446f7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_DeleteInstance_grpc] +from google.cloud import redis_v1 + + +def sample_delete_instance(): + """Snippet for delete_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.DeleteInstanceRequest( + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_DeleteInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py new file mode 100644 index 000000000000..bb48e0954dda --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_ExportInstance_grpc] +from google.cloud import redis_v1 + + +def sample_export_instance(): + """Snippet for export_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.ExportInstanceRequest( + ) + + # Make the request + operation = client.export_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_ExportInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py new file mode 100644 index 000000000000..acf11a58708b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_FailoverInstance_grpc] +from google.cloud import redis_v1 + + +def sample_failover_instance(): + """Snippet for failover_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.FailoverInstanceRequest( + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_FailoverInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py new file mode 100644 index 000000000000..4d1b9942c700 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_GetInstance_grpc] +from google.cloud import redis_v1 + + +def sample_get_instance(): + """Snippet for get_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceRequest( + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle response + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_GetInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py new file mode 100644 index 000000000000..95826e9db298 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_ImportInstance_grpc] +from google.cloud import redis_v1 + + +def sample_import_instance(): + """Snippet for import_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.ImportInstanceRequest( + ) + + # Make the request + operation = client.import_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_ImportInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py new file mode 100644 index 000000000000..1268a040147b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_ListInstances_grpc] +from google.cloud import redis_v1 + + +def sample_list_instances(): + """Snippet for list_instances""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.ListInstancesRequest( + ) + + # Make the request + page_result = client.list_instances(request=request) + for response in page_result: + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_ListInstances_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py new file mode 100644 index 000000000000..24a3d55117dd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_UpdateInstance_grpc] +from google.cloud import redis_v1 + + +def sample_update_instance(): + """Snippet for update_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.UpdateInstanceRequest( + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_UpdateInstance_grpc] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py new file mode 100644 index 000000000000..0594da347e27 --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpgradeInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_UpgradeInstance_grpc] +from google.cloud import redis_v1 + + +def sample_upgrade_instance(): + """Snippet for upgrade_instance""" + + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.UpgradeInstanceRequest( + ) + + # Make the request + operation = client.upgrade_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_grpc] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py new file mode 100644 index 000000000000..47fe2452909b --- /dev/null +++ 
b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Classify +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install molluscs-v1-molluscclient + + +# [START mollusc_classify_sync] +from molluscs.v1 import molluscclient + + +def sample_classify(video, location): + """Determine the full taxonomy of input mollusc""" + + # Create a client + client = molluscclient.MolluscServiceClient() + + # Initialize request argument(s) + classify_target = {} + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target["video"] = f.read() + + # location = "New Zealand" + classify_target["location_annotation"] = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = client.classify(request=request) + + +# [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index c3a2cb7d2cde..b0fa2de22116 100644 --- 
a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -191,6 +191,72 @@ def test_generate_sample_basic_unflattenable(): assert sample_str == golden_snippet("sample_basic_unflattenable.py") +def test_generate_sample_void_method(): + input_type = DummyMessage( + type="REQUEST TYPE", + fields={ + "classify_target": DummyField( + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ) + ) + }, + ident=DummyIdent(name="molluscs.v1.ClassifyRequest") + ) + + api_naming = naming.NewNaming( + name="MolluscClient", namespace=("molluscs", "v1")) + service = wrappers.Service( + service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + methods={ + "Classify": DummyMethod( + void=True, + input=input_type, + output=message_factory("$resp.taxonomy"), + flattened_fields={ + "classify_target": DummyField(name="classify_target") + } + ) + }, + visible_resources={}, + ) + + schema = DummyApiSchema( + services={"animalia.mollusca.v1.Mollusc": service}, + naming=api_naming, + ) + + sample = {"service": "animalia.mollusca.v1.Mollusc", + "rpc": "Classify", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + {"field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True}, + {"field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location"} + ]} + + sample_str = samplegen.generate_sample( + sample, + schema, + env.get_template('examples/sample.py.j2') + ) + + assert sample_str == golden_snippet("sample_basic_void_method.py") + + def test_generate_sample_service_not_found(): schema = DummyApiSchema({}, DummyNaming("pkg_name")) sample = {"service": "Mollusc"} diff 
--git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 32ef4411f103..5359840f0b0c 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -117,7 +117,6 @@ def test_preprocess_sample(): def test_preprocess_sample_void_method(): - # Verify no response is added for a void method sample = {"service": "Mollusc", "rpc": "Classify"} api_schema = DummyApiSchema( services={"Mollusc": DummyService( @@ -131,7 +130,7 @@ def test_preprocess_sample_void_method(): samplegen.Validator.preprocess_sample(sample, api_schema, rpc) - assert "response" not in sample + assert sample["response"] == [] def test_define_input_param(): From 589f3e1c5877453fc4ac019e00be67312a92d9d4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Jun 2021 19:34:04 +0200 Subject: [PATCH 0569/1339] chore(deps): update dependency google-api-core to v1.30.0 (#913) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==1.29.0` -> `==1.30.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-core/1.30.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/1.30.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/1.30.0/compatibility-slim/1.29.0)](https://docs.renovatebot.com/merge-confidence/) | 
[![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/1.30.0/confidence-slim/1.29.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v1.30.0`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​1300-httpswwwgithubcomgoogleapispython-api-corecomparev1290v1300-2021-06-08) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.29.0...v1.30.0) ##### Features - add iterator capability to paged iterators ([#​200](https://www.github.com/googleapis/python-api-core/issues/200)) ([3487d68](https://www.github.com/googleapis/python-api-core/commit/3487d68bdab6f20e2ab931c8283f63c94862cf31))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 503be35270c4..f8870350c5be 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==1.29.0 +google-api-core==1.30.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From 64626df256ef43be9992dfa221bb1386d5fa5cf6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Jun 2021 19:38:03 +0200 Subject: [PATCH 0570/1339] chore(deps): update dependency protobuf to v3.17.3 (#912) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.17.2` -> `==3.17.3` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.3/compatibility-slim/3.17.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.17.3/confidence-slim/3.17.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f8870350c5be..c98e3036ad5e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==1.30.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 -protobuf==3.17.2 +protobuf==3.17.3 pypandoc==1.5 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From e9c9e2a4bd8c905adc61bd2e2ca3b77d871568a3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Jun 2021 17:42:03 +0000 Subject: [PATCH 0571/1339] chore: release 0.48.1 (#916) :robot: I have created a release \*beep\* \*boop\* --- ### [0.48.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.0...v0.48.1) (2021-06-09) ### Bug Fixes * samplegen always produces sample dicts with "response" 
([#914](https://www.github.com/googleapis/gapic-generator-python/issues/914)) ([0b168f2](https://www.github.com/googleapis/gapic-generator-python/commit/0b168f20f4cbf419131fcc512141fccca8186681)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 2b7aa5841fd5..6440171dd582 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.48.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.0...v0.48.1) (2021-06-09) + + +### Bug Fixes + +* samplegen always produces sample dicts with "response" ([#914](https://www.github.com/googleapis/gapic-generator-python/issues/914)) ([0b168f2](https://www.github.com/googleapis/gapic-generator-python/commit/0b168f20f4cbf419131fcc512141fccca8186681)) + ## [0.48.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.47.0...v0.48.0) (2021-05-27) From adabe0ba460615b0979c18c08667ae917ddd309d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 11 Jun 2021 11:55:22 -0600 Subject: [PATCH 0572/1339] feat: add async samples (#861) --- .../gapic/samplegen/samplegen.py | 17 +- packages/gapic-generator/gapic/schema/api.py | 12 +- .../templates/examples/feature_fragments.j2 | 17 +- .../gapic/templates/examples/sample.py.j2 | 6 +- ..._asset_service_analyze_iam_policy_async.py | 46 +++++ ...ce_analyze_iam_policy_longrunning_async.py | 48 +++++ ...ce_analyze_iam_policy_longrunning_sync.py} | 4 +- ..._asset_service_analyze_iam_policy_sync.py} | 4 +- ..._service_batch_get_assets_history_async.py | 46 +++++ ..._service_batch_get_assets_history_sync.py} | 4 +- 
...sset_v1_asset_service_create_feed_async.py | 46 +++++ ...sset_v1_asset_service_create_feed_sync.py} | 4 +- ...sset_v1_asset_service_delete_feed_async.py | 44 ++++ ...sset_v1_asset_service_delete_feed_sync.py} | 4 +- ...et_v1_asset_service_export_assets_async.py | 48 +++++ ...et_v1_asset_service_export_assets_sync.py} | 4 +- ...d_asset_v1_asset_service_get_feed_async.py | 46 +++++ ...d_asset_v1_asset_service_get_feed_sync.py} | 4 +- ...asset_v1_asset_service_list_feeds_async.py | 46 +++++ ...asset_v1_asset_service_list_feeds_sync.py} | 4 +- ...t_service_search_all_iam_policies_async.py | 45 +++++ ...t_service_search_all_iam_policies_sync.py} | 4 +- ...sset_service_search_all_resources_async.py | 45 +++++ ...sset_service_search_all_resources_sync.py} | 4 +- ...sset_v1_asset_service_update_feed_async.py | 46 +++++ ...sset_v1_asset_service_update_feed_sync.py} | 4 +- ...credentials_generate_access_token_async.py | 46 +++++ ...credentials_generate_access_token_sync.py} | 4 +- ...iam_credentials_generate_id_token_async.py | 46 +++++ ...iam_credentials_generate_id_token_sync.py} | 4 +- ...ials_v1_iam_credentials_sign_blob_async.py | 46 +++++ ...ials_v1_iam_credentials_sign_blob_sync.py} | 4 +- ...tials_v1_iam_credentials_sign_jwt_async.py | 46 +++++ ...tials_v1_iam_credentials_sign_jwt_sync.py} | 4 +- ...2_config_service_v2_create_bucket_async.py | 46 +++++ ...2_config_service_v2_create_bucket_sync.py} | 4 +- ...onfig_service_v2_create_exclusion_async.py | 46 +++++ ...onfig_service_v2_create_exclusion_sync.py} | 4 +- ..._v2_config_service_v2_create_sink_async.py | 46 +++++ ..._v2_config_service_v2_create_sink_sync.py} | 4 +- ..._v2_config_service_v2_create_view_async.py | 46 +++++ ..._v2_config_service_v2_create_view_sync.py} | 4 +- ...2_config_service_v2_delete_bucket_async.py | 44 ++++ ...2_config_service_v2_delete_bucket_sync.py} | 4 +- ...onfig_service_v2_delete_exclusion_async.py | 44 ++++ ...onfig_service_v2_delete_exclusion_sync.py} | 4 +- 
..._v2_config_service_v2_delete_sink_async.py | 44 ++++ ..._v2_config_service_v2_delete_sink_sync.py} | 4 +- ..._v2_config_service_v2_delete_view_async.py | 44 ++++ ..._v2_config_service_v2_delete_view_sync.py} | 4 +- ...g_v2_config_service_v2_get_bucket_async.py | 46 +++++ ...g_v2_config_service_v2_get_bucket_sync.py} | 4 +- ...nfig_service_v2_get_cmek_settings_async.py | 46 +++++ ...nfig_service_v2_get_cmek_settings_sync.py} | 4 +- ...2_config_service_v2_get_exclusion_async.py | 46 +++++ ...2_config_service_v2_get_exclusion_sync.py} | 4 +- ...ing_v2_config_service_v2_get_sink_async.py | 46 +++++ ...ing_v2_config_service_v2_get_sink_sync.py} | 4 +- ...ing_v2_config_service_v2_get_view_async.py | 46 +++++ ...ing_v2_config_service_v2_get_view_sync.py} | 4 +- ...v2_config_service_v2_list_buckets_async.py | 45 +++++ ...v2_config_service_v2_list_buckets_sync.py} | 4 +- ...config_service_v2_list_exclusions_async.py | 45 +++++ ...config_service_v2_list_exclusions_sync.py} | 4 +- ...g_v2_config_service_v2_list_sinks_async.py | 45 +++++ ...g_v2_config_service_v2_list_sinks_sync.py} | 4 +- ...g_v2_config_service_v2_list_views_async.py | 45 +++++ ...g_v2_config_service_v2_list_views_sync.py} | 4 +- ...config_service_v2_undelete_bucket_async.py | 44 ++++ ...config_service_v2_undelete_bucket_sync.py} | 4 +- ...2_config_service_v2_update_bucket_async.py | 46 +++++ ...2_config_service_v2_update_bucket_sync.py} | 4 +- ...g_service_v2_update_cmek_settings_async.py | 46 +++++ ...g_service_v2_update_cmek_settings_sync.py} | 4 +- ...onfig_service_v2_update_exclusion_async.py | 46 +++++ ...onfig_service_v2_update_exclusion_sync.py} | 4 +- ..._v2_config_service_v2_update_sink_async.py | 46 +++++ ..._v2_config_service_v2_update_sink_sync.py} | 4 +- ..._v2_config_service_v2_update_view_async.py | 46 +++++ ..._v2_config_service_v2_update_view_sync.py} | 4 +- ..._v2_logging_service_v2_delete_log_async.py | 44 ++++ ..._v2_logging_service_v2_delete_log_sync.py} | 4 +- 
...gging_service_v2_list_log_entries_async.py | 45 +++++ ...gging_service_v2_list_log_entries_sync.py} | 4 +- ...g_v2_logging_service_v2_list_logs_async.py | 45 +++++ ...g_v2_logging_service_v2_list_logs_sync.py} | 4 +- ...st_monitored_resource_descriptors_async.py | 45 +++++ ...st_monitored_resource_descriptors_sync.py} | 4 +- ...gging_service_v2_tail_log_entries_async.py | 45 +++++ ...gging_service_v2_tail_log_entries_sync.py} | 4 +- ...ging_service_v2_write_log_entries_async.py | 46 +++++ ...ging_service_v2_write_log_entries_sync.py} | 4 +- ...rics_service_v2_create_log_metric_async.py | 46 +++++ ...rics_service_v2_create_log_metric_sync.py} | 4 +- ...rics_service_v2_delete_log_metric_async.py | 44 ++++ ...rics_service_v2_delete_log_metric_sync.py} | 4 +- ...metrics_service_v2_get_log_metric_async.py | 46 +++++ ...metrics_service_v2_get_log_metric_sync.py} | 4 +- ...trics_service_v2_list_log_metrics_async.py | 45 +++++ ...trics_service_v2_list_log_metrics_sync.py} | 4 +- ...rics_service_v2_update_log_metric_async.py | 46 +++++ ...rics_service_v2_update_log_metric_sync.py} | 4 +- ...is_v1_cloud_redis_create_instance_async.py | 48 +++++ ...is_v1_cloud_redis_create_instance_sync.py} | 4 +- ...is_v1_cloud_redis_delete_instance_async.py | 48 +++++ ...is_v1_cloud_redis_delete_instance_sync.py} | 4 +- ...is_v1_cloud_redis_export_instance_async.py | 48 +++++ ...is_v1_cloud_redis_export_instance_sync.py} | 4 +- ..._v1_cloud_redis_failover_instance_async.py | 48 +++++ ..._v1_cloud_redis_failover_instance_sync.py} | 4 +- ...redis_v1_cloud_redis_get_instance_async.py | 46 +++++ ...redis_v1_cloud_redis_get_instance_sync.py} | 4 +- ...is_v1_cloud_redis_import_instance_async.py | 48 +++++ ...is_v1_cloud_redis_import_instance_sync.py} | 4 +- ...dis_v1_cloud_redis_list_instances_async.py | 45 +++++ ...dis_v1_cloud_redis_list_instances_sync.py} | 4 +- ...is_v1_cloud_redis_update_instance_async.py | 48 +++++ ...is_v1_cloud_redis_update_instance_sync.py} | 4 +- 
...s_v1_cloud_redis_upgrade_instance_async.py | 48 +++++ ...s_v1_cloud_redis_upgrade_instance_sync.py} | 4 +- ...llusca_v1_snippets_list_resources_async.py | 45 +++++ ...llusca_v1_snippets_list_resources_sync.py} | 4 +- ...v1_snippets_method_bidi_streaming_async.py | 45 +++++ ...v1_snippets_method_bidi_streaming_sync.py} | 4 +- ...v1_snippets_method_lro_signatures_async.py | 48 +++++ ...v1_snippets_method_lro_signatures_sync.py} | 4 +- ..._v1_snippets_method_one_signature_async.py | 46 +++++ ..._v1_snippets_method_one_signature_sync.py} | 4 +- ..._snippets_method_server_streaming_async.py | 45 +++++ ..._snippets_method_server_streaming_sync.py} | 4 +- .../golden_snippets/sample_basic_async.py | 55 +++++ .../tests/unit/samplegen/test_integration.py | 73 +++++++ .../tests/unit/samplegen/test_samplegen.py | 19 +- .../tests/unit/samplegen/test_template.py | 188 ++++++++++++++++-- 134 files changed, 3365 insertions(+), 161 deletions(-) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py => cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py} (97%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py => cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py 
rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py => cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py => cloudasset_generated_asset_v1_asset_service_create_feed_sync.py} (90%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py => cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py} (90%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py => cloudasset_generated_asset_v1_asset_service_export_assets_sync.py} (95%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py => cloudasset_generated_asset_v1_asset_service_get_feed_sync.py} (91%) create mode 100644 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py => cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py} (90%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py => cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py => cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py => cloudasset_generated_asset_v1_asset_service_update_feed_sync.py} (90%) create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py rename 
packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py => iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py} (96%) create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py => iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py} (97%) create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py => iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py => iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py => 
logging_generated_logging_v2_config_service_v2_create_bucket_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py => logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_sink_grpc.py => logging_generated_logging_v2_config_service_v2_create_sink_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_view_grpc.py => logging_generated_logging_v2_config_service_v2_create_view_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py => logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py => logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py => logging_generated_logging_v2_config_service_v2_delete_sink_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_view_grpc.py => logging_generated_logging_v2_config_service_v2_delete_view_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py => logging_generated_logging_v2_config_service_v2_get_bucket_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py => logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py} (98%) create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py => logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_sink_grpc.py => logging_generated_logging_v2_config_service_v2_get_sink_sync.py} (90%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_view_grpc.py => logging_generated_logging_v2_config_service_v2_get_view_sync.py} (90%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py => logging_generated_logging_v2_config_service_v2_list_buckets_sync.py} (99%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py => 
logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py => logging_generated_logging_v2_config_service_v2_list_sinks_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_views_grpc.py => logging_generated_logging_v2_config_service_v2_list_views_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py => logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py => logging_generated_logging_v2_config_service_v2_update_bucket_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py => logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py => logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_sink_grpc.py => logging_generated_logging_v2_config_service_v2_update_sink_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_view_grpc.py => logging_generated_logging_v2_config_service_v2_update_view_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py => logging_generated_logging_v2_logging_service_v2_delete_log_sync.py} (94%) create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py => logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py => logging_generated_logging_v2_logging_service_v2_list_logs_sync.py} (94%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py => logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py} (96%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py => logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py => logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py => logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py => logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py => logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py => logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py} (98%) create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py => logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_create_instance_grpc.py => redis_generated_redis_v1_cloud_redis_create_instance_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py => redis_generated_redis_v1_cloud_redis_delete_instance_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_export_instance_grpc.py => redis_generated_redis_v1_cloud_redis_export_instance_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py => redis_generated_redis_v1_cloud_redis_failover_instance_sync.py} (91%) create mode 100644 
packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_get_instance_grpc.py => redis_generated_redis_v1_cloud_redis_get_instance_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_import_instance_grpc.py => redis_generated_redis_v1_cloud_redis_import_instance_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_list_instances_grpc.py => redis_generated_redis_v1_cloud_redis_list_instances_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_update_instance_grpc.py => redis_generated_redis_v1_cloud_redis_update_instance_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py => redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py} (91%) create mode 100644 
packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py => mollusca_generated_mollusca_v1_snippets_list_resources_sync.py} (91%) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py => mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py => mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py => mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py => mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py} (98%) create mode 100644 packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 2db9d8c9dbbd..876f0b9d0ad5 100644 --- 
a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -293,7 +293,15 @@ def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): sample["module_name"] = api_schema.naming.versioned_module_name sample["module_namespace"] = api_schema.naming.module_namespace - sample["client_name"] = api_schema.services[sample["service"]].client_name + # Assume the gRPC transport if the transport is not specified + sample.setdefault("transport", api.TRANSPORT_GRPC) + + if sample["transport"] == api.TRANSPORT_GRPC_ASYNC: + sample["client_name"] = api_schema.services[sample["service"] + ].async_client_name + else: + sample["client_name"] = api_schema.services[sample["service"]].client_name + # the type of the request object passed to the rpc e.g, `ListRequest` sample["request_type"] = rpc.input.ident.name @@ -946,10 +954,8 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A for service_name, service in gapic_metadata.services.items(): api_short_name = api_schema.services[f"{api_schema.naming.proto_package}.{service_name}"].shortname - for transport_type, client in service.clients.items(): - if transport_type == "grpc-async": - # TODO(busunkim): Enable generation of async samples - continue + for transport, client in service.clients.items(): + transport_type = "async" if transport == api.TRANSPORT_GRPC_ASYNC else "sync" for rpc_name, method_list in client.rpcs.items(): # Region Tag Format: # [{START|END} ${apishortname}_generated_${api}_${apiVersion}_${serviceName}_${rpcName}_{sync|async}_${overloadDisambiguation}] @@ -957,6 +963,7 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A spec = { "sample_type": "standalone", "rpc": rpc_name, + "transport": transport, "request": [], # response is populated in `preprocess_sample` "service": f"{api_schema.naming.proto_package}.{service_name}", diff --git a/packages/gapic-generator/gapic/schema/api.py 
b/packages/gapic-generator/gapic/schema/api.py index 8af8933e8d29..cdfb5ca6e701 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -45,6 +45,11 @@ from gapic.utils import RESERVED_NAMES +TRANSPORT_GRPC = "grpc" +TRANSPORT_GRPC_ASYNC = "grpc-async" +TRANSPORT_REST = "rest" + + @dataclasses.dataclass(frozen=True) class Proto: """A representation of a particular proto file within an API.""" @@ -414,11 +419,12 @@ def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: # This assumes the options are generated by the class method factory. transports = [] if "grpc" in options.transport: - transports.append(("grpc", service.client_name)) - transports.append(("grpc-async", service.async_client_name)) + transports.append((TRANSPORT_GRPC, service.client_name)) + transports.append( + (TRANSPORT_GRPC_ASYNC, service.async_client_name)) if "rest" in options.transport: - transports.append(("rest", service.client_name)) + transports.append((TRANSPORT_REST, service.client_name)) methods = sorted(service.methods.values(), key=lambda m: m.name) for tprt, client_name in transports: diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 7525c308f841..502d68fa607f 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -202,8 +202,13 @@ request=request {% endmacro %} -{% macro render_method_call(sample, calling_form, calling_form_enum) %} +{% macro render_method_call(sample, calling_form, calling_form_enum, transport) %} {# Note: this doesn't deal with enums or unions #} +{# LROs return operation objects and paged requests return pager objects #} +{% if transport == "grpc-async" and calling_form not in +[calling_form_enum.LongRunningRequestPromise, calling_form_enum.RequestPagedAll] %} 
+await{{ " "}} +{%- endif -%} {% if calling_form in [calling_form_enum.RequestStreamingBidi, calling_form_enum.RequestStreamingClient] %} client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim }}]) @@ -215,7 +220,7 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request {# Setting up the method invocation is the responsibility of the caller: #} {# it's just easier to set up client side streaming and other things from outside this macro. #} -{% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, response_statements ) %} +{% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, transport, response_statements ) %} # Make the request {% if calling_form == calling_form_enum.Request %} response = {{ method_invocation_text|trim }} @@ -228,13 +233,13 @@ response = {{ method_invocation_text|trim }} {% endif %} {% elif calling_form == calling_form_enum.RequestPagedAll %} page_result = {{ method_invocation_text|trim }} -for response in page_result: +{% if transport == "grpc-async" %}async {% endif %}for response in page_result: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.RequestPaged %} page_result = {{ method_invocation_text|trim }} -for page in page_result.pages(): +{% if transport == "grpc-async" %}async {% endif %}for page in page_result.pages(): for response in page: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} @@ -242,7 +247,7 @@ for page in page_result.pages(): {% elif calling_form in [calling_form_enum.RequestStreamingServer, calling_form_enum.RequestStreamingBidi] %} stream = {{ method_invocation_text|trim }} -for response in stream: +{% if transport == "grpc-async" %}async {% endif %}for response in stream: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} @@ 
-251,7 +256,7 @@ operation = {{ method_invocation_text|trim }} print("Waiting for operation to complete...") -response = operation.result() +response = {% if transport == "grpc-async" %}await {% endif %}operation.result() {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 79614d71a399..54db08ca2146 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -31,13 +31,13 @@ from {{ sample.module_namespace|join(".") }} import {{ sample.module_name }} {# also need calling form #} -def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): +{% if sample.transport == "grpc-async" %}async {% endif %}def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): """{{ sample.description }}""" {{ frags.render_client_setup(sample.module_name, sample.client_name)|indent }} {{ frags.render_request_setup(sample.request, sample.module_name, sample.request_type)|indent }} - {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum) %} - {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.response)|indent -}} + {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum, sample.transport) %} + {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.transport, sample.response)|indent -}} {% endwith %} # [END {{ sample.id }}] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py new file mode 100644 index 000000000000..e5c7daddc003 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async] +from google.cloud import asset_v1 + + +async def sample_analyze_iam_policy(): + """Snippet for analyze_iam_policy""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeIamPolicyRequest( + ) + + # Make the request + response = await client.analyze_iam_policy(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py new file mode 100644 index 000000000000..81abec24034e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for AnalyzeIamPolicyLongrunning +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async] +from google.cloud import asset_v1 + + +async def sample_analyze_iam_policy_longrunning(): + """Snippet for analyze_iam_policy_longrunning""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeIamPolicyLongrunningRequest( + ) + + # Make the request + operation = client.analyze_iam_policy_longrunning(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py similarity index 97% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py index e7a432399b98..c59ff3936a9d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_grpc.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_grpc] +# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync] from google.cloud import asset_v1 @@ -45,4 +45,4 @@ def sample_analyze_iam_policy_longrunning(): response = operation.result() print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_grpc] +# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py index 9bbac93d771c..188cc12bca01 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_grpc] +# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync] from google.cloud 
import asset_v1 @@ -43,4 +43,4 @@ def sample_analyze_iam_policy(): # Handle response print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_grpc] +# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py new file mode 100644 index 000000000000..29da9244a8ff --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetAssetsHistory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async] +from google.cloud import asset_v1 + + +async def sample_batch_get_assets_history(): + """Snippet for batch_get_assets_history""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetAssetsHistoryRequest( + ) + + # Make the request + response = await client.batch_get_assets_history(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py index 4a691dd0994b..8fd8b83c10f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_grpc] +# [START 
cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_batch_get_assets_history(): # Handle response print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_grpc] +# [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py new file mode 100644 index 000000000000..e6a4c3b8dd24 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_CreateFeed_async] +from google.cloud import asset_v1 + + +async def sample_create_feed(): + """Snippet for create_feed""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.CreateFeedRequest( + ) + + # Make the request + response = await client.create_feed(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_CreateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py index d4afa1d05b75..c41f2d928911 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_CreateFeed_grpc] +# [START cloudasset_generated_asset_v1_AssetService_CreateFeed_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_create_feed(): # Handle response print("{}".format(response)) -# [END 
cloudasset_generated_asset_v1_AssetService_CreateFeed_grpc] +# [END cloudasset_generated_asset_v1_AssetService_CreateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py new file mode 100644 index 000000000000..8e10aedf8d9c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_DeleteFeed_async] +from google.cloud import asset_v1 + + +async def sample_delete_feed(): + """Snippet for delete_feed""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.DeleteFeedRequest( + ) + + # Make the request + response = await client.delete_feed(request=request) + + +# [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py index f654233fd9ec..6f28f8c5de1c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_DeleteFeed_grpc] +# [START cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync] from google.cloud import asset_v1 @@ -41,4 +41,4 @@ def sample_delete_feed(): response = client.delete_feed(request=request) -# [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_grpc] +# [END 
cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py new file mode 100644 index 000000000000..776264c0ddd5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_ExportAssets_async] +from google.cloud import asset_v1 + + +async def sample_export_assets(): + """Snippet for export_assets""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.ExportAssetsRequest( + ) + + # Make the request + operation = client.export_assets(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_ExportAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py similarity index 95% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py index ef0446666429..e274800452bb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ExportAssets_grpc] +# [START cloudasset_generated_asset_v1_AssetService_ExportAssets_sync] from google.cloud import asset_v1 @@ -45,4 +45,4 @@ def 
sample_export_assets(): response = operation.result() print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_ExportAssets_grpc] +# [END cloudasset_generated_asset_v1_AssetService_ExportAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py new file mode 100644 index 000000000000..3c10ab1e1835 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_GetFeed_async] +from google.cloud import asset_v1 + + +async def sample_get_feed(): + """Snippet for get_feed""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.GetFeedRequest( + ) + + # Make the request + response = await client.get_feed(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_GetFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index 4c04e1cd575a..63dbf0c90ce9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_GetFeed_grpc] +# [START cloudasset_generated_asset_v1_AssetService_GetFeed_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_get_feed(): # Handle response print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_GetFeed_grpc] +# 
[END cloudasset_generated_asset_v1_AssetService_GetFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py new file mode 100644 index 000000000000..a15ad4a11353 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListFeeds +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_ListFeeds_async] +from google.cloud import asset_v1 + + +async def sample_list_feeds(): + """Snippet for list_feeds""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.ListFeedsRequest( + ) + + # Make the request + response = await client.list_feeds(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_ListFeeds_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py index b5b759711eb5..2ad102f795aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ListFeeds_grpc] +# [START cloudasset_generated_asset_v1_AssetService_ListFeeds_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_list_feeds(): # Handle response print("{}".format(response)) -# [END 
cloudasset_generated_asset_v1_AssetService_ListFeeds_grpc] +# [END cloudasset_generated_asset_v1_AssetService_ListFeeds_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py new file mode 100644 index 000000000000..b99fc9ac643a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAllIamPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async] +from google.cloud import asset_v1 + + +async def sample_search_all_iam_policies(): + """Snippet for search_all_iam_policies""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllIamPoliciesRequest( + ) + + # Make the request + page_result = client.search_all_iam_policies(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py index c3582046cadf..e142ae2fcbb5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_grpc] +# [START 
cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ def sample_search_all_iam_policies(): for response in page_result: print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_grpc] +# [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py new file mode 100644 index 000000000000..2fc426361c37 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAllResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_SearchAllResources_async] +from google.cloud import asset_v1 + + +async def sample_search_all_resources(): + """Snippet for search_all_resources""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllResourcesRequest( + ) + + # Make the request + page_result = client.search_all_resources(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py index 9e0772693912..85d6799a352d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_SearchAllResources_grpc] +# [START cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync] from google.cloud 
import asset_v1 @@ -42,4 +42,4 @@ def sample_search_all_resources(): for response in page_result: print("{}".format(response)) -# [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_grpc] +# [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py new file mode 100644 index 000000000000..2386c5443df2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateFeed +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_UpdateFeed_async] +from google.cloud import asset_v1 + + +async def sample_update_feed(): + """Snippet for update_feed""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.UpdateFeedRequest( + ) + + # Make the request + response = await client.update_feed(request=request) + + # Handle response + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py index e3835835459d..4dc4915cf53c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_UpdateFeed_grpc] +# [START cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_update_feed(): # Handle response print("{}".format(response)) -# [END 
cloudasset_generated_asset_v1_AssetService_UpdateFeed_grpc] +# [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py new file mode 100644 index 000000000000..de3341a6901c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateAccessToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async] +from google.iam import credentials_v1 + + +async def sample_generate_access_token(): + """Snippet for generate_access_token""" + + # Create a client + client = credentials_v1.IAMCredentialsAsyncClient() + + # Initialize request argument(s) + request = credentials_v1.GenerateAccessTokenRequest( + ) + + # Make the request + response = await client.generate_access_token(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py similarity index 96% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index b5c1028a86d1..24e4484bfcbf 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START 
iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_grpc] +# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_generate_access_token(): # Handle response print("{}".format(response)) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_grpc] +# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py new file mode 100644 index 000000000000..4417fcb6bbfe --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GenerateIdToken +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async] +from google.iam import credentials_v1 + + +async def sample_generate_id_token(): + """Snippet for generate_id_token""" + + # Create a client + client = credentials_v1.IAMCredentialsAsyncClient() + + # Initialize request argument(s) + request = credentials_v1.GenerateIdTokenRequest( + ) + + # Make the request + response = await client.generate_id_token(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py similarity index 97% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index 2dc2d87112f2..e23294e55977 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START 
iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_grpc] +# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_generate_id_token(): # Handle response print("{}".format(response)) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_grpc] +# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py new file mode 100644 index 000000000000..3732426ff813 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SignBlob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async] +from google.iam import credentials_v1 + + +async def sample_sign_blob(): + """Snippet for sign_blob""" + + # Create a client + client = credentials_v1.IAMCredentialsAsyncClient() + + # Initialize request argument(s) + request = credentials_v1.SignBlobRequest( + ) + + # Make the request + response = await client.sign_blob(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index 6d013e74344e..51312014ffda 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_grpc] +# [START 
iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_sign_blob(): # Handle response print("{}".format(response)) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_grpc] +# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py new file mode 100644 index 000000000000..ce4303485a73 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SignJwt +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-iam-credentials + + +# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async] +from google.iam import credentials_v1 + + +async def sample_sign_jwt(): + """Snippet for sign_jwt""" + + # Create a client + client = credentials_v1.IAMCredentialsAsyncClient() + + # Initialize request argument(s) + request = credentials_v1.SignJwtRequest( + ) + + # Make the request + response = await client.sign_jwt(request=request) + + # Handle response + print("{}".format(response)) + +# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index 530251cf57b9..22b860091731 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_grpc] +# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync] from 
google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_sign_jwt(): # Handle response print("{}".format(response)) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_grpc] +# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py new file mode 100644 index 000000000000..4cb0f540ec0b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async] +from google.cloud import logging_v2 + + +async def sample_create_bucket(): + """Snippet for create_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + ) + + # Make the request + response = await client.create_bucket(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index c74aa5daa6b1..004ee0a4865d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateBucket_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def 
sample_create_bucket(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py new file mode 100644 index 000000000000..1d0523610fcc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async] +from google.cloud import logging_v2 + + +async def sample_create_exclusion(): + """Snippet for create_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateExclusionRequest( + ) + + # Make the request + response = await client.create_exclusion(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index e0f6f874ea36..ce102ad9916f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync] from google.cloud import 
logging_v2 @@ -43,4 +43,4 @@ def sample_create_exclusion(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py new file mode 100644 index 000000000000..4678b4314fca --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateSink_async] +from google.cloud import logging_v2 + + +async def sample_create_sink(): + """Snippet for create_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateSinkRequest( + ) + + # Make the request + response = await client.create_sink(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index 1f32e69be9fd..f47adb78d36a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateSink_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_create_sink(): # Handle response 
print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py new file mode 100644 index 000000000000..4fbcdd61a87e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_CreateView_async] +from google.cloud import logging_v2 + + +async def sample_create_view(): + """Snippet for create_view""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + ) + + # Make the request + response = await client.create_view(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_CreateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py index f1754665b4dd..eefdb75b888f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateView_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_CreateView_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_create_view(): # Handle response 
print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateView_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_CreateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py new file mode 100644 index 000000000000..8ca8c4748e8f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async] +from google.cloud import logging_v2 + + +async def sample_delete_bucket(): + """Snippet for delete_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + ) + + # Make the request + response = await client.delete_bucket(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py index 0f951bf249a0..9616621b4e92 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def sample_delete_bucket(): response = 
client.delete_bucket(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py new file mode 100644 index 000000000000..0e8df5d06c7d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async] +from google.cloud import logging_v2 + + +async def sample_delete_exclusion(): + """Snippet for delete_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + ) + + # Make the request + response = await client.delete_exclusion(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py index 3a45b831e124..0268e93f28c7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def 
sample_delete_exclusion(): response = client.delete_exclusion(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py new file mode 100644 index 000000000000..d1aaaf9ca5b3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async] +from google.cloud import logging_v2 + + +async def sample_delete_sink(): + """Snippet for delete_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + ) + + # Make the request + response = await client.delete_sink(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py index 67df1f2f72b6..6bdc8b65dc93 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteSink_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def sample_delete_sink(): response = client.delete_sink(request=request) -# [END 
logging_generated_logging_v2_ConfigServiceV2_DeleteSink_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py new file mode 100644 index 000000000000..e9b1a1255ce4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteView_async] +from google.cloud import logging_v2 + + +async def sample_delete_view(): + """Snippet for delete_view""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + ) + + # Make the request + response = await client.delete_view(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py index d18f27c1bd80..a27f4604d361 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteView_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def sample_delete_view(): response = client.delete_view(request=request) -# [END 
logging_generated_logging_v2_ConfigServiceV2_DeleteView_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py new file mode 100644 index 000000000000..bf65579867c2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetBucket_async] +from google.cloud import logging_v2 + + +async def sample_get_bucket(): + """Snippet for get_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + ) + + # Make the request + response = await client.get_bucket(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index 0294fab02619..80470bf3c4da 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetBucket_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_get_bucket(): # Handle response 
print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py new file mode 100644 index 000000000000..ece79ce221a4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async] +from google.cloud import logging_v2 + + +async def sample_get_cmek_settings(): + """Snippet for get_cmek_settings""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + ) + + # Make the request + response = await client.get_cmek_settings(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index cd2a59003303..19cef4493424 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync] from google.cloud 
import logging_v2 @@ -43,4 +43,4 @@ def sample_get_cmek_settings(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py new file mode 100644 index 000000000000..9ce42101f1b4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async] +from google.cloud import logging_v2 + + +async def sample_get_exclusion(): + """Snippet for get_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + ) + + # Make the request + response = await client.get_exclusion(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index c28b3ba3cd27..7712065b80e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetExclusion_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def 
sample_get_exclusion(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py new file mode 100644 index 000000000000..b1a69a1d6fd2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetSink_async] +from google.cloud import logging_v2 + + +async def sample_get_sink(): + """Snippet for get_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + ) + + # Make the request + response = await client.get_sink(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index 2a70061ee4ad..e1d4ed0b813d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetSink_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_GetSink_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_get_sink(): # Handle response print("{}".format(response)) -# [END 
logging_generated_logging_v2_ConfigServiceV2_GetSink_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_GetSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py new file mode 100644 index 000000000000..5594e7ac2335 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_GetView_async] +from google.cloud import logging_v2 + + +async def sample_get_view(): + """Snippet for get_view""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + ) + + # Make the request + response = await client.get_view(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_GetView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index d736bc227d15..4865050cd9e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetView_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_GetView_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_get_view(): # Handle response print("{}".format(response)) -# [END 
logging_generated_logging_v2_ConfigServiceV2_GetView_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_GetView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py new file mode 100644 index 000000000000..ef7e8ba87adf --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuckets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async] +from google.cloud import logging_v2 + + +async def sample_list_buckets(): + """Snippet for list_buckets""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + ) + + # Make the request + page_result = client.list_buckets(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py similarity index 99% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index 9dd1ba5d9ee3..2c1ea9332de4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListBuckets_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def 
sample_list_buckets(): for response in page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py new file mode 100644 index 000000000000..c3d7f9165bbe --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListExclusions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async] +from google.cloud import logging_v2 + + +async def sample_list_exclusions(): + """Snippet for list_exclusions""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + ) + + # Make the request + page_result = client.list_exclusions(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index a6a9814d8fd3..255e4851e471 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListExclusions_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync] from google.cloud import 
logging_v2 @@ -42,4 +42,4 @@ def sample_list_exclusions(): for response in page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py new file mode 100644 index 000000000000..98d31d25353d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListSinks_async] +from google.cloud import logging_v2 + + +async def sample_list_sinks(): + """Snippet for list_sinks""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + ) + + # Make the request + page_result = client.list_sinks(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index af1edb26f0d1..d911ed8ed1f1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListSinks_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_sinks(): for response in 
page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py new file mode 100644 index 000000000000..7a24536f99e8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListViews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_ListViews_async] +from google.cloud import logging_v2 + + +async def sample_list_views(): + """Snippet for list_views""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + ) + + # Make the request + page_result = client.list_views(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_ListViews_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py index b436c1c780e0..b87a9315565c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListViews_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_ListViews_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_views(): for response in 
page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_ListViews_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_ListViews_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py new file mode 100644 index 000000000000..c0a8f1efa448 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async] +from google.cloud import logging_v2 + + +async def sample_undelete_bucket(): + """Snippet for undelete_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + ) + + # Make the request + response = await client.undelete_bucket(request=request) + + +# [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py index 37f0c8a7883d..8a4968c9f734 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def sample_undelete_bucket(): response = 
client.undelete_bucket(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py new file mode 100644 index 000000000000..423986e3fc24 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async] +from google.cloud import logging_v2 + + +async def sample_update_bucket(): + """Snippet for update_bucket""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + ) + + # Make the request + response = await client.update_bucket(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index ceb6d9f132a8..4b6a11c7174a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def 
sample_update_bucket(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py new file mode 100644 index 000000000000..67a9e4f2bcd0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async] +from google.cloud import logging_v2 + + +async def sample_update_cmek_settings(): + """Snippet for update_cmek_settings""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + ) + + # Make the request + response = await client.update_cmek_settings(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py index 1d476684f1e5..568a350a7bc5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_grpc] +# [START 
logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_update_cmek_settings(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py new file mode 100644 index 000000000000..7f13ad361eb5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async] +from google.cloud import logging_v2 + + +async def sample_update_exclusion(): + """Snippet for update_exclusion""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateExclusionRequest( + ) + + # Make the request + response = await client.update_exclusion(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index 7f115341b2b6..bc48f0654aee 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync] from google.cloud import 
logging_v2 @@ -43,4 +43,4 @@ def sample_update_exclusion(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py new file mode 100644 index 000000000000..a5d122924fba --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async] +from google.cloud import logging_v2 + + +async def sample_update_sink(): + """Snippet for update_sink""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSinkRequest( + ) + + # Make the request + response = await client.update_sink(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index aa43c457ae41..773e4399859c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateSink_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_update_sink(): # Handle response 
print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py new file mode 100644 index 000000000000..0eda6b705cd2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateView_async] +from google.cloud import logging_v2 + + +async def sample_update_view(): + """Snippet for update_view""" + + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + ) + + # Make the request + response = await client.update_view(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py index 982d897ac1b6..bd1ad230f794 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateView_grpc] +# [START logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_update_view(): # Handle response 
print("{}".format(response)) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_grpc] +# [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py new file mode 100644 index 000000000000..b317c8d26c9f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async] +from google.cloud import logging_v2 + + +async def sample_delete_log(): + """Snippet for delete_log""" + + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + ) + + # Make the request + response = await client.delete_log(request=request) + + +# [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py index 1e28c486eef4..0470d72af8a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_DeleteLog_grpc] +# [START logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def sample_delete_log(): response = client.delete_log(request=request) -# [END 
logging_generated_logging_v2_LoggingServiceV2_DeleteLog_grpc] +# [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py new file mode 100644 index 000000000000..771ba15974c0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_list_log_entries(): + """Snippet for list_log_entries""" + + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + ) + + # Make the request + page_result = client.list_log_entries(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index 4d5d9ec2a71b..79aa53d23d75 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_grpc] +# [START logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync] from 
google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_log_entries(): for response in page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_grpc] +# [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py new file mode 100644 index 000000000000..f9b2685a7dcb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_ListLogs_async] +from google.cloud import logging_v2 + + +async def sample_list_logs(): + """Snippet for list_logs""" + + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + ) + + # Make the request + page_result = client.list_logs(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py similarity index 94% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 082e37a62dec..2515bc936c15 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListLogs_grpc] +# [START logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_logs(): for response in 
page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_grpc] +# [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py new file mode 100644 index 000000000000..8ecd94e6d8e0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMonitoredResourceDescriptors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async] +from google.cloud import logging_v2 + + +async def sample_list_monitored_resource_descriptors(): + """Snippet for list_monitored_resource_descriptors""" + + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py similarity index 96% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py index ea3f250f8c54..5ca468a3a1ed 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ 
-23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_grpc] +# [START logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_monitored_resource_descriptors(): for response in page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_grpc] +# [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py new file mode 100644 index 000000000000..6ee5862d94ae --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_tail_log_entries(): + """Snippet for tail_log_entries""" + + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + ) + + # Make the request + stream = await client.tail_log_entries([]) + async for response in stream: + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index 5b92819f1fdf..442a1f3cac2f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_grpc] +# [START 
logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_tail_log_entries(): for response in stream: print("{}".format(response)) -# [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_grpc] +# [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py new file mode 100644 index 000000000000..da4353446d40 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async] +from google.cloud import logging_v2 + + +async def sample_write_log_entries(): + """Snippet for write_log_entries""" + + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.WriteLogEntriesRequest( + ) + + # Make the request + response = await client.write_log_entries(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py index 466eb64175aa..a74258a1deb0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_grpc] +# [START logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync] from 
google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_write_log_entries(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_grpc] +# [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py new file mode 100644 index 000000000000..dc77adacf59c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_create_log_metric(): + """Snippet for create_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateLogMetricRequest( + ) + + # Make the request + response = await client.create_log_metric(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index 03abbe60d5c1..61131f6bdce5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_grpc] +# [START logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync] from 
google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_create_log_metric(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_grpc] +# [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py new file mode 100644 index 000000000000..939087d1912b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_delete_log_metric(): + """Snippet for delete_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + ) + + # Make the request + response = await client.delete_log_metric(request=request) + + +# [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py index d940d3619ed1..8a0451c31952 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_grpc] +# [START logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync] from google.cloud import logging_v2 @@ -41,4 +41,4 @@ def 
sample_delete_log_metric(): response = client.delete_log_metric(request=request) -# [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_grpc] +# [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py new file mode 100644 index 000000000000..64eb3f59f411 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_get_log_metric(): + """Snippet for get_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + ) + + # Make the request + response = await client.get_log_metric(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index 9d14540d0dd4..7115ee688b0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_grpc] +# [START logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 
@@ def sample_get_log_metric(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_grpc] +# [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py new file mode 100644 index 000000000000..9acfc76ee897 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async] +from google.cloud import logging_v2 + + +async def sample_list_log_metrics(): + """Snippet for list_log_metrics""" + + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index 5e6f0c09d040..702a2867113c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_grpc] +# [START logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync] from 
google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_log_metrics(): for response in page_result: print("{}".format(response)) -# [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_grpc] +# [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py new file mode 100644 index 000000000000..c25de0cd5fab --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async] +from google.cloud import logging_v2 + + +async def sample_update_log_metric(): + """Snippet for update_log_metric""" + + # Create a client + client = logging_v2.MetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateLogMetricRequest( + ) + + # Make the request + response = await client.update_log_metric(request=request) + + # Handle response + print("{}".format(response)) + +# [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py similarity index 98% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index 51f9396d3ad3..89167019acfa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_grpc] +# [START logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync] from 
google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_update_log_metric(): # Handle response print("{}".format(response)) -# [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_grpc] +# [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py new file mode 100644 index 000000000000..dbfed3042be9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_CreateInstance_async] +from google.cloud import redis_v1 + + +async def sample_create_instance(): + """Snippet for create_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.CreateInstanceRequest( + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index bf03c6c8d4b6..8b874ba3c965 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_CreateInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_CreateInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def sample_create_instance(): response = 
operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_CreateInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py new file mode 100644 index 000000000000..2a706ace319a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_DeleteInstance_async] +from google.cloud import redis_v1 + + +async def sample_delete_instance(): + """Snippet for delete_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.DeleteInstanceRequest( + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index 76ee5fb446f7..f2bfcef90559 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_DeleteInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_DeleteInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def sample_delete_instance(): response = 
operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_DeleteInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_DeleteInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py new file mode 100644 index 000000000000..9cbe0ee55254 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_ExportInstance_async] +from google.cloud import redis_v1 + + +async def sample_export_instance(): + """Snippet for export_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.ExportInstanceRequest( + ) + + # Make the request + operation = client.export_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_ExportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py index bb48e0954dda..f6ec74e9e853 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ExportInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_ExportInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def sample_export_instance(): response = 
operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_ExportInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_ExportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py new file mode 100644 index 000000000000..441dd31f6a12 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FailoverInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_FailoverInstance_async] +from google.cloud import redis_v1 + + +async def sample_failover_instance(): + """Snippet for failover_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.FailoverInstanceRequest( + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_FailoverInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index acf11a58708b..f80743627e40 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_FailoverInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_FailoverInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def 
sample_failover_instance(): response = operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_FailoverInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_FailoverInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py new file mode 100644 index 000000000000..390c4f9b1a4e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_GetInstance_async] +from google.cloud import redis_v1 + + +async def sample_get_instance(): + """Snippet for get_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceRequest( + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle response + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index 4d1b9942c700..c7ee84f2316c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_GetInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_GetInstance_sync] from google.cloud import redis_v1 @@ -43,4 +43,4 @@ def sample_get_instance(): # Handle response print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_GetInstance_grpc] +# [END 
redis_generated_redis_v1_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py new file mode 100644 index 000000000000..fff17dfa8457 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_ImportInstance_async] +from google.cloud import redis_v1 + + +async def sample_import_instance(): + """Snippet for import_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.ImportInstanceRequest( + ) + + # Make the request + operation = client.import_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_ImportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py index 95826e9db298..19feb1321519 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ImportInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_ImportInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def sample_import_instance(): response = 
operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_ImportInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_ImportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py new file mode 100644 index 000000000000..33d0758f39af --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_ListInstances_async] +from google.cloud import redis_v1 + + +async def sample_list_instances(): + """Snippet for list_instances""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.ListInstancesRequest( + ) + + # Make the request + page_result = client.list_instances(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_ListInstances_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index 1268a040147b..df9296d34a20 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ListInstances_grpc] +# [START redis_generated_redis_v1_CloudRedis_ListInstances_sync] from google.cloud import redis_v1 @@ -42,4 +42,4 @@ def sample_list_instances(): for response in page_result: print("{}".format(response)) -# [END 
redis_generated_redis_v1_CloudRedis_ListInstances_grpc] +# [END redis_generated_redis_v1_CloudRedis_ListInstances_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py new file mode 100644 index 000000000000..3f6263abc066 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_UpdateInstance_async] +from google.cloud import redis_v1 + + +async def sample_update_instance(): + """Snippet for update_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.UpdateInstanceRequest( + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py index 24a3d55117dd..e363d28c117b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_UpdateInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_UpdateInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def sample_update_instance(): response = 
operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_UpdateInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py new file mode 100644 index 000000000000..1ae6bc3a6a98 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpgradeInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_generated_redis_v1_CloudRedis_UpgradeInstance_async] +from google.cloud import redis_v1 + + +async def sample_upgrade_instance(): + """Snippet for upgrade_instance""" + + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.UpgradeInstanceRequest( + ) + + # Make the request + operation = client.upgrade_instance(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index 0594da347e27..60759777e066 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_UpgradeInstance_grpc] +# [START redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync] from google.cloud import redis_v1 @@ -45,4 +45,4 @@ def sample_upgrade_instance(): 
response = operation.result() print("{}".format(response)) -# [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_grpc] +# [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py new file mode 100644 index 000000000000..72a2f6595054 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListResources +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_ListResources_async] +from animalia import mollusca_v1 + + +async def sample_list_resources(): + """Snippet for list_resources""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.ListResourcesRequest( + ) + + # Make the request + page_result = client.list_resources(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_ListResources_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index 1ea032b5d9f1..e7423eaf3d0d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_grpc.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_ListResources_grpc] +# [START mollusca_generated_mollusca_v1_Snippets_ListResources_sync] from animalia import mollusca_v1 @@ -42,4 +42,4 @@ def sample_list_resources(): for response in page_result: print("{}".format(response)) -# [END mollusca_generated_mollusca_v1_Snippets_ListResources_grpc] +# [END mollusca_generated_mollusca_v1_Snippets_ListResources_sync] diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py new file mode 100644 index 000000000000..26ab1b086cfb --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodBidiStreaming +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async] +from animalia import mollusca_v1 + + +async def sample_method_bidi_streaming(): + """Snippet for method_bidi_streaming""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + stream = await client.method_bidi_streaming([]) + async for response in stream: + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py similarity index 98% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index 1c9be7560f7d..239eeb77639f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_grpc.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_grpc] +# [START mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync] from animalia import mollusca_v1 @@ -42,4 +42,4 @@ def sample_method_bidi_streaming(): for response in stream: print("{}".format(response)) -# [END mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_grpc] +# [END 
mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py new file mode 100644 index 000000000000..3327f9a2b427 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodLroSignatures +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async] +from animalia import mollusca_v1 + + +async def sample_method_lro_signatures(): + """Snippet for method_lro_signatures""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + operation = client.method_lro_signatures(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py similarity index 98% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py index 50974d82b3bc..af22fb412539 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_grpc.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_grpc] +# [START mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync] from animalia import mollusca_v1 @@ -45,4 +45,4 @@ def sample_method_lro_signatures(): response = operation.result() print("{}".format(response)) -# [END 
mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_grpc] +# [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py new file mode 100644 index 000000000000..b8f462957747 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodOneSignature +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async] +from animalia import mollusca_v1 + + +async def sample_method_one_signature(): + """Snippet for method_one_signature""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + response = await client.method_one_signature(request=request) + + # Handle response + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py similarity index 98% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py index 9c6192b43fb1..e8e438f169a8 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_grpc.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_grpc] +# [START mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync] from animalia import mollusca_v1 @@ -43,4 +43,4 @@ def sample_method_one_signature(): # Handle response print("{}".format(response)) -# [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_grpc] +# [END 
mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py new file mode 100644 index 000000000000..753c7666e5b8 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for MethodServerStreaming +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async] +from animalia import mollusca_v1 + + +async def sample_method_server_streaming(): + """Snippet for method_server_streaming""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.SignatureRequest( + ) + + # Make the request + stream = await client.method_server_streaming(request=request) + async for response in stream: + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py similarity index 98% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py index 13913a0ed391..339623a2d60f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_grpc.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_grpc] +# [START mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync] from animalia import mollusca_v1 @@ -42,4 +42,4 @@ def sample_method_server_streaming(): for response in stream: print("{}".format(response)) -# [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_grpc] +# [END 
mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py new file mode 100644 index 000000000000..5aa99485ad45 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Classify +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install molluscs-v1-molluscclient + + +# [START mollusc_classify_sync] +from molluscs.v1 import molluscclient + + +async def sample_classify(video, location): + """Determine the full taxonomy of input mollusc""" + + # Create a client + client = molluscclient.MolluscServiceAsyncClient() + + # Initialize request argument(s) + classify_target = {} + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target["video"] = f.read() + + # location = "New Zealand" + classify_target["location_annotation"] = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = await client.classify(request=request) + + # Handle response + print("Mollusc is a \"{}\"".format(response.taxonomy)) + +# [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index b0fa2de22116..8b8883014737 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -123,6 +123,79 @@ def test_generate_sample_basic(): assert sample_str == golden_snippet("sample_basic.py") +def test_generate_sample_basic_async(): + # Note: the sample integration tests are needfully large + # and difficult to eyeball parse. They are intended to be integration tests + # that catch errors in behavior that is emergent from combining smaller features + # or in features that are sufficiently small and trivial that it doesn't make sense + # to have standalone tests. 
+ + input_type = DummyMessage( + type="REQUEST TYPE", + fields={ + "classify_target": DummyField( + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ) + ) + }, + ident=DummyIdent(name="molluscs.v1.ClassifyRequest") + ) + + api_naming = naming.NewNaming( + name="MolluscClient", namespace=("molluscs", "v1")) + service = wrappers.Service( + service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + methods={ + "Classify": DummyMethod( + input=input_type, + output=message_factory("$resp.taxonomy"), + flattened_fields={ + "classify_target": DummyField(name="classify_target") + } + ) + }, + visible_resources={}, + ) + + schema = DummyApiSchema( + services={"animalia.mollusca.v1.Mollusc": service}, + naming=api_naming, + ) + + sample = {"service": "animalia.mollusca.v1.Mollusc", + "rpc": "Classify", + "transport": "grpc-async", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + {"field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True}, + {"field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location"} + ], + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} + + sample_str = samplegen.generate_sample( + sample, + schema, + env.get_template('examples/sample.py.j2') + ) + + assert sample_str == golden_snippet("sample_basic_async.py") + + def test_generate_sample_basic_unflattenable(): # Note: the sample integration tests are needfully large # and difficult to eyeball parse. 
They are intended to be integration tests diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 5359840f0b0c..73798ce842ee 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -1910,15 +1910,28 @@ def test_generate_sample_spec_basic(): ] ) opts = Options.build("transport=grpc") - specs = list(samplegen.generate_sample_specs(api_schema, opts=opts)) - assert len(specs) == 1 + specs = sorted(samplegen.generate_sample_specs( + api_schema, opts=opts), key=lambda x: x["transport"]) + specs.sort(key=lambda x: x["transport"]) + assert len(specs) == 2 assert specs[0] == { "sample_type": "standalone", "rpc": "Ramshorn", + "transport": "grpc", "request": [], "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_grpc", + "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn" + } + + assert specs[1] == { + "sample_type": "standalone", + "rpc": "Ramshorn", + "transport": "grpc-async", + "request": [], + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_async", "description": "Snippet for ramshorn" } diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 0eabe9e4f092..edd7e3b0fe77 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -700,7 +700,7 @@ def test_print_input_params(): CALLING_FORM_TEMPLATE_TEST_STR = ''' {% import "feature_fragments.j2" as frags %} {{ frags.render_calling_form("TEST_INVOCATION_TXT", calling_form, - calling_form_enum, + calling_form_enum, transport, [{"print": ["Test print statement"]}]) }} ''' @@ -715,7 +715,8 @@ def 
test_render_calling_form_request(): print("Test print statement") ''', calling_form_enum=CallingForm, - calling_form=CallingForm.Request) + calling_form=CallingForm.Request, + transport="grpc") def test_render_calling_form_paged_all(): @@ -727,7 +728,21 @@ def test_render_calling_form_paged_all(): print("Test print statement") ''', calling_form_enum=CallingForm, - calling_form=CallingForm.RequestPagedAll) + calling_form=CallingForm.RequestPagedAll, + transport="grpc") + + +def test_render_calling_form_paged_all_async(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + # Make the request + page_result = TEST_INVOCATION_TXT + async for response in page_result: + print("Test print statement") + ''', + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPagedAll, + transport="grpc-async") def test_render_calling_form_paged(): @@ -740,7 +755,22 @@ def test_render_calling_form_paged(): print("Test print statement") ''', calling_form_enum=CallingForm, - calling_form=CallingForm.RequestPaged) + calling_form=CallingForm.RequestPaged, + transport="grpc") + + +def test_render_calling_form_paged_async(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + # Make the request + page_result = TEST_INVOCATION_TXT + async for page in page_result.pages(): + for response in page: + print("Test print statement") + ''', + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPaged, + transport="grpc-async") def test_render_calling_form_streaming_server(): @@ -752,7 +782,21 @@ def test_render_calling_form_streaming_server(): print("Test print statement") ''', calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingServer) + calling_form=CallingForm.RequestStreamingServer, + transport="grpc") + + +def test_render_calling_form_streaming_server_async(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + # Make the request + stream = TEST_INVOCATION_TXT + async for response in stream: + print("Test print statement") + ''', + 
calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingServer, + transport="grpc-async") def test_render_calling_form_streaming_bidi(): @@ -764,7 +808,21 @@ def test_render_calling_form_streaming_bidi(): print("Test print statement") ''', calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingBidi) + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc") + + +def test_render_calling_form_streaming_bidi_async(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + # Make the request + stream = TEST_INVOCATION_TXT + async for response in stream: + print("Test print statement") + ''', + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc-async") def test_render_calling_form_longrunning(): @@ -779,7 +837,24 @@ def test_render_calling_form_longrunning(): print("Test print statement") ''', calling_form_enum=CallingForm, - calling_form=CallingForm.LongRunningRequestPromise) + calling_form=CallingForm.LongRunningRequestPromise, + transport="grpc") + + +def test_render_calling_form_longrunning_async(): + check_template(CALLING_FORM_TEMPLATE_TEST_STR, + ''' + # Make the request + operation = TEST_INVOCATION_TXT + + print("Waiting for operation to complete...") + + response = await operation.result() + print("Test print statement") + ''', + calling_form_enum=CallingForm, + calling_form=CallingForm.LongRunningRequestPromise, + transport="grpc-async") def test_render_method_call_basic(): @@ -787,7 +862,7 @@ def test_render_method_call_basic(): ''' {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, - calling_form, calling_form_enum) }} + calling_form, calling_form_enum, transport) }} ''', ''' client.categorize_mollusc(request=request) @@ -806,7 +881,37 @@ def test_render_method_call_basic(): ], ), calling_form_enum=CallingForm, - calling_form=CallingForm.Request + calling_form=CallingForm.Request, + 
transport="grpc" + ) + + +def test_render_method_call_basic_async(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + calling_form, calling_form_enum, transport) }} + ''', + ''' + await client.categorize_mollusc(request=request) + ''', + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", + body=True, + single=None), + samplegen.TransformedRequest(base="audio", + body=True, + single=None), + samplegen.TransformedRequest(base="guess", + body=True, + single=None) + ], + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.Request, + transport="grpc-async" ) @@ -815,7 +920,7 @@ def test_render_method_call_basic_flattenable(): ''' {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, - calling_form, calling_form_enum) }} + calling_form, calling_form_enum, transport) }} ''', ''' client.categorize_mollusc(video=video, audio=audio, guess=guess) @@ -835,7 +940,8 @@ def test_render_method_call_basic_flattenable(): flattenable=True, ), calling_form_enum=CallingForm, - calling_form=CallingForm.Request + calling_form=CallingForm.Request, + transport="grpc" ) @@ -844,7 +950,7 @@ def test_render_method_call_bidi(): ''' {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, - calling_form, calling_form_enum) }} + calling_form, calling_form_enum, transport) }} ''', ''' client.categorize_mollusc([video]) @@ -859,7 +965,33 @@ def test_render_method_call_bidi(): ] ), calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingBidi + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc", + ) + + +def test_render_method_call_bidi_async(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", 
"request": request}, + calling_form, calling_form_enum, transport) }} + ''', + ''' + await client.categorize_mollusc([video]) + ''', + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="video", + body=True, + single=None + ) + ] + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc-async", ) @@ -868,7 +1000,7 @@ def test_render_method_call_client(): ''' {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, - calling_form, calling_form_enum) }} + calling_form, calling_form_enum, transport) }} ''', ''' client.categorize_mollusc([video]) @@ -883,7 +1015,33 @@ def test_render_method_call_client(): ] ), calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingClient + calling_form=CallingForm.RequestStreamingClient, + transport="grpc", + ) + + +def test_render_method_call_client_async(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + calling_form, calling_form_enum, transport) }} + ''', + ''' + await client.categorize_mollusc([video]) + ''', + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + base="video", + body=True, + single=None + ) + ] + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingClient, + transport="grpc-async", ) From 4549637642ffc249b070863ffd95262b3fff89a1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 11 Jun 2021 18:00:07 +0000 Subject: [PATCH 0573/1339] chore: release 0.49.0 (#919) :robot: I have created a release \*beep\* \*boop\* --- ## [0.49.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.1...v0.49.0) (2021-06-11) ### Features * add async samples 
([#861](https://www.github.com/googleapis/gapic-generator-python/issues/861)) ([e385ffd](https://www.github.com/googleapis/gapic-generator-python/commit/e385ffd7f012c6a38c9fcd7c5f36ce090311032b)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 6440171dd582..8b1bd9fdf07e 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.49.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.1...v0.49.0) (2021-06-11) + + +### Features + +* add async samples ([#861](https://www.github.com/googleapis/gapic-generator-python/issues/861)) ([e385ffd](https://www.github.com/googleapis/gapic-generator-python/commit/e385ffd7f012c6a38c9fcd7c5f36ce090311032b)) + ### [0.48.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.0...v0.48.1) (2021-06-09) From b6f758be0f0d769d81f5676177a9defeb5bd7b79 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 16 Jun 2021 15:30:58 -0700 Subject: [PATCH 0574/1339] feat: enable self signed jwt for grpc (#920) * feat: enable self signe jwt for grpc * update test * update golden files --- .../%sub/services/%service/transports/base.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/grpc.py.j2 | 1 + .../services/%service/transports/grpc_asyncio.py.j2 | 1 + .../gapic/%name_%version/%sub/test_%service.py.j2 | 13 +++++++++++++ .../services/asset_service/transports/base.py | 8 ++++++++ .../services/asset_service/transports/grpc.py | 1 + .../asset_service/transports/grpc_asyncio.py | 1 + .../tests/unit/gapic/asset_v1/test_asset_service.py | 11 +++++++++++ 
.../services/iam_credentials/transports/base.py | 8 ++++++++ .../services/iam_credentials/transports/grpc.py | 1 + .../iam_credentials/transports/grpc_asyncio.py | 1 + .../gapic/credentials_v1/test_iam_credentials.py | 11 +++++++++++ .../services/config_service_v2/transports/base.py | 8 ++++++++ .../services/config_service_v2/transports/grpc.py | 1 + .../config_service_v2/transports/grpc_asyncio.py | 1 + .../services/logging_service_v2/transports/base.py | 8 ++++++++ .../services/logging_service_v2/transports/grpc.py | 1 + .../logging_service_v2/transports/grpc_asyncio.py | 1 + .../services/metrics_service_v2/transports/base.py | 8 ++++++++ .../services/metrics_service_v2/transports/grpc.py | 1 + .../metrics_service_v2/transports/grpc_asyncio.py | 1 + .../unit/gapic/logging_v2/test_config_service_v2.py | 11 +++++++++++ .../gapic/logging_v2/test_logging_service_v2.py | 11 +++++++++++ .../gapic/logging_v2/test_metrics_service_v2.py | 11 +++++++++++ .../services/cloud_redis/transports/base.py | 8 ++++++++ .../services/cloud_redis/transports/grpc.py | 1 + .../services/cloud_redis/transports/grpc_asyncio.py | 1 + .../tests/unit/gapic/redis_v1/test_cloud_redis.py | 11 +++++++++++ 28 files changed, 149 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 728961464d48..09fb65cbe8bb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -19,6 +19,7 @@ from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore {% endif %} from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import 
service_account # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} @@ -75,6 +76,7 @@ class {{ service.name }}Transport(abc.ABC): scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -98,6 +100,8 @@ class {{ service.name }}Transport(abc.ABC): API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -124,6 +128,10 @@ class {{ service.name }}Transport(abc.ABC): elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 3d9fbf9820dd..c2dd618569e5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -150,6 +150,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 0b72ecfe9e8c..24fc3476a146 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -195,6 +195,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index b3c179d29014..bfcab30571f3 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -113,6 +113,19 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(client_c {% endif %} +@pytest.mark.parametrize("client_class", [ + {{ service.client_name }}, + {% if 'grpc' in opts.transport %} + {{ service.async_client_name }}, + {% endif %} +]) +def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ {{ service.client_name }}, {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 132b35963d36..21e813df40a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore @@ -65,6 +66,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: 
"""Instantiate the transport. @@ -88,6 +90,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -114,6 +118,10 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index aa6b2cc1e456..1fedd3cc1988 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -150,6 +150,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index b488dc4cc030..cac570116016 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -195,6 +195,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 400ed9a8ef86..84505a99d1e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -105,6 +105,17 @@ def test_asset_service_client_from_service_account_info(client_class): assert client.transport._host == 'cloudasset.googleapis.com:443' +@pytest.mark.parametrize("client_class", [ + AssetServiceClient, + AssetServiceAsyncClient, +]) +def test_asset_service_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ AssetServiceClient, AssetServiceAsyncClient, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index a0f053c02a25..d15cfc9c1ea8 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.iam.credentials_v1.types import common @@ -62,6 +63,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -85,6 +87,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -111,6 +115,10 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 64e38cb3ee9c..76c83a0b0380 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -155,6 +155,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index b1748ed0b386..65aa22ae67e0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -200,6 +200,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 681a98bfb2f9..f0f99349b769 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -97,6 +97,17 @@ def test_iam_credentials_client_from_service_account_info(client_class): assert client.transport._host == 'iamcredentials.googleapis.com:443' +@pytest.mark.parametrize("client_class", [ + IAMCredentialsClient, + IAMCredentialsAsyncClient, +]) +def test_iam_credentials_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ IAMCredentialsClient, IAMCredentialsAsyncClient, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index d5a0fa84e318..39b42884c1e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config from google.protobuf import empty_pb2 # type: ignore @@ -66,6 +67,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -89,6 +91,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -115,6 +119,10 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 94f628450fc2..75441232816e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -147,6 +147,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 498d4c1dbbc4..970b3049a7c2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 419242eb550a..4a482cd6c895 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging from google.protobuf import empty_pb2 # type: ignore @@ -67,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -90,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -116,6 +120,10 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a8011ec63491..bf8e6e527c20 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -147,6 +147,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 72b4fbf64e70..dd277841974b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 73f542ac3b65..b5f5613b955a 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_metrics from google.protobuf import empty_pb2 # type: ignore @@ -67,6 +68,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -90,6 +92,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -116,6 +120,10 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 9eb3fc2c9cff..a8bbf146c957 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -147,6 +147,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index a937e3b793fe..bb19604e441d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -192,6 +192,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1127edd9023f..dde627c9d21e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -98,6 +98,17 @@ def test_config_service_v2_client_from_service_account_info(client_class): assert client.transport._host == 'logging.googleapis.com:443' +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, + ConfigServiceV2AsyncClient, +]) +def test_config_service_v2_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ ConfigServiceV2Client, ConfigServiceV2AsyncClient, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e0932066947e..e583321973bd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -104,6 +104,17 @@ def test_logging_service_v2_client_from_service_account_info(client_class): assert client.transport._host == 'logging.googleapis.com:443' +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, + LoggingServiceV2AsyncClient, +]) +def test_logging_service_v2_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ LoggingServiceV2Client, 
LoggingServiceV2AsyncClient, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 04123a32febd..5310d8382c3e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -102,6 +102,17 @@ def test_metrics_service_v2_client_from_service_account_info(client_class): assert client.transport._host == 'logging.googleapis.com:443' +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, + MetricsServiceV2AsyncClient, +]) +def test_metrics_service_v2_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ MetricsServiceV2Client, MetricsServiceV2AsyncClient, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index f0b0ba3137b9..cdaaa458c1fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries # type: ignore from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from 
google.oauth2 import service_account # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore @@ -64,6 +65,7 @@ def __init__( scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. @@ -87,6 +89,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: @@ -113,6 +117,10 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. 
self._credentials = credentials diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 6130efc05e01..8b7117bfdf76 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -169,6 +169,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 7e3bfc51f7fd..55f04e1ab9b4 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -214,6 +214,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ff39c8c468f2..904da1cd1b22 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -102,6 +102,17 
@@ def test_cloud_redis_client_from_service_account_info(client_class): assert client.transport._host == 'redis.googleapis.com:443' +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, + CloudRedisAsyncClient, +]) +def test_cloud_redis_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_called_with(True) + + @pytest.mark.parametrize("client_class", [ CloudRedisClient, CloudRedisAsyncClient, From 0d16dc9eb6fc662b42e79efe6058754bb46e316d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 21 Jun 2021 11:27:53 -0700 Subject: [PATCH 0575/1339] fix: temporarily disable code coverage in showcase_unit tests (#925) Code coverage is behaving badly and preventing merges. Also updates version of showcase used. --- .../.github/workflows/tests.yaml | 20 ++++++++++------ packages/gapic-generator/noxfile.py | 23 +++++-------------- .../tests/system/test_resource_crud.py | 8 +++---- 3 files changed, 23 insertions(+), 28 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 623dfc04c4a2..2ff05a9095c2 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -98,11 +98,13 @@ jobs: run: | sudo mkdir -p /usr/src/showcase sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/googleapis/gapic-showcase/releases/download/v0.12.0/gapic-showcase-0.12.0-linux-amd64.tar.gz --output /usr/src/showcase/showcase-0.12.0-linux-amd64.tar.gz + curl --location https://github.com/googleapis/gapic-showcase/releases/download/v${SHOWCASE_VERSION}/gapic-showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz --output /usr/src/showcase/showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz cd /usr/src/showcase/ 
tar -xf showcase-* ./gapic-showcase run & cd - + env: + SHOWCASE_VERSION: 0.16.0 - name: Install nox. run: python -m pip install nox - name: Install protoc 3.12.1. @@ -156,7 +158,7 @@ jobs: sudo mkdir gapic_showcase sudo chown ${USER} gapic_showcase cd gapic_showcase - curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.11.0/gapic-showcase-0.11.0-linux-amd64.tar.gz | tar xz + curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v${SHOWCASE_VERSION}/gapic-showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz | tar xz ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & showcase_pid=$! @@ -170,6 +172,8 @@ jobs: cd .. nox -s ${{ matrix.target }} + env: + SHOWCASE_VERSION: 0.16.0 # TODO(yon-mg): add compute unit tests showcase-unit: strategy: @@ -206,6 +210,8 @@ jobs: run: python -m pip install nox - name: Run unit tests. run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} + env: + SHOWCASE_VERSION: 0.16.0 showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: @@ -234,6 +240,8 @@ jobs: run: python -m pip install nox - name: Run unit tests. run: nox -s showcase_unit_add_iam_methods + env: + SHOWCASE_VERSION: 0.16.0 showcase-mypy: runs-on: ubuntu-latest strategy: @@ -265,6 +273,8 @@ jobs: run: python -m pip install nox - name: Typecheck the generated output. run: nox -s showcase_mypy${{ matrix.variant }} + env: + SHOWCASE_VERSION: 0.16.0 snippetgen: runs-on: ubuntu-latest steps: @@ -304,15 +314,11 @@ jobs: run: | sudo apt-get update sudo apt-get install -y pandoc gcc git - - name: Install nox and codecov. + - name: Install nox. run: | python -m pip install nox - python -m pip install codecov - name: Run unit tests. run: nox -s unit-${{ matrix.python }} - - name: Submit coverage data to codecov. 
- run: codecov - if: always() integration: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index bf5e378d978a..674fe965f997 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -25,7 +25,7 @@ import shutil -showcase_version = "0.11.0" +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.16.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") @@ -34,7 +34,7 @@ def unit(session): """Run the unit test suite.""" session.install( - "coverage", "pytest", "pytest-cov", "pytest-xdist", "pyfakefs", + "pytest", "pytest-xdist", "pyfakefs", ) session.install("-e", ".") @@ -45,10 +45,6 @@ def unit(session): or [ "-vv", "-n=auto", - "--cov=gapic", - "--cov-config=.coveragerc", - "--cov-report=term", - "--cov-report=html", path.join("tests", "unit"), ] ), @@ -185,9 +181,7 @@ def showcase_mtls_alternative_templates(session): def run_showcase_unit_tests(session, fail_under=100): session.install( - "coverage", "pytest", - "pytest-cov", "pytest-xdist", "asyncmock", "pytest-asyncio", @@ -198,9 +192,6 @@ def run_showcase_unit_tests(session, fail_under=100): "py.test", "-n=auto", "--quiet", - "--cov=google", - "--cov-append", - f"--cov-fail-under={str(fail_under)}", *(session.posargs or [path.join("tests", "unit")]), ) @@ -226,12 +217,11 @@ def showcase_unit( # google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. session.install("google-auth==1.21.1") - run_showcase_unit_tests(session, fail_under=0) + run_showcase_unit_tests(session) # 2. 
Run the tests again with latest version of dependencies session.install(".", "--upgrade", "--force-reinstall") - # This time aggregate coverage should reach 100% - run_showcase_unit_tests(session, fail_under=100) + run_showcase_unit_tests(session) @nox.session(python=["3.7", "3.8", "3.9"]) @@ -258,12 +248,11 @@ def showcase_unit_add_iam_methods(session): # google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. session.install("google-auth==1.21.1") - run_showcase_unit_tests(session, fail_under=0) + run_showcase_unit_tests(session) # 2. Run the tests again with latest version of dependencies session.install(".", "--upgrade", "--force-reinstall") - # This time aggregate coverage should reach 100% - run_showcase_unit_tests(session, fail_under=100) + run_showcase_unit_tests(session) @nox.session(python="3.8") diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 85bafe561af4..8af963e86a72 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -62,15 +62,15 @@ def test_nonslash_resource(messaging): def test_path_parsing(messaging): - expected = {"room_id": "tiki"} + expected = {"room": "tiki"} actual = messaging.parse_room_path(messaging.room_path("tiki")) assert expected == actual expected = { - "user_id": "bdfl", - "legacy_user_id": "apocalyptic", - "blurb_id": "city", + "user": "bdfl", + "legacy_user": "apocalyptic", + "blurb": "city", } actual = messaging.parse_blurb_path( messaging.blurb_path("bdfl", "apocalyptic", "city") From 4af84da8e30372c8d93bde895b5117ae761bb5e2 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Mon, 21 Jun 2021 11:36:13 -0700 Subject: [PATCH 0576/1339] =?UTF-8?q?fix(bazel):=20Remove=20monolith=20imp?= =?UTF-8?q?orts=20from=20Python=20=C2=B5gen=20Bazel=20rules=20(#923)?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(bazel): Remove monolith imports from Python µgen Bazel rules * fix: temporarily disable integration tests * chore: empty commit to rerun CI Co-authored-by: Dov Shlachter --- packages/gapic-generator/.github/workflows/tests.yaml | 6 ++++-- packages/gapic-generator/repositories.bzl | 9 +-------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 2ff05a9095c2..8d47dd3a8d67 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -344,8 +344,10 @@ jobs: env: BAZEL_VERSION: 3.5.0 BAZEL_BINARY: bazel_3.5.0-linux-x86_64.deb - - name: Integration Tests - run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis + # Temporarily disable integration tests due to a circular Bazel rule dependency. + # TODO(miraleung): Uncomment these once the monolith is gone from googleapis. 
+ #- name: Integration Tests + #run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis style-check: runs-on: ubuntu-latest diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 524670034810..19a1a867b69c 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -54,14 +54,7 @@ def gapic_generator_python(): url = "https://github.com/jgm/pandoc/releases/download/2.2.1/pandoc-2.2.1-macOS.zip", ) - _maybe( - http_archive, - name = "com_google_api_codegen", - strip_prefix = "gapic-generator-03abac35ec0716c6f426ffc1532f9a62f1c9e6a2", - urls = ["https://github.com/googleapis/gapic-generator/archive/03abac35ec0716c6f426ffc1532f9a62f1c9e6a2.zip"], - ) - - _rules_gapic_version = "0.5.3" + _rules_gapic_version = "0.5.4" _maybe( http_archive, name = "rules_gapic", From 0dd23c90526232621261040647adf1e85d1eb2c6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 21 Jun 2021 12:16:26 -0700 Subject: [PATCH 0577/1339] chore: release 0.50.0 (#922) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8b1bd9fdf07e..ffa64685fae0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## [0.50.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.49.0...v0.50.0) (2021-06-21) + + +### Features + +* enable self signed jwt for grpc ([#920](https://www.github.com/googleapis/gapic-generator-python/issues/920)) ([da119c7](https://www.github.com/googleapis/gapic-generator-python/commit/da119c72c82d04e168c4b41e5bf910a0c1609ce3)) + + +### Bug Fixes + +* 
**bazel:** Remove monolith imports from Python µgen Bazel rules ([#923](https://www.github.com/googleapis/gapic-generator-python/issues/923)) ([4a2afa7](https://www.github.com/googleapis/gapic-generator-python/commit/4a2afa78455817e7e6c058d21857326867fe3f21)) +* temporarily disable code coverage in showcase_unit tests ([#925](https://www.github.com/googleapis/gapic-generator-python/issues/925)) ([0dfac03](https://www.github.com/googleapis/gapic-generator-python/commit/0dfac03bd3ef8c12b33e6c03e62eab3e7bf2cd69)) + ## [0.49.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.1...v0.49.0) (2021-06-11) From 751c627bc1fcefc22877f915b53195cd78ed2da9 Mon Sep 17 00:00:00 2001 From: Mira Leung Date: Thu, 24 Jun 2021 15:31:08 -0700 Subject: [PATCH 0578/1339] =?UTF-8?q?fix(bazel):=20Re-enable=20Python=20?= =?UTF-8?q?=C2=B5gen=20integration=20tests=20post=20monolith=20rule=20remo?= =?UTF-8?q?val=20(#926)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(bazel): Re-enable Python µgen integration tests post monolith rule removal * fix: update googleapis hash * fix: update asset goldens (due to googleapis hash update) --- .../.github/workflows/tests.yaml | 6 +- packages/gapic-generator/repositories.bzl | 4 +- .../asset/google/cloud/asset/__init__.py | 6 + .../asset/google/cloud/asset_v1/__init__.py | 6 + .../google/cloud/asset_v1/gapic_metadata.json | 10 + .../services/asset_service/async_client.py | 173 +++++++- .../asset_v1/services/asset_service/client.py | 173 +++++++- .../asset_v1/services/asset_service/pagers.py | 122 +++++ .../services/asset_service/transports/base.py | 14 + .../services/asset_service/transports/grpc.py | 27 ++ .../asset_service/transports/grpc_asyncio.py | 27 ++ .../google/cloud/asset_v1/types/__init__.py | 6 + .../cloud/asset_v1/types/asset_service.py | 312 +++++++++++-- .../google/cloud/asset_v1/types/assets.py | 243 +++++++++- ...sset_v1_asset_service_list_assets_async.py | 45 ++ 
...asset_v1_asset_service_list_assets_sync.py | 45 ++ .../asset/scripts/fixup_asset_v1_keywords.py | 3 +- .../unit/gapic/asset_v1/test_asset_service.py | 418 ++++++++++++++++++ 18 files changed, 1546 insertions(+), 94 deletions(-) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 8d47dd3a8d67..2ff05a9095c2 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -344,10 +344,8 @@ jobs: env: BAZEL_VERSION: 3.5.0 BAZEL_BINARY: bazel_3.5.0-linux-x86_64.deb - # Temporarily disable integration tests due to a circular Bazel rule dependency. - # TODO(miraleung): Uncomment these once the monolith is gone from googleapis. 
- #- name: Integration Tests - #run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis + - name: Integration Tests + run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis style-check: runs-on: ubuntu-latest diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 19a1a867b69c..83ba719f34c1 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -65,8 +65,8 @@ def gapic_generator_python(): _maybe( http_archive, name = "com_google_googleapis", - strip_prefix = "googleapis-51fe6432d4076a4c101f561967df4bf1f27818e1", - urls = ["https://github.com/googleapis/googleapis/archive/51fe6432d4076a4c101f561967df4bf1f27818e1.zip"], + strip_prefix = "googleapis-ffc531383747ebb702dad3db237ef5fdea796363", + urls = ["https://github.com/googleapis/googleapis/archive/ffc531383747ebb702dad3db237ef5fdea796363.zip"], ) def gapic_generator_register_toolchains(): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index d823276527f8..815c196c23c6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -35,6 +35,8 @@ from google.cloud.asset_v1.types.asset_service import GetFeedRequest from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery +from google.cloud.asset_v1.types.asset_service import ListAssetsRequest +from google.cloud.asset_v1.types.asset_service import ListAssetsResponse from google.cloud.asset_v1.types.asset_service import ListFeedsRequest from 
google.cloud.asset_v1.types.asset_service import ListFeedsResponse from google.cloud.asset_v1.types.asset_service import OutputConfig @@ -48,6 +50,7 @@ from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest from google.cloud.asset_v1.types.asset_service import ContentType from google.cloud.asset_v1.types.assets import Asset +from google.cloud.asset_v1.types.assets import ConditionEvaluation from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState from google.cloud.asset_v1.types.assets import IamPolicySearchResult @@ -76,6 +79,8 @@ 'GetFeedRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', + 'ListAssetsRequest', + 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', 'OutputConfig', @@ -89,6 +94,7 @@ 'UpdateFeedRequest', 'ContentType', 'Asset', + 'ConditionEvaluation', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 3988eaad259f..35fdc0668d2a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -35,6 +35,8 @@ from .types.asset_service import GetFeedRequest from .types.asset_service import IamPolicyAnalysisOutputConfig from .types.asset_service import IamPolicyAnalysisQuery +from .types.asset_service import ListAssetsRequest +from .types.asset_service import ListAssetsResponse from .types.asset_service import ListFeedsRequest from .types.asset_service import ListFeedsResponse from .types.asset_service import OutputConfig @@ -48,6 +50,7 @@ from .types.asset_service import UpdateFeedRequest from .types.asset_service import ContentType from .types.assets import Asset 
+from .types.assets import ConditionEvaluation from .types.assets import IamPolicyAnalysisResult from .types.assets import IamPolicyAnalysisState from .types.assets import IamPolicySearchResult @@ -67,6 +70,7 @@ 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', 'BigQueryDestination', +'ConditionEvaluation', 'ContentType', 'CreateFeedRequest', 'DeleteFeedRequest', @@ -82,6 +86,8 @@ 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', +'ListAssetsRequest', +'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', 'OutputConfig', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json index a80eb281c4a9..eebf25a106eb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json @@ -45,6 +45,11 @@ "get_feed" ] }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, "ListFeeds": { "methods": [ "list_feeds" @@ -105,6 +110,11 @@ "get_feed" ] }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, "ListFeeds": { "methods": [ "list_feeds" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 099a951d1737..054bfe55652b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -229,6 +229,95 @@ async def export_assets(self, # Done; return the response. 
return response + async def list_assets(self, + request: asset_service.ListAssetsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAssetsAsyncPager: + r"""Lists assets with time and resource types and returns + paged results in response. + + Args: + request (:class:`google.cloud.asset_v1.types.ListAssetsRequest`): + The request object. ListAssets request. + parent (:class:`str`): + Required. Name of the organization or project the assets + belong to. Format: "organizations/[organization-number]" + (such as "organizations/123"), "projects/[project-id]" + (such as "projects/my-project-id"), or + "projects/[project-number]" (such as "projects/12345"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager: + ListAssets response. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.ListAssetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_assets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAssetsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + async def batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest = None, *, @@ -732,7 +821,7 @@ async def search_all_resources(self, Required. A scope can be a project, a folder, or an organization. The search is limited to the resources within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllResources`` `__ + ```cloudasset.assets.searchAllResources`` `__ permission on the desired scope. The allowed values are: @@ -748,40 +837,48 @@ async def search_all_resources(self, should not be set. query (:class:`str`): Optional. The query statement. See `how to construct a - query `__ + query `__ for more information. If not specified or empty, it will search all the resources within the specified ``scope``. - Note that the query string is compared against each - Cloud IAM policy binding, including its members, roles, - and Cloud IAM conditions. The returned Cloud IAM - policies will only contain the bindings that match your - query. To learn more about the IAM policy structure, see - `IAM policy - doc `__. 
Examples: - ``name:Important`` to find Cloud resources whose name contains "Important" as a word. + - ``name=Important`` to find the Cloud resource whose + name is exactly "Important". - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix. - - ``description:*por*`` to find Cloud resources whose - description contains "por" as a substring. + display name contains "Impor" as a prefix of any word + in the field. - ``location:us-west*`` to find Cloud resources whose - location is prefixed with "us-west". + location contains both "us" and "west" as prefixes. - ``labels:prod`` to find Cloud resources whose labels contain "prod" as a key or value. - ``labels.env:prod`` to find Cloud resources that have a label "env" and its value is "prod". - ``labels.env:*`` to find Cloud resources that have a label "env". + - ``kmsKey:key`` to find Cloud resources encrypted with + a customer-managed encryption key whose name contains + the word "key". + - ``state:ACTIVE`` to find Cloud resources whose state + contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find {{gcp_name}} resources + whose state doesn't contain "ACTIVE" as a word. + - ``createTime<1609459200`` to find Cloud resources + that were created before "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 + 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Cloud resources + that were updated after "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 + 00:00:00 UTC" in seconds. - ``Important`` to find Cloud resources that contain "Important" as a word in any of the searchable fields. - ``Impor*`` to find Cloud resources that contain - "Impor" as a prefix in any of the searchable fields. - - ``*por*`` to find Cloud resources that contain "por" - as a substring in any of the searchable fields. + "Impor" as a prefix of any word in any of the + searchable fields. 
- ``Important location:(us-west1 OR global)`` to find Cloud resources that contain "Important" as a word in any of the searchable fields and are also located in @@ -796,6 +893,20 @@ async def search_all_resources(self, `searchable asset types `__. + Regular expressions are also supported. For example: + + - "compute.googleapis.com.*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type + ends with "Instance". + - ".*Instance.*" snapshots resources whose asset type + contains "Instance". + + See `RE2 `__ + for all supported regular expression syntax. If the + regular expression does not match any supported asset + type, an INVALID_ARGUMENT error will be returned. + This corresponds to the ``asset_types`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -896,7 +1007,7 @@ async def search_all_iam_policies(self, Required. A scope can be a project, a folder, or an organization. The search is limited to the IAM policies within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllIamPolicies`` `__ + ```cloudasset.assets.searchAllIamPolicies`` `__ permission on the desired scope. The allowed values are: @@ -915,7 +1026,13 @@ async def search_all_iam_policies(self, query `__ for more information. If not specified or empty, it will search all the IAM policies within the specified - ``scope``. + ``scope``. Note that the query string is compared + against each Cloud IAM policy binding, including its + members, roles, and Cloud IAM conditions. The returned + Cloud IAM policies will only contain the bindings that + match your query. To learn more about the IAM policy + structure, see `IAM policy + doc `__. Examples: @@ -923,6 +1040,9 @@ async def search_all_iam_policies(self, that specify user "amy@gmail.com". - ``policy:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. 
+ - ``policy:comp*`` to find IAM policy bindings that + contain "comp" as a prefix of any word in the + binding. - ``policy.role.permissions:storage.buckets.update`` to find IAM policy bindings that specify a role containing "storage.buckets.update" permission. Note @@ -930,19 +1050,30 @@ async def search_all_iam_policies(self, to a role's included permissions, policy bindings that specify this role will be dropped from the search results. + - ``policy.role.permissions:upd*`` to find IAM policy + bindings that specify a role containing "upd" as a + prefix of any word in the role permission. Note that + if callers don't have ``iam.roles.get`` access to a + role's included permissions, policy bindings that + specify this role will be dropped from the search + results. - ``resource:organizations/123456`` to find IAM policy bindings that are set on "organizations/123456". + - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` + to find IAM policy bindings that are set on the + project named "myproject". - ``Important`` to find IAM policy bindings that contain "Important" as a word in any of the searchable fields (except for the included permissions). - - ``*por*`` to find IAM policy bindings that contain - "por" as a substring in any of the searchable fields - (except for the included permissions). - ``resource:(instance1 OR instance2) policy:amy`` to find IAM policy bindings that are set on resources "instance1" or "instance2" and also specify user "amy". + - ``roles:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``memberTypes:user`` to find IAM policy bindings that + contain the "user" member type. 
This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 1b86808fbd38..aa285d830e49 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -426,6 +426,95 @@ def export_assets(self, # Done; return the response. return response + def list_assets(self, + request: asset_service.ListAssetsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListAssetsPager: + r"""Lists assets with time and resource types and returns + paged results in response. + + Args: + request (google.cloud.asset_v1.types.ListAssetsRequest): + The request object. ListAssets request. + parent (str): + Required. Name of the organization or project the assets + belong to. Format: "organizations/[organization-number]" + (such as "organizations/123"), "projects/[project-id]" + (such as "projects/my-project-id"), or + "projects/[project-number]" (such as "projects/12345"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager: + ListAssets response. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.ListAssetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.ListAssetsRequest): + request = asset_service.ListAssetsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAssetsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + def batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest = None, *, @@ -902,7 +991,7 @@ def search_all_resources(self, Required. A scope can be a project, a folder, or an organization. 
The search is limited to the resources within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllResources`` `__ + ```cloudasset.assets.searchAllResources`` `__ permission on the desired scope. The allowed values are: @@ -918,40 +1007,48 @@ def search_all_resources(self, should not be set. query (str): Optional. The query statement. See `how to construct a - query `__ + query `__ for more information. If not specified or empty, it will search all the resources within the specified ``scope``. - Note that the query string is compared against each - Cloud IAM policy binding, including its members, roles, - and Cloud IAM conditions. The returned Cloud IAM - policies will only contain the bindings that match your - query. To learn more about the IAM policy structure, see - `IAM policy - doc `__. Examples: - ``name:Important`` to find Cloud resources whose name contains "Important" as a word. + - ``name=Important`` to find the Cloud resource whose + name is exactly "Important". - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix. - - ``description:*por*`` to find Cloud resources whose - description contains "por" as a substring. + display name contains "Impor" as a prefix of any word + in the field. - ``location:us-west*`` to find Cloud resources whose - location is prefixed with "us-west". + location contains both "us" and "west" as prefixes. - ``labels:prod`` to find Cloud resources whose labels contain "prod" as a key or value. - ``labels.env:prod`` to find Cloud resources that have a label "env" and its value is "prod". - ``labels.env:*`` to find Cloud resources that have a label "env". + - ``kmsKey:key`` to find Cloud resources encrypted with + a customer-managed encryption key whose name contains + the word "key". + - ``state:ACTIVE`` to find Cloud resources whose state + contains "ACTIVE" as a word. 
+ - ``NOT state:ACTIVE`` to find {{gcp_name}} resources + whose state doesn't contain "ACTIVE" as a word. + - ``createTime<1609459200`` to find Cloud resources + that were created before "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 + 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Cloud resources + that were updated after "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 + 00:00:00 UTC" in seconds. - ``Important`` to find Cloud resources that contain "Important" as a word in any of the searchable fields. - ``Impor*`` to find Cloud resources that contain - "Impor" as a prefix in any of the searchable fields. - - ``*por*`` to find Cloud resources that contain "por" - as a substring in any of the searchable fields. + "Impor" as a prefix of any word in any of the + searchable fields. - ``Important location:(us-west1 OR global)`` to find Cloud resources that contain "Important" as a word in any of the searchable fields and are also located in @@ -966,6 +1063,20 @@ def search_all_resources(self, `searchable asset types `__. + Regular expressions are also supported. For example: + + - "compute.googleapis.com.*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type + ends with "Instance". + - ".*Instance.*" snapshots resources whose asset type + contains "Instance". + + See `RE2 `__ + for all supported regular expression syntax. If the + regular expression does not match any supported asset + type, an INVALID_ARGUMENT error will be returned. + This corresponds to the ``asset_types`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1059,7 +1170,7 @@ def search_all_iam_policies(self, Required. A scope can be a project, a folder, or an organization. The search is limited to the IAM policies within the ``scope``. 
The caller must be granted the - ```cloudasset.assets.searchAllIamPolicies`` `__ + ```cloudasset.assets.searchAllIamPolicies`` `__ permission on the desired scope. The allowed values are: @@ -1078,7 +1189,13 @@ def search_all_iam_policies(self, query `__ for more information. If not specified or empty, it will search all the IAM policies within the specified - ``scope``. + ``scope``. Note that the query string is compared + against each Cloud IAM policy binding, including its + members, roles, and Cloud IAM conditions. The returned + Cloud IAM policies will only contain the bindings that + match your query. To learn more about the IAM policy + structure, see `IAM policy + doc `__. Examples: @@ -1086,6 +1203,9 @@ def search_all_iam_policies(self, that specify user "amy@gmail.com". - ``policy:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. + - ``policy:comp*`` to find IAM policy bindings that + contain "comp" as a prefix of any word in the + binding. - ``policy.role.permissions:storage.buckets.update`` to find IAM policy bindings that specify a role containing "storage.buckets.update" permission. Note @@ -1093,19 +1213,30 @@ def search_all_iam_policies(self, to a role's included permissions, policy bindings that specify this role will be dropped from the search results. + - ``policy.role.permissions:upd*`` to find IAM policy + bindings that specify a role containing "upd" as a + prefix of any word in the role permission. Note that + if callers don't have ``iam.roles.get`` access to a + role's included permissions, policy bindings that + specify this role will be dropped from the search + results. - ``resource:organizations/123456`` to find IAM policy bindings that are set on "organizations/123456". + - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` + to find IAM policy bindings that are set on the + project named "myproject". 
- ``Important`` to find IAM policy bindings that contain "Important" as a word in any of the searchable fields (except for the included permissions). - - ``*por*`` to find IAM policy bindings that contain - "por" as a substring in any of the searchable fields - (except for the included permissions). - ``resource:(instance1 OR instance2) policy:amy`` to find IAM policy bindings that are set on resources "instance1" or "instance2" and also specify user "amy". + - ``roles:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``memberTypes:user`` to find IAM policy bindings that + contain the "user" member type. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 01ea865a491a..c09af15cdfa4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -19,6 +19,128 @@ from google.cloud.asset_v1.types import assets +class ListAssetsPager: + """A pager for iterating through ``list_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.ListAssetsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``assets`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAssets`` requests and continue to iterate + through the ``assets`` field on the + corresponding responses. + + All the usual :class:`google.cloud.asset_v1.types.ListAssetsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., asset_service.ListAssetsResponse], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.ListAssetsRequest): + The initial request object. + response (google.cloud.asset_v1.types.ListAssetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.ListAssetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[asset_service.ListAssetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[assets.Asset]: + for page in self.pages: + yield from page.assets + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListAssetsAsyncPager: + """A pager for iterating through ``list_assets`` requests. + + This class thinly wraps an initial + :class:`google.cloud.asset_v1.types.ListAssetsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``assets`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListAssets`` requests and continue to iterate + through the ``assets`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.asset_v1.types.ListAssetsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[asset_service.ListAssetsResponse]], + request: asset_service.ListAssetsRequest, + response: asset_service.ListAssetsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.asset_v1.types.ListAssetsRequest): + The initial request object. + response (google.cloud.asset_v1.types.ListAssetsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = asset_service.ListAssetsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[asset_service.ListAssetsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[assets.Asset]: + async def async_generator(): + async for page in self.pages: + for response in page.assets: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + class SearchAllResourcesPager: """A pager for iterating through ``search_all_resources`` requests. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 21e813df40a4..6d94cb355084 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -154,6 +154,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.list_assets: gapic_v1.method.wrap_method( + self.list_assets, + default_timeout=None, + client_info=client_info, + ), self.batch_get_assets_history: gapic_v1.method.wrap_method( self.batch_get_assets_history, default_retry=retries.Retry( @@ -268,6 +273,15 @@ def export_assets(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_assets(self) -> Callable[ + [asset_service.ListAssetsRequest], + Union[ + asset_service.ListAssetsResponse, + Awaitable[asset_service.ListAssetsResponse] + ]]: + raise NotImplementedError() + @property def batch_get_assets_history(self) -> Callable[ [asset_service.BatchGetAssetsHistoryRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 1fedd3cc1988..60326ac61ddf 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -275,6 +275,33 @@ def export_assets(self) -> Callable[ ) return self._stubs['export_assets'] + @property + def 
list_assets(self) -> Callable[ + [asset_service.ListAssetsRequest], + asset_service.ListAssetsResponse]: + r"""Return a callable for the list assets method over gRPC. + + Lists assets with time and resource types and returns + paged results in response. + + Returns: + Callable[[~.ListAssetsRequest], + ~.ListAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_assets' not in self._stubs: + self._stubs['list_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListAssets', + request_serializer=asset_service.ListAssetsRequest.serialize, + response_deserializer=asset_service.ListAssetsResponse.deserialize, + ) + return self._stubs['list_assets'] + @property def batch_get_assets_history(self) -> Callable[ [asset_service.BatchGetAssetsHistoryRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index cac570116016..748505dd03e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -279,6 +279,33 @@ def export_assets(self) -> Callable[ ) return self._stubs['export_assets'] + @property + def list_assets(self) -> Callable[ + [asset_service.ListAssetsRequest], + Awaitable[asset_service.ListAssetsResponse]]: + r"""Return a callable for the list assets method over gRPC. + + Lists assets with time and resource types and returns + paged results in response. 
+ + Returns: + Callable[[~.ListAssetsRequest], + Awaitable[~.ListAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_assets' not in self._stubs: + self._stubs['list_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListAssets', + request_serializer=asset_service.ListAssetsRequest.serialize, + response_deserializer=asset_service.ListAssetsResponse.deserialize, + ) + return self._stubs['list_assets'] + @property def batch_get_assets_history(self) -> Callable[ [asset_service.BatchGetAssetsHistoryRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index 02a737df6f5e..f20e22242252 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -32,6 +32,8 @@ GetFeedRequest, IamPolicyAnalysisOutputConfig, IamPolicyAnalysisQuery, + ListAssetsRequest, + ListAssetsResponse, ListFeedsRequest, ListFeedsResponse, OutputConfig, @@ -47,6 +49,7 @@ ) from .assets import ( Asset, + ConditionEvaluation, IamPolicyAnalysisResult, IamPolicyAnalysisState, IamPolicySearchResult, @@ -75,6 +78,8 @@ 'GetFeedRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', + 'ListAssetsRequest', + 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', 'OutputConfig', @@ -88,6 +93,7 @@ 'UpdateFeedRequest', 'ContentType', 'Asset', + 'ConditionEvaluation', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 3bc57820106c..8ecee480f817 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -28,6 +28,8 @@ 'ContentType', 'ExportAssetsRequest', 'ExportAssetsResponse', + 'ListAssetsRequest', + 'ListAssetsResponse', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', 'CreateFeedRequest', @@ -183,6 +185,124 @@ class ExportAssetsResponse(proto.Message): ) +class ListAssetsRequest(proto.Message): + r"""ListAssets request. + Attributes: + parent (str): + Required. Name of the organization or project the assets + belong to. Format: "organizations/[organization-number]" + (such as "organizations/123"), "projects/[project-id]" (such + as "projects/my-project-id"), or "projects/[project-number]" + (such as "projects/12345"). + read_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp to take an asset snapshot. This can + only be set to a timestamp between the current + time and the current time minus 35 days + (inclusive). If not specified, the current time + will be used. Due to delays in resource data + collection and indexing, there is a volatile + window during which running the same query may + get different results. + asset_types (Sequence[str]): + A list of asset types to take a snapshot for. For example: + "compute.googleapis.com/Disk". + + Regular expression is also supported. For example: + + - "compute.googleapis.com.*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type ends + with "Instance". + - ".*Instance.*" snapshots resources whose asset type + contains "Instance". 
+ + See `RE2 `__ for + all supported regular expression syntax. If the regular + expression does not match any supported asset type, an + INVALID_ARGUMENT error will be returned. + + If specified, only matching assets will be returned, + otherwise, it will snapshot all asset types. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types. + content_type (google.cloud.asset_v1.types.ContentType): + Asset content type. If not specified, no + content but the asset name will be returned. + page_size (int): + The maximum number of assets to be returned + in a single response. Default is 100, minimum is + 1, and maximum is 1000. + page_token (str): + The ``next_page_token`` returned from the previous + ``ListAssetsResponse``, or unspecified for the first + ``ListAssetsRequest``. It is a continuation of a prior + ``ListAssets`` call, and the API should return the next page + of assets. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + read_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + asset_types = proto.RepeatedField( + proto.STRING, + number=3, + ) + content_type = proto.Field( + proto.ENUM, + number=4, + enum='ContentType', + ) + page_size = proto.Field( + proto.INT32, + number=5, + ) + page_token = proto.Field( + proto.STRING, + number=6, + ) + + +class ListAssetsResponse(proto.Message): + r"""ListAssets response. + Attributes: + read_time (google.protobuf.timestamp_pb2.Timestamp): + Time the snapshot was taken. + assets (Sequence[google.cloud.asset_v1.types.Asset]): + Assets. + next_page_token (str): + Token to retrieve the next page of results. + It expires 72 hours after the page token for the + first page is generated. Set to empty if there + are no remaining results. 
+ """ + + @property + def raw_page(self): + return self + + read_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + assets = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=gca_assets.Asset, + ) + next_page_token = proto.Field( + proto.STRING, + number=3, + ) + + class BatchGetAssetsHistoryRequest(proto.Message): r"""Batch get assets history request. Attributes: @@ -441,6 +561,11 @@ class GcsDestination(proto.Message): See `Viewing and Editing Object Metadata `__ for more information. + + If the specified Cloud Storage object already exists and + there is no + `hold `__, + it will be overwritten with the exported result. uri_prefix (str): The uri prefix of all generated Cloud Storage objects. Example: "gs://bucket_name/object_name_prefix". Each object @@ -718,7 +843,7 @@ class SearchAllResourcesRequest(proto.Message): Required. A scope can be a project, a folder, or an organization. The search is limited to the resources within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllResources`` `__ + ```cloudasset.assets.searchAllResources`` `__ permission on the desired scope. The allowed values are: @@ -730,38 +855,46 @@ class SearchAllResourcesRequest(proto.Message): "organizations/123456") query (str): Optional. The query statement. See `how to construct a - query `__ + query `__ for more information. If not specified or empty, it will search all the resources within the specified ``scope``. - Note that the query string is compared against each Cloud - IAM policy binding, including its members, roles, and Cloud - IAM conditions. The returned Cloud IAM policies will only - contain the bindings that match your query. To learn more - about the IAM policy structure, see `IAM policy - doc `__. Examples: - ``name:Important`` to find Cloud resources whose name contains "Important" as a word. + - ``name=Important`` to find the Cloud resource whose name + is exactly "Important". 
- ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix. - - ``description:*por*`` to find Cloud resources whose - description contains "por" as a substring. + display name contains "Impor" as a prefix of any word in + the field. - ``location:us-west*`` to find Cloud resources whose - location is prefixed with "us-west". + location contains both "us" and "west" as prefixes. - ``labels:prod`` to find Cloud resources whose labels contain "prod" as a key or value. - ``labels.env:prod`` to find Cloud resources that have a label "env" and its value is "prod". - ``labels.env:*`` to find Cloud resources that have a label "env". + - ``kmsKey:key`` to find Cloud resources encrypted with a + customer-managed encryption key whose name contains the + word "key". + - ``state:ACTIVE`` to find Cloud resources whose state + contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find {{gcp_name}} resources whose + state doesn't contain "ACTIVE" as a word. + - ``createTime<1609459200`` to find Cloud resources that + were created before "2021-01-01 00:00:00 UTC". 1609459200 + is the epoch timestamp of "2021-01-01 00:00:00 UTC" in + seconds. + - ``updateTime>1609459200`` to find Cloud resources that + were updated after "2021-01-01 00:00:00 UTC". 1609459200 + is the epoch timestamp of "2021-01-01 00:00:00 UTC" in + seconds. - ``Important`` to find Cloud resources that contain "Important" as a word in any of the searchable fields. - ``Impor*`` to find Cloud resources that contain "Impor" - as a prefix in any of the searchable fields. - - ``*por*`` to find Cloud resources that contain "por" as a - substring in any of the searchable fields. + as a prefix of any word in any of the searchable fields. - ``Important location:(us-west1 OR global)`` to find Cloud resources that contain "Important" as a word in any of the searchable fields and are also located in the @@ -770,6 +903,20 @@ class SearchAllResourcesRequest(proto.Message): Optional. 
A list of asset types that this request searches for. If empty, it will search all the `searchable asset types `__. + + Regular expressions are also supported. For example: + + - "compute.googleapis.com.*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type ends + with "Instance". + - ".*Instance.*" snapshots resources whose asset type + contains "Instance". + + See `RE2 `__ for + all supported regular expression syntax. If the regular + expression does not match any supported asset type, an + INVALID_ARGUMENT error will be returned. page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a larger value is given. If @@ -784,16 +931,28 @@ class SearchAllResourcesRequest(proto.Message): the previous response. The values of all other method parameters, must be identical to those in the previous call. order_by (str): - Optional. A comma separated list of fields specifying the + Optional. A comma-separated list of fields specifying the sorting order of the results. The default order is ascending. Add " DESC" after the field name to indicate descending order. Redundant space characters are ignored. - Example: "location DESC, name". Only string fields in the - response are sortable, including ``name``, ``displayName``, - ``description``, ``location``. All the other fields such as - repeated fields (e.g., ``networkTags``), map fields (e.g., - ``labels``) and struct fields (e.g., - ``additionalAttributes``) are not supported. + Example: "location DESC, name". 
Only singular primitive + fields in the response are sortable: + + - name + - assetType + - project + - displayName + - description + - location + - kmsKey + - createTime + - updateTime + - state + - parentFullResourceName + - parentAssetType All the other fields such as repeated + fields (e.g., ``networkTags``), map fields (e.g., + ``labels``) and struct fields (e.g., + ``additionalAttributes``) are not supported. """ scope = proto.Field( @@ -858,7 +1017,7 @@ class SearchAllIamPoliciesRequest(proto.Message): Required. A scope can be a project, a folder, or an organization. The search is limited to the IAM policies within the ``scope``. The caller must be granted the - ```cloudasset.assets.searchAllIamPolicies`` `__ + ```cloudasset.assets.searchAllIamPolicies`` `__ permission on the desired scope. The allowed values are: @@ -873,6 +1032,12 @@ class SearchAllIamPoliciesRequest(proto.Message): query `__ for more information. If not specified or empty, it will search all the IAM policies within the specified ``scope``. + Note that the query string is compared against each Cloud + IAM policy binding, including its members, roles, and Cloud + IAM conditions. The returned Cloud IAM policies will only + contain the bindings that match your query. To learn more + about the IAM policy structure, see `IAM policy + doc `__. Examples: @@ -880,23 +1045,35 @@ class SearchAllIamPoliciesRequest(proto.Message): specify user "amy@gmail.com". - ``policy:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. + - ``policy:comp*`` to find IAM policy bindings that contain + "comp" as a prefix of any word in the binding. - ``policy.role.permissions:storage.buckets.update`` to find IAM policy bindings that specify a role containing "storage.buckets.update" permission. Note that if callers don't have ``iam.roles.get`` access to a role's included permissions, policy bindings that specify this role will be dropped from the search results. 
+ - ``policy.role.permissions:upd*`` to find IAM policy + bindings that specify a role containing "upd" as a prefix + of any word in the role permission. Note that if callers + don't have ``iam.roles.get`` access to a role's included + permissions, policy bindings that specify this role will + be dropped from the search results. - ``resource:organizations/123456`` to find IAM policy bindings that are set on "organizations/123456". + - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` + to find IAM policy bindings that are set on the project + named "myproject". - ``Important`` to find IAM policy bindings that contain "Important" as a word in any of the searchable fields (except for the included permissions). - - ``*por*`` to find IAM policy bindings that contain "por" - as a substring in any of the searchable fields (except - for the included permissions). - ``resource:(instance1 OR instance2) policy:amy`` to find IAM policy bindings that are set on resources "instance1" or "instance2" and also specify user "amy". + - ``roles:roles/compute.admin`` to find IAM policy bindings + that specify the Compute Admin role. + - ``memberTypes:user`` to find IAM policy bindings that + contain the "user" member type. page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a larger value is given. If @@ -910,6 +1087,39 @@ class SearchAllIamPoliciesRequest(proto.Message): be the value of ``next_page_token`` from the previous response. The values of all other method parameters must be identical to those in the previous call. + asset_types (Sequence[str]): + Optional. A list of asset types that the IAM policies are + attached to. If empty, it will search the IAM policies that + are attached to all the `searchable asset + types `__. + + Regular expressions are also supported. For example: + + - "compute.googleapis.com.*" snapshots IAM policies + attached to asset type starts with + "compute.googleapis.com". 
+ - ".*Instance" snapshots IAM policies attached to asset + type ends with "Instance". + - ".*Instance.*" snapshots IAM policies attached to asset + type contains "Instance". + + See `RE2 `__ for + all supported regular expression syntax. If the regular + expression does not match any supported asset type, an + INVALID_ARGUMENT error will be returned. + order_by (str): + Optional. A comma-separated list of fields specifying the + sorting order of the results. The default order is + ascending. Add " DESC" after the field name to indicate + descending order. Redundant space characters are ignored. + Example: "assetType DESC, resource". Only singular primitive + fields in the response are sortable: + + - resource + - assetType + - project All the other fields such as repeated fields + (e.g., ``folders``) and non-primitive fields (e.g., + ``policy``) are not supported. """ scope = proto.Field( @@ -928,6 +1138,14 @@ class SearchAllIamPoliciesRequest(proto.Message): proto.STRING, number=4, ) + asset_types = proto.RepeatedField( + proto.STRING, + number=5, + ) + order_by = proto.Field( + proto.STRING, + number=7, + ) class SearchAllIamPoliciesResponse(proto.Message): @@ -960,7 +1178,7 @@ def raw_page(self): class IamPolicyAnalysisQuery(proto.Message): - r"""IAM policy analysis query message. + r"""## IAM policy analysis query message. Attributes: scope (str): Required. The relative name of the root asset. Only @@ -987,6 +1205,9 @@ class IamPolicyAnalysisQuery(proto.Message): analysis. This is optional. options (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.Options): Optional. The query options. + condition_context (google.cloud.asset_v1.types.IamPolicyAnalysisQuery.ConditionContext): + Optional. The hypothetical context for IAM + conditions evaluation. """ class ResourceSelector(proto.Message): @@ -1172,6 +1393,23 @@ class Options(proto.Message): number=6, ) + class ConditionContext(proto.Message): + r"""The IAM conditions context. 
+ Attributes: + access_time (google.protobuf.timestamp_pb2.Timestamp): + The hypothetical access timestamp to evaluate IAM + conditions. Note that this value must not be earlier than + the current time; otherwise, an INVALID_ARGUMENT error will + be returned. + """ + + access_time = proto.Field( + proto.MESSAGE, + number=1, + oneof='TimeContext', + message=timestamp_pb2.Timestamp, + ) + scope = proto.Field( proto.STRING, number=1, @@ -1196,6 +1434,11 @@ class Options(proto.Message): number=5, message=Options, ) + condition_context = proto.Field( + proto.MESSAGE, + number=6, + message=ConditionContext, + ) class AnalyzeIamPolicyRequest(proto.Message): @@ -1323,11 +1566,16 @@ class GcsDestination(proto.Message): Attributes: uri (str): Required. The uri of the Cloud Storage object. It's the same - uri that is used by gsutil. For example: - "gs://bucket_name/object_name". See [Quickstart: Using the - gsutil tool] - (https://cloud.google.com/storage/docs/quickstart-gsutil) - for examples. + uri that is used by gsutil. Example: + "gs://bucket_name/object_name". See `Viewing and Editing + Object + Metadata `__ + for more information. + + If the specified Cloud Storage object already exists and + there is no + `hold `__, + it will be overwritten with the analysis result. 
""" uri = proto.Field( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 6c4611e717b7..b2b5a9156bff 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -36,6 +36,7 @@ 'ResourceSearchResult', 'IamPolicySearchResult', 'IamPolicyAnalysisState', + 'ConditionEvaluation', 'IamPolicyAnalysisResult', }, ) @@ -124,7 +125,8 @@ class Asset(proto.Message): hierarchy `__, a resource outside the Google Cloud resource hierarchy (such as Google Kubernetes Engine clusters and objects), or a policy (e.g. - Cloud IAM policy). See `Supported asset + Cloud IAM policy), or a relationship (e.g. an + INSTANCE_TO_INSTANCEGROUP relationship). See `Supported asset types `__ for more information. @@ -353,14 +355,40 @@ class ResourceSearchResult(proto.Message): - specify the ``asset_type`` field in your search request. project (str): The project that this resource belongs to, in the form of - projects/{PROJECT_NUMBER}. + projects/{PROJECT_NUMBER}. This field is available when the + resource belongs to a project. - To search against the ``project``: + To search against ``project``: + - use a field query. Example: ``project:12345`` + - use a free text query. Example: ``12345`` - specify the ``scope`` field as this project in your search request. + folders (Sequence[str]): + The folder(s) that this resource belongs to, in the form of + folders/{FOLDER_NUMBER}. This field is available when the + resource belongs to one or more folders. + + To search against ``folders``: + + - use a field query. Example: ``folders:(123 OR 456)`` + - use a free text query. Example: ``123`` + - specify the ``scope`` field as this folder in your search + request. 
+ organization (str): + The organization that this resource belongs to, in the form + of organizations/{ORGANIZATION_NUMBER}. This field is + available when the resource belongs to an organization. + + To search against ``organization``: + + - use a field query. Example: ``organization:123`` + - use a free text query. Example: ``123`` + - specify the ``scope`` field as this organization in your + search request. display_name (str): - The display name of this resource. + The display name of this resource. This field is available + only when the resource's proto contains it. To search against the ``display_name``: @@ -368,17 +396,18 @@ class ResourceSearchResult(proto.Message): - use a free text query. Example: ``"My Instance"`` description (str): One or more paragraphs of text description of this resource. - Maximum length could be up to 1M bytes. + Maximum length could be up to 1M bytes. This field is + available only when the resource's proto contains it. To search against the ``description``: - use a field query. Example: - ``description:"*important instance*"`` - - use a free text query. Example: - ``"*important instance*"`` + ``description:"important instance"`` + - use a free text query. Example: ``"important instance"`` location (str): Location can be ``global``, regional like ``us-east1``, or - zonal like ``us-west1-b``. + zonal like ``us-west1-b``. This field is available only when + the resource's proto contains it. To search against the ``location``: @@ -388,7 +417,8 @@ class ResourceSearchResult(proto.Message): Labels associated with this resource. See `Labelling and grouping GCP resources `__ - for more information. + for more information. This field is available only when the + resource's proto contains it. To search against the ``labels``: @@ -406,12 +436,78 @@ class ResourceSearchResult(proto.Message): network tags are a type of annotations used to group GCP resources. See `Labelling GCP resources `__ - for more information. + for more information. 
This field is available only when the + resource's proto contains it. To search against the ``network_tags``: - use a field query. Example: ``networkTags:internal`` - use a free text query. Example: ``internal`` + kms_key (str): + The Cloud KMS + `CryptoKey `__ + name or + `CryptoKeyVersion `__ + name. This field is available only when the resource's proto + contains it. + + To search against the ``kms_key``: + + - use a field query. Example: ``kmsKey:key`` + - use a free text query. Example: ``key`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + The create timestamp of this resource, at which the resource + was created. The granularity is in seconds. Timestamp.nanos + will always be 0. This field is available only when the + resource's proto contains it. + + To search against ``create_time``: + + - use a field query. + + - value in seconds since unix epoch. Example: + ``createTime > 1609459200`` + - value in date string. Example: + ``createTime > 2021-01-01`` + - value in date-time string (must be quoted). Example: + ``createTime > "2021-01-01T00:00:00"`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + The last update timestamp of this resource, at which the + resource was last modified or deleted. The granularity is in + seconds. Timestamp.nanos will always be 0. This field is + available only when the resource's proto contains it. + + To search against ``update_time``: + + - use a field query. + + - value in seconds since unix epoch. Example: + ``updateTime < 1609459200`` + - value in date string. Example: + ``updateTime < 2021-01-01`` + - value in date-time string (must be quoted). Example: + ``updateTime < "2021-01-01T00:00:00"`` + state (str): + The state of this resource. Different resources types have + different state definitions that are mapped from various + fields of different resource types. This field is available + only when the resource's proto contains it. 
+ + Example: If the resource is an instance provided by Compute + Engine, its state will include PROVISIONING, STAGING, + RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and + TERMINATED. See ``status`` definition in `API + Reference `__. + If the resource is a project provided by Cloud Resource + Manager, its state will include LIFECYCLE_STATE_UNSPECIFIED, + ACTIVE, DELETE_REQUESTED and DELETE_IN_PROGRESS. See + ``lifecycleState`` definition in `API + Reference `__. + + To search against the ``state``: + + - use a field query. Example: ``state:RUNNING`` + - use a free text query. Example: ``RUNNING`` additional_attributes (google.protobuf.struct_pb2.Struct): The additional searchable attributes of this resource. The attributes may vary from one resource type to another. @@ -421,7 +517,7 @@ class ResourceSearchResult(proto.Message): provided by the corresponding GCP service (e.g., Compute Engine). see `API references and supported searchable attributes `__ - for more information. + to see which fields are included. You can search values of these fields through free text search. However, you should not consume the field @@ -434,6 +530,23 @@ class ResourceSearchResult(proto.Message): Example: to search ``additional_attributes = { dnsName: "foobar" }``, you can issue a query ``foobar``. + parent_full_resource_name (str): + The full resource name of this resource's parent, if it has + one. To search against the ``parent_full_resource_name``: + + - use a field query. Example: + ``parentFullResourceName:"project-name"`` + - use a free text query. Example: ``project-name`` + parent_asset_type (str): + The type of this resource's immediate parent, if there is + one. + + To search against the ``parent_asset_type``: + + - use a field query. Example: + ``parentAssetType:"cloudresourcemanager.googleapis.com/Project"`` + - use a free text query. 
Example: + ``cloudresourcemanager.googleapis.com/Project`` """ name = proto.Field( @@ -448,6 +561,14 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=3, ) + folders = proto.RepeatedField( + proto.STRING, + number=17, + ) + organization = proto.Field( + proto.STRING, + number=18, + ) display_name = proto.Field( proto.STRING, number=4, @@ -469,11 +590,37 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=8, ) + kms_key = proto.Field( + proto.STRING, + number=10, + ) + create_time = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.STRING, + number=13, + ) additional_attributes = proto.Field( proto.MESSAGE, number=9, message=struct_pb2.Struct, ) + parent_full_resource_name = proto.Field( + proto.STRING, + number=19, + ) + parent_asset_type = proto.Field( + proto.STRING, + number=103, + ) class IamPolicySearchResult(proto.Message): @@ -493,18 +640,47 @@ class IamPolicySearchResult(proto.Message): - use a field query. Example: ``resource:organizations/123`` + asset_type (str): + The type of the resource associated with this IAM policy. + Example: ``compute.googleapis.com/Disk``. + + To search against the ``asset_type``: + + - specify the ``asset_types`` field in your search request. project (str): The project that the associated GCP resource belongs to, in the form of projects/{PROJECT_NUMBER}. If an IAM policy is set on a resource (like VM instance, Cloud Storage bucket), the project field will indicate the project that contains the resource. If an IAM policy is set on a folder or - orgnization, the project field will be empty. + orgnization, this field will be empty. To search against the ``project``: - specify the ``scope`` field as this project in your search request. 
+ folders (Sequence[str]): + The folder(s) that the IAM policy belongs to, in the form of + folders/{FOLDER_NUMBER}. This field is available when the + IAM policy belongs to one or more folders. + + To search against ``folders``: + + - use a field query. Example: ``folders:(123 OR 456)`` + - use a free text query. Example: ``123`` + - specify the ``scope`` field as this folder in your search + request. + organization (str): + The organization that the IAM policy belongs to, in the form + of organizations/{ORGANIZATION_NUMBER}. This field is + available when the IAM policy belongs to an organization. + + To search against ``organization``: + + - use a field query. Example: ``organization:123`` + - use a free text query. Example: ``123`` + - specify the ``scope`` field as this organization in your + search request. policy (google.iam.v1.policy_pb2.Policy): The IAM policy directly set on the given resource. Note that the original IAM policy can contain multiple bindings. This @@ -569,10 +745,22 @@ class Permissions(proto.Message): proto.STRING, number=1, ) + asset_type = proto.Field( + proto.STRING, + number=5, + ) project = proto.Field( proto.STRING, number=2, ) + folders = proto.RepeatedField( + proto.STRING, + number=6, + ) + organization = proto.Field( + proto.STRING, + number=7, + ) policy = proto.Field( proto.MESSAGE, number=3, @@ -616,6 +804,26 @@ class IamPolicyAnalysisState(proto.Message): ) +class ConditionEvaluation(proto.Message): + r"""The Condition evaluation. + Attributes: + evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): + The evaluation result. 
+ """ + class EvaluationValue(proto.Enum): + r"""Value of this expression.""" + EVALUATION_VALUE_UNSPECIFIED = 0 + TRUE = 1 + FALSE = 2 + CONDITIONAL = 3 + + evaluation_value = proto.Field( + proto.ENUM, + number=1, + enum=EvaluationValue, + ) + + class IamPolicyAnalysisResult(proto.Message): r"""IAM Policy analysis result, consisting of one IAM policy binding and derived access control lists. @@ -786,6 +994,10 @@ class AccessControlList(proto.Message): contains the full resource name of a child resource. This field is present only if the output_resource_edges option is enabled in request. + condition_evaluation (google.cloud.asset_v1.types.ConditionEvaluation): + Condition evaluation for this + AccessControlList, if there is a condition + defined in the above IAM policy binding. """ resources = proto.RepeatedField( @@ -803,6 +1015,11 @@ class AccessControlList(proto.Message): number=3, message='IamPolicyAnalysisResult.Edge', ) + condition_evaluation = proto.Field( + proto.MESSAGE, + number=4, + message='ConditionEvaluation', + ) class IdentityList(proto.Message): r"""The identities and group edges. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py new file mode 100644 index 000000000000..3f79199a3359 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_ListAssets_async] +from google.cloud import asset_v1 + + +async def sample_list_assets(): + """Snippet for list_assets""" + + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.ListAssetsRequest( + ) + + # Make the request + page_result = client.list_assets(request=request) + async for response in page_result: + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_ListAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py new file mode 100644 index 000000000000..78dfb91b9e42 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_generated_asset_v1_AssetService_ListAssets_sync] +from google.cloud import asset_v1 + + +def sample_list_assets(): + """Snippet for list_assets""" + + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListAssetsRequest( + ) + + # Make the request + page_result = client.list_assets(request=request) + for response in page_result: + print("{}".format(response)) + +# [END cloudasset_generated_asset_v1_AssetService_ListAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index b0cdcf3f4a56..4a9e79e96cb2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -46,8 +46,9 @@ class assetCallTransformer(cst.CSTTransformer): 'delete_feed': ('name', ), 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', ), 'get_feed': ('name', ), + 'list_assets': ('parent', 'read_time', 
'asset_types', 'content_type', 'page_size', 'page_token', ), 'list_feeds': ('parent', ), - 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', ), + 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', 'asset_types', 'order_by', ), 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), 'update_feed': ('feed', 'update_mask', ), } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 84505a99d1e3..2d31593c3ac4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -528,6 +528,423 @@ async def test_export_assets_field_headers_async(): ) in kw['metadata'] +def test_list_assets(transport: str = 'grpc', request_type=asset_service.ListAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_assets_from_dict(): + test_list_assets(request_type=dict) + + +def test_list_assets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + client.list_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest() + + +@pytest.mark.asyncio +async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAssetsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_assets_async_from_dict(): + await test_list_assets_async(request_type=dict) + + +def test_list_assets_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListAssetsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = asset_service.ListAssetsResponse() + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_assets_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListAssetsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) + await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_assets_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListAssetsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_assets( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_assets_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assets( + asset_service.ListAssetsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_assets_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = asset_service.ListAssetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_assets( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_assets_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_assets( + asset_service.ListAssetsRequest(), + parent='parent_value', + ) + + +def test_list_assets_pager(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_assets(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, assets.Asset) + for i in results) + +def test_list_assets_pages(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + RuntimeError, + ) + pages = list(client.list_assets(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_assets_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_assets(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, assets.Asset) + for i in responses) + +@pytest.mark.asyncio +async def test_list_assets_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_assets(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + def test_batch_get_assets_history(transport: str = 'grpc', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3102,6 +3519,7 @@ def test_asset_service_base_transport(): # raise NotImplementedError. methods = ( 'export_assets', + 'list_assets', 'batch_get_assets_history', 'create_feed', 'get_feed', From 184b557e4a9edaf60274b5f487b97bd55e513b39 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 24 Jun 2021 22:48:44 +0000 Subject: [PATCH 0579/1339] chore: release 0.50.1 (#928) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit :robot: I have created a release \*beep\* \*boop\* --- ### [0.50.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.0...v0.50.1) (2021-06-24) ### Bug Fixes * **bazel:** Re-enable Python µgen integration tests post monolith rule removal ([#926](https://www.github.com/googleapis/gapic-generator-python/issues/926)) ([13a6b3a](https://www.github.com/googleapis/gapic-generator-python/commit/13a6b3aed35b5af85aea047922aa219258460a58)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ffa64685fae0..fc7e6a9a4cd5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.50.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.0...v0.50.1) (2021-06-24) + + +### Bug Fixes + +* **bazel:** Re-enable Python µgen integration tests post monolith rule removal ([#926](https://www.github.com/googleapis/gapic-generator-python/issues/926)) ([13a6b3a](https://www.github.com/googleapis/gapic-generator-python/commit/13a6b3aed35b5af85aea047922aa219258460a58)) + ## [0.50.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.49.0...v0.50.0) (2021-06-21) From 359f46c2b4d20c8086ef3d92b47c56c6d6fc23c5 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 28 Jun 2021 10:32:37 -0600 Subject: [PATCH 0580/1339] fix: import warnings when needed (#930) * fix: import warnings when needed * fix: remove stray } --- .../%name/%version/%sub/services/%service/client.py.j2 | 7 ++++++- packages/gapic-generator/gapic/schema/wrappers.py | 4 ++++ .../%sub/services/%service/async_client.py.j2 | 8 ++++++++ .../%name_%version/%sub/services/%service/client.py.j2 | 7 ++++++- 4 files changed, 24 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index bb9425bbfa68..71aa2f7cba59 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -8,6 +8,9 @@ import os import re from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources +{% if service.any_deprecated %} +import warnings +{% endif %} from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore @@ -335,7 +338,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} """ {% if method.is_deprecated %} - warnings.warn("{{ method.name|snake_case }} is deprecated", warnings.DeprecationWarning) + warnings.warn("{{ service.client_name }}.{{ method.name|snake_case }} is deprecated", + warnings.DeprecationWarning) + {% endif %} {% if not method.client_streaming %} # Create or coerce a protobuf request object. diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 249ca5b5d4e0..0e5945892144 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1207,6 +1207,10 @@ def any_client_streaming(self) -> bool: def any_server_streaming(self) -> bool: return any(m.server_streaming for m in self.methods.values()) + @utils.cached_property + def any_deprecated(self) -> bool: + return any(m.is_deprecated for m in self.methods.values()) + def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """Return a derivative of this service with the provided context. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 3c5988073cd2..fcff6b82ab69 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -7,6 +7,9 @@ import functools import re from typing import Dict, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources +{% if service.any_deprecated %} +import warnings +{% endif %} import google.api_core.client_options as ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore @@ -196,6 +199,11 @@ class {{ service.async_client_name }}: {{ method.client_output_async.meta.doc|rst(width=72, indent=16, source_format='rst') }} {% endif %} """ + {% if method.is_deprecated %} + warnings.warn("{{ service.async_client_name }}.{{ method.name|snake_case }} is deprecated", + warnings.DeprecationWarning) + + {% endif %} {% if not method.client_streaming %} # Create or coerce a protobuf request object. 
{% if method.flattened_fields %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 9db0f092d18b..26c1ec8e3d04 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -8,6 +8,9 @@ import os import re from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources +{% if service.any_deprecated %} +import warnings +{% endif %} from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore @@ -360,7 +363,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} """ {% if method.is_deprecated %} - warnings.warn("{{ method.name|snake_case }} is deprecated", warnings.DeprecationWarning) + warnings.warn("{{ service.client_name }}.{{ method.name|snake_case }} is deprecated", + warnings.DeprecationWarning) + {% endif %} {% if not method.client_streaming %} # Create or coerce a protobuf request object. 
From cfca63b07f76973fdd22aee65ed60c8f11b8ddd8 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Mon, 28 Jun 2021 10:00:00 -0700 Subject: [PATCH 0581/1339] fix: fix wrong scopes for self signed jwt (#935) --- .../services/%service/transports/base.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 18 +++----------- .../services/asset_service/transports/base.py | 2 +- .../unit/gapic/asset_v1/test_asset_service.py | 12 +++------- .../iam_credentials/transports/base.py | 2 +- .../credentials_v1/test_iam_credentials.py | 12 +++------- .../config_service_v2/transports/base.py | 2 +- .../logging_service_v2/transports/base.py | 2 +- .../metrics_service_v2/transports/base.py | 2 +- .../logging_v2/test_config_service_v2.py | 21 +++------------- .../logging_v2/test_logging_service_v2.py | 24 +++---------------- .../logging_v2/test_metrics_service_v2.py | 24 +++---------------- .../services/cloud_redis/transports/base.py | 2 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 12 +++------- 14 files changed, 28 insertions(+), 109 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 09fb65cbe8bb..df07685e6c6b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -111,7 +111,7 @@ class {{ service.name }}Transport(abc.ABC): scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index bfcab30571f3..587679e8df7c 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1667,11 +1667,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtl "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -1784,11 +1780,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -1829,11 +1821,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 6d94cb355084..cd427b6a2a18 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -101,7 +101,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 2d31593c3ac4..018e0e9dbd27 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -3711,9 +3711,7 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -3810,9 +3808,7 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -3853,9 +3849,7 @@ def test_asset_service_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index d15cfc9c1ea8..4c45c7dda022 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -98,7 +98,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index f0f99349b769..4f77de860070 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1640,9 +1640,7 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -1739,9 +1737,7 @@ def test_iam_credentials_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -1782,9 +1778,7 @@ def test_iam_credentials_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", 
credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 39b42884c1e9..1c5dae9241ae 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -102,7 +102,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 4a482cd6c895..4dd3a952259c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -103,7 +103,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index b5f5613b955a..dbd78ea6a797 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -103,7 +103,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index dde627c9d21e..7677467375bf 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -6077,12 +6077,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -6179,12 +6174,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, 
credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -6225,12 +6215,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e583321973bd..19889d562bb8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2204,13 +2204,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2307,13 +2301,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, 
credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2354,13 +2342,7 @@ def test_logging_service_v2_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 5310d8382c3e..cb8188a8b961 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2069,13 +2069,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2172,13 +2166,7 @@ def 
test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2219,13 +2207,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index cdaaa458c1fd..e9ade44365cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -100,7 +100,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 904da1cd1b22..70fc02b2f669 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -3020,9 +3020,7 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -3119,9 +3117,7 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source( "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -3162,9 +3158,7 @@ def test_cloud_redis_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ From bd2cc248d898fe7691221a4fd19a258fb72c37ba Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Jun 2021 10:10:47 -0700 Subject: [PATCH 0582/1339] chore: release 0.50.2 (#938) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index fc7e6a9a4cd5..977d07835e54 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.50.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.1...v0.50.2) (2021-06-28) + + +### Bug Fixes + +* fix wrong scopes for self signed jwt ([#935](https://www.github.com/googleapis/gapic-generator-python/issues/935)) ([e033acd](https://www.github.com/googleapis/gapic-generator-python/commit/e033acd44763f7cf65eabb6b35f66093022b1bcb)) +* import warnings when needed ([#930](https://www.github.com/googleapis/gapic-generator-python/issues/930)) ([d4270ae](https://www.github.com/googleapis/gapic-generator-python/commit/d4270ae5805f44ab8ee30bb11fe42a0da6d79755)) + ### [0.50.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.0...v0.50.1) (2021-06-24) From c361a335b6d51b2f6036881351934f95b11028db Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 29 Jun 2021 09:42:57 -0600 Subject: [PATCH 0583/1339] test: re-add coverage requirement (#937) Co-authored-by: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> --- .../.github/sync-repo-settings.yaml | 2 - packages/gapic-generator/noxfile.py | 21 +++++++--- .../unit/schema/wrappers/test_service.py | 39 +++++++++++++++++++ 3 files changed, 54 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 30ca3f5c8fe2..aaca04c67556 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -6,8 +6,6 @@ branchProtectionRules: isAdminEnforced: true requiredStatusCheckContexts: - 'cla/google' - - 'codecov/patch' - - 'codecov/project' - 'conventionalcommits.org' - 'docs' - 'mypy' diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 674fe965f997..5296514a68e6 100644 --- a/packages/gapic-generator/noxfile.py +++ 
b/packages/gapic-generator/noxfile.py @@ -34,7 +34,7 @@ def unit(session): """Run the unit test suite.""" session.install( - "pytest", "pytest-xdist", "pyfakefs", + "coverage", "pytest-cov", "pytest", "pytest-xdist", "pyfakefs", ) session.install("-e", ".") @@ -45,6 +45,10 @@ def unit(session): or [ "-vv", "-n=auto", + "--cov=gapic", + "--cov-config=.coveragerc", + "--cov-report=term", + "--cov-fail-under=100", path.join("tests", "unit"), ] ), @@ -115,7 +119,7 @@ def showcase_library( ) # Install the library. - session.install(tmp_dir) + session.install("-e", tmp_dir) yield tmp_dir @@ -181,7 +185,9 @@ def showcase_mtls_alternative_templates(session): def run_showcase_unit_tests(session, fail_under=100): session.install( + "coverage", "pytest", + "pytest-cov", "pytest-xdist", "asyncmock", "pytest-asyncio", @@ -192,6 +198,9 @@ def run_showcase_unit_tests(session, fail_under=100): "py.test", "-n=auto", "--quiet", + "--cov=google", + "--cov-append", + f"--cov-fail-under={str(fail_under)}", *(session.posargs or [path.join("tests", "unit")]), ) @@ -217,11 +226,11 @@ def showcase_unit( # google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. session.install("google-auth==1.21.1") - run_showcase_unit_tests(session) + run_showcase_unit_tests(session, fail_under=0) # 2. Run the tests again with latest version of dependencies session.install(".", "--upgrade", "--force-reinstall") - run_showcase_unit_tests(session) + run_showcase_unit_tests(session, fail_under=100) @nox.session(python=["3.7", "3.8", "3.9"]) @@ -248,11 +257,11 @@ def showcase_unit_add_iam_methods(session): # google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. session.install("google-auth==1.21.1") - run_showcase_unit_tests(session) + run_showcase_unit_tests(session, fail_under=0) # 2. 
Run the tests again with latest version of dependencies session.install(".", "--upgrade", "--force-reinstall") - run_showcase_unit_tests(session) + run_showcase_unit_tests(session, fail_under=100) @nox.session(python="3.8") diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index ef14e27a6ff3..9c47797dafc3 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -298,6 +298,45 @@ def test_service_any_streaming(): assert service.any_server_streaming == server +def test_service_any_deprecated(): + service = make_service( + name='Service', + methods=( + ( + make_method( + f"GetMollusc", + input_message=make_message( + "GetMolluscRequest", + ), + output_message=make_message( + "GetMolluscResponse", + ), + ), + ) + )) + + assert service.any_deprecated == False + + deprecated_service = make_service( + name='ServiceWithDeprecatedMethod', + methods=( + ( + make_method( + f"GetMollusc", + input_message=make_message( + "GetMolluscRequest", + ), + output_message=make_message( + "GetMolluscResponse", + ), + is_deprecated=True, + ), + ) + )) + + assert deprecated_service.any_deprecated == True + + def test_has_pagers(): paged = make_field(name='foos', message=make_message('Foo'), repeated=True) input_msg = make_message( From 6aa57bc5394073df132c0b2b465684e62d61f367 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 29 Jun 2021 10:50:35 -0600 Subject: [PATCH 0584/1339] fix: disable always_use_jwt_access (#939) Some APIs with Cloud Storage integrations have failing samples when non-default scopes are provided. This PR disables the feature globally for now while we investigate. We can no longer rollback da119c72c8 as some libraries have been released with the change and removing the kwarg would be a breaking change. 
See internal issue 192297181. --- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 5 ++++- .../%service/transports/grpc_asyncio.py.j2 | 5 ++++- .../%name_%version/%sub/test_%service.py.j2 | 17 ++++++++++++++++- .../services/asset_service/transports/grpc.py | 5 ++++- .../asset_service/transports/grpc_asyncio.py | 5 ++++- .../unit/gapic/asset_v1/test_asset_service.py | 13 ++++++++++++- .../services/iam_credentials/transports/grpc.py | 5 ++++- .../iam_credentials/transports/grpc_asyncio.py | 5 ++++- .../credentials_v1/test_iam_credentials.py | 13 ++++++++++++- .../config_service_v2/transports/grpc.py | 5 ++++- .../transports/grpc_asyncio.py | 5 ++++- .../logging_service_v2/transports/grpc.py | 5 ++++- .../transports/grpc_asyncio.py | 5 ++++- .../metrics_service_v2/transports/grpc.py | 5 ++++- .../transports/grpc_asyncio.py | 5 ++++- .../gapic/logging_v2/test_config_service_v2.py | 13 ++++++++++++- .../gapic/logging_v2/test_logging_service_v2.py | 13 ++++++++++++- .../gapic/logging_v2/test_metrics_service_v2.py | 13 ++++++++++++- .../services/cloud_redis/transports/grpc.py | 5 ++++- .../cloud_redis/transports/grpc_asyncio.py | 5 ++++- .../unit/gapic/redis_v1/test_cloud_redis.py | 13 ++++++++++++- 22 files changed, 145 insertions(+), 22 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index df07685e6c6b..76d8fccbddd0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -131,7 +131,7 @@ class {{ service.name }}Transport(abc.ABC): # If the credentials is service account credentials, then always try to use self signed JWT. 
if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) - + # Save the credentials. self._credentials = credentials diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index c2dd618569e5..a534ed95d723 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -55,6 +55,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -95,6 +96,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -150,7 +153,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 24fc3476a146..15f290f3b844 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -100,6 +100,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -141,6 +142,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -195,7 +198,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 587679e8df7c..4792b4976eb7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -123,7 +123,22 @@ def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(cli with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + {% if 'grpc' in opts.transport %} + (transports.{{ service.grpc_transport_name }}, "grpc"), + (transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), + {% elif 'rest' in opts.transport %} + (transports.{{ service.rest_transport_name }}, "rest"), + {% endif %} +]) +def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 60326ac61ddf..d20c2b2418e1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -57,6 +57,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -97,6 +98,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -150,7 +153,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 748505dd03e9..0634b4f4cd83 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -102,6 +102,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -143,6 +144,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -195,7 +198,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 018e0e9dbd27..5d5e291df275 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -113,7 +113,18 @@ def test_asset_service_client_service_account_always_use_jwt(client_class): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.AssetServiceGrpcTransport, "grpc"), + (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_asset_service_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 76c83a0b0380..90204dac799a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -63,6 +63,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -103,6 +104,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -155,7 +158,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 65aa22ae67e0..c27282cd395a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -108,6 +108,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -149,6 +150,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -200,7 +203,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 4f77de860070..1603f0e7e67a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -105,7 +105,18 @@ def test_iam_credentials_client_service_account_always_use_jwt(client_class): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.IAMCredentialsGrpcTransport, "grpc"), + (transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_iam_credentials_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 75441232816e..5d8245df1c19 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -55,6 +55,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -95,6 +96,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -147,7 +150,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 970b3049a7c2..07343e8297f0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -100,6 +100,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -141,6 +142,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -192,7 +195,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index bf8e6e527c20..d527a00bba36 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -55,6 +55,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -95,6 +96,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -147,7 +150,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index dd277841974b..7293506fcda9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -100,6 +100,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -141,6 +142,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -192,7 +195,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index a8bbf146c957..2f34d5651f1d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -55,6 +55,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -95,6 +96,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -147,7 +150,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index bb19604e441d..33692de4aa92 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -100,6 +100,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -141,6 +142,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -192,7 +195,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 7677467375bf..0b47d5216e43 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -106,7 +106,18 @@ def test_config_service_v2_client_service_account_always_use_jwt(client_class): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.ConfigServiceV2GrpcTransport, "grpc"), + (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_config_service_v2_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 19889d562bb8..817eb6260f5f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -112,7 +112,18 @@ def test_logging_service_v2_client_service_account_always_use_jwt(client_class): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LoggingServiceV2GrpcTransport, "grpc"), + (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index cb8188a8b961..eb61169d7339 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -110,7 +110,18 @@ def 
test_metrics_service_v2_client_service_account_always_use_jwt(client_class): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.MetricsServiceV2GrpcTransport, "grpc"), + (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_metrics_service_v2_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 8b7117bfdf76..5801e47267a1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -76,6 +76,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -116,6 +117,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. 
Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -169,7 +172,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 55f04e1ab9b4..d4e7de81c8ff 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -121,6 +121,7 @@ def __init__(self, *, client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -162,6 +163,8 @@ def __init__(self, *, API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -214,7 +217,7 @@ def __init__(self, *, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 70fc02b2f669..d39ceb9cbfe2 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -110,7 +110,18 @@ def test_cloud_redis_client_service_account_always_use_jwt(client_class): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.CloudRedisGrpcTransport, "grpc"), + (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_cloud_redis_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [ From e0543b07fbd10b7ba9860f3ca14e64ab1855b24c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 29 Jun 2021 10:13:03 -0700 Subject: [PATCH 0585/1339] chore: release 0.50.3 (#940) 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 977d07835e54..58ddd896071c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.50.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.2...v0.50.3) (2021-06-29) + + +### Bug Fixes + +* disable always_use_jwt_access ([#939](https://www.github.com/googleapis/gapic-generator-python/issues/939)) ([1302352](https://www.github.com/googleapis/gapic-generator-python/commit/130235220849987df572c1840735b3c199b85dfc)) + ### [0.50.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.1...v0.50.2) (2021-06-28) From 7b53dcce153051453ac1f8670429db99af6d3caf Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 30 Jun 2021 14:11:18 -0600 Subject: [PATCH 0586/1339] fix: fix syntax for Deprecationwarning (#942) --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 71aa2f7cba59..946cb69a380e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -339,7 +339,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ {% if 
method.is_deprecated %} warnings.warn("{{ service.client_name }}.{{ method.name|snake_case }} is deprecated", - warnings.DeprecationWarning) + DeprecationWarning) {% endif %} {% if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index fcff6b82ab69..ea40ea1c255f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -201,7 +201,7 @@ class {{ service.async_client_name }}: """ {% if method.is_deprecated %} warnings.warn("{{ service.async_client_name }}.{{ method.name|snake_case }} is deprecated", - warnings.DeprecationWarning) + DeprecationWarning) {% endif %} {% if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 26c1ec8e3d04..5b6c2c03aebf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -364,7 +364,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ {% if method.is_deprecated %} warnings.warn("{{ service.client_name }}.{{ method.name|snake_case }} is deprecated", - warnings.DeprecationWarning) + DeprecationWarning) {% endif %} {% if not method.client_streaming %} From bb9de0cb9f153e3848c8fafbbb3273dd2e13ee96 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Jun 2021 14:59:48 -0600 Subject: 
[PATCH 0587/1339] chore: release 0.50.4 (#943) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 58ddd896071c..4857ad570f85 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.50.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.3...v0.50.4) (2021-06-30) + + +### Bug Fixes + +* fix syntax for Deprecationwarning ([#942](https://www.github.com/googleapis/gapic-generator-python/issues/942)) ([82dbddb](https://www.github.com/googleapis/gapic-generator-python/commit/82dbddb6a9caf1227c4b335345f365dd01025794)) + ### [0.50.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.2...v0.50.3) (2021-06-29) From 5a1b403826b7a59241145e7c1c4f36180141b14a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 9 Jul 2021 22:07:37 +0200 Subject: [PATCH 0588/1339] chore(deps): update dependency google-api-core to v1.31.0 (#949) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index c98e3036ad5e..166280e9d419 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==1.30.0 +google-api-core==1.31.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From 85aa5c3fef28f9aa604969f0b15eff0e67fafeac Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 12 Jul 2021 13:38:56 -0700 Subject: [PATCH 0589/1339] fix: fix rest transport unit test and required query prams handling (#951) --- .../%sub/services/%service/transports/rest.py.j2 | 12 
++++-------- .../gapic/%name_%version/%sub/test_%service.py.j2 | 3 ++- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 2961172ace9b..b22ced97ab91 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -52,6 +52,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -88,6 +89,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): host=host, credentials=credentials, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) {% if service.has_lro %} @@ -196,23 +198,17 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): if {{ method.input.ident }}.{{ field }} in request: query_params['{{ field|camel_case }}'] = request.{{ field }} {% else %} - if request.{{ field }}: - query_params['{{ field|camel_case }}'] = request.{{ field }} + query_params['{{ field|camel_case }}'] = request.{{ field }} {% endif %} {% endfor %} - # TODO(yon-mg): further discussion needed whether 'python truthiness' is appropriate here - # discards default values - # TODO(yon-mg): add test for proper url encoded strings - query_params = ['{k}={v}'.format(k=k, v=v) for k, v in query_params.items()] - url += '?{}'.format('&'.join(query_params)).replace(' 
', '+') - # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' response = self._session.{{ method.http_opt['verb'] }}( url, headers=headers, + params=query_params, {% if 'body' in method.http_opt %} data=body, {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4792b4976eb7..cd6c6bb44dbf 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1220,6 +1220,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(): assert len(req.mock_calls) == 1 _, http_call, http_params = req.mock_calls[0] body = http_params.get('data') + params = http_params.get('params') {% for key, field in method.flattened_fields.items() %} {% if not field.oneof or field.proto3_optional %} {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} @@ -1230,7 +1231,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(): assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False, use_integers_for_enums=False) {%- elif field.field_pb is str_field_pb %}{{ field.mock_value }} {%- else %}str({{ field.mock_value }}) - {%- endif %} in http_call[1] + str(body) + {%- endif %} in http_call[1] + str(body) + str(params) {% endif %} {% endif %}{% endfor %} From edf63031de19c5579059abb6f898674f25345cbf Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Tue, 13 Jul 2021 09:38:22 +0000 Subject: [PATCH 0590/1339] chore: add CONTRIBUTING.md (#953) chore: add CONTRIBUTING.md --- packages/gapic-generator/CONTRIBUTING.md | 28 ++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create 
mode 100644 packages/gapic-generator/CONTRIBUTING.md diff --git a/packages/gapic-generator/CONTRIBUTING.md b/packages/gapic-generator/CONTRIBUTING.md new file mode 100644 index 000000000000..6272489dae31 --- /dev/null +++ b/packages/gapic-generator/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to <https://cla.developers.google.com/> to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code Reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google/conduct/). 
From 1446eb9bfc110050e9e5f249de12917e3971db42 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 13 Jul 2021 13:04:44 -0400 Subject: [PATCH 0591/1339] chore: delete CONTRIBUTING.rst (#954) --- packages/gapic-generator/CONTRIBUTING.rst | 22 ---------------------- 1 file changed, 22 deletions(-) delete mode 100644 packages/gapic-generator/CONTRIBUTING.rst diff --git a/packages/gapic-generator/CONTRIBUTING.rst b/packages/gapic-generator/CONTRIBUTING.rst deleted file mode 100644 index 2e0d8efd6790..000000000000 --- a/packages/gapic-generator/CONTRIBUTING.rst +++ /dev/null @@ -1,22 +0,0 @@ -Contributing -============ - -We are thrilled that you are interested in contributing to this project. -Please open an issue or pull request with your ideas. - - -Contributor License Agreements ------------------------------- - -Before we can accept your pull requests, you will need to sign a Contributor -License Agreement (CLA): - -- **If you are an individual writing original source code** and **you own the - intellectual property**, then you'll need to sign an - `individual CLA <https://developers.google.com/open-source/cla/individual>`__. -- **If you work for a company that wants to allow you to contribute your work**, - then you'll need to sign a - `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__. - -You can sign these electronically (just scroll to the bottom). After that, -we'll be able to accept your pull requests. From ed6e038dfc4e7a290f73723ac1ab9048573cc6db Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 15 Jul 2021 15:49:52 -0400 Subject: [PATCH 0592/1339] chore: allow 'google-api-core' 2.x versions (#956) Closes #955. 
Co-authored-by: Bu Sun Kim --- packages/gapic-generator/gapic/ads-templates/setup.py.j2 | 2 +- packages/gapic-generator/gapic/templates/setup.py.j2 | 3 ++- .../gapic-generator/tests/integration/goldens/asset/setup.py | 3 ++- .../tests/integration/goldens/credentials/setup.py | 3 ++- .../gapic-generator/tests/integration/goldens/logging/setup.py | 3 ++- .../gapic-generator/tests/integration/goldens/redis/setup.py | 3 ++- 6 files changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 559bc570014b..6266d0ae3ca8 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -17,7 +17,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core >= 1.22.2, < 2.0.0dev', + 'google-api-core >= 1.22.2, < 3.0.0dev', 'googleapis-common-protos >= 1.5.8', 'grpcio >= 1.10.0', 'proto-plus >= 1.15.0', diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 2dc389be2c57..39ef330320c2 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -27,7 +27,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 'packaging >= 14.3', @@ -43,6 +43,7 @@ setuptools.setup( 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py 
b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 1aece31bd3f4..43eb370430ef 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -34,7 +34,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), @@ -46,6 +46,7 @@ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index fe435284e2cc..825178c9f678 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -34,7 +34,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 'packaging >= 14.3', ), @@ -46,6 +46,7 @@ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 32b47ac8f030..4b98728b93f3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -34,7 +34,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 'packaging >= 14.3', ), @@ -46,6 +46,7 @@ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index d3a786b97d3f..9d98a420c25c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -34,7 +34,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.15.0', 'packaging >= 14.3', ), @@ -46,6 +46,7 @@ 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], From bcea265c29202a0b6ea9e2eeb2836e04633752d0 Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 21 Jul 2021 17:08:39 -0700 Subject: [PATCH 0593/1339] fix: enable self signed jwt for grpc (#958) --- .../%sub/services/%service/client.py.j2 | 6 +++ .../%name_%version/%sub/test_%service.py.j2 | 48 +++++++++++++------ .../asset_v1/services/asset_service/client.py | 4 ++ .../unit/gapic/asset_v1/test_asset_service.py | 28 ++++++----- 
.../services/iam_credentials/client.py | 4 ++ .../credentials_v1/test_iam_credentials.py | 28 ++++++----- .../services/config_service_v2/client.py | 4 ++ .../services/logging_service_v2/client.py | 4 ++ .../services/metrics_service_v2/client.py | 4 ++ .../logging_v2/test_config_service_v2.py | 28 ++++++----- .../logging_v2/test_logging_service_v2.py | 28 ++++++----- .../logging_v2/test_metrics_service_v2.py | 28 ++++++----- .../redis_v1/services/cloud_redis/client.py | 4 ++ .../unit/gapic/redis_v1/test_cloud_redis.py | 28 ++++++----- 14 files changed, 160 insertions(+), 86 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 5b6c2c03aebf..9bae12313cb6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -304,6 +304,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + {% if "grpc" in opts.transport %} + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + {% endif %} ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index cd6c6bb44dbf..61618036d404 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -113,19 +113,6 @@ def test_{{ 
service.client_name|snake_case }}_from_service_account_info(client_c {% endif %} -@pytest.mark.parametrize("client_class", [ - {{ service.client_name }}, - {% if 'grpc' in opts.transport %} - {{ service.async_client_name }}, - {% endif %} -]) -def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ {% if 'grpc' in opts.transport %} (transports.{{ service.grpc_transport_name }}, "grpc"), @@ -134,12 +121,17 @@ def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(cli (transports.{{ service.rest_transport_name }}, "rest"), {% endif %} ]) -def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt_true(transport_class, transport_name): +def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ {{ service.client_name }}, @@ -216,6 +208,9 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + 
always_use_jwt_access=True, + {% endif %} ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -232,6 +227,9 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -248,6 +246,9 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -274,6 +275,9 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -319,6 +323,9 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -344,6 +351,9 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) # Check the case client_cert_source and ADC client cert are not provided. @@ -360,6 +370,9 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) @@ -387,6 +400,9 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -413,6 +429,9 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + {% if 'grpc' in opts.transport %} + always_use_jwt_access=True, + {% endif %} ) {% if 'grpc' in opts.transport %} @@ -431,6 +450,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) {% endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index aa285d830e49..bcc70595025f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -344,6 +344,10 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def export_assets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 5d5e291df275..87c110ebcb55 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -105,27 +105,21 @@ def test_asset_service_client_from_service_account_info(client_class): assert client.transport._host == 'cloudasset.googleapis.com:443' -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, - AssetServiceAsyncClient, -]) -def test_asset_service_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ (transports.AssetServiceGrpcTransport, "grpc"), (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), ]) -def test_asset_service_client_service_account_always_use_jwt_true(transport_class, transport_name): +def test_asset_service_client_service_account_always_use_jwt(transport_class, 
transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ AssetServiceClient, @@ -190,6 +184,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -206,6 +201,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -222,6 +218,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -248,6 +245,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -286,6 +284,7 @@ def 
test_asset_service_client_mtls_env_auto(client_class, transport_class, trans client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -311,6 +310,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -327,6 +327,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -350,6 +351,7 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -372,6 +374,7 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -389,6 +392,7 @@ def test_asset_service_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 8fe89f514f8d..4238eb763d31 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -340,6 +340,10 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def generate_access_token(self, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 1603f0e7e67a..74dacb0923ff 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -97,27 +97,21 @@ def test_iam_credentials_client_from_service_account_info(client_class): assert client.transport._host == 'iamcredentials.googleapis.com:443' -@pytest.mark.parametrize("client_class", [ - IAMCredentialsClient, - IAMCredentialsAsyncClient, -]) -def test_iam_credentials_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ (transports.IAMCredentialsGrpcTransport, "grpc"), (transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), ]) -def 
test_iam_credentials_client_service_account_always_use_jwt_true(transport_class, transport_name): +def test_iam_credentials_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ IAMCredentialsClient, @@ -182,6 +176,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -198,6 +193,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,6 +210,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -240,6 +237,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -278,6 +276,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -303,6 +302,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -319,6 +319,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -342,6 +343,7 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -364,6 +366,7 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -381,6 +384,7 @@ def test_iam_credentials_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 67658652a61d..ad8f294462bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -375,6 +375,10 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_buckets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 8c16313ba974..03b5f832f0d7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -331,6 +331,10 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def delete_log(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index d733b400b3c9..a4d4efcda78a 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -332,6 +332,10 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_log_metrics(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 0b47d5216e43..979cbd360592 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -98,27 +98,21 @@ def test_config_service_v2_client_from_service_account_info(client_class): assert client.transport._host == 'logging.googleapis.com:443' -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, -]) -def test_config_service_v2_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ (transports.ConfigServiceV2GrpcTransport, "grpc"), (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ]) -def test_config_service_v2_client_service_account_always_use_jwt_true(transport_class, transport_name): +def 
test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ ConfigServiceV2Client, @@ -183,6 +177,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -199,6 +194,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -215,6 +211,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -241,6 +238,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) 
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -279,6 +277,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -304,6 +303,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -320,6 +320,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -343,6 +344,7 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -365,6 +367,7 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -382,6 +385,7 @@ def test_config_service_v2_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 817eb6260f5f..b95281460984 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -104,27 +104,21 @@ def test_logging_service_v2_client_from_service_account_info(client_class): assert client.transport._host == 'logging.googleapis.com:443' -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, -]) -def test_logging_service_v2_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ (transports.LoggingServiceV2GrpcTransport, "grpc"), (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ]) -def test_logging_service_v2_client_service_account_always_use_jwt_true(transport_class, transport_name): +def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ LoggingServiceV2Client, @@ 
-189,6 +183,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,6 +200,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,6 +217,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -247,6 +244,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -285,6 +283,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -310,6 +309,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -326,6 +326,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -349,6 +350,7 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -371,6 +373,7 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -388,6 +391,7 @@ def test_logging_service_v2_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index eb61169d7339..5ce85b428459 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -102,27 +102,21 @@ def test_metrics_service_v2_client_from_service_account_info(client_class): assert client.transport._host == 'logging.googleapis.com:443' -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, -]) -def test_metrics_service_v2_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = 
service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ (transports.MetricsServiceV2GrpcTransport, "grpc"), (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ]) -def test_metrics_service_v2_client_service_account_always_use_jwt_true(transport_class, transport_name): +def test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ MetricsServiceV2Client, @@ -187,6 +181,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -203,6 +198,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -219,6 +215,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,6 +242,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -283,6 +281,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -308,6 +307,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -324,6 +324,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -347,6 +348,7 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -369,6 +371,7 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -386,6 +389,7 @@ def test_metrics_service_v2_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 87892f6c857e..dca373977364 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -355,6 +355,10 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def list_instances(self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py 
b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index d39ceb9cbfe2..a1c9cc2e643b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -102,27 +102,21 @@ def test_cloud_redis_client_from_service_account_info(client_class): assert client.transport._host == 'redis.googleapis.com:443' -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, - CloudRedisAsyncClient, -]) -def test_cloud_redis_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize("transport_class,transport_name", [ (transports.CloudRedisGrpcTransport, "grpc"), (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), ]) -def test_cloud_redis_client_service_account_always_use_jwt_true(transport_class, transport_name): +def test_cloud_redis_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: creds = service_account.Credentials(None, None, None) transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [ CloudRedisClient, @@ -187,6 +181,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp 
client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -203,6 +198,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -219,6 +215,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,6 +242,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -283,6 +281,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -308,6 +307,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -324,6 +324,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -347,6 +348,7 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -369,6 +371,7 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -386,6 +389,7 @@ def test_cloud_redis_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) From 36aaee755c7ee7718bb86b992031e785d3e86119 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 21 Jul 2021 17:25:25 -0700 Subject: [PATCH 0594/1339] chore: release 0.50.5 (#952) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4857ad570f85..400393690c0a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.50.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.4...v0.50.5) (2021-07-22) + + +### Bug Fixes + +* enable self signed jwt for grpc ([#958](https://www.github.com/googleapis/gapic-generator-python/issues/958)) 
([af02a9c](https://www.github.com/googleapis/gapic-generator-python/commit/af02a9cae522ff2cdc8e97cfffe2ba2bb84d6b6a)) +* fix rest transport unit test and required query prams handling ([#951](https://www.github.com/googleapis/gapic-generator-python/issues/951)) ([b793017](https://www.github.com/googleapis/gapic-generator-python/commit/b7930177da9a8be556bf6485febcc0a9bdef897b)) + ### [0.50.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.3...v0.50.4) (2021-06-30) From e0009befec43010cc7b5245cf541a812ae5872d5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 26 Jul 2021 14:42:40 -0400 Subject: [PATCH 0595/1339] chore: fix typos in templated files (#962) --- .../%version/%sub/services/%service/transports/grpc.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../%sub/services/%service/transports/base.py.j2 | 2 +- .../%sub/services/%service/transports/grpc.py.j2 | 6 +++--- .../%sub/services/%service/transports/grpc_asyncio.py.j2 | 6 +++--- .../%sub/services/%service/transports/rest.py.j2 | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../asset_v1/services/asset_service/transports/base.py | 2 +- .../asset_v1/services/asset_service/transports/grpc.py | 6 +++--- .../services/asset_service/transports/grpc_asyncio.py | 6 +++--- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../services/iam_credentials/transports/base.py | 2 +- .../services/iam_credentials/transports/grpc.py | 6 +++--- .../services/iam_credentials/transports/grpc_asyncio.py | 6 +++--- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../services/config_service_v2/transports/base.py | 2 +- .../services/config_service_v2/transports/grpc.py | 6 +++--- .../services/config_service_v2/transports/grpc_asyncio.py | 6 +++--- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../services/logging_service_v2/transports/base.py | 2 +- 
.../services/logging_service_v2/transports/grpc.py | 6 +++--- .../services/logging_service_v2/transports/grpc_asyncio.py | 6 +++--- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../services/metrics_service_v2/transports/base.py | 2 +- .../services/metrics_service_v2/transports/grpc.py | 6 +++--- .../services/metrics_service_v2/transports/grpc_asyncio.py | 6 +++--- .../google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/base.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 6 +++--- .../services/cloud_redis/transports/grpc_asyncio.py | 6 +++--- 30 files changed, 58 insertions(+), 58 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index af6b11ad89e3..1562b6f88c61 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -70,7 +70,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. 
It is ignored if diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 9bae12313cb6..fa2c353d3616 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -6,7 +6,7 @@ from collections import OrderedDict from distutils import util import os import re -from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources {% if service.any_deprecated %} import warnings diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 76d8fccbddd0..b38a3535b03f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -128,7 +128,7 @@ class {{ service.name }}Transport(abc.ABC): elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index a534ed95d723..f2f0da6ea3a3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -78,16 +78,16 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 15f290f3b844..934a931aff57 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -124,16 +124,16 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index b22ced97ab91..0e6303efd530 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -190,7 +190,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {# TODO(yon-mg): move all query param logic out of wrappers into here to handle nested fields correctly (can't just use set of top level fields #} - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields + # TODO(yon-mg): handle nested fields correctly rather than using only top level fields # not required for GCE query_params = {} {% for field in method.query_params | sort%} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index bcc70595025f..57f9777d6157 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index cd427b6a2a18..45083d66871a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -118,7 +118,7 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index d20c2b2418e1..c95a90bff1f5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -80,16 +80,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. 
A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 0634b4f4cd83..8559391cf053 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -126,16 +126,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 4238eb763d31..0ebc616d992a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 4c45c7dda022..99c147750219 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -115,7 +115,7 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 90204dac799a..95c0f5f97599 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -86,16 +86,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. 
It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index c27282cd395a..c944ba08b8fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -132,16 +132,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index ad8f294462bc..cbccc479383e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 1c5dae9241ae..e7f0db9d401c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -119,7 +119,7 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, 
quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 5d8245df1c19..94e4af68334b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -78,16 +78,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. 
It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 07343e8297f0..78e442d8a4e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -124,16 +124,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. 
It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 03b5f832f0d7..05bcee998908 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 4dd3a952259c..222ed3c1f99c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -120,7 +120,7 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index d527a00bba36..f66cb54a21aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -78,16 +78,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 7293506fcda9..a19007ab65c9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -124,16 +124,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index a4d4efcda78a..0204f594a187 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index dbd78ea6a797..b9170bf568f9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -120,7 +120,7 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 2f34d5651f1d..e300d9f5320e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -78,16 +78,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 33692de4aa92..7da832822ebd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -124,16 +124,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index dca373977364..d3513d79dea9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index e9ade44365cb..d8161b1e788e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -117,7 +117,7 @@ def __init__( elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): credentials = credentials.with_always_use_jwt_access(True) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 5801e47267a1..5b15755d0407 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -99,16 +99,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index d4e7de81c8ff..2a2184de64ec 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -145,16 +145,16 @@ def __init__(self, *, api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
From 240fb4e20e452e26a17760b9fc2d0c692023657a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 26 Jul 2021 22:02:48 +0200 Subject: [PATCH 0596/1339] chore(deps): update dependency pypandoc to v1.6.3 (#946) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 166280e9d419..81ed7642f1c7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,6 +4,6 @@ googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 protobuf==3.17.3 -pypandoc==1.5 +pypandoc==1.6.3 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 32f49fbeb3296ebe4aa3c4ff3421daa1f63aa452 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 26 Jul 2021 17:02:37 -0400 Subject: [PATCH 0597/1339] chore: clean up fixup scripts (#963) Incorporate changes from https://github.com/googleapis/python-dialogflow/pull/351 in templated fixup script files --- .../fixup_%name_%version_keywords.py.j2 | 4 +- .../fixup_%name_%version_keywords.py.j2 | 4 +- .../asset/scripts/fixup_asset_v1_keywords.py | 26 +++---- .../scripts/fixup_credentials_v1_keywords.py | 10 +-- .../scripts/fixup_logging_v2_keywords.py | 70 +++++++++---------- .../redis/scripts/fixup_redis_v1_keywords.py | 20 +++--- 6 files changed, 67 insertions(+), 67 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 index 1bd2ec79c3e0..f197e8512d4f 100644 --- a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -32,7 +32,7 @@ class {{ api.naming.module_name 
}}CallTransformer(cst.CSTTransformer): {% endfor %}{% endfor %} METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { {% for method in all_methods|sort(attribute='name')|unique(attribute='name') %} - '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), + '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), {% endfor %} } {% endwith %} @@ -54,7 +54,7 @@ class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 index 32cc47c49ace..e408cfd34d47 100644 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 +++ b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 @@ -32,7 +32,7 @@ class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): {% endfor %}{% endfor %} METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { {% for method in all_methods|sort(attribute='name')|unique(attribute='name') %} - '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), + '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), {% endfor %} {% if opts.add_iam_methods %} 'get_iam_policy': ('resource', 'options', ), @@ -59,7 +59,7 @@ class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index 4a9e79e96cb2..c52bebe8e71b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -39,18 +39,18 @@ def partition( class assetCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_iam_policy': ('analysis_query', 'execution_timeout', ), - 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', ), - 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', ), - 'create_feed': ('parent', 'feed_id', 'feed', ), - 'delete_feed': ('name', ), - 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', ), - 'get_feed': ('name', ), - 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', ), - 'list_feeds': ('parent', ), - 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', 'asset_types', 'order_by', ), - 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), - 'update_feed': ('feed', 'update_mask', ), + 'analyze_iam_policy': ('analysis_query', 'execution_timeout', ), + 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', ), + 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', ), + 'create_feed': ('parent', 'feed_id', 'feed', ), + 'delete_feed': ('name', ), + 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', ), + 'get_feed': ('name', ), + 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', ), + 'list_feeds': ('parent', ), + 
'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', 'asset_types', 'order_by', ), + 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), + 'update_feed': ('feed', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -69,7 +69,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py index f9e01419c920..9510dabdd80a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py @@ -39,10 +39,10 @@ def partition( class credentialsCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'generate_access_token': ('name', 'scope', 'delegates', 'lifetime', ), - 'generate_id_token': ('name', 'audience', 'delegates', 'include_email', ), - 'sign_blob': ('name', 'payload', 'delegates', ), - 'sign_jwt': ('name', 'payload', 'delegates', ), + 'generate_access_token': ('name', 'scope', 'delegates', 'lifetime', ), + 'generate_id_token': ('name', 'audience', 'delegates', 'include_email', ), + 'sign_blob': ('name', 'payload', 'delegates', ), + 'sign_jwt': ('name', 'payload', 'delegates', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -61,7 +61,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not 
a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index 5a3ed2504c02..2a368fb9ccea 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -39,40 +39,40 @@ def partition( class loggingCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_bucket': ('parent', 'bucket_id', 'bucket', ), - 'create_exclusion': ('parent', 'exclusion', ), - 'create_log_metric': ('parent', 'metric', ), - 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), - 'create_view': ('parent', 'view_id', 'view', ), - 'delete_bucket': ('name', ), - 'delete_exclusion': ('name', ), - 'delete_log': ('log_name', ), - 'delete_log_metric': ('metric_name', ), - 'delete_sink': ('sink_name', ), - 'delete_view': ('name', ), - 'get_bucket': ('name', ), - 'get_cmek_settings': ('name', ), - 'get_exclusion': ('name', ), - 'get_log_metric': ('metric_name', ), - 'get_sink': ('sink_name', ), - 'get_view': ('name', ), - 'list_buckets': ('parent', 'page_token', 'page_size', ), - 'list_exclusions': ('parent', 'page_token', 'page_size', ), - 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_log_metrics': ('parent', 'page_token', 'page_size', ), - 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), - 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), - 'list_sinks': ('parent', 'page_token', 'page_size', ), - 'list_views': ('parent', 'page_token', 'page_size', ), - 'tail_log_entries': ('resource_names', 'filter', 
'buffer_window', ), - 'undelete_bucket': ('name', ), - 'update_bucket': ('name', 'bucket', 'update_mask', ), - 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), - 'update_exclusion': ('name', 'exclusion', 'update_mask', ), - 'update_log_metric': ('metric_name', 'metric', ), - 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), - 'update_view': ('name', 'view', 'update_mask', ), - 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), + 'create_bucket': ('parent', 'bucket_id', 'bucket', ), + 'create_exclusion': ('parent', 'exclusion', ), + 'create_log_metric': ('parent', 'metric', ), + 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), + 'create_view': ('parent', 'view_id', 'view', ), + 'delete_bucket': ('name', ), + 'delete_exclusion': ('name', ), + 'delete_log': ('log_name', ), + 'delete_log_metric': ('metric_name', ), + 'delete_sink': ('sink_name', ), + 'delete_view': ('name', ), + 'get_bucket': ('name', ), + 'get_cmek_settings': ('name', ), + 'get_exclusion': ('name', ), + 'get_log_metric': ('metric_name', ), + 'get_sink': ('sink_name', ), + 'get_view': ('name', ), + 'list_buckets': ('parent', 'page_token', 'page_size', ), + 'list_exclusions': ('parent', 'page_token', 'page_size', ), + 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_log_metrics': ('parent', 'page_token', 'page_size', ), + 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), + 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), + 'list_sinks': ('parent', 'page_token', 'page_size', ), + 'list_views': ('parent', 'page_token', 'page_size', ), + 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), + 'undelete_bucket': ('name', ), + 'update_bucket': ('name', 'bucket', 'update_mask', ), + 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), + 'update_exclusion': ('name', 'exclusion', 
'update_mask', ), + 'update_log_metric': ('metric_name', 'metric', ), + 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), + 'update_view': ('name', 'view', 'update_mask', ), + 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -91,7 +91,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index 876e658d593c..a8415f0b249a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -39,15 +39,15 @@ def partition( class redisCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'export_instance': ('name', 'output_config', ), - 'failover_instance': ('name', 'data_protection_mode', ), - 'get_instance': ('name', ), - 'import_instance': ('name', 'input_config', ), - 'list_instances': ('parent', 'page_size', 'page_token', ), - 'update_instance': ('update_mask', 'instance', ), - 'upgrade_instance': ('name', 'redis_version', ), + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'export_instance': ('name', 'output_config', ), + 'failover_instance': ('name', 'data_protection_mode', ), + 'get_instance': ('name', ), + 'import_instance': ('name', 'input_config', ), + 
'list_instances': ('parent', 'page_size', 'page_token', ), + 'update_instance': ('update_mask', 'instance', ), + 'upgrade_instance': ('name', 'redis_version', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -66,7 +66,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) From 31fd08c9b6823d9c37b416e5d20cc083445b119c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 2 Aug 2021 18:59:39 +0200 Subject: [PATCH 0598/1339] chore(deps): update dependency google-api-core to v1.31.1 (#964) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 81ed7642f1c7..6a684cbbf6b4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==1.31.0 +google-api-core==1.31.1 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From 1303e74811cf36bf95ff4dfae72f6e2ba33bb900 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 4 Aug 2021 10:05:18 -0600 Subject: [PATCH 0599/1339] fix: add 'dict' type annotation to 'request' (#966) * fix: add 'dict' type annotation to request * Update client.py.j2 --- .../%sub/services/%service/client.py.j2 | 4 +- .../%sub/services/%service/client.py.j2 | 4 +- .../asset_v1/services/asset_service/client.py | 48 +++++----- .../services/iam_credentials/client.py | 16 ++-- .../services/config_service_v2/client.py | 92 +++++++++---------- .../services/logging_service_v2/client.py | 20 ++-- .../services/metrics_service_v2/client.py | 20 ++-- .../redis_v1/services/cloud_redis/client.py | 36 ++++---- 8 files changed, 120 insertions(+), 120 
deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 946cb69a380e..90e5002b684b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -285,7 +285,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() %} def {{ method.name|snake_case }}(self, {% if not method.client_streaming %} - request: {{ method.input.ident }} = None, + request: Union[{{ method.input.ident }}, dict] = None, *, {% for field in method.flattened_fields.values() %} {{ field.name }}: {{ field.ident }} = None, @@ -306,7 +306,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Args: {% if not method.client_streaming %} - request (:class:`{{ method.input.ident.sphinx }}`): + request (Union[{{ method.input.ident.sphinx }}, dict]): The request object.{{ ' ' }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index fa2c353d3616..f7a513220ee3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -316,7 +316,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() %} def {{ method.name|snake_case }}(self, {% if not 
method.client_streaming %} - request: {{ method.input.ident }} = None, + request: Union[{{ method.input.ident }}, dict] = None, *, {% for field in method.flattened_fields.values() %} {{ field.name }}: {{ field.ident }} = None, @@ -337,7 +337,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Args: {% if not method.client_streaming %} - request ({{ method.input.ident.sphinx }}): + request (Union[{{ method.input.ident.sphinx }}, dict]): The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 57f9777d6157..98dcd3aa8f3d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -351,7 +351,7 @@ def __init__(self, *, ) def export_assets(self, - request: asset_service.ExportAssetsRequest = None, + request: Union[asset_service.ExportAssetsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -372,7 +372,7 @@ def export_assets(self, the export operation usually finishes within 5 minutes. Args: - request (google.cloud.asset_v1.types.ExportAssetsRequest): + request (Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]): The request object. Export asset request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -431,7 +431,7 @@ def export_assets(self, return response def list_assets(self, - request: asset_service.ListAssetsRequest = None, + request: Union[asset_service.ListAssetsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -442,7 +442,7 @@ def list_assets(self, paged results in response. Args: - request (google.cloud.asset_v1.types.ListAssetsRequest): + request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): The request object. ListAssets request. parent (str): Required. Name of the organization or project the assets @@ -520,7 +520,7 @@ def list_assets(self, return response def batch_get_assets_history(self, - request: asset_service.BatchGetAssetsHistoryRequest = None, + request: Union[asset_service.BatchGetAssetsHistoryRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -535,7 +535,7 @@ def batch_get_assets_history(self, INVALID_ARGUMENT error. Args: - request (google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest): + request (Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]): The request object. Batch get assets history request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -579,7 +579,7 @@ def batch_get_assets_history(self, return response def create_feed(self, - request: asset_service.CreateFeedRequest = None, + request: Union[asset_service.CreateFeedRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -591,7 +591,7 @@ def create_feed(self, updates. Args: - request (google.cloud.asset_v1.types.CreateFeedRequest): + request (Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]): The request object. Create asset feed request. parent (str): Required. 
The name of the @@ -667,7 +667,7 @@ def create_feed(self, return response def get_feed(self, - request: asset_service.GetFeedRequest = None, + request: Union[asset_service.GetFeedRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -677,7 +677,7 @@ def get_feed(self, r"""Gets details about an asset feed. Args: - request (google.cloud.asset_v1.types.GetFeedRequest): + request (Union[google.cloud.asset_v1.types.GetFeedRequest, dict]): The request object. Get asset feed request. name (str): Required. The name of the Feed and it must be in the @@ -748,7 +748,7 @@ def get_feed(self, return response def list_feeds(self, - request: asset_service.ListFeedsRequest = None, + request: Union[asset_service.ListFeedsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -759,7 +759,7 @@ def list_feeds(self, project/folder/organization. Args: - request (google.cloud.asset_v1.types.ListFeedsRequest): + request (Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]): The request object. List asset feeds request. parent (str): Required. The parent @@ -825,7 +825,7 @@ def list_feeds(self, return response def update_feed(self, - request: asset_service.UpdateFeedRequest = None, + request: Union[asset_service.UpdateFeedRequest, dict] = None, *, feed: asset_service.Feed = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -835,7 +835,7 @@ def update_feed(self, r"""Updates an asset feed configuration. Args: - request (google.cloud.asset_v1.types.UpdateFeedRequest): + request (Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]): The request object. Update asset feed request. feed (google.cloud.asset_v1.types.Feed): Required. The new values of feed details. 
It must match @@ -907,7 +907,7 @@ def update_feed(self, return response def delete_feed(self, - request: asset_service.DeleteFeedRequest = None, + request: Union[asset_service.DeleteFeedRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -917,7 +917,7 @@ def delete_feed(self, r"""Deletes an asset feed. Args: - request (google.cloud.asset_v1.types.DeleteFeedRequest): + request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): The request object. name (str): Required. The name of the feed and it must be in the @@ -974,7 +974,7 @@ def delete_feed(self, ) def search_all_resources(self, - request: asset_service.SearchAllResourcesRequest = None, + request: Union[asset_service.SearchAllResourcesRequest, dict] = None, *, scope: str = None, query: str = None, @@ -989,7 +989,7 @@ def search_all_resources(self, desired scope, otherwise the request will be rejected. Args: - request (google.cloud.asset_v1.types.SearchAllResourcesRequest): + request (Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]): The request object. Search all resources request. scope (str): Required. A scope can be a project, a folder, or an @@ -1154,7 +1154,7 @@ def search_all_resources(self, return response def search_all_iam_policies(self, - request: asset_service.SearchAllIamPoliciesRequest = None, + request: Union[asset_service.SearchAllIamPoliciesRequest, dict] = None, *, scope: str = None, query: str = None, @@ -1168,7 +1168,7 @@ def search_all_iam_policies(self, desired scope, otherwise the request will be rejected. Args: - request (google.cloud.asset_v1.types.SearchAllIamPoliciesRequest): + request (Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]): The request object. Search all IAM policies request. scope (str): Required. 
A scope can be a project, a folder, or an @@ -1313,7 +1313,7 @@ def search_all_iam_policies(self, return response def analyze_iam_policy(self, - request: asset_service.AnalyzeIamPolicyRequest = None, + request: Union[asset_service.AnalyzeIamPolicyRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1323,7 +1323,7 @@ def analyze_iam_policy(self, what accesses on which resources. Args: - request (google.cloud.asset_v1.types.AnalyzeIamPolicyRequest): + request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]): The request object. A request message for [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1370,7 +1370,7 @@ def analyze_iam_policy(self, return response def analyze_iam_policy_longrunning(self, - request: asset_service.AnalyzeIamPolicyLongrunningRequest = None, + request: Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1390,7 +1390,7 @@ def analyze_iam_policy_longrunning(self, to help callers to map responses to requests. Args: - request (google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest): + request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]): The request object. A request message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 0ebc616d992a..1280b71919ad 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -347,7 +347,7 @@ def __init__(self, *, ) def generate_access_token(self, - request: common.GenerateAccessTokenRequest = None, + request: Union[common.GenerateAccessTokenRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -361,7 +361,7 @@ def generate_access_token(self, account. Args: - request (google.iam.credentials_v1.types.GenerateAccessTokenRequest): + request (Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]): The request object. name (str): Required. The resource name of the service account for @@ -473,7 +473,7 @@ def generate_access_token(self, return response def generate_id_token(self, - request: common.GenerateIdTokenRequest = None, + request: Union[common.GenerateIdTokenRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -487,7 +487,7 @@ def generate_id_token(self, account. Args: - request (google.iam.credentials_v1.types.GenerateIdTokenRequest): + request (Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]): The request object. name (str): Required. 
The resource name of the service account for @@ -593,7 +593,7 @@ def generate_id_token(self, return response def sign_blob(self, - request: common.SignBlobRequest = None, + request: Union[common.SignBlobRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -606,7 +606,7 @@ def sign_blob(self, private key. Args: - request (google.iam.credentials_v1.types.SignBlobRequest): + request (Union[google.iam.credentials_v1.types.SignBlobRequest, dict]): The request object. name (str): Required. The resource name of the service account for @@ -699,7 +699,7 @@ def sign_blob(self, return response def sign_jwt(self, - request: common.SignJwtRequest = None, + request: Union[common.SignJwtRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -712,7 +712,7 @@ def sign_jwt(self, private key. Args: - request (google.iam.credentials_v1.types.SignJwtRequest): + request (Union[google.iam.credentials_v1.types.SignJwtRequest, dict]): The request object. name (str): Required. The resource name of the service account for diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index cbccc479383e..acf10f2292b8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -382,7 +382,7 @@ def __init__(self, *, ) def list_buckets(self, - request: logging_config.ListBucketsRequest = None, + request: Union[logging_config.ListBucketsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -392,7 +392,7 @@ def list_buckets(self, r"""Lists buckets. 
Args: - request (google.cloud.logging_v2.types.ListBucketsRequest): + request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): The request object. The parameters to `ListBuckets`. parent (str): Required. The parent resource whose buckets are to be @@ -478,7 +478,7 @@ def list_buckets(self, return response def get_bucket(self, - request: logging_config.GetBucketRequest = None, + request: Union[logging_config.GetBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -487,7 +487,7 @@ def get_bucket(self, r"""Gets a bucket. Args: - request (google.cloud.logging_v2.types.GetBucketRequest): + request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -531,7 +531,7 @@ def get_bucket(self, return response def create_bucket(self, - request: logging_config.CreateBucketRequest = None, + request: Union[logging_config.CreateBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -542,7 +542,7 @@ def create_bucket(self, cannot be changed. Args: - request (google.cloud.logging_v2.types.CreateBucketRequest): + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -586,7 +586,7 @@ def create_bucket(self, return response def update_bucket(self, - request: logging_config.UpdateBucketRequest = None, + request: Union[logging_config.UpdateBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -605,7 +605,7 @@ def update_bucket(self, A buckets region may not be modified after it is created. 
Args: - request (google.cloud.logging_v2.types.UpdateBucketRequest): + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -649,7 +649,7 @@ def update_bucket(self, return response def delete_bucket(self, - request: logging_config.DeleteBucketRequest = None, + request: Union[logging_config.DeleteBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -660,7 +660,7 @@ def delete_bucket(self, the bucket will be permanently deleted. Args: - request (google.cloud.logging_v2.types.DeleteBucketRequest): + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -697,7 +697,7 @@ def delete_bucket(self, ) def undelete_bucket(self, - request: logging_config.UndeleteBucketRequest = None, + request: Union[logging_config.UndeleteBucketRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -707,7 +707,7 @@ def undelete_bucket(self, may be undeleted within the grace period of 7 days. Args: - request (google.cloud.logging_v2.types.UndeleteBucketRequest): + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -744,7 +744,7 @@ def undelete_bucket(self, ) def list_views(self, - request: logging_config.ListViewsRequest = None, + request: Union[logging_config.ListViewsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -754,7 +754,7 @@ def list_views(self, r"""Lists views on a bucket. 
Args: - request (google.cloud.logging_v2.types.ListViewsRequest): + request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): The request object. The parameters to `ListViews`. parent (str): Required. The bucket whose views are to be listed: @@ -832,7 +832,7 @@ def list_views(self, return response def get_view(self, - request: logging_config.GetViewRequest = None, + request: Union[logging_config.GetViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -841,7 +841,7 @@ def get_view(self, r"""Gets a view. Args: - request (google.cloud.logging_v2.types.GetViewRequest): + request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): The request object. The parameters to `GetView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -887,7 +887,7 @@ def get_view(self, return response def create_view(self, - request: logging_config.CreateViewRequest = None, + request: Union[logging_config.CreateViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -897,7 +897,7 @@ def create_view(self, contain a maximum of 50 views. Args: - request (google.cloud.logging_v2.types.CreateViewRequest): + request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -943,7 +943,7 @@ def create_view(self, return response def update_view(self, - request: logging_config.UpdateViewRequest = None, + request: Union[logging_config.UpdateViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -953,7 +953,7 @@ def update_view(self, existing view with values from the new view: ``filter``. 
Args: - request (google.cloud.logging_v2.types.UpdateViewRequest): + request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -999,7 +999,7 @@ def update_view(self, return response def delete_view(self, - request: logging_config.DeleteViewRequest = None, + request: Union[logging_config.DeleteViewRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1008,7 +1008,7 @@ def delete_view(self, r"""Deletes a view from a bucket. Args: - request (google.cloud.logging_v2.types.DeleteViewRequest): + request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): The request object. The parameters to `DeleteView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1045,7 +1045,7 @@ def delete_view(self, ) def list_sinks(self, - request: logging_config.ListSinksRequest = None, + request: Union[logging_config.ListSinksRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1055,7 +1055,7 @@ def list_sinks(self, r"""Lists sinks. Args: - request (google.cloud.logging_v2.types.ListSinksRequest): + request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. parent (str): Required. The parent resource whose sinks are to be @@ -1137,7 +1137,7 @@ def list_sinks(self, return response def get_sink(self, - request: logging_config.GetSinkRequest = None, + request: Union[logging_config.GetSinkRequest, dict] = None, *, sink_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1147,7 +1147,7 @@ def get_sink(self, r"""Gets a sink. Args: - request (google.cloud.logging_v2.types.GetSinkRequest): + request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. 
sink_name (str): Required. The resource name of the sink: @@ -1226,7 +1226,7 @@ def get_sink(self, return response def create_sink(self, - request: logging_config.CreateSinkRequest = None, + request: Union[logging_config.CreateSinkRequest, dict] = None, *, parent: str = None, sink: logging_config.LogSink = None, @@ -1241,7 +1241,7 @@ def create_sink(self, entries only from the resource owning the sink. Args: - request (google.cloud.logging_v2.types.CreateSinkRequest): + request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. parent (str): Required. The resource in which to create the sink: @@ -1330,7 +1330,7 @@ def create_sink(self, return response def update_sink(self, - request: logging_config.UpdateSinkRequest = None, + request: Union[logging_config.UpdateSinkRequest, dict] = None, *, sink_name: str = None, sink: logging_config.LogSink = None, @@ -1347,7 +1347,7 @@ def update_sink(self, the ``unique_writer_identity`` field. Args: - request (google.cloud.logging_v2.types.UpdateSinkRequest): + request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. sink_name (str): Required. The full resource name of the sink to update, @@ -1458,7 +1458,7 @@ def update_sink(self, return response def delete_sink(self, - request: logging_config.DeleteSinkRequest = None, + request: Union[logging_config.DeleteSinkRequest, dict] = None, *, sink_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1469,7 +1469,7 @@ def delete_sink(self, then that service account is also deleted. Args: - request (google.cloud.logging_v2.types.DeleteSinkRequest): + request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. sink_name (str): Required. 
The full resource name of the sink to delete, @@ -1533,7 +1533,7 @@ def delete_sink(self, ) def list_exclusions(self, - request: logging_config.ListExclusionsRequest = None, + request: Union[logging_config.ListExclusionsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1543,7 +1543,7 @@ def list_exclusions(self, r"""Lists all the exclusions in a parent resource. Args: - request (google.cloud.logging_v2.types.ListExclusionsRequest): + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): The request object. The parameters to `ListExclusions`. parent (str): Required. The parent resource whose exclusions are to be @@ -1625,7 +1625,7 @@ def list_exclusions(self, return response def get_exclusion(self, - request: logging_config.GetExclusionRequest = None, + request: Union[logging_config.GetExclusionRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1635,7 +1635,7 @@ def get_exclusion(self, r"""Gets the description of an exclusion. Args: - request (google.cloud.logging_v2.types.GetExclusionRequest): + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): The request object. The parameters to `GetExclusion`. name (str): Required. The resource name of an existing exclusion: @@ -1717,7 +1717,7 @@ def get_exclusion(self, return response def create_exclusion(self, - request: logging_config.CreateExclusionRequest = None, + request: Union[logging_config.CreateExclusionRequest, dict] = None, *, parent: str = None, exclusion: logging_config.LogExclusion = None, @@ -1731,7 +1731,7 @@ def create_exclusion(self, resource. Args: - request (google.cloud.logging_v2.types.CreateExclusionRequest): + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): The request object. The parameters to `CreateExclusion`. parent (str): Required. 
The parent resource in which to create the @@ -1824,7 +1824,7 @@ def create_exclusion(self, return response def update_exclusion(self, - request: logging_config.UpdateExclusionRequest = None, + request: Union[logging_config.UpdateExclusionRequest, dict] = None, *, name: str = None, exclusion: logging_config.LogExclusion = None, @@ -1837,7 +1837,7 @@ def update_exclusion(self, exclusion. Args: - request (google.cloud.logging_v2.types.UpdateExclusionRequest): + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): The request object. The parameters to `UpdateExclusion`. name (str): Required. The resource name of the exclusion to update: @@ -1945,7 +1945,7 @@ def update_exclusion(self, return response def delete_exclusion(self, - request: logging_config.DeleteExclusionRequest = None, + request: Union[logging_config.DeleteExclusionRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1955,7 +1955,7 @@ def delete_exclusion(self, r"""Deletes an exclusion. Args: - request (google.cloud.logging_v2.types.DeleteExclusionRequest): + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): The request object. The parameters to `DeleteExclusion`. name (str): Required. The resource name of an existing exclusion to @@ -2020,7 +2020,7 @@ def delete_exclusion(self, ) def get_cmek_settings(self, - request: logging_config.GetCmekSettingsRequest = None, + request: Union[logging_config.GetCmekSettingsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -2037,7 +2037,7 @@ def get_cmek_settings(self, for more information. Args: - request (google.cloud.logging_v2.types.GetCmekSettingsRequest): + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. 
See [Enabling CMEK for Logs @@ -2097,7 +2097,7 @@ def get_cmek_settings(self, return response def update_cmek_settings(self, - request: logging_config.UpdateCmekSettingsRequest = None, + request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -2120,7 +2120,7 @@ def update_cmek_settings(self, for more information. Args: - request (google.cloud.logging_v2.types.UpdateCmekSettingsRequest): + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 05bcee998908..354945976630 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -338,7 +338,7 @@ def __init__(self, *, ) def delete_log(self, - request: logging.DeleteLogRequest = None, + request: Union[logging.DeleteLogRequest, dict] = None, *, log_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -352,7 +352,7 @@ def delete_log(self, with a timestamp before the operation will be deleted. Args: - request (google.cloud.logging_v2.types.DeleteLogRequest): + request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): The request object. The parameters to DeleteLog. log_name (str): Required. 
The resource name of the log to delete: @@ -419,7 +419,7 @@ def delete_log(self, ) def write_log_entries(self, - request: logging.WriteLogEntriesRequest = None, + request: Union[logging.WriteLogEntriesRequest, dict] = None, *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, @@ -438,7 +438,7 @@ def write_log_entries(self, organizations, billing accounts or folders) Args: - request (google.cloud.logging_v2.types.WriteLogEntriesRequest): + request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. log_name (str): Optional. A default log resource name that is assigned @@ -579,7 +579,7 @@ def write_log_entries(self, return response def list_log_entries(self, - request: logging.ListLogEntriesRequest = None, + request: Union[logging.ListLogEntriesRequest, dict] = None, *, resource_names: Sequence[str] = None, filter: str = None, @@ -594,7 +594,7 @@ def list_log_entries(self, Logs `__. Args: - request (google.cloud.logging_v2.types.ListLogEntriesRequest): + request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. resource_names (Sequence[str]): Required. Names of one or more parent resources from @@ -708,7 +708,7 @@ def list_log_entries(self, return response def list_monitored_resource_descriptors(self, - request: logging.ListMonitoredResourceDescriptorsRequest = None, + request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -718,7 +718,7 @@ def list_monitored_resource_descriptors(self, used by Logging. Args: - request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. 
The parameters to ListMonitoredResourceDescriptors retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -769,7 +769,7 @@ def list_monitored_resource_descriptors(self, return response def list_logs(self, - request: logging.ListLogsRequest = None, + request: Union[logging.ListLogsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -781,7 +781,7 @@ def list_logs(self, listed. Args: - request (google.cloud.logging_v2.types.ListLogsRequest): + request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): Required. The resource name that owns the logs: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 0204f594a187..af554cf6d6fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -339,7 +339,7 @@ def __init__(self, *, ) def list_log_metrics(self, - request: logging_metrics.ListLogMetricsRequest = None, + request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -349,7 +349,7 @@ def list_log_metrics(self, r"""Lists logs-based metrics. Args: - request (google.cloud.logging_v2.types.ListLogMetricsRequest): + request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. parent (str): Required. 
The name of the project containing the @@ -428,7 +428,7 @@ def list_log_metrics(self, return response def get_log_metric(self, - request: logging_metrics.GetLogMetricRequest = None, + request: Union[logging_metrics.GetLogMetricRequest, dict] = None, *, metric_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -438,7 +438,7 @@ def get_log_metric(self, r"""Gets a logs-based metric. Args: - request (google.cloud.logging_v2.types.GetLogMetricRequest): + request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. metric_name (str): Required. The resource name of the desired metric: @@ -514,7 +514,7 @@ def get_log_metric(self, return response def create_log_metric(self, - request: logging_metrics.CreateLogMetricRequest = None, + request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, *, parent: str = None, metric: logging_metrics.LogMetric = None, @@ -525,7 +525,7 @@ def create_log_metric(self, r"""Creates a logs-based metric. Args: - request (google.cloud.logging_v2.types.CreateLogMetricRequest): + request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. parent (str): Required. The resource name of the project in which to @@ -614,7 +614,7 @@ def create_log_metric(self, return response def update_log_metric(self, - request: logging_metrics.UpdateLogMetricRequest = None, + request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, *, metric_name: str = None, metric: logging_metrics.LogMetric = None, @@ -625,7 +625,7 @@ def update_log_metric(self, r"""Creates or updates a logs-based metric. Args: - request (google.cloud.logging_v2.types.UpdateLogMetricRequest): + request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. metric_name (str): Required. 
The resource name of the metric to update: @@ -713,7 +713,7 @@ def update_log_metric(self, return response def delete_log_metric(self, - request: logging_metrics.DeleteLogMetricRequest = None, + request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, *, metric_name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -723,7 +723,7 @@ def delete_log_metric(self, r"""Deletes a logs-based metric. Args: - request (google.cloud.logging_v2.types.DeleteLogMetricRequest): + request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. metric_name (str): Required. The resource name of the metric to delete: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index d3513d79dea9..2857a5fb9fde 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -362,7 +362,7 @@ def __init__(self, *, ) def list_instances(self, - request: cloud_redis.ListInstancesRequest = None, + request: Union[cloud_redis.ListInstancesRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -381,7 +381,7 @@ def list_instances(self, are aggregated. Args: - request (google.cloud.redis_v1.types.ListInstancesRequest): + request (Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]): The request object. Request for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
parent (str): @@ -460,7 +460,7 @@ def list_instances(self, return response def get_instance(self, - request: cloud_redis.GetInstanceRequest = None, + request: Union[cloud_redis.GetInstanceRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -470,7 +470,7 @@ def get_instance(self, r"""Gets the details of a specific Redis instance. Args: - request (google.cloud.redis_v1.types.GetInstanceRequest): + request (Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]): The request object. Request for [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. name (str): @@ -534,7 +534,7 @@ def get_instance(self, return response def create_instance(self, - request: cloud_redis.CreateInstanceRequest = None, + request: Union[cloud_redis.CreateInstanceRequest, dict] = None, *, parent: str = None, instance_id: str = None, @@ -559,7 +559,7 @@ def create_instance(self, hours, so there is no need to call DeleteOperation. Args: - request (google.cloud.redis_v1.types.CreateInstanceRequest): + request (Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]): The request object. Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. parent (str): @@ -660,7 +660,7 @@ def create_instance(self, return response def update_instance(self, - request: cloud_redis.UpdateInstanceRequest = None, + request: Union[cloud_redis.UpdateInstanceRequest, dict] = None, *, update_mask: field_mask_pb2.FieldMask = None, instance: cloud_redis.Instance = None, @@ -676,7 +676,7 @@ def update_instance(self, there is no need to call DeleteOperation. Args: - request (google.cloud.redis_v1.types.UpdateInstanceRequest): + request (Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]): The request object. Request for [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. 
update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -768,7 +768,7 @@ def update_instance(self, return response def upgrade_instance(self, - request: cloud_redis.UpgradeInstanceRequest = None, + request: Union[cloud_redis.UpgradeInstanceRequest, dict] = None, *, name: str = None, redis_version: str = None, @@ -780,7 +780,7 @@ def upgrade_instance(self, specified in the request. Args: - request (google.cloud.redis_v1.types.UpgradeInstanceRequest): + request (Union[google.cloud.redis_v1.types.UpgradeInstanceRequest, dict]): The request object. Request for [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. name (str): @@ -866,7 +866,7 @@ def upgrade_instance(self, return response def import_instance(self, - request: cloud_redis.ImportInstanceRequest = None, + request: Union[cloud_redis.ImportInstanceRequest, dict] = None, *, name: str = None, input_config: cloud_redis.InputConfig = None, @@ -885,7 +885,7 @@ def import_instance(self, few hours, so there is no need to call DeleteOperation. Args: - request (google.cloud.redis_v1.types.ImportInstanceRequest): + request (Union[google.cloud.redis_v1.types.ImportInstanceRequest, dict]): The request object. Request for [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. name (str): @@ -971,7 +971,7 @@ def import_instance(self, return response def export_instance(self, - request: cloud_redis.ExportInstanceRequest = None, + request: Union[cloud_redis.ExportInstanceRequest, dict] = None, *, name: str = None, output_config: cloud_redis.OutputConfig = None, @@ -986,7 +986,7 @@ def export_instance(self, few hours, so there is no need to call DeleteOperation. Args: - request (google.cloud.redis_v1.types.ExportInstanceRequest): + request (Union[google.cloud.redis_v1.types.ExportInstanceRequest, dict]): The request object. Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. 
name (str): @@ -1072,7 +1072,7 @@ def export_instance(self, return response def failover_instance(self, - request: cloud_redis.FailoverInstanceRequest = None, + request: Union[cloud_redis.FailoverInstanceRequest, dict] = None, *, name: str = None, data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, @@ -1085,7 +1085,7 @@ def failover_instance(self, Memorystore for Redis instance. Args: - request (google.cloud.redis_v1.types.FailoverInstanceRequest): + request (Union[google.cloud.redis_v1.types.FailoverInstanceRequest, dict]): The request object. Request for [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. name (str): @@ -1172,7 +1172,7 @@ def failover_instance(self, return response def delete_instance(self, - request: cloud_redis.DeleteInstanceRequest = None, + request: Union[cloud_redis.DeleteInstanceRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1183,7 +1183,7 @@ def delete_instance(self, serving and data is deleted. Args: - request (google.cloud.redis_v1.types.DeleteInstanceRequest): + request (Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]): The request object. Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. name (str): From 271e6dc0ad809173fef5ca3abc2e40147c6ee1ed Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 18 Aug 2021 15:36:54 -0600 Subject: [PATCH 0600/1339] feat(snippetgen): generate mock input for required fields (#941) Generate mock field inputs for all required fields. * For oneofs, the first option is selected. * For enums, the last option is selected (as 0 is often unspecified). * Mock input for string fields that reference resources (`resource_pb2.resource_reference`) is the pattern provided in the protos. 
**TODOs:** - When a resource's pattern is `*` this results in unhelpful strings ```proto message Asset { option (google.api.resource) = { type: "cloudasset.googleapis.com/Asset" pattern: "*" }; ``` ```py request = asset_v1.BatchGetAssetsHistoryRequest( parent="*", ) ``` - Map fields are not yet handled. **Other changes:** * Fields are set like `foo.bar` rather than through dict access `foo["bar"]`. This is because the former allows you to set fields nested more than one field deep `foo.bar.baz`. **Before**: ```py output_config = {} output_config["gcs_destination"]["uri"] = "uri_value" # fails as there is no nested dict ``` **After**: ```py output_config = asset_v1.IamPolicyAnalysisOutputConfig() output_config.gcs_destination.uri = "uri_value" ``` --- .../gapic/samplegen/samplegen.py | 78 +++++++-- .../gapic-generator/gapic/schema/wrappers.py | 112 ++++++++++-- .../templates/examples/feature_fragments.j2 | 18 +- packages/gapic-generator/noxfile.py | 4 +- ..._asset_service_analyze_iam_policy_async.py | 4 + ...ce_analyze_iam_policy_longrunning_async.py | 8 + ...ice_analyze_iam_policy_longrunning_sync.py | 8 + ...1_asset_service_analyze_iam_policy_sync.py | 4 + ..._service_batch_get_assets_history_async.py | 1 + ...t_service_batch_get_assets_history_sync.py | 1 + ...sset_v1_asset_service_create_feed_async.py | 6 + ...asset_v1_asset_service_create_feed_sync.py | 6 + ...sset_v1_asset_service_delete_feed_async.py | 1 + ...asset_v1_asset_service_delete_feed_sync.py | 1 + ...et_v1_asset_service_export_assets_async.py | 5 + ...set_v1_asset_service_export_assets_sync.py | 5 + ...d_asset_v1_asset_service_get_feed_async.py | 1 + ...ed_asset_v1_asset_service_get_feed_sync.py | 1 + ...sset_v1_asset_service_list_assets_async.py | 1 + ...asset_v1_asset_service_list_assets_sync.py | 1 + ...asset_v1_asset_service_list_feeds_async.py | 1 + ..._asset_v1_asset_service_list_feeds_sync.py | 1 + ...t_service_search_all_iam_policies_async.py | 1 + 
...et_service_search_all_iam_policies_sync.py | 1 + ...sset_service_search_all_resources_async.py | 1 + ...asset_service_search_all_resources_sync.py | 1 + ...sset_v1_asset_service_update_feed_async.py | 4 + ...asset_v1_asset_service_update_feed_sync.py | 4 + ...credentials_generate_access_token_async.py | 2 + ..._credentials_generate_access_token_sync.py | 2 + ...iam_credentials_generate_id_token_async.py | 2 + ..._iam_credentials_generate_id_token_sync.py | 2 + ...ials_v1_iam_credentials_sign_blob_async.py | 2 + ...tials_v1_iam_credentials_sign_blob_sync.py | 2 + ...tials_v1_iam_credentials_sign_jwt_async.py | 2 + ...ntials_v1_iam_credentials_sign_jwt_sync.py | 2 + ...2_config_service_v2_create_bucket_async.py | 2 + ...v2_config_service_v2_create_bucket_sync.py | 2 + ...onfig_service_v2_create_exclusion_async.py | 6 + ...config_service_v2_create_exclusion_sync.py | 6 + ..._v2_config_service_v2_create_sink_async.py | 6 + ...g_v2_config_service_v2_create_sink_sync.py | 6 + ..._v2_config_service_v2_create_view_async.py | 2 + ...g_v2_config_service_v2_create_view_sync.py | 2 + ...2_config_service_v2_delete_bucket_async.py | 1 + ...v2_config_service_v2_delete_bucket_sync.py | 1 + ...onfig_service_v2_delete_exclusion_async.py | 1 + ...config_service_v2_delete_exclusion_sync.py | 1 + ..._v2_config_service_v2_delete_sink_async.py | 1 + ...g_v2_config_service_v2_delete_sink_sync.py | 1 + ..._v2_config_service_v2_delete_view_async.py | 1 + ...g_v2_config_service_v2_delete_view_sync.py | 1 + ...g_v2_config_service_v2_get_bucket_async.py | 1 + ...ng_v2_config_service_v2_get_bucket_sync.py | 1 + ...nfig_service_v2_get_cmek_settings_async.py | 1 + ...onfig_service_v2_get_cmek_settings_sync.py | 1 + ...2_config_service_v2_get_exclusion_async.py | 1 + ...v2_config_service_v2_get_exclusion_sync.py | 1 + ...ing_v2_config_service_v2_get_sink_async.py | 1 + ...ging_v2_config_service_v2_get_sink_sync.py | 1 + ...ing_v2_config_service_v2_get_view_async.py | 1 + 
...ging_v2_config_service_v2_get_view_sync.py | 1 + ...v2_config_service_v2_list_buckets_async.py | 1 + ..._v2_config_service_v2_list_buckets_sync.py | 1 + ...config_service_v2_list_exclusions_async.py | 1 + ..._config_service_v2_list_exclusions_sync.py | 1 + ...g_v2_config_service_v2_list_sinks_async.py | 1 + ...ng_v2_config_service_v2_list_sinks_sync.py | 1 + ...g_v2_config_service_v2_list_views_async.py | 1 + ...ng_v2_config_service_v2_list_views_sync.py | 1 + ...config_service_v2_undelete_bucket_async.py | 1 + ..._config_service_v2_undelete_bucket_sync.py | 1 + ...2_config_service_v2_update_bucket_async.py | 1 + ...v2_config_service_v2_update_bucket_sync.py | 1 + ...g_service_v2_update_cmek_settings_async.py | 1 + ...ig_service_v2_update_cmek_settings_sync.py | 1 + ...onfig_service_v2_update_exclusion_async.py | 6 + ...config_service_v2_update_exclusion_sync.py | 6 + ..._v2_config_service_v2_update_sink_async.py | 6 + ...g_v2_config_service_v2_update_sink_sync.py | 6 + ..._v2_config_service_v2_update_view_async.py | 1 + ...g_v2_config_service_v2_update_view_sync.py | 1 + ..._v2_logging_service_v2_delete_log_async.py | 1 + ...g_v2_logging_service_v2_delete_log_sync.py | 1 + ...gging_service_v2_list_log_entries_async.py | 1 + ...ogging_service_v2_list_log_entries_sync.py | 1 + ...g_v2_logging_service_v2_list_logs_async.py | 1 + ...ng_v2_logging_service_v2_list_logs_sync.py | 1 + ...gging_service_v2_tail_log_entries_async.py | 3 +- ...ogging_service_v2_tail_log_entries_sync.py | 3 +- ...ging_service_v2_write_log_entries_async.py | 4 + ...gging_service_v2_write_log_entries_sync.py | 4 + ...rics_service_v2_create_log_metric_async.py | 6 + ...trics_service_v2_create_log_metric_sync.py | 6 + ...rics_service_v2_delete_log_metric_async.py | 1 + ...trics_service_v2_delete_log_metric_sync.py | 1 + ...metrics_service_v2_get_log_metric_async.py | 1 + ..._metrics_service_v2_get_log_metric_sync.py | 1 + ...trics_service_v2_list_log_metrics_async.py | 1 + 
...etrics_service_v2_list_log_metrics_sync.py | 1 + ...rics_service_v2_update_log_metric_async.py | 6 + ...trics_service_v2_update_log_metric_sync.py | 6 + ...is_v1_cloud_redis_create_instance_async.py | 8 + ...dis_v1_cloud_redis_create_instance_sync.py | 8 + ...is_v1_cloud_redis_delete_instance_async.py | 1 + ...dis_v1_cloud_redis_delete_instance_sync.py | 1 + ...is_v1_cloud_redis_export_instance_async.py | 5 + ...dis_v1_cloud_redis_export_instance_sync.py | 5 + ..._v1_cloud_redis_failover_instance_async.py | 1 + ...s_v1_cloud_redis_failover_instance_sync.py | 1 + ...redis_v1_cloud_redis_get_instance_async.py | 1 + ..._redis_v1_cloud_redis_get_instance_sync.py | 1 + ...is_v1_cloud_redis_import_instance_async.py | 5 + ...dis_v1_cloud_redis_import_instance_sync.py | 5 + ...dis_v1_cloud_redis_list_instances_async.py | 1 + ...edis_v1_cloud_redis_list_instances_sync.py | 1 + ...is_v1_cloud_redis_update_instance_async.py | 6 + ...dis_v1_cloud_redis_update_instance_sync.py | 6 + ...s_v1_cloud_redis_upgrade_instance_async.py | 2 + ...is_v1_cloud_redis_upgrade_instance_sync.py | 2 + ...llusca_v1_snippets_list_resources_async.py | 1 + ...ollusca_v1_snippets_list_resources_sync.py | 1 + ...v1_snippets_method_bidi_streaming_async.py | 5 +- ..._v1_snippets_method_bidi_streaming_sync.py | 5 +- ...v1_snippets_method_lro_signatures_async.py | 9 + ..._v1_snippets_method_lro_signatures_sync.py | 9 + ..._v1_snippets_method_one_signature_async.py | 9 + ...a_v1_snippets_method_one_signature_sync.py | 9 + ..._snippets_method_server_streaming_async.py | 9 + ...1_snippets_method_server_streaming_sync.py | 9 + ...ollusca_v1_snippets_one_of_method_async.py | 48 +++++ ...mollusca_v1_snippets_one_of_method_sync.py | 48 +++++ .../tests/snippetgen/snippets.proto | 48 ++++- .../tests/unit/samplegen/common_types.py | 49 +++++- .../samplegen/golden_snippets/sample_basic.py | 7 +- .../golden_snippets/sample_basic_async.py | 7 +- .../sample_basic_unflattenable.py | 7 +- 
.../sample_basic_void_method.py | 7 +- .../tests/unit/samplegen/test_integration.py | 19 +- .../tests/unit/samplegen/test_samplegen.py | 164 +++++++++++++++++- .../tests/unit/samplegen/test_template.py | 110 +++++++++--- .../tests/unit/schema/wrappers/test_field.py | 69 ++++++++ .../unit/schema/wrappers/test_message.py | 32 ++++ .../unit/schema/wrappers/test_service.py | 85 ++++++++- 144 files changed, 1155 insertions(+), 107 deletions(-) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 876f0b9d0ad5..b19ce4fa8728 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -202,7 +202,7 @@ def build( f"Resource {resource_typestr} has no pattern with params: {attr_name_str}" ) - return cls(base=base, body=attrs, single=None, pattern=pattern) + return cls(base=base, body=attrs, single=None, pattern=pattern,) @dataclasses.dataclass @@ -293,17 +293,22 @@ def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): sample["module_name"] = api_schema.naming.versioned_module_name sample["module_namespace"] = api_schema.naming.module_namespace + service = api_schema.services[sample["service"]] + # Assume the gRPC transport if the transport is not specified - sample.setdefault("transport", api.TRANSPORT_GRPC) + transport = sample.setdefault("transport", api.TRANSPORT_GRPC) - if sample["transport"] == api.TRANSPORT_GRPC_ASYNC: - sample["client_name"] = api_schema.services[sample["service"] - ].async_client_name - else: - sample["client_name"] = api_schema.services[sample["service"]].client_name + is_async = transport == api.TRANSPORT_GRPC_ASYNC + 
sample["client_name"] = service.async_client_name if is_async else service.client_name - # the type of the request object passed to the rpc e.g, `ListRequest` - sample["request_type"] = rpc.input.ident.name + # the MessageType of the request object passed to the rpc e.g, `ListRequest` + sample["request_type"] = rpc.input + + # If no request was specified in the config + # Add reasonable default values as placeholders + if "request" not in sample: + sample["request"] = generate_request_object( + api_schema, service, rpc.input) # If no response was specified in the config # Add reasonable defaults depending on the type of the sample @@ -940,6 +945,58 @@ def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str yield spec +def generate_request_object(api_schema: api.API, service: wrappers.Service, message: wrappers.MessageType, field_name_prefix: str = ""): + """Generate dummy input for a given message. + + Args: + api_schema (api.API): The schema that defines the API. + service (wrappers.Service): The service object the message belongs to. + message (wrappers.MessageType): The message to generate a request object for. + field_name_prefix (str): A prefix to attach to the field name in the request. + + Returns: + List[Dict[str, Any]]: A list of dicts that can be turned into TransformedRequests. 
+ """ + request: List[Dict[str, Any]] = [] + + request_fields: List[wrappers.Field] = [] + + # Choose the first option for each oneof + selected_oneofs: List[wrappers.Field] = [oneof_fields[0] + for oneof_fields in message.oneof_fields().values()] + request_fields = selected_oneofs + message.required_fields + + for field in request_fields: + # TransformedRequest expects nested fields to be referenced like + # `destination.input_config.name` + field_name = ".".join([field_name_prefix, field.name]).lstrip('.') + + # TODO(busunkim): Properly handle map fields + if field.is_primitive: + placeholder_value = field.mock_value_original_type + # If this field identifies a resource use the resource path + if service.resource_messages_dict.get(field.resource_reference): + placeholder_value = service.resource_messages_dict[ + field.resource_reference].resource_path + request.append({"field": field_name, "value": placeholder_value}) + elif field.enum: + # Choose the last enum value in the list since index 0 is often "unspecified" + request.append( + {"field": field_name, "value": field.enum.values[-1].name}) + else: + # This is a message type, recurse + # TODO(busunkim): Some real world APIs have + # request objects are recursive. + # Reference `Field.mock_value` to ensure + # this always terminates. + request += generate_request_object( + api_schema, service, field.type, + field_name_prefix=field_name, + ) + + return request + + def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, Any], None, None]: """Given an API, generate basic sample specs for each method. 
@@ -964,8 +1021,7 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A "sample_type": "standalone", "rpc": rpc_name, "transport": transport, - "request": [], - # response is populated in `preprocess_sample` + # `request` and `response` is populated in `preprocess_sample` "service": f"{api_schema.naming.proto_package}.{service_name}", "region_tag": region_tag, "description": f"Snippet for {utils.to_snake_case(rpc_name)}" diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 0e5945892144..8c2313f8a7fb 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -31,7 +31,7 @@ import dataclasses import re from itertools import chain -from typing import (cast, Dict, FrozenSet, Iterable, List, Mapping, +from typing import (Any, cast, Dict, FrozenSet, Iterable, List, Mapping, ClassVar, Optional, Sequence, Set, Tuple, Union) from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 @@ -89,6 +89,17 @@ def map(self) -> bool: """Return True if this field is a map, False otherwise.""" return bool(self.repeated and self.message and self.message.map) + @utils.cached_property + def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, List[Any], None]: + answer = self.primitive_mock() or None + + # If this is a repeated field, then the mock answer should + # be a list. + if self.repeated: + answer = [answer] + + return answer + @utils.cached_property def mock_value(self) -> str: visited_fields: Set["Field"] = set() @@ -100,25 +111,13 @@ def mock_value(self) -> str: return answer - def inner_mock(self, stack, visited_fields): + def inner_mock(self, stack, visited_fields) -> str: """Return a repr of a valid, usually truthy mock value.""" # For primitives, send a truthy value computed from the # field name. 
answer = 'None' if isinstance(self.type, PrimitiveType): - if self.type.python_type == bool: - answer = 'True' - elif self.type.python_type == str: - answer = f"'{self.name}_value'" - elif self.type.python_type == bytes: - answer = f"b'{self.name}_blob'" - elif self.type.python_type == int: - answer = f'{sum([ord(i) for i in self.name])}' - elif self.type.python_type == float: - answer = f'0.{sum([ord(i) for i in self.name])}' - else: # Impossible; skip coverage checks. - raise TypeError('Unrecognized PrimitiveType. This should ' - 'never happen; please file an issue.') + answer = self.primitive_mock_as_str() # If this is an enum, select the first truthy value (or the zero # value if nothing else exists). @@ -158,6 +157,45 @@ def inner_mock(self, stack, visited_fields): # Done; return the mock value. return answer + def primitive_mock(self) -> Union[bool, str, bytes, int, float, List[Any], None]: + """Generate a valid mock for a primitive type. This function + returns the original (Python) type. + """ + answer: Union[bool, str, bytes, int, float, List[Any], None] = None + + if not isinstance(self.type, PrimitiveType): + raise TypeError(f"'inner_mock_as_original_type' can only be used for" + f"PrimitiveType, but type is {self.type}") + + else: + if self.type.python_type == bool: + answer = True + elif self.type.python_type == str: + answer = f"{self.name}_value" + elif self.type.python_type == bytes: + answer = bytes(f"{self.name}_blob", encoding="utf-8") + elif self.type.python_type == int: + answer = sum([ord(i) for i in self.name]) + elif self.type.python_type == float: + name_sum = sum([ord(i) for i in self.name]) + answer = name_sum * pow(10, -1 * len(str(name_sum))) + else: # Impossible; skip coverage checks. + raise TypeError('Unrecognized PrimitiveType. 
This should ' + 'never happen; please file an issue.') + + return answer + + def primitive_mock_as_str(self) -> str: + """Like primitive mock, but return the mock as a string.""" + answer = self.primitive_mock() + + if isinstance(answer, str): + answer = f"'{answer}'" + else: + answer = str(answer) + + return answer + @property def proto_type(self) -> str: """Return the proto type constant to be used in templates.""" @@ -186,6 +224,17 @@ def required(self) -> bool: return (field_behavior_pb2.FieldBehavior.Value('REQUIRED') in self.options.Extensions[field_behavior_pb2.field_behavior]) + @property + def resource_reference(self) -> Optional[str]: + """Return a resource reference type if it exists. + + This is only applicable for string fields. + Example: "translate.googleapis.com/Glossary" + """ + return (self.options.Extensions[resource_pb2.resource_reference].type + or self.options.Extensions[resource_pb2.resource_reference].child_type + or None) + @utils.cached_property def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: """Return the type of this field.""" @@ -286,6 +335,13 @@ def oneof_fields(self, include_optional=False): return oneof_fields + @utils.cached_property + def required_fields(self) -> Sequence['Field']: + required_fields = [ + field for field in self.fields.values() if field.required] + + return required_fields + @utils.cached_property def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: answer = tuple( @@ -353,6 +409,11 @@ def resource_type(self) -> Optional[str]: resource = self.options.Extensions[resource_pb2.resource] return resource.type[resource.type.find('/') + 1:] if resource else None + @property + def resource_type_full_path(self) -> Optional[str]: + resource = self.options.Extensions[resource_pb2.resource] + return resource.type if resource else None + @property def resource_path_args(self) -> Sequence[str]: return self.PATH_ARG_RE.findall(self.resource_path or '') @@ -1199,6 +1260,27 @@ def 
gen_indirect_resources_used(message): ) ) + @utils.cached_property + def resource_messages_dict(self) -> Dict[str, MessageType]: + """Returns a dict from resource reference to + the message type. This *includes* the common resource messages. + + Returns: + Dict[str, MessageType]: A mapping from resource path + string to the corresponding MessageType. + `{"locations.googleapis.com/Location": MessageType(...)}` + """ + service_resource_messages = { + r.resource_type_full_path: r for r in self.resource_messages} + + # Add common resources + service_resource_messages.update( + (resource_path, resource.message_type) + for resource_path, resource in self.common_resources.items() + ) + + return service_resource_messages + @utils.cached_property def any_client_streaming(self) -> bool: return any(m.client_streaming for m in self.methods.values()) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 502d68fa607f..eda14df7e467 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -126,18 +126,19 @@ with open({{ print_string_formatting(statement["filename"])|trim }}, "wb") as f: {% macro render_request_attr(base_name, attr) %} {# Note: python code will have manipulated the value #} {# to be the correct enum from the right module, if necessary. 
#} -{# Python is also responsible for verifying that each input parameter is unique,#} +{# Python is also responsible for verifying that each input parameter is unique, #} {# no parameter is a reserved keyword #} {% if attr.input_parameter %} + # {{ attr.input_parameter }} = {{ attr.value }} {% if attr.value_is_file %} with open({{ attr.input_parameter }}, "rb") as f: - {{ base_name }}["{{ attr.field }}"] = f.read() + {{ base_name }}.{{ attr.field }} = f.read() {% else %} -{{ base_name }}["{{ attr.field }}"] = {{ attr.input_parameter }} +{{ base_name }}.{{ attr.field }} = {{ attr.input_parameter }} {% endif %} {% else %} -{{ base_name }}["{{ attr.field }}"] = {{ attr.value }} +{{ base_name }}.{{ attr.field }} = {{ attr.value }} {% endif %} {% endmacro %} @@ -159,16 +160,17 @@ client = {{ module_name }}.{{ client_name }}() {{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) {% endwith %} {% else %}{# End resource name construction #} -{{ parameter_block.base }} = {} +{{ parameter_block.base }} = {{ module_name }}.{{ request_type.get_field(parameter_block.base).type.name }}() {% for attr in parameter_block.body %} -{{ render_request_attr(parameter_block.base, attr) }} +{{ render_request_attr(parameter_block.base, attr) -}} {% endfor %} + {% endif %} {% endfor %} {% if not full_request.flattenable %} -request = {{ module_name }}.{{ request_type }}( +request = {{ module_name }}.{{ request_type.ident.name }}( {% for parameter in full_request.request_list %} - {{ parameter.base }}={{ parameter.base if parameter.body else parameter.single }}, + {{ parameter.base }}={{ parameter.base if parameter.body else parameter.single.value }}, {% endfor %} ) {% endif %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 5296514a68e6..e270f0423ef5 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -50,7 +50,7 @@ def unit(session): "--cov-report=term", 
"--cov-fail-under=100", path.join("tests", "unit"), - ] + ] ), ) @@ -308,7 +308,7 @@ def snippetgen(session): session.run( "py.test", - "--quiet", + "-vv", "tests/snippetgen" ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py index e5c7daddc003..633f8b046707 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py @@ -34,7 +34,11 @@ async def sample_analyze_iam_policy(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + request = asset_v1.AnalyzeIamPolicyRequest( + analysis_query=analysis_query, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py index 81abec24034e..f3a40e1568f4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py @@ -34,7 +34,15 @@ async def sample_analyze_iam_policy_longrunning(): 
client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + + output_config = asset_v1.IamPolicyAnalysisOutputConfig() + output_config.gcs_destination.uri = "uri_value" + request = asset_v1.AnalyzeIamPolicyLongrunningRequest( + analysis_query=analysis_query, + output_config=output_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py index c59ff3936a9d..6bfec27de932 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py @@ -34,7 +34,15 @@ def sample_analyze_iam_policy_longrunning(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + + output_config = asset_v1.IamPolicyAnalysisOutputConfig() + output_config.gcs_destination.uri = "uri_value" + request = asset_v1.AnalyzeIamPolicyLongrunningRequest( + analysis_query=analysis_query, + output_config=output_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py index 188cc12bca01..22017d9960ed 100644 
--- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py @@ -34,7 +34,11 @@ def sample_analyze_iam_policy(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + request = asset_v1.AnalyzeIamPolicyRequest( + analysis_query=analysis_query, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py index 29da9244a8ff..f2f051da59e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py @@ -35,6 +35,7 @@ async def sample_batch_get_assets_history(): # Initialize request argument(s) request = asset_v1.BatchGetAssetsHistoryRequest( + parent="*", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py index 8fd8b83c10f6..ed2d78e9e695 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py @@ -35,6 +35,7 @@ def sample_batch_get_assets_history(): # Initialize request argument(s) request = asset_v1.BatchGetAssetsHistoryRequest( + parent="*", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py index e6a4c3b8dd24..eb4291971443 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py @@ -34,7 +34,13 @@ async def sample_create_feed(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + request = asset_v1.CreateFeedRequest( + parent="parent_value", + feed_id="feed_id_value", + feed=feed, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py index c41f2d928911..c5a31c080d1a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py @@ -34,7 +34,13 @@ def sample_create_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + request = asset_v1.CreateFeedRequest( + parent="parent_value", + feed_id="feed_id_value", + feed=feed, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py index 8e10aedf8d9c..793a1d28acec 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py @@ -35,6 +35,7 @@ async def sample_delete_feed(): # Initialize request argument(s) request = asset_v1.DeleteFeedRequest( + name="projects/{project}/feeds/{feed}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py index 6f28f8c5de1c..f12a42bd1efc 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py @@ -35,6 +35,7 @@ def sample_delete_feed(): # Initialize 
request argument(s) request = asset_v1.DeleteFeedRequest( + name="projects/{project}/feeds/{feed}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py index 776264c0ddd5..00c75331d986 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py @@ -34,7 +34,12 @@ async def sample_export_assets(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + output_config = asset_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + request = asset_v1.ExportAssetsRequest( + parent="*", + output_config=output_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py index e274800452bb..e4548eed7d4c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py @@ -34,7 +34,12 @@ def sample_export_assets(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + output_config = asset_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + 
request = asset_v1.ExportAssetsRequest( + parent="*", + output_config=output_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py index 3c10ab1e1835..c4078151b7d6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -35,6 +35,7 @@ async def sample_get_feed(): # Initialize request argument(s) request = asset_v1.GetFeedRequest( + name="projects/{project}/feeds/{feed}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index 63dbf0c90ce9..cb53327cce8d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -35,6 +35,7 @@ def sample_get_feed(): # Initialize request argument(s) request = asset_v1.GetFeedRequest( + name="projects/{project}/feeds/{feed}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py index 3f79199a3359..ff87a684fe1f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py @@ -35,6 +35,7 @@ async def sample_list_assets(): # Initialize request argument(s) request = asset_v1.ListAssetsRequest( + parent="*", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py index 78dfb91b9e42..47ed9d149b6c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py @@ -35,6 +35,7 @@ def sample_list_assets(): # Initialize request argument(s) request = asset_v1.ListAssetsRequest( + parent="*", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py index a15ad4a11353..396b767010b4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py @@ -35,6 +35,7 @@ async def sample_list_feeds(): # Initialize request argument(s) request = asset_v1.ListFeedsRequest( + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py index 2ad102f795aa..816523c64bc9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py @@ -35,6 +35,7 @@ def sample_list_feeds(): # Initialize request argument(s) request = asset_v1.ListFeedsRequest( + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py index b99fc9ac643a..58fdd4b9bdf8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py @@ -35,6 +35,7 @@ async def sample_search_all_iam_policies(): # Initialize request argument(s) request = asset_v1.SearchAllIamPoliciesRequest( + scope="scope_value", ) 
# Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py index e142ae2fcbb5..dbe303e7fbf5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py @@ -35,6 +35,7 @@ def sample_search_all_iam_policies(): # Initialize request argument(s) request = asset_v1.SearchAllIamPoliciesRequest( + scope="scope_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py index 2fc426361c37..f0021cafdc41 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py @@ -35,6 +35,7 @@ async def sample_search_all_resources(): # Initialize request argument(s) request = asset_v1.SearchAllResourcesRequest( + scope="scope_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py index 85d6799a352d..ac0b998a0607 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py @@ -35,6 +35,7 @@ def sample_search_all_resources(): # Initialize request argument(s) request = asset_v1.SearchAllResourcesRequest( + scope="scope_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py index 2386c5443df2..12dd103c4ba5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py @@ -34,7 +34,11 @@ async def sample_update_feed(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + request = asset_v1.UpdateFeedRequest( + feed=feed, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py index 4dc4915cf53c..3402c41d22bb 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py @@ -34,7 +34,11 @@ def sample_update_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + request = asset_v1.UpdateFeedRequest( + feed=feed, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index de3341a6901c..b08ce8febc83 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -35,6 +35,8 @@ async def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( + name="projects/{project}/serviceAccounts/{service_account}", + scope=['scope_value'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index 24e4484bfcbf..357d62c459f6 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -35,6 +35,8 @@ def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( + name="projects/{project}/serviceAccounts/{service_account}", + scope=['scope_value'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py index 4417fcb6bbfe..d83bb8d93308 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -35,6 +35,8 @@ async def sample_generate_id_token(): # Initialize request argument(s) request = credentials_v1.GenerateIdTokenRequest( + name="projects/{project}/serviceAccounts/{service_account}", + audience="audience_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index 
e23294e55977..da92452b3d09 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -35,6 +35,8 @@ def sample_generate_id_token(): # Initialize request argument(s) request = credentials_v1.GenerateIdTokenRequest( + name="projects/{project}/serviceAccounts/{service_account}", + audience="audience_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py index 3732426ff813..e71f7d3256fa 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -35,6 +35,8 @@ async def sample_sign_blob(): # Initialize request argument(s) request = credentials_v1.SignBlobRequest( + name="projects/{project}/serviceAccounts/{service_account}", + payload=b'payload_blob', ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index 51312014ffda..40bd3ae56173 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -35,6 +35,8 @@ def sample_sign_blob(): # Initialize request argument(s) request = credentials_v1.SignBlobRequest( + name="projects/{project}/serviceAccounts/{service_account}", + payload=b'payload_blob', ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py index ce4303485a73..8ca30833076d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -35,6 +35,8 @@ async def sample_sign_jwt(): # Initialize request argument(s) request = credentials_v1.SignJwtRequest( + name="projects/{project}/serviceAccounts/{service_account}", + payload="payload_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index 22b860091731..8dc778a594db 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -35,6 +35,8 @@ def sample_sign_jwt(): # Initialize request argument(s) request = credentials_v1.SignJwtRequest( + name="projects/{project}/serviceAccounts/{service_account}", + payload="payload_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py index 4cb0f540ec0b..9425509149e0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -35,6 +35,8 @@ async def sample_create_bucket(): # Initialize request argument(s) request = logging_v2.CreateBucketRequest( + parent="projects/{project}/locations/{location}/buckets/{bucket}", + bucket_id="bucket_id_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index 004ee0a4865d..490adb5296cd 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -35,6 +35,8 @@ def sample_create_bucket(): # Initialize request argument(s) request = logging_v2.CreateBucketRequest( + parent="projects/{project}/locations/{location}/buckets/{bucket}", + bucket_id="bucket_id_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py index 1d0523610fcc..c41f337710c6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -34,7 +34,13 @@ async def sample_create_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + request = logging_v2.CreateExclusionRequest( + parent="projects/{project}/exclusions/{exclusion}", + exclusion=exclusion, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index 
ce102ad9916f..ac7e04733cac 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -34,7 +34,13 @@ def sample_create_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + request = logging_v2.CreateExclusionRequest( + parent="projects/{project}/exclusions/{exclusion}", + exclusion=exclusion, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py index 4678b4314fca..0affba0c4bab 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -34,7 +34,13 @@ async def sample_create_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + request = logging_v2.CreateSinkRequest( + parent="projects/{project}/sinks/{sink}", + sink=sink, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index f47adb78d36a..8bc2129be0ea 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -34,7 +34,13 @@ def sample_create_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + request = logging_v2.CreateSinkRequest( + parent="projects/{project}/sinks/{sink}", + sink=sink, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py index 4fbcdd61a87e..a9ced73e6ac2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py @@ -35,6 +35,8 @@ async def sample_create_view(): # Initialize request argument(s) request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py index eefdb75b888f..bbb9033c5cd5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py @@ -35,6 +35,8 @@ def sample_create_view(): # Initialize request argument(s) request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py index 8ca8c4748e8f..ed6a9173a48a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py @@ -35,6 +35,7 @@ async def sample_delete_bucket(): # Initialize request argument(s) request = logging_v2.DeleteBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py index 9616621b4e92..bf107ae157bf 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py @@ -35,6 +35,7 @@ def sample_delete_bucket(): # Initialize request argument(s) request = logging_v2.DeleteBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py index 0e8df5d06c7d..73cf51dd59da 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py @@ -35,6 +35,7 @@ async def sample_delete_exclusion(): # Initialize request argument(s) request = logging_v2.DeleteExclusionRequest( + name="projects/{project}/exclusions/{exclusion}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py index 0268e93f28c7..d1ddf4aeccad 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py @@ -35,6 +35,7 @@ def sample_delete_exclusion(): # Initialize request argument(s) request = logging_v2.DeleteExclusionRequest( + name="projects/{project}/exclusions/{exclusion}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py index d1aaaf9ca5b3..fa0ea94ec535 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py @@ -35,6 +35,7 @@ async def sample_delete_sink(): # Initialize request argument(s) request = logging_v2.DeleteSinkRequest( + sink_name="projects/{project}/sinks/{sink}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py index 6bdc8b65dc93..c432aa6709b8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py @@ -35,6 +35,7 @@ def sample_delete_sink(): # Initialize request argument(s) request = 
logging_v2.DeleteSinkRequest( + sink_name="projects/{project}/sinks/{sink}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py index e9b1a1255ce4..ec9ce8c4ad45 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py @@ -35,6 +35,7 @@ async def sample_delete_view(): # Initialize request argument(s) request = logging_v2.DeleteViewRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py index a27f4604d361..f866fb6b3ae1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py @@ -35,6 +35,7 @@ def sample_delete_view(): # Initialize request argument(s) request = logging_v2.DeleteViewRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py index bf65579867c2..43ae4b510264 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -35,6 +35,7 @@ async def sample_get_bucket(): # Initialize request argument(s) request = logging_v2.GetBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index 80470bf3c4da..b729d8de740c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -35,6 +35,7 @@ def sample_get_bucket(): # Initialize request argument(s) request = logging_v2.GetBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py index ece79ce221a4..bc085f29b579 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -35,6 +35,7 @@ async def sample_get_cmek_settings(): # Initialize request argument(s) request = logging_v2.GetCmekSettingsRequest( + name="projects/{project}/cmekSettings", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index 19cef4493424..b838221d5b02 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -35,6 +35,7 @@ def sample_get_cmek_settings(): # Initialize request argument(s) request = logging_v2.GetCmekSettingsRequest( + name="projects/{project}/cmekSettings", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py index 9ce42101f1b4..d0766aa6a96c 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -35,6 +35,7 @@ async def sample_get_exclusion(): # Initialize request argument(s) request = logging_v2.GetExclusionRequest( + name="projects/{project}/exclusions/{exclusion}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index 7712065b80e7..4f025180ef9c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -35,6 +35,7 @@ def sample_get_exclusion(): # Initialize request argument(s) request = logging_v2.GetExclusionRequest( + name="projects/{project}/exclusions/{exclusion}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py index b1a69a1d6fd2..bc8889c48be2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -35,6 +35,7 @@ async def sample_get_sink(): # Initialize request argument(s) request = logging_v2.GetSinkRequest( + sink_name="projects/{project}/sinks/{sink}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index e1d4ed0b813d..54360300ea26 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -35,6 +35,7 @@ def sample_get_sink(): # Initialize request argument(s) request = logging_v2.GetSinkRequest( + sink_name="projects/{project}/sinks/{sink}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py index 5594e7ac2335..c280eb79b361 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -35,6 +35,7 @@ async def sample_get_view(): # Initialize request argument(s) request = logging_v2.GetViewRequest( + 
name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index 4865050cd9e2..ca94c6088b20 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -35,6 +35,7 @@ def sample_get_view(): # Initialize request argument(s) request = logging_v2.GetViewRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py index ef7e8ba87adf..09849854319a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -35,6 +35,7 @@ async def sample_list_buckets(): # Initialize request argument(s) request = logging_v2.ListBucketsRequest( + parent="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index 2c1ea9332de4..a6b0fc001ae1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -35,6 +35,7 @@ def sample_list_buckets(): # Initialize request argument(s) request = logging_v2.ListBucketsRequest( + parent="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py index c3d7f9165bbe..fa305a7f19fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -35,6 +35,7 @@ async def sample_list_exclusions(): # Initialize request argument(s) request = logging_v2.ListExclusionsRequest( + parent="projects/{project}/exclusions/{exclusion}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index 255e4851e471..c47b051b0ea3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -35,6 +35,7 @@ def sample_list_exclusions(): # Initialize request argument(s) request = logging_v2.ListExclusionsRequest( + parent="projects/{project}/exclusions/{exclusion}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py index 98d31d25353d..0f44922bf669 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -35,6 +35,7 @@ async def sample_list_sinks(): # Initialize request argument(s) request = logging_v2.ListSinksRequest( + parent="projects/{project}/sinks/{sink}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index d911ed8ed1f1..81dc1c7dbf4c 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -35,6 +35,7 @@ def sample_list_sinks(): # Initialize request argument(s) request = logging_v2.ListSinksRequest( + parent="projects/{project}/sinks/{sink}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py index 7a24536f99e8..13ebc352954d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py @@ -35,6 +35,7 @@ async def sample_list_views(): # Initialize request argument(s) request = logging_v2.ListViewsRequest( + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py index b87a9315565c..a61e85641182 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py @@ -35,6 +35,7 @@ def sample_list_views(): # Initialize request argument(s) request = logging_v2.ListViewsRequest( + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py index c0a8f1efa448..badbebdd6a9a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py @@ -35,6 +35,7 @@ async def sample_undelete_bucket(): # Initialize request argument(s) request = logging_v2.UndeleteBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py index 8a4968c9f734..783b187d4741 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py @@ -35,6 +35,7 @@ def sample_undelete_bucket(): # Initialize request argument(s) 
request = logging_v2.UndeleteBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py index 423986e3fc24..4343a258b4fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ -35,6 +35,7 @@ async def sample_update_bucket(): # Initialize request argument(s) request = logging_v2.UpdateBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index 4b6a11c7174a..a2370594b32b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -35,6 +35,7 @@ def sample_update_bucket(): # Initialize request argument(s) request = logging_v2.UpdateBucketRequest( + name="projects/{project}/locations/{location}/buckets/{bucket}", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py index 67a9e4f2bcd0..6c868ad0639d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py @@ -35,6 +35,7 @@ async def sample_update_cmek_settings(): # Initialize request argument(s) request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py index 568a350a7bc5..9b7f34e9d10a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py @@ -35,6 +35,7 @@ def sample_update_cmek_settings(): # Initialize request argument(s) request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py index 7f13ad361eb5..7ed53afeb727 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -34,7 +34,13 @@ async def sample_update_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + request = logging_v2.UpdateExclusionRequest( + name="projects/{project}/exclusions/{exclusion}", + exclusion=exclusion, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index bc48f0654aee..6adacd542a2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -34,7 +34,13 @@ def sample_update_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + request = logging_v2.UpdateExclusionRequest( + name="projects/{project}/exclusions/{exclusion}", + exclusion=exclusion, ) # Make the request diff 
--git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py index a5d122924fba..03c6b96b2450 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -34,7 +34,13 @@ async def sample_update_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + request = logging_v2.UpdateSinkRequest( + sink_name="projects/{project}/sinks/{sink}", + sink=sink, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index 773e4399859c..3bc9c59e336d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -34,7 +34,13 @@ def sample_update_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + request = logging_v2.UpdateSinkRequest( + 
sink_name="projects/{project}/sinks/{sink}", + sink=sink, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py index 0eda6b705cd2..e2a1f97e089d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py @@ -35,6 +35,7 @@ async def sample_update_view(): # Initialize request argument(s) request = logging_v2.UpdateViewRequest( + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py index bd1ad230f794..fb5811deaddd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py @@ -35,6 +35,7 @@ def sample_update_view(): # Initialize request argument(s) request = logging_v2.UpdateViewRequest( + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py index b317c8d26c9f..70b50994f543 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py @@ -35,6 +35,7 @@ async def sample_delete_log(): # Initialize request argument(s) request = logging_v2.DeleteLogRequest( + log_name="projects/{project}/logs/{log}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py index 0470d72af8a4..357ee19596d0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py @@ -35,6 +35,7 @@ def sample_delete_log(): # Initialize request argument(s) request = logging_v2.DeleteLogRequest( + log_name="projects/{project}/logs/{log}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py index 771ba15974c0..dc2f62c7d9b2 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -35,6 +35,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( + resource_names="projects/{project}/logs/{log}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index 79aa53d23d75..c1a6455f6dc1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -35,6 +35,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( + resource_names="projects/{project}/logs/{log}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py index f9b2685a7dcb..a0e119a9266b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -35,6 +35,7 @@ async def sample_list_logs(): # Initialize request argument(s) request = logging_v2.ListLogsRequest( + parent="projects/{project}/logs/{log}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 2515bc936c15..13a18b0ebdbc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -35,6 +35,7 @@ def sample_list_logs(): # Initialize request argument(s) request = logging_v2.ListLogsRequest( + parent="projects/{project}/logs/{log}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 6ee5862d94ae..3202818c3f4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -35,10 +35,11 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = 
logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value'], ) # Make the request - stream = await client.tail_log_entries([]) + stream = await client.tail_log_entries([resource_names=['resource_names_value']]) async for response in stream: print("{}".format(response)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index 442a1f3cac2f..909a8e8aa2ce 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -35,10 +35,11 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value'], ) # Make the request - stream = client.tail_log_entries([]) + stream = client.tail_log_entries([resource_names=['resource_names_value']]) for response in stream: print("{}".format(response)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py index da4353446d40..1a6981c740d5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py @@ -34,7 +34,11 @@ async def sample_write_log_entries(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + request = logging_v2.WriteLogEntriesRequest( + entries=entries, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py index a74258a1deb0..fdce118737f4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py @@ -34,7 +34,11 @@ def sample_write_log_entries(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + request = logging_v2.WriteLogEntriesRequest( + entries=entries, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py index dc77adacf59c..02f97a044b13 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -34,7 +34,13 @@ async def sample_create_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + request = logging_v2.CreateLogMetricRequest( + parent="projects/{project}/metrics/{metric}", + metric=metric, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index 61131f6bdce5..0f2c2824985d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -34,7 +34,13 @@ def sample_create_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + request = logging_v2.CreateLogMetricRequest( + parent="projects/{project}/metrics/{metric}", + metric=metric, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py index 939087d1912b..d0cc9d1c89b5 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py @@ -35,6 +35,7 @@ async def sample_delete_log_metric(): # Initialize request argument(s) request = logging_v2.DeleteLogMetricRequest( + metric_name="projects/{project}/metrics/{metric}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py index 8a0451c31952..e82eff3ea790 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py @@ -35,6 +35,7 @@ def sample_delete_log_metric(): # Initialize request argument(s) request = logging_v2.DeleteLogMetricRequest( + metric_name="projects/{project}/metrics/{metric}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py index 64eb3f59f411..effabeb34f49 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -35,6 +35,7 @@ async def sample_get_log_metric(): # Initialize request argument(s) request = logging_v2.GetLogMetricRequest( + metric_name="projects/{project}/metrics/{metric}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index 7115ee688b0d..d49f387ac18c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -35,6 +35,7 @@ def sample_get_log_metric(): # Initialize request argument(s) request = logging_v2.GetLogMetricRequest( + metric_name="projects/{project}/metrics/{metric}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py index 9acfc76ee897..23a495c16386 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -35,6 +35,7 @@ async def 
sample_list_log_metrics(): # Initialize request argument(s) request = logging_v2.ListLogMetricsRequest( + parent="projects/{project}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index 702a2867113c..dcc0784eda3d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -35,6 +35,7 @@ def sample_list_log_metrics(): # Initialize request argument(s) request = logging_v2.ListLogMetricsRequest( + parent="projects/{project}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py index c25de0cd5fab..080789a8afeb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -34,7 +34,13 @@ async def sample_update_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + request = 
logging_v2.UpdateLogMetricRequest( + metric_name="projects/{project}/metrics/{metric}", + metric=metric, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index 89167019acfa..1c5ce5af9d31 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -34,7 +34,13 @@ def sample_update_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + request = logging_v2.UpdateLogMetricRequest( + metric_name="projects/{project}/metrics/{metric}", + metric=metric, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py index dbfed3042be9..b3705e3af78f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -34,7 +34,15 @@ async def sample_create_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + instance = 
redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + request = redis_v1.CreateInstanceRequest( + parent="projects/{project}/locations/{location}", + instance_id="instance_id_value", + instance=instance, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index 8b874ba3c965..16f1ed86af29 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -34,7 +34,15 @@ def sample_create_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + request = redis_v1.CreateInstanceRequest( + parent="projects/{project}/locations/{location}", + instance_id="instance_id_value", + instance=instance, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py index 2a706ace319a..58ad65f23451 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py 
@@ -35,6 +35,7 @@ async def sample_delete_instance(): # Initialize request argument(s) request = redis_v1.DeleteInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index f2bfcef90559..85ea9b2e7005 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -35,6 +35,7 @@ def sample_delete_instance(): # Initialize request argument(s) request = redis_v1.DeleteInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py index 9cbe0ee55254..01a418c516c2 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py @@ -34,7 +34,12 @@ async def sample_export_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + output_config = redis_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + request = redis_v1.ExportInstanceRequest( + 
name="name_value", + output_config=output_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py index f6ec74e9e853..f00a794b5305 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py @@ -34,7 +34,12 @@ def sample_export_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + output_config = redis_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + request = redis_v1.ExportInstanceRequest( + name="name_value", + output_config=output_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py index 441dd31f6a12..23f4aa913509 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -35,6 +35,7 @@ async def sample_failover_instance(): # Initialize request argument(s) request = redis_v1.FailoverInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index f80743627e40..9de974244a48 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -35,6 +35,7 @@ def sample_failover_instance(): # Initialize request argument(s) request = redis_v1.FailoverInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py index 390c4f9b1a4e..87273f1da476 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -35,6 +35,7 @@ async def sample_get_instance(): # Initialize request argument(s) request = redis_v1.GetInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index c7ee84f2316c..d72d199eb216 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -35,6 +35,7 @@ def sample_get_instance(): # Initialize request argument(s) request = redis_v1.GetInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py index fff17dfa8457..4f5137291e6c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py @@ -34,7 +34,12 @@ async def sample_import_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + input_config = redis_v1.InputConfig() + input_config.gcs_source.uri = "uri_value" + request = redis_v1.ImportInstanceRequest( + name="name_value", + input_config=input_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py index 19feb1321519..3cbe06483880 
100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py @@ -34,7 +34,12 @@ def sample_import_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + input_config = redis_v1.InputConfig() + input_config.gcs_source.uri = "uri_value" + request = redis_v1.ImportInstanceRequest( + name="name_value", + input_config=input_config, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py index 33d0758f39af..9ef268347e97 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -35,6 +35,7 @@ async def sample_list_instances(): # Initialize request argument(s) request = redis_v1.ListInstancesRequest( + parent="projects/{project}/locations/{location}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index df9296d34a20..5cbdd3818495 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -35,6 +35,7 @@ def sample_list_instances(): # Initialize request argument(s) request = redis_v1.ListInstancesRequest( + parent="projects/{project}/locations/{location}", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py index 3f6263abc066..cc2340794a68 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py @@ -34,7 +34,13 @@ async def sample_update_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + request = redis_v1.UpdateInstanceRequest( + instance=instance, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py index e363d28c117b..a98f88b579a6 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py @@ -34,7 +34,13 @@ def 
sample_update_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + request = redis_v1.UpdateInstanceRequest( + instance=instance, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py index 1ae6bc3a6a98..b170fb02d947 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -35,6 +35,8 @@ async def sample_upgrade_instance(): # Initialize request argument(s) request = redis_v1.UpgradeInstanceRequest( + name="projects/{project}/locations/{location}/instances/{instance}", + redis_version="redis_version_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index 60759777e066..c1b6caf8b580 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -35,6 +35,8 @@ def sample_upgrade_instance(): # Initialize request argument(s) request = redis_v1.UpgradeInstanceRequest( + 
name="projects/{project}/locations/{location}/instances/{instance}", + redis_version="redis_version_value", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index 72a2f6595054..6d6afcf99463 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -35,6 +35,7 @@ async def sample_list_resources(): # Initialize request argument(s) request = mollusca_v1.ListResourcesRequest( + parent="items/{item_id}/parts/{part_id}", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index e7423eaf3d0d..9ca10fc35c17 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -35,6 +35,7 @@ def sample_list_resources(): # Initialize request argument(s) request = mollusca_v1.ListResourcesRequest( + parent="items/{item_id}/parts/{part_id}", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py index 26ab1b086cfb..d34f0a980d8f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -34,11 +34,12 @@ async def sample_method_bidi_streaming(): client = mollusca_v1.SnippetsAsyncClient() # Initialize request argument(s) - request = mollusca_v1.SignatureRequest( + request = mollusca_v1.SignatureRequestOneRequiredField( + my_string="my_string_value", ) # Make the request - stream = await client.method_bidi_streaming([]) + stream = await client.method_bidi_streaming([request]) async for response in stream: print("{}".format(response)) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index 239eeb77639f..daba69f1b6d9 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -34,11 +34,12 @@ def sample_method_bidi_streaming(): client = mollusca_v1.SnippetsClient() # Initialize request argument(s) - request = mollusca_v1.SignatureRequest( + request = mollusca_v1.SignatureRequestOneRequiredField( + my_string="my_string_value", ) # Make the request - stream = client.method_bidi_streaming([]) + stream = client.method_bidi_streaming([request]) for response in stream: print("{}".format(response)) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py index 3327f9a2b427..61497f4a437e 100644 ---
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py @@ -34,7 +34,16 @@ async def sample_method_lro_signatures(): client = mollusca_v1.SnippetsAsyncClient() # Initialize request argument(s) + my_message = mollusca_v1.MessageWithNesting() + my_message.message.required_string = "required_string_value" + my_message.my_int = 656 + request = mollusca_v1.SignatureRequest( + my_string="my_string_value", + my_int=656, + my_bool=True, + my_message=my_message, + single_enum="DEFAULT", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py index af22fb412539..ea51e76e12b5 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py @@ -34,7 +34,16 @@ def sample_method_lro_signatures(): client = mollusca_v1.SnippetsClient() # Initialize request argument(s) + my_message = mollusca_v1.MessageWithNesting() + my_message.message.required_string = "required_string_value" + my_message.my_int = 656 + request = mollusca_v1.SignatureRequest( + my_string="my_string_value", + my_int=656, + my_bool=True, + my_message=my_message, + single_enum="DEFAULT", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py index b8f462957747..f7351c4b369e 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py @@ -34,7 +34,16 @@ async def sample_method_one_signature(): client = mollusca_v1.SnippetsAsyncClient() # Initialize request argument(s) + my_message = mollusca_v1.MessageWithNesting() + my_message.message.required_string = "required_string_value" + my_message.my_int = 656 + request = mollusca_v1.SignatureRequest( + my_string="my_string_value", + my_int=656, + my_bool=True, + my_message=my_message, + single_enum="DEFAULT", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py index e8e438f169a8..dcb1a81f0e1e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py @@ -34,7 +34,16 @@ def sample_method_one_signature(): client = mollusca_v1.SnippetsClient() # Initialize request argument(s) + my_message = mollusca_v1.MessageWithNesting() + my_message.message.required_string = "required_string_value" + my_message.my_int = 656 + request = mollusca_v1.SignatureRequest( + my_string="my_string_value", + my_int=656, + my_bool=True, + my_message=my_message, + single_enum="DEFAULT", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py index 753c7666e5b8..785c22899a9a 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py @@ -34,7 +34,16 @@ async def sample_method_server_streaming(): client = mollusca_v1.SnippetsAsyncClient() # Initialize request argument(s) + my_message = mollusca_v1.MessageWithNesting() + my_message.message.required_string = "required_string_value" + my_message.my_int = 656 + request = mollusca_v1.SignatureRequest( + my_string="my_string_value", + my_int=656, + my_bool=True, + my_message=my_message, + single_enum="DEFAULT", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py index 339623a2d60f..5894bff3e943 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py @@ -34,7 +34,16 @@ def sample_method_server_streaming(): client = mollusca_v1.SnippetsClient() # Initialize request argument(s) + my_message = mollusca_v1.MessageWithNesting() + my_message.message.required_string = "required_string_value" + my_message.my_int = 656 + request = mollusca_v1.SignatureRequest( + my_string="my_string_value", + my_int=656, + my_bool=True, + my_message=my_message, + single_enum="DEFAULT", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py new file mode 100644 index 000000000000..fee612e387fa --- 
/dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for OneOfMethod +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async] +from animalia import mollusca_v1 + + +async def sample_one_of_method(): + """Snippet for one_of_method""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.OneOfRequest( + my_string="my_string_value", + non_one_of_string="non_one_of_string_value", + ) + + # Make the request + response = await client.one_of_method(request=request) + + # Handle response + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py new file mode 100644 index 000000000000..71efe54950ba --- 
/dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for OneOfMethod +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync] +from animalia import mollusca_v1 + + +def sample_one_of_method(): + """Snippet for one_of_method""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.OneOfRequest( + my_string="my_string_value", + non_one_of_string="non_one_of_string_value", + ) + + # Make the request + response = client.one_of_method(request=request) + + # Handle response + print("{}".format(response)) + +# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync] diff --git a/packages/gapic-generator/tests/snippetgen/snippets.proto b/packages/gapic-generator/tests/snippetgen/snippets.proto index 6aaa404bcf9d..d5bbb9f78b32 100644 --- a/packages/gapic-generator/tests/snippetgen/snippets.proto +++ b/packages/gapic-generator/tests/snippetgen/snippets.proto @@ -29,11 +29,11 @@ service 
Snippets { option (google.api.default_host) = "mollusca.example.com"; rpc MethodOneSignature(SignatureRequest) returns(Response) { - option (google.api.method_signature) = "a_string,an_int,a_bool"; + option (google.api.method_signature) = "my_string,my_int,my_bool"; } rpc MethodLroSignatures(SignatureRequest) returns(google.longrunning.Operation) { - option (google.api.method_signature) = "a_string,an_int,a_bool"; + option (google.api.method_signature) = "my_string,my_int,my_bool"; option (google.longrunning.operation_info) = { response_type: "LroResponse" metadata_type: "LroMetadata" @@ -48,18 +48,24 @@ service Snippets { } rpc MethodServerStreaming(SignatureRequest) returns(stream Response) { - option (google.api.method_signature) = "a_string,a_bool"; + option (google.api.method_signature) = "my_string,my_int,my_bool"; option (google.api.method_signature) = ""; } - rpc MethodBidiStreaming(stream SignatureRequest) returns (stream Response); + rpc MethodBidiStreaming(stream SignatureRequestOneRequiredField) returns (stream Response); + + rpc OneOfMethod(OneOfRequest) returns (Response); +} + +enum Enum { + DEFAULT = 0; // First value must be 0 and is the default } message ListResourcesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "snippets.example.com/Resource" + type: "snippets.example.com/Resource" }]; int32 page_size = 2; @@ -85,16 +91,35 @@ message Resource { pattern: "items/{item_id}/parts/{part_id}" }; string name = 1; + + +} + +message MessageWithNesting { + message NestedMessage { + string required_string = 1 [(google.api.field_behavior) = REQUIRED]; + string optional_string = 2; + } + + NestedMessage message = 1 [(google.api.field_behavior) = REQUIRED]; + int32 my_int = 2 [(google.api.field_behavior) = REQUIRED]; } +message SignatureRequestOneRequiredField { + string my_string = 1 [(google.api.field_behavior) = REQUIRED]; +} + message SignatureRequest { - string a_string = 1; - int32 
an_int = 2; - bool a_bool = 3; + string my_string = 1 [(google.api.field_behavior) = REQUIRED]; + int32 my_int = 2 [(google.api.field_behavior) = REQUIRED]; + bool my_bool = 3 [(google.api.field_behavior) = REQUIRED]; map map_int_string = 4; + MessageWithNesting my_message = 5 [(google.api.field_behavior) = REQUIRED]; + Enum single_enum = 6 [(google.api.field_behavior) = REQUIRED]; } + message Response { } @@ -104,3 +129,10 @@ message LroResponse { message LroMetadata { } +message OneOfRequest { + string non_one_of_string = 1 [(google.api.field_behavior) = REQUIRED]; + oneof my_one_of { + string my_string = 2; + int32 my_number = 3; + } +} diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index 0e9c8129d143..c0e3fc83e467 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -39,22 +39,56 @@ class DummyMethod: DummyIdent = namedtuple("DummyIdent", ["name"]) -DummyMessage = namedtuple( - "DummyMessage", ["fields", "type", "options", "ident"]) -DummyMessage.__new__.__defaults__ = (False,) * len(DummyMessage._fields) +DummyMessageTypePB = namedtuple("DummyMessageTypePB", ["name"]) -DummyField = namedtuple("DummyField", +# DummyMessageBase = namedtuple( +# "DummyMessage", ["fields", "type", "options", "ident",]) +# DummyMessageBase.__new__.__defaults__ = (False,) * len(DummyMessageBase._fields) + + +DummyFieldBase = namedtuple("DummyField", ["message", "enum", "name", "repeated", + "required", + "resource_reference", + "oneof", "field_pb", "meta", "is_primitive", "type"]) -DummyField.__new__.__defaults__ = (False,) * len(DummyField._fields) +DummyFieldBase.__new__.__defaults__ = (False,) * len(DummyFieldBase._fields) + + +class DummyField(DummyFieldBase): + @property + def mock_value_original_type(self): + return "mock_value" + + +class DummyMessage: + def __init__(self, *, fields={}, 
type="", options=False, ident=False, resource_path=False): + self.fields = fields + self.type = type + self.options = options + self.ident = ident + self.resource_path = resource_path + + def get_field(self, field_name: str): + return self.fields[field_name] + + def oneof_fields(self): + return dict((field.oneof, field) for field in self.fields.values() if field.oneof) + + @property + def required_fields(self): + return [field for field in self.fields.values() if field.required] + -DummyService = namedtuple("DummyService", ["methods", "client_name"]) +DummyService = namedtuple("DummyService", [ + "methods", "client_name", "async_client_name", "resource_messages_dict"]) +DummyService.__new__.__defaults__ = (False,) * len(DummyService._fields) DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming", "messages"]) @@ -76,7 +110,8 @@ def message_factory(exp: str, # used to describe the field and type hierarchy, # e.g. "mollusc.cephalopod.coleoid" toks = exp.split(".") - messages = [DummyMessage({}, tok.upper() + "_TYPE") for tok in toks] + messages = [DummyMessage(fields={}, type=tok.upper() + "_TYPE") + for tok in toks] if enum: messages[-1] = enum diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index 5310595732e3..b1fdedea4133 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -34,13 +34,14 @@ def sample_classify(video, location): client = molluscclient.MolluscServiceClient() # Initialize request argument(s) - classify_target = {} + classify_target = molluscclient.ClassifyTarget() + # video = "path/to/mollusc/video.mkv" with open(video, "rb") as f: - classify_target["video"] = f.read() + classify_target.video = f.read() # location = "New Zealand" - classify_target["location_annotation"] = location + 
classify_target.location_annotation = location request = molluscclient.molluscs.v1.ClassifyRequest( classify_target=classify_target, diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 5aa99485ad45..59fde511d214 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -34,13 +34,14 @@ async def sample_classify(video, location): client = molluscclient.MolluscServiceAsyncClient() # Initialize request argument(s) - classify_target = {} + classify_target = molluscclient.ClassifyTarget() + # video = "path/to/mollusc/video.mkv" with open(video, "rb") as f: - classify_target["video"] = f.read() + classify_target.video = f.read() # location = "New Zealand" - classify_target["location_annotation"] = location + classify_target.location_annotation = location request = molluscclient.molluscs.v1.ClassifyRequest( classify_target=classify_target, diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index 5310595732e3..b1fdedea4133 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -34,13 +34,14 @@ def sample_classify(video, location): client = molluscclient.MolluscServiceClient() # Initialize request argument(s) - classify_target = {} + classify_target = molluscclient.ClassifyTarget() + # video = "path/to/mollusc/video.mkv" with open(video, "rb") as f: - classify_target["video"] = f.read() + classify_target.video = f.read() # location = "New Zealand" - classify_target["location_annotation"] = location + 
classify_target.location_annotation = location request = molluscclient.molluscs.v1.ClassifyRequest( classify_target=classify_target, diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index 47fe2452909b..a6e7d48b6e98 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -34,13 +34,14 @@ def sample_classify(video, location): client = molluscclient.MolluscServiceClient() # Initialize request argument(s) - classify_target = {} + classify_target = molluscclient.ClassifyTarget() + # video = "path/to/mollusc/video.mkv" with open(video, "rb") as f: - classify_target["video"] = f.read() + classify_target.video = f.read() # location = "New Zealand" - classify_target["location_annotation"] = location + classify_target.location_annotation = location request = molluscclient.molluscs.v1.ClassifyRequest( classify_target=classify_target, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 8b8883014737..95b7e74f53dd 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -24,7 +24,8 @@ from gapic.schema import (naming, wrappers) from tests.unit.samplegen.common_types import (DummyField, DummyMessage, - DummyMethod, DummyService, DummyIdent, + + DummyMessageTypePB, DummyMethod, DummyService, DummyIdent, DummyApiSchema, DummyNaming, enum_factory, message_factory) from collections import namedtuple @@ -62,13 +63,16 @@ def test_generate_sample_basic(): type="REQUEST TYPE", fields={ "classify_target": DummyField( + type=DummyMessageTypePB(name="ClassifyTarget"), message=DummyMessage( type="CLASSIFY 
TYPE", fields={ "video": DummyField( + type=DummyMessageTypePB(name="Video"), message=DummyMessage(type="VIDEO TYPE"), ), "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), ) }, @@ -134,13 +138,16 @@ def test_generate_sample_basic_async(): type="REQUEST TYPE", fields={ "classify_target": DummyField( + type=DummyMessageTypePB(name="ClassifyTarget"), message=DummyMessage( - type="CLASSIFY TYPE", + type=DummyMessageTypePB(name="CLASSIFY TYPE"), fields={ "video": DummyField( + type=DummyMessageTypePB(name="Video"), message=DummyMessage(type="VIDEO TYPE"), ), "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), ) }, @@ -206,13 +213,15 @@ def test_generate_sample_basic_unflattenable(): type="REQUEST TYPE", fields={ "classify_target": DummyField( + type=DummyMessageTypePB(name="ClassifyTarget"), message=DummyMessage( - type="CLASSIFY TYPE", fields={ "video": DummyField( + type=DummyMessageTypePB(name="Video"), message=DummyMessage(type="VIDEO TYPE"), ), "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), ) }, @@ -269,13 +278,15 @@ def test_generate_sample_void_method(): type="REQUEST TYPE", fields={ "classify_target": DummyField( + type=DummyMessageTypePB(name="ClassifyTarget"), message=DummyMessage( - type="CLASSIFY TYPE", fields={ "video": DummyField( + type=DummyMessageTypePB(name="Video"), message=DummyMessage(type="VIDEO TYPE"), ), "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), ) }, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 73798ce842ee..e9c9913d0f9a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ 
b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -29,7 +29,7 @@ import gapic.schema.wrappers as wrappers from gapic.utils import Options -from common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, +from common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, DummyMessageTypePB, DummyService, DummyMethod, message_factory, enum_factory) from gapic.samplegen_utils import utils @@ -85,15 +85,25 @@ def test_define_redefinition(): def test_preprocess_sample(): # Verify that the default response is added. sample = {"service": "Mollusc", "rpc": "Classify"} + + classify_request_message = DummyMessage( + fields={ + "parent": DummyField(is_primitive=True, type=str, required=True, name="parent"), + }, + type=DummyMessageTypePB(name="ClassifyRequest"), + ident=DummyIdent(name="ClassifyRequest") + ) + api_schema = DummyApiSchema( services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient")}, + methods={}, client_name="MolluscClient", + resource_messages_dict={})}, naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + messages=classify_request_message ) - rpc = DummyMethod(input=DummyMessage( - ident=DummyIdent(name="ClassifyRequest"))) + rpc = DummyMethod(input=classify_request_message) samplegen.Validator.preprocess_sample(sample, api_schema, rpc) @@ -113,7 +123,149 @@ def test_preprocess_sample(): assert client_name == "MolluscClient" request_type = sample.get("request_type") - assert request_type == "ClassifyRequest" + assert request_type.ident.name == "ClassifyRequest" + + # assert mock request is created + assert sample["request"] == [ + { + "field": "parent", + "value": "mock_value" + } + ] + + +def test_preprocess_sample_resource_message_field(): + # Verify that the default response is added. 
+ sample = {"service": "Mollusc", "rpc": "Classify"} + + classify_request_message = DummyMessage( + fields={ + "parent": DummyField(is_primitive=True, type=str, required=True, name="parent", resource_reference="parent"), + }, + type=DummyMessageTypePB(name="ClassifyRequest"), + ident=DummyIdent(name="ClassifyRequest") + ) + + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient", + resource_messages_dict={"parent": DummyMessage( + resource_path="projects/{project}")} + )}, + naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + messages=classify_request_message, + + ) + + rpc = DummyMethod(input=classify_request_message) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + + # assert mock request is created + assert sample["request"] == [ + { + "field": "parent", + "value": "projects/{project}" + } + ] + + +def test_preprocess_sample_with_enum_field(): + # Verify that the default response is added. 
+ sample = {"service": "Mollusc", "rpc": "Classify"} + + classify_request_message = DummyMessage( + fields={ + "type": DummyField( + name="type", + required=True, + type=enum_factory("type", ["TYPE_1", "TYPE_2"]), + enum=enum_factory("type", ["TYPE_1", "TYPE_2"]) + ) + }, + type=DummyMessageTypePB(name="ClassifyRequest"), + ident=DummyIdent(name="ClassifyRequest") + ) + + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient", + resource_messages_dict={})}, + naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + messages=classify_request_message + ) + + rpc = DummyMethod(input=classify_request_message) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + + response = sample.get("response") + assert response == [{"print": ["%s", "$resp"]}] + + package_name = sample.get("package_name") + assert package_name == "mollusc-cephalopod-teuthida-" + + module_name = sample.get("module_name") + assert module_name == "teuthida_v1" + + module_namespace = sample.get("module_namespace") + assert module_namespace == "mollusc.cephalopod" + + client_name = sample.get("client_name") + assert client_name == "MolluscClient" + + request_type = sample.get("request_type") + assert request_type.ident.name == "ClassifyRequest" + + # assert mock request is created + assert sample["request"] == [ + { + "field": "type", + "value": "TYPE_2" + } + ] + + +def test_preprocess_sample_nested_message_field(): + # Verify that the default response is added. 
+ sample = {"service": "Mollusc", "rpc": "Classify"} + + classify_request_message = DummyMessage( + fields={ + "config": DummyField(name="config", is_primitive=False, required=True, oneof=False, type=DummyMessage( + fields={"name": DummyField( + is_primitive=True, type=str, name="name", required=True, oneof=False)}, + )) + }, + type=DummyMessageTypePB(name="ClassifyRequest"), + ident=DummyIdent(name="ClassifyRequest") + ) + + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient", + resource_messages_dict={} + )}, + naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + messages=classify_request_message, + + ) + + rpc = DummyMethod(input=classify_request_message) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + + # assert mock request is created + assert sample["request"] == [ + { + "field": "config.name", + "value": "mock_value" + }, + + ] def test_preprocess_sample_void_method(): @@ -1919,7 +2071,6 @@ def test_generate_sample_spec_basic(): "sample_type": "standalone", "rpc": "Ramshorn", "transport": "grpc", - "request": [], "service": "animalia.mollusca.v1.Squid", "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_sync", "description": "Snippet for ramshorn" @@ -1929,7 +2080,6 @@ def test_generate_sample_spec_basic(): "sample_type": "standalone", "rpc": "Ramshorn", "transport": "grpc-async", - "request": [], "service": "animalia.mollusca.v1.Squid", "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_async", "description": "Snippet for ramshorn" diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index edd7e3b0fe77..edf59a925f2a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -64,7 +64,7 @@ 
def test_render_attr_value(): {{ frags.render_request_attr("mollusc", request) }} ''', ''' - mollusc["order"] = Molluscs.Cephalopoda.Coleoidea + mollusc.order = Molluscs.Cephalopoda.Coleoidea ''', request=samplegen.AttributeRequestSetup( field="order", @@ -80,8 +80,9 @@ def test_render_attr_input_parameter(): {{ frags.render_request_attr("squid", request) }} ''', ''' + # species = 'Humboldt' - squid["species"] = species + squid.species = species ''', request=samplegen.AttributeRequestSetup(field="species", value="'Humboldt'", @@ -95,9 +96,10 @@ def test_render_attr_file(): {{ frags.render_request_attr("classify_mollusc_request", request) }} ''', ''' + # mollusc_video_path = 'path/to/mollusc/video.mkv' with open(mollusc_video_path, "rb") as f: - classify_mollusc_request["mollusc_video"] = f.read() + classify_mollusc_request.mollusc_video = f.read() ''', request=samplegen.AttributeRequestSetup(field="mollusc_video", value="'path/to/mollusc/video.mkv'", @@ -110,29 +112,29 @@ def test_render_request_basic(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request_setup(request) }} + {{ frags.render_request_setup(request, module_name, request_type) }} ''', ''' # Initialize request argument(s) - cephalopod = {} + cephalopod = mollusca.Cephalopod() + # cephalopod_mass = '10 kg' - cephalopod["mantle_mass"] = cephalopod_mass + cephalopod.mantle_mass = cephalopod_mass # photo_path = 'path/to/cephalopod/photo.jpg' with open(photo_path, "rb") as f: - cephalopod["photo"] = f.read() + cephalopod.photo = f.read() + cephalopod.order = Molluscs.Cephalopoda.Coleoidea - cephalopod["order"] = Molluscs.Cephalopoda.Coleoidea + gastropod = mollusca.Gastropod() - gastropod = {} # gastropod_mass = '1 kg' - gastropod["mantle_mass"] = gastropod_mass - - gastropod["order"] = Molluscs.Gastropoda.Pulmonata + gastropod.mantle_mass = gastropod_mass + gastropod.order = Molluscs.Gastropoda.Pulmonata # movie_path = 'path/to/gastropod/movie.mkv' with 
open(movie_path, "rb") as f: - gastropod["movie"] = f.read() + gastropod.movie = f.read() ''', request=samplegen.FullRequest( @@ -176,6 +178,19 @@ def test_render_request_basic(): single=None), ], flattenable=True, + ), + module_name="mollusca", + request_type=common_types.DummyMessage( + fields={ + "cephalopod": common_types.DummyField( + name="cephalopod", + type=common_types.DummyMessageTypePB(name="Cephalopod") + ), + "gastropod": common_types.DummyField( + name="gastropod", + type=common_types.DummyMessageTypePB(name="Gastropod") + ) + } ) ) @@ -184,29 +199,29 @@ def test_render_request_unflattened(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request_setup(request, "mollusca", "CreateMolluscRequest") }} + {{ frags.render_request_setup(request, module_name, request_type) }} ''', ''' # Initialize request argument(s) - cephalopod = {} + cephalopod = mollusca.Cephalopod() + # cephalopod_mass = '10 kg' - cephalopod["mantle_mass"] = cephalopod_mass + cephalopod.mantle_mass = cephalopod_mass # photo_path = 'path/to/cephalopod/photo.jpg' with open(photo_path, "rb") as f: - cephalopod["photo"] = f.read() + cephalopod.photo = f.read() + cephalopod.order = Molluscs.Cephalopoda.Coleoidea - cephalopod["order"] = Molluscs.Cephalopoda.Coleoidea + gastropod = mollusca.Gastropod() - gastropod = {} # gastropod_mass = '1 kg' - gastropod["mantle_mass"] = gastropod_mass - - gastropod["order"] = Molluscs.Gastropoda.Pulmonata + gastropod.mantle_mass = gastropod_mass + gastropod.order = Molluscs.Gastropoda.Pulmonata # movie_path = 'path/to/gastropod/movie.mkv' with open(movie_path, "rb") as f: - gastropod["movie"] = f.read() + gastropod.movie = f.read() request = mollusca.CreateMolluscRequest( cephalopod=cephalopod, @@ -255,11 +270,25 @@ def test_render_request_unflattened(): single=None), samplegen.TransformedRequest(base="bivalve", body=None, - single='"humboldt"'), + single=samplegen.AttributeRequestSetup( + value='"humboldt"', + ),), ] ), 
- api=common_types.DummyApiSchema(), - + module_name="mollusca", + request_type=common_types.DummyMessage( + fields={ + "cephalopod": common_types.DummyField( + name="cephalopod", + type=common_types.DummyMessageTypePB(name="Cephalopod") + ), + "gastropod": common_types.DummyField( + name="gastropod", + type=common_types.DummyMessageTypePB(name="Gastropod") + ) + }, + ident=common_types.DummyIdent(name="CreateMolluscRequest") + ) ) @@ -915,6 +944,35 @@ def test_render_method_call_basic_async(): ) +def test_render_method_call_basic_async(): + check_template( + ''' + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + calling_form, calling_form_enum, transport) }} + ''', + ''' + await client.categorize_mollusc(request=request) + ''', + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", + body=True, + single=None), + samplegen.TransformedRequest(base="audio", + body=True, + single=None), + samplegen.TransformedRequest(base="guess", + body=True, + single=None) + ], + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.Request, + transport="grpc-async" + ) + + def test_render_method_call_basic_flattenable(): check_template( ''' diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 99f2edc9f72d..2fa2c09783a2 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -17,6 +17,7 @@ import pytest from google.api import field_behavior_pb2 +from google.api import resource_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import api @@ -108,6 +109,13 @@ def test_ident_sphinx_repeated(): assert field.ident.sphinx == 'Sequence[bool]' +def test_resource_reference(): + field = make_field(type='TYPE_STRING') + 
field.options.Extensions[resource_pb2.resource_reference].type = "translate.googleapis.com/Glossary" + + assert field.resource_reference == "translate.googleapis.com/Glossary" + + def test_type_primitives(): assert make_field(type='TYPE_FLOAT').type.python_type == float assert make_field(type='TYPE_INT64').type.python_type == int @@ -154,6 +162,11 @@ def test_mock_value_int(): assert field.mock_value == '728' +def test_mock_value_original_type_int(): + field = make_field(name='foo_bar', type='TYPE_INT32') + assert field.mock_value_original_type == 728 + + def test_oneof(): REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') @@ -166,26 +179,51 @@ def test_mock_value_float(): assert field.mock_value == '0.728' +def test_mock_value_original_type_float(): + field = make_field(name='foo_bar', type='TYPE_DOUBLE') + assert field.mock_value_original_type == 0.728 + + def test_mock_value_bool(): field = make_field(name='foo_bar', type='TYPE_BOOL') assert field.mock_value == 'True' +def test_mock_value_original_type_bool(): + field = make_field(name='foo_bar', type='TYPE_BOOL') + assert field.mock_value_original_type == True + + def test_mock_value_str(): field = make_field(name='foo_bar', type='TYPE_STRING') assert field.mock_value == "'foo_bar_value'" +def test_mock_value_original_type_str(): + field = make_field(name='foo_bar', type='TYPE_STRING') + assert field.mock_value_original_type == "foo_bar_value" + + def test_mock_value_bytes(): field = make_field(name='foo_bar', type='TYPE_BYTES') assert field.mock_value == "b'foo_bar_blob'" +def test_mock_value_original_type_bytes(): + field = make_field(name='foo_bar', type='TYPE_BYTES') + assert field.mock_value_original_type == b"foo_bar_blob" + + def test_mock_value_repeated(): field = make_field(name='foo_bar', type='TYPE_STRING', label=3) assert field.mock_value == "['foo_bar_value']" +def test_mock_value_original_type_repeated(): + field = make_field(name='foo_bar', type='TYPE_STRING', label=3) + 
assert field.mock_value_original_type == ["foo_bar_value"] + + def test_mock_value_map(): entry_msg = make_message( name='SquidEntry', @@ -251,6 +289,33 @@ def test_mock_value_message(): assert field.mock_value == 'bogus.Message(foo=324)' +def test_mock_value_original_type_message_errors(): + subfields = collections.OrderedDict(( + ('foo', make_field(name='foo', type='TYPE_INT32')), + ('bar', make_field(name='bar', type='TYPE_STRING')) + )) + message = wrappers.MessageType( + fields=subfields, + message_pb=descriptor_pb2.DescriptorProto(name='Message', field=[ + i.field_pb for i in subfields.values() + ]), + meta=metadata.Metadata(address=metadata.Address( + module='bogus', + name='Message', + )), + nested_enums={}, + nested_messages={}, + ) + field = make_field( + type='TYPE_MESSAGE', + type_name='bogus.Message', + message=message, + ) + + with pytest.raises(TypeError): + mock = field.mock_value_original_type + + def test_mock_value_recursive(): # The elaborate setup is an unfortunate requirement. 
file_pb = descriptor_pb2.FileDescriptorProto( @@ -290,3 +355,7 @@ def test_field_name_kword_disambiguation(): name="frum", ) assert frum_field.name == "frum" + + +def test_field_resource_reference(): + field = make_field(name='parent', type='TYPE_STRING') diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 3de69d3e5498..f0b5d611663a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -18,6 +18,7 @@ import pytest +from google.api import field_behavior_pb2 from google.api import resource_pb2 from google.protobuf import descriptor_pb2 @@ -275,3 +276,34 @@ def test_oneof_fields(): "mass": [mass_kg, mass_lbs], "length": [length_m, length_f], } + assert actual_oneofs == expected_oneofs + + +def test_required_fields(): + REQUIRED = field_behavior_pb2.FieldBehavior.Value('REQUIRED') + + mass_kg = make_field(name="mass_kg", type=5) + mass_kg.options.Extensions[field_behavior_pb2.field_behavior].append( + REQUIRED + ) + + length_m = make_field(name="length_m", type=5) + length_m.options.Extensions[field_behavior_pb2.field_behavior].append( + REQUIRED + ) + + color = make_field(name="color", type=5) + color.options.Extensions[field_behavior_pb2.field_behavior].append( + REQUIRED + ) + + request = make_message( + name="CreateMolluscReuqest", + fields=( + mass_kg, + length_m, + color, + ), + ) + + assert set(request.required_fields) == {mass_kg, length_m, color} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 9c47797dafc3..f93ddc814e8a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -41,8 +41,10 @@ def make_resource_opts(*args): # Resources are labeled via 
an options extension opts = descriptor_pb2.MessageOptions() opts.Extensions[resource_pb2.resource].pattern.append( - "/".join("{{{arg}}}/{arg}" for arg in args) + "/".join(f"{arg}/{{{arg}}}" for arg in args) ) + opts.Extensions[resource_pb2.resource].type = "/".join( + f"{arg}/{{{arg}}}" for arg in args) return opts @@ -249,6 +251,87 @@ def test_resource_messages(): assert expected == actual +def test_resource_messages_dict(): + # Regular, top level resource + squid_resource = make_message("Squid", options=make_resource_opts("squid")) + squid_request = make_message( + "CreateSquid", + fields=( + make_field('squid', message=squid_resource), + ), + ) + + # Nested resource + squamosa_message = make_message( + "Squamosa", + options=make_resource_opts("clam", "squamosa"), + ) + clam_resource = make_message( + "Clam", + options=make_resource_opts("clam"), + fields=( + make_field('squamosa', message=squamosa_message), + ), + ) + clam_request = make_message( + 'CreateClam', + fields=( + make_field('clam', message=clam_resource), + # Red herring, not resources :) + make_field('zone', 2, enum=make_enum('Zone')), + make_field('pearls', 3, True, message=make_message('Pearl')), + ), + ) + + # Some special APIs have request messages that _are_ resources. 
+ whelk_resource = make_message("Whelk", options=make_resource_opts("whelk")) + + # Not a resource + octopus_request = make_message( + "CreateOctopus", + fields=( + make_field('Octopus', message=make_message('Octopus')), + ), + ) + + service = make_service( + 'Molluscs', + methods=( + make_method( + f"{message.name}", + input_message=message, + ) + for message in ( + squid_request, + clam_request, + whelk_resource, + octopus_request, + ) + ) + ) + + expected = { + # Service specific + "squid/{squid}": squid_resource, + "clam/{clam}": clam_resource, + "whelk/{whelk}": whelk_resource, + "clam/{clam}/squamosa/{squamosa}": squamosa_message, + # Common resources + "cloudresourcemanager.googleapis.com/Project": + service.common_resources["cloudresourcemanager.googleapis.com/Project"].message_type, + "cloudresourcemanager.googleapis.com/Organization": + service.common_resources["cloudresourcemanager.googleapis.com/Organization"].message_type, + "cloudresourcemanager.googleapis.com/Folder": + service.common_resources["cloudresourcemanager.googleapis.com/Folder"].message_type, + "cloudbilling.googleapis.com/BillingAccount": + service.common_resources["cloudbilling.googleapis.com/BillingAccount"].message_type, + "locations.googleapis.com/Location": + service.common_resources["locations.googleapis.com/Location"].message_type + } + actual = service.resource_messages_dict + assert expected == actual + + def test_service_unknown_resource_reference(): # This happens occasionally. 
opts = descriptor_pb2.FieldOptions() From b790ea3ca10fa3b6c6a4990863a7c97f42177920 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 Aug 2021 14:39:24 -0600 Subject: [PATCH 0601/1339] chore: release 0.51.0 (#972) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 400393690c0a..cc586b011be2 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.51.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.5...v0.51.0) (2021-08-18) + + +### Features + +* **snippetgen:** generate mock input for required fields ([#941](https://www.github.com/googleapis/gapic-generator-python/issues/941)) ([b2149da](https://www.github.com/googleapis/gapic-generator-python/commit/b2149da5e6873e1f71871bfecd899bb9aa0b6439)) + + +### Bug Fixes + +* add 'dict' type annotation to 'request' ([#966](https://www.github.com/googleapis/gapic-generator-python/issues/966)) ([49205d9](https://www.github.com/googleapis/gapic-generator-python/commit/49205d99dd440690b838c8eb3f6a695f35b061c2)) + ### [0.50.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.4...v0.50.5) (2021-07-22) From e4181d04bbb386f7001e098465c166e269dad266 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 19 Aug 2021 23:38:25 +0200 Subject: [PATCH 0602/1339] chore(deps): update dependency google-api-core to v2 (#973) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | 
|---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==1.31.1` -> `==2.0.0` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-core/2.0.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/2.0.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/2.0.0/compatibility-slim/1.31.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/2.0.0/confidence-slim/1.31.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v2.0.0`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​200-httpswwwgithubcomgoogleapispython-api-corecomparev200-b1v200-2021-08-18) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.31.2...v2.0.0) ##### âš  BREAKING CHANGES - drop support for Python 2.7 / 3.5 ([#​212](https://www.togithub.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9)) ##### Bug Fixes - bump grpcio version to use stable aio API ([#​234](https://www.togithub.com/googleapis/python-api-core/issues/234)) ([bdbf889](https://www.github.com/googleapis/python-api-core/commit/bdbf889210b709d7c1945f2160bcba9161b4dd2e)) - strip trailing \_ from field mask paths ([#​228](https://www.togithub.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416)) ### [`v1.31.2`](https://togithub.com/googleapis/python-api-core/releases/v1.31.2) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v1.31.1...v1.31.2) ##### Bug Fixes - strip trailing \_ from field mask paths ([#​228](https://www.togithub.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 6a684cbbf6b4..8be5e627d903 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==1.31.1 +google-api-core==2.0.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From 5a9e5502e541349ef56bf1ec1f1649e8936da29b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 20 Aug 2021 16:06:17 -0700 Subject: [PATCH 0603/1339] fix: timeouts are handled by rest clients, retries silently ignored (#976) --- .../%sub/services/%service/transports/rest.py.j2 | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 0e6303efd530..8c677847dad7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -9,6 +9,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple 
from google.api_core import operations_v1 {% endif %} from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -134,6 +135,8 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): def {{ method.name|snake_case }}(self, request: {{ method.input.ident }}, *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> {{ method.output.ident }}: r"""Call the {{- ' ' -}} @@ -145,6 +148,9 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): request (~.{{ method.input.ident }}): The request object.{{ ' ' }} {{- method.input.meta.doc|rst(width=72, indent=16) }} + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
{% if not method.void %} @@ -207,6 +213,7 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): headers['Content-Type'] = 'application/json' response = self._session.{{ method.http_opt['verb'] }}( url, + timeout=timeout, headers=headers, params=query_params, {% if 'body' in method.http_opt %} From 6ac3be6a3084bc1414a819af69b83aad9c24e15f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 20 Aug 2021 16:31:09 -0700 Subject: [PATCH 0604/1339] chore: release 0.51.1 (#977) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index cc586b011be2..3d1c1ecd3a30 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.51.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.0...v0.51.1) (2021-08-20) + + +### Bug Fixes + +* timeouts are handled by rest clients, retries silently ignored ([#976](https://www.github.com/googleapis/gapic-generator-python/issues/976)) ([a62463c](https://www.github.com/googleapis/gapic-generator-python/commit/a62463cadee0cdaf861e93998faa27e6a82adab4)) + ## [0.51.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.5...v0.51.0) (2021-08-18) From ff43b9d855f1c840db5bd4219319eab1a87f766d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 31 Aug 2021 09:36:45 -0600 Subject: [PATCH 0605/1339] fix(snippetgen): use f-strings in print statements (#975) Co-authored-by: Tres Seaver --- packages/gapic-generator/DEVELOPMENT.md | 6 +++++ .../gapic/generator/generator.py | 3 ++- .../gapic/samplegen_utils/utils.py | 21 ++++++++++++++++- .../templates/examples/feature_fragments.j2 | 9 ++++++-- 
..._asset_service_analyze_iam_policy_async.py | 2 +- ...ce_analyze_iam_policy_longrunning_async.py | 2 +- ...ice_analyze_iam_policy_longrunning_sync.py | 2 +- ...1_asset_service_analyze_iam_policy_sync.py | 2 +- ..._service_batch_get_assets_history_async.py | 2 +- ...t_service_batch_get_assets_history_sync.py | 2 +- ...sset_v1_asset_service_create_feed_async.py | 2 +- ...asset_v1_asset_service_create_feed_sync.py | 2 +- ...et_v1_asset_service_export_assets_async.py | 2 +- ...set_v1_asset_service_export_assets_sync.py | 2 +- ...d_asset_v1_asset_service_get_feed_async.py | 2 +- ...ed_asset_v1_asset_service_get_feed_sync.py | 2 +- ...sset_v1_asset_service_list_assets_async.py | 2 +- ...asset_v1_asset_service_list_assets_sync.py | 2 +- ...asset_v1_asset_service_list_feeds_async.py | 2 +- ..._asset_v1_asset_service_list_feeds_sync.py | 2 +- ...t_service_search_all_iam_policies_async.py | 2 +- ...et_service_search_all_iam_policies_sync.py | 2 +- ...sset_service_search_all_resources_async.py | 2 +- ...asset_service_search_all_resources_sync.py | 2 +- ...sset_v1_asset_service_update_feed_async.py | 2 +- ...asset_v1_asset_service_update_feed_sync.py | 2 +- ...credentials_generate_access_token_async.py | 2 +- ..._credentials_generate_access_token_sync.py | 2 +- ...iam_credentials_generate_id_token_async.py | 2 +- ..._iam_credentials_generate_id_token_sync.py | 2 +- ...ials_v1_iam_credentials_sign_blob_async.py | 2 +- ...tials_v1_iam_credentials_sign_blob_sync.py | 2 +- ...tials_v1_iam_credentials_sign_jwt_async.py | 2 +- ...ntials_v1_iam_credentials_sign_jwt_sync.py | 2 +- ...2_config_service_v2_create_bucket_async.py | 2 +- ...v2_config_service_v2_create_bucket_sync.py | 2 +- ...onfig_service_v2_create_exclusion_async.py | 2 +- ...config_service_v2_create_exclusion_sync.py | 2 +- ..._v2_config_service_v2_create_sink_async.py | 2 +- ...g_v2_config_service_v2_create_sink_sync.py | 2 +- ..._v2_config_service_v2_create_view_async.py | 2 +- 
...g_v2_config_service_v2_create_view_sync.py | 2 +- ...g_v2_config_service_v2_get_bucket_async.py | 2 +- ...ng_v2_config_service_v2_get_bucket_sync.py | 2 +- ...nfig_service_v2_get_cmek_settings_async.py | 2 +- ...onfig_service_v2_get_cmek_settings_sync.py | 2 +- ...2_config_service_v2_get_exclusion_async.py | 2 +- ...v2_config_service_v2_get_exclusion_sync.py | 2 +- ...ing_v2_config_service_v2_get_sink_async.py | 2 +- ...ging_v2_config_service_v2_get_sink_sync.py | 2 +- ...ing_v2_config_service_v2_get_view_async.py | 2 +- ...ging_v2_config_service_v2_get_view_sync.py | 2 +- ...v2_config_service_v2_list_buckets_async.py | 2 +- ..._v2_config_service_v2_list_buckets_sync.py | 2 +- ...config_service_v2_list_exclusions_async.py | 2 +- ..._config_service_v2_list_exclusions_sync.py | 2 +- ...g_v2_config_service_v2_list_sinks_async.py | 2 +- ...ng_v2_config_service_v2_list_sinks_sync.py | 2 +- ...g_v2_config_service_v2_list_views_async.py | 2 +- ...ng_v2_config_service_v2_list_views_sync.py | 2 +- ...2_config_service_v2_update_bucket_async.py | 2 +- ...v2_config_service_v2_update_bucket_sync.py | 2 +- ...g_service_v2_update_cmek_settings_async.py | 2 +- ...ig_service_v2_update_cmek_settings_sync.py | 2 +- ...onfig_service_v2_update_exclusion_async.py | 2 +- ...config_service_v2_update_exclusion_sync.py | 2 +- ..._v2_config_service_v2_update_sink_async.py | 2 +- ...g_v2_config_service_v2_update_sink_sync.py | 2 +- ..._v2_config_service_v2_update_view_async.py | 2 +- ...g_v2_config_service_v2_update_view_sync.py | 2 +- ...gging_service_v2_list_log_entries_async.py | 2 +- ...ogging_service_v2_list_log_entries_sync.py | 2 +- ...g_v2_logging_service_v2_list_logs_async.py | 2 +- ...ng_v2_logging_service_v2_list_logs_sync.py | 2 +- ...st_monitored_resource_descriptors_async.py | 2 +- ...ist_monitored_resource_descriptors_sync.py | 2 +- ...gging_service_v2_tail_log_entries_async.py | 2 +- ...ogging_service_v2_tail_log_entries_sync.py | 2 +- 
...ging_service_v2_write_log_entries_async.py | 2 +- ...gging_service_v2_write_log_entries_sync.py | 2 +- ...rics_service_v2_create_log_metric_async.py | 2 +- ...trics_service_v2_create_log_metric_sync.py | 2 +- ...metrics_service_v2_get_log_metric_async.py | 2 +- ..._metrics_service_v2_get_log_metric_sync.py | 2 +- ...trics_service_v2_list_log_metrics_async.py | 2 +- ...etrics_service_v2_list_log_metrics_sync.py | 2 +- ...rics_service_v2_update_log_metric_async.py | 2 +- ...trics_service_v2_update_log_metric_sync.py | 2 +- ...is_v1_cloud_redis_create_instance_async.py | 2 +- ...dis_v1_cloud_redis_create_instance_sync.py | 2 +- ...is_v1_cloud_redis_delete_instance_async.py | 2 +- ...dis_v1_cloud_redis_delete_instance_sync.py | 2 +- ...is_v1_cloud_redis_export_instance_async.py | 2 +- ...dis_v1_cloud_redis_export_instance_sync.py | 2 +- ..._v1_cloud_redis_failover_instance_async.py | 2 +- ...s_v1_cloud_redis_failover_instance_sync.py | 2 +- ...redis_v1_cloud_redis_get_instance_async.py | 2 +- ..._redis_v1_cloud_redis_get_instance_sync.py | 2 +- ...is_v1_cloud_redis_import_instance_async.py | 2 +- ...dis_v1_cloud_redis_import_instance_sync.py | 2 +- ...dis_v1_cloud_redis_list_instances_async.py | 2 +- ...edis_v1_cloud_redis_list_instances_sync.py | 2 +- ...is_v1_cloud_redis_update_instance_async.py | 2 +- ...dis_v1_cloud_redis_update_instance_sync.py | 2 +- ...s_v1_cloud_redis_upgrade_instance_async.py | 2 +- ...is_v1_cloud_redis_upgrade_instance_sync.py | 2 +- ...llusca_v1_snippets_list_resources_async.py | 2 +- ...ollusca_v1_snippets_list_resources_sync.py | 2 +- ...v1_snippets_method_bidi_streaming_async.py | 2 +- ..._v1_snippets_method_bidi_streaming_sync.py | 2 +- ...v1_snippets_method_lro_signatures_async.py | 2 +- ..._v1_snippets_method_lro_signatures_sync.py | 2 +- ..._v1_snippets_method_one_signature_async.py | 2 +- ...a_v1_snippets_method_one_signature_sync.py | 2 +- ..._snippets_method_server_streaming_async.py | 2 +- 
...1_snippets_method_server_streaming_sync.py | 2 +- ...ollusca_v1_snippets_one_of_method_async.py | 2 +- ...mollusca_v1_snippets_one_of_method_sync.py | 2 +- .../samplegen/golden_snippets/sample_basic.py | 2 +- .../golden_snippets/sample_basic_async.py | 2 +- .../sample_basic_unflattenable.py | 2 +- .../tests/unit/samplegen/test_integration.py | 1 + .../tests/unit/samplegen/test_template.py | 23 ++++++++++--------- 123 files changed, 165 insertions(+), 132 deletions(-) diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index 16f04c7cc26b..57d048b62df7 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -29,6 +29,12 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code ``` +- Format sources in place: + + ``` + find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --in-place + ``` + ## Integration Tests - Run a single integration test for one API. 
This generates Python source code diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 3014719c17f3..7d2b2cc95cac 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -21,7 +21,7 @@ from typing import Any, DefaultDict, Dict, Mapping from hashlib import sha256 from collections import OrderedDict, defaultdict -from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg +from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg, render_format_string from gapic.samplegen_utils.types import DuplicateSample from gapic.samplegen import manifest, samplegen from gapic.generator import formatter @@ -62,6 +62,7 @@ def __init__(self, opts: Options) -> None: self._env.filters["sort_lines"] = utils.sort_lines self._env.filters["wrap"] = utils.wrap self._env.filters["coerce_response_name"] = coerce_response_name + self._env.filters["render_format_string"] = render_format_string # Add tests to determine type of expressions stored in strings self._env.tests["str_field_pb"] = utils.is_str_field_pb diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index 7cf0a14a39c9..27ccb50efafc 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -18,7 +18,7 @@ import os import yaml -from typing import (Generator, Tuple) +from typing import (Generator, Tuple, List, Union) from gapic.samplegen_utils import types @@ -28,6 +28,25 @@ VALID_CONFIG_TYPE = "com.google.api.codegen.samplegen.v1p2.SampleConfigProto" +def render_format_string(s: str, expressions: List[str] = []) -> str: + """Given string s and a list of expressions, substitute each %s + in the string with {exp}. + + Arguments: + s (str): The string literal. 
+ expressions (Optional[List[str]]): A list of expressions. + """ + + s = s.replace('\"', '\\\"') + + for exp in expressions: + # some expressions will contain references to "$resp" + exp = coerce_response_name(exp) + s = s.replace("%s", f"{{{exp}}}", 1) + + return s + + def coerce_response_name(s: str) -> str: # In the sample config, the "$resp" keyword is used to refer to the # item of interest as received by the corresponding calling form. diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index eda14df7e467..7938a648b648 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -27,10 +27,15 @@ {% endmacro %} {% macro print_string_formatting(string_list) %} + {% if string_list|length == 1 %} -"{{ string_list[0]|replace("%s", "{}")|replace('\"', '\\\"') }}" +"{{ string_list[0] | render_format_string }}" +{% elif string_list|length == 2 and string_list[0] == "%s" and string_list[1] == "$resp" %} +response {% else %} -"{{ string_list[0]|replace("%s", "{}")|replace('\"', '\\\"') }}".format({{ string_list[1:]|map("coerce_response_name")|join(", ") }}) +{# Note: This is the equivalent of render_format_string(string_list[0], string_list[1:] ) +# See https://jinja.palletsprojects.com/en/3.0.x/api/#custom-filters #} +f"{{ string_list[0] | render_format_string(string_list[1:]) }}" {% endif %} {% endmacro %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py index 633f8b046707..8147347e6627 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py @@ -45,6 +45,6 @@ async def sample_analyze_iam_policy(): response = await client.analyze_iam_policy(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py index f3a40e1568f4..12e57510e8e6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py @@ -51,6 +51,6 @@ async def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py index 6bfec27de932..a4e998a182bb 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py @@ -51,6 +51,6 @@ def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py index 22017d9960ed..1a02995511ba 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py @@ -45,6 +45,6 @@ def sample_analyze_iam_policy(): response = client.analyze_iam_policy(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py index f2f051da59e7..dcbb0b0159c9 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py @@ -42,6 +42,6 @@ async def sample_batch_get_assets_history(): response = await client.batch_get_assets_history(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py index ed2d78e9e695..035d76dedcd1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py @@ -42,6 +42,6 @@ def sample_batch_get_assets_history(): response = client.batch_get_assets_history(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py index eb4291971443..d0b4a37a3ab2 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py @@ -47,6 +47,6 @@ async def sample_create_feed(): response = await client.create_feed(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_CreateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py index c5a31c080d1a..9eb643290ba6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py @@ -47,6 +47,6 @@ def sample_create_feed(): response = client.create_feed(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_CreateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py index 00c75331d986..39b41d605eaa 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py @@ -48,6 +48,6 @@ async def sample_export_assets(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_ExportAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py index e4548eed7d4c..204c9e7c5c0a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py @@ -48,6 +48,6 @@ def sample_export_assets(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_ExportAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py index c4078151b7d6..ed6e4c7f2e3d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -42,6 +42,6 @@ async def 
sample_get_feed(): response = await client.get_feed(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_GetFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index cb53327cce8d..c50a77f3a869 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -42,6 +42,6 @@ def sample_get_feed(): response = client.get_feed(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_GetFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py index ff87a684fe1f..5e6b0a007cad 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py @@ -41,6 +41,6 @@ async def sample_list_assets(): # Make the request page_result = client.list_assets(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_ListAssets_async] diff 
--git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py index 47ed9d149b6c..a7fb179d5ffa 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py @@ -41,6 +41,6 @@ def sample_list_assets(): # Make the request page_result = client.list_assets(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_ListAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py index 396b767010b4..31a26bde207b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py @@ -42,6 +42,6 @@ async def sample_list_feeds(): response = await client.list_feeds(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_ListFeeds_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py index 816523c64bc9..9075f2cd9fee 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py @@ -42,6 +42,6 @@ def sample_list_feeds(): response = client.list_feeds(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_ListFeeds_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py index 58fdd4b9bdf8..3893b85552cf 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py @@ -41,6 +41,6 @@ async def sample_search_all_iam_policies(): # Make the request page_result = client.search_all_iam_policies(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py index dbe303e7fbf5..53133196b1f2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py @@ -41,6 +41,6 @@ def sample_search_all_iam_policies(): # Make the request page_result = client.search_all_iam_policies(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py index f0021cafdc41..d97f46fb4f7a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py @@ -41,6 +41,6 @@ async def sample_search_all_resources(): # Make the request page_result = client.search_all_resources(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py index ac0b998a0607..30f66f6ef4a3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py @@ -41,6 +41,6 @@ def sample_search_all_resources(): # Make the request page_result = client.search_all_resources(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py index 12dd103c4ba5..6cebe148df80 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py @@ -45,6 +45,6 @@ async def sample_update_feed(): response = await client.update_feed(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py index 3402c41d22bb..d2046c2d9b46 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py @@ -45,6 +45,6 @@ def sample_update_feed(): response = client.update_feed(request=request) # Handle response - print("{}".format(response)) + print(response) # [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index b08ce8febc83..1e6b47658170 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -43,6 +43,6 @@ async def sample_generate_access_token(): response = await client.generate_access_token(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index 357d62c459f6..3c4f4909dc06 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -43,6 +43,6 @@ def sample_generate_access_token(): response = client.generate_access_token(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py index d83bb8d93308..4494e93ef30c 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -43,6 +43,6 @@ async def sample_generate_id_token(): response = await client.generate_id_token(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index da92452b3d09..b3bfb3193878 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -43,6 +43,6 @@ def sample_generate_id_token(): response = client.generate_id_token(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py index e71f7d3256fa..5455fe959af5 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -43,6 +43,6 @@ async def sample_sign_blob(): response = await client.sign_blob(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index 40bd3ae56173..ff60375d5635 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -43,6 +43,6 @@ def sample_sign_blob(): response = client.sign_blob(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py index 8ca30833076d..c75f5ac006a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -43,6 +43,6 @@ async def sample_sign_jwt(): response = await client.sign_jwt(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index 8dc778a594db..0d6e9322bca1 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -43,6 +43,6 @@ def sample_sign_jwt(): response = client.sign_jwt(request=request) # Handle response - print("{}".format(response)) + print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py index 9425509149e0..bfc49260eb84 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -43,6 +43,6 @@ async def sample_create_bucket(): response = await client.create_bucket(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index 490adb5296cd..b0546c1a7142 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -43,6 +43,6 @@ def sample_create_bucket(): response = client.create_bucket(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py index c41f337710c6..3d473947f862 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -47,6 +47,6 @@ async def sample_create_exclusion(): response = await client.create_exclusion(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index ac7e04733cac..c9fe4d30f6b9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -47,6 +47,6 @@ def sample_create_exclusion(): response = client.create_exclusion(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py index 0affba0c4bab..068d1fa0778a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -47,6 +47,6 @@ async def sample_create_sink(): response = await client.create_sink(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index 8bc2129be0ea..34591a8a87e8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -47,6 +47,6 @@ def sample_create_sink(): response = client.create_sink(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py index a9ced73e6ac2..555d13d0cb81 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py @@ -43,6 +43,6 @@ async def sample_create_view(): response = await client.create_view(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py index bbb9033c5cd5..518426012d66 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py @@ -43,6 +43,6 @@ def sample_create_view(): response = client.create_view(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py index 43ae4b510264..bbc44365064d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -42,6 +42,6 @@ async def sample_get_bucket(): response = await client.get_bucket(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index b729d8de740c..933db8359cc0 
100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -42,6 +42,6 @@ def sample_get_bucket(): response = client.get_bucket(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py index bc085f29b579..a12b7fffcc6f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -42,6 +42,6 @@ async def sample_get_cmek_settings(): response = await client.get_cmek_settings(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index b838221d5b02..c252c6b12ffd 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -42,6 +42,6 @@ def sample_get_cmek_settings(): response = client.get_cmek_settings(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py index d0766aa6a96c..7aab7cd6302a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -42,6 +42,6 @@ async def sample_get_exclusion(): response = await client.get_exclusion(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index 4f025180ef9c..6fd1b2108f68 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -42,6 +42,6 @@ def sample_get_exclusion(): response = client.get_exclusion(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py index bc8889c48be2..d945db625473 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -42,6 +42,6 @@ async def sample_get_sink(): response = await client.get_sink(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index 54360300ea26..9c9172304d99 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -42,6 +42,6 @@ def sample_get_sink(): response = client.get_sink(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py index c280eb79b361..c68aa0551caa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -42,6 +42,6 @@ async def sample_get_view(): response = await client.get_view(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index ca94c6088b20..0de5e38b580c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -42,6 +42,6 @@ def sample_get_view(): response = 
client.get_view(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py index 09849854319a..698255b56043 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -41,6 +41,6 @@ async def sample_list_buckets(): # Make the request page_result = client.list_buckets(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index a6b0fc001ae1..62f78c0e2a05 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -41,6 +41,6 @@ def sample_list_buckets(): # Make the request page_result = client.list_buckets(request=request) for response in page_result: - print("{}".format(response)) + print(response) # 
[END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py index fa305a7f19fe..49c7788689ad 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -41,6 +41,6 @@ async def sample_list_exclusions(): # Make the request page_result = client.list_exclusions(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index c47b051b0ea3..2a00c57c380a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -41,6 +41,6 @@ def sample_list_exclusions(): # Make the request page_result = client.list_exclusions(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END 
logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py index 0f44922bf669..7e40c4f3cbbd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -41,6 +41,6 @@ async def sample_list_sinks(): # Make the request page_result = client.list_sinks(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index 81dc1c7dbf4c..99f7f0abb1ef 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -41,6 +41,6 @@ def sample_list_sinks(): # Make the request page_result = client.list_sinks(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py index 13ebc352954d..a4843f371e5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py @@ -41,6 +41,6 @@ async def sample_list_views(): # Make the request page_result = client.list_views(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_ListViews_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py index a61e85641182..d32a0aa11008 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py @@ -41,6 +41,6 @@ def sample_list_views(): # Make the request page_result = client.list_views(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_ListViews_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py index 4343a258b4fd..a859113d5ad1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ -42,6 +42,6 @@ async def sample_update_bucket(): response = await client.update_bucket(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index a2370594b32b..2614b76d9576 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -42,6 +42,6 @@ def sample_update_bucket(): response = client.update_bucket(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py index 6c868ad0639d..cd01d5b04a81 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py @@ -42,6 +42,6 @@ async def sample_update_cmek_settings(): response = await client.update_cmek_settings(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py index 9b7f34e9d10a..a679dfa44109 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py @@ -42,6 +42,6 @@ def sample_update_cmek_settings(): response = client.update_cmek_settings(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py index 7ed53afeb727..bde70749f1e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -47,6 +47,6 @@ async def sample_update_exclusion(): response = await client.update_exclusion(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index 6adacd542a2c..b974978fb806 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -47,6 +47,6 @@ def sample_update_exclusion(): response = client.update_exclusion(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py index 03c6b96b2450..1973b924cd28 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -47,6 +47,6 @@ async def sample_update_sink(): response = await client.update_sink(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index 3bc9c59e336d..9cc57a79f610 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -47,6 +47,6 @@ def sample_update_sink(): response = client.update_sink(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py index e2a1f97e089d..5177171f8fc2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py @@ -42,6 +42,6 @@ async def sample_update_view(): response = await client.update_view(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py index fb5811deaddd..164e01f49b30 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py @@ -42,6 +42,6 @@ def sample_update_view(): response = client.update_view(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py index 
dc2f62c7d9b2..482bf96209e5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -41,6 +41,6 @@ async def sample_list_log_entries(): # Make the request page_result = client.list_log_entries(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index c1a6455f6dc1..b7d3b3ca02e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -41,6 +41,6 @@ def sample_list_log_entries(): # Make the request page_result = client.list_log_entries(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py index a0e119a9266b..0d943f89d909 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -41,6 +41,6 @@ async def sample_list_logs(): # Make the request page_result = client.list_logs(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 13a18b0ebdbc..218acc033bc2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -41,6 +41,6 @@ def sample_list_logs(): # Make the request page_result = client.list_logs(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py index 8ecd94e6d8e0..6734d6a5c84a 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -40,6 +40,6 @@ async def sample_list_monitored_resource_descriptors(): # Make the request page_result = client.list_monitored_resource_descriptors(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py index 5ca468a3a1ed..89da0c9c6765 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -40,6 +40,6 @@ def sample_list_monitored_resource_descriptors(): # Make the request page_result = client.list_monitored_resource_descriptors(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 3202818c3f4a..69e71695982b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -41,6 +41,6 @@ async def sample_tail_log_entries(): # Make the request stream = await client.tail_log_entries([resource_names=['resource_names_value']]) async for response in stream: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index 909a8e8aa2ce..6296d6783e8e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -41,6 +41,6 @@ def sample_tail_log_entries(): # Make the request stream = client.tail_log_entries([resource_names=['resource_names_value']]) for response in stream: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py index 1a6981c740d5..71120d98a2d9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py @@ -45,6 +45,6 @@ async def sample_write_log_entries(): response = await client.write_log_entries(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py index fdce118737f4..7da931be50ab 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py @@ -45,6 +45,6 @@ def sample_write_log_entries(): response = client.write_log_entries(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py index 02f97a044b13..091013bb320c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -47,6 +47,6 @@ async def sample_create_log_metric(): response = await client.create_log_metric(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index 0f2c2824985d..1d63becf0ad2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -47,6 +47,6 @@ def sample_create_log_metric(): response = client.create_log_metric(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py index effabeb34f49..9d225f360f7e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -42,6 +42,6 @@ async def sample_get_log_metric(): response = await client.get_log_metric(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index d49f387ac18c..bb272a2d4164 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -42,6 +42,6 @@ def sample_get_log_metric(): response = client.get_log_metric(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py index 23a495c16386..240b775d36e4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -41,6 +41,6 @@ async def sample_list_log_metrics(): # Make the request page_result = client.list_log_metrics(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index dcc0784eda3d..9944c9443a60 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -41,6 +41,6 @@ def sample_list_log_metrics(): # Make the request page_result = client.list_log_metrics(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py index 080789a8afeb..8bae70082b3f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -47,6 +47,6 @@ async def sample_update_log_metric(): response = await client.update_log_metric(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index 1c5ce5af9d31..1f8631f3755c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -47,6 +47,6 @@ def sample_update_log_metric(): response = client.update_log_metric(request=request) # Handle response - print("{}".format(response)) + print(response) # [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py index b3705e3af78f..de3eddebdfa1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -51,6 +51,6 @@ async def sample_create_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index 16f1ed86af29..d2e83c8ed021 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -51,6 +51,6 @@ def sample_create_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py index 58ad65f23451..119b2e34ac00 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py @@ -44,6 +44,6 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index 85ea9b2e7005..0d868c91cd5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -44,6 +44,6 @@ def sample_delete_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_DeleteInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py index 01a418c516c2..302890bb283c 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py @@ -48,6 +48,6 @@ async def sample_export_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_ExportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py index f00a794b5305..adadccb21450 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py @@ -48,6 +48,6 @@ def sample_export_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_ExportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py index 23f4aa913509..85263ab3ada9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -44,6 +44,6 @@ async def sample_failover_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_FailoverInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index 9de974244a48..8dafe96046be 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -44,6 +44,6 @@ def sample_failover_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_FailoverInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py index 87273f1da476..ed9d84195f5b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -42,6 +42,6 @@ async def sample_get_instance(): response = await 
client.get_instance(request=request) # Handle response - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index d72d199eb216..45fbec17c9ad 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -42,6 +42,6 @@ def sample_get_instance(): response = client.get_instance(request=request) # Handle response - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py index 4f5137291e6c..739afbf6b01a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py @@ -48,6 +48,6 @@ async def sample_import_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_ImportInstance_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py index 3cbe06483880..2f03a78c6f0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py @@ -48,6 +48,6 @@ def sample_import_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_ImportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py index 9ef268347e97..93268aa42c2d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -41,6 +41,6 @@ async def sample_list_instances(): # Make the request page_result = client.list_instances(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_ListInstances_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index 5cbdd3818495..196f5ebcbf8a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -41,6 +41,6 @@ def sample_list_instances(): # Make the request page_result = client.list_instances(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_ListInstances_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py index cc2340794a68..3ecf9137601a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py @@ -49,6 +49,6 @@ async def sample_update_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py index a98f88b579a6..16da68c748c0 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py @@ -49,6 +49,6 @@ def sample_update_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py index b170fb02d947..dee9b3f826f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -45,6 +45,6 @@ async def sample_upgrade_instance(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index c1b6caf8b580..b14f29675cee 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -45,6 +45,6 @@ def sample_upgrade_instance(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index 6d6afcf99463..492aa22271ef 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -41,6 +41,6 @@ async def sample_list_resources(): # Make the request page_result = client.list_resources(request=request) async for response in page_result: - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_ListResources_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index 9ca10fc35c17..817f86924739 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -41,6 +41,6 @@ def sample_list_resources(): # Make the request page_result = client.list_resources(request=request) for response in page_result: - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_ListResources_sync] diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py index d34f0a980d8f..98190750d59c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -41,6 +41,6 @@ async def sample_method_bidi_streaming(): # Make the request stream = await client.method_bidi_streaming([my_string="my_string_value"]) async for response in stream: - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index daba69f1b6d9..de6ffb254d46 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -41,6 +41,6 @@ def sample_method_bidi_streaming(): # Make the request stream = client.method_bidi_streaming([my_string="my_string_value"]) for response in stream: - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py index 61497f4a437e..392241b5e28a 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py @@ -52,6 +52,6 @@ async def sample_method_lro_signatures(): print("Waiting for operation to complete...") response = await operation.result() - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py index ea51e76e12b5..e0fa332206d0 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py @@ -52,6 +52,6 @@ def sample_method_lro_signatures(): print("Waiting for operation to complete...") response = operation.result() - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py index f7351c4b369e..85cf60ef6cbe 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py @@ -50,6 +50,6 @@ async def sample_method_one_signature(): response = await client.method_one_signature(request=request) # Handle response 
- print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py index dcb1a81f0e1e..d09678e58a42 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py @@ -50,6 +50,6 @@ def sample_method_one_signature(): response = client.method_one_signature(request=request) # Handle response - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py index 785c22899a9a..5dfc1b09badf 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py @@ -49,6 +49,6 @@ async def sample_method_server_streaming(): # Make the request stream = await client.method_server_streaming(request=request) async for response in stream: - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py index 5894bff3e943..92782bcad353 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py @@ -49,6 +49,6 @@ def sample_method_server_streaming(): # Make the request stream = client.method_server_streaming(request=request) for response in stream: - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py index fee612e387fa..2bc72ff6d22f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py @@ -43,6 +43,6 @@ async def sample_one_of_method(): response = await client.one_of_method(request=request) # Handle response - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py index 71efe54950ba..ce2a7288433d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py @@ -43,6 +43,6 @@ def 
sample_one_of_method(): response = client.one_of_method(request=request) # Handle response - print("{}".format(response)) + print(response) # [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index b1fdedea4133..c6c44239c888 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -51,6 +51,6 @@ def sample_classify(video, location): response = client.classify(request=request) # Handle response - print("Mollusc is a \"{}\"".format(response.taxonomy)) + print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 59fde511d214..8ad414a78f53 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -51,6 +51,6 @@ async def sample_classify(video, location): response = await client.classify(request=request) # Handle response - print("Mollusc is a \"{}\"".format(response.taxonomy)) + print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index b1fdedea4133..c6c44239c888 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -51,6 +51,6 @@ def sample_classify(video, location): 
response = client.classify(request=request) # Handle response - print("Mollusc is a \"{}\"".format(response.taxonomy)) + print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 95b7e74f53dd..75ae35ec5af3 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -44,6 +44,7 @@ ) env.filters['snake_case'] = utils.to_snake_case env.filters['coerce_response_name'] = gapic_utils.coerce_response_name +env.filters['render_format_string'] = gapic_utils.render_format_string def golden_snippet(filename: str) -> str: diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index edf59a925f2a..3d1b3cff26e4 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -49,6 +49,7 @@ def check_template(template_fragment, expected_output, **kwargs): env.filters['snake_case'] = utils.to_snake_case env.filters['coerce_response_name'] = sample_utils.coerce_response_name + env.filters['render_format_string'] = sample_utils.render_format_string template = env.get_template("template_fragment") text = template.render(**kwargs) @@ -345,7 +346,7 @@ def test_render_print_args(): {{ frags.render_print(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} ''', ''' - print("$resp {} {}".format(response.squids, response.clams)) + print(f"$resp {response.squids} {response.clams}") ''' ) @@ -446,7 +447,7 @@ def test_write_file(): "contents": "$resp.photo"}) }} ''', ''' - with open("specimen-{}".format(response.species), "wb") as f: + with open(f"specimen-{response.species}", "wb") as f: f.write(response.photo) ''' ) @@ -462,7 +463,7 @@ def test_dispatch_write_file(): 
"contents": "$resp.photo"}})}} ''', ''' - with open("specimen-{}".format(response.species), "wb") as f: + with open(f"specimen-{response.species}", "wb") as f: f.write(response.photo) ''' @@ -477,7 +478,7 @@ def test_collection_loop(): ''', ''' for m in response.molluscs: - print("Mollusc: {}".format(m)) + print(f"Mollusc: {m}") ''', @@ -494,7 +495,7 @@ def test_dispatch_collection_loop(): {{ frags.dispatch_statement(statement) }}''', ''' for m in molluscs: - print("Mollusc: {}".format(m)) + print(f"Mollusc: {m}") @@ -513,7 +514,7 @@ def test_map_loop(): }}''', ''' for cls, example in response.molluscs.items(): - print("A {} is a {}".format(example, cls)) + print(f"A {example} is a {cls}") ''', @@ -533,7 +534,7 @@ def test_map_loop_no_key(): ''', ''' for example in response.molluscs.values(): - print("A {} is a mollusc".format(example)) + print(f"A {example} is a mollusc") ''', @@ -552,7 +553,7 @@ def test_map_loop_no_value(): ''', ''' for cls in response.molluscs.keys(): - print("A {} is a mollusc".format(cls)) + print(f"A {cls} is a mollusc") ''', @@ -570,7 +571,7 @@ def test_dispatch_map_loop(): ''', ''' for cls, example in molluscs.items(): - print("A {} is a {}".format(example, cls)) + print(f"A {example} is a {cls}") @@ -618,7 +619,7 @@ def test_render_nested_loop_collection(): for m in response.molluscs: for t in m.tentacles: for s in t.suckers: - print("Sucker: {}".format(s)) + print(f"Sucker: {s}") @@ -671,7 +672,7 @@ def test_render_nested_loop_map(): for klass, orders in response.molluscs.items(): for order, families in orders.items(): for family, ex in families.items(): - print("Example: {}".format(ex)) + print(f"Example: {ex}") From d278feba4fae7b514eeae9150233047754dc40a4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 1 Sep 2021 13:57:04 +0200 Subject: [PATCH 0606/1339] chore(deps): update dependency google-api-core to v2.0.1 (#985) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8be5e627d903..02f40c810d62 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==2.0.0 +google-api-core==2.0.1 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From f28888cbd67f68b66518ec0c11d5fa08dfce6797 Mon Sep 17 00:00:00 2001 From: James Lynn Wu Date: Fri, 10 Sep 2021 19:31:38 -0400 Subject: [PATCH 0607/1339] fix: add a separate DEFAULT_CLIENT_INFO for rest clients (#988) --- .../%sub/services/%service/transports/base.py.j2 | 9 --------- .../%sub/services/%service/transports/rest.py.j2 | 9 ++++++++- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index b38a3535b03f..b5b7bb2ac4f8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -6,9 +6,6 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import packaging.version import pkg_resources -{% if 'rest' in opts.transport %} -from requests import __version__ as requests_version -{% endif %} import google.auth # type: ignore import google.api_core # type: ignore @@ -37,12 +34,6 @@ try: gapic_version=pkg_resources.get_distribution( '{{ api.naming.warehouse_package_name }}', ).version, - {% if 'grpc' not in opts.transport %} - grpc_version=None, - {% endif %} - {% if 'rest' in opts.transport %} - rest_version=requests_version, - {% endif %} ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 8c677847dad7..2308ea8c26f6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -4,6 +4,7 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple +from requests import __version__ as requests_version {% if service.has_lro %} from google.api_core import operations_v1 @@ -30,9 +31,15 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} -from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO +from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + class {{ service.name }}RestTransport({{ service.name }}Transport): """REST backend transport for {{ service.name }}. 
From 5368269cf8c02399fefbc3b3bdcc6b607639267c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Sep 2021 16:52:53 +0200 Subject: [PATCH 0608/1339] chore(deps): update dependency pypandoc to v1.6.4 (#990) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 02f40c810d62..68516fbac8e0 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,6 +4,6 @@ googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 protobuf==3.17.3 -pypandoc==1.6.3 +pypandoc==1.6.4 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 51c159f15f50d919453855e5294c45e336658fa2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 13 Sep 2021 09:10:27 -0600 Subject: [PATCH 0609/1339] chore: make conventional commits optional (#989) --- packages/gapic-generator/.github/sync-repo-settings.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index aaca04c67556..7cd00810e3e9 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -6,7 +6,6 @@ branchProtectionRules: isAdminEnforced: true requiredStatusCheckContexts: - 'cla/google' - - 'conventionalcommits.org' - 'docs' - 'mypy' - 'showcase (showcase)' From 13832b250f4b786931eb482bdc071d1b3ae57fbf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 14 Sep 2021 17:58:18 +0000 Subject: [PATCH 0610/1339] chore: release 0.51.2 (#984) :robot: I have created a release \*beep\* \*boop\* --- ### [0.51.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.1...v0.51.2) (2021-09-13) 
### Bug Fixes * add a separate DEFAULT_CLIENT_INFO for rest clients ([#988](https://www.github.com/googleapis/gapic-generator-python/issues/988)) ([22ac400](https://www.github.com/googleapis/gapic-generator-python/commit/22ac40097ab50bb2d3a7f1a2d35d659c391e0927)) * **snippetgen:** use f-strings in print statements ([#975](https://www.github.com/googleapis/gapic-generator-python/issues/975)) ([122e85c](https://www.github.com/googleapis/gapic-generator-python/commit/122e85c37ff6aa0a99f64361397eb3df5495a3b4)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3d1c1ecd3a30..60420fe94a94 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +### [0.51.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.1...v0.51.2) (2021-09-13) + + +### Bug Fixes + +* add a separate DEFAULT_CLIENT_INFO for rest clients ([#988](https://www.github.com/googleapis/gapic-generator-python/issues/988)) ([22ac400](https://www.github.com/googleapis/gapic-generator-python/commit/22ac40097ab50bb2d3a7f1a2d35d659c391e0927)) +* **snippetgen:** use f-strings in print statements ([#975](https://www.github.com/googleapis/gapic-generator-python/issues/975)) ([122e85c](https://www.github.com/googleapis/gapic-generator-python/commit/122e85c37ff6aa0a99f64361397eb3df5495a3b4)) + ### [0.51.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.0...v0.51.1) (2021-08-20) From 66ba2e19288abb3214096df87ce462606d9557b1 Mon Sep 17 00:00:00 2001 From: kbandes Date: Wed, 15 Sep 2021 14:34:17 -0400 Subject: [PATCH 0611/1339] feat: Support alternative http bindings in the gapic schema. 
(#993) Support alternative http bindings in the gapic schema and adds support for parsing multiple bindings for one method. Co-authored-by: Kenneth Bandes --- .../gapic-generator/gapic/schema/wrappers.py | 39 ++++++++- .../tests/unit/schema/wrappers/test_method.py | 81 +++++++++++++++++++ 2 files changed, 116 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 8c2313f8a7fb..2af844d0a18c 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -36,6 +36,7 @@ from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 +from google.api import http_pb2 from google.api import resource_pb2 from google.api_core import exceptions # type: ignore from google.protobuf import descriptor_pb2 # type: ignore @@ -706,6 +707,27 @@ class RetryInfo: retryable_exceptions: FrozenSet[exceptions.GoogleAPICallError] +@dataclasses.dataclass(frozen=True) +class HttpRule: + """Representation of the method's http bindings.""" + method: str + uri: str + body: Optional[str] + + @classmethod + def try_parse_http_rule(cls, http_rule) -> Optional['HttpRule']: + method = http_rule.WhichOneof("pattern") + if method is None or method == "custom": + return None + + uri = getattr(http_rule, method) + if not uri: + return None + + body = http_rule.body or None + return cls(method, uri, body) + + @dataclasses.dataclass(frozen=True) class Method: """Description of a method (defined with the ``rpc`` keyword).""" @@ -821,13 +843,22 @@ def field_headers(self) -> Sequence[str]: return next((tuple(pattern.findall(verb)) for verb in potential_verbs if verb), ()) + @property + def http_options(self) -> List[HttpRule]: + """Return a list of the http bindings for this method.""" + http = self.options.Extensions[annotations_pb2.http] + http_options = [http] + 
list(http.additional_bindings) + opt_gen = (HttpRule.try_parse_http_rule(http_rule) + for http_rule in http_options) + return [rule for rule in opt_gen if rule] + @property def http_opt(self) -> Optional[Dict[str, str]]: - """Return the http option for this method. + """Return the (main) http option for this method. - e.g. {'verb': 'post' - 'url': '/some/path' - 'body': '*'} + e.g. {'verb': 'post' + 'url': '/some/path' + 'body': '*'} """ http: List[Tuple[descriptor_pb2.FieldDescriptorProto, str]] diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index c13a9afb28f5..00ade8aefbbf 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -13,6 +13,7 @@ # limitations under the License. import collections +import dataclasses from typing import Sequence from google.api import field_behavior_pb2 @@ -328,6 +329,86 @@ def test_method_path_params_no_http_rule(): assert method.path_params == [] +def test_method_http_options(): + verbs = [ + 'get', + 'put', + 'post', + 'delete', + 'patch' + ] + for v in verbs: + http_rule = http_pb2.HttpRule(**{v: '/v1/{parent=projects/*}/topics'}) + method = make_method('DoSomething', http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [{ + 'method': v, + 'uri': '/v1/{parent=projects/*}/topics', + 'body': None + }] + + +def test_method_http_options_empty_http_rule(): + http_rule = http_pb2.HttpRule() + method = make_method('DoSomething', http_rule=http_rule) + assert method.http_options == [] + + http_rule = http_pb2.HttpRule(get='') + method = make_method('DoSomething', http_rule=http_rule) + assert method.http_options == [] + + +def test_method_http_options_no_http_rule(): + method = make_method('DoSomething') + assert method.path_params == [] + + +def test_method_http_options_body(): + http_rule = 
http_pb2.HttpRule( + post='/v1/{parent=projects/*}/topics', + body='*' + ) + method = make_method('DoSomething', http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/topics', + 'body': '*' + }] + + +def test_method_http_options_additional_bindings(): + http_rule = http_pb2.HttpRule( + post='/v1/{parent=projects/*}/topics', + body='*', + additional_bindings=[ + http_pb2.HttpRule( + post='/v1/{parent=projects/*/regions/*}/topics', + body='*', + ), + http_pb2.HttpRule( + post='/v1/projects/p1/topics', + body='body_field', + ), + ] + ) + method = make_method('DoSomething', http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [ + { + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/topics', + 'body': '*' + }, + { + 'method': 'post', + 'uri': '/v1/{parent=projects/*/regions/*}/topics', + 'body': '*' + }, + { + 'method': 'post', + 'uri': '/v1/projects/p1/topics', + 'body': 'body_field' + }] + + def test_method_query_params(): # tests only the basic case of grpc transcoding http_rule = http_pb2.HttpRule( From 2629ef5ca77c676184c4a3b0c5be961b247a5634 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 24 Sep 2021 00:51:37 +0200 Subject: [PATCH 0612/1339] chore(deps): update dependency protobuf to v3.18.0 (#996) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 68516fbac8e0..a90cdbe974d3 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.0.1 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 -protobuf==3.17.3 +protobuf==3.18.0 pypandoc==1.6.4 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 43ea92da4f9e59f97150da965ffa20098d5fd49d Mon Sep 
17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Wed, 29 Sep 2021 16:23:30 -0400 Subject: [PATCH 0613/1339] fix: improper types in pagers generation (#970) * fix: improper types in pagers generation Generators are Iterators not Iterables https://docs.python.org/3/library/typing.html#typing.Generator * chore: update goldens Co-authored-by: Bu Sun Kim Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .../%sub/services/%service/pagers.py.j2 | 12 +++---- .../asset_v1/services/asset_service/pagers.py | 26 +++++++------- .../services/config_service_v2/pagers.py | 34 +++++++++---------- .../services/logging_service_v2/pagers.py | 26 +++++++------- .../services/metrics_service_v2/pagers.py | 10 +++--- .../redis_v1/services/cloud_redis/pagers.py | 10 +++--- 6 files changed, 59 insertions(+), 59 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index badc77a8c17b..3270aaf19a3b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -7,7 +7,7 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. 
#} -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator {% filter sort_lines %} {% for method in service.methods.values() | selectattr('paged_result_field') %} @@ -65,7 +65,7 @@ class {{ method.name }}Pager: return getattr(self._response, name) @property - def pages(self) -> Iterable[{{ method.output.ident }}]: + def pages(self) -> Iterator[{{ method.output.ident }}]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -73,14 +73,14 @@ class {{ method.name }}Pager: yield self._response {% if method.paged_result_field.map %} - def __iter__(self) -> Iterable[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: + def __iter__(self) -> Iterator[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: for page in self.pages: yield from page.{{ method.paged_result_field.name}}.items() def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: return self._response.items.get(key) {% else %} - def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: + def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterator') }}: for page in self.pages: yield from page.{{ method.paged_result_field.name }} {% endif %} @@ -135,14 +135,14 @@ class {{ method.name }}AsyncPager: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[{{ method.output.ident }}]: + async def pages(self) -> AsyncIterator[{{ method.output.ident }}]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> {{ method.paged_result_field.ident | 
replace('Sequence', 'AsyncIterable') }}: + def __aiter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'AsyncIterator') }}: async def async_generator(): async for page in self.pages: for response in page.{{ method.paged_result_field.name }}: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index c09af15cdfa4..94e4b1961df4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets @@ -63,14 +63,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[asset_service.ListAssetsResponse]: + def pages(self) -> Iterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[assets.Asset]: + def __iter__(self) -> Iterator[assets.Asset]: for page in self.pages: yield from page.assets @@ -122,14 +122,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[asset_service.ListAssetsResponse]: + async def pages(self) -> 
AsyncIterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[assets.Asset]: + def __aiter__(self) -> AsyncIterator[assets.Asset]: async def async_generator(): async for page in self.pages: for response in page.assets: @@ -185,14 +185,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[asset_service.SearchAllResourcesResponse]: + def pages(self) -> Iterator[asset_service.SearchAllResourcesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[assets.ResourceSearchResult]: + def __iter__(self) -> Iterator[assets.ResourceSearchResult]: for page in self.pages: yield from page.results @@ -244,14 +244,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[asset_service.SearchAllResourcesResponse]: + async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[assets.ResourceSearchResult]: + def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: async def async_generator(): async for page in self.pages: for response in page.results: @@ -307,14 +307,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> 
Iterable[asset_service.SearchAllIamPoliciesResponse]: + def pages(self) -> Iterator[asset_service.SearchAllIamPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[assets.IamPolicySearchResult]: + def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: for page in self.pages: yield from page.results @@ -366,14 +366,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[asset_service.SearchAllIamPoliciesResponse]: + async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[assets.IamPolicySearchResult]: + def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: async def async_generator(): async for page in self.pages: for response in page.results: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 11dce2ab7d58..43e0084a0019 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.cloud.logging_v2.types import logging_config @@ -62,14 +62,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListBucketsResponse]: + def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogBucket]: + def __iter__(self) -> Iterator[logging_config.LogBucket]: for page in self.pages: yield from page.buckets @@ -121,14 +121,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListBucketsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogBucket]: + def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): async for page in self.pages: for response in page.buckets: @@ -184,14 +184,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListViewsResponse]: + def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) 
yield self._response - def __iter__(self) -> Iterable[logging_config.LogView]: + def __iter__(self) -> Iterator[logging_config.LogView]: for page in self.pages: yield from page.views @@ -243,14 +243,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListViewsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogView]: + def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: for response in page.views: @@ -306,14 +306,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListSinksResponse]: + def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogSink]: + def __iter__(self) -> Iterator[logging_config.LogSink]: for page in self.pages: yield from page.sinks @@ -365,14 +365,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListSinksResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> 
AsyncIterable[logging_config.LogSink]: + def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: for response in page.sinks: @@ -428,14 +428,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_config.ListExclusionsResponse]: + def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_config.LogExclusion]: + def __iter__(self) -> Iterator[logging_config.LogExclusion]: for page in self.pages: yield from page.exclusions @@ -487,14 +487,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_config.ListExclusionsResponse]: + async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_config.LogExclusion]: + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: for response in page.exclusions: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 9b94311d2e33..95adb7e912c9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry @@ -64,14 +64,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListLogEntriesResponse]: + def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[log_entry.LogEntry]: + def __iter__(self) -> Iterator[log_entry.LogEntry]: for page in self.pages: yield from page.entries @@ -123,14 +123,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListLogEntriesResponse]: + async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[log_entry.LogEntry]: + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: for response in page.entries: @@ -186,14 +186,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> 
Iterable[logging.ListMonitoredResourceDescriptorsResponse]: + def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: for page in self.pages: yield from page.resource_descriptors @@ -245,14 +245,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListMonitoredResourceDescriptorsResponse]: + async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[monitored_resource_pb2.MonitoredResourceDescriptor]: + def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: for response in page.resource_descriptors: @@ -308,14 +308,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging.ListLogsResponse]: + def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[str]: + def __iter__(self) -> Iterator[str]: for page in self.pages: yield from page.log_names @@ -367,14 +367,14 @@ def __getattr__(self, name: str) -> Any: 
return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging.ListLogsResponse]: + async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[str]: + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: for response in page.log_names: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index f6bf04e4f968..a3faa77a20f7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.cloud.logging_v2.types import logging_metrics @@ -62,14 +62,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[logging_metrics.ListLogMetricsResponse]: + def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[logging_metrics.LogMetric]: + def __iter__(self) -> Iterator[logging_metrics.LogMetric]: for page in self.pages: yield from page.metrics @@ -121,14 +121,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[logging_metrics.ListLogMetricsResponse]: + async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[logging_metrics.LogMetric]: + def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: for response in page.metrics: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index ea1c2287e22e..804104f8f062 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator from google.cloud.redis_v1.types import cloud_redis @@ -62,14 +62,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[cloud_redis.ListInstancesResponse]: + def pages(self) -> Iterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[cloud_redis.Instance]: + def __iter__(self) -> Iterator[cloud_redis.Instance]: for page in self.pages: yield from page.instances @@ -121,14 +121,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[cloud_redis.ListInstancesResponse]: + async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[cloud_redis.Instance]: + def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: async def async_generator(): async for page in self.pages: for response in page.instances: From 2b93783e3365e5c1554a80425e5d69e81c3dc8db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 29 Sep 2021 20:26:42 +0000 Subject: [PATCH 
0614/1339] chore: release 0.52.0 (#995) :robot: I have created a release \*beep\* \*boop\* --- ## [0.52.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.2...v0.52.0) (2021-09-29) ### Features * Support alternative http bindings in the gapic schema. ([#993](https://www.github.com/googleapis/gapic-generator-python/issues/993)) ([041a726](https://www.github.com/googleapis/gapic-generator-python/commit/041a726b818cd67812d689c23757f31ec9964d66)) ### Bug Fixes * improper types in pagers generation ([#970](https://www.github.com/googleapis/gapic-generator-python/issues/970)) ([bba3eea](https://www.github.com/googleapis/gapic-generator-python/commit/bba3eea5d45fe57c0395ceef30402ad7880013d7)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 60420fe94a94..9eecaa0978e6 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.52.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.2...v0.52.0) (2021-09-29) + + +### Features + +* Support alternative http bindings in the gapic schema. 
([#993](https://www.github.com/googleapis/gapic-generator-python/issues/993)) ([041a726](https://www.github.com/googleapis/gapic-generator-python/commit/041a726b818cd67812d689c23757f31ec9964d66)) + + +### Bug Fixes + +* improper types in pagers generation ([#970](https://www.github.com/googleapis/gapic-generator-python/issues/970)) ([bba3eea](https://www.github.com/googleapis/gapic-generator-python/commit/bba3eea5d45fe57c0395ceef30402ad7880013d7)) + ### [0.51.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.1...v0.51.2) (2021-09-13) From e9a6ac506aeabc62cf4ba3606669fec4364d6bca Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 30 Sep 2021 13:13:30 -0700 Subject: [PATCH 0615/1339] feat: enable self signed jwt for http (#1000) --- .../%sub/services/%service/client.py.j2 | 5 +---- .../%name_%version/%sub/test_%service.py.j2 | 18 ------------------ .../asset_v1/services/asset_service/client.py | 5 +---- .../services/iam_credentials/client.py | 5 +---- .../services/config_service_v2/client.py | 5 +---- .../services/logging_service_v2/client.py | 5 +---- .../services/metrics_service_v2/client.py | 5 +---- .../redis_v1/services/cloud_redis/client.py | 5 +---- 8 files changed, 7 insertions(+), 46 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index f7a513220ee3..3bc7f20c5f2e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -305,10 +305,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): quota_project_id=client_options.quota_project_id, client_info=client_info, {% if "grpc" in opts.transport %} - 
always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, {% endif %} ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 61618036d404..7187d6295b26 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -208,9 +208,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,9 +225,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -246,9 +242,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -275,9 +269,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in 
opts.transport %} always_use_jwt_access=True, - {% endif %} ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -323,9 +315,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -351,9 +341,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) # Check the case client_cert_source and ADC client cert are not provided. @@ -370,9 +358,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) @@ -400,9 +386,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) @pytest.mark.parametrize("client_class,transport_class,transport_name", [ @@ -429,9 +413,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, - {% if 'grpc' in opts.transport %} always_use_jwt_access=True, - {% endif %} ) {% if 'grpc' in opts.transport %} diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 98dcd3aa8f3d..0d7bac0e5e19 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -344,10 +344,7 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def export_assets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 1280b71919ad..caa32cecbf0f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -340,10 +340,7 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def generate_access_token(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index acf10f2292b8..af174010f674 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -375,10 +375,7 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_buckets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 354945976630..4a73c2318129 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -331,10 +331,7 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def delete_log(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 
af554cf6d6fd..616d0a69e80d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -332,10 +332,7 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_log_metrics(self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 2857a5fb9fde..9afc35601ae1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -355,10 +355,7 @@ def __init__(self, *, client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def list_instances(self, From 7b39d4a5e2ac85e778ccba6d2c59232b74ce064d Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Thu, 30 Sep 2021 18:18:47 -0400 Subject: [PATCH 0616/1339] feat: implement grpc transcode for rest transport and complete generated tests (#999) feat: implement grpc transcode for rest transport and complete generated tests. 
--- .../gapic-generator/gapic/schema/wrappers.py | 14 + .../services/%service/transports/rest.py.j2 | 198 ++++++----- .../%name_%version/%sub/test_%service.py.j2 | 328 ++++++++++-------- .../gapic-generator/gapic/utils/__init__.py | 2 + .../gapic-generator/gapic/utils/uri_sample.py | 76 ++++ .../unit/gapic/asset_v1/test_asset_service.py | 1 + .../credentials_v1/test_iam_credentials.py | 1 + .../logging_v2/test_config_service_v2.py | 1 + .../logging_v2/test_logging_service_v2.py | 1 + .../logging_v2/test_metrics_service_v2.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1 + .../tests/unit/schema/wrappers/test_method.py | 11 + 12 files changed, 415 insertions(+), 220 deletions(-) create mode 100644 packages/gapic-generator/gapic/utils/uri_sample.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 2af844d0a18c..b9a38761338f 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -29,6 +29,7 @@ import collections import dataclasses +import json import re from itertools import chain from typing import (Any, cast, Dict, FrozenSet, Iterable, List, Mapping, @@ -39,6 +40,7 @@ from google.api import http_pb2 from google.api import resource_pb2 from google.api_core import exceptions # type: ignore +from google.api_core import path_template # type: ignore from google.protobuf import descriptor_pb2 # type: ignore from google.protobuf.json_format import MessageToDict # type: ignore @@ -714,6 +716,18 @@ class HttpRule: uri: str body: Optional[str] + @property + def path_fields(self) -> List[Tuple[str, str]]: + """return list of (name, template) tuples extracted from uri.""" + return [(match.group("name"), match.group("template")) + for match in path_template._VARIABLE_RE.finditer(self.uri)] + + @property + def sample_request(self) -> str: + """return json dict for sample request matching the uri template.""" + sample = 
utils.sample_from_path_fields(self.path_fields) + return json.dumps(sample) + @classmethod def try_parse_http_rule(cls, http_rule) -> Optional['HttpRule']: method = http_rule.WhichOneof("pattern") diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 2308ea8c26f6..87eef7d49747 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -1,29 +1,31 @@ +from google.auth.transport.requests import AuthorizedSession +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import rest_helpers # type: ignore +from google.api_core import path_template # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import operations_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple +import warnings {% extends '_base.py.j2' %} {% block content %} -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from requests import __version__ as requests_version {% if service.has_lro %} -from google.api_core import operations_v1 {% endif %} -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from 
google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from google.auth.transport.requests import AuthorizedSession {# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} {% filter sort_lines %} {% for method in service.methods.values() %} -{{ method.input.ident.python_import }} -{{ method.output.ident.python_import }} +{{method.input.ident.python_import}} +{{method.output.ident.python_import}} {% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -31,7 +33,7 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} -from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -40,7 +42,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=requests_version, ) -class {{ service.name }}RestTransport({{ service.name }}Transport): +class {{service.name}}RestTransport({{service.name}}Transport): """REST backend transport for {{ service.name }}. 
{{ service.meta.doc|rst(width=72, indent=4) }} @@ -54,13 +56,15 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {# TODO(yon-mg): handle mtls stuff if that's relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', ) -> None: """Instantiate the transport. @@ -88,6 +92,11 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -99,7 +108,8 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): client_info=client_info, always_use_jwt_access=always_use_jwt_access, ) - self._session = AuthorizedSession(self._credentials, default_host=self.DEFAULT_HOST) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) {% if service.has_lro %} self._operations_client = None {% endif %} @@ -136,16 +146,17 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): # Return the client from cache. return self._operations_client + + {% endif %} {% for method in service.methods.values() %} - {% if method.http_opt %} - - def {{ method.name|snake_case }}(self, - request: {{ method.input.ident }}, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> {{ method.output.ident }}: + {%- if method.http_options and not method.lro and not (method.server_streaming or method.client_streaming) %} + def _{{method.name | snake_case}}(self, + request: {{method.input.ident}}, *, + retry: retries.Retry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{method.output.ident}}: r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} @@ -168,62 +179,57 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {% endif %} """ - {# TODO(yon-mg): refactor when implementing grpc transcoding - - parse request pb & assign body, path params - - shove leftovers into query params - - make sure dotted nested fields preserved - - format url and send the request - #} - {% if 'body' in method.http_opt %} + http_options = [ + {%- for rule in method.http_options %}{ + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {%- if rule.body %} + 'body': '{{ rule.body }}', + {%- endif %} + }, + {%- endfor %}] + + request_kwargs = {{method.input.ident}}.to_dict(request) + transcoded_request = 
path_template.transcode( + http_options, **request_kwargs) + + {% set body_spec = method.http_options[0].body %} + {%- if body_spec %} + # Jsonify the request body - {% if method.http_opt['body'] != '*' %} - body = {{ method.input.fields[method.http_opt['body']].type.ident }}.to_json( - request.{{ method.http_opt['body'] }}, + body = {% if body_spec == '*' -%} + {{method.input.ident}}.to_json( + {{method.input.ident}}(transcoded_request['body']), + {%- else -%} + {{method.input.fields[body_spec].type.ident}}.to_json( + {{method.input.fields[body_spec].type.ident}}( + transcoded_request['body']), + {%- endif %} + including_default_value_fields=False, use_integers_for_enums=False ) - {% else %} - body = {{ method.input.ident }}.to_json( - request, - use_integers_for_enums=False - ) - {% endif %} - {% endif %} + {%- endif %}{# body_spec #} - {# TODO(yon-mg): Write helper method for handling grpc transcoding url #} - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = 'https://{host}{{ method.http_opt['url'] }}'.format( - host=self._host, - {% for field in method.path_params %} - {{ field }}=request.{{ method.input.get_field(field).name }}, - {% endfor %} - ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] - {# TODO(yon-mg): move all query param logic out of wrappers into here to handle - nested fields correctly (can't just use set of top level fields - #} - # TODO(yon-mg): handle nested fields correctly rather than using only top level fields - # not required for GCE - query_params = {} - {% for field in method.query_params | sort%} - {% if method.input.fields[field].proto3_optional %} - if {{ method.input.ident }}.{{ field }} in request: - query_params['{{ field|camel_case }}'] = request.{{ field }} - {% else %} - query_params['{{ field|camel_case }}'] = request.{{ field }} - {% endif %} - {% endfor %} + # Jsonify the query params + query_params = 
json.loads({{method.input.ident}}.to_json( + {{method.input.ident}}(transcoded_request['query_params']), + including_default_value_fields=False, + use_integers_for_enums=False + )) # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = self._session.{{ method.http_opt['verb'] }}( - url, + response=getattr(self._session, method)( + uri, timeout=timeout, headers=headers, - params=query_params, - {% if 'body' in method.http_opt %} + params=rest_helpers.flatten_query_params(query_params), + {% if body_spec %} data=body, {% endif %} ) @@ -235,16 +241,50 @@ class {{ service.name }}RestTransport({{ service.name }}Transport): {% if not method.void %} # Return the response - return {{ method.output.ident }}.from_json( + return {{method.output.ident}}.from_json( response.content, ignore_unknown_fields=True ) {% endif %} - {% endif %} + {% else %} + + def _{{method.name | snake_case}}(self, + request: {{method.input.ident}}, *, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{method.output.ident}}: + r"""Placeholder: Unable to implement over REST + """ + {%- if not method.http_options %} + + raise RuntimeError( + "Cannot define a method without a valid 'google.api.http' annotation.") + {%- elif method.lro %} + + raise NotImplementedError( + "LRO over REST is not yet defined for python client.") + {%- elif method.server_streaming or method.client_streaming %} + + raise NotImplementedError( + "Streaming over REST is not yet defined for python client") + {%- else %} + + raise NotImplementedError() + {%- endif %} + {%- endif %} + + {% endfor %} + {%- for method in service.methods.values() %} + + @ property + def {{method.name | snake_case}}(self) -> Callable[ + [{{method.input.ident}}], + {{method.output.ident}}]: + return self._{{method.name | snake_case}} + {%- endfor %} -__all__ = ( +__all__=( '{{ service.name }}RestTransport', ) {% endblock %} diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 7187d6295b26..d76414d34d68 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -33,6 +33,7 @@ from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template {% if service.has_lro %} from google.api_core import future from google.api_core import operations_v1 @@ -1108,16 +1109,18 @@ def test_{{ method.name|snake_case }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport %} +{% for method in service.methods.values() if 'rest' in opts.transport and + method.http_options %} +{# TODO(kbandes): remove this if condition when lro and streaming are supported. #} +{% if not method.lro and not (method.server_streaming or method.client_streaming) %} def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request = request_type({{ method.http_options[0].sample_request}}) {% if method.client_streaming %} requests = [request] {% endif %} @@ -1133,16 +1136,27 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {% endfor %} + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} ) {% endif %} # Wrap the value into a proper Response obj - json_return_value = {{ method.output.ident }}.to_json(return_value) response_value = Response() response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} @@ -1161,7 +1175,8 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type assert response is None {% else %} assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() %} + {% for field in method.output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} 
@@ -1169,6 +1184,7 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type {% else %} assert response.{{ field.name }} == {{ field.mock_value }} {% endif %} + {% endif %}{# end oneof/optional #} {% endfor %} {% endif %} @@ -1177,6 +1193,7 @@ def test_{{ method.name|snake_case }}_rest_from_dict(): test_{{ method.name|snake_case }}_rest(request_type=dict) +{% if method.flattened_fields %} def test_{{ method.name|snake_case }}_rest_flattened(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1196,46 +1213,40 @@ def test_{{ method.name|snake_case }}_rest_flattened(): {% endif %} # Wrap the value into a proper Response obj - json_return_value = {{ method.output.ident }}.to_json(return_value) response_value = Response() response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- {% for field in method.flattened_fields.values() if field.field_pb is msg_field_pb %} - {{ field.name }} = {{ field.mock_value }} - {% endfor %} - client.{{ method.name|snake_case }}( + # get arguments that satisfy an http rule for this method + sample_request = {{ method.http_options[0].sample_request }} + + # get truthy value for each flattened field + mock_args = dict( {% for field in method.flattened_fields.values() %} - {% if field.field_pb is msg_field_pb %} - {{ field.name }}={{ field.name }}, - {% else %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} {{ field.name }}={{ field.mock_value }}, {% endif %} {% endfor %} ) + mock_args.update(sample_request) + client.{{ method.name|snake_case }}(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get('data') - params = http_params.get('params') - {% for key, field in method.flattened_fields.items() %} - {% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} - assert TimestampRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration_pb2.Duration' %} - assert DurationRule().to_proto(http_call[0].{{ key }}) == {{ field.mock_value }} - {% else %} - assert {% if field.field_pb is msg_field_pb %}{{ field.ident }}.to_json({{ field.name }}, including_default_value_fields=False, use_integers_for_enums=False) - {%- elif field.field_pb is str_field_pb %}{{ field.mock_value }} - {%- else %}str({{ field.mock_value }}) - {%- endif %} in http_call[1] + str(body) + str(params) - {% endif %} - {% endif %}{% endfor %} + _, args, _ = req.mock_calls[0] + {% with uri = method.http_options[0].uri %} + assert path_template.validate("{{ uri }}", args[1]) + {% endwith %} + {# TODO(kbandes) - reverse-transcode 
request args to check all request fields #} def test_{{ method.name|snake_case }}_rest_flattened_error(): @@ -1252,128 +1263,143 @@ def test_{{ method.name|snake_case }}_rest_flattened_error(): {{ field.name }}={{ field.mock_value }}, {% endfor %} ) +{% endif %}{# flattened fields #} {% if method.paged_result_field %} -def test_{{ method.name|snake_case }}_pager(): +def test_{{ method.name|snake_case }}_rest_pager(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: - # Set the response as a series of pages - {% if method.paged_result_field.map%} - response = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={}, - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - ), - ) - {% else %} - response = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name 
}}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - ) - {% endif %} - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple({{ method.output.ident }}.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + {% if method.paged_result_field.map%} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + ) + {% else %} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ 
method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + ) + {% endif %} + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple({{ method.output.ident }}.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {{ method.http_options[0].sample_request }} + pager = client.{{ method.name|snake_case }}(request=sample_request) + + {% if method.paged_result_field.map %} + assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) + assert pager.get('h') is None + {% endif %} - metadata = () - {% if method.field_headers %} - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ('{{ field_header }}', ''), - {% endif %} - {% endfor %} - )), - ) - {% endif %} - pager = client.{{ method.name|snake_case }}(request={}) + results = list(pager) + assert len(results) == 6 + {% if method.paged_result_field.map %} + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + assert pager.get('a') is None + assert 
isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) + for i in results) + {% endif %} - assert pager._metadata == metadata + pages = list(client.{{ method.name|snake_case }}(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token - {% if method.paged_result_field.map %} - assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) - assert pager.get('h') is None - {% endif %} - results = list(pager) - assert len(results) == 6 - {% if method.paged_result_field.map %} - assert all( - isinstance(i, tuple) - for i in results) - for result in results: - assert isinstance(result, tuple) - assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) +{% endif %} {# paged methods #} +{%- else %} - assert pager.get('a') is None - assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) - {% else %} - assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) - for i in results) - {% endif %} +def test_{{ method.name|snake_case }}_rest_error(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + {%- if not method.http_options %} + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(RuntimeError) as runtime_error: + client.{{ method.name|snake_case }}({}) + assert ('Cannot define a method without a valid `google.api.http` annotation.' 
+ in str(runtime_error.value)) + {%- else %} + + # TODO(yon-mg): Remove when this method has a working implementation + # or testing straegy + with pytest.raises(NotImplementedError): + client.{{ method.name|snake_case }}({}) - pages = list(client.{{ method.name|snake_case }}(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + {%- endif %} +{% endif %} -{% endif %}{# paged methods #} -{% endfor %}{# method in methods for rest #} +{% endfor -%} {#- method in methods for rest #} def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( @@ -1718,7 +1744,27 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -{% endif %} + + +{# TODO(kbandes): re-enable this code when LRO is implmented for rest #} +{% if False and service.has_lro -%} +def test_{{ service.name|snake_case }}_rest_lro_client(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client +{%- endif %} +{% endif %} {# rest #} def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] %} diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 98d31c283fc6..447b0df837f6 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -28,6 +28,7 @@ from gapic.utils.options import Options from gapic.utils.reserved_names import RESERVED_NAMES from gapic.utils.rst import rst +from gapic.utils.uri_sample import sample_from_path_fields __all__ = ( @@ -41,6 +42,7 @@ 'partition', 'RESERVED_NAMES', 'rst', + 'sample_from_path_fields', 'sort_lines', 'to_snake_case', 'to_camel_case', diff --git a/packages/gapic-generator/gapic/utils/uri_sample.py b/packages/gapic-generator/gapic/utils/uri_sample.py new file mode 100644 index 000000000000..43b8865abfff --- /dev/null +++ b/packages/gapic-generator/gapic/utils/uri_sample.py @@ -0,0 +1,76 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any, Generator, Dict, List, Tuple +import re + + +def _sample_names() -> Generator[str, None, None]: + sample_num: int = 0 + while True: + sample_num += 1 + yield "sample{}".format(sample_num) + + +def add_field(obj, path, value): + """Insert a field into a nested dict and return the (outer) dict. + Keys and sub-dicts are inserted if necessary to create the path. + e.g. if obj, as passed in, is {}, path is "a.b.c", and value is + "hello", obj will be updated to: + {'a': + {'b': + { + 'c': 'hello' + } + } + } + + Args: + obj: a (possibly) nested dict (parsed json) + path: a segmented field name, e.g. "a.b.c" + where each part is a dict key. + value: the value of the new key. + Returns: + obj, possibly modified + Raises: + AttributeError if the path references a key that is + not a dict.: e.g. path='a.b', obj = {'a':'abc'} + """ + segments = path.split('.') + leaf = segments.pop() + subfield = obj + for segment in segments: + subfield = subfield.setdefault(segment, {}) + subfield[leaf] = value + return obj + + +def sample_from_path_fields(paths: List[Tuple[str, str]]) -> Dict[Any, Any]: + """Construct a dict for a sample request object from a list of fields + and template patterns. + + Args: + paths: a list of tuples, each with a (segmented) name and a pattern. + Returns: + A new nested dict with the templates instantiated. 
+ """ + + request: Dict[str, Any] = {} + sample_names = _sample_names() + + for path, template in paths: + sample_value = re.sub( + r"(\*\*|\*)", lambda n: next(sample_names), template) + add_field(request, path, sample_value) + return request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 87c110ebcb55..21d14df35c36 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -32,6 +32,7 @@ from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 74dacb0923ff..6945e557a0f0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from 
google.iam.credentials_v1.services.iam_credentials import IAMCredentialsAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 979cbd360592..288323a4bc0c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index b95281460984..31f15bca043d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -30,6 +30,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 5ce85b428459..e3eb2f3aca46 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -33,6 +33,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2AsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index a1c9cc2e643b..1b719406947b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -32,6 +32,7 @@ from google.api_core import grpc_helpers_async from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 00ade8aefbbf..c6a81d9128ac 100644 --- 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -14,6 +14,7 @@ import collections import dataclasses +import json from typing import Sequence from google.api import field_behavior_pb2 @@ -409,6 +410,16 @@ def test_method_http_options_additional_bindings(): }] +def test_method_http_options_generate_sample(): + http_rule = http_pb2.HttpRule( + get='/v1/{resource.id=projects/*/regions/*/id/**}/stuff', + ) + method = make_method('DoSomething', http_rule=http_rule) + sample = method.http_options[0].sample_request + assert json.loads(sample) == {'resource': { + 'id': 'projects/sample1/regions/sample2/id/sample3'}} + + def test_method_query_params(): # tests only the basic case of grpc transcoding http_rule = http_pb2.HttpRule( From 9961f43f7c4b74813d3e2d5802f95cb85a4c4549 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 4 Oct 2021 10:32:37 -0600 Subject: [PATCH 0617/1339] fix: fix docstring for first attribute of protos (#1004) --- .../%name/%version/%sub/types/_message.py.j2 | 3 +- .../%name_%version/%sub/types/_message.py.j2 | 3 +- .../cloud/asset_v1/types/asset_service.py | 31 ++++++++++++++++++- .../google/cloud/asset_v1/types/assets.py | 10 ++++++ .../google/iam/credentials_v1/types/common.py | 8 +++++ .../cloud/logging_v2/types/log_entry.py | 1 + .../google/cloud/logging_v2/types/logging.py | 15 ++++++++- .../cloud/logging_v2/types/logging_config.py | 27 ++++++++++++++++ .../cloud/logging_v2/types/logging_metrics.py | 6 ++++ .../cloud/redis_v1/types/cloud_redis.py | 9 +++++- 10 files changed, 108 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 5e07a55f2bc9..581c976b5e36 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -1,5 +1,6 @@ class {{ message.name }}({{ p }}.Message): - r"""{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} + r"""{{ message.meta.doc|rst(indent=4) }} + {% if message.fields|length %} Attributes: {% for field in message.fields.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 890b2cd80dcd..9bd6d51d82bb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -1,5 +1,6 @@ class {{ message.name }}({{ p }}.Message): - r"""{{ message.meta.doc|rst(indent=4) }}{% if message.fields|length %} + r"""{{ message.meta.doc|rst(indent=4) }} + {% if message.fields|length %} Attributes: {% for field in message.fields.values() %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 8ecee480f817..739d78d9a1e0 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -73,6 +73,7 @@ class ContentType(proto.Enum): class ExportAssetsRequest(proto.Message): r"""Export asset request. + Attributes: parent (str): Required. The relative name of the root @@ -187,6 +188,7 @@ class ExportAssetsResponse(proto.Message): class ListAssetsRequest(proto.Message): r"""ListAssets request. + Attributes: parent (str): Required. 
Name of the organization or project the assets @@ -271,6 +273,7 @@ class ListAssetsRequest(proto.Message): class ListAssetsResponse(proto.Message): r"""ListAssets response. + Attributes: read_time (google.protobuf.timestamp_pb2.Timestamp): Time the snapshot was taken. @@ -305,6 +308,7 @@ def raw_page(self): class BatchGetAssetsHistoryRequest(proto.Message): r"""Batch get assets history request. + Attributes: parent (str): Required. The relative name of the root @@ -356,6 +360,7 @@ class BatchGetAssetsHistoryRequest(proto.Message): class BatchGetAssetsHistoryResponse(proto.Message): r"""Batch get assets history response. + Attributes: assets (Sequence[google.cloud.asset_v1.types.TemporalAsset]): A list of assets with valid time windows. @@ -370,6 +375,7 @@ class BatchGetAssetsHistoryResponse(proto.Message): class CreateFeedRequest(proto.Message): r"""Create asset feed request. + Attributes: parent (str): Required. The name of the @@ -409,6 +415,7 @@ class CreateFeedRequest(proto.Message): class GetFeedRequest(proto.Message): r"""Get asset feed request. + Attributes: name (str): Required. The name of the Feed and it must be in the format @@ -425,6 +432,7 @@ class GetFeedRequest(proto.Message): class ListFeedsRequest(proto.Message): r"""List asset feeds request. + Attributes: parent (str): Required. The parent @@ -443,6 +451,7 @@ class ListFeedsRequest(proto.Message): class ListFeedsResponse(proto.Message): r""" + Attributes: feeds (Sequence[google.cloud.asset_v1.types.Feed]): A list of feeds. @@ -457,6 +466,7 @@ class ListFeedsResponse(proto.Message): class UpdateFeedRequest(proto.Message): r"""Update asset feed request. + Attributes: feed (google.cloud.asset_v1.types.Feed): Required. The new values of feed details. It must match an @@ -484,6 +494,7 @@ class UpdateFeedRequest(proto.Message): class DeleteFeedRequest(proto.Message): r""" + Attributes: name (str): Required. 
The name of the feed and it must be in the format @@ -500,6 +511,7 @@ class DeleteFeedRequest(proto.Message): class OutputConfig(proto.Message): r"""Output configuration for export assets destination. + Attributes: gcs_destination (google.cloud.asset_v1.types.GcsDestination): Destination on Cloud Storage. @@ -525,6 +537,7 @@ class OutputConfig(proto.Message): class OutputResult(proto.Message): r"""Output result of export assets. + Attributes: gcs_result (google.cloud.asset_v1.types.GcsOutputResult): Export result on Cloud Storage. @@ -540,6 +553,7 @@ class OutputResult(proto.Message): class GcsOutputResult(proto.Message): r"""A Cloud Storage output result. + Attributes: uris (Sequence[str]): List of uris of the Cloud Storage objects. Example: @@ -554,6 +568,7 @@ class GcsOutputResult(proto.Message): class GcsDestination(proto.Message): r"""A Cloud Storage location. + Attributes: uri (str): The uri of the Cloud Storage object. It's the same uri that @@ -592,6 +607,7 @@ class GcsDestination(proto.Message): class BigQueryDestination(proto.Message): r"""A BigQuery destination for exporting assets to. + Attributes: dataset (str): Required. The BigQuery dataset in format @@ -719,6 +735,7 @@ class PartitionKey(proto.Enum): class PubsubDestination(proto.Message): r"""A Pub/Sub destination. + Attributes: topic (str): The name of the Pub/Sub topic to publish to. Example: @@ -733,6 +750,7 @@ class PubsubDestination(proto.Message): class FeedOutputConfig(proto.Message): r"""Output configuration for asset feed destination. + Attributes: pubsub_destination (google.cloud.asset_v1.types.PubsubDestination): Destination on Pub/Sub. @@ -838,6 +856,7 @@ class Feed(proto.Message): class SearchAllResourcesRequest(proto.Message): r"""Search all resources request. + Attributes: scope (str): Required. 
A scope can be a project, a folder, or an @@ -983,6 +1002,7 @@ class SearchAllResourcesRequest(proto.Message): class SearchAllResourcesResponse(proto.Message): r"""Search all resources response. + Attributes: results (Sequence[google.cloud.asset_v1.types.ResourceSearchResult]): A list of Resources that match the search @@ -1012,6 +1032,7 @@ def raw_page(self): class SearchAllIamPoliciesRequest(proto.Message): r"""Search all IAM policies request. + Attributes: scope (str): Required. A scope can be a project, a folder, or an @@ -1150,6 +1171,7 @@ class SearchAllIamPoliciesRequest(proto.Message): class SearchAllIamPoliciesResponse(proto.Message): r"""Search all IAM policies response. + Attributes: results (Sequence[google.cloud.asset_v1.types.IamPolicySearchResult]): A list of IamPolicy that match the search @@ -1179,6 +1201,7 @@ def raw_page(self): class IamPolicyAnalysisQuery(proto.Message): r"""## IAM policy analysis query message. + Attributes: scope (str): Required. The relative name of the root asset. Only @@ -1279,6 +1302,7 @@ class AccessSelector(proto.Message): class Options(proto.Message): r"""Contains query options. + Attributes: expand_groups (bool): Optional. If true, the identities section of the result will @@ -1395,6 +1419,7 @@ class Options(proto.Message): class ConditionContext(proto.Message): r"""The IAM conditions context. + Attributes: access_time (google.protobuf.timestamp_pb2.Timestamp): The hypothetical access timestamp to evaluate IAM @@ -1497,6 +1522,7 @@ class AnalyzeIamPolicyResponse(proto.Message): class IamPolicyAnalysis(proto.Message): r"""An analysis message to group the query and results. + Attributes: analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): The analysis query. @@ -1563,6 +1589,7 @@ class IamPolicyAnalysisOutputConfig(proto.Message): class GcsDestination(proto.Message): r"""A Cloud Storage location. + Attributes: uri (str): Required. The uri of the Cloud Storage object. 
It's the same @@ -1585,6 +1612,7 @@ class GcsDestination(proto.Message): class BigQueryDestination(proto.Message): r"""A BigQuery destination. + Attributes: dataset (str): Required. The BigQuery dataset in format @@ -1694,7 +1722,8 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): class AnalyzeIamPolicyLongrunningResponse(proto.Message): r"""A response message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - """ + + """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index b2b5a9156bff..ef30863e3c6c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -98,6 +98,7 @@ class PriorAssetState(proto.Enum): class TimeWindow(proto.Message): r"""A time window specified by its ``start_time`` and ``end_time``. + Attributes: start_time (google.protobuf.timestamp_pb2.Timestamp): Start time of the time window (exclusive). @@ -253,6 +254,7 @@ class Asset(proto.Message): class Resource(proto.Message): r"""A representation of a Google Cloud resource. + Attributes: version (str): The API version. Example: ``v1`` @@ -707,6 +709,7 @@ class IamPolicySearchResult(proto.Message): class Explanation(proto.Message): r"""Explanation about the IAM policy search result. + Attributes: matched_permissions (Sequence[google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.MatchedPermissionsEntry]): The map from roles to their included permissions that match @@ -723,6 +726,7 @@ class Explanation(proto.Message): class Permissions(proto.Message): r"""IAM permissions + Attributes: permissions (Sequence[str]): A list of permissions. 
A sample permission string: @@ -806,6 +810,7 @@ class IamPolicyAnalysisState(proto.Message): class ConditionEvaluation(proto.Message): r"""The Condition evaluation. + Attributes: evaluation_value (google.cloud.asset_v1.types.ConditionEvaluation.EvaluationValue): The evaluation result. @@ -855,6 +860,7 @@ class IamPolicyAnalysisResult(proto.Message): class Resource(proto.Message): r"""A Google Cloud resource under analysis. + Attributes: full_resource_name (str): The `full resource @@ -875,6 +881,7 @@ class Resource(proto.Message): class Access(proto.Message): r"""An IAM role or permission under analysis. + Attributes: role (str): The role. @@ -902,6 +909,7 @@ class Access(proto.Message): class Identity(proto.Message): r"""An identity under analysis. + Attributes: name (str): The identity name in any form of members appear in `IAM @@ -932,6 +940,7 @@ class Identity(proto.Message): class Edge(proto.Message): r"""A directional edge. + Attributes: source_node (str): The source node of the edge. For example, it @@ -1023,6 +1032,7 @@ class AccessControlList(proto.Message): class IdentityList(proto.Message): r"""The identities and group edges. + Attributes: identities (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Identity]): Only the identities that match one of the following diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index 17bb1ec37111..9789d7f27190 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -36,6 +36,7 @@ class GenerateAccessTokenRequest(proto.Message): r""" + Attributes: name (str): Required. 
The resource name of the service account for which @@ -92,6 +93,7 @@ class GenerateAccessTokenRequest(proto.Message): class GenerateAccessTokenResponse(proto.Message): r""" + Attributes: access_token (str): The OAuth 2.0 access token. @@ -113,6 +115,7 @@ class GenerateAccessTokenResponse(proto.Message): class SignBlobRequest(proto.Message): r""" + Attributes: name (str): Required. The resource name of the service account for which @@ -154,6 +157,7 @@ class SignBlobRequest(proto.Message): class SignBlobResponse(proto.Message): r""" + Attributes: key_id (str): The ID of the key used to sign the blob. @@ -173,6 +177,7 @@ class SignBlobResponse(proto.Message): class SignJwtRequest(proto.Message): r""" + Attributes: name (str): Required. The resource name of the service account for which @@ -215,6 +220,7 @@ class SignJwtRequest(proto.Message): class SignJwtResponse(proto.Message): r""" + Attributes: key_id (str): The ID of the key used to sign the JWT. @@ -234,6 +240,7 @@ class SignJwtResponse(proto.Message): class GenerateIdTokenRequest(proto.Message): r""" + Attributes: name (str): Required. The resource name of the service account for which @@ -285,6 +292,7 @@ class GenerateIdTokenRequest(proto.Message): class GenerateIdTokenResponse(proto.Message): r""" + Attributes: token (str): The OpenId Connect ID token. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 45b1c8858763..18822503d972 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -35,6 +35,7 @@ class LogEntry(proto.Message): r"""An individual entry in a log. + Attributes: log_name (str): Required. 
The resource name of the log to which this log diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index cfae1781a75d..edd2a7c33a9e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -42,6 +42,7 @@ class DeleteLogRequest(proto.Message): r"""The parameters to DeleteLog. + Attributes: log_name (str): Required. The resource name of the log to delete: @@ -68,6 +69,7 @@ class DeleteLogRequest(proto.Message): class WriteLogEntriesRequest(proto.Message): r"""The parameters to WriteLogEntries. + Attributes: log_name (str): Optional. A default log resource name that is assigned to @@ -186,11 +188,13 @@ class WriteLogEntriesRequest(proto.Message): class WriteLogEntriesResponse(proto.Message): - r"""Result returned from WriteLogEntries. """ + r"""Result returned from WriteLogEntries. + """ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. + Attributes: log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): When ``WriteLogEntriesRequest.partial_success`` is true, @@ -212,6 +216,7 @@ class WriteLogEntriesPartialErrors(proto.Message): class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. + Attributes: resource_names (Sequence[str]): Required. Names of one or more parent resources from which @@ -289,6 +294,7 @@ class ListLogEntriesRequest(proto.Message): class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. + Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. 
If ``entries`` is empty, @@ -328,6 +334,7 @@ def raw_page(self): class ListMonitoredResourceDescriptorsRequest(proto.Message): r"""The parameters to ListMonitoredResourceDescriptors + Attributes: page_size (int): Optional. The maximum number of results to return from this @@ -354,6 +361,7 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. + Attributes: resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. @@ -381,6 +389,7 @@ def raw_page(self): class ListLogsRequest(proto.Message): r"""The parameters to ListLogs. + Attributes: parent (str): Required. The resource name that owns the logs: @@ -434,6 +443,7 @@ class ListLogsRequest(proto.Message): class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. + Attributes: log_names (Sequence[str]): A list of log names. For example, @@ -462,6 +472,7 @@ def raw_page(self): class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. + Attributes: resource_names (Sequence[str]): Required. Name of a parent resource from which to retrieve @@ -515,6 +526,7 @@ class TailLogEntriesRequest(proto.Message): class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. + Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will @@ -534,6 +546,7 @@ class TailLogEntriesResponse(proto.Message): class SuppressionInfo(proto.Message): r"""Information about entries that were omitted from the session. 
+ Attributes: reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): The reason that entries were omitted from the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index a4b7b2571d7a..1122a620097b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -69,6 +69,7 @@ class LifecycleState(proto.Enum): class LogBucket(proto.Message): r"""Describes a repository of logs. + Attributes: name (str): The resource name of the bucket. For example: @@ -138,6 +139,7 @@ class LogBucket(proto.Message): class LogView(proto.Message): r"""Describes a view over logs in a bucket. + Attributes: name (str): The resource name of the view. @@ -385,6 +387,7 @@ class BigQueryOptions(proto.Message): class ListBucketsRequest(proto.Message): r"""The parameters to ``ListBuckets``. + Attributes: parent (str): Required. The parent resource whose buckets are to be @@ -429,6 +432,7 @@ class ListBucketsRequest(proto.Message): class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. + Attributes: buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. @@ -456,6 +460,7 @@ def raw_page(self): class CreateBucketRequest(proto.Message): r"""The parameters to ``CreateBucket``. + Attributes: parent (str): Required. The resource in which to create the bucket: @@ -494,6 +499,7 @@ class CreateBucketRequest(proto.Message): class UpdateBucketRequest(proto.Message): r"""The parameters to ``UpdateBucket``. + Attributes: name (str): Required. The full resource name of the bucket to update. 
@@ -542,6 +548,7 @@ class UpdateBucketRequest(proto.Message): class GetBucketRequest(proto.Message): r"""The parameters to ``GetBucket``. + Attributes: name (str): Required. The resource name of the bucket: @@ -565,6 +572,7 @@ class GetBucketRequest(proto.Message): class DeleteBucketRequest(proto.Message): r"""The parameters to ``DeleteBucket``. + Attributes: name (str): Required. The full resource name of the bucket to delete. @@ -588,6 +596,7 @@ class DeleteBucketRequest(proto.Message): class UndeleteBucketRequest(proto.Message): r"""The parameters to ``UndeleteBucket``. + Attributes: name (str): Required. The full resource name of the bucket to undelete. @@ -611,6 +620,7 @@ class UndeleteBucketRequest(proto.Message): class ListViewsRequest(proto.Message): r"""The parameters to ``ListViews``. + Attributes: parent (str): Required. The bucket whose views are to be listed: @@ -647,6 +657,7 @@ class ListViewsRequest(proto.Message): class ListViewsResponse(proto.Message): r"""The response from ListViews. + Attributes: views (Sequence[google.cloud.logging_v2.types.LogView]): A list of views. @@ -674,6 +685,7 @@ def raw_page(self): class CreateViewRequest(proto.Message): r"""The parameters to ``CreateView``. + Attributes: parent (str): Required. The bucket in which to create the view @@ -707,6 +719,7 @@ class CreateViewRequest(proto.Message): class UpdateViewRequest(proto.Message): r"""The parameters to ``UpdateView``. + Attributes: name (str): Required. The full resource name of the view to update @@ -749,6 +762,7 @@ class UpdateViewRequest(proto.Message): class GetViewRequest(proto.Message): r"""The parameters to ``GetView``. + Attributes: name (str): Required. The resource name of the policy: @@ -769,6 +783,7 @@ class GetViewRequest(proto.Message): class DeleteViewRequest(proto.Message): r"""The parameters to ``DeleteView``. + Attributes: name (str): Required. 
The full resource name of the view to delete: @@ -789,6 +804,7 @@ class DeleteViewRequest(proto.Message): class ListSinksRequest(proto.Message): r"""The parameters to ``ListSinks``. + Attributes: parent (str): Required. The parent resource whose sinks are to be listed: @@ -828,6 +844,7 @@ class ListSinksRequest(proto.Message): class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. + Attributes: sinks (Sequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. @@ -855,6 +872,7 @@ def raw_page(self): class GetSinkRequest(proto.Message): r"""The parameters to ``GetSink``. + Attributes: sink_name (str): Required. The resource name of the sink: @@ -877,6 +895,7 @@ class GetSinkRequest(proto.Message): class CreateSinkRequest(proto.Message): r"""The parameters to ``CreateSink``. + Attributes: parent (str): Required. The resource in which to create the sink: @@ -927,6 +946,7 @@ class CreateSinkRequest(proto.Message): class UpdateSinkRequest(proto.Message): r"""The parameters to ``UpdateSink``. + Attributes: sink_name (str): Required. The full resource name of the sink to update, @@ -999,6 +1019,7 @@ class UpdateSinkRequest(proto.Message): class DeleteSinkRequest(proto.Message): r"""The parameters to ``DeleteSink``. + Attributes: sink_name (str): Required. The full resource name of the sink to delete, @@ -1096,6 +1117,7 @@ class LogExclusion(proto.Message): class ListExclusionsRequest(proto.Message): r"""The parameters to ``ListExclusions``. + Attributes: parent (str): Required. The parent resource whose exclusions are to be @@ -1136,6 +1158,7 @@ class ListExclusionsRequest(proto.Message): class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. + Attributes: exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. @@ -1163,6 +1186,7 @@ def raw_page(self): class GetExclusionRequest(proto.Message): r"""The parameters to ``GetExclusion``. + Attributes: name (str): Required. 
The resource name of an existing exclusion: @@ -1186,6 +1210,7 @@ class GetExclusionRequest(proto.Message): class CreateExclusionRequest(proto.Message): r"""The parameters to ``CreateExclusion``. + Attributes: parent (str): Required. The parent resource in which to create the @@ -1219,6 +1244,7 @@ class CreateExclusionRequest(proto.Message): class UpdateExclusionRequest(proto.Message): r"""The parameters to ``UpdateExclusion``. + Attributes: name (str): Required. The resource name of the exclusion to update: @@ -1266,6 +1292,7 @@ class UpdateExclusionRequest(proto.Message): class DeleteExclusionRequest(proto.Message): r"""The parameters to ``DeleteExclusion``. + Attributes: name (str): Required. The resource name of an existing exclusion to diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 252e43760b02..1ec255b25388 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -216,6 +216,7 @@ class ApiVersion(proto.Enum): class ListLogMetricsRequest(proto.Message): r"""The parameters to ListLogMetrics. + Attributes: parent (str): Required. The name of the project containing the metrics: @@ -252,6 +253,7 @@ class ListLogMetricsRequest(proto.Message): class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. + Attributes: metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. @@ -279,6 +281,7 @@ def raw_page(self): class GetLogMetricRequest(proto.Message): r"""The parameters to GetLogMetric. + Attributes: metric_name (str): Required. 
The resource name of the desired metric: @@ -296,6 +299,7 @@ class GetLogMetricRequest(proto.Message): class CreateLogMetricRequest(proto.Message): r"""The parameters to CreateLogMetric. + Attributes: parent (str): Required. The resource name of the project in which to @@ -324,6 +328,7 @@ class CreateLogMetricRequest(proto.Message): class UpdateLogMetricRequest(proto.Message): r"""The parameters to UpdateLogMetric. + Attributes: metric_name (str): Required. The resource name of the metric to update: @@ -353,6 +358,7 @@ class UpdateLogMetricRequest(proto.Message): class DeleteLogMetricRequest(proto.Message): r"""The parameters to DeleteLogMetric. + Attributes: metric_name (str): Required. The resource name of the metric to delete: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 9caecb067b31..793fa802c0b0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -46,6 +46,7 @@ class Instance(proto.Message): r"""A Google Cloud Redis instance. + Attributes: name (str): Required. Unique name of the resource in this scope @@ -484,6 +485,7 @@ class DeleteInstanceRequest(proto.Message): class GcsSource(proto.Message): r"""The Cloud Storage location for the input content + Attributes: uri (str): Required. Source data URI. (e.g. @@ -498,6 +500,7 @@ class GcsSource(proto.Message): class InputConfig(proto.Message): r"""The input content + Attributes: gcs_source (google.cloud.redis_v1.types.GcsSource): Google Cloud Storage location where input @@ -538,6 +541,7 @@ class ImportInstanceRequest(proto.Message): class GcsDestination(proto.Message): r"""The Cloud Storage location for the output content + Attributes: uri (str): Required. 
Data destination URI (e.g. @@ -553,6 +557,7 @@ class GcsDestination(proto.Message): class OutputConfig(proto.Message): r"""The output content + Attributes: gcs_destination (google.cloud.redis_v1.types.GcsDestination): Google Cloud Storage destination for output @@ -626,6 +631,7 @@ class DataProtectionMode(proto.Enum): class OperationMetadata(proto.Message): r"""Represents the v1 metadata of the long-running operation. + Attributes: create_time (google.protobuf.timestamp_pb2.Timestamp): Creation timestamp. @@ -702,7 +708,8 @@ class LocationMetadata(proto.Message): class ZoneMetadata(proto.Message): r"""Defines specific information for a particular zone. Currently empty and reserved for future use only. - """ + + """ __all__ = tuple(sorted(__protobuf__.manifest)) From 7e23c7e3d8cc9725c5e19adc9277305995e171b0 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Mon, 4 Oct 2021 10:48:27 -0700 Subject: [PATCH 0618/1339] feat: add support for context manager in client (#987) * feat: add support for context manager in client. * chore: remove extra whitespace. * chore: adds autogenerated unit tests. * chore: adds stronger warning. * chore: fixes tests. * chore: adds auto-generated tests for ads. * chore: updates golden files. * chore: refactor. * chore: refactor. * feat: adds close() to transport and ctx to async client. * feat: adds close method and removes ctx from transport in ads. * chore: adds warning infobox to docstring. * chore: updates integration tests. * chore: fixes typo. 
--- .../%sub/services/%service/client.py.j2 | 12 ++++ .../services/%service/transports/base.py.j2 | 8 +++ .../services/%service/transports/grpc.py.j2 | 3 + .../%name_%version/%sub/test_%service.py.j2 | 24 ++++++++ .../%sub/services/%service/async_client.py.j2 | 6 ++ .../%sub/services/%service/client.py.j2 | 13 +++++ .../services/%service/transports/base.py.j2 | 8 +++ .../services/%service/transports/grpc.py.j2 | 4 ++ .../%service/transports/grpc_asyncio.py.j2 | 4 ++ .../services/%service/transports/rest.py.j2 | 3 + .../%name_%version/%sub/test_%service.py.j2 | 55 +++++++++++++++++++ .../services/asset_service/async_client.py | 6 +- .../asset_v1/services/asset_service/client.py | 11 ++++ .../services/asset_service/transports/base.py | 9 +++ .../services/asset_service/transports/grpc.py | 2 + .../asset_service/transports/grpc_asyncio.py | 3 + .../unit/gapic/asset_v1/test_asset_service.py | 46 ++++++++++++++++ .../services/iam_credentials/async_client.py | 6 +- .../services/iam_credentials/client.py | 11 ++++ .../iam_credentials/transports/base.py | 9 +++ .../iam_credentials/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 3 + .../credentials_v1/test_iam_credentials.py | 46 ++++++++++++++++ .../config_service_v2/async_client.py | 6 +- .../services/config_service_v2/client.py | 11 ++++ .../config_service_v2/transports/base.py | 9 +++ .../config_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 3 + .../logging_service_v2/async_client.py | 6 +- .../services/logging_service_v2/client.py | 11 ++++ .../logging_service_v2/transports/base.py | 9 +++ .../logging_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 3 + .../metrics_service_v2/async_client.py | 6 +- .../services/metrics_service_v2/client.py | 11 ++++ .../metrics_service_v2/transports/base.py | 9 +++ .../metrics_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 3 + .../logging_v2/test_config_service_v2.py | 46 ++++++++++++++++ 
.../logging_v2/test_logging_service_v2.py | 46 ++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 46 ++++++++++++++++ .../services/cloud_redis/async_client.py | 6 +- .../redis_v1/services/cloud_redis/client.py | 11 ++++ .../services/cloud_redis/transports/base.py | 9 +++ .../services/cloud_redis/transports/grpc.py | 2 + .../cloud_redis/transports/grpc_asyncio.py | 3 + .../unit/gapic/redis_v1/test_cloud_redis.py | 46 ++++++++++++++++ .../system/test_client_context_manager.py | 53 ++++++++++++++++++ 48 files changed, 643 insertions(+), 12 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_client_context_manager.py diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 90e5002b684b..3d14c341a56c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -148,6 +148,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ return self._transport + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() {% for message in service.resource_messages|sort(attribute="resource_type") %} @staticmethod diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 82ab1464387b..a87e3b671748 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -103,6 +103,14 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {% endfor %} {# precomputed wrappers loop #} } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() {% if service.has_lro %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 1562b6f88c61..59a2219ca9de 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -188,6 +188,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): **kwargs ) + def close(self): + self.grpc_channel.close() + @property def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service. 
diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 8d9af17ae691..0392324b06cb 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -654,6 +654,9 @@ def test_{{ service.name|snake_case }}_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + {% if service.has_lro %} # Additionally, the LRO client (a property) should # also raise NotImplementedError @@ -903,5 +906,26 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) prep.assert_called_once_with(client_info) +def test_grpc_transport_close(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + with mock.patch.object(type(client.transport._grpc_channel), 'close') as chan_close: + with client as _: + chan_close.assert_not_called() + chan_close.assert_called_once() + +def test_grpc_client_ctx(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client as _: + pass + close.assert_called() {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index ea40ea1c255f..3df81e9b05c6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -599,6 +599,12 @@ class {{ service.async_client_name }}: return response {% endif %} + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 3bc7f20c5f2e..f0248d53a51d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -477,6 +477,19 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ "\n" }} {% endfor %} + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + {% if opts.add_iam_methods %} def set_iam_policy( self, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index b5b7bb2ac4f8..fb937ae19283 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -174,6 +174,14 @@ class {{ service.name }}Transport(abc.ABC): {% endfor %} {# precomputed wrappers loop #} } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() {% if service.has_lro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index f2f0da6ea3a3..894f4470d54a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -224,6 +224,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): """Return the channel designed to connect to this service. 
""" return self._grpc_channel + {% if service.has_lro %} @property @@ -355,6 +356,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return self._stubs["test_iam_permissions"] {% endif %} + def close(self): + self.grpc_channel.close() + __all__ = ( '{{ service.name }}GrpcTransport', ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 934a931aff57..5c4f8d251f5f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -359,6 +359,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs["test_iam_permissions"] {% endif %} + def close(self): + return self.grpc_channel.close() + + __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 87eef7d49747..8072567cb652 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -283,6 +283,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): return self._{{method.name | snake_case}} {%- endfor %} + def close(self): + self._session.close() + __all__=( '{{ service.name }}RestTransport', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index d76414d34d68..165e12a8b4f9 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1516,6 +1516,9 @@ def test_{{ service.name|snake_case }}_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + {% if service.has_lro %} # Additionally, the LRO client (a property) should # also raise NotImplementedError @@ -2487,4 +2490,56 @@ async def test_test_iam_permissions_from_dict_async(): {% endif %} +@pytest.mark.asyncio +async def test_transport_close_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + {% if 'rest' in opts.transport %} + "rest": "_session", + {% endif %} + {% if 'grpc' in opts.transport %} + "grpc": "_grpc_channel", + {% endif %} + } + + for transport, close_name in transports.items(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + {% if 'rest' in opts.transport %} + 'rest', + {% endif %} + {% if 'grpc' in opts.transport %} + 'grpc', + {% endif %} + ] + for transport in transports: + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test 
client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + {% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 054bfe55652b..a6327c26df1e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1292,9 +1292,11 @@ async def analyze_iam_policy_longrunning(self, # Done; return the response. return response + async def __aenter__(self): + return self - - + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 0d7bac0e5e19..0462c07e4203 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1445,7 +1445,18 @@ def analyze_iam_policy_longrunning(self, # Done; return the response. return response + def __enter__(self): + return self + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 45083d66871a..1da08824f811 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -259,6 +259,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index c95a90bff1f5..7ed6de3dc242 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -592,6 +592,8 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ) return self._stubs['analyze_iam_policy_longrunning'] + def close(self): + self.grpc_channel.close() __all__ = ( 'AssetServiceGrpcTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 8559391cf053..cc021dfbb0b1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -596,6 +596,9 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ) return self._stubs['analyze_iam_policy_longrunning'] + def close(self): + return self.grpc_channel.close() + __all__ = ( 'AssetServiceGrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 21d14df35c36..f864b50348aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -3551,6 +3551,9 @@ def test_asset_service_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): @@ -4049,3 +4052,46 @@ def test_client_withDEFAULT_CLIENT_INFO(): client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + 
+def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index a6390ec743fb..c7ae75924aef 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -644,9 +644,11 @@ async def sign_jwt(self, # Done; return the response. 
return response + async def __aenter__(self): + return self - - + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index caa32cecbf0f..f6345dabc9fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -804,7 +804,18 @@ def sign_jwt(self, # Done; return the response. return response + def __enter__(self): + return self + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 99c147750219..5a0c487eb6c6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -196,6 +196,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def generate_access_token(self) -> Callable[ [common.GenerateAccessTokenRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 95c0f5f97599..4c5159d800f0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -337,6 +337,8 @@ def sign_jwt(self) -> Callable[ ) return self._stubs['sign_jwt'] + def close(self): + self.grpc_channel.close() __all__ = ( 'IAMCredentialsGrpcTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index c944ba08b8fe..cebb4866b05c 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -341,6 +341,9 @@ def sign_jwt(self) -> Callable[ ) return self._stubs['sign_jwt'] + def close(self): + return self.grpc_channel.close() + __all__ = ( 'IAMCredentialsGrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 6945e557a0f0..80c276662e27 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1485,6 +1485,9 @@ def test_iam_credentials_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_iam_credentials_base_transport_with_credentials_file(): @@ -1929,3 +1932,46 @@ def test_client_withDEFAULT_CLIENT_INFO(): client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 82e84aab817c..5d322cb502ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1997,9 +1997,11 @@ async def update_cmek_settings(self, # Done; return the response. return response + async def __aenter__(self): + return self - - + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index af174010f674..1deb0ddafffa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -2176,7 +2176,18 @@ def update_cmek_settings(self, # Done; return the response. return response + def __enter__(self): + return self + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e7f0db9d401c..92a840f2a353 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -323,6 +323,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def list_buckets(self) -> Callable[ [logging_config.ListBucketsRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 94e4af68334b..3a640eaf0bb5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -872,6 +872,8 @@ def update_cmek_settings(self) -> Callable[ ) return self._stubs['update_cmek_settings'] + def close(self): + self.grpc_channel.close() __all__ = ( 'ConfigServiceV2GrpcTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 78e442d8a4e3..2f18d861c16f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -876,6 +876,9 @@ def update_cmek_settings(self) -> Callable[ ) return self._stubs['update_cmek_settings'] + def close(self): + return self.grpc_channel.close() + __all__ = ( 'ConfigServiceV2GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index dd9cbb78dd9a..f03a8d0c0415 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -762,9 +762,11 @@ def tail_log_entries(self, # Done; return the response. 
return response + async def __aenter__(self): + return self - - + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 4a73c2318129..db8650fb871c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -898,7 +898,18 @@ def tail_log_entries(self, # Done; return the response. return response + def __enter__(self): + return self + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 222ed3c1f99c..195d115497dd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -231,6 +231,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete_log(self) -> Callable[ [logging.DeleteLogRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index f66cb54a21aa..4ec732a2688e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -396,6 +396,8 @@ def tail_log_entries(self) -> Callable[ ) return self._stubs['tail_log_entries'] + def close(self): + self.grpc_channel.close() __all__ = ( 'LoggingServiceV2GrpcTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index a19007ab65c9..9978fe57f74a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -400,6 +400,9 @@ def tail_log_entries(self) -> Callable[ ) return self._stubs['tail_log_entries'] + def close(self): + return self.grpc_channel.close() + __all__ = ( 'LoggingServiceV2GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 764f44f66698..d3b11a198e00 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -621,9 +621,11 @@ async def delete_log_metric(self, metadata=metadata, ) + async def __aenter__(self): + return self - - + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 616d0a69e80d..e31a068c67d6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -777,7 +777,18 @@ def delete_log_metric(self, metadata=metadata, ) + def __enter__(self): + return self + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index b9170bf568f9..2a4ff25e1a9a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -210,6 +210,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def list_log_metrics(self) -> Callable[ [logging_metrics.ListLogMetricsRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index e300d9f5320e..8aced9e6d345 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -351,6 +351,8 @@ def delete_log_metric(self) -> Callable[ ) return self._stubs['delete_log_metric'] + def close(self): + self.grpc_channel.close() __all__ = ( 'MetricsServiceV2GrpcTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 7da832822ebd..9c07c1172b70 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -355,6 +355,9 @@ def delete_log_metric(self) -> Callable[ ) return self._stubs['delete_log_metric'] + def close(self): + return self.grpc_channel.close() + __all__ = ( 'MetricsServiceV2GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 288323a4bc0c..8a1d5e3189b7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -5907,6 +5907,9 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): @@ -6446,3 +6449,46 @@ def test_client_withDEFAULT_CLIENT_INFO(): client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + 
close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 31f15bca043d..f372ad41b981 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2029,6 +2029,9 @@ def test_logging_service_v2_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_logging_service_v2_base_transport_with_credentials_file(): @@ -2493,3 +2496,46 @@ def test_client_withDEFAULT_CLIENT_INFO(): client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with 
mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e3eb2f3aca46..a42c0e2fdd93 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1894,6 +1894,9 @@ def test_metrics_service_v2_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): @@ -2358,3 +2361,46 @@ def test_client_withDEFAULT_CLIENT_INFO(): client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def 
test_transport_close_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 512bab3f1268..8d22164a8856 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1078,9 +1078,11 @@ async def delete_instance(self, # Done; return the response. 
return response + async def __aenter__(self): + return self - - + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 9afc35601ae1..7be2d6781b9f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1266,7 +1266,18 @@ def delete_instance(self, # Done; return the response. return response + def __enter__(self): + return self + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index d8161b1e788e..240133be8082 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -195,6 +195,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def operations_client(self) -> operations_v1.OperationsClient: """Return the client designed to process long-running operations.""" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 5b15755d0407..82eeca621273 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -536,6 +536,8 @@ def delete_instance(self) -> Callable[ ) return self._stubs['delete_instance'] + def close(self): + self.grpc_channel.close() __all__ = ( 'CloudRedisGrpcTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 2a2184de64ec..d89351dcb1aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -540,6 +540,9 @@ def delete_instance(self) -> Callable[ ) return self._stubs['delete_instance'] + def close(self): + return self.grpc_channel.close() + __all__ = ( 'CloudRedisGrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 1b719406947b..54f12b158e28 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -2860,6 +2860,9 @@ def test_cloud_redis_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + # Additionally, the LRO client (a property) should # also raise NotImplementedError with pytest.raises(NotImplementedError): @@ -3345,3 +3348,46 @@ def test_client_withDEFAULT_CLIENT_INFO(): client_info=client_info, ) prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/gapic-generator/tests/system/test_client_context_manager.py b/packages/gapic-generator/tests/system/test_client_context_manager.py new file mode 100644 index 000000000000..88ced947ed4e --- /dev/null +++ b/packages/gapic-generator/tests/system/test_client_context_manager.py @@ -0,0 +1,53 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import pytest +import grpc +import distutils + + +def test_client(echo): + with echo as c: + resp = c.echo({ + 'content': 'hello' + }) + assert resp.content == 'hello' + + +def test_client_destroyed(echo): + echo.__exit__(None, None, None) + with pytest.raises(ValueError): + echo.echo({ + 'content': 'hello' + }) + + +if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): + + @pytest.mark.asyncio + async def test_client_async(async_echo): + async with async_echo: + response = await async_echo.echo({ + 'content': 'hello' + }) + assert response.content == 'hello' + + @pytest.mark.asyncio + async def test_client_destroyed_async(async_echo): + await async_echo.__aexit__(None, None, None) + with pytest.raises(grpc._cython.cygrpc.UsageError): + await async_echo.echo({ + 'content': 'hello' + }) From 6e356c3098c0a460fa1b68e13ac37e974f5edede Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 11:03:40 -0600 Subject: [PATCH 0619/1339] chore: release 0.53.0 (#1002) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 9eecaa0978e6..e4d110d697c2 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.53.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.52.0...v0.53.0) (2021-10-04) + + +### Features + +* add support for context manager in client ([#987](https://www.github.com/googleapis/gapic-generator-python/issues/987)) ([4edabcf](https://www.github.com/googleapis/gapic-generator-python/commit/4edabcf6791cfb0874a951b695b39672036760d4)) +* enable self signed jwt for http 
([#1000](https://www.github.com/googleapis/gapic-generator-python/issues/1000)) ([5f87973](https://www.github.com/googleapis/gapic-generator-python/commit/5f8797396a2477b772b7bfb827499db32e28710e)) +* implement grpc transcode for rest transport and complete generated tests ([#999](https://www.github.com/googleapis/gapic-generator-python/issues/999)) ([ccdd17d](https://www.github.com/googleapis/gapic-generator-python/commit/ccdd17d6133274a34dd727fab0576e6c63238833)) +* implement grpc transcode for rest transport and complete generated tests. ([ccdd17d](https://www.github.com/googleapis/gapic-generator-python/commit/ccdd17d6133274a34dd727fab0576e6c63238833)) + + +### Bug Fixes + +* fix docstring for first attribute of protos ([#1004](https://www.github.com/googleapis/gapic-generator-python/issues/1004)) ([383f655](https://www.github.com/googleapis/gapic-generator-python/commit/383f6555a1d850889b2aa74be28c8d06465399e5)) + ## [0.52.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.2...v0.52.0) (2021-09-29) From 7d1c858868c0490ad13c04ea9bbdec9bf965e46d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 6 Oct 2021 19:34:22 +0200 Subject: [PATCH 0620/1339] chore(deps): update dependency google-api-core to v2.1.0 (#1009) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index a90cdbe974d3..04923748ce24 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.1 -google-api-core==2.0.1 +google-api-core==2.1.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 From 09ed155c6460c7517019ccdcb23f94a879f518ca Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 6 Oct 2021 19:51:13 +0200 Subject: [PATCH 0621/1339] chore(deps): update dependency protobuf to v3.18.1 (#1008) Co-authored-by: Dov Shlachter --- 
packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 04923748ce24..657ab25e842c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.1.0 googleapis-common-protos==1.53.0 jinja2==3.0.1 MarkupSafe==2.0.1 -protobuf==3.18.0 +protobuf==3.18.1 pypandoc==1.6.4 PyYAML==5.4.1 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 6784bd726d9636d6026253c3d0b258dccba352b5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 6 Oct 2021 20:01:54 +0200 Subject: [PATCH 0622/1339] chore(deps): update dependency jinja2 to v3.0.2 (#1006) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 657ab25e842c..fb89e30cffc4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==8.0.1 google-api-core==2.1.0 googleapis-common-protos==1.53.0 -jinja2==3.0.1 +jinja2==3.0.2 MarkupSafe==2.0.1 protobuf==3.18.1 pypandoc==1.6.4 From 5c4402ab8f788f58b5269a95eee5ed1792e4aa37 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 7 Oct 2021 09:12:30 -0600 Subject: [PATCH 0623/1339] chore: make api_schema required for samplegen validator (#1010) I found this change makes it a bit easier to implement #960. 
--- .../gapic/samplegen/samplegen.py | 5 +- .../tests/unit/samplegen/test_samplegen.py | 407 ++++++++++-------- 2 files changed, 228 insertions(+), 184 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index b19ce4fa8728..1538173ce965 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -250,8 +250,7 @@ class Validator: ("value", "field", "value_is_file", "input_parameter", "comment") ) - # TODO(dovs): make the schema a required param. - def __init__(self, method: wrappers.Method, api_schema=None): + def __init__(self, method: wrappers.Method, api_schema: api.API): # The response ($resp) variable is special and guaranteed to exist. self.method = method self.request_type_ = method.input @@ -1058,7 +1057,7 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str calling_form = types.CallingForm.method_default(rpc) - v = Validator(rpc) + v = Validator(rpc, api_schema) # Tweak some small aspects of the sample to set defaults for optional # fields, add fields that are required for the template, and so forth. 
v.preprocess_sample(sample, api_schema, rpc) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index e9c9913d0f9a..b44f34c13e19 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -36,48 +36,76 @@ # validate_response tests +@pytest.fixture(scope="module") +def dummy_api_schema(): + # For most of the unit tests in this file the internals of API Schema do not matter + classify_request_message = DummyMessage( + fields={ + "parent": DummyField(is_primitive=True, type=str, required=True, name="parent"), + }, + type=DummyMessageTypePB(name="ClassifyRequest"), + ident=DummyIdent(name="ClassifyRequest") + ) + + return DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient", + resource_messages_dict={} + )}, + naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + messages=classify_request_message, -def test_define(): + ) + + +def test_define(dummy_api_schema): define = {"define": "squid=$resp"} - v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc")), api_schema=dummy_api_schema) v.validate_response([define]) -def test_define_undefined_var(): +def test_define_undefined_var(dummy_api_schema): define = {"define": "squid=humboldt"} - v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc")), api_schema=dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response([define]) -def test_define_reserved_varname(): +def test_define_reserved_varname(dummy_api_schema): define = {"define": "class=$resp"} - v = 
samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc")), api_schema=dummy_api_schema) with pytest.raises(types.ReservedVariableName): v.validate_response([define]) -def test_define_add_var(): +def test_define_add_var(dummy_api_schema): v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc.name"))) + output=message_factory("mollusc.name")), + api_schema=dummy_api_schema) v.validate_response([{"define": "squid=$resp"}, {"define": "name=squid.name"}]) -def test_define_bad_form(): +def test_define_bad_form(dummy_api_schema): define = {"define": "mollusc=$resp.squid=$resp.clam"} - v = samplegen.Validator(DummyMethod(output=message_factory("mollusc"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("mollusc")), api_schema=dummy_api_schema) with pytest.raises(types.BadAssignment): v.validate_response([define]) -def test_define_redefinition(): +def test_define_redefinition(dummy_api_schema): statements = [ {"define": "molluscs=$resp.molluscs"}, {"define": "molluscs=$resp.molluscs"}, ] v = samplegen.Validator(DummyMethod(output=message_factory("$resp.molluscs", - repeated_iter=[True]))) + repeated_iter=[True])), + api_schema=dummy_api_schema) with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -285,9 +313,10 @@ def test_preprocess_sample_void_method(): assert sample["response"] == [] -def test_define_input_param(): +def test_define_input_param(dummy_api_schema): v = samplegen.Validator( - DummyMethod(input=message_factory("mollusc.squid.mantle_length"))) + DummyMethod(input=message_factory("mollusc.squid.mantle_length")), + dummy_api_schema) v.validate_and_transform_request( types.CallingForm.Request, [ @@ -301,9 +330,10 @@ def test_define_input_param(): v.validate_response([{"define": "length=mantle_length"}]) -def test_define_input_param_redefinition(): +def test_define_input_param_redefinition(dummy_api_schema): v = 
samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid.mantle_length"))) + input=message_factory("mollusc.squid.mantle_length")), + dummy_api_schema) v.validate_and_transform_request( types.CallingForm.Request, [ @@ -319,71 +349,78 @@ def test_define_input_param_redefinition(): [{"define": "mantle_length=mantle_length"}]) -def test_print_basic(): +def test_print_basic(dummy_api_schema): print_statement = {"print": ["This is a squid"]} - samplegen.Validator(DummyMethod()).validate_response([print_statement]) + samplegen.Validator(DummyMethod(), dummy_api_schema).validate_response( + [print_statement]) -def test_print_fmt_str(): +def test_print_fmt_str(dummy_api_schema): print_statement = {"print": ["This is a squid named %s", "$resp.name"]} - v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.name")), dummy_api_schema) v.validate_response([print_statement]) -def test_print_fmt_mismatch(): +def test_print_fmt_mismatch(dummy_api_schema): print_statement = {"print": ["This is a squid named %s"]} - v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.name")), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([print_statement]) -def test_print_fmt_mismatch2(): +def test_print_fmt_mismatch2(dummy_api_schema): print_statement = {"print": ["This is a squid", "$resp.name"]} - v = samplegen.Validator(DummyMethod(output=message_factory("$resp.name"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.name")), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([print_statement]) -def test_print_undefined_var(): +def test_print_undefined_var(dummy_api_schema): print_statement = {"print": ["This mollusc is a %s", "mollusc.type"]} - v = 
samplegen.Validator(DummyMethod(output=message_factory("$resp.type"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.type")), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response([print_statement]) -def test_comment(): +def test_comment(dummy_api_schema): comment = {"comment": ["This is a mollusc"]} - samplegen.Validator(DummyMethod()).validate_response([comment]) + samplegen.Validator( + DummyMethod(), dummy_api_schema).validate_response([comment]) -def test_comment_fmt_str(): +def test_comment_fmt_str(dummy_api_schema): comment = {"comment": ["This is a mollusc of class %s", "$resp.klass"]} - v = samplegen.Validator(DummyMethod(output=message_factory("$resp.klass"))) + v = samplegen.Validator(DummyMethod( + output=message_factory("$resp.klass")), dummy_api_schema) v.validate_response([comment]) -def test_comment_fmt_undefined_var(): +def test_comment_fmt_undefined_var(dummy_api_schema): comment = {"comment": ["This is a mollusc of class %s", "cephalopod"]} - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response([comment]) -def test_comment_fmt_mismatch(): +def test_comment_fmt_mismatch(dummy_api_schema): comment = {"comment": ["This is a mollusc of class %s"]} - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([comment]) -def test_comment_fmt_mismatch2(): +def test_comment_fmt_mismatch2(dummy_api_schema): comment = {"comment": ["This is a mollusc of class ", "$resp.class"]} - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([comment]) -def test_loop_collection(): +def test_loop_collection(dummy_api_schema): loop = { "loop": { "collection": 
"$resp.molluscs", @@ -393,11 +430,11 @@ def test_loop_collection(): } OutputType = message_factory( "$resp.molluscs.class", repeated_iter=[True, False]) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response([loop]) -def test_loop_collection_redefinition(): +def test_loop_collection_redefinition(dummy_api_schema): statements = [ {"define": "m=$resp.molluscs"}, { @@ -409,12 +446,12 @@ def test_loop_collection_redefinition(): }, ] v = samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True]))) + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema) with pytest.raises(types.RedefinedVariable): v.validate_response(statements) -def test_loop_undefined_collection(): +def test_loop_undefined_collection(dummy_api_schema): loop = { "loop": { "collection": "squid", @@ -422,12 +459,12 @@ def test_loop_undefined_collection(): "body": [{"print": ["Squid: %s", "s"]}], } } - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response([loop]) -def test_loop_collection_extra_kword(): +def test_loop_collection_extra_kword(dummy_api_schema): loop = { "loop": { "collection": "$resp.molluscs", @@ -436,24 +473,24 @@ def test_loop_collection_extra_kword(): "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.BadLoop): v.validate_response([loop]) -def test_loop_collection_missing_kword(): +def test_loop_collection_missing_kword(dummy_api_schema): loop = { "loop": { "collection": "$resp.molluscs", "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with 
pytest.raises(types.BadLoop): v.validate_response([loop]) -def test_loop_collection_reserved_loop_var(): +def test_loop_collection_reserved_loop_var(dummy_api_schema): loop = { "loop": { "collection": "$resp.molluscs", @@ -462,12 +499,12 @@ def test_loop_collection_reserved_loop_var(): } } v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.molluscs", repeated_iter=[True]))) + output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema) with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) -def test_loop_map(): +def test_loop_map(dummy_api_schema): loop = { "loop": { "map": "$resp.molluscs", @@ -497,11 +534,11 @@ def test_loop_map(): }, type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response([loop]) -def test_collection_loop_lexical_scope_variable(): +def test_collection_loop_lexical_scope_variable(dummy_api_schema): statements = [ { "loop": { @@ -513,12 +550,13 @@ def test_collection_loop_lexical_scope_variable(): {"define": "cephalopod=m"}, ] v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.molluscs", repeated_iter=[True]))) + output=message_factory("$resp.molluscs", repeated_iter=[True])), + dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_collection_loop_lexical_scope_inline(): +def test_collection_loop_lexical_scope_inline(dummy_api_schema): statements = [ { "loop": { @@ -530,12 +568,13 @@ def test_collection_loop_lexical_scope_inline(): {"define": "cephalopod=squid"}, ] v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.molluscs", repeated_iter=[True]))) + output=message_factory("$resp.molluscs", repeated_iter=[True])), + dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_map_loop_lexical_scope_key(): +def 
test_map_loop_lexical_scope_key(dummy_api_schema): statements = [ { "loop": { @@ -571,12 +610,12 @@ def test_map_loop_lexical_scope_key(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_map_loop_lexical_scope_value(): +def test_map_loop_lexical_scope_value(dummy_api_schema): statements = [ { "loop": { @@ -612,12 +651,12 @@ def test_map_loop_lexical_scope_value(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_map_loop_lexical_scope_inline(): +def test_map_loop_lexical_scope_inline(dummy_api_schema): statements = [ { "loop": { @@ -652,12 +691,12 @@ def test_map_loop_lexical_scope_inline(): }, type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_loop_map_reserved_key(): +def test_loop_map_reserved_key(dummy_api_schema): loop = { "loop": { "map": "$resp.molluscs", @@ -689,12 +728,12 @@ def test_loop_map_reserved_key(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) -def test_loop_map_reserved_val(): +def test_loop_map_reserved_val(dummy_api_schema): loop = { "loop": { "map": "$resp.molluscs", @@ -726,12 +765,12 @@ def test_loop_map_reserved_val(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), 
dummy_api_schema) with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) -def test_loop_map_undefined(): +def test_loop_map_undefined(dummy_api_schema): loop = { "loop": { "map": "molluscs", @@ -740,12 +779,12 @@ def test_loop_map_undefined(): "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], } } - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response([loop]) -def test_loop_map_no_key(): +def test_loop_map_no_key(dummy_api_schema): loop = { "loop": { "map": "$resp.molluscs", @@ -775,11 +814,11 @@ def test_loop_map_no_key(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response([loop]) -def test_loop_map_no_value(): +def test_loop_map_no_value(dummy_api_schema): loop = { "loop": { "map": "$resp.molluscs", @@ -809,11 +848,11 @@ def test_loop_map_no_value(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response([loop]) -def test_loop_map_no_key_or_value(): +def test_loop_map_no_key_or_value(dummy_api_schema): loop = {"loop": {"map": "$resp.molluscs", # Need at least one of 'key' or 'value' "body": [{"print": ["Dead loop"]}]}} @@ -839,12 +878,12 @@ def test_loop_map_no_key_or_value(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.BadLoop): v.validate_response([loop]) -def test_loop_map_no_map(): +def test_loop_map_no_map(dummy_api_schema): loop = { "loop": { "key": "name", @@ -852,19 +891,19 @@ def test_loop_map_no_map(): "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], } } - v = samplegen.Validator(DummyMethod()) + v = 
samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.BadLoop): v.validate_response([loop]) -def test_loop_map_no_body(): +def test_loop_map_no_body(dummy_api_schema): loop = {"loop": {"map": "$resp.molluscs", "key": "name", "value": "mollusc"}} - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.BadLoop): v.validate_response([loop]) -def test_loop_map_extra_kword(): +def test_loop_map_extra_kword(dummy_api_schema): loop = { "loop": { "map": "$resp.molluscs", @@ -874,12 +913,12 @@ def test_loop_map_extra_kword(): "body": [{"print": ["A %s is a %s", "mollusc", "name"]}], } } - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.BadLoop): v.validate_response([loop]) -def test_loop_map_redefined_key(): +def test_loop_map_redefined_key(dummy_api_schema): statements = [ {"define": "mollusc=$resp.molluscs"}, { @@ -913,12 +952,12 @@ def test_loop_map_redefined_key(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.RedefinedVariable): v.validate_response(statements) -def test_loop_map_redefined_value(): +def test_loop_map_redefined_value(dummy_api_schema): statements = [ {"define": "mollusc=$resp.molluscs"}, { @@ -952,12 +991,12 @@ def test_loop_map_redefined_value(): type="RESPONSE_TYPE" ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.RedefinedVariable): v.validate_response(statements) -def test_validate_write_file(): +def test_validate_write_file(dummy_api_schema): statements = [ { "write_file": { @@ -972,31 +1011,31 @@ def test_validate_write_file(): "photo": DummyField(message=DummyMessage(fields={})) } ) - v = samplegen.Validator(DummyMethod(output=OutputType)) 
+ v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response(statements) -def test_validate_write_file_fname_fmt(): +def test_validate_write_file_fname_fmt(dummy_api_schema): statements = [{"write_file": {"filename": ["specimen-%s"], "contents": "$resp.photo"}}] - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response(statements) -def test_validate_write_file_fname_bad_var(): +def test_validate_write_file_fname_bad_var(dummy_api_schema): statements = [{ "write_file": { "filename": ["specimen-%s", "squid.species"], "contents": "$resp.photo", } }] - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_validate_write_file_missing_fname(): +def test_validate_write_file_missing_fname(dummy_api_schema): statements = [{"write_file": {"contents": "$resp.photo"}}] OutputType = DummyMessage( fields={ @@ -1004,12 +1043,12 @@ def test_validate_write_file_missing_fname(): "photo": DummyField(message=DummyMessage(fields={})) } ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.InvalidStatement): v.validate_response(statements) -def test_validate_write_file_missing_contents(): +def test_validate_write_file_missing_contents(dummy_api_schema): statements = [{"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] OutputType = DummyMessage( @@ -1019,12 +1058,12 @@ def test_validate_write_file_missing_contents(): } ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.InvalidStatement): v.validate_response(statements) -def test_validate_write_file_bad_contents_var(): +def 
test_validate_write_file_bad_contents_var(dummy_api_schema): statements = [{ "write_file": { "filename": ["specimen-%s", "$resp.species"], @@ -1037,27 +1076,27 @@ def test_validate_write_file_bad_contents_var(): "photo": DummyField(message=DummyMessage(fields={})) } ) - v = samplegen.Validator(DummyMethod(output=OutputType)) + v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) -def test_invalid_statement(): +def test_invalid_statement(dummy_api_schema): statements = [{"print": ["Name"], "comment": ["Value"]}] - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.InvalidStatement): v.validate_response(statements) -def test_invalid_statement2(): +def test_invalid_statement2(dummy_api_schema): statements = [{"squidify": ["Statement body"]}] - v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.InvalidStatement): v.validate_response(statements) # validate_and_transform_request tests -def test_validate_request_basic(): +def test_validate_request_basic(dummy_api_schema): input_type = DummyMessage( fields={ "squid": DummyField( @@ -1077,7 +1116,7 @@ def test_validate_request_basic(): type="REQUEST_TYPE" ) - v = samplegen.Validator(DummyMethod(input=input_type)) + v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, [ @@ -1106,9 +1145,9 @@ def test_validate_request_basic(): assert actual == expected -def test_validate_request_no_field_parameter(): +def test_validate_request_no_field_parameter(dummy_api_schema): # May need to remeove this test because it doesn't necessarily make sense any more. 
- v = samplegen.Validator(DummyMethod()) + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.Request, [{"squid": "humboldt", @@ -1116,9 +1155,10 @@ def test_validate_request_no_field_parameter(): ) -def test_validate_request_no_such_attribute(): +def test_validate_request_no_such_attribute(dummy_api_schema): v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid.mantle"))) + input=message_factory("mollusc.squid.mantle")), + dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_and_transform_request( types.CallingForm.Request, @@ -1126,9 +1166,10 @@ def test_validate_request_no_such_attribute(): ) -def test_validate_request_top_level_field(): +def test_validate_request_top_level_field(dummy_api_schema): v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid"))) + input=message_factory("mollusc.squid")), + dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, [{"field": "squid", "value": "humboldt"}] @@ -1147,9 +1188,10 @@ def test_validate_request_top_level_field(): assert actual == expected -def test_validate_request_missing_keyword(kword="field"): +def test_validate_request_missing_keyword(dummy_api_schema, kword="field"): v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid"))) + input=message_factory("mollusc.squid")), + dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.Request, @@ -1157,13 +1199,14 @@ def test_validate_request_missing_keyword(kword="field"): ) -def test_validate_request_missing_value(): - test_validate_request_missing_keyword(kword="value") +def test_validate_request_missing_value(dummy_api_schema): + test_validate_request_missing_keyword(dummy_api_schema, kword="value") -def test_validate_request_spurious_kword(): +def 
test_validate_request_spurious_kword(dummy_api_schema): v = samplegen.Validator( - DummyMethod(input=message_factory("mollusc.squid"))) + DummyMethod(input=message_factory("mollusc.squid")), + dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.Request, @@ -1171,9 +1214,9 @@ def test_validate_request_spurious_kword(): ) -def test_validate_request_unknown_field_type(): +def test_validate_request_unknown_field_type(dummy_api_schema): v = samplegen.Validator(DummyMethod( - input=DummyMessage(fields={"squid": DummyField()}))) + input=DummyMessage(fields={"squid": DummyField()})), dummy_api_schema) with pytest.raises(TypeError): v.validate_and_transform_request( types.CallingForm.Request, @@ -1181,9 +1224,9 @@ def test_validate_request_unknown_field_type(): ) -def test_validate_request_duplicate_top_level_fields(): +def test_validate_request_duplicate_top_level_fields(dummy_api_schema): v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid"))) + input=message_factory("mollusc.squid")), dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.Request, @@ -1192,7 +1235,7 @@ def test_validate_request_duplicate_top_level_fields(): ) -def test_validate_request_multiple_arguments(): +def test_validate_request_multiple_arguments(dummy_api_schema): input_type = DummyMessage( fields={ "squid": DummyField( @@ -1213,7 +1256,7 @@ def test_validate_request_multiple_arguments(): type="REQUEST_TYPE" ) - v = samplegen.Validator(DummyMethod(input=input_type)) + v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, [ @@ -1252,7 +1295,7 @@ def test_validate_request_multiple_arguments(): assert actual == expected -def test_validate_request_duplicate_input_param(): +def test_validate_request_duplicate_input_param(dummy_api_schema): input_type = DummyMessage( 
fields={ "squid": DummyField( @@ -1273,7 +1316,7 @@ def test_validate_request_duplicate_input_param(): type="REQUEST_TYPE" ) - v = samplegen.Validator(DummyMethod(input=input_type)) + v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) with pytest.raises(types.RedefinedVariable): v.validate_and_transform_request( types.CallingForm.Request, @@ -1292,8 +1335,8 @@ def test_validate_request_duplicate_input_param(): ) -def test_validate_request_reserved_input_param(): - v = samplegen.Validator(DummyMethod()) +def test_validate_request_reserved_input_param(dummy_api_schema): + v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.ReservedVariableName): v.validate_and_transform_request( types.CallingForm.Request, @@ -1307,7 +1350,7 @@ def test_validate_request_reserved_input_param(): ) -def test_single_request_client_streaming( +def test_single_request_client_streaming(dummy_api_schema, calling_form=types.CallingForm.RequestStreamingClient): # Each API client method really only takes one parameter: # either a single protobuf message or an iterable of protobuf messages. 
@@ -1342,7 +1385,7 @@ def test_single_request_client_streaming( }, type="MOLLUSC_TYPE" ) - v = samplegen.Validator(DummyMethod(input=input_type)) + v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.RequestStreamingClient, @@ -1396,25 +1439,25 @@ def test_coerce_response_name(): assert utils.coerce_response_name("mollusc.squid") == "mollusc.squid" -def test_regular_response_type(): +def test_regular_response_type(dummy_api_schema): OutputType = TypeVar("OutputType") method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) assert v.var_field("$resp").message == OutputType -def test_paged_response_type(): +def test_paged_response_type(dummy_api_schema): OutputType = TypeVar("OutputType") PagedType = TypeVar("PagedType") PagedField = DummyField(message=PagedType) method = DummyMethod(output=OutputType, paged_result_field=PagedField) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) assert v.var_field("$resp").message == PagedType -def test_lro_response_type(): +def test_lro_response_type(dummy_api_schema): OutputType = TypeVar("OutputType") LroType = TypeVar("LroType") method = DummyMethod( @@ -1422,58 +1465,58 @@ def test_lro_response_type(): "operation", ["response_type"])(LroType) ) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) assert v.var_field("$resp").message == LroType -def test_validate_expression(): +def test_validate_expression(dummy_api_schema): exp = "$resp.coleoidea.octopodiformes.octopus" OutputType = message_factory(exp) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) exp_type = v.validate_expression(exp) assert exp_type.message.type == "OCTOPUS_TYPE" -def test_validate_expression_undefined_base(): +def 
test_validate_expression_undefined_base(dummy_api_schema): exp = "$resp.coleoidea.octopodiformes.octopus" OutputType = message_factory(exp) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): v.validate_expression("mollusc") -def test_validate_expression_no_such_attr(): +def test_validate_expression_no_such_attr(dummy_api_schema): OutputType = message_factory("$resp.coleoidea") method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression("$resp.nautiloidea") -def test_validate_expression_non_indexed_non_terminal_repeated(): +def test_validate_expression_non_indexed_non_terminal_repeated(dummy_api_schema): # This is a little tricky: there's an attribute hierarchy # of response/coleoidea/octopodiformes, but coleoidea is a repeated field, # so accessing $resp.coleoidea.octopodiformes doesn't make any sense. 
exp = "$resp.coleoidea.octopodiformes" OutputType = message_factory(exp, repeated_iter=[True, False]) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_response( [{"define": "octopus=$resp.coleoidea.octopodiformes"}]) -def test_validate_expression_collection(): +def test_validate_expression_collection(dummy_api_schema): exp = "$resp.molluscs" OutputType = message_factory(exp, repeated_iter=[True]) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) v.validate_response( [ { @@ -1487,7 +1530,7 @@ def test_validate_expression_collection(): ) -def test_validate_expression_collection_error(): +def test_validate_expression_collection_error(dummy_api_schema): exp = "$resp.molluscs.mollusc" OutputType = message_factory(exp) method = DummyMethod(output=OutputType) @@ -1500,48 +1543,48 @@ def test_validate_expression_collection_error(): } } - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) # Because 'molluscs' isn't repeated with pytest.raises(types.BadLoop): v.validate_response([statement]) -def test_validate_expression_repeated_lookup(): +def test_validate_expression_repeated_lookup(dummy_api_schema): exp = "$resp.molluscs.mantle" OutputType = message_factory(exp, repeated_iter=[True, False]) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) v.validate_expression("$resp.molluscs[0].mantle") -def test_validate_expression_repeated_lookup_nested(): +def test_validate_expression_repeated_lookup_nested(dummy_api_schema): exp = "$resp.molluscs.tentacles.club" OutputType = message_factory(exp, [True, True, False]) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) 
v.validate_expression("$resp.molluscs[0].tentacles[0].club") -def test_validate_expression_repeated_lookup_invalid(): +def test_validate_expression_repeated_lookup_invalid(dummy_api_schema): exp = "$resp.molluscs.mantle" OutputType = message_factory(exp) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression("$resp.molluscs[0].mantle") -def test_validate_expression_base_attr_is_repeated(): +def test_validate_expression_base_attr_is_repeated(dummy_api_schema): exp = "$resp.molluscs.mantle" OutputType = message_factory(exp, repeated_iter=[True, False]) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) v.validate_response([{"define": "molluscs=$resp.molluscs"}]) v.validate_expression("molluscs[0].mantle") -def test_validate_expression_map_lookup(): +def test_validate_expression_map_lookup(dummy_api_schema): # See https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto#L475 # for details on how mapped attributes get transformed by the protoc compiler. 
OutputType = DummyMessage( @@ -1571,11 +1614,11 @@ def test_validate_expression_map_lookup(): type="MOLLUSC_TYPE" ) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) v.validate_expression('$resp.cephalopods{"squid"}.mantle') -def test_validate_expression_map_lookup_terminal_lookup(): +def test_validate_expression_map_lookup_terminal_lookup(dummy_api_schema): OutputType = DummyMessage( fields={ "cephalopods": DummyField( @@ -1602,11 +1645,11 @@ def test_validate_expression_map_lookup_terminal_lookup(): type="MOLLUSC_TYPE" ) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) v.validate_expression('$resp.cephalopods{"squid"}') -def test_validate_expression_mapped_no_map_field(): +def test_validate_expression_mapped_no_map_field(dummy_api_schema): OutputType = DummyMessage( fields={ "cephalopods": DummyField( @@ -1634,12 +1677,12 @@ def test_validate_expression_mapped_no_map_field(): type="MOLLUSC_TYPE" ) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression('$resp.cephalopods{"squid"}.mantle') -def test_validate_expression_mapped_no_value(): +def test_validate_expression_mapped_no_value(dummy_api_schema): OutputType = DummyMessage( fields={ "cephalopods": DummyField( @@ -1654,12 +1697,12 @@ def test_validate_expression_mapped_no_value(): type="MOLLUSC_TYPE" ) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression('$resp.cephalopods{"squid"}.mantle') -def test_validate_expression_mapped_no_message(): +def test_validate_expression_mapped_no_message(dummy_api_schema): OutputType = DummyMessage( fields={ "cephalopods": DummyField( @@ -1677,45 
+1720,45 @@ def test_validate_expression_mapped_no_message(): type="MOLLUSC_TYPE" ) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression('$resp.cephalopods{"squid"}.mantle') -def test_validate_expresssion_lookup_unrepeated_base(): +def test_validate_expresssion_lookup_unrepeated_base(dummy_api_schema): exp = "$resp.molluscs" OutputType = message_factory(exp) method = DummyMethod(output=OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_response([{"define": "m=$resp[0]"}]) -def test_validate_expression_malformed_base(): +def test_validate_expression_malformed_base(dummy_api_schema): # Note the mistype exp = "r$esp.mollusc" OutputType = message_factory(exp) method = DummyMethod(OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression(exp) -def test_validate_expression_malformed_attr(): +def test_validate_expression_malformed_attr(dummy_api_schema): # Note the mistype exp = "$resp.mollu$c" OutputType = message_factory(exp) method = DummyMethod(OutputType) - v = samplegen.Validator(method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_expression(exp) -def test_validate_request_enum(): +def test_validate_request_enum(dummy_api_schema): enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) - v = samplegen.Validator(DummyMethod(input=request_type)) + v = samplegen.Validator(DummyMethod(input=request_type), dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, [{"field": "cephalopod.subclass", "value": "COLEOIDEA"}] @@ -1733,11 +1776,11 
@@ def test_validate_request_enum(): assert actual == expected -def test_validate_request_enum_top_level(): +def test_validate_request_enum_top_level(dummy_api_schema): enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) request_type = message_factory("mollusc.subclass", enum=enum) - v = samplegen.Validator(DummyMethod(input=request_type)) + v = samplegen.Validator(DummyMethod(input=request_type), dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, [{"field": "subclass", "value": "COLEOIDEA"}] @@ -1749,11 +1792,11 @@ def test_validate_request_enum_top_level(): assert actual == expected -def test_validate_request_enum_invalid_value(): +def test_validate_request_enum_invalid_value(dummy_api_schema): enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), - input=request_type)) + input=request_type), dummy_api_schema) with pytest.raises(types.InvalidEnumVariant): v.validate_and_transform_request( types.CallingForm.Request, @@ -1762,7 +1805,7 @@ def test_validate_request_enum_invalid_value(): ) -def test_validate_request_enum_not_last_attr(): +def test_validate_request_enum_not_last_attr(dummy_api_schema): # enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) # field = make_field(name="subclass", enum=enum) request_type = make_message( @@ -1779,7 +1822,7 @@ def test_validate_request_enum_not_last_attr(): # request_type = message_factory("mollusc.subclass", enum=enum) v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), - input=request_type)) + input=request_type), dummy_api_schema) with pytest.raises(types.NonTerminalPrimitiveOrEnum): v.validate_and_transform_request( types.CallingForm.Request, @@ -1852,7 +1895,7 @@ def test_validate_request_resource_name(): assert actual == expected -def 
test_validate_request_primitive_field(): +def test_validate_request_primitive_field(dummy_api_schema): field = make_field(name="species", type="TYPE_STRING") request_type = make_message(name="request", fields=[field]) @@ -1861,7 +1904,8 @@ def test_validate_request_primitive_field(): DummyMethod( output=message_factory("mollusc_result"), input=request_type - ) + ), + dummy_api_schema ) actual = v.validate_and_transform_request(types.CallingForm.Request, @@ -1926,18 +1970,18 @@ def test_validate_request_resource_name_mixed_reversed(): test_validate_request_resource_name_mixed(request) -def test_validate_request_no_such_attr(): +def test_validate_request_no_such_attr(dummy_api_schema): request = [ {"field": "taxon%kingdom", "value": "animalia"} ] method = DummyMethod(input=make_message(name="Request")) - v = samplegen.Validator(method=method) + v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): v.validate_and_transform_request(types.CallingForm.Request, request) -def test_validate_request_no_such_resource(): +def test_validate_request_no_such_resource(dummy_api_schema): request = [ {"field": "taxon%kingdom", "value": "animalia"} ] @@ -1992,7 +2036,7 @@ def test_validate_request_no_such_pattern(): v.validate_and_transform_request(types.CallingForm.Request, request) -def test_validate_request_non_terminal_primitive_field(): +def test_validate_request_non_terminal_primitive_field(dummy_api_schema): field = make_field(name="species", type="TYPE_STRING") request_type = make_message(name="request", fields=[field]) @@ -2001,7 +2045,8 @@ def test_validate_request_non_terminal_primitive_field(): DummyMethod( output=message_factory("mollusc_result"), input=request_type - ) + ), + dummy_api_schema ) with pytest.raises(types.NonTerminalPrimitiveOrEnum): From ae926d49880d5dfe3f3e8f95fdaffa12a314d637 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 8 Oct 2021 17:24:02 -0600 Subject: 
[PATCH 0624/1339] chore(snippetgen): add 2 placeholder values in lists (#1013) --- .../gapic-generator/gapic/schema/wrappers.py | 18 ++++++++++++------ ..._credentials_generate_access_token_async.py | 2 +- ...m_credentials_generate_access_token_sync.py | 2 +- ...ogging_service_v2_tail_log_entries_async.py | 4 ++-- ...logging_service_v2_tail_log_entries_sync.py | 4 ++-- .../tests/unit/schema/wrappers/test_field.py | 3 ++- 6 files changed, 20 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index b9a38761338f..aecda19f4483 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -99,7 +99,9 @@ def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, List[A # If this is a repeated field, then the mock answer should # be a list. if self.repeated: - answer = [answer] + first_item = self.primitive_mock(suffix=1) or None + second_item = self.primitive_mock(suffix=2) or None + answer = [first_item, second_item] return answer @@ -160,9 +162,12 @@ def inner_mock(self, stack, visited_fields) -> str: # Done; return the mock value. return answer - def primitive_mock(self) -> Union[bool, str, bytes, int, float, List[Any], None]: + def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, List[Any], None]: """Generate a valid mock for a primitive type. This function returns the original (Python) type. + + If a suffix is provided, generate a slightly different mock + using the provided integer. 
""" answer: Union[bool, str, bytes, int, float, List[Any], None] = None @@ -174,13 +179,14 @@ def primitive_mock(self) -> Union[bool, str, bytes, int, float, List[Any], None] if self.type.python_type == bool: answer = True elif self.type.python_type == str: - answer = f"{self.name}_value" + answer = f"{self.name}_value_{suffix}" if suffix else f"{self.name}_value" elif self.type.python_type == bytes: - answer = bytes(f"{self.name}_blob", encoding="utf-8") + answer_str = f"{self.name}_blob_{suffix}" if suffix else f"{self.name}_blob" + answer = bytes(answer_str, encoding="utf-8") elif self.type.python_type == int: - answer = sum([ord(i) for i in self.name]) + answer = sum([ord(i) for i in self.name]) + suffix elif self.type.python_type == float: - name_sum = sum([ord(i) for i in self.name]) + name_sum = sum([ord(i) for i in self.name]) + suffix answer = name_sum * pow(10, -1 * len(str(name_sum))) else: # Impossible; skip coverage checks. raise TypeError('Unrecognized PrimitiveType. This should ' diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index 1e6b47658170..067ec4e09d4e 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -36,7 +36,7 @@ async def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="projects/{project}/serviceAccounts/{service_account}", - 
scope=['scope_value'], + scope=['scope_value_1', 'scope_value_2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index 3c4f4909dc06..e2ec5d222d73 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -36,7 +36,7 @@ def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="projects/{project}/serviceAccounts/{service_account}", - scope=['scope_value'], + scope=['scope_value_1', 'scope_value_2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 69e71695982b..38efe9e40da6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -35,11 +35,11 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - 
resource_names=['resource_names_value'], + resource_names=['resource_names_value_1', 'resource_names_value_2'], ) # Make the request - stream = await client.tail_log_entries([resource_names=['resource_names_value']]) + stream = await client.tail_log_entries([resource_names=['resource_names_value_1', 'resource_names_value_2']]) async for response in stream: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index 6296d6783e8e..a533ee9ad62b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -35,11 +35,11 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value'], + resource_names=['resource_names_value_1', 'resource_names_value_2'], ) # Make the request - stream = client.tail_log_entries([resource_names=['resource_names_value']]) + stream = client.tail_log_entries([resource_names=['resource_names_value_1', 'resource_names_value_2']]) for response in stream: print(response) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 2fa2c09783a2..7c9bfacbd10e 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -221,7 +221,8 @@ def test_mock_value_repeated(): def test_mock_value_original_type_repeated(): field = 
make_field(name='foo_bar', type='TYPE_STRING', label=3) - assert field.mock_value_original_type == ["foo_bar_value"] + assert field.mock_value_original_type == [ + "foo_bar_value_1", "foo_bar_value_2"] def test_mock_value_map(): From 16d8522422b6ee24690eb65b698b11715a5f590b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 Oct 2021 18:00:49 +0200 Subject: [PATCH 0625/1339] chore(deps): update dependency click to v8.0.3 (#1016) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index fb89e30cffc4..b6ed3e1761e8 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.0.1 +click==8.0.3 google-api-core==2.1.0 googleapis-common-protos==1.53.0 jinja2==3.0.2 From 82ad7cb782395f7167965b6aded6b8b6bf330aea Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 12 Oct 2021 13:54:33 -0400 Subject: [PATCH 0626/1339] chore: add Python 3.10 support (#1019) --- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 2 +- packages/gapic-generator/gapic/templates/setup.py.j2 | 1 + packages/gapic-generator/noxfile.py | 2 +- packages/gapic-generator/setup.py | 2 ++ .../gapic-generator/tests/integration/goldens/asset/noxfile.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 1 + .../tests/integration/goldens/credentials/noxfile.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 1 + .../tests/integration/goldens/logging/noxfile.py | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 1 + .../gapic-generator/tests/integration/goldens/redis/noxfile.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 1 + 12 files changed, 13 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 
d3e9b2ce928d..596194ad7e41 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -26,7 +26,7 @@ nox.sessions = [ "docs", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) def unit(session): """Run the unit test suite.""" diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 39ef330320c2..00b671335b79 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -44,6 +44,7 @@ setuptools.setup( 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index e270f0423ef5..98ceb103dbed 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") -@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) +@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10"]) def unit(session): """Run the unit test suite.""" diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 69bb01d55f31..2cc950d342fc 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -65,6 +65,8 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", ), diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 15e45490321b..372acb5981cd 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -37,7 +37,7 @@ "docs", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) def unit(session): """Run the unit test suite.""" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 43eb370430ef..379f5e9de640 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -47,6 +47,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index a72fb423c050..8d60c7f807c5 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -37,7 +37,7 @@ "docs", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) def unit(session): """Run the unit test suite.""" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 825178c9f678..4f182c1ff3b4 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -47,6 +47,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 10ed0a998e1e..9daae7a0df33 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -37,7 +37,7 @@ "docs", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) def unit(session): """Run the unit test suite.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 4b98728b93f3..250bfa5d8c1a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -47,6 +47,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 93380724feb0..c42261e8aaa1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -37,7 +37,7 @@ "docs", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) def unit(session): """Run 
the unit test suite.""" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 9d98a420c25c..955a1fe01011 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -47,6 +47,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], From 49878648c6ac14eab1146698ce37d0a2fd0f8a01 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Oct 2021 12:08:04 -0400 Subject: [PATCH 0627/1339] chore: disable dependency dashboard (#1021) Closes #980 The dependency dashboard was enabled by default in Renovate v26.0.0. --- packages/gapic-generator/renovate.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/renovate.json b/packages/gapic-generator/renovate.json index 33c6788ce6d8..4de84a027424 100644 --- a/packages/gapic-generator/renovate.json +++ b/packages/gapic-generator/renovate.json @@ -1,6 +1,7 @@ { "extends": [ "config:base", - "docker:disable" + "docker:disable", + ":disableDependencyDashboard" ] } From b75758a9f3f22484c84d7f0f16174693e745b64e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 Oct 2021 23:35:05 +0200 Subject: [PATCH 0628/1339] chore(deps): update dependency pyyaml to v6 (#1023) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b6ed3e1761e8..7c8c39a187d6 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -5,5 +5,5 @@ jinja2==3.0.2 MarkupSafe==2.0.1 protobuf==3.18.1 pypandoc==1.6.4 
-PyYAML==5.4.1 +PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 19f4756062093f878c6c2ca51f29703cc3ad0e32 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 13 Oct 2021 19:27:40 -0400 Subject: [PATCH 0629/1339] fix: use correct typing for retries / operations_client (#1026) * fix: use correct typing for retries Closes #1024 * fix: import 'ClientOptions' class correctly Closes #1025 * fix: remove typing from base transport 'operations_client', repair typing for `_operations_client` attr Closes #1028. --- .../%sub/services/%service/client.py.j2 | 4 +- .../services/%service/transports/base.py.j2 | 2 +- .../%sub/services/%service/async_client.py.j2 | 12 +++-- .../%sub/services/%service/client.py.j2 | 10 ++-- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 2 +- .../%service/transports/grpc_asyncio.py.j2 | 2 +- .../services/%service/transports/rest.py.j2 | 6 ++- .../services/asset_service/async_client.py | 28 ++++++----- .../asset_v1/services/asset_service/client.py | 26 +++++----- .../services/asset_service/transports/base.py | 2 +- .../services/asset_service/transports/grpc.py | 2 +- .../asset_service/transports/grpc_asyncio.py | 2 +- .../services/iam_credentials/async_client.py | 12 +++-- .../services/iam_credentials/client.py | 10 ++-- .../config_service_v2/async_client.py | 50 ++++++++++--------- .../services/config_service_v2/client.py | 48 +++++++++--------- .../logging_service_v2/async_client.py | 16 +++--- .../services/logging_service_v2/client.py | 14 +++--- .../metrics_service_v2/async_client.py | 14 +++--- .../services/metrics_service_v2/client.py | 12 +++-- .../services/cloud_redis/async_client.py | 22 ++++---- .../redis_v1/services/cloud_redis/client.py | 20 ++++---- .../services/cloud_redis/transports/base.py | 2 +- .../services/cloud_redis/transports/grpc.py | 2 +- .../cloud_redis/transports/grpc_asyncio.py | 2 +- .../gapic-generator/tests/system/conftest.py | 4 +- 27 
files changed, 180 insertions(+), 148 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 3d14c341a56c..4f28d0ea2ef2 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -22,6 +22,8 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + {% filter sort_lines %} {% for method in service.methods.values() %} {% for ref_type in method.flat_ref_types %} @@ -306,7 +308,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): requests: Iterator[{{ method.input.ident }}] = None, *, {% endif %} - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index a87e3b671748..06e826572787 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -115,7 +115,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {% if service.has_lro %} @property - def operations_client(self) -> operations_v1.OperationsClient: + 
def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 3df81e9b05c6..1f60736c11fb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -11,13 +11,15 @@ import pkg_resources import warnings {% endif %} -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + {% filter sort_lines %} {% for method in service.methods.values() %} {% for ref_type in method.flat_ref_types %} @@ -156,7 +158,7 @@ class {{ service.async_client_name }}: requests: AsyncIterator[{{ method.input.ident }}] = None, *, {% endif %} - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} @@ -331,7 +333,7 @@ class {{ service.async_client_name }}: self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -439,7 +441,7 @@ class {{ 
service.async_client_name }}: self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -548,7 +550,7 @@ class {{ service.async_client_name }}: self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index f0248d53a51d..02bfe76135b3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -22,6 +22,8 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + {% filter sort_lines %} {% for method in service.methods.values() %} {% for ref_type in method.flat_ref_types %} @@ -322,7 +324,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): requests: Iterator[{{ method.input.ident }}] = None, *, {% endif %} - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} @@ -495,7 +497,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - 
retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -605,7 +607,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -716,7 +718,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index fb937ae19283..6065e957562e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -186,7 +186,7 @@ class {{ service.name }}Transport(abc.ABC): {% if service.has_lro %} @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 894f4470d54a..dbf01bc68244 
100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -109,7 +109,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} {% if service.has_lro %} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None {% endif %} if api_mtls_endpoint: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 5c4f8d251f5f..1a8a243ff8f1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -155,7 +155,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} {% if service.has_lro %} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None {% endif %} if api_mtls_endpoint: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 8072567cb652..994e30d9e48b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -10,8 +10,10 @@ from google.api_core import path_template # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import operations_v1 from requests import __version__ as requests_version -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings + +OptionalRetry = Union[retries.Retry, object] {% extends '_base.py.j2' %} {% block content %} @@ -153,7 +155,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {%- if method.http_options and not method.lro and not (method.server_streaming or method.client_streaming) %} def _{{method.name | snake_case}}(self, request: {{method.input.ident}}, *, - retry: retries.Retry=gapic_v1.method.DEFAULT, + retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[str, str]]=(), ) -> {{method.output.ident}}: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index a6327c26df1e..4d7ae3b74a57 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore 
from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.asset_v1.services.asset_service import pagers @@ -153,7 +155,7 @@ def __init__(self, *, async def export_assets(self, request: asset_service.ExportAssetsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -233,7 +235,7 @@ async def list_assets(self, request: asset_service.ListAssetsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAssetsAsyncPager: @@ -321,7 +323,7 @@ async def list_assets(self, async def batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: @@ -387,7 +389,7 @@ async def create_feed(self, request: asset_service.CreateFeedRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: @@ -475,7 +477,7 @@ async def get_feed(self, request: asset_service.GetFeedRequest = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: @@ -563,7 +565,7 @@ 
async def list_feeds(self, request: asset_service.ListFeedsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: @@ -647,7 +649,7 @@ async def update_feed(self, request: asset_service.UpdateFeedRequest = None, *, feed: asset_service.Feed = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: @@ -729,7 +731,7 @@ async def delete_feed(self, request: asset_service.DeleteFeedRequest = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -805,7 +807,7 @@ async def search_all_resources(self, scope: str = None, query: str = None, asset_types: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesAsyncPager: @@ -991,7 +993,7 @@ async def search_all_iam_policies(self, *, scope: str = None, query: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllIamPoliciesAsyncPager: @@ -1155,7 +1157,7 @@ async def search_all_iam_policies(self, async def analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: @@ -1217,7 +1219,7 @@ async def analyze_iam_policy(self, async def 
analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 0462c07e4203..0d5b57dab3dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.asset_v1.services.asset_service import pagers @@ -350,7 +352,7 @@ def __init__(self, *, def export_assets(self, request: Union[asset_service.ExportAssetsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -431,7 +433,7 @@ def list_assets(self, request: Union[asset_service.ListAssetsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAssetsPager: @@ -519,7 +521,7 @@ def list_assets(self, def batch_get_assets_history(self, request: Union[asset_service.BatchGetAssetsHistoryRequest, dict] = None, *, - 
retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: @@ -579,7 +581,7 @@ def create_feed(self, request: Union[asset_service.CreateFeedRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: @@ -667,7 +669,7 @@ def get_feed(self, request: Union[asset_service.GetFeedRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: @@ -748,7 +750,7 @@ def list_feeds(self, request: Union[asset_service.ListFeedsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: @@ -825,7 +827,7 @@ def update_feed(self, request: Union[asset_service.UpdateFeedRequest, dict] = None, *, feed: asset_service.Feed = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: @@ -907,7 +909,7 @@ def delete_feed(self, request: Union[asset_service.DeleteFeedRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -976,7 +978,7 @@ def search_all_resources(self, scope: str = None, query: str = None, asset_types: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesPager: @@ -1155,7 +1157,7 @@ def search_all_iam_policies(self, *, scope: str = None, query: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllIamPoliciesPager: @@ -1312,7 +1314,7 @@ def search_all_iam_policies(self, def analyze_iam_policy(self, request: Union[asset_service.AnalyzeIamPolicyRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: @@ -1369,7 +1371,7 @@ def analyze_iam_policy(self, def analyze_iam_policy_longrunning(self, request: Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 1da08824f811..bf723d536705 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -269,7 +269,7 @@ def close(self): raise NotImplementedError() @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 7ed6de3dc242..0db532b0d348 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -110,7 +110,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index cc021dfbb0b1..683fa4b9ef0c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -156,7 +156,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index c7ae75924aef..bdf30ffa4921 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -162,7 +164,7 @@ async def generate_access_token(self, delegates: Sequence[str] = None, scope: Sequence[str] = None, lifetime: duration_pb2.Duration = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: @@ -295,7 +297,7 @@ async def generate_id_token(self, delegates: Sequence[str] = None, audience: str = None, include_email: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: @@ -421,7 +423,7 @@ async def sign_blob(self, name: str = None, delegates: Sequence[str] 
= None, payload: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: @@ -534,7 +536,7 @@ async def sign_jwt(self, name: str = None, delegates: Sequence[str] = None, payload: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index f6345dabc9fe..d278ac7c769a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -350,7 +352,7 @@ def generate_access_token(self, delegates: Sequence[str] = None, scope: Sequence[str] = None, lifetime: duration_pb2.Duration = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: @@ -476,7 +478,7 @@ def generate_id_token(self, delegates: Sequence[str] = None, audience: str = None, include_email: bool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, 
+ retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: @@ -595,7 +597,7 @@ def sign_blob(self, name: str = None, delegates: Sequence[str] = None, payload: bytes = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: @@ -701,7 +703,7 @@ def sign_jwt(self, name: str = None, delegates: Sequence[str] = None, payload: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 5d322cb502ee..9b0c3e83acbc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.logging_v2.services.config_service_v2 import 
pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -158,7 +160,7 @@ async def list_buckets(self, request: logging_config.ListBucketsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: @@ -253,7 +255,7 @@ async def list_buckets(self, async def get_bucket(self, request: logging_config.GetBucketRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -305,7 +307,7 @@ async def get_bucket(self, async def create_bucket(self, request: logging_config.CreateBucketRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -359,7 +361,7 @@ async def create_bucket(self, async def update_bucket(self, request: logging_config.UpdateBucketRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -421,7 +423,7 @@ async def update_bucket(self, async def delete_bucket(self, request: logging_config.DeleteBucketRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -468,7 +470,7 @@ async def delete_bucket(self, async def undelete_bucket(self, request: logging_config.UndeleteBucketRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = 
None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -515,7 +517,7 @@ async def list_views(self, request: logging_config.ListViewsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: @@ -602,7 +604,7 @@ async def list_views(self, async def get_view(self, request: logging_config.GetViewRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -656,7 +658,7 @@ async def get_view(self, async def create_view(self, request: logging_config.CreateViewRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -711,7 +713,7 @@ async def create_view(self, async def update_view(self, request: logging_config.UpdateViewRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -766,7 +768,7 @@ async def update_view(self, async def delete_view(self, request: logging_config.DeleteViewRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -812,7 +814,7 @@ async def list_sinks(self, request: logging_config.ListSinksRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksAsyncPager: @@ -912,7 +914,7 @@ async def 
get_sink(self, request: logging_config.GetSinkRequest = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1010,7 +1012,7 @@ async def create_sink(self, *, parent: str = None, sink: logging_config.LogSink = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1115,7 +1117,7 @@ async def update_sink(self, sink_name: str = None, sink: logging_config.LogSink = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1249,7 +1251,7 @@ async def delete_sink(self, request: logging_config.DeleteSinkRequest = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1332,7 +1334,7 @@ async def list_exclusions(self, request: logging_config.ListExclusionsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: @@ -1432,7 +1434,7 @@ async def get_exclusion(self, request: logging_config.GetExclusionRequest = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1533,7 +1535,7 @@ async def create_exclusion(self, *, parent: str = None, exclusion: 
logging_config.LogExclusion = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1641,7 +1643,7 @@ async def update_exclusion(self, name: str = None, exclusion: logging_config.LogExclusion = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1760,7 +1762,7 @@ async def delete_exclusion(self, request: logging_config.DeleteExclusionRequest = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1842,7 +1844,7 @@ async def delete_exclusion(self, async def get_cmek_settings(self, request: logging_config.GetCmekSettingsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: @@ -1918,7 +1920,7 @@ async def get_cmek_settings(self, async def update_cmek_settings(self, request: logging_config.UpdateCmekSettingsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 1deb0ddafffa..a00d1f97d635 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config from google.protobuf import field_mask_pb2 # type: ignore @@ -382,7 +384,7 @@ def list_buckets(self, request: Union[logging_config.ListBucketsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: @@ -477,7 +479,7 @@ def list_buckets(self, def get_bucket(self, request: Union[logging_config.GetBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -530,7 +532,7 @@ def get_bucket(self, def create_bucket(self, request: Union[logging_config.CreateBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -585,7 +587,7 @@ def create_bucket(self, def update_bucket(self, request: Union[logging_config.UpdateBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: @@ -648,7 +650,7 @@ def update_bucket(self, def 
delete_bucket(self, request: Union[logging_config.DeleteBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -696,7 +698,7 @@ def delete_bucket(self, def undelete_bucket(self, request: Union[logging_config.UndeleteBucketRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -744,7 +746,7 @@ def list_views(self, request: Union[logging_config.ListViewsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: @@ -831,7 +833,7 @@ def list_views(self, def get_view(self, request: Union[logging_config.GetViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -886,7 +888,7 @@ def get_view(self, def create_view(self, request: Union[logging_config.CreateViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -942,7 +944,7 @@ def create_view(self, def update_view(self, request: Union[logging_config.UpdateViewRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: @@ -998,7 +1000,7 @@ def update_view(self, def delete_view(self, request: Union[logging_config.DeleteViewRequest, dict] = None, *, - 
retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1045,7 +1047,7 @@ def list_sinks(self, request: Union[logging_config.ListSinksRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksPager: @@ -1137,7 +1139,7 @@ def get_sink(self, request: Union[logging_config.GetSinkRequest, dict] = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1227,7 +1229,7 @@ def create_sink(self, *, parent: str = None, sink: logging_config.LogSink = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1332,7 +1334,7 @@ def update_sink(self, sink_name: str = None, sink: logging_config.LogSink = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: @@ -1458,7 +1460,7 @@ def delete_sink(self, request: Union[logging_config.DeleteSinkRequest, dict] = None, *, sink_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1533,7 +1535,7 @@ def list_exclusions(self, request: Union[logging_config.ListExclusionsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: @@ -1625,7 +1627,7 @@ def get_exclusion(self, request: Union[logging_config.GetExclusionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1718,7 +1720,7 @@ def create_exclusion(self, *, parent: str = None, exclusion: logging_config.LogExclusion = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1826,7 +1828,7 @@ def update_exclusion(self, name: str = None, exclusion: logging_config.LogExclusion = None, update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: @@ -1945,7 +1947,7 @@ def delete_exclusion(self, request: Union[logging_config.DeleteExclusionRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -2019,7 +2021,7 @@ def delete_exclusion(self, def get_cmek_settings(self, request: Union[logging_config.GetCmekSettingsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: @@ -2096,7 +2098,7 @@ def get_cmek_settings(self, def update_cmek_settings(self, request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + 
retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f03a8d0c0415..f9e7efddea07 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -150,7 +152,7 @@ async def delete_log(self, request: logging.DeleteLogRequest = None, *, log_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -242,7 +244,7 @@ async def write_log_entries(self, resource: monitored_resource_pb2.MonitoredResource = None, labels: 
Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: @@ -410,7 +412,7 @@ async def list_log_entries(self, resource_names: Sequence[str] = None, filter: str = None, order_by: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesAsyncPager: @@ -544,7 +546,7 @@ async def list_log_entries(self, async def list_monitored_resource_descriptors(self, request: logging.ListMonitoredResourceDescriptorsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: @@ -613,7 +615,7 @@ async def list_logs(self, request: logging.ListLogsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsAsyncPager: @@ -713,7 +715,7 @@ async def list_logs(self, def tail_log_entries(self, requests: AsyncIterator[logging.TailLogEntriesRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 
db8650fb871c..1d404a9141aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -338,7 +340,7 @@ def delete_log(self, request: Union[logging.DeleteLogRequest, dict] = None, *, log_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -422,7 +424,7 @@ def write_log_entries(self, resource: monitored_resource_pb2.MonitoredResource = None, labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, entries: Sequence[log_entry.LogEntry] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: @@ -581,7 +583,7 @@ def list_log_entries(self, resource_names: Sequence[str] = None, filter: str = None, order_by: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesPager: @@ -707,7 +709,7 @@ def list_log_entries(self, def list_monitored_resource_descriptors(self, request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: @@ -769,7 +771,7 @@ def list_logs(self, request: Union[logging.ListLogsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsPager: @@ -861,7 +863,7 @@ def list_logs(self, def tail_log_entries(self, requests: Iterator[logging.TailLogEntriesRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index d3b11a198e00..44218ec65135 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import distribution_pb2 # type: ignore from google.api 
import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -151,7 +153,7 @@ async def list_log_metrics(self, request: logging_metrics.ListLogMetricsRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsAsyncPager: @@ -248,7 +250,7 @@ async def get_log_metric(self, request: logging_metrics.GetLogMetricRequest = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -343,7 +345,7 @@ async def create_log_metric(self, *, parent: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -443,7 +445,7 @@ async def update_log_metric(self, *, metric_name: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -549,7 +551,7 @@ async def delete_log_metric(self, request: logging_metrics.DeleteLogMetricRequest = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 
e31a068c67d6..da766d98d6bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -339,7 +341,7 @@ def list_log_metrics(self, request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsPager: @@ -428,7 +430,7 @@ def get_log_metric(self, request: Union[logging_metrics.GetLogMetricRequest, dict] = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -515,7 +517,7 @@ def create_log_metric(self, *, parent: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -615,7 +617,7 @@ def update_log_metric(self, *, metric_name: str = None, metric: logging_metrics.LogMetric = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: @@ -713,7 +715,7 @@ def 
delete_log_metric(self, request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, *, metric_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 8d22164a8856..612026a68860 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers @@ -174,7 +176,7 @@ async def list_instances(self, request: cloud_redis.ListInstancesRequest = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: @@ -272,7 +274,7 @@ async def get_instance(self, request: 
cloud_redis.GetInstanceRequest = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: @@ -348,7 +350,7 @@ async def create_instance(self, parent: str = None, instance_id: str = None, instance: cloud_redis.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -473,7 +475,7 @@ async def update_instance(self, *, update_mask: field_mask_pb2.FieldMask = None, instance: cloud_redis.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -581,7 +583,7 @@ async def upgrade_instance(self, *, name: str = None, redis_version: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -679,7 +681,7 @@ async def import_instance(self, *, name: str = None, input_config: cloud_redis.InputConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -784,7 +786,7 @@ async def export_instance(self, *, name: str = None, output_config: cloud_redis.OutputConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -885,7 +887,7 @@ async def failover_instance(self, *, name: str = None, data_protection_mode: 
cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: @@ -984,7 +986,7 @@ async def delete_instance(self, request: cloud_redis.DeleteInstanceRequest = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 7be2d6781b9f..80cec0d8da59 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers @@ -362,7 +364,7 @@ def list_instances(self, request: Union[cloud_redis.ListInstancesRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: @@ -460,7 +462,7 @@ def get_instance(self, request: Union[cloud_redis.GetInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: @@ -536,7 +538,7 @@ def create_instance(self, parent: str = None, instance_id: str = None, instance: cloud_redis.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -661,7 +663,7 @@ def update_instance(self, *, update_mask: field_mask_pb2.FieldMask = None, instance: cloud_redis.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -769,7 +771,7 @@ def upgrade_instance(self, *, name: str = None, redis_version: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -867,7 +869,7 @@ def import_instance(self, *, name: str = None, input_config: cloud_redis.InputConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -972,7 +974,7 @@ def export_instance(self, *, name: str = None, output_config: cloud_redis.OutputConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: @@ -1073,7 +1075,7 @@ def failover_instance(self, *, name: str = None, data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> 
operation.Operation: @@ -1172,7 +1174,7 @@ def delete_instance(self, request: Union[cloud_redis.DeleteInstanceRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 240133be8082..ab43da3637ec 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -205,7 +205,7 @@ def close(self): raise NotImplementedError() @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self): """Return the client designed to process long-running operations.""" raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 82eeca621273..74b0217f5023 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -129,7 +129,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: 
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index d89351dcb1aa..1f4e774f65ea 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -175,7 +175,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} - self._operations_client = None + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index e01f596b2d96..e7f535c74b35 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -19,7 +19,7 @@ import os import pytest -import google.api_core.client_options as ClientOptions +from google.api_core.client_options import ClientOptions # type: ignore from google.auth import credentials from google.showcase import EchoClient from google.showcase import IdentityClient @@ -77,7 +77,7 @@ def callback(): return cert, key -client_options = ClientOptions.ClientOptions() +client_options = ClientOptions() client_options.client_cert_source = callback From c4ae5cfc31af4eedf63cbc20adbf00be1186bedc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 14 Oct 2021 10:32:55 -0600 Subject: [PATCH 0630/1339] chore: release 0.53.1 (#1029) 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index e4d110d697c2..5a324ca49844 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.53.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.0...v0.53.1) (2021-10-13) + + +### Bug Fixes + +* use correct typing for retries / operations_client ([#1026](https://www.github.com/googleapis/gapic-generator-python/issues/1026)) ([acb3ea8](https://www.github.com/googleapis/gapic-generator-python/commit/acb3ea83becf6bf85c142739dede556cae2cebae)) + ## [0.53.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.52.0...v0.53.0) (2021-10-04) From 2ad5ca26ef4838d172c0139d5c025f49357b5e7b Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 14 Oct 2021 10:13:19 -0700 Subject: [PATCH 0631/1339] refactor: cleanup and boost dependency reqs in generated clients (#1018) Boost Proto-Plus dependency to 1.19.4 Boost common protos dependency to 1.53.0 Boost Google API core requirement to 2.1.0 and (transitively) Google Auth to 1.25.0 Remove logic for handling older auth library versions --- .../gapic/ads-templates/noxfile.py.j2 | 4 +- .../gapic/ads-templates/setup.py.j2 | 12 ++- .../services/%service/transports/base.py.j2 | 34 +-------- .../%service/transports/grpc_asyncio.py.j2 | 1 - .../gapic/templates/noxfile.py.j2 | 6 +- .../gapic/templates/setup.py.j2 | 15 ++-- .../%name_%version/%sub/test_%service.py.j2 | 73 +----------------- packages/gapic-generator/noxfile.py | 33 ++++---- packages/gapic-generator/setup.py | 4 +- .../services/asset_service/transports/base.py | 33 +------- .../asset_service/transports/grpc_asyncio.py | 1 - .../integration/goldens/asset/noxfile.py | 6 +- 
.../tests/integration/goldens/asset/setup.py | 8 +- .../unit/gapic/asset_v1/test_asset_service.py | 68 ----------------- .../iam_credentials/transports/base.py | 33 +------- .../transports/grpc_asyncio.py | 1 - .../goldens/credentials/noxfile.py | 6 +- .../integration/goldens/credentials/setup.py | 7 +- .../credentials_v1/test_iam_credentials.py | 68 ----------------- .../config_service_v2/transports/base.py | 33 +------- .../transports/grpc_asyncio.py | 1 - .../logging_service_v2/transports/base.py | 33 +------- .../transports/grpc_asyncio.py | 1 - .../metrics_service_v2/transports/base.py | 33 +------- .../transports/grpc_asyncio.py | 1 - .../integration/goldens/logging/noxfile.py | 6 +- .../integration/goldens/logging/setup.py | 7 +- .../logging_v2/test_config_service_v2.py | 74 ------------------ .../logging_v2/test_logging_service_v2.py | 76 ------------------- .../logging_v2/test_metrics_service_v2.py | 76 ------------------- .../services/cloud_redis/transports/base.py | 33 +------- .../cloud_redis/transports/grpc_asyncio.py | 1 - .../integration/goldens/redis/noxfile.py | 6 +- .../tests/integration/goldens/redis/setup.py | 7 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 68 ----------------- 35 files changed, 71 insertions(+), 798 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 484c437e5c35..62a869c60736 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -7,7 +7,7 @@ import os import nox # type: ignore -@nox.session(python=['3.7', '3.8']) +@nox.session(python=['3.7', '3.8', '3.9']) def unit(session): """Run the unit test suite.""" @@ -25,7 +25,7 @@ def unit(session): ) -@nox.session(python=['3.7', '3.8']) +@nox.session(python=['3.7', '3.8', '3.9']) def mypy(session): """Run the type checker.""" session.install('mypy') diff --git 
a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 6266d0ae3ca8..4827ca35ee16 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -17,10 +17,15 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core >= 1.22.2, < 3.0.0dev', - 'googleapis-common-protos >= 1.5.8', + {# TODO(dovs): remove when 1.x deprecation is complete #} + {% if 'rest' in opts.transport %} + 'google-api-core[grpc] >= 2.1.0, < 3.0.0dev', + {% else %} + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + {% endif %} + 'googleapis-common-protos >= 1.53.0', 'grpcio >= 1.10.0', - 'proto-plus >= 1.15.0', + 'proto-plus >= 1.19.4', {% if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {% endif %} @@ -38,6 +43,7 @@ setuptools.setup( 'Operating System :: OS Independent', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 6065e957562e..58e41e4abfd6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -4,7 +4,6 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ try: except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" @@ -99,7 +89,7 @@ class {{ service.name }}Transport(abc.ABC): host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -127,28 +117,6 @@ class {{ service.name }}Transport(abc.ABC): self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 1a8a243ff8f1..ed243e04ffb8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -12,7 +12,6 @@ from google.api_core import operations_v1 # type: ignore {% endif %} from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 596194ad7e41..f4b393cbce7e 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -44,7 +44,7 @@ def unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.9') def cover(session): """Run the final coverage report. 
This outputs the coverage report aggregating coverage from the unit @@ -56,7 +56,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') @@ -103,7 +103,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.6') +@nox.session(python='3.9') def docs(session): """Build the docs for this library.""" diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 00b671335b79..7606bc71d8b8 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -27,13 +27,17 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + {# TODO(dovs): remove when 1.x deprecation is complete #} + {% if 'rest' in opts.transport %} + 'google-api-core[grpc] >= 2.1.0, < 3.0.0dev', + {% else %} + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + {% endif %} 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', - {%- if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} + 'proto-plus >= 1.19.4', + {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', - {%- endif %} + {% endif %} ), python_requires='>=3.6', classifiers=[ @@ -44,7 +48,6 @@ setuptools.setup( 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 165e12a8b4f9..d4ec2c314210 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -4,7 +4,6 @@ import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -28,7 +27,7 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.async_client_name }} {% endif %} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports -from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.base import _GOOGLE_AUTH_VERSION + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers @@ -55,19 +54,6 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endfilter %} -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -1527,7 +1513,6 @@ def test_{{ service.name|snake_case }}_base_transport(): {% endif %} -@requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: @@ -1547,25 +1532,6 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_{{ service.name|snake_case }}_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.{{ service.name }}Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", 
scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} - ), - quota_project_id="octopus", - ) - - def test_{{ service.name|snake_case }}_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: @@ -1575,7 +1541,6 @@ def test_{{ service.name|snake_case }}_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -1591,21 +1556,6 @@ def test_{{ service.name|snake_case }}_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_{{ service.name|snake_case }}_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - {{ service.client_name }}() - adc.assert_called_once_with( - scopes=( - {%- for scope in service.oauth_scopes %} - '{{ scope }}', - {%- endfor %}), - quota_project_id=None, - ) - - {% if 'grpc' in opts.transport %} @pytest.mark.parametrize( "transport_class", @@ -1614,7 +1564,6 @@ def test_{{ service.name|snake_case }}_auth_adc_old_google_auth(): transports.{{ service.name }}GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -1631,26 +1580,6 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.{{ service.name }}GrpcTransport, - transports.{{ service.name }}GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_{{ service.name|snake_case }}_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %}), - quota_project_id="octopus", - ) @pytest.mark.parametrize( diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 98ceb103dbed..8b446ff44b1d 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -124,7 +124,7 @@ def showcase_library( yield tmp_dir -@nox.session(python="3.8") +@nox.session(python="3.9") def showcase( session, templates="DEFAULT", @@ -141,7 +141,7 @@ def showcase( ) -@nox.session(python="3.8") +@nox.session(python="3.9") def showcase_mtls( session, templates="DEFAULT", @@ -161,7 +161,7 @@ def showcase_mtls( ) -@nox.session(python="3.8") +@nox.session(python="3.9") def showcase_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") showcase( @@ -172,7 +172,7 @@ def showcase_alternative_templates(session): ) -@nox.session(python="3.8") +@nox.session(python="3.9") def showcase_mtls_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") showcase_mtls( @@ -225,7 +225,7 @@ def showcase_unit( # Some code paths require an older version of google-auth. 
# google-auth is a transitive dependency so it isn't in the # lower bound constraints file produced above. - session.install("google-auth==1.21.1") + session.install("google-auth==1.28.0") run_showcase_unit_tests(session, fail_under=0) # 2. Run the tests again with latest version of dependencies @@ -240,31 +240,24 @@ def showcase_unit_alternative_templates(session): run_showcase_unit_tests(session) -@nox.session(python=["3.8"]) +@nox.session(python=["3.9"]) def showcase_unit_add_iam_methods(session): with showcase_library(session, other_opts=("add-iam-methods",)) as lib: session.chdir(lib) - # Unit tests are run twice with different dependencies to exercise - # all code paths. - # TODO(busunkim): remove when default templates require google-auth>=1.25.0 - - # 1. Run tests at lower bound of dependencies + # Unit tests are run twice with different dependencies. + # 1. Run tests at lower bound of dependencies. session.install("nox") session.run("nox", "-s", "update_lower_bounds") session.install(".", "--force-reinstall", "-c", "constraints.txt") - # Some code paths require an older version of google-auth. - # google-auth is a transitive dependency so it isn't in the - # lower bound constraints file produced above. - session.install("google-auth==1.21.1") run_showcase_unit_tests(session, fail_under=0) - # 2. Run the tests again with latest version of dependencies + # 2. Run the tests again with latest version of dependencies. 
session.install(".", "--upgrade", "--force-reinstall") run_showcase_unit_tests(session, fail_under=100) -@nox.session(python="3.8") +@nox.session(python="3.9") def showcase_mypy( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): @@ -280,12 +273,12 @@ def showcase_mypy( session.run("mypy", "--explicit-package-bases", "google") -@nox.session(python="3.8") +@nox.session(python="3.9") def showcase_mypy_alternative_templates(session): showcase_mypy(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) -@nox.session(python="3.8") +@nox.session(python="3.9") def snippetgen(session): # Clone googleapis/api-common-protos which are referenced by the snippet # protos @@ -313,7 +306,7 @@ def snippetgen(session): ) -@nox.session(python="3.8") +@nox.session(python="3.9") def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2cc950d342fc..27b6c1758fa5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -44,11 +44,11 @@ include_package_data=True, install_requires=( "click >= 6.7", - "google-api-core >= 1.17.0", + "google-api-core >= 2.1.0", "googleapis-common-protos >= 1.53.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", - "protobuf >= 3.12.0", + "protobuf >= 3.18.0", "pypandoc >= 1.4", "PyYAML >= 5.1.1", "dataclasses < 0.8; python_version < '3.7'" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index bf723d536705..b90728a618d2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import 
Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -40,15 +39,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class AssetServiceTransport(abc.ABC): """Abstract transport class for AssetService.""" @@ -98,7 +88,7 @@ def __init__( host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -125,27 +115,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 683fa4b9ef0c..f8d325057f5f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -21,7 +21,6 @@ from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 372acb5981cd..c999d03a684c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -55,7 +55,7 @@ def unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.9') def cover(session): """Run the final coverage report. 
This outputs the coverage report aggregating coverage from the unit @@ -67,7 +67,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') @@ -110,7 +110,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.6') +@nox.session(python='3.9') def docs(session): """Build the docs for this library.""" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 379f5e9de640..8994eddd9d97 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -34,10 +34,11 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), + 'proto-plus >= 1.19.4', + 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', + ), python_requires='>=3.6', classifiers=[ 'Development Status :: 3 - Alpha', @@ -47,7 +48,6 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index f864b50348aa..24d9abc43b41 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,7 +38,6 @@ from google.cloud.asset_v1.services.asset_service import AssetServiceClient from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.services.asset_service import transports -from google.cloud.asset_v1.services.asset_service.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets from google.longrunning import operations_pb2 @@ -51,19 +49,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -3560,7 +3545,6 @@ def test_asset_service_base_transport(): transport.operations_client -@requires_google_auth_gte_1_25_0 def test_asset_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: @@ -3579,23 +3563,6 @@ def test_asset_service_base_transport_with_credentials_file(): ) 
-@requires_google_auth_lt_1_25_0 -def test_asset_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AssetServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - def test_asset_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: @@ -3605,7 +3572,6 @@ def test_asset_service_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_asset_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -3620,18 +3586,6 @@ def test_asset_service_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_asset_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AssetServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -3639,7 +3593,6 @@ def test_asset_service_auth_adc_old_google_auth(): transports.AssetServiceGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_asset_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -3653,27 +3606,6 @@ def test_asset_service_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.AssetServiceGrpcTransport, - transports.AssetServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_asset_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 5a0c487eb6c6..a57ba5e1e1cc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -37,15 +36,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class IAMCredentialsTransport(abc.ABC): """Abstract transport class for IAMCredentials.""" @@ -95,7 +85,7 @@ def __init__( host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
self._scopes = scopes @@ -122,27 +112,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index cebb4866b05c..8277d243b257 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 8d60c7f807c5..fa57dfa77798 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -55,7 +55,7 @@ def unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.9') def cover(session): """Run the final coverage report. 
This outputs the coverage report aggregating coverage from the unit @@ -67,7 +67,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') @@ -110,7 +110,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.6') +@nox.session(python='3.9') def docs(session): """Build the docs for this library.""" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 4f182c1ff3b4..2690e8742a38 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -34,10 +34,10 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), + 'proto-plus >= 1.19.4', + ), python_requires='>=3.6', classifiers=[ 'Development Status :: 3 - Alpha', @@ -47,7 +47,6 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 80c276662e27..3bd38a836052 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -35,7 +34,6 @@ from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsAsyncClient from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsClient from google.iam.credentials_v1.services.iam_credentials import transports -from google.iam.credentials_v1.services.iam_credentials.transports.base import _GOOGLE_AUTH_VERSION from google.iam.credentials_v1.types import common from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore @@ -43,19 +41,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -1489,7 +1474,6 @@ def test_iam_credentials_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_iam_credentials_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: @@ -1508,23 +1492,6 @@ def test_iam_credentials_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_iam_credentials_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.IAMCredentialsTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - def test_iam_credentials_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: @@ -1534,7 +1501,6 @@ def test_iam_credentials_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_iam_credentials_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -1549,18 +1515,6 @@ def test_iam_credentials_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_iam_credentials_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - IAMCredentialsClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1568,7 +1522,6 @@ def test_iam_credentials_auth_adc_old_google_auth(): transports.IAMCredentialsGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_iam_credentials_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -1582,27 +1535,6 @@ def test_iam_credentials_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.IAMCredentialsGrpcTransport, - transports.IAMCredentialsGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_iam_credentials_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 92a840f2a353..45efc2431c51 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" @@ -99,7 +89,7 @@ def __init__( host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
self._scopes = scopes @@ -126,27 +116,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 2f18d861c16f..d52cbfc66d9b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 195d115497dd..acef64a6cfbd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try 
pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" @@ -100,7 +90,7 @@ def __init__( host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -127,27 +117,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 9978fe57f74a..7a700c9d2b11 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 2a4ff25e1a9a..1f20a5d83848 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -38,15 +37,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try 
pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" @@ -100,7 +90,7 @@ def __init__( host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -127,27 +117,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 9c07c1172b70..8f91b272c183 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 9daae7a0df33..d8322070774e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -55,7 +55,7 @@ def unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.9') def cover(session): """Run the final coverage report. 
This outputs the coverage report aggregating coverage from the unit @@ -67,7 +67,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') @@ -110,7 +110,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.6') +@nox.session(python='3.9') def docs(session): """Build the docs for this library.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 250bfa5d8c1a..f3fc9586e5fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -34,10 +34,10 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), + 'proto-plus >= 1.19.4', + ), python_requires='>=3.6', classifiers=[ 'Development Status :: 3 - Alpha', @@ -47,7 +47,6 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 8a1d5e3189b7..aef12045abea 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -36,7 +35,6 @@ from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports -from google.cloud.logging_v2.services.config_service_v2.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.logging_v2.types import logging_config from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore @@ -44,19 +42,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -5911,7 +5896,6 @@ def test_config_service_v2_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: @@ -5933,26 +5917,6 @@ def test_config_service_v2_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ConfigServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - ), - quota_project_id="octopus", - ) - - 
def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: @@ -5962,7 +5926,6 @@ def test_config_service_v2_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -5980,18 +5943,6 @@ def test_config_service_v2_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ConfigServiceV2Client() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -5999,7 +5950,6 @@ def test_config_service_v2_auth_adc_old_google_auth(): transports.ConfigServiceV2GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -6013,30 +5963,6 @@ def test_config_service_v2_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_config_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f372ad41b981..bb826104bbc2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -37,7 +36,6 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.services.logging_service_v2 import transports -from google.cloud.logging_v2.services.logging_service_v2.transports.base import _GOOGLE_AUTH_VERSION from 
google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging from google.logging.type import http_request_pb2 # type: ignore @@ -50,19 +48,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -2033,7 +2018,6 @@ def test_logging_service_v2_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: @@ -2056,27 +2040,6 @@ def test_logging_service_v2_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - 
transport = transports.LoggingServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', - ), - quota_project_id="octopus", - ) - - def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: @@ -2086,7 +2049,6 @@ def test_logging_service_v2_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -2105,18 +2067,6 @@ def test_logging_service_v2_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LoggingServiceV2Client() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2124,7 +2074,6 @@ def test_logging_service_v2_auth_adc_old_google_auth(): transports.LoggingServiceV2GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2138,31 +2087,6 @@ def test_logging_service_v2_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_logging_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a42c0e2fdd93..01c23750e8a1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -40,7 +39,6 @@ from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2Client from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports -from google.cloud.logging_v2.services.metrics_service_v2.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.logging_v2.types import logging_metrics from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore @@ -48,19 +46,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the 
default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -1898,7 +1883,6 @@ def test_metrics_service_v2_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: @@ -1921,27 +1905,6 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MetricsServiceV2Transport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 
'https://www.googleapis.com/auth/logging.write', - ), - quota_project_id="octopus", - ) - - def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: @@ -1951,7 +1914,6 @@ def test_metrics_service_v2_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -1970,18 +1932,6 @@ def test_metrics_service_v2_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MetricsServiceV2Client() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1989,7 +1939,6 @@ def test_metrics_service_v2_auth_adc_old_google_auth(): transports.MetricsServiceV2GrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
@@ -2003,31 +1952,6 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2GrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_metrics_service_v2_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index ab43da3637ec..4cd11451e3c3 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -39,15 +38,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ 
-except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class CloudRedisTransport(abc.ABC): """Abstract transport class for CloudRedis.""" @@ -97,7 +87,7 @@ def __init__( host += ':443' self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -124,27 +114,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 1f4e774f65ea..95307155cedc 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -21,7 +21,6 @@ from google.api_core import operations_v1 # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index c42261e8aaa1..318ac36b6a0a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -55,7 +55,7 @@ def unit(session): ) -@nox.session(python='3.7') +@nox.session(python='3.9') def cover(session): """Run the final coverage report. 
This outputs the coverage report aggregating coverage from the unit @@ -67,7 +67,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7']) +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') @@ -110,7 +110,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.6') +@nox.session(python='3.9') def docs(session): """Build the docs for this library.""" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 955a1fe01011..1972f2de4095 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -34,10 +34,10 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), + 'proto-plus >= 1.19.4', + ), python_requires='>=3.6', classifiers=[ 'Development Status :: 3 - Alpha', @@ -47,7 +47,6 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 54f12b158e28..edd01300675b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -15,7 +15,6 
@@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -39,7 +38,6 @@ from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.services.cloud_redis import transports -from google.cloud.redis_v1.services.cloud_redis.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 from google.oauth2 import service_account @@ -48,19 +46,6 @@ import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -2869,7 +2854,6 @@ def test_cloud_redis_base_transport(): transport.operations_client -@requires_google_auth_gte_1_25_0 def test_cloud_redis_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: @@ -2888,23 +2872,6 @@ def test_cloud_redis_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_cloud_redis_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials 
file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudRedisTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - def test_cloud_redis_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: @@ -2914,7 +2881,6 @@ def test_cloud_redis_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_cloud_redis_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: @@ -2929,18 +2895,6 @@ def test_cloud_redis_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_cloud_redis_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudRedisClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -2948,7 +2902,6 @@ def test_cloud_redis_auth_adc_old_google_auth(): transports.CloudRedisGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_cloud_redis_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2962,27 +2915,6 @@ def test_cloud_redis_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_cloud_redis_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ From 99ee3e781f232ed7207a755ee6376516039d0927 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 14 Oct 2021 19:16:14 +0200 Subject: [PATCH 0632/1339] chore(deps): update dependency google-api-core to v2.1.1 (#1027) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-api-core](https://togithub.com/googleapis/python-api-core) | `==2.1.0` -> `==2.1.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-api-core/2.1.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-api-core/2.1.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-api-core/2.1.1/compatibility-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-api-core/2.1.1/confidence-slim/2.1.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-api-core ### [`v2.1.1`](https://togithub.com/googleapis/python-api-core/blob/master/CHANGELOG.md#​211-httpswwwgithubcomgoogleapispython-api-corecomparev210v211-2021-10-13) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7c8c39a187d6..942f398b2ff9 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.0.3 -google-api-core==2.1.0 +google-api-core==2.1.1 googleapis-common-protos==1.53.0 jinja2==3.0.2 MarkupSafe==2.0.1 From c9cb6c70212db698fccadbc18b3abc3298d0eca2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 18 Oct 2021 17:30:13 -0600 Subject: [PATCH 0633/1339] fix: list oneofs in docstring (#1030) * fix: list oneofs in docstring * chore: copy to ads templates * docs: remove separate list in docstring * chore: also update ads --- .../%name/%version/%sub/types/_message.py.j2 | 15 ++++++++ .../%name_%version/%sub/types/_message.py.j2 | 15 ++++++++ .../cloud/asset_v1/types/asset_service.py | 36 +++++++++++++++++++ .../google/cloud/asset_v1/types/assets.py | 19 ++++++++++ .../cloud/logging_v2/types/log_entry.py | 10 ++++++ .../cloud/logging_v2/types/logging_config.py | 4 +++ .../cloud/redis_v1/types/cloud_redis.py | 6 ++++ 7 files changed, 105 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 581c976b5e36..32fb35672d92 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -2,10 +2,25 @@ class {{ message.name }}({{ p }}.Message): r"""{{ message.meta.doc|rst(indent=4) }} {% if message.fields|length %} + {# Only include note if a oneof has more than one member field. #} + {% if message.oneof_fields() %} + {% if message.oneof_fields().values() | map('length') | max > 1 %} + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + {% endif %} + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + {% endif %} Attributes: {% for field in message.fields.values() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(indent=12, nl=False) }} + {% if field.oneof %} + This field is a member of `oneof`_ ``{{ field.oneof }}``. + {% endif %} {% endfor %} {% endif %} """ diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 9bd6d51d82bb..11e6b129be22 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -2,10 +2,25 @@ class {{ message.name }}({{ p }}.Message): r"""{{ message.meta.doc|rst(indent=4) }} {% if message.fields|length %} + {# Only include note if a oneof has more than one member field. 
#} + {% if message.oneof_fields() %} + {% if message.oneof_fields().values() | map('length') | max > 1 %} + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + {% endif %} + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + {% endif %} Attributes: {% for field in message.fields.values() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(indent=12, nl=False) }} + {% if field.oneof %} + This field is a member of `oneof`_ ``{{ field.oneof }}``. + {% endif %} {% endfor %} {% endif %} """ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 739d78d9a1e0..0d028e2bf7c3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -512,13 +512,22 @@ class DeleteFeedRequest(proto.Message): class OutputConfig(proto.Message): r"""Output configuration for export assets destination. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: gcs_destination (google.cloud.asset_v1.types.GcsDestination): Destination on Cloud Storage. + This field is a member of `oneof`_ ``destination``. bigquery_destination (google.cloud.asset_v1.types.BigQueryDestination): Destination on BigQuery. 
The output table stores the fields in asset proto as columns in BigQuery. + This field is a member of `oneof`_ ``destination``. """ gcs_destination = proto.Field( @@ -538,9 +547,12 @@ class OutputConfig(proto.Message): class OutputResult(proto.Message): r"""Output result of export assets. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: gcs_result (google.cloud.asset_v1.types.GcsOutputResult): Export result on Cloud Storage. + This field is a member of `oneof`_ ``result``. """ gcs_result = proto.Field( @@ -569,6 +581,13 @@ class GcsOutputResult(proto.Message): class GcsDestination(proto.Message): r"""A Cloud Storage location. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: uri (str): The uri of the Cloud Storage object. It's the same uri that @@ -581,6 +600,7 @@ class GcsDestination(proto.Message): there is no `hold `__, it will be overwritten with the exported result. + This field is a member of `oneof`_ ``object_uri``. uri_prefix (str): The uri prefix of all generated Cloud Storage objects. Example: "gs://bucket_name/object_name_prefix". Each object @@ -591,6 +611,7 @@ class GcsDestination(proto.Message): compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be returned if file with the same name "gs://bucket_name/object_name_prefix" already exists. + This field is a member of `oneof`_ ``object_uri``. """ uri = proto.Field( @@ -751,9 +772,12 @@ class PubsubDestination(proto.Message): class FeedOutputConfig(proto.Message): r"""Output configuration for asset feed destination. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: pubsub_destination (google.cloud.asset_v1.types.PubsubDestination): Destination on Pub/Sub. + This field is a member of `oneof`_ ``destination``. """ pubsub_destination = proto.Field( @@ -1420,12 +1444,15 @@ class Options(proto.Message): class ConditionContext(proto.Message): r"""The IAM conditions context. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: access_time (google.protobuf.timestamp_pb2.Timestamp): The hypothetical access timestamp to evaluate IAM conditions. Note that this value must not be earlier than the current time; otherwise, an INVALID_ARGUMENT error will be returned. + This field is a member of `oneof`_ ``TimeContext``. """ access_time = proto.Field( @@ -1580,11 +1607,20 @@ class IamPolicyAnalysisOutputConfig(proto.Message): r"""Output configuration for export IAM policy analysis destination. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: gcs_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.GcsDestination): Destination on Cloud Storage. + This field is a member of `oneof`_ ``destination``. bigquery_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination): Destination on BigQuery. + This field is a member of `oneof`_ ``destination``. 
""" class GcsDestination(proto.Message): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index ef30863e3c6c..40a0faf93cef 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -131,6 +131,13 @@ class Asset(proto.Message): types `__ for more information. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: update_time (google.protobuf.timestamp_pb2.Timestamp): The last update timestamp of an asset. update_time is @@ -171,12 +178,15 @@ class Asset(proto.Message): access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): Please also refer to the `access policy user guide `__. + This field is a member of `oneof`_ ``access_context_policy``. access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): Please also refer to the `access level user guide `__. + This field is a member of `oneof`_ ``access_context_policy``. service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): Please also refer to the `service perimeter user guide `__. + This field is a member of `oneof`_ ``access_context_policy``. os_inventory (google.cloud.osconfig.v1.inventory_pb2.Inventory): A representation of runtime OS Inventory information. See `this @@ -882,11 +892,20 @@ class Resource(proto.Message): class Access(proto.Message): r"""An IAM role or permission under analysis. 
+ This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: role (str): The role. + This field is a member of `oneof`_ ``oneof_access``. permission (str): The permission. + This field is a member of `oneof`_ ``oneof_access``. analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): The analysis state of this access. """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 18822503d972..96cfd0f40361 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -36,6 +36,13 @@ class LogEntry(proto.Message): r"""An individual entry in a log. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: log_name (str): Required. The resource name of the log to which this log @@ -84,12 +91,15 @@ class LogEntry(proto.Message): "type.googleapis.com/google.cloud.audit.AuditLog" "type.googleapis.com/google.appengine.logging.v1.RequestLog". + This field is a member of `oneof`_ ``payload``. text_payload (str): The log entry payload, represented as a Unicode string (UTF-8). + This field is a member of `oneof`_ ``payload``. 
json_payload (google.protobuf.struct_pb2.Struct): The log entry payload, represented as a structure that is expressed as a JSON object. + This field is a member of `oneof`_ ``payload``. timestamp (google.protobuf.timestamp_pb2.Timestamp): Optional. The time the event described by the log entry occurred. This time is used to compute the log entry's age diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 1122a620097b..fd04767f0253 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -195,6 +195,9 @@ class LogSink(proto.Message): created within a project, organization, billing account, or folder. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Required. The client-assigned sink identifier, unique within @@ -279,6 +282,7 @@ class LogSink(proto.Message): bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. + This field is a member of `oneof`_ ``options``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the sink. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 793fa802c0b0..3448acb64935 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -501,10 +501,13 @@ class GcsSource(proto.Message): class InputConfig(proto.Message): r"""The input content + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: gcs_source (google.cloud.redis_v1.types.GcsSource): Google Cloud Storage location where input content is located. + This field is a member of `oneof`_ ``source``. """ gcs_source = proto.Field( @@ -558,10 +561,13 @@ class GcsDestination(proto.Message): class OutputConfig(proto.Message): r"""The output content + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: gcs_destination (google.cloud.redis_v1.types.GcsDestination): Google Cloud Storage destination for output content. + This field is a member of `oneof`_ ``destination``. """ gcs_destination = proto.Field( From 5cbe8b2953e64f768ea53eb79b941b6a8be2a041 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 25 Oct 2021 16:05:12 -0700 Subject: [PATCH 0634/1339] fix: Fix rest transport logic (#1039) * fix: Fix rest transport logic This includes 1) Do not include asyncio tests in the generated tests, because rest transport does not have asynio client. 2) Generate body field mock values for generated tests (otherwise grpc transcodding logic would fail). 3) Make `always_use_jwt_access=True` default for rest clients (grpc already does that) to match expected calls in generated tests. 
4) Fix mypy errors for `AuthorizedSession` by ignoring it 5) Include operations_v1 conditionally, only if the client has lro There are few more fixes left, which are expected to be fixed in separate PRs. 1) `message->to_dict->message` roundrtip problem for int64 types is expected to be fixed by https://github.com/googleapis/proto-plus-python/pull/267 2) builtins conflicts (`license_` vs `license` as field name) is expected to be fixed by a TBD PR * fix integration tests --- .../gapic-generator/gapic/schema/wrappers.py | 13 +++++- .../%sub/services/%service/client.py.j2 | 2 - .../services/%service/transports/rest.py.j2 | 16 ++++--- .../%name_%version/%sub/test_%service.py.j2 | 11 ++++- .../tests/unit/schema/wrappers/test_method.py | 44 ++++++++++++++++++- 5 files changed, 74 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index aecda19f4483..24422244a9b9 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -925,9 +925,21 @@ def query_params(self) -> Set[str]: return set(self.input.fields) - params + @property + def body_fields(self) -> Mapping[str, Field]: + bindings = self.http_options + if bindings and bindings[0].body and bindings[0].body != "*": + return self._fields_mapping([bindings[0].body]) + return {} + # TODO(yon-mg): refactor as there may be more than one method signature @utils.cached_property def flattened_fields(self) -> Mapping[str, Field]: + signatures = self.options.Extensions[client_pb2.method_signature] + return self._fields_mapping(signatures) + + # TODO(yon-mg): refactor as there may be more than one method signature + def _fields_mapping(self, signatures) -> Mapping[str, Field]: """Return the signature defined for this method.""" cross_pkg_request = self.input.ident.package != self.ident.package @@ -946,7 +958,6 @@ def filter_fields(sig: str) -> Iterable[Tuple[str, Field]]: yield 
name, field - signatures = self.options.Extensions[client_pb2.method_signature] answer: Dict[str, Field] = collections.OrderedDict( name_and_field for sig in signatures diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 02bfe76135b3..809f728dd179 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -306,9 +306,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - {% if "grpc" in opts.transport %} always_use_jwt_access=True, - {% endif %} ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 994e30d9e48b..d85695b76f60 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -1,14 +1,16 @@ -from google.auth.transport.requests import AuthorizedSession +from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as 
core_exceptions # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import rest_helpers # type: ignore -from google.api_core import path_template # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import rest_helpers # type: ignore +from google.api_core import path_template # type: ignore +from google.api_core import gapic_v1 # type: ignore +{% if service.has_lro %} from google.api_core import operations_v1 +{% endif %} from requests import __version__ as requests_version from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index d4ec2c314210..53ef17652916 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1106,7 +1106,14 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type ) # send a request that will satisfy transcoding - request = request_type({{ method.http_options[0].sample_request}}) + request_init = {{ method.http_options[0].sample_request}} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.mock_value }} + {% endif %} + {% endfor %} + request = request_type(request_init) {% if method.client_streaming %} requests = [request] {% endif %} @@ -2419,6 +2426,7 @@ async def test_test_iam_permissions_from_dict_async(): {% endif %} +{% if 'grpc' in opts.transport %} @pytest.mark.asyncio async def test_transport_close_async(): 
client = {{ service.async_client_name }}( @@ -2429,6 +2437,7 @@ async def test_transport_close_async(): async with client: close.assert_not_called() close.assert_called_once() +{% endif %} def test_transport_close(): transports = { diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index c6a81d9128ac..d377375036f9 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -330,6 +330,35 @@ def test_method_path_params_no_http_rule(): assert method.path_params == [] +def test_body_fields(): + http_rule = http_pb2.HttpRule( + post='/v1/{arms_shape=arms/*}/squids', + body='mantle' + ) + + mantle_stuff = make_field(name='mantle_stuff', type=9) + message = make_message('Mantle', fields=(mantle_stuff,)) + mantle = make_field('mantle', type=11, type_name='Mantle', message=message) + arms_shape = make_field('arms_shape', type=9) + input_message = make_message('Squid', fields=(mantle, arms_shape)) + method = make_method( + 'PutSquid', input_message=input_message, http_rule=http_rule) + assert set(method.body_fields) == {'mantle'} + mock_value = method.body_fields['mantle'].mock_value + assert mock_value == "baz.Mantle(mantle_stuff='mantle_stuff_value')" + + +def test_body_fields_no_body(): + http_rule = http_pb2.HttpRule( + post='/v1/{arms_shape=arms/*}/squids', + ) + + method = make_method( + 'PutSquid', http_rule=http_rule) + + assert not method.body_fields + + def test_method_http_options(): verbs = [ 'get', @@ -363,7 +392,7 @@ def test_method_http_options_no_http_rule(): assert method.path_params == [] -def test_method_http_options_body(): +def test_method_http_options_body_star(): http_rule = http_pb2.HttpRule( post='/v1/{parent=projects/*}/topics', body='*' @@ -376,6 +405,19 @@ def test_method_http_options_body(): }] +def test_method_http_options_body_field(): + http_rule = 
http_pb2.HttpRule( + post='/v1/{parent=projects/*}/topics', + body='body_field' + ) + method = make_method('DoSomething', http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*}/topics', + 'body': 'body_field' + }] + + def test_method_http_options_additional_bindings(): http_rule = http_pb2.HttpRule( post='/v1/{parent=projects/*}/topics', From 4b52f5f2ea56d16c4ea6f19f9a8f3cbff0220412 Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Tue, 26 Oct 2021 20:56:50 -0400 Subject: [PATCH 0635/1339] fix: Adjust Field Names in URI Templates (#1041) * Fix:Adjust Field Names in URI Templates That Conflict with Reserved Names * fix: add grpcio-status module now required by unit tests. Co-authored-by: Kenneth Bandes --- .../gapic-generator/gapic/schema/wrappers.py | 1 + .../gapic-generator/gapic/utils/__init__.py | 2 + .../gapic-generator/gapic/utils/uri_conv.py | 49 +++++++++++++++++++ packages/gapic-generator/noxfile.py | 2 +- .../tests/unit/schema/wrappers/test_method.py | 13 +++++ .../tests/unit/utils/test_uri_conv.py | 35 +++++++++++++ 6 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/gapic/utils/uri_conv.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_uri_conv.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 24422244a9b9..2fbc5a2b20da 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -743,6 +743,7 @@ def try_parse_http_rule(cls, http_rule) -> Optional['HttpRule']: uri = getattr(http_rule, method) if not uri: return None + uri = utils.convert_uri_fieldnames(uri) body = http_rule.body or None return cls(method, uri, body) diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 447b0df837f6..5000d78b49ac 100644 --- 
a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -28,11 +28,13 @@ from gapic.utils.options import Options from gapic.utils.reserved_names import RESERVED_NAMES from gapic.utils.rst import rst +from gapic.utils.uri_conv import convert_uri_fieldnames from gapic.utils.uri_sample import sample_from_path_fields __all__ = ( 'cached_property', + 'convert_uri_fieldnames', 'doc', 'empty', 'is_msg_field_pb', diff --git a/packages/gapic-generator/gapic/utils/uri_conv.py b/packages/gapic-generator/gapic/utils/uri_conv.py new file mode 100644 index 000000000000..988a8c299749 --- /dev/null +++ b/packages/gapic-generator/gapic/utils/uri_conv.py @@ -0,0 +1,49 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gapic.utils.reserved_names import RESERVED_NAMES +from google.api_core import path_template # type: ignore + + +def convert_uri_fieldnames(uri: str) -> str: + """Modify field names in uri_templates to avoid reserved names. + + Args: + uri: a uri template, optionally containing field references in {} braces. + + Returns: + the uri with any field names modified if they conflict with + reserved names. 
+ """ + + def _fix_name_segment(name_seg: str) -> str: + return name_seg + "_" if name_seg in RESERVED_NAMES else name_seg + + def _fix_field_path(field_path: str) -> str: + return ".".join( + (_fix_name_segment(name_seg) for name_seg in field_path.split("."))) + + last = 0 + pieces = [] + for match in path_template._VARIABLE_RE.finditer(uri): + start_pos, end_pos = match.span("name") + if start_pos == end_pos: + continue + pieces.append(uri[last:start_pos]) + fixed_field_path = _fix_field_path(uri[start_pos:end_pos]) + pieces.append(fixed_field_path) + last = end_pos + pieces.append(uri[last:]) + + return "".join(pieces) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 8b446ff44b1d..c4e3f34f4332 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -34,7 +34,7 @@ def unit(session): """Run the unit test suite.""" session.install( - "coverage", "pytest-cov", "pytest", "pytest-xdist", "pyfakefs", + "coverage", "pytest-cov", "pytest", "pytest-xdist", "pyfakefs", "grpcio-status", ) session.install("-e", ".") diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index d377375036f9..72766d453ecb 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -452,6 +452,19 @@ def test_method_http_options_additional_bindings(): }] +def test_method_http_options_reserved_name_in_url(): + http_rule = http_pb2.HttpRule( + post='/v1/license/{license=lic/*}', + body='*' + ) + method = make_method('DoSomething', http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [{ + 'method': 'post', + 'uri': '/v1/license/{license_=lic/*}', + 'body': '*' + }] + + def test_method_http_options_generate_sample(): http_rule = http_pb2.HttpRule( 
get='/v1/{resource.id=projects/*/regions/*/id/**}/stuff', diff --git a/packages/gapic-generator/tests/unit/utils/test_uri_conv.py b/packages/gapic-generator/tests/unit/utils/test_uri_conv.py new file mode 100644 index 000000000000..5c870b430af2 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_uri_conv.py @@ -0,0 +1,35 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock + +import pypandoc + +from gapic import utils + + +def test_convert_uri_fieldname(): + uri = "abc/*/license/{license}/{xyz.reversed=reversed/*}" + expected_uri = "abc/*/license/{license_}/{xyz.reversed_=reversed/*}" + assert utils.convert_uri_fieldnames(uri) == expected_uri + + +def test_convert_uri_fieldname_no_fields(): + uri = "abc/license" + assert utils.convert_uri_fieldnames(uri) == uri + + +def test_convert_uri_fieldname_no_reserved_names(): + uri = "abc/*/books/{book}/{xyz.chapter=page/*}" + assert utils.convert_uri_fieldnames(uri) == uri From c02083204031ecbce0949cc6750db611451481a0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 27 Oct 2021 10:21:37 -0700 Subject: [PATCH 0636/1339] chore: release 0.53.2 (#1037) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md 
b/packages/gapic-generator/CHANGELOG.md index 5a324ca49844..afc4930c0c86 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +### [0.53.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.1...v0.53.2) (2021-10-27) + + +### Bug Fixes + +* Adjust Field Names in URI Templates ([#1041](https://www.github.com/googleapis/gapic-generator-python/issues/1041)) ([06cd7b6](https://www.github.com/googleapis/gapic-generator-python/commit/06cd7b66f0f303b066f7f1f510332ae19aa9de8e)) +* Fix rest transport logic ([#1039](https://www.github.com/googleapis/gapic-generator-python/issues/1039)) ([50d61af](https://www.github.com/googleapis/gapic-generator-python/commit/50d61afd30b021835fe898e41b783f4d04acff09)) +* list oneofs in docstring ([#1030](https://www.github.com/googleapis/gapic-generator-python/issues/1030)) ([a0e25c8](https://www.github.com/googleapis/gapic-generator-python/commit/a0e25c8c00391b99a351e667eddc8b4fecad30d8)) + ### [0.53.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.0...v0.53.1) (2021-10-13) From 3e7585406faa12417e39ea5d230021ae1894a4a6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 27 Oct 2021 19:26:38 +0200 Subject: [PATCH 0637/1339] chore(deps): update dependency protobuf to v3.19.0 (#1038) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.18.1` -> `==3.19.0` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.0/age-slim)](https://docs.renovatebot.com/merge-confidence/) | 
[![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.0/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.0/compatibility-slim/3.18.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.0/confidence-slim/3.18.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™» **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 942f398b2ff9..96fbc33d6480 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.1.1 googleapis-common-protos==1.53.0 jinja2==3.0.2 MarkupSafe==2.0.1 -protobuf==3.18.1 +protobuf==3.19.0 pypandoc==1.6.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From fd48be03ec0d848e08ebf35224f14261ef895c0f Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Wed, 27 Oct 2021 12:54:44 -0700 Subject: [PATCH 0638/1339] fix: more fixes for rest transport (#1042) * fix: more fixes for rest transport This includes: 1) Implicit template support for grpc transcodding (accept `/v1/{project}/stuff` whichis equivalent to `/v1/{project=*}/stuff`) 2) Proper request message construction for paginated methods test 3) Depend on `google-api-core 2.2.0` for rest to accommodate the `int64` support fixes * fix a typo in test method name --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 7 +++++++ packages/gapic-generator/gapic/utils/uri_sample.py | 4 +++- packages/gapic-generator/setup.py | 2 +- .../tests/unit/schema/wrappers/test_method.py | 10 ++++++++++ 5 files changed, 22 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 7606bc71d8b8..7351e4296854 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -29,7 +29,7 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.1.0, < 
3.0.0dev', + 'google-api-core[grpc] >= 2.2.0, < 3.0.0dev', {% else %} 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 53ef17652916..ca0257b08534 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1337,6 +1337,13 @@ def test_{{ method.name|snake_case }}_rest_pager(): req.side_effect = return_values sample_request = {{ method.http_options[0].sample_request }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + sample_request["{{ field.name }}"] = {{ field.mock_value }} + {% endif %} + {% endfor %} + pager = client.{{ method.name|snake_case }}(request=sample_request) {% if method.paged_result_field.map %} diff --git a/packages/gapic-generator/gapic/utils/uri_sample.py b/packages/gapic-generator/gapic/utils/uri_sample.py index 43b8865abfff..0eba82220f82 100644 --- a/packages/gapic-generator/gapic/utils/uri_sample.py +++ b/packages/gapic-generator/gapic/utils/uri_sample.py @@ -71,6 +71,8 @@ def sample_from_path_fields(paths: List[Tuple[str, str]]) -> Dict[Any, Any]: for path, template in paths: sample_value = re.sub( - r"(\*\*|\*)", lambda n: next(sample_names), template) + r"(\*\*|\*)", + lambda n: next(sample_names), template if template else '*' + ) add_field(request, path, sample_value) return request diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 27b6c1758fa5..76e590d457f8 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -44,7 +44,7 @@ include_package_data=True, 
install_requires=( "click >= 6.7", - "google-api-core >= 2.1.0", + "google-api-core >= 2.2.0", "googleapis-common-protos >= 1.53.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 72766d453ecb..42198e09ebcc 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -475,6 +475,16 @@ def test_method_http_options_generate_sample(): 'id': 'projects/sample1/regions/sample2/id/sample3'}} +def test_method_http_options_generate_sample_implicit_template(): + http_rule = http_pb2.HttpRule( + get='/v1/{resource.id}/stuff', + ) + method = make_method('DoSomething', http_rule=http_rule) + sample = method.http_options[0].sample_request + assert json.loads(sample) == {'resource': { + 'id': 'sample1'}} + + def test_method_query_params(): # tests only the basic case of grpc transcoding http_rule = http_pb2.HttpRule( From 7bdf3d112804ddfd3f053ac5b448cf8e9f87eae4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 27 Oct 2021 13:20:22 -0700 Subject: [PATCH 0639/1339] chore: release 0.53.3 (#1045) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index afc4930c0c86..543a94b7eafa 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.53.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.2...v0.53.3) (2021-10-27) + + +### Bug Fixes + +* more fixes for rest transport ([#1042](https://www.github.com/googleapis/gapic-generator-python/issues/1042)) 
([13d5f77](https://www.github.com/googleapis/gapic-generator-python/commit/13d5f77f8b6d4ce1181b29f2335d7584783be753)) + ### [0.53.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.1...v0.53.2) (2021-10-27) From 0df24a1fccf778601b0e9c545615bffa5ca7e363 Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Thu, 28 Oct 2021 13:44:56 -0400 Subject: [PATCH 0640/1339] fix: methods returning Operation w/o operation_info are now allowed. (#1047) Formerly, a method that returned google.longrunning.Operation but did not have the google.longrunning.operation_info extension was considered an error; but there are valid cases for this, particularly in the Operation service itself. This change makes it acceptable to have a method like this. It is not treated as a long-running operation. Co-authored-by: Kenneth Bandes --- packages/gapic-generator/gapic/schema/api.py | 4 ++ .../tests/unit/schema/test_api.py | 54 ++++++++++++++++++- 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index cdfb5ca6e701..0e632ed21783 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -817,6 +817,10 @@ def _maybe_get_lro( # If the output type is google.longrunning.Operation, we use # a specialized object in its place. if meth_pb.output_type.endswith('google.longrunning.Operation'): + if not meth_pb.options.HasExtension(operations_pb2.operation_info): + # This is not a long running operation even though it returns + # an Operation. 
+ return None op = meth_pb.options.Extensions[operations_pb2.operation_info] if not op.response_type or not op.metadata_type: raise TypeError( diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index de705d88dfe5..2c139ce7f162 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -944,19 +944,69 @@ def test_lro(): assert len(lro_proto.messages) == 1 -def test_lro_missing_annotation(): +def test_lro_operation_no_annotation(): + # A method that returns google.longrunning.Operation, + # but has no operation_info option, is treated as not lro. + # Set up a prior proto that mimics google/protobuf/empty.proto lro_proto = api.Proto.build(make_file_pb2( name='operations.proto', package='google.longrunning', messages=(make_message_pb2(name='Operation'),), ), file_to_generate=False, naming=make_naming()) - # Set up a method with an LRO but no annotation. + # Set up a method that returns an Operation, but has no annotation. + method_pb2 = descriptor_pb2.MethodDescriptorProto( + name='GetOperation', + input_type='google.example.v3.GetOperationRequest', + output_type='google.longrunning.Operation', + ) + + # Set up the service with an RPC. + service_pb = descriptor_pb2.ServiceDescriptorProto( + name='OperationService', + method=(method_pb2,), + ) + + # Set up the messages, including the annotated ones. + messages = ( + make_message_pb2(name='GetOperationRequest', fields=()), + ) + + # Finally, set up the file that encompasses these. + fdp = make_file_pb2( + package='google.example.v3', + messages=messages, + services=(service_pb,), + ) + + # Make the proto object. 
+ proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ + 'google/longrunning/operations.proto': lro_proto, + }, naming=make_naming()) + + service = proto.services['google.example.v3.OperationService'] + method = service.methods['GetOperation'] + assert method.lro is None + + +def test_lro_bad_annotation(): + # Set up a prior proto that mimics google/protobuf/empty.proto + lro_proto = api.Proto.build(make_file_pb2( + name='operations.proto', package='google.longrunning', + messages=(make_message_pb2(name='Operation'),), + ), file_to_generate=False, naming=make_naming()) + + # Set up a method with an LRO and incomplete annotation. method_pb2 = descriptor_pb2.MethodDescriptorProto( name='AsyncDoThing', input_type='google.example.v3.AsyncDoThingRequest', output_type='google.longrunning.Operation', ) + method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( + operations_pb2.OperationInfo( + response_type='google.example.v3.AsyncDoThingResponse', + ), + ) # Set up the service with an RPC. service_pb = descriptor_pb2.ServiceDescriptorProto( From d6c4dbaff61fb5c6fbc11fbe5e6bdbff30a466bc Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 29 Oct 2021 10:45:58 -0700 Subject: [PATCH 0641/1339] fix: fix tests generation logic (#1049) * fix: fix tests generation logic This includes: 1) Fix test logic for grpc+rest case, when clients with both transports need to be initialized in parametrized tests 2) Fix 100% coverage problem for rest clients, when the http error (>= 400 error code) case logic was not covered. 
* fix integration testrs --- .../%name_%version/%sub/test_%service.py.j2 | 76 ++++++++++++++----- .../unit/gapic/asset_v1/test_asset_service.py | 18 ++--- .../credentials_v1/test_iam_credentials.py | 18 ++--- .../logging_v2/test_config_service_v2.py | 18 ++--- .../logging_v2/test_logging_service_v2.py | 18 ++--- .../logging_v2/test_metrics_service_v2.py | 18 ++--- .../unit/gapic/redis_v1/test_cloud_redis.py | 18 ++--- 7 files changed, 113 insertions(+), 71 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ca0257b08534..ad2e82861c49 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -13,6 +13,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule {% if 'rest' in opts.transport %} from requests import Response +from requests import Request from requests.sessions import Session {% endif %} @@ -104,7 +105,8 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(client_c {% if 'grpc' in opts.transport %} (transports.{{ service.grpc_transport_name }}, "grpc"), (transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {% elif 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} (transports.{{ service.rest_transport_name }}, "rest"), {% endif %} ]) @@ -160,7 +162,8 @@ def test_{{ service.client_name|snake_case }}_get_transport_class(): {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {% elif 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} ({{ 
service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), {% endif %} ]) @@ -186,7 +189,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -203,7 +206,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,7 +223,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -247,7 +250,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,7 +268,8 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans ({{ service.async_client_name }}, 
transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "true"), ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "false"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", "false"), - {% elif 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "true"), ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "false"), {% endif %} @@ -285,7 +289,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -319,7 +323,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -336,7 +340,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp with mock.patch.object(transport_class, '__init__') as patched: with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -353,7 +357,8 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ 
service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {% elif 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), {% endif %} ]) @@ -364,7 +369,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,7 +385,8 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), - {% elif 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), {% endif %} ]) @@ -391,7 +397,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1182,14 +1188,48 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type {% endif %} +def test_{{ method.name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # 
send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request}} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.mock_value }} + {% endif %} + {% endfor %} + request = request_type(request_init) + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + {% if method.client_streaming %} + client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + client.{{ method.name|snake_case }}(request) + {% endif %} + + def test_{{ method.name|snake_case }}_rest_from_dict(): test_{{ method.name|snake_case }}_rest(request_type=dict) {% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_rest_flattened(): +def test_{{ method.name|snake_case }}_rest_flattened(transport: str = 'rest'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1242,9 +1282,10 @@ def test_{{ method.name|snake_case }}_rest_flattened(): {# TODO(kbandes) - reverse-transcode request args to check all request fields #} -def test_{{ method.name|snake_case }}_rest_flattened_error(): +def test_{{ method.name|snake_case }}_rest_flattened_error(transport: str = 'rest'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1460,7 +1501,8 @@ def test_transport_get_channel(): {% if 'grpc' in opts.transport %} transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}, - {% elif 'rest' in opts.transport %} + {% endif %} + {% if 'rest' in opts.transport %} transports.{{ service.rest_transport_name }}, {% endif %} ]) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 24d9abc43b41..98379ffdcee5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -161,7 +161,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -178,7 +178,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, 
'__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -195,7 +195,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -222,7 +222,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -253,7 +253,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -287,7 +287,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -304,7 +304,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans with 
mock.patch.object(transport_class, '__init__') as patched: with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -328,7 +328,7 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -351,7 +351,7 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 3bd38a836052..1ca68f9187cf 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -153,7 +153,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = 
client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -170,7 +170,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -187,7 +187,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,7 +214,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,7 +245,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -279,7 +279,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, 
transport_class, tra expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -296,7 +296,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra with mock.patch.object(transport_class, '__init__') as patched: with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -320,7 +320,7 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -343,7 +343,7 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index aef12045abea..60e262121907 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -154,7 +154,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -171,7 +171,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -188,7 +188,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -215,7 +215,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -246,7 +246,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t options = 
client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -280,7 +280,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -297,7 +297,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t with mock.patch.object(transport_class, '__init__') as patched: with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -321,7 +321,7 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -344,7 +344,7 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index bb826104bbc2..ddf1b0c45db3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -160,7 +160,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -177,7 +177,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,7 +194,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,7 +221,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, options = client_options.ClientOptions(quota_project_id="octopus") with 
mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -252,7 +252,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -286,7 +286,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -303,7 +303,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, with mock.patch.object(transport_class, '__init__') as patched: with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -327,7 +327,7 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,7 +350,7 @@ def 
test_logging_service_v2_client_client_options_credentials_file(client_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 01c23750e8a1..fc8a79736621 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -158,7 +158,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -175,7 +175,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -192,7 +192,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with 
mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,7 +219,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -250,7 +250,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -284,7 +284,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -301,7 +301,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, with mock.patch.object(transport_class, '__init__') as patched: with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -325,7 +325,7 @@ def 
test_metrics_service_v2_client_client_options_scopes(client_class, transport ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -348,7 +348,7 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index edd01300675b..2d55a27a4ae0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -158,7 +158,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -175,7 +175,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = 
client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -192,7 +192,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,7 +219,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -250,7 +250,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -284,7 +284,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -301,7 +301,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo with mock.patch.object(transport_class, '__init__') as patched: with 
mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -325,7 +325,7 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -348,7 +348,7 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", From 0957a37707c4880be5bb0ddcecea49d5bb3b2b34 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 29 Oct 2021 15:03:26 -0400 Subject: [PATCH 0642/1339] fix: add 'dict' type annotation to 'request' for async_client (#1051) --- .../%sub/services/%service/async_client.py.j2 | 4 +- .../services/asset_service/async_client.py | 48 +++++----- .../services/iam_credentials/async_client.py | 16 ++-- .../config_service_v2/async_client.py | 92 +++++++++---------- .../logging_service_v2/async_client.py | 20 ++-- .../metrics_service_v2/async_client.py | 20 ++-- .../services/cloud_redis/async_client.py | 36 ++++---- 7 files changed, 118 insertions(+), 118 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 
1f60736c11fb..bd955dabcb00 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -149,7 +149,7 @@ class {{ service.async_client_name }}: {% for method in service.methods.values() %} {%+ if not method.server_streaming %}async {% endif %}def {{ method.name|snake_case }}(self, {% if not method.client_streaming %} - request: {{ method.input.ident }} = None, + request: Union[{{ method.input.ident }}, dict] = None, *, {% for field in method.flattened_fields.values() %} {{ field.name }}: {{ field.ident }} = None, @@ -170,7 +170,7 @@ class {{ service.async_client_name }}: Args: {% if not method.client_streaming %} - request (:class:`{{ method.input.ident.sphinx }}`): + request (Union[{{ method.input.ident.sphinx }}, dict]): The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 4d7ae3b74a57..7048813d2e9d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -153,7 +153,7 @@ def __init__(self, *, ) async def export_assets(self, - request: asset_service.ExportAssetsRequest = None, + request: Union[asset_service.ExportAssetsRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -174,7 +174,7 @@ async def export_assets(self, the export operation usually finishes within 5 minutes. 
Args: - request (:class:`google.cloud.asset_v1.types.ExportAssetsRequest`): + request (Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]): The request object. Export asset request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -232,7 +232,7 @@ async def export_assets(self, return response async def list_assets(self, - request: asset_service.ListAssetsRequest = None, + request: Union[asset_service.ListAssetsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -243,7 +243,7 @@ async def list_assets(self, paged results in response. Args: - request (:class:`google.cloud.asset_v1.types.ListAssetsRequest`): + request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): The request object. ListAssets request. parent (:class:`str`): Required. Name of the organization or project the assets @@ -321,7 +321,7 @@ async def list_assets(self, return response async def batch_get_assets_history(self, - request: asset_service.BatchGetAssetsHistoryRequest = None, + request: Union[asset_service.BatchGetAssetsHistoryRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -336,7 +336,7 @@ async def batch_get_assets_history(self, INVALID_ARGUMENT error. Args: - request (:class:`google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest`): + request (Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]): The request object. Batch get assets history request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -386,7 +386,7 @@ async def batch_get_assets_history(self, return response async def create_feed(self, - request: asset_service.CreateFeedRequest = None, + request: Union[asset_service.CreateFeedRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -398,7 +398,7 @@ async def create_feed(self, updates. 
Args: - request (:class:`google.cloud.asset_v1.types.CreateFeedRequest`): + request (Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]): The request object. Create asset feed request. parent (:class:`str`): Required. The name of the @@ -474,7 +474,7 @@ async def create_feed(self, return response async def get_feed(self, - request: asset_service.GetFeedRequest = None, + request: Union[asset_service.GetFeedRequest, dict] = None, *, name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -484,7 +484,7 @@ async def get_feed(self, r"""Gets details about an asset feed. Args: - request (:class:`google.cloud.asset_v1.types.GetFeedRequest`): + request (Union[google.cloud.asset_v1.types.GetFeedRequest, dict]): The request object. Get asset feed request. name (:class:`str`): Required. The name of the Feed and it must be in the @@ -562,7 +562,7 @@ async def get_feed(self, return response async def list_feeds(self, - request: asset_service.ListFeedsRequest = None, + request: Union[asset_service.ListFeedsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -573,7 +573,7 @@ async def list_feeds(self, project/folder/organization. Args: - request (:class:`google.cloud.asset_v1.types.ListFeedsRequest`): + request (Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]): The request object. List asset feeds request. parent (:class:`str`): Required. The parent @@ -646,7 +646,7 @@ async def list_feeds(self, return response async def update_feed(self, - request: asset_service.UpdateFeedRequest = None, + request: Union[asset_service.UpdateFeedRequest, dict] = None, *, feed: asset_service.Feed = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -656,7 +656,7 @@ async def update_feed(self, r"""Updates an asset feed configuration. Args: - request (:class:`google.cloud.asset_v1.types.UpdateFeedRequest`): + request (Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]): The request object. 
Update asset feed request. feed (:class:`google.cloud.asset_v1.types.Feed`): Required. The new values of feed details. It must match @@ -728,7 +728,7 @@ async def update_feed(self, return response async def delete_feed(self, - request: asset_service.DeleteFeedRequest = None, + request: Union[asset_service.DeleteFeedRequest, dict] = None, *, name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -738,7 +738,7 @@ async def delete_feed(self, r"""Deletes an asset feed. Args: - request (:class:`google.cloud.asset_v1.types.DeleteFeedRequest`): + request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): The request object. name (:class:`str`): Required. The name of the feed and it must be in the @@ -802,7 +802,7 @@ async def delete_feed(self, ) async def search_all_resources(self, - request: asset_service.SearchAllResourcesRequest = None, + request: Union[asset_service.SearchAllResourcesRequest, dict] = None, *, scope: str = None, query: str = None, @@ -817,7 +817,7 @@ async def search_all_resources(self, desired scope, otherwise the request will be rejected. Args: - request (:class:`google.cloud.asset_v1.types.SearchAllResourcesRequest`): + request (Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]): The request object. Search all resources request. scope (:class:`str`): Required. A scope can be a project, a folder, or an @@ -989,7 +989,7 @@ async def search_all_resources(self, return response async def search_all_iam_policies(self, - request: asset_service.SearchAllIamPoliciesRequest = None, + request: Union[asset_service.SearchAllIamPoliciesRequest, dict] = None, *, scope: str = None, query: str = None, @@ -1003,7 +1003,7 @@ async def search_all_iam_policies(self, desired scope, otherwise the request will be rejected. Args: - request (:class:`google.cloud.asset_v1.types.SearchAllIamPoliciesRequest`): + request (Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]): The request object. 
Search all IAM policies request. scope (:class:`str`): Required. A scope can be a project, a folder, or an @@ -1155,7 +1155,7 @@ async def search_all_iam_policies(self, return response async def analyze_iam_policy(self, - request: asset_service.AnalyzeIamPolicyRequest = None, + request: Union[asset_service.AnalyzeIamPolicyRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1165,7 +1165,7 @@ async def analyze_iam_policy(self, what accesses on which resources. Args: - request (:class:`google.cloud.asset_v1.types.AnalyzeIamPolicyRequest`): + request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]): The request object. A request message for [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1217,7 +1217,7 @@ async def analyze_iam_policy(self, return response async def analyze_iam_policy_longrunning(self, - request: asset_service.AnalyzeIamPolicyLongrunningRequest = None, + request: Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1237,7 +1237,7 @@ async def analyze_iam_policy_longrunning(self, to help callers to map responses to requests. Args: - request (:class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest`): + request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]): The request object. A request message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index bdf30ffa4921..ff6175ab8e8b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -158,7 +158,7 @@ def __init__(self, *, ) async def generate_access_token(self, - request: common.GenerateAccessTokenRequest = None, + request: Union[common.GenerateAccessTokenRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -172,7 +172,7 @@ async def generate_access_token(self, account. Args: - request (:class:`google.iam.credentials_v1.types.GenerateAccessTokenRequest`): + request (Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]): The request object. name (:class:`str`): Required. The resource name of the service account for @@ -291,7 +291,7 @@ async def generate_access_token(self, return response async def generate_id_token(self, - request: common.GenerateIdTokenRequest = None, + request: Union[common.GenerateIdTokenRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -305,7 +305,7 @@ async def generate_id_token(self, account. Args: - request (:class:`google.iam.credentials_v1.types.GenerateIdTokenRequest`): + request (Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]): The request object. name (:class:`str`): Required. 
The resource name of the service account for @@ -418,7 +418,7 @@ async def generate_id_token(self, return response async def sign_blob(self, - request: common.SignBlobRequest = None, + request: Union[common.SignBlobRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -431,7 +431,7 @@ async def sign_blob(self, private key. Args: - request (:class:`google.iam.credentials_v1.types.SignBlobRequest`): + request (Union[google.iam.credentials_v1.types.SignBlobRequest, dict]): The request object. name (:class:`str`): Required. The resource name of the service account for @@ -531,7 +531,7 @@ async def sign_blob(self, return response async def sign_jwt(self, - request: common.SignJwtRequest = None, + request: Union[common.SignJwtRequest, dict] = None, *, name: str = None, delegates: Sequence[str] = None, @@ -544,7 +544,7 @@ async def sign_jwt(self, private key. Args: - request (:class:`google.iam.credentials_v1.types.SignJwtRequest`): + request (Union[google.iam.credentials_v1.types.SignJwtRequest, dict]): The request object. name (:class:`str`): Required. 
The resource name of the service account for diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 9b0c3e83acbc..61556342cb90 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -157,7 +157,7 @@ def __init__(self, *, ) async def list_buckets(self, - request: logging_config.ListBucketsRequest = None, + request: Union[logging_config.ListBucketsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -167,7 +167,7 @@ async def list_buckets(self, r"""Lists buckets. Args: - request (:class:`google.cloud.logging_v2.types.ListBucketsRequest`): + request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be @@ -253,7 +253,7 @@ async def list_buckets(self, return response async def get_bucket(self, - request: logging_config.GetBucketRequest = None, + request: Union[logging_config.GetBucketRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -262,7 +262,7 @@ async def get_bucket(self, r"""Gets a bucket. Args: - request (:class:`google.cloud.logging_v2.types.GetBucketRequest`): + request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): The request object. The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -305,7 +305,7 @@ async def get_bucket(self, return response async def create_bucket(self, - request: logging_config.CreateBucketRequest = None, + request: Union[logging_config.CreateBucketRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -316,7 +316,7 @@ async def create_bucket(self, cannot be changed. Args: - request (:class:`google.cloud.logging_v2.types.CreateBucketRequest`): + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -359,7 +359,7 @@ async def create_bucket(self, return response async def update_bucket(self, - request: logging_config.UpdateBucketRequest = None, + request: Union[logging_config.UpdateBucketRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -378,7 +378,7 @@ async def update_bucket(self, A buckets region may not be modified after it is created. Args: - request (:class:`google.cloud.logging_v2.types.UpdateBucketRequest`): + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -421,7 +421,7 @@ async def update_bucket(self, return response async def delete_bucket(self, - request: logging_config.DeleteBucketRequest = None, + request: Union[logging_config.DeleteBucketRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -432,7 +432,7 @@ async def delete_bucket(self, the bucket will be permanently deleted. Args: - request (:class:`google.cloud.logging_v2.types.DeleteBucketRequest`): + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): The request object. The parameters to `DeleteBucket`. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -468,7 +468,7 @@ async def delete_bucket(self, ) async def undelete_bucket(self, - request: logging_config.UndeleteBucketRequest = None, + request: Union[logging_config.UndeleteBucketRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -478,7 +478,7 @@ async def undelete_bucket(self, may be undeleted within the grace period of 7 days. Args: - request (:class:`google.cloud.logging_v2.types.UndeleteBucketRequest`): + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -514,7 +514,7 @@ async def undelete_bucket(self, ) async def list_views(self, - request: logging_config.ListViewsRequest = None, + request: Union[logging_config.ListViewsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -524,7 +524,7 @@ async def list_views(self, r"""Lists views on a bucket. Args: - request (:class:`google.cloud.logging_v2.types.ListViewsRequest`): + request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): The request object. The parameters to `ListViews`. parent (:class:`str`): Required. The bucket whose views are to be listed: @@ -602,7 +602,7 @@ async def list_views(self, return response async def get_view(self, - request: logging_config.GetViewRequest = None, + request: Union[logging_config.GetViewRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -611,7 +611,7 @@ async def get_view(self, r"""Gets a view. Args: - request (:class:`google.cloud.logging_v2.types.GetViewRequest`): + request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): The request object. The parameters to `GetView`. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -656,7 +656,7 @@ async def get_view(self, return response async def create_view(self, - request: logging_config.CreateViewRequest = None, + request: Union[logging_config.CreateViewRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -666,7 +666,7 @@ async def create_view(self, contain a maximum of 50 views. Args: - request (:class:`google.cloud.logging_v2.types.CreateViewRequest`): + request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -711,7 +711,7 @@ async def create_view(self, return response async def update_view(self, - request: logging_config.UpdateViewRequest = None, + request: Union[logging_config.UpdateViewRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -721,7 +721,7 @@ async def update_view(self, existing view with values from the new view: ``filter``. Args: - request (:class:`google.cloud.logging_v2.types.UpdateViewRequest`): + request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -766,7 +766,7 @@ async def update_view(self, return response async def delete_view(self, - request: logging_config.DeleteViewRequest = None, + request: Union[logging_config.DeleteViewRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -775,7 +775,7 @@ async def delete_view(self, r"""Deletes a view from a bucket. Args: - request (:class:`google.cloud.logging_v2.types.DeleteViewRequest`): + request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): The request object. The parameters to `DeleteView`. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -811,7 +811,7 @@ async def delete_view(self, ) async def list_sinks(self, - request: logging_config.ListSinksRequest = None, + request: Union[logging_config.ListSinksRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -821,7 +821,7 @@ async def list_sinks(self, r"""Lists sinks. Args: - request (:class:`google.cloud.logging_v2.types.ListSinksRequest`): + request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. parent (:class:`str`): Required. The parent resource whose sinks are to be @@ -911,7 +911,7 @@ async def list_sinks(self, return response async def get_sink(self, - request: logging_config.GetSinkRequest = None, + request: Union[logging_config.GetSinkRequest, dict] = None, *, sink_name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -921,7 +921,7 @@ async def get_sink(self, r"""Gets a sink. Args: - request (:class:`google.cloud.logging_v2.types.GetSinkRequest`): + request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. sink_name (:class:`str`): Required. The resource name of the sink: @@ -1008,7 +1008,7 @@ async def get_sink(self, return response async def create_sink(self, - request: logging_config.CreateSinkRequest = None, + request: Union[logging_config.CreateSinkRequest, dict] = None, *, parent: str = None, sink: logging_config.LogSink = None, @@ -1023,7 +1023,7 @@ async def create_sink(self, entries only from the resource owning the sink. Args: - request (:class:`google.cloud.logging_v2.types.CreateSinkRequest`): + request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. parent (:class:`str`): Required. 
The resource in which to create the sink: @@ -1112,7 +1112,7 @@ async def create_sink(self, return response async def update_sink(self, - request: logging_config.UpdateSinkRequest = None, + request: Union[logging_config.UpdateSinkRequest, dict] = None, *, sink_name: str = None, sink: logging_config.LogSink = None, @@ -1129,7 +1129,7 @@ async def update_sink(self, the ``unique_writer_identity`` field. Args: - request (:class:`google.cloud.logging_v2.types.UpdateSinkRequest`): + request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. sink_name (:class:`str`): Required. The full resource name of the sink to update, @@ -1248,7 +1248,7 @@ async def update_sink(self, return response async def delete_sink(self, - request: logging_config.DeleteSinkRequest = None, + request: Union[logging_config.DeleteSinkRequest, dict] = None, *, sink_name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -1259,7 +1259,7 @@ async def delete_sink(self, then that service account is also deleted. Args: - request (:class:`google.cloud.logging_v2.types.DeleteSinkRequest`): + request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. sink_name (:class:`str`): Required. The full resource name of the sink to delete, @@ -1331,7 +1331,7 @@ async def delete_sink(self, ) async def list_exclusions(self, - request: logging_config.ListExclusionsRequest = None, + request: Union[logging_config.ListExclusionsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -1341,7 +1341,7 @@ async def list_exclusions(self, r"""Lists all the exclusions in a parent resource. Args: - request (:class:`google.cloud.logging_v2.types.ListExclusionsRequest`): + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): The request object. The parameters to `ListExclusions`. parent (:class:`str`): Required. 
The parent resource whose exclusions are to be @@ -1431,7 +1431,7 @@ async def list_exclusions(self, return response async def get_exclusion(self, - request: logging_config.GetExclusionRequest = None, + request: Union[logging_config.GetExclusionRequest, dict] = None, *, name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -1441,7 +1441,7 @@ async def get_exclusion(self, r"""Gets the description of an exclusion. Args: - request (:class:`google.cloud.logging_v2.types.GetExclusionRequest`): + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): The request object. The parameters to `GetExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion: @@ -1531,7 +1531,7 @@ async def get_exclusion(self, return response async def create_exclusion(self, - request: logging_config.CreateExclusionRequest = None, + request: Union[logging_config.CreateExclusionRequest, dict] = None, *, parent: str = None, exclusion: logging_config.LogExclusion = None, @@ -1545,7 +1545,7 @@ async def create_exclusion(self, resource. Args: - request (:class:`google.cloud.logging_v2.types.CreateExclusionRequest`): + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): The request object. The parameters to `CreateExclusion`. parent (:class:`str`): Required. The parent resource in which to create the @@ -1638,7 +1638,7 @@ async def create_exclusion(self, return response async def update_exclusion(self, - request: logging_config.UpdateExclusionRequest = None, + request: Union[logging_config.UpdateExclusionRequest, dict] = None, *, name: str = None, exclusion: logging_config.LogExclusion = None, @@ -1651,7 +1651,7 @@ async def update_exclusion(self, exclusion. Args: - request (:class:`google.cloud.logging_v2.types.UpdateExclusionRequest`): + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): The request object. The parameters to `UpdateExclusion`. name (:class:`str`): Required. 
The resource name of the exclusion to update: @@ -1759,7 +1759,7 @@ async def update_exclusion(self, return response async def delete_exclusion(self, - request: logging_config.DeleteExclusionRequest = None, + request: Union[logging_config.DeleteExclusionRequest, dict] = None, *, name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -1769,7 +1769,7 @@ async def delete_exclusion(self, r"""Deletes an exclusion. Args: - request (:class:`google.cloud.logging_v2.types.DeleteExclusionRequest`): + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): The request object. The parameters to `DeleteExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion to @@ -1842,7 +1842,7 @@ async def delete_exclusion(self, ) async def get_cmek_settings(self, - request: logging_config.GetCmekSettingsRequest = None, + request: Union[logging_config.GetCmekSettingsRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1859,7 +1859,7 @@ async def get_cmek_settings(self, for more information. Args: - request (:class:`google.cloud.logging_v2.types.GetCmekSettingsRequest`): + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs @@ -1918,7 +1918,7 @@ async def get_cmek_settings(self, return response async def update_cmek_settings(self, - request: logging_config.UpdateCmekSettingsRequest = None, + request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1941,7 +1941,7 @@ async def update_cmek_settings(self, for more information. Args: - request (:class:`google.cloud.logging_v2.types.UpdateCmekSettingsRequest`): + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. 
The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f9e7efddea07..ec3fa2fe9271 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -149,7 +149,7 @@ def __init__(self, *, ) async def delete_log(self, - request: logging.DeleteLogRequest = None, + request: Union[logging.DeleteLogRequest, dict] = None, *, log_name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -163,7 +163,7 @@ async def delete_log(self, with a timestamp before the operation will be deleted. Args: - request (:class:`google.cloud.logging_v2.types.DeleteLogRequest`): + request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): The request object. The parameters to DeleteLog. log_name (:class:`str`): Required. The resource name of the log to delete: @@ -238,7 +238,7 @@ async def delete_log(self, ) async def write_log_entries(self, - request: logging.WriteLogEntriesRequest = None, + request: Union[logging.WriteLogEntriesRequest, dict] = None, *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, @@ -257,7 +257,7 @@ async def write_log_entries(self, organizations, billing accounts or folders) Args: - request (:class:`google.cloud.logging_v2.types.WriteLogEntriesRequest`): + request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. log_name (:class:`str`): Optional. 
A default log resource name that is assigned @@ -407,7 +407,7 @@ async def write_log_entries(self, return response async def list_log_entries(self, - request: logging.ListLogEntriesRequest = None, + request: Union[logging.ListLogEntriesRequest, dict] = None, *, resource_names: Sequence[str] = None, filter: str = None, @@ -422,7 +422,7 @@ async def list_log_entries(self, Logs `__. Args: - request (:class:`google.cloud.logging_v2.types.ListLogEntriesRequest`): + request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. resource_names (:class:`Sequence[str]`): Required. Names of one or more parent resources from @@ -544,7 +544,7 @@ async def list_log_entries(self, return response async def list_monitored_resource_descriptors(self, - request: logging.ListMonitoredResourceDescriptorsRequest = None, + request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -554,7 +554,7 @@ async def list_monitored_resource_descriptors(self, used by Logging. Args: - request (:class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest`): + request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to ListMonitoredResourceDescriptors retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -612,7 +612,7 @@ async def list_monitored_resource_descriptors(self, return response async def list_logs(self, - request: logging.ListLogsRequest = None, + request: Union[logging.ListLogsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -624,7 +624,7 @@ async def list_logs(self, listed. Args: - request (:class:`google.cloud.logging_v2.types.ListLogsRequest`): + request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. 
parent (:class:`str`): Required. The resource name that owns the logs: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 44218ec65135..088d179d4e45 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -150,7 +150,7 @@ def __init__(self, *, ) async def list_log_metrics(self, - request: logging_metrics.ListLogMetricsRequest = None, + request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -160,7 +160,7 @@ async def list_log_metrics(self, r"""Lists logs-based metrics. Args: - request (:class:`google.cloud.logging_v2.types.ListLogMetricsRequest`): + request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. parent (:class:`str`): Required. The name of the project containing the @@ -247,7 +247,7 @@ async def list_log_metrics(self, return response async def get_log_metric(self, - request: logging_metrics.GetLogMetricRequest = None, + request: Union[logging_metrics.GetLogMetricRequest, dict] = None, *, metric_name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -257,7 +257,7 @@ async def get_log_metric(self, r"""Gets a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.GetLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. metric_name (:class:`str`): Required. 
The resource name of the desired metric: @@ -341,7 +341,7 @@ async def get_log_metric(self, return response async def create_log_metric(self, - request: logging_metrics.CreateLogMetricRequest = None, + request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, *, parent: str = None, metric: logging_metrics.LogMetric = None, @@ -352,7 +352,7 @@ async def create_log_metric(self, r"""Creates a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.CreateLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. parent (:class:`str`): Required. The resource name of the project in which to @@ -441,7 +441,7 @@ async def create_log_metric(self, return response async def update_log_metric(self, - request: logging_metrics.UpdateLogMetricRequest = None, + request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, *, metric_name: str = None, metric: logging_metrics.LogMetric = None, @@ -452,7 +452,7 @@ async def update_log_metric(self, r"""Creates or updates a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.UpdateLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to update: @@ -548,7 +548,7 @@ async def update_log_metric(self, return response async def delete_log_metric(self, - request: logging_metrics.DeleteLogMetricRequest = None, + request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, *, metric_name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -558,7 +558,7 @@ async def delete_log_metric(self, r"""Deletes a logs-based metric. Args: - request (:class:`google.cloud.logging_v2.types.DeleteLogMetricRequest`): + request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. 
The parameters to DeleteLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to delete: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 612026a68860..205b6a01f362 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -173,7 +173,7 @@ def __init__(self, *, ) async def list_instances(self, - request: cloud_redis.ListInstancesRequest = None, + request: Union[cloud_redis.ListInstancesRequest, dict] = None, *, parent: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -192,7 +192,7 @@ async def list_instances(self, are aggregated. Args: - request (:class:`google.cloud.redis_v1.types.ListInstancesRequest`): + request (Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]): The request object. Request for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. parent (:class:`str`): @@ -271,7 +271,7 @@ async def list_instances(self, return response async def get_instance(self, - request: cloud_redis.GetInstanceRequest = None, + request: Union[cloud_redis.GetInstanceRequest, dict] = None, *, name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -281,7 +281,7 @@ async def get_instance(self, r"""Gets the details of a specific Redis instance. Args: - request (:class:`google.cloud.redis_v1.types.GetInstanceRequest`): + request (Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]): The request object. Request for [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. 
name (:class:`str`): @@ -345,7 +345,7 @@ async def get_instance(self, return response async def create_instance(self, - request: cloud_redis.CreateInstanceRequest = None, + request: Union[cloud_redis.CreateInstanceRequest, dict] = None, *, parent: str = None, instance_id: str = None, @@ -370,7 +370,7 @@ async def create_instance(self, hours, so there is no need to call DeleteOperation. Args: - request (:class:`google.cloud.redis_v1.types.CreateInstanceRequest`): + request (Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]): The request object. Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. parent (:class:`str`): @@ -471,7 +471,7 @@ async def create_instance(self, return response async def update_instance(self, - request: cloud_redis.UpdateInstanceRequest = None, + request: Union[cloud_redis.UpdateInstanceRequest, dict] = None, *, update_mask: field_mask_pb2.FieldMask = None, instance: cloud_redis.Instance = None, @@ -487,7 +487,7 @@ async def update_instance(self, there is no need to call DeleteOperation. Args: - request (:class:`google.cloud.redis_v1.types.UpdateInstanceRequest`): + request (Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]): The request object. Request for [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -579,7 +579,7 @@ async def update_instance(self, return response async def upgrade_instance(self, - request: cloud_redis.UpgradeInstanceRequest = None, + request: Union[cloud_redis.UpgradeInstanceRequest, dict] = None, *, name: str = None, redis_version: str = None, @@ -591,7 +591,7 @@ async def upgrade_instance(self, specified in the request. Args: - request (:class:`google.cloud.redis_v1.types.UpgradeInstanceRequest`): + request (Union[google.cloud.redis_v1.types.UpgradeInstanceRequest, dict]): The request object. Request for [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. 
name (:class:`str`): @@ -677,7 +677,7 @@ async def upgrade_instance(self, return response async def import_instance(self, - request: cloud_redis.ImportInstanceRequest = None, + request: Union[cloud_redis.ImportInstanceRequest, dict] = None, *, name: str = None, input_config: cloud_redis.InputConfig = None, @@ -696,7 +696,7 @@ async def import_instance(self, few hours, so there is no need to call DeleteOperation. Args: - request (:class:`google.cloud.redis_v1.types.ImportInstanceRequest`): + request (Union[google.cloud.redis_v1.types.ImportInstanceRequest, dict]): The request object. Request for [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. name (:class:`str`): @@ -782,7 +782,7 @@ async def import_instance(self, return response async def export_instance(self, - request: cloud_redis.ExportInstanceRequest = None, + request: Union[cloud_redis.ExportInstanceRequest, dict] = None, *, name: str = None, output_config: cloud_redis.OutputConfig = None, @@ -797,7 +797,7 @@ async def export_instance(self, few hours, so there is no need to call DeleteOperation. Args: - request (:class:`google.cloud.redis_v1.types.ExportInstanceRequest`): + request (Union[google.cloud.redis_v1.types.ExportInstanceRequest, dict]): The request object. Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. name (:class:`str`): @@ -883,7 +883,7 @@ async def export_instance(self, return response async def failover_instance(self, - request: cloud_redis.FailoverInstanceRequest = None, + request: Union[cloud_redis.FailoverInstanceRequest, dict] = None, *, name: str = None, data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, @@ -896,7 +896,7 @@ async def failover_instance(self, Memorystore for Redis instance. Args: - request (:class:`google.cloud.redis_v1.types.FailoverInstanceRequest`): + request (Union[google.cloud.redis_v1.types.FailoverInstanceRequest, dict]): The request object. 
Request for [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. name (:class:`str`): @@ -983,7 +983,7 @@ async def failover_instance(self, return response async def delete_instance(self, - request: cloud_redis.DeleteInstanceRequest = None, + request: Union[cloud_redis.DeleteInstanceRequest, dict] = None, *, name: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -994,7 +994,7 @@ async def delete_instance(self, serving and data is deleted. Args: - request (:class:`google.cloud.redis_v1.types.DeleteInstanceRequest`): + request (Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]): The request object. Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. name (:class:`str`): From 21ec1ac60c5411421022af48e106241a6729e8f6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 29 Oct 2021 14:16:59 -0600 Subject: [PATCH 0643/1339] chore: release 0.53.4 (#1048) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 543a94b7eafa..55ba0b18f95a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +### [0.53.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.3...v0.53.4) (2021-10-29) + + +### Bug Fixes + +* add 'dict' type annotation to 'request' for async_client ([#1051](https://www.github.com/googleapis/gapic-generator-python/issues/1051)) ([08cc2c4](https://www.github.com/googleapis/gapic-generator-python/commit/08cc2c4c85297759892782e307bcaa63dff41212)) +* fix tests generation logic ([#1049](https://www.github.com/googleapis/gapic-generator-python/issues/1049)) 
([8f213ad](https://www.github.com/googleapis/gapic-generator-python/commit/8f213add4cb02366bb370ef46a686c6f0c37a575)) +* methods returning Operation w/o operation_info are now allowed. ([#1047](https://www.github.com/googleapis/gapic-generator-python/issues/1047)) ([6b640af](https://www.github.com/googleapis/gapic-generator-python/commit/6b640afbd93ea8c861b902211dc34e188234d072)) + ### [0.53.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.2...v0.53.3) (2021-10-27) From 7c6cd7f5b7b963ddd29f3f46d5b58386534d3cb9 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 29 Oct 2021 16:04:33 -0600 Subject: [PATCH 0644/1339] feat: generate code snippets by default (#1044) --- packages/gapic-generator/gapic/utils/options.py | 15 +++++++++++++-- .../tests/unit/generator/test_generator.py | 16 ++++++++++++---- 2 files changed, 25 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index d7bbe2473df6..d6c692c8fe49 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -37,7 +37,7 @@ class Options: warehouse_package_name: str = '' retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) - autogen_snippets: bool = False + autogen_snippets: bool = True templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) lazy_import: bool = False old_naming: bool = False @@ -132,6 +132,17 @@ def tweak_path(p): # Build the options instance. 
sample_paths = opts.pop('samples', []) + # autogen-snippets is True by default, so make sure users can disable + # by passing `autogen-snippets=false` + autogen_snippets = opts.pop( + "autogen-snippets", ["True"])[0] in ("True", "true", "T", "t", "TRUE") + + # NOTE: Snippets are not currently correct for the alternative (Ads) templates + # so always disable snippetgen in that case + # https://github.com/googleapis/gapic-generator-python/issues/1052 + if opts.get("old-naming"): + autogen_snippets = False + answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), @@ -143,7 +154,7 @@ def tweak_path(p): for s in sample_paths for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), - autogen_snippets=bool(opts.pop("autogen-snippets", False)), + autogen_snippets=autogen_snippets, templates=tuple(path.expanduser(i) for i in templates), lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index d068250e9729..26873b1d33cc 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -242,7 +242,10 @@ def test_get_response_enumerates_proto(): def test_get_response_divides_subpackages(): - g = make_generator() + # NOTE: autogen-snippets is intentionally disabled for this test + # The API schema below is incomplete and will result in errors when the + # snippetgen logic tries to parse it. 
+ g = make_generator("autogen-snippets=false") api_schema = api.API.build( [ descriptor_pb2.FileDescriptorProto( @@ -277,7 +280,7 @@ def test_get_response_divides_subpackages(): """.strip() ) cgr = g.get_response(api_schema=api_schema, - opts=Options.build("")) + opts=Options.build("autogen-snippets=false")) assert len(cgr.file) == 6 assert {i.name for i in cgr.file} == { "foo/types/top.py", @@ -683,7 +686,12 @@ def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): ), ) - generator = make_generator(f"samples={config_fpath}") + # NOTE: autogen-snippets is intentionally disabled for this test + # The API schema below is incomplete and will result in errors when the + # snippetgen logic attempts to parse it. + generator = make_generator( + f"samples={config_fpath},autogen-snippets=False") + print(generator) generator._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = make_api( make_proto( @@ -743,7 +751,7 @@ def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): expected.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL actual = generator.get_response( - api_schema=api_schema, opts=Options.build("") + api_schema=api_schema, opts=Options.build("autogen-snippets=False") ) assert actual == expected From 91adf2f515c772a4d6a3cd0f8dae6b6868ce5e44 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 29 Oct 2021 22:08:17 +0000 Subject: [PATCH 0645/1339] chore: release 0.54.0 (#1054) :robot: I have created a release \*beep\* \*boop\* --- ## [0.54.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.4...v0.54.0) (2021-10-29) ### Features * generate code snippets by default ([#1044](https://www.github.com/googleapis/gapic-generator-python/issues/1044)) ([e46f443](https://www.github.com/googleapis/gapic-generator-python/commit/e46f443dbeffe16b63f97668801b06189769e972)) --- This PR was generated with 
[Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 55ba0b18f95a..c5e2c24cffb0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.54.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.4...v0.54.0) (2021-10-29) + + +### Features + +* generate code snippets by default ([#1044](https://www.github.com/googleapis/gapic-generator-python/issues/1044)) ([e46f443](https://www.github.com/googleapis/gapic-generator-python/commit/e46f443dbeffe16b63f97668801b06189769e972)) + ### [0.53.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.3...v0.53.4) (2021-10-29) From bc5d31404153ac13f55435481672e0f43b9b4833 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 1 Nov 2021 11:52:21 -0700 Subject: [PATCH 0646/1339] feat: add fragment tests (#1056) Fragment tests are defined by a small proto file describing an API surface with characteristics such that it is desirable to test the generated surface for correctness or to prevent regressions. As part of a fragment test, the generator is run on a fragment to create a GAPIC library for the fragment. The generated unit tests for the fragment are then executed to test the surface. 
--- .../.github/workflows/tests.yaml | 57 +- packages/gapic-generator/WORKSPACE | 4 - .../%sub/services/%service/client.py.j2 | 6 + .../%name_%version/%sub/test_%service.py.j2 | 20 +- packages/gapic-generator/noxfile.py | 143 ++- .../tests/fragments/google/api/client.proto | 99 ++ .../google/protobuf/descriptor.proto | 909 ++++++++++++++++++ .../fragments/google/protobuf/struct.proto | 95 ++ .../tests/fragments/import.proto | 21 + .../fragments/test_flattened_value.proto | 37 + .../tests/fragments/test_keyword_import.proto | 33 + .../fragments/test_optional_signature.proto | 35 + .../fragments/test_recursive_messages.proto | 34 + .../fragments/test_reserved_field_name.proto | 41 + .../unit/gapic/asset_v1/test_asset_service.py | 88 +- .../credentials_v1/test_iam_credentials.py | 104 +- .../logging_v2/test_config_service_v2.py | 144 ++- .../logging_v2/test_logging_service_v2.py | 72 +- .../logging_v2/test_metrics_service_v2.py | 56 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 128 ++- 20 files changed, 1909 insertions(+), 217 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/google/api/client.proto create mode 100644 packages/gapic-generator/tests/fragments/google/protobuf/descriptor.proto create mode 100644 packages/gapic-generator/tests/fragments/google/protobuf/struct.proto create mode 100644 packages/gapic-generator/tests/fragments/import.proto create mode 100644 packages/gapic-generator/tests/fragments/test_flattened_value.proto create mode 100644 packages/gapic-generator/tests/fragments/test_keyword_import.proto create mode 100644 packages/gapic-generator/tests/fragments/test_optional_signature.proto create mode 100644 packages/gapic-generator/tests/fragments/test_recursive_messages.proto create mode 100644 packages/gapic-generator/tests/fragments/test_reserved_field_name.proto diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 2ff05a9095c2..33f2264da684 
100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -42,39 +42,6 @@ jobs: run: python -m pip install nox - name: Check type annotations. run: nox -s mypy - # publish_image: - # runs-on: ubuntu-latest - # container: docker - # steps: - # - uses: actions/checkout@v2 - # - setup_remote_docker - # - name: Build Docker image. - # run: docker build . -t gcr.io/gapic-images/gapic-generator-python:latest - # - name: Download curl - # run: apk add --no-cache curl - # - name: Download the GCR credential helper. - # run: | - # curl -fsSL https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v1.5.0/docker-credential-gcr_linux_amd64-1.5.0.tar.gz \ - # | tar xz --to-stdout ./docker-credential-gcr \ - # > /usr/bin/docker-credential-gcr && chmod a+x /usr/bin/docker-credential-gcr - # - name: Set up authentication to Google Container Registry. - # run: | - # echo ${GCLOUD_SERVICE_KEY} > ${GOOGLE_APPLICATION_CREDENTIALS} - # docker-credential-gcr configure-docker - # - name: Tag the Docker image and push it to Google Container Registry. - # run: | - # if [ -n "$CIRCLE_TAG" ]; then - # export MAJOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $1; }'` - # export MINOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $2; }'` - # export PATCH=`echo $CIRCLE_TAG | awk -F '.' 
'{ print $3; }'` - # docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH - # docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR - # docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR - # docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH - # docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR - # docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR - # fi - # docker push gcr.io/gapic-images/gapic-generator-python:latest showcase: strategy: matrix: @@ -319,6 +286,30 @@ jobs: python -m pip install nox - name: Run unit tests. run: nox -s unit-${{ matrix.python }} + fragment: + strategy: + matrix: + python: [3.6, 3.7, 3.8, 3.9] + runs-on: ubuntu-latest + steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.7.0 + with: + access_token: ${{ github.token }} + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install pandoc + run: | + sudo apt-get update + sudo apt-get install -y pandoc gcc git + - name: Install nox. + run: | + python -m pip install nox + - name: Run fragment tests. 
+ run: nox -s fragment-${{ matrix.python }} integration: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 65b9832e7ee5..8bf6d5fb0d6c 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -13,10 +13,6 @@ http_archive( url = "https://github.com/bazelbuild/rules_python/archive/0.1.0.tar.gz", ) -load("@rules_python//python:pip.bzl", "pip_repositories") - -pip_repositories() - # # Import gapic-generator-python specific dependencies # diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 809f728dd179..1e0ceecd1e12 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -405,7 +405,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} {% for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: + {# Repeated values is a special case, because values can be lists. 
#} + {# In order to not confuse the marshalling logic, extend these fields instead of assigning #} + {% if field.ident.ident|string() == "struct_pb2.Value" and field.repeated %} + request.{{ key }}.extend({{ field.name }}) + {% else %} request.{{ key }} = {{ field.name }} + {% endif %}{# struct_pb2.Value #} {% endfor %} {# Map-y fields can be _updated_, however #} {% for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ad2e82861c49..865aabdd0c77 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -788,7 +788,15 @@ def test_{{ method.name|snake_case }}_flattened(): {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} - assert args[0].{{ key }} == {{ field.mock_value }} + arg = args[0].{{ key }} + mock_val = {{ field.mock_value }} + {% if field.ident|string() == "struct_pb2.Value" %} + from proto.marshal import Marshal + from proto.marshal.rules.struct import ValueRule + rule = ValueRule(marshal=Marshal(name="Test")) + mock_val = rule.to_python(mock_val) + {% endif %}{# struct_pb2.Value #} + assert arg == mock_val {% endif %} {% endif %}{% endfor %} {% for oneofs in method.flattened_oneof_fields().values() %} @@ -873,7 +881,15 @@ async def test_{{ method.name|snake_case }}_flattened_async(): {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} - assert args[0].{{ key }} == {{ field.mock_value }} + arg = args[0].{{ key }} + mock_val = {{ 
field.mock_value }} + {% if field.ident|string() == "struct_pb2.Value" %} + from proto.marshal import Marshal + from proto.marshal.rules.struct import ValueRule + rule = ValueRule(marshal=Marshal(name="Test")) + mock_val = rule.to_python(mock_val) + {% endif %}{# struct_pb2.Value #} + assert arg == mock_val {% endif %} {% endif %}{% endfor %} {% for oneofs in method.flattened_oneof_fields().values() %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index c4e3f34f4332..a656b2943ded 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -13,6 +13,7 @@ # limitations under the License. from __future__ import absolute_import +from concurrent.futures import ThreadPoolExecutor from pathlib import Path import os import sys @@ -29,7 +30,17 @@ ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") -@nox.session(python=["3.6", "3.7", "3.8", "3.9", "3.10"]) +ALL_PYTHON = ( + "3.6", + "3.7", + "3.8", + "3.9", +) + +NEWEST_PYTHON = "3.9" + + +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -50,11 +61,89 @@ def unit(session): "--cov-report=term", "--cov-fail-under=100", path.join("tests", "unit"), - ] + ] ), ) +FRAG_DIR = Path("tests") / "fragments" +FRAGMENT_FILES = tuple( + Path(dirname).relative_to(FRAG_DIR) / f + for dirname, _, files in os.walk(FRAG_DIR) + for f in files + if os.path.splitext(f)[1] == ".proto" and f.startswith("test_") +) + +# Note: this class lives outside 'fragment' +# so that, if necessary, it can be pickled for a ProcessPoolExecutor +# A callable class is necessary so that the session can be closed over +# instead of passed in, which simplifies the invocation via map. +class FragTester: + def __init__(self, session): + self.session = session + + def __call__(self, frag): + with tempfile.TemporaryDirectory() as tmp_dir: + # Generate the fragment GAPIC. 
+ outputs = [] + outputs.append( + self.session.run( + "python", + "-m", + "grpc_tools.protoc", + f"--proto_path={str(FRAG_DIR)}", + f"--python_gapic_out={tmp_dir}", + "--python_gapic_opt=transport=grpc+rest", + str(frag), + external=True, + silent=True, + ) + ) + + # Install the generated fragment library. + # Note: install into the tempdir to prevent issues + # with running pip concurrently. + self.session.install(tmp_dir, "-e", ".", "-t", tmp_dir, "-qqq") + + # Run the fragment's generated unit tests. + # Don't bother parallelizing them: we already parallelize + # the fragments, and there usually aren't too many tests per fragment. + outputs.append( + self.session.run( + "py.test", + "--quiet", + f"--cov-config={str(Path(tmp_dir) / '.coveragerc')}", + "--cov-report=term", + "--cov-fail-under=100", + str(Path(tmp_dir) / "tests" / "unit"), + silent=True, + ) + ) + + return "".join(outputs) + + +# TODO(dovs): ads templates +@nox.session(python=ALL_PYTHON) +def fragment(session): + session.install( + "coverage", + "pytest", + "pytest-cov", + "pytest-xdist", + "asyncmock", + "pytest-asyncio", + "grpcio-tools", + ) + session.install("-e", ".") + + with ThreadPoolExecutor() as p: + all_outs = p.map(FragTester(session), FRAGMENT_FILES) + + output = "".join(all_outs) + session.log(output) + + # TODO(yon-mg): -add compute context manager that includes rest transport # -add compute unit tests # (to test against temporarily while rest transport is incomplete) @@ -114,8 +203,7 @@ def showcase_library( f"google/showcase/v1beta1/messaging.proto", ) session.run( - *cmd_tup, - external=True, + *cmd_tup, external=True, ) # Install the library. 
@@ -124,7 +212,7 @@ def showcase_library( yield tmp_dir -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def showcase( session, templates="DEFAULT", @@ -136,12 +224,14 @@ def showcase( with showcase_library(session, templates=templates, other_opts=other_opts): session.install("mock", "pytest", "pytest-asyncio") session.run( - "py.test", "--quiet", *(session.posargs or [path.join("tests", "system")]), + "py.test", + "--quiet", + *(session.posargs or [path.join("tests", "system")]), env=env, ) -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def showcase_mtls( session, templates="DEFAULT", @@ -161,7 +251,7 @@ def showcase_mtls( ) -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def showcase_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") showcase( @@ -172,7 +262,7 @@ def showcase_alternative_templates(session): ) -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def showcase_mtls_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") showcase_mtls( @@ -200,12 +290,12 @@ def run_showcase_unit_tests(session, fail_under=100): "--quiet", "--cov=google", "--cov-append", - f"--cov-fail-under={str(fail_under)}", + f"--cov-fail-under={str(fail_under)}", *(session.posargs or [path.join("tests", "unit")]), ) -@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) +@nox.session(python=ALL_PYTHON) def showcase_unit( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): @@ -233,14 +323,16 @@ def showcase_unit( run_showcase_unit_tests(session, fail_under=100) -@nox.session(python=["3.7", "3.8", "3.9"]) +@nox.session(python=ALL_PYTHON[1:]) # Do not test 3.6 def showcase_unit_alternative_templates(session): - with showcase_library(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) as lib: + with showcase_library( + session, templates=ADS_TEMPLATES, other_opts=("old-naming",) + ) as lib: 
session.chdir(lib) run_showcase_unit_tests(session) -@nox.session(python=["3.9"]) +@nox.session(python=NEWEST_PYTHON) def showcase_unit_add_iam_methods(session): with showcase_library(session, other_opts=("add-iam-methods",)) as lib: session.chdir(lib) @@ -257,7 +349,7 @@ def showcase_unit_add_iam_methods(session): run_showcase_unit_tests(session, fail_under=100) -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def showcase_mypy( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): @@ -273,12 +365,12 @@ def showcase_mypy( session.run("mypy", "--explicit-package-bases", "google") -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def showcase_mypy_alternative_templates(session): showcase_mypy(session, templates=ADS_TEMPLATES, other_opts=("old-naming",)) -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def snippetgen(session): # Clone googleapis/api-common-protos which are referenced by the snippet # protos @@ -299,14 +391,10 @@ def snippetgen(session): session.install("grpcio-tools", "mock", "pytest", "pytest-asyncio") - session.run( - "py.test", - "-vv", - "tests/snippetgen" - ) + session.run("py.test", "-vv", "tests/snippetgen") -@nox.session(python="3.9") +@nox.session(python=NEWEST_PYTHON) def docs(session): """Build the docs.""" @@ -327,15 +415,10 @@ def docs(session): ) -@nox.session(python=["3.7", "3.8", "3.9"]) +@nox.session(python=NEWEST_PYTHON) def mypy(session): """Perform typecheck analysis.""" - session.install( - "mypy", - "types-protobuf", - "types-PyYAML", - "types-dataclasses" - ) + session.install("mypy", "types-protobuf", "types-PyYAML", "types-dataclasses") session.install(".") session.run("mypy", "gapic") diff --git a/packages/gapic-generator/tests/fragments/google/api/client.proto b/packages/gapic-generator/tests/fragments/google/api/client.proto new file mode 100644 index 000000000000..2102623d305f --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/api/client.proto 
@@ -0,0 +1,99 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "ClientProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // A definition of a client library method signature. + // + // In client libraries, each proto RPC corresponds to one or more methods + // which the end user is able to call, and calls the underlying RPC. + // Normally, this method receives a single argument (a struct or instance + // corresponding to the RPC request object). Defining this field will + // add one or more overloads providing flattened or simpler method signatures + // in some languages. + // + // The fields on the method signature are provided as a comma-separated + // string. 
+ // + // For example, the proto RPC and annotation: + // + // rpc CreateSubscription(CreateSubscriptionRequest) + // returns (Subscription) { + // option (google.api.method_signature) = "name,topic"; + // } + // + // Would add the following Java overload (in addition to the method accepting + // the request object): + // + // public final Subscription createSubscription(String name, String topic) + // + // The following backwards-compatibility guidelines apply: + // + // * Adding this annotation to an unannotated method is backwards + // compatible. + // * Adding this annotation to a method which already has existing + // method signature annotations is backwards compatible if and only if + // the new method signature annotation is last in the sequence. + // * Modifying or removing an existing method signature annotation is + // a breaking change. + // * Re-ordering existing method signature annotations is a breaking + // change. + repeated string method_signature = 1051; +} + +extend google.protobuf.ServiceOptions { + // The hostname for this service. + // This should be specified with no prefix or protocol. + // + // Example: + // + // service Foo { + // option (google.api.default_host) = "foo.googleapi.com"; + // ... + // } + string default_host = 1049; + + // OAuth scopes needed for the client. + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform"; + // ... + // } + // + // If there is more than one scope, use a comma-separated string: + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform," + // "https://www.googleapis.com/auth/monitoring"; + // ... 
+ // } + string oauth_scopes = 1050; +} diff --git a/packages/gapic-generator/tests/fragments/google/protobuf/descriptor.proto b/packages/gapic-generator/tests/fragments/google/protobuf/descriptor.proto new file mode 100644 index 000000000000..9f0ce6cde071 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/protobuf/descriptor.proto @@ -0,0 +1,909 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Author: kenton@google.com (Kenton Varda) +// Based on original Protocol Buffers design by +// Sanjay Ghemawat, Jeff Dean, and others. +// +// The messages in this file describe the definitions found in .proto files. +// A valid .proto file can be translated directly to a FileDescriptorProto +// without any other information (e.g. without reading its imports). + + +syntax = "proto2"; + +package google.protobuf; + +option go_package = "google.golang.org/protobuf/types/descriptorpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "DescriptorProtos"; +option csharp_namespace = "Google.Protobuf.Reflection"; +option objc_class_prefix = "GPB"; +option cc_enable_arenas = true; + +// descriptor.proto must be optimized for speed because reflection-based +// algorithms don't work during bootstrapping. +option optimize_for = SPEED; + +// The protocol compiler can output a FileDescriptorSet containing the .proto +// files it parses. +message FileDescriptorSet { + repeated FileDescriptorProto file = 1; +} + +// Describes a complete .proto file. +message FileDescriptorProto { + optional string name = 1; // file name, relative to root of source tree + optional string package = 2; // e.g. "foo", "foo.bar", etc. + + // Names of files imported by this file. + repeated string dependency = 3; + // Indexes of the public imported files in the dependency list above. 
+ repeated int32 public_dependency = 10; + // Indexes of the weak imported files in the dependency list. + // For Google-internal migration only. Do not use. + repeated int32 weak_dependency = 11; + + // All top-level definitions in this file. + repeated DescriptorProto message_type = 4; + repeated EnumDescriptorProto enum_type = 5; + repeated ServiceDescriptorProto service = 6; + repeated FieldDescriptorProto extension = 7; + + optional FileOptions options = 8; + + // This field contains optional information about the original source code. + // You may safely remove this entire field without harming runtime + // functionality of the descriptors -- the information is needed only by + // development tools. + optional SourceCodeInfo source_code_info = 9; + + // The syntax of the proto file. + // The supported values are "proto2" and "proto3". + optional string syntax = 12; +} + +// Describes a message type. +message DescriptorProto { + optional string name = 1; + + repeated FieldDescriptorProto field = 2; + repeated FieldDescriptorProto extension = 6; + + repeated DescriptorProto nested_type = 3; + repeated EnumDescriptorProto enum_type = 4; + + message ExtensionRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + + optional ExtensionRangeOptions options = 3; + } + repeated ExtensionRange extension_range = 5; + + repeated OneofDescriptorProto oneof_decl = 8; + + optional MessageOptions options = 7; + + // Range of reserved tag numbers. Reserved tag numbers may not be used by + // fields or extension ranges in the same message. Reserved ranges may + // not overlap. + message ReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Exclusive. + } + repeated ReservedRange reserved_range = 9; + // Reserved field names, which may not be used by fields in the same message. + // A given name may only be reserved once. 
+ repeated string reserved_name = 10; +} + +message ExtensionRangeOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +// Describes a field within a message. +message FieldDescriptorProto { + enum Type { + // 0 is reserved for errors. + // Order is weird for historical reasons. + TYPE_DOUBLE = 1; + TYPE_FLOAT = 2; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + // negative values are likely. + TYPE_INT64 = 3; + TYPE_UINT64 = 4; + // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + // negative values are likely. + TYPE_INT32 = 5; + TYPE_FIXED64 = 6; + TYPE_FIXED32 = 7; + TYPE_BOOL = 8; + TYPE_STRING = 9; + // Tag-delimited aggregate. + // Group type is deprecated and not supported in proto3. However, Proto3 + // implementations should still be able to parse the group wire format and + // treat group fields as unknown fields. + TYPE_GROUP = 10; + TYPE_MESSAGE = 11; // Length-delimited aggregate. + + // New in version 2. + TYPE_BYTES = 12; + TYPE_UINT32 = 13; + TYPE_ENUM = 14; + TYPE_SFIXED32 = 15; + TYPE_SFIXED64 = 16; + TYPE_SINT32 = 17; // Uses ZigZag encoding. + TYPE_SINT64 = 18; // Uses ZigZag encoding. + } + + enum Label { + // 0 is reserved for errors + LABEL_OPTIONAL = 1; + LABEL_REQUIRED = 2; + LABEL_REPEATED = 3; + } + + optional string name = 1; + optional int32 number = 3; + optional Label label = 4; + + // If type_name is set, this need not be set. If both this and type_name + // are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + optional Type type = 5; + + // For message and enum types, this is the name of the type. If the name + // starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + // rules are used to find the type (i.e. 
first the nested types within this + // message are searched, then within the parent, on up to the root + // namespace). + optional string type_name = 6; + + // For extensions, this is the name of the type being extended. It is + // resolved in the same manner as type_name. + optional string extendee = 2; + + // For numeric types, contains the original text representation of the value. + // For booleans, "true" or "false". + // For strings, contains the default text contents (not escaped in any way). + // For bytes, contains the C escaped value. All bytes >= 128 are escaped. + // TODO(kenton): Base-64 encode? + optional string default_value = 7; + + // If set, gives the index of a oneof in the containing type's oneof_decl + // list. This field is a member of that oneof. + optional int32 oneof_index = 9; + + // JSON name of this field. The value is set by protocol compiler. If the + // user has set a "json_name" option on this field, that option's value + // will be used. Otherwise, it's deduced from the field's name by converting + // it to camelCase. + optional string json_name = 10; + + optional FieldOptions options = 8; + + // If true, this is a proto3 "optional". When a proto3 field is optional, it + // tracks presence regardless of field type. + // + // When proto3_optional is true, this field must be belong to a oneof to + // signal to old proto3 clients that presence is tracked for this field. This + // oneof is known as a "synthetic" oneof, and this field must be its sole + // member (each proto3 optional field gets its own synthetic oneof). Synthetic + // oneofs exist in the descriptor only, and do not generate any API. Synthetic + // oneofs must be ordered after all "real" oneofs. + // + // For message fields, proto3_optional doesn't create any semantic change, + // since non-repeated message fields always track presence. However it still + // indicates the semantic detail of whether the user wrote "optional" or not. 
+ // This can be useful for round-tripping the .proto file. For consistency we + // give message fields a synthetic oneof also, even though it is not required + // to track presence. This is especially important because the parser can't + // tell if a field is a message or an enum, so it must always create a + // synthetic oneof. + // + // Proto2 optional fields do not set this flag, because they already indicate + // optional with `LABEL_OPTIONAL`. + optional bool proto3_optional = 17; +} + +// Describes a oneof. +message OneofDescriptorProto { + optional string name = 1; + optional OneofOptions options = 2; +} + +// Describes an enum type. +message EnumDescriptorProto { + optional string name = 1; + + repeated EnumValueDescriptorProto value = 2; + + optional EnumOptions options = 3; + + // Range of reserved numeric values. Reserved values may not be used by + // entries in the same enum. Reserved ranges may not overlap. + // + // Note that this is distinct from DescriptorProto.ReservedRange in that it + // is inclusive such that it can appropriately represent the entire int32 + // domain. + message EnumReservedRange { + optional int32 start = 1; // Inclusive. + optional int32 end = 2; // Inclusive. + } + + // Range of reserved numeric values. Reserved numeric values may not be used + // by enum values in the same enum declaration. Reserved ranges may not + // overlap. + repeated EnumReservedRange reserved_range = 4; + + // Reserved enum value names, which may not be reused. A given name may only + // be reserved once. + repeated string reserved_name = 5; +} + +// Describes a value within an enum. +message EnumValueDescriptorProto { + optional string name = 1; + optional int32 number = 2; + + optional EnumValueOptions options = 3; +} + +// Describes a service. +message ServiceDescriptorProto { + optional string name = 1; + repeated MethodDescriptorProto method = 2; + + optional ServiceOptions options = 3; +} + +// Describes a method of a service. 
+message MethodDescriptorProto { + optional string name = 1; + + // Input and output type names. These are resolved in the same way as + // FieldDescriptorProto.type_name, but must refer to a message type. + optional string input_type = 2; + optional string output_type = 3; + + optional MethodOptions options = 4; + + // Identifies if client streams multiple client messages + optional bool client_streaming = 5 [default = false]; + // Identifies if server streams multiple server messages + optional bool server_streaming = 6 [default = false]; +} + + +// =================================================================== +// Options + +// Each of the definitions above may have "options" attached. These are +// just annotations which may cause code to be generated slightly differently +// or may contain hints for code that manipulates protocol messages. +// +// Clients may define custom options as extensions of the *Options messages. +// These extensions may not yet be known at parsing time, so the parser cannot +// store the values in them. Instead it stores them in a field in the *Options +// message called uninterpreted_option. This field must have the same name +// across all *Options messages. We then use this field to populate the +// extensions when we build a descriptor, at which point all protos have been +// parsed and so all extensions are known. +// +// Extension numbers for custom options may be chosen as follows: +// * For options which will only be used within a single application or +// organization, or for experimental options, use field numbers 50000 +// through 99999. It is up to you to ensure that you do not use the +// same number for multiple options. +// * For options which will be published and used publicly by multiple +// independent entities, e-mail protobuf-global-extension-registry@google.com +// to reserve extension numbers. Simply provide your project name (e.g. 
+// Objective-C plugin) and your project website (if available) -- there's no +// need to explain how you intend to use them. Usually you only need one +// extension number. You can declare multiple options with only one extension +// number by putting them in a sub-message. See the Custom Options section of +// the docs for examples: +// https://developers.google.com/protocol-buffers/docs/proto#options +// If this turns out to be popular, a web service will be set up +// to automatically assign option numbers. + +message FileOptions { + + // Sets the Java package where classes generated from this .proto will be + // placed. By default, the proto package is used, but this is often + // inappropriate because proto packages do not normally start with backwards + // domain names. + optional string java_package = 1; + + + // If set, all the classes from the .proto file are wrapped in a single + // outer class with the given name. This applies to both Proto1 + // (equivalent to the old "--one_java_file" option) and Proto2 (where + // a .proto always translates to a single class, but you may want to + // explicitly choose the class name). + optional string java_outer_classname = 8; + + // If set true, then the Java code generator will generate a separate .java + // file for each top-level message, enum, and service defined in the .proto + // file. Thus, these types will *not* be nested inside the outer class + // named by java_outer_classname. However, the outer class will still be + // generated to contain the file's getDescriptor() method as well as any + // top-level extensions defined in the file. + optional bool java_multiple_files = 10 [default = false]; + + // This option does nothing. + optional bool java_generate_equals_and_hash = 20 [deprecated=true]; + + // If set true, then the Java2 code generator will generate code that + // throws an exception whenever an attempt is made to assign a non-UTF-8 + // byte sequence to a string field. 
+ // Message reflection will do the same. + // However, an extension field still accepts non-UTF-8 byte sequences. + // This option has no effect on when used with the lite runtime. + optional bool java_string_check_utf8 = 27 [default = false]; + + + // Generated classes can be optimized for speed or code size. + enum OptimizeMode { + SPEED = 1; // Generate complete code for parsing, serialization, + // etc. + CODE_SIZE = 2; // Use ReflectionOps to implement these methods. + LITE_RUNTIME = 3; // Generate code using MessageLite and the lite runtime. + } + optional OptimizeMode optimize_for = 9 [default = SPEED]; + + // Sets the Go package where structs generated from this .proto will be + // placed. If omitted, the Go package will be derived from the following: + // - The basename of the package import path, if provided. + // - Otherwise, the package statement in the .proto file, if present. + // - Otherwise, the basename of the .proto file, without extension. + optional string go_package = 11; + + + + + // Should generic services be generated in each language? "Generic" services + // are not specific to any particular RPC system. They are generated by the + // main code generators in each language (without additional plugins). + // Generic services were the only kind of service generation supported by + // early versions of google.protobuf. + // + // Generic services are now considered deprecated in favor of using plugins + // that generate code specific to your particular RPC system. Therefore, + // these default to false. Old code which depends on generic services should + // explicitly set them to true. + optional bool cc_generic_services = 16 [default = false]; + optional bool java_generic_services = 17 [default = false]; + optional bool py_generic_services = 18 [default = false]; + optional bool php_generic_services = 42 [default = false]; + + // Is this file deprecated? 
+ // Depending on the target platform, this can emit Deprecated annotations + // for everything in the file, or it will be completely ignored; in the very + // least, this is a formalization for deprecating files. + optional bool deprecated = 23 [default = false]; + + // Enables the use of arenas for the proto messages in this file. This applies + // only to generated classes for C++. + optional bool cc_enable_arenas = 31 [default = true]; + + + // Sets the objective c class prefix which is prepended to all objective c + // generated classes from this .proto. There is no default. + optional string objc_class_prefix = 36; + + // Namespace for generated classes; defaults to the package. + optional string csharp_namespace = 37; + + // By default Swift generators will take the proto package and CamelCase it + // replacing '.' with underscore and use that to prefix the types/symbols + // defined. When this options is provided, they will use this value instead + // to prefix the types/symbols defined. + optional string swift_prefix = 39; + + // Sets the php class prefix which is prepended to all php generated classes + // from this .proto. Default is empty. + optional string php_class_prefix = 40; + + // Use this option to change the namespace of php generated classes. Default + // is empty. When this option is empty, the package name will be used for + // determining the namespace. + optional string php_namespace = 41; + + // Use this option to change the namespace of php generated metadata classes. + // Default is empty. When this option is empty, the proto file name will be + // used for determining the namespace. + optional string php_metadata_namespace = 44; + + // Use this option to change the package of ruby generated classes. Default + // is empty. When this option is not set, the package name will be used for + // determining the ruby package. + optional string ruby_package = 45; + + + // The parser stores options it doesn't recognize here. 
+ // See the documentation for the "Options" section above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. + // See the documentation for the "Options" section above. + extensions 1000 to max; + + reserved 38; +} + +message MessageOptions { + // Set true to use the old proto1 MessageSet wire format for extensions. + // This is provided for backwards-compatibility with the MessageSet wire + // format. You should not use this for any other reason: It's less + // efficient, has fewer features, and is more complicated. + // + // The message must be defined exactly as follows: + // message Foo { + // option message_set_wire_format = true; + // extensions 4 to max; + // } + // Note that the message cannot have any defined fields; MessageSets only + // have extensions. + // + // All extensions of your type must be singular messages; e.g. they cannot + // be int32s, enums, or repeated messages. + // + // Because this is an option, the above two restrictions are not enforced by + // the protocol compiler. + optional bool message_set_wire_format = 1 [default = false]; + + // Disables the generation of the standard "descriptor()" accessor, which can + // conflict with a field of the same name. This is meant to make migration + // from proto1 easier; new code should avoid fields named "descriptor". + optional bool no_standard_descriptor_accessor = 2 [default = false]; + + // Is this message deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the message, or it will be completely ignored; in the very least, + // this is a formalization for deprecating messages. + optional bool deprecated = 3 [default = false]; + + // Whether the message is an automatically generated map entry type for the + // maps field. 
+ // + // For maps fields: + // map<KeyType, ValueType> map_field = 1; + // The parsed descriptor looks like: + // message MapFieldEntry { + // option map_entry = true; + // optional KeyType key = 1; + // optional ValueType value = 2; + // } + // repeated MapFieldEntry map_field = 1; + // + // Implementations may choose not to generate the map_entry=true message, but + // use a native map in the target language to hold the keys and values. + // The reflection APIs in such implementations still need to work as + // if the field is a repeated message field. + // + // NOTE: Do not set the option in .proto files. Always use the maps syntax + // instead. The option should only be implicitly set by the proto compiler + // parser. + optional bool map_entry = 7; + + reserved 8; // javalite_serializable + reserved 9; // javanano_as_lite + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message FieldOptions { + // The ctype option instructs the C++ code generator to use a different + // representation of the field than it normally would. See the specific + // options below. This option is not yet implemented in the open source + // release -- sorry, we'll try to include it in a future version! + optional CType ctype = 1 [default = STRING]; + enum CType { + // Default mode. + STRING = 0; + + CORD = 1; + + STRING_PIECE = 2; + } + // The packed option can be enabled for repeated primitive fields to enable + // a more efficient representation on the wire. Rather than repeatedly + // writing the tag and type for each element, the entire array is encoded as + // a single length-delimited blob. In proto3, only explicit setting it to + // false will avoid using packed encoding. + optional bool packed = 2; + + // The jstype option determines the JavaScript type used for values of the + // field.
The option is permitted only for 64 bit integral and fixed types + // (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + // is represented as JavaScript string, which avoids loss of precision that + // can happen when a large value is converted to a floating point JavaScript. + // Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + // use the JavaScript "number" type. The behavior of the default option + // JS_NORMAL is implementation dependent. + // + // This option is an enum to permit additional types to be added, e.g. + // goog.math.Integer. + optional JSType jstype = 6 [default = JS_NORMAL]; + enum JSType { + // Use the default type. + JS_NORMAL = 0; + + // Use JavaScript strings. + JS_STRING = 1; + + // Use JavaScript numbers. + JS_NUMBER = 2; + } + + // Should this field be parsed lazily? Lazy applies only to message-type + // fields. It means that when the outer message is initially parsed, the + // inner message's contents will not be parsed but instead stored in encoded + // form. The inner message will actually be parsed when it is first accessed. + // + // This is only a hint. Implementations are free to choose whether to use + // eager or lazy parsing regardless of the value of this option. However, + // setting this option true suggests that the protocol author believes that + // using lazy parsing on this field is worth the additional bookkeeping + // overhead typically needed to implement it. + // + // This option does not affect the public interface of any generated code; + // all method signatures remain the same. Furthermore, thread-safety of the + // interface is not affected by this option; const methods remain safe to + // call from multiple threads concurrently, while non-const methods continue + // to require exclusive access. + // + // + // Note that implementations may choose not to check required fields within + // a lazy sub-message. 
That is, calling IsInitialized() on the outer message + // may return true even if the inner message has missing required fields. + // This is necessary because otherwise the inner message would have to be + // parsed in order to perform the check, defeating the purpose of lazy + // parsing. An implementation which chooses not to check required fields + // must be consistent about it. That is, for any particular sub-message, the + // implementation must either *always* check its required fields, or *never* + // check its required fields, regardless of whether or not the message has + // been parsed. + optional bool lazy = 5 [default = false]; + + // Is this field deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for accessors, or it will be completely ignored; in the very least, this + // is a formalization for deprecating fields. + optional bool deprecated = 3 [default = false]; + + // For Google-internal migration only. Do not use. + optional bool weak = 10 [default = false]; + + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; + + reserved 4; // removed jtype +} + +message OneofOptions { + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumOptions { + + // Set this option to true to allow mapping different tag names to the same + // value. + optional bool allow_alias = 2; + + // Is this enum deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum, or it will be completely ignored; in the very least, this + // is a formalization for deprecating enums. 
+ optional bool deprecated = 3 [default = false]; + + reserved 5; // javanano_as_lite + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message EnumValueOptions { + // Is this enum value deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the enum value, or it will be completely ignored; in the very least, + // this is a formalization for deprecating enum values. + optional bool deprecated = 1 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message ServiceOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. + + // Is this service deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the service, or it will be completely ignored; in the very least, + // this is a formalization for deprecating services. + optional bool deprecated = 33 [default = false]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + +message MethodOptions { + + // Note: Field numbers 1 through 32 are reserved for Google's internal RPC + // framework. We apologize for hoarding these numbers to ourselves, but + // we were already using them long before we decided to release Protocol + // Buffers. 
+ + // Is this method deprecated? + // Depending on the target platform, this can emit Deprecated annotations + // for the method, or it will be completely ignored; in the very least, + // this is a formalization for deprecating methods. + optional bool deprecated = 33 [default = false]; + + // Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + // or neither? HTTP based RPC implementation may choose GET verb for safe + // methods, and PUT verb for idempotent methods instead of the default POST. + enum IdempotencyLevel { + IDEMPOTENCY_UNKNOWN = 0; + NO_SIDE_EFFECTS = 1; // implies idempotent + IDEMPOTENT = 2; // idempotent, but may have side effects + } + optional IdempotencyLevel idempotency_level = 34 + [default = IDEMPOTENCY_UNKNOWN]; + + // The parser stores options it doesn't recognize here. See above. + repeated UninterpretedOption uninterpreted_option = 999; + + // Clients can define custom options in extensions of this message. See above. + extensions 1000 to max; +} + + +// A message representing a option the parser does not recognize. This only +// appears in options protos created by the compiler::Parser class. +// DescriptorPool resolves these when building Descriptor objects. Therefore, +// options protos in descriptor objects (e.g. returned by Descriptor::options(), +// or produced by Descriptor::CopyTo()) will never have UninterpretedOptions +// in them. +message UninterpretedOption { + // The name of the uninterpreted option. Each string represents a segment in + // a dot-separated name. is_extension is true iff a segment represents an + // extension (denoted with parentheses in options specs in .proto files). + // E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + // "foo.(bar.baz).qux". 
+ message NamePart { + required string name_part = 1; + required bool is_extension = 2; + } + repeated NamePart name = 2; + + // The value of the uninterpreted option, in whatever type the tokenizer + // identified it as during parsing. Exactly one of these should be set. + optional string identifier_value = 3; + optional uint64 positive_int_value = 4; + optional int64 negative_int_value = 5; + optional double double_value = 6; + optional bytes string_value = 7; + optional string aggregate_value = 8; +} + +// =================================================================== +// Optional source code info + +// Encapsulates information about the original source file from which a +// FileDescriptorProto was generated. +message SourceCodeInfo { + // A Location identifies a piece of source code in a .proto file which + // corresponds to a particular definition. This information is intended + // to be useful to IDEs, code indexers, documentation generators, and similar + // tools. + // + // For example, say we have a file like: + // message Foo { + // optional string foo = 1; + // } + // Let's look at just the field definition: + // optional string foo = 1; + // ^ ^^ ^^ ^ ^^^ + // a bc de f ghi + // We have the following locations: + // span path represents + // [a,i) [ 4, 0, 2, 0 ] The whole field definition. + // [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + // [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + // [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + // [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + // + // Notes: + // - A location may refer to a repeated field itself (i.e. not to any + // particular index within it). This is used whenever a set of elements are + // logically enclosed in a single code segment. For example, an entire + // extend block (possibly containing multiple extension definitions) will + // have an outer location whose path refers to the "extensions" repeated + // field without an index. + // - Multiple locations may have the same path. 
This happens when a single + // logical declaration is spread out across multiple places. The most + // obvious example is the "extend" block again -- there may be multiple + // extend blocks in the same scope, each of which will have the same path. + // - A location's span is not always a subset of its parent's span. For + // example, the "extendee" of an extension declaration appears at the + // beginning of the "extend" block and is shared by all extensions within + // the block. + // - Just because a location's span is a subset of some other location's span + // does not mean that it is a descendant. For example, a "group" defines + // both a type and a field in a single declaration. Thus, the locations + // corresponding to the type and field and their components will overlap. + // - Code which tries to interpret locations should probably be designed to + // ignore those that it doesn't understand, as more types of locations could + // be recorded in the future. + repeated Location location = 1; + message Location { + // Identifies which part of the FileDescriptorProto was defined at this + // location. + // + // Each element is a field number or an index. They form a path from + // the root FileDescriptorProto to the place where the definition. For + // example, this path: + // [ 4, 3, 2, 7, 1 ] + // refers to: + // file.message_type(3) // 4, 3 + // .field(7) // 2, 7 + // .name() // 1 + // This is because FileDescriptorProto.message_type has field number 4: + // repeated DescriptorProto message_type = 4; + // and DescriptorProto.field has field number 2: + // repeated FieldDescriptorProto field = 2; + // and FieldDescriptorProto.name has field number 1: + // optional string name = 1; + // + // Thus, the above path gives the location of a field name. If we removed + // the last element: + // [ 4, 3, 2, 7 ] + // this path refers to the whole field declaration (from the beginning + // of the label to the terminating semicolon). 
+ repeated int32 path = 1 [packed = true]; + + // Always has exactly three or four elements: start line, start column, + // end line (optional, otherwise assumed same as start line), end column. + // These are packed into a single field for efficiency. Note that line + // and column numbers are zero-based -- typically you will want to add + // 1 to each before displaying to a user. + repeated int32 span = 2 [packed = true]; + + // If this SourceCodeInfo represents a complete declaration, these are any + // comments appearing before and after the declaration which appear to be + // attached to the declaration. + // + // A series of line comments appearing on consecutive lines, with no other + // tokens appearing on those lines, will be treated as a single comment. + // + // leading_detached_comments will keep paragraphs of comments that appear + // before (but not connected to) the current element. Each paragraph, + // separated by empty lines, will be one comment element in the repeated + // field. + // + // Only the comment content is provided; comment markers (e.g. //) are + // stripped out. For block comments, leading whitespace and an asterisk + // will be stripped from the beginning of each line other than the first. + // Newlines are included in the output. + // + // Examples: + // + // optional int32 foo = 1; // Comment attached to foo. + // // Comment attached to bar. + // optional int32 bar = 2; + // + // optional string baz = 3; + // // Comment attached to baz. + // // Another line attached to baz. + // + // // Comment attached to qux. + // // + // // Another line attached to qux. + // optional double qux = 4; + // + // // Detached comment for corge. This is not leading or trailing comments + // // to qux or corge because there are blank lines separating it from + // // both. + // + // // Detached comment for corge paragraph 2. + // + // optional string corge = 5; + // /* Block comment attached + // * to corge. Leading asterisks + // * will be removed. 
*/ + // /* Block comment attached to + // * grault. */ + // optional int32 grault = 6; + // + // // ignored detached comments. + optional string leading_comments = 3; + optional string trailing_comments = 4; + repeated string leading_detached_comments = 6; + } +} + +// Describes the relationship between generated code and its original source +// file. A GeneratedCodeInfo message is associated with only one generated +// source file, but may contain references to different source .proto files. +message GeneratedCodeInfo { + // An Annotation connects some span of text in generated code to an element + // of its generating .proto file. + repeated Annotation annotation = 1; + message Annotation { + // Identifies the element in the original source .proto file. This field + // is formatted the same as SourceCodeInfo.Location.path. + repeated int32 path = 1 [packed = true]; + + // Identifies the filesystem path to the original source .proto. + optional string source_file = 2; + + // Identifies the starting offset in bytes in the generated code + // that relates to the identified object. + optional int32 begin = 3; + + // Identifies the ending offset in bytes in the generated code that + // relates to the identified offset. The end offset should be one past + // the last relevant byte (so the length of the text = end - begin). + optional int32 end = 4; + } +} diff --git a/packages/gapic-generator/tests/fragments/google/protobuf/struct.proto b/packages/gapic-generator/tests/fragments/google/protobuf/struct.proto new file mode 100644 index 000000000000..545215c25276 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/protobuf/struct.proto @@ -0,0 +1,95 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. 
+// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option cc_enable_arenas = true; +option go_package = "google.golang.org/protobuf/types/known/structpb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "StructProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +message Struct { + // Unordered map of dynamically typed values. + map fields = 1; +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of that +// variants, absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double number_value = 2; + // Represents a string value. + string string_value = 3; + // Represents a boolean value. + bool bool_value = 4; + // Represents a structured value. + Struct struct_value = 5; + // Represents a repeated `Value`. + ListValue list_value = 6; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. 
+// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/packages/gapic-generator/tests/fragments/import.proto b/packages/gapic-generator/tests/fragments/import.proto new file mode 100644 index 000000000000..005989420c41 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/import.proto @@ -0,0 +1,21 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +message Import { + string path = 1; +} diff --git a/packages/gapic-generator/tests/fragments/test_flattened_value.proto b/packages/gapic-generator/tests/fragments/test_flattened_value.proto new file mode 100644 index 000000000000..8965b97d402e --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_flattened_value.proto @@ -0,0 +1,37 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/protobuf/struct.proto"; +import "google/api/client.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.method_signature) = "parameter,items"; + } +} + +message MethodRequest { + google.protobuf.Value parameter = 1; + repeated google.protobuf.Value items = 2; +} + +message MethodResponse { + string result = 1; +} diff --git a/packages/gapic-generator/tests/fragments/test_keyword_import.proto b/packages/gapic-generator/tests/fragments/test_keyword_import.proto new file mode 100644 index 000000000000..81ce44eda18d --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_keyword_import.proto @@ -0,0 +1,33 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "import.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + rpc MyMethod(MethodRequest) returns (MethodResponse) {} +} + +message MethodRequest { + Import import = 1; +} + +message MethodResponse { + string result = 1; +} diff --git a/packages/gapic-generator/tests/fragments/test_optional_signature.proto b/packages/gapic-generator/tests/fragments/test_optional_signature.proto new file mode 100644 index 000000000000..4789abd59671 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_optional_signature.proto @@ -0,0 +1,35 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.method_signature) = "parameter"; + } + +} + +message MethodRequest { + optional string parameter = 1; +} + +message MethodResponse { + string result = 1; +} diff --git a/packages/gapic-generator/tests/fragments/test_recursive_messages.proto b/packages/gapic-generator/tests/fragments/test_recursive_messages.proto new file mode 100644 index 000000000000..efe2685877bb --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_recursive_messages.proto @@ -0,0 +1,34 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) {} +} + +message MethodRequest { + int32 depth = 1; + MethodRequest child = 2; +} + +message MethodResponse { + string result = 1; +} diff --git a/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto b/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto new file mode 100644 index 000000000000..9fee7912c952 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto @@ -0,0 +1,41 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.method_signature) = "from,class,import,any,license,type"; + } + +} + +message MethodRequest { + string from = 1; + string class = 2; + string import = 3; + string any = 4; + string license = 5; + string type = 6; +} + +message MethodResponse { + string result = 1; +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 98379ffdcee5..ae6fd2a5e6a6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -701,7 +701,9 @@ def test_list_assets_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_assets_flattened_error(): @@ -742,7 +744,9 @@ async def test_list_assets_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1277,7 +1281,9 @@ def test_create_feed_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_create_feed_flattened_error(): @@ -1318,7 +1324,9 @@ async def test_create_feed_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1520,7 +1528,9 @@ def test_get_feed_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val def test_get_feed_flattened_error(): @@ -1561,7 +1571,9 @@ async def test_get_feed_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1747,7 +1759,9 @@ def test_list_feeds_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_feeds_flattened_error(): @@ -1788,7 +1802,9 @@ async def test_list_feeds_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1990,7 +2006,9 @@ def test_update_feed_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].feed == asset_service.Feed(name='name_value') + arg = args[0].feed + mock_val = asset_service.Feed(name='name_value') + assert arg == mock_val def test_update_feed_flattened_error(): @@ -2031,7 +2049,9 @@ async def test_update_feed_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].feed == asset_service.Feed(name='name_value') + arg = args[0].feed + mock_val = asset_service.Feed(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -2215,7 +2235,9 @@ def test_delete_feed_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val def test_delete_feed_flattened_error(): @@ -2256,7 +2278,9 @@ async def test_delete_feed_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -2448,9 +2472,15 @@ def test_search_all_resources_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].scope == 'scope_value' - assert args[0].query == 'query_value' - assert args[0].asset_types == ['asset_types_value'] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val + arg = args[0].asset_types + mock_val = ['asset_types_value'] + assert arg == mock_val def test_search_all_resources_flattened_error(): @@ -2495,9 +2525,15 @@ async def test_search_all_resources_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].scope == 'scope_value' - assert args[0].query == 'query_value' - assert args[0].asset_types == ['asset_types_value'] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val + arg = args[0].asset_types + mock_val = ['asset_types_value'] + assert arg == mock_val @pytest.mark.asyncio @@ -2876,8 +2912,12 @@ def test_search_all_iam_policies_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].scope == 'scope_value' - assert args[0].query == 'query_value' + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val def test_search_all_iam_policies_flattened_error(): @@ -2920,8 +2960,12 @@ async def test_search_all_iam_policies_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].scope == 'scope_value' - assert args[0].query == 'query_value' + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].query + mock_val = 'query_value' + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 1ca68f9187cf..54bebbee759a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -549,9 +549,15 @@ def test_generate_access_token_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].scope == ['scope_value'] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].scope + mock_val = ['scope_value'] + assert arg == mock_val assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) @@ -599,9 +605,15 @@ async def test_generate_access_token_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].scope == ['scope_value'] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].scope + mock_val = ['scope_value'] + assert arg == mock_val assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) @@ -798,10 +810,18 @@ def test_generate_id_token_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].audience == 'audience_value' - assert args[0].include_email == True + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].audience + mock_val = 'audience_value' + assert arg == mock_val + arg = args[0].include_email + mock_val = True + assert arg == mock_val def test_generate_id_token_flattened_error(): @@ -848,10 +868,18 @@ async def test_generate_id_token_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].audience == 'audience_value' - assert args[0].include_email == True + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].audience + mock_val = 'audience_value' + assert arg == mock_val + arg = args[0].include_email + mock_val = True + assert arg == mock_val @pytest.mark.asyncio @@ -1050,9 +1078,15 @@ def test_sign_blob_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].payload == b'payload_blob' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].payload + mock_val = b'payload_blob' + assert arg == mock_val def test_sign_blob_flattened_error(): @@ -1097,9 +1131,15 @@ async def test_sign_blob_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].payload == b'payload_blob' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].payload + mock_val = b'payload_blob' + assert arg == mock_val @pytest.mark.asyncio @@ -1297,9 +1337,15 @@ def test_sign_jwt_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].payload == 'payload_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].payload + mock_val = 'payload_value' + assert arg == mock_val def test_sign_jwt_flattened_error(): @@ -1344,9 +1390,15 @@ async def test_sign_jwt_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].delegates == ['delegates_value'] - assert args[0].payload == 'payload_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].delegates + mock_val = ['delegates_value'] + assert arg == mock_val + arg = args[0].payload + mock_val = 'payload_value' + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 60e262121907..f8c963c4af4d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -547,7 +547,9 @@ def test_list_buckets_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_buckets_flattened_error(): @@ -588,7 +590,9 @@ async def test_list_buckets_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1755,7 +1759,9 @@ def test_list_views_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_views_flattened_error(): @@ -1796,7 +1802,9 @@ async def test_list_views_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -2794,7 +2802,9 @@ def test_list_sinks_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_sinks_flattened_error(): @@ -2835,7 +2845,9 @@ async def test_list_sinks_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -3240,7 +3252,9 @@ def test_get_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val def test_get_sink_flattened_error(): @@ -3281,7 +3295,9 @@ async def test_get_sink_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -3501,8 +3517,12 @@ def test_create_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].sink == logging_config.LogSink(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val def test_create_sink_flattened_error(): @@ -3545,8 +3565,12 @@ async def test_create_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].sink == logging_config.LogSink(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -3768,9 +3792,15 @@ def test_update_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - assert args[0].sink == logging_config.LogSink(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val def test_update_sink_flattened_error(): @@ -3815,9 +3845,15 @@ async def test_update_sink_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' - assert args[0].sink == logging_config.LogSink(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val @pytest.mark.asyncio @@ -4003,7 +4039,9 @@ def test_delete_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val def test_delete_sink_flattened_error(): @@ -4044,7 +4082,9 @@ async def test_delete_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == 'sink_name_value' + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -4234,7 +4274,9 @@ def test_list_exclusions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_exclusions_flattened_error(): @@ -4275,7 +4317,9 @@ async def test_list_exclusions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -4663,7 +4707,9 @@ def test_get_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val def test_get_exclusion_flattened_error(): @@ -4704,7 +4750,9 @@ async def test_get_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -4907,8 +4955,12 @@ def test_create_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val def test_create_exclusion_flattened_error(): @@ -4951,8 +5003,12 @@ async def test_create_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -5157,9 +5213,15 @@ def test_update_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val def test_update_exclusion_flattened_error(): @@ -5204,9 +5266,15 @@ async def test_update_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].exclusion == logging_config.LogExclusion(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val @pytest.mark.asyncio @@ -5392,7 +5460,9 @@ def test_delete_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val def test_delete_exclusion_flattened_error(): @@ -5433,7 +5503,9 @@ async def test_delete_exclusion_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index ddf1b0c45db3..47cc2177ef4d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -547,7 +547,9 @@ def test_delete_log_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val def test_delete_log_flattened_error(): @@ -588,7 +590,9 @@ async def test_delete_log_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -714,10 +718,18 @@ def test_write_log_entries_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' - assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') - assert args[0].labels == {'key_value': 'value_value'} - assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type_='type__value') + assert arg == mock_val + arg = args[0].labels + mock_val = {'key_value': 'value_value'} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name='log_name_value')] + assert arg == mock_val def test_write_log_entries_flattened_error(): @@ -764,10 +776,18 @@ async def test_write_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == 'log_name_value' - assert args[0].resource == monitored_resource_pb2.MonitoredResource(type_='type__value') - assert args[0].labels == {'key_value': 'value_value'} - assert args[0].entries == [log_entry.LogEntry(log_name='log_name_value')] + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type_='type__value') + assert arg == mock_val + arg = args[0].labels + mock_val = {'key_value': 'value_value'} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name='log_name_value')] + assert arg == mock_val @pytest.mark.asyncio @@ -899,9 +919,15 @@ def test_list_log_entries_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ['resource_names_value'] - assert args[0].filter == 'filter_value' - assert args[0].order_by == 'order_by_value' + arg = args[0].resource_names + mock_val = ['resource_names_value'] + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + arg = args[0].order_by + mock_val = 'order_by_value' + assert arg == mock_val def test_list_log_entries_flattened_error(): @@ -946,9 +972,15 @@ async def test_list_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ['resource_names_value'] - assert args[0].filter == 'filter_value' - assert args[0].order_by == 'order_by_value' + arg = args[0].resource_names + mock_val = ['resource_names_value'] + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + arg = args[0].order_by + mock_val = 'order_by_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1594,7 +1626,9 @@ def test_list_logs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_logs_flattened_error(): @@ -1635,7 +1669,9 @@ async def test_list_logs_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index fc8a79736621..7d8951e95ac5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -551,7 +551,9 @@ def test_list_log_metrics_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_log_metrics_flattened_error(): @@ -592,7 +594,9 @@ async def test_list_log_metrics_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -984,7 +988,9 @@ def test_get_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val def test_get_log_metric_flattened_error(): @@ -1025,7 +1031,9 @@ async def test_get_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1232,8 +1240,12 @@ def test_create_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val def test_create_log_metric_flattened_error(): @@ -1276,8 +1288,12 @@ async def test_create_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -1485,8 +1501,12 @@ def test_update_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val def test_update_log_metric_flattened_error(): @@ -1529,8 +1549,12 @@ async def test_update_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' - assert args[0].metric == logging_metrics.LogMetric(name='name_value') + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -1715,7 +1739,9 @@ def test_delete_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val def test_delete_log_metric_flattened_error(): @@ -1756,7 +1782,9 @@ async def test_delete_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == 'metric_name_value' + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 2d55a27a4ae0..160dcfc35a94 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -555,7 +555,9 @@ def test_list_instances_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val def test_list_instances_flattened_error(): @@ -596,7 +598,9 @@ async def test_list_instances_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1032,7 +1036,9 @@ def test_get_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val def test_get_instance_flattened_error(): @@ -1073,7 +1079,9 @@ async def test_get_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1261,9 +1269,15 @@ def test_create_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].instance_id == 'instance_id_value' - assert args[0].instance == cloud_redis.Instance(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_id + mock_val = 'instance_id_value' + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val def test_create_instance_flattened_error(): @@ -1310,9 +1324,15 @@ async def test_create_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].instance_id == 'instance_id_value' - assert args[0].instance == cloud_redis.Instance(name='name_value') + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_id + mock_val = 'instance_id_value' + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -1501,8 +1521,12 @@ def test_update_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - assert args[0].instance == cloud_redis.Instance(name='name_value') + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val def test_update_instance_flattened_error(): @@ -1547,8 +1571,12 @@ async def test_update_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - assert args[0].instance == cloud_redis.Instance(name='name_value') + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val @pytest.mark.asyncio @@ -1736,8 +1764,12 @@ def test_upgrade_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].redis_version == 'redis_version_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].redis_version + mock_val = 'redis_version_value' + assert arg == mock_val def test_upgrade_instance_flattened_error(): @@ -1782,8 +1814,12 @@ async def test_upgrade_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].redis_version == 'redis_version_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].redis_version + mock_val = 'redis_version_value' + assert arg == mock_val @pytest.mark.asyncio @@ -1971,8 +2007,12 @@ def test_import_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].input_config == cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].input_config + mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + assert arg == mock_val def test_import_instance_flattened_error(): @@ -2017,8 +2057,12 @@ async def test_import_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].input_config == cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].input_config + mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) + assert arg == mock_val @pytest.mark.asyncio @@ -2206,8 +2250,12 @@ def test_export_instance_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].output_config == cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].output_config + mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + assert arg == mock_val def test_export_instance_flattened_error(): @@ -2252,8 +2300,12 @@ async def test_export_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].output_config == cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].output_config + mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) + assert arg == mock_val @pytest.mark.asyncio @@ -2441,8 +2493,12 @@ def test_failover_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].data_protection_mode == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].data_protection_mode + mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + assert arg == mock_val def test_failover_instance_flattened_error(): @@ -2487,8 +2543,12 @@ async def test_failover_instance_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].data_protection_mode == cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].data_protection_mode + mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS + assert arg == mock_val @pytest.mark.asyncio @@ -2675,7 +2735,9 @@ def test_delete_instance_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val def test_delete_instance_flattened_error(): @@ -2718,7 +2780,9 @@ async def test_delete_instance_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val @pytest.mark.asyncio From 96d6ebecaa9e7b78f0e944bd593610bf5231f68a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 1 Nov 2021 19:00:15 +0000 Subject: [PATCH 0647/1339] chore: release 0.55.0 (#1059) :robot: I have created a release \*beep\* \*boop\* --- ## [0.55.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.54.0...v0.55.0) (2021-11-01) ### Features * add fragment tests ([#1056](https://www.github.com/googleapis/gapic-generator-python/issues/1056)) ([9d9b33d](https://www.github.com/googleapis/gapic-generator-python/commit/9d9b33dadf587a6d0b09031edeea597d6d2eae62)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c5e2c24cffb0..b2cd5b185723 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.55.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.54.0...v0.55.0) (2021-11-01) + + +### Features + +* add fragment tests ([#1056](https://www.github.com/googleapis/gapic-generator-python/issues/1056)) ([9d9b33d](https://www.github.com/googleapis/gapic-generator-python/commit/9d9b33dadf587a6d0b09031edeea597d6d2eae62)) + ## [0.54.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.4...v0.54.0) (2021-10-29) From 7d6fe5916c60793032eebd7e28450a9f5f9e5bc5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Nov 2021 00:34:19 +0100 Subject: [PATCH 0648/1339] chore(deps): update dependency protobuf to v3.19.1 (#1050) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.19.0` -> `==3.19.1` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.1/compatibility-slim/3.19.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.1/confidence-slim/3.19.0)](https://docs.renovatebot.com/merge-confidence/) | --- 
### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™» **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 96fbc33d6480..cc927fdc2fa9 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.1.1 googleapis-common-protos==1.53.0 jinja2==3.0.2 MarkupSafe==2.0.1 -protobuf==3.19.0 +protobuf==3.19.1 pypandoc==1.6.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From c4a7ca1dad1027b07457aeb5fd836bc95d2fc0f5 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 2 Nov 2021 16:11:51 -0400 Subject: [PATCH 0649/1339] fix: use (new) typing for 'gapic_v1.method.DEFAULT' (#1032) Relies on merge of https://github.com/googleapis/python-api-core/pull/292, but falls back to current (unsat) typing on older versions. 
Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .../%name/%version/%sub/services/%service/client.py.j2 | 5 ++++- .../%sub/services/%service/async_client.py.j2 | 5 ++++- .../%name_%version/%sub/services/%service/client.py.j2 | 5 ++++- .../%sub/services/%service/transports/rest.py.j2 | 6 +++++- .../cloud/asset_v1/services/asset_service/async_client.py | 5 ++++- .../google/cloud/asset_v1/services/asset_service/client.py | 5 ++++- .../credentials_v1/services/iam_credentials/async_client.py | 5 ++++- .../iam/credentials_v1/services/iam_credentials/client.py | 5 ++++- .../logging_v2/services/config_service_v2/async_client.py | 5 ++++- .../cloud/logging_v2/services/config_service_v2/client.py | 5 ++++- .../logging_v2/services/logging_service_v2/async_client.py | 5 ++++- .../cloud/logging_v2/services/logging_service_v2/client.py | 5 ++++- .../logging_v2/services/metrics_service_v2/async_client.py | 5 ++++- .../cloud/logging_v2/services/metrics_service_v2/client.py | 5 ++++- .../cloud/redis_v1/services/cloud_redis/async_client.py | 5 ++++- .../google/cloud/redis_v1/services/cloud_redis/client.py | 5 ++++- 16 files changed, 65 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 4f28d0ea2ef2..6d1d13705402 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -22,7 +22,10 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, 
gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index bd955dabcb00..baeea6118b14 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -18,7 +18,10 @@ from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 1e0ceecd1e12..0c2d02515e16 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -22,7 +22,10 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + 
OptionalRetry = Union[retries.Retry, object] {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index d85695b76f60..18da5c25d53b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -15,7 +15,11 @@ from requests import __version__ as requests_version from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] + {% extends '_base.py.j2' %} {% block content %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 7048813d2e9d..c441f9423689 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -26,7 +26,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api_core import operation # type: ignore from google.api_core import 
operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 0d5b57dab3dc..e30290b69087 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -30,7 +30,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index ff6175ab8e8b..3cbf23faa3f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -26,7 +26,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.iam.credentials_v1.types import common from google.protobuf 
import duration_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index d278ac7c769a..2ba74e5a0477 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -30,7 +30,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 61556342cb90..5fd915a7e81a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -26,7 +26,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from 
google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index a00d1f97d635..3ad109302470 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -30,7 +30,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index ec3fa2fe9271..3db85f68188e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -26,7 +26,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except 
AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 1d404a9141aa..4a440b12e1c6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -30,7 +30,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 088d179d4e45..09f6c8701bba 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -26,7 +26,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + 
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index da766d98d6bc..5514f7158173 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -30,7 +30,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 205b6a01f362..f00c9123c831 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -26,7 +26,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = 
Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 80cec0d8da59..3613db9d4193 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -30,7 +30,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: + OptionalRetry = Union[retries.Retry, object] from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore From e971e419297e0318bff1bca3ed014e45f0827a43 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 2 Nov 2021 14:16:15 -0600 Subject: [PATCH 0650/1339] test: add tests for autogen-snippets option (#1055) Follow up to #1044 --- .../tests/unit/generator/test_options.py | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index d5bd11f64e17..eaa12c971408 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -141,9 +141,7 @@ def test_options_service_config(fs): def test_options_bool_flags(): - # All 
these options are default False. - # If new options violate this assumption, - # this test may need to be tweaked. + # Most options are default False. # New options should follow the dash-case/snake_case convention. opt_str_to_attr_name = { name: re.sub(r"-", "_", name) @@ -161,3 +159,22 @@ def test_options_bool_flags(): options = Options.build(opt) assert getattr(options, attr) + + # Check autogen-snippets separately, as it is default True + options = Options.build("") + assert options.autogen_snippets + + options = Options.build("autogen-snippets=False") + assert not options.autogen_snippets + + +def test_options_autogen_snippets_false_for_old_naming(): + # NOTE: Snippets are not currently correct for the alternative (Ads) templates + # so always disable snippetgen in that case + # https://github.com/googleapis/gapic-generator-python/issues/1052 + options = Options.build("old-naming") + assert not options.autogen_snippets + + # Even if autogen-snippets is set to True, do not enable snippetgen + options = Options.build("old-naming,autogen-snippets=True") + assert not options.autogen_snippets From de07423606be4629ee8a4882553b16bee80c2ce1 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 3 Nov 2021 11:34:32 -0400 Subject: [PATCH 0651/1339] fix: suppress type error for fallback def of OptionalRetry (#1065) Introduced in #1032. 
--- .../%name/%version/%sub/services/%service/client.py.j2 | 4 ++-- .../%name_%version/%sub/services/%service/async_client.py.j2 | 4 ++-- .../%name_%version/%sub/services/%service/client.py.j2 | 4 ++-- .../%sub/services/%service/transports/rest.py.j2 | 4 ++-- .../cloud/asset_v1/services/asset_service/async_client.py | 4 ++-- .../google/cloud/asset_v1/services/asset_service/client.py | 4 ++-- .../credentials_v1/services/iam_credentials/async_client.py | 4 ++-- .../iam/credentials_v1/services/iam_credentials/client.py | 4 ++-- .../logging_v2/services/config_service_v2/async_client.py | 4 ++-- .../cloud/logging_v2/services/config_service_v2/client.py | 4 ++-- .../logging_v2/services/logging_service_v2/async_client.py | 4 ++-- .../cloud/logging_v2/services/logging_service_v2/client.py | 4 ++-- .../logging_v2/services/metrics_service_v2/async_client.py | 4 ++-- .../cloud/logging_v2/services/metrics_service_v2/client.py | 4 ++-- .../cloud/redis_v1/services/cloud_redis/async_client.py | 4 ++-- .../google/cloud/redis_v1/services/cloud_redis/client.py | 4 ++-- 16 files changed, 32 insertions(+), 32 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 6d1d13705402..28ee966a58f6 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -24,8 +24,8 @@ from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore {% filter sort_lines %} {% for method in 
service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index baeea6118b14..a435441f4f7d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -20,8 +20,8 @@ from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0c2d02515e16..0631fff9b33b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -24,8 +24,8 @@ from google.oauth2 import service_account # type: ignore try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 18da5c25d53b..7d678a8f6cc7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -17,8 +17,8 @@ import warnings try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore {% extends '_base.py.j2' %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index c441f9423689..2e2275165524 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -28,8 +28,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index e30290b69087..b22e356cf85f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -32,8 +32,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 3cbf23faa3f6..424195629bc1 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -28,8 +28,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 2ba74e5a0477..ef4a3c163e74 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -32,8 +32,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 5fd915a7e81a..95283d92cf1e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -28,8 +28,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 3ad109302470..c5b26371ea92 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -32,8 +32,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 3db85f68188e..fbadb6bcd1b8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -28,8 +28,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 4a440b12e1c6..17a54995ad5a 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -32,8 +32,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 09f6c8701bba..cf838feaacd2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -28,8 +28,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 5514f7158173..3ea0d1e83584 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -32,8 +32,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index f00c9123c831..93bbdd2e4b2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -28,8 +28,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 3613db9d4193..cde6cafebd1f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -32,8 +32,8 @@ try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: - OptionalRetry = Union[retries.Retry, object] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore From b8cdc30d919fcf1cd185a4884749ba2643f6fa77 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Wed, 3 Nov 2021 13:01:16 -0700 Subject: [PATCH 0652/1339] fix: fix missing http schema (http/https) for REST clients (#1063) * fix: fix missing http schema (http/https) for REST clients * update integration tests to match templates changes --- .../%sub/services/%service/transports/rest.py.j2 | 3 ++- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- 6 files changed, 7 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 7d678a8f6cc7..dbc75366284d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -233,7 +233,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): headers = dict(metadata) headers['Content-Type'] = 'application/json' 
response=getattr(self._session, method)( - uri, + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 7351e4296854..22eef15fd532 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -34,7 +34,7 @@ setuptools.setup( 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', {% endif %} 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', + 'proto-plus >= 1.19.7', {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', {% endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 8994eddd9d97..82da50b971e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -36,7 +36,7 @@ install_requires=( 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', + 'proto-plus >= 1.19.7', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), python_requires='>=3.6', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 2690e8742a38..1b831af2363b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -36,7 +36,7 @@ install_requires=( 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', + 'proto-plus >= 1.19.7', ), python_requires='>=3.6', classifiers=[ diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index f3fc9586e5fe..5d505b2fa2c3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -36,7 +36,7 @@ install_requires=( 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', + 'proto-plus >= 1.19.7', ), python_requires='>=3.6', classifiers=[ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 1972f2de4095..46d59d5f348a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -36,7 +36,7 @@ install_requires=( 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', 'libcst >= 0.2.5', - 'proto-plus >= 1.19.4', + 'proto-plus >= 1.19.7', ), python_requires='>=3.6', classifiers=[ From aaf8c114c3441b4944b2fe2a933d41bcb436b898 Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Wed, 3 Nov 2021 17:46:31 -0400 Subject: [PATCH 0653/1339] fix: handle required fields properly in query_params (#1068) * fix: Handle required fields with default values in query params. * fix: add host portion of url to unit tests. 
Co-authored-by: Kenneth Bandes --- .../services/%service/transports/rest.py.j2 | 45 +++++++++++++++---- .../%name_%version/%sub/test_%service.py.j2 | 2 +- 2 files changed, 38 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index dbc75366284d..9269eead8df1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -188,14 +188,31 @@ class {{service.name}}RestTransport({{service.name}}Transport): """ http_options = [ - {%- for rule in method.http_options %}{ - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', - {%- if rule.body %} - 'body': '{{ rule.body }}', - {%- endif %} - }, - {%- endfor %}] + {% for rule in method.http_options %} + { + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %} + }, + {% endfor %} + ] + + {% if method.input.required_fields %} + required_fields = [ + # (snake_case_name, camel_case_name) + {% for req_field in method.input.required_fields %} + {% if req_field.is_primitive %} + ( + "{{ req_field.name | snake_case }}", + "{{ req_field.name | camel_case }}" + ), + {% endif %}{# is primitive #} + {% endfor %}{# required fields #} + ] + + {% endif %} request_kwargs = {{method.input.ident}}.to_dict(request) transcoded_request = path_template.transcode( @@ -229,6 +246,18 @@ class {{service.name}}RestTransport({{service.name}}Transport): use_integers_for_enums=False )) + {% if method.input.required_fields %} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] + + {% endif %} + # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 865aabdd0c77..eb4877d22b2d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1293,7 +1293,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(transport: str = 'rest'): assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] {% with uri = method.http_options[0].uri %} - assert path_template.validate("{{ uri }}", args[1]) + assert path_template.validate("https://{{ service.host }}{{ uri }}", args[1]) {% endwith %} {# TODO(kbandes) - reverse-transcode request args to check all request fields #} From 5489e0415e1c686995269405fbfba3e29f28bbb8 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 4 Nov 2021 02:10:24 -0600 Subject: [PATCH 0654/1339] fix: leave a newline between field description and oneof line (#1071) --- .../%namespace/%name/%version/%sub/types/_message.py.j2 | 1 + .../%namespace/%name_%version/%sub/types/_message.py.j2 | 1 + .../asset/google/cloud/asset_v1/types/asset_service.py | 9 +++++++++ .../goldens/asset/google/cloud/asset_v1/types/assets.py | 5 +++++ .../logging/google/cloud/logging_v2/types/log_entry.py | 3 +++ .../google/cloud/logging_v2/types/logging_config.py | 1 + .../redis/google/cloud/redis_v1/types/cloud_redis.py | 2 ++ 7 files 
changed, 22 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 32fb35672d92..71ab95457a68 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -19,6 +19,7 @@ class {{ message.name }}({{ p }}.Message): {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(indent=12, nl=False) }} {% if field.oneof %} + This field is a member of `oneof`_ ``{{ field.oneof }}``. {% endif %} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 11e6b129be22..67039ea79877 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -19,6 +19,7 @@ class {{ message.name }}({{ p }}.Message): {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(indent=12, nl=False) }} {% if field.oneof %} + This field is a member of `oneof`_ ``{{ field.oneof }}``. 
{% endif %} {% endfor %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 0d028e2bf7c3..106c76c23838 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -522,11 +522,13 @@ class OutputConfig(proto.Message): Attributes: gcs_destination (google.cloud.asset_v1.types.GcsDestination): Destination on Cloud Storage. + This field is a member of `oneof`_ ``destination``. bigquery_destination (google.cloud.asset_v1.types.BigQueryDestination): Destination on BigQuery. The output table stores the fields in asset proto as columns in BigQuery. + This field is a member of `oneof`_ ``destination``. """ @@ -552,6 +554,7 @@ class OutputResult(proto.Message): Attributes: gcs_result (google.cloud.asset_v1.types.GcsOutputResult): Export result on Cloud Storage. + This field is a member of `oneof`_ ``result``. """ @@ -600,6 +603,7 @@ class GcsDestination(proto.Message): there is no `hold `__, it will be overwritten with the exported result. + This field is a member of `oneof`_ ``object_uri``. uri_prefix (str): The uri prefix of all generated Cloud Storage objects. @@ -611,6 +615,7 @@ class GcsDestination(proto.Message): compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be returned if file with the same name "gs://bucket_name/object_name_prefix" already exists. + This field is a member of `oneof`_ ``object_uri``. """ @@ -777,6 +782,7 @@ class FeedOutputConfig(proto.Message): Attributes: pubsub_destination (google.cloud.asset_v1.types.PubsubDestination): Destination on Pub/Sub. + This field is a member of `oneof`_ ``destination``. """ @@ -1452,6 +1458,7 @@ class ConditionContext(proto.Message): conditions. 
Note that this value must not be earlier than the current time; otherwise, an INVALID_ARGUMENT error will be returned. + This field is a member of `oneof`_ ``TimeContext``. """ @@ -1617,9 +1624,11 @@ class IamPolicyAnalysisOutputConfig(proto.Message): Attributes: gcs_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.GcsDestination): Destination on Cloud Storage. + This field is a member of `oneof`_ ``destination``. bigquery_destination (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination): Destination on BigQuery. + This field is a member of `oneof`_ ``destination``. """ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 40a0faf93cef..230326d4f310 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -178,14 +178,17 @@ class Asset(proto.Message): access_policy (google.identity.accesscontextmanager.v1.access_policy_pb2.AccessPolicy): Please also refer to the `access policy user guide `__. + This field is a member of `oneof`_ ``access_context_policy``. access_level (google.identity.accesscontextmanager.v1.access_level_pb2.AccessLevel): Please also refer to the `access level user guide `__. + This field is a member of `oneof`_ ``access_context_policy``. service_perimeter (google.identity.accesscontextmanager.v1.service_perimeter_pb2.ServicePerimeter): Please also refer to the `service perimeter user guide `__. + This field is a member of `oneof`_ ``access_context_policy``. os_inventory (google.cloud.osconfig.v1.inventory_pb2.Inventory): A representation of runtime OS Inventory information. See @@ -902,9 +905,11 @@ class Access(proto.Message): Attributes: role (str): The role. 
+ This field is a member of `oneof`_ ``oneof_access``. permission (str): The permission. + This field is a member of `oneof`_ ``oneof_access``. analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): The analysis state of this access. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 96cfd0f40361..c9fbf4ff058b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -91,14 +91,17 @@ class LogEntry(proto.Message): "type.googleapis.com/google.cloud.audit.AuditLog" "type.googleapis.com/google.appengine.logging.v1.RequestLog". + This field is a member of `oneof`_ ``payload``. text_payload (str): The log entry payload, represented as a Unicode string (UTF-8). + This field is a member of `oneof`_ ``payload``. json_payload (google.protobuf.struct_pb2.Struct): The log entry payload, represented as a structure that is expressed as a JSON object. + This field is a member of `oneof`_ ``payload``. timestamp (google.protobuf.timestamp_pb2.Timestamp): Optional. The time the event described by the log entry diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index fd04767f0253..ce8b103af238 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -282,6 +282,7 @@ class LogSink(proto.Message): bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. 
Options that affect sinks exporting data to BigQuery. + This field is a member of `oneof`_ ``options``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 3448acb64935..32aff22d3fed 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -507,6 +507,7 @@ class InputConfig(proto.Message): gcs_source (google.cloud.redis_v1.types.GcsSource): Google Cloud Storage location where input content is located. + This field is a member of `oneof`_ ``source``. """ @@ -567,6 +568,7 @@ class OutputConfig(proto.Message): gcs_destination (google.cloud.redis_v1.types.GcsDestination): Google Cloud Storage destination for output content. + This field is a member of `oneof`_ ``destination``. 
""" From f61ecafc2b06a17592cf6be864bf4564852ce94a Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 4 Nov 2021 03:47:20 -0600 Subject: [PATCH 0655/1339] chore: make actools@ a CODEOWNER (#1069) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS index 5bc27edbcc57..002c1863abdd 100644 --- a/packages/gapic-generator/.github/CODEOWNERS +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -4,5 +4,5 @@ # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -* @googleapis/actools-python @googleapis/yoshi-python +* @googleapis/actools @googleapis/actools-python @googleapis/yoshi-python *.yaml @googleapis/actools @googleapis/yoshi-python @googleapis/actools-python From 0ad6c7dec6d780eb9cd65bbd3e7b1dfb98cd7468 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 4 Nov 2021 13:27:27 -0400 Subject: [PATCH 0656/1339] fix: unignore 'google.api_core' imports (#1066) --- .../%name/%version/%sub/services/%service/client.py.j2 | 8 ++++---- .../%sub/services/%service/transports/base.py.j2 | 6 +++--- .../%sub/services/%service/transports/grpc.py.j2 | 6 +++--- packages/gapic-generator/gapic/schema/api.py | 2 +- packages/gapic-generator/gapic/schema/wrappers.py | 4 ++-- .../%sub/services/%service/async_client.py.j2 | 8 ++++---- .../%name_%version/%sub/services/%service/client.py.j2 | 8 ++++---- .../%sub/services/%service/transports/base.py.j2 | 10 +++++----- .../%sub/services/%service/transports/grpc.py.j2 | 6 +++--- .../services/%service/transports/grpc_asyncio.py.j2 | 6 +++--- .../%sub/services/%service/transports/rest.py.j2 | 10 +++++----- packages/gapic-generator/gapic/utils/uri_conv.py | 2 +- .../asset_v1/services/asset_service/async_client.py | 8 ++++---- 
.../cloud/asset_v1/services/asset_service/client.py | 8 ++++---- .../asset_v1/services/asset_service/transports/base.py | 10 +++++----- .../asset_v1/services/asset_service/transports/grpc.py | 6 +++--- .../services/asset_service/transports/grpc_asyncio.py | 6 +++--- .../services/iam_credentials/async_client.py | 8 ++++---- .../credentials_v1/services/iam_credentials/client.py | 8 ++++---- .../services/iam_credentials/transports/base.py | 8 ++++---- .../services/iam_credentials/transports/grpc.py | 4 ++-- .../iam_credentials/transports/grpc_asyncio.py | 4 ++-- .../services/config_service_v2/async_client.py | 8 ++++---- .../logging_v2/services/config_service_v2/client.py | 8 ++++---- .../services/config_service_v2/transports/base.py | 8 ++++---- .../services/config_service_v2/transports/grpc.py | 4 ++-- .../config_service_v2/transports/grpc_asyncio.py | 4 ++-- .../services/logging_service_v2/async_client.py | 8 ++++---- .../logging_v2/services/logging_service_v2/client.py | 8 ++++---- .../services/logging_service_v2/transports/base.py | 8 ++++---- .../services/logging_service_v2/transports/grpc.py | 4 ++-- .../logging_service_v2/transports/grpc_asyncio.py | 4 ++-- .../services/metrics_service_v2/async_client.py | 8 ++++---- .../logging_v2/services/metrics_service_v2/client.py | 8 ++++---- .../services/metrics_service_v2/transports/base.py | 8 ++++---- .../services/metrics_service_v2/transports/grpc.py | 4 ++-- .../metrics_service_v2/transports/grpc_asyncio.py | 4 ++-- .../redis_v1/services/cloud_redis/async_client.py | 8 ++++---- .../cloud/redis_v1/services/cloud_redis/client.py | 8 ++++---- .../redis_v1/services/cloud_redis/transports/base.py | 10 +++++----- .../redis_v1/services/cloud_redis/transports/grpc.py | 6 +++--- .../services/cloud_redis/transports/grpc_asyncio.py | 6 +++--- 42 files changed, 140 insertions(+), 140 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 28ee966a58f6..eb1a31dfa759 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -12,10 +12,10 @@ import pkg_resources import warnings {% endif %} -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 06e826572787..46ad9ec23381 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -7,10 +7,10 @@ import typing import pkg_resources import google.auth # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import gapic_v1 +from google.api_core import retry as retries {% if service.has_lro %} -from google.api_core import operations_v1 # 
type: ignore +from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 59a2219ca9de..eaba5f48361e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -5,11 +5,11 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import grpc_helpers # type: ignore +from google.api_core import grpc_helpers {% if service.has_lro %} -from google.api_core import operations_v1 # type: ignore +from google.api_core import operations_v1 {% endif %} -from google.api_core import gapic_v1 # type: ignore +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 0e632ed21783..d233073d1d7b 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -26,7 +26,7 @@ from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple from types import MappingProxyType -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions from google.api import resource_pb2 # type: ignore from google.gapic.metadata import gapic_metadata_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore diff --git a/packages/gapic-generator/gapic/schema/wrappers.py 
b/packages/gapic-generator/gapic/schema/wrappers.py index 2fbc5a2b20da..cbefe83c2ba7 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -39,8 +39,8 @@ from google.api import field_behavior_pb2 from google.api import http_pb2 from google.api import resource_pb2 -from google.api_core import exceptions # type: ignore -from google.api_core import path_template # type: ignore +from google.api_core import exceptions +from google.api_core import path_template from google.protobuf import descriptor_pb2 # type: ignore from google.protobuf.json_format import MessageToDict # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index a435441f4f7d..bbdffc3ba817 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -11,10 +11,10 @@ import pkg_resources import warnings {% endif %} -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0631fff9b33b..b217a886c8fe 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -12,10 +12,10 @@ import pkg_resources import warnings {% endif %} -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 58e41e4abfd6..66e9e56f8d69 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -7,12 +7,12 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import 
google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries {% if service.has_lro %} -from google.api_core import operations_v1 # type: ignore +from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index dbf01bc68244..baa63b846e51 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -5,11 +5,11 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore +from google.api_core import grpc_helpers {% if service.has_lro %} -from google.api_core import operations_v1 # type: ignore +from google.api_core import operations_v1 {% endif %} -from google.api_core import gapic_v1 # type: ignore +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index ed243e04ffb8..446132d2c527 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -5,10 +5,10 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async {% if service.has_lro %} -from google.api_core import operations_v1 # type: ignore +from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 9269eead8df1..fafc77d89a4f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -3,11 +3,11 @@ import json # type: ignore import grpc # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import rest_helpers # type: ignore -from google.api_core import path_template # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 {% if service.has_lro %} from 
google.api_core import operations_v1 {% endif %} diff --git a/packages/gapic-generator/gapic/utils/uri_conv.py b/packages/gapic-generator/gapic/utils/uri_conv.py index 988a8c299749..6b8ba7277111 100644 --- a/packages/gapic-generator/gapic/utils/uri_conv.py +++ b/packages/gapic-generator/gapic/utils/uri_conv.py @@ -13,7 +13,7 @@ # limitations under the License. from gapic.utils.reserved_names import RESERVED_NAMES -from google.api_core import path_template # type: ignore +from google.api_core import path_template def convert_uri_fieldnames(uri: str) -> str: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 2e2275165524..8491b558d31e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -19,10 +19,10 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index b22e356cf85f..6b063f5f9920 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -20,10 +20,10 @@ from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index b90728a618d2..b43800a72d4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -18,11 +18,11 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as 
retries # type: ignore -from google.api_core import operations_v1 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 0db532b0d348..71ed13ca3bc4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -16,9 +16,9 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index f8d325057f5f..7d64bb4064d7 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -16,9 +16,9 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 424195629bc1..18ebfa799bc6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -19,10 +19,10 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index ef4a3c163e74..2591dc7d815b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -20,10 +20,10 @@ from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index a57ba5e1e1cc..fd15a3f545bb 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: 
ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 4c5159d800f0..c213c2fb3209 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 8277d243b257..8515d5fdb130 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 95283d92cf1e..7cddb80f4eb8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -19,10 +19,10 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index c5b26371ea92..7634e4572280 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -20,10 +20,10 @@ from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 45efc2431c51..291c6046af8a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import 
google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 3a640eaf0bb5..4a0060ff65d7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index d52cbfc66d9b..448083462e9b 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index fbadb6bcd1b8..dd48dfd162ae 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -19,10 +19,10 @@ from typing import Dict, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 
import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 17a54995ad5a..b345f6c3deb2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -20,10 +20,10 @@ from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index acef64a6cfbd..5989f2c7777c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -18,10 +18,10 @@ import 
pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 4ec732a2688e..1bd1284bd4c3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 7a700c9d2b11..71cf9f3cd7a9 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index cf838feaacd2..a8aa79abde37 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -19,10 +19,10 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff 
--git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 3ea0d1e83584..730684c613e5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -20,10 +20,10 @@ from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 1f20a5d83848..c9f37d2793d8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import 
google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 8aced9e6d345..1acb9b4bcb81 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 8f91b272c183..e370f6943a09 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 93bbdd2e4b2c..60cec2a7448a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -19,10 +19,10 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index cde6cafebd1f..0e6fd19f21ae 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -20,10 +20,10 @@ from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 4cd11451e3c3..5e1e95f3d76a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -18,11 +18,11 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: 
ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 74b0217f5023..eee6585cc6fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -16,9 +16,9 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 95307155cedc..8ec56f68d6d1 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -16,9 +16,9 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore From cfb0e570afd834ae167573c48543bc7f57e332be Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 4 Nov 2021 14:28:09 -0700 Subject: [PATCH 0657/1339] chore: release 0.55.1 (#1062) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b2cd5b185723..8bf318bc1db9 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +### [0.55.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.55.0...v0.55.1) (2021-11-04) + + +### Bug Fixes + +* fix missing http schema (http/https) for REST clients ([#1063](https://www.github.com/googleapis/gapic-generator-python/issues/1063)) ([e3aa7a0](https://www.github.com/googleapis/gapic-generator-python/commit/e3aa7a0b23bc4bfd5170753f74bdeac219902d1a)) +* handle required fields properly in query_params 
([#1068](https://www.github.com/googleapis/gapic-generator-python/issues/1068)) ([0e379ca](https://www.github.com/googleapis/gapic-generator-python/commit/0e379ca6c0aee9d79d11a14074b7e9343e9e6af2)) +* leave a newline between field description and oneof line ([#1071](https://www.github.com/googleapis/gapic-generator-python/issues/1071)) ([4d0e365](https://www.github.com/googleapis/gapic-generator-python/commit/4d0e36528a8eb23ea3893b0bbcca10b679867445)) +* suppress type error for fallback def of OptionalRetry ([#1065](https://www.github.com/googleapis/gapic-generator-python/issues/1065)) ([e47faa6](https://www.github.com/googleapis/gapic-generator-python/commit/e47faa6c59a1fadf7dfebc965c962aa05ca30f74)) +* unignore 'google.api_core' imports ([#1066](https://www.github.com/googleapis/gapic-generator-python/issues/1066)) ([13f764c](https://www.github.com/googleapis/gapic-generator-python/commit/13f764c6513b91e7143a4a4a0bcc661cd19be0d8)) +* use (new) typing for 'gapic_v1.method.DEFAULT' ([#1032](https://www.github.com/googleapis/gapic-generator-python/issues/1032)) ([d85dfad](https://www.github.com/googleapis/gapic-generator-python/commit/d85dfadc180e5f218ad582a306c1c441a6c668db)) + ## [0.55.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.54.0...v0.55.0) (2021-11-01) From ef7cda6960a1df2cadaa9a37363c84cdb417ef5c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 5 Nov 2021 11:39:42 -0700 Subject: [PATCH 0658/1339] chore: add bazel caching to integration tests (#1058) The integration test CI takes ~8 minutes to run, most of which is installing bazel and setting up tool dependencies. It is a massive time optimization to cache this work and prevent its re-creation on subsequent builds. 
--- .../.github/workflows/tests.yaml | 34 +++++++++++-------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 33f2264da684..2690e36224f4 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -312,32 +312,36 @@ jobs: run: nox -s fragment-${{ matrix.python }} integration: runs-on: ubuntu-latest + container: gcr.io/gapic-images/googleapis-bazel:20210105 steps: - name: Cancel Previous Runs uses: styfle/cancel-workflow-action@0.7.0 with: access_token: ${{ github.token }} - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 + - name: Cache Bazel files + id: cache-bazel + uses: actions/cache@v2 with: - python-version: 3.8 - - name: Install system dependencies. + path: ~/.cache/bazel + # Note: if the container is updated, the key needs to be updated as well. + key: ${{ runner.os }}-bazel-20210105-${{ secrets.CACHE_VERSION }} + - name: Cache not found + if: steps.cache-bazel.outputs.cache-hit != 'true' run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc - - name: Install Bazel + echo "No cache found." + - name: Cache found + if: steps.cache-bazel.outputs.cache-hit == 'true' run: | - wget -q "https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/$BAZEL_BINARY" - wget -q "https://github.com/bazelbuild/bazel/releases/download/$BAZEL_VERSION/$BAZEL_BINARY.sha256" - sha256sum -c "$BAZEL_BINARY.sha256" - sudo dpkg -i "$BAZEL_BINARY" - env: - BAZEL_VERSION: 3.5.0 - BAZEL_BINARY: bazel_3.5.0-linux-x86_64.deb + echo -n "Cache found. 
Cache size: " + du -sh ~/.cache/bazel + echo "If the cache seems broken, update the CACHE_VERSION secret in" + echo "https://github.com/googleapis/gapic-generator-python/settings/secrets/actions" + echo "(use any random string, any GUID will work)" + echo "and it will start over with a clean cache." + echo "The old one will disappear after 7 days." - name: Integration Tests run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis - style-check: runs-on: ubuntu-latest steps: From 3e41673b756209ab27925d7b38d103d63598b3d2 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 5 Nov 2021 15:36:23 -0600 Subject: [PATCH 0659/1339] feat(snippetgen): turn resource path strings into f-strings (#1012) Fixes #965. Uses the already-existing resource path logic for autogen snippets. Previous: ```py # Initialize request argument(s) request = asset_v1.DeleteFeedRequest( name="projects/{project}/feeds/{feed}", ) ``` Now: ```py # Initialize request argument(s) project = "my-project-id" feed = "feed_value" name = f"projects/{project}/feeds/{feed}" request = asset_v1.DeleteFeedRequest( name=name, ) ``` Note: The logic may result in multiple variables with the same name (it doesn't happen for any of the golden APIs, but might for an API with more required fields). 
--- .../gapic/samplegen/samplegen.py | 80 +++++++++++++------ .../templates/examples/feature_fragments.j2 | 5 +- ..._service_batch_get_assets_history_async.py | 2 +- ...t_service_batch_get_assets_history_sync.py | 2 +- ...sset_v1_asset_service_delete_feed_async.py | 6 +- ...asset_v1_asset_service_delete_feed_sync.py | 6 +- ...et_v1_asset_service_export_assets_async.py | 2 +- ...set_v1_asset_service_export_assets_sync.py | 2 +- ...d_asset_v1_asset_service_get_feed_async.py | 6 +- ...ed_asset_v1_asset_service_get_feed_sync.py | 6 +- ...sset_v1_asset_service_list_assets_async.py | 2 +- ...asset_v1_asset_service_list_assets_sync.py | 2 +- ...credentials_generate_access_token_async.py | 6 +- ..._credentials_generate_access_token_sync.py | 6 +- ...iam_credentials_generate_id_token_async.py | 6 +- ..._iam_credentials_generate_id_token_sync.py | 6 +- ...ials_v1_iam_credentials_sign_blob_async.py | 6 +- ...tials_v1_iam_credentials_sign_blob_sync.py | 6 +- ...tials_v1_iam_credentials_sign_jwt_async.py | 6 +- ...ntials_v1_iam_credentials_sign_jwt_sync.py | 6 +- ...2_config_service_v2_create_bucket_async.py | 7 +- ...v2_config_service_v2_create_bucket_sync.py | 7 +- ...onfig_service_v2_create_exclusion_async.py | 6 +- ...config_service_v2_create_exclusion_sync.py | 6 +- ..._v2_config_service_v2_create_sink_async.py | 6 +- ...g_v2_config_service_v2_create_sink_sync.py | 6 +- ...2_config_service_v2_delete_bucket_async.py | 7 +- ...v2_config_service_v2_delete_bucket_sync.py | 7 +- ...onfig_service_v2_delete_exclusion_async.py | 6 +- ...config_service_v2_delete_exclusion_sync.py | 6 +- ..._v2_config_service_v2_delete_sink_async.py | 6 +- ...g_v2_config_service_v2_delete_sink_sync.py | 6 +- ..._v2_config_service_v2_delete_view_async.py | 8 +- ...g_v2_config_service_v2_delete_view_sync.py | 8 +- ...g_v2_config_service_v2_get_bucket_async.py | 7 +- ...ng_v2_config_service_v2_get_bucket_sync.py | 7 +- ...nfig_service_v2_get_cmek_settings_async.py | 5 +- 
...onfig_service_v2_get_cmek_settings_sync.py | 5 +- ...2_config_service_v2_get_exclusion_async.py | 6 +- ...v2_config_service_v2_get_exclusion_sync.py | 6 +- ...ing_v2_config_service_v2_get_sink_async.py | 6 +- ...ging_v2_config_service_v2_get_sink_sync.py | 6 +- ...ing_v2_config_service_v2_get_view_async.py | 8 +- ...ging_v2_config_service_v2_get_view_sync.py | 8 +- ...v2_config_service_v2_list_buckets_async.py | 7 +- ..._v2_config_service_v2_list_buckets_sync.py | 7 +- ...config_service_v2_list_exclusions_async.py | 6 +- ..._config_service_v2_list_exclusions_sync.py | 6 +- ...g_v2_config_service_v2_list_sinks_async.py | 6 +- ...ng_v2_config_service_v2_list_sinks_sync.py | 6 +- ...config_service_v2_undelete_bucket_async.py | 7 +- ..._config_service_v2_undelete_bucket_sync.py | 7 +- ...2_config_service_v2_update_bucket_async.py | 7 +- ...v2_config_service_v2_update_bucket_sync.py | 7 +- ...onfig_service_v2_update_exclusion_async.py | 6 +- ...config_service_v2_update_exclusion_sync.py | 6 +- ..._v2_config_service_v2_update_sink_async.py | 6 +- ...g_v2_config_service_v2_update_sink_sync.py | 6 +- ..._v2_logging_service_v2_delete_log_async.py | 6 +- ...g_v2_logging_service_v2_delete_log_sync.py | 6 +- ...gging_service_v2_list_log_entries_async.py | 6 +- ...ogging_service_v2_list_log_entries_sync.py | 6 +- ...g_v2_logging_service_v2_list_logs_async.py | 6 +- ...ng_v2_logging_service_v2_list_logs_sync.py | 6 +- ...rics_service_v2_create_log_metric_async.py | 6 +- ...trics_service_v2_create_log_metric_sync.py | 6 +- ...rics_service_v2_delete_log_metric_async.py | 6 +- ...trics_service_v2_delete_log_metric_sync.py | 6 +- ...metrics_service_v2_get_log_metric_async.py | 6 +- ..._metrics_service_v2_get_log_metric_sync.py | 6 +- ...trics_service_v2_list_log_metrics_async.py | 5 +- ...etrics_service_v2_list_log_metrics_sync.py | 5 +- ...rics_service_v2_update_log_metric_async.py | 6 +- ...trics_service_v2_update_log_metric_sync.py | 6 +- 
...is_v1_cloud_redis_create_instance_async.py | 6 +- ...dis_v1_cloud_redis_create_instance_sync.py | 6 +- ...is_v1_cloud_redis_delete_instance_async.py | 7 +- ...dis_v1_cloud_redis_delete_instance_sync.py | 7 +- ..._v1_cloud_redis_failover_instance_async.py | 7 +- ...s_v1_cloud_redis_failover_instance_sync.py | 7 +- ...redis_v1_cloud_redis_get_instance_async.py | 7 +- ..._redis_v1_cloud_redis_get_instance_sync.py | 7 +- ...dis_v1_cloud_redis_list_instances_async.py | 6 +- ...edis_v1_cloud_redis_list_instances_sync.py | 6 +- ...s_v1_cloud_redis_upgrade_instance_async.py | 7 +- ...is_v1_cloud_redis_upgrade_instance_sync.py | 7 +- ...llusca_v1_snippets_list_resources_async.py | 6 +- ...ollusca_v1_snippets_list_resources_sync.py | 6 +- .../tests/unit/samplegen/common_types.py | 4 + .../tests/unit/samplegen/test_samplegen.py | 37 +++++++-- .../tests/unit/samplegen/test_template.py | 5 +- 91 files changed, 529 insertions(+), 118 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 1538173ce965..0d372a365e7a 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -116,7 +116,7 @@ class TransformedRequest: def build( cls, request_type: wrappers.MessageType, - api_schema, + api_schema: api.API, base: str, attrs: List[AttributeRequestSetup], is_resource_request: bool, @@ -163,25 +163,22 @@ def build( # # It's a precondition that the base field is # a valid field of the request message type. 
- resource_typestr = ( - request_type.fields[base] - .options.Extensions[resource_pb2.resource_reference] - .type - ) + resource_reference = request_type.fields[base].options.Extensions[resource_pb2.resource_reference] + resource_typestr = resource_reference.type or resource_reference.child_type - resource_message_descriptor = next( - ( - msg.options.Extensions[resource_pb2.resource] - for msg in api_schema.messages.values() - if msg.options.Extensions[resource_pb2.resource].type - == resource_typestr - ), - None, - ) - if not resource_message_descriptor: + resource_message = None + for service in api_schema.services.values(): + resource_message = service.resource_messages_dict.get( + resource_typestr) + if resource_message is not None: + break + + if resource_message is None: raise types.NoSuchResource( - f"No message exists for resource: {resource_typestr}" + f"No message exists for resource: {resource_typestr}", ) + resource_message_descriptor = resource_message.options.Extensions[ + resource_pb2.resource] # The field is only ever empty for singleton attributes. attr_names: List[str] = [a.field for a in attrs] # type: ignore @@ -944,6 +941,37 @@ def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str yield spec +def _generate_resource_path_request_object(field_name: str, message: wrappers.MessageType) -> List[Dict[str, str]]: + """Given a message that represents a resource, generate request objects that + populate the resource path args. + + Args: + field_name (str): The name of the field. + message (wrappers.MessageType): The message the field belongs to. + + Returns: + List[Dict[str, str]]: A list of dicts that can be turned into TransformedRequests. 
+ """ + request = [] + + # Look for specific field names to substitute more realistic values + special_values_dict = { + "project": '"my-project-id"', + "location": '"us-central1"' + } + + for resource_path_arg in message.resource_path_args: + value = special_values_dict.get( + resource_path_arg, f'"{resource_path_arg}_value"') + request.append({ + # See TransformedRequest.build() for how 'field' is parsed + "field": f"{field_name}%{resource_path_arg}", + "value": value, + }) + + return request + + def generate_request_object(api_schema: api.API, service: wrappers.Service, message: wrappers.MessageType, field_name_prefix: str = ""): """Generate dummy input for a given message. @@ -972,12 +1000,18 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess # TODO(busunkim): Properly handle map fields if field.is_primitive: - placeholder_value = field.mock_value_original_type - # If this field identifies a resource use the resource path - if service.resource_messages_dict.get(field.resource_reference): - placeholder_value = service.resource_messages_dict[ - field.resource_reference].resource_path - request.append({"field": field_name, "value": placeholder_value}) + resource_reference_message = service.resource_messages_dict.get( + field.resource_reference) + # Some resource patterns have no resource_path_args + # https://github.com/googleapis/gapic-generator-python/issues/701 + if resource_reference_message and resource_reference_message.resource_path_args: + request += _generate_resource_path_request_object( + field_name, + resource_reference_message + ) + else: + request.append( + {"field": field_name, "value": field.mock_value_original_type}) elif field.enum: # Choose the last enum value in the list since index 0 is often "unspecified" request.append( diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 7938a648b648..af2be2bde34a 
100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -160,9 +160,10 @@ client = {{ module_name }}.{{ client_name }}() {# This is a resource-name patterned lookup parameter #} {% with formals = [] %} {% for attr in parameter_block.body %} -{% do formals.append("%s=%s"|format(attr.field, attr.input_parameter or attr.value)) %} +{{ attr.field }} = {{ attr.input_parameter or attr.value }} {% endfor %} -{{ parameter_block.base }} = "{{parameter_block.pattern }}".format({{ formals|join(", ") }}) +{{ parameter_block.base }} = f"{{parameter_block.pattern }}" + {% endwith %} {% else %}{# End resource name construction #} {{ parameter_block.base }} = {{ module_name }}.{{ request_type.get_field(parameter_block.base).type.name }}() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py index dcbb0b0159c9..bdc83d37df6e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py @@ -35,7 +35,7 @@ async def sample_batch_get_assets_history(): # Initialize request argument(s) request = asset_v1.BatchGetAssetsHistoryRequest( - parent="*", + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py index 035d76dedcd1..5a4b1abbaa09 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py @@ -35,7 +35,7 @@ def sample_batch_get_assets_history(): # Initialize request argument(s) request = asset_v1.BatchGetAssetsHistoryRequest( - parent="*", + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py index 793a1d28acec..d4df7397d433 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py @@ -34,8 +34,12 @@ async def sample_delete_feed(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + request = asset_v1.DeleteFeedRequest( - name="projects/{project}/feeds/{feed}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py index f12a42bd1efc..a49c06314044 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py @@ -34,8 +34,12 @@ def sample_delete_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + request = asset_v1.DeleteFeedRequest( - name="projects/{project}/feeds/{feed}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py index 39b41d605eaa..4edfb7ae3fb2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py @@ -38,7 +38,7 @@ async def sample_export_assets(): output_config.gcs_destination.uri = "uri_value" request = asset_v1.ExportAssetsRequest( - parent="*", + parent="parent_value", output_config=output_config, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py index 204c9e7c5c0a..62fcbbfff715 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py @@ -38,7 +38,7 @@ def sample_export_assets(): output_config.gcs_destination.uri = "uri_value" request = asset_v1.ExportAssetsRequest( - parent="*", + parent="parent_value", output_config=output_config, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py index ed6e4c7f2e3d..bd91353f3ceb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -34,8 +34,12 @@ async def sample_get_feed(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + request = asset_v1.GetFeedRequest( - name="projects/{project}/feeds/{feed}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index 
c50a77f3a869..493ff224ab48 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -34,8 +34,12 @@ def sample_get_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + request = asset_v1.GetFeedRequest( - name="projects/{project}/feeds/{feed}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py index 5e6b0a007cad..e21b4e35c3ca 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py @@ -35,7 +35,7 @@ async def sample_list_assets(): # Initialize request argument(s) request = asset_v1.ListAssetsRequest( - parent="*", + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py index a7fb179d5ffa..ef1dd925c0f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py 
+++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py @@ -35,7 +35,7 @@ def sample_list_assets(): # Initialize request argument(s) request = asset_v1.ListAssetsRequest( - parent="*", + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index 067ec4e09d4e..55a66bc1257f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -34,8 +34,12 @@ async def sample_generate_access_token(): client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.GenerateAccessTokenRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, scope=['scope_value_1', 'scope_value_2'], ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index e2ec5d222d73..9487ea97253f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -34,8 +34,12 @@ def sample_generate_access_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.GenerateAccessTokenRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, scope=['scope_value_1', 'scope_value_2'], ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py index 4494e93ef30c..03dee14637ae 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -34,8 +34,12 @@ async def sample_generate_id_token(): client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.GenerateIdTokenRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, audience="audience_value", ) diff 
--git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index b3bfb3193878..fd901f81fc70 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -34,8 +34,12 @@ def sample_generate_id_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.GenerateIdTokenRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, audience="audience_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py index 5455fe959af5..5929291181b2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -34,8 +34,12 @@ async def sample_sign_blob(): client = credentials_v1.IAMCredentialsAsyncClient() # 
Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.SignBlobRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, payload=b'payload_blob', ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index ff60375d5635..d54b6a5e61dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -34,8 +34,12 @@ def sample_sign_blob(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.SignBlobRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, payload=b'payload_blob', ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py index c75f5ac006a4..bb72d422692b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -34,8 +34,12 @@ async def sample_sign_jwt(): client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.SignJwtRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, payload="payload_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index 0d6e9322bca1..6c0508088528 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -34,8 +34,12 @@ def sample_sign_jwt(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + request = credentials_v1.SignJwtRequest( - name="projects/{project}/serviceAccounts/{service_account}", + name=name, payload="payload_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py index bfc49260eb84..69dbb78e0be6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -34,8 +34,13 @@ async def sample_create_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.CreateBucketRequest( - parent="projects/{project}/locations/{location}/buckets/{bucket}", + parent=parent, bucket_id="bucket_id_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index b0546c1a7142..5aab6d38777c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -34,8 +34,13 @@ def sample_create_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.CreateBucketRequest( - 
parent="projects/{project}/locations/{location}/buckets/{bucket}", + parent=parent, bucket_id="bucket_id_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py index 3d473947f862..331d889862b6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -34,12 +34,16 @@ async def sample_create_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.CreateExclusionRequest( - parent="projects/{project}/exclusions/{exclusion}", + parent=parent, exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index c9fe4d30f6b9..340489a8515e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -34,12 +34,16 @@ def sample_create_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.CreateExclusionRequest( - parent="projects/{project}/exclusions/{exclusion}", + parent=parent, exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py index 068d1fa0778a..72862fea5ff8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -34,12 +34,16 @@ async def sample_create_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.CreateSinkRequest( - parent="projects/{project}/sinks/{sink}", + parent=parent, sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index 34591a8a87e8..8952205a4d8a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -34,12 +34,16 @@ def sample_create_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.CreateSinkRequest( - parent="projects/{project}/sinks/{sink}", + parent=parent, sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py index ed6a9173a48a..6f4783434a49 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py @@ -34,8 +34,13 @@ async def sample_delete_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.DeleteBucketRequest( - 
name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py index bf107ae157bf..05caf82171e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py @@ -34,8 +34,13 @@ def sample_delete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.DeleteBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py index 73cf51dd59da..9dc81ab344d7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py @@ -34,8 +34,12 @@ async def sample_delete_exclusion(): client = 
logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + request = logging_v2.DeleteExclusionRequest( - name="projects/{project}/exclusions/{exclusion}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py index d1ddf4aeccad..06234bd4c5f3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py @@ -34,8 +34,12 @@ def sample_delete_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + request = logging_v2.DeleteExclusionRequest( - name="projects/{project}/exclusions/{exclusion}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py index fa0ea94ec535..9c16d136bd4c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py @@ -34,8 +34,12 @@ async def sample_delete_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + request = logging_v2.DeleteSinkRequest( - sink_name="projects/{project}/sinks/{sink}", + sink_name=sink_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py index c432aa6709b8..03967671a256 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py @@ -34,8 +34,12 @@ def sample_delete_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + request = logging_v2.DeleteSinkRequest( - sink_name="projects/{project}/sinks/{sink}", + sink_name=sink_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py index ec9ce8c4ad45..da713a45c19f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py @@ -34,8 +34,14 @@ async def sample_delete_view(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + request = logging_v2.DeleteViewRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py index f866fb6b3ae1..6e228b20c491 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py @@ -34,8 +34,14 @@ def sample_delete_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + request = logging_v2.DeleteViewRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", + name=name, ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py index bbc44365064d..450137ed5267 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -34,8 +34,13 @@ async def sample_get_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.GetBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index 933db8359cc0..2ca4765dc9e8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -34,8 +34,13 @@ def sample_get_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = 
f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.GetBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py index a12b7fffcc6f..46dbf2c23276 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -34,8 +34,11 @@ async def sample_get_cmek_settings(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + name = f"projects/{project}/cmekSettings" + request = logging_v2.GetCmekSettingsRequest( - name="projects/{project}/cmekSettings", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index c252c6b12ffd..8aafd34f4995 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -34,8 +34,11 @@ def sample_get_cmek_settings(): client = 
logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + name = f"projects/{project}/cmekSettings" + request = logging_v2.GetCmekSettingsRequest( - name="projects/{project}/cmekSettings", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py index 7aab7cd6302a..d32f26250646 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -34,8 +34,12 @@ async def sample_get_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + request = logging_v2.GetExclusionRequest( - name="projects/{project}/exclusions/{exclusion}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index 6fd1b2108f68..4e80012a4a33 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -34,8 +34,12 @@ def sample_get_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + request = logging_v2.GetExclusionRequest( - name="projects/{project}/exclusions/{exclusion}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py index d945db625473..b92d24eefab7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -34,8 +34,12 @@ async def sample_get_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + request = logging_v2.GetSinkRequest( - sink_name="projects/{project}/sinks/{sink}", + sink_name=sink_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index 9c9172304d99..008e2455abf1 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -34,8 +34,12 @@ def sample_get_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + request = logging_v2.GetSinkRequest( - sink_name="projects/{project}/sinks/{sink}", + sink_name=sink_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py index c68aa0551caa..8b76d7edbd52 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -34,8 +34,14 @@ async def sample_get_view(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + request = logging_v2.GetViewRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index 0de5e38b580c..0e21b0169983 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -34,8 +34,14 @@ def sample_get_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + request = logging_v2.GetViewRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}/views/{view}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py index 698255b56043..a530c83b5c14 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -34,8 +34,13 @@ async def sample_list_buckets(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.ListBucketsRequest( - 
parent="projects/{project}/locations/{location}/buckets/{bucket}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index 62f78c0e2a05..c6d629794716 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -34,8 +34,13 @@ def sample_list_buckets(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.ListBucketsRequest( - parent="projects/{project}/locations/{location}/buckets/{bucket}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py index 49c7788689ad..1a9db6155509 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -34,8 +34,12 @@ async def sample_list_exclusions(): client = 
logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + request = logging_v2.ListExclusionsRequest( - parent="projects/{project}/exclusions/{exclusion}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index 2a00c57c380a..19ccc14f56f1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -34,8 +34,12 @@ def sample_list_exclusions(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + request = logging_v2.ListExclusionsRequest( - parent="projects/{project}/exclusions/{exclusion}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py index 7e40c4f3cbbd..739cb31262d9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -34,8 +34,12 @@ async def sample_list_sinks(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + request = logging_v2.ListSinksRequest( - parent="projects/{project}/sinks/{sink}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index 99f7f0abb1ef..534f3e9f2741 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -34,8 +34,12 @@ def sample_list_sinks(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + request = logging_v2.ListSinksRequest( - parent="projects/{project}/sinks/{sink}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py index badbebdd6a9a..5fa49940a1a4 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py @@ -34,8 +34,13 @@ async def sample_undelete_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.UndeleteBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py index 783b187d4741..c2804473a5bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py @@ -34,8 +34,13 @@ def sample_undelete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.UndeleteBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py index a859113d5ad1..bd49a32b7f2e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ -34,8 +34,13 @@ async def sample_update_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.UpdateBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index 2614b76d9576..de20ddf1e166 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -34,8 +34,13 @@ def sample_update_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + 
name = f"projects/{project}/locations/{location}/buckets/{bucket}" + request = logging_v2.UpdateBucketRequest( - name="projects/{project}/locations/{location}/buckets/{bucket}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py index bde70749f1e9..60d503ac15a8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -34,12 +34,16 @@ async def sample_update_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.UpdateExclusionRequest( - name="projects/{project}/exclusions/{exclusion}", + name=name, exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index b974978fb806..48726f9222fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -34,12 +34,16 @@ def sample_update_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.UpdateExclusionRequest( - name="projects/{project}/exclusions/{exclusion}", + name=name, exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py index 1973b924cd28..096a88890b86 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -34,12 +34,16 @@ async def sample_update_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.UpdateSinkRequest( - sink_name="projects/{project}/sinks/{sink}", + sink_name=sink_name, sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index 9cc57a79f610..35977595848b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -34,12 +34,16 @@ def sample_update_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.UpdateSinkRequest( - sink_name="projects/{project}/sinks/{sink}", + sink_name=sink_name, sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py index 70b50994f543..1c9bc101aaf8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py @@ -34,8 +34,12 @@ async def sample_delete_log(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + log_name = f"projects/{project}/logs/{log}" + request = logging_v2.DeleteLogRequest( - log_name="projects/{project}/logs/{log}", + log_name=log_name, ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py index 357ee19596d0..0b329d55a622 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py @@ -34,8 +34,12 @@ def sample_delete_log(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + log_name = f"projects/{project}/logs/{log}" + request = logging_v2.DeleteLogRequest( - log_name="projects/{project}/logs/{log}", + log_name=log_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py index 482bf96209e5..ac069139a6c8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -34,8 +34,12 @@ async def sample_list_log_entries(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + resource_names = f"projects/{project}/logs/{log}" + request = logging_v2.ListLogEntriesRequest( - 
resource_names="projects/{project}/logs/{log}", + resource_names=resource_names, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index b7d3b3ca02e7..227e887f0e28 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -34,8 +34,12 @@ def sample_list_log_entries(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + resource_names = f"projects/{project}/logs/{log}" + request = logging_v2.ListLogEntriesRequest( - resource_names="projects/{project}/logs/{log}", + resource_names=resource_names, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py index 0d943f89d909..2fe01050d299 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -34,8 +34,12 @@ async def sample_list_logs(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) + project = 
"my-project-id" + log = "log_value" + parent = f"projects/{project}/logs/{log}" + request = logging_v2.ListLogsRequest( - parent="projects/{project}/logs/{log}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 218acc033bc2..5c619ad49dea 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -34,8 +34,12 @@ def sample_list_logs(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + parent = f"projects/{project}/logs/{log}" + request = logging_v2.ListLogsRequest( - parent="projects/{project}/logs/{log}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py index 091013bb320c..de5a085ddaf2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -34,12 +34,16 @@ async def sample_create_log_metric(): client = 
logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + parent = f"projects/{project}/metrics/{metric}" + metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.CreateLogMetricRequest( - parent="projects/{project}/metrics/{metric}", + parent=parent, metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index 1d63becf0ad2..e3bd08822c86 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -34,12 +34,16 @@ def sample_create_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + parent = f"projects/{project}/metrics/{metric}" + metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.CreateLogMetricRequest( - parent="projects/{project}/metrics/{metric}", + parent=parent, metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py index d0cc9d1c89b5..5ed756255167 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py @@ -34,8 +34,12 @@ async def sample_delete_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + request = logging_v2.DeleteLogMetricRequest( - metric_name="projects/{project}/metrics/{metric}", + metric_name=metric_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py index e82eff3ea790..012322113c12 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py @@ -34,8 +34,12 @@ def sample_delete_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + request = logging_v2.DeleteLogMetricRequest( - metric_name="projects/{project}/metrics/{metric}", + metric_name=metric_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py index 9d225f360f7e..cc0f0c553608 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -34,8 +34,12 @@ async def sample_get_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + request = logging_v2.GetLogMetricRequest( - metric_name="projects/{project}/metrics/{metric}", + metric_name=metric_name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index bb272a2d4164..fb8499456f29 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -34,8 +34,12 @@ def sample_get_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + request = logging_v2.GetLogMetricRequest( - metric_name="projects/{project}/metrics/{metric}", + metric_name=metric_name, ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py index 240b775d36e4..c640c5972412 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -34,8 +34,11 @@ async def sample_list_log_metrics(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + parent = f"projects/{project}" + request = logging_v2.ListLogMetricsRequest( - parent="projects/{project}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index 9944c9443a60..734d4c869b8d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -34,8 +34,11 @@ def sample_list_log_metrics(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + parent = f"projects/{project}" + request = logging_v2.ListLogMetricsRequest( - parent="projects/{project}", + parent=parent, ) # Make 
the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py index 8bae70082b3f..05fb745bb711 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -34,12 +34,16 @@ async def sample_update_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.UpdateLogMetricRequest( - metric_name="projects/{project}/metrics/{metric}", + metric_name=metric_name, metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index 1f8631f3755c..6e37f22e5979 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -34,12 +34,16 @@ def sample_update_log_metric(): client = 
logging_v2.MetricsServiceV2Client() # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.UpdateLogMetricRequest( - metric_name="projects/{project}/metrics/{metric}", + metric_name=metric_name, metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py index de3eddebdfa1..46b59320bfb5 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -34,13 +34,17 @@ async def sample_create_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + instance = redis_v1.Instance() instance.name = "name_value" instance.tier = "STANDARD_HA" instance.memory_size_gb = 1499 request = redis_v1.CreateInstanceRequest( - parent="projects/{project}/locations/{location}", + parent=parent, instance_id="instance_id_value", instance=instance, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index d2e83c8ed021..c0b84c7c52c7 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -34,13 +34,17 @@ def sample_create_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + instance = redis_v1.Instance() instance.name = "name_value" instance.tier = "STANDARD_HA" instance.memory_size_gb = 1499 request = redis_v1.CreateInstanceRequest( - parent="projects/{project}/locations/{location}", + parent=parent, instance_id="instance_id_value", instance=instance, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py index 119b2e34ac00..b63f325b0979 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py @@ -34,8 +34,13 @@ async def sample_delete_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.DeleteInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index 0d868c91cd5a..5a081bbc7f6c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -34,8 +34,13 @@ def sample_delete_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.DeleteInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py index 85263ab3ada9..c89d149c4c83 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -34,8 +34,13 @@ async def sample_failover_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = 
f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.FailoverInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index 8dafe96046be..22295a3f6e2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -34,8 +34,13 @@ def sample_failover_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.FailoverInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py index ed9d84195f5b..4a024f308ea8 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -34,8 +34,13 @@ async def sample_get_instance(): client = 
redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.GetInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index 45fbec17c9ad..5ea1e2f7fcf6 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -34,8 +34,13 @@ def sample_get_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.GetInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py index 93268aa42c2d..60ae031bf21a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -34,8 +34,12 @@ async def sample_list_instances(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + request = redis_v1.ListInstancesRequest( - parent="projects/{project}/locations/{location}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index 196f5ebcbf8a..f428f6a9e121 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -34,8 +34,12 @@ def sample_list_instances(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + request = redis_v1.ListInstancesRequest( - parent="projects/{project}/locations/{location}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py index dee9b3f826f6..c00e66a57948 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -34,8 +34,13 @@ async def sample_upgrade_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.UpgradeInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, redis_version="redis_version_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index b14f29675cee..77052b6d5586 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -34,8 +34,13 @@ def sample_upgrade_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + request = redis_v1.UpgradeInstanceRequest( - name="projects/{project}/locations/{location}/instances/{instance}", + name=name, redis_version="redis_version_value", ) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index 492aa22271ef..768866029872 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -34,8 +34,12 @@ async def sample_list_resources(): client = mollusca_v1.SnippetsAsyncClient() # Initialize request argument(s) + item_id = "item_id_value" + part_id = "part_id_value" + parent = f"items/{item_id}/parts/{part_id}" + request = mollusca_v1.ListResourcesRequest( - parent="items/{item_id}/parts/{part_id}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index 817f86924739..f1aea4939062 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -34,8 +34,12 @@ def sample_list_resources(): client = mollusca_v1.SnippetsClient() # Initialize request argument(s) + item_id = "item_id_value" + part_id = "part_id_value" + parent = f"items/{item_id}/parts/{part_id}" + request = mollusca_v1.ListResourcesRequest( - parent="items/{item_id}/parts/{part_id}", + parent=parent, ) # Make the request diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index c0e3fc83e467..538b9d68645e 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -85,6 +85,10 @@ def oneof_fields(self): def 
required_fields(self): return [field for field in self.fields.values() if field.required] + @property + def resource_path_args(self): + return wrappers.MessageType.PATH_ARG_RE.findall(self.resource_path or '') + DummyService = namedtuple("DummyService", [ "methods", "client_name", "async_client_name", "resource_messages_dict"]) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index b44f34c13e19..fde9969ef846 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -193,8 +193,8 @@ def test_preprocess_sample_resource_message_field(): # assert mock request is created assert sample["request"] == [ { - "field": "parent", - "value": "projects/{project}" + "field": "parent%project", + "value": '"my-project-id"' } ] @@ -1861,7 +1861,16 @@ def test_validate_request_resource_name(): request_descriptor, phylum_descriptor, ]) - } + }, + services={ + "Mollusc": DummyService( + methods={}, + client_name="MolluscClient", + resource_messages_dict={ + resource_type: phylum_descriptor + } + ) + }, ) v = samplegen.Validator(method=method, api_schema=api_schema) @@ -1981,7 +1990,7 @@ def test_validate_request_no_such_attr(dummy_api_schema): v.validate_and_transform_request(types.CallingForm.Request, request) -def test_validate_request_no_such_resource(dummy_api_schema): +def test_validate_request_no_such_resource(): request = [ {"field": "taxon%kingdom", "value": "animalia"} ] @@ -1993,7 +2002,14 @@ def test_validate_request_no_such_resource(dummy_api_schema): method = DummyMethod(input=request_descriptor) api_schema = DummyApiSchema( - messages={k: v for k, v in enumerate([request_descriptor])} + messages={k: v for k, v in enumerate([request_descriptor])}, + services={ + "Mollusc": DummyService( + methods={}, + client_name="MolluscClient", + resource_messages_dict={} + ) + }, ) v = 
samplegen.Validator(method=method, api_schema=api_schema) @@ -2028,7 +2044,16 @@ def test_validate_request_no_such_pattern(): request_descriptor, phylum_descriptor, ]) - } + }, + services={ + "Mollusc": DummyService( + methods={}, + client_name="MolluscClient", + resource_messages_dict={ + resource_type: phylum_descriptor + } + ) + }, ) v = samplegen.Validator(method=method, api_schema=api_schema) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 3d1b3cff26e4..4bf00b15a33a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -301,7 +301,10 @@ def test_render_request_resource_name(): ''', ''' # Initialize request argument(s) - taxon = "kingdom/{kingdom}/phylum/{phylum}".format(kingdom="animalia", phylum=mollusca) + kingdom = "animalia" + phylum = mollusca + taxon = f"kingdom/{kingdom}/phylum/{phylum}" + ''', request=samplegen.FullRequest( request_list=[ From 4680a9766e861bfbbb5feb17477ca1370c29488f Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 5 Nov 2021 16:04:38 -0700 Subject: [PATCH 0660/1339] fix: fix rest unit test (#1074) In some cases `hostname:port` format was expected. 
--- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index eb4877d22b2d..541ff0312464 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1293,7 +1293,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(transport: str = 'rest'): assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] {% with uri = method.http_options[0].uri %} - assert path_template.validate("https://{{ service.host }}{{ uri }}", args[1]) + assert path_template.validate("https://%s{{ uri }}" % client.transport._host, args[1]) {% endwith %} {# TODO(kbandes) - reverse-transcode request args to check all request fields #} From 170118169e55d052015ad02da1103248aa5df7a4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 5 Nov 2021 16:10:08 -0700 Subject: [PATCH 0661/1339] chore: release 0.56.0 (#1073) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8bf318bc1db9..39405ab79354 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## [0.56.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.55.1...v0.56.0) (2021-11-05) + + +### Features + +* **snippetgen:** turn resource path strings into f-strings 
([#1012](https://www.github.com/googleapis/gapic-generator-python/issues/1012)) ([a110e1d](https://www.github.com/googleapis/gapic-generator-python/commit/a110e1d8387ea37b85ab0621bacd30da175fe85b)) + + +### Bug Fixes + +* fix rest unit test ([#1074](https://www.github.com/googleapis/gapic-generator-python/issues/1074)) ([3b2918e](https://www.github.com/googleapis/gapic-generator-python/commit/3b2918ecaeb90229f22834438dc31755498ee2d0)) + ### [0.55.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.55.0...v0.55.1) (2021-11-04) From 0868a8e401177168be1c026870f42ce8af16b7db Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 8 Nov 2021 11:50:17 -0500 Subject: [PATCH 0662/1339] chore: drop use of deprecated 'distutils.util.strtobool' (#1022) Fixes #1007 --- .../%name/%version/%sub/services/%service/client.py.j2 | 5 +++-- .../%name_%version/%sub/services/%service/client.py.j2 | 5 +++-- .../google/cloud/asset_v1/services/asset_service/client.py | 5 +++-- .../iam/credentials_v1/services/iam_credentials/client.py | 5 +++-- .../cloud/logging_v2/services/config_service_v2/client.py | 5 +++-- .../cloud/logging_v2/services/logging_service_v2/client.py | 5 +++-- .../cloud/logging_v2/services/metrics_service_v2/client.py | 5 +++-- .../google/cloud/redis_v1/services/cloud_redis/client.py | 5 +++-- packages/gapic-generator/tests/system/conftest.py | 3 +-- .../tests/system/test_client_context_manager.py | 3 +-- packages/gapic-generator/tests/system/test_grpc_lro.py | 3 +-- packages/gapic-generator/tests/system/test_grpc_streams.py | 3 +-- packages/gapic-generator/tests/system/test_grpc_unary.py | 3 +-- packages/gapic-generator/tests/system/test_pagination.py | 3 +-- packages/gapic-generator/tests/system/test_resource_crud.py | 3 +-- packages/gapic-generator/tests/system/test_retry.py | 3 +-- 16 files changed, 32 insertions(+), 32 deletions(-) diff --git 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index eb1a31dfa759..c38929917b59 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -3,7 +3,6 @@ {% block content %} from collections import OrderedDict -from distutils import util import os import re from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union @@ -242,7 +241,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ssl_credentials = None is_mtls = False diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index b217a886c8fe..917fbd0fe6d5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -3,7 +3,6 @@ {% block content %} from collections import OrderedDict -from distutils import util import 
os import re from typing import Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union @@ -250,7 +249,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 6b063f5f9920..c9b71375271e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union @@ -290,7 +289,9 @@ def __init__(self, *, client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 2591dc7d815b..1b16bae261bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union @@ -286,7 +285,9 @@ def __init__(self, *, client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 7634e4572280..55583554d9ce 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union @@ -321,7 +320,9 @@ def __init__(self, *, client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index b345f6c3deb2..8dd1404e33b8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union @@ -277,7 +276,9 @@ def __init__(self, *, client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 730684c613e5..683dec1c9015 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union @@ -278,7 +277,9 @@ def __init__(self, *, client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 0e6fd19f21ae..79b2746ced04 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union @@ -301,7 +300,9 @@ def __init__(self, *, client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index e7f535c74b35..109141e9a605 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -13,7 +13,6 @@ # limitations under the License. import collections -import distutils import grpc import mock import os @@ -25,7 +24,7 @@ from google.showcase import IdentityClient from google.showcase import MessagingClient -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": from grpc.experimental import aio import asyncio from google.showcase import EchoAsyncClient diff --git a/packages/gapic-generator/tests/system/test_client_context_manager.py b/packages/gapic-generator/tests/system/test_client_context_manager.py index 88ced947ed4e..83e1c4353649 100644 --- a/packages/gapic-generator/tests/system/test_client_context_manager.py +++ b/packages/gapic-generator/tests/system/test_client_context_manager.py @@ -15,7 +15,6 @@ import os import pytest import grpc -import distutils def test_client(echo): @@ -34,7 +33,7 @@ def test_client_destroyed(echo): }) -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @pytest.mark.asyncio async def test_client_async(async_echo): diff --git a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_grpc_lro.py index 
6dbd3af3960f..0b159660744d 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_grpc_lro.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import distutils import os import pytest from datetime import datetime, timedelta, timezone @@ -32,7 +31,7 @@ def test_lro(echo): assert response.content.endswith('the snails...eventually.') -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @pytest.mark.asyncio async def test_lro_async(async_echo): diff --git a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_grpc_streams.py index f9da07948c10..3187ca49f539 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_streams.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import distutils import logging import os import pytest @@ -78,7 +77,7 @@ def test_stream_stream_passing_dict(echo): assert responses.trailing_metadata() == metadata -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": import asyncio @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_grpc_unary.py index f8ca2f65276b..68ed43b01620 100644 --- a/packages/gapic-generator/tests/system/test_grpc_unary.py +++ b/packages/gapic-generator/tests/system/test_grpc_unary.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import distutils import os import pytest @@ -49,7 +48,7 @@ def test_unary_error(echo): assert exc.value.message == message -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": import asyncio @pytest.mark.asyncio diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index eb195ea12896..fbf1a243deab 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import distutils import os import pytest from google import showcase @@ -49,7 +48,7 @@ def test_pagination_pages(echo): for i in text.split(' ')] -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @pytest.mark.asyncio async def test_pagination_async(async_echo): text = 'The hail in Wales falls mainly on the snails.' diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index 8af963e86a72..b3e704d60d4c 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import distutils import os import pytest @@ -78,7 +77,7 @@ def test_path_parsing(messaging): assert expected == actual -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @pytest.mark.asyncio async def test_crud_with_request_async(async_identity): diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 97e0c60beb10..7e67c298c677 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import distutils import os import pytest @@ -30,7 +29,7 @@ def test_retry_bubble(echo): }) -if distutils.util.strtobool(os.environ.get("GAPIC_PYTHON_ASYNC", "true")): +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @pytest.mark.asyncio async def test_retry_bubble_async(async_echo): From bede4e346c1c7f86f2677fe8f91a7378ac543b6e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 8 Nov 2021 10:57:05 -0700 Subject: [PATCH 0663/1339] fix(snippetgen): fix client streaming samples (#1061) Fixes #1014 and unblocks #1043. NOTE: Some real world APIs expect the first request to pass a config (example) so the generated samples will not work out of the box. This will be addressed when the new sample config language is sorted out. 
--- .../.github/workflows/tests.yaml | 2 +- packages/gapic-generator/DEVELOPMENT.md | 4 +- .../gapic/samplegen/samplegen.py | 14 +---- .../templates/examples/feature_fragments.j2 | 19 ++++++- .../gapic/templates/examples/sample.py.j2 | 2 +- ...gging_service_v2_tail_log_entries_async.py | 11 +++- ...ogging_service_v2_tail_log_entries_sync.py | 11 +++- ...v1_snippets_method_bidi_streaming_async.py | 11 +++- ..._v1_snippets_method_bidi_streaming_sync.py | 11 +++- .../tests/unit/samplegen/test_samplegen.py | 51 ------------------- .../tests/unit/samplegen/test_template.py | 14 ++--- 11 files changed, 70 insertions(+), 80 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 2690e36224f4..b8f18282237f 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -359,4 +359,4 @@ jobs: python -m pip install autopep8 - name: Check diff run: | - find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code + find gapic tests -name "*.py" -not -path 'tests/**/goldens/*' | xargs autopep8 --diff --exit-code diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index 57d048b62df7..2079f744bed3 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -26,13 +26,13 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. - Lint sources by running `autopep8`. The specific command is the following. 
``` - find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code + find gapic tests -name "*.py" -not -path 'tests/**/goldens/*' | xargs autopep8 --diff --exit-code ``` - Format sources in place: ``` - find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --in-place + find gapic tests -name "*.py" -not -path 'tests/**/goldens/*' | xargs autopep8 --in-place ``` ## Integration Tests diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 0d372a365e7a..2a7e68a1f5be 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -466,9 +466,7 @@ def validate_and_transform_request( Raises: InvalidRequestSetup: If a dict in the request lacks a "field" key, - a "value" key, if there is an unexpected keyword, - or if more than one base parameter is given for - a client-side streaming calling form. + a "value" key or if there is an unexpected keyword. BadAttributeLookup: If a request field refers to a non-existent field in the request message type. ResourceRequestMismatch: If a request attempts to describe both @@ -548,16 +546,6 @@ def validate_and_transform_request( AttributeRequestSetup(**r_dup) # type: ignore ) - client_streaming_forms = { - types.CallingForm.RequestStreamingClient, - types.CallingForm.RequestStreamingBidi, - } - - if len(base_param_to_attrs) > 1 and calling_form in client_streaming_forms: - raise types.InvalidRequestSetup( - "Too many base parameters for client side streaming form" - ) - # We can only flatten a collection of request parameters if they're a # subset of the flattened fields of the method. 
flattenable = self.flattenable_fields >= set(base_param_to_attrs) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index af2be2bde34a..d02d5f1f71b8 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -153,7 +153,7 @@ with open({{ attr.input_parameter }}, "rb") as f: client = {{ module_name }}.{{ client_name }}() {% endmacro %} -{% macro render_request_setup(full_request, module_name, request_type) %} +{% macro render_request_setup(full_request, module_name, request_type, calling_form, calling_form_enum) %} # Initialize request argument(s) {% for parameter_block in full_request.request_list if parameter_block.body %} {% if parameter_block.pattern %} @@ -179,6 +179,21 @@ request = {{ module_name }}.{{ request_type.ident.name }}( {{ parameter.base }}={{ parameter.base if parameter.body else parameter.single.value }}, {% endfor %} ) +{# Note: This template assumes only one request needs to be sent. When samples accept +configs the client streaming logic should be modified to allow 2+ request objects. #} +{# If client streaming, wrap the single request in a generator that produces 'requests' #} +{% if calling_form in [calling_form_enum.RequestStreamingBidi, + calling_form_enum.RequestStreamingClient] %} + +# This method expects an iterator which contains +# '{{module_name}}.{{ request_type.ident.name }}' objects +# Here we create a generator that yields a single `request` for +# demonstrative purposes. 
+requests = [request] +def request_generator(): + for request in requests: + yield request +{% endif %} {% endif %} {% endmacro %} @@ -219,7 +234,7 @@ await{{ " "}} {%- endif -%} {% if calling_form in [calling_form_enum.RequestStreamingBidi, calling_form_enum.RequestStreamingClient] %} -client.{{ sample.rpc|snake_case }}([{{ render_request_params(sample.request.request_list)|trim }}]) +client.{{ sample.rpc|snake_case }}(requests=request_generator()) {% else %}{# TODO: deal with flattening #} {# TODO: set up client streaming once some questions are answered #} client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim }}) diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 54db08ca2146..3191860c0c83 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -35,7 +35,7 @@ from {{ sample.module_namespace|join(".") }} import {{ sample.module_name }} """{{ sample.description }}""" {{ frags.render_client_setup(sample.module_name, sample.client_name)|indent }} - {{ frags.render_request_setup(sample.request, sample.module_name, sample.request_type)|indent }} + {{ frags.render_request_setup(sample.request, sample.module_name, sample.request_type, calling_form, calling_form_enum)|indent }} {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum, sample.transport) %} {{ frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.transport, sample.response)|indent -}} {% endwith %} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 
38efe9e40da6..1c6e1db5bebc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -38,8 +38,17 @@ async def sample_tail_log_entries(): resource_names=['resource_names_value_1', 'resource_names_value_2'], ) + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + def request_generator(): + for request in requests: + yield request + # Make the request - stream = await client.tail_log_entries([resource_names=['resource_names_value_1', 'resource_names_value_2']]) + stream = await client.tail_log_entries(requests=request_generator()) async for response in stream: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index a533ee9ad62b..e9a5dfc2ec0a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -38,8 +38,17 @@ def sample_tail_log_entries(): resource_names=['resource_names_value_1', 'resource_names_value_2'], ) + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single 
`request` for + # demonstrative purposes. + requests = [request] + def request_generator(): + for request in requests: + yield request + # Make the request - stream = client.tail_log_entries([resource_names=['resource_names_value_1', 'resource_names_value_2']]) + stream = client.tail_log_entries(requests=request_generator()) for response in stream: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py index 98190750d59c..7e9605052efd 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -38,8 +38,17 @@ async def sample_method_bidi_streaming(): my_string="my_string_value", ) + # This method expects an iterator which contains + # 'mollusca_v1.SignatureRequestOneRequiredField' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + def request_generator(): + for request in requests: + yield request + # Make the request - stream = await client.method_bidi_streaming([my_string="my_string_value"]) + stream = await client.method_bidi_streaming(requests=request_generator()) async for response in stream: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index de6ffb254d46..269fe197326c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -38,8 +38,17 @@ def sample_method_bidi_streaming(): my_string="my_string_value", ) + # This method expects an iterator which contains + # 'mollusca_v1.SignatureRequestOneRequiredField' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + def request_generator(): + for request in requests: + yield request + # Make the request - stream = client.method_bidi_streaming([my_string="my_string_value"]) + stream = client.method_bidi_streaming(requests=request_generator()) for response in stream: print(response) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index fde9969ef846..c42420be594c 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -1350,57 +1350,6 @@ def test_validate_request_reserved_input_param(dummy_api_schema): ) -def test_single_request_client_streaming(dummy_api_schema, - calling_form=types.CallingForm.RequestStreamingClient): - # Each API client method really only takes one parameter: - # either a single protobuf message or an iterable of protobuf messages. - # With unary request methods, python lets us describe attributes as positional - # and keyword parameters, which simplifies request construction. - # The 'base' in the transformed request refers to an attribute, and the - # 'field's refer to sub-attributes. - # Client streaming and bidirectional streaming methods can't use this notation, - # and generate an exception if there is more than one 'base'. 
- input_type = DummyMessage( - fields={ - "cephalopod": DummyField( - message=DummyMessage( - fields={ - "order": DummyField( - message=DummyMessage(type="ORDER_TYPE") - ) - }, - type="CEPHALOPOD_TYPE" - ) - ), - "gastropod": DummyField( - message=DummyMessage( - fields={ - "order": DummyField( - message=DummyMessage(type="ORDER_TYPE") - ) - }, - type="GASTROPOD_TYPE" - ) - ) - }, - type="MOLLUSC_TYPE" - ) - v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) - with pytest.raises(types.InvalidRequestSetup): - v.validate_and_transform_request( - types.CallingForm.RequestStreamingClient, - [ - {"field": "cephalopod.order", "value": "cephalopoda"}, - {"field": "gastropod.order", "value": "pulmonata"}, - ], - ) - - -def test_single_request_bidi_streaming(): - test_single_request_client_streaming( - types.CallingForm.RequestStreamingBidi) - - def test_validate_request_calling_form(): assert ( types.CallingForm.method_default(DummyMethod(lro=True)) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 4bf00b15a33a..5a4086868d0c 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -200,7 +200,7 @@ def test_render_request_unflattened(): check_template( ''' {% import "feature_fragments.j2" as frags %} - {{ frags.render_request_setup(request, module_name, request_type) }} + {{ frags.render_request_setup(request, module_name, request_type, calling_form, calling_form_enum) }} ''', ''' # Initialize request argument(s) @@ -289,7 +289,9 @@ def test_render_request_unflattened(): ) }, ident=common_types.DummyIdent(name="CreateMolluscRequest") - ) + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.Request, ) @@ -1015,7 +1017,7 @@ def test_render_method_call_bidi(): calling_form, calling_form_enum, transport) }} ''', ''' - client.categorize_mollusc([video]) + 
client.categorize_mollusc(requests=request_generator()) ''', request=samplegen.FullRequest( request_list=[ @@ -1040,7 +1042,7 @@ def test_render_method_call_bidi_async(): calling_form, calling_form_enum, transport) }} ''', ''' - await client.categorize_mollusc([video]) + await client.categorize_mollusc(requests=request_generator()) ''', request=samplegen.FullRequest( request_list=[ @@ -1065,7 +1067,7 @@ def test_render_method_call_client(): calling_form, calling_form_enum, transport) }} ''', ''' - client.categorize_mollusc([video]) + client.categorize_mollusc(requests=request_generator()) ''', request=samplegen.FullRequest( request_list=[ @@ -1090,7 +1092,7 @@ def test_render_method_call_client_async(): calling_form, calling_form_enum, transport) }} ''', ''' - await client.categorize_mollusc([video]) + await client.categorize_mollusc(requests=request_generator()) ''', request=samplegen.FullRequest( request_list=[ From 186cb25264c3e82abda80a7df5ccb92543fd840b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 8 Nov 2021 11:10:40 -0700 Subject: [PATCH 0664/1339] chore: release 0.56.1 (#1077) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 39405ab79354..dded1d706dac 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.56.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.0...v0.56.1) (2021-11-08) + + +### Bug Fixes + +* **snippetgen:** fix client streaming samples ([#1061](https://www.github.com/googleapis/gapic-generator-python/issues/1061)) ([64b9ad6](https://www.github.com/googleapis/gapic-generator-python/commit/64b9ad6e417a15cfbddf0e7a1b57036b8abfc829)) + ## 
[0.56.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.55.1...v0.56.0) (2021-11-05) From e68d7a928c7931c5152d7eb29ba154a985fe3756 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 8 Nov 2021 11:43:51 -0700 Subject: [PATCH 0665/1339] fix: don't enable snippetgen by default (#1078) * Revert "test: add tests for autogen-snippets option (#1055)" This reverts commit 185ecc7536db8309e6d2f03f9a66c36db18b1945. * Revert "feat: generate code snippets by default (#1044)" This reverts commit e46f443dbeffe16b63f97668801b06189769e972. --- .../gapic-generator/gapic/utils/options.py | 15 ++---------- .../tests/unit/generator/test_generator.py | 16 ++++--------- .../tests/unit/generator/test_options.py | 23 +++---------------- 3 files changed, 9 insertions(+), 45 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index d6c692c8fe49..d7bbe2473df6 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -37,7 +37,7 @@ class Options: warehouse_package_name: str = '' retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) - autogen_snippets: bool = True + autogen_snippets: bool = False templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) lazy_import: bool = False old_naming: bool = False @@ -132,17 +132,6 @@ def tweak_path(p): # Build the options instance. 
sample_paths = opts.pop('samples', []) - # autogen-snippets is True by default, so make sure users can disable - # by passing `autogen-snippets=false` - autogen_snippets = opts.pop( - "autogen-snippets", ["True"])[0] in ("True", "true", "T", "t", "TRUE") - - # NOTE: Snippets are not currently correct for the alternative (Ads) templates - # so always disable snippetgen in that case - # https://github.com/googleapis/gapic-generator-python/issues/1052 - if opts.get("old-naming"): - autogen_snippets = False - answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), @@ -154,7 +143,7 @@ def tweak_path(p): for s in sample_paths for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), - autogen_snippets=autogen_snippets, + autogen_snippets=bool(opts.pop("autogen-snippets", False)), templates=tuple(path.expanduser(i) for i in templates), lazy_import=bool(opts.pop('lazy-import', False)), old_naming=bool(opts.pop('old-naming', False)), diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 26873b1d33cc..d068250e9729 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -242,10 +242,7 @@ def test_get_response_enumerates_proto(): def test_get_response_divides_subpackages(): - # NOTE: autogen-snippets is intentionally disabled for this test - # The API schema below is incomplete and will result in errors when the - # snippetgen logic tries to parse it. 
- g = make_generator("autogen-snippets=false") + g = make_generator() api_schema = api.API.build( [ descriptor_pb2.FileDescriptorProto( @@ -280,7 +277,7 @@ def test_get_response_divides_subpackages(): """.strip() ) cgr = g.get_response(api_schema=api_schema, - opts=Options.build("autogen-snippets=false")) + opts=Options.build("")) assert len(cgr.file) == 6 assert {i.name for i in cgr.file} == { "foo/types/top.py", @@ -686,12 +683,7 @@ def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): ), ) - # NOTE: autogen-snippets is intentionally disabled for this test - # The API schema below is incomplete and will result in errors when the - # snippetgen logic attempts to parse it. - generator = make_generator( - f"samples={config_fpath},autogen-snippets=False") - print(generator) + generator = make_generator(f"samples={config_fpath}") generator._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = make_api( make_proto( @@ -751,7 +743,7 @@ def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): expected.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL actual = generator.get_response( - api_schema=api_schema, opts=Options.build("autogen-snippets=False") + api_schema=api_schema, opts=Options.build("") ) assert actual == expected diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index eaa12c971408..d5bd11f64e17 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -141,7 +141,9 @@ def test_options_service_config(fs): def test_options_bool_flags(): - # Most options are default False. + # All these options are default False. + # If new options violate this assumption, + # this test may need to be tweaked. # New options should follow the dash-case/snake_case convention. 
opt_str_to_attr_name = { name: re.sub(r"-", "_", name) @@ -159,22 +161,3 @@ def test_options_bool_flags(): options = Options.build(opt) assert getattr(options, attr) - - # Check autogen-snippets separately, as it is default True - options = Options.build("") - assert options.autogen_snippets - - options = Options.build("autogen-snippets=False") - assert not options.autogen_snippets - - -def test_options_autogen_snippets_false_for_old_naming(): - # NOTE: Snippets are not currently correct for the alternative (Ads) templates - # so always disable snippetgen in that case - # https://github.com/googleapis/gapic-generator-python/issues/1052 - options = Options.build("old-naming") - assert not options.autogen_snippets - - # Even if autogen-snippets is set to True, do not enable snippetgen - options = Options.build("old-naming,autogen-snippets=True") - assert not options.autogen_snippets From 9a38d3c0b6224a9a7bd347289830a40bac601b58 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 8 Nov 2021 11:03:03 -0800 Subject: [PATCH 0666/1339] chore: release 0.56.2 (#1079) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index dded1d706dac..151e5143dd40 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.56.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.1...v0.56.2) (2021-11-08) + + +### Bug Fixes + +* don't enable snippetgen by default ([#1078](https://www.github.com/googleapis/gapic-generator-python/issues/1078)) ([8bdb709](https://www.github.com/googleapis/gapic-generator-python/commit/8bdb70931a9ecb1c89fda9608697b0762770bc12)) + ### 
[0.56.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.0...v0.56.1) (2021-11-08) From c94057c7f8fcae9102beffc94b589d628c704485 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Nov 2021 00:16:20 +0100 Subject: [PATCH 0667/1339] chore(deps): update dependency jinja2 to v3.0.3 (#1082) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [jinja2](https://palletsprojects.com/p/jinja/) ([changelog](https://jinja.palletsprojects.com/changes/)) | `==3.0.2` -> `==3.0.3` | [![age](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.3/compatibility-slim/3.0.2)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/jinja2/3.0.3/confidence-slim/3.0.2)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. â™» **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). 
--- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index cc927fdc2fa9..62fcf42a5d5a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==8.0.3 google-api-core==2.1.1 googleapis-common-protos==1.53.0 -jinja2==3.0.2 +jinja2==3.0.3 MarkupSafe==2.0.1 protobuf==3.19.1 pypandoc==1.6.4 From e32226d1c7f40f838169437533b357417d5a3984 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Wed, 10 Nov 2021 15:37:56 -0800 Subject: [PATCH 0668/1339] tests: add system tests to check error details (#1076) * feat: adds system tests to check error details. --- .../tests/system/test_error_details.py | 78 +++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 packages/gapic-generator/tests/system/test_error_details.py diff --git a/packages/gapic-generator/tests/system/test_error_details.py b/packages/gapic-generator/tests/system/test_error_details.py new file mode 100644 index 000000000000..6061e8f015d1 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_error_details.py @@ -0,0 +1,78 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from google import showcase +from google.rpc import error_details_pb2 +from google.protobuf import any_pb2 +from grpc_status import rpc_status +from google.api_core import exceptions + + +def create_status(error_details=None): + status = rpc_status.status_pb2.Status() + status.code = 3 + status.message = ( + "test" + ) + status_detail = any_pb2.Any() + if error_details: + status_detail.Pack(error_details) + status.details.append(status_detail) + return status + + +def test_bad_request_details(echo): + def create_bad_request_details(): + bad_request_details = error_details_pb2.BadRequest() + field_violation = bad_request_details.field_violations.add() + field_violation.field = "test field" + field_violation.description = "test description" + return bad_request_details + bad_request_details = create_bad_request_details() + status = create_status(bad_request_details) + + with pytest.raises(exceptions.GoogleAPICallError) as e: + _ = echo.echo(showcase.EchoRequest( + error=status, + )) + assert e.details == [bad_request_details] + + +def test_precondition_failure_details(echo): + def create_precondition_failure_details(): + pf_details = error_details_pb2.PreconditionFailure() + violation = pf_details.violations.add() + violation.type = "test type" + violation.subject = "test subject" + violation.description = "test description" + return pf_details + + pf_details = create_precondition_failure_details() + status = create_status(pf_details) + + with pytest.raises(exceptions.GoogleAPICallError) as e: + _ = echo.echo(showcase.EchoRequest( + error=status, + )) + assert e.details == [pf_details] + + +def test_unknown_details(echo): + status = create_status() + with pytest.raises(exceptions.GoogleAPICallError) as e: + _ = echo.echo(showcase.EchoRequest( + error=status, + )) + assert e.details == status.details From 0acbf6ff2467c2edfe1a99926778b4b355be278a Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 17 Nov 2021 15:04:49 -0800 Subject: [PATCH 
0669/1339] feat: forward compatible diregapic LRO support (#1085) Detect whether a method fulfills the criteria for DIREGAPIC LRO. If so, fudge the name of the generated method by adding the suffix '_primitive'. This change is made for both the synchronous and async client variants. Any generated unit tests are changed to use and reference the fudged name. The names of the corresponding transport method is NOT changed. --- packages/gapic-generator/BUILD.bazel | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 46 +++ .../%sub/services/%service/async_client.py.j2 | 4 +- .../%sub/services/%service/client.py.j2 | 4 + .../%name_%version/%sub/test_%service.py.j2 | 107 +++-- .../google/cloud/extended_operations_pb2.py | 132 ++++++ .../fragments/google/api/annotations.proto | 31 ++ .../tests/fragments/google/api/http.proto | 375 ++++++++++++++++++ .../google/cloud/extended_operations.proto | 150 +++++++ .../test_diregapic_forwardcompat_lro.proto | 101 +++++ .../unit/schema/wrappers/test_message.py | 74 ++++ .../tests/unit/schema/wrappers/test_method.py | 58 +++ .../unit/schema/wrappers/test_service.py | 131 ++++++ 13 files changed, 1159 insertions(+), 56 deletions(-) create mode 100755 packages/gapic-generator/google/cloud/extended_operations_pb2.py create mode 100644 packages/gapic-generator/tests/fragments/google/api/annotations.proto create mode 100644 packages/gapic-generator/tests/fragments/google/api/http.proto create mode 100644 packages/gapic-generator/tests/fragments/google/cloud/extended_operations.proto create mode 100644 packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 38ae745d0d3e..b062e9ca7855 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -51,7 +51,7 @@ toolchain( py_binary( name = "gapic_plugin", - srcs = glob(["gapic/**/*.py"]), + srcs = glob(["gapic/**/*.py", "google/**/*.py"]), 
data = [":pandoc_binary"] + glob([ "gapic/**/*.j2", "gapic/**/.*.j2", diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index cbefe83c2ba7..eef7b9f14c64 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -41,6 +41,7 @@ from google.api import resource_pb2 from google.api_core import exceptions from google.api_core import path_template +from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 # type: ignore from google.protobuf.json_format import MessageToDict # type: ignore @@ -344,6 +345,39 @@ def oneof_fields(self, include_optional=False): return oneof_fields + @utils.cached_property + def is_diregapic_operation(self) -> bool: + if not self.name == "Operation": + return False + + name, status, error_code, error_message = False, False, False, False + duplicate_msg = f"Message '{self.name}' has multiple fields with the same operation response mapping: {{}}" + for f in self.field: + maybe_op_mapping = f.options.Extensions[ex_ops_pb2.operation_field] + OperationResponseMapping = ex_ops_pb2.OperationResponseMapping + + if maybe_op_mapping == OperationResponseMapping.NAME: + if name: + raise TypeError(duplicate_msg.format("name")) + name = True + + if maybe_op_mapping == OperationResponseMapping.STATUS: + if status: + raise TypeError(duplicate_msg.format("status")) + status = True + + if maybe_op_mapping == OperationResponseMapping.ERROR_CODE: + if error_code: + raise TypeError(duplicate_msg.format("error_code")) + error_code = True + + if maybe_op_mapping == OperationResponseMapping.ERROR_MESSAGE: + if error_message: + raise TypeError(duplicate_msg.format("error_message")) + error_message = True + + return name and status and error_code and error_message + @utils.cached_property def required_fields(self) -> Sequence['Field']: required_fields = [ @@ -765,6 +799,10 @@ class Method: def 
__getattr__(self, name): return getattr(self.method_pb, name) + @property + def is_operation_polling_method(self): + return self.output.is_diregapic_operation and self.options.Extensions[ex_ops_pb2.operation_polling_method] + @utils.cached_property def client_output(self): return self._client_output(enable_asyncio=False) @@ -838,6 +876,10 @@ def _client_output(self, enable_asyncio: bool): # Return the usual output. return self.output + @property + def operation_service(self) -> Optional[str]: + return self.options.Extensions[ex_ops_pb2.operation_service] + @property def is_deprecated(self) -> bool: """Returns true if the method is deprecated, false otherwise.""" @@ -1172,6 +1214,10 @@ class Service: def __getattr__(self, name): return getattr(self.service_pb, name) + @property + def custom_polling_method(self) -> Optional[Method]: + return next((m for m in self.methods.values() if m.is_operation_polling_method), None) + @property def client_name(self) -> str: """Returns the name of the generated client class""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index bbdffc3ba817..2f30d6cfab02 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -150,7 +150,9 @@ class {{ service.async_client_name }}: ) {% for method in service.methods.values() %} - {%+ if not method.server_streaming %}async {% endif %}def {{ method.name|snake_case }}(self, + {% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} + {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, + {% endwith %} {% if not method.client_streaming %} request: 
Union[{{ method.input.ident }}, dict] = None, *, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 917fbd0fe6d5..25e4f2ca5418 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -315,7 +315,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() %} + {% if method.operation_service %}{# DIREGAPIC LRO #} + def {{ method.name|snake_case }}_unary(self, + {% else %} def {{ method.name|snake_case }}(self, + {% endif %}{# DIREGAPIC LRO #} {% if not method.client_streaming %} request: Union[{{ method.input.ident }}, dict] = None, *, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 541ff0312464..b53afebe8d9a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -430,8 +430,8 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): {% endif %} -{% for method in service.methods.values() if 'grpc' in opts.transport %} -def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): +{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} +def test_{{ method_name }}(transport: str = 'grpc', request_type={{ method.input.ident }}): client = {{ 
service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -472,7 +472,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) {% else %} - response = client.{{ method.name|snake_case }}(request) + response = client.{{ method_name }}(request) {% endif %} # Establish that the underlying gRPC stub method was called. @@ -512,12 +512,12 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m {% endif %} -def test_{{ method.name|snake_case }}_from_dict(): - test_{{ method.name|snake_case }}(request_type=dict) +def test_{{ method_name }}_from_dict(): + test_{{ method_name }}(request_type=dict) {% if not method.client_streaming %} -def test_{{ method.name|snake_case }}_empty_call(): +def test_{{ method_name }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = {{ service.client_name }}( @@ -529,7 +529,7 @@ def test_{{ method.name|snake_case }}_empty_call(): with mock.patch.object( type(client.transport.{{ method.name|snake_case }}), '__call__') as call: - client.{{ method.name|snake_case }}() + client.{{ method_name }}() call.assert_called() _, args, _ = call.mock_calls[0] {% if method.client_streaming %} @@ -541,7 +541,7 @@ def test_{{ method.name|snake_case }}_empty_call(): @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): +async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -589,7 +589,7 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio {% elif method.client_streaming and not method.server_streaming %} response = await (await client.{{ method.name|snake_case }}(iter(requests))) {% else %} - response = await client.{{ method.name|snake_case }}(request) + response = await client.{{ method_name }}(request) {% endif %} # Establish that the underlying gRPC stub method was called. 
@@ -626,12 +626,12 @@ async def test_{{ method.name|snake_case }}_async(transport: str = 'grpc_asyncio @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_async_from_dict(): - await test_{{ method.name|snake_case }}_async(request_type=dict) +async def test_{{ method_name }}_async_from_dict(): + await test_{{ method_name }}_async(request_type=dict) {% if method.field_headers and not method.client_streaming %} -def test_{{ method.name|snake_case }}_field_headers(): +def test_{{ method_name }}_field_headers(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -657,7 +657,7 @@ def test_{{ method.name|snake_case }}_field_headers(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - client.{{ method.name|snake_case }}(request) + client.{{ method_name }}(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -676,7 +676,7 @@ def test_{{ method.name|snake_case }}_field_headers(): @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_field_headers_async(): +async def test_{{ method_name }}_field_headers_async(): client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -703,7 +703,7 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): {% else %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall({{ method.output.ident }}()) {% endif %} - await client.{{ method.name|snake_case }}(request) + await client.{{ method_name }}(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -722,7 +722,7 @@ async def test_{{ method.name|snake_case }}_field_headers_async(): {% endif %} {% if method.ident.package != method.input.ident.package %} -def test_{{ method.name|snake_case }}_from_dict_foreign(): +def test_{{ method_name }}_from_dict_foreign(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -740,7 +740,7 @@ def test_{{ method.name|snake_case }}_from_dict_foreign(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - response = client.{{ method.name|snake_case }}(request={ + response = client.{{ method_name }}(request={ {% for field in method.input.fields.values() %} '{{ field.name }}': {{ field.mock_value }}, {% endfor %} @@ -751,7 +751,7 @@ def test_{{ method.name|snake_case }}_from_dict_foreign(): {% endif %} {% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_flattened(): +def test_{{ method_name }}_flattened(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -772,7 +772,7 @@ def test_{{ method.name|snake_case }}_flattened(): {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.{{ method.name|snake_case }}( + client.{{ method_name }}( {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, {% endfor %} @@ -807,7 +807,7 @@ def test_{{ method.name|snake_case }}_flattened(): -def test_{{ method.name|snake_case }}_flattened_error(): +def test_{{ method_name }}_flattened_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -815,7 +815,7 @@ def test_{{ method.name|snake_case }}_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.{{ method.name|snake_case }}( + client.{{ method_name }}( {{ method.input.ident }}(), {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, @@ -824,7 +824,7 @@ def test_{{ method.name|snake_case }}_flattened_error(): @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_flattened_async(): +async def test_{{ method_name }}_flattened_async(): client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -865,7 +865,7 @@ async def test_{{ method.name|snake_case }}_flattened_async(): {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.{{ method.name|snake_case }}( + response = await client.{{ method_name }}( {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, {% endfor %} @@ -900,7 +900,7 @@ async def test_{{ method.name|snake_case }}_flattened_async(): @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_flattened_error_async(): +async def test_{{ method_name }}_flattened_error_async(): client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -908,7 +908,7 @@ async def test_{{ method.name|snake_case }}_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.{{ method.name|snake_case }}( + await client.{{ method_name }}( {{ method.input.ident }}(), {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, @@ -918,7 +918,7 @@ async def test_{{ method.name|snake_case }}_flattened_error_async(): {% if method.paged_result_field %} -def test_{{ method.name|snake_case }}_pager(): +def test_{{ method_name }}_pager(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials, ) @@ -968,7 +968,7 @@ def test_{{ method.name|snake_case }}_pager(): )), ) {% endif %} - pager = client.{{ method.name|snake_case }}(request={}) + pager = client.{{ method_name }}(request={}) assert pager._metadata == metadata @@ -977,7 +977,7 @@ def test_{{ method.name|snake_case }}_pager(): assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) -def test_{{ method.name|snake_case }}_pages(): +def test_{{ method_name }}_pages(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials, ) @@ -1014,12 +1014,12 @@ def test_{{ method.name|snake_case }}_pages(): ), RuntimeError, ) - pages = list(client.{{ method.name|snake_case }}(request={}).pages) + pages = list(client.{{ method_name }}(request={}).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_async_pager(): +async def test_{{ method_name }}_async_pager(): client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials, ) @@ -1056,7 +1056,7 @@ async def test_{{ method.name|snake_case }}_async_pager(): ), RuntimeError, ) - async_pager = await client.{{ method.name|snake_case }}(request={},) + async_pager = await client.{{ method_name }}(request={},) assert async_pager.next_page_token == 'abc' responses = [] async for response in async_pager: @@ -1067,7 +1067,7 @@ async def test_{{ 
method.name|snake_case }}_async_pager(): for i in responses) @pytest.mark.asyncio -async def test_{{ method.name|snake_case }}_async_pages(): +async def test_{{ method_name }}_async_pages(): client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials, ) @@ -1105,23 +1105,23 @@ async def test_{{ method.name|snake_case }}_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.{{ method.name|snake_case }}(request={})).pages: + async for page_ in (await client.{{ method_name }}(request={})).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} -def test_{{ method.name|snake_case }}_raw_page_lro(): +def test_{{ method_name }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() assert response.raw_page is response -{% endif %} {# method.paged_result_field #} +{% endif %}{# method.paged_result_field #}{% endwith %}{# method_name #} {% endfor %} {# method in methods for grpc #} {% for method in service.methods.values() if 'rest' in opts.transport and - method.http_options %} + method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} {# TODO(kbandes): remove this if condition when lro and streaming are supported. 
#} {% if not method.lro and not (method.server_streaming or method.client_streaming) %} -def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): +def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1177,7 +1177,7 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) {% else %} - response = client.{{ method.name|snake_case }}(request) + response = client.{{ method_name }}(request) {% endif %} {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} @@ -1204,7 +1204,7 @@ def test_{{ method.name|snake_case }}_rest(transport: str = 'rest', request_type {% endif %} -def test_{{ method.name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): +def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1233,16 +1233,16 @@ def test_{{ method.name|snake_case }}_rest_bad_request(transport: str = 'rest', {% if method.client_streaming %} client.{{ method.name|snake_case }}(iter(requests)) {% else %} - client.{{ method.name|snake_case }}(request) + client.{{ method_name }}(request) {% endif %} -def test_{{ method.name|snake_case }}_rest_from_dict(): - test_{{ method.name|snake_case }}_rest(request_type=dict) +def test_{{ method_name }}_rest_from_dict(): + test_{{ method_name }}_rest(request_type=dict) {% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_rest_flattened(transport: str = 'rest'): +def test_{{ method_name }}_rest_flattened(transport: str = 'rest'): client = 
{{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1286,7 +1286,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(transport: str = 'rest'): {% endfor %} ) mock_args.update(sample_request) - client.{{ method.name|snake_case }}(**mock_args) + client.{{ method_name }}(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -1298,7 +1298,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(transport: str = 'rest'): {# TODO(kbandes) - reverse-transcode request args to check all request fields #} -def test_{{ method.name|snake_case }}_rest_flattened_error(transport: str = 'rest'): +def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1307,7 +1307,7 @@ def test_{{ method.name|snake_case }}_rest_flattened_error(transport: str = 'res # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.{{ method.name|snake_case }}( + client.{{ method_name }}( {{ method.input.ident }}(), {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, @@ -1317,7 +1317,7 @@ def test_{{ method.name|snake_case }}_rest_flattened_error(transport: str = 'res {% if method.paged_result_field %} -def test_{{ method.name|snake_case }}_rest_pager(): +def test_{{ method_name }}_rest_pager(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -1401,7 +1401,7 @@ def test_{{ method.name|snake_case }}_rest_pager(): {% endif %} {% endfor %} - pager = client.{{ method.name|snake_case }}(request=sample_request) + pager = client.{{ method_name }}(request=sample_request) {% if method.paged_result_field.map %} assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) @@ -1425,7 +1425,7 @@ def test_{{ method.name|snake_case }}_rest_pager(): for i in results) {% endif %} - pages = list(client.{{ method.name|snake_case }}(request=sample_request).pages) + pages = list(client.{{ method_name }}(request=sample_request).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -1433,7 +1433,7 @@ def test_{{ method.name|snake_case }}_rest_pager(): {% endif %} {# paged methods #} {%- else %} -def test_{{ method.name|snake_case }}_rest_error(): +def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -1442,7 +1442,7 @@ def test_{{ method.name|snake_case }}_rest_error(): # Since a `google.api.http` annotation is required for using a rest transport # method, this should error. with pytest.raises(RuntimeError) as runtime_error: - client.{{ method.name|snake_case }}({}) + client.{{ method_name }}({}) assert ('Cannot define a method without a valid `google.api.http` annotation.' 
in str(runtime_error.value)) {%- else %} @@ -1450,11 +1450,10 @@ def test_{{ method.name|snake_case }}_rest_error(): # TODO(yon-mg): Remove when this method has a working implementation # or testing straegy with pytest.raises(NotImplementedError): - client.{{ method.name|snake_case }}({}) + client.{{ method_name }}({}) {%- endif %} -{% endif %} - +{% endif %}{% endwith %}{# method_name #} {% endfor -%} {#- method in methods for rest #} def test_credentials_transport_error(): diff --git a/packages/gapic-generator/google/cloud/extended_operations_pb2.py b/packages/gapic-generator/google/cloud/extended_operations_pb2.py new file mode 100755 index 000000000000..439f7f0be3ba --- /dev/null +++ b/packages/gapic-generator/google/cloud/extended_operations_pb2.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/extended_operations.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/extended_operations.proto', + package='google.cloud', + syntax='proto3', + serialized_options=b'\n\020com.google.cloudB\027ExtendedOperationsProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\242\002\004GAPI', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n&google/cloud/extended_operations.proto\x12\x0cgoogle.cloud\x1a 
google/protobuf/descriptor.proto*b\n\x18OperationResponseMapping\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04NAME\x10\x01\x12\n\n\x06STATUS\x10\x02\x12\x0e\n\nERROR_CODE\x10\x03\x12\x11\n\rERROR_MESSAGE\x10\x04:_\n\x0foperation_field\x12\x1d.google.protobuf.FieldOptions\x18\xfd\x08 \x01(\x0e\x32&.google.cloud.OperationResponseMapping:?\n\x17operation_request_field\x12\x1d.google.protobuf.FieldOptions\x18\xfe\x08 \x01(\t:@\n\x18operation_response_field\x12\x1d.google.protobuf.FieldOptions\x18\xff\x08 \x01(\t::\n\x11operation_service\x12\x1e.google.protobuf.MethodOptions\x18\xe1\t \x01(\t:A\n\x18operation_polling_method\x12\x1e.google.protobuf.MethodOptions\x18\xe2\t \x01(\x08\x42y\n\x10\x63om.google.cloudB\x17\x45xtendedOperationsProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\xa2\x02\x04GAPIb\x06proto3' + , + dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) + +_OPERATIONRESPONSEMAPPING = _descriptor.EnumDescriptor( + name='OperationResponseMapping', + full_name='google.cloud.OperationResponseMapping', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='UNDEFINED', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NAME', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='STATUS', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='ERROR_CODE', index=3, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='ERROR_MESSAGE', index=4, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + 
serialized_options=None, + serialized_start=90, + serialized_end=188, +) +_sym_db.RegisterEnumDescriptor(_OPERATIONRESPONSEMAPPING) + +OperationResponseMapping = enum_type_wrapper.EnumTypeWrapper(_OPERATIONRESPONSEMAPPING) +UNDEFINED = 0 +NAME = 1 +STATUS = 2 +ERROR_CODE = 3 +ERROR_MESSAGE = 4 + +OPERATION_FIELD_FIELD_NUMBER = 1149 +operation_field = _descriptor.FieldDescriptor( + name='operation_field', full_name='google.cloud.operation_field', index=0, + number=1149, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) +OPERATION_REQUEST_FIELD_FIELD_NUMBER = 1150 +operation_request_field = _descriptor.FieldDescriptor( + name='operation_request_field', full_name='google.cloud.operation_request_field', index=1, + number=1150, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) +OPERATION_RESPONSE_FIELD_FIELD_NUMBER = 1151 +operation_response_field = _descriptor.FieldDescriptor( + name='operation_response_field', full_name='google.cloud.operation_response_field', index=2, + number=1151, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) +OPERATION_SERVICE_FIELD_NUMBER = 1249 +operation_service = _descriptor.FieldDescriptor( + name='operation_service', full_name='google.cloud.operation_service', index=3, + number=1249, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) +OPERATION_POLLING_METHOD_FIELD_NUMBER = 1250 +operation_polling_method = _descriptor.FieldDescriptor( + name='operation_polling_method', full_name='google.cloud.operation_polling_method', index=4, + number=1250, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=True, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) + +DESCRIPTOR.enum_types_by_name['OperationResponseMapping'] = _OPERATIONRESPONSEMAPPING +DESCRIPTOR.extensions_by_name['operation_field'] = operation_field +DESCRIPTOR.extensions_by_name['operation_request_field'] = operation_request_field +DESCRIPTOR.extensions_by_name['operation_response_field'] = operation_response_field +DESCRIPTOR.extensions_by_name['operation_service'] = operation_service +DESCRIPTOR.extensions_by_name['operation_polling_method'] = operation_polling_method +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +operation_field.enum_type = _OPERATIONRESPONSEMAPPING +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(operation_field) +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(operation_request_field) +google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(operation_response_field) +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(operation_service) +google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(operation_polling_method) + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/tests/fragments/google/api/annotations.proto b/packages/gapic-generator/tests/fragments/google/api/annotations.proto new 
file mode 100644 index 000000000000..efdab3db6ca8 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/api/annotations.proto @@ -0,0 +1,31 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. + HttpRule http = 72295728; +} diff --git a/packages/gapic-generator/tests/fragments/google/api/http.proto b/packages/gapic-generator/tests/fragments/google/api/http.proto new file mode 100644 index 000000000000..113fa936a09e --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/api/http.proto @@ -0,0 +1,375 @@ +// Copyright 2015 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. + bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. 
Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. +// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. 
+// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" 
})` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. 
Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. 
+// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. 
+// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. 
+ oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. 
+ string path = 2; +} diff --git a/packages/gapic-generator/tests/fragments/google/cloud/extended_operations.proto b/packages/gapic-generator/tests/fragments/google/cloud/extended_operations.proto new file mode 100644 index 000000000000..0a1ff92d1ec2 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/cloud/extended_operations.proto @@ -0,0 +1,150 @@ +// Copyright 2021 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This file contains custom annotations that are used by GAPIC generators to +// handle Long Running Operation methods (LRO) that are NOT compliant with +// https://google.aip.dev/151. These annotations are public for technical +// reasons only. Please DO NOT USE them in your protos. +syntax = "proto3"; + +package google.cloud; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/extendedops;extendedops"; +option java_multiple_files = true; +option java_outer_classname = "ExtendedOperationsProto"; +option java_package = "com.google.cloud"; +option objc_class_prefix = "GAPI"; + +// FieldOptions to match corresponding fields in the initial request, +// polling request and operation response messages. 
+// +// Example: +// +// In an API-specific operation message: +// +// message MyOperation { +// string http_error_message = 1 [(operation_field) = ERROR_MESSAGE]; +// int32 http_error_status_code = 2 [(operation_field) = ERROR_CODE]; +// string id = 3 [(operation_field) = NAME]; +// Status status = 4 [(operation_field) = STATUS]; +// } +// +// In a polling request message (the one which is used to poll for an LRO +// status): +// +// message MyPollingRequest { +// string operation = 1 [(operation_response_field) = "id"]; +// string project = 2; +// string region = 3; +// } +// +// In an initial request message (the one which starts an LRO): +// +// message MyInitialRequest { +// string my_project = 2 [(operation_request_field) = "project"]; +// string my_region = 3 [(operation_request_field) = "region"]; +// } +// +extend google.protobuf.FieldOptions { + // A field annotation that maps fields in an API-specific Operation object to + // their standard counterparts in google.longrunning.Operation. See + // OperationResponseMapping enum definition. + OperationResponseMapping operation_field = 1149; + + // A field annotation that maps fields in the initial request message + // (the one which started the LRO) to their counterparts in the polling + // request message. For non-standard LRO, the polling response may be missing + // some of the information needed to make a subsequent polling request. The + // missing information (for example, project or region ID) is contained in the + // fields of the initial request message that this annotation must be applied + // to. The string value of the annotation corresponds to the name of the + // counterpart field in the polling request message that the annotated field's + // value will be copied to. + string operation_request_field = 1150; + + // A field annotation that maps fields in the polling request message to their + // counterparts in the initial and/or polling response message. 
The initial + // and the polling methods return an API-specific Operation object. Some of + // the fields from that response object must be reused in the subsequent + // request (like operation name/ID) to fully identify the polled operation. + // This annotation must be applied to the fields in the polling request + // message, the string value of the annotation must correspond to the name of + // the counterpart field in the Operation response object whose value will be + // copied to the annotated field. + string operation_response_field = 1151; +} + +// MethodOptions to identify the actual service and method used for operation +// status polling. +// +// Example: +// +// In a method, which starts an LRO: +// +// service MyService { +// rpc Foo(MyInitialRequest) returns (MyOperation) { +// option (operation_service) = "MyPollingService"; +// } +// } +// +// In a polling method: +// +// service MyPollingService { +// rpc Get(MyPollingRequest) returns (MyOperation) { +// option (operation_polling_method) = true; +// } +// } +extend google.protobuf.MethodOptions { + // A method annotation that maps an LRO method (the one which starts an LRO) + // to the service, which will be used to poll for the operation status. The + // annotation must be applied to the method which starts an LRO, the string + // value of the annotation must correspond to the name of the service used to + // poll for the operation status. + string operation_service = 1249; + + // A method annotation that marks methods that can be used for polling + // operation status (e.g. the MyPollingService.Get(MyPollingRequest) method). + bool operation_polling_method = 1250; +} + +// An enum to be used to mark the essential (for polling) fields in an +// API-specific Operation object. A custom Operation object may contain many +// different fields, but only few of them are essential to conduct a successful +// polling process. +enum OperationResponseMapping { + // Do not use. 
+ UNDEFINED = 0; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.name. + NAME = 1; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.done. If the annotated field is of + // an enum type, `annotated_field_name == EnumType.DONE` semantics should be + // equivalent to `Operation.done == true`. If the annotated field is of type + // boolean, then it should follow the same semantics as Operation.done. + // Otherwise, a non-empty value should be treated as `Operation.done == true`. + STATUS = 2; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.error.code. + ERROR_CODE = 3; + + // A field in an API-specific (custom) Operation object which carries the same + // meaning as google.longrunning.Operation.error.message. + ERROR_MESSAGE = 4; +} diff --git a/packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto b/packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto new file mode 100644 index 000000000000..f7206a7c6b85 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto @@ -0,0 +1,101 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+syntax = "proto3"; + +package google.fragment; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/cloud/extended_operations.proto"; + +message Operation { + enum Status { + // A value indicating that the enum field is not set. + UNDEFINED_STATUS = 0; + DONE = 2104194; + PENDING = 35394935; + RUNNING = 121282975; + } + + optional string name = 1 [(google.cloud.operation_field) = NAME]; + optional string http_error_message = 202521945 [(google.cloud.operation_field) = ERROR_MESSAGE]; + optional int32 http_error_status_code = 312345196 [(google.cloud.operation_field) = ERROR_CODE]; + optional Status status = 181260274 [(google.cloud.operation_field) = STATUS]; +} + +message GetExpensiveComputationRequest { + string name = 1; + string computation = 2; + string fragment = 3; +} + +message DeleteExpensiveComputationRequest { + string computation = 1; + string fragment = 2; +} + +message DeleteOperationResponse { + bool success = 1; +} + +message StartExpensiveComputationRequest { + string computation = 1; + string fragment = 2; +} + +message StartCheapComputationRequest { + string computation = 1; + string fragment = 2; +} + +message StartCheapComputationResponse {} + +// Note: the name is a hint, but this is the de facto Operations client. +// It should _not_ generate methods with disambiguated names. 
+service DefactoOperationsClient { + option (google.api.default_host) = "my.example.com"; + + rpc Get(GetExpensiveComputationRequest) returns (Operation) { + option (google.cloud.operation_polling_method) = true; + option (google.api.http) = { + get: "/expensive/v1/computations/{computation}/fragments/{fragment}" + }; + } + + + rpc Delete(DeleteExpensiveComputationRequest) returns (DeleteOperationResponse) { + option (google.api.http) = { + delete: "/expensive/v1/computations/{computation}/fragments/{fragment}" + }; + + } +} + + +service DoStuff { + option (google.api.default_host) = "my.example.com"; + + rpc StartExpensiveComputation(StartExpensiveComputationRequest) returns (Operation) { + option (google.cloud.operation_service) = "GlobalOperations"; + option (google.api.http) = { + post: "/expensive/v1/computations/{computation}/fragments/{fragment}" + }; + } + + rpc StartCheapComputation(StartCheapComputationRequest) returns (StartCheapComputationResponse) { + option (google.api.http) = { + post: "/expensive/v1/computations/{computation}/fragments/{fragment}"; + }; + } +} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index f0b5d611663a..da21f66ebdfa 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -20,6 +20,7 @@ from google.api import field_behavior_pb2 from google.api import resource_pb2 +from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import naming @@ -307,3 +308,76 @@ def test_required_fields(): ) assert set(request.required_fields) == {mass_kg, length_m, color} + + +def test_is_diregapic_operation(): + T = descriptor_pb2.FieldDescriptorProto.Type + + # Canonical Operation + + operation = make_message( + name="Operation", + fields=( + make_field(name=name, 
type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + ) + for f in operation.field: + options = descriptor_pb2.FieldOptions() + # Note: The field numbers were carefully chosen to be the corresponding enum values. + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + assert operation.is_diregapic_operation + + # Missing a required field + + missing = make_message( + name="Operation", + fields=( + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + # Missing error_message + for i, name in enumerate(("name", "status", "error_code"), start=1) + ) + ) + for f in missing.field: + options = descriptor_pb2.FieldOptions() + # Note: The field numbers were carefully chosen to be the corresponding enum values. + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + assert not missing.is_diregapic_operation + + # Named incorrectly + + my_message = make_message( + name="MyMessage", + fields=( + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + ) + for f in my_message.field: + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + assert not my_message.is_diregapic_operation + + # Duplicated annotation + for mapping in range(1, 5): + duplicate = make_message( + name="Operation", + fields=( + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + ) + for f in duplicate.field: + options = descriptor_pb2.FieldOptions() + # All set to the same value + options.Extensions[ex_ops_pb2.operation_field] = mapping + f.options.MergeFrom(options) + + with pytest.raises(TypeError): + duplicate.is_diregapic_operation diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 42198e09ebcc..774f81b17263 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -19,6 +19,7 @@ from google.api import field_behavior_pb2 from google.api import http_pb2 +from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import metadata @@ -718,3 +719,60 @@ def test_flattened_oneof_fields(): } actual = method.flattened_field_to_key assert expected == actual + + +def test_is_operation_polling_method(): + T = descriptor_pb2.FieldDescriptorProto.Type + + operation = make_message( + name="Operation", + fields=( + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ), + ) + for f in operation.field: + options = descriptor_pb2.FieldOptions() + # Note: The field numbers were carefully chosen to be the corresponding enum values. 
+ options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + request = make_message( + name="GetOperation", + fields=[ + make_field(name="name", type=T.Value("TYPE_STRING"), number=1) + ], + ) + + # Correct positive + options = descriptor_pb2.MethodOptions() + options.Extensions[ex_ops_pb2.operation_polling_method] = True + polling_method = make_method( + name="Get", + input_message=request, + output_message=operation, + options=options, + ) + + assert polling_method.is_operation_polling_method + + # Normal method that returns operation + normal_method = make_method( + name="Get", + input_message=request, + output_message=operation, + ) + + assert not normal_method.is_operation_polling_method + + # Method with invalid options combination + response = make_message(name="Response", fields=[make_field(name="name")]) + + invalid_method = make_method( + name="Get", + input_message=request, + output_message=response, + options=options, # Reuse options from the actual polling method + ) + + assert not invalid_method.is_operation_polling_method diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index f93ddc814e8a..7cd41799ed8f 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -17,6 +17,7 @@ import typing from google.api import resource_pb2 +from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import imp @@ -524,3 +525,133 @@ def test_resource_response(): expected = {squid_resource, clam_resource} actual = mollusc_service.resource_messages assert expected == actual + + +def test_operation_polling_method(): + T = descriptor_pb2.FieldDescriptorProto.Type + + operation = make_message( + name="Operation", + fields=( + make_field(name=name, type=T.Value("TYPE_STRING"), 
number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ), + ) + for f in operation.field: + options = descriptor_pb2.FieldOptions() + # Note: The field numbers were carefully chosen to be the corresponding enum values. + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + request = make_message( + name="GetOperation", + fields=[ + make_field(name="name", type=T.Value("TYPE_STRING"), number=1) + ], + ) + + options = descriptor_pb2.MethodOptions() + options.Extensions[ex_ops_pb2.operation_polling_method] = True + polling_method = make_method( + name="Get", + input_message=request, + output_message=operation, + options=options, + ) + + # Even though polling_method returns an Operation, it isn't an LRO + ops_service = make_service( + name="CustomOperations", + methods=[ + polling_method, + make_method( + name="Delete", + input_message=make_message(name="Input"), + output_message=make_message("Output"), + ), + ], + ) + + assert ops_service.custom_polling_method == polling_method + + # Methods are LROs, so they are not polling methods + user_service = make_service( + name="ComputationStarter", + methods=[ + make_method( + name="Start", + input_message=make_message(name="StartRequest"), + output_message=operation, + ), + ], + ) + + assert not user_service.custom_polling_method + + +def test_diregapic_lro_detection(): + T = descriptor_pb2.FieldDescriptorProto.Type + + operation = make_message( + name="Operation", + fields=( + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ), + ) + for f in operation.field: + options = descriptor_pb2.FieldOptions() + # Note: The field numbers were carefully chosen to be the corresponding enum values. 
+        options.Extensions[ex_ops_pb2.operation_field] = f.number
+        f.options.MergeFrom(options)
+
+    request = make_message(
+        name="GetOperation",
+        fields=[
+            make_field(name="name", type=T.Value("TYPE_STRING"), number=1)
+        ],
+    )
+
+    options = descriptor_pb2.MethodOptions()
+    options.Extensions[ex_ops_pb2.operation_polling_method] = True
+    polling_method = make_method(
+        name="Get",
+        input_message=request,
+        output_message=operation,
+        options=options,
+    )
+
+    ops_service = make_service(
+        name="CustomOperations",
+        methods=[
+            polling_method,
+            make_method(
+                name="Delete",
+                input_message=make_message(name="Input"),
+                output_message=make_message("Output"),
+            ),
+        ],
+    )
+
+    assert not polling_method.operation_service
+
+    # Methods are LROs, so they are not polling methods
+    lro_opts = descriptor_pb2.MethodOptions()
+    lro_opts.Extensions[ex_ops_pb2.operation_service] = "CustomOperations"
+    lro = make_method(
+        name="Start",
+        input_message=make_message(name="StartRequest"),
+        output_message=operation,
+        options=lro_opts,
+    )
+    user_service = make_service(
+        name="ComputationStarter",
+        methods=[
+            lro,
+        ],
+    )
+
+    # Note: we can't have the operation_service property point to the actual operation service
+    # because Service objects can't perform the lookup.
+    # Instead we kick that can to the API object and make it do the lookup and verification.
+ assert lro.operation_service == "CustomOperations" From 0a32992179fcc436290168cf9a95fe0bb7b0ba36 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 17 Nov 2021 23:18:37 +0000 Subject: [PATCH 0670/1339] chore: release 0.57.0 (#1090) :robot: I have created a release \*beep\* \*boop\* --- ## [0.57.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.2...v0.57.0) (2021-11-17) ### Features * forward compatible diregapic LRO support ([#1085](https://www.github.com/googleapis/gapic-generator-python/issues/1085)) ([aa7f4d5](https://www.github.com/googleapis/gapic-generator-python/commit/aa7f4d568f7f43738ab3489fc84ce6bc5d6bda18)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 151e5143dd40..fcc05d65530b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.57.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.2...v0.57.0) (2021-11-17) + + +### Features + +* forward compatible diregapic LRO support ([#1085](https://www.github.com/googleapis/gapic-generator-python/issues/1085)) ([aa7f4d5](https://www.github.com/googleapis/gapic-generator-python/commit/aa7f4d568f7f43738ab3489fc84ce6bc5d6bda18)) + ### [0.56.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.1...v0.56.2) (2021-11-08) From de0eaf9aa136656e81f7b7c9f70035b9baa09a02 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 18 Nov 2021 11:34:33 -0700 Subject: [PATCH 0671/1339] fix: fix resource path args for paths with =** (#1089) Some resources have `=**` in 
the segment. . A segment with `=**` is not matched by the current regex: ```py Python 3.9.5 (default, Jul 27 2021, 22:06:04) [GCC 10.2.1 20210110] on linux Type "help", "copyright", "credits" or "license" for more information. >>> import re >>> pattern = re.compile(r"\{([a-zA-Z0-9_-]+)\}") >>> pattern.search("projects/{project}/metricDescriptors/{metric_descriptor=**}").groups() ('project',) ``` This pattern shows up in the some real APIs ([monitoring protos](https://github.com/googleapis/googleapis/blob/3b9c98eda3bded7bb01e2f5f5c7d20f4a5d3e121/google/monitoring/v3/metric_service.proto#L39-L46), `google/cloud/iap/v1`, `google/cloud/iap/v1beta1`, `google/cloud/recommendationengine/v1beta1`, `google/devtools/remoteworkers/v1test2`, `google/home/graph/v1`, `google/iam/v1`). `**` is mentioned in passing in [Resource Names](https://cloud.google.com/apis/design/resource_names#q_how_should_i_generate_and_parse_resource_names). I was not able to find an explanation of what wildcards are considered valid in https://google.aip.dev/122 or https://google.aip.dev/client-libraries/4231. 
Monitoring Proto Example: ```proto option (google.api.resource_definition) = { type: "monitoring.googleapis.com/MetricDescriptor" pattern: "projects/{project}/metricDescriptors/{metric_descriptor=**}" pattern: "organizations/{organization}/metricDescriptors/{metric_descriptor=**}" pattern: "folders/{folder}/metricDescriptors/{metric_descriptor=**}" pattern: "*" history: ORIGINALLY_SINGLE_PATTERN }; ``` --- .../gapic/samplegen/samplegen.py | 7 ++++++- .../gapic-generator/gapic/schema/wrappers.py | 3 ++- ...llusca_v1_snippets_list_resources_async.py | 5 +++++ ...ollusca_v1_snippets_list_resources_sync.py | 5 +++++ .../tests/snippetgen/snippets.proto | 19 ++++++++++++++--- .../unit/schema/wrappers/test_message.py | 21 +++++++++++++++++++ 6 files changed, 55 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 2a7e68a1f5be..ab1b817300ce 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -110,7 +110,7 @@ class TransformedRequest: # Resource patterns look something like # kingdom/{kingdom}/phylum/{phylum}/class/{class} - RESOURCE_RE = re.compile(r"\{([^}/]+)\}") + RESOURCE_RE = wrappers.MessageType.PATH_ARG_RE @classmethod def build( @@ -198,6 +198,11 @@ def build( raise types.NoSuchResourcePattern( f"Resource {resource_typestr} has no pattern with params: {attr_name_str}" ) + # This re-writes + # patterns like: 'projects/{project}/metricDescriptors/{metric_descriptor=**}' + # to 'projects/{project}/metricDescriptors/{metric_descriptor} + # so it can be used in sample code as an f-string. 
+ pattern = cls.RESOURCE_RE.sub(r"{\g<1>}", pattern) return cls(base=base, body=attrs, single=None, pattern=pattern,) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index eef7b9f14c64..9dabdfa4da9c 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -317,7 +317,8 @@ def __getattr__(self, name): class MessageType: """Description of a message (defined with the ``message`` keyword).""" # Class attributes - PATH_ARG_RE = re.compile(r'\{([a-zA-Z0-9_-]+)\}') + # https://google.aip.dev/122 + PATH_ARG_RE = re.compile(r'\{([a-zA-Z0-9_\-]+)(?:=\*\*)?\}') # Instance attributes message_pb: descriptor_pb2.DescriptorProto diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index 768866029872..649e56a88ba1 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -38,8 +38,13 @@ async def sample_list_resources(): part_id = "part_id_value" parent = f"items/{item_id}/parts/{part_id}" + item_id = "item_id_value" + part_id = "part_id_value" + resource_with_wildcard = f"items/{item_id}/parts/{part_id}" + request = mollusca_v1.ListResourcesRequest( parent=parent, + resource_with_wildcard=resource_with_wildcard, ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index f1aea4939062..8892452b84b5 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -38,8 +38,13 @@ def sample_list_resources(): part_id = "part_id_value" parent = f"items/{item_id}/parts/{part_id}" + item_id = "item_id_value" + part_id = "part_id_value" + resource_with_wildcard = f"items/{item_id}/parts/{part_id}" + request = mollusca_v1.ListResourcesRequest( parent=parent, + resource_with_wildcard=resource_with_wildcard, ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/snippets.proto b/packages/gapic-generator/tests/snippetgen/snippets.proto index d5bbb9f78b32..fbffcea258f5 100644 --- a/packages/gapic-generator/tests/snippetgen/snippets.proto +++ b/packages/gapic-generator/tests/snippetgen/snippets.proto @@ -67,9 +67,15 @@ message ListResourcesRequest { (google.api.resource_reference) = { type: "snippets.example.com/Resource" }]; + + string resource_with_wildcard = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "snippets.example.com/ResourceWithWildcardSegment" + }]; - int32 page_size = 2; - string page_token = 3; + int32 page_size = 3; + string page_token = 4; } message ListResourcesResponse { @@ -91,10 +97,17 @@ message Resource { pattern: "items/{item_id}/parts/{part_id}" }; string name = 1; +} - +message ResourceWithWildcardSegment { + option (google.api.resource) = { + type: "snippets.example.com/ResourceWithWildcardSegment" + pattern: "items/{item_id}/parts/{part_id=**}" + }; + string name = 1; } + message MessageWithNesting { message NestedMessage { string required_string = 1 [(google.api.field_behavior) = REQUIRED]; diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index da21f66ebdfa..1519fadc6760 100644 --- 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -201,6 +201,27 @@ def test_resource_path(): assert message.resource_type == "Class" +def test_resource_path_with_wildcard(): + options = descriptor_pb2.MessageOptions() + resource = options.Extensions[resource_pb2.resource] + resource.pattern.append( + "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass=**}") + resource.pattern.append( + "kingdoms/{kingdom}/divisions/{division}/classes/{klass}") + resource.type = "taxonomy.biology.com/Class" + message = make_message('Squid', options=options) + + assert message.resource_path == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass=**}" + assert message.resource_path_args == ["kingdom", "phylum", "klass"] + assert message.resource_type == "Class" + assert re.match(message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass") + assert re.match(message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment") + assert re.match(message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/") is None + + def test_parse_resource_path(): options = descriptor_pb2.MessageOptions() resource = options.Extensions[resource_pb2.resource] From 3e642cb83cfe1a162188947296c3aa05b4b0676c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 18 Nov 2021 11:40:14 -0700 Subject: [PATCH 0672/1339] fix(snippetgen): don't create duplicate requests for required oneofs (#1088) Some APIs mark every field in a oneof as "required" to express that the oneof itself is required. 
https://github.com/googleapis/googleapis/blob/55726d62556966c095a096aed1ffda5da231f36d/google/cloud/channel/v1/service.proto#L1057-L1069 ```proto // Request message for [CloudChannelService.ImportCustomer][google.cloud.channel.v1.CloudChannelService.ImportCustomer] message ImportCustomerRequest { // Specifies the identity of the transfer customer. // A customer's cloud_identity_id or domain is required to look up the // customer's Cloud Identity. For Team customers, only the cloud_identity_id // option is valid. oneof customer_identity { // Required. Customer domain. string domain = 2 [(google.api.field_behavior) = REQUIRED]; // Required. Customer's Cloud Identity ID string cloud_identity_id = 3 [(google.api.field_behavior) = REQUIRED]; } ... ``` This causes an error in the current logic since two requests are generated for the same field. --- .../gapic/samplegen/samplegen.py | 12 +++-- ...pets_one_of_method_required_field_async.py | 48 +++++++++++++++++++ ...ppets_one_of_method_required_field_sync.py | 48 +++++++++++++++++++ .../tests/snippetgen/snippets.proto | 13 +++++ 4 files changed, 118 insertions(+), 3 deletions(-) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index ab1b817300ce..35667f5af631 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -30,7 +30,7 @@ from gapic.schema import wrappers from collections import defaultdict, namedtuple, ChainMap as chainmap -from typing import Any, ChainMap, Dict, FrozenSet, Generator, List, Mapping, Optional, Tuple, Sequence +from typing import Any, ChainMap, Dict, FrozenSet, 
Generator, List, Mapping, Optional, Sequence # There is no library stub file for this module, so ignore it. from google.api import resource_pb2 # type: ignore @@ -981,10 +981,16 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess request_fields: List[wrappers.Field] = [] - # Choose the first option for each oneof + # There is no standard syntax to mark a oneof as "required" in protos. + # Assume every oneof is required and pick the first option + # in each oneof. selected_oneofs: List[wrappers.Field] = [oneof_fields[0] for oneof_fields in message.oneof_fields().values()] - request_fields = selected_oneofs + message.required_fields + + # Don't add required fields if they're also marked as oneof + required_fields = [ + field for field in message.required_fields if not field.oneof] + request_fields = selected_oneofs + required_fields for field in request_fields: # TransformedRequest expects nested fields to be referenced like diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py new file mode 100644 index 000000000000..4f074994b71b --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for OneOfMethodRequiredField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async] +from animalia import mollusca_v1 + + +async def sample_one_of_method_required_field(): + """Snippet for one_of_method_required_field""" + + # Create a client + client = mollusca_v1.SnippetsAsyncClient() + + # Initialize request argument(s) + request = mollusca_v1.OneOfRequestWithRequiredField( + my_string="my_string_value", + non_one_of_string="non_one_of_string_value", + ) + + # Make the request + response = await client.one_of_method_required_field(request=request) + + # Handle response + print(response) + +# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py new file mode 100644 index 000000000000..6e480d50007f --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for OneOfMethodRequiredField +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install animalia-mollusca + + +# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync] +from animalia import mollusca_v1 + + +def sample_one_of_method_required_field(): + """Snippet for one_of_method_required_field""" + + # Create a client + client = mollusca_v1.SnippetsClient() + + # Initialize request argument(s) + request = mollusca_v1.OneOfRequestWithRequiredField( + my_string="my_string_value", + non_one_of_string="non_one_of_string_value", + ) + + # Make the request + response = client.one_of_method_required_field(request=request) + + # Handle response + print(response) + +# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync] diff --git a/packages/gapic-generator/tests/snippetgen/snippets.proto b/packages/gapic-generator/tests/snippetgen/snippets.proto index fbffcea258f5..d5ead4ed336c 100644 --- a/packages/gapic-generator/tests/snippetgen/snippets.proto +++ b/packages/gapic-generator/tests/snippetgen/snippets.proto @@ -55,6 +55,8 @@ service Snippets { rpc MethodBidiStreaming(stream SignatureRequestOneRequiredField) returns (stream Response); rpc OneOfMethod(OneOfRequest) returns (Response); + + rpc OneOfMethodRequiredField(OneOfRequestWithRequiredField) returns (Response); } enum Enum { 
@@ -149,3 +151,14 @@ message OneOfRequest { int32 my_number = 3; } } + + +message OneOfRequestWithRequiredField { + string non_one_of_string = 1 [(google.api.field_behavior) = REQUIRED]; + + // Some APIs mark every field in a "required" oneof as required + oneof my_one_of { + string my_string = 2 [(google.api.field_behavior) = REQUIRED]; + int32 my_number = 3 [(google.api.field_behavior) = REQUIRED]; + } +} \ No newline at end of file From 179e3347422829bff50b6a6c4f736c83acf0c9a4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 23 Nov 2021 13:52:54 -0800 Subject: [PATCH 0673/1339] ci: save pip cache for multi-python jobs (#1095) --- packages/gapic-generator/.github/workflows/tests.yaml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index b8f18282237f..502443da991f 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -22,6 +22,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install nox. run: python -m pip install nox - name: Build the documentation. @@ -38,6 +39,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install nox. run: python -m pip install nox - name: Check type annotations. @@ -57,6 +59,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install system dependencies. run: | sudo apt-get update @@ -103,6 +106,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ - name: Install system dependencies. @@ -161,6 +165,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} + cache: 'pip' - name: Install system dependencies. 
run: | sudo apt-get update @@ -191,6 +196,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install system dependencies. run: | sudo apt-get update @@ -224,6 +230,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install system dependencies. run: | sudo apt-get update @@ -254,6 +261,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install system dependencies. run: | sudo apt-get update @@ -277,6 +285,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} + cache: 'pip' - name: Install pandoc run: | sudo apt-get update @@ -301,6 +310,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python }} + cache: 'pip' - name: Install pandoc run: | sudo apt-get update @@ -354,6 +364,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.8 + cache: 'pip' - name: Install autopep8 run: | python -m pip install autopep8 From 6bf1231c5f04ab9ed7b94915d674ac7802eeeacb Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Wed, 24 Nov 2021 21:18:42 -0500 Subject: [PATCH 0674/1339] feat: add support for long-running operations with rest transport. (#1094) * feat: add support for long-running operations with rest transport. * Update gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 Co-authored-by: Anthonios Partheniou * Update gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 Co-authored-by: Anthonios Partheniou * fix: address review comments * fix: rename rest operations client, fix rest lro unit tests * fix: removed extra space in assignment * fix: update goldens for integration tests due to template changes. 
Co-authored-by: Kenneth Bandes Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/gapic/schema/api.py | 30 +++++++++- .../services/%service/transports/rest.py.j2 | 57 +++++++++++-------- .../%name_%version/%sub/test_%service.py.j2 | 25 +++++--- .../gapic-generator/gapic/utils/options.py | 17 +++++- .../rules_python_gapic/py_gapic.bzl | 3 + .../unit/gapic/asset_v1/test_asset_service.py | 2 + .../unit/gapic/redis_v1/test_cloud_redis.py | 2 + .../tests/unit/generator/test_generator.py | 13 ++++- .../tests/unit/generator/test_options.py | 30 ++++++++++ .../tests/unit/schema/test_api.py | 35 ++++++++++++ 10 files changed, 178 insertions(+), 36 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index d233073d1d7b..575656351bfc 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -27,11 +27,14 @@ from types import MappingProxyType from google.api_core import exceptions +from google.api import http_pb2 # type: ignore from google.api import resource_pb2 # type: ignore +from google.api import service_pb2 # type: ignore from google.gapic.metadata import gapic_metadata_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 from google.protobuf.json_format import MessageToJson +from google.protobuf.json_format import ParseDict import grpc # type: ignore @@ -226,6 +229,7 @@ class API: """ naming: api_naming.Naming all_protos: Mapping[str, Proto] + service_yaml_config: service_pb2.Service subpackage_view: Tuple[str, ...] = dataclasses.field(default_factory=tuple) @classmethod @@ -318,8 +322,14 @@ def disambiguate_keyword_fname( for name, proto in pre_protos.items() } + # Parse the google.api.Service proto from the service_yaml data. + service_yaml_config = service_pb2.Service() + ParseDict(opts.service_yaml_config, service_yaml_config) + # Done; return the API. 
- return cls(naming=naming, all_protos=protos) + return cls(naming=naming, + all_protos=protos, + service_yaml_config=service_yaml_config) @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: @@ -374,6 +384,24 @@ def services(self) -> Mapping[str, wrappers.Service]: *[p.services for p in self.protos.values()], ) + @cached_property + def http_options(self) -> Mapping[str, Sequence[wrappers.HttpRule]]: + """Return a map of API-wide http rules.""" + + def make_http_options(rule: http_pb2.HttpRule + ) -> Sequence[wrappers.HttpRule]: + http_options = [rule] + list(rule.additional_bindings) + opt_gen = (wrappers.HttpRule.try_parse_http_rule(http_rule) + for http_rule in http_options) + return [rule for rule in opt_gen if rule] + + result: Mapping[str, Sequence[http_pb2.HttpRule]] = { + rule.selector: make_http_options(rule) + for rule in self.service_yaml_config.http.rules + } + + return result + @cached_property def subpackages(self) -> Mapping[str, 'API']: """Return a map of all subpackages, if any. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index fafc77d89a4f..ba3ab41185dd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -10,6 +10,7 @@ from google.api_core import path_template from google.api_core import gapic_v1 {% if service.has_lro %} from google.api_core import operations_v1 +from google.protobuf import json_format {% endif %} from requests import __version__ as requests_version from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -25,10 +26,6 @@ except AttributeError: # pragma: NO COVER {% block content %} -{% if service.has_lro %} -{% endif %} - - {# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} {% filter sort_lines %} {% for method in service.methods.values() %} @@ -134,31 +131,41 @@ class {{service.name}}RestTransport({{service.name}}Transport): This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Only create a new client if we do not already have one. 
if self._operations_client is None: - from google.api_core import grpc_helpers - - self._operations_client = operations_v1.OperationsClient( - grpc_helpers.create_channel( - self._host, + http_options = { + {% for selector, rules in api.http_options.items() %} + {% if selector.startswith('google.longrunning.Operations') %} + '{{ selector }}': [ + {% for rule in rules %} + { + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %} + }, + {% endfor %}{# rules #} + ], + {% endif %}{# longrunning.Operations #} + {% endfor %}{# http_options #} + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, credentials=self._credentials, - default_scopes=cls.AUTH_SCOPES, scopes=self._scopes, - default_host=cls.DEFAULT_HOST, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - ) + http_options=http_options) + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) # Return the client from cache. 
return self._operations_client - {% endif %} + {% endif %}{# service.has_lro #} {% for method in service.methods.values() %} - {%- if method.http_options and not method.lro and not (method.server_streaming or method.client_streaming) %} + {%- if method.http_options and not (method.server_streaming or method.client_streaming) %} def _{{method.name | snake_case}}(self, request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -279,11 +286,17 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.void %} # Return the response + {% if method.lro %} + return_op = operations_pb2.Operation() + json_format.Parse(response.content, return_op, ignore_unknown_fields=True) + return return_op + {% else %} return {{method.output.ident}}.from_json( response.content, ignore_unknown_fields=True ) {% endif %} + {% endif %} {% else %} def _{{method.name | snake_case}}(self, @@ -296,10 +309,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): raise RuntimeError( "Cannot define a method without a valid 'google.api.http' annotation.") - {%- elif method.lro %} - - raise NotImplementedError( - "LRO over REST is not yet defined for python client.") {%- elif method.server_streaming or method.client_streaming %} raise NotImplementedError( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index b53afebe8d9a..a6d1bd256256 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -36,8 +36,10 @@ from google.api_core import grpc_helpers_async from google.api_core import path_template {% if service.has_lro %} from google.api_core import future +from google.api_core import operation from google.api_core import 
operations_v1 from google.longrunning import operations_pb2 +from google.protobuf import json_format {% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} @@ -1119,8 +1121,8 @@ def test_{{ method_name }}_raw_page_lro(): {% for method in service.methods.values() if 'rest' in opts.transport and method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} -{# TODO(kbandes): remove this if condition when lro and streaming are supported. #} -{% if not method.lro and not (method.server_streaming or method.client_streaming) %} +{# TODO(kbandes): remove this if condition when streaming is supported in rest. #} +{% if not (method.server_streaming or method.client_streaming) %} def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1167,11 +1169,13 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - {% if method.void %} + {% if method.void %} json_return_value = '' - {% else %} + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} + {% endif %} response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} @@ -1188,6 +1192,8 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. # Establish that the response is the type that we expect. 
{% if method.void %} assert response is None + {% elif method.lro %} + assert response.operation.name == "operations/spam" {% else %} assert isinstance(response, {{ method.client_output.ident }}) {% for field in method.output.fields.values() | rejectattr('message') %} @@ -1264,11 +1270,13 @@ def test_{{ method_name }}_rest_flattened(transport: str = 'rest'): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - {% if method.void %} + {% if method.void %} json_return_value = '' - {% else %} + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} + {% endif %} response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1453,6 +1461,7 @@ def test_{{ method_name }}_rest_error(): client.{{ method_name }}({}) {%- endif %} + {% endif %}{% endwith %}{# method_name #} {% endfor -%} {#- method in methods for rest #} diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index d7bbe2473df6..154106af4f72 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -20,6 +20,7 @@ import json import os import warnings +import yaml from gapic.samplegen_utils import utils as samplegen_utils @@ -45,6 +46,8 @@ class Options: metadata: bool = False # TODO(yon-mg): should there be an enum for transport type? 
transport: List[str] = dataclasses.field(default_factory=lambda: []) + service_yaml_config: Dict[str, Any] = dataclasses.field( + default_factory=dict) # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' @@ -54,6 +57,7 @@ class Options: 'metadata', # generate GAPIC metadata JSON file 'old-naming', # TODO(dovs): Come up with a better comment 'retry-config', # takes a path + 'service-yaml', # takes a path 'samples', # output dir 'autogen-snippets', # produce auto-generated snippets # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) @@ -129,6 +133,16 @@ def tweak_path(p): with open(retry_paths[-1]) as f: retry_cfg = json.load(f) + service_yaml_config = {} + service_yaml_paths = opts.pop('service-yaml', None) + if service_yaml_paths: + # Just use the last file specified. + with open(service_yaml_paths[-1]) as f: + service_yaml_config = yaml.load(f, Loader=yaml.Loader) + # The yaml service files typically have this field, + # but it is not a field in the google.api.Service proto. + service_yaml_config.pop('type', None) + # Build the options instance. sample_paths = opts.pop('samples', []) @@ -150,7 +164,8 @@ def tweak_path(p): add_iam_methods=bool(opts.pop('add-iam-methods', False)), metadata=bool(opts.pop('metadata', False)), # transport should include desired transports delimited by '+', e.g. 
transport='grpc+rest' - transport=opts.pop('transport', ['grpc'])[0].split('+') + transport=opts.pop('transport', ['grpc'])[0].split('+'), + service_yaml_config=service_yaml_config, ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index c9965902d158..659996dd67b4 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -21,6 +21,7 @@ def py_gapic_library( plugin_args = None, opt_args = None, metadata = True, + service_yaml = None, **kwargs): # srcjar_target_name = "%s_srcjar" % name srcjar_target_name = name @@ -35,6 +36,8 @@ def py_gapic_library( file_args = {} if grpc_service_config: file_args[grpc_service_config] = "retry-config" + if service_yaml: + file_args[service_yaml] = "service-yaml" proto_custom_library( name = srcjar_target_name, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index ae6fd2a5e6a6..8b0f15491d45 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template @@ -44,6 +45,7 @@ from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import 
json_format from google.protobuf import timestamp_pb2 # type: ignore from google.type import expr_pb2 # type: ignore import google.auth diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 160dcfc35a94..d6a487d8dc02 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template @@ -42,6 +43,7 @@ from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import google.auth diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index d068250e9729..62a12df90c7f 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -19,6 +19,7 @@ import jinja2 import pytest +from google.api import service_pb2 from google.protobuf import descriptor_pb2 from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse @@ -767,9 +768,17 @@ def make_proto( ).proto -def make_api(*protos, naming: naming.Naming = None, **kwargs) -> api.API: +def make_api( + *protos, + naming: naming.Naming = None, + service_yaml_config: service_pb2.Service = None, + **kwargs +) -> api.API: 
return api.API( - naming=naming or make_naming(), all_protos={i.name: i for i in protos}, **kwargs + naming=naming or make_naming(), + service_yaml_config=service_yaml_config or service_pb2.Service(), + all_protos={i.name: i for i in protos}, + **kwargs ) diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index d5bd11f64e17..f881e1f55190 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -140,6 +140,36 @@ def test_options_service_config(fs): assert opts.retry == expected_cfg +def test_options_service_yaml_config(fs): + opts = Options.build("") + assert opts.service_yaml_config == {} + + service_yaml_fpath = "testapi_v1.yaml" + fs.create_file(service_yaml_fpath, + contents=("type: google.api.Service\n" + "config_version: 3\n" + "name: testapi.googleapis.com\n")) + opt_string = f"service-yaml={service_yaml_fpath}" + opts = Options.build(opt_string) + expected_config = { + "config_version": 3, + "name": "testapi.googleapis.com" + } + assert opts.service_yaml_config == expected_config + + service_yaml_fpath = "testapi_v2.yaml" + fs.create_file(service_yaml_fpath, + contents=("config_version: 3\n" + "name: testapi.googleapis.com\n")) + opt_string = f"service-yaml={service_yaml_fpath}" + opts = Options.build(opt_string) + expected_config = { + "config_version": 3, + "name": "testapi.googleapis.com" + } + assert opts.service_yaml_config == expected_config + + def test_options_bool_flags(): # All these options are default False. 
# If new options violate this assumption, diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 2c139ce7f162..afa82c8cfd4d 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -1560,3 +1560,38 @@ def test_gapic_metadata(): expected = MessageToJson(expected, sort_keys=True) actual = api_schema.gapic_metadata_json(opts) assert expected == actual + + +def test_http_options(fs): + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()),), + ),) + + opts = Options(service_yaml_config={ + 'http': { + 'rules': [ + { + 'selector': 'Cancel', + 'post': '/v3/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*' + }, + { + 'selector': 'Get', + 'get': '/v3/{name=projects/*/locations/*/operations/*}', + 'additional_bindings': [{'get': '/v3/{name=/locations/*/operations/*}'}], + }, ] + } + }) + + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + http_options = api_schema.http_options + assert http_options == { + 'Cancel': [wrappers.HttpRule(method='post', uri='/v3/{name=projects/*/locations/*/operations/*}:cancel', body='*')], + 'Get': [ + wrappers.HttpRule( + method='get', uri='/v3/{name=projects/*/locations/*/operations/*}', body=None), + wrappers.HttpRule(method='get', uri='/v3/{name=/locations/*/operations/*}', body=None)] + } From f0fdeb83f4eb071a3a36885300dd62d7926cc44d Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Thu, 2 Dec 2021 02:34:19 -0800 Subject: [PATCH 0675/1339] chore: remove actools@ from CODEOWNERS (#1099) --- packages/gapic-generator/.github/CODEOWNERS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS index 002c1863abdd..279eedf06a67 100644 --- 
a/packages/gapic-generator/.github/CODEOWNERS +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -4,5 +4,5 @@ # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -* @googleapis/actools @googleapis/actools-python @googleapis/yoshi-python -*.yaml @googleapis/actools @googleapis/yoshi-python @googleapis/actools-python +* @googleapis/actools-python @googleapis/yoshi-python +*.yaml @googleapis/yoshi-python @googleapis/actools-python From 66b0d872d248cf160f128e2be86e742e26845c0b Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Thu, 2 Dec 2021 13:14:34 -0500 Subject: [PATCH 0676/1339] fix: ensure rest unit tests have complete coverage (#1098) * fix: rest paging and lro client tests weren't working. * fix: fix coverage gaps in rest unit tests. * fix: refactor required fields code to move update out of static method. * fix: test that api method with required fields handles them correctly. * fix: removed extra parens from an expression in a test. 
Co-authored-by: Kenneth Bandes Co-authored-by: Anthonios Partheniou --- .../services/%service/transports/rest.py.j2 | 43 ++--- .../%name_%version/%sub/test_%service.py.j2 | 164 ++++++++++++++++-- .../unit/gapic/asset_v1/test_asset_service.py | 1 + .../credentials_v1/test_iam_credentials.py | 1 + .../logging_v2/test_config_service_v2.py | 1 + .../logging_v2/test_logging_service_v2.py | 1 + .../logging_v2/test_metrics_service_v2.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1 + 8 files changed, 174 insertions(+), 39 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index ba3ab41185dd..64aa67213b81 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -166,6 +166,22 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# service.has_lro #} {% for method in service.methods.values() %} {%- if method.http_options and not (method.server_streaming or method.client_streaming) %} + + {% if method.input.required_fields %} + __{{ method.name | snake_case }}_required_fields_default_values = { + {% for req_field in method.input.required_fields if req_field.is_primitive %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.default_value is string %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.field_pb.default_value }}{% endif %}{# default is str #} + {% endfor %} + } + + + @staticmethod + def _{{ method.name | snake_case }}_get_unset_required_fields(message_dict): + return {k: v for k, v in {{service.name}}RestTransport.__{{ method.name | snake_case }}_required_fields_default_values.items() if k not in message_dict} + + + {% 
endif %}{# required fields #} + def _{{method.name | snake_case}}(self, request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -206,21 +222,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %} ] - {% if method.input.required_fields %} - required_fields = [ - # (snake_case_name, camel_case_name) - {% for req_field in method.input.required_fields %} - {% if req_field.is_primitive %} - ( - "{{ req_field.name | snake_case }}", - "{{ req_field.name | camel_case }}" - ), - {% endif %}{# is primitive #} - {% endfor %}{# required fields #} - ] - - {% endif %} - request_kwargs = {{method.input.ident}}.to_dict(request) transcoded_request = path_template.transcode( http_options, **request_kwargs) @@ -254,16 +255,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): )) {% if method.input.required_fields %} - # Ensure required fields have values in query_params. - # If a required field has a default value, it can get lost - # by the to_json call above. 
- orig_query_params = transcoded_request["query_params"] - for snake_case_name, camel_case_name in required_fields: - if snake_case_name in orig_query_params: - if camel_case_name not in query_params: - query_params[camel_case_name] = orig_query_params[snake_case_name] - - {% endif %} + query_params.update(self._{{ method.name | snake_case }}_get_unset_required_fields(query_params)) + {% endif %}{# required fields #} # Send the request headers = dict(metadata) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a6d1bd256256..b31ff4df9b81 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -7,6 +7,7 @@ import mock import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule @@ -1187,6 +1188,7 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response + {% endif %} # Establish that the response is the type that we expect. @@ -1210,6 +1212,130 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. 
{% endif %} + {% if method.input.required_fields %} +def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ident }}): + transport_class = transports.{{ service.rest_transport_name }} + + request_init = {} + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% if req_field.field_pb.default_value is string %} + request_init["{{ req_field.name }}"] = "{{ req_field.field_pb.default_value }}" + {% else %} + request_init["{{ req_field.name }}"] = {{ req_field.field_pb.default_value }} + {% endif %}{# default is str #} + {% endfor %} + request = request_type(request_init) + jsonified_request = json.loads(request_type.to_json( + request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + assert "{{ field_name }}" not in jsonified_request + {% endfor %} + + unset_fields = transport_class._{{ method.name | snake_case }}_get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + assert "{{ field_name }}" in jsonified_request + assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] + {% endfor %} + + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + {% set mock_value = req_field.primitive_mock_as_str() %} + jsonified_request["{{ field_name }}"] = {{ mock_value }} + {% endfor %} + + unset_fields = transport_class._{{ method.name | snake_case }}_get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are 
left alone + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + {% set mock_value = req_field.primitive_mock_as_str() %} + assert "{{ field_name }}" in jsonified_request + assert jsonified_request["{{ field_name }}"] == {{ mock_value }} + {% endfor %} + + + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(request_init) + + # Designate an appropriate value for the returned response. + {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + return_value = iter([{{ method.output.ident }}()]) + {% else %} + return_value = {{ method.output.ident }}() + {% endif %} + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ transcode_result = { + 'uri': 'v1/sample_method', + 'method': "{{ method.http_options[0].method }}", + 'query_params': request_init, + } + {% if method.http_options[0].body %} + transcode_result['body'] = {} + {% endif %} + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + response = client.{{ method_name }}(request) + {% endif %} + + expected_params = [ + {% for req_field in method.input.required_fields if req_field.is_primitive %} + ( + "{{ req_field.name }}", + {% if req_field.field_pb.default_value is string %} + "{{ req_field.field_pb.default_value }}" + {% else %} + {{ req_field.field_pb.default_value }} + {% endif %}{# default is str #} + ) + {% endfor %} + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + + {% endif %}{# required_fields #} + + def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1325,9 +1451,10 @@ def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): {% if method.paged_result_field %} -def test_{{ method_name }}_rest_pager(): +def test_{{ method_name }}_rest_pager(transport: str = 'rest'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1446,25 +1573,35 @@ def test_{{ method_name }}_rest_error(): credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) - {%- if not method.http_options %} - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. - with pytest.raises(RuntimeError) as runtime_error: - client.{{ method_name }}({}) - assert ('Cannot define a method without a valid `google.api.http` annotation.' - in str(runtime_error.value)) - {%- else %} # TODO(yon-mg): Remove when this method has a working implementation # or testing straegy with pytest.raises(NotImplementedError): client.{{ method_name }}({}) - {%- endif %} -{% endif %}{% endwith %}{# method_name #} +{% endif %}{# not streaming #}{% endwith %}{# method_name #} {% endfor -%} {#- method in methods for rest #} + +{% for method in service.methods.values() if 'rest' in opts.transport and + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} +def test_{{ method_name }}_rest_error(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(RuntimeError) as runtime_error: + client.{{ method_name }}({}) + assert ("Cannot define a method without a valid 'google.api.http' annotation." + in str(runtime_error.value)) + + +{% endwith %}{# method_name #} +{% endfor %}{# for methods without http_options #} + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( @@ -1758,8 +1895,7 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -{# TODO(kbandes): re-enable this code when LRO is implmented for rest #} -{% if False and service.has_lro -%} +{% if service.has_lro -%} def test_{{ service.name|snake_case }}_rest_lro_client(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1770,7 +1906,7 @@ def test_{{ service.name|snake_case }}_rest_lro_client(): # Ensure that we have a api-core operations client. assert isinstance( transport.operations_client, - operations_v1.OperationsClient, + operations_v1.AbstractOperationsClient, ) # Ensure that subsequent calls to the property send the exact same object. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 8b0f15491d45..95115e9d14e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -18,6 +18,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 54bebbee759a..2bd38142f66a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -18,6 +18,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index f8c963c4af4d..e8bd895fb8e1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -18,6 +18,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 47cc2177ef4d..84911953c121 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -18,6 +18,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7d8951e95ac5..9c97b2030891 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -18,6 +18,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index d6a487d8dc02..8e60bc092e6d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -18,6 +18,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule From bc8a8e46ae4c7fd2cdd35e28844add8a3dfc599e Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 7 Dec 2021 15:17:41 -0800 Subject: [PATCH 0677/1339] chore: extended_operations_pb2.py is moved into api-common-protos (#1100) --- packages/gapic-generator/BUILD.bazel | 2 +- .../google/cloud/extended_operations_pb2.py | 132 ------------------ packages/gapic-generator/requirements.txt | 2 +- packages/gapic-generator/setup.py | 2 +- 4 files changed, 3 insertions(+), 135 deletions(-) delete mode 100755 packages/gapic-generator/google/cloud/extended_operations_pb2.py diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index b062e9ca7855..38ae745d0d3e 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -51,7 +51,7 @@ toolchain( py_binary( name = "gapic_plugin", - srcs = glob(["gapic/**/*.py", "google/**/*.py"]), + srcs = glob(["gapic/**/*.py"]), data = 
[":pandoc_binary"] + glob([ "gapic/**/*.j2", "gapic/**/.*.j2", diff --git a/packages/gapic-generator/google/cloud/extended_operations_pb2.py b/packages/gapic-generator/google/cloud/extended_operations_pb2.py deleted file mode 100755 index 439f7f0be3ba..000000000000 --- a/packages/gapic-generator/google/cloud/extended_operations_pb2.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/extended_operations.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/extended_operations.proto', - package='google.cloud', - syntax='proto3', - serialized_options=b'\n\020com.google.cloudB\027ExtendedOperationsProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\242\002\004GAPI', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n&google/cloud/extended_operations.proto\x12\x0cgoogle.cloud\x1a google/protobuf/descriptor.proto*b\n\x18OperationResponseMapping\x12\r\n\tUNDEFINED\x10\x00\x12\x08\n\x04NAME\x10\x01\x12\n\n\x06STATUS\x10\x02\x12\x0e\n\nERROR_CODE\x10\x03\x12\x11\n\rERROR_MESSAGE\x10\x04:_\n\x0foperation_field\x12\x1d.google.protobuf.FieldOptions\x18\xfd\x08 \x01(\x0e\x32&.google.cloud.OperationResponseMapping:?\n\x17operation_request_field\x12\x1d.google.protobuf.FieldOptions\x18\xfe\x08 \x01(\t:@\n\x18operation_response_field\x12\x1d.google.protobuf.FieldOptions\x18\xff\x08 
\x01(\t::\n\x11operation_service\x12\x1e.google.protobuf.MethodOptions\x18\xe1\t \x01(\t:A\n\x18operation_polling_method\x12\x1e.google.protobuf.MethodOptions\x18\xe2\t \x01(\x08\x42y\n\x10\x63om.google.cloudB\x17\x45xtendedOperationsProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/extendedops;extendedops\xa2\x02\x04GAPIb\x06proto3' - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - -_OPERATIONRESPONSEMAPPING = _descriptor.EnumDescriptor( - name='OperationResponseMapping', - full_name='google.cloud.OperationResponseMapping', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='UNDEFINED', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='NAME', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STATUS', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ERROR_CODE', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ERROR_MESSAGE', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=90, - serialized_end=188, -) -_sym_db.RegisterEnumDescriptor(_OPERATIONRESPONSEMAPPING) - -OperationResponseMapping = enum_type_wrapper.EnumTypeWrapper(_OPERATIONRESPONSEMAPPING) -UNDEFINED = 0 -NAME = 1 -STATUS = 2 -ERROR_CODE = 3 -ERROR_MESSAGE = 4 - -OPERATION_FIELD_FIELD_NUMBER = 1149 -operation_field = _descriptor.FieldDescriptor( - name='operation_field', full_name='google.cloud.operation_field', index=0, - number=1149, type=14, cpp_type=8, label=1, 
- has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) -OPERATION_REQUEST_FIELD_FIELD_NUMBER = 1150 -operation_request_field = _descriptor.FieldDescriptor( - name='operation_request_field', full_name='google.cloud.operation_request_field', index=1, - number=1150, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) -OPERATION_RESPONSE_FIELD_FIELD_NUMBER = 1151 -operation_response_field = _descriptor.FieldDescriptor( - name='operation_response_field', full_name='google.cloud.operation_response_field', index=2, - number=1151, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) -OPERATION_SERVICE_FIELD_NUMBER = 1249 -operation_service = _descriptor.FieldDescriptor( - name='operation_service', full_name='google.cloud.operation_service', index=3, - number=1249, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) -OPERATION_POLLING_METHOD_FIELD_NUMBER = 1250 -operation_polling_method = _descriptor.FieldDescriptor( - name='operation_polling_method', full_name='google.cloud.operation_polling_method', index=4, - number=1250, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, 
enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key) - -DESCRIPTOR.enum_types_by_name['OperationResponseMapping'] = _OPERATIONRESPONSEMAPPING -DESCRIPTOR.extensions_by_name['operation_field'] = operation_field -DESCRIPTOR.extensions_by_name['operation_request_field'] = operation_request_field -DESCRIPTOR.extensions_by_name['operation_response_field'] = operation_response_field -DESCRIPTOR.extensions_by_name['operation_service'] = operation_service -DESCRIPTOR.extensions_by_name['operation_polling_method'] = operation_polling_method -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -operation_field.enum_type = _OPERATIONRESPONSEMAPPING -google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(operation_field) -google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(operation_request_field) -google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(operation_response_field) -google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(operation_service) -google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(operation_polling_method) - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 62fcf42a5d5a..880b056f22ce 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.0.3 google-api-core==2.1.1 -googleapis-common-protos==1.53.0 +googleapis-common-protos==1.54.0 jinja2==3.0.3 MarkupSafe==2.0.1 protobuf==3.19.1 diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 76e590d457f8..b5e84e5dd106 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -45,7 +45,7 @@ install_requires=( "click >= 6.7", "google-api-core >= 2.2.0", - 
"googleapis-common-protos >= 1.53.0", + "googleapis-common-protos >= 1.54.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", "protobuf >= 3.18.0", From ca4054b2628400b2780f9d30e180502d32a80096 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 8 Dec 2021 15:06:17 +0000 Subject: [PATCH 0678/1339] chore: release 0.58.0 (#1097) :robot: I have created a release \*beep\* \*boop\* --- ## [0.58.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.57.0...v0.58.0) (2021-12-07) ### Features * add support for long-running operations with rest transport. ([#1094](https://www.github.com/googleapis/gapic-generator-python/issues/1094)) ([e89fd23](https://www.github.com/googleapis/gapic-generator-python/commit/e89fd23609625c5aa49acd6c6ee67f87fce324fd)) ### Bug Fixes * ensure rest unit tests have complete coverage ([#1098](https://www.github.com/googleapis/gapic-generator-python/issues/1098)) ([0705d9c](https://www.github.com/googleapis/gapic-generator-python/commit/0705d9c5dbbea793867551e64991be37d8339c6b)) * fix resource path args for paths with =** ([#1089](https://www.github.com/googleapis/gapic-generator-python/issues/1089)) ([309cc66](https://www.github.com/googleapis/gapic-generator-python/commit/309cc66e880e07940866864b03c744310ef56762)) * **snippetgen:** don't create duplicate requests for required oneofs ([#1088](https://www.github.com/googleapis/gapic-generator-python/issues/1088)) ([5531795](https://www.github.com/googleapis/gapic-generator-python/commit/55317956397370a91b1a06ecd476e55f58789807)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index fcc05d65530b..6703e1c2b88c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [0.58.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.57.0...v0.58.0) (2021-12-07) + + +### Features + +* add support for long-running operations with rest transport. ([#1094](https://www.github.com/googleapis/gapic-generator-python/issues/1094)) ([e89fd23](https://www.github.com/googleapis/gapic-generator-python/commit/e89fd23609625c5aa49acd6c6ee67f87fce324fd)) + + +### Bug Fixes + +* ensure rest unit tests have complete coverage ([#1098](https://www.github.com/googleapis/gapic-generator-python/issues/1098)) ([0705d9c](https://www.github.com/googleapis/gapic-generator-python/commit/0705d9c5dbbea793867551e64991be37d8339c6b)) +* fix resource path args for paths with =** ([#1089](https://www.github.com/googleapis/gapic-generator-python/issues/1089)) ([309cc66](https://www.github.com/googleapis/gapic-generator-python/commit/309cc66e880e07940866864b03c744310ef56762)) +* **snippetgen:** don't create duplicate requests for required oneofs ([#1088](https://www.github.com/googleapis/gapic-generator-python/issues/1088)) ([5531795](https://www.github.com/googleapis/gapic-generator-python/commit/55317956397370a91b1a06ecd476e55f58789807)) + ## [0.57.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.2...v0.57.0) (2021-11-17) From 41a04567b6c881dc954a42213ac506a693ddb8f3 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 9 Dec 2021 13:41:07 -0800 Subject: [PATCH 0679/1339] fix: syntax fix for required_fields struct in rest transport (#1103) --- .../%sub/services/%service/transports/rest.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 7 +++++-- 
.../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 3 --- .../unit/gapic/credentials_v1/test_iam_credentials.py | 1 - .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 1 - .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 1 - .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 1 - .../redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 3 --- 8 files changed, 6 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 64aa67213b81..a94ce42c5385 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -170,7 +170,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if method.input.required_fields %} __{{ method.name | snake_case }}_required_fields_default_values = { {% for req_field in method.input.required_fields if req_field.is_primitive %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.default_value is string %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.field_pb.default_value }}{% endif %}{# default is str #} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.default_value is string %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.field_pb.default_value }}{% endif %},{# default is str #} {% endfor %} } diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index b31ff4df9b81..cd9a7c34f027 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -7,7 +7,9 @@ import mock import grpc from grpc.experimental import aio +{% if "rest" in opts.transport %} import json +{% endif %} import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule @@ -37,11 +39,12 @@ from google.api_core import grpc_helpers_async from google.api_core import path_template {% if service.has_lro %} from google.api_core import future -from google.api_core import operation from google.api_core import operations_v1 from google.longrunning import operations_pb2 +{% if "rest" in opts.transport %} from google.protobuf import json_format -{% endif %} +{% endif %}{# rest transport #} +{% endif %}{# lro #} from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 95115e9d14e2..ae6fd2a5e6a6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -18,7 +18,6 @@ import grpc from grpc.experimental import aio -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule @@ -30,7 +29,6 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template @@ -46,7 +44,6 @@ from 
google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore from google.type import expr_pb2 # type: ignore import google.auth diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 2bd38142f66a..54bebbee759a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -18,7 +18,6 @@ import grpc from grpc.experimental import aio -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index e8bd895fb8e1..f8c963c4af4d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -18,7 +18,6 @@ import grpc from grpc.experimental import aio -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 84911953c121..47cc2177ef4d 100644 
--- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -18,7 +18,6 @@ import grpc from grpc.experimental import aio -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 9c97b2030891..7d8951e95ac5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -18,7 +18,6 @@ import grpc from grpc.experimental import aio -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 8e60bc092e6d..160dcfc35a94 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -18,7 +18,6 @@ import grpc from grpc.experimental import aio -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule @@ -30,7 +29,6 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore from 
google.api_core import operations_v1 from google.api_core import path_template @@ -44,7 +42,6 @@ from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import json_format from google.protobuf import timestamp_pb2 # type: ignore import google.auth From 034de32a8495a84510692c975f39f7d367a2341c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Dec 2021 13:59:59 -0800 Subject: [PATCH 0680/1339] chore: release 0.58.1 (#1104) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 6703e1c2b88c..eec318a6a3a5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.58.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.0...v0.58.1) (2021-12-09) + + +### Bug Fixes + +* syntax fix for required_fields struct in rest transport ([#1103](https://www.github.com/googleapis/gapic-generator-python/issues/1103)) ([3d7128c](https://www.github.com/googleapis/gapic-generator-python/commit/3d7128ce8f55523b9aff2e44e2c000450e712ac2)) + ## [0.58.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.57.0...v0.58.0) (2021-12-07) From 1d97308eb5986858f0def6e66ae89e7067c9ea02 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 10 Dec 2021 09:47:53 -0800 Subject: [PATCH 0681/1339] fix: syntax fix and test for multiple required fields (#1105) --- .../%name_%version/%sub/test_%service.py.j2 | 6 +- .../fragments/google/api/field_behavior.proto | 90 +++++++++++++++++++ .../test_multiple_required_fields.proto | 43 +++++++++ 3 files changed, 136 insertions(+), 3 deletions(-) create mode 
100644 packages/gapic-generator/tests/fragments/google/api/field_behavior.proto create mode 100644 packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index cd9a7c34f027..e1932e462f34 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1325,11 +1325,11 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide ( "{{ req_field.name }}", {% if req_field.field_pb.default_value is string %} - "{{ req_field.field_pb.default_value }}" + "{{ req_field.field_pb.default_value }}", {% else %} - {{ req_field.field_pb.default_value }} + {{ req_field.field_pb.default_value }}, {% endif %}{# default is str #} - ) + ), {% endfor %} ] actual_params = req.call_args.kwargs['params'] diff --git a/packages/gapic-generator/tests/fragments/google/api/field_behavior.proto b/packages/gapic-generator/tests/fragments/google/api/field_behavior.proto new file mode 100644 index 000000000000..c4abe3b670f3 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/api/field_behavior.proto @@ -0,0 +1,90 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "FieldBehaviorProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // A designation of a specific field behavior (required, output only, etc.) + // in protobuf messages. + // + // Examples: + // + // string name = 1 [(google.api.field_behavior) = REQUIRED]; + // State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + // google.protobuf.Duration ttl = 1 + // [(google.api.field_behavior) = INPUT_ONLY]; + // google.protobuf.Timestamp expire_time = 1 + // [(google.api.field_behavior) = OUTPUT_ONLY, + // (google.api.field_behavior) = IMMUTABLE]; + repeated google.api.FieldBehavior field_behavior = 1052; +} + +// An indicator of the behavior of a given field (for example, that a field +// is required in requests, or given as output but ignored as input). +// This **does not** change the behavior in protocol buffers itself; it only +// denotes the behavior and may affect how API tooling handles the field. +// +// Note: This enum **may** receive new values in the future. +enum FieldBehavior { + // Conventional default for enums. Do not use this. + FIELD_BEHAVIOR_UNSPECIFIED = 0; + + // Specifically denotes a field as optional. + // While all fields in protocol buffers are optional, this may be specified + // for emphasis if appropriate. + OPTIONAL = 1; + + // Denotes a field as required. + // This indicates that the field **must** be provided as part of the request, + // and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + REQUIRED = 2; + + // Denotes a field as output only. 
+ // This indicates that the field is provided in responses, but including the + // field in a request does nothing (the server *must* ignore it and + // *must not* throw an error as a result of the field's presence). + OUTPUT_ONLY = 3; + + // Denotes a field as input only. + // This indicates that the field is provided in requests, and the + // corresponding field is not included in output. + INPUT_ONLY = 4; + + // Denotes a field as immutable. + // This indicates that the field may be set once in a request to create a + // resource, but may not be changed thereafter. + IMMUTABLE = 5; + + // Denotes that a (repeated) field is an unordered list. + // This indicates that the service may provide the elements of the list + // in any arbitrary order, rather than the order the user originally + // provided. Additionally, the list's order may or may not be stable. + UNORDERED_LIST = 6; + + // Denotes that this field returns a non-empty default value if not set. + // This indicates that if the user provides the empty value in a request, + // a non-empty value will be returned. The user will not be aware of what + // non-empty value to expect. + NON_EMPTY_DEFAULT = 7; +} diff --git a/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto b/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto new file mode 100644 index 000000000000..8e4cdfe157c2 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto @@ -0,0 +1,43 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/annotations.proto"; + +service MultipleRequiredFields { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.http) = { + get: "/nomenclature/v1/kingdom/{kingdom}/phylum/{phylum}" + }; + } +} + +message Description { + string description = 1; +} + +message MethodRequest { + string kingdom = 1 [(google.api.field_behavior) = REQUIRED]; + string phylum = 2 [(google.api.field_behavior) = REQUIRED]; + Description description = 3 [(google.api.field_behavior) = REQUIRED]; +} + +message MethodResponse{} \ No newline at end of file From 39aa70925748595b0a881c139bf1551a21cd3364 Mon Sep 17 00:00:00 2001 From: Ken Bandes Date: Fri, 10 Dec 2021 15:29:11 -0500 Subject: [PATCH 0682/1339] fix: fix case for expected field names in required fields test. 
(#1107) Co-authored-by: Kenneth Bandes --- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e1932e462f34..fdcec0eeaa75 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1323,7 +1323,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide expected_params = [ {% for req_field in method.input.required_fields if req_field.is_primitive %} ( - "{{ req_field.name }}", + "{{ req_field.name | camel_case }}", {% if req_field.field_pb.default_value is string %} "{{ req_field.field_pb.default_value }}", {% else %} From 510b15128a4c87f228a878636f0c59d55ee7fc70 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Mon, 13 Dec 2021 10:06:42 -0800 Subject: [PATCH 0683/1339] fix: non-string required fields provide correct values (#1108) In generated unit tests checking behavior of required fields in REST transports, fields are given default values in accordance with the type of the field. 
--- .../gapic-generator/gapic/schema/wrappers.py | 78 +++++++++++++++++-- .../services/%service/transports/rest.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 20 ++--- .../gapic-generator/gapic/utils/__init__.py | 2 - .../gapic-generator/gapic/utils/uri_sample.py | 78 ------------------- .../fragments/test_required_non_string.proto | 41 ++++++++++ .../tests/unit/schema/wrappers/test_method.py | 51 ++++++++++-- 7 files changed, 167 insertions(+), 105 deletions(-) delete mode 100644 packages/gapic-generator/gapic/utils/uri_sample.py create mode 100644 packages/gapic-generator/tests/fragments/test_required_non_string.proto diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 9dabdfa4da9c..aa474e0e4ea3 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -32,7 +32,7 @@ import json import re from itertools import chain -from typing import (Any, cast, Dict, FrozenSet, Iterable, List, Mapping, +from typing import (Any, cast, Dict, FrozenSet, Iterator, Iterable, List, Mapping, ClassVar, Optional, Sequence, Set, Tuple, Union) from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 @@ -757,17 +757,79 @@ class HttpRule: uri: str body: Optional[str] - @property - def path_fields(self) -> List[Tuple[str, str]]: + def path_fields(self, method: "~.Method") -> List[Tuple[Field, str, str]]: """return list of (name, template) tuples extracted from uri.""" - return [(match.group("name"), match.group("template")) + input = method.input + return [(input.get_field(*match.group("name").split(".")), match.group("name"), match.group("template")) for match in path_template._VARIABLE_RE.finditer(self.uri)] - @property - def sample_request(self) -> str: + def sample_request(self, method: "~.Method") -> str: """return json dict for sample request matching the uri template.""" - sample = 
utils.sample_from_path_fields(self.path_fields) - return json.dumps(sample) + + def sample_from_path_fields(paths: List[Tuple["wrappers.Field", str, str]]) -> Dict[Any, Any]: + """Construct a dict for a sample request object from a list of fields + and template patterns. + + Args: + paths: a list of tuples, each with a (segmented) name and a pattern. + Returns: + A new nested dict with the templates instantiated. + """ + + request: Dict[str, Any] = {} + + def _sample_names() -> Iterator[str]: + sample_num: int = 0 + while True: + sample_num += 1 + yield "sample{}".format(sample_num) + + def add_field(obj, path, value): + """Insert a field into a nested dict and return the (outer) dict. + Keys and sub-dicts are inserted if necessary to create the path. + e.g. if obj, as passed in, is {}, path is "a.b.c", and value is + "hello", obj will be updated to: + {'a': + {'b': + { + 'c': 'hello' + } + } + } + + Args: + obj: a (possibly) nested dict (parsed json) + path: a segmented field name, e.g. "a.b.c" + where each part is a dict key. + value: the value of the new key. + Returns: + obj, possibly modified + Raises: + AttributeError if the path references a key that is + not a dict.: e.g. 
path='a.b', obj = {'a':'abc'} + """ + + segments = path.split('.') + leaf = segments.pop() + subfield = obj + for segment in segments: + subfield = subfield.setdefault(segment, {}) + subfield[leaf] = value + return obj + + sample_names = _sample_names() + for field, path, template in paths: + sample_value = re.sub( + r"(\*\*|\*)", + lambda n: next(sample_names), + template or '*' + ) if field.type == PrimitiveType.build(str) else field.mock_value_original_type + add_field(request, path, sample_value) + + return request + + sample = sample_from_path_fields(self.path_fields(method)) + return sample @classmethod def try_parse_http_rule(cls, http_rule) -> Optional['HttpRule']: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index a94ce42c5385..0d0998cf43b2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -170,7 +170,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if method.input.required_fields %} __{{ method.name | snake_case }}_required_fields_default_values = { {% for req_field in method.input.required_fields if req_field.is_primitive %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.default_value is string %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.field_pb.default_value }}{% endif %},{# default is str #} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} } diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index fdcec0eeaa75..4cd8c37bf30a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1134,11 +1134,11 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. ) # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request}} + request_init = {{ method.http_options[0].sample_request(method) }} {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value }} + request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} {% endif %} {% endfor %} request = request_type(request_init) @@ -1221,10 +1221,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide request_init = {} {% for req_field in method.input.required_fields if req_field.is_primitive %} - {% if req_field.field_pb.default_value is string %} + {% if req_field.field_pb.type == 9 %} request_init["{{ req_field.name }}"] = "{{ req_field.field_pb.default_value }}" {% else %} - request_init["{{ req_field.name }}"] = {{ req_field.field_pb.default_value }} + request_init["{{ req_field.name }}"] = {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }} {% endif %}{# default is str #} {% endfor %} request = request_type(request_init) @@ -1324,10 +1324,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% for req_field in method.input.required_fields if req_field.is_primitive %} ( "{{ req_field.name | camel_case }}", - {% if 
req_field.field_pb.default_value is string %} + {% if req_field.field_pb.type == 9 %} "{{ req_field.field_pb.default_value }}", {% else %} - {{ req_field.field_pb.default_value }}, + {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, {% endif %}{# default is str #} ), {% endfor %} @@ -1346,11 +1346,11 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ ) # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request}} + request_init = {{ method.http_options[0].sample_request(method) }} {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value }} + request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} {% endif %} {% endfor %} request = request_type(request_init) @@ -1411,7 +1411,7 @@ def test_{{ method_name }}_rest_flattened(transport: str = 'rest'): req.return_value = response_value # get arguments that satisfy an http rule for this method - sample_request = {{ method.http_options[0].sample_request }} + sample_request = {{ method.http_options[0].sample_request(method) }} # get truthy value for each flattened field mock_args = dict( @@ -1531,7 +1531,7 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): return_val.status_code = 200 req.side_effect = return_values - sample_request = {{ method.http_options[0].sample_request }} + sample_request = {{ method.http_options[0].sample_request(method) }} {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 5000d78b49ac..047cc4f300f1 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ 
b/packages/gapic-generator/gapic/utils/__init__.py @@ -29,7 +29,6 @@ from gapic.utils.reserved_names import RESERVED_NAMES from gapic.utils.rst import rst from gapic.utils.uri_conv import convert_uri_fieldnames -from gapic.utils.uri_sample import sample_from_path_fields __all__ = ( @@ -44,7 +43,6 @@ 'partition', 'RESERVED_NAMES', 'rst', - 'sample_from_path_fields', 'sort_lines', 'to_snake_case', 'to_camel_case', diff --git a/packages/gapic-generator/gapic/utils/uri_sample.py b/packages/gapic-generator/gapic/utils/uri_sample.py deleted file mode 100644 index 0eba82220f82..000000000000 --- a/packages/gapic-generator/gapic/utils/uri_sample.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Any, Generator, Dict, List, Tuple -import re - - -def _sample_names() -> Generator[str, None, None]: - sample_num: int = 0 - while True: - sample_num += 1 - yield "sample{}".format(sample_num) - - -def add_field(obj, path, value): - """Insert a field into a nested dict and return the (outer) dict. - Keys and sub-dicts are inserted if necessary to create the path. - e.g. if obj, as passed in, is {}, path is "a.b.c", and value is - "hello", obj will be updated to: - {'a': - {'b': - { - 'c': 'hello' - } - } - } - - Args: - obj: a (possibly) nested dict (parsed json) - path: a segmented field name, e.g. "a.b.c" - where each part is a dict key. - value: the value of the new key. 
- Returns: - obj, possibly modified - Raises: - AttributeError if the path references a key that is - not a dict.: e.g. path='a.b', obj = {'a':'abc'} - """ - segments = path.split('.') - leaf = segments.pop() - subfield = obj - for segment in segments: - subfield = subfield.setdefault(segment, {}) - subfield[leaf] = value - return obj - - -def sample_from_path_fields(paths: List[Tuple[str, str]]) -> Dict[Any, Any]: - """Construct a dict for a sample request object from a list of fields - and template patterns. - - Args: - paths: a list of tuples, each with a (segmented) name and a pattern. - Returns: - A new nested dict with the templates instantiated. - """ - - request: Dict[str, Any] = {} - sample_names = _sample_names() - - for path, template in paths: - sample_value = re.sub( - r"(\*\*|\*)", - lambda n: next(sample_names), template if template else '*' - ) - add_field(request, path, sample_value) - return request diff --git a/packages/gapic-generator/tests/fragments/test_required_non_string.proto b/packages/gapic-generator/tests/fragments/test_required_non_string.proto new file mode 100644 index 000000000000..fb055d60193b --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_required_non_string.proto @@ -0,0 +1,41 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/annotations.proto"; + +service RestService { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.http) = { + get: "/restservice/v1/mass_kg/{mass_kg}/length_cm/{length_cm}" + }; + } +} + + +message MethodRequest { + int32 mass_kg = 1 [(google.api.field_behavior) = REQUIRED]; + float length_cm = 2 [(google.api.field_behavior) = REQUIRED]; +} + +message MethodResponse { + string name = 1; +} \ No newline at end of file diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 774f81b17263..889dc629a3dd 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -470,9 +470,29 @@ def test_method_http_options_generate_sample(): http_rule = http_pb2.HttpRule( get='/v1/{resource.id=projects/*/regions/*/id/**}/stuff', ) - method = make_method('DoSomething', http_rule=http_rule) - sample = method.http_options[0].sample_request - assert json.loads(sample) == {'resource': { + + method = make_method( + 'DoSomething', + make_message( + name="Input", + fields=[ + make_field( + name="resource", + number=1, + type=11, + message=make_message( + "Resource", + fields=[ + make_field(name="id", type=9), + ], + ), + ), + ], + ), + http_rule=http_rule, + ) + sample = method.http_options[0].sample_request(method) + assert sample == {'resource': { 'id': 'projects/sample1/regions/sample2/id/sample3'}} @@ -480,9 +500,28 @@ def test_method_http_options_generate_sample_implicit_template(): http_rule = http_pb2.HttpRule( get='/v1/{resource.id}/stuff', ) - method = make_method('DoSomething', http_rule=http_rule) - sample = method.http_options[0].sample_request - assert 
json.loads(sample) == {'resource': { + method = make_method( + 'DoSomething', + make_message( + name="Input", + fields=[ + make_field( + name="resource", + number=1, + message=make_message( + "Resource", + fields=[ + make_field(name="id", type=9), + ], + ), + ), + ], + ), + http_rule=http_rule, + ) + + sample = method.http_options[0].sample_request(method) + assert sample == {'resource': { 'id': 'sample1'}} From 64d711ef2c2fdc8d838a9946df99277524199d3d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 13 Dec 2021 18:18:12 +0000 Subject: [PATCH 0684/1339] chore: release 0.58.2 (#1106) :robot: I have created a release \*beep\* \*boop\* --- ### [0.58.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.1...v0.58.2) (2021-12-13) ### Bug Fixes * fix case for expected field names in required fields test. ([#1107](https://www.github.com/googleapis/gapic-generator-python/issues/1107)) ([6a593f9](https://www.github.com/googleapis/gapic-generator-python/commit/6a593f9807141aaf6c13a8843804e9fa9b300c91)) * non-string required fields provide correct values ([#1108](https://www.github.com/googleapis/gapic-generator-python/issues/1108)) ([bc5f729](https://www.github.com/googleapis/gapic-generator-python/commit/bc5f729cf777d30e1053e23a1d115460952478af)) * syntax fix and test for multiple required fields ([#1105](https://www.github.com/googleapis/gapic-generator-python/issues/1105)) ([4e5fe2d](https://www.github.com/googleapis/gapic-generator-python/commit/4e5fe2db9d0d81929cc1559d3a134c9a38ae595c)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index eec318a6a3a5..a46ce652b3b4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +### [0.58.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.1...v0.58.2) (2021-12-13) + + +### Bug Fixes + +* fix case for expected field names in required fields test. ([#1107](https://www.github.com/googleapis/gapic-generator-python/issues/1107)) ([6a593f9](https://www.github.com/googleapis/gapic-generator-python/commit/6a593f9807141aaf6c13a8843804e9fa9b300c91)) +* non-string required fields provide correct values ([#1108](https://www.github.com/googleapis/gapic-generator-python/issues/1108)) ([bc5f729](https://www.github.com/googleapis/gapic-generator-python/commit/bc5f729cf777d30e1053e23a1d115460952478af)) +* syntax fix and test for multiple required fields ([#1105](https://www.github.com/googleapis/gapic-generator-python/issues/1105)) ([4e5fe2d](https://www.github.com/googleapis/gapic-generator-python/commit/4e5fe2db9d0d81929cc1559d3a134c9a38ae595c)) + ### [0.58.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.0...v0.58.1) (2021-12-09) From c1aecfb9dc60de113a0f110d1fae6e6ed9c6a341 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 28 Dec 2021 10:34:13 -0800 Subject: [PATCH 0685/1339] fix: add additional reserved names for disambiguation (#1114) The monikers "mapping" and "ignore_unknown_fields" have reserved meaning in the constructor of proto.Message and therefore any fields with those names need disambiguation. 
--- packages/gapic-generator/gapic/utils/reserved_names.py | 3 +++ .../tests/unit/samplegen/test_integration.py | 2 +- .../gapic-generator/tests/unit/samplegen/test_template.py | 2 +- .../tests/unit/schema/wrappers/test_field.py | 6 ++++++ 4 files changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index cf87e839925f..40d9301fa950 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -25,5 +25,8 @@ keyword.kwlist, # We make SOME exceptions for certain names that collide with builtins. set(dir(builtins)) - {"filter", "map", "id", "input", "property"}, + # "mapping" and "ignore_unknown_fields" have special uses + # in the constructor of proto.Message + {"mapping", "ignore_unknown_fields"}, ) ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 75ae35ec5af3..aefde2f1dc3e 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -23,7 +23,7 @@ from gapic.samplegen_utils import (types, utils as gapic_utils) from gapic.schema import (naming, wrappers) -from tests.unit.samplegen.common_types import (DummyField, DummyMessage, +from common_types import (DummyField, DummyMessage, DummyMessageTypePB, DummyMethod, DummyService, DummyIdent, DummyApiSchema, DummyNaming, enum_factory, message_factory) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 5a4086868d0c..42b8bb2aad69 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -21,7 +21,7 @@ from gapic.samplegen_utils.types import CallingForm from textwrap 
import dedent -from tests.unit.samplegen import common_types +import common_types def check_template(template_fragment, expected_output, **kwargs): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 7c9bfacbd10e..f823104e7752 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -357,6 +357,12 @@ def test_field_name_kword_disambiguation(): ) assert frum_field.name == "frum" + mapping_field = make_field(name="mapping") + assert mapping_field.name == "mapping_" + + ignore_field = make_field(name="ignore_unknown_fields") + assert ignore_field.name == "ignore_unknown_fields_" + def test_field_resource_reference(): field = make_field(name='parent', type='TYPE_STRING') From 8f396581d9e106201e6a0764fa4ec2b24755282b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 28 Dec 2021 14:56:27 -0800 Subject: [PATCH 0686/1339] chore: release 0.58.3 (#1115) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a46ce652b3b4..cafdb7ae1fdc 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +### [0.58.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.2...v0.58.3) (2021-12-28) + + +### Bug Fixes + +* add additional reserved names for disambiguation ([#1114](https://www.github.com/googleapis/gapic-generator-python/issues/1114)) ([1cffd8d](https://www.github.com/googleapis/gapic-generator-python/commit/1cffd8d99936cd10649faf05e0288b693e718f81)) + ### 
[0.58.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.1...v0.58.2) (2021-12-13) From 32ee58efd1321ba00bd911f8bfa38f66bd2f8ab4 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Wed, 29 Dec 2021 11:14:26 -0800 Subject: [PATCH 0687/1339] feat: port REST transport to Ads templates (#1003) Add REST transport to Ads templates Add REST transport tests to Ads generated unit tests Boost google-api-core dependency version Rewrite of REST transport call stubs to assist testing Add fragment tests for Ads Multiple bugfixes --- .../.github/sync-repo-settings.yaml | 8 + .../.github/workflows/tests.yaml | 16 +- .../%sub/services/%service/client.py.j2 | 431 +++- .../%sub/services/%service/pagers.py.j2 | 13 +- .../%service/transports/__init__.py.j2 | 17 +- .../services/%service/transports/base.py.j2 | 103 +- .../services/%service/transports/grpc.py.j2 | 228 ++- .../services/%service/transports/rest.py.j2 | 339 ++++ .../gapic/ads-templates/noxfile.py.j2 | 2 +- .../gapic/ads-templates/tests/__init__.py.j2 | 2 + .../ads-templates/tests/unit/__init__.py.j2 | 2 + .../gapic/%name_%version/%sub/__init__.py.j2 | 2 + .../%name_%version/%sub/test_%service.py.j2 | 1737 ++++++++++++++--- .../%name_%version/test_module_import.py.j2 | 12 + .../tests/unit/gapic/__init__.py.j2 | 2 + .../%sub/services/%service/pagers.py.j2 | 15 +- .../services/%service/transports/base.py.j2 | 1 - .../services/%service/transports/rest.py.j2 | 312 +-- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 475 +++-- packages/gapic-generator/noxfile.py | 81 +- .../asset_v1/services/asset_service/pagers.py | 3 - .../services/asset_service/transports/base.py | 1 - .../unit/gapic/asset_v1/test_asset_service.py | 165 +- .../iam_credentials/transports/base.py | 1 - .../credentials_v1/test_iam_credentials.py | 55 +- .../services/config_service_v2/pagers.py | 4 - .../config_service_v2/transports/base.py | 1 - .../services/logging_service_v2/pagers.py | 3 - 
.../logging_service_v2/transports/base.py | 1 - .../services/metrics_service_v2/pagers.py | 1 - .../metrics_service_v2/transports/base.py | 1 - .../logging_v2/test_config_service_v2.py | 285 +-- .../logging_v2/test_logging_service_v2.py | 105 +- .../logging_v2/test_metrics_service_v2.py | 75 +- .../redis_v1/services/cloud_redis/pagers.py | 1 - .../services/cloud_redis/transports/base.py | 1 - .../unit/gapic/redis_v1/test_cloud_redis.py | 115 +- .../gapic-generator/tests/system/conftest.py | 56 +- .../system/{test_grpc_lro.py => test_lro.py} | 4 + .../{test_grpc_streams.py => test_streams.py} | 20 + .../{test_grpc_unary.py => test_unary.py} | 0 42 files changed, 3552 insertions(+), 1146 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/__init__.py.j2 rename packages/gapic-generator/tests/system/{test_grpc_lro.py => test_lro.py} (92%) rename packages/gapic-generator/tests/system/{test_grpc_streams.py => test_streams.py} (90%) rename packages/gapic-generator/tests/system/{test_grpc_unary.py => test_unary.py} (100%) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 7cd00810e3e9..c4a09c500acf 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -22,11 +22,19 @@ branchProtectionRules: - 'showcase-unit (3.9)' - 'showcase-unit (3.9, _alternative_templates)' - 'showcase-unit-add-iam-methods' + - 'integration' - 'style-check' - 'unit (3.6)' - 'unit 
(3.7)' - 'unit (3.8)' - 'unit (3.9)' + - 'fragment (3.6)' + - 'fragment (3.7)' + - 'fragment (3.8)' + - 'fragment (3.9)' + - 'fragment (3.7, _alternative_templates)' + - 'fragment (3.8, _alternative_templates)' + - 'fragment (3.9, _alternative_templates)' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true requiresStrictStatusChecks: true diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 502443da991f..caa39e280c39 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -74,7 +74,7 @@ jobs: ./gapic-showcase run & cd - env: - SHOWCASE_VERSION: 0.16.0 + SHOWCASE_VERSION: 0.18.0 - name: Install nox. run: python -m pip install nox - name: Install protoc 3.12.1. @@ -144,7 +144,7 @@ jobs: cd .. nox -s ${{ matrix.target }} env: - SHOWCASE_VERSION: 0.16.0 + SHOWCASE_VERSION: 0.18.0 # TODO(yon-mg): add compute unit tests showcase-unit: strategy: @@ -183,7 +183,7 @@ jobs: - name: Run unit tests. run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} env: - SHOWCASE_VERSION: 0.16.0 + SHOWCASE_VERSION: 0.18.0 showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: @@ -214,7 +214,7 @@ jobs: - name: Run unit tests. run: nox -s showcase_unit_add_iam_methods env: - SHOWCASE_VERSION: 0.16.0 + SHOWCASE_VERSION: 0.18.0 showcase-mypy: runs-on: ubuntu-latest strategy: @@ -248,7 +248,7 @@ jobs: - name: Typecheck the generated output. run: nox -s showcase_mypy${{ matrix.variant }} env: - SHOWCASE_VERSION: 0.16.0 + SHOWCASE_VERSION: 0.18.0 snippetgen: runs-on: ubuntu-latest steps: @@ -299,6 +299,10 @@ jobs: strategy: matrix: python: [3.6, 3.7, 3.8, 3.9] + variant: ['', _alternative_templates] + exclude: + - python: 3.6 + variant: _alternative_templates runs-on: ubuntu-latest steps: - name: Cancel Previous Runs @@ -319,7 +323,7 @@ jobs: run: | python -m pip install nox - name: Run fragment tests. 
- run: nox -s fragment-${{ matrix.python }} + run: nox -s fragment${{ matrix.variant }}-${{ matrix.python }} integration: runs-on: ubuntu-latest container: gcr.io/gapic-images/googleapis-bazel:20210105 diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index c38929917b59..5427919d92ad 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -32,9 +32,18 @@ except AttributeError: # pragma: NO COVER {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO +{% if 'grpc' in opts.transport %} from .transports.grpc import {{ service.name }}GrpcTransport +{% endif %} +{% if 'rest' in opts.transport %} +from .transports.rest import {{ service.name }}RestTransport +{% endif %} class {{ service.client_name }}Meta(type): @@ -45,12 +54,17 @@ class {{ service.client_name }}Meta(type): objects. """ _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] + {% if "grpc" in opts.transport %} _transport_registry['grpc'] = {{ service.name }}GrpcTransport + {% endif %} + {% if "rest" in opts.transport %} + _transport_registry["rest"] = {{ service.name }}RestTransport + {% endif %} def get_transport_class(cls, label: str = None, ) -> Type[{{ service.name }}Transport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. 
If none is @@ -73,7 +87,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -100,7 +115,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - DEFAULT_ENDPOINT = {% if service.host %}'{{ service.host }}'{% else %}None{% endif %} + DEFAULT_ENDPOINT = {% if service.host %}"{{ service.host }}"{% else %}None{% endif %} DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT @@ -108,7 +123,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -138,17 +154,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ credentials = service_account.Credentials.from_service_account_file( filename) - kwargs['credentials'] = credentials + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @property def transport(self) -> {{ service.name }}Transport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - {{ service.name }}Transport: The transport used by the client instance. + {{ service.name }}Transport: The transport used by the client + instance. 
""" return self._transport @@ -168,20 +185,21 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for message in service.resource_messages|sort(attribute="resource_type") %} @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: - """Return a fully-qualified {{ message.resource_type|snake_case }} string.""" + """Returns a fully-qualified {{ message.resource_type|snake_case }} string.""" return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @staticmethod def parse_{{ message.resource_type|snake_case }}_path(path: str) -> Dict[str,str]: - """Parse a {{ message.resource_type|snake_case }} path into its component segments.""" + """Parses a {{ message.resource_type|snake_case }} path into its component segments.""" m = re.match(r"{{ message.path_regex_str }}", path) return m.groupdict() if m else {} - {% endfor %} + + {% endfor %}{# resources #} {% for resource_msg in service.common_resources.values()|sort(attribute="type_name") %} @staticmethod def common_{{ resource_msg.message_type.resource_type|snake_case }}_path({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}: str, {%endfor %}) -> str: - """Return a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" + """Returns a fully-qualified {{ resource_msg.message_type.resource_type|snake_case }} string.""" return "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @staticmethod @@ -198,7 +216,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the {{ (service.client_name|snake_case).replace('_', ' ') }}. 
+ """Instantiates the {{ (service.client_name|snake_case).replace('_', ' ') }}. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -206,7 +224,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.{{ service.name }}Transport]): The + transport (Union[str, {{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. client_options (google.api_core.client_options.ClientOptions): Custom options for the @@ -245,21 +263,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -274,7 +289,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -282,26 +298,35 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # instance provides an extensibility point for unusual situations. if isinstance(transport, {{ service.name }}Transport): # transport is a {{ service.name }}Transport instance. - if credentials: - raise ValueError('When providing a transport instance, ' - 'provide its credentials directly.') + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) self._transport = transport - elif isinstance(transport, str): + else: Transport = type(self).get_transport_class(transport) self._transport = Transport( - credentials=credentials, host=self.DEFAULT_ENDPOINT - ) - else: - self._transport = {{ service.name }}GrpcTransport( credentials=credentials, + credentials_file=client_options.credentials_file, host=api_endpoint, - ssl_channel_credentials=ssl_credentials, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) {% for method in service.methods.values() %} + {% if method.operation_service %}{# DIREGAPIC LRO #} + def {{ method.name|snake_case }}_unary(self, + {% else %} def {{ method.name|snake_case }}(self, + {% endif %}{# DIREGAPIC LRO #} {% if not method.client_streaming %} request: Union[{{ method.input.ident }}, dict] = None, *, @@ -325,18 +350,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Args: {% if not method.client_streaming %} request (Union[{{ method.input.ident.sphinx }}, dict]): - The request object.{{ ' ' }} 
+ The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} - {{ field.name }} (:class:`{{ field.ident.sphinx }}`): + {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(width=72, indent=16) }} This corresponds to the ``{{ key }}`` field on the ``request`` instance; if ``request`` is provided, this should not be set. {% endfor %} {% else %} - requests (Iterator[`{{ method.input.ident.sphinx }}`]): - The request object iterator.{{ ' ' }} + requests (Iterator[{{ method.input.ident.sphinx }}]): + The request object iterator.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -352,7 +377,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% else %} Iterable[{{ method.client_output.ident.sphinx }}]: {% endif %} - {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format='rst') }} + {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format="rst") }} {% endif %} """ {% if method.is_deprecated %} @@ -365,21 +390,21 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.flattened_fields %} # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([{{ method.flattened_fields.values()|join(', ', attribute='name') }}]): + has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) + if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - {% endif %} {# method.flattened_fields #} + {% endif %} {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #} - # The request isn't a proto-plus wrapped type. - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = {{ method.input.ident }}(**request) - {% if method.flattened_fields %}{# Cross-package req and flattened fields #} elif not request: - request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) - {% endif %}{# Cross-package req and flattened fields #} - {% else %} {# Request is in _our_ package #} + # Null request, just make one. + request = {{ method.input.ident }}() + {% else %} # Minor optimization to avoid making a copy if the user passes # in a {{ method.input.ident }}. 
# There's no risk of modifying the input as we've already verified @@ -387,6 +412,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if not isinstance(request, {{ method.input.ident }}): request = {{ method.input.ident }}(request) {% endif %} {# different request package #} + {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} {% if method.flattened_fields and method.input.ident.package == method.ident.package %} # If we have keyword arguments corresponding to fields on the @@ -394,7 +420,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} {% for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} if {{ field.name }} is not None: + {# Repeated values is a special case, because values can be lists. #} + {# In order to not confuse the marshalling logic, extend these fields instead of assigning #} + {% if field.ident.ident|string() == "struct_pb2.Value" and field.repeated %} + request.{{ key }}.extend({{ field.name }}) + {% else %} request.{{ key }} = {{ field.name }} + {% endif %}{# struct_pb2.Value #} {% endfor %} {# Map-y fields can be _updated_, however #} {% for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} @@ -402,11 +434,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if {{ field.name }}: request.{{ key }}.update({{ field.name }}) {% else %} - {# And list-y fields can be _extended_ -#} + {# And list-y fields can be _extended_ #} if {{ field.name }}: request.{{ key }}.extend({{ field.name }}) {% endif %} {# field.map #} - {% endfor %} {# key, field in method.flattened_fields.items() #} + {% endfor %} {# method.flattened_fields.items() #} {% endif %} {# method.client_streaming #} # Wrap the RPC method; this adds retry and timeout information, @@ -420,12 +452,12 @@ class {{ service.client_name 
}}(metaclass={{ service.client_name }}Meta): gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ('{{ field_header }}', request.{{ field_header }}), + ("{{ field_header }}", request.{{ field_header }}), {% endif %} {% endfor %} )), ) - {% endif %} {# method.field_headers #} + {% endif %} # Send the request. {%+ if not method.void %}response = {% endif %}rpc( @@ -433,7 +465,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request, {% else %} requests, - {% endif %} {# method.client_streaming #} + {% endif %} retry=retry, timeout=timeout, metadata=metadata, @@ -457,17 +489,306 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): response=response, metadata=metadata, ) - {% endif %} {# method.lro #} + {% endif %} {% if not method.void %} # Done; return the response. return response - {% endif %} {# method.void #} - {{ '\n' }} + {% endif %} + {{ "\n" }} {% endfor %} + {% if opts.add_iam_methods %} + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. 
A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example**
+ ::
+ {
+ "bindings": [
+ {
+ "role": "roles/resourcemanager.organizationAdmin",
+ "members": [
+ "user:mike@example.com",
+ "group:admins@example.com",
+ "domain:google.com",
+ "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+ ]
+ },
+ {
+ "role": "roles/resourcemanager.organizationViewer",
+ "members": ["user:eve@example.com"],
+ "condition": {
+ "title": "expirable access",
+ "description": "Does not grant access after Sep 2020",
+ "expression": "request.time <
+ timestamp('2020-10-01T00:00:00.000Z')",
+ }
+ }
+ ]
+ }
+ **YAML Example**
+ ::
+ bindings:
+ - members:
+ - user:mike@example.com
+ - group:admins@example.com
+ - domain:google.com
+ - serviceAccount:my-project-id@appspot.gserviceaccount.com
+ role: roles/resourcemanager.organizationAdmin
+ - members:
+ - user:eve@example.com
+ role: roles/resourcemanager.organizationViewer
+ condition:
+ title: expirable access
+ description: Does not grant access after Sep 2020
+ expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide <https://cloud.google.com/iam/docs>`__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.get_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "{{ api.naming.warehouse_package_name }}", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + __all__ = ( - '{{ service.client_name }}', + "{{ service.client_name }}", ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index 26075b4e61e5..9b99ad48f596 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -7,7 +7,7 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. 
-#} -from typing import Any, Callable, Iterable, Sequence, Tuple +from typing import Any, Callable, Iterable, Sequence, Tuple, Optional, Iterator {% filter sort_lines %} {% for method in service.methods.values() | selectattr('paged_result_field') %} @@ -70,9 +70,18 @@ class {{ method.name }}Pager: self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterable') }}: + {% if method.paged_result_field.map %} + def __iter__(self) -> Iterator[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: + for page in self.pages: + yield from page.{{ method.paged_result_field.name}}.items() + + def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: + return self._response.{{ method.paged_result_field.name }}.get(key) + {% else %} + def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterator') }}: for page in self.pages: yield from page.{{ method.paged_result_field.name }} + {% endif %} def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 index d175b9a97ad5..1241886b6370 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 @@ -6,16 +6,29 @@ from collections import OrderedDict from typing import Dict, Type from .base import {{ service.name }}Transport +{% if 'grpc' in opts.transport %} from .grpc import {{ service.name }}GrpcTransport - +{% endif %} +{% if 'rest' in 
opts.transport %} +from .rest import {{ service.name }}RestTransport +{% endif %} # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[{{ service.name }}Transport]] +{% if 'grpc' in opts.transport %} _transport_registry['grpc'] = {{ service.name }}GrpcTransport - +{% endif %} +{% if 'rest' in opts.transport %} +_transport_registry['rest'] = {{ service.name }}RestTransport +{% endif %} __all__ = ( '{{ service.name }}Transport', + {% if 'grpc' in opts.transport %} '{{ service.name }}GrpcTransport', + {% endif %} + {% if 'rest' in opts.transport %} + '{{ service.name }}RestTransport', + {% endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 46ad9ec23381..7d0ab6ef5e12 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -3,22 +3,29 @@ {% block content %} import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union import pkg_resources import google.auth # type: ignore -from google.api_core import gapic_v1 -from google.api_core import retry as retries +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore {% if service.has_lro %} from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} {{ 
method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} {% endfilter %} try: @@ -31,7 +38,7 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -class {{ service.name }}Transport(metaclass=abc.ABCMeta): +class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" AUTH_SCOPES = ( @@ -40,11 +47,18 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): {% endfor %} ) + DEFAULT_HOST: str = {% if service.host %}'{{ service.host }}'{% else %}{{ '' }}{% endif %} + def __init__( self, *, - host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + host: str = DEFAULT_HOST, credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, ) -> None: """Instantiate the transport. @@ -56,30 +70,54 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. 
Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: host += ':443' self._host = host + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + # If no credentials are provided, then determine the appropriate # defaults. - if credentials is None: - credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES) + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) def _prep_wrapped_messages(self, client_info): - # Precomputed wrapped methods + # Precompute the wrapped methods. self._wrapped_methods = { {% for method in service.methods.values() %} self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( @@ -106,7 +144,7 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): def close(self): """Closes resources associated with the transport. - .. warning:: + .. 
warning:: Only call this method if the transport is NOT shared with other clients - this may cause errors in other clients! """ @@ -117,18 +155,53 @@ class {{ service.name }}Transport(metaclass=abc.ABCMeta): @property def operations_client(self): """Return the client designed to process long-running operations.""" - raise NotImplementedError + raise NotImplementedError() {% endif %} {% for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> typing.Callable[ + def {{ method.name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], - {{ method.output.ident }}]: - raise NotImplementedError + Union[ + {{ method.output.ident }}, + Awaitable[{{ method.output.ident }}] + ]]: + raise NotImplementedError() {% endfor %} + {% if opts.add_iam_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + {% endif %} + __all__ = ( '{{ service.name }}Transport', ) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index eaba5f48361e..baa63b846e51 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -3,7 +3,7 @@ {% block content %} import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers {% if service.has_lro %} @@ -21,6 +21,10 @@ import grpc # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -37,6 +41,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ + _stubs: Dict[str, Callable] + def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: ga_credentials.Credentials = None, @@ -46,8 +52,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -76,7 +84,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. 
It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -84,118 +96,135 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + {% if service.has_lro %} + self._operations_client: Optional[operations_v1.OperationsClient] = None + {% endif %} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn("api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning) - - host = api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" - if credentials is None: - credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. 
+ # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - if credentials is None: - credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - ssl_credentials=ssl_channel_credentials, - scopes=self.AUTH_SCOPES, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: ga_credentials.Credentials = None, + credentials_file: str = None, scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. 
+ host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. """ + return grpc_helpers.create_channel( host, credentials=credentials, - scopes=scopes or cls.AUTH_SCOPES, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs ) - def close(self): - self.grpc_channel.close() - @property def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service. """ return self._grpc_channel + {% if service.has_lro %} @property @@ -206,13 +235,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): client. """ # Sanity check: Only create a new client if we do not already have one. - if 'operations_client' not in self.__dict__: - self.__dict__['operations_client'] = operations_v1.OperationsClient( + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( self.grpc_channel ) # Return the client from cache. 
- return self.__dict__['operations_client'] + return self._operations_client {% endif %} {% for method in service.methods.values() %} @@ -246,6 +275,89 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return self._stubs['{{ method.name|snake_case }}'] {% endfor %} + {% if opts.add_iam_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + {% endif %} + + def close(self): + self.grpc_channel.close() __all__ = ( '{{ service.name }}GrpcTransport', diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 new file mode 100644 index 000000000000..3a47d1e3636d --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -0,0 +1,339 @@ +{% extends '_base.py.j2' %} + +{% block content %} + + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +{% if service.has_lro %} +from google.api_core import operations_v1 +from google.protobuf import json_format +{% endif %} +from requests import __version__ as requests_version +import dataclasses +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +{# TODO(yon-mg): re-add 
python_import/ python_modules from removed diff/current grpc template code #} +{% filter sort_lines %} +{% for method in service.methods.values() %} +{{method.input.ident.python_import}} +{{method.output.ident.python_import}} +{% endfor %} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% endfilter %} + +from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + +@dataclasses.dataclass +class {{service.name}}RestStub: + _session: AuthorizedSession + _host: str + +class {{service.name}}RestTransport({{service.name}}Transport): + """REST backend transport for {{ service.name }}. + + {{ service.meta.doc|rst(width=72, indent=4) }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + _STUBS: Dict[str, {{service.name}}RestStub] = {} + + + {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + ) -> None: + """Instantiate the transport. + + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{ ' ' }}The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
+ certificate to configure mutual TLS HTTP channel. It is ignored
+ if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you are developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ """
+ # Run the base constructor
+ # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + {% if service.has_lro %} + self._operations_client = None + {% endif %} + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._prep_wrapped_messages(client_info) + + {% if service.has_lro %} + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options = { + {% for selector, rules in api.http_options.items() %} + {% if selector.startswith('google.longrunning.Operations') %} + '{{ selector }}': [ + {% for rule in rules %} + { + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rules #} + ], + {% endif %}{# selector.startswith Operations #} + {% endfor %}{# http_options #} + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options) + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. 
+ return self._operations_client + + + {% endif %}{# service.has_lro #} + {% for method in service.methods.values()|sort(attribute="name") %} + class _{{method.name}}({{service.name}}RestStub): + def __hash__(self): + return hash("{{method.name}}") + + {% if not (method.server_streaming or method.client_streaming) %} + {% if method.input.required_fields %} + __REQUIRED_FIELDS_DEFAULT_VALUES = { + {% for req_field in method.input.required_fields if req_field.is_primitive %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.field_pb.default_value or 0 }}{% endif %},{# default is str #} + {% endfor %} + } + + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + {% endif %}{# required fields #} + {% endif %}{# not (method.server_streaming or method.client_streaming) #} + + def __call__(self, + request: {{method.input.ident}}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{method.output.ident}}: + {% if method.http_options and not (method.server_streaming or method.client_streaming) %} + r"""Call the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request (~.{{ method.input.ident }}): + The request object.{{ ' ' }} + {{- method.input.meta.doc|rst(width=72, indent=16) }} + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ {% if not method.void %} + + Returns: + ~.{{ method.output.ident }}: + {{ method.output.meta.doc|rst(width=72, indent=16) }} + {% endif %} + """ + + http_options = [ + {%- for rule in method.http_options %}{ + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rule in method.http_options #} + ] + + request_kwargs = {{method.input.ident}}.to_dict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + {% set body_spec = method.http_options[0].body %} + {%- if body_spec %} + # Jsonify the request body + body = {% if body_spec == '*' -%} + {{method.input.ident}}.to_json( + {{method.input.ident}}(transcoded_request['body']), + {%- else -%} + {{method.input.fields[body_spec].type.ident}}.to_json( + {{method.input.fields[body_spec].type.ident}}(transcoded_request['body']), + {%- endif %}{# body_spec == "*" #} + including_default_value_fields=False, + use_integers_for_enums=False + ) + {%- endif %}{# body_spec #} + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads({{method.input.ident}}.to_json( + {{method.input.ident}}(transcoded_request['query_params']), + including_default_value_fields=False, + use_integers_for_enums=False + )) + + {% if method.input.required_fields %} + query_params.update(self._get_unset_required_fields(query_params)) + {% endif %}{# required fields #} + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + {% if body_spec %} + data=body, + {% endif %} + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + {% if not method.void %} + # Return the response + {% if method.lro %} + return_op = operations_pb2.Operation() + json_format.Parse(response.content, return_op, ignore_unknown_fields=True) + return return_op + {% else %} + return {{method.output.ident}}.from_json( + response.content, + ignore_unknown_fields=True + ) + + {% endif %}{# method.lro #} + {% endif %}{# method.void #} + {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} + {% if not method.http_options %} + raise RuntimeError( + "Cannot define a method without a valid 'google.api.http' annotation.") + + {% elif method.server_streaming or method.client_streaming %} + raise NotImplementedError( + "Streaming over REST is not yet defined for python client") + + {% else %} + raise NotImplementedError() + + {% endif %}{# method.http_options #} + + {%- endif %}{# unary method #} + {% endfor %} + {% for method in service.methods.values()|sort(attribute="name") %} + + @property + def {{method.name | snake_case}}(self) -> Callable[ + [{{method.input.ident}}], + {{method.output.ident}}]: + stub = self._STUBS.get("{{method.name | snake_case}}") + if not stub: + stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host) + + return stub + + {% endfor %} + + def close(self): + self._session.close() + + +__all__=( + '{{ service.name }}RestTransport', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 62a869c60736..cfe6b94671c5 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -11,7 +11,7 @@ import nox # type: ignore def unit(session): """Run the unit test suite.""" - session.install('coverage', 
'pytest', 'pytest-cov') + session.install('coverage', 'mock', 'pytest', 'pytest-cov') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/gapic/ads-templates/tests/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 0392324b06cb..38d91f3ae97c 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -3,14 +3,24 @@ {% block content %} import os -from unittest import mock +import mock import grpc +from grpc.experimental import aio +{% if "rest" in opts.transport %} +import json +{% endif %} import 
math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -{# Import the service itself as well as every proto module that it imports. -#} +{% if 'rest' in opts.transport %} +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +{% endif %} + +{# Import the service itself as well as every proto module that it imports. #} {% filter sort_lines %} import google.auth from google.auth import credentials as ga_credentials @@ -18,13 +28,19 @@ from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports + from google.api_core import client_options +from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers +from google.api_core import path_template {% if service.has_lro %} from google.api_core import future from google.api_core import operations_v1 from google.longrunning import operations_pb2 -{% endif %} +{% if "rest" in opts.transport %} +from google.protobuf import json_format +{% endif %}{# rest transport #} +{% endif %}{# lro #} from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types @@ -33,6 +49,11 @@ from google.api_core import gapic_v1 {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} {% endfilter %} @@ -62,28 +83,57 @@ def 
test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi -def test_{{ service.client_name|snake_case }}_from_service_account_info(): +@pytest.mark.parametrize("client_class", [ + {{ service.client_name }}, +]) +def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = {{ service.client_name }}.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) {% if service.host %} assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' {% endif %} -def test_{{ service.client_name|snake_case }}_from_service_account_file(): +@pytest.mark.parametrize("transport_class,transport_name", [ + {% if 'grpc' in opts.transport %} + (transports.{{ service.grpc_transport_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + (transports.{{ service.rest_transport_name }}, "rest"), + {% endif %} +]) +def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + {{ 
service.client_name }}, +]) +def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = {{ service.client_name }}.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) - client = {{ service.client_name }}.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) {% if service.host %} assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' @@ -92,81 +142,130 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(): def test_{{ service.client_name|snake_case }}_get_transport_class(): transport = {{ service.client_name }}.get_transport_class() - assert transport == transports.{{ service.name }}GrpcTransport + available_transports = [ + {% for transport_name in opts.transport %} + transports.{{ service.name }}{{ transport_name.capitalize() }}Transport, + {% endfor %} + ] + assert transport in available_transports - transport = {{ service.client_name }}.get_transport_class("grpc") - assert transport == transports.{{ service.name }}GrpcTransport + transport = {{ service.client_name }}.get_transport_class("{{ opts.transport[0] }}") + assert transport == transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, 
transports.{{ service.rest_transport_name }}, "rest"), + {% endif %} +]) @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) -def test_{{ service.client_name|snake_case }}_client_options(): +def test_{{ service.client_name|snake_case }}_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - transport = transports.{{ service.name }}GrpcTransport( + with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: + transport = transport_class( credentials=ga_credentials.AnonymousCredentials() ) - client = {{ service.client_name }}(transport=transport) + client = client_class(transport=transport) gtc.assert_not_called() # Check that if channel is provided via str we will create a new one. - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.{{ service.client_name }}.get_transport_class') as gtc: - client = {{ service.client_name }}(transport="grpc") + with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) - grpc_transport.assert_called_once_with( - ssl_channel_credentials=None, + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( credentials=None, + credentials_file=None, host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT - # is "never". + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - ssl_channel_credentials=None, + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( credentials=None, + credentials_file=None, host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - ssl_channel_credentials=None, + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( credentials=None, + credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = {{ service.client_name }}() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = {{ service.client_name }}() - + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "true"), + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", "false"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "true"), + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "false"), + {% endif %} +]) @mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -@pytest.mark.parametrize("use_client_cert_env", ["true", "false"]) -def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env): +def test_{{ 
service.client_name|snake_case }}_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. @@ -174,68 +273,127 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(use_client_cert_env) # GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - ssl_channel_creds = mock.Mock() - with mock.patch('grpc.ssl_channel_credentials', return_value=ssl_channel_creds): - grpc_transport.return_value = None - client = {{ service.client_name }}(client_options=options) - - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT - - grpc_transport.assert_called_once_with( - ssl_channel_credentials=expected_ssl_channel_creds, - credentials=None, - host=expected_host, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + 
host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): - with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: - with mock.patch('google.auth.transport.grpc.SslCredentials.ssl_credentials', new_callable=mock.PropertyMock) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ssl_credentials_mock.return_value - - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - ssl_channel_credentials=expected_ssl_channel_creds, - credentials=None, - host=expected_host, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: - with mock.patch('google.auth.transport.grpc.SslCredentials.__init__', return_value=None): - with mock.patch('google.auth.transport.grpc.SslCredentials.is_mtls', new_callable=mock.PropertyMock) as is_mtls_mock: - is_mtls_mock.return_value = False - grpc_transport.return_value = None - client = {{ service.client_name }}() - grpc_transport.assert_called_once_with( - ssl_channel_credentials=None, + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( credentials=None, - host=client.DEFAULT_ENDPOINT, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + {% endif %} +]) +def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + {% endif %} +]) +def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) +{% if 'grpc' in opts.transport %} +{# TODO(dovs): genericize this function#} def test_{{ service.client_name|snake_case }}_client_options_from_dict(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: @@ -244,15 +402,24 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_options={'api_endpoint': 'squid.clam.whelk'} ) grpc_transport.assert_called_once_with( - ssl_channel_credentials=None, credentials=None, + credentials_file=None, host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) +{% endif %} -{% for method in service.methods.values() %} -def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ method.input.ident }}): +{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method_name }}(request_type, transport: str = 'grpc'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, 
@@ -278,8 +445,8 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m call.return_value = iter([{{ method.output.ident }}()]) {% else %} call.return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} {# This is a hack to only pick one field #} @@ -293,7 +460,7 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) {% else %} - response = client.{{ method.name|snake_case }}(request) + response = client.{{ method_name }}(request) {% endif %} # Establish that the underlying gRPC stub method was called. @@ -333,12 +500,32 @@ def test_{{ method.name|snake_case }}(transport: str = 'grpc', request_type={{ m {% endif %} -def test_{{ method.name|snake_case }}_from_dict(): - test_{{ method.name|snake_case }}(request_type=dict) +{% if not method.client_streaming %} +def test_{{ method_name }}_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.name|snake_case }}), + '__call__') as call: + client.{{ method_name }}() + call.assert_called() + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == {{ method.input.ident }}() + {% endif %} +{% endif %} {% if method.field_headers and not method.client_streaming %} -def test_{{ method.name|snake_case }}_field_headers(): +def test_{{ method_name }}_field_headers(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -364,7 +551,7 @@ def test_{{ method.name|snake_case }}_field_headers(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - client.{{ method.name|snake_case }}(request) + client.{{ method_name }}(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -380,10 +567,12 @@ def test_{{ method.name|snake_case }}_field_headers(): {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] + + {% endif %} {% if method.ident.package != method.input.ident.package %} -def test_{{ method.name|snake_case }}_from_dict(): +def test_{{ method_name }}_from_dict_foreign(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -401,7 +590,7 @@ def test_{{ method.name|snake_case }}_from_dict(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - response = client.{{ method.name|snake_case }}(request={ + response = client.{{ method_name }}(request={ {% for field in method.input.fields.values() %} '{{ field.name }}': {{ field.mock_value }}, {% endfor %} @@ -412,7 +601,7 @@ def test_{{ method.name|snake_case }}_from_dict(): {% endif %} {% if method.flattened_fields %} -def test_{{ method.name|snake_case }}_flattened(): +def test_{{ method_name }}_flattened(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -433,7 
+622,7 @@ def test_{{ method.name|snake_case }}_flattened(): {% endif %} # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.{{ method.name|snake_case }}( + client.{{ method_name }}( {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, {% endfor %} @@ -449,7 +638,15 @@ def test_{{ method.name|snake_case }}_flattened(): {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} - assert args[0].{{ key }} == {{ field.mock_value }} + arg = args[0].{{ key }} + mock_val = {{ field.mock_value }} + {% if field.ident|string() == "struct_pb2.Value" %} + from proto.marshal import Marshal + from proto.marshal.rules.struct import ValueRule + rule = ValueRule(marshal=Marshal(name="Test")) + mock_val = rule.to_python(mock_val) + {% endif %}{# struct_pb2.Value #} + assert arg == mock_val {% endif %} {% endif %}{% endfor %} {% for oneofs in method.flattened_oneof_fields().values() %} @@ -459,7 +656,8 @@ def test_{{ method.name|snake_case }}_flattened(): {% endfor %} -def test_{{ method.name|snake_case }}_flattened_error(): + +def test_{{ method_name }}_flattened_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), ) @@ -467,19 +665,23 @@ def test_{{ method.name|snake_case }}_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.{{ method.name|snake_case }}( + client.{{ method_name }}( {{ method.input.ident }}(), {% for field in method.flattened_fields.values() %} {{ field.name }}={{ field.mock_value }}, {% endfor %} ) + + {% endif %} {% if method.paged_result_field %} -def test_{{ method.name|snake_case }}_pager(): +{% if not method.paged_result_field.map %} +def test_{{ method_name }}_pager(transport_name: str = "grpc"): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -527,7 +729,7 @@ def test_{{ method.name|snake_case }}_pager(): )), ) {% endif %} - pager = client.{{ method.name|snake_case }}(request={}) + pager = client.{{ method_name }}(request={}) assert pager._metadata == metadata @@ -536,16 +738,48 @@ def test_{{ method.name|snake_case }}_pager(): assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) -def test_{{ method.name|snake_case }}_pages(): +{% endif %} + +def test_{{ method_name }}_pages(transport_name: str = "grpc"): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), - '__call__') as call: + type(client.transport.{{ method.name|snake_case }}), + '__call__') as call: # Set the response to a series of pages. 
+ {% if method.paged_result_field.map%} + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + ) + {% else %} call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ @@ -573,131 +807,899 @@ def test_{{ method.name|snake_case }}_pages(): ), RuntimeError, ) - pages = list(client.{{ method.name|snake_case }}(request={}).pages) + {% endif %} + {# method.paged_result_field.map #} + pages = list(client.{{ method_name }}(request={}).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token + + {% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} -def test_{{ method.name|snake_case }}_raw_page_lro(): +def test_{{ method_name }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() assert response.raw_page is response -{% endif %} {#- method.paged_result_field #} - -{% endfor %} {#- method in methods #} - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
-    transport = transports.{{ service.name }}GrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = {{ service.client_name }}(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
+{% endif %}{# method.paged_result_field #}{% endwith %}{# method_name #}
+{% endfor %} {# method in methods for grpc #}
 
 
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.{{ service.name }}GrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = {{ service.client_name }}(transport=transport)
-    assert client.transport is transport
-
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.{{ service.name }}GrpcTransport(
+{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %}
+{# TODO(kbandes): remove this if condition when streaming is supported. #}
+{% if not (method.server_streaming or method.client_streaming) %}
+@pytest.mark.parametrize("request_type", [
+    {{ method.input.ident }},
+    dict,
+])
+def test_{{ method_name }}_rest(request_type, transport: str = 'rest'):
+    client = {{ service.client_name }}(
         credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
     )
-    channel = transport.grpc_channel
-    assert channel
+    # Send a request that will satisfy transcoding
+    request = {{ method.input.ident }}({{ method.http_options[0].sample_request(method) }})
+    {% if method.client_streaming %}
+    requests = [request]
+    {% endif %}
 
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
+ with mock.patch.object(type(client.transport._session), 'request') as req: + {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + return_value = iter([{{ method.output.ident }}()]) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} + ) + {% endif %} + req.return_value = Response() + req.return_value.status_code = 500 + req.return_value.request = PreparedRequest() + {% if method.void %} + json_return_value = '' + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + req.return_value._content = json_return_value.encode("UTF-8") + with pytest.raises(core_exceptions.GoogleAPIError): + # We only care that the correct exception is raised when putting + # the request over the wire, so an empty request is fine. + {% if method.client_streaming %} + client.{{ method_name }}(iter([requests])) + {% else %} + client.{{ method_name }}(request) + {% endif %} + + +{# TODO(kbandes): remove this if condition when lro and streaming are supported. 
#} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method.name|snake_case }}_rest(request_type): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert isinstance( - client.transport, - transports.{{ service.name }}GrpcTransport, - ) - -@pytest.mark.parametrize("transport_class", [ - transports.{{ service.grpc_transport_name }}, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + {% endif %} + {% endfor %} + request = request_type(request_init) + {% if method.client_streaming %} + requests = [request] + {% endif %} -def test_{{ service.name|snake_case }}_base_transport(): - # Instantiate the base transport. - with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: - Transport.return_value = None - transport = transports.{{ service.name }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + return_value = iter([{{ method.output.ident }}()]) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} ) + {% endif %} - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - {% for method in service.methods.values() %} - '{{ method.name|snake_case }}', - {% endfor %} - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + response = client.{{ method_name }}(request) + {% endif %} - with pytest.raises(NotImplementedError): - transport.close() + {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {# Cheeser assertion to force code coverage for bad paginated methods #} + assert response.raw_page is response - {% if service.has_lro %} - # Additionally, the LRO client 
(a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client {% endif %} + # Establish that the response is the type that we expect. + {% if method.void %} + assert response is None + {% elif method.lro %} + assert response.operation.name == "operations/spam" + {% else %} + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method.output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else %} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif %} + {% endif %}{# end oneof/optional #} + {% endfor %} + {% endif %} -def test_{{ service.name|snake_case }}_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default') as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.{{ service.name }}Transport() - adc.assert_called_once() + {% if method.input.required_fields %} +def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ident }}): + transport_class = transports.{{ service.rest_transport_name }} -def test_{{ service.name|snake_case }}_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - {{ service.client_name }}() - adc.assert_called_once_with(scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} + request_init = {} + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% if req_field.field_pb.type == 9 %} + request_init["{{ req_field.name }}"] = "{{ req_field.field_pb.default_value }}" + {% else %} + request_init["{{ req_field.name }}"] = {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }} + {% endif %}{# default is str #} + {% endfor %} + request = request_type(request_init) + jsonified_request = json.loads(request_type.to_json( + request, + including_default_value_fields=False, + use_integers_for_enums=False )) + # verify fields with default values are dropped + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + assert "{{ field_name }}" not in jsonified_request + {% endfor %} -def test_{{ service.name|snake_case }}_transport_auth_adc(): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
+ unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + assert "{{ field_name }}" in jsonified_request + assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] + {% endfor %} + + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + {% set mock_value = req_field.primitive_mock_as_str() %} + jsonified_request["{{ field_name }}"] = {{ mock_value }} + {% endfor %} + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + {% set mock_value = req_field.primitive_mock_as_str() %} + assert "{{ field_name }}" in jsonified_request + assert jsonified_request["{{ field_name }}"] == {{ mock_value }} + {% endfor %} + + + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(request_init) + + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + return_value = iter([{{ method.output.ident }}()]) + {% else %} + return_value = {{ method.output.ident }}() + {% endif %} + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "{{ method.http_options[0].method }}", + 'query_params': request_init, + } + {% if method.http_options[0].body %} + transcode_result['body'] = {} + {% endif %} + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + response = client.{{ method_name }}(request) + {% endif %} + + expected_params = [ + {% for req_field in method.input.required_fields if req_field.is_primitive %} + ( + "{{ req_field.name | camel_case }}", + {% if req_field.field_pb.type == 9 %} + "{{ req_field.field_pb.default_value }}", + {% else %} + {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, + {% endif %}{# default is str #} + ), + {% endfor %} + ] + 
actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + + {% endif %}{# required_fields #} + + +def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + {% endif %} + {% endfor %} + request = request_type(request_init) + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + {% if method.client_streaming %} + client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + client.{{ method_name }}(request) + {% endif %} + + +{% if method.flattened_fields and "rest" in opts.transport %} +def test_{{ method.name|snake_case }}_rest_flattened(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + return_value = iter([{{ method.output.ident }}()]) + {% else %} + return_value = {{ method.output.ident }}() + {% endif %} + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + # get arguments that satisfy an http rule for this method + sample_request = {{ method.http_options[0].sample_request(method) }} + + # get truthy value for each flattened field + mock_args = dict( + {% for field in method.flattened_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + ) + mock_args.update(sample_request) + client.{{ method_name }}(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + {% with uri = method.http_options[0].uri %} + assert path_template.validate("https://%s{{ uri }}" % client.transport._host, args[1]) + {% endwith %} + {# TODO(kbandes) - reverse-transcode request args to check all request fields #} + + +def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.{{ method_name }}( + {{ method.input.ident }}(), + {% for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) +{% endif %}{# flattened fields #} + + +{% if method.paged_result_field %} +def test_{{ method_name }}_rest_pager(transport: str = 'rest'): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + {% if method.paged_result_field.map%} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + ) + {% else %} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name 
}}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + ) + {% endif %} + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple({{ method.output.ident }}.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + sample_request["{{ field.name }}"] = {{ field.mock_value }} + {% endif %} + {% endfor %} + + + pager = client.{{ method_name }}(request=sample_request) + + {% if method.paged_result_field.map %} + assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) + assert pager.get('h') is None + {% endif %} + + results = list(pager) + assert len(results) == 6 + {% if method.paged_result_field.map %} + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) + for i in results) + {% endif %} + + pages = list(client.{{ 
method_name }}(request=sample_request).pages)
+    for page_, token in zip(pages, ['abc','def','ghi', '']):
+        assert page_.raw_page.next_page_token == token
+
+
+{%- else %}{# paged_result_field #}
+
+def test_{{ method_name }}_rest_error():
+    client = {{ service.client_name }}(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+    {%- if not method.http_options %}
+    # Since a `google.api.http` annotation is required for using a rest transport
+    # method, this should error.
+    with pytest.raises(RuntimeError) as runtime_error:
+        client.{{ method_name }}({})
+    assert ("Cannot define a method without a valid 'google.api.http' annotation."
+            in str(runtime_error.value))
+
+    {%- endif %}
+{% endif %}{# paged_result_field #}
+
+{% else %}{# this is an lro or streaming method #}
+def test_{{ method_name }}_rest_unimplemented():
+    client = {{ service.client_name }}(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = {{ method.input.ident }}()
+    {% if method.client_streaming %}
+    requests = [request]
+    {% endif %}
+    with pytest.raises(NotImplementedError):
+        client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %})
+
+
+{% endif %}{# not lro and not streaming #}{% else %}{# not method.http_options #}
+def test_{{ method_name }}_rest_no_http_options():
+    client = {{ service.client_name }}(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = {{ method.input.ident }}()
+    {% if method.client_streaming %}
+    requests = [request]
+    {% endif %}
+    with pytest.raises(RuntimeError):
+        client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %})
+
+
+{% endif %}{# not method.http_options #}
+{% endwith %}{# method_name #}
+
+{% endfor -%} {#- method in methods for rest #}
+
+{% for method in service.methods.values() if 'rest' in opts.transport and
+    not method.http_options %}{% with method_name = method.name|snake_case +
"_unary" if method.operation_service else method.name|snake_case %} +def test_{{ method_name }}_rest_error(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(RuntimeError) as runtime_error: + client.{{ method_name }}({}) + assert ("Cannot define a method without a valid 'google.api.http' annotation." + in str(runtime_error.value)) + + +{% endwith %}{# method_name #} +{% endfor %}{# for methods without http_options #} + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.client_name }}( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = {{ service.client_name }}( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = {{ service.client_name }}(transport=transport) + assert client.transport is transport + +{% if 'grpc' in opts.transport %} +{# TODO(dovs): parametrize this over async/sync grpc#} +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.{{ service.name }}GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +{% endif %} + +@pytest.mark.parametrize("transport_class", [ + {% if 'grpc' in opts.transport %} + transports.{{ service.grpc_transport_name }}, + {% endif %} + {% if 'rest' in opts.transport %} + transports.{{ service.rest_transport_name }}, + {% endif %} +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transports.{{ service.name }}GrpcTransport(host="squid.clam.whelk") - adc.assert_called_once_with(scopes=( + transport_class() + adc.assert_called_once() + +{% if 'grpc' in opts.transport %} +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.{{ service.name }}GrpcTransport, + ) +{% endif %} + +def test_{{ service.name|snake_case }}_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.{{ service.name }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_{{ service.name|snake_case }}_base_transport(): + # Instantiate the base transport. + with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.{{ service.name }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + {% for method in service.methods.values() %} + '{{ method.name|snake_case }}', + {% endfor %} + {% if opts.add_iam_methods %} + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + {% endif %} + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + {% if service.has_lro %} + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + {% endif %} + + +def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.{{ service.name }}Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + quota_project_id="octopus", + ) + + +def test_{{ service.name|snake_case }}_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.{{ service.name }}Transport() + adc.assert_called_once() + + +def test_{{ service.name|snake_case }}_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + {{ service.client_name }}() + adc.assert_called_once_with( + scopes=None, + default_scopes=( {% for scope in service.oauth_scopes %} '{{ scope }}', - {% endfor %} - )) + {% endfor %}), + quota_project_id=None, + ) + + +{% if 'grpc' in opts.transport %} +@pytest.mark.parametrize( + "transport_class", + [ + transports.{{ service.name }}GrpcTransport, + ], +) +def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + {%- for scope in service.oauth_scopes %} + '{{ scope }}', + {%- endfor %}), + quota_project_id="octopus", + ) + + + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.{{ service.name }}GrpcTransport, grpc_helpers), + ], +) +def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + {% with host = (service.host|default('localhost', true)) %} + create_channel.assert_called_with( + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + scopes=["1", "2"], + default_host="{{ host }}", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} + +{% endif %} + +{% if 'grpc' in opts.transport %} +@pytest.mark.parametrize("transport_class", + [ + transports.{{ service.grpc_transport_name }}, +]) +def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) +{% endif %} + +{% if 'rest' in opts.transport %} +def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.{{ service.rest_transport_name }} ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +{% if service.has_lro -%} +def test_{{ service.name|snake_case }}_rest_lro_client(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client +{%- endif %} +{% endif %} {# rest #} def test_{{ service.name|snake_case }}_host_no_port(): {% with host = (service.host|default('localhost', true)).split(':')[0] %} @@ -718,9 +1720,9 @@ def test_{{ service.name|snake_case }}_host_with_port(): assert client.transport._host == '{{ host }}:8000' {% endwith %} - +{% if 'grpc' in opts.transport %} def test_{{ service.name|snake_case }}_grpc_transport_channel(): - channel = grpc.insecure_channel('http://localhost/') + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.{{ service.name }}GrpcTransport( @@ -732,12 +1734,17 @@ def test_{{ service.name|snake_case }}_grpc_transport_channel(): assert transport._ssl_channel_credentials == None -@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}]) +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", + [ + transports.{{ service.grpc_transport_name }}, + ]) def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_source( transport_class ): with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -762,23 +1769,24 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred -@pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }},]) +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", + [ + transports.{{ service.grpc_transport_name }}, + ]) def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( transport_class ): @@ -788,7 +1796,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - with mock.patch.object(transport_class, "create_channel", autospec=True) as grpc_create_channel: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel mock_cred = mock.Mock() @@ -805,11 +1813,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %} - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -837,7 +1841,9 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client + {% endif %} +{% endif %} {# if grpc in opts #} {% with molluscs = cycler("squid", "clam", "whelk", "octopus", "oyster", "nudibranch", "cuttlefish", "mussel", "winkle", "nautilus", "scallop", "abalone") %} {% for message in service.resource_messages|sort(attribute="resource_type") %} @@ -888,7 +1894,8 @@ def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_p {% endfor %} {# common resources#} {% endwith %} {# cycler #} -def test_client_withDEFAULT_CLIENT_INFO(): + +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: @@ -906,26 +1913,276 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) prep.assert_called_once_with(client_info) -def test_grpc_transport_close(): + +{% if opts.add_iam_methods and 'grpc' in opts.transport %} +def test_set_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', ) - with mock.patch.object(type(client.transport._grpc_channel), 'close') as chan_close: - with client as _: - chan_close.assert_not_called() - chan_close.assert_called_once() -def test_grpc_client_ctx(): + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client as _: - pass - close.assert_called() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +{% endif %} + +def test_transport_close(): + transports = { + {% if 'rest' in opts.transport %} + "rest": "_session", + {% endif %} + {% if 'grpc' in opts.transport %} + "grpc": "_grpc_channel", + {% endif %} + } + + for transport, close_name in transports.items(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + {% if 'rest' in opts.transport %} + 'rest', + {% endif %} + {% if 'grpc' in opts.transport %} + 'grpc', + {% endif %} + ] + for transport in transports: + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 index 47da788b4502..dc72c61e9878 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/test_module_import.py.j2 @@ -26,8 +26,14 @@ def test_module_level_imports(): expected_names.append({{ service.client_name}}.__name__) from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.transport_name }} expected_names.append({{ service.transport_name }}.__name__) + {% if "grpc" in opts.transport %} from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.grpc_transport_name }} expected_names.append({{ service.grpc_transport_name }}.__name__) + {% endif %} + {% if "rest" in opts.transport %} + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.module_name }} import {{ service.rest_transport_name }} + expected_names.append({{ service.rest_transport_name }}.__name__) + {% endif %} {% endfor %} expected_names.sort() @@ -62,8 +68,14 @@ def test_versionsed_module_level_imports(): expected_names.append({{ service.client_name}}.__name__) from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.transport_name }} expected_names.append({{ service.transport_name }}.__name__) + {% if "grpc" in opts.transport %} from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.grpc_transport_name }} expected_names.append({{ service.grpc_transport_name }}.__name__) + {% endif %} + {% if "rest" in 
opts.transport %} + from {{ api.naming.module_namespace|join('.')}}.{{ api.naming.versioned_module_name }} import {{ service.rest_transport_name }} + expected_names.append({{ service.rest_transport_name }}.__name__) + {% endif %} {% endfor %} expected_names.sort() diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/__init__.py.j2 new file mode 100644 index 000000000000..34200f2eca9e --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/__init__.py.j2 @@ -0,0 +1,2 @@ + +{% extends '_base.py.j2' %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 3270aaf19a3b..cc8d34921e25 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -78,7 +78,7 @@ class {{ method.name }}Pager: yield from page.{{ method.paged_result_field.name}}.items() def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: - return self._response.items.get(key) + return self._response.{{ method.paged_result_field.name }}.get(key) {% else %} def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterator') }}: for page in self.pages: @@ -141,7 +141,19 @@ class {{ method.name }}AsyncPager: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response + {% if method.paged_result_field.map %} + def __aiter__(self) -> Iterator[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: + async def async_generator(): + async for page 
in self.pages: + for response in page.{{ method.paged_result_field.name}}.items(): + yield response + + return async_generator() + + def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: + return self._response.{{ method.paged_result_field.name }}.get(key) + {% else %} def __aiter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'AsyncIterator') }}: async def async_generator(): async for page in self.pages: @@ -150,6 +162,7 @@ class {{ method.name }}AsyncPager: return async_generator() + {% endif %} def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 66e9e56f8d69..3b803ebb150b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -105,7 +105,6 @@ class {{ service.name }}Transport(abc.ABC): **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 0d0998cf43b2..bfec82392306 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -1,3 +1,8 @@ +{% extends '_base.py.j2' %} + +{% block content %} + 
+ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore import grpc # type: ignore @@ -13,6 +18,7 @@ from google.api_core import operations_v1 from google.protobuf import json_format {% endif %} from requests import __version__ as requests_version +import dataclasses from typing import Callable, Dict, Optional, Sequence, Tuple, Union import warnings @@ -21,10 +27,6 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -{% extends '_base.py.j2' %} - -{% block content %} - {# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} {% filter sort_lines %} @@ -47,6 +49,11 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=requests_version, ) +@dataclasses.dataclass +class {{service.name}}RestStub: + _session: AuthorizedSession + _host: str + class {{service.name}}RestTransport({{service.name}}Transport): """REST backend transport for {{ service.name }}. @@ -58,7 +65,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - {# TODO(yon-mg): handle mtls stuff if that's relevant for rest transport #} + _STUBS: Dict[str, {{service.name}}RestStub] = {} + + + {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, credentials: ga_credentials.Credentials=None, @@ -95,7 +105,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + Generally, you only need to set this if you are developing your own client library. 
always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. @@ -125,7 +135,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if service.has_lro %} @property - def operations_client(self) -> operations_v1.OperationsClient: + def operations_client(self) -> operations_v1.AbstractOperationsClient: """Create the client designed to process long-running operations. This property caches on the instance; repeated calls return the same @@ -143,11 +153,11 @@ class {{service.name}}RestTransport({{service.name}}Transport): 'uri': '{{ rule.uri }}', {% if rule.body %} 'body': '{{ rule.body }}', - {% endif %} + {% endif %}{# rule.body #} }, {% endfor %}{# rules #} ], - {% endif %}{# longrunning.Operations #} + {% endif %}{# selector.startswith Operations #} {% endfor %}{# http_options #} } @@ -164,164 +174,160 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# service.has_lro #} - {% for method in service.methods.values() %} - {%- if method.http_options and not (method.server_streaming or method.client_streaming) %} + {% for method in service.methods.values()|sort(attribute="name") %} + class _{{method.name}}({{service.name}}RestStub): + def __hash__(self): + return hash("{{method.name}}") - {% if method.input.required_fields %} - __{{ method.name | snake_case }}_required_fields_default_values = { - {% for req_field in method.input.required_fields if req_field.is_primitive %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} - {% endfor %} - } - - - @staticmethod - def _{{ method.name | snake_case }}_get_unset_required_fields(message_dict): - return {k: v for k, v in {{service.name}}RestTransport.__{{ method.name | snake_case }}_required_fields_default_values.items() if k not in message_dict} - - - {% 
endif %}{# required fields #} + {% if not (method.server_streaming or method.client_streaming) %} + {% if method.input.required_fields %} + __REQUIRED_FIELDS_DEFAULT_VALUES = { + {% for req_field in method.input.required_fields if req_field.is_primitive %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + {% endfor %} + } - def _{{method.name | snake_case}}(self, - request: {{method.input.ident}}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> {{method.output.ident}}: - r"""Call the {{- ' ' -}} - {{ (method.name|snake_case).replace('_',' ')|wrap( - width=70, offset=45, indent=8) }} - {{- ' ' -}} method over HTTP. - Args: - request (~.{{ method.input.ident }}): - The request object.{{ ' ' }} - {{- method.input.meta.doc|rst(width=72, indent=16) }} - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- {% if not method.void %} - - Returns: - ~.{{ method.output.ident }}: - {{ method.output.meta.doc|rst(width=72, indent=16) }} - {% endif %} - """ + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + {% endif %}{# required fields #} + {% endif %}{# not (method.server_streaming or method.client_streaming) #} + + def __call__(self, + request: {{method.input.ident}}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{method.output.ident}}: + {% if method.http_options and not (method.server_streaming or method.client_streaming) %} + r"""Call the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request (~.{{ method.input.ident }}): + The request object.{{ ' ' }} + {{- method.input.meta.doc|rst(width=72, indent=16) }} + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ {% if not method.void %} + + Returns: + ~.{{ method.output.ident }}: + {{ method.output.meta.doc|rst(width=72, indent=16) }} + {% endif %} + """ - http_options = [ - {% for rule in method.http_options %} - { + http_options = [ + {%- for rule in method.http_options %}{ 'method': '{{ rule.method }}', 'uri': '{{ rule.uri }}', {% if rule.body %} 'body': '{{ rule.body }}', - {% endif %} + {% endif %}{# rule.body #} }, - {% endfor %} - ] - - request_kwargs = {{method.input.ident}}.to_dict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - {% set body_spec = method.http_options[0].body %} - {%- if body_spec %} - - # Jsonify the request body - body = {% if body_spec == '*' -%} - {{method.input.ident}}.to_json( - {{method.input.ident}}(transcoded_request['body']), - {%- else -%} - {{method.input.fields[body_spec].type.ident}}.to_json( - {{method.input.fields[body_spec].type.ident}}( - transcoded_request['body']), - {%- endif %} - - including_default_value_fields=False, - use_integers_for_enums=False - ) - {%- endif %}{# body_spec #} - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads({{method.input.ident}}.to_json( - {{method.input.ident}}(transcoded_request['query_params']), - including_default_value_fields=False, - use_integers_for_enums=False - )) - - {% if method.input.required_fields %} - query_params.update(self._{{ method.name | snake_case }}_get_unset_required_fields(query_params)) - {% endif %}{# required fields #} - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response=getattr(self._session, method)( - # Replace with proper schema configuration (http/https) logic - "https://{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - {% if body_spec %} - data=body, - {% endif %} - ) - - # In case of 
error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - {% if not method.void %} - - # Return the response - {% if method.lro %} - return_op = operations_pb2.Operation() - json_format.Parse(response.content, return_op, ignore_unknown_fields=True) - return return_op - {% else %} - return {{method.output.ident}}.from_json( - response.content, - ignore_unknown_fields=True - ) - {% endif %} - {% endif %} - {% else %} - - def _{{method.name | snake_case}}(self, - request: {{method.input.ident}}, *, - metadata: Sequence[Tuple[str, str]]=(), - ) -> {{method.output.ident}}: - r"""Placeholder: Unable to implement over REST - """ - {%- if not method.http_options %} - - raise RuntimeError( - "Cannot define a method without a valid 'google.api.http' annotation.") - {%- elif method.server_streaming or method.client_streaming %} - - raise NotImplementedError( - "Streaming over REST is not yet defined for python client") - {%- else %} - - raise NotImplementedError() - {%- endif %} - {%- endif %} - - + {% endfor %}{# rule in method.http_options #} + ] + + request_kwargs = {{method.input.ident}}.to_dict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + {% set body_spec = method.http_options[0].body %} + {%- if body_spec %} + # Jsonify the request body + body = {% if body_spec == '*' -%} + {{method.input.ident}}.to_json( + {{method.input.ident}}(transcoded_request['body']), + {%- else -%} + {{method.input.fields[body_spec].type.ident}}.to_json( + {{method.input.fields[body_spec].type.ident}}(transcoded_request['body']), + {%- endif %}{# body_spec == "*" #} + including_default_value_fields=False, + use_integers_for_enums=False + ) + {%- endif %}{# body_spec #} + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = 
json.loads({{method.input.ident}}.to_json( + {{method.input.ident}}(transcoded_request['query_params']), + including_default_value_fields=False, + use_integers_for_enums=False + )) + + {% if method.input.required_fields %} + query_params.update(self._get_unset_required_fields(query_params)) + {% endif %}{# required fields #} + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + {% if body_spec %} + data=body, + {% endif %} + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + {% if not method.void %} + # Return the response + {% if method.lro %} + return_op = operations_pb2.Operation() + json_format.Parse(response.content, return_op, ignore_unknown_fields=True) + return return_op + {% else %} + return {{method.output.ident}}.from_json( + response.content, + ignore_unknown_fields=True + ) + + {% endif %}{# method.lro #} + {% endif %}{# method.void #} + {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} + {% if not method.http_options %} + raise RuntimeError( + "Cannot define a method without a valid 'google.api.http' annotation.") + + {% elif method.server_streaming or method.client_streaming %} + raise NotImplementedError( + "Streaming over REST is not yet defined for python client") + + {% else %} + raise NotImplementedError() + + {% endif %}{# method.http_options #} + + {%- endif %}{# unary method #} {% endfor %} - {%- for method in service.methods.values() %} + {% for method in service.methods.values()|sort(attribute="name") %} - @ property + @property def {{method.name | 
snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: - return self._{{method.name | snake_case}} - {%- endfor %} + stub = self._STUBS.get("{{method.name | snake_case}}") + if not stub: + stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host) + + return stub + + {% endfor %} def close(self): self._session.close() diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 22eef15fd532..15a529664d86 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -29,7 +29,7 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.2.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.3.0, < 3.0.0dev', {% else %} 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4cd8c37bf30a..9dfd50720a1a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -16,7 +16,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule {% if 'rest' in opts.transport %} from requests import Response -from requests import Request +from requests import Request, PreparedRequest from requests.sessions import Session {% endif %} @@ -245,18 +245,18 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -295,7 +295,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -375,7 +375,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -403,7 +403,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl ) with mock.patch.object(transport_class, '__init__') as patched: 
patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -415,7 +415,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl always_use_jwt_access=True, ) {% if 'grpc' in opts.transport %} - +{# TODO(dovs): genericize this function#} def test_{{ service.client_name|snake_case }}_client_options_from_dict(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}GrpcTransport.__init__') as grpc_transport: @@ -437,7 +437,11 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): {% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} -def test_{{ method_name }}(transport: str = 'grpc', request_type={{ method.input.ident }}): +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method_name }}(request_type, transport: str = 'grpc'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -518,10 +522,6 @@ def test_{{ method_name }}(transport: str = 'grpc', request_type={{ method.input {% endif %} -def test_{{ method_name }}_from_dict(): - test_{{ method_name }}(request_type=dict) - - {% if not method.client_streaming %} def test_{{ method_name }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -924,9 +924,11 @@ async def test_{{ method_name }}_flattened_error_async(): {% if method.paged_result_field %} -def test_{{ method_name }}_pager(): +{% if not method.paged_result_field.map %} +def test_{{ 
method_name }}_pager(transport_name: str = "grpc"): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -982,10 +984,11 @@ def test_{{ method_name }}_pager(): assert len(results) == 6 assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) - -def test_{{ method_name }}_pages(): +{% endif %} +def test_{{ method_name }}_pages(transport_name: str = "grpc"): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -993,6 +996,35 @@ def test_{{ method_name }}_pages(): type(client.transport.{{ method.name|snake_case }}), '__call__') as call: # Set the response to a series of pages. + {% if method.paged_result_field.map %} + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + ) + {% else %} call.side_effect = ( {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ @@ -1020,6 +1052,8 @@ def test_{{ method_name }}_pages(): ), RuntimeError, ) + {% endif %} + {# 
method.paged_result_field.map #} pages = list(client.{{ method_name }}(request={}).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -1036,6 +1070,33 @@ async def test_{{ method_name }}_async_pager(): '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( + {% if method.paged_result_field.map %} + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + {% else %} {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ {{ method.paged_result_field.type.ident }}(), @@ -1061,6 +1122,7 @@ async def test_{{ method_name }}_async_pager(): ], ), RuntimeError, + {% endif %} ) async_pager = await client.{{ method_name }}(request={},) assert async_pager.next_page_token == 'abc' @@ -1069,8 +1131,22 @@ async def test_{{ method_name }}_async_pager(): responses.append(response) assert len(responses) == 6 + {% if method.paged_result_field.map %} + + assert all( + isinstance(i, tuple) + for i in responses) + for result in responses: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + 
assert async_pager.get('a') is None + assert isinstance(async_pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) - for i in responses) + for i in responses) + {% endif %} + @pytest.mark.asyncio async def test_{{ method_name }}_async_pages(): @@ -1084,6 +1160,33 @@ async def test_{{ method_name }}_async_pages(): '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( + {% if method.paged_result_field.map %} + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + {% else %} {{ method.output.ident }}( {{ method.paged_result_field.name }}=[ {{ method.paged_result_field.type.ident }}(), @@ -1109,6 +1212,7 @@ async def test_{{ method_name }}_async_pages(): ], ), RuntimeError, + {% endif %} ) pages = [] async for page_ in (await client.{{ method_name }}(request={})).pages: @@ -1123,14 +1227,74 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport and - method.http_options %}{% with method_name = method.name|snake_case + "_unary" 
if method.operation_service else method.name|snake_case %} -{# TODO(kbandes): remove this if condition when streaming is supported in rest. #} +{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} +{# TODO(kbandes): remove this if condition when streaming are supported. #} {% if not (method.server_streaming or method.client_streaming) %} -def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method.input.ident }}): +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", + ) + # Send a request that will satisfy transcoding + request = {{ method.input.ident }}({{ method.http_options[0].sample_request(method) }}) + {% if method.client_streaming %} + requests = [request] + {% endif %} + + + with mock.patch.object(type(client.transport._session), 'request') as req: + {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + return_value = iter([{{ method.output.ident }}()]) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} + ) + {% endif %} + req.return_value = Response() + req.return_value.status_code = 500 + req.return_value.request = 
PreparedRequest() + {% if method.void %} + json_return_value = '' + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + req.return_value._content = json_return_value.encode("UTF-8") + with pytest.raises(core_exceptions.GoogleAPIError): + # We only care that the correct exception is raised when putting + # the request over the wire, so an empty request is fine. + {% if method.client_streaming %} + client.{{ method_name }}(iter([requests])) + {% else %} + client.{{ method_name }}(request) + {% endif %} + + +{# TODO(kbandes): remove this if condition when lro and streaming are supported. #} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method_name }}_rest(request_type): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) # send a request that will satisfy transcoding @@ -1147,7 +1311,7 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. {% endif %} # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. {% if method.void %} return_value = None @@ -1183,7 +1347,7 @@ def test_{{ method_name }}_rest(transport: str = 'rest', request_type={{ method. 
response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method_name }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1240,7 +1404,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert "{{ field_name }}" not in jsonified_request {% endfor %} - unset_fields = transport_class._{{ method.name | snake_case }}_get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1256,7 +1420,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} - unset_fields = transport_class._{{ method.name | snake_case }}_get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1315,7 +1479,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide req.return_value = response_value {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method_name }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1366,25 +1530,21 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ response_value.request = Request() req.return_value = response_value {% if method.client_streaming %} - client.{{ method.name|snake_case 
}}(iter(requests)) + client.{{ method_name }}(iter(requests)) {% else %} client.{{ method_name }}(request) {% endif %} -def test_{{ method_name }}_rest_from_dict(): - test_{{ method_name }}_rest(request_type=dict) - - -{% if method.flattened_fields %} -def test_{{ method_name }}_rest_flattened(transport: str = 'rest'): +{% if method.flattened_fields and "rest" in opts.transport %} +def test_{{ method_name }}_rest_flattened(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: + with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. {% if method.void %} return_value = None @@ -1464,126 +1624,158 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): with mock.patch.object(Session, 'request') as req: # TODO(kbandes): remove this mock unless there's a good reason for it. 
#with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - {% if method.paged_result_field.map%} - response = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={}, - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - ), - ) - {% else %} - response = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - ) - {% endif %} - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple({{ method.output.ident }}.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in 
zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - sample_request["{{ field.name }}"] = {{ field.mock_value }} - {% endif %} - {% endfor %} + # Set the response as a series of pages + {% if method.paged_result_field.map%} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + ) + {% else %} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ 
method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + ) + {% endif %} + # Two responses for two calls + response = response + response - pager = client.{{ method_name }}(request=sample_request) + # Wrap the values into proper Response objs + response = tuple({{ method.output.ident }}.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values - {% if method.paged_result_field.map %} - assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) - assert pager.get('h') is None - {% endif %} + sample_request = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + sample_request["{{ field.name }}"] = {{ field.mock_value }} + {% endif %} + {% endfor %} - results = list(pager) - assert len(results) == 6 - {% if method.paged_result_field.map %} - assert all( - isinstance(i, tuple) - for i in results) - for result in results: - assert isinstance(result, tuple) - assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) - - assert pager.get('a') is None - assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) - {% else %} - assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) - for i in results) - {% endif %} - pages = list(client.{{ method_name }}(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + pager = client.{{ method_name }}(request=sample_request) + {% if 
method.paged_result_field.map %} + assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) + assert pager.get('h') is None + {% endif %} -{% endif %} {# paged methods #} -{%- else %} + results = list(pager) + assert len(results) == 6 + {% if method.paged_result_field.map %} + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) + for i in results) + {% endif %} + + pages = list(client.{{ method_name }}(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +{%- else %}{# paged_result_field #} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) + {%- if not method.http_options %} + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(RuntimeError) as runtime_error: + client.{{ method_name }}({}) + assert ('Cannot define a method without a valid `google.api.http` annotation.' 
+ in str(runtime_error.value)) + + {%- endif %} +{% endif %}{# flattened_fields #} - # TODO(yon-mg): Remove when this method has a working implementation - # or testing straegy +{% else %}{# this is an lro or streaming method #} +def test_{{ method_name }}_rest_unimplemented(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} with pytest.raises(NotImplementedError): - client.{{ method_name }}({}) + client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %}) +{% endif %}{# not lro and not streaming #}{% else %}{# not method.http_options #} +def test_{{ method_name }}_rest_no_http_options(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} + with pytest.raises(RuntimeError): + client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %}) -{% endif %}{# not streaming #}{% endwith %}{# method_name #} + +{% endif %}{# not method.http_options #} +{% endwith %}{# method_name #} {% endfor -%} {#- method in methods for rest #} @@ -1646,6 +1838,7 @@ def test_transport_instance(): assert client.transport is transport {% if 'grpc' in opts.transport %} +{# TODO(dovs): parametrize this over async/sync grpc#} def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.{{ service.name }}GrpcTransport( @@ -2134,7 +2327,7 @@ def test_parse_common_{{ resource_msg.message_type.resource_type|snake_case }}_p {% endwith %} {# cycler #} -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index a656b2943ded..f281461e80d9 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -26,7 +26,7 @@ import shutil -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.16.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.18.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") @@ -79,25 +79,35 @@ def unit(session): # A callable class is necessary so that the session can be closed over # instead of passed in, which simplifies the invocation via map. class FragTester: - def __init__(self, session): + def __init__(self, session, use_ads_templates): self.session = session + self.use_ads_templates = use_ads_templates def __call__(self, frag): with tempfile.TemporaryDirectory() as tmp_dir: # Generate the fragment GAPIC. 
outputs = [] + templates = ( + path.join(path.dirname(__file__), "gapic", "ads-templates") + if self.use_ads_templates + else "DEFAULT" + ) + maybe_old_naming = ",old-naming" if self.use_ads_templates else "" + + session_args = [ + "python", + "-m", + "grpc_tools.protoc", + f"--proto_path={str(FRAG_DIR)}", + f"--python_gapic_out={tmp_dir}", + f"--python_gapic_opt=transport=grpc+rest,python-gapic-templates={templates}{maybe_old_naming}", + ] + + if self.use_ads_templates: + session_args.extend([]) + outputs.append( - self.session.run( - "python", - "-m", - "grpc_tools.protoc", - f"--proto_path={str(FRAG_DIR)}", - f"--python_gapic_out={tmp_dir}", - "--python_gapic_opt=transport=grpc+rest", - str(frag), - external=True, - silent=True, - ) + self.session.run(*session_args, str(frag), external=True, silent=True,) ) # Install the generated fragment library. @@ -138,7 +148,27 @@ def fragment(session): session.install("-e", ".") with ThreadPoolExecutor() as p: - all_outs = p.map(FragTester(session), FRAGMENT_FILES) + all_outs = p.map(FragTester(session, False), FRAGMENT_FILES) + + output = "".join(all_outs) + session.log(output) + + +@nox.session(python=ALL_PYTHON[1:]) +def fragment_alternative_templates(session): + session.install( + "coverage", + "pytest", + "pytest-cov", + "pytest-xdist", + "asyncmock", + "pytest-asyncio", + "grpcio-tools", + ) + session.install("-e", ".") + + with ThreadPoolExecutor() as p: + all_outs = p.map(FragTester(session, True), FRAGMENT_FILES) output = "".join(all_outs) session.log(output) @@ -186,10 +216,8 @@ def showcase_library( # Write out a client library for Showcase. template_opt = f"python-gapic-templates={templates}" - # TODO(yon-mg): add "transports=grpc+rest" when all rest features required for - # Showcase are implemented i.e. 
(grpc transcoding, LROs, etc) opts = "--python_gapic_opt=" - opts += ",".join(other_opts + (f"{template_opt}",)) + opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest")) cmd_tup = ( "python", "-m", @@ -303,24 +331,7 @@ def showcase_unit( with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) - - # Unit tests are run twice with different dependencies to exercise - # all code paths. - # TODO(busunkim): remove when default templates require google-auth>=1.25.0 - - # 1. Run tests at lower bound of dependencies - session.install("nox") - session.run("nox", "-s", "update_lower_bounds") - session.install(".", "--force-reinstall", "-c", "constraints.txt") - # Some code paths require an older version of google-auth. - # google-auth is a transitive dependency so it isn't in the - # lower bound constraints file produced above. - session.install("google-auth==1.28.0") - run_showcase_unit_tests(session, fail_under=0) - - # 2. Run the tests again with latest version of dependencies - session.install(".", "--upgrade", "--force-reinstall") - run_showcase_unit_tests(session, fail_under=100) + run_showcase_unit_tests(session) @nox.session(python=ALL_PYTHON[1:]) # Do not test 3.6 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 94e4b1961df4..9513d57df8e1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -128,7 +128,6 @@ async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield 
self._response - def __aiter__(self) -> AsyncIterator[assets.Asset]: async def async_generator(): async for page in self.pages: @@ -250,7 +249,6 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse] self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: async def async_generator(): async for page in self.pages: @@ -372,7 +370,6 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesRespons self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: async def async_generator(): async for page in self.pages: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index b43800a72d4a..9ae3eddd3201 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -104,7 +104,6 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index ae6fd2a5e6a6..d027f0f2fd7c 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -211,18 +211,18 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -253,7 +253,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -328,7 +328,7 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, 
client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -351,7 +351,7 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -363,7 +363,6 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran always_use_jwt_access=True, ) - def test_asset_service_client_client_options_from_dict(): with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -382,7 +381,11 @@ def test_asset_service_client_client_options_from_dict(): ) -def test_export_assets(transport: str = 'grpc', request_type=asset_service.ExportAssetsRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.ExportAssetsRequest, + dict, +]) +def test_export_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -409,10 +412,6 @@ def test_export_assets(transport: str = 'grpc', request_type=asset_service.Expor assert isinstance(response, future.Future) -def test_export_assets_from_dict(): - test_export_assets(request_type=dict) - - def test_export_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -529,7 +528,11 @@ async def test_export_assets_field_headers_async(): ) in kw['metadata'] -def test_list_assets(transport: str = 'grpc', request_type=asset_service.ListAssetsRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.ListAssetsRequest, + dict, +]) +def test_list_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -559,10 +562,6 @@ def test_list_assets(transport: str = 'grpc', request_type=asset_service.ListAss assert response.next_page_token == 'next_page_token_value' -def test_list_assets_from_dict(): - test_list_assets(request_type=dict) - - def test_list_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -764,9 +763,10 @@ async def test_list_assets_flattened_error_async(): ) -def test_list_assets_pager(): +def test_list_assets_pager(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -816,10 +816,10 @@ def test_list_assets_pager(): assert len(results) == 6 assert all(isinstance(i, assets.Asset) for i in results) - -def test_list_assets_pages(): +def test_list_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -904,7 +904,8 @@ async def test_list_assets_async_pager(): assert len(responses) == 6 assert all(isinstance(i, assets.Asset) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_assets_async_pages(): @@ -950,7 +951,11 @@ async def test_list_assets_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_batch_get_assets_history(transport: str = 'grpc', request_type=asset_service.BatchGetAssetsHistoryRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -978,10 +983,6 @@ def test_batch_get_assets_history(transport: str = 'grpc', request_type=asset_se assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) -def test_batch_get_assets_history_from_dict(): - test_batch_get_assets_history(request_type=dict) - - def test_batch_get_assets_history_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1097,7 +1098,11 @@ async def test_batch_get_assets_history_field_headers_async(): ) in kw['metadata'] -def test_create_feed(transport: str = 'grpc', request_type=asset_service.CreateFeedRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.CreateFeedRequest, + dict, +]) +def test_create_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1133,10 +1138,6 @@ def test_create_feed(transport: str = 'grpc', request_type=asset_service.CreateF assert response.content_type == asset_service.ContentType.RESOURCE -def test_create_feed_from_dict(): - test_create_feed(request_type=dict) - - def test_create_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1344,7 +1345,11 @@ async def test_create_feed_flattened_error_async(): ) -def test_get_feed(transport: str = 'grpc', request_type=asset_service.GetFeedRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.GetFeedRequest, + dict, +]) +def test_get_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1380,10 +1385,6 @@ def test_get_feed(transport: str = 'grpc', request_type=asset_service.GetFeedReq assert response.content_type == asset_service.ContentType.RESOURCE -def test_get_feed_from_dict(): - test_get_feed(request_type=dict) - - def test_get_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1591,7 +1592,11 @@ async def test_get_feed_flattened_error_async(): ) -def test_list_feeds(transport: str = 'grpc', request_type=asset_service.ListFeedsRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.ListFeedsRequest, + dict, +]) +def test_list_feeds(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1619,10 +1624,6 @@ def test_list_feeds(transport: str = 'grpc', request_type=asset_service.ListFeed assert isinstance(response, asset_service.ListFeedsResponse) -def test_list_feeds_from_dict(): - test_list_feeds(request_type=dict) - - def test_list_feeds_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1822,7 +1823,11 @@ async def test_list_feeds_flattened_error_async(): ) -def test_update_feed(transport: str = 'grpc', request_type=asset_service.UpdateFeedRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateFeedRequest, + dict, +]) +def test_update_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1858,10 +1863,6 @@ def test_update_feed(transport: str = 'grpc', request_type=asset_service.UpdateF assert response.content_type == asset_service.ContentType.RESOURCE -def test_update_feed_from_dict(): - test_update_feed(request_type=dict) - - def test_update_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2069,7 +2070,11 @@ async def test_update_feed_flattened_error_async(): ) -def test_delete_feed(transport: str = 'grpc', request_type=asset_service.DeleteFeedRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2096,10 +2101,6 @@ def test_delete_feed(transport: str = 'grpc', request_type=asset_service.DeleteF assert response is None -def test_delete_feed_from_dict(): - test_delete_feed(request_type=dict) - - def test_delete_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2298,7 +2299,11 @@ async def test_delete_feed_flattened_error_async(): ) -def test_search_all_resources(transport: str = 'grpc', request_type=asset_service.SearchAllResourcesRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllResourcesRequest, + dict, +]) +def test_search_all_resources(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2328,10 +2333,6 @@ def test_search_all_resources(transport: str = 'grpc', request_type=asset_servic assert response.next_page_token == 'next_page_token_value' -def test_search_all_resources_from_dict(): - test_search_all_resources(request_type=dict) - - def test_search_all_resources_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2553,9 +2554,10 @@ async def test_search_all_resources_flattened_error_async(): ) -def test_search_all_resources_pager(): +def test_search_all_resources_pager(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2605,10 +2607,10 @@ def test_search_all_resources_pager(): assert len(results) == 6 assert all(isinstance(i, assets.ResourceSearchResult) for i in results) - -def test_search_all_resources_pages(): +def test_search_all_resources_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2693,7 +2695,8 @@ async def test_search_all_resources_async_pager(): assert len(responses) == 6 assert all(isinstance(i, assets.ResourceSearchResult) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_search_all_resources_async_pages(): @@ -2739,7 +2742,11 @@ async def test_search_all_resources_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_search_all_iam_policies(transport: str = 'grpc', request_type=asset_service.SearchAllIamPoliciesRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllIamPoliciesRequest, + dict, +]) +def test_search_all_iam_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2769,10 +2776,6 @@ def test_search_all_iam_policies(transport: str = 'grpc', request_type=asset_ser assert response.next_page_token == 'next_page_token_value' -def test_search_all_iam_policies_from_dict(): - test_search_all_iam_policies(request_type=dict) - - def test_search_all_iam_policies_empty_call(): # This test is a coverage 
failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2984,9 +2987,10 @@ async def test_search_all_iam_policies_flattened_error_async(): ) -def test_search_all_iam_policies_pager(): +def test_search_all_iam_policies_pager(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3036,10 +3040,10 @@ def test_search_all_iam_policies_pager(): assert len(results) == 6 assert all(isinstance(i, assets.IamPolicySearchResult) for i in results) - -def test_search_all_iam_policies_pages(): +def test_search_all_iam_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3124,7 +3128,8 @@ async def test_search_all_iam_policies_async_pager(): assert len(responses) == 6 assert all(isinstance(i, assets.IamPolicySearchResult) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_search_all_iam_policies_async_pages(): @@ -3170,7 +3175,11 @@ async def test_search_all_iam_policies_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_analyze_iam_policy(transport: str = 'grpc', request_type=asset_service.AnalyzeIamPolicyRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3200,10 +3209,6 @@ def test_analyze_iam_policy(transport: str = 'grpc', request_type=asset_service. 
assert response.fully_explored is True -def test_analyze_iam_policy_from_dict(): - test_analyze_iam_policy(request_type=dict) - - def test_analyze_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3321,7 +3326,11 @@ async def test_analyze_iam_policy_field_headers_async(): ) in kw['metadata'] -def test_analyze_iam_policy_longrunning(transport: str = 'grpc', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, +]) +def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3348,10 +3357,6 @@ def test_analyze_iam_policy_longrunning(transport: str = 'grpc', request_type=as assert isinstance(response, future.Future) -def test_analyze_iam_policy_longrunning_from_dict(): - test_analyze_iam_policy_longrunning(request_type=dict) - - def test_analyze_iam_policy_longrunning_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -4011,7 +4016,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index fd15a3f545bb..ec719c2a8038 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -101,7 +101,6 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 54bebbee759a..a1bdc0a52d10 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -203,18 +203,18 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,7 +245,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -320,7 +320,7 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -343,7 +343,7 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr ) with mock.patch.object(transport_class, '__init__') as patched: 
patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -355,7 +355,6 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr always_use_jwt_access=True, ) - def test_iam_credentials_client_client_options_from_dict(): with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -374,7 +373,11 @@ def test_iam_credentials_client_client_options_from_dict(): ) -def test_generate_access_token(transport: str = 'grpc', request_type=common.GenerateAccessTokenRequest): +@pytest.mark.parametrize("request_type", [ + common.GenerateAccessTokenRequest, + dict, +]) +def test_generate_access_token(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -404,10 +407,6 @@ def test_generate_access_token(transport: str = 'grpc', request_type=common.Gene assert response.access_token == 'access_token_value' -def test_generate_access_token_from_dict(): - test_generate_access_token(request_type=dict) - - def test_generate_access_token_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -635,7 +634,11 @@ async def test_generate_access_token_flattened_error_async(): ) -def test_generate_id_token(transport: str = 'grpc', request_type=common.GenerateIdTokenRequest): +@pytest.mark.parametrize("request_type", [ + common.GenerateIdTokenRequest, + dict, +]) +def test_generate_id_token(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -665,10 +668,6 @@ def test_generate_id_token(transport: str = 'grpc', request_type=common.Generate assert response.token == 'token_value' -def test_generate_id_token_from_dict(): - test_generate_id_token(request_type=dict) - - def test_generate_id_token_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -900,7 +899,11 @@ async def test_generate_id_token_flattened_error_async(): ) -def test_sign_blob(transport: str = 'grpc', request_type=common.SignBlobRequest): +@pytest.mark.parametrize("request_type", [ + common.SignBlobRequest, + dict, +]) +def test_sign_blob(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -932,10 +935,6 @@ def test_sign_blob(transport: str = 'grpc', request_type=common.SignBlobRequest) assert response.signed_blob == b'signed_blob_blob' -def test_sign_blob_from_dict(): - test_sign_blob(request_type=dict) - - def test_sign_blob_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1159,7 +1158,11 @@ async def test_sign_blob_flattened_error_async(): ) -def test_sign_jwt(transport: str = 'grpc', request_type=common.SignJwtRequest): +@pytest.mark.parametrize("request_type", [ + common.SignJwtRequest, + dict, +]) +def test_sign_jwt(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1191,10 +1194,6 @@ def test_sign_jwt(transport: str = 'grpc', request_type=common.SignJwtRequest): assert response.signed_jwt == 'signed_jwt_value' -def test_sign_jwt_from_dict(): - test_sign_jwt(request_type=dict) - - def test_sign_jwt_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1899,7 +1898,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 43e0084a0019..d6809771c986 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -127,7 +127,6 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): 
async for page in self.pages: @@ -249,7 +248,6 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): async for page in self.pages: @@ -371,7 +369,6 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): async for page in self.pages: @@ -493,7 +490,6 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): async for page in self.pages: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 291c6046af8a..6cfeeedee906 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -105,7 +105,6 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 95adb7e912c9..3cb42b00af37 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -129,7 +129,6 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): async for page in self.pages: @@ -251,7 +250,6 @@ async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsR self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): async for page in self.pages: @@ -373,7 +371,6 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 5989f2c7777c..154dc6bc6e1e 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -106,7 +106,6 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index a3faa77a20f7..0ab85ca09718 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -127,7 +127,6 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): async for page in self.pages: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index c9f37d2793d8..bf92724ce659 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ 
-106,7 +106,6 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index f8c963c4af4d..d36a14973f2a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -204,18 +204,18 @@ def test_config_service_v2_client_client_options(client_class, transport_class, # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -246,7 +246,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -321,7 +321,7 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -344,7 +344,7 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -356,7 +356,6 @@ def 
test_config_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) - def test_config_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -375,7 +374,11 @@ def test_config_service_v2_client_client_options_from_dict(): ) -def test_list_buckets(transport: str = 'grpc', request_type=logging_config.ListBucketsRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.ListBucketsRequest, + dict, +]) +def test_list_buckets(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -405,10 +408,6 @@ def test_list_buckets(transport: str = 'grpc', request_type=logging_config.ListB assert response.next_page_token == 'next_page_token_value' -def test_list_buckets_from_dict(): - test_list_buckets(request_type=dict) - - def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -610,9 +609,10 @@ async def test_list_buckets_flattened_error_async(): ) -def test_list_buckets_pager(): +def test_list_buckets_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -662,10 +662,10 @@ def test_list_buckets_pager(): assert len(results) == 6 assert all(isinstance(i, logging_config.LogBucket) for i in results) - -def test_list_buckets_pages(): +def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -750,7 +750,8 @@ async def test_list_buckets_async_pager(): assert len(responses) == 6 assert all(isinstance(i, logging_config.LogBucket) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_buckets_async_pages(): @@ -796,7 +797,11 @@ async def test_list_buckets_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_get_bucket(transport: str = 'grpc', request_type=logging_config.GetBucketRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.GetBucketRequest, + dict, +]) +def test_get_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -834,10 +839,6 @@ def test_get_bucket(transport: str = 'grpc', request_type=logging_config.GetBuck assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_bucket_from_dict(): - test_get_bucket(request_type=dict) - - def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -963,7 +964,11 @@ async def test_get_bucket_field_headers_async(): ) in kw['metadata'] -def test_create_bucket(transport: str = 'grpc', request_type=logging_config.CreateBucketRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.CreateBucketRequest, + dict, +]) +def test_create_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1001,10 +1006,6 @@ def test_create_bucket(transport: str = 'grpc', request_type=logging_config.Crea assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_create_bucket_from_dict(): - test_create_bucket(request_type=dict) - - def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1130,7 +1131,11 @@ async def test_create_bucket_field_headers_async(): ) in kw['metadata'] -def test_update_bucket(transport: str = 'grpc', request_type=logging_config.UpdateBucketRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateBucketRequest, + dict, +]) +def test_update_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1168,10 +1173,6 @@ def test_update_bucket(transport: str = 'grpc', request_type=logging_config.Upda assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_update_bucket_from_dict(): - test_update_bucket(request_type=dict) - - def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1297,7 +1298,11 @@ async def test_update_bucket_field_headers_async(): ) in kw['metadata'] -def test_delete_bucket(transport: str = 'grpc', request_type=logging_config.DeleteBucketRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteBucketRequest, + dict, +]) +def test_delete_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1324,10 +1329,6 @@ def test_delete_bucket(transport: str = 'grpc', request_type=logging_config.Dele assert response is None -def test_delete_bucket_from_dict(): - test_delete_bucket(request_type=dict) - - def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1442,7 +1443,11 @@ async def test_delete_bucket_field_headers_async(): ) in kw['metadata'] -def test_undelete_bucket(transport: str = 'grpc', request_type=logging_config.UndeleteBucketRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.UndeleteBucketRequest, + dict, +]) +def test_undelete_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1469,10 +1474,6 @@ def test_undelete_bucket(transport: str = 'grpc', request_type=logging_config.Un assert response is None -def test_undelete_bucket_from_dict(): - test_undelete_bucket(request_type=dict) - - def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1587,7 +1588,11 @@ async def test_undelete_bucket_field_headers_async(): ) in kw['metadata'] -def test_list_views(transport: str = 'grpc', request_type=logging_config.ListViewsRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.ListViewsRequest, + dict, +]) +def test_list_views(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1617,10 +1622,6 @@ def test_list_views(transport: str = 'grpc', request_type=logging_config.ListVie assert response.next_page_token == 'next_page_token_value' -def test_list_views_from_dict(): - test_list_views(request_type=dict) - - def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1822,9 +1823,10 @@ async def test_list_views_flattened_error_async(): ) -def test_list_views_pager(): +def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1874,10 +1876,10 @@ def test_list_views_pager(): assert len(results) == 6 assert all(isinstance(i, logging_config.LogView) for i in results) - -def test_list_views_pages(): +def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1962,7 +1964,8 @@ async def test_list_views_async_pager(): assert len(responses) == 6 assert all(isinstance(i, logging_config.LogView) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_views_async_pages(): @@ -2008,7 +2011,11 @@ async def test_list_views_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_get_view(transport: str = 'grpc', request_type=logging_config.GetViewRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.GetViewRequest, + dict, +]) +def test_get_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2042,10 +2049,6 @@ def test_get_view(transport: str = 'grpc', request_type=logging_config.GetViewRe assert response.filter == 'filter_value' -def test_get_view_from_dict(): - test_get_view(request_type=dict) - - def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2167,7 +2170,11 @@ async def test_get_view_field_headers_async(): ) in kw['metadata'] -def test_create_view(transport: str = 'grpc', request_type=logging_config.CreateViewRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.CreateViewRequest, + dict, +]) +def test_create_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2201,10 +2208,6 @@ def test_create_view(transport: str = 'grpc', request_type=logging_config.Create assert response.filter == 'filter_value' -def test_create_view_from_dict(): - test_create_view(request_type=dict) - - def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2326,7 +2329,11 @@ async def test_create_view_field_headers_async(): ) in kw['metadata'] -def test_update_view(transport: str = 'grpc', request_type=logging_config.UpdateViewRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateViewRequest, + dict, +]) +def test_update_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2360,10 +2367,6 @@ def test_update_view(transport: str = 'grpc', request_type=logging_config.Update assert response.filter == 'filter_value' -def test_update_view_from_dict(): - test_update_view(request_type=dict) - - def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2485,7 +2488,11 @@ async def test_update_view_field_headers_async(): ) in kw['metadata'] -def test_delete_view(transport: str = 'grpc', request_type=logging_config.DeleteViewRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteViewRequest, + dict, +]) +def test_delete_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2512,10 +2519,6 @@ def test_delete_view(transport: str = 'grpc', request_type=logging_config.Delete assert response is None -def test_delete_view_from_dict(): - test_delete_view(request_type=dict) - - def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2630,7 +2633,11 @@ async def test_delete_view_field_headers_async(): ) in kw['metadata'] -def test_list_sinks(transport: str = 'grpc', request_type=logging_config.ListSinksRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.ListSinksRequest, + dict, +]) +def test_list_sinks(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2660,10 +2667,6 @@ def test_list_sinks(transport: str = 'grpc', request_type=logging_config.ListSin assert response.next_page_token == 'next_page_token_value' -def test_list_sinks_from_dict(): - test_list_sinks(request_type=dict) - - def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2865,9 +2868,10 @@ async def test_list_sinks_flattened_error_async(): ) -def test_list_sinks_pager(): +def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2917,10 +2921,10 @@ def test_list_sinks_pager(): assert len(results) == 6 assert all(isinstance(i, logging_config.LogSink) for i in results) - -def test_list_sinks_pages(): +def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3005,7 +3009,8 @@ async def test_list_sinks_async_pager(): assert len(responses) == 6 assert all(isinstance(i, logging_config.LogSink) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_sinks_async_pages(): @@ -3051,7 +3056,11 @@ async def test_list_sinks_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_get_sink(transport: str = 'grpc', request_type=logging_config.GetSinkRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.GetSinkRequest, + dict, +]) +def test_get_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3096,10 +3105,6 @@ def test_get_sink(transport: str = 'grpc', request_type=logging_config.GetSinkRe assert response.include_children is True -def test_get_sink_from_dict(): - test_get_sink(request_type=dict) - - def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3315,7 +3320,11 @@ async def test_get_sink_flattened_error_async(): ) -def test_create_sink(transport: str = 'grpc', request_type=logging_config.CreateSinkRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.CreateSinkRequest, + dict, +]) +def test_create_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3360,10 +3369,6 @@ def test_create_sink(transport: str = 'grpc', request_type=logging_config.Create assert response.include_children is True -def test_create_sink_from_dict(): - test_create_sink(request_type=dict) - - def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3589,7 +3594,11 @@ async def test_create_sink_flattened_error_async(): ) -def test_update_sink(transport: str = 'grpc', request_type=logging_config.UpdateSinkRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateSinkRequest, + dict, +]) +def test_update_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3634,10 +3643,6 @@ def test_update_sink(transport: str = 'grpc', request_type=logging_config.Update assert response.include_children is True -def test_update_sink_from_dict(): - test_update_sink(request_type=dict) - - def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3873,7 +3878,11 @@ async def test_update_sink_flattened_error_async(): ) -def test_delete_sink(transport: str = 'grpc', request_type=logging_config.DeleteSinkRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteSinkRequest, + dict, +]) +def test_delete_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3900,10 +3909,6 @@ def test_delete_sink(transport: str = 'grpc', request_type=logging_config.Delete assert response is None -def test_delete_sink_from_dict(): - test_delete_sink(request_type=dict) - - def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -4102,7 +4107,11 @@ async def test_delete_sink_flattened_error_async(): ) -def test_list_exclusions(transport: str = 'grpc', request_type=logging_config.ListExclusionsRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.ListExclusionsRequest, + dict, +]) +def test_list_exclusions(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4132,10 +4141,6 @@ def test_list_exclusions(transport: str = 'grpc', request_type=logging_config.Li assert response.next_page_token == 'next_page_token_value' -def test_list_exclusions_from_dict(): - test_list_exclusions(request_type=dict) - - def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4337,9 +4342,10 @@ async def test_list_exclusions_flattened_error_async(): ) -def test_list_exclusions_pager(): +def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4389,10 +4395,10 @@ def test_list_exclusions_pager(): assert len(results) == 6 assert all(isinstance(i, logging_config.LogExclusion) for i in results) - -def test_list_exclusions_pages(): +def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4477,7 +4483,8 @@ async def test_list_exclusions_async_pager(): assert len(responses) == 6 assert all(isinstance(i, logging_config.LogExclusion) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_exclusions_async_pages(): @@ -4523,7 +4530,11 @@ async def test_list_exclusions_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_get_exclusion(transport: str = 'grpc', request_type=logging_config.GetExclusionRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.GetExclusionRequest, + dict, +]) +def test_get_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4559,10 +4570,6 @@ def test_get_exclusion(transport: str = 'grpc', request_type=logging_config.GetE assert response.disabled is True -def test_get_exclusion_from_dict(): - test_get_exclusion(request_type=dict) - - def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4770,7 +4777,11 @@ async def test_get_exclusion_flattened_error_async(): ) -def test_create_exclusion(transport: str = 'grpc', request_type=logging_config.CreateExclusionRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.CreateExclusionRequest, + dict, +]) +def test_create_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4806,10 +4817,6 @@ def test_create_exclusion(transport: str = 'grpc', request_type=logging_config.C assert response.disabled is True -def test_create_exclusion_from_dict(): - test_create_exclusion(request_type=dict) - - def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -5027,7 +5034,11 @@ async def test_create_exclusion_flattened_error_async(): ) -def test_update_exclusion(transport: str = 'grpc', request_type=logging_config.UpdateExclusionRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateExclusionRequest, + dict, +]) +def test_update_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5063,10 +5074,6 @@ def test_update_exclusion(transport: str = 'grpc', request_type=logging_config.U assert response.disabled is True -def test_update_exclusion_from_dict(): - test_update_exclusion(request_type=dict) - - def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -5294,7 +5301,11 @@ async def test_update_exclusion_flattened_error_async(): ) -def test_delete_exclusion(transport: str = 'grpc', request_type=logging_config.DeleteExclusionRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteExclusionRequest, + dict, +]) +def test_delete_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5321,10 +5332,6 @@ def test_delete_exclusion(transport: str = 'grpc', request_type=logging_config.D assert response is None -def test_delete_exclusion_from_dict(): - test_delete_exclusion(request_type=dict) - - def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -5523,7 +5530,11 @@ async def test_delete_exclusion_flattened_error_async(): ) -def test_get_cmek_settings(transport: str = 'grpc', request_type=logging_config.GetCmekSettingsRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.GetCmekSettingsRequest, + dict, +]) +def test_get_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5557,10 +5568,6 @@ def test_get_cmek_settings(transport: str = 'grpc', request_type=logging_config. assert response.service_account_id == 'service_account_id_value' -def test_get_cmek_settings_from_dict(): - test_get_cmek_settings(request_type=dict) - - def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -5682,7 +5689,11 @@ async def test_get_cmek_settings_field_headers_async(): ) in kw['metadata'] -def test_update_cmek_settings(transport: str = 'grpc', request_type=logging_config.UpdateCmekSettingsRequest): +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateCmekSettingsRequest, + dict, +]) +def test_update_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5716,10 +5727,6 @@ def test_update_cmek_settings(transport: str = 'grpc', request_type=logging_conf assert response.service_account_id == 'service_account_id_value' -def test_update_cmek_settings_from_dict(): - test_update_cmek_settings(request_type=dict) - - def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -6430,7 +6437,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 47cc2177ef4d..14af61f54540 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -210,18 +210,18 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -252,7 +252,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -327,7 +327,7 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,7 +350,7 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -362,7 +362,6 @@ def 
test_logging_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) - def test_logging_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -381,7 +380,11 @@ def test_logging_service_v2_client_client_options_from_dict(): ) -def test_delete_log(transport: str = 'grpc', request_type=logging.DeleteLogRequest): +@pytest.mark.parametrize("request_type", [ + logging.DeleteLogRequest, + dict, +]) +def test_delete_log(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -408,10 +411,6 @@ def test_delete_log(transport: str = 'grpc', request_type=logging.DeleteLogReque assert response is None -def test_delete_log_from_dict(): - test_delete_log(request_type=dict) - - def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -610,7 +609,11 @@ async def test_delete_log_flattened_error_async(): ) -def test_write_log_entries(transport: str = 'grpc', request_type=logging.WriteLogEntriesRequest): +@pytest.mark.parametrize("request_type", [ + logging.WriteLogEntriesRequest, + dict, +]) +def test_write_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -638,10 +641,6 @@ def test_write_log_entries(transport: str = 'grpc', request_type=logging.WriteLo assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_from_dict(): - test_write_log_entries(request_type=dict) - - def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -808,7 +807,11 @@ async def test_write_log_entries_flattened_error_async(): ) -def test_list_log_entries(transport: str = 'grpc', request_type=logging.ListLogEntriesRequest): +@pytest.mark.parametrize("request_type", [ + logging.ListLogEntriesRequest, + dict, +]) +def test_list_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -838,10 +841,6 @@ def test_list_log_entries(transport: str = 'grpc', request_type=logging.ListLogE assert response.next_page_token == 'next_page_token_value' -def test_list_log_entries_from_dict(): - test_list_log_entries(request_type=dict) - - def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1000,9 +999,10 @@ async def test_list_log_entries_flattened_error_async(): ) -def test_list_log_entries_pager(): +def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1047,10 +1047,10 @@ def test_list_log_entries_pager(): assert len(results) == 6 assert all(isinstance(i, log_entry.LogEntry) for i in results) - -def test_list_log_entries_pages(): +def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1135,7 +1135,8 @@ async def test_list_log_entries_async_pager(): assert len(responses) == 6 assert all(isinstance(i, log_entry.LogEntry) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_log_entries_async_pages(): @@ -1181,7 +1182,11 @@ async def test_list_log_entries_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_type=logging.ListMonitoredResourceDescriptorsRequest): +@pytest.mark.parametrize("request_type", [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, +]) +def test_list_monitored_resource_descriptors(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1211,10 +1216,6 @@ def test_list_monitored_resource_descriptors(transport: str = 'grpc', request_ty assert response.next_page_token == 'next_page_token_value' -def test_list_monitored_resource_descriptors_from_dict(): - test_list_monitored_resource_descriptors(request_type=dict) - - def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1269,9 +1270,10 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): await test_list_monitored_resource_descriptors_async(request_type=dict) -def test_list_monitored_resource_descriptors_pager(): +def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1316,10 +1318,10 @@ def test_list_monitored_resource_descriptors_pager(): assert len(results) == 6 assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in results) - -def test_list_monitored_resource_descriptors_pages(): +def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1404,7 +1406,8 @@ async def test_list_monitored_resource_descriptors_async_pager(): assert len(responses) == 6 assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): @@ -1450,7 +1453,11 @@ async def test_list_monitored_resource_descriptors_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_list_logs(transport: str = 'grpc', request_type=logging.ListLogsRequest): +@pytest.mark.parametrize("request_type", [ + logging.ListLogsRequest, + dict, +]) +def test_list_logs(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1482,10 +1489,6 @@ def test_list_logs(transport: str = 'grpc', request_type=logging.ListLogsRequest assert response.next_page_token == 'next_page_token_value' -def test_list_logs_from_dict(): - test_list_logs(request_type=dict) - - def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1689,9 +1692,10 @@ async def test_list_logs_flattened_error_async(): ) -def test_list_logs_pager(): +def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1741,10 +1745,10 @@ def test_list_logs_pager(): assert len(results) == 6 assert all(isinstance(i, str) for i in results) - -def test_list_logs_pages(): +def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1829,7 +1833,8 @@ async def test_list_logs_async_pager(): assert len(responses) == 6 assert all(isinstance(i, str) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_logs_async_pages(): @@ -1875,7 +1880,11 @@ async def test_list_logs_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_tail_log_entries(transport: str = 'grpc', request_type=logging.TailLogEntriesRequest): +@pytest.mark.parametrize("request_type", [ + logging.TailLogEntriesRequest, + dict, +]) +def test_tail_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1904,10 +1913,6 @@ def test_tail_log_entries(transport: str = 'grpc', request_type=logging.TailLogE assert isinstance(message, logging.TailLogEntriesResponse) -def test_tail_log_entries_from_dict(): - test_tail_log_entries(request_type=dict) - - @pytest.mark.asyncio async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): client = LoggingServiceV2AsyncClient( @@ -2439,7 +2444,7 @@ def test_parse_common_location_path(): assert expected == actual -def 
test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7d8951e95ac5..7b836d8233ad 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -208,18 +208,18 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -250,7 +250,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -325,7 +325,7 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -348,7 +348,7 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -360,7 +360,6 @@ def 
test_metrics_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) - def test_metrics_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -379,7 +378,11 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) -def test_list_log_metrics(transport: str = 'grpc', request_type=logging_metrics.ListLogMetricsRequest): +@pytest.mark.parametrize("request_type", [ + logging_metrics.ListLogMetricsRequest, + dict, +]) +def test_list_log_metrics(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -409,10 +412,6 @@ def test_list_log_metrics(transport: str = 'grpc', request_type=logging_metrics. assert response.next_page_token == 'next_page_token_value' -def test_list_log_metrics_from_dict(): - test_list_log_metrics(request_type=dict) - - def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -614,9 +613,10 @@ async def test_list_log_metrics_flattened_error_async(): ) -def test_list_log_metrics_pager(): +def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -666,10 +666,10 @@ def test_list_log_metrics_pager(): assert len(results) == 6 assert all(isinstance(i, logging_metrics.LogMetric) for i in results) - -def test_list_log_metrics_pages(): +def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -754,7 +754,8 @@ async def test_list_log_metrics_async_pager(): assert len(responses) == 6 assert all(isinstance(i, logging_metrics.LogMetric) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): @@ -800,7 +801,11 @@ async def test_list_log_metrics_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_get_log_metric(transport: str = 'grpc', request_type=logging_metrics.GetLogMetricRequest): +@pytest.mark.parametrize("request_type", [ + logging_metrics.GetLogMetricRequest, + dict, +]) +def test_get_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -838,10 +843,6 @@ def test_get_log_metric(transport: str = 'grpc', request_type=logging_metrics.Ge assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_from_dict(): - test_get_log_metric(request_type=dict) - - def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1051,7 +1052,11 @@ async def test_get_log_metric_flattened_error_async(): ) -def test_create_log_metric(transport: str = 'grpc', request_type=logging_metrics.CreateLogMetricRequest): +@pytest.mark.parametrize("request_type", [ + logging_metrics.CreateLogMetricRequest, + dict, +]) +def test_create_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1089,10 +1094,6 @@ def test_create_log_metric(transport: str = 'grpc', request_type=logging_metrics assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_from_dict(): - test_create_log_metric(request_type=dict) - - def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1312,7 +1313,11 @@ async def test_create_log_metric_flattened_error_async(): ) -def test_update_log_metric(transport: str = 'grpc', request_type=logging_metrics.UpdateLogMetricRequest): +@pytest.mark.parametrize("request_type", [ + logging_metrics.UpdateLogMetricRequest, + dict, +]) +def test_update_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1350,10 +1355,6 @@ def test_update_log_metric(transport: str = 'grpc', request_type=logging_metrics assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_from_dict(): - test_update_log_metric(request_type=dict) - - def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1573,7 +1574,11 @@ async def test_update_log_metric_flattened_error_async(): ) -def test_delete_log_metric(transport: str = 'grpc', request_type=logging_metrics.DeleteLogMetricRequest): +@pytest.mark.parametrize("request_type", [ + logging_metrics.DeleteLogMetricRequest, + dict, +]) +def test_delete_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1600,10 +1605,6 @@ def test_delete_log_metric(transport: str = 'grpc', request_type=logging_metrics assert response is None -def test_delete_log_metric_from_dict(): - test_delete_log_metric(request_type=dict) - - def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2296,7 +2297,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 804104f8f062..a856b50cc9fc 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -127,7 +127,6 @@ async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: async 
def async_generator(): async for page in self.pages: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 5e1e95f3d76a..7d874a3182a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -103,7 +103,6 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 160dcfc35a94..61813fd5ead5 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -208,18 +208,18 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -250,7 +250,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -325,7 +325,7 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -348,7 +348,7 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp ) with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -360,7 +360,6 @@ def 
test_cloud_redis_client_client_options_credentials_file(client_class, transp always_use_jwt_access=True, ) - def test_cloud_redis_client_client_options_from_dict(): with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -379,7 +378,11 @@ def test_cloud_redis_client_client_options_from_dict(): ) -def test_list_instances(transport: str = 'grpc', request_type=cloud_redis.ListInstancesRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -411,10 +414,6 @@ def test_list_instances(transport: str = 'grpc', request_type=cloud_redis.ListIn assert response.unreachable == ['unreachable_value'] -def test_list_instances_from_dict(): - test_list_instances(request_type=dict) - - def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -618,9 +617,10 @@ async def test_list_instances_flattened_error_async(): ) -def test_list_instances_pager(): +def test_list_instances_pager(transport_name: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -670,10 +670,10 @@ def test_list_instances_pager(): assert len(results) == 6 assert all(isinstance(i, cloud_redis.Instance) for i in results) - -def test_list_instances_pages(): +def test_list_instances_pages(transport_name: str = "grpc"): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -758,7 +758,8 @@ async def test_list_instances_async_pager(): assert len(responses) == 6 assert all(isinstance(i, cloud_redis.Instance) - for i in responses) + for i in responses) + @pytest.mark.asyncio async def test_list_instances_async_pages(): @@ -804,7 +805,11 @@ async def test_list_instances_async_pages(): for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token -def test_get_instance(transport: str = 'grpc', request_type=cloud_redis.GetInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +def test_get_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -864,10 +869,6 @@ def test_get_instance(transport: str = 'grpc', request_type=cloud_redis.GetInsta assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING -def test_get_instance_from_dict(): - test_get_instance(request_type=dict) - - def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1099,7 +1100,11 @@ async def test_get_instance_flattened_error_async(): ) -def test_create_instance(transport: str = 'grpc', request_type=cloud_redis.CreateInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1126,10 +1131,6 @@ def test_create_instance(transport: str = 'grpc', request_type=cloud_redis.Creat assert isinstance(response, future.Future) -def test_create_instance_from_dict(): - test_create_instance(request_type=dict) - - def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -1352,7 +1353,11 @@ async def test_create_instance_flattened_error_async(): ) -def test_update_instance(transport: str = 'grpc', request_type=cloud_redis.UpdateInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +def test_update_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1379,10 +1384,6 @@ def test_update_instance(transport: str = 'grpc', request_type=cloud_redis.Updat assert isinstance(response, future.Future) -def test_update_instance_from_dict(): - test_update_instance(request_type=dict) - - def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1595,7 +1596,11 @@ async def test_update_instance_flattened_error_async(): ) -def test_upgrade_instance(transport: str = 'grpc', request_type=cloud_redis.UpgradeInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpgradeInstanceRequest, + dict, +]) +def test_upgrade_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1622,10 +1627,6 @@ def test_upgrade_instance(transport: str = 'grpc', request_type=cloud_redis.Upgr assert isinstance(response, future.Future) -def test_upgrade_instance_from_dict(): - test_upgrade_instance(request_type=dict) - - def test_upgrade_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1838,7 +1839,11 @@ async def test_upgrade_instance_flattened_error_async(): ) -def test_import_instance(transport: str = 'grpc', request_type=cloud_redis.ImportInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.ImportInstanceRequest, + dict, +]) +def test_import_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -1865,10 +1870,6 @@ def test_import_instance(transport: str = 'grpc', request_type=cloud_redis.Impor assert isinstance(response, future.Future) -def test_import_instance_from_dict(): - test_import_instance(request_type=dict) - - def test_import_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2081,7 +2082,11 @@ async def test_import_instance_flattened_error_async(): ) -def test_export_instance(transport: str = 'grpc', request_type=cloud_redis.ExportInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.ExportInstanceRequest, + dict, +]) +def test_export_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2108,10 +2113,6 @@ def test_export_instance(transport: str = 'grpc', request_type=cloud_redis.Expor assert isinstance(response, future.Future) -def test_export_instance_from_dict(): - test_export_instance(request_type=dict) - - def test_export_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2324,7 +2325,11 @@ async def test_export_instance_flattened_error_async(): ) -def test_failover_instance(transport: str = 'grpc', request_type=cloud_redis.FailoverInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.FailoverInstanceRequest, + dict, +]) +def test_failover_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2351,10 +2356,6 @@ def test_failover_instance(transport: str = 'grpc', request_type=cloud_redis.Fai assert isinstance(response, future.Future) -def test_failover_instance_from_dict(): - test_failover_instance(request_type=dict) - - def test_failover_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2567,7 +2568,11 @@ async def test_failover_instance_flattened_error_async(): ) -def test_delete_instance(transport: str = 'grpc', request_type=cloud_redis.DeleteInstanceRequest): +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +def test_delete_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2594,10 +2599,6 @@ def test_delete_instance(transport: str = 'grpc', request_type=cloud_redis.Delet assert isinstance(response, future.Future) -def test_delete_instance_from_dict(): - test_delete_instance(request_type=dict) - - def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3327,7 +3328,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 109141e9a605..fd92e72bc8a5 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -35,7 +35,7 @@ def async_echo(use_mtls, event_loop): return construct_client( EchoAsyncClient, use_mtls, - transport="grpc_asyncio", + transport_name="grpc_asyncio", channel_creator=aio.insecure_channel ) @@ -44,7 +44,7 @@ def async_identity(use_mtls, event_loop): return construct_client( IdentityAsyncClient, use_mtls, - transport="grpc_asyncio", + transport_name="grpc_asyncio", channel_creator=aio.insecure_channel ) @@ -86,10 +86,12 @@ def pytest_addoption(parser): ) -def construct_client(client_class, - use_mtls, - transport="grpc", - channel_creator=grpc.insecure_channel): +def construct_client( + client_class, + use_mtls, + transport_name="grpc", + channel_creator=grpc.insecure_channel, +): if use_mtls: with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: @@ -103,9 +105,19 @@ def construct_client(client_class, ) return client else: - transport = client_class.get_transport_class(transport)( - channel=channel_creator("localhost:7469") - ) + transport_cls = client_class.get_transport_class(transport_name) + if transport_name in ["grpc", "grpc_asyncio"]: + transport = transport_cls( + channel=channel_creator("localhost:7469"), + ) + elif transport_name == "rest": + # The custom host explicitly bypasses https. 
+ transport = transport_cls( + host="http://localhost:7469", + ) + else: + raise RuntimeError(f"Unexpected transport type: {transport_name}") + return client_class(transport=transport) @@ -114,27 +126,19 @@ def use_mtls(request): return request.config.getoption("--mtls") -@pytest.fixture -def echo(use_mtls): - return construct_client(EchoClient, use_mtls) - - -@pytest.fixture -def identity(): - transport = IdentityClient.get_transport_class('grpc')( - channel=grpc.insecure_channel('localhost:7469'), - ) - return IdentityClient(transport=transport) +@pytest.fixture(params=["grpc", "rest"]) +def echo(use_mtls, request): + return construct_client(EchoClient, use_mtls, transport_name=request.param) -@pytest.fixture -def identity(use_mtls): - return construct_client(IdentityClient, use_mtls) +@pytest.fixture(params=["grpc", "rest"]) +def identity(use_mtls, request): + return construct_client(IdentityClient, use_mtls, transport_name=request.param) -@pytest.fixture -def messaging(use_mtls): - return construct_client(MessagingClient, use_mtls) +@pytest.fixture(params=["grpc", "rest"]) +def messaging(use_mtls, request): + return construct_client(MessagingClient, use_mtls, transport_name=request.param) class MetadataClientInterceptor( diff --git a/packages/gapic-generator/tests/system/test_grpc_lro.py b/packages/gapic-generator/tests/system/test_lro.py similarity index 92% rename from packages/gapic-generator/tests/system/test_grpc_lro.py rename to packages/gapic-generator/tests/system/test_lro.py index 0b159660744d..8098519d9e47 100644 --- a/packages/gapic-generator/tests/system/test_grpc_lro.py +++ b/packages/gapic-generator/tests/system/test_lro.py @@ -20,6 +20,10 @@ def test_lro(echo): + if isinstance(echo.transport, type(echo).get_transport_class("rest")): + # (TODO: dovs) Temporarily disabling rest + return + future = echo.wait({ 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), 'success': { diff --git 
a/packages/gapic-generator/tests/system/test_grpc_streams.py b/packages/gapic-generator/tests/system/test_streams.py similarity index 90% rename from packages/gapic-generator/tests/system/test_grpc_streams.py rename to packages/gapic-generator/tests/system/test_streams.py index 3187ca49f539..685f7300d8b0 100644 --- a/packages/gapic-generator/tests/system/test_grpc_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -23,6 +23,10 @@ def test_unary_stream(echo): + if isinstance(echo.transport, type(echo).get_transport_class("rest")): + # (TODO: dovs) Temporarily disabling rest + return + content = 'The hail in Wales falls mainly on the snails.' responses = echo.expand({ 'content': content, @@ -38,6 +42,10 @@ def test_unary_stream(echo): def test_stream_unary(echo): + if isinstance(echo.transport, type(echo).get_transport_class("rest")): + # (TODO: dovs) Temporarily disabling rest + return + requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) @@ -46,12 +54,20 @@ def test_stream_unary(echo): def test_stream_unary_passing_dict(echo): + if isinstance(echo.transport, type(echo).get_transport_class("rest")): + # (TODO: dovs) Temporarily disabling rest + return + requests = [{'content': 'hello'}, {'content': 'world!'}] response = echo.collect(iter(requests)) assert response.content == 'hello world!' 
def test_stream_stream(echo): + if isinstance(echo.transport, type(echo).get_transport_class("rest")): + # (TODO: dovs) Temporarily disabling rest + return + requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) @@ -66,6 +82,10 @@ def test_stream_stream(echo): def test_stream_stream_passing_dict(echo): + if isinstance(echo.transport, type(echo).get_transport_class("rest")): + # (TODO: dovs) Temporarily disabling rest + return + requests = [{'content': 'hello'}, {'content': 'world!'}] responses = echo.chat(iter(requests), metadata=metadata) diff --git a/packages/gapic-generator/tests/system/test_grpc_unary.py b/packages/gapic-generator/tests/system/test_unary.py similarity index 100% rename from packages/gapic-generator/tests/system/test_grpc_unary.py rename to packages/gapic-generator/tests/system/test_unary.py From 7fa63c6abcc782dfa71c6a828ff931d1aedff87f Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 30 Dec 2021 12:22:28 -0800 Subject: [PATCH 0688/1339] fix: handle message bodies (#1117) Some methods with http annotations have body fields that are message types. Previously, generated unit tests did not handle this well. This is a fix for that: generate a reasonable mock value for the message, represented as a dict. 
--- .../gapic-generator/gapic/schema/wrappers.py | 17 +++++- .../fragments/test_non_primitive_body.proto | 53 +++++++++++++++++++ .../tests/unit/schema/wrappers/test_field.py | 30 ++++++++++- 3 files changed, 96 insertions(+), 4 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/test_non_primitive_body.proto diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index aa474e0e4ea3..c4c9e6bec098 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -94,7 +94,20 @@ def map(self) -> bool: return bool(self.repeated and self.message and self.message.map) @utils.cached_property - def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, List[Any], None]: + def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, Dict[str, Any], List[Any], None]: + # Return messages as dicts and let the message ctor handle the conversion. + if self.message: + if self.map: + # Not worth the hassle, just return an empty map. 
+ return {} + + msg_dict = { + f.name: f.mock_value_original_type + for f in self.message.fields.values() + } + + return [msg_dict] if self.repeated else msg_dict + answer = self.primitive_mock() or None # If this is a repeated field, then the mock answer should @@ -173,7 +186,7 @@ def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, answer: Union[bool, str, bytes, int, float, List[Any], None] = None if not isinstance(self.type, PrimitiveType): - raise TypeError(f"'inner_mock_as_original_type' can only be used for" + raise TypeError(f"'primitive_mock' can only be used for " f"PrimitiveType, but type is {self.type}") else: diff --git a/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto b/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto new file mode 100644 index 000000000000..f322a747a445 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto @@ -0,0 +1,53 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; + +service SmallCompute { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.http) = { + body: "method_body" + post: "/computation/v1/first_name/{first_name}/last_name/{last_name}" + }; + }; +} + +message SerialNumber { + int32 number = 1; +} + +message MethodRequest { + message MethodBody { + int32 mass_kg = 1; + int32 length_cm = 2; + repeated SerialNumber serial_numbers = 3; + map<string, string> word_associations = 4; + } + + string first_name = 1 [(google.api.field_behavior) = REQUIRED]; + string last_name = 2 [(google.api.field_behavior) = REQUIRED]; + MethodBody method_body = 3 [(google.api.field_behavior) = REQUIRED]; +} + +message MethodResponse { + string name = 1; +} \ No newline at end of file diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index f823104e7752..151b2762b8a3 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -241,6 +241,7 @@ def test_mock_value_map(): label=3, type='TYPE_MESSAGE', ) + assert field.mock_value == "{'key_value': 'value_value'}" @@ -290,7 +291,7 @@ def test_mock_value_message(): assert field.mock_value == 'bogus.Message(foo=324)' -def test_mock_value_original_type_message_errors(): +def test_mock_value_original_type_message(): subfields = collections.OrderedDict(( ('foo', make_field(name='foo', type='TYPE_INT32')), ('bar', make_field(name='bar', type='TYPE_STRING')) @@ -307,14 +308,39 @@ def test_mock_value_original_type_message_errors(): nested_enums={}, nested_messages={}, ) + field = make_field( type='TYPE_MESSAGE', type_name='bogus.Message', message=message, ) + mock = 
field.mock_value_original_type + + assert mock == {"foo": 324, "bar": "bar_value"} + + # Messages by definition aren't primitive with pytest.raises(TypeError): - mock = field.mock_value_original_type + field.primitive_mock() + + # Special case for map entries + entry_msg = make_message( + name='MessageEntry', + fields=( + make_field(name='key', type='TYPE_STRING'), + make_field(name='value', type='TYPE_STRING'), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + entry_field = make_field( + name="messages", + type_name="stuff.MessageEntry", + message=entry_msg, + label=3, + type='TYPE_MESSAGE', + ) + + assert entry_field.mock_value_original_type == {} def test_mock_value_recursive(): From 0c3f552aee10551e509700202e9c0d182d4b9093 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 30 Dec 2021 15:38:11 -0700 Subject: [PATCH 0689/1339] chore: release 0.58.4 (#1118) --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index cafdb7ae1fdc..285d53d8ed28 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog + +### [0.58.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.3...v0.58.4) (2021-12-30) + + +### Bug Fixes + +* handle message bodies ([#1117](https://www.github.com/googleapis/gapic-generator-python/issues/1117)) ([36e3236](https://github.com/googleapis/gapic-generator-python/commit/36e3236b3832993331d8d99c10e72797a8851390)) + + ### [0.58.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.2...v0.58.3) (2021-12-28) From c4efabe955653489298ebec081d5cda71d009a08 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 10 Jan 2022 08:56:39 -0700 Subject: [PATCH 0690/1339] feat: add snippet index (#1121) This PR adds 
snippet_metadata.proto and another samplegen utils class to store the snippets so that they can be looked up by library template code. PRs to begin generating metadata and add the samples to the library docstrings will follow (I originally planned it for one PR, but the changeset was a bit too big). --- .../gapic/samplegen_utils/snippet_index.py | 175 ++++ .../samplegen_utils/snippet_metadata.proto | 319 +++++++ .../samplegen_utils/snippet_metadata_pb2.py | 840 ++++++++++++++++++ .../tests/unit/samplegen/common_types.py | 2 +- .../unit/samplegen/test_snippet_index.py | 229 +++++ 5 files changed, 1564 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/gapic/samplegen_utils/snippet_index.py create mode 100644 packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto create mode 100644 packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py new file mode 100644 index 000000000000..a8594a92ee27 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -0,0 +1,175 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional, Dict +import re + +from google.protobuf import json_format + +from gapic.schema import api, metadata +from gapic.samplegen_utils import snippet_metadata_pb2 # type: ignore +from gapic.samplegen_utils import types + + +CLIENT_INIT_RE = re.compile(r"^\s+# Create a client") +REQUEST_INIT_RE = re.compile(r"^\s+# Initialize request argument\(s\)") +REQUEST_EXEC_RE = re.compile(r"^\s+# Make the request") +RESPONSE_HANDLING_RE = re.compile(r"^\s+# Handle response") + + +class Snippet: + """A single snippet and its metadata. + + Attributes: + sample_str (str): The full text of the code snippet. + metadata (snippet_metadata_pb2.Snippet): The snippet's metadata. + """ + + def __init__(self, sample_str: str, sample_metadata): + self.sample_str = sample_str + self.metadata = sample_metadata + self._parse_snippet_segments() + + def _parse_snippet_segments(self): + """Parse sections of the sample string and update metadata""" + self.sample_lines = self.sample_str.splitlines(keepends=True) + + self._full_snippet = snippet_metadata_pb2.Snippet.Segment( + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.FULL) + self._short_snippet = snippet_metadata_pb2.Snippet.Segment( + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.SHORT) + self._client_init = snippet_metadata_pb2.Snippet.Segment( + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.CLIENT_INITIALIZATION) + self._request_init = snippet_metadata_pb2.Snippet.Segment( + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.REQUEST_INITIALIZATION) + self._request_exec = snippet_metadata_pb2.Snippet.Segment( + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.REQUEST_EXECUTION) + self._response_handling = snippet_metadata_pb2.Snippet.Segment( + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.RESPONSE_HANDLING, + end=len(self.sample_lines) + ) + + # Index starts at 1 since these represent line numbers + for i, line in enumerate(self.sample_lines, start=1): + if 
line.startswith("# [START"): # do not include region tag lines + self._full_snippet.start = i + 1 + self._short_snippet.start = self._full_snippet.start + elif line.startswith("# [END"): + self._full_snippet.end = i - 1 + self._short_snippet.end = self._full_snippet.end + elif CLIENT_INIT_RE.match(line): + self._client_init.start = i + elif REQUEST_INIT_RE.match(line): + self._client_init.end = i - 1 + self._request_init.start = i + elif REQUEST_EXEC_RE.match(line): + self._request_init.end = i - 1 + self._request_exec.start = i + elif RESPONSE_HANDLING_RE.match(line): + self._request_exec.end = i - 1 + self._response_handling.start = i + + self.metadata.segments.extend([self._full_snippet, self._short_snippet, self._client_init, + self._request_init, self._request_exec, self._response_handling]) + + @property + def full_snippet(self) -> str: + """The portion between the START and END region tags.""" + start_idx = self._full_snippet.start - 1 + end_idx = self._full_snippet.end + return "".join(self.sample_lines[start_idx:end_idx]) + + +class SnippetIndex: + """An index of all the snippets for an API. + + Attributes: + metadata_index (snippet_metadata_pb2.Index): The snippet metadata index. + """ + + def __init__(self, api_schema: api.API): + self.metadata_index = snippet_metadata_pb2.Index() # type: ignore + + # Construct a dictionary to insert samples into based on the API schema + # NOTE: In the future we expect the generator to support configured samples, + # which will result in more than one sample variant per RPC. At that + # time a different data structure (and re-writes of add_snippet and get_snippet) + # will be needed. + self._index: Dict[str, Dict[str, Dict[str, Optional[Snippet]]]] = {} + + self._index = { + s.name: {m: {"sync": None, "async": None} for m in s.methods} + for s in api_schema.services.values() + } + + def add_snippet(self, snippet: Snippet) -> None: + """Add a single snippet to the snippet index. 
+ + Args: + snippet (Snippet): The code snippet to be added. + + Raises: + UnknownService: If the service indicated by the snippet metadata is not found. + RpcMethodNotFound: If the method indicated by the snippet metadata is not found. + """ + service_name = snippet.metadata.client_method.method.service.short_name + rpc_name = snippet.metadata.client_method.method.full_name + + service = self._index.get(service_name) + if service is None: + raise types.UnknownService( + "API does not have a service named '{}'.".format(service_name)) + + method = service.get(rpc_name) + if method is None: + raise types.RpcMethodNotFound( + "API does not have method '{}' in service '{}'".format(rpc_name, service_name)) + + if getattr(snippet.metadata.client_method, "async"): + method["async"] = snippet + else: + method["sync"] = snippet + + self.metadata_index.snippets.append(snippet.metadata) + + def get_snippet(self, service_name: str, rpc_name: str, sync: bool = True) -> Optional[Snippet]: + """Fetch a single snippet from the index. + + Args: + service_name (str): The name of the service. + rpc_name (str): The name of the RPC. + sync (bool): True for the sync version of the snippet, False for the async version. + + Returns: + Optional[Snippet]: The snippet if it exists, or None. + + Raises: + UnknownService: If the service is not found. + RpcMethodNotFound: If the method is not found. 
+ """ + # Fetch a snippet from the snippet metadata index + service = self._index.get(service_name) + if service is None: + raise types.UnknownService( + "API does not have a service named '{}'.".format(service_name)) + method = service.get(rpc_name) + if method is None: + raise types.RpcMethodNotFound( + "API does not have method '{}' in service '{}'".format(rpc_name, service_name)) + + return method["sync" if sync else "async"] + + def get_metadata_json(self) -> str: + """JSON representation of Snippet Index.""" + return json_format.MessageToJson(self.metadata_index, sort_keys=True) diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto new file mode 100644 index 000000000000..398dc9b27ccb --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto @@ -0,0 +1,319 @@ +// Copyright 2021 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// NOTE(busunkim): This is a temporary home for this file and the corresponding +// snippet_metadata_pb2.py. 
+ +syntax = "proto3"; + +package google.cloud.tools.snippetgen.snippetindex.v1; + +option csharp_namespace = "Google.Cloud.Tools.SnippetGen.SnippetIndex.V1"; +option php_namespace = "Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1"; +option ruby_package = "Google::Cloud::Tools::SnippetGen::SnippetIndex::V1"; + +// The snippet index for a single client library. +message Index { + // The Client Library these snippets are for. + ClientLibrary client_library = 1; + + // The list of snippets. + repeated Snippet snippets = 2; +} + +// One sample. +// Parts of this information will be language specific. +message Snippet { + // The region tag name. Does not include the square brackets or the START or + // END indicators. + string region_tag = 1; + + // The title of the snippet, for human consumption mostly. For generated + // snippets this may be the snippet method or file name, or obtained from + /// snippet configuration. + string title = 2; + + // A description of the snippet, for human consumption mostly. For generated + // snippets this may be the description of the service method, or obtained + // from snippet configuration. + string description = 3; + + // The file where the snippet code lives. + // The path should be relative to where this metadata file is stored on the + // GitHub repo root and should not include branch, tag, commitish, etc., + // as those will be the same as for the metadata file. + string file = 4; + + // The programming language the snippet is written in. + // This will match the client library language most of the time, but not + // always. For instance, in .NET, libraries are written in C# but some samples + // may be written in F# or VB .NET. + // Note that this does not contain information about the snippet supported + // platforms or language versions, etc. This is just a quick way to identify + // the generally supported langauge. + Language language = 5; + + // The client library method this snippet is for. 
+ ClientMethod client_method = 6; + + // Wether this is the canonical snippet for the corresponding service method. + // This is to be interpreted in conjunction with origin as follows: + // For a given service method: + // - A handwritten canonical takes precedence over + // - A config canonical which in turn takes precedence over + // - A baseline canonical. + bool canonical = 7; + + // The origin of the snippet. + Origin origin = 8; + + // The different segments of the snippet. + // Must contain the FULL segment always. + // There may be overlap between segments. + repeated Segment segments = 9; + + // The origin of the snippet. + enum Origin { + // The origin has not been specified. Consumers should not see this value. + ORIGIN_UNSPECIFIED = 0; + + // The snippet is generated from the API definition only, including protos + // and descriptive information, i.e. the same information used to generate + // the client libraries themselves. + // No snippet configuration has been specified. This refers to SnippetGen + // phase 1. + API_DEFINITION = 1; + + // The snippet is generated from the API definition and a specific snippet + // configuration. This refers to SnippetGen phase 2. + CONFIG = 2; + + // The snippet is handwritten. + HANDWRITTEN = 3; + } + + // A segment of the snippet. + message Segment { + // The line where this segment begins, inclusive. + // For the FULL segment, this will be the START region tag line + 1. + int32 start = 1; + + // The line where this segment ends, inclusive. + // For the FULL segment, this will be the END region tag line - 1. + int32 end = 2; + + // The type of the segment. + SegmentType type = 3; + + // The type of the segment. + // Basically describes what this segment shows. + enum SegmentType { + // The segment type has not been specified. Consumers should not see this + // value. + SEGMENT_TYPE_UNSPECIFIED = 0; + + // The full sample including import statements. 
+ // This corresponds to the sample as determined by the region tags. + FULL = 1; + + // A shorter version of the full sample, may not include imports and some + // langauge specific initialization code. This is to be used in contexts + // in which the full aspects of the sample are made clear outside the + // code. + SHORT = 2; + + // The segment contains the service client initialization code only. + // To be used in tutorials, codelabs, etc. + CLIENT_INITIALIZATION = 3; + + // The segment contains the request initialization code only. + // To be used in tutorials, codelabs, etc. + REQUEST_INITIALIZATION = 4; + + // The segment contains the request execution code only. + // To be used in tutorials, codelabs, etc. + REQUEST_EXECUTION = 5; + + // The segment contains the response handling code only. + // To be used in tutorials, codelabs, etc. + RESPONSE_HANDLING = 6; + } + } +} + +// A client library method. +// Will contain language specific information. +message ClientMethod { + // The short name of the method, usually the name it is declared with. + // This may not be unique within the service client because of overloads. + string short_name = 1; + + // The fully qualified name of the method, which is the short_name qualified + // by the full_name of the service client. + // This value is redundant, but present to make it easier for consumers to + // obtain it. + // This may not be unique within the service client because of overloads. + string full_name = 2; + + // Indicates wether this method is synchronous or asynchronous. + // Some languages may support only one of the variants, in which case, this + // field will always have the same value (for that language). + bool async = 3; + + // Parameters of this method in the same order as they appear on the method + // declaration. Must be empty if the method has no parameters. + repeated Parameter parameters = 4; + + // Fully qualified type name of this method result, if any. 
+ string result_type = 5; + + // The service client this method is declared in. + ServiceClient client = 6; + + // The service method this client method is for. + Method method = 7; + + // A method parameter as described by its type and name. + message Parameter { + // Fully qualified type name of this parameter. + // May be empty for languages that don't specify a type. + string type = 1; + + // Name of the parameter as it appears on the method declaration. + string name = 2; + } +} + +// A service client defined in the client library specified in Index. +// Will contain language specific information. +message ServiceClient { + // The short name of the service client, usually the name it is declared with. + // This may not be unique within the client library because of + // namespaces/packages. + string short_name = 1; + + // The fully qualified name of the service client, which is the short_name + // qualified by the namespace/package/type name this client is declared in. + // This will be unique within the client libray. + string full_name = 2; +} + +// A client library. +// Will contain language specific information. +message ClientLibrary { + // The name of the client library. This value will be language dependent + // and may or may not include the library version. + // Usually this will be the name used to identify the library on per-language + // package managers. + // Examples: "Google.Cloud.Translate.V3", + // "cloud.google.com/go/translate/apiv3". + string name = 1; + + // The full version of the client library. May also be language dependent. + // Cannot be updated on metadata generation, but on library release. + // Examples: "4.3.0", "2.5.2-beta01" + string version = 2; + + // The programming language the library is written in. + // Note that this does not contain information about the library supported + // platforms or language versions, etc. This is just a quick way to identify + // the generally supported langauge. 
+ Language language = 3; + + // The APIs this client library is for. + // Some languages bundle several APIs on the same client library. + repeated Api apis = 4; +} + +message Method { + // The short name of the method, which is the name used to + // declare it within the proto file. This is unique within the service, + // but may not be unique within the API. + string short_name = 1; + + // The full name of the method, which is the short name qualified + // by the full name of the service in which it is declared. + // This is globally unique. + string full_name = 2; + + // The service this method is declared in. + Service service = 3; +} + +// A service defined in the API the client library referenced in Index is for. +message Service { + // The short name of the service, which is the name used to + // declare it within the proto file. This is usually, but not + // absolutely necessarily, unique within an API. + // Example: "TranslationService" + string short_name = 1; + + // The full name of the service, which is the short name qualified + // by the package of the proto in which it is declared. + // This is globally unique. + // Example: "google.cloud.translate.v3.TranslationService" + string full_name = 2; +} + +// An API +message Api { + // The ID of the API, identical to the protobuf package + // ending with a version number. + // Example: "google.cloud.translate.v3" + string id = 1; + + // The full version inferred from the end of the ID. + // Examples: "v3", "v2beta1", "v1beta" + string version = 2; +} + +// A programming language +enum Language { + // The language has not been specified. Consumers should not see this value. 
+ LANGUAGE_UNSPECIFIED = 0; + + C_PLUS_PLUS = 1; + + C_SHARP = 2; + + DART = 3; + + ELIXIR = 4; + + ERLANG = 5; + + F_SHARP = 6; + + GO = 7; + + JAVA = 8; + + JAVASCRIPT = 9; + + KOTLIN = 10; + + PHP = 11; + + PYTHON = 12; + + RUBY = 13; + + RUST = 14; + + SWIFT = 15; + + TYPESCRIPT = 16; + + VB_NET = 17; +} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py new file mode 100644 index 000000000000..73a75da6c289 --- /dev/null +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py @@ -0,0 +1,840 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: snippet_metadata.proto + +# type: ignore + +"""Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='snippet_metadata.proto', + package='google.cloud.tools.snippetgen.snippetindex.v1', + syntax='proto3', + serialized_options=b'\252\002-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\312\002-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\352\0022Google::Cloud::Tools::SnippetGen::SnippetIndex::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\x16snippet_metadata.proto\x12-google.cloud.tools.snippetgen.snippetindex.v1\"\xa7\x01\n\x05Index\x12T\n\x0e\x63lient_library\x18\x01 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary\x12H\n\x08snippets\x18\x02 \x03(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Snippet\"\x9f\x06\n\x07Snippet\x12\x12\n\nregion_tag\x18\x01 
\x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0c\n\x04\x66ile\x18\x04 \x01(\t\x12I\n\x08language\x18\x05 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12R\n\rclient_method\x18\x06 \x01(\x0b\x32;.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod\x12\x11\n\tcanonical\x18\x07 \x01(\x08\x12M\n\x06origin\x18\x08 \x01(\x0e\x32=.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin\x12P\n\x08segments\x18\t \x03(\x0b\x32>.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment\x1a\xa7\x02\n\x07Segment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12X\n\x04type\x18\x03 \x01(\x0e\x32J.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType\"\xa5\x01\n\x0bSegmentType\x12\x1c\n\x18SEGMENT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05SHORT\x10\x02\x12\x19\n\x15\x43LIENT_INITIALIZATION\x10\x03\x12\x1a\n\x16REQUEST_INITIALIZATION\x10\x04\x12\x15\n\x11REQUEST_EXECUTION\x10\x05\x12\x15\n\x11RESPONSE_HANDLING\x10\x06\"Q\n\x06Origin\x12\x16\n\x12ORIGIN_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x41PI_DEFINITION\x10\x01\x12\n\n\x06\x43ONFIG\x10\x02\x12\x0f\n\x0bHANDWRITTEN\x10\x03\"\xf2\x02\n\x0c\x43lientMethod\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\r\n\x05\x61sync\x18\x03 \x01(\x08\x12Y\n\nparameters\x18\x04 \x03(\x0b\x32\x45.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter\x12\x13\n\x0bresult_type\x18\x05 \x01(\t\x12L\n\x06\x63lient\x18\x06 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient\x12\x45\n\x06method\x18\x07 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.snippetindex.v1.Method\x1a\'\n\tParameter\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\rServiceClient\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\xbb\x01\n\rClientLibrary\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12I\n\x08language\x18\x03 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12@\n\x04\x61pis\x18\x04 \x03(\x0b\x32\x32.google.cloud.tools.snippetgen.snippetindex.v1.Api\"x\n\x06Method\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12G\n\x07service\x18\x03 \x01(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Service\"0\n\x07Service\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\"\n\x03\x41pi\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t*\xef\x01\n\x08Language\x12\x18\n\x14LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x08\n\x04\x44\x41RT\x10\x03\x12\n\n\x06\x45LIXIR\x10\x04\x12\n\n\x06\x45RLANG\x10\x05\x12\x0b\n\x07\x46_SHARP\x10\x06\x12\x06\n\x02GO\x10\x07\x12\x08\n\x04JAVA\x10\x08\x12\x0e\n\nJAVASCRIPT\x10\t\x12\n\n\x06KOTLIN\x10\n\x12\x07\n\x03PHP\x10\x0b\x12\n\n\x06PYTHON\x10\x0c\x12\x08\n\x04RUBY\x10\r\x12\x08\n\x04RUST\x10\x0e\x12\t\n\x05SWIFT\x10\x0f\x12\x0e\n\nTYPESCRIPT\x10\x10\x12\n\n\x06VB_NET\x10\x11\x42\x95\x01\xaa\x02-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\xca\x02-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\xea\x02\x32Google::Cloud::Tools::SnippetGen::SnippetIndex::V1b\x06proto3' +) + +_LANGUAGE = _descriptor.EnumDescriptor( + name='Language', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Language', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LANGUAGE_UNSPECIFIED', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='C_PLUS_PLUS', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='C_SHARP', index=2, number=2, + 
serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='DART', index=3, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='ELIXIR', index=4, number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='ERLANG', index=5, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='F_SHARP', index=6, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='GO', index=7, number=7, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JAVA', index=8, number=8, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='JAVASCRIPT', index=9, number=9, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='KOTLIN', index=10, number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='PHP', index=11, number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='PYTHON', index=12, number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='RUBY', index=13, number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='RUST', index=14, number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + 
_descriptor.EnumValueDescriptor( + name='SWIFT', index=15, number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TYPESCRIPT', index=16, number=16, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='VB_NET', index=17, number=17, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=1873, + serialized_end=2112, +) +_sym_db.RegisterEnumDescriptor(_LANGUAGE) + +Language = enum_type_wrapper.EnumTypeWrapper(_LANGUAGE) +LANGUAGE_UNSPECIFIED = 0 +C_PLUS_PLUS = 1 +C_SHARP = 2 +DART = 3 +ELIXIR = 4 +ERLANG = 5 +F_SHARP = 6 +GO = 7 +JAVA = 8 +JAVASCRIPT = 9 +KOTLIN = 10 +PHP = 11 +PYTHON = 12 +RUBY = 13 +RUST = 14 +SWIFT = 15 +TYPESCRIPT = 16 +VB_NET = 17 + + +_SNIPPET_SEGMENT_SEGMENTTYPE = _descriptor.EnumDescriptor( + name='SegmentType', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='SEGMENT_TYPE_UNSPECIFIED', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='FULL', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='SHORT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CLIENT_INITIALIZATION', index=3, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='REQUEST_INITIALIZATION', index=4, number=4, + serialized_options=None, + type=None, + 
create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='REQUEST_EXECUTION', index=5, number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='RESPONSE_HANDLING', index=6, number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=795, + serialized_end=960, +) +_sym_db.RegisterEnumDescriptor(_SNIPPET_SEGMENT_SEGMENTTYPE) + +_SNIPPET_ORIGIN = _descriptor.EnumDescriptor( + name='Origin', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='ORIGIN_UNSPECIFIED', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='API_DEFINITION', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CONFIG', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='HANDWRITTEN', index=3, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=962, + serialized_end=1043, +) +_sym_db.RegisterEnumDescriptor(_SNIPPET_ORIGIN) + + +_INDEX = _descriptor.Descriptor( + name='Index', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='client_library', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index.client_library', index=0, + 
number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='snippets', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index.snippets', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=74, + serialized_end=241, +) + + +_SNIPPET_SEGMENT = _descriptor.Descriptor( + name='Segment', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='start', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.start', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='end', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.end', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.type', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _SNIPPET_SEGMENT_SEGMENTTYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=665, + serialized_end=960, +) + +_SNIPPET = _descriptor.Descriptor( + name='Snippet', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='region_tag', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.region_tag', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='title', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.title', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='description', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.description', index=2, + number=3, type=9, cpp_type=9, label=1, + 
has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='file', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.file', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.language', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client_method', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.client_method', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='canonical', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.canonical', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='origin', 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.origin', index=7, + number=8, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='segments', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.segments', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_SNIPPET_SEGMENT, ], + enum_types=[ + _SNIPPET_ORIGIN, + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=244, + serialized_end=1043, +) + + +_CLIENTMETHOD_PARAMETER = _descriptor.Descriptor( + name='Parameter', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter.type', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1377, + serialized_end=1416, +) + +_CLIENTMETHOD = _descriptor.Descriptor( + name='ClientMethod', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.short_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.full_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='async', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.async', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + 
name='parameters', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.parameters', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='result_type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.result_type', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='client', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.client', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='method', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.method', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_CLIENTMETHOD_PARAMETER, ], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1046, + serialized_end=1416, +) + + +_SERVICECLIENT = _descriptor.Descriptor( + name='ServiceClient', + 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient.short_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient.full_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1418, + serialized_end=1472, +) + + +_CLIENTLIBRARY = _descriptor.Descriptor( + name='ClientLibrary', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + 
_descriptor.FieldDescriptor( + name='version', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.language', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='apis', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.apis', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1475, + serialized_end=1662, +) + + +_METHOD = _descriptor.Descriptor( + name='Method', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.short_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.full_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.service', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1664, + serialized_end=1784, +) + + +_SERVICE = _descriptor.Descriptor( + name='Service', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service.short_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='full_name', 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service.full_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1786, + serialized_end=1834, +) + + +_API = _descriptor.Descriptor( + name='Api', + full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='version', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1836, + serialized_end=1870, +) + +_INDEX.fields_by_name['client_library'].message_type = _CLIENTLIBRARY 
+_INDEX.fields_by_name['snippets'].message_type = _SNIPPET +_SNIPPET_SEGMENT.fields_by_name['type'].enum_type = _SNIPPET_SEGMENT_SEGMENTTYPE +_SNIPPET_SEGMENT.containing_type = _SNIPPET +_SNIPPET_SEGMENT_SEGMENTTYPE.containing_type = _SNIPPET_SEGMENT +_SNIPPET.fields_by_name['language'].enum_type = _LANGUAGE +_SNIPPET.fields_by_name['client_method'].message_type = _CLIENTMETHOD +_SNIPPET.fields_by_name['origin'].enum_type = _SNIPPET_ORIGIN +_SNIPPET.fields_by_name['segments'].message_type = _SNIPPET_SEGMENT +_SNIPPET_ORIGIN.containing_type = _SNIPPET +_CLIENTMETHOD_PARAMETER.containing_type = _CLIENTMETHOD +_CLIENTMETHOD.fields_by_name['parameters'].message_type = _CLIENTMETHOD_PARAMETER +_CLIENTMETHOD.fields_by_name['client'].message_type = _SERVICECLIENT +_CLIENTMETHOD.fields_by_name['method'].message_type = _METHOD +_CLIENTLIBRARY.fields_by_name['language'].enum_type = _LANGUAGE +_CLIENTLIBRARY.fields_by_name['apis'].message_type = _API +_METHOD.fields_by_name['service'].message_type = _SERVICE +DESCRIPTOR.message_types_by_name['Index'] = _INDEX +DESCRIPTOR.message_types_by_name['Snippet'] = _SNIPPET +DESCRIPTOR.message_types_by_name['ClientMethod'] = _CLIENTMETHOD +DESCRIPTOR.message_types_by_name['ServiceClient'] = _SERVICECLIENT +DESCRIPTOR.message_types_by_name['ClientLibrary'] = _CLIENTLIBRARY +DESCRIPTOR.message_types_by_name['Method'] = _METHOD +DESCRIPTOR.message_types_by_name['Service'] = _SERVICE +DESCRIPTOR.message_types_by_name['Api'] = _API +DESCRIPTOR.enum_types_by_name['Language'] = _LANGUAGE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), { + 'DESCRIPTOR': _INDEX, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Index) + }) +_sym_db.RegisterMessage(Index) + +Snippet = _reflection.GeneratedProtocolMessageType('Snippet', (_message.Message,), { + + 'Segment': 
_reflection.GeneratedProtocolMessageType('Segment', (_message.Message,), { + 'DESCRIPTOR': _SNIPPET_SEGMENT, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment) + }), + 'DESCRIPTOR': _SNIPPET, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet) + }) +_sym_db.RegisterMessage(Snippet) +_sym_db.RegisterMessage(Snippet.Segment) + +ClientMethod = _reflection.GeneratedProtocolMessageType('ClientMethod', (_message.Message,), { + + 'Parameter': _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), { + 'DESCRIPTOR': _CLIENTMETHOD_PARAMETER, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter) + }), + 'DESCRIPTOR': _CLIENTMETHOD, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod) + }) +_sym_db.RegisterMessage(ClientMethod) +_sym_db.RegisterMessage(ClientMethod.Parameter) + +ServiceClient = _reflection.GeneratedProtocolMessageType('ServiceClient', (_message.Message,), { + 'DESCRIPTOR': _SERVICECLIENT, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient) + }) +_sym_db.RegisterMessage(ServiceClient) + +ClientLibrary = _reflection.GeneratedProtocolMessageType('ClientLibrary', (_message.Message,), { + 'DESCRIPTOR': _CLIENTLIBRARY, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary) + }) +_sym_db.RegisterMessage(ClientLibrary) + +Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), { + 'DESCRIPTOR': _METHOD, + '__module__': 'snippet_metadata_pb2' + # 
@@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Method) + }) +_sym_db.RegisterMessage(Method) + +Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), { + 'DESCRIPTOR': _SERVICE, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Service) + }) +_sym_db.RegisterMessage(Service) + +Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), { + 'DESCRIPTOR': _API, + '__module__': 'snippet_metadata_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Api) + }) +_sym_db.RegisterMessage(Api) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/samplegen/common_types.py index 538b9d68645e..1d10553b7dd4 100644 --- a/packages/gapic-generator/tests/unit/samplegen/common_types.py +++ b/packages/gapic-generator/tests/unit/samplegen/common_types.py @@ -91,7 +91,7 @@ def resource_path_args(self): DummyService = namedtuple("DummyService", [ - "methods", "client_name", "async_client_name", "resource_messages_dict"]) + "name", "methods", "client_name", "async_client_name", "resource_messages_dict"]) DummyService.__new__.__defaults__ = (False,) * len(DummyService._fields) DummyApiSchema = namedtuple("DummyApiSchema", diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py new file mode 100644 index 000000000000..09f782a099f1 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -0,0 +1,229 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +from google.protobuf import json_format +import pytest + +from gapic.samplegen_utils import snippet_metadata_pb2 +from gapic.samplegen_utils import snippet_index, types +from common_types import DummyApiSchema, DummyService, DummyMethod + + +@pytest.fixture +def sample_str(): + return """# [START mollusc_classify_sync] +from molluscs.v1 import molluscclient + + +def sample_classify(video, location): + # Create a client + client = molluscclient.MolluscServiceClient() + + # Initialize request argument(s) + classify_target = molluscclient.ClassifyTarget() + + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target.video = f.read() + + # location = "New Zealand" + classify_target.location_annotation = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = client.classify(request=request) + + # Handle response + print(f"Mollusc is a \"{response.taxonomy}\"") + +# [END mollusc_classify_sync]""" + + +def test_snippet_init(sample_str): + # We are not trying to exhaustively test the snippet metadata protobuf, + # just checking that fields are not unset + sample_metadata = snippet_metadata_pb2.Snippet(title="classify_squid.py") + sample_metadata.language = snippet_metadata_pb2.Language.PYTHON + snippet = snippet_index.Snippet(sample_str, sample_metadata) + + assert snippet.sample_str == sample_str + + # It's easier to eyeball diffs on the dictionary representation + assert json_format.MessageToDict(snippet.metadata) == { + "language": 
"PYTHON", + "title": "classify_squid.py", + "segments": [ + {"end": 28, "start": 2, "type": "FULL"}, + {"end": 28, "start": 2, "type": "SHORT"}, + {"end": 8, "start": 6, "type": "CLIENT_INITIALIZATION"}, + {"end": 22, "start": 9, "type": "REQUEST_INITIALIZATION"}, + {"end": 25, "start": 23, "type": "REQUEST_EXECUTION"}, + {"end": 29, "start": 26, "type": "RESPONSE_HANDLING"}, + ] + } + + # This is the same as the sample_str above, minus the # [START ...] + # and # [END ...] lines + expected_full_snipppet = """from molluscs.v1 import molluscclient + + +def sample_classify(video, location): + # Create a client + client = molluscclient.MolluscServiceClient() + + # Initialize request argument(s) + classify_target = molluscclient.ClassifyTarget() + + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target.video = f.read() + + # location = "New Zealand" + classify_target.location_annotation = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = client.classify(request=request) + + # Handle response + print(f"Mollusc is a \"{response.taxonomy}\"") + +""" + + assert snippet.full_snippet == expected_full_snipppet + + +def test_add_snippet_no_matching_service(sample_str): + snippet_metadata = snippet_metadata_pb2.Snippet( + ) + snippet_metadata.client_method.method.service.short_name = "Clam" + snippet = snippet_index.Snippet(sample_str, snippet_metadata) + + # No 'Clam' service in API Schema + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + services={"Squid": DummyService(name="Squid", methods={})} + )) + with pytest.raises(types.UnknownService): + index.add_snippet(snippet) + + +def test_add_snippet_no_matching_rpc(sample_str): + snippet_metadata = snippet_metadata_pb2.Snippet( + ) + snippet_metadata.client_method.method.service.short_name = "Squid" + snippet_metadata.client_method.full_name = "classify" + snippet = 
snippet_index.Snippet(sample_str, snippet_metadata) + + # No 'classify' method in 'Squid' service + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + services={"Squid": DummyService(name="Squid", methods={"list": None})} + )) + with pytest.raises(types.RpcMethodNotFound): + index.add_snippet(snippet) + + +def test_get_snippet_no_matching_service(): + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + services={"Squid": DummyService( + name="Squid", methods={"classify": DummyMethod()})} + )) + + # No 'Clam' service in API Schema + with pytest.raises(types.UnknownService): + index.get_snippet(service_name="Clam", rpc_name="classify") + + +def test_get_snippet_no_matching_rpc(): + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + services={"Squid": DummyService( + name="Squid", methods={"classify": DummyMethod()})} + )) + + # No 'list' RPC in 'Squid' service + with pytest.raises(types.RpcMethodNotFound): + index.get_snippet(service_name="Squid", rpc_name="list") + + +def test_add_and_get_snippet_sync(sample_str): + snippet_metadata = snippet_metadata_pb2.Snippet() + snippet_metadata.client_method.method.service.short_name = "Squid" + snippet_metadata.client_method.method.full_name = "classify" + snippet = snippet_index.Snippet(sample_str, snippet_metadata) + + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + services={"Squid": DummyService( + name="Squid", methods={"classify": DummyMethod()})} + )) + + index.add_snippet(snippet) + + index.get_snippet(service_name="Squid", rpc_name="classify") + + +def test_add_and_get_snippet_async(sample_str): + snippet_metadata = snippet_metadata_pb2.Snippet() + snippet_metadata.client_method.method.service.short_name = "Squid" + snippet_metadata.client_method.method.full_name = "classify" + setattr(snippet_metadata.client_method, "async", True) + snippet = snippet_index.Snippet(sample_str, snippet_metadata) + + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + 
services={"Squid": DummyService( + name="Squid", methods={"classify": DummyMethod()})} + )) + + index.add_snippet(snippet) + + index.get_snippet(service_name="Squid", rpc_name="classify", sync=False) + + +def test_get_metadata_json(sample_str): + snippet_metadata = snippet_metadata_pb2.Snippet() + snippet_metadata.client_method.method.service.short_name = "Squid" + snippet_metadata.client_method.method.full_name = "classify" + snippet = snippet_index.Snippet(sample_str, snippet_metadata) + + index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + services={"Squid": DummyService( + name="Squid", methods={"classify": DummyMethod()})} + )) + + index.add_snippet(snippet) + + assert json.loads(index.get_metadata_json()) == { + 'snippets': [{'clientMethod': {'method': {'fullName': 'classify', + 'service': {'shortName': 'Squid'}}}, + 'segments': [{'end': 28, 'start': 2, 'type': 'FULL'}, + {'end': 28, 'start': 2, 'type': 'SHORT'}, + {'end': 8, + 'start': 6, + 'type': 'CLIENT_INITIALIZATION'}, + {'end': 22, + 'start': 9, + 'type': 'REQUEST_INITIALIZATION'}, + {'end': 25, + 'start': 23, + 'type': 'REQUEST_EXECUTION'}, + {'end': 29, + 'start': 26, + 'type': 'RESPONSE_HANDLING'}]}] + } From 69e36ed21e33e9ce7bd6375b530cf200bcd424e3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 10 Jan 2022 09:20:31 -0800 Subject: [PATCH 0691/1339] chore: release 0.59.0 (#1126) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 285d53d8ed28..2adbf755fd62 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [0.59.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.4...v0.59.0) (2022-01-10) + + +### 
Features + +* add snippet index ([#1121](https://www.github.com/googleapis/gapic-generator-python/issues/1121)) ([55d2bc6](https://www.github.com/googleapis/gapic-generator-python/commit/55d2bc6580e5db0f837de1b245533a8f1f2e9beb)) + ### [0.58.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.3...v0.58.4) (2021-12-30) From 7710e45c25a3e9e83ae292ea2ca2a797fcac2d99 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Jan 2022 14:21:16 -0500 Subject: [PATCH 0692/1339] chore: upgrade rules_python and grpc (#1124) * chore: upgrade rules_python and grpc * Set minimum bazel version * update integration test image to gcr.io/gapic-images/googleapis * address review feedback --- packages/gapic-generator/.bazelversion | 1 + .../.github/workflows/tests.yaml | 2 +- packages/gapic-generator/WORKSPACE | 16 +++++++++++++--- packages/gapic-generator/repositories.bzl | 17 +++++++++++------ 4 files changed, 26 insertions(+), 10 deletions(-) create mode 100644 packages/gapic-generator/.bazelversion diff --git a/packages/gapic-generator/.bazelversion b/packages/gapic-generator/.bazelversion new file mode 100644 index 000000000000..fcdb2e109f68 --- /dev/null +++ b/packages/gapic-generator/.bazelversion @@ -0,0 +1 @@ +4.0.0 diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index caa39e280c39..32f1967b12a0 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -326,7 +326,7 @@ jobs: run: nox -s fragment${{ matrix.variant }}-${{ matrix.python }} integration: runs-on: ubuntu-latest - container: gcr.io/gapic-images/googleapis-bazel:20210105 + container: gcr.io/gapic-images/googleapis steps: - name: Cancel Previous Runs uses: styfle/cancel-workflow-action@0.7.0 diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 8bf6d5fb0d6c..8456723018ac 100644 --- 
a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -2,15 +2,25 @@ workspace(name = "gapic_generator_python") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") +_bazel_skylib_version = "0.9.0" + +_bazel_skylib_sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0" + http_archive( name = "bazel_skylib", - urls = ["https://github.com/bazelbuild/bazel-skylib/releases/download/0.9.0/bazel_skylib-0.9.0.tar.gz"], + sha256 = _bazel_skylib_sha256, + url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{0}/bazel_skylib-{0}.tar.gz".format(_bazel_skylib_version), ) +_rules_python_version = "0.5.0" + +_rules_python_sha256 = "a2fd4c2a8bcf897b718e5643040b03d9528ac6179f6990774b7c19b2dc6cd96b" + http_archive( name = "rules_python", - strip_prefix = "rules_python-0.1.0", - url = "https://github.com/bazelbuild/rules_python/archive/0.1.0.tar.gz", + sha256 = _rules_python_sha256, + strip_prefix = "rules_python-{}".format(_rules_python_version), + url = "https://github.com/bazelbuild/rules_python/archive/{}.tar.gz".format(_rules_python_version), ) # diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 83ba719f34c1..0f23aba3ca59 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -15,13 +15,15 @@ def gapic_generator_python(): requirements = "@gapic_generator_python//:requirements.txt", ) - _protobuf_version = "3.15.8" - _protobuf_version_in_link = "v%s" % _protobuf_version + _protobuf_version = "3.19.2" + _protobuf_sha256 = "9ceef0daf7e8be16cd99ac759271eb08021b53b1c7b6edd399953a76390234cd" + _protobuf_version_in_link = "v{}".format(_protobuf_version) _maybe( http_archive, name = "com_google_protobuf", - urls = ["https://github.com/protocolbuffers/protobuf/archive/%s.zip" % _protobuf_version_in_link], - strip_prefix = "protobuf-%s" % _protobuf_version, + sha256 = _protobuf_sha256, + url = 
"https://github.com/protocolbuffers/protobuf/archive/refs/tags/{}.zip".format(_protobuf_version_in_link), + strip_prefix = "protobuf-{}".format(_protobuf_version), ) _maybe( @@ -31,11 +33,14 @@ def gapic_generator_python(): urls = ["https://github.com/bazelbuild/bazel-skylib/archive/2169ae1c374aab4a09aa90e65efe1a3aad4e279b.tar.gz"], ) + _grpc_version = "1.43.0" + _grpc_sha256 = "9647220c699cea4dafa92ec0917c25c7812be51a18143af047e20f3fb05adddc" _maybe( http_archive, name = "com_github_grpc_grpc", - strip_prefix = "grpc-1.36.4", - urls = ["https://github.com/grpc/grpc/archive/v1.36.4.zip"], + sha256 = _grpc_sha256, + strip_prefix = "grpc-{}".format(_grpc_version), + url = "https://github.com/grpc/grpc/archive/v{}.tar.gz".format(_grpc_version), ) _maybe( From 21c80f213b5dee10cffade5ddb661f40b7c19e6a Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Mon, 10 Jan 2022 15:01:45 -0800 Subject: [PATCH 0693/1339] fix: refactor mtls logic to standalone method (#1123) * fix: refactor mtls logic to standalone method * chore: update tests * chore: fix unit tests * chore: update unit test * chore: update integration tests * chore: update async code --- .../%sub/services/%service/async_client.py.j2 | 36 ++++++- .../%sub/services/%service/client.py.j2 | 97 ++++++++++++------- .../%name_%version/%sub/test_%service.py.j2 | 59 +++++++++++ .../services/asset_service/async_client.py | 36 ++++++- .../asset_v1/services/asset_service/client.py | 97 ++++++++++++------- .../unit/gapic/asset_v1/test_asset_service.py | 53 ++++++++++ .../services/iam_credentials/async_client.py | 36 ++++++- .../services/iam_credentials/client.py | 97 ++++++++++++------- .../credentials_v1/test_iam_credentials.py | 53 ++++++++++ .../config_service_v2/async_client.py | 36 ++++++- .../services/config_service_v2/client.py | 97 ++++++++++++------- .../logging_service_v2/async_client.py | 36 ++++++- .../services/logging_service_v2/client.py | 97 ++++++++++++------- 
.../metrics_service_v2/async_client.py | 36 ++++++- .../services/metrics_service_v2/client.py | 97 ++++++++++++------- .../logging_v2/test_config_service_v2.py | 53 ++++++++++ .../logging_v2/test_logging_service_v2.py | 53 ++++++++++ .../logging_v2/test_metrics_service_v2.py | 53 ++++++++++ .../services/cloud_redis/async_client.py | 36 ++++++- .../redis_v1/services/cloud_redis/client.py | 97 ++++++++++++------- .../unit/gapic/redis_v1/test_cloud_redis.py | 53 ++++++++++ 21 files changed, 1042 insertions(+), 266 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 2f30d6cfab02..b4173b24693e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -5,7 +5,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources {% if service.any_deprecated %} import warnings @@ -90,6 +90,40 @@ class {{ service.async_client_name }}: from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return {{ service.client_name }}.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> {{ service.name }}Transport: """Returns the transport used by the client instance. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 25e4f2ca5418..c47ac6f46585 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -200,6 +200,65 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %}{# common resources #} + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, {{ service.name }}Transport, None] = None, @@ -248,43 +307,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9dfd50720a1a..4cfacf699e02 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -359,6 +359,65 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp ) +@pytest.mark.parametrize("client_class", [ + {% if 'grpc' in opts.transport %} + {{ service.client_name }}, {{ service.async_client_name }} + {% elif 'rest' in opts.transport %} + {{ service.client_name }} + {% endif %} +]) +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +{% if 'grpc' in opts.transport %} +@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +{% endif %} +def test_{{ service.client_name|snake_case }}_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ {% if 'grpc' in opts.transport %} ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 8491b558d31e..e4c02ed39249 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -98,6 +98,40 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> AssetServiceTransport: """Returns the transport used by the client instance. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index c9b71375271e..3d1da34da7ab 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -240,6 +240,65 @@ def parse_common_location_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AssetServiceTransport, None] = None, @@ -288,43 +347,7 @@ def __init__(self, *, if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index d027f0f2fd7c..2a386149dd73 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -317,6 +317,59 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans ) +@pytest.mark.parametrize("client_class", [ + AssetServiceClient, AssetServiceAsyncClient +]) +@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 18ebfa799bc6..1863e3521ddd 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -103,6 +103,40 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return IAMCredentialsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> IAMCredentialsTransport: """Returns the transport used by the client instance. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 1b16bae261bc..3a856a9eed2a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -236,6 +236,65 @@ def parse_common_location_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, IAMCredentialsTransport, None] = None, @@ -284,43 +343,7 @@ def __init__(self, *, if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index a1bdc0a52d10..b7b2ee1dcef1 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -309,6 +309,59 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra ) +@pytest.mark.parametrize("client_class", [ + IAMCredentialsClient, IAMCredentialsAsyncClient +]) +@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 7cddb80f4eb8..584f5124047e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -102,6 +102,40 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> ConfigServiceV2Transport:
         """Returns the transport used by the client instance.
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py
index 55583554d9ce..377cfda4537d 100644
--- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py
+++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -271,6 +271,65 @@ def parse_common_location_path(path: str) -> Dict[str,str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ConfigServiceV2Transport, None] = None, @@ -319,43 +378,7 @@ def __init__(self, *, if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. 
     # Ordinarily, we provide the transport, but allowing a custom transport
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py
index dd48dfd162ae..b5ee66a099d6 100644
--- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py
+++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import functools
 import re
-from typing import Dict, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union
 import pkg_resources
 
 from google.api_core.client_options import ClientOptions
@@ -94,6 +94,40 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     from_service_account_json = from_service_account_file
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> LoggingServiceV2Transport:
         """Returns the transport used by the client instance.
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py
index 8dd1404e33b8..6b5478a60ff2 100644
--- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py
+++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py
@@ -227,6 +227,65 @@ def parse_common_location_path(path: str) -> Dict[str,str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
+
+        # Figure out the client cert source to use.
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport, None] = None, @@ -275,43 +334,7 @@ def __init__(self, *, if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index a8aa79abde37..b64d742352fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -95,6 +95,40 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. 
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> MetricsServiceV2Transport:
         """Returns the transport used by the client instance.
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py
index 683dec1c9015..e13a49a69562 100644
--- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py
+++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py
@@ -228,6 +228,65 @@ def parse_common_location_path(path: str) -> Dict[str,str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport, None] = None, @@ -276,43 +335,7 @@ def __init__(self, *, if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index d36a14973f2a..00ab3ebb47eb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -310,6 +310,59 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t ) +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, ConfigServiceV2AsyncClient +]) +@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 14af61f54540..ef0e7fb1a2b3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -316,6 +316,59 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, ) +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, LoggingServiceV2AsyncClient +]) +@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7b836d8233ad..dbc0f9035771 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -314,6 +314,59 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, ) +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, MetricsServiceV2AsyncClient +]) +@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", 
modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 60cec2a7448a..7ad1b6a79601 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -118,6 +118,40 @@ def 
from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return CloudRedisClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> CloudRedisTransport: """Returns the transport used by the client instance. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 79b2746ced04..067f20f27188 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -251,6 +251,65 @@ def parse_common_location_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudRedisTransport, None] = None, @@ -299,43 +358,7 @@ def __init__(self, *, if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 61813fd5ead5..445a95ea3f7c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -314,6 +314,59 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo ) +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, CloudRedisAsyncClient +]) +@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), From d5cd6fbdb15298b51de98dd7ff45b865eeb6c9f8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 12 Jan 2022 09:44:45 -0800 Subject: [PATCH 0694/1339] chore(master): release 0.59.1 (#1128) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 2adbf755fd62..2d95536ac079 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.59.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.0...v0.59.1) (2022-01-10) + + +### Bug Fixes + +* refactor mtls logic to standalone method ([#1123](https://github.com/googleapis/gapic-generator-python/issues/1123)) ([d528223](https://github.com/googleapis/gapic-generator-python/commit/d528223e3221487f86a3d82c92cd2e2cf04bec4a)) + ## [0.59.0](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.4...v0.59.0) (2022-01-10) From 6447a7a9eaf4747a63d8b1b9155429d26653ad81 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 18 Jan 2022 08:36:04 -0800 
Subject: [PATCH 0695/1339] fix: only set unset fields if they are query params (#1130) --- .../services/%service/transports/rest.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 26 +++++-- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../services/%service/transports/rest.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 75 ++++--------------- packages/gapic-generator/noxfile.py | 4 - .../test_multiple_required_fields.proto | 11 ++- .../tests/unit/schema/wrappers/test_method.py | 4 + 8 files changed, 48 insertions(+), 80 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 3a47d1e3636d..488646be1202 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -182,8 +182,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not (method.server_streaming or method.client_streaming) %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES = { - {% for req_field in method.input.required_fields if req_field.is_primitive %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.field_pb.default_value or 0 }}{% endif %},{# default is str #} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} } diff --git 
a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 38d91f3ae97c..af7d28335d69 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -994,7 +994,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide )) # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" not in jsonified_request {% endfor %} @@ -1003,23 +1003,32 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" in jsonified_request assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] {% endfor %} - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} + {% if method.query_params %} + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(({% for param in method.query_params %}"{{param|camel_case }}", {% endfor %})) + {% endif %} jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + {% if method.query_params %} + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(({% for param in method.query_params %}"{{param}}", +{% endfor %})) + {% endif %} jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} assert "{{ field_name }}" in jsonified_request @@ -1080,7 +1089,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} expected_params = [ - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} ( "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} @@ -1095,6 +1104,13 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert expected_params == actual_params +def test_{{ method_name }}_rest_unset_required_fields(): + transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.{{ method.name|snake_case }}._get_unset_required_fields({}) + assert set(unset_fields) == (set(({% for param in method.query_params %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in 
method.input.required_fields %}"{{param.name|camel_case}}", {% endfor %}))) + + {% endif %}{# required_fields #} diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index c4c9e6bec098..67fa3afe2322 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1026,7 +1026,7 @@ def path_params(self) -> Sequence[str]: if self.http_opt is None: return [] - pattern = r'\{(\w+)\}' + pattern = r'\{(\w+)(?:=.+?)?\}' return re.findall(pattern, self.http_opt['url']) @property diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index bfec82392306..488646be1202 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -182,7 +182,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not (method.server_streaming or method.client_streaming) %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES = { - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} } diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 
4cfacf699e02..6da9da3efac7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1287,65 +1287,8 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} {% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} -{# TODO(kbandes): remove this if condition when streaming are supported. #} -{% if not (method.server_streaming or method.client_streaming) %} -@pytest.mark.parametrize("request_type", [ - {{ method.input.ident }}, - dict, -]) -def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - # Send a request that will satisfy transcoding - request = {{ method.input.ident }}({{ method.http_options[0].sample_request(method) }}) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - - with mock.patch.object(type(client.transport._session), 'request') as req: - {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) - {% else %} - return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endfor %} - ) - {% endif %} - 
req.return_value = Response() - req.return_value.status_code = 500 - req.return_value.request = PreparedRequest() - {% if method.void %} - json_return_value = '' - {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} - req.return_value._content = json_return_value.encode("UTF-8") - with pytest.raises(core_exceptions.GoogleAPIError): - # We only care that the correct exception is raised when putting - # the request over the wire, so an empty request is fine. - {% if method.client_streaming %} - client.{{ method_name }}(iter([requests])) - {% else %} - client.{{ method_name }}(request) - {% endif %} - - {# TODO(kbandes): remove this if condition when lro and streaming are supported. #} +{% if not (method.server_streaming or method.client_streaming) %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -1458,7 +1401,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide )) # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" not in jsonified_request {% endfor %} @@ -1467,7 +1410,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" in jsonified_request assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] @@ -1480,6 +1423,10 @@ def test_{{ method_name 
}}_rest_required_fields(request_type={{ method.input.ide {% endfor %} unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + {% if method.query_params %} + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(({% for param in method.query_params %}"{{param}}", {% endfor %})) + {% endif %} jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1544,7 +1491,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} expected_params = [ - {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} ( "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} @@ -1559,6 +1506,12 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert expected_params == actual_params +def test_{{ method_name }}_rest_unset_required_fields(): + transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.{{ method.name|snake_case }}._get_unset_required_fields({}) + assert set(unset_fields) == (set(({% for param in method.query_params %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) + {% endif %}{# required_fields #} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index f281461e80d9..6154ea94bb13 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -103,9 +103,6 @@ def __call__(self, frag): f"--python_gapic_opt=transport=grpc+rest,python-gapic-templates={templates}{maybe_old_naming}", ] - if self.use_ads_templates: 
- session_args.extend([]) - outputs.append( self.session.run(*session_args, str(frag), external=True, silent=True,) ) @@ -114,7 +111,6 @@ def __call__(self, frag): # Note: install into the tempdir to prevent issues # with running pip concurrently. self.session.install(tmp_dir, "-e", ".", "-t", tmp_dir, "-qqq") - # Run the fragment's generated unit tests. # Don't bother parallelizing them: we already parallelize # the fragments, and there usually aren't too many tests per fragment. diff --git a/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto b/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto index 8e4cdfe157c2..55a207b40aae 100644 --- a/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto +++ b/packages/gapic-generator/tests/fragments/test_multiple_required_fields.proto @@ -30,14 +30,13 @@ service MultipleRequiredFields { } } -message Description { - string description = 1; -} - message MethodRequest { string kingdom = 1 [(google.api.field_behavior) = REQUIRED]; string phylum = 2 [(google.api.field_behavior) = REQUIRED]; - Description description = 3 [(google.api.field_behavior) = REQUIRED]; + string name = 3 [(google.api.field_behavior) = REQUIRED]; + int32 armor_class = 4 [(google.api.field_behavior) = REQUIRED]; } -message MethodResponse{} \ No newline at end of file +message MethodResponse{ + string text = 1; +} \ No newline at end of file diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 889dc629a3dd..814893c39cf5 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -325,6 +325,10 @@ def test_method_path_params(): method = make_method('DoSomething', http_rule=http_rule) assert method.path_params == ['project'] + http_rule2 = 
http_pb2.HttpRule(post='/v1beta1/{name=rooms/*/blurbs/*}') + method2 = make_method("DoSomething", http_rule=http_rule2) + assert method2.path_params == ["name"] + def test_method_path_params_no_http_rule(): method = make_method('DoSomething') From 2481af785546aaf9373332c3eaa40b34375cc5be Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 18 Jan 2022 11:39:26 -0700 Subject: [PATCH 0696/1339] feat: generate snippet metadata (#1129) Follow up to #1121. **Main changes:** * When snippets are generated, snippet metadata is also generated. * If a method has a snippet, that snippet is included in the method docstring. **Other changes:** * Removed the method docstring in the code snippet file (line right below the method definition) since the same text is already in the comment block at the top of the file. * Removed the concept of a "standalone" sample. All generated samples are expected to be standalone. When someone wants a smaller portion of the sample (e.g., request initialization only) they should fetch it from the file by looking up the line numbers in the snippet metadata file. Other Notes: * ~It doesn't look like it's possible to do type annotations with `_pb2` types, so those are annotated as `Any`.~ It is possible to do mypy checking with https://github.com/dropbox/mypy-protobuf, but I think it will be easier make that change in a separate PR. * There are a lot of golden file updates, [this range of commits](https://github.com/googleapis/gapic-generator-python/pull/1129/files/872c156f5100f1de20631dd59d083206432db374) has _most_ of the generator and test changes. 
--- .../gapic/generator/generator.py | 75 +- .../gapic/samplegen/samplegen.py | 32 +- .../gapic/samplegen_utils/snippet_index.py | 9 +- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../%sub/services/%service/async_client.py.j2 | 8 + .../%sub/services/%service/client.py.j2 | 9 + .../gapic/templates/examples/sample.py.j2 | 2 - .../services/asset_service/async_client.py | 265 ++ .../asset_v1/services/asset_service/client.py | 277 ++ ..._asset_service_analyze_iam_policy_async.py | 2 - ...ce_analyze_iam_policy_longrunning_async.py | 2 - ...ice_analyze_iam_policy_longrunning_sync.py | 2 - ...1_asset_service_analyze_iam_policy_sync.py | 2 - ..._service_batch_get_assets_history_async.py | 2 - ...t_service_batch_get_assets_history_sync.py | 2 - ...sset_v1_asset_service_create_feed_async.py | 2 - ...asset_v1_asset_service_create_feed_sync.py | 2 - ...sset_v1_asset_service_delete_feed_async.py | 2 - ...asset_v1_asset_service_delete_feed_sync.py | 2 - ...et_v1_asset_service_export_assets_async.py | 2 - ...set_v1_asset_service_export_assets_sync.py | 2 - ...d_asset_v1_asset_service_get_feed_async.py | 2 - ...ed_asset_v1_asset_service_get_feed_sync.py | 2 - ...sset_v1_asset_service_list_assets_async.py | 2 - ...asset_v1_asset_service_list_assets_sync.py | 2 - ...asset_v1_asset_service_list_feeds_async.py | 2 - ..._asset_v1_asset_service_list_feeds_sync.py | 2 - ...t_service_search_all_iam_policies_async.py | 2 - ...et_service_search_all_iam_policies_sync.py | 2 - ...sset_service_search_all_resources_async.py | 2 - ...asset_service_search_all_resources_sync.py | 2 - ...sset_v1_asset_service_update_feed_async.py | 2 - ...asset_v1_asset_service_update_feed_sync.py | 2 - .../snippet_metadata_asset_v1.json | 1048 ++++++ .../services/iam_credentials/async_client.py | 100 + .../services/iam_credentials/client.py | 104 + ...credentials_generate_access_token_async.py | 2 - ..._credentials_generate_access_token_sync.py | 2 - ...iam_credentials_generate_id_token_async.py | 2 - 
..._iam_credentials_generate_id_token_sync.py | 2 - ...ials_v1_iam_credentials_sign_blob_async.py | 2 - ...tials_v1_iam_credentials_sign_blob_sync.py | 2 - ...tials_v1_iam_credentials_sign_jwt_async.py | 2 - ...ntials_v1_iam_credentials_sign_jwt_sync.py | 2 - .../snippet_metadata_credentials_v1.json | 360 ++ .../config_service_v2/async_client.py | 538 +++ .../services/config_service_v2/client.py | 561 ++++ .../logging_service_v2/async_client.py | 136 + .../services/logging_service_v2/client.py | 142 + .../metrics_service_v2/async_client.py | 120 + .../services/metrics_service_v2/client.py | 125 + ...2_config_service_v2_create_bucket_async.py | 2 - ...v2_config_service_v2_create_bucket_sync.py | 2 - ...onfig_service_v2_create_exclusion_async.py | 2 - ...config_service_v2_create_exclusion_sync.py | 2 - ..._v2_config_service_v2_create_sink_async.py | 2 - ...g_v2_config_service_v2_create_sink_sync.py | 2 - ..._v2_config_service_v2_create_view_async.py | 2 - ...g_v2_config_service_v2_create_view_sync.py | 2 - ...2_config_service_v2_delete_bucket_async.py | 2 - ...v2_config_service_v2_delete_bucket_sync.py | 2 - ...onfig_service_v2_delete_exclusion_async.py | 2 - ...config_service_v2_delete_exclusion_sync.py | 2 - ..._v2_config_service_v2_delete_sink_async.py | 2 - ...g_v2_config_service_v2_delete_sink_sync.py | 2 - ..._v2_config_service_v2_delete_view_async.py | 2 - ...g_v2_config_service_v2_delete_view_sync.py | 2 - ...g_v2_config_service_v2_get_bucket_async.py | 2 - ...ng_v2_config_service_v2_get_bucket_sync.py | 2 - ...nfig_service_v2_get_cmek_settings_async.py | 2 - ...onfig_service_v2_get_cmek_settings_sync.py | 2 - ...2_config_service_v2_get_exclusion_async.py | 2 - ...v2_config_service_v2_get_exclusion_sync.py | 2 - ...ing_v2_config_service_v2_get_sink_async.py | 2 - ...ging_v2_config_service_v2_get_sink_sync.py | 2 - ...ing_v2_config_service_v2_get_view_async.py | 2 - ...ging_v2_config_service_v2_get_view_sync.py | 2 - 
...v2_config_service_v2_list_buckets_async.py | 2 - ..._v2_config_service_v2_list_buckets_sync.py | 2 - ...config_service_v2_list_exclusions_async.py | 2 - ..._config_service_v2_list_exclusions_sync.py | 2 - ...g_v2_config_service_v2_list_sinks_async.py | 2 - ...ng_v2_config_service_v2_list_sinks_sync.py | 2 - ...g_v2_config_service_v2_list_views_async.py | 2 - ...ng_v2_config_service_v2_list_views_sync.py | 2 - ...config_service_v2_undelete_bucket_async.py | 2 - ..._config_service_v2_undelete_bucket_sync.py | 2 - ...2_config_service_v2_update_bucket_async.py | 2 - ...v2_config_service_v2_update_bucket_sync.py | 2 - ...g_service_v2_update_cmek_settings_async.py | 2 - ...ig_service_v2_update_cmek_settings_sync.py | 2 - ...onfig_service_v2_update_exclusion_async.py | 2 - ...config_service_v2_update_exclusion_sync.py | 2 - ..._v2_config_service_v2_update_sink_async.py | 2 - ...g_v2_config_service_v2_update_sink_sync.py | 2 - ..._v2_config_service_v2_update_view_async.py | 2 - ...g_v2_config_service_v2_update_view_sync.py | 2 - ..._v2_logging_service_v2_delete_log_async.py | 2 - ...g_v2_logging_service_v2_delete_log_sync.py | 2 - ...gging_service_v2_list_log_entries_async.py | 2 - ...ogging_service_v2_list_log_entries_sync.py | 2 - ...g_v2_logging_service_v2_list_logs_async.py | 2 - ...ng_v2_logging_service_v2_list_logs_sync.py | 2 - ...st_monitored_resource_descriptors_async.py | 2 - ...ist_monitored_resource_descriptors_sync.py | 2 - ...gging_service_v2_tail_log_entries_async.py | 2 - ...ogging_service_v2_tail_log_entries_sync.py | 2 - ...ging_service_v2_write_log_entries_async.py | 2 - ...gging_service_v2_write_log_entries_sync.py | 2 - ...rics_service_v2_create_log_metric_async.py | 2 - ...trics_service_v2_create_log_metric_sync.py | 2 - ...rics_service_v2_delete_log_metric_async.py | 2 - ...trics_service_v2_delete_log_metric_sync.py | 2 - ...metrics_service_v2_get_log_metric_async.py | 2 - ..._metrics_service_v2_get_log_metric_sync.py | 2 - 
...trics_service_v2_list_log_metrics_async.py | 2 - ...etrics_service_v2_list_log_metrics_sync.py | 2 - ...rics_service_v2_update_log_metric_async.py | 2 - ...trics_service_v2_update_log_metric_sync.py | 2 - .../snippet_metadata_logging_v2.json | 2966 +++++++++++++++++ .../services/cloud_redis/async_client.py | 241 ++ .../redis_v1/services/cloud_redis/client.py | 250 ++ ...is_v1_cloud_redis_create_instance_async.py | 2 - ...dis_v1_cloud_redis_create_instance_sync.py | 2 - ...is_v1_cloud_redis_delete_instance_async.py | 2 - ...dis_v1_cloud_redis_delete_instance_sync.py | 2 - ...is_v1_cloud_redis_export_instance_async.py | 2 - ...dis_v1_cloud_redis_export_instance_sync.py | 2 - ..._v1_cloud_redis_failover_instance_async.py | 2 - ...s_v1_cloud_redis_failover_instance_sync.py | 2 - ...redis_v1_cloud_redis_get_instance_async.py | 2 - ..._redis_v1_cloud_redis_get_instance_sync.py | 2 - ...is_v1_cloud_redis_import_instance_async.py | 2 - ...dis_v1_cloud_redis_import_instance_sync.py | 2 - ...dis_v1_cloud_redis_list_instances_async.py | 2 - ...edis_v1_cloud_redis_list_instances_sync.py | 2 - ...is_v1_cloud_redis_update_instance_async.py | 2 - ...dis_v1_cloud_redis_update_instance_sync.py | 2 - ...s_v1_cloud_redis_upgrade_instance_async.py | 2 - ...is_v1_cloud_redis_upgrade_instance_sync.py | 2 - .../snippet_metadata_redis_v1.json | 773 +++++ ...llusca_v1_snippets_list_resources_async.py | 2 - ...ollusca_v1_snippets_list_resources_sync.py | 2 - ...v1_snippets_method_bidi_streaming_async.py | 2 - ..._v1_snippets_method_bidi_streaming_sync.py | 2 - ...v1_snippets_method_lro_signatures_async.py | 2 - ..._v1_snippets_method_lro_signatures_sync.py | 2 - ..._v1_snippets_method_one_signature_async.py | 2 - ...a_v1_snippets_method_one_signature_sync.py | 2 - ..._snippets_method_server_streaming_async.py | 2 - ...1_snippets_method_server_streaming_sync.py | 2 - ...ollusca_v1_snippets_one_of_method_async.py | 2 - ...pets_one_of_method_required_field_async.py | 2 - 
...ppets_one_of_method_required_field_sync.py | 2 - ...mollusca_v1_snippets_one_of_method_sync.py | 2 - .../snippet_metadata_v1_mollusca_v1.json | 611 ++++ .../gapic-generator/tests/unit/__init__.py | 0 .../unit/{samplegen => }/common_types.py | 0 .../tests/unit/generator/__init__.py | 0 .../tests/unit/generator/test_generator.py | 409 ++- .../tests/unit/samplegen/__init__.py | 0 .../samplegen/golden_snippets/sample_basic.py | 2 - .../golden_snippets/sample_basic_async.py | 2 - .../sample_basic_unflattenable.py | 2 - .../sample_basic_void_method.py | 2 - .../tests/unit/samplegen/test_integration.py | 53 +- .../tests/unit/samplegen/test_manifest.py | 2 +- .../tests/unit/samplegen/test_samplegen.py | 4 +- .../unit/samplegen/test_snippet_index.py | 13 +- .../tests/unit/samplegen/test_template.py | 2 +- 170 files changed, 8943 insertions(+), 566 deletions(-) create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json create mode 100644 packages/gapic-generator/tests/unit/__init__.py rename packages/gapic-generator/tests/unit/{samplegen => }/common_types.py (100%) create mode 100644 packages/gapic-generator/tests/unit/generator/__init__.py create mode 100644 packages/gapic-generator/tests/unit/samplegen/__init__.py diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 7d2b2cc95cac..609f70bb0f80 100644 --- 
a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -17,12 +17,14 @@ import itertools import re import os +import pathlib import typing -from typing import Any, DefaultDict, Dict, Mapping +from typing import Any, DefaultDict, Dict, Mapping, Tuple from hashlib import sha256 from collections import OrderedDict, defaultdict from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg, render_format_string from gapic.samplegen_utils.types import DuplicateSample +from gapic.samplegen_utils import snippet_index, snippet_metadata_pb2 from gapic.samplegen import manifest, samplegen from gapic.generator import formatter from gapic.schema import api @@ -93,6 +95,17 @@ def get_response( self._env.loader.list_templates(), # type: ignore ) + # We generate code snippets *before* the library code so snippets + # can be inserted into method docstrings. + snippet_idx = snippet_index.SnippetIndex(api_schema) + if sample_templates: + sample_output, snippet_idx = self._generate_samples_and_manifest( + api_schema, snippet_idx, self._env.get_template( + sample_templates[0]), + opts=opts, + ) + output_files.update(sample_output) + # Iterate over each template and add the appropriate output files # based on that template. # Sample templates work differently: there's (usually) only one, @@ -107,15 +120,8 @@ def get_response( # Append to the output files dictionary. output_files.update( self._render_template( - template_name, api_schema=api_schema, opts=opts) - ) - - if sample_templates: - sample_output = self._generate_samples_and_manifest( - api_schema, self._env.get_template(sample_templates[0]), - opts=opts, + template_name, api_schema=api_schema, opts=opts, snippet_index=snippet_idx) ) - output_files.update(sample_output) # Return the CodeGeneratorResponse output. 
res = CodeGeneratorResponse( @@ -124,7 +130,7 @@ def get_response( return res def _generate_samples_and_manifest( - self, api_schema: api.API, sample_template: jinja2.Template, *, opts: Options) -> Dict: + self, api_schema: api.API, index: snippet_index.SnippetIndex, sample_template: jinja2.Template, *, opts: Options) -> Tuple[Dict, snippet_index.SnippetIndex]: """Generate samples and samplegen manifest for the API. Arguments: @@ -133,7 +139,7 @@ def _generate_samples_and_manifest( opts (Options): Additional generator options. Returns: - Dict[str, CodeGeneratorResponse.File]: A dict mapping filepath to rendered file. + Tuple[Dict[str, CodeGeneratorResponse.File], snippet_index.SnippetIndex] : A dict mapping filepath to rendered file. """ # The two-layer data structure lets us do two things: # * detect duplicate samples, which is an error @@ -181,7 +187,7 @@ def _generate_samples_and_manifest( if not id_is_unique: spec["id"] += f"_{spec_hash}" - sample = samplegen.generate_sample( + sample, snippet_metadata = samplegen.generate_sample( spec, api_schema, sample_template,) fpath = utils.to_snake_case(spec["id"]) + ".py" @@ -190,6 +196,11 @@ def _generate_samples_and_manifest( sample, ) + snippet_metadata.file = fpath + + index.add_snippet( + snippet_index.Snippet(sample, snippet_metadata)) + output_files = { fname: CodeGeneratorResponse.File( content=formatter.fix_whitespace(sample), name=fname @@ -197,29 +208,18 @@ def _generate_samples_and_manifest( for fname, (_, sample) in fpath_to_spec_and_rendered.items() } - # TODO(busunkim): Re-enable manifest generation once metadata - # format has been formalized. 
- # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue - # - # if output_files: - - # manifest_fname, manifest_doc = manifest.generate( - # ( - # (fname, spec) - # for fname, (spec, _) in fpath_to_spec_and_rendered.items() - # ), - # api_schema, - # ) - - # manifest_fname = os.path.join(out_dir, manifest_fname) - # output_files[manifest_fname] = CodeGeneratorResponse.File( - # content=manifest_doc.render(), name=manifest_fname - # ) + if index.metadata_index.snippets: + # NOTE(busunkim): Not all fields are yet populated in the snippet metadata. + # Expected filename: snippet_metadata_{apishortname}_{apiversion}.json + snippet_metadata_path = str(pathlib.Path( + out_dir) / f"snippet_metadata_{api_schema.naming.name}_{api_schema.naming.version}.json").lower() + output_files[snippet_metadata_path] = CodeGeneratorResponse.File( + content=formatter.fix_whitespace(index.get_metadata_json()), name=snippet_metadata_path) - return output_files + return output_files, index def _render_template( - self, template_name: str, *, api_schema: api.API, opts: Options, + self, template_name: str, *, api_schema: api.API, opts: Options, snippet_index: snippet_index.SnippetIndex, ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. 
@@ -258,7 +258,7 @@ def _render_template( for subpackage in api_schema.subpackages.values(): answer.update( self._render_template( - template_name, api_schema=subpackage, opts=opts + template_name, api_schema=subpackage, opts=opts, snippet_index=snippet_index ) ) skip_subpackages = True @@ -275,7 +275,7 @@ def _render_template( answer.update( self._get_file( - template_name, api_schema=api_schema, proto=proto, opts=opts + template_name, api_schema=api_schema, proto=proto, opts=opts, snippet_index=snippet_index ) ) @@ -304,6 +304,7 @@ def _render_template( api_schema=api_schema, service=service, opts=opts, + snippet_index=snippet_index, ) ) return answer @@ -311,7 +312,7 @@ def _render_template( # This file is not iterating over anything else; return back # the one applicable file. answer.update(self._get_file( - template_name, api_schema=api_schema, opts=opts)) + template_name, api_schema=api_schema, opts=opts, snippet_index=snippet_index)) return answer def _is_desired_transport(self, template_name: str, opts: Options) -> bool: @@ -324,8 +325,8 @@ def _get_file( template_name: str, *, opts: Options, - api_schema=api.API, - **context: Mapping, + api_schema: api.API, + **context, ): """Render a template to a protobuf plugin File object.""" # Determine the target filename. 
diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 35667f5af631..7f4c3dae317d 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -24,13 +24,13 @@ from gapic import utils -from gapic.samplegen_utils import types +from gapic.samplegen_utils import types, snippet_metadata_pb2 # type: ignore from gapic.samplegen_utils.utils import is_valid_sample_cfg from gapic.schema import api from gapic.schema import wrappers from collections import defaultdict, namedtuple, ChainMap as chainmap -from typing import Any, ChainMap, Dict, FrozenSet, Generator, List, Mapping, Optional, Sequence +from typing import Any, ChainMap, Dict, FrozenSet, Generator, List, Mapping, Optional, Sequence, Tuple # There is no library stub file for this module, so ignore it. from google.api import resource_pb2 # type: ignore @@ -915,8 +915,6 @@ def _validate_loop(self, loop): def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str, Any], None, None]: """Parse a handwritten sample spec""" - STANDALONE_TYPE = "standalone" - for config_fpath in sample_configs: with open(config_fpath) as f: configs = yaml.safe_load_all(f.read()) @@ -925,13 +923,9 @@ def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str valid = is_valid_sample_cfg(cfg) if not valid: raise types.InvalidConfig( - "Sample config is invalid", valid) + "Sample config in '{}' is invalid\n\n{}".format(config_fpath, cfg), valid) for spec in cfg.get("samples", []): - # If unspecified, assume a sample config describes a standalone. - # If sample_types are specified, standalone samples must be - # explicitly enabled. 
- if STANDALONE_TYPE in spec.get("sample_type", [STANDALONE_TYPE]): - yield spec + yield spec def _generate_resource_path_request_object(field_name: str, message: wrappers.MessageType) -> List[Dict[str, str]]: @@ -1050,7 +1044,6 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A # [{START|END} ${apishortname}_generated_${api}_${apiVersion}_${serviceName}_${rpcName}_{sync|async}_${overloadDisambiguation}] region_tag = f"{api_short_name}_generated_{api_schema.naming.versioned_module_name}_{service_name}_{rpc_name}_{transport_type}" spec = { - "sample_type": "standalone", "rpc": rpc_name, "transport": transport, # `request` and `response` is populated in `preprocess_sample` @@ -1062,7 +1055,7 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A yield spec -def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str: +def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tuple[str, Any]: """Generate a standalone, runnable sample. Writing the rendered output is left for the caller. @@ -1073,7 +1066,7 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str sample_template (jinja2.Template): The template representing a generic sample. Returns: - str: The rendered sample. + Tuple(str, snippet_metadata_pb2.Snippet): The rendered sample. """ service_name = sample["service"] service = api_schema.services.get(service_name) @@ -1100,6 +1093,17 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str v.validate_response(sample["response"]) + # Snippet Metadata can't be fully filled out in any one function + # In this function we add information from + # the API schema and sample dictionary. 
+ snippet_metadata = snippet_metadata_pb2.Snippet() # type: ignore + snippet_metadata.region_tag = sample["region_tag"] + setattr(snippet_metadata.client_method, "async", + sample["transport"] == api.TRANSPORT_GRPC_ASYNC) + snippet_metadata.client_method.method.short_name = sample["rpc"] + snippet_metadata.client_method.method.service.short_name = sample["service"].split( + ".")[-1] + return sample_template.render( sample=sample, imports=[], @@ -1107,4 +1111,4 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> str calling_form_enum=types.CallingForm, trim_blocks=True, lstrip_blocks=True, - ) + ), snippet_metadata diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index a8594a92ee27..8b7d3d0794bd 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional, Dict import re +from typing import Optional, Dict from google.protobuf import json_format @@ -88,6 +88,7 @@ def full_snippet(self) -> str: """The portion between the START and END region tags.""" start_idx = self._full_snippet.start - 1 end_idx = self._full_snippet.end + self.sample_lines[start_idx] = self.sample_lines[start_idx].strip() return "".join(self.sample_lines[start_idx:end_idx]) @@ -124,7 +125,7 @@ def add_snippet(self, snippet: Snippet) -> None: RpcMethodNotFound: If the method indicated by the snippet metadata is not found. 
""" service_name = snippet.metadata.client_method.method.service.short_name - rpc_name = snippet.metadata.client_method.method.full_name + rpc_name = snippet.metadata.client_method.method.short_name service = self._index.get(service_name) if service is None: @@ -172,4 +173,8 @@ def get_snippet(self, service_name: str, rpc_name: str, sync: bool = True) -> Op def get_metadata_json(self) -> str: """JSON representation of Snippet Index.""" + + # Downstream tools assume the generator will produce the exact + # same output when run over the same API multiple times + self.metadata_index.snippets.sort(key=lambda s: s.region_tag) return json_format.MessageToJson(self.metadata_index, sort_keys=True) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 67fa3afe2322..cb9203bb7826 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -41,7 +41,7 @@ from google.api import resource_pb2 from google.api_core import exceptions from google.api_core import path_template -from google.cloud import extended_operations_pb2 as ex_ops_pb2 +from google.cloud import extended_operations_pb2 as ex_ops_pb2 # type: ignore from google.protobuf import descriptor_pb2 # type: ignore from google.protobuf.json_format import MessageToDict # type: ignore diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index b4173b24693e..4e22e94625b5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -207,6 +207,14 @@ class {{ service.async_client_name }}: {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} + 
{% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} + {% if snippet is not none %} + .. code-block:: + +{{ snippet.full_snippet|indent(width=12, first=True) }} + {% endif %} + {% endwith %} + Args: {% if not method.client_streaming %} request (Union[{{ method.input.ident.sphinx }}, dict]): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index c47ac6f46585..451ac1afb27f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -363,6 +363,15 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} + + {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} + {% if snippet is not none %} + .. 
code-block:: + +{{ snippet.full_snippet|indent(width=12, first=True) }} + {% endif %} + {% endwith %} + Args: {% if not method.client_streaming %} request (Union[{{ method.input.ident.sphinx }}, dict]): diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 3191860c0c83..a75476580432 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -32,8 +32,6 @@ from {{ sample.module_namespace|join(".") }} import {{ sample.module_name }} {# also need calling form #} {% if sample.transport == "grpc-async" %}async {% endif %}def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): - """{{ sample.description }}""" - {{ frags.render_client_setup(sample.module_name, sample.client_name)|indent }} {{ frags.render_request_setup(sample.request, sample.module_name, sample.request_type, calling_form, calling_form_enum)|indent }} {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum, sample.transport) %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index e4c02ed39249..55c4ae53c34a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -210,6 +210,32 @@ async def export_assets(self, the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. + + .. 
code-block:: + + from google.cloud import asset_v1 + + def sample_export_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + output_config = asset_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + + request = asset_v1.ExportAssetsRequest( + parent="parent_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_assets(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]): The request object. Export asset request. @@ -279,6 +305,25 @@ async def list_assets(self, r"""Lists assets with time and resource types and returns paged results in response. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_list_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): The request object. ListAssets request. @@ -372,6 +417,26 @@ async def batch_get_assets_history(self, specified asset does not exist, this API returns an INVALID_ARGUMENT error. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_batch_get_assets_history(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetAssetsHistoryRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_assets_history(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]): The request object. Batch get assets history request. 
@@ -434,6 +499,31 @@ async def create_feed(self, project/folder/organization to listen to its asset updates. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_create_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + + request = asset_v1.CreateFeedRequest( + parent="parent_value", + feed_id="feed_id_value", + feed=feed, + ) + + # Make the request + response = client.create_feed(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]): The request object. Create asset feed request. @@ -520,6 +610,29 @@ async def get_feed(self, ) -> asset_service.Feed: r"""Gets details about an asset feed. + .. code-block:: + + from google.cloud import asset_v1 + + def sample_get_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + + request = asset_v1.GetFeedRequest( + name=name, + ) + + # Make the request + response = client.get_feed(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.GetFeedRequest, dict]): The request object. Get asset feed request. @@ -609,6 +722,26 @@ async def list_feeds(self, r"""Lists all asset feeds in a parent project/folder/organization. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_list_feeds(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListFeedsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_feeds(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]): The request object. List asset feeds request. 
@@ -692,6 +825,28 @@ async def update_feed(self, ) -> asset_service.Feed: r"""Updates an asset feed configuration. + .. code-block:: + + from google.cloud import asset_v1 + + def sample_update_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + + request = asset_v1.UpdateFeedRequest( + feed=feed, + ) + + # Make the request + response = client.update_feed(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]): The request object. Update asset feed request. @@ -774,6 +929,26 @@ async def delete_feed(self, ) -> None: r"""Deletes an asset feed. + .. code-block:: + + from google.cloud import asset_v1 + + def sample_delete_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + + request = asset_v1.DeleteFeedRequest( + name=name, + ) + + # Make the request + response = client.delete_feed(request=request) + Args: request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): The request object. @@ -853,6 +1028,25 @@ async def search_all_resources(self, the ``cloudasset.assets.searchAllResources`` permission on the desired scope, otherwise the request will be rejected. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_search_all_resources(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllResourcesRequest( + scope="scope_value", + ) + + # Make the request + page_result = client.search_all_resources(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]): The request object. Search all resources request. 
@@ -1039,6 +1233,25 @@ async def search_all_iam_policies(self, ``cloudasset.assets.searchAllIamPolicies`` permission on the desired scope, otherwise the request will be rejected. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_search_all_iam_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllIamPoliciesRequest( + scope="scope_value", + ) + + # Make the request + page_result = client.search_all_iam_policies(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]): The request object. Search all IAM policies request. @@ -1201,6 +1414,29 @@ async def analyze_iam_policy(self, r"""Analyzes IAM policies to answer which identities have what accesses on which resources. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_analyze_iam_policy(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + + request = asset_v1.AnalyzeIamPolicyRequest( + analysis_query=analysis_query, + ) + + # Make the request + response = client.analyze_iam_policy(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]): The request object. A request message for @@ -1273,6 +1509,35 @@ async def analyze_iam_policy_longrunning(self, to poll the operation result. The metadata contains the request to help callers to map responses to requests. + + .. 
code-block:: + + from google.cloud import asset_v1 + + def sample_analyze_iam_policy_longrunning(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + + output_config = asset_v1.IamPolicyAnalysisOutputConfig() + output_config.gcs_destination.uri = "uri_value" + + request = asset_v1.AnalyzeIamPolicyLongrunningRequest( + analysis_query=analysis_query, + output_config=output_config, + ) + + # Make the request + operation = client.analyze_iam_policy_longrunning(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]): The request object. A request message for diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 3d1da34da7ab..5ddda9d5fafd 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -397,6 +397,33 @@ def export_assets(self, the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. + + + .. 
code-block:: + + from google.cloud import asset_v1 + + def sample_export_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + output_config = asset_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + + request = asset_v1.ExportAssetsRequest( + parent="parent_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_assets(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]): The request object. Export asset request. @@ -467,6 +494,26 @@ def list_assets(self, r"""Lists assets with time and resource types and returns paged results in response. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_list_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListAssetsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_assets(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): The request object. ListAssets request. @@ -560,6 +607,27 @@ def batch_get_assets_history(self, specified asset does not exist, this API returns an INVALID_ARGUMENT error. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_batch_get_assets_history(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetAssetsHistoryRequest( + parent="parent_value", + ) + + # Make the request + response = client.batch_get_assets_history(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]): The request object. Batch get assets history request. 
@@ -616,6 +684,32 @@ def create_feed(self, project/folder/organization to listen to its asset updates. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_create_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + + request = asset_v1.CreateFeedRequest( + parent="parent_value", + feed_id="feed_id_value", + feed=feed, + ) + + # Make the request + response = client.create_feed(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]): The request object. Create asset feed request. @@ -702,6 +796,30 @@ def get_feed(self, ) -> asset_service.Feed: r"""Gets details about an asset feed. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_get_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + + request = asset_v1.GetFeedRequest( + name=name, + ) + + # Make the request + response = client.get_feed(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.GetFeedRequest, dict]): The request object. Get asset feed request. @@ -784,6 +902,27 @@ def list_feeds(self, r"""Lists all asset feeds in a parent project/folder/organization. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_list_feeds(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListFeedsRequest( + parent="parent_value", + ) + + # Make the request + response = client.list_feeds(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]): The request object. List asset feeds request. 
@@ -860,6 +999,29 @@ def update_feed(self, ) -> asset_service.Feed: r"""Updates an asset feed configuration. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_update_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + feed = asset_v1.Feed() + feed.name = "name_value" + + request = asset_v1.UpdateFeedRequest( + feed=feed, + ) + + # Make the request + response = client.update_feed(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]): The request object. Update asset feed request. @@ -942,6 +1104,27 @@ def delete_feed(self, ) -> None: r"""Deletes an asset feed. + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_delete_feed(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + project = "my-project-id" + feed = "feed_value" + name = f"projects/{project}/feeds/{feed}" + + request = asset_v1.DeleteFeedRequest( + name=name, + ) + + # Make the request + response = client.delete_feed(request=request) + Args: request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): The request object. @@ -1014,6 +1197,26 @@ def search_all_resources(self, the ``cloudasset.assets.searchAllResources`` permission on the desired scope, otherwise the request will be rejected. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_search_all_resources(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllResourcesRequest( + scope="scope_value", + ) + + # Make the request + page_result = client.search_all_resources(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]): The request object. Search all resources request. 
@@ -1193,6 +1396,26 @@ def search_all_iam_policies(self, ``cloudasset.assets.searchAllIamPolicies`` permission on the desired scope, otherwise the request will be rejected. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_search_all_iam_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.SearchAllIamPoliciesRequest( + scope="scope_value", + ) + + # Make the request + page_result = client.search_all_iam_policies(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]): The request object. Search all IAM policies request. @@ -1348,6 +1571,30 @@ def analyze_iam_policy(self, r"""Analyzes IAM policies to answer which identities have what accesses on which resources. + + + .. code-block:: + + from google.cloud import asset_v1 + + def sample_analyze_iam_policy(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + + request = asset_v1.AnalyzeIamPolicyRequest( + analysis_query=analysis_query, + ) + + # Make the request + response = client.analyze_iam_policy(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]): The request object. A request message for @@ -1415,6 +1662,36 @@ def analyze_iam_policy_longrunning(self, to poll the operation result. The metadata contains the request to help callers to map responses to requests. + + + .. 
code-block:: + + from google.cloud import asset_v1 + + def sample_analyze_iam_policy_longrunning(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + analysis_query = asset_v1.IamPolicyAnalysisQuery() + analysis_query.scope = "scope_value" + + output_config = asset_v1.IamPolicyAnalysisOutputConfig() + output_config.gcs_destination.uri = "uri_value" + + request = asset_v1.AnalyzeIamPolicyLongrunningRequest( + analysis_query=analysis_query, + output_config=output_config, + ) + + # Make the request + operation = client.analyze_iam_policy_longrunning(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]): The request object. A request message for diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py index 8147347e6627..1759b7e38c2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py @@ -28,8 +28,6 @@ async def sample_analyze_iam_policy(): - """Snippet for analyze_iam_policy""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py index 12e57510e8e6..51ef3ab86a18 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py @@ -28,8 +28,6 @@ async def sample_analyze_iam_policy_longrunning(): - """Snippet for analyze_iam_policy_longrunning""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py index a4e998a182bb..eee8fb97ed75 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py @@ -28,8 +28,6 @@ def sample_analyze_iam_policy_longrunning(): - """Snippet for analyze_iam_policy_longrunning""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py index 1a02995511ba..9dd189550952 
100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py @@ -28,8 +28,6 @@ def sample_analyze_iam_policy(): - """Snippet for analyze_iam_policy""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py index bdc83d37df6e..edae4c7f9289 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py @@ -28,8 +28,6 @@ async def sample_batch_get_assets_history(): - """Snippet for batch_get_assets_history""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py index 5a4b1abbaa09..5bf8c8de15fb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py @@ -28,8 +28,6 @@ def sample_batch_get_assets_history(): - """Snippet for batch_get_assets_history""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py index d0b4a37a3ab2..a988dfe5d494 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py @@ -28,8 +28,6 @@ async def sample_create_feed(): - """Snippet for create_feed""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py index 9eb643290ba6..e33028822916 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py @@ -28,8 +28,6 @@ def sample_create_feed(): - """Snippet for create_feed""" - # Create a client client = asset_v1.AssetServiceClient() diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py index d4df7397d433..4439eee886a3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py @@ -28,8 +28,6 @@ async def sample_delete_feed(): - """Snippet for delete_feed""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py index a49c06314044..ac34a49c01c9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py @@ -28,8 +28,6 @@ def sample_delete_feed(): - """Snippet for delete_feed""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py index 4edfb7ae3fb2..f384bea0adfb 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py @@ -28,8 +28,6 @@ async def sample_export_assets(): - """Snippet for export_assets""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py index 62fcbbfff715..4ac84ea71306 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py @@ -28,8 +28,6 @@ def sample_export_assets(): - """Snippet for export_assets""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py index bd91353f3ceb..29cd0a2b165a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -28,8 +28,6 @@ async def sample_get_feed(): - """Snippet for get_feed""" - # Create a 
client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index 493ff224ab48..30849fccb712 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -28,8 +28,6 @@ def sample_get_feed(): - """Snippet for get_feed""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py index e21b4e35c3ca..0a6a0b4a098d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py @@ -28,8 +28,6 @@ async def sample_list_assets(): - """Snippet for list_assets""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py index ef1dd925c0f6..5fad10b12adf 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py @@ -28,8 +28,6 @@ def sample_list_assets(): - """Snippet for list_assets""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py index 31a26bde207b..8eec6bfab483 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py @@ -28,8 +28,6 @@ async def sample_list_feeds(): - """Snippet for list_feeds""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py index 9075f2cd9fee..7aba515f8bd3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py @@ -28,8 +28,6 @@ def sample_list_feeds(): - """Snippet for list_feeds""" - # Create a client client = 
asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py index 3893b85552cf..9be060836a2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py @@ -28,8 +28,6 @@ async def sample_search_all_iam_policies(): - """Snippet for search_all_iam_policies""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py index 53133196b1f2..e39c08295ba9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py @@ -28,8 +28,6 @@ def sample_search_all_iam_policies(): - """Snippet for search_all_iam_policies""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py index d97f46fb4f7a..aba043b4b80c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py @@ -28,8 +28,6 @@ async def sample_search_all_resources(): - """Snippet for search_all_resources""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py index 30f66f6ef4a3..475d8b4ad58d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py @@ -28,8 +28,6 @@ def sample_search_all_resources(): - """Snippet for search_all_resources""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py index 6cebe148df80..e409b05b7182 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py @@ -28,8 +28,6 @@ async def sample_update_feed(): - """Snippet for update_feed""" - # Create a client client = asset_v1.AssetServiceAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py index d2046c2d9b46..214f4886f215 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py @@ -28,8 +28,6 @@ def sample_update_feed(): - """Snippet for update_feed""" - # Create a client client = asset_v1.AssetServiceClient() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json new file mode 100644 index 000000000000..8f871de592ea --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json @@ -0,0 +1,1048 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "AnalyzeIamPolicyLongrunning" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py", + "regionTag": 
"cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "AnalyzeIamPolicyLongrunning" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "AnalyzeIamPolicy" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + 
"shortName": "AnalyzeIamPolicy" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "BatchGetAssetsHistory" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "BatchGetAssetsHistory" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "CreateFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_create_feed_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_CreateFeed_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "CreateFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_create_feed_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_CreateFeed_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "DeleteFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_delete_feed_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_DeleteFeed_async", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "DeleteFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "ExportAssets" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_export_assets_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_ExportAssets_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "ExportAssets" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_export_assets_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_ExportAssets_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "GetFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_get_feed_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_GetFeed_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "GetFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_get_feed_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_GetFeed_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "ListAssets" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_list_assets_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_ListAssets_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "ListAssets" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_list_assets_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_ListAssets_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "ListFeeds" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_list_feeds_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_ListFeeds_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "ListFeeds" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_ListFeeds_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + 
"end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "SearchAllIamPolicies" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "SearchAllIamPolicies" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "SearchAllResources" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py", + "regionTag": 
"cloudasset_generated_asset_v1_AssetService_SearchAllResources_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "SearchAllResources" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "UpdateFeed" + } + }, + "file": "cloudasset_generated_asset_v1_asset_service_update_feed_async.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_UpdateFeed_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "AssetService" + }, + "shortName": "UpdateFeed" + } + }, + "file": 
"cloudasset_generated_asset_v1_asset_service_update_feed_sync.py", + "regionTag": "cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 1863e3521ddd..cd7019d14e3e 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -208,6 +208,31 @@ async def generate_access_token(self, r"""Generates an OAuth 2.0 access token for a service account. + + .. code-block:: + + from google.iam import credentials_v1 + + def sample_generate_access_token(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.GenerateAccessTokenRequest( + name=name, + scope=['scope_value_1', 'scope_value_2'], + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]): The request object. 
@@ -341,6 +366,31 @@ async def generate_id_token(self, r"""Generates an OpenID Connect ID token for a service account. + + .. code-block:: + + from google.iam import credentials_v1 + + def sample_generate_id_token(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.GenerateIdTokenRequest( + name=name, + audience="audience_value", + ) + + # Make the request + response = client.generate_id_token(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]): The request object. @@ -467,6 +517,31 @@ async def sign_blob(self, r"""Signs a blob using a service account's system-managed private key. + + .. code-block:: + + from google.iam import credentials_v1 + + def sample_sign_blob(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.SignBlobRequest( + name=name, + payload=b'payload_blob', + ) + + # Make the request + response = client.sign_blob(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.SignBlobRequest, dict]): The request object. @@ -580,6 +655,31 @@ async def sign_jwt(self, r"""Signs a JWT using a service account's system-managed private key. + + .. 
code-block:: + + from google.iam import credentials_v1 + + def sample_sign_jwt(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.SignJwtRequest( + name=name, + payload="payload_value", + ) + + # Make the request + response = client.sign_jwt(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.SignJwtRequest, dict]): The request object. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 3a856a9eed2a..93876b0369ac 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -386,6 +386,32 @@ def generate_access_token(self, r"""Generates an OAuth 2.0 access token for a service account. + + + .. code-block:: + + from google.iam import credentials_v1 + + def sample_generate_access_token(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.GenerateAccessTokenRequest( + name=name, + scope=['scope_value_1', 'scope_value_2'], + ) + + # Make the request + response = client.generate_access_token(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]): The request object. 
@@ -512,6 +538,32 @@ def generate_id_token(self, r"""Generates an OpenID Connect ID token for a service account. + + + .. code-block:: + + from google.iam import credentials_v1 + + def sample_generate_id_token(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.GenerateIdTokenRequest( + name=name, + audience="audience_value", + ) + + # Make the request + response = client.generate_id_token(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]): The request object. @@ -631,6 +683,32 @@ def sign_blob(self, r"""Signs a blob using a service account's system-managed private key. + + + .. code-block:: + + from google.iam import credentials_v1 + + def sample_sign_blob(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.SignBlobRequest( + name=name, + payload=b'payload_blob', + ) + + # Make the request + response = client.sign_blob(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.SignBlobRequest, dict]): The request object. @@ -737,6 +815,32 @@ def sign_jwt(self, r"""Signs a JWT using a service account's system-managed private key. + + + .. 
code-block:: + + from google.iam import credentials_v1 + + def sample_sign_jwt(): + # Create a client + client = credentials_v1.IAMCredentialsClient() + + # Initialize request argument(s) + project = "my-project-id" + service_account = "service_account_value" + name = f"projects/{project}/serviceAccounts/{service_account}" + + request = credentials_v1.SignJwtRequest( + name=name, + payload="payload_value", + ) + + # Make the request + response = client.sign_jwt(request=request) + + # Handle response + print(response) + Args: request (Union[google.iam.credentials_v1.types.SignJwtRequest, dict]): The request object. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index 55a66bc1257f..650f82bdbab1 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -28,8 +28,6 @@ async def sample_generate_access_token(): - """Snippet for generate_access_token""" - # Create a client client = credentials_v1.IAMCredentialsAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index 9487ea97253f..17fedfa26aa7 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -28,8 +28,6 @@ def sample_generate_access_token(): - """Snippet for generate_access_token""" - # Create a client client = credentials_v1.IAMCredentialsClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py index 03dee14637ae..00d6538ca90a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -28,8 +28,6 @@ async def sample_generate_id_token(): - """Snippet for generate_id_token""" - # Create a client client = credentials_v1.IAMCredentialsAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index fd901f81fc70..71e49cef5809 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -28,8 +28,6 @@ def sample_generate_id_token(): - """Snippet for generate_id_token""" - # Create a client client = credentials_v1.IAMCredentialsClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py index 5929291181b2..b39981bbdd6f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -28,8 +28,6 @@ async def sample_sign_blob(): - """Snippet for sign_blob""" - # Create a client client = credentials_v1.IAMCredentialsAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index d54b6a5e61dc..fbc18e178d73 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -28,8 +28,6 @@ def sample_sign_blob(): - """Snippet for sign_blob""" - # Create a client client = credentials_v1.IAMCredentialsClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py index bb72d422692b..35e865578592 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -28,8 +28,6 @@ async def sample_sign_jwt(): - """Snippet for sign_jwt""" - # Create a client client = credentials_v1.IAMCredentialsAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index 6c0508088528..298bfaf3e0d2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -28,8 +28,6 @@ def sample_sign_jwt(): - """Snippet for sign_jwt""" - # Create a client client = 
credentials_v1.IAMCredentialsClient() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json new file mode 100644 index 000000000000..8217a23d267d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json @@ -0,0 +1,360 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "GenerateAccessToken" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "GenerateAccessToken" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 
47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "GenerateIdToken" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "GenerateIdToken" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "SignBlob" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 
31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "SignBlob" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "SignJwt" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "IAMCredentials" + }, + "shortName": "SignJwt" + } + }, + "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py", + "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 584f5124047e..01dd8b63002f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -203,6 +203,29 @@ async def list_buckets(self, ) -> pagers.ListBucketsAsyncPager: r"""Lists buckets. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.ListBucketsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_buckets(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): The request object. The parameters to `ListBuckets`. @@ -298,6 +321,30 @@ async def get_bucket(self, ) -> logging_config.LogBucket: r"""Gets a bucket. + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.GetBucketRequest( + name=name, + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): The request object. The parameters to `GetBucket`. @@ -352,6 +399,32 @@ async def create_bucket(self, entries. Once a bucket has been created, the region cannot be changed. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.CreateBucketRequest( + parent=parent, + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): The request object. The parameters to `CreateBucket`. @@ -414,6 +487,31 @@ async def update_bucket(self, A buckets region may not be modified after it is created. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.UpdateBucketRequest( + name=name, + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): The request object. The parameters to `UpdateBucket`. @@ -468,6 +566,28 @@ async def delete_bucket(self, state. After 7 days, the bucket will be purged and all logs in the bucket will be permanently deleted. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.DeleteBucketRequest( + name=name, + ) + + # Make the request + response = client.delete_bucket(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): The request object. The parameters to `DeleteBucket`. @@ -514,6 +634,28 @@ async def undelete_bucket(self, r"""Undeletes a bucket. A bucket that has been deleted may be undeleted within the grace period of 7 days. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.UndeleteBucketRequest( + name=name, + ) + + # Make the request + response = client.undelete_bucket(request=request) + Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): The request object. The parameters to `UndeleteBucket`. @@ -560,6 +702,24 @@ async def list_views(self, ) -> pagers.ListViewsAsyncPager: r"""Lists views on a bucket. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): The request object. The parameters to `ListViews`. @@ -647,6 +807,31 @@ async def get_view(self, ) -> logging_config.LogView: r"""Gets a view. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + + request = logging_v2.GetViewRequest( + name=name, + ) + + # Make the request + response = client.get_view(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): The request object. 
The parameters to `GetView`. @@ -702,6 +887,27 @@ async def create_view(self, r"""Creates a view over logs in a bucket. A bucket may contain a maximum of 50 views. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): The request object. The parameters to `CreateView`. @@ -757,6 +963,26 @@ async def update_view(self, r"""Updates a view. This method replaces the following fields in the existing view with values from the new view: ``filter``. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): The request object. The parameters to `UpdateView`. @@ -811,6 +1037,28 @@ async def delete_view(self, ) -> None: r"""Deletes a view from a bucket. + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + + request = logging_v2.DeleteViewRequest( + name=name, + ) + + # Make the request + response = client.delete_view(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): The request object. The parameters to `DeleteView`. @@ -857,6 +1105,28 @@ async def list_sinks(self, ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + + request = logging_v2.ListSinksRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_sinks(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. @@ -957,6 +1227,29 @@ async def get_sink(self, ) -> logging_config.LogSink: r"""Gets a sink. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + + request = logging_v2.GetSinkRequest( + sink_name=sink_name, + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. 
The parameters to `GetSink`. @@ -1059,6 +1352,35 @@ async def create_sink(self, permitted to write to the destination. A sink can export log entries only from the resource owning the sink. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent=parent, + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. @@ -1165,6 +1487,35 @@ async def update_sink(self, The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name=sink_name, + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. @@ -1295,6 +1646,27 @@ async def delete_sink(self, r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + + request = logging_v2.DeleteSinkRequest( + sink_name=sink_name, + ) + + # Make the request + response = client.delete_sink(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. @@ -1377,6 +1749,28 @@ async def list_exclusions(self, ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions in a parent resource. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + + request = logging_v2.ListExclusionsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_exclusions(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): The request object. The parameters to `ListExclusions`. @@ -1477,6 +1871,29 @@ async def get_exclusion(self, ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + + request = logging_v2.GetExclusionRequest( + name=name, + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): The request object. The parameters to `GetExclusion`. @@ -1581,6 +1998,35 @@ async def create_exclusion(self, can be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent=parent, + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): The request object. The parameters to `CreateExclusion`. @@ -1687,6 +2133,35 @@ async def update_exclusion(self, r"""Changes one or more properties of an existing exclusion. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name=name, + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): The request object. The parameters to `UpdateExclusion`. @@ -1805,6 +2280,26 @@ async def delete_exclusion(self, ) -> None: r"""Deletes an exclusion. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + + request = logging_v2.DeleteExclusionRequest( + name=name, + ) + + # Make the request + response = client.delete_exclusion(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): The request object. The parameters to `DeleteExclusion`. @@ -1895,6 +2390,29 @@ async def get_cmek_settings(self, Router `__ for more information. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + name = f"projects/{project}/cmekSettings" + + request = logging_v2.GetCmekSettingsRequest( + name=name, + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to @@ -1977,6 +2495,26 @@ async def update_cmek_settings(self, Router `__ for more information. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 377cfda4537d..9637c628dcd5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -417,6 +417,30 @@ def list_buckets(self, ) -> pagers.ListBucketsPager: r"""Lists buckets. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_list_buckets(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.ListBucketsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_buckets(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): The request object. The parameters to `ListBuckets`. @@ -512,6 +536,31 @@ def get_bucket(self, ) -> logging_config.LogBucket: r"""Gets a bucket. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.GetBucketRequest( + name=name, + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): The request object. The parameters to `GetBucket`. @@ -567,6 +616,33 @@ def create_bucket(self, entries. Once a bucket has been created, the region cannot be changed. + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_create_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + parent = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.CreateBucketRequest( + parent=parent, + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): The request object. The parameters to `CreateBucket`. @@ -630,6 +706,32 @@ def update_bucket(self, A buckets region may not be modified after it is created. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.UpdateBucketRequest( + name=name, + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): The request object. The parameters to `UpdateBucket`. @@ -685,6 +787,29 @@ def delete_bucket(self, state. After 7 days, the bucket will be purged and all logs in the bucket will be permanently deleted. + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.DeleteBucketRequest( + name=name, + ) + + # Make the request + response = client.delete_bucket(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): The request object. The parameters to `DeleteBucket`. @@ -732,6 +857,29 @@ def undelete_bucket(self, r"""Undeletes a bucket. A bucket that has been deleted may be undeleted within the grace period of 7 days. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}" + + request = logging_v2.UndeleteBucketRequest( + name=name, + ) + + # Make the request + response = client.undelete_bucket(request=request) + Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): The request object. The parameters to `UndeleteBucket`. @@ -779,6 +927,25 @@ def list_views(self, ) -> pagers.ListViewsPager: r"""Lists views on a bucket. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_views(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_views(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): The request object. 
The parameters to `ListViews`. @@ -866,6 +1033,32 @@ def get_view(self, ) -> logging_config.LogView: r"""Gets a view. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + + request = logging_v2.GetViewRequest( + name=name, + ) + + # Make the request + response = client.get_view(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): The request object. The parameters to `GetView`. @@ -922,6 +1115,28 @@ def create_view(self, r"""Creates a view over logs in a bucket. A bucket may contain a maximum of 50 views. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client.create_view(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): The request object. The parameters to `CreateView`. @@ -978,6 +1193,27 @@ def update_view(self, r"""Updates a view. This method replaces the following fields in the existing view with values from the new view: ``filter``. + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_update_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client.update_view(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): The request object. The parameters to `UpdateView`. @@ -1033,6 +1269,29 @@ def delete_view(self, ) -> None: r"""Deletes a view from a bucket. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_view(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + bucket = "bucket_value" + view = "view_value" + name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" + + request = logging_v2.DeleteViewRequest( + name=name, + ) + + # Make the request + response = client.delete_view(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): The request object. The parameters to `DeleteView`. @@ -1080,6 +1339,29 @@ def list_sinks(self, ) -> pagers.ListSinksPager: r"""Lists sinks. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_sinks(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + + request = logging_v2.ListSinksRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_sinks(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): The request object. The parameters to `ListSinks`. 
@@ -1172,6 +1454,30 @@ def get_sink(self, ) -> logging_config.LogSink: r"""Gets a sink. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + + request = logging_v2.GetSinkRequest( + sink_name=sink_name, + ) + + # Make the request + response = client.get_sink(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): The request object. The parameters to `GetSink`. @@ -1266,6 +1572,36 @@ def create_sink(self, permitted to write to the destination. A sink can export log entries only from the resource owning the sink. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + parent = f"projects/{project}/sinks/{sink}" + + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent=parent, + sink=sink, + ) + + # Make the request + response = client.create_sink(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): The request object. The parameters to `CreateSink`. @@ -1372,6 +1708,36 @@ def update_sink(self, The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_update_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name=sink_name, + sink=sink, + ) + + # Make the request + response = client.update_sink(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): The request object. The parameters to `UpdateSink`. @@ -1494,6 +1860,28 @@ def delete_sink(self, r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_sink(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + sink = "sink_value" + sink_name = f"projects/{project}/sinks/{sink}" + + request = logging_v2.DeleteSinkRequest( + sink_name=sink_name, + ) + + # Make the request + response = client.delete_sink(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): The request object. The parameters to `DeleteSink`. @@ -1568,6 +1956,29 @@ def list_exclusions(self, ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions in a parent resource. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + + request = logging_v2.ListExclusionsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_exclusions(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): The request object. The parameters to `ListExclusions`. @@ -1660,6 +2071,30 @@ def get_exclusion(self, ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + + request = logging_v2.GetExclusionRequest( + name=name, + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): The request object. The parameters to `GetExclusion`. @@ -1756,6 +2191,36 @@ def create_exclusion(self, can be excluded. You can have up to 10 exclusions in a resource. + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + parent = f"projects/{project}/exclusions/{exclusion}" + + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent=parent, + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): The request object. The parameters to `CreateExclusion`. @@ -1862,6 +2327,36 @@ def update_exclusion(self, r"""Changes one or more properties of an existing exclusion. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name=name, + exclusion=exclusion, + ) + + # Make the request + response = client.update_exclusion(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): The request object. The parameters to `UpdateExclusion`. @@ -1980,6 +2475,27 @@ def delete_exclusion(self, ) -> None: r"""Deletes an exclusion. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + exclusion = "exclusion_value" + name = f"projects/{project}/exclusions/{exclusion}" + + request = logging_v2.DeleteExclusionRequest( + name=name, + ) + + # Make the request + response = client.delete_exclusion(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): The request object. The parameters to `DeleteExclusion`. @@ -2062,6 +2578,30 @@ def get_cmek_settings(self, Router `__ for more information. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + name = f"projects/{project}/cmekSettings" + + request = logging_v2.GetCmekSettingsRequest( + name=name, + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. The parameters to @@ -2145,6 +2685,27 @@ def update_cmek_settings(self, Router `__ for more information. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_cmek_settings(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. 
The parameters to diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index b5ee66a099d6..bf764a2603f9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -199,6 +199,27 @@ async def delete_log(self, deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + log_name = f"projects/{project}/logs/{log}" + + request = logging_v2.DeleteLogRequest( + log_name=log_name, + ) + + # Make the request + response = client.delete_log(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): The request object. The parameters to DeleteLog. @@ -293,6 +314,29 @@ async def write_log_entries(self, maximum of 1000 different resources (projects, organizations, billing accounts or folders) + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. @@ -458,6 +502,29 @@ async def list_log_entries(self, For ways to export log entries, see `Exporting Logs `__. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + resource_names = f"projects/{project}/logs/{log}" + + request = logging_v2.ListLogEntriesRequest( + resource_names=resource_names, + ) + + # Make the request + page_result = client.list_log_entries(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. @@ -590,6 +657,24 @@ async def list_monitored_resource_descriptors(self, r"""Lists the descriptors for monitored resource types used by Logging. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to @@ -660,6 +745,29 @@ async def list_logs(self, or billing accounts. Only logs that have entries are listed. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + parent = f"projects/{project}/logs/{log}" + + request = logging_v2.ListLogsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_logs(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. @@ -760,6 +868,34 @@ def tail_log_entries(self, Until the stream is terminated, it will continue reading logs. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + for response in stream: + print(response) + Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to `TailLogEntries`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 6b5478a60ff2..fc7e8aed39e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -377,6 +377,28 @@ def delete_log(self, deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + log_name = f"projects/{project}/logs/{log}" + + request = logging_v2.DeleteLogRequest( + log_name=log_name, + ) + + # Make the request + response = client.delete_log(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): The request object. The parameters to DeleteLog. @@ -463,6 +485,30 @@ def write_log_entries(self, maximum of 1000 different resources (projects, organizations, billing accounts or folders) + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): The request object. The parameters to WriteLogEntries. @@ -619,6 +665,30 @@ def list_log_entries(self, For ways to export log entries, see `Exporting Logs `__. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + resource_names = f"projects/{project}/logs/{log}" + + request = logging_v2.ListLogEntriesRequest( + resource_names=resource_names, + ) + + # Make the request + page_result = client.list_log_entries(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. @@ -743,6 +813,25 @@ def list_monitored_resource_descriptors(self, r"""Lists the descriptors for monitored resource types used by Logging. + + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): The request object. The parameters to @@ -806,6 +895,30 @@ def list_logs(self, or billing accounts. Only logs that have entries are listed. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + log = "log_value" + parent = f"projects/{project}/logs/{log}" + + request = logging_v2.ListLogsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_logs(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. @@ -898,6 +1011,35 @@ def tail_log_entries(self, Until the stream is terminated, it will continue reading logs. + + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value_1', 'resource_names_value_2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + for response in stream: + print(response) + Args: requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): The request object iterator. The parameters to `TailLogEntries`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index b64d742352fe..bd8825626b4e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -196,6 +196,27 @@ async def list_log_metrics(self, ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + parent = f"projects/{project}" + + request = logging_v2.ListLogMetricsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. @@ -293,6 +314,29 @@ async def get_log_metric(self, ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + + request = logging_v2.GetLogMetricRequest( + metric_name=metric_name, + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. @@ -388,6 +432,34 @@ async def create_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + parent = f"projects/{project}/metrics/{metric}" + + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent=parent, + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. @@ -488,6 +560,34 @@ async def update_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name=metric_name, + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. @@ -594,6 +694,26 @@ async def delete_log_metric(self, ) -> None: r"""Deletes a logs-based metric. + .. code-block:: + + from google.cloud import logging_v2 + + def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + + request = logging_v2.DeleteLogMetricRequest( + metric_name=metric_name, + ) + + # Make the request + response = client.delete_log_metric(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index e13a49a69562..c5575d541856 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -374,6 +374,28 @@ def list_log_metrics(self, ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_list_log_metrics(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + parent = f"projects/{project}" + + request = logging_v2.ListLogMetricsRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_log_metrics(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): The request object. The parameters to ListLogMetrics. @@ -463,6 +485,30 @@ def get_log_metric(self, ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_get_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + + request = logging_v2.GetLogMetricRequest( + metric_name=metric_name, + ) + + # Make the request + response = client.get_log_metric(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): The request object. The parameters to GetLogMetric. 
@@ -550,6 +596,35 @@ def create_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_create_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + parent = f"projects/{project}/metrics/{metric}" + + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent=parent, + metric=metric, + ) + + # Make the request + response = client.create_log_metric(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): The request object. The parameters to CreateLogMetric. @@ -650,6 +725,35 @@ def update_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. + + .. code-block:: + + from google.cloud import logging_v2 + + def sample_update_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name=metric_name, + metric=metric, + ) + + # Make the request + response = client.update_log_metric(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): The request object. The parameters to UpdateLogMetric. @@ -748,6 +852,27 @@ def delete_log_metric(self, ) -> None: r"""Deletes a logs-based metric. + + .. 
code-block:: + + from google.cloud import logging_v2 + + def sample_delete_log_metric(): + # Create a client + client = logging_v2.MetricsServiceV2Client() + + # Initialize request argument(s) + project = "my-project-id" + metric = "metric_value" + metric_name = f"projects/{project}/metrics/{metric}" + + request = logging_v2.DeleteLogMetricRequest( + metric_name=metric_name, + ) + + # Make the request + response = client.delete_log_metric(request=request) + Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): The request object. The parameters to DeleteLogMetric. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py index 69dbb78e0be6..3931b1d669e4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -28,8 +28,6 @@ async def sample_create_bucket(): - """Snippet for create_bucket""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index 5aab6d38777c..b850efd1fb72 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -28,8 +28,6 @@ def sample_create_bucket(): - """Snippet for create_bucket""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py index 331d889862b6..432f9d86a8f3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -28,8 +28,6 @@ async def sample_create_exclusion(): - """Snippet for create_exclusion""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index 340489a8515e..e17a5e04e730 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -28,8 +28,6 @@ def sample_create_exclusion(): - """Snippet for create_exclusion""" - # Create a client client = 
logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py index 72862fea5ff8..5d2bfcf7d270 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -28,8 +28,6 @@ async def sample_create_sink(): - """Snippet for create_sink""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index 8952205a4d8a..372a22657be4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -28,8 +28,6 @@ def sample_create_sink(): - """Snippet for create_sink""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py index 555d13d0cb81..4cd7a79399bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py @@ -28,8 +28,6 @@ async def sample_create_view(): - """Snippet for create_view""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py index 518426012d66..562fb087b86f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py @@ -28,8 +28,6 @@ def sample_create_view(): - """Snippet for create_view""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py index 6f4783434a49..0ff377493e6d 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py @@ -28,8 +28,6 @@ async def sample_delete_bucket(): - """Snippet for delete_bucket""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py index 05caf82171e7..b3cc0f22fca5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py @@ -28,8 +28,6 @@ def sample_delete_bucket(): - """Snippet for delete_bucket""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py index 9dc81ab344d7..f51384574efd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py 
@@ -28,8 +28,6 @@ async def sample_delete_exclusion(): - """Snippet for delete_exclusion""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py index 06234bd4c5f3..f4fd093b5a9c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py @@ -28,8 +28,6 @@ def sample_delete_exclusion(): - """Snippet for delete_exclusion""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py index 9c16d136bd4c..35c3144c6c83 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py @@ -28,8 +28,6 @@ async def sample_delete_sink(): - """Snippet for delete_sink""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py index 03967671a256..ec1cf8acd27c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py @@ -28,8 +28,6 @@ def sample_delete_sink(): - """Snippet for delete_sink""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py index da713a45c19f..596d0ab2082f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py @@ -28,8 +28,6 @@ async def sample_delete_view(): - """Snippet for delete_view""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py index 6e228b20c491..f65d86c656a8 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py @@ -28,8 +28,6 @@ def sample_delete_view(): - """Snippet for delete_view""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py index 450137ed5267..e716961730e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -28,8 +28,6 @@ async def sample_get_bucket(): - """Snippet for get_bucket""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index 2ca4765dc9e8..5a84535f236e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -28,8 +28,6 @@ def sample_get_bucket(): - 
"""Snippet for get_bucket""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py index 46dbf2c23276..6335aa131fa5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -28,8 +28,6 @@ async def sample_get_cmek_settings(): - """Snippet for get_cmek_settings""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index 8aafd34f4995..f7109b8dd0c3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -28,8 +28,6 @@ def sample_get_cmek_settings(): - """Snippet for get_cmek_settings""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py index d32f26250646..461762b5a14e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -28,8 +28,6 @@ async def sample_get_exclusion(): - """Snippet for get_exclusion""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index 4e80012a4a33..9a99f3a88405 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -28,8 +28,6 @@ def sample_get_exclusion(): - """Snippet for get_exclusion""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py index b92d24eefab7..fac7632db442 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -28,8 +28,6 @@ async def sample_get_sink(): - """Snippet for get_sink""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index 008e2455abf1..b18072a4e19f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -28,8 +28,6 @@ def sample_get_sink(): - """Snippet for get_sink""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py index 8b76d7edbd52..08d07c7536b2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -28,8 +28,6 @@ async def sample_get_view(): - """Snippet for get_view""" - # 
Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index 0e21b0169983..775157e1df20 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -28,8 +28,6 @@ def sample_get_view(): - """Snippet for get_view""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py index a530c83b5c14..d2ed2615b257 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -28,8 +28,6 @@ async def sample_list_buckets(): - """Snippet for list_buckets""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index c6d629794716..3824790fe5b5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -28,8 +28,6 @@ def sample_list_buckets(): - """Snippet for list_buckets""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py index 1a9db6155509..f8efb65633c9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -28,8 +28,6 @@ async def sample_list_exclusions(): - """Snippet for list_exclusions""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index 19ccc14f56f1..ea485dd38373 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -28,8 +28,6 @@ def sample_list_exclusions(): - """Snippet for list_exclusions""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py index 739cb31262d9..87098906f3e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -28,8 +28,6 @@ async def sample_list_sinks(): - """Snippet for list_sinks""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index 534f3e9f2741..c4ed69141cab 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -28,8 +28,6 @@ def 
sample_list_sinks(): - """Snippet for list_sinks""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py index a4843f371e5a..f53490638e1f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py @@ -28,8 +28,6 @@ async def sample_list_views(): - """Snippet for list_views""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py index d32a0aa11008..4ae6c7d60f40 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py @@ -28,8 +28,6 @@ def sample_list_views(): - """Snippet for list_views""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py index 5fa49940a1a4..8a8161028fb4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py @@ -28,8 +28,6 @@ async def sample_undelete_bucket(): - """Snippet for undelete_bucket""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py index c2804473a5bc..d9a11f45dd94 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py @@ -28,8 +28,6 @@ def sample_undelete_bucket(): - """Snippet for undelete_bucket""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py index bd49a32b7f2e..595c4a8ffa24 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ -28,8 +28,6 @@ async def sample_update_bucket(): - """Snippet for update_bucket""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index de20ddf1e166..cb294ae3f62a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -28,8 +28,6 @@ def sample_update_bucket(): - """Snippet for update_bucket""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py index cd01d5b04a81..74e24c67a79e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py @@ -28,8 +28,6 @@ async def sample_update_cmek_settings(): - """Snippet for update_cmek_settings""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py index a679dfa44109..5e19a86ba4fa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py @@ -28,8 +28,6 @@ def sample_update_cmek_settings(): - """Snippet for update_cmek_settings""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py index 60d503ac15a8..b4d79d2d76be 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -28,8 +28,6 @@ async def sample_update_exclusion(): - """Snippet for update_exclusion""" - # 
Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index 48726f9222fe..a3772807ba0e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -28,8 +28,6 @@ def sample_update_exclusion(): - """Snippet for update_exclusion""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py index 096a88890b86..d43e5ac5e437 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -28,8 +28,6 @@ async def sample_update_sink(): - """Snippet for update_sink""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index 35977595848b..ca50c1e35337 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -28,8 +28,6 @@ def sample_update_sink(): - """Snippet for update_sink""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py index 5177171f8fc2..3d4681021c0a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py @@ -28,8 +28,6 @@ async def sample_update_view(): - """Snippet for update_view""" - # Create a client client = logging_v2.ConfigServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py index 164e01f49b30..9f134431d045 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py 
+++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py @@ -28,8 +28,6 @@ def sample_update_view(): - """Snippet for update_view""" - # Create a client client = logging_v2.ConfigServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py index 1c9bc101aaf8..25f13bae6a5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py @@ -28,8 +28,6 @@ async def sample_delete_log(): - """Snippet for delete_log""" - # Create a client client = logging_v2.LoggingServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py index 0b329d55a622..daa5767c497b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py @@ -28,8 +28,6 @@ def sample_delete_log(): - """Snippet for delete_log""" - # Create a client client = logging_v2.LoggingServiceV2Client() diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py index ac069139a6c8..fb156a9757ae 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -28,8 +28,6 @@ async def sample_list_log_entries(): - """Snippet for list_log_entries""" - # Create a client client = logging_v2.LoggingServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index 227e887f0e28..74968ae551d0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -28,8 +28,6 @@ def sample_list_log_entries(): - """Snippet for list_log_entries""" - # Create a client client = logging_v2.LoggingServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py index 2fe01050d299..17362e9d1756 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -28,8 +28,6 @@ async def sample_list_logs(): - """Snippet for list_logs""" - # Create a client client = logging_v2.LoggingServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 5c619ad49dea..12491dadf88e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -28,8 +28,6 @@ def sample_list_logs(): - """Snippet for list_logs""" - # Create a client client = logging_v2.LoggingServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py index 6734d6a5c84a..35fd5d0b001e 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -28,8 +28,6 @@ async def sample_list_monitored_resource_descriptors(): - """Snippet for list_monitored_resource_descriptors""" - # Create a client client = logging_v2.LoggingServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py index 89da0c9c6765..05cdb4a5aaad 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -28,8 +28,6 @@ def sample_list_monitored_resource_descriptors(): - """Snippet for list_monitored_resource_descriptors""" - # Create a client client = logging_v2.LoggingServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 1c6e1db5bebc..6ddc30b036c5 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -28,8 +28,6 @@ async def sample_tail_log_entries(): - """Snippet for tail_log_entries""" - # Create a client client = logging_v2.LoggingServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index e9a5dfc2ec0a..c01d944a8f5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -28,8 +28,6 @@ def sample_tail_log_entries(): - """Snippet for tail_log_entries""" - # Create a client client = logging_v2.LoggingServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py index 71120d98a2d9..5e019280f3af 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py @@ -28,8 +28,6 @@ async def sample_write_log_entries(): - """Snippet for write_log_entries""" - # Create a client client = logging_v2.LoggingServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py index 7da931be50ab..9e03979e5cf8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py @@ -28,8 +28,6 @@ def sample_write_log_entries(): - """Snippet for write_log_entries""" - # Create a client client = logging_v2.LoggingServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py index de5a085ddaf2..212444858d12 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -28,8 +28,6 @@ async def sample_create_log_metric(): - """Snippet for create_log_metric""" - # Create a 
client client = logging_v2.MetricsServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index e3bd08822c86..e81eb133b417 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -28,8 +28,6 @@ def sample_create_log_metric(): - """Snippet for create_log_metric""" - # Create a client client = logging_v2.MetricsServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py index 5ed756255167..876f2bbad825 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py @@ -28,8 +28,6 @@ async def sample_delete_log_metric(): - """Snippet for delete_log_metric""" - # Create a client client = logging_v2.MetricsServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py index 012322113c12..fb2eb6f89766 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py @@ -28,8 +28,6 @@ def sample_delete_log_metric(): - """Snippet for delete_log_metric""" - # Create a client client = logging_v2.MetricsServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py index cc0f0c553608..a9b77fe6fb96 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -28,8 +28,6 @@ async def sample_get_log_metric(): - """Snippet for get_log_metric""" - # Create a client client = logging_v2.MetricsServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index fb8499456f29..4e18e335b9fe 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -28,8 +28,6 @@ def sample_get_log_metric(): - """Snippet for get_log_metric""" - # Create a client client = logging_v2.MetricsServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py index c640c5972412..5742806d0db1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -28,8 +28,6 @@ async def sample_list_log_metrics(): - """Snippet for list_log_metrics""" - # Create a client client = logging_v2.MetricsServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index 734d4c869b8d..974135981041 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -28,8 +28,6 @@ def sample_list_log_metrics(): - """Snippet for list_log_metrics""" - # Create a client client = logging_v2.MetricsServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py index 05fb745bb711..dcb646539374 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -28,8 +28,6 @@ async def sample_update_log_metric(): - """Snippet for update_log_metric""" - # Create a client client = logging_v2.MetricsServiceV2AsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index 6e37f22e5979..ad87904dc242 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -28,8 +28,6 @@ def sample_update_log_metric(): - """Snippet for update_log_metric""" - # Create a client client 
= logging_v2.MetricsServiceV2Client() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json new file mode 100644 index 000000000000..c5479a62a740 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -0,0 +1,2966 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_bucket_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_bucket_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { 
+ "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_exclusion_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_sink_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_sink_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_view_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_create_view_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" 
+ }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_bucket_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_sink_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_sink_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync", + "segments": [ + { + "end": 46, + "start": 27, 
+ "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_view_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteView_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_delete_view_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_bucket_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetBucket_async", + 
"segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_bucket_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetCmekSettings" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetCmekSettings" + } + }, + "file": 
"logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_exclusion_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { 
+ "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_sink_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetSink_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_sink_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetSink_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_view_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetView_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "GetView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_get_view_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetView_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_buckets_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_buckets_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListExclusions" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_exclusions_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListExclusions" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListSinks" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_sinks_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListSinks" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_sinks_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListViews" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_views_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListViews_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "ListViews" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_list_views_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListViews_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, 
+ "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_bucket_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + 
}, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_bucket_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateCmekSettings" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateCmekSettings" + } + }, + "file": 
"logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_exclusion_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateExclusion" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": 
"RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_sink_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSink" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_sink_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_view_async.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateView_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateView" + } + }, + "file": "logging_generated_logging_v2_config_service_v2_update_view_sync.py", + "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_delete_log_async.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_delete_log_sync.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 
31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_list_logs_async.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + 
"end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_list_logs_sync.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py", + 
"regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": 
"WriteLogEntries" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + } + }, + "file": "logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py", + "regionTag": "logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "CreateLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "CreateLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "DeleteLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "DeleteLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "GetLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "GetLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "ListLogMetrics" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "ListLogMetrics" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync", + "segments": [ + { + "end": 46, + "start": 27, + "type": "FULL" + }, + { + "end": 46, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "UpdateLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py", + "regionTag": "logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MetricsServiceV2" + }, + "shortName": "UpdateLogMetric" + } + }, + "file": "logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py", + "regionTag": 
"logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 7ad1b6a79601..2c17838465a0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -228,6 +228,29 @@ async def list_instances(self, regions available to the project are queried, and the results are aggregated. + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_list_instances(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + + request = redis_v1.ListInstancesRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_instances(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]): The request object. Request for @@ -317,6 +340,30 @@ async def get_instance(self, ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_get_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.GetInstanceRequest( + name=name, + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]): The request object. Request for @@ -406,6 +453,39 @@ async def create_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_create_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.CreateInstanceRequest( + parent=parent, + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]): The request object. Request for @@ -523,6 +603,33 @@ async def update_instance(self, operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_update_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]): The request object. Request for @@ -627,6 +734,34 @@ async def upgrade_instance(self, r"""Upgrades Redis instance to the newer Redis version specified in the request. + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_upgrade_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.UpgradeInstanceRequest( + name=name, + redis_version="redis_version_value", + ) + + # Make the request + operation = client.upgrade_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.UpgradeInstanceRequest, dict]): The request object. Request for @@ -732,6 +867,32 @@ async def import_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_import_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + input_config = redis_v1.InputConfig() + input_config.gcs_source.uri = "uri_value" + + request = redis_v1.ImportInstanceRequest( + name="name_value", + input_config=input_config, + ) + + # Make the request + operation = client.import_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.ImportInstanceRequest, dict]): The request object. Request for @@ -833,6 +994,32 @@ async def export_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_export_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + output_config = redis_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + + request = redis_v1.ExportInstanceRequest( + name="name_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.ExportInstanceRequest, dict]): The request object. Request for @@ -932,6 +1119,33 @@ async def failover_instance(self, replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. + + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_failover_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.FailoverInstanceRequest( + name=name, + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.FailoverInstanceRequest, dict]): The request object. Request for @@ -1030,6 +1244,33 @@ async def delete_instance(self, r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_delete_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.DeleteInstanceRequest( + name=name, + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]): The request object. 
Request for diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 067f20f27188..6f2397c49316 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -406,6 +406,30 @@ def list_instances(self, regions available to the project are queried, and the results are aggregated. + + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_list_instances(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + + request = redis_v1.ListInstancesRequest( + parent=parent, + ) + + # Make the request + page_result = client.list_instances(request=request) + for response in page_result: + print(response) + Args: request (Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]): The request object. Request for @@ -495,6 +519,31 @@ def get_instance(self, ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_get_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.GetInstanceRequest( + name=name, + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle response + print(response) + Args: request (Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]): The request object. 
Request for @@ -584,6 +633,40 @@ def create_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_create_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + parent = f"projects/{project}/locations/{location}" + + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.CreateInstanceRequest( + parent=parent, + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]): The request object. Request for @@ -701,6 +784,34 @@ def update_instance(self, operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_update_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]): The request object. Request for @@ -805,6 +916,35 @@ def upgrade_instance(self, r"""Upgrades Redis instance to the newer Redis version specified in the request. + + + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_upgrade_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.UpgradeInstanceRequest( + name=name, + redis_version="redis_version_value", + ) + + # Make the request + operation = client.upgrade_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.UpgradeInstanceRequest, dict]): The request object. Request for @@ -910,6 +1050,33 @@ def import_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_import_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + input_config = redis_v1.InputConfig() + input_config.gcs_source.uri = "uri_value" + + request = redis_v1.ImportInstanceRequest( + name="name_value", + input_config=input_config, + ) + + # Make the request + operation = client.import_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.ImportInstanceRequest, dict]): The request object. Request for @@ -1011,6 +1178,33 @@ def export_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. + + + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_export_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + output_config = redis_v1.OutputConfig() + output_config.gcs_destination.uri = "uri_value" + + request = redis_v1.ExportInstanceRequest( + name="name_value", + output_config=output_config, + ) + + # Make the request + operation = client.export_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.ExportInstanceRequest, dict]): The request object. Request for @@ -1110,6 +1304,34 @@ def failover_instance(self, replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. + + + .. code-block:: + + from google.cloud import redis_v1 + + def sample_failover_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.FailoverInstanceRequest( + name=name, + ) + + # Make the request + operation = client.failover_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.FailoverInstanceRequest, dict]): The request object. Request for @@ -1208,6 +1430,34 @@ def delete_instance(self, r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. + + + .. 
code-block:: + + from google.cloud import redis_v1 + + def sample_delete_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + project = "my-project-id" + location = "us-central1" + instance = "instance_value" + name = f"projects/{project}/locations/{location}/instances/{instance}" + + request = redis_v1.DeleteInstanceRequest( + name=name, + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + print(response) + Args: request (Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]): The request object. Request for diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py index 46b59320bfb5..39fb9f169ce0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -28,8 +28,6 @@ async def sample_create_instance(): - """Snippet for create_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index c0b84c7c52c7..fee1cc58b1a1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -28,8 +28,6 @@ def sample_create_instance(): - """Snippet for create_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py index b63f325b0979..0447c772f5ab 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py @@ -28,8 +28,6 @@ async def sample_delete_instance(): - """Snippet for delete_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index 5a081bbc7f6c..20970e507f0c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -28,8 +28,6 @@ def sample_delete_instance(): - """Snippet for delete_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py index 302890bb283c..22bcf3e41363 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py @@ -28,8 +28,6 @@ async def sample_export_instance(): - """Snippet for export_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py index adadccb21450..3524ddc9aa0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py @@ -28,8 +28,6 @@ def sample_export_instance(): - """Snippet for export_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py index c89d149c4c83..d0ce830de5f8 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -28,8 +28,6 @@ async def sample_failover_instance(): - """Snippet for failover_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index 22295a3f6e2c..f322e8f0cf16 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -28,8 +28,6 @@ def sample_failover_instance(): - """Snippet for failover_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py index 4a024f308ea8..6be04525a95f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -28,8 +28,6 @@ async def sample_get_instance(): - """Snippet for get_instance""" - # Create a client client 
= redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index 5ea1e2f7fcf6..f6c3a5e4f5dd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -28,8 +28,6 @@ def sample_get_instance(): - """Snippet for get_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py index 739afbf6b01a..f1a65c3e09df 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py @@ -28,8 +28,6 @@ async def sample_import_instance(): - """Snippet for import_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py index 2f03a78c6f0d..e246a349ca11 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py @@ -28,8 +28,6 @@ def sample_import_instance(): - """Snippet for import_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py index 60ae031bf21a..35153ea3ff41 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -28,8 +28,6 @@ async def sample_list_instances(): - """Snippet for list_instances""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index f428f6a9e121..6df89ddf35d1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -28,8 +28,6 @@ def sample_list_instances(): - """Snippet for list_instances""" - # Create a client client = 
redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py index 3ecf9137601a..89af4b83b5ea 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py @@ -28,8 +28,6 @@ async def sample_update_instance(): - """Snippet for update_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py index 16da68c748c0..83a3b7476486 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py @@ -28,8 +28,6 @@ def sample_update_instance(): - """Snippet for update_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py index c00e66a57948..5b23157fd287 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -28,8 +28,6 @@ async def sample_upgrade_instance(): - """Snippet for upgrade_instance""" - # Create a client client = redis_v1.CloudRedisAsyncClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index 77052b6d5586..602c18ffdc9e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -28,8 +28,6 @@ def sample_upgrade_instance(): - """Snippet for upgrade_instance""" - # Create a client client = redis_v1.CloudRedisClient() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json new file mode 100644 index 000000000000..9237cc8828d5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json @@ -0,0 +1,773 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "CreateInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_create_instance_async.py", + "regionTag": 
"redis_generated_redis_v1_CloudRedis_CreateInstance_async", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "CreateInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_create_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_sync", + "segments": [ + { + "end": 57, + "start": 27, + "type": "FULL" + }, + { + "end": 57, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 58, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "DeleteInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_delete_instance_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "DeleteInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_delete_instance_sync.py", + "regionTag": 
"redis_generated_redis_v1_CloudRedis_DeleteInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "ExportInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_export_instance_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_ExportInstance_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "ExportInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_export_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_ExportInstance_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "FailoverInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_failover_instance_async.py", + 
"regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "FailoverInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_failover_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "GetInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_get_instance_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_GetInstance_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "GetInstance" + } + }, + "file": 
"redis_generated_redis_v1_cloud_redis_get_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_GetInstance_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "ImportInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_import_instance_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_ImportInstance_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "ImportInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_import_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_ImportInstance_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": 
"ListInstances" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_list_instances_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "ListInstances" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_list_instances_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "UpdateInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_update_instance_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_UpdateInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": 
"UpdateInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_update_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_UpdateInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 43, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 44, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "UpgradeInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "CloudRedis" + }, + "shortName": "UpgradeInstance" + } + }, + "file": "redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py", + "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index 649e56a88ba1..a0be0b9e2b03 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -28,8 +28,6 @@ async def sample_list_resources(): - """Snippet for list_resources""" - # Create a client client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index 8892452b84b5..ddab64b202a4 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -28,8 +28,6 @@ def sample_list_resources(): - """Snippet for list_resources""" - # Create a client client = mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py index 7e9605052efd..2e0013366b67 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -28,8 +28,6 @@ async def sample_method_bidi_streaming(): - """Snippet for method_bidi_streaming""" - # Create a client 
client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index 269fe197326c..50981f235622 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -28,8 +28,6 @@ def sample_method_bidi_streaming(): - """Snippet for method_bidi_streaming""" - # Create a client client = mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py index 392241b5e28a..a66fcd660a01 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py @@ -28,8 +28,6 @@ async def sample_method_lro_signatures(): - """Snippet for method_lro_signatures""" - # Create a client client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py index e0fa332206d0..2a8fd20deb6d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py +++ 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py @@ -28,8 +28,6 @@ def sample_method_lro_signatures(): - """Snippet for method_lro_signatures""" - # Create a client client = mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py index 85cf60ef6cbe..36f286da93fd 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py @@ -28,8 +28,6 @@ async def sample_method_one_signature(): - """Snippet for method_one_signature""" - # Create a client client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py index d09678e58a42..fad4a7d79539 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py @@ -28,8 +28,6 @@ def sample_method_one_signature(): - """Snippet for method_one_signature""" - # Create a client client = mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py index 5dfc1b09badf..4a397d798500 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py @@ -28,8 +28,6 @@ async def sample_method_server_streaming(): - """Snippet for method_server_streaming""" - # Create a client client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py index 92782bcad353..1701a3abc2e0 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py @@ -28,8 +28,6 @@ def sample_method_server_streaming(): - """Snippet for method_server_streaming""" - # Create a client client = mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py index 2bc72ff6d22f..4a6d9f682726 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py @@ -28,8 +28,6 @@ async def sample_one_of_method(): - """Snippet for one_of_method""" - # Create a client client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py index 4f074994b71b..92a5bef4b412 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py @@ -28,8 +28,6 @@ async def sample_one_of_method_required_field(): - """Snippet for one_of_method_required_field""" - # Create a client client = mollusca_v1.SnippetsAsyncClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py index 6e480d50007f..3a3c4818c36a 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py @@ -28,8 +28,6 @@ def sample_one_of_method_required_field(): - """Snippet for one_of_method_required_field""" - # Create a client client = mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py index ce2a7288433d..7372dfbd76e2 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py @@ -28,8 +28,6 @@ def sample_one_of_method(): - """Snippet for one_of_method""" - # Create a client client = 
mollusca_v1.SnippetsClient() diff --git a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json new file mode 100644 index 000000000000..c5badb1099b7 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json @@ -0,0 +1,611 @@ +{ + "snippets": [ + { + "clientMethod": { + "method": { + "fullName": "MethodServerStreaming", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync", + "segment": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "fullName": "OneOfMethodRequiredField", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync", + "segment": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "fullName": "OneOfMethod", + "service": { + "shortName": "Snippets" + } + } + }, + 
"file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync", + "segment": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "fullName": "MethodBidiStreaming", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync", + "segment": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "fullName": "ListResources", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_sync", + "segment": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + 
"method": { + "fullName": "MethodOneSignature", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync", + "segment": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "fullName": "MethodLroSignatures", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync", + "segment": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "MethodBidiStreaming", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async", + "segment": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "ListResources", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_list_resources_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_async", + "segment": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "OneOfMethodRequiredField", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async", + "segment": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "MethodServerStreaming", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async", + "segment": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "OneOfMethod", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async", + "segment": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "MethodOneSignature", + "service": { + "shortName": "Snippets" + } + } + }, + "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async", + "segment": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "fullName": "MethodLroSignatures", + "service": { + "shortName": "Snippets" + } + } + }, + "file": 
"samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async", + "segment": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/packages/gapic-generator/tests/unit/__init__.py b/packages/gapic-generator/tests/unit/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/gapic-generator/tests/unit/samplegen/common_types.py b/packages/gapic-generator/tests/unit/common_types.py similarity index 100% rename from packages/gapic-generator/tests/unit/samplegen/common_types.py rename to packages/gapic-generator/tests/unit/common_types.py diff --git a/packages/gapic-generator/tests/unit/generator/__init__.py b/packages/gapic-generator/tests/unit/generator/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 62a12df90c7f..ebf0367194ef 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import json from textwrap import dedent from typing import Mapping from unittest import mock @@ -24,13 +25,25 @@ from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse from gapic.generator import generator -from gapic.samplegen_utils import types, yaml +from gapic.samplegen_utils import snippet_metadata_pb2, types, yaml +from ..common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, DummyMessageTypePB, + DummyService, DummyMethod, message_factory, enum_factory) + from gapic.schema import api from gapic.schema import naming from gapic.schema import wrappers from gapic.utils import Options +def mock_generate_sample(*args, **kwargs): + dummy_snippet_metadata = snippet_metadata_pb2.Snippet() + dummy_snippet_metadata.client_method.method.service.short_name = args[0]["service"].split( + ".")[-1] + dummy_snippet_metadata.client_method.method.short_name = args[0]['rpc'] + + return "", dummy_snippet_metadata + + def test_custom_template_directory(): # Create a generator. 
opts = Options.build("python-gapic-templates=/templates/") @@ -51,8 +64,8 @@ def test_get_response(): lt.assert_called_once() gt.assert_has_calls( [ - mock.call("foo/bar/baz.py.j2"), mock.call("molluscs/squid/sample.py.j2"), + mock.call("foo/bar/baz.py.j2"), ] ) assert len(cgr.file) == 1 @@ -71,8 +84,8 @@ def test_get_response_ignores_empty_files(): lt.assert_called_once() gt.assert_has_calls( [ - mock.call("foo/bar/baz.py.j2"), mock.call("molluscs/squid/sample.py.j2"), + mock.call("foo/bar/baz.py.j2"), ] ) assert len(cgr.file) == 0 @@ -93,8 +106,8 @@ def test_get_response_ignores_private_files(): lt.assert_called_once() gt.assert_has_calls( [ - mock.call("foo/bar/baz.py.j2"), mock.call("molluscs/squid/sample.py.j2"), + mock.call("foo/bar/baz.py.j2"), ] ) assert len(cgr.file) == 1 @@ -107,7 +120,6 @@ def test_get_response_fails_invalid_file_paths(): with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: lt.return_value = [ "foo/bar/%service/%proto/baz.py.j2", - "molluscs/squid/sample.py.j2", ] with pytest.raises(ValueError) as ex: g.get_response(api_schema=make_api(), @@ -403,13 +415,8 @@ def test_parse_sample_paths(fs): Options.build("samples=sampledir/,") -@mock.patch( - "gapic.samplegen.samplegen.generate_sample", return_value="", -) @mock.patch("time.gmtime",) -def test_samplegen_config_to_output_files( - mock_gmtime, mock_generate_sample, fs, -): +def test_samplegen_config_to_output_files(mock_gmtime, fs): # These time values are nothing special, # they just need to be deterministic. returner = mock.MagicMock() @@ -431,9 +438,11 @@ def test_samplegen_config_to_output_files( samples: - id: squid_sample region_tag: humboldt_tag - rpc: get_squid_streaming + service: Mollusc.v1.Mollusc + rpc: GetSquidStreaming - region_tag: clam_sample - rpc: get_clam + service: Mollusc.v1.Mollusc + rpc: GetClam """ ), ) @@ -442,57 +451,84 @@ def test_samplegen_config_to_output_files( # Need to have the sample template visible to the generator. 
g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api(naming=naming.NewNaming( - name="Mollusc", version="v6")) - actual_response = g.get_response( - api_schema, opts=Options.build("")) - expected_response = CodeGeneratorResponse( - file=[ - CodeGeneratorResponse.File( - name="samples/generated_samples/squid_sample.py", content="\n",), - CodeGeneratorResponse.File( - name="samples/generated_samples/clam_sample.py", content="\n",), - # TODO(busunkim): Re-enable manifest generation once metadata - # format has been formalized. - # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue - # CodeGeneratorResponse.File( - # name="samples/generated_samples/mollusc.v6.python.21120601.131313.manifest.yaml", - # content=dedent( - # """\ - # --- - # type: manifest/samples - # schema_version: 3 - # python: &python - # environment: python - # bin: python3 - # base_path: samples - # invocation: '{bin} {path} @args' - # samples: - # - <<: *python - # sample: squid_sample - # path: '{base_path}/squid_sample.py' - # region_tag: humboldt_tag - # - <<: *python - # sample: clam_sample - # path: '{base_path}/clam_sample.py' - # region_tag: clam_sample - # """ - # ), - # ), - ] - ) - expected_response.supported_features |= ( - CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + name="Mollusc", + methods={ + # For this test the generator only cares about the dictionary keys + "GetSquidStreaming": DummyMethod(), + "GetClam": DummyMethod(), + }, + )}, + naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), ) - assert actual_response == expected_response + with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): + actual_response = g.get_response( + api_schema, 
opts=Options.build("")) + + expected_snippet_index_json = { + "snippets": [ + { + "clientMethod": { + "method": { + "shortName": "GetSquidStreaming", + "service": { + "shortName": "Mollusc" + } + } + }, + "file": "squid_sample.py", + "segments": [ + {"type": "FULL"}, + {"type": "SHORT"}, + {"type": "CLIENT_INITIALIZATION"}, + {"type": "REQUEST_INITIALIZATION"}, + {"type": "REQUEST_EXECUTION"}, + {"type": "RESPONSE_HANDLING"} + ] + }, + { + "clientMethod": { + "method": { + "shortName": "GetClam", + "service": { + "shortName": "Mollusc" + } + } + }, + "file": "clam_sample.py", + "segments": [ + {"type": "FULL"}, + {"type": "SHORT"}, + {"type": "CLIENT_INITIALIZATION"}, + {"type": "REQUEST_INITIALIZATION"}, + {"type": "REQUEST_EXECUTION"}, + {"type": "RESPONSE_HANDLING"} + ] + } + ] + } + + assert actual_response.supported_features == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + + assert len(actual_response.file) == 3 + assert actual_response.file[0] == CodeGeneratorResponse.File( + name="samples/generated_samples/squid_sample.py", content="\n",) + assert actual_response.file[1] == CodeGeneratorResponse.File( + name="samples/generated_samples/clam_sample.py", content="\n",) + + assert actual_response.file[2].name == "samples/generated_samples/snippet_metadata_mollusc_v1.json" + assert json.loads( + actual_response.file[2].content) == expected_snippet_index_json @mock.patch( "gapic.samplegen.samplegen.generate_sample_specs", return_value=[] ) @mock.patch( - "gapic.samplegen.samplegen.generate_sample", return_value="", + "gapic.samplegen.samplegen.generate_sample", return_value=("", snippet_metadata_pb2.Snippet()), ) def test_generate_autogen_samples(mock_generate_sample, mock_generate_specs): opts = Options.build("autogen-snippets") @@ -513,11 +549,8 @@ def test_generate_autogen_samples(mock_generate_sample, mock_generate_specs): ) -@mock.patch( - "gapic.samplegen.samplegen.generate_sample", return_value="", -) @mock.patch("time.gmtime",) -def 
test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): +def test_samplegen_id_disambiguation(mock_gmtime, fs): # These time values are nothing special, # they just need to be deterministic. returner = mock.MagicMock() @@ -543,12 +576,15 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): samples: - id: squid_sample region_tag: humboldt_tag - rpc: get_squid_streaming + rpc: GetSquidStreaming + service: Mollusc.v1.Mollusc # Note that this region tag collides with the id of the previous sample. - region_tag: squid_sample - rpc: get_squid_streaming + rpc: GetSquidStreaming + service: Mollusc.v1.Mollusc # No id or region tag. - - rpc: get_squid_streaming + - rpc: GetSquidStreaming + service: Mollusc.v1.Mollusc """ ), ) @@ -556,57 +592,98 @@ def test_samplegen_id_disambiguation(mock_gmtime, mock_generate_sample, fs): # Need to have the sample template visible to the generator. g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api(naming=naming.NewNaming( - name="Mollusc", version="v6")) - actual_response = g.get_response(api_schema, + api_schema = DummyApiSchema( + services={"Mollusc": DummyService( + name="Mollusc", + methods={ + # The generator only cares about the dictionary keys + "GetSquidStreaming": DummyMethod(), + "GetClam": DummyMethod(), + }, + )}, + naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + ) + with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): + actual_response = g.get_response(api_schema, opts=Options.build("")) - expected_response = CodeGeneratorResponse( - file=[ - CodeGeneratorResponse.File( - name="samples/generated_samples/squid_sample_91a465c6.py", content="\n", - ), - CodeGeneratorResponse.File( - name="samples/generated_samples/squid_sample_55051b38.py", content="\n", - ), - 
CodeGeneratorResponse.File(name="samples/generated_samples/157884ee.py", - content="\n",), - # TODO(busunkim): Re-enable manifest generation once metadata - # format has been formalized. - # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue - # CodeGeneratorResponse.File( - # name="samples/generated_samples/mollusc.v6.python.21120601.131313.manifest.yaml", - # content=dedent( - # """\ - # --- - # type: manifest/samples - # schema_version: 3 - # python: &python - # environment: python - # bin: python3 - # base_path: samples - # invocation: '{bin} {path} @args' - # samples: - # - <<: *python - # sample: squid_sample_91a465c6 - # path: '{base_path}/squid_sample_91a465c6.py' - # region_tag: humboldt_tag - # - <<: *python - # sample: squid_sample_55051b38 - # path: '{base_path}/squid_sample_55051b38.py' - # region_tag: squid_sample - # - <<: *python - # sample: 157884ee - # path: '{base_path}/157884ee.py' - # """ - # ), - # ), - ] + + expected_snippet_metadata_json = { + "snippets": [ + { + "clientMethod": { + "method": { + "shortName": "GetSquidStreaming", + "service": { + "shortName": "Mollusc" + } + } + }, + "file": "squid_sample_1cfd0b3d.py", + "segments": [ + {"type": "FULL"}, + {"type": "SHORT"}, + {"type": "CLIENT_INITIALIZATION"}, + {"type": "REQUEST_INITIALIZATION"}, + {"type": "REQUEST_EXECUTION"}, + {"type": "RESPONSE_HANDLING"} + ] + }, + { + "clientMethod": { + "method": { + "shortName": "GetSquidStreaming", + "service": { + "shortName": "Mollusc" + } + } + }, + "file": "squid_sample_cf4d4fa4.py", + "segments": [ + {"type": "FULL"}, + {"type": "SHORT"}, + {"type": "CLIENT_INITIALIZATION"}, + {"type": "REQUEST_INITIALIZATION"}, + {"type": "REQUEST_EXECUTION"}, + {"type": "RESPONSE_HANDLING"} + ] + }, + { + "clientMethod": { + "method": { + "shortName": "GetSquidStreaming", + "service": { + "shortName": "Mollusc" + } + } + }, + "file": "7384949e.py", + "segments": [ + {"type": "FULL"}, + {"type": 
"SHORT"}, + {"type": "CLIENT_INITIALIZATION"}, + {"type": "REQUEST_INITIALIZATION"}, + {"type": "REQUEST_EXECUTION"}, + {"type": "RESPONSE_HANDLING"} + ] + } + ] + } + + assert actual_response.supported_features == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + assert len(actual_response.file) == 4 + assert actual_response.file[0] == CodeGeneratorResponse.File( + name="samples/generated_samples/squid_sample_1cfd0b3d.py", content="\n", ) - expected_response.supported_features |= ( - CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + assert actual_response.file[1] == CodeGeneratorResponse.File( + name="samples/generated_samples/squid_sample_cf4d4fa4.py", content="\n", ) - - assert actual_response == expected_response + assert actual_response.file[2] == CodeGeneratorResponse.File( + name="samples/generated_samples/7384949e.py", content="\n", + ) + assert actual_response.file[3].name == "samples/generated_samples/snippet_metadata_mollusc_v1.json" + assert json.loads( + actual_response.file[3].content) == expected_snippet_metadata_json def test_generator_duplicate_samples(fs): @@ -639,116 +716,6 @@ def test_generator_duplicate_samples(fs): opts=Options.build("")) -@mock.patch("gapic.samplegen.samplegen.generate_sample", return_value="") -@mock.patch("time.gmtime",) -def test_dont_generate_in_code_samples(mock_gmtime, mock_generate_sample, fs): - # These time values are nothing special, - # they just need to be deterministic. 
- returner = mock.MagicMock() - returner.tm_year = 2112 - returner.tm_mon = 6 - returner.tm_mday = 1 - returner.tm_hour = 13 - returner.tm_min = 13 - returner.tm_sec = 13 - mock_gmtime.return_value = returner - - config_fpath = "samples.yaml" - fs.create_file( - config_fpath, - contents=dedent( - """ - type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto - schema_version: 1.2.0 - samples: - - id: squid_sample - rpc: IdentifyMollusc - service: Mollusc.v1.Mollusc - sample_type: - - standalone - - incode/SQUID - - id: clam_sample - rpc: IdentifyMollusc - service: Mollusc.v1.Mollusc - sample_type: - - incode/CLAM - - id: whelk_sample - rpc: IdentifyMollusc - service: Mollusc.v1.Mollusc - sample_type: - - standalone - - id: octopus_sample - rpc: IdentifyMollusc - service: Mollusc.v1.Mollusc - """ - ), - ) - - generator = make_generator(f"samples={config_fpath}") - generator._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api( - make_proto( - descriptor_pb2.FileDescriptorProto( - name="mollusc.proto", - package="Mollusc.v1", - service=[descriptor_pb2.ServiceDescriptorProto( - name="Mollusc")], - ), - ), - naming=naming.NewNaming(name="Mollusc", version="v6"), - ) - - # Note that we do NOT expect a clam sample. - # There are four tests going on: - # 1) Just an explicit standalone sample type. - # 2) Multiple sample types, one of which is standalone. - # 3) Explicit sample types but NO standalone sample type. - # 4) Implicit standalone sample type. - expected = CodeGeneratorResponse( - file=[ - CodeGeneratorResponse.File( - name="samples/generated_samples/squid_sample.py", content="\n",), - CodeGeneratorResponse.File( - name="samples/generated_samples/whelk_sample.py", content="\n",), - CodeGeneratorResponse.File( - name="samples/generated_samples/octopus_sample.py", content="\n",), - # TODO(busunkim): Re-enable manifest generation once metadata - # format has been formalized. 
- # https://docs.google.com/document/d/1ghBam8vMj3xdoe4xfXhzVcOAIwrkbTpkMLgKc9RPD9k/edit#heading=h.sakzausv6hue - # CodeGeneratorResponse.File( - # name="samples/generated_samples/mollusc.v6.python.21120601.131313.manifest.yaml", - # content=dedent( - # """ --- - # type: manifest/samples - # schema_version: 3 - # python: &python - # environment: python - # bin: python3 - # base_path: samples - # invocation: \'{bin} {path} @args\' - # samples: - # - <<: *python - # sample: squid_sample - # path: \'{base_path}/squid_sample.py\' - # - <<: *python - # sample: whelk_sample - # path: \'{base_path}/whelk_sample.py\' - # - <<: *python - # sample: octopus_sample - # path: \'{base_path}/octopus_sample.py\' - # """ - # ), - # ), - ] - ) - expected.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL - - actual = generator.get_response( - api_schema=api_schema, opts=Options.build("") - ) - assert actual == expected - - def make_generator(opts_str: str = "") -> generator.Generator: return generator.Generator(Options.build(opts_str)) diff --git a/packages/gapic-generator/tests/unit/samplegen/__init__.py b/packages/gapic-generator/tests/unit/samplegen/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index c6c44239c888..76b0b58bff74 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -28,8 +28,6 @@ def sample_classify(video, location): - """Determine the full taxonomy of input mollusc""" - # Create a client client = molluscclient.MolluscServiceClient() diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 
8ad414a78f53..71a6d3853cad 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -28,8 +28,6 @@ async def sample_classify(video, location): - """Determine the full taxonomy of input mollusc""" - # Create a client client = molluscclient.MolluscServiceAsyncClient() diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index c6c44239c888..76b0b58bff74 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -28,8 +28,6 @@ def sample_classify(video, location): - """Determine the full taxonomy of input mollusc""" - # Create a client client = molluscclient.MolluscServiceClient() diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index a6e7d48b6e98..0b617f40ed95 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -28,8 +28,6 @@ def sample_classify(video, location): - """Determine the full taxonomy of input mollusc""" - # Create a client client = molluscclient.MolluscServiceClient() diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index aefde2f1dc3e..b1b439549e6b 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -17,13 +17,16 @@ import pytest from pathlib import 
Path +from google.protobuf import json_format + import gapic.utils as utils from gapic.samplegen import samplegen from gapic.samplegen_utils import (types, utils as gapic_utils) +from gapic.samplegen_utils import snippet_metadata_pb2 from gapic.schema import (naming, wrappers) -from common_types import (DummyField, DummyMessage, +from ..common_types import (DummyField, DummyMessage, DummyMessageTypePB, DummyMethod, DummyService, DummyIdent, DummyApiSchema, DummyNaming, enum_factory, message_factory) @@ -105,6 +108,7 @@ def test_generate_sample_basic(): ) sample = {"service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", "rpc": "Classify", "id": "mollusc_classify_sync", "description": "Determine the full taxonomy of input mollusc", @@ -119,13 +123,21 @@ def test_generate_sample_basic(): ], "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} - sample_str = samplegen.generate_sample( + sample_str, metadata = samplegen.generate_sample( sample, schema, env.get_template('examples/sample.py.j2') ) assert sample_str == golden_snippet("sample_basic.py") + assert json_format.MessageToDict(metadata) == { + 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', + 'clientMethod': + {'method': { + 'shortName': 'Classify', + 'service': {'shortName': 'Mollusc'} + }} + } def test_generate_sample_basic_async(): @@ -180,6 +192,7 @@ def test_generate_sample_basic_async(): ) sample = {"service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_async", "rpc": "Classify", "transport": "grpc-async", "id": "mollusc_classify_sync", @@ -195,13 +208,23 @@ def test_generate_sample_basic_async(): ], "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} - sample_str = samplegen.generate_sample( + sample_str, metadata = samplegen.generate_sample( sample, schema, env.get_template('examples/sample.py.j2') ) assert sample_str == 
golden_snippet("sample_basic_async.py") + assert json_format.MessageToDict(metadata) == { + 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_async', + 'clientMethod': + { + 'async': True, + 'method': { + 'shortName': 'Classify', + 'service': {'shortName': 'Mollusc'} + }} + } def test_generate_sample_basic_unflattenable(): @@ -251,6 +274,7 @@ def test_generate_sample_basic_unflattenable(): ) sample = {"service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", "rpc": "Classify", "id": "mollusc_classify_sync", "description": "Determine the full taxonomy of input mollusc", @@ -265,13 +289,22 @@ def test_generate_sample_basic_unflattenable(): ], "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} - sample_str = samplegen.generate_sample( + sample_str, metadata = samplegen.generate_sample( sample, schema, env.get_template('examples/sample.py.j2') ) assert sample_str == golden_snippet("sample_basic_unflattenable.py") + assert json_format.MessageToDict(metadata) == { + 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', + 'clientMethod': + { + 'method': { + 'shortName': 'Classify', + 'service': {'shortName': 'Mollusc'} + }} + } def test_generate_sample_void_method(): @@ -320,6 +353,7 @@ def test_generate_sample_void_method(): ) sample = {"service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", "rpc": "Classify", "id": "mollusc_classify_sync", "description": "Determine the full taxonomy of input mollusc", @@ -333,13 +367,22 @@ def test_generate_sample_void_method(): "input_parameter": "location"} ]} - sample_str = samplegen.generate_sample( + sample_str, metadata = samplegen.generate_sample( sample, schema, env.get_template('examples/sample.py.j2') ) assert sample_str == golden_snippet("sample_basic_void_method.py") + assert json_format.MessageToDict(metadata) == { + 'regionTag': 
'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', + 'clientMethod': + { + 'method': { + 'shortName': 'Classify', + 'service': {'shortName': 'Mollusc'} + }} + } def test_generate_sample_service_not_found(): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index d1eb23e78706..5fe7217a30bb 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -19,7 +19,7 @@ import gapic.samplegen_utils.yaml as gapic_yaml import gapic.samplegen_utils.types as types import gapic.samplegen.manifest as manifest -from common_types import DummyApiSchema, DummyNaming +from ..common_types import DummyApiSchema, DummyNaming def test_generate_manifest(): diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index c42420be594c..00608a2ae771 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -29,7 +29,7 @@ import gapic.schema.wrappers as wrappers from gapic.utils import Options -from common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, DummyMessageTypePB, +from ..common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, DummyMessageTypePB, DummyService, DummyMethod, message_factory, enum_factory) from gapic.samplegen_utils import utils @@ -2087,7 +2087,6 @@ def test_generate_sample_spec_basic(): assert len(specs) == 2 assert specs[0] == { - "sample_type": "standalone", "rpc": "Ramshorn", "transport": "grpc", "service": "animalia.mollusca.v1.Squid", @@ -2096,7 +2095,6 @@ def test_generate_sample_spec_basic(): } assert specs[1] == { - "sample_type": "standalone", "rpc": "Ramshorn", "transport": "grpc-async", "service": "animalia.mollusca.v1.Squid", diff --git 
a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index 09f782a099f1..8f9db942369f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -19,7 +19,7 @@ from gapic.samplegen_utils import snippet_metadata_pb2 from gapic.samplegen_utils import snippet_index, types -from common_types import DummyApiSchema, DummyService, DummyMethod +from ..common_types import DummyApiSchema, DummyService, DummyMethod @pytest.fixture @@ -82,7 +82,6 @@ def test_snippet_init(sample_str): # and # [END ...] lines expected_full_snipppet = """from molluscs.v1 import molluscclient - def sample_classify(video, location): # Create a client client = molluscclient.MolluscServiceClient() @@ -130,7 +129,7 @@ def test_add_snippet_no_matching_rpc(sample_str): snippet_metadata = snippet_metadata_pb2.Snippet( ) snippet_metadata.client_method.method.service.short_name = "Squid" - snippet_metadata.client_method.full_name = "classify" + snippet_metadata.client_method.short_name = "classify" snippet = snippet_index.Snippet(sample_str, snippet_metadata) # No 'classify' method in 'Squid' service @@ -166,7 +165,7 @@ def test_get_snippet_no_matching_rpc(): def test_add_and_get_snippet_sync(sample_str): snippet_metadata = snippet_metadata_pb2.Snippet() snippet_metadata.client_method.method.service.short_name = "Squid" - snippet_metadata.client_method.method.full_name = "classify" + snippet_metadata.client_method.method.short_name = "classify" snippet = snippet_index.Snippet(sample_str, snippet_metadata) index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( @@ -182,7 +181,7 @@ def test_add_and_get_snippet_sync(sample_str): def test_add_and_get_snippet_async(sample_str): snippet_metadata = snippet_metadata_pb2.Snippet() snippet_metadata.client_method.method.service.short_name = "Squid" - 
snippet_metadata.client_method.method.full_name = "classify" + snippet_metadata.client_method.method.short_name = "classify" setattr(snippet_metadata.client_method, "async", True) snippet = snippet_index.Snippet(sample_str, snippet_metadata) @@ -199,7 +198,7 @@ def test_add_and_get_snippet_async(sample_str): def test_get_metadata_json(sample_str): snippet_metadata = snippet_metadata_pb2.Snippet() snippet_metadata.client_method.method.service.short_name = "Squid" - snippet_metadata.client_method.method.full_name = "classify" + snippet_metadata.client_method.method.short_name = "classify" snippet = snippet_index.Snippet(sample_str, snippet_metadata) index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( @@ -210,7 +209,7 @@ def test_get_metadata_json(sample_str): index.add_snippet(snippet) assert json.loads(index.get_metadata_json()) == { - 'snippets': [{'clientMethod': {'method': {'fullName': 'classify', + 'snippets': [{'clientMethod': {'method': {'shortName': 'classify', 'service': {'shortName': 'Squid'}}}, 'segments': [{'end': 28, 'start': 2, 'type': 'FULL'}, {'end': 28, 'start': 2, 'type': 'SHORT'}, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 42b8bb2aad69..15b59e5369d0 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -21,7 +21,7 @@ from gapic.samplegen_utils.types import CallingForm from textwrap import dedent -import common_types +from .. 
import common_types def check_template(template_fragment, expected_output, **kwargs): From 500abceb77eb0a044ba8a124043748146e109595 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 09:43:19 -0800 Subject: [PATCH 0697/1339] chore(master): release 0.39.1 (#1133) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 203 ++++++++++++++++++++++++++ 1 file changed, 203 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 2d95536ac079..8d0ea31d5790 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,209 @@ # Changelog +### [0.39.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.1...v0.39.1) (2022-01-18) + + +### Features + +* add 'from_service_account_info' factory to clients ([#706](https://github.com/googleapis/gapic-generator-python/issues/706)) ([94d5f0c](https://github.com/googleapis/gapic-generator-python/commit/94d5f0c11b8041cbae8e4a89bb504d6c6e200a95)), closes [#705](https://github.com/googleapis/gapic-generator-python/issues/705) +* add `credentials_file` and `scopes` via `client_options` ([#461](https://github.com/googleapis/gapic-generator-python/issues/461)) ([b5e1b1e](https://github.com/googleapis/gapic-generator-python/commit/b5e1b1e8991159dc176da889e9bdf12e3eebdb1e)) +* Add `x-goog-api-client` header to rest clients ([#888](https://github.com/googleapis/gapic-generator-python/issues/888)) ([2d1d3ae](https://github.com/googleapis/gapic-generator-python/commit/2d1d3ae135a75bbfff13df7703de5d0dad44695c)) +* add async samples ([#861](https://github.com/googleapis/gapic-generator-python/issues/861)) ([e385ffd](https://github.com/googleapis/gapic-generator-python/commit/e385ffd7f012c6a38c9fcd7c5f36ce090311032b)) +* add autogenerated snippets 
([#845](https://github.com/googleapis/gapic-generator-python/issues/845)) ([abdf5ec](https://github.com/googleapis/gapic-generator-python/commit/abdf5ec00261e5500dbdd190c23b0b2b05836799)) +* add bazel support for gapic metadata ([#811](https://github.com/googleapis/gapic-generator-python/issues/811)) ([7ced24a](https://github.com/googleapis/gapic-generator-python/commit/7ced24a0b20cb6505587b946c03b1b038eef4b4a)) +* add flag for gapic metadata ([#795](https://github.com/googleapis/gapic-generator-python/issues/795)) ([9cd7664](https://github.com/googleapis/gapic-generator-python/commit/9cd7664141835edcd8970629d9cf3abe4b7fd7c4)) +* add fragment tests ([#1056](https://github.com/googleapis/gapic-generator-python/issues/1056)) ([9d9b33d](https://github.com/googleapis/gapic-generator-python/commit/9d9b33dadf587a6d0b09031edeea597d6d2eae62)) +* add gapic metadata file ([#781](https://github.com/googleapis/gapic-generator-python/issues/781)) ([5dd8fcc](https://github.com/googleapis/gapic-generator-python/commit/5dd8fccf6b4da57edef0347beb07102634daa992)) +* add GOOGLE_API_USE_MTLS support ([#420](https://github.com/googleapis/gapic-generator-python/issues/420)) ([41fa725](https://github.com/googleapis/gapic-generator-python/commit/41fa725a304792b3470d2be079e59f577387e5bb)) +* add iam methods to templates ([#545](https://github.com/googleapis/gapic-generator-python/issues/545)) ([3f42c3c](https://github.com/googleapis/gapic-generator-python/commit/3f42c3cf8aae432a9bda0953fbabd7f0c8d774de)) +* add mtls feature to rest transport ([#731](https://github.com/googleapis/gapic-generator-python/issues/731)) ([524dbab](https://github.com/googleapis/gapic-generator-python/commit/524dbab16d248198ca10a08ecede4600fd36cefc)) +* add mTLS to ads template ([#384](https://github.com/googleapis/gapic-generator-python/issues/384)) ([1e76096](https://github.com/googleapis/gapic-generator-python/commit/1e760960df4e80e4f8d50291b872341e80eaa7f1)) +* add proper handling of query/path/body parameters 
for rest transport ([#702](https://github.com/googleapis/gapic-generator-python/issues/702)) ([6b2de5d](https://github.com/googleapis/gapic-generator-python/commit/6b2de5dd9fbf15e6b0a42b428b01eb03f1a3820a)) +* add rest transport generation for clients with optional transport flag ([#688](https://github.com/googleapis/gapic-generator-python/issues/688)) ([af59c2c](https://github.com/googleapis/gapic-generator-python/commit/af59c2c3c3d6b7e1f626c3fbc2c03f99ca31b4a4)) +* add retry config passed to bazel rule ([#526](https://github.com/googleapis/gapic-generator-python/issues/526)) ([9e96151](https://github.com/googleapis/gapic-generator-python/commit/9e96151d702786912fcf033f7535efad8ae754ee)) +* add snippet index ([#1121](https://github.com/googleapis/gapic-generator-python/issues/1121)) ([55d2bc6](https://github.com/googleapis/gapic-generator-python/commit/55d2bc6580e5db0f837de1b245533a8f1f2e9beb)) +* add support for common resource paths ([#622](https://github.com/googleapis/gapic-generator-python/issues/622)) ([15a7fde](https://github.com/googleapis/gapic-generator-python/commit/15a7fdeb966cb64a742b6305d2c71dd3d485d0f9)) +* add support for context manager in client ([#987](https://github.com/googleapis/gapic-generator-python/issues/987)) ([4edabcf](https://github.com/googleapis/gapic-generator-python/commit/4edabcf6791cfb0874a951b695b39672036760d4)) +* add support for long-running operations with rest transport. 
([#1094](https://github.com/googleapis/gapic-generator-python/issues/1094)) ([e89fd23](https://github.com/googleapis/gapic-generator-python/commit/e89fd23609625c5aa49acd6c6ee67f87fce324fd)) +* allow user-provided client info ([#573](https://github.com/googleapis/gapic-generator-python/issues/573)) ([b2e5274](https://github.com/googleapis/gapic-generator-python/commit/b2e52746c7ce4b983482fb776224b30767978c79)) +* allow warehouse name to be customized ([#717](https://github.com/googleapis/gapic-generator-python/issues/717)) ([7c185e8](https://github.com/googleapis/gapic-generator-python/commit/7c185e87cb4252b1f99ed121515814595f9492c4)), closes [#605](https://github.com/googleapis/gapic-generator-python/issues/605) +* bypass request copying in method calls ([#557](https://github.com/googleapis/gapic-generator-python/issues/557)) ([3a23143](https://github.com/googleapis/gapic-generator-python/commit/3a2314318de229a3353c984a8cb2766ae95cc968)) +* **dev:** Add Git pre-commit hooks [gapic-generator-python] ([#908](https://github.com/googleapis/gapic-generator-python/issues/908)) ([298db39](https://github.com/googleapis/gapic-generator-python/commit/298db39064e29de764537f25dc38f9e5ac301390)) +* enable self signed jwt for grpc ([#920](https://github.com/googleapis/gapic-generator-python/issues/920)) ([da119c7](https://github.com/googleapis/gapic-generator-python/commit/da119c72c82d04e168c4b41e5bf910a0c1609ce3)) +* enable self signed jwt for http ([#1000](https://github.com/googleapis/gapic-generator-python/issues/1000)) ([5f87973](https://github.com/googleapis/gapic-generator-python/commit/5f8797396a2477b772b7bfb827499db32e28710e)) +* file_level and indirectly used resources generate helper methods ([#642](https://github.com/googleapis/gapic-generator-python/issues/642)) ([42e224c](https://github.com/googleapis/gapic-generator-python/commit/42e224cb100f6e2aa9370bc6a5179d62979b5c4d)), closes [#637](https://github.com/googleapis/gapic-generator-python/issues/637) +* forward 
compatible diregapic LRO support ([#1085](https://github.com/googleapis/gapic-generator-python/issues/1085)) ([aa7f4d5](https://github.com/googleapis/gapic-generator-python/commit/aa7f4d568f7f43738ab3489fc84ce6bc5d6bda18)) +* generate code snippets by default ([#1044](https://github.com/googleapis/gapic-generator-python/issues/1044)) ([e46f443](https://github.com/googleapis/gapic-generator-python/commit/e46f443dbeffe16b63f97668801b06189769e972)) +* generate snippet metadata ([#1129](https://github.com/googleapis/gapic-generator-python/issues/1129)) ([9e46031](https://github.com/googleapis/gapic-generator-python/commit/9e46031d01edc3a461140fe3b29d8d400f5ddf86)) +* implement grpc transcode for rest transport and complete generated tests ([#999](https://github.com/googleapis/gapic-generator-python/issues/999)) ([ccdd17d](https://github.com/googleapis/gapic-generator-python/commit/ccdd17d6133274a34dd727fab0576e6c63238833)) +* implement grpc transcode for rest transport and complete generated tests. 
([ccdd17d](https://github.com/googleapis/gapic-generator-python/commit/ccdd17d6133274a34dd727fab0576e6c63238833)) +* Initial draft of GAPIC Bazel Extensions gapic-generator-python ([#342](https://github.com/googleapis/gapic-generator-python/issues/342)) ([cc7ab0b](https://github.com/googleapis/gapic-generator-python/commit/cc7ab0ba59634771daf1d4034f3f0b25872da9df)) +* Make GAPIC Bazel rules production ready ([#402](https://github.com/googleapis/gapic-generator-python/issues/402)) ([d18ed41](https://github.com/googleapis/gapic-generator-python/commit/d18ed416240a064fffac0fd7915b61f6415fe140)) +* port REST transport to Ads templates ([#1003](https://github.com/googleapis/gapic-generator-python/issues/1003)) ([d2ab9da](https://github.com/googleapis/gapic-generator-python/commit/d2ab9da2d0d9353f7289be1b1b1b7492d6d98066)) +* precache wrapped rpcs ([#553](https://github.com/googleapis/gapic-generator-python/issues/553)) ([2f2fb5d](https://github.com/googleapis/gapic-generator-python/commit/2f2fb5d3d9472a79c80be6d052129d07d2bbb835)) +* provide AsyncIO support for generated code ([#365](https://github.com/googleapis/gapic-generator-python/issues/365)) ([305ed34](https://github.com/googleapis/gapic-generator-python/commit/305ed34cfc1607c990f2f88b27f53358da25c366)) +* Raise GoogleAPICallError on REST response errors ([#891](https://github.com/googleapis/gapic-generator-python/issues/891)) ([edb8c63](https://github.com/googleapis/gapic-generator-python/commit/edb8c63e8a331f5e08ea19202d8de42de7051299)) +* **snippetgen:** generate mock input for required fields ([#941](https://github.com/googleapis/gapic-generator-python/issues/941)) ([b2149da](https://github.com/googleapis/gapic-generator-python/commit/b2149da5e6873e1f71871bfecd899bb9aa0b6439)) +* **snippetgen:** turn resource path strings into f-strings ([#1012](https://github.com/googleapis/gapic-generator-python/issues/1012)) 
([a110e1d](https://github.com/googleapis/gapic-generator-python/commit/a110e1d8387ea37b85ab0621bacd30da175fe85b)) +* Support alternative http bindings in the gapic schema. ([#993](https://github.com/googleapis/gapic-generator-python/issues/993)) ([041a726](https://github.com/googleapis/gapic-generator-python/commit/041a726b818cd67812d689c23757f31ec9964d66)) +* Support field presence for query parameters in REST clients ([#866](https://github.com/googleapis/gapic-generator-python/issues/866)) ([5339db1](https://github.com/googleapis/gapic-generator-python/commit/5339db1308326d91a05a34d38e31cf91b79a9225)) +* support for proto3 optional fields ([#519](https://github.com/googleapis/gapic-generator-python/issues/519)) ([1aa729c](https://github.com/googleapis/gapic-generator-python/commit/1aa729cc8d2f7f0de25c8348fdbf9d6dd96f5847)) +* support mtls env variables ([#589](https://github.com/googleapis/gapic-generator-python/issues/589)) ([b19026d](https://github.com/googleapis/gapic-generator-python/commit/b19026d9cca26ebd1cd0c3e73f738c4d1870d987)) +* support protobuf method deprecation option [gapic-generator-python] ([#875](https://github.com/googleapis/gapic-generator-python/issues/875)) ([5a5a839](https://github.com/googleapis/gapic-generator-python/commit/5a5a839b99d78ec5a5c52452e57c289b55ad1db5)) +* support quota project override via client options ([#496](https://github.com/googleapis/gapic-generator-python/issues/496)) ([bbc6b36](https://github.com/googleapis/gapic-generator-python/commit/bbc6b367f50526312e8320f0fc668ef88f230dbd)) +* support self-signed JWT flow for service accounts ([#774](https://github.com/googleapis/gapic-generator-python/issues/774)) ([89d6f35](https://github.com/googleapis/gapic-generator-python/commit/89d6f35c54b0a9b81c9b5f580d2e9eb87352ed93)) +* **tests:** Add integration test framework, goldens for 4 APIs [gapic-generator-python] ([#905](https://github.com/googleapis/gapic-generator-python/issues/905)) 
([48db1e6](https://github.com/googleapis/gapic-generator-python/commit/48db1e644badc2180253e11d9a3d3657e8f9aeed)) +* update templates to permit enum aliases ([#809](https://github.com/googleapis/gapic-generator-python/issues/809)) ([2e7ea11](https://github.com/googleapis/gapic-generator-python/commit/2e7ea11f80210459106f9780e5f013e2a0381d29)) + + +### Bug Fixes + +* 'id' should not be a reserved name ([#602](https://github.com/googleapis/gapic-generator-python/issues/602)) ([c43c574](https://github.com/googleapis/gapic-generator-python/commit/c43c5740db099be19c5f6e52b3a917a631003411)) +* add 'dict' type annotation to 'request' ([#966](https://github.com/googleapis/gapic-generator-python/issues/966)) ([49205d9](https://github.com/googleapis/gapic-generator-python/commit/49205d99dd440690b838c8eb3f6a695f35b061c2)) +* add 'dict' type annotation to 'request' for async_client ([#1051](https://github.com/googleapis/gapic-generator-python/issues/1051)) ([08cc2c4](https://github.com/googleapis/gapic-generator-python/commit/08cc2c4c85297759892782e307bcaa63dff41212)) +* add 'type: ignore' comment for 'google.auth' ([#579](https://github.com/googleapis/gapic-generator-python/issues/579)) ([af17501](https://github.com/googleapis/gapic-generator-python/commit/af17501d258c7c37fc1081fcad5fe18f7629f4c3)) +* add a separate DEFAULT_CLIENT_INFO for rest clients ([#988](https://github.com/googleapis/gapic-generator-python/issues/988)) ([22ac400](https://github.com/googleapis/gapic-generator-python/commit/22ac40097ab50bb2d3a7f1a2d35d659c391e0927)) +* add additional reserved names for disambiguation ([#1114](https://github.com/googleapis/gapic-generator-python/issues/1114)) ([1cffd8d](https://github.com/googleapis/gapic-generator-python/commit/1cffd8d99936cd10649faf05e0288b693e718f81)) +* add async client to %name_%version/__init__.py ([#859](https://github.com/googleapis/gapic-generator-python/issues/859)) 
([391fdb8](https://github.com/googleapis/gapic-generator-python/commit/391fdb84b13c5628c21d81ad311c689da8971f6a)) +* add certain raw imports to RESERVED_NAMES ([#824](https://github.com/googleapis/gapic-generator-python/issues/824)) ([04bd8aa](https://github.com/googleapis/gapic-generator-python/commit/04bd8aaf0fc2c2c0615105cab39dc33266b66775)) +* add enums to types/__init__.py ([#695](https://github.com/googleapis/gapic-generator-python/issues/695)) ([e1d4a4a](https://github.com/googleapis/gapic-generator-python/commit/e1d4a4ae768a631f6e6dc28f2acfde8be8dc4a8f)) +* add field headers for other http verbs ([#443](https://github.com/googleapis/gapic-generator-python/issues/443)) ([f8e9712](https://github.com/googleapis/gapic-generator-python/commit/f8e971280c23e1dbde4d51f9a64973bf439e8ab3)), closes [#401](https://github.com/googleapis/gapic-generator-python/issues/401) +* add google.api_core.retry import to base.py ([#555](https://github.com/googleapis/gapic-generator-python/issues/555)) ([1d08e60](https://github.com/googleapis/gapic-generator-python/commit/1d08e60cea4c5b3fa2555a4952161b0115d686f2)) +* add name and version info to fixup script name ([#490](https://github.com/googleapis/gapic-generator-python/issues/490)) ([16fe7e7](https://github.com/googleapis/gapic-generator-python/commit/16fe7e7885b7e17bf16b4f1f8f8844b9f5d0bdfe)) +* add oneof fields to generated protoplus init ([#485](https://github.com/googleapis/gapic-generator-python/issues/485)) ([be5a847](https://github.com/googleapis/gapic-generator-python/commit/be5a847aeff6687679f7bca46308362d588f5c77)), closes [#484](https://github.com/googleapis/gapic-generator-python/issues/484) +* add system tests and cert/key ([#394](https://github.com/googleapis/gapic-generator-python/issues/394)) ([87fe58f](https://github.com/googleapis/gapic-generator-python/commit/87fe58fef32d4aa16b0df7c132bb056e199baec7)) +* Adjust Field Names in URI Templates 
([#1041](https://github.com/googleapis/gapic-generator-python/issues/1041)) ([06cd7b6](https://github.com/googleapis/gapic-generator-python/commit/06cd7b66f0f303b066f7f1f510332ae19aa9de8e)) +* also add the async client to __all__ ([#869](https://github.com/googleapis/gapic-generator-python/issues/869)) ([09c90fa](https://github.com/googleapis/gapic-generator-python/commit/09c90fa48515cb7da1d0ebf1d93a0d49fc6448e8)) +* always use dataclasses 0.6 ([#481](https://github.com/googleapis/gapic-generator-python/issues/481)) ([066d04e](https://github.com/googleapis/gapic-generator-python/commit/066d04e7d53301024106f244280502f16af46b79)) +* **bazel:** Re-enable Python µgen integration tests post monolith rule removal ([#926](https://github.com/googleapis/gapic-generator-python/issues/926)) ([13a6b3a](https://github.com/googleapis/gapic-generator-python/commit/13a6b3aed35b5af85aea047922aa219258460a58)) +* **bazel:** Remove monolith imports from Python µgen Bazel rules ([#923](https://github.com/googleapis/gapic-generator-python/issues/923)) ([4a2afa7](https://github.com/googleapis/gapic-generator-python/commit/4a2afa78455817e7e6c058d21857326867fe3f21)) +* body encoding for rest transport ([#768](https://github.com/googleapis/gapic-generator-python/issues/768)) ([cc55a18](https://github.com/googleapis/gapic-generator-python/commit/cc55a182b878d78f92aba259c067d47ab1d01e5b)) +* Check for default value presence for non-optional fields in REST ([#868](https://github.com/googleapis/gapic-generator-python/issues/868)) ([5748001](https://github.com/googleapis/gapic-generator-python/commit/57480019c3e77c6b3a85bdaf8441334170b318e8)) +* consistently use _pb2 identifier ([#883](https://github.com/googleapis/gapic-generator-python/issues/883)) ([d789c84](https://github.com/googleapis/gapic-generator-python/commit/d789c84d0d686bdb2d88179041b4c04cc32a3e66)) +* consolidate dependencies in setup.py ([#422](https://github.com/googleapis/gapic-generator-python/issues/422)) 
([10403bb](https://github.com/googleapis/gapic-generator-python/commit/10403bb034954afd2952bb1cb3701d271bd6ecd3)) +* convert datetime back to proto for unit tests ([#511](https://github.com/googleapis/gapic-generator-python/issues/511)) ([e1c787d](https://github.com/googleapis/gapic-generator-python/commit/e1c787d3b6fe09dc0b4e00f07a7bd77fb5f1e6a3)) +* corner case fix for empty request generated test ([#801](https://github.com/googleapis/gapic-generator-python/issues/801)) ([039dc71](https://github.com/googleapis/gapic-generator-python/commit/039dc713fed291142058741e1138da5a4bec542f)) +* correctly instantiates client ([#338](https://github.com/googleapis/gapic-generator-python/issues/338)) ([7a46524](https://github.com/googleapis/gapic-generator-python/commit/7a46524bc7f2eabd7dab27313bb752c27afc358b)), closes [#337](https://github.com/googleapis/gapic-generator-python/issues/337) +* disable always_use_jwt_access ([#939](https://github.com/googleapis/gapic-generator-python/issues/939)) ([1302352](https://github.com/googleapis/gapic-generator-python/commit/130235220849987df572c1840735b3c199b85dfc)) +* don't enable snippetgen by default ([#1078](https://github.com/googleapis/gapic-generator-python/issues/1078)) ([8bdb709](https://github.com/googleapis/gapic-generator-python/commit/8bdb70931a9ecb1c89fda9608697b0762770bc12)) +* don't use integer for enums in json encoding ([#761](https://github.com/googleapis/gapic-generator-python/issues/761)) ([6d37a73](https://github.com/googleapis/gapic-generator-python/commit/6d37a7388995b90428ee6293bcce5d48cd9a48f8)) +* enable GAPIC metadata generation ([#843](https://github.com/googleapis/gapic-generator-python/issues/843)) ([697816c](https://github.com/googleapis/gapic-generator-python/commit/697816ce7d5b201d6ced85fadd89f9140da67b37)) +* enable self signed jwt for grpc ([#958](https://github.com/googleapis/gapic-generator-python/issues/958)) 
([af02a9c](https://github.com/googleapis/gapic-generator-python/commit/af02a9cae522ff2cdc8e97cfffe2ba2bb84d6b6a)) +* ensure rest unit tests have complete coverage ([#1098](https://github.com/googleapis/gapic-generator-python/issues/1098)) ([0705d9c](https://github.com/googleapis/gapic-generator-python/commit/0705d9c5dbbea793867551e64991be37d8339c6b)) +* exclude 'input' from reserved names list ([#788](https://github.com/googleapis/gapic-generator-python/issues/788)) ([da2ff71](https://github.com/googleapis/gapic-generator-python/commit/da2ff717b82357359baeeafad9a3e48a70e194cb)) +* expose ssl credentials from transport ([#677](https://github.com/googleapis/gapic-generator-python/issues/677)) ([da0ee3e](https://github.com/googleapis/gapic-generator-python/commit/da0ee3eab4f80bf3d70fa5e06a2dcef7e1d4d22e)) +* expose transport property for clients ([#645](https://github.com/googleapis/gapic-generator-python/issues/645)) ([13cddda](https://github.com/googleapis/gapic-generator-python/commit/13cddda0623bd4d24ae7973752b1be0eaa40523a)), closes [#640](https://github.com/googleapis/gapic-generator-python/issues/640) +* fix case for expected field names in required fields test. 
([#1107](https://github.com/googleapis/gapic-generator-python/issues/1107)) ([6a593f9](https://github.com/googleapis/gapic-generator-python/commit/6a593f9807141aaf6c13a8843804e9fa9b300c91)) +* Fix client template type hints ([#593](https://github.com/googleapis/gapic-generator-python/issues/593)) ([93f34e8](https://github.com/googleapis/gapic-generator-python/commit/93f34e8a2a351a24a49424c1722baec2893dc764)) +* fix datetime comparison unit tests ([#898](https://github.com/googleapis/gapic-generator-python/issues/898)) ([81932a2](https://github.com/googleapis/gapic-generator-python/commit/81932a2b71e6ca5f424ddc5c52933ad1d452583a)) +* fix docstring for first attribute of protos ([#1004](https://github.com/googleapis/gapic-generator-python/issues/1004)) ([383f655](https://github.com/googleapis/gapic-generator-python/commit/383f6555a1d850889b2aa74be28c8d06465399e5)) +* fix incorrectly referenced exceptions, add missing port to tests ([#873](https://github.com/googleapis/gapic-generator-python/issues/873)) ([40078c4](https://github.com/googleapis/gapic-generator-python/commit/40078c46b21a0dfa489d4cd80ed7d95bb542f3c3)), closes [#872](https://github.com/googleapis/gapic-generator-python/issues/872) +* fix missing .coveragerc and the broken bazel build ([#723](https://github.com/googleapis/gapic-generator-python/issues/723)) ([7f8235f](https://github.com/googleapis/gapic-generator-python/commit/7f8235f6dfbd309a879895701aeb5e73c6425483)) +* fix missing http schema (http/https) for REST clients ([#1063](https://github.com/googleapis/gapic-generator-python/issues/1063)) ([e3aa7a0](https://github.com/googleapis/gapic-generator-python/commit/e3aa7a0b23bc4bfd5170753f74bdeac219902d1a)) +* fix mTLS logic ([#374](https://github.com/googleapis/gapic-generator-python/issues/374)) ([e3c079b](https://github.com/googleapis/gapic-generator-python/commit/e3c079bab907ac183628d8805e6966af9ca2083f)) +* Fix namespace packages conflict issue 
([#757](https://github.com/googleapis/gapic-generator-python/issues/757)) ([8035662](https://github.com/googleapis/gapic-generator-python/commit/8035662bdcfbdffd1c294c5d28479733358407ca)) +* fix resource path args for paths with =** ([#1089](https://github.com/googleapis/gapic-generator-python/issues/1089)) ([309cc66](https://github.com/googleapis/gapic-generator-python/commit/309cc66e880e07940866864b03c744310ef56762)) +* Fix rest transport logic ([#1039](https://github.com/googleapis/gapic-generator-python/issues/1039)) ([50d61af](https://github.com/googleapis/gapic-generator-python/commit/50d61afd30b021835fe898e41b783f4d04acff09)) +* fix rest transport tests ([#772](https://github.com/googleapis/gapic-generator-python/issues/772)) ([ce110a3](https://github.com/googleapis/gapic-generator-python/commit/ce110a35894aa1a838649f9782294b3b8446be5c)) +* fix rest transport unit test and required query prams handling ([#951](https://github.com/googleapis/gapic-generator-python/issues/951)) ([b793017](https://github.com/googleapis/gapic-generator-python/commit/b7930177da9a8be556bf6485febcc0a9bdef897b)) +* fix rest transport unit test template ([#741](https://github.com/googleapis/gapic-generator-python/issues/741)) ([54b9806](https://github.com/googleapis/gapic-generator-python/commit/54b98060f881c8f0424c7e146488d3adc19fec7a)) +* fix rest unit test ([#1074](https://github.com/googleapis/gapic-generator-python/issues/1074)) ([3b2918e](https://github.com/googleapis/gapic-generator-python/commit/3b2918ecaeb90229f22834438dc31755498ee2d0)) +* fix sphinx identifiers ([#714](https://github.com/googleapis/gapic-generator-python/issues/714)) ([39be474](https://github.com/googleapis/gapic-generator-python/commit/39be474b4419dfa521ef51927fd36dbf257d68e3)) +* fix style-check error ([#416](https://github.com/googleapis/gapic-generator-python/issues/416)) ([93ff10b](https://github.com/googleapis/gapic-generator-python/commit/93ff10b9add58dbc045c6656715bf67409a51404)) +* fix syntax errors 
and failing unit tests ([#849](https://github.com/googleapis/gapic-generator-python/issues/849)) ([9046261](https://github.com/googleapis/gapic-generator-python/commit/90462617e3e2b90eb8684210b6a70e890bdc0d96)), closes [#848](https://github.com/googleapis/gapic-generator-python/issues/848) +* fix syntax for Deprecationwarning ([#942](https://github.com/googleapis/gapic-generator-python/issues/942)) ([82dbddb](https://github.com/googleapis/gapic-generator-python/commit/82dbddb6a9caf1227c4b335345f365dd01025794)) +* fix tests generation logic ([#1049](https://github.com/googleapis/gapic-generator-python/issues/1049)) ([8f213ad](https://github.com/googleapis/gapic-generator-python/commit/8f213add4cb02366bb370ef46a686c6f0c37a575)) +* fix types on server and bidi streaming callables ([#641](https://github.com/googleapis/gapic-generator-python/issues/641)) ([d92c202](https://github.com/googleapis/gapic-generator-python/commit/d92c2029398c969ebf2a68a5bf77c5eb4fff7b31)) +* fix typo attribue -> attribute ([#627](https://github.com/googleapis/gapic-generator-python/issues/627)) ([729146f](https://github.com/googleapis/gapic-generator-python/commit/729146fd53edf1e4ae4d3c9a90640a7520b1ba9d)), closes [#626](https://github.com/googleapis/gapic-generator-python/issues/626) +* fix wrong scopes for self signed jwt ([#935](https://github.com/googleapis/gapic-generator-python/issues/935)) ([e033acd](https://github.com/googleapis/gapic-generator-python/commit/e033acd44763f7cf65eabb6b35f66093022b1bcb)) +* fix wrong unit test ([#502](https://github.com/googleapis/gapic-generator-python/issues/502)) ([c95bd45](https://github.com/googleapis/gapic-generator-python/commit/c95bd45506df7973758b9e1249586597d8214985)) +* generated unit tests live in the 'tests/gapic' subdir ([#456](https://github.com/googleapis/gapic-generator-python/issues/456)) ([1ed7c9d](https://github.com/googleapis/gapic-generator-python/commit/1ed7c9d6fe9595c390387d72113d741ebf28538d)) +* handle message bodies 
([#1117](https://github.com/googleapis/gapic-generator-python/issues/1117)) ([36e3236](https://github.com/googleapis/gapic-generator-python/commit/36e3236b3832993331d8d99c10e72797a8851390)) +* handle repeated fields in method signatures ([#445](https://github.com/googleapis/gapic-generator-python/issues/445)) ([3aae799](https://github.com/googleapis/gapic-generator-python/commit/3aae799f62a1f5d3b0506d919cc6080ee417f14b)) +* handle required fields properly in query_params ([#1068](https://github.com/googleapis/gapic-generator-python/issues/1068)) ([0e379ca](https://github.com/googleapis/gapic-generator-python/commit/0e379ca6c0aee9d79d11a14074b7e9343e9e6af2)) +* ignore types for imports generated from 'google.api_core' ([#597](https://github.com/googleapis/gapic-generator-python/issues/597)) ([8440e09](https://github.com/googleapis/gapic-generator-python/commit/8440e09855d399d647b62238a9697e04ea4d0d41)) +* ignore unknown fields returned from server for REST ([#777](https://github.com/googleapis/gapic-generator-python/issues/777)) ([a70b078](https://github.com/googleapis/gapic-generator-python/commit/a70b0787f7e3d40642a4f68574f0cc493cc4e054)) +* import warnings when needed ([#930](https://github.com/googleapis/gapic-generator-python/issues/930)) ([d4270ae](https://github.com/googleapis/gapic-generator-python/commit/d4270ae5805f44ab8ee30bb11fe42a0da6d79755)) +* improper types in pagers generation ([#970](https://github.com/googleapis/gapic-generator-python/issues/970)) ([bba3eea](https://github.com/googleapis/gapic-generator-python/commit/bba3eea5d45fe57c0395ceef30402ad7880013d7)) +* install gcc by hand ([#571](https://github.com/googleapis/gapic-generator-python/issues/571)) ([e224a03](https://github.com/googleapis/gapic-generator-python/commit/e224a0365a2d3ed20d69cf4d1298a3f022f8da76)) +* leave a newline between field description and oneof line ([#1071](https://github.com/googleapis/gapic-generator-python/issues/1071)) 
([4d0e365](https://github.com/googleapis/gapic-generator-python/commit/4d0e36528a8eb23ea3893b0bbcca10b679867445)) +* lint issue ([#396](https://github.com/googleapis/gapic-generator-python/issues/396)) ([98c544d](https://github.com/googleapis/gapic-generator-python/commit/98c544da9c285a52f72d1f4e070a9bdf1e762078)) +* list oneofs in docstring ([#1030](https://github.com/googleapis/gapic-generator-python/issues/1030)) ([a0e25c8](https://github.com/googleapis/gapic-generator-python/commit/a0e25c8c00391b99a351e667eddc8b4fecad30d8)) +* make # after alpha/beta optional ([#540](https://github.com/googleapis/gapic-generator-python/issues/540)) ([f86a47b](https://github.com/googleapis/gapic-generator-python/commit/f86a47b6431e374ae1797061511b49fe6bf22daf)) +* Make gapic-generator-python compatible with protobuf 3.14.0 (packaged as native namespace package) ([#753](https://github.com/googleapis/gapic-generator-python/issues/753)) ([45212af](https://github.com/googleapis/gapic-generator-python/commit/45212afb9f523a416d86272798d71ce05dc292f0)) +* methods returning Operation w/o operation_info are now allowed. 
([#1047](https://github.com/googleapis/gapic-generator-python/issues/1047)) ([6b640af](https://github.com/googleapis/gapic-generator-python/commit/6b640afbd93ea8c861b902211dc34e188234d072)) +* minor typo in ads template ([#664](https://github.com/googleapis/gapic-generator-python/issues/664)) ([816f965](https://github.com/googleapis/gapic-generator-python/commit/816f965c8560bf65d8043bd67672c660a2b1300b)) +* module names can no longer collide with keywords or builtins ([#595](https://github.com/googleapis/gapic-generator-python/issues/595)) ([960d550](https://github.com/googleapis/gapic-generator-python/commit/960d550c4a8fd09b052cce785d76243a5d4525d7)) +* modules referenced in MapField message type are properly aliased ([#654](https://github.com/googleapis/gapic-generator-python/issues/654)) ([2c79349](https://github.com/googleapis/gapic-generator-python/commit/2c79349e7b89435bc45e499885f7b12ac0bc2d9f)), closes [#618](https://github.com/googleapis/gapic-generator-python/issues/618) +* more fixes for rest transport ([#1042](https://github.com/googleapis/gapic-generator-python/issues/1042)) ([13d5f77](https://github.com/googleapis/gapic-generator-python/commit/13d5f77f8b6d4ce1181b29f2335d7584783be753)) +* mypy 0.800 update errors ([#754](https://github.com/googleapis/gapic-generator-python/issues/754)) ([608275a](https://github.com/googleapis/gapic-generator-python/commit/608275aa923f495520dea8ebddb94a99f26e27a5)) +* non-string required fields provide correct values ([#1108](https://github.com/googleapis/gapic-generator-python/issues/1108)) ([bc5f729](https://github.com/googleapis/gapic-generator-python/commit/bc5f729cf777d30e1053e23a1d115460952478af)) +* numerous small fixes to allow bigtable-admin ([#660](https://github.com/googleapis/gapic-generator-python/issues/660)) ([09692c4](https://github.com/googleapis/gapic-generator-python/commit/09692c4e889ccde3b0ca31a5e8476c1679804beb)) +* only require dataclases if python<3.7 
([#475](https://github.com/googleapis/gapic-generator-python/issues/475)) ([9597695](https://github.com/googleapis/gapic-generator-python/commit/959769518ea47df383b23b6e48c5da148f69029e)) +* only set unset fields if they are query params ([#1130](https://github.com/googleapis/gapic-generator-python/issues/1130)) ([9ad98ca](https://github.com/googleapis/gapic-generator-python/commit/9ad98ca6833f1b280bf3c04c858f92276d59ffbe)) +* operation module is properly aliased if necessary ([#615](https://github.com/googleapis/gapic-generator-python/issues/615)) ([8f92fd9](https://github.com/googleapis/gapic-generator-python/commit/8f92fd9999286ef3f916119be78dbeb838a15550)) +* paged code and templates are no longer message centric ([#527](https://github.com/googleapis/gapic-generator-python/issues/527)) ([00ba77c](https://github.com/googleapis/gapic-generator-python/commit/00ba77c3d27ef9a0b8742db3660983b80a68c672)) +* pass metadata to pagers ([#470](https://github.com/googleapis/gapic-generator-python/issues/470)) ([c43c6d9](https://github.com/googleapis/gapic-generator-python/commit/c43c6d943fa99f202014bf4bba795df25d314a63)), closes [#469](https://github.com/googleapis/gapic-generator-python/issues/469) +* primitive repeated fields are now correctly auto paginated ([#517](https://github.com/googleapis/gapic-generator-python/issues/517)) ([61a2cc0](https://github.com/googleapis/gapic-generator-python/commit/61a2cc0d4c08064d442fd4d7aa4b1b9e56158eaa)) +* raise for rest transport http error ([#738](https://github.com/googleapis/gapic-generator-python/issues/738)) ([7d24f3d](https://github.com/googleapis/gapic-generator-python/commit/7d24f3d81499ad714e57c7c9562b842c09e49d20)) +* refactor mtls logic to standalone method ([#1123](https://github.com/googleapis/gapic-generator-python/issues/1123)) ([d528223](https://github.com/googleapis/gapic-generator-python/commit/d528223e3221487f86a3d82c92cd2e2cf04bec4a)) +* remove 'property' from reserved names 
([#613](https://github.com/googleapis/gapic-generator-python/issues/613)) ([8338a51](https://github.com/googleapis/gapic-generator-python/commit/8338a51a81f5f5b8ebacf68c8e46d3e1804d3f8b)) +* remove auth, policy, and options from the reserved names list ([#851](https://github.com/googleapis/gapic-generator-python/issues/851)) ([d3f31a0](https://github.com/googleapis/gapic-generator-python/commit/d3f31a0d33411b3248871ddbe51135e83b699a73)) +* remove client recv msg limit ([#704](https://github.com/googleapis/gapic-generator-python/issues/704)) ([80147ce](https://github.com/googleapis/gapic-generator-python/commit/80147ce177ce435dcb1b611181e80dc35f915293)) +* remove duplicate assignment of certain flattened, repeated fields ([#760](https://github.com/googleapis/gapic-generator-python/issues/760)) ([cdbc221](https://github.com/googleapis/gapic-generator-python/commit/cdbc22130a176e733c529f60a6b8b1d224e82e89)) +* remove duplicate field entries ([#786](https://github.com/googleapis/gapic-generator-python/issues/786)) ([9f4dfa4](https://github.com/googleapis/gapic-generator-python/commit/9f4dfa46cb6a67081563ce096452fedd9e35051d)) +* remove extra space before_pb_options ([#863](https://github.com/googleapis/gapic-generator-python/issues/863)) ([f0532e7](https://github.com/googleapis/gapic-generator-python/commit/f0532e7a88479aeb805c1509239008bdd19e9d85)) +* remove support for google-api-core<1.26.0 ([#893](https://github.com/googleapis/gapic-generator-python/issues/893)) ([ce558ac](https://github.com/googleapis/gapic-generator-python/commit/ce558acef9ec9c9bcc54243cddb708ef168c05f0)) +* remove typo from py_gapic.bzl ([#532](https://github.com/googleapis/gapic-generator-python/issues/532)) ([2975c2d](https://github.com/googleapis/gapic-generator-python/commit/2975c2d76e08b5ee5324730707707d9dd6ced8ae)) +* rename __init__.py to __init__.py.j2 ([#550](https://github.com/googleapis/gapic-generator-python/issues/550)) 
([71a7062](https://github.com/googleapis/gapic-generator-python/commit/71a7062b918136b916cc5bfc7dbdf64f870edf6a)) +* rename local var page in generated tests ([#577](https://github.com/googleapis/gapic-generator-python/issues/577)) ([075f9e8](https://github.com/googleapis/gapic-generator-python/commit/075f9e8d50b02ffb5f2f042b84f27a9f634636e2)) +* rendering mock values for recursive messages no longer crashes ([#587](https://github.com/googleapis/gapic-generator-python/issues/587)) ([c2a83e5](https://github.com/googleapis/gapic-generator-python/commit/c2a83e561bf46b4af21e9008c7d67a1c609d7d06)) +* require min google-api-core version of 1.21.0 ([#506](https://github.com/googleapis/gapic-generator-python/issues/506)) ([bf787bd](https://github.com/googleapis/gapic-generator-python/commit/bf787bd36198288d6a40e45e44e43f0098cfec7c)) +* resource messages in method response types generate helpers ([#629](https://github.com/googleapis/gapic-generator-python/issues/629)) ([52bfd6d](https://github.com/googleapis/gapic-generator-python/commit/52bfd6d5d5821b33e78e6b9867a3be2865cdbc74)) +* retriable exceptions are deterministically ordered in GAPICs ([#619](https://github.com/googleapis/gapic-generator-python/issues/619)) ([f7b1164](https://github.com/googleapis/gapic-generator-python/commit/f7b11640b74d8c64747b33783976d6e0ab9c61c4)) +* s/grpcAsync/grpc-async for gapic metadata ([#803](https://github.com/googleapis/gapic-generator-python/issues/803)) ([96f7864](https://github.com/googleapis/gapic-generator-python/commit/96f78640d90cf50c6b525924d14c6afe31874be6)) +* samplegen always produces sample dicts with "response" ([#914](https://github.com/googleapis/gapic-generator-python/issues/914)) ([0b168f2](https://github.com/googleapis/gapic-generator-python/commit/0b168f20f4cbf419131fcc512141fccca8186681)) +* **snippetgen:** don't create duplicate requests for required oneofs ([#1088](https://github.com/googleapis/gapic-generator-python/issues/1088)) 
([5531795](https://github.com/googleapis/gapic-generator-python/commit/55317956397370a91b1a06ecd476e55f58789807)) +* **snippetgen:** fix client streaming samples ([#1061](https://github.com/googleapis/gapic-generator-python/issues/1061)) ([64b9ad6](https://github.com/googleapis/gapic-generator-python/commit/64b9ad6e417a15cfbddf0e7a1b57036b8abfc829)) +* **snippetgen:** use f-strings in print statements ([#975](https://github.com/googleapis/gapic-generator-python/issues/975)) ([122e85c](https://github.com/googleapis/gapic-generator-python/commit/122e85c37ff6aa0a99f64361397eb3df5495a3b4)) +* sort subpackages in %namespace/%name/__init__.py ([#836](https://github.com/googleapis/gapic-generator-python/issues/836)) ([90cf882](https://github.com/googleapis/gapic-generator-python/commit/90cf882b20f430499f692e6b9b23497b3555e928)) +* stabilize order of query_params ([#742](https://github.com/googleapis/gapic-generator-python/issues/742)) ([2835ddb](https://github.com/googleapis/gapic-generator-python/commit/2835ddbe62b520e2e4c84f02810b1ac936c9cbb9)) +* stabilize the order of resource helper methods and ([#582](https://github.com/googleapis/gapic-generator-python/issues/582)) ([7d2adde](https://github.com/googleapis/gapic-generator-python/commit/7d2adde3a1ae81ac88ced822d6dfdfb26ffbfdf0)) +* suppress type error for fallback def of OptionalRetry ([#1065](https://github.com/googleapis/gapic-generator-python/issues/1065)) ([e47faa6](https://github.com/googleapis/gapic-generator-python/commit/e47faa6c59a1fadf7dfebc965c962aa05ca30f74)) +* syntax fix and test for multiple required fields ([#1105](https://github.com/googleapis/gapic-generator-python/issues/1105)) ([4e5fe2d](https://github.com/googleapis/gapic-generator-python/commit/4e5fe2db9d0d81929cc1559d3a134c9a38ae595c)) +* syntax fix for required_fields struct in rest transport ([#1103](https://github.com/googleapis/gapic-generator-python/issues/1103)) 
([3d7128c](https://github.com/googleapis/gapic-generator-python/commit/3d7128ce8f55523b9aff2e44e2c000450e712ac2)) +* Temporarily define a fixed testing event loop ([#493](https://github.com/googleapis/gapic-generator-python/issues/493)) ([2d22d91](https://github.com/googleapis/gapic-generator-python/commit/2d22d919bc8c08e03f501ff2f23152b761467c80)) +* temporarily disable code coverage in showcase_unit tests ([#925](https://github.com/googleapis/gapic-generator-python/issues/925)) ([0dfac03](https://github.com/googleapis/gapic-generator-python/commit/0dfac03bd3ef8c12b33e6c03e62eab3e7bf2cd69)) +* the common resources are not targets for lookup ([#650](https://github.com/googleapis/gapic-generator-python/issues/650)) ([8e1b384](https://github.com/googleapis/gapic-generator-python/commit/8e1b384e812ef519c421c8c288d5118961d8b4cf)) +* timeouts are handled by rest clients, retries silently ignored ([#976](https://github.com/googleapis/gapic-generator-python/issues/976)) ([a62463c](https://github.com/googleapis/gapic-generator-python/commit/a62463cadee0cdaf861e93998faa27e6a82adab4)) +* tweak oneof detection ([#505](https://github.com/googleapis/gapic-generator-python/issues/505)) ([1632e25](https://github.com/googleapis/gapic-generator-python/commit/1632e250cfc01a17ccad128c3e065008b334473a)) +* unignore 'google.api_core' imports ([#1066](https://github.com/googleapis/gapic-generator-python/issues/1066)) ([13f764c](https://github.com/googleapis/gapic-generator-python/commit/13f764c6513b91e7143a4a4a0bcc661cd19be0d8)) +* unknown resources do not cause a generator crash ([#675](https://github.com/googleapis/gapic-generator-python/issues/675)) ([2d23d7d](https://github.com/googleapis/gapic-generator-python/commit/2d23d7d202099ccf145c01aeb9a03ae46b4e1b00)) +* update dependencies ([#393](https://github.com/googleapis/gapic-generator-python/issues/393)) ([161e486](https://github.com/googleapis/gapic-generator-python/commit/161e48613934058780f82e15f71cbc9f574fdf65)) +* Update 
gapic-generator-python to gracefully handle internal google inconsistencies ([#721](https://github.com/googleapis/gapic-generator-python/issues/721)) ([b984295](https://github.com/googleapis/gapic-generator-python/commit/b9842952433924a1d8de4ef9cc3ea9e7fa91c01a)) +* update GOOGLE_API_USE_MTLS value ([#453](https://github.com/googleapis/gapic-generator-python/issues/453)) ([7449ad5](https://github.com/googleapis/gapic-generator-python/commit/7449ad5aad4a1fbbf9ca3796e097512fc80991e3)) +* update GOOGLE_API_USE_MTLS values ([#449](https://github.com/googleapis/gapic-generator-python/issues/449)) ([b737ea0](https://github.com/googleapis/gapic-generator-python/commit/b737ea09f004fbd0f160d641ec9f14a6c6c98715)) +* Update module alias to resolve naming conflict ([#820](https://github.com/googleapis/gapic-generator-python/issues/820)) ([f5e9f36](https://github.com/googleapis/gapic-generator-python/commit/f5e9f367ec6a72b4272f559a93f6fbb3d7e54b8b)), closes [#819](https://github.com/googleapis/gapic-generator-python/issues/819) +* update paging implementation to handle unconventional pagination ([#750](https://github.com/googleapis/gapic-generator-python/issues/750)) ([eaac3e6](https://github.com/googleapis/gapic-generator-python/commit/eaac3e69d366b610ae7551d94d4f546819e24bc2)) +* update protobuf version [gapic-generator-python] ([#696](https://github.com/googleapis/gapic-generator-python/issues/696)) ([ea3e519](https://github.com/googleapis/gapic-generator-python/commit/ea3e5198862881f5b142638df6ea604654f81f82)) +* updating testing, rest-only generation, & minor bug-fixes ([#716](https://github.com/googleapis/gapic-generator-python/issues/716)) ([56c31de](https://github.com/googleapis/gapic-generator-python/commit/56c31de4a9f661e3d69b52e19c9a28dddfe9d7dc)) +* use (new) typing for 'gapic_v1.method.DEFAULT' ([#1032](https://github.com/googleapis/gapic-generator-python/issues/1032)) 
([d85dfad](https://github.com/googleapis/gapic-generator-python/commit/d85dfadc180e5f218ad582a306c1c441a6c668db)) +* use context manager for mtls env var ([#548](https://github.com/googleapis/gapic-generator-python/issues/548)) ([d19e180](https://github.com/googleapis/gapic-generator-python/commit/d19e1808df9cd2884ae7a449977a479b4829bc1d)) +* use correct retry deadline in publisher methods ([#814](https://github.com/googleapis/gapic-generator-python/issues/814)) ([92a2cfc](https://github.com/googleapis/gapic-generator-python/commit/92a2cfc47b24c4b1a041d5bbb944d69a67a962a2)) +* use correct typing for retries / operations_client ([#1026](https://github.com/googleapis/gapic-generator-python/issues/1026)) ([acb3ea8](https://github.com/googleapis/gapic-generator-python/commit/acb3ea83becf6bf85c142739dede556cae2cebae)) + + +### Performance Improvements + +* collisions don't contain reserved names by default ([#684](https://github.com/googleapis/gapic-generator-python/issues/684)) ([2ec6ea6](https://github.com/googleapis/gapic-generator-python/commit/2ec6ea6835256c0d7b252e035cf4eac1ff442647)) +* reduce unnecessary copies, optimize Address comparison ([#855](https://github.com/googleapis/gapic-generator-python/issues/855)) ([e843540](https://github.com/googleapis/gapic-generator-python/commit/e8435400257707458e83424019c9b1a16fac9a99)) + + +### Miscellaneous Chores + +* add release-please ([#452](https://github.com/googleapis/gapic-generator-python/issues/452)) ([6b35758](https://github.com/googleapis/gapic-generator-python/commit/6b357580bfdc97497a054f26f4063bd766835665)) + ### [0.59.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.0...v0.59.1) (2022-01-10) From 7e86e7e7e31998fef1b5d5be5121e749e39f2f44 Mon Sep 17 00:00:00 2001 From: Jeff Ching Date: Wed, 19 Jan 2022 13:40:48 -0800 Subject: [PATCH 0698/1339] Revert "chore(master): release 0.39.1 (#1133)" (#1136) This reverts commit ea6cfe6d6a4276894dba9b4a2efe458df86a08a0. 
--- packages/gapic-generator/CHANGELOG.md | 203 -------------------------- 1 file changed, 203 deletions(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8d0ea31d5790..2d95536ac079 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,209 +1,6 @@ # Changelog -### [0.39.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.1...v0.39.1) (2022-01-18) - - -### Features - -* add 'from_service_account_info' factory to clients ([#706](https://github.com/googleapis/gapic-generator-python/issues/706)) ([94d5f0c](https://github.com/googleapis/gapic-generator-python/commit/94d5f0c11b8041cbae8e4a89bb504d6c6e200a95)), closes [#705](https://github.com/googleapis/gapic-generator-python/issues/705) -* add `credentials_file` and `scopes` via `client_options` ([#461](https://github.com/googleapis/gapic-generator-python/issues/461)) ([b5e1b1e](https://github.com/googleapis/gapic-generator-python/commit/b5e1b1e8991159dc176da889e9bdf12e3eebdb1e)) -* Add `x-goog-api-client` header to rest clients ([#888](https://github.com/googleapis/gapic-generator-python/issues/888)) ([2d1d3ae](https://github.com/googleapis/gapic-generator-python/commit/2d1d3ae135a75bbfff13df7703de5d0dad44695c)) -* add async samples ([#861](https://github.com/googleapis/gapic-generator-python/issues/861)) ([e385ffd](https://github.com/googleapis/gapic-generator-python/commit/e385ffd7f012c6a38c9fcd7c5f36ce090311032b)) -* add autogenerated snippets ([#845](https://github.com/googleapis/gapic-generator-python/issues/845)) ([abdf5ec](https://github.com/googleapis/gapic-generator-python/commit/abdf5ec00261e5500dbdd190c23b0b2b05836799)) -* add bazel support for gapic metadata ([#811](https://github.com/googleapis/gapic-generator-python/issues/811)) ([7ced24a](https://github.com/googleapis/gapic-generator-python/commit/7ced24a0b20cb6505587b946c03b1b038eef4b4a)) -* add flag for gapic metadata 
([#795](https://github.com/googleapis/gapic-generator-python/issues/795)) ([9cd7664](https://github.com/googleapis/gapic-generator-python/commit/9cd7664141835edcd8970629d9cf3abe4b7fd7c4)) -* add fragment tests ([#1056](https://github.com/googleapis/gapic-generator-python/issues/1056)) ([9d9b33d](https://github.com/googleapis/gapic-generator-python/commit/9d9b33dadf587a6d0b09031edeea597d6d2eae62)) -* add gapic metadata file ([#781](https://github.com/googleapis/gapic-generator-python/issues/781)) ([5dd8fcc](https://github.com/googleapis/gapic-generator-python/commit/5dd8fccf6b4da57edef0347beb07102634daa992)) -* add GOOGLE_API_USE_MTLS support ([#420](https://github.com/googleapis/gapic-generator-python/issues/420)) ([41fa725](https://github.com/googleapis/gapic-generator-python/commit/41fa725a304792b3470d2be079e59f577387e5bb)) -* add iam methods to templates ([#545](https://github.com/googleapis/gapic-generator-python/issues/545)) ([3f42c3c](https://github.com/googleapis/gapic-generator-python/commit/3f42c3cf8aae432a9bda0953fbabd7f0c8d774de)) -* add mtls feature to rest transport ([#731](https://github.com/googleapis/gapic-generator-python/issues/731)) ([524dbab](https://github.com/googleapis/gapic-generator-python/commit/524dbab16d248198ca10a08ecede4600fd36cefc)) -* add mTLS to ads template ([#384](https://github.com/googleapis/gapic-generator-python/issues/384)) ([1e76096](https://github.com/googleapis/gapic-generator-python/commit/1e760960df4e80e4f8d50291b872341e80eaa7f1)) -* add proper handling of query/path/body parameters for rest transport ([#702](https://github.com/googleapis/gapic-generator-python/issues/702)) ([6b2de5d](https://github.com/googleapis/gapic-generator-python/commit/6b2de5dd9fbf15e6b0a42b428b01eb03f1a3820a)) -* add rest transport generation for clients with optional transport flag ([#688](https://github.com/googleapis/gapic-generator-python/issues/688)) 
([af59c2c](https://github.com/googleapis/gapic-generator-python/commit/af59c2c3c3d6b7e1f626c3fbc2c03f99ca31b4a4)) -* add retry config passed to bazel rule ([#526](https://github.com/googleapis/gapic-generator-python/issues/526)) ([9e96151](https://github.com/googleapis/gapic-generator-python/commit/9e96151d702786912fcf033f7535efad8ae754ee)) -* add snippet index ([#1121](https://github.com/googleapis/gapic-generator-python/issues/1121)) ([55d2bc6](https://github.com/googleapis/gapic-generator-python/commit/55d2bc6580e5db0f837de1b245533a8f1f2e9beb)) -* add support for common resource paths ([#622](https://github.com/googleapis/gapic-generator-python/issues/622)) ([15a7fde](https://github.com/googleapis/gapic-generator-python/commit/15a7fdeb966cb64a742b6305d2c71dd3d485d0f9)) -* add support for context manager in client ([#987](https://github.com/googleapis/gapic-generator-python/issues/987)) ([4edabcf](https://github.com/googleapis/gapic-generator-python/commit/4edabcf6791cfb0874a951b695b39672036760d4)) -* add support for long-running operations with rest transport. 
([#1094](https://github.com/googleapis/gapic-generator-python/issues/1094)) ([e89fd23](https://github.com/googleapis/gapic-generator-python/commit/e89fd23609625c5aa49acd6c6ee67f87fce324fd)) -* allow user-provided client info ([#573](https://github.com/googleapis/gapic-generator-python/issues/573)) ([b2e5274](https://github.com/googleapis/gapic-generator-python/commit/b2e52746c7ce4b983482fb776224b30767978c79)) -* allow warehouse name to be customized ([#717](https://github.com/googleapis/gapic-generator-python/issues/717)) ([7c185e8](https://github.com/googleapis/gapic-generator-python/commit/7c185e87cb4252b1f99ed121515814595f9492c4)), closes [#605](https://github.com/googleapis/gapic-generator-python/issues/605) -* bypass request copying in method calls ([#557](https://github.com/googleapis/gapic-generator-python/issues/557)) ([3a23143](https://github.com/googleapis/gapic-generator-python/commit/3a2314318de229a3353c984a8cb2766ae95cc968)) -* **dev:** Add Git pre-commit hooks [gapic-generator-python] ([#908](https://github.com/googleapis/gapic-generator-python/issues/908)) ([298db39](https://github.com/googleapis/gapic-generator-python/commit/298db39064e29de764537f25dc38f9e5ac301390)) -* enable self signed jwt for grpc ([#920](https://github.com/googleapis/gapic-generator-python/issues/920)) ([da119c7](https://github.com/googleapis/gapic-generator-python/commit/da119c72c82d04e168c4b41e5bf910a0c1609ce3)) -* enable self signed jwt for http ([#1000](https://github.com/googleapis/gapic-generator-python/issues/1000)) ([5f87973](https://github.com/googleapis/gapic-generator-python/commit/5f8797396a2477b772b7bfb827499db32e28710e)) -* file_level and indirectly used resources generate helper methods ([#642](https://github.com/googleapis/gapic-generator-python/issues/642)) ([42e224c](https://github.com/googleapis/gapic-generator-python/commit/42e224cb100f6e2aa9370bc6a5179d62979b5c4d)), closes [#637](https://github.com/googleapis/gapic-generator-python/issues/637) -* forward 
compatible diregapic LRO support ([#1085](https://github.com/googleapis/gapic-generator-python/issues/1085)) ([aa7f4d5](https://github.com/googleapis/gapic-generator-python/commit/aa7f4d568f7f43738ab3489fc84ce6bc5d6bda18)) -* generate code snippets by default ([#1044](https://github.com/googleapis/gapic-generator-python/issues/1044)) ([e46f443](https://github.com/googleapis/gapic-generator-python/commit/e46f443dbeffe16b63f97668801b06189769e972)) -* generate snippet metadata ([#1129](https://github.com/googleapis/gapic-generator-python/issues/1129)) ([9e46031](https://github.com/googleapis/gapic-generator-python/commit/9e46031d01edc3a461140fe3b29d8d400f5ddf86)) -* implement grpc transcode for rest transport and complete generated tests ([#999](https://github.com/googleapis/gapic-generator-python/issues/999)) ([ccdd17d](https://github.com/googleapis/gapic-generator-python/commit/ccdd17d6133274a34dd727fab0576e6c63238833)) -* implement grpc transcode for rest transport and complete generated tests. 
([ccdd17d](https://github.com/googleapis/gapic-generator-python/commit/ccdd17d6133274a34dd727fab0576e6c63238833)) -* Initial draft of GAPIC Bazel Extensions gapic-generator-python ([#342](https://github.com/googleapis/gapic-generator-python/issues/342)) ([cc7ab0b](https://github.com/googleapis/gapic-generator-python/commit/cc7ab0ba59634771daf1d4034f3f0b25872da9df)) -* Make GAPIC Bazel rules production ready ([#402](https://github.com/googleapis/gapic-generator-python/issues/402)) ([d18ed41](https://github.com/googleapis/gapic-generator-python/commit/d18ed416240a064fffac0fd7915b61f6415fe140)) -* port REST transport to Ads templates ([#1003](https://github.com/googleapis/gapic-generator-python/issues/1003)) ([d2ab9da](https://github.com/googleapis/gapic-generator-python/commit/d2ab9da2d0d9353f7289be1b1b1b7492d6d98066)) -* precache wrapped rpcs ([#553](https://github.com/googleapis/gapic-generator-python/issues/553)) ([2f2fb5d](https://github.com/googleapis/gapic-generator-python/commit/2f2fb5d3d9472a79c80be6d052129d07d2bbb835)) -* provide AsyncIO support for generated code ([#365](https://github.com/googleapis/gapic-generator-python/issues/365)) ([305ed34](https://github.com/googleapis/gapic-generator-python/commit/305ed34cfc1607c990f2f88b27f53358da25c366)) -* Raise GoogleAPICallError on REST response errors ([#891](https://github.com/googleapis/gapic-generator-python/issues/891)) ([edb8c63](https://github.com/googleapis/gapic-generator-python/commit/edb8c63e8a331f5e08ea19202d8de42de7051299)) -* **snippetgen:** generate mock input for required fields ([#941](https://github.com/googleapis/gapic-generator-python/issues/941)) ([b2149da](https://github.com/googleapis/gapic-generator-python/commit/b2149da5e6873e1f71871bfecd899bb9aa0b6439)) -* **snippetgen:** turn resource path strings into f-strings ([#1012](https://github.com/googleapis/gapic-generator-python/issues/1012)) 
([a110e1d](https://github.com/googleapis/gapic-generator-python/commit/a110e1d8387ea37b85ab0621bacd30da175fe85b)) -* Support alternative http bindings in the gapic schema. ([#993](https://github.com/googleapis/gapic-generator-python/issues/993)) ([041a726](https://github.com/googleapis/gapic-generator-python/commit/041a726b818cd67812d689c23757f31ec9964d66)) -* Support field presence for query parameters in REST clients ([#866](https://github.com/googleapis/gapic-generator-python/issues/866)) ([5339db1](https://github.com/googleapis/gapic-generator-python/commit/5339db1308326d91a05a34d38e31cf91b79a9225)) -* support for proto3 optional fields ([#519](https://github.com/googleapis/gapic-generator-python/issues/519)) ([1aa729c](https://github.com/googleapis/gapic-generator-python/commit/1aa729cc8d2f7f0de25c8348fdbf9d6dd96f5847)) -* support mtls env variables ([#589](https://github.com/googleapis/gapic-generator-python/issues/589)) ([b19026d](https://github.com/googleapis/gapic-generator-python/commit/b19026d9cca26ebd1cd0c3e73f738c4d1870d987)) -* support protobuf method deprecation option [gapic-generator-python] ([#875](https://github.com/googleapis/gapic-generator-python/issues/875)) ([5a5a839](https://github.com/googleapis/gapic-generator-python/commit/5a5a839b99d78ec5a5c52452e57c289b55ad1db5)) -* support quota project override via client options ([#496](https://github.com/googleapis/gapic-generator-python/issues/496)) ([bbc6b36](https://github.com/googleapis/gapic-generator-python/commit/bbc6b367f50526312e8320f0fc668ef88f230dbd)) -* support self-signed JWT flow for service accounts ([#774](https://github.com/googleapis/gapic-generator-python/issues/774)) ([89d6f35](https://github.com/googleapis/gapic-generator-python/commit/89d6f35c54b0a9b81c9b5f580d2e9eb87352ed93)) -* **tests:** Add integration test framework, goldens for 4 APIs [gapic-generator-python] ([#905](https://github.com/googleapis/gapic-generator-python/issues/905)) 
([48db1e6](https://github.com/googleapis/gapic-generator-python/commit/48db1e644badc2180253e11d9a3d3657e8f9aeed)) -* update templates to permit enum aliases ([#809](https://github.com/googleapis/gapic-generator-python/issues/809)) ([2e7ea11](https://github.com/googleapis/gapic-generator-python/commit/2e7ea11f80210459106f9780e5f013e2a0381d29)) - - -### Bug Fixes - -* 'id' should not be a reserved name ([#602](https://github.com/googleapis/gapic-generator-python/issues/602)) ([c43c574](https://github.com/googleapis/gapic-generator-python/commit/c43c5740db099be19c5f6e52b3a917a631003411)) -* add 'dict' type annotation to 'request' ([#966](https://github.com/googleapis/gapic-generator-python/issues/966)) ([49205d9](https://github.com/googleapis/gapic-generator-python/commit/49205d99dd440690b838c8eb3f6a695f35b061c2)) -* add 'dict' type annotation to 'request' for async_client ([#1051](https://github.com/googleapis/gapic-generator-python/issues/1051)) ([08cc2c4](https://github.com/googleapis/gapic-generator-python/commit/08cc2c4c85297759892782e307bcaa63dff41212)) -* add 'type: ignore' comment for 'google.auth' ([#579](https://github.com/googleapis/gapic-generator-python/issues/579)) ([af17501](https://github.com/googleapis/gapic-generator-python/commit/af17501d258c7c37fc1081fcad5fe18f7629f4c3)) -* add a separate DEFAULT_CLIENT_INFO for rest clients ([#988](https://github.com/googleapis/gapic-generator-python/issues/988)) ([22ac400](https://github.com/googleapis/gapic-generator-python/commit/22ac40097ab50bb2d3a7f1a2d35d659c391e0927)) -* add additional reserved names for disambiguation ([#1114](https://github.com/googleapis/gapic-generator-python/issues/1114)) ([1cffd8d](https://github.com/googleapis/gapic-generator-python/commit/1cffd8d99936cd10649faf05e0288b693e718f81)) -* add async client to %name_%version/__init__.py ([#859](https://github.com/googleapis/gapic-generator-python/issues/859)) 
([391fdb8](https://github.com/googleapis/gapic-generator-python/commit/391fdb84b13c5628c21d81ad311c689da8971f6a)) -* add certain raw imports to RESERVED_NAMES ([#824](https://github.com/googleapis/gapic-generator-python/issues/824)) ([04bd8aa](https://github.com/googleapis/gapic-generator-python/commit/04bd8aaf0fc2c2c0615105cab39dc33266b66775)) -* add enums to types/__init__.py ([#695](https://github.com/googleapis/gapic-generator-python/issues/695)) ([e1d4a4a](https://github.com/googleapis/gapic-generator-python/commit/e1d4a4ae768a631f6e6dc28f2acfde8be8dc4a8f)) -* add field headers for other http verbs ([#443](https://github.com/googleapis/gapic-generator-python/issues/443)) ([f8e9712](https://github.com/googleapis/gapic-generator-python/commit/f8e971280c23e1dbde4d51f9a64973bf439e8ab3)), closes [#401](https://github.com/googleapis/gapic-generator-python/issues/401) -* add google.api_core.retry import to base.py ([#555](https://github.com/googleapis/gapic-generator-python/issues/555)) ([1d08e60](https://github.com/googleapis/gapic-generator-python/commit/1d08e60cea4c5b3fa2555a4952161b0115d686f2)) -* add name and version info to fixup script name ([#490](https://github.com/googleapis/gapic-generator-python/issues/490)) ([16fe7e7](https://github.com/googleapis/gapic-generator-python/commit/16fe7e7885b7e17bf16b4f1f8f8844b9f5d0bdfe)) -* add oneof fields to generated protoplus init ([#485](https://github.com/googleapis/gapic-generator-python/issues/485)) ([be5a847](https://github.com/googleapis/gapic-generator-python/commit/be5a847aeff6687679f7bca46308362d588f5c77)), closes [#484](https://github.com/googleapis/gapic-generator-python/issues/484) -* add system tests and cert/key ([#394](https://github.com/googleapis/gapic-generator-python/issues/394)) ([87fe58f](https://github.com/googleapis/gapic-generator-python/commit/87fe58fef32d4aa16b0df7c132bb056e199baec7)) -* Adjust Field Names in URI Templates 
([#1041](https://github.com/googleapis/gapic-generator-python/issues/1041)) ([06cd7b6](https://github.com/googleapis/gapic-generator-python/commit/06cd7b66f0f303b066f7f1f510332ae19aa9de8e)) -* also add the async client to __all__ ([#869](https://github.com/googleapis/gapic-generator-python/issues/869)) ([09c90fa](https://github.com/googleapis/gapic-generator-python/commit/09c90fa48515cb7da1d0ebf1d93a0d49fc6448e8)) -* always use dataclasses 0.6 ([#481](https://github.com/googleapis/gapic-generator-python/issues/481)) ([066d04e](https://github.com/googleapis/gapic-generator-python/commit/066d04e7d53301024106f244280502f16af46b79)) -* **bazel:** Re-enable Python µgen integration tests post monolith rule removal ([#926](https://github.com/googleapis/gapic-generator-python/issues/926)) ([13a6b3a](https://github.com/googleapis/gapic-generator-python/commit/13a6b3aed35b5af85aea047922aa219258460a58)) -* **bazel:** Remove monolith imports from Python µgen Bazel rules ([#923](https://github.com/googleapis/gapic-generator-python/issues/923)) ([4a2afa7](https://github.com/googleapis/gapic-generator-python/commit/4a2afa78455817e7e6c058d21857326867fe3f21)) -* body encoding for rest transport ([#768](https://github.com/googleapis/gapic-generator-python/issues/768)) ([cc55a18](https://github.com/googleapis/gapic-generator-python/commit/cc55a182b878d78f92aba259c067d47ab1d01e5b)) -* Check for default value presence for non-optional fields in REST ([#868](https://github.com/googleapis/gapic-generator-python/issues/868)) ([5748001](https://github.com/googleapis/gapic-generator-python/commit/57480019c3e77c6b3a85bdaf8441334170b318e8)) -* consistently use _pb2 identifier ([#883](https://github.com/googleapis/gapic-generator-python/issues/883)) ([d789c84](https://github.com/googleapis/gapic-generator-python/commit/d789c84d0d686bdb2d88179041b4c04cc32a3e66)) -* consolidate dependencies in setup.py ([#422](https://github.com/googleapis/gapic-generator-python/issues/422)) 
([10403bb](https://github.com/googleapis/gapic-generator-python/commit/10403bb034954afd2952bb1cb3701d271bd6ecd3)) -* convert datetime back to proto for unit tests ([#511](https://github.com/googleapis/gapic-generator-python/issues/511)) ([e1c787d](https://github.com/googleapis/gapic-generator-python/commit/e1c787d3b6fe09dc0b4e00f07a7bd77fb5f1e6a3)) -* corner case fix for empty request generated test ([#801](https://github.com/googleapis/gapic-generator-python/issues/801)) ([039dc71](https://github.com/googleapis/gapic-generator-python/commit/039dc713fed291142058741e1138da5a4bec542f)) -* correctly instantiates client ([#338](https://github.com/googleapis/gapic-generator-python/issues/338)) ([7a46524](https://github.com/googleapis/gapic-generator-python/commit/7a46524bc7f2eabd7dab27313bb752c27afc358b)), closes [#337](https://github.com/googleapis/gapic-generator-python/issues/337) -* disable always_use_jwt_access ([#939](https://github.com/googleapis/gapic-generator-python/issues/939)) ([1302352](https://github.com/googleapis/gapic-generator-python/commit/130235220849987df572c1840735b3c199b85dfc)) -* don't enable snippetgen by default ([#1078](https://github.com/googleapis/gapic-generator-python/issues/1078)) ([8bdb709](https://github.com/googleapis/gapic-generator-python/commit/8bdb70931a9ecb1c89fda9608697b0762770bc12)) -* don't use integer for enums in json encoding ([#761](https://github.com/googleapis/gapic-generator-python/issues/761)) ([6d37a73](https://github.com/googleapis/gapic-generator-python/commit/6d37a7388995b90428ee6293bcce5d48cd9a48f8)) -* enable GAPIC metadata generation ([#843](https://github.com/googleapis/gapic-generator-python/issues/843)) ([697816c](https://github.com/googleapis/gapic-generator-python/commit/697816ce7d5b201d6ced85fadd89f9140da67b37)) -* enable self signed jwt for grpc ([#958](https://github.com/googleapis/gapic-generator-python/issues/958)) 
([af02a9c](https://github.com/googleapis/gapic-generator-python/commit/af02a9cae522ff2cdc8e97cfffe2ba2bb84d6b6a)) -* ensure rest unit tests have complete coverage ([#1098](https://github.com/googleapis/gapic-generator-python/issues/1098)) ([0705d9c](https://github.com/googleapis/gapic-generator-python/commit/0705d9c5dbbea793867551e64991be37d8339c6b)) -* exclude 'input' from reserved names list ([#788](https://github.com/googleapis/gapic-generator-python/issues/788)) ([da2ff71](https://github.com/googleapis/gapic-generator-python/commit/da2ff717b82357359baeeafad9a3e48a70e194cb)) -* expose ssl credentials from transport ([#677](https://github.com/googleapis/gapic-generator-python/issues/677)) ([da0ee3e](https://github.com/googleapis/gapic-generator-python/commit/da0ee3eab4f80bf3d70fa5e06a2dcef7e1d4d22e)) -* expose transport property for clients ([#645](https://github.com/googleapis/gapic-generator-python/issues/645)) ([13cddda](https://github.com/googleapis/gapic-generator-python/commit/13cddda0623bd4d24ae7973752b1be0eaa40523a)), closes [#640](https://github.com/googleapis/gapic-generator-python/issues/640) -* fix case for expected field names in required fields test. 
([#1107](https://github.com/googleapis/gapic-generator-python/issues/1107)) ([6a593f9](https://github.com/googleapis/gapic-generator-python/commit/6a593f9807141aaf6c13a8843804e9fa9b300c91)) -* Fix client template type hints ([#593](https://github.com/googleapis/gapic-generator-python/issues/593)) ([93f34e8](https://github.com/googleapis/gapic-generator-python/commit/93f34e8a2a351a24a49424c1722baec2893dc764)) -* fix datetime comparison unit tests ([#898](https://github.com/googleapis/gapic-generator-python/issues/898)) ([81932a2](https://github.com/googleapis/gapic-generator-python/commit/81932a2b71e6ca5f424ddc5c52933ad1d452583a)) -* fix docstring for first attribute of protos ([#1004](https://github.com/googleapis/gapic-generator-python/issues/1004)) ([383f655](https://github.com/googleapis/gapic-generator-python/commit/383f6555a1d850889b2aa74be28c8d06465399e5)) -* fix incorrectly referenced exceptions, add missing port to tests ([#873](https://github.com/googleapis/gapic-generator-python/issues/873)) ([40078c4](https://github.com/googleapis/gapic-generator-python/commit/40078c46b21a0dfa489d4cd80ed7d95bb542f3c3)), closes [#872](https://github.com/googleapis/gapic-generator-python/issues/872) -* fix missing .coveragerc and the broken bazel build ([#723](https://github.com/googleapis/gapic-generator-python/issues/723)) ([7f8235f](https://github.com/googleapis/gapic-generator-python/commit/7f8235f6dfbd309a879895701aeb5e73c6425483)) -* fix missing http schema (http/https) for REST clients ([#1063](https://github.com/googleapis/gapic-generator-python/issues/1063)) ([e3aa7a0](https://github.com/googleapis/gapic-generator-python/commit/e3aa7a0b23bc4bfd5170753f74bdeac219902d1a)) -* fix mTLS logic ([#374](https://github.com/googleapis/gapic-generator-python/issues/374)) ([e3c079b](https://github.com/googleapis/gapic-generator-python/commit/e3c079bab907ac183628d8805e6966af9ca2083f)) -* Fix namespace packages conflict issue 
([#757](https://github.com/googleapis/gapic-generator-python/issues/757)) ([8035662](https://github.com/googleapis/gapic-generator-python/commit/8035662bdcfbdffd1c294c5d28479733358407ca)) -* fix resource path args for paths with =** ([#1089](https://github.com/googleapis/gapic-generator-python/issues/1089)) ([309cc66](https://github.com/googleapis/gapic-generator-python/commit/309cc66e880e07940866864b03c744310ef56762)) -* Fix rest transport logic ([#1039](https://github.com/googleapis/gapic-generator-python/issues/1039)) ([50d61af](https://github.com/googleapis/gapic-generator-python/commit/50d61afd30b021835fe898e41b783f4d04acff09)) -* fix rest transport tests ([#772](https://github.com/googleapis/gapic-generator-python/issues/772)) ([ce110a3](https://github.com/googleapis/gapic-generator-python/commit/ce110a35894aa1a838649f9782294b3b8446be5c)) -* fix rest transport unit test and required query prams handling ([#951](https://github.com/googleapis/gapic-generator-python/issues/951)) ([b793017](https://github.com/googleapis/gapic-generator-python/commit/b7930177da9a8be556bf6485febcc0a9bdef897b)) -* fix rest transport unit test template ([#741](https://github.com/googleapis/gapic-generator-python/issues/741)) ([54b9806](https://github.com/googleapis/gapic-generator-python/commit/54b98060f881c8f0424c7e146488d3adc19fec7a)) -* fix rest unit test ([#1074](https://github.com/googleapis/gapic-generator-python/issues/1074)) ([3b2918e](https://github.com/googleapis/gapic-generator-python/commit/3b2918ecaeb90229f22834438dc31755498ee2d0)) -* fix sphinx identifiers ([#714](https://github.com/googleapis/gapic-generator-python/issues/714)) ([39be474](https://github.com/googleapis/gapic-generator-python/commit/39be474b4419dfa521ef51927fd36dbf257d68e3)) -* fix style-check error ([#416](https://github.com/googleapis/gapic-generator-python/issues/416)) ([93ff10b](https://github.com/googleapis/gapic-generator-python/commit/93ff10b9add58dbc045c6656715bf67409a51404)) -* fix syntax errors 
and failing unit tests ([#849](https://github.com/googleapis/gapic-generator-python/issues/849)) ([9046261](https://github.com/googleapis/gapic-generator-python/commit/90462617e3e2b90eb8684210b6a70e890bdc0d96)), closes [#848](https://github.com/googleapis/gapic-generator-python/issues/848) -* fix syntax for Deprecationwarning ([#942](https://github.com/googleapis/gapic-generator-python/issues/942)) ([82dbddb](https://github.com/googleapis/gapic-generator-python/commit/82dbddb6a9caf1227c4b335345f365dd01025794)) -* fix tests generation logic ([#1049](https://github.com/googleapis/gapic-generator-python/issues/1049)) ([8f213ad](https://github.com/googleapis/gapic-generator-python/commit/8f213add4cb02366bb370ef46a686c6f0c37a575)) -* fix types on server and bidi streaming callables ([#641](https://github.com/googleapis/gapic-generator-python/issues/641)) ([d92c202](https://github.com/googleapis/gapic-generator-python/commit/d92c2029398c969ebf2a68a5bf77c5eb4fff7b31)) -* fix typo attribue -> attribute ([#627](https://github.com/googleapis/gapic-generator-python/issues/627)) ([729146f](https://github.com/googleapis/gapic-generator-python/commit/729146fd53edf1e4ae4d3c9a90640a7520b1ba9d)), closes [#626](https://github.com/googleapis/gapic-generator-python/issues/626) -* fix wrong scopes for self signed jwt ([#935](https://github.com/googleapis/gapic-generator-python/issues/935)) ([e033acd](https://github.com/googleapis/gapic-generator-python/commit/e033acd44763f7cf65eabb6b35f66093022b1bcb)) -* fix wrong unit test ([#502](https://github.com/googleapis/gapic-generator-python/issues/502)) ([c95bd45](https://github.com/googleapis/gapic-generator-python/commit/c95bd45506df7973758b9e1249586597d8214985)) -* generated unit tests live in the 'tests/gapic' subdir ([#456](https://github.com/googleapis/gapic-generator-python/issues/456)) ([1ed7c9d](https://github.com/googleapis/gapic-generator-python/commit/1ed7c9d6fe9595c390387d72113d741ebf28538d)) -* handle message bodies 
([#1117](https://github.com/googleapis/gapic-generator-python/issues/1117)) ([36e3236](https://github.com/googleapis/gapic-generator-python/commit/36e3236b3832993331d8d99c10e72797a8851390)) -* handle repeated fields in method signatures ([#445](https://github.com/googleapis/gapic-generator-python/issues/445)) ([3aae799](https://github.com/googleapis/gapic-generator-python/commit/3aae799f62a1f5d3b0506d919cc6080ee417f14b)) -* handle required fields properly in query_params ([#1068](https://github.com/googleapis/gapic-generator-python/issues/1068)) ([0e379ca](https://github.com/googleapis/gapic-generator-python/commit/0e379ca6c0aee9d79d11a14074b7e9343e9e6af2)) -* ignore types for imports generated from 'google.api_core' ([#597](https://github.com/googleapis/gapic-generator-python/issues/597)) ([8440e09](https://github.com/googleapis/gapic-generator-python/commit/8440e09855d399d647b62238a9697e04ea4d0d41)) -* ignore unknown fields returned from server for REST ([#777](https://github.com/googleapis/gapic-generator-python/issues/777)) ([a70b078](https://github.com/googleapis/gapic-generator-python/commit/a70b0787f7e3d40642a4f68574f0cc493cc4e054)) -* import warnings when needed ([#930](https://github.com/googleapis/gapic-generator-python/issues/930)) ([d4270ae](https://github.com/googleapis/gapic-generator-python/commit/d4270ae5805f44ab8ee30bb11fe42a0da6d79755)) -* improper types in pagers generation ([#970](https://github.com/googleapis/gapic-generator-python/issues/970)) ([bba3eea](https://github.com/googleapis/gapic-generator-python/commit/bba3eea5d45fe57c0395ceef30402ad7880013d7)) -* install gcc by hand ([#571](https://github.com/googleapis/gapic-generator-python/issues/571)) ([e224a03](https://github.com/googleapis/gapic-generator-python/commit/e224a0365a2d3ed20d69cf4d1298a3f022f8da76)) -* leave a newline between field description and oneof line ([#1071](https://github.com/googleapis/gapic-generator-python/issues/1071)) 
([4d0e365](https://github.com/googleapis/gapic-generator-python/commit/4d0e36528a8eb23ea3893b0bbcca10b679867445)) -* lint issue ([#396](https://github.com/googleapis/gapic-generator-python/issues/396)) ([98c544d](https://github.com/googleapis/gapic-generator-python/commit/98c544da9c285a52f72d1f4e070a9bdf1e762078)) -* list oneofs in docstring ([#1030](https://github.com/googleapis/gapic-generator-python/issues/1030)) ([a0e25c8](https://github.com/googleapis/gapic-generator-python/commit/a0e25c8c00391b99a351e667eddc8b4fecad30d8)) -* make # after alpha/beta optional ([#540](https://github.com/googleapis/gapic-generator-python/issues/540)) ([f86a47b](https://github.com/googleapis/gapic-generator-python/commit/f86a47b6431e374ae1797061511b49fe6bf22daf)) -* Make gapic-generator-python compatible with protobuf 3.14.0 (packaged as native namespace package) ([#753](https://github.com/googleapis/gapic-generator-python/issues/753)) ([45212af](https://github.com/googleapis/gapic-generator-python/commit/45212afb9f523a416d86272798d71ce05dc292f0)) -* methods returning Operation w/o operation_info are now allowed. 
([#1047](https://github.com/googleapis/gapic-generator-python/issues/1047)) ([6b640af](https://github.com/googleapis/gapic-generator-python/commit/6b640afbd93ea8c861b902211dc34e188234d072)) -* minor typo in ads template ([#664](https://github.com/googleapis/gapic-generator-python/issues/664)) ([816f965](https://github.com/googleapis/gapic-generator-python/commit/816f965c8560bf65d8043bd67672c660a2b1300b)) -* module names can no longer collide with keywords or builtins ([#595](https://github.com/googleapis/gapic-generator-python/issues/595)) ([960d550](https://github.com/googleapis/gapic-generator-python/commit/960d550c4a8fd09b052cce785d76243a5d4525d7)) -* modules referenced in MapField message type are properly aliased ([#654](https://github.com/googleapis/gapic-generator-python/issues/654)) ([2c79349](https://github.com/googleapis/gapic-generator-python/commit/2c79349e7b89435bc45e499885f7b12ac0bc2d9f)), closes [#618](https://github.com/googleapis/gapic-generator-python/issues/618) -* more fixes for rest transport ([#1042](https://github.com/googleapis/gapic-generator-python/issues/1042)) ([13d5f77](https://github.com/googleapis/gapic-generator-python/commit/13d5f77f8b6d4ce1181b29f2335d7584783be753)) -* mypy 0.800 update errors ([#754](https://github.com/googleapis/gapic-generator-python/issues/754)) ([608275a](https://github.com/googleapis/gapic-generator-python/commit/608275aa923f495520dea8ebddb94a99f26e27a5)) -* non-string required fields provide correct values ([#1108](https://github.com/googleapis/gapic-generator-python/issues/1108)) ([bc5f729](https://github.com/googleapis/gapic-generator-python/commit/bc5f729cf777d30e1053e23a1d115460952478af)) -* numerous small fixes to allow bigtable-admin ([#660](https://github.com/googleapis/gapic-generator-python/issues/660)) ([09692c4](https://github.com/googleapis/gapic-generator-python/commit/09692c4e889ccde3b0ca31a5e8476c1679804beb)) -* only require dataclases if python<3.7 
([#475](https://github.com/googleapis/gapic-generator-python/issues/475)) ([9597695](https://github.com/googleapis/gapic-generator-python/commit/959769518ea47df383b23b6e48c5da148f69029e)) -* only set unset fields if they are query params ([#1130](https://github.com/googleapis/gapic-generator-python/issues/1130)) ([9ad98ca](https://github.com/googleapis/gapic-generator-python/commit/9ad98ca6833f1b280bf3c04c858f92276d59ffbe)) -* operation module is properly aliased if necessary ([#615](https://github.com/googleapis/gapic-generator-python/issues/615)) ([8f92fd9](https://github.com/googleapis/gapic-generator-python/commit/8f92fd9999286ef3f916119be78dbeb838a15550)) -* paged code and templates are no longer message centric ([#527](https://github.com/googleapis/gapic-generator-python/issues/527)) ([00ba77c](https://github.com/googleapis/gapic-generator-python/commit/00ba77c3d27ef9a0b8742db3660983b80a68c672)) -* pass metadata to pagers ([#470](https://github.com/googleapis/gapic-generator-python/issues/470)) ([c43c6d9](https://github.com/googleapis/gapic-generator-python/commit/c43c6d943fa99f202014bf4bba795df25d314a63)), closes [#469](https://github.com/googleapis/gapic-generator-python/issues/469) -* primitive repeated fields are now correctly auto paginated ([#517](https://github.com/googleapis/gapic-generator-python/issues/517)) ([61a2cc0](https://github.com/googleapis/gapic-generator-python/commit/61a2cc0d4c08064d442fd4d7aa4b1b9e56158eaa)) -* raise for rest transport http error ([#738](https://github.com/googleapis/gapic-generator-python/issues/738)) ([7d24f3d](https://github.com/googleapis/gapic-generator-python/commit/7d24f3d81499ad714e57c7c9562b842c09e49d20)) -* refactor mtls logic to standalone method ([#1123](https://github.com/googleapis/gapic-generator-python/issues/1123)) ([d528223](https://github.com/googleapis/gapic-generator-python/commit/d528223e3221487f86a3d82c92cd2e2cf04bec4a)) -* remove 'property' from reserved names 
([#613](https://github.com/googleapis/gapic-generator-python/issues/613)) ([8338a51](https://github.com/googleapis/gapic-generator-python/commit/8338a51a81f5f5b8ebacf68c8e46d3e1804d3f8b)) -* remove auth, policy, and options from the reserved names list ([#851](https://github.com/googleapis/gapic-generator-python/issues/851)) ([d3f31a0](https://github.com/googleapis/gapic-generator-python/commit/d3f31a0d33411b3248871ddbe51135e83b699a73)) -* remove client recv msg limit ([#704](https://github.com/googleapis/gapic-generator-python/issues/704)) ([80147ce](https://github.com/googleapis/gapic-generator-python/commit/80147ce177ce435dcb1b611181e80dc35f915293)) -* remove duplicate assignment of certain flattened, repeated fields ([#760](https://github.com/googleapis/gapic-generator-python/issues/760)) ([cdbc221](https://github.com/googleapis/gapic-generator-python/commit/cdbc22130a176e733c529f60a6b8b1d224e82e89)) -* remove duplicate field entries ([#786](https://github.com/googleapis/gapic-generator-python/issues/786)) ([9f4dfa4](https://github.com/googleapis/gapic-generator-python/commit/9f4dfa46cb6a67081563ce096452fedd9e35051d)) -* remove extra space before_pb_options ([#863](https://github.com/googleapis/gapic-generator-python/issues/863)) ([f0532e7](https://github.com/googleapis/gapic-generator-python/commit/f0532e7a88479aeb805c1509239008bdd19e9d85)) -* remove support for google-api-core<1.26.0 ([#893](https://github.com/googleapis/gapic-generator-python/issues/893)) ([ce558ac](https://github.com/googleapis/gapic-generator-python/commit/ce558acef9ec9c9bcc54243cddb708ef168c05f0)) -* remove typo from py_gapic.bzl ([#532](https://github.com/googleapis/gapic-generator-python/issues/532)) ([2975c2d](https://github.com/googleapis/gapic-generator-python/commit/2975c2d76e08b5ee5324730707707d9dd6ced8ae)) -* rename __init__.py to __init__.py.j2 ([#550](https://github.com/googleapis/gapic-generator-python/issues/550)) 
([71a7062](https://github.com/googleapis/gapic-generator-python/commit/71a7062b918136b916cc5bfc7dbdf64f870edf6a)) -* rename local var page in generated tests ([#577](https://github.com/googleapis/gapic-generator-python/issues/577)) ([075f9e8](https://github.com/googleapis/gapic-generator-python/commit/075f9e8d50b02ffb5f2f042b84f27a9f634636e2)) -* rendering mock values for recursive messages no longer crashes ([#587](https://github.com/googleapis/gapic-generator-python/issues/587)) ([c2a83e5](https://github.com/googleapis/gapic-generator-python/commit/c2a83e561bf46b4af21e9008c7d67a1c609d7d06)) -* require min google-api-core version of 1.21.0 ([#506](https://github.com/googleapis/gapic-generator-python/issues/506)) ([bf787bd](https://github.com/googleapis/gapic-generator-python/commit/bf787bd36198288d6a40e45e44e43f0098cfec7c)) -* resource messages in method response types generate helpers ([#629](https://github.com/googleapis/gapic-generator-python/issues/629)) ([52bfd6d](https://github.com/googleapis/gapic-generator-python/commit/52bfd6d5d5821b33e78e6b9867a3be2865cdbc74)) -* retriable exceptions are deterministically ordered in GAPICs ([#619](https://github.com/googleapis/gapic-generator-python/issues/619)) ([f7b1164](https://github.com/googleapis/gapic-generator-python/commit/f7b11640b74d8c64747b33783976d6e0ab9c61c4)) -* s/grpcAsync/grpc-async for gapic metadata ([#803](https://github.com/googleapis/gapic-generator-python/issues/803)) ([96f7864](https://github.com/googleapis/gapic-generator-python/commit/96f78640d90cf50c6b525924d14c6afe31874be6)) -* samplegen always produces sample dicts with "response" ([#914](https://github.com/googleapis/gapic-generator-python/issues/914)) ([0b168f2](https://github.com/googleapis/gapic-generator-python/commit/0b168f20f4cbf419131fcc512141fccca8186681)) -* **snippetgen:** don't create duplicate requests for required oneofs ([#1088](https://github.com/googleapis/gapic-generator-python/issues/1088)) 
([5531795](https://github.com/googleapis/gapic-generator-python/commit/55317956397370a91b1a06ecd476e55f58789807)) -* **snippetgen:** fix client streaming samples ([#1061](https://github.com/googleapis/gapic-generator-python/issues/1061)) ([64b9ad6](https://github.com/googleapis/gapic-generator-python/commit/64b9ad6e417a15cfbddf0e7a1b57036b8abfc829)) -* **snippetgen:** use f-strings in print statements ([#975](https://github.com/googleapis/gapic-generator-python/issues/975)) ([122e85c](https://github.com/googleapis/gapic-generator-python/commit/122e85c37ff6aa0a99f64361397eb3df5495a3b4)) -* sort subpackages in %namespace/%name/__init__.py ([#836](https://github.com/googleapis/gapic-generator-python/issues/836)) ([90cf882](https://github.com/googleapis/gapic-generator-python/commit/90cf882b20f430499f692e6b9b23497b3555e928)) -* stabilize order of query_params ([#742](https://github.com/googleapis/gapic-generator-python/issues/742)) ([2835ddb](https://github.com/googleapis/gapic-generator-python/commit/2835ddbe62b520e2e4c84f02810b1ac936c9cbb9)) -* stabilize the order of resource helper methods and ([#582](https://github.com/googleapis/gapic-generator-python/issues/582)) ([7d2adde](https://github.com/googleapis/gapic-generator-python/commit/7d2adde3a1ae81ac88ced822d6dfdfb26ffbfdf0)) -* suppress type error for fallback def of OptionalRetry ([#1065](https://github.com/googleapis/gapic-generator-python/issues/1065)) ([e47faa6](https://github.com/googleapis/gapic-generator-python/commit/e47faa6c59a1fadf7dfebc965c962aa05ca30f74)) -* syntax fix and test for multiple required fields ([#1105](https://github.com/googleapis/gapic-generator-python/issues/1105)) ([4e5fe2d](https://github.com/googleapis/gapic-generator-python/commit/4e5fe2db9d0d81929cc1559d3a134c9a38ae595c)) -* syntax fix for required_fields struct in rest transport ([#1103](https://github.com/googleapis/gapic-generator-python/issues/1103)) 
([3d7128c](https://github.com/googleapis/gapic-generator-python/commit/3d7128ce8f55523b9aff2e44e2c000450e712ac2)) -* Temporarily define a fixed testing event loop ([#493](https://github.com/googleapis/gapic-generator-python/issues/493)) ([2d22d91](https://github.com/googleapis/gapic-generator-python/commit/2d22d919bc8c08e03f501ff2f23152b761467c80)) -* temporarily disable code coverage in showcase_unit tests ([#925](https://github.com/googleapis/gapic-generator-python/issues/925)) ([0dfac03](https://github.com/googleapis/gapic-generator-python/commit/0dfac03bd3ef8c12b33e6c03e62eab3e7bf2cd69)) -* the common resources are not targets for lookup ([#650](https://github.com/googleapis/gapic-generator-python/issues/650)) ([8e1b384](https://github.com/googleapis/gapic-generator-python/commit/8e1b384e812ef519c421c8c288d5118961d8b4cf)) -* timeouts are handled by rest clients, retries silently ignored ([#976](https://github.com/googleapis/gapic-generator-python/issues/976)) ([a62463c](https://github.com/googleapis/gapic-generator-python/commit/a62463cadee0cdaf861e93998faa27e6a82adab4)) -* tweak oneof detection ([#505](https://github.com/googleapis/gapic-generator-python/issues/505)) ([1632e25](https://github.com/googleapis/gapic-generator-python/commit/1632e250cfc01a17ccad128c3e065008b334473a)) -* unignore 'google.api_core' imports ([#1066](https://github.com/googleapis/gapic-generator-python/issues/1066)) ([13f764c](https://github.com/googleapis/gapic-generator-python/commit/13f764c6513b91e7143a4a4a0bcc661cd19be0d8)) -* unknown resources do not cause a generator crash ([#675](https://github.com/googleapis/gapic-generator-python/issues/675)) ([2d23d7d](https://github.com/googleapis/gapic-generator-python/commit/2d23d7d202099ccf145c01aeb9a03ae46b4e1b00)) -* update dependencies ([#393](https://github.com/googleapis/gapic-generator-python/issues/393)) ([161e486](https://github.com/googleapis/gapic-generator-python/commit/161e48613934058780f82e15f71cbc9f574fdf65)) -* Update 
gapic-generator-python to gracefully handle internal google inconsistencies ([#721](https://github.com/googleapis/gapic-generator-python/issues/721)) ([b984295](https://github.com/googleapis/gapic-generator-python/commit/b9842952433924a1d8de4ef9cc3ea9e7fa91c01a)) -* update GOOGLE_API_USE_MTLS value ([#453](https://github.com/googleapis/gapic-generator-python/issues/453)) ([7449ad5](https://github.com/googleapis/gapic-generator-python/commit/7449ad5aad4a1fbbf9ca3796e097512fc80991e3)) -* update GOOGLE_API_USE_MTLS values ([#449](https://github.com/googleapis/gapic-generator-python/issues/449)) ([b737ea0](https://github.com/googleapis/gapic-generator-python/commit/b737ea09f004fbd0f160d641ec9f14a6c6c98715)) -* Update module alias to resolve naming conflict ([#820](https://github.com/googleapis/gapic-generator-python/issues/820)) ([f5e9f36](https://github.com/googleapis/gapic-generator-python/commit/f5e9f367ec6a72b4272f559a93f6fbb3d7e54b8b)), closes [#819](https://github.com/googleapis/gapic-generator-python/issues/819) -* update paging implementation to handle unconventional pagination ([#750](https://github.com/googleapis/gapic-generator-python/issues/750)) ([eaac3e6](https://github.com/googleapis/gapic-generator-python/commit/eaac3e69d366b610ae7551d94d4f546819e24bc2)) -* update protobuf version [gapic-generator-python] ([#696](https://github.com/googleapis/gapic-generator-python/issues/696)) ([ea3e519](https://github.com/googleapis/gapic-generator-python/commit/ea3e5198862881f5b142638df6ea604654f81f82)) -* updating testing, rest-only generation, & minor bug-fixes ([#716](https://github.com/googleapis/gapic-generator-python/issues/716)) ([56c31de](https://github.com/googleapis/gapic-generator-python/commit/56c31de4a9f661e3d69b52e19c9a28dddfe9d7dc)) -* use (new) typing for 'gapic_v1.method.DEFAULT' ([#1032](https://github.com/googleapis/gapic-generator-python/issues/1032)) 
([d85dfad](https://github.com/googleapis/gapic-generator-python/commit/d85dfadc180e5f218ad582a306c1c441a6c668db)) -* use context manager for mtls env var ([#548](https://github.com/googleapis/gapic-generator-python/issues/548)) ([d19e180](https://github.com/googleapis/gapic-generator-python/commit/d19e1808df9cd2884ae7a449977a479b4829bc1d)) -* use correct retry deadline in publisher methods ([#814](https://github.com/googleapis/gapic-generator-python/issues/814)) ([92a2cfc](https://github.com/googleapis/gapic-generator-python/commit/92a2cfc47b24c4b1a041d5bbb944d69a67a962a2)) -* use correct typing for retries / operations_client ([#1026](https://github.com/googleapis/gapic-generator-python/issues/1026)) ([acb3ea8](https://github.com/googleapis/gapic-generator-python/commit/acb3ea83becf6bf85c142739dede556cae2cebae)) - - -### Performance Improvements - -* collisions don't contain reserved names by default ([#684](https://github.com/googleapis/gapic-generator-python/issues/684)) ([2ec6ea6](https://github.com/googleapis/gapic-generator-python/commit/2ec6ea6835256c0d7b252e035cf4eac1ff442647)) -* reduce unnecessary copies, optimize Address comparison ([#855](https://github.com/googleapis/gapic-generator-python/issues/855)) ([e843540](https://github.com/googleapis/gapic-generator-python/commit/e8435400257707458e83424019c9b1a16fac9a99)) - - -### Miscellaneous Chores - -* add release-please ([#452](https://github.com/googleapis/gapic-generator-python/issues/452)) ([6b35758](https://github.com/googleapis/gapic-generator-python/commit/6b357580bfdc97497a054f26f4063bd766835665)) - ### [0.59.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.0...v0.59.1) (2022-01-10) From e4ac6aa46997ea503505599672e97d940fe3300c Mon Sep 17 00:00:00 2001 From: Jeff Ching Date: Wed, 19 Jan 2022 14:01:12 -0800 Subject: [PATCH 0699/1339] build: have release-please handle release tagging (#1137) Co-authored-by: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> --- 
packages/gapic-generator/.github/release-please.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/.github/release-please.yml b/packages/gapic-generator/.github/release-please.yml index 4507ad0598a5..466597e5b196 100644 --- a/packages/gapic-generator/.github/release-please.yml +++ b/packages/gapic-generator/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true From a91cad52df2803b743507db868ac5ebd90976abe Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 19 Jan 2022 14:55:38 -0800 Subject: [PATCH 0700/1339] feat: add api key support (#969) * feat: add api key support * chore: update integration tests --- .../%sub/services/%service/client.py.j2 | 11 +++- .../%name_%version/%sub/test_%service.py.j2 | 51 +++++++++++++++++++ .../asset_v1/services/asset_service/client.py | 11 +++- .../unit/gapic/asset_v1/test_asset_service.py | 47 +++++++++++++++++ .../services/iam_credentials/client.py | 11 +++- .../credentials_v1/test_iam_credentials.py | 47 +++++++++++++++++ .../services/config_service_v2/client.py | 11 +++- .../services/logging_service_v2/client.py | 11 +++- .../services/metrics_service_v2/client.py | 11 +++- .../logging_v2/test_config_service_v2.py | 47 +++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 47 +++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 47 +++++++++++++++++ .../redis_v1/services/cloud_redis/client.py | 11 +++- .../unit/gapic/redis_v1/test_cloud_redis.py | 47 +++++++++++++++++ 14 files changed, 403 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 451ac1afb27f..d95620a4b7d0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -309,12 +309,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, {{ service.name }}Transport): # transport is a {{ service.name }}Transport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -324,6 +328,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6da9da3efac7..56cdbc628735 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1829,6 +1829,27 @@ def test_credentials_transport_error(): 
client_options={"credentials_file": "credentials.json"}, transport=transport, ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = {{ service.client_name }}( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = {{ service.client_name }}( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) # It is an error to provide scopes and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( @@ -2897,4 +2918,34 @@ def test_client_ctx(): pass close.assert_called() +@pytest.mark.parametrize("client_class,transport_class", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}), + {% elif 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}), + {% endif %} +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + 
client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + {% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 5ddda9d5fafd..436a6b943127 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -349,12 +349,16 @@ def __init__(self, *, api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, AssetServiceTransport): # transport is a AssetServiceTransport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -364,6 +368,11 @@ def __init__(self, *, ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 2a386149dd73..a2747714b857 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -3547,6 +3547,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.AssetServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AssetServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.AssetServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -4129,3 +4150,29 @@ def test_client_ctx(): with client: pass close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 93876b0369ac..712492ba8a7f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -345,12 +345,16 @@ def __init__(self, *, api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials 
are mutually exclusive") + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, IAMCredentialsTransport): # transport is a IAMCredentialsTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -360,6 +364,11 @@ def __init__(self, *, ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index b7b2ee1dcef1..862636e9773c 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1491,6 +1491,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.IAMCredentialsGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = IAMCredentialsClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. transport = transports.IAMCredentialsGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2011,3 +2032,29 @@ def test_client_ctx(): with client: pass close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 9637c628dcd5..e861ff0898cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -380,12 +380,16 @@ def __init__(self, *, api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, ConfigServiceV2Transport): # transport is a ConfigServiceV2Transport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -395,6 +399,11 @@ def __init__(self, *, ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index fc7e8aed39e7..6381c3feb024 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -336,12 +336,16 @@ def __init__(self, *, api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, LoggingServiceV2Transport): # transport is a LoggingServiceV2Transport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -351,6 +355,11 @@ def __init__(self, *, ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index c5575d541856..93ad8b286a8b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -337,12 +337,16 @@ def __init__(self, *, api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, MetricsServiceV2Transport): # transport is a MetricsServiceV2Transport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -352,6 +356,11 @@ def __init__(self, *, ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 00ab3ebb47eb..7ddf1c2f7d42 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -5922,6 +5922,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -6550,3 +6571,29 @@ def test_client_ctx(): with client: pass close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index ef0e7fb1a2b3..74db989ed0f4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2023,6 +2023,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2557,3 +2578,29 @@ def test_client_ctx(): with client: pass close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index dbc0f9035771..78c8a3bf1508 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1877,6 +1877,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2410,3 +2431,29 @@ def test_client_ctx(): with client: pass close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 6f2397c49316..7fb857604ddb 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -360,12 +360,16 @@ def __init__(self, *, api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + # Save 
or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, CloudRedisTransport): # transport is a CloudRedisTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") if client_options.scopes: @@ -375,6 +379,11 @@ def __init__(self, *, ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 445a95ea3f7c..e89e2e73fd1e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -2875,6 +2875,27 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. transport = transports.CloudRedisGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3441,3 +3462,29 @@ def test_client_ctx(): with client: pass close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From d77535c36ae022b228f50316e7bad6a1bc267f8a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 19 Jan 2022 15:11:09 -0800 Subject: [PATCH 0701/1339] chore(master): release 0.60.0 (#1139) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 2d95536ac079..d247dbc5ae77 100644 --- 
a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [0.60.0](https://github.com/googleapis/gapic-generator-python/compare/v0.59.1...v0.60.0) (2022-01-19) + + +### Features + +* add api key support ([#969](https://github.com/googleapis/gapic-generator-python/issues/969)) ([7c72739](https://github.com/googleapis/gapic-generator-python/commit/7c7273919193f321e0dc2d4156b35be1b4733458)) +* generate snippet metadata ([#1129](https://github.com/googleapis/gapic-generator-python/issues/1129)) ([9e46031](https://github.com/googleapis/gapic-generator-python/commit/9e46031d01edc3a461140fe3b29d8d400f5ddf86)) + + +### Bug Fixes + +* only set unset fields if they are query params ([#1130](https://github.com/googleapis/gapic-generator-python/issues/1130)) ([9ad98ca](https://github.com/googleapis/gapic-generator-python/commit/9ad98ca6833f1b280bf3c04c858f92276d59ffbe)) + ### [0.59.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.0...v0.59.1) (2022-01-10) From 073efb257bff96ff2e9145c9480c479783460340 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 21 Jan 2022 12:54:48 -0800 Subject: [PATCH 0702/1339] feat: add interceptor-like functionality to REST transport (#1142) Interceptors are a gRPC feature that wraps rpcs in continuation-passing-style pre and post method custom functions. These can be used e.g. for logging, local caching, and tweaking metadata. This PR adds interceptor like functionality to the REST transport in generated GAPICs. The REST transport interceptors differ in a few ways: 1) They are not continuations. For each method there is a slot for a "pre"function, and for each method with a non-empty return there is a slot for a "post" function. 2) There is always an interceptor for each method. The default simply does nothing. 3) Existing gRPC interceptors and the new REST interceptors are not composable or interoperable. 
--- .../%service/transports/__init__.py.j2 | 2 + .../services/%service/transports/rest.py.j2 | 73 +++++++++++++++++-- .../%name_%version/%sub/test_%service.py.j2 | 50 +++++++++++++ .../%service/transports/__init__.py.j2 | 2 + .../services/%service/transports/rest.py.j2 | 73 +++++++++++++++++-- .../%name_%version/%sub/test_%service.py.j2 | 58 ++++++++++++++- packages/gapic-generator/noxfile.py | 17 +++-- .../unit/gapic/asset_v1/test_asset_service.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1 + 9 files changed, 255 insertions(+), 22 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 index 1241886b6370..88d196a7c226 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/__init__.py.j2 @@ -11,6 +11,7 @@ from .grpc import {{ service.name }}GrpcTransport {% endif %} {% if 'rest' in opts.transport %} from .rest import {{ service.name }}RestTransport +from .rest import {{ service.name }}RestInterceptor {% endif %} # Compile a registry of transports. 
@@ -29,6 +30,7 @@ __all__ = ( {% endif %} {% if 'rest' in opts.transport %} '{{ service.name }}RestTransport', + '{{ service.name }}RestInterceptor', {% endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 488646be1202..a4dc7c61e3ac 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -49,10 +49,67 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=requests_version, ) + +class {{ service.name }}RestInterceptor: + """Interceptor for {{ service.name }}. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the {{ service.name }}RestTransport. + + .. 
code-block: + class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): +{% for _, method in service.methods|dictsort if not (method.server_streaming or method.client_streaming) %} + def pre_{{ method.name|snake_case }}(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + {% if not method.void %} + def post_{{ method.name|snake_case }}(response): + logging.log(f"Received response: {response}") + {% endif %} + +{% endfor %} + transport = {{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) + client = {{ service.client_name }}(transport=transport) + + + """ + {% for method in service.methods.values()|sort(attribute="name") if not(method.server_streaming or method.client_streaming) %} + def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + {% if not method.void %} + def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + {% endif %} + + {% endfor %} + + @dataclasses.dataclass class {{service.name}}RestStub: _session: AuthorizedSession _host: str + _interceptor: {{ service.name }}RestInterceptor + class {{service.name}}RestTransport({{service.name}}Transport): """REST backend transport for {{ service.name }}. 
@@ -80,6 +137,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False, url_scheme: str='https', + interceptor: Optional[{{ service.name }}RestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -130,6 +188,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or {{ service.name }}RestInterceptor() self._prep_wrapped_messages(client_info) {% if service.has_lro %} @@ -233,7 +292,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): }, {% endfor %}{# rule in method.http_options #} ] - + request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) request_kwargs = {{method.input.ident}}.to_dict(request) transcoded_request = path_template.transcode( http_options, **request_kwargs) @@ -288,16 +347,16 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.void %} # Return the response {% if method.lro %} - return_op = operations_pb2.Operation() - json_format.Parse(response.content, return_op, ignore_unknown_fields=True) - return return_op + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) {% else %} - return {{method.output.ident}}.from_json( + resp = {{method.output.ident}}.from_json( response.content, ignore_unknown_fields=True ) - {% endif %}{# method.lro #} + resp = self._interceptor.post_{{ method.name|snake_case }}(resp) + return resp {% endif %}{# method.void #} {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} {% if not method.http_options %} @@ -323,7 +382,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {{method.output.ident}}]: stub = self._STUBS.get("{{method.name | 
snake_case}}") if not stub: - stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host) + stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) return stub diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index af7d28335d69..a7934d84e87b 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -35,6 +35,7 @@ from google.api_core import grpc_helpers from google.api_core import path_template {% if service.has_lro %} from google.api_core import future +from google.api_core import operation from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% if "rest" in opts.transport %} @@ -1113,6 +1114,55 @@ def test_{{ method_name }}_rest_unset_required_fields(): {% endif %}{# required_fields #} +{% if not (method.server_streaming or method.client_streaming) %} +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_{{ method_name }}_rest_interceptors(null_interceptor): + transport = transports.{{ service.name }}RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), + ) + client = {{ service.client_name }}(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + {% if method.lro %} + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + {% endif %} + {% if not method.void %} + mock.patch.object(transports.{{ service.name }}RestInterceptor, 
"post_{{method.name|snake_case}}") as post, \ + {% endif %} + mock.patch.object(transports.{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: + pre.assert_not_called() + {% if not method.void %} + post.assert_not_called() + {% endif %} + + transcode.return_value = {"method": "post", "uri": "my_uri", "body": None, "query_params": {},} + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + {% if not method.void %} + req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + {% endif %} + + request = {{ method.input.ident }}() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + {% if not method.void %} + post.return_value = {{ method.output.ident }} + {% endif %} + + client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + {% if not method.void %} + post.assert_called_once() + {% endif %} +{% endif %}{# streaming #} + def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index 107e2bd4e872..66be2e5c29a7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -12,6 +12,7 @@ from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport {% endif %} {% if 'rest' in 
opts.transport %} from .rest import {{ service.name }}RestTransport +from .rest import {{ service.name }}RestInterceptor {% endif %} @@ -34,6 +35,7 @@ __all__ = ( {% endif %} {% if 'rest' in opts.transport %} '{{ service.name }}RestTransport', + '{{ service.name }}RestInterceptor', {% endif %} ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 488646be1202..b208c0940fcc 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -49,10 +49,67 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=requests_version, ) + +class {{ service.name }}RestInterceptor: + """Interceptor for {{ service.name }}. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the {{ service.name }}RestTransport. + + .. 
code-block: + class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): + {% for _, method in service.methods|dictsort if not (method.server_streaming or method.client_streaming) %} + def pre_{{ method.name|snake_case }}(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + {% if not method.void %} + def post_{{ method.name|snake_case }}(response): + logging.log(f"Received response: {response}") + {% endif %} + +{% endfor %} + transport = {{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) + client = {{ service.client_name }}(transport=transport) + + + """ + {% for method in service.methods.values()|sort(attribute="name") if not (method.server_streaming or method.client_streaming) %} + def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + {% if not method.void %} + def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + {% endif %} + + {% endfor %} + + @dataclasses.dataclass class {{service.name}}RestStub: _session: AuthorizedSession _host: str + _interceptor: {{ service.name }}RestInterceptor + class {{service.name}}RestTransport({{service.name}}Transport): """REST backend transport for {{ service.name }}. 
@@ -80,6 +137,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False, url_scheme: str='https', + interceptor: Optional[{{ service.name }}RestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -130,6 +188,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or {{ service.name }}RestInterceptor() self._prep_wrapped_messages(client_info) {% if service.has_lro %} @@ -233,7 +292,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): }, {% endfor %}{# rule in method.http_options #} ] - + request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) request_kwargs = {{method.input.ident}}.to_dict(request) transcoded_request = path_template.transcode( http_options, **request_kwargs) @@ -288,16 +347,16 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.void %} # Return the response {% if method.lro %} - return_op = operations_pb2.Operation() - json_format.Parse(response.content, return_op, ignore_unknown_fields=True) - return return_op + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) {% else %} - return {{method.output.ident}}.from_json( + resp = {{method.output.ident}}.from_json( response.content, ignore_unknown_fields=True ) - {% endif %}{# method.lro #} + resp = self._interceptor.post_{{ method.name|snake_case }}(resp) + return resp {% endif %}{# method.void #} {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} {% if not method.http_options %} @@ -323,7 +382,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {{method.output.ident}}]: stub = self._STUBS.get("{{method.name | 
snake_case}}") if not stub: - stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host) + stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) return stub diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 56cdbc628735..cdee5b76971d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -39,6 +39,7 @@ from google.api_core import grpc_helpers_async from google.api_core import path_template {% if service.has_lro %} from google.api_core import future +from google.api_core import operation from google.api_core import operations_v1 from google.longrunning import operations_pb2 {% if "rest" in opts.transport %} @@ -1515,6 +1516,57 @@ def test_{{ method_name }}_rest_unset_required_fields(): {% endif %}{# required_fields #} +{% if not (method.server_streaming or method.client_streaming) %} +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_{{ method_name }}_rest_interceptors(null_interceptor): + transport = transports.{{ service.name }}RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), + ) + client = {{ service.client_name }}(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + {% if method.lro %} + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + {% endif %} + {% if not method.void %} + mock.patch.object(transports.{{ service.name }}RestInterceptor, 
"post_{{method.name|snake_case}}") as post, \ + {% endif %} + mock.patch.object(transports.{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: + pre.assert_not_called() + {% if not method.void %} + post.assert_not_called() + {% endif %} + + transcode.return_value = {"method": "post", "uri": "my_uri", "body": None, "query_params": {},} + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + {% if not method.void %} + req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + {% endif %} + + request = {{ method.input.ident }}() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + {% if not method.void %} + post.return_value = {{ method.output.ident }} + {% endif %} + + client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + {% if not method.void %} + post.assert_called_once() + {% endif %} + +{% endif %}{# streaming #} + + def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1829,7 +1881,7 @@ def test_credentials_transport_error(): client_options={"credentials_file": "credentials.json"}, transport=transport, ) - + # It is an error to provide an api_key and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( credentials=ga_credentials.AnonymousCredentials(), @@ -1841,7 +1893,7 @@ def test_credentials_transport_error(): client_options=options, transport=transport, ) - + # It is an error to provide an api_key and a credential. 
options = mock.Mock() options.api_key = "api_key" @@ -2141,6 +2193,8 @@ def test_{{ service.name|snake_case }}_rest_lro_client(): # Ensure that subsequent calls to the property send the exact same object. assert transport.operations_client is transport.operations_client {%- endif %} + + {% endif %} {# rest #} def test_{{ service.name|snake_case }}_host_no_port(): diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 6154ea94bb13..a9df7d65d4c6 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -310,12 +310,17 @@ def run_showcase_unit_tests(session, fail_under=100): # Run the tests. session.run( "py.test", - "-n=auto", - "--quiet", - "--cov=google", - "--cov-append", - f"--cov-fail-under={str(fail_under)}", - *(session.posargs or [path.join("tests", "unit")]), + *( + session.posargs + or [ + "-n=auto", + "--quiet", + "--cov=google", + "--cov-append", + f"--cov-fail-under={str(fail_under)}", + path.join("tests", "unit"), + ] + ), ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index a2747714b857..dd4f527b9c7d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py 
b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index e89e2e73fd1e..b189511ab7e8 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template From a4481d10dfbbdf48fe1793f2a3688f6816cddc89 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 25 Jan 2022 17:11:25 -0500 Subject: [PATCH 0703/1339] fix: preserve hyperlinks with hyphens (#1140) The default behaviour of textwrap.wrap is to break text on hyphens. This PR sets the break_on_hyphens parameter of textwrap.wrap to False in order to preserve hyperlinks with hyphens. 
Fixes #1131 --- packages/gapic-generator/gapic/utils/lines.py | 6 ++++++ .../services/iam_credentials/async_client.py | 4 ++-- .../iam/credentials_v1/services/iam_credentials/client.py | 4 ++-- .../services/iam_credentials/transports/grpc.py | 4 ++-- .../services/iam_credentials/transports/grpc_asyncio.py | 4 ++-- .../logging_v2/services/config_service_v2/async_client.py | 8 ++++---- .../cloud/logging_v2/services/config_service_v2/client.py | 8 ++++---- .../google/cloud/logging_v2/types/logging_config.py | 3 +-- .../redis/google/cloud/redis_v1/types/cloud_redis.py | 4 ++-- packages/gapic-generator/tests/unit/utils/test_lines.py | 4 ++++ 10 files changed, 29 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 64d32a31faef..6da582c44a06 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -78,9 +78,12 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: # Break off the first line of the string to address non-zero offsets. first = text.split('\n')[0] + '\n' if len(first) > width - offset: + # Ensure `break_on_hyphens` is set to `False` when using + # `textwrap.wrap` to avoid breaking hyperlinks with hyphens. initial = textwrap.wrap(first, break_long_words=False, width=width - offset, + break_on_hyphens=False, ) # Strip the first \n from the text so it is not misidentified as an # intentionally short line below. @@ -107,11 +110,14 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: # Wrap the remainder of the string at the desired width. return '{first}{text}'.format( first=first, + # Ensure `break_on_hyphens` is set to `False` when using + # `textwrap.fill` to avoid breaking hyperlinks with hyphens. 
text='\n'.join([textwrap.fill( break_long_words=False, initial_indent=' ' * indent, subsequent_indent=' ' * indent, text=token, width=width, + break_on_hyphens=False, ) for token in tokens]), ).rstrip('\n') diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index cd7019d14e3e..28b31e1bf5e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -48,8 +48,8 @@ class IAMCredentialsAsyncClient: Service account credentials are used to temporarily assume the identity of the service account. Supported credential types - include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- - signed JSON Web Tokens (JWTs), and more. + include OAuth 2.0 access tokens, OpenID Connect ID tokens, + self-signed JSON Web Tokens (JWTs), and more. """ _client: IAMCredentialsClient diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 712492ba8a7f..5c330ce8974e 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -83,8 +83,8 @@ class IAMCredentialsClient(metaclass=IAMCredentialsClientMeta): Service account credentials are used to temporarily assume the identity of the service account. 
Supported credential types - include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- - signed JSON Web Tokens (JWTs), and more. + include OAuth 2.0 access tokens, OpenID Connect ID tokens, + self-signed JSON Web Tokens (JWTs), and more. """ @staticmethod diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index c213c2fb3209..597a4480badc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -39,8 +39,8 @@ class IAMCredentialsGrpcTransport(IAMCredentialsTransport): Service account credentials are used to temporarily assume the identity of the service account. Supported credential types - include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- - signed JSON Web Tokens (JWTs), and more. + include OAuth 2.0 access tokens, OpenID Connect ID tokens, + self-signed JSON Web Tokens (JWTs), and more. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 8515d5fdb130..800002bfcdf6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -40,8 +40,8 @@ class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport): Service account credentials are used to temporarily assume the identity of the service account. Supported credential types - include OAuth 2.0 access tokens, OpenID Connect ID tokens, self- - signed JSON Web Tokens (JWTs), and more. + include OAuth 2.0 access tokens, OpenID Connect ID tokens, + self-signed JSON Web Tokens (JWTs), and more. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 01dd8b63002f..95f8ac1c8c5c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -2418,8 +2418,8 @@ def sample_get_cmek_settings(): The request object. 
The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2520,8 +2520,8 @@ def sample_update_cmek_settings(): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index e861ff0898cb..0f41c6dc22e0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -2616,8 +2616,8 @@ def sample_get_cmek_settings(): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2720,8 +2720,8 @@ def sample_update_cmek_settings(): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed- - encryption) for more information. + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index ce8b103af238..068af0b23643 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -144,8 +144,7 @@ class LogView(proto.Message): name (str): The resource name of the view. For example - "projects/my-project-id/locations/my- - location/buckets/my-bucket-id/views/my-view + "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view description (str): Describes this view. 
create_time (google.protobuf.timestamp_pb2.Timestamp): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 32aff22d3fed..e228a379822b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -95,8 +95,8 @@ class Instance(proto.Message): addresses that are reserved for this instance. If not provided, the service will choose an unused /29 block, for example, 10.0.0.0/29 or - 192.168.0.0/29. Ranges must be unique and non- - overlapping with existing subnets in an + 192.168.0.0/29. Ranges must be unique and + non-overlapping with existing subnets in an authorized network. host (str): Output only. Hostname or IP address of the diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 65df6f1c36b6..c471cb677bf6 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -87,3 +87,7 @@ def test_wrap_indent_short(): def test_wrap_short_line_preserved(): assert lines.wrap('foo\nbar\nbaz', width=80) == 'foo\nbar\nbaz' + + +def test_wrap_does_not_break_hyphenated_word(): + assert lines.wrap('do-not-break', width=5) == 'do-not-break' From ae67cffd47539851d1e1f4e330316e7d8e53b478 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 26 Jan 2022 14:55:19 -0500 Subject: [PATCH 0704/1339] chore: use alternative terms (#1157) --- .../%sub/services/%service/client.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 2 +- .../gapic/generator/generator.py | 6 ++--- packages/gapic-generator/gapic/schema/api.py | 2 +- .../gapic-generator/gapic/schema/naming.py | 4 ++-- 
.../gapic-generator/gapic/schema/wrappers.py | 4 ++-- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 2 +- .../%service/transports/grpc_asyncio.py.j2 | 2 +- packages/gapic-generator/gapic/utils/cache.py | 2 +- packages/gapic-generator/gapic/utils/lines.py | 2 +- packages/gapic-generator/gapic/utils/rst.py | 2 +- .../services/asset_service/async_client.py | 16 ++++++------- .../asset_v1/services/asset_service/client.py | 16 ++++++------- .../services/asset_service/transports/grpc.py | 2 +- .../asset_service/transports/grpc_asyncio.py | 2 +- .../services/iam_credentials/async_client.py | 8 +++---- .../services/iam_credentials/client.py | 8 +++---- .../config_service_v2/async_client.py | 24 +++++++++---------- .../services/config_service_v2/client.py | 24 +++++++++---------- .../logging_service_v2/async_client.py | 8 +++---- .../services/logging_service_v2/client.py | 8 +++---- .../metrics_service_v2/async_client.py | 10 ++++---- .../services/metrics_service_v2/client.py | 10 ++++---- .../services/cloud_redis/async_client.py | 18 +++++++------- .../redis_v1/services/cloud_redis/client.py | 18 +++++++------- .../services/cloud_redis/transports/grpc.py | 2 +- .../cloud_redis/transports/grpc_asyncio.py | 2 +- .../tests/unit/samplegen/test_manifest.py | 2 +- 30 files changed, 106 insertions(+), 106 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 5427919d92ad..8f2ae2f41b7e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -388,7 +388,7 @@ class {{ service.client_name }}(metaclass={{ 
service.client_name }}Meta): {% if not method.client_streaming %} # Create or coerce a protobuf request object. {% if method.flattened_fields %} - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index baa63b846e51..3a8be2ff82e9 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -234,7 +234,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( self.grpc_channel diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 609f70bb0f80..bf05f961a3ff 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -112,7 +112,7 @@ def get_response( # and instead of iterating over it/them, we iterate over samples # and plug those into the template. for template_name in client_templates: - # Sanity check: Skip "private" templates. + # Quick check: Skip "private" templates. 
filename = template_name.split("/")[-1] if filename.startswith("_") and filename != "__init__.py.j2": continue @@ -242,7 +242,7 @@ def _render_template( if not opts.metadata and template_name.endswith("gapic_metadata.json.j2"): return answer - # Sanity check: Rendering per service and per proto would be a + # Quick check: Rendering per service and per proto would be a # combinatorial explosion and is almost certainly not what anyone # ever wants. Error colorfully on it. if "%service" in template_name and "%proto" in template_name: @@ -343,7 +343,7 @@ def _get_file( name=fn, ) - # Sanity check: Do not render empty files. + # Quick check: Do not render empty files. if utils.empty(cgr_file.content) and not fn.endswith( ("py.typed", "__init__.py") ): diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 575656351bfc..2a0eb4e2c410 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -195,7 +195,7 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: answer = { t.ident.python_import for m in self.all_messages.values() - # Sanity check: We do make sure that we are not trying to have + # Quick check: We do make sure that we are not trying to have # a module import itself. for t in m.field_types if t.ident.python_import != self_reference } diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 3f49a18a2e48..fa6000f88a67 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -77,7 +77,7 @@ def build( proto_packages = {fd.package for fd in file_descriptors} root_package = os.path.commonprefix(tuple(proto_packages)).rstrip('.') - # Sanity check: If there is no common ground in the package, + # Quick check: If there is no common ground in the package, # we are obviously in trouble. 
if not root_package: raise ValueError( @@ -119,7 +119,7 @@ def build( version=match.get('version', ''), ) - # Sanity check: Ensure that the package directives all inferred + # Quick check: Ensure that the package directives all inferred # the same information. if not package_info.version and len(proto_packages) > 1: raise ValueError('All protos must have the same proto package ' diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index cb9203bb7826..08b764a5ac78 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -541,7 +541,7 @@ def get_field(self, *field_path: str, visited_messages=frozenset({self}), ) - # Sanity check: If cursor is a repeated field, then raise an exception. + # Quick check: If cursor is a repeated field, then raise an exception. # Repeated fields are only permitted in the terminal position. if cursor.repeated: raise KeyError( @@ -552,7 +552,7 @@ def get_field(self, *field_path: str, 'in the fields list in a position other than the end.', ) - # Sanity check: If this cursor has no message, there is a problem. + # Quick check: If this cursor has no message, there is a problem. if not cursor.message: raise KeyError( f'Field {".".join(field_path)} could not be resolved from ' diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 4e22e94625b5..a6e76a595f4c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -256,7 +256,7 @@ class {{ service.async_client_name }}: {% if not method.client_streaming %} # Create or coerce a protobuf request object. 
{% if method.flattened_fields %} - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index d95620a4b7d0..d0727567c6b6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -422,7 +422,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if not method.client_streaming %} # Create or coerce a protobuf request object. {% if method.flattened_fields %} - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index baa63b846e51..3a8be2ff82e9 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -234,7 +234,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( self.grpc_channel diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 446132d2c527..fe3430946f04 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -235,7 +235,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/packages/gapic-generator/gapic/utils/cache.py b/packages/gapic-generator/gapic/utils/cache.py index 1537ea83b2d8..b2292b16d23e 100644 --- a/packages/gapic-generator/gapic/utils/cache.py +++ b/packages/gapic-generator/gapic/utils/cache.py @@ -29,7 +29,7 @@ def cached_property(fx): """ @functools.wraps(fx) def inner(self): - # Sanity check: If there is no cache at all, create an empty cache. + # Quick check: If there is no cache at all, create an empty cache. if not hasattr(self, '_cached_values'): object.__setattr__(self, '_cached_values', {}) diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 6da582c44a06..4e268f284ee1 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -62,7 +62,7 @@ def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: Returns: str: The wrapped string. """ - # Sanity check: If there is empty text, abort. + # Quick check: If there is empty text, abort. if not text: return '' diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index cd47e82f7eba..e9a76956a412 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -37,7 +37,7 @@ def rst(text: str, width: int = 72, indent: int = 0, nl: bool = None, Returns: str: The same text, in RST format. """ - # Sanity check: If the text block does not appear to have any formatting, + # Quick check: If the text block does not appear to have any formatting, # do not convert it. # (This makes code generation significantly faster; calling out to pandoc # is by far the most expensive thing we do.) 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 55c4ae53c34a..7515cdcc31b9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -352,7 +352,7 @@ def sample_list_assets(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -559,7 +559,7 @@ def sample_create_feed(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -663,7 +663,7 @@ def sample_get_feed(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -767,7 +767,7 @@ def sample_list_feeds(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -878,7 +878,7 @@ def sample_update_feed(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([feed]) if request is not None and has_flattened_params: @@ -968,7 +968,7 @@ def sample_delete_feed(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1158,7 +1158,7 @@ def sample_search_all_resources(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, query, asset_types]) if request is not None and has_flattened_params: @@ -1345,7 +1345,7 @@ def sample_search_all_iam_policies(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([scope, query]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 436a6b943127..c3eec7541d47 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -551,7 +551,7 @@ def sample_list_assets(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -754,7 +754,7 @@ def sample_create_feed(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -859,7 +859,7 @@ def sample_get_feed(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -957,7 +957,7 @@ def sample_list_feeds(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1062,7 +1062,7 @@ def sample_update_feed(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([feed]) if request is not None and has_flattened_params: @@ -1153,7 +1153,7 @@ def sample_delete_feed(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -1337,7 +1337,7 @@ def sample_search_all_resources(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, query, asset_types]) if request is not None and has_flattened_params: @@ -1518,7 +1518,7 @@ def sample_search_all_iam_policies(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([scope, query]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 71ed13ca3bc4..b96d161c6012 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -231,7 +231,7 @@ def operations_client(self) -> operations_v1.OperationsClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( self.grpc_channel diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 7d64bb4064d7..aa2f77b8ae82 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -234,7 +234,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 28b31e1bf5e2..db6153632c75 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -298,7 +298,7 @@ def sample_generate_access_token(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, scope, lifetime]) if request is not None and has_flattened_params: @@ -450,7 +450,7 @@ def sample_generate_id_token(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, audience, include_email]) if request is not None and has_flattened_params: @@ -590,7 +590,7 @@ def sample_sign_blob(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: @@ -731,7 +731,7 @@ def sample_sign_jwt(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 5c330ce8974e..deacb6c9d306 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -486,7 +486,7 @@ def sample_generate_access_token(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, scope, lifetime]) if request is not None and has_flattened_params: @@ -632,7 +632,7 @@ def sample_generate_id_token(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, audience, include_email]) if request is not None and has_flattened_params: @@ -766,7 +766,7 @@ def sample_sign_blob(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: @@ -901,7 +901,7 @@ def sample_sign_jwt(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 95f8ac1c8c5c..f1fdde44cfb0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -262,7 +262,7 @@ def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -748,7 +748,7 @@ def sample_list_views(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1159,7 +1159,7 @@ def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1288,7 +1288,7 @@ def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1427,7 +1427,7 @@ def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: @@ -1582,7 +1582,7 @@ def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: @@ -1693,7 +1693,7 @@ def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1803,7 +1803,7 @@ def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1935,7 +1935,7 @@ def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -2077,7 +2077,7 @@ def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: @@ -2225,7 +2225,7 @@ def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: @@ -2327,7 +2327,7 @@ def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 0f41c6dc22e0..e9803b90315e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -486,7 +486,7 @@ def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -983,7 +983,7 @@ def sample_list_views(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1403,7 +1403,7 @@ def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -1525,7 +1525,7 @@ def sample_get_sink(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -1657,7 +1657,7 @@ def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: @@ -1813,7 +1813,7 @@ def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: @@ -1917,7 +1917,7 @@ def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: @@ -2020,7 +2020,7 @@ def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -2145,7 +2145,7 @@ def sample_get_exclusion(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -2280,7 +2280,7 @@ def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: @@ -2429,7 +2429,7 @@ def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: @@ -2532,7 +2532,7 @@ def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index bf764a2603f9..81c360086661 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -249,7 +249,7 @@ def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: @@ -439,7 +439,7 @@ def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: @@ -593,7 +593,7 @@ def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: @@ -799,7 +799,7 @@ def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 6381c3feb024..6ad1f4049131 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -437,7 +437,7 @@ def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: @@ -620,7 +620,7 @@ def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: @@ -766,7 +766,7 @@ def sample_list_log_entries(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: @@ -959,7 +959,7 @@ def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index bd8825626b4e..06cb208f2211 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -246,7 +246,7 @@ def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -372,7 +372,7 @@ def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: @@ -506,7 +506,7 @@ def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: @@ -633,7 +633,7 @@ def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: @@ -734,7 +734,7 @@ def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 93ad8b286a8b..d392c90094f2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -434,7 +434,7 @@ def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -553,7 +553,7 @@ def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: @@ -680,7 +680,7 @@ def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: @@ -808,7 +808,7 @@ def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: @@ -902,7 +902,7 @@ def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 2c17838465a0..e1aad2df227d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -280,7 +280,7 @@ def sample_list_instances(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -387,7 +387,7 @@ def sample_get_instance(): A Google Cloud Redis instance. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -534,7 +534,7 @@ def sample_create_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance_id, instance]) if request is not None and has_flattened_params: @@ -671,7 +671,7 @@ def sample_update_instance(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([update_mask, instance]) if request is not None and has_flattened_params: @@ -797,7 +797,7 @@ def sample_upgrade_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, redis_version]) if request is not None and has_flattened_params: @@ -928,7 +928,7 @@ def sample_import_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, input_config]) if request is not None and has_flattened_params: @@ -1055,7 +1055,7 @@ def sample_export_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, output_config]) if request is not None and has_flattened_params: @@ -1182,7 +1182,7 @@ def sample_failover_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, data_protection_mode]) if request is not None and has_flattened_params: @@ -1309,7 +1309,7 @@ def sample_delete_instance(): """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 7fb857604ddb..9f58dc2140e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -468,7 +468,7 @@ def sample_list_instances(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -576,7 +576,7 @@ def sample_get_instance(): A Google Cloud Redis instance. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -724,7 +724,7 @@ def sample_create_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, instance_id, instance]) if request is not None and has_flattened_params: @@ -862,7 +862,7 @@ def sample_update_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([update_mask, instance]) if request is not None and has_flattened_params: @@ -989,7 +989,7 @@ def sample_upgrade_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, redis_version]) if request is not None and has_flattened_params: @@ -1121,7 +1121,7 @@ def sample_import_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, input_config]) if request is not None and has_flattened_params: @@ -1249,7 +1249,7 @@ def sample_export_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, output_config]) if request is not None and has_flattened_params: @@ -1377,7 +1377,7 @@ def sample_failover_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, data_protection_mode]) if request is not None and has_flattened_params: @@ -1505,7 +1505,7 @@ def sample_delete_instance(): """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index eee6585cc6fe..53fef935f1e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -250,7 +250,7 @@ def operations_client(self) -> operations_v1.OperationsClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( self.grpc_channel diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 8ec56f68d6d1..c1ad4b05b19b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -253,7 +253,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 5fe7217a30bb..8357c711fece 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -137,7 +137,7 @@ def test_generate_manifest(): assert parsed_manifest == expected_parsed_manifest -def test_generate_manifest_relative_path_sanity(): +def test_generate_manifest_relative_path_quick_check(): with pytest.raises(types.InvalidSampleFpath): manifest.generate( {"molluscs/squid.py": {"id": "squid_sample"}}.items(), From 6f502ffe12e69667c2b320f56bb8734f92d16279 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 26 Jan 2022 15:02:34 -0500 Subject: [PATCH 0705/1339] ci: migrate to Github Actions Concurrency for cancelling workflows (#1158) Replaces `styfle/cancel-workflow-action` with [Github Actions concurrency](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#concurrency). When the workflow has `cancel-in-progress: true`, this will cancel any currently running job or workflow in the same concurrency group which should be the same behaviour as `styfle/cancel-workflow-action`. 
Closes #1151 --- .../.github/workflows/tests.yaml | 52 ++----------------- 1 file changed, 4 insertions(+), 48 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 32f1967b12a0..0f32d9142541 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -9,14 +9,14 @@ on: # Allows you to run this workflow manually from the Actions tab workflow_dispatch: +concurrency: + group: tests-${{ github.head_ref }} + cancel-in-progress: true + jobs: docs: runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -30,10 +30,6 @@ jobs: mypy: runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -50,10 +46,6 @@ jobs: target: [showcase, showcase_alternative_templates] runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -93,10 +85,6 @@ jobs: target: [showcase_mtls, showcase_mtls_alternative_templates] runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Setup temp directory run: | @@ -156,10 +144,6 @@ jobs: variant: _alternative_templates runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} uses: 
actions/setup-python@v2 @@ -187,10 +171,6 @@ jobs: showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -221,10 +201,6 @@ jobs: matrix: variant: ['', _alternative_templates] steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -252,10 +228,6 @@ jobs: snippetgen: runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 @@ -276,10 +248,6 @@ jobs: python: [3.6, 3.7, 3.8, 3.9] runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v2 @@ -305,10 +273,6 @@ jobs: variant: _alternative_templates runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v2 @@ -328,10 +292,6 @@ jobs: runs-on: ubuntu-latest container: gcr.io/gapic-images/googleapis steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: Cache Bazel files id: cache-bazel @@ -359,10 +319,6 @@ jobs: style-check: runs-on: ubuntu-latest steps: - - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.7.0 - with: - access_token: ${{ github.token }} - uses: actions/checkout@v2 - name: 
Set up Python 3.8 uses: actions/setup-python@v2 From d313b52e488de6cc6febdc3c54e88e4d06d0c381 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 26 Jan 2022 16:28:18 -0500 Subject: [PATCH 0706/1339] chore: sort query params in rest unit tests (#1155) I've confirmed that the unit tests in python-compute are still passing with this change. Fixes #1154 --- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 6 +++--- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a7934d84e87b..53c3489459c3 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1015,7 +1015,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% set mock_value = req_field.primitive_mock_as_str() %} {% if method.query_params %} # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(({% for param in method.query_params %}"{{param|camel_case }}", {% endfor %})) + assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param|camel_case }}", {% endfor %})) {% endif %} jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} @@ -1023,7 +1023,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) {% if method.query_params %} # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(({% for param in method.query_params %}"{{param}}", + assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) {% endif %} jsonified_request.update(unset_fields) @@ -1109,7 +1109,7 @@ def test_{{ method_name }}_rest_unset_required_fields(): transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.{{ method.name|snake_case }}._get_unset_required_fields({}) - assert set(unset_fields) == (set(({% for param in method.query_params %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{param.name|camel_case}}", {% endfor %}))) + assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{param.name|camel_case}}", {% endfor %}))) {% endif %}{# required_fields #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index cdee5b76971d..3c7c4561f183 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1426,7 +1426,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) {% if method.query_params %} # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(({% for param in method.query_params %}"{{param}}", {% endfor %})) + assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) {% endif %} jsonified_request.update(unset_fields) @@ -1511,7 +1511,7 @@ def test_{{ method_name }}_rest_unset_required_fields(): transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.{{ method.name|snake_case }}._get_unset_required_fields({}) - assert set(unset_fields) == (set(({% for param in method.query_params %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) + assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) {% endif %}{# required_fields #} From e706664eda4964910880a95ee30cde7e65c2c32e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 26 Jan 2022 16:34:42 -0500 Subject: [PATCH 0707/1339] chore: migrate default branch to main (#1156) Fixes #1109 --- packages/gapic-generator/.github/snippet-bot.yml | 2 +- .../gapic-generator/.github/sync-repo-settings.yaml | 2 +- packages/gapic-generator/docs/conf.py | 10 +++++----- packages/gapic-generator/docs/templates.rst | 2 +- .../gapic/ads-templates/docs/conf.py.j2 | 10 +++++----- .../gapic-generator/gapic/templates/docs/conf.py.j2 | 10 +++++----- .../tests/integration/goldens/asset/docs/conf.py | 10 +++++----- .../tests/integration/goldens/credentials/docs/conf.py | 10 +++++----- .../tests/integration/goldens/logging/docs/conf.py | 10 +++++----- .../tests/integration/goldens/redis/docs/conf.py | 10 +++++----- 10 files changed, 38 insertions(+), 38 deletions(-) diff --git a/packages/gapic-generator/.github/snippet-bot.yml 
b/packages/gapic-generator/.github/snippet-bot.yml index 77ce8f8255e5..19254c01c97d 100644 --- a/packages/gapic-generator/.github/snippet-bot.yml +++ b/packages/gapic-generator/.github/snippet-bot.yml @@ -1,3 +1,3 @@ -# https://github.com/googleapis/repo-automation-bots/tree/master/packages/snippet-bot +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/snippet-bot ignoreFiles: - "**/*.py" diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index c4a09c500acf..048788301d12 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -2,7 +2,7 @@ rebaseMergeAllowed: true squashMergeAllowed: true mergeCommitAllowed: false branchProtectionRules: -- pattern: master +- pattern: main isAdminEnforced: true requiredStatusCheckContexts: - 'cla/google' diff --git a/packages/gapic-generator/docs/conf.py b/packages/gapic-generator/docs/conf.py index 92849ebac27e..fbe1c27365fc 100644 --- a/packages/gapic-generator/docs/conf.py +++ b/packages/gapic-generator/docs/conf.py @@ -59,8 +59,8 @@ # source_suffix = ['.rst', '.md'] source_suffix = '.rst' -# The master toctree document. -master_doc = 'index' +# The root toctree document. +root_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -137,7 +137,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'APIClientGeneratorforPython.tex', 'API Client Generator for Python Documentation', + (root_doc, 'APIClientGeneratorforPython.tex', 'API Client Generator for Python Documentation', 'Luke Sneeringer', 'manual'), ] @@ -147,7 +147,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'apiclientgeneratorforpython', 'API Client Generator for Python Documentation', + (root_doc, 'apiclientgeneratorforpython', 'API Client Generator for Python Documentation', [author], 1) ] @@ -158,7 +158,7 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'APIClientGeneratorforPython', 'API Client Generator for Python Documentation', + (root_doc, 'APIClientGeneratorforPython', 'API Client Generator for Python Documentation', author, 'APIClientGeneratorforPython', 'One line description of project.', 'Miscellaneous'), ] diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index 702160551a9a..6cba34941333 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -21,7 +21,7 @@ Locating Templates ------------------ Templates are included in output simply on the basis that they exist. -**There is no master list of templates**; it is assumed that every template +**There is no primary list of templates**; it is assumed that every template should be rendered (unless its name begins with a single underscore). .. note:: diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 index 18475542c4b1..962d9b876d52 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -63,8 +63,8 @@ source_suffix = [".rst", ".md"] # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = u"{{ api.naming.warehouse_package_name }}" @@ -261,7 +261,7 @@ latex_elements = { # author, documentclass [howto, manual, or own class]). 
latex_documents = [ ( - master_doc, + root_doc, "{{ api.naming.warehouse_package_name }}.tex", u"{{ api.naming.warehouse_package_name }} Documentation", author, @@ -296,7 +296,7 @@ latex_documents = [ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "{{ api.naming.warehouse_package_name }}", u"{{ api.naming.long_name }} Documentation", [author], @@ -315,7 +315,7 @@ man_pages = [ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "{{ api.naming.warehouse_package_name }}", u"{{ api.naming.warehouse_package_name }} Documentation", author, diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index 7987f050301b..f2500e2012da 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -63,8 +63,8 @@ source_suffix = [".rst", ".md"] # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = u"{{ api.naming.warehouse_package_name }}" @@ -261,7 +261,7 @@ latex_elements = { # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "{{ api.naming.warehouse_package_name }}.tex", u"{{ api.naming.warehouse_package_name }} Documentation", author, @@ -296,7 +296,7 @@ latex_documents = [ # (source start file, name, description, authors, manual section). 
man_pages = [ ( - master_doc, + root_doc, "{{ api.naming.warehouse_package_name }}", u"{{ api.naming.long_name }} Documentation", [author], @@ -315,7 +315,7 @@ man_pages = [ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "{{ api.naming.warehouse_package_name }}", u"{{ api.naming.warehouse_package_name }} Documentation", author, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 3aa26721fecd..945269d461e0 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -74,8 +74,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = u"google-cloud-asset" @@ -272,7 +272,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-asset.tex", u"google-cloud-asset Documentation", author, @@ -307,7 +307,7 @@ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "google-cloud-asset", u"Google Cloud Asset Documentation", [author], @@ -326,7 +326,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-asset", u"google-cloud-asset Documentation", author, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 8f9d83a8bfc4..9371c5bab00e 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -74,8 +74,8 @@ # The encoding of source files. 
# source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = u"google-iam-credentials" @@ -272,7 +272,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-iam-credentials.tex", u"google-iam-credentials Documentation", author, @@ -307,7 +307,7 @@ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "google-iam-credentials", u"Google Iam Credentials Documentation", [author], @@ -326,7 +326,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-iam-credentials", u"google-iam-credentials Documentation", author, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index eb6783779012..9c148689227b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -74,8 +74,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = u"google-cloud-logging" @@ -272,7 +272,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-logging.tex", u"google-cloud-logging Documentation", author, @@ -307,7 +307,7 @@ # (source start file, name, description, authors, manual section). 
man_pages = [ ( - master_doc, + root_doc, "google-cloud-logging", u"Google Cloud Logging Documentation", [author], @@ -326,7 +326,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-logging", u"google-cloud-logging Documentation", author, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index a9b259f3561e..63f6bf6e97f8 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -74,8 +74,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = u"google-cloud-redis" @@ -272,7 +272,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-redis.tex", u"google-cloud-redis Documentation", author, @@ -307,7 +307,7 @@ # (source start file, name, description, authors, manual section). 
man_pages = [ ( - master_doc, + root_doc, "google-cloud-redis", u"Google Cloud Redis Documentation", [author], @@ -326,7 +326,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-redis", u"google-cloud-redis Documentation", author, From fe0dd476a813d856640448810e786c10b8378f98 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Jan 2022 22:48:22 +0100 Subject: [PATCH 0708/1339] chore(deps): update dependency protobuf to v3.19.3 (#1150) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [protobuf](https://developers.google.com/protocol-buffers/) | `==3.19.1` -> `==3.19.3` | [![age](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.3/compatibility-slim/3.19.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/protobuf/3.19.3/confidence-slim/3.19.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). 
View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 880b056f22ce..134c06812c8a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.1.1 googleapis-common-protos==1.54.0 jinja2==3.0.3 MarkupSafe==2.0.1 -protobuf==3.19.1 +protobuf==3.19.3 pypandoc==1.6.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From c08190d86c803752e5cccc55de8f4caa7657ddf5 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 27 Jan 2022 11:28:35 -0800 Subject: [PATCH 0709/1339] chore: fix CI invocations to use Python 3.9 (#1148) --- .../.github/workflows/tests.yaml | 32 ++++++++--------- .../services/%service/transports/rest.py.j2 | 35 +++++++++++++------ .../gapic/ads-templates/docs/conf.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 2 +- packages/gapic-generator/gapic/schema/api.py | 22 ++++++++---- .../gapic-generator/gapic/schema/wrappers.py | 12 ++++--- .../services/%service/transports/rest.py.j2 | 31 +++++++++++----- .../gapic/templates/docs/conf.py.j2 | 2 +- .../gapic/templates/noxfile.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 2 +- packages/gapic-generator/noxfile.py | 9 +++-- .../integration/goldens/asset/docs/conf.py | 2 +- .../integration/goldens/asset/noxfile.py | 2 +- .../goldens/credentials/docs/conf.py | 2 +- .../goldens/credentials/noxfile.py | 2 +- .../integration/goldens/logging/docs/conf.py | 2 +- .../integration/goldens/logging/noxfile.py | 2 +- .../integration/goldens/redis/docs/conf.py | 2 +- .../integration/goldens/redis/noxfile.py | 2 +- .../gapic-generator/tests/system/conftest.py | 29 ++++++++------- .../system/test_client_context_manager.py | 4 +++ 
.../tests/system/test_error_details.py | 14 ++++++++ .../tests/system/test_retry.py | 3 +- .../tests/system/test_unary.py | 3 +- .../tests/unit/schema/wrappers/test_method.py | 19 ++++++++++ 25 files changed, 162 insertions(+), 77 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 0f32d9142541..f65833509fd5 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -18,10 +18,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install nox. run: python -m pip install nox @@ -31,10 +31,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install nox. run: python -m pip install nox @@ -47,10 +47,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install system dependencies. run: | @@ -90,10 +90,10 @@ jobs: run: | sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ @@ -172,10 +172,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install system dependencies. 
run: | @@ -202,10 +202,10 @@ jobs: variant: ['', _alternative_templates] steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install system dependencies. run: | @@ -229,10 +229,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install system dependencies. run: | @@ -320,10 +320,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.9 cache: 'pip' - name: Install autopep8 run: | diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index a4dc7c61e3ac..6ba9385bc54c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -19,7 +19,8 @@ from google.protobuf import json_format {% endif %} from requests import __version__ as requests_version import dataclasses -from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -65,7 +66,7 @@ class {{ service.name }}RestInterceptor: .. 
code-block: class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): -{% for _, method in service.methods|dictsort if not (method.server_streaming or method.client_streaming) %} + {% for _, method in service.methods|dictsort if not (method.server_streaming or method.client_streaming) %} def pre_{{ method.name|snake_case }}(request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -81,7 +82,7 @@ class {{ service.name }}RestInterceptor: """ - {% for method in service.methods.values()|sort(attribute="name") if not(method.server_streaming or method.client_streaming) %} + {% for method in service.methods.values()|sort(attribute="name") if not (method.server_streaming or method.client_streaming) %} def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for {{ method.name|snake_case }} @@ -175,6 +176,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + super().__init__( host=host, credentials=credentials, @@ -184,7 +193,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST) {% if service.has_lro %} - self._operations_client = None + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None {% endif %} if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -202,7 +211,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): """ # Only create a new client if we do not already have one. 
if self._operations_client is None: - http_options = { + http_options: Dict[str, List[Dict[str, str]]] = { {% for selector, rules in api.http_options.items() %} {% if selector.startswith('google.longrunning.Operations') %} '{{ selector }}': [ @@ -238,9 +247,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __hash__(self): return hash("{{method.name}}") + {% if not (method.server_streaming or method.client_streaming) %} {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} @@ -258,7 +268,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> {{method.output.ident}}: + ){% if not method.void %} -> {{method.output.ident}}{% endif %}: {% if method.http_options and not (method.server_streaming or method.client_streaming) %} r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( @@ -282,7 +292,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} """ - http_options = [ + http_options: List[Dict[str, str]] = [ {%- for rule in method.http_options %}{ 'method': '{{ rule.method }}', 'uri': '{{ rule.uri }}', @@ -330,8 +340,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): headers = dict(metadata) headers['Content-Type'] = 'application/json' response = getattr(self._session, method)( - # Replace with proper schema configuration (http/https) logic - "https://{host}{uri}".format(host=self._host, uri=uri), + 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), @@ -344,6 +353,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) + {% if not method.void %} # Return the response {% if method.lro %} @@ -357,6 +367,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# method.lro #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) return resp + {% endif %}{# method.void #} {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} {% if not method.http_options %} @@ -384,7 +395,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): if not stub: stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) - return stub + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return stub # type: ignore {% endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 index 962d9b876d52..5b3946301db4 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -28,7 +28,7 @@ __version__ = "0.1.0" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "4.0.1" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 53c3489459c3..68c5a7bb396c 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1251,7 +1251,7 @@ def test_{{ method.name|snake_case }}_rest_flattened(): assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] {% with uri = method.http_options[0].uri %} - assert path_template.validate("https://%s{{ uri }}" % client.transport._host, args[1]) + assert path_template.validate("%s{{ uri }}" % client.transport._host, args[1]) {% endwith %} {# TODO(kbandes) - reverse-transcode request args to check all request fields #} diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 2a0eb4e2c410..f3dbb189718e 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -628,15 +628,25 @@ def proto(self) -> Proto: @cached_property def api_enums(self) -> Mapping[str, wrappers.EnumType]: - return collections.ChainMap({}, self.proto_enums, - *[p.all_enums for p in self.prior_protos.values()], - ) + return collections.ChainMap( + {}, + self.proto_enums, + # This is actually fine from a typing perspective: + # we're agglutinating all the prior protos' enums, which are + # stored in maps. This is just a convenient way to expand it out. 
+ *[p.all_enums for p in self.prior_protos.values()], # type: ignore + ) @cached_property def api_messages(self) -> Mapping[str, wrappers.MessageType]: - return collections.ChainMap({}, self.proto_messages, - *[p.all_messages for p in self.prior_protos.values()], - ) + return collections.ChainMap( + {}, + self.proto_messages, + # This is actually fine from a typing perspective: + # we're agglutinating all the prior protos' enums, which are + # stored in maps. This is just a convenient way to expand it out. + *[p.all_messages for p in self.prior_protos.values()], # type: ignore + ) def _load_children(self, children: Sequence, loader: Callable, *, diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 08b764a5ac78..6f059c39632f 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -770,16 +770,16 @@ class HttpRule: uri: str body: Optional[str] - def path_fields(self, method: "~.Method") -> List[Tuple[Field, str, str]]: + def path_fields(self, method: "Method") -> List[Tuple[Field, str, str]]: """return list of (name, template) tuples extracted from uri.""" input = method.input return [(input.get_field(*match.group("name").split(".")), match.group("name"), match.group("template")) for match in path_template._VARIABLE_RE.finditer(self.uri)] - def sample_request(self, method: "~.Method") -> str: + def sample_request(self, method: "Method") -> Dict[str, Any]: """return json dict for sample request matching the uri template.""" - def sample_from_path_fields(paths: List[Tuple["wrappers.Field", str, str]]) -> Dict[Any, Any]: + def sample_from_path_fields(paths: List[Tuple[Field, str, str]]) -> Dict[str, Any]: """Construct a dict for a sample request object from a list of fields and template patterns. 
@@ -1040,7 +1040,11 @@ def query_params(self) -> Set[str]: params = set(self.path_params) body = self.http_opt.get('body') if body: - params.add(body) + if body == "*": + # The entire request is the REST body. + return set() + else: + params.add(body) return set(self.input.fields) - params diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index b208c0940fcc..6ba9385bc54c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -19,7 +19,8 @@ from google.protobuf import json_format {% endif %} from requests import __version__ as requests_version import dataclasses -from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -175,6 +176,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + super().__init__( host=host, credentials=credentials, @@ -184,7 +193,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST) {% if service.has_lro %} - self._operations_client = None + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None {% endif %} if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) @@ -202,7 +211,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): """ # Only create a new client if we do not already have one. 
if self._operations_client is None: - http_options = { + http_options: Dict[str, List[Dict[str, str]]] = { {% for selector, rules in api.http_options.items() %} {% if selector.startswith('google.longrunning.Operations') %} '{{ selector }}': [ @@ -238,9 +247,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __hash__(self): return hash("{{method.name}}") + {% if not (method.server_streaming or method.client_streaming) %} {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} @@ -258,7 +268,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> {{method.output.ident}}: + ){% if not method.void %} -> {{method.output.ident}}{% endif %}: {% if method.http_options and not (method.server_streaming or method.client_streaming) %} r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( @@ -282,7 +292,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} """ - http_options = [ + http_options: List[Dict[str, str]] = [ {%- for rule in method.http_options %}{ 'method': '{{ rule.method }}', 'uri': '{{ rule.uri }}', @@ -330,8 +340,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): headers = dict(metadata) headers['Content-Type'] = 'application/json' response = getattr(self._session, method)( - # Replace with proper schema configuration (http/https) logic - "https://{host}{uri}".format(host=self._host, uri=uri), + 
"{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params), @@ -344,6 +353,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): # subclass. if response.status_code >= 400: raise core_exceptions.from_http_response(response) + {% if not method.void %} # Return the response {% if method.lro %} @@ -357,6 +367,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# method.lro #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) return resp + {% endif %}{# method.void #} {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} {% if not method.http_options %} @@ -384,7 +395,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): if not stub: stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) - return stub + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return stub # type: ignore {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index f2500e2012da..bbd444270e29 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -28,7 +28,7 @@ __version__ = "0.1.0" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "4.0.1" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index f4b393cbce7e..de8dcac80415 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -108,7 +108,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3c7c4561f183..5f9aafb63bee 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1654,7 +1654,7 @@ def test_{{ method_name }}_rest_flattened(): assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] {% with uri = method.http_options[0].uri %} - assert path_template.validate("https://%s{{ uri }}" % client.transport._host, args[1]) + assert path_template.validate("%s{{ uri }}" % client.transport._host, args[1]) {% endwith %} {# TODO(kbandes) - reverse-transcode request args to check all request fields #} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index a9df7d65d4c6..358260489d74 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -26,6 +26,9 @@ import shutil +nox.options.error_on_missing_interpreters = True + + showcase_version = os.environ.get("SHOWCASE_VERSION", "0.18.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", 
"ads-templates") @@ -37,7 +40,7 @@ "3.9", ) -NEWEST_PYTHON = "3.9" +NEWEST_PYTHON = ALL_PYTHON[-1] @nox.session(python=ALL_PYTHON) @@ -368,7 +371,7 @@ def showcase_mypy( """Perform typecheck analysis on the generated Showcase library.""" # Install pytest and gapic-generator-python - session.install("mypy", "types-pkg-resources") + session.install("mypy", "types-pkg-resources", "types-protobuf", "types-requests", "types-dataclasses") with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) @@ -410,7 +413,7 @@ def snippetgen(session): def docs(session): """Build the docs.""" - session.install("sphinx < 1.8", "sphinx_rtd_theme") + session.install("sphinx==4.0.1", "sphinx_rtd_theme") session.install(".") # Build the docs! diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 945269d461e0..8af34db8a130 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -39,7 +39,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "4.0.1" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index c999d03a684c..0c0e075dd885 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -115,7 +115,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 9371c5bab00e..0a00d31a38e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -39,7 +39,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "4.0.1" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index fa57dfa77798..b8c5b4e7fab9 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -115,7 +115,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index 9c148689227b..1c0f97d4adfc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -39,7 +39,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "4.0.1" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index d8322070774e..4cbb5eafbbb6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -115,7 +115,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index 63f6bf6e97f8..7e4dd684d931 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -39,7 +39,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "4.0.1" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 318ac36b6a0a..947a5981261b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -115,7 +115,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index fd92e72bc8a5..86935c77420b 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -30,6 +30,18 @@ from google.showcase import EchoAsyncClient from google.showcase import IdentityAsyncClient + _test_event_loop = asyncio.new_event_loop() + asyncio.set_event_loop(_test_event_loop) + + # NOTE(lidiz) We must override the default event_loop fixture from + # pytest-asyncio. pytest fixture frees resources once there isn't any reference + # to it. So, the event loop might close before tests finishes. In the + # customized version, we don't close the event loop. + + @pytest.fixture + def event_loop(): + return asyncio.get_event_loop() + @pytest.fixture def async_echo(use_mtls, event_loop): return construct_client( @@ -48,18 +60,6 @@ def async_identity(use_mtls, event_loop): channel_creator=aio.insecure_channel ) - _test_event_loop = asyncio.new_event_loop() - - # NOTE(lidiz) We must override the default event_loop fixture from - # pytest-asyncio. pytest fixture frees resources once there isn't any reference - # to it. So, the event loop might close before tests finishes. 
In the - # customized version, we don't close the event loop. - - @pytest.fixture - def event_loop(): - asyncio.set_event_loop(_test_event_loop) - return asyncio.get_event_loop() - dir = os.path.dirname(__file__) with open(os.path.join(dir, "../cert/mtls.crt"), "rb") as fh: @@ -108,12 +108,15 @@ def construct_client( transport_cls = client_class.get_transport_class(transport_name) if transport_name in ["grpc", "grpc_asyncio"]: transport = transport_cls( + credentials=credentials.AnonymousCredentials(), channel=channel_creator("localhost:7469"), ) elif transport_name == "rest": # The custom host explicitly bypasses https. transport = transport_cls( - host="http://localhost:7469", + credentials=credentials.AnonymousCredentials(), + host="localhost:7469", + url_scheme="http", ) else: raise RuntimeError(f"Unexpected transport type: {transport_name}") diff --git a/packages/gapic-generator/tests/system/test_client_context_manager.py b/packages/gapic-generator/tests/system/test_client_context_manager.py index 83e1c4353649..0d20292dc6e5 100644 --- a/packages/gapic-generator/tests/system/test_client_context_manager.py +++ b/packages/gapic-generator/tests/system/test_client_context_manager.py @@ -26,6 +26,10 @@ def test_client(echo): def test_client_destroyed(echo): + # The REST session is fine with being closed multiple times. 
+ if "rest" in str(echo.transport).lower(): + return + echo.__exit__(None, None, None) with pytest.raises(ValueError): echo.echo({ diff --git a/packages/gapic-generator/tests/system/test_error_details.py b/packages/gapic-generator/tests/system/test_error_details.py index 6061e8f015d1..0561b3bdfd65 100644 --- a/packages/gapic-generator/tests/system/test_error_details.py +++ b/packages/gapic-generator/tests/system/test_error_details.py @@ -34,6 +34,13 @@ def create_status(error_details=None): def test_bad_request_details(echo): + # TODO(dovs): reenable when transcoding requests with an "Any" + # field is properly handled + # See https://github.com/googleapis/proto-plus-python/issues/285 + # for background and tracking. + if "rest" in str(echo.transport).lower(): + return + def create_bad_request_details(): bad_request_details = error_details_pb2.BadRequest() field_violation = bad_request_details.field_violations.add() @@ -51,6 +58,13 @@ def create_bad_request_details(): def test_precondition_failure_details(echo): + # TODO(dovs): reenable when transcoding requests with an "Any" + # field is properly handled + # See https://github.com/googleapis/proto-plus-python/issues/285 + # for background and tracking. 
+ if "rest" in str(echo.transport).lower(): + return + def create_precondition_failure_details(): pf_details = error_details_pb2.PreconditionFailure() violation = pf_details.violations.add() diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 7e67c298c677..9c7970210f50 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -20,7 +20,8 @@ def test_retry_bubble(echo): - with pytest.raises(exceptions.DeadlineExceeded): + # Note: InvalidArgument is from gRPC, InternalServerError from http + with pytest.raises((exceptions.DeadlineExceeded, exceptions.InternalServerError)): echo.echo({ 'error': { 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index 68ed43b01620..aafc50066d78 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -37,7 +37,8 @@ def test_unary_with_dict(echo): def test_unary_error(echo): message = 'Bad things! Bad things!' 
- with pytest.raises(exceptions.InvalidArgument) as exc: + # Note: InvalidArgument is from gRPC, InternalServerError from http + with pytest.raises((exceptions.InvalidArgument, exceptions.InternalServerError)) as exc: echo.echo({ 'error': { 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 814893c39cf5..ff7b032e7452 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -563,6 +563,25 @@ def test_method_query_params_no_body(): assert method.query_params == {'region'} +def test_method_query_params_star_body(): + # tests only the basic case of grpc transcoding + http_rule = http_pb2.HttpRule( + post='/v1/{project}/topics', + body='*' + ) + input_message = make_message( + 'MethodInput', + fields=( + make_field('region'), + make_field('project'), + make_field('address') + ) + ) + method = make_method('DoSomething', http_rule=http_rule, + input_message=input_message) + assert method.query_params == set() + + def test_method_query_params_no_http_rule(): method = make_method('DoSomething') assert method.query_params == set() From 16cdde764d22991d9133916ac572d53c7a7d4a08 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 27 Jan 2022 15:18:19 -0500 Subject: [PATCH 0710/1339] chore: set package version to 0.60.0 (#1161) I'm hoping this will fix the versioning issue with release please where it shows version 0.39.1 instead of 0.60.1 [here](https://github.com/googleapis/gapic-generator-python/pull/1160). 
--- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index b5e84e5dd106..f7b7ea260f20 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.35.10" +version = "0.60.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 7f11f6cc5db0f0219e95f5305896e0ba2d2ef31c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 27 Jan 2022 23:30:22 +0100 Subject: [PATCH 0711/1339] chore(deps): update dependency pypandoc to v1.7.2 (#1111) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [pypandoc](https://togithub.com/bebraw/pypandoc) | `==1.6.4` -> `==1.7.2` | [![age](https://badges.renovateapi.com/packages/pypi/pypandoc/1.7.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pypandoc/1.7.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pypandoc/1.7.2/compatibility-slim/1.6.4)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pypandoc/1.7.2/confidence-slim/1.6.4)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
bebraw/pypandoc ### [`v1.7.0`](https://togithub.com/bebraw/pypandoc/releases/v1.7.0) [Compare Source](https://togithub.com/bebraw/pypandoc/compare/v1.6.4...v1.7.0) This release of PyPandoc enables the new sandbox mode of pandoc >= 2.… …15 by default. This can be toggled and has no effect on pandoc versions < 2.15. In addition this release also adds an option to verify from and to formats before converting, reducing the calls to pandoc. Two new utility functions has also been added (ensure_pandoc_minimal_version and ensure_pandoc_maximal_version), to check the major and minor version of the pandoc currently being used. In addition, the old convert() function, has been set to be removed in the next major release of PyPandoc (1.8) Other than that, a few bug fixes here and there. - [#​245](https://togithub.com/bebraw/pypandoc/issues/245) - [#​244](https://togithub.com/bebraw/pypandoc/issues/244) - [#​231](https://togithub.com/bebraw/pypandoc/issues/231) - [#​240](https://togithub.com/bebraw/pypandoc/issues/240) - [#​241](https://togithub.com/bebraw/pypandoc/issues/241) - [#​238](https://togithub.com/bebraw/pypandoc/issues/238) - [#​237](https://togithub.com/bebraw/pypandoc/issues/237) - [#​235](https://togithub.com/bebraw/pypandoc/issues/235) - [#​225](https://togithub.com/bebraw/pypandoc/issues/225) - [#​226](https://togithub.com/bebraw/pypandoc/issues/226) - [#​227](https://togithub.com/bebraw/pypandoc/issues/227) - [#​229](https://togithub.com/bebraw/pypandoc/issues/229)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 134c06812c8a..3f3aa38747ca 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,6 +4,6 @@ googleapis-common-protos==1.54.0 jinja2==3.0.3 MarkupSafe==2.0.1 protobuf==3.19.3 -pypandoc==1.6.4 +pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 3edd837c56085bca040fe4e6f3979910fc8626f0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 27 Jan 2022 19:03:55 -0500 Subject: [PATCH 0712/1339] fix: resolve DuplicateCredentialArgs when using credentials_file (#1159) * fix: resolve DuplicateCredentialArgs when using credentials_file * update golden files * exclude test for rest transport --- .../services/%service/transports/grpc.py.j2 | 5 +- .../services/%service/transports/rest.py.j2 | 1 + .../%name_%version/%sub/test_%service.py.j2 | 37 +++++++++++++++ .../services/%service/transports/grpc.py.j2 | 5 +- .../%service/transports/grpc_asyncio.py.j2 | 5 +- .../services/%service/transports/rest.py.j2 | 1 + .../%name_%version/%sub/test_%service.py.j2 | 47 +++++++++++++++++-- .../services/asset_service/transports/grpc.py | 5 +- 
.../asset_service/transports/grpc_asyncio.py | 5 +- .../unit/gapic/asset_v1/test_asset_service.py | 40 ++++++++++++++-- .../iam_credentials/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../credentials_v1/test_iam_credentials.py | 40 ++++++++++++++-- .../config_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../logging_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../metrics_service_v2/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../logging_v2/test_config_service_v2.py | 43 +++++++++++++++-- .../logging_v2/test_logging_service_v2.py | 44 +++++++++++++++-- .../logging_v2/test_metrics_service_v2.py | 44 +++++++++++++++-- .../services/cloud_redis/transports/grpc.py | 5 +- .../cloud_redis/transports/grpc_asyncio.py | 5 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 40 ++++++++++++++-- 25 files changed, 368 insertions(+), 44 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 3a8be2ff82e9..0aca3a55f048 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -159,8 +159,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 6ba9385bc54c..8c10178e7864 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -231,6 +231,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): rest_transport = operations_v1.OperationsRestTransport( host=self._host, + # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, http_options=http_options) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 68c5a7bb396c..944b88eb12f5 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -380,6 +380,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -393,6 +394,42 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) + + {% if 'grpc' in opts.transport %} + if "grpc" in 
transport_name: + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + {% with host = (service.host|default('localhost', true)) %} + create_channel.assert_called_with( + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + scopes=None, + default_host="{{ host }}", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} + {% endif %} {% if 'grpc' in opts.transport %} {# TODO(dovs): genericize this function#} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 3a8be2ff82e9..0aca3a55f048 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -159,8 +159,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - 
credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index fe3430946f04..a6271fe75242 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -203,8 +203,11 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 6ba9385bc54c..8c10178e7864 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -231,6 +231,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): rest_transport = operations_v1.OperationsRestTransport( host=self._host, + # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, http_options=http_options) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5f9aafb63bee..e98b0d23719d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -447,20 +447,21 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ {% if 'grpc' in opts.transport %} - ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), - ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio"), + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc", grpc_helpers), 
+ ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", grpc_helpers_async), {% endif %} {% if 'rest' in opts.transport %} - ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", None), {% endif %} ]) -def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name): +def test_{{ service.client_name|snake_case }}_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -474,6 +475,42 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) + + {% if 'grpc' in opts.transport %} + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + {% with host = (service.host|default('localhost', true)) %} + create_channel.assert_called_with( + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + scopes=None, + default_host="{{ host }}", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} + {% endif %} {% if 'grpc' in opts.transport %} {# TODO(dovs): genericize this function#} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index b96d161c6012..b4ddfa267bbe 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -159,8 +159,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier 
should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index aa2f77b8ae82..76849ef54c28 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -203,8 +203,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index dd4f527b9c7d..e7f2f550260f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -394,15 +394,16 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), ]) -def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name): +def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -417,6 +418,37 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran always_use_jwt_access=True, ) + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudasset.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="cloudasset.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + def test_asset_service_client_client_options_from_dict(): with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 597a4480badc..8826e8749529 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -164,8 +164,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 800002bfcdf6..7a45b94ccc8d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -208,8 +208,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 862636e9773c..d710b57c46c1 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -385,15 +385,16 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), - (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", grpc_helpers), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), ]) -def test_iam_credentials_client_client_options_credentials_file(client_class, transport_class, transport_name): +def test_iam_credentials_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -408,6 +409,37 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr always_use_jwt_access=True, ) + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "iamcredentials.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="iamcredentials.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + def test_iam_credentials_client_client_options_from_dict(): with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 4a0060ff65d7..a2abc2070821 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -156,8 +156,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 448083462e9b..c23fe70ca640 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -200,8 +200,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 1bd1284bd4c3..a1730e78a706 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -156,8 +156,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 71cf9f3cd7a9..a4511afa8a58 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -200,8 +200,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 1acb9b4bcb81..08b62a437b29 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -156,8 +156,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index e370f6943a09..44e6b95b0867 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -200,8 +200,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 7ddf1c2f7d42..5e06e3f10ef5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -386,15 +386,16 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), - (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), ]) -def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): +def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -409,6 +410,40 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + def test_config_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 74db989ed0f4..3e5cb31fbb29 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -392,15 +392,16 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), - (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), ]) -def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): +def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -415,6 +416,41 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + def test_logging_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 78c8a3bf1508..13830b687b1e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -390,15 +390,16 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport 
always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), - (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), ]) -def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name): +def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -413,6 +414,41 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + def test_metrics_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 53fef935f1e9..fabc8d5817f8 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -178,8 +178,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = 
type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index c1ad4b05b19b..ea35b3a00504 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -222,8 +222,11 @@ def __init__(self, *, if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index b189511ab7e8..ed33c9017f8f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -391,15 +391,16 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, always_use_jwt_access=True, ) -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), - (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), ]) -def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name): +def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
options = client_options.ClientOptions( credentials_file="credentials.json" ) + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -414,6 +415,37 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp always_use_jwt_access=True, ) + if "grpc" in transport_name: + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + def test_cloud_redis_client_client_options_from_dict(): with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None From 52c97ffa3f585920865e4ed7e706802e8127422b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 27 Jan 2022 17:00:28 -0800 Subject: [PATCH 0713/1339] chore(main): release 0.61.0 (#1163) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- 
packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d247dbc5ae77..5e6791a27a8a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [0.61.0](https://github.com/googleapis/gapic-generator-python/compare/v0.60.0...v0.61.0) (2022-01-28) + + +### Features + +* add interceptor-like functionality to REST transport ([#1142](https://github.com/googleapis/gapic-generator-python/issues/1142)) ([fe57eb2](https://github.com/googleapis/gapic-generator-python/commit/fe57eb26badb596fd9bd8a0b8b65f00f060b009d)) + + +### Bug Fixes + +* preserve hyperlinks with hyphens ([#1140](https://github.com/googleapis/gapic-generator-python/issues/1140)) ([b091bfc](https://github.com/googleapis/gapic-generator-python/commit/b091bfc523ee40af4ef0b28abfc0c26dcdf09ebe)), closes [#1131](https://github.com/googleapis/gapic-generator-python/issues/1131) +* resolve DuplicateCredentialArgs when using credentials_file ([#1159](https://github.com/googleapis/gapic-generator-python/issues/1159)) ([fccd2ba](https://github.com/googleapis/gapic-generator-python/commit/fccd2ba4f67c92bce5d9f7a2d59d5f1ea28829b5)) + ## [0.60.0](https://github.com/googleapis/gapic-generator-python/compare/v0.59.1...v0.60.0) (2022-01-19) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f7b7ea260f20..d012a942751a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.60.0" +version = "0.61.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 3febc005dcf6f57d71ecdcc09ab875c06f973b0d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 27 Jan 2022 17:12:13 -0800 
Subject: [PATCH 0714/1339] chore: serialize fragment tests (#1164) Occasionally the fragment test nox target trips over its feet when running concurrently. This change makes the tests serial by default. --- packages/gapic-generator/noxfile.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 358260489d74..11ba0c9658d0 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -146,11 +146,16 @@ def fragment(session): ) session.install("-e", ".") - with ThreadPoolExecutor() as p: - all_outs = p.map(FragTester(session, False), FRAGMENT_FILES) + if os.environ.get("PARALLEL_FRAGMENT_TESTS", "false").lower() == "true": + with ThreadPoolExecutor() as p: + all_outs = p.map(FragTester(session, False), FRAGMENT_FILES) - output = "".join(all_outs) - session.log(output) + output = "".join(all_outs) + session.log(output) + else: + tester = FragTester(session, False) + for frag in FRAGMENT_FILES: + session.log(tester(frag)) @nox.session(python=ALL_PYTHON[1:]) @@ -166,11 +171,16 @@ def fragment_alternative_templates(session): ) session.install("-e", ".") - with ThreadPoolExecutor() as p: - all_outs = p.map(FragTester(session, True), FRAGMENT_FILES) + if os.environ.get("PARALLEL_FRAGMENT_TESTS", "false").lower() == "true": + with ThreadPoolExecutor() as p: + all_outs = p.map(FragTester(session, True), FRAGMENT_FILES) - output = "".join(all_outs) - session.log(output) + output = "".join(all_outs) + session.log(output) + else: + tester = FragTester(session, True) + for frag in FRAGMENT_FILES: + session.log(tester(frag)) # TODO(yon-mg): -add compute context manager that includes rest transport From be498fc4b0c11342f22e790601da870eb7227c7a Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 28 Jan 2022 06:46:51 -0800 Subject: [PATCH 0715/1339] feat: expose extended operations annotations within generator (#1145) * feat: 
full diregapic LROs WIP add test, test fails * Style check * Integrate reviews * Failures * Mypy Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/gapic/schema/api.py | 15 ++ .../gapic-generator/gapic/schema/wrappers.py | 4 +- .../tests/unit/schema/test_api.py | 149 ++++++++++++++++++ .../unit/schema/wrappers/test_message.py | 10 +- .../unit/schema/wrappers/test_service.py | 2 +- 5 files changed, 172 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index f3dbb189718e..11cb77f28650 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -30,6 +30,7 @@ from google.api import http_pb2 # type: ignore from google.api import resource_pb2 # type: ignore from google.api import service_pb2 # type: ignore +from google.cloud import extended_operations_pb2 as ex_ops_pb2 # type: ignore from google.gapic.metadata import gapic_metadata_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import descriptor_pb2 @@ -474,6 +475,20 @@ def requires_package(self, pkg: Tuple[str, ...]) -> bool: for message in proto.all_messages.values() ) + def get_custom_operation_service(self, method: "wrappers.Method") -> "wrappers.Service": + if not method.output.is_extended_operation: + raise ValueError( + f"Method is not an extended operation LRO: {method.name}") + + op_serv_name = self.naming.proto_package + "." + \ + method.options.Extensions[ex_ops_pb2.operation_service] + op_serv = self.services[op_serv_name] + if not op_serv.custom_polling_method: + raise ValueError( + f"Service is not an extended operation operation service: {op_serv.name}") + + return op_serv + class _ProtoBuilder: """A "builder class" for Proto objects. 
diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 6f059c39632f..25fb11ae7be8 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -360,7 +360,7 @@ def oneof_fields(self, include_optional=False): return oneof_fields @utils.cached_property - def is_diregapic_operation(self) -> bool: + def is_extended_operation(self) -> bool: if not self.name == "Operation": return False @@ -877,7 +877,7 @@ def __getattr__(self, name): @property def is_operation_polling_method(self): - return self.output.is_diregapic_operation and self.options.Extensions[ex_ops_pb2.operation_polling_method] + return self.output.is_extended_operation and self.options.Extensions[ex_ops_pb2.operation_polling_method] @utils.cached_property def client_output(self): diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index afa82c8cfd4d..eae44b84b114 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -22,6 +22,7 @@ from google.api import client_pb2 from google.api import resource_pb2 from google.api_core import exceptions +from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.gapic.metadata import gapic_metadata_pb2 from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 @@ -1595,3 +1596,151 @@ def test_http_options(fs): method='get', uri='/v3/{name=projects/*/locations/*/operations/*}', body=None), wrappers.HttpRule(method='get', uri='/v3/{name=/locations/*/operations/*}', body=None)] } + + +def generate_basic_extended_operations_setup(): + T = descriptor_pb2.FieldDescriptorProto.Type + + operation = make_message_pb2( + name="Operation", + fields=( + make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", 
"error_code", "error_message"), start=1) + ), + ) + + for f in operation.field: + options = descriptor_pb2.FieldOptions() + # Note: The field numbers were carefully chosen to be the corresponding enum values. + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + options = descriptor_pb2.MethodOptions() + options.Extensions[ex_ops_pb2.operation_polling_method] = True + + polling_method = descriptor_pb2.MethodDescriptorProto( + name="Get", + input_type="google.extended_operations.v1.stuff.GetOperation", + output_type="google.extended_operations.v1.stuff.Operation", + options=options, + ) + + delete_input_message = make_message_pb2(name="Input") + delete_output_message = make_message_pb2(name="Output") + ops_service = descriptor_pb2.ServiceDescriptorProto( + name="CustomOperations", + method=[ + polling_method, + descriptor_pb2.MethodDescriptorProto( + name="Delete", + input_type="google.extended_operations.v1.stuff.Input", + output_type="google.extended_operations.v1.stuff.Output", + ), + ], + ) + + request = make_message_pb2( + name="GetOperation", + fields=[ + make_field_pb2(name="name", type=T.Value("TYPE_STRING"), number=1) + ], + ) + + initial_opts = descriptor_pb2.MethodOptions() + initial_opts.Extensions[ex_ops_pb2.operation_service] = ops_service.name + initial_input_message = make_message_pb2(name="Initial") + initial_method = descriptor_pb2.MethodDescriptorProto( + name="CreateTask", + input_type="google.extended_operations.v1.stuff.GetOperation", + output_type="google.extended_operations.v1.stuff.Operation", + options=initial_opts, + ) + + regular_service = descriptor_pb2.ServiceDescriptorProto( + name="RegularService", + method=[ + initial_method, + ], + ) + + file_protos = [ + make_file_pb2( + name="extended_operations.proto", + package="google.extended_operations.v1.stuff", + messages=[ + operation, + request, + delete_output_message, + delete_input_message, + initial_input_message, + ], + services=[ + 
regular_service, + ops_service, + ], + ), + ] + + return file_protos + + +def test_extended_operations_lro_operation_service(): + file_protos = generate_basic_extended_operations_setup() + api_schema = api.API.build(file_protos) + initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + + expected = api_schema.services['google.extended_operations.v1.stuff.CustomOperations'] + actual = api_schema.get_custom_operation_service(initial_method) + + assert expected is actual + + assert actual.custom_polling_method is actual.methods["Get"] + + +def test_extended_operations_lro_operation_service_no_annotation(): + file_protos = generate_basic_extended_operations_setup() + + api_schema = api.API.build(file_protos) + initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + # It's easier to manipulate data structures after building the API. + del initial_method.options.Extensions[ex_ops_pb2.operation_service] + + with pytest.raises(KeyError): + api_schema.get_custom_operation_service(initial_method) + + +def test_extended_operations_lro_operation_service_no_such_service(): + file_protos = generate_basic_extended_operations_setup() + + api_schema = api.API.build(file_protos) + initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + initial_method.options.Extensions[ex_ops_pb2.operation_service] = "UnrealService" + + with pytest.raises(KeyError): + api_schema.get_custom_operation_service(initial_method) + + +def test_extended_operations_lro_operation_service_not_an_lro(): + file_protos = generate_basic_extended_operations_setup() + + api_schema = api.API.build(file_protos) + initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + # Hack to pretend that the initial_method is not an LRO + super(type(initial_method), initial_method).__setattr__( + 
"output", initial_method.input) + + with pytest.raises(ValueError): + api_schema.get_custom_operation_service(initial_method) + + +def test_extended_operations_lro_operation_service_no_polling_method(): + file_protos = generate_basic_extended_operations_setup() + + api_schema = api.API.build(file_protos) + initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + + operation_service = api_schema.services["google.extended_operations.v1.stuff.CustomOperations"] + del operation_service.methods["Get"].options.Extensions[ex_ops_pb2.operation_polling_method] + + with pytest.raises(ValueError): + api_schema.get_custom_operation_service(initial_method) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 1519fadc6760..7cd5910c3f08 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -331,7 +331,7 @@ def test_required_fields(): assert set(request.required_fields) == {mass_kg, length_m, color} -def test_is_diregapic_operation(): +def test_is_extended_operation(): T = descriptor_pb2.FieldDescriptorProto.Type # Canonical Operation @@ -349,7 +349,7 @@ def test_is_diregapic_operation(): options.Extensions[ex_ops_pb2.operation_field] = f.number f.options.MergeFrom(options) - assert operation.is_diregapic_operation + assert operation.is_extended_operation # Missing a required field @@ -367,7 +367,7 @@ def test_is_diregapic_operation(): options.Extensions[ex_ops_pb2.operation_field] = f.number f.options.MergeFrom(options) - assert not missing.is_diregapic_operation + assert not missing.is_extended_operation # Named incorrectly @@ -383,7 +383,7 @@ def test_is_diregapic_operation(): options.Extensions[ex_ops_pb2.operation_field] = f.number f.options.MergeFrom(options) - assert not my_message.is_diregapic_operation + assert 
not my_message.is_extended_operation # Duplicated annotation for mapping in range(1, 5): @@ -401,4 +401,4 @@ def test_is_diregapic_operation(): f.options.MergeFrom(options) with pytest.raises(TypeError): - duplicate.is_diregapic_operation + duplicate.is_extended_operation diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 7cd41799ed8f..33e83494f7c7 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -589,7 +589,7 @@ def test_operation_polling_method(): assert not user_service.custom_polling_method -def test_diregapic_lro_detection(): +def test_extended_operations_lro_detection(): T = descriptor_pb2.FieldDescriptorProto.Type operation = make_message( From 423c0d53b82fcfcc2f9a7408e595b09aaf48db45 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 28 Jan 2022 10:12:12 -0700 Subject: [PATCH 0716/1339] fix(snippetgen): remove special handling for resource paths (#1134) There are some bugs in this implementation that stop generation in some APIs and result in incorrect snippets getting generated for others. Back out this feature for now so we can move forward with enabling snippetgen for all APIs. 
--- .../gapic/samplegen/samplegen.py | 45 +- .../services/asset_service/async_client.py | 12 +- .../asset_v1/services/asset_service/client.py | 12 +- ...sset_v1_asset_service_delete_feed_async.py | 6 +- ...asset_v1_asset_service_delete_feed_sync.py | 6 +- ...d_asset_v1_asset_service_get_feed_async.py | 6 +- ...ed_asset_v1_asset_service_get_feed_sync.py | 6 +- .../snippet_metadata_asset_v1.json | 48 +- .../services/iam_credentials/async_client.py | 24 +- .../services/iam_credentials/client.py | 24 +- ...credentials_generate_access_token_async.py | 6 +- ..._credentials_generate_access_token_sync.py | 6 +- ...iam_credentials_generate_id_token_async.py | 6 +- ..._iam_credentials_generate_id_token_sync.py | 6 +- ...ials_v1_iam_credentials_sign_blob_async.py | 6 +- ...tials_v1_iam_credentials_sign_blob_sync.py | 6 +- ...tials_v1_iam_credentials_sign_jwt_async.py | 6 +- ...ntials_v1_iam_credentials_sign_jwt_sync.py | 6 +- .../snippet_metadata_credentials_v1.json | 112 +-- .../config_service_v2/async_client.py | 123 +--- .../services/config_service_v2/client.py | 123 +--- .../logging_service_v2/async_client.py | 18 +- .../services/logging_service_v2/client.py | 18 +- .../metrics_service_v2/async_client.py | 29 +- .../services/metrics_service_v2/client.py | 29 +- ...2_config_service_v2_create_bucket_async.py | 7 +- ...v2_config_service_v2_create_bucket_sync.py | 7 +- ...onfig_service_v2_create_exclusion_async.py | 6 +- ...config_service_v2_create_exclusion_sync.py | 6 +- ..._v2_config_service_v2_create_sink_async.py | 6 +- ...g_v2_config_service_v2_create_sink_sync.py | 6 +- ...2_config_service_v2_delete_bucket_async.py | 7 +- ...v2_config_service_v2_delete_bucket_sync.py | 7 +- ...onfig_service_v2_delete_exclusion_async.py | 6 +- ...config_service_v2_delete_exclusion_sync.py | 6 +- ..._v2_config_service_v2_delete_sink_async.py | 6 +- ...g_v2_config_service_v2_delete_sink_sync.py | 6 +- ..._v2_config_service_v2_delete_view_async.py | 8 +- 
...g_v2_config_service_v2_delete_view_sync.py | 8 +- ...g_v2_config_service_v2_get_bucket_async.py | 7 +- ...ng_v2_config_service_v2_get_bucket_sync.py | 7 +- ...nfig_service_v2_get_cmek_settings_async.py | 5 +- ...onfig_service_v2_get_cmek_settings_sync.py | 5 +- ...2_config_service_v2_get_exclusion_async.py | 6 +- ...v2_config_service_v2_get_exclusion_sync.py | 6 +- ...ing_v2_config_service_v2_get_sink_async.py | 6 +- ...ging_v2_config_service_v2_get_sink_sync.py | 6 +- ...ing_v2_config_service_v2_get_view_async.py | 8 +- ...ging_v2_config_service_v2_get_view_sync.py | 8 +- ...v2_config_service_v2_list_buckets_async.py | 7 +- ..._v2_config_service_v2_list_buckets_sync.py | 7 +- ...config_service_v2_list_exclusions_async.py | 6 +- ..._config_service_v2_list_exclusions_sync.py | 6 +- ...g_v2_config_service_v2_list_sinks_async.py | 6 +- ...ng_v2_config_service_v2_list_sinks_sync.py | 6 +- ...config_service_v2_undelete_bucket_async.py | 7 +- ..._config_service_v2_undelete_bucket_sync.py | 7 +- ...2_config_service_v2_update_bucket_async.py | 7 +- ...v2_config_service_v2_update_bucket_sync.py | 7 +- ...onfig_service_v2_update_exclusion_async.py | 6 +- ...config_service_v2_update_exclusion_sync.py | 6 +- ..._v2_config_service_v2_update_sink_async.py | 6 +- ...g_v2_config_service_v2_update_sink_sync.py | 6 +- ..._v2_logging_service_v2_delete_log_async.py | 6 +- ...g_v2_logging_service_v2_delete_log_sync.py | 6 +- ...gging_service_v2_list_log_entries_async.py | 6 +- ...ogging_service_v2_list_log_entries_sync.py | 6 +- ...g_v2_logging_service_v2_list_logs_async.py | 6 +- ...ng_v2_logging_service_v2_list_logs_sync.py | 6 +- ...rics_service_v2_create_log_metric_async.py | 6 +- ...trics_service_v2_create_log_metric_sync.py | 6 +- ...rics_service_v2_delete_log_metric_async.py | 6 +- ...trics_service_v2_delete_log_metric_sync.py | 6 +- ...metrics_service_v2_get_log_metric_async.py | 6 +- ..._metrics_service_v2_get_log_metric_sync.py | 6 +- 
...trics_service_v2_list_log_metrics_async.py | 5 +- ...etrics_service_v2_list_log_metrics_sync.py | 5 +- ...rics_service_v2_update_log_metric_async.py | 6 +- ...trics_service_v2_update_log_metric_sync.py | 6 +- .../snippet_metadata_logging_v2.json | 652 +++++++++--------- .../services/cloud_redis/async_client.py | 40 +- .../redis_v1/services/cloud_redis/client.py | 40 +- ...is_v1_cloud_redis_create_instance_async.py | 6 +- ...dis_v1_cloud_redis_create_instance_sync.py | 6 +- ...is_v1_cloud_redis_delete_instance_async.py | 7 +- ...dis_v1_cloud_redis_delete_instance_sync.py | 7 +- ..._v1_cloud_redis_failover_instance_async.py | 7 +- ...s_v1_cloud_redis_failover_instance_sync.py | 7 +- ...redis_v1_cloud_redis_get_instance_async.py | 7 +- ..._redis_v1_cloud_redis_get_instance_sync.py | 7 +- ...dis_v1_cloud_redis_list_instances_async.py | 6 +- ...edis_v1_cloud_redis_list_instances_sync.py | 6 +- ...s_v1_cloud_redis_upgrade_instance_async.py | 7 +- ...is_v1_cloud_redis_upgrade_instance_sync.py | 7 +- .../snippet_metadata_redis_v1.json | 128 ++-- .../tests/unit/samplegen/test_samplegen.py | 37 - 96 files changed, 628 insertions(+), 1378 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 7f4c3dae317d..b71dd8b52976 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -928,37 +928,6 @@ def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str yield spec -def _generate_resource_path_request_object(field_name: str, message: wrappers.MessageType) -> List[Dict[str, str]]: - """Given a message that represents a resource, generate request objects that - populate the resource path args. - - Args: - field_name (str): The name of the field. - message (wrappers.MessageType): The message the field belongs to. 
- - Returns: - List[Dict[str, str]]: A list of dicts that can be turned into TransformedRequests. - """ - request = [] - - # Look for specific field names to substitute more realistic values - special_values_dict = { - "project": '"my-project-id"', - "location": '"us-central1"' - } - - for resource_path_arg in message.resource_path_args: - value = special_values_dict.get( - resource_path_arg, f'"{resource_path_arg}_value"') - request.append({ - # See TransformedRequest.build() for how 'field' is parsed - "field": f"{field_name}%{resource_path_arg}", - "value": value, - }) - - return request - - def generate_request_object(api_schema: api.API, service: wrappers.Service, message: wrappers.MessageType, field_name_prefix: str = ""): """Generate dummy input for a given message. @@ -993,18 +962,8 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess # TODO(busunkim): Properly handle map fields if field.is_primitive: - resource_reference_message = service.resource_messages_dict.get( - field.resource_reference) - # Some resource patterns have no resource_path_args - # https://github.com/googleapis/gapic-generator-python/issues/701 - if resource_reference_message and resource_reference_message.resource_path_args: - request += _generate_resource_path_request_object( - field_name, - resource_reference_message - ) - else: - request.append( - {"field": field_name, "value": field.mock_value_original_type}) + request.append( + {"field": field_name, "value": field.mock_value_original_type}) elif field.enum: # Choose the last enum value in the list since index 0 is often "unspecified" request.append( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 7515cdcc31b9..b269dcd87b61 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -619,12 +619,8 @@ def sample_get_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.GetFeedRequest( - name=name, + name="name_value", ) # Make the request @@ -938,12 +934,8 @@ def sample_delete_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.DeleteFeedRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index c3eec7541d47..41f6812d7a2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -815,12 +815,8 @@ def sample_get_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.GetFeedRequest( - name=name, + name="name_value", ) # Make the request @@ -1123,12 +1119,8 @@ def sample_delete_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.DeleteFeedRequest( - name=name, + name="name_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py index 4439eee886a3..d9fea4281440 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py @@ -32,12 +32,8 @@ async def sample_delete_feed(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.DeleteFeedRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py index ac34a49c01c9..f9008baa7b8b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py @@ -32,12 +32,8 @@ def sample_delete_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.DeleteFeedRequest( - name=name, + name="name_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py index 29cd0a2b165a..33c88d3b4b2a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -32,12 +32,8 @@ async def sample_get_feed(): client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.GetFeedRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index 30849fccb712..98834ef0be65 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -32,12 +32,8 @@ def sample_get_feed(): client = asset_v1.AssetServiceClient() # Initialize request argument(s) - project = "my-project-id" - feed = "feed_value" - name = f"projects/{project}/feeds/{feed}" - request = asset_v1.GetFeedRequest( - name=name, + name="name_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json index 8f871de592ea..b6d8b364c812 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json @@ -366,12 +366,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_DeleteFeed_async", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -381,16 +381,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -408,12 +408,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -423,16 +423,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -536,12 +536,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_GetFeed_async", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -551,18 +551,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, 
"type": "RESPONSE_HANDLING" } ] @@ -580,12 +580,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_GetFeed_sync", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -595,18 +595,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index db6153632c75..79cc549c8a50 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -218,12 +218,8 @@ def sample_generate_access_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateAccessTokenRequest( - name=name, + name="name_value", scope=['scope_value_1', 'scope_value_2'], ) @@ -376,12 +372,8 @@ def sample_generate_id_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateIdTokenRequest( - name=name, + name="name_value", audience="audience_value", ) @@ -527,12 +519,8 @@ def sample_sign_blob(): 
client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignBlobRequest( - name=name, + name="name_value", payload=b'payload_blob', ) @@ -665,12 +653,8 @@ def sample_sign_jwt(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignJwtRequest( - name=name, + name="name_value", payload="payload_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index deacb6c9d306..cfec2f470b59 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -406,12 +406,8 @@ def sample_generate_access_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateAccessTokenRequest( - name=name, + name="name_value", scope=['scope_value_1', 'scope_value_2'], ) @@ -558,12 +554,8 @@ def sample_generate_id_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateIdTokenRequest( - name=name, + 
name="name_value", audience="audience_value", ) @@ -703,12 +695,8 @@ def sample_sign_blob(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignBlobRequest( - name=name, + name="name_value", payload=b'payload_blob', ) @@ -835,12 +823,8 @@ def sample_sign_jwt(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignJwtRequest( - name=name, + name="name_value", payload="payload_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index 650f82bdbab1..60c86fc65a9f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -32,12 +32,8 @@ async def sample_generate_access_token(): client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateAccessTokenRequest( - name=name, + name="name_value", scope=['scope_value_1', 'scope_value_2'], ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index 17fedfa26aa7..4b47fc7ce4fa 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -32,12 +32,8 @@ def sample_generate_access_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateAccessTokenRequest( - name=name, + name="name_value", scope=['scope_value_1', 'scope_value_2'], ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py index 00d6538ca90a..826bad3e42a7 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -32,12 +32,8 @@ async def sample_generate_id_token(): client = 
credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateIdTokenRequest( - name=name, + name="name_value", audience="audience_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index 71e49cef5809..c7df5c9bb625 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -32,12 +32,8 @@ def sample_generate_id_token(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.GenerateIdTokenRequest( - name=name, + name="name_value", audience="audience_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py index b39981bbdd6f..52c3cc86ee1e 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -32,12 +32,8 @@ async def sample_sign_blob(): client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignBlobRequest( - name=name, + name="name_value", payload=b'payload_blob', ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index fbc18e178d73..c753ba88a4f2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -32,12 +32,8 @@ def sample_sign_blob(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignBlobRequest( - name=name, + name="name_value", payload=b'payload_blob', ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py index 35e865578592..6c1e1cfcd71c 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -32,12 +32,8 @@ async def sample_sign_jwt(): client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignJwtRequest( - name=name, + name="name_value", payload="payload_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index 298bfaf3e0d2..d3c7db98c489 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -32,12 +32,8 @@ def sample_sign_jwt(): client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) - project = "my-project-id" - service_account = "service_account_value" - name = f"projects/{project}/serviceAccounts/{service_account}" - request = credentials_v1.SignJwtRequest( - name=name, + name="name_value", payload="payload_value", ) diff 
--git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json index 8217a23d267d..3b1a110447a6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json @@ -14,12 +14,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -29,18 +29,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -58,12 +58,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -73,18 +73,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -103,12 +103,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -118,18 +118,18 @@ "type": "CLIENT_INITIALIZATION" }, { - 
"end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -147,12 +147,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -162,18 +162,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -192,12 +192,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -207,18 +207,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -236,12 +236,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -251,18 +251,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -281,12 +281,12 @@ "regionTag": 
"iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -296,18 +296,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -325,12 +325,12 @@ "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync", "segments": [ { - "end": 49, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 45, "start": 27, "type": "SHORT" }, @@ -340,18 +340,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index f1fdde44cfb0..825e5bcfea86 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -212,13 +212,8 @@ def sample_list_buckets(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.ListBucketsRequest( - parent=parent, + 
parent="parent_value", ) # Make the request @@ -330,13 +325,8 @@ def sample_get_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.GetBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -409,13 +399,8 @@ def sample_create_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.CreateBucketRequest( - parent=parent, + parent="parent_value", bucket_id="bucket_id_value", ) @@ -497,13 +482,8 @@ def sample_update_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UpdateBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -576,13 +556,8 @@ def sample_delete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.DeleteBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -644,13 +619,8 @@ def sample_undelete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UndeleteBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -816,14 +786,8 @@ def sample_get_view(): client = 
logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.GetViewRequest( - name=name, + name="name_value", ) # Make the request @@ -1046,14 +1010,8 @@ def sample_delete_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.DeleteViewRequest( - name=name, + name="name_value", ) # Make the request @@ -1114,12 +1072,8 @@ def sample_list_sinks(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - request = logging_v2.ListSinksRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -1236,12 +1190,8 @@ def sample_get_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.GetSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request @@ -1362,16 +1312,12 @@ def sample_create_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.CreateSinkRequest( - parent=parent, + parent="parent_value", sink=sink, ) @@ -1497,16 +1443,12 @@ def sample_update_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = 
"sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.UpdateSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", sink=sink, ) @@ -1656,12 +1598,8 @@ def sample_delete_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.DeleteSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request @@ -1758,12 +1696,8 @@ def sample_list_exclusions(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.ListExclusionsRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -1880,12 +1814,8 @@ def sample_get_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.GetExclusionRequest( - name=name, + name="name_value", ) # Make the request @@ -2008,16 +1938,12 @@ def sample_create_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.CreateExclusionRequest( - parent=parent, + parent="parent_value", exclusion=exclusion, ) @@ -2143,16 +2069,12 @@ def sample_update_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = 
f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.UpdateExclusionRequest( - name=name, + name="name_value", exclusion=exclusion, ) @@ -2289,12 +2211,8 @@ def sample_delete_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.DeleteExclusionRequest( - name=name, + name="name_value", ) # Make the request @@ -2400,11 +2318,8 @@ def sample_get_cmek_settings(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - name = f"projects/{project}/cmekSettings" - request = logging_v2.GetCmekSettingsRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index e9803b90315e..52f6b8bf0197 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -436,13 +436,8 @@ def sample_list_buckets(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.ListBucketsRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -555,13 +550,8 @@ def sample_get_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = 
"us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.GetBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -636,13 +626,8 @@ def sample_create_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.CreateBucketRequest( - parent=parent, + parent="parent_value", bucket_id="bucket_id_value", ) @@ -726,13 +711,8 @@ def sample_update_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UpdateBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -807,13 +787,8 @@ def sample_delete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.DeleteBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -877,13 +852,8 @@ def sample_undelete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UndeleteBucketRequest( - name=name, + name="name_value", ) # Make the request @@ -1052,14 +1022,8 @@ def sample_get_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = 
f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.GetViewRequest( - name=name, + name="name_value", ) # Make the request @@ -1288,14 +1252,8 @@ def sample_delete_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.DeleteViewRequest( - name=name, + name="name_value", ) # Make the request @@ -1358,12 +1316,8 @@ def sample_list_sinks(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - request = logging_v2.ListSinksRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -1473,12 +1427,8 @@ def sample_get_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.GetSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request @@ -1592,16 +1542,12 @@ def sample_create_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.CreateSinkRequest( - parent=parent, + parent="parent_value", sink=sink, ) @@ -1728,16 +1674,12 @@ def sample_update_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = 
logging_v2.UpdateSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", sink=sink, ) @@ -1880,12 +1822,8 @@ def sample_delete_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.DeleteSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request @@ -1975,12 +1913,8 @@ def sample_list_exclusions(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.ListExclusionsRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -2090,12 +2024,8 @@ def sample_get_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.GetExclusionRequest( - name=name, + name="name_value", ) # Make the request @@ -2211,16 +2141,12 @@ def sample_create_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.CreateExclusionRequest( - parent=parent, + parent="parent_value", exclusion=exclusion, ) @@ -2347,16 +2273,12 @@ def sample_update_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.UpdateExclusionRequest( - 
name=name, + name="name_value", exclusion=exclusion, ) @@ -2494,12 +2416,8 @@ def sample_delete_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.DeleteExclusionRequest( - name=name, + name="name_value", ) # Make the request @@ -2598,11 +2516,8 @@ def sample_get_cmek_settings(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - name = f"projects/{project}/cmekSettings" - request = logging_v2.GetCmekSettingsRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 81c360086661..55ed8dd0693c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -209,12 +209,8 @@ def sample_delete_log(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - log_name = f"projects/{project}/logs/{log}" - request = logging_v2.DeleteLogRequest( - log_name=log_name, + log_name="log_name_value", ) # Make the request @@ -512,12 +508,8 @@ def sample_list_log_entries(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - resource_names = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogEntriesRequest( - resource_names=resource_names, + resource_names=['resource_names_value_1', 'resource_names_value_2'], ) # 
Make the request @@ -755,12 +747,8 @@ def sample_list_logs(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - parent = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 6ad1f4049131..c30d02220d34 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -397,12 +397,8 @@ def sample_delete_log(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - log_name = f"projects/{project}/logs/{log}" - request = logging_v2.DeleteLogRequest( - log_name=log_name, + log_name="log_name_value", ) # Make the request @@ -685,12 +681,8 @@ def sample_list_log_entries(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - resource_names = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogEntriesRequest( - resource_names=resource_names, + resource_names=['resource_names_value_1', 'resource_names_value_2'], ) # Make the request @@ -915,12 +907,8 @@ def sample_list_logs(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - parent = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 06cb208f2211..f292d70de2dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -205,11 +205,8 @@ def sample_list_log_metrics(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - parent = f"projects/{project}" - request = logging_v2.ListLogMetricsRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -323,12 +320,8 @@ def sample_get_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.GetLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request @@ -441,16 +434,12 @@ def sample_create_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - parent = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.CreateLogMetricRequest( - parent=parent, + parent="parent_value", metric=metric, ) @@ -569,16 +558,12 @@ def sample_update_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" 
request = logging_v2.UpdateLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", metric=metric, ) @@ -703,12 +688,8 @@ def sample_delete_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.DeleteLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index d392c90094f2..408eab1573f3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -393,11 +393,8 @@ def sample_list_log_metrics(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - parent = f"projects/{project}" - request = logging_v2.ListLogMetricsRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -504,12 +501,8 @@ def sample_get_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.GetLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request @@ -615,16 +608,12 @@ def sample_create_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - parent = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = 
"name_value" metric.filter = "filter_value" request = logging_v2.CreateLogMetricRequest( - parent=parent, + parent="parent_value", metric=metric, ) @@ -744,16 +733,12 @@ def sample_update_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.UpdateLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", metric=metric, ) @@ -871,12 +856,8 @@ def sample_delete_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.DeleteLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py index 3931b1d669e4..40d790738205 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -32,13 +32,8 @@ async def sample_create_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.CreateBucketRequest( - 
parent=parent, + parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index b850efd1fb72..879c4e321f20 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -32,13 +32,8 @@ def sample_create_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.CreateBucketRequest( - parent=parent, + parent="parent_value", bucket_id="bucket_id_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py index 432f9d86a8f3..1711976ca93d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -32,16 +32,12 @@ async def sample_create_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = 
"my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.CreateExclusionRequest( - parent=parent, + parent="parent_value", exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index e17a5e04e730..45e2ca3fdd5b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -32,16 +32,12 @@ def sample_create_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.CreateExclusionRequest( - parent=parent, + parent="parent_value", exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py index 5d2bfcf7d270..ae0e5e8269ac 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -32,16 +32,12 @@ async def sample_create_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.CreateSinkRequest( - parent=parent, + parent="parent_value", sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index 372a22657be4..3c04120b3464 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -32,16 +32,12 @@ def sample_create_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.CreateSinkRequest( - parent=parent, + parent="parent_value", sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py index 
0ff377493e6d..4ad451d92fa7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py @@ -32,13 +32,8 @@ async def sample_delete_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.DeleteBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py index b3cc0f22fca5..011e9a1b6f63 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py @@ -32,13 +32,8 @@ def sample_delete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.DeleteBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py index f51384574efd..db7b9de949fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py @@ -32,12 +32,8 @@ async def sample_delete_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.DeleteExclusionRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py index f4fd093b5a9c..ba4202f5e2de 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py @@ -32,12 +32,8 @@ def sample_delete_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.DeleteExclusionRequest( - name=name, + name="name_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py index 35c3144c6c83..f4d7d596d422 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py @@ -32,12 +32,8 @@ async def sample_delete_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.DeleteSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py index ec1cf8acd27c..8781209f91c6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py @@ -32,12 +32,8 @@ def sample_delete_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.DeleteSinkRequest( - sink_name=sink_name, + 
sink_name="sink_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py index 596d0ab2082f..7796ff52f417 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py @@ -32,14 +32,8 @@ async def sample_delete_view(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.DeleteViewRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py index f65d86c656a8..5c53ebc0c732 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py @@ -32,14 +32,8 @@ def sample_delete_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - 
bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.DeleteViewRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py index e716961730e3..8b1f13f21e32 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -32,13 +32,8 @@ async def sample_get_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.GetBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index 5a84535f236e..9ead630fa3ab 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -32,13 +32,8 @@ def sample_get_bucket(): client 
= logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.GetBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py index 6335aa131fa5..864fed295575 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -32,11 +32,8 @@ async def sample_get_cmek_settings(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - name = f"projects/{project}/cmekSettings" - request = logging_v2.GetCmekSettingsRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index f7109b8dd0c3..e3ca86aafcb1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -32,11 +32,8 @@ def sample_get_cmek_settings(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - name = f"projects/{project}/cmekSettings" - request = logging_v2.GetCmekSettingsRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py index 461762b5a14e..7f452fceb610 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -32,12 +32,8 @@ async def sample_get_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.GetExclusionRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index 9a99f3a88405..16aaefefdcf0 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -32,12 +32,8 @@ def sample_get_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.GetExclusionRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py index fac7632db442..12f905ebcbe1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -32,12 +32,8 @@ async def sample_get_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.GetSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index 
b18072a4e19f..9b61f7075e75 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -32,12 +32,8 @@ def sample_get_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - request = logging_v2.GetSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py index 08d07c7536b2..b4924fffa066 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -32,14 +32,8 @@ async def sample_get_view(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.GetViewRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index 775157e1df20..c706290a7b65 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -32,14 +32,8 @@ def sample_get_view(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - view = "view_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}/views/{view}" - request = logging_v2.GetViewRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py index d2ed2615b257..5d71278a2e83 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -32,13 +32,8 @@ async def sample_list_buckets(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.ListBucketsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index 3824790fe5b5..650229ec53a0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -32,13 +32,8 @@ def sample_list_buckets(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - parent = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.ListBucketsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py index f8efb65633c9..646be345cdc1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -32,12 +32,8 @@ async def sample_list_exclusions(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - 
request = logging_v2.ListExclusionsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index ea485dd38373..08c01c1237f1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -32,12 +32,8 @@ def sample_list_exclusions(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - parent = f"projects/{project}/exclusions/{exclusion}" - request = logging_v2.ListExclusionsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py index 87098906f3e3..10287f1c2aaf 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -32,12 +32,8 @@ async def sample_list_sinks(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - sink = 
"sink_value" - parent = f"projects/{project}/sinks/{sink}" - request = logging_v2.ListSinksRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index c4ed69141cab..cd6c0da01098 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -32,12 +32,8 @@ def sample_list_sinks(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - parent = f"projects/{project}/sinks/{sink}" - request = logging_v2.ListSinksRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py index 8a8161028fb4..1c7f06ffdfa9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py @@ -32,13 +32,8 @@ async def sample_undelete_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = 
"my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UndeleteBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py index d9a11f45dd94..0b1484594963 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py @@ -32,13 +32,8 @@ def sample_undelete_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UndeleteBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py index 595c4a8ffa24..d714e0a3aa64 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ 
-32,13 +32,8 @@ async def sample_update_bucket(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UpdateBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index cb294ae3f62a..007a2020349b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -32,13 +32,8 @@ def sample_update_bucket(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - bucket = "bucket_value" - name = f"projects/{project}/locations/{location}/buckets/{bucket}" - request = logging_v2.UpdateBucketRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py index b4d79d2d76be..da749b58fb0b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -32,16 +32,12 @@ async def sample_update_exclusion(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.UpdateExclusionRequest( - name=name, + name="name_value", exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index a3772807ba0e..a0c653b460b9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -32,16 +32,12 @@ def sample_update_exclusion(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - exclusion = "exclusion_value" - name = f"projects/{project}/exclusions/{exclusion}" - exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" request = logging_v2.UpdateExclusionRequest( - name=name, + name="name_value", exclusion=exclusion, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py index d43e5ac5e437..f0d5ce3ca6c8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -32,16 +32,12 @@ async def sample_update_sink(): client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.UpdateSinkRequest( - sink_name=sink_name, + sink_name="sink_name_value", sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index ca50c1e35337..5119de8820d5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -32,16 +32,12 @@ def sample_update_sink(): client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - sink = "sink_value" - sink_name = f"projects/{project}/sinks/{sink}" - sink = logging_v2.LogSink() sink.name = "name_value" sink.destination = "destination_value" request = logging_v2.UpdateSinkRequest( - 
sink_name=sink_name, + sink_name="sink_name_value", sink=sink, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py index 25f13bae6a5a..2e291f52922b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py @@ -32,12 +32,8 @@ async def sample_delete_log(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - log_name = f"projects/{project}/logs/{log}" - request = logging_v2.DeleteLogRequest( - log_name=log_name, + log_name="log_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py index daa5767c497b..5f7540b92692 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py @@ -32,12 +32,8 @@ def sample_delete_log(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - log_name = f"projects/{project}/logs/{log}" - request = 
logging_v2.DeleteLogRequest( - log_name=log_name, + log_name="log_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py index fb156a9757ae..23de611fb34b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -32,12 +32,8 @@ async def sample_list_log_entries(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - resource_names = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogEntriesRequest( - resource_names=resource_names, + resource_names=['resource_names_value_1', 'resource_names_value_2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index 74968ae551d0..2c83bf9bc155 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -32,12 +32,8 @@ def sample_list_log_entries(): client = 
logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - resource_names = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogEntriesRequest( - resource_names=resource_names, + resource_names=['resource_names_value_1', 'resource_names_value_2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py index 17362e9d1756..fe04f70099fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -32,12 +32,8 @@ async def sample_list_logs(): client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - parent = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 12491dadf88e..909c2e721fd4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -32,12 +32,8 @@ def sample_list_logs(): client = logging_v2.LoggingServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - log = "log_value" - parent = f"projects/{project}/logs/{log}" - request = logging_v2.ListLogsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py index 212444858d12..c5f272a0ac6e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -32,16 +32,12 @@ async def sample_create_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - parent = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.CreateLogMetricRequest( - parent=parent, + parent="parent_value", metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index e81eb133b417..888355efde8f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -32,16 +32,12 @@ def sample_create_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - parent = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.CreateLogMetricRequest( - parent=parent, + parent="parent_value", metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py index 876f2bbad825..21ac440ddcdb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py @@ -32,12 +32,8 @@ async def sample_delete_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.DeleteLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py index fb2eb6f89766..4ba7cf79ddb0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py @@ -32,12 +32,8 @@ def sample_delete_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.DeleteLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py index a9b77fe6fb96..bf0f0528ce6c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -32,12 +32,8 @@ async def sample_get_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = 
f"projects/{project}/metrics/{metric}" - request = logging_v2.GetLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index 4e18e335b9fe..ee5d478a43aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -32,12 +32,8 @@ def sample_get_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - request = logging_v2.GetLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py index 5742806d0db1..c71003435eca 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -32,11 +32,8 @@ async def sample_list_log_metrics(): client = 
logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - parent = f"projects/{project}" - request = logging_v2.ListLogMetricsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index 974135981041..766079f6b3ce 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -32,11 +32,8 @@ def sample_list_log_metrics(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - parent = f"projects/{project}" - request = logging_v2.ListLogMetricsRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py index dcb646539374..3f644809cd85 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -32,16 +32,12 @@ async def 
sample_update_log_metric(): client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.UpdateLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index ad87904dc242..18d3b2f8d1c5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -32,16 +32,12 @@ def sample_update_log_metric(): client = logging_v2.MetricsServiceV2Client() # Initialize request argument(s) - project = "my-project-id" - metric = "metric_value" - metric_name = f"projects/{project}/metrics/{metric}" - metric = logging_v2.LogMetric() metric.name = "name_value" metric.filter = "filter_value" request = logging_v2.UpdateLogMetricRequest( - metric_name=metric_name, + metric_name="metric_name_value", metric=metric, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json index c5479a62a740..696911699db9 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -14,12 +14,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async", "segments": [ { - "end": 50, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 45, "start": 27, "type": "SHORT" }, @@ -29,18 +29,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -58,12 +58,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync", "segments": [ { - "end": 50, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 45, "start": 27, "type": "SHORT" }, @@ -73,18 +73,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 42, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 46, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -103,12 +103,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -118,18 +118,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -147,12 +147,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync", "segments": [ 
{ - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -162,18 +162,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -192,12 +192,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateSink_async", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -207,18 +207,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -236,12 +236,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -251,18 +251,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -370,12 +370,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async", "segments": [ { - "end": 47, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 42, "start": 27, "type": "SHORT" }, @@ -385,16 +385,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 43, "type": 
"RESPONSE_HANDLING" } ] @@ -412,12 +412,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync", "segments": [ { - "end": 47, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 42, "start": 27, "type": "SHORT" }, @@ -427,16 +427,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -455,12 +455,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -470,16 +470,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -497,12 +497,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -512,16 +512,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -540,12 +540,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -555,16 +555,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ 
-582,12 +582,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -597,16 +597,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -625,12 +625,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteView_async", "segments": [ { - "end": 48, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 42, "start": 27, "type": "SHORT" }, @@ -640,16 +640,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -667,12 +667,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync", "segments": [ { - "end": 48, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 42, "start": 27, "type": "SHORT" }, @@ -682,16 +682,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -710,12 +710,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetBucket_async", "segments": [ { - "end": 49, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 44, "start": 27, "type": "SHORT" }, @@ -725,18 +725,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 45, + "start": 42, "type": 
"RESPONSE_HANDLING" } ] @@ -754,12 +754,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 49, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 44, "start": 27, "type": "SHORT" }, @@ -769,18 +769,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -799,12 +799,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 47, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 44, "start": 27, "type": "SHORT" }, @@ -814,18 +814,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 41, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -843,12 +843,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - "end": 47, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 44, "start": 27, "type": "SHORT" }, @@ -858,18 +858,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 41, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -888,12 +888,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -903,18 +903,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 
34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -932,12 +932,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -947,18 +947,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -977,12 +977,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetSink_async", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -992,18 +992,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1021,12 +1021,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetSink_sync", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -1036,18 +1036,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1066,12 +1066,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetView_async", "segments": [ { - "end": 50, + "end": 44, 
"start": 27, "type": "FULL" }, { - "end": 50, + "end": 44, "start": 27, "type": "SHORT" }, @@ -1081,18 +1081,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1110,12 +1110,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetView_sync", "segments": [ { - "end": 50, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 44, "start": 27, "type": "SHORT" }, @@ -1125,18 +1125,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1155,12 +1155,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 48, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 43, "start": 27, "type": "SHORT" }, @@ -1170,16 +1170,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -1197,12 +1197,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 48, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 43, "start": 27, "type": "SHORT" }, @@ -1212,16 +1212,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -1240,12 +1240,12 @@ "regionTag": 
"logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -1255,16 +1255,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -1282,12 +1282,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -1297,16 +1297,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -1325,12 +1325,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_async", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -1340,16 +1340,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -1367,12 +1367,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -1382,16 +1382,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -1495,12 +1495,12 @@ "regionTag": 
"logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async", "segments": [ { - "end": 47, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 42, "start": 27, "type": "SHORT" }, @@ -1510,16 +1510,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -1537,12 +1537,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { - "end": 47, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 42, "start": 27, "type": "SHORT" }, @@ -1552,16 +1552,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -1580,12 +1580,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async", "segments": [ { - "end": 49, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 44, "start": 27, "type": "SHORT" }, @@ -1595,18 +1595,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1624,12 +1624,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync", "segments": [ { - "end": 49, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 44, "start": 27, "type": "SHORT" }, @@ -1639,18 +1639,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 
47, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1758,12 +1758,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1773,18 +1773,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -1802,12 +1802,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1817,18 +1817,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -1847,12 +1847,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1862,18 +1862,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -1891,12 +1891,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1906,18 +1906,18 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -2025,12 +2025,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -2040,16 +2040,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -2067,12 +2067,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -2082,16 +2082,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -2110,12 +2110,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2125,16 +2125,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2152,12 +2152,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": 
"SHORT" }, @@ -2167,16 +2167,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2195,12 +2195,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_async", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2210,16 +2210,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2237,12 +2237,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2252,16 +2252,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2539,12 +2539,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2554,18 +2554,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -2583,12 +2583,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - 
"end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2598,18 +2598,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -2628,12 +2628,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -2643,16 +2643,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -2670,12 +2670,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { - "end": 46, + "end": 42, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 42, "start": 27, "type": "SHORT" }, @@ -2685,16 +2685,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 43, "type": "RESPONSE_HANDLING" } ] @@ -2713,12 +2713,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -2728,18 +2728,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2757,12 +2757,12 @@ "regionTag": 
"logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync", "segments": [ { - "end": 48, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 44, "start": 27, "type": "SHORT" }, @@ -2772,18 +2772,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2802,12 +2802,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async", "segments": [ { - "end": 46, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2817,16 +2817,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 41, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 42, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2844,12 +2844,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { - "end": 46, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 43, "start": 27, "type": "SHORT" }, @@ -2859,16 +2859,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 41, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 42, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -2887,12 +2887,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2902,18 +2902,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - 
"start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -2931,12 +2931,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { - "end": 53, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2946,18 +2946,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 43, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 46, + "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index e1aad2df227d..8f79ca344630 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -238,12 +238,8 @@ def sample_list_instances(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - request = redis_v1.ListInstancesRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -349,13 +345,8 @@ def sample_get_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.GetInstanceRequest( - name=name, + name="name_value", ) # Make the request @@ -463,17 +454,13 @@ def sample_create_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = 
"my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - instance = redis_v1.Instance() instance.name = "name_value" instance.tier = "STANDARD_HA" instance.memory_size_gb = 1499 request = redis_v1.CreateInstanceRequest( - parent=parent, + parent="parent_value", instance_id="instance_id_value", instance=instance, ) @@ -744,13 +731,8 @@ def sample_upgrade_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.UpgradeInstanceRequest( - name=name, + name="name_value", redis_version="redis_version_value", ) @@ -1129,13 +1111,8 @@ def sample_failover_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.FailoverInstanceRequest( - name=name, + name="name_value", ) # Make the request @@ -1254,13 +1231,8 @@ def sample_delete_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.DeleteInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 9f58dc2140e9..307e5b2d443c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -426,12 +426,8 @@ def sample_list_instances(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - request = redis_v1.ListInstancesRequest( - parent=parent, + parent="parent_value", ) # Make the request @@ -538,13 +534,8 @@ def sample_get_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.GetInstanceRequest( - name=name, + name="name_value", ) # Make the request @@ -653,17 +644,13 @@ def sample_create_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - instance = redis_v1.Instance() instance.name = "name_value" instance.tier = "STANDARD_HA" instance.memory_size_gb = 1499 request = redis_v1.CreateInstanceRequest( - parent=parent, + parent="parent_value", instance_id="instance_id_value", instance=instance, ) @@ -936,13 +923,8 @@ def sample_upgrade_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.UpgradeInstanceRequest( - name=name, + name="name_value", redis_version="redis_version_value", ) @@ -1324,13 +1306,8 @@ def sample_failover_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - 
request = redis_v1.FailoverInstanceRequest( - name=name, + name="name_value", ) # Make the request @@ -1450,13 +1427,8 @@ def sample_delete_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.DeleteInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py index 39fb9f169ce0..33c8e4b43269 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -32,17 +32,13 @@ async def sample_create_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - instance = redis_v1.Instance() instance.name = "name_value" instance.tier = "STANDARD_HA" instance.memory_size_gb = 1499 request = redis_v1.CreateInstanceRequest( - parent=parent, + parent="parent_value", instance_id="instance_id_value", instance=instance, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index fee1cc58b1a1..4390eea49f62 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -32,17 +32,13 @@ def sample_create_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - instance = redis_v1.Instance() instance.name = "name_value" instance.tier = "STANDARD_HA" instance.memory_size_gb = 1499 request = redis_v1.CreateInstanceRequest( - parent=parent, + parent="parent_value", instance_id="instance_id_value", instance=instance, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py index 0447c772f5ab..0519037e8228 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py @@ -32,13 +32,8 @@ async def sample_delete_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.DeleteInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index 20970e507f0c..ace8e8bcb8de 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -32,13 +32,8 @@ def sample_delete_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.DeleteInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py index d0ce830de5f8..c29701c80a81 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -32,13 +32,8 @@ async def sample_failover_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.FailoverInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index f322e8f0cf16..ff715fd2d192 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -32,13 +32,8 @@ def sample_failover_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.FailoverInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py index 6be04525a95f..1bb5ff229a75 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -32,13 +32,8 @@ async def sample_get_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.GetInstanceRequest( - name=name, + 
name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index f6c3a5e4f5dd..7f6b03ea001a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -32,13 +32,8 @@ def sample_get_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.GetInstanceRequest( - name=name, + name="name_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py index 35153ea3ff41..c3686c396432 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -32,12 +32,8 @@ async def sample_list_instances(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - request = redis_v1.ListInstancesRequest( - parent=parent, + 
parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index 6df89ddf35d1..0ee590d3a384 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -32,12 +32,8 @@ def sample_list_instances(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - parent = f"projects/{project}/locations/{location}" - request = redis_v1.ListInstancesRequest( - parent=parent, + parent="parent_value", ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py index 5b23157fd287..df0b6cd6d275 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -32,13 +32,8 @@ async def sample_upgrade_instance(): client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = 
redis_v1.UpgradeInstanceRequest( - name=name, + name="name_value", redis_version="redis_version_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index 602c18ffdc9e..e792eb291a93 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -32,13 +32,8 @@ def sample_upgrade_instance(): client = redis_v1.CloudRedisClient() # Initialize request argument(s) - project = "my-project-id" - location = "us-central1" - instance = "instance_value" - name = f"projects/{project}/locations/{location}/instances/{instance}" - request = redis_v1.UpgradeInstanceRequest( - name=name, + name="name_value", redis_version="redis_version_value", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json index 9237cc8828d5..33da11ad344b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json @@ -14,12 +14,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_async", "segments": [ { - "end": 57, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 53, "start": 27, "type": "SHORT" }, @@ -29,16 +29,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 34, 
"type": "REQUEST_INITIALIZATION" }, { - "start": 50, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 58, + "end": 54, "type": "RESPONSE_HANDLING" } ] @@ -56,12 +56,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_sync", "segments": [ { - "end": 57, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 57, + "end": 53, "start": 27, "type": "SHORT" }, @@ -71,16 +71,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 50, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 58, + "end": 54, "type": "RESPONSE_HANDLING" } ] @@ -99,12 +99,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_async", "segments": [ { - "end": 51, + "end": 46, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 46, "start": 27, "type": "SHORT" }, @@ -114,16 +114,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 47, "type": "RESPONSE_HANDLING" } ] @@ -141,12 +141,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_sync", "segments": [ { - "end": 51, + "end": 46, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 46, "start": 27, "type": "SHORT" }, @@ -156,16 +156,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 47, "type": "RESPONSE_HANDLING" } ] @@ -269,12 +269,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_async", "segments": [ { - "end": 51, + "end": 46, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 46, "start": 27, "type": "SHORT" }, @@ -284,16 +284,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 
39, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 47, "type": "RESPONSE_HANDLING" } ] @@ -311,12 +311,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_sync", "segments": [ { - "end": 51, + "end": 46, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 46, "start": 27, "type": "SHORT" }, @@ -326,16 +326,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 44, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 47, "type": "RESPONSE_HANDLING" } ] @@ -354,12 +354,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_GetInstance_async", "segments": [ { - "end": 49, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 44, "start": 27, "type": "SHORT" }, @@ -369,18 +369,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -398,12 +398,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_GetInstance_sync", "segments": [ { - "end": 49, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 44, "start": 27, "type": "SHORT" }, @@ -413,18 +413,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 43, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 41, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 45, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -528,12 +528,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_async", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -543,16 +543,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": 
"REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -570,12 +570,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_sync", "segments": [ { - "end": 47, + "end": 43, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 43, "start": 27, "type": "SHORT" }, @@ -585,16 +585,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 42, + "end": 38, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 43, + "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 44, "type": "RESPONSE_HANDLING" } ] @@ -698,12 +698,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_async", "segments": [ { - "end": 52, + "end": 47, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 47, "start": 27, "type": "SHORT" }, @@ -713,16 +713,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 48, "type": "RESPONSE_HANDLING" } ] @@ -740,12 +740,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync", "segments": [ { - "end": 52, + "end": 47, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 47, "start": 27, "type": "SHORT" }, @@ -755,16 +755,16 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 39, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 45, + "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 48, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 00608a2ae771..2f7574e48448 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -162,43 +162,6 @@ def test_preprocess_sample(): ] -def 
test_preprocess_sample_resource_message_field(): - # Verify that the default response is added. - sample = {"service": "Mollusc", "rpc": "Classify"} - - classify_request_message = DummyMessage( - fields={ - "parent": DummyField(is_primitive=True, type=str, required=True, name="parent", resource_reference="parent"), - }, - type=DummyMessageTypePB(name="ClassifyRequest"), - ident=DummyIdent(name="ClassifyRequest") - ) - - api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient", - resource_messages_dict={"parent": DummyMessage( - resource_path="projects/{project}")} - )}, - naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), - messages=classify_request_message, - - ) - - rpc = DummyMethod(input=classify_request_message) - - samplegen.Validator.preprocess_sample(sample, api_schema, rpc) - - # assert mock request is created - assert sample["request"] == [ - { - "field": "parent%project", - "value": '"my-project-id"' - } - ] - - def test_preprocess_sample_with_enum_field(): # Verify that the default response is added. sample = {"service": "Mollusc", "rpc": "Classify"} From fa1ff08fada00a43d373850007427796d7e6678f Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 28 Jan 2022 12:14:14 -0700 Subject: [PATCH 0717/1339] fix: fix snippetgen golden file (#1170) I accidentally merged #1134 without updating the golden files for snippetgen. This PR updates the golden files and marks the 'snippetgen' check required. 
--- .../.github/sync-repo-settings.yaml | 1 + ...llusca_v1_snippets_list_resources_async.py | 12 +- ...ollusca_v1_snippets_list_resources_sync.py | 12 +- .../goldens/snippet_metadata_mollusca_v1.json | 611 ++++++++++++++++++ 4 files changed, 616 insertions(+), 20 deletions(-) create mode 100644 packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 048788301d12..8dc6d0203ff4 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -24,6 +24,7 @@ branchProtectionRules: - 'showcase-unit-add-iam-methods' - 'integration' - 'style-check' + - 'snippetgen' - 'unit (3.6)' - 'unit (3.7)' - 'unit (3.8)' diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index a0be0b9e2b03..de4e1ab4befb 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -32,17 +32,9 @@ async def sample_list_resources(): client = mollusca_v1.SnippetsAsyncClient() # Initialize request argument(s) - item_id = "item_id_value" - part_id = "part_id_value" - parent = f"items/{item_id}/parts/{part_id}" - - item_id = "item_id_value" - part_id = "part_id_value" - resource_with_wildcard = f"items/{item_id}/parts/{part_id}" - request = mollusca_v1.ListResourcesRequest( - parent=parent, - resource_with_wildcard=resource_with_wildcard, + parent="parent_value", + resource_with_wildcard="resource_with_wildcard_value", ) # Make the request diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index ddab64b202a4..4832779217d7 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -32,17 +32,9 @@ def sample_list_resources(): client = mollusca_v1.SnippetsClient() # Initialize request argument(s) - item_id = "item_id_value" - part_id = "part_id_value" - parent = f"items/{item_id}/parts/{part_id}" - - item_id = "item_id_value" - part_id = "part_id_value" - resource_with_wildcard = f"items/{item_id}/parts/{part_id}" - request = mollusca_v1.ListResourcesRequest( - parent=parent, - resource_with_wildcard=resource_with_wildcard, + parent="parent_value", + resource_with_wildcard="resource_with_wildcard_value", ) # Make the request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json new file mode 100644 index 000000000000..a5346a359c54 --- /dev/null +++ b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json @@ -0,0 +1,611 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "ListResources" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_list_resources_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "ListResources" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_list_resources_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodBidiStreaming" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodBidiStreaming" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 47, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodLroSignatures" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodLroSignatures" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodOneSignature" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodOneSignature" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodServerStreaming" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "MethodServerStreaming" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync", + 
"segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "OneOfMethodRequiredField" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "OneOfMethodRequiredField" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "OneOfMethod" + } + }, + "file": 
"mollusca_generated_mollusca_v1_snippets_one_of_method_async.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Snippets" + }, + "shortName": "OneOfMethod" + } + }, + "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py", + "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} From 098a77a37acf93c277986fc6e104ba43298f976d Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Fri, 28 Jan 2022 11:22:08 -0800 Subject: [PATCH 0718/1339] fix: add special method parameters to set of reserved module names (#1168) Ignoring flattened parameters, there are four special parameters to all client methods: * request * retry * timeout * metadata These cannot conflict with module names, and so the module names must be disambiguated. 
--- packages/gapic-generator/gapic/schema/api.py | 6 ++++- .../tests/unit/schema/test_api.py | 26 ++++++++++++++++++- 2 files changed, 30 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 11cb77f28650..c6b69cf34a3d 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -262,12 +262,16 @@ def build( file_descriptors, ), opts=opts) + # "metadata", "retry", "timeout", and "request" are reserved words in client methods. + invalid_module_names = set(keyword.kwlist) | { + "metadata", "retry", "timeout", "request"} + def disambiguate_keyword_fname( full_path: str, visited_names: Container[str]) -> str: path, fname = os.path.split(full_path) name, ext = os.path.splitext(fname) - if name in keyword.kwlist or full_path in visited_names: + if name in invalid_module_names or full_path in visited_names: name += "_" full_path = os.path.join(path, name + ext) if full_path in visited_names: diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index eae44b84b114..212c86daedb3 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -233,7 +233,27 @@ def test_proto_keyword_fname(): name='class.proto', package='google.keywords.v1', messages=(make_message_pb2(name='ClassRequest', fields=()),), - ) + ), + make_file_pb2( + name='metadata.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='MetadataRequest', fields=()),), + ), + make_file_pb2( + name='retry.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='RetryRequest', fields=()),), + ), + make_file_pb2( + name='timeout.proto', + package='google.keywords.v1', + messages=(make_message_pb2(name='TimeoutRequest', fields=()),), + ), + make_file_pb2( + name='request.proto', + package='google.keywords.v1', 
+ messages=(make_message_pb2(name='RequestRequest', fields=()),), + ), ) # We can't create new collisions, so check that renames cascade. @@ -243,6 +263,10 @@ def test_proto_keyword_fname(): 'import__.proto', 'class_.proto', 'class__.proto', + 'metadata_.proto', + 'retry_.proto', + 'timeout_.proto', + 'request_.proto', } From 4f639071fad849ce993614a8196711c183c41a1e Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Fri, 28 Jan 2022 11:55:07 -0800 Subject: [PATCH 0719/1339] feat: adds REST server-streaming support. (#1120) --- .../services/%service/transports/rest.py.j2 | 27 ++++--- .../%name_%version/%sub/test_%service.py.j2 | 48 +++++++++--- .../services/%service/transports/rest.py.j2 | 27 ++++--- .../gapic/templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 75 +++++++++++++------ packages/gapic-generator/noxfile.py | 1 - .../tests/fragments/test_rest_streaming.proto | 43 +++++++++++ .../tests/system/test_streams.py | 8 +- 8 files changed, 170 insertions(+), 61 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/test_rest_streaming.proto diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 8c10178e7864..eb34c706f2ad 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -11,8 +11,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers +from google.api_core import rest_streaming from google.api_core import path_template from google.api_core import gapic_v1 + {% if 
service.has_lro %} from google.api_core import operations_v1 from google.protobuf import json_format @@ -66,7 +68,7 @@ class {{ service.name }}RestInterceptor: .. code-block: class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): - {% for _, method in service.methods|dictsort if not (method.server_streaming or method.client_streaming) %} + {% for _, method in service.methods|dictsort if not method.client_streaming %} def pre_{{ method.name|snake_case }}(request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -82,7 +84,7 @@ class {{ service.name }}RestInterceptor: """ - {% for method in service.methods.values()|sort(attribute="name") if not (method.server_streaming or method.client_streaming) %} + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming %} def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for {{ method.name|snake_case }} @@ -92,7 +94,11 @@ class {{ service.name }}RestInterceptor: return request, metadata {% if not method.void %} + {% if not method.server_streaming %} def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + {% else %} + def post_{{ method.name|snake_case }}(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + {% endif %} """Post-rpc interceptor for {{ method.name|snake_case }} Override in a subclass to manipulate the response @@ -248,8 +254,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __hash__(self): return hash("{{method.name}}") - - {% if not (method.server_streaming or method.client_streaming) %} + {% if not method.client_streaming %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { {% for req_field in method.input.required_fields if 
req_field.is_primitive and req_field.name in method.query_params %} @@ -262,15 +267,15 @@ class {{service.name}}RestTransport({{service.name}}Transport): def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} {% endif %}{# required fields #} - {% endif %}{# not (method.server_streaming or method.client_streaming) #} + {% endif %}{# not method.client_streaming #} def __call__(self, request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[str, str]]=(), - ){% if not method.void %} -> {{method.output.ident}}{% endif %}: - {% if method.http_options and not (method.server_streaming or method.client_streaming) %} + ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: + {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} @@ -360,6 +365,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if method.lro %} resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + {% elif method.server_streaming %} + resp = rest_streaming.ResponseIterator(response, {{method.output.ident}}) {% else %} resp = {{method.output.ident}}.from_json( response.content, @@ -370,14 +377,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): return resp {% endif %}{# method.void #} - {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} + {% else %}{# method.http_options and not method.client_streaming #} {% if not method.http_options %} raise RuntimeError( "Cannot define a method without a valid 'google.api.http' annotation.") - {% elif method.server_streaming or method.client_streaming %} + {% elif 
method.client_streaming %} raise NotImplementedError( - "Streaming over REST is not yet defined for python client") + "Client streaming over REST is not yet defined for python client") {% else %} raise NotImplementedError() diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 944b88eb12f5..dfa71efc7d9a 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -8,6 +8,7 @@ import mock import grpc from grpc.experimental import aio {% if "rest" in opts.transport %} +from collections.abc import Iterable import json {% endif %} import math @@ -861,8 +862,8 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} {% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} -{# TODO(kbandes): remove this if condition when streaming are supported. #} -{% if not (method.server_streaming or method.client_streaming) %} +{# TODO(kbandes): remove this if condition when client streaming are supported. 
#} +{% if not method.client_streaming %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -884,8 +885,6 @@ def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}( {% for field in method.output.fields.values() | rejectattr('message')%} @@ -905,6 +904,8 @@ def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): req.return_value.request = PreparedRequest() {% if method.void %} json_return_value = '' + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) {% endif %} @@ -914,6 +915,10 @@ def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): # the request over the wire, so an empty request is fine. 
{% if method.client_streaming %} client.{{ method_name }}(iter([requests])) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.{{ method_name }}(request) {% else %} client.{{ method_name }}(request) {% endif %} @@ -950,8 +955,6 @@ def test_{{ method.name|snake_case }}_rest(request_type): return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}( {% for field in method.output.fields.values() | rejectattr('message')%} @@ -974,6 +977,8 @@ def test_{{ method.name|snake_case }}_rest(request_type): json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) {% endif %} @@ -981,6 +986,10 @@ def test_{{ method.name|snake_case }}_rest(request_type): req.return_value = response_value {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.{{ method_name }}(request) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -991,6 +1000,11 @@ def test_{{ method.name|snake_case }}_rest(request_type): {% endif %} + {% if method.server_streaming %} + assert isinstance(response, Iterable) + response = next(response) + {% endif %} + # Establish that the response is the type that we expect. 
{% if method.void %} assert response is None @@ -1085,8 +1099,6 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}() {% endif %} @@ -1114,6 +1126,8 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) {% endif %} @@ -1122,6 +1136,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% if method.client_streaming %} response = client.{{ method.name|snake_case }}(iter(requests)) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.{{ method_name }}(request) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1248,8 +1266,6 @@ def test_{{ method.name|snake_case }}_rest_flattened(): return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}() {% endif %} @@ -1261,6 +1277,8 @@ def test_{{ method.name|snake_case }}_rest_flattened(): json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ 
method.output.ident }}.to_json(return_value) {% endif %} @@ -1281,7 +1299,14 @@ def test_{{ method.name|snake_case }}_rest_flattened(): {% endfor %} ) mock_args.update(sample_request) + + {% if method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + client.{{ method_name }}(**mock_args) + {% else %} client.{{ method_name }}(**mock_args) + {% endif %} # Establish that the underlying call was made with the expected # request object values. @@ -1385,6 +1410,9 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): response = tuple({{ method.output.ident }}.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): + {% if method.server_streaming %} + response_val = "[{}]".format({{ method.output.ident }}.to_json(response_val)) + {% endif %} return_val._content = response_val.encode('UTF-8') return_val.status_code = 200 req.side_effect = return_values diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 8c10178e7864..eb34c706f2ad 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -11,8 +11,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers +from google.api_core import rest_streaming from google.api_core import path_template from google.api_core import gapic_v1 + {% if service.has_lro %} from google.api_core import operations_v1 
from google.protobuf import json_format @@ -66,7 +68,7 @@ class {{ service.name }}RestInterceptor: .. code-block: class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): - {% for _, method in service.methods|dictsort if not (method.server_streaming or method.client_streaming) %} + {% for _, method in service.methods|dictsort if not method.client_streaming %} def pre_{{ method.name|snake_case }}(request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -82,7 +84,7 @@ class {{ service.name }}RestInterceptor: """ - {% for method in service.methods.values()|sort(attribute="name") if not (method.server_streaming or method.client_streaming) %} + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming %} def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for {{ method.name|snake_case }} @@ -92,7 +94,11 @@ class {{ service.name }}RestInterceptor: return request, metadata {% if not method.void %} + {% if not method.server_streaming %} def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + {% else %} + def post_{{ method.name|snake_case }}(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: + {% endif %} """Post-rpc interceptor for {{ method.name|snake_case }} Override in a subclass to manipulate the response @@ -248,8 +254,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __hash__(self): return hash("{{method.name}}") - - {% if not (method.server_streaming or method.client_streaming) %} + {% if not method.client_streaming %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in 
method.query_params %} @@ -262,15 +267,15 @@ class {{service.name}}RestTransport({{service.name}}Transport): def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} {% endif %}{# required fields #} - {% endif %}{# not (method.server_streaming or method.client_streaming) #} + {% endif %}{# not method.client_streaming #} def __call__(self, request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: float=None, metadata: Sequence[Tuple[str, str]]=(), - ){% if not method.void %} -> {{method.output.ident}}{% endif %}: - {% if method.http_options and not (method.server_streaming or method.client_streaming) %} + ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: + {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} {{ (method.name|snake_case).replace('_',' ')|wrap( width=70, offset=45, indent=8) }} @@ -360,6 +365,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if method.lro %} resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + {% elif method.server_streaming %} + resp = rest_streaming.ResponseIterator(response, {{method.output.ident}}) {% else %} resp = {{method.output.ident}}.from_json( response.content, @@ -370,14 +377,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): return resp {% endif %}{# method.void #} - {% else %}{# method.http_options and not (method.server_streaming or method.client_streaming) #} + {% else %}{# method.http_options and not method.client_streaming #} {% if not method.http_options %} raise RuntimeError( "Cannot define a method without a valid 'google.api.http' annotation.") - {% elif method.server_streaming or method.client_streaming %} + {% elif method.client_streaming %} raise 
NotImplementedError( - "Streaming over REST is not yet defined for python client") + "Client streaming over REST is not yet defined for python client") {% else %} raise NotImplementedError() diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 15a529664d86..575539a36560 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -29,7 +29,7 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.3.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.4.0, < 3.0.0dev', {% else %} 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e98b0d23719d..22adc474ce1e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -8,6 +8,7 @@ import mock import grpc from grpc.experimental import aio {% if "rest" in opts.transport %} +from collections.abc import Iterable import json {% endif %} import math @@ -1325,8 +1326,8 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} {% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} -{# TODO(kbandes): remove this if condition when lro and streaming are supported. #} -{% if not (method.server_streaming or method.client_streaming) %} +{# TODO(kbandes): remove this if condition when lro and client streaming are supported. 
#} +{% if not method.client_streaming %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -1357,8 +1358,6 @@ def test_{{ method_name }}_rest(request_type): return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}( {% for field in method.output.fields.values() | rejectattr('message')%} @@ -1381,6 +1380,8 @@ def test_{{ method_name }}_rest(request_type): json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) {% endif %} @@ -1388,6 +1389,10 @@ def test_{{ method_name }}_rest(request_type): req.return_value = response_value {% if method.client_streaming %} response = client.{{ method_name }}(iter(requests)) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.{{ method_name }}(request) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1398,6 +1403,11 @@ def test_{{ method_name }}_rest(request_type): {% endif %} + {% if method.server_streaming %} + assert isinstance(response, Iterable) + response = next(response) + {% endif %} + # Establish that the response is the type that we expect. 
{% if method.void %} assert response is None @@ -1487,8 +1497,6 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}() {% endif %} @@ -1516,6 +1524,8 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) {% endif %} @@ -1524,6 +1534,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% if method.client_streaming %} response = client.{{ method_name }}(iter(requests)) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.{{ method_name }}(request) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1553,7 +1567,7 @@ def test_{{ method_name }}_rest_unset_required_fields(): {% endif %}{# required_fields #} -{% if not (method.server_streaming or method.client_streaming) %} +{% if not method.client_streaming %} @pytest.mark.parametrize("null_interceptor", [True, False]) def test_{{ method_name }}_rest_interceptors(null_interceptor): transport = transports.{{ service.name }}RestTransport( @@ -1582,6 +1596,11 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() {% if not method.void %} req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident 
}}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + + {% if method.server_streaming %} + req.return_value._content = "[{}]".format(req.return_value._content) + {% endif %} + {% endif %} request = {{ method.input.ident }}() @@ -1651,12 +1670,24 @@ def test_{{ method_name }}_rest_flattened(): return_value = None {% elif method.lro %} return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - return_value = iter([{{ method.output.ident }}()]) {% else %} return_value = {{ method.output.ident }}() {% endif %} + # get arguments that satisfy an http rule for this method + sample_request = {{ method.http_options[0].sample_request(method) }} + + # get truthy value for each flattened field + mock_args = dict( + {% for field in method.flattened_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 @@ -1664,6 +1695,8 @@ def test_{{ method_name }}_rest_flattened(): json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} json_return_value = {{ method.output.ident }}.to_json(return_value) {% endif %} @@ -1671,20 +1704,13 @@ def test_{{ method_name }}_rest_flattened(): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - # get arguments that satisfy an http rule for this method - sample_request = {{ method.http_options[0].sample_request(method) }} - - # get truthy value for each flattened field - mock_args = dict( - {% for field in method.flattened_fields.values() %} - {% if not 
field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - {{ field.name }}={{ field.mock_value }}, - {% endif %} - {% endfor %} - ) - mock_args.update(sample_request) + {% if method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + client.{{ method_name }}(**mock_args) + {% else %} client.{{ method_name }}(**mock_args) + {% endif %} # Establish that the underlying call was made with the expected # request object values. @@ -1788,6 +1814,9 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): response = tuple({{ method.output.ident }}.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): + {% if method.server_streaming %} + response_val = "[{}]".format({{ method.output.ident }}.to_json(response_val)) + {% endif %} return_val._content = response_val.encode('UTF-8') return_val.status_code = 200 req.side_effect = return_values @@ -3039,4 +3068,4 @@ def test_api_key_credentials(client_class, transport_class): always_use_jwt_access=True, ) -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 11ba0c9658d0..c87a8e5559a7 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -342,7 +342,6 @@ def showcase_unit( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), ): """Run the generated unit tests against the Showcase library.""" - with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) run_showcase_unit_tests(session) diff --git a/packages/gapic-generator/tests/fragments/test_rest_streaming.proto b/packages/gapic-generator/tests/fragments/test_rest_streaming.proto new file mode 100644 index 000000000000..b47d2030b94d --- 
/dev/null +++ b/packages/gapic-generator/tests/fragments/test_rest_streaming.proto @@ -0,0 +1,43 @@ +// Copyright (C) 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (stream MethodResponse) { + option (google.api.method_signature) = "from,class,import,any,license,type"; + } +} + +message MethodRequest { + string from = 1; + string class = 2; + string import = 3; + string any = 4; + string license = 5; + string type = 6; + int32 page_size = 7; + string page_token = 8; +} + +message MethodResponse { + string result = 1; + string next_page_token = 2; +} diff --git a/packages/gapic-generator/tests/system/test_streams.py b/packages/gapic-generator/tests/system/test_streams.py index 685f7300d8b0..8294e6795836 100644 --- a/packages/gapic-generator/tests/system/test_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -23,10 +23,6 @@ def test_unary_stream(echo): - if isinstance(echo.transport, type(echo).get_transport_class("rest")): - # (TODO: dovs) Temporarily disabling rest - return - content = 'The hail in Wales falls mainly on the snails.' 
responses = echo.expand({ 'content': content, @@ -37,8 +33,8 @@ def test_unary_stream(echo): for ground_truth, response in zip(content.split(' '), responses): assert response.content == ground_truth assert ground_truth == 'snails.' - - assert responses.trailing_metadata() == metadata + if isinstance(echo.transport, type(echo).get_transport_class("grpc")): + assert responses.trailing_metadata() == metadata def test_stream_unary(echo): From 135232f6d998aac1a37eef5e5e2172ee16a50950 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 28 Jan 2022 21:04:21 +0100 Subject: [PATCH 0720/1339] chore(deps): update dependency protobuf to v3.19.4 (#1169) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3f3aa38747ca..022953c4f43d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.1.1 googleapis-common-protos==1.54.0 jinja2==3.0.3 MarkupSafe==2.0.1 -protobuf==3.19.3 +protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 868475afa3c9b5411c3d139f70859f6d7f3ba4f6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 28 Jan 2022 22:58:14 +0000 Subject: [PATCH 0721/1339] chore(main): release 0.62.0 (#1166) :robot: I have created a release *beep* *boop* --- ## [0.62.0](https://github.com/googleapis/gapic-generator-python/compare/v0.61.0...v0.62.0) (2022-01-28) ### Features * adds REST server-streaming support. 
([#1120](https://github.com/googleapis/gapic-generator-python/issues/1120)) ([812cf3e](https://github.com/googleapis/gapic-generator-python/commit/812cf3e0d11b67f7ecd60b9c643b032370bb9400)) * expose extended operations annotations within generator ([#1145](https://github.com/googleapis/gapic-generator-python/issues/1145)) ([e7bee70](https://github.com/googleapis/gapic-generator-python/commit/e7bee702e92612f88efca782a68d8884b9c71feb)) ### Bug Fixes * add special method parameters to set of reserved module names ([#1168](https://github.com/googleapis/gapic-generator-python/issues/1168)) ([8078961](https://github.com/googleapis/gapic-generator-python/commit/8078961f68d2f34fff6403d318bf95c844889d78)) * fix snippetgen golden file ([#1170](https://github.com/googleapis/gapic-generator-python/issues/1170)) ([13b2028](https://github.com/googleapis/gapic-generator-python/commit/13b2028df5193f11aee40ae42ea5186aeb25eef3)) * **snippetgen:** remove special handling for resource paths ([#1134](https://github.com/googleapis/gapic-generator-python/issues/1134)) ([4ea2d11](https://github.com/googleapis/gapic-generator-python/commit/4ea2d114b670c2f2adb43cd87e5f0cf7750e5407)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 15 +++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5e6791a27a8a..bbea6666f387 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,21 @@ # Changelog +## [0.62.0](https://github.com/googleapis/gapic-generator-python/compare/v0.61.0...v0.62.0) (2022-01-28) + + +### Features + +* adds REST server-streaming support. 
([#1120](https://github.com/googleapis/gapic-generator-python/issues/1120)) ([812cf3e](https://github.com/googleapis/gapic-generator-python/commit/812cf3e0d11b67f7ecd60b9c643b032370bb9400)) +* expose extended operations annotations within generator ([#1145](https://github.com/googleapis/gapic-generator-python/issues/1145)) ([e7bee70](https://github.com/googleapis/gapic-generator-python/commit/e7bee702e92612f88efca782a68d8884b9c71feb)) + + +### Bug Fixes + +* add special method parameters to set of reserved module names ([#1168](https://github.com/googleapis/gapic-generator-python/issues/1168)) ([8078961](https://github.com/googleapis/gapic-generator-python/commit/8078961f68d2f34fff6403d318bf95c844889d78)) +* fix snippetgen golden file ([#1170](https://github.com/googleapis/gapic-generator-python/issues/1170)) ([13b2028](https://github.com/googleapis/gapic-generator-python/commit/13b2028df5193f11aee40ae42ea5186aeb25eef3)) +* **snippetgen:** remove special handling for resource paths ([#1134](https://github.com/googleapis/gapic-generator-python/issues/1134)) ([4ea2d11](https://github.com/googleapis/gapic-generator-python/commit/4ea2d114b670c2f2adb43cd87e5f0cf7750e5407)) + ## [0.61.0](https://github.com/googleapis/gapic-generator-python/compare/v0.60.0...v0.61.0) (2022-01-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d012a942751a..e3268bc682eb 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.61.0" +version = "0.62.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 17beee0a1701c7ae70dffe44acc7f4a06faad75c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 31 Jan 2022 14:28:08 -0500 Subject: [PATCH 0722/1339] chore: restore 100% coverage for generated clients (#1173) --- .../%name_%version/%sub/test_%service.py.j2 | 93 +++++++++++------- 
.../%name_%version/%sub/test_%service.py.j2 | 94 ++++++++++++------- .../unit/gapic/asset_v1/test_asset_service.py | 86 +++++++++++------ .../credentials_v1/test_iam_credentials.py | 86 +++++++++++------ .../logging_v2/test_config_service_v2.py | 92 +++++++++++------- .../logging_v2/test_logging_service_v2.py | 94 ++++++++++++------- .../logging_v2/test_metrics_service_v2.py | 94 ++++++++++++------- .../unit/gapic/redis_v1/test_cloud_redis.py | 86 +++++++++++------ 8 files changed, 456 insertions(+), 269 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index dfa71efc7d9a..d615df153b3f 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -395,42 +395,6 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) - - {% if 'grpc' in opts.transport %} - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - {% with host = (service.host|default('localhost', true)) %} - create_channel.assert_called_with( - "{{ host }}{% if ":" not in service.host %}:443{% endif %}", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %}), - scopes=None, - default_host="{{ host }}", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - {% endwith %} - {% endif %} {% if 'grpc' in opts.transport %} {# TODO(dovs): genericize this function#} @@ -450,6 +414,63 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), +]) +def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + {% with host = (service.host|default('localhost', true)) %} + create_channel.assert_called_with( + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + scopes=None, + default_host="{{ host }}", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 22adc474ce1e..e9eec7e9bb27 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -476,42 +476,6 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) - - {% if 'grpc' in opts.transport %} - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - {% with host = (service.host|default('localhost', true)) %} - create_channel.assert_called_with( - "{{ host }}{% if ":" not in service.host %}:443{% endif %}", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - {% for scope in service.oauth_scopes %} - '{{ scope }}', - {% endfor %}), - scopes=None, - default_host="{{ host }}", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - {% endwith %} - {% endif %} {% if 'grpc' in opts.transport %} {# TODO(dovs): genericize this function#} @@ -531,6 +495,64 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, 
"grpc", grpc_helpers), + ({{ service.async_client_name }}, transports.{{ service.grpc_asyncio_transport_name }}, "grpc_asyncio", grpc_helpers_async), +]) +def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + {% with host = (service.host|default('localhost', true)) %} + create_channel.assert_called_with( + "{{ host }}{% if ":" not in service.host %}:443{% endif %}", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + {% for scope in service.oauth_scopes %} + '{{ scope }}', + {% endfor %}), + scopes=None, + default_host="{{ host }}", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + {% endwith %} {% 
endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index e7f2f550260f..a71bb7f5d0e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -418,37 +418,6 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran always_use_jwt_access=True, ) - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudasset.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="cloudasset.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - def test_asset_service_client_client_options_from_dict(): with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -467,6 +436,61 @@ def test_asset_service_client_client_options_from_dict(): ) 
+@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), + (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_asset_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "cloudasset.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="cloudasset.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [ asset_service.ExportAssetsRequest, dict, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index d710b57c46c1..1ac841d60e3d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -409,37 +409,6 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr always_use_jwt_access=True, ) - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "iamcredentials.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="iamcredentials.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - def test_iam_credentials_client_client_options_from_dict(): with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -458,6 +427,61 @@ def test_iam_credentials_client_client_options_from_dict(): ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", grpc_helpers), + (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_iam_credentials_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "iamcredentials.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="iamcredentials.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [ common.GenerateAccessTokenRequest, dict, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 5e06e3f10ef5..75cd76119dfb 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -410,40 +410,6 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', -), - scopes=None, - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - def test_config_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -462,6 +428,64 @@ def test_config_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (ConfigServiceV2Client, 
transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_config_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + 
("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [ logging_config.ListBucketsRequest, dict, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 3e5cb31fbb29..bca0c669efcc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -416,41 +416,6 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - scopes=None, - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - 
("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - def test_logging_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -469,6 +434,65 @@ def test_logging_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_logging_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [ logging.DeleteLogRequest, dict, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 13830b687b1e..ffb6e2282722 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -414,41 +414,6 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, always_use_jwt_access=True, ) - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "logging.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/cloud-platform.read-only', - 'https://www.googleapis.com/auth/logging.admin', - 'https://www.googleapis.com/auth/logging.read', - 'https://www.googleapis.com/auth/logging.write', -), - scopes=None, - default_host="logging.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - def test_metrics_service_v2_client_client_options_from_dict(): with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -467,6 +432,65 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_metrics_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [ logging_metrics.ListLogMetricsRequest, dict, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py 
b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ed33c9017f8f..5f38005d629c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -415,37 +415,6 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp always_use_jwt_access=True, ) - if "grpc" in transport_name: - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "redis.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="redis.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - def test_cloud_redis_client_client_options_from_dict(): with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: grpc_transport.return_value = None @@ -464,6 +433,61 @@ def test_cloud_redis_client_client_options_from_dict(): ) +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + 
(CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_cloud_redis_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [ cloud_redis.ListInstancesRequest, dict, From 3a0371bbeac31021074f0aae2bc75fb9760f91f6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou 
Date: Wed, 2 Feb 2022 15:23:23 -0500 Subject: [PATCH 0723/1339] fix(deps): require setuptools<=60.5.0 (#1179) * fix(deps): require setuptools<=60.5.0 * pin types-protobuf<=3.19.7 in mypy nox session * move pin to correct session --- packages/gapic-generator/noxfile.py | 2 +- packages/gapic-generator/requirements.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index c87a8e5559a7..00d385bd47f3 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -443,6 +443,6 @@ def docs(session): def mypy(session): """Perform typecheck analysis.""" - session.install("mypy", "types-protobuf", "types-PyYAML", "types-dataclasses") + session.install("mypy", "types-protobuf<=3.19.7", "types-PyYAML", "types-dataclasses") session.install(".") session.run("mypy", "gapic") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 022953c4f43d..75b19c15186a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,3 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped +setuptools<=60.5.0 # pin setuptools to workaround https://github.com/pypa/setuptools/issues/3072 From 1f25e6de35c5ec2654d198c0c2605c7bd0a8a22d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 2 Feb 2022 15:52:36 -0500 Subject: [PATCH 0724/1339] chore(main): release 0.62.1 (#1181) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index bbea6666f387..b1bb49c12cfb 100644 --- 
a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.62.1](https://github.com/googleapis/gapic-generator-python/compare/v0.62.0...v0.62.1) (2022-02-02) + + +### Bug Fixes + +* **deps:** require setuptools<=60.5.0 ([#1179](https://github.com/googleapis/gapic-generator-python/issues/1179)) ([fb56af7](https://github.com/googleapis/gapic-generator-python/commit/fb56af7cd33931f6747b5ce1fb8502bcbc74dcc7)) + ## [0.62.0](https://github.com/googleapis/gapic-generator-python/compare/v0.61.0...v0.62.0) (2022-01-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e3268bc682eb..288f4fcf6648 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.62.0" +version = "0.62.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From f10be3f3d0af71aecfc2b8b9ec0b519a2e87ebbd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 2 Feb 2022 23:23:03 +0100 Subject: [PATCH 0725/1339] chore(deps): pin dependency setuptools to v==60.5.0 (#1183) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [setuptools](https://togithub.com/pypa/setuptools) | pin | `<=60.5.0` -> `==60.5.0` | 📌 **Important**: Renovate will wait until you have merged this Pin PR before creating any *upgrade* PRs for the affected packages. Add the preset `:preserveSemverRanges` to your config if you instead don't wish to pin dependencies. --- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. 
♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 75b19c15186a..4f8d81f04a61 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools<=60.5.0 # pin setuptools to workaround https://github.com/pypa/setuptools/issues/3072 +setuptools==60.5.0 # pin setuptools to workaround https://github.com/pypa/setuptools/issues/3072 From 0985dc9704a4c375e0296df887d446235cf30f70 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Wed, 2 Feb 2022 16:08:12 -0800 Subject: [PATCH 0726/1339] feat: adds dynamic routing. (#1135) feat: adds dynamic routing files. 
--- .../gapic-generator/gapic/schema/wrappers.py | 175 +++++-- .../%sub/services/%service/client.py.j2 | 27 +- .../%name_%version/%sub/test_%service.py.j2 | 40 +- .../gapic-generator/gapic/utils/uri_sample.py | 96 ++++ packages/gapic-generator/noxfile.py | 4 +- .../gapic-generator/test_utils/test_utils.py | 7 +- .../tests/fragments/google/api/routing.proto | 461 ++++++++++++++++++ .../fragments/test_dynamic_routing.proto | 167 +++++++ .../asset_v1/services/asset_service/client.py | 24 +- .../services/iam_credentials/client.py | 8 +- .../services/config_service_v2/client.py | 46 +- .../services/logging_service_v2/client.py | 4 +- .../services/metrics_service_v2/client.py | 10 +- .../redis_v1/services/cloud_redis/client.py | 18 +- .../tests/unit/schema/wrappers/test_method.py | 24 + .../unit/schema/wrappers/test_routing.py | 170 +++++++ .../tests/unit/utils/test_uri_sample.py | 31 ++ 17 files changed, 1205 insertions(+), 107 deletions(-) create mode 100644 packages/gapic-generator/gapic/utils/uri_sample.py create mode 100644 packages/gapic-generator/tests/fragments/google/api/routing.proto create mode 100644 packages/gapic-generator/tests/fragments/test_dynamic_routing.proto create mode 100644 packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py create mode 100644 packages/gapic-generator/tests/unit/utils/test_uri_sample.py diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 25fb11ae7be8..aa379f47872d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -33,12 +33,13 @@ import re from itertools import chain from typing import (Any, cast, Dict, FrozenSet, Iterator, Iterable, List, Mapping, - ClassVar, Optional, Sequence, Set, Tuple, Union) + ClassVar, Optional, Sequence, Set, Tuple, Union, Pattern) from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import 
field_behavior_pb2 from google.api import http_pb2 from google.api import resource_pb2 +from google.api import routing_pb2 from google.api_core import exceptions from google.api_core import path_template from google.cloud import extended_operations_pb2 as ex_ops_pb2 # type: ignore @@ -47,6 +48,7 @@ from gapic import utils from gapic.schema import metadata +from gapic.utils import uri_sample @dataclasses.dataclass(frozen=True) @@ -763,6 +765,118 @@ class RetryInfo: retryable_exceptions: FrozenSet[exceptions.GoogleAPICallError] +@dataclasses.dataclass(frozen=True) +class RoutingParameter: + field: str + path_template: str + + def _split_into_segments(self, path_template): + segments = path_template.split("/") + named_segment_ids = [i for i, x in enumerate( + segments) if "{" in x or "}" in x] + # bar/{foo}/baz, bar/{foo=one/two/three}/baz. + assert len(named_segment_ids) <= 2 + if len(named_segment_ids) == 2: + # Need to merge a named segment. + i, j = named_segment_ids + segments = ( + segments[:i] + + [self._merge_segments(segments[i: j + 1])] + segments[j + 1:] + ) + return segments + + def _convert_segment_to_regex(self, segment): + # Named segment + if "{" in segment: + assert "}" in segment + # Strip "{" and "}" + segment = segment[1:-1] + if "=" not in segment: + # e.g. {foo} should be {foo=*} + return self._convert_segment_to_regex("{" + f"{segment}=*" + "}") + key, sub_path_template = segment.split("=") + group_name = f"?P<{key}>" + sub_regex = self._convert_to_regex(sub_path_template) + return f"({group_name}{sub_regex})" + # Wildcards + if "**" in segment: + # ?: nameless capture + return ".*" + if "*" in segment: + return "[^/]+" + # Otherwise it's collection ID segment: transformed identically. + return segment + + def _merge_segments(self, segments): + acc = segments[0] + for x in segments[1:]: + # Don't add "/" if it's followed by a "**" + # because "**" will eat it. + if x == ".*": + acc += "(?:/.*)?" 
+ else: + acc += "/" + acc += x + return acc + + def _how_many_named_segments(self, path_template): + return path_template.count("{") + + def _convert_to_regex(self, path_template): + if self._how_many_named_segments(path_template) > 1: + # This also takes care of complex patterns (i.e. {foo}~{bar}) + raise ValueError("There must be exactly one named segment. {} has {}.".format( + path_template, self._how_many_named_segments(path_template))) + segments = self._split_into_segments(path_template) + segment_regexes = [self._convert_segment_to_regex(x) for x in segments] + final_regex = self._merge_segments(segment_regexes) + return final_regex + + def _to_regex(self, path_template: str) -> Pattern: + """Converts path_template into a Python regular expression string. + Args: + path_template (str): A path template corresponding to a resource name. + It can only have 0 or 1 named segments. It can not contain complex resource ID path segments. + See https://google.aip.dev/122, https://google.aip.dev/4222 + and https://google.aip.dev/client-libraries/4231 for more details. + Returns: + Pattern: A Pattern object that matches strings conforming to the path_template. + """ + return re.compile(f"^{self._convert_to_regex(path_template)}$") + + def to_regex(self) -> Pattern: + return self._to_regex(self.path_template) + + @property + def key(self) -> Union[str, None]: + if self.path_template == "": + return self.field + regex = self.to_regex() + group_names = list(regex.groupindex) + # Only 1 named segment is allowed and so only 1 key. 
+ return group_names[0] if group_names else self.field + + @property + def sample_request(self) -> str: + """return json dict for sample request matching the uri template.""" + sample = uri_sample.sample_from_path_template( + self.field, self.path_template) + return json.dumps(sample) + + +@dataclasses.dataclass(frozen=True) +class RoutingRule: + routing_parameters: List[RoutingParameter] + + @classmethod + def try_parse_routing_rule(cls, routing_rule: routing_pb2.RoutingRule) -> Optional['RoutingRule']: + params = getattr(routing_rule, 'routing_parameters') + if not params: + return None + params = [RoutingParameter(x.field, x.path_template) for x in params] + return cls(params) + + @dataclasses.dataclass(frozen=True) class HttpRule: """Representation of the method's http bindings.""" @@ -788,59 +902,18 @@ def sample_from_path_fields(paths: List[Tuple[Field, str, str]]) -> Dict[str, An Returns: A new nested dict with the templates instantiated. """ - request: Dict[str, Any] = {} - def _sample_names() -> Iterator[str]: - sample_num: int = 0 - while True: - sample_num += 1 - yield "sample{}".format(sample_num) - - def add_field(obj, path, value): - """Insert a field into a nested dict and return the (outer) dict. - Keys and sub-dicts are inserted if necessary to create the path. - e.g. if obj, as passed in, is {}, path is "a.b.c", and value is - "hello", obj will be updated to: - {'a': - {'b': - { - 'c': 'hello' - } - } - } - - Args: - obj: a (possibly) nested dict (parsed json) - path: a segmented field name, e.g. "a.b.c" - where each part is a dict key. - value: the value of the new key. - Returns: - obj, possibly modified - Raises: - AttributeError if the path references a key that is - not a dict.: e.g. 
path='a.b', obj = {'a':'abc'} - """ - - segments = path.split('.') - leaf = segments.pop() - subfield = obj - for segment in segments: - subfield = subfield.setdefault(segment, {}) - subfield[leaf] = value - return obj - - sample_names = _sample_names() + sample_names_ = uri_sample.sample_names() for field, path, template in paths: sample_value = re.sub( r"(\*\*|\*)", - lambda n: next(sample_names), + lambda n: next(sample_names_), template or '*' ) if field.type == PrimitiveType.build(str) else field.mock_value_original_type - add_field(request, path, sample_value) + uri_sample.add_field(request, path, sample_value) return request - sample = sample_from_path_fields(self.path_fields(method)) return sample @@ -982,6 +1055,18 @@ def field_headers(self) -> Sequence[str]: return next((tuple(pattern.findall(verb)) for verb in potential_verbs if verb), ()) + @property + def explicit_routing(self): + return routing_pb2.routing in self.options.Extensions + + @property + def routing_rule(self): + if self.explicit_routing: + routing_ext = self.options.Extensions[routing_pb2.routing] + routing_rule = RoutingRule.try_parse_routing_rule(routing_ext) + return routing_rule + return None + @property def http_options(self) -> List[HttpRule]: """Return a list of the http bindings for this method.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index d0727567c6b6..46c4b6cf29d7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -478,8 +478,31 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] - {% if method.field_headers %} + {% if method.explicit_routing %} + header_params = {} + {% for routing_param in method.routing_rule.routing_parameters %} + {% if routing_param.path_template %} {# Need to match. #} + + routing_param_regex = {{ routing_param.to_regex() }} + regex_match = routing_param_regex.match(request.{{ routing_param.field }}) + if regex_match and regex_match.group("{{ routing_param.key }}"): + header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") + + {% else %} + + if request.{{ routing_param.field }}: + header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} + + {% endif %} + {% endfor %} {# method.routing_rule.routing_parameters #} + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + {% elif method.field_headers %} {# implicit routing #} # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( @@ -491,7 +514,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endfor %} )), ) - {% endif %} + {% endif %} {# method.explicit_routing #} # Send the request. 
{%+ if not method.void %}response = {% endif %}rpc( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e9eec7e9bb27..7bde90ac43d5 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -756,7 +756,45 @@ async def test_{{ method_name }}_async_from_dict(): await test_{{ method_name }}_async(request_type=dict) -{% if method.field_headers and not method.client_streaming %} +{% if method.explicit_routing %} +def test_{{ method.name|snake_case }}_routing_parameters(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + {% for routing_param in method.routing_rule.routing_parameters %} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}({{ routing_param.sample_request }}) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.name|snake_case }}), + '__call__') as call: + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + client.{{ method.name|snake_case }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. 
+ assert kw['metadata'] + {% endfor %} +{% endif %} + + +{% if method.field_headers and not method.client_streaming and not method.explicit_routing %} def test_{{ method_name }}_field_headers(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/gapic/utils/uri_sample.py b/packages/gapic-generator/gapic/utils/uri_sample.py new file mode 100644 index 000000000000..6d2b6f8e9a53 --- /dev/null +++ b/packages/gapic-generator/gapic/utils/uri_sample.py @@ -0,0 +1,96 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Generator, Dict, List, Tuple +import re + + +def sample_names() -> Generator[str, None, None]: + sample_num: int = 0 + while True: + sample_num += 1 + yield "sample{}".format(sample_num) + + +def add_field(obj, path, value): + """Insert a field into a nested dict and return the (outer) dict. + Keys and sub-dicts are inserted if necessary to create the path. + e.g. if obj, as passed in, is {}, path is "a.b.c", and value is + "hello", obj will be updated to: + {'a': + {'b': + { + 'c': 'hello' + } + } + } + + Args: + obj: a (possibly) nested dict (parsed json) + path: a segmented field name, e.g. "a.b.c" + where each part is a dict key. + value: the value of the new key. + Returns: + obj, possibly modified + Raises: + AttributeError if the path references a key that is + not a dict.: e.g. 
path='a.b', obj = {'a':'abc'} + """ + + segments = path.split('.') + leaf = segments.pop() + subfield = obj + for segment in segments: + subfield = subfield.setdefault(segment, {}) + subfield[leaf] = value + return obj + + +def sample_from_path_fields(paths: List[Tuple[str, str]]) -> Dict[Any, Any]: + """Construct a dict for a sample request object from a list of fields + and template patterns. + Args: + paths: a list of tuples, each with a (segmented) name and a pattern. + Returns: + A new nested dict with the templates instantiated. + """ + + request: Dict[str, Any] = {} + sample_names_ = sample_names() + + for path, template in paths: + sample_value = re.sub( + r"(\*\*|\*)", + lambda n: next(sample_names_), template if template else '*' + ) + add_field(request, path, sample_value) + return request + + +def sample_from_path_template(field: str, path_template: str) -> Dict[Any, Any]: + """Construct a dict for a sample request object from a field and path template. + Args: + field: str, the name of the field in the request. + path_template: str, the template containing the field pattern. + Returns: + A new nested dict that has field as key and the instantiated template as value. 
+ """ + if '{' in path_template: + i = path_template.index('{') + j = path_template.index('}') + seg = path_template[i:j + 1] + # Skip "}" + seg = seg[seg.index('=') + 1:-1] + path_template = path_template[:i] + seg + path_template[j + 1:] + return sample_from_path_fields([(field, path_template)]) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 00d385bd47f3..718469855ce4 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -46,12 +46,10 @@ @nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" - session.install( - "coverage", "pytest-cov", "pytest", "pytest-xdist", "pyfakefs", "grpcio-status", + "coverage", "pytest-cov", "pytest", "pytest-xdist", "pyfakefs", "grpcio-status", "proto-plus", ) session.install("-e", ".") - session.run( "py.test", *( diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index a499606f49f7..89e341b5fba7 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -18,7 +18,7 @@ from gapic.schema import metadata from gapic.schema import naming from gapic.schema import wrappers -from google.api import annotations_pb2 +from google.api import annotations_pb2, routing_pb2 from google.api import client_pb2 from google.api import http_pb2 from google.protobuf import descriptor_pb2 as desc @@ -163,6 +163,7 @@ def make_method( http_rule: http_pb2.HttpRule = None, signatures: typing.Sequence[str] = (), is_deprecated: bool = False, + routing_rule: routing_pb2.RoutingRule = None, **kwargs ) -> wrappers.Method: # Use default input and output messages if they are not provided. @@ -177,6 +178,10 @@ def make_method( **kwargs ) + if routing_rule: + ext_key = routing_pb2.routing + method_pb.options.Extensions[ext_key].MergeFrom(routing_rule) + # If there is an HTTP rule, process it. 
if http_rule: ext_key = annotations_pb2.http diff --git a/packages/gapic-generator/tests/fragments/google/api/routing.proto b/packages/gapic-generator/tests/fragments/google/api/routing.proto new file mode 100644 index 000000000000..f235031a9cb9 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/api/routing.proto @@ -0,0 +1,461 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "RoutingProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See RoutingRule. + google.api.RoutingRule routing = 72295729; +} + +// Specifies the routing information that should be sent along with the request +// in the form of routing header. +// **NOTE:** All service configuration rules follow the "last one wins" order. +// +// The examples below will apply to an RPC which has the following request type: +// +// Message Definition: +// +// message Request { +// // The name of the Table +// // Values can be of the following formats: +// // - `projects//tables/` +// // - `projects//instances//tables/
` +// // - `region//zones//tables/
` +// string table_name = 1; +// +// // This value specifies routing for replication. +// // It can be in the following formats: +// // - `profiles/` +// // - a legacy `profile_id` that can be any string +// string app_profile_id = 2; +// } +// +// Example message: +// +// { +// table_name: projects/proj_foo/instances/instance_bar/table/table_baz, +// app_profile_id: profiles/prof_qux +// } +// +// The routing header consists of one or multiple key-value pairs. Every key +// and value must be percent-encoded, and joined together in the format of +// `key1=value1&key2=value2`. +// In the examples below I am skipping the percent-encoding for readablity. +// +// Example 1 +// +// Extracting a field from the request to put into the routing header +// unchanged, with the key equal to the field name. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `app_profile_id`. +// routing_parameters { +// field: "app_profile_id" +// } +// }; +// +// result: +// +// x-goog-request-params: app_profile_id=profiles/prof_qux +// +// Example 2 +// +// Extracting a field from the request to put into the routing header +// unchanged, with the key different from the field name. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `app_profile_id`, but name it `routing_id` in the header. +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// }; +// +// result: +// +// x-goog-request-params: routing_id=profiles/prof_qux +// +// Example 3 +// +// Extracting a field from the request to put into the routing +// header, while matching a path template syntax on the field's value. +// +// NB: it is more useful to send nothing than to send garbage for the purpose +// of dynamic routing, since garbage pollutes cache. Thus the matching. +// +// Sub-example 3a +// +// The field matches the template. 
+// +// annotation: +// +// option (google.api.routing) = { +// // Take the `table_name`, if it's well-formed (with project-based +// // syntax). +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=projects/*/instances/*/**}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// table_name=projects/proj_foo/instances/instance_bar/table/table_baz +// +// Sub-example 3b +// +// The field does not match the template. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `table_name`, if it's well-formed (with region-based +// // syntax). +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=regions/*/zones/*/**}" +// } +// }; +// +// result: +// +// +// +// Sub-example 3c +// +// Multiple alternative conflictingly named path templates are +// specified. The one that matches is used to construct the header. +// +// annotation: +// +// option (google.api.routing) = { +// // Take the `table_name`, if it's well-formed, whether +// // using the region- or projects-based syntax. +// +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=regions/*/zones/*/**}" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{table_name=projects/*/instances/*/**}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// table_name=projects/proj_foo/instances/instance_bar/table/table_baz +// +// Example 4 +// +// Extracting a single routing header key-value pair by matching a +// template syntax on (a part of) a single request field. +// +// annotation: +// +// option (google.api.routing) = { +// // Take just the project id from the `table_name` field. 
+// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// }; +// +// result: +// +// x-goog-request-params: routing_id=projects/proj_foo +// +// Example 5 +// +// Extracting a single routing header key-value pair by matching +// several conflictingly named path templates on (parts of) a single request +// field. The last template to match "wins" the conflict. +// +// annotation: +// +// option (google.api.routing) = { +// // If the `table_name` does not have instances information, +// // take just the project id for routing. +// // Otherwise take project + instance. +// +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*/instances/*}/**" +// } +// }; +// +// result: +// +// x-goog-request-params: +// routing_id=projects/proj_foo/instances/instance_bar +// +// Example 6 +// +// Extracting multiple routing header key-value pairs by matching +// several non-conflicting path templates on (parts of) a single request field. +// +// Sub-example 6a +// +// Make the templates strict, so that if the `table_name` does not +// have an instance information, nothing is sent. +// +// annotation: +// +// option (google.api.routing) = { +// // The routing code needs two keys instead of one composite +// // but works only for the tables with the "project-instance" name +// // syntax. +// +// routing_parameters { +// field: "table_name" +// path_template: "{project_id=projects/*}/instances/*/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "projects/*/{instance_id=instances/*}/**" +// } +// }; +// +// result: +// +// x-goog-request-params: +// project_id=projects/proj_foo&instance_id=instances/instance_bar +// +// Sub-example 6b +// +// Make the templates loose, so that if the `table_name` does not +// have an instance information, just the project id part is sent. 
+// +// annotation: +// +// option (google.api.routing) = { +// // The routing code wants two keys instead of one composite +// // but will work with just the `project_id` for tables without +// // an instance in the `table_name`. +// +// routing_parameters { +// field: "table_name" +// path_template: "{project_id=projects/*}/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "projects/*/{instance_id=instances/*}/**" +// } +// }; +// +// result (is the same as 6a for our example message because it has the instance +// information): +// +// x-goog-request-params: +// project_id=projects/proj_foo&instance_id=instances/instance_bar +// +// Example 7 +// +// Extracting multiple routing header key-value pairs by matching +// several path templates on multiple request fields. +// +// NB: note that here there is no way to specify sending nothing if one of the +// fields does not match its template. E.g. if the `table_name` is in the wrong +// format, the `project_id` will not be sent, but the `routing_id` will be. +// The backend routing code has to be aware of that and be prepared to not +// receive a full complement of keys if it expects multiple. +// +// annotation: +// +// option (google.api.routing) = { +// // The routing needs both `project_id` and `routing_id` +// // (from the `app_profile_id` field) for routing. +// +// routing_parameters { +// field: "table_name" +// path_template: "{project_id=projects/*}/**" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// project_id=projects/proj_foo&routing_id=profiles/prof_qux +// +// Example 8 +// +// Extracting a single routing header key-value pair by matching +// several conflictingly named path templates on several request fields. The +// last template to match "wins" the conflict. 
+// +// annotation: +// +// option (google.api.routing) = { +// // The `routing_id` can be a project id or a region id depending on +// // the table name format, but only if the `app_profile_id` is not set. +// // If `app_profile_id` is set it should be used instead. +// +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=regions/*}/**" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// }; +// +// result: +// +// x-goog-request-params: routing_id=profiles/prof_qux +// +// Example 9 +// +// Bringing it all together. +// +// annotation: +// +// option (google.api.routing) = { +// // For routing both `table_location` and a `routing_id` are needed. +// // +// // table_location can be either an instance id or a region+zone id. +// // +// // For `routing_id`, take the value of `app_profile_id` +// // - If it's in the format `profiles/`, send +// // just the `` part. +// // - If it's any other literal, send it as is. +// // If the `app_profile_id` is empty, and the `table_name` starts with +// // the project_id, send that instead. +// +// routing_parameters { +// field: "table_name" +// path_template: "projects/*/{table_location=instances/*}/tables/*" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{table_location=regions/*/zones/*}/tables/*" +// } +// routing_parameters { +// field: "table_name" +// path_template: "{routing_id=projects/*}/**" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "{routing_id=**}" +// } +// routing_parameters { +// field: "app_profile_id" +// path_template: "profiles/{routing_id=*}" +// } +// }; +// +// result: +// +// x-goog-request-params: +// table_location=instances/instance_bar&routing_id=prof_qux +message RoutingRule { + // A collection of Routing Parameter specifications. 
+ // **NOTE:** If multiple Routing Parameters describe the same key + // (via the `path_template` field or via the `field` field when + // `path_template` is not provided), "last one wins" rule + // determines which Parameter gets used. + // See the examples for more details. + repeated RoutingParameter routing_parameters = 2; +} + +// A projection from an input message to the GRPC or REST header. +message RoutingParameter { + // A request field to extract the header key-value pair from. + string field = 1; + + // A pattern matching the key-value field. Optional. + // If not specified, the whole field specified in the `field` field will be + // taken as value, and its name used as key. If specified, it MUST contain + // exactly one named segment (along with any number of unnamed segments) The + // pattern will be matched over the field specified in the `field` field, then + // if the match is successful: + // - the name of the single named segment will be used as a header name, + // - the match value of the segment will be used as a header value; + // if the match is NOT successful, nothing will be sent. + // + // Example: + // + // -- This is a field in the request message + // | that the header value will be extracted from. + // | + // | -- This is the key name in the + // | | routing header. + // V | + // field: "table_name" v + // path_template: "projects/*/{table_location=instances/*}/tables/*" + // ^ ^ + // | | + // In the {} brackets is the pattern that -- | + // specifies what to extract from the | + // field as a value to be sent. | + // | + // The string in the field must match the whole pattern -- + // before brackets, inside brackets, after brackets. + // + // When looking at this specific example, we can see that: + // - A key-value pair with the key `table_location` + // and the value matching `instances/*` should be added + // to the x-goog-request-params routing header. 
+ // - The value is extracted from the request message's `table_name` field + // if it matches the full pattern specified: + // `projects/*/instances/*/tables/*`. + // + // **NB:** If the `path_template` field is not provided, the key name is + // equal to the field name, and the whole field should be sent as a value. + // This makes the pattern for the field and the value functionally equivalent + // to `**`, and the configuration + // + // { + // field: "table_name" + // } + // + // is a functionally equivalent shorthand to: + // + // { + // field: "table_name" + // path_template: "{table_name=**}" + // } + // + // See Example 1 for more details. + string path_template = 2; +} \ No newline at end of file diff --git a/packages/gapic-generator/tests/fragments/test_dynamic_routing.proto b/packages/gapic-generator/tests/fragments/test_dynamic_routing.proto new file mode 100644 index 000000000000..a8be352dc30d --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_dynamic_routing.proto @@ -0,0 +1,167 @@ +// Copyright (C) 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/api/annotations.proto"; +import "google/api/routing.proto"; + +service ServiceNoHeaders { + option (google.api.default_host) = "routingheaders.example.com"; + + rpc Plain(Request) returns(Response); +} + +service ServiceImplicitHeaders { + option (google.api.default_host) = "routingheaders.example.com"; + + rpc Plain(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{table_name=projects/*/instances/*/tables/*}:plain" + }; + } + + rpc WithSubMessage(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{resource.resource_name=projects/*/instances/*/tables/*}:withSubMessage" + }; + } + + rpc WithMultipleLevels(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{resource.inner.inner_name=projects/*/instances/*/tables/*}:withMultipleLevels" + }; + } +} + +service ServiceExplicitHeaders { + option (google.api.default_host) = "routingheaders.example.com"; + + rpc PlainNoTemplate(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" + }; + option (google.api.routing) = { + routing_parameters { + field: "table_name" + } + }; + } + + rpc PlainFullField(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" + }; + option (google.api.routing) = { + routing_parameters { + field: "table_name" + path_template: "{table_name=projects/*/instances/*/tables/*}" + } + }; + } + + rpc PlainExtract(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" + }; + option (google.api.routing) = { + routing_parameters { + field: "table_name" + path_template: "projects/*/instances/*/{table_name=tables/*}" + } + }; + } + + rpc Complex(Request) returns(Response) { + option (google.api.http) = { + get: 
"/v2/{table_name=projects/*/instances/*/tables/*}:sampleRowKeys" + }; + option (google.api.routing) = { + routing_parameters { + field: "table_name" + path_template: "{project_id=projects/*}/instances/*/tables/*" + } + routing_parameters { + field: "table_name" + path_template: "projects/*/{instance_id=instances/*}/tables/*" + } + routing_parameters { + field: "table_name" + path_template: "projects/*/instances/*/{table_id=tables/*}" + } + routing_parameters { + field: "table_name" + path_template: "{table_name=projects/*/instances/*/tables/*}" + } + routing_parameters { + field: "table_name" + path_template: "{table_name=projects/*/instances/*/tables/*/aliases/**}" + } + routing_parameters { + field: "app_profile_id" + } + routing_parameters { + field: "app_profile_id" + path_template: "{app_profile_id_renamed=**}" + } + }; + } + + rpc WithSubMessage(Request) returns(Response) { + option (google.api.http) = { + get: "/v2/{resource.resource_name=projects/*/instances/*/tables/*}:withSubMessage" + }; + option (google.api.routing) = { + routing_parameters { + field: "resource.resource_name" + path_template: "{table_name=projects/*/instances/*/tables/*}" + } + routing_parameters { + field: "app_profile_id" + } + }; + } +} + +message Request { + // The name of the Table + // Values can be of the following formats: + // - `projects//tables/
` + // - `projects/<project>/instances/<instance>/tables/
` + // - `region/<region>/zones/<zone>/tables/
` + string table_name = 1; + + // This value specifies routing for replication. + // It can be in the following formats: + // - profiles/ + // - a legacy profile_id that can be any string + string app_profile_id = 2; + + RequestResource resource = 3; +} + +message RequestResource { + string resource_name = 1; + InnerResource inner = 2; +} + +message InnerResource { + string inner_name = 1; +} + +message Response { +} \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 41f6812d7a2c..cce676d1231e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -465,7 +465,7 @@ def sample_export_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -573,7 +573,7 @@ def sample_list_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -662,7 +662,7 @@ def sample_batch_get_assets_history(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.batch_get_assets_history] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -776,7 +776,7 @@ def sample_create_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -877,7 +877,7 @@ def sample_get_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -975,7 +975,7 @@ def sample_list_feeds(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_feeds] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1080,7 +1080,7 @@ def sample_update_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1167,7 +1167,7 @@ def sample_delete_feed(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.delete_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1355,7 +1355,7 @@ def sample_search_all_resources(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.search_all_resources] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1534,7 +1534,7 @@ def sample_search_all_iam_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.search_all_iam_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1624,7 +1624,7 @@ def sample_analyze_iam_policy(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1725,7 +1725,7 @@ def sample_analyze_iam_policy_longrunning(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index cfec2f470b59..1510b790ad82 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -510,7 +510,7 @@ def sample_generate_access_token(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.generate_access_token] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -652,7 +652,7 @@ def sample_generate_id_token(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.generate_id_token] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -780,7 +780,7 @@ def sample_sign_blob(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.sign_blob] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -911,7 +911,7 @@ def sample_sign_jwt(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.sign_jwt] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 52f6b8bf0197..724f3d323f69 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -503,7 +503,7 @@ def sample_list_buckets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_buckets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -585,7 +585,7 @@ def sample_get_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -662,7 +662,7 @@ def sample_create_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -746,7 +746,7 @@ def sample_update_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -815,7 +815,7 @@ def sample_delete_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -880,7 +880,7 @@ def sample_undelete_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -975,7 +975,7 @@ def sample_list_views(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_views] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1059,7 +1059,7 @@ def sample_get_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1137,7 +1137,7 @@ def sample_create_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1214,7 +1214,7 @@ def sample_update_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1280,7 +1280,7 @@ def sample_delete_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1379,7 +1379,7 @@ def sample_list_sinks(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_sinks] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1497,7 +1497,7 @@ def sample_get_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1627,7 +1627,7 @@ def sample_create_sink(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.create_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1781,7 +1781,7 @@ def sample_update_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1877,7 +1877,7 @@ def sample_delete_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1976,7 +1976,7 @@ def sample_list_exclusions(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_exclusions] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2097,7 +2097,7 @@ def sample_get_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2230,7 +2230,7 @@ def sample_create_exclusion(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.create_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2377,7 +2377,7 @@ def sample_update_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2472,7 +2472,7 @@ def sample_delete_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2567,7 +2567,7 @@ def sample_get_cmek_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2671,7 +2671,7 @@ def sample_update_cmek_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index c30d02220d34..1d92d1d7dfbf 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -455,7 +455,7 @@ def sample_delete_log(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_log] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -969,7 +969,7 @@ def sample_list_logs(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_logs] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 408eab1573f3..7fc77b81d78e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -453,7 +453,7 @@ def sample_list_log_metrics(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -568,7 +568,7 @@ def sample_get_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -693,7 +693,7 @@ def sample_create_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -817,7 +817,7 @@ def sample_update_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -905,7 +905,7 @@ def sample_delete_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 307e5b2d443c..c4c69e6418dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -486,7 +486,7 @@ def sample_list_instances(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_instances] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -589,7 +589,7 @@ def sample_get_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -737,7 +737,7 @@ def sample_create_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -873,7 +873,7 @@ def sample_update_instance(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -995,7 +995,7 @@ def sample_upgrade_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.upgrade_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1127,7 +1127,7 @@ def sample_import_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.import_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1255,7 +1255,7 @@ def sample_export_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1378,7 +1378,7 @@ def sample_failover_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.failover_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1499,7 +1499,7 @@ def sample_delete_instance(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.delete_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index ff7b032e7452..1caf1cce3656 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -19,6 +19,7 @@ from google.api import field_behavior_pb2 from google.api import http_pb2 +from google.api import routing_pb2 from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 @@ -289,6 +290,29 @@ def test_method_field_headers_present(): assert method.field_headers == ('parent',) +def test_method_routing_rule(): + routing_rule = routing_pb2.RoutingRule() + param = routing_rule.routing_parameters.add() + param.field = 'table_name' + param.path_template = 'projects/*/{table_location=instances/*}/tables/*' + method = make_method('DoSomething', routing_rule=routing_rule) + assert method.explicit_routing + assert method.routing_rule.routing_parameters == [wrappers.RoutingParameter( + x.field, x.path_template) for x in routing_rule.routing_parameters] + assert method.routing_rule.routing_parameters[0].sample_request is not None + + +def test_method_routing_rule_empty_routing_parameters(): + routing_rule = routing_pb2.RoutingRule() + method = make_method('DoSomething', routing_rule=routing_rule) + assert method.routing_rule is None + + +def test_method_routing_rule_not_set(): + method = make_method('DoSomething') + assert method.routing_rule is None + + def test_method_http_opt(): http_rule = http_pb2.HttpRule( post='/v1/{parent=projects/*}/topics', diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py new file mode 100644 index 000000000000..f93d6680a0a7 --- /dev/null +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py @@ -0,0 +1,170 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gapic.schema import wrappers + +import proto +import pytest + + +class RoutingTestRequest(proto.Message): + table_name = proto.Field(proto.STRING, number=1) + app_profile_id = proto.Field(proto.STRING, number=2) + + +def resolve(rule, request): + """This function performs dynamic header resolution, identical to what's in client.py.j2.""" + + def _get_field(request, field_path: str): + segments = field_path.split(".") + cur = request + for x in segments: + cur = getattr(cur, x) + return cur + + header_params = {} + for routing_param in rule.routing_parameters: + # This may raise exception (which we show to clients). 
+ request_field_value = _get_field(request, routing_param.field) + if routing_param.path_template: + routing_param_regex = routing_param.to_regex() + regex_match = routing_param_regex.match(request_field_value) + if regex_match: + header_params[routing_param.key] = regex_match.group( + routing_param.key) + else: # No need to match + header_params[routing_param.key] = request_field_value + return header_params + + +@pytest.mark.parametrize( + "req, expected", + [ + (RoutingTestRequest(app_profile_id="foo.123"), + {"app_profile_id": "foo.123"}), + ( + RoutingTestRequest(app_profile_id="projects/100"), + {"app_profile_id": "projects/100"}, + ), + (RoutingTestRequest(app_profile_id=""), {"app_profile_id": ""}), + ], +) +def test_routing_rule_resolve_simple_extraction(req, expected): + rule = wrappers.RoutingRule( + [wrappers.RoutingParameter("app_profile_id", "")]) + assert resolve(rule, req) == expected + + +@pytest.mark.parametrize( + "req, expected", + [ + (RoutingTestRequest(app_profile_id="foo.123"), + {"routing_id": "foo.123"}), + ( + RoutingTestRequest(app_profile_id="projects/100"), + {"routing_id": "projects/100"}, + ), + (RoutingTestRequest(app_profile_id=""), {"routing_id": ""}), + ], +) +def test_routing_rule_resolve_rename_extraction(req, expected): + rule = wrappers.RoutingRule( + [wrappers.RoutingParameter("app_profile_id", "{routing_id=**}")] + ) + assert resolve(rule, req) == expected + + +@pytest.mark.parametrize( + "req, expected", + [ + ( + RoutingTestRequest(table_name="projects/100/instances/200"), + {"table_name": "projects/100/instances/200"}, + ), + ( + RoutingTestRequest( + table_name="projects/100/instances/200/whatever"), + {"table_name": "projects/100/instances/200/whatever"}, + ), + (RoutingTestRequest(table_name="foo"), {}), + ], +) +def test_routing_rule_resolve_field_match(req, expected): + rule = wrappers.RoutingRule( + [ + wrappers.RoutingParameter( + "table_name", "{table_name=projects/*/instances/*/**}" + ), + 
wrappers.RoutingParameter( + "table_name", "{table_name=regions/*/zones/*/**}" + ), + ] + ) + assert resolve(rule, req) == expected + + +@pytest.mark.parametrize( + "routing_parameters, req, expected", + [ + ( + [ + wrappers.RoutingParameter( + "table_name", "{project_id=projects/*}/instances/*/**" + ) + ], + RoutingTestRequest( + table_name="projects/100/instances/200/tables/300"), + {"project_id": "projects/100"}, + ), + ( + [ + wrappers.RoutingParameter( + "table_name", "{project_id=projects/*}/instances/*/**" + ), + wrappers.RoutingParameter( + "table_name", "projects/*/{instance_id=instances/*}/**" + ), + ], + RoutingTestRequest( + table_name="projects/100/instances/200/tables/300"), + {"project_id": "projects/100", "instance_id": "instances/200"}, + ), + ], +) +def test_routing_rule_resolve(routing_parameters, req, expected): + rule = wrappers.RoutingRule(routing_parameters) + got = resolve(rule, req) + assert got == expected + + +@pytest.mark.parametrize( + "field, path_template, expected", + [ + ("table_name", "{project_id=projects/*}/instances/*/**", "project_id"), + ("table_name", + "projects/*/{instance_id=instances/*}/**", "instance_id"), + ("table_name", "projects/*/{instance_id}/**", "instance_id"), + ], +) +def test_routing_parameter_key(field, path_template, expected): + param = wrappers.RoutingParameter(field, path_template) + assert param.key == expected + + +def test_routing_parameter_multi_segment_raises(): + param = wrappers.RoutingParameter( + "table_name", "{project_id=projects/*}/{instance_id=instances/*}/*/**" + ) + with pytest.raises(ValueError): + param.key diff --git a/packages/gapic-generator/tests/unit/utils/test_uri_sample.py b/packages/gapic-generator/tests/unit/utils/test_uri_sample.py new file mode 100644 index 000000000000..d9ce42364095 --- /dev/null +++ b/packages/gapic-generator/tests/unit/utils/test_uri_sample.py @@ -0,0 +1,31 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gapic.utils import uri_sample + + +def test_sample_from_path_template_inner(): + field = "table_name" + path_template = "{project_id=projects/*}/instances/*/tables/*" + res = uri_sample.sample_from_path_template(field, path_template) + assert res == { + "table_name": "projects/sample1/instances/sample2/tables/sample3"} + + +def test_sample_from_path_template_no_inner(): + field = "table_name" + path_template = "projects/*/instances/*/tables/*" + res = uri_sample.sample_from_path_template(field, path_template) + assert res == { + "table_name": "projects/sample1/instances/sample2/tables/sample3"} From e0a769324e06452e20e829dd3cc0b85dae135b04 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 3 Feb 2022 09:53:18 -0700 Subject: [PATCH 0727/1339] feat: enable snippetgen for default templates (#1171) Enable snippetgen for the default (non-Ads) templates. This reverts commit 8bdb70931a9ecb1c89fda9608697b0762770bc12 (which was a revert of #1044 and #1055). I've checked that the changes are OK (don't break generation for any APIs) by creating a [tag](https://github.com/googleapis/gapic-generator-python/commits/v0.62.0b1) and running the [presubmit](https://critique.corp.google.com/cl/424921742). 
--- .../gapic-generator/gapic/utils/options.py | 18 ++++++++++++--- .../tests/unit/generator/test_generator.py | 11 +++++---- .../tests/unit/generator/test_options.py | 23 ++++++++++++++++--- 3 files changed, 42 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index 154106af4f72..8efd401c8514 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -38,7 +38,7 @@ class Options: warehouse_package_name: str = '' retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) - autogen_snippets: bool = False + autogen_snippets: bool = True templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) lazy_import: bool = False old_naming: bool = False @@ -146,6 +146,18 @@ def tweak_path(p): # Build the options instance. sample_paths = opts.pop('samples', []) + # autogen-snippets is True by default, so make sure users can disable + # by passing `autogen-snippets=false` + autogen_snippets = opts.pop( + "autogen-snippets", ["True"])[0] in ("True", "true", "T", "t", "TRUE") + + # NOTE: Snippets are not currently correct for the alternative (Ads) templates + # so always disable snippetgen in that case + # https://github.com/googleapis/gapic-generator-python/issues/1052 + old_naming = bool(opts.pop('old-naming', False)) + if old_naming: + autogen_snippets = False + answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), @@ -157,10 +169,10 @@ def tweak_path(p): for s in sample_paths for cfg_path in samplegen_utils.generate_all_sample_fpaths(s) ), - autogen_snippets=bool(opts.pop("autogen-snippets", False)), + autogen_snippets=autogen_snippets, templates=tuple(path.expanduser(i) for i in templates), lazy_import=bool(opts.pop('lazy-import', False)), - old_naming=bool(opts.pop('old-naming', False)), + old_naming=old_naming, 
add_iam_methods=bool(opts.pop('add-iam-methods', False)), metadata=bool(opts.pop('metadata', False)), # transport should include desired transports delimited by '+', e.g. transport='grpc+rest' diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index ebf0367194ef..3482a0df4141 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -255,7 +255,10 @@ def test_get_response_enumerates_proto(): def test_get_response_divides_subpackages(): - g = make_generator() + # NOTE: autogen-snippets is intentionally disabled for this test + # The API schema below is incomplete and will result in errors when the + # snippetgen logic tries to parse it. + g = make_generator("autogen-snippets=false") api_schema = api.API.build( [ descriptor_pb2.FileDescriptorProto( @@ -290,7 +293,7 @@ def test_get_response_divides_subpackages(): """.strip() ) cgr = g.get_response(api_schema=api_schema, - opts=Options.build("")) + opts=Options.build("autogen-snippets=false")) assert len(cgr.file) == 6 assert {i.name for i in cgr.file} == { "foo/types/top.py", @@ -466,7 +469,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): actual_response = g.get_response( - api_schema, opts=Options.build("")) + api_schema, opts=Options.build("autogen-snippets=False")) expected_snippet_index_json = { "snippets": [ @@ -606,7 +609,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): actual_response = g.get_response(api_schema, - opts=Options.build("")) + opts=Options.build("autogen-snippets=False")) expected_snippet_metadata_json = { "snippets": [ diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py 
b/packages/gapic-generator/tests/unit/generator/test_options.py index f881e1f55190..edf9846872b2 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -171,9 +171,7 @@ def test_options_service_yaml_config(fs): def test_options_bool_flags(): - # All these options are default False. - # If new options violate this assumption, - # this test may need to be tweaked. + # Most options are default False. # New options should follow the dash-case/snake_case convention. opt_str_to_attr_name = { name: re.sub(r"-", "_", name) @@ -191,3 +189,22 @@ def test_options_bool_flags(): options = Options.build(opt) assert getattr(options, attr) + + # Check autogen-snippets separately, as it is default True + options = Options.build("") + assert options.autogen_snippets + + options = Options.build("autogen-snippets=False") + assert not options.autogen_snippets + + +def test_options_autogen_snippets_false_for_old_naming(): + # NOTE: Snippets are not currently correct for the alternative (Ads) templates + # so always disable snippetgen in that case + # https://github.com/googleapis/gapic-generator-python/issues/1052 + options = Options.build("old-naming") + assert not options.autogen_snippets + + # Even if autogen-snippets is set to True, do not enable snippetgen + options = Options.build("old-naming,autogen-snippets=True") + assert not options.autogen_snippets From 012f60b818d9552ec05c47f30081c897038d5600 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 10:21:29 -0800 Subject: [PATCH 0728/1339] chore(main): release 0.63.0 (#1186) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md 
b/packages/gapic-generator/CHANGELOG.md index b1bb49c12cfb..5b55d9337c95 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [0.63.0](https://github.com/googleapis/gapic-generator-python/compare/v0.62.1...v0.63.0) (2022-02-03) + + +### Features + +* adds dynamic routing files. ([8c191a5](https://github.com/googleapis/gapic-generator-python/commit/8c191a5475f208213806fff81c0efa1d26216cd9)) +* adds dynamic routing. ([#1135](https://github.com/googleapis/gapic-generator-python/issues/1135)) ([8c191a5](https://github.com/googleapis/gapic-generator-python/commit/8c191a5475f208213806fff81c0efa1d26216cd9)) +* enable snippetgen for default templates ([#1171](https://github.com/googleapis/gapic-generator-python/issues/1171)) ([c1af051](https://github.com/googleapis/gapic-generator-python/commit/c1af051743dde2fb40e028c51de85dfea47a793d)) + ### [0.62.1](https://github.com/googleapis/gapic-generator-python/compare/v0.62.0...v0.62.1) (2022-02-02) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 288f4fcf6648..ce983a56dbdb 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.62.1" +version = "0.63.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 0744ed57d8a295b515bfeec7ffe972ed0c859bd2 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 3 Feb 2022 14:29:46 -0800 Subject: [PATCH 0729/1339] fix: disambiguate method names that are reserved in transport classes (#1187) In addition to the method specific stubs they provide, the generated transports expose other methods to e.g. create a gRPC channel. This presents the opportunity for a naming collision if an API has a CreateChannel method. This PR disambiguates colliding method names at the transport level. 
Client level method names are unchanged for ergonomic reasons. --- .../%sub/services/%service/client.py.j2 | 2 +- .../services/%service/transports/base.py.j2 | 6 +- .../services/%service/transports/grpc.py.j2 | 8 +- .../services/%service/transports/rest.py.j2 | 6 +- .../gapic/ads-templates/setup.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 18 +- .../gapic-generator/gapic/schema/wrappers.py | 15 ++ .../%sub/services/%service/async_client.py.j2 | 8 +- .../%sub/services/%service/client.py.j2 | 2 +- .../services/%service/transports/base.py.j2 | 6 +- .../services/%service/transports/grpc.py.j2 | 8 +- .../%service/transports/grpc_asyncio.py.j2 | 8 +- .../services/%service/transports/rest.py.j2 | 6 +- .../%name_%version/%sub/test_%service.py.j2 | 30 +-- packages/gapic-generator/noxfile.py | 37 +-- .../google/longrunning/operations.proto | 247 ++++++++++++++++++ .../tests/fragments/google/protobuf/any.proto | 158 +++++++++++ .../tests/fragments/google/rpc/status.proto | 47 ++++ .../test_reserved_method_names.proto | 82 ++++++ .../tests/unit/schema/wrappers/test_method.py | 18 ++ 20 files changed, 629 insertions(+), 85 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/google/longrunning/operations.proto create mode 100644 packages/gapic-generator/tests/fragments/google/protobuf/any.proto create mode 100644 packages/gapic-generator/tests/fragments/google/rpc/status.proto create mode 100644 packages/gapic-generator/tests/fragments/test_reserved_method_names.proto diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 8f2ae2f41b7e..b31354897236 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -443,7 +443,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] + rpc = self._transport._wrapped_methods[self._transport.{{ method.transport_safe_name|snake_case}}] {% if method.field_headers %} # Certain fields should be provided within the metadata header; diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 7d0ab6ef5e12..ea01ac991bc2 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -120,8 +120,8 @@ class {{ service.name }}Transport(abc.ABC): # Precompute the wrapped methods. 
self._wrapped_methods = { {% for method in service.methods.values() %} - self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( - self.{{ method.name|snake_case }}, + self.{{ method.transport_safe_name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method.transport_safe_name|snake_case }}, {% if method.retry %} default_retry=retries.Retry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} @@ -160,7 +160,7 @@ class {{ service.name }}Transport(abc.ABC): {% for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> Callable[ + def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], Union[ {{ method.output.ident }}, diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 0aca3a55f048..cabc67e443e3 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -249,7 +249,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): {% for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> Callable[ + def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: r"""Return a callable for the{{ ' ' }} @@ -269,13 +269,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '{{ method.name|snake_case }}' not in self._stubs: - self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) - return self._stubs['{{ method.name|snake_case }}'] + return self._stubs['{{ method.transport_safe_name|snake_case }}'] {% endfor %} {% if opts.add_iam_methods %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index eb34c706f2ad..9f00f84ebe25 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -396,12 +396,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% for method in service.methods.values()|sort(attribute="name") %} @property - def {{method.name | snake_case}}(self) -> Callable[ + def {{method.transport_safe_name | snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: - stub = self._STUBS.get("{{method.name | snake_case}}") + stub = self._STUBS.get("{{method.transport_safe_name | snake_case}}") if not stub: - stub = self._STUBS["{{method.name | snake_case}}"] = 
self._{{method.name}}(self._session, self._host, self._interceptor) + stub = self._STUBS["{{method.transport_safe_name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 4827ca35ee16..9d96e1809ee0 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -19,7 +19,7 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.1.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.4.0, < 3.0.0dev', {% else %} 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index d615df153b3f..3592c4a70ed0 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -494,7 +494,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void %} @@ -571,7 +571,7 @@ def test_{{ method_name }}_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: client.{{ method_name }}() call.assert_called() @@ -600,7 +600,7 @@ def test_{{ method_name }}_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: {% if method.void %} call.return_value = None @@ -638,7 +638,7 @@ def test_{{ method_name }}_from_dict_foreign(): ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void %} @@ -668,7 +668,7 @@ def test_{{ method_name }}_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void %} @@ -746,7 +746,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -808,7 +808,7 @@ def test_{{ method_name }}_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Set the response to a series of pages. {% if method.paged_result_field.map%} @@ -1184,7 +1184,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide def test_{{ method_name }}_rest_unset_required_fields(): transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.{{ method.name|snake_case }}._get_unset_required_fields({}) + unset_fields = transport.{{ method.transport_safe_name|snake_case }}._get_unset_required_fields({}) assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{param.name|camel_case}}", {% endfor %}))) @@ -1645,7 +1645,7 @@ def test_{{ service.name|snake_case }}_base_transport(): # raise NotImplementedError. methods = ( {% for method in service.methods.values() %} - '{{ method.name|snake_case }}', + '{{ method.transport_safe_name|snake_case }}', {% endfor %} {% if opts.add_iam_methods %} 'set_iam_policy', diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index aa379f47872d..b9741dacc9ec 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -948,6 +948,21 @@ class Method: def __getattr__(self, name): return getattr(self.method_pb, name) + @property + def transport_safe_name(self) -> str: + # These names conflict with other methods in the transport. + # We don't want to disambiguate the names at the client level + # because the disambiguated name is less convenient and user friendly. + # + # Note: this should really be a class variable, + # but python 3.6 can't handle that. 
+ TRANSPORT_UNSAFE_NAMES = { + "CreateChannel", + "GrpcChannel", + "OperationsClient", + } + return f"{self.name}_" if self.name in TRANSPORT_UNSAFE_NAMES else self.name + @property def is_operation_polling_method(self): return self.output.is_extended_operation and self.options.Extensions[ex_ops_pb2.operation_polling_method] diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index a6e76a595f4c..348f3bf32f00 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -92,7 +92,7 @@ class {{ service.async_client_name }}: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -100,7 +100,7 @@ class {{ service.async_client_name }}: (2) if `client_options.client_cert_source` is provided, use the provided one; if the default client cert source exists, use the default one; otherwise the client cert source is None. - + The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the @@ -118,7 +118,7 @@ class {{ service.async_client_name }}: Returns: Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the client cert source to use. - + Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" @@ -302,7 +302,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.{{ method.name|snake_case }}, + self._client._transport.{{ method.transport_safe_name|snake_case }}, {% if method.retry %} default_retry=retries.Retry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 46c4b6cf29d7..3180125d8217 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -477,7 +477,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.{{ method.name|snake_case}}] + rpc = self._transport._wrapped_methods[self._transport.{{ method.transport_safe_name|snake_case}}] {% if method.explicit_routing %} header_params = {} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 3b803ebb150b..bc37e7602f47 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -120,8 +120,8 @@ class {{ service.name }}Transport(abc.ABC): # Precompute the wrapped methods. 
self._wrapped_methods = { {% for method in service.methods.values() %} - self.{{ method.name|snake_case }}: gapic_v1.method.wrap_method( - self.{{ method.name|snake_case }}, + self.{{ method.transport_safe_name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method.transport_safe_name|snake_case }}, {% if method.retry %} default_retry=retries.Retry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} @@ -160,7 +160,7 @@ class {{ service.name }}Transport(abc.ABC): {% for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> Callable[ + def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], Union[ {{ method.output.ident }}, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 0aca3a55f048..cabc67e443e3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -249,7 +249,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): {% for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> Callable[ + def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], {{ method.output.ident }}]: r"""Return a callable for the{{ ' ' }} @@ -269,13 +269,13 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '{{ method.name|snake_case }}' not in self._stubs: - self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) - return self._stubs['{{ method.name|snake_case }}'] + return self._stubs['{{ method.transport_safe_name|snake_case }}'] {% endfor %} {% if opts.add_iam_methods %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index a6271fe75242..9c9e1e164c68 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -250,7 +250,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): {% for method in service.methods.values() %} @property - def {{ method.name|snake_case }}(self) -> Callable[ + def {{ method.transport_safe_name|snake_case }}(self) -> Callable[ [{{ method.input.ident }}], Awaitable[{{ method.output.ident }}]]: r"""Return a callable for the{{ ' ' }} @@ -270,13 +270,13 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '{{ method.name|snake_case }}' not in self._stubs: - self._stubs['{{ method.name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, ) - return self._stubs['{{ method.name|snake_case }}'] + return self._stubs['{{ method.transport_safe_name|snake_case }}'] {% endfor %} {% if opts.add_iam_methods %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index eb34c706f2ad..dc9c58357979 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -396,12 +396,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% for method in service.methods.values()|sort(attribute="name") %} @property - def {{method.name | snake_case}}(self) -> Callable[ + def {{method.transport_safe_name|snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: - stub = self._STUBS.get("{{method.name | snake_case}}") + stub = self._STUBS.get("{{method.transport_safe_name|snake_case}}") if not stub: - 
stub = self._STUBS["{{method.name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) + stub = self._STUBS["{{method.transport_safe_name|snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 7bde90ac43d5..77a125d222ed 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -576,7 +576,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void %} @@ -653,7 +653,7 @@ def test_{{ method_name }}_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: client.{{ method_name }}() call.assert_called() @@ -682,7 +682,7 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
{% if method.void %} @@ -769,7 +769,7 @@ def test_{{ method.name|snake_case }}_routing_parameters(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: {% if method.void %} call.return_value = None @@ -810,7 +810,7 @@ def test_{{ method_name }}_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: {% if method.void %} call.return_value = None @@ -855,7 +855,7 @@ async def test_{{ method_name }}_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: {% if method.void %} call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -892,7 +892,7 @@ def test_{{ method_name }}_from_dict_foreign(): ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void %} @@ -922,7 +922,7 @@ def test_{{ method_name }}_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. 
{% if method.void %} @@ -995,7 +995,7 @@ async def test_{{ method_name }}_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Designate an appropriate return value for the call. {% if method.void %} @@ -1091,7 +1091,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -1151,7 +1151,7 @@ def test_{{ method_name }}_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: # Set the response to a series of pages. {% if method.paged_result_field.map %} @@ -1224,7 +1224,7 @@ async def test_{{ method_name }}_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1314,7 +1314,7 @@ async def test_{{ method_name }}_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.{{ method.name|snake_case }}), + type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -1621,7 +1621,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide def test_{{ method_name }}_rest_unset_required_fields(): transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.{{ method.name|snake_case }}._get_unset_required_fields({}) + unset_fields = transport.{{ method.transport_safe_name|snake_case }}._get_unset_required_fields({}) assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) {% endif %}{# required_fields #} @@ -2114,7 +2114,7 @@ def test_{{ service.name|snake_case }}_base_transport(): # raise NotImplementedError. methods = ( {% for method in service.methods.values() %} - '{{ method.name|snake_case }}', + '{{ method.transport_safe_name|snake_case }}', {% endfor %} {% if opts.add_iam_methods %} 'set_iam_policy', diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 718469855ce4..3e1b9638c6a3 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -130,9 +130,8 @@ def __call__(self, frag): return "".join(outputs) -# TODO(dovs): ads templates @nox.session(python=ALL_PYTHON) -def fragment(session): +def fragment(session, use_ads_templates=False): session.install( "coverage", "pytest", @@ -144,47 +143,25 @@ def fragment(session): ) session.install("-e", ".") + frag_files = [Path(f) for f in session.posargs] if session.posargs else FRAGMENT_FILES + if os.environ.get("PARALLEL_FRAGMENT_TESTS", "false").lower() == "true": with ThreadPoolExecutor() as p: - all_outs = p.map(FragTester(session, False), FRAGMENT_FILES) + all_outs = p.map(FragTester(session, use_ads_templates), frag_files) output = "".join(all_outs) session.log(output) else: - tester = FragTester(session, False) - for frag in 
FRAGMENT_FILES: + tester = FragTester(session, use_ads_templates) + for frag in frag_files: session.log(tester(frag)) @nox.session(python=ALL_PYTHON[1:]) def fragment_alternative_templates(session): - session.install( - "coverage", - "pytest", - "pytest-cov", - "pytest-xdist", - "asyncmock", - "pytest-asyncio", - "grpcio-tools", - ) - session.install("-e", ".") - - if os.environ.get("PARALLEL_FRAGMENT_TESTS", "false").lower() == "true": - with ThreadPoolExecutor() as p: - all_outs = p.map(FragTester(session, True), FRAGMENT_FILES) - - output = "".join(all_outs) - session.log(output) - else: - tester = FragTester(session, True) - for frag in FRAGMENT_FILES: - session.log(tester(frag)) + fragment(session, use_ads_templates=True) -# TODO(yon-mg): -add compute context manager that includes rest transport -# -add compute unit tests -# (to test against temporarily while rest transport is incomplete) -# (to be removed once all features are complete) @contextmanager def showcase_library( session, templates="DEFAULT", other_opts: typing.Iterable[str] = () diff --git a/packages/gapic-generator/tests/fragments/google/longrunning/operations.proto b/packages/gapic-generator/tests/fragments/google/longrunning/operations.proto new file mode 100644 index 000000000000..299eefb2e561 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/longrunning/operations.proto @@ -0,0 +1,247 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.longrunning; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/protobuf/any.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/rpc/status.proto"; +import "google/protobuf/descriptor.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.LongRunning"; +option go_package = "google.golang.org/genproto/googleapis/longrunning;longrunning"; +option java_multiple_files = true; +option java_outer_classname = "OperationsProto"; +option java_package = "com.google.longrunning"; +option php_namespace = "Google\\LongRunning"; + +extend google.protobuf.MethodOptions { + // Additional information regarding long-running operations. + // In particular, this specifies the types that are returned from + // long-running operations. + // + // Required for methods that return `google.longrunning.Operation`; invalid + // otherwise. + google.longrunning.OperationInfo operation_info = 1049; +} + +// Manages long-running operations with an API service. +// +// When an API method normally takes long time to complete, it can be designed +// to return [Operation][google.longrunning.Operation] to the client, and the client can use this +// interface to receive the real response asynchronously by polling the +// operation resource, or pass the operation resource to another API (such as +// Google Cloud Pub/Sub API) to receive the response. Any API service that +// returns long-running operations should implement the `Operations` interface +// so developers can have a consistent client experience. +service Operations { + option (google.api.default_host) = "longrunning.googleapis.com"; + + // Lists operations that match the specified filter in the request. If the + // server doesn't support this method, it returns `UNIMPLEMENTED`. 
+ // + // NOTE: the `name` binding allows API services to override the binding + // to use different resource name schemes, such as `users/*/operations`. To + // override the binding, API services can add a binding such as + // `"/v1/{name=users/*}/operations"` to their service configuration. + // For backwards compatibility, the default name includes the operations + // collection id, however overriding users must ensure the name binding + // is the parent resource, without the operations collection id. + rpc ListOperations(ListOperationsRequest) returns (ListOperationsResponse) { + option (google.api.http) = { + get: "/v1/{name=operations}" + }; + option (google.api.method_signature) = "name,filter"; + } + + // Gets the latest state of a long-running operation. Clients can use this + // method to poll the operation result at intervals as recommended by the API + // service. + rpc GetOperation(GetOperationRequest) returns (Operation) { + option (google.api.http) = { + get: "/v1/{name=operations/**}" + }; + option (google.api.method_signature) = "name"; + } + + // Deletes a long-running operation. This method indicates that the client is + // no longer interested in the operation result. It does not cancel the + // operation. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + rpc DeleteOperation(DeleteOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=operations/**}" + }; + option (google.api.method_signature) = "name"; + } + + // Starts asynchronous cancellation on a long-running operation. The server + // makes a best effort to cancel the operation, but success is not + // guaranteed. If the server doesn't support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. 
Clients can use + // [Operations.GetOperation][google.longrunning.Operations.GetOperation] or + // other methods to check whether the cancellation succeeded or whether the + // operation completed despite cancellation. On successful cancellation, + // the operation is not deleted; instead, it becomes an operation with + // an [Operation.error][google.longrunning.Operation.error] value with a [google.rpc.Status.code][google.rpc.Status.code] of 1, + // corresponding to `Code.CANCELLED`. + rpc CancelOperation(CancelOperationRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1/{name=operations/**}:cancel" + body: "*" + }; + option (google.api.method_signature) = "name"; + } + + // Waits for the specified long-running operation until it is done or reaches + // at most a specified timeout, returning the latest state. If the operation + // is already done, the latest state is immediately returned. If the timeout + // specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + // timeout is used. If the server does not support this method, it returns + // `google.rpc.Code.UNIMPLEMENTED`. + // Note that this method is on a best-effort basis. It may return the latest + // state before the specified timeout (including immediately), meaning even an + // immediate response is no guarantee that the operation is done. + rpc WaitOperation(WaitOperationRequest) returns (Operation) { + } +} + +// This resource represents a long-running operation that is the result of a +// network API call. +message Operation { + // The server-assigned name, which is only unique within the same service that + // originally returns it. If you use the default HTTP mapping, the + // `name` should be a resource name ending with `operations/{unique_id}`. + string name = 1; + + // Service-specific metadata associated with the operation. It typically + // contains progress information and common metadata such as create time. 
+ // Some services might not provide such metadata. Any method that returns a + // long-running operation should document the metadata type, if any. + google.protobuf.Any metadata = 2; + + // If the value is `false`, it means the operation is still in progress. + // If `true`, the operation is completed, and either `error` or `response` is + // available. + bool done = 3; + + // The operation result, which can be either an `error` or a valid `response`. + // If `done` == `false`, neither `error` nor `response` is set. + // If `done` == `true`, exactly one of `error` or `response` is set. + oneof result { + // The error result of the operation in case of failure or cancellation. + google.rpc.Status error = 4; + + // The normal response of the operation in case of success. If the original + // method returns no data on success, such as `Delete`, the response is + // `google.protobuf.Empty`. If the original method is standard + // `Get`/`Create`/`Update`, the response should be the resource. For other + // methods, the response should have the type `XxxResponse`, where `Xxx` + // is the original method name. For example, if the original method name + // is `TakeSnapshot()`, the inferred response type is + // `TakeSnapshotResponse`. + google.protobuf.Any response = 5; + } +} + +// The request message for [Operations.GetOperation][google.longrunning.Operations.GetOperation]. +message GetOperationRequest { + // The name of the operation resource. + string name = 1; +} + +// The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. +message ListOperationsRequest { + // The name of the operation's parent resource. + string name = 4; + + // The standard list filter. + string filter = 1; + + // The standard list page size. + int32 page_size = 2; + + // The standard list page token. + string page_token = 3; +} + +// The response message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. 
+message ListOperationsResponse { + // A list of operations that matches the specified filter in the request. + repeated Operation operations = 1; + + // The standard List next-page token. + string next_page_token = 2; +} + +// The request message for [Operations.CancelOperation][google.longrunning.Operations.CancelOperation]. +message CancelOperationRequest { + // The name of the operation resource to be cancelled. + string name = 1; +} + +// The request message for [Operations.DeleteOperation][google.longrunning.Operations.DeleteOperation]. +message DeleteOperationRequest { + // The name of the operation resource to be deleted. + string name = 1; +} + +// The request message for [Operations.WaitOperation][google.longrunning.Operations.WaitOperation]. +message WaitOperationRequest { + // The name of the operation resource to wait on. + string name = 1; + + // The maximum duration to wait before timing out. If left blank, the wait + // will be at most the time permitted by the underlying HTTP/RPC protocol. + // If RPC context deadline is also specified, the shorter one will be used. + google.protobuf.Duration timeout = 2; +} + +// A message representing the message types used by a long-running operation. +// +// Example: +// +// rpc LongRunningRecognize(LongRunningRecognizeRequest) +// returns (google.longrunning.Operation) { +// option (google.longrunning.operation_info) = { +// response_type: "LongRunningRecognizeResponse" +// metadata_type: "LongRunningRecognizeMetadata" +// }; +// } +message OperationInfo { + // Required. The message name of the primary return type for this + // long-running operation. + // This type will be used to deserialize the LRO's response. + // + // If the response is in a different package from the rpc, a fully-qualified + // message name must be used (e.g. `google.protobuf.Struct`). + // + // Note: Altering this value constitutes a breaking change. + string response_type = 1; + + // Required. 
The message name of the metadata type for this long-running + // operation. + // + // If the response is in a different package from the rpc, a fully-qualified + // message name must be used (e.g. `google.protobuf.Struct`). + // + // Note: Altering this value constitutes a breaking change. + string metadata_type = 2; +} diff --git a/packages/gapic-generator/tests/fragments/google/protobuf/any.proto b/packages/gapic-generator/tests/fragments/google/protobuf/any.proto new file mode 100644 index 000000000000..6ed8a23cf5a3 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/protobuf/any.proto @@ -0,0 +1,158 @@ +// Protocol Buffers - Google's data interchange format +// Copyright 2008 Google Inc. All rights reserved. +// https://developers.google.com/protocol-buffers/ +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +syntax = "proto3"; + +package google.protobuf; + +option csharp_namespace = "Google.Protobuf.WellKnownTypes"; +option go_package = "google.golang.org/protobuf/types/known/anypb"; +option java_package = "com.google.protobuf"; +option java_outer_classname = "AnyProto"; +option java_multiple_files = true; +option objc_class_prefix = "GPB"; + +// `Any` contains an arbitrary serialized protocol buffer message along with a +// URL that describes the type of the serialized message. +// +// Protobuf library provides support to pack/unpack Any values in the form +// of utility functions or additional generated methods of the Any type. +// +// Example 1: Pack and unpack a message in C++. +// +// Foo foo = ...; +// Any any; +// any.PackFrom(foo); +// ... +// if (any.UnpackTo(&foo)) { +// ... +// } +// +// Example 2: Pack and unpack a message in Java. +// +// Foo foo = ...; +// Any any = Any.pack(foo); +// ... +// if (any.is(Foo.class)) { +// foo = any.unpack(Foo.class); +// } +// +// Example 3: Pack and unpack a message in Python. +// +// foo = Foo(...) +// any = Any() +// any.Pack(foo) +// ... +// if any.Is(Foo.DESCRIPTOR): +// any.Unpack(foo) +// ... +// +// Example 4: Pack and unpack a message in Go +// +// foo := &pb.Foo{...} +// any, err := anypb.New(foo) +// if err != nil { +// ... +// } +// ... +// foo := &pb.Foo{} +// if err := any.UnmarshalTo(foo); err != nil { +// ... 
+// } +// +// The pack methods provided by protobuf library will by default use +// 'type.googleapis.com/full.type.name' as the type URL and the unpack +// methods only use the fully qualified type name after the last '/' +// in the type URL, for example "foo.bar.com/x/y.z" will yield type +// name "y.z". +// +// +// JSON +// ==== +// The JSON representation of an `Any` value uses the regular +// representation of the deserialized, embedded message, with an +// additional field `@type` which contains the type URL. Example: +// +// package google.profile; +// message Person { +// string first_name = 1; +// string last_name = 2; +// } +// +// { +// "@type": "type.googleapis.com/google.profile.Person", +// "firstName": , +// "lastName": +// } +// +// If the embedded message type is well-known and has a custom JSON +// representation, that representation will be embedded adding a field +// `value` which holds the custom JSON in addition to the `@type` +// field. Example (for message [google.protobuf.Duration][]): +// +// { +// "@type": "type.googleapis.com/google.protobuf.Duration", +// "value": "1.212s" +// } +// +message Any { + // A URL/resource name that uniquely identifies the type of the serialized + // protocol buffer message. This string must contain at least + // one "/" character. The last segment of the URL's path must represent + // the fully qualified name of the type (as in + // `path/google.protobuf.Duration`). The name should be in a canonical form + // (e.g., leading "." is not accepted). + // + // In practice, teams usually precompile into the binary all types that they + // expect it to use in the context of Any. However, for URLs which use the + // scheme `http`, `https`, or no scheme, one can optionally set up a type + // server that maps type URLs to message definitions as follows: + // + // * If no scheme is provided, `https` is assumed. 
+ // * An HTTP GET on the URL must yield a [google.protobuf.Type][] + // value in binary format, or produce an error. + // * Applications are allowed to cache lookup results based on the + // URL, or have them precompiled into a binary to avoid any + // lookup. Therefore, binary compatibility needs to be preserved + // on changes to types. (Use versioned type names to manage + // breaking changes.) + // + // Note: this functionality is not currently available in the official + // protobuf release, and it is not used for type URLs beginning with + // type.googleapis.com. + // + // Schemes other than `http`, `https` (or the empty scheme) might be + // used with implementation specific semantics. + // + string type_url = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes value = 2; +} diff --git a/packages/gapic-generator/tests/fragments/google/rpc/status.proto b/packages/gapic-generator/tests/fragments/google/rpc/status.proto new file mode 100644 index 000000000000..3b1f7a932f20 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/rpc/status.proto @@ -0,0 +1,47 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.rpc; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/rpc/status;status"; +option java_multiple_files = true; +option java_outer_classname = "StatusProto"; +option java_package = "com.google.rpc"; +option objc_class_prefix = "RPC"; + +// The `Status` type defines a logical error model that is suitable for +// different programming environments, including REST APIs and RPC APIs. It is +// used by [gRPC](https://github.com/grpc). Each `Status` message contains +// three pieces of data: error code, error message, and error details. +// +// You can find out more about this error model and how to work with it in the +// [API Design Guide](https://cloud.google.com/apis/design/errors). +message Status { + // The status code, which should be an enum value of [google.rpc.Code][google.rpc.Code]. + int32 code = 1; + + // A developer-facing error message, which should be in English. Any + // user-facing error message should be localized and sent in the + // [google.rpc.Status.details][google.rpc.Status.details] field, or localized by the client. + string message = 2; + + // A list of messages that carry the error details. There is a common set of + // message types for APIs to use. + repeated google.protobuf.Any details = 3; +} diff --git a/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto b/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto new file mode 100644 index 000000000000..d8f23494fe59 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto @@ -0,0 +1,82 @@ +// Copyright (C) 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/api/annotations.proto"; +import "google/longrunning/operations.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + + rpc CreateChannel(CreateChannelRequest) returns (CreateChannelResponse) { + option (google.api.http) = { + body: "*" + post: "/channels/v1/channel/{channel}" + }; + }; + + rpc GrpcChannel(GrpcChannelRequest) returns (GrpcChannelResponse) { + option (google.api.http) = { + body: "*" + post: "/channels/v1/grpc_channel/{grpc_channel}" + }; + }; + + rpc OperationsClient(OperationsClientRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + body: "*" + post: "/clients/v1/operations_clients/{operations_client}" + }; + option (google.longrunning.operation_info) = { + response_type: "google.fragment.OperationsClientResponse" + metadata_type: "google.fragment.OperationsMetadata" + }; + }; + +} + +message CreateChannelRequest { + string channel = 1; + string info = 2; +} + +message CreateChannelResponse { + string info = 1; +} + +message GrpcChannelRequest { + string grpc_channel = 1; + string info = 2; +} + +message GrpcChannelResponse { + string info = 1; +} + +message OperationsClientRequest { + string operations_client = 1; + string info = 2; +} + +message OperationsClientResponse { + string info = 1; +} + +message OperationsMetadata { + string data = 1; +} \ No newline at end of file diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 1caf1cce3656..2aba8aa44c63 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -862,3 +862,21 @@ def test_is_operation_polling_method(): ) assert not invalid_method.is_operation_polling_method + + +def test_transport_safe_name(): + unsafe_methods = { + name: make_method(name=name) + for name in ["CreateChannel", "GrpcChannel", "OperationsClient"] + } + + safe_methods = { + name: make_method(name=name) + for name in ["Call", "Put", "Hold", "Raise"] + } + + for name, method in safe_methods.items(): + assert method.transport_safe_name == name + + for name, method in unsafe_methods.items(): + assert method.transport_safe_name == f"{name}_" From 6e54acea9617fd07cf2a972d6e7c34c66c93b4a7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 22:43:00 +0000 Subject: [PATCH 0730/1339] chore(main): release 0.63.1 (#1189) :robot: I have created a release *beep* *boop* --- ### [0.63.1](https://github.com/googleapis/gapic-generator-python/compare/v0.63.0...v0.63.1) (2022-02-03) ### Bug Fixes * disambiguate method names that are reserved in transport classes ([#1187](https://github.com/googleapis/gapic-generator-python/issues/1187)) ([78626d8](https://github.com/googleapis/gapic-generator-python/commit/78626d89665128ef7d078ee12b49480475cce9e2)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5b55d9337c95..9714814514f4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.63.1](https://github.com/googleapis/gapic-generator-python/compare/v0.63.0...v0.63.1) (2022-02-03) + + +### Bug Fixes + +* disambiguate method names that are reserved in transport classes ([#1187](https://github.com/googleapis/gapic-generator-python/issues/1187)) ([78626d8](https://github.com/googleapis/gapic-generator-python/commit/78626d89665128ef7d078ee12b49480475cce9e2)) + ## [0.63.0](https://github.com/googleapis/gapic-generator-python/compare/v0.62.1...v0.63.0) (2022-02-03) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ce983a56dbdb..57e3acd867f8 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.0" +version = "0.63.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 5d8eaf806693cf8c4ccd6a08a581122f9517a60c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 7 Feb 2022 17:22:11 +0100 Subject: [PATCH 0731/1339] chore(deps): update dependency setuptools to v60.7.1 (#1184) * chore(deps): update dependency setuptools to v60.7.1 * chore: remove link to issue https://github.com/pypa/setuptools/issues/3072 is resolved. 
Co-authored-by: Anthonios Partheniou Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4f8d81f04a61..49cc7b92a720 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.5.0 # pin setuptools to workaround https://github.com/pypa/setuptools/issues/3072 +setuptools==60.7.1 From d4f696b9e279b3a32484166c9fc5e98e08f49417 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 7 Feb 2022 11:35:01 -0500 Subject: [PATCH 0732/1339] chore: add comment for handling the response in generated snippets (#1193) --- .../gapic/samplegen_utils/snippet_index.py | 2 +- .../templates/examples/feature_fragments.j2 | 10 +- .../services/asset_service/async_client.py | 22 ++- .../asset_v1/services/asset_service/client.py | 22 ++- ..._asset_service_analyze_iam_policy_async.py | 2 +- ...ce_analyze_iam_policy_longrunning_async.py | 2 + ...ice_analyze_iam_policy_longrunning_sync.py | 2 + ...1_asset_service_analyze_iam_policy_sync.py | 2 +- ..._service_batch_get_assets_history_async.py | 2 +- ...t_service_batch_get_assets_history_sync.py | 2 +- ...sset_v1_asset_service_create_feed_async.py | 2 +- ...asset_v1_asset_service_create_feed_sync.py | 2 +- ...et_v1_asset_service_export_assets_async.py | 2 + ...set_v1_asset_service_export_assets_sync.py | 2 + ...d_asset_v1_asset_service_get_feed_async.py | 2 +- ...ed_asset_v1_asset_service_get_feed_sync.py | 2 +- ...sset_v1_asset_service_list_assets_async.py | 2 + ...asset_v1_asset_service_list_assets_sync.py | 2 + ...asset_v1_asset_service_list_feeds_async.py | 2 +- ..._asset_v1_asset_service_list_feeds_sync.py | 2 +- 
...t_service_search_all_iam_policies_async.py | 2 + ...et_service_search_all_iam_policies_sync.py | 2 + ...sset_service_search_all_resources_async.py | 2 + ...asset_service_search_all_resources_sync.py | 2 + ...sset_v1_asset_service_update_feed_async.py | 2 +- ...asset_v1_asset_service_update_feed_sync.py | 2 +- .../snippet_metadata_asset_v1.json | 80 ++++++---- .../services/iam_credentials/async_client.py | 8 +- .../services/iam_credentials/client.py | 8 +- ...credentials_generate_access_token_async.py | 2 +- ..._credentials_generate_access_token_sync.py | 2 +- ...iam_credentials_generate_id_token_async.py | 2 +- ..._iam_credentials_generate_id_token_sync.py | 2 +- ...ials_v1_iam_credentials_sign_blob_async.py | 2 +- ...tials_v1_iam_credentials_sign_blob_sync.py | 2 +- ...tials_v1_iam_credentials_sign_jwt_async.py | 2 +- ...ntials_v1_iam_credentials_sign_jwt_sync.py | 2 +- .../config_service_v2/async_client.py | 36 +++-- .../services/config_service_v2/client.py | 36 +++-- .../logging_service_v2/async_client.py | 10 +- .../services/logging_service_v2/client.py | 10 +- .../metrics_service_v2/async_client.py | 8 +- .../services/metrics_service_v2/client.py | 8 +- ...2_config_service_v2_create_bucket_async.py | 2 +- ...v2_config_service_v2_create_bucket_sync.py | 2 +- ...onfig_service_v2_create_exclusion_async.py | 2 +- ...config_service_v2_create_exclusion_sync.py | 2 +- ..._v2_config_service_v2_create_sink_async.py | 2 +- ...g_v2_config_service_v2_create_sink_sync.py | 2 +- ..._v2_config_service_v2_create_view_async.py | 2 +- ...g_v2_config_service_v2_create_view_sync.py | 2 +- ...g_v2_config_service_v2_get_bucket_async.py | 2 +- ...ng_v2_config_service_v2_get_bucket_sync.py | 2 +- ...nfig_service_v2_get_cmek_settings_async.py | 2 +- ...onfig_service_v2_get_cmek_settings_sync.py | 2 +- ...2_config_service_v2_get_exclusion_async.py | 2 +- ...v2_config_service_v2_get_exclusion_sync.py | 2 +- ...ing_v2_config_service_v2_get_sink_async.py | 2 +- 
...ging_v2_config_service_v2_get_sink_sync.py | 2 +- ...ing_v2_config_service_v2_get_view_async.py | 2 +- ...ging_v2_config_service_v2_get_view_sync.py | 2 +- ...v2_config_service_v2_list_buckets_async.py | 2 + ..._v2_config_service_v2_list_buckets_sync.py | 2 + ...config_service_v2_list_exclusions_async.py | 2 + ..._config_service_v2_list_exclusions_sync.py | 2 + ...g_v2_config_service_v2_list_sinks_async.py | 2 + ...ng_v2_config_service_v2_list_sinks_sync.py | 2 + ...g_v2_config_service_v2_list_views_async.py | 2 + ...ng_v2_config_service_v2_list_views_sync.py | 2 + ...2_config_service_v2_update_bucket_async.py | 2 +- ...v2_config_service_v2_update_bucket_sync.py | 2 +- ...g_service_v2_update_cmek_settings_async.py | 2 +- ...ig_service_v2_update_cmek_settings_sync.py | 2 +- ...onfig_service_v2_update_exclusion_async.py | 2 +- ...config_service_v2_update_exclusion_sync.py | 2 +- ..._v2_config_service_v2_update_sink_async.py | 2 +- ...g_v2_config_service_v2_update_sink_sync.py | 2 +- ..._v2_config_service_v2_update_view_async.py | 2 +- ...g_v2_config_service_v2_update_view_sync.py | 2 +- ...gging_service_v2_list_log_entries_async.py | 2 + ...ogging_service_v2_list_log_entries_sync.py | 2 + ...g_v2_logging_service_v2_list_logs_async.py | 2 + ...ng_v2_logging_service_v2_list_logs_sync.py | 2 + ...st_monitored_resource_descriptors_async.py | 2 + ...ist_monitored_resource_descriptors_sync.py | 2 + ...gging_service_v2_tail_log_entries_async.py | 2 + ...ogging_service_v2_tail_log_entries_sync.py | 2 + ...ging_service_v2_write_log_entries_async.py | 2 +- ...gging_service_v2_write_log_entries_sync.py | 2 +- ...rics_service_v2_create_log_metric_async.py | 2 +- ...trics_service_v2_create_log_metric_sync.py | 2 +- ...metrics_service_v2_get_log_metric_async.py | 2 +- ..._metrics_service_v2_get_log_metric_sync.py | 2 +- ...trics_service_v2_list_log_metrics_async.py | 2 + ...etrics_service_v2_list_log_metrics_sync.py | 2 + ...rics_service_v2_update_log_metric_async.py | 2 +- 
...trics_service_v2_update_log_metric_sync.py | 2 +- .../snippet_metadata_logging_v2.json | 144 +++++++++++------- .../services/cloud_redis/async_client.py | 18 ++- .../redis_v1/services/cloud_redis/client.py | 18 ++- ...is_v1_cloud_redis_create_instance_async.py | 2 + ...dis_v1_cloud_redis_create_instance_sync.py | 2 + ...is_v1_cloud_redis_delete_instance_async.py | 2 + ...dis_v1_cloud_redis_delete_instance_sync.py | 2 + ...is_v1_cloud_redis_export_instance_async.py | 2 + ...dis_v1_cloud_redis_export_instance_sync.py | 2 + ..._v1_cloud_redis_failover_instance_async.py | 2 + ...s_v1_cloud_redis_failover_instance_sync.py | 2 + ...redis_v1_cloud_redis_get_instance_async.py | 2 +- ..._redis_v1_cloud_redis_get_instance_sync.py | 2 +- ...is_v1_cloud_redis_import_instance_async.py | 2 + ...dis_v1_cloud_redis_import_instance_sync.py | 2 + ...dis_v1_cloud_redis_list_instances_async.py | 2 + ...edis_v1_cloud_redis_list_instances_sync.py | 2 + ...is_v1_cloud_redis_update_instance_async.py | 2 + ...dis_v1_cloud_redis_update_instance_sync.py | 2 + ...s_v1_cloud_redis_upgrade_instance_async.py | 2 + ...is_v1_cloud_redis_upgrade_instance_sync.py | 2 + .../snippet_metadata_redis_v1.json | 128 ++++++++++------ ...llusca_v1_snippets_list_resources_async.py | 2 + ...ollusca_v1_snippets_list_resources_sync.py | 2 + ...v1_snippets_method_bidi_streaming_async.py | 2 + ..._v1_snippets_method_bidi_streaming_sync.py | 2 + ...v1_snippets_method_lro_signatures_async.py | 2 + ..._v1_snippets_method_lro_signatures_sync.py | 2 + ..._v1_snippets_method_one_signature_async.py | 2 +- ...a_v1_snippets_method_one_signature_sync.py | 2 +- ..._snippets_method_server_streaming_async.py | 2 + ...1_snippets_method_server_streaming_sync.py | 2 + ...ollusca_v1_snippets_one_of_method_async.py | 2 +- ...pets_one_of_method_required_field_async.py | 2 +- ...ppets_one_of_method_required_field_sync.py | 2 +- ...mollusca_v1_snippets_one_of_method_sync.py | 2 +- .../samplegen/golden_snippets/sample_basic.py | 2 
+- .../golden_snippets/sample_basic_async.py | 2 +- .../sample_basic_unflattenable.py | 2 +- .../unit/samplegen/test_snippet_index.py | 4 +- .../tests/unit/samplegen/test_template.py | 22 ++- 138 files changed, 570 insertions(+), 262 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index 8b7d3d0794bd..f04912d6f7ff 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -25,7 +25,7 @@ CLIENT_INIT_RE = re.compile(r"^\s+# Create a client") REQUEST_INIT_RE = re.compile(r"^\s+# Initialize request argument\(s\)") REQUEST_EXEC_RE = re.compile(r"^\s+# Make the request") -RESPONSE_HANDLING_RE = re.compile(r"^\s+# Handle response") +RESPONSE_HANDLING_RE = re.compile(r"^\s+# Handle the response") class Snippet: diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index d02d5f1f71b8..7c30912c76d4 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -249,19 +249,23 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request response = {{ method_invocation_text|trim }} {% if response_statements %} -# Handle response +# Handle the response {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} {% endif %} {% elif calling_form == calling_form_enum.RequestPagedAll %} page_result = {{ method_invocation_text|trim }} + +# Handle the response {% if transport == "grpc-async" %}async {% endif %}for response in page_result: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} {% elif calling_form == calling_form_enum.RequestPaged %} page_result = {{ 
method_invocation_text|trim }} + +# Handle the response {% if transport == "grpc-async" %}async {% endif %}for page in page_result.pages(): for response in page: {% for statement in response_statements %} @@ -270,6 +274,8 @@ page_result = {{ method_invocation_text|trim }} {% elif calling_form in [calling_form_enum.RequestStreamingServer, calling_form_enum.RequestStreamingBidi] %} stream = {{ method_invocation_text|trim }} + +# Handle the response {% if transport == "grpc-async" %}async {% endif %}for response in stream: {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} @@ -280,6 +286,8 @@ operation = {{ method_invocation_text|trim }} print("Waiting for operation to complete...") response = {% if transport == "grpc-async" %}await {% endif %}operation.result() + +# Handle the response {% for statement in response_statements %} {{ dispatch_statement(statement)|trim }} {% endfor %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index b269dcd87b61..13fc9b54e629 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -234,6 +234,8 @@ def sample_export_assets(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -321,6 +323,8 @@ def sample_list_assets(): # Make the request page_result = client.list_assets(request=request) + + # Handle the response for response in page_result: print(response) @@ -434,7 +438,7 @@ def sample_batch_get_assets_history(): # Make the request response = client.batch_get_assets_history(request=request) - # Handle response + # Handle the 
response print(response) Args: @@ -521,7 +525,7 @@ def sample_create_feed(): # Make the request response = client.create_feed(request=request) - # Handle response + # Handle the response print(response) Args: @@ -626,7 +630,7 @@ def sample_get_feed(): # Make the request response = client.get_feed(request=request) - # Handle response + # Handle the response print(response) Args: @@ -735,7 +739,7 @@ def sample_list_feeds(): # Make the request response = client.list_feeds(request=request) - # Handle response + # Handle the response print(response) Args: @@ -840,7 +844,7 @@ def sample_update_feed(): # Make the request response = client.update_feed(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1036,6 +1040,8 @@ def sample_search_all_resources(): # Make the request page_result = client.search_all_resources(request=request) + + # Handle the response for response in page_result: print(response) @@ -1241,6 +1247,8 @@ def sample_search_all_iam_policies(): # Make the request page_result = client.search_all_iam_policies(request=request) + + # Handle the response for response in page_result: print(response) @@ -1426,7 +1434,7 @@ def sample_analyze_iam_policy(): # Make the request response = client.analyze_iam_policy(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1528,6 +1536,8 @@ def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index cce676d1231e..3b8b459ab5b2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -431,6 +431,8 @@ def sample_export_assets(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -520,6 +522,8 @@ def sample_list_assets(): # Make the request page_result = client.list_assets(request=request) + + # Handle the response for response in page_result: print(response) @@ -634,7 +638,7 @@ def sample_batch_get_assets_history(): # Make the request response = client.batch_get_assets_history(request=request) - # Handle response + # Handle the response print(response) Args: @@ -716,7 +720,7 @@ def sample_create_feed(): # Make the request response = client.create_feed(request=request) - # Handle response + # Handle the response print(response) Args: @@ -822,7 +826,7 @@ def sample_get_feed(): # Make the request response = client.get_feed(request=request) - # Handle response + # Handle the response print(response) Args: @@ -925,7 +929,7 @@ def sample_list_feeds(): # Make the request response = client.list_feeds(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1024,7 +1028,7 @@ def sample_update_feed(): # Make the request response = client.update_feed(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1215,6 +1219,8 @@ def sample_search_all_resources(): # Make the request page_result = client.search_all_resources(request=request) + + # Handle the response for response in page_result: print(response) @@ -1414,6 +1420,8 @@ def sample_search_all_iam_policies(): # Make the request page_result = client.search_all_iam_policies(request=request) + + # Handle the response for response in page_result: print(response) @@ -1593,7 +1601,7 @@ def sample_analyze_iam_policy(): # Make the request response = client.analyze_iam_policy(request=request) - # Handle response + # Handle the response print(response) Args: @@ 
-1691,6 +1699,8 @@ def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py index 1759b7e38c2c..a5d56bf2cbc9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py @@ -42,7 +42,7 @@ async def sample_analyze_iam_policy(): # Make the request response = await client.analyze_iam_policy(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py index 51ef3ab86a18..623137191de1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py @@ -49,6 +49,8 @@ async def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") 
response = await operation.result() + + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py index eee8fb97ed75..17a51630b690 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py @@ -49,6 +49,8 @@ def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py index 9dd189550952..ddf13b735780 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py @@ -42,7 +42,7 @@ def sample_analyze_iam_policy(): # Make the request response = client.analyze_iam_policy(request=request) - # Handle response + # Handle 
the response print(response) # [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py index edae4c7f9289..b84841a0d8cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py @@ -39,7 +39,7 @@ async def sample_batch_get_assets_history(): # Make the request response = await client.batch_get_assets_history(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py index 5bf8c8de15fb..df9319427adf 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py @@ -39,7 +39,7 @@ def sample_batch_get_assets_history(): # Make the request response = client.batch_get_assets_history(request=request) - # Handle response + # Handle the response 
print(response) # [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py index a988dfe5d494..9894a01dca1a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py @@ -44,7 +44,7 @@ async def sample_create_feed(): # Make the request response = await client.create_feed(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_CreateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py index e33028822916..5efaf5e1b12e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py @@ -44,7 +44,7 @@ def sample_create_feed(): # Make the request response = client.create_feed(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_CreateFeed_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py index f384bea0adfb..e9cd14623189 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py @@ -46,6 +46,8 @@ async def sample_export_assets(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_ExportAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py index 4ac84ea71306..8a1a4fffb10f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py @@ -46,6 +46,8 @@ def sample_export_assets(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_ExportAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py index 33c88d3b4b2a..7ce97094b421 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py @@ -39,7 +39,7 @@ async def sample_get_feed(): # Make the request response = await client.get_feed(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_GetFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py index 98834ef0be65..12f433dc1bab 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py @@ -39,7 +39,7 @@ def sample_get_feed(): # Make the request response = client.get_feed(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_GetFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py index 0a6a0b4a098d..67b2c65501a9 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py @@ -38,6 +38,8 @@ async def sample_list_assets(): # Make the request page_result = client.list_assets(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py index 5fad10b12adf..212999ece715 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py @@ -38,6 +38,8 @@ def sample_list_assets(): # Make the request page_result = client.list_assets(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py index 8eec6bfab483..82031eb9a334 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py @@ 
-39,7 +39,7 @@ async def sample_list_feeds(): # Make the request response = await client.list_feeds(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_ListFeeds_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py index 7aba515f8bd3..dc811344f52a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py @@ -39,7 +39,7 @@ def sample_list_feeds(): # Make the request response = client.list_feeds(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_ListFeeds_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py index 9be060836a2c..610d8d6d419e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py @@ -38,6 +38,8 @@ async def sample_search_all_iam_policies(): # Make the request page_result = client.search_all_iam_policies(request=request) + + # Handle the 
response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py index e39c08295ba9..d73fcfd4ce41 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py @@ -38,6 +38,8 @@ def sample_search_all_iam_policies(): # Make the request page_result = client.search_all_iam_policies(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py index aba043b4b80c..7d635dce5280 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py @@ -38,6 +38,8 @@ async def sample_search_all_resources(): # Make the request page_result = client.search_all_resources(request=request) + + # Handle the response async for response in page_result: print(response) diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py index 475d8b4ad58d..061437752016 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py @@ -38,6 +38,8 @@ def sample_search_all_resources(): # Make the request page_result = client.search_all_resources(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py index e409b05b7182..6f1d9b65f2a7 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py @@ -42,7 +42,7 @@ async def sample_update_feed(): # Make the request response = await client.update_feed(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py index 214f4886f215..9d99cb0070fb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py @@ -42,7 +42,7 @@ def sample_update_feed(): # Make the request response = client.update_feed(request=request) - # Handle response + # Handle the response print(response) # [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json index b6d8b364c812..6a052eb1f941 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json @@ -14,12 +14,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async", "segments": [ { - "end": 53, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 55, "start": 27, "type": "SHORT" }, @@ -34,11 +34,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ] @@ -56,12 +58,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync", "segments": [ { - "end": 53, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 55, "start": 27, "type": "SHORT" }, @@ -76,11 +78,13 @@ "type": "REQUEST_INITIALIZATION" }, { + 
"end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ] @@ -451,12 +455,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_ExportAssets_async", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -471,11 +475,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -493,12 +499,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_ExportAssets_sync", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -513,11 +519,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -625,12 +633,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_ListAssets_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -645,11 +653,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -667,12 +677,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_ListAssets_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -687,11 +697,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -799,12 +811,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": 
"FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -819,11 +831,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -841,12 +855,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -861,11 +875,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -884,12 +900,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllResources_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -904,11 +920,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -926,12 +944,12 @@ "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -946,11 +964,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 79cc549c8a50..45f512ccfc85 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -226,7 +226,7 @@ def sample_generate_access_token(): # Make the request response = client.generate_access_token(request=request) - # Handle response + # Handle the response print(response) Args: @@ -380,7 +380,7 @@ def sample_generate_id_token(): # Make the request response = client.generate_id_token(request=request) - # Handle response + # Handle the response print(response) Args: @@ -527,7 +527,7 @@ def sample_sign_blob(): # Make the request response = client.sign_blob(request=request) - # Handle response + # Handle the response print(response) Args: @@ -661,7 +661,7 @@ def sample_sign_jwt(): # Make the request response = client.sign_jwt(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 1510b790ad82..68b1418fdd4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -414,7 +414,7 @@ def sample_generate_access_token(): # Make the request response = client.generate_access_token(request=request) - # Handle response + # Handle the response print(response) Args: @@ -562,7 +562,7 @@ def sample_generate_id_token(): # Make the request response = client.generate_id_token(request=request) - # Handle response + # Handle the response print(response) Args: @@ -703,7 +703,7 @@ def sample_sign_blob(): # 
Make the request response = client.sign_blob(request=request) - # Handle response + # Handle the response print(response) Args: @@ -831,7 +831,7 @@ def sample_sign_jwt(): # Make the request response = client.sign_jwt(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py index 60c86fc65a9f..275d3046ce0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py @@ -40,7 +40,7 @@ async def sample_generate_access_token(): # Make the request response = await client.generate_access_token(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py index 4b47fc7ce4fa..db976fd8a51f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py @@ -40,7 +40,7 @@ def sample_generate_access_token(): # Make the request response = client.generate_access_token(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py index 826bad3e42a7..ca03fcee62c8 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py @@ -40,7 +40,7 @@ async def sample_generate_id_token(): # Make the request response = await client.generate_id_token(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py index c7df5c9bb625..df64df2aced2 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py @@ -40,7 +40,7 @@ def sample_generate_id_token(): # Make the request response = client.generate_id_token(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py index 52c3cc86ee1e..b4d89faa1f34 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py @@ -40,7 +40,7 @@ async def sample_sign_blob(): # Make the request response = await client.sign_blob(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py index c753ba88a4f2..0f874a6a8594 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py @@ -40,7 +40,7 @@ def sample_sign_blob(): # Make the request response = client.sign_blob(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py index 6c1e1cfcd71c..677d1cc5bc4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py @@ -40,7 +40,7 @@ async def sample_sign_jwt(): # Make the request response = await client.sign_jwt(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py index d3c7db98c489..a0f3d1df8e10 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py @@ -40,7 +40,7 @@ def sample_sign_jwt(): # Make the request response = client.sign_jwt(request=request) - # Handle response + # Handle the response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 825e5bcfea86..572a93204cab 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -218,6 +218,8 @@ def sample_list_buckets(): # Make the request page_result = client.list_buckets(request=request) + + # Handle the response for response in page_result: print(response) @@ -332,7 +334,7 @@ def sample_get_bucket(): # Make the request response = client.get_bucket(request=request) - # Handle response + # Handle the response print(response) Args: @@ -407,7 +409,7 @@ def sample_create_bucket(): # Make the request response = client.create_bucket(request=request) - # Handle response + # Handle the response print(response) Args: @@ -489,7 +491,7 @@ def sample_update_bucket(): # Make the request response = client.update_bucket(request=request) - # Handle response + # Handle the response print(response) Args: @@ -687,6 +689,8 @@ def sample_list_views(): # Make the request page_result = client.list_views(request=request) + + # Handle 
the response for response in page_result: print(response) @@ -793,7 +797,7 @@ def sample_get_view(): # Make the request response = client.get_view(request=request) - # Handle response + # Handle the response print(response) Args: @@ -869,7 +873,7 @@ def sample_create_view(): # Make the request response = client.create_view(request=request) - # Handle response + # Handle the response print(response) Args: @@ -944,7 +948,7 @@ def sample_update_view(): # Make the request response = client.update_view(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1078,6 +1082,8 @@ def sample_list_sinks(): # Make the request page_result = client.list_sinks(request=request) + + # Handle the response for response in page_result: print(response) @@ -1197,7 +1203,7 @@ def sample_get_sink(): # Make the request response = client.get_sink(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1324,7 +1330,7 @@ def sample_create_sink(): # Make the request response = client.create_sink(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1455,7 +1461,7 @@ def sample_update_sink(): # Make the request response = client.update_sink(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1702,6 +1708,8 @@ def sample_list_exclusions(): # Make the request page_result = client.list_exclusions(request=request) + + # Handle the response for response in page_result: print(response) @@ -1821,7 +1829,7 @@ def sample_get_exclusion(): # Make the request response = client.get_exclusion(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1950,7 +1958,7 @@ def sample_create_exclusion(): # Make the request response = client.create_exclusion(request=request) - # Handle response + # Handle the response print(response) Args: @@ -2081,7 +2089,7 @@ def sample_update_exclusion(): # Make the request response = client.update_exclusion(request=request) 
- # Handle response + # Handle the response print(response) Args: @@ -2325,7 +2333,7 @@ def sample_get_cmek_settings(): # Make the request response = client.get_cmek_settings(request=request) - # Handle response + # Handle the response print(response) Args: @@ -2427,7 +2435,7 @@ def sample_update_cmek_settings(): # Make the request response = client.update_cmek_settings(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 724f3d323f69..8234a71c56a0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -442,6 +442,8 @@ def sample_list_buckets(): # Make the request page_result = client.list_buckets(request=request) + + # Handle the response for response in page_result: print(response) @@ -557,7 +559,7 @@ def sample_get_bucket(): # Make the request response = client.get_bucket(request=request) - # Handle response + # Handle the response print(response) Args: @@ -634,7 +636,7 @@ def sample_create_bucket(): # Make the request response = client.create_bucket(request=request) - # Handle response + # Handle the response print(response) Args: @@ -718,7 +720,7 @@ def sample_update_bucket(): # Make the request response = client.update_bucket(request=request) - # Handle response + # Handle the response print(response) Args: @@ -922,6 +924,8 @@ def sample_list_views(): # Make the request page_result = client.list_views(request=request) + + # Handle the response for response in page_result: print(response) @@ -1029,7 +1033,7 @@ def sample_get_view(): # Make the request response = 
client.get_view(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1107,7 +1111,7 @@ def sample_create_view(): # Make the request response = client.create_view(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1184,7 +1188,7 @@ def sample_update_view(): # Make the request response = client.update_view(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1322,6 +1326,8 @@ def sample_list_sinks(): # Make the request page_result = client.list_sinks(request=request) + + # Handle the response for response in page_result: print(response) @@ -1434,7 +1440,7 @@ def sample_get_sink(): # Make the request response = client.get_sink(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1554,7 +1560,7 @@ def sample_create_sink(): # Make the request response = client.create_sink(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1686,7 +1692,7 @@ def sample_update_sink(): # Make the request response = client.update_sink(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1919,6 +1925,8 @@ def sample_list_exclusions(): # Make the request page_result = client.list_exclusions(request=request) + + # Handle the response for response in page_result: print(response) @@ -2031,7 +2039,7 @@ def sample_get_exclusion(): # Make the request response = client.get_exclusion(request=request) - # Handle response + # Handle the response print(response) Args: @@ -2153,7 +2161,7 @@ def sample_create_exclusion(): # Make the request response = client.create_exclusion(request=request) - # Handle response + # Handle the response print(response) Args: @@ -2285,7 +2293,7 @@ def sample_update_exclusion(): # Make the request response = client.update_exclusion(request=request) - # Handle response + # Handle the response print(response) Args: @@ -2523,7 +2531,7 @@ def sample_get_cmek_settings(): # Make 
the request response = client.get_cmek_settings(request=request) - # Handle response + # Handle the response print(response) Args: @@ -2627,7 +2635,7 @@ def sample_update_cmek_settings(): # Make the request response = client.update_cmek_settings(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 55ed8dd0693c..43b19c64260f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -330,7 +330,7 @@ def sample_write_log_entries(): # Make the request response = client.write_log_entries(request=request) - # Handle response + # Handle the response print(response) Args: @@ -514,6 +514,8 @@ def sample_list_log_entries(): # Make the request page_result = client.list_log_entries(request=request) + + # Handle the response for response in page_result: print(response) @@ -664,6 +666,8 @@ def sample_list_monitored_resource_descriptors(): # Make the request page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response for response in page_result: print(response) @@ -753,6 +757,8 @@ def sample_list_logs(): # Make the request page_result = client.list_logs(request=request) + + # Handle the response for response in page_result: print(response) @@ -881,6 +887,8 @@ def request_generator(): # Make the request stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response for response in stream: print(response) diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 1d92d1d7dfbf..5553d1cc8357 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -511,7 +511,7 @@ def sample_write_log_entries(): # Make the request response = client.write_log_entries(request=request) - # Handle response + # Handle the response print(response) Args: @@ -687,6 +687,8 @@ def sample_list_log_entries(): # Make the request page_result = client.list_log_entries(request=request) + + # Handle the response for response in page_result: print(response) @@ -830,6 +832,8 @@ def sample_list_monitored_resource_descriptors(): # Make the request page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response for response in page_result: print(response) @@ -913,6 +917,8 @@ def sample_list_logs(): # Make the request page_result = client.list_logs(request=request) + + # Handle the response for response in page_result: print(response) @@ -1034,6 +1040,8 @@ def request_generator(): # Make the request stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response for response in stream: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index f292d70de2dc..7774489bb88c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -211,6 +211,8 @@ def sample_list_log_metrics(): # Make the request page_result = client.list_log_metrics(request=request) + + # Handle the response for response in page_result: print(response) @@ -327,7 +329,7 @@ def sample_get_log_metric(): # Make the request response = client.get_log_metric(request=request) - # Handle response + # Handle the response print(response) Args: @@ -446,7 +448,7 @@ def sample_create_log_metric(): # Make the request response = client.create_log_metric(request=request) - # Handle response + # Handle the response print(response) Args: @@ -570,7 +572,7 @@ def sample_update_log_metric(): # Make the request response = client.update_log_metric(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 7fc77b81d78e..3c521d4b0989 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -399,6 +399,8 @@ def sample_list_log_metrics(): # Make the request page_result = client.list_log_metrics(request=request) + + # Handle the response for response in page_result: print(response) @@ -508,7 +510,7 @@ def sample_get_log_metric(): # Make the request response = client.get_log_metric(request=request) - # Handle response + # Handle the response print(response) Args: @@ -620,7 +622,7 @@ def sample_create_log_metric(): # Make the request response = client.create_log_metric(request=request) - # Handle response + # Handle the response 
print(response) Args: @@ -745,7 +747,7 @@ def sample_update_log_metric(): # Make the request response = client.update_log_metric(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py index 40d790738205..1ea807e1ad05 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py @@ -40,7 +40,7 @@ async def sample_create_bucket(): # Make the request response = await client.create_bucket(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py index 879c4e321f20..f6a5a79199cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py @@ -40,7 +40,7 @@ def sample_create_bucket(): # Make the request response = client.create_bucket(request=request) - # Handle response + # Handle the response 
print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py index 1711976ca93d..a019ffdfa82a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py @@ -44,7 +44,7 @@ async def sample_create_exclusion(): # Make the request response = await client.create_exclusion(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py index 45e2ca3fdd5b..e85600e9a04c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py @@ -44,7 +44,7 @@ def sample_create_exclusion(): # Make the request response = client.create_exclusion(request=request) - # Handle response + # Handle the response print(response) # [END 
logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py index ae0e5e8269ac..9cd9c0ff71e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py @@ -44,7 +44,7 @@ async def sample_create_sink(): # Make the request response = await client.create_sink(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py index 3c04120b3464..fdd970694e18 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py @@ -44,7 +44,7 @@ def sample_create_sink(): # Make the request response = client.create_sink(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py index 4cd7a79399bc..e3cd7f8519e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py @@ -40,7 +40,7 @@ async def sample_create_view(): # Make the request response = await client.create_view(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py index 562fb087b86f..1da4d815a515 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py @@ -40,7 +40,7 @@ def sample_create_view(): # Make the request response = client.create_view(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_CreateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py index 8b1f13f21e32..ba6a4a4ba08e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py @@ -39,7 +39,7 @@ async def sample_get_bucket(): # Make the request response = await client.get_bucket(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py index 9ead630fa3ab..92a01864ad21 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py @@ -39,7 +39,7 @@ def sample_get_bucket(): # Make the request response = client.get_bucket(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py 
index 864fed295575..521315b0c3b5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py @@ -39,7 +39,7 @@ async def sample_get_cmek_settings(): # Make the request response = await client.get_cmek_settings(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py index e3ca86aafcb1..5f698d823b07 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py @@ -39,7 +39,7 @@ def sample_get_cmek_settings(): # Make the request response = client.get_cmek_settings(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py index 7f452fceb610..2af03e6b1d1a 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py @@ -39,7 +39,7 @@ async def sample_get_exclusion(): # Make the request response = await client.get_exclusion(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py index 16aaefefdcf0..453029905f1f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py @@ -39,7 +39,7 @@ def sample_get_exclusion(): # Make the request response = client.get_exclusion(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py index 12f905ebcbe1..1d2cbe30a4f8 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py @@ -39,7 +39,7 @@ async def sample_get_sink(): # Make the request response = await client.get_sink(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py index 9b61f7075e75..596f9c2342c5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py @@ -39,7 +39,7 @@ def sample_get_sink(): # Make the request response = client.get_sink(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py index b4924fffa066..15becab59450 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py @@ -39,7 +39,7 @@ async def sample_get_view(): # Make the request response = await client.get_view(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py index c706290a7b65..188b2b80c768 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py @@ -39,7 +39,7 @@ def sample_get_view(): # Make the request response = client.get_view(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_GetView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py index 5d71278a2e83..80a8fe9f6d17 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py @@ -38,6 +38,8 @@ async def 
sample_list_buckets(): # Make the request page_result = client.list_buckets(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py index 650229ec53a0..39943150e817 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py @@ -38,6 +38,8 @@ def sample_list_buckets(): # Make the request page_result = client.list_buckets(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py index 646be345cdc1..82f229bcad9b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py @@ -38,6 +38,8 @@ async def sample_list_exclusions(): # Make the request page_result = client.list_exclusions(request=request) + + # Handle the response async for response in page_result: print(response) diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py index 08c01c1237f1..888e6f2e3f05 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py @@ -38,6 +38,8 @@ def sample_list_exclusions(): # Make the request page_result = client.list_exclusions(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py index 10287f1c2aaf..e5a8bf2f723a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py @@ -38,6 +38,8 @@ async def sample_list_sinks(): # Make the request page_result = client.list_sinks(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py index cd6c0da01098..f5286da0a53f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py @@ -38,6 +38,8 @@ def sample_list_sinks(): # Make the request page_result = client.list_sinks(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py index f53490638e1f..b089db3c89e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py @@ -38,6 +38,8 @@ async def sample_list_views(): # Make the request page_result = client.list_views(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py index 4ae6c7d60f40..a02d5b03b3bf 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py @@ -38,6 +38,8 @@ def sample_list_views(): # Make the request page_result = client.list_views(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py index d714e0a3aa64..e1781886c601 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py @@ -39,7 +39,7 @@ async def sample_update_bucket(): # Make the request response = await client.update_bucket(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py index 007a2020349b..6ba5cd29de25 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py @@ -39,7 +39,7 @@ def sample_update_bucket(): # Make the request response = client.update_bucket(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py index 74e24c67a79e..318d33d234f7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py @@ -39,7 +39,7 @@ async def sample_update_cmek_settings(): # Make the request response = await client.update_cmek_settings(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py index 5e19a86ba4fa..02145d1ede6a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py @@ -39,7 +39,7 @@ def sample_update_cmek_settings(): # Make the request response = client.update_cmek_settings(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py index da749b58fb0b..2301627f27e5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py @@ -44,7 +44,7 @@ async def sample_update_exclusion(): # Make the request response = await client.update_exclusion(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py index a0c653b460b9..5bfa0c017b70 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py @@ -44,7 +44,7 @@ def sample_update_exclusion(): # Make the request response = client.update_exclusion(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py index f0d5ce3ca6c8..2983000ce4e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py @@ -44,7 +44,7 @@ async def sample_update_sink(): # Make the request response = await client.update_sink(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py index 5119de8820d5..fc06b2f07bb6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py @@ -44,7 +44,7 @@ def sample_update_sink(): # Make the request response = client.update_sink(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py index 3d4681021c0a..882fafea62e8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py @@ -39,7 +39,7 @@ async def sample_update_view(): # Make the request response = await client.update_view(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py index 9f134431d045..be01a3fe2218 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py 
@@ -39,7 +39,7 @@ def sample_update_view(): # Make the request response = client.update_view(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py index 23de611fb34b..5493f7d5ea52 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py @@ -38,6 +38,8 @@ async def sample_list_log_entries(): # Make the request page_result = client.list_log_entries(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py index 2c83bf9bc155..2a08cb42a85d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py @@ -38,6 +38,8 @@ def sample_list_log_entries(): # Make the request page_result = client.list_log_entries(request=request) + + # Handle the response for 
response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py index fe04f70099fd..9cd5472c4c8e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py @@ -38,6 +38,8 @@ async def sample_list_logs(): # Make the request page_result = client.list_logs(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py index 909c2e721fd4..cda949536987 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py @@ -38,6 +38,8 @@ def sample_list_logs(): # Make the request page_result = client.list_logs(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py index 35fd5d0b001e..63b4fbcff815 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -37,6 +37,8 @@ async def sample_list_monitored_resource_descriptors(): # Make the request page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py index 05cdb4a5aaad..c1789fb8e226 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -37,6 +37,8 @@ def sample_list_monitored_resource_descriptors(): # Make the request page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response for response in page_result: print(response) diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 6ddc30b036c5..0416cc76196a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -47,6 +47,8 @@ def request_generator(): # Make the request stream = await client.tail_log_entries(requests=request_generator()) + + # Handle the response async for response in stream: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index c01d944a8f5a..b341d9aab536 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -47,6 +47,8 @@ def request_generator(): # Make the request stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response for response in stream: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py index 5e019280f3af..4cff3bca7439 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py @@ -42,7 +42,7 @@ async def sample_write_log_entries(): # Make the request response = await client.write_log_entries(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py index 9e03979e5cf8..eb77c2e21f0e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py @@ -42,7 +42,7 @@ def sample_write_log_entries(): # Make the request response = client.write_log_entries(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py index c5f272a0ac6e..1175b4f0e827 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py @@ -44,7 +44,7 @@ async def sample_create_log_metric(): # Make the request response = await client.create_log_metric(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py index 888355efde8f..a228c92efa6f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py @@ -44,7 +44,7 @@ def sample_create_log_metric(): # Make the request response = client.create_log_metric(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py index bf0f0528ce6c..77352b3dda73 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py @@ -39,7 +39,7 @@ async def sample_get_log_metric(): # Make the request response = await client.get_log_metric(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py index ee5d478a43aa..cf49377ad04e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py @@ -39,7 +39,7 @@ def sample_get_log_metric(): # Make the request response = client.get_log_metric(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py index c71003435eca..6d270523716b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py @@ -38,6 +38,8 @@ async def sample_list_log_metrics(): # Make the request page_result = client.list_log_metrics(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py index 766079f6b3ce..d4fe00402286 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py @@ -38,6 +38,8 @@ def sample_list_log_metrics(): # Make the request page_result = client.list_log_metrics(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py index 3f644809cd85..55bec2442ced 
100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py @@ -44,7 +44,7 @@ async def sample_update_log_metric(): # Make the request response = await client.update_log_metric(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py index 18d3b2f8d1c5..0cb8bc9054a5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py @@ -44,7 +44,7 @@ def sample_update_log_metric(): # Make the request response = client.update_log_metric(request=request) - # Handle response + # Handle the response print(response) # [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json index 696911699db9..9ee0df5de427 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -1155,12 +1155,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1175,11 +1175,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1197,12 +1199,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1217,11 +1219,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1240,12 +1244,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1260,11 +1264,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1282,12 +1288,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1302,11 +1308,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1325,12 +1333,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, 
"type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1345,11 +1353,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1367,12 +1377,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1387,11 +1397,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1410,12 +1422,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListViews_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1430,11 +1442,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1452,12 +1466,12 @@ "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListViews_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1472,11 +1486,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2110,12 +2126,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2130,11 +2146,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2152,12 
+2170,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2172,11 +2190,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2195,12 +2215,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2215,11 +2235,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2237,12 +2259,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2257,11 +2279,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2280,12 +2304,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { - "end": 42, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 44, "start": 27, "type": "SHORT" }, @@ -2300,11 +2324,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 40, "start": 38, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 45, + "start": 41, "type": "RESPONSE_HANDLING" } ] @@ -2322,12 +2348,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { - "end": 42, + "end": 44, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 44, "start": 27, "type": "SHORT" }, @@ -2342,11 
+2368,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 40, "start": 38, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 45, + "start": 41, "type": "RESPONSE_HANDLING" } ] @@ -2365,12 +2393,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2385,11 +2413,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 50, "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ] @@ -2407,12 +2437,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -2427,11 +2457,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 50, "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ] @@ -2802,12 +2834,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2822,11 +2854,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2844,12 +2878,12 @@ "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2864,11 +2898,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 8f79ca344630..16ab081efd37 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -244,6 +244,8 @@ def sample_list_instances(): # Make the request page_result = client.list_instances(request=request) + + # Handle the response for response in page_result: print(response) @@ -352,7 +354,7 @@ def sample_get_instance(): # Make the request response = client.get_instance(request=request) - # Handle response + # Handle the response print(response) Args: @@ -471,6 +473,8 @@ def sample_create_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -615,6 +619,8 @@ def sample_update_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -742,6 +748,8 @@ def sample_upgrade_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -873,6 +881,8 @@ def sample_import_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -1000,6 +1010,8 @@ def sample_export_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -1121,6 +1133,8 @@ def sample_failover_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -1241,6 +1255,8 @@ def sample_delete_instance(): 
print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index c4c69e6418dc..5ee6ec46251a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -432,6 +432,8 @@ def sample_list_instances(): # Make the request page_result = client.list_instances(request=request) + + # Handle the response for response in page_result: print(response) @@ -541,7 +543,7 @@ def sample_get_instance(): # Make the request response = client.get_instance(request=request) - # Handle response + # Handle the response print(response) Args: @@ -661,6 +663,8 @@ def sample_create_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -806,6 +810,8 @@ def sample_update_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -934,6 +940,8 @@ def sample_upgrade_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -1066,6 +1074,8 @@ def sample_import_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -1194,6 +1204,8 @@ def sample_export_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: @@ -1316,6 +1328,8 @@ def sample_failover_instance(): print("Waiting for operation to complete...") response = 
operation.result() + + # Handle the response print(response) Args: @@ -1437,6 +1451,8 @@ def sample_delete_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py index 33c8e4b43269..500ed1b479c0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py @@ -49,6 +49,8 @@ async def sample_create_instance(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py index 4390eea49f62..66fcf9e4ca48 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py @@ -49,6 +49,8 @@ def sample_create_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END 
redis_generated_redis_v1_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py index 0519037e8228..359aab4e5b0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py @@ -42,6 +42,8 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py index ace8e8bcb8de..1dbbc9a65871 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py @@ -42,6 +42,8 @@ def sample_delete_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_DeleteInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py index 22bcf3e41363..8b0e87eaeed3 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py @@ -46,6 +46,8 @@ async def sample_export_instance(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_ExportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py index 3524ddc9aa0d..a33bbd295ed7 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py @@ -46,6 +46,8 @@ def sample_export_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_ExportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py index c29701c80a81..c1e0c8c99a81 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py @@ -42,6 +42,8 @@ async def sample_failover_instance(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_FailoverInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py index ff715fd2d192..a6fa6dd166d5 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py @@ -42,6 +42,8 @@ def sample_failover_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_FailoverInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py index 1bb5ff229a75..bd446a23c874 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py @@ -39,7 +39,7 @@ async def sample_get_instance(): # Make the request response = await client.get_instance(request=request) - # Handle response + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py index 7f6b03ea001a..abb06d38a31a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py @@ -39,7 +39,7 @@ def sample_get_instance(): # Make the request response = client.get_instance(request=request) - # Handle response + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py index f1a65c3e09df..3e5f7f479e67 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py @@ -46,6 +46,8 @@ async def sample_import_instance(): print("Waiting for operation to 
complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_ImportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py index e246a349ca11..761227a26dbd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py @@ -46,6 +46,8 @@ def sample_import_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_ImportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py index c3686c396432..c4962e994081 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py @@ -38,6 +38,8 @@ async def sample_list_instances(): # Make the request page_result = client.list_instances(request=request) + + # Handle the response async for response in page_result: print(response) diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py index 0ee590d3a384..597721915a23 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py @@ -38,6 +38,8 @@ def sample_list_instances(): # Make the request page_result = client.list_instances(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py index 89af4b83b5ea..63d5a05ffbf0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py @@ -47,6 +47,8 @@ async def sample_update_instance(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py index 83a3b7476486..d692938f99ca 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py @@ -47,6 +47,8 @@ def sample_update_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py index df0b6cd6d275..7ae624c30e64 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py @@ -43,6 +43,8 @@ async def sample_upgrade_instance(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py index e792eb291a93..4858c84fddd0 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py @@ -43,6 +43,8 @@ def sample_upgrade_instance(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json index 33da11ad344b..a246d7922870 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json @@ -14,12 +14,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_async", "segments": [ { - "end": 53, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 55, "start": 27, "type": "SHORT" }, @@ -34,11 +34,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ] @@ -56,12 +58,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_sync", "segments": [ { - "end": 53, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 55, "start": 27, "type": "SHORT" }, @@ -76,11 +78,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ] @@ -99,12 +103,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_async", "segments": [ { - 
"end": 46, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 48, "start": 27, "type": "SHORT" }, @@ -119,11 +123,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 45, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 49, + "start": 46, "type": "RESPONSE_HANDLING" } ] @@ -141,12 +147,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_sync", "segments": [ { - "end": 46, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 48, "start": 27, "type": "SHORT" }, @@ -161,11 +167,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 45, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 49, + "start": 46, "type": "RESPONSE_HANDLING" } ] @@ -184,12 +192,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ExportInstance_async", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -204,11 +212,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -226,12 +236,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ExportInstance_sync", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -246,11 +256,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -269,12 +281,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_async", "segments": [ { - "end": 46, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 48, "start": 27, "type": "SHORT" }, @@ -289,11 +301,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 45, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 49, + "start": 46, "type": "RESPONSE_HANDLING" } ] @@ -311,12 
+325,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_sync", "segments": [ { - "end": 46, + "end": 48, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 48, "start": 27, "type": "SHORT" }, @@ -331,11 +345,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 45, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 47, + "end": 49, + "start": 46, "type": "RESPONSE_HANDLING" } ] @@ -443,12 +459,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ImportInstance_async", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -463,11 +479,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -485,12 +503,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ImportInstance_sync", "segments": [ { - "end": 50, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 52, "start": 27, "type": "SHORT" }, @@ -505,11 +523,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 43, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -528,12 +548,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -548,11 +568,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -570,12 +592,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -590,11 +612,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": 
"REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -613,12 +637,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_UpdateInstance_async", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -633,11 +657,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 50, "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ] @@ -655,12 +681,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_UpdateInstance_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -675,11 +701,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 50, "start": 44, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ] @@ -698,12 +726,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_async", "segments": [ { - "end": 47, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 49, "start": 27, "type": "SHORT" }, @@ -718,11 +746,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 46, "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] @@ -740,12 +770,12 @@ "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync", "segments": [ { - "end": 47, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 49, "start": 27, "type": "SHORT" }, @@ -760,11 +790,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 46, "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 48, + "end": 50, + "start": 47, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py index de4e1ab4befb..b862da753932 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py @@ -39,6 +39,8 @@ async def sample_list_resources(): # Make the request page_result = client.list_resources(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py index 4832779217d7..cca546067d58 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py @@ -39,6 +39,8 @@ def sample_list_resources(): # Make the request page_result = client.list_resources(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py index 2e0013366b67..d22dd32e829d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -47,6 +47,8 @@ def request_generator(): # Make the request stream = await client.method_bidi_streaming(requests=request_generator()) + + # 
Handle the response async for response in stream: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index 50981f235622..2d1ae20b55ad 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -47,6 +47,8 @@ def request_generator(): # Make the request stream = client.method_bidi_streaming(requests=request_generator()) + + # Handle the response for response in stream: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py index a66fcd660a01..72d74351e860 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py @@ -50,6 +50,8 @@ async def sample_method_lro_signatures(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py index 2a8fd20deb6d..285e55c2b247 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py @@ -50,6 +50,8 @@ def sample_method_lro_signatures(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py index 36f286da93fd..527bb4b81325 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py @@ -47,7 +47,7 @@ async def sample_method_one_signature(): # Make the request response = await client.method_one_signature(request=request) - # Handle response + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py index fad4a7d79539..7e0101279b14 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py @@ -47,7 +47,7 @@ def sample_method_one_signature(): # Make the request response = client.method_one_signature(request=request) - # Handle 
response + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py index 4a397d798500..2812acd9d989 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py @@ -46,6 +46,8 @@ async def sample_method_server_streaming(): # Make the request stream = await client.method_server_streaming(request=request) + + # Handle the response async for response in stream: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py index 1701a3abc2e0..3d94b3238008 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py @@ -46,6 +46,8 @@ def sample_method_server_streaming(): # Make the request stream = client.method_server_streaming(request=request) + + # Handle the response for response in stream: print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py index 4a6d9f682726..5ac2996b8c91 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py @@ -40,7 +40,7 @@ async def sample_one_of_method(): # Make the request response = await client.one_of_method(request=request) - # Handle response + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py index 92a5bef4b412..52273f55f5db 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py @@ -40,7 +40,7 @@ async def sample_one_of_method_required_field(): # Make the request response = await client.one_of_method_required_field(request=request) - # Handle response + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py index 3a3c4818c36a..fe0b5eb7eef3 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py @@ -40,7 +40,7 @@ def sample_one_of_method_required_field(): # 
Make the request response = client.one_of_method_required_field(request=request) - # Handle response + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py index 7372dfbd76e2..b5bb96552c07 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py @@ -40,7 +40,7 @@ def sample_one_of_method(): # Make the request response = client.one_of_method(request=request) - # Handle response + # Handle the response print(response) # [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index 76b0b58bff74..a0f06b285504 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -48,7 +48,7 @@ def sample_classify(video, location): # Make the request response = client.classify(request=request) - # Handle response + # Handle the response print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 71a6d3853cad..fee93ebf2dfd 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ 
-48,7 +48,7 @@ async def sample_classify(video, location): # Make the request response = await client.classify(request=request) - # Handle response + # Handle the response print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index 76b0b58bff74..a0f06b285504 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -48,7 +48,7 @@ def sample_classify(video, location): # Make the request response = client.classify(request=request) - # Handle response + # Handle the response print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index 8f9db942369f..e1334a6b268f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -49,7 +49,7 @@ def sample_classify(video, location): # Make the request response = client.classify(request=request) - # Handle response + # Handle the response print(f"Mollusc is a \"{response.taxonomy}\"") # [END mollusc_classify_sync]""" @@ -103,7 +103,7 @@ def sample_classify(video, location): # Make the request response = client.classify(request=request) - # Handle response + # Handle the response print(f"Mollusc is a \"{response.taxonomy}\"") """ diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 15b59e5369d0..4d1d26c2e804 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ 
b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -746,7 +746,7 @@ def test_render_calling_form_request(): # Make the request response = TEST_INVOCATION_TXT - # Handle response + # Handle the response print("Test print statement") ''', calling_form_enum=CallingForm, @@ -759,6 +759,8 @@ def test_render_calling_form_paged_all(): ''' # Make the request page_result = TEST_INVOCATION_TXT + + # Handle the response for response in page_result: print("Test print statement") ''', @@ -772,6 +774,8 @@ def test_render_calling_form_paged_all_async(): ''' # Make the request page_result = TEST_INVOCATION_TXT + + # Handle the response async for response in page_result: print("Test print statement") ''', @@ -785,6 +789,8 @@ def test_render_calling_form_paged(): ''' # Make the request page_result = TEST_INVOCATION_TXT + + # Handle the response for page in page_result.pages(): for response in page: print("Test print statement") @@ -799,6 +805,8 @@ def test_render_calling_form_paged_async(): ''' # Make the request page_result = TEST_INVOCATION_TXT + + # Handle the response async for page in page_result.pages(): for response in page: print("Test print statement") @@ -813,6 +821,8 @@ def test_render_calling_form_streaming_server(): ''' # Make the request stream = TEST_INVOCATION_TXT + + # Handle the response for response in stream: print("Test print statement") ''', @@ -826,6 +836,8 @@ def test_render_calling_form_streaming_server_async(): ''' # Make the request stream = TEST_INVOCATION_TXT + + # Handle the response async for response in stream: print("Test print statement") ''', @@ -839,6 +851,8 @@ def test_render_calling_form_streaming_bidi(): ''' # Make the request stream = TEST_INVOCATION_TXT + + # Handle the response for response in stream: print("Test print statement") ''', @@ -852,6 +866,8 @@ def test_render_calling_form_streaming_bidi_async(): ''' # Make the request stream = TEST_INVOCATION_TXT + + # Handle the response async for response in stream: print("Test 
print statement") ''', @@ -869,6 +885,8 @@ def test_render_calling_form_longrunning(): print("Waiting for operation to complete...") response = operation.result() + + # Handle the response print("Test print statement") ''', calling_form_enum=CallingForm, @@ -885,6 +903,8 @@ def test_render_calling_form_longrunning_async(): print("Waiting for operation to complete...") response = await operation.result() + + # Handle the response print("Test print statement") ''', calling_form_enum=CallingForm, From 084408a569c4dd8f8b6b933f4d66f72da910fdd2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 7 Feb 2022 18:33:56 +0100 Subject: [PATCH 0733/1339] chore(deps): update dependency setuptools to v60.8.1 (#1194) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 49cc7b92a720..354218733905 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.7.1 +setuptools==60.8.1 From b37098ee7a8f6df1028d0eda961596f5e85e3ff5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 9 Feb 2022 18:59:59 +0100 Subject: [PATCH 0734/1339] chore(deps): update dependency setuptools to v60.8.2 (#1196) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 354218733905..3eb78a745614 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.8.1 +setuptools==60.8.2 From 
45ec9454845eaecf24e96ac20d10a3a2738ccbef Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 9 Feb 2022 18:03:07 -0500 Subject: [PATCH 0735/1339] fix: fix lint sessions on generated samples (#1192) --- .../.github/sync-repo-settings.yaml | 1 + .../.github/workflows/tests.yaml | 18 +++++ .../gapic/templates/.flake8.j2 | 33 +++++++++ .../templates/examples/feature_fragments.j2 | 5 +- .../gapic/templates/noxfile.py.j2 | 44 +++++++++++- .../gapic/templates/setup.py.j2 | 3 + .../tests/integration/goldens/asset/.flake8 | 33 +++++++++ .../services/asset_service/async_client.py | 2 +- .../asset_v1/services/asset_service/client.py | 2 +- .../integration/goldens/asset/noxfile.py | 43 ++++++++++- ...sset_v1_asset_service_delete_feed_async.py | 2 +- ...asset_v1_asset_service_delete_feed_sync.py | 2 +- .../tests/integration/goldens/asset/setup.py | 3 + .../integration/goldens/credentials/.flake8 | 33 +++++++++ .../goldens/credentials/noxfile.py | 43 ++++++++++- .../integration/goldens/credentials/setup.py | 3 + .../tests/integration/goldens/logging/.flake8 | 33 +++++++++ .../config_service_v2/async_client.py | 10 +-- .../services/config_service_v2/client.py | 10 +-- .../logging_service_v2/async_client.py | 3 +- .../services/logging_service_v2/client.py | 3 +- .../metrics_service_v2/async_client.py | 2 +- .../services/metrics_service_v2/client.py | 2 +- .../integration/goldens/logging/noxfile.py | 43 ++++++++++- ...2_config_service_v2_delete_bucket_async.py | 2 +- ...v2_config_service_v2_delete_bucket_sync.py | 2 +- ...onfig_service_v2_delete_exclusion_async.py | 2 +- ...config_service_v2_delete_exclusion_sync.py | 2 +- ..._v2_config_service_v2_delete_sink_async.py | 2 +- ...g_v2_config_service_v2_delete_sink_sync.py | 2 +- ..._v2_config_service_v2_delete_view_async.py | 2 +- ...g_v2_config_service_v2_delete_view_sync.py | 2 +- ...config_service_v2_undelete_bucket_async.py | 2 +- ..._config_service_v2_undelete_bucket_sync.py | 2 +- 
..._v2_logging_service_v2_delete_log_async.py | 2 +- ...g_v2_logging_service_v2_delete_log_sync.py | 2 +- ...gging_service_v2_tail_log_entries_async.py | 1 + ...ogging_service_v2_tail_log_entries_sync.py | 1 + ...rics_service_v2_delete_log_metric_async.py | 2 +- ...trics_service_v2_delete_log_metric_sync.py | 2 +- .../snippet_metadata_logging_v2.json | 28 ++++---- .../integration/goldens/logging/setup.py | 3 + .../tests/integration/goldens/redis/.flake8 | 33 +++++++++ .../integration/goldens/redis/noxfile.py | 43 ++++++++++- .../tests/integration/goldens/redis/setup.py | 3 + ...v1_snippets_method_bidi_streaming_async.py | 1 + ..._v1_snippets_method_bidi_streaming_sync.py | 1 + .../goldens/snippet_metadata_mollusca_v1.json | 72 +++++++++++-------- .../sample_basic_void_method.py | 2 +- 49 files changed, 505 insertions(+), 87 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/.flake8.j2 create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/.flake8 create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/.flake8 create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/.flake8 create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/.flake8 diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 8dc6d0203ff4..a735a93570aa 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -23,6 +23,7 @@ branchProtectionRules: - 'showcase-unit (3.9, _alternative_templates)' - 'showcase-unit-add-iam-methods' - 'integration' + - 'goldens-lint' - 'style-check' - 'snippetgen' - 'unit (3.6)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index f65833509fd5..ad504630fcca 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ 
b/packages/gapic-generator/.github/workflows/tests.yaml @@ -316,6 +316,24 @@ jobs: echo "The old one will disappear after 7 days." - name: Integration Tests run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis + goldens-lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.9 + uses: actions/setup-python@v2 + with: + python-version: 3.9 + cache: 'pip' + - name: Install nox. + run: | + python -m pip install nox + - name: Run blacken and lint on the generated output. + run: | + nox -f tests/integration/goldens/asset/noxfile.py -s blacken lint_setup_py lint + nox -f tests/integration/goldens/credentials/noxfile.py -s blacken lint_setup_py lint + nox -f tests/integration/goldens/logging/noxfile.py -s blacken lint_setup_py lint + nox -f tests/integration/goldens/redis/noxfile.py -s blacken lint_setup_py lint style-check: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/gapic/templates/.flake8.j2 b/packages/gapic-generator/gapic/templates/.flake8.j2 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/.flake8.j2 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. 
+ **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index 7c30912c76d4..d6d0459d4aad 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -190,6 +190,7 @@ configs the client streaming logic should be modified to allow 2+ request object # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] + def request_generator(): for request in requests: yield request @@ -245,8 +246,8 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request {# it's just easier to set up client side streaming and other things from outside this macro. #} {% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, transport, response_statements ) %} # Make the request -{% if calling_form == calling_form_enum.Request %} -response = {{ method_invocation_text|trim }} +{% if calling_form in [calling_form_enum.Request, calling_form_enum.RequestStreamingClient] %} +{% if response_statements %}response = {% endif %}{{ method_invocation_text|trim }} {% if response_statements %} # Handle the response diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index de8dcac80415..34e19ac4722f 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -16,6 +16,9 @@ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") 
+BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.9" nox.sessions = [ "unit", @@ -24,6 +27,9 @@ nox.sessions = [ "check_lower_bounds" # exclude update_lower_bounds from default "docs", + "blacken", + "lint", + "lint_setup_py", ] @nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) @@ -44,7 +50,7 @@ def unit(session): ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit @@ -103,7 +109,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -123,4 +129,38 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 575539a36560..223147682849 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -16,6 +16,9 @@ with io.open(readme_filename, encoding='utf-8') as readme_file: setuptools.setup( name='{{ api.naming.warehouse_package_name }}', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-{{ api.naming.warehouse_package_name }}", version=version, long_description=readme, {% if api.naming.namespace %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.flake8 b/packages/gapic-generator/tests/integration/goldens/asset/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. 
DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 13fc9b54e629..b072b917179e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -943,7 +943,7 @@ def sample_delete_feed(): ) # Make the request - response = client.delete_feed(request=request) + client.delete_feed(request=request) Args: request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 3b8b459ab5b2..c43399267b1b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1128,7 +1128,7 @@ def sample_delete_feed(): ) # Make the request - response = client.delete_feed(request=request) + client.delete_feed(request=request) Args: request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 0c0e075dd885..b96e8d8e174f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -27,6 +27,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.9" nox.sessions = [ "unit", @@ -35,6 +38,9 @@ "check_lower_bounds" # exclude update_lower_bounds from default "docs", + "blacken", + "lint", + "lint_setup_py", ] @nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) @@ -55,7 +61,7 @@ def unit(session): ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit @@ -110,7 +116,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -130,3 +136,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py index d9fea4281440..3af184162064 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py @@ -37,7 +37,7 @@ async def sample_delete_feed(): ) # Make the request - response = await client.delete_feed(request=request) + await client.delete_feed(request=request) # [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py index f9008baa7b8b..f8e462e5899e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py @@ -37,7 +37,7 @@ def sample_delete_feed(): ) # Make the request - 
response = client.delete_feed(request=request) + client.delete_feed(request=request) # [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 82da50b971e2..75d7fc7c80c2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -27,6 +27,9 @@ setuptools.setup( name='google-cloud-asset', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-google-cloud-asset", version=version, long_description=readme, packages=setuptools.PEP420PackageFinder.find(), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 b/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index b8c5b4e7fab9..c05f13f142d4 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -27,6 +27,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.9" nox.sessions = [ "unit", @@ -35,6 +38,9 @@ "check_lower_bounds" # exclude update_lower_bounds from default "docs", + "blacken", + "lint", + "lint_setup_py", ] @nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) @@ -55,7 +61,7 @@ def unit(session): ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit @@ -110,7 +116,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -130,3 +136,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 1b831af2363b..f3c9b857e2e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -27,6 +27,9 @@ setuptools.setup( name='google-iam-credentials', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-google-iam-credentials", version=version, long_description=readme, packages=setuptools.PEP420PackageFinder.find(), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.flake8 b/packages/gapic-generator/tests/integration/goldens/logging/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 572a93204cab..3c5176df02af 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -563,7 +563,7 @@ def sample_delete_bucket(): ) # Make the request - response = client.delete_bucket(request=request) + client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -626,7 +626,7 @@ def sample_undelete_bucket(): ) # Make the request - response = client.undelete_bucket(request=request) + client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -1019,7 +1019,7 @@ def sample_delete_view(): ) # Make the request - response = client.delete_view(request=request) + client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1609,7 +1609,7 @@ def sample_delete_sink(): ) # Make the request - response = client.delete_sink(request=request) + client.delete_sink(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): @@ -2224,7 +2224,7 @@ def sample_delete_exclusion(): ) # Make the request - response = client.delete_exclusion(request=request) + client.delete_exclusion(request=request) Args: request 
(Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 8234a71c56a0..b5d060646a37 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -794,7 +794,7 @@ def sample_delete_bucket(): ) # Make the request - response = client.delete_bucket(request=request) + client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -859,7 +859,7 @@ def sample_undelete_bucket(): ) # Make the request - response = client.undelete_bucket(request=request) + client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -1261,7 +1261,7 @@ def sample_delete_view(): ) # Make the request - response = client.delete_view(request=request) + client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1833,7 +1833,7 @@ def sample_delete_sink(): ) # Make the request - response = client.delete_sink(request=request) + client.delete_sink(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): @@ -2429,7 +2429,7 @@ def sample_delete_exclusion(): ) # Make the request - response = client.delete_exclusion(request=request) + client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 43b19c64260f..b12aad3fa617 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -214,7 +214,7 @@ def sample_delete_log(): ) # Make the request - response = client.delete_log(request=request) + client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -881,6 +881,7 @@ def sample_tail_log_entries(): # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] + def request_generator(): for request in requests: yield request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 5553d1cc8357..04ff20311c1a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -402,7 +402,7 @@ def sample_delete_log(): ) # Make the request - response = client.delete_log(request=request) + client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -1034,6 +1034,7 @@ def sample_tail_log_entries(): # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] + def request_generator(): for request in requests: yield request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 7774489bb88c..b0f35bbd9819 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -695,7 +695,7 @@ def sample_delete_log_metric(): ) # Make the request - response = client.delete_log_metric(request=request) + client.delete_log_metric(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 3c521d4b0989..dd56772e8ebe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -863,7 +863,7 @@ def sample_delete_log_metric(): ) # Make the request - response = client.delete_log_metric(request=request) + client.delete_log_metric(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 4cbb5eafbbb6..342a72cfe845 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -27,6 +27,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.9" nox.sessions = [ "unit", @@ -35,6 +38,9 @@ "check_lower_bounds" # exclude update_lower_bounds from default "docs", + "blacken", + "lint", + "lint_setup_py", ] @nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) @@ -55,7 +61,7 @@ def unit(session): ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit @@ -110,7 +116,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -130,3 +136,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py index 4ad451d92fa7..fd625364a199 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py @@ -37,7 +37,7 @@ async def sample_delete_bucket(): ) # Make the request - response = await client.delete_bucket(request=request) + await client.delete_bucket(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py index 011e9a1b6f63..32a98c84974c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py @@ 
-37,7 +37,7 @@ def sample_delete_bucket(): ) # Make the request - response = client.delete_bucket(request=request) + client.delete_bucket(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py index db7b9de949fd..4793d48bab33 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py @@ -37,7 +37,7 @@ async def sample_delete_exclusion(): ) # Make the request - response = await client.delete_exclusion(request=request) + await client.delete_exclusion(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py index ba4202f5e2de..a4dbefa4ff21 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py @@ -37,7 +37,7 @@ def sample_delete_exclusion(): ) # Make the request - response = 
client.delete_exclusion(request=request) + client.delete_exclusion(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py index f4d7d596d422..bcb30f6c1d63 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py @@ -37,7 +37,7 @@ async def sample_delete_sink(): ) # Make the request - response = await client.delete_sink(request=request) + await client.delete_sink(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py index 8781209f91c6..0b79d20ce458 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py @@ -37,7 +37,7 @@ def sample_delete_sink(): ) # Make the request - response = client.delete_sink(request=request) + client.delete_sink(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py index 7796ff52f417..39f4b320d928 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py @@ -37,7 +37,7 @@ async def sample_delete_view(): ) # Make the request - response = await client.delete_view(request=request) + await client.delete_view(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py index 5c53ebc0c732..967a0e866df7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py @@ -37,7 +37,7 @@ def sample_delete_view(): ) # Make the request - response = client.delete_view(request=request) + client.delete_view(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py index 1c7f06ffdfa9..cbc048ccea92 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py @@ -37,7 +37,7 @@ async def sample_undelete_bucket(): ) # Make the request - response = await client.undelete_bucket(request=request) + await client.undelete_bucket(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py index 0b1484594963..43e9cbb03277 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py @@ -37,7 +37,7 @@ def sample_undelete_bucket(): ) # Make the request - response = client.undelete_bucket(request=request) + client.undelete_bucket(request=request) # [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py index 2e291f52922b..40306083aa2a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py @@ -37,7 +37,7 @@ async def sample_delete_log(): ) # Make the request - response = await client.delete_log(request=request) + await client.delete_log(request=request) # [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py index 5f7540b92692..ce8a5d95ad69 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py @@ -37,7 +37,7 @@ def sample_delete_log(): ) # Make the request - response = client.delete_log(request=request) + client.delete_log(request=request) # [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py index 
0416cc76196a..2053a8350f50 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py @@ -41,6 +41,7 @@ async def sample_tail_log_entries(): # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] + def request_generator(): for request in requests: yield request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py index b341d9aab536..070e8d0abb15 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py @@ -41,6 +41,7 @@ def sample_tail_log_entries(): # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] + def request_generator(): for request in requests: yield request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py index 21ac440ddcdb..da864c3e2d90 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py @@ -37,7 +37,7 @@ async def sample_delete_log_metric(): ) # Make the request - response = await client.delete_log_metric(request=request) + await client.delete_log_metric(request=request) # [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py index 4ba7cf79ddb0..d6cdc96a3a48 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py @@ -37,7 +37,7 @@ def sample_delete_log_metric(): ) # Make the request - response = client.delete_log_metric(request=request) + client.delete_log_metric(request=request) # [END 
logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json index 9ee0df5de427..099739a006c2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -2393,12 +2393,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2408,18 +2408,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 51, + "end": 56, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -2437,12 +2437,12 @@ "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2452,18 +2452,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 51, + "end": 56, + "start": 52, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 5d505b2fa2c3..bf6c27489bfa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -27,6 +27,9 @@ setuptools.setup( name='google-cloud-logging', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-google-cloud-logging", version=version, long_description=readme, packages=setuptools.PEP420PackageFinder.find(), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.flake8 b/packages/gapic-generator/tests/integration/goldens/redis/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 947a5981261b..90788a5afc4d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -27,6 +27,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +BLACK_VERSION = "black==19.10b0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.9" nox.sessions = [ "unit", @@ -35,6 +38,9 @@ "check_lower_bounds" # exclude update_lower_bounds from default "docs", + "blacken", + "lint", + "lint_setup_py", ] @nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) @@ -55,7 +61,7 @@ def unit(session): ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit @@ -110,7 +116,7 @@ def check_lower_bounds(session): str(LOWER_BOUND_CONSTRAINTS_FILE), ) -@nox.session(python='3.9') +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -130,3 +136,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 46d59d5f348a..49cdb8f70a99 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -27,6 +27,9 @@ setuptools.setup( name='google-cloud-redis', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-google-cloud-redis", version=version, long_description=readme, packages=setuptools.PEP420PackageFinder.find(), diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py index d22dd32e829d..294878876e6b 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py @@ -41,6 +41,7 @@ async def sample_method_bidi_streaming(): # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] + def request_generator(): for request in requests: yield request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py index 2d1ae20b55ad..a61e415c5a4e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py @@ -41,6 +41,7 @@ def sample_method_bidi_streaming(): # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] + def request_generator(): for request in requests: yield request diff --git a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json index a5346a359c54..0010781108ec 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json +++ b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json @@ -14,12 +14,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_async", "segments": [ { - "end": 44, + "end": 46, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 46, "start": 27, "type": "SHORT" }, @@ -34,11 +34,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 42, "start": 40, "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 47, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -56,12 +58,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_sync", "segments": [ { - "end": 44, + "end": 46, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 46, "start": 27, "type": "SHORT" }, @@ -76,11 +78,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 42, "start": 40, 
"type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 47, + "start": 43, "type": "RESPONSE_HANDLING" } ] @@ -99,12 +103,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -114,16 +118,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 56, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -141,12 +147,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -156,16 +162,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 56, + "start": 52, "type": "RESPONSE_HANDLING" } ] @@ -184,12 +192,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -204,11 +212,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ] @@ -226,12 +236,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync", "segments": [ { - "end": 54, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 56, "start": 27, "type": "SHORT" }, @@ -246,11 +256,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 57, + "start": 54, "type": 
"RESPONSE_HANDLING" } ] @@ -358,12 +370,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -378,11 +390,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ] @@ -400,12 +414,12 @@ "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -420,11 +434,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 49, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ] diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index 0b617f40ed95..39764bac8129 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -46,7 +46,7 @@ def sample_classify(video, location): ) # Make the request - response = client.classify(request=request) + client.classify(request=request) # [END mollusc_classify_sync] From d6c73e30fb94e9ca575416d974b100bdb137cf8a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 10 Feb 2022 08:39:33 -0700 Subject: [PATCH 0736/1339] chore(main): release 0.63.2 (#1198) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff 
--git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 9714814514f4..f2b702ad6015 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.63.2](https://github.com/googleapis/gapic-generator-python/compare/v0.63.1...v0.63.2) (2022-02-09) + + +### Bug Fixes + +* fix lint sessions on generated samples ([#1192](https://github.com/googleapis/gapic-generator-python/issues/1192)) ([4d0ea18](https://github.com/googleapis/gapic-generator-python/commit/4d0ea182aa5500eee0f5485e88e14043974ae78b)) + ### [0.63.1](https://github.com/googleapis/gapic-generator-python/compare/v0.63.0...v0.63.1) (2022-02-03) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 57e3acd867f8..7c728f62aad6 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.1" +version = "0.63.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 6aacf960edee9015bb9d985694800df3be17109c Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 10 Feb 2022 13:22:53 -0800 Subject: [PATCH 0737/1339] fix: HTTP body field messages with enums or recursive fields (#1201) Minor fix for a generating unit tests for methods where the message that is the body field has a field that is an enum or a recursive field message type. 
--- .../gapic-generator/gapic/schema/wrappers.py | 57 ++++++++++++------- .../gapic-generator/test_utils/test_utils.py | 2 +- .../fragments/test_non_primitive_body.proto | 48 ++++++++++++++++ .../tests/unit/schema/wrappers/test_field.py | 40 ++++++++++++- 4 files changed, 123 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index b9741dacc9ec..9449320156e0 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -97,29 +97,42 @@ def map(self) -> bool: @utils.cached_property def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, Dict[str, Any], List[Any], None]: - # Return messages as dicts and let the message ctor handle the conversion. - if self.message: - if self.map: - # Not worth the hassle, just return an empty map. - return {} + visited_messages = set() - msg_dict = { - f.name: f.mock_value_original_type - for f in self.message.fields.values() - } + def recursive_mock_original_type(field): + if field.message: + # Return messages as dicts and let the message ctor handle the conversion. + if field.message in visited_messages: + return {} - return [msg_dict] if self.repeated else msg_dict + visited_messages.add(field.message) + if field.map: + # Not worth the hassle, just return an empty map. + return {} - answer = self.primitive_mock() or None + msg_dict = { + f.name: recursive_mock_original_type(f) + for f in field.message.fields.values() + } - # If this is a repeated field, then the mock answer should - # be a list. 
- if self.repeated: - first_item = self.primitive_mock(suffix=1) or None - second_item = self.primitive_mock(suffix=2) or None - answer = [first_item, second_item] + return [msg_dict] if field.repeated else msg_dict - return answer + if field.enum: + # First Truthy value, fallback to the first value + return next((v for v in field.type.values if v.number), field.type.values[0]).number + + answer = field.primitive_mock() or None + + # If this is a repeated field, then the mock answer should + # be a list. + if field.repeated: + first_item = field.primitive_mock(suffix=1) or None + second_item = field.primitive_mock(suffix=2) or None + answer = [first_item, second_item] + + return answer + + return recursive_mock_original_type(self) @utils.cached_property def mock_value(self) -> str: @@ -887,8 +900,12 @@ class HttpRule: def path_fields(self, method: "Method") -> List[Tuple[Field, str, str]]: """return list of (name, template) tuples extracted from uri.""" input = method.input - return [(input.get_field(*match.group("name").split(".")), match.group("name"), match.group("template")) - for match in path_template._VARIABLE_RE.finditer(self.uri)] + return [ + (input.get_field(*match.group("name").split(".")), + match.group("name"), match.group("template")) + for match in path_template._VARIABLE_RE.finditer(self.uri) + if match.group("name") + ] def sample_request(self, method: "Method") -> Dict[str, Any]: """return json dict for sample request matching the uri template.""" diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 89e341b5fba7..6da5f76b8da9 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -297,7 +297,7 @@ def make_enum( name: str, package: str = 'foo.bar.v1', module: str = 'baz', - values: typing.Tuple[str, int] = (), + values: typing.Sequence[typing.Tuple[str, int]] = (), meta: metadata.Metadata = None, options: 
desc.EnumOptions = None, ) -> wrappers.EnumType: diff --git a/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto b/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto index f322a747a445..06a16da6ff95 100644 --- a/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto +++ b/packages/gapic-generator/tests/fragments/test_non_primitive_body.proto @@ -29,6 +29,20 @@ service SmallCompute { post: "/computation/v1/first_name/{first_name}/last_name/{last_name}" }; }; + + rpc EnumBody(EnumBodyRequest) returns (EnumBodyResponse) { + option (google.api.http) = { + body: "resource" + post: "/enum_body/v1/names/{name}" + }; + } + + rpc RecursiveBody(RecursiveBodyRequest) returns (RecursiveBodyResponse) { + option (google.api.http) = { + body: "resource" + post: "/recursive_body/v1/names/{name}" + }; + } } message SerialNumber { @@ -50,4 +64,38 @@ message MethodRequest { message MethodResponse { string name = 1; +} + +message EnumBodyRequest { + message Resource{ + enum Ordering { + UNKNOWN = 0; + CHRONOLOGICAL = 1; + ALPHABETICAL = 2; + DIFFICULTY = 3; + } + + Ordering ordering = 1; + } + + string name = 1; + Resource resource = 2; +} + +message EnumBodyResponse { + string data = 1; +} + +message RecursiveBodyRequest { + message Resource { + int32 depth = 1; + Resource child_resource = 2; + } + + string name = 1; + Resource resource = 2; +} + +message RecursiveBodyResponse { + string data = 1; } \ No newline at end of file diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 151b2762b8a3..b68d6f81c6b3 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -27,6 +27,7 @@ from test_utils.test_utils import ( make_field, make_message, + make_enum, ) @@ -343,7 +344,41 @@ def test_mock_value_original_type_message(): assert 
entry_field.mock_value_original_type == {} -def test_mock_value_recursive(): +def test_mock_value_original_type_enum(): + mollusc_field = make_field( + name="class", + enum=make_enum( + name="Class", + values=[ + ("UNKNOWN", 0), + ("GASTROPOD", 1), + ("BIVALVE", 2), + ("CEPHALOPOD", 3), + ], + ), + ) + + assert mollusc_field.mock_value_original_type == 1 + + empty_field = make_field( + name="empty", + enum=make_enum( + name="Empty", + values=[("UNKNOWN", 0)], + ), + ) + + assert empty_field.mock_value_original_type == 0 + + +@pytest.mark.parametrize( + "mock_method,expected", + [ + ("mock_value", "ac_turtle.Turtle(turtle=ac_turtle.Turtle(turtle=turtle.Turtle(turtle=None)))"), + ("mock_value_original_type", {"turtle": {}}), + ], +) +def test_mock_value_recursive(mock_method, expected): # The elaborate setup is an unfortunate requirement. file_pb = descriptor_pb2.FileDescriptorProto( name="turtle.proto", @@ -367,8 +402,7 @@ def test_mock_value_recursive(): turtle_field = my_api.messages["animalia.chordata.v2.Turtle"].fields["turtle"] # If not handled properly, this will run forever and eventually OOM. 
- actual = turtle_field.mock_value - expected = "ac_turtle.Turtle(turtle=ac_turtle.Turtle(turtle=turtle.Turtle(turtle=None)))" + actual = getattr(turtle_field, mock_method) assert actual == expected From bbe5acdbff50aa635d90321215d9b54100018357 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 12 Feb 2022 12:10:08 -0500 Subject: [PATCH 0738/1339] fix: disambiguate field headers whose names are reserved python words (#1178) * fix: disambiguate field headers whose names are reserved python words * Add FieldHeader class * update tests and templates --- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 6 +++--- .../gapic-generator/gapic/schema/wrappers.py | 21 ++++++++++++++++--- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 10 ++++----- .../tests/unit/schema/wrappers/test_method.py | 11 +++++++++- 7 files changed, 39 insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index b31354897236..d6299831ecb2 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -452,7 +452,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ("{{ field_header }}", request.{{ field_header }}), + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), {% endif %} {% endfor %} )), diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3592c4a70ed0..ec91ca41b5df 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -595,7 +595,7 @@ def test_{{ method_name }}_field_headers(): request = {{ method.input.ident }}() {% for field_header in method.field_headers %} - request.{{ field_header }} = '{{ field_header }}/value' + request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. @@ -623,7 +623,7 @@ def test_{{ method_name }}_field_headers(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header }}={{ field_header }}/value + {{ field_header.raw }}={{ field_header.raw }}/value {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] @@ -783,7 +783,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ('{{ field_header }}', ''), + ('{{ field_header.raw }}', ''), {% endif %} {% endfor %} )), diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 9449320156e0..ac5b8f63c960 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -332,6 +332,15 @@ def with_context( ) +@dataclasses.dataclass(frozen=True) +class FieldHeader: + raw: str + + @property + def disambiguated(self) -> str: + return self.raw + "_" if self.raw in utils.RESERVED_NAMES else self.raw + + @dataclasses.dataclass(frozen=True) class Oneof: """Description of a field.""" @@ -1070,8 +1079,9 @@ def is_deprecated(self) -> bool: # e.g. 
doesn't work with basic case of gRPC transcoding @property - def field_headers(self) -> Sequence[str]: + def field_headers(self) -> Sequence[FieldHeader]: """Return the field headers defined for this method.""" + http = self.options.Extensions[annotations_pb2.http] pattern = re.compile(r'\{([a-z][\w\d_.]+)=') @@ -1084,8 +1094,13 @@ def field_headers(self) -> Sequence[str]: http.patch, http.custom.path, ] - - return next((tuple(pattern.findall(verb)) for verb in potential_verbs if verb), ()) + field_headers = ( + tuple(FieldHeader(field_header) + for field_header in pattern.findall(verb)) + for verb in potential_verbs + if verb + ) + return next(field_headers, ()) @property def explicit_routing(self): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 348f3bf32f00..4493b832aab0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -327,7 +327,7 @@ class {{ service.async_client_name }}: gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ("{{ field_header }}", request.{{ field_header }}), + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), {% endif %} {% endfor %} )), diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 3180125d8217..a28289976671 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -509,7 +509,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ("{{ field_header }}", request.{{ field_header }}), + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), {% endif %} {% endfor %} )), diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 77a125d222ed..7352e6c555e0 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -805,7 +805,7 @@ def test_{{ method_name }}_field_headers(): request = {{ method.input.ident }}() {% for field_header in method.field_headers %} - request.{{ field_header }} = '{{ field_header }}/value' + request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. @@ -833,7 +833,7 @@ def test_{{ method_name }}_field_headers(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header }}={{ field_header }}/value + {{ field_header.raw }}={{ field_header.raw }}/value {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] @@ -850,7 +850,7 @@ async def test_{{ method_name }}_field_headers_async(): request = {{ method.input.ident }}() {% for field_header in method.field_headers %} - request.{{ field_header }} = '{{ field_header }}/value' + request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -879,7 +879,7 @@ async def test_{{ method_name }}_field_headers_async(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header }}={{ field_header }}/value + {{ field_header.raw }}={{ field_header.raw }}/value {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] @@ -1128,7 +1128,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} - ('{{ field_header }}', ''), + ('{{ field_header.raw }}', ''), {% endif %} {% endfor %} )), diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 2aba8aa44c63..a131a28e1987 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -287,7 +287,16 @@ def test_method_field_headers_present(): for v in verbs: rule = http_pb2.HttpRule(**{v: '/v1/{parent=projects/*}/topics'}) method = make_method('DoSomething', http_rule=rule) - assert method.field_headers == ('parent',) + assert method.field_headers == (wrappers.FieldHeader('parent'),) + assert method.field_headers[0].raw == 'parent' + assert method.field_headers[0].disambiguated == 'parent' + + # test that reserved keyword in field header is disambiguated + rule = http_pb2.HttpRule(**{v: '/v1/{object=objects/*}/topics'}) + method = make_method('DoSomething', http_rule=rule) + assert method.field_headers == (wrappers.FieldHeader('object'),) + assert method.field_headers[0].raw == 'object' + assert method.field_headers[0].disambiguated == 'object_' def test_method_routing_rule(): From e4a415d051af3c11e4db228e9a40eb5be74633ff Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 14 Feb 2022 12:26:55 +0100 Subject: [PATCH 0739/1339] chore(deps): update dependency setuptools to v60.9.0 
(#1205) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3eb78a745614..ef9ec5847d9f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.8.2 +setuptools==60.9.0 From e51684f37132fd929ee711d2ab8bda403dc0d654 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 15 Feb 2022 13:12:25 -0700 Subject: [PATCH 0740/1339] fix: fix docstring code block formatting (#1206) Add `python` to code-blocks and use a consistent number of newlines. Addresses issues identified in 219067629. Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- .../services/%service/transports/rest.py.j2 | 2 +- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 3 +- .../services/%service/transports/rest.py.j2 | 2 +- .../services/asset_service/async_client.py | 24 +++---- .../asset_v1/services/asset_service/client.py | 36 ++++------ .../services/iam_credentials/async_client.py | 8 +-- .../services/iam_credentials/client.py | 12 ++-- .../config_service_v2/async_client.py | 46 ++++++------- .../services/config_service_v2/client.py | 69 +++++++------------ .../logging_service_v2/async_client.py | 12 ++-- .../services/logging_service_v2/client.py | 18 ++--- .../metrics_service_v2/async_client.py | 10 +-- .../services/metrics_service_v2/client.py | 15 ++-- .../services/cloud_redis/async_client.py | 18 ++--- .../redis_v1/services/cloud_redis/client.py | 27 +++----- 16 files changed, 122 insertions(+), 182 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 9f00f84ebe25..6c7b6b8dd9b0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -66,7 +66,7 @@ class {{ service.name }}RestInterceptor: These use cases and more can be enabled by injecting an instance of a custom subclass when constructing the {{ service.name }}RestTransport. - .. code-block: + .. code-block:: python class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): {% for _, method in service.methods|dictsort if not method.client_streaming %} def pre_{{ method.name|snake_case }}(request, metadata): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 4493b832aab0..569f8d060716 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -209,7 +209,7 @@ class {{ service.async_client_name }}: {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} {% if snippet is not none %} - .. code-block:: + .. 
code-block:: python {{ snippet.full_snippet|indent(width=12, first=True) }} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index a28289976671..da207d8b8c0e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -371,11 +371,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) -> Iterable[{{ method.client_output.ident }}]: {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8) }} - {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} {% if snippet is not none %} - .. code-block:: + .. code-block:: python {{ snippet.full_snippet|indent(width=12, first=True) }} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index dc9c58357979..4021915fe844 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -66,7 +66,7 @@ class {{ service.name }}RestInterceptor: These use cases and more can be enabled by injecting an instance of a custom subclass when constructing the {{ service.name }}RestTransport. - .. code-block: + .. 
code-block:: python class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): {% for _, method in service.methods|dictsort if not method.client_streaming %} def pre_{{ method.name|snake_case }}(request, metadata): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index b072b917179e..532af146cc07 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -211,7 +211,7 @@ async def export_assets(self, the export operation usually finishes within 5 minutes. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -308,7 +308,7 @@ async def list_assets(self, paged results in response. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -422,7 +422,7 @@ async def batch_get_assets_history(self, INVALID_ARGUMENT error. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -504,7 +504,7 @@ async def create_feed(self, updates. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -614,7 +614,7 @@ async def get_feed(self, ) -> asset_service.Feed: r"""Gets details about an asset feed. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -723,7 +723,7 @@ async def list_feeds(self, project/folder/organization. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -825,7 +825,7 @@ async def update_feed(self, ) -> asset_service.Feed: r"""Updates an asset feed configuration. - .. code-block:: + .. 
code-block:: python from google.cloud import asset_v1 @@ -929,7 +929,7 @@ async def delete_feed(self, ) -> None: r"""Deletes an asset feed. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1025,7 +1025,7 @@ async def search_all_resources(self, desired scope, otherwise the request will be rejected. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1232,7 +1232,7 @@ async def search_all_iam_policies(self, desired scope, otherwise the request will be rejected. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1415,7 +1415,7 @@ async def analyze_iam_policy(self, what accesses on which resources. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1510,7 +1510,7 @@ async def analyze_iam_policy_longrunning(self, to help callers to map responses to requests. - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index c43399267b1b..538c6ef62db5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -407,8 +407,7 @@ def export_assets(self, the export operation usually finishes within 5 minutes. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -506,8 +505,7 @@ def list_assets(self, paged results in response. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -621,8 +619,7 @@ def batch_get_assets_history(self, INVALID_ARGUMENT error. - - .. code-block:: + .. 
code-block:: python from google.cloud import asset_v1 @@ -698,8 +695,7 @@ def create_feed(self, updates. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -809,8 +805,7 @@ def get_feed(self, ) -> asset_service.Feed: r"""Gets details about an asset feed. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -912,8 +907,7 @@ def list_feeds(self, project/folder/organization. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1008,8 +1002,7 @@ def update_feed(self, ) -> asset_service.Feed: r"""Updates an asset feed configuration. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1113,8 +1106,7 @@ def delete_feed(self, ) -> None: r"""Deletes an asset feed. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1203,8 +1195,7 @@ def search_all_resources(self, desired scope, otherwise the request will be rejected. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1404,8 +1395,7 @@ def search_all_iam_policies(self, desired scope, otherwise the request will be rejected. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1581,8 +1571,7 @@ def analyze_iam_policy(self, what accesses on which resources. - - .. code-block:: + .. code-block:: python from google.cloud import asset_v1 @@ -1672,8 +1661,7 @@ def analyze_iam_policy_longrunning(self, to help callers to map responses to requests. - - .. code-block:: + .. 
code-block:: python from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 45f512ccfc85..cde4cb77c9b2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -209,7 +209,7 @@ async def generate_access_token(self, account. - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 @@ -363,7 +363,7 @@ async def generate_id_token(self, account. - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 @@ -510,7 +510,7 @@ async def sign_blob(self, private key. - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 @@ -644,7 +644,7 @@ async def sign_jwt(self, private key. - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 68b1418fdd4a..f495c9e4dab2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -396,8 +396,7 @@ def generate_access_token(self, account. - - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 @@ -544,8 +543,7 @@ def generate_id_token(self, account. - - .. 
code-block:: + .. code-block:: python from google.iam import credentials_v1 @@ -685,8 +683,7 @@ def sign_blob(self, private key. - - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 @@ -813,8 +810,7 @@ def sign_jwt(self, private key. - - .. code-block:: + .. code-block:: python from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 3c5176df02af..a44f709cef58 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -203,7 +203,7 @@ async def list_buckets(self, ) -> pagers.ListBucketsAsyncPager: r"""Lists buckets. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -318,7 +318,7 @@ async def get_bucket(self, ) -> logging_config.LogBucket: r"""Gets a bucket. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -392,7 +392,7 @@ async def create_bucket(self, cannot be changed. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -475,7 +475,7 @@ async def update_bucket(self, A buckets region may not be modified after it is created. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -549,7 +549,7 @@ async def delete_bucket(self, the bucket will be permanently deleted. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -612,7 +612,7 @@ async def undelete_bucket(self, may be undeleted within the grace period of 7 days. - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 @@ -674,7 +674,7 @@ async def list_views(self, ) -> pagers.ListViewsAsyncPager: r"""Lists views on a bucket. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -781,7 +781,7 @@ async def get_view(self, ) -> logging_config.LogView: r"""Gets a view. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -856,7 +856,7 @@ async def create_view(self, contain a maximum of 50 views. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -932,7 +932,7 @@ async def update_view(self, existing view with values from the new view: ``filter``. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1005,7 +1005,7 @@ async def delete_view(self, ) -> None: r"""Deletes a view from a bucket. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1067,7 +1067,7 @@ async def list_sinks(self, ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1187,7 +1187,7 @@ async def get_sink(self, ) -> logging_config.LogSink: r"""Gets a sink. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1309,7 +1309,7 @@ async def create_sink(self, entries only from the resource owning the sink. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1440,7 +1440,7 @@ async def update_sink(self, the ``unique_writer_identity`` field. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1595,7 +1595,7 @@ async def delete_sink(self, then that service account is also deleted. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1693,7 +1693,7 @@ async def list_exclusions(self, ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions in a parent resource. - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 @@ -1813,7 +1813,7 @@ async def get_exclusion(self, ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1937,7 +1937,7 @@ async def create_exclusion(self, resource. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2068,7 +2068,7 @@ async def update_exclusion(self, exclusion. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2210,7 +2210,7 @@ async def delete_exclusion(self, ) -> None: r"""Deletes an exclusion. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2317,7 +2317,7 @@ async def get_cmek_settings(self, for more information. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2419,7 +2419,7 @@ async def update_cmek_settings(self, for more information. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index b5d060646a37..c65fd24e6287 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -426,8 +426,7 @@ def list_buckets(self, ) -> pagers.ListBucketsPager: r"""Lists buckets. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -542,8 +541,7 @@ def get_bucket(self, ) -> logging_config.LogBucket: r"""Gets a bucket. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -618,8 +616,7 @@ def create_bucket(self, cannot be changed. 
- - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -703,8 +700,7 @@ def update_bucket(self, A buckets region may not be modified after it is created. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -779,8 +775,7 @@ def delete_bucket(self, the bucket will be permanently deleted. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -844,8 +839,7 @@ def undelete_bucket(self, may be undeleted within the grace period of 7 days. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -908,8 +902,7 @@ def list_views(self, ) -> pagers.ListViewsPager: r"""Lists views on a bucket. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1016,8 +1009,7 @@ def get_view(self, ) -> logging_config.LogView: r"""Gets a view. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1093,8 +1085,7 @@ def create_view(self, contain a maximum of 50 views. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1171,8 +1162,7 @@ def update_view(self, existing view with values from the new view: ``filter``. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1246,8 +1236,7 @@ def delete_view(self, ) -> None: r"""Deletes a view from a bucket. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1310,8 +1299,7 @@ def list_sinks(self, ) -> pagers.ListSinksPager: r"""Lists sinks. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1423,8 +1411,7 @@ def get_sink(self, ) -> logging_config.LogSink: r"""Gets a sink. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1538,8 +1525,7 @@ def create_sink(self, entries only from the resource owning the sink. - - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 @@ -1670,8 +1656,7 @@ def update_sink(self, the ``unique_writer_identity`` field. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1818,8 +1803,7 @@ def delete_sink(self, then that service account is also deleted. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1909,8 +1893,7 @@ def list_exclusions(self, ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions in a parent resource. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2022,8 +2005,7 @@ def get_exclusion(self, ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2139,8 +2121,7 @@ def create_exclusion(self, resource. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2271,8 +2252,7 @@ def update_exclusion(self, exclusion. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2414,8 +2394,7 @@ def delete_exclusion(self, ) -> None: r"""Deletes an exclusion. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2514,8 +2493,7 @@ def get_cmek_settings(self, for more information. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -2618,8 +2596,7 @@ def update_cmek_settings(self, for more information. - - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index b12aad3fa617..628a11e4ffed 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -200,7 +200,7 @@ async def delete_log(self, with a timestamp before the operation will be deleted. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -311,7 +311,7 @@ async def write_log_entries(self, organizations, billing accounts or folders) - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -499,7 +499,7 @@ async def list_log_entries(self, Logs `__. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -652,7 +652,7 @@ async def list_monitored_resource_descriptors(self, used by Logging. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -742,7 +742,7 @@ async def list_logs(self, listed. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -863,7 +863,7 @@ def tail_log_entries(self, logs. - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 04ff20311c1a..6d6abf1b1613 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -387,8 +387,7 @@ def delete_log(self, with a timestamp before the operation will be deleted. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -491,8 +490,7 @@ def write_log_entries(self, organizations, billing accounts or folders) - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -671,8 +669,7 @@ def list_log_entries(self, Logs `__. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -817,8 +814,7 @@ def list_monitored_resource_descriptors(self, used by Logging. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -901,8 +897,7 @@ def list_logs(self, listed. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -1015,8 +1010,7 @@ def tail_log_entries(self, logs. - - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index b0f35bbd9819..9831beb94af1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -196,7 +196,7 @@ async def list_log_metrics(self, ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -313,7 +313,7 @@ async def get_log_metric(self, ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -427,7 +427,7 @@ async def create_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -551,7 +551,7 @@ async def update_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -681,7 +681,7 @@ async def delete_log_metric(self, ) -> None: r"""Deletes a logs-based metric. - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index dd56772e8ebe..8858d085e126 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -383,8 +383,7 @@ def list_log_metrics(self, ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -493,8 +492,7 @@ def get_log_metric(self, ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -600,8 +598,7 @@ def create_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -725,8 +722,7 @@ def update_log_metric(self, ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. - - .. code-block:: + .. code-block:: python from google.cloud import logging_v2 @@ -848,8 +844,7 @@ def delete_log_metric(self, ) -> None: r"""Deletes a logs-based metric. - - .. code-block:: + .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 16ab081efd37..189333cf5ba2 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -229,7 +229,7 @@ async def list_instances(self, are aggregated. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -338,7 +338,7 @@ async def get_instance(self, ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -447,7 +447,7 @@ async def create_instance(self, hours, so there is no need to call DeleteOperation. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -595,7 +595,7 @@ async def update_instance(self, there is no need to call DeleteOperation. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -728,7 +728,7 @@ async def upgrade_instance(self, specified in the request. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -858,7 +858,7 @@ async def import_instance(self, few hours, so there is no need to call DeleteOperation. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -987,7 +987,7 @@ async def export_instance(self, few hours, so there is no need to call DeleteOperation. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -1114,7 +1114,7 @@ async def failover_instance(self, Memorystore for Redis instance. - .. code-block:: + .. 
code-block:: python from google.cloud import redis_v1 @@ -1236,7 +1236,7 @@ async def delete_instance(self, serving and data is deleted. - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 5ee6ec46251a..2b3eab7424cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -416,8 +416,7 @@ def list_instances(self, are aggregated. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -526,8 +525,7 @@ def get_instance(self, ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -636,8 +634,7 @@ def create_instance(self, hours, so there is no need to call DeleteOperation. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -785,8 +782,7 @@ def update_instance(self, there is no need to call DeleteOperation. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -919,8 +915,7 @@ def upgrade_instance(self, specified in the request. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -1050,8 +1045,7 @@ def import_instance(self, few hours, so there is no need to call DeleteOperation. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -1180,8 +1174,7 @@ def export_instance(self, few hours, so there is no need to call DeleteOperation. - - .. code-block:: + .. 
code-block:: python from google.cloud import redis_v1 @@ -1308,8 +1301,7 @@ def failover_instance(self, Memorystore for Redis instance. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 @@ -1431,8 +1423,7 @@ def delete_instance(self, serving and data is deleted. - - .. code-block:: + .. code-block:: python from google.cloud import redis_v1 From d99e4307ca5fab38667c82d617fb976ccd530684 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 16 Feb 2022 11:53:51 +0100 Subject: [PATCH 0741/1339] chore(deps): update dependency setuptools to v60.9.1 (#1207) * chore(deps): update dependency setuptools to v60.9.1 * serialize showcase mtls tests Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.github/workflows/tests.yaml | 1 + packages/gapic-generator/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index ad504630fcca..193b4af84df8 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -83,6 +83,7 @@ jobs: strategy: matrix: target: [showcase_mtls, showcase_mtls_alternative_templates] + max-parallel: 1 runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ef9ec5847d9f..2f08e316c427 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.9.0 +setuptools==60.9.1 From 7003e54043f0fc02d32aa9349abf9813eaa7f346 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 17 Feb 2022 11:08:40 -0800 Subject: [PATCH 0742/1339] chore(main): release 0.63.3 
(#1202) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f2b702ad6015..c28e6d40c9ac 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +### [0.63.3](https://github.com/googleapis/gapic-generator-python/compare/v0.63.2...v0.63.3) (2022-02-16) + + +### Bug Fixes + +* disambiguate field headers whose names are reserved python words ([#1178](https://github.com/googleapis/gapic-generator-python/issues/1178)) ([98aa690](https://github.com/googleapis/gapic-generator-python/commit/98aa6906031276dcad899fdb88f47cbafc651ae4)) +* fix docstring code block formatting ([#1206](https://github.com/googleapis/gapic-generator-python/issues/1206)) ([500dfe7](https://github.com/googleapis/gapic-generator-python/commit/500dfe7e401888b3bea6488d6e6fee9955f1a2ab)) +* HTTP body field messages with enums or recursive fields ([#1201](https://github.com/googleapis/gapic-generator-python/issues/1201)) ([246bfe2](https://github.com/googleapis/gapic-generator-python/commit/246bfe2948362bc8f6035aafc0dbd9e65f5acc2b)) + ### [0.63.2](https://github.com/googleapis/gapic-generator-python/compare/v0.63.1...v0.63.2) (2022-02-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 7c728f62aad6..0772b210c507 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.2" +version = "0.63.3" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 40703b9dba2c8273cb656cd110dde44c2ed2a621 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 17 Feb 2022 
14:08:12 -0800 Subject: [PATCH 0743/1339] chore: add tests for python 3.10 (#1020) --- .../.github/workflows/tests.yaml | 61 ++++++++++--------- .../gapic/ads-templates/noxfile.py.j2 | 11 +++- .../gapic/ads-templates/setup.py.j2 | 1 + .../gapic/templates/noxfile.py.j2 | 12 +++- .../gapic/templates/setup.py.j2 | 1 + packages/gapic-generator/noxfile.py | 5 +- .../integration/goldens/asset/noxfile.py | 12 +++- .../tests/integration/goldens/asset/setup.py | 1 + .../goldens/credentials/noxfile.py | 12 +++- .../integration/goldens/credentials/setup.py | 1 + .../integration/goldens/logging/noxfile.py | 12 +++- .../integration/goldens/logging/setup.py | 1 + .../integration/goldens/redis/noxfile.py | 12 +++- .../tests/integration/goldens/redis/setup.py | 1 + 14 files changed, 99 insertions(+), 44 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 193b4af84df8..38e059a3e42d 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -15,13 +15,14 @@ concurrency: jobs: docs: + # Don't upgrade python version; there's a bug in 3.10 sphinx runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.9" cache: 'pip' - name: Install nox. run: python -m pip install nox @@ -31,10 +32,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install nox. 
run: python -m pip install nox @@ -47,10 +48,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install system dependencies. run: | @@ -66,7 +67,7 @@ jobs: ./gapic-showcase run & cd - env: - SHOWCASE_VERSION: 0.18.0 + SHOWCASE_VERSION: 0.19.0 - name: Install nox. run: python -m pip install nox - name: Install protoc 3.12.1. @@ -91,10 +92,10 @@ jobs: run: | sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ @@ -133,23 +134,23 @@ jobs: cd .. nox -s ${{ matrix.target }} env: - SHOWCASE_VERSION: 0.18.0 + SHOWCASE_VERSION: 0.19.0 # TODO(yon-mg): add compute unit tests showcase-unit: strategy: matrix: - python: [3.6, 3.7, 3.8, 3.9] + python: ["3.6", "3.7", "3.8", "3.9", "3.10"] variant: ['', _alternative_templates] exclude: - - python: 3.6 + - python: "3.6" variant: _alternative_templates runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python }} + - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python }} + python-version: "${{ matrix.python }}" cache: 'pip' - name: Install system dependencies. run: | @@ -168,15 +169,15 @@ jobs: - name: Run unit tests. 
run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} env: - SHOWCASE_VERSION: 0.18.0 + SHOWCASE_VERSION: 0.19.0 showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install system dependencies. run: | @@ -195,7 +196,7 @@ jobs: - name: Run unit tests. run: nox -s showcase_unit_add_iam_methods env: - SHOWCASE_VERSION: 0.18.0 + SHOWCASE_VERSION: 0.19.0 showcase-mypy: runs-on: ubuntu-latest strategy: @@ -203,10 +204,10 @@ jobs: variant: ['', _alternative_templates] steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install system dependencies. run: | @@ -225,15 +226,15 @@ jobs: - name: Typecheck the generated output. run: nox -s showcase_mypy${{ matrix.variant }} env: - SHOWCASE_VERSION: 0.18.0 + SHOWCASE_VERSION: 0.19.0 snippetgen: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install system dependencies. 
run: | @@ -246,7 +247,7 @@ jobs: unit: strategy: matrix: - python: [3.6, 3.7, 3.8, 3.9] + python: ["3.6", "3.7", "3.8", "3.9", "3.10"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -267,10 +268,10 @@ jobs: fragment: strategy: matrix: - python: [3.6, 3.7, 3.8, 3.9] + python: ["3.6", "3.7", "3.8", "3.9", "3.10"] variant: ['', _alternative_templates] exclude: - - python: 3.6 + - python: "3.6" variant: _alternative_templates runs-on: ubuntu-latest steps: @@ -321,10 +322,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python 3.10 uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install nox. run: | @@ -339,10 +340,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - name: Set up Python 3.9 + - name: Set up Python "3.10" uses: actions/setup-python@v2 with: - python-version: 3.9 + python-version: "3.10" cache: 'pip' - name: Install autopep8 run: | diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index cfe6b94671c5..075643aac8d3 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -7,7 +7,14 @@ import os import nox # type: ignore -@nox.session(python=['3.7', '3.8', '3.9']) +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", +] + +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -25,7 +32,7 @@ def unit(session): ) -@nox.session(python=['3.7', '3.8', '3.9']) +@nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" session.install('mypy') diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 9d96e1809ee0..7f3716da90b4 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -44,6 +44,7 @@ setuptools.setup( 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 34e19ac4722f..4793094e882d 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -11,6 +11,14 @@ import sys import nox # type: ignore +ALL_PYTHON = [ + "3.6", + "3.7", + "3.8", + "3.9", + "3.10", +] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" @@ -32,7 +40,7 @@ nox.sessions = [ "lint_setup_py", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -62,7 +70,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 223147682849..864183cc236c 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -51,6 +51,7 @@ setuptools.setup( 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 
3e1b9638c6a3..0982b167ee5d 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.18.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.19.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") @@ -38,6 +38,7 @@ "3.7", "3.8", "3.9", + "3.10", ) NEWEST_PYTHON = ALL_PYTHON[-1] @@ -393,7 +394,7 @@ def snippetgen(session): session.run("py.test", "-vv", "tests/snippetgen") -@nox.session(python=NEWEST_PYTHON) +@nox.session(python="3.9") def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index b96e8d8e174f..4833faf3eb83 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -22,6 +22,14 @@ import nox # type: ignore +ALL_PYTHON = [ + "3.6", + "3.7", + "3.8", + "3.9", + "3.10", +] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" @@ -43,7 +51,7 @@ "lint_setup_py", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -73,7 +81,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 75d7fc7c80c2..38fae1451be6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -51,6 +51,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index c05f13f142d4..69a64f4ce411 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -22,6 +22,14 @@ import nox # type: ignore +ALL_PYTHON = [ + "3.6", + "3.7", + "3.8", + "3.9", + "3.10", +] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" @@ -43,7 +51,7 @@ "lint_setup_py", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -73,7 +81,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index f3c9b857e2e3..3d0b28176155 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -50,6 +50,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', 
], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 342a72cfe845..516ab61925d8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -22,6 +22,14 @@ import nox # type: ignore +ALL_PYTHON = [ + "3.6", + "3.7", + "3.8", + "3.9", + "3.10", +] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" @@ -43,7 +51,7 @@ "lint_setup_py", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -73,7 +81,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index bf6c27489bfa..dbbcbb08b571 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -50,6 +50,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 90788a5afc4d..78a45ee2e947 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -22,6 +22,14 @@ import 
nox # type: ignore +ALL_PYTHON = [ + "3.6", + "3.7", + "3.8", + "3.9", + "3.10", +] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" @@ -43,7 +51,7 @@ "lint_setup_py", ] -@nox.session(python=['3.6', '3.7', '3.8', '3.9', '3.10']) +@nox.session(python=ALL_PYTHON) def unit(session): """Run the unit test suite.""" @@ -73,7 +81,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +@nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" session.install('mypy', 'types-pkg_resources') diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 49cdb8f70a99..2941b8365185 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -50,6 +50,7 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], From babc665f6c9fc72ec46c74220d25742e7470beaf Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 17 Feb 2022 16:10:06 -0800 Subject: [PATCH 0744/1339] ci: make showcase-mtls tests less flaky in CI (#1212) --- .../.github/workflows/tests.yaml | 39 +++++++------------ 1 file changed, 15 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 38e059a3e42d..cafa68430f1f 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -70,13 +70,13 @@ jobs: SHOWCASE_VERSION: 0.19.0 - name: Install nox. run: python -m pip install nox - - name: Install protoc 3.12.1. 
+ - name: Install protoc 3.19.0. run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip cd /usr/src/protoc/ - unzip protoc-3.12.1.zip + unzip protoc-3.19.0.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Run showcase tests. run: nox -s ${{ matrix.target }} @@ -105,13 +105,13 @@ jobs: sudo apt-get install -y curl pandoc unzip gcc - name: Install nox. run: python -m pip install nox - - name: Install protoc 3.12.1. + - name: Install protoc 3.19.0. run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip cd /usr/src/protoc/ - unzip protoc-3.12.1.zip + unzip protoc-3.19.0.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc cd - - name: Run showcase tests. @@ -121,15 +121,6 @@ jobs: cd gapic_showcase curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v${SHOWCASE_VERSION}/gapic-showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz | tar xz ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & - showcase_pid=$! - - cleanup() { - echo "kill showcase server" - kill $showcase_pid - # Wait for the process to die, but don't report error from the kill. - wait $showcase_pid || exit $exit_code - } - trap cleanup EXIT cd .. 
nox -s ${{ matrix.target }} @@ -156,13 +147,13 @@ jobs: run: | sudo apt-get update sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.12.1. + - name: Install protoc 3.19.0. run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip cd /usr/src/protoc/ - unzip protoc-3.12.1.zip + unzip protoc-3.19.0.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Install nox. run: python -m pip install nox @@ -183,13 +174,13 @@ jobs: run: | sudo apt-get update sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.12.1. + - name: Install protoc 3.19.0. run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip cd /usr/src/protoc/ - unzip protoc-3.12.1.zip + unzip protoc-3.19.0.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Install nox. run: python -m pip install nox @@ -213,13 +204,13 @@ jobs: run: | sudo apt-get update sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.12.1. + - name: Install protoc 3.19.0. 
run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip + curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip cd /usr/src/protoc/ - unzip protoc-3.12.1.zip + unzip protoc-3.19.0.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Install nox. run: python -m pip install nox From 7e6def55d99eb2be0c8880a8e3f9c6441a8d730d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 17 Feb 2022 18:11:45 -0700 Subject: [PATCH 0745/1339] fix: fix snippet region tag format (#1210) * fix: fix snippet region tags * chore: delete obsolete files * chore: delete old files Co-authored-by: Dov Shlachter Co-authored-by: Anthonios Partheniou --- .../gapic/samplegen/samplegen.py | 5 +- ...asset_service_analyze_iam_policy_async.py} | 4 +- ...e_analyze_iam_policy_longrunning_async.py} | 4 +- ...ce_analyze_iam_policy_longrunning_sync.py} | 4 +- ..._asset_service_analyze_iam_policy_sync.py} | 4 +- ...service_batch_get_assets_history_async.py} | 4 +- ..._service_batch_get_assets_history_sync.py} | 4 +- ...erated_asset_service_create_feed_async.py} | 4 +- ...nerated_asset_service_create_feed_sync.py} | 4 +- ...erated_asset_service_delete_feed_async.py} | 4 +- ...nerated_asset_service_delete_feed_sync.py} | 4 +- ...ated_asset_service_export_assets_async.py} | 4 +- ...rated_asset_service_export_assets_sync.py} | 4 +- ...generated_asset_service_get_feed_async.py} | 4 +- ..._generated_asset_service_get_feed_sync.py} | 4 +- ...erated_asset_service_list_assets_async.py} | 4 +- ...nerated_asset_service_list_assets_sync.py} | 4 +- ...nerated_asset_service_list_feeds_async.py} | 4 +- ...enerated_asset_service_list_feeds_sync.py} | 4 +- 
..._service_search_all_iam_policies_async.py} | 4 +- ...t_service_search_all_iam_policies_sync.py} | 4 +- ...set_service_search_all_resources_async.py} | 4 +- ...sset_service_search_all_resources_sync.py} | 4 +- ...erated_asset_service_update_feed_async.py} | 4 +- ...nerated_asset_service_update_feed_sync.py} | 4 +- .../snippet_metadata_asset_v1.json | 96 +-- ...redentials_generate_access_token_async.py} | 4 +- ...credentials_generate_access_token_sync.py} | 4 +- ...am_credentials_generate_id_token_async.py} | 4 +- ...iam_credentials_generate_id_token_sync.py} | 4 +- ...erated_iam_credentials_sign_blob_async.py} | 4 +- ...nerated_iam_credentials_sign_blob_sync.py} | 4 +- ...nerated_iam_credentials_sign_jwt_async.py} | 4 +- ...enerated_iam_credentials_sign_jwt_sync.py} | 4 +- .../snippet_metadata_credentials_v1.json | 32 +- ..._config_service_v2_create_bucket_async.py} | 4 +- ...d_config_service_v2_create_bucket_sync.py} | 4 +- ...nfig_service_v2_create_exclusion_async.py} | 4 +- ...onfig_service_v2_create_exclusion_sync.py} | 4 +- ...ed_config_service_v2_create_sink_async.py} | 4 +- ...ted_config_service_v2_create_sink_sync.py} | 4 +- ...ed_config_service_v2_create_view_async.py} | 4 +- ...ted_config_service_v2_create_view_sync.py} | 4 +- ..._config_service_v2_delete_bucket_async.py} | 4 +- ...d_config_service_v2_delete_bucket_sync.py} | 4 +- ...nfig_service_v2_delete_exclusion_async.py} | 4 +- ...onfig_service_v2_delete_exclusion_sync.py} | 4 +- ...ed_config_service_v2_delete_sink_async.py} | 4 +- ...ted_config_service_v2_delete_sink_sync.py} | 4 +- ...ed_config_service_v2_delete_view_async.py} | 4 +- ...ted_config_service_v2_delete_view_sync.py} | 4 +- ...ted_config_service_v2_get_bucket_async.py} | 4 +- ...ated_config_service_v2_get_bucket_sync.py} | 4 +- ...fig_service_v2_get_cmek_settings_async.py} | 4 +- ...nfig_service_v2_get_cmek_settings_sync.py} | 4 +- ..._config_service_v2_get_exclusion_async.py} | 4 +- ...d_config_service_v2_get_exclusion_sync.py} | 4 
+- ...rated_config_service_v2_get_sink_async.py} | 4 +- ...erated_config_service_v2_get_sink_sync.py} | 4 +- ...rated_config_service_v2_get_view_async.py} | 4 +- ...erated_config_service_v2_get_view_sync.py} | 4 +- ...d_config_service_v2_list_buckets_async.py} | 4 +- ...ed_config_service_v2_list_buckets_sync.py} | 4 +- ...onfig_service_v2_list_exclusions_async.py} | 4 +- ...config_service_v2_list_exclusions_sync.py} | 4 +- ...ted_config_service_v2_list_sinks_async.py} | 4 +- ...ated_config_service_v2_list_sinks_sync.py} | 4 +- ...ted_config_service_v2_list_views_async.py} | 4 +- ...ated_config_service_v2_list_views_sync.py} | 4 +- ...onfig_service_v2_undelete_bucket_async.py} | 4 +- ...config_service_v2_undelete_bucket_sync.py} | 4 +- ..._config_service_v2_update_bucket_async.py} | 4 +- ...d_config_service_v2_update_bucket_sync.py} | 4 +- ..._service_v2_update_cmek_settings_async.py} | 4 +- ...g_service_v2_update_cmek_settings_sync.py} | 4 +- ...nfig_service_v2_update_exclusion_async.py} | 4 +- ...onfig_service_v2_update_exclusion_sync.py} | 4 +- ...ed_config_service_v2_update_sink_async.py} | 4 +- ...ted_config_service_v2_update_sink_sync.py} | 4 +- ...ed_config_service_v2_update_view_async.py} | 4 +- ...ted_config_service_v2_update_view_sync.py} | 4 +- ...ed_logging_service_v2_delete_log_async.py} | 4 +- ...ted_logging_service_v2_delete_log_sync.py} | 4 +- ...ging_service_v2_list_log_entries_async.py} | 4 +- ...gging_service_v2_list_log_entries_sync.py} | 4 +- ...ted_logging_service_v2_list_logs_async.py} | 4 +- ...ated_logging_service_v2_list_logs_sync.py} | 4 +- ...t_monitored_resource_descriptors_async.py} | 4 +- ...st_monitored_resource_descriptors_sync.py} | 4 +- ...ging_service_v2_tail_log_entries_async.py} | 4 +- ...gging_service_v2_tail_log_entries_sync.py} | 4 +- ...ing_service_v2_write_log_entries_async.py} | 4 +- ...ging_service_v2_write_log_entries_sync.py} | 4 +- ...ics_service_v2_create_log_metric_async.py} | 4 +- 
...rics_service_v2_create_log_metric_sync.py} | 4 +- ...ics_service_v2_delete_log_metric_async.py} | 4 +- ...rics_service_v2_delete_log_metric_sync.py} | 4 +- ...etrics_service_v2_get_log_metric_async.py} | 4 +- ...metrics_service_v2_get_log_metric_sync.py} | 4 +- ...rics_service_v2_list_log_metrics_async.py} | 4 +- ...trics_service_v2_list_log_metrics_sync.py} | 4 +- ...ics_service_v2_update_log_metric_async.py} | 4 +- ...rics_service_v2_update_log_metric_sync.py} | 4 +- .../snippet_metadata_logging_v2.json | 272 ++++---- ...ated_cloud_redis_create_instance_async.py} | 4 +- ...rated_cloud_redis_create_instance_sync.py} | 4 +- ...ated_cloud_redis_delete_instance_async.py} | 4 +- ...rated_cloud_redis_delete_instance_sync.py} | 4 +- ...ated_cloud_redis_export_instance_async.py} | 4 +- ...rated_cloud_redis_export_instance_sync.py} | 4 +- ...ed_cloud_redis_failover_instance_async.py} | 4 +- ...ted_cloud_redis_failover_instance_sync.py} | 4 +- ...nerated_cloud_redis_get_instance_async.py} | 4 +- ...enerated_cloud_redis_get_instance_sync.py} | 4 +- ...ated_cloud_redis_import_instance_async.py} | 4 +- ...rated_cloud_redis_import_instance_sync.py} | 4 +- ...rated_cloud_redis_list_instances_async.py} | 4 +- ...erated_cloud_redis_list_instances_sync.py} | 4 +- ...ated_cloud_redis_update_instance_async.py} | 4 +- ...rated_cloud_redis_update_instance_sync.py} | 4 +- ...ted_cloud_redis_upgrade_instance_async.py} | 4 +- ...ated_cloud_redis_upgrade_instance_sync.py} | 4 +- .../snippet_metadata_redis_v1.json | 72 +-- ...enerated_snippets_list_resources_async.py} | 4 +- ...generated_snippets_list_resources_sync.py} | 4 +- ...d_snippets_method_bidi_streaming_async.py} | 4 +- ...ed_snippets_method_bidi_streaming_sync.py} | 4 +- ...d_snippets_method_lro_signatures_async.py} | 4 +- ...ed_snippets_method_lro_signatures_sync.py} | 4 +- ...ed_snippets_method_one_signature_async.py} | 4 +- ...ted_snippets_method_one_signature_sync.py} | 4 +- ...snippets_method_server_streaming_async.py} | 
4 +- ..._snippets_method_server_streaming_sync.py} | 4 +- ...generated_snippets_one_of_method_async.py} | 4 +- ...ets_one_of_method_required_field_async.py} | 4 +- ...pets_one_of_method_required_field_sync.py} | 4 +- ..._generated_snippets_one_of_method_sync.py} | 4 +- .../goldens/snippet_metadata_mollusca_v1.json | 56 +- .../snippet_metadata_v1_mollusca_v1.json | 611 ------------------ .../tests/unit/samplegen/test_samplegen.py | 4 +- 140 files changed, 533 insertions(+), 1143 deletions(-) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py => cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py => cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py => cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py => cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py => cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py => cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py} (89%) 
rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_create_feed_async.py => cloudasset_v1_generated_asset_service_create_feed_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_create_feed_sync.py => cloudasset_v1_generated_asset_service_create_feed_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_delete_feed_async.py => cloudasset_v1_generated_asset_service_delete_feed_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py => cloudasset_v1_generated_asset_service_delete_feed_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_export_assets_async.py => cloudasset_v1_generated_asset_service_export_assets_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_export_assets_sync.py => cloudasset_v1_generated_asset_service_export_assets_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_get_feed_async.py => cloudasset_v1_generated_asset_service_get_feed_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_get_feed_sync.py => cloudasset_v1_generated_asset_service_get_feed_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_list_assets_async.py => 
cloudasset_v1_generated_asset_service_list_assets_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_list_assets_sync.py => cloudasset_v1_generated_asset_service_list_assets_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_list_feeds_async.py => cloudasset_v1_generated_asset_service_list_feeds_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py => cloudasset_v1_generated_asset_service_list_feeds_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py => cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py => cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py => cloudasset_v1_generated_asset_service_search_all_resources_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py => cloudasset_v1_generated_asset_service_search_all_resources_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_update_feed_async.py => cloudasset_v1_generated_asset_service_update_feed_async.py} (90%) rename 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{cloudasset_generated_asset_v1_asset_service_update_feed_sync.py => cloudasset_v1_generated_asset_service_update_feed_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py => iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py} (88%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py => iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py} (88%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py => iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py} (88%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py => iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py} (88%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py => iamcredentials_v1_generated_iam_credentials_sign_blob_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py => iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py => iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py} 
(89%) rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py => iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_bucket_async.py => logging_v2_generated_config_service_v2_create_bucket_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_bucket_sync.py => logging_v2_generated_config_service_v2_create_bucket_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_exclusion_async.py => logging_v2_generated_config_service_v2_create_exclusion_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py => logging_v2_generated_config_service_v2_create_exclusion_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_sink_async.py => logging_v2_generated_config_service_v2_create_sink_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_sink_sync.py => logging_v2_generated_config_service_v2_create_sink_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_view_async.py => logging_v2_generated_config_service_v2_create_view_async.py} (90%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_create_view_sync.py => logging_v2_generated_config_service_v2_create_view_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_bucket_async.py => logging_v2_generated_config_service_v2_delete_bucket_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py => logging_v2_generated_config_service_v2_delete_bucket_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py => logging_v2_generated_config_service_v2_delete_exclusion_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py => logging_v2_generated_config_service_v2_delete_exclusion_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_sink_async.py => logging_v2_generated_config_service_v2_delete_sink_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_sink_sync.py => logging_v2_generated_config_service_v2_delete_sink_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_view_async.py => logging_v2_generated_config_service_v2_delete_view_async.py} (89%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_delete_view_sync.py => logging_v2_generated_config_service_v2_delete_view_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_bucket_async.py => logging_v2_generated_config_service_v2_get_bucket_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_bucket_sync.py => logging_v2_generated_config_service_v2_get_bucket_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py => logging_v2_generated_config_service_v2_get_cmek_settings_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py => logging_v2_generated_config_service_v2_get_cmek_settings_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_exclusion_async.py => logging_v2_generated_config_service_v2_get_exclusion_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py => logging_v2_generated_config_service_v2_get_exclusion_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_sink_async.py => logging_v2_generated_config_service_v2_get_sink_async.py} (90%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_sink_sync.py => logging_v2_generated_config_service_v2_get_sink_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_view_async.py => logging_v2_generated_config_service_v2_get_view_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_get_view_sync.py => logging_v2_generated_config_service_v2_get_view_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_buckets_async.py => logging_v2_generated_config_service_v2_list_buckets_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_buckets_sync.py => logging_v2_generated_config_service_v2_list_buckets_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_exclusions_async.py => logging_v2_generated_config_service_v2_list_exclusions_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py => logging_v2_generated_config_service_v2_list_exclusions_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_sinks_async.py => logging_v2_generated_config_service_v2_list_sinks_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_sinks_sync.py => 
logging_v2_generated_config_service_v2_list_sinks_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_views_async.py => logging_v2_generated_config_service_v2_list_views_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_list_views_sync.py => logging_v2_generated_config_service_v2_list_views_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py => logging_v2_generated_config_service_v2_undelete_bucket_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py => logging_v2_generated_config_service_v2_undelete_bucket_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_bucket_async.py => logging_v2_generated_config_service_v2_update_bucket_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_bucket_sync.py => logging_v2_generated_config_service_v2_update_bucket_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py => logging_v2_generated_config_service_v2_update_cmek_settings_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py => logging_v2_generated_config_service_v2_update_cmek_settings_sync.py} (89%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_exclusion_async.py => logging_v2_generated_config_service_v2_update_exclusion_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py => logging_v2_generated_config_service_v2_update_exclusion_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_sink_async.py => logging_v2_generated_config_service_v2_update_sink_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_sink_sync.py => logging_v2_generated_config_service_v2_update_sink_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_view_async.py => logging_v2_generated_config_service_v2_update_view_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_config_service_v2_update_view_sync.py => logging_v2_generated_config_service_v2_update_view_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_delete_log_async.py => logging_v2_generated_logging_service_v2_delete_log_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_delete_log_sync.py => logging_v2_generated_logging_service_v2_delete_log_sync.py} (89%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py => logging_v2_generated_logging_service_v2_list_log_entries_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py => logging_v2_generated_logging_service_v2_list_log_entries_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_logs_async.py => logging_v2_generated_logging_service_v2_list_logs_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_logs_sync.py => logging_v2_generated_logging_service_v2_list_logs_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py => logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py => logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py => logging_v2_generated_logging_service_v2_tail_log_entries_async.py} (92%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py => logging_v2_generated_logging_service_v2_tail_log_entries_sync.py} (92%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py => logging_v2_generated_logging_service_v2_write_log_entries_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py => logging_v2_generated_logging_service_v2_write_log_entries_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py => logging_v2_generated_metrics_service_v2_create_log_metric_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py => logging_v2_generated_metrics_service_v2_create_log_metric_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py => logging_v2_generated_metrics_service_v2_delete_log_metric_async.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py => logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py => logging_v2_generated_metrics_service_v2_get_log_metric_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py => logging_v2_generated_metrics_service_v2_get_log_metric_sync.py} (90%) rename 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py => logging_v2_generated_metrics_service_v2_list_log_metrics_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py => logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py => logging_v2_generated_metrics_service_v2_update_log_metric_async.py} (90%) rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py => logging_v2_generated_metrics_service_v2_update_log_metric_sync.py} (90%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_create_instance_async.py => redis_v1_generated_cloud_redis_create_instance_async.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_create_instance_sync.py => redis_v1_generated_cloud_redis_create_instance_sync.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_delete_instance_async.py => redis_v1_generated_cloud_redis_delete_instance_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_delete_instance_sync.py => redis_v1_generated_cloud_redis_delete_instance_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_export_instance_async.py => 
redis_v1_generated_cloud_redis_export_instance_async.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_export_instance_sync.py => redis_v1_generated_cloud_redis_export_instance_sync.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_failover_instance_async.py => redis_v1_generated_cloud_redis_failover_instance_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_failover_instance_sync.py => redis_v1_generated_cloud_redis_failover_instance_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_get_instance_async.py => redis_v1_generated_cloud_redis_get_instance_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_get_instance_sync.py => redis_v1_generated_cloud_redis_get_instance_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_import_instance_async.py => redis_v1_generated_cloud_redis_import_instance_async.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_import_instance_sync.py => redis_v1_generated_cloud_redis_import_instance_sync.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_list_instances_async.py => redis_v1_generated_cloud_redis_list_instances_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_list_instances_sync.py => 
redis_v1_generated_cloud_redis_list_instances_sync.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_update_instance_async.py => redis_v1_generated_cloud_redis_update_instance_async.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_update_instance_sync.py => redis_v1_generated_cloud_redis_update_instance_sync.py} (92%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py => redis_v1_generated_cloud_redis_upgrade_instance_async.py} (91%) rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py => redis_v1_generated_cloud_redis_upgrade_instance_sync.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_list_resources_async.py => mollusca_v1_generated_snippets_list_resources_async.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_list_resources_sync.py => mollusca_v1_generated_snippets_list_resources_sync.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py => mollusca_v1_generated_snippets_method_bidi_streaming_async.py} (92%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py => mollusca_v1_generated_snippets_method_bidi_streaming_sync.py} (92%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py => mollusca_v1_generated_snippets_method_lro_signatures_async.py} (91%) rename 
packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py => mollusca_v1_generated_snippets_method_lro_signatures_sync.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py => mollusca_v1_generated_snippets_method_one_signature_async.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py => mollusca_v1_generated_snippets_method_one_signature_sync.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py => mollusca_v1_generated_snippets_method_server_streaming_async.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py => mollusca_v1_generated_snippets_method_server_streaming_sync.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_one_of_method_async.py => mollusca_v1_generated_snippets_one_of_method_async.py} (91%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py => mollusca_v1_generated_snippets_one_of_method_required_field_async.py} (89%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py => mollusca_v1_generated_snippets_one_of_method_required_field_sync.py} (89%) rename packages/gapic-generator/tests/snippetgen/goldens/{mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py => mollusca_v1_generated_snippets_one_of_method_sync.py} (91%) delete mode 100644 packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py 
b/packages/gapic-generator/gapic/samplegen/samplegen.py index b71dd8b52976..7d3fd98eacbb 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -996,12 +996,13 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A for service_name, service in gapic_metadata.services.items(): api_short_name = api_schema.services[f"{api_schema.naming.proto_package}.{service_name}"].shortname + api_version = api_schema.naming.version for transport, client in service.clients.items(): transport_type = "async" if transport == api.TRANSPORT_GRPC_ASYNC else "sync" for rpc_name, method_list in client.rpcs.items(): # Region Tag Format: - # [{START|END} ${apishortname}_generated_${api}_${apiVersion}_${serviceName}_${rpcName}_{sync|async}_${overloadDisambiguation}] - region_tag = f"{api_short_name}_generated_{api_schema.naming.versioned_module_name}_{service_name}_{rpc_name}_{transport_type}" + # [{START|END} ${apishortname}_${apiVersion}_generated_${serviceName}_${rpcName}_{sync|async}] + region_tag = f"{api_short_name}_{api_version}_generated_{service_name}_{rpc_name}_{transport_type}" spec = { "rpc": rpc_name, "transport": transport, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index a5d56bf2cbc9..8f0644233143 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async] +# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] from google.cloud import asset_v1 @@ -45,4 +45,4 @@ async def sample_analyze_iam_policy(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async] +# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index 623137191de1..adaeb096c445 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START 
cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async] +# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] from google.cloud import asset_v1 @@ -53,4 +53,4 @@ async def sample_analyze_iam_policy_longrunning(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async] +# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index 17a51630b690..490f82262516 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync] +# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] from google.cloud import asset_v1 @@ -53,4 +53,4 @@ def sample_analyze_iam_policy_longrunning(): # Handle the response print(response) -# [END 
cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync] +# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index ddf13b735780..1fb3097ad575 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync] +# [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] from google.cloud import asset_v1 @@ -45,4 +45,4 @@ def sample_analyze_iam_policy(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync] +# [END cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py similarity index 89% 
rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index b84841a0d8cb..80b3d45858dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async] +# [START cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ async def sample_batch_get_assets_history(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async] +# [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index df9319427adf..baf15f6d4d8c 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync] +# [START cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ def sample_batch_get_assets_history(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync] +# [END cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index 9894a01dca1a..2a1f800722a1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_CreateFeed_async] +# [START cloudasset_v1_generated_AssetService_CreateFeed_async] 
from google.cloud import asset_v1 @@ -47,4 +47,4 @@ async def sample_create_feed(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_CreateFeed_async] +# [END cloudasset_v1_generated_AssetService_CreateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index 5efaf5e1b12e..880514078cea 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_CreateFeed_sync] +# [START cloudasset_v1_generated_AssetService_CreateFeed_sync] from google.cloud import asset_v1 @@ -47,4 +47,4 @@ def sample_create_feed(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_CreateFeed_sync] +# [END cloudasset_v1_generated_AssetService_CreateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py similarity index 90% 
rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py index 3af184162064..e32050772beb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_DeleteFeed_async] +# [START cloudasset_v1_generated_AssetService_DeleteFeed_async] from google.cloud import asset_v1 @@ -40,4 +40,4 @@ async def sample_delete_feed(): await client.delete_feed(request=request) -# [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_async] +# [END cloudasset_v1_generated_AssetService_DeleteFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py index f8e462e5899e..d592f05e075f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync] +# [START cloudasset_v1_generated_AssetService_DeleteFeed_sync] from google.cloud import asset_v1 @@ -40,4 +40,4 @@ def sample_delete_feed(): client.delete_feed(request=request) -# [END cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync] +# [END cloudasset_v1_generated_AssetService_DeleteFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index e9cd14623189..b544fe05cca4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ExportAssets_async] +# [START cloudasset_v1_generated_AssetService_ExportAssets_async] from google.cloud import asset_v1 @@ -50,4 +50,4 @@ async def sample_export_assets(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_ExportAssets_async] +# [END 
cloudasset_v1_generated_AssetService_ExportAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 8a1a4fffb10f..9092f49d1f24 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ExportAssets_sync] +# [START cloudasset_v1_generated_AssetService_ExportAssets_sync] from google.cloud import asset_v1 @@ -50,4 +50,4 @@ def sample_export_assets(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_ExportAssets_sync] +# [END cloudasset_v1_generated_AssetService_ExportAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py rename to 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index 7ce97094b421..99becdfc8506 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_GetFeed_async] +# [START cloudasset_v1_generated_AssetService_GetFeed_async] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ async def sample_get_feed(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_GetFeed_async] +# [END cloudasset_v1_generated_AssetService_GetFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 12f433dc1bab..139e812cd8d0 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_GetFeed_sync] +# 
[START cloudasset_v1_generated_AssetService_GetFeed_sync] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ def sample_get_feed(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_GetFeed_sync] +# [END cloudasset_v1_generated_AssetService_GetFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index 67b2c65501a9..e460c974c003 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ListAssets_async] +# [START cloudasset_v1_generated_AssetService_ListAssets_async] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ async def sample_list_assets(): async for response in page_result: print(response) -# [END cloudasset_generated_asset_v1_AssetService_ListAssets_async] +# [END cloudasset_v1_generated_AssetService_ListAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index 212999ece715..eee88ec49225 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ListAssets_sync] +# [START cloudasset_v1_generated_AssetService_ListAssets_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_list_assets(): for response in page_result: print(response) -# [END cloudasset_generated_asset_v1_AssetService_ListAssets_sync] +# [END cloudasset_v1_generated_AssetService_ListAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index 82031eb9a334..d109f0a053a0 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ListFeeds_async] +# [START cloudasset_v1_generated_AssetService_ListFeeds_async] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ async def sample_list_feeds(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_ListFeeds_async] +# [END cloudasset_v1_generated_AssetService_ListFeeds_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index dc811344f52a..faec88fedabe 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_ListFeeds_sync] +# [START cloudasset_v1_generated_AssetService_ListFeeds_sync] from google.cloud import asset_v1 @@ -42,4 +42,4 @@ def sample_list_feeds(): # Handle the 
response print(response) -# [END cloudasset_generated_asset_v1_AssetService_ListFeeds_sync] +# [END cloudasset_v1_generated_AssetService_ListFeeds_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index 610d8d6d419e..553ad8d7f4fc 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async] +# [START cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ async def sample_search_all_iam_policies(): async for response in page_result: print(response) -# [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async] +# [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index d73fcfd4ce41..d97ee0d08054 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync] +# [START cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_search_all_iam_policies(): for response in page_result: print(response) -# [END cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync] +# [END cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py rename to 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index 7d635dce5280..7a2736b9eb30 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_SearchAllResources_async] +# [START cloudasset_v1_generated_AssetService_SearchAllResources_async] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ async def sample_search_all_resources(): async for response in page_result: print(response) -# [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_async] +# [END cloudasset_v1_generated_AssetService_SearchAllResources_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index 061437752016..5021d6586d31 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync] +# [START cloudasset_v1_generated_AssetService_SearchAllResources_sync] from google.cloud import asset_v1 @@ -43,4 +43,4 @@ def sample_search_all_resources(): for response in page_result: print(response) -# [END cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync] +# [END cloudasset_v1_generated_AssetService_SearchAllResources_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index 6f1d9b65f2a7..65a6d688f282 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_UpdateFeed_async] +# [START cloudasset_v1_generated_AssetService_UpdateFeed_async] from google.cloud import asset_v1 @@ -45,4 +45,4 @@ async def sample_update_feed(): # Handle the response print(response) -# [END 
cloudasset_generated_asset_v1_AssetService_UpdateFeed_async] +# [END cloudasset_v1_generated_AssetService_UpdateFeed_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index 9d99cb0070fb..467c81dbb9c5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_generated_asset_v1_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-asset -# [START cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync] +# [START cloudasset_v1_generated_AssetService_UpdateFeed_sync] from google.cloud import asset_v1 @@ -45,4 +45,4 @@ def sample_update_feed(): # Handle the response print(response) -# [END cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync] +# [END cloudasset_v1_generated_AssetService_UpdateFeed_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json index 6a052eb1f941..c3a46b5f47b4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json @@ -10,8 +10,8 @@ "shortName": "AnalyzeIamPolicyLongrunning" } }, - "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_async", + "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async", "segments": [ { "end": 55, @@ -54,8 +54,8 @@ "shortName": "AnalyzeIamPolicyLongrunning" } }, - "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_longrunning_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicyLongrunning_sync", + "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync", "segments": [ { "end": 55, @@ -99,8 +99,8 @@ "shortName": "AnalyzeIamPolicy" } }, - "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_async", + "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async", "segments": [ { "end": 47, @@ -143,8 +143,8 @@ "shortName": "AnalyzeIamPolicy" } }, - "file": "cloudasset_generated_asset_v1_asset_service_analyze_iam_policy_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_AnalyzeIamPolicy_sync", + "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync", "segments": [ { "end": 47, @@ -188,8 +188,8 @@ "shortName": "BatchGetAssetsHistory" } }, - "file": 
"cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_async", + "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", "segments": [ { "end": 44, @@ -232,8 +232,8 @@ "shortName": "BatchGetAssetsHistory" } }, - "file": "cloudasset_generated_asset_v1_asset_service_batch_get_assets_history_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_BatchGetAssetsHistory_sync", + "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", "segments": [ { "end": 44, @@ -277,8 +277,8 @@ "shortName": "CreateFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_create_feed_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_CreateFeed_async", + "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", "segments": [ { "end": 49, @@ -321,8 +321,8 @@ "shortName": "CreateFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_create_feed_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_CreateFeed_sync", + "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", "segments": [ { "end": 49, @@ -366,8 +366,8 @@ "shortName": "DeleteFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_delete_feed_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_DeleteFeed_async", + "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", "segments": [ { "end": 42, @@ -408,8 +408,8 @@ "shortName": "DeleteFeed" } }, - "file": 
"cloudasset_generated_asset_v1_asset_service_delete_feed_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_DeleteFeed_sync", + "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", "segments": [ { "end": 42, @@ -451,8 +451,8 @@ "shortName": "ExportAssets" } }, - "file": "cloudasset_generated_asset_v1_asset_service_export_assets_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_ExportAssets_async", + "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", "segments": [ { "end": 52, @@ -495,8 +495,8 @@ "shortName": "ExportAssets" } }, - "file": "cloudasset_generated_asset_v1_asset_service_export_assets_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_ExportAssets_sync", + "file": "cloudasset_v1_generated_asset_service_export_assets_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", "segments": [ { "end": 52, @@ -540,8 +540,8 @@ "shortName": "GetFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_get_feed_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_GetFeed_async", + "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", "segments": [ { "end": 44, @@ -584,8 +584,8 @@ "shortName": "GetFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_get_feed_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_GetFeed_sync", + "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", "segments": [ { "end": 44, @@ -629,8 +629,8 @@ "shortName": "ListAssets" } }, - "file": "cloudasset_generated_asset_v1_asset_service_list_assets_async.py", - "regionTag": 
"cloudasset_generated_asset_v1_AssetService_ListAssets_async", + "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", "segments": [ { "end": 45, @@ -673,8 +673,8 @@ "shortName": "ListAssets" } }, - "file": "cloudasset_generated_asset_v1_asset_service_list_assets_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_ListAssets_sync", + "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", "segments": [ { "end": 45, @@ -718,8 +718,8 @@ "shortName": "ListFeeds" } }, - "file": "cloudasset_generated_asset_v1_asset_service_list_feeds_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_ListFeeds_async", + "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", "segments": [ { "end": 44, @@ -762,8 +762,8 @@ "shortName": "ListFeeds" } }, - "file": "cloudasset_generated_asset_v1_asset_service_list_feeds_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_ListFeeds_sync", + "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", "segments": [ { "end": 44, @@ -807,8 +807,8 @@ "shortName": "SearchAllIamPolicies" } }, - "file": "cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_async", + "file": "cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async", "segments": [ { "end": 45, @@ -851,8 +851,8 @@ "shortName": "SearchAllIamPolicies" } }, - "file": "cloudasset_generated_asset_v1_asset_service_search_all_iam_policies_sync.py", - "regionTag": 
"cloudasset_generated_asset_v1_AssetService_SearchAllIamPolicies_sync", + "file": "cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync", "segments": [ { "end": 45, @@ -896,8 +896,8 @@ "shortName": "SearchAllResources" } }, - "file": "cloudasset_generated_asset_v1_asset_service_search_all_resources_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllResources_async", + "file": "cloudasset_v1_generated_asset_service_search_all_resources_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_async", "segments": [ { "end": 45, @@ -940,8 +940,8 @@ "shortName": "SearchAllResources" } }, - "file": "cloudasset_generated_asset_v1_asset_service_search_all_resources_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_SearchAllResources_sync", + "file": "cloudasset_v1_generated_asset_service_search_all_resources_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_sync", "segments": [ { "end": 45, @@ -985,8 +985,8 @@ "shortName": "UpdateFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_update_feed_async.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_UpdateFeed_async", + "file": "cloudasset_v1_generated_asset_service_update_feed_async.py", + "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_async", "segments": [ { "end": 47, @@ -1029,8 +1029,8 @@ "shortName": "UpdateFeed" } }, - "file": "cloudasset_generated_asset_v1_asset_service_update_feed_sync.py", - "regionTag": "cloudasset_generated_asset_v1_AssetService_UpdateFeed_sync", + "file": "cloudasset_v1_generated_asset_service_update_feed_sync.py", + "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_sync", "segments": [ { "end": 47, diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index 275d3046ce0d..74ca09bac30d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async] +# [START iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ async def sample_generate_access_token(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async] +# [END iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index db976fd8a51f..01d48e2d2700 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync] +# [START iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_generate_access_token(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync] +# [END iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py similarity index 88% rename from 
packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py index ca03fcee62c8..44fadf728ff5 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async] +# [START iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ async def sample_generate_id_token(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async] +# [END iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py index df64df2aced2..28870a6edd37 
100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync] +# [START iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_generate_id_token(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync] +# [END iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py index b4d89faa1f34..f3695380f55f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START 
iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async] +# [START iamcredentials_v1_generated_IAMCredentials_SignBlob_async] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ async def sample_sign_blob(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async] +# [END iamcredentials_v1_generated_IAMCredentials_SignBlob_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py index 0f874a6a8594..17ef6e0105d1 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] +# [START iamcredentials_v1_generated_IAMCredentials_SignBlob_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_sign_blob(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] +# [END iamcredentials_v1_generated_IAMCredentials_SignBlob_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py index 677d1cc5bc4a..a1ba9ec8fde7 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async] +# [START iamcredentials_v1_generated_IAMCredentials_SignJwt_async] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ async def sample_sign_jwt(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async] +# [END iamcredentials_v1_generated_IAMCredentials_SignJwt_async] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py similarity index 89% rename from 
packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py index a0f3d1df8e10..44dffaf9d2d0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-iam-credentials -# [START iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync] +# [START iamcredentials_v1_generated_IAMCredentials_SignJwt_sync] from google.iam import credentials_v1 @@ -43,4 +43,4 @@ def sample_sign_jwt(): # Handle the response print(response) -# [END iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync] +# [END iamcredentials_v1_generated_IAMCredentials_SignJwt_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json index 3b1a110447a6..bfe0c2f97c74 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json @@ -10,8 +10,8 @@ "shortName": "GenerateAccessToken" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_async.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_async", + "file": 
"iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async", "segments": [ { "end": 45, @@ -54,8 +54,8 @@ "shortName": "GenerateAccessToken" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_access_token_sync.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateAccessToken_sync", + "file": "iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync", "segments": [ { "end": 45, @@ -99,8 +99,8 @@ "shortName": "GenerateIdToken" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_async.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_async", + "file": "iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async", "segments": [ { "end": 45, @@ -143,8 +143,8 @@ "shortName": "GenerateIdToken" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_generate_id_token_sync.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_GenerateIdToken_sync", + "file": "iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync", "segments": [ { "end": 45, @@ -188,8 +188,8 @@ "shortName": "SignBlob" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_async.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_async", + "file": "iamcredentials_v1_generated_iam_credentials_sign_blob_async.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignBlob_async", "segments": [ { "end": 45, @@ -232,8 +232,8 @@ "shortName": "SignBlob" } }, - "file": 
"iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync", + "file": "iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignBlob_sync", "segments": [ { "end": 45, @@ -277,8 +277,8 @@ "shortName": "SignJwt" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_async.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_async", + "file": "iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignJwt_async", "segments": [ { "end": 45, @@ -321,8 +321,8 @@ "shortName": "SignJwt" } }, - "file": "iamcredentials_generated_credentials_v1_iam_credentials_sign_jwt_sync.py", - "regionTag": "iamcredentials_generated_credentials_v1_IAMCredentials_SignJwt_sync", + "file": "iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py", + "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignJwt_sync", "segments": [ { "end": 45, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 1ea807e1ad05..2a5057cf46a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_async.py 
+++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async] +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_create_bucket(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async] +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index f6a5a79199cb..cc1b100c1267 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync] +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_create_bucket(): # Handle the response print(response) -# [END 
logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync] +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index a019ffdfa82a..2ad228aad11f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async] +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_async] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ async def sample_create_exclusion(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async] +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py similarity index 90% rename 
from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index e85600e9a04c..4e749f6032be 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync] +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ def sample_create_exclusion(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync] +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 9cd9c0ff71e3..73ff5dfd71b7 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateSink_async] +# [START logging_v2_generated_ConfigServiceV2_CreateSink_async] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ async def sample_create_sink(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_async] +# [END logging_v2_generated_ConfigServiceV2_CreateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index fdd970694e18..63dfef2f236c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync] +# [START logging_v2_generated_ConfigServiceV2_CreateSink_sync] from google.cloud import logging_v2 
@@ -47,4 +47,4 @@ def sample_create_sink(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync] +# [END logging_v2_generated_ConfigServiceV2_CreateSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index e3cd7f8519e2..059e9ecdd2db 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateView_async] +# [START logging_v2_generated_ConfigServiceV2_CreateView_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_create_view(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateView_async] +# [END logging_v2_generated_ConfigServiceV2_CreateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py similarity 
index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 1da4d815a515..8b83b9246933 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_CreateView_sync] +# [START logging_v2_generated_ConfigServiceV2_CreateView_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_create_view(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_CreateView_sync] +# [END logging_v2_generated_ConfigServiceV2_CreateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index fd625364a199..918ecb9b6680 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async] +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_delete_bucket(): await client.delete_bucket(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async] +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index 32a98c84974c..85cd5be4b4a6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync] +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ def sample_delete_bucket(): client.delete_bucket(request=request) -# [END 
logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync] +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 4793d48bab33..4ec09c9ef4ff 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async] +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_delete_exclusion(): await client.delete_exclusion(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async] +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py similarity index 89% 
rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index a4dbefa4ff21..9e53098c7d36 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync] +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ def sample_delete_exclusion(): client.delete_exclusion(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync] +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index bcb30f6c1d63..dc7696efda5f 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async] +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_delete_sink(): await client.delete_sink(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async] +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index 0b79d20ce458..34b211765312 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync] +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync] from google.cloud import 
logging_v2 @@ -40,4 +40,4 @@ def sample_delete_sink(): client.delete_sink(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync] +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index 39f4b320d928..aa829578ce21 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteView_async] +# [START logging_v2_generated_ConfigServiceV2_DeleteView_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_delete_view(): await client.delete_view(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_async] +# [END logging_v2_generated_ConfigServiceV2_DeleteView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 967a0e866df7..d7cbca49be05 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync] +# [START logging_v2_generated_ConfigServiceV2_DeleteView_sync] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ def sample_delete_view(): client.delete_view(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync] +# [END logging_v2_generated_ConfigServiceV2_DeleteView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index ba6a4a4ba08e..d1db784af90e 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetBucket_async] +# [START logging_v2_generated_ConfigServiceV2_GetBucket_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_get_bucket(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_async] +# [END logging_v2_generated_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 92a01864ad21..c9adebb0e12b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync] +# [START logging_v2_generated_ConfigServiceV2_GetBucket_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 
@@ def sample_get_bucket(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync] +# [END logging_v2_generated_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 521315b0c3b5..cd19694dba61 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async] +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_get_cmek_settings(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async] +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 5f698d823b07..9c49e0da9c36 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync] +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_get_cmek_settings(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync] +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 2af03e6b1d1a..1833917f1c47 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async] +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_get_exclusion(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async] +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index 453029905f1f..aa362876bfbe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -23,7 +23,7 @@ 
# python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync] +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_get_exclusion(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync] +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 1d2cbe30a4f8..199a5058ae95 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetSink_async] +# [START logging_v2_generated_ConfigServiceV2_GetSink_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_get_sink(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetSink_async] +# [END logging_v2_generated_ConfigServiceV2_GetSink_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 596f9c2342c5..67a772129dc7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetSink_sync] +# [START logging_v2_generated_ConfigServiceV2_GetSink_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_get_sink(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetSink_sync] +# [END logging_v2_generated_ConfigServiceV2_GetSink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 15becab59450..5a15bac763e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_GetView_async] +# [START logging_v2_generated_ConfigServiceV2_GetView_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_get_view(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetView_async] +# [END logging_v2_generated_ConfigServiceV2_GetView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index 188b2b80c768..983f7412fb3e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START 
logging_generated_logging_v2_ConfigServiceV2_GetView_sync] +# [START logging_v2_generated_ConfigServiceV2_GetView_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_get_view(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_GetView_sync] +# [END logging_v2_generated_ConfigServiceV2_GetView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 80a8fe9f6d17..14aeafc213d3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async] +# [START logging_v2_generated_ConfigServiceV2_ListBuckets_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_buckets(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async] +# [END logging_v2_generated_ConfigServiceV2_ListBuckets_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 39943150e817..0238fd074c8c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync] +# [START logging_v2_generated_ConfigServiceV2_ListBuckets_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_list_buckets(): for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync] +# [END logging_v2_generated_ConfigServiceV2_ListBuckets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index 82f229bcad9b..d28a1b38dff0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async] +# [START logging_v2_generated_ConfigServiceV2_ListExclusions_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_exclusions(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async] +# [END logging_v2_generated_ConfigServiceV2_ListExclusions_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 888e6f2e3f05..ab1a8a3197bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync] +# [START logging_v2_generated_ConfigServiceV2_ListExclusions_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_list_exclusions(): for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync] +# [END logging_v2_generated_ConfigServiceV2_ListExclusions_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index e5a8bf2f723a..07f7dbb507fb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListSinks_async] +# [START logging_v2_generated_ConfigServiceV2_ListSinks_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_sinks(): async for response in page_result: print(response) -# [END 
logging_generated_logging_v2_ConfigServiceV2_ListSinks_async] +# [END logging_v2_generated_ConfigServiceV2_ListSinks_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index f5286da0a53f..286e67df7c5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync] +# [START logging_v2_generated_ConfigServiceV2_ListSinks_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_list_sinks(): for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync] +# [END logging_v2_generated_ConfigServiceV2_ListSinks_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py similarity index 90% rename from 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index b089db3c89e9..0cca13d1d1be 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListViews_async] +# [START logging_v2_generated_ConfigServiceV2_ListViews_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_views(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListViews_async] +# [END logging_v2_generated_ConfigServiceV2_ListViews_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index a02d5b03b3bf..8fdaa7e074e7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_list_views_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_ListViews_sync] +# [START logging_v2_generated_ConfigServiceV2_ListViews_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_list_views(): for response in page_result: print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_ListViews_sync] +# [END logging_v2_generated_ConfigServiceV2_ListViews_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index cbc048ccea92..7ebb1b4ee576 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async] +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_undelete_bucket(): await client.undelete_bucket(request=request) -# [END 
logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async] +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 43e9cbb03277..1bce802721ec 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync] +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ def sample_undelete_bucket(): client.undelete_bucket(request=request) -# [END logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync] +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py similarity index 90% rename from 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index e1781886c601..079ade835903 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async] +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_update_bucket(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async] +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 6ba5cd29de25..f7d62c0be9cf 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_bucket_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync] +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_update_bucket(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync] +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 318d33d234f7..4117f50db85e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async] +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_update_cmek_settings(): # Handle the 
response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async] +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 02145d1ede6a..af42078fbef5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync] +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_update_cmek_settings(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync] +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 2301627f27e5..0390fbf11cc0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async] +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ async def sample_update_exclusion(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async] +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 5bfa0c017b70..23dcd5f9197c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync] +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ def sample_update_exclusion(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync] +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index 2983000ce4e7..94b996e2666a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -23,7 
+23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async] +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_async] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ async def sample_update_sink(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async] +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index fc06b2f07bb6..280caac95f1c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync] +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_sync] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ def sample_update_sink(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync] +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 882fafea62e8..35b593c28f35 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateView_async] +# [START logging_v2_generated_ConfigServiceV2_UpdateView_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_update_view(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_async] +# [END logging_v2_generated_ConfigServiceV2_UpdateView_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index be01a3fe2218..8ee7abf2c90a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync] +# [START logging_v2_generated_ConfigServiceV2_UpdateView_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_update_view(): # Handle the response print(response) -# [END logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync] +# [END logging_v2_generated_ConfigServiceV2_UpdateView_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 40306083aa2a..36fde8f53926 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -23,7 +23,7 @@ # python3 -m pip install 
google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async] +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_delete_log(): await client.delete_log(request=request) -# [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async] +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index ce8a5d95ad69..29df813e7082 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync] +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_sync] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ def sample_delete_log(): client.delete_log(request=request) -# [END logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync] +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 5493f7d5ea52..58dadb56c27c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async] +# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_log_entries(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async] +# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py similarity index 90% rename from 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 2a08cb42a85d..034028cc870c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync] +# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_list_log_entries(): for response in page_result: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync] +# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 9cd5472c4c8e..6a39828b35a3 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListLogs_async] +# [START logging_v2_generated_LoggingServiceV2_ListLogs_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_logs(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_async] +# [END logging_v2_generated_LoggingServiceV2_ListLogs_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index cda949536987..e0f51877e058 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync] +# [START logging_v2_generated_LoggingServiceV2_ListLogs_sync] from google.cloud import logging_v2 @@ 
-43,4 +43,4 @@ def sample_list_logs(): for response in page_result: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync] +# [END logging_v2_generated_LoggingServiceV2_ListLogs_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 63b4fbcff815..5bc4722f7d57 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async] +# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_list_monitored_resource_descriptors(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async] +# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index c1789fb8e226..bd16bdee7b42 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] +# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_list_monitored_resource_descriptors(): for response in page_result: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] +# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 2053a8350f50..4269591903f8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async] +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_async] from google.cloud import logging_v2 @@ -53,4 +53,4 @@ def request_generator(): async for response in stream: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async] +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 070e8d0abb15..8a9436ed6a1d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync] +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] from google.cloud import logging_v2 @@ -53,4 +53,4 @@ def request_generator(): for response in stream: print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync] +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 4cff3bca7439..b91f5b262a12 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async] +# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] from google.cloud import logging_v2 @@ -45,4 +45,4 @@ async def sample_write_log_entries(): # Handle the response print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async] +# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index eb77c2e21f0e..af76795921b5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync] +# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] from google.cloud import logging_v2 @@ -45,4 +45,4 @@ def sample_write_log_entries(): # Handle the 
response print(response) -# [END logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync] +# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 1175b4f0e827..7f195d95bd78 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async] +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ async def sample_create_log_metric(): # Handle the response print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async] +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index a228c92efa6f..ada74b622f6e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync] +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ def sample_create_log_metric(): # Handle the response print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync] +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py rename to 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index da864c3e2d90..f63b64615a5b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async] +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ async def sample_delete_log_metric(): await client.delete_log_metric(request=request) -# [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async] +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index d6cdc96a3a48..625137321833 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync] +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] from google.cloud import logging_v2 @@ -40,4 +40,4 @@ def sample_delete_log_metric(): client.delete_log_metric(request=request) -# [END logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync] +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 77352b3dda73..00b685b76739 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async] +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_async] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ async def sample_get_log_metric(): # Handle the response print(response) 
-# [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async] +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index cf49377ad04e..03121500c323 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync] +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] from google.cloud import logging_v2 @@ -42,4 +42,4 @@ def sample_get_log_metric(): # Handle the response print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync] +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py similarity index 90% rename from 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 6d270523716b..ccaf104c7c8f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async] +# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_async] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ async def sample_list_log_metrics(): async for response in page_result: print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async] +# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index d4fe00402286..89ffe596ce8d 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync] +# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] from google.cloud import logging_v2 @@ -43,4 +43,4 @@ def sample_list_log_metrics(): for response in page_result: print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync] +# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 55bec2442ced..a9756d9827b5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async] +# [START 
logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ async def sample_update_log_metric(): # Handle the response print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async] +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 0cb8bc9054a5..a0addeec29d5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync] +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] from google.cloud import logging_v2 @@ -47,4 +47,4 @@ def sample_update_log_metric(): # Handle the response print(response) -# [END logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync] +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json index 099739a006c2..3cc40b6fa584 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -10,8 +10,8 @@ "shortName": "CreateBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_bucket_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateBucket_async", + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { "end": 45, @@ -54,8 +54,8 @@ "shortName": "CreateBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_bucket_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateBucket_sync", + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { "end": 45, @@ -99,8 +99,8 @@ "shortName": "CreateExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_exclusion_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_async", + "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { "end": 49, @@ -143,8 +143,8 @@ "shortName": "CreateExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_exclusion_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateExclusion_sync", + "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { "end": 49, 
@@ -188,8 +188,8 @@ "shortName": "CreateSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_sink_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateSink_async", + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", "segments": [ { "end": 49, @@ -232,8 +232,8 @@ "shortName": "CreateSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_sink_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateSink_sync", + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", "segments": [ { "end": 49, @@ -277,8 +277,8 @@ "shortName": "CreateView" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_view_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateView_async", + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", "segments": [ { "end": 45, @@ -321,8 +321,8 @@ "shortName": "CreateView" } }, - "file": "logging_generated_logging_v2_config_service_v2_create_view_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_CreateView_sync", + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", "segments": [ { "end": 45, @@ -366,8 +366,8 @@ "shortName": "DeleteBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_delete_bucket_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_async", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { "end": 42, @@ -408,8 +408,8 @@ "shortName": "DeleteBucket" } }, - "file": 
"logging_generated_logging_v2_config_service_v2_delete_bucket_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteBucket_sync", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { "end": 42, @@ -451,8 +451,8 @@ "shortName": "DeleteExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_delete_exclusion_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_async", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", "segments": [ { "end": 42, @@ -493,8 +493,8 @@ "shortName": "DeleteExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_delete_exclusion_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteExclusion_sync", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { "end": 42, @@ -536,8 +536,8 @@ "shortName": "DeleteSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_delete_sink_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteSink_async", + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { "end": 42, @@ -578,8 +578,8 @@ "shortName": "DeleteSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_delete_sink_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteSink_sync", + "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { "end": 42, @@ -621,8 +621,8 @@ "shortName": "DeleteView" } }, - "file": 
"logging_generated_logging_v2_config_service_v2_delete_view_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteView_async", + "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { "end": 42, @@ -663,8 +663,8 @@ "shortName": "DeleteView" } }, - "file": "logging_generated_logging_v2_config_service_v2_delete_view_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_DeleteView_sync", + "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { "end": 42, @@ -706,8 +706,8 @@ "shortName": "GetBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_bucket_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetBucket_async", + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { "end": 44, @@ -750,8 +750,8 @@ "shortName": "GetBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_bucket_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetBucket_sync", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { "end": 44, @@ -795,8 +795,8 @@ "shortName": "GetCmekSettings" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_cmek_settings_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_async", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { "end": 44, @@ -839,8 +839,8 @@ "shortName": "GetCmekSettings" } }, - "file": 
"logging_generated_logging_v2_config_service_v2_get_cmek_settings_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetCmekSettings_sync", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { "end": 44, @@ -884,8 +884,8 @@ "shortName": "GetExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_exclusion_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetExclusion_async", + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { "end": 44, @@ -928,8 +928,8 @@ "shortName": "GetExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_exclusion_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetExclusion_sync", + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { "end": 44, @@ -973,8 +973,8 @@ "shortName": "GetSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_sink_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetSink_async", + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { "end": 44, @@ -1017,8 +1017,8 @@ "shortName": "GetSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_sink_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetSink_sync", + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { "end": 44, @@ -1062,8 +1062,8 @@ "shortName": "GetView" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_view_async.py", - 
"regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetView_async", + "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { "end": 44, @@ -1106,8 +1106,8 @@ "shortName": "GetView" } }, - "file": "logging_generated_logging_v2_config_service_v2_get_view_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_GetView_sync", + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { "end": 44, @@ -1151,8 +1151,8 @@ "shortName": "ListBuckets" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_buckets_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_async", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { "end": 45, @@ -1195,8 +1195,8 @@ "shortName": "ListBuckets" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_buckets_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListBuckets_sync", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { "end": 45, @@ -1240,8 +1240,8 @@ "shortName": "ListExclusions" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_exclusions_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListExclusions_async", + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { "end": 45, @@ -1284,8 +1284,8 @@ "shortName": "ListExclusions" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_exclusions_sync.py", - "regionTag": 
"logging_generated_logging_v2_ConfigServiceV2_ListExclusions_sync", + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { "end": 45, @@ -1329,8 +1329,8 @@ "shortName": "ListSinks" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_sinks_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_async", + "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { "end": 45, @@ -1373,8 +1373,8 @@ "shortName": "ListSinks" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_sinks_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListSinks_sync", + "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { "end": 45, @@ -1418,8 +1418,8 @@ "shortName": "ListViews" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_views_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListViews_async", + "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { "end": 45, @@ -1462,8 +1462,8 @@ "shortName": "ListViews" } }, - "file": "logging_generated_logging_v2_config_service_v2_list_views_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_ListViews_sync", + "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { "end": 45, @@ -1507,8 +1507,8 @@ "shortName": "UndeleteBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_undelete_bucket_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_async", + "file": 
"logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { "end": 42, @@ -1549,8 +1549,8 @@ "shortName": "UndeleteBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_undelete_bucket_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UndeleteBucket_sync", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { "end": 42, @@ -1592,8 +1592,8 @@ "shortName": "UpdateBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_bucket_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_async", + "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { "end": 44, @@ -1636,8 +1636,8 @@ "shortName": "UpdateBucket" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_bucket_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateBucket_sync", + "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { "end": 44, @@ -1681,8 +1681,8 @@ "shortName": "UpdateCmekSettings" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_cmek_settings_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_async", + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", "segments": [ { "end": 44, @@ -1725,8 +1725,8 @@ "shortName": "UpdateCmekSettings" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_cmek_settings_sync.py", - "regionTag": 
"logging_generated_logging_v2_ConfigServiceV2_UpdateCmekSettings_sync", + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", "segments": [ { "end": 44, @@ -1770,8 +1770,8 @@ "shortName": "UpdateExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_exclusion_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_async", + "file": "logging_v2_generated_config_service_v2_update_exclusion_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", "segments": [ { "end": 49, @@ -1814,8 +1814,8 @@ "shortName": "UpdateExclusion" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_exclusion_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateExclusion_sync", + "file": "logging_v2_generated_config_service_v2_update_exclusion_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { "end": 49, @@ -1859,8 +1859,8 @@ "shortName": "UpdateSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_sink_async.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateSink_async", + "file": "logging_v2_generated_config_service_v2_update_sink_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", "segments": [ { "end": 49, @@ -1903,8 +1903,8 @@ "shortName": "UpdateSink" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_sink_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateSink_sync", + "file": "logging_v2_generated_config_service_v2_update_sink_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", "segments": [ { "end": 49, @@ -1948,8 +1948,8 @@ "shortName": "UpdateView" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_view_async.py", - "regionTag": 
"logging_generated_logging_v2_ConfigServiceV2_UpdateView_async", + "file": "logging_v2_generated_config_service_v2_update_view_async.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", "segments": [ { "end": 44, @@ -1992,8 +1992,8 @@ "shortName": "UpdateView" } }, - "file": "logging_generated_logging_v2_config_service_v2_update_view_sync.py", - "regionTag": "logging_generated_logging_v2_ConfigServiceV2_UpdateView_sync", + "file": "logging_v2_generated_config_service_v2_update_view_sync.py", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", "segments": [ { "end": 44, @@ -2037,8 +2037,8 @@ "shortName": "DeleteLog" } }, - "file": "logging_generated_logging_v2_logging_service_v2_delete_log_async.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_DeleteLog_async", + "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", "segments": [ { "end": 42, @@ -2079,8 +2079,8 @@ "shortName": "DeleteLog" } }, - "file": "logging_generated_logging_v2_logging_service_v2_delete_log_sync.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_DeleteLog_sync", + "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", "segments": [ { "end": 42, @@ -2122,8 +2122,8 @@ "shortName": "ListLogEntries" } }, - "file": "logging_generated_logging_v2_logging_service_v2_list_log_entries_async.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_async", + "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", "segments": [ { "end": 45, @@ -2166,8 +2166,8 @@ "shortName": "ListLogEntries" } }, - "file": "logging_generated_logging_v2_logging_service_v2_list_log_entries_sync.py", - "regionTag": 
"logging_generated_logging_v2_LoggingServiceV2_ListLogEntries_sync", + "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", "segments": [ { "end": 45, @@ -2211,8 +2211,8 @@ "shortName": "ListLogs" } }, - "file": "logging_generated_logging_v2_logging_service_v2_list_logs_async.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_async", + "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", "segments": [ { "end": 45, @@ -2255,8 +2255,8 @@ "shortName": "ListLogs" } }, - "file": "logging_generated_logging_v2_logging_service_v2_list_logs_sync.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListLogs_sync", + "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", "segments": [ { "end": 45, @@ -2300,8 +2300,8 @@ "shortName": "ListMonitoredResourceDescriptors" } }, - "file": "logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_async.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_async", + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { "end": 44, @@ -2344,8 +2344,8 @@ "shortName": "ListMonitoredResourceDescriptors" } }, - "file": "logging_generated_logging_v2_logging_service_v2_list_monitored_resource_descriptors_sync.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { 
"end": 44, @@ -2389,8 +2389,8 @@ "shortName": "TailLogEntries" } }, - "file": "logging_generated_logging_v2_logging_service_v2_tail_log_entries_async.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_async", + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", "segments": [ { "end": 55, @@ -2433,8 +2433,8 @@ "shortName": "TailLogEntries" } }, - "file": "logging_generated_logging_v2_logging_service_v2_tail_log_entries_sync.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_TailLogEntries_sync", + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", "segments": [ { "end": 55, @@ -2478,8 +2478,8 @@ "shortName": "WriteLogEntries" } }, - "file": "logging_generated_logging_v2_logging_service_v2_write_log_entries_async.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_async", + "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", "segments": [ { "end": 47, @@ -2522,8 +2522,8 @@ "shortName": "WriteLogEntries" } }, - "file": "logging_generated_logging_v2_logging_service_v2_write_log_entries_sync.py", - "regionTag": "logging_generated_logging_v2_LoggingServiceV2_WriteLogEntries_sync", + "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", "segments": [ { "end": 47, @@ -2567,8 +2567,8 @@ "shortName": "CreateLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_create_log_metric_async.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_async", + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py", + "regionTag": 
"logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", "segments": [ { "end": 49, @@ -2611,8 +2611,8 @@ "shortName": "CreateLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_create_log_metric_sync.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_CreateLogMetric_sync", + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { "end": 49, @@ -2656,8 +2656,8 @@ "shortName": "DeleteLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_delete_log_metric_async.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_async", + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { "end": 42, @@ -2698,8 +2698,8 @@ "shortName": "DeleteLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_delete_log_metric_sync.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_DeleteLogMetric_sync", + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { "end": 42, @@ -2741,8 +2741,8 @@ "shortName": "GetLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_get_log_metric_async.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_async", + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", "segments": [ { "end": 44, @@ -2785,8 +2785,8 @@ "shortName": "GetLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_get_log_metric_sync.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_GetLogMetric_sync", + "file": 
"logging_v2_generated_metrics_service_v2_get_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", "segments": [ { "end": 44, @@ -2830,8 +2830,8 @@ "shortName": "ListLogMetrics" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_list_log_metrics_async.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_async", + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", "segments": [ { "end": 45, @@ -2874,8 +2874,8 @@ "shortName": "ListLogMetrics" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_list_log_metrics_sync.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_ListLogMetrics_sync", + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { "end": 45, @@ -2919,8 +2919,8 @@ "shortName": "UpdateLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_update_log_metric_async.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_async", + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { "end": 49, @@ -2963,8 +2963,8 @@ "shortName": "UpdateLogMetric" } }, - "file": "logging_generated_logging_v2_metrics_service_v2_update_log_metric_sync.py", - "regionTag": "logging_generated_logging_v2_MetricsServiceV2_UpdateLogMetric_sync", + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { "end": 49, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 500ed1b479c0..28669bb756b5 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_CreateInstance_async] +# [START redis_v1_generated_CloudRedis_CreateInstance_async] from google.cloud import redis_v1 @@ -53,4 +53,4 @@ async def sample_create_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_CreateInstance_async] +# [END redis_v1_generated_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index 66fcf9e4ca48..1343e2a70e25 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_CreateInstance_sync] +# [START redis_v1_generated_CloudRedis_CreateInstance_sync] from google.cloud import redis_v1 @@ -53,4 +53,4 @@ def sample_create_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_CreateInstance_sync] +# [END redis_v1_generated_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 359aab4e5b0d..b04eaa2a823b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_DeleteInstance_async] +# [START redis_v1_generated_CloudRedis_DeleteInstance_async] from google.cloud import redis_v1 @@ -46,4 +46,4 @@ async def sample_delete_instance(): # Handle the response 
print(response) -# [END redis_generated_redis_v1_CloudRedis_DeleteInstance_async] +# [END redis_v1_generated_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index 1dbbc9a65871..fcf701aad286 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_DeleteInstance_sync] +# [START redis_v1_generated_CloudRedis_DeleteInstance_sync] from google.cloud import redis_v1 @@ -46,4 +46,4 @@ def sample_delete_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_DeleteInstance_sync] +# [END redis_v1_generated_CloudRedis_DeleteInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py similarity index 92% rename from 
packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index 8b0e87eaeed3..73da1cfb444e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ExportInstance_async] +# [START redis_v1_generated_CloudRedis_ExportInstance_async] from google.cloud import redis_v1 @@ -50,4 +50,4 @@ async def sample_export_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_ExportInstance_async] +# [END redis_v1_generated_CloudRedis_ExportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py index a33bbd295ed7..cc2b7119b6d3 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_export_instance_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ExportInstance_sync] +# [START redis_v1_generated_CloudRedis_ExportInstance_sync] from google.cloud import redis_v1 @@ -50,4 +50,4 @@ def sample_export_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_ExportInstance_sync] +# [END redis_v1_generated_CloudRedis_ExportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index c1e0c8c99a81..19fdde92bc6c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_FailoverInstance_async] +# [START redis_v1_generated_CloudRedis_FailoverInstance_async] from google.cloud import redis_v1 @@ -46,4 +46,4 @@ async def sample_failover_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_FailoverInstance_async] +# [END 
redis_v1_generated_CloudRedis_FailoverInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py index a6fa6dd166d5..ac5425144e49 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_FailoverInstance_sync] +# [START redis_v1_generated_CloudRedis_FailoverInstance_sync] from google.cloud import redis_v1 @@ -46,4 +46,4 @@ def sample_failover_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_FailoverInstance_sync] +# [END redis_v1_generated_CloudRedis_FailoverInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py rename to 
packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index bd446a23c874..94d4936cabc8 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_GetInstance_async] +# [START redis_v1_generated_CloudRedis_GetInstance_async] from google.cloud import redis_v1 @@ -42,4 +42,4 @@ async def sample_get_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_GetInstance_async] +# [END redis_v1_generated_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index abb06d38a31a..a62b3aba8a5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_GetInstance_sync] +# [START 
redis_v1_generated_CloudRedis_GetInstance_sync] from google.cloud import redis_v1 @@ -42,4 +42,4 @@ def sample_get_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_GetInstance_sync] +# [END redis_v1_generated_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index 3e5f7f479e67..be02f0c9a25b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ImportInstance_async] +# [START redis_v1_generated_CloudRedis_ImportInstance_async] from google.cloud import redis_v1 @@ -50,4 +50,4 @@ async def sample_import_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_ImportInstance_async] +# [END redis_v1_generated_CloudRedis_ImportInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py similarity 
index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py index 761227a26dbd..a00ebe83a74c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ImportInstance_sync] +# [START redis_v1_generated_CloudRedis_ImportInstance_sync] from google.cloud import redis_v1 @@ -50,4 +50,4 @@ def sample_import_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_ImportInstance_sync] +# [END redis_v1_generated_CloudRedis_ImportInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index c4962e994081..06fcc10321a2 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ListInstances_async] +# [START redis_v1_generated_CloudRedis_ListInstances_async] from google.cloud import redis_v1 @@ -43,4 +43,4 @@ async def sample_list_instances(): async for response in page_result: print(response) -# [END redis_generated_redis_v1_CloudRedis_ListInstances_async] +# [END redis_v1_generated_CloudRedis_ListInstances_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index 597721915a23..0daec59cba80 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_ListInstances_sync] +# [START redis_v1_generated_CloudRedis_ListInstances_sync] from google.cloud import redis_v1 @@ -43,4 +43,4 @@ def sample_list_instances(): for response in page_result: print(response) -# [END redis_generated_redis_v1_CloudRedis_ListInstances_sync] +# [END redis_v1_generated_CloudRedis_ListInstances_sync] diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 63d5a05ffbf0..3198e2f7777c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_UpdateInstance_async] +# [START redis_v1_generated_CloudRedis_UpdateInstance_async] from google.cloud import redis_v1 @@ -51,4 +51,4 @@ async def sample_update_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_UpdateInstance_async] +# [END redis_v1_generated_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py similarity index 92% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py rename to 
packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index d692938f99ca..598530bfe6f9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_UpdateInstance_sync] +# [START redis_v1_generated_CloudRedis_UpdateInstance_sync] from google.cloud import redis_v1 @@ -51,4 +51,4 @@ def sample_update_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_UpdateInstance_sync] +# [END redis_v1_generated_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index 7ae624c30e64..3191dfb62c76 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START 
redis_generated_redis_v1_CloudRedis_UpgradeInstance_async] +# [START redis_v1_generated_CloudRedis_UpgradeInstance_async] from google.cloud import redis_v1 @@ -47,4 +47,4 @@ async def sample_upgrade_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_async] +# [END redis_v1_generated_CloudRedis_UpgradeInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py similarity index 91% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py index 4858c84fddd0..f146947b8a52 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-redis -# [START redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync] +# [START redis_v1_generated_CloudRedis_UpgradeInstance_sync] from google.cloud import redis_v1 @@ -47,4 +47,4 @@ def sample_upgrade_instance(): # Handle the response print(response) -# [END redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync] +# [END redis_v1_generated_CloudRedis_UpgradeInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json index a246d7922870..1718e17d9bcd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json @@ -10,8 +10,8 @@ "shortName": "CreateInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_create_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_async", + "file": "redis_v1_generated_cloud_redis_create_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_async", "segments": [ { "end": 55, @@ -54,8 +54,8 @@ "shortName": "CreateInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_create_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_CreateInstance_sync", + "file": "redis_v1_generated_cloud_redis_create_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_sync", "segments": [ { "end": 55, @@ -99,8 +99,8 @@ "shortName": "DeleteInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_delete_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_async", + "file": "redis_v1_generated_cloud_redis_delete_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_async", "segments": [ { "end": 48, @@ -143,8 +143,8 @@ "shortName": "DeleteInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_delete_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_DeleteInstance_sync", + "file": "redis_v1_generated_cloud_redis_delete_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_sync", "segments": [ { "end": 48, @@ -188,8 +188,8 @@ "shortName": "ExportInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_export_instance_async.py", - 
"regionTag": "redis_generated_redis_v1_CloudRedis_ExportInstance_async", + "file": "redis_v1_generated_cloud_redis_export_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_ExportInstance_async", "segments": [ { "end": 52, @@ -232,8 +232,8 @@ "shortName": "ExportInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_export_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_ExportInstance_sync", + "file": "redis_v1_generated_cloud_redis_export_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_ExportInstance_sync", "segments": [ { "end": 52, @@ -277,8 +277,8 @@ "shortName": "FailoverInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_failover_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_async", + "file": "redis_v1_generated_cloud_redis_failover_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_FailoverInstance_async", "segments": [ { "end": 48, @@ -321,8 +321,8 @@ "shortName": "FailoverInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_failover_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_FailoverInstance_sync", + "file": "redis_v1_generated_cloud_redis_failover_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_FailoverInstance_sync", "segments": [ { "end": 48, @@ -366,8 +366,8 @@ "shortName": "GetInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_get_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_GetInstance_async", + "file": "redis_v1_generated_cloud_redis_get_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_GetInstance_async", "segments": [ { "end": 44, @@ -410,8 +410,8 @@ "shortName": "GetInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_get_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_GetInstance_sync", + "file": "redis_v1_generated_cloud_redis_get_instance_sync.py", + "regionTag": 
"redis_v1_generated_CloudRedis_GetInstance_sync", "segments": [ { "end": 44, @@ -455,8 +455,8 @@ "shortName": "ImportInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_import_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_ImportInstance_async", + "file": "redis_v1_generated_cloud_redis_import_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_ImportInstance_async", "segments": [ { "end": 52, @@ -499,8 +499,8 @@ "shortName": "ImportInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_import_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_ImportInstance_sync", + "file": "redis_v1_generated_cloud_redis_import_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_ImportInstance_sync", "segments": [ { "end": 52, @@ -544,8 +544,8 @@ "shortName": "ListInstances" } }, - "file": "redis_generated_redis_v1_cloud_redis_list_instances_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_async", + "file": "redis_v1_generated_cloud_redis_list_instances_async.py", + "regionTag": "redis_v1_generated_CloudRedis_ListInstances_async", "segments": [ { "end": 45, @@ -588,8 +588,8 @@ "shortName": "ListInstances" } }, - "file": "redis_generated_redis_v1_cloud_redis_list_instances_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_ListInstances_sync", + "file": "redis_v1_generated_cloud_redis_list_instances_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_ListInstances_sync", "segments": [ { "end": 45, @@ -633,8 +633,8 @@ "shortName": "UpdateInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_update_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_UpdateInstance_async", + "file": "redis_v1_generated_cloud_redis_update_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_async", "segments": [ { "end": 53, @@ -677,8 +677,8 @@ "shortName": "UpdateInstance" } }, - "file": 
"redis_generated_redis_v1_cloud_redis_update_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_UpdateInstance_sync", + "file": "redis_v1_generated_cloud_redis_update_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_sync", "segments": [ { "end": 53, @@ -722,8 +722,8 @@ "shortName": "UpgradeInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_upgrade_instance_async.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_async", + "file": "redis_v1_generated_cloud_redis_upgrade_instance_async.py", + "regionTag": "redis_v1_generated_CloudRedis_UpgradeInstance_async", "segments": [ { "end": 49, @@ -766,8 +766,8 @@ "shortName": "UpgradeInstance" } }, - "file": "redis_generated_redis_v1_cloud_redis_upgrade_instance_sync.py", - "regionTag": "redis_generated_redis_v1_CloudRedis_UpgradeInstance_sync", + "file": "redis_v1_generated_cloud_redis_upgrade_instance_sync.py", + "regionTag": "redis_v1_generated_CloudRedis_UpgradeInstance_sync", "segments": [ { "end": 49, diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index b862da753932..b2606d93e1c1 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_ListResources_async] +# [START 
mollusca_v1_generated_Snippets_ListResources_async] from animalia import mollusca_v1 @@ -44,4 +44,4 @@ async def sample_list_resources(): async for response in page_result: print(response) -# [END mollusca_generated_mollusca_v1_Snippets_ListResources_async] +# [END mollusca_v1_generated_Snippets_ListResources_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index cca546067d58..6f9a12e71f76 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_ListResources_sync] +# [START mollusca_v1_generated_Snippets_ListResources_sync] from animalia import mollusca_v1 @@ -44,4 +44,4 @@ def sample_list_resources(): for response in page_result: print(response) -# [END mollusca_generated_mollusca_v1_Snippets_ListResources_sync] +# [END mollusca_v1_generated_Snippets_ListResources_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py similarity index 92% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py rename to 
packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index 294878876e6b..78bac5823ef2 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async] +# [START mollusca_v1_generated_Snippets_MethodBidiStreaming_async] from animalia import mollusca_v1 @@ -53,4 +53,4 @@ def request_generator(): async for response in stream: print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async] +# [END mollusca_v1_generated_Snippets_MethodBidiStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py similarity index 92% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index a61e415c5a4e..31cae8ddcc45 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync] +# [START mollusca_v1_generated_Snippets_MethodBidiStreaming_sync] from animalia import mollusca_v1 @@ -53,4 +53,4 @@ def request_generator(): for response in stream: print(response) -# [END 
mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync] +# [END mollusca_v1_generated_Snippets_MethodBidiStreaming_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index 72d74351e860..b0225bd297c3 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async] +# [START mollusca_v1_generated_Snippets_MethodLroSignatures_async] from animalia import mollusca_v1 @@ -54,4 +54,4 @@ async def sample_method_lro_signatures(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async] +# [END mollusca_v1_generated_Snippets_MethodLroSignatures_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index 285e55c2b247..1c53ebe1ee9e 100644 --- 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync] +# [START mollusca_v1_generated_Snippets_MethodLroSignatures_sync] from animalia import mollusca_v1 @@ -54,4 +54,4 @@ def sample_method_lro_signatures(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync] +# [END mollusca_v1_generated_Snippets_MethodLroSignatures_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index 527bb4b81325..a87a510f7589 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async] +# [START mollusca_v1_generated_Snippets_MethodOneSignature_async] from animalia import mollusca_v1 @@ -50,4 +50,4 @@ async def sample_method_one_signature(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async] +# [END mollusca_v1_generated_Snippets_MethodOneSignature_async] diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index 7e0101279b14..534a1b62e42d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync] +# [START mollusca_v1_generated_Snippets_MethodOneSignature_sync] from animalia import mollusca_v1 @@ -50,4 +50,4 @@ def sample_method_one_signature(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync] +# [END mollusca_v1_generated_Snippets_MethodOneSignature_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index 2812acd9d989..50e71265714e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py +++ 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async] +# [START mollusca_v1_generated_Snippets_MethodServerStreaming_async] from animalia import mollusca_v1 @@ -51,4 +51,4 @@ async def sample_method_server_streaming(): async for response in stream: print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async] +# [END mollusca_v1_generated_Snippets_MethodServerStreaming_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index 3d94b3238008..1a561cf9559c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync] +# [START mollusca_v1_generated_Snippets_MethodServerStreaming_sync] from animalia import mollusca_v1 @@ -51,4 +51,4 @@ def sample_method_server_streaming(): for response in stream: print(response) -# [END mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync] +# [END mollusca_v1_generated_Snippets_MethodServerStreaming_sync] diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index 5ac2996b8c91..a68b0c5c0543 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async] +# [START mollusca_v1_generated_Snippets_OneOfMethod_async] from animalia import mollusca_v1 @@ -43,4 +43,4 @@ async def sample_one_of_method(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async] +# [END mollusca_v1_generated_Snippets_OneOfMethod_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py similarity index 89% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index 52273f55f5db..6f5c988f9014 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py +++ 
b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async] +# [START mollusca_v1_generated_Snippets_OneOfMethodRequiredField_async] from animalia import mollusca_v1 @@ -43,4 +43,4 @@ async def sample_one_of_method_required_field(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async] +# [END mollusca_v1_generated_Snippets_OneOfMethodRequiredField_async] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py similarity index 89% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index fe0b5eb7eef3..329193218bdf 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync] +# [START mollusca_v1_generated_Snippets_OneOfMethodRequiredField_sync] from animalia import mollusca_v1 @@ -43,4 +43,4 @@ def sample_one_of_method_required_field(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync] +# [END mollusca_v1_generated_Snippets_OneOfMethodRequiredField_sync] diff --git 
a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py similarity index 91% rename from packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py rename to packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index b5bb96552c07..d8956a7bf4d9 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install animalia-mollusca -# [START mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync] +# [START mollusca_v1_generated_Snippets_OneOfMethod_sync] from animalia import mollusca_v1 @@ -43,4 +43,4 @@ def sample_one_of_method(): # Handle the response print(response) -# [END mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync] +# [END mollusca_v1_generated_Snippets_OneOfMethod_sync] diff --git a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json index 0010781108ec..4bdf3e50cb95 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json +++ b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_mollusca_v1.json @@ -10,8 +10,8 @@ "shortName": "ListResources" } }, - "file": "mollusca_generated_mollusca_v1_snippets_list_resources_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_async", + "file": "mollusca_v1_generated_snippets_list_resources_async.py", + "regionTag": "mollusca_v1_generated_Snippets_ListResources_async", "segments": [ { "end": 46, @@ -54,8 +54,8 @@ "shortName": "ListResources" } }, - 
"file": "mollusca_generated_mollusca_v1_snippets_list_resources_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_sync", + "file": "mollusca_v1_generated_snippets_list_resources_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_ListResources_sync", "segments": [ { "end": 46, @@ -99,8 +99,8 @@ "shortName": "MethodBidiStreaming" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async", + "file": "mollusca_v1_generated_snippets_method_bidi_streaming_async.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodBidiStreaming_async", "segments": [ { "end": 55, @@ -143,8 +143,8 @@ "shortName": "MethodBidiStreaming" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync", + "file": "mollusca_v1_generated_snippets_method_bidi_streaming_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodBidiStreaming_sync", "segments": [ { "end": 55, @@ -188,8 +188,8 @@ "shortName": "MethodLroSignatures" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async", + "file": "mollusca_v1_generated_snippets_method_lro_signatures_async.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodLroSignatures_async", "segments": [ { "end": 56, @@ -232,8 +232,8 @@ "shortName": "MethodLroSignatures" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync", + "file": "mollusca_v1_generated_snippets_method_lro_signatures_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodLroSignatures_sync", "segments": [ { "end": 56, @@ -277,8 +277,8 @@ "shortName": "MethodOneSignature" } }, - "file": 
"mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async", + "file": "mollusca_v1_generated_snippets_method_one_signature_async.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodOneSignature_async", "segments": [ { "end": 52, @@ -321,8 +321,8 @@ "shortName": "MethodOneSignature" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync", + "file": "mollusca_v1_generated_snippets_method_one_signature_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodOneSignature_sync", "segments": [ { "end": 52, @@ -366,8 +366,8 @@ "shortName": "MethodServerStreaming" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async", + "file": "mollusca_v1_generated_snippets_method_server_streaming_async.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodServerStreaming_async", "segments": [ { "end": 53, @@ -410,8 +410,8 @@ "shortName": "MethodServerStreaming" } }, - "file": "mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync", + "file": "mollusca_v1_generated_snippets_method_server_streaming_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_MethodServerStreaming_sync", "segments": [ { "end": 53, @@ -455,8 +455,8 @@ "shortName": "OneOfMethodRequiredField" } }, - "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async", + "file": "mollusca_v1_generated_snippets_one_of_method_required_field_async.py", + "regionTag": "mollusca_v1_generated_Snippets_OneOfMethodRequiredField_async", "segments": [ { "end": 45, @@ -499,8 
+499,8 @@ "shortName": "OneOfMethodRequiredField" } }, - "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync", + "file": "mollusca_v1_generated_snippets_one_of_method_required_field_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_OneOfMethodRequiredField_sync", "segments": [ { "end": 45, @@ -544,8 +544,8 @@ "shortName": "OneOfMethod" } }, - "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async", + "file": "mollusca_v1_generated_snippets_one_of_method_async.py", + "regionTag": "mollusca_v1_generated_Snippets_OneOfMethod_async", "segments": [ { "end": 45, @@ -588,8 +588,8 @@ "shortName": "OneOfMethod" } }, - "file": "mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync", + "file": "mollusca_v1_generated_snippets_one_of_method_sync.py", + "regionTag": "mollusca_v1_generated_Snippets_OneOfMethod_sync", "segments": [ { "end": 45, diff --git a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json b/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json deleted file mode 100644 index c5badb1099b7..000000000000 --- a/packages/gapic-generator/tests/snippetgen/goldens/snippet_metadata_v1_mollusca_v1.json +++ /dev/null @@ -1,611 +0,0 @@ -{ - "snippets": [ - { - "clientMethod": { - "method": { - "fullName": "MethodServerStreaming", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_server_streaming_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_sync", - "segment": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - 
"end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "method": { - "fullName": "OneOfMethodRequiredField", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_sync", - "segment": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "method": { - "fullName": "OneOfMethod", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_sync", - "segment": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "method": { - "fullName": "MethodBidiStreaming", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_sync.py", - "regionTag": 
"mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_sync", - "segment": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "method": { - "fullName": "ListResources", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_list_resources_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_sync", - "segment": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "method": { - "fullName": "MethodOneSignature", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_one_signature_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_sync", - "segment": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "method": { - "fullName": "MethodLroSignatures", - "service": { - "shortName": "Snippets" - } - } - }, - "file": 
"samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_sync.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_sync", - "segment": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "async": true, - "method": { - "fullName": "MethodBidiStreaming", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_bidi_streaming_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodBidiStreaming_async", - "segment": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "async": true, - "method": { - "fullName": "ListResources", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_list_resources_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_ListResources_async", - "segment": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - 
"clientMethod": { - "async": true, - "method": { - "fullName": "OneOfMethodRequiredField", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_required_field_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethodRequiredField_async", - "segment": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "async": true, - "method": { - "fullName": "MethodServerStreaming", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_server_streaming_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodServerStreaming_async", - "segment": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "async": true, - "method": { - "fullName": "OneOfMethod", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_one_of_method_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_OneOfMethod_async", - "segment": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - 
{ - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "async": true, - "method": { - "fullName": "MethodOneSignature", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_one_signature_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodOneSignature_async", - "segment": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ] - }, - { - "clientMethod": { - "async": true, - "method": { - "fullName": "MethodLroSignatures", - "service": { - "shortName": "Snippets" - } - } - }, - "file": "samples/generated_samples/mollusca_generated_mollusca_v1_snippets_method_lro_signatures_async.py", - "regionTag": "mollusca_generated_mollusca_v1_Snippets_MethodLroSignatures_async", - "segment": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "type": "RESPONSE_HANDLING" - } - ] - } - ] -} diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 2f7574e48448..ba5840a51c04 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ 
b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -2053,7 +2053,7 @@ def test_generate_sample_spec_basic(): "rpc": "Ramshorn", "transport": "grpc", "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_sync", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", "description": "Snippet for ramshorn" } @@ -2061,7 +2061,7 @@ def test_generate_sample_spec_basic(): "rpc": "Ramshorn", "transport": "grpc-async", "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_generated_mollusca_v1_Squid_Ramshorn_async", + "region_tag": "example_v1_generated_Squid_Ramshorn_async", "description": "Snippet for ramshorn" } From 5103fba146f518187d5c6a0d7ff8471c2795fa61 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 18 Feb 2022 04:49:00 +0100 Subject: [PATCH 0746/1339] chore(deps): update dependency setuptools to v60.9.2 (#1209) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [setuptools](https://togithub.com/pypa/setuptools) | `==60.9.1` -> `==60.9.2` | [![age](https://badges.renovateapi.com/packages/pypi/setuptools/60.9.2/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/setuptools/60.9.2/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/setuptools/60.9.2/compatibility-slim/60.9.1)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/setuptools/60.9.2/confidence-slim/60.9.1)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pypa/setuptools ### [`v60.9.2`](https://togithub.com/pypa/setuptools/blob/HEAD/CHANGES.rst#v6092) [Compare Source](https://togithub.com/pypa/setuptools/compare/v60.9.1...v60.9.2) Misc ^^^^ - [#​3035](https://togithub.com/pypa/setuptools/issues/3035): When loading distutils from the vendored copy, rewrite `__name__` to ensure consistent importing from inside and out.
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, click this checkbox. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2f08e316c427..0efd28c872b7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.9.1 +setuptools==60.9.2 From 868d8d52178c529e485b9ab0722f23b470979b0d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 18 Feb 2022 13:10:54 +0100 Subject: [PATCH 0747/1339] chore(deps): update dependency markupsafe to v2.1.0 (#1215) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0efd28c872b7..e6fbf2a106cd 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,7 +2,7 @@ click==8.0.3 google-api-core==2.1.1 googleapis-common-protos==1.54.0 jinja2==3.0.3 -MarkupSafe==2.0.1 +MarkupSafe==2.1.0 protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 From dae094e9df55c315ce16c87e1157ef38be274949 Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Tue, 22 Feb 2022 13:08:41 -0800 
Subject: [PATCH 0748/1339] chore: disable mtls showcase tests (#1219) The mtls showcase_mtls and showcase_mtls_alternative_templates tests have started timing out in CI and are blocking PRs. Disable them until further notice. See #1218 for details. --- packages/gapic-generator/.github/sync-repo-settings.yaml | 6 ++++-- packages/gapic-generator/.github/workflows/tests.yaml | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index a735a93570aa..8c6fac341e10 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -10,8 +10,10 @@ branchProtectionRules: - 'mypy' - 'showcase (showcase)' - 'showcase (showcase_alternative_templates)' - - 'showcase-mtls (showcase_mtls)' - - 'showcase-mtls (showcase_mtls_alternative_templates)' + # TODO(dovs): reenable these when the mtls tests have been debugged and fixed + # See #1218 for details + # - 'showcase-mtls (showcase_mtls)' + # - 'showcase-mtls (showcase_mtls_alternative_templates)' - 'showcase-mypy' - 'showcase-mypy (_alternative_templates)' - 'showcase-unit (3.6)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index cafa68430f1f..6ca76ab3f943 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -81,6 +81,7 @@ jobs: - name: Run showcase tests. 
run: nox -s ${{ matrix.target }} showcase-mtls: + if: ${{ false }} # TODO(dovs): reenable when #1218 is fixed strategy: matrix: target: [showcase_mtls, showcase_mtls_alternative_templates] From 208c647c1e5a630089df2b7cffab0fd20fcfb52c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 22 Feb 2022 22:16:48 +0100 Subject: [PATCH 0749/1339] chore(deps): update dependency setuptools to v60.9.3 (#1214) Co-authored-by: Anthonios Partheniou Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e6fbf2a106cd..64eb6f4a5657 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.9.2 +setuptools==60.9.3 From 4617aa04bbaff8d925d4a70e3b0bffa99e4018b5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 22 Feb 2022 22:22:10 +0100 Subject: [PATCH 0750/1339] chore(deps): update dependency click to v8.0.4 (#1216) Co-authored-by: Dov Shlachter --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 64eb6f4a5657..fd146ce9755f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.0.3 +click==8.0.4 google-api-core==2.1.1 googleapis-common-protos==1.54.0 jinja2==3.0.3 From f2eec0eba55528fa6bfc4566a8ce3fd784b915f3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 22 Feb 2022 14:52:58 -0800 Subject: [PATCH 0751/1339] chore(main): release 0.63.4 (#1213) Co-authored-by: release-please[bot] 
<55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Dov Shlachter --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c28e6d40c9ac..509a05761a40 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.63.4](https://github.com/googleapis/gapic-generator-python/compare/v0.63.3...v0.63.4) (2022-02-22) + + +### Bug Fixes + +* fix snippet region tag format ([#1210](https://github.com/googleapis/gapic-generator-python/issues/1210)) ([e895106](https://github.com/googleapis/gapic-generator-python/commit/e8951064827c726bb651801336b86188e2628386)) + ### [0.63.3](https://github.com/googleapis/gapic-generator-python/compare/v0.63.2...v0.63.3) (2022-02-16) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0772b210c507..721cf505c67e 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.3" +version = "0.63.4" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 1bba7c0e31a7d1db3e41cda19f5ad6499723d010 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 23 Feb 2022 20:06:10 +0100 Subject: [PATCH 0752/1339] chore(deps): update dependency googleapis-common-protos to v1.55.0 (#1221) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index fd146ce9755f..af29d63a9c86 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.0.4 google-api-core==2.1.1 
-googleapis-common-protos==1.54.0 +googleapis-common-protos==1.55.0 jinja2==3.0.3 MarkupSafe==2.1.0 protobuf==3.19.4 From e3b3e0aa9ee46a2ead2d602c7715a57c412ca4dd Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:00:19 -0700 Subject: [PATCH 0753/1339] fix: update license year to 2022 (#1199) * fix: update license year to 2022 * chore: update year in snippet goldens --- packages/gapic-generator/gapic/ads-templates/_license.j2 | 2 +- packages/gapic-generator/gapic/templates/_license.j2 | 2 +- packages/gapic-generator/gapic/templates/docs/conf.py.j2 | 2 +- .../tests/integration/goldens/asset/docs/conf.py | 4 ++-- .../integration/goldens/asset/google/cloud/asset/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/services/__init__.py | 2 +- .../google/cloud/asset_v1/services/asset_service/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/pagers.py | 2 +- .../asset_v1/services/asset_service/transports/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/base.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/grpc.py | 2 +- .../services/asset_service/transports/grpc_asyncio.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/__init__.py | 2 +- .../asset/google/cloud/asset_v1/types/asset_service.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/assets.py | 2 +- .../tests/integration/goldens/asset/noxfile.py | 2 +- ...set_v1_generated_asset_service_analyze_iam_policy_async.py | 2 +- ...ated_asset_service_analyze_iam_policy_longrunning_async.py | 2 +- ...rated_asset_service_analyze_iam_policy_longrunning_sync.py | 2 +- ...sset_v1_generated_asset_service_analyze_iam_policy_sync.py | 2 +- ..._generated_asset_service_batch_get_assets_history_async.py | 2 +- 
...1_generated_asset_service_batch_get_assets_history_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_create_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_create_feed_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_delete_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_delete_feed_sync.py | 2 +- ...oudasset_v1_generated_asset_service_export_assets_async.py | 2 +- ...loudasset_v1_generated_asset_service_export_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_list_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_sync.py | 2 +- ...1_generated_asset_service_search_all_iam_policies_async.py | 2 +- ...v1_generated_asset_service_search_all_iam_policies_sync.py | 2 +- ...t_v1_generated_asset_service_search_all_resources_async.py | 2 +- ...et_v1_generated_asset_service_search_all_resources_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_update_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_update_feed_sync.py | 2 +- .../goldens/asset/scripts/fixup_asset_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/asset/tests/__init__.py | 2 +- .../tests/integration/goldens/asset/tests/unit/__init__.py | 2 +- .../integration/goldens/asset/tests/unit/gapic/__init__.py | 2 +- .../goldens/asset/tests/unit/gapic/asset_v1/__init__.py | 2 +- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 2 +- .../tests/integration/goldens/credentials/docs/conf.py | 4 ++-- .../goldens/credentials/google/iam/credentials/__init__.py | 2 +- .../goldens/credentials/google/iam/credentials_v1/__init__.py | 2 +- 
.../google/iam/credentials_v1/services/__init__.py | 2 +- .../iam/credentials_v1/services/iam_credentials/__init__.py | 2 +- .../credentials_v1/services/iam_credentials/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../services/iam_credentials/transports/__init__.py | 2 +- .../services/iam_credentials/transports/base.py | 2 +- .../services/iam_credentials/transports/grpc.py | 2 +- .../services/iam_credentials/transports/grpc_asyncio.py | 2 +- .../credentials/google/iam/credentials_v1/types/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/types/common.py | 2 +- .../google/iam/credentials_v1/types/iamcredentials.py | 2 +- .../tests/integration/goldens/credentials/noxfile.py | 2 +- ...1_generated_iam_credentials_generate_access_token_async.py | 2 +- ...v1_generated_iam_credentials_generate_access_token_sync.py | 2 +- ...ls_v1_generated_iam_credentials_generate_id_token_async.py | 2 +- ...als_v1_generated_iam_credentials_generate_id_token_sync.py | 2 +- ...redentials_v1_generated_iam_credentials_sign_blob_async.py | 2 +- ...credentials_v1_generated_iam_credentials_sign_blob_sync.py | 2 +- ...credentials_v1_generated_iam_credentials_sign_jwt_async.py | 2 +- ...mcredentials_v1_generated_iam_credentials_sign_jwt_sync.py | 2 +- .../credentials/scripts/fixup_credentials_v1_keywords.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../tests/integration/goldens/credentials/tests/__init__.py | 2 +- .../integration/goldens/credentials/tests/unit/__init__.py | 2 +- .../goldens/credentials/tests/unit/gapic/__init__.py | 2 +- .../credentials/tests/unit/gapic/credentials_v1/__init__.py | 2 +- .../tests/unit/gapic/credentials_v1/test_iam_credentials.py | 2 +- .../tests/integration/goldens/logging/docs/conf.py | 4 ++-- .../goldens/logging/google/cloud/logging/__init__.py | 2 +- .../goldens/logging/google/cloud/logging_v2/__init__.py | 2 +- .../logging/google/cloud/logging_v2/services/__init__.py | 2 +- 
.../cloud/logging_v2/services/config_service_v2/__init__.py | 2 +- .../logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/pagers.py | 2 +- .../services/config_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/config_service_v2/transports/base.py | 2 +- .../logging_v2/services/config_service_v2/transports/grpc.py | 2 +- .../services/config_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 2 +- .../services/logging_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/transports/base.py | 2 +- .../logging_v2/services/logging_service_v2/transports/grpc.py | 2 +- .../services/logging_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 2 +- .../services/metrics_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/base.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/grpc.py | 2 +- .../services/metrics_service_v2/transports/grpc_asyncio.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/__init__.py | 2 +- .../logging/google/cloud/logging_v2/types/log_entry.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/logging.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_config.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_metrics.py | 2 +- .../tests/integration/goldens/logging/noxfile.py | 2 +- 
...ging_v2_generated_config_service_v2_create_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_create_bucket_sync.py | 2 +- ...g_v2_generated_config_service_v2_create_exclusion_async.py | 2 +- ...ng_v2_generated_config_service_v2_create_exclusion_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_sink_async.py | 2 +- ...logging_v2_generated_config_service_v2_create_sink_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_view_async.py | 2 +- ...logging_v2_generated_config_service_v2_create_view_sync.py | 2 +- ...ging_v2_generated_config_service_v2_delete_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_delete_bucket_sync.py | 2 +- ...g_v2_generated_config_service_v2_delete_exclusion_async.py | 2 +- ...ng_v2_generated_config_service_v2_delete_exclusion_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_sink_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_sink_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_view_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_view_sync.py | 2 +- ...logging_v2_generated_config_service_v2_get_bucket_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_sync.py | 2 +- ..._v2_generated_config_service_v2_get_cmek_settings_async.py | 2 +- ...g_v2_generated_config_service_v2_get_cmek_settings_sync.py | 2 +- ...ging_v2_generated_config_service_v2_get_exclusion_async.py | 2 +- ...gging_v2_generated_config_service_v2_get_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_sync.py | 2 +- ...gging_v2_generated_config_service_v2_list_buckets_async.py | 2 +- ...ogging_v2_generated_config_service_v2_list_buckets_sync.py | 2 +- ...ng_v2_generated_config_service_v2_list_exclusions_async.py | 2 +- 
...ing_v2_generated_config_service_v2_list_exclusions_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_sinks_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_views_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_sync.py | 2 +- ...ng_v2_generated_config_service_v2_undelete_bucket_async.py | 2 +- ...ing_v2_generated_config_service_v2_undelete_bucket_sync.py | 2 +- ...ging_v2_generated_config_service_v2_update_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_update_bucket_sync.py | 2 +- ..._generated_config_service_v2_update_cmek_settings_async.py | 2 +- ...2_generated_config_service_v2_update_cmek_settings_sync.py | 2 +- ...g_v2_generated_config_service_v2_update_exclusion_async.py | 2 +- ...ng_v2_generated_config_service_v2_update_exclusion_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_sink_async.py | 2 +- ...logging_v2_generated_config_service_v2_update_sink_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_view_async.py | 2 +- ...logging_v2_generated_config_service_v2_update_view_sync.py | 2 +- ...ogging_v2_generated_logging_service_v2_delete_log_async.py | 2 +- ...logging_v2_generated_logging_service_v2_delete_log_sync.py | 2 +- ..._v2_generated_logging_service_v2_list_log_entries_async.py | 2 +- ...g_v2_generated_logging_service_v2_list_log_entries_sync.py | 2 +- ...logging_v2_generated_logging_service_v2_list_logs_async.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_sync.py | 2 +- ...ng_service_v2_list_monitored_resource_descriptors_async.py | 2 +- ...ing_service_v2_list_monitored_resource_descriptors_sync.py | 2 +- ..._v2_generated_logging_service_v2_tail_log_entries_async.py | 2 +- ...g_v2_generated_logging_service_v2_tail_log_entries_sync.py | 2 +- ...v2_generated_logging_service_v2_write_log_entries_async.py | 2 +- ..._v2_generated_logging_service_v2_write_log_entries_sync.py | 2 
+- ...v2_generated_metrics_service_v2_create_log_metric_async.py | 2 +- ..._v2_generated_metrics_service_v2_create_log_metric_sync.py | 2 +- ...v2_generated_metrics_service_v2_delete_log_metric_async.py | 2 +- ..._v2_generated_metrics_service_v2_delete_log_metric_sync.py | 2 +- ...ng_v2_generated_metrics_service_v2_get_log_metric_async.py | 2 +- ...ing_v2_generated_metrics_service_v2_get_log_metric_sync.py | 2 +- ..._v2_generated_metrics_service_v2_list_log_metrics_async.py | 2 +- ...g_v2_generated_metrics_service_v2_list_log_metrics_sync.py | 2 +- ...v2_generated_metrics_service_v2_update_log_metric_async.py | 2 +- ..._v2_generated_metrics_service_v2_update_log_metric_sync.py | 2 +- .../goldens/logging/scripts/fixup_logging_v2_keywords.py | 2 +- .../tests/integration/goldens/logging/setup.py | 2 +- .../tests/integration/goldens/logging/tests/__init__.py | 2 +- .../tests/integration/goldens/logging/tests/unit/__init__.py | 2 +- .../integration/goldens/logging/tests/unit/gapic/__init__.py | 2 +- .../goldens/logging/tests/unit/gapic/logging_v2/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../tests/integration/goldens/redis/docs/conf.py | 4 ++-- .../integration/goldens/redis/google/cloud/redis/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/services/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/pagers.py | 2 +- .../redis_v1/services/cloud_redis/transports/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/base.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 2 +- 
.../redis_v1/services/cloud_redis/transports/grpc_asyncio.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/cloud_redis.py | 2 +- .../tests/integration/goldens/redis/noxfile.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_async.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_sync.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_sync.py | 2 +- .../goldens/redis/scripts/fixup_redis_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- .../tests/integration/goldens/redis/tests/__init__.py | 2 +- .../tests/integration/goldens/redis/tests/unit/__init__.py | 2 +- .../integration/goldens/redis/tests/unit/gapic/__init__.py | 2 +- .../goldens/redis/tests/unit/gapic/redis_v1/__init__.py | 2 +- .../redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_async.py | 2 +- 
.../mollusca_v1_generated_snippets_list_resources_sync.py | 2 +- ...lusca_v1_generated_snippets_method_bidi_streaming_async.py | 2 +- ...llusca_v1_generated_snippets_method_bidi_streaming_sync.py | 2 +- ...lusca_v1_generated_snippets_method_lro_signatures_async.py | 2 +- ...llusca_v1_generated_snippets_method_lro_signatures_sync.py | 2 +- ...llusca_v1_generated_snippets_method_one_signature_async.py | 2 +- ...ollusca_v1_generated_snippets_method_one_signature_sync.py | 2 +- ...sca_v1_generated_snippets_method_server_streaming_async.py | 2 +- ...usca_v1_generated_snippets_method_server_streaming_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_async.py | 2 +- ...1_generated_snippets_one_of_method_required_field_async.py | 2 +- ...v1_generated_snippets_one_of_method_required_field_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_sync.py | 2 +- .../tests/unit/samplegen/golden_snippets/sample_basic.py | 2 +- .../unit/samplegen/golden_snippets/sample_basic_async.py | 2 +- .../samplegen/golden_snippets/sample_basic_unflattenable.py | 2 +- .../samplegen/golden_snippets/sample_basic_void_method.py | 2 +- 249 files changed, 253 insertions(+), 253 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/_license.j2 b/packages/gapic-generator/gapic/ads-templates/_license.j2 index 03ddf2e6ea0f..b00563f3379b 100644 --- a/packages/gapic-generator/gapic/ads-templates/_license.j2 +++ b/packages/gapic-generator/gapic/ads-templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/gapic/templates/_license.j2 b/packages/gapic-generator/gapic/templates/_license.j2 index 03ddf2e6ea0f..b00563f3379b 100644 --- a/packages/gapic-generator/gapic/templates/_license.j2 +++ b/packages/gapic-generator/gapic/templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index bbd444270e29..3dbdeeeeade8 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -68,7 +68,7 @@ root_doc = "index" # General information about the project. project = u"{{ api.naming.warehouse_package_name }}" -copyright = u"2020, Google, LLC" +copyright = u"2022, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 8af34db8a130..991fc2c82590 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-cloud-asset" -copyright = u"2020, Google, LLC" +copyright = u"2022, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index 815c196c23c6..1f4a2dd59690 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 35fdc0668d2a..5a31481e893f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index 357f952048fc..5f66b6251136 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 532af146cc07..6fdea6e03301 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 538c6ef62db5..27b373525319 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 9513d57df8e1..1c01392cc3d7 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index 2c12069f8d49..252f766b88e8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 9ae3eddd3201..a873db9094de 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index b4ddfa267bbe..658b5faf4ead 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 76849ef54c28..dbc83c57a7cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index f20e22242252..dd04dee8c92e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 106c76c23838..787a6b307cdb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 230326d4f310..dda640458cc5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 4833faf3eb83..a4ccb3bfab66 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index 8f0644233143..46380a92b6e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index adaeb096c445..db412762851d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index 490f82262516..c3aa140669f8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index 1fb3097ad575..9a0a2e54c8b6 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index 80b3d45858dc..36489d63b00e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index baf15f6d4d8c..680f02c0f5c7 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index 2a1f800722a1..7a0a5bf4bd01 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index 880514078cea..612e6e13af6f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py index e32050772beb..86660f4f2050 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py index d592f05e075f..ec710e646bb9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index b544fe05cca4..aea177ab7ff9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 9092f49d1f24..c536997de12c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index 99becdfc8506..d9adab2adc00 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 139e812cd8d0..81b3b9adcab1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index e460c974c003..2e48093c8077 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index eee88ec49225..ed8981813fb8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index d109f0a053a0..ec138b7375b1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index faec88fedabe..2822c78bd066 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index 553ad8d7f4fc..282ea53bcbea 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index d97ee0d08054..542da876b8d3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index 7a2736b9eb30..c43226442b15 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index 5021d6586d31..c2bf14027ae4 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index 65a6d688f282..1d8dc82b9a59 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index 467c81dbb9c5..e48a0b6684f1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index c52bebe8e71b..a978950855af 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 38fae1451be6..7700dea02f55 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index a71bb7f5d0e9..772bf7df4a33 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 0a00d31a38e9..7eb953881c25 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-iam-credentials" -copyright = u"2020, Google, LLC" +copyright = u"2022, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 1bfd4c8c0975..461cabe788cc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 02be17e16c05..989a46cc864f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py index 9cd541f4dbe7..ff711c8a5c7b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index cde4cb77c9b2..d8f271b3bb67 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index f495c9e4dab2..5747e7e30ef4 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py index d4e5cd93f4c0..b9626aef55ca 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index ec719c2a8038..265e6c8ee853 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 8826e8749529..ef1a961d311d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 7a45b94ccc8d..3f1ec7d18286 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py index 40f194ee4a4b..67117910ad4f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index 9789d7f27190..a14b7573504b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index 14f0e8ae5fb8..3cdecabdc9d8 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 69a64f4ce411..6d8542945a5a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index 74ca09bac30d..d2fb32cff1e6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index 01d48e2d2700..93e49f178cfd 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py index 44fadf728ff5..5f63373078ac 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py index 28870a6edd37..039d59763441 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py index f3695380f55f..fffa6de4bc73 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py index 17ef6e0105d1..e04b7b589440 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py index a1ba9ec8fde7..eae43440b8fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py index 44dffaf9d2d0..e0eed353c542 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py index 9510dabdd80a..55a3779b1e28 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 3d0b28176155..5f1b455f9010 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 1ac841d60e3d..ec644b8726d6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index 1c0f97d4adfc..f7bb3f4c94e1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. project = u"google-cloud-logging" -copyright = u"2020, Google, LLC" +copyright = u"2022, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 16e3d0cc06cf..712e121b87ce 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 1dc1e1eac254..1929cecec6a3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 2b27a12e93f8..24dd59314313 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index a44f709cef58..21a9edb9b2ad 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index c65fd24e6287..d32b932fa5c9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index d6809771c986..f08c20f2d43c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 6e18c331ff70..5472642dc8a1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 6cfeeedee906..b5c472fc6224 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index a2abc2070821..dc81dff6b136 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index c23fe70ca640..6a11f16ca00d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index ed08d1888503..8180e704b8d8 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 628a11e4ffed..f6431a96b6aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 6d6abf1b1613..9faf86790e41 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 3cb42b00af37..490d11ca7c44 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 46e9a1fcbf4c..19559a52b02b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 154dc6bc6e1e..a8c76b1d12bd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index a1730e78a706..d0c62097b806 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index a4511afa8a58..c787f62ed459 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 1b5d1805cdcd..293e70fbb71c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 9831beb94af1..49ad0eb64e81 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 8858d085e126..1cf7c076a851 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 0ab85ca09718..d92cf40d47cc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 28e9b710ec84..8315e03cfae6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index bf92724ce659..6f2396f5cebb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 08b62a437b29..f4f901240e66 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 44e6b95b0867..a8a020bcfd7d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index 38c93c541801..b9b3f21a5f07 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index c9fbf4ff058b..7ab991846c13 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index edd2a7c33a9e..a4610dd59de0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 068af0b23643..1fd81ef43382 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 1ec255b25388..e2602c4c49ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 516ab61925d8..1e28e50b4f4b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 2a5057cf46a4..1c5c329c802c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index cc1b100c1267..9b309322914f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 2ad228aad11f..5be1a9ad3da5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 4e749f6032be..3b57560f3cec 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 73ff5dfd71b7..789598d4c7f2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index 63dfef2f236c..e22bc60555c5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 059e9ecdd2db..499d4eeba920 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 8b83b9246933..8e6425d712b1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index 918ecb9b6680..def3e5abcb4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index 85cd5be4b4a6..64c95c992455 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 4ec09c9ef4ff..4c042c3bec23 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 9e53098c7d36..dc313658435a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index dc7696efda5f..fe5acb523f2d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index 34b211765312..d9ddc66a02a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index aa829578ce21..fd1eee969886 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index d7cbca49be05..1169b400046e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index d1db784af90e..4b964aa7435d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index c9adebb0e12b..1b299dd569ac 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index cd19694dba61..356f0db9fbf1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 9c49e0da9c36..83dfc8d2c997 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 1833917f1c47..27a7644458dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index aa362876bfbe..980914dac1cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 199a5058ae95..fa3d7cf7f5b6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 67a772129dc7..48581e4706bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5a15bac763e9..9f26a54178fa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index 983f7412fb3e..f88c15d2e30f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 14aeafc213d3..4e3bfea5582b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 0238fd074c8c..3522c4c8979d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index d28a1b38dff0..788436d6ad15 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index ab1a8a3197bc..8ea9407a1066 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index 07f7dbb507fb..b43b5682a28e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 286e67df7c5a..235395e6d593 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 0cca13d1d1be..27910c9f7cfa 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 8fdaa7e074e7..2e5b6e53b3d1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index 7ebb1b4ee576..020866b755b9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 1bce802721ec..0dfb39a11a07 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 079ade835903..78245abfc6f0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index f7d62c0be9cf..c285fd542862 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 4117f50db85e..8d49b85e7b8c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index af42078fbef5..7b04208d4c58 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 0390fbf11cc0..d06cf80d4f63 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 23dcd5f9197c..c0dba34ccb9e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index 94b996e2666a..c46b9ab427d2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index 280caac95f1c..9639ece285e4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 35b593c28f35..250d3f9dc881 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 8ee7abf2c90a..1397848800fe 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 36fde8f53926..6338b9abcbd0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 29df813e7082..36280057bfe7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 58dadb56c27c..4a8692b04e47 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 034028cc870c..062075af9091 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 6a39828b35a3..fb0106199bd0 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index e0f51877e058..0f775572f57b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 5bc4722f7d57..b8f3397012de 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index bd16bdee7b42..736d64d614d1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 4269591903f8..3e77920f81e1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 8a9436ed6a1d..ee1108b33f02 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index b91f5b262a12..28025d777037 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index af76795921b5..31569811c655 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 7f195d95bd78..96690c2c36b4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index ada74b622f6e..051694d31470 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index f63b64615a5b..bf2ee5e4abc2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index 625137321833..eae109200224 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 00b685b76739..cea94a356e29 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 03121500c323..eea36222a80b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index ccaf104c7c8f..9dac7793736b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 89ffe596ce8d..97b3c2f1364b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index a9756d9827b5..c94c70e76264 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index a0addeec29d5..bcdff32693bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index 2a368fb9ccea..c24078ce4df5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index dbbcbb08b571..49e692453f6d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 75cd76119dfb..608de96fcf59 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index bca0c669efcc..fd574b59c20d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index ffb6e2282722..efbcb370b197 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index 7e4dd684d931..72426508f412 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-cloud-redis" -copyright = u"2020, Google, LLC" +copyright = u"2022, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 40db9a635620..350ba306355a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index 2ec655b66627..d9600edc259d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py index 900f778f73b9..acb9d69179d1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 189333cf5ba2..7051f5ff3145 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 2b3eab7424cb..94d494d4594f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index a856b50cc9fc..b601a7a057f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 165d31b2295f..2622f89f5de9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 7d874a3182a4..3459483dfbdb 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index fabc8d5817f8..054e20172bd1 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index ea35b3a00504..0ffce85705a7 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index 1d86627eef24..5d35c41cc999 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index e228a379822b..a4aec5eabc71 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 78a45ee2e947..d0c0f468c0b8 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 28669bb756b5..fd2dd59fed3a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index 1343e2a70e25..f90f1e07aa36 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index b04eaa2a823b..e75a5e9dd2a7 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index fcf701aad286..bd3aeb3fc9cd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index 73da1cfb444e..99579233fc58 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py index cc2b7119b6d3..a5c8c94bddc2 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index 19fdde92bc6c..5430b08c1d4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py index ac5425144e49..48f45a18336c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index 94d4936cabc8..916207eef803 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index a62b3aba8a5a..3b62906b0b9a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index be02f0c9a25b..4e5424858e96 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py index a00ebe83a74c..1d1f3f8b5c2f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 06fcc10321a2..2711798851d5 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index 0daec59cba80..f19416d028db 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 3198e2f7777c..50d6f22a3846 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index 598530bfe6f9..6cf9a6668bdc 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index 3191dfb62c76..a5aee00e7755 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py index f146947b8a52..02c68da78509 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index a8415f0b249a..6df69450aa37 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 2941b8365185..63c965649f9d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py index b54a5fcc42cd..231bc125017b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 5f38005d629c..0744ec57277b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index b2606d93e1c1..0c28b8ebcb70 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index 6f9a12e71f76..fb92e25300e8 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index 78bac5823ef2..3f1f31a87a02 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index 31cae8ddcc45..d9874ea670fb 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index b0225bd297c3..8d7f588ca427 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index 1c53ebe1ee9e..0620df227bfd 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index a87a510f7589..3303a4ece0e6 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index 534a1b62e42d..bccffa76fc01 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index 50e71265714e..c13f1ceeabe1 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index 1a561cf9559c..6c886fc0e337 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index a68b0c5c0543..85f712b4ddce 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index 6f5c988f9014..60293b9108c0 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index 329193218bdf..e427930852fc 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index d8956a7bf4d9..b45ffcc97bd4 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index a0f06b285504..e36c7c6bc366 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index fee93ebf2dfd..4ba792333173 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index a0f06b285504..e36c7c6bc366 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index 39764bac8129..f47b9c124453 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 0091f36a3f69e62afdf89d4d165ae9e7fd226e50 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:19:03 -0700 Subject: [PATCH 0754/1339] chore(main): release 0.63.5 (#1222) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 509a05761a40..858da09ee3a3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.63.5](https://github.com/googleapis/gapic-generator-python/compare/v0.63.4...v0.63.5) (2022-02-25) + + +### Bug Fixes + +* update license year to 2022 ([#1199](https://github.com/googleapis/gapic-generator-python/issues/1199)) ([31292d5](https://github.com/googleapis/gapic-generator-python/commit/31292d59c8d08695f3e2dfa75861c86d723a9d35)) + ### [0.63.4](https://github.com/googleapis/gapic-generator-python/compare/v0.63.3...v0.63.4) (2022-02-22) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 721cf505c67e..fdbb7776d6e5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.4" +version = "0.63.5" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From f042ca161a5914029ed4b1b3e3f3f9a5fa7be8e8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 25 Feb 2022 20:05:30 -0500 Subject: [PATCH 0755/1339] chore: track coverage in tests (#1174) * chore: track coverage in tests * update goldens * attempt to fix showcase_mtls ci --- .../gapic/ads-templates/noxfile.py.j2 | 1 + .../gapic/templates/noxfile.py.j2 | 1 + 
.../%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../tests/integration/goldens/asset/noxfile.py | 1 + .../unit/gapic/asset_v1/test_asset_service.py | 12 ++++++------ .../integration/goldens/credentials/noxfile.py | 1 + .../tests/integration/goldens/logging/noxfile.py | 1 + .../gapic/logging_v2/test_config_service_v2.py | 16 ++++++++-------- .../gapic/logging_v2/test_logging_service_v2.py | 12 ++++++------ .../gapic/logging_v2/test_metrics_service_v2.py | 4 ++-- .../tests/integration/goldens/redis/noxfile.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 4 ++-- 12 files changed, 32 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 075643aac8d3..5d45dee21a4f 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -25,6 +25,7 @@ def unit(session): 'py.test', '--quiet', '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', + '--cov=tests/', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 4793094e882d..95d9a0d803e1 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -51,6 +51,7 @@ def unit(session): 'py.test', '--quiet', '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', + '--cov=tests/', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 7352e6c555e0..3e400cdd9c2f 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1285,7 +1285,7 @@ async def test_{{ method_name }}_async_pager(): async_pager = await client.{{ method_name }}(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1373,7 +1373,7 @@ async def test_{{ method_name }}_async_pages(): {% endif %} ) pages = [] - async for page_ in (await client.{{ method_name }}(request={})).pages: + async for page_ in (await client.{{ method_name }}(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index a4ccb3bfab66..d797a0310cae 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -62,6 +62,7 @@ def unit(session): 'py.test', '--quiet', '--cov=google/cloud/asset_v1/', + '--cov=tests/', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 772bf7df4a33..3fe3b8fa1436 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1009,7 +1009,7 @@ async def test_list_assets_async_pager(): 
async_pager = await client.list_assets(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1056,7 +1056,7 @@ async def test_list_assets_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_assets(request={})).pages: + async for page_ in (await client.list_assets(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -2800,7 +2800,7 @@ async def test_search_all_resources_async_pager(): async_pager = await client.search_all_resources(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2847,7 +2847,7 @@ async def test_search_all_resources_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.search_all_resources(request={})).pages: + async for page_ in (await client.search_all_resources(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -3233,7 +3233,7 @@ async def test_search_all_iam_policies_async_pager(): async_pager = await client.search_all_iam_policies(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -3280,7 +3280,7 @@ async def test_search_all_iam_policies_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.search_all_iam_policies(request={})).pages: + async for page_ in (await client.search_all_iam_policies(request={})).pages: # pragma: no branch 
pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 6d8542945a5a..c34c6850f4bf 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -62,6 +62,7 @@ def unit(session): 'py.test', '--quiet', '--cov=google/iam/credentials_v1/', + '--cov=tests/', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 1e28e50b4f4b..430a4c506418 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -62,6 +62,7 @@ def unit(session): 'py.test', '--quiet', '--cov=google/cloud/logging_v2/', + '--cov=tests/', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 608de96fcf59..01f0c724159d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -857,7 +857,7 @@ async def test_list_buckets_async_pager(): async_pager = await client.list_buckets(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch 
responses.append(response) assert len(responses) == 6 @@ -904,7 +904,7 @@ async def test_list_buckets_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_buckets(request={})).pages: + async for page_ in (await client.list_buckets(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -2071,7 +2071,7 @@ async def test_list_views_async_pager(): async_pager = await client.list_views(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2118,7 +2118,7 @@ async def test_list_views_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_views(request={})).pages: + async for page_ in (await client.list_views(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -3116,7 +3116,7 @@ async def test_list_sinks_async_pager(): async_pager = await client.list_sinks(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -3163,7 +3163,7 @@ async def test_list_sinks_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_sinks(request={})).pages: + async for page_ in (await client.list_sinks(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -4590,7 +4590,7 @@ async def test_list_exclusions_async_pager(): async_pager = await client.list_exclusions(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for 
response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -4637,7 +4637,7 @@ async def test_list_exclusions_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_exclusions(request={})).pages: + async for page_ in (await client.list_exclusions(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index fd574b59c20d..63816c158c3a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1243,7 +1243,7 @@ async def test_list_log_entries_async_pager(): async_pager = await client.list_log_entries(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1290,7 +1290,7 @@ async def test_list_log_entries_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_entries(request={})).pages: + async for page_ in (await client.list_log_entries(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -1514,7 +1514,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): async_pager = await client.list_monitored_resource_descriptors(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for 
response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1561,7 +1561,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: + async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -1941,7 +1941,7 @@ async def test_list_logs_async_pager(): async_pager = await client.list_logs(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1988,7 +1988,7 @@ async def test_list_logs_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_logs(request={})).pages: + async for page_ in (await client.list_logs(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index efbcb370b197..058a7be55486 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -862,7 +862,7 @@ async def test_list_log_metrics_async_pager(): async_pager = await client.list_log_metrics(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in 
async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -909,7 +909,7 @@ async def test_list_log_metrics_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_metrics(request={})).pages: + async for page_ in (await client.list_log_metrics(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index d0c0f468c0b8..6cce26dcc4aa 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -62,6 +62,7 @@ def unit(session): 'py.test', '--quiet', '--cov=google/cloud/redis_v1/', + '--cov=tests/', '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 0744ec57277b..dacb2f205d96 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -863,7 +863,7 @@ async def test_list_instances_async_pager(): async_pager = await client.list_instances(request={},) assert async_pager.next_page_token == 'abc' responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -910,7 +910,7 @@ async def test_list_instances_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_instances(request={})).pages: + async 
for page_ in (await client.list_instances(request={})).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token From 3dcd78896b57ae2f2d86f95828822eaa559d44de Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 28 Feb 2022 19:13:26 +0100 Subject: [PATCH 0756/1339] chore(deps): update actions/setup-python action to v3 (#1223) --- .../.github/workflows/pypi-upload.yaml | 2 +- .../.github/workflows/tests.yaml | 24 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/pypi-upload.yaml b/packages/gapic-generator/.github/workflows/pypi-upload.yaml index 60e1bd3c82e5..0ddc030d4886 100644 --- a/packages/gapic-generator/.github/workflows/pypi-upload.yaml +++ b/packages/gapic-generator/.github/workflows/pypi-upload.yaml @@ -11,7 +11,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: '3.x' - name: Install dependencies diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 6ca76ab3f943..224d4628c88b 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -20,7 +20,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.9" cache: 'pip' @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -49,7 +49,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -94,7 +94,7 @@ jobs: sudo mkdir -p 
/tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -140,7 +140,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -167,7 +167,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -197,7 +197,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -224,7 +224,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -244,7 +244,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -269,7 +269,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -315,7 +315,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python 3.10 - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' @@ -333,7 +333,7 @@ jobs: steps: - uses: actions/checkout@v2 - name: Set up Python "3.10" - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" cache: 'pip' From 2462acad586999b571cb87fa275b4234c7943850 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 1 Mar 2022 20:15:55 +0100 Subject: 
[PATCH 0757/1339] chore(deps): update actions/checkout action to v3 (#1224) --- .../.github/workflows/pypi-upload.yaml | 2 +- .../.github/workflows/tests.yaml | 26 +++++++++---------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/pypi-upload.yaml b/packages/gapic-generator/.github/workflows/pypi-upload.yaml index 0ddc030d4886..48d7582178b6 100644 --- a/packages/gapic-generator/.github/workflows/pypi-upload.yaml +++ b/packages/gapic-generator/.github/workflows/pypi-upload.yaml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest environment: PyPI steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python uses: actions/setup-python@v3 with: diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 224d4628c88b..df8e1779b7e0 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -18,7 +18,7 @@ jobs: # Don't upgrade python version; there's a bug in 3.10 sphinx runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python uses: actions/setup-python@v3 with: @@ -31,7 +31,7 @@ jobs: mypy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python "3.10" uses: actions/setup-python@v3 with: @@ -47,7 +47,7 @@ jobs: target: [showcase, showcase_alternative_templates] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python "3.10" uses: actions/setup-python@v3 with: @@ -88,7 +88,7 @@ jobs: max-parallel: 1 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Setup temp directory run: | sudo mkdir -p /tmp/workspace/tests/cert/ @@ -138,7 +138,7 @@ jobs: variant: _alternative_templates runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: 
actions/checkout@v3 - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v3 with: @@ -165,7 +165,7 @@ jobs: showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python "3.10" uses: actions/setup-python@v3 with: @@ -195,7 +195,7 @@ jobs: matrix: variant: ['', _alternative_templates] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python "3.10" uses: actions/setup-python@v3 with: @@ -222,7 +222,7 @@ jobs: snippetgen: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python "3.10" uses: actions/setup-python@v3 with: @@ -242,7 +242,7 @@ jobs: python: ["3.6", "3.7", "3.8", "3.9", "3.10"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v3 with: @@ -267,7 +267,7 @@ jobs: variant: _alternative_templates runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v3 with: @@ -286,7 +286,7 @@ jobs: runs-on: ubuntu-latest container: gcr.io/gapic-images/googleapis steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Cache Bazel files id: cache-bazel uses: actions/cache@v2 @@ -313,7 +313,7 @@ jobs: goldens-lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.10 uses: actions/setup-python@v3 with: @@ -331,7 +331,7 @@ jobs: style-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python "3.10" uses: actions/setup-python@v3 with: From 1895bad769c99b53d6cc606a8205c25bc3f68579 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 4 Mar 2022 11:20:57 -0500 Subject: [PATCH 0758/1339] fix(deps): require google-api-core >=2.3.2 (#1225) 
--- packages/gapic-generator/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index fdbb7776d6e5..a9f716556d0d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -44,7 +44,7 @@ include_package_data=True, install_requires=( "click >= 6.7", - "google-api-core >= 2.2.0", + "google-api-core >= 2.3.2", "googleapis-common-protos >= 1.54.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", From 9801fde81c343e0c7a9c1adbe6df227d25958b4d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 7 Mar 2022 14:21:27 -0800 Subject: [PATCH 0759/1339] chore(main): release 0.63.6 (#1226) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 858da09ee3a3..55b545bef719 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.63.6](https://github.com/googleapis/gapic-generator-python/compare/v0.63.5...v0.63.6) (2022-03-04) + + +### Bug Fixes + +* **deps:** require google-api-core >=2.3.2 ([#1225](https://github.com/googleapis/gapic-generator-python/issues/1225)) ([f59917f](https://github.com/googleapis/gapic-generator-python/commit/f59917fdbdf5ee4091e35d721811dcd7f4b9a3f3)) + ### [0.63.5](https://github.com/googleapis/gapic-generator-python/compare/v0.63.4...v0.63.5) (2022-02-25) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a9f716556d0d..9f54f613c610 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) 
-version = "0.63.5" +version = "0.63.6" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From b1a56bae41a27e32fb966c3acbb7cddb555bfd01 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 8 Mar 2022 15:05:58 -0500 Subject: [PATCH 0760/1339] fix: resolve issue where rest transport is not used in certain tests (#1231) --- .../%name_%version/%sub/test_%service.py.j2 | 84 ++++++++++++++---- .../%name_%version/%sub/test_%service.py.j2 | 87 +++++++++++++++---- .../unit/gapic/asset_v1/test_asset_service.py | 53 +++++++---- .../credentials_v1/test_iam_credentials.py | 53 +++++++---- .../logging_v2/test_config_service_v2.py | 53 +++++++---- .../logging_v2/test_logging_service_v2.py | 53 +++++++---- .../logging_v2/test_metrics_service_v2.py | 53 +++++++---- .../unit/gapic/redis_v1/test_cloud_redis.py | 53 +++++++---- 8 files changed, 348 insertions(+), 141 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ec91ca41b5df..ae21c130d8b1 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -85,20 +85,32 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - {{ service.client_name }}, +@pytest.mark.parametrize("client_class,transport_name", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, "rest"), + {% endif %} ]) -def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class): +def test_{{ service.client_name|snake_case 
}}_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) {% if service.host %} - assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + assert client.transport._host == ( + '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://{{ service.host }}' + {% endif %} + ) {% endif %} @@ -122,23 +134,35 @@ def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(tra use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - {{ service.client_name }}, +@pytest.mark.parametrize("client_class,transport_name", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, "rest"), + {% endif %} ]) -def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): +def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + 
client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) {% if service.host %} - assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + assert client.transport._host == ( + '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://{{ service.host }}' + {% endif %} + ) {% endif %} @@ -1853,23 +1877,53 @@ def test_{{ service.name|snake_case }}_rest_lro_client(): {%- endif %} {% endif %} {# rest #} -def test_{{ service.name|snake_case }}_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + {% if 'grpc' in opts.transport %} + "grpc", + {% endif %} + {% if 'rest' in opts.transport %} + "rest", + {% endif %} +]) +def test_{{ service.name|snake_case }}_host_no_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), + transport=transport_name, + ) + assert client.transport._host == ( + '{{ host }}:443' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://{{ host }}' + {% endif %} ) - assert client.transport._host == '{{ host }}:443' {% endwith %} -def test_{{ service.name|snake_case }}_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + {% if 'grpc' in opts.transport %} + "grpc", + {% endif %} + {% if 'rest' in opts.transport %} + "rest", + {% endif %} +]) +def test_{{ service.name|snake_case }}_host_with_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), 
client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + '{{ host }}:8000' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://{{ host }}:8000' + {% endif %} ) - assert client.transport._host == '{{ host }}:8000' {% endwith %} {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3e400cdd9c2f..1981b30a397d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -89,23 +89,33 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - {{ service.client_name }}, +@pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} - {{ service.async_client_name }}, + ({{ service.client_name }}, "grpc"), + ({{ service.async_client_name }}, "grpc_asyncio"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, "rest"), {% endif %} ]) -def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class): +def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, 
client_class) {% if service.host %} - assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + assert client.transport._host == ( + '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://{{ service.host }}' + {% endif %} + ) {% endif %} @@ -130,26 +140,36 @@ def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(tra use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - {{ service.client_name }}, +@pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} - {{ service.async_client_name }}, + ({{ service.client_name }}, "grpc"), + ({{ service.async_client_name }}, "grpc_asyncio"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, "rest"), {% endif %} ]) -def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class): +def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) {% if service.host %} - assert client.transport._host == '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' + assert client.transport._host == ( + '{{ service.host }}{% if ":" 
not in service.host %}:443{% endif %}' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://{{ service.host }}' + {% endif %} + ) {% endif %} @@ -2323,23 +2343,54 @@ def test_{{ service.name|snake_case }}_rest_lro_client(): {% endif %} {# rest #} -def test_{{ service.name|snake_case }}_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + {% if 'grpc' in opts.transport %} + "grpc", + "grpc_asyncio", + {% endif %} + {% if 'rest' in opts.transport %} + "rest", + {% endif %} +]) +def test_{{ service.name|snake_case }}_host_no_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), + transport=transport_name, + ) + assert client.transport._host == ( + '{{ host }}:443' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://{{ host }}' + {% endif %} ) - assert client.transport._host == '{{ host }}:443' {% endwith %} - -def test_{{ service.name|snake_case }}_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + {% if 'grpc' in opts.transport %} + "grpc", + "grpc_asyncio", + {% endif %} + {% if 'rest' in opts.transport %} + "rest", + {% endif %} +]) +def test_{{ service.name|snake_case }}_host_with_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + '{{ host }}:8000' + {% if 'rest' in opts.transport %} + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://{{ host }}:8000' + {% endif %} ) - assert client.transport._host == '{{ host }}:8000' {% endwith %} {% if 'grpc' in 
opts.transport %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 3fe3b8fa1436..b211ce806d5f 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -76,20 +76,22 @@ def test__get_default_mtls_endpoint(): assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, - AssetServiceAsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (AssetServiceClient, "grpc"), + (AssetServiceAsyncClient, "grpc_asyncio"), ]) -def test_asset_service_client_from_service_account_info(client_class): +def test_asset_service_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'cloudasset.googleapis.com:443' + assert client.transport._host == ( + 'cloudasset.googleapis.com:443' + ) @pytest.mark.parametrize("transport_class,transport_name", [ @@ -108,23 +110,25 @@ def test_asset_service_client_service_account_always_use_jwt(transport_class, tr use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - AssetServiceClient, - AssetServiceAsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (AssetServiceClient, "grpc"), + 
(AssetServiceAsyncClient, "grpc_asyncio"), ]) -def test_asset_service_client_from_service_account_file(client_class): +def test_asset_service_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'cloudasset.googleapis.com:443' + assert client.transport._host == ( + 'cloudasset.googleapis.com:443' + ) def test_asset_service_client_get_transport_class(): @@ -3866,20 +3870,33 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( ) -def test_asset_service_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_asset_service_host_no_port(transport_name): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudasset.googleapis.com:443' ) - assert client.transport._host == 'cloudasset.googleapis.com:443' - -def test_asset_service_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_asset_service_host_with_port(transport_name): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'cloudasset.googleapis.com:8000' ) - assert client.transport._host == 'cloudasset.googleapis.com:8000' def test_asset_service_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index ec644b8726d6..d357ec3600be 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -67,20 +67,22 @@ def test__get_default_mtls_endpoint(): assert IAMCredentialsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - IAMCredentialsClient, - IAMCredentialsAsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (IAMCredentialsClient, "grpc"), + (IAMCredentialsAsyncClient, "grpc_asyncio"), ]) -def test_iam_credentials_client_from_service_account_info(client_class): +def test_iam_credentials_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'iamcredentials.googleapis.com:443' + assert 
client.transport._host == ( + 'iamcredentials.googleapis.com:443' + ) @pytest.mark.parametrize("transport_class,transport_name", [ @@ -99,23 +101,25 @@ def test_iam_credentials_client_service_account_always_use_jwt(transport_class, use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - IAMCredentialsClient, - IAMCredentialsAsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (IAMCredentialsClient, "grpc"), + (IAMCredentialsAsyncClient, "grpc_asyncio"), ]) -def test_iam_credentials_client_from_service_account_file(client_class): +def test_iam_credentials_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'iamcredentials.googleapis.com:443' + assert client.transport._host == ( + 'iamcredentials.googleapis.com:443' + ) def test_iam_credentials_client_get_transport_class(): @@ -1796,20 +1800,33 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( ) -def test_iam_credentials_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_iam_credentials_host_no_port(transport_name): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), 
client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'iamcredentials.googleapis.com:443' ) - assert client.transport._host == 'iamcredentials.googleapis.com:443' - -def test_iam_credentials_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_iam_credentials_host_with_port(transport_name): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'iamcredentials.googleapis.com:8000' ) - assert client.transport._host == 'iamcredentials.googleapis.com:8000' def test_iam_credentials_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 01f0c724159d..9a818be44901 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -68,20 +68,22 @@ def test__get_default_mtls_endpoint(): assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), ]) -def test_config_service_v2_client_from_service_account_info(client_class): +def 
test_config_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'logging.googleapis.com:443' + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) @pytest.mark.parametrize("transport_class,transport_name", [ @@ -100,23 +102,25 @@ def test_config_service_v2_client_service_account_always_use_jwt(transport_class use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - ConfigServiceV2Client, - ConfigServiceV2AsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (ConfigServiceV2Client, "grpc"), + (ConfigServiceV2AsyncClient, "grpc_asyncio"), ]) -def test_config_service_v2_client_from_service_account_file(client_class): +def test_config_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'logging.googleapis.com:443' + 
assert client.transport._host == ( + 'logging.googleapis.com:443' + ) def test_config_service_v2_client_get_transport_class(): @@ -6258,20 +6262,33 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( ) -def test_config_service_v2_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:443' ) - assert client.transport._host == 'logging.googleapis.com:443' - -def test_config_service_v2_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' ) - assert client.transport._host == 'logging.googleapis.com:8000' def test_config_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 63816c158c3a..de5b11753723 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -74,20 +74,22 @@ def test__get_default_mtls_endpoint(): assert 
LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), ]) -def test_logging_service_v2_client_from_service_account_info(client_class): +def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'logging.googleapis.com:443' + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) @pytest.mark.parametrize("transport_class,transport_name", [ @@ -106,23 +108,25 @@ def test_logging_service_v2_client_service_account_always_use_jwt(transport_clas use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - LoggingServiceV2Client, - LoggingServiceV2AsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), ]) -def test_logging_service_v2_client_from_service_account_file(client_class): +def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", 
transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'logging.googleapis.com:443' + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) def test_logging_service_v2_client_get_transport_class(): @@ -2346,20 +2350,33 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( ) -def test_logging_service_v2_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:443' ) - assert client.transport._host == 'logging.googleapis.com:443' - -def test_logging_service_v2_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' ) - assert client.transport._host == 'logging.googleapis.com:8000' def test_logging_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 058a7be55486..07fb89958bf7 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -72,20 +72,22 @@ def test__get_default_mtls_endpoint(): assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), ]) -def test_metrics_service_v2_client_from_service_account_info(client_class): +def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'logging.googleapis.com:443' + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) @pytest.mark.parametrize("transport_class,transport_name", [ @@ -104,23 +106,25 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(transport_clas use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - MetricsServiceV2Client, - MetricsServiceV2AsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (MetricsServiceV2Client, "grpc"), + (MetricsServiceV2AsyncClient, "grpc_asyncio"), ]) -def 
test_metrics_service_v2_client_from_service_account_file(client_class): +def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'logging.googleapis.com:443' + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) def test_metrics_service_v2_client_get_transport_class(): @@ -2199,20 +2203,33 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( ) -def test_metrics_service_v2_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:443' ) - assert client.transport._host == 'logging.googleapis.com:443' - -def test_metrics_service_v2_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), 
client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' ) - assert client.transport._host == 'logging.googleapis.com:8000' def test_metrics_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index dacb2f205d96..8675180642b7 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -73,20 +73,22 @@ def test__get_default_mtls_endpoint(): assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, - CloudRedisAsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), ]) -def test_cloud_redis_client_from_service_account_info(client_class): +def test_cloud_redis_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'redis.googleapis.com:443' + assert client.transport._host == ( + 'redis.googleapis.com:443' + ) 
@pytest.mark.parametrize("transport_class,transport_name", [ @@ -105,23 +107,25 @@ def test_cloud_redis_client_service_account_always_use_jwt(transport_class, tran use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [ - CloudRedisClient, - CloudRedisAsyncClient, +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), ]) -def test_cloud_redis_client_from_service_account_file(client_class): +def test_cloud_redis_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == 'redis.googleapis.com:443' + assert client.transport._host == ( + 'redis.googleapis.com:443' + ) def test_cloud_redis_client_get_transport_class(): @@ -3191,20 +3195,33 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( ) -def test_cloud_redis_host_no_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_cloud_redis_host_no_port(transport_name): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:443' ) - assert client.transport._host == 
'redis.googleapis.com:443' - -def test_cloud_redis_host_with_port(): +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_cloud_redis_host_with_port(transport_name): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:8000' ) - assert client.transport._host == 'redis.googleapis.com:8000' def test_cloud_redis_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) From e2a714a9ff4355bf8cfd815939589c12753b2898 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 8 Mar 2022 20:20:38 +0000 Subject: [PATCH 0761/1339] chore(main): release 0.63.7 (#1232) :robot: I have created a release *beep* *boop* --- ### [0.63.7](https://github.com/googleapis/gapic-generator-python/compare/v0.63.6...v0.63.7) (2022-03-08) ### Bug Fixes * resolve issue where rest transport is not used in certain tests ([#1231](https://github.com/googleapis/gapic-generator-python/issues/1231)) ([90ab41a](https://github.com/googleapis/gapic-generator-python/commit/90ab41ab1f1b058ec0eb4a96b973031898f64df0)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 55b545bef719..8086fd095e0a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.63.7](https://github.com/googleapis/gapic-generator-python/compare/v0.63.6...v0.63.7) (2022-03-08) + + +### Bug Fixes + +* resolve issue where rest transport is not used in certain tests ([#1231](https://github.com/googleapis/gapic-generator-python/issues/1231)) ([90ab41a](https://github.com/googleapis/gapic-generator-python/commit/90ab41ab1f1b058ec0eb4a96b973031898f64df0)) + ### [0.63.6](https://github.com/googleapis/gapic-generator-python/compare/v0.63.5...v0.63.6) (2022-03-04) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9f54f613c610..da4e19f6189a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.6" +version = "0.63.7" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From fe1afec200765bed92ac6cf2f110ea5c9543a95b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 15 Mar 2022 15:48:32 +0100 Subject: [PATCH 0762/1339] chore(deps): update dependency markupsafe to v2.1.1 (#1235) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index af29d63a9c86..50c3e6bcc86d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,7 +2,7 @@ click==8.0.4 google-api-core==2.1.1 googleapis-common-protos==1.55.0 jinja2==3.0.3 -MarkupSafe==2.1.0 +MarkupSafe==2.1.1 
protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 From 87d5fc0bea823e5cf8061fc6bbe3d1d7693a9fca Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 17 Mar 2022 00:11:52 +0100 Subject: [PATCH 0763/1339] chore(deps): update dependency setuptools to v60.10.0 (#1237) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 50c3e6bcc86d..98fc54948a47 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.2 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.9.3 +setuptools==60.10.0 From e656a6fd06d2458711c64514661c6a1f766f3fed Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 17 Mar 2022 10:35:43 +0100 Subject: [PATCH 0764/1339] chore(deps): update dependency pypandoc to v1.7.4 (#1239) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 98fc54948a47..633f9e206d01 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,7 +4,7 @@ googleapis-common-protos==1.55.0 jinja2==3.0.3 MarkupSafe==2.1.1 protobuf==3.19.4 -pypandoc==1.7.2 +pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==60.10.0 From 709101da4155f7d7b9d3c0fb33c3070df43c15d3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 19 Mar 2022 11:58:53 +0100 Subject: [PATCH 0765/1339] chore(deps): update dependency googleapis-common-protos to v1.56.0 (#1241) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt 
index 633f9e206d01..f41da5282981 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.0.4 google-api-core==2.1.1 -googleapis-common-protos==1.55.0 +googleapis-common-protos==1.56.0 jinja2==3.0.3 MarkupSafe==2.1.1 protobuf==3.19.4 From 28921fe0865ed114ccadc039821074f00ee19b5b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 21 Mar 2022 22:06:48 +0100 Subject: [PATCH 0766/1339] chore(deps): update actions/cache action to v3 (#1243) --- packages/gapic-generator/.github/workflows/tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index df8e1779b7e0..6983714b61f7 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -289,7 +289,7 @@ jobs: - uses: actions/checkout@v3 - name: Cache Bazel files id: cache-bazel - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/.cache/bazel # Note: if the container is updated, the key needs to be updated as well. 
From 0977310822eb5ab5941a326ff07e5c59969821f6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 23 Mar 2022 11:39:59 -0400 Subject: [PATCH 0767/1339] fix: sanitize file names (#1236) * fix: sanitize file names * lint * address review feedback --- packages/gapic-generator/gapic/schema/api.py | 14 ++++++++--- .../tests/unit/schema/test_api.py | 24 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index c6b69cf34a3d..4cce5909a002 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -266,16 +266,24 @@ def build( invalid_module_names = set(keyword.kwlist) | { "metadata", "retry", "timeout", "request"} - def disambiguate_keyword_fname( + def disambiguate_keyword_sanitize_fname( full_path: str, visited_names: Container[str]) -> str: path, fname = os.path.split(full_path) name, ext = os.path.splitext(fname) + + # Replace `.` with `_` in the basename as + # `.` is not a valid character for modules names. + # See https://peps.python.org/pep-0008/#package-and-module-names + if "." in name: + name = name.replace(".", "_") + full_path = os.path.join(path, name + ext) + if name in invalid_module_names or full_path in visited_names: name += "_" full_path = os.path.join(path, name + ext) if full_path in visited_names: - return disambiguate_keyword_fname(full_path, visited_names) + return disambiguate_keyword_sanitize_fname(full_path, visited_names) return full_path @@ -294,7 +302,7 @@ def disambiguate_keyword_fname( # load the services and methods with the full scope of types. 
pre_protos: Dict[str, Proto] = dict(prior_protos or {}) for fd in file_descriptors: - fd.name = disambiguate_keyword_fname(fd.name, pre_protos) + fd.name = disambiguate_keyword_sanitize_fname(fd.name, pre_protos) pre_protos[fd.name] = Proto.build( file_descriptor=fd, file_to_generate=fd.package.startswith(package), diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 212c86daedb3..b6b644023992 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -209,6 +209,30 @@ def test_proto_names(): assert proto.disambiguate('foo') == '_foo' +def test_proto_with_invalid_characters(): + # Protos with filenames that contain `.` in the basename + # cannot be directly imported. Test that `.` is changed to `_` + # See https://peps.python.org/pep-0008/#package-and-module-names + + test_cases = [ + {'name': 'k8s.min.proto', 'expected': 'k8s_min.proto'}, + {'name': 'k8s.min.test.proto', 'expected': 'k8s_min_test.proto'} + ] + + for test_case in test_cases: + fd = ( + make_file_pb2( + name=test_case['name'], + package='google.keywords.v1', + messages=(make_message_pb2(name='ImportRequest', fields=()),), + ), + ) + api_schema = api.API.build(fd, package='google.keywords.v1') + assert set(api_schema.protos.keys()) == { + test_case['expected'], + } + + def test_proto_keyword_fname(): # Protos with filenames that happen to be python keywords # cannot be directly imported. 
From f10c4da1b552cee89b8bee3fe452e568217130c4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 25 Mar 2022 23:44:03 +0100 Subject: [PATCH 0768/1339] chore(deps): update dependency setuptools to v61 (#1248) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f41da5282981..c2b74bb7d3eb 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==60.10.0 +setuptools==61.1.0 From 345efb6850dfaaf2470e3c13f4108e06b77795a6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 27 Mar 2022 16:54:42 +0200 Subject: [PATCH 0769/1339] chore(deps): update dependency setuptools to v61.1.1 (#1250) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index c2b74bb7d3eb..04602029c492 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==61.1.0 +setuptools==61.1.1 From dfbde95757c4c63d85e21e75fed46b88e32b5369 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 30 Mar 2022 05:07:19 -0600 Subject: [PATCH 0770/1339] fix: fix docstring for map fields (#1249) * fix: fix docstring for map fields * chore: format * chore: fix typing Co-authored-by: Anthonios Partheniou --- .../gapic-generator/gapic/schema/metadata.py | 7 ++- .../gapic-generator/gapic/schema/wrappers.py | 4 ++ .../google/cloud/asset_v1/types/assets.py | 4 +- .../logging_service_v2/async_client.py 
| 4 +- .../services/logging_service_v2/client.py | 4 +- .../cloud/logging_v2/types/log_entry.py | 2 +- .../google/cloud/logging_v2/types/logging.py | 4 +- .../cloud/logging_v2/types/logging_metrics.py | 2 +- .../cloud/redis_v1/types/cloud_redis.py | 6 +- .../tests/unit/schema/wrappers/test_field.py | 59 +++++++++++++++++++ 10 files changed, 82 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 91e690577755..5bd23e1b522d 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -27,7 +27,7 @@ """ import dataclasses -from typing import FrozenSet, Tuple +from typing import FrozenSet, Tuple, Optional from google.protobuf import descriptor_pb2 @@ -362,14 +362,19 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Metadata': class FieldIdentifier: ident: Address repeated: bool + mapping: Optional[tuple] = None def __str__(self) -> str: + if self.mapping: + return f'Mapping[{self.mapping[0].ident}, {self.mapping[1].ident}]' if self.repeated: return f'Sequence[{self.ident}]' return str(self.ident) @property def sphinx(self) -> str: + if self.mapping: + return f'Mapping[{self.mapping[0].ident.sphinx}, {self.mapping[1].ident.sphinx}]' if self.repeated: return f'Sequence[{self.ident.sphinx}]' return self.ident.sphinx diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index ac5b8f63c960..fa8ad2dd68ef 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -80,9 +80,13 @@ def name(self) -> str: @utils.cached_property def ident(self) -> metadata.FieldIdentifier: """Return the identifier to be used in templates.""" + mapping: Union[None, Tuple[Field, Field]] = None + if self.map: + mapping = (self.type.fields["key"], self.type.fields["value"]) return metadata.FieldIdentifier( 
ident=self.type.ident, repeated=self.repeated, + mapping=mapping, ) @property diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index dda640458cc5..4a4cdb8c56db 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -428,7 +428,7 @@ class ResourceSearchResult(proto.Message): - use a field query. Example: ``location:us-west*`` - use a free text query. Example: ``us-west*`` - labels (Sequence[google.cloud.asset_v1.types.ResourceSearchResult.LabelsEntry]): + labels (Mapping[str, str]): Labels associated with this resource. See `Labelling and grouping GCP resources `__ @@ -724,7 +724,7 @@ class Explanation(proto.Message): r"""Explanation about the IAM policy search result. Attributes: - matched_permissions (Sequence[google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.MatchedPermissionsEntry]): + matched_permissions (Mapping[str, google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.Permissions]): The map from roles to their included permissions that match the permission query (i.e., a query containing ``policy.role.permissions:``). 
Example: if query diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index f6431a96b6aa..a01f02d4c705 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -296,7 +296,7 @@ async def write_log_entries(self, *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + labels: Mapping[str, str] = None, entries: Sequence[log_entry.LogEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -380,7 +380,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]`): + labels (:class:`Mapping[str, str]`): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. 
If a log entry already has a label with the same key as a diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 9faf86790e41..92a2ff322f84 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -475,7 +475,7 @@ def write_log_entries(self, *, log_name: str = None, resource: monitored_resource_pb2.MonitoredResource = None, - labels: Sequence[logging.WriteLogEntriesRequest.LabelsEntry] = None, + labels: Mapping[str, str] = None, entries: Sequence[log_entry.LogEntry] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -559,7 +559,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + labels (Mapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. 
If a log entry already has a label with the same key as a diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 7ab991846c13..f6a4c1d3ac55 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -142,7 +142,7 @@ class LogEntry(proto.Message): http_request (google.logging.type.http_request_pb2.HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. - labels (Sequence[google.cloud.logging_v2.types.LogEntry.LabelsEntry]): + labels (Mapping[str, str]): Optional. A set of user-defined (key, value) data that provides additional information about the log entry. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index a4610dd59de0..f6073b02a46c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -106,7 +106,7 @@ class WriteLogEntriesRequest(proto.Message): "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. - labels (Sequence[google.cloud.logging_v2.types.WriteLogEntriesRequest.LabelsEntry]): + labels (Mapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. 
If a log entry already has a label with the same key as a label in this @@ -196,7 +196,7 @@ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. Attributes: - log_entry_errors (Sequence[google.cloud.logging_v2.types.WriteLogEntriesPartialErrors.LogEntryErrorsEntry]): + log_entry_errors (Mapping[int, google.rpc.status_pb2.Status]): When ``WriteLogEntriesRequest.partial_success`` is true, records the error status for entries that were not written due to a permanent error, keyed by the entry's zero-based diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index e2602c4c49ee..c39349904eed 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -125,7 +125,7 @@ class LogMetric(proto.Message): Example: ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Sequence[google.cloud.logging_v2.types.LogMetric.LabelExtractorsEntry]): + label_extractors (Mapping[str, str]): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. 
Each label key diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index a4aec5eabc71..bf54ebc15a29 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -65,7 +65,7 @@ class Instance(proto.Message): display_name (str): An arbitrary and optional user-provided name for the instance. - labels (Sequence[google.cloud.redis_v1.types.Instance.LabelsEntry]): + labels (Mapping[str, str]): Resource labels to represent user provided metadata location_id (str): @@ -124,7 +124,7 @@ class Instance(proto.Message): status_message (str): Output only. Additional information about the current status of this instance, if available. - redis_configs (Sequence[google.cloud.redis_v1.types.Instance.RedisConfigsEntry]): + redis_configs (Mapping[str, str]): Optional. Redis configuration parameters, according to http://redis.io/topics/config. Currently, the only supported parameters are: @@ -697,7 +697,7 @@ class LocationMetadata(proto.Message): ``google.cloud.location.Location.metadata`` field. Attributes: - available_zones (Sequence[google.cloud.redis_v1.types.LocationMetadata.AvailableZonesEntry]): + available_zones (Mapping[str, google.cloud.redis_v1.types.ZoneMetadata]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. 
These keys can be specified in ``location_id`` or diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index b68d6f81c6b3..993671574ddd 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -86,6 +86,46 @@ def test_not_repeated(): assert not field.repeated +def test_map(): + entry_msg = make_message( + name='SquidEntry', + fields=( + make_field(name='key', type='TYPE_STRING'), + make_field(name='value', type='TYPE_STRING'), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + field = make_field( + name='squids', + type_name='mollusc.SquidEntry', + message=entry_msg, + label=3, + type='TYPE_MESSAGE', + ) + + assert field.map + + +def test_ident_map(): + entry_msg = make_message( + name='SquidEntry', + fields=( + make_field(name='key', type='TYPE_STRING'), + make_field(name='value', type='TYPE_STRING'), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + field = make_field( + name='squids', + type_name='mollusc.SquidEntry', + message=entry_msg, + label=3, + type='TYPE_MESSAGE', + ) + + assert str(field.ident) == "Mapping[str, str]" + + def test_required(): field = make_field() field.options.Extensions[field_behavior_pb2.field_behavior].append( @@ -110,6 +150,25 @@ def test_ident_sphinx_repeated(): assert field.ident.sphinx == 'Sequence[bool]' +def test_ident_sphinx_map(): + entry_msg = make_message( + name='SquidEntry', + fields=( + make_field(name='key', type='TYPE_STRING'), + make_field(name='value', type='TYPE_STRING'), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + field = make_field( + name='squids', + type_name='mollusc.SquidEntry', + message=entry_msg, + label=3, + type='TYPE_MESSAGE', + ) + assert field.ident.sphinx == 'Mapping[str, str]' + + def test_resource_reference(): field = make_field(type='TYPE_STRING') 
field.options.Extensions[resource_pb2.resource_reference].type = "translate.googleapis.com/Glossary" From 2d57900d6bdfa2c15b48b82ae86851711e4b7f61 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 30 Mar 2022 10:12:36 -0400 Subject: [PATCH 0771/1339] fix(deps): exclude click 8.1.0 (#1255) --- packages/gapic-generator/setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index da4e19f6189a..9df3fe28612c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -43,7 +43,9 @@ platforms="Posix; MacOS X", include_package_data=True, install_requires=( - "click >= 6.7", + # There is a typing issue with click==8.1.0 + # See https://github.com/pallets/click/issues/2227 + "click >= 6.7,!=8.1.0", "google-api-core >= 2.3.2", "googleapis-common-protos >= 1.54.0", "grpcio >= 1.24.3", From bb95395d449974ad6feec935d02c1e092f3e7dc0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 16:47:50 +0200 Subject: [PATCH 0772/1339] chore(deps): update dependency setuptools to v61.2.0 (#1251) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 04602029c492..d87df34a3fff 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==61.1.1 +setuptools==61.2.0 From cccc50f2dc49dab256a5e20ecce13229e11d2264 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 30 Mar 2022 17:33:52 +0200 Subject: [PATCH 0773/1339] chore(deps): update dependency jinja2 to v3.1.1 (#1247) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d87df34a3fff..e334e88fb78b 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==8.0.4 google-api-core==2.1.1 googleapis-common-protos==1.56.0 -jinja2==3.0.3 +jinja2==3.1.1 MarkupSafe==2.1.1 protobuf==3.19.4 pypandoc==1.7.4 From 4cfcaeecc53baa42c2e0f95288217d26edb8809c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 31 Mar 2022 01:45:10 +0200 Subject: [PATCH 0774/1339] chore(deps): update dependency click to v8.1.1 (#1256) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e334e88fb78b..f5261127dbd0 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.0.4 +click==8.1.1 google-api-core==2.1.1 googleapis-common-protos==1.56.0 jinja2==3.1.1 From 2f7d84230cceb4467fadd95de3c7b66090603521 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 1 Apr 2022 02:57:07 +0200 Subject: [PATCH 0775/1339] chore(deps): update dependency setuptools to v61.3.0 (#1258) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f5261127dbd0..57d11b2d3920 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.19.4 pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==61.2.0 +setuptools==61.3.0 From bcce8b1281a8cf08ba09441b8151d6963f7c0447 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 1 Apr 2022 04:07:10 +0200 Subject: [PATCH 0776/1339] 
chore(deps): update dependency click to v8.1.2 (#1257) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 57d11b2d3920..b4898de7cd69 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.1.1 +click==8.1.2 google-api-core==2.1.1 googleapis-common-protos==1.56.0 jinja2==3.1.1 From bc1065db9a28902647281f2709470f37c948128b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 2 Apr 2022 00:02:10 +0200 Subject: [PATCH 0777/1339] chore(deps): update dependency protobuf to v3.20.0 (#1259) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b4898de7cd69..2bd0235f259a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.1.1 googleapis-common-protos==1.56.0 jinja2==3.1.1 MarkupSafe==2.1.1 -protobuf==3.19.4 +protobuf==3.20.0 pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From ca377e191db90c65cc7c230a3df88fad0bedfe69 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 3 Apr 2022 21:39:22 +0200 Subject: [PATCH 0778/1339] chore(deps): update dependency setuptools to v61.3.1 (#1260) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2bd0235f259a..2890536f31d7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.20.0 pypandoc==1.7.4 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 
support is dropped -setuptools==61.3.0 +setuptools==61.3.1 From bd9e5caa74a554d4abe10ef3b48b32135d0b4d7c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 4 Apr 2022 01:47:09 +0200 Subject: [PATCH 0779/1339] chore(deps): update dependency pypandoc to v1.7.5 (#1261) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2890536f31d7..0c9d4a78080e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,7 +4,7 @@ googleapis-common-protos==1.56.0 jinja2==3.1.1 MarkupSafe==2.1.1 protobuf==3.20.0 -pypandoc==1.7.4 +pypandoc==1.7.5 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==61.3.1 From 0186d7a8f77d46f3bb3ceabd86a16b0b289307a1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 4 Apr 2022 17:25:49 +0200 Subject: [PATCH 0780/1339] chore(deps): update dependency setuptools to v62 (#1262) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0c9d4a78080e..19fd6dfeff29 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,4 +7,4 @@ protobuf==3.20.0 pypandoc==1.7.5 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==61.3.1 +setuptools==62.0.0 From 0b44657e2f27fd9b2181d1a61c6e7f7aa5e3b1e4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 4 Apr 2022 15:53:53 -0400 Subject: [PATCH 0781/1339] chore(main): release 0.63.8 (#1246) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ 
packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8086fd095e0a..61f9d3e3d371 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +### [0.63.8](https://github.com/googleapis/gapic-generator-python/compare/v0.63.7...v0.63.8) (2022-04-04) + + +### Bug Fixes + +* **deps:** exclude click 8.1.0 ([#1255](https://github.com/googleapis/gapic-generator-python/issues/1255)) ([daf63eb](https://github.com/googleapis/gapic-generator-python/commit/daf63ebe2392fd6fde65326fffc5519cd126c2ae)) +* fix docstring for map fields ([#1249](https://github.com/googleapis/gapic-generator-python/issues/1249)) ([3100464](https://github.com/googleapis/gapic-generator-python/commit/310046478092b4fc4ef9dfdd1e50363ca6fc72c5)) +* sanitize file names ([#1236](https://github.com/googleapis/gapic-generator-python/issues/1236)) ([3072ffb](https://github.com/googleapis/gapic-generator-python/commit/3072ffb6000983ecb06d8dd7b44f77da61cc992e)) + ### [0.63.7](https://github.com/googleapis/gapic-generator-python/compare/v0.63.6...v0.63.7) (2022-03-08) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9df3fe28612c..f2b2ad1bf1fd 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.7" +version = "0.63.8" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From cdbb4109dd7d78b3924583d5ece60e8d1bbe42ff Mon Sep 17 00:00:00 2001 From: Dov Shlachter Date: Thu, 7 Apr 2022 13:03:39 -0700 Subject: [PATCH 0782/1339] feat: full LRO for Extended Operations (#1234) * feat: full LRO for Extended Operations Implementation and tests for Extended Operations. 
The full description of Extended Operations is beyond the scope of this changelog, but three important differences from 'normal' LROs are 1) The type of the 'Operation' is not fixed but is determined by specification. 2) The operation service is not fixed and is determined by specification. 3) The polling method in the operation service is not fixed and is determined by specification. Methods that meet the Extended Operations criteria will generate two methods in the client class: a 'plain' version with the "_unary" suffix that returns the custom Operation type as a raw proto message, and a 'rich' version that returns google.api_core.ExtendedOperation, which can be polled and waited on for completion. --- .../%sub/services/%service/client.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 10 +- packages/gapic-generator/gapic/schema/api.py | 94 +- .../gapic-generator/gapic/schema/wrappers.py | 154 +- .../%sub/services/%service/_client_macros.j2 | 262 +++ .../%sub/services/%service/client.py.j2 | 225 +-- .../services/%service/transports/base.py.j2 | 29 + .../services/%service/transports/grpc.py.j2 | 5 + .../services/%service/transports/rest.py.j2 | 4 + .../%name_%version/%sub/types/_message.py.j2 | 18 + .../gapic/templates/setup.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 1449 +---------------- .../gapic/%name_%version/%sub/test_macros.j2 | 1433 ++++++++++++++++ .../gapic-generator/test_utils/test_utils.py | 4 +- ...xtended_operation_forwardcompat_lro.proto} | 15 +- .../asset_v1/services/asset_service/client.py | 24 +- .../services/asset_service/transports/base.py | 5 + .../services/asset_service/transports/grpc.py | 5 + .../tests/integration/goldens/asset/setup.py | 2 +- .../unit/gapic/asset_v1/test_asset_service.py | 45 +- .../services/iam_credentials/client.py | 8 +- .../iam_credentials/transports/base.py | 5 + .../iam_credentials/transports/grpc.py | 5 + .../integration/goldens/credentials/setup.py | 2 +- 
.../credentials_v1/test_iam_credentials.py | 29 +- .../services/config_service_v2/client.py | 46 +- .../config_service_v2/transports/base.py | 5 + .../config_service_v2/transports/grpc.py | 5 + .../services/logging_service_v2/client.py | 12 +- .../logging_service_v2/transports/base.py | 5 + .../logging_service_v2/transports/grpc.py | 5 + .../services/metrics_service_v2/client.py | 10 +- .../metrics_service_v2/transports/base.py | 5 + .../metrics_service_v2/transports/grpc.py | 5 + .../integration/goldens/logging/setup.py | 2 +- .../logging_v2/test_config_service_v2.py | 64 +- .../logging_v2/test_logging_service_v2.py | 30 +- .../logging_v2/test_metrics_service_v2.py | 32 +- .../redis_v1/services/cloud_redis/client.py | 18 +- .../services/cloud_redis/transports/base.py | 5 + .../services/cloud_redis/transports/grpc.py | 5 + .../tests/integration/goldens/redis/setup.py | 2 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 44 +- .../tests/unit/schema/test_api.py | 319 +++- .../tests/unit/schema/wrappers/test_field.py | 11 + .../unit/schema/wrappers/test_message.py | 65 +- .../tests/unit/schema/wrappers/test_method.py | 79 +- .../unit/schema/wrappers/test_service.py | 19 +- 48 files changed, 2763 insertions(+), 1870 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 rename packages/gapic-generator/tests/fragments/{test_diregapic_forwardcompat_lro.proto => test_extended_operation_forwardcompat_lro.proto} (88%) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index d6299831ecb2..4b6e26c29155 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -322,11 +322,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() %} - {% if method.operation_service %}{# DIREGAPIC LRO #} + {% if method.operation_service %}{# Extended Operations LRO #} def {{ method.name|snake_case }}_unary(self, {% else %} def {{ method.name|snake_case }}(self, - {% endif %}{# DIREGAPIC LRO #} + {% endif %}{# Extended Operations LRO #} {% if not method.client_streaming %} request: Union[{{ method.input.ident }}, dict] = None, *, diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ae21c130d8b1..e483effc140a 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -107,7 +107,7 @@ def test_{{ service.client_name|snake_case }}_from_service_account_info(client_c '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' {% if 'rest' in opts.transport %} if transport_name in ['grpc', 'grpc_asyncio'] - else + else 'https://{{ service.host }}' {% endif %} ) @@ -159,7 +159,7 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(client_c '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' {% if 'rest' in opts.transport %} if transport_name in ['grpc', 'grpc_asyncio'] - else + else 'https://{{ service.host }}' {% endif %} ) @@ -525,6 +525,8 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): call.return_value = None {% elif method.lro %} call.return_value = 
operations_pb2.Operation(name='operations/spam') + {% elif method.extended_lro %} + call.return_value = {{ method.extended_lro.operation_type.ident }}() {% elif method.server_streaming %} call.return_value = iter([{{ method.output.ident }}()]) {% else %} @@ -561,6 +563,8 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): assert response is None {% elif method.lro %} assert isinstance(response, future.Future) + {% elif method.extended_lro %} + assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) {% elif method.server_streaming %} for message in response: assert isinstance(message, {{ method.output.ident }}) @@ -1055,6 +1059,8 @@ def test_{{ method.name|snake_case }}_rest(request_type): assert response is None {% elif method.lro %} assert response.operation.name == "operations/spam" + {% elif method.extended_lro %} + assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) {% else %} assert isinstance(response, {{ method.client_output.ident }}) {% for field in method.output.fields.values() | rejectattr('message') %} diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 4cce5909a002..5d3f6b87bf37 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -488,19 +488,41 @@ def requires_package(self, pkg: Tuple[str, ...]) -> bool: ) def get_custom_operation_service(self, method: "wrappers.Method") -> "wrappers.Service": + """Return the extended operation service that should be polled for progress + from a given initial method. + + Precondition: `method` returns an Extended Operation type message + and has an `operation_polling_service` annotation. + """ if not method.output.is_extended_operation: raise ValueError( f"Method is not an extended operation LRO: {method.name}") op_serv_name = self.naming.proto_package + "." 
+ \ method.options.Extensions[ex_ops_pb2.operation_service] - op_serv = self.services[op_serv_name] - if not op_serv.custom_polling_method: + op_serv = self.services.get(op_serv_name) + if not op_serv: + raise ValueError( + f"No such service: {op_serv_name}" + ) + + if not op_serv.operation_polling_method: raise ValueError( f"Service is not an extended operation operation service: {op_serv.name}") return op_serv + def get_extended_operations_services(self, service) -> Set["wrappers.Service"]: + """Return a set of all the extended operation services used by the input service. + + Precondition: `service` is NOT an extended operation service + """ + return set( + self.get_custom_operation_service(m) + for m in service.methods.values() + if m.operation_service + ) + class _ProtoBuilder: """A "builder class" for Proto objects. @@ -902,6 +924,70 @@ def _maybe_get_lro( return lro + def _maybe_get_extended_lro( + self, + service_address: metadata.Address, + meth_pb: descriptor_pb2.MethodDescriptorProto, + ) -> Optional[wrappers.ExtendedOperationInfo]: + op_service_name = meth_pb.options.Extensions[ex_ops_pb2.operation_service] + if not op_service_name: + return None + + # Manual lookups because services and methods haven't been loaded. + # Note: this assumes that the operation service lives in the same proto file. + # This is a reasonable assumption as of March '22, but it may (unlikely) + # change in the future. 
+ op_service_pb = next( + (s for s in self.file_descriptor.service if s.name == op_service_name), + None, + ) + if not op_service_pb: + raise ValueError( + f"Could not find custom operation service: {op_service_name}" + ) + + operation_polling_method_pb = next( + ( + m + for m in op_service_pb.method + if m.options.Extensions[ex_ops_pb2.operation_polling_method] + ), + None, + ) + if not operation_polling_method_pb: + raise ValueError( + f"Could not find operation polling method for custom operation service: {op_service_name}" + ) + + operation_request_key = service_address.resolve( + operation_polling_method_pb.input_type.lstrip(".") + ) + operation_request_message = self.api_messages[operation_request_key] + + operation_type = service_address.resolve( + operation_polling_method_pb.output_type.lstrip(".") + ) + method_output_type = service_address.resolve( + meth_pb.output_type.lstrip(".") + ) + if operation_type != method_output_type: + raise ValueError( + f"Inconsistent return types between extended lro method '{meth_pb.name}'" + f" and extended lro polling method '{operation_polling_method_pb.name}':" + f" '{method_output_type}' and '{operation_type}'" + ) + + operation_message = self.api_messages[operation_type] + if not operation_message.is_extended_operation: + raise ValueError( + f"Message is not an extended operation: {operation_type}" + ) + + return wrappers.ExtendedOperationInfo( + request_type=operation_request_message, + operation_type=operation_message, + ) + def _get_methods(self, methods: Sequence[descriptor_pb2.MethodDescriptorProto], service_address: metadata.Address, path: Tuple[int, ...], @@ -932,6 +1018,10 @@ def _get_methods(self, answer[meth_pb.name] = wrappers.Method( input=self.api_messages[meth_pb.input_type.lstrip('.')], lro=self._maybe_get_lro(service_address, meth_pb), + extended_lro=self._maybe_get_extended_lro( + service_address, + meth_pb, + ), method_pb=meth_pb, meta=metadata.Metadata( address=service_address.child(meth_pb.name, 
path + (i,)), diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index fa8ad2dd68ef..ffcc123fd52b 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -99,6 +99,18 @@ def map(self) -> bool: """Return True if this field is a map, False otherwise.""" return bool(self.repeated and self.message and self.message.map) + @property + def operation_field(self) -> Optional[str]: + return self.options.Extensions[ex_ops_pb2.operation_field] + + @property + def operation_request_field(self) -> Optional[str]: + return self.options.Extensions[ex_ops_pb2.operation_request_field] + + @property + def operation_response_field(self) -> Optional[str]: + return self.options.Extensions[ex_ops_pb2.operation_response_field] + @utils.cached_property def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, Dict[str, Any], List[Any], None]: visited_messages = set() @@ -387,6 +399,51 @@ def oneof_fields(self, include_optional=False): return oneof_fields + @utils.cached_property + def extended_operation_request_fields(self) -> Sequence[Field]: + """ + If this message is the request for a method that uses extended operations, + return the fields that correspond to operation request fields in the operation message. + """ + return tuple( + f + for f in self.fields.values() + if f.operation_request_field + ) + + @utils.cached_property + def extended_operation_response_fields(self) -> Sequence[Field]: + """ + If this message is the request for a method that uses extended operations, + return the fields that correspond to operation response fields in the polling message. 
+ """ + return tuple( + f + for f in self.fields.values() + if f.operation_response_field + ) + + @utils.cached_property + def differently_named_extended_operation_fields(self) -> Optional[Dict[str, Field]]: + if not self.is_extended_operation: + return None + + def canonical_name(field): + return OperationResponseMapping.Name(field.operation_field).lower() + + OperationResponseMapping = ex_ops_pb2.OperationResponseMapping + default_field_names = [ + k.lower() + # The first variant is UNKNOWN + for k in ex_ops_pb2.OperationResponseMapping.keys()[1:] + ] + + return { + canonical_name(f): f + for f in self.fields.values() + if f.operation_field and f.name not in default_field_names + } + @utils.cached_property def is_extended_operation(self) -> bool: if not self.name == "Operation": @@ -394,7 +451,7 @@ def is_extended_operation(self) -> bool: name, status, error_code, error_message = False, False, False, False duplicate_msg = f"Message '{self.name}' has multiple fields with the same operation response mapping: {{}}" - for f in self.field: + for f in self.fields.values(): maybe_op_mapping = f.options.Extensions[ex_ops_pb2.operation_field] OperationResponseMapping = ex_ops_pb2.OperationResponseMapping @@ -420,6 +477,18 @@ def is_extended_operation(self) -> bool: return name and status and error_code and error_message + @utils.cached_property + def extended_operation_status_field(self) -> Optional[Field]: + STATUS = ex_ops_pb2.OperationResponseMapping.STATUS + return next( + ( + f + for f in self.fields.values() + if f.options.Extensions[ex_ops_pb2.operation_field] == STATUS + ), + None, + ) + @utils.cached_property def required_fields(self) -> Sequence['Field']: required_fields = [ @@ -757,6 +826,32 @@ def __eq__(self, other): return super().__eq__(other) +@dataclasses.dataclass(frozen=True) +class ExtendedOperationInfo: + """A handle to the request type of the extended operation polling method + and the underlying operation type. 
+ """ + request_type: MessageType + operation_type: MessageType + + def with_context(self, *, collisions: FrozenSet[str]) -> 'ExtendedOperationInfo': + """Return a derivative of this OperationInfo with the provided context. + + This method is used to address naming collisions. The returned + ``OperationInfo`` object aliases module names to avoid naming collisions + in the file being written. + """ + return self if not collisions else dataclasses.replace( + self, + request_type=self.request_type.with_context( + collisions=collisions + ), + operation_type=self.operation_type.with_context( + collisions=collisions, + ), + ) + + @dataclasses.dataclass(frozen=True) class OperationInfo: """Representation of long-running operation info.""" @@ -969,6 +1064,8 @@ class Method: input: MessageType output: MessageType lro: Optional[OperationInfo] = dataclasses.field(default=None) + extended_lro: Optional[ExtendedOperationInfo] = dataclasses.field( + default=None) retry: Optional[RetryInfo] = dataclasses.field(default=None) timeout: Optional[float] = None meta: metadata.Metadata = dataclasses.field( @@ -1048,6 +1145,21 @@ def _client_output(self, enable_asyncio: bool): ), )) + if self.extended_lro: + return PythonType( + meta=metadata.Metadata( + address=metadata.Address( + name="ExtendedOperation", + module="extended_operation", + package=("google", "api_core"), + collisions=self.extended_lro.operation_type.ident.collisions, + ), + documentation=utils.doc( + "An object representing a extended long-running operation." + ), + ), + ) + # If this method is paginated, return that method's pager class. 
if self.paged_result_field: return PythonType(meta=metadata.Metadata( @@ -1325,6 +1437,11 @@ def _ref_types(self, recursive: bool) -> Sequence[Union[MessageType, EnumType]]: answer.append(self.lro.response_type) answer.append(self.lro.metadata_type) + # Extended operation + if self.extended_lro: + answer.append(self.extended_lro.request_type) + answer.append(self.extended_lro.operation_type) + # If this message paginates its responses, it is possible # that the individual result messages reside in a different module. if self.paged_result_field and self.paged_result_field.message: @@ -1351,9 +1468,16 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': collisions=collisions ) if collisions else self.lro + maybe_extended_lro = ( + self.extended_lro.with_context( + collisions=collisions + ) if self.extended_lro else None + ) + return dataclasses.replace( self, lro=maybe_lro, + extended_lro=maybe_extended_lro, input=self.input.with_context(collisions=collisions), output=self.output.with_context(collisions=collisions), meta=self.meta.with_context(collisions=collisions), @@ -1427,13 +1551,12 @@ class Service: compare=False, ) + def __hash__(self): + return hash(f"{self.meta.address.api_naming.module_name}.{self.name}") + def __getattr__(self, name): return getattr(self.service_pb, name) - @property - def custom_polling_method(self) -> Optional[Method]: - return next((m for m in self.methods.values() if m.is_operation_polling_method), None) - @property def client_name(self) -> str: """Returns the name of the generated client class""" @@ -1463,7 +1586,11 @@ def rest_transport_name(self): @property def has_lro(self) -> bool: """Return whether the service has a long-running method.""" - return any([m.lro for m in self.methods.values()]) + return any(m.lro for m in self.methods.values()) + + @property + def has_extended_lro(self) -> bool: + return any(m.extended_lro for m in self.methods.values()) @property def has_pagers(self) -> bool: @@ -1618,6 +1745,21 @@ 
def any_server_streaming(self) -> bool: def any_deprecated(self) -> bool: return any(m.is_deprecated for m in self.methods.values()) + @utils.cached_property + def any_extended_operations_methods(self) -> bool: + return any(m.operation_service for m in self.methods.values()) + + @utils.cached_property + def operation_polling_method(self) -> Optional[Method]: + return next( + ( + m + for m in self.methods.values() + if m.is_operation_polling_method + ), + None + ) + def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': """Return a derivative of this service with the provided context. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 new file mode 100644 index 000000000000..d384fe86ea09 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -0,0 +1,262 @@ +{# + # Copyright (C) 2022 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+#} + +{% macro client_method(method, name, snippet_index, api, service, full_extended_lro=False) %} + def {{ name }}(self, + {% if not method.client_streaming %} + request: Union[{{ method.input.ident }}, dict] = None, + *, + {% for field in method.flattened_fields.values() %} + {{ field.name }}: {{ field.ident }} = None, + {% endfor %} + {% else %} + requests: Iterator[{{ method.input.ident }}] = None, + *, + {% endif %} + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #} + ) -> {{ method.extended_lro.operation_type.ident }}: + {% elif not method.server_streaming %} + ) -> {{ method.client_output.ident }}: + {% else %} + ) -> Iterable[{{ method.client_output.ident }}]: + {% endif %} + r"""{{ method.meta.doc|rst(width=72, indent=8) }} + + {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} + {% if snippet is not none %} + .. code-block:: python + + {{ snippet.full_snippet|indent(width=12, first=True) }} + {% endif %} + {% endwith %} + + Args: + {% if not method.client_streaming %} + request (Union[{{ method.input.ident.sphinx }}, dict]): + The request object.{{ " " }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {% for key, field in method.flattened_fields.items() %} + {{ field.name }} ({{ field.ident.sphinx }}): + {{ field.meta.doc|rst(width=72, indent=16) }} + This corresponds to the ``{{ key }}`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + {% endfor %} + {% else %} + requests (Iterator[{{ method.input.ident.sphinx }}]): + The request object iterator.{{ " " }} + {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {% endif %} + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + {% if not method.void %} + + Returns: + {% if not method.server_streaming %} + {{ method.client_output.ident.sphinx }}: + {% else %} + Iterable[{{ method.client_output.ident.sphinx }}]: + {% endif %} + {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format="rst") }} + {% endif %} + """ + {% if method.is_deprecated %} + warnings.warn("{{ service.client_name }}.{{ method.name|snake_case }} is deprecated", + DeprecationWarning) + + {% endif %} + {% if not method.client_streaming %} + # Create or coerce a protobuf request object. + {% if method.flattened_fields %} + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + {% endif %} + {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #} + if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + request = {{ method.input.ident }}(**request) + elif not request: + # Null request, just make one. + request = {{ method.input.ident }}() + {% else %} + # Minor optimization to avoid making a copy if the user passes + # in a {{ method.input.ident }}. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, {{ method.input.ident }}): + request = {{ method.input.ident }}(request) + {% endif %}{# different request package #} + + {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} + {% if method.flattened_fields and method.input.ident.package == method.ident.package %} + # If we have keyword arguments corresponding to fields on the + # request, apply these. + {% endif %} + {% for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} + if {{ field.name }} is not None: + {# Repeated values is a special case, because values can be lists. #} + {# In order to not confuse the marshalling logic, extend these fields instead of assigning #} + {% if field.ident.ident|string() == "struct_pb2.Value" and field.repeated %} + request.{{ key }}.extend({{ field.name }}) + {% else %} + request.{{ key }} = {{ field.name }} + {% endif %}{# struct_pb2.Value #} + {% endfor %} + {# Map-y fields can be _updated_, however #} + {% for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} + {% if field.map %} {# map implies repeated, but repeated does NOT imply map#} + if {{ field.name }}: + request.{{ key }}.update({{ field.name }}) + {% else %} + {# And list-y fields can be _extended_ #} + if {{ field.name }}: + request.{{ key }}.extend({{ field.name }}) + {% endif %} {# field.map #} + {% endfor %} {# method.flattened_fields.items() #} + {% endif %} {# method.client_streaming #} + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.{{ method.transport_safe_name|snake_case}}] + + {% if method.explicit_routing %} + header_params = {} + {% for routing_param in method.routing_rule.routing_parameters %} + {% if routing_param.path_template %} {# Need to match. 
#} + + routing_param_regex = {{ routing_param.to_regex() }} + regex_match = routing_param_regex.match(request.{{ routing_param.field }}) + if regex_match and regex_match.group("{{ routing_param.key }}"): + header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") + + {% else %} + + if request.{{ routing_param.field }}: + header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} + + {% endif %} + {% endfor %} {# method.routing_rule.routing_parameters #} + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + {% elif method.field_headers %} {# implicit routing #} + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), + {% endif %} + {% endfor %} + )), + ) + {% endif %} {# method.explicit_routing #} + + # Send the request. + {%+ if not method.void %}response = {% endif %}rpc( + {% if not method.client_streaming %} + request, + {% else %} + requests, + {% endif %} + retry=retry, + timeout=timeout, + metadata=metadata, + ) + {% if method.lro %} + + # Wrap the response in an operation future. + response = {{ method.client_output.ident.module_alias or method.client_output.ident.module }}.from_gapic( + response, + self._transport.operations_client, + {{ method.lro.response_type.ident }}, + metadata_type={{ method.lro.metadata_type.ident }}, + ) + {% elif method.paged_result_field %} + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = {{ method.client_output.ident }}( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + {% elif method.extended_lro and full_extended_lro %} +{{ extended_operation_service_setup(api, method) }} + {% endif %} + {% if not method.void %} + + # Done; return the response. + return response + {% endif %} + {{ "\n" }} + +{% endmacro %} + +{% macro extended_operation_service_setup(api, method) %} +{% with op_service = api.get_custom_operation_service(method) %} + + operation_service = self._transport._{{ op_service.client_name|snake_case }} + operation_request = {{ op_service.operation_polling_method.input.ident }}() +{% for field in method.input.extended_operation_request_fields %} + operation_request.{{ field.operation_request_field }} = request.{{ field.name }} +{% endfor %} +{% for field in op_service.operation_polling_method.input.extended_operation_response_fields %} + operation_request.{{ field.name }} = response.{{ field.operation_response_field }} +{% endfor %} + + get_operation = functools.partial(operation_service.{{ op_service.operation_polling_method.name|snake_case }}, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + {% if method.output.differently_named_extended_operation_fields %} + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + {% for default_name, field in method.output.differently_named_extended_operation_fields.items() %} + @property + def {{ default_name }}(self): + return self._extended_operation.{{ field.name }} + + {% endfor %} + {% endif %} + + response = {{ "_CustomOperation" if method.output.differently_named_extended_operation_fields else "extended_operation.ExtendedOperation" }}.make(get_operation, cancel_operation, response) + +{% endwith %} +{% endmacro %} + +{% macro define_extended_operation_subclass(extended_operation) %} +{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index da207d8b8c0e..17ecfebbdc79 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -1,8 +1,12 @@ {% extends '_base.py.j2' %} {% block content %} +{% import "%namespace/%name_%version/%sub/services/%service/_client_macros.j2" as macros %} from collections import OrderedDict +{% if service.any_extended_operations_methods %} +import functools +{% endif %} import os import re from typing import Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union @@ -13,6 +17,9 @@ import warnings from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions +{% if service.any_extended_operations_methods %} +from google.api_core import extended_operation +{% endif %} from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore @@ -202,7 +209,7 @@ 
class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -210,7 +217,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (2) if `client_options.client_cert_source` is provided, use the provided one; if the default client cert source exists, use the default one; otherwise the client cert source is None. - + The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the @@ -228,7 +235,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Returns: Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the client cert source to use. - + Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" @@ -256,7 +263,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): api_endpoint = cls.DEFAULT_MTLS_ENDPOINT else: api_endpoint = cls.DEFAULT_ENDPOINT - + return api_endpoint, client_cert_source def __init__(self, *, @@ -347,211 +354,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() %} - {% if method.operation_service %}{# DIREGAPIC LRO #} - def {{ method.name|snake_case }}_unary(self, - {% else %} - def {{ method.name|snake_case }}(self, - {% endif %}{# DIREGAPIC LRO #} - {% if not method.client_streaming %} - request: Union[{{ method.input.ident }}, dict] = None, - *, - {% for field in method.flattened_fields.values() %} - {{ field.name }}: {{ field.ident }} = None, - {% endfor %} - {% else %} - requests: Iterator[{{ method.input.ident }}] = None, - *, - {% endif %} - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - {% if not method.server_streaming %} - ) -> {{ method.client_output.ident }}: - {% else %} - ) -> Iterable[{{ method.client_output.ident }}]: - {% endif %} - r"""{{ method.meta.doc|rst(width=72, indent=8) }} - - {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} - {% if snippet is not none %} - .. code-block:: python - -{{ snippet.full_snippet|indent(width=12, first=True) }} - {% endif %} - {% endwith %} - - Args: - {% if not method.client_streaming %} - request (Union[{{ method.input.ident.sphinx }}, dict]): - The request object.{{ " " }} - {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {% for key, field in method.flattened_fields.items() %} - {{ field.name }} ({{ field.ident.sphinx }}): - {{ field.meta.doc|rst(width=72, indent=16) }} - This corresponds to the ``{{ key }}`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- {% endfor %} - {% else %} - requests (Iterator[{{ method.input.ident.sphinx }}]): - The request object iterator.{{ " " }} - {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} - {% endif %} - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - {% if not method.void %} - - Returns: - {% if not method.server_streaming %} - {{ method.client_output.ident.sphinx }}: - {% else %} - Iterable[{{ method.client_output.ident.sphinx }}]: - {% endif %} - {{ method.client_output.meta.doc|rst(width=72, indent=16, source_format="rst") }} - {% endif %} - """ - {% if method.is_deprecated %} - warnings.warn("{{ service.client_name }}.{{ method.name|snake_case }} is deprecated", - DeprecationWarning) - - {% endif %} - {% if not method.client_streaming %} - # Create or coerce a protobuf request object. - {% if method.flattened_fields %} - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - {% endif %} - {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #} - if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - request = {{ method.input.ident }}(**request) - elif not request: - # Null request, just make one. - request = {{ method.input.ident }}() - {% else %} - # Minor optimization to avoid making a copy if the user passes - # in a {{ method.input.ident }}. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, {{ method.input.ident }}): - request = {{ method.input.ident }}(request) - {% endif %}{# different request package #} - - {#- Vanilla python protobuf wrapper types cannot _set_ repeated fields #} - {% if method.flattened_fields and method.input.ident.package == method.ident.package %} - # If we have keyword arguments corresponding to fields on the - # request, apply these. - {% endif %} - {% for key, field in method.flattened_fields.items() if not field.repeated or method.input.ident.package == method.ident.package %} - if {{ field.name }} is not None: - {# Repeated values is a special case, because values can be lists. #} - {# In order to not confuse the marshalling logic, extend these fields instead of assigning #} - {% if field.ident.ident|string() == "struct_pb2.Value" and field.repeated %} - request.{{ key }}.extend({{ field.name }}) - {% else %} - request.{{ key }} = {{ field.name }} - {% endif %}{# struct_pb2.Value #} - {% endfor %} - {# Map-y fields can be _updated_, however #} - {% for key, field in method.flattened_fields.items() if field.repeated and method.input.ident.package != method.ident.package %} - {% if field.map %} {# map implies repeated, but repeated does NOT imply map#} - if {{ field.name }}: - request.{{ key }}.update({{ field.name }}) - {% else %} - {# And list-y fields can be _extended_ #} - if {{ field.name }}: - request.{{ key }}.extend({{ field.name }}) - {% endif %} {# field.map #} - {% endfor %} {# method.flattened_fields.items() #} - {% endif %} {# method.client_streaming #} - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.{{ method.transport_safe_name|snake_case}}] +{% if method.operation_service %}{# Uses extended operations #} +{{ macros.client_method(method, method.name|snake_case + "_unary", snippet_index, api, service) }} - {% if method.explicit_routing %} - header_params = {} - {% for routing_param in method.routing_rule.routing_parameters %} - {% if routing_param.path_template %} {# Need to match. #} - - routing_param_regex = {{ routing_param.to_regex() }} - regex_match = routing_param_regex.match(request.{{ routing_param.field }}) - if regex_match and regex_match.group("{{ routing_param.key }}"): - header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") - - {% else %} - - if request.{{ routing_param.field }}: - header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} - - {% endif %} - {% endfor %} {# method.routing_rule.routing_parameters #} - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - {% elif method.field_headers %} {# implicit routing #} - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), - {% endif %} - {% endfor %} - )), - ) - {% endif %} {# method.explicit_routing #} - - # Send the request. - {%+ if not method.void %}response = {% endif %}rpc( - {% if not method.client_streaming %} - request, - {% else %} - requests, - {% endif %} - retry=retry, - timeout=timeout, - metadata=metadata, - ) - {% if method.lro %} - - # Wrap the response in an operation future. 
- response = {{ method.client_output.ident.module_alias or method.client_output.ident.module }}.from_gapic( - response, - self._transport.operations_client, - {{ method.lro.response_type.ident }}, - metadata_type={{ method.lro.metadata_type.ident }}, - ) - {% elif method.paged_result_field %} - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = {{ method.client_output.ident }}( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - {% endif %} - {% if not method.void %} - - # Done; return the response. - return response - {% endif %} - {{ "\n" }} +{{ macros.client_method(method, method.name|snake_case, snippet_index, api, service, full_extended_lro=True) }} +{% else %} +{{ macros.client_method(method, method.name|snake_case, snippet_index, api, service) }} +{% endif %} {% endfor %} def __enter__(self): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index bc37e7602f47..7b36a39a4cec 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -27,6 +27,11 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} +{% filter sort_lines %} +{% for operations_service in api.get_extended_operations_services(service) %} +from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services import {{ operations_service.name|snake_case }} +{% endfor %} +{% endfilter %} try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -84,6 +89,10 @@ class {{ service.name 
}}Transport(abc.ABC): always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + {% if service.any_extended_operations_methods %} + self._extended_operations_services: Dict[str, Any] = {} + {% endif %} + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: host += ':443' @@ -202,6 +211,26 @@ class {{ service.name }}Transport(abc.ABC): raise NotImplementedError() {% endif %} + @property + def kind(self) -> str: + raise NotImplementedError() + + {% for operations_service in api.get_extended_operations_services(service)|sort(attribute="name") %} + @property + def _{{ operations_service.client_name|snake_case }}(self) -> {{ operations_service.name|snake_case }}.{{ operations_service.client_name }}: + ex_op_service = self._extended_operations_services.get("{{ operations_service.name|snake_case }}") + if not ex_op_service: + ex_op_service = {{ operations_service.name|snake_case }}.{{ operations_service.client_name }}( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["{{ operations_service.name|snake_case }}"] = ex_op_service + + return ex_op_service + + {% endfor %} + + __all__ = ( '{{ service.name }}Transport', ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index cabc67e443e3..dfaa1edf3db0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -362,6 +362,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( '{{ 
service.name }}GrpcTransport', ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 4021915fe844..053bba3d5273 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -409,6 +409,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %} + @property + def kind(self) -> str: + return "rest" + def close(self): self._session.close() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 67039ea79877..b6e93cefcb84 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -75,4 +75,22 @@ class {{ message.name }}({{ p }}.Message): ) {% endif %}{# field.map #} {% endfor %}{# for field in message.fields.values#} + + {% if message.extended_operation_status_field %} + {% with status_field = message.extended_operation_status_field %} + @property + def done(self) -> bool: + """Return True if the backing extended operation is completed, False otherwise.""" + {% if status_field.enum %} + return self.{{ status_field.name }} == type(self.{{ status_field.name }}).DONE + {% elif status_field.is_primitive and status_field.type.python_type == str %} + return self.{{ status_field.name }} == "DONE" + {% elif status_field.is_primitive and status_field.type.python_type == bool %} + return self.{{ status_field.name }} + {% else %} + raise ValueError("Unexpected type for field {{ 
status_field.name }}: {{ status_field.type }}") + {% endif %} + {% endwith %} + {% endif %} + {{ '\n\n' }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 864183cc236c..939b85745dc6 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -32,9 +32,9 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.4.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.7.0, < 3.0.0dev', {% else %} - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', {% endif %} 'libcst >= 0.2.5', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1981b30a397d..1f8337a419c7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} +{% import "tests/unit/gapic/%name_%version/%sub/test_macros.j2" as test_macros %} import os import mock @@ -38,8 +39,10 @@ from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template -{% if service.has_lro %} +{% if service.has_lro or service.has_extended_lro %} from google.api_core import future +{% endif %} +{% if service.has_lro %} from google.api_core import operation from google.api_core import operations_v1 from google.longrunning import operations_pb2 @@ -112,7 +115,7 @@ def test_{{ service.client_name|snake_case 
}}_from_service_account_info(client_c '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' {% if 'rest' in opts.transport %} if transport_name in ['grpc', 'grpc_asyncio'] - else + else 'https://{{ service.host }}' {% endif %} ) @@ -166,7 +169,7 @@ def test_{{ service.client_name|snake_case }}_from_service_account_file(client_c '{{ service.host }}{% if ":" not in service.host %}:443{% endif %}' {% if 'rest' in opts.transport %} if transport_name in ['grpc', 'grpc_asyncio'] - else + else 'https://{{ service.host }}' {% endif %} ) @@ -482,7 +485,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl options = client_options.ClientOptions( credentials_file="credentials.json" ) - + with mock.patch.object(transport_class, '__init__') as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -576,1416 +579,20 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endif %} -{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} -@pytest.mark.parametrize("request_type", [ - {{ method.input.ident }}, - dict, -]) -def test_{{ method_name }}(request_type, transport: str = 'grpc'): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. 
- {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endfor %} - ) - {% endif %} - {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) - {% else %} - response = client.{{ method_name }}(request) - {% endif %} - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - {% if method.client_streaming %} - assert next(args[0]) == request - {% else %} - assert args[0] == {{ method.input.ident }}() - {% endif %} - - # Establish that the response is the type that we expect. 
- {% if method.void %} - assert response is None - {% elif method.lro %} - assert isinstance(response, future.Future) - {% elif method.server_streaming %} - for message in response: - assert isinstance(message, {{ method.output.ident }}) - {% else %} - {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} - {# Cheeser assertion to force code coverage for bad paginated methods #} - assert response.raw_page is response - {% endif %} - assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') %} - {% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} - assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} - assert response.{{ field.name }} is {{ field.mock_value }} - {% else %} - assert response.{{ field.name }} == {{ field.mock_value }} - {% endif %} - {% endif %}{# end oneof/optional #} - {% endfor %} - {% endif %} - - -{% if not method.client_streaming %} -def test_{{ method_name }}_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - client.{{ method_name }}() - call.assert_called() - _, args, _ = call.mock_calls[0] - {% if method.client_streaming %} - assert next(args[0]) == request - {% else %} - assert args[0] == {{ method.input.ident }}() - {% endif %} -{% endif %} - - -@pytest.mark.asyncio -async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. 
- {% if method.void %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - {% elif not method.client_streaming and method.server_streaming %} - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% elif method.client_streaming and method.server_streaming %} - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% else %} - call.return_value ={{ '' }} - {%- if not method.client_streaming and not method.server_streaming -%} - grpc_helpers_async.FakeUnaryUnaryCall - {%- else -%} - grpc_helpers_async.FakeStreamUnaryCall - {%- endif -%}({{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %} - {% endfor %} - )) - {% endif %} - {% if method.client_streaming and method.server_streaming %} - response = await client.{{ method.name|snake_case }}(iter(requests)) - {% elif method.client_streaming and not method.server_streaming %} - response = await (await client.{{ method.name|snake_case }}(iter(requests))) - {% else %} - response = await client.{{ method_name }}(request) - {% endif %} - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - {% if method.client_streaming %} - assert next(args[0]) == request - {% else %} - assert args[0] == {{ method.input.ident }}() - {% endif %} - - # Establish that the response is the type that we expect. 
- {% if method.void %} - assert response is None - {% elif method.lro %} - assert isinstance(response, future.Future) - {% elif method.server_streaming %} - message = await response.read() - assert isinstance(message, {{ method.output.ident }}) - {% else %} - assert isinstance(response, {{ method.client_output_async.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') %} - {% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} - assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} - assert response.{{ field.name }} is {{ field.mock_value }} - {% else %} - assert response.{{ field.name }} == {{ field.mock_value }} - {% endif %} - {% endif %}{# oneof/optional #} - {% endfor %} - {% endif %} - - -@pytest.mark.asyncio -async def test_{{ method_name }}_async_from_dict(): - await test_{{ method_name }}_async(request_type=dict) - - -{% if method.explicit_routing %} -def test_{{ method.name|snake_case }}_routing_parameters(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - {% for routing_param in method.routing_rule.routing_parameters %} - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}({{ routing_param.sample_request }}) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - client.{{ method.name|snake_case }}(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. - assert kw['metadata'] - {% endfor %} -{% endif %} - - -{% if method.field_headers and not method.client_streaming and not method.explicit_routing %} -def test_{{ method_name }}_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}() - - {% for field_header in method.field_headers %} - request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' - {% endfor %} - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - client.{{ method_name }}(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ field_header.raw }}/value - {%- if not loop.last %}&{% endif %} - {%- endfor -%}', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_{{ method_name }}_field_headers_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}() - - {% for field_header in method.field_headers %} - request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' - {% endfor %} - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - {% if method.void %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) - {% elif method.server_streaming %} - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% else %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall({{ method.output.ident }}()) - {% endif %} - await client.{{ method_name }}(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ field_header.raw }}/value - {%- if not loop.last %}&{% endif %} - {%- endfor -%}', - ) in kw['metadata'] -{% endif %} - -{% if method.ident.package != method.input.ident.package %} -def test_{{ method_name }}_from_dict_foreign(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. - {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - response = client.{{ method_name }}(request={ - {% for field in method.input.fields.values() %} - '{{ field.name }}': {{ field.mock_value }}, - {% endfor %} - } - ) - call.assert_called() - -{% endif %} - -{% if method.flattened_fields %} -def test_{{ method_name }}_flattened(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. 
- {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.{{ method_name }}( - {% for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {% endfor %} - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} - assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration_pb2.Duration' %} - assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% else %} - arg = args[0].{{ key }} - mock_val = {{ field.mock_value }} - {% if field.ident|string() == "struct_pb2.Value" %} - from proto.marshal import Marshal - from proto.marshal.rules.struct import ValueRule - rule = ValueRule(marshal=Marshal(name="Test")) - mock_val = rule.to_python(mock_val) - {% endif %}{# struct_pb2.Value #} - assert arg == mock_val - {% endif %} - {% endif %}{% endfor %} - {% for oneofs in method.flattened_oneof_fields().values() %} - {% with field = oneofs[-1] %} - assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} - {% endwith %} - {% endfor %} - - - -def test_{{ method_name }}_flattened_error(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.{{ method_name }}( - {{ method.input.ident }}(), - {% for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {% endfor %} - ) - - -@pytest.mark.asyncio -async def test_{{ method_name }}_flattened_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. - {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - - - {% if method.void %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - {% elif not method.client_streaming and method.server_streaming %} - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - {% elif method.client_streaming and method.server_streaming %} - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - {% else %} - call.return_value = {{ '' }} - {%- if not method.client_streaming and not method.server_streaming -%} - grpc_helpers_async.FakeUnaryUnaryCall - {%- else -%} - grpc_helpers_async.FakeStreamUnaryCall - {%- endif -%}({{ method.output.ident }}()) - {% endif %} - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.{{ method_name }}( - {% for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {% endfor %} - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} - {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} - assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% elif field.ident|string() == 'duration_pb2.Duration' %} - assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} - {% else %} - arg = args[0].{{ key }} - mock_val = {{ field.mock_value }} - {% if field.ident|string() == "struct_pb2.Value" %} - from proto.marshal import Marshal - from proto.marshal.rules.struct import ValueRule - rule = ValueRule(marshal=Marshal(name="Test")) - mock_val = rule.to_python(mock_val) - {% endif %}{# struct_pb2.Value #} - assert arg == mock_val - {% endif %} - {% endif %}{% endfor %} - {% for oneofs in method.flattened_oneof_fields().values() %} - {% with field = oneofs[-1] %} - assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} - {% endwith %} - {% endfor %} - - -@pytest.mark.asyncio -async def test_{{ method_name }}_flattened_error_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.{{ method_name }}( - {{ method.input.ident }}(), - {% for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {% endfor %} - ) -{% endif %} - - -{% if method.paged_result_field %} -{% if not method.paged_result_field.map %} -def test_{{ method_name }}_pager(transport_name: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - RuntimeError, - ) - - metadata = () - {% if method.field_headers %} - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ('{{ field_header.raw }}', ''), - {% endif %} - {% endfor %} - )), - ) - {% endif %} - pager = client.{{ method_name }}(request={}) - - assert pager._metadata == metadata +{% for method in service.methods.values() if 'grpc' in opts.transport %}{# method_name #} +{% if method.extended_lro %} +{{ 
test_macros.grpc_required_tests(method, service, full_extended_lro=True) }} - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) - for i in results) {% endif %} -def test_{{ method_name }}_pages(transport_name: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Set the response to a series of pages. - {% if method.paged_result_field.map %} - call.side_effect = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={}, - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - ), - RuntimeError, - ) - {% else %} - call.side_effect = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ 
method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - RuntimeError, - ) - {% endif %} - {# method.paged_result_field.map #} - pages = list(client.{{ method_name }}(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_{{ method_name }}_async_pager(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - {% if method.paged_result_field.map %} - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={}, - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - ), - RuntimeError, - {% else %} - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident 
}}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - RuntimeError, - {% endif %} - ) - async_pager = await client.{{ method_name }}(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - {% if method.paged_result_field.map %} - - assert all( - isinstance(i, tuple) - for i in responses) - for result in responses: - assert isinstance(result, tuple) - assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) - - assert async_pager.get('a') is None - assert isinstance(async_pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) - {% else %} - assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) - for i in responses) - {% endif %} - - -@pytest.mark.asyncio -async def test_{{ method_name }}_async_pages(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - {% if method.paged_result_field.map %} - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={}, - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - ), - RuntimeError, - {% else %} - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - RuntimeError, - {% endif %} - ) - pages = [] - async for page_ in (await client.{{ method_name }}(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token -{% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} -def test_{{ method_name }}_raw_page_lro(): - 
response = {{ method.lro.response_type.ident }}() - assert response.raw_page is response -{% endif %}{# method.paged_result_field #}{% endwith %}{# method_name #} - +{{ test_macros.grpc_required_tests(method, service) }} {% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} -{# TODO(kbandes): remove this if condition when lro and client streaming are supported. #} -{% if not method.client_streaming %} -@pytest.mark.parametrize("request_type", [ - {{ method.input.ident }}, - dict, -]) -def test_{{ method_name }}_rest(request_type): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} - {% endif %} - {% endfor %} - request = request_type(request_init) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% else %} - return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endfor %} - ) - {% endif %} - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if method.void %} - json_return_value = '' - {% elif method.lro %} - json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) - {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - {% if method.client_streaming %} - response = client.{{ method_name }}(iter(requests)) - {% elif method.server_streaming %} - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.{{ method_name }}(request) - {% else %} - response = client.{{ method_name }}(request) - {% endif %} - - {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} - {# Cheeser assertion to force code coverage for bad paginated methods #} - assert response.raw_page is response - - {% endif %} - - {% if method.server_streaming %} - assert isinstance(response, Iterable) - response = next(response) - {% endif %} - - # Establish that the response is the type that we expect. 
- {% if method.void %} - assert response is None - {% elif method.lro %} - assert response.operation.name == "operations/spam" - {% else %} - assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method.output.fields.values() | rejectattr('message') %} - {% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} - assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} - assert response.{{ field.name }} is {{ field.mock_value }} - {% else %} - assert response.{{ field.name }} == {{ field.mock_value }} - {% endif %} - {% endif %}{# end oneof/optional #} - {% endfor %} - {% endif %} - - - {% if method.input.required_fields %} -def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ident }}): - transport_class = transports.{{ service.rest_transport_name }} - - request_init = {} - {% for req_field in method.input.required_fields if req_field.is_primitive %} - {% if req_field.field_pb.type == 9 %} - request_init["{{ req_field.name }}"] = "{{ req_field.field_pb.default_value }}" - {% else %} - request_init["{{ req_field.name }}"] = {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }} - {% endif %}{# default is str #} - {% endfor %} - request = request_type(request_init) - jsonified_request = json.loads(request_type.to_json( - request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} - {% set field_name = req_field.name | camel_case %} - assert "{{ field_name }}" not in jsonified_request - {% endfor %} - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case 
}}._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} - {% set field_name = req_field.name | camel_case %} - assert "{{ field_name }}" in jsonified_request - assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] - {% endfor %} - - {% for req_field in method.input.required_fields if req_field.is_primitive %} - {% set field_name = req_field.name | camel_case %} - {% set mock_value = req_field.primitive_mock_as_str() %} - jsonified_request["{{ field_name }}"] = {{ mock_value }} - {% endfor %} - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) - {% if method.query_params %} - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) - {% endif %} - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - {% for req_field in method.input.required_fields if req_field.is_primitive %} - {% set field_name = req_field.name | camel_case %} - {% set mock_value = req_field.primitive_mock_as_str() %} - assert "{{ field_name }}" in jsonified_request - assert jsonified_request["{{ field_name }}"] == {{ mock_value }} - {% endfor %} - - - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(request_init) - - # Designate an appropriate value for the returned response. 
- {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% else %} - return_value = {{ method.output.ident }}() - {% endif %} - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "{{ method.http_options[0].method }}", - 'query_params': request_init, - } - {% if method.http_options[0].body %} - transcode_result['body'] = {} - {% endif %} - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - {% if method.void %} - json_return_value = '' - {% elif method.lro %} - json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) - {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - {% if method.client_streaming %} - response = client.{{ method_name }}(iter(requests)) - {% elif method.server_streaming %} - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.{{ method_name }}(request) - {% else %} - response = client.{{ method_name }}(request) - {% endif %} - - expected_params = [ - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} - 
( - "{{ req_field.name | camel_case }}", - {% if req_field.field_pb.type == 9 %} - "{{ req_field.field_pb.default_value }}", - {% else %} - {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, - {% endif %}{# default is str #} - ), - {% endfor %} - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_{{ method_name }}_rest_unset_required_fields(): - transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.{{ method.transport_safe_name|snake_case }}._get_unset_required_fields({}) - assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) - - {% endif %}{# required_fields #} - - -{% if not method.client_streaming %} -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_{{ method_name }}_rest_interceptors(null_interceptor): - transport = transports.{{ service.name }}RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), - ) - client = {{ service.client_name }}(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - {% if method.lro %} - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - {% endif %} - {% if not method.void %} - mock.patch.object(transports.{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ - {% endif %} - mock.patch.object(transports.{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: - pre.assert_not_called() - {% if not method.void %} - post.assert_not_called() - {% endif %} - - transcode.return_value = {"method": "post", "uri": 
"my_uri", "body": None, "query_params": {},} - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - {% if not method.void %} - req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} - - {% if method.server_streaming %} - req.return_value._content = "[{}]".format(req.return_value._content) - {% endif %} - - {% endif %} - - request = {{ method.input.ident }}() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - {% if not method.void %} - post.return_value = {{ method.output.ident }} - {% endif %} - - client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - {% if not method.void %} - post.assert_called_once() - {% endif %} - -{% endif %}{# streaming #} - - -def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} - {% endif %} - {% endfor %} - request = request_type(request_init) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - {% if method.client_streaming %} - client.{{ method_name }}(iter(requests)) - {% else %} - client.{{ method_name }}(request) - {% endif %} - +{% for method in service.methods.values() if 'rest' in opts.transport %} +{% if method.extended_lro %} +{{ test_macros.rest_required_tests(method, service, full_extended_lro=True) }} -{% if method.flattened_fields and "rest" in opts.transport %} -def test_{{ method_name }}_rest_flattened(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% else %} - return_value = {{ method.output.ident }}() - {% endif %} - - # get arguments that satisfy an http rule for this method - sample_request = {{ method.http_options[0].sample_request(method) }} - - # get truthy value for each flattened field - mock_args = dict( - {% for field in method.flattened_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - {{ field.name }}={{ field.mock_value }}, - {% endif %} - {% endfor %} - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if method.void %} - json_return_value = '' - {% elif method.lro %} - json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) - {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - {% if method.server_streaming %} - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - client.{{ method_name }}(**mock_args) - {% else %} - client.{{ method_name }}(**mock_args) - {% endif %} - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - {% with uri = method.http_options[0].uri %} - assert path_template.validate("%s{{ uri }}" % client.transport._host, args[1]) - {% endwith %} - {# TODO(kbandes) - reverse-transcode request args to check all request fields #} - - -def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.{{ method_name }}( - {{ method.input.ident }}(), - {% for field in method.flattened_fields.values() %} - {{ field.name }}={{ field.mock_value }}, - {% endfor %} - ) -{% endif %}{# flattened fields #} - - -{% if method.paged_result_field %} -def test_{{ method_name }}_rest_pager(transport: str = 'rest'): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - {% if method.paged_result_field.map%} - response = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={}, - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}={ - 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), - 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), - }, - ), - ) - {% else %} - response = ( - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='abc', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[], - next_page_token='def', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - ], - next_page_token='ghi', - ), - {{ method.output.ident }}( - {{ method.paged_result_field.name }}=[ - {{ method.paged_result_field.type.ident }}(), - {{ method.paged_result_field.type.ident }}(), - ], - ), - ) - {% endif %} - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple({{ method.output.ident }}.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in 
zip(return_values, response): - {% if method.server_streaming %} - response_val = "[{}]".format({{ method.output.ident }}.to_json(response_val)) - {% endif %} - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - sample_request["{{ field.name }}"] = {{ field.mock_value }} - {% endif %} - {% endfor %} - - - pager = client.{{ method_name }}(request=sample_request) - - {% if method.paged_result_field.map %} - assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) - assert pager.get('h') is None - {% endif %} - - results = list(pager) - assert len(results) == 6 - {% if method.paged_result_field.map %} - assert all( - isinstance(i, tuple) - for i in results) - for result in results: - assert isinstance(result, tuple) - assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) - - assert pager.get('a') is None - assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) - {% else %} - assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) - for i in results) - {% endif %} - - pages = list(client.{{ method_name }}(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -{%- else %}{# paged_result_field #} - -def test_{{ method_name }}_rest_error(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - {%- if not method.http_options %} - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. 
- with pytest.raises(RuntimeError) as runtime_error: - client.{{ method_name }}({}) - assert ('Cannot define a method without a valid `google.api.http` annotation.' - in str(runtime_error.value)) - - {%- endif %} -{% endif %}{# flattened_fields #} - -{% else %}{# this is an lro or streaming method #} -def test_{{ method_name }}_rest_unimplemented(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = {{ method.input.ident }}() - {% if method.client_streaming %} - requests = [request] - {% endif %} - with pytest.raises(NotImplementedError): - client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %}) - -{% endif %}{# not lro and not streaming #}{% else %}{# not method.http_options #} -def test_{{ method_name }}_rest_no_http_options(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = {{ method.input.ident }}() - {% if method.client_streaming %} - requests = [request] - {% endif %} - with pytest.raises(RuntimeError): - client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %}) - - -{% endif %}{# not method.http_options #} -{% endwith %}{# method_name #} +{% endif %} +{{ test_macros.rest_required_tests(method, service) }} {% endfor -%} {#- method in methods for rest #} @@ -2101,6 +708,20 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + {% if "grpc" in opts.transport %} + "grpc", + {% endif %} + {% if "rest" in opts.transport %} + "rest", + {% endif %} +]) +def test_transport_kind(transport_name): + transport = {{ service.client_name }}.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + {% if 'grpc' in opts.transport %} def test_transport_grpc_default(): # A client should use the 
gRPC transport by default. @@ -2156,6 +777,14 @@ def test_{{ service.name|snake_case }}_base_transport(): transport.operations_client {% endif %} + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -3179,4 +1808,4 @@ def test_api_key_credentials(client_class, transport_class): always_use_jwt_access=True, ) -{% endblock %} \ No newline at end of file +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 new file mode 100644 index 000000000000..ddb3ea47026a --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -0,0 +1,1433 @@ +{% macro grpc_required_tests(method, service, full_extended_lro=False) %} +{% with method_name = method.name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method_name }}(request_type, transport: str = 'grpc'): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} + ) + {% endif %} + {% if method.client_streaming %} + response = client.{{ method.name|snake_case }}(iter(requests)) + {% else %} + response = client.{{ method_name }}(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == {{ method.input.ident }}() + {% endif %} + + # Establish that the response is the type that we expect. 
+ {% if method.void %} + assert response is None + {% elif method.lro or (method.extended_lro and full_extended_lro) %} + assert isinstance(response, future.Future) + {% elif method.extended_lro and not full_extended_lro %} + assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) + {% elif method.server_streaming %} + for message in response: + assert isinstance(message, {{ method.output.ident }}) + {% else %} + {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {# Cheeser assertion to force code coverage for bad paginated methods #} + assert response.raw_page is response + {% endif %} + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method_output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else %} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif %} + {% endif %}{# end oneof/optional #} + {% endfor %} + {% endif %} + + +{% if not method.client_streaming %} +def test_{{ method_name }}_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + client.{{ method_name }}() + call.assert_called() + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == {{ method.input.ident }}() + {% endif %} +{% endif %} + +{% if not full_extended_lro %} +@pytest.mark.asyncio +async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + {% elif not method.client_streaming and method.server_streaming %} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% elif method.client_streaming and method.server_streaming %} + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% else %} + call.return_value ={{ '' }} + {%- if not method.client_streaming and not method.server_streaming -%} + grpc_helpers_async.FakeUnaryUnaryCall + {%- else -%} + grpc_helpers_async.FakeStreamUnaryCall + {%- endif -%}({{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + )) + {% endif %} + {% if method.client_streaming and method.server_streaming %} + response = await client.{{ method.name|snake_case }}(iter(requests)) + {% elif method.client_streaming and not method.server_streaming %} + response = await (await client.{{ method.name|snake_case }}(iter(requests))) + {% else %} + response = await client.{{ method_name }}(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + {% if method.client_streaming %} + assert next(args[0]) == request + {% else %} + assert args[0] == {{ method.input.ident }}() + {% endif %} + + # Establish that the response is the type that we expect. 
+ {% if method.void %} + assert response is None + {% elif method.lro %} + assert isinstance(response, future.Future) + {% elif method.extended_lro and not full_extended_lro %} + assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) + {% elif method.server_streaming %} + message = await response.read() + assert isinstance(message, {{ method.output.ident }}) + {% else %} + assert isinstance(response, {{ method.client_output_async.ident }}) + {% for field in method_output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else %} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif %} + {% endif %}{# oneof/optional #} + {% endfor %} + {% endif %} + + +@pytest.mark.asyncio +async def test_{{ method_name }}_async_from_dict(): + await test_{{ method_name }}_async(request_type=dict) +{% endif %}{# full_extended_lro #} + +{% if method.explicit_routing %} +def test_{{ method.name|snake_case }}_routing_parameters(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + {% for routing_param in method.routing_rule.routing_parameters %} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}({{ routing_param.sample_request }}) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + client.{{ method.name|snake_case }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + _, _, kw = call.mock_calls[0] + # This test doesn't assert anything useful. + assert kw['metadata'] + {% endfor %} +{% endif %} + + +{% if method.field_headers and not method.client_streaming and not method.explicit_routing %} +def test_{{ method_name }}_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}() + + {% for field_header in method.field_headers %} + request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' + {% endfor %} + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + client.{{ method_name }}(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + '{% for field_header in method.field_headers -%} + {{ field_header.raw }}={{ field_header.raw }}/value + {%- if not loop.last %}&{% endif %} + {%- endfor -%}', + ) in kw['metadata'] + + +{% if not full_extended_lro %} +@pytest.mark.asyncio +async def test_{{ method_name }}_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = {{ method.input.ident }}() + + {% for field_header in method.field_headers %} + request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' + {% endfor %} + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + {% if method.void %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + {% elif method.server_streaming %} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall({{ method.output.ident }}()) + {% endif %} + await client.{{ method_name }}(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + '{% for field_header in method.field_headers -%} + {{ field_header.raw }}={{ field_header.raw }}/value + {%- if not loop.last %}&{% endif %} + {%- endfor -%}', + ) in kw['metadata'] +{% endif %} +{% endif %}{# full_extended_lro #} + +{% if method.ident.package != method.input.ident.package %} +def test_{{ method_name }}_from_dict_foreign(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + response = client.{{ method_name }}(request={ + {% for field in method.input.fields.values() %} + '{{ field.name }}': {{ field.mock_value }}, + {% endfor %} + } + ) + call.assert_called() + +{% endif %} + +{% if method.flattened_fields %} +def test_{{ method_name }}_flattened(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.{{ method_name }}( + {% for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} + assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.ident|string() == 'duration_pb2.Duration' %} + assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% else %} + arg = args[0].{{ key }} + mock_val = {{ field.mock_value }} + {% if field.ident|string() == "struct_pb2.Value" %} + from proto.marshal import Marshal + from proto.marshal.rules.struct import ValueRule + rule = ValueRule(marshal=Marshal(name="Test")) + mock_val = rule.to_python(mock_val) + {% endif %}{# struct_pb2.Value #} + assert arg == mock_val + {% endif %} + {% endif %}{% endfor %} + {% for oneofs in method.flattened_oneof_fields().values() %} + {% with field = oneofs[-1] %} + assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} + {% endwith %} + {% endfor %} + + + +def test_{{ method_name }}_flattened_error(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.{{ method_name }}( + {{ method.input.ident }}(), + {% for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) + +{% if not full_extended_lro %} +@pytest.mark.asyncio +async def test_{{ method_name }}_flattened_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + + + {% if method.void %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + {% elif not method.client_streaming and method.server_streaming %} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + {% elif method.client_streaming and method.server_streaming %} + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + {% else %} + call.return_value = {{ '' }} + {%- if not method.client_streaming and not method.server_streaming -%} + grpc_helpers_async.FakeUnaryUnaryCall + {%- else -%} + grpc_helpers_async.FakeStreamUnaryCall + {%- endif -%}({{ method.output.ident }}()) + {% endif %} + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.{{ method_name }}( + {% for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + {% for key, field in method.flattened_fields.items() %}{% if not field.oneof or field.proto3_optional %} + {% if field.ident|string() == 'timestamp_pb2.Timestamp' %} + assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.ident|string() == 'duration_pb2.Duration' %} + assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% else %} + arg = args[0].{{ key }} + mock_val = {{ field.mock_value }} + {% if field.ident|string() == "struct_pb2.Value" %} + from proto.marshal import Marshal + from proto.marshal.rules.struct import ValueRule + rule = ValueRule(marshal=Marshal(name="Test")) + mock_val = rule.to_python(mock_val) + {% endif %}{# struct_pb2.Value #} + assert arg == mock_val + {% endif %} + {% endif %}{% endfor %} + {% for oneofs in method.flattened_oneof_fields().values() %} + {% with field = oneofs[-1] %} + assert args[0].{{ method.flattened_field_to_key[field.name] }} == {{ field.mock_value }} + {% endwith %} + {% endfor %} + +@pytest.mark.asyncio +async def test_{{ method_name }}_flattened_error_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.{{ method_name }}( + {{ method.input.ident }}(), + {% for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) +{% endif %} +{% endif %}{# full_extended_lro #} + + +{% if method.paged_result_field %} +{% if not method.paged_result_field.map %} +def test_{{ method_name }}_pager(transport_name: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + RuntimeError, + ) + + metadata = () + {% if method.field_headers %} + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {% for field_header in method.field_headers %} + {% if not method.client_streaming %} + ('{{ field_header.raw }}', ''), + {% endif %} + {% endfor %} + )), + ) + {% endif %} + pager = client.{{ method_name }}(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, {{ 
method.paged_result_field.type.ident }}) + for i in results) +{% endif %} +def test_{{ method_name }}_pages(transport_name: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Set the response to a series of pages. + {% if method.paged_result_field.map %} + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + ) + {% else %} + call.side_effect = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ 
method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + RuntimeError, + ) + {% endif %} + {# method.paged_result_field.map #} + pages = list(client.{{ method_name }}(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_{{ method_name }}_async_pager(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + {% if method.paged_result_field.map %} + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + {% else %} + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ 
method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + RuntimeError, + {% endif %} + ) + async_pager = await client.{{ method_name }}(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + {% if method.paged_result_field.map %} + + assert all( + isinstance(i, tuple) + for i in responses) + for result in responses: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + assert async_pager.get('a') is None + assert isinstance(async_pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) + for i in responses) + {% endif %} + + +@pytest.mark.asyncio +async def test_{{ method_name }}_async_pages(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + {% if method.paged_result_field.map %} + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + RuntimeError, + {% else %} + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + RuntimeError, + {% endif %} + ) + pages = [] + async for page_ in (await client.{{ method_name }}(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token +{% elif method.lro and "next_page_token" in method.lro.response_type.fields.keys() %} +def test_{{ method_name }}_raw_page_lro(): + 
response = {{ method.lro.response_type.ident }}() + assert response.raw_page is response +{% endif %}{# method.paged_result_field #}{% endwith %} +{% endmacro %} + +{% macro rest_required_tests(method, service, full_extended_lro=False) %} +{% with method_name = method.name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} +{# TODO(kbandes): remove this if condition when lro and client streaming are supported. #} +{% if not method.client_streaming %} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +def test_{{ method_name }}_rest(request_type): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + {% endif %} + {% endfor %} + request = request_type(request_init) + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.extended_lro %} + return_value = {{ method.extended_lro.operation_type.ident }}( + {% for field in method.extended_lro.operation_type.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.extended_lro.operation_type.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} + ) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% with field = oneof_fields[0] %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endfor %} + ) + {% endif %} + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + {% if method.client_streaming %} + response = client.{{ method_name }}(iter(requests)) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = 
iter(json_return_value) + response = client.{{ method_name }}(request) + {% else %} + response = client.{{ method_name }}(request) + {% endif %} + + {% if "next_page_token" in method_output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {# Cheeser assertion to force code coverage for bad paginated methods #} + assert response.raw_page is response + + {% endif %} + + {% if method.server_streaming %} + assert isinstance(response, Iterable) + response = next(response) + {% endif %} + + # Establish that the response is the type that we expect. + {% if method.void %} + assert response is None + {% elif method.lro %} + assert response.operation.name == "operations/spam" + {% elif method.extended_lro and not full_extended_lro %} + assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) + {% else %} + assert isinstance(response, {{ method.client_output.ident }}) + {% for field in method_output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else %} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif %} + {% endif %}{# end oneof/optional #} + {% endfor %} + {% endif %} + + + {% if method.input.required_fields %} +def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ident }}): + transport_class = transports.{{ service.rest_transport_name }} + + request_init = {} + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% if req_field.field_pb.type == 9 %} + request_init["{{ req_field.name }}"] = "{{ req_field.field_pb.default_value }}" + {% else %} + request_init["{{ req_field.name }}"] = {{ 
req_field.type.python_type(req_field.field_pb.default_value or 0) }} + {% endif %}{# default is str #} + {% endfor %} + request = request_type(request_init) + jsonified_request = json.loads(request_type.to_json( + request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% set field_name = req_field.name | camel_case %} + assert "{{ field_name }}" not in jsonified_request + {% endfor %} + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% set field_name = req_field.name | camel_case %} + assert "{{ field_name }}" in jsonified_request + assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] + {% endfor %} + + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + {% set mock_value = req_field.primitive_mock_as_str() %} + jsonified_request["{{ field_name }}"] = {{ mock_value }} + {% endfor %} + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + {% if method.query_params %} + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) + {% endif %} + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + {% for req_field in method.input.required_fields if req_field.is_primitive %} + {% set field_name = req_field.name | camel_case %} + {% set mock_value = req_field.primitive_mock_as_str() %} + assert "{{ field_name }}" in jsonified_request + assert jsonified_request["{{ field_name }}"] == {{ mock_value }} + {% endfor %} + + + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(request_init) + + # Designate an appropriate value for the returned response. + {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% else %} + return_value = {{ method.output.ident }}() + {% endif %} + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ transcode_result = { + 'uri': 'v1/sample_method', + 'method': "{{ method.http_options[0].method }}", + 'query_params': request_init, + } + {% if method.http_options[0].body %} + transcode_result['body'] = {} + {% endif %} + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + {% if method.client_streaming %} + response = client.{{ method_name }}(iter(requests)) + {% elif method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + response = client.{{ method_name }}(request) + {% else %} + response = client.{{ method_name }}(request) + {% endif %} + + expected_params = [ + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + ( + "{{ req_field.name | camel_case }}", + {% if req_field.field_pb.type == 9 %} + "{{ req_field.field_pb.default_value }}", + {% else %} + {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, + {% endif %}{# default is str #} + ), + {% endfor %} + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_{{ method_name }}_rest_unset_required_fields(): + transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.{{ method.transport_safe_name|snake_case }}._get_unset_required_fields({}) + assert set(unset_fields) == (set(({% for param in 
method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) + + {% endif %}{# required_fields #} + + +{% if not method.client_streaming %} +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_{{ method_name }}_rest_interceptors(null_interceptor): + transport = transports.{{ service.name }}RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), + ) + client = {{ service.client_name }}(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + {% if method.lro %} + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + {% endif %} + {% if not method.void %} + mock.patch.object(transports.{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ + {% endif %} + mock.patch.object(transports.{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: + pre.assert_not_called() + {% if not method.void %} + post.assert_not_called() + {% endif %} + + transcode.return_value = {"method": "post", "uri": "my_uri", "body": None, "query_params": {},} + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + {% if not method.void %} + req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + + {% if method.server_streaming %} + req.return_value._content = "[{}]".format(req.return_value._content) + {% endif %} + + {% endif %} + + request = {{ method.input.ident }}() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + {% if not method.void %} + post.return_value = {{ method.output.ident }} + {% endif %} + + client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + {% if not method.void %} + post.assert_called_once() + {% endif %} + +{% endif %}{# streaming #} + + +def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + {% endif %} + {% endfor %} + request = request_type(request_init) + {% if method.client_streaming %} + requests = [request] + {% endif %} + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + {% if method.client_streaming %} + client.{{ method_name }}(iter(requests)) + {% else %} + client.{{ method_name }}(request) + {% endif %} + + +{% if method.flattened_fields %} +def test_{{ method_name }}_rest_flattened(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% else %} + return_value = {{ method.output.ident }}() + {% endif %} + + # get arguments that satisfy an http rule for this method + sample_request = {{ method.http_options[0].sample_request(method) }} + + # get truthy value for each flattened field + mock_args = dict( + {% for field in method.flattened_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% elif method.server_streaming %} + json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) + {% else %} + json_return_value = {{ method.output.ident }}.to_json(return_value) + {% endif %} + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + {% if method.server_streaming %} + with mock.patch.object(response_value, 'iter_content') as iter_content: + iter_content.return_value = iter(json_return_value) + client.{{ method_name }}(**mock_args) + {% else %} + client.{{ method_name }}(**mock_args) + {% endif %} + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + {% with uri = method.http_options[0].uri %} + assert path_template.validate("%s{{ uri }}" % client.transport._host, args[1]) + {% endwith %} + {# TODO(kbandes) - reverse-transcode request args to check all request fields #} + + +def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.{{ method_name }}( + {{ method.input.ident }}(), + {% for field in method.flattened_fields.values() %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) +{% endif %}{# flattened fields #} + + +{% if method.paged_result_field %} +def test_{{ method_name }}_rest_pager(transport: str = 'rest'): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + {% if method.paged_result_field.map%} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'a':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'b':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'c':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={}, + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'g':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}={ + 'h':{{ method.paged_result_field.type.fields.get('value').ident }}(), + 'i':{{ method.paged_result_field.type.fields.get('value').ident }}(), + }, + ), + ) + {% else %} + response = ( + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='abc', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[], + next_page_token='def', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + ], + next_page_token='ghi', + ), + {{ method.output.ident }}( + {{ method.paged_result_field.name }}=[ + {{ method.paged_result_field.type.ident }}(), + {{ method.paged_result_field.type.ident }}(), + ], + ), + ) + {% endif %} + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple({{ method.output.ident }}.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in 
zip(return_values, response): + {% if method.server_streaming %} + response_val = "[{}]".format({{ method.output.ident }}.to_json(response_val)) + {% endif %} + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + sample_request["{{ field.name }}"] = {{ field.mock_value }} + {% endif %} + {% endfor %} + + + pager = client.{{ method_name }}(request=sample_request) + + {% if method.paged_result_field.map %} + assert isinstance(pager.get('a'), {{ method.paged_result_field.type.fields.get('value').ident }}) + assert pager.get('h') is None + {% endif %} + + results = list(pager) + assert len(results) == 6 + {% if method.paged_result_field.map %} + assert all( + isinstance(i, tuple) + for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == (str, {{ method.paged_result_field.type.fields.get('value').ident }}) + + assert pager.get('a') is None + assert isinstance(pager.get('h'), {{ method.paged_result_field.type.fields.get('value').ident }}) + {% else %} + assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) + for i in results) + {% endif %} + + pages = list(client.{{ method_name }}(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +{%- else %}{# paged_result_field #} + +def test_{{ method_name }}_rest_error(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + {%- if not method.http_options %} + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. 
+ with pytest.raises(RuntimeError) as runtime_error: + client.{{ method_name }}({}) + assert ('Cannot define a method without a valid `google.api.http` annotation.' + in str(runtime_error.value)) + + {%- endif %} +{% endif %}{# flattened_fields #} + +{% else %}{# this is an lro or streaming method #} +def test_{{ method_name }}_rest_unimplemented(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} + with pytest.raises(NotImplementedError): + client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %}) + +{% endif %}{# not lro and not streaming #}{% else %}{# not method.http_options #} +def test_{{ method_name }}_rest_no_http_options(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = {{ method.input.ident }}() + {% if method.client_streaming %} + requests = [request] + {% endif %} + with pytest.raises(RuntimeError): + client.{{ method_name }}({% if method.client_streaming %}requests{% else %}request{% endif %}) + + +{% endif %}{# not method.http_options #} +{% endwith %}{# method_name #} +{% endmacro %} diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index 6da5f76b8da9..ac186d293e78 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -355,7 +355,8 @@ def make_message_pb2( def make_field_pb2(name: str, number: int, type: int = 11, # 11 == message type_name: str = None, - oneof_index: int = None + oneof_index: int = None, + **kwargs, ) -> desc.FieldDescriptorProto: return desc.FieldDescriptorProto( name=name, @@ -363,6 +364,7 @@ def make_field_pb2(name: str, number: int, type=type, type_name=type_name, oneof_index=oneof_index, + **kwargs, ) def 
make_oneof_pb2(name: str) -> desc.OneofDescriptorProto: diff --git a/packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto b/packages/gapic-generator/tests/fragments/test_extended_operation_forwardcompat_lro.proto similarity index 88% rename from packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto rename to packages/gapic-generator/tests/fragments/test_extended_operation_forwardcompat_lro.proto index f7206a7c6b85..4d0da511068e 100644 --- a/packages/gapic-generator/tests/fragments/test_diregapic_forwardcompat_lro.proto +++ b/packages/gapic-generator/tests/fragments/test_extended_operation_forwardcompat_lro.proto @@ -28,7 +28,10 @@ message Operation { RUNNING = 121282975; } - optional string name = 1 [(google.cloud.operation_field) = NAME]; + optional string name = 1 [ + (google.cloud.operation_field) = NAME, + (google.cloud.operation_response_field) = "name" + ]; optional string http_error_message = 202521945 [(google.cloud.operation_field) = ERROR_MESSAGE]; optional int32 http_error_status_code = 312345196 [(google.cloud.operation_field) = ERROR_CODE]; optional Status status = 181260274 [(google.cloud.operation_field) = STATUS]; @@ -50,8 +53,12 @@ message DeleteOperationResponse { } message StartExpensiveComputationRequest { - string computation = 1; - string fragment = 2; + string computation = 1 [ + (google.cloud.operation_request_field) = "computation" + ]; + string fragment = 2 [ + (google.cloud.operation_request_field) = "fragment" + ]; } message StartCheapComputationRequest { @@ -87,7 +94,7 @@ service DoStuff { option (google.api.default_host) = "my.example.com"; rpc StartExpensiveComputation(StartExpensiveComputationRequest) returns (Operation) { - option (google.cloud.operation_service) = "GlobalOperations"; + option (google.cloud.operation_service) = "DefactoOperationsClient"; option (google.api.http) = { post: "/expensive/v1/computations/{computation}/fragments/{fragment}" }; diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 27b373525319..eb762260afb3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -409,7 +409,7 @@ def export_assets(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_export_assets(): # Create a client @@ -507,7 +507,7 @@ def list_assets(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_list_assets(): # Create a client @@ -621,7 +621,7 @@ def batch_get_assets_history(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_batch_get_assets_history(): # Create a client @@ -697,7 +697,7 @@ def create_feed(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_create_feed(): # Create a client @@ -807,7 +807,7 @@ def get_feed(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_get_feed(): # Create a client @@ -909,7 +909,7 @@ def list_feeds(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_list_feeds(): # Create a client @@ -1004,7 +1004,7 @@ def update_feed(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_update_feed(): # Create a client @@ -1108,7 +1108,7 @@ def delete_feed(self, .. 
code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_delete_feed(): # Create a client @@ -1197,7 +1197,7 @@ def search_all_resources(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_search_all_resources(): # Create a client @@ -1397,7 +1397,7 @@ def search_all_iam_policies(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_search_all_iam_policies(): # Create a client @@ -1573,7 +1573,7 @@ def analyze_iam_policy(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_analyze_iam_policy(): # Create a client @@ -1663,7 +1663,7 @@ def analyze_iam_policy_longrunning(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_analyze_iam_policy_longrunning(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index a873db9094de..d0596429dfa5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -83,6 +83,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ':' not in host: host += ':443' @@ -349,6 +350,10 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ]]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ( 'AssetServiceTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 658b5faf4ead..64cdc5350be8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -598,6 +598,11 @@ def analyze_iam_policy_longrunning(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( 'AssetServiceGrpcTransport', ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 7700dea02f55..3133564d4db9 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.19.7', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index b211ce806d5f..840d32681ce9 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -543,7 +543,6 @@ def test_export_assets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest() - @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): client = AssetServiceAsyncClient( @@ -693,7 +692,6 @@ def test_list_assets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest() - @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): client = AssetServiceAsyncClient( @@ -832,7 +830,6 @@ def test_list_assets_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_assets_flattened_async(): client = AssetServiceAsyncClient( @@ -861,7 +858,6 @@ async def test_list_assets_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_assets_flattened_error_async(): client = AssetServiceAsyncClient( @@ -1114,7 +1110,6 @@ def test_batch_get_assets_history_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest() - @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceAsyncClient( @@ -1269,7 +1264,6 @@ def test_create_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest() - @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): client = AssetServiceAsyncClient( @@ -1414,7 +1408,6 @@ def test_create_feed_flattened_error(): 
parent='parent_value', ) - @pytest.mark.asyncio async def test_create_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -1443,7 +1436,6 @@ async def test_create_feed_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_create_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -1516,7 +1508,6 @@ def test_get_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest() - @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): client = AssetServiceAsyncClient( @@ -1661,7 +1652,6 @@ def test_get_feed_flattened_error(): name='name_value', ) - @pytest.mark.asyncio async def test_get_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -1690,7 +1680,6 @@ async def test_get_feed_flattened_async(): mock_val = 'name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_get_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -1755,7 +1744,6 @@ def test_list_feeds_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest() - @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): client = AssetServiceAsyncClient( @@ -1892,7 +1880,6 @@ def test_list_feeds_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_feeds_flattened_async(): client = AssetServiceAsyncClient( @@ -1921,7 +1908,6 @@ async def test_list_feeds_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_feeds_flattened_error_async(): client = AssetServiceAsyncClient( @@ -1994,7 +1980,6 @@ def test_update_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateFeedRequest() - @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', 
request_type=asset_service.UpdateFeedRequest): client = AssetServiceAsyncClient( @@ -2139,7 +2124,6 @@ def test_update_feed_flattened_error(): feed=asset_service.Feed(name='name_value'), ) - @pytest.mark.asyncio async def test_update_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -2168,7 +2152,6 @@ async def test_update_feed_flattened_async(): mock_val = asset_service.Feed(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_update_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2232,7 +2215,6 @@ def test_delete_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest() - @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): client = AssetServiceAsyncClient( @@ -2368,7 +2350,6 @@ def test_delete_feed_flattened_error(): name='name_value', ) - @pytest.mark.asyncio async def test_delete_feed_flattened_async(): client = AssetServiceAsyncClient( @@ -2397,7 +2378,6 @@ async def test_delete_feed_flattened_async(): mock_val = 'name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_delete_feed_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2464,7 +2444,6 @@ def test_search_all_resources_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest() - @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceAsyncClient( @@ -2613,7 +2592,6 @@ def test_search_all_resources_flattened_error(): asset_types=['asset_types_value'], ) - @pytest.mark.asyncio async def test_search_all_resources_flattened_async(): client = AssetServiceAsyncClient( @@ -2650,7 +2628,6 @@ async def test_search_all_resources_flattened_async(): mock_val = ['asset_types_value'] assert arg == mock_val - @pytest.mark.asyncio async def 
test_search_all_resources_flattened_error_async(): client = AssetServiceAsyncClient( @@ -2907,7 +2884,6 @@ def test_search_all_iam_policies_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest() - @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceAsyncClient( @@ -3051,7 +3027,6 @@ def test_search_all_iam_policies_flattened_error(): query='query_value', ) - @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_async(): client = AssetServiceAsyncClient( @@ -3084,7 +3059,6 @@ async def test_search_all_iam_policies_flattened_async(): mock_val = 'query_value' assert arg == mock_val - @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_error_async(): client = AssetServiceAsyncClient( @@ -3340,7 +3314,6 @@ def test_analyze_iam_policy_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest() - @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceAsyncClient( @@ -3488,7 +3461,6 @@ def test_analyze_iam_policy_longrunning_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() - @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceAsyncClient( @@ -3673,6 +3645,15 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = AssetServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name 
+ def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AssetServiceClient( @@ -3728,6 +3709,14 @@ def test_asset_service_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_asset_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 5747e7e30ef4..a8a378a6a196 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -398,7 +398,7 @@ def generate_access_token(self, .. code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_generate_access_token(): # Create a client @@ -545,7 +545,7 @@ def generate_id_token(self, .. code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_generate_id_token(): # Create a client @@ -685,7 +685,7 @@ def sign_blob(self, .. code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_sign_blob(): # Create a client @@ -812,7 +812,7 @@ def sign_jwt(self, .. 
code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_sign_jwt(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 265e6c8ee853..b6ffa602ad85 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -80,6 +80,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: host += ':443' @@ -209,6 +210,10 @@ def sign_jwt(self) -> Callable[ ]]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ( 'IAMCredentialsTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index ef1a961d311d..ff47dca82592 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -343,6 +343,11 @@ def sign_jwt(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( 'IAMCredentialsGrpcTransport', ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 5f1b455f9010..4756b3dc163d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.19.7', ), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index d357ec3600be..587f6c9ef585 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -537,7 +537,6 @@ def test_generate_access_token_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateAccessTokenRequest() - @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsAsyncClient( @@ -689,7 +688,6 @@ def test_generate_access_token_flattened_error(): lifetime=duration_pb2.Duration(seconds=751), ) - @pytest.mark.asyncio async def test_generate_access_token_flattened_async(): client = IAMCredentialsAsyncClient( @@ -728,7 +726,6 @@ async def test_generate_access_token_flattened_async(): assert arg == mock_val assert DurationRule().to_proto(args[0].lifetime) == duration_pb2.Duration(seconds=751) - @pytest.mark.asyncio async def test_generate_access_token_flattened_error_async(): client = 
IAMCredentialsAsyncClient( @@ -798,7 +795,6 @@ def test_generate_id_token_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateIdTokenRequest() - @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsAsyncClient( @@ -952,7 +948,6 @@ def test_generate_id_token_flattened_error(): include_email=True, ) - @pytest.mark.asyncio async def test_generate_id_token_flattened_async(): client = IAMCredentialsAsyncClient( @@ -993,7 +988,6 @@ async def test_generate_id_token_flattened_async(): mock_val = True assert arg == mock_val - @pytest.mark.asyncio async def test_generate_id_token_flattened_error_async(): client = IAMCredentialsAsyncClient( @@ -1065,7 +1059,6 @@ def test_sign_blob_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.SignBlobRequest() - @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): client = IAMCredentialsAsyncClient( @@ -1216,7 +1209,6 @@ def test_sign_blob_flattened_error(): payload=b'payload_blob', ) - @pytest.mark.asyncio async def test_sign_blob_flattened_async(): client = IAMCredentialsAsyncClient( @@ -1253,7 +1245,6 @@ async def test_sign_blob_flattened_async(): mock_val = b'payload_blob' assert arg == mock_val - @pytest.mark.asyncio async def test_sign_blob_flattened_error_async(): client = IAMCredentialsAsyncClient( @@ -1324,7 +1315,6 @@ def test_sign_jwt_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.SignJwtRequest() - @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): client = IAMCredentialsAsyncClient( @@ -1475,7 +1465,6 @@ def test_sign_jwt_flattened_error(): payload='payload_value', ) - @pytest.mark.asyncio async def test_sign_jwt_flattened_async(): client = IAMCredentialsAsyncClient( @@ -1512,7 +1501,6 @@ async def 
test_sign_jwt_flattened_async(): mock_val = 'payload_value' assert arg == mock_val - @pytest.mark.asyncio async def test_sign_jwt_flattened_error_async(): client = IAMCredentialsAsyncClient( @@ -1616,6 +1604,15 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = IAMCredentialsClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = IAMCredentialsClient( @@ -1658,6 +1655,14 @@ def test_iam_credentials_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_iam_credentials_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index d32b932fa5c9..113ae1b900a5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -428,7 +428,7 @@ def list_buckets(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_buckets(): # Create a client @@ -543,7 +543,7 @@ def get_bucket(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_bucket(): # Create a client @@ -618,7 +618,7 @@ def create_bucket(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_bucket(): # Create a client @@ -702,7 +702,7 @@ def update_bucket(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_bucket(): # Create a client @@ -777,7 +777,7 @@ def delete_bucket(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_bucket(): # Create a client @@ -841,7 +841,7 @@ def undelete_bucket(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_undelete_bucket(): # Create a client @@ -904,7 +904,7 @@ def list_views(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_views(): # Create a client @@ -1011,7 +1011,7 @@ def get_view(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_view(): # Create a client @@ -1087,7 +1087,7 @@ def create_view(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_view(): # Create a client @@ -1164,7 +1164,7 @@ def update_view(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_view(): # Create a client @@ -1238,7 +1238,7 @@ def delete_view(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_view(): # Create a client @@ -1301,7 +1301,7 @@ def list_sinks(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_sinks(): # Create a client @@ -1413,7 +1413,7 @@ def get_sink(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_sink(): # Create a client @@ -1527,7 +1527,7 @@ def create_sink(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_sink(): # Create a client @@ -1658,7 +1658,7 @@ def update_sink(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_sink(): # Create a client @@ -1805,7 +1805,7 @@ def delete_sink(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_sink(): # Create a client @@ -1895,7 +1895,7 @@ def list_exclusions(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_exclusions(): # Create a client @@ -2007,7 +2007,7 @@ def get_exclusion(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_exclusion(): # Create a client @@ -2123,7 +2123,7 @@ def create_exclusion(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_exclusion(): # Create a client @@ -2254,7 +2254,7 @@ def update_exclusion(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_exclusion(): # Create a client @@ -2396,7 +2396,7 @@ def delete_exclusion(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_exclusion(): # Create a client @@ -2495,7 +2495,7 @@ def get_cmek_settings(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_cmek_settings(): # Create a client @@ -2598,7 +2598,7 @@ def update_cmek_settings(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_cmek_settings(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index b5c472fc6224..88ba88234db2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -84,6 +84,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ':' not in host: host += ':443' @@ -507,6 +508,10 @@ def update_cmek_settings(self) -> Callable[ ]]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ( 'ConfigServiceV2Transport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index dc81dff6b136..5d0779f047da 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -878,6 +878,11 @@ def update_cmek_settings(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( 'ConfigServiceV2GrpcTransport', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 92a2ff322f84..491ebddf909f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -389,7 +389,7 @@ def delete_log(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_log(): # Create a client @@ -492,7 +492,7 @@ def write_log_entries(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_write_log_entries(): # Create a client @@ -671,7 +671,7 @@ def list_log_entries(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_log_entries(): # Create a client @@ -816,7 +816,7 @@ def list_monitored_resource_descriptors(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_monitored_resource_descriptors(): # Create a client @@ -899,7 +899,7 @@ def list_logs(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_logs(): # Create a client @@ -1012,7 +1012,7 @@ def tail_log_entries(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_tail_log_entries(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index a8c76b1d12bd..54c0c48b7f3b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -85,6 +85,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ':' not in host: host += ':443' @@ -262,6 +263,10 @@ def tail_log_entries(self) -> Callable[ ]]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ( 'LoggingServiceV2Transport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index d0c62097b806..20f4c5133462 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -402,6 +402,11 @@ def tail_log_entries(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( 'LoggingServiceV2GrpcTransport', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 1cf7c076a851..ee1772ad120c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -385,7 +385,7 @@ def list_log_metrics(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_log_metrics(): # Create a client @@ -494,7 +494,7 @@ def get_log_metric(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_log_metric(): # Create a client @@ -600,7 +600,7 @@ def create_log_metric(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_log_metric(): # Create a client @@ -724,7 +724,7 @@ def update_log_metric(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_log_metric(): # Create a client @@ -846,7 +846,7 @@ def delete_log_metric(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_log_metric(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 6f2396f5cebb..487dd0bdc0f5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -85,6 +85,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ':' not in host: host += ':443' @@ -232,6 +233,10 @@ def delete_log_metric(self) -> Callable[ ]]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ( 'MetricsServiceV2Transport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index f4f901240e66..d12216949f95 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -357,6 +357,11 @@ def delete_log_metric(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( 'MetricsServiceV2GrpcTransport', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 49e692453f6d..5622fedf4398 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.19.7', ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 9a818be44901..a5562f81d382 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -541,7 +541,6 @@ def test_list_buckets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest() - @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): client = ConfigServiceV2AsyncClient( @@ -680,7 +679,6 @@ def test_list_buckets_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -709,7 +707,6 @@ async def test_list_buckets_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -972,7 +969,6 @@ def test_get_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest() - @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1139,7 +1135,6 @@ def test_create_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() - @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1306,7 +1301,6 @@ def test_update_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() - @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1462,7 +1456,6 @@ def test_delete_bucket_empty_call(): _, args, 
_ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest() - @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1607,7 +1600,6 @@ def test_undelete_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest() - @pytest.mark.asyncio async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1755,7 +1747,6 @@ def test_list_views_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest() - @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): client = ConfigServiceV2AsyncClient( @@ -1894,7 +1885,6 @@ def test_list_views_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -1923,7 +1913,6 @@ async def test_list_views_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_views_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -2182,7 +2171,6 @@ def test_get_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest() - @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): client = ConfigServiceV2AsyncClient( @@ -2341,7 +2329,6 @@ def test_create_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest() - @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): client = ConfigServiceV2AsyncClient( @@ -2500,7 +2487,6 @@ def test_update_view_empty_call(): _, 
args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest() - @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): client = ConfigServiceV2AsyncClient( @@ -2652,7 +2638,6 @@ def test_delete_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest() - @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): client = ConfigServiceV2AsyncClient( @@ -2800,7 +2785,6 @@ def test_list_sinks_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest() - @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): client = ConfigServiceV2AsyncClient( @@ -2939,7 +2923,6 @@ def test_list_sinks_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -2968,7 +2951,6 @@ async def test_list_sinks_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -3238,7 +3220,6 @@ def test_get_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest() - @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): client = ConfigServiceV2AsyncClient( @@ -3391,7 +3372,6 @@ def test_get_sink_flattened_error(): sink_name='sink_name_value', ) - @pytest.mark.asyncio async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -3420,7 +3400,6 @@ async def test_get_sink_flattened_async(): mock_val = 'sink_name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): client = 
ConfigServiceV2AsyncClient( @@ -3502,7 +3481,6 @@ def test_create_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest() - @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): client = ConfigServiceV2AsyncClient( @@ -3660,7 +3638,6 @@ def test_create_sink_flattened_error(): sink=logging_config.LogSink(name='name_value'), ) - @pytest.mark.asyncio async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -3693,7 +3670,6 @@ async def test_create_sink_flattened_async(): mock_val = logging_config.LogSink(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -3776,7 +3752,6 @@ def test_update_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest() - @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): client = ConfigServiceV2AsyncClient( @@ -3939,7 +3914,6 @@ def test_update_sink_flattened_error(): update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - @pytest.mark.asyncio async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -3976,7 +3950,6 @@ async def test_update_sink_flattened_async(): mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) assert arg == mock_val - @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -4042,7 +4015,6 @@ def test_delete_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest() - @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): client = ConfigServiceV2AsyncClient( @@ -4178,7 +4150,6 @@ def 
test_delete_sink_flattened_error(): sink_name='sink_name_value', ) - @pytest.mark.asyncio async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -4207,7 +4178,6 @@ async def test_delete_sink_flattened_async(): mock_val = 'sink_name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -4274,7 +4244,6 @@ def test_list_exclusions_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest() - @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( @@ -4413,7 +4382,6 @@ def test_list_exclusions_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -4442,7 +4410,6 @@ async def test_list_exclusions_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -4703,7 +4670,6 @@ def test_get_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest() - @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -4848,7 +4814,6 @@ def test_get_exclusion_flattened_error(): name='name_value', ) - @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -4877,7 +4842,6 @@ async def test_get_exclusion_flattened_async(): mock_val = 'name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -4950,7 +4914,6 @@ def test_create_exclusion_empty_call(): _, args, _ = 
call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest() - @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -5100,7 +5063,6 @@ def test_create_exclusion_flattened_error(): exclusion=logging_config.LogExclusion(name='name_value'), ) - @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -5133,7 +5095,6 @@ async def test_create_exclusion_flattened_async(): mock_val = logging_config.LogExclusion(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -5207,7 +5168,6 @@ def test_update_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest() - @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -5362,7 +5322,6 @@ def test_update_exclusion_flattened_error(): update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -5399,7 +5358,6 @@ async def test_update_exclusion_flattened_async(): mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) assert arg == mock_val - @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -5465,7 +5423,6 @@ def test_delete_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest() - @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -5601,7 +5558,6 @@ def 
test_delete_exclusion_flattened_error(): name='name_value', ) - @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( @@ -5630,7 +5586,6 @@ async def test_delete_exclusion_flattened_async(): mock_val = 'name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( @@ -5701,7 +5656,6 @@ def test_get_cmek_settings_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest() - @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -5860,7 +5814,6 @@ def test_update_cmek_settings_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest() - @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -6050,6 +6003,15 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = ConfigServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = ConfigServiceV2Client( @@ -6111,6 +6073,14 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index de5b11753723..483394f936dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -545,7 +545,6 @@ def test_delete_log_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest() - @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): client = LoggingServiceV2AsyncClient( @@ -681,7 +680,6 @@ def test_delete_log_flattened_error(): log_name='log_name_value', ) - @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -710,7 +708,6 @@ async def test_delete_log_flattened_async(): mock_val = 'log_name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -775,7 +772,6 @@ def test_write_log_entries_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest() - @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): client = 
LoggingServiceV2AsyncClient( @@ -864,7 +860,6 @@ def test_write_log_entries_flattened_error(): entries=[log_entry.LogEntry(log_name='log_name_value')], ) - @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -905,7 +900,6 @@ async def test_write_log_entries_flattened_async(): mock_val = [log_entry.LogEntry(log_name='log_name_value')] assert arg == mock_val - @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -975,7 +969,6 @@ def test_list_log_entries_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest() - @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): client = LoggingServiceV2AsyncClient( @@ -1061,7 +1054,6 @@ def test_list_log_entries_flattened_error(): order_by='order_by_value', ) - @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1098,7 +1090,6 @@ async def test_list_log_entries_flattened_async(): mock_val = 'order_by_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -1350,7 +1341,6 @@ def test_list_monitored_resource_descriptors_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): client = LoggingServiceV2AsyncClient( @@ -1623,7 +1613,6 @@ def test_list_logs_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest() - @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): client = LoggingServiceV2AsyncClient( 
@@ -1764,7 +1753,6 @@ def test_list_logs_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( @@ -1793,7 +1781,6 @@ async def test_list_logs_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( @@ -2152,6 +2139,15 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = LoggingServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = LoggingServiceV2Client( @@ -2196,6 +2192,14 @@ def test_logging_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 07fb89958bf7..e04e14481038 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -546,7 +546,6 @@ def test_list_log_metrics_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == 
logging_metrics.ListLogMetricsRequest() - @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): client = MetricsServiceV2AsyncClient( @@ -685,7 +684,6 @@ def test_list_log_metrics_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -714,7 +712,6 @@ async def test_list_log_metrics_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -977,7 +974,6 @@ def test_get_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest() - @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1124,7 +1120,6 @@ def test_get_log_metric_flattened_error(): metric_name='metric_name_value', ) - @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -1153,7 +1148,6 @@ async def test_get_log_metric_flattened_async(): mock_val = 'metric_name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -1228,7 +1222,6 @@ def test_create_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest() - @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1380,7 +1373,6 @@ def test_create_log_metric_flattened_error(): metric=logging_metrics.LogMetric(name='name_value'), ) - @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = 
MetricsServiceV2AsyncClient( @@ -1413,7 +1405,6 @@ async def test_create_log_metric_flattened_async(): mock_val = logging_metrics.LogMetric(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -1489,7 +1480,6 @@ def test_update_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest() - @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1641,7 +1631,6 @@ def test_update_log_metric_flattened_error(): metric=logging_metrics.LogMetric(name='name_value'), ) - @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -1674,7 +1663,6 @@ async def test_update_log_metric_flattened_async(): mock_val = logging_metrics.LogMetric(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -1739,7 +1727,6 @@ def test_delete_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest() - @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1875,7 +1862,6 @@ def test_delete_log_metric_flattened_error(): metric_name='metric_name_value', ) - @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( @@ -1904,7 +1890,6 @@ async def test_delete_log_metric_flattened_async(): mock_val = 'metric_name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( @@ -2006,6 +1991,15 @@ def 
test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = MetricsServiceV2Client.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = MetricsServiceV2Client( @@ -2049,6 +2043,14 @@ def test_metrics_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 94d494d4594f..098878cb70f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -418,7 +418,7 @@ def list_instances(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_list_instances(): # Create a client @@ -527,7 +527,7 @@ def get_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_get_instance(): # Create a client @@ -636,7 +636,7 @@ def create_instance(self, .. 
code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_create_instance(): # Create a client @@ -784,7 +784,7 @@ def update_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_update_instance(): # Create a client @@ -917,7 +917,7 @@ def upgrade_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_upgrade_instance(): # Create a client @@ -1047,7 +1047,7 @@ def import_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_import_instance(): # Create a client @@ -1176,7 +1176,7 @@ def export_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_export_instance(): # Create a client @@ -1303,7 +1303,7 @@ def failover_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_failover_instance(): # Create a client @@ -1425,7 +1425,7 @@ def delete_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_delete_instance(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 3459483dfbdb..92633afcf21a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -82,6 +82,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. 
""" + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ':' not in host: host += ':443' @@ -258,6 +259,10 @@ def delete_instance(self) -> Callable[ ]]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ( 'CloudRedisTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 054e20172bd1..fbdc767496e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -542,6 +542,11 @@ def delete_instance(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + + __all__ = ( 'CloudRedisGrpcTransport', ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 63c965649f9d..bcb797f25299 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', 'proto-plus >= 1.19.7', ), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 8675180642b7..fa213d081ce9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py 
+++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -545,7 +545,6 @@ def test_list_instances_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest() - @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): client = CloudRedisAsyncClient( @@ -686,7 +685,6 @@ def test_list_instances_flattened_error(): parent='parent_value', ) - @pytest.mark.asyncio async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient( @@ -715,7 +713,6 @@ async def test_list_instances_flattened_async(): mock_val = 'parent_value' assert arg == mock_val - @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1000,7 +997,6 @@ def test_get_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest() - @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisAsyncClient( @@ -1169,7 +1165,6 @@ def test_get_instance_flattened_error(): name='name_value', ) - @pytest.mark.asyncio async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -1198,7 +1193,6 @@ async def test_get_instance_flattened_async(): mock_val = 'name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1262,7 +1256,6 @@ def test_create_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest() - @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisAsyncClient( @@ -1410,7 +1403,6 @@ def test_create_instance_flattened_error(): instance=cloud_redis.Instance(name='name_value'), 
) - @pytest.mark.asyncio async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -1449,7 +1441,6 @@ async def test_create_instance_flattened_async(): mock_val = cloud_redis.Instance(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1515,7 +1506,6 @@ def test_update_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpdateInstanceRequest() - @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisAsyncClient( @@ -1658,7 +1648,6 @@ def test_update_instance_flattened_error(): instance=cloud_redis.Instance(name='name_value'), ) - @pytest.mark.asyncio async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -1693,7 +1682,6 @@ async def test_update_instance_flattened_async(): mock_val = cloud_redis.Instance(name='name_value') assert arg == mock_val - @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -1758,7 +1746,6 @@ def test_upgrade_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpgradeInstanceRequest() - @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisAsyncClient( @@ -1901,7 +1888,6 @@ def test_upgrade_instance_flattened_error(): redis_version='redis_version_value', ) - @pytest.mark.asyncio async def test_upgrade_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -1936,7 +1922,6 @@ async def test_upgrade_instance_flattened_async(): mock_val = 'redis_version_value' assert arg == mock_val - @pytest.mark.asyncio async def test_upgrade_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2001,7 +1986,6 @@ def 
test_import_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ImportInstanceRequest() - @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisAsyncClient( @@ -2144,7 +2128,6 @@ def test_import_instance_flattened_error(): input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), ) - @pytest.mark.asyncio async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2179,7 +2162,6 @@ async def test_import_instance_flattened_async(): mock_val = cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')) assert arg == mock_val - @pytest.mark.asyncio async def test_import_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2244,7 +2226,6 @@ def test_export_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ExportInstanceRequest() - @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisAsyncClient( @@ -2387,7 +2368,6 @@ def test_export_instance_flattened_error(): output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), ) - @pytest.mark.asyncio async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2422,7 +2402,6 @@ async def test_export_instance_flattened_async(): mock_val = cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')) assert arg == mock_val - @pytest.mark.asyncio async def test_export_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2487,7 +2466,6 @@ def test_failover_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.FailoverInstanceRequest() - @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', 
request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisAsyncClient( @@ -2630,7 +2608,6 @@ def test_failover_instance_flattened_error(): data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, ) - @pytest.mark.asyncio async def test_failover_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2665,7 +2642,6 @@ async def test_failover_instance_flattened_async(): mock_val = cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS assert arg == mock_val - @pytest.mark.asyncio async def test_failover_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -2730,7 +2706,6 @@ def test_delete_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest() - @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisAsyncClient( @@ -2868,7 +2843,6 @@ def test_delete_instance_flattened_error(): name='name_value', ) - @pytest.mark.asyncio async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient( @@ -2899,7 +2873,6 @@ async def test_delete_instance_flattened_async(): mock_val = 'name_value' assert arg == mock_val - @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): client = CloudRedisAsyncClient( @@ -3001,6 +2974,15 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = CloudRedisClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = CloudRedisClient( @@ -3053,6 +3035,14 @@ def test_cloud_redis_base_transport(): with pytest.raises(NotImplementedError): transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_cloud_redis_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index b6b644023992..ab7178670c27 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -1158,6 +1158,305 @@ def test_cross_file_lro(): assert method.lro.metadata_type.name == 'AsyncDoThingMetadata' +def test_extended_lro(): + initiate_options = descriptor_pb2.MethodOptions() + initiate_options.Extensions[ex_ops_pb2.operation_service] = "OpsService" + + polling_method_options = descriptor_pb2.MethodOptions() + polling_method_options.Extensions[ex_ops_pb2.operation_polling_method] = True + + T = descriptor_pb2.FieldDescriptorProto.Type + operation_fields = tuple( + make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + for f in operation_fields: + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + api_schema = api.Proto.build( + make_file_pb2( + "extended_lro.proto", + package="exlro", + messages=( + make_message_pb2(name="Operation", fields=operation_fields), + make_message_pb2(name="InitialRequest"), + make_message_pb2(name="GetOperationRequest"), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="OpsService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Get", + input_type="exlro.GetOperationRequest", + 
output_type="exlro.Operation", + options=polling_method_options, + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name="BasicService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Initiate", + input_type="exlro.InitialRequest", + output_type="exlro.Operation", + options=initiate_options, + ), + ), + ), + ), + ), + file_to_generate=True, + naming=make_naming(), + ) + + initiate = api_schema.services["exlro.BasicService"].methods["Initiate"] + assert initiate.extended_lro + assert initiate.extended_lro.request_type == api_schema.messages["exlro.GetOperationRequest"] + assert initiate.extended_lro.operation_type == api_schema.messages["exlro.Operation"] + + +def test_extended_lro_no_such_service(): + initiate_options = descriptor_pb2.MethodOptions() + initiate_options.Extensions[ex_ops_pb2.operation_service] = "Nonesuch" + + polling_method_options = descriptor_pb2.MethodOptions() + polling_method_options.Extensions[ex_ops_pb2.operation_polling_method] = True + + T = descriptor_pb2.FieldDescriptorProto.Type + operation_fields = tuple( + make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + for f in operation_fields: + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + with pytest.raises(ValueError): + api_schema = api.Proto.build( + make_file_pb2( + "extended_lro.proto", + package="exlro", + messages=( + make_message_pb2( + name="Operation", + fields=operation_fields, + ), + make_message_pb2( + name="InitialRequest" + ), + make_message_pb2( + name="GetOperationRequest" + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="OpsService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Get", + input_type="exlro.GetOperationRequest", + output_type="exlro.Operation", + options=polling_method_options, + ), + ), + ), + 
descriptor_pb2.ServiceDescriptorProto( + name="BasicService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Initiate", + input_type="exlro.InitialRequest", + output_type="exlro.Operation", + options=initiate_options, + ), + ), + ), + ), + ), + file_to_generate=True, + naming=make_naming(), + ) + + +def test_extended_lro_no_polling_method(): + initiate_options = descriptor_pb2.MethodOptions() + initiate_options.Extensions[ex_ops_pb2.operation_service] = "OpsService" + + T = descriptor_pb2.FieldDescriptorProto.Type + operation_fields = tuple( + make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + for f in operation_fields: + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + with pytest.raises(ValueError): + api_schema = api.Proto.build( + make_file_pb2( + "extended_lro.proto", + package="exlro", + messages=( + make_message_pb2( + name="Operation", + fields=operation_fields, + ), + make_message_pb2( + name="InitialRequest", + ), + make_message_pb2( + name="GetOperationRequest", + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="OpsService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Get", + input_type="exlro.GetOperationRequest", + output_type="exlro.Operation", + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name="BasicService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Initiate", + input_type="exlro.InitialRequest", + output_type="exlro.Operation", + options=initiate_options, + ), + ), + ), + ), + ), + file_to_generate=True, + naming=make_naming(), + ) + + +def test_extended_lro_different_output_types(): + initiate_options = descriptor_pb2.MethodOptions() + initiate_options.Extensions[ex_ops_pb2.operation_service] = "OpsService" + + polling_method_options = descriptor_pb2.MethodOptions() + 
polling_method_options.Extensions[ex_ops_pb2.operation_polling_method] = True + + T = descriptor_pb2.FieldDescriptorProto.Type + operation_fields = tuple( + make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + for f in operation_fields: + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + with pytest.raises(ValueError): + api_schema = api.Proto.build( + make_file_pb2( + "extended_lro.proto", + package="exlro", + messages=( + make_message_pb2( + name="Operation", + fields=operation_fields, + ), + make_message_pb2( + name="InitialRequest", + ), + make_message_pb2( + name="GetOperationRequest", + ), + make_message_pb2( + name="GetOperationResponse", + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="OpsService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Get", + input_type="exlro.GetOperationRequest", + output_type="exlro.GetOperationResponse", + options=polling_method_options, + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name="BasicService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Initiate", + input_type="exlro.InitialRequest", + output_type="exlro.Operation", + options=initiate_options, + ), + ), + ), + ), + ), + file_to_generate=True, + naming=make_naming(), + ) + + +def test_extended_lro_not_an_operation(): + initiate_options = descriptor_pb2.MethodOptions() + initiate_options.Extensions[ex_ops_pb2.operation_service] = "OpsService" + + polling_method_options = descriptor_pb2.MethodOptions() + polling_method_options.Extensions[ex_ops_pb2.operation_polling_method] = True + + with pytest.raises(ValueError): + api_schema = api.Proto.build( + make_file_pb2( + "extended_lro.proto", + package="exlro", + messages=( + make_message_pb2(name="Operation"), + make_message_pb2(name="InitialRequest"), + 
make_message_pb2(name="GetOperationRequest"), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="OpsService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Get", + input_type="exlro.GetOperationRequest", + output_type="exlro.Operation", + options=polling_method_options, + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name="BasicService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Initiate", + input_type="exlro.InitialRequest", + output_type="exlro.Operation", + options=initiate_options, + ), + ), + ), + ), + ), + file_to_generate=True, + naming=make_naming(), + ) + + def test_enums(): L = descriptor_pb2.SourceCodeInfo.Location enum_pb = descriptor_pb2.EnumDescriptorProto(name='Silly', value=( @@ -1723,8 +2022,8 @@ def generate_basic_extended_operations_setup(): initial_input_message, ], services=[ - regular_service, ops_service, + regular_service, ], ), ] @@ -1735,25 +2034,31 @@ def generate_basic_extended_operations_setup(): def test_extended_operations_lro_operation_service(): file_protos = generate_basic_extended_operations_setup() api_schema = api.API.build(file_protos) - initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + regular_service = api_schema.services["google.extended_operations.v1.stuff.RegularService"] + initial_method = regular_service.methods["CreateTask"] - expected = api_schema.services['google.extended_operations.v1.stuff.CustomOperations'] + operation_service = api_schema.services['google.extended_operations.v1.stuff.CustomOperations'] + expected = operation_service actual = api_schema.get_custom_operation_service(initial_method) assert expected is actual - assert actual.custom_polling_method is actual.methods["Get"] + assert actual.operation_polling_method is actual.methods["Get"] + + expected = {operation_service} + actual = api_schema.get_extended_operations_services(regular_service) + assert expected == actual def 
test_extended_operations_lro_operation_service_no_annotation(): file_protos = generate_basic_extended_operations_setup() - api_schema = api.API.build(file_protos) initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + # It's easier to manipulate data structures after building the API. del initial_method.options.Extensions[ex_ops_pb2.operation_service] - with pytest.raises(KeyError): + with pytest.raises(ValueError): api_schema.get_custom_operation_service(initial_method) @@ -1764,7 +2069,7 @@ def test_extended_operations_lro_operation_service_no_such_service(): initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] initial_method.options.Extensions[ex_ops_pb2.operation_service] = "UnrealService" - with pytest.raises(KeyError): + with pytest.raises(ValueError): api_schema.get_custom_operation_service(initial_method) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 993671574ddd..7b5c8bd1c744 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -18,6 +18,7 @@ from google.api import field_behavior_pb2 from google.api import resource_pb2 +from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 from gapic.schema import api @@ -485,3 +486,13 @@ def test_field_name_kword_disambiguation(): def test_field_resource_reference(): field = make_field(name='parent', type='TYPE_STRING') + + +def test_extended_operation_properties(): + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_request_field] = "squid" + options.Extensions[ex_ops_pb2.operation_response_field] = "clam" + f = make_field(options=options) + + assert f.operation_request_field == "squid" + assert 
f.operation_response_field == "clam" diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 7cd5910c3f08..9e9baf4e117a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -338,30 +338,30 @@ def test_is_extended_operation(): operation = make_message( name="Operation", - fields=( + fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) ) ) - for f in operation.field: + for f in operation.fields.values(): options = descriptor_pb2.FieldOptions() # Note: The field numbers were carefully chosen to be the corresponding enum values. options.Extensions[ex_ops_pb2.operation_field] = f.number f.options.MergeFrom(options) assert operation.is_extended_operation + assert operation.extended_operation_status_field == operation.fields["status"] # Missing a required field - missing = make_message( name="Operation", - fields=( + fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) # Missing error_message for i, name in enumerate(("name", "status", "error_code"), start=1) ) ) - for f in missing.field: + for f in missing.fields.values(): options = descriptor_pb2.FieldOptions() # Note: The field numbers were carefully chosen to be the corresponding enum values. 
options.Extensions[ex_ops_pb2.operation_field] = f.number @@ -370,15 +370,14 @@ def test_is_extended_operation(): assert not missing.is_extended_operation # Named incorrectly - my_message = make_message( name="MyMessage", - fields=( + fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) ) ) - for f in my_message.field: + for f in my_message.fields.values(): options = descriptor_pb2.FieldOptions() options.Extensions[ex_ops_pb2.operation_field] = f.number f.options.MergeFrom(options) @@ -389,12 +388,12 @@ def test_is_extended_operation(): for mapping in range(1, 5): duplicate = make_message( name="Operation", - fields=( + fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) ) ) - for f in duplicate.field: + for f in duplicate.fields.values(): options = descriptor_pb2.FieldOptions() # All set to the same value options.Extensions[ex_ops_pb2.operation_field] = mapping @@ -402,3 +401,49 @@ def test_is_extended_operation(): with pytest.raises(TypeError): duplicate.is_extended_operation + + # Just totally not an operation + random_message = make_message( + "MyOperation", + fields=[ + make_field(name="moniker", type=T.Value("TYPE_STRING"), number=1), + ], + ) + + assert not random_message.is_extended_operation + assert not random_message.extended_operation_status_field + + +def test_extended_operation_request_response_fields(): + T = descriptor_pb2.FieldDescriptorProto.Type + # Operation request + request = make_message( + name="Request", + fields=[ + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("project", "name", "armor_class", "id"), start=1) + ], + ) + expected = (request.fields["project"], request.fields["id"]) + for field in expected: + options = descriptor_pb2.FieldOptions() + 
options.Extensions[ex_ops_pb2.operation_request_field] = field.name + field.options.MergeFrom(options) + + actual = request.extended_operation_request_fields + assert actual == expected + + # Operation response + poll_request = make_message( + name="GetRequest", + fields=[ + make_field(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "rank", "affinity", "serial")) + ] + ) + expected = (poll_request.fields["name"], poll_request.fields["affinity"]) + for field in expected: + field.options.Extensions[ex_ops_pb2.operation_response_field] = field.name + + actual = poll_request.extended_operation_response_fields + assert actual == expected diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index a131a28e1987..a2e2dc38b313 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -15,6 +15,7 @@ import collections import dataclasses import json +import pytest from typing import Sequence from google.api import field_behavior_pb2 @@ -821,12 +822,12 @@ def test_is_operation_polling_method(): operation = make_message( name="Operation", - fields=( + fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) - ), + ], ) - for f in operation.field: + for f in operation.fields.values(): options = descriptor_pb2.FieldOptions() # Note: The field numbers were carefully chosen to be the corresponding enum values. 
options.Extensions[ex_ops_pb2.operation_field] = f.number @@ -873,6 +874,78 @@ def test_is_operation_polling_method(): assert not invalid_method.is_operation_polling_method +@pytest.mark.parametrize( + "all_field_names,canonical_name_to_field_name", + [ + [ + [ + "name", + "status", + "error_code", + "error_message", + ], + {}, + ], + [ + [ + "moniker", + "done_ness", + "errno", + "warning", + ], + { + "name": "moniker", + "status": "done_ness", + "error_code": "errno", + "error_message": "warning", + }, + ], + [ + [ + "name", + "status", + "http_error_code", + "http_error_message", + ], + { + "error_code": "http_error_code", + "error_message": "http_error_message", + }, + ], + # No fields means this cannot be an extended operation. + [[], None], + ], +) +def test_differently_named_extended_operation_fields( + all_field_names, + canonical_name_to_field_name, +): + T = descriptor_pb2.FieldDescriptorProto.Type + operation = make_message( + name="Operation", + fields=[ + make_field( + name=name.lower(), + type=T.Value("TYPE_STRING"), + number=i, + ) + for i, name in enumerate(all_field_names, start=1) + ] + ) + for f in operation.fields.values(): + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + expected = { + k: operation.fields[v] + for k, v in canonical_name_to_field_name.items() + } if canonical_name_to_field_name is not None else None + actual = operation.differently_named_extended_operation_fields + + assert expected == actual + + def test_transport_safe_name(): unsafe_methods = { name: make_method(name=name) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 33e83494f7c7..0ee88f921c6d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -532,12 +532,12 @@ def 
test_operation_polling_method(): operation = make_message( name="Operation", - fields=( + fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) - ), + ], ) - for f in operation.field: + for f in operation.fields.values(): options = descriptor_pb2.FieldOptions() # Note: The field numbers were carefully chosen to be the corresponding enum values. options.Extensions[ex_ops_pb2.operation_field] = f.number @@ -572,7 +572,7 @@ def test_operation_polling_method(): ], ) - assert ops_service.custom_polling_method == polling_method + assert ops_service.operation_polling_method == polling_method # Methods are LROs, so they are not polling methods user_service = make_service( @@ -586,7 +586,7 @@ def test_operation_polling_method(): ], ) - assert not user_service.custom_polling_method + assert not user_service.operation_polling_method def test_extended_operations_lro_detection(): @@ -594,12 +594,12 @@ def test_extended_operations_lro_detection(): operation = make_message( name="Operation", - fields=( + fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) - ), + ], ) - for f in operation.field: + for f in operation.fields.values(): options = descriptor_pb2.FieldOptions() # Note: The field numbers were carefully chosen to be the corresponding enum values. 
options.Extensions[ex_ops_pb2.operation_field] = f.number @@ -633,6 +633,8 @@ def test_extended_operations_lro_detection(): ], ) + assert not ops_service.has_extended_lro + assert not ops_service.any_extended_operations_methods assert not polling_method.operation_service # Methods are LROs, so they are not polling methods @@ -651,6 +653,7 @@ def test_extended_operations_lro_detection(): ], ) + assert user_service.any_extended_operations_methods # Note: we can't have the operation_serivce property point to the actual operation service # because Service objects can't perform the lookup. # Instead we kick that can to the API object and make it do the lookup and verification. From 8e15e9a4afaeaf6eeb7875bbd9e28cc08d94addb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 8 Apr 2022 01:45:55 +0200 Subject: [PATCH 0783/1339] chore(deps): update dependency google-api-core to v2.7.0 (#1227) * chore(deps): update dependency google-api-core to v2.7.0 * update golden files Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- .../services/asset_service/async_client.py | 20 +++++-------- .../services/asset_service/transports/base.py | 20 +++++-------- .../services/iam_credentials/async_client.py | 12 +++----- .../iam_credentials/transports/base.py | 12 +++----- .../config_service_v2/async_client.py | 28 +++++-------------- .../config_service_v2/transports/base.py | 28 +++++-------------- .../logging_service_v2/async_client.py | 24 ++++------------ .../logging_service_v2/transports/base.py | 24 ++++------------ .../metrics_service_v2/async_client.py | 16 +++-------- .../metrics_service_v2/transports/base.py | 16 +++-------- 11 files changed, 57 insertions(+), 145 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 19fd6dfeff29..a31bb96c1d88 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ 
click==8.1.2 -google-api-core==2.1.1 +google-api-core==2.7.0 googleapis-common-protos==1.56.0 jinja2==3.1.1 MarkupSafe==2.1.1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 6fdea6e03301..74748cd5d490 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -463,8 +463,7 @@ def sample_batch_get_assets_history(): self._client._transport.batch_get_assets_history, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -683,8 +682,7 @@ def sample_get_feed(): self._client._transport.get_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -787,8 +785,7 @@ def sample_list_feeds(): self._client._transport.list_feeds, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -984,8 +981,7 @@ def sample_delete_feed(): self._client._transport.delete_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -1180,8 +1176,7 @@ 
def sample_search_all_resources(): self._client._transport.search_all_resources, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=15.0, ), @@ -1367,8 +1362,7 @@ def sample_search_all_iam_policies(): self._client._transport.search_all_iam_policies, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=15.0, ), @@ -1462,7 +1456,7 @@ def sample_analyze_iam_policy(): self._client._transport.analyze_iam_policy, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=300.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index d0596429dfa5..023764abfa14 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -132,8 +132,7 @@ def _prep_wrapped_messages(self, client_info): self.batch_get_assets_history, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -149,8 +148,7 @@ def _prep_wrapped_messages(self, client_info): self.get_feed, default_retry=retries.Retry( 
initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -161,8 +159,7 @@ def _prep_wrapped_messages(self, client_info): self.list_feeds, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -178,8 +175,7 @@ def _prep_wrapped_messages(self, client_info): self.delete_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -190,8 +186,7 @@ def _prep_wrapped_messages(self, client_info): self.search_all_resources, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=15.0, ), @@ -202,8 +197,7 @@ def _prep_wrapped_messages(self, client_info): self.search_all_iam_policies, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=15.0, ), @@ -214,7 +208,7 @@ def _prep_wrapped_messages(self, client_info): self.analyze_iam_policy, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=300.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index d8f271b3bb67..b1655acd2cac 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -320,8 +320,7 @@ def sample_generate_access_token(): self._client._transport.generate_access_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -468,8 +467,7 @@ def sample_generate_id_token(): self._client._transport.generate_id_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -602,8 +600,7 @@ def sample_sign_blob(): self._client._transport.sign_blob, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -739,8 +736,7 @@ def sample_sign_jwt(): self._client._transport.sign_jwt, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index b6ffa602ad85..0d959ea0c534 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -119,8 +119,7 @@ def _prep_wrapped_messages(self, client_info): self.generate_access_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -131,8 +130,7 @@ def _prep_wrapped_messages(self, client_info): self.generate_id_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -143,8 +141,7 @@ def _prep_wrapped_messages(self, client_info): self.sign_blob, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -155,8 +152,7 @@ def _prep_wrapped_messages(self, client_info): self.sign_jwt, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 21a9edb9b2ad..17bd6354ca38 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1139,9 +1139,7 @@ def sample_list_sinks(): self._client._transport.list_sinks, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -1264,9 +1262,7 @@ def sample_get_sink(): self._client._transport.get_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -1554,9 +1550,7 @@ def sample_update_sink(): self._client._transport.update_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -1657,9 +1651,7 @@ def sample_delete_sink(): self._client._transport.delete_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -1765,9 +1757,7 @@ def sample_list_exclusions(): self._client._transport.list_exclusions, 
default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -1893,9 +1883,7 @@ def sample_get_exclusion(): self._client._transport.get_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -2273,9 +2261,7 @@ def sample_delete_exclusion(): self._client._transport.delete_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 88ba88234db2..0d04db4b63b1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -178,9 +178,7 @@ def _prep_wrapped_messages(self, client_info): self.list_sinks, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -191,9 +189,7 @@ def 
_prep_wrapped_messages(self, client_info): self.get_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -209,9 +205,7 @@ def _prep_wrapped_messages(self, client_info): self.update_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -222,9 +216,7 @@ def _prep_wrapped_messages(self, client_info): self.delete_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -235,9 +227,7 @@ def _prep_wrapped_messages(self, client_info): self.list_exclusions, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -248,9 +238,7 @@ def _prep_wrapped_messages(self, client_info): self.get_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -271,9 +259,7 @@ def _prep_wrapped_messages(self, client_info): self.delete_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index a01f02d4c705..8d444035c380 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -265,9 +265,7 @@ def sample_delete_log(): self._client._transport.delete_log, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -462,9 +460,7 @@ def sample_write_log_entries(): self._client._transport.write_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -611,9 +607,7 @@ def sample_list_log_entries(): self._client._transport.list_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -699,9 +693,7 @@ def sample_list_monitored_resource_descriptors(): self._client._transport.list_monitored_resource_descriptors, 
default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -813,9 +805,7 @@ def sample_list_logs(): self._client._transport.list_logs, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -913,9 +903,7 @@ def request_generator(): self._client._transport.tail_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=3600.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 54c0c48b7f3b..66c579dcd608 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -124,9 +124,7 @@ def _prep_wrapped_messages(self, client_info): self.delete_log, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -137,9 +135,7 @@ def 
_prep_wrapped_messages(self, client_info): self.write_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -150,9 +146,7 @@ def _prep_wrapped_messages(self, client_info): self.list_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -163,9 +157,7 @@ def _prep_wrapped_messages(self, client_info): self.list_monitored_resource_descriptors, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -176,9 +168,7 @@ def _prep_wrapped_messages(self, client_info): self.list_logs, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -189,9 +179,7 @@ def _prep_wrapped_messages(self, client_info): self.tail_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=3600.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 49ad0eb64e81..587435e79b4e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -265,9 +265,7 @@ def sample_list_log_metrics(): self._client._transport.list_log_metrics, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -387,9 +385,7 @@ def sample_get_log_metric(): self._client._transport.get_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -642,9 +638,7 @@ def sample_update_log_metric(): self._client._transport.update_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -737,9 +731,7 @@ def sample_delete_log_metric(): self._client._transport.delete_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 487dd0bdc0f5..fa09436f8c69 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -124,9 +124,7 @@ def _prep_wrapped_messages(self, client_info): self.list_log_metrics, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -137,9 +135,7 @@ def _prep_wrapped_messages(self, client_info): self.get_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -155,9 +151,7 @@ def _prep_wrapped_messages(self, client_info): self.update_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + core_exceptions.GoogleAPICallError, ), deadline=60.0, ), @@ -168,9 +162,7 @@ def _prep_wrapped_messages(self, client_info): self.delete_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, + 
core_exceptions.GoogleAPICallError, ), deadline=60.0, ), From 3b584f15d29b0bcec19c270c13a066a00a2139f1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 8 Apr 2022 01:57:54 +0200 Subject: [PATCH 0784/1339] chore(deps): update dependency google-api-core to v2.7.1 (#1265) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index a31bb96c1d88..58c0abd2f013 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.2 -google-api-core==2.7.0 +google-api-core==2.7.1 googleapis-common-protos==1.56.0 jinja2==3.1.1 MarkupSafe==2.1.1 From 58120cce162b3a8229a0912c72184c1f00a8f100 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 8 Apr 2022 11:44:49 -0400 Subject: [PATCH 0785/1339] fix: add missing import for Mapping (#1267) --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- 15 files changed, 15 
insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 4b6e26c29155..1fbe33409288 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -5,7 +5,7 @@ from collections import OrderedDict import os import re -from typing import Callable, Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Callable, Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources {% if service.any_deprecated %} import warnings diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 569f8d060716..f62e2d51e3f5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -5,7 +5,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming 
%}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources {% if service.any_deprecated %} import warnings diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 17ecfebbdc79..e7ea5a605f6b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -9,7 +9,7 @@ import functools {% endif %} import os import re -from typing import Dict, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources {% if service.any_deprecated %} import warnings diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 74748cd5d490..23c93ea3eba2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index eb762260afb3..8df7cf614dc1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index b1655acd2cac..20655c3cbc2d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index a8a378a6a196..35956c17d51d 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 17bd6354ca38..2e365dd15143 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 113ae1b900a5..1fce0f5de6ed 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,7 +16,7 @@ from 
collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 8d444035c380..0799272f3803 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 491ebddf909f..189af2c0a28d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Iterable, 
Iterator, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 587435e79b4e..7969894e996f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index ee1772ad120c..de527da50b0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 7051f5ff3145..5fab72341a66 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 098878cb70f6..987b3c542cbd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib From df0ff9df4569ca10bb66d988d192d2223b9ada78 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 8 Apr 2022 12:06:43 -0400 Subject: [PATCH 0786/1339] chore(main): release 0.64.0 (#1264) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 
deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 61f9d3e3d371..625e6db21c80 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog +## [0.64.0](https://github.com/googleapis/gapic-generator-python/compare/v0.63.8...v0.64.0) (2022-04-08) + + +### Features + +* full LRO for Extended Operations ([#1234](https://github.com/googleapis/gapic-generator-python/issues/1234)) ([4d1bccc](https://github.com/googleapis/gapic-generator-python/commit/4d1bccc965a6908e9b1aeaebf1327918f3e2042a)) + + +### Bug Fixes + +* add missing import for Mapping ([#1267](https://github.com/googleapis/gapic-generator-python/issues/1267)) ([f416622](https://github.com/googleapis/gapic-generator-python/commit/f416622c753a56036cf9ac1fa6eb818a6e557018)) + ### [0.63.8](https://github.com/googleapis/gapic-generator-python/compare/v0.63.7...v0.63.8) (2022-04-04) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f2b2ad1bf1fd..34109142d1cc 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.63.8" +version = "0.64.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 8b2b371b6633dc40eb4d3907c2360cdd991c972a Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Fri, 8 Apr 2022 16:04:12 -0700 Subject: [PATCH 0787/1339] feat: adds support for MixIns. (#1240) * feat: adds starter code for mixins. 
--- .../.github/workflows/tests.yaml | 4 +- packages/gapic-generator/BUILD.bazel | 1 + .../%sub/services/%service/_mixins.py.j2 | 397 ++++++ .../%sub/services/%service/client.py.j2 | 8 +- .../%service/transports/_mixins.py.j2 | 136 ++ .../services/%service/transports/base.py.j2 | 66 +- .../services/%service/transports/grpc.py.j2 | 8 +- .../%name_%version/%sub/_test_mixins.py.j2 | 592 +++++++++ .../%name_%version/%sub/test_%service.py.j2 | 244 +--- packages/gapic-generator/gapic/schema/api.py | 64 +- .../services/%service/_async_mixins.py.j2 | 398 ++++++ .../%sub/services/%service/_mixins.py.j2 | 397 ++++++ .../%sub/services/%service/async_client.py.j2 | 8 +- .../%sub/services/%service/client.py.j2 | 10 +- .../%service/transports/_mixins.py.j2 | 136 ++ .../services/%service/transports/base.py.j2 | 66 +- .../services/%service/transports/grpc.py.j2 | 9 +- .../%service/transports/grpc_asyncio.py.j2 | 8 +- .../gapic/templates/setup.py.j2 | 4 +- .../%name_%version/%sub/_test_mixins.py.j2 | 1183 +++++++++++++++++ .../%name_%version/%sub/test_%service.py.j2 | 501 +------ packages/gapic-generator/noxfile.py | 38 +- packages/gapic-generator/requirements.txt | 1 + packages/gapic-generator/setup.py | 5 +- .../asset_v1/services/asset_service/client.py | 3 + .../tests/integration/goldens/asset/setup.py | 1 + .../unit/gapic/asset_v1/test_asset_service.py | 2 +- .../services/iam_credentials/client.py | 3 + .../integration/goldens/credentials/setup.py | 1 + .../credentials_v1/test_iam_credentials.py | 2 +- .../services/config_service_v2/client.py | 3 + .../services/logging_service_v2/client.py | 3 + .../services/metrics_service_v2/client.py | 3 + .../integration/goldens/logging/setup.py | 1 + .../logging_v2/test_config_service_v2.py | 2 +- .../logging_v2/test_logging_service_v2.py | 2 +- .../logging_v2/test_metrics_service_v2.py | 2 +- .../redis_v1/services/cloud_redis/client.py | 3 + .../tests/integration/goldens/redis/setup.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 2 
+- .../tests/unit/schema/test_api.py | 179 +++ 41 files changed, 3752 insertions(+), 745 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 6983714b61f7..b413027bbbb6 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -132,10 +132,12 @@ jobs: strategy: matrix: python: ["3.6", "3.7", "3.8", "3.9", "3.10"] - variant: ['', _alternative_templates] + variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] exclude: - python: "3.6" variant: _alternative_templates + - python: "3.6" + variant: _alternative_templates_mixins runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 38ae745d0d3e..80308f33f594 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -69,6 +69,7 @@ py_binary( requirement("MarkupSafe"), requirement("pypandoc"), requirement("PyYAML"), + 
requirement("grpc-google-iam-v1"), ] + select({ ":gapic_gen_python_3_6": [requirement("dataclasses")], "//conditions:default": [], diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 new file mode 100644 index 000000000000..b398283458ba --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -0,0 +1,397 @@ + {# IAM mixins #} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + {% endif %} + + {# Location mixin #} + + {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} + def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + {% if "ListLocations" in api.mixin_api_methods %} + def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 1fbe33409288..a00aa8f25631 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -32,10 +32,13 @@ except AttributeError: # pragma: NO COVER {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO {% if 'grpc' in opts.transport %} @@ -498,6 +501,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {{ "\n" }} {% endfor %} + {% include '%namespace/%name/%version/%sub/services/%service/_mixins.py.j2' %} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} def set_iam_policy( self, diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 new file mode 100644 index 000000000000..d2842263a425 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 @@ -0,0 +1,136 @@ + {% if "grpc" in opts.transport %} + + {% if api.has_location_mixin %} + + {% if "ListLocations" in api.mixin_api_methods %} + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + {% endif %} + + {% if "GetLocation" in api.mixin_api_methods %} + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + {% endif %} + + {% endif %} {# Location #} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + + {% if "SetIamPolicy" in api.mixin_api_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. 
+ Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + {% endif %} + {% endif %} {# IAM #} + + {% endif %} {# grpc is transport #} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index ea01ac991bc2..cb1bf8eaa0a0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -22,10 +22,13 @@ from google.oauth2 import service_account # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} try: @@ -169,7 +172,68 @@ class {{ service.name }}Transport(abc.ABC): raise NotImplementedError() {% endfor %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + {% endif %} + {% endif %} + + {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "ListLocations" in api.mixin_api_methods %} + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + {% endif %} + {% endif %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} @property def set_iam_policy( diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index cabc67e443e3..fdb71fc98b24 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -21,10 +21,13 @@ import grpc # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -278,6 +281,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return self._stubs['{{ method.transport_safe_name|snake_case }}'] {% endfor %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} @property def set_iam_policy( @@ -362,6 +366,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def close(self): self.grpc_channel.close() + {% include '%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2' %} + __all__ = ( '{{ service.name }}GrpcTransport', ) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 new file mode 100644 index 000000000000..a9f7aa7b9ec7 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -0,0 +1,592 @@ +{% if api.has_location_mixin and 'grpc' in opts.transport %} + +{# ListLocation #} + +{% if "ListLocations" in api.mixin_api_methods %} +def test_list_locations(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + +{% endif %} {# ListLocations #} + +{# GetLocation #} + +{% if "GetLocation" in api.mixin_api_methods %} + +def test_get_location(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = locations_pb2.GetLocationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+        response = client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+def test_get_location_field_headers():
+    client = {{ service.client_name }}(
+        credentials=ga_credentials.AnonymousCredentials())
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = {{ service.client_name }}(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + +{% endif %} {# GetLocation #} + +{% endif %} {# location_mixin #} + +{# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} +{% if opts.add_iam_methods and 'grpc' in opts.transport %} + +def test_set_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_get_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +{% endif %} + +{% if (not opts.add_iam_methods and api.has_iam_mixin) and 'grpc' in opts.transport %} + +{% if "SetIamPolicy" in api.mixin_api_methods %} +def test_set_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() +{% endif %} + +{% if "GetIamPolicy" in api.mixin_api_methods %} +def test_get_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_get_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() +{% endif %} + +{% if "TestIamPermissions" in api.mixin_api_methods %} +def test_test_iam_permissions(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() +{% endif %} + +{% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e483effc140a..52fd7959a829 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -43,6 +43,9 @@ from google.longrunning import operations_pb2 from google.protobuf import json_format {% endif %}{# rest transport #} {% endif %}{# lro #} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 +{% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types @@ -51,7 
+54,8 @@ from google.api_core import gapic_v1 {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} -{% if opts.add_iam_methods %} +{# TODO: Remove once https://github.com/googleapis/gapic-generator-python/pull/1240 is merged #} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -1677,11 +1681,16 @@ def test_{{ service.name|snake_case }}_base_transport(): {% for method in service.methods.values() %} '{{ method.transport_safe_name|snake_case }}', {% endfor %} - {% if opts.add_iam_methods %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} + {% if opts.add_iam_methods or api.has_iam_mixin %} 'set_iam_policy', 'get_iam_policy', 'test_iam_permissions', {% endif %} + {% if api.has_location_mixin %} + 'get_location', + 'list_locations', + {% endif %} ) for method in methods: with pytest.raises(NotImplementedError): @@ -2125,236 +2134,7 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - -{% if opts.add_iam_methods and 'grpc' in opts.transport %} -def test_set_iam_policy(transport: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_set_iam_policy_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_set_iam_policy_from_dict(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_get_iam_policy(transport: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_test_iam_permissions(transport: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -{% endif %} +{% include 'tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2' %} def test_transport_close(): transports = { diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 5d3f6b87bf37..afed38045590 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -33,12 +33,15 @@ from google.cloud import extended_operations_pb2 as ex_ops_pb2 # type: ignore from google.gapic.metadata import gapic_metadata_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import descriptor_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import descriptor_pb2 # type: ignore from google.protobuf.json_format import MessageToJson from google.protobuf.json_format import ParseDict - +from google.protobuf.descriptor import ServiceDescriptor import grpc # type: ignore - +from google.protobuf.descriptor_pb2 import MethodDescriptorProto +from google.api import annotations_pb2 # type: ignore from gapic.schema import metadata from gapic.schema import wrappers from gapic.schema import naming as api_naming @@ -512,6 +515,61 @@ def get_custom_operation_service(self, method: "wrappers.Method") -> "wrappers.S return op_serv + @cached_property + def mixin_api_methods(self) -> Dict[str, MethodDescriptorProto]: + methods: Dict[str, MethodDescriptorProto] = {} + if self.has_location_mixin: + methods = {**methods, ** + self._get_methods_from_service(locations_pb2)} + if not self._has_iam_overrides and self.has_iam_mixin: + methods = {**methods, ** + self._get_methods_from_service(iam_policy_pb2)} + # For LRO, expose 
operations client instead. + return methods + + @cached_property + def has_location_mixin(self) -> bool: + return len(list(filter(lambda api: api.name == "google.cloud.location.Locations", self.service_yaml_config.apis))) > 0 + + @cached_property + def has_iam_mixin(self) -> bool: + return len(list(filter(lambda api: api.name == "google.iam.v1.IAMPolicy", self.service_yaml_config.apis))) > 0 + + @cached_property + def has_operations_mixin(self) -> bool: + return len(list(filter(lambda api: api.name == "google.longrunning.Operations", self.service_yaml_config.apis))) > 0 + + @cached_property + def _has_iam_overrides(self) -> bool: + if not self.has_iam_mixin: + return False + iam_mixin_methods: Dict[str, MethodDescriptorProto] = self._get_methods_from_service( + iam_policy_pb2) + for (_, s) in self.services.items(): + for m_name in iam_mixin_methods: + if m_name in s.methods: + return True + return False + + def _get_methods_from_service(self, service_pb) -> Dict[str, MethodDescriptorProto]: + services = service_pb.DESCRIPTOR.services_by_name + methods = {} + methods_to_generate = {} + for service_name in services: + service: ServiceDescriptor = services[service_name] + for method in service.methods: + fqn = "{}.{}.{}".format( + service_pb.DESCRIPTOR.package, service.name, method.name) + methods[fqn] = method + for rule in self.service_yaml_config.http.rules: + if rule.selector in methods: + m = methods[rule.selector] + x = descriptor_pb2.MethodDescriptorProto() + m.CopyToProto(x) + x.options.Extensions[annotations_pb2.http].CopyFrom(rule) + methods_to_generate[x.name] = x + return methods_to_generate + def get_extended_operations_services(self, service) -> Set["wrappers.Service"]: """Return a set of all the extended operation services used by the input service. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 new file mode 100644 index 000000000000..aa2a3c561c49 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -0,0 +1,398 @@ + {# IAM mixins #} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + async def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + async def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + async def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + {% endif %} + + {# Location mixin #} + + {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} + async def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + {% if "ListLocations" in api.mixin_api_methods %} + async def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + {% endif %} + diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 new file mode 100644 index 000000000000..b398283458ba --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -0,0 +1,397 @@ + {# IAM mixins #} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + {% endif %} + + {# Location mixin #} + + {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} + def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + + {% if "ListLocations" in api.mixin_api_methods %} + def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + {% endif %} + {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index f62e2d51e3f5..913759067d45 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -29,10 +29,14 @@ except AttributeError: # pragma: NO COVER {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} -{% if opts.add_iam_methods %} +{# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} @@ -374,7 +378,9 @@ class {{ service.async_client_name }}: {{ "\n" }} {% endfor %} + {% include '%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2' %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} async def set_iam_policy( self, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index e7ea5a605f6b..45a437168be2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -39,10 +39,14 @@ except AttributeError: # pragma: NO COVER {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} -{% if opts.add_iam_methods %} +{# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO {% if 'grpc' in opts.transport %} @@ -376,6 +380,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ self.transport.close() + {% include '%namespace/%name_%version/%sub/services/%service/_mixins.py.j2' %} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} def set_iam_policy( self, @@ -655,7 +662,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return response {% endif %} - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 new file mode 100644 index 000000000000..d2842263a425 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -0,0 +1,136 @@ + {% if "grpc" in opts.transport %} + + {% if api.has_location_mixin %} + + {% if "ListLocations" in api.mixin_api_methods %} + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + {% endif %} + + {% if "GetLocation" in api.mixin_api_methods %} + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + {% endif %} + + {% endif %} {# Location #} + + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + + {% if "SetIamPolicy" in api.mixin_api_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. 
+ Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + {% endif %} + {% endif %} {# IAM #} + + {% endif %} {# grpc is transport #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 7b36a39a4cec..99b35fd5f6c2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -22,10 +22,13 @@ from google.oauth2 import service_account # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} {% filter sort_lines %} {% for operations_service in api.get_extended_operations_services(service) %} @@ -178,7 +181,68 @@ class {{ service.name }}Transport(abc.ABC): raise NotImplementedError() {% endfor %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "GetIamPolicy" in api.mixin_api_methods %} + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "TestIamPermissions" in api.mixin_api_methods %} + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + {% endif %} + {% endif %} + + {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "ListLocations" in api.mixin_api_methods %} + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + {% endif %} + {% endif %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} @property def set_iam_policy( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index dfaa1edf3db0..dfcbf833a24b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -21,10 +21,13 @@ import grpc # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO @@ -278,6 +281,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): return self._stubs['{{ method.transport_safe_name|snake_case }}'] {% endfor %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} @property def set_iam_policy( @@ -362,6 +366,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def close(self): self.grpc_channel.close() + {% include '%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2' %} + + @property def kind(self) -> str: return "grpc" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 9c9e1e164c68..b29d68495cb6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -21,10 +21,13 @@ from grpc.experimental import aio # type: ignore {{ method.input.ident.python_import }} {{ method.output.ident.python_import }} {% endfor %} -{% if opts.add_iam_methods %} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .grpc import {{ service.name }}GrpcTransport @@ -279,6 +282,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs['{{ method.transport_safe_name|snake_case }}'] {% endfor %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if opts.add_iam_methods %} @property def set_iam_policy( @@ -364,6 +368,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def close(self): return self.grpc_channel.close() + {% include '%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2' %} + __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 939b85745dc6..231a31082d29 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -37,8 +37,10 @@ setuptools.setup( 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', {% endif %} 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', - {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} + {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', {% endif %} ), diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 new file mode 100644 index 000000000000..134bdfe1bc7a --- /dev/null +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -0,0 +1,1183 @@ +{% if api.has_location_mixin and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} + +{# ListLocation #} + +{% if "ListLocations" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): +{% else %} +def test_list_locations(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call.
+ {% if mode == "" %} + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): +{% else %} +def test_list_locations_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + {% if mode == "" %} + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): +{% else %} +def test_list_locations_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + {% endif %} + call.assert_called() +{% endfor %} + +{% endif %} {# ListLocations #} + +{# GetLocation #} + +{% if "GetLocation" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): +{% else %} +def test_get_location(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = locations_pb2.Location() + response = client.get_location(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): +{% else %} +def test_get_location_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials()) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials() + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + {% if mode == "" %} + call.return_value = locations_pb2.Location() + + client.get_location(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): +{% else %} +def test_get_location_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations/abc", + } + ) + {% endif %} + call.assert_called() +{% endfor %} + +{% endif %} {# GetLocation #} + +{% endif %} {# location_mixin #} + +{# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged.
#} +{% if opts.add_iam_methods and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): +{% else %} +def test_set_iam_policy(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + {% else %} + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): +{% else %} +def test_set_iam_policy_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + {% if mode == "" %} + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +{% endfor %} + +def test_set_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +{% endif %} + +{% if (not opts.add_iam_methods and api.has_iam_mixin) and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} + +{% if "SetIamPolicy" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): +{% else %} +def test_set_iam_policy(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + {% else %} + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): +{% else %} +def test_set_iam_policy_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + {% if mode == "" %} + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +{% endfor %} + +def test_set_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() +{% endif %} + +{% if "GetIamPolicy" in api.mixin_api_methods %} +def test_get_iam_policy(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() +{% endif %} + +{% if "TestIamPermissions" in api.mixin_api_methods %} +def test_test_iam_permissions(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() +{% endif %} +{% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1f8337a419c7..1a6e3c00cb41 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -50,6 +50,9 @@ from google.longrunning import operations_pb2 from google.protobuf import json_format {% endif %}{# rest transport #} {% endif %}{# lro #} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 +{% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types @@ -58,7 +61,8 @@ from google.api_core import gapic_v1 {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} -{% if opts.add_iam_methods %} +{# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} +{% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -757,11 +761,16 @@ def test_{{ service.name|snake_case }}_base_transport(): {% for method in service.methods.values() %} '{{ method.transport_safe_name|snake_case }}', {% endfor %} - {% if opts.add_iam_methods %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} + {% if opts.add_iam_methods or api.has_iam_mixin %} 'set_iam_policy', 'get_iam_policy', 'test_iam_permissions', {% endif %} + {% if api.has_location_mixin %} + 'get_location', + 'list_locations', + {% endif %} ) for method in methods: with pytest.raises(NotImplementedError): @@ -1238,492 +1247,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - -{% if opts.add_iam_methods and 'grpc' in opts.transport %} -def test_set_iam_policy(transport: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.set_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_set_iam_policy_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.set_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_set_iam_policy_from_dict(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_get_iam_policy(transport: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_test_iam_permissions(transport: str = "grpc"): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -{% endif %} - {% if 'grpc' in opts.transport %} @pytest.mark.asyncio async def test_transport_close_async(): @@ -1737,6 +1260,8 @@ async def test_transport_close_async(): close.assert_called_once() {% endif %} +{% include 'tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2' %} + def test_transport_close(): transports = { {% if 'rest' in opts.transport %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 0982b167ee5d..88cde0d74af1 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -165,7 +165,8 @@ def fragment_alternative_templates(session): @contextmanager def showcase_library( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = () + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + include_service_yaml=False, ): """Install the generated library into the session for showcase tests.""" @@ -198,11 +199,25 @@ def showcase_library( external=True, silent=True, ) - + if include_service_yaml: + session.run( + "curl", + "https://github.com/googleapis/gapic-showcase/releases/" + f"download/v{showcase_version}/" + f"showcase_v1beta1.yaml", + "-L", + "--output", + path.join(tmp_dir, "showcase_v1beta1.yaml"), + external=True, + silent=True, + ) # Write out a client library for Showcase. 
template_opt = f"python-gapic-templates={templates}" opts = "--python_gapic_opt=" - opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest")) + if include_service_yaml: + opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest", f"service-yaml={tmp_dir}/showcase_v1beta1.yaml")) + else: + opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest",)) cmd_tup = ( "python", "-m", @@ -349,6 +364,23 @@ def showcase_unit_add_iam_methods(session): run_showcase_unit_tests(session, fail_under=100) +@nox.session(python=ALL_PYTHON) +def showcase_unit_mixins(session): + with showcase_library(session, include_service_yaml=True) as lib: + session.chdir(lib) + run_showcase_unit_tests(session) + + +@nox.session(python=ALL_PYTHON[1:]) # Do not test 3.6 +def showcase_unit_alternative_templates_mixins(session): + with showcase_library( + session, templates=ADS_TEMPLATES, other_opts=("old-naming",), + include_service_yaml=True + ) as lib: + session.chdir(lib) + run_showcase_unit_tests(session) + + @nox.session(python=NEWEST_PYTHON) def showcase_mypy( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 58c0abd2f013..8ad333d616b1 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,3 +8,4 @@ pypandoc==1.7.5 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==62.0.0 +grpc-google-iam-v1==0.12.3 diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 34109142d1cc..e7b15e70f39e 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -47,13 +47,14 @@ # See https://github.com/pallets/click/issues/2227 "click >= 6.7,!=8.1.0", "google-api-core >= 2.3.2", - "googleapis-common-protos >= 1.54.0", + "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", 
"protobuf >= 3.18.0", "pypandoc >= 1.4", "PyYAML >= 5.1.1", - "dataclasses < 0.8; python_version < '3.7'" + "dataclasses < 0.8; python_version < '3.7'", + "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ), extras_require={':python_version<"3.7"': ("dataclasses >= 0.4, < 0.8",),}, tests_require=("pyfakefs >= 3.6",), diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 8df7cf614dc1..3012ce544ef8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1765,6 +1765,9 @@ def __exit__(self, type, value, traceback): + + + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 3133564d4db9..aa33abcc546e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -39,6 +39,7 @@ install_requires=( 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 840d32681ce9..4a987e861e44 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -4171,7 +4171,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - @pytest.mark.asyncio async def test_transport_close_async(): client = AssetServiceAsyncClient( @@ -4183,6 +4182,7 @@ async def test_transport_close_async(): close.assert_not_called() close.assert_called_once() + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 35956c17d51d..135e1cc6cc0a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -941,6 +941,9 @@ def __exit__(self, type, value, traceback): + + + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 4756b3dc163d..3a648c3910fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -39,6 +39,7 @@ install_requires=( 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), python_requires='>=3.6', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 587f6c9ef585..f6a1dad6418a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -2068,7 +2068,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - @pytest.mark.asyncio async def test_transport_close_async(): client = IAMCredentialsAsyncClient( @@ -2080,6 +2079,7 @@ async def test_transport_close_async(): close.assert_not_called() close.assert_called_once() + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 1fce0f5de6ed..b4e629c724dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -2690,6 +2690,9 @@ def __exit__(self, type, value, traceback): + + + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 189af2c0a28d..1c5dbe13537d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1084,6 +1084,9 @@ def __exit__(self, type, value, traceback): + + + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index de527da50b0d..b5fab8cf0575 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -933,6 +933,9 @@ def __exit__(self, type, value, traceback): + + + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 5622fedf4398..aef2423cf351 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -39,6 +39,7 @@ install_requires=( 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), python_requires='>=3.6', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index a5562f81d382..b385a5a02341 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -6575,7 +6575,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - @pytest.mark.asyncio async def test_transport_close_async(): client = ConfigServiceV2AsyncClient( @@ -6587,6 +6586,7 @@ async def test_transport_close_async(): close.assert_not_called() close.assert_called_once() + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 483394f936dc..93bb1808e608 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2617,7 +2617,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - @pytest.mark.asyncio async def test_transport_close_async(): client = LoggingServiceV2AsyncClient( @@ -2629,6 +2628,7 @@ async def test_transport_close_async(): close.assert_not_called() close.assert_called_once() + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e04e14481038..4e0e72937cbb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2468,7 +2468,6 @@ def 
test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsServiceV2AsyncClient( @@ -2480,6 +2479,7 @@ async def test_transport_close_async(): close.assert_not_called() close.assert_called_once() + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 987b3c542cbd..002e187b219f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1548,6 +1548,9 @@ def __exit__(self, type, value, traceback): + + + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index bcb797f25299..1cbd3b83ac4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -39,6 +39,7 @@ install_requires=( 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), python_requires='>=3.6', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index fa213d081ce9..1273022f176e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -3484,7 +3484,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) - @pytest.mark.asyncio async def test_transport_close_async(): client = CloudRedisAsyncClient( @@ -3496,6 +3495,7 @@ async def test_transport_close_async(): close.assert_not_called() close.assert_called_once() + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index ab7178670c27..bb76f1420492 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -19,6 +19,7 @@ import pytest +from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import resource_pb2 from google.api_core import exceptions @@ -27,6 +28,8 @@ from google.longrunning import operations_pb2 from google.protobuf import descriptor_pb2 from google.protobuf.json_format import MessageToJson +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore from gapic.schema import api from gapic.schema import imp @@ -39,6 +42,7 @@ make_field_pb2, make_file_pb2, make_message_pb2, + make_method, make_naming, make_oneof_pb2, ) @@ -82,6 +86,7 @@ def test_api_build(): ), ), ),), + ), ) @@ -2097,3 +2102,177 @@ def test_extended_operations_lro_operation_service_no_polling_method(): with pytest.raises(ValueError): api_schema.get_custom_operation_service(initial_method) + + +def methods_from_service(service_pb, name: str): + service = service_pb.DESCRIPTOR.services_by_name[name] + res = {} + for m in service.methods: + x = descriptor_pb2.MethodDescriptorProto() + m.CopyToProto(x) + res[x.name] = x + return res + + +def test_mixin_api_methods_locations(): + fd = ( + make_file_pb2( + name='example.proto', + 
package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()),), + ),) + opts = Options(service_yaml_config={ + 'apis': [ + { + 'name': 'google.cloud.location.Locations' + } + ], + 'http': { + 'rules': [ + { + 'selector': 'google.cloud.location.Locations.ListLocations', + 'get': '/v1/{name=examples/*}/*', + 'body': '*' + }, + { + 'selector': 'google.cloud.location.Locations.GetLocation', + 'get': '/v1/{name=examples/*}/*', + 'body': '*' + }, + { + 'selector': 'google.example.v1.Example', + }] + } + }) + ms = methods_from_service(locations_pb2, 'Locations') + assert len(ms) == 2 + m1 = ms['ListLocations'] + m1.options.ClearExtension(annotations_pb2.http) + m1.options.Extensions[annotations_pb2.http].selector = 'google.cloud.location.Locations.ListLocations' + m1.options.Extensions[annotations_pb2.http].get = '/v1/{name=examples/*}/*' + m1.options.Extensions[annotations_pb2.http].body = '*' + m2 = ms['GetLocation'] + m2.options.ClearExtension(annotations_pb2.http) + m2.options.Extensions[annotations_pb2.http].selector = 'google.cloud.location.Locations.GetLocation' + m2.options.Extensions[annotations_pb2.http].get = '/v1/{name=examples/*}/*' + m2.options.Extensions[annotations_pb2.http].body = '*' + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + assert api_schema.mixin_api_methods == { + 'ListLocations': m1, 'GetLocation': m2} + + +def test_mixin_api_methods_iam(): + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()), + make_message_pb2(name='ExampleResponse', fields=())), + services=(descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='FooMethod', + # Input and output types don't matter. 
+ input_type='google.example.v1.ExampleRequest', + output_type='google.example.v1.ExampleResponse', + ), + ), + ),), + ),) + r1 = { + 'selector': 'google.iam.v1.IAMPolicy.SetIamPolicy', + 'post': '/v1/{resource=examples/*}/*', + 'body': '*' + } + r2 = { + 'selector': 'google.iam.v1.IAMPolicy.GetIamPolicy', + 'get': '/v1/{resource=examples/*}/*', + 'body': '*' + } + r3 = { + 'selector': 'google.iam.v1.IAMPolicy.TestIamPermissions', + 'post': '/v1/{resource=examples/*}/*', + 'body': '*' + } + opts = Options(service_yaml_config={ + 'apis': [ + { + 'name': 'google.iam.v1.IAMPolicy' + } + ], + 'http': { + 'rules': [r1, r2, r3] + } + }) + ms = methods_from_service(iam_policy_pb2, 'IAMPolicy') + assert len(ms) == 3 + m1 = ms['SetIamPolicy'] + m1.options.ClearExtension(annotations_pb2.http) + m1.options.Extensions[annotations_pb2.http].selector = r1['selector'] + m1.options.Extensions[annotations_pb2.http].post = r1['post'] + m1.options.Extensions[annotations_pb2.http].body = r1['body'] + m2 = ms['GetIamPolicy'] + m2.options.ClearExtension(annotations_pb2.http) + m2.options.Extensions[annotations_pb2.http].selector = r2['selector'] + m2.options.Extensions[annotations_pb2.http].get = r2['get'] + m2.options.Extensions[annotations_pb2.http].body = r2['body'] + m3 = ms['TestIamPermissions'] + m3.options.ClearExtension(annotations_pb2.http) + m3.options.Extensions[annotations_pb2.http].selector = r3['selector'] + m3.options.Extensions[annotations_pb2.http].post = r3['post'] + m3.options.Extensions[annotations_pb2.http].body = r3['body'] + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + assert api_schema.mixin_api_methods == { + 'SetIamPolicy': m1, 'GetIamPolicy': m2, 'TestIamPermissions': m3} + assert not api_schema.has_operations_mixin + + +def test_mixin_api_methods_iam_overrides(): + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()), + 
make_message_pb2(name='ExampleResponse', fields=()), + ), + services=(descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='TestIamPermissions', + # Input and output types don't matter. + input_type='google.example.v1.ExampleRequest', + output_type='google.example.v1.ExampleResponse', + ), + ), + ),), + ), + ) + r1 = { + 'selector': 'google.iam.v1.IAMPolicy.SetIamPolicy', + 'post': '/v1/{resource=examples/*}/*', + 'body': '*' + } + r2 = { + 'selector': 'google.iam.v1.IAMPolicy.GetIamPolicy', + 'get': '/v1/{resource=examples/*}/*', + 'body': '*' + } + r3 = { + 'selector': 'google.iam.v1.IAMPolicy.TestIamPermissions', + 'post': '/v1/{resource=examples/*}/*', + 'body': '*' + } + opts = Options(service_yaml_config={ + 'apis': [ + { + 'name': 'google.iam.v1.IAMPolicy' + } + ], + 'http': { + 'rules': [r1, r2, r3] + } + }) + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + assert api_schema.mixin_api_methods == {} From 00d348808bb76a4ba43d49b621c48a8208edb215 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 9 Apr 2022 18:54:10 +0200 Subject: [PATCH 0788/1339] chore(deps): update dependency grpc-google-iam-v1 to v0.12.4 (#1270) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8ad333d616b1..5a0d202f0573 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,4 +8,4 @@ pypandoc==1.7.5 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==62.0.0 -grpc-google-iam-v1==0.12.3 +grpc-google-iam-v1==0.12.4 From 5653a46b6e6358507d0724726b0210287529a718 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Sat, 9 Apr 2022 15:56:57 -0700 Subject: [PATCH 0789/1339] chore: remove extra newline above snippet code block (#1271) * fix: remove extra newline 
above snippet code block * fix indent * update tests/integration/goldens * fix tests/integration/goldens import indents Co-authored-by: Anthonios Partheniou --- .../%sub/services/%service/_client_macros.j2 | 4 +- .../%sub/services/%service/async_client.py.j2 | 2 +- .../services/asset_service/async_client.py | 9 --- .../asset_v1/services/asset_service/client.py | 33 ++++------- .../services/iam_credentials/async_client.py | 4 -- .../services/iam_credentials/client.py | 12 ++-- .../config_service_v2/async_client.py | 13 ---- .../services/config_service_v2/client.py | 59 ++++++++----------- .../logging_service_v2/async_client.py | 6 -- .../services/logging_service_v2/client.py | 18 ++---- .../services/metrics_service_v2/client.py | 10 ++-- .../services/cloud_redis/async_client.py | 8 --- .../redis_v1/services/cloud_redis/client.py | 26 +++----- 13 files changed, 62 insertions(+), 142 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index d384fe86ea09..e5fc01d5a592 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -36,13 +36,13 @@ {% else %} ) -> Iterable[{{ method.client_output.ident }}]: {% endif %} - r"""{{ method.meta.doc|rst(width=72, indent=8) }} + r"""{{ method.meta.doc|rst(width=72, indent=8)|trim }} {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} {% if snippet is not none %} .. 
code-block:: python - {{ snippet.full_snippet|indent(width=12, first=True) }} +{{ snippet.full_snippet|indent(width=12, first=True) }} {% endif %} {% endwith %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 913759067d45..96dd56db5d29 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -209,7 +209,7 @@ class {{ service.async_client_name }}: {% else %} ) -> Awaitable[AsyncIterable[{{ method.client_output_async.ident }}]]: {% endif %} - r"""{{ method.meta.doc|rst(width=72, indent=8) }} + r"""{{ method.meta.doc|rst(width=72, indent=8)|trim }} {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} {% if snippet is not none %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 23c93ea3eba2..f80edc7e5116 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -210,7 +210,6 @@ async def export_assets(self, the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. - .. code-block:: python from google.cloud import asset_v1 @@ -307,7 +306,6 @@ async def list_assets(self, r"""Lists assets with time and resource types and returns paged results in response. - .. 
code-block:: python from google.cloud import asset_v1 @@ -421,7 +419,6 @@ async def batch_get_assets_history(self, specified asset does not exist, this API returns an INVALID_ARGUMENT error. - .. code-block:: python from google.cloud import asset_v1 @@ -502,7 +499,6 @@ async def create_feed(self, project/folder/organization to listen to its asset updates. - .. code-block:: python from google.cloud import asset_v1 @@ -720,7 +716,6 @@ async def list_feeds(self, r"""Lists all asset feeds in a parent project/folder/organization. - .. code-block:: python from google.cloud import asset_v1 @@ -1020,7 +1015,6 @@ async def search_all_resources(self, the ``cloudasset.assets.searchAllResources`` permission on the desired scope, otherwise the request will be rejected. - .. code-block:: python from google.cloud import asset_v1 @@ -1226,7 +1220,6 @@ async def search_all_iam_policies(self, ``cloudasset.assets.searchAllIamPolicies`` permission on the desired scope, otherwise the request will be rejected. - .. code-block:: python from google.cloud import asset_v1 @@ -1408,7 +1401,6 @@ async def analyze_iam_policy(self, r"""Analyzes IAM policies to answer which identities have what accesses on which resources. - .. code-block:: python from google.cloud import asset_v1 @@ -1503,7 +1495,6 @@ async def analyze_iam_policy_longrunning(self, to poll the operation result. The metadata contains the request to help callers to map responses to requests. - .. 
code-block:: python from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 3012ce544ef8..def497d49f08 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -406,10 +406,9 @@ def export_assets(self, the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_export_assets(): # Create a client @@ -504,10 +503,9 @@ def list_assets(self, r"""Lists assets with time and resource types and returns paged results in response. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_list_assets(): # Create a client @@ -618,10 +616,9 @@ def batch_get_assets_history(self, specified asset does not exist, this API returns an INVALID_ARGUMENT error. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_batch_get_assets_history(): # Create a client @@ -694,10 +691,9 @@ def create_feed(self, project/folder/organization to listen to its asset updates. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_create_feed(): # Create a client @@ -807,7 +803,7 @@ def get_feed(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_get_feed(): # Create a client @@ -906,10 +902,9 @@ def list_feeds(self, r"""Lists all asset feeds in a parent project/folder/organization. - .. 
code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_list_feeds(): # Create a client @@ -1004,7 +999,7 @@ def update_feed(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_update_feed(): # Create a client @@ -1108,7 +1103,7 @@ def delete_feed(self, .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_delete_feed(): # Create a client @@ -1194,10 +1189,9 @@ def search_all_resources(self, the ``cloudasset.assets.searchAllResources`` permission on the desired scope, otherwise the request will be rejected. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_search_all_resources(): # Create a client @@ -1394,10 +1388,9 @@ def search_all_iam_policies(self, ``cloudasset.assets.searchAllIamPolicies`` permission on the desired scope, otherwise the request will be rejected. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_search_all_iam_policies(): # Create a client @@ -1570,10 +1563,9 @@ def analyze_iam_policy(self, r"""Analyzes IAM policies to answer which identities have what accesses on which resources. - .. code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_analyze_iam_policy(): # Create a client @@ -1660,10 +1652,9 @@ def analyze_iam_policy_longrunning(self, to poll the operation result. The metadata contains the request to help callers to map responses to requests. - .. 
code-block:: python - from google.cloud import asset_v1 + from google.cloud import asset_v1 def sample_analyze_iam_policy_longrunning(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 20655c3cbc2d..ae3f0dce2a4a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -208,7 +208,6 @@ async def generate_access_token(self, r"""Generates an OAuth 2.0 access token for a service account. - .. code-block:: python from google.iam import credentials_v1 @@ -361,7 +360,6 @@ async def generate_id_token(self, r"""Generates an OpenID Connect ID token for a service account. - .. code-block:: python from google.iam import credentials_v1 @@ -507,7 +505,6 @@ async def sign_blob(self, r"""Signs a blob using a service account's system-managed private key. - .. code-block:: python from google.iam import credentials_v1 @@ -640,7 +637,6 @@ async def sign_jwt(self, r"""Signs a JWT using a service account's system-managed private key. - .. 
code-block:: python from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 135e1cc6cc0a..dd070bbe0eb6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -395,10 +395,9 @@ def generate_access_token(self, r"""Generates an OAuth 2.0 access token for a service account. - .. code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_generate_access_token(): # Create a client @@ -542,10 +541,9 @@ def generate_id_token(self, r"""Generates an OpenID Connect ID token for a service account. - .. code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_generate_id_token(): # Create a client @@ -682,10 +680,9 @@ def sign_blob(self, r"""Signs a blob using a service account's system-managed private key. - .. code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_sign_blob(): # Create a client @@ -809,10 +806,9 @@ def sign_jwt(self, r"""Signs a JWT using a service account's system-managed private key. - .. 
code-block:: python - from google.iam import credentials_v1 + from google.iam import credentials_v1 def sample_sign_jwt(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 2e365dd15143..16ac65406c70 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -391,7 +391,6 @@ async def create_bucket(self, entries. Once a bucket has been created, the region cannot be changed. - .. code-block:: python from google.cloud import logging_v2 @@ -474,7 +473,6 @@ async def update_bucket(self, A buckets region may not be modified after it is created. - .. code-block:: python from google.cloud import logging_v2 @@ -548,7 +546,6 @@ async def delete_bucket(self, state. After 7 days, the bucket will be purged and all logs in the bucket will be permanently deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -611,7 +608,6 @@ async def undelete_bucket(self, r"""Undeletes a bucket. A bucket that has been deleted may be undeleted within the grace period of 7 days. - .. code-block:: python from google.cloud import logging_v2 @@ -855,7 +851,6 @@ async def create_view(self, r"""Creates a view over logs in a bucket. A bucket may contain a maximum of 50 views. - .. code-block:: python from google.cloud import logging_v2 @@ -931,7 +926,6 @@ async def update_view(self, r"""Updates a view. This method replaces the following fields in the existing view with values from the new view: ``filter``. - .. 
code-block:: python from google.cloud import logging_v2 @@ -1304,7 +1298,6 @@ async def create_sink(self, permitted to write to the destination. A sink can export log entries only from the resource owning the sink. - .. code-block:: python from google.cloud import logging_v2 @@ -1435,7 +1428,6 @@ async def update_sink(self, The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. - .. code-block:: python from google.cloud import logging_v2 @@ -1588,7 +1580,6 @@ async def delete_sink(self, r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -1924,7 +1915,6 @@ async def create_exclusion(self, can be excluded. You can have up to 10 exclusions in a resource. - .. code-block:: python from google.cloud import logging_v2 @@ -2055,7 +2045,6 @@ async def update_exclusion(self, r"""Changes one or more properties of an existing exclusion. - .. code-block:: python from google.cloud import logging_v2 @@ -2302,7 +2291,6 @@ async def get_cmek_settings(self, Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 @@ -2404,7 +2392,6 @@ async def update_cmek_settings(self, Router `__ for more information. - .. code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index b4e629c724dc..580743c9d20e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -428,7 +428,7 @@ def list_buckets(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_buckets(): # Create a client @@ -543,7 +543,7 @@ def get_bucket(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_bucket(): # Create a client @@ -615,10 +615,9 @@ def create_bucket(self, entries. Once a bucket has been created, the region cannot be changed. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_bucket(): # Create a client @@ -699,10 +698,9 @@ def update_bucket(self, A buckets region may not be modified after it is created. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_bucket(): # Create a client @@ -774,10 +772,9 @@ def delete_bucket(self, state. After 7 days, the bucket will be purged and all logs in the bucket will be permanently deleted. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_bucket(): # Create a client @@ -838,10 +835,9 @@ def undelete_bucket(self, r"""Undeletes a bucket. A bucket that has been deleted may be undeleted within the grace period of 7 days. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_undelete_bucket(): # Create a client @@ -904,7 +900,7 @@ def list_views(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_views(): # Create a client @@ -1011,7 +1007,7 @@ def get_view(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_view(): # Create a client @@ -1084,10 +1080,9 @@ def create_view(self, r"""Creates a view over logs in a bucket. A bucket may contain a maximum of 50 views. - .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_view(): # Create a client @@ -1161,10 +1156,9 @@ def update_view(self, r"""Updates a view. This method replaces the following fields in the existing view with values from the new view: ``filter``. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_view(): # Create a client @@ -1238,7 +1232,7 @@ def delete_view(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_view(): # Create a client @@ -1301,7 +1295,7 @@ def list_sinks(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_sinks(): # Create a client @@ -1413,7 +1407,7 @@ def get_sink(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_sink(): # Create a client @@ -1524,10 +1518,9 @@ def create_sink(self, permitted to write to the destination. A sink can export log entries only from the resource owning the sink. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_sink(): # Create a client @@ -1655,10 +1648,9 @@ def update_sink(self, The updated sink might also have a new ``writer_identity``; see the ``unique_writer_identity`` field. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_sink(): # Create a client @@ -1802,10 +1794,9 @@ def delete_sink(self, r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_sink(): # Create a client @@ -1895,7 +1886,7 @@ def list_exclusions(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_exclusions(): # Create a client @@ -2007,7 +1998,7 @@ def get_exclusion(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_exclusion(): # Create a client @@ -2120,10 +2111,9 @@ def create_exclusion(self, can be excluded. You can have up to 10 exclusions in a resource. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_exclusion(): # Create a client @@ -2251,10 +2241,9 @@ def update_exclusion(self, r"""Changes one or more properties of an existing exclusion. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_exclusion(): # Create a client @@ -2396,7 +2385,7 @@ def delete_exclusion(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_exclusion(): # Create a client @@ -2492,10 +2481,9 @@ def get_cmek_settings(self, Router `__ for more information. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_cmek_settings(): # Create a client @@ -2595,10 +2583,9 @@ def update_cmek_settings(self, Router `__ for more information. - .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_cmek_settings(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 0799272f3803..5d3478a99544 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -199,7 +199,6 @@ async def delete_log(self, deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. - .. code-block:: python from google.cloud import logging_v2 @@ -308,7 +307,6 @@ async def write_log_entries(self, maximum of 1000 different resources (projects, organizations, billing accounts or folders) - .. code-block:: python from google.cloud import logging_v2 @@ -494,7 +492,6 @@ async def list_log_entries(self, For ways to export log entries, see `Exporting Logs `__. - .. code-block:: python from google.cloud import logging_v2 @@ -645,7 +642,6 @@ async def list_monitored_resource_descriptors(self, r"""Lists the descriptors for monitored resource types used by Logging. - .. code-block:: python from google.cloud import logging_v2 @@ -733,7 +729,6 @@ async def list_logs(self, or billing accounts. Only logs that have entries are listed. - .. code-block:: python from google.cloud import logging_v2 @@ -852,7 +847,6 @@ def tail_log_entries(self, Until the stream is terminated, it will continue reading logs. - .. 
code-block:: python from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 1c5dbe13537d..c632e6369c24 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -386,10 +386,9 @@ def delete_log(self, deleted. Entries received after the delete operation with a timestamp before the operation will be deleted. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_log(): # Create a client @@ -489,10 +488,9 @@ def write_log_entries(self, maximum of 1000 different resources (projects, organizations, billing accounts or folders) - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_write_log_entries(): # Create a client @@ -668,10 +666,9 @@ def list_log_entries(self, For ways to export log entries, see `Exporting Logs `__. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_log_entries(): # Create a client @@ -813,10 +810,9 @@ def list_monitored_resource_descriptors(self, r"""Lists the descriptors for monitored resource types used by Logging. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_monitored_resource_descriptors(): # Create a client @@ -896,10 +892,9 @@ def list_logs(self, or billing accounts. Only logs that have entries are listed. - .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_logs(): # Create a client @@ -1009,10 +1004,9 @@ def tail_log_entries(self, Until the stream is terminated, it will continue reading logs. - .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_tail_log_entries(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index b5fab8cf0575..1adc45ad2235 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -385,7 +385,7 @@ def list_log_metrics(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_list_log_metrics(): # Create a client @@ -494,7 +494,7 @@ def get_log_metric(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_get_log_metric(): # Create a client @@ -600,7 +600,7 @@ def create_log_metric(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_create_log_metric(): # Create a client @@ -724,7 +724,7 @@ def update_log_metric(self, .. code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_update_log_metric(): # Create a client @@ -846,7 +846,7 @@ def delete_log_metric(self, .. 
code-block:: python - from google.cloud import logging_v2 + from google.cloud import logging_v2 def sample_delete_log_metric(): # Create a client diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 5fab72341a66..54034706e68c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -228,7 +228,6 @@ async def list_instances(self, regions available to the project are queried, and the results are aggregated. - .. code-block:: python from google.cloud import redis_v1 @@ -446,7 +445,6 @@ async def create_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. code-block:: python from google.cloud import redis_v1 @@ -594,7 +592,6 @@ async def update_instance(self, operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. code-block:: python from google.cloud import redis_v1 @@ -727,7 +724,6 @@ async def upgrade_instance(self, r"""Upgrades Redis instance to the newer Redis version specified in the request. - .. code-block:: python from google.cloud import redis_v1 @@ -857,7 +853,6 @@ async def import_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. code-block:: python from google.cloud import redis_v1 @@ -986,7 +981,6 @@ async def export_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. 
code-block:: python from google.cloud import redis_v1 @@ -1113,7 +1107,6 @@ async def failover_instance(self, replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. - .. code-block:: python from google.cloud import redis_v1 @@ -1235,7 +1228,6 @@ async def delete_instance(self, r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. - .. code-block:: python from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 002e187b219f..2a63bcd27771 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -415,10 +415,9 @@ def list_instances(self, regions available to the project are queried, and the results are aggregated. - .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_list_instances(): # Create a client @@ -527,7 +526,7 @@ def get_instance(self, .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_get_instance(): # Create a client @@ -633,10 +632,9 @@ def create_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_create_instance(): # Create a client @@ -781,10 +779,9 @@ def update_instance(self, operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. 
code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_update_instance(): # Create a client @@ -914,10 +911,9 @@ def upgrade_instance(self, r"""Upgrades Redis instance to the newer Redis version specified in the request. - .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_upgrade_instance(): # Create a client @@ -1044,10 +1040,9 @@ def import_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_import_instance(): # Create a client @@ -1173,10 +1168,9 @@ def export_instance(self, The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. - .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_export_instance(): # Create a client @@ -1300,10 +1294,9 @@ def failover_instance(self, replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. - .. code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_failover_instance(): # Create a client @@ -1422,10 +1415,9 @@ def delete_instance(self, r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. - .. 
code-block:: python - from google.cloud import redis_v1 + from google.cloud import redis_v1 def sample_delete_instance(): # Create a client From 5c1dacbd27a52649d307a28ea75ebf109969b613 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sat, 9 Apr 2022 19:07:45 -0400 Subject: [PATCH 0790/1339] chore(main): release 0.65.0 (#1269) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 625e6db21c80..1200bd537f30 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [0.65.0](https://github.com/googleapis/gapic-generator-python/compare/v0.64.0...v0.65.0) (2022-04-09) + + +### Features + +* adds support for MixIns. 
([#1240](https://github.com/googleapis/gapic-generator-python/issues/1240)) ([856af2e](https://github.com/googleapis/gapic-generator-python/commit/856af2ef406e0ea380fcfaa5d505435124330c25)) + ## [0.64.0](https://github.com/googleapis/gapic-generator-python/compare/v0.63.8...v0.64.0) (2022-04-08) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e7b15e70f39e..daff028a0409 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.64.0" +version = "0.65.0" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 228c116f8c868b036a7d23f725cc398a014b7085 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Sat, 9 Apr 2022 17:30:52 -0600 Subject: [PATCH 0791/1339] snippetgen: generate all snippet metadata fields (#1230) * feat: generate all snippet metadata fields * feat: generate all snippet metadata fields * chore: fix some unit tests * test: fix more tests * chore: add ignores to _pb2 types * test: fix all existing unit tests * test: get to 100% unit test coverage * chore: rename tests to match new method name * chore: update logging goldens Co-authored-by: Anthonios Partheniou --- .../gapic/generator/generator.py | 1 + .../gapic/samplegen/samplegen.py | 75 +- .../gapic/samplegen_utils/snippet_index.py | 8 + .../snippet_metadata_asset_v1.json | 960 +++++- .../snippet_metadata_credentials_v1.json | 410 ++- .../snippet_metadata_logging_v2.json | 2716 ++++++++++++++++- .../snippet_metadata_redis_v1.json | 786 ++++- .../tests/unit/common_types.py | 9 +- .../tests/unit/generator/test_generator.py | 83 +- .../tests/unit/samplegen/test_integration.py | 275 +- .../tests/unit/samplegen/test_samplegen.py | 157 + .../unit/samplegen/test_snippet_index.py | 121 +- 12 files changed, 5232 insertions(+), 369 deletions(-) diff --git 
a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index bf05f961a3ff..963bbf8b04e8 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -197,6 +197,7 @@ def _generate_samples_and_manifest( ) snippet_metadata.file = fpath + snippet_metadata.title = fpath index.add_snippet( snippet_index.Snippet(sample, snippet_metadata)) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 7d3fd98eacbb..4c06e80d6662 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -1015,6 +1015,70 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A yield spec +def _fill_sample_metadata(sample: dict, api_schema: api.API): + """Returns snippet metadata for the sample.""" + + # Snippet Metadata can't be fully filled out in any one function + # In this function we add information from + # the API schema and sample dictionary. 
+ # See `snippet_metadata.proto` for documentation on the fields + + service = api_schema.services[sample["service"]] + method = service.methods[sample["rpc"]] + async_ = sample["transport"] == api.TRANSPORT_GRPC_ASYNC + + snippet_metadata = snippet_metadata_pb2.Snippet() # type: ignore + snippet_metadata.region_tag = sample["region_tag"] + snippet_metadata.description = f"Sample for {sample['rpc']}" + snippet_metadata.language = snippet_metadata_pb2.Language.PYTHON # type: ignore + snippet_metadata.canonical = True + snippet_metadata.origin = snippet_metadata_pb2.Snippet.Origin.API_DEFINITION # type: ignore + + # Service Client + snippet_metadata.client_method.client.short_name = service.async_client_name if async_ else service.client_name + snippet_metadata.client_method.client.full_name = f"{'.'.join(sample['module_namespace'])}.{sample['module_name']}.{snippet_metadata.client_method.client.short_name}" + + # Service + snippet_metadata.client_method.method.service.short_name = service.name + snippet_metadata.client_method.method.service.full_name = f"{api_schema.naming.proto_package}.{service.name}" + + # RPC + snippet_metadata.client_method.method.short_name = method.name + snippet_metadata.client_method.method.full_name = f"{api_schema.naming.proto_package}.{service.name}.{method.name}" + + # Client Method + setattr(snippet_metadata.client_method, "async", async_) + snippet_metadata.client_method.short_name = utils.to_snake_case( + method.name) + snippet_metadata.client_method.full_name = f"{snippet_metadata.client_method.client.full_name}.{snippet_metadata.client_method.short_name}" + + if not method.void: + snippet_metadata.client_method.result_type = method.client_output_async.ident.sphinx if async_ else method.client_output.ident.sphinx + if method.server_streaming: + snippet_metadata.client_method.result_type = f"Iterable[{snippet_metadata.client_method.result_type }]" + + # Client Method Parameters + parameters = snippet_metadata.client_method.parameters 
+ if not method.client_streaming: + parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + type=method.input.ident.sphinx, name="request")) + for field in method.flattened_fields.values(): + parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + type=field.ident.sphinx, name=field.name)) + else: + parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + type=f"Iterator[{method.input.ident.sphinx}]", name="requests")) + + parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + name="retry", type="google.api_core.retry.Retry")) + parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + name="timeout", type="float")) + parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + name="metadata", type="Sequence[Tuple[str, str]")) + + return snippet_metadata + + def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tuple[str, Any]: """Generate a standalone, runnable sample. @@ -1053,16 +1117,7 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tup v.validate_response(sample["response"]) - # Snippet Metadata can't be fully filled out in any one function - # In this function we add information from - # the API schema and sample dictionary. 
- snippet_metadata = snippet_metadata_pb2.Snippet() # type: ignore - snippet_metadata.region_tag = sample["region_tag"] - setattr(snippet_metadata.client_method, "async", - sample["transport"] == api.TRANSPORT_GRPC_ASYNC) - snippet_metadata.client_method.method.short_name = sample["rpc"] - snippet_metadata.client_method.method.service.short_name = sample["service"].split( - ".")[-1] + snippet_metadata = _fill_sample_metadata(sample, api_schema) return sample_template.render( sample=sample, diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index f04912d6f7ff..1ca10a426470 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -102,6 +102,14 @@ class SnippetIndex: def __init__(self, api_schema: api.API): self.metadata_index = snippet_metadata_pb2.Index() # type: ignore + self.metadata_index.client_library.name = api_schema.naming.warehouse_package_name + self.metadata_index.client_library.language = snippet_metadata_pb2.Language.PYTHON # type: ignore + + self.metadata_index.client_library.apis.append(snippet_metadata_pb2.Api( # type: ignore + id=api_schema.naming.proto_package, + version=api_schema.naming.version + )) + # Construct a dictionary to insert samples into based on the API schema # NOTE: In the future we expect the generator to support configured samples, # which will result in more than one sample variant per RPC. 
At that diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json index c3a46b5f47b4..148bb0b4dc01 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json @@ -1,16 +1,57 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.asset.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-asset" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_iam_policy_longrunning", "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "AnalyzeIamPolicyLongrunning" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "analyze_iam_policy_longrunning" }, + "description": "Sample for AnalyzeIamPolicyLongrunning", "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async", "segments": [ { @@ -43,18 +84,50 @@ "start": 
53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "AnalyzeIamPolicyLongrunning" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "analyze_iam_policy_longrunning" }, + "description": "Sample for AnalyzeIamPolicyLongrunning", "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync", "segments": [ { @@ -87,19 +160,51 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_iam_policy", "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicy", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "AnalyzeIamPolicy" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", + "shortName": "analyze_iam_policy" }, + "description": "Sample for AnalyzeIamPolicy", "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async", "segments": [ { @@ -132,18 +237,50 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeIamPolicy", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "AnalyzeIamPolicy" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeIamPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", + "shortName": "analyze_iam_policy" }, + "description": "Sample for AnalyzeIamPolicy", "file": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync", "segments": [ { @@ -176,19 +313,51 @@ "start": 45, 
"type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_assets_history", "method": { + "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "BatchGetAssetsHistory" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", + "shortName": "batch_get_assets_history" }, + "description": "Sample for BatchGetAssetsHistory", "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", "segments": [ { @@ -221,18 +390,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", "method": { + "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "BatchGetAssetsHistory" - } + }, + "parameters": [ + { + "name": "request", 
+ "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", + "shortName": "batch_get_assets_history" }, + "description": "Sample for BatchGetAssetsHistory", "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", "segments": [ { @@ -265,19 +466,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "CreateFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateFeedRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "create_feed" }, + "description": "Sample for CreateFeed", "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", "segments": [ { @@ -310,18 +547,54 @@ "start": 47, 
"type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_create_feed_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.create_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "CreateFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateFeedRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "create_feed" }, + "description": "Sample for CreateFeed", "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", "segments": [ { @@ -354,19 +627,54 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_create_feed_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "DeleteFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_feed" }, + "description": "Sample for DeleteFeed", "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", "segments": [ { @@ -397,18 +705,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_delete_feed_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "DeleteFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_feed" }, + "description": "Sample for DeleteFeed", "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", "segments": [ { @@ -439,19 +782,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_delete_feed_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": 
"google.cloud.asset_v1.AssetServiceAsyncClient.export_assets", "method": { + "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "ExportAssets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.ExportAssetsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_assets" }, + "description": "Sample for ExportAssets", "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", "segments": [ { @@ -484,18 +859,50 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_export_assets_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.export_assets", "method": { + "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "ExportAssets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.ExportAssetsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_assets" }, + "description": "Sample for ExportAssets", "file": 
"cloudasset_v1_generated_asset_service_export_assets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", "segments": [ { @@ -528,19 +935,55 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_export_assets_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.GetFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "GetFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.GetFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "get_feed" }, + "description": "Sample for GetFeed", "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", "segments": [ { @@ -573,18 +1016,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_get_feed_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.get_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.GetFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, 
"shortName": "GetFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.GetFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "get_feed" }, + "description": "Sample for GetFeed", "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", "segments": [ { @@ -617,19 +1096,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_get_feed_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_assets", "method": { + "fullName": "google.cloud.asset.v1.AssetService.ListAssets", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "ListAssets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", + "shortName": "list_assets" }, + "description": "Sample for ListAssets", "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", 
"segments": [ { @@ -662,18 +1177,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_list_assets_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_assets", "method": { + "fullName": "google.cloud.asset.v1.AssetService.ListAssets", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "ListAssets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", + "shortName": "list_assets" }, + "description": "Sample for ListAssets", "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", "segments": [ { @@ -706,19 +1257,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_list_assets_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_feeds", "method": { + "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "ListFeeds" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.asset_v1.types.ListFeedsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", + "shortName": "list_feeds" }, + "description": "Sample for ListFeeds", "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", "segments": [ { @@ -751,18 +1338,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_list_feeds_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_feeds", "method": { + "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "ListFeeds" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.ListFeedsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", + "shortName": "list_feeds" }, + "description": "Sample for ListFeeds", "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", "segments": [ { @@ -795,19 +1418,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"cloudasset_v1_generated_asset_service_list_feeds_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.search_all_iam_policies", "method": { + "fullName": "google.cloud.asset.v1.AssetService.SearchAllIamPolicies", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "SearchAllIamPolicies" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.SearchAllIamPoliciesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager", + "shortName": "search_all_iam_policies" }, + "description": "Sample for SearchAllIamPolicies", "file": "cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async", "segments": [ { @@ -840,18 +1503,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", "method": { + "fullName": "google.cloud.asset.v1.AssetService.SearchAllIamPolicies", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "SearchAllIamPolicies" - } + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.SearchAllIamPoliciesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager", + "shortName": "search_all_iam_policies" }, + "description": "Sample for SearchAllIamPolicies", "file": "cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync", "segments": [ { @@ -884,19 +1587,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.search_all_resources", "method": { + "fullName": "google.cloud.asset.v1.AssetService.SearchAllResources", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "SearchAllResources" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.SearchAllResourcesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "asset_types", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager", + "shortName": "search_all_resources" }, + "description": "Sample for SearchAllResources", "file": "cloudasset_v1_generated_asset_service_search_all_resources_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_async", "segments": [ { @@ -929,18 +1676,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_search_all_resources_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.search_all_resources", "method": { + "fullName": "google.cloud.asset.v1.AssetService.SearchAllResources", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "SearchAllResources" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.SearchAllResourcesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "query", + "type": "str" + }, + { + "name": "asset_types", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager", + "shortName": "search_all_resources" }, + "description": "Sample for SearchAllResources", "file": "cloudasset_v1_generated_asset_service_search_all_resources_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_sync", "segments": [ { @@ -973,19 +1764,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"cloudasset_v1_generated_asset_service_search_all_resources_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.update_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.UpdateFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "UpdateFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.UpdateFeedRequest" + }, + { + "name": "feed", + "type": "google.cloud.asset_v1.types.Feed" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "update_feed" }, + "description": "Sample for UpdateFeed", "file": "cloudasset_v1_generated_asset_service_update_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_async", "segments": [ { @@ -1018,18 +1845,54 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_update_feed_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.update_feed", "method": { + "fullName": "google.cloud.asset.v1.AssetService.UpdateFeed", "service": { + "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, "shortName": "UpdateFeed" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.UpdateFeedRequest" + }, + { + "name": "feed", + "type": "google.cloud.asset_v1.types.Feed" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "update_feed" }, + "description": "Sample for UpdateFeed", "file": "cloudasset_v1_generated_asset_service_update_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_sync", "segments": [ { @@ -1062,7 +1925,8 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "cloudasset_v1_generated_asset_service_update_feed_sync.py" } ] } diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json index bfe0c2f97c74..ba587abf1c65 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json @@ -1,16 +1,73 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.iam.credentials.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-iam-credentials" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient", + "shortName": "IAMCredentialsAsyncClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient.generate_access_token", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.GenerateAccessToken", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "GenerateAccessToken" - } + }, + "parameters": [ + { + "name": "request", + 
"type": "google.iam.credentials_v1.types.GenerateAccessTokenRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "scope", + "type": "Sequence[str]" + }, + { + "name": "lifetime", + "type": "google.protobuf.duration_pb2.Duration" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.GenerateAccessTokenResponse", + "shortName": "generate_access_token" }, + "description": "Sample for GenerateAccessToken", "file": "iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async", "segments": [ { @@ -43,18 +100,66 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsClient", + "shortName": "IAMCredentialsClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsClient.generate_access_token", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.GenerateAccessToken", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "GenerateAccessToken" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.GenerateAccessTokenRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "scope", + "type": "Sequence[str]" + }, + { + "name": "lifetime", + "type": "google.protobuf.duration_pb2.Duration" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.GenerateAccessTokenResponse", + "shortName": "generate_access_token" }, + "description": "Sample for GenerateAccessToken", "file": "iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync", "segments": [ { @@ -87,19 +192,67 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient", + "shortName": "IAMCredentialsAsyncClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient.generate_id_token", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.GenerateIdToken", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "GenerateIdToken" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.GenerateIdTokenRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "audience", + "type": "str" + }, + { + "name": "include_email", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.GenerateIdTokenResponse", + "shortName": "generate_id_token" }, + "description": "Sample for GenerateIdToken", "file": "iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async", "segments": [ { @@ -132,18 +285,66 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsClient", + "shortName": "IAMCredentialsClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsClient.generate_id_token", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.GenerateIdToken", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "GenerateIdToken" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.GenerateIdTokenRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "audience", + "type": "str" + }, + { + "name": "include_email", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.GenerateIdTokenResponse", + "shortName": "generate_id_token" }, + "description": "Sample for GenerateIdToken", "file": "iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync", "segments": [ { @@ -176,19 +377,63 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient", + "shortName": "IAMCredentialsAsyncClient" + }, + "fullName": 
"google.iam.credentials_v1.IAMCredentialsAsyncClient.sign_blob", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.SignBlob", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "SignBlob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.SignBlobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "payload", + "type": "bytes" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.SignBlobResponse", + "shortName": "sign_blob" }, + "description": "Sample for SignBlob", "file": "iamcredentials_v1_generated_iam_credentials_sign_blob_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignBlob_async", "segments": [ { @@ -221,18 +466,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_sign_blob_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsClient", + "shortName": "IAMCredentialsClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsClient.sign_blob", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.SignBlob", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "SignBlob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.SignBlobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "payload", + "type": "bytes" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.SignBlobResponse", + "shortName": "sign_blob" }, + "description": "Sample for SignBlob", "file": "iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignBlob_sync", "segments": [ { @@ -265,19 +554,63 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient", + "shortName": "IAMCredentialsAsyncClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsAsyncClient.sign_jwt", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.SignJwt", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "SignJwt" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.SignJwtRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "payload", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.SignJwtResponse", + "shortName": "sign_jwt" }, + "description": "Sample for SignJwt", "file": "iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignJwt_async", "segments": [ { @@ -310,18 +643,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - 
] + ], + "title": "iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.iam.credentials_v1.IAMCredentialsClient", + "shortName": "IAMCredentialsClient" + }, + "fullName": "google.iam.credentials_v1.IAMCredentialsClient.sign_jwt", "method": { + "fullName": "google.iam.credentials.v1.IAMCredentials.SignJwt", "service": { + "fullName": "google.iam.credentials.v1.IAMCredentials", "shortName": "IAMCredentials" }, "shortName": "SignJwt" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.iam.credentials_v1.types.SignJwtRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "delegates", + "type": "Sequence[str]" + }, + { + "name": "payload", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.iam.credentials_v1.types.SignJwtResponse", + "shortName": "sign_jwt" }, + "description": "Sample for SignJwt", "file": "iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignJwt_sync", "segments": [ { @@ -354,7 +731,8 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py" } ] } diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json index 3cc40b6fa584..a72b4aebc560 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -1,16 +1,57 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.logging.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-logging" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, + "description": "Sample for CreateBucket", "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { @@ -43,18 +84,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" 
}, "shortName": "CreateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, + "description": "Sample for CreateBucket", "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { @@ -87,19 +160,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, + "description": "Sample for CreateExclusion", "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { @@ -132,18 +245,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, + "description": "Sample for CreateExclusion", "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { @@ -176,19 +329,59 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2.CreateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" }, + "description": "Sample for CreateSink", "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", "segments": [ { @@ -221,18 +414,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" }, + "description": "Sample for CreateSink", "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", "segments": [ { @@ -265,19 +498,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" }, + "description": "Sample for CreateView", "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", "segments": [ { @@ -310,18 +575,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2.CreateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "CreateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" }, + "description": "Sample for CreateView", "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", "segments": [ { @@ -354,19 +651,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" }, + "description": "Sample for DeleteBucket", "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { @@ -397,18 +725,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" }, + "description": "Sample for DeleteBucket", "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { @@ -439,19 +798,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" }, + "description": "Sample for DeleteExclusion", "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", "segments": [ { @@ -482,18 +876,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" }, + "description": "Sample for DeleteExclusion", "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { @@ -524,19 +953,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"logging_v2_generated_config_service_v2_delete_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" }, + "description": "Sample for DeleteSink", "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { @@ -567,18 +1031,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + 
"name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" }, + "description": "Sample for DeleteSink", "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { @@ -609,19 +1108,50 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" }, + "description": "Sample for DeleteView", "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { @@ -652,18 +1182,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", "method": { + 
"fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "DeleteView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" }, + "description": "Sample for DeleteView", "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { @@ -694,19 +1255,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, + "description": "Sample for GetBucket", "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { @@ -739,18 +1332,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, + "description": "Sample for GetBucket", "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { @@ -783,19 +1408,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, + "description": "Sample for GetCmekSettings", "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { @@ -828,18 +1485,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, + "description": "Sample for GetCmekSettings", "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { @@ -872,19 +1561,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, + "description": "Sample for GetExclusion", "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { @@ -917,18 +1642,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + 
}, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, + "description": "Sample for GetExclusion", "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { @@ -961,19 +1722,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, + "description": "Sample for GetSink", "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { @@ -1006,18 +1803,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { + "canonical": 
true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, + "description": "Sample for GetSink", "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { @@ -1050,19 +1883,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, + "description": "Sample for GetView", "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { @@ -1095,18 +1960,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "GetView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, + "description": "Sample for GetView", "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { @@ -1139,19 +2036,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2.ListBuckets", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListBuckets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, + "description": "Sample for ListBuckets", "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { @@ -1184,18 +2117,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListBuckets" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, + "description": 
"Sample for ListBuckets", "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { @@ -1228,19 +2197,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListExclusions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, + "description": "Sample for ListExclusions", "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { @@ -1273,18 +2278,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListExclusions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, + "description": "Sample for ListExclusions", "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { @@ -1317,19 +2358,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListSinks" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", + "shortName": "list_sinks" }, + "description": "Sample for ListSinks", "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { @@ -1362,18 +2439,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListSinks" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", + "shortName": "list_sinks" }, + "description": "Sample for ListSinks", "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { @@ -1406,19 +2519,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": 
"ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListViews" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", + "shortName": "list_views" }, + "description": "Sample for ListViews", "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { @@ -1451,18 +2600,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_views_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "ListViews" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", + "shortName": "list_views" }, + "description": "Sample for ListViews", "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { @@ -1495,19 +2680,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_list_views_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UndeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "undelete_bucket" }, + "description": "Sample for UndeleteBucket", "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { @@ -1538,18 +2754,49 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", "method": { 
+ "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UndeleteBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "undelete_bucket" }, + "description": "Sample for UndeleteBucket", "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { @@ -1580,19 +2827,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, + "description": "Sample for UpdateBucket", "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", 
"regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { @@ -1625,18 +2904,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateBucket" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, + "description": "Sample for UpdateBucket", "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { @@ -1669,19 +2980,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateCmekSettings" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "update_cmek_settings" }, + "description": "Sample for UpdateCmekSettings", "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", "segments": [ { @@ -1714,18 +3057,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_cmek_settings", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateCmekSettings" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "update_cmek_settings" }, + "description": "Sample for UpdateCmekSettings", "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", "segments": [ { @@ 
-1758,19 +3133,63 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_exclusion", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "update_exclusion" }, + "description": "Sample for UpdateExclusion", "file": "logging_v2_generated_config_service_v2_update_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", "segments": [ { @@ -1803,18 +3222,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_exclusion_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_exclusion", "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateExclusion" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "update_exclusion" }, + "description": "Sample for UpdateExclusion", "file": "logging_v2_generated_config_service_v2_update_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { @@ -1847,19 +3310,63 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_exclusion_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "update_sink" }, + "description": "Sample for UpdateSink", "file": "logging_v2_generated_config_service_v2_update_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", "segments": [ { @@ -1892,18 +3399,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_sink_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_sink", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateSink" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "update_sink" }, + "description": "Sample for UpdateSink", "file": "logging_v2_generated_config_service_v2_update_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_ConfigServiceV2_UpdateSink_sync", "segments": [ { @@ -1936,19 +3487,51 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_sink_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateView" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "update_view" }, + "description": "Sample for UpdateView", "file": "logging_v2_generated_config_service_v2_update_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", "segments": [ { @@ -1981,18 +3564,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_view_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_view", "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { + "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, "shortName": "UpdateView" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "update_view" }, + "description": "Sample for UpdateView", "file": "logging_v2_generated_config_service_v2_update_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", "segments": [ { @@ -2025,19 +3640,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_config_service_v2_update_view_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.delete_log", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "DeleteLog" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log" }, + "description": "Sample for DeleteLog", "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", "segments": [ { @@ -2068,18 +3718,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_async.py" }, { + 
"canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.delete_log", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "DeleteLog" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log" }, + "description": "Sample for DeleteLog", "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", "segments": [ { @@ -2110,19 +3795,63 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "Sequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", + "shortName": "list_log_entries" }, + "description": "Sample for ListLogEntries", "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", "segments": [ { @@ -2155,18 +3884,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "Sequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", + "shortName": "list_log_entries" }, + "description": "Sample for ListLogEntries", "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", "segments": [ { @@ -2199,19 +3972,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_logs", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListLogs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", + "shortName": "list_logs" }, + "description": "Sample for ListLogs", "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", "segments": [ { @@ -2244,18 +4053,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_logs", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": 
"ListLogs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", + "shortName": "list_logs" }, + "description": "Sample for ListLogs", "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", "segments": [ { @@ -2288,19 +4133,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_monitored_resource_descriptors", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListMonitoredResourceDescriptors" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", + "shortName": "list_monitored_resource_descriptors" }, + "description": "Sample for ListMonitoredResourceDescriptors", "file": 
"logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { @@ -2333,18 +4210,50 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_monitored_resource_descriptors", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "ListMonitoredResourceDescriptors" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", + "shortName": "list_monitored_resource_descriptors" }, + "description": "Sample for ListMonitoredResourceDescriptors", "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { @@ -2377,19 +4286,51 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + 
"client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.tail_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "TailLogEntries" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" }, + "description": "Sample for TailLogEntries", "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", "segments": [ { @@ -2422,18 +4363,50 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.tail_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "TailLogEntries" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" }, + "description": "Sample for TailLogEntries", "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", "segments": [ { @@ -2466,19 +4439,67 @@ "start": 52, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.write_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "WriteLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "Mapping[str, str]" + }, + { + "name": "entries", + "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" }, + "description": "Sample for WriteLogEntries", "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", "segments": [ { @@ -2511,18 +4532,66 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.write_log_entries", "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", "service": { + "fullName": "google.logging.v2.LoggingServiceV2", "shortName": "LoggingServiceV2" }, "shortName": "WriteLogEntries" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "Mapping[str, str]" + }, + { + "name": "entries", + "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" }, + "description": "Sample for WriteLogEntries", "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", "segments": [ { @@ -2555,19 +4624,59 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": 
"MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.create_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "CreateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "create_log_metric" }, + "description": "Sample for CreateLogMetric", "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", "segments": [ { @@ -2600,18 +4709,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.create_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "CreateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + 
}, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "create_log_metric" }, + "description": "Sample for CreateLogMetric", "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { @@ -2644,19 +4793,54 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.delete_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "DeleteLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log_metric" }, + "description": "Sample for DeleteLogMetric", "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { @@ -2687,18 +4871,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"logging_v2_generated_metrics_service_v2_delete_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.delete_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "DeleteLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_log_metric" }, + "description": "Sample for DeleteLogMetric", "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { @@ -2729,19 +4948,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.get_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "GetLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, 
+ { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "get_log_metric" }, + "description": "Sample for GetLogMetric", "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", "segments": [ { @@ -2774,18 +5029,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.get_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "GetLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "get_log_metric" }, + "description": "Sample for GetLogMetric", "file": "logging_v2_generated_metrics_service_v2_get_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", "segments": [ { @@ -2818,19 +5109,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"logging_v2_generated_metrics_service_v2_get_log_metric_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.list_log_metrics", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "ListLogMetrics" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", + "shortName": "list_log_metrics" }, + "description": "Sample for ListLogMetrics", "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", "segments": [ { @@ -2863,18 +5190,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.list_log_metrics", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "ListLogMetrics" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", + "shortName": "list_log_metrics" }, + "description": "Sample for ListLogMetrics", "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { @@ -2907,19 +5270,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient", + "shortName": "MetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2AsyncClient.update_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "UpdateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "update_log_metric" }, + "description": "Sample for UpdateLogMetric", "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py", 
+ "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { @@ -2952,18 +5355,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client", + "shortName": "MetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.MetricsServiceV2Client.update_log_metric", "method": { + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { + "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, "shortName": "UpdateLogMetric" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "update_log_metric" }, + "description": "Sample for UpdateLogMetric", "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { @@ -2996,7 +5439,8 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_sync.py" } ] } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json index 
1718e17d9bcd..3d687b63c092 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.redis.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-redis" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.CreateInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "redis_v1_generated_cloud_redis_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_async", "segments": [ { @@ -43,18 +96,62 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_create_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
"google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.create_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.CreateInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "redis_v1_generated_cloud_redis_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_sync", "segments": [ { @@ -87,19 +184,55 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_create_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.DeleteInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + 
{ + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "redis_v1_generated_cloud_redis_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_async", "segments": [ { @@ -132,18 +265,54 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_delete_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.delete_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.DeleteInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "redis_v1_generated_cloud_redis_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_sync", "segments": [ { @@ -176,19 +345,59 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_delete_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.export_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ExportInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "ExportInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ExportInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "output_config", + "type": "google.cloud.redis_v1.types.OutputConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_instance" }, + "description": "Sample for ExportInstance", "file": "redis_v1_generated_cloud_redis_export_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_ExportInstance_async", "segments": [ { @@ -221,18 +430,58 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_export_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.export_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ExportInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "ExportInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ExportInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "output_config", + "type": "google.cloud.redis_v1.types.OutputConfig" + }, + { + "name": "retry", 
+ "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "export_instance" }, + "description": "Sample for ExportInstance", "file": "redis_v1_generated_cloud_redis_export_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_ExportInstance_sync", "segments": [ { @@ -265,19 +514,59 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_export_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.failover_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.FailoverInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "FailoverInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "data_protection_mode", + "type": "google.cloud.redis_v1.types.FailoverInstanceRequest.DataProtectionMode" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "failover_instance" }, + "description": "Sample for FailoverInstance", "file": "redis_v1_generated_cloud_redis_failover_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_FailoverInstance_async", "segments": [ { @@ -310,18 +599,58 @@ "start": 46, "type": 
"RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_failover_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.failover_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.FailoverInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "FailoverInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.FailoverInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "data_protection_mode", + "type": "google.cloud.redis_v1.types.FailoverInstanceRequest.DataProtectionMode" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "failover_instance" }, + "description": "Sample for FailoverInstance", "file": "redis_v1_generated_cloud_redis_failover_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_FailoverInstance_sync", "segments": [ { @@ -354,19 +683,55 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_failover_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.redis_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_v1.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "redis_v1_generated_cloud_redis_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_GetInstance_async", "segments": [ { @@ -399,18 +764,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_get_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.get_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_v1.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "redis_v1_generated_cloud_redis_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_GetInstance_sync", "segments": [ { @@ -443,19 +844,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_get_instance_sync.py" }, { + 
"canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.import_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ImportInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "ImportInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ImportInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "input_config", + "type": "google.cloud.redis_v1.types.InputConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_instance" }, + "description": "Sample for ImportInstance", "file": "redis_v1_generated_cloud_redis_import_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_ImportInstance_async", "segments": [ { @@ -488,18 +929,58 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_import_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.import_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ImportInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "ImportInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ImportInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "input_config", + 
"type": "google.cloud.redis_v1.types.InputConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_instance" }, + "description": "Sample for ImportInstance", "file": "redis_v1_generated_cloud_redis_import_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_ImportInstance_sync", "segments": [ { @@ -532,19 +1013,55 @@ "start": 50, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_import_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ListInstances", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "redis_v1_generated_cloud_redis_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_ListInstances_async", "segments": [ { @@ -577,18 +1094,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"redis_v1_generated_cloud_redis_list_instances_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.list_instances", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ListInstances", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "redis_v1_generated_cloud_redis_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_ListInstances_sync", "segments": [ { @@ -621,19 +1174,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_list_instances_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.UpdateInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.UpdateInstanceRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, 
+ { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "redis_v1_generated_cloud_redis_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_async", "segments": [ { @@ -666,18 +1259,58 @@ "start": 51, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_update_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.update_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.UpdateInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.UpdateInstanceRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "redis_v1_generated_cloud_redis_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_sync", "segments": [ 
{ @@ -710,19 +1343,59 @@ "start": 51, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_update_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.upgrade_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.UpgradeInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "UpgradeInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.UpgradeInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "redis_version", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "upgrade_instance" }, + "description": "Sample for UpgradeInstance", "file": "redis_v1_generated_cloud_redis_upgrade_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_UpgradeInstance_async", "segments": [ { @@ -755,18 +1428,58 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_upgrade_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.upgrade_instance", "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.UpgradeInstance", "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", "shortName": "CloudRedis" }, "shortName": "UpgradeInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.redis_v1.types.UpgradeInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "redis_version", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "upgrade_instance" }, + "description": "Sample for UpgradeInstance", "file": "redis_v1_generated_cloud_redis_upgrade_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "redis_v1_generated_CloudRedis_UpgradeInstance_sync", "segments": [ { @@ -799,7 +1512,8 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "redis_v1_generated_cloud_redis_upgrade_instance_sync.py" } ] } diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index 1d10553b7dd4..6972c4726eb9 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -27,6 +27,7 @@ @dataclasses.dataclass(frozen=True) class DummyMethod: + name: bool = False input: bool = False output: bool = False lro: bool = False @@ -35,9 +36,12 @@ class DummyMethod: client_streaming: bool = False server_streaming: bool = False flattened_fields: Dict[str, Any] = dataclasses.field(default_factory=dict) + client_output: bool = False + client_output_async: bool = False -DummyIdent = namedtuple("DummyIdent", ["name"]) +DummyIdent = namedtuple("DummyIdent", ["name", "sphinx"]) +DummyIdent.__new__.__defaults__ = (False,) * len(DummyIdent._fields) DummyMessageTypePB = namedtuple("DummyMessageTypePB", ["name"]) @@ -57,6 +61,7 @@ class DummyMethod: "field_pb", "meta", "is_primitive", + "ident", "type"]) DummyFieldBase.__new__.__defaults__ = (False,) * len(DummyFieldBase._fields) @@ -99,7 +104,7 @@ def resource_path_args(self): DummyApiSchema.__new__.__defaults__ = 
(False,) * len(DummyApiSchema._fields) DummyNaming = namedtuple( - "DummyNaming", ["warehouse_package_name", "name", "version", "versioned_module_name", "module_namespace"]) + "DummyNaming", ["warehouse_package_name", "name", "version", "versioned_module_name", "module_namespace", "proto_package"]) DummyNaming.__new__.__defaults__ = (False,) * len(DummyNaming._fields) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 3482a0df4141..7c634fc2dacb 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -464,7 +464,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): }, )}, naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca"), ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): @@ -472,16 +472,24 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): api_schema, opts=Options.build("autogen-snippets=False")) expected_snippet_index_json = { + "clientLibrary": { + "apis": [{ + "id": "google.mollusca", + "version": "v1" + }], + "language": "PYTHON", + "name": "mollusc-cephalopod-teuthida-" + }, "snippets": [ { "clientMethod": { "method": { - "shortName": "GetSquidStreaming", "service": { "shortName": "Mollusc" - } - } - }, + }, + "shortName": "GetSquidStreaming" + } + }, "file": "squid_sample.py", "segments": [ {"type": "FULL"}, @@ -490,17 +498,18 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, {"type": "RESPONSE_HANDLING"} - ] + ], + "title": "squid_sample.py" }, { "clientMethod": { "method": { - 
"shortName": "GetClam", "service": { "shortName": "Mollusc" - } - } - }, + }, + "shortName": "GetClam" + } + }, "file": "clam_sample.py", "segments": [ {"type": "FULL"}, @@ -509,10 +518,11 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, {"type": "RESPONSE_HANDLING"} - ] + ], + "title": "clam_sample.py" } ] - } + } assert actual_response.supported_features == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL @@ -523,6 +533,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): name="samples/generated_samples/clam_sample.py", content="\n",) assert actual_response.file[2].name == "samples/generated_samples/snippet_metadata_mollusc_v1.json" + assert json.loads( actual_response.file[2].content) == expected_snippet_index_json @@ -605,23 +616,33 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): }, )}, naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca"), ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): actual_response = g.get_response(api_schema, opts=Options.build("autogen-snippets=False")) expected_snippet_metadata_json = { + "clientLibrary": { + "apis": [ + { + "id": "google.mollusca", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "mollusc-cephalopod-teuthida-" + }, "snippets": [ { "clientMethod": { "method": { - "shortName": "GetSquidStreaming", "service": { "shortName": "Mollusc" - } - } - }, + }, + "shortName": "GetSquidStreaming" + } + }, "file": "squid_sample_1cfd0b3d.py", "segments": [ {"type": "FULL"}, @@ -630,17 +651,18 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, {"type": 
"RESPONSE_HANDLING"} - ] + ], + "title": "squid_sample_1cfd0b3d.py" }, { "clientMethod": { "method": { - "shortName": "GetSquidStreaming", "service": { "shortName": "Mollusc" - } - } - }, + }, + "shortName": "GetSquidStreaming" + } + }, "file": "squid_sample_cf4d4fa4.py", "segments": [ {"type": "FULL"}, @@ -649,17 +671,18 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, {"type": "RESPONSE_HANDLING"} - ] + ], + "title": "squid_sample_cf4d4fa4.py" }, { "clientMethod": { "method": { - "shortName": "GetSquidStreaming", "service": { "shortName": "Mollusc" - } - } - }, + }, + "shortName": "GetSquidStreaming" + } + }, "file": "7384949e.py", "segments": [ {"type": "FULL"}, @@ -668,10 +691,11 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, {"type": "RESPONSE_HANDLING"} - ] + ], + "title": "7384949e.py" } ] - } + } assert actual_response.supported_features == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL assert len(actual_response.file) == 4 @@ -684,6 +708,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): assert actual_response.file[2] == CodeGeneratorResponse.File( name="samples/generated_samples/7384949e.py", content="\n", ) + print(actual_response.file[3].content) assert actual_response.file[3].name == "samples/generated_samples/snippet_metadata_mollusc_v1.json" assert json.loads( actual_response.file[3].content) == expected_snippet_metadata_json diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index b1b439549e6b..39e8ec095632 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -63,27 +63,42 @@ def test_generate_sample_basic(): # or in features that are sufficiently small and trivial that it doesn't make 
sense # to have standalone tests. + classify_target_field = DummyField( + name="classify_target", + type=DummyMessageTypePB(name="ClassifyTarget"), + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + type=DummyMessageTypePB(name="Video"), + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ), + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget") + ) + input_type = DummyMessage( type="REQUEST TYPE", fields={ - "classify_target": DummyField( - type=DummyMessageTypePB(name="ClassifyTarget"), - message=DummyMessage( - type="CLASSIFY TYPE", - fields={ - "video": DummyField( - type=DummyMessageTypePB(name="Video"), - message=DummyMessage(type="VIDEO TYPE"), - ), - "location_annotation": DummyField( - type=DummyMessageTypePB(name="Location"), - message=DummyMessage(type="LOCATION TYPE"), - ) - }, - ) + "classify_target": classify_target_field + }, + ident=DummyIdent(name="molluscs.v1.ClassifyRequest", + sphinx="molluscs_v1.classify_request") + ) + + output_type = DummyMessage( + type="RESPONSE TYPE", + fields={ + "classification": DummyField( + type=DummyMessageTypePB(name="Classification"), ) }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest") + ident=DummyIdent(sphinx="molluscs_v1.classification") ) api_naming = naming.NewNaming( @@ -92,10 +107,12 @@ def test_generate_sample_basic(): service_pb=namedtuple('service_pb', ['name'])('MolluscService'), methods={ "Classify": DummyMethod( + name="Classify", input=input_type, output=message_factory("$resp.taxonomy"), + client_output=output_type, flattened_fields={ - "classify_target": DummyField(name="classify_target") + "classify_target": classify_target_field } ) }, @@ -130,14 +147,34 @@ def test_generate_sample_basic(): ) assert sample_str == golden_snippet("sample_basic.py") + assert json_format.MessageToDict(metadata) == { 'regionTag': 
'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', - 'clientMethod': - {'method': { + 'description': 'Sample for Classify', + 'language': 'PYTHON', + 'clientMethod': { + 'shortName': 'classify', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient.classify', + 'parameters': [ + {'type': 'molluscs_v1.classify_request', 'name': 'request'}, + {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, + {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, + {'type': 'float', 'name': 'timeout'}, + {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + ], + 'resultType': 'molluscs_v1.classification', + 'client': { + 'shortName': 'MolluscServiceClient', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient' + }, + 'method': { 'shortName': 'Classify', - 'service': {'shortName': 'Mollusc'} - }} - } + 'fullName': '.MolluscService.Classify', + 'service': {'shortName': 'MolluscService', 'fullName': '.MolluscService'}} + }, + 'canonical': True, + 'origin': 'API_DEFINITION' + } def test_generate_sample_basic_async(): @@ -147,27 +184,42 @@ def test_generate_sample_basic_async(): # or in features that are sufficiently small and trivial that it doesn't make sense # to have standalone tests. 
+ classify_target_field = DummyField( + name="classify_target", + type=DummyMessageTypePB(name="ClassifyTarget"), + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + type=DummyMessageTypePB(name="Video"), + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ), + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget") + ) + input_type = DummyMessage( type="REQUEST TYPE", fields={ - "classify_target": DummyField( - type=DummyMessageTypePB(name="ClassifyTarget"), - message=DummyMessage( - type=DummyMessageTypePB(name="CLASSIFY TYPE"), - fields={ - "video": DummyField( - type=DummyMessageTypePB(name="Video"), - message=DummyMessage(type="VIDEO TYPE"), - ), - "location_annotation": DummyField( - type=DummyMessageTypePB(name="Location"), - message=DummyMessage(type="LOCATION TYPE"), - ) - }, - ) + "classify_target": classify_target_field + }, + ident=DummyIdent(name="molluscs.v1.ClassifyRequest", + sphinx="molluscs_v1.classify_request") + ) + + output_type = DummyMessage( + type="RESPONSE TYPE", + fields={ + "classification": DummyField( + type=DummyMessageTypePB(name="Classification"), ) }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest") + ident=DummyIdent(sphinx="molluscs_v1.classification") ) api_naming = naming.NewNaming( @@ -176,10 +228,13 @@ def test_generate_sample_basic_async(): service_pb=namedtuple('service_pb', ['name'])('MolluscService'), methods={ "Classify": DummyMethod( + name="Classify", input=input_type, output=message_factory("$resp.taxonomy"), + client_output_async=output_type, + client_output=output_type, flattened_fields={ - "classify_target": DummyField(name="classify_target") + "classify_target": classify_target_field } ) }, @@ -215,15 +270,38 @@ def test_generate_sample_basic_async(): ) assert sample_str == golden_snippet("sample_basic_async.py") + assert 
json_format.MessageToDict(metadata) == { 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_async', - 'clientMethod': - { + 'description': 'Sample for Classify', + 'language': 'PYTHON', + 'clientMethod': { + 'shortName': 'classify', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceAsyncClient.classify', 'async': True, + 'parameters': [ + {'type': 'molluscs_v1.classify_request', 'name': 'request'}, + {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, + {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, + {'type': 'float', 'name': 'timeout'}, + {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + ], + 'resultType': 'molluscs_v1.classification', + 'client': { + 'shortName': 'MolluscServiceAsyncClient', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceAsyncClient' + }, 'method': { 'shortName': 'Classify', - 'service': {'shortName': 'Mollusc'} - }} + 'fullName': '.MolluscService.Classify', + 'service': { + 'shortName': 'MolluscService', + 'fullName': '.MolluscService' + } + } + }, + 'canonical': True, + 'origin': 'API_DEFINITION' } @@ -252,7 +330,18 @@ def test_generate_sample_basic_unflattenable(): ) ) }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest") + ident=DummyIdent(name="molluscs.v1.ClassifyRequest", + sphinx="molluscs_v1.classify_request") + ) + + output_type = DummyMessage( + type="RESPONSE TYPE", + fields={ + "classification": DummyField( + type=DummyMessageTypePB(name="Classification"), + ) + }, + ident=DummyIdent(sphinx="molluscs_v1.classification") ) api_naming = naming.NewNaming( @@ -261,8 +350,10 @@ def test_generate_sample_basic_unflattenable(): service_pb=namedtuple('service_pb', ['name'])('MolluscService'), methods={ "Classify": DummyMethod( + name="Classify", input=input_type, output=message_factory("$resp.taxonomy"), + client_output=output_type, ) }, visible_resources={}, @@ -296,38 +387,63 @@ def test_generate_sample_basic_unflattenable(): ) assert sample_str == 
golden_snippet("sample_basic_unflattenable.py") + assert json_format.MessageToDict(metadata) == { 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', - 'clientMethod': - { + 'description': 'Sample for Classify', + 'language': 'PYTHON', + 'clientMethod': { + 'shortName': 'classify', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient.classify', + 'parameters': [ + {'type': 'molluscs_v1.classify_request', 'name': 'request'}, + {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, + {'type': 'float', 'name': 'timeout'}, + {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + ], + 'resultType': 'molluscs_v1.classification', + 'client': { + 'shortName': 'MolluscServiceClient', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient' + }, 'method': { 'shortName': 'Classify', - 'service': {'shortName': 'Mollusc'} - }} + 'fullName': '.MolluscService.Classify', + 'service': {'shortName': 'MolluscService', 'fullName': '.MolluscService'} + } + }, + 'canonical': True, + 'origin': 'API_DEFINITION' } def test_generate_sample_void_method(): + classify_target_field = DummyField( + name="classify_target", + type=DummyMessageTypePB(name="ClassifyTarget"), + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + type=DummyMessageTypePB(name="Video"), + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), + message=DummyMessage(type="LOCATION TYPE"), + ) + }, + ), + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget") + ) + input_type = DummyMessage( type="REQUEST TYPE", fields={ - "classify_target": DummyField( - type=DummyMessageTypePB(name="ClassifyTarget"), - message=DummyMessage( - fields={ - "video": DummyField( - type=DummyMessageTypePB(name="Video"), - message=DummyMessage(type="VIDEO TYPE"), - ), - "location_annotation": DummyField( - type=DummyMessageTypePB(name="Location"), - message=DummyMessage(type="LOCATION TYPE"), - ) - }, - ) 
- ) + "classify_target": classify_target_field }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest") + ident=DummyIdent(name="molluscs.v1.ClassifyRequest", + sphinx="molluscs_v1.classify_request") ) api_naming = naming.NewNaming( @@ -336,11 +452,13 @@ def test_generate_sample_void_method(): service_pb=namedtuple('service_pb', ['name'])('MolluscService'), methods={ "Classify": DummyMethod( + name="Classify", + client_output=DummyIdent(name="classify", sphinx="classify"), void=True, input=input_type, output=message_factory("$resp.taxonomy"), flattened_fields={ - "classify_target": DummyField(name="classify_target") + "classify_target": classify_target_field, } ) }, @@ -374,14 +492,33 @@ def test_generate_sample_void_method(): ) assert sample_str == golden_snippet("sample_basic_void_method.py") + assert json_format.MessageToDict(metadata) == { 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', - 'clientMethod': - { + 'description': 'Sample for Classify', + 'language': 'PYTHON', + 'clientMethod': { + 'shortName': 'classify', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient.classify', + 'parameters': [ + {'type': 'molluscs_v1.classify_request', 'name': 'request'}, + {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, + {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, + {'type': 'float', 'name': 'timeout'}, + {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + ], + 'client': { + 'shortName': 'MolluscServiceClient', + 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient' + }, 'method': { 'shortName': 'Classify', - 'service': {'shortName': 'Mollusc'} - }} + 'fullName': '.MolluscService.Classify', + 'service': {'shortName': 'MolluscService', 'fullName': '.MolluscService'} + } + }, + 'canonical': True, + 'origin': 'API_DEFINITION' } diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 
ba5840a51c04..a85e0e832fc5 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -21,6 +21,8 @@ from google.api import client_pb2 from google.api import resource_pb2 from google.protobuf import descriptor_pb2 +from google.protobuf import json_format + import gapic.samplegen.samplegen as samplegen import gapic.samplegen_utils.types as types @@ -2066,6 +2068,161 @@ def test_generate_sample_spec_basic(): } +def test__set_sample_metadata_server_streaming(): + sample = { + "rpc": "Ramshorn", + "transport": "grpc", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn", + "module_namespace": ["animalia"], + "module_name": "mollusca_v1" + } + + service_options = descriptor_pb2.ServiceOptions() + service_options.Extensions[client_pb2.default_host] = "example.googleapis.com" + + api_schema = api.API.build( + file_descriptors=[ + descriptor_pb2.FileDescriptorProto( + name="cephalopod.proto", + package="animalia.mollusca.v1", + message_type=[ + descriptor_pb2.DescriptorProto( + name="MolluscRequest", + ), + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ], + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Squid", + options=service_options, + method=[ + descriptor_pb2.MethodDescriptorProto( + server_streaming=True, + name="Ramshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + ], + ) + ] + ) + + snippet_metadata = samplegen._fill_sample_metadata(sample, api_schema) + + assert json_format.MessageToDict(snippet_metadata) == { + 'regionTag': 'example_v1_generated_Squid_Ramshorn_sync', + 'description': 'Sample for Ramshorn', + 'language': 'PYTHON', + 'clientMethod': { + 'shortName': 'ramshorn', + 'fullName': 'animalia.mollusca_v1.SquidClient.ramshorn', + 'parameters': [ + {'type': 
'animalia.mollusca_v1.types.MolluscRequest', 'name': 'request'}, + {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, + {'type': 'float', 'name': 'timeout'}, + {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + ], + 'resultType': 'Iterable[animalia.mollusca_v1.types.Mollusc]', + 'client': { + 'shortName': 'SquidClient', + 'fullName': 'animalia.mollusca_v1.SquidClient' # FIX THE FULL NAME + }, + 'method': { + 'shortName': 'Ramshorn', + 'fullName': 'animalia.mollusca.v1.Squid.Ramshorn', + 'service': {'shortName': 'Squid', 'fullName': 'animalia.mollusca.v1.Squid'} + } + }, + 'canonical': True, + 'origin': 'API_DEFINITION' + } + + +def test__set_sample_metadata_client_streaming(): + sample = { + "rpc": "Ramshorn", + "transport": "grpc", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn", + "module_namespace": ["animalia"], + "module_name": "mollusca_v1" + } + + service_options = descriptor_pb2.ServiceOptions() + service_options.Extensions[client_pb2.default_host] = "example.googleapis.com" + + api_schema = api.API.build( + file_descriptors=[ + descriptor_pb2.FileDescriptorProto( + name="cephalopod.proto", + package="animalia.mollusca.v1", + message_type=[ + descriptor_pb2.DescriptorProto( + name="MolluscRequest", + ), + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ], + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Squid", + options=service_options, + method=[ + descriptor_pb2.MethodDescriptorProto( + client_streaming=True, + name="Ramshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + ], + ) + ] + ) + + snippet_metadata = samplegen._fill_sample_metadata(sample, api_schema) + + print(json_format.MessageToDict(snippet_metadata)) + + assert json_format.MessageToDict(snippet_metadata) == { + 'regionTag': 'example_v1_generated_Squid_Ramshorn_sync', + 'description': 'Sample 
for Ramshorn', + 'language': 'PYTHON', + 'clientMethod': { + 'shortName': 'ramshorn', + 'fullName': 'animalia.mollusca_v1.SquidClient.ramshorn', + 'parameters': [ + {'type': 'Iterator[animalia.mollusca_v1.types.MolluscRequest]', + 'name': 'requests'}, + {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, + {'type': 'float', 'name': 'timeout'}, + {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + ], + 'resultType': 'animalia.mollusca_v1.types.Mollusc', + 'client': { + 'shortName': 'SquidClient', + 'fullName': 'animalia.mollusca_v1.SquidClient' + }, + 'method': { + 'shortName': 'Ramshorn', + 'fullName': 'animalia.mollusca.v1.Squid.Ramshorn', + 'service': {'shortName': 'Squid', 'fullName': 'animalia.mollusca.v1.Squid'} + } + }, + 'canonical': True, + 'origin': 'API_DEFINITION' + } + + def make_message(name: str, package: str = 'animalia.mollusca.v1', module: str = 'cephalopoda', fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, options: descriptor_pb2.MethodOptions = None, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index e1334a6b268f..b5840fc2fd06 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -19,7 +19,7 @@ from gapic.samplegen_utils import snippet_metadata_pb2 from gapic.samplegen_utils import snippet_index, types -from ..common_types import DummyApiSchema, DummyService, DummyMethod +from ..common_types import DummyApiSchema, DummyService, DummyMethod, DummyNaming @pytest.fixture @@ -119,7 +119,13 @@ def test_add_snippet_no_matching_service(sample_str): # No 'Clam' service in API Schema index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService(name="Squid", methods={})} + services={"Squid": DummyService(name="Squid", methods={})}, + naming=DummyNaming( + 
proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), + )) with pytest.raises(types.UnknownService): index.add_snippet(snippet) @@ -134,7 +140,12 @@ def test_add_snippet_no_matching_rpc(sample_str): # No 'classify' method in 'Squid' service index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService(name="Squid", methods={"list": None})} + services={"Squid": DummyService(name="Squid", methods={"list": None})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), )) with pytest.raises(types.RpcMethodNotFound): index.add_snippet(snippet) @@ -142,6 +153,11 @@ def test_add_snippet_no_matching_rpc(sample_str): def test_get_snippet_no_matching_service(): index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), services={"Squid": DummyService( name="Squid", methods={"classify": DummyMethod()})} )) @@ -154,7 +170,12 @@ def test_get_snippet_no_matching_service(): def test_get_snippet_no_matching_rpc(): index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})} + name="Squid", methods={"classify": DummyMethod()})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), )) # No 'list' RPC in 'Squid' service @@ -170,7 +191,12 @@ def test_add_and_get_snippet_sync(sample_str): index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})} + name="Squid", methods={"classify": DummyMethod()})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), )) index.add_snippet(snippet) @@ -187,7 +213,12 @@ def 
test_add_and_get_snippet_async(sample_str): index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})} + name="Squid", methods={"classify": DummyMethod()})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), )) index.add_snippet(snippet) @@ -203,26 +234,70 @@ def test_get_metadata_json(sample_str): index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})} + name="Squid", methods={"classify": DummyMethod()})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1" + ), )) index.add_snippet(snippet) + print(index.get_metadata_json()) assert json.loads(index.get_metadata_json()) == { - 'snippets': [{'clientMethod': {'method': {'shortName': 'classify', - 'service': {'shortName': 'Squid'}}}, - 'segments': [{'end': 28, 'start': 2, 'type': 'FULL'}, - {'end': 28, 'start': 2, 'type': 'SHORT'}, - {'end': 8, - 'start': 6, - 'type': 'CLIENT_INITIALIZATION'}, - {'end': 22, - 'start': 9, - 'type': 'REQUEST_INITIALIZATION'}, - {'end': 25, - 'start': 23, - 'type': 'REQUEST_EXECUTION'}, - {'end': 29, - 'start': 26, - 'type': 'RESPONSE_HANDLING'}]}] + "clientLibrary": { + "apis": [ + { + "id": "google.mollusca", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-mollusca" + }, + "snippets": [ + { + "clientMethod": { + "method": { + "service": { + "shortName": "Squid" + }, + "shortName": "classify" + } + }, + "segments": [ + { + "end": 28, + "start": 2, + "type": "FULL" + }, + { + "end": 28, + "start": 2, + "type": "SHORT" + }, + { + "end": 8, + "start": 6, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 22, + "start": 9, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 25, + "start": 23, + "type": "REQUEST_EXECUTION" + }, + { + "end": 29, + "start": 
26, + "type": "RESPONSE_HANDLING" + } + ] + } + ] } From e07af72c111c545a84efe465581194d455d06852 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 11 Apr 2022 02:00:58 +0200 Subject: [PATCH 0792/1339] chore(deps): update dependency setuptools to v62.1.0 (#1272) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 5a0d202f0573..55904c39801c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,5 +7,5 @@ protobuf==3.20.0 pypandoc==1.7.5 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==62.0.0 +setuptools==62.1.0 grpc-google-iam-v1==0.12.4 From b8f3d4c572b97107d78b3822d1a7ae77378c110d Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Wed, 13 Apr 2022 09:38:04 -0700 Subject: [PATCH 0793/1339] fix: correct import for request message type (#1273) * fix: correct import for request message type * use existing imports field in template * add request_module_name to sample * fix tests/unit/samplegen/test_integration.py * fix tests/unit/samplegen/test_samplegen.py * fix style * remove strip * fix tests/unit/samplegen/test_snippet_index.py --- .../gapic/samplegen/samplegen.py | 29 +++++- .../gapic/samplegen_utils/snippet_index.py | 1 - .../gapic/templates/examples/sample.py.j2 | 3 +- .../tests/unit/common_types.py | 4 +- .../tests/unit/samplegen/test_samplegen.py | 91 +++++++++++++++++-- .../unit/samplegen/test_snippet_index.py | 1 + 6 files changed, 117 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 4c06e80d6662..12ebd93d0441 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -305,6 +305,14 @@ def preprocess_sample(sample, 
api_schema: api.API, rpc: wrappers.Method): # the MessageType of the request object passed to the rpc e.g, `ListRequest` sample["request_type"] = rpc.input + # We check if the request object is part of the service proto package. + # If not, it comes from a different module. + address = rpc.input.meta.address + if address.proto_package.startswith(address.api_naming.proto_package): + sample["request_module_name"] = sample["module_name"] + else: + sample["request_module_name"] = address.python_import.module + # If no request was specified in the config # Add reasonable default values as placeholders if "request" not in sample: @@ -1079,6 +1087,22 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): return snippet_metadata +def _get_sample_imports(sample: Dict, rpc: wrappers.Method) -> List[str]: + """Returns sorted sample import statements.""" + module_namespace = ".".join(sample["module_namespace"]) + module_name = sample["module_name"] + module_import = f"from {module_namespace} import {module_name}" + + address = rpc.input.meta.address + # This checks if the request message is part of the service proto package. + # If not, we should try to include a separate import statement. + if address.proto_package.startswith(address.api_naming.proto_package): + return [module_import] + else: + request_import = str(address.python_import) + return sorted([module_import, request_import]) + + def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tuple[str, Any]: """Generate a standalone, runnable sample. @@ -1119,9 +1143,12 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tup snippet_metadata = _fill_sample_metadata(sample, api_schema) + # The sample must be preprocessed before calling _get_sample_imports. 
+ imports = _get_sample_imports(sample, rpc) + return sample_template.render( sample=sample, - imports=[], + imports=imports, calling_form=calling_form, calling_form_enum=types.CallingForm, trim_blocks=True, diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index 1ca10a426470..0d179d2d413d 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -88,7 +88,6 @@ def full_snippet(self) -> str: """The portion between the START and END region tags.""" start_idx = self._full_snippet.start - 1 end_idx = self._full_snippet.end - self.sample_lines[start_idx] = self.sample_lines[start_idx].strip() return "".join(self.sample_lines[start_idx:end_idx]) diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index a75476580432..c6fb6a085fb5 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -27,13 +27,12 @@ {% for import_statement in imports %} {{ import_statement }} {% endfor %} -from {{ sample.module_namespace|join(".") }} import {{ sample.module_name }} {# also need calling form #} {% if sample.transport == "grpc-async" %}async {% endif %}def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): {{ frags.render_client_setup(sample.module_name, sample.client_name)|indent }} - {{ frags.render_request_setup(sample.request, sample.module_name, sample.request_type, calling_form, calling_form_enum)|indent }} + {{ frags.render_request_setup(sample.request, sample.request_module_name, sample.request_type, calling_form, calling_form_enum)|indent }} {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum, sample.transport) %} {{ 
frags.render_calling_form(method_call, calling_form, calling_form_enum, sample.transport, sample.response)|indent -}} {% endwith %} diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index 6972c4726eb9..a703481c429d 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -20,6 +20,7 @@ from google.protobuf import descriptor_pb2 +from gapic.schema import metadata from gapic.schema import wrappers # Injected dummy test types @@ -73,12 +74,13 @@ def mock_value_original_type(self): class DummyMessage: - def __init__(self, *, fields={}, type="", options=False, ident=False, resource_path=False): + def __init__(self, *, fields={}, type="", options=False, ident=False, resource_path=False, meta=None): self.fields = fields self.type = type self.options = options self.ident = ident self.resource_path = resource_path + self.meta = meta or metadata.Metadata() def get_field(self, field_name: str): return self.fields[field_name] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index a85e0e832fc5..45f29cdac466 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -36,28 +36,67 @@ from gapic.samplegen_utils import utils -# validate_response tests +@pytest.fixture(scope="module") +def api_naming(): + return DummyNaming( + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace=("mollusc", "cephalopod"), + proto_package="mollusc.cephalopod" + ) + @pytest.fixture(scope="module") -def dummy_api_schema(): - # For most of the unit tests in this file the internals of API Schema do not matter - classify_request_message = DummyMessage( +def request_message(): + return DummyMessage( fields={ "parent": 
DummyField(is_primitive=True, type=str, required=True, name="parent"), }, type=DummyMessageTypePB(name="ClassifyRequest"), ident=DummyIdent(name="ClassifyRequest") + ) + + +@pytest.fixture(scope="module") +def request_message_from_another_package(api_naming): + return DummyMessage( + fields={ + "parent": DummyField(is_primitive=True, type=str, required=True, name="parent"), + }, + type=DummyMessageTypePB(name="ClassifyRequest"), + ident=DummyIdent(name="ClassifyRequest"), + meta=metadata.Metadata( + address=metadata.Address( + api_naming=api_naming, + package=('a', 'b',), + module='c' + ) ) + ) + +@pytest.fixture(scope="module") +def dummy_api_schema(request_message, api_naming): return DummyApiSchema( services={"Mollusc": DummyService( methods={}, client_name="MolluscClient", resource_messages_dict={} )}, - naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), - messages=classify_request_message, + naming=api_naming, + messages=request_message, + ) + +@pytest.fixture(scope="module") +def dummy_api_schema_with_request_from_another_package( + request_message_from_another_package, api_naming): + return DummyApiSchema( + services={"Mollusc": DummyService( + methods={}, client_name="MolluscClient", + resource_messages_dict={} + )}, + naming=api_naming, + messages=request_message_from_another_package, ) @@ -278,6 +317,44 @@ def test_preprocess_sample_void_method(): assert sample["response"] == [] +def test_preprocess_sample_with_request_module_name( + dummy_api_schema_with_request_from_another_package): + sample = {"service": "Mollusc", "rpc": "Classify"} + api_schema = dummy_api_schema_with_request_from_another_package + rpc = DummyMethod(input=api_schema.messages) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + + request_module_name = sample.get("request_module_name") + assert request_module_name == 'c_pb2' + + +def 
test_get_sample_imports(dummy_api_schema): + sample = {"service": "Mollusc", "rpc": "Classify"} + api_schema = dummy_api_schema + rpc = DummyMethod(input=api_schema.messages) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + imports = samplegen._get_sample_imports(sample, rpc) + + assert imports == ["from mollusc.cephalopod import teuthida_v1"] + + +def test_get_sample_imports_with_request_from_another_package( + dummy_api_schema_with_request_from_another_package): + sample = {"service": "Mollusc", "rpc": "Classify"} + api_schema = dummy_api_schema_with_request_from_another_package + rpc = DummyMethod(input=api_schema.messages) + + samplegen.Validator.preprocess_sample(sample, api_schema, rpc) + imports = samplegen._get_sample_imports(sample, rpc) + + assert imports == [ + "from a.b import c_pb2 # type: ignore", + "from mollusc.cephalopod import teuthida_v1" + ] + + def test_define_input_param(dummy_api_schema): v = samplegen.Validator( DummyMethod(input=message_factory("mollusc.squid.mantle_length")), diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index b5840fc2fd06..f4bc08661a58 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -82,6 +82,7 @@ def test_snippet_init(sample_str): # and # [END ...] 
lines expected_full_snipppet = """from molluscs.v1 import molluscclient + def sample_classify(video, location): # Create a client client = molluscclient.MolluscServiceClient() From c064d387a6ba415d2d4a28bff309f825f2a71476 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Apr 2022 13:02:47 -0400 Subject: [PATCH 0794/1339] fix: use google-api-core==2.7.2 (#1276) * fix: use google-api-core==2.7.2 * update golden files --- packages/gapic-generator/requirements.txt | 2 +- .../services/asset_service/async_client.py | 20 ++++++++----- .../services/asset_service/transports/base.py | 20 ++++++++----- .../services/iam_credentials/async_client.py | 12 +++++--- .../iam_credentials/transports/base.py | 12 +++++--- .../config_service_v2/async_client.py | 28 ++++++++++++++----- .../config_service_v2/transports/base.py | 28 ++++++++++++++----- .../logging_service_v2/async_client.py | 24 ++++++++++++---- .../logging_service_v2/transports/base.py | 24 ++++++++++++---- .../metrics_service_v2/async_client.py | 16 ++++++++--- .../metrics_service_v2/transports/base.py | 16 ++++++++--- 11 files changed, 145 insertions(+), 57 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 55904c39801c..17afa6c4f27c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.2 -google-api-core==2.7.1 +google-api-core==2.7.2 googleapis-common-protos==1.56.0 jinja2==3.1.1 MarkupSafe==2.1.1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index f80edc7e5116..8ffd9aa924ec 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -460,7 +460,8 @@ def sample_batch_get_assets_history(): self._client._transport.batch_get_assets_history, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -678,7 +679,8 @@ def sample_get_feed(): self._client._transport.get_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -780,7 +782,8 @@ def sample_list_feeds(): self._client._transport.list_feeds, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -976,7 +979,8 @@ def sample_delete_feed(): self._client._transport.delete_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1170,7 +1174,8 @@ def sample_search_all_resources(): self._client._transport.search_all_resources, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=15.0, ), @@ -1355,7 +1360,8 @@ def sample_search_all_iam_policies(): self._client._transport.search_all_iam_policies, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=15.0, ), @@ -1448,7 +1454,7 @@ def sample_analyze_iam_policy(): self._client._transport.analyze_iam_policy, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 023764abfa14..d0596429dfa5 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -132,7 +132,8 @@ def _prep_wrapped_messages(self, client_info): self.batch_get_assets_history, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -148,7 +149,8 @@ def _prep_wrapped_messages(self, client_info): self.get_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -159,7 +161,8 @@ def _prep_wrapped_messages(self, client_info): self.list_feeds, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -175,7 +178,8 @@ def 
_prep_wrapped_messages(self, client_info): self.delete_feed, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -186,7 +190,8 @@ def _prep_wrapped_messages(self, client_info): self.search_all_resources, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=15.0, ), @@ -197,7 +202,8 @@ def _prep_wrapped_messages(self, client_info): self.search_all_iam_policies, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=15.0, ), @@ -208,7 +214,7 @@ def _prep_wrapped_messages(self, client_info): self.analyze_iam_policy, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.ServiceUnavailable, ), deadline=300.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index ae3f0dce2a4a..2b979d4ff31d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -319,7 +319,8 @@ def sample_generate_access_token(): self._client._transport.generate_access_token, default_retry=retries.Retry( 
initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -465,7 +466,8 @@ def sample_generate_id_token(): self._client._transport.generate_id_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -597,7 +599,8 @@ def sample_sign_blob(): self._client._transport.sign_blob, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -732,7 +735,8 @@ def sample_sign_jwt(): self._client._transport.sign_jwt, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 0d959ea0c534..b6ffa602ad85 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -119,7 +119,8 @@ def _prep_wrapped_messages(self, client_info): self.generate_access_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -130,7 +131,8 @@ def _prep_wrapped_messages(self, client_info): self.generate_id_token, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -141,7 +143,8 @@ def _prep_wrapped_messages(self, client_info): self.sign_blob, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -152,7 +155,8 @@ def _prep_wrapped_messages(self, client_info): self.sign_jwt, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 16ac65406c70..2d6c0e9f7620 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1133,7 +1133,9 @@ def sample_list_sinks(): self._client._transport.list_sinks, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + 
core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1256,7 +1258,9 @@ def sample_get_sink(): self._client._transport.get_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1542,7 +1546,9 @@ def sample_update_sink(): self._client._transport.update_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1642,7 +1648,9 @@ def sample_delete_sink(): self._client._transport.delete_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1748,7 +1756,9 @@ def sample_list_exclusions(): self._client._transport.list_exclusions, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -1874,7 +1884,9 @@ def sample_get_exclusion(): self._client._transport.get_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -2250,7 +2262,9 @@ def sample_delete_exclusion(): self._client._transport.delete_exclusion, 
default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 0d04db4b63b1..88ba88234db2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -178,7 +178,9 @@ def _prep_wrapped_messages(self, client_info): self.list_sinks, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -189,7 +191,9 @@ def _prep_wrapped_messages(self, client_info): self.get_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -205,7 +209,9 @@ def _prep_wrapped_messages(self, client_info): self.update_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -216,7 +222,9 @@ def 
_prep_wrapped_messages(self, client_info): self.delete_sink, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -227,7 +235,9 @@ def _prep_wrapped_messages(self, client_info): self.list_exclusions, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -238,7 +248,9 @@ def _prep_wrapped_messages(self, client_info): self.get_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -259,7 +271,9 @@ def _prep_wrapped_messages(self, client_info): self.delete_exclusion, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 5d3478a99544..a60387641cc4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -264,7 +264,9 @@ def sample_delete_log(): self._client._transport.delete_log, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -458,7 +460,9 @@ def sample_write_log_entries(): self._client._transport.write_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -604,7 +608,9 @@ def sample_list_log_entries(): self._client._transport.list_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -689,7 +695,9 @@ def sample_list_monitored_resource_descriptors(): self._client._transport.list_monitored_resource_descriptors, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -800,7 +808,9 @@ def sample_list_logs(): self._client._transport.list_logs, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, 
), @@ -897,7 +907,9 @@ def request_generator(): self._client._transport.tail_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 66c579dcd608..54c0c48b7f3b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -124,7 +124,9 @@ def _prep_wrapped_messages(self, client_info): self.delete_log, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -135,7 +137,9 @@ def _prep_wrapped_messages(self, client_info): self.write_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -146,7 +150,9 @@ def _prep_wrapped_messages(self, client_info): self.list_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + 
core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -157,7 +163,9 @@ def _prep_wrapped_messages(self, client_info): self.list_monitored_resource_descriptors, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -168,7 +176,9 @@ def _prep_wrapped_messages(self, client_info): self.list_logs, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -179,7 +189,9 @@ def _prep_wrapped_messages(self, client_info): self.tail_log_entries, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=3600.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 7969894e996f..4c119a0c875b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -265,7 +265,9 @@ def sample_list_log_metrics(): self._client._transport.list_log_metrics, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - 
core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -385,7 +387,9 @@ def sample_get_log_metric(): self._client._transport.get_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -638,7 +642,9 @@ def sample_update_log_metric(): self._client._transport.update_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -731,7 +737,9 @@ def sample_delete_log_metric(): self._client._transport.delete_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index fa09436f8c69..487dd0bdc0f5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -124,7 +124,9 @@ def _prep_wrapped_messages(self, client_info): self.list_log_metrics, default_retry=retries.Retry( 
initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -135,7 +137,9 @@ def _prep_wrapped_messages(self, client_info): self.get_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -151,7 +155,9 @@ def _prep_wrapped_messages(self, client_info): self.update_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), @@ -162,7 +168,9 @@ def _prep_wrapped_messages(self, client_info): self.delete_log_metric, default_retry=retries.Retry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.GoogleAPICallError, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, ), deadline=60.0, ), From a3b566b200254e80eabfcb56cdb4d077783ab016 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 13 Apr 2022 13:09:28 -0400 Subject: [PATCH 0795/1339] chore(main): release 0.65.1 (#1275) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1200bd537f30..747e8425d1de 100644 --- 
a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +### [0.65.1](https://github.com/googleapis/gapic-generator-python/compare/v0.65.0...v0.65.1) (2022-04-13) + + +### Bug Fixes + +* correct import for request message type ([#1273](https://github.com/googleapis/gapic-generator-python/issues/1273)) ([3406d9e](https://github.com/googleapis/gapic-generator-python/commit/3406d9e0336d2fe698b90a95e20f6aacec79763b)) +* use google-api-core==2.7.2 ([#1276](https://github.com/googleapis/gapic-generator-python/issues/1276)) ([5ab8eb5](https://github.com/googleapis/gapic-generator-python/commit/5ab8eb5a36c64e521b475cb0c045f507400d8f27)) + ## [0.65.0](https://github.com/googleapis/gapic-generator-python/compare/v0.64.0...v0.65.0) (2022-04-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index daff028a0409..0723294ee08a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.65.0" +version = "0.65.1" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 209e353f6c7589d04a364fe66983c68136b519b3 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Tue, 19 Apr 2022 11:56:38 -0700 Subject: [PATCH 0796/1339] fix: use async snippet in async client method docstring (#1280) * fix: use async snippet in async client method docstring * update integration test golden files * fix golden_update_script --- .../%sub/services/%service/async_client.py.j2 | 2 +- .../test/integration_test.bzl | 4 +- .../services/asset_service/async_client.py | 72 ++++----- .../services/iam_credentials/async_client.py | 24 +-- .../config_service_v2/async_client.py | 138 +++++++++--------- .../logging_service_v2/async_client.py | 38 ++--- .../metrics_service_v2/async_client.py | 30 ++-- .../services/cloud_redis/async_client.py | 54 +++---- 8 files changed, 181 
insertions(+), 181 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 96dd56db5d29..1e870c3d2cd7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -211,7 +211,7 @@ class {{ service.async_client_name }}: {% endif %} r"""{{ method.meta.doc|rst(width=72, indent=8)|trim }} - {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=True) %} + {% with snippet = snippet_index.get_snippet(service.name, method.name, sync=False) %} {% if snippet is not none %} .. code-block:: python diff --git a/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl b/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl index ef064a323430..0705972ec7b6 100644 --- a/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl +++ b/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl @@ -114,8 +114,8 @@ def _overwrite_golden_impl(ctx): golden_update_script_content = """ cd ${{BUILD_WORKSPACE_DIRECTORY}} # Filename pattern-based removal is needed to preserve the BUILD.bazel file. 
- find tests/Integration/goldens/{api_name}/ -name \\*.py-type f -delete - find tests/Integration/goldens/{api_name}/ -name \\*.json -type f -delete + find tests/integration/goldens/{api_name}/ -name \\*.py-type f -delete + find tests/integration/goldens/{api_name}/ -name \\*.json -type f -delete unzip -ao {goldens_output_zip} -d tests/integration/goldens/{api_name} """.format( goldens_output_zip = goldens_output_zip.path, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 8ffd9aa924ec..9fd39edd2f35 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -214,9 +214,9 @@ async def export_assets(self, from google.cloud import asset_v1 - def sample_export_assets(): + async def sample_export_assets(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) output_config = asset_v1.OutputConfig() @@ -232,7 +232,7 @@ def sample_export_assets(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -310,9 +310,9 @@ async def list_assets(self, from google.cloud import asset_v1 - def sample_list_assets(): + async def sample_list_assets(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.ListAssetsRequest( @@ -323,7 +323,7 @@ def sample_list_assets(): page_result = client.list_assets(request=request) # Handle the response - for response in page_result: + async for response in 
page_result: print(response) Args: @@ -423,9 +423,9 @@ async def batch_get_assets_history(self, from google.cloud import asset_v1 - def sample_batch_get_assets_history(): + async def sample_batch_get_assets_history(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.BatchGetAssetsHistoryRequest( @@ -433,7 +433,7 @@ def sample_batch_get_assets_history(): ) # Make the request - response = client.batch_get_assets_history(request=request) + response = await client.batch_get_assets_history(request=request) # Handle the response print(response) @@ -504,9 +504,9 @@ async def create_feed(self, from google.cloud import asset_v1 - def sample_create_feed(): + async def sample_create_feed(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) feed = asset_v1.Feed() @@ -519,7 +519,7 @@ def sample_create_feed(): ) # Make the request - response = client.create_feed(request=request) + response = await client.create_feed(request=request) # Handle the response print(response) @@ -614,9 +614,9 @@ async def get_feed(self, from google.cloud import asset_v1 - def sample_get_feed(): + async def sample_get_feed(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.GetFeedRequest( @@ -624,7 +624,7 @@ def sample_get_feed(): ) # Make the request - response = client.get_feed(request=request) + response = await client.get_feed(request=request) # Handle the response print(response) @@ -722,9 +722,9 @@ async def list_feeds(self, from google.cloud import asset_v1 - def sample_list_feeds(): + async def sample_list_feeds(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.ListFeedsRequest( @@ 
-732,7 +732,7 @@ def sample_list_feeds(): ) # Make the request - response = client.list_feeds(request=request) + response = await client.list_feeds(request=request) # Handle the response print(response) @@ -824,9 +824,9 @@ async def update_feed(self, from google.cloud import asset_v1 - def sample_update_feed(): + async def sample_update_feed(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) feed = asset_v1.Feed() @@ -837,7 +837,7 @@ def sample_update_feed(): ) # Make the request - response = client.update_feed(request=request) + response = await client.update_feed(request=request) # Handle the response print(response) @@ -928,9 +928,9 @@ async def delete_feed(self, from google.cloud import asset_v1 - def sample_delete_feed(): + async def sample_delete_feed(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.DeleteFeedRequest( @@ -938,7 +938,7 @@ def sample_delete_feed(): ) # Make the request - client.delete_feed(request=request) + await client.delete_feed(request=request) Args: request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): @@ -1023,9 +1023,9 @@ async def search_all_resources(self, from google.cloud import asset_v1 - def sample_search_all_resources(): + async def sample_search_all_resources(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.SearchAllResourcesRequest( @@ -1036,7 +1036,7 @@ def sample_search_all_resources(): page_result = client.search_all_resources(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1229,9 +1229,9 @@ async def search_all_iam_policies(self, from google.cloud import asset_v1 - def sample_search_all_iam_policies(): + async def 
sample_search_all_iam_policies(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) request = asset_v1.SearchAllIamPoliciesRequest( @@ -1242,7 +1242,7 @@ def sample_search_all_iam_policies(): page_result = client.search_all_iam_policies(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1411,9 +1411,9 @@ async def analyze_iam_policy(self, from google.cloud import asset_v1 - def sample_analyze_iam_policy(): + async def sample_analyze_iam_policy(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) analysis_query = asset_v1.IamPolicyAnalysisQuery() @@ -1424,7 +1424,7 @@ def sample_analyze_iam_policy(): ) # Make the request - response = client.analyze_iam_policy(request=request) + response = await client.analyze_iam_policy(request=request) # Handle the response print(response) @@ -1505,9 +1505,9 @@ async def analyze_iam_policy_longrunning(self, from google.cloud import asset_v1 - def sample_analyze_iam_policy_longrunning(): + async def sample_analyze_iam_policy_longrunning(): # Create a client - client = asset_v1.AssetServiceClient() + client = asset_v1.AssetServiceAsyncClient() # Initialize request argument(s) analysis_query = asset_v1.IamPolicyAnalysisQuery() @@ -1526,7 +1526,7 @@ def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 2b979d4ff31d..8306403e4d65 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -212,9 +212,9 @@ async def generate_access_token(self, from google.iam import credentials_v1 - def sample_generate_access_token(): + async def sample_generate_access_token(): # Create a client - client = credentials_v1.IAMCredentialsClient() + client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( @@ -223,7 +223,7 @@ def sample_generate_access_token(): ) # Make the request - response = client.generate_access_token(request=request) + response = await client.generate_access_token(request=request) # Handle the response print(response) @@ -365,9 +365,9 @@ async def generate_id_token(self, from google.iam import credentials_v1 - def sample_generate_id_token(): + async def sample_generate_id_token(): # Create a client - client = credentials_v1.IAMCredentialsClient() + client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) request = credentials_v1.GenerateIdTokenRequest( @@ -376,7 +376,7 @@ def sample_generate_id_token(): ) # Make the request - response = client.generate_id_token(request=request) + response = await client.generate_id_token(request=request) # Handle the response print(response) @@ -511,9 +511,9 @@ async def sign_blob(self, from google.iam import credentials_v1 - def sample_sign_blob(): + async def sample_sign_blob(): # Create a client - client = credentials_v1.IAMCredentialsClient() + client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) request = credentials_v1.SignBlobRequest( @@ -522,7 +522,7 @@ def sample_sign_blob(): ) # Make the request - response = client.sign_blob(request=request) + response = await client.sign_blob(request=request) # Handle 
the response print(response) @@ -644,9 +644,9 @@ async def sign_jwt(self, from google.iam import credentials_v1 - def sample_sign_jwt(): + async def sample_sign_jwt(): # Create a client - client = credentials_v1.IAMCredentialsClient() + client = credentials_v1.IAMCredentialsAsyncClient() # Initialize request argument(s) request = credentials_v1.SignJwtRequest( @@ -655,7 +655,7 @@ def sample_sign_jwt(): ) # Make the request - response = client.sign_jwt(request=request) + response = await client.sign_jwt(request=request) # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 2d6c0e9f7620..4ecc37923f29 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -207,9 +207,9 @@ async def list_buckets(self, from google.cloud import logging_v2 - def sample_list_buckets(): + async def sample_list_buckets(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListBucketsRequest( @@ -220,7 +220,7 @@ def sample_list_buckets(): page_result = client.list_buckets(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -322,9 +322,9 @@ async def get_bucket(self, from google.cloud import logging_v2 - def sample_get_bucket(): + async def sample_get_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = 
logging_v2.GetBucketRequest( @@ -332,7 +332,7 @@ def sample_get_bucket(): ) # Make the request - response = client.get_bucket(request=request) + response = await client.get_bucket(request=request) # Handle the response print(response) @@ -395,9 +395,9 @@ async def create_bucket(self, from google.cloud import logging_v2 - def sample_create_bucket(): + async def sample_create_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CreateBucketRequest( @@ -406,7 +406,7 @@ def sample_create_bucket(): ) # Make the request - response = client.create_bucket(request=request) + response = await client.create_bucket(request=request) # Handle the response print(response) @@ -477,9 +477,9 @@ async def update_bucket(self, from google.cloud import logging_v2 - def sample_update_bucket(): + async def sample_update_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateBucketRequest( @@ -487,7 +487,7 @@ def sample_update_bucket(): ) # Make the request - response = client.update_bucket(request=request) + response = await client.update_bucket(request=request) # Handle the response print(response) @@ -550,9 +550,9 @@ async def delete_bucket(self, from google.cloud import logging_v2 - def sample_delete_bucket(): + async def sample_delete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteBucketRequest( @@ -560,7 +560,7 @@ def sample_delete_bucket(): ) # Make the request - client.delete_bucket(request=request) + await client.delete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): @@ -612,9 +612,9 @@ async def undelete_bucket(self, from google.cloud 
import logging_v2 - def sample_undelete_bucket(): + async def sample_undelete_bucket(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UndeleteBucketRequest( @@ -622,7 +622,7 @@ def sample_undelete_bucket(): ) # Make the request - client.undelete_bucket(request=request) + await client.undelete_bucket(request=request) Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): @@ -674,9 +674,9 @@ async def list_views(self, from google.cloud import logging_v2 - def sample_list_views(): + async def sample_list_views(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListViewsRequest( @@ -687,7 +687,7 @@ def sample_list_views(): page_result = client.list_views(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -781,9 +781,9 @@ async def get_view(self, from google.cloud import logging_v2 - def sample_get_view(): + async def sample_get_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetViewRequest( @@ -791,7 +791,7 @@ def sample_get_view(): ) # Make the request - response = client.get_view(request=request) + response = await client.get_view(request=request) # Handle the response print(response) @@ -855,9 +855,9 @@ async def create_view(self, from google.cloud import logging_v2 - def sample_create_view(): + async def sample_create_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.CreateViewRequest( @@ -866,7 +866,7 @@ def sample_create_view(): ) # Make the request - response = 
client.create_view(request=request) + response = await client.create_view(request=request) # Handle the response print(response) @@ -930,9 +930,9 @@ async def update_view(self, from google.cloud import logging_v2 - def sample_update_view(): + async def sample_update_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateViewRequest( @@ -940,7 +940,7 @@ def sample_update_view(): ) # Make the request - response = client.update_view(request=request) + response = await client.update_view(request=request) # Handle the response print(response) @@ -1003,9 +1003,9 @@ async def delete_view(self, from google.cloud import logging_v2 - def sample_delete_view(): + async def sample_delete_view(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteViewRequest( @@ -1013,7 +1013,7 @@ def sample_delete_view(): ) # Make the request - client.delete_view(request=request) + await client.delete_view(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): @@ -1065,9 +1065,9 @@ async def list_sinks(self, from google.cloud import logging_v2 - def sample_list_sinks(): + async def sample_list_sinks(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListSinksRequest( @@ -1078,7 +1078,7 @@ def sample_list_sinks(): page_result = client.list_sinks(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1185,9 +1185,9 @@ async def get_sink(self, from google.cloud import logging_v2 - def sample_get_sink(): + async def sample_get_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = 
logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetSinkRequest( @@ -1195,7 +1195,7 @@ def sample_get_sink(): ) # Make the request - response = client.get_sink(request=request) + response = await client.get_sink(request=request) # Handle the response print(response) @@ -1306,9 +1306,9 @@ async def create_sink(self, from google.cloud import logging_v2 - def sample_create_sink(): + async def sample_create_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) sink = logging_v2.LogSink() @@ -1321,7 +1321,7 @@ def sample_create_sink(): ) # Make the request - response = client.create_sink(request=request) + response = await client.create_sink(request=request) # Handle the response print(response) @@ -1436,9 +1436,9 @@ async def update_sink(self, from google.cloud import logging_v2 - def sample_update_sink(): + async def sample_update_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) sink = logging_v2.LogSink() @@ -1451,7 +1451,7 @@ def sample_update_sink(): ) # Make the request - response = client.update_sink(request=request) + response = await client.update_sink(request=request) # Handle the response print(response) @@ -1590,9 +1590,9 @@ async def delete_sink(self, from google.cloud import logging_v2 - def sample_delete_sink(): + async def sample_delete_sink(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteSinkRequest( @@ -1600,7 +1600,7 @@ def sample_delete_sink(): ) # Make the request - client.delete_sink(request=request) + await client.delete_sink(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): @@ -1688,9 +1688,9 @@ async def 
list_exclusions(self, from google.cloud import logging_v2 - def sample_list_exclusions(): + async def sample_list_exclusions(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListExclusionsRequest( @@ -1701,7 +1701,7 @@ def sample_list_exclusions(): page_result = client.list_exclusions(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1808,9 +1808,9 @@ async def get_exclusion(self, from google.cloud import logging_v2 - def sample_get_exclusion(): + async def sample_get_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetExclusionRequest( @@ -1818,7 +1818,7 @@ def sample_get_exclusion(): ) # Make the request - response = client.get_exclusion(request=request) + response = await client.get_exclusion(request=request) # Handle the response print(response) @@ -1931,9 +1931,9 @@ async def create_exclusion(self, from google.cloud import logging_v2 - def sample_create_exclusion(): + async def sample_create_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) exclusion = logging_v2.LogExclusion() @@ -1946,7 +1946,7 @@ def sample_create_exclusion(): ) # Make the request - response = client.create_exclusion(request=request) + response = await client.create_exclusion(request=request) # Handle the response print(response) @@ -2061,9 +2061,9 @@ async def update_exclusion(self, from google.cloud import logging_v2 - def sample_update_exclusion(): + async def sample_update_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) exclusion = 
logging_v2.LogExclusion() @@ -2076,7 +2076,7 @@ def sample_update_exclusion(): ) # Make the request - response = client.update_exclusion(request=request) + response = await client.update_exclusion(request=request) # Handle the response print(response) @@ -2203,9 +2203,9 @@ async def delete_exclusion(self, from google.cloud import logging_v2 - def sample_delete_exclusion(): + async def sample_delete_exclusion(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteExclusionRequest( @@ -2213,7 +2213,7 @@ def sample_delete_exclusion(): ) # Make the request - client.delete_exclusion(request=request) + await client.delete_exclusion(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): @@ -2309,9 +2309,9 @@ async def get_cmek_settings(self, from google.cloud import logging_v2 - def sample_get_cmek_settings(): + async def sample_get_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetCmekSettingsRequest( @@ -2319,7 +2319,7 @@ def sample_get_cmek_settings(): ) # Make the request - response = client.get_cmek_settings(request=request) + response = await client.get_cmek_settings(request=request) # Handle the response print(response) @@ -2410,9 +2410,9 @@ async def update_cmek_settings(self, from google.cloud import logging_v2 - def sample_update_cmek_settings(): + async def sample_update_cmek_settings(): # Create a client - client = logging_v2.ConfigServiceV2Client() + client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.UpdateCmekSettingsRequest( @@ -2420,7 +2420,7 @@ def sample_update_cmek_settings(): ) # Make the request - response = client.update_cmek_settings(request=request) + response = await 
client.update_cmek_settings(request=request) # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index a60387641cc4..4948fbc3cdaf 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -203,9 +203,9 @@ async def delete_log(self, from google.cloud import logging_v2 - def sample_delete_log(): + async def sample_delete_log(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteLogRequest( @@ -213,7 +213,7 @@ def sample_delete_log(): ) # Make the request - client.delete_log(request=request) + await client.delete_log(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): @@ -313,9 +313,9 @@ async def write_log_entries(self, from google.cloud import logging_v2 - def sample_write_log_entries(): + async def sample_write_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) entries = logging_v2.LogEntry() @@ -326,7 +326,7 @@ def sample_write_log_entries(): ) # Make the request - response = client.write_log_entries(request=request) + response = await client.write_log_entries(request=request) # Handle the response print(response) @@ -500,9 +500,9 @@ async def list_log_entries(self, from google.cloud import logging_v2 - def sample_list_log_entries(): + async def sample_list_log_entries(): # Create a client - client = 
logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( @@ -513,7 +513,7 @@ def sample_list_log_entries(): page_result = client.list_log_entries(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -652,9 +652,9 @@ async def list_monitored_resource_descriptors(self, from google.cloud import logging_v2 - def sample_list_monitored_resource_descriptors(): + async def sample_list_monitored_resource_descriptors(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListMonitoredResourceDescriptorsRequest( @@ -664,7 +664,7 @@ def sample_list_monitored_resource_descriptors(): page_result = client.list_monitored_resource_descriptors(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -741,9 +741,9 @@ async def list_logs(self, from google.cloud import logging_v2 - def sample_list_logs(): + async def sample_list_logs(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogsRequest( @@ -754,7 +754,7 @@ def sample_list_logs(): page_result = client.list_logs(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -861,9 +861,9 @@ def tail_log_entries(self, from google.cloud import logging_v2 - def sample_tail_log_entries(): + async def sample_tail_log_entries(): # Create a client - client = logging_v2.LoggingServiceV2Client() + client = logging_v2.LoggingServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( @@ -881,10 +881,10 @@ def request_generator(): 
yield request # Make the request - stream = client.tail_log_entries(requests=request_generator()) + stream = await client.tail_log_entries(requests=request_generator()) # Handle the response - for response in stream: + async for response in stream: print(response) Args: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 4c119a0c875b..2368d8a72805 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -200,9 +200,9 @@ async def list_log_metrics(self, from google.cloud import logging_v2 - def sample_list_log_metrics(): + async def sample_list_log_metrics(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.ListLogMetricsRequest( @@ -213,7 +213,7 @@ def sample_list_log_metrics(): page_result = client.list_log_metrics(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -317,9 +317,9 @@ async def get_log_metric(self, from google.cloud import logging_v2 - def sample_get_log_metric(): + async def sample_get_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.GetLogMetricRequest( @@ -327,7 +327,7 @@ def sample_get_log_metric(): ) # Make the request - response = client.get_log_metric(request=request) + response = await client.get_log_metric(request=request) # Handle the response print(response) @@ -431,9 
+431,9 @@ async def create_log_metric(self, from google.cloud import logging_v2 - def sample_create_log_metric(): + async def sample_create_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) metric = logging_v2.LogMetric() @@ -446,7 +446,7 @@ def sample_create_log_metric(): ) # Make the request - response = client.create_log_metric(request=request) + response = await client.create_log_metric(request=request) # Handle the response print(response) @@ -555,9 +555,9 @@ async def update_log_metric(self, from google.cloud import logging_v2 - def sample_update_log_metric(): + async def sample_update_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) metric = logging_v2.LogMetric() @@ -570,7 +570,7 @@ def sample_update_log_metric(): ) # Make the request - response = client.update_log_metric(request=request) + response = await client.update_log_metric(request=request) # Handle the response print(response) @@ -685,9 +685,9 @@ async def delete_log_metric(self, from google.cloud import logging_v2 - def sample_delete_log_metric(): + async def sample_delete_log_metric(): # Create a client - client = logging_v2.MetricsServiceV2Client() + client = logging_v2.MetricsServiceV2AsyncClient() # Initialize request argument(s) request = logging_v2.DeleteLogMetricRequest( @@ -695,7 +695,7 @@ def sample_delete_log_metric(): ) # Make the request - client.delete_log_metric(request=request) + await client.delete_log_metric(request=request) Args: request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 
54034706e68c..97a99adf33d0 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -232,9 +232,9 @@ async def list_instances(self, from google.cloud import redis_v1 - def sample_list_instances(): + async def sample_list_instances(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) request = redis_v1.ListInstancesRequest( @@ -245,7 +245,7 @@ def sample_list_instances(): page_result = client.list_instances(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -341,9 +341,9 @@ async def get_instance(self, from google.cloud import redis_v1 - def sample_get_instance(): + async def sample_get_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) request = redis_v1.GetInstanceRequest( @@ -351,7 +351,7 @@ def sample_get_instance(): ) # Make the request - response = client.get_instance(request=request) + response = await client.get_instance(request=request) # Handle the response print(response) @@ -449,9 +449,9 @@ async def create_instance(self, from google.cloud import redis_v1 - def sample_create_instance(): + async def sample_create_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) instance = redis_v1.Instance() @@ -470,7 +470,7 @@ def sample_create_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -596,9 +596,9 @@ async def update_instance(self, from google.cloud import redis_v1 - def sample_update_instance(): + 
async def sample_update_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) instance = redis_v1.Instance() @@ -615,7 +615,7 @@ def sample_update_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -728,9 +728,9 @@ async def upgrade_instance(self, from google.cloud import redis_v1 - def sample_upgrade_instance(): + async def sample_upgrade_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) request = redis_v1.UpgradeInstanceRequest( @@ -743,7 +743,7 @@ def sample_upgrade_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -857,9 +857,9 @@ async def import_instance(self, from google.cloud import redis_v1 - def sample_import_instance(): + async def sample_import_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) input_config = redis_v1.InputConfig() @@ -875,7 +875,7 @@ def sample_import_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -985,9 +985,9 @@ async def export_instance(self, from google.cloud import redis_v1 - def sample_export_instance(): + async def sample_export_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) output_config = redis_v1.OutputConfig() @@ -1003,7 +1003,7 @@ def sample_export_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response 
print(response) @@ -1111,9 +1111,9 @@ async def failover_instance(self, from google.cloud import redis_v1 - def sample_failover_instance(): + async def sample_failover_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) request = redis_v1.FailoverInstanceRequest( @@ -1125,7 +1125,7 @@ def sample_failover_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -1232,9 +1232,9 @@ async def delete_instance(self, from google.cloud import redis_v1 - def sample_delete_instance(): + async def sample_delete_instance(): # Create a client - client = redis_v1.CloudRedisClient() + client = redis_v1.CloudRedisAsyncClient() # Initialize request argument(s) request = redis_v1.DeleteInstanceRequest( @@ -1246,7 +1246,7 @@ def sample_delete_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) From b87571fc941c7d40dc75dca7669d5206dca0a665 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 20 Apr 2022 13:01:51 -0400 Subject: [PATCH 0797/1339] fix: disambiguate method names (#1282) * fix: disambiguate method names * add eventarc golden files * update golden files --- .../.github/workflows/tests.yaml | 3 +- packages/gapic-generator/DEVELOPMENT.md | 1 + .../gapic-generator/gapic/schema/wrappers.py | 23 +- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 6 +- .../gapic/%name_%version/%sub/test_macros.j2 | 8 +- .../tests/integration/BUILD.bazel | 13 + .../eventarc_grpc_service_config.json | 34 + .../integration/goldens/eventarc/.coveragerc | 17 + .../integration/goldens/eventarc/.flake8 | 33 + .../integration/goldens/eventarc/BUILD.bazel | 12 + .../integration/goldens/eventarc/MANIFEST.in | 2 + 
.../integration/goldens/eventarc/README.rst | 49 + .../integration/goldens/eventarc/docs/conf.py | 376 +++ .../eventarc/docs/eventarc_v1/eventarc.rst | 10 + .../eventarc/docs/eventarc_v1/services.rst | 6 + .../eventarc/docs/eventarc_v1/types.rst | 7 + .../goldens/eventarc/docs/index.rst | 7 + .../google/cloud/eventarc/__init__.py | 49 + .../eventarc/google/cloud/eventarc/py.typed | 2 + .../google/cloud/eventarc_v1/__init__.py | 50 + .../cloud/eventarc_v1/gapic_metadata.json | 73 + .../google/cloud/eventarc_v1/py.typed | 2 + .../cloud/eventarc_v1/services/__init__.py | 15 + .../eventarc_v1/services/eventarc/__init__.py | 22 + .../services/eventarc/async_client.py | 807 +++++ .../eventarc_v1/services/eventarc/client.py | 1024 +++++++ .../eventarc_v1/services/eventarc/pagers.py | 140 + .../services/eventarc/transports/__init__.py | 33 + .../services/eventarc/transports/base.py | 214 ++ .../services/eventarc/transports/grpc.py | 389 +++ .../eventarc/transports/grpc_asyncio.py | 388 +++ .../cloud/eventarc_v1/types/__init__.py | 48 + .../cloud/eventarc_v1/types/eventarc.py | 305 ++ .../google/cloud/eventarc_v1/types/trigger.py | 279 ++ .../integration/goldens/eventarc/mypy.ini | 3 + .../integration/goldens/eventarc/noxfile.py | 180 ++ ...generated_eventarc_create_trigger_async.py | 59 + ..._generated_eventarc_create_trigger_sync.py | 59 + ...generated_eventarc_delete_trigger_async.py | 50 + ..._generated_eventarc_delete_trigger_sync.py | 50 + ...v1_generated_eventarc_get_trigger_async.py | 45 + ..._v1_generated_eventarc_get_trigger_sync.py | 45 + ..._generated_eventarc_list_triggers_async.py | 46 + ...1_generated_eventarc_list_triggers_sync.py | 46 + ...generated_eventarc_update_trigger_async.py | 49 + ..._generated_eventarc_update_trigger_sync.py | 49 + .../snippet_metadata_eventarc_v1.json | 859 ++++++ .../scripts/fixup_eventarc_v1_keywords.py | 180 ++ .../integration/goldens/eventarc/setup.py | 59 + .../goldens/eventarc/tests/__init__.py | 16 + 
.../goldens/eventarc/tests/unit/__init__.py | 16 + .../eventarc/tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/eventarc_v1/__init__.py | 16 + .../unit/gapic/eventarc_v1/test_eventarc.py | 2598 +++++++++++++++++ .../tests/unit/schema/wrappers/test_method.py | 22 +- 56 files changed, 8895 insertions(+), 17 deletions(-) create mode 100644 packages/gapic-generator/tests/integration/eventarc_grpc_service_config.json create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/BUILD.bazel create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/README.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed create mode 100644 
packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/setup.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py create mode 100644 
packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index b413027bbbb6..6101f68d395a 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -311,7 +311,7 @@ jobs: echo "and it will start over with a clean cache." echo "The old one will disappear after 7 days." - name: Integration Tests - run: bazel test tests/integration:asset tests/integration:credentials tests/integration:logging tests/integration:redis + run: bazel test tests/integration:asset tests/integration:credentials tests/integration:eventarc tests/integration:logging tests/integration:redis goldens-lint: runs-on: ubuntu-latest steps: @@ -328,6 +328,7 @@ jobs: run: | nox -f tests/integration/goldens/asset/noxfile.py -s blacken lint_setup_py lint nox -f tests/integration/goldens/credentials/noxfile.py -s blacken lint_setup_py lint + nox -f tests/integration/goldens/eventarc/noxfile.py -s blacken lint_setup_py lint nox -f tests/integration/goldens/logging/noxfile.py -s blacken lint_setup_py lint nox -f tests/integration/goldens/redis/noxfile.py -s blacken lint_setup_py lint style-check: diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index 2079f744bed3..3e77e54a118e 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -57,6 +57,7 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. 
```sh bazel run //tests/integration:asset_update bazel run //tests/integration:credentials_update + bazel run //tests/integration:eventarc_update bazel run //tests/integration:logging_update bazel run //tests/integration:redis_update ``` diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index ffcc123fd52b..34c0b665010c 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -30,6 +30,7 @@ import collections import dataclasses import json +import keyword import re from itertools import chain from typing import (Any, cast, Dict, FrozenSet, Iterator, Iterable, List, Mapping, @@ -1075,6 +1076,13 @@ class Method: def __getattr__(self, name): return getattr(self.method_pb, name) + @property + def safe_name(self) -> str: + # Used to prevent collisions with python keywords at the client level + + name = self.name + return name + "_" if name.lower() in keyword.kwlist else name + @property def transport_safe_name(self) -> str: # These names conflict with other methods in the transport. @@ -1083,12 +1091,15 @@ def transport_safe_name(self) -> str: # # Note: this should really be a class variable, # but python 3.6 can't handle that. 
- TRANSPORT_UNSAFE_NAMES = { - "CreateChannel", - "GrpcChannel", - "OperationsClient", - } - return f"{self.name}_" if self.name in TRANSPORT_UNSAFE_NAMES else self.name + TRANSPORT_UNSAFE_NAMES = chain( + { + "createchannel", + "grpcchannel", + "operationsclient", + }, + keyword.kwlist, + ) + return f"{self.name}_" if self.name.lower() in TRANSPORT_UNSAFE_NAMES else self.name @property def is_operation_polling_method(self): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 1e870c3d2cd7..6bfb20f587fc 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -188,7 +188,7 @@ class {{ service.async_client_name }}: ) {% for method in service.methods.values() %} - {% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} + {% with method_name = method.safe_name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, {% endwith %} {% if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 45a437168be2..89fe4e9b33e4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -359,11 +359,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for 
method in service.methods.values() %} {% if method.operation_service %}{# Uses extended operations #} -{{ macros.client_method(method, method.name|snake_case + "_unary", snippet_index, api, service) }} +{{ macros.client_method(method, method.safe_name|snake_case + "_unary", snippet_index, api, service) }} -{{ macros.client_method(method, method.name|snake_case, snippet_index, api, service, full_extended_lro=True) }} +{{ macros.client_method(method, method.safe_name|snake_case, snippet_index, api, service, full_extended_lro=True) }} {% else %} -{{ macros.client_method(method, method.name|snake_case, snippet_index, api, service) }} +{{ macros.client_method(method, method.safe_name|snake_case, snippet_index, api, service) }} {% endif %} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index ddb3ea47026a..7995c2ee8534 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1,5 +1,5 @@ {% macro grpc_required_tests(method, service, full_extended_lro=False) %} -{% with method_name = method.name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} +{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -43,7 +43,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): ) {% endif %} {% if 
method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method.safe_name|snake_case }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -227,7 +227,7 @@ def test_{{ method.name|snake_case }}_routing_parameters(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - client.{{ method.name|snake_case }}(request) + client.{{ method.safe_name|snake_case }}(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -834,7 +834,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endmacro %} {% macro rest_required_tests(method, service, full_extended_lro=False) %} -{% with method_name = method.name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} +{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. #} {% if not method.client_streaming %} @pytest.mark.parametrize("request_type", [ diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 95538a05055d..c39661a3028d 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -26,6 +26,7 @@ package(default_visibility = ["//visibility:public"]) INTEGRATION_TEST_LIBRARIES = [ "asset", # Basic case. "credentials", # Check that the capital name edge case is handled. 
+ "eventarc", # create_channel is a reserved term in transport layer. "logging", # Java package remapping in gapic.yaml. "redis", # Has a gapic.yaml. ] @@ -66,6 +67,18 @@ py_gapic_library( ], ) +# Eventarc. +py_gapic_library( + name = "eventarc_py_gapic", + srcs = ["@com_google_googleapis//google/cloud/eventarc/v1:eventarc_proto"], + grpc_service_config = "eventarc_grpc_service_config.json", + opt_args = [ + "python-gapic-namespace=google.cloud", + "python-gapic-name=eventarc", + "autogen-snippets", + ], +) + # Logging. py_gapic_library( name = "logging_py_gapic", diff --git a/packages/gapic-generator/tests/integration/eventarc_grpc_service_config.json b/packages/gapic-generator/tests/integration/eventarc_grpc_service_config.json new file mode 100644 index 000000000000..215a22a2b023 --- /dev/null +++ b/packages/gapic-generator/tests/integration/eventarc_grpc_service_config.json @@ -0,0 +1,34 @@ +{ + "methodConfig": [{ + "name": [ + { "service": "google.cloud.eventarc.v1", "method": "ListTriggers"}, + { "service": "google.cloud.eventarc.v1", "method": "GetTrigger"}, + { "service": "google.cloud.eventarc.v1", "method": "ListChannels"}, + { "service": "google.cloud.eventarc.v1", "method": "GetChannel"}, + { "service": "google.cloud.eventarc.v1", "method": "ListChannelConnections"}, + { "service": "google.cloud.eventarc.v1", "method": "GetChannelConnection"} + ], + "timeout": "60s", + "retryPolicy": { + "maxAttempts": 5, + "initialBackoff": "1s", + "maxBackoff": "60s", + "backoffMultiplier": 1.3, + "retryableStatusCodes": ["UNAVAILABLE", "UNKNOWN"] + } + }, + { + "name": [ + { "service": "google.cloud.eventarc.v1", "method": "CreateTrigger" }, + { "service": "google.cloud.eventarc.v1", "method": "DeleteTrigger" }, + { "service": "google.cloud.eventarc.v1", "method": "UpdateTrigger" }, + { "service": "google.cloud.eventarc.v1", "method": "CreateChannel" }, + { "service": "google.cloud.eventarc.v1", "method": "DeleteChannel" }, + { "service": "google.cloud.eventarc.v1", 
"method": "UpdateChannel" }, + { "service": "google.cloud.eventarc.v1", "method": "CreateChannelConnection" }, + { "service": "google.cloud.eventarc.v1", "method": "DeleteChannelConnection" }, + { "service": "google.cloud.eventarc.v1", "method": "UpdateChannelConnection" } + ], + "timeout": "60s" + }] +} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc new file mode 100644 index 000000000000..c069b0b0fc04 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/eventarc/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 b/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/eventarc/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in new file mode 100644 index 000000000000..a7ef8bafb9c6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/eventarc *.py +recursive-include google/cloud/eventarc_v1 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst new file mode 100644 index 000000000000..4c79a1d0f7db --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Eventarc API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Eventarc API. +4. `Setup Authentication.`_ + +.. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py new file mode 100644 index 000000000000..d3b29380b0a1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# +# google-cloud-eventarc documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. 
+project = u"google-cloud-eventarc" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = "google-cloud-eventarc-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-eventarc.tex", + u"google-cloud-eventarc Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). 
+man_pages = [ + ( + root_doc, + "google-cloud-eventarc", + u"Google Cloud Eventarc Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-eventarc", + u"google-cloud-eventarc Documentation", + author, + "google-cloud-eventarc", + "GAPIC library for Google Cloud Eventarc API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst new file mode 100644 index 000000000000..335577a9f38d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst @@ -0,0 +1,10 @@ +Eventarc +-------------------------- + +.. automodule:: google.cloud.eventarc_v1.services.eventarc + :members: + :inherited-members: + +.. 
automodule:: google.cloud.eventarc_v1.services.eventarc.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst new file mode 100644 index 000000000000..7ea77f99bdb4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Eventarc v1 API +========================================= +.. toctree:: + :maxdepth: 2 + + eventarc diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst new file mode 100644 index 000000000000..5d1111031f59 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Eventarc v1 API +====================================== + +.. automodule:: google.cloud.eventarc_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst new file mode 100644 index 000000000000..cd50176117ee --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. 
toctree:: + :maxdepth: 2 + + eventarc_v1/services + eventarc_v1/types diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py new file mode 100644 index 000000000000..d557c9f0b4de --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.eventarc_v1.services.eventarc.client import EventarcClient +from google.cloud.eventarc_v1.services.eventarc.async_client import EventarcAsyncClient + +from google.cloud.eventarc_v1.types.eventarc import CreateTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import DeleteTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import GetTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import ListTriggersRequest +from google.cloud.eventarc_v1.types.eventarc import ListTriggersResponse +from google.cloud.eventarc_v1.types.eventarc import OperationMetadata +from google.cloud.eventarc_v1.types.eventarc import UpdateTriggerRequest +from google.cloud.eventarc_v1.types.trigger import CloudRun +from google.cloud.eventarc_v1.types.trigger import Destination +from google.cloud.eventarc_v1.types.trigger import EventFilter +from google.cloud.eventarc_v1.types.trigger import Pubsub +from google.cloud.eventarc_v1.types.trigger import Transport +from google.cloud.eventarc_v1.types.trigger import Trigger + +__all__ = ('EventarcClient', + 'EventarcAsyncClient', + 'CreateTriggerRequest', + 'DeleteTriggerRequest', + 'GetTriggerRequest', + 'ListTriggersRequest', + 'ListTriggersResponse', + 'OperationMetadata', + 'UpdateTriggerRequest', + 'CloudRun', + 'Destination', + 'EventFilter', + 'Pubsub', + 'Transport', + 'Trigger', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed new file mode 100644 index 000000000000..ccbcd1359343 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-eventarc package uses inline types. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py new file mode 100644 index 000000000000..070e8b44991c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.eventarc import EventarcClient +from .services.eventarc import EventarcAsyncClient + +from .types.eventarc import CreateTriggerRequest +from .types.eventarc import DeleteTriggerRequest +from .types.eventarc import GetTriggerRequest +from .types.eventarc import ListTriggersRequest +from .types.eventarc import ListTriggersResponse +from .types.eventarc import OperationMetadata +from .types.eventarc import UpdateTriggerRequest +from .types.trigger import CloudRun +from .types.trigger import Destination +from .types.trigger import EventFilter +from .types.trigger import Pubsub +from .types.trigger import Transport +from .types.trigger import Trigger + +__all__ = ( + 'EventarcAsyncClient', +'CloudRun', +'CreateTriggerRequest', +'DeleteTriggerRequest', +'Destination', +'EventFilter', +'EventarcClient', +'GetTriggerRequest', +'ListTriggersRequest', +'ListTriggersResponse', +'OperationMetadata', +'Pubsub', +'Transport', +'Trigger', +'UpdateTriggerRequest', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json new file mode 100644 index 000000000000..62658e1a8252 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json @@ -0,0 +1,73 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.eventarc_v1", + "protoPackage": "google.cloud.eventarc.v1", + "schema": "1.0", + "services": { + "Eventarc": { + "clients": { + "grpc": { + "libraryClient": "EventarcClient", + "rpcs": { + "CreateTrigger": { + "methods": [ + "create_trigger" + ] + }, + "DeleteTrigger": { + "methods": [ + "delete_trigger" + ] + }, + "GetTrigger": { + "methods": [ + "get_trigger" + ] + }, + "ListTriggers": { + "methods": [ + "list_triggers" + ] + 
}, + "UpdateTrigger": { + "methods": [ + "update_trigger" + ] + } + } + }, + "grpc-async": { + "libraryClient": "EventarcAsyncClient", + "rpcs": { + "CreateTrigger": { + "methods": [ + "create_trigger" + ] + }, + "DeleteTrigger": { + "methods": [ + "delete_trigger" + ] + }, + "GetTrigger": { + "methods": [ + "get_trigger" + ] + }, + "ListTriggers": { + "methods": [ + "list_triggers" + ] + }, + "UpdateTrigger": { + "methods": [ + "update_trigger" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed new file mode 100644 index 000000000000..ccbcd1359343 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-eventarc package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py new file mode 100644 index 000000000000..fbe8bc381042 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import EventarcClient +from .async_client import EventarcAsyncClient + +__all__ = ( + 'EventarcClient', + 'EventarcAsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py new file mode 100644 index 000000000000..0c3a1af1b10d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -0,0 +1,807 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import trigger +from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport +from .client import EventarcClient + + +class EventarcAsyncClient: + """Eventarc allows users to subscribe to various events that are + provided by Google Cloud services and forward them to supported + destinations. 
+ """ + + _client: EventarcClient + + DEFAULT_ENDPOINT = EventarcClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = EventarcClient.DEFAULT_MTLS_ENDPOINT + + service_path = staticmethod(EventarcClient.service_path) + parse_service_path = staticmethod(EventarcClient.parse_service_path) + service_account_path = staticmethod(EventarcClient.service_account_path) + parse_service_account_path = staticmethod(EventarcClient.parse_service_account_path) + trigger_path = staticmethod(EventarcClient.trigger_path) + parse_trigger_path = staticmethod(EventarcClient.parse_trigger_path) + common_billing_account_path = staticmethod(EventarcClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(EventarcClient.parse_common_billing_account_path) + common_folder_path = staticmethod(EventarcClient.common_folder_path) + parse_common_folder_path = staticmethod(EventarcClient.parse_common_folder_path) + common_organization_path = staticmethod(EventarcClient.common_organization_path) + parse_common_organization_path = staticmethod(EventarcClient.parse_common_organization_path) + common_project_path = staticmethod(EventarcClient.common_project_path) + parse_common_project_path = staticmethod(EventarcClient.parse_common_project_path) + common_location_path = staticmethod(EventarcClient.common_location_path) + parse_common_location_path = staticmethod(EventarcClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EventarcAsyncClient: The constructed client. 
+        """
+        return EventarcClient.from_service_account_info.__func__(EventarcAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            EventarcAsyncClient: The constructed client.
+        """
+        return EventarcClient.from_service_account_file.__func__(EventarcAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return EventarcClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> EventarcTransport: + """Returns the transport used by the client instance. + + Returns: + EventarcTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(EventarcClient).get_transport_class, type(EventarcClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, EventarcTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the eventarc client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.EventarcTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = EventarcClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def get_trigger(self, + request: Union[eventarc.GetTriggerRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> trigger.Trigger: + r"""Get a single trigger. + + .. code-block:: python + + from google.cloud import eventarc_v1 + + async def sample_get_trigger(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_trigger(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetTriggerRequest, dict]): + The request object. The request message for the + GetTrigger method. + name (:class:`str`): + Required. The name of the trigger to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 

        Returns:
            google.cloud.eventarc_v1.types.Trigger:
                A representation of the trigger
                resource.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        # NOTE(review): unlike the sync client, the async surface coerces
        # unconditionally (no isinstance fast path), so a proto input is copied.
        request = eventarc.GetTriggerRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.get_trigger,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def list_triggers(self,
            request: Union[eventarc.ListTriggersRequest, dict] = None,
            *,
            parent: str = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> pagers.ListTriggersAsyncPager:
        r"""List triggers.

        .. code-block:: python

            from google.cloud import eventarc_v1

            async def sample_list_triggers():
                # Create a client
                client = eventarc_v1.EventarcAsyncClient()

                # Initialize request argument(s)
                request = eventarc_v1.ListTriggersRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.list_triggers(request=request)

                # Handle the response
                async for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.eventarc_v1.types.ListTriggersRequest, dict]):
                The request object. The request message for the
                ListTriggers method.
            parent (:class:`str`):
                Required. The parent collection to
                list triggers on.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager:
                The response message for the
                ListTriggers method.
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = eventarc.ListTriggersRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.list_triggers,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        # The pager holds the wrapped rpc so it can lazily fetch further pages.
        response = pagers.ListTriggersAsyncPager(
            method=rpc,
            request=request,
            response=response,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def create_trigger(self,
            request: Union[eventarc.CreateTriggerRequest, dict] = None,
            *,
            parent: str = None,
            trigger: gce_trigger.Trigger = None,
            trigger_id: str = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Create a new trigger in a particular project and
        location.

        ..
code-block:: python

            from google.cloud import eventarc_v1

            async def sample_create_trigger():
                # Create a client
                client = eventarc_v1.EventarcAsyncClient()

                # Initialize request argument(s)
                trigger = eventarc_v1.Trigger()
                trigger.name = "name_value"
                trigger.event_filters.attribute = "attribute_value"
                trigger.event_filters.value = "value_value"
                trigger.destination.cloud_run.service = "service_value"
                trigger.destination.cloud_run.region = "region_value"

                request = eventarc_v1.CreateTriggerRequest(
                    parent="parent_value",
                    trigger=trigger,
                    trigger_id="trigger_id_value",
                    validate_only=True,
                )

                # Make the request
                operation = client.create_trigger(request=request)

                print("Waiting for operation to complete...")

                response = await operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.eventarc_v1.types.CreateTriggerRequest, dict]):
                The request object. The request message for the
                CreateTrigger method.
            parent (:class:`str`):
                Required. The parent collection in
                which to add this trigger.

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            trigger (:class:`google.cloud.eventarc_v1.types.Trigger`):
                Required. The trigger to create.
                This corresponds to the ``trigger`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            trigger_id (:class:`str`):
                Required. The user-provided ID to be
                assigned to the trigger.

                This corresponds to the ``trigger_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.eventarc_v1.types.Trigger` A
                representation of the trigger resource.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, trigger, trigger_id])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = eventarc.CreateTriggerRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if trigger is not None:
            request.trigger = trigger
        if trigger_id is not None:
            request.trigger_id = trigger_id

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.create_trigger,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # The LRO's result resolves to a Trigger message (gce_trigger aliases
        # the same trigger types module imported at the top of this file).
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gce_trigger.Trigger,
            metadata_type=eventarc.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def update_trigger(self,
            request: Union[eventarc.UpdateTriggerRequest, dict] = None,
            *,
            trigger: gce_trigger.Trigger = None,
            update_mask: field_mask_pb2.FieldMask = None,
            allow_missing: bool = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Update a single trigger.

        .. code-block:: python

            from google.cloud import eventarc_v1

            async def sample_update_trigger():
                # Create a client
                client = eventarc_v1.EventarcAsyncClient()

                # Initialize request argument(s)
                request = eventarc_v1.UpdateTriggerRequest(
                    validate_only=True,
                )

                # Make the request
                operation = client.update_trigger(request=request)

                print("Waiting for operation to complete...")

                response = await operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.eventarc_v1.types.UpdateTriggerRequest, dict]):
                The request object. The request message for the
                UpdateTrigger method.
            trigger (:class:`google.cloud.eventarc_v1.types.Trigger`):
                The trigger to be updated.
                This corresponds to the ``trigger`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
                The fields to be updated; only fields explicitly
                provided will be updated. If no field mask is provided,
                all provided fields in the request will be updated. To
                update all fields, provide a field mask of "*".

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            allow_missing (:class:`bool`):
                If set to true, and the trigger is not found, a new
                trigger will be created. In this situation,
                ``update_mask`` is ignored.

                This corresponds to the ``allow_missing`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.eventarc_v1.types.Trigger` A
                representation of the trigger resource.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([trigger, update_mask, allow_missing])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = eventarc.UpdateTriggerRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if trigger is not None:
            request.trigger = trigger
        if update_mask is not None:
            request.update_mask = update_mask
        if allow_missing is not None:
            request.allow_missing = allow_missing

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.update_trigger,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        # The routing header for Update uses the nested resource name,
        # "trigger.name", rather than a top-level request field.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("trigger.name", request.trigger.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            gce_trigger.Trigger,
            metadata_type=eventarc.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def delete_trigger(self,
            request: Union[eventarc.DeleteTriggerRequest, dict] = None,
            *,
            name: str = None,
            allow_missing: bool = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> operation_async.AsyncOperation:
        r"""Delete a single trigger.

        .. code-block:: python

            from google.cloud import eventarc_v1

            async def sample_delete_trigger():
                # Create a client
                client = eventarc_v1.EventarcAsyncClient()

                # Initialize request argument(s)
                request = eventarc_v1.DeleteTriggerRequest(
                    name="name_value",
                    validate_only=True,
                )

                # Make the request
                operation = client.delete_trigger(request=request)

                print("Waiting for operation to complete...")

                response = await operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.eventarc_v1.types.DeleteTriggerRequest, dict]):
                The request object. The request message for the
                DeleteTrigger method.
            name (:class:`str`):
                Required. The name of the trigger to
                be deleted.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            allow_missing (:class:`bool`):
                If set to true, and the trigger is
                not found, the request will succeed but
                no action will be taken on the server.

                This corresponds to the ``allow_missing`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.eventarc_v1.types.Trigger` A
                representation of the trigger resource.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([name, allow_missing])
        if request is not None and has_flattened_params:
            raise ValueError("If the `request` argument is set, then none of "
                             "the individual field arguments should be set.")

        request = eventarc.DeleteTriggerRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name
        if allow_missing is not None:
            request.allow_missing = allow_missing

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.delete_trigger,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        # NOTE(review): this uses `trigger.Trigger` while create/update use
        # `gce_trigger.Trigger`; per the imports, both names alias the same
        # types module, so the behavior is identical.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            trigger.Trigger,
            metadata_type=eventarc.OperationMetadata,
        )

        # Done; return the response.
        return response

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        # Ensure the underlying transport channel is closed when the client
        # is used as an async context manager.
        await self.transport.close()

try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-eventarc",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from a source checkout);
    # fall back to a version-less ClientInfo.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = (
    "EventarcAsyncClient",
)
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py
new file mode 100644
index 000000000000..6d6c658d0e56
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py
@@ -0,0 +1,1024 @@
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
import pkg_resources

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    # Older google-api-core without _MethodDefault; fall back to `object`
    # as the sentinel type for "no retry specified".
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.api_core import operation  # type: ignore
from google.api_core import operation_async  # type: ignore
from google.cloud.eventarc_v1.services.eventarc import pagers
from google.cloud.eventarc_v1.types import eventarc
# NOTE: `trigger` and `gce_trigger` alias the same types module; the
# generated methods below use both names interchangeably.
from google.cloud.eventarc_v1.types import trigger
from google.cloud.eventarc_v1.types import trigger as gce_trigger
from google.protobuf import field_mask_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import EventarcGrpcTransport
from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport


class EventarcClientMeta(type):
    """Metaclass for the Eventarc client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
+ """ + _transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] + _transport_registry["grpc"] = EventarcGrpcTransport + _transport_registry["grpc_asyncio"] = EventarcGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[EventarcTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class EventarcClient(metaclass=EventarcClientMeta): + """Eventarc allows users to subscribe to various events that are + provided by Google Cloud services and forward them to supported + destinations. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "eventarc.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EventarcClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + EventarcClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> EventarcTransport: + """Returns the transport used by the client instance. + + Returns: + EventarcTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def service_path() -> str: + """Returns a fully-qualified service string.""" + return "*".format() + + @staticmethod + def parse_service_path(path: str) -> Dict[str,str]: + """Parses a service path into its component segments.""" + m = re.match(r"^*$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_account_path(project: str,service_account: str,) -> str: + """Returns a fully-qualified service_account string.""" + return "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + + @staticmethod + def parse_service_account_path(path: str) -> Dict[str,str]: + """Parses a service_account path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/serviceAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def trigger_path(project: str,location: str,trigger: str,) -> str: + """Returns a fully-qualified trigger string.""" + return "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + + @staticmethod + def parse_trigger_path(path: str) -> Dict[str,str]: + """Parses a trigger path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return 
"folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 

        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
        client_cert_source = None
        if use_client_cert == "true":
            if client_options.client_cert_source:
                client_cert_source = client_options.client_cert_source
            elif mtls.has_default_client_cert_source():
                client_cert_source = mtls.default_client_cert_source()

        # Figure out which api endpoint to use.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
        else:
            api_endpoint = cls.DEFAULT_ENDPOINT

        return api_endpoint, client_cert_source

    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Union[str, EventarcTransport, None] = None,
            client_options: Optional[client_options_lib.ClientOptions] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the eventarc client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, EventarcTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value).
However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()

        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options)

        api_key_value = getattr(client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, EventarcTransport):
            # transport is a EventarcTransport instance.
            if credentials or client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            import google.auth._default  # type: ignore

            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
            )

    def get_trigger(self,
            request: Union[eventarc.GetTriggerRequest, dict] = None,
            *,
            name: str = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: float = None,
            metadata: Sequence[Tuple[str, str]] = (),
            ) -> trigger.Trigger:
        r"""Get a single trigger.

        .. code-block:: python

            from google.cloud import eventarc_v1

            def sample_get_trigger():
                # Create a client
                client = eventarc_v1.EventarcClient()

                # Initialize request argument(s)
                request = eventarc_v1.GetTriggerRequest(
                    name="name_value",
                )

                # Make the request
                response = client.get_trigger(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.eventarc_v1.types.GetTriggerRequest, dict]):
                The request object. The request message for the
                GetTrigger method.
            name (str):
                Required. The name of the trigger to
                get.

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.eventarc_v1.types.Trigger:
                A representation of the trigger
                resource.
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetTriggerRequest): + request = eventarc.GetTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_triggers(self, + request: Union[eventarc.ListTriggersRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTriggersPager: + r"""List triggers. + + .. 
code-block:: python + + from google.cloud import eventarc_v1 + + def sample_list_triggers(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListTriggersRequest, dict]): + The request object. The request message for the + ListTriggers method. + parent (str): + Required. The parent collection to + list triggers on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager: + The response message for the + ListTriggers method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, eventarc.ListTriggersRequest): + request = eventarc.ListTriggersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTriggersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_trigger(self, + request: Union[eventarc.CreateTriggerRequest, dict] = None, + *, + parent: str = None, + trigger: gce_trigger.Trigger = None, + trigger_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new trigger in a particular project and + location. + + .. 
code-block:: python + + from google.cloud import eventarc_v1 + + def sample_create_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + trigger = eventarc_v1.Trigger() + trigger.name = "name_value" + trigger.event_filters.attribute = "attribute_value" + trigger.event_filters.value = "value_value" + trigger.destination.cloud_run.service = "service_value" + trigger.destination.cloud_run.region = "region_value" + + request = eventarc_v1.CreateTriggerRequest( + parent="parent_value", + trigger=trigger, + trigger_id="trigger_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateTriggerRequest, dict]): + The request object. The request message for the + CreateTrigger method. + parent (str): + Required. The parent collection in + which to add this trigger. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger (google.cloud.eventarc_v1.types.Trigger): + Required. The trigger to create. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + trigger_id (str): + Required. The user-provided ID to be + assigned to the trigger. + + This corresponds to the ``trigger_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be + :class:`google.cloud.eventarc_v1.types.Trigger` A + representation of the trigger resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, trigger, trigger_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.CreateTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.CreateTriggerRequest): + request = eventarc.CreateTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if trigger is not None: + request.trigger = trigger + if trigger_id is not None: + request.trigger_id = trigger_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_trigger.Trigger, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def update_trigger(self, + request: Union[eventarc.UpdateTriggerRequest, dict] = None, + *, + trigger: gce_trigger.Trigger = None, + update_mask: field_mask_pb2.FieldMask = None, + allow_missing: bool = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update a single trigger. + + .. code-block:: python + + from google.cloud import eventarc_v1 + + def sample_update_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateTriggerRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateTriggerRequest, dict]): + The request object. The request message for the + UpdateTrigger method. + trigger (google.cloud.eventarc_v1.types.Trigger): + The trigger to be updated. + This corresponds to the ``trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly + provided will be updated. If no field mask is provided, + all provided fields in the request will be updated. To + update all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + allow_missing (bool): + If set to true, and the trigger is not found, a new + trigger will be created. In this situation, + ``update_mask`` is ignored. + + This corresponds to the ``allow_missing`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.eventarc_v1.types.Trigger` A + representation of the trigger resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([trigger, update_mask, allow_missing]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.UpdateTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.UpdateTriggerRequest): + request = eventarc.UpdateTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if trigger is not None: + request.trigger = trigger + if update_mask is not None: + request.update_mask = update_mask + if allow_missing is not None: + request.allow_missing = allow_missing + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("trigger.name", request.trigger.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_trigger.Trigger, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_trigger(self, + request: Union[eventarc.DeleteTriggerRequest, dict] = None, + *, + name: str = None, + allow_missing: bool = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete a single trigger. + + .. code-block:: python + + from google.cloud import eventarc_v1 + + def sample_delete_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteTriggerRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteTriggerRequest, dict]): + The request object. The request message for the + DeleteTrigger method. + name (str): + Required. The name of the trigger to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + allow_missing (bool): + If set to true, and the trigger is + not found, the request will succeed but + no action will be taken on the server. + + This corresponds to the ``allow_missing`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.eventarc_v1.types.Trigger` A + representation of the trigger resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, allow_missing]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.DeleteTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.DeleteTriggerRequest): + request = eventarc.DeleteTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if allow_missing is not None: + request.allow_missing = allow_missing + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + trigger.Trigger, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-eventarc", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "EventarcClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py new file mode 100644 index 000000000000..f1121f3037bb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator

from google.cloud.eventarc_v1.types import eventarc
from google.cloud.eventarc_v1.types import trigger


class ListTriggersPager:
    """A pager for iterating through ``list_triggers`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.eventarc_v1.types.ListTriggersResponse` object, and
    provides an ``__iter__`` method that walks the ``triggers`` field,
    issuing further ``ListTriggers`` requests as needed to resolve
    additional pages.

    All the usual :class:`google.cloud.eventarc_v1.types.ListTriggersResponse`
    attributes are available on the pager. If multiple requests are made,
    only the most recent response is retained and used for attribute lookup.
    """

    def __init__(self,
            method: Callable[..., eventarc.ListTriggersResponse],
            request: eventarc.ListTriggersRequest,
            response: eventarc.ListTriggersResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.eventarc_v1.types.ListTriggersRequest):
                The initial request object.
            response (google.cloud.eventarc_v1.types.ListTriggersResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutations don't leak to the caller.
        self._request = eventarc.ListTriggersRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[eventarc.ListTriggersResponse]:
        # Yield the initial page, then keep requesting the next page for
        # as long as the server supplies a continuation token.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[trigger.Trigger]:
        # Flatten the per-page `triggers` lists into one stream.
        return (item for page in self.pages for item in page.triggers)

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class ListTriggersAsyncPager:
    """A pager for iterating through ``list_triggers`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.eventarc_v1.types.ListTriggersResponse` object, and
    provides an ``__aiter__`` method that walks the ``triggers`` field,
    issuing further ``ListTriggers`` requests as needed to resolve
    additional pages.

    All the usual :class:`google.cloud.eventarc_v1.types.ListTriggersResponse`
    attributes are available on the pager. If multiple requests are made,
    only the most recent response is retained and used for attribute lookup.
    """

    def __init__(self,
            method: Callable[..., Awaitable[eventarc.ListTriggersResponse]],
            request: eventarc.ListTriggersRequest,
            response: eventarc.ListTriggersResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.eventarc_v1.types.ListTriggersRequest):
                The initial request object.
            response (google.cloud.eventarc_v1.types.ListTriggersResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutations don't leak to the caller.
        self._request = eventarc.ListTriggersRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[eventarc.ListTriggersResponse]:
        # Yield the initial page, then keep awaiting the next page for
        # as long as the server supplies a continuation token.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[trigger.Trigger]:
        async def async_generator():
            # Flatten the per-page `triggers` lists into one async stream.
            async for page in self.pages:
                for item in page.triggers:
                    yield item

        return async_generator()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
from collections import OrderedDict
from typing import Dict, Type

from .base import EventarcTransport
from .grpc import EventarcGrpcTransport
from .grpc_asyncio import EventarcGrpcAsyncIOTransport


# Registry mapping transport names to the classes implementing them; the
# client looks transports up here by name (e.g. ``transport="grpc"``).
_transport_registry: Dict[str, Type[EventarcTransport]] = OrderedDict((
    ('grpc', EventarcGrpcTransport),
    ('grpc_asyncio', EventarcGrpcAsyncIOTransport),
))

__all__ = (
    'EventarcTransport',
    'EventarcGrpcTransport',
    'EventarcGrpcAsyncIOTransport',
)
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.eventarc_v1.types import eventarc
from google.cloud.eventarc_v1.types import trigger
from google.longrunning import operations_pb2  # type: ignore

try:
    # Advertise the installed package version in the user-agent string.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-eventarc',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package not installed (e.g. running from source); use defaults.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


class EventarcTransport(abc.ABC):
    """Abstract transport class for Eventarc."""

    # OAuth scopes requested for every Eventarc RPC.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
    )

    DEFAULT_HOST: str = 'eventarc.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for
                billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT
                should be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        # Save the hostname; default to port 443 (HTTPS) if none is given.
        self._host = host if ':' in host else host + ':443'

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the caller-supplied scopes (possibly None).
        self._scopes = scopes

        # The two credential sources are mutually exclusive.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        # Resolve credentials: explicit file > explicit object > application
        # default credentials.
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )
        elif credentials is None:
            credentials, _ = google.auth.default(
                **scopes_kwargs,
                quota_project_id=quota_project_id,
            )

        # For service account credentials, prefer self-signed JWTs when the
        # caller allows it and the installed google-auth supports it.
        if (always_use_jwt_access
                and isinstance(credentials, service_account.Credentials)
                and hasattr(service_account.Credentials, "with_always_use_jwt_access")):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the resolved credentials.
        self._credentials = credentials
+ self._wrapped_methods = { + self.get_trigger: gapic_v1.method.wrap_method( + self.get_trigger, + default_timeout=None, + client_info=client_info, + ), + self.list_triggers: gapic_v1.method.wrap_method( + self.list_triggers, + default_timeout=None, + client_info=client_info, + ), + self.create_trigger: gapic_v1.method.wrap_method( + self.create_trigger, + default_timeout=None, + client_info=client_info, + ), + self.update_trigger: gapic_v1.method.wrap_method( + self.update_trigger, + default_timeout=None, + client_info=client_info, + ), + self.delete_trigger: gapic_v1.method.wrap_method( + self.delete_trigger, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def get_trigger(self) -> Callable[ + [eventarc.GetTriggerRequest], + Union[ + trigger.Trigger, + Awaitable[trigger.Trigger] + ]]: + raise NotImplementedError() + + @property + def list_triggers(self) -> Callable[ + [eventarc.ListTriggersRequest], + Union[ + eventarc.ListTriggersResponse, + Awaitable[eventarc.ListTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def create_trigger(self) -> Callable[ + [eventarc.CreateTriggerRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_trigger(self) -> Callable[ + [eventarc.UpdateTriggerRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_trigger(self) -> Callable[ + [eventarc.DeleteTriggerRequest], + Union[ + operations_pb2.Operation, + 
Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'EventarcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py new file mode 100644 index 000000000000..81e56bf8cae9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -0,0 +1,389 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import trigger +from google.longrunning import operations_pb2 # type: ignore +from .base import EventarcTransport, DEFAULT_CLIENT_INFO + + +class EventarcGrpcTransport(EventarcTransport): + """gRPC backend transport for Eventarc. + + Eventarc allows users to subscribe to various events that are + provided by Google Cloud services and forward them to supported + destinations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'eventarc.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'eventarc.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_trigger(self) -> Callable[ + [eventarc.GetTriggerRequest], + trigger.Trigger]: + r"""Return a callable for the get trigger method over gRPC. + + Get a single trigger. 
+ + Returns: + Callable[[~.GetTriggerRequest], + ~.Trigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_trigger' not in self._stubs: + self._stubs['get_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetTrigger', + request_serializer=eventarc.GetTriggerRequest.serialize, + response_deserializer=trigger.Trigger.deserialize, + ) + return self._stubs['get_trigger'] + + @property + def list_triggers(self) -> Callable[ + [eventarc.ListTriggersRequest], + eventarc.ListTriggersResponse]: + r"""Return a callable for the list triggers method over gRPC. + + List triggers. + + Returns: + Callable[[~.ListTriggersRequest], + ~.ListTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_triggers' not in self._stubs: + self._stubs['list_triggers'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListTriggers', + request_serializer=eventarc.ListTriggersRequest.serialize, + response_deserializer=eventarc.ListTriggersResponse.deserialize, + ) + return self._stubs['list_triggers'] + + @property + def create_trigger(self) -> Callable[ + [eventarc.CreateTriggerRequest], + operations_pb2.Operation]: + r"""Return a callable for the create trigger method over gRPC. + + Create a new trigger in a particular project and + location. + + Returns: + Callable[[~.CreateTriggerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_trigger' not in self._stubs: + self._stubs['create_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', + request_serializer=eventarc.CreateTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_trigger'] + + @property + def update_trigger(self) -> Callable[ + [eventarc.UpdateTriggerRequest], + operations_pb2.Operation]: + r"""Return a callable for the update trigger method over gRPC. + + Update a single trigger. + + Returns: + Callable[[~.UpdateTriggerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_trigger' not in self._stubs: + self._stubs['update_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', + request_serializer=eventarc.UpdateTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_trigger'] + + @property + def delete_trigger(self) -> Callable[ + [eventarc.DeleteTriggerRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete trigger method over gRPC. + + Delete a single trigger. + + Returns: + Callable[[~.DeleteTriggerRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_trigger' not in self._stubs: + self._stubs['delete_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', + request_serializer=eventarc.DeleteTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_trigger'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'EventarcGrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d72d2a72528d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -0,0 +1,388 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import trigger +from google.longrunning import operations_pb2 # type: ignore +from .base import EventarcTransport, DEFAULT_CLIENT_INFO +from .grpc import EventarcGrpcTransport + + +class EventarcGrpcAsyncIOTransport(EventarcTransport): + """gRPC AsyncIO backend transport for Eventarc. + + Eventarc allows users to subscribe to various events that are + provided by Google Cloud services and forward them to supported + destinations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'eventarc.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'eventarc.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. 
+ if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def get_trigger(self) -> Callable[ + [eventarc.GetTriggerRequest], + Awaitable[trigger.Trigger]]: + r"""Return a callable for the get trigger method over gRPC. + + Get a single trigger. + + Returns: + Callable[[~.GetTriggerRequest], + Awaitable[~.Trigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_trigger' not in self._stubs: + self._stubs['get_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetTrigger', + request_serializer=eventarc.GetTriggerRequest.serialize, + response_deserializer=trigger.Trigger.deserialize, + ) + return self._stubs['get_trigger'] + + @property + def list_triggers(self) -> Callable[ + [eventarc.ListTriggersRequest], + Awaitable[eventarc.ListTriggersResponse]]: + r"""Return a callable for the list triggers method over gRPC. + + List triggers. + + Returns: + Callable[[~.ListTriggersRequest], + Awaitable[~.ListTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_triggers' not in self._stubs: + self._stubs['list_triggers'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListTriggers', + request_serializer=eventarc.ListTriggersRequest.serialize, + response_deserializer=eventarc.ListTriggersResponse.deserialize, + ) + return self._stubs['list_triggers'] + + @property + def create_trigger(self) -> Callable[ + [eventarc.CreateTriggerRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create trigger method over gRPC. + + Create a new trigger in a particular project and + location. + + Returns: + Callable[[~.CreateTriggerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_trigger' not in self._stubs: + self._stubs['create_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', + request_serializer=eventarc.CreateTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_trigger'] + + @property + def update_trigger(self) -> Callable[ + [eventarc.UpdateTriggerRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update trigger method over gRPC. + + Update a single trigger. + + Returns: + Callable[[~.UpdateTriggerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_trigger' not in self._stubs: + self._stubs['update_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', + request_serializer=eventarc.UpdateTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_trigger'] + + @property + def delete_trigger(self) -> Callable[ + [eventarc.DeleteTriggerRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete trigger method over gRPC. + + Delete a single trigger. + + Returns: + Callable[[~.DeleteTriggerRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_trigger' not in self._stubs: + self._stubs['delete_trigger'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', + request_serializer=eventarc.DeleteTriggerRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_trigger'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'EventarcGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py new file mode 100644 index 000000000000..5b82d00a1937 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .eventarc import ( + CreateTriggerRequest, + DeleteTriggerRequest, + GetTriggerRequest, + ListTriggersRequest, + ListTriggersResponse, + OperationMetadata, + UpdateTriggerRequest, +) +from .trigger import ( + CloudRun, + Destination, + EventFilter, + Pubsub, + Transport, + Trigger, +) + +__all__ = ( + 'CreateTriggerRequest', + 'DeleteTriggerRequest', + 'GetTriggerRequest', + 'ListTriggersRequest', + 'ListTriggersResponse', + 'OperationMetadata', + 'UpdateTriggerRequest', + 'CloudRun', + 'Destination', + 'EventFilter', + 'Pubsub', + 'Transport', + 'Trigger', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py new file mode 100644 index 000000000000..657966bc72b0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'GetTriggerRequest', + 'ListTriggersRequest', + 'ListTriggersResponse', + 'CreateTriggerRequest', + 'UpdateTriggerRequest', + 'DeleteTriggerRequest', + 'OperationMetadata', + }, +) + + +class GetTriggerRequest(proto.Message): + r"""The request message for the GetTrigger method. + + Attributes: + name (str): + Required. The name of the trigger to get. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListTriggersRequest(proto.Message): + r"""The request message for the ListTriggers method. + + Attributes: + parent (str): + Required. The parent collection to list + triggers on. + page_size (int): + The maximum number of triggers to return on + each page. Note: The service may send fewer. + page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous ``ListTriggers`` + call to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListTriggers`` must match the call that provided the page + token. + order_by (str): + The sorting order of the resources returned. Value should be + a comma separated list of fields. The default sorting oder + is ascending. To specify descending order for a field, + append a ``desc`` suffix; for example: + ``name desc, trigger_id``. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + order_by = proto.Field( + proto.STRING, + number=4, + ) + + +class ListTriggersResponse(proto.Message): + r"""The response message for the ListTriggers method. + + Attributes: + triggers (Sequence[google.cloud.eventarc_v1.types.Trigger]): + The requested triggers, up to the number specified in + ``page_size``. + next_page_token (str): + A page token that can be sent to ListTriggers + to request the next page. If this is empty, then + there are no more pages. + unreachable (Sequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + triggers = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_trigger.Trigger, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateTriggerRequest(proto.Message): + r"""The request message for the CreateTrigger method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this trigger. + trigger (google.cloud.eventarc_v1.types.Trigger): + Required. The trigger to create. + trigger_id (str): + Required. The user-provided ID to be assigned + to the trigger. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not actually post it. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + trigger = proto.Field( + proto.MESSAGE, + number=2, + message=gce_trigger.Trigger, + ) + trigger_id = proto.Field( + proto.STRING, + number=3, + ) + validate_only = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateTriggerRequest(proto.Message): + r"""The request message for the UpdateTrigger method. + + Attributes: + trigger (google.cloud.eventarc_v1.types.Trigger): + The trigger to be updated. 
+ update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly provided + will be updated. If no field mask is provided, all provided + fields in the request will be updated. To update all fields, + provide a field mask of "*". + allow_missing (bool): + If set to true, and the trigger is not found, a new trigger + will be created. In this situation, ``update_mask`` is + ignored. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not actually post it. + """ + + trigger = proto.Field( + proto.MESSAGE, + number=1, + message=gce_trigger.Trigger, + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + allow_missing = proto.Field( + proto.BOOL, + number=3, + ) + validate_only = proto.Field( + proto.BOOL, + number=4, + ) + + +class DeleteTriggerRequest(proto.Message): + r"""The request message for the DeleteTrigger method. + + Attributes: + name (str): + Required. The name of the trigger to be + deleted. + etag (str): + If provided, the trigger will only be deleted + if the etag matches the current etag on the + resource. + allow_missing (bool): + If set to true, and the trigger is not found, + the request will succeed but no action will be + taken on the server. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not actually post it. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + etag = proto.Field( + proto.STRING, + number=2, + ) + allow_missing = proto.Field( + proto.BOOL, + number=3, + ) + validate_only = proto.Field( + proto.BOOL, + number=4, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time the operation finished + running. + target (str): + Output only. Server-defined resource path for + the target of the operation. + verb (str): + Output only. Name of the verb executed by the + operation. + status_message (str): + Output only. Human-readable status of the + operation, if any. + requested_cancellation (bool): + Output only. Identifies whether the user has requested + cancellation of the operation. Operations that have + successfully been cancelled have [Operation.error][] value + with a [google.rpc.Status.code][google.rpc.Status.code] of + 1, corresponding to ``Code.CANCELLED``. + api_version (str): + Output only. API version used to start the + operation. + """ + + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target = proto.Field( + proto.STRING, + number=3, + ) + verb = proto.Field( + proto.STRING, + number=4, + ) + status_message = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation = proto.Field( + proto.BOOL, + number=6, + ) + api_version = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py new file mode 100644 index 000000000000..e5db1c8227ef --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+        event_filters (Sequence[google.cloud.eventarc_v1.types.EventFilter]):
+            Required. The list of filters that
+            applies to event attributes. Only events that
+            match all the provided filters will be sent to
+            the destination.
+ + For Cloud Run destinations, this service account is used to + generate identity tokens when invoking the service. See + https://cloud.google.com/run/docs/triggering/pubsub-push#create-service-account + for information on how to invoke authenticated Cloud Run + services. In order to create Audit Log triggers, the service + account should also have ``roles/eventarc.eventReceiver`` + IAM role. + destination (google.cloud.eventarc_v1.types.Destination): + Required. Destination specifies where the + events should be sent to. + transport (google.cloud.eventarc_v1.types.Transport): + Optional. In order to deliver messages, + Eventarc may use other GCP products as transport + intermediary. This field contains a reference to + that transport intermediary. This information + can be used for debugging purposes. + labels (Mapping[str, str]): + Optional. User labels attached to the + triggers that can be used to group resources. + etag (str): + Output only. This checksum is computed by the + server based on the value of other fields, and + may be sent only on create requests to ensure + the client has an up-to-date value before + proceeding. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + uid = proto.Field( + proto.STRING, + number=2, + ) + create_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + event_filters = proto.RepeatedField( + proto.MESSAGE, + number=8, + message='EventFilter', + ) + service_account = proto.Field( + proto.STRING, + number=9, + ) + destination = proto.Field( + proto.MESSAGE, + number=10, + message='Destination', + ) + transport = proto.Field( + proto.MESSAGE, + number=11, + message='Transport', + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=12, + ) + etag = proto.Field( + proto.STRING, + number=99, + ) + + +class EventFilter(proto.Message): + r"""Filters events based on exact matches on the CloudEvents + attributes. + + Attributes: + attribute (str): + Required. The name of a CloudEvents + attribute. Currently, only a subset of + attributes are supported for filtering. + All triggers MUST provide a filter for the + 'type' attribute. + value (str): + Required. The value for the attribute. + """ + + attribute = proto.Field( + proto.STRING, + number=1, + ) + value = proto.Field( + proto.STRING, + number=2, + ) + + +class Destination(proto.Message): + r"""Represents a target of an invocation over HTTP. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cloud_run (google.cloud.eventarc_v1.types.CloudRun): + Cloud Run fully-managed service that receives + the events. The service should be running in the + same project of the trigger. + + This field is a member of `oneof`_ ``descriptor``. + """ + + cloud_run = proto.Field( + proto.MESSAGE, + number=1, + oneof='descriptor', + message='CloudRun', + ) + + +class Transport(proto.Message): + r"""Represents the transport intermediaries created for the + trigger in order to deliver events. 
+ + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + pubsub (google.cloud.eventarc_v1.types.Pubsub): + The Pub/Sub topic and subscription used by + Eventarc as delivery intermediary. + + This field is a member of `oneof`_ ``intermediary``. + """ + + pubsub = proto.Field( + proto.MESSAGE, + number=1, + oneof='intermediary', + message='Pubsub', + ) + + +class CloudRun(proto.Message): + r"""Represents a Cloud Run destination. + + Attributes: + service (str): + Required. The name of the Cloud Run service + being addressed. See + https://cloud.google.com/run/docs/reference/rest/v1/namespaces.services. + Only services located in the same project of the + trigger object can be addressed. + path (str): + Optional. The relative path on the Cloud Run + service the events should be sent to. + + The value must conform to the definition of URI + path segment (section 3.3 of RFC2396). Examples: + "/route", "route", "route/subroute". + region (str): + Required. The region the Cloud Run service is + deployed in. + """ + + service = proto.Field( + proto.STRING, + number=1, + ) + path = proto.Field( + proto.STRING, + number=2, + ) + region = proto.Field( + proto.STRING, + number=3, + ) + + +class Pubsub(proto.Message): + r"""Represents a Pub/Sub transport. + + Attributes: + topic (str): + Optional. The name of the Pub/Sub topic created and managed + by Eventarc system as a transport for the event delivery. + Format: ``projects/{PROJECT_ID}/topics/{TOPIC_NAME}``. + + You may set an existing topic for triggers of the type + ``google.cloud.pubsub.topic.v1.messagePublished`` only. The + topic you provide here will not be deleted by Eventarc at + trigger deletion. + subscription (str): + Output only. The name of the Pub/Sub subscription created + and managed by Eventarc system as a transport for the event + delivery. Format: + ``projects/{PROJECT_ID}/subscriptions/{SUBSCRIPTION_NAME}``. 
+ """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + subscription = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini b/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py new file mode 100644 index 000000000000..7ae686f15a04 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+nox.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+    "blacken",
+    "lint",
+    "lint_setup_py",
+]
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py new file mode 100644 index 000000000000..4e94d14f9e33 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateTrigger_async] +from google.cloud import eventarc_v1 + + +async def sample_create_trigger(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + trigger = eventarc_v1.Trigger() + trigger.name = "name_value" + trigger.event_filters.attribute = "attribute_value" + trigger.event_filters.value = "value_value" + trigger.destination.cloud_run.service = "service_value" + trigger.destination.cloud_run.region = "region_value" + + request = eventarc_v1.CreateTriggerRequest( + parent="parent_value", + trigger=trigger, + trigger_id="trigger_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_trigger(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py new file mode 100644 index 000000000000..bfbe4ab89235 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateTrigger_sync] +from google.cloud import eventarc_v1 + + +def sample_create_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + trigger = eventarc_v1.Trigger() + trigger.name = "name_value" + trigger.event_filters.attribute = "attribute_value" + trigger.event_filters.value = "value_value" + trigger.destination.cloud_run.service = "service_value" + trigger.destination.cloud_run.region = "region_value" + + request = eventarc_v1.CreateTriggerRequest( + parent="parent_value", + trigger=trigger, + trigger_id="trigger_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py new file mode 
100644 index 000000000000..d55bba5a923e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteTrigger_async] +from google.cloud import eventarc_v1 + + +async def sample_delete_trigger(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteTriggerRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_trigger(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py new file mode 100644 index 000000000000..1e421fbaeda2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteTrigger_sync] +from google.cloud import eventarc_v1 + + +def sample_delete_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteTriggerRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py new file mode 100644 index 000000000000..028b5c3732ca --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetTrigger_async] +from google.cloud import eventarc_v1 + + +async def sample_get_trigger(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetTriggerRequest( + name="name_value", + ) + + # Make the request + response = await client.get_trigger(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py new file mode 100644 index 000000000000..93b3510145c5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetTrigger_sync] +from google.cloud import eventarc_v1 + + +def sample_get_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetTriggerRequest( + name="name_value", + ) + + # Make the request + response = client.get_trigger(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py new file mode 100644 index 000000000000..5c5ed4475183 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListTriggers_async] +from google.cloud import eventarc_v1 + + +async def sample_list_triggers(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_triggers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListTriggers_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py new file mode 100644 index 000000000000..b036610806f2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTriggers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListTriggers_sync] +from google.cloud import eventarc_v1 + + +def sample_list_triggers(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListTriggersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_triggers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListTriggers_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py new file mode 100644 index 000000000000..42f8321c77db --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateTrigger_async] +from google.cloud import eventarc_v1 + + +async def sample_update_trigger(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateTriggerRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_trigger(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateTrigger_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py new file mode 100644 index 000000000000..84518de7ef52 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTrigger +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateTrigger_sync] +from google.cloud import eventarc_v1 + + +def sample_update_trigger(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateTriggerRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_trigger(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_UpdateTrigger_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json new file mode 100644 index 000000000000..e9ca79d81d95 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json @@ -0,0 +1,859 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.eventarc.v1", + "version": "v1" + } + ], + "language": "PYTHON", + 
"name": "google-cloud-eventarc" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "CreateTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_trigger" + }, + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": 
"google.cloud.eventarc_v1.EventarcClient.create_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "CreateTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_trigger" + }, + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", + "segments": [ + { + "end": 58, + "start": 27, + "type": "FULL" + }, + { + "end": 58, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 55, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 59, + "start": 56, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": 
"DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { 
+ "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 46, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "start": 47, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + 
"segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"eventarc_v1_generated_eventarc_get_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", + "shortName": "list_triggers" + }, + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_triggers", + "method": { + "fullName": 
"google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", + "shortName": "list_triggers" + }, + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateTriggerRequest" + }, + { + "name": "trigger", + "type": 
"google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_trigger" + }, + "description": "Sample for UpdateTrigger", + "file": "eventarc_v1_generated_eventarc_update_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateTrigger_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateTriggerRequest" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_trigger" + }, + "description": "Sample for UpdateTrigger", + "file": "eventarc_v1_generated_eventarc_update_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateTrigger_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_trigger_sync.py" + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py new file mode 100644 index 000000000000..ba8400a43a57 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class eventarcCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_trigger': ('parent', 'trigger', 'trigger_id', 'validate_only', ), + 'delete_trigger': ('name', 'validate_only', 'etag', 'allow_missing', ), + 'get_trigger': ('name', ), + 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', ), + 'update_trigger': ('validate_only', 'trigger', 'update_mask', 'allow_missing', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=eventarcCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the eventarc client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py new file mode 100644 index 000000000000..da806747c686 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-eventarc', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-google-cloud-eventarc", + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', + 'proto-plus >= 1.19.7', + ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py new file mode 100644 index 
000000000000..231bc125017b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py new file mode 100644 index 000000000000..231bc125017b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..231bc125017b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py new file mode 100644 index 000000000000..231bc125017b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py new file mode 100644 index 000000000000..4ea2713ef3bc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -0,0 +1,2598 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.eventarc_v1.services.eventarc import EventarcAsyncClient +from google.cloud.eventarc_v1.services.eventarc import EventarcClient +from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.services.eventarc import transports +from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import trigger +from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert EventarcClient._get_default_mtls_endpoint(None) is None + assert EventarcClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert EventarcClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert EventarcClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert EventarcClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert EventarcClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (EventarcClient, "grpc"), + (EventarcAsyncClient, "grpc_asyncio"), +]) +def test_eventarc_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'eventarc.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.EventarcGrpcTransport, "grpc"), + (transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_eventarc_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + 
creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (EventarcClient, "grpc"), + (EventarcAsyncClient, "grpc_asyncio"), +]) +def test_eventarc_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'eventarc.googleapis.com:443' + ) + + +def test_eventarc_client_get_transport_class(): + transport = EventarcClient.get_transport_class() + available_transports = [ + transports.EventarcGrpcTransport, + ] + assert transport in available_transports + + transport = EventarcClient.get_transport_class("grpc") + assert transport == transports.EventarcGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc"), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +def test_eventarc_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(EventarcClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(EventarcClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc", "true"), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (EventarcClient, transports.EventarcGrpcTransport, "grpc", "false"), + (EventarcAsyncClient, 
transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class", [ + EventarcClient, EventarcAsyncClient +]) +@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc"), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_eventarc_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_eventarc_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +def test_eventarc_client_client_options_from_dict(): + with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = EventarcClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_eventarc_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "eventarc.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="eventarc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetTriggerRequest, + dict, +]) +def test_get_trigger(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + etag='etag_value', + ) + response = client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.etag == 'etag_value' + + +def test_get_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + client.get_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest() + +@pytest.mark.asyncio +async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + etag='etag_value', + )) + response = await client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.etag == 'etag_value' + + +@pytest.mark.asyncio +async def test_get_trigger_async_from_dict(): + await test_get_trigger_async(request_type=dict) + + +def test_get_trigger_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + call.return_value = trigger.Trigger() + client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_trigger_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger()) + await client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_trigger_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = trigger.Trigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_trigger_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_trigger( + eventarc.GetTriggerRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_trigger_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = trigger.Trigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_trigger_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_trigger( + eventarc.GetTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListTriggersRequest, + dict, +]) +def test_list_triggers(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTriggersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_triggers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + client.list_triggers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest() + +@pytest.mark.asyncio +async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTriggersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_triggers_async_from_dict(): + await test_list_triggers_async(request_type=dict) + + +def test_list_triggers_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = eventarc.ListTriggersRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + call.return_value = eventarc.ListTriggersResponse() + client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_triggers_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListTriggersRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse()) + await client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_triggers_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = eventarc.ListTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_triggers_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_triggers( + eventarc.ListTriggersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_triggers_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_triggers_flattened_error_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_triggers(
+            eventarc.ListTriggersRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_triggers_pager(transport_name: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_triggers),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[],
+                next_page_token='def',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_triggers(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, trigger.Trigger)
+                   for i in results)
+def test_list_triggers_pages(transport_name: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_triggers),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[],
+                next_page_token='def',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_triggers(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_triggers_async_pager():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_triggers),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                ],
+                next_page_token='abc',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[],
+                next_page_token='def',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            eventarc.ListTriggersResponse(
+                triggers=[
+                    trigger.Trigger(),
+                    trigger.Trigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_triggers(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager: # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, trigger.Trigger)
+                   for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_triggers_async_pages():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_triggers),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + trigger.Trigger(), + ], + next_page_token='abc', + ), + eventarc.ListTriggersResponse( + triggers=[], + next_page_token='def', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + ], + next_page_token='ghi', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_triggers(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateTriggerRequest, + dict, +]) +def test_create_trigger(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + client.create_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest() + +@pytest.mark.asyncio +async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_trigger_async_from_dict(): + await test_create_trigger_async(request_type=dict) + + +def test_create_trigger_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = eventarc.CreateTriggerRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_trigger_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateTriggerRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_trigger_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_trigger( + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = gce_trigger.Trigger(name='name_value') + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + + +def test_create_trigger_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_trigger( + eventarc.CreateTriggerRequest(), + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + +@pytest.mark.asyncio +async def test_create_trigger_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_trigger( + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].trigger + mock_val = gce_trigger.Trigger(name='name_value') + assert arg == mock_val + arg = args[0].trigger_id + mock_val = 'trigger_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_trigger_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_trigger( + eventarc.CreateTriggerRequest(), + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateTriggerRequest, + dict, +]) +def test_update_trigger(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + client.update_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest() + +@pytest.mark.asyncio +async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_trigger_async_from_dict(): + await test_update_trigger_async(request_type=dict) + + +def test_update_trigger_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateTriggerRequest() + + request.trigger.name = 'trigger.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'trigger.name=trigger.name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_trigger_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateTriggerRequest() + + request.trigger.name = 'trigger.name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'trigger.name=trigger.name/value', + ) in kw['metadata'] + + +def test_update_trigger_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_trigger( + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].trigger + mock_val = gce_trigger.Trigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + arg = args[0].allow_missing + mock_val = True + assert arg == mock_val + + +def test_update_trigger_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_trigger( + eventarc.UpdateTriggerRequest(), + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + +@pytest.mark.asyncio +async def test_update_trigger_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_trigger( + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].trigger + mock_val = gce_trigger.Trigger(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + arg = args[0].allow_missing + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_trigger_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_trigger( + eventarc.UpdateTriggerRequest(), + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + client.delete_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest() + +@pytest.mark.asyncio +async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_trigger_async_from_dict(): + await test_delete_trigger_async(request_type=dict) + + +def test_delete_trigger_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_trigger_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_trigger_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_trigger( + name='name_value', + allow_missing=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].allow_missing + mock_val = True + assert arg == mock_val + + +def test_delete_trigger_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_trigger( + eventarc.DeleteTriggerRequest(), + name='name_value', + allow_missing=True, + ) + +@pytest.mark.asyncio +async def test_delete_trigger_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_trigger( + name='name_value', + allow_missing=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].allow_missing + mock_val = True + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_trigger_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_trigger( + eventarc.DeleteTriggerRequest(), + name='name_value', + allow_missing=True, + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EventarcClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = EventarcClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = EventarcClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = EventarcClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.EventarcGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.EventarcGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = EventarcClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EventarcGrpcTransport, + ) + +def test_eventarc_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_eventarc_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'get_trigger', + 'list_triggers', + 'create_trigger', + 'update_trigger', + 'delete_trigger', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_eventarc_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + 
transport = transports.EventarcTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_eventarc_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport() + adc.assert_called_once() + + +def test_eventarc_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EventarcClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + ], +) +def test_eventarc_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.EventarcGrpcTransport, grpc_helpers), + (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_eventarc_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "eventarc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="eventarc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_eventarc_host_no_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_eventarc_host_with_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:8000' + ) + +def test_eventarc_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', 
grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_eventarc_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + 
grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_eventarc_grpc_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_eventarc_grpc_lro_async_client(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_service_path(): + expected = "*".format() + actual = EventarcClient.service_path() + assert expected == actual + + +def test_parse_service_path(): + expected = { + } + path = EventarcClient.service_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_path(path) + assert expected == actual + +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + actual = EventarcClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = EventarcClient.service_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_service_account_path(path) + assert expected == actual + +def test_trigger_path(): + project = "oyster" + location = "nudibranch" + trigger = "cuttlefish" + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + actual = EventarcClient.trigger_path(project, location, trigger) + assert expected == actual + + +def test_parse_trigger_path(): + expected = { + "project": "mussel", + "location": "winkle", + "trigger": "nautilus", + } + path = EventarcClient.trigger_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_trigger_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = EventarcClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = EventarcClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = EventarcClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = EventarcClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = EventarcClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = EventarcClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = EventarcClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = EventarcClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = EventarcClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = EventarcClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + transport_class = EventarcClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (EventarcClient, transports.EventarcGrpcTransport), + (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index a2e2dc38b313..48ad8d51fee8 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -949,12 +949,12 @@ def test_differently_named_extended_operation_fields( def test_transport_safe_name(): unsafe_methods = { name: make_method(name=name) - for name in ["CreateChannel", "GrpcChannel", "OperationsClient"] + for name in ["CreateChannel", "GrpcChannel", "OperationsClient", "import", "Import", "Raise"] } safe_methods = { name: make_method(name=name) - for name in ["Call", "Put", "Hold", "Raise"] + for name in ["Call", "Put", "Hold"] } for name, method in safe_methods.items(): @@ -962,3 +962,21 @@ def test_transport_safe_name(): for name, method in 
unsafe_methods.items(): assert method.transport_safe_name == f"{name}_" + + +def test_safe_name(): + unsafe_methods = { + name: make_method(name=name) + for name in ["import", "Import", "Raise"] + } + + safe_methods = { + name: make_method(name=name) + for name in ["Call", "Put", "Hold"] + } + + for name, method in safe_methods.items(): + assert method.safe_name == name + + for name, method in unsafe_methods.items(): + assert method.safe_name == f"{name}_" From 8670330e9d98eed200005012a5a575c5fe0a957c Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Wed, 20 Apr 2022 16:52:53 -0700 Subject: [PATCH 0798/1339] fix: fixes bug when an annotation path in an http rule does not have =. (#1284) * fix: fixes bug when field header does not have =. * fix: fixes bug with incorrect mock value. * chore: adds url. * fix: fix mypy * chore: update ads templates. * chore: updates golden files. * chore: update golden files Co-authored-by: Anthonios Partheniou --- .../%name_%version/%sub/test_%service.py.j2 | 4 +- .../gapic-generator/gapic/schema/wrappers.py | 8 +- .../gapic/%name_%version/%sub/test_macros.j2 | 8 +- .../fragments/test_required_non_string.proto | 2 +- .../unit/gapic/asset_v1/test_asset_service.py | 96 ++++----- .../credentials_v1/test_iam_credentials.py | 32 +-- .../unit/gapic/eventarc_v1/test_eventarc.py | 40 ++-- .../logging_v2/test_config_service_v2.py | 184 +++++++++--------- .../logging_v2/test_logging_service_v2.py | 16 +- .../logging_v2/test_metrics_service_v2.py | 40 ++-- .../unit/gapic/redis_v1/test_cloud_redis.py | 72 +++---- .../unit/schema/wrappers/test_message.py | 1 + .../tests/unit/schema/wrappers/test_method.py | 7 + 13 files changed, 262 insertions(+), 248 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 52fd7959a829..4ef22474befd 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -627,7 +627,7 @@ def test_{{ method_name }}_field_headers(): request = {{ method.input.ident }}() {% for field_header in method.field_headers %} - request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' + request.{{ field_header.disambiguated }} = {{ method.input.get_field(field_header.disambiguated).mock_value }} {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. @@ -655,7 +655,7 @@ def test_{{ method_name }}_field_headers(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ field_header.raw }}/value + {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim("'") }} {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 34c0b665010c..816b4be91750 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -618,6 +618,10 @@ def get_field(self, *field_path: str, KeyError: If a repeated field is used in the non-terminal position in the path. """ + # This covers the case when field_path is a string path. + if len(field_path) == 1 and '.' in field_path[0]: + field_path = tuple(field_path[0].split('.')) + # If collisions are not explicitly specified, retrieve them # from this message's address. # This ensures that calls to `get_field` will return a field with @@ -1211,7 +1215,9 @@ def field_headers(self) -> Sequence[FieldHeader]: http = self.options.Extensions[annotations_pb2.http] - pattern = re.compile(r'\{([a-z][\w\d_.]+)=') + # Copied from Node generator. 
+ # https://github.com/googleapis/gapic-generator-typescript/blob/3ab47f04678d72171ddf25b439d50f6dfb44584c/typescript/src/schema/proto.ts#L587 + pattern = re.compile(r'{(.*?)[=}]') potential_verbs = [ http.get, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 7995c2ee8534..c190979c8402 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -252,7 +252,7 @@ def test_{{ method_name }}_field_headers(): request = {{ method.input.ident }}() {% for field_header in method.field_headers %} - request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' + request.{{ field_header.disambiguated }} = {{ method.input.get_field(field_header.disambiguated).mock_value }} {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. @@ -280,7 +280,7 @@ def test_{{ method_name }}_field_headers(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ field_header.raw }}/value + {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim("'") }} {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] @@ -298,7 +298,7 @@ async def test_{{ method_name }}_field_headers_async(): request = {{ method.input.ident }}() {% for field_header in method.field_headers %} - request.{{ field_header.disambiguated }} = '{{ field_header.raw }}/value' + request.{{ field_header.disambiguated }} = {{ method.input.get_field(field_header.disambiguated).mock_value }} {% endfor %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -327,7 +327,7 @@ async def test_{{ method_name }}_field_headers_async(): assert ( 'x-goog-request-params', '{% for field_header in method.field_headers -%} - {{ field_header.raw }}={{ field_header.raw }}/value + {{ field_header.raw }}={{ method.input.get_field(field_header.disambiguated).mock_value|trim("'") }} {%- if not loop.last %}&{% endif %} {%- endfor -%}', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/fragments/test_required_non_string.proto b/packages/gapic-generator/tests/fragments/test_required_non_string.proto index fb055d60193b..7186a83c879d 100644 --- a/packages/gapic-generator/tests/fragments/test_required_non_string.proto +++ b/packages/gapic-generator/tests/fragments/test_required_non_string.proto @@ -33,7 +33,7 @@ service RestService { message MethodRequest { int32 mass_kg = 1 [(google.api.field_behavior) = REQUIRED]; - float length_cm = 2 [(google.api.field_behavior) = REQUIRED]; + int32 length_cm = 2 [(google.api.field_behavior) = REQUIRED]; } message MethodResponse { diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 4a987e861e44..42a5cc00753a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -587,7 +587,7 @@ def test_export_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -605,7 +605,7 @@ def test_export_assets_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -619,7 +619,7 @@ async def test_export_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ExportAssetsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -637,7 +637,7 @@ async def test_export_assets_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -737,7 +737,7 @@ def test_list_assets_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -755,7 +755,7 @@ def test_list_assets_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -769,7 +769,7 @@ async def test_list_assets_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.ListAssetsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -787,7 +787,7 @@ async def test_list_assets_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1153,7 +1153,7 @@ def test_batch_get_assets_history_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1171,7 +1171,7 @@ def test_batch_get_assets_history_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1185,7 +1185,7 @@ async def test_batch_get_assets_history_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.BatchGetAssetsHistoryRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1203,7 +1203,7 @@ async def test_batch_get_assets_history_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1315,7 +1315,7 @@ def test_create_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1333,7 +1333,7 @@ def test_create_feed_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1347,7 +1347,7 @@ async def test_create_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.CreateFeedRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1365,7 +1365,7 @@ async def test_create_feed_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1559,7 +1559,7 @@ def test_get_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1577,7 +1577,7 @@ def test_get_feed_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1591,7 +1591,7 @@ async def test_get_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.GetFeedRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1609,7 +1609,7 @@ async def test_get_feed_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1787,7 +1787,7 @@ def test_list_feeds_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.ListFeedsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1805,7 +1805,7 @@ def test_list_feeds_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1819,7 +1819,7 @@ async def test_list_feeds_field_headers_async(): # a field header. Set these to a non-empty value. 
request = asset_service.ListFeedsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1837,7 +1837,7 @@ async def test_list_feeds_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -2031,7 +2031,7 @@ def test_update_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.UpdateFeedRequest() - request.feed.name = 'feed.name/value' + request.feed.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2049,7 +2049,7 @@ def test_update_feed_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'feed.name=feed.name/value', + 'feed.name=name_value', ) in kw['metadata'] @@ -2063,7 +2063,7 @@ async def test_update_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.UpdateFeedRequest() - request.feed.name = 'feed.name/value' + request.feed.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2081,7 +2081,7 @@ async def test_update_feed_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'feed.name=feed.name/value', + 'feed.name=name_value', ) in kw['metadata'] @@ -2257,7 +2257,7 @@ def test_delete_feed_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2275,7 +2275,7 @@ def test_delete_feed_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2289,7 +2289,7 @@ async def test_delete_feed_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.DeleteFeedRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2307,7 +2307,7 @@ async def test_delete_feed_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2489,7 +2489,7 @@ def test_search_all_resources_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope/value' + request.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2507,7 +2507,7 @@ def test_search_all_resources_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'scope=scope/value', + 'scope=scope_value', ) in kw['metadata'] @@ -2521,7 +2521,7 @@ async def test_search_all_resources_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllResourcesRequest() - request.scope = 'scope/value' + request.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2539,7 +2539,7 @@ async def test_search_all_resources_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'scope=scope/value', + 'scope=scope_value', ) in kw['metadata'] @@ -2929,7 +2929,7 @@ def test_search_all_iam_policies_field_headers(): # a field header. Set these to a non-empty value. 
request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope/value' + request.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2947,7 +2947,7 @@ def test_search_all_iam_policies_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'scope=scope/value', + 'scope=scope_value', ) in kw['metadata'] @@ -2961,7 +2961,7 @@ async def test_search_all_iam_policies_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.SearchAllIamPoliciesRequest() - request.scope = 'scope/value' + request.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2979,7 +2979,7 @@ async def test_search_all_iam_policies_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'scope=scope/value', + 'scope=scope_value', ) in kw['metadata'] @@ -3359,7 +3359,7 @@ def test_analyze_iam_policy_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'analysis_query.scope/value' + request.analysis_query.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3377,7 +3377,7 @@ def test_analyze_iam_policy_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'analysis_query.scope=analysis_query.scope/value', + 'analysis_query.scope=scope_value', ) in kw['metadata'] @@ -3391,7 +3391,7 @@ async def test_analyze_iam_policy_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyRequest() - request.analysis_query.scope = 'analysis_query.scope/value' + request.analysis_query.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3409,7 +3409,7 @@ async def test_analyze_iam_policy_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'analysis_query.scope=analysis_query.scope/value', + 'analysis_query.scope=scope_value', ) in kw['metadata'] @@ -3505,7 +3505,7 @@ def test_analyze_iam_policy_longrunning_field_headers(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'analysis_query.scope/value' + request.analysis_query.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3523,7 +3523,7 @@ def test_analyze_iam_policy_longrunning_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'analysis_query.scope=analysis_query.scope/value', + 'analysis_query.scope=scope_value', ) in kw['metadata'] @@ -3537,7 +3537,7 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): # a field header. Set these to a non-empty value. request = asset_service.AnalyzeIamPolicyLongrunningRequest() - request.analysis_query.scope = 'analysis_query.scope/value' + request.analysis_query.scope = 'scope_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3555,7 +3555,7 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'analysis_query.scope=analysis_query.scope/value', + 'analysis_query.scope=scope_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index f6a1dad6418a..df3f450167fc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -582,7 +582,7 @@ def test_generate_access_token_field_headers(): # a field header. Set these to a non-empty value. request = common.GenerateAccessTokenRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -600,7 +600,7 @@ def test_generate_access_token_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -614,7 +614,7 @@ async def test_generate_access_token_field_headers_async(): # a field header. Set these to a non-empty value. request = common.GenerateAccessTokenRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -632,7 +632,7 @@ async def test_generate_access_token_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -840,7 +840,7 @@ def test_generate_id_token_field_headers(): # a field header. Set these to a non-empty value. 
request = common.GenerateIdTokenRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -858,7 +858,7 @@ def test_generate_id_token_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -872,7 +872,7 @@ async def test_generate_id_token_field_headers_async(): # a field header. Set these to a non-empty value. request = common.GenerateIdTokenRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -890,7 +890,7 @@ async def test_generate_id_token_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1106,7 +1106,7 @@ def test_sign_blob_field_headers(): # a field header. Set these to a non-empty value. request = common.SignBlobRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1124,7 +1124,7 @@ def test_sign_blob_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1138,7 +1138,7 @@ async def test_sign_blob_field_headers_async(): # a field header. Set these to a non-empty value. request = common.SignBlobRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1156,7 +1156,7 @@ async def test_sign_blob_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1362,7 +1362,7 @@ def test_sign_jwt_field_headers(): # a field header. Set these to a non-empty value. 
request = common.SignJwtRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1380,7 +1380,7 @@ def test_sign_jwt_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1394,7 +1394,7 @@ async def test_sign_jwt_field_headers_async(): # a field header. Set these to a non-empty value. request = common.SignJwtRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1412,7 +1412,7 @@ async def test_sign_jwt_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 4ea2713ef3bc..5f77a567bab8 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -602,7 +602,7 @@ def test_get_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.GetTriggerRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -620,7 +620,7 @@ def test_get_trigger_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -634,7 +634,7 @@ async def test_get_trigger_field_headers_async(): # a field header. Set these to a non-empty value. 
request = eventarc.GetTriggerRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -652,7 +652,7 @@ async def test_get_trigger_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -838,7 +838,7 @@ def test_list_triggers_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.ListTriggersRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -856,7 +856,7 @@ def test_list_triggers_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -870,7 +870,7 @@ async def test_list_triggers_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.ListTriggersRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -888,7 +888,7 @@ async def test_list_triggers_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1254,7 +1254,7 @@ def test_create_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.CreateTriggerRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1272,7 +1272,7 @@ def test_create_trigger_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1286,7 +1286,7 @@ async def test_create_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.CreateTriggerRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1304,7 +1304,7 @@ async def test_create_trigger_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1504,7 +1504,7 @@ def test_update_trigger_field_headers(): # a field header. Set these to a non-empty value. request = eventarc.UpdateTriggerRequest() - request.trigger.name = 'trigger.name/value' + request.trigger.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1522,7 +1522,7 @@ def test_update_trigger_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'trigger.name=trigger.name/value', + 'trigger.name=name_value', ) in kw['metadata'] @@ -1536,7 +1536,7 @@ async def test_update_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.UpdateTriggerRequest() - request.trigger.name = 'trigger.name/value' + request.trigger.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1554,7 +1554,7 @@ async def test_update_trigger_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'trigger.name=trigger.name/value', + 'trigger.name=name_value', ) in kw['metadata'] @@ -1754,7 +1754,7 @@ def test_delete_trigger_field_headers(): # a field header. Set these to a non-empty value. 
request = eventarc.DeleteTriggerRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1772,7 +1772,7 @@ def test_delete_trigger_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1786,7 +1786,7 @@ async def test_delete_trigger_field_headers_async(): # a field header. Set these to a non-empty value. request = eventarc.DeleteTriggerRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1804,7 +1804,7 @@ async def test_delete_trigger_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index b385a5a02341..1a8d1717e8b4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -586,7 +586,7 @@ def test_list_buckets_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -604,7 +604,7 @@ def test_list_buckets_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -618,7 +618,7 @@ async def test_list_buckets_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListBucketsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -636,7 +636,7 @@ async def test_list_buckets_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1022,7 +1022,7 @@ def test_get_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1040,7 +1040,7 @@ def test_get_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1054,7 +1054,7 @@ async def test_get_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1072,7 +1072,7 @@ async def test_get_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1188,7 +1188,7 @@ def test_create_bucket_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateBucketRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1206,7 +1206,7 @@ def test_create_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1220,7 +1220,7 @@ async def test_create_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateBucketRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1238,7 +1238,7 @@ async def test_create_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1354,7 +1354,7 @@ def test_update_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1372,7 +1372,7 @@ def test_update_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1386,7 +1386,7 @@ async def test_update_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1404,7 +1404,7 @@ async def test_update_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1498,7 +1498,7 @@ def test_delete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1516,7 +1516,7 @@ def test_delete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1530,7 +1530,7 @@ async def test_delete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1548,7 +1548,7 @@ async def test_delete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1642,7 +1642,7 @@ def test_undelete_bucket_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UndeleteBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1660,7 +1660,7 @@ def test_undelete_bucket_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1674,7 +1674,7 @@ async def test_undelete_bucket_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.UndeleteBucketRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1692,7 +1692,7 @@ async def test_undelete_bucket_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1792,7 +1792,7 @@ def test_list_views_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1810,7 +1810,7 @@ def test_list_views_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1824,7 +1824,7 @@ async def test_list_views_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListViewsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1842,7 +1842,7 @@ async def test_list_views_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -2220,7 +2220,7 @@ def test_get_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2238,7 +2238,7 @@ def test_get_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2252,7 +2252,7 @@ async def test_get_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetViewRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2270,7 +2270,7 @@ async def test_get_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2378,7 +2378,7 @@ def test_create_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2396,7 +2396,7 @@ def test_create_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -2410,7 +2410,7 @@ async def test_create_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateViewRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2428,7 +2428,7 @@ async def test_create_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -2536,7 +2536,7 @@ def test_update_view_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.UpdateViewRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2554,7 +2554,7 @@ def test_update_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2568,7 +2568,7 @@ async def test_update_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateViewRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2586,7 +2586,7 @@ async def test_update_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2680,7 +2680,7 @@ def test_delete_view_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2698,7 +2698,7 @@ def test_delete_view_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2712,7 +2712,7 @@ async def test_delete_view_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteViewRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2730,7 +2730,7 @@ async def test_delete_view_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2830,7 +2830,7 @@ def test_list_sinks_field_headers(): # a field header. 
Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2848,7 +2848,7 @@ def test_list_sinks_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -2862,7 +2862,7 @@ async def test_list_sinks_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListSinksRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2880,7 +2880,7 @@ async def test_list_sinks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -3279,7 +3279,7 @@ def test_get_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name/value' + request.sink_name = 'sink_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3297,7 +3297,7 @@ def test_get_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'sink_name=sink_name/value', + 'sink_name=sink_name_value', ) in kw['metadata'] @@ -3311,7 +3311,7 @@ async def test_get_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetSinkRequest() - request.sink_name = 'sink_name/value' + request.sink_name = 'sink_name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3329,7 +3329,7 @@ async def test_get_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'sink_name=sink_name/value', + 'sink_name=sink_name_value', ) in kw['metadata'] @@ -3540,7 +3540,7 @@ def test_create_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3558,7 +3558,7 @@ def test_create_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -3572,7 +3572,7 @@ async def test_create_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateSinkRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3590,7 +3590,7 @@ async def test_create_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -3811,7 +3811,7 @@ def test_update_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name/value' + request.sink_name = 'sink_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3829,7 +3829,7 @@ def test_update_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'sink_name=sink_name/value', + 'sink_name=sink_name_value', ) in kw['metadata'] @@ -3843,7 +3843,7 @@ async def test_update_sink_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.UpdateSinkRequest() - request.sink_name = 'sink_name/value' + request.sink_name = 'sink_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3861,7 +3861,7 @@ async def test_update_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'sink_name=sink_name/value', + 'sink_name=sink_name_value', ) in kw['metadata'] @@ -4057,7 +4057,7 @@ def test_delete_sink_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name/value' + request.sink_name = 'sink_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4075,7 +4075,7 @@ def test_delete_sink_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'sink_name=sink_name/value', + 'sink_name=sink_name_value', ) in kw['metadata'] @@ -4089,7 +4089,7 @@ async def test_delete_sink_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteSinkRequest() - request.sink_name = 'sink_name/value' + request.sink_name = 'sink_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4107,7 +4107,7 @@ async def test_delete_sink_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'sink_name=sink_name/value', + 'sink_name=sink_name_value', ) in kw['metadata'] @@ -4289,7 +4289,7 @@ def test_list_exclusions_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -4307,7 +4307,7 @@ def test_list_exclusions_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -4321,7 +4321,7 @@ async def test_list_exclusions_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.ListExclusionsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4339,7 +4339,7 @@ async def test_list_exclusions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -4721,7 +4721,7 @@ def test_get_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4739,7 +4739,7 @@ def test_get_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -4753,7 +4753,7 @@ async def test_get_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.GetExclusionRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4771,7 +4771,7 @@ async def test_get_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -4965,7 +4965,7 @@ def test_create_exclusion_field_headers(): # a field header. Set these to a non-empty value. 
request = logging_config.CreateExclusionRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4983,7 +4983,7 @@ def test_create_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -4997,7 +4997,7 @@ async def test_create_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.CreateExclusionRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5015,7 +5015,7 @@ async def test_create_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -5219,7 +5219,7 @@ def test_update_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5237,7 +5237,7 @@ def test_update_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5251,7 +5251,7 @@ async def test_update_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateExclusionRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -5269,7 +5269,7 @@ async def test_update_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5465,7 +5465,7 @@ def test_delete_exclusion_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5483,7 +5483,7 @@ def test_delete_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5497,7 +5497,7 @@ async def test_delete_exclusion_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.DeleteExclusionRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5515,7 +5515,7 @@ async def test_delete_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5705,7 +5705,7 @@ def test_get_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.GetCmekSettingsRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5723,7 +5723,7 @@ def test_get_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5737,7 +5737,7 @@ async def test_get_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. 
request = logging_config.GetCmekSettingsRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5755,7 +5755,7 @@ async def test_get_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5863,7 +5863,7 @@ def test_update_cmek_settings_field_headers(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -5881,7 +5881,7 @@ def test_update_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -5895,7 +5895,7 @@ async def test_update_cmek_settings_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_config.UpdateCmekSettingsRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -5913,7 +5913,7 @@ async def test_update_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 93bb1808e608..cf4641872844 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -587,7 +587,7 @@ def test_delete_log_field_headers(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name/value' + request.log_name = 'log_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -605,7 +605,7 @@ def test_delete_log_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'log_name=log_name/value', + 'log_name=log_name_value', ) in kw['metadata'] @@ -619,7 +619,7 @@ async def test_delete_log_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.DeleteLogRequest() - request.log_name = 'log_name/value' + request.log_name = 'log_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -637,7 +637,7 @@ async def test_delete_log_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'log_name=log_name/value', + 'log_name=log_name_value', ) in kw['metadata'] @@ -1660,7 +1660,7 @@ def test_list_logs_field_headers(): # a field header. Set these to a non-empty value. 
request = logging.ListLogsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1678,7 +1678,7 @@ def test_list_logs_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1692,7 +1692,7 @@ async def test_list_logs_field_headers_async(): # a field header. Set these to a non-empty value. request = logging.ListLogsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1710,7 +1710,7 @@ async def test_list_logs_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 4e0e72937cbb..b5655574817d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -591,7 +591,7 @@ def test_list_log_metrics_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -609,7 +609,7 @@ def test_list_log_metrics_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -623,7 +623,7 @@ async def test_list_log_metrics_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.ListLogMetricsRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -641,7 +641,7 @@ async def test_list_log_metrics_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1027,7 +1027,7 @@ def test_get_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name/value' + request.metric_name = 'metric_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1045,7 +1045,7 @@ def test_get_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'metric_name=metric_name/value', + 'metric_name=metric_name_value', ) in kw['metadata'] @@ -1059,7 +1059,7 @@ async def test_get_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.GetLogMetricRequest() - request.metric_name = 'metric_name/value' + request.metric_name = 'metric_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1077,7 +1077,7 @@ async def test_get_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'metric_name=metric_name/value', + 'metric_name=metric_name_value', ) in kw['metadata'] @@ -1275,7 +1275,7 @@ def test_create_log_metric_field_headers(): # a field header. 
Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1293,7 +1293,7 @@ def test_create_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1307,7 +1307,7 @@ async def test_create_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.CreateLogMetricRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1325,7 +1325,7 @@ async def test_create_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1533,7 +1533,7 @@ def test_update_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name/value' + request.metric_name = 'metric_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1551,7 +1551,7 @@ def test_update_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'metric_name=metric_name/value', + 'metric_name=metric_name_value', ) in kw['metadata'] @@ -1565,7 +1565,7 @@ async def test_update_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.UpdateLogMetricRequest() - request.metric_name = 'metric_name/value' + request.metric_name = 'metric_name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1583,7 +1583,7 @@ async def test_update_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'metric_name=metric_name/value', + 'metric_name=metric_name_value', ) in kw['metadata'] @@ -1769,7 +1769,7 @@ def test_delete_log_metric_field_headers(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name/value' + request.metric_name = 'metric_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1787,7 +1787,7 @@ def test_delete_log_metric_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'metric_name=metric_name/value', + 'metric_name=metric_name_value', ) in kw['metadata'] @@ -1801,7 +1801,7 @@ async def test_delete_log_metric_field_headers_async(): # a field header. Set these to a non-empty value. request = logging_metrics.DeleteLogMetricRequest() - request.metric_name = 'metric_name/value' + request.metric_name = 'metric_name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1819,7 +1819,7 @@ async def test_delete_log_metric_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'metric_name=metric_name/value', + 'metric_name=metric_name_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 1273022f176e..84ef6b505a47 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -592,7 +592,7 @@ def test_list_instances_field_headers(): # a field header. 
Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -610,7 +610,7 @@ def test_list_instances_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -624,7 +624,7 @@ async def test_list_instances_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ListInstancesRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -642,7 +642,7 @@ async def test_list_instances_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1072,7 +1072,7 @@ def test_get_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1090,7 +1090,7 @@ def test_get_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1104,7 +1104,7 @@ async def test_get_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.GetInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1122,7 +1122,7 @@ async def test_get_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1300,7 +1300,7 @@ def test_create_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1318,7 +1318,7 @@ def test_create_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1332,7 +1332,7 @@ async def test_create_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.CreateInstanceRequest() - request.parent = 'parent/value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1350,7 +1350,7 @@ async def test_create_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'parent=parent/value', + 'parent=parent_value', ) in kw['metadata'] @@ -1550,7 +1550,7 @@ def test_update_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'instance.name/value' + request.instance.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1568,7 +1568,7 @@ def test_update_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'instance.name=instance.name/value', + 'instance.name=name_value', ) in kw['metadata'] @@ -1582,7 +1582,7 @@ async def test_update_instance_field_headers_async(): # a field header. Set these to a non-empty value. 
request = cloud_redis.UpdateInstanceRequest() - request.instance.name = 'instance.name/value' + request.instance.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1600,7 +1600,7 @@ async def test_update_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'instance.name=instance.name/value', + 'instance.name=name_value', ) in kw['metadata'] @@ -1790,7 +1790,7 @@ def test_upgrade_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.UpgradeInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1808,7 +1808,7 @@ def test_upgrade_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -1822,7 +1822,7 @@ async def test_upgrade_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.UpgradeInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1840,7 +1840,7 @@ async def test_upgrade_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2030,7 +2030,7 @@ def test_import_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ImportInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2048,7 +2048,7 @@ def test_import_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2062,7 +2062,7 @@ async def test_import_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ImportInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2080,7 +2080,7 @@ async def test_import_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2270,7 +2270,7 @@ def test_export_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.ExportInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2288,7 +2288,7 @@ def test_export_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2302,7 +2302,7 @@ async def test_export_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.ExportInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2320,7 +2320,7 @@ async def test_export_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2510,7 +2510,7 @@ def test_failover_instance_field_headers(): # a field header. Set these to a non-empty value. 
request = cloud_redis.FailoverInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2528,7 +2528,7 @@ def test_failover_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2542,7 +2542,7 @@ async def test_failover_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.FailoverInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2560,7 +2560,7 @@ async def test_failover_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2750,7 +2750,7 @@ def test_delete_instance_field_headers(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2768,7 +2768,7 @@ def test_delete_instance_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] @@ -2782,7 +2782,7 @@ async def test_delete_instance_field_headers_async(): # a field header. Set these to a non-empty value. request = cloud_redis.DeleteInstanceRequest() - request.name = 'name/value' + request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2800,7 +2800,7 @@ async def test_delete_instance_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name/value', + 'name=name_value', ) in kw['metadata'] diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 9e9baf4e117a..a54a619e9f35 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -154,6 +154,7 @@ def test_get_field_recursive(): # Assert that a recusive retrieval works. assert outer.get_field('inner', 'zero') == inner_fields[0] assert outer.get_field('inner', 'one') == inner_fields[1] + assert outer.get_field('inner.one') == inner_fields[1] def test_get_field_nested_not_found_error(): diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 48ad8d51fee8..70bc769b8051 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -299,6 +299,13 @@ def test_method_field_headers_present(): assert method.field_headers[0].raw == 'object' assert method.field_headers[0].disambiguated == 'object_' + # test w/o equal sign + rule = http_pb2.HttpRule(**{v: '/v1/{parent}/topics'}) + method = make_method('DoSomething', http_rule=rule) + assert method.field_headers == (wrappers.FieldHeader('parent'),) + assert method.field_headers[0].raw == 'parent' + assert method.field_headers[0].disambiguated == 'parent' + def test_method_routing_rule(): routing_rule = routing_pb2.RoutingRule() From 43e2659fab80020ed3a16ae2378a9c074b55cc2b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 20 Apr 2022 20:07:54 -0400 Subject: [PATCH 0799/1339] chore: use list() in tests (#1281) * chore: use list() in tests * 
update golden files --- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 2 +- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 6 +++--- .../tests/unit/gapic/eventarc_v1/test_eventarc.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 8 ++++---- .../unit/gapic/logging_v2/test_logging_service_v2.py | 6 +++--- .../unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- 8 files changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4ef22474befd..70a6e3f48865 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -825,7 +825,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index c190979c8402..7361662c4143 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -587,7 +587,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert 
all(isinstance(i, {{ method.paged_result_field.type.ident }}) for i in results) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 42a5cc00753a..fe7455ab7c5c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -922,7 +922,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, assets.Asset) for i in results) @@ -2694,7 +2694,7 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, assets.ResourceSearchResult) for i in results) @@ -3124,7 +3124,7 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, assets.IamPolicySearchResult) for i in results) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 5f77a567bab8..5c2da6d6a24b 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1023,7 +1023,7 @@ def test_list_triggers_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for 
i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, trigger.Trigger) for i in results) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1a8d1717e8b4..1920f259ad70 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -771,7 +771,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogBucket) for i in results) @@ -1977,7 +1977,7 @@ def test_list_views_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogView) for i in results) @@ -3015,7 +3015,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogSink) for i in results) @@ -4474,7 +4474,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_config.LogExclusion) for i in results) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index cf4641872844..0f5ee1b14052 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1151,7 +1151,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, log_entry.LogEntry) for i in results) @@ -1421,7 +1421,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) for i in results) @@ -1845,7 +1845,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, str) for i in results) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index b5655574817d..42e27b74fecb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -776,7 +776,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, logging_metrics.LogMetric) for i in results) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py 
b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 84ef6b505a47..185ec21bc6cd 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -777,7 +777,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, cloud_redis.Instance) for i in results) From f052aaff78d68cf40e2e726b11243cf909d54a13 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:33:32 -0400 Subject: [PATCH 0800/1339] chore(main): release 0.65.2 (#1283) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 747e8425d1de..af680ea4f83b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +### [0.65.2](https://github.com/googleapis/gapic-generator-python/compare/v0.65.1...v0.65.2) (2022-04-21) + + +### Bug Fixes + +* disambiguate method names ([#1282](https://github.com/googleapis/gapic-generator-python/issues/1282)) ([efe7216](https://github.com/googleapis/gapic-generator-python/commit/efe7216d8d59d945b4ea90ad109248b9eecc33e5)) +* fixes bug when an annotation path in an http rule does not have =. 
([#1284](https://github.com/googleapis/gapic-generator-python/issues/1284)) ([6dcb97c](https://github.com/googleapis/gapic-generator-python/commit/6dcb97cfb60d7d81dc205c20c762dfd5e74659e1)) +* use async snippet in async client method docstring ([#1280](https://github.com/googleapis/gapic-generator-python/issues/1280)) ([b72e1e0](https://github.com/googleapis/gapic-generator-python/commit/b72e1e063d587a93b65aa77dd222341bcc87ba39)) + ### [0.65.1](https://github.com/googleapis/gapic-generator-python/compare/v0.65.0...v0.65.1) (2022-04-13) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0723294ee08a..2e4206422b82 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -20,7 +20,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -version = "0.65.1" +version = "0.65.2" with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: README = file_obj.read() From 62dd4fd436bb6c492777da230d3b4653b44897ab Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 22 Apr 2022 10:30:04 +0200 Subject: [PATCH 0801/1339] chore(deps): update dependency protobuf to v3.20.1 (#1286) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 17afa6c4f27c..a18974171d9e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.7.2 googleapis-common-protos==1.56.0 jinja2==3.1.1 MarkupSafe==2.1.1 -protobuf==3.20.0 +protobuf==3.20.1 pypandoc==1.7.5 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped From 1d96e86b8b80bc6c5cbad6ae16ed68b57c521e81 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 28 Apr 2022 23:46:20 +0200 Subject: [PATCH 0802/1339] chore(deps): update dependency click to v8.1.3 (#1289) --- 
packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index a18974171d9e..fcd262b9e935 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.1.2 +click==8.1.3 google-api-core==2.7.2 googleapis-common-protos==1.56.0 jinja2==3.1.1 From acf77f2236f34232a2284863cd5d4e140c63814e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 29 Apr 2022 00:35:18 +0200 Subject: [PATCH 0803/1339] chore(deps): update dependency jinja2 to v3.1.2 (#1290) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index fcd262b9e935..4391f6828d71 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,7 +1,7 @@ click==8.1.3 google-api-core==2.7.2 googleapis-common-protos==1.56.0 -jinja2==3.1.1 +jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.1 pypandoc==1.7.5 From 8f1f97d30c0d904d2b75b0784e5e555d7dff0df3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 2 May 2022 16:49:44 +0200 Subject: [PATCH 0804/1339] chore(deps): update dependency google-api-core to v2.7.3 (#1298) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4391f6828d71..1a145f3c1851 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.7.2 +google-api-core==2.7.3 googleapis-common-protos==1.56.0 jinja2==3.1.2 MarkupSafe==2.1.1 From d5bc9f83170b296ec3e40dca6bf1a4ddd3929cfc Mon Sep 17 00:00:00 2001 From: Anthonios 
Partheniou Date: Mon, 2 May 2022 17:43:51 -0400 Subject: [PATCH 0805/1339] ci: remove unused code for circleci/codecov (#1292) --- .../gapic-generator/.circleci/.pypirc.enc | 2 - packages/gapic-generator/.circleci/config.yml | 716 ------------------ packages/gapic-generator/README.rst | 6 +- packages/gapic-generator/noxfile.py | 12 +- 4 files changed, 5 insertions(+), 731 deletions(-) delete mode 100644 packages/gapic-generator/.circleci/.pypirc.enc delete mode 100644 packages/gapic-generator/.circleci/config.yml diff --git a/packages/gapic-generator/.circleci/.pypirc.enc b/packages/gapic-generator/.circleci/.pypirc.enc deleted file mode 100644 index 880107333821..000000000000 --- a/packages/gapic-generator/.circleci/.pypirc.enc +++ /dev/null @@ -1,2 +0,0 @@ -Salted__ykÌ^Å7l#på×¾ä¼ßPn—.¢6c.æ»ÙÁ;ÅÀ­1 cý1ßàdýÊ`âÐöÄ_ \ñå Ëô¡Ÿ•R -–jáÖÝú…Qõ85t°%»¥ËH e \ No newline at end of file diff --git a/packages/gapic-generator/.circleci/config.yml b/packages/gapic-generator/.circleci/config.yml deleted file mode 100644 index 6fef7704cf50..000000000000 --- a/packages/gapic-generator/.circleci/config.yml +++ /dev/null @@ -1,716 +0,0 @@ ---- -version: 2 -workflows: - version: 2 - tests: - jobs: - - style-check: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - unit-3.6: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - unit-3.7: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - unit-3.8: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - unit-3.9: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-3.6: - requires: - - unit-3.6 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-3.7: - requires: - - unit-3.7 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-3.8: - requires: - - unit-3.8 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-3.9: - requires: - - unit-3.9 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-alternative-templates-3.7: - requires: - - unit-3.7 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - 
showcase-unit-alternative-templates-3.8: - requires: - - unit-3.8 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-alternative-templates-3.9: - requires: - - unit-3.9 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-unit-add-iam-methods: - requires: - - unit-3.8 - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-mypy: - requires: - - mypy - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-mypy-alternative-templates: - requires: - - mypy - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase: - requires: - - docs - - mypy - - showcase-unit-3.6 - - showcase-unit-3.7 - - showcase-unit-3.8 - - showcase-unit-3.9 - - showcase-mypy - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-mtls: - requires: - - docs - - mypy - - showcase-unit-3.6 - - showcase-unit-3.7 - - showcase-unit-3.8 - - showcase-unit-3.9 - - showcase-mypy - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-alternative-templates: - requires: - - docs - - mypy - - showcase-unit-alternative-templates-3.7 - - showcase-unit-alternative-templates-3.8 - - showcase-unit-alternative-templates-3.9 - - showcase-mypy-alternative-templates - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - showcase-mtls-alternative-templates: - requires: - - docs - - mypy - - showcase-unit-alternative-templates-3.7 - - showcase-unit-alternative-templates-3.8 - - showcase-unit-alternative-templates-3.9 - - showcase-mypy-alternative-templates - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - docs: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - mypy: - filters: - tags: - only: /^v\d+\.\d+\.\d+$/ - - publish_package: - requires: - - showcase - - style-check - filters: - branches: - ignore: /.*/ - tags: - only: /^v\d+\.\d+\.\d+$/ - - publish_image: - requires: - - showcase - - style-check - filters: - branches: - ignore: /.*/ - tags: - only: /^v\d+\.\d+\.\d+$/ -jobs: - docs: - docker: - - image: python:3.6-slim - steps: - - checkout - - run: - name: Install nox. 
- command: pip install nox - - run: - name: Build the documentation. - command: nox -s docs - mypy: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install nox. - command: pip install nox - - run: - name: Check type annotations. - command: nox -s mypy - publish_image: - docker: - - image: docker - steps: - - checkout - - setup_remote_docker - - run: - name: Build Docker image. - command: docker build . -t gcr.io/gapic-images/gapic-generator-python:latest - - run: - name: Download curl - command: apk add --no-cache curl - - run: - name: Download the GCR credential helper. - command: | - curl -fsSL https://github.com/GoogleCloudPlatform/docker-credential-gcr/releases/download/v1.5.0/docker-credential-gcr_linux_amd64-1.5.0.tar.gz \ - | tar xz --to-stdout ./docker-credential-gcr \ - > /usr/bin/docker-credential-gcr && chmod a+x /usr/bin/docker-credential-gcr - - run: - name: Set up authentication to Google Container Registry. - command: | - echo ${GCLOUD_SERVICE_KEY} > ${GOOGLE_APPLICATION_CREDENTIALS} - docker-credential-gcr configure-docker - - run: - name: Tag the Docker image and push it to Google Container Registry. - command: | - if [ -n "$CIRCLE_TAG" ]; then - export MAJOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $1; }'` - export MINOR=`echo $CIRCLE_TAG | awk -F '.' '{ print $2; }'` - export PATCH=`echo $CIRCLE_TAG | awk -F '.' 
'{ print $3; }'` - docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH - docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR - docker tag gcr.io/gapic-images/gapic-generator-python:latest gcr.io/gapic-images/gapic-generator-python:$MAJOR - docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR.$PATCH - docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR.$MINOR - docker push gcr.io/gapic-images/gapic-generator-python:$MAJOR - fi - docker push gcr.io/gapic-images/gapic-generator-python:latest - publish_package: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Decrypt the PyPI key. - command: | - openssl aes-256-cbc -d \ - -in .circleci/.pypirc.enc \ - -out ~/.pypirc \ - -k "${PYPIRC_ENCRYPTION_KEY}" - - run: - name: Publish to PyPI. - command: python setup.py sdist upload - showcase: - docker: - - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.11.0 - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install nox. - command: pip install nox - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Run showcase tests. - command: nox -s showcase - showcase-mtls: - working_directory: /tmp/workspace - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install nox. 
- command: pip install nox - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Run showcase tests. - command: | - mkdir gapic_showcase - cd gapic_showcase - curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.11.0/gapic-showcase-0.11.0-linux-amd64.tar.gz | tar xz - ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & - showcase_pid=$! - - cleanup() { - echo "kill showcase server" - kill $showcase_pid - # Wait for the process to die, but don't report error from the kill. - wait $showcase_pid || exit $exit_code - } - trap cleanup EXIT - - cd .. - nox -s showcase_mtls - showcase-alternative-templates: - docker: - - image: python:3.8-slim - - image: gcr.io/gapic-images/gapic-showcase:0.11.0 - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install nox. - command: pip install nox - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Run showcase tests. - command: nox -s showcase_alternative_templates - showcase-mtls-alternative-templates: - working_directory: /tmp/workspace - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. 
- command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install nox. - command: pip install nox - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Run showcase tests. - command: | - mkdir gapic_showcase - cd gapic_showcase - curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v0.11.0/gapic-showcase-0.11.0-linux-amd64.tar.gz | tar xz - ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & - showcase_pid=$! - - cleanup() { - echo "kill showcase server" - kill $showcase_pid - # Wait for the process to die, but don't report error from the kill. - wait $showcase_pid || exit $exit_code - } - trap cleanup EXIT - - cd .. - nox -s showcase_mtls_alternative_templates - # TODO(yon-mg): add compute unit tests - showcase-unit-3.6: - docker: - - image: python:3.6-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit-3.6 - showcase-unit-3.7: - docker: - - image: python:3.7-slim - steps: - - checkout - - run: - name: Install system dependencies. 
- command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit-3.7 - showcase-unit-3.8: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit-3.8 - showcase-unit-3.9: - docker: - - image: python:3.9-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. 
- command: nox -s showcase_unit-3.9 - showcase-unit-alternative-templates-3.7: - docker: - - image: python:3.7-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit_alternative_templates-3.7 - showcase-unit-alternative-templates-3.8: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit_alternative_templates-3.8 - showcase-unit-alternative-templates-3.9: - docker: - - image: python:3.9-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. 
- command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit_alternative_templates-3.9 - showcase-unit-add-iam-methods: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Run unit tests. - command: nox -s showcase_unit_add_iam_methods-3.8 - showcase-mypy: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. - command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Typecheck the generated output. - command: nox -s showcase_mypy - showcase-mypy-alternative-templates: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install system dependencies. 
- command: | - apt-get update - apt-get install -y curl pandoc unzip gcc - - run: - name: Install protoc 3.12.1. - command: | - mkdir -p /usr/src/protoc/ - curl --location https://github.com/google/protobuf/releases/download/v3.12.1/protoc-3.12.1-linux-x86_64.zip --output /usr/src/protoc/protoc-3.12.1.zip - cd /usr/src/protoc/ - unzip protoc-3.12.1.zip - ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - run: - name: Install nox. - command: pip install nox - - run: - name: Typecheck the generated output. - command: nox -s showcase_mypy_alternative_templates - unit-3.6: - docker: - - image: python:3.6-slim - steps: - - checkout - - run: - name: Install pandoc - command: | - apt-get update - apt-get install -y pandoc gcc git - - run: - name: Install nox and codecov. - command: | - pip install nox - pip install codecov - - run: - name: Run unit tests. - command: nox -s unit-3.6 - - run: - name: Submit coverage data to codecov. - command: codecov - when: always - unit-3.7: - docker: - - image: python:3.7-slim - steps: - - checkout - - run: - name: Install pandoc - command: | - apt-get update - apt-get install -y pandoc gcc git - - run: - name: Install nox and codecov. - command: | - pip install nox - pip install codecov - - run: - name: Run unit tests. - command: nox -s unit-3.7 - - run: - name: Submit coverage data to codecov. - command: codecov - when: always - unit-3.8: - docker: - - image: python:3.8-slim - steps: - - checkout - - run: - name: Install pandoc - command: | - apt-get update - apt-get install -y pandoc gcc git - - run: - name: Install nox and codecov. - command: | - pip install nox - pip install codecov - - run: - name: Run unit tests. - command: nox -s unit-3.8 - - run: - name: Submit coverage data to codecov. 
- command: codecov - when: always - unit-3.9: - docker: - - image: python:3.9-slim - steps: - - checkout - - run: - name: Install pandoc - command: | - apt-get update - apt-get install -y pandoc gcc git - - run: - name: Install nox and codecov. - command: | - pip install nox - pip install codecov - - run: - name: Run unit tests. - command: nox -s unit-3.9 - - run: - name: Submit coverage data to codecov. - command: codecov - when: always - style-check: - docker: - - image: python:3.6-alpine - steps: - - checkout - - run: - name: Install git - command: | - apk add git - - run: - name: Install autopep8 - command: | - pip install autopep8 - - run: - name: Format files - command: | - find gapic tests -name "*.py" | xargs autopep8 --in-place - - run: - name: Check diff - command: | - git diff --ignore-submodules=all --color --exit-code . diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index 5a2a087b60c6..cb3a352cd892 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -1,7 +1,7 @@ API Client Generator for Python =============================== -|release level| |ci| |docs| |codecov| +|release level| |docs| A generator for protocol buffer described APIs for and in Python 3. @@ -32,7 +32,3 @@ Documentation :target: https://cloud.google.com/terms/launch-stages .. |docs| image:: https://readthedocs.org/projects/gapic-generator-python/badge/?version=latest :target: https://gapic-generator-python.readthedocs.io/ -.. |ci| image:: https://circleci.com/gh/googleapis/gapic-generator-python.svg?style=shield - :target: https://circleci.com/gh/googleapis/gapic-generator-python -.. 
|codecov| image:: https://codecov.io/gh/googleapis/gapic-generator-python/graph/badge.svg - :target: https://codecov.io/gh/googleapis/gapic-generator-python diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 88cde0d74af1..9dfb14d25ed0 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -170,14 +170,10 @@ def showcase_library( ): """Install the generated library into the session for showcase tests.""" - # Try to make it clear if Showcase is not running, so that - # people do not end up with tons of difficult-to-debug failures over - # an obvious problem. - if not os.environ.get("CIRCLECI"): - session.log("-" * 70) - session.log("Note: Showcase must be running for these tests to work.") - session.log("See https://github.com/googleapis/gapic-showcase") - session.log("-" * 70) + session.log("-" * 70) + session.log("Note: Showcase must be running for these tests to work.") + session.log("See https://github.com/googleapis/gapic-showcase") + session.log("-" * 70) # Install gapic-generator-python session.install("-e", ".") From 94dff146af84d3ccdf2e191a8c78a21bfe1b049c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 May 2022 17:54:32 -0400 Subject: [PATCH 0806/1339] ci: configure release automation using owl bot (#1293) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: configure release automation using owl bot * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 17 + .../gapic-generator/.github/CONTRIBUTING.md | 28 + .../.github/ISSUE_TEMPLATE/bug_report.md | 43 ++ .../.github/ISSUE_TEMPLATE/feature_request.md | 18 + .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + .../gapic-generator/.github/auto-label.yaml | 15 + 
.../.github/header-checker-lint.yml | 15 + .../.github/release-trigger.yml | 1 + .../gapic-generator/.github/snippet-bot.yml | 3 - packages/gapic-generator/.kokoro/build.sh | 59 +++ .../.kokoro/continuous/common.cfg | 27 + .../.kokoro/continuous/continuous.cfg | 1 + .../.kokoro/docker/docs/Dockerfile | 83 +++ .../.kokoro/populate-secrets.sh | 43 ++ .../.kokoro/presubmit/common.cfg | 27 + .../.kokoro/presubmit/presubmit.cfg | 1 + packages/gapic-generator/.kokoro/release.sh | 32 ++ .../.kokoro/release/common.cfg | 40 ++ .../.kokoro/release/release.cfg | 1 + .../gapic-generator/.kokoro/trampoline.sh | 28 + .../gapic-generator/.kokoro/trampoline_v2.sh | 487 ++++++++++++++++++ packages/gapic-generator/.repo-metadata.json | 13 + packages/gapic-generator/owlbot.py | 24 + 24 files changed, 1017 insertions(+), 3 deletions(-) create mode 100644 packages/gapic-generator/.github/.OwlBot.lock.yaml create mode 100644 packages/gapic-generator/.github/CONTRIBUTING.md create mode 100644 packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 packages/gapic-generator/.github/auto-label.yaml create mode 100644 packages/gapic-generator/.github/header-checker-lint.yml create mode 100644 packages/gapic-generator/.github/release-trigger.yml create mode 100755 packages/gapic-generator/.kokoro/build.sh create mode 100644 packages/gapic-generator/.kokoro/continuous/common.cfg create mode 100644 packages/gapic-generator/.kokoro/continuous/continuous.cfg create mode 100644 packages/gapic-generator/.kokoro/docker/docs/Dockerfile create mode 100755 packages/gapic-generator/.kokoro/populate-secrets.sh create mode 100644 packages/gapic-generator/.kokoro/presubmit/common.cfg create mode 100644 
packages/gapic-generator/.kokoro/presubmit/presubmit.cfg create mode 100755 packages/gapic-generator/.kokoro/release.sh create mode 100644 packages/gapic-generator/.kokoro/release/common.cfg create mode 100644 packages/gapic-generator/.kokoro/release/release.cfg create mode 100755 packages/gapic-generator/.kokoro/trampoline.sh create mode 100755 packages/gapic-generator/.kokoro/trampoline_v2.sh create mode 100644 packages/gapic-generator/.repo-metadata.json create mode 100644 packages/gapic-generator/owlbot.py diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000000..368c491fdbfe --- /dev/null +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -0,0 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd diff --git a/packages/gapic-generator/.github/CONTRIBUTING.md b/packages/gapic-generator/.github/CONTRIBUTING.md new file mode 100644 index 000000000000..939e5341e74d --- /dev/null +++ b/packages/gapic-generator/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. 
+ +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md b/packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000000..bb9de926a0e2 --- /dev/null +++ b/packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,43 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
+ +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/gapic-generator-python/issues + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `gapic-generator` version: `pip show gapic-generator` + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md b/packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000000..6365857f33c6 --- /dev/null +++ b/packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md b/packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 000000000000..995869032125 --- /dev/null +++ b/packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md b/packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..40d1f1146bf4 --- /dev/null +++ b/packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/gapic-generator-python/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/packages/gapic-generator/.github/auto-label.yaml b/packages/gapic-generator/.github/auto-label.yaml new file mode 100644 index 000000000000..41bff0b5375a --- /dev/null +++ b/packages/gapic-generator/.github/auto-label.yaml @@ -0,0 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +requestsize: + enabled: true diff --git a/packages/gapic-generator/.github/header-checker-lint.yml b/packages/gapic-generator/.github/header-checker-lint.yml new file mode 100644 index 000000000000..6fe78aa7987a --- /dev/null +++ b/packages/gapic-generator/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/packages/gapic-generator/.github/release-trigger.yml b/packages/gapic-generator/.github/release-trigger.yml new file mode 100644 index 000000000000..d4ca94189e16 --- /dev/null +++ b/packages/gapic-generator/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/packages/gapic-generator/.github/snippet-bot.yml b/packages/gapic-generator/.github/snippet-bot.yml index 19254c01c97d..e69de29bb2d1 100644 --- a/packages/gapic-generator/.github/snippet-bot.yml +++ b/packages/gapic-generator/.github/snippet-bot.yml @@ -1,3 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/snippet-bot -ignoreFiles: - - "**/*.py" diff --git a/packages/gapic-generator/.kokoro/build.sh b/packages/gapic-generator/.kokoro/build.sh new file mode 100755 index 000000000000..a8340f3a586e --- /dev/null +++ 
b/packages/gapic-generator/.kokoro/build.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/gapic-generator-python" +fi + +cd "${PROJECT_ROOT}" + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") + +# Remove old nox +python3 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. 
+if [[ -n "${NOX_SESSION:-}" ]]; then + python3 -m nox -s ${NOX_SESSION:-} +else + python3 -m nox +fi diff --git a/packages/gapic-generator/.kokoro/continuous/common.cfg b/packages/gapic-generator/.kokoro/continuous/common.cfg new file mode 100644 index 000000000000..edae760eb5ef --- /dev/null +++ b/packages/gapic-generator/.kokoro/continuous/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "gapic-generator-python/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/gapic-generator-python/.kokoro/build.sh" +} diff --git a/packages/gapic-generator/.kokoro/continuous/continuous.cfg b/packages/gapic-generator/.kokoro/continuous/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/gapic-generator/.kokoro/continuous/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000000..238b87b9d1c9 --- /dev/null +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,83 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:22.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + python3-distutils \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +# Test pip +RUN python3 -m pip + +CMD ["python3.8"] diff --git a/packages/gapic-generator/.kokoro/populate-secrets.sh b/packages/gapic-generator/.kokoro/populate-secrets.sh new file mode 
100755 index 000000000000..f52514257ef0 --- /dev/null +++ b/packages/gapic-generator/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/packages/gapic-generator/.kokoro/presubmit/common.cfg b/packages/gapic-generator/.kokoro/presubmit/common.cfg new file mode 100644 index 000000000000..edae760eb5ef --- /dev/null +++ b/packages/gapic-generator/.kokoro/presubmit/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "gapic-generator-python/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/gapic-generator-python/.kokoro/build.sh" +} diff --git a/packages/gapic-generator/.kokoro/presubmit/presubmit.cfg b/packages/gapic-generator/.kokoro/presubmit/presubmit.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/gapic-generator/.kokoro/presubmit/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh new file mode 100755 index 000000000000..c78b93700e13 --- /dev/null +++ b/packages/gapic-generator/.kokoro/release.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Move into the package, build the distribution and upload. +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +cd github/gapic-generator-python +python3 setup.py sdist bdist_wheel +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/gapic-generator/.kokoro/release/common.cfg b/packages/gapic-generator/.kokoro/release/common.cfg new file mode 100644 index 000000000000..72000839ce3c --- /dev/null +++ b/packages/gapic-generator/.kokoro/release/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "gapic-generator-python/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/gapic-generator-python/.kokoro/release.sh" +} + +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} diff --git a/packages/gapic-generator/.kokoro/release/release.cfg b/packages/gapic-generator/.kokoro/release/release.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/gapic-generator/.kokoro/release/release.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/trampoline.sh b/packages/gapic-generator/.kokoro/trampoline.sh new file mode 100755 index 000000000000..f39236e943a8 --- /dev/null +++ b/packages/gapic-generator/.kokoro/trampoline.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Always run the cleanup script, regardless of the success of bouncing into +# the container. 
+function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT + +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/trampoline_v2.sh b/packages/gapic-generator/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000000..4af6cdc26dbc --- /dev/null +++ b/packages/gapic-generator/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. 
+# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. 
+function log_red() {
+    log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+    rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+    # TRAMPOLINE_V2 variables.
+    # Tells scripts whether they are running as part of CI or not.
+    "RUNNING_IN_CI"
+    # Indicates which CI system we're in.
+    "TRAMPOLINE_CI"
+    # Indicates the version of the script.
+    "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passed down to the
+# container for telling which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+    # descriptive env var for indicating it's on CI.
+    RUNNING_IN_CI="true"
+    TRAMPOLINE_CI="kokoro"
+    if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+        if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+            log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+            exit 1
+        fi
+        # This service account will be activated later.
+        TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+    else
+        if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+            gcloud auth list
+        fi
+        log_yellow "Configuring Container Registry access"
+        gcloud auth configure-docker --quiet
+    fi
+    pass_down_envvars+=(
+        # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For FlakyBot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. 
In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. 
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. 
+    if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+        if docker build "${docker_build_flags[@]}" "${context_dir}" \
+            > "${tmpdir}/docker_build.log" 2>&1; then
+            if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+                cat "${tmpdir}/docker_build.log"
+            fi
+
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            log_yellow "Dumping the build logs:"
+            cat "${tmpdir}/docker_build.log"
+            exit 1
+        fi
+    else
+        if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            exit 1
+        fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+        log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+        exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount the /tmp so that docker in docker can mount the files
+    # there correctly.
+ "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." + docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." 
+ fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/packages/gapic-generator/.repo-metadata.json b/packages/gapic-generator/.repo-metadata.json new file mode 100644 index 000000000000..a4126e1b7889 --- /dev/null +++ b/packages/gapic-generator/.repo-metadata.json @@ -0,0 +1,13 @@ +{ + "name": "gapic-generator-python", + "name_pretty": "Google API Client Generator for Python", + "client_documentation": "https://gapic-generator-python.readthedocs.io/en/stable/", + "issue_tracker": "https://github.com/googleapis/gapic-generator-python/issues", + "release_level": "preview", + "language": "python", + "library_type": "CORE", + "repo": "googleapis/gapic-generator-python", + "distribution_name": "gapic-generator", + "default_version": "", + "codeowner_team": "@googleapis/actools-python" +} diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py new file mode 100644 index 000000000000..618b7f054e83 --- /dev/null +++ b/packages/gapic-generator/owlbot.py @@ -0,0 +1,24 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import synthtool as s +from synthtool.gcp import CommonTemplates + +templated_files = CommonTemplates().py_library() +s.move( + templated_files / ".kokoro", + excludes=["docs/**/*", "publish-docs.sh", "samples/**/*", "test-samples*"], +) +s.move(templated_files / "LICENSE") +s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS"]) From d9fa5c619bcbbe10f62f554a9763ebce69fbbed9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 May 2022 18:02:53 -0400 Subject: [PATCH 0807/1339] ci: update required checks (#1294) --- .../.github/sync-repo-settings.yaml | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 8c6fac341e10..30a3766f0417 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -18,11 +18,22 @@ branchProtectionRules: - 'showcase-mypy (_alternative_templates)' - 'showcase-unit (3.6)' - 'showcase-unit (3.7)' - - 'showcase-unit (3.7, _alternative_templates)' - 'showcase-unit (3.8)' - - 'showcase-unit (3.8, _alternative_templates)' - 'showcase-unit (3.9)' + - 'showcase-unit (3.10)' + - 'showcase-unit (3.7, _alternative_templates)' + - 'showcase-unit (3.8, _alternative_templates)' - 'showcase-unit (3.9, _alternative_templates)' + - 'showcase-unit (3.10, _alternative_templates)' + - 'showcase-unit (3.7, _alternative_templates_mixins)' + - 'showcase-unit (3.8, _alternative_templates_mixins)' + - 'showcase-unit (3.9, _alternative_templates_mixins)' + - 'showcase-unit (3.10, _alternative_templates_mixins)' + - 'showcase-unit (3.6, _mixins)' + - 'showcase-unit (3.7, _mixins)' + - 'showcase-unit (3.8, _mixins)' + - 'showcase-unit (3.9, _mixins)' + - 'showcase-unit (3.10, _mixins)' - 'showcase-unit-add-iam-methods' - 'integration' - 'goldens-lint' @@ -32,13 +43,17 @@ branchProtectionRules: - 'unit (3.7)' - 
'unit (3.8)' - 'unit (3.9)' + - 'unit (3.10)' - 'fragment (3.6)' - 'fragment (3.7)' - 'fragment (3.8)' - 'fragment (3.9)' + - 'fragment (3.10)' - 'fragment (3.7, _alternative_templates)' - 'fragment (3.8, _alternative_templates)' - 'fragment (3.9, _alternative_templates)' + - 'fragment (3.10, _alternative_templates)' + - 'OwlBot Post Processor' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true requiresStrictStatusChecks: true From 485a0ec35f283ba74cd5b7ab6fcc5f7227c14582 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 May 2022 19:54:20 -0400 Subject: [PATCH 0808/1339] chore: update getting-started doc (#1295) * chore: update getting-started doc * formatting --- .../docs/getting-started/local.rst | 32 +++++++------------ 1 file changed, 12 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/docs/getting-started/local.rst b/packages/gapic-generator/docs/getting-started/local.rst index 9c2a989fb84b..db474e3a2d4e 100644 --- a/packages/gapic-generator/docs/getting-started/local.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -75,28 +75,20 @@ the usual ways. It fundamentally provides a CLI command, ``snake_case`` is weird, sorry), so you will want to install using a mechanism that is conducive to making CLI commands available. -Additionally, this program currently only runs against Python 3.6 or -Python 3.7, so you will need that installed. (Most Linux distributions ship -with earlier versions.) Use `pyenv`_ to get Python 3.7 installed in a -friendly way. - .. code-block:: shell - # Due to its experimental state, this tool is not published to a - # package manager; you should clone it. - # (You can pip install it from GitHub, not not if you want to tinker.) - git clone https://github.com/googleapis/gapic-generator-python.git - cd gapic-generator-python/ - - # Install a version of python that is supported by the microgenerator. - # We use 3.8.6 as an example. 
- # You may need to install additional packages in order to - # build python from source. - # Setting a 'global' python is convenient for development but may interfere - # with other system activities. Adjust as your environment requires. - pyenv install 3.8.6 && pyenv global 3.8.6 - - # Install the tool. This will handle the virtualenv for you, and + # Install this package using + pip install gapic-generator + + # Install a version of python that is supported by the microgenerator. + # We use 3.9.12 as an example. + # You may need to install additional packages in order to + # build python from source. + # Setting a 'global' python is convenient for development but may interfere + # with other system activities. Adjust as your environment requires. + pyenv install 3.9.12 && pyenv global 3.9.12 + + # Install the tool. This will handle the virtualenv for you, and # make an appropriately-aliased executable. # The `--editable` flag is only necessary if you want to work on the # tool (as opposed to just use it). 
From 3433b62041d6d9b5ad50d1373a1578c30708688b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 May 2022 20:17:24 -0400 Subject: [PATCH 0809/1339] chore: update setup.py (#1297) * chore: update setup.py * revert change to copyright year --- packages/gapic-generator/setup.py | 82 ++++++++++++++++--------------- 1 file changed, 43 insertions(+), 39 deletions(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2e4206422b82..5610ef7e881b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,63 +17,65 @@ import io import os -from setuptools import find_packages, setup # type: ignore - - -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) +import setuptools +name = "gapic-generator" +description = "Google API Client Generator for Python" +url = "https://github.com/googleapis/gapic-generator-python" version = "0.65.2" +release_status = "Development Status :: 4 - Beta" +dependencies = [ + "click >= 6.7", + "google-api-core >= 2.3.2", + "googleapis-common-protos >= 1.55.0", + "grpcio >= 1.24.3", + "jinja2 >= 2.10", + "protobuf >= 3.18.0", + "pypandoc >= 1.4", + "PyYAML >= 5.1.1", + "dataclasses < 0.8; python_version < '3.7'", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", +] -with io.open(os.path.join(PACKAGE_ROOT, "README.rst")) as file_obj: - README = file_obj.read() +package_root = os.path.abspath(os.path.dirname(__file__)) -setup( - name="gapic-generator", +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +setuptools.setup( + name=name, version=version, - license="Apache 2.0", - author="Dov Shlachter", - author_email="dovs@google.com", - url="https://github.com/googleapis/gapic-generator-python.git", - 
packages=find_packages(exclude=["docs", "tests"]), - description="Python client library generator for APIs defined by protocol" - "buffers", - long_description=README, + description=description, + long_description=readme, entry_points="""[console_scripts] protoc-gen-dump=gapic.cli.dump:dump protoc-gen-python_gapic=gapic.cli.generate:generate """, - platforms="Posix; MacOS X", - include_package_data=True, - install_requires=( - # There is a typing issue with click==8.1.0 - # See https://github.com/pallets/click/issues/2227 - "click >= 6.7,!=8.1.0", - "google-api-core >= 2.3.2", - "googleapis-common-protos >= 1.55.0", - "grpcio >= 1.24.3", - "jinja2 >= 2.10", - "protobuf >= 3.18.0", - "pypandoc >= 1.4", - "PyYAML >= 5.1.1", - "dataclasses < 0.8; python_version < '3.7'", - "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - ), - extras_require={':python_version<"3.7"': ("dataclasses >= 0.4, < 0.8",),}, - tests_require=("pyfakefs >= 3.6",), - python_requires=">=3.6", - classifiers=( - "Development Status :: 4 - Beta", + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + packages=setuptools.find_packages(exclude=["docs", "tests"]), + url=url, + classifiers=[ + release_status, "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", - "Operating System :: POSIX", + "Programming Language :: Python", + "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", - ), + ], + platforms="Posix; MacOS X", + python_requires=">=3.6", + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) From 
55851818442ef45bd4441069085a9d93cb0d5879 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 May 2022 20:24:29 -0400 Subject: [PATCH 0810/1339] fix: don't use stale session in rest transport (#1291) * fix: don't use stale session in rest transport * add test --- .../services/%service/transports/rest.py.j2 | 7 +----- .../%name_%version/%sub/test_%service.py.j2 | 22 +++++++++++++++++++ .../services/%service/transports/rest.py.j2 | 7 +----- .../%name_%version/%sub/test_%service.py.j2 | 22 +++++++++++++++++++ 4 files changed, 46 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 6c7b6b8dd9b0..4162d66c8037 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -129,7 +129,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - _STUBS: Dict[str, {{service.name}}RestStub] = {} {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} @@ -399,13 +398,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): def {{method.transport_safe_name | snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: - stub = self._STUBS.get("{{method.transport_safe_name | snake_case}}") - if not stub: - stub = self._STUBS["{{method.transport_safe_name | snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return stub # type: ignore + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore {% endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 70a6e3f48865..1c8ff54ac4df 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1941,6 +1941,28 @@ def test_{{ service.name|snake_case }}_host_with_port(transport_name): ) {% endwith %} +{% if 'rest' in opts.transport %} +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_{{ service.name|snake_case }}_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = {{ service.client_name }}( + credentials=creds1, + transport=transport_name, + ) + client2 = {{ service.client_name }}( + credentials=creds2, + transport=transport_name, + ) + {% for method in service.methods.values() %} + session1 = client1.transport.{{ method.transport_safe_name|snake_case }}._session + session2 = client2.transport.{{ method.transport_safe_name|snake_case }}._session + assert session1 != session2 + {% endfor %} +{% endif -%} + {% if 'grpc' in opts.transport %} def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 053bba3d5273..c4f1d89893e3 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -129,7 +129,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - _STUBS: Dict[str, {{service.name}}RestStub] = {} {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} @@ -399,13 +398,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): def {{method.transport_safe_name|snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: - stub = self._STUBS.get("{{method.transport_safe_name|snake_case}}") - if not stub: - stub = self._STUBS["{{method.transport_safe_name|snake_case}}"] = self._{{method.name}}(self._session, self._host, self._interceptor) - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return stub # type: ignore + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1a6e3c00cb41..1c54a8c54ec7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1031,6 +1031,28 @@ def test_{{ service.name|snake_case }}_host_with_port(transport_name): ) {% endwith %} +{% if 'rest' in opts.transport %} +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_{{ service.name|snake_case }}_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = {{ service.client_name }}( + credentials=creds1, + transport=transport_name, + ) + client2 = {{ service.client_name }}( + credentials=creds2, + transport=transport_name, + ) + {% for method in service.methods.values() %} + session1 = client1.transport.{{ method.transport_safe_name|snake_case }}._session + session2 = client2.transport.{{ method.transport_safe_name|snake_case }}._session + assert session1 != session2 + {% endfor %} +{% endif -%} + {% if 'grpc' in opts.transport %} def test_{{ service.name|snake_case }}_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) From 383354e95fd7dce64c84710fe31418cdfce203fc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 3 May 2022 07:16:27 -0400 Subject: [PATCH 0811/1339] chore(main): release 0.65.3 (#1299) Co-authored-by: release-please[bot] 
<55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index af680ea4f83b..b4759a23a18e 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +### [0.65.3](https://github.com/googleapis/gapic-generator-python/compare/v0.65.2...v0.65.3) (2022-05-03) + + +### Bug Fixes + +* don't use stale session in rest transport ([#1291](https://github.com/googleapis/gapic-generator-python/issues/1291)) ([a96ef9e](https://github.com/googleapis/gapic-generator-python/commit/a96ef9ef3f99b0114f1d5630ee6e8907dd24bec2)) + ### [0.65.2](https://github.com/googleapis/gapic-generator-python/compare/v0.65.1...v0.65.2) (2022-04-21) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5610ef7e881b..43e8d873aca4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "0.65.2" +version = "0.65.3" release_status = "Development Status :: 4 - Beta" dependencies = [ "click >= 6.7", From 23f5e0efd7d1ca05c166e395d82f2c523af87734 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 3 May 2022 11:09:03 -0400 Subject: [PATCH 0812/1339] ci: fix kokoro docs build (#1300) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: fix kokoro docs build * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../gapic-generator/.kokoro/docs/common.cfg | 67 +++++++++++++++++++ .../.kokoro/docs/docs-presubmit.cfg | 28 ++++++++ 
.../gapic-generator/.kokoro/docs/docs.cfg | 1 + .../gapic-generator/.kokoro/publish-docs.sh | 64 ++++++++++++++++++ packages/gapic-generator/owlbot.py | 2 +- 5 files changed, 161 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/.kokoro/docs/common.cfg create mode 100644 packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg create mode 100644 packages/gapic-generator/.kokoro/docs/docs.cfg create mode 100755 packages/gapic-generator/.kokoro/publish-docs.sh diff --git a/packages/gapic-generator/.kokoro/docs/common.cfg b/packages/gapic-generator/.kokoro/docs/common.cfg new file mode 100644 index 000000000000..dcab742e0888 --- /dev/null +++ b/packages/gapic-generator/.kokoro/docs/common.cfg @@ -0,0 +1,67 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "gapic-generator-python/.kokoro/trampoline_v2.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/gapic-generator-python/.kokoro/publish-docs.sh" +} + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + # Push non-cloud library docs to `docs-staging-v2-staging` instead of the + # Cloud RAD bucket `docs-staging-v2` + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. 
+env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg b/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000000..36e81baf2c06 --- /dev/null +++ b/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,28 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/gapic-generator-python/.kokoro/build.sh" +} + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/packages/gapic-generator/.kokoro/docs/docs.cfg b/packages/gapic-generator/.kokoro/docs/docs.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/gapic-generator/.kokoro/docs/docs.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/publish-docs.sh b/packages/gapic-generator/.kokoro/publish-docs.sh new file mode 100755 index 000000000000..8acb14e802b0 --- /dev/null +++ b/packages/gapic-generator/.kokoro/publish-docs.sh @@ -0,0 +1,64 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +export PATH="${HOME}/.local/bin:${PATH}" + +# Install nox +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version + +# build docs +nox -s docs + +python3 -m pip install --user gcp-docuploader + +# create metadata +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. 
+python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py index 618b7f054e83..b1a9b93f4f5d 100644 --- a/packages/gapic-generator/owlbot.py +++ b/packages/gapic-generator/owlbot.py @@ -18,7 +18,7 @@ templated_files = CommonTemplates().py_library() s.move( templated_files / ".kokoro", - excludes=["docs/**/*", "publish-docs.sh", "samples/**/*", "test-samples*"], + excludes=["samples/**/*", "test-samples*"], ) s.move(templated_files / "LICENSE") s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS"]) From bc24d3d34d3caa85c7d1563a0f4a044cfbe6bd23 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 3 May 2022 19:44:21 -0400 Subject: [PATCH 0813/1339] ci: use python 3.8 in docs build (#1302) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: use python 3.8 in docs build * add files required for docs build * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update docs link * remove docfx session from publish-docs.sh Co-authored-by: Owl Bot --- .../.github/workflows/tests.yaml | 2 +- .../.kokoro/docs/docs-presubmit.cfg | 2 +- .../gapic-generator/.kokoro/publish-docs.sh | 19 ------ 
packages/gapic-generator/.repo-metadata.json | 3 +- packages/gapic-generator/.trampolinerc | 63 +++++++++++++++++++ packages/gapic-generator/README.rst | 6 +- packages/gapic-generator/noxfile.py | 2 +- packages/gapic-generator/owlbot.py | 13 +++- 8 files changed, 82 insertions(+), 28 deletions(-) create mode 100644 packages/gapic-generator/.trampolinerc diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 6101f68d395a..d1e39ed71a82 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -22,7 +22,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v3 with: - python-version: "3.9" + python-version: "3.8" cache: 'pip' - name: Install nox. run: python -m pip install nox diff --git a/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg b/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg index 36e81baf2c06..1c32c81df282 100644 --- a/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg +++ b/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg @@ -24,5 +24,5 @@ env_vars: { # Only run this nox session. env_vars: { key: "NOX_SESSION" - value: "docs docfx" + value: "docs" } diff --git a/packages/gapic-generator/.kokoro/publish-docs.sh b/packages/gapic-generator/.kokoro/publish-docs.sh index 8acb14e802b0..322d8522ef0d 100755 --- a/packages/gapic-generator/.kokoro/publish-docs.sh +++ b/packages/gapic-generator/.kokoro/publish-docs.sh @@ -43,22 +43,3 @@ cat docs.metadata # upload docs python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. 
-python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/gapic-generator/.repo-metadata.json b/packages/gapic-generator/.repo-metadata.json index a4126e1b7889..85297663062f 100644 --- a/packages/gapic-generator/.repo-metadata.json +++ b/packages/gapic-generator/.repo-metadata.json @@ -1,7 +1,8 @@ { - "name": "gapic-generator-python", + "name": "gapic-generator", "name_pretty": "Google API Client Generator for Python", "client_documentation": "https://gapic-generator-python.readthedocs.io/en/stable/", + "product_documentation": "", "issue_tracker": "https://github.com/googleapis/gapic-generator-python/issues", "release_level": "preview", "language": "python", diff --git a/packages/gapic-generator/.trampolinerc b/packages/gapic-generator/.trampolinerc new file mode 100644 index 000000000000..0eee72ab62aa --- /dev/null +++ b/packages/gapic-generator/.trampolinerc @@ -0,0 +1,63 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### + "STAGING_BUCKET" + "V2_STAGING_BUCKET" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index cb3a352cd892..8331bae9203e 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -24,11 +24,9 @@ regarding: Documentation ------------- -`Documentation`_ is available on Read the Docs. +See the `documentation`_. -.. _documentation: https://gapic-generator-python.readthedocs.io/ +.. 
_documentation: https://googleapis.dev/python/gapic-generator-python/latest .. |release level| image:: https://img.shields.io/badge/release%20level-beta-yellow.svg?style=flat :target: https://cloud.google.com/terms/launch-stages -.. |docs| image:: https://readthedocs.org/projects/gapic-generator-python/badge/?version=latest - :target: https://gapic-generator-python.readthedocs.io/ diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 9dfb14d25ed0..9a1e5624679d 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -422,7 +422,7 @@ def snippetgen(session): session.run("py.test", "-vv", "tests/snippetgen") -@nox.session(python="3.9") +@nox.session(python="3.8") def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py index b1a9b93f4f5d..f05ef4bdb23a 100644 --- a/packages/gapic-generator/owlbot.py +++ b/packages/gapic-generator/owlbot.py @@ -18,7 +18,18 @@ templated_files = CommonTemplates().py_library() s.move( templated_files / ".kokoro", - excludes=["samples/**/*", "test-samples*"], + excludes=["samples/**/*", "test-samples*", "publish-docs.sh"], ) + +# remove docfx build +assert 1 == s.replace( + ".kokoro/docs/docs-presubmit.cfg", + 'value: "docs docfx"', + 'value: "docs"', +) + +# needed for docs build +s.move(templated_files / ".trampolinerc") + s.move(templated_files / "LICENSE") s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS"]) From cf41d7636ab9bee6734a875e66b1ac95d2c714f8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 5 May 2022 13:51:22 -0400 Subject: [PATCH 0814/1339] feat: release as stable (#1301) * feat: release as stable * typo * add links for pypi and versions --- packages/gapic-generator/.repo-metadata.json | 2 +- packages/gapic-generator/README.rst | 10 +++++++--- packages/gapic-generator/setup.py | 2 +- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git 
a/packages/gapic-generator/.repo-metadata.json b/packages/gapic-generator/.repo-metadata.json index 85297663062f..ab7d4ad59ad7 100644 --- a/packages/gapic-generator/.repo-metadata.json +++ b/packages/gapic-generator/.repo-metadata.json @@ -4,7 +4,7 @@ "client_documentation": "https://gapic-generator-python.readthedocs.io/en/stable/", "product_documentation": "", "issue_tracker": "https://github.com/googleapis/gapic-generator-python/issues", - "release_level": "preview", + "release_level": "stable", "language": "python", "library_type": "CORE", "repo": "googleapis/gapic-generator-python", diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index 8331bae9203e..d5888edc5d52 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -1,7 +1,7 @@ API Client Generator for Python =============================== -|release level| |docs| +|release level| |pypi| |versions| A generator for protocol buffer described APIs for and in Python 3. @@ -28,5 +28,9 @@ See the `documentation`_. .. _documentation: https://googleapis.dev/python/gapic-generator-python/latest -.. |release level| image:: https://img.shields.io/badge/release%20level-beta-yellow.svg?style=flat - :target: https://cloud.google.com/terms/launch-stages +.. |release level| image:: https://img.shields.io/badge/support-stable-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/gapic-generator.svg + :target: https://pypi.org/project/gapic-generator/ +.. 
|versions| image:: https://img.shields.io/pypi/pyversions/gapic-generator.svg + :target: https://pypi.org/project/gapic-generator/ diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 43e8d873aca4..0b7816d20cbf 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -23,7 +23,7 @@ description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" version = "0.65.3" -release_status = "Development Status :: 4 - Beta" +release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", "google-api-core >= 2.3.2", From aedc3c83460020f7666e6af06061584f182677b3 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 7 May 2022 17:12:38 -0400 Subject: [PATCH 0815/1339] chore: remove obsolete workflow file (#1306) --- .../.github/workflows/pypi-upload.yaml | 25 ------------------- 1 file changed, 25 deletions(-) delete mode 100644 packages/gapic-generator/.github/workflows/pypi-upload.yaml diff --git a/packages/gapic-generator/.github/workflows/pypi-upload.yaml b/packages/gapic-generator/.github/workflows/pypi-upload.yaml deleted file mode 100644 index 48d7582178b6..000000000000 --- a/packages/gapic-generator/.github/workflows/pypi-upload.yaml +++ /dev/null @@ -1,25 +0,0 @@ -name: Upload Python Package to PyPI - -on: - release: - types: [created] - -jobs: - publish: - runs-on: ubuntu-latest - environment: PyPI - steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v3 - with: - python-version: '3.x' - - name: Install dependencies - run: python -m pip install twine wheel - - name: Package and upload modulee - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - python setup.py sdist bdist_wheel - twine upload dist/* From a12f0e64aeddabe11d7ab8d471e98445a3d51255 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 11 May 
2022 15:44:30 +0200 Subject: [PATCH 0816/1339] chore(deps): update dependency pypandoc to v1.8 (#1309) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 1a145f3c1851..420093ea74e9 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,7 +4,7 @@ googleapis-common-protos==1.56.0 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.1 -pypandoc==1.7.5 +pypandoc==1.8 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==62.1.0 From 1b11dc89265194bce7a98b59a119cea54c4a93de Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 12 May 2022 20:24:48 +0200 Subject: [PATCH 0817/1339] chore(deps): update dependency setuptools to v62.2.0 (#1308) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 420093ea74e9..0b829bd45257 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,5 +7,5 @@ protobuf==3.20.1 pypandoc==1.8 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==62.1.0 +setuptools==62.2.0 grpc-google-iam-v1==0.12.4 From e55a27b2e9a9819231903ecdd378f19f909f5cd9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 13 May 2022 15:05:15 +0200 Subject: [PATCH 0818/1339] chore(deps): update dependency googleapis-common-protos to v1.56.1 (#1311) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0b829bd45257..cd80ffb0383a 100644 --- a/packages/gapic-generator/requirements.txt +++ 
b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.1.3 google-api-core==2.7.3 -googleapis-common-protos==1.56.0 +googleapis-common-protos==1.56.1 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.1 From aa525f7cc8c5b957f8f5b14a839b022749094efd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 17 May 2022 14:26:53 +0200 Subject: [PATCH 0819/1339] chore(deps): update dependency setuptools to v62.3.1 (#1315) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index cd80ffb0383a..3264f444b28f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,5 +7,5 @@ protobuf==3.20.1 pypandoc==1.8 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==62.2.0 +setuptools==62.3.1 grpc-google-iam-v1==0.12.4 From 942d30f025dab54a7efd0d516d02e382cfc7a329 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 18 May 2022 21:22:32 +0200 Subject: [PATCH 0820/1339] chore(deps): update dependency setuptools to v62.3.2 (#1318) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3264f444b28f..18016663dca6 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,5 +7,5 @@ protobuf==3.20.1 pypandoc==1.8 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==62.3.1 +setuptools==62.3.2 grpc-google-iam-v1==0.12.4 From dacbd40087e43ff6d38160561ce5a069422eaea8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 19 May 2022 14:00:43 +0200 Subject: [PATCH 0821/1339] chore(deps): update dependency google-api-core to v2.8.0 (#1319) --- packages/gapic-generator/requirements.txt | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 18016663dca6..0f6feaaa5509 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.7.3 +google-api-core==2.8.0 googleapis-common-protos==1.56.1 jinja2==3.1.2 MarkupSafe==2.1.1 From c6d4a6246a2129129b25f5b36a1064542a740c85 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sun, 22 May 2022 14:10:07 -0400 Subject: [PATCH 0822/1339] test: use unittest.mock (#1320) * test: use unittest.mock * test: use unittest.mock * install asyncmock for python_version < '3.8' * fix showcase alternative templates test * fix fragment tests --- .../gapic-generator/gapic/ads-templates/noxfile.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 7 ++++++- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 7 ++++++- packages/gapic-generator/noxfile.py | 10 +++++----- .../tests/integration/goldens/asset/noxfile.py | 2 +- .../tests/unit/gapic/asset_v1/test_asset_service.py | 7 ++++++- .../tests/integration/goldens/credentials/noxfile.py | 2 +- .../unit/gapic/credentials_v1/test_iam_credentials.py | 7 ++++++- .../tests/integration/goldens/eventarc/noxfile.py | 2 +- .../tests/unit/gapic/eventarc_v1/test_eventarc.py | 7 ++++++- .../tests/integration/goldens/logging/noxfile.py | 2 +- .../unit/gapic/logging_v2/test_config_service_v2.py | 7 ++++++- .../unit/gapic/logging_v2/test_logging_service_v2.py | 7 ++++++- .../unit/gapic/logging_v2/test_metrics_service_v2.py | 7 ++++++- .../tests/integration/goldens/redis/noxfile.py | 2 +- .../tests/unit/gapic/redis_v1/test_cloud_redis.py | 7 ++++++- packages/gapic-generator/tests/system/conftest.py | 2 +- packages/gapic-generator/tests/unit/schema/test_api.py | 1 - .../gapic-generator/tests/unit/utils/test_uri_conv.py | 1 - 
20 files changed, 67 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 5d45dee21a4f..6d9d548323d2 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -18,7 +18,7 @@ ALL_PYTHON = [ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'mock', 'pytest', 'pytest-cov') + session.install('coverage', 'pytest', 'pytest-cov') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1c8ff54ac4df..dbaa5115f360 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -3,7 +3,12 @@ {% block content %} import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 95d9a0d803e1..994c793256a8 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -44,7 +44,7 @@ nox.sessions = [ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') session.install('-e', '.') session.run( diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1c54a8c54ec7..6492d5b753a0 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -4,7 +4,12 @@ {% import "tests/unit/gapic/%name_%version/%sub/test_macros.j2" as test_macros %} import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 9a1e5624679d..1322dd120f08 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -138,7 +138,7 @@ def fragment(session, use_ads_templates=False): "pytest", "pytest-cov", "pytest-xdist", - "asyncmock", + "asyncmock; python_version < '3.8'", "pytest-asyncio", "grpcio-tools", ) @@ -246,7 +246,7 @@ def showcase( """Run the Showcase test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): - session.install("mock", "pytest", "pytest-asyncio") + session.install("pytest", "pytest-asyncio") session.run( "py.test", "--quiet", @@ -265,7 +265,7 @@ def showcase_mtls( """Run the Showcase mtls test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): - session.install("mock", "pytest", "pytest-asyncio") + session.install("pytest", "pytest-asyncio") session.run( "py.test", "--quiet", @@ -303,7 +303,7 @@ def run_showcase_unit_tests(session, fail_under=100): "pytest", "pytest-cov", "pytest-xdist", - "asyncmock", + "asyncmock; python_version < '3.8'", "pytest-asyncio", ) @@ -417,7 +417,7 @@ def snippetgen(session): # 
Install gapic-generator-python session.install("-e", ".") - session.install("grpcio-tools", "mock", "pytest", "pytest-asyncio") + session.install("grpcio-tools", "pytest", "pytest-asyncio") session.run("py.test", "-vv", "tests/snippetgen") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index d797a0310cae..7b144a0216e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -55,7 +55,7 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index fe7455ab7c5c..7af0c175051b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -14,7 +14,12 @@ # limitations under the License. 
# import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index c34c6850f4bf..7dc0b7b5bee5 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -55,7 +55,7 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index df3f450167fc..62ceb5bf7c29 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -14,7 +14,12 @@ # limitations under the License. 
# import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 7ae686f15a04..05e8b9a45747 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -55,7 +55,7 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 5c2da6d6a24b..b7e41e077e78 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -14,7 +14,12 @@ # limitations under the License. 
# import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 430a4c506418..2cf8ceab4875 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -55,7 +55,7 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1920f259ad70..4f8141e0f58c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -14,7 +14,12 @@ # limitations under the License. 
# import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 0f5ee1b14052..f5c3ec05d9a9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -14,7 +14,12 @@ # limitations under the License. # import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 42e27b74fecb..52126b86a3eb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -14,7 +14,12 @@ # limitations under the License. 
# import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 6cce26dcc4aa..d376055b2408 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -55,7 +55,7 @@ def unit(session): """Run the unit test suite.""" - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') session.install('-e', '.') session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 185ec21bc6cd..a836902e8554 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -14,7 +14,12 @@ # limitations under the License. 
# import os -import mock +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 86935c77420b..15b4f4c300f5 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -14,7 +14,7 @@ import collections import grpc -import mock +from unittest import mock import os import pytest diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index bb76f1420492..67b1a304a5c2 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -14,7 +14,6 @@ import collections -from typing import Sequence from unittest import mock import pytest diff --git a/packages/gapic-generator/tests/unit/utils/test_uri_conv.py b/packages/gapic-generator/tests/unit/utils/test_uri_conv.py index 5c870b430af2..ccb7d4f35936 100644 --- a/packages/gapic-generator/tests/unit/utils/test_uri_conv.py +++ b/packages/gapic-generator/tests/unit/utils/test_uri_conv.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from unittest import mock import pypandoc From beb4d27f9b8ca4aa63097c71881f4cb7f742c8ac Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 24 May 2022 18:11:47 +0200 Subject: [PATCH 0823/1339] chore(deps): update dependency pypandoc to v1.8.1 (#1322) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0f6feaaa5509..4d6b56bd0cd8 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,7 +4,7 @@ googleapis-common-protos==1.56.1 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.1 -pypandoc==1.8 +pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==62.3.2 From e5b8417d037fb5f3f30433db27ec079bdafe102a Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Tue, 24 May 2022 11:14:42 -0700 Subject: [PATCH 0824/1339] feat: adds LRO mixin. (#1304) Adds support for LRO to existing MixIns implementation. It extends client interface with get_operation, wait_operation, cancel_operation, delete_operation, list_operations methods (if they were defined in service yaml). 
--- .../%sub/services/%service/_mixins.py.j2 | 269 ++++++++ .../%sub/services/%service/client.py.j2 | 3 + .../%service/transports/_mixins.py.j2 | 104 +++ .../services/%service/transports/base.py.j2 | 60 ++ .../services/%service/transports/grpc.py.j2 | 3 + .../%name_%version/%sub/_test_mixins.py.j2 | 341 +++++++++ .../%name_%version/%sub/test_%service.py.j2 | 37 +- packages/gapic-generator/gapic/schema/api.py | 4 +- .../services/%service/_async_mixins.py.j2 | 271 ++++++++ .../%sub/services/%service/_mixins.py.j2 | 269 ++++++++ .../%sub/services/%service/async_client.py.j2 | 3 + .../%sub/services/%service/client.py.j2 | 4 +- .../%service/transports/_mixins.py.j2 | 104 +++ .../services/%service/transports/base.py.j2 | 60 ++ .../services/%service/transports/grpc.py.j2 | 3 + .../%service/transports/grpc_asyncio.py.j2 | 3 + .../%name_%version/%sub/_test_mixins.py.j2 | 648 ++++++++++++++++++ .../%name_%version/%sub/test_%service.py.j2 | 37 +- .../tests/unit/schema/test_api.py | 81 +++ 19 files changed, 2300 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 index b398283458ba..a5ef010df563 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -1,3 +1,272 @@ + {# LRO mixins #} + {% if api.has_operations_mixin %} + {% if "ListOperations" in api.mixin_api_methods %} + def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + def wait_operation( + self, + request: operations_pb2.WaitOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + metadata = tuple(metadata) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + {% endif %} {# LRO #} + {# IAM mixins #} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index a00aa8f25631..e1db165909db 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -39,6 +39,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 index d2842263a425..00b0d53b7112 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 @@ -1,5 +1,109 @@ {% if "grpc" in opts.transport %} + {% if api.has_operations_mixin %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + {% endif %} + + {% if "ListOperations" in api.mixin_api_methods %} + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + {% endif %} + + {% endif %} {# LRO #} + {% if api.has_location_mixin %} {% if "ListLocations" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index cb1bf8eaa0a0..19f6f2f93b05 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -29,6 +29,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} try: @@ -172,6 +175,63 @@ class {{ service.name }}Transport(abc.ABC): raise NotImplementedError() {% endfor %} + {% if api.has_operations_mixin %} + {% if "ListOperations" in api.mixin_api_methods %} + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise 
NotImplementedError() + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + {% endif %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + {% endif %} + {% endif %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} {% if not opts.add_iam_methods and api.has_iam_mixin %} {% if "SetIamPolicy" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index fdb71fc98b24..b9d46f1cd775 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -28,6 +28,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 
b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index a9f7aa7b9ec7..8175f2e0e5de 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,3 +1,344 @@ +{% if api.has_operations_mixin and 'grpc' in opts.transport %} + +{% if "DeleteOperation" in api.mixin_api_methods %} +def test_delete_operation(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + +{% endif %} {# DeleteOperation #} + +{% if "CancelOperation" in api.mixin_api_methods %} +def test_cancel_operation(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + +def test_cancel_operation_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +{% endif %} {# CancelOperation #} + +{% if "WaitOperation" in api.mixin_api_methods %} + +def test_wait_operation(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.wait_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_wait_operation_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.wait_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_wait_operation_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + +{% endif %} {# WaitOperation #} + +{% if "GetOperation" in api.mixin_api_methods %} + +def test_get_operation(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + + call.assert_called() + +{% endif %} {# GetOperation #} + +{% if "ListOperations" in api.mixin_api_methods %} +def test_list_operations(transport: str = "grpc"): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + + call.assert_called() + +{% endif %} {# ListOperations #} + +{% endif %} {# LRO #} + {% if api.has_location_mixin and 'grpc' in opts.transport %} {# ListLocation #} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index dbaa5115f360..7b714030738f 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -51,6 +51,9 @@ from google.protobuf import json_format {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import 
operations_pb2 +{% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types @@ -1687,15 +1690,47 @@ def test_{{ service.name|snake_case }}_base_transport(): '{{ method.transport_safe_name|snake_case }}', {% endfor %} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} - {% if opts.add_iam_methods or api.has_iam_mixin %} + {% if opts.add_iam_methods %} 'set_iam_policy', 'get_iam_policy', 'test_iam_permissions', {% endif %} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + 'set_iam_policy', + {% endif %} + {% if "GetIamPolicy" in api.mixin_api_methods %} + 'get_iam_policy', + {% endif %} + {% if "TestIamPermissions" in api.mixin_api_methods %} + 'test_iam_permissions', + {% endif %} + {% endif %} {# has_iam_mixin #} {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} 'get_location', + {% endif %} + {% if "ListLocations" in api.mixin_api_methods %} 'list_locations', {% endif %} + {% endif %} {# Location Mixin #} + {% if api.has_operations_mixin %} + {% if "GetOperation" in api.mixin_api_methods %} + 'get_operation', + {% endif %} + {% if "WaitOperation" in api.mixin_api_methods %} + 'wait_operation', + {% endif %} + {% if "CancelOperation" in api.mixin_api_methods %} + 'cancel_operation', + {% endif %} + {% if "DeleteOperation" in api.mixin_api_methods %} + 'delete_operation', + {% endif %} + {% if "ListOperations" in api.mixin_api_methods %} + 'list_operations', + {% endif %} + {% endif %} {# has_operations_mixin #} ) for method in methods: with pytest.raises(NotImplementedError): diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index afed38045590..e5132f33840e 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -524,7 +524,9 @@ def 
mixin_api_methods(self) -> Dict[str, MethodDescriptorProto]: if not self._has_iam_overrides and self.has_iam_mixin: methods = {**methods, ** self._get_methods_from_service(iam_policy_pb2)} - # For LRO, expose operations client instead. + if self.has_operations_mixin: + methods = {**methods, ** + self._get_methods_from_service(operations_pb2)} return methods @cached_property diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index aa2a3c561c49..3259a653aade 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -1,3 +1,274 @@ + {# LRO mixins #} + + {% if api.has_operations_mixin %} + + {% if "ListOperations" in api.mixin_api_methods %} + async def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + async def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + async def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + async def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + async def wait_operation( + self, + request: operations_pb2.WaitOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + metadata = tuple(metadata) + + # Send the request. 
+ response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + {% endif %} {# LRO #} + {# IAM mixins #} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index b398283458ba..5336f9a95d69 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -1,3 +1,272 @@ + {# LRO mixins #} + {% if api.has_operations_mixin %} + {% if "ListOperations" in api.mixin_api_methods %} + def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + def wait_operation( + self, + request: operations_pb2.WaitOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.wait_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + metadata = tuple(metadata) + + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + {% endif %} + {% endif %} {# LRO #} + {# IAM mixins #} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 6bfb20f587fc..df2298abb702 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -37,6 +37,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 89fe4e9b33e4..8bdef4b8487d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -47,6 +47,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter 
%} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO {% if 'grpc' in opts.transport %} @@ -661,7 +664,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Done; return the response. return response {% endif %} - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 index d2842263a425..ff573768121f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -1,5 +1,109 @@ {% if "grpc" in opts.transport %} + {% if api.has_operations_mixin %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + {% endif %} + + {% if "ListOperations" in api.mixin_api_methods %} + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + {% endif %} + + {% endif %} {# LRO #} + {% if api.has_location_mixin %} {% if "ListLocations" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 99b35fd5f6c2..7788d2e0c736 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -29,6 +29,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if 
api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} {% filter sort_lines %} {% for operations_service in api.get_extended_operations_services(service) %} @@ -181,6 +184,63 @@ class {{ service.name }}Transport(abc.ABC): raise NotImplementedError() {% endfor %} + {% if api.has_operations_mixin %} + {% if "ListOperations" in api.mixin_api_methods %} + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "GetOperation" in api.mixin_api_methods %} + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + {% endif %} + + {% if "CancelOperation" in api.mixin_api_methods %} + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + {% endif %} + + {% if "DeleteOperation" in api.mixin_api_methods %} + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + {% endif %} + + {% if "WaitOperation" in api.mixin_api_methods %} + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + {% endif %} + {% endif %} + {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if not opts.add_iam_methods and api.has_iam_mixin %} {% if "SetIamPolicy" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index dfcbf833a24b..e2422154fdb1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -28,6 +28,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index b29d68495cb6..eec77586acab 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -28,6 +28,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .grpc import {{ service.name }}GrpcTransport diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 134bdfe1bc7a..4e7f525e5563 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,3 +1,651 @@ +{% if api.has_operations_mixin and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} + +{% if "DeleteOperation" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_delete_operation(transport: str = "grpc"): +{% else %} +def test_delete_operation(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = None + response = client.delete_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): +{% else %} +def test_delete_operation_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + {% if mode == "" %} + call.return_value = None + + client.delete_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): +{% else %} +def test_delete_operation_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + {% endif %} + call.assert_called() +{% endfor %} + +{% endif %} {# DeleteOperation #} + +{% if "CancelOperation" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_cancel_operation(transport: str = "grpc"): +{% else %} +def test_cancel_operation(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = None + response = client.cancel_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): +{% else %} +def test_cancel_operation_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + {% if mode == "" %} + call.return_value = None + + client.cancel_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): +{% else %} +def test_cancel_operation_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + {% endif %} + call.assert_called() +{% endfor %} +{% endif %} {# CancelOperation #} + +{% if "WaitOperation" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_wait_operation(transport: str = "grpc"): +{% else %} +def test_wait_operation(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = operations_pb2.Operation() + response = client.wait_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_wait_operation_field_headers_async(): +{% else %} +def test_wait_operation_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + {% if mode == "" %} + call.return_value = operations_pb2.Operation() + + client.wait_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.wait_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): +{% else %} +def test_wait_operation_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + {% endif %} + call.assert_called() +{% endfor %} + +{% endif %} {# WaitOperation #} + +{% if "GetOperation" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_get_operation(transport: str = "grpc"): +{% else %} +def test_get_operation(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): +{% else %} +def test_get_operation_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + {% if mode == "" %} + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): +{% else %} +def test_get_operation_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + {% endif %} + call.assert_called() +{% endfor %} + +{% endif %} {# GetOperation #} + +{% if "ListOperations" in api.mixin_api_methods %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_list_operations(transport: str = "grpc"): +{% else %} +def test_list_operations(transport: str = "grpc"): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + {% endif %} + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out 
the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + {% if mode == "" %} + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): +{% else %} +def test_list_operations_field_headers(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + {% if mode == "" %} + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + {% endif %} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +{% endfor %} + +{% for mode in ["", "async"] %} +{% if mode == "async" %} +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): +{% else %} +def test_list_operations_from_dict(): +{% endif %} + {% if mode == "" %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% else %} + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + ) + {% endif %} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ {% if mode == "" %} + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + {% else %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + {% endif %} + call.assert_called() +{% endfor %} + +{% endif %} {# ListOperations #} + +{% endif %} {# LRO #} + {% if api.has_location_mixin and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} {# ListLocation #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6492d5b753a0..aeecc8a71c03 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -58,6 +58,9 @@ from google.protobuf import json_format {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types @@ -767,15 +770,47 @@ def test_{{ service.name|snake_case }}_base_transport(): '{{ method.transport_safe_name|snake_case }}', {% endfor %} {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} - {% if opts.add_iam_methods or api.has_iam_mixin %} + {% if opts.add_iam_methods %} 'set_iam_policy', 'get_iam_policy', 'test_iam_permissions', {% endif %} + {% if not opts.add_iam_methods and api.has_iam_mixin %} + {% if "SetIamPolicy" in api.mixin_api_methods %} + 'set_iam_policy', + {% endif %} + {% if "GetIamPolicy" in api.mixin_api_methods %} + 'get_iam_policy', + {% endif %} + {% if "TestIamPermissions" in api.mixin_api_methods %} + 'test_iam_permissions', + {% endif %} + {% endif %} {% if api.has_location_mixin %} + {% if "GetLocation" in api.mixin_api_methods %} 'get_location', + {% endif %} + {% if "ListLocations" in api.mixin_api_methods %} 'list_locations', {% endif %} + {% endif %} + {% if api.has_operations_mixin %} + {% if "GetOperation" in api.mixin_api_methods %} + 'get_operation', + {% endif %} + {% if "WaitOperation" in api.mixin_api_methods %} + 'wait_operation', + {% endif %} + {% if "CancelOperation" in api.mixin_api_methods %} + 'cancel_operation', + {% endif %} + {% if "DeleteOperation" in api.mixin_api_methods %} + 'delete_operation', + {% endif %} + {% if "ListOperations" in api.mixin_api_methods %} + 'list_operations', + {% endif %} + {% endif %} ) for method in methods: with pytest.raises(NotImplementedError): diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 67b1a304a5c2..d897255068ab 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2275,3 +2275,84 @@ def test_mixin_api_methods_iam_overrides(): }) api_schema = api.API.build(fd, 'google.example.v1', opts=opts) assert api_schema.mixin_api_methods == {} + + +def test_mixin_api_methods_lro(): + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()), + make_message_pb2(name='ExampleResponse', fields=()), + ), + 
services=(descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='FooMethod', + # Input and output types don't matter. + input_type='google.example.v1.ExampleRequest', + output_type='google.example.v1.ExampleResponse', + ), + ), + ),), + ), + ) + r1 = { + 'selector': 'google.longrunning.Operations.CancelOperation', + 'post': '/v1/{name=examples/*}/*', + 'body': '*' + } + r2 = { + 'selector': 'google.longrunning.Operations.DeleteOperation', + 'get': '/v1/{name=examples/*}/*', + 'body': '*' + } + r3 = { + 'selector': 'google.longrunning.Operations.WaitOperation', + 'post': '/v1/{name=examples/*}/*', + 'body': '*' + } + r4 = { + 'selector': 'google.longrunning.Operations.GetOperation', + 'post': '/v1/{name=examples/*}/*', + 'body': '*' + } + opts = Options(service_yaml_config={ + 'apis': [ + { + 'name': 'google.longrunning.Operations' + } + ], + 'http': { + 'rules': [r1, r2, r3, r4] + } + }) + + ms = methods_from_service(operations_pb2, 'Operations') + assert len(ms) == 5 + m1 = ms['CancelOperation'] + m1.options.ClearExtension(annotations_pb2.http) + m1.options.Extensions[annotations_pb2.http].selector = r1['selector'] + m1.options.Extensions[annotations_pb2.http].post = r1['post'] + m1.options.Extensions[annotations_pb2.http].body = r1['body'] + m2 = ms['DeleteOperation'] + m2.options.ClearExtension(annotations_pb2.http) + m2.options.Extensions[annotations_pb2.http].selector = r2['selector'] + m2.options.Extensions[annotations_pb2.http].get = r2['get'] + m2.options.Extensions[annotations_pb2.http].body = r2['body'] + m3 = ms['WaitOperation'] + m3.options.ClearExtension(annotations_pb2.http) + m3.options.Extensions[annotations_pb2.http].selector = r3['selector'] + m3.options.Extensions[annotations_pb2.http].post = r3['post'] + m3.options.Extensions[annotations_pb2.http].body = r3['body'] + m4 = ms['GetOperation'] + m4.options.ClearExtension(annotations_pb2.http) + 
m4.options.Extensions[annotations_pb2.http].selector = r4['selector'] + m4.options.Extensions[annotations_pb2.http].post = r4['post'] + m4.options.Extensions[annotations_pb2.http].body = r4['body'] + + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + assert api_schema.mixin_api_methods == { + 'CancelOperation': m1, 'DeleteOperation': m2, 'WaitOperation': m3, + 'GetOperation': m4} From 49b52e696b17b3efe03abbd9553199b590850056 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 25 May 2022 11:46:44 -0400 Subject: [PATCH 0825/1339] chore(main): release 1.0.0 (#1303) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b4759a23a18e..92d5a0406064 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.0.0](https://github.com/googleapis/gapic-generator-python/compare/v0.65.3...v1.0.0) (2022-05-24) + + +### Features + +* adds LRO mixin. 
([#1304](https://github.com/googleapis/gapic-generator-python/issues/1304)) ([18af90a](https://github.com/googleapis/gapic-generator-python/commit/18af90a8b51159da85f3f93bd37248a58ede4ab3)) +* release as stable ([767aaba](https://github.com/googleapis/gapic-generator-python/commit/767aaba6d623fab2af972140ffd4de3b43a92439)) + ### [0.65.3](https://github.com/googleapis/gapic-generator-python/compare/v0.65.2...v0.65.3) (2022-05-03) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0b7816d20cbf..6b30a64e56ff 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "0.65.3" +version = "1.0.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 354ba17c46251b1203e087452a9d73d5879d16c5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 27 May 2022 14:21:30 +0200 Subject: [PATCH 0826/1339] chore(deps): update dependency googleapis-common-protos to v1.56.2 (#1326) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4d6b56bd0cd8..8c20afad8e8c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.1.3 google-api-core==2.8.0 -googleapis-common-protos==1.56.1 +googleapis-common-protos==1.56.2 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.1 From d50d462e9969d33123b8e96054dd919910ccb4f2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 27 May 2022 19:02:58 +0200 Subject: [PATCH 0827/1339] chore(deps): update dependency google-api-core to v2.8.1 (#1325) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8c20afad8e8c..9715527c8133 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.8.0 +google-api-core==2.8.1 googleapis-common-protos==1.56.2 jinja2==3.1.2 MarkupSafe==2.1.1 From 400877569a28ff2b75b51390a30dc5fb4c0c1873 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Fri, 10 Jun 2022 11:20:00 -0700 Subject: [PATCH 0828/1339] fix: add missing metadata. (#1335) * fix: add missing metadata. * chore: update dep. * chore: remove flaky setup. --- .../%sub/services/%service/_async_mixins.py.j2 | 7 ++++++- .../%sub/services/%service/_mixins.py.j2 | 7 ++++++- packages/gapic-generator/noxfile.py | 10 ---------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 3259a653aade..f88e747eae43 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -258,7 +258,12 @@ client_info=DEFAULT_CLIENT_INFO, ) - metadata = tuple(metadata) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) # Send the request. 
response = await rpc( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index 5336f9a95d69..cf75daaeacbe 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -256,7 +256,12 @@ client_info=DEFAULT_CLIENT_INFO, ) - metadata = tuple(metadata) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) # Send the request. response = rpc( diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 1322dd120f08..3bc5d4898b37 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -347,16 +347,6 @@ def showcase_unit_alternative_templates(session): def showcase_unit_add_iam_methods(session): with showcase_library(session, other_opts=("add-iam-methods",)) as lib: session.chdir(lib) - - # Unit tests are run twice with different dependencies. - # 1. Run tests at lower bound of dependencies. - session.install("nox") - session.run("nox", "-s", "update_lower_bounds") - session.install(".", "--force-reinstall", "-c", "constraints.txt") - run_showcase_unit_tests(session, fail_under=0) - - # 2. Run the tests again with latest version of dependencies. 
- session.install(".", "--upgrade", "--force-reinstall") run_showcase_unit_tests(session, fail_under=100) From be091f55f19a0b00cac7ed082c5e0285ac70bdae Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 10 Jun 2022 14:33:35 -0400 Subject: [PATCH 0829/1339] chore(main): release 1.0.1 (#1336) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 92d5a0406064..116890487aa0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.0.1](https://github.com/googleapis/gapic-generator-python/compare/v1.0.0...v1.0.1) (2022-06-10) + + +### Bug Fixes + +* add missing metadata. ([#1335](https://github.com/googleapis/gapic-generator-python/issues/1335)) ([228efda](https://github.com/googleapis/gapic-generator-python/commit/228efdab198dd18de82df2be1b8d858b78811a89)) + ## [1.0.0](https://github.com/googleapis/gapic-generator-python/compare/v0.65.3...v1.0.0) (2022-05-24) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 6b30a64e56ff..9cd4d10ba13f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.0.0" +version = "1.0.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 06577f8f1f6320e4bf06c379f4ce684000b8bd28 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 16 Jun 2022 19:48:27 +0200 Subject: [PATCH 0830/1339] chore(deps): update dependency google-api-core to 
v2.8.2 (#1337) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 9715527c8133..5b8db7a67f93 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.8.1 +google-api-core==2.8.2 googleapis-common-protos==1.56.2 jinja2==3.1.2 MarkupSafe==2.1.1 From 95f07a87c142f0f8f885f86bd70ca8e5fd7f37ad Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Sat, 18 Jun 2022 22:20:27 -0400 Subject: [PATCH 0831/1339] docs: fix changelog header to consistent size (#1331) --- packages/gapic-generator/CHANGELOG.md | 200 +++++++++++++------------- 1 file changed, 100 insertions(+), 100 deletions(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 116890487aa0..05acb90a2474 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -16,14 +16,14 @@ * adds LRO mixin. 
([#1304](https://github.com/googleapis/gapic-generator-python/issues/1304)) ([18af90a](https://github.com/googleapis/gapic-generator-python/commit/18af90a8b51159da85f3f93bd37248a58ede4ab3)) * release as stable ([767aaba](https://github.com/googleapis/gapic-generator-python/commit/767aaba6d623fab2af972140ffd4de3b43a92439)) -### [0.65.3](https://github.com/googleapis/gapic-generator-python/compare/v0.65.2...v0.65.3) (2022-05-03) +## [0.65.3](https://github.com/googleapis/gapic-generator-python/compare/v0.65.2...v0.65.3) (2022-05-03) ### Bug Fixes * don't use stale session in rest transport ([#1291](https://github.com/googleapis/gapic-generator-python/issues/1291)) ([a96ef9e](https://github.com/googleapis/gapic-generator-python/commit/a96ef9ef3f99b0114f1d5630ee6e8907dd24bec2)) -### [0.65.2](https://github.com/googleapis/gapic-generator-python/compare/v0.65.1...v0.65.2) (2022-04-21) +## [0.65.2](https://github.com/googleapis/gapic-generator-python/compare/v0.65.1...v0.65.2) (2022-04-21) ### Bug Fixes @@ -32,7 +32,7 @@ * fixes bug when an annotation path in an http rule does not have =. 
([#1284](https://github.com/googleapis/gapic-generator-python/issues/1284)) ([6dcb97c](https://github.com/googleapis/gapic-generator-python/commit/6dcb97cfb60d7d81dc205c20c762dfd5e74659e1)) * use async snippet in async client method docstring ([#1280](https://github.com/googleapis/gapic-generator-python/issues/1280)) ([b72e1e0](https://github.com/googleapis/gapic-generator-python/commit/b72e1e063d587a93b65aa77dd222341bcc87ba39)) -### [0.65.1](https://github.com/googleapis/gapic-generator-python/compare/v0.65.0...v0.65.1) (2022-04-13) +## [0.65.1](https://github.com/googleapis/gapic-generator-python/compare/v0.65.0...v0.65.1) (2022-04-13) ### Bug Fixes @@ -59,7 +59,7 @@ * add missing import for Mapping ([#1267](https://github.com/googleapis/gapic-generator-python/issues/1267)) ([f416622](https://github.com/googleapis/gapic-generator-python/commit/f416622c753a56036cf9ac1fa6eb818a6e557018)) -### [0.63.8](https://github.com/googleapis/gapic-generator-python/compare/v0.63.7...v0.63.8) (2022-04-04) +## [0.63.8](https://github.com/googleapis/gapic-generator-python/compare/v0.63.7...v0.63.8) (2022-04-04) ### Bug Fixes @@ -68,35 +68,35 @@ * fix docstring for map fields ([#1249](https://github.com/googleapis/gapic-generator-python/issues/1249)) ([3100464](https://github.com/googleapis/gapic-generator-python/commit/310046478092b4fc4ef9dfdd1e50363ca6fc72c5)) * sanitize file names ([#1236](https://github.com/googleapis/gapic-generator-python/issues/1236)) ([3072ffb](https://github.com/googleapis/gapic-generator-python/commit/3072ffb6000983ecb06d8dd7b44f77da61cc992e)) -### [0.63.7](https://github.com/googleapis/gapic-generator-python/compare/v0.63.6...v0.63.7) (2022-03-08) +## [0.63.7](https://github.com/googleapis/gapic-generator-python/compare/v0.63.6...v0.63.7) (2022-03-08) ### Bug Fixes * resolve issue where rest transport is not used in certain tests ([#1231](https://github.com/googleapis/gapic-generator-python/issues/1231)) 
([90ab41a](https://github.com/googleapis/gapic-generator-python/commit/90ab41ab1f1b058ec0eb4a96b973031898f64df0)) -### [0.63.6](https://github.com/googleapis/gapic-generator-python/compare/v0.63.5...v0.63.6) (2022-03-04) +## [0.63.6](https://github.com/googleapis/gapic-generator-python/compare/v0.63.5...v0.63.6) (2022-03-04) ### Bug Fixes * **deps:** require google-api-core >=2.3.2 ([#1225](https://github.com/googleapis/gapic-generator-python/issues/1225)) ([f59917f](https://github.com/googleapis/gapic-generator-python/commit/f59917fdbdf5ee4091e35d721811dcd7f4b9a3f3)) -### [0.63.5](https://github.com/googleapis/gapic-generator-python/compare/v0.63.4...v0.63.5) (2022-02-25) +## [0.63.5](https://github.com/googleapis/gapic-generator-python/compare/v0.63.4...v0.63.5) (2022-02-25) ### Bug Fixes * update license year to 2022 ([#1199](https://github.com/googleapis/gapic-generator-python/issues/1199)) ([31292d5](https://github.com/googleapis/gapic-generator-python/commit/31292d59c8d08695f3e2dfa75861c86d723a9d35)) -### [0.63.4](https://github.com/googleapis/gapic-generator-python/compare/v0.63.3...v0.63.4) (2022-02-22) +## [0.63.4](https://github.com/googleapis/gapic-generator-python/compare/v0.63.3...v0.63.4) (2022-02-22) ### Bug Fixes * fix snippet region tag format ([#1210](https://github.com/googleapis/gapic-generator-python/issues/1210)) ([e895106](https://github.com/googleapis/gapic-generator-python/commit/e8951064827c726bb651801336b86188e2628386)) -### [0.63.3](https://github.com/googleapis/gapic-generator-python/compare/v0.63.2...v0.63.3) (2022-02-16) +## [0.63.3](https://github.com/googleapis/gapic-generator-python/compare/v0.63.2...v0.63.3) (2022-02-16) ### Bug Fixes @@ -105,14 +105,14 @@ * fix docstring code block formatting ([#1206](https://github.com/googleapis/gapic-generator-python/issues/1206)) ([500dfe7](https://github.com/googleapis/gapic-generator-python/commit/500dfe7e401888b3bea6488d6e6fee9955f1a2ab)) * HTTP body field messages with enums or recursive 
fields ([#1201](https://github.com/googleapis/gapic-generator-python/issues/1201)) ([246bfe2](https://github.com/googleapis/gapic-generator-python/commit/246bfe2948362bc8f6035aafc0dbd9e65f5acc2b)) -### [0.63.2](https://github.com/googleapis/gapic-generator-python/compare/v0.63.1...v0.63.2) (2022-02-09) +## [0.63.2](https://github.com/googleapis/gapic-generator-python/compare/v0.63.1...v0.63.2) (2022-02-09) ### Bug Fixes * fix lint sessions on generated samples ([#1192](https://github.com/googleapis/gapic-generator-python/issues/1192)) ([4d0ea18](https://github.com/googleapis/gapic-generator-python/commit/4d0ea182aa5500eee0f5485e88e14043974ae78b)) -### [0.63.1](https://github.com/googleapis/gapic-generator-python/compare/v0.63.0...v0.63.1) (2022-02-03) +## [0.63.1](https://github.com/googleapis/gapic-generator-python/compare/v0.63.0...v0.63.1) (2022-02-03) ### Bug Fixes @@ -128,7 +128,7 @@ * adds dynamic routing. ([#1135](https://github.com/googleapis/gapic-generator-python/issues/1135)) ([8c191a5](https://github.com/googleapis/gapic-generator-python/commit/8c191a5475f208213806fff81c0efa1d26216cd9)) * enable snippetgen for default templates ([#1171](https://github.com/googleapis/gapic-generator-python/issues/1171)) ([c1af051](https://github.com/googleapis/gapic-generator-python/commit/c1af051743dde2fb40e028c51de85dfea47a793d)) -### [0.62.1](https://github.com/googleapis/gapic-generator-python/compare/v0.62.0...v0.62.1) (2022-02-02) +## [0.62.1](https://github.com/googleapis/gapic-generator-python/compare/v0.62.0...v0.62.1) (2022-02-02) ### Bug Fixes @@ -176,7 +176,7 @@ * only set unset fields if they are query params ([#1130](https://github.com/googleapis/gapic-generator-python/issues/1130)) ([9ad98ca](https://github.com/googleapis/gapic-generator-python/commit/9ad98ca6833f1b280bf3c04c858f92276d59ffbe)) -### [0.59.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.0...v0.59.1) (2022-01-10) +## 
[0.59.1](https://github.com/googleapis/gapic-generator-python/compare/v0.59.0...v0.59.1) (2022-01-10) ### Bug Fixes @@ -190,7 +190,7 @@ * add snippet index ([#1121](https://www.github.com/googleapis/gapic-generator-python/issues/1121)) ([55d2bc6](https://www.github.com/googleapis/gapic-generator-python/commit/55d2bc6580e5db0f837de1b245533a8f1f2e9beb)) -### [0.58.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.3...v0.58.4) (2021-12-30) +## [0.58.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.3...v0.58.4) (2021-12-30) ### Bug Fixes @@ -198,14 +198,14 @@ * handle message bodies ([#1117](https://www.github.com/googleapis/gapic-generator-python/issues/1117)) ([36e3236](https://github.com/googleapis/gapic-generator-python/commit/36e3236b3832993331d8d99c10e72797a8851390)) -### [0.58.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.2...v0.58.3) (2021-12-28) +## [0.58.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.2...v0.58.3) (2021-12-28) ### Bug Fixes * add additional reserved names for disambiguation ([#1114](https://www.github.com/googleapis/gapic-generator-python/issues/1114)) ([1cffd8d](https://www.github.com/googleapis/gapic-generator-python/commit/1cffd8d99936cd10649faf05e0288b693e718f81)) -### [0.58.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.1...v0.58.2) (2021-12-13) +## [0.58.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.1...v0.58.2) (2021-12-13) ### Bug Fixes @@ -214,7 +214,7 @@ * non-string required fields provide correct values ([#1108](https://www.github.com/googleapis/gapic-generator-python/issues/1108)) ([bc5f729](https://www.github.com/googleapis/gapic-generator-python/commit/bc5f729cf777d30e1053e23a1d115460952478af)) * syntax fix and test for multiple required fields ([#1105](https://www.github.com/googleapis/gapic-generator-python/issues/1105)) 
([4e5fe2d](https://www.github.com/googleapis/gapic-generator-python/commit/4e5fe2db9d0d81929cc1559d3a134c9a38ae595c)) -### [0.58.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.0...v0.58.1) (2021-12-09) +## [0.58.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.58.0...v0.58.1) (2021-12-09) ### Bug Fixes @@ -242,14 +242,14 @@ * forward compatible diregapic LRO support ([#1085](https://www.github.com/googleapis/gapic-generator-python/issues/1085)) ([aa7f4d5](https://www.github.com/googleapis/gapic-generator-python/commit/aa7f4d568f7f43738ab3489fc84ce6bc5d6bda18)) -### [0.56.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.1...v0.56.2) (2021-11-08) +## [0.56.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.1...v0.56.2) (2021-11-08) ### Bug Fixes * don't enable snippetgen by default ([#1078](https://www.github.com/googleapis/gapic-generator-python/issues/1078)) ([8bdb709](https://www.github.com/googleapis/gapic-generator-python/commit/8bdb70931a9ecb1c89fda9608697b0762770bc12)) -### [0.56.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.0...v0.56.1) (2021-11-08) +## [0.56.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.56.0...v0.56.1) (2021-11-08) ### Bug Fixes @@ -268,7 +268,7 @@ * fix rest unit test ([#1074](https://www.github.com/googleapis/gapic-generator-python/issues/1074)) ([3b2918e](https://www.github.com/googleapis/gapic-generator-python/commit/3b2918ecaeb90229f22834438dc31755498ee2d0)) -### [0.55.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.55.0...v0.55.1) (2021-11-04) +## [0.55.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.55.0...v0.55.1) (2021-11-04) ### Bug Fixes @@ -294,7 +294,7 @@ * generate code snippets by default ([#1044](https://www.github.com/googleapis/gapic-generator-python/issues/1044)) 
([e46f443](https://www.github.com/googleapis/gapic-generator-python/commit/e46f443dbeffe16b63f97668801b06189769e972)) -### [0.53.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.3...v0.53.4) (2021-10-29) +## [0.53.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.3...v0.53.4) (2021-10-29) ### Bug Fixes @@ -303,14 +303,14 @@ * fix tests generation logic ([#1049](https://www.github.com/googleapis/gapic-generator-python/issues/1049)) ([8f213ad](https://www.github.com/googleapis/gapic-generator-python/commit/8f213add4cb02366bb370ef46a686c6f0c37a575)) * methods returning Operation w/o operation_info are now allowed. ([#1047](https://www.github.com/googleapis/gapic-generator-python/issues/1047)) ([6b640af](https://www.github.com/googleapis/gapic-generator-python/commit/6b640afbd93ea8c861b902211dc34e188234d072)) -### [0.53.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.2...v0.53.3) (2021-10-27) +## [0.53.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.2...v0.53.3) (2021-10-27) ### Bug Fixes * more fixes for rest transport ([#1042](https://www.github.com/googleapis/gapic-generator-python/issues/1042)) ([13d5f77](https://www.github.com/googleapis/gapic-generator-python/commit/13d5f77f8b6d4ce1181b29f2335d7584783be753)) -### [0.53.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.1...v0.53.2) (2021-10-27) +## [0.53.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.1...v0.53.2) (2021-10-27) ### Bug Fixes @@ -319,7 +319,7 @@ * Fix rest transport logic ([#1039](https://www.github.com/googleapis/gapic-generator-python/issues/1039)) ([50d61af](https://www.github.com/googleapis/gapic-generator-python/commit/50d61afd30b021835fe898e41b783f4d04acff09)) * list oneofs in docstring ([#1030](https://www.github.com/googleapis/gapic-generator-python/issues/1030)) 
([a0e25c8](https://www.github.com/googleapis/gapic-generator-python/commit/a0e25c8c00391b99a351e667eddc8b4fecad30d8)) -### [0.53.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.0...v0.53.1) (2021-10-13) +## [0.53.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.53.0...v0.53.1) (2021-10-13) ### Bug Fixes @@ -353,7 +353,7 @@ * improper types in pagers generation ([#970](https://www.github.com/googleapis/gapic-generator-python/issues/970)) ([bba3eea](https://www.github.com/googleapis/gapic-generator-python/commit/bba3eea5d45fe57c0395ceef30402ad7880013d7)) -### [0.51.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.1...v0.51.2) (2021-09-13) +## [0.51.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.1...v0.51.2) (2021-09-13) ### Bug Fixes @@ -361,7 +361,7 @@ * add a separate DEFAULT_CLIENT_INFO for rest clients ([#988](https://www.github.com/googleapis/gapic-generator-python/issues/988)) ([22ac400](https://www.github.com/googleapis/gapic-generator-python/commit/22ac40097ab50bb2d3a7f1a2d35d659c391e0927)) * **snippetgen:** use f-strings in print statements ([#975](https://www.github.com/googleapis/gapic-generator-python/issues/975)) ([122e85c](https://www.github.com/googleapis/gapic-generator-python/commit/122e85c37ff6aa0a99f64361397eb3df5495a3b4)) -### [0.51.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.0...v0.51.1) (2021-08-20) +## [0.51.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.51.0...v0.51.1) (2021-08-20) ### Bug Fixes @@ -380,7 +380,7 @@ * add 'dict' type annotation to 'request' ([#966](https://www.github.com/googleapis/gapic-generator-python/issues/966)) ([49205d9](https://www.github.com/googleapis/gapic-generator-python/commit/49205d99dd440690b838c8eb3f6a695f35b061c2)) -### [0.50.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.4...v0.50.5) (2021-07-22) +## 
[0.50.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.4...v0.50.5) (2021-07-22) ### Bug Fixes @@ -388,21 +388,21 @@ * enable self signed jwt for grpc ([#958](https://www.github.com/googleapis/gapic-generator-python/issues/958)) ([af02a9c](https://www.github.com/googleapis/gapic-generator-python/commit/af02a9cae522ff2cdc8e97cfffe2ba2bb84d6b6a)) * fix rest transport unit test and required query prams handling ([#951](https://www.github.com/googleapis/gapic-generator-python/issues/951)) ([b793017](https://www.github.com/googleapis/gapic-generator-python/commit/b7930177da9a8be556bf6485febcc0a9bdef897b)) -### [0.50.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.3...v0.50.4) (2021-06-30) +## [0.50.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.3...v0.50.4) (2021-06-30) ### Bug Fixes * fix syntax for Deprecationwarning ([#942](https://www.github.com/googleapis/gapic-generator-python/issues/942)) ([82dbddb](https://www.github.com/googleapis/gapic-generator-python/commit/82dbddb6a9caf1227c4b335345f365dd01025794)) -### [0.50.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.2...v0.50.3) (2021-06-29) +## [0.50.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.2...v0.50.3) (2021-06-29) ### Bug Fixes * disable always_use_jwt_access ([#939](https://www.github.com/googleapis/gapic-generator-python/issues/939)) ([1302352](https://www.github.com/googleapis/gapic-generator-python/commit/130235220849987df572c1840735b3c199b85dfc)) -### [0.50.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.1...v0.50.2) (2021-06-28) +## [0.50.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.1...v0.50.2) (2021-06-28) ### Bug Fixes @@ -410,7 +410,7 @@ * fix wrong scopes for self signed jwt ([#935](https://www.github.com/googleapis/gapic-generator-python/issues/935)) 
([e033acd](https://www.github.com/googleapis/gapic-generator-python/commit/e033acd44763f7cf65eabb6b35f66093022b1bcb)) * import warnings when needed ([#930](https://www.github.com/googleapis/gapic-generator-python/issues/930)) ([d4270ae](https://www.github.com/googleapis/gapic-generator-python/commit/d4270ae5805f44ab8ee30bb11fe42a0da6d79755)) -### [0.50.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.0...v0.50.1) (2021-06-24) +## [0.50.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.50.0...v0.50.1) (2021-06-24) ### Bug Fixes @@ -437,7 +437,7 @@ * add async samples ([#861](https://www.github.com/googleapis/gapic-generator-python/issues/861)) ([e385ffd](https://www.github.com/googleapis/gapic-generator-python/commit/e385ffd7f012c6a38c9fcd7c5f36ce090311032b)) -### [0.48.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.0...v0.48.1) (2021-06-09) +## [0.48.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.48.0...v0.48.1) (2021-06-09) ### Bug Fixes @@ -472,21 +472,21 @@ * support protobuf method deprecation option [gapic-generator-python] ([#875](https://www.github.com/googleapis/gapic-generator-python/issues/875)) ([5a5a839](https://www.github.com/googleapis/gapic-generator-python/commit/5a5a839b99d78ec5a5c52452e57c289b55ad1db5)) -### [0.46.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.2...v0.46.3) (2021-05-12) +## [0.46.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.2...v0.46.3) (2021-05-12) ### Bug Fixes * consistently use _pb2 identifier ([#883](https://www.github.com/googleapis/gapic-generator-python/issues/883)) ([d789c84](https://www.github.com/googleapis/gapic-generator-python/commit/d789c84d0d686bdb2d88179041b4c04cc32a3e66)) -### [0.46.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.1...v0.46.2) (2021-05-12) +## 
[0.46.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.1...v0.46.2) (2021-05-12) ### Bug Fixes * fix incorrectly referenced exceptions, add missing port to tests ([#873](https://www.github.com/googleapis/gapic-generator-python/issues/873)) ([40078c4](https://www.github.com/googleapis/gapic-generator-python/commit/40078c46b21a0dfa489d4cd80ed7d95bb542f3c3)), closes [#872](https://www.github.com/googleapis/gapic-generator-python/issues/872) -### [0.46.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.0...v0.46.1) (2021-05-07) +## [0.46.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.46.0...v0.46.1) (2021-05-07) ### Bug Fixes @@ -505,14 +505,14 @@ * Check for default value presence for non-optional fields in REST ([#868](https://www.github.com/googleapis/gapic-generator-python/issues/868)) ([5748001](https://www.github.com/googleapis/gapic-generator-python/commit/57480019c3e77c6b3a85bdaf8441334170b318e8)) -### [0.45.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.1...v0.45.2) (2021-05-06) +## [0.45.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.1...v0.45.2) (2021-05-06) ### Bug Fixes * remove extra space before_pb_options ([#863](https://www.github.com/googleapis/gapic-generator-python/issues/863)) ([f0532e7](https://www.github.com/googleapis/gapic-generator-python/commit/f0532e7a88479aeb805c1509239008bdd19e9d85)) -### [0.45.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.0...v0.45.1) (2021-05-04) +## [0.45.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.45.0...v0.45.1) (2021-05-04) ### Bug Fixes @@ -526,21 +526,21 @@ * add autogenerated snippets ([#845](https://www.github.com/googleapis/gapic-generator-python/issues/845)) ([abdf5ec](https://www.github.com/googleapis/gapic-generator-python/commit/abdf5ec00261e5500dbdd190c23b0b2b05836799)) -### 
[0.44.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.2...v0.44.3) (2021-05-03) +## [0.44.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.2...v0.44.3) (2021-05-03) ### Performance Improvements * reduce unnecessary copies, optimize Address comparison ([#855](https://www.github.com/googleapis/gapic-generator-python/issues/855)) ([e843540](https://www.github.com/googleapis/gapic-generator-python/commit/e8435400257707458e83424019c9b1a16fac9a99)) -### [0.44.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.1...v0.44.2) (2021-04-30) +## [0.44.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.1...v0.44.2) (2021-04-30) ### Bug Fixes * remove auth, policy, and options from the reserved names list ([#851](https://www.github.com/googleapis/gapic-generator-python/issues/851)) ([d3f31a0](https://www.github.com/googleapis/gapic-generator-python/commit/d3f31a0d33411b3248871ddbe51135e83b699a73)) -### [0.44.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.0...v0.44.1) (2021-04-28) +## [0.44.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.44.0...v0.44.1) (2021-04-28) ### Bug Fixes @@ -559,14 +559,14 @@ * enable GAPIC metadata generation ([#843](https://www.github.com/googleapis/gapic-generator-python/issues/843)) ([697816c](https://www.github.com/googleapis/gapic-generator-python/commit/697816ce7d5b201d6ced85fadd89f9140da67b37)) -### [0.43.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.2...v0.43.3) (2021-04-12) +## [0.43.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.2...v0.43.3) (2021-04-12) ### Bug Fixes * sort subpackages in %namespace/%name/__init__.py ([#836](https://www.github.com/googleapis/gapic-generator-python/issues/836)) ([90cf882](https://www.github.com/googleapis/gapic-generator-python/commit/90cf882b20f430499f692e6b9b23497b3555e928)) -### 
[0.43.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.1...v0.43.2) (2021-03-24) +## [0.43.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.1...v0.43.2) (2021-03-24) ### Bug Fixes @@ -574,7 +574,7 @@ * add certain raw imports to RESERVED_NAMES ([#824](https://www.github.com/googleapis/gapic-generator-python/issues/824)) ([04bd8aa](https://www.github.com/googleapis/gapic-generator-python/commit/04bd8aaf0fc2c2c0615105cab39dc33266b66775)) * Update module alias to resolve naming conflict ([#820](https://www.github.com/googleapis/gapic-generator-python/issues/820)) ([f5e9f36](https://www.github.com/googleapis/gapic-generator-python/commit/f5e9f367ec6a72b4272f559a93f6fbb3d7e54b8b)), closes [#819](https://www.github.com/googleapis/gapic-generator-python/issues/819) -### [0.43.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.0...v0.43.1) (2021-03-19) +## [0.43.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.43.0...v0.43.1) (2021-03-19) ### Bug Fixes @@ -589,14 +589,14 @@ * add bazel support for gapic metadata ([#811](https://www.github.com/googleapis/gapic-generator-python/issues/811)) ([7ced24a](https://www.github.com/googleapis/gapic-generator-python/commit/7ced24a0b20cb6505587b946c03b1b038eef4b4a)) * update templates to permit enum aliases ([#809](https://www.github.com/googleapis/gapic-generator-python/issues/809)) ([2e7ea11](https://www.github.com/googleapis/gapic-generator-python/commit/2e7ea11f80210459106f9780e5f013e2a0381d29)) -### [0.42.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.1...v0.42.2) (2021-03-05) +## [0.42.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.1...v0.42.2) (2021-03-05) ### Bug Fixes * s/grpcAsync/grpc-async for gapic metadata ([#803](https://www.github.com/googleapis/gapic-generator-python/issues/803)) 
([96f7864](https://www.github.com/googleapis/gapic-generator-python/commit/96f78640d90cf50c6b525924d14c6afe31874be6)) -### [0.42.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.0...v0.42.1) (2021-03-04) +## [0.42.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.42.0...v0.42.1) (2021-03-04) ### Bug Fixes @@ -617,35 +617,35 @@ * add gapic metadata file ([#781](https://www.github.com/googleapis/gapic-generator-python/issues/781)) ([5dd8fcc](https://www.github.com/googleapis/gapic-generator-python/commit/5dd8fccf6b4da57edef0347beb07102634daa992)) -### [0.40.12](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.11...v0.40.12) (2021-02-26) +## [0.40.12](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.11...v0.40.12) (2021-02-26) ### Bug Fixes * exclude 'input' from reserved names list ([#788](https://www.github.com/googleapis/gapic-generator-python/issues/788)) ([da2ff71](https://www.github.com/googleapis/gapic-generator-python/commit/da2ff717b82357359baeeafad9a3e48a70e194cb)) -### [0.40.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.10...v0.40.11) (2021-02-24) +## [0.40.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.10...v0.40.11) (2021-02-24) ### Bug Fixes * remove duplicate field entries ([#786](https://www.github.com/googleapis/gapic-generator-python/issues/786)) ([9f4dfa4](https://www.github.com/googleapis/gapic-generator-python/commit/9f4dfa46cb6a67081563ce096452fedd9e35051d)) -### [0.40.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.9...v0.40.10) (2021-02-17) +## [0.40.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.9...v0.40.10) (2021-02-17) ### Bug Fixes * ignore unknown fields returned from server for REST ([#777](https://www.github.com/googleapis/gapic-generator-python/issues/777)) 
([a70b078](https://www.github.com/googleapis/gapic-generator-python/commit/a70b0787f7e3d40642a4f68574f0cc493cc4e054)) -### [0.40.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.8...v0.40.9) (2021-02-10) +## [0.40.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.8...v0.40.9) (2021-02-10) ### Bug Fixes * fix rest transport tests ([#772](https://www.github.com/googleapis/gapic-generator-python/issues/772)) ([ce110a3](https://www.github.com/googleapis/gapic-generator-python/commit/ce110a35894aa1a838649f9782294b3b8446be5c)) -### [0.40.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.7...v0.40.8) (2021-02-05) +## [0.40.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.7...v0.40.8) (2021-02-05) ### Bug Fixes @@ -653,28 +653,28 @@ * body encoding for rest transport ([#768](https://www.github.com/googleapis/gapic-generator-python/issues/768)) ([cc55a18](https://www.github.com/googleapis/gapic-generator-python/commit/cc55a182b878d78f92aba259c067d47ab1d01e5b)) * update paging implementation to handle unconventional pagination ([#750](https://www.github.com/googleapis/gapic-generator-python/issues/750)) ([eaac3e6](https://www.github.com/googleapis/gapic-generator-python/commit/eaac3e69d366b610ae7551d94d4f546819e24bc2)) -### [0.40.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.6...v0.40.7) (2021-02-03) +## [0.40.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.6...v0.40.7) (2021-02-03) ### Bug Fixes * don't use integer for enums in json encoding ([#761](https://www.github.com/googleapis/gapic-generator-python/issues/761)) ([6d37a73](https://www.github.com/googleapis/gapic-generator-python/commit/6d37a7388995b90428ee6293bcce5d48cd9a48f8)) -### [0.40.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.5...v0.40.6) (2021-02-02) +## 
[0.40.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.5...v0.40.6) (2021-02-02) ### Bug Fixes * remove duplicate assignment of certain flattened, repeated fields ([#760](https://www.github.com/googleapis/gapic-generator-python/issues/760)) ([cdbc221](https://www.github.com/googleapis/gapic-generator-python/commit/cdbc22130a176e733c529f60a6b8b1d224e82e89)) -### [0.40.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.4...v0.40.5) (2021-02-01) +## [0.40.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.4...v0.40.5) (2021-02-01) ### Bug Fixes * Fix namespace packages conflict issue ([#757](https://www.github.com/googleapis/gapic-generator-python/issues/757)) ([8035662](https://www.github.com/googleapis/gapic-generator-python/commit/8035662bdcfbdffd1c294c5d28479733358407ca)) -### [0.40.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.3...v0.40.4) (2021-01-28) +## [0.40.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.3...v0.40.4) (2021-01-28) ### Bug Fixes @@ -682,21 +682,21 @@ * Make gapic-generator-python compatible with protobuf 3.14.0 (packaged as native namespace package) ([#753](https://www.github.com/googleapis/gapic-generator-python/issues/753)) ([45212af](https://www.github.com/googleapis/gapic-generator-python/commit/45212afb9f523a416d86272798d71ce05dc292f0)) * mypy 0.800 update errors ([#754](https://www.github.com/googleapis/gapic-generator-python/issues/754)) ([608275a](https://www.github.com/googleapis/gapic-generator-python/commit/608275aa923f495520dea8ebddb94a99f26e27a5)) -### [0.40.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.2...v0.40.3) (2021-01-21) +## [0.40.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.2...v0.40.3) (2021-01-21) ### Bug Fixes * stabilize order of query_params ([#742](https://www.github.com/googleapis/gapic-generator-python/issues/742)) 
([2835ddb](https://www.github.com/googleapis/gapic-generator-python/commit/2835ddbe62b520e2e4c84f02810b1ac936c9cbb9)) -### [0.40.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.1...v0.40.2) (2021-01-21) +## [0.40.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.1...v0.40.2) (2021-01-21) ### Bug Fixes * fix rest transport unit test template ([#741](https://www.github.com/googleapis/gapic-generator-python/issues/741)) ([54b9806](https://www.github.com/googleapis/gapic-generator-python/commit/54b98060f881c8f0424c7e146488d3adc19fec7a)) -### [0.40.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.0...v0.40.1) (2021-01-20) +## [0.40.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.40.0...v0.40.1) (2021-01-20) ### Bug Fixes @@ -710,7 +710,7 @@ * add mtls feature to rest transport ([#731](https://www.github.com/googleapis/gapic-generator-python/issues/731)) ([524dbab](https://www.github.com/googleapis/gapic-generator-python/commit/524dbab16d248198ca10a08ecede4600fd36cefc)) -### [0.39.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.39.0...v0.39.1) (2021-01-05) +## [0.39.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.39.0...v0.39.1) (2021-01-05) ### Bug Fixes @@ -738,7 +738,7 @@ * add 'from_service_account_info' factory to clients ([#706](https://www.github.com/googleapis/gapic-generator-python/issues/706)) ([94d5f0c](https://www.github.com/googleapis/gapic-generator-python/commit/94d5f0c11b8041cbae8e4a89bb504d6c6e200a95)), closes [#705](https://www.github.com/googleapis/gapic-generator-python/issues/705) -### [0.37.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.0...v0.37.1) (2020-12-10) +## [0.37.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.37.0...v0.37.1) (2020-12-10) ### Bug Fixes @@ -759,7 +759,7 @@ * add rest transport generation for clients with optional transport flag 
([#688](https://www.github.com/googleapis/gapic-generator-python/issues/688)) ([af59c2c](https://www.github.com/googleapis/gapic-generator-python/commit/af59c2c3c3d6b7e1f626c3fbc2c03f99ca31b4a4)) -### [0.35.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.10...v0.35.11) (2020-11-12) +## [0.35.11](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.10...v0.35.11) (2020-11-12) ### Bug Fixes @@ -767,70 +767,70 @@ * add enums to types/__init__.py ([#695](https://www.github.com/googleapis/gapic-generator-python/issues/695)) ([e1d4a4a](https://www.github.com/googleapis/gapic-generator-python/commit/e1d4a4ae768a631f6e6dc28f2acfde8be8dc4a8f)) * update protobuf version [gapic-generator-python] ([#696](https://www.github.com/googleapis/gapic-generator-python/issues/696)) ([ea3e519](https://www.github.com/googleapis/gapic-generator-python/commit/ea3e5198862881f5b142638df6ea604654f81f82)) -### [0.35.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.9...v0.35.10) (2020-11-09) +## [0.35.10](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.9...v0.35.10) (2020-11-09) ### Documentation * fix a few typos ([#690](https://www.github.com/googleapis/gapic-generator-python/issues/690)) ([2716838](https://www.github.com/googleapis/gapic-generator-python/commit/2716838fb739c9350eee2c95b5cf207c4d83423d)) -### [0.35.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.8...v0.35.9) (2020-10-27) +## [0.35.9](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.8...v0.35.9) (2020-10-27) ### Performance Improvements * collisions don't contain reserved names by default ([#684](https://www.github.com/googleapis/gapic-generator-python/issues/684)) ([2ec6ea6](https://www.github.com/googleapis/gapic-generator-python/commit/2ec6ea6835256c0d7b252e035cf4eac1ff442647)) -### [0.35.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.7...v0.35.8) 
(2020-10-21) +## [0.35.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.7...v0.35.8) (2020-10-21) ### Documentation * generated message types reference proto-plus ([#680](https://www.github.com/googleapis/gapic-generator-python/issues/680)) ([23327b2](https://www.github.com/googleapis/gapic-generator-python/commit/23327b275fb5a3fefe6c47cb15b9d9ecb02aac1f)) -### [0.35.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.6...v0.35.7) (2020-10-21) +## [0.35.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.6...v0.35.7) (2020-10-21) ### Bug Fixes * expose ssl credentials from transport ([#677](https://www.github.com/googleapis/gapic-generator-python/issues/677)) ([da0ee3e](https://www.github.com/googleapis/gapic-generator-python/commit/da0ee3eab4f80bf3d70fa5e06a2dcef7e1d4d22e)) -### [0.35.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.5...v0.35.6) (2020-10-20) +## [0.35.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.5...v0.35.6) (2020-10-20) ### Bug Fixes * unknown resources do not cause a generator crash ([#675](https://www.github.com/googleapis/gapic-generator-python/issues/675)) ([2d23d7d](https://www.github.com/googleapis/gapic-generator-python/commit/2d23d7d202099ccf145c01aeb9a03ae46b4e1b00)) -### [0.35.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.4...v0.35.5) (2020-10-19) +## [0.35.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.4...v0.35.5) (2020-10-19) ### Bug Fixes * numerous small fixes to allow bigtable-admin ([#660](https://www.github.com/googleapis/gapic-generator-python/issues/660)) ([09692c4](https://www.github.com/googleapis/gapic-generator-python/commit/09692c4e889ccde3b0ca31a5e8476c1679804beb)) -### [0.35.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.3...v0.35.4) (2020-10-16) +## 
[0.35.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.3...v0.35.4) (2020-10-16) ### Bug Fixes * minor typo in ads template ([#664](https://www.github.com/googleapis/gapic-generator-python/issues/664)) ([816f965](https://www.github.com/googleapis/gapic-generator-python/commit/816f965c8560bf65d8043bd67672c660a2b1300b)) -### [0.35.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.2...v0.35.3) (2020-10-13) +## [0.35.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.2...v0.35.3) (2020-10-13) ### Documentation * remove references to pipsi ([#656](https://www.github.com/googleapis/gapic-generator-python/issues/656)) ([39c612b](https://www.github.com/googleapis/gapic-generator-python/commit/39c612b545bc93c7c738a78f074672ee66365efb)) -### [0.35.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.1...v0.35.2) (2020-10-13) +## [0.35.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.1...v0.35.2) (2020-10-13) ### Bug Fixes * modules referenced in MapField message type are properly aliased ([#654](https://www.github.com/googleapis/gapic-generator-python/issues/654)) ([2c79349](https://www.github.com/googleapis/gapic-generator-python/commit/2c79349e7b89435bc45e499885f7b12ac0bc2d9f)), closes [#618](https://www.github.com/googleapis/gapic-generator-python/issues/618) -### [0.35.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.0...v0.35.1) (2020-10-09) +## [0.35.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.35.0...v0.35.1) (2020-10-09) ### Bug Fixes @@ -844,28 +844,28 @@ * file_level and indirectly used resources generate helper methods ([#642](https://www.github.com/googleapis/gapic-generator-python/issues/642)) ([42e224c](https://www.github.com/googleapis/gapic-generator-python/commit/42e224cb100f6e2aa9370bc6a5179d62979b5c4d)), closes [#637](https://www.github.com/googleapis/gapic-generator-python/issues/637) -### 
[0.34.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.3...v0.34.4) (2020-10-09) +## [0.34.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.3...v0.34.4) (2020-10-09) ### Bug Fixes * expose transport property for clients ([#645](https://www.github.com/googleapis/gapic-generator-python/issues/645)) ([13cddda](https://www.github.com/googleapis/gapic-generator-python/commit/13cddda0623bd4d24ae7973752b1be0eaa40523a)), closes [#640](https://www.github.com/googleapis/gapic-generator-python/issues/640) -### [0.34.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.2...v0.34.3) (2020-10-08) +## [0.34.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.2...v0.34.3) (2020-10-08) ### Bug Fixes * fix types on server and bidi streaming callables ([#641](https://www.github.com/googleapis/gapic-generator-python/issues/641)) ([d92c202](https://www.github.com/googleapis/gapic-generator-python/commit/d92c2029398c969ebf2a68a5bf77c5eb4fff7b31)) -### [0.34.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.1...v0.34.2) (2020-09-30) +## [0.34.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.1...v0.34.2) (2020-09-30) ### Bug Fixes * resource messages in method response types generate helpers ([#629](https://www.github.com/googleapis/gapic-generator-python/issues/629)) ([52bfd6d](https://www.github.com/googleapis/gapic-generator-python/commit/52bfd6d5d5821b33e78e6b9867a3be2865cdbc74)) -### [0.34.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.0...v0.34.1) (2020-09-30) +## [0.34.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.34.0...v0.34.1) (2020-09-30) ### Bug Fixes @@ -879,56 +879,56 @@ * add support for common resource paths ([#622](https://www.github.com/googleapis/gapic-generator-python/issues/622)) 
([15a7fde](https://www.github.com/googleapis/gapic-generator-python/commit/15a7fdeb966cb64a742b6305d2c71dd3d485d0f9)) -### [0.33.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.7...v0.33.8) (2020-09-25) +## [0.33.8](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.7...v0.33.8) (2020-09-25) ### Bug Fixes * handle repeated fields in method signatures ([#445](https://www.github.com/googleapis/gapic-generator-python/issues/445)) ([3aae799](https://www.github.com/googleapis/gapic-generator-python/commit/3aae799f62a1f5d3b0506d919cc6080ee417f14b)) -### [0.33.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.6...v0.33.7) (2020-09-24) +## [0.33.7](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.6...v0.33.7) (2020-09-24) ### Bug Fixes * retriable exceptions are deterministically ordered in GAPICs ([#619](https://www.github.com/googleapis/gapic-generator-python/issues/619)) ([f7b1164](https://www.github.com/googleapis/gapic-generator-python/commit/f7b11640b74d8c64747b33783976d6e0ab9c61c4)) -### [0.33.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.5...v0.33.6) (2020-09-22) +## [0.33.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.5...v0.33.6) (2020-09-22) ### Bug Fixes * operation module is properly aliased if necessary ([#615](https://www.github.com/googleapis/gapic-generator-python/issues/615)) ([8f92fd9](https://www.github.com/googleapis/gapic-generator-python/commit/8f92fd9999286ef3f916119be78dbeb838a15550)), closes [#610](https://www.github.com/googleapis/gapic-generator-python/issues/610) -### [0.33.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.4...v0.33.5) (2020-09-22) +## [0.33.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.4...v0.33.5) (2020-09-22) ### Bug Fixes * remove 'property' from reserved names 
([#613](https://www.github.com/googleapis/gapic-generator-python/issues/613)) ([8338a51](https://www.github.com/googleapis/gapic-generator-python/commit/8338a51a81f5f5b8ebacf68c8e46d3e1804d3f8b)) -### [0.33.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.3...v0.33.4) (2020-09-17) +## [0.33.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.3...v0.33.4) (2020-09-17) ### Bug Fixes * 'id' should not be a reserved name ([#602](https://www.github.com/googleapis/gapic-generator-python/issues/602)) ([c43c574](https://www.github.com/googleapis/gapic-generator-python/commit/c43c5740db099be19c5f6e52b3a917a631003411)) -### [0.33.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.2...v0.33.3) (2020-09-15) +## [0.33.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.2...v0.33.3) (2020-09-15) ### Bug Fixes * module names can no longer collide with keywords or builtins ([#595](https://www.github.com/googleapis/gapic-generator-python/issues/595)) ([960d550](https://www.github.com/googleapis/gapic-generator-python/commit/960d550c4a8fd09b052cce785d76243a5d4525d7)) -### [0.33.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.1...v0.33.2) (2020-09-15) +## [0.33.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.1...v0.33.2) (2020-09-15) ### Bug Fixes * ignore types for imports generated from 'google.api_core' ([#597](https://www.github.com/googleapis/gapic-generator-python/issues/597)) ([8440e09](https://www.github.com/googleapis/gapic-generator-python/commit/8440e09855d399d647b62238a9697e04ea4d0d41)), closes [#596](https://www.github.com/googleapis/gapic-generator-python/issues/596) -### [0.33.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.0...v0.33.1) (2020-09-15) +## [0.33.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.33.0...v0.33.1) (2020-09-15) ### Bug Fixes @@ -942,28 +942,28 @@ * support 
mtls env variables ([#589](https://www.github.com/googleapis/gapic-generator-python/issues/589)) ([b19026d](https://www.github.com/googleapis/gapic-generator-python/commit/b19026d9cca26ebd1cd0c3e73f738c4d1870d987)) -### [0.32.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.3...v0.32.4) (2020-09-03) +## [0.32.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.3...v0.32.4) (2020-09-03) ### Bug Fixes * rendering mock values for recursive messages no longer crashes ([#587](https://www.github.com/googleapis/gapic-generator-python/issues/587)) ([c2a83e5](https://www.github.com/googleapis/gapic-generator-python/commit/c2a83e561bf46b4af21e9008c7d67a1c609d7d06)) -### [0.32.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.2...v0.32.3) (2020-08-28) +## [0.32.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.2...v0.32.3) (2020-08-28) ### Bug Fixes * stabilize the order of resource helper methods and ([#582](https://www.github.com/googleapis/gapic-generator-python/issues/582)) ([7d2adde](https://www.github.com/googleapis/gapic-generator-python/commit/7d2adde3a1ae81ac88ced822d6dfdfb26ffbfdf0)) -### [0.32.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.1...v0.32.2) (2020-08-20) +## [0.32.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.1...v0.32.2) (2020-08-20) ### Bug Fixes * add 'type: ignore' comment for 'google.auth' ([#579](https://www.github.com/googleapis/gapic-generator-python/issues/579)) ([af17501](https://www.github.com/googleapis/gapic-generator-python/commit/af17501d258c7c37fc1081fcad5fe18f7629f4c3)) -### [0.32.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.0...v0.32.1) (2020-08-19) +## [0.32.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.32.0...v0.32.1) (2020-08-19) ### Bug Fixes @@ -977,7 +977,7 @@ * allow user-provided client info 
([#573](https://www.github.com/googleapis/gapic-generator-python/issues/573)) ([b2e5274](https://www.github.com/googleapis/gapic-generator-python/commit/b2e52746c7ce4b983482fb776224b30767978c79)), closes [googleapis/python-kms#37](https://www.github.com/googleapis/python-kms/issues/37) [#566](https://www.github.com/googleapis/gapic-generator-python/issues/566) -### [0.31.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.31.0...v0.31.1) (2020-08-17) +## [0.31.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.31.0...v0.31.1) (2020-08-17) ### Bug Fixes @@ -1003,14 +1003,14 @@ * precache wrapped rpcs ([#553](https://www.github.com/googleapis/gapic-generator-python/issues/553)) ([2f2fb5d](https://www.github.com/googleapis/gapic-generator-python/commit/2f2fb5d3d9472a79c80be6d052129d07d2bbb835)) -### [0.29.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.1...v0.29.2) (2020-07-23) +## [0.29.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.1...v0.29.2) (2020-07-23) ### Bug Fixes * rename __init__.py to __init__.py.j2 ([#550](https://www.github.com/googleapis/gapic-generator-python/issues/550)) ([71a7062](https://www.github.com/googleapis/gapic-generator-python/commit/71a7062b918136b916cc5bfc7dbdf64f870edf6a)), closes [#437](https://www.github.com/googleapis/gapic-generator-python/issues/437) -### [0.29.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.0...v0.29.1) (2020-07-23) +## [0.29.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.29.0...v0.29.1) (2020-07-23) ### Bug Fixes @@ -1030,7 +1030,7 @@ * make # after alpha/beta optional ([#540](https://www.github.com/googleapis/gapic-generator-python/issues/540)) ([f86a47b](https://www.github.com/googleapis/gapic-generator-python/commit/f86a47b6431e374ae1797061511b49fe6bf22daf)) -### [0.28.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.28.0...v0.28.1) (2020-07-16) +## 
[0.28.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.28.0...v0.28.1) (2020-07-16) ### Bug Fixes @@ -1056,21 +1056,21 @@ * support for proto3 optional fields ([#519](https://www.github.com/googleapis/gapic-generator-python/issues/519)) ([1aa729c](https://www.github.com/googleapis/gapic-generator-python/commit/1aa729cc8d2f7f0de25c8348fdbf9d6dd96f5847)) -### [0.26.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.5...v0.26.6) (2020-07-10) +## [0.26.6](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.5...v0.26.6) (2020-07-10) ### Bug Fixes * primitive repeated fields are now correctly auto paginated ([#517](https://www.github.com/googleapis/gapic-generator-python/issues/517)) ([61a2cc0](https://www.github.com/googleapis/gapic-generator-python/commit/61a2cc0d4c08064d442fd4d7aa4b1b9e56158eaa)) -### [0.26.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.4...v0.26.5) (2020-07-10) +## [0.26.5](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.4...v0.26.5) (2020-07-10) ### Bug Fixes * convert datetime back to proto for unit tests ([#511](https://www.github.com/googleapis/gapic-generator-python/issues/511)) ([e1c787d](https://www.github.com/googleapis/gapic-generator-python/commit/e1c787d3b6fe09dc0b4e00f07a7bd77fb5f1e6a3)) -### [0.26.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.3...v0.26.4) (2020-07-10) +## [0.26.4](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.3...v0.26.4) (2020-07-10) ### Bug Fixes @@ -1078,21 +1078,21 @@ * require min google-api-core version of 1.21.0 ([#506](https://www.github.com/googleapis/gapic-generator-python/issues/506)) ([bf787bd](https://www.github.com/googleapis/gapic-generator-python/commit/bf787bd36198288d6a40e45e44e43f0098cfec7c)), closes [#461](https://www.github.com/googleapis/gapic-generator-python/issues/461) * tweak oneof detection 
([#505](https://www.github.com/googleapis/gapic-generator-python/issues/505)) ([1632e25](https://www.github.com/googleapis/gapic-generator-python/commit/1632e250cfc01a17ccad128c3e065008b334473a)) -### [0.26.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.2...v0.26.3) (2020-07-08) +## [0.26.3](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.2...v0.26.3) (2020-07-08) ### Bug Fixes * fix wrong unit test ([#502](https://www.github.com/googleapis/gapic-generator-python/issues/502)) ([c95bd45](https://www.github.com/googleapis/gapic-generator-python/commit/c95bd45506df7973758b9e1249586597d8214985)) -### [0.26.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.1...v0.26.2) (2020-07-07) +## [0.26.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.1...v0.26.2) (2020-07-07) ### Bug Fixes * add oneof fields to generated protoplus init ([#485](https://www.github.com/googleapis/gapic-generator-python/issues/485)) ([be5a847](https://www.github.com/googleapis/gapic-generator-python/commit/be5a847aeff6687679f7bca46308362d588f5c77)), closes [#484](https://www.github.com/googleapis/gapic-generator-python/issues/484) -### [0.26.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.0...v0.26.1) (2020-07-07) +## [0.26.1](https://www.github.com/googleapis/gapic-generator-python/compare/v0.26.0...v0.26.1) (2020-07-07) ### Bug Fixes @@ -1112,14 +1112,14 @@ * add name and version info to fixup script name ([#490](https://www.github.com/googleapis/gapic-generator-python/issues/490)) ([16fe7e7](https://www.github.com/googleapis/gapic-generator-python/commit/16fe7e7885b7e17bf16b4f1f8f8844b9f5d0bdfe)) * Temporarily define a fixed testing event loop ([#493](https://www.github.com/googleapis/gapic-generator-python/issues/493)) ([2d22d91](https://www.github.com/googleapis/gapic-generator-python/commit/2d22d919bc8c08e03f501ff2f23152b761467c80)) -### 
[0.25.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.25.1...v0.25.2) (2020-06-23) +## [0.25.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.25.1...v0.25.2) (2020-06-23) ### Bug Fixes * always use dataclasses 0.6 ([#481](https://www.github.com/googleapis/gapic-generator-python/issues/481)) ([066d04e](https://www.github.com/googleapis/gapic-generator-python/commit/066d04e7d53301024106f244280502f16af46b79)) -### [0.25.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.25.0...v0.25.1) (2020-06-23) +## [0.25.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.25.0...v0.25.1) (2020-06-23) ### Bug Fixes @@ -1133,14 +1133,14 @@ * provide AsyncIO support for generated code ([#365](https://www.github.com/googleapis/gapic-generator-python/issues/365)) ([305ed34](https://www.github.com/googleapis/gapic-generator-python/commit/305ed34cfc1607c990f2f88b27f53358da25c366)) -### [0.24.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.24.1...v0.24.2) (2020-06-13) +## [0.24.2](https://www.github.com/googleapis/gapic-generator-python/compare/v0.24.1...v0.24.2) (2020-06-13) ### Bug Fixes * generated unit tests live in the 'tests/gapic' subdir ([#456](https://www.github.com/googleapis/gapic-generator-python/issues/456)) ([1ed7c9d](https://www.github.com/googleapis/gapic-generator-python/commit/1ed7c9d6fe9595c390387d72113d741ebf28538d)), closes [#454](https://www.github.com/googleapis/gapic-generator-python/issues/454) -### [0.24.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.24.0...v0.24.1) (2020-06-12) +## [0.24.1](https://www.github.com/googleapis/gapic-generator-python/compare/0.24.0...v0.24.1) (2020-06-12) ### Bug Fixes From a7126b62c068c1538562b836c68803eefc8664f3 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Wed, 22 Jun 2022 17:06:14 -0700 Subject: [PATCH 0832/1339] feat: adds audience. 
(#1312) * feat: adds audience Co-authored-by: Viacheslav Rostovtsev <58152857+viacheslav-rostovtsev@users.noreply.github.com> --- .../%sub/services/%service/client.py.j2 | 1 + .../services/%service/transports/base.py.j2 | 14 +++-- .../services/%service/transports/grpc.py.j2 | 2 + .../%service/transports/grpc_asyncio.py.j2 | 2 + .../services/%service/transports/rest.py.j2 | 2 + .../gapic/templates/setup.py.j2 | 6 +-- .../%name_%version/%sub/test_%service.py.j2 | 49 ++++++++++++++++++ packages/gapic-generator/setup.py | 4 +- .../asset_v1/services/asset_service/client.py | 1 + .../services/asset_service/transports/base.py | 14 +++-- .../services/asset_service/transports/grpc.py | 2 + .../asset_service/transports/grpc_asyncio.py | 2 + .../tests/integration/goldens/asset/setup.py | 4 +- .../unit/gapic/asset_v1/test_asset_service.py | 51 +++++++++++++++++++ .../services/iam_credentials/client.py | 1 + .../iam_credentials/transports/base.py | 14 +++-- .../iam_credentials/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../integration/goldens/credentials/setup.py | 2 +- .../credentials_v1/test_iam_credentials.py | 51 +++++++++++++++++++ .../eventarc_v1/services/eventarc/client.py | 1 + .../services/eventarc/transports/base.py | 14 +++-- .../services/eventarc/transports/grpc.py | 2 + .../eventarc/transports/grpc_asyncio.py | 2 + .../integration/goldens/eventarc/setup.py | 2 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 51 +++++++++++++++++++ .../services/config_service_v2/client.py | 1 + .../config_service_v2/transports/base.py | 14 +++-- .../config_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../services/logging_service_v2/client.py | 1 + .../logging_service_v2/transports/base.py | 14 +++-- .../logging_service_v2/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../services/metrics_service_v2/client.py | 1 + .../metrics_service_v2/transports/base.py | 14 +++-- .../metrics_service_v2/transports/grpc.py | 2 + 
.../transports/grpc_asyncio.py | 2 + .../integration/goldens/logging/setup.py | 2 +- .../logging_v2/test_config_service_v2.py | 51 +++++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 51 +++++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 51 +++++++++++++++++++ .../redis_v1/services/cloud_redis/client.py | 1 + .../services/cloud_redis/transports/base.py | 14 +++-- .../services/cloud_redis/transports/grpc.py | 2 + .../cloud_redis/transports/grpc_asyncio.py | 2 + .../tests/integration/goldens/redis/setup.py | 2 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 51 +++++++++++++++++++ 48 files changed, 531 insertions(+), 51 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 8bdef4b8487d..440548be8e35 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -357,6 +357,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 7788d2e0c736..bfca9d99007a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -69,6 +69,7 @@ class {{ service.name }}Transport(abc.ABC): quota_project_id: Optional[str] = 
None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -99,11 +100,6 @@ class {{ service.name }}Transport(abc.ABC): self._extended_operations_services: Dict[str, Any] = {} {% endif %} - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -122,6 +118,9 @@ class {{ service.name }}Transport(abc.ABC): ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -130,6 +129,11 @@ class {{ service.name }}Transport(abc.ABC): # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index e2422154fdb1..0e0ada45421b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -62,6 +62,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -160,6 +161,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index eec77586acab..d47e3eee6342 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -106,6 +106,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -204,6 +205,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index c4f1d89893e3..81f00b22e918 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -144,6 +144,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): always_use_jwt_access: Optional[bool]=False, url_scheme: str='https', interceptor: Optional[{{ service.name }}RestInterceptor] = None, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -194,6 +195,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience ) self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 231a31082d29..ef10a8b69751 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -32,16 +32,16 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.7.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', {% else %} - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', {% endif %} 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} - 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', + 'grpc-google-iam-v1 >= 0.12.4, < 0.13dev', {% endif %} ), python_requires='>=3.6', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index aeecc8a71c03..9d0c21cf3950 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -242,6 +242,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -259,6 +260,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -276,6 +278,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -303,6 +306,23 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with 
mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -350,6 +370,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -376,6 +397,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -393,6 +415,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -481,6 +504,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -510,6 +534,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) {% if 'grpc' in opts.transport %} {# TODO(dovs): genericize this function#} @@ -529,6 +554,7 @@ def test_{{ service.client_name|snake_case }}_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -554,6 +580,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -902,6 +929,27 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.{{ service.name }}GrpcTransport, + transports.{{ service.name }}GrpcAsyncIOTransport, + transports.{{ service.name }}RestTransport, + ], +) +def test_{{ service.name|snake_case }}_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) @pytest.mark.parametrize( @@ -1393,6 +1441,7 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) {% endblock %} diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9cd4d10ba13f..3aea76c8ba03 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -26,11 +26,11 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", - "google-api-core >= 2.3.2", + "google-api-core >= 2.8.0", "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", - "protobuf >= 3.18.0", + "protobuf >= 3.18.0, <4.0.0dev", "pypandoc >= 1.4", "PyYAML >= 5.1.1", "dataclasses < 0.8; python_version < '3.7'", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index def497d49f08..5a31bb2800ce 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -383,6 +383,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def export_assets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index d0596429dfa5..8a2127eeef7b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -84,11 +85,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -107,6 +103,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -115,6 +114,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 64cdc5350be8..340bd09b0287 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -58,6 +58,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -154,6 +155,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index dbc83c57a7cb..00e15c96e284 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -102,6 +102,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -198,6 +199,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index aa33abcc546e..385d78bdea1d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -37,11 +37,11 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', - 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', + 'grpc-google-iam-v1 >= 0.12.4, < 0.13dev', ), python_requires='>=3.6', classifiers=[ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 7af0c175051b..4b3bc533948b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -181,6 +181,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -198,6 +199,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -215,6 +217,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -242,6 +245,23 @@ def test_asset_service_client_client_options(client_class, transport_class, tran quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -281,6 +301,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -307,6 +328,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -324,6 +346,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -401,6 +424,7 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -425,6 +449,7 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_asset_service_client_client_options_from_dict(): @@ -442,6 +467,7 @@ def test_asset_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -467,6 +493,7 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -3784,6 +3811,29 @@ def test_asset_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.AssetServiceGrpcTransport, + transports.AssetServiceGrpcAsyncIOTransport, + transports.AssetServiceRestTransport, + ], +) +def test_asset_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -4243,4 +4293,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index dd070bbe0eb6..70ec15e547c4 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -379,6 +379,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def generate_access_token(self, diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index b6ffa602ad85..93b227c32e30 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -54,6 +54,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -81,11 +82,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -104,6 +100,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -112,6 +111,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index ff47dca82592..a3e8efecdf32 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -64,6 +64,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -159,6 +160,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 3f1ec7d18286..a99e2df95dae 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -108,6 +108,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -203,6 +204,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 3a648c3910fd..056323e24bf8 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 62ceb5bf7c29..05d01c230b00 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -172,6 +172,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -189,6 +190,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and 
GOOGLE_API_USE_MTLS_ENDPOINT is @@ -206,6 +208,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -233,6 +236,23 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -272,6 +292,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -298,6 +319,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -315,6 +337,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -392,6 +415,7 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -416,6 +440,7 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_iam_credentials_client_client_options_from_dict(): @@ -433,6 +458,7 @@ def test_iam_credentials_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -458,6 +484,7 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -1730,6 +1757,29 @@ def test_iam_credentials_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsGrpcAsyncIOTransport, + transports.IAMCredentialsRestTransport, + ], +) +def test_iam_credentials_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2140,4 +2190,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 6d6c658d0e56..056b82fb35a5 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -399,6 +399,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def get_trigger(self, diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 96dbf3028026..d15e1f50a8e4 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -57,6 +57,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -84,11 +85,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -107,6 +103,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -115,6 +114,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 81e56bf8cae9..73d6ca9fbf68 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -60,6 +60,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -156,6 +157,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index d72d2a72528d..780b937d8037 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] 
= None, ) -> None: """Instantiate the transport. @@ -200,6 +201,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index da806747c686..86fc0557d369 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index b7e41e077e78..5f2fbfa66a21 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -180,6 +180,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -197,6 +198,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,6 
+216,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -241,6 +244,23 @@ def test_eventarc_client_client_options(client_class, transport_class, transport quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -280,6 +300,7 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -306,6 +327,7 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -323,6 +345,7 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -400,6 +423,7 @@ def test_eventarc_client_client_options_scopes(client_class, transport_class, tr quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -424,6 +448,7 @@ def test_eventarc_client_client_options_credentials_file(client_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_eventarc_client_client_options_from_dict(): @@ -441,6 +466,7 @@ def test_eventarc_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -466,6 +492,7 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -2120,6 +2147,29 @@ def test_eventarc_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, + ], +) +def test_eventarc_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2600,4 +2650,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 580743c9d20e..764529bc9634 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -414,6 +414,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_buckets(self, diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 88ba88234db2..ed139098d841 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -58,6 +58,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -85,11 +86,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -108,6 +104,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -116,6 +115,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 5d0779f047da..9d01152fb660 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -56,6 +56,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -151,6 +152,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 6a11f16ca00d..dca620f3109f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -100,6 +100,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -195,6 +196,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index c632e6369c24..bb4155398c36 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -370,6 +370,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def delete_log(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 54c0c48b7f3b..63a61cf7de93 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -86,11 +87,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -109,6 +105,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -117,6 +116,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 20f4c5133462..6e19b459b101 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -56,6 +56,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -151,6 +152,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index c787f62ed459..e4098c8b1e0a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -100,6 +100,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -195,6 +196,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 1adc45ad2235..061b0b332f98 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -371,6 +371,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_log_metrics(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 487dd0bdc0f5..0f1ef810eedc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -86,11 +87,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -109,6 +105,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -117,6 +116,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index d12216949f95..abe1685b1360 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -56,6 +56,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -151,6 +152,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index a8a020bcfd7d..ce1d083c4c53 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -100,6 +100,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -195,6 +196,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index aef2423cf351..c07123193eb1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 4f8141e0f58c..86fbed2a71cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -173,6 +173,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -190,6 +191,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -207,6 +209,7 @@ def 
test_config_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -234,6 +237,23 @@ def test_config_service_v2_client_client_options(client_class, transport_class, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -273,6 +293,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -299,6 +320,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -316,6 +338,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -393,6 +416,7 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -417,6 +441,7 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_config_service_v2_client_client_options_from_dict(): @@ -434,6 +459,7 @@ def test_config_service_v2_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -459,6 +485,7 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -6154,6 +6181,29 @@ def test_config_service_v2_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + transports.ConfigServiceV2RestTransport, + ], +) +def test_config_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -6647,4 +6697,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f5c3ec05d9a9..a806cac78bf4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -179,6 +179,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ 
-196,6 +197,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -213,6 +215,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -240,6 +243,23 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -279,6 +299,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -305,6 +326,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -322,6 +344,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -399,6 +422,7 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -423,6 +447,7 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_logging_service_v2_client_client_options_from_dict(): @@ -440,6 +465,7 @@ def test_logging_service_v2_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -465,6 +491,7 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -2275,6 +2302,29 @@ def test_logging_service_v2_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + transports.LoggingServiceV2RestTransport, + ], +) +def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2689,4 +2739,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 52126b86a3eb..5292fcdf03bd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -177,6 +177,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT 
is @@ -194,6 +195,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,6 +213,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -238,6 +241,23 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -277,6 +297,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -303,6 +324,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -320,6 +342,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -397,6 +420,7 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -421,6 +445,7 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_metrics_service_v2_client_client_options_from_dict(): @@ -438,6 +463,7 @@ def test_metrics_service_v2_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -463,6 +489,7 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -2126,6 +2153,29 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + transports.MetricsServiceV2RestTransport, + ], +) +def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2540,4 +2590,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 2a63bcd27771..6f439999bcb9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -394,6 +394,7 @@ def __init__(self, *, quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_instances(self, diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 92633afcf21a..5e0e5bd15103 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -56,6 +56,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -83,11 +84,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -106,6 +102,9 @@ def __init__( ) elif credentials is None: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) # If the credentials are service account credentials, then always try to use self signed JWT. if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): @@ -114,6 +113,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
+ if ':' not in host: + host += ':443' + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index fbdc767496e3..c645f37caff4 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -77,6 +77,7 @@ def __init__(self, *, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -173,6 +174,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 0ffce85705a7..c9a27ef42e17 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -121,6 +121,7 @@ def __init__(self, *, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: 
"""Instantiate the transport. @@ -217,6 +218,7 @@ def __init__(self, *, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 1cbd3b83ac4a..28544f8e65fc 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index a836902e8554..c5428450bdca 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -178,6 +178,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -195,6 +196,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,6 +214,7 @@ def 
test_cloud_redis_client_client_options(client_class, transport_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -239,6 +242,23 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" ) @pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ @@ -278,6 +298,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -304,6 +325,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -321,6 +343,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -398,6 +421,7 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ @@ -422,6 +446,7 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) def test_cloud_redis_client_client_options_from_dict(): @@ -439,6 +464,7 @@ def test_cloud_redis_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -464,6 +490,7 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -3110,6 +3137,29 @@ def test_cloud_redis_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, + ], +) +def test_cloud_redis_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -3556,4 +3606,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) From 13d207941b386c1d9033a1b6b19ac5911d0ab591 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 23 Jun 2022 09:47:35 -0700 Subject: [PATCH 0833/1339] chore(main): release 1.1.0 (#1338) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 05acb90a2474..f0b4089310cc 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog +## [1.1.0](https://github.com/googleapis/gapic-generator-python/compare/v1.0.1...v1.1.0) (2022-06-23) + + +### 
Features + +* adds audience. ([#1312](https://github.com/googleapis/gapic-generator-python/issues/1312)) ([a2c8a1e](https://github.com/googleapis/gapic-generator-python/commit/a2c8a1e01ce62286fc6fa5017397b86abac4a054)) + + +### Documentation + +* fix changelog header to consistent size ([#1331](https://github.com/googleapis/gapic-generator-python/issues/1331)) ([8110fc6](https://github.com/googleapis/gapic-generator-python/commit/8110fc670528afacf96abb2065893ff3ed654316)) + ## [1.0.1](https://github.com/googleapis/gapic-generator-python/compare/v1.0.0...v1.0.1) (2022-06-10) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3aea76c8ba03..478cb317c2d5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.0.1" +version = "1.1.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 0b2bfd9de9d331bbcc72eb1b79e9c8aec2c79805 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Mon, 4 Jul 2022 08:04:54 -0700 Subject: [PATCH 0834/1339] fix: resolve issue where rest test appears in grpc-only client (#1343) * fix: fixes bug with generated test. * chore: update integration tests. * chore: fix bug. 
--- .../%name_%version/%sub/test_%service.py.j2 | 17 +++++++++++++++++ .../unit/gapic/asset_v1/test_asset_service.py | 1 - .../credentials_v1/test_iam_credentials.py | 1 - .../unit/gapic/eventarc_v1/test_eventarc.py | 1 - .../gapic/logging_v2/test_config_service_v2.py | 1 - .../gapic/logging_v2/test_logging_service_v2.py | 1 - .../gapic/logging_v2/test_metrics_service_v2.py | 1 - .../unit/gapic/redis_v1/test_cloud_redis.py | 1 - 8 files changed, 17 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9d0c21cf3950..4d22b4827710 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -929,6 +929,7 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): ) +{% if 'grpc' in opts.transport and 'rest' in opts.transport %} @pytest.mark.parametrize( "transport_class", [ @@ -937,6 +938,22 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): transports.{{ service.name }}RestTransport, ], ) +{% elif 'grpc' in opts.transport %} +@pytest.mark.parametrize( + "transport_class", + [ + transports.{{ service.name }}GrpcTransport, + transports.{{ service.name }}GrpcAsyncIOTransport, + ], +) +{% elif 'rest' in opts.transport %} +@pytest.mark.parametrize( + "transport_class", + [ + transports.{{ service.name }}RestTransport, + ], +) +{% endif %} def test_{{ service.name|snake_case }}_transport_auth_gdch_credentials(transport_class): host = 'https://language.com' api_audience_tests = [None, 'https://language2.com'] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py 
b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 4b3bc533948b..beca979169c7 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -3816,7 +3816,6 @@ def test_asset_service_transport_auth_adc(transport_class): [ transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport, - transports.AssetServiceRestTransport, ], ) def test_asset_service_transport_auth_gdch_credentials(transport_class): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 05d01c230b00..b0d869d6bbca 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1762,7 +1762,6 @@ def test_iam_credentials_transport_auth_adc(transport_class): [ transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport, - transports.IAMCredentialsRestTransport, ], ) def test_iam_credentials_transport_auth_gdch_credentials(transport_class): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 5f2fbfa66a21..566ed4a4779d 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -2152,7 
+2152,6 @@ def test_eventarc_transport_auth_adc(transport_class): [ transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport, - transports.EventarcRestTransport, ], ) def test_eventarc_transport_auth_gdch_credentials(transport_class): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 86fbed2a71cb..d05326717171 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -6186,7 +6186,6 @@ def test_config_service_v2_transport_auth_adc(transport_class): [ transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport, - transports.ConfigServiceV2RestTransport, ], ) def test_config_service_v2_transport_auth_gdch_credentials(transport_class): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index a806cac78bf4..0094c686fec1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2307,7 +2307,6 @@ def test_logging_service_v2_transport_auth_adc(transport_class): [ transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport, - transports.LoggingServiceV2RestTransport, ], ) def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 5292fcdf03bd..782bb603aaef 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2158,7 +2158,6 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): [ transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport, - transports.MetricsServiceV2RestTransport, ], ) def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index c5428450bdca..510c48270b42 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -3142,7 +3142,6 @@ def test_cloud_redis_transport_auth_adc(transport_class): [ transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport, - transports.CloudRedisRestTransport, ], ) def test_cloud_redis_transport_auth_gdch_credentials(transport_class): From 981cc73832d76dda2367109d246d45cce6e77cdd Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 5 Jul 2022 11:47:57 -0400 Subject: [PATCH 0835/1339] chore(main): release 1.1.1 (#1347) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 
packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f0b4089310cc..449f39dd16f3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.1.1](https://github.com/googleapis/gapic-generator-python/compare/v1.1.0...v1.1.1) (2022-07-04) + + +### Bug Fixes + +* resolve issue where rest test appears in grpc-only client ([#1343](https://github.com/googleapis/gapic-generator-python/issues/1343)) ([22cd2ca](https://github.com/googleapis/gapic-generator-python/commit/22cd2cafe830f383229f0e556beead05e63a055c)) + ## [1.1.0](https://github.com/googleapis/gapic-generator-python/compare/v1.0.1...v1.1.0) (2022-06-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 478cb317c2d5..83431e79cf54 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.1.0" +version = "1.1.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 41235f6978389d1cb2601bb4d8145a7884746109 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 15 Jul 2022 14:53:27 -0400 Subject: [PATCH 0836/1339] ci: fix ci (#1352) * ci: fix ci * remove .bazelversion --- packages/gapic-generator/.bazelversion | 1 - packages/gapic-generator/WORKSPACE | 18 ++++++++++++------ packages/gapic-generator/repositories.bzl | 6 +++--- 3 files changed, 15 insertions(+), 10 deletions(-) delete mode 100644 packages/gapic-generator/.bazelversion diff --git a/packages/gapic-generator/.bazelversion b/packages/gapic-generator/.bazelversion deleted file mode 100644 index fcdb2e109f68..000000000000 --- a/packages/gapic-generator/.bazelversion +++ 
/dev/null @@ -1 +0,0 @@ -4.0.0 diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 8456723018ac..d5239a9aac85 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -12,9 +12,19 @@ http_archive( url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{0}/bazel_skylib-{0}.tar.gz".format(_bazel_skylib_version), ) -_rules_python_version = "0.5.0" +_io_bazel_rules_go_version = "0.33.0" +http_archive( + name = "io_bazel_rules_go", + sha256 = "685052b498b6ddfe562ca7a97736741d87916fe536623afb7da2824c0211c369", + urls = [ + "https://mirror.bazel.build/github.com/bazelbuild/rules_go/releases/download/v{0}/rules_go-v{0}.zip".format(_io_bazel_rules_go_version), + "https://github.com/bazelbuild/rules_go/releases/download/v{0}/rules_go-v{0}.zip".format(_io_bazel_rules_go_version), + ], +) + +_rules_python_version = "0.9.0" -_rules_python_sha256 = "a2fd4c2a8bcf897b718e5643040b03d9528ac6179f6990774b7c19b2dc6cd96b" +_rules_python_sha256 = "5fa3c738d33acca3b97622a13a741129f67ef43f5fdfcec63b29374cc0574c29" http_archive( name = "rules_python", @@ -50,10 +60,6 @@ load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") grpc_deps() -load("@com_github_grpc_grpc//bazel:grpc_extra_deps.bzl", "grpc_extra_deps") - -grpc_extra_deps() - load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") apple_rules_dependencies() diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 0f23aba3ca59..c374fc751888 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -33,14 +33,14 @@ def gapic_generator_python(): urls = ["https://github.com/bazelbuild/bazel-skylib/archive/2169ae1c374aab4a09aa90e65efe1a3aad4e279b.tar.gz"], ) - _grpc_version = "1.43.0" - _grpc_sha256 = "9647220c699cea4dafa92ec0917c25c7812be51a18143af047e20f3fb05adddc" + _grpc_version = "1.47.0" + _grpc_sha256 = 
"edf25f4db6c841853b7a29d61b0980b516dc31a1b6cdc399bcf24c1446a4a249" _maybe( http_archive, name = "com_github_grpc_grpc", sha256 = _grpc_sha256, strip_prefix = "grpc-{}".format(_grpc_version), - url = "https://github.com/grpc/grpc/archive/v{}.tar.gz".format(_grpc_version), + url = "https://github.com/grpc/grpc/archive/v{}.zip".format(_grpc_version), ) _maybe( From e15ff3c735094f97e89deabbc3e135e17febd7d3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 16 Jul 2022 20:12:54 +0200 Subject: [PATCH 0837/1339] chore(deps): update dependency setuptools to v63 (#1346) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 5b8db7a67f93..33069baad066 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,5 +7,5 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==62.3.2 +setuptools==63.2.0 grpc-google-iam-v1==0.12.4 From 522dec2b18ebb8ab02d62107666fdf276b139d73 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 18 Jul 2022 15:04:41 +0200 Subject: [PATCH 0838/1339] chore(deps): update dependency googleapis-common-protos to v1.56.4 (#1339) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 33069baad066..551aff5c097a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.1.3 google-api-core==2.8.2 -googleapis-common-protos==1.56.2 +googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.1 From 115f679c641d9244629b256cb2f1685ae0af31e3 Mon Sep 17 00:00:00 2001 
From: WhiteSource Renovate Date: Mon, 18 Jul 2022 21:15:11 +0200 Subject: [PATCH 0839/1339] chore(deps): update actions/setup-python action to v4 (#1334) Co-authored-by: Anthonios Partheniou --- .../.github/workflows/tests.yaml | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index d1e39ed71a82..7737cb5db601 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -20,7 +20,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.8" cache: 'pip' @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' @@ -49,7 +49,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' @@ -94,7 +94,7 @@ jobs: sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' @@ -142,7 +142,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -169,7 +169,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' @@ -199,7 +199,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: 
"3.10" cache: 'pip' @@ -226,7 +226,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' @@ -246,7 +246,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -271,7 +271,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -317,7 +317,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python 3.10 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' @@ -336,7 +336,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python "3.10" - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" cache: 'pip' From 12ee205b15a6b049545589da25ad22f015908a21 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Wed, 20 Jul 2022 13:10:39 -0700 Subject: [PATCH 0840/1339] feat: Introduce proper py_library and py_test support in bazel rules (#1360) These changes allow running the generated python gapic unit tests on the fly by bazel right after generation in our CI. 
As a result of this change some of the issues with the generated code became immediatelly visible (because now we ran the generated tests) and will be addressed separately in these issues: https://github.com/googleapis/gapic-generator-python/issues/1358 https://github.com/googleapis/gapic-generator-python/issues/1359 The tests can be now added in BUILD.bazel files as follows: ```bzl py_test( name = "credentials_py_gapic_test", srcs = [ "credentials_py_gapic_pytest.py", "credentials_py_gapic_test.py", ], legacy_create_init = False, deps = [ ":credentials_py_gapic", ], ) ``` --- .../.github/workflows/tests.yaml | 3 +- packages/gapic-generator/requirements.txt | 2 + .../rules_python_gapic/BUILD.bazel | 1 + .../rules_python_gapic/py_gapic.bzl | 63 ++++++++++++++++++- .../rules_python_gapic/py_gapic_pkg.bzl | 13 +++- .../rules_python_gapic/pytest.py | 11 ++++ .../rules_python_gapic/test.py | 11 ++++ .../test/integration_test.bzl | 4 +- .../tests/integration/BUILD.bazel | 60 +++++++++++++++++- 9 files changed, 159 insertions(+), 9 deletions(-) create mode 100644 packages/gapic-generator/rules_python_gapic/pytest.py create mode 100644 packages/gapic-generator/rules_python_gapic/test.py diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 7737cb5db601..29281780dd44 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -311,7 +311,8 @@ jobs: echo "and it will start over with a clean cache." echo "The old one will disappear after 7 days." - name: Integration Tests - run: bazel test tests/integration:asset tests/integration:credentials tests/integration:eventarc tests/integration:logging tests/integration:redis + run: bazel test //tests/integration/... 
+ goldens-lint: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 551aff5c097a..b0f8ee8a676a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,3 +9,5 @@ PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==63.2.0 grpc-google-iam-v1==0.12.4 +proto-plus==1.20.6 +pytest-asyncio==0.19.0 \ No newline at end of file diff --git a/packages/gapic-generator/rules_python_gapic/BUILD.bazel b/packages/gapic-generator/rules_python_gapic/BUILD.bazel index e69de29bb2d1..41f2ac97c6df 100644 --- a/packages/gapic-generator/rules_python_gapic/BUILD.bazel +++ b/packages/gapic-generator/rules_python_gapic/BUILD.bazel @@ -0,0 +1 @@ +exports_files(["test.py", "pytest.py"]) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 659996dd67b4..8453d6859781 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -12,7 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-load("@rules_gapic//:gapic.bzl", "proto_custom_library") +load("@rules_gapic//:gapic.bzl", "proto_custom_library", "unzipped_srcjar") +load("@rules_python//python:defs.bzl", "py_library") +load("@gapic_generator_python_pip_deps//:requirements.bzl", "requirement") + +def _gapic_test_file_impl(ctx): + generated_test_file = ctx.actions.declare_file(ctx.label.name) + + ctx.actions.expand_template( + template = ctx.attr.template.files.to_list()[0], + output = generated_test_file, + substitutions = {}, + ) + + return [DefaultInfo(files = depset(direct = [generated_test_file]))] + +gapic_test_file = rule( + _gapic_test_file_impl, + attrs = { + "template": attr.label(allow_files = True), + }, +) def py_gapic_library( name, @@ -22,9 +42,9 @@ def py_gapic_library( opt_args = None, metadata = True, service_yaml = None, + deps = [], **kwargs): - # srcjar_target_name = "%s_srcjar" % name - srcjar_target_name = name + srcjar_target_name = "%s_srcjar" % name srcjar_output_suffix = ".srcjar" plugin_args = plugin_args or [] @@ -50,3 +70,40 @@ def py_gapic_library( output_suffix = srcjar_output_suffix, **kwargs ) + + main_file = "%s" % srcjar_target_name + srcjar_output_suffix + main_dir = "%s.py" % srcjar_target_name + + unzipped_srcjar( + name = main_dir, + srcjar = ":%s" % main_file, + ) + + actual_deps = deps + [ + "@com_github_grpc_grpc//src/python/grpcio/grpc:grpcio", + requirement("protobuf"), + requirement("proto-plus"), + requirement("google-api-core"), + requirement("googleapis-common-protos"), + requirement("pytest-asyncio"), + ] + + py_library( + name = name, + srcs = [":%s" % main_dir], + deps = actual_deps, + ) + + test_file_target_name = "%s_test.py" % name + + gapic_test_file( + name = test_file_target_name, + template = Label("//rules_python_gapic:test.py"), + ) + + test_runner_file_target_name = "%s_pytest.py" % name + + gapic_test_file( + name = test_runner_file_target_name, + template = Label("//rules_python_gapic:pytest.py"), + ) diff --git 
a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl index a590a3aa7484..55ba8fde6467 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl @@ -16,9 +16,12 @@ load("@rules_gapic//:gapic_pkg.bzl", "construct_package_dir_paths") def _py_gapic_src_pkg_impl(ctx): srcjar_srcs = [] + dir_srcs = [] for dep in ctx.attr.deps: for f in dep.files.to_list(): - if f.extension in ("srcjar", "jar", "zip"): + if f.is_directory: + dir_srcs.append(f) + elif f.extension in ("srcjar", "jar", "zip"): srcjar_srcs.append(f) paths = construct_package_dir_paths(ctx.attr.package_dir, ctx.outputs.pkg, ctx.label.name) @@ -28,6 +31,11 @@ def _py_gapic_src_pkg_impl(ctx): for srcjar_src in {srcjar_srcs}; do unzip -q -o $srcjar_src -d {package_dir_path} done + for dir_src in {dir_srcs}; do + cp -rT -L $dir_src {package_dir_path} + done + # Replace 555 (forced by Bazel) permissions with 644 + find {package_dir_path} -type f -exec chmod 644 {{}} \\; cd {package_dir_path}/.. 
tar -zchpf {package_dir}/{package_dir}.tar.gz {package_dir} cd - @@ -35,6 +43,7 @@ def _py_gapic_src_pkg_impl(ctx): rm -rf {package_dir_path} """.format( srcjar_srcs = " ".join(["'%s'" % f.path for f in srcjar_srcs]), + dir_srcs = " ".join(["'%s'" % f.path for f in dir_srcs]), package_dir_path = paths.package_dir_path, package_dir = paths.package_dir, pkg = ctx.outputs.pkg.path, @@ -42,7 +51,7 @@ def _py_gapic_src_pkg_impl(ctx): ) ctx.actions.run_shell( - inputs = srcjar_srcs, + inputs = srcjar_srcs + dir_srcs, command = script, outputs = [ctx.outputs.pkg], ) diff --git a/packages/gapic-generator/rules_python_gapic/pytest.py b/packages/gapic-generator/rules_python_gapic/pytest.py new file mode 100644 index 000000000000..934fdfa68096 --- /dev/null +++ b/packages/gapic-generator/rules_python_gapic/pytest.py @@ -0,0 +1,11 @@ +import sys +import pytest +import os + + +if __name__ == '__main__': + sys.exit(pytest.main([ + '--disable-pytest-warnings', + '--quiet', + os.path.dirname(os.path.abspath(__file__)) + ])) diff --git a/packages/gapic-generator/rules_python_gapic/test.py b/packages/gapic-generator/rules_python_gapic/test.py new file mode 100644 index 000000000000..be6764cecf64 --- /dev/null +++ b/packages/gapic-generator/rules_python_gapic/test.py @@ -0,0 +1,11 @@ +import os +import sys + + +if __name__ == '__main__': + os.environ['PYTHONNOUSERSITE'] = 'True' + entry_point_script = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + os.path.basename(__file__).replace("_test.py", "_pytest.py")) + args = [sys.executable, entry_point_script] + sys.argv[1:] + os.execv(args[0], args) diff --git a/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl b/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl index 0705972ec7b6..82e27a7ad5ce 100644 --- a/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl +++ b/packages/gapic-generator/rules_python_gapic/test/integration_test.bzl @@ -12,7 +12,7 @@ def 
_diff_integration_goldens_impl(ctx): script = """ mkdir codegen_tmp - unzip {input_srcs} -d codegen_tmp + cp -rT -L {input_srcs} codegen_tmp diff -r codegen_tmp $PWD/tests/integration/goldens/{api_name} > {diff_output} exit 0 # Avoid a build failure. """.format( @@ -94,7 +94,7 @@ def _overwrite_golden_impl(ctx): script = """ mkdir codegen_tmp - unzip {input_srcs} -d codegen_tmp + cp -rT -L {input_srcs} codegen_tmp cd codegen_tmp zip -r ../{goldens_output_zip} . """.format( diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index c39661a3028d..0e4a3275f490 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -26,7 +26,7 @@ package(default_visibility = ["//visibility:public"]) INTEGRATION_TEST_LIBRARIES = [ "asset", # Basic case. "credentials", # Check that the capital name edge case is handled. - "eventarc", # create_channel is a reserved term in transport layer. + "eventarc", # create_channel is a reserved term in transport layer. "logging", # Java package remapping in gapic.yaml. "redis", # Has a gapic.yaml. ] @@ -67,6 +67,18 @@ py_gapic_library( ], ) +py_test( + name = "credentials_py_gapic_test", + srcs = [ + "credentials_py_gapic_pytest.py", + "credentials_py_gapic_test.py", + ], + legacy_create_init = False, + deps = [ + ":credentials_py_gapic", + ], +) + # Eventarc. py_gapic_library( name = "eventarc_py_gapic", @@ -79,6 +91,19 @@ py_gapic_library( ], ) +# Uncomment once https://github.com/googleapis/gapic-generator-python/issues/1358 is fixed +#py_test( +# name = "eventarc_py_gapic_test", +# srcs = [ +# "eventarc_py_gapic_pytest.py", +# "eventarc_py_gapic_test.py", +# ], +# legacy_create_init = False, +# deps = [ +# ":eventarc_py_gapic", +# ], +#) + # Logging. 
py_gapic_library( name = "logging_py_gapic", @@ -91,6 +116,19 @@ py_gapic_library( ], ) +# Uncomment once https://github.com/googleapis/gapic-generator-python/issues/1359 is fixed +#py_test( +# name = "logging_py_gapic_test", +# srcs = [ +# "logging_py_gapic_pytest.py", +# "logging_py_gapic_test.py", +# ], +# legacy_create_init = False, +# deps = [ +# ":logging_py_gapic", +# ], +#) + py_gapic_library( name = "redis_py_gapic", srcs = ["@com_google_googleapis//google/cloud/redis/v1:redis_proto"], @@ -99,3 +137,23 @@ py_gapic_library( "autogen-snippets", ], ) + +py_test( + name = "redis_py_gapic_test", + srcs = [ + "redis_py_gapic_pytest.py", + "redis_py_gapic_test.py", + ], + legacy_create_init = False, + deps = [ + ":redis_py_gapic", + ], +) + +test_suite( + name = "googleapis_test_suite", + tests = [ + ":credentials_py_gapic_test", + ":redis_py_gapic_test", + ], +) From 77c159f542a74d2dfd120d1ace430a92a674c8f2 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Thu, 21 Jul 2022 13:21:51 -0700 Subject: [PATCH 0841/1339] fix: fix wildcard resource names helper method (#1363) This fixes https://github.com/googleapis/gapic-generator-python/issues/1358 --- .../gapic-generator/gapic/schema/wrappers.py | 4 ++++ .../tests/integration/BUILD.bazel | 24 +++++++++---------- .../asset_v1/services/asset_service/client.py | 2 +- .../eventarc_v1/services/eventarc/client.py | 2 +- .../unit/schema/wrappers/test_message.py | 23 ++++++++++++++++++ 5 files changed, 41 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 816b4be91750..019d13cb6789 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -596,6 +596,10 @@ def path_regex_str(self) -> str: ) + "$" ) + # Special case for wildcard resource names + if parsing_regex_str == "^*$": + parsing_regex_str = "^.*$" + 
return parsing_regex_str def get_field(self, *field_path: str, diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 0e4a3275f490..04b3c07bf899 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -91,18 +91,17 @@ py_gapic_library( ], ) -# Uncomment once https://github.com/googleapis/gapic-generator-python/issues/1358 is fixed -#py_test( -# name = "eventarc_py_gapic_test", -# srcs = [ -# "eventarc_py_gapic_pytest.py", -# "eventarc_py_gapic_test.py", -# ], -# legacy_create_init = False, -# deps = [ -# ":eventarc_py_gapic", -# ], -#) +py_test( + name = "eventarc_py_gapic_test", + srcs = [ + "eventarc_py_gapic_pytest.py", + "eventarc_py_gapic_test.py", + ], + legacy_create_init = False, + deps = [ + ":eventarc_py_gapic", + ], +) # Logging. py_gapic_library( @@ -154,6 +153,7 @@ test_suite( name = "googleapis_test_suite", tests = [ ":credentials_py_gapic_test", + ":eventarc_py_gapic_test", ":redis_py_gapic_test", ], ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 5a31bb2800ce..d2ef6794ea32 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -171,7 +171,7 @@ def asset_path() -> str: @staticmethod def parse_asset_path(path: str) -> Dict[str,str]: """Parses a asset path into its component segments.""" - m = re.match(r"^*$", path) + m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 056b82fb35a5..b24b0d1599a6 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -176,7 +176,7 @@ def service_path() -> str: @staticmethod def parse_service_path(path: str) -> Dict[str,str]: """Parses a service path into its component segments.""" - m = re.match(r"^*$", path) + m = re.match(r"^.*$", path) return m.groupdict() if m else {} @staticmethod diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index a54a619e9f35..8eea36e5d031 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -223,6 +223,29 @@ def test_resource_path_with_wildcard(): "kingdoms/my-kingdom/phyla/my-phylum/classes/") is None +def test_resource_path_pure_wildcard(): + options = descriptor_pb2.MessageOptions() + resource = options.Extensions[resource_pb2.resource] + resource.pattern.append("*") + resource.type = "taxonomy.biology.com/Class" + message = make_message('Squid', options=options) + + # Pure wildcard resource names do not really help construct resources + # but they are a part of the spec so we need to support them, which means at + # least not failing. + assert message.resource_path == "*" + assert message.resource_path_args == [] + assert message.resource_type == "Class" + + # Pure wildcard resource names match everything... 
+ assert re.match(message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass") + assert re.match(message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment") + assert re.match(message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/") + + def test_parse_resource_path(): options = descriptor_pb2.MessageOptions() resource = options.Extensions[resource_pb2.resource] From d8d8e23acf5bbefd21c350fa5603828dfac6ccaf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 21 Jul 2022 13:59:51 -0700 Subject: [PATCH 0842/1339] chore(main): release 1.1.2 (#1364) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 449f39dd16f3..cf7757900693 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.1.2](https://github.com/googleapis/gapic-generator-python/compare/v1.1.1...v1.1.2) (2022-07-21) + + +### Bug Fixes + +* fix wildcard resource names helper method ([#1363](https://github.com/googleapis/gapic-generator-python/issues/1363)) ([b4ecb44](https://github.com/googleapis/gapic-generator-python/commit/b4ecb44e190005f87ccbc7d1aae6f5f2e5f20115)) + ## [1.1.1](https://github.com/googleapis/gapic-generator-python/compare/v1.1.0...v1.1.1) (2022-07-04) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 83431e79cf54..a1c94682a705 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = 
"https://github.com/googleapis/gapic-generator-python" -version = "1.1.1" +version = "1.1.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From b025df24aabe0aae00f14b3bee1116eef511b923 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Fri, 22 Jul 2022 08:54:16 -0700 Subject: [PATCH 0843/1339] chore(Showcase): Update to Showcase 0.22.0 (#1362) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 3bc5d4898b37..c9fbed57f3e6 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.19.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.22.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") From 77a66983d0bbb3902f1868ff6f7dc935599633f4 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Tue, 26 Jul 2022 15:25:04 -0700 Subject: [PATCH 0844/1339] feat: regenerates pb2 file with new protoc. (#1344) * feat: regenerates pb2 file with new protoc. Co-authored-by: Anthonios Partheniou --- .../samplegen_utils/snippet_metadata_pb2.py | 799 ++---------------- 1 file changed, 56 insertions(+), 743 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py index 73a75da6c289..17febba690ea 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata_pb2.py @@ -1,12 +1,11 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: snippet_metadata.proto - +# source: gapic/samplegen_utils/snippet_metadata.proto # type: ignore - """Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database @@ -15,120 +14,9 @@ _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name='snippet_metadata.proto', - package='google.cloud.tools.snippetgen.snippetindex.v1', - syntax='proto3', - serialized_options=b'\252\002-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\312\002-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\352\0022Google::Cloud::Tools::SnippetGen::SnippetIndex::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x16snippet_metadata.proto\x12-google.cloud.tools.snippetgen.snippetindex.v1\"\xa7\x01\n\x05Index\x12T\n\x0e\x63lient_library\x18\x01 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary\x12H\n\x08snippets\x18\x02 \x03(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Snippet\"\x9f\x06\n\x07Snippet\x12\x12\n\nregion_tag\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0c\n\x04\x66ile\x18\x04 \x01(\t\x12I\n\x08language\x18\x05 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12R\n\rclient_method\x18\x06 \x01(\x0b\x32;.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod\x12\x11\n\tcanonical\x18\x07 \x01(\x08\x12M\n\x06origin\x18\x08 \x01(\x0e\x32=.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin\x12P\n\x08segments\x18\t \x03(\x0b\x32>.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment\x1a\xa7\x02\n\x07Segment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 
\x01(\x05\x12X\n\x04type\x18\x03 \x01(\x0e\x32J.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType\"\xa5\x01\n\x0bSegmentType\x12\x1c\n\x18SEGMENT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05SHORT\x10\x02\x12\x19\n\x15\x43LIENT_INITIALIZATION\x10\x03\x12\x1a\n\x16REQUEST_INITIALIZATION\x10\x04\x12\x15\n\x11REQUEST_EXECUTION\x10\x05\x12\x15\n\x11RESPONSE_HANDLING\x10\x06\"Q\n\x06Origin\x12\x16\n\x12ORIGIN_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x41PI_DEFINITION\x10\x01\x12\n\n\x06\x43ONFIG\x10\x02\x12\x0f\n\x0bHANDWRITTEN\x10\x03\"\xf2\x02\n\x0c\x43lientMethod\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\r\n\x05\x61sync\x18\x03 \x01(\x08\x12Y\n\nparameters\x18\x04 \x03(\x0b\x32\x45.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter\x12\x13\n\x0bresult_type\x18\x05 \x01(\t\x12L\n\x06\x63lient\x18\x06 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient\x12\x45\n\x06method\x18\x07 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.snippetindex.v1.Method\x1a\'\n\tParameter\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\rServiceClient\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\xbb\x01\n\rClientLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12I\n\x08language\x18\x03 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12@\n\x04\x61pis\x18\x04 \x03(\x0b\x32\x32.google.cloud.tools.snippetgen.snippetindex.v1.Api\"x\n\x06Method\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12G\n\x07service\x18\x03 \x01(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Service\"0\n\x07Service\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\"\n\x03\x41pi\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 
\x01(\t*\xef\x01\n\x08Language\x12\x18\n\x14LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x08\n\x04\x44\x41RT\x10\x03\x12\n\n\x06\x45LIXIR\x10\x04\x12\n\n\x06\x45RLANG\x10\x05\x12\x0b\n\x07\x46_SHARP\x10\x06\x12\x06\n\x02GO\x10\x07\x12\x08\n\x04JAVA\x10\x08\x12\x0e\n\nJAVASCRIPT\x10\t\x12\n\n\x06KOTLIN\x10\n\x12\x07\n\x03PHP\x10\x0b\x12\n\n\x06PYTHON\x10\x0c\x12\x08\n\x04RUBY\x10\r\x12\x08\n\x04RUST\x10\x0e\x12\t\n\x05SWIFT\x10\x0f\x12\x0e\n\nTYPESCRIPT\x10\x10\x12\n\n\x06VB_NET\x10\x11\x42\x95\x01\xaa\x02-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\xca\x02-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\xea\x02\x32Google::Cloud::Tools::SnippetGen::SnippetIndex::V1b\x06proto3' -) - -_LANGUAGE = _descriptor.EnumDescriptor( - name='Language', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Language', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='LANGUAGE_UNSPECIFIED', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='C_PLUS_PLUS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='C_SHARP', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='DART', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ELIXIR', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ERLANG', index=5, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='F_SHARP', 
index=6, number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='GO', index=7, number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='JAVA', index=8, number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='JAVASCRIPT', index=9, number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='KOTLIN', index=10, number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='PHP', index=11, number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='PYTHON', index=12, number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='RUBY', index=13, number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='RUST', index=14, number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='SWIFT', index=15, number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TYPESCRIPT', index=16, number=16, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='VB_NET', index=17, number=17, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=1873, - serialized_end=2112, -) 
-_sym_db.RegisterEnumDescriptor(_LANGUAGE) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n,gapic/samplegen_utils/snippet_metadata.proto\x12-google.cloud.tools.snippetgen.snippetindex.v1\"\xa7\x01\n\x05Index\x12T\n\x0e\x63lient_library\x18\x01 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary\x12H\n\x08snippets\x18\x02 \x03(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Snippet\"\x9f\x06\n\x07Snippet\x12\x12\n\nregion_tag\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0c\n\x04\x66ile\x18\x04 \x01(\t\x12I\n\x08language\x18\x05 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12R\n\rclient_method\x18\x06 \x01(\x0b\x32;.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod\x12\x11\n\tcanonical\x18\x07 \x01(\x08\x12M\n\x06origin\x18\x08 \x01(\x0e\x32=.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin\x12P\n\x08segments\x18\t \x03(\x0b\x32>.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment\x1a\xa7\x02\n\x07Segment\x12\r\n\x05start\x18\x01 \x01(\x05\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x05\x12X\n\x04type\x18\x03 \x01(\x0e\x32J.google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType\"\xa5\x01\n\x0bSegmentType\x12\x1c\n\x18SEGMENT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\t\n\x05SHORT\x10\x02\x12\x19\n\x15\x43LIENT_INITIALIZATION\x10\x03\x12\x1a\n\x16REQUEST_INITIALIZATION\x10\x04\x12\x15\n\x11REQUEST_EXECUTION\x10\x05\x12\x15\n\x11RESPONSE_HANDLING\x10\x06\"Q\n\x06Origin\x12\x16\n\x12ORIGIN_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x41PI_DEFINITION\x10\x01\x12\n\n\x06\x43ONFIG\x10\x02\x12\x0f\n\x0bHANDWRITTEN\x10\x03\"\xf2\x02\n\x0c\x43lientMethod\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\r\n\x05\x61sync\x18\x03 \x01(\x08\x12Y\n\nparameters\x18\x04 
\x03(\x0b\x32\x45.google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter\x12\x13\n\x0bresult_type\x18\x05 \x01(\t\x12L\n\x06\x63lient\x18\x06 \x01(\x0b\x32<.google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient\x12\x45\n\x06method\x18\x07 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.snippetindex.v1.Method\x1a\'\n\tParameter\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"6\n\rServiceClient\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\xbb\x01\n\rClientLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12I\n\x08language\x18\x03 \x01(\x0e\x32\x37.google.cloud.tools.snippetgen.snippetindex.v1.Language\x12@\n\x04\x61pis\x18\x04 \x03(\x0b\x32\x32.google.cloud.tools.snippetgen.snippetindex.v1.Api\"x\n\x06Method\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12G\n\x07service\x18\x03 \x01(\x0b\x32\x36.google.cloud.tools.snippetgen.snippetindex.v1.Service\"0\n\x07Service\x12\x12\n\nshort_name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\"\"\n\x03\x41pi\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t*\xef\x01\n\x08Language\x12\x18\n\x14LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x08\n\x04\x44\x41RT\x10\x03\x12\n\n\x06\x45LIXIR\x10\x04\x12\n\n\x06\x45RLANG\x10\x05\x12\x0b\n\x07\x46_SHARP\x10\x06\x12\x06\n\x02GO\x10\x07\x12\x08\n\x04JAVA\x10\x08\x12\x0e\n\nJAVASCRIPT\x10\t\x12\n\n\x06KOTLIN\x10\n\x12\x07\n\x03PHP\x10\x0b\x12\n\n\x06PYTHON\x10\x0c\x12\x08\n\x04RUBY\x10\r\x12\x08\n\x04RUST\x10\x0e\x12\t\n\x05SWIFT\x10\x0f\x12\x0e\n\nTYPESCRIPT\x10\x10\x12\n\n\x06VB_NET\x10\x11\x42\x95\x01\xaa\x02-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\xca\x02-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\xea\x02\x32Google::Cloud::Tools::SnippetGen::SnippetIndex::V1b\x06proto3') +_LANGUAGE = DESCRIPTOR.enum_types_by_name['Language'] Language = 
enum_type_wrapper.EnumTypeWrapper(_LANGUAGE) LANGUAGE_UNSPECIFIED = 0 C_PLUS_PLUS = 1 @@ -150,624 +38,21 @@ VB_NET = 17 -_SNIPPET_SEGMENT_SEGMENTTYPE = _descriptor.EnumDescriptor( - name='SegmentType', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.SegmentType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='SEGMENT_TYPE_UNSPECIFIED', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='FULL', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='SHORT', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='CLIENT_INITIALIZATION', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='REQUEST_INITIALIZATION', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='REQUEST_EXECUTION', index=5, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='RESPONSE_HANDLING', index=6, number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=795, - serialized_end=960, -) -_sym_db.RegisterEnumDescriptor(_SNIPPET_SEGMENT_SEGMENTTYPE) - -_SNIPPET_ORIGIN = _descriptor.EnumDescriptor( - name='Origin', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Origin', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - 
_descriptor.EnumValueDescriptor( - name='ORIGIN_UNSPECIFIED', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='API_DEFINITION', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='CONFIG', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='HANDWRITTEN', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=962, - serialized_end=1043, -) -_sym_db.RegisterEnumDescriptor(_SNIPPET_ORIGIN) - - -_INDEX = _descriptor.Descriptor( - name='Index', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='client_library', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index.client_library', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='snippets', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Index.snippets', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=74, - serialized_end=241, -) - - -_SNIPPET_SEGMENT = _descriptor.Descriptor( - name='Segment', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='start', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.start', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='end', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.end', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment.type', index=2, - number=3, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _SNIPPET_SEGMENT_SEGMENTTYPE, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=665, - serialized_end=960, -) - -_SNIPPET = _descriptor.Descriptor( - name='Snippet', - 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='region_tag', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.region_tag', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='title', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.title', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='description', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.description', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='file', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.file', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='language', 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.language', index=4, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='client_method', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.client_method', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='canonical', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.canonical', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='origin', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.origin', index=7, - number=8, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='segments', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Snippet.segments', index=8, - number=9, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_SNIPPET_SEGMENT, ], - enum_types=[ - _SNIPPET_ORIGIN, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=244, - serialized_end=1043, -) - - -_CLIENTMETHOD_PARAMETER = _descriptor.Descriptor( - name='Parameter', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter.type', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1377, - serialized_end=1416, -) - -_CLIENTMETHOD = _descriptor.Descriptor( - name='ClientMethod', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='short_name', 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.short_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.full_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='async', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.async', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameters', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.parameters', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='result_type', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.result_type', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='client', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.client', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='method', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.method', index=6, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[_CLIENTMETHOD_PARAMETER, ], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1046, - serialized_end=1416, -) - - -_SERVICECLIENT = _descriptor.Descriptor( - name='ServiceClient', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient.short_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='full_name', 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient.full_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1418, - serialized_end=1472, -) - - -_CLIENTLIBRARY = _descriptor.Descriptor( - name='ClientLibrary', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='version', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.version', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='language', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.language', index=2, - number=3, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='apis', full_name='google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary.apis', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1475, - serialized_end=1662, -) - - -_METHOD = _descriptor.Descriptor( - name='Method', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.short_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.full_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='service', 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Method.service', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1664, - serialized_end=1784, -) - - -_SERVICE = _descriptor.Descriptor( - name='Service', - full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='short_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service.short_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='full_name', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Service.full_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1786, - serialized_end=1834, -) - - -_API = _descriptor.Descriptor( - name='Api', - 
full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='id', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api.id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='version', full_name='google.cloud.tools.snippetgen.snippetindex.v1.Api.version', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1836, - serialized_end=1870, -) - -_INDEX.fields_by_name['client_library'].message_type = _CLIENTLIBRARY -_INDEX.fields_by_name['snippets'].message_type = _SNIPPET -_SNIPPET_SEGMENT.fields_by_name['type'].enum_type = _SNIPPET_SEGMENT_SEGMENTTYPE -_SNIPPET_SEGMENT.containing_type = _SNIPPET -_SNIPPET_SEGMENT_SEGMENTTYPE.containing_type = _SNIPPET_SEGMENT -_SNIPPET.fields_by_name['language'].enum_type = _LANGUAGE -_SNIPPET.fields_by_name['client_method'].message_type = _CLIENTMETHOD -_SNIPPET.fields_by_name['origin'].enum_type = _SNIPPET_ORIGIN -_SNIPPET.fields_by_name['segments'].message_type = _SNIPPET_SEGMENT -_SNIPPET_ORIGIN.containing_type = _SNIPPET -_CLIENTMETHOD_PARAMETER.containing_type = _CLIENTMETHOD -_CLIENTMETHOD.fields_by_name['parameters'].message_type = 
_CLIENTMETHOD_PARAMETER -_CLIENTMETHOD.fields_by_name['client'].message_type = _SERVICECLIENT -_CLIENTMETHOD.fields_by_name['method'].message_type = _METHOD -_CLIENTLIBRARY.fields_by_name['language'].enum_type = _LANGUAGE -_CLIENTLIBRARY.fields_by_name['apis'].message_type = _API -_METHOD.fields_by_name['service'].message_type = _SERVICE -DESCRIPTOR.message_types_by_name['Index'] = _INDEX -DESCRIPTOR.message_types_by_name['Snippet'] = _SNIPPET -DESCRIPTOR.message_types_by_name['ClientMethod'] = _CLIENTMETHOD -DESCRIPTOR.message_types_by_name['ServiceClient'] = _SERVICECLIENT -DESCRIPTOR.message_types_by_name['ClientLibrary'] = _CLIENTLIBRARY -DESCRIPTOR.message_types_by_name['Method'] = _METHOD -DESCRIPTOR.message_types_by_name['Service'] = _SERVICE -DESCRIPTOR.message_types_by_name['Api'] = _API -DESCRIPTOR.enum_types_by_name['Language'] = _LANGUAGE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - +_INDEX = DESCRIPTOR.message_types_by_name['Index'] +_SNIPPET = DESCRIPTOR.message_types_by_name['Snippet'] +_SNIPPET_SEGMENT = _SNIPPET.nested_types_by_name['Segment'] +_CLIENTMETHOD = DESCRIPTOR.message_types_by_name['ClientMethod'] +_CLIENTMETHOD_PARAMETER = _CLIENTMETHOD.nested_types_by_name['Parameter'] +_SERVICECLIENT = DESCRIPTOR.message_types_by_name['ServiceClient'] +_CLIENTLIBRARY = DESCRIPTOR.message_types_by_name['ClientLibrary'] +_METHOD = DESCRIPTOR.message_types_by_name['Method'] +_SERVICE = DESCRIPTOR.message_types_by_name['Service'] +_API = DESCRIPTOR.message_types_by_name['Api'] +_SNIPPET_SEGMENT_SEGMENTTYPE = _SNIPPET_SEGMENT.enum_types_by_name['SegmentType'] +_SNIPPET_ORIGIN = _SNIPPET.enum_types_by_name['Origin'] Index = _reflection.GeneratedProtocolMessageType('Index', (_message.Message,), { 'DESCRIPTOR': _INDEX, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Index) }) _sym_db.RegisterMessage(Index) @@ -776,11 +61,11 
@@ 'Segment': _reflection.GeneratedProtocolMessageType('Segment', (_message.Message,), { 'DESCRIPTOR': _SNIPPET_SEGMENT, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet.Segment) }), 'DESCRIPTOR': _SNIPPET, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Snippet) }) _sym_db.RegisterMessage(Snippet) @@ -790,11 +75,11 @@ 'Parameter': _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), { 'DESCRIPTOR': _CLIENTMETHOD_PARAMETER, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod.Parameter) }), 'DESCRIPTOR': _CLIENTMETHOD, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientMethod) }) _sym_db.RegisterMessage(ClientMethod) @@ -802,39 +87,67 @@ ServiceClient = _reflection.GeneratedProtocolMessageType('ServiceClient', (_message.Message,), { 'DESCRIPTOR': _SERVICECLIENT, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ServiceClient) }) _sym_db.RegisterMessage(ServiceClient) ClientLibrary = _reflection.GeneratedProtocolMessageType('ClientLibrary', (_message.Message,), { 'DESCRIPTOR': _CLIENTLIBRARY, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.ClientLibrary) }) _sym_db.RegisterMessage(ClientLibrary) Method = 
_reflection.GeneratedProtocolMessageType('Method', (_message.Message,), { 'DESCRIPTOR': _METHOD, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Method) }) _sym_db.RegisterMessage(Method) Service = _reflection.GeneratedProtocolMessageType('Service', (_message.Message,), { 'DESCRIPTOR': _SERVICE, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Service) }) _sym_db.RegisterMessage(Service) Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), { 'DESCRIPTOR': _API, - '__module__': 'snippet_metadata_pb2' + '__module__': 'gapic.samplegen_utils.snippet_metadata_pb2' # @@protoc_insertion_point(class_scope:google.cloud.tools.snippetgen.snippetindex.v1.Api) }) _sym_db.RegisterMessage(Api) - -DESCRIPTOR._options = None +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\252\002-Google.Cloud.Tools.SnippetGen.SnippetIndex.V1\312\002-Google\\Cloud\\Tools\\SnippetGen\\SnippetIndex\\V1\352\0022Google::Cloud::Tools::SnippetGen::SnippetIndex::V1' + _LANGUAGE._serialized_start = 1895 + _LANGUAGE._serialized_end = 2134 + _INDEX._serialized_start = 96 + _INDEX._serialized_end = 263 + _SNIPPET._serialized_start = 266 + _SNIPPET._serialized_end = 1065 + _SNIPPET_SEGMENT._serialized_start = 687 + _SNIPPET_SEGMENT._serialized_end = 982 + _SNIPPET_SEGMENT_SEGMENTTYPE._serialized_start = 817 + _SNIPPET_SEGMENT_SEGMENTTYPE._serialized_end = 982 + _SNIPPET_ORIGIN._serialized_start = 984 + _SNIPPET_ORIGIN._serialized_end = 1065 + _CLIENTMETHOD._serialized_start = 1068 + _CLIENTMETHOD._serialized_end = 1438 + _CLIENTMETHOD_PARAMETER._serialized_start = 1399 + _CLIENTMETHOD_PARAMETER._serialized_end = 1438 + _SERVICECLIENT._serialized_start = 
1440 + _SERVICECLIENT._serialized_end = 1494 + _CLIENTLIBRARY._serialized_start = 1497 + _CLIENTLIBRARY._serialized_end = 1684 + _METHOD._serialized_start = 1686 + _METHOD._serialized_end = 1806 + _SERVICE._serialized_start = 1808 + _SERVICE._serialized_end = 1856 + _API._serialized_start = 1858 + _API._serialized_end = 1892 # @@protoc_insertion_point(module_scope) From 123ee798842f563fa857a6e8d1f587b7faec2340 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 26 Jul 2022 22:02:29 -0400 Subject: [PATCH 0845/1339] chore: resolve sphinx warning in generated clients with mixins (#1367) --- .../%version/%sub/services/%service/_mixins.py.j2 | 14 ++++++++++++++ .../%version/%sub/services/%service/client.py.j2 | 13 +++++++++++++ .../%sub/services/%service/_async_mixins.py.j2 | 14 ++++++++++++++ .../%sub/services/%service/_mixins.py.j2 | 14 ++++++++++++++ .../%sub/services/%service/async_client.py.j2 | 13 +++++++++++++ .../%sub/services/%service/client.py.j2 | 14 ++++++++++++++ 6 files changed, 82 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 index a5ef010df563..27f7000ae2cf 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -308,8 +308,11 @@ expression that further constrains the role binding based on attributes about the request and/or target resource. 
+ **JSON Example** + :: + { "bindings": [ { @@ -333,8 +336,11 @@ } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -349,6 +355,7 @@ title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -421,8 +428,11 @@ expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -446,8 +456,11 @@ } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -462,6 +475,7 @@ title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index e1db165909db..2426f5bc4593 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -544,8 +544,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): expression that further constrains the role binding based on attributes about the request and/or target resource. 
+ **JSON Example** + :: + { "bindings": [ { @@ -569,8 +572,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -585,6 +591,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -655,8 +662,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -680,7 +690,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): } ] } + **YAML Example** + :: bindings: - members: @@ -696,6 +708,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index f88e747eae43..b578108a171e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -315,8 +315,11 @@ expression that further constrains the role binding based on attributes about the request and/or target resource. 
+ **JSON Example** + :: + { "bindings": [ { @@ -340,8 +343,11 @@ } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -356,6 +362,7 @@ title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -428,8 +435,11 @@ expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -453,8 +463,11 @@ } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -469,6 +482,7 @@ title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index cf75daaeacbe..6db4ca8a1b1e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -313,8 +313,11 @@ expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -338,8 +341,11 @@ } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -354,6 +360,7 @@ title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. 
@@ -426,8 +433,11 @@ expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -451,8 +461,11 @@ } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -467,6 +480,7 @@ title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index df2298abb702..3d3b29f4004a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -421,7 +421,9 @@ class {{ service.async_client_name }}: expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: { "bindings": [ @@ -446,8 +448,11 @@ class {{ service.async_client_name }}: } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -462,6 +467,7 @@ class {{ service.async_client_name }}: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -530,8 +536,11 @@ class {{ service.async_client_name }}: expression that further constrains the role binding based on attributes about the request and/or target resource. 
+ **JSON Example** + :: + { "bindings": [ { @@ -555,8 +564,11 @@ class {{ service.async_client_name }}: } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -571,6 +583,7 @@ class {{ service.async_client_name }}: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 440548be8e35..9a4bc5d3e64c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -424,8 +424,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -449,8 +452,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -465,6 +471,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -535,8 +542,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): expression that further constrains the role binding based on attributes about the request and/or target resource. 
+ **JSON Example** + :: + { "bindings": [ { @@ -560,8 +570,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -576,6 +589,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. From 0f82f82bdc579c12f6790941c1f34314f6897e78 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 27 Jul 2022 10:39:15 -0400 Subject: [PATCH 0846/1339] test: add async suffix to tests to avoid duplication with sync functions (#1368) --- .../unit/gapic/%name_%version/%sub/_test_mixins.py.j2 | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 4e7f525e5563..4140b61a6ea2 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -4,7 +4,7 @@ {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_delete_operation(transport: str = "grpc"): +async def test_delete_operation_async(transport: str = "grpc"): {% else %} def test_delete_operation(transport: str = "grpc"): {% endif %} @@ -133,7 +133,7 @@ def test_delete_operation_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_cancel_operation(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc"): {% else %} def test_cancel_operation(transport: str = "grpc"): {% endif %} @@ -390,7 +390,7 @@ 
def test_wait_operation_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_get_operation(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc"): {% else %} def test_get_operation(transport: str = "grpc"): {% endif %} @@ -519,7 +519,7 @@ def test_get_operation_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_list_operations(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc"): {% else %} def test_list_operations(transport: str = "grpc"): {% endif %} @@ -654,7 +654,7 @@ def test_list_operations_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_list_locations(transport: str = "grpc"): +async def test_list_locations_async(transport: str = "grpc"): {% else %} def test_list_locations(transport: str = "grpc"): {% endif %} From 8d59ee879bdfe276eec2b5dc09b533a85a47ca7f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 14:14:08 +0200 Subject: [PATCH 0847/1339] chore(deps): update dependency setuptools to v63.3.0 (#1381) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b0f8ee8a676a..df264df8b9bb 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==63.2.0 +setuptools==63.3.0 grpc-google-iam-v1==0.12.4 proto-plus==1.20.6 pytest-asyncio==0.19.0 \ No newline at end of file From 0bd022413523a0cf1167ec8e71bc9e71b38a2e34 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 2 Aug 2022 15:19:24 -0700 Subject: [PATCH 0848/1339] 
chore: add `transport` argument to py_gapic_library rule to make its interface consistent with the other languages and regapic spec (#1382) --- packages/gapic-generator/rules_python_gapic/py_gapic.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 8453d6859781..1799513f9225 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -42,6 +42,7 @@ def py_gapic_library( opt_args = None, metadata = True, service_yaml = None, + transport = None, deps = [], **kwargs): srcjar_target_name = "%s_srcjar" % name @@ -59,6 +60,9 @@ def py_gapic_library( if service_yaml: file_args[service_yaml] = "service-yaml" + if transport: + opt_args = opt_args + ["transport=%s" % transport] + proto_custom_library( name = srcjar_target_name, deps = srcs, From a3062e6faf2c1359af825302b480d83f3ad3c1b0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 3 Aug 2022 14:00:14 +0200 Subject: [PATCH 0849/1339] chore(deps): update dependency setuptools to v63.4.0 (#1383) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index df264df8b9bb..b60c29f07a0d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==63.3.0 +setuptools==63.4.0 grpc-google-iam-v1==0.12.4 proto-plus==1.20.6 pytest-asyncio==0.19.0 \ No newline at end of file From 73a36c3a838c6b026ee09a6c95157c7302b77a3f Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Thu, 4 Aug 2022 13:11:47 -0700 Subject: [PATCH 0850/1339] feat: Add rest-numeric-enums option (#1385) This option is currently a 
no-op; functionality will be implemented in upcoming commits. This also adds tests for the transport option --- .../%sub/services/%service/transports/rest.py.j2 | 6 ++++++ packages/gapic-generator/gapic/utils/options.py | 4 ++++ .../tests/unit/generator/test_options.py | 15 +++++++++++++++ 3 files changed, 25 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 81f00b22e918..948cdaabe200 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -52,6 +52,12 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=requests_version, ) +{% if opts.rest_numeric_enums %} +# TODO (numeric enums): This file was generated with the option to +# request that the server respond with enums JSON-encoded as +# numbers. The code below does not implement that functionality yet. + +{% endif %} class {{ service.name }}RestInterceptor: """Interceptor for {{ service.name }}. diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index 8efd401c8514..1785e93a55e9 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -48,6 +48,7 @@ class Options: transport: List[str] = dataclasses.field(default_factory=lambda: []) service_yaml_config: Dict[str, Any] = dataclasses.field( default_factory=dict) + rest_numeric_enums: bool = False # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' @@ -63,6 +64,8 @@ class Options: # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) 
'transport', 'warehouse-package-name', # change the package name on PyPI + # when transport includes "rest", request that response enums be JSON-encoded as numbers + 'rest-numeric-enums', )) @classmethod @@ -178,6 +181,7 @@ def tweak_path(p): # transport should include desired transports delimited by '+', e.g. transport='grpc+rest' transport=opts.pop('transport', ['grpc'])[0].split('+'), service_yaml_config=service_yaml_config, + rest_numeric_enums=bool(opts.pop('rest-numeric-enums', False)), ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index edf9846872b2..91da98de5188 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -170,6 +170,20 @@ def test_options_service_yaml_config(fs): assert opts.service_yaml_config == expected_config +def test_options_transport(): + opts = Options.build("") + assert opts.transport == ["grpc"] + + opts = Options.build("transport=rest") + assert opts.transport == ["rest"] + + opts = Options.build("transport=grpc+rest") + assert opts.transport == ["grpc", "rest"] + + opts = Options.build("transport=alpha+beta+gamma") + assert opts.transport == ["alpha", "beta", "gamma"] + + def test_options_bool_flags(): # Most options are default False. # New options should follow the dash-case/snake_case convention. 
@@ -181,6 +195,7 @@ def test_options_bool_flags(): "add-iam-methods", "metadata", "warehouse-package-name", + "rest-numeric-enums", ]} for opt, attr in opt_str_to_attr_name.items(): From 96b49fd1ef84b2bbb005b12f1889a52bffc2618c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Aug 2022 21:46:34 +0200 Subject: [PATCH 0851/1339] chore(deps): update dependency setuptools to v63.4.1 (#1384) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b60c29f07a0d..90ee8986a7ab 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==63.4.0 +setuptools==63.4.1 grpc-google-iam-v1==0.12.4 proto-plus==1.20.6 pytest-asyncio==0.19.0 \ No newline at end of file From 94f9a8d9eb152fff014d4817ad8e5afc1220b8fa Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 5 Aug 2022 17:05:29 -0700 Subject: [PATCH 0852/1339] fix: Unit test for nested fields in url path (#1387) * fix: Unit test for nested fields in url path This fixes https://github.com/googleapis/gapic-generator-python/issues/1386 --- .../%name_%version/%sub/test_%service.py.j2 | 4 +-- .../gapic-generator/gapic/schema/wrappers.py | 8 +++++ .../gapic/%name_%version/%sub/test_macros.j2 | 4 +-- .../tests/unit/schema/wrappers/test_field.py | 32 +++++++++++++++++++ 4 files changed, 44 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 7b714030738f..76667e207bbb 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1001,7 +1001,7 @@ def test_{{ method.name|snake_case }}_rest(request_type): {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} {% endif %} {% endfor %} request = request_type(request_init) @@ -1293,7 +1293,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} {% endif %} {% endfor %} request = request_type(request_init) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 019d13cb6789..0ac92bd8fcae 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -28,6 +28,7 @@ """ import collections +import copy import dataclasses import json import keyword @@ -151,6 +152,13 @@ def recursive_mock_original_type(field): return recursive_mock_original_type(self) + def merged_mock_value(self, other_mock: Dict[Any, Any]): + mock = self.mock_value_original_type + if isinstance(mock, dict) and isinstance(other_mock, dict): + mock = copy.deepcopy(mock) + mock.update(other_mock) + return mock + @utils.cached_property def mock_value(self) 
-> str: visited_fields: Set["Field"] = set() diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 7361662c4143..b11b3c37a252 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -852,7 +852,7 @@ def test_{{ method_name }}_rest(request_type): {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} {% endif %} {% endfor %} request = request_type(request_init) @@ -1158,7 +1158,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ {% for field in method.body_fields.values() %} {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.mock_value_original_type }} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} {% endif %} {% endfor %} request = request_type(request_init) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 7b5c8bd1c744..e580d43e4afe 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -404,6 +404,38 @@ def test_mock_value_original_type_message(): assert entry_field.mock_value_original_type == {} +def test_merged_mock_value_message(): + 
subfields = collections.OrderedDict(( + ('foo', make_field(name='foo', type='TYPE_INT32')), + ('bar', make_field(name='bar', type='TYPE_STRING')) + )) + + message = wrappers.MessageType( + fields=subfields, + message_pb=descriptor_pb2.DescriptorProto(name="Message", field=[ + i.field_pb for i in subfields.values() + ]), + meta=metadata.Metadata(address=metadata.Address( + module="bogus", + name="Message", + )), + nested_enums={}, + nested_messages={}, + ) + + field = make_field( + type="TYPE_MESSAGE", + type_name="bogus.Message", + message=message, + ) + + mock = field.merged_mock_value({"foo": 777, "another": "another_value"}) + assert mock == {"foo": 777, "bar": "bar_value", "another": "another_value"} + + mock = field.merged_mock_value(None) + assert mock == {"bar": "bar_value", "foo": 324} + + def test_mock_value_original_type_enum(): mollusc_field = make_field( name="class", From 55f552b31ab4b92c09d39fd5202769843eaa4af4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 7 Aug 2022 13:28:48 +0200 Subject: [PATCH 0853/1339] chore(deps): update dependency setuptools to v63.4.2 (#1388) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 90ee8986a7ab..adda9ba73a9d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==63.4.1 +setuptools==63.4.2 grpc-google-iam-v1==0.12.4 proto-plus==1.20.6 pytest-asyncio==0.19.0 \ No newline at end of file From d1c4c8e1921d152f575ecf7e7abba9e240b21335 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 9 Aug 2022 04:09:31 -0700 Subject: [PATCH 0854/1339] fix: Fix required fields test for REST transport (#1389) * fix: Fix 
required fields test for REST transport This fixes the assumption that query param fields can be only primitives. They also can be enums and special proto objects which serialize to string in json mapping: FieldMask, Timestamp, Duration. The tests will follow in a form of integration tests once grpc+rest transport is enabled in googleapis * update python version in noxfile template to match CI * regenerate goldens * remove assumption that query param fields can be only primitives * update black version in templated noxfile to fix CI Co-authored-by: Anthonios Partheniou --- .../%sub/services/%service/transports/rest.py.j2 | 4 ++-- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 10 +++++----- .../%sub/services/%service/transports/rest.py.j2 | 4 ++-- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 4 ++-- .../unit/gapic/%name_%version/%sub/test_macros.j2 | 6 +++--- .../tests/integration/goldens/asset/noxfile.py | 4 ++-- .../tests/integration/goldens/credentials/noxfile.py | 4 ++-- .../tests/integration/goldens/eventarc/noxfile.py | 4 ++-- .../tests/integration/goldens/logging/noxfile.py | 4 ++-- .../tests/integration/goldens/redis/noxfile.py | 4 ++-- 10 files changed, 24 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 4162d66c8037..93c0a0ac620c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -256,8 +256,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.client_streaming %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - {% for req_field in
method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} } diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 76667e207bbb..e64db39bdc99 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1109,7 +1109,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide )) # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" not in jsonified_request {% endfor %} @@ -1118,13 +1118,13 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in 
method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" in jsonified_request assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] {% endfor %} - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} {% if method.query_params %} @@ -1143,7 +1143,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} assert "{{ field_name }}" in jsonified_request @@ -1208,7 +1208,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} expected_params = [ - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} ( "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 948cdaabe200..0a2d7519e0d6 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -264,8 +264,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.client_streaming %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} } diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 994c793256a8..b241fd0e4de3 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -24,9 +24,9 @@ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = "3.10" nox.sessions = [ "unit", diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index b11b3c37a252..a96f52966921 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -973,7 +973,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide )) # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" not in jsonified_request {% endfor %} @@ -982,7 +982,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" in jsonified_request assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] @@ -1067,7 +1067,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} expected_params = [ - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} ( "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 7b144a0216e3..a75af25554cb 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -35,9 +35,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = "3.10" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 7dc0b7b5bee5..d27424901199 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -35,9 +35,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = "3.10" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 05e8b9a45747..41c88e77c529 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -35,9 +35,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = 
subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = "3.10" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 2cf8ceab4875..77a2ce3f55f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -35,9 +35,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = "3.10" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index d376055b2408..b35bce4b0a2f 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -35,9 +35,9 @@ LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" +DEFAULT_PYTHON_VERSION = "3.10" nox.sessions = [ "unit", From cdff1f4f2930f10a434e162f0f8e6f3102aee905 Mon Sep 17 00:00:00 2001 From: Gal Zahavi <38544478+galz10@users.noreply.github.com> Date: Tue, 9 Aug 2022 
15:32:21 +0000 Subject: [PATCH 0855/1339] Chore: Added document ai to setup.py (#1349) * Chore: Added document ai to setup.py * updated ads-templates setup.py * removed readme from ads template * reverted ads-templates changes * Update gapic/templates/setup.py.j2 Co-authored-by: Anthonios Partheniou * add new line * lint Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/gapic/templates/setup.py.j2 | 3 +++ packages/gapic-generator/tests/unit/common_types.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index ef10a8b69751..45bead1a4a7a 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -43,6 +43,9 @@ setuptools.setup( {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} 'grpc-google-iam-v1 >= 0.12.4, < 0.13dev', {% endif %} + {% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} + 'google-cloud-documentai >= 1.2.1, < 2.0.0dev', + {% endif %} ), python_requires='>=3.6', classifiers=[ diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index a703481c429d..a5d4a88d6922 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -16,7 +16,7 @@ import itertools from collections import namedtuple -from typing import(Any, Dict, Iterable, Optional) +from typing import (Any, Dict, Iterable, Optional) from google.protobuf import descriptor_pb2 From a901f96e63687af6304f797efa3d3af4f771682c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Aug 2022 02:26:15 +0200 Subject: [PATCH 0856/1339] chore(deps): update dependency proto-plus to v1.22.0 (#1393) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index adda9ba73a9d..ec20a0893a98 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,5 +9,5 @@ PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==63.4.2 grpc-google-iam-v1==0.12.4 -proto-plus==1.20.6 +proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 5e79016f923cc11b0f39af3d6e4b31bd63d3e82c Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Aug 2022 02:51:29 +0200 Subject: [PATCH 0857/1339] chore(deps): update dependency setuptools to v63.4.3 (#1394) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ec20a0893a98..f9431eeebfc8 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==63.4.2 +setuptools==63.4.3 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 5077381e115c09c5cc78854395f023b51d47ddf5 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Aug 2022 18:10:16 +0200 Subject: [PATCH 0858/1339] chore(deps): update dependency setuptools to v64 (#1396) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f9431eeebfc8..4d86ca51239f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support 
is dropped -setuptools==63.4.3 +setuptools==64.0.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 99b865c7e05fbba71a35a6d63755b166c6fb19f9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Aug 2022 13:14:31 +0200 Subject: [PATCH 0859/1339] chore(deps): update dependency setuptools to v64.0.1 (#1398) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4d86ca51239f..9138316c0214 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==64.0.0 +setuptools==64.0.1 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From adea5af020c279caae917613db29e78af4010804 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Fri, 12 Aug 2022 05:54:03 -0700 Subject: [PATCH 0860/1339] fix: fixes bug in a test with explicit_routing (#1397) Co-authored-by: Anthonios Partheniou --- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index a96f52966921..b2e849568773 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -572,7 +572,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): ) metadata = () - {% if method.field_headers %} + {% if not method.explicit_routing and method.field_headers %} metadata = tuple(metadata) 
+ ( gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} From c66c4f280f6e1e9bf16f323c2bb760c7f0bf2719 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 12 Aug 2022 11:36:34 -0400 Subject: [PATCH 0861/1339] chore(main): release 1.2.0 (#1373) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 15 +++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index cf7757900693..bbb9cb20c1f5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,21 @@ # Changelog +## [1.2.0](https://github.com/googleapis/gapic-generator-python/compare/v1.1.2...v1.2.0) (2022-08-12) + + +### Features + +* Add rest-numeric-enums option ([#1385](https://github.com/googleapis/gapic-generator-python/issues/1385)) ([099d31a](https://github.com/googleapis/gapic-generator-python/commit/099d31af79914439be0be5477a0e44ec816e9ff3)) +* regenerates pb2 file with new protoc. 
([#1344](https://github.com/googleapis/gapic-generator-python/issues/1344)) ([e74940f](https://github.com/googleapis/gapic-generator-python/commit/e74940f626502afd13fc5dcdde6dc4c4c11d3237)) + + +### Bug Fixes + +* Fix required fields test for REST transport ([#1389](https://github.com/googleapis/gapic-generator-python/issues/1389)) ([c3ffee8](https://github.com/googleapis/gapic-generator-python/commit/c3ffee8cf902a25a9343f0e27c9a7c28bd81f1b0)) +* fixes bug in a test with explicit_routing ([#1397](https://github.com/googleapis/gapic-generator-python/issues/1397)) ([6d974a1](https://github.com/googleapis/gapic-generator-python/commit/6d974a12cb91846b40323c7aa64af50ba7410b99)) +* Unit test for nested fields in url path ([#1387](https://github.com/googleapis/gapic-generator-python/issues/1387)) ([35f6fa3](https://github.com/googleapis/gapic-generator-python/commit/35f6fa30b523736d3aa0fbc17400fe0213391c74)) + ## [1.1.2](https://github.com/googleapis/gapic-generator-python/compare/v1.1.1...v1.1.2) (2022-07-21) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a1c94682a705..37dfd7934225 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.1.2" +version = "1.2.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 073d6be030ecede02407101cd47cd58e52fa9b05 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Fri, 12 Aug 2022 10:46:00 -0700 Subject: [PATCH 0862/1339] feat: Encode numeric enums parameter with REST requests (#1399) This adds `$alt=json;enum-encoding=int` to the query string if numeric enums are enabled for REST transport. 
--- .../%version/%sub/services/%service/transports/rest.py.j2 | 4 ++++ .../%sub/services/%service/transports/rest.py.j2 | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 93c0a0ac620c..feaa18f44b0b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -341,6 +341,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): query_params.update(self._get_unset_required_fields(query_params)) {% endif %}{# required fields #} + {% if opts.rest_numeric_enums %} + query_params["$alt"] = "json;enum-encoding=int" + {% endif %} + # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 0a2d7519e0d6..ea1d45af0fd6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -349,6 +349,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): query_params.update(self._get_unset_required_fields(query_params)) {% endif %}{# required fields #} + {% if opts.rest_numeric_enums %} + query_params["$alt"] = "json;enum-encoding=int" + {% endif %} + # Send the request headers = dict(metadata) headers['Content-Type'] = 'application/json' From 
6d26bc138fbc651a7e34c63bfcd379f504ab03fd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Aug 2022 20:56:23 +0200 Subject: [PATCH 0863/1339] chore(deps): update dependency setuptools to v64.0.2 (#1402) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 9138316c0214..09d0abbc8928 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==64.0.1 +setuptools==64.0.2 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 46fe7f2b99900767e28b2642465347ac0d7bc979 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Fri, 12 Aug 2022 15:54:18 -0700 Subject: [PATCH 0864/1339] feat: Note that "rest" transport support is beta. (#1403) This beta notice is triggered by having numeric enum support disabled (the default), since numeric enum support is currently the only blocker to supporting REST transport fully. This condition can be changed if further blockers arise to a GA release of REST transport. This logic and notice can be removed entirely once we are satisfied that REST transport works with or without numeric enums. 
--- .../%version/%sub/services/%service/client.py.j2 | 5 +++++ .../%sub/services/%service/transports/rest.py.j2 | 12 ++++++++++++ .../%sub/services/%service/client.py.j2 | 5 +++++ .../%sub/services/%service/transports/rest.py.j2 | 12 ++++++++++++ 4 files changed, 34 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 2426f5bc4593..4666ab3e0f1c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -233,6 +233,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, {{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. + {% if 'rest' in opts.transport and not opts.rest_numeric_enums %} + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + {% endif %} client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index feaa18f44b0b..99156e32518c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -128,6 +128,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 + + {% if not opts.rest_numeric_enums %} + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + {% endif %} """ @@ -147,6 +153,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): ) -> None: """Instantiate the transport. + {% if not opts.rest_numeric_enums %} + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + {% endif %} + Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{ ' ' }}The hostname to connect to. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 9a4bc5d3e64c..be54b73db8f4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -290,6 +290,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport (Union[str, {{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. + {% if 'rest' in opts.transport and not opts.rest_numeric_enums %} + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + {% endif %} client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index ea1d45af0fd6..5f5c46660da0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -134,6 +134,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 + + {% if not opts.rest_numeric_enums %} + NOTE: This REST transport functionality is currently in a beta + state (preview). 
We welcome your feedback via an issue in this + library's source repository. Thank you! + {% endif %} """ @@ -154,6 +160,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): ) -> None: """Instantiate the transport. + {% if not opts.rest_numeric_enums %} + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + {% endif %} + Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{ ' ' }}The hostname to connect to. From 8f3f81c7c83b69cb906129fb1d914d17779612aa Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Fri, 12 Aug 2022 17:06:56 -0700 Subject: [PATCH 0865/1339] feat: When requesting numeric enums in responses, also send them in requests (#1405) --- .../%version/%sub/services/%service/transports/rest.py.j2 | 8 ++++---- .../%sub/services/%service/transports/rest.py.j2 | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 99156e32518c..436573a1874f 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -330,12 +330,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): body = {% if body_spec == '*' -%} {{method.input.ident}}.to_json( {{method.input.ident}}(transcoded_request['body']), - {%- else -%} + {% else -%} {{method.input.fields[body_spec].type.ident}}.to_json( {{method.input.fields[body_spec].type.ident}}(transcoded_request['body']), - {%- endif %}{# body_spec == "*" #} + {% endif %}{# body_spec == "*" #} 
including_default_value_fields=False, - use_integers_for_enums=False + use_integers_for_enums={{ opts.rest_numeric_enums }} ) {%- endif %}{# body_spec #} @@ -346,7 +346,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): query_params = json.loads({{method.input.ident}}.to_json( {{method.input.ident}}(transcoded_request['query_params']), including_default_value_fields=False, - use_integers_for_enums=False + use_integers_for_enums={{ opts.rest_numeric_enums }} )) {% if method.input.required_fields %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 5f5c46660da0..c8491b5acb27 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -338,12 +338,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): body = {% if body_spec == '*' -%} {{method.input.ident}}.to_json( {{method.input.ident}}(transcoded_request['body']), - {%- else -%} + {% else -%} {{method.input.fields[body_spec].type.ident}}.to_json( {{method.input.fields[body_spec].type.ident}}(transcoded_request['body']), - {%- endif %}{# body_spec == "*" #} + {% endif %}{# body_spec == "*" #} including_default_value_fields=False, - use_integers_for_enums=False + use_integers_for_enums={{ opts.rest_numeric_enums }} ) {%- endif %}{# body_spec #} @@ -354,7 +354,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): query_params = json.loads({{method.input.ident}}.to_json( {{method.input.ident}}(transcoded_request['query_params']), including_default_value_fields=False, - use_integers_for_enums=False + use_integers_for_enums={{ opts.rest_numeric_enums }} )) {% if method.input.required_fields %} From 
b8beedf7478c6668b8732763a1af67dc015e2de2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 15 Aug 2022 15:54:33 +0200 Subject: [PATCH 0866/1339] chore(deps): update dependency setuptools to v65 (#1409) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 09d0abbc8928..b004a4640233 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==64.0.2 +setuptools==65.0.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 5ed10ed51ccfb4743aa41b36767d8ef7dcd90983 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Aug 2022 15:55:46 +0200 Subject: [PATCH 0867/1339] chore(deps): update dependency setuptools to v65.0.1 (#1410) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b004a4640233..f00556895f30 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==65.0.0 +setuptools==65.0.1 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 8011c967a43d7de8c4430eddc1a606139977b9e0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 17 Aug 2022 16:23:29 +0200 Subject: [PATCH 0868/1339] chore(deps): update dependency setuptools to v65.0.2 (#1416) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f00556895f30..e44006ceebec 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==65.0.1 +setuptools==65.0.2 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From eec5d5e0e4ad11e5767e7ee48fe55ff29d3e8162 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Wed, 17 Aug 2022 10:47:05 -0700 Subject: [PATCH 0869/1339] feat: Add BUILD rule parameter to allow setting numeric enums (#1411) This allows generator users to set the numeric-enum-response parameter at generation time via the BUILD rule. The default is `False` for backward compatibility. --- .../gapic-generator/docs/getting-started/bazel.rst | 10 ++++++++++ .../gapic-generator/rules_python_gapic/py_gapic.bzl | 4 ++++ 2 files changed, 14 insertions(+) diff --git a/packages/gapic-generator/docs/getting-started/bazel.rst b/packages/gapic-generator/docs/getting-started/bazel.rst index 1e67bfa045c4..21af6d43fb42 100644 --- a/packages/gapic-generator/docs/getting-started/bazel.rst +++ b/packages/gapic-generator/docs/getting-started/bazel.rst @@ -94,6 +94,16 @@ your ``BUILD.bazel`` file. You can use the Python section of the Document AI ":documentai_py_gapic", ], ) + +Some parameters of the ``py_gapic_library`` rule that may be of interest: + * ``transport``: the desired transport(s) to generate, delimited by ``+`` e.g. ``grpc+rest``. + - Acceptable values are ``grpc`` and ``rest``. + - Defaults to ``grpc``. + + * ``rest_numeric_enums``: if ``True``, enables generation of system parameter requesting response enums be encoded as numbers. + - Default is ``False``. + - Only effective when ``rest`` is included as a ``transport`` to be generated. + ..
_BUILD.bazel: https://github.com/googleapis/googleapis/blob/master/google/cloud/documentai/v1beta2/BUILD.bazel Compiling an API diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index 1799513f9225..cf5e225c65bf 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -43,6 +43,7 @@ def py_gapic_library( metadata = True, service_yaml = None, transport = None, + rest_numeric_enums = False, deps = [], **kwargs): srcjar_target_name = "%s_srcjar" % name @@ -63,6 +64,9 @@ def py_gapic_library( if transport: opt_args = opt_args + ["transport=%s" % transport] + if rest_numeric_enums: + opt_args = opt_args + ["rest-numeric-enums"] + proto_custom_library( name = srcjar_target_name, deps = srcs, From ed5dff829c161fa34d8d07271d8308a47d712491 Mon Sep 17 00:00:00 2001 From: Travis Cline Date: Wed, 17 Aug 2022 12:35:33 -0700 Subject: [PATCH 0870/1339] feat(docker-entrypoint): Add --experimental_allow_proto3_optional (#1414) --- packages/gapic-generator/docker-entrypoint.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/docker-entrypoint.sh b/packages/gapic-generator/docker-entrypoint.sh index b6737d7c8a71..944ab9297ca6 100755 --- a/packages/gapic-generator/docker-entrypoint.sh +++ b/packages/gapic-generator/docker-entrypoint.sh @@ -44,6 +44,7 @@ while [ -n "$1" ]; do done protoc --proto_path=/protos/ --proto_path=/in/ \ + --experimental_allow_proto3_optional \ --python_gapic_out=/out/ \ --python_gapic_opt=${PLUGIN_OPTIONS:1} \ `find /in/ -name *.proto` From 6afc90f427970306db5c837de4c66908b3a5e52f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Aug 2022 18:31:22 +0200 Subject: [PATCH 0871/1339] chore(deps): update dependency setuptools to v65.1.0 (#1418) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e44006ceebec..58d0487a8879 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==65.0.2 +setuptools==65.1.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 56fcb5f3bd6f2fa0bfaf48a5e1910d4642424da9 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 22 Aug 2022 15:09:59 -0700 Subject: [PATCH 0872/1339] fix: partial rollback of https://github.com/googleapis/gapic-generator-python/pull/1389/commits/73b1373e4cc4a7ce05e7abf726521c58c2fb881e (#1420) The original commit changed more than it should have. Only initialization of `__REQUIRED_FIELDS_DEFAULT_VALUE` should have been affected. --- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 10 +++++----- .../unit/gapic/%name_%version/%sub/test_macros.j2 | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index e64db39bdc99..76667e207bbb 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1109,7 +1109,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide )) # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in
method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" not in jsonified_request {% endfor %} @@ -1118,13 +1118,13 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" in jsonified_request assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] {% endfor %} - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} {% if method.query_params %} @@ -1143,7 +1143,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} assert "{{ field_name }}" in jsonified_request @@ -1208,7 +1208,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} expected_params = [ - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and 
req_field.name in method.query_params %} ( "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index b2e849568773..2a7c87ecdf53 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -973,7 +973,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide )) # verify fields with default values are dropped - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" not in jsonified_request {% endfor %} @@ -982,7 +982,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request.update(unset_fields) # verify required fields with default values are now present - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} {% set field_name = req_field.name | camel_case %} assert "{{ field_name }}" in jsonified_request assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] @@ -1067,7 +1067,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} expected_params = [ - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} ( "{{ 
req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} From 5e67ede3fd20d5ffeee46e0d272806bea3526f1f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 23 Aug 2022 16:24:39 +0200 Subject: [PATCH 0873/1339] chore(deps): update dependency setuptools to v65.2.0 (#1419) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 58d0487a8879..69e749db886a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==65.1.0 +setuptools==65.2.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From fc54f08d652fce912bae3bf9264811cd668a45cb Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 23 Aug 2022 09:06:41 -0700 Subject: [PATCH 0874/1339] fix: Fix REST tests generation for repeated enums (#1421) Co-authored-by: Anthonios Partheniou --- .../gapic-generator/gapic/schema/wrappers.py | 6 +++- .../tests/unit/schema/wrappers/test_field.py | 28 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 0ac92bd8fcae..d66fe03a48b8 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -137,7 +137,11 @@ def recursive_mock_original_type(field): if field.enum: # First Truthy value, fallback to the first value - return next((v for v in field.type.values if v.number), field.type.values[0]).number + answer = next( + (v for v in field.type.values if v.number), field.type.values[0]).number + if 
field.repeated: + answer = [answer] + return answer answer = field.primitive_mock() or None diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index e580d43e4afe..d89bdf66ac79 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -463,6 +463,34 @@ def test_mock_value_original_type_enum(): assert empty_field.mock_value_original_type == 0 +def test_mock_value_original_type_enum_repeated(): + mollusc_field = make_field( + name="class", + enum=make_enum( + name="Class", + values=[ + ("UNKNOWN", 0), + ("BIVALVE", 2), + ("CEPHALOPOD", 3), + ], + ), + label=3, + ) + + assert mollusc_field.mock_value_original_type == [2] + + empty_field = make_field( + name="empty", + enum=make_enum( + name="Empty", + values=[("UNKNOWN", 0)], + ), + label=3, + ) + + assert empty_field.mock_value_original_type == [0] + + @pytest.mark.parametrize( "mock_method,expected", [ From b1f351bceed2648ef3bb5cd5f8b074a107d1b1e3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 24 Aug 2022 16:54:01 +0200 Subject: [PATCH 0875/1339] chore(deps): update dependency setuptools to v65.3.0 (#1422) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 69e749db886a..728dfb300ad4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -7,7 +7,7 @@ protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped -setuptools==65.2.0 +setuptools==65.3.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.0 pytest-asyncio==0.19.0 \ No newline at end of file From 49231372c4802e3f102cc4ca2a70acbe4591b526 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Wed, 24 Aug 2022 
16:36:22 -0700 Subject: [PATCH 0876/1339] feat: Make REST unit tests support numeric enums (#1423) Support numeric enums in generated unit tests: default templates and Ads templates --- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 5 ++++- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 7 +++++-- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 76667e207bbb..9563ed4fa478 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1217,7 +1217,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, {% endif %}{# default is str #} ), - {% endfor %} + {% endfor %} + {% if opts.rest_numeric_enums %} + ('$alt', 'json;enum-encoding=int') + {% endif %} ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 4d22b4827710..1d58164908c3 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -628,10 +628,10 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% for method in service.methods.values() if 'rest' in opts.transport %} {% if method.extended_lro %} -{{ 
test_macros.rest_required_tests(method, service, full_extended_lro=True) }} +{{ test_macros.rest_required_tests(method, service, numeric_enums=opts.rest_numeric_enums, full_extended_lro=True) }} {% endif %} -{{ test_macros.rest_required_tests(method, service) }} +{{ test_macros.rest_required_tests(method, service, numeric_enums=opts.rest_numeric_enums) }} {% endfor -%} {#- method in methods for rest #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 2a7c87ecdf53..9158aea9fafb 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -833,7 +833,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endif %}{# method.paged_result_field #}{% endwith %} {% endmacro %} -{% macro rest_required_tests(method, service, full_extended_lro=False) %} +{% macro rest_required_tests(method, service, numeric_enums=False, full_extended_lro=False) %} {% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. 
#} {% if not method.client_streaming %} @@ -1076,7 +1076,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, {% endif %}{# default is str #} ), - {% endfor %} + {% endfor %} + {% if numeric_enums %} + ('$alt', 'json;enum-encoding=int') + {% endif %} ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params From aec3f798d8667ec7536844b2d63cd23b5a44d094 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 31 Aug 2022 15:38:35 +0200 Subject: [PATCH 0877/1339] chore(deps): update dependency proto-plus to v1.22.1 (#1425) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 728dfb300ad4..324196de441d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,5 +9,5 @@ PyYAML==6.0 dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==65.3.0 grpc-google-iam-v1==0.12.4 -proto-plus==1.22.0 +proto-plus==1.22.1 pytest-asyncio==0.19.0 \ No newline at end of file From a51456c9d4275b78fb22863be47efc5bbf8bac25 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 2 Sep 2022 13:12:14 +0200 Subject: [PATCH 0878/1339] chore(deps): update dependency google-api-core to v2.10.0 (#1429) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 324196de441d..b46a320c9cc6 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.8.2 +google-api-core==2.10.0 googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 From c687e2c4e87a374a481153b2d1628e175dd3e71f Mon Sep 
17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 6 Sep 2022 09:00:35 -0700 Subject: [PATCH 0879/1339] chore: Remove Python 3.6 support (#1430) * chore: Remove Python 3.6 support * fix 3.7 test fragment_alternative_templates test --- .../.github/sync-repo-settings.yaml | 4 ---- .../gapic-generator/.github/workflows/tests.yaml | 14 +++----------- packages/gapic-generator/.readthedocs.yml | 2 +- packages/gapic-generator/BUILD.bazel | 13 +++---------- .../gapic-generator/docs/getting-started/index.rst | 2 +- .../gapic/ads-templates/mypy.ini.j2 | 2 +- .../gapic-generator/gapic/templates/mypy.ini.j2 | 2 +- .../gapic-generator/gapic/templates/noxfile.py.j2 | 1 - .../gapic-generator/gapic/templates/setup.py.j2 | 3 +-- packages/gapic-generator/mypy.ini | 2 +- packages/gapic-generator/noxfile.py | 7 +++---- packages/gapic-generator/requirements.txt | 1 - packages/gapic-generator/setup.py | 3 +-- .../tests/integration/goldens/asset/mypy.ini | 2 +- .../tests/integration/goldens/asset/noxfile.py | 1 - .../tests/integration/goldens/asset/setup.py | 3 +-- .../tests/integration/goldens/credentials/mypy.ini | 2 +- .../integration/goldens/credentials/noxfile.py | 1 - .../tests/integration/goldens/credentials/setup.py | 3 +-- .../tests/integration/goldens/eventarc/mypy.ini | 2 +- .../tests/integration/goldens/eventarc/noxfile.py | 1 - .../tests/integration/goldens/eventarc/setup.py | 3 +-- .../tests/integration/goldens/logging/mypy.ini | 2 +- .../tests/integration/goldens/logging/noxfile.py | 1 - .../tests/integration/goldens/logging/setup.py | 3 +-- .../tests/integration/goldens/redis/mypy.ini | 2 +- .../tests/integration/goldens/redis/noxfile.py | 1 - .../tests/integration/goldens/redis/setup.py | 3 +-- 28 files changed, 26 insertions(+), 60 deletions(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 30a3766f0417..7e3f77b03772 100644 --- 
a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -16,7 +16,6 @@ branchProtectionRules: # - 'showcase-mtls (showcase_mtls_alternative_templates)' - 'showcase-mypy' - 'showcase-mypy (_alternative_templates)' - - 'showcase-unit (3.6)' - 'showcase-unit (3.7)' - 'showcase-unit (3.8)' - 'showcase-unit (3.9)' @@ -29,7 +28,6 @@ branchProtectionRules: - 'showcase-unit (3.8, _alternative_templates_mixins)' - 'showcase-unit (3.9, _alternative_templates_mixins)' - 'showcase-unit (3.10, _alternative_templates_mixins)' - - 'showcase-unit (3.6, _mixins)' - 'showcase-unit (3.7, _mixins)' - 'showcase-unit (3.8, _mixins)' - 'showcase-unit (3.9, _mixins)' @@ -39,12 +37,10 @@ branchProtectionRules: - 'goldens-lint' - 'style-check' - 'snippetgen' - - 'unit (3.6)' - 'unit (3.7)' - 'unit (3.8)' - 'unit (3.9)' - 'unit (3.10)' - - 'fragment (3.6)' - 'fragment (3.7)' - 'fragment (3.8)' - 'fragment (3.9)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 29281780dd44..3791de537808 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -131,13 +131,8 @@ jobs: showcase-unit: strategy: matrix: - python: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python: ["3.7", "3.8", "3.9", "3.10"] variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] - exclude: - - python: "3.6" - variant: _alternative_templates - - python: "3.6" - variant: _alternative_templates_mixins runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -241,7 +236,7 @@ jobs: unit: strategy: matrix: - python: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python: ["3.7", "3.8", "3.9", "3.10"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -262,11 +257,8 @@ jobs: fragment: strategy: matrix: - python: ["3.6", "3.7", "3.8", "3.9", "3.10"] + python: ["3.7", "3.8", "3.9", "3.10"] 
variant: ['', _alternative_templates] - exclude: - - python: "3.6" - variant: _alternative_templates runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 diff --git a/packages/gapic-generator/.readthedocs.yml b/packages/gapic-generator/.readthedocs.yml index ebea21ff0c06..96e0f2252b93 100644 --- a/packages/gapic-generator/.readthedocs.yml +++ b/packages/gapic-generator/.readthedocs.yml @@ -3,4 +3,4 @@ build: image: latest python: pip_install: true - version: 3.6 + version: 3.7 diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 80308f33f594..2a0a225e3316 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -27,11 +27,6 @@ pandoc_binary( name = "pandoc_binary", ) -config_setting( - name = "gapic_gen_python_3_6", - values = {"define": "gapic_gen_python=3.6"}, -) - py_runtime( name = "pyenv3_runtime", interpreter = ":pyenv3wrapper.sh", @@ -69,9 +64,7 @@ py_binary( requirement("MarkupSafe"), requirement("pypandoc"), requirement("PyYAML"), - requirement("grpc-google-iam-v1"), - ] + select({ - ":gapic_gen_python_3_6": [requirement("dataclasses")], - "//conditions:default": [], - }), + requirement("grpc-google-iam-v1"), + ] ) + diff --git a/packages/gapic-generator/docs/getting-started/index.rst b/packages/gapic-generator/docs/getting-started/index.rst index ec9149a582b7..4d4893613911 100644 --- a/packages/gapic-generator/docs/getting-started/index.rst +++ b/packages/gapic-generator/docs/getting-started/index.rst @@ -2,7 +2,7 @@ Getting Started --------------- This code generator is implemented as a plugin to ``protoc``, the compiler -for `protocol buffers`_, and will run in any environment that Python 3.6+ and +for `protocol buffers`_, and will run in any environment that Python 3.7+ and protocol buffers do. 
Because dependency management and such can be a significant undertaking, we diff --git a/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 b/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 +++ b/packages/gapic-generator/gapic/ads-templates/mypy.ini.j2 @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/gapic-generator/gapic/templates/mypy.ini.j2 b/packages/gapic-generator/gapic/templates/mypy.ini.j2 index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/gapic/templates/mypy.ini.j2 +++ b/packages/gapic-generator/gapic/templates/mypy.ini.j2 @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index b241fd0e4de3..d66e18cb23b5 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -12,7 +12,6 @@ import sys import nox # type: ignore ALL_PYTHON = [ - "3.6", "3.7", "3.8", "3.9", diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 45bead1a4a7a..cd8901a8c8a5 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -47,12 +47,11 @@ setuptools.setup( 'google-cloud-documentai >= 1.2.1, < 2.0.0dev', {% endif %} ), - python_requires='>=3.6', + python_requires='>=3.7', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', diff --git a/packages/gapic-generator/mypy.ini 
b/packages/gapic-generator/mypy.ini index b3d7f6e18c49..78cfb8988b2e 100644 --- a/packages/gapic-generator/mypy.ini +++ b/packages/gapic-generator/mypy.ini @@ -1,2 +1,2 @@ [mypy] -python_version = 3.6 +python_version = 3.7 diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index c9fbed57f3e6..39f24d71fa69 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -34,7 +34,6 @@ ALL_PYTHON = ( - "3.6", "3.7", "3.8", "3.9", @@ -158,7 +157,7 @@ def fragment(session, use_ads_templates=False): session.log(tester(frag)) -@nox.session(python=ALL_PYTHON[1:]) +@nox.session(python=ALL_PYTHON) def fragment_alternative_templates(session): fragment(session, use_ads_templates=True) @@ -334,7 +333,7 @@ def showcase_unit( run_showcase_unit_tests(session) -@nox.session(python=ALL_PYTHON[1:]) # Do not test 3.6 +@nox.session(python=ALL_PYTHON) def showcase_unit_alternative_templates(session): with showcase_library( session, templates=ADS_TEMPLATES, other_opts=("old-naming",) @@ -357,7 +356,7 @@ def showcase_unit_mixins(session): run_showcase_unit_tests(session) -@nox.session(python=ALL_PYTHON[1:]) # Do not test 3.6 +@nox.session(python=ALL_PYTHON) def showcase_unit_alternative_templates_mixins(session): with showcase_library( session, templates=ADS_TEMPLATES, other_opts=("old-naming",), diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b46a320c9cc6..9c71d3d19d15 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,7 +6,6 @@ MarkupSafe==2.1.1 protobuf==3.20.1 pypandoc==1.8.1 PyYAML==6.0 -dataclasses==0.6 # TODO(busunkim) remove when 3.6 support is dropped setuptools==65.3.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 37dfd7934225..7e569d8cac99 100644 --- a/packages/gapic-generator/setup.py +++ 
b/packages/gapic-generator/setup.py @@ -64,7 +64,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -74,7 +73,7 @@ "Topic :: Software Development :: Libraries :: Python Modules", ], platforms="Posix; MacOS X", - python_requires=">=3.6", + python_requires=">=3.7", install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini b/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini +++ b/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index a75af25554cb..2a7c0060f67a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -23,7 +23,6 @@ import nox # type: ignore ALL_PYTHON = [ - "3.6", "3.7", "3.8", "3.9", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 385d78bdea1d..851b197bb3be 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -43,12 +43,11 @@ 'proto-plus >= 1.19.7', 'grpc-google-iam-v1 >= 0.12.4, < 0.13dev', ), - python_requires='>=3.6', + python_requires='>=3.7', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 
'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini b/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini +++ b/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index d27424901199..9a3857b8a77d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -23,7 +23,6 @@ import nox # type: ignore ALL_PYTHON = [ - "3.6", "3.7", "3.8", "3.9", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 056323e24bf8..f157ddec4193 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -42,12 +42,11 @@ 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), - python_requires='>=3.6', + python_requires='>=3.7', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini 
b/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 41c88e77c529..3e8ab34def43 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -23,7 +23,6 @@ import nox # type: ignore ALL_PYTHON = [ - "3.6", "3.7", "3.8", "3.9", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 86fc0557d369..8fd2cec92612 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -42,12 +42,11 @@ 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), - python_requires='>=3.6', + python_requires='>=3.7', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini b/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini +++ b/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages 
= True diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 77a2ce3f55f6..0d59e8201b42 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -23,7 +23,6 @@ import nox # type: ignore ALL_PYTHON = [ - "3.6", "3.7", "3.8", "3.9", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index c07123193eb1..7814ef83872b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -42,12 +42,11 @@ 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), - python_requires='>=3.6', + python_requires='>=3.7', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini b/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini +++ b/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index b35bce4b0a2f..611bd142e6a9 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -23,7 +23,6 
@@ import nox # type: ignore ALL_PYTHON = [ - "3.6", "3.7", "3.8", "3.9", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 28544f8e65fc..4e0bc16fafd4 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -42,12 +42,11 @@ 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', ), - python_requires='>=3.6', + python_requires='>=3.7', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', From 548a86a9a59a22403ccea7c58f7517921b9c2651 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 6 Sep 2022 09:25:47 -0700 Subject: [PATCH 0880/1339] fix: Fix remaining REST transport issues (#1428) * fix: Fix remaining REST transport issues This includes: 1) Use message-based transcoding logic instead of dictionary-based. This fixes multiple bugs caused by dict-json-object representation mismatches for protobuf objects. This also removes a lot of `json->dict->object->proto-plus` object conversion overhead, which was slow and very error-prone. Now all transcoding and rest-transport-specific logic is executed in terms of native protobuf (`_pb`) stubs objects. No dicts, no proto-plus, no unnecessary conversions. 2) Fix for non-proto-plus objects in rest.py transport and in tests. This is essential for APIs which have IAM methods defined on their surface. 
3) Fix support for Any field in tests 4) Fix creation of mock_values (do `mock_value1` instead of `mock_value_1`, since `mock_value_1` causes issues for FieldMask mocks * update setup template to depend on latest api-core Co-authored-by: Anthonios Partheniou --- .../gapic-generator/gapic/schema/metadata.py | 6 +- .../gapic-generator/gapic/schema/wrappers.py | 28 ++++-- .../services/%service/transports/rest.py.j2 | 40 +++++---- .../gapic/templates/setup.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 4 +- .../gapic/%name_%version/%sub/test_macros.j2 | 88 ++++++++++++++----- .../tests/integration/goldens/asset/setup.py | 2 +- .../services/iam_credentials/async_client.py | 2 +- .../services/iam_credentials/client.py | 2 +- ...credentials_generate_access_token_async.py | 2 +- ..._credentials_generate_access_token_sync.py | 2 +- .../integration/goldens/credentials/setup.py | 2 +- .../integration/goldens/eventarc/setup.py | 2 +- .../logging_service_v2/async_client.py | 4 +- .../services/logging_service_v2/client.py | 4 +- ...gging_service_v2_list_log_entries_async.py | 2 +- ...ogging_service_v2_list_log_entries_sync.py | 2 +- ...gging_service_v2_tail_log_entries_async.py | 2 +- ...ogging_service_v2_tail_log_entries_sync.py | 2 +- .../integration/goldens/logging/setup.py | 2 +- .../tests/integration/goldens/redis/setup.py | 2 +- .../tests/unit/schema/wrappers/test_field.py | 39 +++++++- 22 files changed, 168 insertions(+), 75 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 5bd23e1b522d..d53c002e6779 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -92,7 +92,7 @@ def __str__(self) -> str: # This module is from a different proto package # Most commonly happens for a common proto # https://pypi.org/project/googleapis-common-protos/ - if not self.proto_package.startswith(self.api_naming.proto_package): + if 
self.is_external_type: module_name = f'{self.module}_pb2' # Return the dot-separated Python identifier. @@ -102,6 +102,10 @@ def __str__(self) -> str: # Return the Python identifier. return '.'.join(self.parent + (self.name,)) + @property + def is_external_type(self): + return not self.proto_package.startswith(self.api_naming.proto_package) + @cached_property def __cached_string_repr(self): return "({})".format( diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index d66fe03a48b8..87a5b4598096 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -128,10 +128,19 @@ def recursive_mock_original_type(field): # Not worth the hassle, just return an empty map. return {} - msg_dict = { - f.name: recursive_mock_original_type(f) - for f in field.message.fields.values() - } + adr = field.type.meta.address + if adr.name == "Any" and adr.package == ("google", "protobuf"): + # If it is Any type pack a random but validly encoded type, + # Duration in this specific case. + msg_dict = { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b'\x08\x0c\x10\xdb\x07', + } + else: + msg_dict = { + f.name: recursive_mock_original_type(f) + for f in field.message.fields.values() + } return [msg_dict] if field.repeated else msg_dict @@ -237,9 +246,16 @@ def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, if self.type.python_type == bool: answer = True elif self.type.python_type == str: - answer = f"{self.name}_value_{suffix}" if suffix else f"{self.name}_value" + if self.name == "type_url": + # It is most likely a mock for Any type. We don't really care + # which mock value to put, so lets put a value which makes + # Any deserializer happy, which will wtill work even if it + # is not Any. 
+ answer = "type.googleapis.com/google.protobuf.Empty" + else: + answer = f"{self.name}_value{suffix}" if suffix else f"{self.name}_value" elif self.type.python_type == bytes: - answer_str = f"{self.name}_blob_{suffix}" if suffix else f"{self.name}_blob" + answer_str = f"{self.name}_blob{suffix}" if suffix else f"{self.name}_blob" answer = bytes(answer_str, encoding="utf-8") elif self.type.python_type == int: answer = sum([ord(i) for i in self.name]) + suffix diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index c8491b5acb27..9f6489af27d6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -15,9 +15,9 @@ from google.api_core import rest_streaming from google.api_core import path_template from google.api_core import gapic_v1 +from google.protobuf import json_format {% if service.has_lro %} from google.api_core import operations_v1 -from google.protobuf import json_format {% endif %} from requests import __version__ as requests_version import dataclasses @@ -328,20 +328,19 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %}{# rule in method.http_options #} ] request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - request_kwargs = {{method.input.ident}}.to_dict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + {% if method.input.ident.is_external_type %} + pb_request = request + {% else %} + pb_request = {{method.input.ident}}.pb(request) + {% endif %} + transcoded_request = path_template.transcode(http_options, pb_request) {% set body_spec = method.http_options[0].body %} {%- if 
body_spec %} # Jsonify the request body - body = {% if body_spec == '*' -%} - {{method.input.ident}}.to_json( - {{method.input.ident}}(transcoded_request['body']), - {% else -%} - {{method.input.fields[body_spec].type.ident}}.to_json( - {{method.input.fields[body_spec].type.ident}}(transcoded_request['body']), - {% endif %}{# body_spec == "*" #} + + body = json_format.MessageToJson( + transcoded_request['body'], including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }} ) @@ -351,12 +350,11 @@ class {{service.name}}RestTransport({{service.name}}Transport): method = transcoded_request['method'] # Jsonify the query params - query_params = json.loads({{method.input.ident}}.to_json( - {{method.input.ident}}(transcoded_request['query_params']), + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], including_default_value_fields=False, - use_integers_for_enums={{ opts.rest_numeric_enums }} + use_integers_for_enums={{ opts.rest_numeric_enums }}, )) - {% if method.input.required_fields %} query_params.update(self._get_unset_required_fields(query_params)) {% endif %}{# required fields #} @@ -391,10 +389,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% elif method.server_streaming %} resp = rest_streaming.ResponseIterator(response, {{method.output.ident}}) {% else %} - resp = {{method.output.ident}}.from_json( - response.content, - ignore_unknown_fields=True - ) + resp = {{method.output.ident}}() + {% if method.output.ident.is_external_type %} + pb_resp = resp + {% else %} + pb_resp = {{method.output.ident}}.pb(resp) + {% endif %} + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) {% endif %}{# method.lro #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) return resp diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index cd8901a8c8a5..7e1567f303c5 100644 --- 
a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -32,9 +32,9 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', {% else %} - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', {% endif %} 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1d58164908c3..3e166c029c7b 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -15,6 +15,7 @@ import grpc from grpc.experimental import aio {% if "rest" in opts.transport %} from collections.abc import Iterable +from google.protobuf import json_format import json {% endif %} import math @@ -51,9 +52,6 @@ from google.api_core import future from google.api_core import operation from google.api_core import operations_v1 from google.longrunning import operations_pb2 -{% if "rest" in opts.transport %} -from google.protobuf import json_format -{% endif %}{# rest transport #} {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 9158aea9fafb..532070be3bb8 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -855,7 +855,7 @@ def test_{{ method_name }}_rest(request_type): request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} {% endif %} {% endfor %} - request = request_type(request_init) + request = request_type(**request_init) {% if method.client_streaming %} requests = [request] {% endif %} @@ -902,11 +902,19 @@ def test_{{ method_name }}_rest(request_type): json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) + {% if method.output.ident.is_external_type %} + pb_return_value = return_value + {% else %} + pb_return_value = {{ method.output.ident }}.pb(return_value) {% endif %} + json_return_value = json_format.MessageToJson(pb_return_value) + {% endif %} + + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} @@ -965,12 +973,17 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide request_init["{{ req_field.name }}"] = {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }} {% endif %}{# default is str #} {% endfor %} - request = request_type(request_init) - jsonified_request = json.loads(request_type.to_json( - request, + request = request_type(**request_init) + {% if method.input.ident.is_external_type %} + pb_request = request + {% else %} + pb_request = request_type.pb(request) + {% endif %} + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, including_default_value_fields=False, 
use_integers_for_enums=False - )) + )) # verify fields with default values are dropped {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} @@ -978,7 +991,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert "{{ field_name }}" not in jsonified_request {% endfor %} - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -994,7 +1007,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) {% if method.query_params %} # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) @@ -1014,7 +1027,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) - request = request_type(request_init) + request = request_type(**request_init) # Designate an appropriate value for the returned response. 
{% if method.void %} @@ -1032,13 +1045,18 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide with mock.patch.object(path_template, 'transcode') as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. + {% if method.input.ident.is_external_type %} + pb_request = request + {% else %} + pb_request = request_type.pb(request) + {% endif %} transcode_result = { 'uri': 'v1/sample_method', 'method': "{{ method.http_options[0].method }}", - 'query_params': request_init, + 'query_params': pb_request, } {% if method.http_options[0].body %} - transcode_result['body'] = {} + transcode_result['body'] = pb_request {% endif %} transcode.return_value = transcode_result @@ -1048,11 +1066,19 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) + + {% if method.output.ident.is_external_type %} + pb_return_value = return_value + {% else %} + pb_return_value = {{ method.output.ident }}.pb(return_value) + {% endif %} + json_return_value = json_format.MessageToJson(pb_return_value) + {% endif %} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) {% endif %} + response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1115,8 +1141,17 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): {% if not method.void %} post.assert_not_called() {% endif %} - - transcode.return_value = {"method": "post", "uri": "my_uri", "body": None, "query_params": {},} + {% if method.input.ident.is_external_type %} + pb_message = {{ method.input.ident }}() + {% else %} + pb_message = {{ 
method.input.ident }}.pb({{ method.input.ident }}()) + {% endif %} + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } req.return_value = Response() req.return_value.status_code = 200 @@ -1164,7 +1199,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} {% endif %} {% endfor %} - request = request_type(request_init) + request = request_type(**request_init) {% if method.client_streaming %} requests = [request] {% endif %} @@ -1222,12 +1257,17 @@ def test_{{ method_name }}_rest_flattened(): json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) + {% if method.output.ident.is_external_type %} + pb_return_value = return_value + {% else %} + pb_return_value = {{ method.output.ident }}.pb(return_value) + {% endif %} + json_return_value = json_format.MessageToJson(pb_return_value) + {% endif %} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) {% endif %} - response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1342,7 +1382,7 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): {% if method.server_streaming %} - response_val = "[{}]".format({{ method.output.ident }}.to_json(response_val)) + response_val = "[{}]".format(response_val) {% endif %} return_val._content = response_val.encode('UTF-8') return_val.status_code = 200 diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 851b197bb3be..40a197078af1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 8306403e4d65..7c75fb16f8e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -219,7 +219,7 @@ async def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="name_value", - scope=['scope_value_1', 'scope_value_2'], + scope=['scope_value1', 'scope_value2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 70ec15e547c4..f94c44d21a9f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -407,7 +407,7 @@ def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="name_value", - scope=['scope_value_1', 'scope_value_2'], + scope=['scope_value1', 'scope_value2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index d2fb32cff1e6..825f6c072c35 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -34,7 +34,7 @@ async def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="name_value", - scope=['scope_value_1', 'scope_value_2'], + scope=['scope_value1', 'scope_value2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index 93e49f178cfd..16be47cf95a7 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -34,7 +34,7 @@ def sample_generate_access_token(): # Initialize request argument(s) request = credentials_v1.GenerateAccessTokenRequest( name="name_value", - scope=['scope_value_1', 'scope_value_2'], + scope=['scope_value1', 'scope_value2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index f157ddec4193..817c1a038dc8 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 8fd2cec92612..903b2b6e08e9 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 
4948fbc3cdaf..f74115ea9a2d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -506,7 +506,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request @@ -867,7 +867,7 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index bb4155398c36..c386f0920f42 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -677,7 +677,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request @@ -1015,7 +1015,7 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + 
resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 4a8692b04e47..fa7ad59dbac4 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -33,7 +33,7 @@ async def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index 062075af9091..29d1dc7d13fd 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -33,7 +33,7 @@ def sample_list_log_entries(): # Initialize request argument(s) request = logging_v2.ListLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 
'resource_names_value2'], ) # Make the request diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 3e77920f81e1..7a937948cd0d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -33,7 +33,7 @@ async def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects an iterator which contains diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index ee1108b33f02..a38e7b14d389 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -33,7 +33,7 @@ def sample_tail_log_entries(): # Initialize request argument(s) request = logging_v2.TailLogEntriesRequest( - resource_names=['resource_names_value_1', 'resource_names_value_2'], + resource_names=['resource_names_value1', 'resource_names_value2'], ) # This method expects 
an iterator which contains diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 7814ef83872b..0ca8e0943cd6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 4e0bc16fafd4..6fbd9aacb19d 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -37,7 +37,7 @@ platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - 'google-api-core[grpc] >= 2.8.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index d89bdf66ac79..f23a2c9d8571 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -283,7 +283,7 @@ def test_mock_value_repeated(): def test_mock_value_original_type_repeated(): field = make_field(name='foo_bar', type='TYPE_STRING', label=3) assert field.mock_value_original_type == [ - "foo_bar_value_1", "foo_bar_value_2"] + "foo_bar_value1", "foo_bar_value2"] def test_mock_value_map(): @@ -353,10 +353,38 @@ def test_mock_value_message(): def 
test_mock_value_original_type_message(): + any_message_subfields = collections.OrderedDict(( + ('type_url', make_field(name='type_url', number=1, type='TYPE_STRING')), + ('value', make_field(name='value', number=2, type='TYPE_BYTES')), + )) + + any_message = wrappers.MessageType( + fields=any_message_subfields, + message_pb=descriptor_pb2.DescriptorProto(name='Any', field=[ + i.field_pb for i in any_message_subfields.values() + ]), + meta=metadata.Metadata(address=metadata.Address( + module='bogus', + name='Any', + package=('google', 'protobuf') + )), + nested_enums={}, + nested_messages={}, + ) + + any_field = make_field( + name='surprise', + type='TYPE_MESSAGE', + type_name='google.protobuf.Any', + message=any_message + ) + subfields = collections.OrderedDict(( ('foo', make_field(name='foo', type='TYPE_INT32')), - ('bar', make_field(name='bar', type='TYPE_STRING')) + ('bar', make_field(name='bar', type='TYPE_STRING')), + ('surprise', any_field), )) + message = wrappers.MessageType( fields=subfields, message_pb=descriptor_pb2.DescriptorProto(name='Message', field=[ @@ -378,7 +406,9 @@ def test_mock_value_original_type_message(): mock = field.mock_value_original_type - assert mock == {"foo": 324, "bar": "bar_value"} + assert mock == {"foo": 324, "bar": "bar_value", "surprise": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07"}} # Messages by definition aren't primitive with pytest.raises(TypeError): @@ -403,6 +433,9 @@ def test_mock_value_original_type_message(): assert entry_field.mock_value_original_type == {} + assert any_message.fields['type_url'].primitive_mock( + ) == "type.googleapis.com/google.protobuf.Empty" + def test_merged_mock_value_message(): subfields = collections.OrderedDict(( From 1de990a03edfd89de719eead1bca3a9fe7e54f87 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Tue, 6 Sep 2022 11:23:32 -0700 Subject: [PATCH 0881/1339] feat: add generated sample comment (#1417) * add generated sample comment * 
update golden files * update tests/unit/samplegen/golden_snippets * update tests/snippetgen/goldens * update tests/integration/goldens/eventarc/samples/generated_samples * update golden clients and metadata Co-authored-by: Anthonios Partheniou --- .../examples/_generated_sample_comment.j2 | 7 + .../gapic/templates/examples/sample.py.j2 | 2 + .../services/asset_service/async_client.py | 84 ++ .../asset_v1/services/asset_service/client.py | 84 ++ ..._asset_service_analyze_iam_policy_async.py | 7 + ...ce_analyze_iam_policy_longrunning_async.py | 7 + ...ice_analyze_iam_policy_longrunning_sync.py | 7 + ...d_asset_service_analyze_iam_policy_sync.py | 7 + ..._service_batch_get_assets_history_async.py | 7 + ...t_service_batch_get_assets_history_sync.py | 7 + ...nerated_asset_service_create_feed_async.py | 7 + ...enerated_asset_service_create_feed_sync.py | 7 + ...nerated_asset_service_delete_feed_async.py | 7 + ...enerated_asset_service_delete_feed_sync.py | 7 + ...rated_asset_service_export_assets_async.py | 7 + ...erated_asset_service_export_assets_sync.py | 7 + ..._generated_asset_service_get_feed_async.py | 7 + ...1_generated_asset_service_get_feed_sync.py | 7 + ...nerated_asset_service_list_assets_async.py | 7 + ...enerated_asset_service_list_assets_sync.py | 7 + ...enerated_asset_service_list_feeds_async.py | 7 + ...generated_asset_service_list_feeds_sync.py | 7 + ...t_service_search_all_iam_policies_async.py | 7 + ...et_service_search_all_iam_policies_sync.py | 7 + ...sset_service_search_all_resources_async.py | 7 + ...asset_service_search_all_resources_sync.py | 7 + ...nerated_asset_service_update_feed_async.py | 7 + ...enerated_asset_service_update_feed_sync.py | 7 + .../snippet_metadata_asset_v1.json | 472 +++--- .../services/iam_credentials/async_client.py | 28 + .../services/iam_credentials/client.py | 28 + ...credentials_generate_access_token_async.py | 7 + ..._credentials_generate_access_token_sync.py | 7 + ...iam_credentials_generate_id_token_async.py | 7 + 
..._iam_credentials_generate_id_token_sync.py | 7 + ...nerated_iam_credentials_sign_blob_async.py | 7 + ...enerated_iam_credentials_sign_blob_sync.py | 7 + ...enerated_iam_credentials_sign_jwt_async.py | 7 + ...generated_iam_credentials_sign_jwt_sync.py | 7 + .../snippet_metadata_credentials_v1.json | 160 +- .../services/eventarc/async_client.py | 35 + .../eventarc_v1/services/eventarc/client.py | 35 + ...generated_eventarc_create_trigger_async.py | 7 + ..._generated_eventarc_create_trigger_sync.py | 7 + ...generated_eventarc_delete_trigger_async.py | 7 + ..._generated_eventarc_delete_trigger_sync.py | 7 + ...v1_generated_eventarc_get_trigger_async.py | 7 + ..._v1_generated_eventarc_get_trigger_sync.py | 7 + ..._generated_eventarc_list_triggers_async.py | 7 + ...1_generated_eventarc_list_triggers_sync.py | 7 + ...generated_eventarc_update_trigger_async.py | 7 + ..._generated_eventarc_update_trigger_sync.py | 7 + .../snippet_metadata_eventarc_v1.json | 200 +-- .../config_service_v2/async_client.py | 161 ++ .../services/config_service_v2/client.py | 161 ++ .../logging_service_v2/async_client.py | 42 + .../services/logging_service_v2/client.py | 42 + .../metrics_service_v2/async_client.py | 35 + .../services/metrics_service_v2/client.py | 35 + ...d_config_service_v2_create_bucket_async.py | 7 + ...ed_config_service_v2_create_bucket_sync.py | 7 + ...onfig_service_v2_create_exclusion_async.py | 7 + ...config_service_v2_create_exclusion_sync.py | 7 + ...ted_config_service_v2_create_sink_async.py | 7 + ...ated_config_service_v2_create_sink_sync.py | 7 + ...ted_config_service_v2_create_view_async.py | 7 + ...ated_config_service_v2_create_view_sync.py | 7 + ...d_config_service_v2_delete_bucket_async.py | 7 + ...ed_config_service_v2_delete_bucket_sync.py | 7 + ...onfig_service_v2_delete_exclusion_async.py | 7 + ...config_service_v2_delete_exclusion_sync.py | 7 + ...ted_config_service_v2_delete_sink_async.py | 7 + ...ated_config_service_v2_delete_sink_sync.py | 7 + 
...ted_config_service_v2_delete_view_async.py | 7 + ...ated_config_service_v2_delete_view_sync.py | 7 + ...ated_config_service_v2_get_bucket_async.py | 7 + ...rated_config_service_v2_get_bucket_sync.py | 7 + ...nfig_service_v2_get_cmek_settings_async.py | 7 + ...onfig_service_v2_get_cmek_settings_sync.py | 7 + ...d_config_service_v2_get_exclusion_async.py | 7 + ...ed_config_service_v2_get_exclusion_sync.py | 7 + ...erated_config_service_v2_get_sink_async.py | 7 + ...nerated_config_service_v2_get_sink_sync.py | 7 + ...erated_config_service_v2_get_view_async.py | 7 + ...nerated_config_service_v2_get_view_sync.py | 7 + ...ed_config_service_v2_list_buckets_async.py | 7 + ...ted_config_service_v2_list_buckets_sync.py | 7 + ...config_service_v2_list_exclusions_async.py | 7 + ..._config_service_v2_list_exclusions_sync.py | 7 + ...ated_config_service_v2_list_sinks_async.py | 7 + ...rated_config_service_v2_list_sinks_sync.py | 7 + ...ated_config_service_v2_list_views_async.py | 7 + ...rated_config_service_v2_list_views_sync.py | 7 + ...config_service_v2_undelete_bucket_async.py | 7 + ..._config_service_v2_undelete_bucket_sync.py | 7 + ...d_config_service_v2_update_bucket_async.py | 7 + ...ed_config_service_v2_update_bucket_sync.py | 7 + ...g_service_v2_update_cmek_settings_async.py | 7 + ...ig_service_v2_update_cmek_settings_sync.py | 7 + ...onfig_service_v2_update_exclusion_async.py | 7 + ...config_service_v2_update_exclusion_sync.py | 7 + ...ted_config_service_v2_update_sink_async.py | 7 + ...ated_config_service_v2_update_sink_sync.py | 7 + ...ted_config_service_v2_update_view_async.py | 7 + ...ated_config_service_v2_update_view_sync.py | 7 + ...ted_logging_service_v2_delete_log_async.py | 7 + ...ated_logging_service_v2_delete_log_sync.py | 7 + ...gging_service_v2_list_log_entries_async.py | 7 + ...ogging_service_v2_list_log_entries_sync.py | 7 + ...ated_logging_service_v2_list_logs_async.py | 7 + ...rated_logging_service_v2_list_logs_sync.py | 7 + 
...st_monitored_resource_descriptors_async.py | 7 + ...ist_monitored_resource_descriptors_sync.py | 7 + ...gging_service_v2_tail_log_entries_async.py | 7 + ...ogging_service_v2_tail_log_entries_sync.py | 7 + ...ging_service_v2_write_log_entries_async.py | 7 + ...gging_service_v2_write_log_entries_sync.py | 7 + ...rics_service_v2_create_log_metric_async.py | 7 + ...trics_service_v2_create_log_metric_sync.py | 7 + ...rics_service_v2_delete_log_metric_async.py | 7 + ...trics_service_v2_delete_log_metric_sync.py | 7 + ...metrics_service_v2_get_log_metric_async.py | 7 + ..._metrics_service_v2_get_log_metric_sync.py | 7 + ...trics_service_v2_list_log_metrics_async.py | 7 + ...etrics_service_v2_list_log_metrics_sync.py | 7 + ...rics_service_v2_update_log_metric_async.py | 7 + ...trics_service_v2_update_log_metric_sync.py | 7 + .../snippet_metadata_logging_v2.json | 1304 ++++++++--------- .../services/cloud_redis/async_client.py | 63 + .../redis_v1/services/cloud_redis/client.py | 63 + ...rated_cloud_redis_create_instance_async.py | 7 + ...erated_cloud_redis_create_instance_sync.py | 7 + ...rated_cloud_redis_delete_instance_async.py | 7 + ...erated_cloud_redis_delete_instance_sync.py | 7 + ...rated_cloud_redis_export_instance_async.py | 7 + ...erated_cloud_redis_export_instance_sync.py | 7 + ...ted_cloud_redis_failover_instance_async.py | 7 + ...ated_cloud_redis_failover_instance_sync.py | 7 + ...enerated_cloud_redis_get_instance_async.py | 7 + ...generated_cloud_redis_get_instance_sync.py | 7 + ...rated_cloud_redis_import_instance_async.py | 7 + ...erated_cloud_redis_import_instance_sync.py | 7 + ...erated_cloud_redis_list_instances_async.py | 7 + ...nerated_cloud_redis_list_instances_sync.py | 7 + ...rated_cloud_redis_update_instance_async.py | 7 + ...erated_cloud_redis_update_instance_sync.py | 7 + ...ated_cloud_redis_upgrade_instance_async.py | 7 + ...rated_cloud_redis_upgrade_instance_sync.py | 7 + .../snippet_metadata_redis_v1.json | 360 ++--- 
...generated_snippets_list_resources_async.py | 7 + ..._generated_snippets_list_resources_sync.py | 7 + ...ed_snippets_method_bidi_streaming_async.py | 7 + ...ted_snippets_method_bidi_streaming_sync.py | 7 + ...ed_snippets_method_lro_signatures_async.py | 7 + ...ted_snippets_method_lro_signatures_sync.py | 7 + ...ted_snippets_method_one_signature_async.py | 7 + ...ated_snippets_method_one_signature_sync.py | 7 + ..._snippets_method_server_streaming_async.py | 7 + ...d_snippets_method_server_streaming_sync.py | 7 + ..._generated_snippets_one_of_method_async.py | 7 + ...pets_one_of_method_required_field_async.py | 7 + ...ppets_one_of_method_required_field_sync.py | 7 + ...1_generated_snippets_one_of_method_sync.py | 7 + .../samplegen/golden_snippets/sample_basic.py | 7 + .../golden_snippets/sample_basic_async.py | 7 + .../sample_basic_unflattenable.py | 7 + .../sample_basic_void_method.py | 7 + 167 files changed, 3175 insertions(+), 1248 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/examples/_generated_sample_comment.j2 mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py mode change 100644 => 
100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py mode change 100644 
=> 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json diff --git a/packages/gapic-generator/gapic/templates/examples/_generated_sample_comment.j2 b/packages/gapic-generator/gapic/templates/examples/_generated_sample_comment.j2 new file mode 100644 index 000000000000..9e926c4900db --- /dev/null +++ b/packages/gapic-generator/gapic/templates/examples/_generated_sample_comment.j2 @@ -0,0 +1,7 @@ +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index c6fb6a085fb5..1da8ecf6b2f9 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -23,6 +23,8 @@ {{ frags.sample_header(sample, calling_form) }} # [START {{ sample.id }}] +{% include "examples/_generated_sample_comment.j2" %} + {# python code is responsible for all transformations: all we do here is render #} {% for import_statement in imports %} {{ import_statement }} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py old mode 100644 new mode 100755 index 9fd39edd2f35..28f1cf2148ac --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -212,6 +212,13 @@ async def export_assets(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_export_assets(): @@ -308,6 +315,13 @@ async def list_assets(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_list_assets(): @@ -421,6 +435,13 @@ async def batch_get_assets_history(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_batch_get_assets_history(): @@ -502,6 +523,13 @@ async def create_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_create_feed(): @@ -612,6 +640,13 @@ async def get_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_get_feed(): @@ -720,6 +755,13 @@ async def list_feeds(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_list_feeds(): @@ -822,6 +864,13 @@ async def update_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_update_feed(): @@ -926,6 +975,13 @@ async def delete_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_delete_feed(): @@ -1021,6 +1077,13 @@ async def search_all_resources(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_search_all_resources(): @@ -1227,6 +1290,13 @@ async def search_all_iam_policies(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_search_all_iam_policies(): @@ -1409,6 +1479,13 @@ async def analyze_iam_policy(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_analyze_iam_policy(): @@ -1503,6 +1580,13 @@ async def analyze_iam_policy_longrunning(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 async def sample_analyze_iam_policy_longrunning(): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py old mode 100644 new mode 100755 index d2ef6794ea32..e9ddfbf6d6aa --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -409,6 +409,13 @@ def export_assets(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_export_assets(): @@ -506,6 +513,13 @@ def list_assets(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_list_assets(): @@ -619,6 +633,13 @@ def batch_get_assets_history(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_batch_get_assets_history(): @@ -694,6 +715,13 @@ def create_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_create_feed(): @@ -804,6 +832,13 @@ def get_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_get_feed(): @@ -905,6 +940,13 @@ def list_feeds(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_list_feeds(): @@ -1000,6 +1042,13 @@ def update_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_update_feed(): @@ -1104,6 +1153,13 @@ def delete_feed(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_delete_feed(): @@ -1192,6 +1248,13 @@ def search_all_resources(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_search_all_resources(): @@ -1391,6 +1454,13 @@ def search_all_iam_policies(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_search_all_iam_policies(): @@ -1566,6 +1636,13 @@ def analyze_iam_policy(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_analyze_iam_policy(): @@ -1655,6 +1732,13 @@ def analyze_iam_policy_longrunning(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 def sample_analyze_iam_policy_longrunning(): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py old mode 100644 new mode 100755 index 46380a92b6e2..f9caabe8e6e1 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py old mode 100644 new mode 100755 index db412762851d..e1305583a82d --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py old mode 100644 new mode 100755 index c3aa140669f8..dd4fb419dad4 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py old mode 100644 new mode 100755 index 9a0a2e54c8b6..3d592e390b7c --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py old mode 100644 new mode 100755 index 36489d63b00e..a54a5e1daafc --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py old mode 100644 new mode 100755 index 680f02c0f5c7..5df919fe4d72 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py old mode 100644 new mode 100755 index 7a0a5bf4bd01..668903c5ee28 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_CreateFeed_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py old mode 100644 new mode 100755 index 612e6e13af6f..ce7b974b87fd --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_CreateFeed_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py old mode 100644 new mode 100755 index 86660f4f2050..0dcea55aade1 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_DeleteFeed_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py old mode 100644 new mode 100755 index ec710e646bb9..735baf2c1d80 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_DeleteFeed_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py old mode 100644 new mode 100755 index aea177ab7ff9..f9ca7f174252 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_ExportAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py old mode 100644 new mode 100755 index c536997de12c..60816961b550 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_ExportAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py old mode 100644 new mode 100755 index d9adab2adc00..b5b9a6e1a391 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_GetFeed_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py old mode 100644 new mode 100755 index 81b3b9adcab1..41d119391094 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_GetFeed_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py old mode 100644 new mode 100755 index 2e48093c8077..65ef19c875f5 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_ListAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py old mode 100644 new mode 100755 index ed8981813fb8..f2e41b183ba1 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_ListAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py old mode 100644 new mode 100755 index ec138b7375b1..88f0613f33b1 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_ListFeeds_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py old mode 100644 new mode 100755 index 2822c78bd066..3ee6b48bdb12 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_ListFeeds_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py old mode 100644 new mode 100755 index 282ea53bcbea..367eae93c841 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py old mode 100644 new mode 100755 index 542da876b8d3..ca656c399840 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py old mode 100644 new mode 100755 index c43226442b15..1757ab26d5b5 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_SearchAllResources_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py old mode 100644 new mode 100755 index c2bf14027ae4..770ee7670ac6 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_SearchAllResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py old mode 100644 new mode 100755 index 1d8dc82b9a59..fe93ab55ce55 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_UpdateFeed_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py old mode 100644 new mode 100755 index e48a0b6684f1..320e04de983b --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -24,6 +24,13 @@ # [START cloudasset_v1_generated_AssetService_UpdateFeed_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import asset_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json old mode 100644 new mode 100755 index 148bb0b4dc01..7843f9af6ef5 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json @@ -55,33 +55,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 34, + "end": 52, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -131,33 +131,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicyLongrunning_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 34, + "end": 52, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -208,33 +208,33 @@ 
"regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_async", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -284,33 +284,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeIamPolicy_sync", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -361,33 +361,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -437,33 +437,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": 
"SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -518,33 +518,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -598,33 +598,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -678,31 +678,31 @@ "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + 
"start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -755,31 +755,31 @@ "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -830,33 +830,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -906,33 +906,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -987,33 +987,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 
44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1067,33 +1067,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1148,33 +1148,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1228,33 +1228,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1309,33 +1309,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1389,33 +1389,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1474,33 +1474,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ 
-1558,33 +1558,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_SearchAllIamPolicies_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1647,33 +1647,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1735,33 +1735,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_SearchAllResources_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1816,33 +1816,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_async", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, 
"start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -1896,33 +1896,33 @@ "regionTag": "cloudasset_v1_generated_AssetService_UpdateFeed_sync", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py old mode 100644 new mode 100755 index 7c75fb16f8e3..28f232336332 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -210,6 +210,13 @@ async def generate_access_token(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 async def sample_generate_access_token(): @@ -363,6 +370,13 @@ async def generate_id_token(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 async def sample_generate_id_token(): @@ -509,6 +523,13 @@ async def sign_blob(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 async def sample_sign_blob(): @@ -642,6 +663,13 @@ async def sign_jwt(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 async def sample_sign_jwt(): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py old mode 100644 new mode 100755 index f94c44d21a9f..8303262fc7c8 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -398,6 +398,13 @@ def generate_access_token(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 def sample_generate_access_token(): @@ -544,6 +551,13 @@ def generate_id_token(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 def sample_generate_id_token(): @@ -683,6 +697,13 @@ def sign_blob(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 def sample_sign_blob(): @@ -809,6 +830,13 @@ def sign_jwt(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 def sample_sign_jwt(): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py old mode 100644 new mode 100755 index 825f6c072c35..0650407c09a2 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py old mode 100644 new mode 100755 index 16be47cf95a7..cea70e026d40 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py old mode 100644 new mode 100755 index 5f63373078ac..fca37957ef11 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py old mode 100644 new mode 100755 index 039d59763441..982dfd05b051 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py old mode 100644 new mode 100755 index fffa6de4bc73..315a82abde5f --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_SignBlob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py old mode 100644 new mode 100755 index e04b7b589440..83f5193c443c --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_SignBlob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py old mode 100644 new mode 100755 index eae43440b8fd..9d897adc4a68 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_SignJwt_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py old mode 100644 new mode 100755 index e0eed353c542..ca9f76b3592f --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -24,6 +24,13 @@ # [START iamcredentials_v1_generated_IAMCredentials_SignJwt_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.iam import credentials_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json old mode 100644 new mode 100755 index ba587abf1c65..35377b0f7cfb --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json @@ -71,33 +71,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -163,33 +163,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateAccessToken_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": 
"RESPONSE_HANDLING" } ], @@ -256,33 +256,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -348,33 +348,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_GenerateIdToken_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -437,33 +437,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignBlob_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -525,33 +525,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignBlob_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - 
"end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -614,33 +614,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignJwt_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -702,33 +702,33 @@ "regionTag": "iamcredentials_v1_generated_IAMCredentials_SignJwt_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py old mode 100644 new mode 100755 index 0c3a1af1b10d..2d6f5252e85e --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -208,6 +208,13 @@ async def get_trigger(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 async def sample_get_trigger(): @@ -302,6 +309,13 @@ async def list_triggers(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 async def sample_list_triggers(): @@ -412,6 +426,13 @@ async def create_trigger(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 async def sample_create_trigger(): @@ -549,6 +570,13 @@ async def update_trigger(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 async def sample_update_trigger(): @@ -678,6 +706,13 @@ async def delete_trigger(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 async def sample_delete_trigger(): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py old mode 100644 new mode 100755 index b24b0d1599a6..d46c6e44133d --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -414,6 +414,13 @@ def get_trigger(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 def sample_get_trigger(): @@ -508,6 +515,13 @@ def list_triggers(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 def sample_list_triggers(): @@ -618,6 +632,13 @@ def create_trigger(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 def sample_create_trigger(): @@ -755,6 +776,13 @@ def update_trigger(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 def sample_update_trigger(): @@ -884,6 +912,13 @@ def delete_trigger(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 def sample_delete_trigger(): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py old mode 100644 new mode 100755 index 4e94d14f9e33..958bbd175557 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_CreateTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py old mode 100644 new mode 100755 index bfbe4ab89235..998fdb703976 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_CreateTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py old mode 100644 new mode 100755 index d55bba5a923e..fc2b876000eb --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_DeleteTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py old mode 100644 new mode 100755 index 1e421fbaeda2..880f2bad4a9b --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_DeleteTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py old mode 100644 new mode 100755 index 028b5c3732ca..228e7e550a79 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_GetTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py old mode 100644 new mode 100755 index 93b3510145c5..ef1277a73fc6 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_GetTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py old mode 100644 new mode 100755 index 5c5ed4475183..a7a99b5a0f6a --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_ListTriggers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py old mode 100644 new mode 100755 index b036610806f2..ea6b79848847 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_ListTriggers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py old mode 100644 new mode 100755 index 42f8321c77db..9d9b886721bb --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_UpdateTrigger_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py old mode 100644 new mode 100755 index 84518de7ef52..b17b34a63b52 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -24,6 +24,13 @@ # [START eventarc_v1_generated_Eventarc_UpdateTrigger_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import eventarc_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json old mode 100644 new mode 100755 index e9ca79d81d95..3d364cea8077 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json @@ -67,33 +67,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", "segments": [ { - "end": 58, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 65, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 49, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], @@ -155,33 +155,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", "segments": [ { - "end": 58, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 58, + "end": 65, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 55, - "start": 49, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 59, - "start": 56, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], @@ -240,33 +240,33 @@ "regionTag": 
"eventarc_v1_generated_Eventarc_DeleteTrigger_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 40, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -324,33 +324,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 40, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -405,33 +405,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -485,33 +485,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 
38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -566,33 +566,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -646,33 +646,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -735,33 +735,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_UpdateTrigger_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 
49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -823,33 +823,33 @@ "regionTag": "eventarc_v1_generated_Eventarc_UpdateTrigger_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py old mode 100644 new mode 100755 index 4ecc37923f29..da931475a8d8 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -205,6 +205,13 @@ async def list_buckets(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_buckets(): @@ -320,6 +327,13 @@ async def get_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_bucket(): @@ -393,6 +407,13 @@ async def create_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_bucket(): @@ -475,6 +496,13 @@ async def update_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_bucket(): @@ -548,6 +576,13 @@ async def delete_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_bucket(): @@ -610,6 +645,13 @@ async def undelete_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_undelete_bucket(): @@ -672,6 +714,13 @@ async def list_views(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_views(): @@ -779,6 +828,13 @@ async def get_view(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_view(): @@ -853,6 +909,13 @@ async def create_view(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_view(): @@ -928,6 +991,13 @@ async def update_view(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_view(): @@ -1001,6 +1071,13 @@ async def delete_view(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_view(): @@ -1063,6 +1140,13 @@ async def list_sinks(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_sinks(): @@ -1183,6 +1267,13 @@ async def get_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_sink(): @@ -1304,6 +1395,13 @@ async def create_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_sink(): @@ -1434,6 +1532,13 @@ async def update_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_sink(): @@ -1588,6 +1693,13 @@ async def delete_sink(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_sink(): @@ -1686,6 +1798,13 @@ async def list_exclusions(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_exclusions(): @@ -1806,6 +1925,13 @@ async def get_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_exclusion(): @@ -1929,6 +2055,13 @@ async def create_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_exclusion(): @@ -2059,6 +2192,13 @@ async def update_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_exclusion(): @@ -2201,6 +2341,13 @@ async def delete_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_exclusion(): @@ -2307,6 +2454,13 @@ async def get_cmek_settings(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_cmek_settings(): @@ -2408,6 +2562,13 @@ async def update_cmek_settings(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_cmek_settings(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py old mode 100644 new mode 100755 index 764529bc9634..7f4af3e10838 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -429,6 +429,13 @@ def list_buckets(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_buckets(): @@ -544,6 +551,13 @@ def get_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_bucket(): @@ -618,6 +632,13 @@ def create_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_bucket(): @@ -701,6 +722,13 @@ def update_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_bucket(): @@ -775,6 +803,13 @@ def delete_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_bucket(): @@ -838,6 +873,13 @@ def undelete_bucket(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_undelete_bucket(): @@ -901,6 +943,13 @@ def list_views(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_views(): @@ -1008,6 +1057,13 @@ def get_view(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_view(): @@ -1083,6 +1139,13 @@ def create_view(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_view(): @@ -1159,6 +1222,13 @@ def update_view(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_view(): @@ -1233,6 +1303,13 @@ def delete_view(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_view(): @@ -1296,6 +1373,13 @@ def list_sinks(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_sinks(): @@ -1408,6 +1492,13 @@ def get_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_sink(): @@ -1521,6 +1612,13 @@ def create_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_sink(): @@ -1651,6 +1749,13 @@ def update_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_sink(): @@ -1797,6 +1902,13 @@ def delete_sink(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_sink(): @@ -1887,6 +1999,13 @@ def list_exclusions(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_exclusions(): @@ -1999,6 +2118,13 @@ def get_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_exclusion(): @@ -2114,6 +2240,13 @@ def create_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_exclusion(): @@ -2244,6 +2377,13 @@ def update_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_exclusion(): @@ -2386,6 +2526,13 @@ def delete_exclusion(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_exclusion(): @@ -2484,6 +2631,13 @@ def get_cmek_settings(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_cmek_settings(): @@ -2586,6 +2740,13 @@ def update_cmek_settings(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_cmek_settings(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py old mode 100644 new mode 100755 index f74115ea9a2d..2f20f04b9a94 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -201,6 +201,13 @@ async def delete_log(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_log(): @@ -311,6 +318,13 @@ async def write_log_entries(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_write_log_entries(): @@ -498,6 +512,13 @@ async def list_log_entries(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_log_entries(): @@ -650,6 +671,13 @@ async def list_monitored_resource_descriptors(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_monitored_resource_descriptors(): @@ -739,6 +767,13 @@ async def list_logs(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_logs(): @@ -859,6 +894,13 @@ def tail_log_entries(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_tail_log_entries(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py old mode 100644 new mode 100755 index c386f0920f42..e278c6deac03 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -389,6 +389,13 @@ def delete_log(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_log(): @@ -491,6 +498,13 @@ def write_log_entries(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_write_log_entries(): @@ -669,6 +683,13 @@ def list_log_entries(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_log_entries(): @@ -813,6 +834,13 @@ def list_monitored_resource_descriptors(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_monitored_resource_descriptors(): @@ -895,6 +923,13 @@ def list_logs(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_logs(): @@ -1007,6 +1042,13 @@ def tail_log_entries(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_tail_log_entries(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py old mode 100644 new mode 100755 index 2368d8a72805..a64b92d72fd8 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -198,6 +198,13 @@ async def list_log_metrics(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_list_log_metrics(): @@ -315,6 +322,13 @@ async def get_log_metric(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_get_log_metric(): @@ -429,6 +443,13 @@ async def create_log_metric(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_create_log_metric(): @@ -553,6 +574,13 @@ async def update_log_metric(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_update_log_metric(): @@ -683,6 +711,13 @@ async def delete_log_metric(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 async def sample_delete_log_metric(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py old mode 100644 new mode 100755 index 061b0b332f98..b4bcfb5e1e1d --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -386,6 +386,13 @@ def list_log_metrics(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_list_log_metrics(): @@ -495,6 +502,13 @@ def get_log_metric(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_get_log_metric(): @@ -601,6 +615,13 @@ def create_log_metric(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_create_log_metric(): @@ -725,6 +746,13 @@ def update_log_metric(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_update_log_metric(): @@ -847,6 +875,13 @@ def delete_log_metric(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 def sample_delete_log_metric(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py old mode 100644 new mode 100755 index 1c5c329c802c..f399b226f588 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py old mode 100644 new mode 100755 index 9b309322914f..374173f52b55 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py old mode 100644 new mode 100755 index 5be1a9ad3da5..8f14d777f8cb --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py old mode 100644 new mode 100755 index 3b57560f3cec..11bd92ad58fa --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py old mode 100644 new mode 100755 index 789598d4c7f2..0fc007986813 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py old mode 100644 new mode 100755 index e22bc60555c5..d6d2a0d7db7a --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py old mode 100644 new mode 100755 index 499d4eeba920..016f3e70b081 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py old mode 100644 new mode 100755 index 8e6425d712b1..3ee9567f1a8c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_CreateView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py old mode 100644 new mode 100755 index def3e5abcb4a..6633aa3c4344 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py old mode 100644 new mode 100755 index 64c95c992455..ec39ca1a09b7 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py old mode 100644 new mode 100755 index 4c042c3bec23..9d6146c57c8b --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py old mode 100644 new mode 100755 index dc313658435a..bc051e4e025e --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py old mode 100644 new mode 100755 index fe5acb523f2d..62a27ea1d1a8 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py old mode 100644 new mode 100755 index d9ddc66a02a4..fa7d6f6e7b91 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py old mode 100644 new mode 100755 index fd1eee969886..2fed68bbcbe9 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py old mode 100644 new mode 100755 index 1169b400046e..53bda04c937e --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_DeleteView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py old mode 100644 new mode 100755 index 4b964aa7435d..1a91ecdd63d9 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py old mode 100644 new mode 100755 index 1b299dd569ac..337050c45a7e --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py old mode 100644 new mode 100755 index 356f0db9fbf1..6998c4b4af97 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py old mode 100644 new mode 100755 index 83dfc8d2c997..a91ec6042d7a --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py old mode 100644 new mode 100755 index 27a7644458dc..83a5bcda4016 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py old mode 100644 new mode 100755 index 980914dac1cb..913ec9968d8c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py old mode 100644 new mode 100755 index fa3d7cf7f5b6..0739e175be61 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py old mode 100644 new mode 100755 index 48581e4706bc..ff34156f3826 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py old mode 100644 new mode 100755 index 9f26a54178fa..5de975ecc9cd --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py old mode 100644 new mode 100755 index f88c15d2e30f..a9818b572059 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_GetView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py old mode 100644 new mode 100755 index 4e3bfea5582b..614e9ec66b6c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListBuckets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py old mode 100644 new mode 100755 index 3522c4c8979d..0c7912f7f038 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListBuckets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py old mode 100644 new mode 100755 index 788436d6ad15..a1aa5ed6f4dc --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListExclusions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py old mode 100644 new mode 100755 index 8ea9407a1066..8cdac9d12438 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListExclusions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py old mode 100644 new mode 100755 index b43b5682a28e..ea81f5c8b29c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListSinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py old mode 100644 new mode 100755 index 235395e6d593..05a35323d6bb --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListSinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py old mode 100644 new mode 100755 index 27910c9f7cfa..c39fb2d9e0a1 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListViews_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py old mode 100644 new mode 100755 index 2e5b6e53b3d1..270e5c14578f --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_ListViews_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py old mode 100644 new mode 100755 index 020866b755b9..eeca015d1b41 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py old mode 100644 new mode 100755 index 0dfb39a11a07..6355e2a62226 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py old mode 100644 new mode 100755 index 78245abfc6f0..450f5662a3fc --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py old mode 100644 new mode 100755 index c285fd542862..81aa0fb920f4 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py old mode 100644 new mode 100755 index 8d49b85e7b8c..e1a2421fec56 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py old mode 100644 new mode 100755 index 7b04208d4c58..1828ce84c3a7 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py old mode 100644 new mode 100755 index d06cf80d4f63..873fecb737b1 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py old mode 100644 new mode 100755 index c0dba34ccb9e..958572b9301a --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py old mode 100644 new mode 100755 index c46b9ab427d2..400d57897c58 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateSink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py old mode 100644 new mode 100755 index 9639ece285e4..cc3a1be435c6 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateSink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py old mode 100644 new mode 100755 index 250d3f9dc881..8ccc9f3c3319 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateView_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py old mode 100644 new mode 100755 index 1397848800fe..33014bf23658 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_ConfigServiceV2_UpdateView_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py old mode 100644 new mode 100755 index 6338b9abcbd0..209dd510d93c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_DeleteLog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py old mode 100644 new mode 100755 index 36280057bfe7..86f6c9ccb3aa --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_DeleteLog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py old mode 100644 new mode 100755 index fa7ad59dbac4..d968835f75a2 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py old mode 100644 new mode 100755 index 29d1dc7d13fd..d200793fa90c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py old mode 100644 new mode 100755 index fb0106199bd0..eebad0bfd01e --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py old mode 100644 new mode 100755 index 0f775572f57b..8d132377e740 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListLogs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py old mode 100644 new mode 100755 index b8f3397012de..4b99bc6f038d --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py old mode 100644 new mode 100755 index 736d64d614d1..c54a2201fc14 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py old mode 100644 new mode 100755 index 7a937948cd0d..f1afa6fae199 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_TailLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py old mode 100644 new mode 100755 index a38e7b14d389..29461c6f8613 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py old mode 100644 new mode 100755 index 28025d777037..bd7954828811 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py old mode 100644 new mode 100755 index 31569811c655..d28fbe7a037c --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py old mode 100644 new mode 100755 index 96690c2c36b4..d351980e9679 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py old mode 100644 new mode 100755 index 051694d31470..bb9a56579e6f --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py old mode 100644 new mode 100755 index bf2ee5e4abc2..54a73b14122e --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py old mode 100644 new mode 100755 index eae109200224..73276ef182fb --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py old mode 100644 new mode 100755 index cea94a356e29..d6ef03c486b6 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_GetLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py old mode 100644 new mode 100755 index eea36222a80b..6ab2bb57fd83 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_GetLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py old mode 100644 new mode 100755 index 9dac7793736b..92c92395a253 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py old mode 100644 new mode 100755 index 97b3c2f1364b..5a3e646926f7 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py old mode 100644 new mode 100755 index c94c70e76264..9a794a4ccb6d --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py old mode 100644 new mode 100755 index bcdff32693bc..39a6e72e3072 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -24,6 +24,13 @@ # [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json old mode 100644 new mode 100755 index a72b4aebc560..e26012d6452e --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json @@ -55,33 +55,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -131,33 +131,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -216,33 +216,33 @@ "regionTag": 
"logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -300,33 +300,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -385,33 +385,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -469,33 +469,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 
31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -546,33 +546,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -622,33 +622,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -698,31 +698,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" 
}, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -771,31 +771,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -849,31 +849,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -926,31 +926,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1004,31 +1004,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" 
}, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1081,31 +1081,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1155,31 +1155,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1228,31 +1228,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1303,33 +1303,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, 
{ - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1379,33 +1379,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1456,33 +1456,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1532,33 +1532,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, 
"type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1613,33 +1613,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1693,33 +1693,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1774,33 +1774,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], 
@@ -1854,33 +1854,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1931,33 +1931,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2007,33 +2007,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2088,33 +2088,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, 
{ - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2168,33 +2168,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2249,33 +2249,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2329,33 +2329,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - 
"start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2410,33 +2410,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2490,33 +2490,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2571,33 +2571,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2651,33 +2651,33 @@ "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2727,31 +2727,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -2800,31 +2800,31 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -2875,33 +2875,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 
45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2951,33 +2951,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3028,33 +3028,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3104,33 +3104,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, 
"type": "RESPONSE_HANDLING" } ], @@ -3193,33 +3193,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3281,33 +3281,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3370,33 +3370,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3458,33 +3458,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - 
"end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -3535,33 +3535,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3611,33 +3611,33 @@ "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3691,31 +3691,31 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3768,31 +3768,31 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3855,33 +3855,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3943,33 +3943,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4024,33 +4024,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", "segments": [ { - 
"end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4104,33 +4104,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4181,33 +4181,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -4257,33 +4257,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, 
"type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 41, + "end": 52, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -4334,33 +4334,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -4410,33 +4410,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 48, - "start": 34, + "end": 55, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 49, + "end": 58, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 52, + "end": 63, + "start": 59, "type": "RESPONSE_HANDLING" } ], @@ -4503,33 +4503,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": 
"REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4595,33 +4595,33 @@ "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -4680,33 +4680,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -4764,33 +4764,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -4844,31 +4844,31 @@ "regionTag": 
"logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -4921,31 +4921,31 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -5000,33 +5000,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5080,33 +5080,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, 
+ "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5161,33 +5161,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5241,33 +5241,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -5326,33 +5326,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + 
"start": 54, "type": "RESPONSE_HANDLING" } ], @@ -5410,33 +5410,33 @@ "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 44, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py old mode 100644 new mode 100755 index 97a99adf33d0..d8a9a13b40cb --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -230,6 +230,13 @@ async def list_instances(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_list_instances(): @@ -339,6 +346,13 @@ async def get_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_get_instance(): @@ -447,6 +461,13 @@ async def create_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_create_instance(): @@ -594,6 +615,13 @@ async def update_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_update_instance(): @@ -726,6 +754,13 @@ async def upgrade_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_upgrade_instance(): @@ -855,6 +890,13 @@ async def import_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_import_instance(): @@ -983,6 +1025,13 @@ async def export_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_export_instance(): @@ -1109,6 +1158,13 @@ async def failover_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_failover_instance(): @@ -1230,6 +1286,13 @@ async def delete_instance(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 async def sample_delete_instance(): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py old mode 100644 new mode 100755 index 6f439999bcb9..d84ba29430f1 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -418,6 +418,13 @@ def list_instances(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_list_instances(): @@ -527,6 +534,13 @@ def get_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_get_instance(): @@ -635,6 +649,13 @@ def create_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_create_instance(): @@ -782,6 +803,13 @@ def update_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_update_instance(): @@ -914,6 +942,13 @@ def upgrade_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_upgrade_instance(): @@ -1043,6 +1078,13 @@ def import_instance(self, .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_import_instance(): @@ -1171,6 +1213,13 @@ def export_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_export_instance(): @@ -1297,6 +1346,13 @@ def failover_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_failover_instance(): @@ -1418,6 +1474,13 @@ def delete_instance(self, .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 def sample_delete_instance(): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py old mode 100644 new mode 100755 index fd2dd59fed3a..2e9194f99935 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py old mode 100644 new mode 100755 index f90f1e07aa36..7eed2cf2e0a8 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py old mode 100644 new mode 100755 index e75a5e9dd2a7..38769231234a --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py old mode 100644 new mode 100755 index bd3aeb3fc9cd..625687593833 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py old mode 100644 new mode 100755 index 99579233fc58..6770074db514 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_ExportInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py old mode 100644 new mode 100755 index a5c8c94bddc2..39eaa613b3b4 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_ExportInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py old mode 100644 new mode 100755 index 5430b08c1d4a..10b7b450b933 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_FailoverInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py old mode 100644 new mode 100755 index 48f45a18336c..1c735ff82f1e --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_FailoverInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py old mode 100644 new mode 100755 index 916207eef803..93294bca1eb2 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py old mode 100644 new mode 100755 index 3b62906b0b9a..69cb243a5069 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py old mode 100644 new mode 100755 index 4e5424858e96..36a58d830b48 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_ImportInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py old mode 100644 new mode 100755 index 1d1f3f8b5c2f..47995a43b688 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_ImportInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py old mode 100644 new mode 100755 index 2711798851d5..087145fca3da --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py old mode 100644 new mode 100755 index f19416d028db..ef788e11feec --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py old mode 100644 new mode 100755 index 50d6f22a3846..6d1f1704a7eb --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py old mode 100644 new mode 100755 index 6cf9a6668bdc..a805e2417bfd --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py old mode 100644 new mode 100755 index a5aee00e7755..9dfd7b08de65 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_UpgradeInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py old mode 100644 new mode 100755 index 02c68da78509..fc02e45b9313 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -24,6 +24,13 @@ # [START redis_v1_generated_CloudRedis_UpgradeInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import redis_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json old mode 100644 new mode 100755 index 3d687b63c092..9bdcf65a3081 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json @@ -67,33 +67,33 @@ "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_async", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 34, + "end": 52, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -155,33 +155,33 @@ "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_sync", "segments": [ { - "end": 55, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 62, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 45, - "start": 34, + "end": 52, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -236,33 +236,33 @@ "regionTag": 
"redis_v1_generated_CloudRedis_DeleteInstance_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -316,33 +316,33 @@ "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -401,33 +401,33 @@ "regionTag": "redis_v1_generated_CloudRedis_ExportInstance_async", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -485,33 +485,33 @@ "regionTag": "redis_v1_generated_CloudRedis_ExportInstance_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + 
"start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -570,33 +570,33 @@ "regionTag": "redis_v1_generated_CloudRedis_FailoverInstance_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -654,33 +654,33 @@ "regionTag": "redis_v1_generated_CloudRedis_FailoverInstance_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 39, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -735,33 +735,33 @@ "regionTag": "redis_v1_generated_CloudRedis_GetInstance_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { 
- "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -815,33 +815,33 @@ "regionTag": "redis_v1_generated_CloudRedis_GetInstance_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -900,33 +900,33 @@ "regionTag": "redis_v1_generated_CloudRedis_ImportInstance_async", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -984,33 +984,33 @@ "regionTag": "redis_v1_generated_CloudRedis_ImportInstance_sync", "segments": [ { - "end": 52, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 59, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 43, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], @@ -1065,33 +1065,33 @@ "regionTag": "redis_v1_generated_CloudRedis_ListInstances_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, 
"type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1145,33 +1145,33 @@ "regionTag": "redis_v1_generated_CloudRedis_ListInstances_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1230,33 +1230,33 @@ "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_async", "segments": [ { - "end": 53, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 60, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 44, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -1314,33 +1314,33 @@ "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_sync", "segments": [ { - "end": 53, + "end": 60, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 60, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 43, - "start": 34, + "end": 50, + "start": 41, 
"type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 44, + "end": 57, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 61, + "start": 58, "type": "RESPONSE_HANDLING" } ], @@ -1399,33 +1399,33 @@ "regionTag": "redis_v1_generated_CloudRedis_UpgradeInstance_async", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 40, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], @@ -1483,33 +1483,33 @@ "regionTag": "redis_v1_generated_CloudRedis_UpgradeInstance_sync", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 46, - "start": 40, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, - "start": 47, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index 0c28b8ebcb70..31979bd6cfc6 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_ListResources_async] +# This snippet has been 
automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index fb92e25300e8..b5063387739f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_ListResources_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index 3f1f31a87a02..aa07573e88b0 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodBidiStreaming_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index d9874ea670fb..ff38c6742da3 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodBidiStreaming_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index 8d7f588ca427..588e8a2bc8cb 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodLroSignatures_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index 0620df227bfd..4fa56bee0609 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodLroSignatures_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index 3303a4ece0e6..ae8ffbd1e074 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodOneSignature_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index bccffa76fc01..37ff6c9eefc0 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodOneSignature_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index c13f1ceeabe1..508e54665437 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodServerStreaming_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index 6c886fc0e337..5c4e056dc256 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_MethodServerStreaming_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index 85f712b4ddce..9fd7592c7a1f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_OneOfMethod_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index 60293b9108c0..0677e4f5b889 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_OneOfMethodRequiredField_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index e427930852fc..bf709e649969 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_OneOfMethodRequiredField_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index b45ffcc97bd4..b59e3369fe6b 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -24,6 +24,13 @@ # [START mollusca_v1_generated_Snippets_OneOfMethod_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from animalia import mollusca_v1 diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index e36c7c6bc366..d8f4cb53cbad 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -24,6 +24,13 @@ # [START mollusc_classify_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from molluscs.v1 import molluscclient diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 4ba792333173..2b6d88360b03 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -24,6 +24,13 @@ # [START mollusc_classify_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from molluscs.v1 import molluscclient diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index e36c7c6bc366..d8f4cb53cbad 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -24,6 +24,13 @@ # [START mollusc_classify_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from molluscs.v1 import molluscclient diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index f47b9c124453..d9f3269019c7 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -24,6 +24,13 @@ # [START mollusc_classify_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from molluscs.v1 import molluscclient From 3f5e63a49ce674ea703b2f572dc11a7ed8d61330 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Sep 2022 14:42:35 -0400 Subject: [PATCH 0882/1339] chore(main): release 1.3.0 (#1401) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 20 ++++++++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index bbb9cb20c1f5..f660de7f5613 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,26 @@ # Changelog +## [1.3.0](https://github.com/googleapis/gapic-generator-python/compare/v1.2.0...v1.3.0) (2022-09-06) + + +### Features + +* Add 
BUILD rule parameter to allow setting numeric enums ([#1411](https://github.com/googleapis/gapic-generator-python/issues/1411)) ([5c578ed](https://github.com/googleapis/gapic-generator-python/commit/5c578ed371b5f33814e67217981898bd76687184)) +* Add generated sample comment ([#1417](https://github.com/googleapis/gapic-generator-python/issues/1417)) ([ef55bce](https://github.com/googleapis/gapic-generator-python/commit/ef55bce5b8e9cc5d1869fe0472d6221b58409908)) +* **docker-entrypoint:** Add --experimental_allow_proto3_optional ([#1414](https://github.com/googleapis/gapic-generator-python/issues/1414)) ([b92ab8c](https://github.com/googleapis/gapic-generator-python/commit/b92ab8ce739663e9bb68fe9c736e851042301023)) +* Encode numeric enums parameter with REST requests ([#1399](https://github.com/googleapis/gapic-generator-python/issues/1399)) ([63599bb](https://github.com/googleapis/gapic-generator-python/commit/63599bbeee1842f6219590bdc67498a942f3523c)) +* Make REST unit tests support numeric enums ([#1423](https://github.com/googleapis/gapic-generator-python/issues/1423)) ([8839c6f](https://github.com/googleapis/gapic-generator-python/commit/8839c6fc593d62a10a069cb28f72cd76f080ecc5)) +* Note that "rest" transport support is beta. 
([#1403](https://github.com/googleapis/gapic-generator-python/issues/1403)) ([faba515](https://github.com/googleapis/gapic-generator-python/commit/faba515775265b21557b5820106a73a2eb8344e2)) +* When requesting numeric enums in responses, also send them in requests ([#1405](https://github.com/googleapis/gapic-generator-python/issues/1405)) ([31b1b16](https://github.com/googleapis/gapic-generator-python/commit/31b1b163a0433ea22e1a7f2295b5671406feb5fa)) + + +### Bug Fixes + +* Fix remaining REST transport issues ([#1428](https://github.com/googleapis/gapic-generator-python/issues/1428)) ([d30a80e](https://github.com/googleapis/gapic-generator-python/commit/d30a80ee7e95eef90a567f80f7c7414bdb52e0ac)) +* Fix REST tests generation for repeated enums ([#1421](https://github.com/googleapis/gapic-generator-python/issues/1421)) ([488ddf8](https://github.com/googleapis/gapic-generator-python/commit/488ddf8b69793274b57c2d9a76463818a3dd76bc)) +* Partial rollback of https ([20c3403](https://github.com/googleapis/gapic-generator-python/commit/20c340385ae6539f86f3e0fc1d592c94c675e695)) + ## [1.2.0](https://github.com/googleapis/gapic-generator-python/compare/v1.1.2...v1.2.0) (2022-08-12) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 7e569d8cac99..8faf1dabfa8d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.2.0" +version = "1.3.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 4a933688205c065f84fd717277bd22103139b0b8 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 7 Sep 2022 16:50:14 +0200 Subject: [PATCH 0883/1339] chore(deps): remove dependency dataclasses (#1432) * chore(deps): update dependency dataclasses to <0.9 * remove dependency dataclasses Co-authored-by: 
Anthonios Partheniou --- packages/gapic-generator/setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8faf1dabfa8d..e8b99c9b99d7 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -33,7 +33,6 @@ "protobuf >= 3.18.0, <4.0.0dev", "pypandoc >= 1.4", "PyYAML >= 5.1.1", - "dataclasses < 0.8; python_version < '3.7'", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", ] From f3d1a3e7d945734570069f761b5006901e58e03c Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Wed, 7 Sep 2022 08:57:25 -0700 Subject: [PATCH 0884/1339] fix: Fix LRO method test and rest test coverage (#1433) This does: 1) fixes LRO method mock value which for whatever reason worked under proto 3.x.x (cpp implementation) but does not work under proto 4 2) Add `NO COVER` to mock imports in tests as those lines are mutually exclusive depending on the python version it is running under. 
Co-authored-by: Anthonios Partheniou --- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 2 +- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 4 ++-- .../tests/unit/gapic/credentials_v1/test_iam_credentials.py | 4 ++-- .../eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py | 4 ++-- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 4 ++-- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 4 ++-- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 4 ++-- .../redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 4 ++-- 9 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3e166c029c7b..9207a1d9b60b 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -7,8 +7,8 @@ import os # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 532070be3bb8..731482be48b8 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1172,7 +1172,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata {% if not method.void %} - post.return_value = {{ method.output.ident }} + post.return_value = {{ method.output.ident }}() {% endif %} client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index beca979169c7..44b675b470cf 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -17,8 +17,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index b0d869d6bbca..56311032b9a0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -17,8 +17,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 566ed4a4779d..7e74a1ebe3e2 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -17,8 +17,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index d05326717171..827611076f9e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -17,8 +17,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 0094c686fec1..76a58681929f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -17,8 +17,8 @@ 
# try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 782bb603aaef..dbd281a03447 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -17,8 +17,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 510c48270b42..ad48f0866de7 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -17,8 +17,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc From 2126669cac1adb94fb99cd577e4cf77f7a3fa65e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: 
Wed, 7 Sep 2022 14:09:48 -0400 Subject: [PATCH 0885/1339] chore(main): release 1.3.1 (#1434) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f660de7f5613..a71692b38f92 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.3.1](https://github.com/googleapis/gapic-generator-python/compare/v1.3.0...v1.3.1) (2022-09-07) + + +### Bug Fixes + +* Fix LRO method test and rest test coverage ([#1433](https://github.com/googleapis/gapic-generator-python/issues/1433)) ([c57a93f](https://github.com/googleapis/gapic-generator-python/commit/c57a93fee8b7861e6396c0e34586ffa00478ce25)) + ## [1.3.0](https://github.com/googleapis/gapic-generator-python/compare/v1.2.0...v1.3.0) (2022-09-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e8b99c9b99d7..5749639b375b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.3.0" +version = "1.3.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 2a5a14049d6bc1d19ee4b2f68e99eb70955b936b Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Wed, 7 Sep 2022 16:25:29 -0700 Subject: [PATCH 0886/1339] feat: Implement REST support for MixIns (#1378) Implements MixIns support in REST. The implementation is different than what was used for GRPC version. Special wrappers for MixIns are created to make the code less repetitive. 
Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/gapic/schema/api.py | 25 +++ .../gapic-generator/gapic/schema/mixins.py | 68 ++++++++ .../gapic-generator/gapic/schema/wrappers.py | 26 +++ .../%service/transports/_rest_mixins.py.j2 | 93 +++++++++++ .../services/%service/transports/rest.py.j2 | 32 +++- .../%name_%version/%sub/_test_mixins.py.j2 | 64 ++++++++ .../%name_%version/%sub/test_%service.py.j2 | 1 + .../tests/unit/schema/test_api.py | 152 ++++++++++++++++++ .../tests/unit/schema/wrappers/test_method.py | 17 ++ 9 files changed, 477 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/gapic/schema/mixins.py create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index e5132f33840e..19e1746347c4 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -43,6 +43,7 @@ from google.protobuf.descriptor_pb2 import MethodDescriptorProto from google.api import annotations_pb2 # type: ignore from gapic.schema import metadata +from gapic.schema import mixins from gapic.schema import wrappers from gapic.schema import naming as api_naming from gapic.utils import cached_property @@ -515,6 +516,16 @@ def get_custom_operation_service(self, method: "wrappers.Method") -> "wrappers.S return op_serv + @cached_property + def mixin_api_signatures(self): + """Compile useful info about MixIn API signatures. + + Returns: + Mapping[str, wrappers.MixinMethod]: Useful info + about MixIn methods present for the main API. 
+ """ + return {name: mixins.MIXINS_MAP[name] for name in self.mixin_api_methods} + @cached_property def mixin_api_methods(self) -> Dict[str, MethodDescriptorProto]: methods: Dict[str, MethodDescriptorProto] = {} @@ -529,6 +540,20 @@ def mixin_api_methods(self) -> Dict[str, MethodDescriptorProto]: self._get_methods_from_service(operations_pb2)} return methods + @cached_property + def mixin_http_options(self): + """Gather HTTP options for the MixIn methods.""" + api_methods = self.mixin_api_methods + res = {} + for s in api_methods: + m = api_methods[s] + http = m.options.Extensions[annotations_pb2.http] + http_options = [http] + list(http.additional_bindings) + opt_gen = (wrappers.MixinHttpRule.try_parse_http_rule(http_rule) + for http_rule in http_options) + res[s] = [rule for rule in opt_gen if rule] + return res + @cached_property def has_location_mixin(self) -> bool: return len(list(filter(lambda api: api.name == "google.cloud.location.Locations", self.service_yaml_config.apis))) > 0 diff --git a/packages/gapic-generator/gapic/schema/mixins.py b/packages/gapic-generator/gapic/schema/mixins.py new file mode 100644 index 000000000000..89fccdd573dc --- /dev/null +++ b/packages/gapic-generator/gapic/schema/mixins.py @@ -0,0 +1,68 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from gapic.schema import wrappers + +MIXINS_MAP = { + 'DeleteOperation': wrappers.MixinMethod( + 'DeleteOperation', + request_type='operations_pb2.DeleteOperationRequest', + response_type='None' + ), + 'WaitOperation': wrappers.MixinMethod( + 'WaitOperation', + request_type='operations_pb2.WaitOperationRequest', + response_type='operations_pb2.Operation' + ), + 'ListOperations': wrappers.MixinMethod( + 'ListOperations', + request_type='operations_pb2.ListOperationsRequest', + response_type='operations_pb2.ListOperationsResponse' + ), + 'CancelOperation': wrappers.MixinMethod( + 'CancelOperation', + request_type='operations_pb2.CancelOperationRequest', + response_type='None' + ), + 'GetOperation': wrappers.MixinMethod( + 'GetOperation', + request_type='operations_pb2.GetOperationRequest', + response_type='operations_pb2.Operation' + ), + 'TestIamPermissions': wrappers.MixinMethod( + 'TestIamPermissions', + request_type='iam_policy_pb2.TestIamPermissionsRequest', + response_type='iam_policy_pb2.TestIamPermissionsResponse' + ), + 'GetIamPolicy': wrappers.MixinMethod( + 'GetIamPolicy', + request_type='iam_policy_pb2.GetIamPolicyRequest', + response_type='policy_pb2.Policy' + ), + 'SetIamPolicy': wrappers.MixinMethod( + 'SetIamPolicy', + request_type='iam_policy_pb2.SetIamPolicyRequest', + response_type='policy_pb2.Policy' + ), + 'ListLocations': wrappers.MixinMethod( + 'ListLocations', + request_type='locations_pb2.ListLocationsRequest', + response_type='locations_pb2.ListLocationsResponse' + ), + 'GetLocation': wrappers.MixinMethod( + 'GetLocation', + request_type='locations_pb2.GetLocationRequest', + response_type='locations_pb2.Location' + ) +} diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 87a5b4598096..c62425187d24 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1094,6 +1094,32 @@ def try_parse_http_rule(cls, 
http_rule) -> Optional['HttpRule']: return cls(method, uri, body) +@dataclasses.dataclass(frozen=True) +class MixinMethod: + name: str + request_type: str + response_type: str + + +@dataclasses.dataclass(frozen=True) +class MixinHttpRule(HttpRule): + def path_fields(self, uri): + """return list of (name, template) tuples extracted from uri.""" + return [ + (match.group("name"), match.group("template")) + for match in path_template._VARIABLE_RE.finditer(uri) + if match.group("name") + ] + + @property + def sample_request(self): + req = uri_sample.sample_from_path_fields(self.path_fields(self.uri)) + if not self.body or self.body == "" or self.body == "*": + return req + req[self.body] = {} # just an empty json. + return req + + @dataclasses.dataclass(frozen=True) class Method: """Description of a method (defined with the ``rpc`` keyword).""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 new file mode 100644 index 000000000000..3bf58c3c75d4 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -0,0 +1,93 @@ + {% if "rest" in opts.transport %} + + {% for name, sig in api.mixin_api_signatures.items() %} + @property + def {{ name|snake_case }}(self): + return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore + + class _{{ name }}({{service.name}}RestStub): + def __call__(self, + request: {{ sig.request_type }}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{ sig.response_type }}: + + r"""Call the {{- ' ' -}} + {{ (name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. 
+ + Args: + request ({{ sig.request_type }}): + The request object for {{ name }} method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + {% if sig.response_type != 'None' %} + + Returns: + {{ sig.response_type }}: Response from {{ name }} method. + {% endif %} + """ + + http_options: List[Dict[str, str]] = [ + {%- for rule in api.mixin_http_options["{}".format(name)] %}{ + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %} + ] + + request, metadata = self._interceptor.pre_{{ name|snake_case }}(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {%- if body_spec %} + body = json.loads(json.dumps(transcoded_request['body'])) + {%- endif %} + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + {% if body_spec %} + data=body, + {% endif %} + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + {% if sig.response_type == "None" %} + return self._interceptor.post_{{ name|snake_case }}(None) + {% else %} + + resp = {{ sig.response_type }}() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_{{ name|snake_case }}(resp) + return resp + {% endif %} + + {% endfor %} + {% endif %} {# rest in opts.transport #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 9f6489af27d6..1ee22af81c3c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -19,6 +19,16 @@ from google.protobuf import json_format {% if service.has_lro %} from google.api_core import operations_v1 {% endif %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} +{% if api.has_operations_mixin %} +from google.longrunning import operations_pb2 +{% endif %} from requests import __version__ as requests_version import dataclasses import re @@ -113,7 +123,25 @@ class {{ service.name }}RestInterceptor: """ return response {% endif %} + {% endfor %} + + {% for name, signature in api.mixin_api_signatures.items() %} + def pre_{{ name|snake_case }}(self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]]) -> {{signature.response_type}}: + """Pre-rpc interceptor for {{ name|snake_case }} + Override in a subclass to manipulate the request or 
metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + def post_{{ name|snake_case }}(self, response: {{signature.request_type}}) -> {{signature.response_type}}: + """Post-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response {% endfor %} @@ -344,7 +372,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }} ) - {%- endif %}{# body_spec #} + {%- endif %} uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -430,6 +458,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %} + {% include '%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2' %} + @property def kind(self) -> str: return "rest" diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 4140b61a6ea2..f0a0256211cc 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,3 +1,67 @@ +{% if 'rest' in opts.transport %} +{% for name, sig in api.mixin_api_signatures.items() %} + +def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.{{ name|snake_case }}(request) + +@pytest.mark.parametrize("request_type", [ + {{ sig.request_type }}, + dict, +]) +def test_{{ name|snake_case }}_rest(request_type): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + {% if sig.response_type == "None" %} + return_value = None + {% else %} + return_value = {{ sig.response_type }}() + {% endif %} + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if sig.response_type == "None" %} + json_return_value = '{}' + {% else %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.{{ name|snake_case }}(request) + + # Establish that the response is the type that we expect. 
+ {% if sig.response_type == "None" %} + assert response is None + {% else %} + assert isinstance(response, {{ sig.response_type }}) + {% endif %} +{% endfor %} +{% endif %} + {% if api.has_operations_mixin and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} {% if "DeleteOperation" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9207a1d9b60b..58ae08c11434 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -26,6 +26,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response from requests import Request, PreparedRequest from requests.sessions import Session +from google.protobuf import json_format {% endif %} {# Import the service itself as well as every proto module that it imports. 
#} diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index d897255068ab..607d4858bbf4 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -32,6 +32,7 @@ from gapic.schema import api from gapic.schema import imp +from gapic.schema import mixins from gapic.schema import naming from gapic.schema import wrappers from gapic.utils import Options @@ -2277,6 +2278,157 @@ def test_mixin_api_methods_iam_overrides(): assert api_schema.mixin_api_methods == {} +def create_service_config_with_all_mixins(http_opt_uri='/v1/{name=examples/*}/*'): + service_yaml_config = { + 'apis': [ + { + 'name': 'google.cloud.location.Locations', + }, + { + 'name': 'google.longrunning.Operations', + }, + { + 'name': 'google.iam.v1.IAMPolicy', + }, + ], + 'http': { + 'rules': [ + # Locations + { + 'selector': 'google.cloud.location.Locations.ListLocations', + 'get': http_opt_uri, + 'body': '*' + }, + { + 'selector': 'google.cloud.location.Locations.GetLocation', + 'get': http_opt_uri, + 'body': '*' + }, + # LRO + { + 'selector': 'google.longrunning.Operations.CancelOperation', + 'post': http_opt_uri, + 'body': '*', + }, + { + 'selector': 'google.longrunning.Operations.DeleteOperation', + 'get': http_opt_uri, + 'body': '*' + }, + { + 'selector': 'google.longrunning.Operations.WaitOperation', + 'post': http_opt_uri, + 'body': '*' + }, + { + 'selector': 'google.longrunning.Operations.GetOperation', + 'post': http_opt_uri, + 'body': '*' + }, + { + 'selector': 'google.longrunning.Operations.ListOperations', + 'post': http_opt_uri, + 'body': '*' + }, + # IAM + { + 'selector': 'google.iam.v1.IAMPolicy.SetIamPolicy', + 'post': http_opt_uri, + 'body': '*' + }, + { + 'selector': 'google.iam.v1.IAMPolicy.GetIamPolicy', + 'get': http_opt_uri, + 'body': '*' + }, + { + 'selector': 'google.iam.v1.IAMPolicy.TestIamPermissions', + 'post': http_opt_uri, + 'body': 
'*' + }, + { + 'selector': 'google.example.v1.Example', + } + ] + } + } + return service_yaml_config + + +def test_mixin_api_signatures(): + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()),), + ),) + opts = Options(service_yaml_config=create_service_config_with_all_mixins()) + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + res = api_schema.mixin_api_signatures + assert res == mixins.MIXINS_MAP + + +def test_mixin_http_options(): + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()),), + ),) + opts = Options(service_yaml_config={ + 'apis': [ + { + 'name': 'google.cloud.location.Locations', + }, + { + 'name': 'google.longrunning.Operations', + }, + { + 'name': 'google.iam.v1.IAMPolicy', + }, + ], + 'http': { + 'rules': [ + # LRO + { + 'selector': 'google.longrunning.Operations.CancelOperation', + 'post': '/v1/{name=examples/*}/*', + 'body': '*', + }, + { + 'selector': 'google.longrunning.Operations.DeleteOperation', + 'get': '/v1/{name=examples/*}/*', + 'body': '*' + }, + { + 'selector': 'google.longrunning.Operations.WaitOperation', + 'post': '/v1/{name=examples/*}/*', + 'body': '*' + }, + { + 'selector': 'google.longrunning.Operations.GetOperation', + 'post': '/v1/{name=examples/*}/*', + 'body': '*' + }, + { + 'selector': 'google.longrunning.Operations.ListOperations', + 'post': '/v1/{name=examples/*}/*', + 'body': '*' + }, + ] + } + }) + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + res = api_schema.mixin_http_options + assert res == { + 'ListOperations': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], + 'GetOperation': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], + 'DeleteOperation': [wrappers.MixinHttpRule('get', '/v1/{name=examples/*}/*', '*')], + 'CancelOperation': [wrappers.MixinHttpRule('post', 
'/v1/{name=examples/*}/*', '*')], + 'WaitOperation': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], + } + + def test_mixin_api_methods_lro(): fd = ( make_file_pb2( diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 70bc769b8051..3e27fed5756b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -987,3 +987,20 @@ def test_safe_name(): for name, method in unsafe_methods.items(): assert method.safe_name == f"{name}_" + + +def test_mixin_rule(): + m = wrappers.MixinHttpRule( + 'get', '/v1beta1/{name=projects/*}/locations', None) + e = { + 'name': 'projects/sample1' + } + assert e == m.sample_request + + m = wrappers.MixinHttpRule( + 'get', '/v1beta1/{name=projects/*}/locations', 'city') + e = { + 'name': 'projects/sample1', + 'city': {}, + } + assert e == m.sample_request From 409c90dee8e9b9184fceff51ca00e6034fa386b0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 7 Sep 2022 17:11:15 -0700 Subject: [PATCH 0887/1339] chore(main): release 1.4.0 (#1435) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a71692b38f92..aba013e42b50 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.4.0](https://github.com/googleapis/gapic-generator-python/compare/v1.3.1...v1.4.0) (2022-09-07) + + +### Features + +* Implement REST support for MixIns ([#1378](https://github.com/googleapis/gapic-generator-python/issues/1378)) 
([0e38fa8](https://github.com/googleapis/gapic-generator-python/commit/0e38fa839460401453b510b616f1e22bcdead60d)) + ## [1.3.1](https://github.com/googleapis/gapic-generator-python/compare/v1.3.0...v1.3.1) (2022-09-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5749639b375b..09866be4e977 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.3.1" +version = "1.4.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From b9442061edae2d611e308e1130cdfd61caab8e5e Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Fri, 9 Sep 2022 10:07:02 -0700 Subject: [PATCH 0888/1339] fix(rest): Use strict encoding to lowercase query string bools (#1436) * fix(rest): Use strict encoding to lowercase query string bools - depend on google-api-core >= 2.10.0 for REST transport - use `strict` to force query string booleans to be JSON encoded in lower case * Stringify test expected value to adapt to google-api-core changes --- .../%version/%sub/services/%service/transports/rest.py.j2 | 2 +- packages/gapic-generator/gapic/ads-templates/setup.py.j2 | 2 +- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- .../%sub/services/%service/transports/rest.py.j2 | 2 +- packages/gapic-generator/gapic/templates/setup.py.j2 | 5 ----- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 2 +- 6 files changed, 5 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 436573a1874f..4bfbbff8c9c6 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -364,7 +364,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params), + params=rest_helpers.flatten_query_params(query_params, strict=True), {% if body_spec %} data=body, {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 7f3716da90b4..7b983cb273c7 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -19,7 +19,7 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.4.0, < 3.0.0dev', + 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', {% else %} 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9563ed4fa478..198efeca7854 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1214,7 +1214,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% if req_field.field_pb.type == 9 %} "{{ req_field.field_pb.default_value }}", {% else %} - {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, + str({{ req_field.type.python_type(req_field.field_pb.default_value 
or 0) }}), {% endif %}{# default is str #} ), {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 1ee22af81c3c..a7624f1a87c1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -398,7 +398,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, headers=headers, - params=rest_helpers.flatten_query_params(query_params), + params=rest_helpers.flatten_query_params(query_params, strict=True), {% if body_spec %} data=body, {% endif %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 7e1567f303c5..9d582615552e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -30,12 +30,7 @@ setuptools.setup( platforms='Posix; MacOS X; Windows', include_package_data=True, install_requires=( - {# TODO(dovs): remove when 1.x deprecation is complete #} - {% if 'rest' in opts.transport %} 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - {% else %} - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - {% endif %} 'libcst >= 0.2.5', 'googleapis-common-protos >= 1.55.0, <2.0.0dev', 'proto-plus >= 1.19.7', diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 731482be48b8..55fd9e58b786 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1099,7 +1099,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% if req_field.field_pb.type == 9 %} "{{ req_field.field_pb.default_value }}", {% else %} - {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}, + str({{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}), {% endif %}{# default is str #} ), {% endfor %} From 7cd41630dd4f69d60fa79cdcc43a324bb71cbee4 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Fri, 9 Sep 2022 11:33:20 -0700 Subject: [PATCH 0889/1339] fix: Fix test generation for `*Value` wrapper classes (#1437) This fixes https://github.com/googleapis/gapic-generator-python/issues/1372 Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.bazelrc | 2 ++ packages/gapic-generator/WORKSPACE | 14 ++++++++++++++ .../gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_macros.j2 | 4 ++++ .../unit/gapic/asset_v1/test_asset_service.py | 2 +- .../gapic/credentials_v1/test_iam_credentials.py | 2 +- .../tests/unit/gapic/eventarc_v1/test_eventarc.py | 2 +- .../gapic/logging_v2/test_config_service_v2.py | 2 +- .../gapic/logging_v2/test_logging_service_v2.py | 2 +- .../gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- 11 files changed, 28 insertions(+), 8 deletions(-) create mode 100644 packages/gapic-generator/.bazelrc diff --git a/packages/gapic-generator/.bazelrc b/packages/gapic-generator/.bazelrc new file mode 100644 index 000000000000..19ab83c1cdb4 --- /dev/null +++ b/packages/gapic-generator/.bazelrc @@ -0,0 +1,2 @@ +# New boringssl requires C++14 +build --repo_env=BAZEL_CXXOPTS="-std=c++14" diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index d5239a9aac85..3c08a04e7ad3 100644 --- 
a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -50,6 +50,20 @@ load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") protobuf_deps() +# Import boringssl explicitly to override what gRPC imports as its dependency. +# Boringssl build fails on gcc12 without this fix: +# https://github.com/google/boringssl/commit/8462a367bb57e9524c3d8eca9c62733c63a63cf4, +# which is present only in the newest version of boringssl, not the one imported +# by gRPC. Remove this import once gRPC depends on a newer version. +http_archive( + name = "boringssl", + sha256 = "b460f8673f3393e58ce506e9cdde7f2c3b2575b075f214cb819fb57d809f052b", + strip_prefix = "boringssl-bb41bc007079982da419c0ec3186e510cbcf09d0", + urls = [ + "https://github.com/google/boringssl/archive/bb41bc007079982da419c0ec3186e510cbcf09d0.zip", + ], +) + # # Import grpc as a native bazel dependency. This avoids duplication and also # speeds up loading phase a lot (otherwise python_rules will be building grpcio diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 58ae08c11434..40a99150b56a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -21,7 +21,7 @@ import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers {% if 'rest' in opts.transport %} from requests import Response from requests import Request, PreparedRequest diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 55fd9e58b786..0ab177be7be0 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -400,6 +400,8 @@ def test_{{ method_name }}_flattened(): assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif (field.ident|string()).startswith('wrappers_pb2.') %} + assert wrappers.{{ (field.ident|string())[13:] }}Rule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} arg = args[0].{{ key }} mock_val = {{ field.mock_value }} @@ -493,6 +495,8 @@ async def test_{{ method_name }}_flattened_async(): assert TimestampRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% elif field.ident|string() == 'duration_pb2.Duration' %} assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif (field.ident|string()).startswith('wrappers_pb2.') %} + assert wrappers.{{ (field.ident|string())[13:] }}Rule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% else %} arg = args[0].{{ key }} mock_val = {{ field.mock_value }} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 44b675b470cf..4fc860508d78 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 56311032b9a0..cc9650d79905 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 7e74a1ebe3e2..4b89a35c2a43 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 827611076f9e..af8c6186c49b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 76a58681929f..d7697c40153d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index dbd281a03447..c193ecc27dd6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ad48f0866de7..d9802ee941f6 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -26,7 +26,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions From 22c57a736655db6296463478883d4a5dbe450c6e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 9 Sep 2022 16:09:01 -0400 Subject: [PATCH 0890/1339] fix: remove grpc only methods from rest.py (#1440) --- .../%name/%version/%sub/services/%service/transports/rest.py.j2 | 2 +- .../%name_%version/%sub/services/%service/transports/rest.py.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 4bfbbff8c9c6..249bbb44cf6a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -84,7 +84,7 @@ class {{ service.name }}RestInterceptor: """ - {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming %} + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} def pre_{{ method.name|snake_case }}(self, 
request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for {{ method.name|snake_case }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index a7624f1a87c1..db5eaaf120ef 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -100,7 +100,7 @@ class {{ service.name }}RestInterceptor: """ - {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming %} + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for {{ method.name|snake_case }} From ddd372386471c48fe7448814854675314ab3bfdf Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Fri, 9 Sep 2022 14:07:46 -0700 Subject: [PATCH 0891/1339] chore(test): update Showcase version; make protoc version an env var (#1439) Also fixes Showcase tests to check for the correct exceptions. 
Co-authored-by: Anthonios Partheniou --- .../gapic-generator/.github/workflows/tests.yaml | 10 ++++++---- packages/gapic-generator/noxfile.py | 2 +- .../gapic-generator/tests/system/test_retry.py | 14 ++++++++++++-- .../gapic-generator/tests/system/test_unary.py | 15 ++++++++++++++- 4 files changed, 33 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 3791de537808..8864a4157501 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -67,17 +67,19 @@ jobs: ./gapic-showcase run & cd - env: - SHOWCASE_VERSION: 0.19.0 + SHOWCASE_VERSION: 0.25.0 - name: Install nox. run: python -m pip install nox - - name: Install protoc 3.19.0. + - name: Install protoc run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip + curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip cd /usr/src/protoc/ - unzip protoc-3.19.0.zip + unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc + env: + PROTOC_VERSION: 3.19.0 - name: Run showcase tests. 
run: nox -s ${{ matrix.target }} showcase-mtls: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 39f24d71fa69..04ca408d8679 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.22.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.25.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 9c7970210f50..0d2f94109034 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -20,8 +20,7 @@ def test_retry_bubble(echo): - # Note: InvalidArgument is from gRPC, InternalServerError from http - with pytest.raises((exceptions.DeadlineExceeded, exceptions.InternalServerError)): + with pytest.raises(exceptions.GatewayTimeout): echo.echo({ 'error': { 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), @@ -29,6 +28,17 @@ def test_retry_bubble(echo): }, }) + if isinstance(echo.transport, type(echo).get_transport_class("grpc")): + # Under gRPC, we raise exceptions.DeadlineExceeded, which is a + # sub-class of exceptions.GatewayTimeout. 
+ with pytest.raises(exceptions.DeadlineExceeded): + echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), + 'message': 'This took longer than you said it should.', + }, + }) + if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index aafc50066d78..bc72f352af9d 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -38,7 +38,7 @@ def test_unary_with_dict(echo): def test_unary_error(echo): message = 'Bad things! Bad things!' # Note: InvalidArgument is from gRPC, InternalServerError from http - with pytest.raises((exceptions.InvalidArgument, exceptions.InternalServerError)) as exc: + with pytest.raises(exceptions.BadRequest) as exc: echo.echo({ 'error': { 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), @@ -48,6 +48,19 @@ def test_unary_error(echo): assert exc.value.code == 400 assert exc.value.message == message + if isinstance(echo.transport, type(echo).get_transport_class("grpc")): + # Under gRPC, we raise exceptions.InvalidArgument, which is a + # sub-class of exceptions.BadRequest. 
+ with pytest.raises(exceptions.InvalidArgument) as exc: + echo.echo({ + 'error': { + 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), + 'message': message, + }, + }) + assert exc.value.code == 400 + assert exc.value.message == message + if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": import asyncio From b4675f8bf1a226c376f807bb5d59a101960009f4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 12 Sep 2022 12:39:39 -0700 Subject: [PATCH 0892/1339] chore(main): release 1.4.1 (#1438) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index aba013e42b50..c9aee317a6b3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.4.1](https://github.com/googleapis/gapic-generator-python/compare/v1.4.0...v1.4.1) (2022-09-09) + + +### Bug Fixes + +* Fix test generation for `*Value` wrapper classes ([#1437](https://github.com/googleapis/gapic-generator-python/issues/1437)) ([9e9971f](https://github.com/googleapis/gapic-generator-python/commit/9e9971f6321207fe33a0d28f32a07e3b1f0e795a)) +* Remove grpc only methods from rest.py ([#1440](https://github.com/googleapis/gapic-generator-python/issues/1440)) ([c12a1c2](https://github.com/googleapis/gapic-generator-python/commit/c12a1c208dc91d8b222653b0d9d9696448751c91)) +* **rest:** Use strict encoding to lowercase query string bools ([#1436](https://github.com/googleapis/gapic-generator-python/issues/1436)) ([e667406](https://github.com/googleapis/gapic-generator-python/commit/e6674061ebd919281b49838f44fc0be8730595dc)) + ## 
[1.4.0](https://github.com/googleapis/gapic-generator-python/compare/v1.3.1...v1.4.0) (2022-09-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 09866be4e977..ac024c02d92f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.4.0" +version = "1.4.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From d37aec542da4d8ea73d0c4023e9c719d67135180 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 13 Sep 2022 21:29:18 +0000 Subject: [PATCH 0893/1339] fix: unit test generation for boolean query prams (#1447) Without this fix the current compute engine tests are failing. Also 3 other APIs in googleapis would have failed if grpc+rest transport was enabled on them. 
--- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 ++ .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 2 ++ 2 files changed, 4 insertions(+) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 198efeca7854..beeac730a015 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1213,6 +1213,8 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} "{{ req_field.field_pb.default_value }}", + {% elif req_field.field_pb.type == 8 %} + str({{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}).lower(), {% else %} str({{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}), {% endif %}{# default is str #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 0ab177be7be0..365f13463079 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1102,6 +1102,8 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide "{{ req_field.name | camel_case }}", {% if req_field.field_pb.type == 9 %} "{{ req_field.field_pb.default_value }}", + {% elif req_field.field_pb.type == 8 %} + str({{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}).lower(), {% else %} str({{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}), {% 
endif %}{# default is str #} From bcf36f8ee71f909608f4cd35faa6ab43c7c5c9b0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 18:09:46 -0700 Subject: [PATCH 0894/1339] chore(main): release 1.4.2 (#1448) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c9aee317a6b3..57ab06000877 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.4.2](https://github.com/googleapis/gapic-generator-python/compare/v1.4.1...v1.4.2) (2022-09-13) + + +### Bug Fixes + +* Unit test generation for boolean query prams ([#1447](https://github.com/googleapis/gapic-generator-python/issues/1447)) ([dd68dd1](https://github.com/googleapis/gapic-generator-python/commit/dd68dd1f288c4fbbdb7f54900094379ee1d771c0)) + ## [1.4.1](https://github.com/googleapis/gapic-generator-python/compare/v1.4.0...v1.4.1) (2022-09-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ac024c02d92f..229226208d08 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.4.1" +version = "1.4.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 3c54fed80fb9f96a358f973180d8c0ecdba1124f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Sep 2022 16:00:08 +0200 Subject: [PATCH 0895/1339] chore(deps): update dependency protobuf to v3.20.2 (#1449) Co-authored-by: Anthonios Partheniou --- 
packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 9c71d3d19d15..85a730189ae5 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.10.0 googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 -protobuf==3.20.1 +protobuf==3.20.2 pypandoc==1.8.1 PyYAML==6.0 setuptools==65.3.0 From 56c0bf9c9a428e7717b27eae2ce70ac0c9ff84b7 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 15 Sep 2022 15:25:33 +0200 Subject: [PATCH 0896/1339] chore(deps): update dependency google-api-core to v2.10.1 (#1450) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 85a730189ae5..2ac3b7f815e8 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.10.0 +google-api-core==2.10.1 googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 From 11088215670324fcd01f616034ef4c8a382ce99f Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Thu, 15 Sep 2022 22:19:23 +0000 Subject: [PATCH 0897/1339] fix: femove `vars` and `set`from reserved names (#1451) This fixes https://github.com/googleapis/gapic-generator-python/issues/1348. This is also backwar compatible with already released clients dataform and network-services, because removal of unnecessary `_` is already happening in ppostprocesing scripts for those two affected apis. 
--- packages/gapic-generator/gapic/utils/reserved_names.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 40d9301fa950..6ab813223c09 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -24,7 +24,8 @@ # We CANNOT make exceptions for keywords. keyword.kwlist, # We make SOME exceptions for certain names that collide with builtins. - set(dir(builtins)) - {"filter", "map", "id", "input", "property"}, + set(dir(builtins)) - {"filter", "map", "id", + "input", "property", "vars", "set"}, # "mapping" and "ignore_unknown_fields" have special uses # in the constructor of proto.Message {"mapping", "ignore_unknown_fields"}, From caa6ca1a590982630a8ed3df49da433ad1c80ee1 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 19 Sep 2022 20:12:28 +0000 Subject: [PATCH 0898/1339] fix: accept 4.x protobuf for gapic-generator-python (#1453) Also update github actions configs and make SHOWCASE and PROTOC global environment variables This is needed because newest version of grpc already require protobuf 4.x and it makes our showcase tests fail --- .../.github/workflows/tests.yaml | 40 ++++++++----------- packages/gapic-generator/setup.py | 2 +- 2 files changed, 17 insertions(+), 25 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 8864a4157501..31a40e06aed2 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -13,6 +13,10 @@ concurrency: group: tests-${{ github.head_ref }} cancel-in-progress: true +env: + SHOWCASE_VERSION: 0.25.0 + PROTOC_VERSION: 3.20.2 + jobs: docs: # Don't upgrade python version; there's a bug in 3.10 sphinx @@ -66,8 +70,6 @@ jobs: 
tar -xf showcase-* ./gapic-showcase run & cd - - env: - SHOWCASE_VERSION: 0.25.0 - name: Install nox. run: python -m pip install nox - name: Install protoc @@ -78,8 +80,6 @@ jobs: cd /usr/src/protoc/ unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - env: - PROTOC_VERSION: 3.19.0 - name: Run showcase tests. run: nox -s ${{ matrix.target }} showcase-mtls: @@ -108,13 +108,13 @@ jobs: sudo apt-get install -y curl pandoc unzip gcc - name: Install nox. run: python -m pip install nox - - name: Install protoc 3.19.0. + - name: Install protoc. run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src - curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip + curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip cd /usr/src/protoc/ - unzip protoc-3.19.0.zip + unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc cd - - name: Run showcase tests. @@ -127,8 +127,6 @@ jobs: cd .. nox -s ${{ matrix.target }} - env: - SHOWCASE_VERSION: 0.19.0 # TODO(yon-mg): add compute unit tests showcase-unit: strategy: @@ -147,20 +145,18 @@ jobs: run: | sudo apt-get update sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.19.0. + - name: Install protoc. 
run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip + curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip cd /usr/src/protoc/ - unzip protoc-3.19.0.zip + unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Install nox. run: python -m pip install nox - name: Run unit tests. run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} - env: - SHOWCASE_VERSION: 0.19.0 showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: @@ -174,20 +170,18 @@ jobs: run: | sudo apt-get update sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.19.0. + - name: Install protoc. run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip + curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip cd /usr/src/protoc/ - unzip protoc-3.19.0.zip + unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Install nox. run: python -m pip install nox - name: Run unit tests. run: nox -s showcase_unit_add_iam_methods - env: - SHOWCASE_VERSION: 0.19.0 showcase-mypy: runs-on: ubuntu-latest strategy: @@ -204,20 +198,18 @@ jobs: run: | sudo apt-get update sudo apt-get install -y curl pandoc unzip gcc - - name: Install protoc 3.19.0. + - name: Install protoc. 
run: | sudo mkdir -p /usr/src/protoc/ sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v3.19.0/protoc-3.19.0-linux-x86_64.zip --output /usr/src/protoc/protoc-3.19.0.zip + curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip cd /usr/src/protoc/ - unzip protoc-3.19.0.zip + unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Install nox. run: python -m pip install nox - name: Typecheck the generated output. run: nox -s showcase_mypy${{ matrix.variant }} - env: - SHOWCASE_VERSION: 0.19.0 snippetgen: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 229226208d08..9f5cbc588340 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -30,7 +30,7 @@ "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", - "protobuf >= 3.18.0, <4.0.0dev", + "protobuf >= 3.18.0, < 5.0.0dev", "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", From f5aeee1952b84bfb5568aad5d984ae04f73dda64 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 19 Sep 2022 13:45:02 -0700 Subject: [PATCH 0899/1339] chore(main): release 1.4.3 (#1452) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 57ab06000877..745951b2d9c5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## 
[1.4.3](https://github.com/googleapis/gapic-generator-python/compare/v1.4.2...v1.4.3) (2022-09-19) + + +### Bug Fixes + +* Accept 4.x protobuf for gapic-generator-python ([#1453](https://github.com/googleapis/gapic-generator-python/issues/1453)) ([d9099dd](https://github.com/googleapis/gapic-generator-python/commit/d9099ddaff1fadb9fc3ebbab1702d50e609986cc)) +* Femove `vars` and `set`from reserved names ([#1451](https://github.com/googleapis/gapic-generator-python/issues/1451)) ([ae3e6bf](https://github.com/googleapis/gapic-generator-python/commit/ae3e6bf350191cf65337a37822c6d1dff8e6dca4)) + ## [1.4.2](https://github.com/googleapis/gapic-generator-python/compare/v1.4.1...v1.4.2) (2022-09-13) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9f5cbc588340..29baeb4d447b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.4.2" +version = "1.4.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From c6e2655eb4021dac307a3ba81ea577f2f84f725a Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 20 Sep 2022 12:25:41 +0000 Subject: [PATCH 0900/1339] fix: do not generate _flattened() unit tests for client streaming methods (#1454) The generator does not generate flattened parameters for client streamin methods, so generating unit tests for them does not make sense --- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 365f13463079..6e56cb560c33 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -363,7 +363,7 @@ def test_{{ method_name }}_from_dict_foreign(): {% endif %} -{% if method.flattened_fields %} +{% if method.flattened_fields and not method.client_streaming %} def test_{{ method_name }}_flattened(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), @@ -1224,7 +1224,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ {% endif %} -{% if method.flattened_fields %} +{% if method.flattened_fields and not method.client_streaming %} def test_{{ method_name }}_rest_flattened(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), From 712d8fe417c1615d7c69a1272a87bb8d860bdd3f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 20 Sep 2022 16:04:25 -0700 Subject: [PATCH 0901/1339] chore(main): release 1.4.4 (#1456) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 745951b2d9c5..a0fd184faae5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.4.4](https://github.com/googleapis/gapic-generator-python/compare/v1.4.3...v1.4.4) (2022-09-20) + + +### Bug Fixes + +* Do not generate _flattened() unit tests for client streaming methods ([#1454](https://github.com/googleapis/gapic-generator-python/issues/1454)) ([29610ad](https://github.com/googleapis/gapic-generator-python/commit/29610ad68cfd8326e5430413d0ead1951a9b06b2)) + ## 
[1.4.3](https://github.com/googleapis/gapic-generator-python/compare/v1.4.2...v1.4.3) (2022-09-19) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 29baeb4d447b..92d9402ce43b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.4.3" +version = "1.4.4" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From b7686fe30ea65b3adbaeb6649a132c806669e4de Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 27 Sep 2022 17:41:27 +0000 Subject: [PATCH 0902/1339] fix: fix multiple gapic-generator-python bugs (#1458) * fix: fix multiple gapic-generator-python bugs Specifically it fixes: https://github.com/googleapis/gapic-generator-python/issues/1359 https://github.com/googleapis/gapic-generator-python/issues/1377 https://github.com/googleapis/gapic-generator-python/issues/1375 --- .../services/%service/transports/rest.py.j2 | 44 +++++----- .../%name_%version/%sub/test_%service.py.j2 | 85 ++++++++++++------- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../%sub/services/%service/_client_macros.j2 | 2 + .../%name_%version/%sub/test_%service.py.j2 | 3 +- .../gapic/%name_%version/%sub/test_macros.j2 | 2 +- .../logging_v2/test_logging_service_v2.py | 12 +-- .../unit/gapic/redis_v1/test_cloud_redis.py | 1 + 8 files changed, 90 insertions(+), 61 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 249bbb44cf6a..b693f7530362 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -15,9 +15,9 @@ from google.api_core import rest_streaming from google.api_core import path_template from google.api_core import gapic_v1 +from google.protobuf import json_format {% if service.has_lro %} from google.api_core import operations_v1 -from google.protobuf import json_format {% endif %} from requests import __version__ as requests_version import dataclasses @@ -320,35 +320,33 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %}{# rule in method.http_options #} ] request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - request_kwargs = {{method.input.ident}}.to_dict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + {% if method.input.ident.is_external_type %} + pb_request = request + {% else %} + pb_request = {{method.input.ident}}.pb(request) + {% endif %} + transcoded_request = path_template.transcode(http_options, pb_request) {% set body_spec = method.http_options[0].body %} {%- if body_spec %} # Jsonify the request body - body = {% if body_spec == '*' -%} - {{method.input.ident}}.to_json( - {{method.input.ident}}(transcoded_request['body']), - {% else -%} - {{method.input.fields[body_spec].type.ident}}.to_json( - {{method.input.fields[body_spec].type.ident}}(transcoded_request['body']), - {% endif %}{# body_spec == "*" #} + + body = json_format.MessageToJson( + transcoded_request['body'], including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }} ) - {%- endif %}{# body_spec #} + {%- endif %} uri = transcoded_request['uri'] method = transcoded_request['method'] # Jsonify the query params - query_params = json.loads({{method.input.ident}}.to_json( - 
{{method.input.ident}}(transcoded_request['query_params']), + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], including_default_value_fields=False, - use_integers_for_enums={{ opts.rest_numeric_enums }} + use_integers_for_enums={{ opts.rest_numeric_enums }}, )) - {% if method.input.required_fields %} query_params.update(self._get_unset_required_fields(query_params)) {% endif %}{# required fields #} @@ -383,10 +381,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% elif method.server_streaming %} resp = rest_streaming.ResponseIterator(response, {{method.output.ident}}) {% else %} - resp = {{method.output.ident}}.from_json( - response.content, - ignore_unknown_fields=True - ) + resp = {{method.output.ident}}() + {% if method.output.ident.is_external_type %} + pb_resp = resp + {% else %} + pb_resp = {{method.output.ident}}.pb(resp) + {% endif %} + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) {% endif %}{# method.lro #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) return resp @@ -411,7 +413,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% for method in service.methods.values()|sort(attribute="name") %} @property - def {{method.transport_safe_name | snake_case}}(self) -> Callable[ + def {{method.transport_safe_name|snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index beeac730a015..3c95d6296627 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -6,14 +6,15 @@ import os # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc from grpc.experimental import aio {% if "rest" in opts.transport %} from collections.abc import Iterable +from google.protobuf import json_format import json {% endif %} import math @@ -44,9 +45,6 @@ from google.api_core import future from google.api_core import operation from google.api_core import operations_v1 from google.longrunning import operations_pb2 -{% if "rest" in opts.transport %} -from google.protobuf import json_format -{% endif %}{# rest transport #} {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 @@ -1101,12 +1099,17 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide request_init["{{ req_field.name }}"] = {{ req_field.type.python_type(req_field.field_pb.default_value or 0) }} {% endif %}{# default is str #} {% endfor %} - request = request_type(request_init) - jsonified_request = json.loads(request_type.to_json( - request, + request = request_type(**request_init) + {% if method.input.ident.is_external_type %} + pb_request = request + {% else %} + pb_request = request_type.pb(request) + {% endif %} + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, including_default_value_fields=False, 
use_integers_for_enums=False - )) + )) # verify fields with default values are dropped {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} @@ -1114,7 +1117,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert "{{ field_name }}" not in jsonified_request {% endfor %} - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1124,26 +1127,21 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert jsonified_request["{{ field_name }}"] == request_init["{{ req_field.name }}"] {% endfor %} - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} - {% if method.query_params %} - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param|camel_case }}", {% endfor %})) - {% endif %} jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) {% if method.query_params %} # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", -{% endfor %})) + assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) {% endif %} jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - {% for req_field in method.input.required_fields if req_field.is_primitive and req_field.name in method.query_params %} + {% for req_field in method.input.required_fields if req_field.is_primitive %} {% set field_name = req_field.name | camel_case %} {% set mock_value = req_field.primitive_mock_as_str() %} assert "{{ field_name }}" in jsonified_request @@ -1155,7 +1153,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) - request = request_type(request_init) + request = request_type(**request_init) # Designate an appropriate value for the returned response. {% if method.void %} @@ -1173,13 +1171,18 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide with mock.patch.object(path_template, 'transcode') as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. 
+ {% if method.input.ident.is_external_type %} + pb_request = request + {% else %} + pb_request = request_type.pb(request) + {% endif %} transcode_result = { 'uri': 'v1/sample_method', 'method': "{{ method.http_options[0].method }}", - 'query_params': request_init, + 'query_params': pb_request, } {% if method.http_options[0].body %} - transcode_result['body'] = {} + transcode_result['body'] = pb_request {% endif %} transcode.return_value = transcode_result @@ -1189,16 +1192,24 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) + + {% if method.output.ident.is_external_type %} + pb_return_value = return_value + {% else %} + pb_return_value = {{ method.output.ident }}.pb(return_value) + {% endif %} + json_return_value = json_format.MessageToJson(pb_return_value) {% endif %} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method_name }}(iter(requests)) {% elif method.server_streaming %} with mock.patch.object(response_value, 'iter_content') as iter_content: iter_content.return_value = iter(json_return_value) @@ -1237,7 +1248,7 @@ def test_{{ method_name }}_rest_unset_required_fields(): {% endif %}{# required_fields #} -{% if not (method.server_streaming or method.client_streaming) %} +{% if not method.client_streaming %} @pytest.mark.parametrize("null_interceptor", [True, False]) def test_{{ method_name }}_rest_interceptors(null_interceptor): transport = 
transports.{{ service.name }}RestTransport( @@ -1258,14 +1269,28 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): {% if not method.void %} post.assert_not_called() {% endif %} - - transcode.return_value = {"method": "post", "uri": "my_uri", "body": None, "query_params": {},} + {% if method.input.ident.is_external_type %} + pb_message = {{ method.input.ident }}() + {% else %} + pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) + {% endif %} + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() {% if not method.void %} req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + + {% if method.server_streaming %} + req.return_value._content = "[{}]".format(req.return_value._content) + {% endif %} + {% endif %} request = {{ method.input.ident }}() @@ -1275,7 +1300,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata {% if not method.void %} - post.return_value = {{ method.output.ident }} + post.return_value = {{ method.output.ident }}() {% endif %} client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index c62425187d24..cf8690e5920a 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -77,7 +77,7 @@ def __hash__(self): def name(self) -> str: """Used to prevent collisions with python keywords""" name = self.field_pb.name - return name + "_" if name in utils.RESERVED_NAMES else name + return name + "_" if name in 
utils.RESERVED_NAMES and not self.meta.address.is_external_type else name @utils.cached_property def ident(self) -> metadata.FieldIdentifier: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index e5fc01d5a592..705b92189800 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -146,6 +146,7 @@ {% if method.explicit_routing %} header_params = {} + {% if not method.client_streaming %} {% for routing_param in method.routing_rule.routing_parameters %} {% if routing_param.path_template %} {# Need to match. #} @@ -161,6 +162,7 @@ {% endif %} {% endfor %} {# method.routing_rule.routing_parameters #} + {% endif %} {# if not method.client_streaming #} if header_params: metadata = tuple(metadata) + ( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 40a99150b56a..0b2c3b373694 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -63,8 +63,7 @@ from google.longrunning import operations_pb2 from google.api_core import gapic_v1 {% for method in service.methods.values() %} {% for ref_type in method.ref_types - if not ((ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') - or ref_type.ident.python_import.package == ('google', 'protobuf') and ref_type.ident.python_import.module == 'empty_pb2') %} + if not 
(ref_type.ident.python_import.package == ('google', 'api_core') and ref_type.ident.python_import.module == 'operation') %} {{ ref_type.ident.python_import }} {% endfor %} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 6e56cb560c33..a76c758334d0 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -212,7 +212,7 @@ def test_{{ method.name|snake_case }}_routing_parameters(): {% for routing_param in method.routing_rule.routing_parameters %} # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}({{ routing_param.sample_request }}) + request = {{ method.input.ident }}(**{{ routing_param.sample_request }}) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index d7697c40153d..8ede77f6e754 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -853,7 +853,7 @@ def test_write_log_entries_flattened(): # using the keyword arguments to the method. 
client.write_log_entries( log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), labels={'key_value': 'value_value'}, entries=[log_entry.LogEntry(log_name='log_name_value')], ) @@ -866,7 +866,7 @@ def test_write_log_entries_flattened(): mock_val = 'log_name_value' assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type_='type__value') + mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') assert arg == mock_val arg = args[0].labels mock_val = {'key_value': 'value_value'} @@ -887,7 +887,7 @@ def test_write_log_entries_flattened_error(): client.write_log_entries( logging.WriteLogEntriesRequest(), log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), labels={'key_value': 'value_value'}, entries=[log_entry.LogEntry(log_name='log_name_value')], ) @@ -910,7 +910,7 @@ async def test_write_log_entries_flattened_async(): # using the keyword arguments to the method. 
response = await client.write_log_entries( log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), labels={'key_value': 'value_value'}, entries=[log_entry.LogEntry(log_name='log_name_value')], ) @@ -923,7 +923,7 @@ async def test_write_log_entries_flattened_async(): mock_val = 'log_name_value' assert arg == mock_val arg = args[0].resource - mock_val = monitored_resource_pb2.MonitoredResource(type_='type__value') + mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') assert arg == mock_val arg = args[0].labels mock_val = {'key_value': 'value_value'} @@ -944,7 +944,7 @@ async def test_write_log_entries_flattened_error_async(): await client.write_log_entries( logging.WriteLogEntriesRequest(), log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type_='type__value'), + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), labels={'key_value': 'value_value'}, entries=[log_entry.LogEntry(log_name='log_name_value')], ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index d9802ee941f6..ad84ef3c0a2c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -47,6 +47,7 @@ from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth From 044e47ff386636c12c6d31ac85e3225cbb9b09d2 Mon Sep 17 00:00:00 2001 
From: WhiteSource Renovate Date: Mon, 3 Oct 2022 19:04:59 +0200 Subject: [PATCH 0903/1339] chore(deps): update dependency protobuf to v3.20.3 (#1462) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2ac3b7f815e8..664cfbd7c899 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.10.1 googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 -protobuf==3.20.2 +protobuf==3.20.3 pypandoc==1.8.1 PyYAML==6.0 setuptools==65.3.0 From e31087ad9ae4448498bafe4c1c11887f757813f2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 02:55:00 +0200 Subject: [PATCH 0904/1339] chore(deps): update dependency pypandoc to v1.9 (#1459) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 664cfbd7c899..2a9b487ea67f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,7 +4,7 @@ googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.3 -pypandoc==1.8.1 +pypandoc==1.9 PyYAML==6.0 setuptools==65.3.0 grpc-google-iam-v1==0.12.4 From b6468c52a873bafcc0a488a05ec2dc4d71046888 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 03:03:32 +0200 Subject: [PATCH 0905/1339] chore(deps): update dependency setuptools to v65.4.1 (#1460) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2a9b487ea67f..ee9d087f8c0a 100644 --- a/packages/gapic-generator/requirements.txt +++ 
b/packages/gapic-generator/requirements.txt @@ -6,7 +6,7 @@ MarkupSafe==2.1.1 protobuf==3.20.3 pypandoc==1.9 PyYAML==6.0 -setuptools==65.3.0 +setuptools==65.4.1 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 pytest-asyncio==0.19.0 \ No newline at end of file From 887b2d5e613f8043b11ea05f1386d51c4f0e5c19 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Tue, 4 Oct 2022 09:05:35 -0700 Subject: [PATCH 0906/1339] fix: snippetgen skip REST snippets (#1463) * fix: snippetgen skip REST snippets * upgrade test case to include REST * refactor transport_type computation * revert chunks from incorrect branch * enable REST transport for goldens and update golden files * update golden files with REST enabled Co-authored-by: Anthonios Partheniou --- .../gapic/generator/generator.py | 6 + .../gapic/samplegen/samplegen.py | 13 +- .../tests/integration/BUILD.bazel | 5 + .../google/cloud/asset_v1/gapic_metadata.json | 65 + .../asset_v1/services/asset_service/client.py | 5 + .../asset_service/transports/__init__.py | 5 + .../services/asset_service/transports/rest.py | 1540 +++++ .../unit/gapic/asset_v1/test_asset_service.py | 2851 +++++++++ .../iam/credentials_v1/gapic_metadata.json | 25 + .../services/iam_credentials/client.py | 5 + .../iam_credentials/transports/__init__.py | 5 + .../iam_credentials/transports/rest.py | 658 +++ .../credentials_v1/test_iam_credentials.py | 1040 ++++ .../cloud/eventarc_v1/gapic_metadata.json | 30 + .../eventarc_v1/services/eventarc/client.py | 5 + .../services/eventarc/transports/__init__.py | 5 + .../services/eventarc/transports/rest.py | 785 +++ .../unit/gapic/eventarc_v1/test_eventarc.py | 1357 +++++ .../cloud/logging_v2/gapic_metadata.json | 185 + .../services/config_service_v2/client.py | 5 + .../config_service_v2/transports/__init__.py | 5 + .../config_service_v2/transports/rest.py | 3125 ++++++++++ .../services/logging_service_v2/client.py | 5 + .../logging_service_v2/transports/__init__.py | 5 + .../logging_service_v2/transports/rest.py | 766 
+++ .../services/metrics_service_v2/client.py | 5 + .../metrics_service_v2/transports/__init__.py | 5 + .../metrics_service_v2/transports/rest.py | 749 +++ .../logging_v2/test_config_service_v2.py | 5245 +++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 1278 ++++ .../logging_v2/test_metrics_service_v2.py | 1310 ++++ .../google/cloud/redis_v1/gapic_metadata.json | 50 + .../redis_v1/services/cloud_redis/client.py | 5 + .../cloud_redis/transports/__init__.py | 5 + .../services/cloud_redis/transports/rest.py | 1276 ++++ .../unit/gapic/redis_v1/test_cloud_redis.py | 2273 +++++++ .../tests/unit/samplegen/test_samplegen.py | 12 +- 37 files changed, 24710 insertions(+), 4 deletions(-) mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json mode 
change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 963bbf8b04e8..341fa8a26f2d 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -153,6 +153,12 @@ def _generate_samples_and_manifest( autogen_specs = list( samplegen.generate_sample_specs(api_schema, opts=opts)) + # TODO: Support the generation of REST snippets. + autogen_specs = [ + spec for spec in autogen_specs + if spec["transport"] != api.TRANSPORT_REST + ] + # Also process any handwritten sample specs handwritten_specs = samplegen.parse_handwritten_specs( self._sample_configs) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 12ebd93d0441..b0481acc1c21 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -990,6 +990,15 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess return request +def _transport_type_from_transport(transport: str) -> str: + if transport == api.TRANSPORT_GRPC: + return "sync" + elif transport == api.TRANSPORT_GRPC_ASYNC: + return "async" + else: # api.TRANSPORT_REST + return "rest" + + def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, Any], None, None]: """Given an API, generate basic sample specs for each method. 
@@ -1006,10 +1015,10 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A api_short_name = api_schema.services[f"{api_schema.naming.proto_package}.{service_name}"].shortname api_version = api_schema.naming.version for transport, client in service.clients.items(): - transport_type = "async" if transport == api.TRANSPORT_GRPC_ASYNC else "sync" + transport_type = _transport_type_from_transport(transport) for rpc_name, method_list in client.rpcs.items(): # Region Tag Format: - # [{START|END} ${apishortname}_${apiVersion}_generated_${serviceName}_${rpcName}_{sync|async}] + # [{START|END} ${apishortname}_${apiVersion}_generated_${serviceName}_${rpcName}_{sync|async|rest}] region_tag = f"{api_short_name}_{api_version}_generated_{service_name}_{rpc_name}_{transport_type}" spec = { "rpc": rpc_name, diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 04b3c07bf899..5f1f0dd762f0 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -55,6 +55,7 @@ py_gapic_library( opt_args = [ "autogen-snippets", ], + transport = "grpc+rest", ) # Credentials. 
@@ -65,6 +66,7 @@ py_gapic_library( opt_args = [ "autogen-snippets", ], + transport = "grpc+rest", ) py_test( @@ -89,6 +91,7 @@ py_gapic_library( "python-gapic-name=eventarc", "autogen-snippets", ], + transport = "grpc+rest", ) py_test( @@ -113,6 +116,7 @@ py_gapic_library( "python-gapic-name=logging", "autogen-snippets", ], + transport = "grpc+rest", ) # Uncomment once https://github.com/googleapis/gapic-generator-python/issues/1359 is fixed @@ -135,6 +139,7 @@ py_gapic_library( opt_args = [ "autogen-snippets", ], + transport = "grpc+rest", ) py_test( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json old mode 100644 new mode 100755 index eebf25a106eb..c87ac115e961 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json @@ -136,6 +136,71 @@ ] } } + }, + "rest": { + "libraryClient": "AssetServiceClient", + "rpcs": { + "AnalyzeIamPolicy": { + "methods": [ + "analyze_iam_policy" + ] + }, + "AnalyzeIamPolicyLongrunning": { + "methods": [ + "analyze_iam_policy_longrunning" + ] + }, + "BatchGetAssetsHistory": { + "methods": [ + "batch_get_assets_history" + ] + }, + "CreateFeed": { + "methods": [ + "create_feed" + ] + }, + "DeleteFeed": { + "methods": [ + "delete_feed" + ] + }, + "ExportAssets": { + "methods": [ + "export_assets" + ] + }, + "GetFeed": { + "methods": [ + "get_feed" + ] + }, + "ListAssets": { + "methods": [ + "list_assets" + ] + }, + "ListFeeds": { + "methods": [ + "list_feeds" + ] + }, + "SearchAllIamPolicies": { + "methods": [ + "search_all_iam_policies" + ] + }, + "SearchAllResources": { + "methods": [ + "search_all_resources" + ] + }, + "UpdateFeed": { + "methods": [ + "update_feed" + ] + } + } } } } diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index e9ddfbf6d6aa..7d4ee39dac21 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -43,6 +43,7 @@ from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import AssetServiceGrpcTransport from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport +from .transports.rest import AssetServiceRestTransport class AssetServiceClientMeta(type): @@ -55,6 +56,7 @@ class AssetServiceClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] _transport_registry["grpc"] = AssetServiceGrpcTransport _transport_registry["grpc_asyncio"] = AssetServiceGrpcAsyncIOTransport + _transport_registry["rest"] = AssetServiceRestTransport def get_transport_class(cls, label: str = None, @@ -316,6 +318,9 @@ def __init__(self, *, transport (Union[str, AssetServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py old mode 100644 new mode 100755 index 252f766b88e8..81433d87a9a7 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -19,15 +19,20 @@ from .base import AssetServiceTransport from .grpc import AssetServiceGrpcTransport from .grpc_asyncio import AssetServiceGrpcAsyncIOTransport +from .rest import AssetServiceRestTransport +from .rest import AssetServiceRestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[AssetServiceTransport]] _transport_registry['grpc'] = AssetServiceGrpcTransport _transport_registry['grpc_asyncio'] = AssetServiceGrpcAsyncIOTransport +_transport_registry['rest'] = AssetServiceRestTransport __all__ = ( 'AssetServiceTransport', 'AssetServiceGrpcTransport', 'AssetServiceGrpcAsyncIOTransport', + 'AssetServiceRestTransport', + 'AssetServiceRestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py new file mode 100755 index 000000000000..ddd8b7a9f465 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -0,0 +1,1540 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.asset_v1.types import asset_service +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class AssetServiceRestInterceptor: + """Interceptor for AssetService. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AssetServiceRestTransport. + + .. code-block:: python + class MyCustomAssetServiceInterceptor(AssetServiceRestInterceptor): + def pre_analyze_iam_policy(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_iam_policy(response): + logging.log(f"Received response: {response}") + + def pre_analyze_iam_policy_longrunning(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_iam_policy_longrunning(response): + logging.log(f"Received response: {response}") + + def pre_batch_get_assets_history(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_assets_history(response): + logging.log(f"Received response: {response}") + + def pre_create_feed(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_feed(response): + logging.log(f"Received response: {response}") + + def pre_delete_feed(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_export_assets(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_assets(response): + logging.log(f"Received response: {response}") + + def pre_get_feed(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_feed(response): + logging.log(f"Received response: {response}") + + def pre_list_assets(request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_list_assets(response): + logging.log(f"Received response: {response}") + + def pre_list_feeds(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_feeds(response): + logging.log(f"Received response: {response}") + + def pre_search_all_iam_policies(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_all_iam_policies(response): + logging.log(f"Received response: {response}") + + def pre_search_all_resources(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_all_resources(response): + logging.log(f"Received response: {response}") + + def pre_update_feed(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_feed(response): + logging.log(f"Received response: {response}") + + transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) + client = AssetServiceClient(transport=transport) + + + """ + def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: + """Post-rpc interceptor for analyze_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_iam_policy_longrunning + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for analyze_iam_policy_longrunning + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_get_assets_history + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: + """Post-rpc interceptor for batch_get_assets_history + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_feed + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. 
+ """ + return request, metadata + + def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: + """Post-rpc interceptor for create_feed + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_feed + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for export_assets + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_feed + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. 
+ """ + return request, metadata + + def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: + """Post-rpc interceptor for get_feed + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: + """Post-rpc interceptor for list_assets + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_feeds + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: + """Post-rpc interceptor for list_feeds + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_all_iam_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: + """Post-rpc interceptor for search_all_iam_policies + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for search_all_resources + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: + """Post-rpc interceptor for search_all_resources + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_feed + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. 
+ """ + return request, metadata + + def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: + """Post-rpc interceptor for update_feed + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AssetServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AssetServiceRestInterceptor + + +class AssetServiceRestTransport(AssetServiceTransport): + """REST backend transport for AssetService. + + Asset service definition. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'cloudasset.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[AssetServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+    # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the +    # credentials object +        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) +        if maybe_url_match is None: +            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER + +        url_match_items = maybe_url_match.groupdict() + +        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + +        super().__init__( +            host=host, +            credentials=credentials, +            client_info=client_info, +            always_use_jwt_access=always_use_jwt_access, +            api_audience=api_audience +        ) +        self._session = AuthorizedSession( +            self._credentials, default_host=self.DEFAULT_HOST) +        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None +        if client_cert_source_for_mtls: +            self._session.configure_mtls_channel(client_cert_source_for_mtls) +        self._interceptor = interceptor or AssetServiceRestInterceptor() +        self._prep_wrapped_messages(client_info) + +    @property +    def operations_client(self) -> operations_v1.AbstractOperationsClient: +        """Create the client designed to process long-running operations. + +        This property caches on the instance; repeated calls return the same +        client. +        """ +        # Only create a new client if we do not already have one. +        if self._operations_client is None: +            http_options: Dict[str, List[Dict[str, str]]] = { +            } + +            rest_transport = operations_v1.OperationsRestTransport( +                host=self._host, +                # use the credentials which are saved +                credentials=self._credentials, +                scopes=self._scopes, +                http_options=http_options) + +            self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + +        # Return the client from cache. 
+ return self._operations_client + + class _AnalyzeIamPolicy(AssetServiceRestStub): + def __hash__(self): + return hash("AnalyzeIamPolicy") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "analysisQuery" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.AnalyzeIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.AnalyzeIamPolicyResponse: + r"""Call the analyze iam policy method over HTTP. + + Args: + request (~.asset_service.AnalyzeIamPolicyRequest): + The request object. A request message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.AnalyzeIamPolicyResponse: + A response message for + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', + }, + ] + request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) + pb_request = asset_service.AnalyzeIamPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.AnalyzeIamPolicyResponse() + pb_resp = asset_service.AnalyzeIamPolicyResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_iam_policy(resp) + return resp + + class _AnalyzeIamPolicyLongrunning(AssetServiceRestStub): + def __hash__(self): + return hash("AnalyzeIamPolicyLongrunning") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the analyze iam policy + longrunning method over HTTP. + + Args: + request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): + The request object. A request message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) + pb_request = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) + return resp + + class _BatchGetAssetsHistory(AssetServiceRestStub): + def __hash__(self): + return hash("BatchGetAssetsHistory") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.BatchGetAssetsHistoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.BatchGetAssetsHistoryResponse: + r"""Call the batch get assets history method over HTTP. + + Args: + request (~.asset_service.BatchGetAssetsHistoryRequest): + The request object. Batch get assets history request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.BatchGetAssetsHistoryResponse: + Batch get assets history response. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', + }, + ] + request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) + pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.BatchGetAssetsHistoryResponse() + pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_assets_history(resp) + return resp + + class _CreateFeed(AssetServiceRestStub): + def __hash__(self): + return hash("CreateFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.CreateFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the create feed method over HTTP. + + Args: + request (~.asset_service.CreateFeedRequest): + The request object. Create asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}/feeds', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_feed(request, metadata) + pb_request = asset_service.CreateFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_feed(resp) + return resp + + class _DeleteFeed(AssetServiceRestStub): + def __hash__(self): + return hash("DeleteFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.DeleteFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete feed method over HTTP. + + Args: + request (~.asset_service.DeleteFeedRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_feed(request, metadata) + pb_request = asset_service.DeleteFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExportAssets(AssetServiceRestStub): + def __hash__(self): + return hash("ExportAssets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.ExportAssetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the export assets method over HTTP. + + Args: + request (~.asset_service.ExportAssetsRequest): + The request object. Export asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}:exportAssets', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_export_assets(request, metadata) + pb_request = asset_service.ExportAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_assets(resp) + return resp + + class _GetFeed(AssetServiceRestStub): + def __hash__(self): + return hash("GetFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.GetFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the get feed method over HTTP. + + Args: + request (~.asset_service.GetFeedRequest): + The request object. Get asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + request, metadata = self._interceptor.pre_get_feed(request, metadata) + pb_request = asset_service.GetFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_feed(resp) + return resp + + class _ListAssets(AssetServiceRestStub): + def __hash__(self): + return hash("ListAssets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.ListAssetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.ListAssetsResponse: + r"""Call the list assets method over HTTP. 
+ + Args: + request (~.asset_service.ListAssetsRequest): + The request object. ListAssets request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.ListAssetsResponse: + ListAssets response. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}/assets', + }, + ] + request, metadata = self._interceptor.pre_list_assets(request, metadata) + pb_request = asset_service.ListAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.ListAssetsResponse() + pb_resp = asset_service.ListAssetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_assets(resp) + return resp + + class _ListFeeds(AssetServiceRestStub): + def __hash__(self): + return hash("ListFeeds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.ListFeedsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.ListFeedsResponse: + r"""Call the list feeds method over HTTP. + + Args: + request (~.asset_service.ListFeedsRequest): + The request object. List asset feeds request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.asset_service.ListFeedsResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}/feeds', + }, + ] + request, metadata = self._interceptor.pre_list_feeds(request, metadata) + pb_request = asset_service.ListFeedsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.ListFeedsResponse() + pb_resp = asset_service.ListFeedsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_feeds(resp) + return resp + + class _SearchAllIamPolicies(AssetServiceRestStub): + def __hash__(self): + return hash("SearchAllIamPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.SearchAllIamPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SearchAllIamPoliciesResponse: + r"""Call the search all iam policies method over HTTP. + + Args: + request (~.asset_service.SearchAllIamPoliciesRequest): + The request object. Search all IAM policies request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SearchAllIamPoliciesResponse: + Search all IAM policies response. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', + }, + ] + request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) + pb_request = asset_service.SearchAllIamPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SearchAllIamPoliciesResponse() + pb_resp = asset_service.SearchAllIamPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_iam_policies(resp) + return resp + + class _SearchAllResources(AssetServiceRestStub): + def __hash__(self): + return hash("SearchAllResources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.SearchAllResourcesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SearchAllResourcesResponse: + r"""Call the search all resources method over HTTP. + + Args: + request (~.asset_service.SearchAllResourcesRequest): + The request object. Search all resources request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SearchAllResourcesResponse: + Search all resources response. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:searchAllResources', + }, + ] + request, metadata = self._interceptor.pre_search_all_resources(request, metadata) + pb_request = asset_service.SearchAllResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SearchAllResourcesResponse() + pb_resp = asset_service.SearchAllResourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_resources(resp) + return resp + + class _UpdateFeed(AssetServiceRestStub): + def __hash__(self): + return hash("UpdateFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.UpdateFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the update feed method over HTTP. + + Args: + request (~.asset_service.UpdateFeedRequest): + The request object. Update asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{feed.name=*/*/feeds/*}', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_update_feed(request, metadata) + pb_request = asset_service.UpdateFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_feed(resp) + return resp + + @property + def analyze_iam_policy(self) -> Callable[ + [asset_service.AnalyzeIamPolicyRequest], + asset_service.AnalyzeIamPolicyResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_iam_policy_longrunning(self) -> Callable[ + [asset_service.AnalyzeIamPolicyLongrunningRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + + @property + def batch_get_assets_history(self) -> Callable[ + [asset_service.BatchGetAssetsHistoryRequest], + asset_service.BatchGetAssetsHistoryResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_feed(self) -> Callable[ + [asset_service.CreateFeedRequest], + asset_service.Feed]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_feed(self) -> Callable[ + [asset_service.DeleteFeedRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_assets(self) -> Callable[ + [asset_service.ExportAssetsRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_feed(self) -> Callable[ + [asset_service.GetFeedRequest], + asset_service.Feed]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_assets(self) -> Callable[ + [asset_service.ListAssetsRequest], + asset_service.ListAssetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_feeds(self) -> Callable[ + [asset_service.ListFeedsRequest], + asset_service.ListFeedsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_all_iam_policies(self) -> Callable[ + [asset_service.SearchAllIamPoliciesRequest], + asset_service.SearchAllIamPoliciesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_all_resources(self) -> Callable[ + [asset_service.SearchAllResourcesRequest], + asset_service.SearchAllResourcesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_feed(self) -> Callable[ + [asset_service.UpdateFeedRequest], + asset_service.Feed]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'AssetServiceRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py old mode 100644 new mode 100755 index 4fc860508d78..0ffce3d9bdcd --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -84,6 +91,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), (AssetServiceAsyncClient, "grpc_asyncio"), + (AssetServiceClient, "rest"), ]) def test_asset_service_client_from_service_account_info(client_class, 
transport_name): creds = ga_credentials.AnonymousCredentials() @@ -96,12 +104,16 @@ def test_asset_service_client_from_service_account_info(client_class, transport_ assert client.transport._host == ( 'cloudasset.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudasset.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.AssetServiceGrpcTransport, "grpc"), (transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AssetServiceRestTransport, "rest"), ]) def test_asset_service_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -118,6 +130,7 @@ def test_asset_service_client_service_account_always_use_jwt(transport_class, tr @pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), (AssetServiceAsyncClient, "grpc_asyncio"), + (AssetServiceClient, "rest"), ]) def test_asset_service_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -133,6 +146,9 @@ def test_asset_service_client_from_service_account_file(client_class, transport_ assert client.transport._host == ( 'cloudasset.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://cloudasset.googleapis.com' ) @@ -140,6 +156,7 @@ def test_asset_service_client_get_transport_class(): transport = AssetServiceClient.get_transport_class() available_transports = [ transports.AssetServiceGrpcTransport, + transports.AssetServiceRestTransport, ] assert transport in available_transports @@ -150,6 +167,7 @@ def test_asset_service_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + 
(AssetServiceClient, transports.AssetServiceRestTransport, "rest"), ]) @mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) @mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) @@ -269,6 +287,8 @@ def test_asset_service_client_client_options(client_class, transport_class, tran (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", "false"), (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), ]) @mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) @mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) @@ -406,6 +426,7 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), ]) def test_asset_service_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. 
@@ -430,6 +451,7 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc", grpc_helpers), (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest", None), ]) def test_asset_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. @@ -3591,6 +3613,2748 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): ) in kw['metadata'] +@pytest.mark.parametrize("request_type", [ + asset_service.ExportAssetsRequest, + dict, +]) +def test_export_assets_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.export_assets(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.export_assets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_export_assets_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.export_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = 
Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = asset_service.ExportAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_assets(request) + + +def test_export_assets_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListAssetsRequest, + dict, +]) +def test_list_assets_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_assets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_assets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_assets_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + + request = asset_service.ListAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListAssetsResponse() + + client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_assets(request) + + +def test_list_assets_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.ListAssetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_assets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) + + +def test_list_assets_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assets( + asset_service.ListAssetsRequest(), + parent='parent_value', + ) + + +def test_list_assets_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_assets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.Asset) + for i in results) + + pages = list(client.list_assets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.BatchGetAssetsHistoryResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_get_assets_history(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + + +def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.BatchGetAssetsHistoryResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.batch_get_assets_history(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_batch_get_assets_history_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_assets_history_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + + request = asset_service.BatchGetAssetsHistoryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.BatchGetAssetsHistoryResponse() + + client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.batch_get_assets_history(request) + + +def test_batch_get_assets_history_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateFeedRequest, + dict, +]) +def test_create_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["feed_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + jsonified_request["feedId"] = 'feed_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "feedId" in jsonified_request + assert jsonified_request["feedId"] == 'feed_id_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as 
post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.CreateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_feed(request) + + +def test_create_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + + +def test_create_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_feed( + asset_service.CreateFeedRequest(), + parent='parent_value', + ) + + +def test_create_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetFeedRequest, + dict, +]) +def test_get_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.GetFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_feed(request) + + +def test_get_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/feeds/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_get_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_feed( + asset_service.GetFeedRequest(), + name='name_value', + ) + + +def test_get_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListFeedsRequest, + dict, +]) +def test_list_feeds_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_feeds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.ListFeedsResponse) + + +def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate 
an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_feeds(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_feeds_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_feeds._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_feeds_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + + request = asset_service.ListFeedsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListFeedsResponse() + + client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_feeds(request) + + +def test_list_feeds_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListFeedsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_feeds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + + +def test_list_feeds_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_feeds( + asset_service.ListFeedsRequest(), + parent='parent_value', + ) + + +def test_list_feeds_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateFeedRequest, + dict, +]) +def test_update_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_feed(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + + +def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.UpdateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_feed(request) + + +def test_update_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + feed=asset_service.Feed(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_update_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_feed( + asset_service.UpdateFeedRequest(), + feed=asset_service.Feed(name='name_value'), + ) + + +def test_update_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_feed(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + pre.assert_not_called() + pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = asset_service.DeleteFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_feed(request) + + +def test_delete_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/feeds/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_delete_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_feed( + asset_service.DeleteFeedRequest(), + name='name_value', + ) + + +def test_delete_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllResourcesRequest, + dict, +]) +def test_search_all_resources_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_resources(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllResourcesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.search_all_resources(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_all_resources_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_all_resources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_resources_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + + request = asset_service.SearchAllResourcesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllResourcesResponse() + + client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_resources(request) + + +def test_search_all_resources_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.search_all_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) + + +def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_resources( + asset_service.SearchAllResourcesRequest(), + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + +def test_search_all_resources_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.search_all_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.ResourceSearchResult) + for i in results) + + pages = list(client.search_all_resources(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllIamPoliciesRequest, + dict, +]) +def test_search_all_iam_policies_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_iam_policies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.search_all_iam_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_all_iam_policies_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_iam_policies_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", 
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + + request = asset_service.SearchAllIamPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllIamPoliciesResponse() + + client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_iam_policies(request) + + +def test_search_all_iam_policies_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllIamPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + query='query_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.search_all_iam_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + + +def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_iam_policies( + asset_service.SearchAllIamPoliciesRequest(), + scope='scope_value', + query='query_value', + ) + + +def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.search_all_iam_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.IamPolicySearchResult) + for i in results) + + pages = list(client.search_all_iam_policies(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True + + +def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("analysis_query", "execution_timeout", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.analyze_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_analyze_iam_policy_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", )) & set(("analysisQuery", ))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + + request = asset_service.AnalyzeIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeIamPolicyResponse() + + client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_iam_policy(request) + + +def test_analyze_iam_policy_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, +]) +def test_analyze_iam_policy_longrunning_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_iam_policy_longrunning(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.analyze_iam_policy_longrunning(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_iam_policy_longrunning(request) + + +def test_analyze_iam_policy_longrunning_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.AssetServiceGrpcTransport( @@ -3669,6 +6433,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport, + transports.AssetServiceRestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -3679,6 +6444,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = AssetServiceClient.get_transport_class(transport_name)( @@ -3816,6 +6582,7 @@ def test_asset_service_transport_auth_adc(transport_class): [ transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport, + transports.AssetServiceRestTransport, ], ) def test_asset_service_transport_auth_gdch_credentials(transport_class): @@ -3912,10 +6679,37 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_asset_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.AssetServiceRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_asset_service_rest_lro_client(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_asset_service_host_no_port(transport_name): client = AssetServiceClient( @@ -3925,11 +6719,14 @@ def test_asset_service_host_no_port(transport_name): ) assert client.transport._host == ( 'cloudasset.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudasset.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_asset_service_host_with_port(transport_name): client = AssetServiceClient( @@ -3939,8 +6736,60 @@ def test_asset_service_host_with_port(transport_name): ) assert client.transport._host == ( 'cloudasset.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://cloudasset.googleapis.com:8000' ) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_asset_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AssetServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AssetServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.export_assets._session + session2 = client2.transport.export_assets._session + assert session1 != session2 + session1 = client1.transport.list_assets._session + session2 = client2.transport.list_assets._session + assert session1 != session2 + session1 = client1.transport.batch_get_assets_history._session + session2 = client2.transport.batch_get_assets_history._session + assert session1 != session2 + session1 = client1.transport.create_feed._session + session2 = client2.transport.create_feed._session + assert session1 != session2 + session1 = client1.transport.get_feed._session + session2 = client2.transport.get_feed._session + assert session1 != session2 + session1 = 
client1.transport.list_feeds._session + session2 = client2.transport.list_feeds._session + assert session1 != session2 + session1 = client1.transport.update_feed._session + session2 = client2.transport.update_feed._session + assert session1 != session2 + session1 = client1.transport.delete_feed._session + session2 = client2.transport.delete_feed._session + assert session1 != session2 + session1 = client1.transport.search_all_resources._session + session2 = client2.transport.search_all_resources._session + assert session1 != session2 + session1 = client1.transport.search_all_iam_policies._session + session2 = client2.transport.search_all_iam_policies._session + assert session1 != session2 + session1 = client1.transport.analyze_iam_policy._session + session2 = client2.transport.analyze_iam_policy._session + assert session1 != session2 + session1 = client1.transport.analyze_iam_policy_longrunning._session + session2 = client2.transport.analyze_iam_policy_longrunning._session + assert session1 != session2 def test_asset_service_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -4239,6 +7088,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -4254,6 +7104,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json old mode 100644 new mode 100755 index 82b1d8ae9f6d..511810ee2c7c --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_metadata.json @@ -56,6 +56,31 @@ ] } } + }, 
+ "rest": { + "libraryClient": "IAMCredentialsClient", + "rpcs": { + "GenerateAccessToken": { + "methods": [ + "generate_access_token" + ] + }, + "GenerateIdToken": { + "methods": [ + "generate_id_token" + ] + }, + "SignBlob": { + "methods": [ + "sign_blob" + ] + }, + "SignJwt": { + "methods": [ + "sign_jwt" + ] + } + } } } } diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 8303262fc7c8..6f6e7c1a9615 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -40,6 +40,7 @@ from .transports.base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO from .transports.grpc import IAMCredentialsGrpcTransport from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport +from .transports.rest import IAMCredentialsRestTransport class IAMCredentialsClientMeta(type): @@ -52,6 +53,7 @@ class IAMCredentialsClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] _transport_registry["grpc"] = IAMCredentialsGrpcTransport _transport_registry["grpc_asyncio"] = IAMCredentialsGrpcAsyncIOTransport + _transport_registry["rest"] = IAMCredentialsRestTransport def get_transport_class(cls, label: str = None, @@ -312,6 +314,9 @@ def __init__(self, *, transport (Union[str, IAMCredentialsTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. 
client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py old mode 100644 new mode 100755 index b9626aef55ca..c0a6b189dc4b --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -19,15 +19,20 @@ from .base import IAMCredentialsTransport from .grpc import IAMCredentialsGrpcTransport from .grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport +from .rest import IAMCredentialsRestTransport +from .rest import IAMCredentialsRestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[IAMCredentialsTransport]] _transport_registry['grpc'] = IAMCredentialsGrpcTransport _transport_registry['grpc_asyncio'] = IAMCredentialsGrpcAsyncIOTransport +_transport_registry['rest'] = IAMCredentialsRestTransport __all__ = ( 'IAMCredentialsTransport', 'IAMCredentialsGrpcTransport', 'IAMCredentialsGrpcAsyncIOTransport', + 'IAMCredentialsRestTransport', + 'IAMCredentialsRestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py new file mode 100755 index 000000000000..e99175ac4f93 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -0,0 +1,658 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.credentials_v1.types import common + +from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class IAMCredentialsRestInterceptor: + """Interceptor for IAMCredentials. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the IAMCredentialsRestTransport. + + .. 
code-block:: python + class MyCustomIAMCredentialsInterceptor(IAMCredentialsRestInterceptor): + def pre_generate_access_token(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_access_token(response): + logging.log(f"Received response: {response}") + + def pre_generate_id_token(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_generate_id_token(response): + logging.log(f"Received response: {response}") + + def pre_sign_blob(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_sign_blob(response): + logging.log(f"Received response: {response}") + + def pre_sign_jwt(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_sign_jwt(response): + logging.log(f"Received response: {response}") + + transport = IAMCredentialsRestTransport(interceptor=MyCustomIAMCredentialsInterceptor()) + client = IAMCredentialsClient(transport=transport) + + + """ + def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_access_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the IAMCredentials server. + """ + return request, metadata + + def post_generate_access_token(self, response: common.GenerateAccessTokenResponse) -> common.GenerateAccessTokenResponse: + """Post-rpc interceptor for generate_access_token + + Override in a subclass to manipulate the response + after it is returned by the IAMCredentials server but before + it is returned to user code. 
+ """ + return response + def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for generate_id_token + + Override in a subclass to manipulate the request or metadata + before they are sent to the IAMCredentials server. + """ + return request, metadata + + def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> common.GenerateIdTokenResponse: + """Post-rpc interceptor for generate_id_token + + Override in a subclass to manipulate the response + after it is returned by the IAMCredentials server but before + it is returned to user code. + """ + return response + def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for sign_blob + + Override in a subclass to manipulate the request or metadata + before they are sent to the IAMCredentials server. + """ + return request, metadata + + def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobResponse: + """Post-rpc interceptor for sign_blob + + Override in a subclass to manipulate the response + after it is returned by the IAMCredentials server but before + it is returned to user code. + """ + return response + def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for sign_jwt + + Override in a subclass to manipulate the request or metadata + before they are sent to the IAMCredentials server. 
+ """ + return request, metadata + + def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtResponse: + """Post-rpc interceptor for sign_jwt + + Override in a subclass to manipulate the response + after it is returned by the IAMCredentials server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class IAMCredentialsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: IAMCredentialsRestInterceptor + + +class IAMCredentialsRestTransport(IAMCredentialsTransport): + """REST backend transport for IAMCredentials. + + A service account is a special type of Google account that + belongs to your application or a virtual machine (VM), instead + of to an individual end user. Your application assumes the + identity of the service account to call Google APIs, so that the + users aren't directly involved. + + Service account credentials are used to temporarily assume the + identity of the service account. Supported credential types + include OAuth 2.0 access tokens, OpenID Connect ID tokens, + self-signed JSON Web Tokens (JWTs), and more. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'iamcredentials.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[IAMCredentialsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or IAMCredentialsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GenerateAccessToken(IAMCredentialsRestStub): + def __hash__(self): + return hash("GenerateAccessToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: common.GenerateAccessTokenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> common.GenerateAccessTokenResponse: + r"""Call the generate access token method over HTTP. + + Args: + request (~.common.GenerateAccessTokenRequest): + The request object.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common.GenerateAccessTokenResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_generate_access_token(request, metadata) + pb_request = common.GenerateAccessTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.GenerateAccessTokenResponse() + pb_resp = common.GenerateAccessTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_access_token(resp) + return resp + + class _GenerateIdToken(IAMCredentialsRestStub): + def __hash__(self): + return hash("GenerateIdToken") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: common.GenerateIdTokenRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> common.GenerateIdTokenResponse: + r"""Call the generate id token method over HTTP. + + Args: + request (~.common.GenerateIdTokenRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.common.GenerateIdTokenResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateIdToken', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_generate_id_token(request, metadata) + pb_request = common.GenerateIdTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.GenerateIdTokenResponse() + pb_resp = common.GenerateIdTokenResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_id_token(resp) + return resp + + class _SignBlob(IAMCredentialsRestStub): + def __hash__(self): + return hash("SignBlob") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: common.SignBlobRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> common.SignBlobResponse: + r"""Call the sign blob method over HTTP. + + Args: + request (~.common.SignBlobRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.common.SignBlobResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signBlob', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_sign_blob(request, metadata) + pb_request = common.SignBlobRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.SignBlobResponse() + pb_resp = common.SignBlobResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sign_blob(resp) + return resp + + class _SignJwt(IAMCredentialsRestStub): + def __hash__(self): + return hash("SignJwt") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: common.SignJwtRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> common.SignJwtResponse: + r"""Call the sign jwt method over HTTP. + + Args: + request (~.common.SignJwtRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.common.SignJwtResponse: + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signJwt', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_sign_jwt(request, metadata) + pb_request = common.SignJwtRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.SignJwtResponse() + pb_resp = common.SignJwtResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sign_jwt(resp) + return resp + + @property + def generate_access_token(self) -> Callable[ + [common.GenerateAccessTokenRequest], + common.GenerateAccessTokenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def generate_id_token(self) -> Callable[ + [common.GenerateIdTokenRequest], + common.GenerateIdTokenResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore + + @property + def sign_blob(self) -> Callable[ + [common.SignBlobRequest], + common.SignBlobResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore + + @property + def sign_jwt(self) -> Callable[ + [common.SignJwtRequest], + common.SignJwtResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'IAMCredentialsRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py old mode 100644 new mode 100755 index cc9650d79905..d0d236c8b6da --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -75,6 +82,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), (IAMCredentialsAsyncClient, "grpc_asyncio"), + (IAMCredentialsClient, "rest"), ]) def test_iam_credentials_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -87,12 +95,16 @@ def test_iam_credentials_client_from_service_account_info(client_class, transpor assert client.transport._host == ( 'iamcredentials.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 
'https://iamcredentials.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.IAMCredentialsGrpcTransport, "grpc"), (transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.IAMCredentialsRestTransport, "rest"), ]) def test_iam_credentials_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -109,6 +121,7 @@ def test_iam_credentials_client_service_account_always_use_jwt(transport_class, @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), (IAMCredentialsAsyncClient, "grpc_asyncio"), + (IAMCredentialsClient, "rest"), ]) def test_iam_credentials_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -124,6 +137,9 @@ def test_iam_credentials_client_from_service_account_file(client_class, transpor assert client.transport._host == ( 'iamcredentials.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://iamcredentials.googleapis.com' ) @@ -131,6 +147,7 @@ def test_iam_credentials_client_get_transport_class(): transport = IAMCredentialsClient.get_transport_class() available_transports = [ transports.IAMCredentialsGrpcTransport, + transports.IAMCredentialsRestTransport, ] assert transport in available_transports @@ -141,6 +158,7 @@ def test_iam_credentials_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), ]) @mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) @mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(IAMCredentialsAsyncClient)) @@ -260,6 +278,8 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", "true"), (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", "false"), (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "true"), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "false"), ]) @mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) @mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) @@ -397,6 +417,7 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), ]) def test_iam_credentials_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. 
@@ -421,6 +442,7 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc", grpc_helpers), (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", None), ]) def test_iam_credentials_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. @@ -1550,6 +1572,978 @@ async def test_sign_jwt_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + common.GenerateAccessTokenRequest, + dict, +]) +def test_generate_access_token_rest(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateAccessTokenResponse( + access_token='access_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_access_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.GenerateAccessTokenResponse) + assert response.access_token == 'access_token_value' + + +def test_generate_access_token_rest_required_fields(request_type=common.GenerateAccessTokenRequest): + transport_class = transports.IAMCredentialsRestTransport + + request_init = {} + request_init["name"] = "" + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.GenerateAccessTokenResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.generate_access_token(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_generate_access_token_rest_unset_required_fields(): + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.generate_access_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_access_token_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) + + request = common.GenerateAccessTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GenerateAccessTokenResponse() + + client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_access_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateAccessTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_access_token(request) + + +def test_generate_access_token_rest_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateAccessTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + delegates=['delegates_value'], + scope=['scope_value'], + lifetime=duration_pb2.Duration(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.generate_access_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken" % client.transport._host, args[1]) + + +def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_access_token( + common.GenerateAccessTokenRequest(), + name='name_value', + delegates=['delegates_value'], + scope=['scope_value'], + lifetime=duration_pb2.Duration(seconds=751), + ) + + +def test_generate_access_token_rest_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + common.GenerateIdTokenRequest, + dict, +]) +def test_generate_id_token_rest(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = common.GenerateIdTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_id_token(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.GenerateIdTokenResponse) + assert response.token == 'token_value' + + +def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTokenRequest): + transport_class = transports.IAMCredentialsRestTransport + + request_init = {} + request_init["name"] = "" + request_init["audience"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["audience"] = 'audience_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "audience" in jsonified_request + assert jsonified_request["audience"] == 'audience_value' + + client = IAMCredentialsClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.GenerateIdTokenResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.generate_id_token(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_generate_id_token_rest_unset_required_fields(): + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.generate_id_token._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "audience", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_id_token_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor 
else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) + + request = common.GenerateIdTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GenerateIdTokenResponse() + + client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateIdTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.generate_id_token(request) + + +def test_generate_id_token_rest_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateIdTokenResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + delegates=['delegates_value'], + audience='audience_value', + include_email=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.generate_id_token(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:generateIdToken" % client.transport._host, args[1]) + + +def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.generate_id_token( + common.GenerateIdTokenRequest(), + name='name_value', + delegates=['delegates_value'], + audience='audience_value', + include_email=True, + ) + + +def test_generate_id_token_rest_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + common.SignBlobRequest, + dict, +]) +def test_sign_blob_rest(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sign_blob(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, common.SignBlobResponse) + assert response.key_id == 'key_id_value' + assert response.signed_blob == b'signed_blob_blob' + + +def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): + transport_class = transports.IAMCredentialsRestTransport + + request_init = {} + request_init["name"] = "" + request_init["payload"] = b'' + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["payload"] = b'payload_blob' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "payload" in jsonified_request + assert jsonified_request["payload"] == b'payload_blob' + + client = 
IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.SignBlobResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.sign_blob(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_sign_blob_rest_unset_required_fields(): + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.sign_blob._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "payload", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sign_blob_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else 
transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.SignBlobResponse.to_json(common.SignBlobResponse()) + + request = common.SignBlobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.SignBlobResponse() + + client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_sign_blob_rest_bad_request(transport: str = 'rest', request_type=common.SignBlobRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.sign_blob(request) + + +def test_sign_blob_rest_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignBlobResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + delegates=['delegates_value'], + payload=b'payload_blob', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.sign_blob(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:signBlob" % client.transport._host, args[1]) + + +def test_sign_blob_rest_flattened_error(transport: str = 'rest'): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.sign_blob( + common.SignBlobRequest(), + name='name_value', + delegates=['delegates_value'], + payload=b'payload_blob', + ) + + +def test_sign_blob_rest_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + common.SignJwtRequest, + dict, +]) +def test_sign_jwt_rest(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sign_jwt(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.SignJwtResponse) + assert response.key_id == 'key_id_value' + assert response.signed_jwt == 'signed_jwt_value' + + +def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): + transport_class = transports.IAMCredentialsRestTransport + + request_init = {} + request_init["name"] = "" + request_init["payload"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["payload"] = 'payload_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "payload" in jsonified_request + assert jsonified_request["payload"] == 'payload_value' + + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.SignJwtResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.sign_jwt(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_sign_jwt_rest_unset_required_fields(): + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.sign_jwt._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "payload", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sign_jwt_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.SignJwtResponse.to_json(common.SignJwtResponse()) + + request = common.SignJwtRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.SignJwtResponse() + + client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_sign_jwt_rest_bad_request(transport: str = 'rest', request_type=common.SignJwtRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.sign_jwt(request) + + +def test_sign_jwt_rest_flattened(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = common.SignJwtResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/serviceAccounts/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.sign_jwt(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/serviceAccounts/*}:signJwt" % client.transport._host, args[1]) + + +def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.sign_jwt( + common.SignJwtRequest(), + name='name_value', + delegates=['delegates_value'], + payload='payload_value', + ) + + +def test_sign_jwt_rest_error(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.IAMCredentialsGrpcTransport( @@ -1628,6 +2622,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport, + transports.IAMCredentialsRestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -1638,6 +2633,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = IAMCredentialsClient.get_transport_class(transport_name)( @@ -1762,6 +2758,7 @@ def test_iam_credentials_transport_auth_adc(transport_class): [ transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport, + transports.IAMCredentialsRestTransport, ], ) def test_iam_credentials_transport_auth_gdch_credentials(transport_class): @@ -1858,10 +2855,20 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_iam_credentials_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.IAMCredentialsRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_iam_credentials_host_no_port(transport_name): client = IAMCredentialsClient( @@ -1871,11 +2878,14 @@ def test_iam_credentials_host_no_port(transport_name): ) assert client.transport._host == ( 'iamcredentials.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://iamcredentials.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def 
test_iam_credentials_host_with_port(transport_name): client = IAMCredentialsClient( @@ -1885,8 +2895,36 @@ def test_iam_credentials_host_with_port(transport_name): ) assert client.transport._host == ( 'iamcredentials.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://iamcredentials.googleapis.com:8000' ) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_iam_credentials_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = IAMCredentialsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = IAMCredentialsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.generate_access_token._session + session2 = client2.transport.generate_access_token._session + assert session1 != session2 + session1 = client1.transport.generate_id_token._session + session2 = client2.transport.generate_id_token._session + assert session1 != session2 + session1 = client1.transport.sign_blob._session + session2 = client2.transport.sign_blob._session + assert session1 != session2 + session1 = client1.transport.sign_jwt._session + session2 = client2.transport.sign_jwt._session + assert session1 != session2 def test_iam_credentials_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -2136,6 +3174,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2151,6 +3190,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json old mode 100644 new mode 100755 index 
62658e1a8252..f9d69749721b --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json @@ -66,6 +66,36 @@ ] } } + }, + "rest": { + "libraryClient": "EventarcClient", + "rpcs": { + "CreateTrigger": { + "methods": [ + "create_trigger" + ] + }, + "DeleteTrigger": { + "methods": [ + "delete_trigger" + ] + }, + "GetTrigger": { + "methods": [ + "get_trigger" + ] + }, + "ListTriggers": { + "methods": [ + "list_triggers" + ] + }, + "UpdateTrigger": { + "methods": [ + "update_trigger" + ] + } + } } } } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index d46c6e44133d..64156b8d954c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -45,6 +45,7 @@ from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO from .transports.grpc import EventarcGrpcTransport from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport +from .transports.rest import EventarcRestTransport class EventarcClientMeta(type): @@ -57,6 +58,7 @@ class EventarcClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] _transport_registry["grpc"] = EventarcGrpcTransport _transport_registry["grpc_asyncio"] = EventarcGrpcAsyncIOTransport + _transport_registry["rest"] = EventarcRestTransport def get_transport_class(cls, label: str = None, @@ -332,6 +334,9 @@ def __init__(self, *, transport (Union[str, EventarcTransport]): The transport to use. If set to None, a transport is chosen automatically. 
+ NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py old mode 100644 new mode 100755 index f9a66261b154..ceddbaa0edbd --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py @@ -19,15 +19,20 @@ from .base import EventarcTransport from .grpc import EventarcGrpcTransport from .grpc_asyncio import EventarcGrpcAsyncIOTransport +from .rest import EventarcRestTransport +from .rest import EventarcRestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[EventarcTransport]] _transport_registry['grpc'] = EventarcGrpcTransport _transport_registry['grpc_asyncio'] = EventarcGrpcAsyncIOTransport +_transport_registry['rest'] = EventarcRestTransport __all__ = ( 'EventarcTransport', 'EventarcGrpcTransport', 'EventarcGrpcAsyncIOTransport', + 'EventarcRestTransport', + 'EventarcRestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py new file mode 100755 index 000000000000..edceb0477687 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -0,0 +1,785 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import trigger +from google.longrunning import operations_pb2 # type: ignore + +from .base import EventarcTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class EventarcRestInterceptor: + """Interceptor for Eventarc. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the EventarcRestTransport. + + .. 
code-block:: python + class MyCustomEventarcInterceptor(EventarcRestInterceptor): + def pre_create_trigger(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_trigger(response): + logging.log(f"Received response: {response}") + + def pre_delete_trigger(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_trigger(response): + logging.log(f"Received response: {response}") + + def pre_get_trigger(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_trigger(response): + logging.log(f"Received response: {response}") + + def pre_list_triggers(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_triggers(response): + logging.log(f"Received response: {response}") + + def pre_update_trigger(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_trigger(response): + logging.log(f"Received response: {response}") + + transport = EventarcRestTransport(interceptor=MyCustomEventarcInterceptor()) + client = EventarcClient(transport=transport) + + + """ + def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_trigger + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_trigger + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: + """Post-rpc interceptor for get_trigger + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: + """Post-rpc interceptor for list_triggers + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_trigger + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class EventarcRestStub: + _session: AuthorizedSession + _host: str + _interceptor: EventarcRestInterceptor + + +class EventarcRestTransport(EventarcTransport): + """REST backend transport for Eventarc. + + Eventarc allows users to subscribe to various events that are + provided by Google Cloud services and forward them to supported + destinations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'eventarc.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[EventarcRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EventarcRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options) + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateTrigger(EventarcRestStub): + def __hash__(self): + return hash("CreateTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "triggerId" : "", "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.CreateTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create trigger method over HTTP. + + Args: + request (~.eventarc.CreateTriggerRequest): + The request object. The request message for the + CreateTrigger method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_create_trigger(request, metadata) + pb_request = eventarc.CreateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_trigger(resp) + return resp + + class _DeleteTrigger(EventarcRestStub): + def __hash__(self): + return hash("DeleteTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete trigger method over HTTP. + + Args: + request (~.eventarc.DeleteTriggerRequest): + The request object. The request message for the + DeleteTrigger method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_trigger(request, metadata) + pb_request = eventarc.DeleteTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_trigger(resp) + return resp + + class _GetTrigger(EventarcRestStub): + def __hash__(self): + return hash("GetTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> trigger.Trigger: + r"""Call the get trigger method over HTTP. 
+ + Args: + request (~.eventarc.GetTriggerRequest): + The request object. The request message for the + GetTrigger method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.trigger.Trigger: + A representation of the trigger + resource. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_trigger(request, metadata) + pb_request = eventarc.GetTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = trigger.Trigger() + pb_resp = trigger.Trigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_trigger(resp) + return resp + + class _ListTriggers(EventarcRestStub): + def __hash__(self): + return hash("ListTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListTriggersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListTriggersResponse: + r"""Call the list triggers method over HTTP. + + Args: + request (~.eventarc.ListTriggersRequest): + The request object. The request message for the + ListTriggers method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListTriggersResponse: + The response message for the + ListTriggers method. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + }, + ] + request, metadata = self._interceptor.pre_list_triggers(request, metadata) + pb_request = eventarc.ListTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = eventarc.ListTriggersResponse() + pb_resp = eventarc.ListTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_triggers(resp) + return resp + + class _UpdateTrigger(EventarcRestStub): + def __hash__(self): + return hash("UpdateTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.UpdateTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update trigger method over HTTP. + + Args: + request (~.eventarc.UpdateTriggerRequest): + The request object. The request message for the + UpdateTrigger method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_update_trigger(request, metadata) + pb_request = eventarc.UpdateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_trigger(resp) + return resp + + @property + def create_trigger(self) -> Callable[ + [eventarc.CreateTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_trigger(self) -> Callable[ + [eventarc.DeleteTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_trigger(self) -> Callable[ + [eventarc.GetTriggerRequest], + trigger.Trigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_triggers(self) -> Callable[ + [eventarc.ListTriggersRequest], + eventarc.ListTriggersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_trigger(self) -> Callable[ + [eventarc.UpdateTriggerRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'EventarcRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py old mode 100644 new mode 100755 index 4b89a35c2a43..6c692beb9d2c --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -83,6 +90,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), (EventarcAsyncClient, "grpc_asyncio"), + (EventarcClient, "rest"), ]) def test_eventarc_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -95,12 +103,16 @@ def test_eventarc_client_from_service_account_info(client_class, transport_name) assert client.transport._host == ( 'eventarc.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://eventarc.googleapis.com' ) 
@pytest.mark.parametrize("transport_class,transport_name", [ (transports.EventarcGrpcTransport, "grpc"), (transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.EventarcRestTransport, "rest"), ]) def test_eventarc_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -117,6 +129,7 @@ def test_eventarc_client_service_account_always_use_jwt(transport_class, transpo @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), (EventarcAsyncClient, "grpc_asyncio"), + (EventarcClient, "rest"), ]) def test_eventarc_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -132,6 +145,9 @@ def test_eventarc_client_from_service_account_file(client_class, transport_name) assert client.transport._host == ( 'eventarc.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://eventarc.googleapis.com' ) @@ -139,6 +155,7 @@ def test_eventarc_client_get_transport_class(): transport = EventarcClient.get_transport_class() available_transports = [ transports.EventarcGrpcTransport, + transports.EventarcRestTransport, ] assert transport in available_transports @@ -149,6 +166,7 @@ def test_eventarc_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (EventarcClient, transports.EventarcGrpcTransport, "grpc"), (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), + (EventarcClient, transports.EventarcRestTransport, "rest"), ]) @mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) @mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) @@ -268,6 +286,8 @@ def test_eventarc_client_client_options(client_class, transport_class, transport (EventarcAsyncClient, 
transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", "true"), (EventarcClient, transports.EventarcGrpcTransport, "grpc", "false"), (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (EventarcClient, transports.EventarcRestTransport, "rest", "true"), + (EventarcClient, transports.EventarcRestTransport, "rest", "false"), ]) @mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) @mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) @@ -405,6 +425,7 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (EventarcClient, transports.EventarcGrpcTransport, "grpc"), (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), + (EventarcClient, transports.EventarcRestTransport, "rest"), ]) def test_eventarc_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -429,6 +450,7 @@ def test_eventarc_client_client_options_scopes(client_class, transport_class, tr @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (EventarcClient, transports.EventarcGrpcTransport, "grpc", grpc_helpers), (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (EventarcClient, transports.EventarcRestTransport, "rest", None), ]) def test_eventarc_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -1934,6 +1956,1275 @@ async def test_delete_trigger_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + eventarc.GetTriggerRequest, + dict, +]) +def test_get_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.etag == 'etag_value' + + +def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_trigger(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = trigger.Trigger.to_json(trigger.Trigger()) + + request = eventarc.GetTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = trigger.Trigger() + + client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_trigger(request) + + +def test_get_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = trigger.Trigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_get_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_trigger( + eventarc.GetTriggerRequest(), + name='name_value', + ) + + +def test_get_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListTriggersRequest, + dict, +]) +def test_list_triggers_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_triggers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTriggersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_triggers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_triggers_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_triggers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + + request = eventarc.ListTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListTriggersResponse() + + client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_triggers(request) + + +def test_list_triggers_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = eventarc.ListTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_list_triggers_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_triggers( + eventarc.ListTriggersRequest(), + parent='parent_value', + ) + + +def test_list_triggers_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + trigger.Trigger(), + ], + next_page_token='abc', + ), + eventarc.ListTriggersResponse( + triggers=[], + next_page_token='def', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + ], + next_page_token='ghi', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, trigger.Trigger) + for i in results) + + pages = list(client.list_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateTriggerRequest, + dict, +]) +def test_create_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 
'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_trigger(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["trigger_id"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "triggerId" not in jsonified_request + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] 
== request_init["trigger_id"] + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["triggerId"] = 'trigger_id_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("trigger_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == 'trigger_id_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_trigger(request) + + expected_params = [ + ( + "triggerId", + "", + ), + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.CreateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_trigger(request) + + +def test_create_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_create_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_trigger( + eventarc.CreateTriggerRequest(), + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + +def test_create_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateTriggerRequest, + dict, +]) +def test_update_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_trigger(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, 
"_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.UpdateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_trigger(request) + + +def test_update_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_update_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_trigger( + eventarc.UpdateTriggerRequest(), + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + +def test_update_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["name"] = 'name_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_trigger(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.DeleteTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_trigger(request) + + +def test_delete_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + allow_missing=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_trigger( + eventarc.DeleteTriggerRequest(), + name='name_value', + allow_missing=True, + ) + + +def test_delete_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EventarcGrpcTransport( @@ -2012,6 +3303,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -2022,6 +3314,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = EventarcClient.get_transport_class(transport_name)( @@ -2152,6 +3445,7 @@ def test_eventarc_transport_auth_adc(transport_class): [ transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, ], ) def test_eventarc_transport_auth_gdch_credentials(transport_class): @@ -2248,10 +3542,37 @@ def test_eventarc_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_eventarc_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.EventarcRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_eventarc_rest_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we 
have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_eventarc_host_no_port(transport_name): client = EventarcClient( @@ -2261,11 +3582,14 @@ def test_eventarc_host_no_port(transport_name): ) assert client.transport._host == ( 'eventarc.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_eventarc_host_with_port(transport_name): client = EventarcClient( @@ -2275,8 +3599,39 @@ def test_eventarc_host_with_port(transport_name): ) assert client.transport._host == ( 'eventarc.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com:8000' ) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_eventarc_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EventarcClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EventarcClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_trigger._session + session2 = client2.transport.get_trigger._session + assert session1 != session2 + session1 = client1.transport.list_triggers._session + session2 = client2.transport.list_triggers._session + assert session1 != session2 + session1 = client1.transport.create_trigger._session + session2 = client2.transport.create_trigger._session + assert session1 != session2 + session1 = client1.transport.update_trigger._session + session2 = client2.transport.update_trigger._session + assert session1 != 
session2 + session1 = client1.transport.delete_trigger._session + session2 = client2.transport.delete_trigger._session + assert session1 != session2 def test_eventarc_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -2596,6 +3951,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2611,6 +3967,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json old mode 100644 new mode 100755 index da4eefd477fc..69112af60b31 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json @@ -246,6 +246,126 @@ ] } } + }, + "rest": { + "libraryClient": "ConfigServiceV2Client", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateExclusion": { + "methods": [ + "create_exclusion" + ] + }, + "CreateSink": { + "methods": [ + "create_sink" + ] + }, + "CreateView": { + "methods": [ + "create_view" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { + "methods": [ + "delete_exclusion" + ] + }, + "DeleteSink": { + "methods": [ + "delete_sink" + ] + }, + "DeleteView": { + "methods": [ + "delete_view" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { + "methods": [ + "get_cmek_settings" + ] + }, + "GetExclusion": { + "methods": [ + "get_exclusion" + ] + }, + "GetSink": { + "methods": [ + "get_sink" + ] + }, + "GetView": { + "methods": [ + "get_view" + ] + }, + "ListBuckets": { + "methods": [ + 
"list_buckets" + ] + }, + "ListExclusions": { + "methods": [ + "list_exclusions" + ] + }, + "ListSinks": { + "methods": [ + "list_sinks" + ] + }, + "ListViews": { + "methods": [ + "list_views" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateCmekSettings": { + "methods": [ + "update_cmek_settings" + ] + }, + "UpdateExclusion": { + "methods": [ + "update_exclusion" + ] + }, + "UpdateSink": { + "methods": [ + "update_sink" + ] + }, + "UpdateView": { + "methods": [ + "update_view" + ] + } + } } } }, @@ -320,6 +440,41 @@ ] } } + }, + "rest": { + "libraryClient": "LoggingServiceV2Client", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } } } }, @@ -384,6 +539,36 @@ ] } } + }, + "rest": { + "libraryClient": "MetricsServiceV2Client", + "rpcs": { + "CreateLogMetric": { + "methods": [ + "create_log_metric" + ] + }, + "DeleteLogMetric": { + "methods": [ + "delete_log_metric" + ] + }, + "GetLogMetric": { + "methods": [ + "get_log_metric" + ] + }, + "ListLogMetrics": { + "methods": [ + "list_log_metrics" + ] + }, + "UpdateLogMetric": { + "methods": [ + "update_log_metric" + ] + } + } } } } diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 7f4af3e10838..393c816a60a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -41,6 +41,7 @@ from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from .transports.rest import ConfigServiceV2RestTransport class ConfigServiceV2ClientMeta(type): @@ -53,6 +54,7 @@ class ConfigServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport + _transport_registry["rest"] = ConfigServiceV2RestTransport def get_transport_class(cls, label: str = None, @@ -347,6 +349,9 @@ def __init__(self, *, transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py old mode 100644 new mode 100755 index 5472642dc8a1..1fc7dd79c53a --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -19,15 +19,20 @@ from .base import ConfigServiceV2Transport from .grpc import ConfigServiceV2GrpcTransport from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from .rest import ConfigServiceV2RestTransport +from .rest import ConfigServiceV2RestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry['grpc'] = ConfigServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport +_transport_registry['rest'] = ConfigServiceV2RestTransport __all__ = ( 'ConfigServiceV2Transport', 'ConfigServiceV2GrpcTransport', 'ConfigServiceV2GrpcAsyncIOTransport', + 'ConfigServiceV2RestTransport', + 'ConfigServiceV2RestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py new file mode 100755 index 000000000000..f27ee0360b62 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -0,0 +1,3125 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google 
LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.logging_v2.types import logging_config +from google.protobuf import empty_pb2 # type: ignore + +from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ConfigServiceV2RestInterceptor: + """Interceptor for ConfigServiceV2. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ConfigServiceV2RestTransport. + + .. code-block:: python + class MyCustomConfigServiceV2Interceptor(ConfigServiceV2RestInterceptor): + def pre_create_bucket(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_bucket(response): + logging.log(f"Received response: {response}") + + def pre_create_exclusion(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_exclusion(response): + logging.log(f"Received response: {response}") + + def pre_create_sink(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_sink(response): + logging.log(f"Received response: {response}") + + def pre_create_view(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_view(response): + logging.log(f"Received response: {response}") + + def pre_delete_bucket(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_exclusion(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_sink(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_view(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_bucket(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_bucket(response): + logging.log(f"Received response: {response}") + 
+ def pre_get_cmek_settings(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_cmek_settings(response): + logging.log(f"Received response: {response}") + + def pre_get_exclusion(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_exclusion(response): + logging.log(f"Received response: {response}") + + def pre_get_sink(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_sink(response): + logging.log(f"Received response: {response}") + + def pre_get_view(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_view(response): + logging.log(f"Received response: {response}") + + def pre_list_buckets(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_buckets(response): + logging.log(f"Received response: {response}") + + def pre_list_exclusions(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_exclusions(response): + logging.log(f"Received response: {response}") + + def pre_list_sinks(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_sinks(response): + logging.log(f"Received response: {response}") + + def pre_list_views(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_views(response): + logging.log(f"Received response: {response}") + + def pre_undelete_bucket(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_update_bucket(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_bucket(response): + logging.log(f"Received response: {response}") + + def pre_update_cmek_settings(request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def post_update_cmek_settings(response): + logging.log(f"Received response: {response}") + + def pre_update_exclusion(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_exclusion(response): + logging.log(f"Received response: {response}") + + def pre_update_sink(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_sink(response): + logging.log(f"Received response: {response}") + + def pre_update_view(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_view(response): + logging.log(f"Received response: {response}") + + transport = ConfigServiceV2RestTransport(interceptor=MyCustomConfigServiceV2Interceptor()) + client = ConfigServiceV2Client(transport=transport) + + + """ + def pre_create_bucket(self, request: logging_config.CreateBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_bucket + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_create_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: + """Post-rpc interceptor for create_bucket + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_create_exclusion(self, request: logging_config.CreateExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateExclusionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_exclusion + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. 
+ """ + return request, metadata + + def post_create_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: + """Post-rpc interceptor for create_exclusion + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_create_sink(self, request: logging_config.CreateSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateSinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_sink + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_create_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: + """Post-rpc interceptor for create_sink + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_create_view(self, request: logging_config.CreateViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateViewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_create_view(self, response: logging_config.LogView) -> logging_config.LogView: + """Post-rpc interceptor for create_view + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_delete_bucket(self, request: logging_config.DeleteBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_bucket + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def pre_delete_exclusion(self, request: logging_config.DeleteExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteExclusionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_exclusion + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def pre_delete_sink(self, request: logging_config.DeleteSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteSinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_sink + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def pre_delete_view(self, request: logging_config.DeleteViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteViewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def pre_get_bucket(self, request: logging_config.GetBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_bucket + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. 
+ """ + return request, metadata + + def post_get_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: + """Post-rpc interceptor for get_bucket + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_get_cmek_settings(self, request: logging_config.GetCmekSettingsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetCmekSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_cmek_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_get_cmek_settings(self, response: logging_config.CmekSettings) -> logging_config.CmekSettings: + """Post-rpc interceptor for get_cmek_settings + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_get_exclusion(self, request: logging_config.GetExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetExclusionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_exclusion + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_get_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: + """Post-rpc interceptor for get_exclusion + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_get_sink(self, request: logging_config.GetSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetSinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_sink + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_get_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: + """Post-rpc interceptor for get_sink + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_get_view(self, request: logging_config.GetViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetViewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_get_view(self, response: logging_config.LogView) -> logging_config.LogView: + """Post-rpc interceptor for get_view + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_list_buckets(self, request: logging_config.ListBucketsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListBucketsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_buckets + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. 
+ """ + return request, metadata + + def post_list_buckets(self, response: logging_config.ListBucketsResponse) -> logging_config.ListBucketsResponse: + """Post-rpc interceptor for list_buckets + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_list_exclusions(self, request: logging_config.ListExclusionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListExclusionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_exclusions + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_list_exclusions(self, response: logging_config.ListExclusionsResponse) -> logging_config.ListExclusionsResponse: + """Post-rpc interceptor for list_exclusions + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_list_sinks(self, request: logging_config.ListSinksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListSinksRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_sinks + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_list_sinks(self, response: logging_config.ListSinksResponse) -> logging_config.ListSinksResponse: + """Post-rpc interceptor for list_sinks + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_list_views(self, request: logging_config.ListViewsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListViewsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_views + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_list_views(self, response: logging_config.ListViewsResponse) -> logging_config.ListViewsResponse: + """Post-rpc interceptor for list_views + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_undelete_bucket(self, request: logging_config.UndeleteBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UndeleteBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for undelete_bucket + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def pre_update_bucket(self, request: logging_config.UpdateBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateBucketRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_bucket + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_update_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: + """Post-rpc interceptor for update_bucket + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_update_cmek_settings(self, request: logging_config.UpdateCmekSettingsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateCmekSettingsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_cmek_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_update_cmek_settings(self, response: logging_config.CmekSettings) -> logging_config.CmekSettings: + """Post-rpc interceptor for update_cmek_settings + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_update_exclusion(self, request: logging_config.UpdateExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateExclusionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_exclusion + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_update_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: + """Post-rpc interceptor for update_exclusion + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_update_sink(self, request: logging_config.UpdateSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateSinkRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_sink + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. 
+ """ + return request, metadata + + def post_update_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: + """Post-rpc interceptor for update_sink + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + def pre_update_view(self, request: logging_config.UpdateViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateViewRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_view + + Override in a subclass to manipulate the request or metadata + before they are sent to the ConfigServiceV2 server. + """ + return request, metadata + + def post_update_view(self, response: logging_config.LogView) -> logging_config.LogView: + """Post-rpc interceptor for update_view + + Override in a subclass to manipulate the response + after it is returned by the ConfigServiceV2 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ConfigServiceV2RestStub: + _session: AuthorizedSession + _host: str + _interceptor: ConfigServiceV2RestInterceptor + + +class ConfigServiceV2RestTransport(ConfigServiceV2Transport): + """REST backend transport for ConfigServiceV2. + + Service for configuring sinks used to route log entries. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[ConfigServiceV2RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ConfigServiceV2RestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateBucket(ConfigServiceV2RestStub): + def __hash__(self): + return hash("CreateBucket") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "bucketId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.CreateBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogBucket: + r"""Call the create bucket method over HTTP. + + Args: + request (~.logging_config.CreateBucketRequest): + The request object. The parameters to ``CreateBucket``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=*/*/locations/*}/buckets', + 'body': 'bucket', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*}/buckets', + 'body': 'bucket', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*}/buckets', + 'body': 'bucket', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=folders/*/locations/*}/buckets', + 'body': 'bucket', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=billingAccounts/*/locations/*}/buckets', + 'body': 'bucket', + }, + ] + request, metadata = self._interceptor.pre_create_bucket(request, metadata) + pb_request = logging_config.CreateBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogBucket() + pb_resp = logging_config.LogBucket.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_bucket(resp) + return resp + + class _CreateExclusion(ConfigServiceV2RestStub): + def __hash__(self): + return hash("CreateExclusion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.CreateExclusionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogExclusion: + r"""Call the create exclusion method over HTTP. + + Args: + request (~.logging_config.CreateExclusionRequest): + The request object. The parameters to ``CreateExclusion``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=*/*}/exclusions', + 'body': 'exclusion', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/exclusions', + 'body': 'exclusion', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/exclusions', + 'body': 'exclusion', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=folders/*}/exclusions', + 'body': 'exclusion', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=billingAccounts/*}/exclusions', + 'body': 'exclusion', + }, + ] + request, metadata = self._interceptor.pre_create_exclusion(request, metadata) + pb_request = logging_config.CreateExclusionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogExclusion() + pb_resp = logging_config.LogExclusion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_exclusion(resp) + return resp + + class _CreateSink(ConfigServiceV2RestStub): + def __hash__(self): + return hash("CreateSink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.CreateSinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogSink: + r"""Call the create sink method over HTTP. + + Args: + request (~.logging_config.CreateSinkRequest): + The request object. The parameters to ``CreateSink``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=*/*}/sinks', + 'body': 'sink', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/sinks', + 'body': 'sink', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*}/sinks', + 'body': 'sink', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=folders/*}/sinks', + 'body': 'sink', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=billingAccounts/*}/sinks', + 'body': 'sink', + }, + ] + request, metadata = self._interceptor.pre_create_sink(request, metadata) + pb_request = logging_config.CreateSinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogSink() + pb_resp = logging_config.LogSink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_sink(resp) + return resp + + class _CreateView(ConfigServiceV2RestStub): + def __hash__(self): + return hash("CreateView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "viewId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.CreateViewRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogView: + r"""Call the create view method over HTTP. + + Args: + request (~.logging_config.CreateViewRequest): + The request object. The parameters to ``CreateView``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=*/*/locations/*/buckets/*}/views', + 'body': 'view', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*/locations/*/buckets/*}/views', + 'body': 'view', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=organizations/*/locations/*/buckets/*}/views', + 'body': 'view', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=folders/*/locations/*/buckets/*}/views', + 'body': 'view', + }, +{ + 'method': 'post', + 'uri': '/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views', + 'body': 'view', + }, + ] + request, metadata = self._interceptor.pre_create_view(request, metadata) + pb_request = logging_config.CreateViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogView() + pb_resp = logging_config.LogView.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_view(resp) + return resp + + class _DeleteBucket(ConfigServiceV2RestStub): + def __hash__(self): + return hash("DeleteBucket") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.DeleteBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete bucket method over HTTP. + + Args: + request (~.logging_config.DeleteBucketRequest): + The request object. The parameters to ``DeleteBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=*/*/locations/*/buckets/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_bucket(request, metadata) + pb_request = logging_config.DeleteBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteExclusion(ConfigServiceV2RestStub): + def __hash__(self): + return hash("DeleteExclusion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.DeleteExclusionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete exclusion method over HTTP. + + Args: + request (~.logging_config.DeleteExclusionRequest): + The request object. The parameters to ``DeleteExclusion``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=*/*/exclusions/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/exclusions/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/exclusions/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=folders/*/exclusions/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_exclusion(request, metadata) + pb_request = logging_config.DeleteExclusionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSink(ConfigServiceV2RestStub): + def __hash__(self): + return hash("DeleteSink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.DeleteSinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete sink method over HTTP. + + Args: + request (~.logging_config.DeleteSinkRequest): + The request object. The parameters to ``DeleteSink``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{sink_name=*/*/sinks/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{sink_name=projects/*/sinks/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{sink_name=organizations/*/sinks/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{sink_name=folders/*/sinks/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_sink(request, metadata) + pb_request = logging_config.DeleteSinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteView(ConfigServiceV2RestStub): + def __hash__(self): + return hash("DeleteView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.DeleteViewRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete view method over HTTP. + + Args: + request (~.logging_config.DeleteViewRequest): + The request object. The parameters to ``DeleteView``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_view(request, metadata) + pb_request = logging_config.DeleteViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetBucket(ConfigServiceV2RestStub): + def __hash__(self): + return hash("GetBucket") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.GetBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogBucket: + r"""Call the get bucket method over HTTP. + + Args: + request (~.logging_config.GetBucketRequest): + The request object. The parameters to ``GetBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=*/*/locations/*/buckets/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=billingAccounts/*/buckets/*}', + }, + ] + request, metadata = self._interceptor.pre_get_bucket(request, metadata) + pb_request = logging_config.GetBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = logging_config.LogBucket()
+            pb_resp = logging_config.LogBucket.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_bucket(resp)
+            return resp
+
+    class _GetCmekSettings(ConfigServiceV2RestStub):
+        def __hash__(self):
+            return hash("GetCmekSettings")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: logging_config.GetCmekSettingsRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: float=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> logging_config.CmekSettings:
+            r"""Call the get cmek settings method over HTTP.
+
+            Args:
+                request (~.logging_config.GetCmekSettingsRequest):
+                    The request object. The parameters to
+                [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings].
+
+                See `Enabling CMEK for Logs
+                Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+                for more information.
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.logging_config.CmekSettings:
+                    Describes the customer-managed encryption key (CMEK)
+                settings associated with a project, folder,
+                organization, billing account, or flexible resource.
+
+                Note: CMEK for the Logs Router can currently only be
+                configured for GCP organizations. Once configured, it
+                applies to all projects and folders in the GCP
+                organization.
+
+                See `Enabling CMEK for Logs
+                Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+                for more information.
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=*/*}/cmekSettings', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*}/cmekSettings', + }, + ] + request, metadata = self._interceptor.pre_get_cmek_settings(request, metadata) + pb_request = logging_config.GetCmekSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.CmekSettings() + pb_resp = logging_config.CmekSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_cmek_settings(resp) + return resp + + class _GetExclusion(ConfigServiceV2RestStub): + def __hash__(self): + return hash("GetExclusion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.GetExclusionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogExclusion: + r"""Call the get exclusion method over HTTP. + + Args: + request (~.logging_config.GetExclusionRequest): + The request object. The parameters to ``GetExclusion``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=*/*/exclusions/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/exclusions/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/exclusions/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=folders/*/exclusions/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', + }, + ] + request, metadata = self._interceptor.pre_get_exclusion(request, metadata) + pb_request = logging_config.GetExclusionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogExclusion() + pb_resp = logging_config.LogExclusion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_exclusion(resp) + return resp + + class _GetSink(ConfigServiceV2RestStub): + def __hash__(self): + return hash("GetSink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.GetSinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogSink: + r"""Call the get sink method over HTTP. + + Args: + request (~.logging_config.GetSinkRequest): + The request object. The parameters to ``GetSink``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{sink_name=*/*/sinks/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{sink_name=projects/*/sinks/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{sink_name=organizations/*/sinks/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{sink_name=folders/*/sinks/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', + }, + ] + request, metadata = self._interceptor.pre_get_sink(request, metadata) + pb_request = logging_config.GetSinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogSink() + pb_resp = logging_config.LogSink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_sink(resp) + return resp + + class _GetView(ConfigServiceV2RestStub): + def __hash__(self): + return hash("GetView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.GetViewRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogView: + r"""Call the get view method over HTTP. + + Args: + request (~.logging_config.GetViewRequest): + The request object. The parameters to ``GetView``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', + }, +{ + 'method': 'get', + 'uri': '/v2/{name=billingAccounts/*/buckets/*/views/*}', + }, + ] + request, metadata = self._interceptor.pre_get_view(request, metadata) + pb_request = logging_config.GetViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogView() + pb_resp = logging_config.LogView.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_view(resp) + return resp + + class _ListBuckets(ConfigServiceV2RestStub): + def __hash__(self): + return hash("ListBuckets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.ListBucketsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.ListBucketsResponse: + r"""Call the list buckets method over HTTP. + + Args: + request (~.logging_config.ListBucketsRequest): + The request object. The parameters to ``ListBuckets``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.ListBucketsResponse: + The response from ListBuckets. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=*/*/locations/*}/buckets', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*}/buckets', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*}/buckets', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=folders/*/locations/*}/buckets', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=billingAccounts/*/locations/*}/buckets', + }, + ] + request, metadata = self._interceptor.pre_list_buckets(request, metadata) + pb_request = logging_config.ListBucketsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.ListBucketsResponse() + pb_resp = logging_config.ListBucketsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_buckets(resp) + return resp + + class _ListExclusions(ConfigServiceV2RestStub): + def __hash__(self): + return hash("ListExclusions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.ListExclusionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.ListExclusionsResponse: + r"""Call the list exclusions method over HTTP. + + Args: + request (~.logging_config.ListExclusionsRequest): + The request object. The parameters to ``ListExclusions``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.ListExclusionsResponse: + Result returned from ``ListExclusions``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=*/*}/exclusions', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/exclusions', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/exclusions', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=folders/*}/exclusions', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=billingAccounts/*}/exclusions', + }, + ] + request, metadata = self._interceptor.pre_list_exclusions(request, metadata) + pb_request = logging_config.ListExclusionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.ListExclusionsResponse() + pb_resp = logging_config.ListExclusionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_exclusions(resp) + return resp + + class _ListSinks(ConfigServiceV2RestStub): + def __hash__(self): + return hash("ListSinks") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.ListSinksRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.ListSinksResponse: + r"""Call the list sinks method over HTTP. + + Args: + request (~.logging_config.ListSinksRequest): + The request object. The parameters to ``ListSinks``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.ListSinksResponse: + Result returned from ``ListSinks``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=*/*}/sinks', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/sinks', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/sinks', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=folders/*}/sinks', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=billingAccounts/*}/sinks', + }, + ] + request, metadata = self._interceptor.pre_list_sinks(request, metadata) + pb_request = logging_config.ListSinksRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.ListSinksResponse() + pb_resp = logging_config.ListSinksResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_sinks(resp) + return resp + + class _ListViews(ConfigServiceV2RestStub): + def __hash__(self): + return hash("ListViews") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.ListViewsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.ListViewsResponse: + r"""Call the list views method over HTTP. + + Args: + request (~.logging_config.ListViewsRequest): + The request object. The parameters to ``ListViews``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.ListViewsResponse: + The response from ListViews. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=*/*/locations/*/buckets/*}/views', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*/locations/*/buckets/*}/views', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*/locations/*/buckets/*}/views', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=folders/*/locations/*/buckets/*}/views', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views', + }, + ] + request, metadata = self._interceptor.pre_list_views(request, metadata) + pb_request = logging_config.ListViewsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.ListViewsResponse() + pb_resp = logging_config.ListViewsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_views(resp) + return resp + + class _UndeleteBucket(ConfigServiceV2RestStub): + def __hash__(self): + return hash("UndeleteBucket") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.UndeleteBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the undelete bucket method over HTTP. + + Args: + request (~.logging_config.UndeleteBucketRequest): + The request object. The parameters to ``UndeleteBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=*/*/locations/*/buckets/*}:undelete', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*}:undelete', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}:undelete', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*}:undelete', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_undelete_bucket(request, metadata) + pb_request = logging_config.UndeleteBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _UpdateBucket(ConfigServiceV2RestStub): + def __hash__(self): + return hash("UpdateBucket") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.UpdateBucketRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogBucket: + r"""Call the update bucket method over HTTP. + + Args: + request (~.logging_config.UpdateBucketRequest): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogBucket: + Describes a repository of logs. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=*/*/locations/*/buckets/*}', + 'body': 'bucket', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', + 'body': 'bucket', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', + 'body': 'bucket', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', + 'body': 'bucket', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}', + 'body': 'bucket', + }, + ] + request, metadata = self._interceptor.pre_update_bucket(request, metadata) + pb_request = logging_config.UpdateBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogBucket() + pb_resp = logging_config.LogBucket.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_bucket(resp) + return resp + + class _UpdateCmekSettings(ConfigServiceV2RestStub): + def __hash__(self): + return hash("UpdateCmekSettings") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.UpdateCmekSettingsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.CmekSettings: + r"""Call the update cmek settings method over HTTP. + + Args: + request (~.logging_config.UpdateCmekSettingsRequest): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Logs + Router `__ + for more information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.CmekSettings: + Describes the customer-managed encryption key (CMEK) + settings associated with a project, folder, + organization, billing account, or flexible resource. + + Note: CMEK for the Logs Router can currently only be + configured for GCP organizations. Once configured, it + applies to all projects and folders in the GCP + organization. + + See `Enabling CMEK for Logs + Router `__ + for more information. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=*/*}/cmekSettings', + 'body': 'cmek_settings', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*}/cmekSettings', + 'body': 'cmek_settings', + }, + ] + request, metadata = self._interceptor.pre_update_cmek_settings(request, metadata) + pb_request = logging_config.UpdateCmekSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.CmekSettings() + pb_resp = logging_config.CmekSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_cmek_settings(resp) + return resp + + class _UpdateExclusion(ConfigServiceV2RestStub): + def __hash__(self): + return hash("UpdateExclusion") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.UpdateExclusionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogExclusion: + r"""Call the update exclusion method over HTTP. + + Args: + request (~.logging_config.UpdateExclusionRequest): + The request object. The parameters to ``UpdateExclusion``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogExclusion: + Specifies a set of log entries that + are not to be stored in Logging. If your + GCP resource receives a large volume of + logs, you can use exclusions to reduce + your chargeable logs. Exclusions are + processed after log sinks, so you can + export log entries before they are + excluded. Note that organization-level + and folder-level exclusions don't apply + to child resources, and that you can't + exclude audit log entries. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=*/*/exclusions/*}', + 'body': 'exclusion', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/exclusions/*}', + 'body': 'exclusion', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/exclusions/*}', + 'body': 'exclusion', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=folders/*/exclusions/*}', + 'body': 'exclusion', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', + 'body': 'exclusion', + }, + ] + request, metadata = self._interceptor.pre_update_exclusion(request, metadata) + pb_request = logging_config.UpdateExclusionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogExclusion() + pb_resp = logging_config.LogExclusion.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_exclusion(resp) + return resp + + class _UpdateSink(ConfigServiceV2RestStub): + def __hash__(self): + return hash("UpdateSink") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.UpdateSinkRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogSink: + r"""Call the update sink method over HTTP. + + Args: + request (~.logging_config.UpdateSinkRequest): + The request object. The parameters to ``UpdateSink``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, or a + Cloud Pub/Sub topic. A logs filter + controls which log entries are exported. + The sink must be created within a + project, organization, billing account, + or folder. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/v2/{sink_name=*/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'put', + 'uri': '/v2/{sink_name=projects/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'put', + 'uri': '/v2/{sink_name=organizations/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'put', + 'uri': '/v2/{sink_name=folders/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'put', + 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'patch', + 'uri': '/v2/{sink_name=projects/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'patch', + 'uri': '/v2/{sink_name=organizations/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'patch', + 'uri': '/v2/{sink_name=folders/*/sinks/*}', + 'body': 'sink', + }, +{ + 'method': 'patch', + 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', + 'body': 'sink', + }, + ] + request, metadata = self._interceptor.pre_update_sink(request, metadata) + pb_request = logging_config.UpdateSinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogSink() + pb_resp = logging_config.LogSink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_sink(resp) + return resp + + class _UpdateView(ConfigServiceV2RestStub): + def __hash__(self): + return hash("UpdateView") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_config.UpdateViewRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_config.LogView: + r"""Call the update view method over HTTP. + + Args: + request (~.logging_config.UpdateViewRequest): + The request object. The parameters to ``UpdateView``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_config.LogView: + Describes a view over logs in a + bucket. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', + 'body': 'view', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', + 'body': 'view', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', + 'body': 'view', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', + 'body': 'view', + }, +{ + 'method': 'patch', + 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}', + 'body': 'view', + }, + ] + request, metadata = self._interceptor.pre_update_view(request, metadata) + pb_request = logging_config.UpdateViewRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_config.LogView() + pb_resp = logging_config.LogView.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_view(resp) + return resp + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + logging_config.LogBucket]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBucket(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + logging_config.LogExclusion]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateExclusion(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + logging_config.LogSink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSink(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + logging_config.LogView]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateView(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBucket(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteExclusion(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSink(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteView(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + logging_config.LogBucket]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBucket(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + logging_config.CmekSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetCmekSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + logging_config.LogExclusion]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetExclusion(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + logging_config.LogSink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSink(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_view(self) -> Callable[ + [logging_config.GetViewRequest], + logging_config.LogView]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetView(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + logging_config.ListBucketsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBuckets(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + logging_config.ListExclusionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListExclusions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + logging_config.ListSinksResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSinks(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + logging_config.ListViewsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListViews(self._session, self._host, self._interceptor) # type: ignore + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UndeleteBucket(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + logging_config.LogBucket]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBucket(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + logging_config.CmekSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateCmekSettings(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + logging_config.LogExclusion]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateExclusion(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + logging_config.LogSink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSink(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + logging_config.LogView]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateView(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'ConfigServiceV2RestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index e278c6deac03..899efe17817c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -41,6 +41,7 @@ from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from .transports.rest import LoggingServiceV2RestTransport class LoggingServiceV2ClientMeta(type): @@ -53,6 +54,7 @@ class LoggingServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport + _transport_registry["rest"] = LoggingServiceV2RestTransport def get_transport_class(cls, label: str = None, @@ -303,6 +305,9 @@ def __init__(self, *, transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. 
It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py old mode 100644 new mode 100755 index 19559a52b02b..1ffa88e973e0 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -19,15 +19,20 @@ from .base import LoggingServiceV2Transport from .grpc import LoggingServiceV2GrpcTransport from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from .rest import LoggingServiceV2RestTransport +from .rest import LoggingServiceV2RestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry['grpc'] = LoggingServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport +_transport_registry['rest'] = LoggingServiceV2RestTransport __all__ = ( 'LoggingServiceV2Transport', 'LoggingServiceV2GrpcTransport', 'LoggingServiceV2GrpcAsyncIOTransport', + 'LoggingServiceV2RestTransport', + 'LoggingServiceV2RestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py new file mode 100755 index 000000000000..5a12926f94e6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -0,0 +1,766 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.logging_v2.types import logging +from google.protobuf import empty_pb2 # type: ignore + +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class LoggingServiceV2RestInterceptor: + """Interceptor for LoggingServiceV2. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the LoggingServiceV2RestTransport. + + .. 
code-block:: python + class MyCustomLoggingServiceV2Interceptor(LoggingServiceV2RestInterceptor): + def pre_delete_log(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_list_log_entries(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_log_entries(response): + logging.log(f"Received response: {response}") + + def pre_list_logs(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_logs(response): + logging.log(f"Received response: {response}") + + def pre_list_monitored_resource_descriptors(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_monitored_resource_descriptors(response): + logging.log(f"Received response: {response}") + + def pre_write_log_entries(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_write_log_entries(response): + logging.log(f"Received response: {response}") + + transport = LoggingServiceV2RestTransport(interceptor=MyCustomLoggingServiceV2Interceptor()) + client = LoggingServiceV2Client(transport=transport) + + + """ + def pre_delete_log(self, request: logging.DeleteLogRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.DeleteLogRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_log + + Override in a subclass to manipulate the request or metadata + before they are sent to the LoggingServiceV2 server. + """ + return request, metadata + + def pre_list_log_entries(self, request: logging.ListLogEntriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListLogEntriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_log_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the LoggingServiceV2 server. 
+ """ + return request, metadata + + def post_list_log_entries(self, response: logging.ListLogEntriesResponse) -> logging.ListLogEntriesResponse: + """Post-rpc interceptor for list_log_entries + + Override in a subclass to manipulate the response + after it is returned by the LoggingServiceV2 server but before + it is returned to user code. + """ + return response + def pre_list_logs(self, request: logging.ListLogsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListLogsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_logs + + Override in a subclass to manipulate the request or metadata + before they are sent to the LoggingServiceV2 server. + """ + return request, metadata + + def post_list_logs(self, response: logging.ListLogsResponse) -> logging.ListLogsResponse: + """Post-rpc interceptor for list_logs + + Override in a subclass to manipulate the response + after it is returned by the LoggingServiceV2 server but before + it is returned to user code. + """ + return response + def pre_list_monitored_resource_descriptors(self, request: logging.ListMonitoredResourceDescriptorsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListMonitoredResourceDescriptorsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_monitored_resource_descriptors + + Override in a subclass to manipulate the request or metadata + before they are sent to the LoggingServiceV2 server. + """ + return request, metadata + + def post_list_monitored_resource_descriptors(self, response: logging.ListMonitoredResourceDescriptorsResponse) -> logging.ListMonitoredResourceDescriptorsResponse: + """Post-rpc interceptor for list_monitored_resource_descriptors + + Override in a subclass to manipulate the response + after it is returned by the LoggingServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_write_log_entries(self, request: logging.WriteLogEntriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.WriteLogEntriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for write_log_entries + + Override in a subclass to manipulate the request or metadata + before they are sent to the LoggingServiceV2 server. + """ + return request, metadata + + def post_write_log_entries(self, response: logging.WriteLogEntriesResponse) -> logging.WriteLogEntriesResponse: + """Post-rpc interceptor for write_log_entries + + Override in a subclass to manipulate the response + after it is returned by the LoggingServiceV2 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class LoggingServiceV2RestStub: + _session: AuthorizedSession + _host: str + _interceptor: LoggingServiceV2RestInterceptor + + +class LoggingServiceV2RestTransport(LoggingServiceV2Transport): + """REST backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[LoggingServiceV2RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or LoggingServiceV2RestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DeleteLog(LoggingServiceV2RestStub): + def __hash__(self): + return hash("DeleteLog") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging.DeleteLogRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete log method over HTTP. + + Args: + request (~.logging.DeleteLogRequest): + The request object. The parameters to DeleteLog. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{log_name=projects/*/logs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{log_name=*/*/logs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{log_name=organizations/*/logs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{log_name=folders/*/logs/*}', + }, +{ + 'method': 'delete', + 'uri': '/v2/{log_name=billingAccounts/*/logs/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_log(request, metadata) + pb_request = logging.DeleteLogRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ListLogEntries(LoggingServiceV2RestStub): + def __hash__(self): + return hash("ListLogEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging.ListLogEntriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging.ListLogEntriesResponse: + r"""Call the list log entries method over HTTP. + + Args: + request (~.logging.ListLogEntriesRequest): + The request object. The parameters to ``ListLogEntries``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging.ListLogEntriesResponse: + Result returned from ``ListLogEntries``. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/entries:list', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_list_log_entries(request, metadata) + pb_request = logging.ListLogEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging.ListLogEntriesResponse() + pb_resp = logging.ListLogEntriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_log_entries(resp) + return resp + + class _ListLogs(LoggingServiceV2RestStub): + def __hash__(self): + return hash("ListLogs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging.ListLogsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging.ListLogsResponse: + r"""Call the list logs method over HTTP. + + Args: + request (~.logging.ListLogsRequest): + The request object. The parameters to ListLogs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging.ListLogsResponse: + Result returned from ListLogs. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=*/*}/logs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/logs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=organizations/*}/logs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=folders/*}/logs', + }, +{ + 'method': 'get', + 'uri': '/v2/{parent=billingAccounts/*}/logs', + }, + ] + request, metadata = self._interceptor.pre_list_logs(request, metadata) + pb_request = logging.ListLogsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging.ListLogsResponse() + pb_resp = logging.ListLogsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_logs(resp) + return resp + + class _ListMonitoredResourceDescriptors(LoggingServiceV2RestStub): + def __hash__(self): + return hash("ListMonitoredResourceDescriptors") + + def __call__(self, + request: logging.ListMonitoredResourceDescriptorsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging.ListMonitoredResourceDescriptorsResponse: + r"""Call the list monitored resource + descriptors method over HTTP. + + Args: + request (~.logging.ListMonitoredResourceDescriptorsRequest): + The request object. The parameters to + ListMonitoredResourceDescriptors + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging.ListMonitoredResourceDescriptorsResponse: + Result returned from + ListMonitoredResourceDescriptors. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/monitoredResourceDescriptors', + }, + ] + request, metadata = self._interceptor.pre_list_monitored_resource_descriptors(request, metadata) + pb_request = logging.ListMonitoredResourceDescriptorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging.ListMonitoredResourceDescriptorsResponse() + pb_resp = logging.ListMonitoredResourceDescriptorsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_monitored_resource_descriptors(resp) + return resp + + class _TailLogEntries(LoggingServiceV2RestStub): + def __hash__(self): + return hash("TailLogEntries") + + def __call__(self, + request: logging.TailLogEntriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Client streaming over REST is not yet defined for python client") + + class _WriteLogEntries(LoggingServiceV2RestStub): + def __hash__(self): + return hash("WriteLogEntries") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging.WriteLogEntriesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging.WriteLogEntriesResponse: + r"""Call the write log entries method over HTTP. + + Args: + request (~.logging.WriteLogEntriesRequest): + The request object. The parameters to WriteLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging.WriteLogEntriesResponse: + Result returned from WriteLogEntries. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/entries:write', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_write_log_entries(request, metadata) + pb_request = logging.WriteLogEntriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging.WriteLogEntriesResponse() + pb_resp = logging.WriteLogEntriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_write_log_entries(resp) + return resp + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteLog(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + logging.ListLogEntriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLogEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + logging.ListLogsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLogs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + logging.ListMonitoredResourceDescriptorsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListMonitoredResourceDescriptors(self._session, self._host, self._interceptor) # type: ignore + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + logging.TailLogEntriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TailLogEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + logging.WriteLogEntriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._WriteLogEntries(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'LoggingServiceV2RestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index b4bcfb5e1e1d..2ad30449f76e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -42,6 +42,7 @@ from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from .transports.rest import MetricsServiceV2RestTransport class MetricsServiceV2ClientMeta(type): @@ -54,6 +55,7 @@ class MetricsServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport + _transport_registry["rest"] = MetricsServiceV2RestTransport def get_transport_class(cls, label: str = None, @@ -304,6 +306,9 @@ def __init__(self, *, transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. 
It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py old mode 100644 new mode 100755 index 8315e03cfae6..0b03495bfd7f --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -19,15 +19,20 @@ from .base import MetricsServiceV2Transport from .grpc import MetricsServiceV2GrpcTransport from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from .rest import MetricsServiceV2RestTransport +from .rest import MetricsServiceV2RestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry['grpc'] = MetricsServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport +_transport_registry['rest'] = MetricsServiceV2RestTransport __all__ = ( 'MetricsServiceV2Transport', 'MetricsServiceV2GrpcTransport', 'MetricsServiceV2GrpcAsyncIOTransport', + 'MetricsServiceV2RestTransport', + 'MetricsServiceV2RestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py new file mode 100755 index 000000000000..c549623f5f0d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -0,0 +1,749 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.logging_v2.types import logging_metrics +from google.protobuf import empty_pb2 # type: ignore + +from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class MetricsServiceV2RestInterceptor: + """Interceptor for MetricsServiceV2. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the MetricsServiceV2RestTransport. + + .. 
code-block:: python + class MyCustomMetricsServiceV2Interceptor(MetricsServiceV2RestInterceptor): + def pre_create_log_metric(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_log_metric(response): + logging.log(f"Received response: {response}") + + def pre_delete_log_metric(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_log_metric(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_log_metric(response): + logging.log(f"Received response: {response}") + + def pre_list_log_metrics(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_log_metrics(response): + logging.log(f"Received response: {response}") + + def pre_update_log_metric(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_log_metric(response): + logging.log(f"Received response: {response}") + + transport = MetricsServiceV2RestTransport(interceptor=MyCustomMetricsServiceV2Interceptor()) + client = MetricsServiceV2Client(transport=transport) + + + """ + def pre_create_log_metric(self, request: logging_metrics.CreateLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.CreateLogMetricRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_log_metric + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsServiceV2 server. + """ + return request, metadata + + def post_create_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: + """Post-rpc interceptor for create_log_metric + + Override in a subclass to manipulate the response + after it is returned by the MetricsServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_delete_log_metric(self, request: logging_metrics.DeleteLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.DeleteLogMetricRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_log_metric + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsServiceV2 server. + """ + return request, metadata + + def pre_get_log_metric(self, request: logging_metrics.GetLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.GetLogMetricRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_log_metric + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsServiceV2 server. + """ + return request, metadata + + def post_get_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: + """Post-rpc interceptor for get_log_metric + + Override in a subclass to manipulate the response + after it is returned by the MetricsServiceV2 server but before + it is returned to user code. + """ + return response + def pre_list_log_metrics(self, request: logging_metrics.ListLogMetricsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.ListLogMetricsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_log_metrics + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsServiceV2 server. + """ + return request, metadata + + def post_list_log_metrics(self, response: logging_metrics.ListLogMetricsResponse) -> logging_metrics.ListLogMetricsResponse: + """Post-rpc interceptor for list_log_metrics + + Override in a subclass to manipulate the response + after it is returned by the MetricsServiceV2 server but before + it is returned to user code. 
+ """ + return response + def pre_update_log_metric(self, request: logging_metrics.UpdateLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.UpdateLogMetricRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_log_metric + + Override in a subclass to manipulate the request or metadata + before they are sent to the MetricsServiceV2 server. + """ + return request, metadata + + def post_update_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: + """Post-rpc interceptor for update_log_metric + + Override in a subclass to manipulate the response + after it is returned by the MetricsServiceV2 server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class MetricsServiceV2RestStub: + _session: AuthorizedSession + _host: str + _interceptor: MetricsServiceV2RestInterceptor + + +class MetricsServiceV2RestTransport(MetricsServiceV2Transport): + """REST backend transport for MetricsServiceV2. + + Service for configuring logs-based metrics. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! 
+ """ + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[MetricsServiceV2RestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+                always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                    be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint.  Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(f"Unexpected hostname structure: {host}")  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST)
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or MetricsServiceV2RestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CreateLogMetric(MetricsServiceV2RestStub):
+        def __hash__(self):
+            return hash("CreateLogMetric")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] =  {
+        }
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict}
+
+        def __call__(self,
+                request: logging_metrics.CreateLogMetricRequest, *,
+                retry: OptionalRetry=gapic_v1.method.DEFAULT,
+                timeout: float=None,
+                metadata: Sequence[Tuple[str, str]]=(),
+                ) -> logging_metrics.LogMetric:
+            r"""Call the create log metric method over HTTP.
+
+            Args:
+                request (~.logging_metrics.CreateLogMetricRequest):
+                    The request object. The parameters to CreateLogMetric.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_metrics.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{parent=projects/*}/metrics', + 'body': 'metric', + }, + ] + request, metadata = self._interceptor.pre_create_log_metric(request, metadata) + pb_request = logging_metrics.CreateLogMetricRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + 
# subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_metrics.LogMetric() + pb_resp = logging_metrics.LogMetric.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_log_metric(resp) + return resp + + class _DeleteLogMetric(MetricsServiceV2RestStub): + def __hash__(self): + return hash("DeleteLogMetric") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_metrics.DeleteLogMetricRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete log metric method over HTTP. + + Args: + request (~.logging_metrics.DeleteLogMetricRequest): + The request object. The parameters to DeleteLogMetric. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v2/{metric_name=projects/*/metrics/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_log_metric(request, metadata) + pb_request = logging_metrics.DeleteLogMetricRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetLogMetric(MetricsServiceV2RestStub): + def __hash__(self): + return hash("GetLogMetric") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_metrics.GetLogMetricRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_metrics.LogMetric: + r"""Call the get log metric method over HTTP. + + Args: + request (~.logging_metrics.GetLogMetricRequest): + The request object. The parameters to GetLogMetric. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_metrics.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{metric_name=projects/*/metrics/*}', + }, + ] + request, metadata = self._interceptor.pre_get_log_metric(request, metadata) + pb_request = logging_metrics.GetLogMetricRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_metrics.LogMetric() + pb_resp = logging_metrics.LogMetric.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_log_metric(resp) + return resp + + class _ListLogMetrics(MetricsServiceV2RestStub): + def __hash__(self): + return hash("ListLogMetrics") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_metrics.ListLogMetricsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_metrics.ListLogMetricsResponse: + r"""Call the list log metrics method over HTTP. + + Args: + request (~.logging_metrics.ListLogMetricsRequest): + The request object. The parameters to ListLogMetrics. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_metrics.ListLogMetricsResponse: + Result returned from ListLogMetrics. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v2/{parent=projects/*}/metrics', + }, + ] + request, metadata = self._interceptor.pre_list_log_metrics(request, metadata) + pb_request = logging_metrics.ListLogMetricsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_metrics.ListLogMetricsResponse() + pb_resp = logging_metrics.ListLogMetricsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_log_metrics(resp) + return resp + + class _UpdateLogMetric(MetricsServiceV2RestStub): + def __hash__(self): + return hash("UpdateLogMetric") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: logging_metrics.UpdateLogMetricRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> logging_metrics.LogMetric: + r"""Call the update log metric method over HTTP. + + Args: + request (~.logging_metrics.UpdateLogMetricRequest): + The request object. The parameters to UpdateLogMetric. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.logging_metrics.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'put', + 'uri': '/v2/{metric_name=projects/*/metrics/*}', + 'body': 'metric', + }, + ] + request, metadata = self._interceptor.pre_update_log_metric(request, metadata) + pb_request = logging_metrics.UpdateLogMetricRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = logging_metrics.LogMetric() + pb_resp = logging_metrics.LogMetric.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_log_metric(resp) + return resp + + @property + def create_log_metric(self) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + logging_metrics.LogMetric]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateLogMetric(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteLogMetric(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_log_metric(self) -> Callable[ + [logging_metrics.GetLogMetricRequest], + logging_metrics.LogMetric]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetLogMetric(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_log_metrics(self) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + logging_metrics.ListLogMetricsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListLogMetrics(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + logging_metrics.LogMetric]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateLogMetric(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'MetricsServiceV2RestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py old mode 100644 new mode 100755 index af8c6186c49b..9492117b61a5 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -76,6 +83,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), + (ConfigServiceV2Client, "rest"), ]) def test_config_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -88,12 +96,16 @@ def test_config_service_v2_client_from_service_account_info(client_class, transp assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 
'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.ConfigServiceV2GrpcTransport, "grpc"), (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ConfigServiceV2RestTransport, "rest"), ]) def test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -110,6 +122,7 @@ def test_config_service_v2_client_service_account_always_use_jwt(transport_class @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), + (ConfigServiceV2Client, "rest"), ]) def test_config_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -125,6 +138,9 @@ def test_config_service_v2_client_from_service_account_file(client_class, transp assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://logging.googleapis.com' ) @@ -132,6 +148,7 @@ def test_config_service_v2_client_get_transport_class(): transport = ConfigServiceV2Client.get_transport_class() available_transports = [ transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2RestTransport, ] assert transport in available_transports @@ -142,6 +159,7 @@ def test_config_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest"), ]) @mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) @mock.patch.object(ConfigServiceV2AsyncClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) @@ -261,6 +279,8 @@ def test_config_service_v2_client_client_options(client_class, transport_class, (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), + (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", "true"), + (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) @mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) @@ -398,6 +418,7 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest"), ]) def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. 
@@ -422,6 +443,7 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", None), ]) def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. @@ -5949,6 +5971,5126 @@ async def test_update_cmek_settings_field_headers_async(): ) in kw['metadata'] +@pytest.mark.parametrize("request_type", [ + logging_config.ListBucketsRequest, + dict, +]) +def test_list_buckets_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2/locations/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.ListBucketsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.ListBucketsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_buckets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBucketsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_buckets_rest_required_fields(request_type=logging_config.ListBucketsRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_buckets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_buckets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.ListBucketsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.ListBucketsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_buckets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_buckets_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_buckets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_buckets_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_buckets") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_buckets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
logging_config.ListBucketsRequest.pb(logging_config.ListBucketsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.ListBucketsResponse.to_json(logging_config.ListBucketsResponse()) + + request = logging_config.ListBucketsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.ListBucketsResponse() + + client.list_buckets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_buckets_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListBucketsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2/locations/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_buckets(request) + + +def test_list_buckets_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_config.ListBucketsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2/locations/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.ListBucketsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_buckets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=*/*/locations/*}/buckets" % client.transport._host, args[1]) + + +def test_list_buckets_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_buckets( + logging_config.ListBucketsRequest(), + parent='parent_value', + ) + + +def test_list_buckets_rest_pager(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token='abc', + ), + logging_config.ListBucketsResponse( + buckets=[], + next_page_token='def', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token='ghi', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging_config.ListBucketsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2/locations/sample3'} + + pager = client.list_buckets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogBucket) + for i in results) + + pages = list(client.list_buckets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetBucketRequest, + dict, +]) +def test_get_bucket_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_bucket(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_get_bucket_rest_required_fields(request_type=logging_config.GetBucketRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bucket._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogBucket() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_bucket(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_bucket_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_bucket_rest_interceptors(null_interceptor): + 
transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_bucket") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_bucket") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.GetBucketRequest.pb(logging_config.GetBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) + + request = logging_config.GetBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogBucket() + + client.get_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_bucket(request) + + +def test_get_bucket_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateBucketRequest, + dict, +]) +def test_create_bucket_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2/locations/sample3'} + request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_bucket(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_create_bucket_rest_required_fields(request_type=logging_config.CreateBucketRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request_init["bucket_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "bucketId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "bucketId" in jsonified_request + assert jsonified_request["bucketId"] == request_init["bucket_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["bucketId"] = 'bucket_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_bucket._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("bucket_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "bucketId" in jsonified_request + assert jsonified_request["bucketId"] == 'bucket_id_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogBucket() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_bucket(request) + + expected_params = [ + ( + "bucketId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_bucket_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(("bucketId", )) & set(("parent", "bucketId", "bucket", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_bucket_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_bucket") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_bucket") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.CreateBucketRequest.pb(logging_config.CreateBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) + + request = logging_config.CreateBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogBucket() + + client.create_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2/locations/sample3'} + request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_bucket(request) + + +def test_create_bucket_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateBucketRequest, + dict, +]) +def test_update_bucket_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_bucket(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test_update_bucket_rest_required_fields(request_type=logging_config.UpdateBucketRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_bucket._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogBucket() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogBucket.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_bucket(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_bucket_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("name", "bucket", "updateMask", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_bucket_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_bucket") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_bucket") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.UpdateBucketRequest.pb(logging_config.UpdateBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) + + request = logging_config.UpdateBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogBucket() + + client.update_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_bucket(request) + + +def test_update_bucket_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteBucketRequest, + dict, +]) +def test_delete_bucket_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_bucket(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_bucket_rest_required_fields(request_type=logging_config.DeleteBucketRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_bucket(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_bucket_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_bucket_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_bucket") as pre: + pre.assert_not_called() + pb_message = logging_config.DeleteBucketRequest.pb(logging_config.DeleteBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = logging_config.DeleteBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_bucket(request) + + +def test_delete_bucket_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UndeleteBucketRequest, + dict, +]) +def test_undelete_bucket_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.undelete_bucket(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_undelete_bucket_rest_required_fields(request_type=logging_config.UndeleteBucketRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_bucket._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.undelete_bucket(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_undelete_bucket_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.undelete_bucket._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_undelete_bucket_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_undelete_bucket") as pre: + pre.assert_not_called() + pb_message = 
logging_config.UndeleteBucketRequest.pb(logging_config.UndeleteBucketRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = logging_config.UndeleteBucketRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.undelete_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_undelete_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.UndeleteBucketRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.undelete_bucket(request) + + +def test_undelete_bucket_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.ListViewsRequest, + dict, +]) +def test_list_views_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.ListViewsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_views(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListViewsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_views_rest_required_fields(request_type=logging_config.ListViewsRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_views._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_views._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.ListViewsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.ListViewsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_views(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_views_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_views._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_views_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_views") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_views") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.ListViewsRequest.pb(logging_config.ListViewsRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.ListViewsResponse.to_json(logging_config.ListViewsResponse()) + + request = logging_config.ListViewsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.ListViewsResponse() + + client.list_views(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_views_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListViewsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_views(request) + + +def test_list_views_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_config.ListViewsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.ListViewsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_views(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=*/*/locations/*/buckets/*}/views" % client.transport._host, args[1]) + + +def test_list_views_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_views( + logging_config.ListViewsRequest(), + parent='parent_value', + ) + + +def test_list_views_rest_pager(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token='abc', + ), + logging_config.ListViewsResponse( + views=[], + next_page_token='def', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token='ghi', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging_config.ListViewsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} + + pager = client.list_views(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogView) + for i in results) + + pages = list(client.list_views(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetViewRequest, + dict, +]) +def test_get_view_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.LogView.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get_view(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+# Verifies that _get_unset_required_fields() drops fields already present in the
+# request and that required fields with non-default values survive untouched.
+def test_get_view_rest_required_fields(request_type=logging_config.GetViewRequest):
+    transport_class = transports.ConfigServiceV2RestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_view._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_view._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = logging_config.LogView()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = logging_config.LogView.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_view(request)
+
+            # GetView has no required query params beyond the path, so none
+            # are expected to be forwarded to the transport session.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_view_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class here, not an
+    # instance (missing "()") — confirm the transport accepts either.
+    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.get_view._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+# Verifies that the pre/post interceptor hooks around get_view are each
+# invoked exactly once per call (or skipped cleanly when interceptor is None).
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_view_rest_interceptors(null_interceptor):
+    transport = transports.ConfigServiceV2RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
+        )
+    client = ConfigServiceV2Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_view") as post, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_view") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = logging_config.GetViewRequest.pb(logging_config.GetViewRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = logging_config.LogView.to_json(logging_config.LogView())
+
+        request = logging_config.GetViewRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = logging_config.LogView()
+
+        client.get_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetViewRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_view(request)
+
+
+# NOTE(review): this test only constructs a client over the REST transport and
+# makes no assertions — presumably a smoke test; confirm intent.
+def test_get_view_rest_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    logging_config.CreateViewRequest,
+    dict,
+])
+def test_create_view_rest(request_type):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'}
+    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.LogView.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.create_view(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+# Verifies required-field handling for create_view; view_id is a required
+# query parameter so it must survive _get_unset_required_fields().
+def test_create_view_rest_required_fields(request_type=logging_config.CreateViewRequest):
+    transport_class = transports.ConfigServiceV2RestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["view_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+    assert "viewId" not in jsonified_request
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_view._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+    assert "viewId" in jsonified_request
+    assert jsonified_request["viewId"] == request_init["view_id"]
+
+    jsonified_request["parent"] = 'parent_value'
+    jsonified_request["viewId"] = 'view_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_view._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("view_id", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+    assert "viewId" in jsonified_request
+    assert jsonified_request["viewId"] == 'view_id_value'
+
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = logging_config.LogView()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = logging_config.LogView.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_view(request)
+
+            # viewId is a required query param, so it must be forwarded (here
+            # with its default empty value) to the transport session.
+            expected_params = [
+                (
+                    "viewId",
+                    "",
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_view_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class here, not an
+    # instance (missing "()") — confirm the transport accepts either.
+    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.create_view._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("viewId", )) & set(("parent", "viewId", "view", )))
+
+
+# Verifies that the pre/post interceptor hooks around create_view fire once.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_view_rest_interceptors(null_interceptor):
+    transport = transports.ConfigServiceV2RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
+        )
+    client = ConfigServiceV2Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_view") as post, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_view") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = logging_config.CreateViewRequest.pb(logging_config.CreateViewRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = logging_config.LogView.to_json(logging_config.LogView())
+
+        request = logging_config.CreateViewRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = logging_config.LogView()
+
+        client.create_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateViewRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'}
+    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_view(request)
+
+
+# NOTE(review): assertion-free client-construction smoke test; confirm intent.
+def test_create_view_rest_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    logging_config.UpdateViewRequest,
+    dict,
+])
+def test_update_view_rest(request_type):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
+    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.LogView.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.update_view(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+# Verifies required-field handling for update_view; update_mask is an
+# optional query param and must not be treated as a path/body field.
+def test_update_view_rest_required_fields(request_type=logging_config.UpdateViewRequest):
+    transport_class = transports.ConfigServiceV2RestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_view._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_view._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("update_mask", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = logging_config.LogView()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "patch",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = logging_config.LogView.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.update_view(request)
+
+            # update_view has no required query params, so none are forwarded.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_update_view_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class here, not an
+    # instance (missing "()") — confirm the transport accepts either.
+    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.update_view._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("updateMask", )) & set(("name", "view", )))
+
+
+# Verifies that the pre/post interceptor hooks around update_view fire once.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_update_view_rest_interceptors(null_interceptor):
+    transport = transports.ConfigServiceV2RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
+        )
+    client = ConfigServiceV2Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_view") as post, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_view") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = logging_config.UpdateViewRequest.pb(logging_config.UpdateViewRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = logging_config.LogView.to_json(logging_config.LogView())
+
+        request = logging_config.UpdateViewRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = logging_config.LogView()
+
+        client.update_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_update_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateViewRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
+    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.update_view(request)
+
+
+# NOTE(review): assertion-free client-construction smoke test; confirm intent.
+def test_update_view_rest_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    logging_config.DeleteViewRequest,
+    dict,
+])
+def test_delete_view_rest(request_type):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        # DeleteView returns Empty, which surfaces as None to the caller.
+        return_value = None
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ''
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.delete_view(request)
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+# Verifies required-field handling for delete_view ("name" only).
+def test_delete_view_rest_required_fields(request_type=logging_config.DeleteViewRequest):
+    transport_class = transports.ConfigServiceV2RestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_view._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_view._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "delete",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ''
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.delete_view(request)
+
+            # delete_view has no required query params, so none are forwarded.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_delete_view_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class here, not an
+    # instance (missing "()") — confirm the transport accepts either.
+    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.delete_view._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+# delete_view returns Empty, so only the "pre" interceptor hook is exercised.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_delete_view_rest_interceptors(null_interceptor):
+    transport = transports.ConfigServiceV2RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
+        )
+    client = ConfigServiceV2Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_view") as pre:
+        pre.assert_not_called()
+        pb_message = logging_config.DeleteViewRequest.pb(logging_config.DeleteViewRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+
+        request = logging_config.DeleteViewRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+
+        client.delete_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+
+
+def test_delete_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteViewRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.delete_view(request)
+
+
+# NOTE(review): assertion-free client-construction smoke test; confirm intent.
+def test_delete_view_rest_error():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    logging_config.ListSinksRequest,
+    dict,
+])
+def test_list_sinks_rest(request_type):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.ListSinksResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.ListSinksResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list_sinks(request)
+
+    # Establish that the response is the type that we expect.
+    # list_sinks wraps the raw response in a pager.
+    assert isinstance(response, pagers.ListSinksPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+# Verifies required-field handling for list_sinks ("parent" only; page_size
+# and page_token are optional query params).
+def test_list_sinks_rest_required_fields(request_type=logging_config.ListSinksRequest):
+    transport_class = transports.ConfigServiceV2RestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sinks._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sinks._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("page_size", "page_token", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = logging_config.ListSinksResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = logging_config.ListSinksResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_sinks(request)
+
+            # list_sinks has no required query params, so none are forwarded.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_sinks_rest_unset_required_fields():
+    # NOTE(review): AnonymousCredentials is passed as a class here, not an
+    # instance (missing "()") — confirm the transport accepts either.
+    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_sinks._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))
+
+
+# Verifies that the pre/post interceptor hooks around list_sinks fire once.
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_sinks_rest_interceptors(null_interceptor):
+    transport = transports.ConfigServiceV2RestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
+        )
+    client = ConfigServiceV2Client(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_sinks") as post, \
+         mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_sinks") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = logging_config.ListSinksRequest.pb(logging_config.ListSinksRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = logging_config.ListSinksResponse.to_json(logging_config.ListSinksResponse())
+
+        request = logging_config.ListSinksRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = logging_config.ListSinksResponse()
+
+        client.list_sinks(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_sinks_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListSinksRequest):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_sinks(request)
+
+
+# Verifies that flattened keyword arguments are accepted and transcoded onto
+# the expected ListSinks HTTP path.
+def test_list_sinks_rest_flattened():
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.ListSinksResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'sample1/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.ListSinksResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.list_sinks(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v2/{parent=*/*}/sinks" % client.transport._host, args[1])
+
+
+# Mixing a request object with flattened fields must raise ValueError.
+def test_list_sinks_rest_flattened_error(transport: str = 'rest'):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_sinks(
+            logging_config.ListSinksRequest(),
+            parent='parent_value',
+        )
+
+
+# Verifies the REST pager walks four pages (token chain abc/def/ghi/"")
+# across two logical calls and yields every LogSink.
+def test_list_sinks_rest_pager(transport: str = 'rest'):
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging_config.ListSinksResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_sinks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogSink) + for i in results) + + pages = list(client.list_sinks(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetSinkRequest, + dict, +]) +def test_get_sink_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_sink(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_get_sink_rest_required_fields(request_type=logging_config.GetSinkRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["sink_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_sink._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["sinkName"] = 'sink_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_sink._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "sinkName" in jsonified_request + assert jsonified_request["sinkName"] == 'sink_name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogSink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_sink(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_sink_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_sink._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("sinkName", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_sink_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_sink") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_sink") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.GetSinkRequest.pb(logging_config.GetSinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = 
PreparedRequest() + req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) + + request = logging_config.GetSinkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogSink() + + client.get_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_sink(request) + + +def test_get_sink_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_config.LogSink() + + # get arguments that satisfy an http rule for this method + sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + sink_name='sink_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_sink(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) + + +def test_get_sink_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_sink( + logging_config.GetSinkRequest(), + sink_name='sink_name_value', + ) + + +def test_get_sink_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateSinkRequest, + dict, +]) +def test_create_sink_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_sink(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_create_sink_rest_required_fields(request_type=logging_config.CreateSinkRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_sink._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields 
with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_sink._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("unique_writer_identity", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogSink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_sink(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_sink_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_sink._get_unset_required_fields({}) + assert set(unset_fields) == (set(("uniqueWriterIdentity", )) & set(("parent", "sink", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_sink_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_sink") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_sink") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.CreateSinkRequest.pb(logging_config.CreateSinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value 
= Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) + + request = logging_config.CreateSinkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogSink() + + client.create_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_sink(request) + + +def test_create_sink_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogSink() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_sink(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=*/*}/sinks" % client.transport._host, args[1]) + + +def test_create_sink_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_sink( + logging_config.CreateSinkRequest(), + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + +def test_create_sink_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateSinkRequest, + dict, +]) +def test_update_sink_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} + request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_sink(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test_update_sink_rest_required_fields(request_type=logging_config.UpdateSinkRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["sink_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_sink._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required 
fields with default values are now present + + jsonified_request["sinkName"] = 'sink_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_sink._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("unique_writer_identity", "update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "sinkName" in jsonified_request + assert jsonified_request["sinkName"] == 'sink_name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogSink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_sink(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_sink_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_sink._get_unset_required_fields({}) + assert set(unset_fields) == (set(("uniqueWriterIdentity", "updateMask", )) & set(("sinkName", "sink", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_sink_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_sink") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_sink") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.UpdateSinkRequest.pb(logging_config.UpdateSinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) + + request = logging_config.UpdateSinkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogSink() + + client.update_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} + request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_sink(request) + + +def test_update_sink_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogSink() + + # get arguments that satisfy an http rule for this method + sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogSink.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_sink(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) + + +def test_update_sink_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_sink( + logging_config.UpdateSinkRequest(), + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_sink_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteSinkRequest, + dict, +]) +def test_delete_sink_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_sink(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_sink_rest_required_fields(request_type=logging_config.DeleteSinkRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["sink_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_sink._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["sinkName"] = 'sink_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_sink._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "sinkName" in jsonified_request + assert jsonified_request["sinkName"] == 'sink_name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_sink(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_sink_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_sink._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("sinkName", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_sink_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_sink") as pre: + pre.assert_not_called() + pb_message = logging_config.DeleteSinkRequest.pb(logging_config.DeleteSinkRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = logging_config.DeleteSinkRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_sink(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteSinkRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_sink(request) + + +def test_delete_sink_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + sink_name='sink_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_sink(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1])
+
+
+def test_delete_sink_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields must raise ValueError."""
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_sink(
+            logging_config.DeleteSinkRequest(),
+            sink_name='sink_name_value',
+        )
+
+
+def test_delete_sink_rest_error():
+    """Smoke test: a client with transport='rest' can be constructed without error."""
+    # NOTE(review): no RPC is issued here; this only exercises client
+    # construction with the REST transport.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    logging_config.ListExclusionsRequest,
+    dict,
+])
+def test_list_exclusions_rest(request_type):
+    """ListExclusions over REST builds a pager from the mocked HTTP response."""
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.ListExclusionsResponse(
+            next_page_token='next_page_token_value',
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.ListExclusionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list_exclusions(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListExclusionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_exclusions_rest_required_fields(request_type=logging_config.ListExclusionsRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_exclusions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_exclusions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.ListExclusionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_exclusions(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_exclusions_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_exclusions._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_exclusions_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_exclusions") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_exclusions") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
logging_config.ListExclusionsRequest.pb(logging_config.ListExclusionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.ListExclusionsResponse.to_json(logging_config.ListExclusionsResponse()) + + request = logging_config.ListExclusionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.ListExclusionsResponse() + + client.list_exclusions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_exclusions_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListExclusionsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_exclusions(request) + + +def test_list_exclusions_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        return_value = logging_config.ListExclusionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'sample1/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.ListExclusionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.list_exclusions(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v2/{parent=*/*}/exclusions" % client.transport._host, args[1])
+
+
+def test_list_exclusions_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields must raise ValueError."""
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_exclusions(
+            logging_config.ListExclusionsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_exclusions_rest_pager(transport: str = 'rest'):
+    """Paging over ListExclusions: the mocked session returns a series of pages."""
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging_config.ListExclusionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_exclusions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in results) + + pages = list(client.list_exclusions(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetExclusionRequest, + dict, +]) +def test_get_exclusion_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/exclusions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_exclusion(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_get_exclusion_rest_required_fields(request_type=logging_config.GetExclusionRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are 
left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_exclusion(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_exclusion_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_exclusion._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_exclusion_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_exclusion") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_exclusion") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.GetExclusionRequest.pb(logging_config.GetExclusionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) + + request = logging_config.GetExclusionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogExclusion() + + client.get_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/exclusions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_exclusion(request) + + +def test_get_exclusion_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/exclusions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_exclusion(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) + + +def test_get_exclusion_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+    with pytest.raises(ValueError):
+        client.get_exclusion(
+            logging_config.GetExclusionRequest(),
+            name='name_value',
+        )
+
+
+def test_get_exclusion_rest_error():
+    """Smoke test: a client with transport='rest' can be constructed without error."""
+    # NOTE(review): no RPC is issued here; this only exercises client
+    # construction with the REST transport.
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    logging_config.CreateExclusionRequest,
+    dict,
+])
+def test_create_exclusion_rest(request_type):
+    """CreateExclusion over REST returns the LogExclusion decoded from the mocked response."""
+    client = ConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = logging_config.LogExclusion(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+            disabled=True,
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = logging_config.LogExclusion.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.create_exclusion(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_create_exclusion_rest_required_fields(request_type=logging_config.CreateExclusionRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_exclusion(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_exclusion_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_exclusion._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "exclusion", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_exclusion_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_exclusion") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_exclusion") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
logging_config.CreateExclusionRequest.pb(logging_config.CreateExclusionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) + + request = logging_config.CreateExclusionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogExclusion() + + client.create_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_exclusion(request) + + +def test_create_exclusion_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_exclusion(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=*/*}/exclusions" % client.transport._host, args[1]) + + +def test_create_exclusion_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + +def test_create_exclusion_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateExclusionRequest, + dict, +]) +def test_update_exclusion_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/exclusions/sample3'} + request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_exclusion(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_update_exclusion_rest_required_fields(request_type=logging_config.UpdateExclusionRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_exclusion._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_exclusion(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_exclusion_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_exclusion._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("name", "exclusion", "updateMask", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_exclusion_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_exclusion") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_exclusion") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.UpdateExclusionRequest.pb(logging_config.UpdateExclusionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) + + request = logging_config.UpdateExclusionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.LogExclusion() + + client.update_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/exclusions/sample3'} + request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_exclusion(request) + + +def test_update_exclusion_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.LogExclusion() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/exclusions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.LogExclusion.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_exclusion(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) + + +def test_update_exclusion_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_exclusion_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteExclusionRequest, + dict, +]) +def test_delete_exclusion_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/exclusions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_exclusion(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_exclusion_rest_required_fields(request_type=logging_config.DeleteExclusionRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_exclusion._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_exclusion(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_exclusion_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_exclusion._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_exclusion_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_exclusion") as pre: + pre.assert_not_called() + pb_message = logging_config.DeleteExclusionRequest.pb(logging_config.DeleteExclusionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = logging_config.DeleteExclusionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/exclusions/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_exclusion(request) + + +def test_delete_exclusion_rest_flattened(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/exclusions/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_exclusion(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) + + +def test_delete_exclusion_rest_flattened_error(transport: str = 'rest'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) + + +def test_delete_exclusion_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetCmekSettingsRequest, + dict, +]) +def test_get_cmek_settings_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.CmekSettings.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_cmek_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +def test_get_cmek_settings_rest_required_fields(request_type=logging_config.GetCmekSettingsRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cmek_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cmek_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.CmekSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.CmekSettings.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_cmek_settings(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_cmek_settings_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_cmek_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_cmek_settings_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_cmek_settings") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_cmek_settings") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
logging_config.GetCmekSettingsRequest.pb(logging_config.GetCmekSettingsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.CmekSettings.to_json(logging_config.CmekSettings()) + + request = logging_config.GetCmekSettingsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.CmekSettings() + + client.get_cmek_settings(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_cmek_settings_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetCmekSettingsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_cmek_settings(request) + + +def test_get_cmek_settings_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateCmekSettingsRequest, + dict, +]) +def test_update_cmek_settings_rest(request_type): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2'} + request_init["cmek_settings"] = {'name': 'name_value', 'kms_key_name': 'kms_key_name_value', 'service_account_id': 'service_account_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + service_account_id='service_account_id_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_config.CmekSettings.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_cmek_settings(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.service_account_id == 'service_account_id_value' + + +def test_update_cmek_settings_rest_required_fields(request_type=logging_config.UpdateCmekSettingsRequest): + transport_class = transports.ConfigServiceV2RestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cmek_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cmek_settings._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_config.CmekSettings() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_config.CmekSettings.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_cmek_settings(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_cmek_settings_rest_unset_required_fields(): + transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_cmek_settings._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("name", "cmekSettings", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_cmek_settings_rest_interceptors(null_interceptor): + transport = transports.ConfigServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), + ) + client = ConfigServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_cmek_settings") as post, \ + mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_cmek_settings") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_config.UpdateCmekSettingsRequest.pb(logging_config.UpdateCmekSettingsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_config.CmekSettings.to_json(logging_config.CmekSettings()) + + request = logging_config.UpdateCmekSettingsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_config.CmekSettings() + + client.update_cmek_settings(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_cmek_settings_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateCmekSettingsRequest): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2'} + request_init["cmek_settings"] = {'name': 'name_value', 'kms_key_name': 'kms_key_name_value', 'service_account_id': 'service_account_id_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_cmek_settings(request) + + +def test_update_cmek_settings_rest_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( @@ -6027,6 +11169,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport, + transports.ConfigServiceV2RestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -6037,6 +11180,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = ConfigServiceV2Client.get_transport_class(transport_name)( @@ -6186,6 +11330,7 @@ def test_config_service_v2_transport_auth_adc(transport_class): [ transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport, + transports.ConfigServiceV2RestTransport, ], ) def test_config_service_v2_transport_auth_gdch_credentials(transport_class): @@ -6285,10 +11430,20 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_config_service_v2_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.ConfigServiceV2RestTransport ( + credentials=cred, + 
client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( @@ -6298,11 +11453,14 @@ def test_config_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( @@ -6312,8 +11470,93 @@ def test_config_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://logging.googleapis.com:8000' ) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_config_service_v2_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ConfigServiceV2Client( + credentials=creds1, + transport=transport_name, + ) + client2 = ConfigServiceV2Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_buckets._session + session2 = client2.transport.list_buckets._session + assert session1 != session2 + session1 = client1.transport.get_bucket._session + session2 = client2.transport.get_bucket._session + assert session1 != session2 + session1 = client1.transport.create_bucket._session + session2 = client2.transport.create_bucket._session + assert session1 != session2 + session1 = client1.transport.update_bucket._session + session2 = client2.transport.update_bucket._session + assert session1 != session2 + session1 = client1.transport.delete_bucket._session + session2 = 
client2.transport.delete_bucket._session + assert session1 != session2 + session1 = client1.transport.undelete_bucket._session + session2 = client2.transport.undelete_bucket._session + assert session1 != session2 + session1 = client1.transport.list_views._session + session2 = client2.transport.list_views._session + assert session1 != session2 + session1 = client1.transport.get_view._session + session2 = client2.transport.get_view._session + assert session1 != session2 + session1 = client1.transport.create_view._session + session2 = client2.transport.create_view._session + assert session1 != session2 + session1 = client1.transport.update_view._session + session2 = client2.transport.update_view._session + assert session1 != session2 + session1 = client1.transport.delete_view._session + session2 = client2.transport.delete_view._session + assert session1 != session2 + session1 = client1.transport.list_sinks._session + session2 = client2.transport.list_sinks._session + assert session1 != session2 + session1 = client1.transport.get_sink._session + session2 = client2.transport.get_sink._session + assert session1 != session2 + session1 = client1.transport.create_sink._session + session2 = client2.transport.create_sink._session + assert session1 != session2 + session1 = client1.transport.update_sink._session + session2 = client2.transport.update_sink._session + assert session1 != session2 + session1 = client1.transport.delete_sink._session + session2 = client2.transport.delete_sink._session + assert session1 != session2 + session1 = client1.transport.list_exclusions._session + session2 = client2.transport.list_exclusions._session + assert session1 != session2 + session1 = client1.transport.get_exclusion._session + session2 = client2.transport.get_exclusion._session + assert session1 != session2 + session1 = client1.transport.create_exclusion._session + session2 = client2.transport.create_exclusion._session + assert session1 != session2 + session1 = 
client1.transport.update_exclusion._session + session2 = client2.transport.update_exclusion._session + assert session1 != session2 + session1 = client1.transport.delete_exclusion._session + session2 = client2.transport.delete_exclusion._session + assert session1 != session2 + session1 = client1.transport.get_cmek_settings._session + session2 = client2.transport.get_cmek_settings._session + assert session1 != session2 + session1 = client1.transport.update_cmek_settings._session + session2 = client2.transport.update_cmek_settings._session + assert session1 != session2 def test_config_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -6643,6 +11886,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -6658,6 +11902,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py old mode 100644 new mode 100755 index 8ede77f6e754..cdbd0fafc78f --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import 
json_format from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options @@ -82,6 +89,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), + (LoggingServiceV2Client, "rest"), ]) def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -94,12 +102,16 @@ def test_logging_service_v2_client_from_service_account_info(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.LoggingServiceV2GrpcTransport, "grpc"), (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (transports.LoggingServiceV2RestTransport, "rest"), ]) def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -116,6 +128,7 @@ def test_logging_service_v2_client_service_account_always_use_jwt(transport_clas @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), + (LoggingServiceV2Client, "rest"), ]) def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -131,6 +144,9 @@ def test_logging_service_v2_client_from_service_account_file(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://logging.googleapis.com' ) @@ -138,6 +154,7 @@ def test_logging_service_v2_client_get_transport_class(): transport = LoggingServiceV2Client.get_transport_class() 
available_transports = [ transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2RestTransport, ] assert transport in available_transports @@ -148,6 +165,7 @@ def test_logging_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest"), ]) @mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) @mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) @@ -267,6 +285,8 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), + (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", "true"), + (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) @mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) @@ -404,6 +424,7 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (LoggingServiceV2Client, 
transports.LoggingServiceV2RestTransport, "rest"), ]) def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -428,6 +449,7 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", None), ]) def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. @@ -2085,6 +2107,1210 @@ async def test_tail_log_entries_async_from_dict(): await test_tail_log_entries_async(request_type=dict) +@pytest.mark.parametrize("request_type", [ + logging.DeleteLogRequest, + dict, +]) +def test_delete_log_rest(request_type): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'log_name': 'projects/sample1/logs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_log(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_log_rest_required_fields(request_type=logging.DeleteLogRequest): + transport_class = transports.LoggingServiceV2RestTransport + + request_init = {} + request_init["log_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["logName"] = 'log_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "logName" in jsonified_request + assert jsonified_request["logName"] == 'log_name_value' + + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_log(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_log_rest_unset_required_fields(): + transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_log._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("logName", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_log_rest_interceptors(null_interceptor): + transport = transports.LoggingServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), + ) + client = LoggingServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_delete_log") as pre: + pre.assert_not_called() + pb_message = logging.DeleteLogRequest.pb(logging.DeleteLogRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = logging.DeleteLogRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_log(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_log_rest_bad_request(transport: str = 'rest', request_type=logging.DeleteLogRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'log_name': 'projects/sample1/logs/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_log(request) + + +def test_delete_log_rest_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'log_name': 'projects/sample1/logs/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + log_name='log_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_log(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{log_name=projects/*/logs/*}" % client.transport._host, args[1]) + + +def test_delete_log_rest_flattened_error(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_log( + logging.DeleteLogRequest(), + log_name='log_name_value', + ) + + +def test_delete_log_rest_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging.WriteLogEntriesRequest, + dict, +]) +def test_write_log_entries_rest(request_type): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging.WriteLogEntriesResponse( + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.write_log_entries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging.WriteLogEntriesResponse) + + +def test_write_log_entries_rest_required_fields(request_type=logging.WriteLogEntriesRequest): + transport_class = transports.LoggingServiceV2RestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).write_log_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).write_log_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging.WriteLogEntriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.write_log_entries(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_write_log_entries_rest_unset_required_fields(): + transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.write_log_entries._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("entries", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_write_log_entries_rest_interceptors(null_interceptor): + transport = transports.LoggingServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), + ) + client = LoggingServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_write_log_entries") as post, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_write_log_entries") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging.WriteLogEntriesRequest.pb(logging.WriteLogEntriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging.WriteLogEntriesResponse.to_json(logging.WriteLogEntriesResponse()) + + request = logging.WriteLogEntriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging.WriteLogEntriesResponse() + + client.write_log_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_write_log_entries_rest_bad_request(transport: str = 'rest', request_type=logging.WriteLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.write_log_entries(request) + + +def test_write_log_entries_rest_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging.WriteLogEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.write_log_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/entries:write" % client.transport._host, args[1]) + + +def test_write_log_entries_rest_flattened_error(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.write_log_entries( + logging.WriteLogEntriesRequest(), + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + +def test_write_log_entries_rest_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging.ListLogEntriesRequest, + dict, +]) +def test_list_log_entries_rest(request_type): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.ListLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_log_entries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListLogEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_entries_rest_required_fields(request_type=logging.ListLogEntriesRequest): + transport_class = transports.LoggingServiceV2RestTransport + + request_init = {} + request_init["resource_names"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["resourceNames"] = 'resource_names_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_entries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "resourceNames" in jsonified_request + assert jsonified_request["resourceNames"] == 'resource_names_value' + + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging.ListLogEntriesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging.ListLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_log_entries(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_log_entries_rest_unset_required_fields(): + transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_log_entries._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("resourceNames", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_log_entries_rest_interceptors(null_interceptor): + transport = transports.LoggingServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), + ) + client = LoggingServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_log_entries") as post, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_log_entries") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
logging.ListLogEntriesRequest.pb(logging.ListLogEntriesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging.ListLogEntriesResponse.to_json(logging.ListLogEntriesResponse()) + + request = logging.ListLogEntriesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging.ListLogEntriesResponse() + + client.list_log_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_log_entries_rest_bad_request(transport: str = 'rest', request_type=logging.ListLogEntriesRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_log_entries(request) + + +def test_list_log_entries_rest_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging.ListLogEntriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.ListLogEntriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_log_entries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/entries:list" % client.transport._host, args[1]) + + +def test_list_log_entries_rest_flattened_error(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_log_entries( + logging.ListLogEntriesRequest(), + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + +def test_list_log_entries_rest_pager(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging.ListLogEntriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_log_entries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in results) + + pages = list(client.list_log_entries(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, +]) +def test_list_monitored_resource_descriptors_rest(request_type): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.ListMonitoredResourceDescriptorsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_monitored_resource_descriptors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_monitored_resource_descriptors_rest_interceptors(null_interceptor): + transport = transports.LoggingServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), + ) + client = LoggingServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_monitored_resource_descriptors") as post, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_monitored_resource_descriptors") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging.ListMonitoredResourceDescriptorsRequest.pb(logging.ListMonitoredResourceDescriptorsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
logging.ListMonitoredResourceDescriptorsResponse.to_json(logging.ListMonitoredResourceDescriptorsResponse()) + + request = logging.ListMonitoredResourceDescriptorsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging.ListMonitoredResourceDescriptorsResponse() + + client.list_monitored_resource_descriptors(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_monitored_resource_descriptors_rest_bad_request(transport: str = 'rest', request_type=logging.ListMonitoredResourceDescriptorsRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_monitored_resource_descriptors(request) + + +def test_list_monitored_resource_descriptors_rest_pager(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging.ListMonitoredResourceDescriptorsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.list_monitored_resource_descriptors(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results) + + pages = list(client.list_monitored_resource_descriptors(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging.ListLogsRequest, + dict, +]) +def test_list_logs_rest(request_type): + client = LoggingServiceV2Client( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.ListLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_logs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogsPager) + assert response.log_names == ['log_names_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_list_logs_rest_required_fields(request_type=logging.ListLogsRequest): + transport_class = transports.LoggingServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_logs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_logs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", "resource_names", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging.ListLogsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging.ListLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_logs(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_logs_rest_unset_required_fields(): + transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_logs._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", "resourceNames", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_logs_rest_interceptors(null_interceptor): + transport = transports.LoggingServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), + ) + client = LoggingServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_logs") as post, \ + mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_logs") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging.ListLogsRequest.pb(logging.ListLogsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = logging.ListLogsResponse.to_json(logging.ListLogsResponse()) + + request = logging.ListLogsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging.ListLogsResponse() + + client.list_logs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_logs_rest_bad_request(transport: str = 'rest', request_type=logging.ListLogsRequest): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_logs(request) + + +def test_list_logs_rest_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging.ListLogsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging.ListLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_logs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=*/*}/logs" % client.transport._host, args[1]) + + +def test_list_logs_rest_flattened_error(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_logs( + logging.ListLogsRequest(), + parent='parent_value', + ) + + +def test_list_logs_rest_pager(transport: str = 'rest'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging.ListLogsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_logs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) + + pages = list(client.list_logs(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_tail_log_entries_rest_unimplemented(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = logging.TailLogEntriesRequest() + requests = [request] + with pytest.raises(NotImplementedError): + client.tail_log_entries(requests) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( @@ -2163,6 +3389,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport, + transports.LoggingServiceV2RestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -2173,6 +3400,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = LoggingServiceV2Client.get_transport_class(transport_name)( @@ -2307,6 +3535,7 @@ def test_logging_service_v2_transport_auth_adc(transport_class): [ transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport, + transports.LoggingServiceV2RestTransport, ], ) def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): @@ -2407,10 +3636,20 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_logging_service_v2_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.LoggingServiceV2RestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( @@ -2420,11 +3659,14 @@ def test_logging_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", 
+ "rest", ]) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( @@ -2434,8 +3676,42 @@ def test_logging_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://logging.googleapis.com:8000' ) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_logging_service_v2_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = LoggingServiceV2Client( + credentials=creds1, + transport=transport_name, + ) + client2 = LoggingServiceV2Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete_log._session + session2 = client2.transport.delete_log._session + assert session1 != session2 + session1 = client1.transport.write_log_entries._session + session2 = client2.transport.write_log_entries._session + assert session1 != session2 + session1 = client1.transport.list_log_entries._session + session2 = client2.transport.list_log_entries._session + assert session1 != session2 + session1 = client1.transport.list_monitored_resource_descriptors._session + session2 = client2.transport.list_monitored_resource_descriptors._session + assert session1 != session2 + session1 = client1.transport.list_logs._session + session2 = client2.transport.list_logs._session + assert session1 != session2 + session1 = client1.transport.tail_log_entries._session + session2 = client2.transport.tail_log_entries._session + assert session1 != session2 def test_logging_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -2685,6 +3961,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2700,6 +3977,7 @@ def test_transport_close(): def 
test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index c193ecc27dd6..0e79159169e6 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore @@ -80,6 +87,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), + (MetricsServiceV2Client, "rest"), ]) def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -92,12 +100,16 @@ def test_metrics_service_v2_client_from_service_account_info(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.MetricsServiceV2GrpcTransport, "grpc"), (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + 
(transports.MetricsServiceV2RestTransport, "rest"), ]) def test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -114,6 +126,7 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(transport_clas @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), + (MetricsServiceV2Client, "rest"), ]) def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -129,6 +142,9 @@ def test_metrics_service_v2_client_from_service_account_file(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://logging.googleapis.com' ) @@ -136,6 +152,7 @@ def test_metrics_service_v2_client_get_transport_class(): transport = MetricsServiceV2Client.get_transport_class() available_transports = [ transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2RestTransport, ] assert transport in available_transports @@ -146,6 +163,7 @@ def test_metrics_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest"), ]) @mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) @mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) @@ -265,6 +283,8 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, (MetricsServiceV2AsyncClient, 
transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), + (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", "true"), + (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) @mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) @@ -402,6 +422,7 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), + (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest"), ]) def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -426,6 +447,7 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", None), ]) def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -1937,6 +1959,1245 @@ async def test_delete_log_metric_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + logging_metrics.ListLogMetricsRequest, + dict, +]) +def test_list_log_metrics_rest(request_type): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_log_metrics(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListLogMetricsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_metrics_rest_required_fields(request_type=logging_metrics.ListLogMetricsRequest): + transport_class = transports.MetricsServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_metrics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_metrics._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_metrics.ListLogMetricsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_log_metrics(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_log_metrics_rest_unset_required_fields(): + transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_log_metrics._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_log_metrics_rest_interceptors(null_interceptor): + transport = transports.MetricsServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), + ) + client = MetricsServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_list_log_metrics") as post, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_list_log_metrics") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
logging_metrics.ListLogMetricsRequest.pb(logging_metrics.ListLogMetricsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_metrics.ListLogMetricsResponse.to_json(logging_metrics.ListLogMetricsResponse()) + + request = logging_metrics.ListLogMetricsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_metrics.ListLogMetricsResponse() + + client.list_log_metrics(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_log_metrics_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.ListLogMetricsRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_log_metrics(request) + + +def test_list_log_metrics_rest_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_metrics.ListLogMetricsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_log_metrics(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/metrics" % client.transport._host, args[1]) + + +def test_list_log_metrics_rest_flattened_error(transport: str = 'rest'): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_log_metrics( + logging_metrics.ListLogMetricsRequest(), + parent='parent_value', + ) + + +def test_list_log_metrics_rest_pager(transport: str = 'rest'): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(logging_metrics.ListLogMetricsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1'} + + pager = client.list_log_metrics(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_metrics.LogMetric) + for i in results) + + pages = list(client.list_log_metrics(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.GetLogMetricRequest, + dict, +]) +def test_get_log_metric_rest(request_type): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'metric_name': 'projects/sample1/metrics/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_log_metric(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_get_log_metric_rest_required_fields(request_type=logging_metrics.GetLogMetricRequest): + transport_class = transports.MetricsServiceV2RestTransport + + request_init = {} + request_init["metric_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["metricName"] = 'metric_name_value' + + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "metricName" in jsonified_request + assert jsonified_request["metricName"] == 'metric_name_value' + + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_metrics.LogMetric() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_log_metric(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_log_metric_rest_unset_required_fields(): + transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_log_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("metricName", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_log_metric_rest_interceptors(null_interceptor): + transport = transports.MetricsServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), + ) + client = MetricsServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_get_log_metric") as post, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_get_log_metric") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_metrics.GetLogMetricRequest.pb(logging_metrics.GetLogMetricRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) + + request = logging_metrics.GetLogMetricRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_metrics.LogMetric() + + client.get_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.GetLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'metric_name': 'projects/sample1/metrics/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_log_metric(request) + + +def test_get_log_metric_rest_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_metrics.LogMetric() + + # get arguments that satisfy an http rule for this method + sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + metric_name='metric_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_log_metric(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) + + +def test_get_log_metric_rest_flattened_error(transport: str = 'rest'): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_log_metric( + logging_metrics.GetLogMetricRequest(), + metric_name='metric_name_value', + ) + + +def test_get_log_metric_rest_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.CreateLogMetricRequest, + dict, +]) +def test_create_log_metric_rest(request_type): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_log_metric(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_create_log_metric_rest_required_fields(request_type=logging_metrics.CreateLogMetricRequest): + transport_class = transports.MetricsServiceV2RestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # 
verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_metrics.LogMetric() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_log_metric(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_log_metric_rest_unset_required_fields(): + transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_log_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "metric", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_create_log_metric_rest_interceptors(null_interceptor): + transport = transports.MetricsServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), + ) + client = MetricsServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_create_log_metric") as post, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_create_log_metric") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_metrics.CreateLogMetricRequest.pb(logging_metrics.CreateLogMetricRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) + + request = logging_metrics.CreateLogMetricRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_metrics.LogMetric() + + client.create_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.CreateLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1'} + request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 
'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_log_metric(request) + + +def test_create_log_metric_rest_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_metrics.LogMetric() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_log_metric(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{parent=projects/*}/metrics" % client.transport._host, args[1]) + + +def test_create_log_metric_rest_flattened_error(transport: str = 'rest'): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +def test_create_log_metric_rest_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.UpdateLogMetricRequest, + dict, +]) +def test_update_log_metric_rest(request_type): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'metric_name': 'projects/sample1/metrics/sample2'} + request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_log_metric(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test_update_log_metric_rest_required_fields(request_type=logging_metrics.UpdateLogMetricRequest): + transport_class = transports.MetricsServiceV2RestTransport + + request_init = {} + request_init["metric_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["metricName"] = 'metric_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_log_metric._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "metricName" in jsonified_request + assert jsonified_request["metricName"] == 'metric_name_value' + + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = logging_metrics.LogMetric() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "put", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_log_metric(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_log_metric_rest_unset_required_fields(): + transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_log_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("metricName", "metric", ))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_log_metric_rest_interceptors(null_interceptor): + transport = transports.MetricsServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), + ) + client = MetricsServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_update_log_metric") as post, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_update_log_metric") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = logging_metrics.UpdateLogMetricRequest.pb(logging_metrics.UpdateLogMetricRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) + + request = logging_metrics.UpdateLogMetricRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = logging_metrics.LogMetric() + + client.update_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.UpdateLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'metric_name': 'projects/sample1/metrics/sample2'} + request_init["metric"] = {'name': 'name_value', 'description': 
'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_log_metric(request) + + +def test_update_log_metric_rest_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = logging_metrics.LogMetric() + + # get arguments that satisfy an http rule for this method + sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = logging_metrics.LogMetric.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_log_metric(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) + + +def test_update_log_metric_rest_flattened_error(transport: str = 'rest'): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +def test_update_log_metric_rest_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.DeleteLogMetricRequest, + dict, +]) +def test_delete_log_metric_rest(request_type): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'metric_name': 'projects/sample1/metrics/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_log_metric(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_log_metric_rest_required_fields(request_type=logging_metrics.DeleteLogMetricRequest): + transport_class = transports.MetricsServiceV2RestTransport + + request_init = {} + request_init["metric_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["metricName"] = 'metric_name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log_metric._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "metricName" in jsonified_request + assert jsonified_request["metricName"] == 'metric_name_value' + + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_log_metric(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_log_metric_rest_unset_required_fields(): + transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_log_metric._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("metricName", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_log_metric_rest_interceptors(null_interceptor): + transport = transports.MetricsServiceV2RestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), + ) + client = MetricsServiceV2Client(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_delete_log_metric") as pre: + pre.assert_not_called() + pb_message = logging_metrics.DeleteLogMetricRequest.pb(logging_metrics.DeleteLogMetricRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = logging_metrics.DeleteLogMetricRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = 
request, metadata + + client.delete_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.DeleteLogMetricRequest): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'metric_name': 'projects/sample1/metrics/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_log_metric(request) + + +def test_delete_log_metric_rest_flattened(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + metric_name='metric_name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_log_metric(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) + + +def test_delete_log_metric_rest_flattened_error(transport: str = 'rest'): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), + metric_name='metric_name_value', + ) + + +def test_delete_log_metric_rest_error(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( @@ -2015,6 +3276,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport, + transports.MetricsServiceV2RestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
@@ -2025,6 +3287,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = MetricsServiceV2Client.get_transport_class(transport_name)( @@ -2158,6 +3421,7 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): [ transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport, + transports.MetricsServiceV2RestTransport, ], ) def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): @@ -2258,10 +3522,20 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_metrics_service_v2_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.MetricsServiceV2RestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( @@ -2271,11 +3545,14 @@ def test_metrics_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( @@ -2285,8 +3562,39 @@ def test_metrics_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://logging.googleapis.com:8000' ) 
+@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_metrics_service_v2_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = MetricsServiceV2Client( + credentials=creds1, + transport=transport_name, + ) + client2 = MetricsServiceV2Client( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_log_metrics._session + session2 = client2.transport.list_log_metrics._session + assert session1 != session2 + session1 = client1.transport.get_log_metric._session + session2 = client2.transport.get_log_metric._session + assert session1 != session2 + session1 = client1.transport.create_log_metric._session + session2 = client2.transport.create_log_metric._session + assert session1 != session2 + session1 = client1.transport.update_log_metric._session + session2 = client2.transport.update_log_metric._session + assert session1 != session2 + session1 = client1.transport.delete_log_metric._session + session2 = client2.transport.delete_log_metric._session + assert session1 != session2 def test_metrics_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -2536,6 +3844,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2551,6 +3860,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json old mode 100644 new mode 100755 index 038bb9952176..804956f47760 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json @@ -106,6 +106,56 @@ ] } } + }, + "rest": { + "libraryClient": "CloudRedisClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "ExportInstance": { + "methods": [ + "export_instance" + ] + }, + "FailoverInstance": { + "methods": [ + "failover_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ImportInstance": { + "methods": [ + "import_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + }, + "UpgradeInstance": { + "methods": [ + "upgrade_instance" + ] + } + } } } } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index d84ba29430f1..25f00e1b7773 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -44,6 +44,7 @@ from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from .transports.rest import CloudRedisRestTransport class CloudRedisClientMeta(type): @@ -56,6 +57,7 @@ class CloudRedisClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport + _transport_registry["rest"] = CloudRedisRestTransport def get_transport_class(cls, label: str = None, @@ -327,6 +329,9 @@ def __init__(self, 
*, transport (Union[str, CloudRedisTransport]): The transport to use. If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py old mode 100644 new mode 100755 index 2622f89f5de9..bfee7cc29940 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -19,15 +19,20 @@ from .base import CloudRedisTransport from .grpc import CloudRedisGrpcTransport from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from .rest import CloudRedisRestTransport +from .rest import CloudRedisRestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] _transport_registry['grpc'] = CloudRedisGrpcTransport _transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport +_transport_registry['rest'] = CloudRedisRestTransport __all__ = ( 'CloudRedisTransport', 'CloudRedisGrpcTransport', 'CloudRedisGrpcAsyncIOTransport', + 'CloudRedisRestTransport', + 'CloudRedisRestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py new file mode 100755 index 000000000000..2d2c0e9ca396 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -0,0 +1,1276 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudRedisRestInterceptor: + """Interceptor for CloudRedis. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudRedisRestTransport. + + .. 
code-block:: python + class MyCustomCloudRedisInterceptor(CloudRedisRestInterceptor): + def pre_create_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(response): + logging.log(f"Received response: {response}") + + def pre_delete_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(response): + logging.log(f"Received response: {response}") + + def pre_export_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_instance(response): + logging.log(f"Received response: {response}") + + def pre_failover_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_failover_instance(response): + logging.log(f"Received response: {response}") + + def pre_get_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(response): + logging.log(f"Received response: {response}") + + def pre_import_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_import_instance(response): + logging.log(f"Received response: {response}") + + def pre_list_instances(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(response): + logging.log(f"Received response: {response}") + + def pre_update_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(response): + logging.log(f"Received response: {response}") + + def pre_upgrade_instance(request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_upgrade_instance(response): + logging.log(f"Received response: {response}") + + transport = 
CloudRedisRestTransport(interceptor=MyCustomCloudRedisInterceptor()) + client = CloudRedisClient(transport=transport) + + + """ + def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for export_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for failover_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for failover_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for import_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for upgrade_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for upgrade_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudRedisRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudRedisRestInterceptor + + +class CloudRedisRestTransport(CloudRedisTransport): + """REST backend transport for CloudRedis. 
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via an issue in this + library's source repository. Thank you! + """ + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: ga_credentials.Credentials=None, + credentials_file: str=None, + scopes: Sequence[str]=None, + client_cert_source_for_mtls: Callable[[ + ], Tuple[bytes, bytes]]=None, + quota_project_id: Optional[str]=None, + client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool]=False, + url_scheme: str='https', + interceptor: Optional[CloudRedisRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudRedisRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options) + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateInstance(CloudRedisRestStub): + def __hash__(self): + return hash("CreateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "instanceId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.cloud_redis.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + 'body': 'instance', + }, + ] + request, metadata = self._interceptor.pre_create_instance(request, metadata) + pb_request = cloud_redis.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(CloudRedisRestStub): + def __hash__(self): + return hash("DeleteInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.cloud_redis.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + pb_request = cloud_redis.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + return resp + + class _ExportInstance(CloudRedisRestStub): + def __hash__(self): + return hash("ExportInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.ExportInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the export instance method over HTTP. 
+ + Args: + request (~.cloud_redis.ExportInstanceRequest): + The request object. Request for + [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:export', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_export_instance(request, metadata) + pb_request = cloud_redis.ExportInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_instance(resp) + return resp + + class _FailoverInstance(CloudRedisRestStub): + def __hash__(self): + return hash("FailoverInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.FailoverInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the failover instance method over HTTP. + + Args: + request (~.cloud_redis.FailoverInstanceRequest): + The request object. Request for + [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:failover', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_failover_instance(request, metadata) + pb_request = cloud_redis.FailoverInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_failover_instance(resp) + return resp + + class _GetInstance(CloudRedisRestStub): + def __hash__(self): + return hash("GetInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis.Instance: + A Google Cloud Redis instance. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + request, metadata = self._interceptor.pre_get_instance(request, metadata) + pb_request = cloud_redis.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis.Instance() + pb_resp = cloud_redis.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + return resp + + class _ImportInstance(CloudRedisRestStub): + def __hash__(self): + return hash("ImportInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.ImportInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the import instance method over HTTP. + + Args: + request (~.cloud_redis.ImportInstanceRequest): + The request object. Request for + [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:import', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_import_instance(request, metadata) + pb_request = cloud_redis.ImportInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_instance(resp) + return resp + + class _ListInstances(CloudRedisRestStub): + def __hash__(self): + return hash("ListInstances") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_redis.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis.ListInstancesResponse: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + }, + ] + request, metadata = self._interceptor.pre_list_instances(request, metadata) + pb_request = cloud_redis.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis.ListInstancesResponse() + pb_resp = cloud_redis.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + return resp + + class _UpdateInstance(CloudRedisRestStub): + def __hash__(self): + return hash("UpdateInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_redis.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', + 'body': 'instance', + }, + ] + request, metadata = self._interceptor.pre_update_instance(request, metadata) + pb_request = cloud_redis.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + return resp + + class _UpgradeInstance(CloudRedisRestStub): + def __hash__(self): + return hash("UpgradeInstance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.UpgradeInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: float=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the upgrade instance method over HTTP. + + Args: + request (~.cloud_redis.UpgradeInstanceRequest): + The request object. Request for + [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:upgrade', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_upgrade_instance(request, metadata) + pb_request = cloud_redis.UpgradeInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_upgrade_instance(resp) + return resp + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_instance(self) -> Callable[ + [cloud_redis.ExportInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def failover_instance(self) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_instance(self) -> Callable[ + [cloud_redis.ImportInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def upgrade_instance(self) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CloudRedisRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ad84ef3c0a2c..913c4397b6eb 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -23,10 +23,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -82,6 +89,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), ]) def test_cloud_redis_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -94,12 +102,16 @@ def test_cloud_redis_client_from_service_account_info(client_class, transport_na assert client.transport._host == ( 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://redis.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ 
(transports.CloudRedisGrpcTransport, "grpc"), (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudRedisRestTransport, "rest"), ]) def test_cloud_redis_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -116,6 +128,7 @@ def test_cloud_redis_client_service_account_always_use_jwt(transport_class, tran @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), ]) def test_cloud_redis_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -131,6 +144,9 @@ def test_cloud_redis_client_from_service_account_file(client_class, transport_na assert client.transport._host == ( 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://redis.googleapis.com' ) @@ -138,6 +154,7 @@ def test_cloud_redis_client_get_transport_class(): transport = CloudRedisClient.get_transport_class() available_transports = [ transports.CloudRedisGrpcTransport, + transports.CloudRedisRestTransport, ] assert transport in available_transports @@ -148,6 +165,7 @@ def test_cloud_redis_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), ]) @mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) @mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) @@ -267,6 +285,8 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp (CloudRedisAsyncClient, 
transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "true"), (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), ]) @mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) @mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) @@ -404,6 +424,7 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), ]) def test_cloud_redis_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -428,6 +449,7 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", None), ]) def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -2921,6 +2943,2179 @@ async def test_delete_instance_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ + 
mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ListInstancesRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) + + +def test_list_instances_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_list_instances_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_redis.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_rest_pager(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_redis.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_redis.Instance) + for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +def test_get_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + + +def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default 
values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_redis.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloud_redis.Instance.to_json(cloud_redis.Instance()) + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_get_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + cloud_redis.GetInstanceRequest(), + name='name_value', + ) + + +def test_get_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceId"] = 'instance_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("instance_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == 'instance_id_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials()) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + 
mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.CreateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 
'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instance(request) + + +def test_create_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_create_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + cloud_redis.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + +def test_create_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_update_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpdateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 
'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_instance(request) + + +def test_update_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_update_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_instance( + cloud_redis.UpdateInstanceRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + +def test_update_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpgradeInstanceRequest, + dict, +]) +def test_upgrade_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.upgrade_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request_init["redis_version"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + jsonified_request["redisVersion"] = 'redis_version_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "redisVersion" in jsonified_request + assert jsonified_request["redisVersion"] == 'redis_version_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.upgrade_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_upgrade_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.upgrade_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "redisVersion", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_upgrade_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.UpgradeInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpgradeInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.upgrade_instance(request) + + +def test_upgrade_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + redis_version='redis_version_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.upgrade_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:upgrade" % client.transport._host, args[1]) + + +def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.upgrade_instance( + cloud_redis.UpgradeInstanceRequest(), + name='name_value', + redis_version='redis_version_value', + ) + + +def test_upgrade_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ImportInstanceRequest, + dict, +]) +def test_import_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.import_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.import_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_import_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.import_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_import_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() 
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.ImportInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_import_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ImportInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.import_instance(request) + + +def test_import_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.import_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:import" % client.transport._host, args[1]) + + +def test_import_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.import_instance( + cloud_redis.ImportInstanceRequest(), + name='name_value', + input_config=cloud_redis.InputConfig(gcs_source=cloud_redis.GcsSource(uri='uri_value')), + ) + + +def test_import_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ExportInstanceRequest, + dict, +]) +def test_export_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.export_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.export_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_export_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.export_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() 
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.ExportInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ExportInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_instance(request) + + +def test_export_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.export_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:export" % client.transport._host, args[1]) + + +def test_export_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.export_instance( + cloud_redis.ExportInstanceRequest(), + name='name_value', + output_config=cloud_redis.OutputConfig(gcs_destination=cloud_redis.GcsDestination(uri='uri_value')), + ) + + +def test_export_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.FailoverInstanceRequest, + dict, +]) +def test_failover_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.failover_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_failover_instance_rest_required_fields(request_type=cloud_redis.FailoverInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.failover_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_failover_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.failover_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_failover_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() 
+ req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.FailoverInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_failover_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.FailoverInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.failover_instance(request) + + +def test_failover_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.failover_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:failover" % client.transport._host, args[1]) + + +def test_failover_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.failover_instance( + cloud_redis.FailoverInstanceRequest(), + name='name_value', + data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS, + ) + + +def test_failover_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +def test_delete_instance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.DeleteInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instance(request) + + +def test_delete_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_delete_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_redis.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_delete_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudRedisGrpcTransport( @@ -2999,6 +5194,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
@@ -3009,6 +5205,7 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", + "rest", ]) def test_transport_kind(transport_name): transport = CloudRedisClient.get_transport_class(transport_name)( @@ -3143,6 +5340,7 @@ def test_cloud_redis_transport_auth_adc(transport_class): [ transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, ], ) def test_cloud_redis_transport_auth_gdch_credentials(transport_class): @@ -3239,10 +5437,37 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) +def test_cloud_redis_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudRedisRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_redis_rest_lro_client(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_cloud_redis_host_no_port(transport_name): client = CloudRedisClient( @@ -3252,11 +5477,14 @@ def test_cloud_redis_host_no_port(transport_name): ) assert client.transport._host == ( 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", + "rest", ]) def test_cloud_redis_host_with_port(transport_name): client = CloudRedisClient( @@ -3266,8 +5494,51 @@ def test_cloud_redis_host_with_port(transport_name): ) assert client.transport._host == ( 'redis.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com:8000' ) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_cloud_redis_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudRedisClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudRedisClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.upgrade_instance._session + session2 = client2.transport.upgrade_instance._session + assert session1 != session2 + session1 = 
client1.transport.import_instance._session + session2 = client2.transport.import_instance._session + assert session1 != session2 + session1 = client1.transport.export_instance._session + session2 = client2.transport.export_instance._session + assert session1 != session2 + session1 = client1.transport.failover_instance._session + session2 = client2.transport.failover_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 def test_cloud_redis_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -3553,6 +5824,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -3568,6 +5840,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 45f29cdac466..5f3f832b141e 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -2122,11 +2122,11 @@ def test_generate_sample_spec_basic(): ) ] ) - opts = Options.build("transport=grpc") + opts = Options.build("transport=grpc+rest") specs = sorted(samplegen.generate_sample_specs( api_schema, opts=opts), key=lambda x: x["transport"]) specs.sort(key=lambda x: x["transport"]) - assert len(specs) == 2 + assert len(specs) == 3 assert specs[0] == { "rpc": "Ramshorn", @@ -2144,6 +2144,14 @@ def test_generate_sample_spec_basic(): "description": "Snippet for ramshorn" } + assert specs[2] == { + "rpc": "Ramshorn", + "transport": "rest", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_rest", + "description": "Snippet for 
ramshorn" + } + def test__set_sample_metadata_server_streaming(): sample = { From acb0caaa5c8a99aa3c8477b30e246574e4261db2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 8 Oct 2022 19:50:33 +0200 Subject: [PATCH 0907/1339] chore(deps): update dependency google-api-core to v2.10.2 (#1467) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ee9d087f8c0a..b04816e9d7fd 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ click==8.1.3 -google-api-core==2.10.1 +google-api-core==2.10.2 googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 From de3e2a36be0895cd250b38154f5bc52a64d6d3b5 Mon Sep 17 00:00:00 2001 From: Gal Zahavi <38544478+galz10@users.noreply.github.com> Date: Sun, 16 Oct 2022 09:55:51 +0000 Subject: [PATCH 0908/1339] feat: add __version__ in GAPIC clients (#1350) * Chore: update setup.py to new standards * added end block * changed required python version * Edited golden files * Edited ads-templates setup.py * added missing commas to setup.py * changed classifiers for ads-template setup.py * changed ads-templates setup.py * Reverted ads-template changes * Added templeted version and updated protobuf * added package_root * Updated golden test files * Revert "added package_root" This reverts commit b8ce6142048b2c51f923948aa6ddb007bde50b51. * Revert "Updated golden test files" This reverts commit ba37d1342853745471932f2157fa5e385598c498. 
* updated setup.py * Updated Golden files * added version to __init__.py * updated api-core dependency * updated setup.py to match golden * Changed version.py path to be set automatically * added version import into _base.py.j2 * changed version import placement * added changes from incremental-changes-setup-py * moved constraints file * updated golden files Co-authored-by: Anthonios Partheniou --- .../templates/%namespace/%name/__init__.py.j2 | 5 + .../templates/%namespace/%name/version.py.j2 | 5 + .../%name_%version/%sub/__init__.py.j2 | 5 + .../gapic/templates/setup.py.j2 | 106 +++++++++++------- .../templates/testing/_default_constraints.j2 | 11 ++ .../templates/testing/constraints-3.10.txt.j2 | 4 + .../templates/testing/constraints-3.11.txt.j2 | 4 + .../templates/testing/constraints-3.7.txt.j2 | 17 +++ .../templates/testing/constraints-3.8.txt.j2 | 4 + .../templates/testing/constraints-3.9.txt.j2 | 4 + .../asset/google/cloud/asset/__init__.py | 4 + .../asset/google/cloud/asset/version.py | 16 +++ .../asset/google/cloud/asset_v1/__init__.py | 4 + .../tests/integration/goldens/asset/setup.py | 90 ++++++++++----- .../asset/testing/constraints-3.10.txt | 7 ++ .../asset/testing/constraints-3.11.txt | 7 ++ .../goldens/asset/testing/constraints-3.7.txt | 12 ++ .../goldens/asset/testing/constraints-3.8.txt | 7 ++ .../goldens/asset/testing/constraints-3.9.txt | 7 ++ .../google/iam/credentials/__init__.py | 4 + .../google/iam/credentials/version.py | 16 +++ .../google/iam/credentials_v1/__init__.py | 4 + .../integration/goldens/credentials/setup.py | 88 ++++++++++----- .../credentials/testing/constraints-3.10.txt | 6 + .../credentials/testing/constraints-3.11.txt | 6 + .../credentials/testing/constraints-3.7.txt | 11 ++ .../credentials/testing/constraints-3.8.txt | 6 + .../credentials/testing/constraints-3.9.txt | 6 + .../google/cloud/eventarc/__init__.py | 4 + .../eventarc/google/cloud/eventarc/version.py | 16 +++ .../google/cloud/eventarc_v1/__init__.py | 4 + 
.../integration/goldens/eventarc/setup.py | 88 ++++++++++----- .../eventarc/testing/constraints-3.10.txt | 6 + .../eventarc/testing/constraints-3.11.txt | 6 + .../eventarc/testing/constraints-3.7.txt | 11 ++ .../eventarc/testing/constraints-3.8.txt | 6 + .../eventarc/testing/constraints-3.9.txt | 6 + .../logging/google/cloud/logging/__init__.py | 4 + .../logging/google/cloud/logging/version.py | 16 +++ .../google/cloud/logging_v2/__init__.py | 4 + .../integration/goldens/logging/setup.py | 88 ++++++++++----- .../logging/testing/constraints-3.10.txt | 6 + .../logging/testing/constraints-3.11.txt | 6 + .../logging/testing/constraints-3.7.txt | 11 ++ .../logging/testing/constraints-3.8.txt | 6 + .../logging/testing/constraints-3.9.txt | 6 + .../redis/google/cloud/redis/__init__.py | 4 + .../redis/google/cloud/redis/version.py | 16 +++ .../redis/google/cloud/redis_v1/__init__.py | 4 + .../tests/integration/goldens/redis/setup.py | 88 ++++++++++----- .../redis/testing/constraints-3.10.txt | 6 + .../redis/testing/constraints-3.11.txt | 6 + .../goldens/redis/testing/constraints-3.7.txt | 11 ++ .../goldens/redis/testing/constraints-3.8.txt | 6 + .../goldens/redis/testing/constraints-3.9.txt | 6 + 55 files changed, 728 insertions(+), 179 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name/version.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.10.txt.j2 create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.11.txt.j2 create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.8.txt.j2 create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.9.txt.j2 create mode 100644 
packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt create mode 100644 
packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index 73b871932ade..a0458fe24131 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -1,6 +1,11 @@ {% extends '_base.py.j2' %} {% block content %} +{% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.module_name %} +from {{package_path}} import version as package_version + +__version__ = package_version.__version__ + {# Import subpackages. 
-#} {% for subpackage in api.subpackages|dictsort %} from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/version.py.j2 new file mode 100644 index 000000000000..425d6ab7cca5 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/version.py.j2 @@ -0,0 +1,5 @@ +{% extends '_base.py.j2' %} +{% block content %} + +__version__ = "0.1.0" +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 176cfd7f5d84..97618b7d725a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -2,6 +2,11 @@ {% block content %} +{% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.module_name %} +from {{package_path}} import version as package_version + +__version__ = package_version.__version__ + {# Import subpackages. -#} {% for subpackage, _ in api.subpackages|dictsort %} from . 
import {{ subpackage }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 9d582615552e..4d4e3330094d 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -4,56 +4,86 @@ import io import os -import setuptools # type: ignore -version = '0.1.0' +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() +name = '{{ api.naming.warehouse_package_name }}' -setuptools.setup( - name='{{ api.naming.warehouse_package_name }}', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-{{ api.naming.warehouse_package_name }}", - version=version, - long_description=readme, - {% if api.naming.namespace %} - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages={{ api.naming.namespace_packages }}, - {% else %} - packages=setuptools.PEP420PackageFinder.find(), - {% endif %} - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', +{% set warehouse_description = api.naming.warehouse_package_name.replace('-',' ')|title %} +{% set package_path = api.naming.module_namespace|join('/') + "/" + api.naming.module_name %} + +description = "{{ warehouse_description }} API client library" + +version = {} +with open(os.path.join(package_root, '{{ package_path }}/version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + 
"google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} - 'grpc-google-iam-v1 >= 0.12.4, < 0.13dev', + 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', {% endif %} {% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} 'google-cloud-documentai >= 1.2.1, < 2.0.0dev', {% endif %} - ), - python_requires='>=3.7', +] +url = "https://github.com/googleapis/python-{{ api.naming.warehouse_package_name|replace("google-cloud-", "") }}" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming 
Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 new file mode 100644 index 000000000000..fe0a64d4b542 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 @@ -0,0 +1,11 @@ +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +{% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} +grpc-google-iam-v1 +{% endif %} +{% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} +google-cloud-documentai +{% endif %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.10.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.10.txt.j2 new file mode 100644 index 000000000000..e0f6dc759047 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.10.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.11.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.11.txt.j2 new file mode 100644 index 000000000000..e0f6dc759047 --- 
/dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.11.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 new file mode 100644 index 000000000000..8026a1a321cb --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -0,0 +1,17 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +# This file is intentionally left empty to test the +# latest versions of dependencies. +google-api-core==1.33.2 +proto-plus==1.22.0 +protobuf==3.19.5 +{% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} +grpc-google-iam-v1==0.12.4 +{% endif %} +{% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} +google-cloud-documentai==1.2.1 +{% endif %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.8.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.8.txt.j2 new file mode 100644 index 000000000000..e0f6dc759047 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.8.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.9.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.9.txt.j2 new file mode 100644 index 000000000000..e0f6dc759047 ---
/dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.9.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index 1f4a2dd59690..673ae0cce674 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.asset import version as package_version + +__version__ = package_version.__version__ + from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/version.py new file mode 100644 index 000000000000..35859c3f7fc1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 5a31481e893f..3f0c4e4a1b90 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.asset import version as package_version + +__version__ = package_version.__version__ + from .services.asset_service import AssetServiceClient from .services.asset_service import AssetServiceAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 40a197078af1..849ede464ee2 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -15,45 +15,77 @@ # import io import os -import setuptools # type: ignore -version = '0.1.0' +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +name = 'google-cloud-asset' + + +description = "Google Cloud Asset API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/asset/version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.33.2, 
<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', +] +url = "https://github.com/googleapis/python-asset" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + setuptools.setup( - name='google-cloud-asset', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-google-cloud-asset", + name=name, version=version, + description=description, long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', - 'grpc-google-iam-v1 >= 0.12.4, < 0.13dev', - ), - python_requires='>=3.7', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI 
Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt new file mode 100644 index 000000000000..e78e83cfcfd8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -0,0 +1,12 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +# This file is intentionally left empty to test the +# latest versions of dependencies. +google-api-core==1.33.2 +proto-plus==1.22.0 +protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file.
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 461cabe788cc..86aee5377afa 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.iam.credentials import version as package_version + +__version__ = package_version.__version__ + from google.iam.credentials_v1.services.iam_credentials.client import IAMCredentialsClient from google.iam.credentials_v1.services.iam_credentials.async_client import IAMCredentialsAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/version.py new file mode 100644 index 000000000000..35859c3f7fc1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 989a46cc864f..6b49346369f3 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.iam.credentials import version as package_version + +__version__ = package_version.__version__ + from .services.iam_credentials import IAMCredentialsClient from .services.iam_credentials import IAMCredentialsAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 817c1a038dc8..05229604a938 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -15,44 +15,76 @@ # import io import os -import setuptools # type: ignore -version = '0.1.0' +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +name = 'google-iam-credentials' + + +description = "Google Iam Credentials API client library" + +version = {} +with open(os.path.join(package_root, 'google/iam/credentials/version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.33.2, 
<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-google-iam-credentials" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + setuptools.setup( - name='google-iam-credentials', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-google-iam-credentials", + name=name, version=version, + description=description, long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.iam'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', - ), - python_requires='>=3.7', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: 
Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt new file mode 100644 index 000000000000..aca9f2d36553 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +# This file is intentionally left empty to test the +# latest versions of dependencies. +google-api-core==1.33.2 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py index d557c9f0b4de..43ea075288fb 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.eventarc import version as package_version + +__version__ = package_version.__version__ + from google.cloud.eventarc_v1.services.eventarc.client import EventarcClient from google.cloud.eventarc_v1.services.eventarc.async_client import EventarcAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/version.py new file mode 100644 index 000000000000..35859c3f7fc1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 070e8b44991c..bb9b7763c0f8 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.eventarc import version as package_version + +__version__ = package_version.__version__ + from .services.eventarc import EventarcClient from .services.eventarc import EventarcAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 903b2b6e08e9..fd03b3f1a5c8 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -15,44 +15,76 @@ # import io import os -import setuptools # type: ignore -version = '0.1.0' +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +name = 'google-cloud-eventarc' + + +description = "Google Cloud Eventarc API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/eventarc/version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", 
+ "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-eventarc" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + setuptools.setup( - name='google-cloud-eventarc', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-google-cloud-eventarc", + name=name, version=version, + description=description, long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', - ), - python_requires='>=3.7', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + 
"Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt new file mode 100644 index 000000000000..aca9f2d36553 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +# This file is intentionally left empty to test the +# latest versions of dependencies. +google-api-core==1.33.2 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 712e121b87ce..9ee2c6a4bb7f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.logging import version as package_version + +__version__ = package_version.__version__ + from google.cloud.logging_v2.services.config_service_v2.client import ConfigServiceV2Client from google.cloud.logging_v2.services.config_service_v2.async_client import ConfigServiceV2AsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/version.py new file mode 100644 index 000000000000..35859c3f7fc1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 1929cecec6a3..6e529d0b9b54 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.logging import version as package_version + +__version__ = package_version.__version__ + from .services.config_service_v2 import ConfigServiceV2Client from .services.config_service_v2 import ConfigServiceV2AsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 0ca8e0943cd6..deb483fcd7cc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -15,44 +15,76 @@ # import io import os -import setuptools # type: ignore -version = '0.1.0' +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +name = 'google-cloud-logging' + + +description = "Google Cloud Logging API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/logging/version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.33.2, 
<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-logging" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + setuptools.setup( - name='google-cloud-logging', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-google-cloud-logging", + name=name, version=version, + description=description, long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', - ), - python_requires='>=3.7', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming 
Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt new file mode 100644 index 000000000000..aca9f2d36553 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +# This file is intentionally left empty to test the +# latest versions of dependencies. +google-api-core==1.33.2 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 350ba306355a..e0dab1df78b7 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.redis import version as package_version + +__version__ = package_version.__version__ + from google.cloud.redis_v1.services.cloud_redis.client import CloudRedisClient from google.cloud.redis_v1.services.cloud_redis.async_client import CloudRedisAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/version.py new file mode 100644 index 000000000000..35859c3f7fc1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index d9600edc259d..38248ca89eee 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.redis import version as package_version + +__version__ = package_version.__version__ + from .services.cloud_redis import CloudRedisClient from .services.cloud_redis import CloudRedisAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 6fbd9aacb19d..84a048b4c253 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -15,44 +15,76 @@ # import io import os -import setuptools # type: ignore -version = '0.1.0' +import setuptools # type: ignore package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +name = 'google-cloud-redis' + + +description = "Google Cloud Redis API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/redis/version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "proto-plus >= 1.22.0, <2.0.0dev", 
+ "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-redis" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + setuptools.setup( - name='google-cloud-redis', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-google-cloud-redis", + name=name, version=version, + description=description, long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', - ), - python_requires='>=3.7', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + 
"Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt new file mode 100644 index 000000000000..aca9f2d36553 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +# This file is intentionally left empty to test the +# latest versions of dependencies. +google-api-core==1.33.2 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf From 6be32e71431fd76c6adbb98f74c178656145a0d4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 16 Oct 2022 13:01:55 +0200 Subject: [PATCH 0909/1339] chore(deps): update dependency setuptools to v65.5.0 (#1470) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b04816e9d7fd..c6406188084d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,7 +6,7 @@ MarkupSafe==2.1.1 protobuf==3.20.3 pypandoc==1.9 PyYAML==6.0 -setuptools==65.4.1 +setuptools==65.5.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 pytest-asyncio==0.19.0 \ No newline at end of file From f73dc2edaa069da787d0bc57e4096651dc53284f Mon Sep 17 00:00:00 2001 From: Laurent Picard Date: Mon, 17 Oct 2022 15:58:34 +0200 Subject: [PATCH 0910/1339] fix: Add supported dict typing for client_options (#1464) Fixes #1380 --- .../%name/%version/%sub/services/%service/client.py.j2 | 7 ++++--- .../%name_%version/%sub/services/%service/client.py.j2 | 7 ++++--- .../google/cloud/asset_v1/services/asset_service/client.py | 7 ++++--- .../iam/credentials_v1/services/iam_credentials/client.py | 7 ++++--- .../google/cloud/eventarc_v1/services/eventarc/client.py | 7 ++++--- .../cloud/logging_v2/services/config_service_v2/client.py | 7 ++++--- .../cloud/logging_v2/services/logging_service_v2/client.py | 7 ++++--- .../cloud/logging_v2/services/metrics_service_v2/client.py | 7 ++++--- .../google/cloud/redis_v1/services/cloud_redis/client.py | 7 ++++--- 9 files changed, 36 insertions(+), 27 deletions(-) mode change 100755 => 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py mode change 100755 => 100644 
packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py mode change 100755 => 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py mode change 100755 => 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py mode change 100755 => 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py mode change 100755 => 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py mode change 100755 => 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 4666ab3e0f1c..29f0bd35c839 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -5,7 +5,7 @@ from collections import OrderedDict import os import re -from typing import Callable, Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Callable, Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast import pkg_resources {% if service.any_deprecated %} import warnings @@ -219,7 +219,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def 
__init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, {{ service.name }}Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -238,7 +238,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): beta state (preview). We welcome your feedback via an issue in this library's source repository. {% endif %} - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -268,6 +268,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) # Create SSL credentials for mutual TLS if needed. 
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index be54b73db8f4..c19e3bb9b20d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -9,7 +9,7 @@ import functools {% endif %} import os import re -from typing import Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast import pkg_resources {% if service.any_deprecated %} import warnings @@ -276,7 +276,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, {{ service.name }}Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the {{ (service.client_name|snake_case).replace('_', ' ') }}. @@ -295,7 +295,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): beta state (preview). We welcome your feedback via an issue in this library's source repository. 
{% endif %} - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -325,6 +325,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py old mode 100755 new mode 100644 index 7d4ee39dac21..33d47f6b99a0 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -304,7 +304,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AssetServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: 
Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the asset service client. @@ -321,7 +321,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -351,6 +351,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py old mode 100755 new mode 100644 index 6f6e7c1a9615..9e88f41d2fe0 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import 
client_options as client_options_lib @@ -300,7 +300,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, IAMCredentialsTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the iam credentials client. @@ -317,7 +317,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -347,6 +347,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py old mode 100755 new mode 100644 index 64156b8d954c..5a2bb9949b1a --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -320,7 +320,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, EventarcTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the eventarc client. @@ -337,7 +337,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -367,6 +367,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py old mode 100755 new mode 100644 index 393c816a60a5..8e5b28b9e8fb --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -335,7 +335,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ConfigServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: 
Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the config service v2 client. @@ -352,7 +352,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -382,6 +382,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py old mode 100755 new mode 100644 index 899efe17817c..e9a1f7ba4c06 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast import 
pkg_resources from google.api_core import client_options as client_options_lib @@ -291,7 +291,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the logging service v2 client. @@ -308,7 +308,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -338,6 +338,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py old mode 100755 new mode 100644 index 2ad30449f76e..b4a5c2cddba5 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -292,7 +292,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the metrics service v2 client. @@ -309,7 +309,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -339,6 +339,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py old mode 100755 new mode 100644 index 25f00e1b7773..3a1341ac6a17 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -315,7 +315,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudRedisTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, 
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud redis client. @@ -332,7 +332,7 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -362,6 +362,7 @@ def __init__(self, *, client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) From 374f69c207eafb3d33ba48cc9ffaa31c6d1467cb Mon Sep 17 00:00:00 2001 From: Gal Zahavi <38544478+galz10@users.noreply.github.com> Date: Mon, 17 Oct 2022 19:46:13 +0000 Subject: [PATCH 0911/1339] chore: changed version to gapic_version (#1472) This is a follow PR to https://togithub.com/googleapis/gapic-generator-python/pull/1350 which has not been released yet. 
--- .../gapic/templates/%namespace/%name/__init__.py.j2 | 2 +- .../%namespace/%name/{version.py.j2 => gapic_version.py.j2} | 0 .../templates/%namespace/%name_%version/%sub/__init__.py.j2 | 2 +- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- .../integration/goldens/asset/google/cloud/asset/__init__.py | 2 +- .../asset/google/cloud/asset/{version.py => gapic_version.py} | 0 .../integration/goldens/asset/google/cloud/asset_v1/__init__.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../goldens/credentials/google/iam/credentials/__init__.py | 2 +- .../google/iam/credentials/{version.py => gapic_version.py} | 0 .../goldens/credentials/google/iam/credentials_v1/__init__.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../goldens/eventarc/google/cloud/eventarc/__init__.py | 2 +- .../google/cloud/eventarc/{version.py => gapic_version.py} | 0 .../goldens/eventarc/google/cloud/eventarc_v1/__init__.py | 2 +- .../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 +- .../goldens/logging/google/cloud/logging/__init__.py | 2 +- .../google/cloud/logging/{version.py => gapic_version.py} | 0 .../goldens/logging/google/cloud/logging_v2/__init__.py | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 2 +- .../integration/goldens/redis/google/cloud/redis/__init__.py | 2 +- .../redis/google/cloud/redis/{version.py => gapic_version.py} | 0 .../integration/goldens/redis/google/cloud/redis_v1/__init__.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- 24 files changed, 18 insertions(+), 18 deletions(-) rename packages/gapic-generator/gapic/templates/%namespace/%name/{version.py.j2 => gapic_version.py.j2} (100%) rename packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/{version.py => gapic_version.py} (100%) rename packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/{version.py => gapic_version.py} (100%) rename 
packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/{version.py => gapic_version.py} (100%) rename packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/{version.py => gapic_version.py} (100%) rename packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/{version.py => gapic_version.py} (100%) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 index a0458fe24131..a5e61f219a99 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/__init__.py.j2 @@ -2,7 +2,7 @@ {% block content %} {% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.module_name %} -from {{package_path}} import version as package_version +from {{package_path}} import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/%namespace/%name/version.py.j2 rename to packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 97618b7d725a..c2aefdd7805f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -3,7 +3,7 @@ {% block content %} {% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.module_name %} -from {{package_path}} import version as package_version +from {{package_path}} import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 4d4e3330094d..6f5c02e11765 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -17,7 +17,7 @@ name = '{{ api.naming.warehouse_package_name }}' description = "{{ warehouse_description }} API client library" version = {} -with open(os.path.join(package_root, '{{ package_path }}/version.py')) as fp: +with open(os.path.join(package_root, '{{ package_path }}/gapic_version.py')) as fp: exec(fp.read(), version) version = version["__version__"] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index 673ae0cce674..00f87944327e 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.asset import version as package_version +from google.cloud.asset import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/version.py rename to packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 3f0c4e4a1b90..98cbb2d2eed1 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.asset import version as package_version +from google.cloud.asset import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 849ede464ee2..1609b700c6e3 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -26,7 +26,7 @@ description = "Google Cloud Asset API client library" version = {} -with open(os.path.join(package_root, 'google/cloud/asset/version.py')) as fp: +with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: exec(fp.read(), version) version = version["__version__"] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 86aee5377afa..6427ccb12a74 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.iam.credentials import version as package_version +from google.iam.credentials import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/version.py rename to packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 6b49346369f3..7ed4efa745e8 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.iam.credentials import version as package_version +from google.iam.credentials import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 05229604a938..86455ee808ff 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -26,7 +26,7 @@ description = "Google Iam Credentials API client library" version = {} -with open(os.path.join(package_root, 'google/iam/credentials/version.py')) as fp: +with open(os.path.join(package_root, 'google/iam/credentials/gapic_version.py')) as fp: exec(fp.read(), version) version = version["__version__"] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py index 43ea075288fb..b1e28383519a 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.eventarc import version as package_version +from google.cloud.eventarc import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/version.py rename to packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index bb9b7763c0f8..407198da720e 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.eventarc import version as package_version +from google.cloud.eventarc import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index fd03b3f1a5c8..ad9c94433440 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -26,7 +26,7 @@ description = "Google Cloud Eventarc API client library" version = {} -with open(os.path.join(package_root, 'google/cloud/eventarc/version.py')) as fp: +with open(os.path.join(package_root, 'google/cloud/eventarc/gapic_version.py')) as fp: exec(fp.read(), version) version = version["__version__"] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 9ee2c6a4bb7f..71b274bca5ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.logging import version as package_version +from google.cloud.logging import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/version.py rename to packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 6e529d0b9b54..3d439a7a298d 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.logging import version as package_version +from google.cloud.logging import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index deb483fcd7cc..a9b84915eeec 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -26,7 +26,7 @@ description = "Google Cloud Logging API client library" version = {} -with open(os.path.join(package_root, 'google/cloud/logging/version.py')) as fp: +with open(os.path.join(package_root, 'google/cloud/logging/gapic_version.py')) as fp: exec(fp.read(), version) version = version["__version__"] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index e0dab1df78b7..5b74fd732e02 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.redis import version as package_version +from google.cloud.redis import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/version.py rename to packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index 38248ca89eee..de56a8982f36 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.redis import version as package_version +from google.cloud.redis import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 84a048b4c253..ceb7c8d01990 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -26,7 +26,7 @@ description = "Google Cloud Redis API client library" version = {} -with open(os.path.join(package_root, 'google/cloud/redis/version.py')) as fp: +with open(os.path.join(package_root, 'google/cloud/redis/gapic_version.py')) as fp: exec(fp.read(), version) version = version["__version__"] From d2f4adf8e0643e91dd17bf7bd181dd1bd9ebc0a9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 17 Oct 2022 18:28:45 -0400 Subject: [PATCH 0912/1339] chore(main): release 1.5.0 (#1473) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 14 ++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a0fd184faae5..7f554e34fa84 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,20 @@ # Changelog +## [1.5.0](https://github.com/googleapis/gapic-generator-python/compare/v1.4.4...v1.5.0) (2022-10-17) + + +### Features + +* Add __version__ in GAPIC clients ([#1350](https://github.com/googleapis/gapic-generator-python/issues/1350)) ([1c91347](https://github.com/googleapis/gapic-generator-python/commit/1c913476b9d2efe8a8db803bfe1d47ad8697bc72)) + + +### Bug Fixes + +* Add supported dict typing for client_options 
([#1464](https://github.com/googleapis/gapic-generator-python/issues/1464)) ([de62f12](https://github.com/googleapis/gapic-generator-python/commit/de62f12b23ce8f434dd429f7761ae0001c8ad34f)), closes [#1380](https://github.com/googleapis/gapic-generator-python/issues/1380) +* Fix multiple gapic-generator-python bugs ([#1458](https://github.com/googleapis/gapic-generator-python/issues/1458)) ([ab3e361](https://github.com/googleapis/gapic-generator-python/commit/ab3e361c5e69060afb721e76e6fd4fac0d367d15)) +* Snippetgen skip REST snippets ([#1463](https://github.com/googleapis/gapic-generator-python/issues/1463)) ([119a3f1](https://github.com/googleapis/gapic-generator-python/commit/119a3f18d671664309efbf1aee7bafc94b401eed)) + ## [1.4.4](https://github.com/googleapis/gapic-generator-python/compare/v1.4.3...v1.4.4) (2022-09-20) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 92d9402ce43b..54982cdbf904 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.4.4" +version = "1.5.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From f87675a477241e0f5dc26ecbdecb295ac4bbb479 Mon Sep 17 00:00:00 2001 From: Laurent Picard Date: Wed, 19 Oct 2022 16:00:38 +0200 Subject: [PATCH 0913/1339] fix: Detect changed Python files in Git pre-commit hook (#1475) --- packages/gapic-generator/.githooks/pre-commit | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/.githooks/pre-commit b/packages/gapic-generator/.githooks/pre-commit index 580aaffbcaae..b7ffeb44dbb6 100755 --- a/packages/gapic-generator/.githooks/pre-commit +++ b/packages/gapic-generator/.githooks/pre-commit @@ -76,7 +76,7 @@ fi # Check only the staged files. 
NUM_TOTAL_FILES_CHANGED=$(git diff --cached --name-only | wc -l) -NUM_PYTHON_FILES_CHANGED=$(git diff --cached --name-only "*.java" | wc -l) +NUM_PYTHON_FILES_CHANGED=$(git diff --cached --name-only "*.py" | wc -l) NUM_UNIT_GOLDEN_FILES_CHANGED=$(git diff --cached --name-only "src/test/*/*.golden" | wc -l) NUM_INTEGRATION_GOLDEN_FILES_CHANGED=$(git diff --cached --name-only "tests/integration/goldens/*/*.golden" | wc -l) NUM_INTEGRATION_BAZEL_FILES_CHANGED=$(git diff --cached --name-only "tests/integration/*/BUILD.bazel" | wc -l) From f62186a3fb0036c26caf685b42b315c4420b2cc2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 24 Oct 2022 14:58:07 +0200 Subject: [PATCH 0914/1339] chore(deps): update dependency pytest-asyncio to v0.20.1 (#1478) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index c6406188084d..e85dfd3e54b5 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,4 +9,4 @@ PyYAML==6.0 setuptools==65.5.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 -pytest-asyncio==0.19.0 \ No newline at end of file +pytest-asyncio==0.20.1 \ No newline at end of file From eb3e015f277ddb5e30f9497ad0c028dca453c278 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 29 Oct 2022 04:15:40 +0200 Subject: [PATCH 0915/1339] chore(deps): update dependency pypandoc to v1.10 (#1482) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e85dfd3e54b5..16ed3b6437fe 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,7 +4,7 @@ googleapis-common-protos==1.56.4 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.3 -pypandoc==1.9 +pypandoc==1.10 PyYAML==6.0 
setuptools==65.5.0 grpc-google-iam-v1==0.12.4 From fdf23137bfe529d59187488aadb5544cfa02da6e Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Mon, 31 Oct 2022 13:04:47 -0700 Subject: [PATCH 0916/1339] chore: fix snippet metadata filename and add placeholder library version (#1479) * chore: fix snippet metadata filename and add placeholder library version --- .../gapic/generator/generator.py | 5 +++-- .../gapic/samplegen_utils/snippet_index.py | 4 ++++ ...nippet_metadata_google.cloud.asset.v1.json} | 3 ++- ...et_metadata_google.iam.credentials.v1.json} | 3 ++- ...pet_metadata_google.cloud.eventarc.v1.json} | 3 ++- ...=> snippet_metadata_google.logging.v2.json} | 3 ++- ...nippet_metadata_google.cloud.redis.v1.json} | 3 ++- .../tests/unit/generator/test_generator.py | 18 ++++++++++-------- .../tests/unit/samplegen/test_snippet_index.py | 3 ++- 9 files changed, 29 insertions(+), 16 deletions(-) rename packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/{snippet_metadata_asset_v1.json => snippet_metadata_google.cloud.asset.v1.json} (99%) mode change 100755 => 100644 rename packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/{snippet_metadata_credentials_v1.json => snippet_metadata_google.iam.credentials.v1.json} (99%) mode change 100755 => 100644 rename packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/{snippet_metadata_eventarc_v1.json => snippet_metadata_google.cloud.eventarc.v1.json} (99%) mode change 100755 => 100644 rename packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/{snippet_metadata_logging_v2.json => snippet_metadata_google.logging.v2.json} (99%) mode change 100755 => 100644 rename packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/{snippet_metadata_redis_v1.json => snippet_metadata_google.cloud.redis.v1.json} (99%) mode change 100755 => 100644 diff --git 
a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 341fa8a26f2d..3b4884156a13 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -217,9 +217,10 @@ def _generate_samples_and_manifest( if index.metadata_index.snippets: # NOTE(busunkim): Not all fields are yet populated in the snippet metadata. - # Expected filename: snippet_metadata_{apishortname}_{apiversion}.json + # Expected filename: snippet_metadata.{proto_package}.json + # For example: snippet_metadata_google.cloud.aiplatform.v1.json snippet_metadata_path = str(pathlib.Path( - out_dir) / f"snippet_metadata_{api_schema.naming.name}_{api_schema.naming.version}.json").lower() + out_dir) / f"snippet_metadata_{api_schema.naming.proto_package}.json").lower() output_files[snippet_metadata_path] = CodeGeneratorResponse.File( content=formatter.fix_whitespace(index.get_metadata_json()), name=snippet_metadata_path) diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index 0d179d2d413d..29e3a5923e11 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -104,6 +104,10 @@ def __init__(self, api_schema: api.API): self.metadata_index.client_library.name = api_schema.naming.warehouse_package_name self.metadata_index.client_library.language = snippet_metadata_pb2.Language.PYTHON # type: ignore + # This is just a placeholder. release-please is responsible for + # updating the metadata file to the correct library version. 
+ self.metadata_index.client_library.version = "0.1.0" + self.metadata_index.client_library.apis.append(snippet_metadata_pb2.Api( # type: ignore id=api_schema.naming.proto_package, version=api_schema.naming.version diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json old mode 100755 new mode 100644 similarity index 99% rename from packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json rename to packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 7843f9af6ef5..ef7a24c377d6 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_asset_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-asset" + "name": "google-cloud-asset", + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json old mode 100755 new mode 100644 similarity index 99% rename from packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json rename to packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index 35377b0f7cfb..8f25e0e91799 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_credentials_v1.json 
+++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-iam-credentials" + "name": "google-iam-credentials", + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json old mode 100755 new mode 100644 similarity index 99% rename from packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json rename to packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index 3d364cea8077..d26627397be9 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_eventarc_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-eventarc" + "name": "google-cloud-eventarc", + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json old mode 100755 new mode 100644 similarity index 99% rename from packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json rename to packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index e26012d6452e..78f6e43dbb5b --- 
a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_logging_v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-logging" + "name": "google-cloud-logging", + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json old mode 100755 new mode 100644 similarity index 99% rename from packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json rename to packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 9bdcf65a3081..18c8da345d71 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_redis_v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-redis" + "name": "google-cloud-redis", + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 7c634fc2dacb..5cac04e6f756 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -464,7 +464,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): }, )}, naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", 
proto_package="google.mollusca"), + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca.v1"), ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): @@ -474,11 +474,12 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): expected_snippet_index_json = { "clientLibrary": { "apis": [{ - "id": "google.mollusca", + "id": "google.mollusca.v1", "version": "v1" }], "language": "PYTHON", - "name": "mollusc-cephalopod-teuthida-" + "name": "mollusc-cephalopod-teuthida-", + "version": "0.1.0" }, "snippets": [ { @@ -532,7 +533,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): assert actual_response.file[1] == CodeGeneratorResponse.File( name="samples/generated_samples/clam_sample.py", content="\n",) - assert actual_response.file[2].name == "samples/generated_samples/snippet_metadata_mollusc_v1.json" + assert actual_response.file[2].name == "samples/generated_samples/snippet_metadata_google.mollusca.v1.json" assert json.loads( actual_response.file[2].content) == expected_snippet_index_json @@ -616,7 +617,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): }, )}, naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca"), + versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca.v1"), ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): actual_response = g.get_response(api_schema, @@ -626,12 +627,13 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): "clientLibrary": { "apis": [ { - "id": "google.mollusca", + "id": "google.mollusca.v1", "version": "v1" } ], "language": "PYTHON", - "name": "mollusc-cephalopod-teuthida-" + "name": "mollusc-cephalopod-teuthida-", + "version": "0.1.0" }, "snippets": [ 
{ @@ -709,7 +711,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): name="samples/generated_samples/7384949e.py", content="\n", ) print(actual_response.file[3].content) - assert actual_response.file[3].name == "samples/generated_samples/snippet_metadata_mollusc_v1.json" + assert actual_response.file[3].name == "samples/generated_samples/snippet_metadata_google.mollusca.v1.json" assert json.loads( actual_response.file[3].content) == expected_snippet_metadata_json diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index f4bc08661a58..ade49db562a2 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -255,7 +255,8 @@ def test_get_metadata_json(sample_str): } ], "language": "PYTHON", - "name": "google-mollusca" + "name": "google-mollusca", + "version": "0.1.0" }, "snippets": [ { From f656d40878baacc0c82ae03c9defea4be0cd52d5 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Wed, 2 Nov 2022 08:47:02 -0700 Subject: [PATCH 0917/1339] fix: snippetgen handling of repeated enum field (#1443) * fix: snippetgen handling of repeated enum field --- .../gapic-generator/gapic/samplegen/samplegen.py | 13 ++++++++++--- .../tests/unit/samplegen/test_samplegen.py | 7 +++++-- 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index b0481acc1c21..0ed54be530ae 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -364,7 +364,8 @@ def _normal_request_setup(self, base_param_to_attrs, val, request, field): elif attr.enum: # A little bit hacky, but 'values' is a list, and this is the easiest # way to verify that the value is a valid enum variant. 
- witness = any(e.name == val for e in attr.enum.values) + # Here val could be a list of a single enum value name. + witness = any(e.name in val for e in attr.enum.values) if not witness: raise types.InvalidEnumVariant( "Invalid variant for enum {}: '{}'".format(attr, val) @@ -974,8 +975,14 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess {"field": field_name, "value": field.mock_value_original_type}) elif field.enum: # Choose the last enum value in the list since index 0 is often "unspecified" + enum_value = field.enum.values[-1].name + if field.repeated: + field_value = [enum_value] + else: + field_value = enum_value + request.append( - {"field": field_name, "value": field.enum.values[-1].name}) + {"field": field_name, "value": field_value}) else: # This is a message type, recurse # TODO(busunkim): Some real world APIs have @@ -1023,7 +1030,7 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A spec = { "rpc": rpc_name, "transport": transport, - # `request` and `response` is populated in `preprocess_sample` + # `request` and `response` are populated in `preprocess_sample` "service": f"{api_schema.naming.proto_package}.{service_name}", "region_tag": region_tag, "description": f"Snippet for {utils.to_snake_case(rpc_name)}" diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 5f3f832b141e..0ae9f3d1dbe4 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -203,7 +203,9 @@ def test_preprocess_sample(): ] -def test_preprocess_sample_with_enum_field(): +@pytest.mark.parametrize( + 'repeated_enum,expected', [(False, "TYPE_2"), (True, ["TYPE_2"])]) +def test_preprocess_sample_with_enum_field(repeated_enum, expected): # Verify that the default response is added. 
sample = {"service": "Mollusc", "rpc": "Classify"} @@ -212,6 +214,7 @@ def test_preprocess_sample_with_enum_field(): "type": DummyField( name="type", required=True, + repeated=repeated_enum, type=enum_factory("type", ["TYPE_1", "TYPE_2"]), enum=enum_factory("type", ["TYPE_1", "TYPE_2"]) ) @@ -255,7 +258,7 @@ def test_preprocess_sample_with_enum_field(): assert sample["request"] == [ { "field": "type", - "value": "TYPE_2" + "value": expected } ] From d914456e4604d92143d65ec810b9508616fcb486 Mon Sep 17 00:00:00 2001 From: Laurent Picard Date: Wed, 9 Nov 2022 16:10:06 +0100 Subject: [PATCH 0918/1339] feat: Add typing to proto.Message based class attributes (#1474) * feat: Add typing to proto.Message based class attributes * fix: Apply actual mutable flavor for Sequence/Mapping * fix: Update iterators and all tests * fix: Update client and goldens * fix: Remove goldens to resolve conflicts with updated main branch * fix: Conform with PEP 484 and mypy new default (no_implicit_optional=True) * fix: Conform with PEP 484 and mypy new default (no_implicit_optional=True) Co-authored-by: Anthonios Partheniou --- .../%sub/services/%service/_mixins.py.j2 | 40 +-- .../%sub/services/%service/client.py.j2 | 30 +-- .../%sub/services/%service/pagers.py.j2 | 2 +- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 20 +- .../services/%service/transports/rest.py.j2 | 24 +- .../%name/%version/%sub/types/%proto.py.j2 | 2 + .../%name/%version/%sub/types/_message.py.j2 | 4 +- .../gapic/generator/generator.py | 4 +- .../gapic/samplegen/manifest.py | 4 +- packages/gapic-generator/gapic/schema/api.py | 6 +- .../gapic-generator/gapic/schema/metadata.py | 8 +- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../services/%service/_async_mixins.py.j2 | 40 +-- .../%sub/services/%service/_client_macros.j2 | 8 +- .../%sub/services/%service/_mixins.py.j2 | 40 +-- .../%sub/services/%service/async_client.py.j2 | 28 +- 
.../%sub/services/%service/client.py.j2 | 18 +- .../%sub/services/%service/pagers.py.j2 | 4 +- .../%service/transports/_rest_mixins.py.j2 | 6 +- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 20 +- .../%service/transports/grpc_asyncio.py.j2 | 16 +- .../services/%service/transports/rest.py.j2 | 24 +- .../%name_%version/%sub/types/%proto.py.j2 | 2 + .../%name_%version/%sub/types/_message.py.j2 | 4 +- packages/gapic-generator/gapic/utils/lines.py | 4 +- packages/gapic-generator/gapic/utils/rst.py | 3 +- .../services/asset_service/async_client.py | 102 ++++---- .../asset_v1/services/asset_service/client.py | 78 +++--- .../services/asset_service/transports/base.py | 2 +- .../services/asset_service/transports/grpc.py | 20 +- .../asset_service/transports/grpc_asyncio.py | 16 +- .../services/asset_service/transports/rest.py | 90 +++---- .../cloud/asset_v1/types/asset_service.py | 242 +++++++++--------- .../google/cloud/asset_v1/types/assets.py | 178 ++++++------- ...nippet_metadata_google.cloud.asset.v1.json | 4 +- .../services/iam_credentials/async_client.py | 68 ++--- .../services/iam_credentials/client.py | 60 ++--- .../iam_credentials/transports/base.py | 2 +- .../iam_credentials/transports/grpc.py | 20 +- .../transports/grpc_asyncio.py | 16 +- .../iam_credentials/transports/rest.py | 42 +-- .../google/iam/credentials_v1/types/common.py | 54 ++-- ...et_metadata_google.iam.credentials.v1.json | 20 +- .../services/eventarc/async_client.py | 56 ++-- .../eventarc_v1/services/eventarc/client.py | 46 ++-- .../services/eventarc/transports/base.py | 2 +- .../services/eventarc/transports/grpc.py | 20 +- .../eventarc/transports/grpc_asyncio.py | 16 +- .../services/eventarc/transports/rest.py | 48 ++-- .../cloud/eventarc_v1/types/eventarc.py | 60 ++--- .../google/cloud/eventarc_v1/types/trigger.py | 44 ++-- .../config_service_v2/async_client.py | 180 ++++++------- .../services/config_service_v2/client.py | 134 +++++----- 
.../config_service_v2/transports/base.py | 2 +- .../config_service_v2/transports/grpc.py | 20 +- .../transports/grpc_asyncio.py | 16 +- .../config_service_v2/transports/rest.py | 156 +++++------ .../logging_service_v2/async_client.py | 64 ++--- .../services/logging_service_v2/client.py | 54 ++-- .../logging_service_v2/transports/base.py | 2 +- .../logging_service_v2/transports/grpc.py | 20 +- .../transports/grpc_asyncio.py | 16 +- .../logging_service_v2/transports/rest.py | 54 ++-- .../metrics_service_v2/async_client.py | 50 ++-- .../services/metrics_service_v2/client.py | 40 +-- .../metrics_service_v2/transports/base.py | 2 +- .../metrics_service_v2/transports/grpc.py | 20 +- .../transports/grpc_asyncio.py | 16 +- .../metrics_service_v2/transports/rest.py | 48 ++-- .../cloud/logging_v2/types/log_entry.py | 50 ++-- .../google/cloud/logging_v2/types/logging.py | 88 +++---- .../cloud/logging_v2/types/logging_config.py | 196 +++++++------- .../cloud/logging_v2/types/logging_metrics.py | 48 ++-- .../snippet_metadata_google.logging.v2.json | 12 +- .../services/cloud_redis/async_client.py | 92 +++---- .../redis_v1/services/cloud_redis/client.py | 74 +++--- .../services/cloud_redis/transports/base.py | 2 +- .../services/cloud_redis/transports/grpc.py | 20 +- .../cloud_redis/transports/grpc_asyncio.py | 16 +- .../services/cloud_redis/transports/rest.py | 72 +++--- .../cloud/redis_v1/types/cloud_redis.py | 116 ++++----- .../tests/unit/schema/wrappers/test_field.py | 8 +- 84 files changed, 1693 insertions(+), 1668 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 index 27f7000ae2cf..3179d8bbd9bb 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -3,10 +3,10 @@ {% if "ListOperations" in api.mixin_api_methods %} def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -57,10 +57,10 @@ {% if "GetOperation" in api.mixin_api_methods %} def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -110,10 +110,10 @@ {% if "DeleteOperation" in api.mixin_api_methods %} def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -163,10 +163,10 @@ {% if "CancelOperation" in api.mixin_api_methods %} def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
@@ -215,10 +215,10 @@ {% if "WaitOperation" in api.mixin_api_methods %} def wait_operation( self, - request: operations_pb2.WaitOperationRequest = None, + request: Optional[operations_pb2.WaitOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most @@ -274,10 +274,10 @@ {% if "SetIamPolicy" in api.mixin_api_methods %} def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -393,10 +393,10 @@ {% if "GetIamPolicy" in api.mixin_api_methods %} def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -513,10 +513,10 @@ {% if "TestIamPermissions" in api.mixin_api_methods %} def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -575,10 +575,10 @@ {% if "GetLocation" in api.mixin_api_methods %} def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -628,10 +628,10 @@ {% if "ListLocations" in api.mixin_api_methods %} def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 29f0bd35c839..d0066988cbd2 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -5,7 +5,7 @@ from collections import OrderedDict import os import re -from typing import Callable, Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast +from typing import Callable, Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast import pkg_resources {% if service.any_deprecated %} import warnings @@ -68,7 +68,7 @@ class {{ service.client_name }}Meta(type): {% endif %} def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[{{ service.name }}Transport]: """Returns an appropriate transport class. 
@@ -218,7 +218,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, {{ service.name }}Transport, None] = None, + transport: Optional[Union[str, {{ service.name }}Transport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -230,7 +230,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, {{ service.name }}Transport]): The + transport (Optional[Union[str, {{ service.name }}Transport]]): The transport to use. If set to None, a transport is chosen automatically. {% if 'rest' in opts.transport and not opts.rest_numeric_enums %} @@ -340,17 +340,17 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def {{ method.name|snake_case }}(self, {% endif %}{# Extended Operations LRO #} {% if not method.client_streaming %} - request: Union[{{ method.input.ident }}, dict] = None, + request: Optional[Union[{{ method.input.ident }}, dict]] = None, *, {% for field in method.flattened_fields.values() %} - {{ field.name }}: {{ field.ident }} = None, + {{ field.name }}: Optional[{{ field.ident }}] = None, {% endfor %} {% else %} - requests: Iterator[{{ method.input.ident }}] = None, + requests: Optional[Iterator[{{ method.input.ident }}]] = None, *, {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} ) -> {{ method.client_output.ident }}: @@ -361,7 +361,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Args: {% if not method.client_streaming %} - 
request (Union[{{ method.input.ident.sphinx }}, dict]): + request (Union[{{ method.input.ident.sphinx }}, dict, None]): The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} @@ -516,10 +516,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if opts.add_iam_methods %} def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -633,10 +633,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -750,10 +750,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index 9b99ad48f596..7be0c3a5c5bf 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -78,7 +78,7 @@ class {{ method.name }}Pager: def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: return self._response.{{ method.paged_result_field.name }}.get(key) {% else %} - def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterator') }}: + def __iter__(self) -> {{ method.paged_result_field.ident | replace('MutableSequence', 'Iterator') }}: for page in self.pages: yield from page.{{ method.paged_result_field.name }} {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 19f6f2f93b05..f22a5dbe032e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -58,7 +58,7 @@ class {{ service.name }}Transport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index b9d46f1cd775..374e6ddbc2cc 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -51,14 +51,14 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, 
quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -186,8 +186,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index b693f7530362..ef32dbd4ae15 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -140,15 +140,15 @@ class {{service.name}}RestTransport({{service.name}}Transport): {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, 
bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[{{ service.name }}RestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -282,9 +282,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __call__(self, request: {{method.input.ident}}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 index c200027ac27a..81a0755195ba 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 @@ -4,6 +4,8 @@ {% with p = proto.disambiguate('proto') %} {% if proto.messages|length or proto.all_enums|length %} +from typing import MutableMapping, MutableSequence + import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 71ab95457a68..320498ba217b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -51,7 +51,7 @@ class {{ message.name }}({{ p }}.Message): {% for field in message.fields.values() %} {% if field.map %} {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] %} - {{ field.name }} = {{ p }}.MapField( + {{ field.name }}: MutableMapping[{{ key_field.type.ident.rel(message.ident) }}, {{ value_field.type.ident.rel(message.ident) }}] = {{ p }}.MapField( {{ p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }}, @@ -61,7 +61,7 @@ class {{ message.name }}({{ p }}.Message): ) {% endwith %} {% else %} - {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( + {{ field.name }}: {% if field.is_primitive %}{{ field.ident }}{% else %}{% if field.repeated %}MutableSequence[{% endif %}{{ field.type.ident.rel(message.ident) }}{% if field.repeated %}]{% endif %}{% endif %} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( {{ p }}.{{ field.proto_type }}, number={{ field.number }}, {% if field.proto3_optional %} diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 3b4884156a13..541e1ab61b18 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -19,7 +19,7 @@ import os import pathlib import typing -from typing import Any, DefaultDict, Dict, Mapping, Tuple +from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple from hashlib import sha256 from collections import OrderedDict, defaultdict from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg, render_format_string @@ -362,7 +362,7 @@ def _get_file( return {fn: cgr_file} def _get_filename( - self, template_name: str, *, api_schema: api.API, context: dict = None, + self, template_name: str, *, api_schema: api.API, context: 
Optional[dict] = None, ) -> str: """Return the appropriate output filename for this template. diff --git a/packages/gapic-generator/gapic/samplegen/manifest.py b/packages/gapic-generator/gapic/samplegen/manifest.py index b56b0159f227..47727e08ef35 100644 --- a/packages/gapic-generator/gapic/samplegen/manifest.py +++ b/packages/gapic-generator/gapic/samplegen/manifest.py @@ -14,7 +14,7 @@ import os import time -from typing import Tuple +from typing import Optional, Tuple from gapic.samplegen_utils import (types, yaml) from gapic.utils import case @@ -45,7 +45,7 @@ def generate( api_schema, *, environment: yaml.Map = PYTHON3_ENVIRONMENT, - manifest_time: int = None + manifest_time: Optional[int] = None ) -> Tuple[str, yaml.Doc]: """Generate a samplegen manifest for use by sampletest diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 19e1746347c4..b9af9e348ab4 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -81,7 +81,7 @@ def build( file_to_generate: bool, naming: api_naming.Naming, opts: Options = Options(), - prior_protos: Mapping[str, 'Proto'] = None, + prior_protos: Optional[Mapping[str, 'Proto']] = None, load_services: bool = True, all_resources: Optional[Mapping[str, wrappers.MessageType]] = None, ) -> 'Proto': @@ -243,7 +243,7 @@ def build( file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], package: str = '', opts: Options = Options(), - prior_protos: Mapping[str, 'Proto'] = None, + prior_protos: Optional[Mapping[str, 'Proto']] = None, ) -> 'API': """Build the internal API schema based on the request. 
@@ -631,7 +631,7 @@ def __init__( file_to_generate: bool, naming: api_naming.Naming, opts: Options = Options(), - prior_protos: Mapping[str, Proto] = None, + prior_protos: Optional[Mapping[str, Proto]] = None, load_services: bool = True, all_resources: Optional[Mapping[str, wrappers.MessageType]] = None, ): diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index d53c002e6779..8dc4d5dc67f8 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -370,15 +370,15 @@ class FieldIdentifier: def __str__(self) -> str: if self.mapping: - return f'Mapping[{self.mapping[0].ident}, {self.mapping[1].ident}]' + return f'MutableMapping[{self.mapping[0].ident}, {self.mapping[1].ident}]' if self.repeated: - return f'Sequence[{self.ident}]' + return f'MutableSequence[{self.ident}]' return str(self.ident) @property def sphinx(self) -> str: if self.mapping: - return f'Mapping[{self.mapping[0].ident.sphinx}, {self.mapping[1].ident.sphinx}]' + return f'MutableMapping[{self.mapping[0].ident.sphinx}, {self.mapping[1].ident.sphinx}]' if self.repeated: - return f'Sequence[{self.ident.sphinx}]' + return f'MutableSequence[{self.ident.sphinx}]' return self.ident.sphinx diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index cf8690e5920a..4d9830d5c6a9 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -82,7 +82,7 @@ def name(self) -> str: @utils.cached_property def ident(self) -> metadata.FieldIdentifier: """Return the identifier to be used in templates.""" - mapping: Union[None, Tuple[Field, Field]] = None + mapping: Optional[Tuple[Field, Field]] = None if self.map: mapping = (self.type.fields["key"], self.type.fields["value"]) return metadata.FieldIdentifier( diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index b578108a171e..0f1727f0de35 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -5,10 +5,10 @@ {% if "ListOperations" in api.mixin_api_methods %} async def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -59,10 +59,10 @@ {% if "GetOperation" in api.mixin_api_methods %} async def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -112,10 +112,10 @@ {% if "DeleteOperation" in api.mixin_api_methods %} async def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. 
@@ -165,10 +165,10 @@ {% if "CancelOperation" in api.mixin_api_methods %} async def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -217,10 +217,10 @@ {% if "WaitOperation" in api.mixin_api_methods %} async def wait_operation( self, - request: operations_pb2.WaitOperationRequest = None, + request: Optional[operations_pb2.WaitOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most @@ -281,10 +281,10 @@ {% if "SetIamPolicy" in api.mixin_api_methods %} async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -400,10 +400,10 @@ {% if "GetIamPolicy" in api.mixin_api_methods %} async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -520,10 +520,10 @@ {% if "TestIamPermissions" in api.mixin_api_methods %} async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -582,10 +582,10 @@ {% if "GetLocation" in api.mixin_api_methods %} async def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -635,10 +635,10 @@ {% if "ListLocations" in api.mixin_api_methods %} async def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 705b92189800..440d162b145a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -17,17 +17,17 @@ {% macro client_method(method, name, snippet_index, api, service, full_extended_lro=False) %} def {{ name }}(self, {% if not method.client_streaming %} - request: Union[{{ method.input.ident }}, dict] = None, + request: Optional[Union[{{ method.input.ident }}, dict]] = None, *, {% for field in method.flattened_fields.values() %} - {{ field.name }}: {{ field.ident }} = None, + {{ field.name }}: Optional[{{ field.ident }}] = None, {% endfor %} {% else %} - requests: Iterator[{{ method.input.ident }}] = None, + requests: Optional[Iterator[{{ method.input.ident }}]] = None, *, {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #} ) -> {{ method.extended_lro.operation_type.ident }}: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index 6db4ca8a1b1e..ec1387acdd17 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -3,10 +3,10 @@ {% if "ListOperations" in api.mixin_api_methods 
%} def list_operations( self, - request: operations_pb2.ListOperationsRequest = None, + request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -57,10 +57,10 @@ {% if "GetOperation" in api.mixin_api_methods %} def get_operation( self, - request: operations_pb2.GetOperationRequest = None, + request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -110,10 +110,10 @@ {% if "DeleteOperation" in api.mixin_api_methods %} def delete_operation( self, - request: operations_pb2.DeleteOperationRequest = None, + request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -163,10 +163,10 @@ {% if "CancelOperation" in api.mixin_api_methods %} def cancel_operation( self, - request: operations_pb2.CancelOperationRequest = None, + request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
@@ -215,10 +215,10 @@ {% if "WaitOperation" in api.mixin_api_methods %} def wait_operation( self, - request: operations_pb2.WaitOperationRequest = None, + request: Optional[operations_pb2.WaitOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most @@ -279,10 +279,10 @@ {% if "SetIamPolicy" in api.mixin_api_methods %} def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -398,10 +398,10 @@ {% if "GetIamPolicy" in api.mixin_api_methods %} def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -518,10 +518,10 @@ {% if "TestIamPermissions" in api.mixin_api_methods %} def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -580,10 +580,10 @@ {% if "GetLocation" in api.mixin_api_methods %} def get_location( self, - request: locations_pb2.GetLocationRequest = None, + request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -633,10 +633,10 @@ {% if "ListLocations" in api.mixin_api_methods %} def list_locations( self, - request: locations_pb2.ListLocationsRequest = None, + request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 3d3b29f4004a..88b142dd60fd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -5,7 +5,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union import pkg_resources {% if service.any_deprecated %} import warnings @@ -144,9 +144,9 @@ class {{ service.async_client_name }}: get_transport_class = functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, {{ service.name }}Transport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the {{ (service.client_name|snake_case).replace("_", " ") }}. 
@@ -195,17 +195,17 @@ class {{ service.async_client_name }}: {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, {% endwith %} {% if not method.client_streaming %} - request: Union[{{ method.input.ident }}, dict] = None, + request: Optional[Union[{{ method.input.ident }}, dict]] = None, *, {% for field in method.flattened_fields.values() %} - {{ field.name }}: {{ field.ident }} = None, + {{ field.name }}: Optional[{{ field.ident }}] = None, {% endfor %} {% else %} - requests: AsyncIterator[{{ method.input.ident }}] = None, + requests: Optional[AsyncIterator[{{ method.input.ident }}]] = None, *, {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} ) -> {{ method.client_output_async.ident }}: @@ -224,7 +224,7 @@ class {{ service.async_client_name }}: Args: {% if not method.client_streaming %} - request (Union[{{ method.input.ident.sphinx }}, dict]): + request (Optional[Union[{{ method.input.ident.sphinx }}, dict]]): The request object.{{ " " }} {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} {% for key, field in method.flattened_fields.items() %} @@ -387,10 +387,10 @@ class {{ service.async_client_name }}: {% if opts.add_iam_methods %} async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. 
@@ -501,10 +501,10 @@ class {{ service.async_client_name }}: async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -617,10 +617,10 @@ class {{ service.async_client_name }}: async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index c19e3bb9b20d..d7667abd1fa7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -9,7 +9,7 @@ import functools {% endif %} import os import re -from typing import Dict, Mapping, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast import pkg_resources {% if service.any_deprecated %} import warnings @@ -78,7 +78,7 @@ 
class {{ service.client_name }}Meta(type): {% endif %} def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[{{ service.name }}Transport]: """Returns an appropriate transport class. @@ -275,7 +275,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, {{ service.name }}Transport, None] = None, + transport: Optional[Union[str, {{ service.name }}Transport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -396,10 +396,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if opts.add_iam_methods %} def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -513,10 +513,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -631,10 +631,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index cc8d34921e25..fc3aa4f44c01 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -80,7 +80,7 @@ class {{ method.name }}Pager: def get(self, key: str) -> Optional[{{ method.paged_result_field.type.fields.get('value').ident }}]: return self._response.{{ method.paged_result_field.name }}.get(key) {% else %} - def __iter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'Iterator') }}: + def __iter__(self) -> {{ method.paged_result_field.ident | replace('MutableSequence', 'Iterator') }}: for page in self.pages: yield from page.{{ method.paged_result_field.name }} {% endif %} @@ -154,7 +154,7 @@ class {{ method.name }}AsyncPager: return self._response.{{ method.paged_result_field.name }}.get(key) {% else %} - def __aiter__(self) -> {{ method.paged_result_field.ident | replace('Sequence', 'AsyncIterator') }}: + def __aiter__(self) -> {{ method.paged_result_field.ident | replace('MutableSequence', 'AsyncIterator') }}: async def async_generator(): async for page in self.pages: for response in page.{{ method.paged_result_field.name }}: diff 
--git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 index 3bf58c3c75d4..4dee95ceca23 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -8,9 +8,9 @@ class _{{ name }}({{service.name}}RestStub): def __call__(self, request: {{ sig.request_type }}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> {{ sig.response_type }}: r"""Call the {{- ' ' -}} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index bfca9d99007a..50781784395e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -63,7 +63,7 @@ class {{ service.name }}Transport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 0e0ada45421b..96923c5dbd11 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -51,14 +51,14 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -188,8 +188,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = 
None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index d47e3eee6342..25b7b4db5c15 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -55,7 +55,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): @classmethod def create_channel(cls, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -95,15 +95,15 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: 
Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index db5eaaf120ef..126d6e00a615 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -174,15 +174,15 @@ class {{service.name}}RestTransport({{service.name}}Transport): {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[{{ service.name }}RestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -318,9 +318,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __call__(self, request: {{method.input.ident}}, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 index f4e4be617d69..f5aebf7364fb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 @@ -4,6 +4,8 @@ {% with p = proto.disambiguate('proto') %} {% if proto.messages|length or proto.all_enums|length %} +from typing import MutableMapping, MutableSequence + import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index b6e93cefcb84..31b6e795aa4e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -51,7 +51,7 @@ class {{ message.name }}({{ p }}.Message): {% for field in message.fields.values() %} {% if field.map %} {% with key_field = field.message.fields['key'], value_field = field.message.fields['value'] %} - {{ field.name }} = {{ p }}.MapField( + {{ field.name }}: MutableMapping[{{ key_field.type.ident.rel(message.ident) }}, {{ value_field.type.ident.rel(message.ident) }}] = {{ p 
}}.MapField( {{ p }}.{{ key_field.proto_type }}, {{ p }}.{{ value_field.proto_type }}, number={{ field.number }}, @@ -61,7 +61,7 @@ class {{ message.name }}({{ p }}.Message): ) {% endwith %} {% else %}{# field.map #} - {{ field.name }} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( + {{ field.name }}: {% if field.is_primitive %}{{ field.ident }}{% else %}{% if field.repeated %}MutableSequence[{% endif %}{{ field.type.ident.rel(message.ident) }}{% if field.repeated %}]{% endif %}{% endif %} = {{ p }}.{% if field.repeated %}Repeated{% endif %}Field( {{ p }}.{{ field.proto_type }}, number={{ field.number }}, {% if field.proto3_optional %} diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 4e268f284ee1..9e66a88ae98a 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -13,7 +13,7 @@ # limitations under the License. import textwrap -from typing import Iterable +from typing import Iterable, Optional def sort_lines(text: str, dedupe: bool = True) -> str: @@ -39,7 +39,7 @@ def sort_lines(text: str, dedupe: bool = True) -> str: return f'{leading}{answer}{trailing}' -def wrap(text: str, width: int, *, offset: int = None, indent: int = 0) -> str: +def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0) -> str: """Wrap the given string to the given width. This uses :meth:`textwrap.fill` under the hood, but provides useful diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index e9a76956a412..dc4912547ce9 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -13,13 +13,14 @@ # limitations under the License. 
import re +from typing import Optional import pypandoc # type: ignore from gapic.utils.lines import wrap -def rst(text: str, width: int = 72, indent: int = 0, nl: bool = None, +def rst(text: str, width: int = 72, indent: int = 0, nl: Optional[bool] = None, source_format: str = 'commonmark'): """Convert the given text to ReStructured Text. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 28f1cf2148ac..977b03e8876b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -144,9 +144,9 @@ def transport(self) -> AssetServiceTransport: get_transport_class = functools.partial(type(AssetServiceClient).get_transport_class, type(AssetServiceClient)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AssetServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the asset service client. 
@@ -190,10 +190,10 @@ def __init__(self, *, ) async def export_assets(self, - request: Union[asset_service.ExportAssetsRequest, dict] = None, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud @@ -245,7 +245,7 @@ async def sample_export_assets(): print(response) Args: - request (Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]]): The request object. Export asset request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -303,11 +303,11 @@ async def sample_export_assets(): return response async def list_assets(self, - request: Union[asset_service.ListAssetsRequest, dict] = None, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns @@ -341,7 +341,7 @@ async def sample_list_assets(): print(response) Args: - request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]]): The request object. ListAssets request. parent (:class:`str`): Required. 
Name of the organization or project the assets @@ -419,10 +419,10 @@ async def sample_list_assets(): return response async def batch_get_assets_history(self, - request: Union[asset_service.BatchGetAssetsHistoryRequest, dict] = None, + request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time @@ -460,7 +460,7 @@ async def sample_batch_get_assets_history(): print(response) Args: - request (Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]]): The request object. Batch get assets history request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -510,11 +510,11 @@ async def sample_batch_get_assets_history(): return response async def create_feed(self, - request: Union[asset_service.CreateFeedRequest, dict] = None, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent @@ -553,7 +553,7 @@ async def sample_create_feed(): print(response) Args: - request (Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.CreateFeedRequest, dict]]): The request object. Create asset feed request. parent (:class:`str`): Required. 
The name of the @@ -629,11 +629,11 @@ async def sample_create_feed(): return response async def get_feed(self, - request: Union[asset_service.GetFeedRequest, dict] = None, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -665,7 +665,7 @@ async def sample_get_feed(): print(response) Args: - request (Union[google.cloud.asset_v1.types.GetFeedRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.GetFeedRequest, dict]]): The request object. Get asset feed request. name (:class:`str`): Required. The name of the Feed and it must be in the @@ -743,11 +743,11 @@ async def sample_get_feed(): return response async def list_feeds(self, - request: Union[asset_service.ListFeedsRequest, dict] = None, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent @@ -780,7 +780,7 @@ async def sample_list_feeds(): print(response) Args: - request (Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.ListFeedsRequest, dict]]): The request object. List asset feeds request. parent (:class:`str`): Required. 
The parent @@ -853,11 +853,11 @@ async def sample_list_feeds(): return response async def update_feed(self, - request: Union[asset_service.UpdateFeedRequest, dict] = None, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, *, - feed: asset_service.Feed = None, + feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -892,7 +892,7 @@ async def sample_update_feed(): print(response) Args: - request (Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.UpdateFeedRequest, dict]]): The request object. Update asset feed request. feed (:class:`google.cloud.asset_v1.types.Feed`): Required. The new values of feed details. It must match @@ -964,11 +964,11 @@ async def sample_update_feed(): return response async def delete_feed(self, - request: Union[asset_service.DeleteFeedRequest, dict] = None, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an asset feed. @@ -997,7 +997,7 @@ async def sample_delete_feed(): await client.delete_feed(request=request) Args: - request (Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.DeleteFeedRequest, dict]]): The request object. name (:class:`str`): Required. 
The name of the feed and it must be in the @@ -1061,13 +1061,13 @@ async def sample_delete_feed(): ) async def search_all_resources(self, - request: Union[asset_service.SearchAllResourcesRequest, dict] = None, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, *, - scope: str = None, - query: str = None, - asset_types: Sequence[str] = None, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Cloud resources within the specified scope, such as @@ -1103,7 +1103,7 @@ async def sample_search_all_resources(): print(response) Args: - request (Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.SearchAllResourcesRequest, dict]]): The request object. Search all resources request. scope (:class:`str`): Required. A scope can be a project, a folder, or an @@ -1175,7 +1175,7 @@ async def sample_search_all_resources(): This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - asset_types (:class:`Sequence[str]`): + asset_types (:class:`MutableSequence[str]`): Optional. A list of asset types that this request searches for. 
If empty, it will search all the `searchable asset @@ -1275,12 +1275,12 @@ async def sample_search_all_resources(): return response async def search_all_iam_policies(self, - request: Union[asset_service.SearchAllIamPoliciesRequest, dict] = None, + request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, *, - scope: str = None, - query: str = None, + scope: Optional[str] = None, + query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a @@ -1316,7 +1316,7 @@ async def sample_search_all_iam_policies(): print(response) Args: - request (Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.SearchAllIamPoliciesRequest, dict]]): The request object. Search all IAM policies request. scope (:class:`str`): Required. A scope can be a project, a folder, or an @@ -1468,10 +1468,10 @@ async def sample_search_all_iam_policies(): return response async def analyze_iam_policy(self, - request: Union[asset_service.AnalyzeIamPolicyRequest, dict] = None, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have @@ -1507,7 +1507,7 @@ async def sample_analyze_iam_policy(): print(response) Args: - request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]]): The request object. A request message for [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1559,10 +1559,10 @@ async def sample_analyze_iam_policy(): return response async def analyze_iam_policy_longrunning(self, - request: Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] = None, + request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities @@ -1616,7 +1616,7 @@ async def sample_analyze_iam_policy_longrunning(): print(response) Args: - request (Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]): + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]]): The request object. A request message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 33d47f6b99a0..315906adf87d 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -59,7 +59,7 @@ class AssetServiceClientMeta(type): _transport_registry["rest"] = AssetServiceRestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[AssetServiceTransport]: """Returns an appropriate transport class. 
@@ -303,7 +303,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, AssetServiceTransport, None] = None, + transport: Optional[Union[str, AssetServiceTransport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -393,10 +393,10 @@ def __init__(self, *, ) def export_assets(self, - request: Union[asset_service.ExportAssetsRequest, dict] = None, + request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Exports assets with time and resource types to a given Cloud @@ -507,11 +507,11 @@ def sample_export_assets(): return response def list_assets(self, - request: Union[asset_service.ListAssetsRequest, dict] = None, + request: Optional[Union[asset_service.ListAssetsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns @@ -623,10 +623,10 @@ def sample_list_assets(): return response def batch_get_assets_history(self, - request: Union[asset_service.BatchGetAssetsHistoryRequest, dict] = None, + request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time @@ -708,11 +708,11 @@ def 
sample_batch_get_assets_history(): return response def create_feed(self, - request: Union[asset_service.CreateFeedRequest, dict] = None, + request: Optional[Union[asset_service.CreateFeedRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent @@ -827,11 +827,11 @@ def sample_create_feed(): return response def get_feed(self, - request: Union[asset_service.GetFeedRequest, dict] = None, + request: Optional[Union[asset_service.GetFeedRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -934,11 +934,11 @@ def sample_get_feed(): return response def list_feeds(self, - request: Union[asset_service.ListFeedsRequest, dict] = None, + request: Optional[Union[asset_service.ListFeedsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent @@ -1037,11 +1037,11 @@ def sample_list_feeds(): return response def update_feed(self, - request: Union[asset_service.UpdateFeedRequest, dict] = None, + request: Optional[Union[asset_service.UpdateFeedRequest, dict]] = None, *, - feed: asset_service.Feed = None, + feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. 
@@ -1148,11 +1148,11 @@ def sample_update_feed(): return response def delete_feed(self, - request: Union[asset_service.DeleteFeedRequest, dict] = None, + request: Optional[Union[asset_service.DeleteFeedRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an asset feed. @@ -1238,13 +1238,13 @@ def sample_delete_feed(): ) def search_all_resources(self, - request: Union[asset_service.SearchAllResourcesRequest, dict] = None, + request: Optional[Union[asset_service.SearchAllResourcesRequest, dict]] = None, *, - scope: str = None, - query: str = None, - asset_types: Sequence[str] = None, + scope: Optional[str] = None, + query: Optional[str] = None, + asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesPager: r"""Searches all Cloud resources within the specified scope, such as @@ -1352,7 +1352,7 @@ def sample_search_all_resources(): This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - asset_types (Sequence[str]): + asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. 
If empty, it will search all the `searchable asset @@ -1445,12 +1445,12 @@ def sample_search_all_resources(): return response def search_all_iam_policies(self, - request: Union[asset_service.SearchAllIamPoliciesRequest, dict] = None, + request: Optional[Union[asset_service.SearchAllIamPoliciesRequest, dict]] = None, *, - scope: str = None, - query: str = None, + scope: Optional[str] = None, + query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a @@ -1631,10 +1631,10 @@ def sample_search_all_iam_policies(): return response def analyze_iam_policy(self, - request: Union[asset_service.AnalyzeIamPolicyRequest, dict] = None, + request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have @@ -1717,10 +1717,10 @@ def sample_analyze_iam_policy(): return response def analyze_iam_policy_longrunning(self, - request: Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict] = None, + request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 
8a2127eeef7b..ad1d09b62633 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -51,7 +51,7 @@ class AssetServiceTransport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 340bd09b0287..845421d5e2d8 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -47,14 +47,14 @@ class AssetServiceGrpcTransport(AssetServiceTransport): def __init__(self, *, host: str = 'cloudasset.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + 
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -181,8 +181,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'cloudasset.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 00e15c96e284..d579bca88a69 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -51,7 +51,7 @@ class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): @classmethod def create_channel(cls, host: str = 'cloudasset.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -91,15 +91,15 @@ def create_channel(cls, def __init__(self, *, host: str = 'cloudasset.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: 
aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index ddd8b7a9f465..eab4b8c22991 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -366,15 +366,15 @@ class AssetServiceRestTransport(AssetServiceTransport): def __init__(self, *, host: str = 'cloudasset.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + 
client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[AssetServiceRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -478,9 +478,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.AnalyzeIamPolicyRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. @@ -558,9 +558,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the analyze iam policy longrunning method over HTTP. @@ -647,9 +647,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.BatchGetAssetsHistoryRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. 
@@ -723,9 +723,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Call the create feed method over HTTP. @@ -815,9 +815,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete feed method over HTTP. @@ -879,9 +879,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.ExportAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. @@ -965,9 +965,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.GetFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Call the get feed method over HTTP. 
@@ -1048,9 +1048,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. @@ -1124,9 +1124,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.ListFeedsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. @@ -1200,9 +1200,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. @@ -1276,9 +1276,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. 
@@ -1352,9 +1352,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Call the update feed method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 787a6b307cdb..047032e1a0dc 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.asset_v1.types import assets as gca_assets @@ -91,7 +93,7 @@ class ExportAssetsRequest(proto.Message): collection and indexing, there is a volatile window during which running the same query may get different results. - asset_types (Sequence[str]): + asset_types (MutableSequence[str]): A list of asset types to take a snapshot for. For example: "compute.googleapis.com/Disk". @@ -122,25 +124,25 @@ class ExportAssetsRequest(proto.Message): where the results will be output to. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - asset_types = proto.RepeatedField( + asset_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - content_type = proto.Field( + content_type: 'ContentType' = proto.Field( proto.ENUM, number=4, enum='ContentType', ) - output_config = proto.Field( + output_config: 'OutputConfig' = proto.Field( proto.MESSAGE, number=5, message='OutputConfig', @@ -169,17 +171,17 @@ class ExportAssetsResponse(proto.Message): it exceeds a single Google Cloud Storage object limit. """ - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - output_config = proto.Field( + output_config: 'OutputConfig' = proto.Field( proto.MESSAGE, number=2, message='OutputConfig', ) - output_result = proto.Field( + output_result: 'OutputResult' = proto.Field( proto.MESSAGE, number=3, message='OutputResult', @@ -205,7 +207,7 @@ class ListAssetsRequest(proto.Message): collection and indexing, there is a volatile window during which running the same query may get different results. - asset_types (Sequence[str]): + asset_types (MutableSequence[str]): A list of asset types to take a snapshot for. For example: "compute.googleapis.com/Disk". @@ -243,29 +245,29 @@ class ListAssetsRequest(proto.Message): of assets. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - asset_types = proto.RepeatedField( + asset_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - content_type = proto.Field( + content_type: 'ContentType' = proto.Field( proto.ENUM, number=4, enum='ContentType', ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=5, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=6, ) @@ -277,7 +279,7 @@ class ListAssetsResponse(proto.Message): Attributes: read_time (google.protobuf.timestamp_pb2.Timestamp): Time the snapshot was taken. - assets (Sequence[google.cloud.asset_v1.types.Asset]): + assets (MutableSequence[google.cloud.asset_v1.types.Asset]): Assets. next_page_token (str): Token to retrieve the next page of results. @@ -290,17 +292,17 @@ class ListAssetsResponse(proto.Message): def raw_page(self): return self - read_time = proto.Field( + read_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - assets = proto.RepeatedField( + assets: MutableSequence[gca_assets.Asset] = proto.RepeatedField( proto.MESSAGE, number=2, message=gca_assets.Asset, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=3, ) @@ -316,7 +318,7 @@ class BatchGetAssetsHistoryRequest(proto.Message): (such as "organizations/123"), a project ID (such as "projects/my-project-id")", or a project number (such as "projects/12345"). - asset_names (Sequence[str]): + asset_names (MutableSequence[str]): A list of the full names of the assets. See: https://cloud.google.com/asset-inventory/docs/resource-name-format Example: @@ -338,20 +340,20 @@ class BatchGetAssetsHistoryRequest(proto.Message): whose time window overlap with read_time_window. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - asset_names = proto.RepeatedField( + asset_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - content_type = proto.Field( + content_type: 'ContentType' = proto.Field( proto.ENUM, number=3, enum='ContentType', ) - read_time_window = proto.Field( + read_time_window: gca_assets.TimeWindow = proto.Field( proto.MESSAGE, number=4, message=gca_assets.TimeWindow, @@ -362,11 +364,11 @@ class BatchGetAssetsHistoryResponse(proto.Message): r"""Batch get assets history response. Attributes: - assets (Sequence[google.cloud.asset_v1.types.TemporalAsset]): + assets (MutableSequence[google.cloud.asset_v1.types.TemporalAsset]): A list of assets with valid time windows. """ - assets = proto.RepeatedField( + assets: MutableSequence[gca_assets.TemporalAsset] = proto.RepeatedField( proto.MESSAGE, number=1, message=gca_assets.TemporalAsset, @@ -398,15 +400,15 @@ class CreateFeedRequest(proto.Message): organizations/organization_number/feeds/feed_id """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - feed_id = proto.Field( + feed_id: str = proto.Field( proto.STRING, number=2, ) - feed = proto.Field( + feed: 'Feed' = proto.Field( proto.MESSAGE, number=3, message='Feed', @@ -424,7 +426,7 @@ class GetFeedRequest(proto.Message): organizations/organization_number/feeds/feed_id """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -443,7 +445,7 @@ class ListFeedsRequest(proto.Message): "projects/my-project-id"). """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) @@ -453,11 +455,11 @@ class ListFeedsResponse(proto.Message): r""" Attributes: - feeds (Sequence[google.cloud.asset_v1.types.Feed]): + feeds (MutableSequence[google.cloud.asset_v1.types.Feed]): A list of feeds. 
""" - feeds = proto.RepeatedField( + feeds: MutableSequence['Feed'] = proto.RepeatedField( proto.MESSAGE, number=1, message='Feed', @@ -480,12 +482,12 @@ class UpdateFeedRequest(proto.Message): contain fields that are immutable or only set by the server. """ - feed = proto.Field( + feed: 'Feed' = proto.Field( proto.MESSAGE, number=1, message='Feed', ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, @@ -503,7 +505,7 @@ class DeleteFeedRequest(proto.Message): organizations/organization_number/feeds/feed_id """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -532,13 +534,13 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination = proto.Field( + gcs_destination: 'GcsDestination' = proto.Field( proto.MESSAGE, number=1, oneof='destination', message='GcsDestination', ) - bigquery_destination = proto.Field( + bigquery_destination: 'BigQueryDestination' = proto.Field( proto.MESSAGE, number=2, oneof='destination', @@ -558,7 +560,7 @@ class OutputResult(proto.Message): This field is a member of `oneof`_ ``result``. """ - gcs_result = proto.Field( + gcs_result: 'GcsOutputResult' = proto.Field( proto.MESSAGE, number=1, oneof='result', @@ -570,12 +572,12 @@ class GcsOutputResult(proto.Message): r"""A Cloud Storage output result. Attributes: - uris (Sequence[str]): + uris (MutableSequence[str]): List of uris of the Cloud Storage objects. Example: "gs://bucket_name/object_name". """ - uris = proto.RepeatedField( + uris: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) @@ -619,12 +621,12 @@ class GcsDestination(proto.Message): This field is a member of `oneof`_ ``object_uri``. 
""" - uri = proto.Field( + uri: str = proto.Field( proto.STRING, number=1, oneof='object_uri', ) - uri_prefix = proto.Field( + uri_prefix: str = proto.Field( proto.STRING, number=2, oneof='object_uri', @@ -709,24 +711,24 @@ class BigQueryDestination(proto.Message): a table. """ - dataset = proto.Field( + dataset: str = proto.Field( proto.STRING, number=1, ) - table = proto.Field( + table: str = proto.Field( proto.STRING, number=2, ) - force = proto.Field( + force: bool = proto.Field( proto.BOOL, number=3, ) - partition_spec = proto.Field( + partition_spec: 'PartitionSpec' = proto.Field( proto.MESSAGE, number=4, message='PartitionSpec', ) - separate_tables_per_asset_type = proto.Field( + separate_tables_per_asset_type: bool = proto.Field( proto.BOOL, number=5, ) @@ -752,7 +754,7 @@ class PartitionKey(proto.Enum): READ_TIME = 1 REQUEST_TIME = 2 - partition_key = proto.Field( + partition_key: PartitionKey = proto.Field( proto.ENUM, number=1, enum=PartitionKey, @@ -768,7 +770,7 @@ class PubsubDestination(proto.Message): ``projects/PROJECT_ID/topics/TOPIC_ID``. """ - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) @@ -786,7 +788,7 @@ class FeedOutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - pubsub_destination = proto.Field( + pubsub_destination: 'PubsubDestination' = proto.Field( proto.MESSAGE, number=1, oneof='destination', @@ -812,7 +814,7 @@ class Feed(proto.Message): The client-assigned feed identifier must be unique within the parent project/folder/organization. - asset_names (Sequence[str]): + asset_names (MutableSequence[str]): A list of the full names of the assets to receive updates. You must specify either or both of asset_names and asset_types. Only asset updates matching specified @@ -822,7 +824,7 @@ class Feed(proto.Message): See `Resource Names `__ for more info. - asset_types (Sequence[str]): + asset_types (MutableSequence[str]): A list of types of the assets to receive updates. 
You must specify either or both of asset_names and asset_types. Only asset updates matching specified asset_names or asset_types @@ -855,29 +857,29 @@ class Feed(proto.Message): for detailed instructions. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - asset_names = proto.RepeatedField( + asset_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - asset_types = proto.RepeatedField( + asset_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - content_type = proto.Field( + content_type: 'ContentType' = proto.Field( proto.ENUM, number=4, enum='ContentType', ) - feed_output_config = proto.Field( + feed_output_config: 'FeedOutputConfig' = proto.Field( proto.MESSAGE, number=5, message='FeedOutputConfig', ) - condition = proto.Field( + condition: expr_pb2.Expr = proto.Field( proto.MESSAGE, number=6, message=expr_pb2.Expr, @@ -948,7 +950,7 @@ class SearchAllResourcesRequest(proto.Message): resources that contain "Important" as a word in any of the searchable fields and are also located in the "us-west1" region or the "global" location. - asset_types (Sequence[str]): + asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it will search all the `searchable asset types `__. @@ -1004,27 +1006,27 @@ class SearchAllResourcesRequest(proto.Message): ``additionalAttributes``) are not supported. 
""" - scope = proto.Field( + scope: str = proto.Field( proto.STRING, number=1, ) - query = proto.Field( + query: str = proto.Field( proto.STRING, number=2, ) - asset_types = proto.RepeatedField( + asset_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=4, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=5, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=6, ) @@ -1034,7 +1036,7 @@ class SearchAllResourcesResponse(proto.Message): r"""Search all resources response. Attributes: - results (Sequence[google.cloud.asset_v1.types.ResourceSearchResult]): + results (MutableSequence[google.cloud.asset_v1.types.ResourceSearchResult]): A list of Resources that match the search query. It contains the resource standard metadata information. @@ -1049,12 +1051,12 @@ class SearchAllResourcesResponse(proto.Message): def raw_page(self): return self - results = proto.RepeatedField( + results: MutableSequence[gca_assets.ResourceSearchResult] = proto.RepeatedField( proto.MESSAGE, number=1, message=gca_assets.ResourceSearchResult, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -1138,7 +1140,7 @@ class SearchAllIamPoliciesRequest(proto.Message): be the value of ``next_page_token`` from the previous response. The values of all other method parameters must be identical to those in the previous call. - asset_types (Sequence[str]): + asset_types (MutableSequence[str]): Optional. A list of asset types that the IAM policies are attached to. If empty, it will search the IAM policies that are attached to all the `searchable asset @@ -1173,27 +1175,27 @@ class SearchAllIamPoliciesRequest(proto.Message): ``policy``) are not supported. 
""" - scope = proto.Field( + scope: str = proto.Field( proto.STRING, number=1, ) - query = proto.Field( + query: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) - asset_types = proto.RepeatedField( + asset_types: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=5, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=7, ) @@ -1203,7 +1205,7 @@ class SearchAllIamPoliciesResponse(proto.Message): r"""Search all IAM policies response. Attributes: - results (Sequence[google.cloud.asset_v1.types.IamPolicySearchResult]): + results (MutableSequence[google.cloud.asset_v1.types.IamPolicySearchResult]): A list of IamPolicy that match the search query. Related information such as the associated resource is returned along with the @@ -1218,12 +1220,12 @@ class SearchAllIamPoliciesResponse(proto.Message): def raw_page(self): return self - results = proto.RepeatedField( + results: MutableSequence[gca_assets.IamPolicySearchResult] = proto.RepeatedField( proto.MESSAGE, number=1, message=gca_assets.IamPolicySearchResult, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -1276,7 +1278,7 @@ class ResourceSelector(proto.Message): types `__. """ - full_resource_name = proto.Field( + full_resource_name: str = proto.Field( proto.STRING, number=1, ) @@ -1301,7 +1303,7 @@ class IdentitySelector(proto.Message): supported. You must give a specific identity. """ - identity = proto.Field( + identity: str = proto.Field( proto.STRING, number=1, ) @@ -1314,18 +1316,18 @@ class AccessSelector(proto.Message): roles and permissions should be equal or less than 10. Attributes: - roles (Sequence[str]): + roles (MutableSequence[str]): Optional. The roles to appear in result. 
- permissions (Sequence[str]): + permissions (MutableSequence[str]): Optional. The permissions to appear in result. """ - roles = proto.RepeatedField( + roles: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - permissions = proto.RepeatedField( + permissions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) @@ -1422,27 +1424,27 @@ class Options(proto.Message): Default is false. """ - expand_groups = proto.Field( + expand_groups: bool = proto.Field( proto.BOOL, number=1, ) - expand_roles = proto.Field( + expand_roles: bool = proto.Field( proto.BOOL, number=2, ) - expand_resources = proto.Field( + expand_resources: bool = proto.Field( proto.BOOL, number=3, ) - output_resource_edges = proto.Field( + output_resource_edges: bool = proto.Field( proto.BOOL, number=4, ) - output_group_edges = proto.Field( + output_group_edges: bool = proto.Field( proto.BOOL, number=5, ) - analyze_service_account_impersonation = proto.Field( + analyze_service_account_impersonation: bool = proto.Field( proto.BOOL, number=6, ) @@ -1462,38 +1464,38 @@ class ConditionContext(proto.Message): This field is a member of `oneof`_ ``TimeContext``. 
""" - access_time = proto.Field( + access_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, oneof='TimeContext', message=timestamp_pb2.Timestamp, ) - scope = proto.Field( + scope: str = proto.Field( proto.STRING, number=1, ) - resource_selector = proto.Field( + resource_selector: ResourceSelector = proto.Field( proto.MESSAGE, number=2, message=ResourceSelector, ) - identity_selector = proto.Field( + identity_selector: IdentitySelector = proto.Field( proto.MESSAGE, number=3, message=IdentitySelector, ) - access_selector = proto.Field( + access_selector: AccessSelector = proto.Field( proto.MESSAGE, number=4, message=AccessSelector, ) - options = proto.Field( + options: Options = proto.Field( proto.MESSAGE, number=5, message=Options, ) - condition_context = proto.Field( + condition_context: ConditionContext = proto.Field( proto.MESSAGE, number=6, message=ConditionContext, @@ -1522,12 +1524,12 @@ class AnalyzeIamPolicyRequest(proto.Message): Default is empty. """ - analysis_query = proto.Field( + analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( proto.MESSAGE, number=1, message='IamPolicyAnalysisQuery', ) - execution_timeout = proto.Field( + execution_timeout: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, message=duration_pb2.Duration, @@ -1542,7 +1544,7 @@ class AnalyzeIamPolicyResponse(proto.Message): main_analysis (google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis): The main analysis that matches the original request. - service_account_impersonation_analysis (Sequence[google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis]): + service_account_impersonation_analysis (MutableSequence[google.cloud.asset_v1.types.AnalyzeIamPolicyResponse.IamPolicyAnalysis]): The service account impersonation analysis if [AnalyzeIamPolicyRequest.analyze_service_account_impersonation][] is enabled. 
@@ -1560,7 +1562,7 @@ class IamPolicyAnalysis(proto.Message): Attributes: analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): The analysis query. - analysis_results (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult]): + analysis_results (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult]): A list of [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult] that matches the analysis query, or empty if no result is @@ -1569,42 +1571,42 @@ class IamPolicyAnalysis(proto.Message): Represents whether all entries in the [analysis_results][google.cloud.asset.v1.AnalyzeIamPolicyResponse.IamPolicyAnalysis.analysis_results] have been fully explored to answer the query. - non_critical_errors (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisState]): + non_critical_errors (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisState]): A list of non-critical errors happened during the query handling. """ - analysis_query = proto.Field( + analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( proto.MESSAGE, number=1, message='IamPolicyAnalysisQuery', ) - analysis_results = proto.RepeatedField( + analysis_results: MutableSequence[gca_assets.IamPolicyAnalysisResult] = proto.RepeatedField( proto.MESSAGE, number=2, message=gca_assets.IamPolicyAnalysisResult, ) - fully_explored = proto.Field( + fully_explored: bool = proto.Field( proto.BOOL, number=3, ) - non_critical_errors = proto.RepeatedField( + non_critical_errors: MutableSequence[gca_assets.IamPolicyAnalysisState] = proto.RepeatedField( proto.MESSAGE, number=5, message=gca_assets.IamPolicyAnalysisState, ) - main_analysis = proto.Field( + main_analysis: IamPolicyAnalysis = proto.Field( proto.MESSAGE, number=1, message=IamPolicyAnalysis, ) - service_account_impersonation_analysis = proto.RepeatedField( + service_account_impersonation_analysis: MutableSequence[IamPolicyAnalysis] = proto.RepeatedField( proto.MESSAGE, number=2, message=IamPolicyAnalysis, ) - 
fully_explored = proto.Field( + fully_explored: bool = proto.Field( proto.BOOL, number=3, ) @@ -1650,7 +1652,7 @@ class GcsDestination(proto.Message): it will be overwritten with the analysis result. """ - uri = proto.Field( + uri: str = proto.Field( proto.STRING, number=1, ) @@ -1708,31 +1710,31 @@ class PartitionKey(proto.Enum): PARTITION_KEY_UNSPECIFIED = 0 REQUEST_TIME = 1 - dataset = proto.Field( + dataset: str = proto.Field( proto.STRING, number=1, ) - table_prefix = proto.Field( + table_prefix: str = proto.Field( proto.STRING, number=2, ) - partition_key = proto.Field( + partition_key: 'IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey' = proto.Field( proto.ENUM, number=3, enum='IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey', ) - write_disposition = proto.Field( + write_disposition: str = proto.Field( proto.STRING, number=4, ) - gcs_destination = proto.Field( + gcs_destination: GcsDestination = proto.Field( proto.MESSAGE, number=1, oneof='destination', message=GcsDestination, ) - bigquery_destination = proto.Field( + bigquery_destination: BigQueryDestination = proto.Field( proto.MESSAGE, number=2, oneof='destination', @@ -1752,12 +1754,12 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): where the results will be output to. 
""" - analysis_query = proto.Field( + analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( proto.MESSAGE, number=1, message='IamPolicyAnalysisQuery', ) - output_config = proto.Field( + output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( proto.MESSAGE, number=2, message='IamPolicyAnalysisOutputConfig', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 4a4cdb8c56db..55770d261e04 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.orgpolicy.v1 import orgpolicy_pb2 # type: ignore @@ -70,26 +72,26 @@ class PriorAssetState(proto.Enum): DOES_NOT_EXIST = 3 DELETED = 4 - window = proto.Field( + window: 'TimeWindow' = proto.Field( proto.MESSAGE, number=1, message='TimeWindow', ) - deleted = proto.Field( + deleted: bool = proto.Field( proto.BOOL, number=2, ) - asset = proto.Field( + asset: 'Asset' = proto.Field( proto.MESSAGE, number=3, message='Asset', ) - prior_asset_state = proto.Field( + prior_asset_state: PriorAssetState = proto.Field( proto.ENUM, number=4, enum=PriorAssetState, ) - prior_asset = proto.Field( + prior_asset: 'Asset' = proto.Field( proto.MESSAGE, number=5, message='Asset', @@ -108,12 +110,12 @@ class TimeWindow(proto.Message): instead. 
""" - start_time = proto.Field( + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, @@ -170,7 +172,7 @@ class Asset(proto.Message): See `this topic `__ for more information. - org_policy (Sequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): + org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): A representation of an `organization policy `__. There can be more than one organization policy with @@ -195,7 +197,7 @@ class Asset(proto.Message): `this topic `__ for more information. - ancestors (Sequence[str]): + ancestors (MutableSequence[str]): The ancestry path of an asset in Google Cloud `resource hierarchy `__, represented as a list of relative resource names. An @@ -208,58 +210,58 @@ class Asset(proto.Message): ``["projects/123456789", "folders/5432", "organizations/1234"]`` """ - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - asset_type = proto.Field( + asset_type: str = proto.Field( proto.STRING, number=2, ) - resource = proto.Field( + resource: 'Resource' = proto.Field( proto.MESSAGE, number=3, message='Resource', ) - iam_policy = proto.Field( + iam_policy: policy_pb2.Policy = proto.Field( proto.MESSAGE, number=4, message=policy_pb2.Policy, ) - org_policy = proto.RepeatedField( + org_policy: MutableSequence[orgpolicy_pb2.Policy] = proto.RepeatedField( proto.MESSAGE, number=6, message=orgpolicy_pb2.Policy, ) - access_policy = proto.Field( + access_policy: access_policy_pb2.AccessPolicy = proto.Field( proto.MESSAGE, number=7, oneof='access_context_policy', message=access_policy_pb2.AccessPolicy, ) - access_level = proto.Field( + access_level: access_level_pb2.AccessLevel 
= proto.Field( proto.MESSAGE, number=8, oneof='access_context_policy', message=access_level_pb2.AccessLevel, ) - service_perimeter = proto.Field( + service_perimeter: service_perimeter_pb2.ServicePerimeter = proto.Field( proto.MESSAGE, number=9, oneof='access_context_policy', message=service_perimeter_pb2.ServicePerimeter, ) - os_inventory = proto.Field( + os_inventory: inventory_pb2.Inventory = proto.Field( proto.MESSAGE, number=12, message=inventory_pb2.Inventory, ) - ancestors = proto.RepeatedField( + ancestors: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=10, ) @@ -314,32 +316,32 @@ class Resource(proto.Message): https://cloud.google.com/about/locations/. """ - version = proto.Field( + version: str = proto.Field( proto.STRING, number=1, ) - discovery_document_uri = proto.Field( + discovery_document_uri: str = proto.Field( proto.STRING, number=2, ) - discovery_name = proto.Field( + discovery_name: str = proto.Field( proto.STRING, number=3, ) - resource_url = proto.Field( + resource_url: str = proto.Field( proto.STRING, number=4, ) - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=5, ) - data = proto.Field( + data: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, message=struct_pb2.Struct, ) - location = proto.Field( + location: str = proto.Field( proto.STRING, number=8, ) @@ -379,7 +381,7 @@ class ResourceSearchResult(proto.Message): - use a free text query. Example: ``12345`` - specify the ``scope`` field as this project in your search request. - folders (Sequence[str]): + folders (MutableSequence[str]): The folder(s) that this resource belongs to, in the form of folders/{FOLDER_NUMBER}. This field is available when the resource belongs to one or more folders. @@ -428,7 +430,7 @@ class ResourceSearchResult(proto.Message): - use a field query. Example: ``location:us-west*`` - use a free text query. 
Example: ``us-west*`` - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Labels associated with this resource. See `Labelling and grouping GCP resources `__ @@ -446,7 +448,7 @@ class ResourceSearchResult(proto.Message): ``labels.env:*`` - use a free text query. Example: ``prod`` - network_tags (Sequence[str]): + network_tags (MutableSequence[str]): Network tags associated with this resource. Like labels, network tags are a type of annotations used to group GCP resources. See `Labelling GCP @@ -564,75 +566,75 @@ class ResourceSearchResult(proto.Message): ``cloudresourcemanager.googleapis.com/Project`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - asset_type = proto.Field( + asset_type: str = proto.Field( proto.STRING, number=2, ) - project = proto.Field( + project: str = proto.Field( proto.STRING, number=3, ) - folders = proto.RepeatedField( + folders: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=17, ) - organization = proto.Field( + organization: str = proto.Field( proto.STRING, number=18, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=4, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=5, ) - location = proto.Field( + location: str = proto.Field( proto.STRING, number=6, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=7, ) - network_tags = proto.RepeatedField( + network_tags: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) - kms_key = proto.Field( + kms_key: str = proto.Field( proto.STRING, number=10, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=12, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: str = 
proto.Field( proto.STRING, number=13, ) - additional_attributes = proto.Field( + additional_attributes: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=9, message=struct_pb2.Struct, ) - parent_full_resource_name = proto.Field( + parent_full_resource_name: str = proto.Field( proto.STRING, number=19, ) - parent_asset_type = proto.Field( + parent_asset_type: str = proto.Field( proto.STRING, number=103, ) @@ -674,7 +676,7 @@ class IamPolicySearchResult(proto.Message): - specify the ``scope`` field as this project in your search request. - folders (Sequence[str]): + folders (MutableSequence[str]): The folder(s) that the IAM policy belongs to, in the form of folders/{FOLDER_NUMBER}. This field is available when the IAM policy belongs to one or more folders. @@ -724,7 +726,7 @@ class Explanation(proto.Message): r"""Explanation about the IAM policy search result. Attributes: - matched_permissions (Mapping[str, google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.Permissions]): + matched_permissions (MutableMapping[str, google.cloud.asset_v1.types.IamPolicySearchResult.Explanation.Permissions]): The map from roles to their included permissions that match the permission query (i.e., a query containing ``policy.role.permissions:``). Example: if query @@ -741,49 +743,49 @@ class Permissions(proto.Message): r"""IAM permissions Attributes: - permissions (Sequence[str]): + permissions (MutableSequence[str]): A list of permissions. A sample permission string: ``compute.disk.get``. 
""" - permissions = proto.RepeatedField( + permissions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - matched_permissions = proto.MapField( + matched_permissions: MutableMapping[str, 'IamPolicySearchResult.Explanation.Permissions'] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, message='IamPolicySearchResult.Explanation.Permissions', ) - resource = proto.Field( + resource: str = proto.Field( proto.STRING, number=1, ) - asset_type = proto.Field( + asset_type: str = proto.Field( proto.STRING, number=5, ) - project = proto.Field( + project: str = proto.Field( proto.STRING, number=2, ) - folders = proto.RepeatedField( + folders: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=6, ) - organization = proto.Field( + organization: str = proto.Field( proto.STRING, number=7, ) - policy = proto.Field( + policy: policy_pb2.Policy = proto.Field( proto.MESSAGE, number=3, message=policy_pb2.Policy, ) - explanation = proto.Field( + explanation: Explanation = proto.Field( proto.MESSAGE, number=4, message=Explanation, @@ -810,12 +812,12 @@ class IamPolicyAnalysisState(proto.Message): of failure. """ - code = proto.Field( + code: code_pb2.Code = proto.Field( proto.ENUM, number=1, enum=code_pb2.Code, ) - cause = proto.Field( + cause: str = proto.Field( proto.STRING, number=2, ) @@ -835,7 +837,7 @@ class EvaluationValue(proto.Enum): FALSE = 2 CONDITIONAL = 3 - evaluation_value = proto.Field( + evaluation_value: EvaluationValue = proto.Field( proto.ENUM, number=1, enum=EvaluationValue, @@ -855,7 +857,7 @@ class IamPolicyAnalysisResult(proto.Message): policy attaches. iam_binding (google.iam.v1.policy_pb2.Binding): The Cloud IAM policy binding under analysis. 
- access_control_lists (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.AccessControlList]): + access_control_lists (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.AccessControlList]): The access control lists derived from the [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] that match or potentially match resource and access @@ -882,11 +884,11 @@ class Resource(proto.Message): The analysis state of this resource. """ - full_resource_name = proto.Field( + full_resource_name: str = proto.Field( proto.STRING, number=1, ) - analysis_state = proto.Field( + analysis_state: 'IamPolicyAnalysisState' = proto.Field( proto.MESSAGE, number=2, message='IamPolicyAnalysisState', @@ -915,17 +917,17 @@ class Access(proto.Message): The analysis state of this access. """ - role = proto.Field( + role: str = proto.Field( proto.STRING, number=1, oneof='oneof_access', ) - permission = proto.Field( + permission: str = proto.Field( proto.STRING, number=2, oneof='oneof_access', ) - analysis_state = proto.Field( + analysis_state: 'IamPolicyAnalysisState' = proto.Field( proto.MESSAGE, number=3, message='IamPolicyAnalysisState', @@ -952,11 +954,11 @@ class Identity(proto.Message): The analysis state of this identity. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - analysis_state = proto.Field( + analysis_state: 'IamPolicyAnalysisState' = proto.Field( proto.MESSAGE, number=2, message='IamPolicyAnalysisState', @@ -976,11 +978,11 @@ class Edge(proto.Message): node or an email of an identity. 
""" - source_node = proto.Field( + source_node: str = proto.Field( proto.STRING, number=1, ) - target_node = proto.Field( + target_node: str = proto.Field( proto.STRING, number=2, ) @@ -1006,19 +1008,19 @@ class AccessControlList(proto.Message): - AccessControlList 2: [R2, R3], [P3] Attributes: - resources (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Resource]): + resources (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Resource]): The resources that match one of the following conditions: - The resource_selector, if it is specified in request; - Otherwise, resources reachable from the policy attached resource. - accesses (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Access]): + accesses (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Access]): The accesses that match one of the following conditions: - The access_selector, if it is specified in request; - Otherwise, access specifiers reachable from the policy binding's role. - resource_edges (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): + resource_edges (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): Resource edges of the graph starting from the policy attached resource to any descendant resources. The [Edge.source_node][google.cloud.asset.v1.IamPolicyAnalysisResult.Edge.source_node] @@ -1033,22 +1035,22 @@ class AccessControlList(proto.Message): defined in the above IAM policy binding. 
""" - resources = proto.RepeatedField( + resources: MutableSequence['IamPolicyAnalysisResult.Resource'] = proto.RepeatedField( proto.MESSAGE, number=1, message='IamPolicyAnalysisResult.Resource', ) - accesses = proto.RepeatedField( + accesses: MutableSequence['IamPolicyAnalysisResult.Access'] = proto.RepeatedField( proto.MESSAGE, number=2, message='IamPolicyAnalysisResult.Access', ) - resource_edges = proto.RepeatedField( + resource_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( proto.MESSAGE, number=3, message='IamPolicyAnalysisResult.Edge', ) - condition_evaluation = proto.Field( + condition_evaluation: 'ConditionEvaluation' = proto.Field( proto.MESSAGE, number=4, message='ConditionEvaluation', @@ -1058,14 +1060,14 @@ class IdentityList(proto.Message): r"""The identities and group edges. Attributes: - identities (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Identity]): + identities (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Identity]): Only the identities that match one of the following conditions will be presented: - The identity_selector, if it is specified in request; - Otherwise, identities reachable from the policy binding's members. - group_edges (Sequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): + group_edges (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): Group identity edges of the graph starting from the binding's group members to any node of the [identities][google.cloud.asset.v1.IamPolicyAnalysisResult.IdentityList.identities]. @@ -1079,37 +1081,37 @@ class IdentityList(proto.Message): enabled in request. 
""" - identities = proto.RepeatedField( + identities: MutableSequence['IamPolicyAnalysisResult.Identity'] = proto.RepeatedField( proto.MESSAGE, number=1, message='IamPolicyAnalysisResult.Identity', ) - group_edges = proto.RepeatedField( + group_edges: MutableSequence['IamPolicyAnalysisResult.Edge'] = proto.RepeatedField( proto.MESSAGE, number=2, message='IamPolicyAnalysisResult.Edge', ) - attached_resource_full_name = proto.Field( + attached_resource_full_name: str = proto.Field( proto.STRING, number=1, ) - iam_binding = proto.Field( + iam_binding: policy_pb2.Binding = proto.Field( proto.MESSAGE, number=2, message=policy_pb2.Binding, ) - access_control_lists = proto.RepeatedField( + access_control_lists: MutableSequence[AccessControlList] = proto.RepeatedField( proto.MESSAGE, number=3, message=AccessControlList, ) - identity_list = proto.Field( + identity_list: IdentityList = proto.Field( proto.MESSAGE, number=4, message=IdentityList, ) - fully_explored = proto.Field( + fully_explored: bool = proto.Field( proto.BOOL, number=5, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index ef7a24c377d6..5db3da586776 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -1623,7 +1623,7 @@ }, { "name": "asset_types", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "retry", @@ -1711,7 +1711,7 @@ }, { "name": "asset_types", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "retry", diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 28f232336332..05f61eb26986 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -149,9 +149,9 @@ def transport(self) -> IAMCredentialsTransport: get_transport_class = functools.partial(type(IAMCredentialsClient).get_transport_class, type(IAMCredentialsClient)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, IAMCredentialsTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the iam credentials client. 
@@ -195,14 +195,14 @@ def __init__(self, *, ) async def generate_access_token(self, - request: Union[common.GenerateAccessTokenRequest, dict] = None, + request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - scope: Sequence[str] = None, - lifetime: duration_pb2.Duration = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + scope: Optional[MutableSequence[str]] = None, + lifetime: Optional[duration_pb2.Duration] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service @@ -236,7 +236,7 @@ async def sample_generate_access_token(): print(response) Args: - request (Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]): + request (Optional[Union[google.iam.credentials_v1.types.GenerateAccessTokenRequest, dict]]): The request object. name (:class:`str`): Required. The resource name of the service account for @@ -249,7 +249,7 @@ async def sample_generate_access_token(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (:class:`Sequence[str]`): + delegates (:class:`MutableSequence[str]`): The sequence of service accounts in a delegation chain. Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its @@ -267,7 +267,7 @@ async def sample_generate_access_token(): This corresponds to the ``delegates`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - scope (:class:`Sequence[str]`): + scope (:class:`MutableSequence[str]`): Required. Code to identify the scopes to be included in the OAuth 2.0 access token. 
See @@ -355,14 +355,14 @@ async def sample_generate_access_token(): return response async def generate_id_token(self, - request: Union[common.GenerateIdTokenRequest, dict] = None, + request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - audience: str = None, - include_email: bool = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + audience: Optional[str] = None, + include_email: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service @@ -396,7 +396,7 @@ async def sample_generate_id_token(): print(response) Args: - request (Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]): + request (Optional[Union[google.iam.credentials_v1.types.GenerateIdTokenRequest, dict]]): The request object. name (:class:`str`): Required. The resource name of the service account for @@ -409,7 +409,7 @@ async def sample_generate_id_token(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (:class:`Sequence[str]`): + delegates (:class:`MutableSequence[str]`): The sequence of service accounts in a delegation chain. 
Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its @@ -509,13 +509,13 @@ async def sample_generate_id_token(): return response async def sign_blob(self, - request: Union[common.SignBlobRequest, dict] = None, + request: Optional[Union[common.SignBlobRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - payload: bytes = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed @@ -549,7 +549,7 @@ async def sample_sign_blob(): print(response) Args: - request (Union[google.iam.credentials_v1.types.SignBlobRequest, dict]): + request (Optional[Union[google.iam.credentials_v1.types.SignBlobRequest, dict]]): The request object. name (:class:`str`): Required. The resource name of the service account for @@ -562,7 +562,7 @@ async def sample_sign_blob(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (:class:`Sequence[str]`): + delegates (:class:`MutableSequence[str]`): The sequence of service accounts in a delegation chain. 
Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its @@ -649,13 +649,13 @@ async def sample_sign_blob(): return response async def sign_jwt(self, - request: Union[common.SignJwtRequest, dict] = None, + request: Optional[Union[common.SignJwtRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - payload: str = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed @@ -689,7 +689,7 @@ async def sample_sign_jwt(): print(response) Args: - request (Union[google.iam.credentials_v1.types.SignJwtRequest, dict]): + request (Optional[Union[google.iam.credentials_v1.types.SignJwtRequest, dict]]): The request object. name (:class:`str`): Required. The resource name of the service account for @@ -702,7 +702,7 @@ async def sample_sign_jwt(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (:class:`Sequence[str]`): + delegates (:class:`MutableSequence[str]`): The sequence of service accounts in a delegation chain. 
Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 9e88f41d2fe0..bcf6585a0e23 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -56,7 +56,7 @@ class IAMCredentialsClientMeta(type): _transport_registry["rest"] = IAMCredentialsRestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[IAMCredentialsTransport]: """Returns an appropriate transport class. 
@@ -299,7 +299,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, IAMCredentialsTransport, None] = None, + transport: Optional[Union[str, IAMCredentialsTransport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -389,14 +389,14 @@ def __init__(self, *, ) def generate_access_token(self, - request: Union[common.GenerateAccessTokenRequest, dict] = None, + request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - scope: Sequence[str] = None, - lifetime: duration_pb2.Duration = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + scope: Optional[MutableSequence[str]] = None, + lifetime: Optional[duration_pb2.Duration] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service @@ -443,7 +443,7 @@ def sample_generate_access_token(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its @@ -461,7 +461,7 @@ def sample_generate_access_token(): This corresponds to the ``delegates`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - scope (Sequence[str]): + scope (MutableSequence[str]): Required. Code to identify the scopes to be included in the OAuth 2.0 access token. 
See @@ -542,14 +542,14 @@ def sample_generate_access_token(): return response def generate_id_token(self, - request: Union[common.GenerateIdTokenRequest, dict] = None, + request: Optional[Union[common.GenerateIdTokenRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - audience: str = None, - include_email: bool = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + audience: Optional[str] = None, + include_email: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service @@ -596,7 +596,7 @@ def sample_generate_id_token(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its @@ -689,13 +689,13 @@ def sample_generate_id_token(): return response def sign_blob(self, - request: Union[common.SignBlobRequest, dict] = None, + request: Optional[Union[common.SignBlobRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - payload: bytes = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed @@ -742,7 +742,7 @@ def sample_sign_blob(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its @@ -822,13 +822,13 @@ def sample_sign_blob(): return response def sign_jwt(self, - request: Union[common.SignJwtRequest, dict] = None, + request: Optional[Union[common.SignJwtRequest, dict]] = None, *, - name: str = None, - delegates: Sequence[str] = None, - payload: str = None, + name: Optional[str] = None, + delegates: Optional[MutableSequence[str]] = None, + payload: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed @@ -875,7 +875,7 @@ def sample_sign_jwt(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. 
Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 93b227c32e30..eadf66db9aa6 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -48,7 +48,7 @@ class IAMCredentialsTransport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index a3e8efecdf32..c6f3288e9134 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -53,14 +53,14 @@ class IAMCredentialsGrpcTransport(IAMCredentialsTransport): def __init__(self, *, host: str = 'iamcredentials.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], 
Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -186,8 +186,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'iamcredentials.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index a99e2df95dae..524245c8c1d0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -57,7 +57,7 @@ class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport): @classmethod def create_channel(cls, host: str = 'iamcredentials.googleapis.com', - credentials: 
ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -97,15 +97,15 @@ def create_channel(cls, def __init__(self, *, host: str = 'iamcredentials.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index e99175ac4f93..7bc75d5d65da 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -199,15 +199,15 @@ class IAMCredentialsRestTransport(IAMCredentialsTransport): def 
__init__(self, *, host: str = 'iamcredentials.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[IAMCredentialsRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -286,9 +286,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.GenerateAccessTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: r"""Call the generate access token method over HTTP. @@ -371,9 +371,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.GenerateIdTokenRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: r"""Call the generate id token method over HTTP. 
@@ -456,9 +456,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.SignBlobRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: r"""Call the sign blob method over HTTP. @@ -541,9 +541,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.SignJwtRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: r"""Call the sign jwt method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index a14b7573504b..e7c9076d39bd 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -44,7 +46,7 @@ class GenerateAccessTokenRequest(proto.Message): ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. The ``-`` wildcard character is required; replacing it with a project ID is invalid. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. 
Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its next @@ -58,7 +60,7 @@ class GenerateAccessTokenRequest(proto.Message): ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. The ``-`` wildcard character is required; replacing it with a project ID is invalid. - scope (Sequence[str]): + scope (MutableSequence[str]): Required. Code to identify the scopes to be included in the OAuth 2.0 access token. See https://developers.google.com/identity/protocols/googlescopes @@ -72,19 +74,19 @@ class GenerateAccessTokenRequest(proto.Message): to a default value of one hour. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - delegates = proto.RepeatedField( + delegates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - scope = proto.RepeatedField( + scope: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=4, ) - lifetime = proto.Field( + lifetime: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=7, message=duration_pb2.Duration, @@ -102,11 +104,11 @@ class GenerateAccessTokenResponse(proto.Message): The expiration time is always set. """ - access_token = proto.Field( + access_token: str = proto.Field( proto.STRING, number=1, ) - expire_time = proto.Field( + expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, @@ -123,7 +125,7 @@ class SignBlobRequest(proto.Message): ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. The ``-`` wildcard character is required; replacing it with a project ID is invalid. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its next @@ -141,15 +143,15 @@ class SignBlobRequest(proto.Message): Required. The bytes to sign. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - delegates = proto.RepeatedField( + delegates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - payload = proto.Field( + payload: bytes = proto.Field( proto.BYTES, number=5, ) @@ -165,11 +167,11 @@ class SignBlobResponse(proto.Message): The signed blob. """ - key_id = proto.Field( + key_id: str = proto.Field( proto.STRING, number=1, ) - signed_blob = proto.Field( + signed_blob: bytes = proto.Field( proto.BYTES, number=4, ) @@ -185,7 +187,7 @@ class SignJwtRequest(proto.Message): ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. The ``-`` wildcard character is required; replacing it with a project ID is invalid. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its next @@ -204,15 +206,15 @@ class SignJwtRequest(proto.Message): object that contains a JWT Claims Set. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - delegates = proto.RepeatedField( + delegates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - payload = proto.Field( + payload: str = proto.Field( proto.STRING, number=5, ) @@ -228,11 +230,11 @@ class SignJwtResponse(proto.Message): The signed JWT. """ - key_id = proto.Field( + key_id: str = proto.Field( proto.STRING, number=1, ) - signed_jwt = proto.Field( + signed_jwt: str = proto.Field( proto.STRING, number=2, ) @@ -248,7 +250,7 @@ class GenerateIdTokenRequest(proto.Message): ``projects/-/serviceAccounts/{ACCOUNT_EMAIL_OR_UNIQUEID}``. The ``-`` wildcard character is required; replacing it with a project ID is invalid. - delegates (Sequence[str]): + delegates (MutableSequence[str]): The sequence of service accounts in a delegation chain. 
Each service account must be granted the ``roles/iam.serviceAccountTokenCreator`` role on its next @@ -272,19 +274,19 @@ class GenerateIdTokenRequest(proto.Message): ``email_verified`` claims. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - delegates = proto.RepeatedField( + delegates: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - audience = proto.Field( + audience: str = proto.Field( proto.STRING, number=3, ) - include_email = proto.Field( + include_email: bool = proto.Field( proto.BOOL, number=4, ) @@ -298,7 +300,7 @@ class GenerateIdTokenResponse(proto.Message): The OpenId Connect ID token. """ - token = proto.Field( + token: str = proto.Field( proto.STRING, number=1, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index 8f25e0e91799..317a7f4a2dab 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -39,11 +39,11 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "scope", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "lifetime", @@ -131,11 +131,11 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "scope", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "lifetime", @@ -224,7 +224,7 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "audience", @@ -316,7 +316,7 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": 
"MutableSequence[str]" }, { "name": "audience", @@ -409,7 +409,7 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "payload", @@ -497,7 +497,7 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "payload", @@ -586,7 +586,7 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "payload", @@ -674,7 +674,7 @@ }, { "name": "delegates", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "payload", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 2d6f5252e85e..d799051df108 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -151,9 +151,9 @@ def transport(self) -> EventarcTransport: get_transport_class = functools.partial(type(EventarcClient).get_transport_class, type(EventarcClient)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, EventarcTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the 
eventarc client. @@ -197,11 +197,11 @@ def __init__(self, *, ) async def get_trigger(self, - request: Union[eventarc.GetTriggerRequest, dict] = None, + request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -233,7 +233,7 @@ async def sample_get_trigger(): print(response) Args: - request (Union[google.cloud.eventarc_v1.types.GetTriggerRequest, dict]): + request (Optional[Union[google.cloud.eventarc_v1.types.GetTriggerRequest, dict]]): The request object. The request message for the GetTrigger method. name (:class:`str`): @@ -298,11 +298,11 @@ async def sample_get_trigger(): return response async def list_triggers(self, - request: Union[eventarc.ListTriggersRequest, dict] = None, + request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTriggersAsyncPager: r"""List triggers. @@ -335,7 +335,7 @@ async def sample_list_triggers(): print(response) Args: - request (Union[google.cloud.eventarc_v1.types.ListTriggersRequest, dict]): + request (Optional[Union[google.cloud.eventarc_v1.types.ListTriggersRequest, dict]]): The request object. The request message for the ListTriggers method. 
parent (:class:`str`): @@ -412,13 +412,13 @@ async def sample_list_triggers(): return response async def create_trigger(self, - request: Union[eventarc.CreateTriggerRequest, dict] = None, + request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, *, - parent: str = None, - trigger: gce_trigger.Trigger = None, - trigger_id: str = None, + parent: Optional[str] = None, + trigger: Optional[gce_trigger.Trigger] = None, + trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Create a new trigger in a particular project and @@ -465,7 +465,7 @@ async def sample_create_trigger(): print(response) Args: - request (Union[google.cloud.eventarc_v1.types.CreateTriggerRequest, dict]): + request (Optional[Union[google.cloud.eventarc_v1.types.CreateTriggerRequest, dict]]): The request object. The request message for the CreateTrigger method. parent (:class:`str`): @@ -557,13 +557,13 @@ async def sample_create_trigger(): return response async def update_trigger(self, - request: Union[eventarc.UpdateTriggerRequest, dict] = None, + request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, *, - trigger: gce_trigger.Trigger = None, - update_mask: field_mask_pb2.FieldMask = None, - allow_missing: bool = None, + trigger: Optional[gce_trigger.Trigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Update a single trigger. 
@@ -599,7 +599,7 @@ async def sample_update_trigger(): print(response) Args: - request (Union[google.cloud.eventarc_v1.types.UpdateTriggerRequest, dict]): + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateTriggerRequest, dict]]): The request object. The request message for the UpdateTrigger method. trigger (:class:`google.cloud.eventarc_v1.types.Trigger`): @@ -694,12 +694,12 @@ async def sample_update_trigger(): return response async def delete_trigger(self, - request: Union[eventarc.DeleteTriggerRequest, dict] = None, + request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, *, - name: str = None, - allow_missing: bool = None, + name: Optional[str] = None, + allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single trigger. @@ -736,7 +736,7 @@ async def sample_delete_trigger(): print(response) Args: - request (Union[google.cloud.eventarc_v1.types.DeleteTriggerRequest, dict]): + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteTriggerRequest, dict]]): The request object. The request message for the DeleteTrigger method. 
name (:class:`str`): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 5a2bb9949b1a..b6659bbfce6f 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -61,7 +61,7 @@ class EventarcClientMeta(type): _transport_registry["rest"] = EventarcRestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[EventarcTransport]: """Returns an appropriate transport class. 
@@ -319,7 +319,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, EventarcTransport, None] = None, + transport: Optional[Union[str, EventarcTransport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -409,11 +409,11 @@ def __init__(self, *, ) def get_trigger(self, - request: Union[eventarc.GetTriggerRequest, dict] = None, + request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -510,11 +510,11 @@ def sample_get_trigger(): return response def list_triggers(self, - request: Union[eventarc.ListTriggersRequest, dict] = None, + request: Optional[Union[eventarc.ListTriggersRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTriggersPager: r"""List triggers. 
@@ -624,13 +624,13 @@ def sample_list_triggers(): return response def create_trigger(self, - request: Union[eventarc.CreateTriggerRequest, dict] = None, + request: Optional[Union[eventarc.CreateTriggerRequest, dict]] = None, *, - parent: str = None, - trigger: gce_trigger.Trigger = None, - trigger_id: str = None, + parent: Optional[str] = None, + trigger: Optional[gce_trigger.Trigger] = None, + trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Create a new trigger in a particular project and @@ -769,13 +769,13 @@ def sample_create_trigger(): return response def update_trigger(self, - request: Union[eventarc.UpdateTriggerRequest, dict] = None, + request: Optional[Union[eventarc.UpdateTriggerRequest, dict]] = None, *, - trigger: gce_trigger.Trigger = None, - update_mask: field_mask_pb2.FieldMask = None, - allow_missing: bool = None, + trigger: Optional[gce_trigger.Trigger] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Update a single trigger. @@ -906,12 +906,12 @@ def sample_update_trigger(): return response def delete_trigger(self, - request: Union[eventarc.DeleteTriggerRequest, dict] = None, + request: Optional[Union[eventarc.DeleteTriggerRequest, dict]] = None, *, - name: str = None, - allow_missing: bool = None, + name: Optional[str] = None, + allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete a single trigger. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index d15e1f50a8e4..808dc4f3b5ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -51,7 +51,7 @@ class EventarcTransport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 73d6ca9fbf68..fd8dae846b57 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -49,14 +49,14 @@ class EventarcGrpcTransport(EventarcTransport): def __init__(self, *, host: str = 'eventarc.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = 
None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -183,8 +183,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'eventarc.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 780b937d8037..20982c46e3b8 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -53,7 +53,7 @@ class EventarcGrpcAsyncIOTransport(EventarcTransport): @classmethod def create_channel(cls, host: str = 'eventarc.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -93,15 +93,15 @@ def create_channel(cls, def __init__(self, *, 
host: str = 'eventarc.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index edceb0477687..96bd6f82df74 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -218,15 +218,15 @@ class EventarcRestTransport(EventarcTransport): def __init__(self, *, host: str = 'eventarc.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - 
always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[EventarcRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -330,9 +330,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.CreateTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the create trigger method over HTTP. @@ -418,9 +418,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.DeleteTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the delete trigger method over HTTP. @@ -497,9 +497,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.GetTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> trigger.Trigger: r"""Call the get trigger method over HTTP. 
@@ -577,9 +577,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.ListTriggersRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> eventarc.ListTriggersResponse: r"""Call the list triggers method over HTTP. @@ -657,9 +657,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.UpdateTriggerRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the update trigger method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index 657966bc72b0..9a1cc43bb4a4 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.eventarc_v1.types import trigger as gce_trigger @@ -42,7 +44,7 @@ class GetTriggerRequest(proto.Message): Required. The name of the trigger to get. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -74,19 +76,19 @@ class ListTriggersRequest(proto.Message): ``name desc, trigger_id``. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=4, ) @@ -96,14 +98,14 @@ class ListTriggersResponse(proto.Message): r"""The response message for the ListTriggers method. Attributes: - triggers (Sequence[google.cloud.eventarc_v1.types.Trigger]): + triggers (MutableSequence[google.cloud.eventarc_v1.types.Trigger]): The requested triggers, up to the number specified in ``page_size``. next_page_token (str): A page token that can be sent to ListTriggers to request the next page. If this is empty, then there are no more pages. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Unreachable resources, if any. """ @@ -111,16 +113,16 @@ class ListTriggersResponse(proto.Message): def raw_page(self): return self - triggers = proto.RepeatedField( + triggers: MutableSequence[gce_trigger.Trigger] = proto.RepeatedField( proto.MESSAGE, number=1, message=gce_trigger.Trigger, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -143,20 +145,20 @@ class CreateTriggerRequest(proto.Message): preview the review, but do not actually post it. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - trigger = proto.Field( + trigger: gce_trigger.Trigger = proto.Field( proto.MESSAGE, number=2, message=gce_trigger.Trigger, ) - trigger_id = proto.Field( + trigger_id: str = proto.Field( proto.STRING, number=3, ) - validate_only = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, number=4, ) @@ -182,21 +184,21 @@ class UpdateTriggerRequest(proto.Message): preview the review, but do not actually post it. """ - trigger = proto.Field( + trigger: gce_trigger.Trigger = proto.Field( proto.MESSAGE, number=1, message=gce_trigger.Trigger, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, ) - allow_missing = proto.Field( + allow_missing: bool = proto.Field( proto.BOOL, number=3, ) - validate_only = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, number=4, ) @@ -222,19 +224,19 @@ class DeleteTriggerRequest(proto.Message): preview the review, but do not actually post it. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - etag = proto.Field( + etag: str = proto.Field( proto.STRING, number=2, ) - allow_missing = proto.Field( + allow_missing: bool = proto.Field( proto.BOOL, number=3, ) - validate_only = proto.Field( + validate_only: bool = proto.Field( proto.BOOL, number=4, ) @@ -270,33 +272,33 @@ class OperationMetadata(proto.Message): operation. 
""" - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_message = proto.Field( + status_message: str = proto.Field( proto.STRING, number=5, ) - requested_cancellation = proto.Field( + requested_cancellation: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index e5db1c8227ef..77b1cb69c83d 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -49,7 +51,7 @@ class Trigger(proto.Message): Output only. The creation time. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last-modified time. - event_filters (Sequence[google.cloud.eventarc_v1.types.EventFilter]): + event_filters (MutableSequence[google.cloud.eventarc_v1.types.EventFilter]): Required. null The list of filters that applies to event attributes. Only events that match all the provided filters will be sent to @@ -81,7 +83,7 @@ class Trigger(proto.Message): intermediary. 
This field contains a reference to that transport intermediary. This information can be used for debugging purposes. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. User labels attached to the triggers that can be used to group resources. etag (str): @@ -92,49 +94,49 @@ class Trigger(proto.Message): proceeding. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - uid = proto.Field( + uid: str = proto.Field( proto.STRING, number=2, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - event_filters = proto.RepeatedField( + event_filters: MutableSequence['EventFilter'] = proto.RepeatedField( proto.MESSAGE, number=8, message='EventFilter', ) - service_account = proto.Field( + service_account: str = proto.Field( proto.STRING, number=9, ) - destination = proto.Field( + destination: 'Destination' = proto.Field( proto.MESSAGE, number=10, message='Destination', ) - transport = proto.Field( + transport: 'Transport' = proto.Field( proto.MESSAGE, number=11, message='Transport', ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=12, ) - etag = proto.Field( + etag: str = proto.Field( proto.STRING, number=99, ) @@ -155,11 +157,11 @@ class EventFilter(proto.Message): Required. The value for the attribute. """ - attribute = proto.Field( + attribute: str = proto.Field( proto.STRING, number=1, ) - value = proto.Field( + value: str = proto.Field( proto.STRING, number=2, ) @@ -179,7 +181,7 @@ class Destination(proto.Message): This field is a member of `oneof`_ ``descriptor``. 
""" - cloud_run = proto.Field( + cloud_run: 'CloudRun' = proto.Field( proto.MESSAGE, number=1, oneof='descriptor', @@ -202,7 +204,7 @@ class Transport(proto.Message): This field is a member of `oneof`_ ``intermediary``. """ - pubsub = proto.Field( + pubsub: 'Pubsub' = proto.Field( proto.MESSAGE, number=1, oneof='intermediary', @@ -232,15 +234,15 @@ class CloudRun(proto.Message): deployed in. """ - service = proto.Field( + service: str = proto.Field( proto.STRING, number=1, ) - path = proto.Field( + path: str = proto.Field( proto.STRING, number=2, ) - region = proto.Field( + region: str = proto.Field( proto.STRING, number=3, ) @@ -266,11 +268,11 @@ class Pubsub(proto.Message): ``projects/{PROJECT_ID}/subscriptions/{SUBSCRIPTION_NAME}``. """ - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=2, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index da931475a8d8..f2e13824a031 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -148,9 +148,9 @@ def transport(self) -> ConfigServiceV2Transport: get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, 
type(ConfigServiceV2Client)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the config service v2 client. @@ -194,11 +194,11 @@ def __init__(self, *, ) async def list_buckets(self, - request: Union[logging_config.ListBucketsRequest, dict] = None, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists buckets. @@ -231,7 +231,7 @@ async def sample_list_buckets(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]]): The request object. The parameters to `ListBuckets`. parent (:class:`str`): Required. The parent resource whose buckets are to be @@ -317,10 +317,10 @@ async def sample_list_buckets(): return response async def get_bucket(self, - request: Union[logging_config.GetBucketRequest, dict] = None, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a bucket. @@ -352,7 +352,7 @@ async def sample_get_bucket(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): The request object. 
The parameters to `GetBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -395,10 +395,10 @@ async def sample_get_bucket(): return response async def create_bucket(self, - request: Union[logging_config.CreateBucketRequest, dict] = None, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Creates a bucket that can be used to store log @@ -433,7 +433,7 @@ async def sample_create_bucket(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to `CreateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -476,10 +476,10 @@ async def sample_create_bucket(): return response async def update_bucket(self, - request: Union[logging_config.UpdateBucketRequest, dict] = None, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Updates a bucket. This method replaces the following fields in @@ -521,7 +521,7 @@ async def sample_update_bucket(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to `UpdateBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -564,10 +564,10 @@ async def sample_update_bucket(): return response async def delete_bucket(self, - request: Union[logging_config.DeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -598,7 +598,7 @@ async def sample_delete_bucket(): await client.delete_bucket(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): The request object. The parameters to `DeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -634,10 +634,10 @@ async def sample_delete_bucket(): ) async def undelete_bucket(self, - request: Union[logging_config.UndeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Undeletes a bucket. A bucket that has been deleted @@ -667,7 +667,7 @@ async def sample_undelete_bucket(): await client.undelete_bucket(request=request) Args: - request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): The request object. The parameters to `UndeleteBucket`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -703,11 +703,11 @@ async def sample_undelete_bucket(): ) async def list_views(self, - request: Union[logging_config.ListViewsRequest, dict] = None, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a bucket. @@ -740,7 +740,7 @@ async def sample_list_views(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListViewsRequest, dict]]): The request object. The parameters to `ListViews`. parent (:class:`str`): Required. The bucket whose views are to be listed: @@ -818,10 +818,10 @@ async def sample_list_views(): return response async def get_view(self, - request: Union[logging_config.GetViewRequest, dict] = None, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view. @@ -853,7 +853,7 @@ async def sample_get_view(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): The request object. The parameters to `GetView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -898,10 +898,10 @@ async def sample_get_view(): return response async def create_view(self, - request: Union[logging_config.CreateViewRequest, dict] = None, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Creates a view over logs in a bucket. A bucket may @@ -935,7 +935,7 @@ async def sample_create_view(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): The request object. The parameters to `CreateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -980,10 +980,10 @@ async def sample_create_view(): return response async def update_view(self, - request: Union[logging_config.UpdateViewRequest, dict] = None, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Updates a view. This method replaces the following fields in the @@ -1016,7 +1016,7 @@ async def sample_update_view(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): The request object. The parameters to `UpdateView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -1061,10 +1061,10 @@ async def sample_update_view(): return response async def delete_view(self, - request: Union[logging_config.DeleteViewRequest, dict] = None, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view from a bucket. @@ -1093,7 +1093,7 @@ async def sample_delete_view(): await client.delete_view(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): The request object. The parameters to `DeleteView`. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -1129,11 +1129,11 @@ async def sample_delete_view(): ) async def list_sinks(self, - request: Union[logging_config.ListSinksRequest, dict] = None, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. @@ -1166,7 +1166,7 @@ async def sample_list_sinks(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListSinksRequest, dict]]): The request object. The parameters to `ListSinks`. parent (:class:`str`): Required. 
The parent resource whose sinks are to be @@ -1256,11 +1256,11 @@ async def sample_list_sinks(): return response async def get_sink(self, - request: Union[logging_config.GetSinkRequest, dict] = None, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1292,7 +1292,7 @@ async def sample_get_sink(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetSinkRequest, dict]]): The request object. The parameters to `GetSink`. sink_name (:class:`str`): Required. The resource name of the sink: @@ -1379,12 +1379,12 @@ async def sample_get_sink(): return response async def create_sink(self, - request: Union[logging_config.CreateSinkRequest, dict] = None, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, *, - parent: str = None, - sink: logging_config.LogSink = None, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a @@ -1425,7 +1425,7 @@ async def sample_create_sink(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]]): The request object. The parameters to `CreateSink`. parent (:class:`str`): Required. 
The resource in which to create the sink: @@ -1514,13 +1514,13 @@ async def sample_create_sink(): return response async def update_sink(self, - request: Union[logging_config.UpdateSinkRequest, dict] = None, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, *, - sink_name: str = None, - sink: logging_config.LogSink = None, - update_mask: field_mask_pb2.FieldMask = None, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the @@ -1562,7 +1562,7 @@ async def sample_update_sink(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]]): The request object. The parameters to `UpdateSink`. sink_name (:class:`str`): Required. The full resource name of the sink to update, @@ -1681,11 +1681,11 @@ async def sample_update_sink(): return response async def delete_sink(self, - request: Union[logging_config.DeleteSinkRequest, dict] = None, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, @@ -1715,7 +1715,7 @@ async def sample_delete_sink(): await client.delete_sink(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]]): The request object. The parameters to `DeleteSink`. 
sink_name (:class:`str`): Required. The full resource name of the sink to delete, @@ -1787,11 +1787,11 @@ async def sample_delete_sink(): ) async def list_exclusions(self, - request: Union[logging_config.ListExclusionsRequest, dict] = None, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions in a parent resource. @@ -1824,7 +1824,7 @@ async def sample_list_exclusions(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): The request object. The parameters to `ListExclusions`. parent (:class:`str`): Required. The parent resource whose exclusions are to be @@ -1914,11 +1914,11 @@ async def sample_list_exclusions(): return response async def get_exclusion(self, - request: Union[logging_config.GetExclusionRequest, dict] = None, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. @@ -1950,7 +1950,7 @@ async def sample_get_exclusion(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): The request object. The parameters to `GetExclusion`. name (:class:`str`): Required. 
The resource name of an existing exclusion: @@ -2040,12 +2040,12 @@ async def sample_get_exclusion(): return response async def create_exclusion(self, - request: Union[logging_config.CreateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, *, - parent: str = None, - exclusion: logging_config.LogExclusion = None, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in a specified parent @@ -2085,7 +2085,7 @@ async def sample_create_exclusion(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): The request object. The parameters to `CreateExclusion`. parent (:class:`str`): Required. 
The parent resource in which to create the @@ -2178,13 +2178,13 @@ async def sample_create_exclusion(): return response async def update_exclusion(self, - request: Union[logging_config.UpdateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, *, - name: str = None, - exclusion: logging_config.LogExclusion = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing @@ -2222,7 +2222,7 @@ async def sample_update_exclusion(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): The request object. The parameters to `UpdateExclusion`. name (:class:`str`): Required. The resource name of the exclusion to update: @@ -2330,11 +2330,11 @@ async def sample_update_exclusion(): return response async def delete_exclusion(self, - request: Union[logging_config.DeleteExclusionRequest, dict] = None, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion. @@ -2363,7 +2363,7 @@ async def sample_delete_exclusion(): await client.delete_exclusion(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): The request object. 
The parameters to `DeleteExclusion`. name (:class:`str`): Required. The resource name of an existing exclusion to @@ -2436,10 +2436,10 @@ async def sample_delete_exclusion(): ) async def get_cmek_settings(self, - request: Union[logging_config.GetCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logs Router CMEK settings for the given resource. @@ -2479,7 +2479,7 @@ async def sample_get_cmek_settings(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. See [Enabling CMEK for Logs @@ -2538,10 +2538,10 @@ async def sample_get_cmek_settings(): return response async def update_cmek_settings(self, - request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Updates the Logs Router CMEK settings for the given resource. @@ -2587,7 +2587,7 @@ async def sample_update_cmek_settings(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. 
See [Enabling CMEK for Logs diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 8e5b28b9e8fb..92de685b1ac3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -57,7 +57,7 @@ class ConfigServiceV2ClientMeta(type): _transport_registry["rest"] = ConfigServiceV2RestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ConfigServiceV2Transport]: """Returns an appropriate transport class. 
@@ -334,7 +334,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport, None] = None, + transport: Optional[Union[str, ConfigServiceV2Transport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -424,11 +424,11 @@ def __init__(self, *, ) def list_buckets(self, - request: Union[logging_config.ListBucketsRequest, dict] = None, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: r"""Lists buckets. @@ -547,10 +547,10 @@ def sample_list_buckets(): return response def get_bucket(self, - request: Union[logging_config.GetBucketRequest, dict] = None, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a bucket. 
@@ -626,10 +626,10 @@ def sample_get_bucket(): return response def create_bucket(self, - request: Union[logging_config.CreateBucketRequest, dict] = None, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Creates a bucket that can be used to store log @@ -708,10 +708,10 @@ def sample_create_bucket(): return response def update_bucket(self, - request: Union[logging_config.UpdateBucketRequest, dict] = None, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Updates a bucket. This method replaces the following fields in @@ -797,10 +797,10 @@ def sample_update_bucket(): return response def delete_bucket(self, - request: Union[logging_config.DeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -868,10 +868,10 @@ def sample_delete_bucket(): ) def undelete_bucket(self, - request: Union[logging_config.UndeleteBucketRequest, dict] = None, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Undeletes a bucket. 
A bucket that has been deleted @@ -938,11 +938,11 @@ def sample_undelete_bucket(): ) def list_views(self, - request: Union[logging_config.ListViewsRequest, dict] = None, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a bucket. @@ -1053,10 +1053,10 @@ def sample_list_views(): return response def get_view(self, - request: Union[logging_config.GetViewRequest, dict] = None, + request: Optional[Union[logging_config.GetViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view. @@ -1134,10 +1134,10 @@ def sample_get_view(): return response def create_view(self, - request: Union[logging_config.CreateViewRequest, dict] = None, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Creates a view over logs in a bucket. A bucket may @@ -1217,10 +1217,10 @@ def sample_create_view(): return response def update_view(self, - request: Union[logging_config.UpdateViewRequest, dict] = None, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Updates a view. 
This method replaces the following fields in the @@ -1299,10 +1299,10 @@ def sample_update_view(): return response def delete_view(self, - request: Union[logging_config.DeleteViewRequest, dict] = None, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view from a bucket. @@ -1368,11 +1368,11 @@ def sample_delete_view(): ) def list_sinks(self, - request: Union[logging_config.ListSinksRequest, dict] = None, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksPager: r"""Lists sinks. @@ -1487,11 +1487,11 @@ def sample_list_sinks(): return response def get_sink(self, - request: Union[logging_config.GetSinkRequest, dict] = None, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. 
@@ -1602,12 +1602,12 @@ def sample_get_sink(): return response def create_sink(self, - request: Union[logging_config.CreateSinkRequest, dict] = None, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, *, - parent: str = None, - sink: logging_config.LogSink = None, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a @@ -1737,13 +1737,13 @@ def sample_create_sink(): return response def update_sink(self, - request: Union[logging_config.UpdateSinkRequest, dict] = None, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, *, - sink_name: str = None, - sink: logging_config.LogSink = None, - update_mask: field_mask_pb2.FieldMask = None, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the @@ -1896,11 +1896,11 @@ def sample_update_sink(): return response def delete_sink(self, - request: Union[logging_config.DeleteSinkRequest, dict] = None, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, *, - sink_name: str = None, + sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a sink. 
If the sink has a unique ``writer_identity``, @@ -1994,11 +1994,11 @@ def sample_delete_sink(): ) def list_exclusions(self, - request: Union[logging_config.ListExclusionsRequest, dict] = None, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions in a parent resource. @@ -2113,11 +2113,11 @@ def sample_list_exclusions(): return response def get_exclusion(self, - request: Union[logging_config.GetExclusionRequest, dict] = None, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. 
@@ -2231,12 +2231,12 @@ def sample_get_exclusion(): return response def create_exclusion(self, - request: Union[logging_config.CreateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, *, - parent: str = None, - exclusion: logging_config.LogExclusion = None, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in a specified parent @@ -2369,13 +2369,13 @@ def sample_create_exclusion(): return response def update_exclusion(self, - request: Union[logging_config.UpdateExclusionRequest, dict] = None, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, *, - name: str = None, - exclusion: logging_config.LogExclusion = None, - update_mask: field_mask_pb2.FieldMask = None, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing @@ -2521,11 +2521,11 @@ def sample_update_exclusion(): return response def delete_exclusion(self, - request: Union[logging_config.DeleteExclusionRequest, dict] = None, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion. 
@@ -2619,10 +2619,10 @@ def sample_delete_exclusion(): ) def get_cmek_settings(self, - request: Union[logging_config.GetCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logs Router CMEK settings for the given resource. @@ -2722,10 +2722,10 @@ def sample_get_cmek_settings(): return response def update_cmek_settings(self, - request: Union[logging_config.UpdateCmekSettingsRequest, dict] = None, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Updates the Logs Router CMEK settings for the given resource. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index ed139098d841..df6f3e7a1f9b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -52,7 +52,7 @@ class ConfigServiceV2Transport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 9d01152fb660..8b912be15423 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -45,14 +45,14 @@ class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport): def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -178,8 +178,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, 
scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index dca620f3109f..7808861720ca 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): @classmethod def create_channel(cls, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -89,15 +89,15 @@ def create_channel(cls, def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = 
None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py index f27ee0360b62..fe03b8b2b9f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -573,15 +573,15 @@ class ConfigServiceV2RestTransport(ConfigServiceV2Transport): def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[ConfigServiceV2RestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -660,9 +660,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateBucketRequest, *, - retry: 
OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Call the create bucket method over HTTP. @@ -765,9 +765,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Call the create exclusion method over HTTP. @@ -881,9 +881,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Call the create sink method over HTTP. @@ -995,9 +995,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Call the create view method over HTTP. 
@@ -1102,9 +1102,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete bucket method over HTTP. @@ -1182,9 +1182,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete exclusion method over HTTP. @@ -1262,9 +1262,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete sink method over HTTP. @@ -1342,9 +1342,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete view method over HTTP. 
@@ -1422,9 +1422,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Call the get bucket method over HTTP. @@ -1514,9 +1514,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetCmekSettingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Call the get cmek settings method over HTTP. @@ -1612,9 +1612,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Call the get exclusion method over HTTP. @@ -1715,9 +1715,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Call the get sink method over HTTP. 
@@ -1816,9 +1816,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Call the get view method over HTTP. @@ -1910,9 +1910,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListBucketsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.ListBucketsResponse: r"""Call the list buckets method over HTTP. @@ -2002,9 +2002,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListExclusionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.ListExclusionsResponse: r"""Call the list exclusions method over HTTP. @@ -2094,9 +2094,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListSinksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.ListSinksResponse: r"""Call the list sinks method over HTTP. 
@@ -2186,9 +2186,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListViewsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.ListViewsResponse: r"""Call the list views method over HTTP. @@ -2278,9 +2278,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UndeleteBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the undelete bucket method over HTTP. @@ -2371,9 +2371,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Call the update bucket method over HTTP. @@ -2476,9 +2476,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateCmekSettingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Call the update cmek settings method over HTTP. 
@@ -2584,9 +2584,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Call the update exclusion method over HTTP. @@ -2700,9 +2700,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Call the update sink method over HTTP. @@ -2834,9 +2834,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Call the update view method over HTTP. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 2f20f04b9a94..1927be3cca3b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -140,9 +140,9 @@ def transport(self) -> LoggingServiceV2Transport: get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the logging service v2 client. 
@@ -186,11 +186,11 @@ def __init__(self, *, ) async def delete_log(self, - request: Union[logging.DeleteLogRequest, dict] = None, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, - log_name: str = None, + log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all the log entries in a log. The log @@ -223,7 +223,7 @@ async def sample_delete_log(): await client.delete_log(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]]): The request object. The parameters to DeleteLog. log_name (:class:`str`): Required. The resource name of the log to delete: @@ -298,14 +298,14 @@ async def sample_delete_log(): ) async def write_log_entries(self, - request: Union[logging.WriteLogEntriesRequest, dict] = None, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, *, - log_name: str = None, - resource: monitored_resource_pb2.MonitoredResource = None, - labels: Mapping[str, str] = None, - entries: Sequence[log_entry.LogEntry] = None, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the @@ -346,7 +346,7 @@ async def sample_write_log_entries(): print(response) Args: - request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]]): The request object. 
The parameters to WriteLogEntries. log_name (:class:`str`): Optional. A default log resource name that is assigned @@ -392,7 +392,7 @@ async def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Mapping[str, str]`): + labels (:class:`MutableMapping[str, str]`): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -402,7 +402,7 @@ async def sample_write_log_entries(): This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entries (:class:`Sequence[google.cloud.logging_v2.types.LogEntry]`): + entries (:class:`MutableSequence[google.cloud.logging_v2.types.LogEntry]`): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, @@ -496,13 +496,13 @@ async def sample_write_log_entries(): return response async def list_log_entries(self, - request: Union[logging.ListLogEntriesRequest, dict] = None, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, *, - resource_names: Sequence[str] = None, - filter: str = None, - order_by: str = None, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. 
Use this method to retrieve log entries that @@ -538,9 +538,9 @@ async def sample_list_log_entries(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]]): The request object. The parameters to `ListLogEntries`. - resource_names (:class:`Sequence[str]`): + resource_names (:class:`MutableSequence[str]`): Required. Names of one or more parent resources from which to retrieve log entries: @@ -660,10 +660,10 @@ async def sample_list_log_entries(): return response async def list_monitored_resource_descriptors(self, - request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types @@ -696,7 +696,7 @@ async def sample_list_monitored_resource_descriptors(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. 
The parameters to ListMonitoredResourceDescriptors retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -754,11 +754,11 @@ async def sample_list_monitored_resource_descriptors(): return response async def list_logs(self, - request: Union[logging.ListLogsRequest, dict] = None, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, @@ -793,7 +793,7 @@ async def sample_list_logs(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): The request object. The parameters to ListLogs. parent (:class:`str`): Required. The resource name that owns the logs: @@ -882,10 +882,10 @@ async def sample_list_logs(): return response def tail_log_entries(self, - requests: AsyncIterator[logging.TailLogEntriesRequest] = None, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index e9a1f7ba4c06..a3db9d981378 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -57,7 +57,7 @@ class LoggingServiceV2ClientMeta(type): _transport_registry["rest"] = LoggingServiceV2RestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[LoggingServiceV2Transport]: """Returns an appropriate transport class. 
@@ -290,7 +290,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport, None] = None, + transport: Optional[Union[str, LoggingServiceV2Transport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -380,11 +380,11 @@ def __init__(self, *, ) def delete_log(self, - request: Union[logging.DeleteLogRequest, dict] = None, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, - log_name: str = None, + log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all the log entries in a log. The log @@ -484,14 +484,14 @@ def sample_delete_log(): ) def write_log_entries(self, - request: Union[logging.WriteLogEntriesRequest, dict] = None, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, *, - log_name: str = None, - resource: monitored_resource_pb2.MonitoredResource = None, - labels: Mapping[str, str] = None, - entries: Sequence[log_entry.LogEntry] = None, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the @@ -578,7 +578,7 @@ def sample_write_log_entries(): This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a @@ -588,7 +588,7 @@ def sample_write_log_entries(): This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, @@ -673,13 +673,13 @@ def sample_write_log_entries(): return response def list_log_entries(self, - request: Union[logging.ListLogEntriesRequest, dict] = None, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, *, - resource_names: Sequence[str] = None, - filter: str = None, - order_by: str = None, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that @@ -717,7 +717,7 @@ def sample_list_log_entries(): Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): The request object. The parameters to `ListLogEntries`. - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Required. 
Names of one or more parent resources from which to retrieve log entries: @@ -829,10 +829,10 @@ def sample_list_log_entries(): return response def list_monitored_resource_descriptors(self, - request: Union[logging.ListMonitoredResourceDescriptorsRequest, dict] = None, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types @@ -916,11 +916,11 @@ def sample_list_monitored_resource_descriptors(): return response def list_logs(self, - request: Union[logging.ListLogsRequest, dict] = None, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, @@ -1036,10 +1036,10 @@ def sample_list_logs(): return response def tail_log_entries(self, - requests: Iterator[logging.TailLogEntriesRequest] = None, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 63a61cf7de93..75bf31462908 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -53,7 +53,7 @@ class LoggingServiceV2Transport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 6e19b459b101..6f6b9a4fe67e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -45,14 +45,14 @@ class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: 
Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -178,8 +178,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index e4098c8b1e0a..8d9c613a671e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): @classmethod def create_channel(cls, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, 
scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -89,15 +89,15 @@ def create_channel(cls, def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index 5a12926f94e6..a735292d9215 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -203,15 +203,15 @@ class LoggingServiceV2RestTransport(LoggingServiceV2Transport): def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: 
Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[LoggingServiceV2RestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -290,9 +290,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.DeleteLogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete log method over HTTP. @@ -370,9 +370,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.ListLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging.ListLogEntriesResponse: r"""Call the list log entries method over HTTP. 
@@ -455,9 +455,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.ListLogsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging.ListLogsResponse: r"""Call the list logs method over HTTP. @@ -540,9 +540,9 @@ def __hash__(self): def __call__(self, request: logging.ListMonitoredResourceDescriptorsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging.ListMonitoredResourceDescriptorsResponse: r"""Call the list monitored resource descriptors method over HTTP. @@ -613,9 +613,9 @@ def __hash__(self): def __call__(self, request: logging.TailLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> rest_streaming.ResponseIterator: raise NotImplementedError( "Client streaming over REST is not yet defined for python client") @@ -633,9 +633,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.WriteLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Call the write log entries method over HTTP. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index a64b92d72fd8..54bc33ba5cfd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -141,9 +141,9 @@ def transport(self) -> MetricsServiceV2Transport: get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the metrics service v2 client. 
@@ -187,11 +187,11 @@ def __init__(self, *, ) async def list_log_metrics(self, - request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -224,7 +224,7 @@ async def sample_list_log_metrics(): print(response) Args: - request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]]): The request object. The parameters to ListLogMetrics. parent (:class:`str`): Required. The name of the project containing the @@ -311,11 +311,11 @@ async def sample_list_log_metrics(): return response async def get_log_metric(self, - request: Union[logging_metrics.GetLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -347,7 +347,7 @@ async def sample_get_log_metric(): print(response) Args: - request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]]): The request object. The parameters to GetLogMetric. metric_name (:class:`str`): Required. 
The resource name of the desired metric: @@ -431,12 +431,12 @@ async def sample_get_log_metric(): return response async def create_log_metric(self, - request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, *, - parent: str = None, - metric: logging_metrics.LogMetric = None, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -473,7 +473,7 @@ async def sample_create_log_metric(): print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]]): The request object. The parameters to CreateLogMetric. parent (:class:`str`): Required. The resource name of the project in which to @@ -562,12 +562,12 @@ async def sample_create_log_metric(): return response async def update_log_metric(self, - request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, *, - metric_name: str = None, - metric: logging_metrics.LogMetric = None, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -604,7 +604,7 @@ async def sample_update_log_metric(): print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]]): The request object. 
The parameters to UpdateLogMetric. metric_name (:class:`str`): Required. The resource name of the metric to update: @@ -700,11 +700,11 @@ async def sample_update_log_metric(): return response async def delete_log_metric(self, - request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -733,7 +733,7 @@ async def sample_delete_log_metric(): await client.delete_log_metric(request=request) Args: - request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]): + request (Optional[Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]]): The request object. The parameters to DeleteLogMetric. metric_name (:class:`str`): Required. 
The resource name of the metric to delete: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index b4a5c2cddba5..fc7b586d79f3 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -58,7 +58,7 @@ class MetricsServiceV2ClientMeta(type): _transport_registry["rest"] = MetricsServiceV2RestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[MetricsServiceV2Transport]: """Returns an appropriate transport class. 
@@ -291,7 +291,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport, None] = None, + transport: Optional[Union[str, MetricsServiceV2Transport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -381,11 +381,11 @@ def __init__(self, *, ) def list_log_metrics(self, - request: Union[logging_metrics.ListLogMetricsRequest, dict] = None, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -497,11 +497,11 @@ def sample_list_log_metrics(): return response def get_log_metric(self, - request: Union[logging_metrics.GetLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. 
@@ -609,12 +609,12 @@ def sample_get_log_metric(): return response def create_log_metric(self, - request: Union[logging_metrics.CreateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, *, - parent: str = None, - metric: logging_metrics.LogMetric = None, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -740,12 +740,12 @@ def sample_create_log_metric(): return response def update_log_metric(self, - request: Union[logging_metrics.UpdateLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, *, - metric_name: str = None, - metric: logging_metrics.LogMetric = None, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -870,11 +870,11 @@ def sample_update_log_metric(): return response def delete_log_metric(self, - request: Union[logging_metrics.DeleteLogMetricRequest, dict] = None, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, *, - metric_name: str = None, + metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 0f1ef810eedc..2aa2b3e76835 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -53,7 +53,7 @@ class MetricsServiceV2Transport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index abe1685b1360..8ce93312c74c 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -45,14 +45,14 @@ class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: 
Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -178,8 +178,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index ce1d083c4c53..bdc115947558 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): @classmethod def create_channel(cls, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, 
scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -89,15 +89,15 @@ def create_channel(cls, def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py index c549623f5f0d..d892b5d297f3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -203,15 +203,15 @@ class MetricsServiceV2RestTransport(MetricsServiceV2Transport): def __init__(self, *, host: str = 'logging.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: 
Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[MetricsServiceV2RestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -290,9 +290,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.CreateLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Call the create log metric method over HTTP. @@ -386,9 +386,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.DeleteLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ): r"""Call the delete log metric method over HTTP. 
@@ -450,9 +450,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.GetLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Call the get log metric method over HTTP. @@ -537,9 +537,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.ListLogMetricsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.ListLogMetricsResponse: r"""Call the list log metrics method over HTTP. @@ -613,9 +613,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.UpdateLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Call the update log metric method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index f6a4c1d3ac55..a0a5cc98a931 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import monitored_resource_pb2 # type: ignore @@ -142,7 +144,7 @@ class LogEntry(proto.Message): http_request (google.logging.type.http_request_pb2.HttpRequest): Optional. Information about the HTTP request associated with this log entry, if applicable. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. A set of user-defined (key, value) data that provides additional information about the log entry. @@ -177,79 +179,79 @@ class LogEntry(proto.Message): associated with the log entry, if any. """ - log_name = proto.Field( + log_name: str = proto.Field( proto.STRING, number=12, ) - resource = proto.Field( + resource: monitored_resource_pb2.MonitoredResource = proto.Field( proto.MESSAGE, number=8, message=monitored_resource_pb2.MonitoredResource, ) - proto_payload = proto.Field( + proto_payload: any_pb2.Any = proto.Field( proto.MESSAGE, number=2, oneof='payload', message=any_pb2.Any, ) - text_payload = proto.Field( + text_payload: str = proto.Field( proto.STRING, number=3, oneof='payload', ) - json_payload = proto.Field( + json_payload: struct_pb2.Struct = proto.Field( proto.MESSAGE, number=6, oneof='payload', message=struct_pb2.Struct, ) - timestamp = proto.Field( + timestamp: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp, ) - receive_timestamp = proto.Field( + receive_timestamp: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=24, message=timestamp_pb2.Timestamp, ) - severity = proto.Field( + severity: log_severity_pb2.LogSeverity = proto.Field( proto.ENUM, number=10, enum=log_severity_pb2.LogSeverity, ) - insert_id = proto.Field( + insert_id: str = proto.Field( proto.STRING, number=4, ) - http_request = proto.Field( + http_request: http_request_pb2.HttpRequest = proto.Field( proto.MESSAGE, number=7, message=http_request_pb2.HttpRequest, ) - labels = proto.MapField( + labels: 
MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=11, ) - operation = proto.Field( + operation: 'LogEntryOperation' = proto.Field( proto.MESSAGE, number=15, message='LogEntryOperation', ) - trace = proto.Field( + trace: str = proto.Field( proto.STRING, number=22, ) - span_id = proto.Field( + span_id: str = proto.Field( proto.STRING, number=27, ) - trace_sampled = proto.Field( + trace_sampled: bool = proto.Field( proto.BOOL, number=30, ) - source_location = proto.Field( + source_location: 'LogEntrySourceLocation' = proto.Field( proto.MESSAGE, number=23, message='LogEntrySourceLocation', @@ -278,19 +280,19 @@ class LogEntryOperation(proto.Message): last log entry in the operation. """ - id = proto.Field( + id: str = proto.Field( proto.STRING, number=1, ) - producer = proto.Field( + producer: str = proto.Field( proto.STRING, number=2, ) - first = proto.Field( + first: bool = proto.Field( proto.BOOL, number=3, ) - last = proto.Field( + last: bool = proto.Field( proto.BOOL, number=4, ) @@ -318,15 +320,15 @@ class LogEntrySourceLocation(proto.Message): (Go), ``function`` (Python). """ - file = proto.Field( + file: str = proto.Field( proto.STRING, number=1, ) - line = proto.Field( + line: int = proto.Field( proto.INT64, number=2, ) - function = proto.Field( + function: str = proto.Field( proto.STRING, number=3, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index f6073b02a46c..0d1b9779ac1f 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import monitored_resource_pb2 # type: ignore @@ -61,7 +63,7 @@ class DeleteLogRequest(proto.Message): [LogEntry][google.logging.v2.LogEntry]. """ - log_name = proto.Field( + log_name: str = proto.Field( proto.STRING, number=1, ) @@ -106,13 +108,13 @@ class WriteLogEntriesRequest(proto.Message): "zone": "us-central1-a", "instance_id": "00000000000000000000" }} See [LogEntry][google.logging.v2.LogEntry]. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Optional. Default labels that are added to the ``labels`` field of all log entries in ``entries``. If a log entry already has a label with the same key as a label in this parameter, then the log entry's label is not changed. See [LogEntry][google.logging.v2.LogEntry]. - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): Required. The log entries to send to Logging. The order of log entries in this list does not matter. Values supplied in this method's ``log_name``, ``resource``, and ``labels`` @@ -158,30 +160,30 @@ class WriteLogEntriesRequest(proto.Message): properly before sending valuable data. 
""" - log_name = proto.Field( + log_name: str = proto.Field( proto.STRING, number=1, ) - resource = proto.Field( + resource: monitored_resource_pb2.MonitoredResource = proto.Field( proto.MESSAGE, number=2, message=monitored_resource_pb2.MonitoredResource, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - entries = proto.RepeatedField( + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( proto.MESSAGE, number=4, message=log_entry.LogEntry, ) - partial_success = proto.Field( + partial_success: bool = proto.Field( proto.BOOL, number=5, ) - dry_run = proto.Field( + dry_run: bool = proto.Field( proto.BOOL, number=6, ) @@ -196,7 +198,7 @@ class WriteLogEntriesPartialErrors(proto.Message): r"""Error details for WriteLogEntries with partial success. Attributes: - log_entry_errors (Mapping[int, google.rpc.status_pb2.Status]): + log_entry_errors (MutableMapping[int, google.rpc.status_pb2.Status]): When ``WriteLogEntriesRequest.partial_success`` is true, records the error status for entries that were not written due to a permanent error, keyed by the entry's zero-based @@ -206,7 +208,7 @@ class WriteLogEntriesPartialErrors(proto.Message): include per-entry errors. """ - log_entry_errors = proto.MapField( + log_entry_errors: MutableMapping[int, status_pb2.Status] = proto.MapField( proto.INT32, proto.MESSAGE, number=1, @@ -218,7 +220,7 @@ class ListLogEntriesRequest(proto.Message): r"""The parameters to ``ListLogEntries``. Attributes: - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Required. Names of one or more parent resources from which to retrieve log entries: @@ -270,23 +272,23 @@ class ListLogEntriesRequest(proto.Message): should be identical to those in the previous call. 
""" - resource_names = proto.RepeatedField( + resource_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - order_by = proto.Field( + order_by: str = proto.Field( proto.STRING, number=3, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=4, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=5, ) @@ -296,7 +298,7 @@ class ListLogEntriesResponse(proto.Message): r"""Result returned from ``ListLogEntries``. Attributes: - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. If ``entries`` is empty, ``nextPageToken`` may still be returned, indicating that more entries may exist. See ``nextPageToken`` for more @@ -321,12 +323,12 @@ class ListLogEntriesResponse(proto.Message): def raw_page(self): return self - entries = proto.RepeatedField( + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( proto.MESSAGE, number=1, message=log_entry.LogEntry, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -349,11 +351,11 @@ class ListMonitoredResourceDescriptorsRequest(proto.Message): should be identical to those in the previous call. """ - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) @@ -363,7 +365,7 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): r"""Result returned from ListMonitoredResourceDescriptors. Attributes: - resource_descriptors (Sequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + resource_descriptors (MutableSequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): A list of resource descriptors. 
next_page_token (str): If there might be more results than those appearing in this @@ -376,12 +378,12 @@ class ListMonitoredResourceDescriptorsResponse(proto.Message): def raw_page(self): return self - resource_descriptors = proto.RepeatedField( + resource_descriptors: MutableSequence[monitored_resource_pb2.MonitoredResourceDescriptor] = proto.RepeatedField( proto.MESSAGE, number=1, message=monitored_resource_pb2.MonitoredResourceDescriptor, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -411,7 +413,7 @@ class ListLogsRequest(proto.Message): ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Optional. The resource name that owns the logs: projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] @@ -423,19 +425,19 @@ class ListLogsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) - resource_names = proto.RepeatedField( + resource_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=8, ) @@ -445,7 +447,7 @@ class ListLogsResponse(proto.Message): r"""Result returned from ListLogs. Attributes: - log_names (Sequence[str]): + log_names (MutableSequence[str]): A list of log names. For example, ``"projects/my-project/logs/syslog"`` or ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. 
@@ -460,11 +462,11 @@ class ListLogsResponse(proto.Message): def raw_page(self): return self - log_names = proto.RepeatedField( + log_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -474,7 +476,7 @@ class TailLogEntriesRequest(proto.Message): r"""The parameters to ``TailLogEntries``. Attributes: - resource_names (Sequence[str]): + resource_names (MutableSequence[str]): Required. Name of a parent resource from which to retrieve log entries: @@ -509,15 +511,15 @@ class TailLogEntriesRequest(proto.Message): milliseconds. """ - resource_names = proto.RepeatedField( + resource_names: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=2, ) - buffer_window = proto.Field( + buffer_window: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=3, message=duration_pb2.Duration, @@ -528,12 +530,12 @@ class TailLogEntriesResponse(proto.Message): r"""Result returned from ``TailLogEntries``. Attributes: - entries (Sequence[google.cloud.logging_v2.types.LogEntry]): + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will order entries with increasing values of ``LogEntry.timestamp``. Ordering is not guaranteed between separate responses. 
- suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): + suppression_info (MutableSequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): If entries that otherwise would have been included in the session were not sent back to the client, counts of relevant entries omitted @@ -561,22 +563,22 @@ class Reason(proto.Enum): RATE_LIMIT = 1 NOT_CONSUMED = 2 - reason = proto.Field( + reason: 'TailLogEntriesResponse.SuppressionInfo.Reason' = proto.Field( proto.ENUM, number=1, enum='TailLogEntriesResponse.SuppressionInfo.Reason', ) - suppressed_count = proto.Field( + suppressed_count: int = proto.Field( proto.INT32, number=2, ) - entries = proto.RepeatedField( + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( proto.MESSAGE, number=1, message=log_entry.LogEntry, ) - suppression_info = proto.RepeatedField( + suppression_info: MutableSequence[SuppressionInfo] = proto.RepeatedField( proto.MESSAGE, number=2, message=SuppressionInfo, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 1fd81ef43382..4eb3b13afdca 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -104,33 +106,33 @@ class LogBucket(proto.Message): Output only. The bucket lifecycle state. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=3, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - retention_days = proto.Field( + retention_days: int = proto.Field( proto.INT32, number=11, ) - locked = proto.Field( + locked: bool = proto.Field( proto.BOOL, number=9, ) - lifecycle_state = proto.Field( + lifecycle_state: 'LifecycleState' = proto.Field( proto.ENUM, number=12, enum='LifecycleState', @@ -162,25 +164,25 @@ class LogView(proto.Message): resource.type = "gce_instance" AND LOG_ID("stdout") """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=3, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=7, ) @@ -236,7 +238,7 @@ class LogSink(proto.Message): disabled (bool): Optional. If set to True, then this sink is disabled and it does not export any log entries. - exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): + exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): Optional. Log entries that match any of the exclusion filters will not be exported. 
If a log entry is matched by both ``filter`` and one of ``exclusion_filters`` it will not @@ -298,56 +300,56 @@ class VersionFormat(proto.Enum): V2 = 1 V1 = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - destination = proto.Field( + destination: str = proto.Field( proto.STRING, number=3, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=5, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=18, ) - disabled = proto.Field( + disabled: bool = proto.Field( proto.BOOL, number=19, ) - exclusions = proto.RepeatedField( + exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( proto.MESSAGE, number=16, message='LogExclusion', ) - output_version_format = proto.Field( + output_version_format: VersionFormat = proto.Field( proto.ENUM, number=6, enum=VersionFormat, ) - writer_identity = proto.Field( + writer_identity: str = proto.Field( proto.STRING, number=8, ) - include_children = proto.Field( + include_children: bool = proto.Field( proto.BOOL, number=9, ) - bigquery_options = proto.Field( + bigquery_options: 'BigQueryOptions' = proto.Field( proto.MESSAGE, number=12, oneof='options', message='BigQueryOptions', ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp, @@ -379,11 +381,11 @@ class BigQueryOptions(proto.Message): will have this field set to false. """ - use_partitioned_tables = proto.Field( + use_partitioned_tables: bool = proto.Field( proto.BOOL, number=1, ) - uses_timestamp_column_partitioning = proto.Field( + uses_timestamp_column_partitioning: bool = proto.Field( proto.BOOL, number=3, ) @@ -420,15 +422,15 @@ class ListBucketsRequest(proto.Message): results might be available. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -438,7 +440,7 @@ class ListBucketsResponse(proto.Message): r"""The response from ListBuckets. Attributes: - buckets (Sequence[google.cloud.logging_v2.types.LogBucket]): + buckets (MutableSequence[google.cloud.logging_v2.types.LogBucket]): A list of buckets. next_page_token (str): If there might be more results than appear in this response, @@ -451,12 +453,12 @@ class ListBucketsResponse(proto.Message): def raw_page(self): return self - buckets = proto.RepeatedField( + buckets: MutableSequence['LogBucket'] = proto.RepeatedField( proto.MESSAGE, number=1, message='LogBucket', ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -486,15 +488,15 @@ class CreateBucketRequest(proto.Message): name field in the bucket is ignored. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - bucket_id = proto.Field( + bucket_id: str = proto.Field( proto.STRING, number=2, ) - bucket = proto.Field( + bucket: 'LogBucket' = proto.Field( proto.MESSAGE, number=3, message='LogBucket', @@ -534,16 +536,16 @@ class UpdateBucketRequest(proto.Message): Example: ``updateMask=retention_days``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - bucket = proto.Field( + bucket: 'LogBucket' = proto.Field( proto.MESSAGE, number=2, message='LogBucket', ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, @@ -568,7 +570,7 @@ class GetBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -592,7 +594,7 @@ class DeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -616,7 +618,7 @@ class UndeleteBucketRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -645,15 +647,15 @@ class ListViewsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -663,7 +665,7 @@ class ListViewsResponse(proto.Message): r"""The response from ListViews. Attributes: - views (Sequence[google.cloud.logging_v2.types.LogView]): + views (MutableSequence[google.cloud.logging_v2.types.LogView]): A list of views. next_page_token (str): If there might be more results than appear in this response, @@ -676,12 +678,12 @@ class ListViewsResponse(proto.Message): def raw_page(self): return self - views = proto.RepeatedField( + views: MutableSequence['LogView'] = proto.RepeatedField( proto.MESSAGE, number=1, message='LogView', ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -706,15 +708,15 @@ class CreateViewRequest(proto.Message): Required. The new view. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - view_id = proto.Field( + view_id: str = proto.Field( proto.STRING, number=2, ) - view = proto.Field( + view: 'LogView' = proto.Field( proto.MESSAGE, number=3, message='LogView', @@ -748,16 +750,16 @@ class UpdateViewRequest(proto.Message): Example: ``updateMask=filter``. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - view = proto.Field( + view: 'LogView' = proto.Field( proto.MESSAGE, number=2, message='LogView', ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, @@ -779,7 +781,7 @@ class GetViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -800,7 +802,7 @@ class DeleteViewRequest(proto.Message): ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -832,15 +834,15 @@ class ListSinksRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -850,7 +852,7 @@ class ListSinksResponse(proto.Message): r"""Result returned from ``ListSinks``. Attributes: - sinks (Sequence[google.cloud.logging_v2.types.LogSink]): + sinks (MutableSequence[google.cloud.logging_v2.types.LogSink]): A list of sinks. next_page_token (str): If there might be more results than appear in this response, @@ -863,12 +865,12 @@ class ListSinksResponse(proto.Message): def raw_page(self): return self - sinks = proto.RepeatedField( + sinks: MutableSequence['LogSink'] = proto.RepeatedField( proto.MESSAGE, number=1, message='LogSink', ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -891,7 +893,7 @@ class GetSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
""" - sink_name = proto.Field( + sink_name: str = proto.Field( proto.STRING, number=1, ) @@ -933,16 +935,16 @@ class CreateSinkRequest(proto.Message): [LogSink][google.logging.v2.LogSink]. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - sink = proto.Field( + sink: 'LogSink' = proto.Field( proto.MESSAGE, number=2, message='LogSink', ) - unique_writer_identity = proto.Field( + unique_writer_identity: bool = proto.Field( proto.BOOL, number=3, ) @@ -1001,20 +1003,20 @@ class UpdateSinkRequest(proto.Message): Example: ``updateMask=filter``. """ - sink_name = proto.Field( + sink_name: str = proto.Field( proto.STRING, number=1, ) - sink = proto.Field( + sink: 'LogSink' = proto.Field( proto.MESSAGE, number=2, message='LogSink', ) - unique_writer_identity = proto.Field( + unique_writer_identity: bool = proto.Field( proto.BOOL, number=3, ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=4, message=field_mask_pb2.FieldMask, @@ -1039,7 +1041,7 @@ class DeleteSinkRequest(proto.Message): Example: ``"projects/my-project-id/sinks/my-sink-id"``. """ - sink_name = proto.Field( + sink_name: str = proto.Field( proto.STRING, number=1, ) @@ -1091,28 +1093,28 @@ class LogExclusion(proto.Message): exclusions. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=2, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=3, ) - disabled = proto.Field( + disabled: bool = proto.Field( proto.BOOL, number=4, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, @@ -1146,15 +1148,15 @@ class ListExclusionsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -1164,7 +1166,7 @@ class ListExclusionsResponse(proto.Message): r"""Result returned from ``ListExclusions``. Attributes: - exclusions (Sequence[google.cloud.logging_v2.types.LogExclusion]): + exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): A list of exclusions. next_page_token (str): If there might be more results than appear in this response, @@ -1177,12 +1179,12 @@ class ListExclusionsResponse(proto.Message): def raw_page(self): return self - exclusions = proto.RepeatedField( + exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( proto.MESSAGE, number=1, message='LogExclusion', ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -1206,7 +1208,7 @@ class GetExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1235,11 +1237,11 @@ class CreateExclusionRequest(proto.Message): resource. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - exclusion = proto.Field( + exclusion: 'LogExclusion' = proto.Field( proto.MESSAGE, number=2, message='LogExclusion', @@ -1278,16 +1280,16 @@ class UpdateExclusionRequest(proto.Message): ``"filter,description"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - exclusion = proto.Field( + exclusion: 'LogExclusion' = proto.Field( proto.MESSAGE, number=2, message='LogExclusion', ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -1313,7 +1315,7 @@ class DeleteExclusionRequest(proto.Message): ``"projects/my-project-id/exclusions/my-exclusion-id"``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1345,7 +1347,7 @@ class GetCmekSettingsRequest(proto.Message): applies to all projects and folders in the GCP organization. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -1393,16 +1395,16 @@ class UpdateCmekSettingsRequest(proto.Message): Example: ``"updateMask=kmsKeyName"`` """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - cmek_settings = proto.Field( + cmek_settings: 'CmekSettings' = proto.Field( proto.MESSAGE, number=2, message='CmekSettings', ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=3, message=field_mask_pb2.FieldMask, @@ -1471,15 +1473,15 @@ class CmekSettings(proto.Message): for more information. 
""" - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - kms_key_name = proto.Field( + kms_key_name: str = proto.Field( proto.STRING, number=2, ) - service_account_id = proto.Field( + service_account_id: str = proto.Field( proto.STRING, number=3, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index c39349904eed..6b34228d58e5 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.api import distribution_pb2 # type: ignore @@ -125,7 +127,7 @@ class LogMetric(proto.Message): Example: ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors (Mapping[str, str]): + label_extractors (MutableMapping[str, str]): Optional. A map from a label key string to an extractor expression which is used to extract data from a log entry field and assign as the label value. 
Each label key @@ -166,48 +168,48 @@ class ApiVersion(proto.Enum): V2 = 0 V1 = 1 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - description = proto.Field( + description: str = proto.Field( proto.STRING, number=2, ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=3, ) - metric_descriptor = proto.Field( + metric_descriptor: metric_pb2.MetricDescriptor = proto.Field( proto.MESSAGE, number=5, message=metric_pb2.MetricDescriptor, ) - value_extractor = proto.Field( + value_extractor: str = proto.Field( proto.STRING, number=6, ) - label_extractors = proto.MapField( + label_extractors: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=7, ) - bucket_options = proto.Field( + bucket_options: distribution_pb2.Distribution.BucketOptions = proto.Field( proto.MESSAGE, number=8, message=distribution_pb2.Distribution.BucketOptions, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=9, message=timestamp_pb2.Timestamp, ) - update_time = proto.Field( + update_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, ) - version = proto.Field( + version: ApiVersion = proto.Field( proto.ENUM, number=4, enum=ApiVersion, @@ -237,15 +239,15 @@ class ListLogMetricsRequest(proto.Message): results might be available. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=2, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) @@ -255,7 +257,7 @@ class ListLogMetricsResponse(proto.Message): r"""Result returned from ListLogMetrics. Attributes: - metrics (Sequence[google.cloud.logging_v2.types.LogMetric]): + metrics (MutableSequence[google.cloud.logging_v2.types.LogMetric]): A list of logs-based metrics. 
next_page_token (str): If there might be more results than appear in this response, @@ -268,12 +270,12 @@ class ListLogMetricsResponse(proto.Message): def raw_page(self): return self - metrics = proto.RepeatedField( + metrics: MutableSequence['LogMetric'] = proto.RepeatedField( proto.MESSAGE, number=1, message='LogMetric', ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -291,7 +293,7 @@ class GetLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". """ - metric_name = proto.Field( + metric_name: str = proto.Field( proto.STRING, number=1, ) @@ -315,11 +317,11 @@ class CreateLogMetricRequest(proto.Message): must not have an identifier that already exists. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - metric = proto.Field( + metric: 'LogMetric' = proto.Field( proto.MESSAGE, number=2, message='LogMetric', @@ -345,11 +347,11 @@ class UpdateLogMetricRequest(proto.Message): Required. The updated metric. """ - metric_name = proto.Field( + metric_name: str = proto.Field( proto.STRING, number=1, ) - metric = proto.Field( + metric: 'LogMetric' = proto.Field( proto.MESSAGE, number=2, message='LogMetric', @@ -368,7 +370,7 @@ class DeleteLogMetricRequest(proto.Message): "projects/[PROJECT_ID]/metrics/[METRIC_ID]". 
""" - metric_name = proto.Field( + metric_name: str = proto.Field( proto.STRING, number=1, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 78f6e43dbb5b..cf16dded69b9 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -3823,7 +3823,7 @@ }, { "name": "resource_names", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "filter", @@ -3911,7 +3911,7 @@ }, { "name": "resource_names", - "type": "Sequence[str]" + "type": "MutableSequence[str]" }, { "name": "filter", @@ -4475,11 +4475,11 @@ }, { "name": "labels", - "type": "Mapping[str, str]" + "type": "MutableMapping[str, str]" }, { "name": "entries", - "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + "type": "MutableSequence[google.cloud.logging_v2.types.LogEntry]" }, { "name": "retry", @@ -4567,11 +4567,11 @@ }, { "name": "labels", - "type": "Mapping[str, str]" + "type": "MutableMapping[str, str]" }, { "name": "entries", - "type": "Sequence[google.cloud.logging_v2.types.LogEntry]" + "type": "MutableSequence[google.cloud.logging_v2.types.LogEntry]" }, { "name": "retry", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index d8a9a13b40cb..7da01bfa47f1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -164,9 +164,9 @@ def transport(self) -> CloudRedisTransport: get_transport_class = functools.partial(type(CloudRedisClient).get_transport_class, type(CloudRedisClient)) def __init__(self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, CloudRedisTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the cloud redis client. @@ -210,11 +210,11 @@ def __init__(self, *, ) async def list_instances(self, - request: Union[cloud_redis.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the @@ -256,7 +256,7 @@ async def sample_list_instances(): print(response) Args: - request (Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]]): The request object. Request for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
parent (:class:`str`): @@ -335,11 +335,11 @@ async def sample_list_instances(): return response async def get_instance(self, - request: Union[cloud_redis.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. @@ -371,7 +371,7 @@ async def sample_get_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]]): The request object. Request for [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. name (:class:`str`): @@ -435,13 +435,13 @@ async def sample_get_instance(): return response async def create_instance(self, - request: Union[cloud_redis.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance_id: str = None, - instance: cloud_redis.Instance = None, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory @@ -497,7 +497,7 @@ async def sample_create_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]]): The request object. Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. 
parent (:class:`str`): @@ -598,12 +598,12 @@ async def sample_create_instance(): return response async def update_instance(self, - request: Union[cloud_redis.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - instance: cloud_redis.Instance = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific @@ -649,7 +649,7 @@ async def sample_update_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]]): The request object. Request for [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): @@ -741,12 +741,12 @@ async def sample_update_instance(): return response async def upgrade_instance(self, - request: Union[cloud_redis.UpgradeInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, *, - name: str = None, - redis_version: str = None, + name: Optional[str] = None, + redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Upgrades Redis instance to the newer Redis version @@ -784,7 +784,7 @@ async def sample_upgrade_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.UpgradeInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.UpgradeInstanceRequest, dict]]): The request object. 
Request for [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. name (:class:`str`): @@ -870,12 +870,12 @@ async def sample_upgrade_instance(): return response async def import_instance(self, - request: Union[cloud_redis.ImportInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, *, - name: str = None, - input_config: cloud_redis.InputConfig = None, + name: Optional[str] = None, + input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Import a Redis RDB snapshot file from Cloud Storage @@ -923,7 +923,7 @@ async def sample_import_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.ImportInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.ImportInstanceRequest, dict]]): The request object. Request for [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. 
name (:class:`str`): @@ -1009,12 +1009,12 @@ async def sample_import_instance(): return response async def export_instance(self, - request: Union[cloud_redis.ExportInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, *, - name: str = None, - output_config: cloud_redis.OutputConfig = None, + name: Optional[str] = None, + output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Export Redis instance data into a Redis RDB format @@ -1058,7 +1058,7 @@ async def sample_export_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.ExportInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.ExportInstanceRequest, dict]]): The request object. Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. 
name (:class:`str`): @@ -1144,12 +1144,12 @@ async def sample_export_instance(): return response async def failover_instance(self, - request: Union[cloud_redis.FailoverInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, *, - name: str = None, - data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, + name: Optional[str] = None, + data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Initiates a failover of the master node to current @@ -1187,7 +1187,7 @@ async def sample_failover_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.FailoverInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.FailoverInstanceRequest, dict]]): The request object. Request for [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. name (:class:`str`): @@ -1274,11 +1274,11 @@ async def sample_failover_instance(): return response async def delete_instance(self, - request: Union[cloud_redis.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. Instance stops @@ -1315,7 +1315,7 @@ async def sample_delete_instance(): print(response) Args: - request (Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]): + request (Optional[Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]]): The request object. 
Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. name (:class:`str`): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 3a1341ac6a17..1ddfce8650bb 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import pkg_resources from google.api_core import client_options as client_options_lib @@ -60,7 +60,7 @@ class CloudRedisClientMeta(type): _transport_registry["rest"] = CloudRedisRestTransport def get_transport_class(cls, - label: str = None, + label: Optional[str] = None, ) -> Type[CloudRedisTransport]: """Returns an appropriate transport class. 
@@ -314,7 +314,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudRedisTransport, None] = None, + transport: Optional[Union[str, CloudRedisTransport]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -404,11 +404,11 @@ def __init__(self, *, ) def list_instances(self, - request: Union[cloud_redis.ListInstancesRequest, dict] = None, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the @@ -529,11 +529,11 @@ def sample_list_instances(): return response def get_instance(self, - request: Union[cloud_redis.GetInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. 
@@ -629,13 +629,13 @@ def sample_get_instance(): return response def create_instance(self, - request: Union[cloud_redis.CreateInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, *, - parent: str = None, - instance_id: str = None, - instance: cloud_redis.Instance = None, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory @@ -792,12 +792,12 @@ def sample_create_instance(): return response def update_instance(self, - request: Union[cloud_redis.UpdateInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, *, - update_mask: field_mask_pb2.FieldMask = None, - instance: cloud_redis.Instance = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates the metadata and configuration of a specific @@ -935,12 +935,12 @@ def sample_update_instance(): return response def upgrade_instance(self, - request: Union[cloud_redis.UpgradeInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.UpgradeInstanceRequest, dict]] = None, *, - name: str = None, - redis_version: str = None, + name: Optional[str] = None, + redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Upgrades Redis instance to the newer Redis version @@ -1064,12 +1064,12 @@ def 
sample_upgrade_instance(): return response def import_instance(self, - request: Union[cloud_redis.ImportInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.ImportInstanceRequest, dict]] = None, *, - name: str = None, - input_config: cloud_redis.InputConfig = None, + name: Optional[str] = None, + input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Import a Redis RDB snapshot file from Cloud Storage @@ -1203,12 +1203,12 @@ def sample_import_instance(): return response def export_instance(self, - request: Union[cloud_redis.ExportInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.ExportInstanceRequest, dict]] = None, *, - name: str = None, - output_config: cloud_redis.OutputConfig = None, + name: Optional[str] = None, + output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Export Redis instance data into a Redis RDB format @@ -1338,12 +1338,12 @@ def sample_export_instance(): return response def failover_instance(self, - request: Union[cloud_redis.FailoverInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.FailoverInstanceRequest, dict]] = None, *, - name: str = None, - data_protection_mode: cloud_redis.FailoverInstanceRequest.DataProtectionMode = None, + name: Optional[str] = None, + data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Initiates a failover of the master node to current @@ -1468,11 +1468,11 @@ def 
sample_failover_instance(): return response def delete_instance(self, - request: Union[cloud_redis.DeleteInstanceRequest, dict] = None, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 5e0e5bd15103..7afa09785635 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -50,7 +50,7 @@ class CloudRedisTransport(abc.ABC): def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index c645f37caff4..da987a8f865c 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -66,14 +66,14 @@ class CloudRedisGrpcTransport(CloudRedisTransport): def 
__init__(self, *, host: str = 'redis.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -200,8 +200,8 @@ def __init__(self, *, @classmethod def create_channel(cls, host: str = 'redis.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs) -> grpc.Channel: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index c9a27ef42e17..0495dbc63921 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -70,7 +70,7 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): @classmethod def create_channel(cls, host: str = 'redis.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -110,15 +110,15 @@ def create_channel(cls, def __init__(self, *, host: str = 'redis.googleapis.com', - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 2d2c0e9ca396..33f92413867a 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -327,15 +327,15 @@ class CloudRedisRestTransport(CloudRedisTransport): def __init__(self, *, host: str = 'redis.googleapis.com', - credentials: ga_credentials.Credentials=None, - credentials_file: str=None, - scopes: Sequence[str]=None, - client_cert_source_for_mtls: Callable[[ - ], Tuple[bytes, bytes]]=None, - quota_project_id: Optional[str]=None, - client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool]=False, - url_scheme: str='https', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', interceptor: Optional[CloudRedisRestInterceptor] = None, api_audience: Optional[str] = None, ) -> None: @@ -439,9 +439,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. 
@@ -527,9 +527,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. @@ -606,9 +606,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.ExportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. @@ -694,9 +694,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.FailoverInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. @@ -782,9 +782,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. 
@@ -860,9 +860,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.ImportInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. @@ -948,9 +948,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. @@ -1028,9 +1028,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. @@ -1116,9 +1116,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.UpgradeInstanceRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: float=None, - metadata: Sequence[Tuple[str, str]]=(), + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index bf54ebc15a29..c05c497a1995 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -65,7 +67,7 @@ class Instance(proto.Message): display_name (str): An arbitrary and optional user-provided name for the instance. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): Resource labels to represent user provided metadata location_id (str): @@ -124,7 +126,7 @@ class Instance(proto.Message): status_message (str): Output only. Additional information about the current status of this instance, if available. - redis_configs (Mapping[str, str]): + redis_configs (MutableMapping[str, str]): Optional. Redis configuration parameters, according to http://redis.io/topics/config. 
Currently, the only supported parameters are: @@ -188,84 +190,84 @@ class ConnectMode(proto.Enum): DIRECT_PEERING = 1 PRIVATE_SERVICE_ACCESS = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - display_name = proto.Field( + display_name: str = proto.Field( proto.STRING, number=2, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, ) - location_id = proto.Field( + location_id: str = proto.Field( proto.STRING, number=4, ) - alternative_location_id = proto.Field( + alternative_location_id: str = proto.Field( proto.STRING, number=5, ) - redis_version = proto.Field( + redis_version: str = proto.Field( proto.STRING, number=7, ) - reserved_ip_range = proto.Field( + reserved_ip_range: str = proto.Field( proto.STRING, number=9, ) - host = proto.Field( + host: str = proto.Field( proto.STRING, number=10, ) - port = proto.Field( + port: int = proto.Field( proto.INT32, number=11, ) - current_location_id = proto.Field( + current_location_id: str = proto.Field( proto.STRING, number=12, ) - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=14, enum=State, ) - status_message = proto.Field( + status_message: str = proto.Field( proto.STRING, number=15, ) - redis_configs = proto.MapField( + redis_configs: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=16, ) - tier = proto.Field( + tier: Tier = proto.Field( proto.ENUM, number=17, enum=Tier, ) - memory_size_gb = proto.Field( + memory_size_gb: int = proto.Field( proto.INT32, number=18, ) - authorized_network = proto.Field( + authorized_network: str = proto.Field( proto.STRING, number=20, ) - persistence_iam_identity = proto.Field( + persistence_iam_identity: str = proto.Field( proto.STRING, number=21, ) - connect_mode = proto.Field( + connect_mode: 
ConnectMode = proto.Field( proto.ENUM, number=22, enum=ConnectMode, @@ -296,15 +298,15 @@ class ListInstancesRequest(proto.Message): request, if any. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -315,7 +317,7 @@ class ListInstancesResponse(proto.Message): [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. Attributes: - instances (Sequence[google.cloud.redis_v1.types.Instance]): + instances (MutableSequence[google.cloud.redis_v1.types.Instance]): A list of Redis instances in the project in the specified location, or across all locations. @@ -332,7 +334,7 @@ class ListInstancesResponse(proto.Message): Token to retrieve the next page of results, or empty if there are no more results in the list. - unreachable (Sequence[str]): + unreachable (MutableSequence[str]): Locations that could not be reached. """ @@ -340,16 +342,16 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField( + instances: MutableSequence['Instance'] = proto.RepeatedField( proto.MESSAGE, number=1, message='Instance', ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - unreachable = proto.RepeatedField( + unreachable: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -366,7 +368,7 @@ class GetInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -395,15 +397,15 @@ class CreateInstanceRequest(proto.Message): Required. 
A Redis [Instance] resource """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - instance_id = proto.Field( + instance_id: str = proto.Field( proto.STRING, number=2, ) - instance = proto.Field( + instance: 'Instance' = proto.Field( proto.MESSAGE, number=3, message='Instance', @@ -430,12 +432,12 @@ class UpdateInstanceRequest(proto.Message): update_mask are updated. """ - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=1, message=field_mask_pb2.FieldMask, ) - instance = proto.Field( + instance: 'Instance' = proto.Field( proto.MESSAGE, number=2, message='Instance', @@ -456,11 +458,11 @@ class UpgradeInstanceRequest(proto.Message): Redis software to upgrade to. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - redis_version = proto.Field( + redis_version: str = proto.Field( proto.STRING, number=2, ) @@ -477,7 +479,7 @@ class DeleteInstanceRequest(proto.Message): where ``location_id`` refers to a GCP region. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -492,7 +494,7 @@ class GcsSource(proto.Message): 'gs://my_bucket/my_object'). """ - uri = proto.Field( + uri: str = proto.Field( proto.STRING, number=1, ) @@ -511,7 +513,7 @@ class InputConfig(proto.Message): This field is a member of `oneof`_ ``source``. """ - gcs_source = proto.Field( + gcs_source: 'GcsSource' = proto.Field( proto.MESSAGE, number=1, oneof='source', @@ -532,11 +534,11 @@ class ImportInstanceRequest(proto.Message): Required. Specify data to be imported. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - input_config = proto.Field( + input_config: 'InputConfig' = proto.Field( proto.MESSAGE, number=3, message='InputConfig', @@ -553,7 +555,7 @@ class GcsDestination(proto.Message): overwritten. 
""" - uri = proto.Field( + uri: str = proto.Field( proto.STRING, number=1, ) @@ -572,7 +574,7 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. """ - gcs_destination = proto.Field( + gcs_destination: 'GcsDestination' = proto.Field( proto.MESSAGE, number=1, oneof='destination', @@ -593,11 +595,11 @@ class ExportInstanceRequest(proto.Message): Required. Specify data to be exported. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - output_config = proto.Field( + output_config: 'OutputConfig' = proto.Field( proto.MESSAGE, number=3, message='OutputConfig', @@ -626,11 +628,11 @@ class DataProtectionMode(proto.Enum): LIMITED_DATA_LOSS = 1 FORCE_DATA_LOSS = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - data_protection_mode = proto.Field( + data_protection_mode: DataProtectionMode = proto.Field( proto.ENUM, number=2, enum=DataProtectionMode, @@ -658,33 +660,33 @@ class OperationMetadata(proto.Message): API version. """ - create_time = proto.Field( + create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - target = proto.Field( + target: str = proto.Field( proto.STRING, number=3, ) - verb = proto.Field( + verb: str = proto.Field( proto.STRING, number=4, ) - status_detail = proto.Field( + status_detail: str = proto.Field( proto.STRING, number=5, ) - cancel_requested = proto.Field( + cancel_requested: bool = proto.Field( proto.BOOL, number=6, ) - api_version = proto.Field( + api_version: str = proto.Field( proto.STRING, number=7, ) @@ -697,7 +699,7 @@ class LocationMetadata(proto.Message): ``google.cloud.location.Location.metadata`` field. 
Attributes: - available_zones (Mapping[str, google.cloud.redis_v1.types.ZoneMetadata]): + available_zones (MutableMapping[str, google.cloud.redis_v1.types.ZoneMetadata]): Output only. The set of available zones in the location. The map is keyed by the lowercase ID of each zone, as defined by GCE. These keys can be specified in ``location_id`` or @@ -705,7 +707,7 @@ class LocationMetadata(proto.Message): instance. """ - available_zones = proto.MapField( + available_zones: MutableMapping[str, 'ZoneMetadata'] = proto.MapField( proto.STRING, proto.MESSAGE, number=1, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index f23a2c9d8571..7bded119c5a0 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -72,7 +72,7 @@ def test_ident(): def test_ident_repeated(): REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') field = make_field(type='TYPE_BOOL', label=REP) - assert str(field.ident) == 'Sequence[bool]' + assert str(field.ident) == 'MutableSequence[bool]' def test_repeated(): @@ -124,7 +124,7 @@ def test_ident_map(): type='TYPE_MESSAGE', ) - assert str(field.ident) == "Mapping[str, str]" + assert str(field.ident) == "MutableMapping[str, str]" def test_required(): @@ -148,7 +148,7 @@ def test_ident_sphinx(): def test_ident_sphinx_repeated(): REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') field = make_field(type='TYPE_BOOL', label=REP) - assert field.ident.sphinx == 'Sequence[bool]' + assert field.ident.sphinx == 'MutableSequence[bool]' def test_ident_sphinx_map(): @@ -167,7 +167,7 @@ def test_ident_sphinx_map(): label=3, type='TYPE_MESSAGE', ) - assert field.ident.sphinx == 'Mapping[str, str]' + assert field.ident.sphinx == 'MutableMapping[str, str]' def test_resource_reference(): From 8d3ffd54e64f51c640a2cca241417fdb3e3c4fe9 Mon Sep 
17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Nov 2022 15:24:03 -0500 Subject: [PATCH 0919/1339] chore(main): release 1.6.0 (#1476) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 7f554e34fa84..87e1a8f5e819 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.6.0](https://github.com/googleapis/gapic-generator-python/compare/v1.5.0...v1.6.0) (2022-11-09) + + +### Features + +* Add typing to proto.Message based class attributes ([#1474](https://github.com/googleapis/gapic-generator-python/issues/1474)) ([3bd2f87](https://github.com/googleapis/gapic-generator-python/commit/3bd2f8703e4a1a67e6a4c281890c4f99eda13fe7)) + + +### Bug Fixes + +* Detect changed Python files in Git pre-commit hook ([#1475](https://github.com/googleapis/gapic-generator-python/issues/1475)) ([2a232fc](https://github.com/googleapis/gapic-generator-python/commit/2a232fcc5297d6cd2ee1562c01843a3074a4da1f)) +* Snippetgen handling of repeated enum field ([#1443](https://github.com/googleapis/gapic-generator-python/issues/1443)) ([70d7882](https://github.com/googleapis/gapic-generator-python/commit/70d7882b39c754ebebfd2b57fa5e89f515e3192f)) + ## [1.5.0](https://github.com/googleapis/gapic-generator-python/compare/v1.4.4...v1.5.0) (2022-10-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 54982cdbf904..a07e30273d70 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = 
"https://github.com/googleapis/gapic-generator-python" -version = "1.5.0" +version = "1.6.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 978a3d00488b2dfd8d6114e3e58236cba22e7b21 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Nov 2022 19:42:58 +0100 Subject: [PATCH 0920/1339] chore(deps): update dependency pytest-asyncio to v0.20.2 (#1489) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 16ed3b6437fe..909bfbc9f786 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,4 +9,4 @@ PyYAML==6.0 setuptools==65.5.0 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 -pytest-asyncio==0.20.1 \ No newline at end of file +pytest-asyncio==0.20.2 \ No newline at end of file From 95c215c60354a2b61bb0ed02ad627991c6ea0bda Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 11 Nov 2022 20:19:22 +0100 Subject: [PATCH 0921/1339] chore(deps): update dependency setuptools to v65.5.1 (#1485) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 909bfbc9f786..8c7e3f941a16 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,7 +6,7 @@ MarkupSafe==2.1.1 protobuf==3.20.3 pypandoc==1.10 PyYAML==6.0 -setuptools==65.5.0 +setuptools==65.5.1 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 pytest-asyncio==0.20.2 \ No newline at end of file From dc8958994fe3774cf1aeddd27021f6dd56988b0c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 11 Nov 2022 15:13:22 -0500 Subject: [PATCH 0922/1339] fix: allow google-cloud-documentai < 3 (#1487) --- 
packages/gapic-generator/gapic/templates/setup.py.j2 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 6f5c02e11765..527241b82b75 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -35,7 +35,7 @@ dependencies = [ 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', {% endif %} {% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} - 'google-cloud-documentai >= 1.2.1, < 2.0.0dev', + 'google-cloud-documentai >= 1.2.1, < 3.0.0dev', {% endif %} ] url = "https://github.com/googleapis/python-{{ api.naming.warehouse_package_name|replace("google-cloud-", "") }}" @@ -86,4 +86,4 @@ setuptools.setup( include_package_data=True, zip_safe=False, ) -{% endblock %} \ No newline at end of file +{% endblock %} From c7991c79e764893492738ad6465aa001582b0a5d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 11 Nov 2022 16:01:20 -0500 Subject: [PATCH 0923/1339] chore: fix docs build (#1488) * chore: fix docs build This addresses the error "duplicate object description" seen in the docs build of downstream clients. 
* regenerate golden files --- .../gapic/templates/docs/%name_%version/types.rst.j2 | 1 - .../tests/integration/goldens/asset/docs/asset_v1/types.rst | 1 - .../goldens/credentials/docs/credentials_v1/types.rst | 1 - .../integration/goldens/eventarc/docs/eventarc_v1/types.rst | 1 - .../tests/integration/goldens/logging/docs/logging_v2/types.rst | 1 - .../tests/integration/goldens/redis/docs/redis_v1/types.rst | 1 - 6 files changed, 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 index 2f453dd8cd4d..a77df003f7bd 100644 --- a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 @@ -3,5 +3,4 @@ Types for {{ api.naming.long_name }} {{ api.naming.version }} API .. automodule:: {{ api.naming.namespace|join('.')|lower }}.{{ api.naming.versioned_module_name }}.types :members: - :undoc-members: :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst index c75a1efdeabc..750d9c160357 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Asset v1 API .. 
automodule:: google.cloud.asset_v1.types :members: - :undoc-members: :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst index 97befa67ef15..c84bb633650f 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Iam Credentials v1 API .. automodule:: google.iam.credentials_v1.types :members: - :undoc-members: :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst index 5d1111031f59..5a74926d5bf1 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Eventarc v1 API .. automodule:: google.cloud.eventarc_v1.types :members: - :undoc-members: :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst index 843c0dc370d4..a3ac7064b329 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Logging v2 API .. 
automodule:: google.cloud.logging_v2.types :members: - :undoc-members: :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst index 38a6d6595f8a..7eb7c77e4ced 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Redis v1 API .. automodule:: google.cloud.redis_v1.types :members: - :undoc-members: :show-inheritance: From ae9a73eadccea9808512edcbbd3a7d194433374e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 11 Nov 2022 17:51:17 -0500 Subject: [PATCH 0924/1339] chore: fix url in templated setup.py (#1486) * chore: fix url in templated setup.py * regenerate golden files --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 527241b82b75..6449154259a4 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -38,7 +38,7 @@ dependencies = [ 'google-cloud-documentai >= 1.2.1, < 3.0.0dev', {% endif %} ] -url = "https://github.com/googleapis/python-{{ api.naming.warehouse_package_name|replace("google-cloud-", "") }}" +url = "https://github.com/googleapis/python-{{ api.naming.warehouse_package_name|replace("google-cloud-", "")|replace("google-", "") }}" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 86455ee808ff..57ac52c86d6a 100644 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -40,7 +40,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -url = "https://github.com/googleapis/python-google-iam-credentials" +url = "https://github.com/googleapis/python-iam-credentials" package_root = os.path.abspath(os.path.dirname(__file__)) From 32c23b37998e1239b8b7e939924e7444780e8b49 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 11 Nov 2022 18:45:31 -0500 Subject: [PATCH 0925/1339] fix: fix typo in testing/constraints-3.7.txt (#1483) * fix: fix typo in testing/constraints-3.7.txt * generate goldens * Remove erroneous comment * generate golden files --- .../gapic/templates/testing/constraints-3.7.txt.j2 | 8 +++----- .../integration/goldens/asset/testing/constraints-3.7.txt | 4 +--- .../goldens/credentials/testing/constraints-3.7.txt | 2 -- .../goldens/eventarc/testing/constraints-3.7.txt | 2 -- .../goldens/logging/testing/constraints-3.7.txt | 2 -- .../integration/goldens/redis/testing/constraints-3.7.txt | 2 -- 6 files changed, 4 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 8026a1a321cb..a747f71dbb67 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -4,14 +4,12 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -# This file is intentionally left empty to test the -# latest versions of dependencies. 
google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} -grpc-google-iam-v1=0.12.4 +grpc-google-iam-v1==0.12.4 {% endif %} {% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} -google-cloud-documentai=1.2.1 -{% endif %} \ No newline at end of file +google-cloud-documentai==1.2.1 +{% endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index e78e83cfcfd8..883a87aefd06 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -4,9 +4,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -# This file is intentionally left empty to test the -# latest versions of dependencies. google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 -grpc-google-iam-v1=0.12.4 +grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index aca9f2d36553..6f3158cc2034 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -4,8 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -# This file is intentionally left empty to test the -# latest versions of dependencies. 
google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index aca9f2d36553..6f3158cc2034 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -4,8 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -# This file is intentionally left empty to test the -# latest versions of dependencies. google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index aca9f2d36553..6f3158cc2034 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -4,8 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -# This file is intentionally left empty to test the -# latest versions of dependencies. 
google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index aca9f2d36553..6f3158cc2034 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -4,8 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -# This file is intentionally left empty to test the -# latest versions of dependencies. google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 From bd57f19c6d033caf368b1c946b88e43e5c404722 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sat, 12 Nov 2022 00:05:58 +0000 Subject: [PATCH 0926/1339] chore(main): release 1.6.1 (#1490) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 87e1a8f5e819..1cb15307fae2 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.6.1](https://github.com/googleapis/gapic-generator-python/compare/v1.6.0...v1.6.1) (2022-11-11) + + +### Bug Fixes + +* Allow google-cloud-documentai < 3 ([#1487](https://github.com/googleapis/gapic-generator-python/issues/1487)) ([b717e92](https://github.com/googleapis/gapic-generator-python/commit/b717e92f8e184edcc6e2ad3b696817435e5e37e5)) +* Fix typo in testing/constraints-3.7.txt ([#1483](https://github.com/googleapis/gapic-generator-python/issues/1483)) 
([0ba5bc1](https://github.com/googleapis/gapic-generator-python/commit/0ba5bc16e4ad2197cb7071a9f8c9164b1c6f080d)) + ## [1.6.0](https://github.com/googleapis/gapic-generator-python/compare/v1.5.0...v1.6.0) (2022-11-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a07e30273d70..788891f55884 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.6.0" +version = "1.6.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 1b75666aff7c69bfc2a07fb23154bbde719c870e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 15 Nov 2022 22:51:14 +0100 Subject: [PATCH 0927/1339] chore(deps): update dependency googleapis-common-protos to v1.57.0 (#1493) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8c7e3f941a16..2e478bda01fa 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,6 +1,6 @@ click==8.1.3 google-api-core==2.10.2 -googleapis-common-protos==1.56.4 +googleapis-common-protos==1.57.0 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==3.20.3 From 1d3b4d0b1daff4166b41f801c4d1b8adbaec301e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Nov 2022 22:13:26 +0000 Subject: [PATCH 0928/1339] chore(main): release 1.6.2 (#1494) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md 
b/packages/gapic-generator/CHANGELOG.md index 1cb15307fae2..121beab74950 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.6.2](https://github.com/googleapis/gapic-generator-python/compare/v1.6.1...v1.6.2) (2022-11-15) + + +### Bug Fixes + +* update dependency googleapis-common-protos to v1.57.0 ([786b0d4](https://github.com/googleapis/gapic-generator-python/commit/786b0d401fe6a188005b4bf076595a37821ee761)) + ## [1.6.1](https://github.com/googleapis/gapic-generator-python/compare/v1.6.0...v1.6.1) (2022-11-11) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 788891f55884..9c0fdd507e8c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.6.1" +version = "1.6.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 91f1884ba931a54ee240403df8080e9f90a44e36 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Wed, 16 Nov 2022 05:31:43 -0800 Subject: [PATCH 0929/1339] fix: snippetgen should call await on the operation coroutine before calling result (#1495) * fix: snippetgen should call await on the operation coroutine before calling result * update golden files --- .../gapic/templates/examples/feature_fragments.j2 | 12 +++++++++++- .../services/asset_service/async_client.py | 4 ++-- ...service_analyze_iam_policy_longrunning_async.py | 2 +- ..._generated_asset_service_export_assets_async.py | 2 +- .../eventarc_v1/services/eventarc/async_client.py | 6 +++--- ...c_v1_generated_eventarc_create_trigger_async.py | 2 +- ...c_v1_generated_eventarc_delete_trigger_async.py | 2 +- ...c_v1_generated_eventarc_update_trigger_async.py | 2 +- .../redis_v1/services/cloud_redis/async_client.py | 14 +++++++------- 
..._generated_cloud_redis_create_instance_async.py | 2 +- ..._generated_cloud_redis_delete_instance_async.py | 2 +- ..._generated_cloud_redis_export_instance_async.py | 2 +- ...enerated_cloud_redis_failover_instance_async.py | 2 +- ..._generated_cloud_redis_import_instance_async.py | 2 +- ..._generated_cloud_redis_update_instance_async.py | 2 +- ...generated_cloud_redis_upgrade_instance_async.py | 2 +- ...nerated_snippets_method_lro_signatures_async.py | 2 +- .../tests/unit/samplegen/test_template.py | 2 +- 18 files changed, 37 insertions(+), 27 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index d6d0459d4aad..aa1ae3208b3b 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -242,6 +242,14 @@ client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request {% endif %} {% endmacro %} +{% macro operation_text(transport) %} +{% if transport == "grpc-async" %} +(await operation) +{% else %} +operation +{% endif %} +{% endmacro %} + {# Setting up the method invocation is the responsibility of the caller: #} {# it's just easier to set up client side streaming and other things from outside this macro. 
#} {% macro render_calling_form(method_invocation_text, calling_form, calling_form_enum, transport, response_statements ) %} @@ -286,7 +294,9 @@ operation = {{ method_invocation_text|trim }} print("Waiting for operation to complete...") -response = {% if transport == "grpc-async" %}await {% endif %}operation.result() +{% with operation_text = operation_text(transport) %} +response = {{ operation_text|trim }}.result() +{% endwith %} # Handle the response {% for statement in response_statements %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 977b03e8876b..53e7e2f127b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -239,7 +239,7 @@ async def sample_export_assets(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1610,7 +1610,7 @@ async def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index e1305583a82d..0fbd794418ab 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -55,7 +55,7 @@ async def sample_analyze_iam_policy_longrunning(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index f9ca7f174252..36815d3c7c29 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -52,7 +52,7 @@ async def sample_export_assets(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index d799051df108..0b4e1988fdf5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -459,7 +459,7 @@ async def 
sample_create_trigger(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -593,7 +593,7 @@ async def sample_update_trigger(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -730,7 +730,7 @@ async def sample_delete_trigger(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py index 958bbd175557..711417033982 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -58,7 +58,7 @@ async def sample_create_trigger(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py index fc2b876000eb..d5063eff520f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -49,7 +49,7 @@ async def sample_delete_trigger(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py index 9d9b886721bb..a0e2a98f606f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -48,7 +48,7 @@ async def sample_update_trigger(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 7da01bfa47f1..0cdf3da95d62 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -491,7 +491,7 @@ async def sample_create_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -643,7 +643,7 @@ async def 
sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -778,7 +778,7 @@ async def sample_upgrade_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -917,7 +917,7 @@ async def sample_import_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1052,7 +1052,7 @@ async def sample_export_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1181,7 +1181,7 @@ async def sample_failover_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) @@ -1309,7 +1309,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 2e9194f99935..d8dadeb17b0d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -55,7 +55,7 @@ async def sample_create_instance(): print("Waiting for operation to 
complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 38769231234a..5db2cb522ae4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -48,7 +48,7 @@ async def sample_delete_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index 6770074db514..0a8be9007b5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -52,7 +52,7 @@ async def sample_export_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py 
b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index 10b7b450b933..a92223c78562 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -48,7 +48,7 @@ async def sample_failover_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index 36a58d830b48..168b60a07ab8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -52,7 +52,7 @@ async def sample_import_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 6d1f1704a7eb..fc085a4783e4 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -53,7 +53,7 @@ async def sample_update_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index 9dfd7b08de65..00c64cbebb7a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -49,7 +49,7 @@ async def sample_upgrade_instance(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index 588e8a2bc8cb..cc387195b745 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -56,7 +56,7 @@ async def sample_method_lro_signatures(): print("Waiting for operation to complete...") - response = await 
operation.result() + response = (await operation).result() # Handle the response print(response) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index 4d1d26c2e804..a5405b18b0f8 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -902,7 +902,7 @@ def test_render_calling_form_longrunning_async(): print("Waiting for operation to complete...") - response = await operation.result() + response = (await operation).result() # Handle the response print("Test print statement") From 488eb1182f2578af43621799ad17ce830b706092 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sun, 27 Nov 2022 01:04:39 +0100 Subject: [PATCH 0930/1339] chore(deps): update dependency setuptools to v65.6.3 (#1497) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2e478bda01fa..7725e71b02d7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,7 +6,7 @@ MarkupSafe==2.1.1 protobuf==3.20.3 pypandoc==1.10 PyYAML==6.0 -setuptools==65.5.1 +setuptools==65.6.3 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 pytest-asyncio==0.20.2 \ No newline at end of file From c6c562688d2e41597a46c8b08c97b6fc920892aa Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Dec 2022 17:29:09 +0100 Subject: [PATCH 0931/1339] chore(deps): update dependency google-api-core to v2.11.0 (#1505) --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7725e71b02d7..068aa6233547 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ 
click==8.1.3 -google-api-core==2.10.2 +google-api-core==2.11.0 googleapis-common-protos==1.57.0 jinja2==3.1.2 MarkupSafe==2.1.1 From 7cfcf7466e09ab80d0434d4dc05b2965a9c6e5d6 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Mon, 5 Dec 2022 10:57:34 -0800 Subject: [PATCH 0932/1339] feat: add snippetgen config language and testing resource files (#1504) * feat: add snippetgen config language and testing resource files --- .../gapic/configurable_snippetgen/__init__.py | 13 + .../snippet_config_language.proto | 1110 +++++++++++++++++ .../snippet_config_language_pb2.py | 158 +++ .../resources/README.md | 38 + .../resources/speech/request.desc | Bin 0 -> 213102 bytes .../speech/speech_createCustomClass.json | 179 +++ ...ptation_create_custom_class_basic_async.py | 72 ++ .../configurable_snippetgen/test_resources.py | 49 + 8 files changed, 1619 insertions(+) create mode 100644 packages/gapic-generator/gapic/configurable_snippetgen/__init__.py create mode 100644 packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto create mode 100644 packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py create mode 100644 packages/gapic-generator/tests/configurable_snippetgen/resources/README.md create mode 100644 packages/gapic-generator/tests/configurable_snippetgen/resources/speech/request.desc create mode 100644 packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_createCustomClass.json create mode 100644 packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py create mode 100644 packages/gapic-generator/tests/configurable_snippetgen/test_resources.py diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/__init__.py b/packages/gapic-generator/gapic/configurable_snippetgen/__init__.py new file mode 100644 index 000000000000..63f163286275 --- /dev/null +++ 
b/packages/gapic-generator/gapic/configurable_snippetgen/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto new file mode 100644 index 000000000000..d1aecb9b983a --- /dev/null +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto @@ -0,0 +1,1110 @@ +// Copyright 2022 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.tools.snippetgen.configlanguage.v1; + +import "google/protobuf/descriptor.proto"; + +option csharp_namespace = "Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1"; +option php_namespace = "Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1"; +option ruby_package = "Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1"; +option java_multiple_files = true; +option java_outer_classname = "SnippetConfigLanguageProto"; +option java_package = "com.google.cloud.tools.snippetgen.configlanguage.v1"; + +// The snippet configuration for a single snippet that will be generated across +// all languages. +message SnippetConfig { + // Metadata for the snippet configuration. Some information contained here + // will be included in the generated snippet own metadata. + SnippetConfigMetadata metadata = 1; + + // The RPC this snippet is for. + Rpc rpc = 2; + + // The generated snippet method signature. + SnippetSignature signature = 3; + + // The actual snippet (code). + Snippet snippet = 4; +} + +// Metadata for the snippet configuration. Some information contained here will +// be included in the generated snippet's own metadata. +message SnippetConfigMetadata { + // Whether this snippet config should be skipped for/ generation. + // This is useful when snippets are developed before features are released. + // Defaults to false. + bool skipped = 1; + + // List of languages to skip snippet generation for. + // Config producers should specify here languages for which some of the + // capabilities required by the snippet are not supported. For instance, if a + // specific client option is required by the snippet, and that option is not + // supported by .NET client libraries, then C_SHARP should be specified here. + repeated GeneratorOutputLanguage skipped_languages = 2; + + // The config id. This will be used to generate the region tag of the snippet. + // Required. 
+ // The region tag format will be: + // [{START|END} + // ${apishortname}_${apiVersion}_config_${ServiceName}_${RpcName}_${config_id}_{sync|async}] + // - config_id must be unique for a given Service/RPC pair. + // - config_id must not contain the API, Service or RPC identifiers as that + // will be automatically included in the region tag. + // - config_id may only contain letters and numbers + // - config_id should be PascalCased + // - Preferable, config_id should not exceed 50 characters, although this is + // not a hard requirement. + // - config_id may be somewhat descriptive of the snippet or just a random + // identifier. If it's descriptive, do not make it overly verbose, there are + // the human readable snippet_name and snippet_description fields for properly + // describing the snippet. For instance, prefer DefaultDatasetCreation to + // DatasetCreationUsingDefaultValuesExceptForDatasetNameWhichIsRequired. + string config_id = 3; + + // The human readable name of the snippet. + // To be included in metadata and in the sample itself in the top-level + // description. + string snippet_name = 4; + + // The description of the snippet. + // To be included in metadata and in the sample itself in the top-level + // description. + string snippet_description = 5; +} + +// A programming language in which snippets are generated. +// Note that this is different from +// google.cloud.tools.snippetgen.snippetindex.v1.Language, i.e. language +// specified in snippet metadata, as metadata can be written for both generated +// and handwritten snippets. In turn, we'll always know which generators we are +// writing snippet configs for and which are the output languages of those +// generators. +enum GeneratorOutputLanguage { + // The language has not been specified. Consumers should not see this value. 
+ GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED = 0; + C_PLUS_PLUS = 1; + C_SHARP = 2; + GO = 3; + JAVA = 4; + JAVASCRIPT = 5; + PHP = 6; + PYTHON = 7; + RUBY = 8; +} + +// An RPC for which a Snippet may be defined. +message Rpc { + // This is identical to the protobuf package ending with a version number, + // after removing said version number. For instance, where the api ID is + // "google.cloud.translate.v3" the API name is "google.cloud.translate". + string proto_package = 1; + + // The list of API versions for which one snippet is defined. + // The same RPC may exist in different versions (v1, v1beta, v2) of the API + // and may be covered by the same snippet config. + repeated string api_version = 2; + + // The protobuf service name relative to the api name + version where the RPC + // is defined. Example: "TransalationService". + string service_name = 3; + + // The RPC name relative to the service name. + string rpc_name = 4; +} + +message SnippetSignature { + // The name for the snippet method specified in snake_case. Required. + string snippet_method_name = 1; + + // The return type for the snippet method. Unset for methods that do not + // return a value or if the sample is to be generated for non statically-typed + // languages only. + Type return_type = 2; + + // Synchronous preference indicator for the generated snippet. + SyncPreference sync_preference = 3; + + // The list of parameters that the snippet will receive. May be empty if the + // snippet method does not receive parameters. If any, parameters should + // appear in generated code in the same order as they appear in this field. + // Note also that if parameters have assignments, some languages + // will represent these as default parameter values which are + // usually scalars. + repeated Statement.Declaration parameters = 4; + + // Synchronous preference indicator for the generated snippet. 
+ // Note that some languages only support one-off sync or async methods so this + // is just a preference indicator. + enum SyncPreference { + // Generate an async, sync or both snippets as per language + // (style, guidelines, community) preference. + LANGUAGE_PREFERRED = 0; + + // Languages supporting async methods should generate an async snippet. + // Languages that do not support async methods will generate a sync snippet + // instead. + PREFER_ASYNC = 1; + + // Languages supporting sync methods should generate a sync snippet. + // Languages that do not support sync methods will generate an async snippet + // instead. + PREFER_SYNC = 2; + + // Languages that support both async and sync methods should generate both a + // sync and an async snippets. Note that different samples should be in + // differnt files. + // Languages that support only one of async and sync methods should + // generate the supported snippet. + BOTH = 3; + } +} + +// The actual snippet (code), including client and request initialization, +// client call and response handling. +message Snippet { + // Service client initialization. Optional. If unset language-specific + // defaults will be applied. + ClientInitialization service_client_initialization = 1; + + // Call configuration including request initialization and response handling. + // Required. + oneof call { + // A standard RPC operation. + Standard standard = 2; + + // A paginated RPC operation. + Paginated paginated = 3; + + // A long-running RPC operation. + Lro lro = 4; + + // A client streaming RPC operation. + ClientStreaming client_streaming = 5; + + // A server streaming RPC operation. + ServerStreaming server_streaming = 6; + + // A bidirectional streaming RPC operation. + BidiStreaming bidi_streaming = 7; + } + // Statements to be executed before the snippet ends. For instance, some + // Statement.StandardOutput statements and the Statement.Return statement. + // May be empty. 
If any, statements should appear on generated code in the + // same order as they appear on this field. + repeated Statement final_statements = 8; + + // Options to initialize the client with. + message ClientInitialization { + // Statements to be executed before the service client is initialized. For + // instance, some Statement.Declaration statements of variables to be used + // in service client initialization. May be empty. If any, statements will + // be executed in the same order as they appear on + // pre_request_initialization. + repeated Statement pre_client_initialization = 1; + + // Custom endpoint to use in client initialization. Optional. + ServiceEndpoint custom_service_endpoint = 2; + + // Different languages have different requirements for how a service + // endpoint should be specified, so in sample configuration the endpoint + // needs to be specified in parts that each language generator can then use + // to compose the string literal to include in the sample according to their + // own requirements. None of this manipulation should be included in the + // sample itself, where only a string literal is expected as the value for + // the custom endpoint. + message ServiceEndpoint { + // The schema for the service endpoint. + ServiceEndpointSchema schema = 1; + + // The unregionalized host for the service endpoint. + // For instance "pubsub.googleapis.com". Required. + string host = 2; + + // The region if this is a regional endpoint. + // For instance "us-east1". Optional. + // If present the regional host should be constructed as follows: + // {region}-{host}. + string region = 3; + + // The port for the service endpoint. Optional. + // Languages that require a port to be specified should use the same + // default value as they do in library generation. + int32 port = 4; + + // Schemas for the service endpoint. + enum ServiceEndpointSchema { + // Each language to use their own default. 
+ // Languages that do not require the schema to be specified, + // may omit it when building the custom endpoint. + // Languages that require the schema to be specified should use the + // same default value as they do in library generation. + LANGUAGE_DEFAULT = 0; + + // Use HTTPS for service endpoint schema. + HTTPS = 1; + + // Use HTTP for service endpoint schema. + HTTP = 2; + } + } + } + + // A standard RPC operation. + message Standard { + // Describes how to initialize the request object. Required. + SimpleRequestInitialization request_initialization = 1; + + // Client call configuration. Optional. + ClientCall call = 2; + + // Describes how to handle simple responses. Optional. + SimpleResponseHandling response_handling = 3; + } + + // A paginated RPC operation. + message Paginated { + // Describes how to initialize the request object. This is the request + // object used for the initial paginated RPC call. Depending on how the + // response should be handled subsequent requests will be initialized either + // implicitly by the library or explicitly by the snippet. + SimpleRequestInitialization request_initialization = 1; + + // Client call configuration. Optional. + // This configures the client call, which in turn may result in several RPC + // calls. How this configuration is applied to RPC calls may be language + // specific. + ClientCall paginated_call = 2; + + // Describes how to handle paginated responses. + PaginatedResponseHandling paginated_handling = 3; + } + + // A long-running RPC operation. + message Lro { + // Describes how to initialize the request object. Required. + // This describes the request to the LRO operation itself and not to polling + // operations. + SimpleRequestInitialization request_initialization = 1; + + // Client call configuration. Optional. + // This configures the call to the LRO operation itself and not to polling + // operations. + ClientCall call = 2; + + // Describes how to handle the LRO response. Required. 
+ LroResponseHandling lro_handling = 3; + } + + // A client streaming RPC operation. + message ClientStreaming { + // Client call configuration. Optional. + // This configures the call that initializes the stream. + ClientCall initialization_call = 1; + + // The name of the variable to capture the client stream in, i.e. the object + // in which streaming requests are written. Required. + string client_stream_name = 2; + + // Describes how to initialize the streaming request objects. Required. + StreamingRequestInitialization request_initialization = 3; + + // Describes how to handle the response. Optional. + // Note that the response will be available after all the streaming requests + // have been written, each language should generate code accordingly. + SimpleResponseHandling response_handling = 4; + } + + // A server streaming RPC operation. + message ServerStreaming { + // Describes how to initialize the request object. Required. + SimpleRequestInitialization request_initialization = 1; + + // Client call configuration. Optional. + ClientCall initialization_call = 2; + + // The name of the variable to capture the server stream in, i.e. the object + // from which streaming responses will be read. Optional if the stream is + // not to be read. + string server_stream_name = 3; + + // Describes how to handle the streaming responses. Optional if the stream + // is not to be read. + StreamingResponseHandling response_handling = 4; + } + + // A bidirectional streaming RPC operation. + message BidiStreaming { + // Client call configuration. Optional. + // This configures the call that initializes de stream. + ClientCall initialization_call = 1; + + // The name of the variable to capture the client stream in, i.e. the object + // in which streaming requests are written. Required. + string client_stream_name = 2; + + // Describes how to initialize the streaming request objects. Required. 
+ StreamingRequestInitialization request_initialization = 3; + + // The name of the variable to capture the server stream in, i.e. the object + // from which streaming responses will be read. Optional if the stream is + // not to be read. + string server_stream_name = 4; + + // Describes how to handle the streaming responses. Optional if the stream + // is not to be read. + StreamingResponseHandling response_handling = 5; + } + + // An actual client service call. + // Note: Just pre_call statements for now, but this message is included so + // that adding per call options later on is not a breaking change. + message ClientCall { + // Statements to be executed before the initial call to the service client + // method is made. Whether that results in an RPC call or not is operation + // type and language dependent. May be empty. + // If any, statements should appear in generated code in the same order as + // they appear on this field. + repeated Statement pre_call = 2; + + // TODO: Add per call options, including retries, etc. + } + + // Describes how to initialize a simple request object. + message SimpleRequestInitialization { + // Statements to be executed before the request object is initialized. For + // instance, some Statement.Declaration statements of variables to be used + // in request initialization. May be empty. If any, statements will be + // executed in the same order as they appear on pre_request_initialization. + repeated Statement pre_request_initialization = 1; + + // The request value. Required. + // Should resolve to a type that is assignable to the request type of the + // RPC. + Expression request_value = 2; + + // The name for the variable that will hold the request object. + // For example "request". Required. + string request_name = 3; + } + + // Describes how to make requests to client streaming RPCs. 
+ // An iteration is defined which makes some per-iteration + // Expression.NameValue availables that may be used to define + // streaming_request. + message StreamingRequestInitialization { + // Describes how to initialize the first streaming request. Optional for + // operations that do not require a specific first request. + SimpleRequestInitialization first_streaming_request = 1; + + // The iteration to use for defining the streaming requests. Required. + Statement.Iteration iteration = 3; + + // The streaming request that may be defined with iteration-specific + // variables, and will result in a sequence of requests. Required. + // Initialization of streaming_request should be placed, in generated code, + // on the inner-most iteration defined by the iteration field as iterations + // may be nested. + SimpleRequestInitialization streaming_request = 4; + } + + // Describes how to handle a simple response object. + message SimpleResponseHandling { + // The name of the variable to capture the response in. May be unset if + // the RPC does not return anything or the response is not to be captured. + string response_name = 1; + } + + // Describes how to handle paginated responses. + message PaginatedResponseHandling { + // The name of the variable to capture the initial client call response in. + // Required. Note that this will capture the object representing the lazy + // item sequence. + string response_name = 1; + + // How to iterate over the items. Optional. If unset, no iterations will be + // performed over the paginated response. + oneof pagination_kind { + // Iterate item by item, lazily and automatically fetching pages as + // needed. + ByItem by_item = 2; + + // Iterate page by page, lazily and automatically fetching pages as + // needed. + ByPage by_page = 3; + + // Iterate page by page, explicitly using the next page token. 
+ // This pagination mode will modify the original request by subsequently + // setting the next page token obtained from the previous response. + NextPageToken next_page_token = 4; + } + + // Iterate item by item, lazily and automatically fetching pages as needed. + message ByItem { + // The name of the variable to capture the current item in. Required. + string item_name = 1; + + // Statements to execute for each item. Optional. + repeated Statement per_item_statements = 2; + } + + // Iterate page by page, lazily and automatically fetching pages as needed. + message ByPage { + // The name of the variable to capture the current page in. Required. + string page_name = 1; + + // Statements to execute for each page. Optional. + repeated Statement per_page_statements = 2; + + // By item iteration configuration within the current page. Optional. + ByItem by_item = 3; + } + + // Iterate page by page, explicitly using the next page token. + // This pagination mode will modify the original request by subsequently + // setting the next page token obtained from the previous response. + message NextPageToken { + // The name of the variable to capture the next page token in. Required. + string next_page_token_name = 1; + + // The explicit and guaranteed page size for fetched pages. Required. + // TODO: Double check that all languages have this modality. + // Otherwise, they may fallback to by page iteration. + Expression explicit_page_size = 2; + + // Configures how to iterate over the explicitly fetched page. + ByPage by_page = 3; + } + } + + // Describes how to handle LRO responses. + message LroResponseHandling { + // The name of the variable to capture the LRO response in. Required. + // This will capture the response to the LRO operaion call and not to + // polling operations. + string response_name = 1; + + // How to perform polling. Required. + PollingType polling_type = 2; + + // The name of the variable to capture the polling response in. 
Optional if + // the polling result should not be captured. Should be unset if + // PollingType.NONE is specified. + string polling_response_name = 3; + + // Configures the polling call. Optional. Should be unset if + // PollingType.NONE is specified. + ClientCall polling_call = 4; + + enum PollingType { + // Poll until completion. Default value. + UNTIL_COMPLETION = 0; + + // Poll just once. + ONCE = 1; + + // Do not poll. + NONE = 2; + } + } + + // Describes how to handle streaming responses. + message StreamingResponseHandling { + // The name of the variable to capture the current response in the stream. + // Required. + string current_response_name = 1; + + // Statements to execute for each stream response. Optional. + repeated Statement per_stream_response_statements = 2; + } +} + +// A stament that will translate into equivalent language-specific statements. +message Statement { + oneof statement_type { + // A declaration which may be used for parameters or variables. Note that + // the only form of assignment supported is on declaration. + // Note that a declaration has scope depending on whether it's included as + // a top level statement in any of the snippet sections or is a nested + // statement. + Declaration declaration = 1; + + // A statement to write information to sdtout. + StandardOutput standard_output = 2; + + // A return statement. + Return return = 3; + + // A conditional statement. One of two given sets of statements will be + // executed depending on the result of evaluating a given condition. + Conditional conditional = 4; + + // An iteration statement. A given Statement set will be executed + // repeatedly according to the iteration definition. + // Each iteration type will make a per-step Expression.NameValue set + // available that may be used withing the given Statement set. + Iteration iteration = 5; + } + + // A declaration which may be used for parameters or variables. 
+ // Note that the only form of assignment supported is on declaration. + message Declaration { + // The type of this declaration. Required unless the snippet is to be + // generated in non-statically typed languages only. + Type type = 1; + + // The name of the variable or parameter. Required. + string name = 2; + + // The value to assign to the variable or parameter. + // Optional for parameters. + // Required for variables as assignment outside of a declaration is not + // supported. + // Should resolve to a type that is assignable to this Declaration type. + Expression value = 3; + + // An optional description that will be included alongside the declaration + // likely as a code comment. + string description = 4; + } + + // A statement to write information to sdtout. + message StandardOutput { + // The value to write to sdtout. + // Should evaluate to a string value or resolve to a type that all languages + // are able to convert to string without specific code, for instance numeric + // values, and possibly protobuf messages as they may be converted to their + // JSON representation. + Expression value = 2; + } + + // A return statement. + message Return { + // The value to return. Should resolve to a type that is assignable to + // SnippetSignature.return_type. + Expression result = 1; + } + + // A conditional statement. One of two given sets of statements will be + // executed depending on the result of evaluating a given condition. + message Conditional { + // The condition to evaluate. Should evaluate to a bolean value. + Expression condition = 1; + + // The set of statements to execute if condition evaluates to true. + // The statements should be executed in the order that they appear. + repeated Statement on_true = 2; + + // The set of statements to execute if condition evaluates to false. + // The statements should be executed in the order that they appear. + repeated Statement on_false = 3; + } + + // An iteration statement. 
A given Statement set will be executed + // repeatedly according to the iteration definition. + // Each iteration type will make a per-step Expression.NameValue set + // available that may be used withing the given Statement set. + message Iteration { + oneof iteration_type { + // A numeric sequence iteration. + NumericSequenceIteration numeric_sequence_iteration = 1; + + // An iteration over repeated elements, i.e. an iteration over a list. + RepeatedIteration repeated_iteration = 2; + + // A map iteration. + MapIteration map_iteration = 3; + + // A bytes sequence iteration. + BytesIteration bytes_iteration = 4; + } + + // The set of statements to execute on each step of the iteration. + // The statements should be executed in the order that they appear. + // May be empty as iterations will also be used for streaming request + // initialization or streaming response handling. + repeated Statement statements = 5; + + // Represents a numeric sequence iteration. + // A numeric sequence is defined over which to iterate making the current + // element of the sequence available in a variable. + // It's ultimately the responsability of the user to define a finite + // sequence, although tooling may be provided to help. + message NumericSequenceIteration { + // Where to start the sequence at, ie. the first element of the iteration. + // Required. + // The Statement.Declaration.type should be a numeric type. + // The Statement.Declaration.value is required. + // The Statement.Declaration.name will be the name used to make the + // current element of the iteration available. + Statement.Declaration start_at = 1; + + // The step to advance the sequence with. Required. + oneof step { + // An increment, which may be a positive or negative value. + // Should resolve to a numeric type. + Expression increment = 3; + + // A multiplier, which may be less than or greater than 1. + // Should resolve to a numeric type. + Expression multiplier = 4; + } + + // When to end the sequence. 
Required. + oneof end { + // When the current value is less than or equal to this value. + // Should resolve to a numeric type. + Expression less_than_or_equal = 7; + + // When the current value is less than this value. + // Should resolve to a numeric type. + Expression less_than = 8; + + // When the current value is greater than or equal to this value. + // Should resolve to a numeric type. + Expression greater_than_or_equal = 9; + + // When the current value is greater than this value. + // Should resolve to a numeric type. + Expression greater_than = 10; + + // After a set number of steps. Must be non-negative. + // Should resolve to an integer type. + Expression total_steps = 11; + } + } + + // Represents an iteration over repeated elements. + // A repeated value is provided over which the iteration will occur making + // the current element of the sequence available in a variable. + message RepeatedIteration { + // The repeated elements to iterate over. Required. + // The Statement.Declaration.type should be Type.RepeatedType. + // The Statement.Declaration.value is required. + // TODO: Consider this to be a oneof Declaration or Expression.NameValue. + Statement.Declaration repeated_elements = 1; + + // The name of the variable that will hold the value of the current + // element on each iteration. For example "item". Required. The type of + // this variable will be the same as that of the elements in + // repeated_elements. + string current_name = 2; + + // TODO: Do we expose the element index as well? Optionally? + } + + // Represents an iteration over a map. + // A map value is provided over which the iteration will occur making + // the current key and element of the map availables in variables. + message MapIteration { + // The map to iterate over. Required. + // The Statement.Declaration.type should be Type.MapType. + // The Statement.Declaration.value is required. + // TODO: Consider this to be a oneof Declaration or Expression.NameValue. 
+      Statement.Declaration map = 1;
+
+      // The name of the variable that will hold the value of the current key
+      // on each iteration. For example "key". Required.
+      // The type of this variable will be the same as that of the keys in map.
+      string current_key_name = 2;
+
+      // The name of the variable that will hold the value associated to the
+      // current key on each iteration. For example "value". Required.
+      // The type of this variable will be the same as that of the values in
+      // map.
+      string current_value_name = 3;
+    }
+
+    // Represents an iteration over a byte sequence.
+    // A byte sequence is provided over which the iteration will occur making
+    // the current chunk of bytes available in a variable.
+    message BytesIteration {
+      // The byte sequence to iterate over. Required.
+      // The Statement.Declaration.type should be Type.BytesType.
+      // The Statement.Declaration.value is required.
+      Statement.Declaration byte_sequence = 1;
+
+      // How to split the byte sequence in chunks to iterate over.
+      oneof chunk {
+        // The size of the chunks to split the byte sequence in. The last chunk
+        // will be at most chunk_size. Must be positive.
+        // Should resolve to an integer type.
+        Expression chunk_size = 2;
+
+        // The total number of chunks to split the byte sequence into. Note that
+        // this is only possible when the byte sequence has a known length so it
+        // might not be possible with certain streams, for instance, with
+        // network streams. Must be positive.
+        // Should resolve to an integer type.
+        Expression total_chunks = 3;
+      }
+      // The type in which the chunk will be made available.
+      Type.BytesType chunk_type = 4;
+
+      // The name of the variable that will hold the value of the current chunk
+      // on each iteration. For example "chunk". Required.
+      // Will be of type chunk_type.
+      string current_name = 5;
+    }
+  }
+}
+
+// Represents the type of values. To be used, for instance, for
+// Statement.Declaration or for specifying the return type of the snippet.
+message Type {
+  oneof type_kind {
+    // Protobuf scalar types.
+    ScalarType scalar_type = 1;
+
+    // Protobuf enum types.
+    EnumType enum_type = 2;
+
+    // The bytes type.
+    BytesType bytes_type = 3;
+
+    // Protobuf message types.
+    MessageType message_type = 4;
+
+    // The repeated type.
+    RepeatedType repeated_type = 5;
+
+    // The map type.
+    MapType map_type = 6;
+  }
+
+  // Represents protobuf scalar types that should be translated to the usual
+  // language-specific types.
+  // https://developers.google.com/protocol-buffers/docs/proto3#scalar
+  // This could have been FieldDescriptorProto.Type except that proto2 enums
+  // cannot be used in proto3. Also, a few of the types in
+  // FieldDescriptorProto.Type are individually supported in SnippetGen Config.
+  // Values allowed for this type are:
+  // - Expression.default_value,
+  // - Expression.name_value as long as the identifier resolves to a type that
+  //   is assignable to this one.
+  // - Expression.number_value for the numeric FieldDescriptorProto.Type(s).
+  // - Expression.boolean_value for FieldDescriptorProto.Type.TYPE_BOOL.
+  // - Expression.string_value for FieldDescriptorProto.Type.TYPE_STRING.
+  // - Any other value that resolves to a type that is assignable to this one.
+  enum ScalarType {
+    // The scalar type has not been specified. Consumers should not see this
+    // value.
+    SCALAR_TYPE_UNDEFINED = 0;
+    TYPE_DOUBLE = 1;
+    TYPE_FLOAT = 2;
+    TYPE_INT64 = 3;
+    TYPE_UINT64 = 4;
+    TYPE_INT32 = 5;
+    TYPE_FIXED64 = 6;
+    TYPE_FIXED32 = 7;
+    TYPE_BOOL = 8;
+    TYPE_STRING = 9;
+    TYPE_UINT32 = 13;
+    TYPE_SFIXED32 = 15;
+    TYPE_SFIXED64 = 16;
+    TYPE_SINT32 = 17;
+    TYPE_SINT64 = 18;
+  }
+
+  // Represents protobuf enum types. These should be known by the
+  // microgenerators at generation time, so they'll probably be enums defined
+  // within the API being generated or one of its mixins.
+  message EnumType {
+    // The protobuf full enum name, including the protobuf package.
+    string enum_full_name = 1;
+  }
+
+  // Represents a bytes type.
+  message BytesType {
+    // The language-specific type that this bytes type should be generated as.
+    LanguageEquivalent language_equivalent = 1;
+
+    // Possible language-specific equivalents to a bytes type.
+    enum LanguageEquivalent {
+      // The same type used for representing protobuf bytes fields.
+      PROTOBUF_BYTES = 0;
+
+      // Language-specific string type, whose value will be understood as a
+      // base64 string representation of the bytes.
+      BASE64 = 1;
+
+      // Language-specific byte array type.
+      BYTE_ARRAY = 2;
+
+      // Language-specific stream type.
+      STREAM = 3;
+    }
+  }
+
+  // Represents protobuf message types. These should be known by the
+  // microgenerators at generation time, so they'll usually be well known types,
+  // messages defined within the API being generated or one of its mixins.
+  message MessageType {
+    // The protobuf full message name, including the protobuf package.
+    string message_full_name = 1;
+  }
+
+  // Represents a repeated type.
+  message RepeatedType {
+    // The type of the elements.
+    Type element_type = 1;
+
+    // The language-specific type that this repeated type should be generated
+    // as.
+    LanguageEquivalent language_equivalent = 2;
+
+    // Possible language-specific equivalents to a repeated type.
+    enum LanguageEquivalent {
+      // The same type used for representing protobuf repeated fields.
+      PROTOBUF_REPEATED = 0;
+
+      // Language-specific array type.
+      ARRAY = 1;
+
+      // Language-specific list type.
+      LIST = 2;
+    }
+  }
+
+  // Represents a map type.
+  message MapType {
+    // The type of the keys.
+    Type key_type = 1;
+
+    // The type of the values.
+    Type value_type = 2;
+
+    // The language-specific type that this map type should be generated as.
+    LanguageEquivalent language_equivalent = 3;
+
+    // Possible language-specific equivalents to a map type.
+    enum LanguageEquivalent {
+      // The same type used for representing protobuf map fields.
+      PROTOBUF_MAP = 0;
+
+      // Language-specific dictionary or map type.
+      DICTIONARY = 1;
+    }
+  }
+}
+
+// An expression to be used, for instance, for parameter, variable and request
+// initialization.
+// On type inference:
+// - When used for parameter or variable initialization, that is, when used in a
+//   Statement.Declaration, we have Statement.Declaration.Type.
+// - When used for request initialization, we know the request type.
+// - When used to initialize other elements, like client options, etc. we will
+//   know which types may be inferred on a case by case basis as specified by each
+//   of these elements' documentation.
+message Expression {
+  // The literal or calculated value.
+  oneof value {
+    // The null value. Might not be accepted by all types.
+    NullValue null_value = 1;
+
+    // The default value.
+    // Note that when a value is used, the type is always known/inferred. Supported
+    // types are currently protobuf scalar types and protobuf message types so
+    // all types should have a default value.
+    DefaultValue default_value = 2;
+
+    // A variable or parameter name, and possibly a path within.
+    NameValue name_value = 3;
+
+    // A number literal.
+    double number_value = 4;
+
+    // A Boolean literal.
+    bool boolean_value = 5;
+
+    // A string literal.
+    string string_value = 6;
+
+    // An enum value. The text is to be the enum value name as defined in
+    // protobuf. For instance, for a protobuf enum
+    // `enum SampleEnum { VALUE_0 = 0; VALUE_1 = 1; }`
+    // the accepted values here would be VALUE_0 and VALUE_1.
+    string enum_value = 7;
+
+    // A bytes value. This represents initialization of objects from which
+    // arbitrary byte sequences may be obtained.
+    BytesValue bytes_value = 8;
+
+    // A complex value. This represents initialization of complex objects, most
+    // likely of protobuf messages.
+    ComplexValue complex_value = 9;
+
+    // A list value. This represents initialization of collections, list, arrays
+    // and similar values.
+ RepeatedValue list_value = 10; + + // A map value. This represents initialization of maps, dictionaries and + // similar values. + MapValue map_value = 11; + + // A conditional value. This expression has one of two given values + // depending on the result of evaluating a given condition. + ConditionalOperator conditional_value = 12; + + // TODO: We will need a resource name operator, languages that have resource + // name support can translate this operator to their language-specific + // resource name helpers and languages that don't can simply build the + // resource name string. + } + + // An optional description that will be included in the snippet alongside the + // value, likely as a code comment. + string description = 13; + + // The null value. Might not be accepted by all types. + enum NullValue { + // Null value. + NULL_VALUE = 0; + } + + // The default value. + // Note that when a value is used, the type is always known/inferred. + // Supported types are currently protobuf scalar types and protobuf message + // types so all types should have a default value. + // For protobuf message types, the default value should be the empty message. + enum DefaultValue { + // Default value. + DEFAULT_VALUE = 0; + } + + // A variable or parameter name. + message NameValue { + // The name of the variable or parameter name. Required. + string name = 1; + + // A path within name that refers to a nested value. Optional. + // Note that this path must be valid across all languages, so, the following + // rules apply. + // - A path in a NameValue is only allowed if name refers to a value of + // google.cloud.tools.snippetgen.configlanguage.v1.Type.MessageType, i.e. + // a protobuf message. + // - The path segments should be named as they appear on the proto + // definition and not as they are on any specific language. 
+    // For instance, if the following message is defined:
+    // ```
+    // message SampleMessage {
+    //   int one_field = 0;
+    //   SampleMessage nested_field = 1;
+    // }
+    // ```
+    // and a Declaration of
+    // - Declaration.type => SampleMessage and
+    // - Declaration.name => sample_value
+    // then possible NameValues that refer to the variable declared are:
+    // - NameValue.name => sample_value and NameValue.path => unset to reference
+    //   the value of the sample_value variable, i.e. using the variable
+    //   sample_value
+    // - NameValue.name => sample_value and NameValue.path => one_field to
+    //   reference the value of the one_field value of the message instance
+    //   stored in the sample_value variable, i.e. using the variable
+    //   sample_value.one_field.
+    // - NameValue.name => sample_value and NameValue.path => nested_field, one_field to
+    //   reference the one_field value of the nested_field value of the message
+    //   instance stored in the sample_value variable, i.e. using the variable
+    //   sample_value.nested_field.one_field.
+    repeated string path = 2;
+  }
+
+  // A bytes value. This represents initialization of objects from which
+  // arbitrary byte sequences may be obtained.
+  message BytesValue {
+    oneof value {
+      // A Base64 encoded string.
+      // This expression should resolve to a string value.
+      Expression base64_string = 1;
+
+      // A file stream.
+      FileStream file_stream = 2;
+    }
+
+    // A language-specific file stream.
+    message FileStream {
+      // The path of the file to build the stream from.
+      // This expression should evaluate to a string value.
+      Expression file_path = 1;
+    }
+  }
+
+  // A complex value. This represents initialization of complex objects, most
+  // likely of protobuf messages.
+  message ComplexValue {
+    // This is a simple map from message property name to Expression.
+    // - All keys in the map should correspond to top level properties of the
+    //   protobuf message.
+ // - Nested properties may be initialized thanks to the recursive nature of + // ComplexValue. + // - Each Expression in the map should resolve to the type of the property + // whose name is the associated key. + map properties = 1; + } + + // A list value. This represens initialization of collections, list, arrays + // and similar values. + message RepeatedValue { + // The values that should be used to initialize a language-specific + // collection, list, array or similar. + // - The values should be used in the same order as they appear in values, + // regardless of whether the target collection type represents an ordered + // collection or not. + // - Each Expression should resolve to a type that is assignable to the type + // of the elements in the target collection. + repeated Expression values = 1; + } + + // A map value. This represents initialization of maps, dictionaries and + // similar values. + // Note that we cannot use a protobuf map for the definition of MapValue + // because protobuf map do not accept message types as the key type. + message MapValue { + // The keys to use for initializing a language-specific map, dictionary or + // similar. + // - Each key Expression should resolve to a type that is assignable to + // the key type of the target map. + repeated Expression keys = 1; + + // The values to use for initializing a language-specific map, dictionary or + // similar. + // - Each value Expression should resolve to a type that is assignable to + // the value type of the target map. + // - Each value should be present in the same order as the corresponding key + // is in keys. + repeated Expression values = 2; + } + + // A conditional value. This expression has one of two given values + // depending on the result of evaluating a given condition. + message ConditionalOperator { + // The condition to evaluate. Should resolve to a Boolean value. + Expression condition = 1; + + // The value of this expression if condition evaluates to true. 
on_true + // should resolve to a type that is assignable to the target type of this + // expression. + Expression on_true = 2; + + // The value of this expression if condition evaluates to false. on_false + // should resolve to a type that is assignable to the target type of this + // expression. + Expression on_false = 3; + } +} diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py new file mode 100644 index 000000000000..938060d5eaf5 --- /dev/null +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: snippet_config_language.proto +# type: ignore +"""Generated protocol buffer code.""" +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1dsnippet_config_language.proto\x12/google.cloud.tools.snippetgen.configlanguage.v1\x1a google/protobuf/descriptor.proto\"\xcd\x02\n\rSnippetConfig\x12X\n\x08metadata\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.SnippetConfigMetadata\x12\x41\n\x03rpc\x18\x02 \x01(\x0b\x32\x34.google.cloud.tools.snippetgen.configlanguage.v1.Rpc\x12T\n\tsignature\x18\x03 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.SnippetSignature\x12I\n\x07snippet\x18\x04 \x01(\x0b\x32\x38.google.cloud.tools.snippetgen.configlanguage.v1.Snippet\"\xd3\x01\n\x15SnippetConfigMetadata\x12\x0f\n\x07skipped\x18\x01 \x01(\x08\x12\x63\n\x11skipped_languages\x18\x02 \x03(\x0e\x32H.google.cloud.tools.snippetgen.configlanguage.v1.GeneratorOutputLanguage\x12\x11\n\tconfig_id\x18\x03 \x01(\t\x12\x14\n\x0csnippet_name\x18\x04 \x01(\t\x12\x1b\n\x13snippet_description\x18\x05 \x01(\t\"Y\n\x03Rpc\x12\x15\n\rproto_package\x18\x01 \x01(\t\x12\x13\n\x0b\x61pi_version\x18\x02 \x03(\t\x12\x14\n\x0cservice_name\x18\x03 \x01(\t\x12\x10\n\x08rpc_name\x18\x04 \x01(\t\"\x99\x03\n\x10SnippetSignature\x12\x1b\n\x13snippet_method_name\x18\x01 \x01(\t\x12J\n\x0breturn_type\x18\x02 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12i\n\x0fsync_preference\x18\x03 \x01(\x0e\x32P.google.cloud.tools.snippetgen.configlanguage.v1.SnippetSignature.SyncPreference\x12Z\n\nparameters\x18\x04 
\x03(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\"U\n\x0eSyncPreference\x12\x16\n\x12LANGUAGE_PREFERRED\x10\x00\x12\x10\n\x0cPREFER_ASYNC\x10\x01\x12\x0f\n\x0bPREFER_SYNC\x10\x02\x12\x08\n\x04\x42OTH\x10\x03\"\x91,\n\x07Snippet\x12t\n\x1dservice_client_initialization\x18\x01 \x01(\x0b\x32M.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization\x12U\n\x08standard\x18\x02 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StandardH\x00\x12W\n\tpaginated\x18\x03 \x01(\x0b\x32\x42.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedH\x00\x12K\n\x03lro\x18\x04 \x01(\x0b\x32<.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroH\x00\x12\x64\n\x10\x63lient_streaming\x18\x05 \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientStreamingH\x00\x12\x64\n\x10server_streaming\x18\x06 \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ServerStreamingH\x00\x12`\n\x0e\x62idi_streaming\x18\x07 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.BidiStreamingH\x00\x12T\n\x10\x66inal_statements\x18\x08 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xff\x03\n\x14\x43lientInitialization\x12]\n\x19pre_client_initialization\x18\x01 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12~\n\x17\x63ustom_service_endpoint\x18\x02 \x01(\x0b\x32].google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization.ServiceEndpoint\x1a\x87\x02\n\x0fServiceEndpoint\x12\x83\x01\n\x06schema\x18\x01 \x01(\x0e\x32s.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 
\x01(\x05\"B\n\x15ServiceEndpointSchema\x12\x14\n\x10LANGUAGE_DEFAULT\x10\x00\x12\t\n\x05HTTPS\x10\x01\x12\x08\n\x04HTTP\x10\x02\x1a\xbf\x02\n\x08Standard\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12Q\n\x04\x63\x61ll\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12j\n\x11response_handling\x18\x03 \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleResponseHandling\x1a\xce\x02\n\tPaginated\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12[\n\x0epaginated_call\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12n\n\x12paginated_handling\x18\x03 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling\x1a\xb2\x02\n\x03Lro\x12t\n\x16request_initialization\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12Q\n\x04\x63\x61ll\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x62\n\x0clro_handling\x18\x03 \x01(\x0b\x32L.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroResponseHandling\x1a\xf4\x02\n\x0f\x43lientStreaming\x12`\n\x13initialization_call\x18\x01 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12\x63lient_stream_name\x18\x02 \x01(\t\x12w\n\x16request_initialization\x18\x03 \x01(\x0b\x32W.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingRequestInitialization\x12j\n\x11response_handling\x18\x04 \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleResponseHandling\x1a\xf4\x02\n\x0fServerStreaming\x12t\n\x16request_initialization\x18\x01 
\x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12`\n\x13initialization_call\x18\x02 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12server_stream_name\x18\x03 \x01(\t\x12m\n\x11response_handling\x18\x04 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingResponseHandling\x1a\x91\x03\n\rBidiStreaming\x12`\n\x13initialization_call\x18\x01 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\x12\x1a\n\x12\x63lient_stream_name\x18\x02 \x01(\t\x12w\n\x16request_initialization\x18\x03 \x01(\x0b\x32W.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingRequestInitialization\x12\x1a\n\x12server_stream_name\x18\x04 \x01(\t\x12m\n\x11response_handling\x18\x05 \x01(\x0b\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.StreamingResponseHandling\x1aZ\n\nClientCall\x12L\n\x08pre_call\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xe7\x01\n\x1bSimpleRequestInitialization\x12^\n\x1apre_request_initialization\x18\x01 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12R\n\rrequest_value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12\x14\n\x0crequest_name\x18\x03 \x01(\t\x1a\xe1\x02\n\x1eStreamingRequestInitialization\x12u\n\x17\x66irst_streaming_request\x18\x01 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x12W\n\titeration\x18\x03 \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration\x12o\n\x11streaming_request\x18\x04 \x01(\x0b\x32T.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.SimpleRequestInitialization\x1a/\n\x16SimpleResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x1a\xec\x07\n\x19PaginatedResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x12l\n\x07\x62y_item\x18\x02 
\x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByItemH\x00\x12l\n\x07\x62y_page\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByPageH\x00\x12{\n\x0fnext_page_token\x18\x04 \x01(\x0b\x32`.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.NextPageTokenH\x00\x1at\n\x06\x42yItem\x12\x11\n\titem_name\x18\x01 \x01(\t\x12W\n\x13per_item_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xe0\x01\n\x06\x42yPage\x12\x11\n\tpage_name\x18\x01 \x01(\t\x12W\n\x13per_page_statements\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12j\n\x07\x62y_item\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByItem\x1a\xf2\x01\n\rNextPageToken\x12\x1c\n\x14next_page_token_name\x18\x01 \x01(\t\x12W\n\x12\x65xplicit_page_size\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12j\n\x07\x62y_page\x18\x03 \x01(\x0b\x32Y.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.PaginatedResponseHandling.ByPageB\x11\n\x0fpagination_kind\x1a\xcf\x02\n\x13LroResponseHandling\x12\x15\n\rresponse_name\x18\x01 \x01(\t\x12n\n\x0cpolling_type\x18\x02 \x01(\x0e\x32X.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.LroResponseHandling.PollingType\x12\x1d\n\x15polling_response_name\x18\x03 \x01(\t\x12Y\n\x0cpolling_call\x18\x04 \x01(\x0b\x32\x43.google.cloud.tools.snippetgen.configlanguage.v1.Snippet.ClientCall\"7\n\x0bPollingType\x12\x14\n\x10UNTIL_COMPLETION\x10\x00\x12\x08\n\x04ONCE\x10\x01\x12\x08\n\x04NONE\x10\x02\x1a\x9e\x01\n\x19StreamingResponseHandling\x12\x1d\n\x15\x63urrent_response_name\x18\x01 \x01(\t\x12\x62\n\x1eper_stream_response_statements\x18\x02 
\x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.StatementB\x06\n\x04\x63\x61ll\"\xc5\x18\n\tStatement\x12]\n\x0b\x64\x65\x63laration\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.DeclarationH\x00\x12\x64\n\x0fstandard_output\x18\x02 \x01(\x0b\x32I.google.cloud.tools.snippetgen.configlanguage.v1.Statement.StandardOutputH\x00\x12S\n\x06return\x18\x03 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Statement.ReturnH\x00\x12]\n\x0b\x63onditional\x18\x04 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.ConditionalH\x00\x12Y\n\titeration\x18\x05 \x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Statement.IterationH\x00\x1a\xc1\x01\n\x0b\x44\x65\x63laration\x12\x43\n\x04type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12\x0c\n\x04name\x18\x02 \x01(\t\x12J\n\x05value\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x1a\\\n\x0eStandardOutput\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1aU\n\x06Return\x12K\n\x06result\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\xf8\x01\n\x0b\x43onditional\x12N\n\tcondition\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12K\n\x07on_true\x18\x02 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x12L\n\x08on_false\x18\x03 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xdd\x0f\n\tIteration\x12\x83\x01\n\x1anumeric_sequence_iteration\x18\x01 \x01(\x0b\x32].google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.NumericSequenceIterationH\x00\x12t\n\x12repeated_iteration\x18\x02 \x01(\x0b\x32V.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.RepeatedIterationH\x00\x12j\n\rmap_iteration\x18\x03 
\x01(\x0b\x32Q.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.MapIterationH\x00\x12n\n\x0f\x62ytes_iteration\x18\x04 \x01(\x0b\x32S.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Iteration.BytesIterationH\x00\x12N\n\nstatements\x18\x05 \x03(\x0b\x32:.google.cloud.tools.snippetgen.configlanguage.v1.Statement\x1a\xdc\x05\n\x18NumericSequenceIteration\x12X\n\x08start_at\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12P\n\tincrement\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12Q\n\nmultiplier\x18\x04 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12Y\n\x12less_than_or_equal\x18\x07 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12P\n\tless_than\x18\x08 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12\\\n\x15greater_than_or_equal\x18\t \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12S\n\x0cgreater_than\x18\n \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x12R\n\x0btotal_steps\x18\x0b \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x01\x42\x06\n\x04stepB\x05\n\x03\x65nd\x1a\x8c\x01\n\x11RepeatedIteration\x12\x61\n\x11repeated_elements\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12\x14\n\x0c\x63urrent_name\x18\x02 \x01(\t\x1a\x99\x01\n\x0cMapIteration\x12S\n\x03map\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12\x18\n\x10\x63urrent_key_name\x18\x02 \x01(\t\x12\x1a\n\x12\x63urrent_value_name\x18\x03 \x01(\t\x1a\x8b\x03\n\x0e\x42ytesIteration\x12]\n\rbyte_sequence\x18\x01 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Statement.Declaration\x12Q\n\nchunk_size\x18\x02 
\x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12S\n\x0ctotal_chunks\x18\x03 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12S\n\nchunk_type\x18\x04 \x01(\x0b\x32?.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesType\x12\x14\n\x0c\x63urrent_name\x18\x05 \x01(\tB\x07\n\x05\x63hunkB\x10\n\x0eiteration_typeB\x10\n\x0estatement_type\"\xb6\r\n\x04Type\x12W\n\x0bscalar_type\x18\x01 \x01(\x0e\x32@.google.cloud.tools.snippetgen.configlanguage.v1.Type.ScalarTypeH\x00\x12S\n\tenum_type\x18\x02 \x01(\x0b\x32>.google.cloud.tools.snippetgen.configlanguage.v1.Type.EnumTypeH\x00\x12U\n\nbytes_type\x18\x03 \x01(\x0b\x32?.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesTypeH\x00\x12Y\n\x0cmessage_type\x18\x04 \x01(\x0b\x32\x41.google.cloud.tools.snippetgen.configlanguage.v1.Type.MessageTypeH\x00\x12[\n\rrepeated_type\x18\x05 \x01(\x0b\x32\x42.google.cloud.tools.snippetgen.configlanguage.v1.Type.RepeatedTypeH\x00\x12Q\n\x08map_type\x18\x06 \x01(\x0b\x32=.google.cloud.tools.snippetgen.configlanguage.v1.Type.MapTypeH\x00\x1a\"\n\x08\x45numType\x12\x16\n\x0e\x65num_full_name\x18\x01 \x01(\t\x1a\xce\x01\n\tBytesType\x12o\n\x13language_equivalent\x18\x01 \x01(\x0e\x32R.google.cloud.tools.snippetgen.configlanguage.v1.Type.BytesType.LanguageEquivalent\"P\n\x12LanguageEquivalent\x12\x12\n\x0ePROTOBUF_BYTES\x10\x00\x12\n\n\x06\x42\x41SE64\x10\x01\x12\x0e\n\nBYTE_ARRAY\x10\x02\x12\n\n\x06STREAM\x10\x03\x1a(\n\x0bMessageType\x12\x19\n\x11message_full_name\x18\x01 \x01(\t\x1a\x91\x02\n\x0cRepeatedType\x12K\n\x0c\x65lement_type\x18\x01 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12r\n\x13language_equivalent\x18\x02 \x01(\x0e\x32U.google.cloud.tools.snippetgen.configlanguage.v1.Type.RepeatedType.LanguageEquivalent\"@\n\x12LanguageEquivalent\x12\x15\n\x11PROTOBUF_REPEATED\x10\x00\x12\t\n\x05\x41RRAY\x10\x01\x12\x08\n\x04LIST\x10\x02\x1a\xc4\x02\n\x07MapType\x12G\n\x08key_type\x18\x01 
\x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12I\n\nvalue_type\x18\x02 \x01(\x0b\x32\x35.google.cloud.tools.snippetgen.configlanguage.v1.Type\x12m\n\x13language_equivalent\x18\x03 \x01(\x0e\x32P.google.cloud.tools.snippetgen.configlanguage.v1.Type.MapType.LanguageEquivalent\"6\n\x12LanguageEquivalent\x12\x10\n\x0cPROTOBUF_MAP\x10\x00\x12\x0e\n\nDICTIONARY\x10\x01\"\x96\x02\n\nScalarType\x12\x19\n\x15SCALAR_TYPE_UNDEFINED\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\x42\x0b\n\ttype_kind\"\xa8\x10\n\nExpression\x12[\n\nnull_value\x18\x01 \x01(\x0e\x32\x45.google.cloud.tools.snippetgen.configlanguage.v1.Expression.NullValueH\x00\x12\x61\n\rdefault_value\x18\x02 \x01(\x0e\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Expression.DefaultValueH\x00\x12[\n\nname_value\x18\x03 \x01(\x0b\x32\x45.google.cloud.tools.snippetgen.configlanguage.v1.Expression.NameValueH\x00\x12\x16\n\x0cnumber_value\x18\x04 \x01(\x01H\x00\x12\x17\n\rboolean_value\x18\x05 \x01(\x08H\x00\x12\x16\n\x0cstring_value\x18\x06 \x01(\tH\x00\x12\x14\n\nenum_value\x18\x07 \x01(\tH\x00\x12]\n\x0b\x62ytes_value\x18\x08 \x01(\x0b\x32\x46.google.cloud.tools.snippetgen.configlanguage.v1.Expression.BytesValueH\x00\x12\x61\n\rcomplex_value\x18\t \x01(\x0b\x32H.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ComplexValueH\x00\x12_\n\nlist_value\x18\n \x01(\x0b\x32I.google.cloud.tools.snippetgen.configlanguage.v1.Expression.RepeatedValueH\x00\x12Y\n\tmap_value\x18\x0b 
\x01(\x0b\x32\x44.google.cloud.tools.snippetgen.configlanguage.v1.Expression.MapValueH\x00\x12l\n\x11\x63onditional_value\x18\x0c \x01(\x0b\x32O.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ConditionalOperatorH\x00\x12\x13\n\x0b\x64\x65scription\x18\r \x01(\t\x1a\'\n\tNameValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x03(\t\x1a\xb3\x02\n\nBytesValue\x12T\n\rbase64_string\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionH\x00\x12h\n\x0b\x66ile_stream\x18\x02 \x01(\x0b\x32Q.google.cloud.tools.snippetgen.configlanguage.v1.Expression.BytesValue.FileStreamH\x00\x1a\\\n\nFileStream\x12N\n\tfile_path\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.ExpressionB\x07\n\x05value\x1a\xec\x01\n\x0c\x43omplexValue\x12l\n\nproperties\x18\x01 \x03(\x0b\x32X.google.cloud.tools.snippetgen.configlanguage.v1.Expression.ComplexValue.PropertiesEntry\x1an\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression:\x02\x38\x01\x1a\\\n\rRepeatedValue\x12K\n\x06values\x18\x01 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\xa2\x01\n\x08MapValue\x12I\n\x04keys\x18\x01 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12K\n\x06values\x18\x02 \x03(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x1a\x82\x02\n\x13\x43onditionalOperator\x12N\n\tcondition\x18\x01 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12L\n\x07on_true\x18\x02 \x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\x12M\n\x08on_false\x18\x03 
\x01(\x0b\x32;.google.cloud.tools.snippetgen.configlanguage.v1.Expression\"\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\"!\n\x0c\x44\x65\x66\x61ultValue\x12\x11\n\rDEFAULT_VALUE\x10\x00\x42\x07\n\x05value*\xa3\x01\n\x17GeneratorOutputLanguage\x12)\n%GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43_PLUS_PLUS\x10\x01\x12\x0b\n\x07\x43_SHARP\x10\x02\x12\x06\n\x02GO\x10\x03\x12\x08\n\x04JAVA\x10\x04\x12\x0e\n\nJAVASCRIPT\x10\x05\x12\x07\n\x03PHP\x10\x06\x12\n\n\x06PYTHON\x10\x07\x12\x08\n\x04RUBY\x10\x08\x42\xee\x01\n3com.google.cloud.tools.snippetgen.configlanguage.v1B\x1aSnippetConfigLanguageProtoP\x01\xaa\x02/Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1\xca\x02/Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1\xea\x02\x34Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1b\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, 'snippet_config_language_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b'\n3com.google.cloud.tools.snippetgen.configlanguage.v1B\032SnippetConfigLanguageProtoP\001\252\002/Google.Cloud.Tools.SnippetGen.ConfigLanguage.V1\312\002/Google\\Cloud\\Tools\\SnippetGen\\ConfigLanguage\\V1\352\0024Google::Cloud::Tools::SnippetGen::ConfigLanguage::V1' + _EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._options = None + _EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._serialized_options = b'8\001' + _GENERATOROUTPUTLANGUAGE._serialized_start = 13778 + _GENERATOROUTPUTLANGUAGE._serialized_end = 13941 + _SNIPPETCONFIG._serialized_start = 117 + _SNIPPETCONFIG._serialized_end = 450 + _SNIPPETCONFIGMETADATA._serialized_start = 453 + _SNIPPETCONFIGMETADATA._serialized_end = 664 + _RPC._serialized_start = 666 + _RPC._serialized_end = 755 + _SNIPPETSIGNATURE._serialized_start = 758 + _SNIPPETSIGNATURE._serialized_end = 1167 + _SNIPPETSIGNATURE_SYNCPREFERENCE._serialized_start = 1082 + 
_SNIPPETSIGNATURE_SYNCPREFERENCE._serialized_end = 1167 + _SNIPPET._serialized_start = 1170 + _SNIPPET._serialized_end = 6819 + _SNIPPET_CLIENTINITIALIZATION._serialized_start = 1941 + _SNIPPET_CLIENTINITIALIZATION._serialized_end = 2452 + _SNIPPET_CLIENTINITIALIZATION_SERVICEENDPOINT._serialized_start = 2189 + _SNIPPET_CLIENTINITIALIZATION_SERVICEENDPOINT._serialized_end = 2452 + _SNIPPET_CLIENTINITIALIZATION_SERVICEENDPOINT_SERVICEENDPOINTSCHEMA._serialized_start = 2386 + _SNIPPET_CLIENTINITIALIZATION_SERVICEENDPOINT_SERVICEENDPOINTSCHEMA._serialized_end = 2452 + _SNIPPET_STANDARD._serialized_start = 2455 + _SNIPPET_STANDARD._serialized_end = 2774 + _SNIPPET_PAGINATED._serialized_start = 2777 + _SNIPPET_PAGINATED._serialized_end = 3111 + _SNIPPET_LRO._serialized_start = 3114 + _SNIPPET_LRO._serialized_end = 3420 + _SNIPPET_CLIENTSTREAMING._serialized_start = 3423 + _SNIPPET_CLIENTSTREAMING._serialized_end = 3795 + _SNIPPET_SERVERSTREAMING._serialized_start = 3798 + _SNIPPET_SERVERSTREAMING._serialized_end = 4170 + _SNIPPET_BIDISTREAMING._serialized_start = 4173 + _SNIPPET_BIDISTREAMING._serialized_end = 4574 + _SNIPPET_CLIENTCALL._serialized_start = 4576 + _SNIPPET_CLIENTCALL._serialized_end = 4666 + _SNIPPET_SIMPLEREQUESTINITIALIZATION._serialized_start = 4669 + _SNIPPET_SIMPLEREQUESTINITIALIZATION._serialized_end = 4900 + _SNIPPET_STREAMINGREQUESTINITIALIZATION._serialized_start = 4903 + _SNIPPET_STREAMINGREQUESTINITIALIZATION._serialized_end = 5256 + _SNIPPET_SIMPLERESPONSEHANDLING._serialized_start = 5258 + _SNIPPET_SIMPLERESPONSEHANDLING._serialized_end = 5305 + _SNIPPET_PAGINATEDRESPONSEHANDLING._serialized_start = 5308 + _SNIPPET_PAGINATEDRESPONSEHANDLING._serialized_end = 6312 + _SNIPPET_PAGINATEDRESPONSEHANDLING_BYITEM._serialized_start = 5705 + _SNIPPET_PAGINATEDRESPONSEHANDLING_BYITEM._serialized_end = 5821 + _SNIPPET_PAGINATEDRESPONSEHANDLING_BYPAGE._serialized_start = 5824 + _SNIPPET_PAGINATEDRESPONSEHANDLING_BYPAGE._serialized_end = 6048 + 
_SNIPPET_PAGINATEDRESPONSEHANDLING_NEXTPAGETOKEN._serialized_start = 6051 + _SNIPPET_PAGINATEDRESPONSEHANDLING_NEXTPAGETOKEN._serialized_end = 6293 + _SNIPPET_LRORESPONSEHANDLING._serialized_start = 6315 + _SNIPPET_LRORESPONSEHANDLING._serialized_end = 6650 + _SNIPPET_LRORESPONSEHANDLING_POLLINGTYPE._serialized_start = 6595 + _SNIPPET_LRORESPONSEHANDLING_POLLINGTYPE._serialized_end = 6650 + _SNIPPET_STREAMINGRESPONSEHANDLING._serialized_start = 6653 + _SNIPPET_STREAMINGRESPONSEHANDLING._serialized_end = 6811 + _STATEMENT._serialized_start = 6822 + _STATEMENT._serialized_end = 9963 + _STATEMENT_DECLARATION._serialized_start = 7304 + _STATEMENT_DECLARATION._serialized_end = 7497 + _STATEMENT_STANDARDOUTPUT._serialized_start = 7499 + _STATEMENT_STANDARDOUTPUT._serialized_end = 7591 + _STATEMENT_RETURN._serialized_start = 7593 + _STATEMENT_RETURN._serialized_end = 7678 + _STATEMENT_CONDITIONAL._serialized_start = 7681 + _STATEMENT_CONDITIONAL._serialized_end = 7929 + _STATEMENT_ITERATION._serialized_start = 7932 + _STATEMENT_ITERATION._serialized_end = 9945 + _STATEMENT_ITERATION_NUMERICSEQUENCEITERATION._serialized_start = 8498 + _STATEMENT_ITERATION_NUMERICSEQUENCEITERATION._serialized_end = 9230 + _STATEMENT_ITERATION_REPEATEDITERATION._serialized_start = 9233 + _STATEMENT_ITERATION_REPEATEDITERATION._serialized_end = 9373 + _STATEMENT_ITERATION_MAPITERATION._serialized_start = 9376 + _STATEMENT_ITERATION_MAPITERATION._serialized_end = 9529 + _STATEMENT_ITERATION_BYTESITERATION._serialized_start = 9532 + _STATEMENT_ITERATION_BYTESITERATION._serialized_end = 9927 + _TYPE._serialized_start = 9966 + _TYPE._serialized_end = 11684 + _TYPE_ENUMTYPE._serialized_start = 10502 + _TYPE_ENUMTYPE._serialized_end = 10536 + _TYPE_BYTESTYPE._serialized_start = 10539 + _TYPE_BYTESTYPE._serialized_end = 10745 + _TYPE_BYTESTYPE_LANGUAGEEQUIVALENT._serialized_start = 10665 + _TYPE_BYTESTYPE_LANGUAGEEQUIVALENT._serialized_end = 10745 + _TYPE_MESSAGETYPE._serialized_start = 10747 + 
_TYPE_MESSAGETYPE._serialized_end = 10787 + _TYPE_REPEATEDTYPE._serialized_start = 10790 + _TYPE_REPEATEDTYPE._serialized_end = 11063 + _TYPE_REPEATEDTYPE_LANGUAGEEQUIVALENT._serialized_start = 10999 + _TYPE_REPEATEDTYPE_LANGUAGEEQUIVALENT._serialized_end = 11063 + _TYPE_MAPTYPE._serialized_start = 11066 + _TYPE_MAPTYPE._serialized_end = 11390 + _TYPE_MAPTYPE_LANGUAGEEQUIVALENT._serialized_start = 11336 + _TYPE_MAPTYPE_LANGUAGEEQUIVALENT._serialized_end = 11390 + _TYPE_SCALARTYPE._serialized_start = 11393 + _TYPE_SCALARTYPE._serialized_end = 11671 + _EXPRESSION._serialized_start = 11687 + _EXPRESSION._serialized_end = 13775 + _EXPRESSION_NAMEVALUE._serialized_start = 12594 + _EXPRESSION_NAMEVALUE._serialized_end = 12633 + _EXPRESSION_BYTESVALUE._serialized_start = 12636 + _EXPRESSION_BYTESVALUE._serialized_end = 12943 + _EXPRESSION_BYTESVALUE_FILESTREAM._serialized_start = 12842 + _EXPRESSION_BYTESVALUE_FILESTREAM._serialized_end = 12934 + _EXPRESSION_COMPLEXVALUE._serialized_start = 12946 + _EXPRESSION_COMPLEXVALUE._serialized_end = 13182 + _EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._serialized_start = 13072 + _EXPRESSION_COMPLEXVALUE_PROPERTIESENTRY._serialized_end = 13182 + _EXPRESSION_REPEATEDVALUE._serialized_start = 13184 + _EXPRESSION_REPEATEDVALUE._serialized_end = 13276 + _EXPRESSION_MAPVALUE._serialized_start = 13279 + _EXPRESSION_MAPVALUE._serialized_end = 13441 + _EXPRESSION_CONDITIONALOPERATOR._serialized_start = 13444 + _EXPRESSION_CONDITIONALOPERATOR._serialized_end = 13702 + _EXPRESSION_NULLVALUE._serialized_start = 13704 + _EXPRESSION_NULLVALUE._serialized_end = 13731 + _EXPRESSION_DEFAULTVALUE._serialized_start = 13733 + _EXPRESSION_DEFAULTVALUE._serialized_end = 13766 +# @@protoc_insertion_point(module_scope) diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/README.md b/packages/gapic-generator/tests/configurable_snippetgen/resources/README.md new file mode 100644 index 000000000000..53bd169411f8 --- /dev/null +++ 
b/packages/gapic-generator/tests/configurable_snippetgen/resources/README.md @@ -0,0 +1,38 @@ +# Resources for testing Configurable SnippetGen + +Each subdirectory should correspond to an API and contain three types of files. For example: + +``` +. +├── README.md +└── speech + ├── request.desc + ├── speech_createCustomClass.json + └── speech_v1_generated_adaptation_create_custom_class_basic_async.py +``` + +### `request.desc`: + +This is a copy of the CodeGeneratorRequest message used by the GAPIC generator. To generate it: + +1. Install `protoc`. +1. Install [gapic-generator-python](https://github.com/googleapis/gapic-generator-python). +1. Run the following command from the root of a local copy of [googleapis](https://github.com/googleapis/googleapis): + + ``` + API=speech + VERSION=v1 + + protoc google/cloud/$API/$VERSION/*.proto \ + --experimental_allow_proto3_optional \ + --proto_path=. \ + --dump_out=. + ``` + +### Snippet config files + +Handwritten json file containing the configuration of a code snippet. Each config file typically represents both `sync` and `async` Python snippets, and could specify more than one API version. + +### Golden files + +One or more expected snippet for each config file. 
diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/request.desc b/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/request.desc new file mode 100644 index 0000000000000000000000000000000000000000..44518c7599624663bc3ca2c7cefc81d461cc5212 GIT binary patch literal 213102 zcmd?S4|H8uedp_Y?!B^nF34oX67$ChI!5O5kYvaL7@{x^~g4ZL7qN!PLx zSy!Vg#W95Gv=pYm>g1IH!qAq?OKJJjH8$m%vf{hX*=O(H{_WrX?ccxs+v%>Ec6(;7wQp*!y*$0I zv(Rcyo!|HHelMT#^3cL!d#Rn?{tT*~X-+p6mYPem?fLN4O_%vOVb_UJ<@&F3j#bzqGXAbsDKZ9^&!frL=Z0ZyLMP`r`6jtFxn$Rd@Cs z*gjO$8B&EYUXFQ{#z=box#hXJOJ}BAQ|;;289-<)KHQo<(|Tl~Io|>19Z6Q%**kWv zRXbv}#zLi$SZDAvN!lBmZVaTooz`4yYN@>l2$h{1#`1Ebkyd9~OV;Lwd)JMrpl1uq zOFOEns%I1$+tb=YyR)>T79Luuu_Nu7rsbv9j(T_=%8e`2`a*MQ>imwL@WP9YJJO!1 z<<3(3!j4{E^c}dajEuwfMsS~ME-ked=jlCE2dLU)d-~Fj^=fa-em8DNH-Vpp_I#&x z#$Il;mmA~5Lwo>6w>77yXEoi;xigcq^V75QGo2lM-(@Zu#RrE%{X-kl^$bWr4c?h< zT?MJ&dx*LL!S-9v_~yK0cg%htdNAEQ)xJ;~^`Q+mV3F^s%FBlXZOpXin)5S5?ZugW zGp%_e?tT6W%IfSB)ivkm+fK%vJ4?U+s#19}sU00YHG1EBI_dgGee=4%NGj>C*V4+y zMs@SL#({rb%MP~}E-lW^oL|Zg+`9ky*-=|y*|B4X(=}%iY<4~a2Mcqv&H1TTc5!y;y!8^=7)rDILksQ6r6!e|RA0E1wa=BSWz8jk)!%st z=HR}47cX99*$GfX+nf)hv+vmG;gJ*LBYOds4^Gds5mM!8E;7i;OIdRPP^Oxb z065pYn6($P=FDP?cS~)7dT|lrnV;E{b=v2aE;biiX*NCESz4T(TwdzJQh+~WwB-um zsX3nw4v%M}qd}3mBWIQ`LmK{EM;>hU4=*bh5 z?#YHv+@HN<^u&=pSqmI77cc>6yCV>1g}>G`$Qo}6#rf=H=Xk3!jB#rA-0T#Cnx9#2 z&a|?b_QTLGW62g;ix*})n!^sjO{dx1?1kAtOr?3xXC&=SD@mi;ziyj;^){-Fb=RjG z(xmr#|LG66t@~kn*w?7)r$5}jZqOd4jjDe7!zh45pD4 zN;Vv!^Uu47W?KqCG?$EkFFCbN&ptdmz1*Bz(M`5{_lc7eBL{cy&W7jaA}&@S%Ue9x zo||i5%m(M0LJUK=2%-iV2+RP0H>P#LSQC(WsfSYEw)l?WWZSQfr7ERsuS?&O*x-1# zGr48p^-1=!^R0ORSrXh!i_5J&+3B%knfN9DTWo?Nm<*i*?qn;o-HsZgdwpY^E_g|r zO*K1^0CUh`E?C8#*35<0{1UUn4#XQ4G0VpGQ7s|gNB3kGSyHgBn-1JF82e!%w9YMM zt$A&}EUq-0V3wy_=bFoNOTcly`S7e39|YGn{^rnRWA_}+Uhw=oZY}zmI?o`cYz%aN zfzM~tm4M~wtki{;o`AB7khK1Ju>=Bl?%JJxa2(37G0*mHX!JEBUIa0Rci)VPGhSaE3!|?X znSZ#w_SY@FVS|6<68-XXS7wEXDbU6YhWYcxw5QH4*<8*w7vbxjsU}p^!_-diLBomJTaSb7`AM 
z*qd!awYRol9v9fEwC%8E8qxWVDz?c=Pi?9DqNn=erP8Y3V>mOG8QYS07s#&DHvYZX zp6eJiG#JP}%6)myU`|6QIu32R%m@vt(Gr4Y8Q{{T1%v@a@VJ5UbyEbIoh|JPqP(*_ zj3O9o#x?{)fG;H>e?)u6DlD=&+5%cZ&y^E~{7zG3N=O&poAuI_8vk$kjovIY3 zb(b*JLjDkkG`5|?sO>eMo&Z-MkT{=f(uR115r?&B_KEAK{-+y98>0ujodiB_mVg^uGVjY~MckVWi?n zI3QksE1C^MTh-W7>yf3rF-;gt#Z+5cO049(In5`_rZq2l*vP&RUD)%SzJt^Q6-~nY zFKT+M)}!`5&Gzc=QTsm(=;+8qCVb_X+JE4-+n@h}vsv~iFN-hSB#?DP+aQbU8;`10iO!0Yq%$YOOm?krC_HxVtb zSQL4s0<@e;FH7)o;5A>uI#{_S_`Yz$%^UB&c;v926E5JpaUqK-}Vff zqV`Z6T5Rq$5Nh}8af-Qb{NxF-Yts*YAZzFDIFh>#Jg*GNUFX{Ev)Q9sbj7+Vkjv`w zwKte!?_hQSQDgF82@^ZxxDKY-APo-gvL$$;%`Rm^Tv&+3iYqo*v8NSiXAj1%lOZ~@ z$+7L{yNUJ>)3yO7>^p0{?An+j$%VaV)OTGe0H%W#Evdquu=;~Gfu1lgT_W!d!8~j@ zwj-V$8XCIWbdS4+pXdJ`I*ZV;vX3$hK`nF&_dO1|IV^@Q@dKf_rA9$wuHRr&SQgVl z?6A&3_E3|~_3z|ehwR^}yADHN&Sy%(fmB|pex|H78539s%VJ7C?6dlL)|n=nhZ_Un z=o)d5NQSTjVN;Zqr|Hu7^1`$L47z~JG{tyIo5q~%iAa9WaA!Vk?H+f$?CdIq{qF~~ zKf~&$&WHUuOhaeiWjJ4E*eZ6M?_v91YXqw{-OxY$HfL(ZLdgkFoGS|wjWF*rbKq&P zwC+zdsd49b(aZI9Neo6EYw?~jaR*TBcJvt5YZSzuJG zDYCMNvsWV}KO*7Gzc;2e0ks?LV-him>g?{bkq@9pP0Kc*sSsY{vJk6wRms*^nxW#N zpZFU8?!raZIp1DJ1BT0V&6|*2EYP8>dz%md#WXhOWMyVf;Fku5*{)%4E*>wJm+1Gk z{Ej<~{e7;u-QUAn3*bK8@`|EG5IcgzdmxA$Y|Y=ta`}58<`N07*mW^i&0|m3N*58Y zLAY2-Fs@5dm1d54S0-(|V!rnpd&svQ%qr4qv95*Pd~L$U6hcol{x~nK`M{JTV zaZI=8Z;qY?R~LOFG>gcRtYN6frbDrrHMMtQ@&(coO5*x9yBrFp^G@IPu(lx5%kn9C z4Hd$0?kG50c!qLU(%FE5p={)TH8r5nRRfRwE_>k`BP*7iZ&F2b%Rg=%-% ztu`pCYUuB-8;8-vopr_14^Xk`mRA3Ft1dn?K}-XxU8bvt>+fdvsZ;p zjI~}U%8uO&tpcr4WA1J!4Adl;!6-U_l+xyCJmiKOGWnRDuw909Erg}9e<(ZFYM#sY zr@#$%F1DtYaq_VTylmA@Q*_ zojm^L@;wQ4f!4aV^DvBIm?>6mr0Iaf%K*DlKXP*?ABMJb9_p4cwf))<`G;cs9S zo@&a~9Zk8NONic&um<%v5ue_Lad9vkcNbPCdxf0sli34V5M=Tfcf~rAoQp5A*Q}_S z!3UWEgz3TE_Rq1=iIK74V?20ib8*((-l6Th%ond7^kr!vzaksFs|3>n+2CsiV>kC` z@mj6Bve6?WCnf~X&|vtTLt~@RKxzBnU>1S6Gl*r53)}jm*Dm)W<>(E5JzJo7hlD-8 zKo$z0q}i)li}+oG&voT@h&(Ppm^}g!UCYYcoUAxGfxFlVV!uZ#sMcA5Z)*4irPy`E zR}O$Q`&A8Tu|@UUOp>!pax;Ya)~bCt&O?0Y786Ni!BZXhN~|ak93JQVLsazO>dcX5 
z%rhWNav@A4QkYx_j6Ns+*3b-IkXO_AYuJTYu&64t593n8!2{Q}H+SuR&8p#tIV?s$ zH_IN1qGhsoU{Y}s3}QoN{0zTs?uN&@E8e6?r!4oJ2J1*(293FPg~O33m8HGGQ=2j% zq&A;Y!v$ipNq%KC6&+%*omxUtm6X?wsbG-F&JG$v29fR-*_y2c5(dwj3XO}+3^(}< z7~14o#%;{L2--?Mp5PmrB4A^F30n*n7w31Sp#@?)`-;71hW_!bdtcPt`>Nr+FMj|Z zlXkrB(b@u#HyL~_T65WSJD7Gb3?iCIb99&R5QaTWOgZHUhZnQug|KLv8=Aosiqz*g3tXWP?-(?2!?Wc@>R%kc=WzUpj z1-fKa$65G0O#@_S^Y9F&Vc*lugR7L!XdW;WIy>=r1388Hb#Bn+S3w6U`!4h_KSSKX z-_xAkVk|(D#$I*WG)?pO$c;bl;WM%r7V$OI{O%R9okEUOK-N0n?b13?I9I8WJU%Hk z#Vfv(o*P0FVjHZg;H_y&$6VZdt~sS@!Sh{O7DbQhu%lopcLm))eEgWCq%{m*W%q6v zEo5glt|;;fHN)lYKxc9xf>aivDPV2z(aL+oNl48#*v_AR8A`X=UT)5e9K$Aj+{u;eYB9 z21-oZ9;f^=B|%2U;w_fjBk7nK=k04_#nht-+B8w1kW_Z>HcE12+EL9lQJ=L2 zLov;aXe%~|CLqL3er|RWqokIipS24F)UzuDK1EGvQ- z4nQf>E3j#}?C?!kF6i?h%L5uU(6Q))4d#Ss#E5f1WwC}3OeL{?rQu*tM1BgQyC>k1 z`CLNqpQD?BTiCT(l-wL8Fr?SaTlDHYA=uDbTZ{>YWQ?e}xQOy1`)pLu?Pn>T%WUA< zoPgDBwwJ5v`(aK>MnFT^z3q#VpbjmKdAuU3IV>gUr&-N2Y#fqfdr2$qjeS4n$eCEw-TSC5clDkjdaTlvgv91+^6?UUL>Mm!_MDO>-hOMz!RdM?j>F*ivz3Uo1 zznLWeDX9+phN7>7KhSwX(Dh_U6?<}>G(_QO>z55Ohz}5_RdX={*;hh=$D6UfgDU=N z49<^o`s7g^J$ckmnmo#vQ|Dh3eF9RSySxOf3-|K72ai(>CdX=d-s}}`cFK<9?JqQbTLkVwBNJyykX4R0-MAi zA<@GmEy}7uHj%z+KH^@a@mopX=78)9$o{QlYa{&!21?SXeL|od=mzD~=`tt~@SKam ziUJ4+mSXz}-O)r|q`OZP-6eGQ33d0w)*YpPm(;co+_$2;ljG(7rr3Xc?0I)$!>Fhl zz9R*(QfP=MPO5-G(-Dj+VEntJZ%Y_;)jDo$OAi?swMOkzN$tvk+q=OyGIDHWVx%+& zMi16s4oKc%jSgufc1YYb#lA?>H9Gv1XnNQ>r0(w}wJQd0>+bNBT-Bxi%<+1e{_5UF z?5|FLznAp&hXK~<@Ar~z+tarz2)>f^G-`j8)Sf%=hQzj)K)-QGL{Ut1R+jR+VncoJ zrz%?-yNU%!yI3tsKO5Q#;U9%oeecLHr%7m77D1Q|I~EpxJ_!MQfekIs7X~YVk#i&8 z%#gLG_f9nzr)^0G*%8U#H~A>eJBSEuoJcVJkG;tZN-t|oW;^elIDTwb*d&VO(c`GO zA|S2@JpNH~<+ZU09{(u0K1+Yg@wmQG`;#PT41CD(X#V}cIwI3It`1LavxgwXEL$XB zv5g5?WqU931I_`%ki3u@yDHnPv!!;m>e5x%o&%S283x%nus2YgcagUo3~$!DBoghx z#!D`B!D665(V3mxuy%3}i4V)RX13*^&2l2jYAv-F_Rc9RKl0H4b`i+q(Nh7d>!DrH z82SHtaQ-LBhE4G;IRBHRe{1?L9Ou1_+Mg!LwF7TVvXd4w+njTv=*E87k)I*@jZ@k= zJ|DCzDY=1CD1>%6ucejl^`ev#4~S?3u_&$_-+5K@TjzM3pt0{8kKXxZ5ZMbN!A(SD 
zFNplpWW$vKk-Z@DPm`;!Nq^0yfDMh>my=}g!2iiPpZt8H6F@wqXcVIH#C;qM*IM_Z;yW|IIRsVbq9uO+=#rw8DviG-@Jix=EzEVUp;np~p+!O1sF7$5nBQKZGM zN6M!N*k2drKi@jwi*)C%rs;6#o*^_>9#fA>OiY78xOhaH)nu37(4Y)bEXx}xyEASIcYkLQh`Yv zorL4WRSxc!o-}Srw=OJCa`ckJ&Uv+zYW1;xe{m##-ncp4aulBN3taig`@&ZfeAF0DH?-!LFL(=eYhgA1MyRo}m0o=onphup zLe4-7gqv4^Fz&X|m2JfD8i#@65q3@Kq8xlyBR8#T>!>&Cnwu2x#uwH#X0VPqQ&h@qz)*;x3@2d0Do z){2kIGX!|E#<|2^HjF7lfIU0`2OD@*y15%YWIL#LmauFaqpOd3q0ygKVP?kV>SOxZ z_y@&+l`Wy~!1Gpt+uicYaSz;)Zt5DqGwpit(e#Qnn{NDYx_y~5-e}i|g&0@P`0C&B zr_MlD)7VfD&c{6O8Ao_%@EiWte1Okdq1)njsBIKpm7#_YhY8(pG6TZVOZ2oZGRLwtL4m`(yv0 zIXC#Li1b26V@ujMasR22Ge=IIK6GrPztY&8ruN{TV<(3v`jfmodSc=QxA#}`4^I1o zTDih)2m0%*m5tuO=nsz^;gg>3C)8Qr*pzOtI)_f4Jl5Zvw>dsBHhSV{|AxHHqhlve zpXyKZ7LJdMj}IRm>F>*H9J+sEWW0Z4-h{xSi%oeKBPUKD@88_O0AM|i-y^W7v1P?0 zAnngv8TVGUc0Hg*WAJcVH!_6WHy;~5G;-|B$y3VOFnkQ$<)uM))SP5j~fmO`k_s18k3Biiahty;j4;_2DO+cIG%1wZzE~^@P z#?1J&m2}7Dkh3PL3zn$qELh^5#hP~vK6gzN*d(k(6PPbYdF7G;Al-4UYz#0 zxhIHKtM{(s<~Lr^)>!zeyxTjht9SbV%_>OWkfc|v+4!1(UYDk`^O%8M_7cK2jBT(d z#=OL3mzVPQf(1RpA6bPL7D!e4qP_#qS%p*wW+j}BTho3I7CFPIf>u-3P0MuDd$4}4 zIoD~8ZSmJQ0sXDMFv()E^o917;tPMBe=+z|8`3`MTA}mCbJL9vHXm-DiAt4cZ0ggq zQ?6&x$#$#47>U2OC|ldWv`uUnYv|ocy*jM;TfJ#KKiy_uM4$T^DTbXL+H!KpwyMT) zC91%$DTq3Vh1c0^Ucor9oyBwp=3v8>}pj!8tRQ$wc+{c zd#Thom|khEfI|>?hQtF?51mKJ?+H?1U1vn5!`23+c49P=ml9*Q)g_< z=%$)4R#az4e;L@VQ&TqB*{ML|9oEAY1+vN?+U6T6#zwjFQ+iyuw7SPeSC0#qR(^Iz zx_#mN!s_pKm%nRJW#xxAnPyIiH*-3@rPd+&16nTbb&dolu<3>K%Ft$Z6j2=(fx5q=}V*42mRU6&pD~6wcSSDYjYmE2M@6w{@jq4rj<-v%`|5MZ? 
zuvzP`^Z$B(5C8A!U(f%0`#13a4gKlhn``MtQ*ktLc<-eUO-cJOjBY+~!>VnnTzlxS zHn)R4E-sJxmol}mlpbPL8$QQEsc|&zd9Y(`Yd6|_V6__9)%Jbk*7%0|#?PEMId**b z*jV_yadldoYrg7I_cmycU>94K6$E4_KF(OJE#`J$sWl zxG$|6ej8~TFxle1L1Qwj&5~U`42>4w9h1NE;5H7B96v>(x)X=*KXdxT zOHQ18*@?0KQu9{rSzPv~(*Bh|HLgn60*jTsE!}eBB)ckG>&QL#j2xaAcgsmW2rMV% zRrZc*x@`?(8*JabDz0|#UB&g+_-05_$0;n7WA>?}DX@f#^xP~Ofot)u2yH2zo7ULa zlQsyav3yP#^`rSpict->OL#Lnzm(UWZ;{N5gMg*lGPhO7`s16tHWJWu8<$QifS|pm zGS=tMyg>+@$`1^ZQVh76p_p%xl)&1Q4{*eVX(m-a?H$don@P7uD# zY?~l+_UeIb6DSOCpV!Ez8jIJ+l_(t(>vGpU{!WxnZ$Rlnw#!V zhK_~$WBHc@zgtOr<72jG&j_pL>W7m4if4t@bEHw__RXKGta%MVg?M2uDD14`d3ndS zgy)CDV|mjEn4VQUV^P=A^^Wz&7kTA_>D94ag@d7zO&1@@NwQ(=s!)L#FMA)q9sG19 z-D(Pjl!x5Mb39Fp81aCv)ug=YyP@Gch1yuDfq@HYTD-X|+SsSu)nxjt?Ng}!{zw|3 z3^`2`p2kzMONNuQ#;gO2%o1c*Tub+m+uX}*HVVwSbv`c zECQ8t>2_h#vhh}ro;vitWZnrK|wpgm( zn@QFCn;SE$P8@W0$m5_19ruQACLagN3Bf<8`hIdm0RxEJX;*&VTVELS`;N{}k$PfS zfh@D+{_QB5!rUMn>{O_`QYo%|;KxQ-4#u{C(LBp7(yN8SDN>W83_bz&gg}L+a%TXS zj^8QOFEKUx7vlwS6r`;rMXd$R5w+?^o%~Ry&J8GXh%~$kQ&5CLkt2g(S5N6|alMMi zSc<7Ib`!~32Fw$K#$;ziB3aZ7t`QQzTC2qB5d)SKA27=B?xQIY7MshZKM6SW10}3%UGf^GiHynxyGa!kYw`g0hf#I3kp$THew3q zNI~&nuIN3w~Nd#u~l-zn?!@sY8^_mUEN_|PbN z`uzsVJ);vR)Y&~J$2ftMof;mS7(IOY*zj0(>h##Dlj9?qMs{R${P3}1bowKlwmda`-57%%#I8n$6+&`?R+|{FfoTukBuA`)SzPg^r7*I(TUR& zBiYfDCyyAi$4ACqN=EPTJF{aah5mc8)8iuma%6a7*t(_-(8YTOaOgBn93c)@&dAu< zY4g(TVwPV9X2!C^!+fjhJd%;2jae0~jGP?1U(E^+HUoRIm)$$U^D!aSh+kxF-X zqFjv*p%o^&7-x23*X@rv(DlZd>4x9++2kR0gx=?`x$GO+8XKfHb2wds$LmVHB` z%FltNfZHN;%r_oI_5~xk$Fmde3&!Hv87#CxFxfjPav-j9P8mCv&R>8lTlSrc?Zt<( z>1E5ZGudu0A;0HmI36U-k8r#ft~*#6c%?_nMG}JK7MO@dn=W_JteP7vhuUqmKO@T; zQTRj?Ud*>(pX^3WHVu(g%%A>30s;Go)3fyHFG_aC?BnDqe^FPS@)z|zC%z-i_KU8+ zA(lw9{i5gHobCg7V)X7>_wvfX4G^-Erk!ZgffXVu%48QHAgZr=SMN3Hi!8jG1nw_R zt|8}zQMdNOJxE#*rLw6&jEmq)J7*A+@Ex^0lR9THfcM1#UOSmleKAq%%DGO7PF{>C z97@3Z;;RSJ-PR?q?oOUFcwJ^BYe^gTSnkH5$>t)Ew3em-)xWIJ$K81!NvLiGqC`aV>SBvg-V9gHRQe$&nAs|*xAJDNOi;KE8!WR-kofWWgbga=V+uLlP97lB$``_0$5PINLL-oFEZjL4TyKna`w(4}Gq56_! 
zZ+O*1BQMDtA*R3jlCbKiNFy(~-q$B38hObrdqN}Y8`WdUP-tX5jU3AxSx+O!`q1NT zDG;MxJvO*2mT2VIp1q+Fu9P^QH_}TZ$MZ&dY2qOQT z6Q@B2Xi%7d=q7GkxY+Da z&f+p}mCffIX$G{tV%lBGPg)f!Ylq~`#&gS10lZG*vD`c`oSi@Di84t*Uph4#5tM?i z(>c0Q&~>^ma+DNwogUa7W*`M!r}qw}4;aaEYsmfFdvVHkjBr*hoaR6!fNyH4gMkUS zKsap&KxReq)18A>?EonikQ%&j+h7h>A7I^|gVo0Zxj$z_eSmd8k<3nAeSmfUHP@v# zTey1dfpxE`3|!N-6$CjZWI(q~NWAQU-m8oV6^&kfWpa(RM}eMS8G5Ef&#w$UQ{uB% z2BuDlo?i)5*G`tG#j}ayiUN(C%^RtJ)w6+;d8eA$O`$tt%ByF0?h1{-wVPo%B#D@D zGjBwTwwX7g?wVmaP?JWQVL4b~D!C=JFE%=rH&UgMslXzrNF!6%_|+AZXk?13D||S7 zHXT?5fLO^%h7yfT2NpqzMy3OcphP3nx9p*Fa8gCCIF~$Bbx^9p-nn!~dQIA+cS8G2 z?YhB@(njZ&mA#DV-<+=ZmvlK(+gOyDh?z}9X2th3HnVL)N%=D%JIn(rDZD0Oc6ZK%JIZf4rn+A*bH22^Q zyNfc-J;+LOOw;#VKIh`-bNO_MqtAuqm?Yxpa|6x-DbdJWKIc@NPxgg57e}AZ8xcpJ z&k08yeLjnHDvmy%Ljp%%NFq6iqc7x*h@&s$kcgu%3^;#JlJzvQaN|ws`@}~@RJG;g zk?O!tS27FhmGBfzSJ_gNT=^DW%GuHw2UdP+>{2IWkb2GRAjQR)Bs^HT+k;g&Lv@&u zcFnG=hN&0-@`f8 zxXAZ#PLh{r|OqqsYLnA*M{9lViMB(78c zk#(=HRE)Wct^G*vb?HgSv8tm+KUzr+4h*|)BUOZxnpn3arMfF$I*Jl7C{Za|XnSU! 
zc%)4C4UyZ29Eedc)fpigB~6; zrGvGvt0Y$s9M2A0RwGV3aRAK3JZzetgCU1$z94q9&Y9E@_Ofoc#B8f|2Ku_nrXAjK zUB{y+`XO3gayt2Sl`D3nw}Gu%qvs8kbziAe25#!wC<|GHsch!U$xm=e&w4|p_gc7E zjXzG7zp;|Mw^FtKDD(P_mGyn;I6TrdyV_4yIG=ntyH#gS^V!C@oRRkhJ0jA-N!?SW z*B4u}98lFeWG^O1*FEr!a%V_Z*#g(MM(KrCw-gT;*H2b(?rjd;QsJLfYB>AC7pxmS z*jzl|i+@((81=>Q1?8XOi-8#>w8>sRJM(gQ6y8xJIC492hR+>pk!``O$iuP!Rum{w zv{2zR22qbww(x2a>cDH_MfV;&S=^zg;w8tcWl3lXGHj=$j9Q+5Xuf@MK5+07ZNh3b$YMG-fQENgwr)%J1}vnF+7p%9l~^&3wdQA* z&aaJ!9P#K9s$a}qN`p^Swq23l5E|@j)SeVtx3T0dYF}rs)>%4|m=B-bYr9Tb7i90$QL{)CUhfT{9FZRWBg)MqxP;c%h<#hJSd1}6JNZm(qF90&5hd6 zS87+~gWJp(JlI-1;ESKHY?lP-+;B^y_6y}Nw(tcHiZ8bC#V_cKo5L6VjoP~z$KX|B zFr6;GgrTj362cf%vlt*zgFIk>v8k<%+IyZ+Q(I|@nnhDvY3ezKuBzI5;IS-?l-W-oaz06H4Lij|6us+u0yU*D;7{ zD)90mL;G+gayT zR{oPQQev#2)NhT*M;hOd@JxpYb%`Zz?`C%M(9M>eh>z_$%e{({w_r)WJIPmz7iT-z ziVxp%%U(;(MUqXWo*+AdW$zZEOo1_5h>#e%&34WX(f7I8MG{I`@17+sfhov@%4ZnqHK!P~V*wChS00y-7qCm9_;`hbfDXIzxO}{F-E-2Pa*BbxK3Pd_ z9QZM(7>ERq^;9I3;KOiA$*p;o8<(6tl3qg+sYpR4*|E{fRE$rB_?vm~FsM*_-M@-G zUuRCRXju)9n4&^gKjHkLr{L8aI_c=%KJvhEO{B7SF~{c_`1wJKieX@eS<@m%_KymPf^v9jKAZvZ9 zf{Wo{@e5-KQuG{wc7#%x;?7FN$3`{1>?vY)35v1Tc95CdToL_QToZ-)mBlALM8{Db z@5nIrMLq<~6apg-LFrLGRq#j2f1j#sjcgf91x36T-|`_K{QZH3@ZT$*u&m$^TJ^Hb zdbSipYT%l3oS3*KCu{VO#eB(@8Vel;>IEafKA4{vg(yth)G{^3YVC^$YfSbEbXZSv zb-H*b1~OFUHnU3#Bx)%P#X~KOgIH6%F!+HFG$TAIKL`?}EWYTO7g}?NaS!H*!%CpR zN5m-|${!SSFERKJ3O=B8D1T6Sj=UFQzDU4-$WjV~q4XjYV@sIaDfY$PO_JmvR(3=x zLTjf86@74nHH8@YWVk~A~1hIqE5 zUBc=WDWi2u#E!xf7pHMYnkH@N@-2XP6HBq_9py)GZObTN@ekr=r&|=|>Xf6}savX{!#9&D12^r>~h&Au2^LFVUb{pm>pS4^$(~2_vOHo_X z3Wp0!FYn58_K9I}m5w9@TcoUIg*+)}s-S1>6d()Mu9-J;kQ>&NnpebuHa9=LmxwIS zYbAN16iBfunIwi3GSbL(-Hj^UATiPi zGZE*L$&2*p6e_|vC3n+-oB??2ihQt|U8c9V zsVP^I7YbQ6k|&o$zNCcZisKK z@{PM#sUNdjgM z-mae4Cdunbw)xIv4cA8$IBknBdq4canvM_TK}&}lBz^O8XpyYx;xKwIJRGI`+_nH% zZ;G^q`*9mNL7HLe6m4WnhzMtoFLtkub;+)!?UYaX%zbp857v{BDL*gr=>mM1h)x)x z`M86$Y)p><$Pz^m#ccmF&|JAf&6%~%Ksu$uDxY)c)iiA zEcO7B`SAoNNnV2ne>}kjlRsdJemubk6MLY->k@Oql%0;Tx(JEaMGsQkUsvuy%i?tj 
zewh3ddUzcwwtK&Nq;kJ0Np4-+`*m3o=zg)16v%UU$+g-*Dl5qWiI9k9k50KPX~5lb z@0Sy>6mWoz)Q)qQVxJ4kjD(@SDY@>JK<&!=`KDxOUpi-NnNJ@}(57BlIEzg0HV9qs zT*G7J`Q()bb`7Tx!<3}vLlxT@!Y)Pc{)|iEqw^ljWh$zz1Ta#cTS}!Zs z;|Xzd!VV=`q8M8e*LpnZa~+5}i}@&M7jCpNR;e$frLO5kis9e5RNRIZxc`>@ueF%(r3e*){BGHRG`9l|hW8}U_U3Nwqw z6ns`sdzD26Vb-pBxo3^sb3?h21=)x$OzI+{R5GjdnRb4ul>` zznCO9g_q)G6k|)`Wxtr<_V!Q3%YHGr+G&YT?b{n}Oh>FGK7D_Z+%a%R#&JSFmdA#7 zE+$3Cc0<09(j9EB)kWNJO@ntLCPh1Wk-@w_=1tyUOzQnfe*{5H>ir2Igl{oLyg#|= z1+j>6<^9R6&kq37(g)T8&{(Cs*((4H&%@;90LpwuI}t#c&puE9D6#Vc1%NW2eV_oa z4uBsh0IUPx2L#|RZD!S^jh_FQtosku%7GKvU4QpK{KD>-PY?Xwrfh@*ZEpPOjyE^2 zyls_sS$WW@Is}w0`5CzK%1E;Zb*(wJ#dgdEA8r?8afCYvaQKSo*@U9%$~_EoI(vm! z!^tNW1=KM|C<)q%A{l};3zxObcGA}5G)C@0Y~t#Q*#&M|k#cBis?0@kLRd+81b`Y^ zI|2Z;zc8^z4D1f5DYd1*2G)P6uG*9ucM0W(;RGv!2V%<LPw$L3R@ znY=5b7Ji4RMefKo(?D#!Ktc90gIKrH>IdSgtC-B@LfdCSBhn>tA$+JK)f_82w<;`? zADaObrVOkYOChoULYt|zPHhf5%Kog27<56dX}5fd#RH9%@s(Sb_21C0sUC^F<}n0D zE8X!Pnlt8VO3AN5sOU+D`gQH*ZZm$kZYsd-)(=?-W8z=)GelWN1`ge=f>=(rRzFFm zThTJeb_S|^RXD!~xe5#}9KjB5E@m3^b+^Y5={Q6*#roRGo_{NUYK412&06px{fR2f zYT%v+ZW7WGA_?lU5fO;gqo9lX>@W>km|1JupopoCA_N#jE<0+lWO3ykydj2u8@`8{ zvr>_jV{7V3rfAPsi%}iszQpw+m_7EQiE!(NYzLg~+&eSZo@~zT&FNz=7DJS!#Y?5* zH#T?<1h|A+%yyhr3|w;8n|au>s55jsDEA;ITiiBlhg8(g$w@uLPP6xLwlKHM*&VK8 zgOa*Fx!9WI=ywZ;5n{)~z$f<&!?0?^Zzj4S zkh>behhdC^>tW_GhAOPvwgTrL+%VFFSxiRRFE`N3ytK^KCBO#{;Ls$;G!?r9hIN}R z2d})3oJL&5kcsF&CcWD{_B@r;_?aZRVc@@32smRe(O~am%3(5=tXbR&>@x>krrRm) zPo~Hl;K3>wJO{8R?wPOvrbD*BDEXzF4Xhmt&yEelE46%F`ipjgHfC5j90$q@)gI>L z`()Smxt)!}q!(=1=0$a~m<0HX*xj*cyXGGxRu@1RHS!3<&Wp$+pNY~RZ^6-L67o_@ zLZXN~@|gr{%Tuzpq>>FknzEUwtfA77S4JJoPxvUC zQ)PyQXL(gaWwb#05vP?Oummxn`?(_-Qg-fQQ!-6K_bOhL#Z$(6?9j!jj(m~Hk#lZ# z9wrv_`EE#{NXt+bmz-yTlPv}_jyQqegv(w*FB*uSiHkH6D1&ufYBr}^PjyVky57>h z4!cG~87lz9Sz|%&2j$EcIFIusx&O1pd`a&AY|?NYjLOW{XA|Pdf1zsgh2a0CB)N0o z@v7FErzwjg$jRO0ryzYXJvIYT7-I!?T5P(ARN0U)<0s6VA4oZH~GT2nEi5r2samIVVMfuB-VlL-{V8o$|TH|bfQ!SI9Ot#d$RC=nr zMJ_CujC7&t8b;U|nrz?mdrwc?v-gFv@Ilrc+%D#?)Mg^JPQtp0VaP+rJ)s-&M?CN8 
zB!h4zvXHDyt(?FNpdjrAaoCBgNn*AI_L#i$?!Fb4Tb>{Jvbi8ETr-0JF}F}q10gz9p_LsyKfeAs6Z3$Qi$9_>hL#{U2beN!xY6mcZDg%ibwH|A4-pj zkQ96T*Gcj}Ftqt8|SNVn$$j**evOv*+Gl4tPD}A_V6}qg#psc2D22~132Kfl8u|VSi}3F z!r!pk273I!=iojIB`)%eob(H=@(|F#mwbbXQPFByoR^k7V9_4X)!*ncjMx?Bzva@T z!8^)Bf*3h<1BC_3I;L5a5Grz7F{Ltht7UDwwB%u&_Q0AZXjev=RNUe_aQQHAGB{9- zB*1YNiowN1UE2CivcpXv6d{4{BsWK1B7?(s3SJ_E!*|3>KI^NPre0r71_%BiDI}(& zii0i7ORS*fDM-EGrB5#}u{m{JWm9f6L}ipyT^O?RR3|2n?v+BG=`40x7KxC#s}w+& z&J(L}PU4s)PGQ;UqR~=hnWiCrg;WI_L4R38RtU|oXU+SirK?(52}?;L@H8)yck}%r zu8c(CK_EF!pTdJ)Uk&k)sm_K{40b+>OyBFP*FQJ?j}Qu!zKbL0jCj1U!V@8<|QINg*DZN_wX%_eXxW=Z80=}Ng zgSsd|T~lXD(L<=8GTlV+BS0awrU(Gn5vP{j$s0nRElMTieTXV5v$DD~$HqlQVS*eU zdjuT(Zwg8_OOn{3;?ur96&} zW1ot81Mes8@GLCG%@wYLGp>2>wTv;3X_s+E28&r)+uUo9Z3$SkJLe>+a4*OzlBB}D z-dYVYqtF~A`POPKohaPvt<_vQSr12fYc-cn)*FSwQJiz}@@>^5?l_8$q8R&8bky65 ze&i5&TeaV9&m?f5pSM+a?hah9*P6QJhhmX_-d26VouMCMrygfrgqMn)q8R((yc5NI zJn}6(UW`YvQ;!$p*+4&!7vtGLKaW@Q@eo+`_G%J=P{abo*iTA7Z!h|hI{Nl%f8@l9 zSa^H&X7`wH90WwdE69I!4T~vP?iYsZt}gD z4Mv*{6rj~XtSy?8wA3X?2_fA+)ZX19qm=4LDlXYRKr6y90w_efMQ1U}zcz0LzdD1y&-RMggD3PkAS{KAt}oDu&h^K8wo z)yi}x=aZ#wHrRwjSH*+2W4tRzf3T{W5s;gtog1;5;+p4ZdoU&lr35kOL)*yEfFKTS zBSU+#7@Ce#JXsBH44`FbPgY4};Aq*%(4MS5&)pcjWoS=U_dh>Yz92jgrw}2J}K)-`G6GMV2JR2PN)!5rYQv3z^n4% zZeq9q6^FZt;l8`r4j9uK+^_Ox{;*xZ5mEHUq}{s-(QI(B?GZbjb&*9@<=3 zd$Mt*;f@ooY+klB?D$$iJW;@q+;^BkSdgd%eH2%f5(>|^M8y+|hADVT4g%E|QQAhr zn8Kvl(w5@9uXE|bBsLl49xx+Eh}^V!33fNlNECle>n6@wtJmA*r3VvWq>$S|y6k1U z$$NA1Lst<2N6u=C-9R;PV63=iXpp?rFl&n_tniE)=3ADa4;Cb*kzr z0)TU^qRJzFC5^Bk4N*q}oyz)w9RTtou>VQ{yUzRkNB0DVJvF`O^y=>(IS8xMh}-Qw;I8>8O#M%6MZ8uHf<3;BVxd; zM2ZV>)Gi?M!Ea^o3?L4ED}(2j6JaK3YwN27c}D zFnIJUOBEj(BtqV*nR;3*n2pu8!7?Jk&B@}pPmhQOh+r@g5e*RW(E1`!dk4Fr6=K)^N-@bO{^Zvz1z*AjlzIP>;K?YFC(`kpskOv*aem}h+In?!jW z`xbq6CuG#b>FMA}_mfF6aUVtbh{q1TvBFGt- zToF?#LhbKXlg%#TZs5b;t@inywCM)f@ykU%wBkn0JbjIxPgmFd^BNh=X+~nxr>nhJ zrhi;#68mI``Fs^i-6!i}#HI*{hxzt^JQU;X_eTqcYL!pEYQA!{HaBdQb{*TTC!?EU zuVhC2H)UmWuH^~#7!8!JWa+PAZ z)hPs&opY#KvuNyl?6ku*Wi3 
z?(#O@6C;c6<19MLaU-|0SIf?TFW4=Wp`n$xL}Ygvw(s=11q!psuuldoWn>0#S=-trj=UcQ#>~y^OfoR zp=a8*oNCOP!a4+nMHO27socK!1Nev8kxXeya{N+V9ttil+ulGw+KA)H_co6>S-NOG zV2rc2^r^)hX}B$ABhGPr+6VJPj2{}iAvSARAc3x!Be?H3Cyk#AHzS{4hZWLh< zpRW??Ch348GWO@I!8S?}8T<29Y@`2OoWD;7x-V9fdj|fWN_NEkM6N*uk;EOU1|Bvg zyWk@mEMgya?jn9B+}si4H^HM4-OiswJJ9gAP5<1_G9(vkOKWq{3^0xU@ZuX39AJeBb{7KP$5NB0VId&0xsl{VzhY?Jbtmj zqwJzztoHAVmBHf|s|P$a2Z#cXU##9f65oQyFIMk9l77|k2%Gw|YO;UePYSg*l6O=K zThl*a%~rS!oqUh#Y|v&!caHw2Q)f$L?_?#v4+D; z33WA_<)YQ)t=F{@*(MAii=BL`S`_2kf7xSe1VZ?U8rjQ!sz-txSodu`&abchZf|}e z(Q_#cOP$aV^AJW|T;@dV^}$&ajw|I)JWv5f++6sfm$WWTpdszaUaD&+_4CDgj(b;` z;5e)aZ=DeLkkBG~o|0P1xtz_l+Z<}sVPms17kvGAbKxF+Jz`)54qxW-@nW5BzFySe z#7h8+th1K_5PGdYXMt470S=1UUD<)KL$DL*FK4RV0Ie8ZnvKevQp##!!F1=_L_QfK zWs8aEHuj?L4dZvo7Feth0#;3;#&$E$wyi@?%uTQf1p`wwCu`q2FNr~Gj4_J2$9!~1 z_(FKevBq(=jvDGLW0B4sFcvt)**Z(UVR17wYPnVA?Z=j{wtX5!LIpY^Vdd0uf?G2V z#tf3d*crEEx-Hq~IVdvh$I+tL!x({qb5PAk6ry3ev6(nx1A&K{{TbOn;Frv*ZvFPX z$wUk6S}0~NBIk@%4p^2*+7KUNC~p~!1k zdvgt8|5-=~)hGG=&9xxEQvwc(F27R*DQ~VJze^0?*ywp{ZQZwn7%mgvTWh`B)1Q;8 zZKK5Schpd--;&reu}iNat%?5J$0pjxjOg2VSibNqR)`3f_{tbSt8@QYi{!1c1trO@ zSUAX%^D^V+@kAHf%Q}XgYh=;!6A$ajjYOPelstl5%leZ@rU2M{`MO6$UA3_%hJ?O? 
zMwZ}~!s@u$Y>`8(7h5+Ydm4x5D54HNA(;RZE0b+?nGms7OiuG$pLLJ~5rCz<2ob!a z7BYVD7L>lD7V>mZgb3bIBhp{H6-Bnschs)&cm|4WpYNzWZw~p<7`T5yeZOg!x+)lwo4RV>tX=@eeof@SFO#aW-Hf^>L3Duy&6Sd$# zwz5Cy&6`lBl_#{ql+Z>sDE}X|kj=@;e8B^E4BG>%fn)eHPDQ+bPc6A-;NLqHN!e#Y z!gNW+g;j;9Alp;(y@Zoxcm%}VjeVL0)hpr+bPXw2rdE-bI~$}EaxSXL+-s+&v)H!o)AVgw=yTvBmt&m z35cnl5bw}?a+jAJUIqhHro-gsMRQ}Qi^9Qu=SZ_2fjDsMjea1bAU->g5xF&Q4fXwg zijgA3_Z|`7JEUcA)J%S;mfSY*rn)U}zj!WAOf*x4BhL51(Z@|3aSXAvJ-)&$wVkk( zf!}i{ScAoiBor^)7Gi@JdS7rm7j0-|Ew#w5Otc)h$M{dHngKAnlUFaeoq*)0k4M`?oyV|_tfiQ9_!InUYSxlb zN!LXif`ra1wlIXlG;v$w&x!_X4sv@228~0ZJm==x@H%Xs9#CF%i|8?Ke%DKSo8L7T zrOGS}!Xhub^f+2BT5+Z;!ihpc>D{15a1R>BNdsA5(DM){*_HWmW)OOOda)GZp* zTK}jT`y<<0gs{)mh@t(&<`|=4>Z30rW+|v3Tn;%qcZq-uT?Fap3g8E}hMCPE-0l@x z!pc`##8J+`C?}5iEmi}awy>?&;!2}HEb5wv`%_hw91f~~Rug?d=c6afj%1t*sF z_5wZ~TnmD*+juSXA@F5D!mk_XzN^*1U#FHlBED$PfE8DgKBp{AZV@4xI#RrdUi`ah zXMZ%>6&j6fLg%idsLM2h%;(q)R$-J6DLv4HMOCBm4oR$r+3TBFAp z=*Nkjuy@$pg|%pkQfGOF6uA~o&J?Jv+#-c?%N--&kUXTGv`1{scbP7D%w2DYkao%s z^c@U9YOE9xLjt6S9%{8b|CNpzpVv7dC?dqdhkWE9BWETER5>~W&@mL0G$7}&t2@M8 zP)D!`T)(w=j)-<4LMFwiASy-sbPF|9j^Pk{ZK+r!rFB53MfnI^3y{pRdzOKpjY#n; z^q>q7dzGF&Pp>Z8i=8oEk($E;%QfXGpM^U`m5@h>$cvq+HQU8?4#53>F@`&IXyiP# zyy)l2Y|kNJLIIiG4T|Vz=a*SlU@qk3KxT+1@trKeV%ZLxBRFWQt95I;(mOVnTiLW{ z&SL1(Rt7zG9ngTyLqM}wPBkQv>1b!v*R3KfiFyxF9Ue6vR;^4T%6YFN6>hw@WFtXO z?p}(DGQBX%i3wW*o_xeN^Rn4~zNPDjbwyb`j9>2A|B2e&MPzDckc_!)A% zC7cmf*`~xF>~llR9$Bj~4GR)mGRuqHvuJcD#CRjTWC;$VsdTOT5VPP5-bsfp*2Fi+ z(Sbymj;5TWMliPw1G89}C&3CBmz1<3z^yblkz00yVNHkIzjpfxhTQ=SrTU54T>^s5 z)VOR&%27Fp0!c}DJ%%;rluzbh37W%LWQU`85uz!zJSXk|h zpLH;`L})$BLY<4ZT5}!~ZHHUK?KNfWo9H15)76K@XBWA@o;44FNsKfujzBUyFDr!B zt|F|xH6>ZXp;p>znaaGI%0fo)JPrd{B(Nv)v40G=Uz>v3=RVOp^9) z!cjuFAG^qToMf((G5ui;HObf@bvh<(_VLI%0>lv)3IqQ_feTqjzEBIfau^*F$roz) z^TW5`;tREKDw?<8;tMqn9e=@bK~rC?VWR)fg^AupmuJmD@AN?|&G%=XhpI(+pz!I; z^D78Sfz%8&B2g!|zgi$sfz)5Eg>bWt7D)ZoS}@L21c_g*1>-zLkoeUa#`(LAo{*@Y zs&V(semi68*8@JoJ8yiB6eLUw5i|t}KUILHAmOJ9T33+pQ?;wyaJVt)0ko%T1Mb

W;E0(2=1?*B}3gt z8S4Hj8tNzk2Sqp3QAB3{D;esp#T2>`|c2G*R zR4&qz6=m|<)ad!!+Pc@)-Se(6x4)IiPbSh$vM7D$2Z(tZ1oO1$n5PKlzY{S}k>0-( zF;5Z9ei-KzEVt?3C z=m7-Mg(Hw6Sb0M|*yAYz&Kv6ep7Dhu2z*1GtgZi5d~&nYz{l$F$v-Oiq-p8T8k^K8 z{OBFK1Bz6P1ACg7+;T|nGvbow&d+fqXE5W8IFc3Yu>wcZtRJffCJ9c!(PQ<%Bq_p) z9;*i?NfF-jSRE!Q|HEb$-A~oOi~nJ>*4(gga3gd<9Iud`G=^NBWl{-z{2QPl$ZKu!4MDF@DyG*jM8Bx8Xl?f4nV78&M20 zXW&dMBln4=$ZS0k37R5kdLj}uMJD!%NYE4^=qDmUQ-q+O5J8Iuw}>X5`~lJ+4FW1H zMo^Wz>B&fgytV!U4N?TuCnF7RVN^f=EYaW=(csTV8l(gq6rBbsLW4gaX|TW1^Pc*; ze-mg>LALLy6N>hqb$Yp9H2A?fha5`LAn{1gvr2%WlV_XyLN}1!0an=wg1S(5?y-P` zU{=ibvp0F-53U7~c`M|>aQnXtj@aqUcA`YMoWfF2z_~@XHu=cWm&wx7Txid=XLP-e z^xE@nvI7J~8x=ct#Xd;pj82P8RtVbW<{ow)tBeK#l@=qYT4lc)R~c`uzp%wC=@zZ0Hg)>$lo_fAZll6i^% zA+AU8$)Dzo1H2DUujTlfcN5y(Ue_*Hw!@_>jD95x>(P-3MFvb7cT-Q8`79p@d`vc} zQ`{MzzX!U|U91H6v~Zr4ZaHoYp9uVl5reMZsjt7vT=5jOYCcu3U2DF2D^r07em#vn zUK1{#f?&(_!dMZGfcH{3>_TXl2axpr()-+KhPbUm~3a;C)%kflrXf|XI$MLY}m z%ZX`($iufGGhQc~!f2=i%ON&RYNDdj4jw$^2RH0wACBT8(atGu+>ivKY?tO2cLy6$ zBueKd0E@ceydX}=2^lsKJalr|!OuYes8Xb*pD^rXeyO~>FdV+3*@Fi|6FYY~yTB~! 
z_cAbya<#+lg-a9dox66q9&CnoUmT}72ZgJN=mJK6($AF0{UkHM1s1#xMd+ZhvEGjkEF39Ho=eIm0vFm5y8t+D7v#5q=n!s86i8ZnUGXKj-ToF z(%zV2hbw^)-1v63b0&|%m*vvT8N_mxzAiV32dSvOlQ04Jso!rbDuJ~sw@w!gD4@$8 zl%pPXw0?x(fIY9yUJ))xx0sj*_E4T|E^<@)tGFp$z}TY;?Y6FP*WvRAve%Rbnfk@* zJHt1-hEy}~QjiI9zfe#8W-p53rC+Sq28~f!nbph#&j@V~SfpR9Z*rz+4`||x_3hUc zpRlICSSL-XLZwuL3SX+%ZpbT$nem`#Q3jg*SEWSfg&w^xlWLTT@lp~a9^t@H|vThR!7;{ zj5DB2o`IT3HldgomMi<12LUmqn64gb>DEjlN!d#9T4KP)U5rbs6I?gOBv_b3q(k&W zDoPmVbj#umL94hRaA(K)F}@FYacDx+*Xkku1K17ta4MD}L;G6&CS71FB*D_2swaMk zof1f*7+X?!`%^_rLc>$_hWj0;OiNGINk!r-7-n$6H$Z zdVT8+0s9J#{d%2j%ia>7exn|;%{MG?lwxd2u90sPFv&IYje0nvN@ZI5MnMA9{quU_ z2Z9@k`~wtYOY#r=c>$CB1AkuM8tFxeR{mTB>M%hr->iq!$qmc-K{2)@bN)9Am}JiX zX1(F5lc`Kg->j25`7;%^jcu5e>+AkUPi5diTxDEe)Cu`d%a@b8i&G?%U$PXgOt;gL zc1qS$Mv&j@9(I9(lbRC8O-QADE(p_Se~=kx(OXLwxI^ma+RlY_+VF2>5BRaI5Z5-X z!1wl$oB134Y>>y=(Ki-JU%Yi;7H_o(yt_r-lo*|RjCH&QCuj#3htSUGkr6rVb+SfzFdyOcHvaAQ6n9_6=-WJ0 zH;AYmqAdw#Q^9_9Xhrj?ZSsj+(LdJA-aE89WxVJLozE)p!pGp zEaT9?v6h{PBYK29S)QU-q!GP%xM*$McHF^t+qIi|rA{C4_e%evfArrgUnzyZX|J+u za>{;yi|kyM&77a*?w>Fc>(ma+NpzSC^XaEHI3qpJhf3HJ8jAlE`<}XaMRU*WF8;{~ zO1yPXc7Xq94_MU+hIEm`2df9hE@vN3uJ~{d2krP)ACmO5HJ9y-qu0jBCsTW-SAFvQ z_=!A1*-poJsNdRiZdLvLvA#(VJI}S-q4Lb0^Q$V~mRI(yo;&wb+2c-xroAkKh_(>+ zk>?NxO)lQ~v=Um-nmZS6!L!H#%iyD7m5AQR6@dvOfa1Xw-RY#?$$iu=NJ)qxaFLXP z9;D0CTSV>30B0sd9}x*gAgA^t1#pK5T|aK-@(+#QgKC%)xrH$*%2(}}ID>4;@E*!` z5)0Cqoh0;2u{k6V620WnJ%!{`tl5r>eQL=U4brd0z~$(IG}r)*zK~9pYd1K82J`kh zgIUC)8gfJmIhL46F%B$lC}VGSJSJ$3&0|7|x|pckj_Rqeh)@q>C9X>mdJ#J%{K=dK zKk#@5m`w-qk$4O1*mE-H!?%>9iv1JaUTAlSDC6|dB!Q;ON@Z;v#6(D5``D%j174dm zO2$M8J}@ro9FIbLa>S~g@-&F$siu!c7cat@EPE7mP|A;N2+!-;sJlrOgQYER#wAJ& z$v(lt@?@YY1;V)C8tOW&I7ERbs)sP1mM;HO{E5SBA;V<^MjZ%l$5(1)l0?JvMn-&B zYAMGIW&@%9JY;Pel7h+#!U#)5E$L&UQX_=>JiN#vhnd$%4y6&7huLZCZcJLxDKp>A zLQS=tD|U+Lq#uPQDISb=r8KemaMp9xBFYBsy5T|U=jV#Cx53=1Z2)*Dp^3~5$b|FE>ov*(5Q(flS9R?K1Tj!raw*$$26 zYFW`~lu*&~COOAs^vU4KEyT1a^cq#k@(j_X)^o&?aHosqJ$H(Z++qr1QR-y3;I*)* zh;vk79d|KI$->Sk+1Der`_Ypi93z7u^dW?}s74V%?!7%h?x%>v^4^~99t}+qQSH4w 
zT*IKq-)+(&-`A7;ZBNzYRLazTUk{4V8`h(>P?m)Av7XwVfxqg>MhmgV@gzPSMEA?e zXg=x8uqa(oFL8|8HcTN64BP3*XUshS3W|l!oT(W%xnx1SP>XR%35oKofgx@=T@!*p z5XO9ePZ5Br;KDA>FHo%$6FF=zzs>gYJNDoKc|hS!fkRsb$v;Cu>^Y;^ z+<6;2=e~WC0dCVQ>Tk%#w(}0&gg+`E9^8>v&<0EpW${Oa{b|18#bC!Z#Nn>a*NH6P`kC_q|WlOTnk$Qt})z7a=p327uaW5@v#eGXF@Wt-6YvJe_4C9+|0gx_Td z>$yDnn)#eG3K}q)>Xz(^y>e&B_@mPnfyHBWavtwOUF2Gp5yvA=xkGRkKUI5P(v`G- zTZ$}n)}k8>>;YQY$9lrWSN4GA`>~#FZt}GUEccJ~VDin|zzX`Yo@;MO`_o=;R2SQS ztmnGj>F+0D$KJ|83q7#;onGKMoX~)Lk&r*_t zDCfLG4#jv7Z0Zt|EH@@I1OpNbPTJmN?rBt$7%kd+-MBCLBac~EhI|WLxfl`|5bvJK z4N5Xx9SRE$N0M>FW$gHl!9)b5&aXtg%h&@_i@B2x$~8?BBrPg5;zZ=>k3P;JTZ7b9`X(w)T;vvUb4 zL>x;gy&yf2ZQd-m+GxX|wUqs?PUorx)d$_(J`k!Db{#KdiQ9B}gI8oQ61?4V$Ko(* zS)*S9Z51conp(D~Q&mG>wdhnLUX7-tv0{WOLu+xw+Klu1$WF+W3Idi{5~9nAD0Hgb zNdd(SmRpJaBAGKeKt>Nb=pdP}$%(av^R|EI6^z^*U5bE~tvTDN^C=Q9837UO>UgQW z%XmjP0}xjewlPsw-Zo39u*YImwiH||2O~3D=s&g-1%}A@e@=`l>Q zc40&tOGT@54L0-0;foJRA#pqSzI|L$A*D#FB{OVVKoI*}J&JfB#M^Nw!HO$zShk`m zFpiFai)(SV^H6ciao*f&jwyy|tIY_*1ypeC(!U+ffjyjPU9<@{$_?jrbD;@Eh3h9w z=x_}LbyXX0zOledh-g6jHN%AM-iO2R%Y;h>y1MLZ*_YR7md7+pnuM-*!5D%Ko0qp) z4h44nwR)vW>~$CtGWlE#wG5fn`7WSg@eXZ-N|$XZ*35A;-RAg>bU7gh@gOvCBrJ@a zPQ-hWRcF{+CnQ1fp9nq@0dKiPmaIx}kVqn-DOiMBkKeOqTw~EtofS>up?%1SKCT9} zg974mpiPPN3eB+#CH zF^!zOO-rFljC~Bk#!yBOmcYDR%d{C9hrOW8#}9HqonHMVfRb1?3_GMbg{3>&ia;x% zEN(|KgR@T*+mQ_6-|7ke3`q_$g@3DO`;EmXY)9Yf!5A)ohH6m$c28}0UO@(N9u#fL zB>wGkn=*=jyJ!1N#V6Lr&Ruz%H1Rh*wF7wtof_al(WZ>!e^YK#=JCJj*}k{fEQt7073fTzf%$m_TKf3_EEnU<>mG1Lo#;c_IuP{=J zks|T4WgU%q=kxtFT0fzEB%Ah>?n4cpFFRc!KArah54GLd+ACb`3q%@r0dIm|@7KE@Rh2Lx+5=pTq zXwS&OqaI2zUBhXEq-V0LP!cL>4VK^l44ns{XJ(GB^CDX5>(}>P5#Ivl>(^iDx8PBk zIeYzjZo!jQx?Ni7KV6?ZzTUM`%GCa+>(lM&V-;wRvc~ns^@ylHQdUZ}o#o1;Zw)Kv ztqW3TQ6@B6lrhYU!^gHQ(@>N~yQQb0W5Fsii8!wWOK>)n~MLp~gAOMn(Kmfr* z5~Z+l?Iy94G^y=Lrg=I|+N9%j>L!^at96>!#M6$_annxvn51diY1)@uh!e zd)Nr<4eRrP$FiY{Fa0yysjK=%016;KxxMAlD&c^+hCq{HoV*BR@yZqH(I72FUu9~$ zMmm#d1(R;e(g|K|6wHWjjV}PNU5uV{;I)f=vWRr;VxQa|DgkRW+OE@VYz3T+amCW0`omEp(datw|8n^Sb`}M8l>(=43*pB 
z>sC!&F>4)^m$OHfQzD9xwq~ERCcD_hkT9djsNrf)W^_4*v?qUG5kuOOzi)e}WNe=p zLP$LMTW!fBo_xN0YH6;!kbm3PdnNIwHJ(=Y7T&zJy1L}AR4t}|e%9-3QDChbRar=5 z+KeFRXUDyZend-e9T) zsG)4$hq2N4=`URhsuUy{MaQJN>8M+}G4@H;v{L_j=xy*!`lz~?^eAU z?$g7ggeBQK&HS!ZFSptPJMq>(e<*p>SDCG_`Kh&q>1&rbDv%oNmEyi~XYzkf%vt#^3rT<@N8r7dac8=UARm3C`VKHE$F z?cGT-Fv$1)y=rY+-^&{}m;!WhgXy~C4dz*Pnz)(#+JnBCoV~l5+}%u&5xyRqi7)X= z(?Hxd^wV$NpKlxb={Fz9w+;RDn|t$ZLqGjy+;!MC^z)+#TkUQ=OBo8_m)6 zm2U5#%Q{dI9&I=BXl<@;+mm!9Ex5C?eLYfsteVCgi`j7rFqY}aLaq38SF%m7^YJ6) zYP@21dxX%ybJi4(9xFdUjR%b_WN3~Gj&=FvYD(~fx=iVz;vsEmPP`X1y)>narRr1r z{$rKW10Yh9ZoW72}DJaqQciK)}jk(hA#I@zgo&YI!U z@)~4ggB7OfouOCU;+osZn)-#Zb;};zS-!ve3uQEq%V-9BE%$F6CY{iM9btZI!!Wof zg;{ouaUj7A(7@k@v^JcOC|o1#N1bVrE!wJ~QYKh@#|+$U&eiEySvxljI2AHe7|6N`(|cwbzp;Q}!Vj1Twv zP>nz=-I#{T02!&zEsBrnYYXxciq*K_jOXXBI=>b|&-ftJC1K{TpwGppdF?7*3>J1~ zqHBf*TltvFJfoCvI#wYa;ZAcqyYTwl^5TM9RhVbZb)bYUdz+V5g8A`cddJ9bqKH|rFK+sl2)3<2S=WIjG+$%? zvylZy4Nx9u)%QLeMzGx|11GGuIC0s7!c0lBF|u%P7Qv17pOx?Oyi-5Rq|l#J$jTZ# zx)wk45m~m=Vx~$Qoga^VTj*f-cfxmJ=+_h^?nSMpi9eh z1_$bSZWMNMb~c5}$j`^OD;FW~@X6umS0DtyzQyq>06a3c5^l&9bdCr zQc(w103!)-+_sY#X9(z>jgGPzj>!`O9(M&CZwJR`?%k6~IsfQTav(@K%#kTHx@Xqh zeJ+SO_Xa5;K5svrv<-w;NM^VlE2sQ`_m@z6G}q&0<$+|IQXjAEFLQre-7oq+R!Z(? zg+wMPw%K=2Y;jCbp!_(#Q8tM$cbgwl_iGExbO!9w<$qr){aCsA>{#Dt%Lklqt65{H zb7~hUZT&&LKOq;U=YRWgi7*G6R!Yg=-evlb1L#A3b(dDN1>VM}Tl}rODZI}Bbgf<>wZ_#jfCAgV`(F<(>>A>hW9P+0!EfLz`? 
z-KZ<-vzaPEWGedBBIg7Qih13+Oo}LyjR~%3ZD#ESze_^<7SYUedN}#(NHOu>RLS7h zY^m(u<^iR>MQ556i<(~ZYqnj%LIm9=DU(toV0H)xh)gGMslA97lKMtV!98E3#xbnu zfS?X0JEFM&=mgaoGqN42>FfRutBiMh=NH4>CPLYuh}ffy&o)M$&N z)ljNuhIgzT8WBnLrIJzwHJvc*i%^Pfktm9}Yfdo$ab5l7*0qhQ`zJ(K8chv<6XirDOHc7jWz!Nc&#m;UK!L zv6t#YXU|Qf=SGJI2J7SKw+5u~ny3%;U6>fHj}rtHv}AfB?bqOoC^yDUH72zQT9ksq z>}WbP(m#A*U})s*(Uca_k+<#(>#a`=^ZXDcD#kDLjZX|sT$mV4&yJ1` z7_!F)>o1@-9e*|*9u@k}q!-2q0c4q4p#MXwN|oXGtX#n3Z79lb8%>Vu#*WJ0=BbiQ{;59mrfxRIu=C&7gMC)1=0n{%x5}; z`gPz18Ubp58wwa73gp7A@eBRu0?r|Sln$J*Y196oerRuc)>ImY5M%l2I*G?!XWI5y zrCC4y=E0_OQTd^te)CY%(WnX4Prvy@Q_XIoGu2POc{o>-=%?T8XnK#miSk1~e`F7q zW|SXSIQXicR47n9bvc|0-c}O6lyaaE$l0t0l}!?nhs_INM#mgmoR@?*funt9 z>PEZ8zqem!@4MC>n}J0)lQsxm2!IBeRNQGH z1{f{>z^>d!1P^uMZnAG;NADw0>4_&4ra5I)yIASKw^*Z`ty5bY30?l7U@pFw+o<5o5IlK~vmQQ;8#OI;#!3HYGziGsC5r761 z=O1i}-F$lU7r&u64TZTZ_R&B``KRjoRx@}apTP{*Hh~Bgv#YGZn*K90J8vRJI!3){ zae|KIRUv)jiydryE6?HXf1RAmsB4}(ehitjNZ))Gd&76B;Zf}jTa>8B`s`Sp>9oD) z`Eics^XSCj`?)c+Kx%#NxTIqaw^@rG6a$+W5|{fi5ZuyOnzB=x>!ZrCxl4pHN5{~H z@FFP8+$>fQcPOOXPV^XM6lP3PLHr2BgDxeCXzv*Zeh5-QqHtPtUk ziV8JN-kQ#ibyPcHb;eQlZF zJtxy^%iSysexG>iRITR}%jU*Pw+>fs*$VZO))e6y9V*6GUHzylhW=`s89a1_vEfw= z9v^EqMDT36T&ez1IU*twk;z{8Wjo^GEo}4htDLzJClm4IaTSKJxIR~KK1GFX3q~F` zu{DODv1SmjiHuVzsGRgSRwm)nlSCR)^iDU-+X-WqW^#}L0x>z*b zS8r|QzrAZv^)?E$_CH%n_QAfvoel>NjBzi4uu?giR4hVEzLS>-bYHo&wmfH=v>o+5 z*^>btTv(o~oJ)3L86nlXF+S3)HFxapIT&?Eay06Lmi^&@)U^ z9*@GN`g(CHK6Fp-ELB8v=k% zFD>O?@3XHP0PSB|im%sq@^ZY;AgL{$fBWFZZ^y1>uh)0-a`tV1a_^EI-1+DG?el68 za6jAQNA=zQNxatB7vSCVCLY>2_7Pr)AJlj0^>|eQhM7f@)34?YR5!vfV6ViF>bw1u zcr6%I;W}^P;f*lVc_DsK-=){%Rpseq=hemh<45e{eMKCcU5qc+an6gs+y7~El8mW< z2CmKvS2Y&TMwYiFir?AOfKPY7i*3|*`zP^Q*;c65ddZ9_wzy^iXGeV}H-}_i)Vg{y zxvwF~guQpg-rLjgKriF`%R;*Z6R-?Dv)%2#YH#l1?znD_aIZqPlB@V?tz9P9JqZ!^ zU-Mr5-mq>}PA2;#rJSu>g1M$&pJ--Q232&leqU%Szt7Tdh0|b{wtZ=5PkXV=&G+wm_@X+H z?AyE}9J!h)-x;uaa(`kwF+meu(IeNyR3_T~*-3h6kgt8KuR>-X!Sy%~~& z@Tl_sWH)8piye!3xMjz?_#p2T=vNn4r{=v0N!!@k_qW`xkv;a2nvt-~Cr5514{f=1 
za*`bG$2u`GacOv{ua18H(rEo`@5s>0z0!rR^8&@AvwwK>!T`>l+mq(rfpJ_rlcaU* z+~~yUIL@8-COc7k4-QUTdSPhr;+~cxSCjn>T^IPf%8}%Wbv+LZqRt=LtYbPki`(&q zzC8)@&Ans2{U5|)aOr&S2>+|^dEoT($sRvOFNu{m|ApH+zW-pGom%0@mau5wo$eRY zYEEpINHnopd29ozgq<^b(0_j$lfnrdzBPB9{2LY$K>=tQ%5lR?8}|+_0ugu59a0}{q&prgLMf*l79NlhjK4k{q&pF z@HvJg|0(DDN1A#|$+zxw@B0swk5vDI%%$c&YwiMOyukxgayM>cCX&Vk+1b7#CFc-c zVa2z|@Eh_Y*>Pe?jEP%CPRugSNo5gq6)_tz0ovveCanW%MRPkfi;Fy^&^)OYoNLbX z%e51R@Og6>Bmq38_Yw0~o2RO>IVAWlhze$U7EKE3D0CYO3X!3FswJgMJGez!Ps;xR zj`AUUS*aXc-j5zWbU{aSE*% zXu*X*A6>!5g$*9n5Z#jt55tklP@+D@DJ$;5XnD|;nU|S1wl$b+vo%eCDeDjwmb+)H z|I+kt-kYgTEd=NK^{G7Xsz=weTQZMlfFV<%Y8vwCGxby!*Xiyf%S+Q~zoj<76UxJ_ zP&;&Nyj6Fc1r>G*4ppO%F#qCrnZn*9kJFB@nM*6O{xIuAGu|%Q?da+vH6Po}FmaO+ zTr|zD5HIj^7Z`~YX~e&@)0!M7awtph2Fy+2kW3^P+HzzlV4e-wG+Y?aQrO!B!kf6CPIM4np)8~O2fLf@)iu2ILdF4kyE+s{(q4uj z;uPKGL1Q9TOq**c4e^OF3u+h}uVr1hH6ivxjInOqvdM@cU!<`tf@f^O?Z;3i6U9an z%*e(9^)nDmFN}Dr<&B+A6lT8Vb2#bgPqTuQf2#(yJy--I6ROu&L};ku!f4LRVahfK z2;1BVg)fjZ`=&#Do3Weu1Zz;XX+fM`HM5Is0H`)>Z=?}k(Je0KIC(mrtQ%&P%(_z;8rxcZn3`} zVbVnc9}7g;IATwxBO4o(?K#^Z5LfVv38pYPwiU%uHf-WL!XQJ<$5$~mU$5aD#f`)N zZi@%BG;@f*z`PL$nstSZ#91fUni=(3S8&O6m~}#|o2Qxer`###YSTigDu2ch5L)mR zwpmvKX^MA-^V3(H8OyG=9Zx_;tha+Vn>RknRo)2BS;J6+(%*9@Eyg!`x;g0wATky zGFyymj5n~5VTxoV`lhx=ljj&F)z2%oFA9v++VcZ`>qgzHRxU|=`9Z;&=o^`Yx_fC^jb}CJ@RNWW}tC# zv)o?o$6G!iQ5)wx7Rl)wsWDU0ksm+&wIu<}<{D1npbto2>ODW4p`{tL-1Lwo`j+r> z^WH~7-()RszWLaDk{;`uR01Cz(-1; zJv=4Vz(-0{QSuH*P4KZzIv6`Qwsi;F=zx!62TF1NvAhGNIRBVBIAtT?-ES+EpQs*g z9D!?Lx9+3WzuOa_R`T<2E8XY0OG=jf{M$;8A4*PG9~}6fC^1Ec8vBrLcIzHgf|Rdz zhaOad^b;kf!O^4=q@O4;58eY=LcU9*Sl2^sgrYMITL~cRRUQ1^-UI5xC z@Nv8{chzdbJ7Zc{Mq$Gf-`E&g(r+*+B1p0}XuU=J2uxErLF{ky3E(ki-8X!&!5{U>jMv$AsD{9vhEtIneghz|YQUL#{4b_68++SQt|-Ir$x zgOW$NHVJ>tTp?oG2pnysnC=QnZH0Kb@1JtW;8rhBDUlC-v-s&eyJRkYu(Z?b_wp1# zf3Q^b8Vg)97e81!+Lio(PH)R3^_dc^-**;{@6=PWT2zDv0sF)zDc?`rGT61?vlOGa z>A7u>_&CBmBAJ$0_4|>_i-i(%k@D zTNtO3Fn*@AV_&>vB%dig^hh#q10m7PkC({y|BV?N0`sl-I}T6y_$3HzQ-C)x8pcu( zZ~8hKoW 
z1s}^D3wG3yBzoqM)WNR2AoH?t=pLSLQ{n6v2{H?cRtV8(A(ah!1KObk!4*7kk?9nP zDE8uD2R;LR{!3%9^mT1dZdxdc@OdyZL>FW3w~hCfv*KTwBnv!CKPoqSX?>)J=SR8;Ve*jaY9aaAL zTq#sYfxloppDR^5!gzEney;RnEtxf>vY3BXq;rvtCCHU5M`IgsJ3{jX)mUz9=!aTm zR_J)e!kvO|$amcoDN`7kRfz1!h`*xzzM~9G>eq4C5~wX23ZNomE=4zg(hn_@9?oU$YLY z-_V}_sb&*=;=W+|4k$*_GD|=$*1-7UW#f^x4ZSJ{rTDt*D-02F37NV>hS2po5+m6o zN2IDcazrky+R&uCUi4t#s2KyPMoLlQ%)BpZ>YBW6wy&8NM7wKoP0M?ggMGSoW9s@m zf#{jg7F0AEj)0HAZ9WQ@tU~1obYYoNz!myp%QODO+)GZIA}41{EFZPE9ZAD)QRO1I zk_7Ni)6n(oY_#+lG_$gHx%T>TCkdvA^~fCgdgPKB{EgBMFA&8gB=H+15~ck-*$bFZ zp8k%wkDpX>tH49qm;+UCq?s~R`+?6cLHjBY5!^G~#e{HA?Q0>TE?OS6f^()wnYH=@ zA6QF2ID11U5Lz;YKX+x}&ms!vnM6~PwFHeNo03;*5P7l*LA8uKY|zVkW3EZwf($Gk zl|ygV-C+VHsM&RdKr1G`2THJ@L`!QOX`%?Gpd`eFvr1tp0Z2=C;3?0(t|SpUE_(Gu zYDozT32OX|ENWve?^AD&BpwiZ?+gyWQFFA}2SK0C(mmavZubvuYk_Xqcpw}d)Q{*%%}kAZDKn1|Mybc{RT~o_ORdI8XBGh4KFw}T=MnrM`*YNn%|Gm zY^${XL8<9qZG;AAqRm7eL8C+a4E8g&9ZSLjNGJBcE>ZkSlhrDK{%~u6cn(0k-~e&S*MArR;u3)VPyp%3*fb-EZ({-HbN-C}2R&M$P;M|e? zE?^|Ce_I(@R+E(on2&D_49@|E7aSNa`TFA#7%lAULC>!vKXKGKiDtC&-9#lrsvA+ zdr!W%r|0Qsrca(ccJjT)PIRv<6E}ho({0~M829!qLLh6|d+IjfoYP4^FgSGAXYm{z z59*74N$xx}Zmd&Ty~NKlBh1jJ%E{j3HL}ifDH8hLa?8o;E4VpeF(%{)Nt6&3uSbx( zELe7U0yuI}5U!gGB(>{P3g%>f3%S5fr>B@^2xdmgGv&*+&Y(wn^Ww^O18}~#{AhRn z1~mD-<(?DC$CX%CukgkXl*!fmW-P`9PcOnDW%rCH27)KSf(h^FkcVJduR0}z>8gP( z_m$PTc^C%?<;uUQ1O!E&y^UiyUOQEQUP=x?*^vROQy7n7&*^Kh0{stYMNJ0aIf;Ly z+;XfsM${TnwQGG}#L0g`0G{u7jBg+8d!cxfx1zmeWO!QX%X|%8x#oL80>1@! 
zj|d7qZe?bZP^KX2bAgE|1t@3UxnvAKS>EAwoVjEKKUscYZ}L-O-ggSKf4z*=03Ok)X#03XIR*N^5-_srTEaJ8zC!G62i1lT=vaGn>>N98{ zyE6^siU$-iZ_#k~?t~=A;iaenjf^kR(80r(z)^FN>75zCkXT@J1x|GHji1SS8r*FQ zuCm3aY1}kUD{}5}f)Qb!39ifdXQ~>q1-Wx?!8iAG%2F-7)VQ#>=;4HQ@l+)4!si=Z zaT}Y+p*(ex$Uy*o#^U@G=Nfiep57Mm)i0!zHBk*UQ+-FwH9CW9u)Mo4&~5 z`w?Fp#psB2tDxbuLUUjz+XnSb+DmR8=^73RkWu(@76ay(jWbyJQM+*75v}ky0vWPC(^M19AEONv?<>9ZF z$=CNtOb&yZqL8dICA1@JNjT!=?V(|OBe;aIc*0uZ>Tj0INp(M-L7dFJGbnIYNmaOT zwX>lWEmL0H7Fto|;=fsL+YyEW-rr$X7|Pf+LhV)>KGhz^Q`-(0N;99nQ{K5NNeuv0 zLH`S_RV#9InnnNd0&3=l?)$QQ;}+}g#A29_AntCM z)1syN{XQi^&fhI>do=k*fRUxR<+sY^E>4#Ib*3Iamlr~M9$LyLT{(HO`PLyC^c)1| z22FFA3)-hUIxtP2x??E6Rethla?^U};Xf*ucUKqFv!QZ>5n97IOglTpdA5Wq!*Xse`yOOq1 z%WKm+jN3j}brSE8v}XX{ef;%FDl>Lr;?n5I@Jl%G?@p4Tk?gufIi82kpT9t|q2a+j z+bWcCzc4adCk4{r0PzCj6MI@Kdz1S{2oxZ@&%{fY1_sacUKpO(v;FjBvb%A(l}Dr2 zERL>b0~rlz=zmDw_`CDC4EabkjI@4_H5dopx3s}IzS8ji*w@`h{x_;JmF{kO>8^|X z-8|A?r##YGQu>&Q{(+_}6TN=AH~ZeEEEBzc`ptb!Ct_A5{q&m;=Faf?={NU>&vAzL zpYoopHg%Me-kFVHfMNu!o~vSd|{T1-&P-TO0ax)$U6tC8%zQc0_hIS zm(@82Oz{ZPah)T$n~Q*W?8tM58ezf6V~jotNzzw}KG4=-wq45YFm7ERHuq-&wMj38 zn-c%MgO3Y4oqz8;vv0%20I+m*bzTiR_jenR^6hf-Mki#gvP4c~%K%-MOxnBC)(bFm zDB9nxF_i7rKx3YwDkMV*G!#)yIYvVP7Cp|*C9O0E=j0}w| z=1R{%K@S0s}_wl-ZMkD3!H6GZYCnKaG5 z=qWB4bFuhxpiUN_Na6}!oyv?1!$`(5ss@?9D^vi-$$ohm8tzzIaV6B`(8vqD!$Sj? 
zdh2H|sBFfhlx{vfnu7Ob!;|yx`xxGT^5L-r`w%Mr&rOF{*lce*LeL9lePFiK6plnP zYfILRCrBUS$Ithx`+J4^`+znUM@$bFO+SLxmb@|hBsw8vb!S3Bdm;N^Lh<9 z0jp|t1tWgkJghscmiR@iD1sr!4I&rgH-A!S6^X044}s}N95-xXXUVbrXE()-?GLuD z-#uz<#9&7A%q}ADk)jnfUM%73 zzISuruU}uYB-yuxJtRp*XUzyDk?g)B= z{mBDoE=-Vn_tJUf`=RVfx&6J#HtS4^fbof!_^PpT6kiSul6SaHT>lG$m(J8lHZXv) zq11jRxi3OqpCxU$PJvCQiV2y9`Ixz6lUO#)?65mZj(&PxDXE-jE@iS>vKBX~>vvDY!r0eMe`0y=y{_L6^m>|s|D$wQ zhjLek@?TDeVkQ2RuKIy2UG=*u@t38melSZ{{ccM9JyuFyalOic@-yh;5uS4s9Vazn zh--G``e2N)*DB6-FRS%|lR;dI+4l3{S3bZwQxtPt&0i%6f~IJy;&7F5fX+`96olgSP}y`BWo+tQrfZ#B)21}L#Yt5BTsfG|8y7bGEJ3jA8PuaaBcc~ zyU?R~W`i)zVnSS-7PoTCMaxVQz3C@c4nicAM$!oA#r?z!YT|}_eU`{MYc_Xuj31l& zZrRiVNC)B#BZKW8JDo1*;9Kf-w2fQLP!ynnF>i`wQE9lXd z5YQ?p-wsH)+5I-2_A@t&?VMqXKDdUozFYS8VwBaX%dD`{{kHx8 zX7<%h(~zdAQbbIEgZf4?dkK&0;J@X1jQPdI*UawaQSQoG;k1de*3jMcZs2}bP*W%Z zw^XS{!FSfJyO&4L|A+_8$PK(`Ek613tZQGGBjwy*P$(G*WZk$}_!8!FwWxoNLyV+0 zym)m`Y4-9-de@UVzjzhd^6L88j4fwVbDIgJ8K#r}=95|cf(#!By1d~(ZsO!|%lC_J z1AJAyyzRFX&6B#bciKuN@O8LxY0B?x+Q5HN$~(+xZ-wN;af#iM`ZhsX{{EdHIDfl{ z&pSbH{^h$vu8tmy-TzQqHJQTos7YlpE|p(vu8zCa#_YtwYNH*?WmGK_DKA{r+@&SX zj0U5LtMT()Kdvwr+|>nV-KFijlgDur<4RiTsg_5o57?H{PR+;J)pqx(% zG}ZE8e$7ETRedmTyUbh0QtW96DL&y>i znfN7_HQy!isFGI;$!?@`LLnu^Rs>B-<=K`;^J_qxP2b}s>jgX4ueLl|{S9}y$$(SN z7X=lCK3tn7B+DpZ=}?2pgJTrmGLrFl^)~N6?T8L(`b4pbR?_GS6<@Xxxc!JlGb+_T zp*F@bT1cyV^(>Ix32uw+%JhFUt0D7mVaKzhd8J0uoj7`NF0Z!i&#yIF;!s{Ta>A8i zq5O7pbr^CYN0Y+PTs3PUjpU39mI7!8)q;amU|l(c=LpP~_K_eKPF#LwbR@hlY~O`P zy68xrD$PqRRpJLGKnP^yO*3Jps45yYvdJ^|6TaFTz&HPs=B51)CBIz`tFz>A}P^+w$B*TfYb8pvt3t+JI0)ILxGcln`nHHwWKIC&lYcqN@Tp8 zUd3+0h_PhQ1CeN^2xnaw)=bFn|@8L zI_-E?hqp4?G5?z8x1gObd#ZW%qHl+-ul}wFrDdoWpJUZ+BI4GwWJ$)8SvU|ZRs7bJ zJ(rwKGdh{v_+&Cp2r)}RQD(b^^(Jj)P}FDHD?-i)&2(f=$@dO-t^nh8Y*^nKVbH6S zfq`-rs_&Q?LF6zxAfA597)F9PrMJ^80k^V~Cu#R9y0gCheda2`rKIn)0wrj7} zi{9GZjD75|!I(B;Ye=8U#RaLzq^Rv_Q!GQBlp+9Y@)+kBz(tnnb{APQHFCMb-5R;X zpYGdBSZuzqO&bflPRkD|2=?cJwASpieczn zsfzZ^=?kPpu&Sxyo$Q)iaBsI~FSg%orZ=v@`M1*C{XslsX78&!%PFd+|(m{cvB70~+#NO6JkqPbZS{|I4Us`{p 
ztZRG|kub_HWL7JVV1sBubh}J!5m`JBcP_H-N2EQ=&P9EX26JMS95XW?h>FCOzccESC_A2UK5!99SH@b2Ssg>}J93|s+>>FQitZGO(CjrG!DfVlAc9oqh zYrV!H$4(U!1O|??`}nD+;vTBpAfL}SBe}|dzO>`sKp&(_;F5IT|H1cAUj7wrMqhH_ zENt}H3=4RRT;Tt=m?LJ}2rfH+336Z0UTr}qW3J(wboM49N=Zi;d)&1Zzx*qu5R=qY zR??AQDY0_?2S*gk>t3}0khWv%n zeV#suONRV~(nAj?b12>kqj;y(ghi2v#Q_;kIRhvxCG#Z<7cszOr0i4;5vP^~S-KEE zgG~<0_yUBX1rzQgH3O!iS#`|6Q)+uC`Boi}5@cI`Pjb&!Hj#V0L#^=ugt%<;v9fMt z?Y@;pGUNa-|GQi(860L!+Y3|K)95i9q0PCLW}AGD?L!I*k;94C5X$(b-NO zG1*vzvD_agLc&%jB|Ky#F$NwDS=0jaa#}%mcMj@JGKMDvIf6uoE^zd>mp`!vOxx{Pw2qcr)es<3<43C5bN(rAo@c* zf_NJm`m&h8ec_y#O+Z;ucSh=D-w)w?1omM(Kn&g3}Gf`<rl!%m?%HWw_qVBk3zAlS9EH$W6zgDx?YW^CDV5(a?G_`+*@U?!_NQ>9|NmjHXhZk`Y(KkuI zD^Dc%`dJ-PjLgl1(mVI~ZToU)hI&)m=HT~KmrU*@U4iRZ4^FMnANAJgb%?L({%w_l z%h|oW)KAI0Y(lA@`7ia(CGQEtTc;)|v|Sm9Y);NQVV^+b#nu6f8h_pljE8kA!Lm5m zo9*9MPIio4TV}r)pIzM)KYjRa*$c6nQpZ*!&)TW^FTA|`Lyh$EB8j5SiK|@qi|R?L znb-@PV^O)F9@O}6rBb@t{>PcD?6>%^?|#QjJuFZdsnx}?(#!7) zEA%!MQr4~WXMHZsajE%i|M>I2bJYEoK5rXg$cNHg zX?i@wX~|iMfUxy`OXGHPXQg>>(|CAuUx?^RD$V*CW4}(?n^Z@6u<2yDxj#gSVI9#= zfAgUxc6~Y`AWT31jZ(Y9%N}StQL6raN$xB1Eyu$b4`{hTr@pp>xgxU+iO-f<0}cfn zh)H6Ko5ds|*4uJ%nXmQ@+?z-&J~OxI2^?m+0G0K)j-pp1q#9 z97QRe^pK2U-UQ5+r<-0cRVPzU6~zp!Q1<|;qwF`7ry==q!Dy!!uTsJiGUmaRsn=OK zJhBL@u$D|iW#kAkySyVL5f~#VMU)!s({1;;ya`dhxBRRVWyy%rE`M$pV=sv+UntKs zSARktXK{(L63ISqSx~dAhU?TO*-iLy(#ug+j!bwibYg*f4D)hXkJ=f|y=I?G$RM)A@1% zoT+)n#s-nWgxW#s-l%$Tl!R$}9p3b^%ybyioi?*!(x$1eURks3*7I|y*>9Dd+9{xG zb+K!8_RZDqZ(2j`u=2R(0(ncZO=LD^tFv9qHZcP<+9xqzbzNSYQ*;ytzrs?3)C%)y zZfDR9z=m;XqszAAb$fBxtrs;BA~h>-hqN6NGg@J7(g^!z+!#BfdGvZsXi|(^vCxn< zNt5YyCD`;ampR%Jt5NQ$vC{TAQ?XxOTvo^^Ni`SNKrkiX*FCs0dpSIy{t5nw%$NvA zzKjtWlLya_j!c}JjBDA+%Xbzd&ADrfl#SA2m$}J|@x%iySXMDJ$JwD{v<@r^cLSjY zc}V279;k!%u-#%z7r`vTwFo;0$#xZasReKyw8Pl_u3sk1lov^cEKdWo4Ne}M1i{K0 z9)|*{7TR4m)pl-~|BAal>TJVcZxLRGni!5~2b)%zfx~e!xo-bOfyfDrZq8~k;dVfH zo87`NYTx8uKP)e_=Aol7JS`}e*t#*!F{))9>lP+>aPn;??q(cd`@y%b-{^AO-fX|+ zFyo=H0gLR24P8tgeEa5PAS-_7?OP#mCwdN*RbI~v)@*o!r8JB|Kc{-iEO3t;Z 
z`N^Z{$e9p9Lp$M<1`q9S5;07y7t23eHmFMdNHhIn`Ny|{MuS~_WOu?I@QYQ@?Z;2WTj zU`IG+c0abv*rBnNtgGv4Sj$&xS3f|BHBbm4@w*(>Mm7F2eR}G-W400W9IK@l9Z}bD zSWs0$5f){<2%O}gTp_`(tQ(5i#*%O6z7nj()ge3S^-QO$^TN3Q2n)xmt=c#fLXE%? z3SNWrAB(Jdu;bFNA<0$5?DTR&i8LuLahmLV*|lhnEO*&8G&zk0$;*2{=2u~wpV*pd z&Z&=1Y^@l}d^IqlrrB-28dzpi>^5J8WhSkasdbxQ#m(hWSg|P8=#Vy~dc%=R-Ysx9U(W` z_9wq6yQa#tP*3Sl^{+@j(J1QW>sD3}GBcP2q0>Nq&MPlM@mPOF4VpyY4b-)M?YHX##dvz8f*KE-Rk;i zsg``3Y+9E2?BgX0>i)ZplT*Zo>dvg2pTKc$Hbc(bzT3GuD#Nr+Bc-K411HuOerP>| zTuV(&Eli0_0{3co7zo1Bu&Gog>*jP}TAKj=nkMDJ>@s(~l11<=FElJbe5)7(_sa}O) z2X0IctYq{>$%^r;WYz={E87480!iF*1$85|*JmjJf)GU_BI!jWDQT5M7-6~TPT-6L z7p#}DRQ{JxFVmo#B+MDp4b?`laXpuxZayAQGGmo3rN7RJ9R!g4iQ zUyrN<%$yhlL{?J9JfO+rfFj7qn@WW2dlb}kj45jaB_7EOVz&9nuM~S^Tv``7II4s) zkCVS5D$FrpBM@{}z=iwJN;{!aPPXf+Dv}uA43Mu0j!c2pP!>m%)D8ZfwTDt>@uJ1k zK0~!lkR1WVxy=SO5+W=$`lwNL!p4^Y+9IQgVa7LFI8xgZq@^)3S;VnajuAlzrTqmi zn!9l7R#Wyme)D*a#GqE5yi=B*I#j$0u9U?DBPL zGC@`B^XUY69n$g9GZPnk>w_u3$Lgam3=Iqpq-}_o*U#o6FuVC>J>u9A&&lu;%jsWtuWE~IMb2A zv%^DY2S@q`1<$COxi~aF*vW(qDV2!Y6q+yg(kX;O$D&B!Vv6*;K)N8A`Amn-q`d<# z(8xf18wwa73gp5^;}`nR1)M|vn6(g%(x&~vz}4RLtQoitRhsqFtzM5eDTut>h9ylu z{pP`@qjnP(Oh5hRp(fRsF1KMx(@($oL~e7_PrrFMx1{OkSN51CZQD2NkgNW|o^+B! z71^ELfvs4owt|xs!Z1LW|PC`?) 
zTv=P7+?@)Qu_qNY?0IfsPZgdy*sD!}Du6=tO?vz^0W9=q2dj6j0uq)=K&LDT8o}{7TxPmo8XO6N?>B=&jD& zy)JuGPHz#$p4-5vN)UGdB!C01mBG3taC0ji3>6<9EO$lNeCz_>=cw>j?(dWVed0Di zdui4Pwzo~U9bka$o%*9E2e~7^VdypJ{hbV7U{;U~IqU<2vu_mp1XewM&vWSl z#UI`dpJip(8T!p|FB{(a?&1rfDesBlQNnNK+SELh1x7ocy?sCI#VlZ3a&fg}dM41PFK^z-Gp9;og#M8VA@OD!AR}$)Uwq`d9(XQ z?X7n9HQ4kU<`b)rf=_+Bo%m`^b;W$`wGV5;L!{3#eV8Ltm_9shYvG^ z-^t3c{u9B>{+aEQ5DSH*Eg&RpJb{N-Hi5#v=Dxd=9}uLI6BH+>QgKAY5m*2<2oXVhX65gV7GJ;eYK0zz8Hn1Ml;0zaf7r0M)1 z*AWGyD|L96$&p5lin7L^#I2t;_3LI&(h7&)x+#r%%d8vCnQ5$T#*o&HHsX}YN5pBQ z(340vPIc4>gqhA_t>SUO(pJr1KcZdLYIu%KG!`}B*&V<*ji%k_K!$zW+ zJYMTL-m?-sUct5L%cMs7=C+;5KTOcc$gI=y{R)`)TjHi!@|p9hi#3KzSC6p9u_vH9 z1!IM6g7qc3&X)=ruPCTD@+mS_NOEziG~qKWjg$#P>87NOilA5QV-|g?ufqPZ5~3qR zmq3f;ZvmM>k`1faRm|(RCEJn0r?v_^Af-8$@1&Y2ka#0vu_77-_IU`Yl>r7?kUnow z!P@3V4dY`mgr+|<;%cw82laAL3(wA&k$*^Ouc_$kvbEJOjOkz*0a}+OCDBEj;aaC4 zEn{4?MIqd~8UW>@$6ZK^#mhnIpy?1|W$t9Wy_~7(IpTLQW%+Ik;#gOcmf!?nvHZ05{8V5<3u*~PlQMMrHmw9hh8tLvQiJ}eAQUt;0D`6lJXm-% z1DsZYC=9uauYr@`vXd$hR@o{X)2X1d*3Fk5aIe!(oc7?UmRZth6AvLQ-f$@b)|)nx z#PNWf*W{USo15r{T_xB!7iMR*gPEI)Eekj4kpmSsJX4X`Q_T+Q){YC%fl^MeDFLd@ znF!t|&1!t8^UK&%Ej37JL5U+mN^4U}L~b8>1v4wtgaV1Z8n255r9glM#)68KMXN{C zw0H%T7m`TWGT1J_nKlAOCDsG74vj#kA^bcer^}ed7u8lxgkU znlTq9kAXWB9)awoaTI269btG0Pl4ix2s+7h%m4YlIqr{*vwl0@02hZv|(Hj5K9#!2q(A2Z%uF}{- zy|ZmoB=04cb1f~W!(F9ulx=dfjzZh3LRs+17GVo?UVkKug6(;bK%Wy#qEMr*YkBu`ZQuUzHP=7q#r zy=bTgl`ks;KJNQb^DKp!yxo`PloL`)A^FRkcAW;gq8a3E({IEb-^Fjg74{^_(l3Q0 zL@tD?zUA>h2%!8l1wd%T(4zreqrMLZ11_$wxqVU+eb9rvqRoH65?G~Y*HA6y*s8VSl<=-Bu-ZTkXjJ~j( zG0A{bbYE9+u_S^I7t5}l2g1b4m$d7#C*l<#cXb}gScAXuq2zH#;Vp}1S1JL|h05C1 zN5Y2?GMoZxF;gG~#-bWI4m}MxwNv^QTt@XkXwhDj0%ax`6o7n1!&kR0 zF5CWhO398@BrI9p70iqtjNm-<2cDHe2bI zdSPczCGtXApH8CZn(F>arN_#j7E*(*1HyLYE<9d$yid`q&cNa~ftn{`xtBSLg*N@_oBKvXOHHv^*k$PwW1 zHSs5td)E;}Wz)f~dntmkFT$HmvI%{$|0B2U|Kp|P-gWf5IRNWkx*Y(=4FLz+AKMB6 zrxCqbHK$8QA}U@^?rEU*N|v9dNFWWbL?~`E8%@Eg&`kEPt-ibFqRJi&4zZ~{#EnmUW{1;oy z(R??K=0Dxi;4^+#xOHpiUE$U_wp(Uv;bf1(?GyGBe)4|z{cMKieSjP;dI7$2Z&Qc; 
zgc;q}bjW_fJU@j5hA-)UtU)XrXp3`-;SYAn*fNO!1h6PNjIP^DYO%-QA94)r>)v4FOl^RT~csNoI+b-7XRb+dlZjK+Bm0gAo znK%LpUdi_1Ob*->8y}V^FOkee6xR{t*Vct;mYF{p(BOO#bn!1yE`#Y=A{gvzd zx+{aH$b~*l9pHt8$~f5qWxaT$E>B1KvyD_2e%)@p`Vrcn)t zw44#C?)T~%zhbYAow~Y3ga?K8wLaifX(i_S_8y5>P;B3+56T%uZxR*R-!j-r0O=09 z{HyQ$Vtcq{G_5z9&$K*4$Z4Cs@Plm}Kf3Wblx^I>xn;QgCz`82T1t^&9Qt`xQX!=Q z3Kt81Ae-2xn=Wg;f1ahqXPV-CA9G3;k!fQxjAW?Rrle}+(gSO|lPwsxhr&K^nBJaAA&`VdPqp#3kBE96U+<05A}%9W4U;t8G%R$Eo}2~cd->hqj$Ku;KUiu@ujx&TjUxx8si|M3{AhrHp;`N4y{k(RnrzAu3wQL1R zuDO=2Ac;2DvK7?K3Yu$~k1MD-tRTA;+F}J=&sI>guOQtBYzqzCD8FTFixeTsl}TWk z;X7wjHs8cV?ouji&S5zr5wI?cmAuTe~}-BR$T{98JO7di%vy7rN$P2+|H)=L&U0XZaxbs*1GYp)W7zZrKPk zko7d)g$#WpYJ>A@^VMxGxwm1}?qD}HuV3cD4g~s(XN5w8#2c|Ub%nhN?`PCb>@*0vtwkdREF?u_kFPl5_y19)*>c~x|1(cEyPW5dyWiU9BjgUJc#&zhpi~)QcQ`xo` z!Ctp!HHq2DOuc~S8?E<;gMq9h%{LzCh*uE#jT7%nUh^og5g5BP~v`SI$Hl+$r{9_k8_ReTG8F0ZzpL% z!67p%dpbGo^7eyBkShNeCy9s{HHWI#M$VrVWA5d0kVoaJ00);(B8)&G*)y!Gg2n=R zqJHhBJZv-rN2gUItk|HrU2z6^3}!?wS*2RQ*WQ3@`G_M*_lkm$vVWmyGVaOS|t)e$PgS zFwy$yQu#+q&DAgZipnT6tj=hn=k%8@B>==l0jB7;pTibLsqJMleeSs&Qw!@xq?{4? zXWGTFY;X8FU<=PX%VcRM97x;(E8c_;xJ@w#pDy7nalw`f!~fn=OIkf^+lMs}1}dgt z-En=-0eR?sp(2i+Y;Y$-)=&PT5kAEHl>_RkI4-gWN+T&(8v z7fNjpXZZnJenN-tA?w^uvkf%L@O>72mn8=v`w(Bn(jr=iKM{{GFa7cpI>MG2Ub)ix z*;3QbGrXBQ8eXFT`PKr%N^RZSVTjrnKU->hG^p z7wdrWly*O-I$%ewT@I?BQB>=JP1==*1DQ1660;``gS=!k&FkCaB~AZK2|g=4rRkqh zs8*LX&DGDTFwbLKHEo_7S=-ID&HE8-?S?-WtICi++*+fS;tEsWTxtDlrKVqI>SwL( zY!z)q+UsWXHpIQ|?J)NO=&zO94kRxyx@OHi#nuj1&lNngvSU#DheRmlku`M?c+3Qs z&6UNDV%lY&r{V09_5S%1sqKVfo&v<@OM4%Smtf`drS`{@9)pR8zf>a6yjOzGh(zOo zQ+{)`%VxBQ_GY6NK612kX>E`k-un}^{8EYR@T;J`rPBIqrKT@{_A3GHrsv*rL+NjH zf4SX~TmO2%ySBJr6W*1;uSIzOb*;Hm2Ewhk73m^jKf>XwAXpm~jX-O(;)CXrE`B{O zJuVsauWRXP(Az4lf2-8=8+SbDb^aK)8}$0u!=P1@?r&+(y$pJr2K|l(ed5js9h53z z$TY!_X~T!iB^|sIhs-5Ien&%IV#uwP)-RTt{sBY2d`I)#;CXSo0XM!JhO2sXUo5pf zl6(WhZPjppQ^P%Whr@jXjR!1C7%Sg1RvPfJa!KpI8OO>cWBpBy)jUsIwWt66Qu%Q8 zA3HA_3p__JK@pL45XEk?8zYjT*EdOTe)bB=DkZvfu0yYe47`wD(4!bTA>pQwt1hU! 
zt)|KR4K*3{6Jq-l`JLEx~j9q zm$SXw+tsu{m79%gl3Bq#(sb=v#esKD-kXPguka=_kZZZD(fwd8uu0 z@-HQ>Z`Ycj=Gw#6e^|~;_0D!g>=(B}GTV_mCo0mM;T5|lhk>nRVu;NYGscM)X-5kH zRd3AjVpBd`ABD4th+*MHMnB#X4~+mjRrK-?d0Uj5&l+Lk5m-GdPFI!{CwEV_5m~|t zR+6YV=B^JGj7-`XN7S2M^(^)y@6#?7rBw9X8dq-B> zj!UM2;&z^LluH)KmrD;-lfUS*!qY#}tbE^{%t|DQ#`$PaQ4|#I|9?WNSW@uh8}fGB zb^a(vs+f#F0;!q*_}@I9Jmzzp2iI%&grm8>v{isT8R{nDJBl4!VIQH|iMbRv*W;yf zxGv!Jpki#Lj%{_n==;{v=27{(vnI!a<6Tc6zJlXjq{D{D{0!cA6`lWuZqMoZDO~OD z=5)QA(^ZKz(0qQ?xn<>DWP5GYbh-@{&CA`KuH$!ex=!2;&%Ya<-x!{sl}vm*bD=%h zG!(sM_0!#559Nto_0w-Y5gcQ2snbutc{um0)la|K;SRhinW&%tJyy;t}?q7zv!YqhoYHA)+ntW#Q z3B+8FTbh`_=|#-s`YtdFWM6% zX(d7*Y?KIs490ZS$!2fmesKVCg90-W2gOeIVpT$TrOYFPN8SC@v25THg=*>W?9@_r zOK-HVV4^}%-7e=#(`m_#iMQmUCH@L`T1!)f;TSg=@G3FGT)dbvkKI!}Cb8nUT=bmm zB8fY*La^`@{wdaa9?hD2cgE-yEL+{0&u)9ua>7l>25gz@LAS{g$BrF4-evzx96NT} z{&`vWe}=!V<2_v`j!*QQIDP6F{;BDn{k>dE`!HD|uaISr40DoY!w{`?Oad6LqQ@UI z$R&y-N#gkbm?M#FZ3e(A^)vlRdg8>1XY#SU@x~jqx!Kh#wZ-MD-OE>|^{?Kjt-iV1 zDI*IiLCf!_OjzmTQ6yf;rDJ9D?Z>f^=_;%R>yYHlLoZ?v1T)wa)PVl-1)~SXEQcHp z*>w#=8I_HVGw!g@M=lHxcgEDdjqe$~cO2tkCb6ev@4YolS7%q%%HoxosT&2@7%L*0 ztxcq{boKQBS;Om3tiFCUHGtIRS2d*9YpbtofVUdas=b6jIZckb<4{k-2ubz3#RxCX zEu82vYt{G-9F?w5FwWkUGo&EYufkx@3=I!XkW)RqvKoeog=^Dqp186aA-zDp@uyCL zq3PFFNKV?((cyPGudLQ)-Z;m`F#yr1r_S_!?@Legbf(`-2Rs{Id?P-JcTGQvppE`N+3P(z}O&0TYtTXK@f^!=RRy#shZ0As+9U}nIe@^0LP2Pe4 zl$tu7^ogv6Y<-%L^O)<&Go7zQ?WXLsy315<{Xv{AHDc@47Ks0PE?>{Jh1oaysAoxa zgN{xu;qkD7Ll=&iOeaR}W!@Q)l-q$j5Zg^hM|jhR-Wlj#nCz^nQszvDP>|!`)NODu zWa(#CYg8jC3_ILKVtIpMH@tJpkve%akjR!prtKP=#-I*XM?*(vVdQEgAANXPsNk(l zNBY#3BMlR5Q=-$kF~|36Hf4p0in+bBhw|U-_q2|hW1efTx9rR3-4lo#LkoU~rG&-E z6}bh9aLaHb<66DBV~cfZoC`I97qMnpnrR)|Z=j;|*5ttGJXz%9 z#Qy=GltD$ml&b_oEE$^+Kkb;SVc++g#|t}Uf!njvmTY)2t*iIc{xbX#Rc_b>i$37) z&t#lvM>`B)J1NYiFBf&_t~m(EDnAE@rDRd2NIK!fkm*~AfdK9EaIB6>&Mo66Jt?ny zoXdHhaw9LdQ@~Q8Pw0Pf{|xH+b)+P&g(|dCXN9haTcEetjq-RJPX}J7yhht)f`qJ3 zeH+q)hB1&m3QjtX+MM`dl1Nv|4rJ`w8Ti4>TcW+^*zu=zIvhVWaqRf%6UR;;KUF(+ 
z9P~L`&LN}gI9)_k$OqNlwEl5R$@u0|M=4Zv{Ap8@>h|kX;}mgNQaaj@dpELbQ>l)} zbn)`5)Us7}VPJ#kbhrr;NLsUFfrIRy! zA;PJ}A2@dTfW(!`%IS=0h**=RzhQ`M5|iv2SJcL2VMIV(Te?BG--3;$r#sZNFt)pP zZT0&6(KSCLo*YH&YeDV{`8^o+Mz2iA4agCj=e$x zP0SI3@>znOy|w2TXQu4T!?N(oB={GSCg=hZVqdg@>yiWK!yR6I>U4Lv`0Q8t(xBIm zEHPsewcFzI9xZrG|Ml}c#TXmG-Sh6-F( zVlv|fHY>}Cdn>Uh*-S2anI#S zq-?hM*p4hNbY+Dh<1TE;lptuA8aZ`Ly6`KGhQ%mN$5NGX&vxStrv#q3NLxwitMMK zEH$JF{v@X5%#!hU9#1x;pYfv4%Q08phB|bQCp!lg*O01Tn3`Xk&5O<{<3=eIoogqA z_RRd^)aoYhlnd_+Ev!Cua+7zO<2!VAVXK!~;!AYKJ12TJX$T9->jD!W6l-YHm+px# zkyWg3adCc=cG}`AwB!5kCNJ%XF9A^BjsLg3Hvz9JtL}Vt&%L^mj^|sGOI1|eQc>jr zOP*@NH4s^rZGkNrNiO3Qv?R;2tsqNAL%FJG-j{;(gQi21fixs!=!8x}LkM(3(&3Si zkbyLLRCgdrL*O-pG%p`XXu^=8!`J=2_xrE4_c`ZYNw#W8Ub(iwKfl>hCv1Pd zM&I)_`kra6z~|3%p0CmOe2u>6YxMP``@XkQkCIHfscgQB6bj+!VY`$el+gv^chO-L zm1TA+hNFjQ$+M5z&HYTX`9sAfDY026L5U?KzB$;2%QhkDr}@7%^2`L+W~1$opIk=) z(xp@olwOxe%!q~JrMYDQPn;p$A_}bzZ1N$|)Bsh<6sBbDVrt4e5U=!^mLX#g|34Fn z!q`(la3~S3x9QHQEaY%;i<2W=EDD|Cj<8dc$GNjO`D(X1CPEnyfy*sa3`Ez&1>UR~ zwn(cbT-IDtwWOpLC-pQypa36-DxgWXD)5WDGA~Acmcq*3wThi`CS&*rZyOfEBmYiz zUUrYN5mfNPu^M;pc4j3TcjYA;^{1b@b>|s!^rt`UX*n8?_w=Vf+?^lq=}&*Sr=`c< zuLQ6XLaD_1bH%J;Tklh%ow!)&RQUz}Xid5!3X>!;GI-7?^9uGYGp!z=Bv z#n&(H*ce;fV0`hGS~|r9Q1xHZaiE^+&$aedz{jL0zl2OBw*o#_ z9%|)Ytg4!UZ$mUd&kJNj>J2GYY4LGyRxX}swQ?_ku7aLG|069gT@`x#Mfcowb?9?b zA%`B%0Gm7@Us(f=1n6}^qCAj6Z$mUdZ+9Sg!;9V96^QacfZlFElm~!b#DlRp)H1v( z@hq(o{DP^o3j9R*mxluJD9FG3@{D+VkcJ8H2IzlzARY_PFTXqx4=vFD@<2R9^`C0_ z&MTpR|GqV$KNwg!_zkoVk(N&d^b^QlE1xc+pVO41J(Kz}-* zAES4y<;)e)zZUR?{wu>z+~t_A}EpN4{UlG+`C)b%uRFM0@!>uAW;$Kzn(bY|C407i+MF zbs!|2b(rS%rB;O&-%huAo@83A!!&u4wGGH9HrF!0>N@yNy!u2626jG`hHXsrad|GR zgJO@%ugTWI-)oqFMa}YxuY(26mR}Rr!D431uL;|ry;ix5ExZ~JJtL$3>hR~b|0>|O z1ODZ(4cZ0%i-3)$4=$WIy$UpB~-&Ow%04uvq0kHVt9y%oP4F>X?VL#CNO$np|vlU(y`^ zooh+YeAUFs5g6;zDpN%lXaMFauCG;-D|h`wDczji0|+7iwZmx@A2(T`ETnhczAQ~_ z6K;FLmvR*L*#`&9f7F`{hp%gWY3sz&+3C5E4mV3CdWCL!^}hGw-*$aZlHRoPJpj8C z^$!Zz+AGoxTvs8GWGo<1$(Psd>&c>Ql;|xd&Yn>sU{n$N(&CVPmSeFryxx2AM_mWg 
z>sNlcc4OLhesW=KX@1(1I+Iem%7c^h(+4-C>jF4Z_`80ol->wOqPin$#IjW%OE<%D z{Kp8sJfZ9M?OP4}=FW0L0EkUlZ$5?E9q9%m2X#(dm1jv_&ztM*F>mSV!E4ej*_UJC zDy=~8Tz$0Xj0kps18_4lHMT@ZG>bFo>9E_GjXggZAJ?{}o6v>0`*0B;m2yiZQcBy~ zhc|}D!~V4E6KT3-*+!|2r0sSRN3XVQ8~5G28kU!R++(|>U-5NX$gCq0ly4z<1{oH| z$b~*N#!c(RCqnrBCjViW=STIJ(k|D8)u8_1?9>daLWthhHs;Veyv0|+qb!aZs?oGFb z)go)l*)g4Mjrm#e!dB%?ZXdqRe?P^~I*J>zgXD$UUFr43580~cNCcvNu*p8VHvf#l z!f&oe>pL4EAJt;_war@rZp}?Dtq2AVyoTey1+Zeb@BD)*G(cWv$p{zkO93OKYS~ znxB5crlqx#l)T3J+7rQR7H_N+sR`x}c^~4P zh(+m?S}c}U+%N$@ViD7`;S1-5dKYfe_^!CWvy+B*?@ZUl_ZBD48z*T!*t)g!N4@3Y zH2g9;alY%L9qBbIENSgry4eJZpU37U0XFWt|7lQ04+bIf%+7R2(FKNGc;ydj52ZKb zD5KQt{Nq$+r*5oqR+z?^$pT}{8-};WcLw!}cH00o z6EqP|t)AXYq-XzdZ{O&^(BRnU=@b1;oMB^{9vT?wJ2^7K3-AVN?wp{oRR7>;uReiC z@WzRwLxcTeeZ9wy!6?}24-XCW^&jsYI0n18DZSyu@X+CY>wq^-kI_7IvJbw~s^09~ zu@QJr-%)r7nKArH-_Y^ny@Q9GbsQS#3*)tSolR@Y35!A+w;mtp8y<2{0C`l|1)C-V zXkv0Oy<>-%3*77Yz#(rXY8{ThmAeMhEz77$gA5H192pqwJvMf{|Ik41N*IG@_MSX6 zFvI}-`S8G@{-MoD*T3RyeFbVkm%eSdzi(*RnDU|ihe6y*or2_%Mms&J>tXak?=x<(ii9%60p$Q2u3;eRI%0k@nT~%Sn3k%2w__ zRFJV?nrvM$!l!NPHk!G$yVGFyj#=_A<8v!kS{e?PZNF-9b$zL{GWw!oB>f+)(Net0 zRGF0dDS|>^$LCljdR%*Pnj*XA7fopnu8ZZWr&i5^(1ttgZMAEM(#d9IsoZ8?p*U46O+}HBM(TfB z_I>jPn8oI_Z9k@bmtalO#-;wjRd-eNGlY$mPou{-T)(<9u^7yv?7_+#PD20bZMb9U zrlz54H=|pkhxQ#E>wg$Q)g%NENd1F{#)b}$ji3l0Lq?2B0wOIom%4r>S-v)6Vva1_ zLhBnW&yq2nyH-Vy;j_SW3ic_^lH3Yj!1>cYy?~WUia8_I_Tjc^F3QkkI-*QJbTy8Y z7^(V$T5r0^i2!3Da4q*$U4d(|>H{|=GYGyzxkixYSbkhILfAyQot2mb;dwhRY_m9O2Bri+=s(0zTwg(vzQ zSsBfB%_dlqw|_8kW7I&-X)J$ zATXM^q1O~OX;;M zDFn6st1a6~Z%06{Ql0CuVyUnI+)SR2@CtDLPVWONOI|pi>sl+SHJMy~vXs`E{koJK z+`HN&m%kgQxMm?%Sd^CSr9Gw2f#2!9&Lh;-!TU{O$)n@yFeGx-R%Z zR5x)si^NP^JUE|rPR?Ci1$b&<+q$0!2~HjeCsu_7Co9#$HYyAx1M%GNpn5^j zhc9RuiIGzJ(;wcF$3N*$e|T#MpQ2Cw=@08I`|Weim-VMVyp17MpNvEZ`tu>LfAzwa zqovM$F)_DaVaSRkEyOL;X9b#k`zi%`j|D(kt)mx`yg1f6x+8f|kp-dFky`PWJCldQ zFF&G`4YVGOMUZwTd&->`>X;v5A*Acgt4}HE90oPl;xha5fEre~%9Sx9?atxZBVKK$ zZjyuhI>g?cnc{Mqm0DJKA102 zx1240DbD)u&cwB@1^N;x)a|}%XKb;@@4M-q*kZZw>wQ&NZdLVpapi%y+_9>UJqmrX 
z+z)2U9jf~1fzyjRc&O4_g&f*cGpIt@FMseeT*mUMKZjNK>+PkS9L`Li7VQ(c3h~!o1Ni*xcbmFO4nmdH7{h=`dkLJQ6O} z?is9Pa)X4-(6&QEe#%h6NU@iYKz)2TYKP-t9?ohfjnTdDOBqAmxMqttqk9^}_s z(JyrGLz_Vy#pvBHW0KD!c51aeC_?LSH?`|@D@aQb3{nlE3T9PZpzfLc+bwsxKuohV_h zT4vxXqb%e{U*y%~TPskdmU_Cz7D+8&csV(A!<)eH@)z_67)lPq%WpZ9{v%d~7}JMa z#!H>U&tPR%Vgn>IcWMi(B)}eSzui{Jf=f^TJ*^UhfuYl3l{`@ObXX-?VCZyMC0bzU zbXX;|Y?V%jRVrx%yz&{X5}yLYE5j<$Qcq!(XaU13!z$4NhF6AFQp;d?C98DG!gASY zW65aeOY2_tcVeceju@VtCMFBfr;@+%}1%zWS8ZkOj%OD)vHJl!Uj)?Z1RJ`MNJZrENQ7teg zNk+V$xhHIy%GqSPY+%shx3feFen59xE#mh=<%Z6mDisF~l_WSCdt0k3w7^run9Op%!22q z9^(KnVBABHFI0RVdkFG{b-s^11o^^s*QftQ>2S-n*4MPWf$UU&kUWFkx!gp|87LE; zS)3+7hU6kq$P7o87%WM>Z8tigFGRRlv?m`pH$BhX!jEC5K|PRikV&l&7LfI0I5NCQ z$b>IYMvU9(WgTx&!a+%+7M8fNL0}unvN9;_(>PGmSL!=}Qt14O-~G-b*cn;Z3v1WeU$ex?;ll0V zb7`K+2LRP@3jng|h)tuS^oH2vipGkB5|5b5PnMFKJO5?7)p_y?QDR(kEFIU^JSR|)mE^wh-0oP7l846E$m&O3Xfd43 zCa-VT67mDlm{PPYiM@1?g6$o4vd+j<@Jb>RVk0x(r3wBuQl5A<-|r~Y6ATssrk~!> zC)2aU76-;#-yW@(9ZVoCd>0ezFLEa7M49^pu&k(*K@yzfdEvW@K`5@k8uCM|@vwyi z#&|q)8n2(5nK+M(v}`YaM*}i$uA$Y8&v5^bBo)jUVT=V$$X`8+xg}wmlItkHzrU*4 zlUpKU&8B0Bv-+6}7*@K^2s7IlUXVI(_IYu(HW1Hg<7k zNfOi8(<~N}S@yuA3aKk75(Shw(^(y3KxMVFgtAfLwK@10{MsE&-AFVOHs)tHZkDq@ zi|$(>ms3^}p83?heikBok!t3bXPu>v+qeSSklF%+5l}vb@j{T{_=7%geQEP&?+{gvD;U0O=}v$sIMqEoEO6|;KgtZuh&M?7)YD*Vw&)upA;|tZ_;eg%4?OMRhIfcwQ6)e zNoC?;+?UELV6K)V+c>IzeWPk6lQk2RG{?r-q3z2ARdo!4p%DcoL)i*;1Ql z^XFIa^s~z5FPTN5m!B)O{6hKfQf8^F%g>ciIltAx!r(WkRVhRBmngV@++idagYTte ziJ%IKmN{jD85Gq@h+pb?{0e@(bm@}H92>6oqOn=%dwn};+~z=$)Hc$w&dy*;sDF04 zJ&Q5&9$z|cTxUE+tzIRrKI~b!BF!$5e(mMiF?}O(xd8LocnnmYZmdo|CHSEzOIDlC zl3nT{ieSWYaUyCK4CXk~`u1@J^?Gr`80<>{2-jyPN!>Rw$u4sw!jN!RH_FD4G>cJ} zh&Yz)(-@B%sISB^#2FUL>pABV>ZdE;4Ba(0O> z;K_gOeVDa1-JKf*F?E+ba-=z;uI5Dj0`+C=_yw_s>w-H#6sUDd2`{lq)5NR9Zr!=a@b1s*a35_P(c(#5*aX?3|Pir)13eFxi&3x^x^JPHrW3IiFm=D#2{-|`_mQC~JC8T6}6%=S(J3^0#b3x%5l$?Oljshr0Mifn_#)$?Rm zk(F=^V=H`rK_u(5eDVBrLaIrul9m;U9~gYN_t?OpvEJb$Cyx_~l$Cl~{f!{(wW|V8 zhUvisf3wtfEf@5w;(=1#KVGW5pmVd#qTG7o1Dy5#2A}ZC*8Bk??c=ITb|Un_3!f-e 
zw&3tO$s%6Vp40Z-dr$Wnw60L-cLrs8W~y5*c^1U49+ANCm}R4`pXfU-V<(O&nM#-I z^9PLii4r$)e=`Y#^59dY%7)GlCXpn!AD-u|L>t}Ml~GknI$4;?4)FrVwX_w00KROw z5T??&r6PKX9`B*Mn=j@)$!>9D(UqIQ%oH1esdjg>#ogh6zdar;oZ+G!YUzwK!ZW;b zdHO7g)ZmQyw(E(|yMLuXYC)VVL_CU%ki*+hYB##bsELOj^MTCj0!Wsu+px9;$45qo z`+JWM3?3eW@kl(le6l0Ih-e9A(Ab_KTKdYTO6%6A-x=_XnEw4zWh3s^`-)?OP0Q?d z@si-$>u@TF9gpVp%6dR`W0U3P%XWtZLXxyk>C(s~~$cMF5dhNbVeY;ZYl zKU1Or)GNZ^=r5luRW_;2psr{&4StFPm9-8{dgq3*rHjyYPVlXa{khVHjp+wVVQ`Gx zFKSca%^kY5gpV!e&UX&|mpZ}w2koxm3`v-w>R})TEOAe^>eOqN-U(~++!)llp*XX}i zs@&T7@2e^lMRwKm&OEgN8P7F=xHXxfZ#M0}uy0YGmAVG9CzZ_H93UWQaKq_yv1z1!l0y{@P6um}EA&GVnL9WR>Vh?jPN|SlaP`IO>y_#<-%L3aR z6kt|qCfVg?Vr$lAZ&S8zZj!QfxS@D*TnWn+CL5K)-H)jt_|#+FQrWv*ZglH=bPE}v zOhq-ERiVIYD2TTa)88}^WTjb4#t$q{@!faddsjZCMY16*^kkXxm55WR^7WF)W0X#v zE%|uvVf5TV20k1}WMY4-x}Je>2{nw0(DnXomX_UhUfG6~N0Rbi!h>`eXx98Aj6;ct z^UfPJm4F;DW3IVfi2oVZTKQEu8a2`k$LVCH)JA}Ed=2G<#D$k-Cq~xD);d3r)#=$f zcvros4#W1jNfNutera?Z)|Jv(STHlw?L~`|q=bcLB~1I8;CU6XSr(>Hrag0EtTatW zl~1;(J~Rt08r-T{7}yCN=A8P9TeO+0sB9g3t25(hZS20U$rtva$W`!$eT(Dx&Uamf ztB_0gcwC~V$yTleGTw-1!d6HrA+R@O6KiK;-y+v%`p;DgPRU9tf2-oF|4*sjA2zW_ z!sU^<%A*A*Avr#rtI4J%Waly}_HJPU?x0{Pvcp7Er(O4zym8Ii2XOsbPb1s$)Lt6P zi6^{gNAGyv55qc9V+lQA2Pweg5d$NFp6PZT;UPvqUhPNsE>Et|cOCHP!t4MkDP%O3c6157aGJa7s)%9{H6+0`k?t}ub{Z0snUD>glE$Bk6BGE4 z&auGV5e(;$fW9)Vh*Zoj#7ZjIeJuDH)I#Z*kyz|J_WwItkvog!_pG`QgHZ!+5~n1& zvtFOv{DSmrPAhD@w{e1F-*x2f)Ty+3YuJ72{@P`Wir+-($Xz?7SeP&BNlDGk(|T3E~?}E$U>R zQkFaV3DbVUsnU;><0nYF&>NqP&Pm9Eii==3FTGSi8y&iYir=$$*r03nCIRp^`Sw6k zSbsV3Snx<4;uUX6+HZ=#LLF~OZn-smpHm0TA4-zDI)7Z$!PZbRLPJC5GY;-;6(u0_ zc6yQd8#^^ZX?BG@RNu>foFmisV&k+jl;PnIF0z@&U2Q93!@aBK_s=~aoXZF7=#EPC zdNT@FW8iwkjv?VA{>;h=-JF?H06ZIaaV{_yW~fQRU4%5NT4ZMoS6fzt)ER&{9er>G zhpU41$t4b6j4go|(Y)9~fJJ(S?T;s>&K`(*Vl&3Vxy>|vg&49%#mdl}EiQ4nS}*s| zwk9&iekf_{35%<;??066-Jib8g)Fw%k0r^W&bKA?;hs~L`l=aP8u?1rB;(C`Q{_o-u z2d?!;VrmbXZsq3VtSL+~4udXeXy=Sf=i))g;L@DOKhK(Eni*LvJx7UIo~k3|{1R8T zu;kTNTP>*U$C6OOmL@3uu>_}gco}u<$C8`+>~NA_{c!TflA8~v|EbkPGrhexN%nRA 
zb}J<+1d~AWDjE9=2P)4AdUaT&AtYj>w1fchwgpKUr)=uBczokR|AScz@#K}^#tq_g zlx4UIr-*8A!|%cOe~}x?;ONy;Gx!xFI?N-9At7&{e7Vu2xrlj_y=18qR^w2iW_ff4 z3$xXrJp5rQEXpHqG-%ml*D znnvgAxUz5RtK1{+O-Nf1n(UhQCcF2hf8mS><@SRK+WsFU99cg$eR1hxZrU}hT7Hx! zDS`~j&!}W}DP&MkKJF(Z@r*pzX~t!nOIorncq=sNiNf)k+l38BYi>R@-!pITY%?n}^oc>x; z?@IsH>8_(z`Ryd>?)KO%UxbErpe=|t>zhvp8TL3_iYW9_ z#HFNK#>$PU1|x|dvc59pr0h&476XkglpHPP&Q3?wfWl4x7=zCv~#8T#;&sIu4B5)AWnB3)BSYP7B|g0ru%7an(uc; zw7ypPY?AEge0?IV$&Ej}(q}Q-3fbC3^aT=+M3Ku^lWnZZxhAz^BD-G$bN0ZTOtrxg zHll*1ijWHqfy^c{H-f7dQR~5Zk0k+pOc9t#s(6l{5C=9e$;GG6O^8gl869m4Qt|8@ z1ZziQXVGHv)N(!@qwC@n0!6F`f#4(}a6Jh8Y|?gn{1pU#Ho0?q`dK%5H`FR$N|O6K zKj99M5VuwwEM|!fgb+0&&@Is5jOLQcv+ZZ^XHiQY!~{E=ydcGmLqdr#ortje3UOC1 z_}$uvGg$;(10d*ke&)wz(qLIZx=X8Te3X6`~?FC)CxHvKY>aGyMM76{4j>QbT1c@wrF5aJ+s2JNk{Eq?_B=~Figo+u` zC>NHdt$+ncgoJU&$gweL@*Ku9=x#j4r5Xc+kQ@q(ODb_#REacJ;opU27O-+=>iqPq z9e%N^O<=gMBrcaEZ@~;;pD894vkM7988@n*a}pj8_>YJm+ZEO9`~pzU$_V2-bYNK* zbM>G|qF^&I&uK^n@$(Bh{n9hzG77Yum}f#nwW9S9!+ega?o1$jk@PY zn#cz$he4K#pe(yEg6ge~iYw#Fg=kKS{gguQrE4ivQx-PR>x*V&ILhjWl|3z}&jk~0 z*c6s*7jJ^JpqA)z*#v2QGr6fdkk%$h>zm1*ed!*ftj)E`UrG3P)nhsePwaRwlj0&BD6G6BW{G$gnRsz?9=KkSCuTb)F^7s!-+*)8ZkDZORx7>--KOlDlA8T)RK# z!S1u7-a;s?Y)G~~D}+zL!f3TB`3}k!s;~;aF(HQstVsPs3aWizao+ybUuPeB;LJnE z)$0Sh&phW`Yks0g1w@p$hw!jcDsUj+u|h30z?*(r?+6Kw@4 z=H4ajipR-EDG!cgMk*@Y4v!Yheztz!t1mp!cxt#~jgX27%{_Af$t> zvA|(C2!*H@2*SwkDzJ+S9mQ(5I$d2Onu6B7c0@Vrk#8ybGy~_E3}*!ft)y7|PtqjQ zC$($4ZPyx}RzaO&e7a?P5pwuAKJED5l~LMAC~HK3_yzPQ2Sz3g*N%Xj z-GbePbc{zQTM(u1DO1VBy%t*#rSB|V50jdp{|37lyD6~VKq3a;WjMS-%ff_jw6bVq=^ zb-^08u}m3r{VJdZ2(?AP3@40W(RCg~P~;3@fb3YTX@pGWsWf%>GF)Rj!W_KK>t#?y`H{bP8;g<)GEJRPVVgd zm*%vIcC9PpGHZiPV1?U+A`yc{aup^l4IPH<=wC(U46j@8x9hF}PXv91Ea5t@iE3=7 z7@eYT<k4J1Q zJN}8|N86#uQU5_>qAyB z#%!k^Lh)g0%8?@WSyc+I3NOY^b=3cfGN~iNuXOr}^7S_mU}rD4tJkfcESJ-bgzfTm zJ1_iBxpJMIRI9Cw7nT3BT4p0m^sLrehOp{Ufz>W$O>h$={UAorCq0;nb0HM#4K0rO>u% zjAGS3Xya}*|C8O80f_8|hC)FDlIR~u{-Deiwe!rHrufQd%auDiPvyfwxFOvE7xHJh 
zLuq;*Oosb-cmXgg+-(Ugx}cdUI`%Y;k#LG7iIMO&5F;~E-69iWnlq=VCejX+ev`J28=ajhuqi>?YWD!i{+n-$Gpkj_&o&_z+Ugjp})Rg&$+`o63 zK&22+8@f~(0MZf>Ds8u{rZ1snP-w7D2HvP~N7shxmg+<}d}%vSc(^>bL@iN}9n5%C zRs2wMDDm5;&G(6Jrr^gs=lc#1h_~7LbT?)QAqySzr+3i_4I$Od2p+@nmt#W_00> z$8WaEg5=<8O9M=fu`=NPi7+-8FvcO#pOTfY^C6(s-Y`=s

34uAs_xl5Cs+4(`6_ z_* zf^PCP*9C^ZD6{4fiSVaymMhnT#KJg@<}qI8 zU^WYv(7@t6;p?Kl2^f#cL=;98_oKrt*~}EQ{mt^V*QE<#W}FrOwU%wNI3l}DX#m3v z^zWpZh756eUU>^)28kt@z`+!h6}bk@qK`PwSv`meRNqsEFU z9PsUO@}^3;^AI7|OBa=o;&t6ag<>LECrns16L=V@~uF8C=nute^r1iaUWO z8s@>qVT!(mY@~_HoWy5-?-d6W=c2!SKyj`&YQXO>T1#rxe^yD}2CeO}0fXJFoOw8t zO%n~@*(YaQQSY3oe)8QjY1RQcgbWhd_RQHU*@d+Zd%1RW{^)RHIZqdUSb<^h2I zaAo_B^q$Zwqx@(Ep?hZxooQNiX1{baPx?;D4L!F(Q!=)KnRymQ}<_o_(EG+1vLWj`GORiXv$v3 z)eCY+T+p|s*2Y9 z6XCNy9jVT7G37u>kt)zLo8PE#r20R6HemV_6)q%wO};S< z%qX}tg(w}1s%E{x7#=qQ+kNq_38nD}t;f12xToIDY3KB$l6-`BE|KaZHuj&tbV;GP zc~E{HF>iEuF~Z_|lr5;+uAq2=y{v5$DhP0acf#&ID5miIEU5_=7j~Np(yf^^{N3}I zZs^`_-~`PkQD`pnkH`{t{6wY011Z`Ph30;uf|c}Lw({66PgRo6&Q~@&X@so6|6IZC z8aDgTw00QEIYKsdgO-pQj+ON8(!AzDsDQm<`rW=gd-ri6BT${rcYN|{tWNYc%HGY3t!6OLYGKV&^7W$rD5ZTa}$q3V*F&0+#$wd z0uwJ38OwNHh%*2<`Iuzee7e$pUHWIbWT6`6l`mFsuYSIG$%60&o0`zV%89FZ3`e1V zf|ne~1p%p*T@I@Gij7U9`1{oXfu3dAVQ$F73?_nT3V}2MCt;BuotvCEql7$J z`k!o?^hsbB@vln@N<;hX^CH%dDGZhH9P;A}ZxPUNax*!@hQbULHk1_TlG!}If+k6@ zc-`Q{^d>mmto8NMOob997KfSLde&O+Wv4_N=?Uz@qQAtjtAXF}O2N3I>0 z*ro+%RuC`?0m>+bnS!$5O;sylR^ zxWEELb-&3$4#U|l#dy3tWg(@51pU~Ithea2b%&-QU*T3JF3G*PwC6-c-((|*N%#hq})bnDD+wkL}Z;lryMV<2RpqoKO|&-MaQOOnj_X`aE(wW*YR}@!?#jrUPzPNO1RKxI$gy zV(-XYPiBV0I8PU&@KN1jgmLMw$F;-G;1-ouGwVkH3Rm`=P0vNXrpO@!NY4R}l(19q z|JN&A);Yr#?%>p>`mIXxWNSI3xpz$ZP50#)ou_AId5SZdX~fJY8tR{L-7@4A2X$e6 z8G|9eRcXIAeM%}?hve(uRB)MowCHuS#q%^AwsumD%nb5CG*%CNY=xx@QJ31QmQ6%~ zgO<$0Qy)F;^u3xiJ~{e(>B!I^3mU|N1vcWKfOr-5cF$dLdd4%EkLTiLcFgu~Dyf@= zG$G%=so=i-mf{pT#52FU6)XSCWy`&8vgOj8i3OzQgr_dEr^0eGb^FxTh8S@|t|npY zD&dhlq_7lZnew7*arLYb4*vz=Z24KfrShJshT z9mNR4!Q5u}Lh>YR&84$h^)4B=PVin`#nK4rc&8isU(kVt1Sg9Qhhx>VrV+t94`tz3 zNaVX)@z%Vz1cys&)i<`b{Bt47^_ti#OlBcu8#|U~G zUb0~Ezzz$mO?*&RKfv7}&&_Iw98*D!%w?(f-ornkB$6jT2|lO1Xc8HKGs=y$dSq%b zE4Cz^MhB&yS!UdK_kjZn@SFF!9ypM_yg+ZdGwKItB12tj`k~hL7aAF^6B)ka?}H2( z2{L3TCqtSL!8;-u(gdt`L^7la5xgUk;W|lzcRt5tNT)zRuTF+EZID2QG=boqBEw!b z!a8}Nf4nt$5ywMAp}xIt+bVxJl&^9PoD>D|7qzEGvZo8iqYuAYNR#n?yp<{`;(Ih1 
z@5fsylOkWlRmVu)a*?u?Mfn>Pg_@mcJOrcI%XYCo$@@lL5- z>$NN1)5CSPNOWWm zs>I$sd$Eh=91;@f+ylbfAVX)?n5M5zaC1a`&; zv^RJ82w~;S+l++>y5a<-&v-5GY%1Ckd>jYTmiR#H%`OSn+m`r%w%}zOh+#j}nmpJ! z-59MUp&-2{4D737%A7m2W!668K?bD)z!=&6 zaqDf*e5kc!z3myA1{Wu;+Q&4BIrvcPmIo}{OHB*+`cP}_{`7xrW>ambRlm~O@}_F3 z^J|;y<0C87&57~>0uhvJ6L@Tg<$YO}1K7U;yg*Nb4zt4RXf7_Aa^1yjV84z8F)K}U zqc&x+ia?2_47l7dWyviJR(3lW)fOQL2fAJ%8$VwJmy>xGLRc-GM9ryTbi&P+4^Ncu zQ3fYbnZkJ=^Av2_o7&)E+E<#XP>j##B3=;b`~}gAijF-H_NGYJ=Ayj)dwVjC?6t+4 zW()P#W=^F`iwEjmi%Vywy6Ue}6tKsN9;ol>*|WF)Iy%|6=1z=(Osv+={-@Q?vR+;c zFp7|OnqHR%#Um9%~_*d0$G=N=z9#?FaB$i)~%Sc>C-hs0~8?q2JBjFh(FcVBtM?|EANu$`85^fVVs%p-twmQ>be{^ne7tdBSZyCp zaDG+BC)fp0%Y{2f0ycTes70N}d8sqW{w8)|JH1Gtj)NcvGNdDynAxaAgSyTRs)3J) z3Myla2k3$6_WeG;d$Rcn8R12Nk-ZDW)1#-*3$|oT!3%TDd6@MFNR}u(JCspZaulDR z@*MPBw3U~)u_NMkm-l17{T2FAf}WEDihIMg3j&=*a}Px6FWAi7?z`-=QNHRzzuAm( zJY~l?;~6Us#UgSN9gnVRb=);XzhBK`#Nb5RJQ$ zIR<{5Kj#%2%{mPrm*nw{G#7kDd3ZC^asON9$44mQ@zHW0c6eRG8o!=4SbK!UFdT2vsoN-{}Dwc~=qH#QWZl$N+z>GHcRzQcC1m9v7p zXPF&-Ay>i*wY1=Bc~PM-c(LqNz^irW6w%aUIOz-vgMuj+s1#{TVYE@dggcfYo77q? zQPB6sbqfpPm11amBlU^oXEla)nYsW4Vv+R$DR}&{s7JA9f7Qy#{Od%*BE4_7as)jm zOPT(a^vX{bCK`O73b7!!d(qh$9z}iA*sYO?InsUBUiK+Ew&pMa+5K63B*SCEFuohq zG=cWpt=IZ7AWe*eZ?|#`_^%b(w8323Rgx0^h@4r>JZ@C?{IOi_#%|Em!{P+!awnj; zAYWuT4T>H>?EojlJOQJdLz%US1H0$|i$jW!WU}o+TPN`@Wr$IsB+hRx+d8WxMaJNF zz@cKEaS6xRnQ^1MH_ucQx?Ax6qR`6dKkyi8UE}Jl27eKl1JB=GUFV@sG{N(CSMT&> z1T?|(cUSl9HLG`nyftsACflDDQ#1bzU-@YSeK5issx?nuLz5BSP$g>h30p{7Z>(bV z&OANGsGCxteuP_+wqx)cnF}XKc@D5Gf3xW)2x{BQ@hj)P=EGrdcU)~Sd-RRf;JBh` zpkXyXDId&RHA<)|O= z7z6`KpA;y}M2=FDd~X$z^9JpKjoPr^Urk=z`Im$p!59P4HwdsISb3VE`gTv>2^nIc zhWl$wBs=z6AqSw&X+}*IYfv^#uq(Rdo+p(4{_=S2~am!$eK|1_gB|>m>a)>vhS~MzsnY! 
zCQJJL)qC$tKjj$Y*LPIOh4Jf_c`8c)W^}a7+SS?)`=PQ9z>`q9}xlBSWejXh@Tp<^d->bFWa8hd$1wav4@(_{hOQLSB* ze%coR{_+#mX$+iQia*r{87!!ha74_~n zm>Q`9Hr>p?!=6Il^2}jO8(72&J#ERTLA@Ef>d{F zZuB7VYP)k|nykQ2R5=^gb;gZqy{pRU*@?QjH9cEi)b}#g4~PP3&ri*`(hg>fW}CAJ zxwOoHk(-Hy!Du-mBmAiPO!cgB`O}2J^g4(FqZ zZ9lUJmWtIsvuGIWj6Cn6oNa?Kpp==hC`>Ibimt7_yBhAk@+)|Fca^)ZUvfO~^82dE zkr>w3&wOO%ptP zMB7_DcaxOrU;Vq|x%AJXFo5%1ngH{wk>}C`m|qpoHD0qx&ENcAn%6KzR);w{ub~M- zelzkKnudYEYiPo2elzkKHI3K&rg+T*@S4rF>c6kH{9o{z9u*JE64MvdsO=LyukM-X zsn2#>(4%5e6qX~NqTFo%zRLONzm)#7Sv=(_?TGKkj16J5N<^(3znO|@)&p#|5I8KZ zD`yP64i1aF!Z{YXb#Un@Xl#KxSUP7`YB!}yQ)vlqT!$oh1*Oh?g5u~ZlR)-8bCB5Q zI*-TmGQFV0(;C-3o6b@5>Yo@N!DoxdD7fh`brBFm`<#(6?jJWgYt1-7lx9($0ZwW$ zIEMj4tQ{!wwWA3FpQ?7amw+Z~_mphZ)#QfGk9sClpo$rAGu<+#G?Jzz zWfuL8l??H_A&ZDK`sl;cd05a(&&J$Wcp1pQ@3GL!HB(RejS?qsx5=DIB9uq-_yPp2 z{5PgCMqOluY%de2c7+^1>sz zEpNN$;-<;s|EKD8*QdWCt!cCP>gTICuzsFkgDe6}1g|=F7U|8hf^VggVJ^aaAk~-+ z#I^@h8O*b4j(h0z7+ok=+E^iVq-AN6V`rCTwDCY{l1V74NIS*$8vY#Rj8prHsxsgs zT7f?jzPR}3tLq}Ai?lyq4gN4*X6~P_;tzWXJZVd<`o(I?>)T4)&OLR-`4)6xgD$Wo z<+=J|wSBXvhTkH#^e0s=4w4$aXcjan|Eo+0^K-e8=*l=0%ca<8UIg_}fGnfY-~^Qq zG^-t?7sq5RkAQZyBeemOO*BnCZfrFn4iQrfxEoi zHzZ9ZO?q$RwTZJ~>6e zXd$RMA2f=k`DG<_FWdhw<*V zCuf$b2Z6(drg@vu9dU<=tus06= zlTvyO{=pc7A3r^~G5hd@AUIL_&V8Zx^K;V2?N)7kd$(VvwNxJtmY}+H=D(FnKb4e^ zoH+QNkhXli0BG;l(1ETpVW0A?p@4XdYm9!BDScld|$kFF)%yH zb?aV~82im!e|B#RqKKF6xv)+4FlC{LH~~G5bb#mvbOEuOgZp9fqoAiwQ!+0gWk-5)$_Ws=kbrvk(hF!DuZ7~F(SJLe0JB4j0Fk-7>`n>p@@Tnu9K&S9n5D_WNgw=?@xDQE#Gq5;;3yl~ z%mgY0*3BwFfR+L-{1%N2LCaL-DvIJMWLCtbgWo=5I5Lf?p^APT1I~&=waAZ7TZB@Qy>(?~53jkiEkW`Y|%> zEE_S7io;Wj%K(UrXy*ck>|Jv4}I}U;0r(WFr6HVZ$kki1A$!lXyjzy(SUO#a(hZRNV(p! 
zRe!bD%3Uq@ryXh1euw|*4{vYTWe-cWvi|gkFKpRu5BW%c`olX~y6j zBs|G4swqQ92#Y(zwIO?vBP+K}!#j8%xJ9mqbnNjv<7fFOz?=OF56yze{q3GJ+#TkG zN-7I{l5%n;5hJT~R;Mi37AR%N2oRWr3{IVf5N!fYCal117L}v$UYELXR{cC1ZX8e~ zI}6-tgh5Ff+=ECU1A$f-{W5EIDSEr|Y+BAyIYaBWhp(k|8FEg{&wZDNSS|4LDc$j~ zIR(5SSk`Q=d_5w{BEWA-t}pw!o_ULKQ?TvG*8j-TRg?e3_qCK6#=m z6d@dTdZxd6Tnnqqgvbj-@Qiw2X*Pj3`AuaM0#|Y&Q}XM$sh}X!pnH;wf)E*DIqdKV zQlNCSR+TDZ(oum1ge+9u)MNzt9 zh(Nax2q2s(@x4nfjg2xTFaLpY--tL3eo1+Gyc6zz#}sb%b$~U1;A>(a zTUr#ItrfvzmuB)^%wSUKV(2*t8G)BEkmXz{pN@Fz3L=(Llv9@@Hbin-Xl*#`AtN*c z$4V&(+sz{cXSl6TwlR8gz5(WzeG}rCBC~`bG~?x5vOaEHa=e*MHi_MQimUac?hD++ zF+u#alWN91Z6!#A6!C#+2Zsb3SkvciEuFc+-r4xVMgfwHO*pOQxPbl-gZiwvikd)c2x{GMK}>mm_lRg}q0i16Z z_GXjDgN*WB>RdA0s+mTh#2N)yGDWA*rL>HE?iYY#nWB^Pt2i51xjdED0xbUG1XpC{ zX3W7TSb$T+r0U7EloHQrK_niNmt=)#9ER)h0`-%>Hv4x2%t`FQo2h-2FBj>WB^ zoswy~D8XTcOyC$40J1cDSr^y~B3c$pV2hJbk%#&U-XIeBIb2$SD#Izezs~_@QPVTx zzy~H~$4vcx1)3y+NL1g8yjJKn!nq*LeXV9ll4qMPh}cMPe%g?WM3;AsGBK_~y!amX z+gUcDQkS^uQq+52jpAmi*vhJoUnndPg%TS?{UE3i7v?N0Srn|&o|xk8x9hIp0^Grs zD;ZJq^`oPsC+gB%d1I6nx3*xgy%mWz$4NM1!Pu1lpa6$lz_$0pDxc^b#bXk+Q&4vT zPfT$$s}kh`wJL4I1?Z9x-bp62T`!tL*)*M?TpF-;d(jbWxqDFBEC*D7%q7M!ht1SK z&PJ8OHZW^%h>QxbLTYAdL$A;=6b4nObe|b9%2t3*=5`I*8P;)#2Xf;cy@fKq7`7S+ zvqnB!=4DE^J~9Ubb|I|TNO^H?w*@uGpqi)L#6_ZW`ACFlZ!v9_B@m%V)9;h-(yz8! 
zwwrRtcuqo@o}Agi@wXEcZ_q{Z%ql*q_$L-^o`a{o_eY_4f?9=h%8k$ef3UtUJP)EW z_GO=D9R(cW%BB|W<6}1aVwd4}Zh5gy78-iB)qt03rxsU3WH=rmCMyz~duDFV*sQ8D zM3HK&Hv=Qc$I-Ka z^)kH{#~oxzcQp^w=&_No8JeAnt6s z_!+mc2m^LasS2+H6;>?592aoJ@)I0)cUw*pHV94y>Tmj=*&L}@={q5;?p^msf~k{Q z?@a6oopCIV@nHSM*-L?bglut?!oE}F_nD~-J_*xo(72)+v#?x4^H+Yo07T)d+}aPb zOy^cS3cz~XV0v?Y|0&qT9IKAS&mMe4XnUEeaVgs?1wo=Ij3vc?AOZ`$fWDJ@cB^tDeYr zeWH|J-)EI$PFNn+k*URDPlQ){Nm?av6$Zw6d}({f!EJxkn|!vt6@y;#?X>(0rcB2&BJR=lX&LHQ0RC?dyz?#wF-E977t0Q-58X zylXhUhGb96K)XM!h)FdfIlU^9?W3+AETz{T!{}bt>$6_Go6?R;$ z4@PL$UciFs$3=y_*@ATp+v}rx(DjB=dR-G#eo9a~n1+QHNqC^gS{7nB&h+gtN?)#*6;>BJ^@?~#mV&gL)6V)oLYvQXNB_5l+vxfWBTl5^P3|WhLm^vOXxcX${5Knvo%6DEeevb6cW#dq*~G?r3gh|i zSy$+;>t8W5-#>--;>?;y@;&s-MtXMp=;8D__R$I;KG?960_xK>px*b=4>NsHZK3C> z=W=16&5n9&DZOz8*ut)QX}ZzZ(Me`1ZmhMe&Tx^Urq;f@PN!RacNNeTY3121#J+#N zDNTDb$z1zbDc#g0qG~;>QuvCptn=#9r0bE_|K68>FxlVLBQ4>zzKb7(rR%_HVfW76 z(QfDO*X_O(h2_G5($3HI-s-QTw$G#aiMJL0WCz!Ntdy=VGXL6(R+~Wat$@$9K>wgY zzo+KVOQ5U*djWJN^0{84{o4CV>4pZ5UE8%9fDNxj2)oz#wi9gEOB$eEGp;|^8=2%M zp@}9@zdB7cy%=F$15KPWIFIamvC~Ahx)i%vBZ3bUG@H-$USA*(*9=UNsJ(;fG|8&f zr_=OeoX+)e2{JXr@&tAT%Gmh~DdRts(k;uxU2RVsY?*|y{kNPabgqRLb)7MM-_x~s zEql;eo=HM7WoaAU9=2hT8SW}Jv=x7*lx}R4AGKAfxADD*=`~H|J8UZZYd)1L(M!WT z3VV{OgK{M9kP?9~YF-qqG>g)hSXJlFvIRrY5C{P`CBJm4?WCRPW3*{c!!HDP94#}f!7;bzLAvDUnT=Nf!7-ey#6

vCpOIjiB({WzwF|WN`$;9;#e%U3jiwe?`#*!=(xJv3>;is-0 zc%P_UVctWzaPZIJOTo!uZqN|Lf`0|9P$R+)gkoH7?4Bd1)1ilO2U8`QFJ+Z+!ccXN zNZcJ(C<--UfLZ|ij89`>_c0>U{xA%f%7!01uMMCNg| zA++?+fZ{kU_*1cyFC#XUa+%FCr18)=>DK<1f_!^@4UHGvvDd{{KJ zgmH&Y0~eMRWXfZRae~cq&SPbT+%;2J_s}J7DFPe`wA}0gj^&nGxz=*0Vja1DQohCm zB+I;TZOaaOTCSC^Yq{P2tklZad#oilSIRfE++k0vwer@Mogr?rGmo3ppB_wkQ_G0n zY$w7}fBM6l^8ii#=?`Dfa*w@9D5w7Phqtusw}-%|KmFmYc_^p;97vNYmj=69UR$Cf z>qt1&vf!l5iMS9qN1wbYvD9L|W#w1|KWSR7;#(_saTCY>1_QSz4^oW1wc1ktVL($W z{_?`);qc3kDE^ezm$?jD#UEFz>_~Q(JO7|mchy0MBJS{W1<30nk;z!Ml~u#ZJ*Vy# zbhoZP@pBa2qk)`t0Ov_b?16XWaV=g2%Ke@76Hzs{lM^cu2Kjnah|ae10*eG$a(Ke| z%&cv8eD!$h@!E1I>e%xH-+mC6bqksd9dbgJcT}&7E$C*)^}AvV#O=8EJ9K?U_`sRD zv(jCqNJxh@Kkz%h+!Y>$zM!w3%5icDw%ZHuq)>}JxXloXyK`T1Z@KfDIvNQu1}uWp z3>xn1mG@O^VSJJKzH9D`Euh)A_dtL~Imz}{?vBuuDh`c3a%gy?d{1RxfTmP2X!O9L z;qu>sQ?Xd+k2lgKg(CBX0{gnqJG)aKQ9)-Su z=Ap`60h+|2(SsKnZt(pcOpca2udllbu`+tJ!w+VQrA>Y?TP$vyl@Df%MXBvdU*&KF zs2l*YM-C8gl#f&n1%S#9kRCWdeE+iKgaC4>zcN5vxGKLa1Bj|x<(FjuRRHK^_Z|xv zqF{C9SYj74~=0Umqg@bE@?JVQ~{ z;n4$MGb$sVO{TSGM$$!a$c-AQnj(hfvl)UY)LuTDA&82G<+JyX1_ZSM&{XAo1gI?l zWRDym-Y8$lsJhJo(gR;JF5Jx|^IEfvs*6BnAW_S(Jd=S$k@oUT22wj9&17rV4oDX( zmm)~*0VI3mAn``|HI>6*&DtF#J#dgZYL%tr34xT+^@<=-ajLwOfz$y=OBqNVfV6b~ ziGZaJKzg+DSOlpffMkyxB;F`Lo-JF4gQN#9vr7rB|69vDxRdZuJv&6?xFQBBgh-C4 zNQ^i6kg|$7CiZk%RsP@FZ%MzTWEK)()i;%rx0EWKzgw}Rki~C?s}18z3*+_eCr60mA>mxTmw-J~L?yb^5%T!d62JB@%@aOE z%2q-(rSmF+R5DNz1yaDqT_10<6S!M}xU@o6Dq-llTO2q8283BAT{JjdbKv9|ecRtB z*834}6H*+zde!!5nYfV=3<%wCbSN}?1+IJfrTEzbNexZ^=Dyq{B^Sxlrim*MKvRsU z@gOm_PB>zOFS5pd|C|*^$iAu6;o0(NB3Is2B3s@jKaZqH>6=Q|zNJ*QJXrkhcVPc% zsj?HX-X@Z1{|R1tbE)#ZrB=@|Lz>rG%JS zl$^Vth&pS5Dd`AZ*m!3NuJm>l;4HzFD(}|x-c-5b{M=FDi{C^Z<06akaPF>Cdzr|f zgg7oDhuEcNbD?DNO!t($t7pr;@TX_K)7L%E%bsgq24O(LP==E*P3Y#`k%VbN!tahG zOcN4*cO+poZ5O>;B)r$ijIZBYqWt;o7OF8LB|pY$AZ{A&75FwTS!QFt@gg`ht&5w9 z0Dyba1c3LJsCbU8imF?Ef2rkH*&_F^Np2TUM4I-8w(9#!?Kh>Lm()%qwLe@+K8n=- zoo1;mx?j#PH5+1GsIv0t)Uy=P&(nBgokVjAK6mn(#YIxEx-Kbbvi2V?g?uqIA+Zma 
z$QL7dO%qZ4;Zp6RL0;>DQThK7<+YwjUVo%i`4!~#OCij1t@`VwmM=n>53GqWWh$)3 zxs}l5*Gug;r=JpKmPMJLEG3_UGJma^GQ*)o2p=7K$<_ixwcN@sUIJ3v?xt&5RtEIB`_;#?IjLiz_F=rxAjY94^f*1Ji5DdOW961pRTT z{RQc*kY+`?=$A_rcidu>Szu4D67T}nJ#S=9RFp55)?I5V0l(7QmrK`q(MFoA?Uzdw zZTvp1ZAH`iN{QlyPik$$vU)nE;2u_FwW68Fg(D9{8`q?wG`HC)AS|q!%v)Pu4>^TO z!;NS&V6b^*_fkKF2;ky!M?(dOKs+Eh;%OR$9bO2NCh&iyM8!fG0j;&_S4%B_y=Ei8 z{VA)765Mi?ugVB`i%`=lQS$Xt@>htG*ELxItLr#8x*^JEA~}ljB28MnbUVNSFTmq? zp$W8Kk7Ai7c=>u1%QWF|UoX}EDzHI4FgEy&$OiR9Z19_<%3s0;2f=2wR{h&j%MT`{ z&hN;`*j7If?aF?hzC1XuNomBbSozyhdp-RViRP;C`GzEUQ&Q>tgOx=yG_~B-Q*?dh zjO|%Cd%VLC&hWgvey-&;>kOf?&`zi{A%iz0!QVp@LVZIL{5>?G!8at=zA15kj~*DI zeoqqoJ$fQS{U=G~jY(_zE+v|*ifz3$Ne*|u?dtxXA^|;Jk5BR2Z8>@VPvP%D+h9Q2$zPpEilwcWH`0((LwWo@1lqr}!(L!8N&lir)xp z()ON>56JrqTy6Jjz3E}9&pU#ibEh_>0$D>VPS!M`q<2NKrU_ZUE0Q%$$ogH8tktx= z^)8XM96@c`6hD(Z9Y+wK007^(BZ#JT5gb7@0pMp696>t2fqT9uY55S_=D{@y_Qq(u zCaKOrSmiwl+UeV+CACYc|9p~s5UKvZmzp?3QB7*5_ozNTBUR}t#4d&gU~oSmbM8kI zxPLwgZVZ|r=;xE*#-Is~em<#vFo+>Nur+#r6hnF6P`RD9u)LWfXC)%9GFF zWUK+x&nrvMwX(z_L#3>;Qz=bo;A2r)q6w9LEGkPhA;6C%*M2-usU8@W{(7WRJrR}u zMpF4LsMPHD4sn-HCO9{+-$yY;qZ=W2ZZr}~uDawax@J6Ox+wz95gZWNPnW0 z-%EmO4 zKTndcBXIs>v%tBMD1R0{4`1Tvwbn}u2X=dWc6Nv0?;sR|6^7VYVkb755Wt^DorosH n_UBP2qKP2+^Q89mAZ_%(q|H~Ow9ykuoBx_r{vv6Om%{&FnfE&p literal 0 HcmV?d00001 diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_createCustomClass.json b/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_createCustomClass.json new file mode 100644 index 000000000000..a86d14c52181 --- /dev/null +++ b/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_createCustomClass.json @@ -0,0 +1,179 @@ +{ + "metadata": { + "configId": "Basic", + "snippetName": "Custom Class Creation", + "snippetDescription": "Shows how to create a custom class" + }, + "rpc": { + "protoPackage": "google.cloud.speech", + "apiVersion": [ + "v1" + ], + "serviceName": "Adaptation", + "rpcName": "CreateCustomClass" + }, + 
"signature": { + "snippetMethodName": "create_custom_class", + "returnType": { + "messageType": { + "messageFullName": "google.cloud.speech.v1.CustomClass" + } + }, + "syncPreference": "PREFER_ASYNC", + "parameters": [ + { + "type": { + "scalarType": "TYPE_STRING" + }, + "name": "parent", + "value": { + "stringValue": "projects/[PROJECT]/locations/us" + }, + "description": "The custom class parent element" + }, + { + "type": { + "scalarType": "TYPE_STRING" + }, + "name": "custom_class_id", + "value": { + "stringValue": "passengerships" + }, + "description": "The id for the custom class" + } + ] + }, + "snippet": { + "serviceClientInitialization": { + "customServiceEndpoint": { + "schema": "HTTPS", + "host": "speech.googleapis.com", + "region": "us" + } + }, + "standard": { + "requestInitialization": { + "requestValue": { + "complexValue": { + "properties": { + "parent": { + "nameValue": { + "name": "parent" + } + }, + "custom_class_id": { + "nameValue": { + "name": "custom_class_id" + } + }, + "custom_class": { + "complexValue": { + "properties": { + "items": { + "listValue": { + "values": [ + { + "stringValue": "Titanic" + }, + { + "stringValue": "RMS Queen Mary" + } + ] + } + } + } + } + } + } + } + }, + "requestName": "create_custom_class_request" + }, + "call": { + "preCall": [ + { + "standardOutput": { + "value": { + "stringValue": "Calling the CreateCustomClass operation." + } + } + } + ] + }, + "responseHandling": { + "responseName": "created_custom_class" + } + }, + "finalStatements": [ + { + "standardOutput": { + "value": { + "stringValue": "A Custom Class with the following name has been created." + } + } + }, + { + "standardOutput": { + "value": { + "nameValue": { + "name": "created_custom_class", + "path": ["name"] + } + } + } + }, + { + "standardOutput": { + "value": { + "stringValue": "The Custom class contains the following items." 
+ } + } + }, + { + "iteration": { + "repeatedIteration": { + "repeatedElements": { + "type": { + "repeatedType": { + "elementType": { + "messageType": { + "messageFullName": "google.cloud.speech.v1.CustomClass.ClassItem" + } + } + } + }, + "name": "items_list", + "value": { + "nameValue": { + "name": "created_custom_class", + "path": ["items"] + } + } + }, + "currentName": "item" + }, + "statements": [ + { + "standardOutput": { + "value": { + "nameValue": { + "name": "item" + } + } + } + } + ] + } + }, + { + "return": { + "result": { + "nameValue": { + "name": "created_custom_class" + } + } + } + } + ] + } + } diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py b/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py new file mode 100644 index 000000000000..82c3010f3a0c --- /dev/null +++ b/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-speech + +# [START speech_v1_generated_Adaptation_CreateCustomClass_Basic_async] +from google.cloud import speech_v1 + + +async def sample_create_custom_class_basic( + parent: str = "projects/[PROJECT]/locations/us", + custom_class_id: str = "passengerships", +) -> speech_v1.CustomClass: + """Custom Class Creation. + + Shows how to create a custom class. + + Args: + parent: The custom class parent element + custom_class_id: The id for the custom class + + Returns: + a CustomClass + """ + client = speech_v1.AdaptationAsyncClient( + client_options={"api_endpoint": "https://us-speech.googleapis.com"} + ) + + request = speech_v1.CreateCustomClassRequest( + parent=parent, + custom_class_id=custom_class_id, + custom_class=speech_v1.CustomClass( + items=[ + speech_v1.CustomClass.ClassItem(value="Titanic"), + speech_v1.CustomClass.ClassItem(value="RMS Queen Mary"), + ] + ), + ) + + print("Calling the CreateCustomClass operation.") + response = await client.create_custom_class(request=request) + created_custom_class = response.result() + + print("A Custom Class with the following name has been created.") + print(created_custom_class.name) + + print("The Custom class contains the following items.") + items_list = created_custom_class.items + for item in items_list: + print(item) + + return created_custom_class + + +# [END speech_v1_generated_Adaptation_CreateCustomClass_Basic_async] diff --git a/packages/gapic-generator/tests/configurable_snippetgen/test_resources.py b/packages/gapic-generator/tests/configurable_snippetgen/test_resources.py new file mode 100644 index 000000000000..ce0720574c07 --- /dev/null +++ b/packages/gapic-generator/tests/configurable_snippetgen/test_resources.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except 
in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pathlib import Path + +from google.protobuf.compiler import plugin_pb2 +import pytest + +from gapic import utils +from gapic.schema import api + + +CURRENT_DIRECTORY = Path(__file__).parent.absolute() +SPEECH_V1_REQUEST_PATH = (CURRENT_DIRECTORY / + "resources" / "speech" / "request.desc") + + +def test_request(): + with open(SPEECH_V1_REQUEST_PATH, "rb") as f: + req = plugin_pb2.CodeGeneratorRequest.FromString(f.read()) + + # From gapic/cli/generator.py. + opts = utils.Options.build(req.parameter) + api_schema = api.API.build( + req.proto_file, + opts=opts, + package="google.cloud.speech.v1", + ) + + expected_services = [ + "google.cloud.speech.v1.Adaptation", + "google.cloud.speech.v1.Speech", + ] + + # We are only making sure that the dumped request.desc file can be + # successfully loaded to rebuild API schema. + assert list(api_schema.services.keys()) == expected_services From 5c1652d6f6c7c3c7affea69476e3803d469ae4e3 Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Mon, 5 Dec 2022 21:25:12 +0000 Subject: [PATCH 0933/1339] fix: Fix timeout default values (#1480) * fix: Fix timeout default values Also fix LRO for REST transport. This PR makes generated gapics appeciate timeout values from grpc_service_config.json instead of overriding them with None (which means no timeout) It is basically a direct fix for https://github.com/googleapis/gapic-generator-python/issues/1477. 
This PR depends on https://github.com/googleapis/python-api-core/pull/462, and expects `setup.py.j2` templates to be updated after https://github.com/googleapis/python-api-core/pull/462 gets pushed and released with new version. * rename uri_prefix to path_prefix to match corresponding python-api-core change * fix unnecessary `gapic_v1.method.DEFAULT` in rest stubs * fix(deps): require google-api-core >=1.34.0 * fix(deps): require google-api-core >=2.11.0 * revert changes to WORKSPACE * fix typo * fix mypy error * revert local change for debugging Co-authored-by: Anthonios Partheniou --- .../%sub/services/%service/_mixins.py.j2 | 20 +-- .../%sub/services/%service/client.py.j2 | 8 +- .../services/%service/transports/rest.py.j2 | 9 +- .../gapic-generator/gapic/schema/wrappers.py | 4 + .../services/%service/_async_mixins.py.j2 | 20 +-- .../%sub/services/%service/_client_macros.j2 | 2 +- .../%sub/services/%service/_mixins.py.j2 | 20 +-- .../%sub/services/%service/async_client.py.j2 | 8 +- .../%sub/services/%service/client.py.j2 | 6 +- .../%service/transports/_rest_mixins.py.j2 | 6 +- .../services/%service/transports/rest.py.j2 | 9 +- .../gapic/templates/noxfile.py.j2 | 7 +- .../gapic/templates/setup.py.j2 | 2 +- .../templates/testing/constraints-3.7.txt.j2 | 2 +- .../tests/integration/BUILD.bazel | 23 ++- .../services/asset_service/async_client.py | 24 +-- .../asset_v1/services/asset_service/client.py | 24 +-- .../services/asset_service/transports/rest.py | 75 +++++----- .../integration/goldens/asset/noxfile.py | 7 +- .../tests/integration/goldens/asset/setup.py | 2 +- .../goldens/asset/testing/constraints-3.7.txt | 2 +- .../services/iam_credentials/async_client.py | 8 +- .../services/iam_credentials/client.py | 8 +- .../iam_credentials/transports/rest.py | 24 +-- .../goldens/credentials/noxfile.py | 7 +- .../integration/goldens/credentials/setup.py | 2 +- .../credentials/testing/constraints-3.7.txt | 2 +- .../services/eventarc/async_client.py | 10 +- 
.../eventarc_v1/services/eventarc/client.py | 10 +- .../services/eventarc/transports/rest.py | 33 +++-- .../integration/goldens/eventarc/noxfile.py | 7 +- .../integration/goldens/eventarc/setup.py | 2 +- .../eventarc/testing/constraints-3.7.txt | 2 +- .../config_service_v2/async_client.py | 46 +++--- .../services/config_service_v2/client.py | 46 +++--- .../config_service_v2/transports/rest.py | 138 +++++++++--------- .../logging_service_v2/async_client.py | 12 +- .../services/logging_service_v2/client.py | 12 +- .../logging_service_v2/transports/rest.py | 36 ++--- .../metrics_service_v2/async_client.py | 10 +- .../services/metrics_service_v2/client.py | 10 +- .../metrics_service_v2/transports/rest.py | 30 ++-- .../integration/goldens/logging/noxfile.py | 7 +- .../integration/goldens/logging/setup.py | 2 +- .../logging/testing/constraints-3.7.txt | 2 +- .../redis/google/cloud/redis_v1/py.typed | 0 .../services/cloud_redis/async_client.py | 18 +-- .../redis_v1/services/cloud_redis/client.py | 18 +-- .../services/cloud_redis/transports/base.py | 0 .../services/cloud_redis/transports/grpc.py | 0 .../cloud_redis/transports/grpc_asyncio.py | 0 .../services/cloud_redis/transports/rest.py | 57 ++++---- .../integration/goldens/redis/noxfile.py | 7 +- .../tests/integration/goldens/redis/setup.py | 2 +- .../goldens/redis/testing/constraints-3.7.txt | 2 +- .../unit/schema/wrappers/test_service.py | 1 + 56 files changed, 445 insertions(+), 406 deletions(-) mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 index 3179d8bbd9bb..f264ce88866f 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -6,7 +6,7 @@ request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -60,7 +60,7 @@ request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
@@ -113,7 +113,7 @@ request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -166,7 +166,7 @@ request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -218,7 +218,7 @@ request: Optional[operations_pb2.WaitOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most @@ -277,7 +277,7 @@ request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -396,7 +396,7 @@ request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -516,7 +516,7 @@ request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -578,7 +578,7 @@ request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -631,7 +631,7 @@ request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index d0066988cbd2..ea09cad6a716 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -350,7 +350,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} ) -> {{ method.client_output.ident }}: @@ -519,7 +519,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -636,7 +636,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -753,7 +753,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index ef32dbd4ae15..98ba58df17ae 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -251,7 +251,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, - http_options=http_options) + http_options=http_options, + path_prefix="{{ service.client_package_version }}") self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -282,9 +283,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __call__(self, request: {{method.input.ident}}, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' 
' -}} diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 4d9830d5c6a9..b32af50b9467 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1637,6 +1637,10 @@ def client_name(self) -> str: """Returns the name of the generated client class""" return self.name + "Client" + @property + def client_package_version(self) -> str: + return self.meta.address.package[-1] if self.meta.address.package else "" + @property def async_client_name(self) -> str: """Returns the name of the generated AsyncIO client class""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 0f1727f0de35..9fc240a34c29 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -8,7 +8,7 @@ request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -62,7 +62,7 @@ request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. 
@@ -115,7 +115,7 @@ request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -168,7 +168,7 @@ request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -220,7 +220,7 @@ request: Optional[operations_pb2.WaitOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most @@ -284,7 +284,7 @@ request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -403,7 +403,7 @@ request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -523,7 +523,7 @@ request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -585,7 +585,7 @@ request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -638,7 +638,7 @@ request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 440d162b145a..1502f8fcc39c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -27,7 +27,7 @@ *, {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #} ) -> {{ method.extended_lro.operation_type.ident }}: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index ec1387acdd17..8287ecf05965 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -6,7 +6,7 @@ request: Optional[operations_pb2.ListOperationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. 
@@ -60,7 +60,7 @@ request: Optional[operations_pb2.GetOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -113,7 +113,7 @@ request: Optional[operations_pb2.DeleteOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a long-running operation. @@ -166,7 +166,7 @@ request: Optional[operations_pb2.CancelOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -218,7 +218,7 @@ request: Optional[operations_pb2.WaitOperationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most @@ -282,7 +282,7 @@ request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. 
@@ -401,7 +401,7 @@ request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -521,7 +521,7 @@ request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -583,7 +583,7 @@ request: Optional[locations_pb2.GetLocationRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -636,7 +636,7 @@ request: Optional[locations_pb2.ListLocationsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 88b142dd60fd..7bb2e8a9fe38 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -205,7 +205,7 @@ class {{ service.async_client_name }}: *, {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), {% if not method.server_streaming %} ) -> {{ method.client_output_async.ident }}: @@ -390,7 +390,7 @@ class {{ service.async_client_name }}: request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -504,7 +504,7 @@ class {{ service.async_client_name }}: request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -620,7 +620,7 @@ class {{ service.async_client_name }}: request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index d7667abd1fa7..92f65e78fd71 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -399,7 +399,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -516,7 +516,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -634,7 +634,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 index 4dee95ceca23..4a3726b1d880 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -8,9 +8,9 @@ class _{{ name }}({{service.name}}RestStub): def __call__(self, request: {{ sig.request_type }}, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> {{ sig.response_type }}: r"""Call the {{- ' ' -}} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 126d6e00a615..5cb1667063ec 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -287,7 +287,8 @@ class 
{{service.name}}RestTransport({{service.name}}Transport): # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, - http_options=http_options) + http_options=http_options, + path_prefix="{{ service.client_package_version }}") self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -318,9 +319,9 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __call__(self, request: {{method.input.ident}}, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index d66e18cb23b5..9cfb353e8430 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -73,7 +73,12 @@ def cover(session): @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') + session.install( + 'mypy', + 'types-pkg_resources', + 'types-requests', + 'types-protobuf' + ) session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 6449154259a4..1c01e8f74897 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -27,7 +27,7 @@ else: release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, 
<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index a747f71dbb67..73c04e56c368 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 5f1f0dd762f0..4e151c3146ef 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -119,18 +119,17 @@ py_gapic_library( transport = "grpc+rest", ) -# Uncomment once https://github.com/googleapis/gapic-generator-python/issues/1359 is fixed -#py_test( -# name = "logging_py_gapic_test", -# srcs = [ -# "logging_py_gapic_pytest.py", -# "logging_py_gapic_test.py", -# ], -# legacy_create_init = False, -# deps = [ -# ":logging_py_gapic", -# ], -#) +py_test( + name = "logging_py_gapic_test", + srcs = [ + "logging_py_gapic_pytest.py", + "logging_py_gapic_test.py", + ], + legacy_create_init = False, + deps = [ + 
":logging_py_gapic", + ], +) py_gapic_library( name = "redis_py_gapic", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 53e7e2f127b4..7373ac54cc27 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -193,7 +193,7 @@ async def export_assets(self, request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud @@ -307,7 +307,7 @@ async def list_assets(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns @@ -422,7 +422,7 @@ async def batch_get_assets_history(self, request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time @@ -514,7 +514,7 @@ async def create_feed(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + 
timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent @@ -633,7 +633,7 @@ async def get_feed(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -747,7 +747,7 @@ async def list_feeds(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent @@ -857,7 +857,7 @@ async def update_feed(self, *, feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -968,7 +968,7 @@ async def delete_feed(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an asset feed. 
@@ -1067,7 +1067,7 @@ async def search_all_resources(self, query: Optional[str] = None, asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Cloud resources within the specified scope, such as @@ -1280,7 +1280,7 @@ async def search_all_iam_policies(self, scope: Optional[str] = None, query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a @@ -1471,7 +1471,7 @@ async def analyze_iam_policy(self, request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have @@ -1562,7 +1562,7 @@ async def analyze_iam_policy_longrunning(self, request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 315906adf87d..771fc25d40fc 
100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -396,7 +396,7 @@ def export_assets(self, request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Exports assets with time and resource types to a given Cloud @@ -511,7 +511,7 @@ def list_assets(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns @@ -626,7 +626,7 @@ def batch_get_assets_history(self, request: Optional[Union[asset_service.BatchGetAssetsHistoryRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time @@ -712,7 +712,7 @@ def create_feed(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent @@ -831,7 +831,7 @@ def get_feed(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> 
asset_service.Feed: r"""Gets details about an asset feed. @@ -938,7 +938,7 @@ def list_feeds(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent @@ -1041,7 +1041,7 @@ def update_feed(self, *, feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -1152,7 +1152,7 @@ def delete_feed(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an asset feed. 
@@ -1244,7 +1244,7 @@ def search_all_resources(self, query: Optional[str] = None, asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesPager: r"""Searches all Cloud resources within the specified scope, such as @@ -1450,7 +1450,7 @@ def search_all_iam_policies(self, scope: Optional[str] = None, query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a @@ -1634,7 +1634,7 @@ def analyze_iam_policy(self, request: Optional[Union[asset_service.AnalyzeIamPolicyRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have @@ -1720,7 +1720,7 @@ def analyze_iam_policy_longrunning(self, request: Optional[Union[asset_service.AnalyzeIamPolicyLongrunningRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index eab4b8c22991..b46a0a4ce0a3 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -458,7 +458,8 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, - http_options=http_options) + http_options=http_options, + path_prefix="v1") self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -478,9 +479,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.AnalyzeIamPolicyRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. @@ -558,9 +559,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the analyze iam policy longrunning method over HTTP. 
@@ -647,9 +648,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.BatchGetAssetsHistoryRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. @@ -723,9 +724,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.CreateFeedRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.Feed: r"""Call the create feed method over HTTP. @@ -815,9 +816,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.DeleteFeedRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete feed method over HTTP. @@ -879,9 +880,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.ExportAssetsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. 
@@ -965,9 +966,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.GetFeedRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.Feed: r"""Call the get feed method over HTTP. @@ -1048,9 +1049,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.ListAssetsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. @@ -1124,9 +1125,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.ListFeedsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. @@ -1200,9 +1201,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.SearchAllIamPoliciesRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. 
@@ -1276,9 +1277,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.SearchAllResourcesRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. @@ -1352,9 +1353,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: asset_service.UpdateFeedRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> asset_service.Feed: r"""Call the update feed method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 2a7c0060f67a..46247d5dc8bc 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -84,7 +84,12 @@ def cover(session): @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') + session.install( + 'mypy', + 'types-pkg_resources', + 'types-requests', + 'types-protobuf' + ) session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 1609b700c6e3..1bff80e4a911 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -36,7 +36,7 @@ release_status = "Development Status :: 5 - 
Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index 883a87aefd06..2beecf99e0be 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 05f61eb26986..4f0c9d6ef577 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -202,7 +202,7 @@ async def generate_access_token(self, scope: Optional[MutableSequence[str]] = None, lifetime: Optional[duration_pb2.Duration] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: 
Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service @@ -362,7 +362,7 @@ async def generate_id_token(self, audience: Optional[str] = None, include_email: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service @@ -515,7 +515,7 @@ async def sign_blob(self, delegates: Optional[MutableSequence[str]] = None, payload: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed @@ -655,7 +655,7 @@ async def sign_jwt(self, delegates: Optional[MutableSequence[str]] = None, payload: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index bcf6585a0e23..1395c18494f0 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -396,7 +396,7 @@ def generate_access_token(self, 
scope: Optional[MutableSequence[str]] = None, lifetime: Optional[duration_pb2.Duration] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service @@ -549,7 +549,7 @@ def generate_id_token(self, audience: Optional[str] = None, include_email: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service @@ -695,7 +695,7 @@ def sign_blob(self, delegates: Optional[MutableSequence[str]] = None, payload: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed @@ -828,7 +828,7 @@ def sign_jwt(self, delegates: Optional[MutableSequence[str]] = None, payload: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 7bc75d5d65da..f805bbff13d5 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -286,9 +286,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.GenerateAccessTokenRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> common.GenerateAccessTokenResponse: r"""Call the generate access token method over HTTP. @@ -371,9 +371,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.GenerateIdTokenRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> common.GenerateIdTokenResponse: r"""Call the generate id token method over HTTP. @@ -456,9 +456,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.SignBlobRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> common.SignBlobResponse: r"""Call the sign blob method over HTTP. 
@@ -541,9 +541,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: common.SignJwtRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> common.SignJwtResponse: r"""Call the sign jwt method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 9a3857b8a77d..f2c2998450b2 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -84,7 +84,12 @@ def cover(session): @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') + session.install( + 'mypy', + 'types-pkg_resources', + 'types-requests', + 'types-protobuf' + ) session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 57ac52c86d6a..c28f750303fc 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -36,7 +36,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index 6f3158cc2034..6c44adfea7ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 0b4e1988fdf5..887654736272 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -201,7 +201,7 @@ async def get_trigger(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -302,7 +302,7 @@ async def list_triggers(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTriggersAsyncPager: r"""List triggers. 
@@ -418,7 +418,7 @@ async def create_trigger(self, trigger: Optional[gce_trigger.Trigger] = None, trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Create a new trigger in a particular project and @@ -563,7 +563,7 @@ async def update_trigger(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Update a single trigger. @@ -699,7 +699,7 @@ async def delete_trigger(self, name: Optional[str] = None, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single trigger. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index b6659bbfce6f..550433432a8f 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -413,7 +413,7 @@ def get_trigger(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> trigger.Trigger: r"""Get a single trigger. 
@@ -514,7 +514,7 @@ def list_triggers(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTriggersPager: r"""List triggers. @@ -630,7 +630,7 @@ def create_trigger(self, trigger: Optional[gce_trigger.Trigger] = None, trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Create a new trigger in a particular project and @@ -775,7 +775,7 @@ def update_trigger(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Update a single trigger. @@ -911,7 +911,7 @@ def delete_trigger(self, name: Optional[str] = None, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Delete a single trigger. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 96bd6f82df74..c212e44aff56 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -310,7 +310,8 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, - http_options=http_options) + http_options=http_options, + path_prefix="v1") self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -330,9 +331,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.CreateTriggerRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the create trigger method over HTTP. @@ -418,9 +419,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.DeleteTriggerRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the delete trigger method over HTTP. 
@@ -497,9 +498,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.GetTriggerRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> trigger.Trigger: r"""Call the get trigger method over HTTP. @@ -577,9 +578,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.ListTriggersRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> eventarc.ListTriggersResponse: r"""Call the list triggers method over HTTP. @@ -657,9 +658,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: eventarc.UpdateTriggerRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the update trigger method over HTTP. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 3e8ab34def43..a7502780a670 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -84,7 +84,12 @@ def cover(session): @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') + session.install( + 'mypy', + 'types-pkg_resources', + 'types-requests', + 'types-protobuf' + ) session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index ad9c94433440..3be22cba3ed8 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -36,7 +36,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 6f3158cc2034..6c44adfea7ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index f2e13824a031..7168f0fa30e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -198,7 +198,7 @@ async def list_buckets(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists buckets. @@ -320,7 +320,7 @@ async def get_bucket(self, request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a bucket. 
@@ -398,7 +398,7 @@ async def create_bucket(self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Creates a bucket that can be used to store log @@ -479,7 +479,7 @@ async def update_bucket(self, request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Updates a bucket. This method replaces the following fields in @@ -567,7 +567,7 @@ async def delete_bucket(self, request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -637,7 +637,7 @@ async def undelete_bucket(self, request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Undeletes a bucket. A bucket that has been deleted @@ -707,7 +707,7 @@ async def list_views(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a bucket. 
@@ -821,7 +821,7 @@ async def get_view(self, request: Optional[Union[logging_config.GetViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view. @@ -901,7 +901,7 @@ async def create_view(self, request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Creates a view over logs in a bucket. A bucket may @@ -983,7 +983,7 @@ async def update_view(self, request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Updates a view. This method replaces the following fields in the @@ -1064,7 +1064,7 @@ async def delete_view(self, request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view from a bucket. @@ -1133,7 +1133,7 @@ async def list_sinks(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. 
@@ -1260,7 +1260,7 @@ async def get_sink(self, *, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1384,7 +1384,7 @@ async def create_sink(self, parent: Optional[str] = None, sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a @@ -1520,7 +1520,7 @@ async def update_sink(self, sink: Optional[logging_config.LogSink] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the @@ -1685,7 +1685,7 @@ async def delete_sink(self, *, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, @@ -1791,7 +1791,7 @@ async def list_exclusions(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions in a parent resource. 
@@ -1918,7 +1918,7 @@ async def get_exclusion(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. @@ -2045,7 +2045,7 @@ async def create_exclusion(self, parent: Optional[str] = None, exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in a specified parent @@ -2184,7 +2184,7 @@ async def update_exclusion(self, exclusion: Optional[logging_config.LogExclusion] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing @@ -2334,7 +2334,7 @@ async def delete_exclusion(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion. @@ -2439,7 +2439,7 @@ async def get_cmek_settings(self, request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logs Router CMEK settings for the given resource. 
@@ -2541,7 +2541,7 @@ async def update_cmek_settings(self, request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Updates the Logs Router CMEK settings for the given resource. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py old mode 100644 new mode 100755 index 92de685b1ac3..8bd53387ce64 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -428,7 +428,7 @@ def list_buckets(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: r"""Lists buckets. @@ -550,7 +550,7 @@ def get_bucket(self, request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Gets a bucket. 
@@ -629,7 +629,7 @@ def create_bucket(self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Creates a bucket that can be used to store log @@ -711,7 +711,7 @@ def update_bucket(self, request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: r"""Updates a bucket. This method replaces the following fields in @@ -800,7 +800,7 @@ def delete_bucket(self, request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED @@ -871,7 +871,7 @@ def undelete_bucket(self, request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Undeletes a bucket. A bucket that has been deleted @@ -942,7 +942,7 @@ def list_views(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a bucket. 
@@ -1056,7 +1056,7 @@ def get_view(self, request: Optional[Union[logging_config.GetViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Gets a view. @@ -1137,7 +1137,7 @@ def create_view(self, request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Creates a view over logs in a bucket. A bucket may @@ -1220,7 +1220,7 @@ def update_view(self, request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: r"""Updates a view. This method replaces the following fields in the @@ -1302,7 +1302,7 @@ def delete_view(self, request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a view from a bucket. @@ -1372,7 +1372,7 @@ def list_sinks(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSinksPager: r"""Lists sinks. 
@@ -1491,7 +1491,7 @@ def get_sink(self, *, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1607,7 +1607,7 @@ def create_sink(self, parent: Optional[str] = None, sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a @@ -1743,7 +1743,7 @@ def update_sink(self, sink: Optional[logging_config.LogSink] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the @@ -1900,7 +1900,7 @@ def delete_sink(self, *, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, @@ -1998,7 +1998,7 @@ def list_exclusions(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions in a parent resource. 
@@ -2117,7 +2117,7 @@ def get_exclusion(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion. @@ -2236,7 +2236,7 @@ def create_exclusion(self, parent: Optional[str] = None, exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in a specified parent @@ -2375,7 +2375,7 @@ def update_exclusion(self, exclusion: Optional[logging_config.LogExclusion] = None, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing @@ -2525,7 +2525,7 @@ def delete_exclusion(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an exclusion. @@ -2622,7 +2622,7 @@ def get_cmek_settings(self, request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logs Router CMEK settings for the given resource. 
@@ -2725,7 +2725,7 @@ def update_cmek_settings(self, request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.CmekSettings: r"""Updates the Logs Router CMEK settings for the given resource. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py index fe03b8b2b9f5..2a4a22284c94 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -660,9 +660,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateBucketRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogBucket: r"""Call the create bucket method over HTTP. @@ -765,9 +765,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateExclusionRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogExclusion: r"""Call the create exclusion method over HTTP. 
@@ -881,9 +881,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateSinkRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogSink: r"""Call the create sink method over HTTP. @@ -995,9 +995,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.CreateViewRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogView: r"""Call the create view method over HTTP. @@ -1102,9 +1102,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteBucketRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete bucket method over HTTP. @@ -1182,9 +1182,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteExclusionRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete exclusion method over HTTP. 
@@ -1262,9 +1262,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteSinkRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete sink method over HTTP. @@ -1342,9 +1342,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.DeleteViewRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete view method over HTTP. @@ -1422,9 +1422,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetBucketRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogBucket: r"""Call the get bucket method over HTTP. @@ -1514,9 +1514,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetCmekSettingsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.CmekSettings: r"""Call the get cmek settings method over HTTP. 
@@ -1612,9 +1612,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetExclusionRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogExclusion: r"""Call the get exclusion method over HTTP. @@ -1715,9 +1715,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetSinkRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogSink: r"""Call the get sink method over HTTP. @@ -1816,9 +1816,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.GetViewRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogView: r"""Call the get view method over HTTP. @@ -1910,9 +1910,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListBucketsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.ListBucketsResponse: r"""Call the list buckets method over HTTP. 
@@ -2002,9 +2002,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListExclusionsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.ListExclusionsResponse: r"""Call the list exclusions method over HTTP. @@ -2094,9 +2094,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListSinksRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.ListSinksResponse: r"""Call the list sinks method over HTTP. @@ -2186,9 +2186,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.ListViewsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.ListViewsResponse: r"""Call the list views method over HTTP. @@ -2278,9 +2278,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UndeleteBucketRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the undelete bucket method over HTTP. 
@@ -2371,9 +2371,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateBucketRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogBucket: r"""Call the update bucket method over HTTP. @@ -2476,9 +2476,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateCmekSettingsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.CmekSettings: r"""Call the update cmek settings method over HTTP. @@ -2584,9 +2584,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateExclusionRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogExclusion: r"""Call the update exclusion method over HTTP. @@ -2700,9 +2700,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateSinkRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogSink: r"""Call the update sink method over HTTP. 
@@ -2834,9 +2834,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_config.UpdateViewRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_config.LogView: r"""Call the update view method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 1927be3cca3b..5c9df587834b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -190,7 +190,7 @@ async def delete_log(self, *, log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all the log entries in a log. The log @@ -305,7 +305,7 @@ async def write_log_entries(self, labels: Optional[MutableMapping[str, str]] = None, entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. 
This API method is the @@ -502,7 +502,7 @@ async def list_log_entries(self, filter: Optional[str] = None, order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that @@ -663,7 +663,7 @@ async def list_monitored_resource_descriptors(self, request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types @@ -758,7 +758,7 @@ async def list_logs(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, @@ -885,7 +885,7 @@ def tail_log_entries(self, requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py old mode 100644 new mode 100755 index a3db9d981378..90d0950c4ed4 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -384,7 +384,7 @@ def delete_log(self, *, log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all the log entries in a log. The log @@ -491,7 +491,7 @@ def write_log_entries(self, labels: Optional[MutableMapping[str, str]] = None, entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the @@ -679,7 +679,7 @@ def list_log_entries(self, filter: Optional[str] = None, order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. 
Use this method to retrieve log entries that @@ -832,7 +832,7 @@ def list_monitored_resource_descriptors(self, request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types @@ -920,7 +920,7 @@ def list_logs(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, @@ -1039,7 +1039,7 @@ def tail_log_entries(self, requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index a735292d9215..7f52c18e9de2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -290,9 +290,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.DeleteLogRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete log method over HTTP. @@ -370,9 +370,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.ListLogEntriesRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging.ListLogEntriesResponse: r"""Call the list log entries method over HTTP. @@ -455,9 +455,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.ListLogsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging.ListLogsResponse: r"""Call the list logs method over HTTP. 
@@ -540,9 +540,9 @@ def __hash__(self): def __call__(self, request: logging.ListMonitoredResourceDescriptorsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging.ListMonitoredResourceDescriptorsResponse: r"""Call the list monitored resource descriptors method over HTTP. @@ -613,9 +613,9 @@ def __hash__(self): def __call__(self, request: logging.TailLogEntriesRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> rest_streaming.ResponseIterator: raise NotImplementedError( "Client streaming over REST is not yet defined for python client") @@ -633,9 +633,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging.WriteLogEntriesRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging.WriteLogEntriesResponse: r"""Call the write log entries method over HTTP. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 54bc33ba5cfd..6f507390828f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -191,7 +191,7 @@ async def list_log_metrics(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -315,7 +315,7 @@ async def get_log_metric(self, *, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -436,7 +436,7 @@ async def create_log_metric(self, parent: Optional[str] = None, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. 
@@ -567,7 +567,7 @@ async def update_log_metric(self, metric_name: Optional[str] = None, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -704,7 +704,7 @@ async def delete_log_metric(self, *, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py old mode 100644 new mode 100755 index fc7b586d79f3..8dabfc38a75c --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -385,7 +385,7 @@ def list_log_metrics(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -501,7 +501,7 @@ def get_log_metric(self, *, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. 
@@ -614,7 +614,7 @@ def create_log_metric(self, parent: Optional[str] = None, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -745,7 +745,7 @@ def update_log_metric(self, metric_name: Optional[str] = None, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -874,7 +874,7 @@ def delete_log_metric(self, *, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a logs-based metric. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py index d892b5d297f3..6bdc187c8155 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -290,9 +290,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.CreateLogMetricRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_metrics.LogMetric: r"""Call the create log metric method over HTTP. @@ -386,9 +386,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.DeleteLogMetricRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ): r"""Call the delete log metric method over HTTP. @@ -450,9 +450,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.GetLogMetricRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_metrics.LogMetric: r"""Call the get log metric method over HTTP. 
@@ -537,9 +537,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.ListLogMetricsRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_metrics.ListLogMetricsResponse: r"""Call the list log metrics method over HTTP. @@ -613,9 +613,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: logging_metrics.UpdateLogMetricRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> logging_metrics.LogMetric: r"""Call the update log metric method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 0d59e8201b42..fbe05683f99a 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -84,7 +84,12 @@ def cover(session): @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') + session.install( + 'mypy', + 'types-pkg_resources', + 'types-requests', + 'types-protobuf' + ) session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index a9b84915eeec..370f18029b24 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -36,7 +36,7 @@ release_status = 
"Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index 6f3158cc2034..6c44adfea7ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 0cdf3da95d62..12a879028093 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -214,7 +214,7 @@ async def list_instances(self, *, parent: Optional[str] = None, retry: OptionalRetry = 
gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the @@ -339,7 +339,7 @@ async def get_instance(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. @@ -441,7 +441,7 @@ async def create_instance(self, instance_id: Optional[str] = None, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory @@ -603,7 +603,7 @@ async def update_instance(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific @@ -746,7 +746,7 @@ async def upgrade_instance(self, name: Optional[str] = None, redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Upgrades Redis instance to the newer Redis version @@ -875,7 +875,7 @@ async def import_instance(self, name: Optional[str] = None, input_config: Optional[cloud_redis.InputConfig] = None, retry: 
OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Import a Redis RDB snapshot file from Cloud Storage @@ -1014,7 +1014,7 @@ async def export_instance(self, name: Optional[str] = None, output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Export Redis instance data into a Redis RDB format @@ -1149,7 +1149,7 @@ async def failover_instance(self, name: Optional[str] = None, data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Initiates a failover of the master node to current @@ -1278,7 +1278,7 @@ async def delete_instance(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. 
Instance stops diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py old mode 100644 new mode 100755 index 1ddfce8650bb..3deba04cd290 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -408,7 +408,7 @@ def list_instances(self, *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the @@ -533,7 +533,7 @@ def get_instance(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. 
@@ -635,7 +635,7 @@ def create_instance(self, instance_id: Optional[str] = None, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory @@ -797,7 +797,7 @@ def update_instance(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Updates the metadata and configuration of a specific @@ -940,7 +940,7 @@ def upgrade_instance(self, name: Optional[str] = None, redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Upgrades Redis instance to the newer Redis version @@ -1069,7 +1069,7 @@ def import_instance(self, name: Optional[str] = None, input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Import a Redis RDB snapshot file from Cloud Storage @@ -1208,7 +1208,7 @@ def export_instance(self, name: Optional[str] = None, output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Export Redis instance data into a Redis RDB format 
@@ -1343,7 +1343,7 @@ def failover_instance(self, name: Optional[str] = None, data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Initiates a failover of the master node to current @@ -1472,7 +1472,7 @@ def delete_instance(self, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 33f92413867a..38e7567054a4 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -419,7 +419,8 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # use the credentials which are saved credentials=self._credentials, scopes=self._scopes, - http_options=http_options) + http_options=http_options, + path_prefix="v1") self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) @@ -439,9 +440,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.CreateInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. @@ -527,9 +528,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.DeleteInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. @@ -606,9 +607,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.ExportInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. 
@@ -694,9 +695,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.FailoverInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. @@ -782,9 +783,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.GetInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. @@ -860,9 +861,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.ImportInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. @@ -948,9 +949,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.ListInstancesRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. 
@@ -1028,9 +1029,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.UpdateInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. @@ -1116,9 +1117,9 @@ def _get_unset_required_fields(cls, message_dict): def __call__(self, request: cloud_redis.UpgradeInstanceRequest, *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 611bd142e6a9..2d9776b927ce 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -84,7 +84,12 @@ def cover(session): @nox.session(python=ALL_PYTHON) def mypy(session): """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') + session.install( + 'mypy', + 'types-pkg_resources', + 'types-requests', + 'types-protobuf' + ) session.install('.') session.run( 'mypy', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index ceb7c8d01990..b9ba0c50948e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -36,7 +36,7 @@ release_status = "Development Status :: 5 - Production/Stable" 
dependencies = [ - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index 6f3158cc2034..6c44adfea7ee 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -4,6 +4,6 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 0ee88f921c6d..2664167c7264 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -58,6 +58,7 @@ def test_service_properties(): service = make_service(name='ThingDoer') assert service.name == 'ThingDoer' assert service.client_name == 'ThingDoerClient' + assert service.client_package_version == '' assert service.async_client_name == 'ThingDoerAsyncClient' assert service.transport_name == 'ThingDoerTransport' assert service.grpc_transport_name == 'ThingDoerGrpcTransport' From 4cd0bfcbe56d360416705c38181638e2dc756985 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 5 Dec 2022 18:56:55 -0500 Subject: [PATCH 0934/1339] ci: use hashes in pip install 
statements in release.sh (#1484) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ci: use hashes in pip install statements in release.sh * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.yaml | 18 + .../gapic-generator/.kokoro/requirements.in | 8 + .../gapic-generator/.kokoro/requirements.txt | 471 ++++++++++++++++++ packages/gapic-generator/owlbot.py | 4 +- 4 files changed, 499 insertions(+), 2 deletions(-) create mode 100644 packages/gapic-generator/.github/.OwlBot.yaml create mode 100644 packages/gapic-generator/.kokoro/requirements.in create mode 100644 packages/gapic-generator/.kokoro/requirements.txt diff --git a/packages/gapic-generator/.github/.OwlBot.yaml b/packages/gapic-generator/.github/.OwlBot.yaml new file mode 100644 index 000000000000..0afcbd1eff16 --- /dev/null +++ b/packages/gapic-generator/.github/.OwlBot.yaml @@ -0,0 +1,18 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/gapic-generator/.kokoro/requirements.in b/packages/gapic-generator/.kokoro/requirements.in new file mode 100644 index 000000000000..7718391a34d7 --- /dev/null +++ b/packages/gapic-generator/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt new file mode 100644 index 000000000000..d15994bac93c --- /dev/null +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -0,0 +1,471 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + 
--hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + 
--hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + 
--hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 + # via + # gcp-docuploader + # 
nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + 
--hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv +gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + 
--hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + 
--hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + 
--hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + 
--hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + 
--hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + 
--hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + 
--hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + 
--hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + 
--hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py index f05ef4bdb23a..dcb31e46eb4a 100644 --- a/packages/gapic-generator/owlbot.py +++ b/packages/gapic-generator/owlbot.py @@ -18,7 +18,7 @@ templated_files = CommonTemplates().py_library() s.move( templated_files / ".kokoro", - excludes=["samples/**/*", "test-samples*", "publish-docs.sh"], + excludes=["samples/**/*", "test-samples*", "publish-docs.sh", "*/prerelease-deps.cfg"], ) # remove docfx 
build @@ -32,4 +32,4 @@ s.move(templated_files / ".trampolinerc") s.move(templated_files / "LICENSE") -s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS"]) +s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS", "auto-approve.yml"]) From 951818adac0d43ec2faf1ea5820652c9308171fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Dec 2022 00:17:43 +0000 Subject: [PATCH 0935/1339] chore(deps): bump cryptography from 37.0.4 to 38.0.3 in /.kokoro (#1507) Bumps [cryptography](https://github.com/pyca/cryptography) from 37.0.4 to 38.0.3. - [Release notes](https://github.com/pyca/cryptography/releases) - [Changelog](https://github.com/pyca/cryptography/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pyca/cryptography/compare/37.0.4...38.0.3) --- updated-dependencies: - dependency-name: cryptography dependency-type: indirect ... Signed-off-by: dependabot[bot] Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .../gapic-generator/.kokoro/requirements.txt | 50 ++++++++++--------- 1 file changed, 27 insertions(+), 23 deletions(-) diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index d15994bac93c..7f3eedec957c 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -110,29 +110,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - 
--hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + 
--hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage From bb819c2730dd8e2add6a2536188109b2ea807bd6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 5 Dec 2022 19:33:17 -0500 
Subject: [PATCH 0936/1339] fix: drop usage of pkg_resources (#1471) * fix: drop pkg_resources * fix typo --- .../%namespace/%name/gapic_version.py.j2 | 2 +- .../%sub/services/%service/async_client.py.j2 | 13 ++++--------- .../%sub/services/%service/client.py.j2 | 14 +++++--------- .../%sub/services/%service/transports/base.py.j2 | 13 ++++--------- .../%name_%version/gapic_version.py.j2 | 5 +++++ .../gapic/templates/.coveragerc.j2 | 5 ----- .../gapic/templates/noxfile.py.j2 | 1 - .../tests/integration/goldens/asset/.coveragerc | 5 ----- .../asset/google/cloud/asset/gapic_version.py | 2 +- .../asset/google/cloud/asset_v1/gapic_version.py | 16 ++++++++++++++++ .../services/asset_service/async_client.py | 12 +++--------- .../asset_v1/services/asset_service/client.py | 13 ++++--------- .../services/asset_service/transports/base.py | 12 +++--------- .../tests/integration/goldens/asset/noxfile.py | 1 - .../integration/goldens/credentials/.coveragerc | 5 ----- .../google/iam/credentials/gapic_version.py | 2 +- .../google/iam/credentials_v1/gapic_version.py | 16 ++++++++++++++++ .../services/iam_credentials/async_client.py | 12 +++--------- .../services/iam_credentials/client.py | 13 ++++--------- .../services/iam_credentials/transports/base.py | 12 +++--------- .../integration/goldens/credentials/noxfile.py | 1 - .../integration/goldens/eventarc/.coveragerc | 5 ----- .../google/cloud/eventarc/gapic_version.py | 2 +- .../google/cloud/eventarc_v1/gapic_version.py | 16 ++++++++++++++++ .../services/eventarc/async_client.py | 12 +++--------- .../eventarc_v1/services/eventarc/client.py | 13 ++++--------- .../services/eventarc/transports/base.py | 12 +++--------- .../integration/goldens/eventarc/noxfile.py | 1 - .../integration/goldens/logging/.coveragerc | 5 ----- .../google/cloud/logging/gapic_version.py | 2 +- .../google/cloud/logging_v2/gapic_version.py | 16 ++++++++++++++++ .../services/config_service_v2/async_client.py | 12 +++--------- 
.../services/config_service_v2/client.py | 13 ++++--------- .../config_service_v2/transports/base.py | 12 +++--------- .../services/logging_service_v2/async_client.py | 12 +++--------- .../services/logging_service_v2/client.py | 13 ++++--------- .../logging_service_v2/transports/base.py | 12 +++--------- .../services/metrics_service_v2/async_client.py | 12 +++--------- .../services/metrics_service_v2/client.py | 13 ++++--------- .../metrics_service_v2/transports/base.py | 12 +++--------- .../tests/integration/goldens/logging/noxfile.py | 1 - .../tests/integration/goldens/redis/.coveragerc | 5 ----- .../redis/google/cloud/redis/gapic_version.py | 2 +- .../redis/google/cloud/redis_v1/gapic_version.py | 16 ++++++++++++++++ .../services/cloud_redis/async_client.py | 12 +++--------- .../redis_v1/services/cloud_redis/client.py | 13 ++++--------- .../services/cloud_redis/transports/base.py | 12 +++--------- .../tests/integration/goldens/redis/noxfile.py | 1 - 48 files changed, 174 insertions(+), 258 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 index 425d6ab7cca5..d6db9873846b 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 @@ -1,5 +1,5 @@ {% extends '_base.py.j2' %} {% block content %} -__version__ = "0.1.0" +__version__ = "0.1.0" # {x-release-please-version} {% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 7bb2e8a9fe38..ba5c54d86792 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -6,11 +6,13 @@ from collections import OrderedDict import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union -import pkg_resources {% if service.any_deprecated %} import warnings {% endif %} +{% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} +from {{package_path}} import gapic_version as package_version + from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -676,14 +678,7 @@ class {{ service.async_client_name }}: async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "{{ api.naming.warehouse_package_name }}", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 92f65e78fd71..b7e485a3db2f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -10,11 +10,13 @@ import functools import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast -import pkg_resources {% if service.any_deprecated %} import warnings {% endif %} +{% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} +from {{package_path}} import gapic_version as package_version + from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions {% if service.any_extended_operations_methods %} @@ -685,14 +687,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Done; return the response. return response {% endif %} -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "{{ api.naming.warehouse_package_name }}", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 50781784395e..1043178b5021 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -4,7 +4,9 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +{% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} +from {{package_path}} import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -39,14 +41,7 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser {% endfor %} {% endfilter %} -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - '{{ api.naming.warehouse_package_name }}', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class {{ service.name }}Transport(abc.ABC): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 new file mode 100644 index 000000000000..d6db9873846b --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 @@ -0,0 +1,5 @@ +{% extends '_base.py.j2' %} +{% block content %} + +__version__ = "0.1.0" # {x-release-please-version} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/.coveragerc.j2 b/packages/gapic-generator/gapic/templates/.coveragerc.j2 index 6e2f585cbd56..cabb94bb9cf1 100644 --- a/packages/gapic-generator/gapic/templates/.coveragerc.j2 +++ b/packages/gapic-generator/gapic/templates/.coveragerc.j2 @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 9cfb353e8430..b78cd5a4b53e 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -75,7 +75,6 @@ def mypy(session): """Run the type checker.""" session.install( 'mypy', - 'types-pkg_resources', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc index 3425850c049e..f5ee43d5b135 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py index 35859c3f7fc1..405b1cebcf15 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 7373ac54cc27..69b3a4dcbfb4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import pkg_resources + +from google.cloud.asset_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -1679,14 +1680,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-asset", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 771fc25d40fc..23d576cd103a 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -17,7 +17,8 @@ import os import re from 
typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.cloud.asset_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -1850,14 +1851,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-asset", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index ad1d09b62633..7bcf53ed9e1b 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.asset_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,7 @@ from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-asset', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class AssetServiceTransport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 46247d5dc8bc..b66ae298136c 100644 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -86,7 +86,6 @@ def mypy(session): """Run the type checker.""" session.install( 'mypy', - 'types-pkg_resources', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc index 9fd3c4f8b3e4..0f45bde5469b 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py index 35859c3f7fc1..405b1cebcf15 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 4f0c9d6ef577..cf5c77907604 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import pkg_resources + +from google.iam.credentials_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -797,14 +798,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-iam-credentials", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 1395c18494f0..f1b895a7e97a 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -17,7 +17,8 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.iam.credentials_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -975,14 +976,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-iam-credentials", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index eadf66db9aa6..11ba17c52235 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.iam.credentials_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -27,14 +28,7 @@ from google.iam.credentials_v1.types import common -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-iam-credentials', - 
).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class IAMCredentialsTransport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index f2c2998450b2..ce9e5ee5518d 100644 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -86,7 +86,6 @@ def mypy(session): """Run the type checker.""" session.install( 'mypy', - 'types-pkg_resources', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc index c069b0b0fc04..04f855c37f0e 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py index 35859c3f7fc1..405b1cebcf15 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 887654736272..c522df549bfe 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import pkg_resources + +from google.cloud.eventarc_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -827,14 +828,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-eventarc", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 550433432a8f..3b2ff888aa91 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -17,7 +17,8 @@ import os 
import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.cloud.eventarc_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -1051,14 +1052,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-eventarc", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 808dc4f3b5ee..e86f94857286 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.eventarc_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,7 @@ from google.cloud.eventarc_v1.types import trigger from google.longrunning import operations_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-eventarc', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class EventarcTransport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index a7502780a670..10a122e19b17 100644 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -86,7 +86,6 @@ def mypy(session): """Run the type checker.""" session.install( 'mypy', - 'types-pkg_resources', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc index b38d22e21fd1..c0f6e82dff6e 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py index 35859c3f7fc1..405b1cebcf15 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 7168f0fa30e8..070dfee7a20e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -2651,14 +2652,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 8bd53387ce64..7a50bf7e6280 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -17,7 +17,8 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -2848,14 +2849,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index df6f3e7a1f9b..94d16a4d38c2 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,7 @@ from google.cloud.logging_v2.types import logging_config from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - 
gapic_version=pkg_resources.get_distribution( - 'google-cloud-logging', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class ConfigServiceV2Transport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 5c9df587834b..1e92b2865739 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -976,14 +977,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 90d0950c4ed4..03c62abdaaa5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -17,7 +17,8 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -1130,14 +1131,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 75bf31462908..0cf5ac2edab1 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.logging_v2 import 
gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,7 @@ from google.cloud.logging_v2.types import logging from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-logging', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class LoggingServiceV2Transport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 6f507390828f..415d708a5e55 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -804,14 +805,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 8dabfc38a75c..c206dc3f1bef 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -17,7 +17,8 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -978,14 +979,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-logging", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 2aa2b3e76835..01cd0d37cfbc 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.logging_v2 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,7 @@ from google.cloud.logging_v2.types import logging_metrics from google.protobuf import empty_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-logging', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class MetricsServiceV2Transport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index fbe05683f99a..79baa30d769b 100644 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -86,7 +86,6 @@ def mypy(session): """Run the type checker.""" session.install( 'mypy', - 'types-pkg_resources', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc index f77eadc824ae..d36404f34db3 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. 
- # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py index 35859c3f7fc1..405b1cebcf15 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py new file mode 100644 index 000000000000..405b1cebcf15 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 12a879028093..bd3f2c590fd8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -17,7 +17,8 @@ import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union -import pkg_resources + +from google.cloud.redis_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -1407,14 +1408,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-redis", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 3deba04cd290..83507eb4d325 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -17,7 +17,8 @@ import os import re from typing import 
Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast -import pkg_resources + +from google.cloud.redis_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -1613,14 +1614,8 @@ def __exit__(self, type, value, traceback): -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-redis", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 7afa09785635..d00e009be978 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.redis_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -29,14 +30,7 @@ from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-redis', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class CloudRedisTransport(abc.ABC): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 2d9776b927ce..0e2d8b6d6f4e 100644 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -86,7 +86,6 @@ def mypy(session): """Run the type checker.""" session.install( 'mypy', - 'types-pkg_resources', 'types-requests', 'types-protobuf' ) From bfeced09bfacd68b26a057420842de91b1dfc115 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Dec 2022 00:37:34 +0000 Subject: [PATCH 0937/1339] chore(main): release 1.7.0 (#1496) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 15 +++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 121beab74950..8c24ece7fae1 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,21 @@ # Changelog +## [1.7.0](https://github.com/googleapis/gapic-generator-python/compare/v1.6.2...v1.7.0) (2022-12-06) + + +### Features + +* Add snippetgen config language and testing resource files ([#1504](https://github.com/googleapis/gapic-generator-python/issues/1504)) ([5b98659](https://github.com/googleapis/gapic-generator-python/commit/5b98659816b38ab5f376a5748b6275f1c2667aaf)) + + +### Bug Fixes + +* **deps:** require google-api-core >=1.34.0, >=2.11.0 ([6de9e28](https://github.com/googleapis/gapic-generator-python/commit/6de9e2881171d873ab3d76bfa386667ae745f0d9)) +* Drop usage of pkg_resources 
([#1471](https://github.com/googleapis/gapic-generator-python/issues/1471)) ([a50c290](https://github.com/googleapis/gapic-generator-python/commit/a50c2909b5eb14c16acaf16057944688891eb7af)) +* Fix timeout default values ([6de9e28](https://github.com/googleapis/gapic-generator-python/commit/6de9e2881171d873ab3d76bfa386667ae745f0d9)) +* Snippetgen should call await on the operation coroutine before calling result ([#1495](https://github.com/googleapis/gapic-generator-python/issues/1495)) ([69a49c6](https://github.com/googleapis/gapic-generator-python/commit/69a49c6b9e8a45c87e8f2a9d4b25f00b9a4b01be)) + ## [1.6.2](https://github.com/googleapis/gapic-generator-python/compare/v1.6.1...v1.6.2) (2022-11-15) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9c0fdd507e8c..0ac668bec3d0 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.6.2" +version = "1.7.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From f3def5e60e3cceb970a6af71da0c7ebb2d2a6ea6 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Wed, 7 Dec 2022 10:20:00 -0800 Subject: [PATCH 0938/1339] chore: add configured_snippet module (#1506) * chore: add configured_snippet module --- .../configured_snippet.py | 65 + .../snippet_config_language_pb2.pyi | 2114 +++++++++++++++++ packages/gapic-generator/requirements.txt | 3 +- packages/gapic-generator/setup.py | 1 + .../resources/README.md | 0 .../resources/speech/request.desc | Bin .../speech/speech_createCustomClass.json | 0 ...ptation_create_custom_class_basic_async.py | 6 +- .../test_configured_snippet.py | 114 + .../configurable_snippetgen/test_resources.py | 0 10 files changed, 2299 insertions(+), 4 deletions(-) create mode 100644 
packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py create mode 100644 packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi rename packages/gapic-generator/tests/{ => unit}/configurable_snippetgen/resources/README.md (100%) rename packages/gapic-generator/tests/{ => unit}/configurable_snippetgen/resources/speech/request.desc (100%) rename packages/gapic-generator/tests/{ => unit}/configurable_snippetgen/resources/speech/speech_createCustomClass.json (100%) rename packages/gapic-generator/tests/{ => unit}/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py (92%) create mode 100644 packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py rename packages/gapic-generator/tests/{ => unit}/configurable_snippetgen/test_resources.py (100%) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py new file mode 100644 index 000000000000..c9ff39adb83a --- /dev/null +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -0,0 +1,65 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import dataclasses + +import libcst + +from gapic.configurable_snippetgen import snippet_config_language_pb2 +from gapic.schema import api + + +def _make_empty_module() -> libcst.Module: + return libcst.Module(body=[]) + + +@dataclasses.dataclass(frozen=True) +class ConfiguredSnippet: + api_schema: api.API + config: snippet_config_language_pb2.SnippetConfig + api_version: str + is_sync: bool + _module: libcst.Module = dataclasses.field( + default_factory=_make_empty_module, init=False + ) + + @property + def code(self) -> str: + """The code of the configured snippet.""" + return self._module.code + + @property + def region_tag(self) -> str: + """The region tag of the snippet. + + For example: + "speech_v1_config_Adaptation_CreateCustomClass_Basic_async" + """ + module_name = self.config.rpc.proto_package.split(".")[-1] + service_name = self.config.rpc.service_name + rpc_name = self.config.rpc.rpc_name + config_id = self.config.metadata.config_id + sync_or_async = "sync" if self.is_sync else "async" + return f"{module_name}_{self.api_version}_config_{service_name}_{rpc_name}_{config_id}_{sync_or_async}" + + @property + def sample_function_name(self) -> str: + """The sample function's name. + + For example: + "sample_create_custom_class_basic" + """ + snippet_method_name = self.config.signature.snippet_method_name + config_id = self.config.metadata.config_id + return f"sample_{snippet_method_name}_{config_id}" diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi new file mode 100644 index 000000000000..73f90c543f29 --- /dev/null +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi @@ -0,0 +1,2114 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2022 Google Inc. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +""" +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _GeneratorOutputLanguage: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _GeneratorOutputLanguageEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_GeneratorOutputLanguage.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED: _GeneratorOutputLanguage.ValueType # 0 + """The language has not been specified. 
Consumers should not see this value.""" + C_PLUS_PLUS: _GeneratorOutputLanguage.ValueType # 1 + C_SHARP: _GeneratorOutputLanguage.ValueType # 2 + GO: _GeneratorOutputLanguage.ValueType # 3 + JAVA: _GeneratorOutputLanguage.ValueType # 4 + JAVASCRIPT: _GeneratorOutputLanguage.ValueType # 5 + PHP: _GeneratorOutputLanguage.ValueType # 6 + PYTHON: _GeneratorOutputLanguage.ValueType # 7 + RUBY: _GeneratorOutputLanguage.ValueType # 8 + +class GeneratorOutputLanguage(_GeneratorOutputLanguage, metaclass=_GeneratorOutputLanguageEnumTypeWrapper): + """A programming language in which snippets are generated. + Note that this is different from + google.cloud.tools.snippetgen.snippetindex.v1.Language, i.e. language + specified in snippet metadata, as metadata can be written for both generated + and handwritten snippets. In turn, we'll always know which generators we are + writing snippet configs for and which are the output languages of those + generators. + """ + +GENERATOR_OUTPUT_LANGUAGE_UNSPECIFIED: GeneratorOutputLanguage.ValueType # 0 +"""The language has not been specified. Consumers should not see this value.""" +C_PLUS_PLUS: GeneratorOutputLanguage.ValueType # 1 +C_SHARP: GeneratorOutputLanguage.ValueType # 2 +GO: GeneratorOutputLanguage.ValueType # 3 +JAVA: GeneratorOutputLanguage.ValueType # 4 +JAVASCRIPT: GeneratorOutputLanguage.ValueType # 5 +PHP: GeneratorOutputLanguage.ValueType # 6 +PYTHON: GeneratorOutputLanguage.ValueType # 7 +RUBY: GeneratorOutputLanguage.ValueType # 8 +global___GeneratorOutputLanguage = GeneratorOutputLanguage + +@typing_extensions.final +class SnippetConfig(google.protobuf.message.Message): + """The snippet configuration for a single snippet that will be generated across + all languages. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + METADATA_FIELD_NUMBER: builtins.int + RPC_FIELD_NUMBER: builtins.int + SIGNATURE_FIELD_NUMBER: builtins.int + SNIPPET_FIELD_NUMBER: builtins.int + @property + def metadata(self) -> global___SnippetConfigMetadata: + """Metadata for the snippet configuration. Some information contained here + will be included in the generated snippet own metadata. + """ + @property + def rpc(self) -> global___Rpc: + """The RPC this snippet is for.""" + @property + def signature(self) -> global___SnippetSignature: + """The generated snippet method signature.""" + @property + def snippet(self) -> global___Snippet: + """The actual snippet (code).""" + def __init__( + self, + *, + metadata: global___SnippetConfigMetadata | None = ..., + rpc: global___Rpc | None = ..., + signature: global___SnippetSignature | None = ..., + snippet: global___Snippet | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "rpc", b"rpc", "signature", b"signature", "snippet", b"snippet"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "rpc", b"rpc", "signature", b"signature", "snippet", b"snippet"]) -> None: ... + +global___SnippetConfig = SnippetConfig + +@typing_extensions.final +class SnippetConfigMetadata(google.protobuf.message.Message): + """Metadata for the snippet configuration. Some information contained here will + be included in the generated snippet's own metadata. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SKIPPED_FIELD_NUMBER: builtins.int + SKIPPED_LANGUAGES_FIELD_NUMBER: builtins.int + CONFIG_ID_FIELD_NUMBER: builtins.int + SNIPPET_NAME_FIELD_NUMBER: builtins.int + SNIPPET_DESCRIPTION_FIELD_NUMBER: builtins.int + skipped: builtins.bool + """Whether this snippet config should be skipped for/ generation. + This is useful when snippets are developed before features are released. 
+ Defaults to false. + """ + @property + def skipped_languages(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[global___GeneratorOutputLanguage.ValueType]: + """List of languages to skip snippet generation for. + Config producers should specify here languages for which some of the + capabilities required by the snippet are not supported. For instance, if a + specific client option is required by the snippet, and that option is not + supported by .NET client libraries, then C_SHARP should be specified here. + """ + config_id: builtins.str + """The config id. This will be used to generate the region tag of the snippet. + Required. + The region tag format will be: + [{START|END} + ${apishortname}_${apiVersion}_config_${ServiceName}_${RpcName}_${config_id}_{sync|async}] + - config_id must be unique for a given Service/RPC pair. + - config_id must not contain the API, Service or RPC identifiers as that + will be automatically included in the region tag. + - config_id may only contain letters and numbers + - config_id should be PascalCased + - Preferable, config_id should not exceed 50 characters, although this is + not a hard requirement. + - config_id may be somewhat descriptive of the snippet or just a random + identifier. If it's descriptive, do not make it overly verbose, there are + the human readable snippet_name and snippet_description fields for properly + describing the snippet. For instance, prefer DefaultDatasetCreation to + DatasetCreationUsingDefaultValuesExceptForDatasetNameWhichIsRequired. + """ + snippet_name: builtins.str + """The human readable name of the snippet. + To be included in metadata and in the sample itself in the top-level + description. + """ + snippet_description: builtins.str + """The description of the snippet. + To be included in metadata and in the sample itself in the top-level + description. 
+ """ + def __init__( + self, + *, + skipped: builtins.bool = ..., + skipped_languages: collections.abc.Iterable[global___GeneratorOutputLanguage.ValueType] | None = ..., + config_id: builtins.str = ..., + snippet_name: builtins.str = ..., + snippet_description: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["config_id", b"config_id", "skipped", b"skipped", "skipped_languages", b"skipped_languages", "snippet_description", b"snippet_description", "snippet_name", b"snippet_name"]) -> None: ... + +global___SnippetConfigMetadata = SnippetConfigMetadata + +@typing_extensions.final +class Rpc(google.protobuf.message.Message): + """An RPC for which a Snippet may be defined.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PROTO_PACKAGE_FIELD_NUMBER: builtins.int + API_VERSION_FIELD_NUMBER: builtins.int + SERVICE_NAME_FIELD_NUMBER: builtins.int + RPC_NAME_FIELD_NUMBER: builtins.int + proto_package: builtins.str + """This is identical to the protobuf package ending with a version number, + after removing said version number. For instance, where the api ID is + "google.cloud.translate.v3" the API name is "google.cloud.translate". + """ + @property + def api_version(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """The list of API versions for which one snippet is defined. + The same RPC may exist in different versions (v1, v1beta, v2) of the API + and may be covered by the same snippet config. + """ + service_name: builtins.str + """The protobuf service name relative to the api name + version where the RPC + is defined. Example: "TransalationService". + """ + rpc_name: builtins.str + """The RPC name relative to the service name.""" + def __init__( + self, + *, + proto_package: builtins.str = ..., + api_version: collections.abc.Iterable[builtins.str] | None = ..., + service_name: builtins.str = ..., + rpc_name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["api_version", b"api_version", "proto_package", b"proto_package", "rpc_name", b"rpc_name", "service_name", b"service_name"]) -> None: ... + +global___Rpc = Rpc + +@typing_extensions.final +class SnippetSignature(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _SyncPreference: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _SyncPreferenceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SnippetSignature._SyncPreference.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LANGUAGE_PREFERRED: SnippetSignature._SyncPreference.ValueType # 0 + """Generate an async, sync or both snippets as per language + (style, guidelines, community) preference. + """ + PREFER_ASYNC: SnippetSignature._SyncPreference.ValueType # 1 + """Languages supporting async methods should generate an async snippet. + Languages that do not support async methods will generate a sync snippet + instead. + """ + PREFER_SYNC: SnippetSignature._SyncPreference.ValueType # 2 + """Languages supporting sync methods should generate a sync snippet. + Languages that do not support sync methods will generate an async snippet + instead. + """ + BOTH: SnippetSignature._SyncPreference.ValueType # 3 + """Languages that support both async and sync methods should generate both a + sync and an async snippet. Note that different samples should be in + different files. + Languages that support only one of async and sync methods should + generate the supported snippet. + """ + + class SyncPreference(_SyncPreference, metaclass=_SyncPreferenceEnumTypeWrapper): + """Synchronous preference indicator for the generated snippet. + Note that some languages only support one-off sync or async methods so this + is just a preference indicator.
+ """ + + LANGUAGE_PREFERRED: SnippetSignature.SyncPreference.ValueType # 0 + """Generate an async, sync or both snippets as per language + (style, guidelines, community) preference. + """ + PREFER_ASYNC: SnippetSignature.SyncPreference.ValueType # 1 + """Languages supporting async methods should generate an async snippet. + Languages that do not support async methods will generate a sync snippet + instead. + """ + PREFER_SYNC: SnippetSignature.SyncPreference.ValueType # 2 + """Languages supporting sync methods should generate a sync snippet. + Languages that do not support sync methods will generate an async snippet + instead. + """ + BOTH: SnippetSignature.SyncPreference.ValueType # 3 + """Languages that support both async and sync methods should generate both a + sync and an async snippet. Note that different samples should be in + different files. + Languages that support only one of async and sync methods should + generate the supported snippet. + """ + + SNIPPET_METHOD_NAME_FIELD_NUMBER: builtins.int + RETURN_TYPE_FIELD_NUMBER: builtins.int + SYNC_PREFERENCE_FIELD_NUMBER: builtins.int + PARAMETERS_FIELD_NUMBER: builtins.int + snippet_method_name: builtins.str + """The name for the snippet method specified in snake_case. Required.""" + @property + def return_type(self) -> global___Type: + """The return type for the snippet method. Unset for methods that do not + return a value or if the sample is to be generated for non statically-typed + languages only. + """ + sync_preference: global___SnippetSignature.SyncPreference.ValueType + """Synchronous preference indicator for the generated snippet.""" + @property + def parameters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement.Declaration]: + """The list of parameters that the snippet will receive. May be empty if the + snippet method does not receive parameters. If any, parameters should + appear in generated code in the same order as they appear in this field.
+ Note also that if parameters have assignments, some languages + will represent these as default parameter values which are + usually scalars. + """ + def __init__( + self, + *, + snippet_method_name: builtins.str = ..., + return_type: global___Type | None = ..., + sync_preference: global___SnippetSignature.SyncPreference.ValueType = ..., + parameters: collections.abc.Iterable[global___Statement.Declaration] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["return_type", b"return_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["parameters", b"parameters", "return_type", b"return_type", "snippet_method_name", b"snippet_method_name", "sync_preference", b"sync_preference"]) -> None: ... + +global___SnippetSignature = SnippetSignature + +@typing_extensions.final +class Snippet(google.protobuf.message.Message): + """The actual snippet (code), including client and request initialization, + client call and response handling. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class ClientInitialization(google.protobuf.message.Message): + """Options to initialize the client with.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class ServiceEndpoint(google.protobuf.message.Message): + """Different languages have different requirements for how a service + endpoint should be specified, so in sample configuration the endpoint + needs to be specified in parts that each language generator can then use + to compose the string literal to include in the sample according to their + own requirements. None of this manipulation should be included in the + sample itself, where only a string literal is expected as the value for + the custom endpoint. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ServiceEndpointSchema: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ServiceEndpointSchemaEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LANGUAGE_DEFAULT: Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType # 0 + """Each language to use their own default. + Languages that do not require the schema to be specified, + may omit it when building the custom endpoint. + Languages that require the schema to be specified should use the + same default value as they do in library generation. + """ + HTTPS: Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType # 1 + """Use HTTPS for service endpoint schema.""" + HTTP: Snippet.ClientInitialization.ServiceEndpoint._ServiceEndpointSchema.ValueType # 2 + """Use HTTP for service endpoint schema.""" + + class ServiceEndpointSchema(_ServiceEndpointSchema, metaclass=_ServiceEndpointSchemaEnumTypeWrapper): + """Schemas for the service endpoint.""" + + LANGUAGE_DEFAULT: Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema.ValueType # 0 + """Each language to use their own default. + Languages that do not require the schema to be specified, + may omit it when building the custom endpoint. + Languages that require the schema to be specified should use the + same default value as they do in library generation. 
+ """ + HTTPS: Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema.ValueType # 1 + """Use HTTPS for service endpoint schema.""" + HTTP: Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema.ValueType # 2 + """Use HTTP for service endpoint schema.""" + + SCHEMA_FIELD_NUMBER: builtins.int + HOST_FIELD_NUMBER: builtins.int + REGION_FIELD_NUMBER: builtins.int + PORT_FIELD_NUMBER: builtins.int + schema: global___Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema.ValueType + """The schema for the service endpoint.""" + host: builtins.str + """The unregionalized host for the service endpoint. + For instance "pubsub.googleapis.com". Required. + """ + region: builtins.str + """The region if this is a regional endpoint. + For instance "us-east1". Optional. + If present the regional host should be constructed as follows: + {region}-{host}. + """ + port: builtins.int + """The port for the service endpoint. Optional. + Languages that require a port to be specified should use the same + default value as they do in library generation. + """ + def __init__( + self, + *, + schema: global___Snippet.ClientInitialization.ServiceEndpoint.ServiceEndpointSchema.ValueType = ..., + host: builtins.str = ..., + region: builtins.str = ..., + port: builtins.int = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["host", b"host", "port", b"port", "region", b"region", "schema", b"schema"]) -> None: ... + + PRE_CLIENT_INITIALIZATION_FIELD_NUMBER: builtins.int + CUSTOM_SERVICE_ENDPOINT_FIELD_NUMBER: builtins.int + @property + def pre_client_initialization(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to be executed before the service client is initialized. For + instance, some Statement.Declaration statements of variables to be used + in service client initialization. May be empty. 
If any, statements will + be executed in the same order as they appear on + pre_request_initialization. + """ + @property + def custom_service_endpoint(self) -> global___Snippet.ClientInitialization.ServiceEndpoint: + """Custom endpoint to use in client initialization. Optional.""" + def __init__( + self, + *, + pre_client_initialization: collections.abc.Iterable[global___Statement] | None = ..., + custom_service_endpoint: global___Snippet.ClientInitialization.ServiceEndpoint | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["custom_service_endpoint", b"custom_service_endpoint"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["custom_service_endpoint", b"custom_service_endpoint", "pre_client_initialization", b"pre_client_initialization"]) -> None: ... + + @typing_extensions.final + class Standard(google.protobuf.message.Message): + """A standard RPC operation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + CALL_FIELD_NUMBER: builtins.int + RESPONSE_HANDLING_FIELD_NUMBER: builtins.int + @property + def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + """Describes how to initialize the request object. Required.""" + @property + def call(self) -> global___Snippet.ClientCall: + """Client call configuration. Optional.""" + @property + def response_handling(self) -> global___Snippet.SimpleResponseHandling: + """Describes how to handle simple responses. Optional.""" + def __init__( + self, + *, + request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + call: global___Snippet.ClientCall | None = ..., + response_handling: global___Snippet.SimpleResponseHandling | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["call", b"call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["call", b"call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> None: ... + + @typing_extensions.final + class Paginated(google.protobuf.message.Message): + """A paginated RPC operation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + PAGINATED_CALL_FIELD_NUMBER: builtins.int + PAGINATED_HANDLING_FIELD_NUMBER: builtins.int + @property + def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + """Describes how to initialize the request object. This is the request + object used for the initial paginated RPC call. Depending on how the + response should be handled subsequent requests will be initialized either + implicitly by the library or explicitly by the snippet. + """ + @property + def paginated_call(self) -> global___Snippet.ClientCall: + """Client call configuration. Optional. + This configures the client call, which in turn may result in several RPC + calls. How this configuration is applied to RPC calls may be language + specific. + """ + @property + def paginated_handling(self) -> global___Snippet.PaginatedResponseHandling: + """Describes how to handle paginated responses.""" + def __init__( + self, + *, + request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + paginated_call: global___Snippet.ClientCall | None = ..., + paginated_handling: global___Snippet.PaginatedResponseHandling | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["paginated_call", b"paginated_call", "paginated_handling", b"paginated_handling", "request_initialization", b"request_initialization"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["paginated_call", b"paginated_call", "paginated_handling", b"paginated_handling", "request_initialization", b"request_initialization"]) -> None: ... + + @typing_extensions.final + class Lro(google.protobuf.message.Message): + """A long-running RPC operation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + CALL_FIELD_NUMBER: builtins.int + LRO_HANDLING_FIELD_NUMBER: builtins.int + @property + def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + """Describes how to initialize the request object. Required. + This describes the request to the LRO operation itself and not to polling + operations. + """ + @property + def call(self) -> global___Snippet.ClientCall: + """Client call configuration. Optional. + This configures the call to the LRO operation itself and not to polling + operations. + """ + @property + def lro_handling(self) -> global___Snippet.LroResponseHandling: + """Describes how to handle the LRO response. Required.""" + def __init__( + self, + *, + request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + call: global___Snippet.ClientCall | None = ..., + lro_handling: global___Snippet.LroResponseHandling | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["call", b"call", "lro_handling", b"lro_handling", "request_initialization", b"request_initialization"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["call", b"call", "lro_handling", b"lro_handling", "request_initialization", b"request_initialization"]) -> None: ... 
+ + @typing_extensions.final + class ClientStreaming(google.protobuf.message.Message): + """A client streaming RPC operation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INITIALIZATION_CALL_FIELD_NUMBER: builtins.int + CLIENT_STREAM_NAME_FIELD_NUMBER: builtins.int + REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + RESPONSE_HANDLING_FIELD_NUMBER: builtins.int + @property + def initialization_call(self) -> global___Snippet.ClientCall: + """Client call configuration. Optional. + This configures the call that initializes the stream. + """ + client_stream_name: builtins.str + """The name of the variable to capture the client stream in, i.e. the object + in which streaming requests are written. Required. + """ + @property + def request_initialization(self) -> global___Snippet.StreamingRequestInitialization: + """Describes how to initialize the streaming request objects. Required.""" + @property + def response_handling(self) -> global___Snippet.SimpleResponseHandling: + """Describes how to handle the response. Optional. + Note that the response will be available after all the streaming requests + have been written, each language should generate code accordingly. + """ + def __init__( + self, + *, + initialization_call: global___Snippet.ClientCall | None = ..., + client_stream_name: builtins.str = ..., + request_initialization: global___Snippet.StreamingRequestInitialization | None = ..., + response_handling: global___Snippet.SimpleResponseHandling | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["client_stream_name", b"client_stream_name", "initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> None: ... + + @typing_extensions.final + class ServerStreaming(google.protobuf.message.Message): + """A server streaming RPC operation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + INITIALIZATION_CALL_FIELD_NUMBER: builtins.int + SERVER_STREAM_NAME_FIELD_NUMBER: builtins.int + RESPONSE_HANDLING_FIELD_NUMBER: builtins.int + @property + def request_initialization(self) -> global___Snippet.SimpleRequestInitialization: + """Describes how to initialize the request object. Required.""" + @property + def initialization_call(self) -> global___Snippet.ClientCall: + """Client call configuration. Optional.""" + server_stream_name: builtins.str + """The name of the variable to capture the server stream in, i.e. the object + from which streaming responses will be read. Optional if the stream is + not to be read. + """ + @property + def response_handling(self) -> global___Snippet.StreamingResponseHandling: + """Describes how to handle the streaming responses. Optional if the stream + is not to be read. + """ + def __init__( + self, + *, + request_initialization: global___Snippet.SimpleRequestInitialization | None = ..., + initialization_call: global___Snippet.ClientCall | None = ..., + server_stream_name: builtins.str = ..., + response_handling: global___Snippet.StreamingResponseHandling | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling", "server_stream_name", b"server_stream_name"]) -> None: ... + + @typing_extensions.final + class BidiStreaming(google.protobuf.message.Message): + """A bidirectional streaming RPC operation.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + INITIALIZATION_CALL_FIELD_NUMBER: builtins.int + CLIENT_STREAM_NAME_FIELD_NUMBER: builtins.int + REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + SERVER_STREAM_NAME_FIELD_NUMBER: builtins.int + RESPONSE_HANDLING_FIELD_NUMBER: builtins.int + @property + def initialization_call(self) -> global___Snippet.ClientCall: + """Client call configuration. Optional. + This configures the call that initializes the stream. + """ + client_stream_name: builtins.str + """The name of the variable to capture the client stream in, i.e. the object + in which streaming requests are written. Required. + """ + @property + def request_initialization(self) -> global___Snippet.StreamingRequestInitialization: + """Describes how to initialize the streaming request objects. Required.""" + server_stream_name: builtins.str + """The name of the variable to capture the server stream in, i.e. the object + from which streaming responses will be read. Optional if the stream is + not to be read. + """ + @property + def response_handling(self) -> global___Snippet.StreamingResponseHandling: + """Describes how to handle the streaming responses. Optional if the stream + is not to be read. + """ + def __init__( + self, + *, + initialization_call: global___Snippet.ClientCall | None = ..., + client_stream_name: builtins.str = ..., + request_initialization: global___Snippet.StreamingRequestInitialization | None = ..., + server_stream_name: builtins.str = ..., + response_handling: global___Snippet.StreamingResponseHandling | None = ..., + ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["client_stream_name", b"client_stream_name", "initialization_call", b"initialization_call", "request_initialization", b"request_initialization", "response_handling", b"response_handling", "server_stream_name", b"server_stream_name"]) -> None: ... + + @typing_extensions.final + class ClientCall(google.protobuf.message.Message): + """An actual client service call. + Note: Just pre_call statements for now, but this message is included so + that adding per call options later on is not a breaking change. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PRE_CALL_FIELD_NUMBER: builtins.int + @property + def pre_call(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to be executed before the initial call to the service client + method is made. Whether that results in an RPC call or not is operation + type and language dependent. May be empty. + If any, statements should appear in generated code in the same order as + they appear on this field. + """ + def __init__( + self, + *, + pre_call: collections.abc.Iterable[global___Statement] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["pre_call", b"pre_call"]) -> None: ... 
+ + @typing_extensions.final + class SimpleRequestInitialization(google.protobuf.message.Message): + """Describes how to initialize a simple request object.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PRE_REQUEST_INITIALIZATION_FIELD_NUMBER: builtins.int + REQUEST_VALUE_FIELD_NUMBER: builtins.int + REQUEST_NAME_FIELD_NUMBER: builtins.int + @property + def pre_request_initialization(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to be executed before the request object is initialized. For + instance, some Statement.Declaration statements of variables to be used + in request initialization. May be empty. If any, statements will be + executed in the same order as they appear on pre_request_initialization. + """ + @property + def request_value(self) -> global___Expression: + """The request value. Required. + Should resolve to a type that is assignable to the request type of the + RPC. + """ + request_name: builtins.str + """The name for the variable that will hold the request object. + For example "request". Required. + """ + def __init__( + self, + *, + pre_request_initialization: collections.abc.Iterable[global___Statement] | None = ..., + request_value: global___Expression | None = ..., + request_name: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["request_value", b"request_value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["pre_request_initialization", b"pre_request_initialization", "request_name", b"request_name", "request_value", b"request_value"]) -> None: ... + + @typing_extensions.final + class StreamingRequestInitialization(google.protobuf.message.Message): + """Describes how to make requests to client streaming RPCs. + An iteration is defined which makes some per-iteration + Expression.NameValue availables that may be used to define + streaming_request. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FIRST_STREAMING_REQUEST_FIELD_NUMBER: builtins.int + ITERATION_FIELD_NUMBER: builtins.int + STREAMING_REQUEST_FIELD_NUMBER: builtins.int + @property + def first_streaming_request(self) -> global___Snippet.SimpleRequestInitialization: + """Describes how to initialize the first streaming request. Optional for + operations that do not require a specific first request. + """ + @property + def iteration(self) -> global___Statement.Iteration: + """The iteration to use for defining the streaming requests. Required.""" + @property + def streaming_request(self) -> global___Snippet.SimpleRequestInitialization: + """The streaming request that may be defined with iteration-specific + variables, and will result in a sequence of requests. Required. + Initialization of streaming_request should be placed, in generated code, + on the inner-most iteration defined by the iteration field as iterations + may be nested. + """ + def __init__( + self, + *, + first_streaming_request: global___Snippet.SimpleRequestInitialization | None = ..., + iteration: global___Statement.Iteration | None = ..., + streaming_request: global___Snippet.SimpleRequestInitialization | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["first_streaming_request", b"first_streaming_request", "iteration", b"iteration", "streaming_request", b"streaming_request"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["first_streaming_request", b"first_streaming_request", "iteration", b"iteration", "streaming_request", b"streaming_request"]) -> None: ... + + @typing_extensions.final + class SimpleResponseHandling(google.protobuf.message.Message): + """Describes how to handle a simple response object.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESPONSE_NAME_FIELD_NUMBER: builtins.int + response_name: builtins.str + """The name of the variable to capture the response in. 
May be unset if + the RPC does not return anything or the response is not to be captured. + """ + def __init__( + self, + *, + response_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["response_name", b"response_name"]) -> None: ... + + @typing_extensions.final + class PaginatedResponseHandling(google.protobuf.message.Message): + """Describes how to handle paginated responses.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class ByItem(google.protobuf.message.Message): + """Iterate item by item, lazily and automatically fetching pages as needed.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ITEM_NAME_FIELD_NUMBER: builtins.int + PER_ITEM_STATEMENTS_FIELD_NUMBER: builtins.int + item_name: builtins.str + """The name of the variable to capture the current item in. Required.""" + @property + def per_item_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to execute for each item. Optional.""" + def __init__( + self, + *, + item_name: builtins.str = ..., + per_item_statements: collections.abc.Iterable[global___Statement] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["item_name", b"item_name", "per_item_statements", b"per_item_statements"]) -> None: ... + + @typing_extensions.final + class ByPage(google.protobuf.message.Message): + """Iterate page by page, lazily and automatically fetching pages as needed.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAGE_NAME_FIELD_NUMBER: builtins.int + PER_PAGE_STATEMENTS_FIELD_NUMBER: builtins.int + BY_ITEM_FIELD_NUMBER: builtins.int + page_name: builtins.str + """The name of the variable to capture the current page in. 
Required.""" + @property + def per_page_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to execute for each page. Optional.""" + @property + def by_item(self) -> global___Snippet.PaginatedResponseHandling.ByItem: + """By item iteration configuration within the current page. Optional.""" + def __init__( + self, + *, + page_name: builtins.str = ..., + per_page_statements: collections.abc.Iterable[global___Statement] | None = ..., + by_item: global___Snippet.PaginatedResponseHandling.ByItem | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["by_item", b"by_item"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["by_item", b"by_item", "page_name", b"page_name", "per_page_statements", b"per_page_statements"]) -> None: ... + + @typing_extensions.final + class NextPageToken(google.protobuf.message.Message): + """Iterate page by page, explicitly using the next page token. + This pagination mode will modify the original request by subsequently + setting the next page token obtained from the previous response. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NEXT_PAGE_TOKEN_NAME_FIELD_NUMBER: builtins.int + EXPLICIT_PAGE_SIZE_FIELD_NUMBER: builtins.int + BY_PAGE_FIELD_NUMBER: builtins.int + next_page_token_name: builtins.str + """The name of the variable to capture the next page token in. Required.""" + @property + def explicit_page_size(self) -> global___Expression: + """The explicit and guaranteed page size for fetched pages. Required. + TODO: Double check that all languages have this modality. + Otherwise, they may fallback to by page iteration. 
+ """ + @property + def by_page(self) -> global___Snippet.PaginatedResponseHandling.ByPage: + """Configures how to iterate over the explicitly fetched page.""" + def __init__( + self, + *, + next_page_token_name: builtins.str = ..., + explicit_page_size: global___Expression | None = ..., + by_page: global___Snippet.PaginatedResponseHandling.ByPage | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["by_page", b"by_page", "explicit_page_size", b"explicit_page_size"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["by_page", b"by_page", "explicit_page_size", b"explicit_page_size", "next_page_token_name", b"next_page_token_name"]) -> None: ... + + RESPONSE_NAME_FIELD_NUMBER: builtins.int + BY_ITEM_FIELD_NUMBER: builtins.int + BY_PAGE_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + response_name: builtins.str + """The name of the variable to capture the initial client call response in. + Required. Note that this will capture the object representing the lazy + item sequence. + """ + @property + def by_item(self) -> global___Snippet.PaginatedResponseHandling.ByItem: + """Iterate item by item, lazily and automatically fetching pages as + needed. + """ + @property + def by_page(self) -> global___Snippet.PaginatedResponseHandling.ByPage: + """Iterate page by page, lazily and automatically fetching pages as + needed. + """ + @property + def next_page_token(self) -> global___Snippet.PaginatedResponseHandling.NextPageToken: + """Iterate page by page, explicitly using the next page token. + This pagination mode will modify the original request by subsequently + setting the next page token obtained from the previous response. 
+ """ + def __init__( + self, + *, + response_name: builtins.str = ..., + by_item: global___Snippet.PaginatedResponseHandling.ByItem | None = ..., + by_page: global___Snippet.PaginatedResponseHandling.ByPage | None = ..., + next_page_token: global___Snippet.PaginatedResponseHandling.NextPageToken | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["by_item", b"by_item", "by_page", b"by_page", "next_page_token", b"next_page_token", "pagination_kind", b"pagination_kind"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["by_item", b"by_item", "by_page", b"by_page", "next_page_token", b"next_page_token", "pagination_kind", b"pagination_kind", "response_name", b"response_name"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["pagination_kind", b"pagination_kind"]) -> typing_extensions.Literal["by_item", "by_page", "next_page_token"] | None: ... + + @typing_extensions.final + class LroResponseHandling(google.protobuf.message.Message): + """Describes how to handle LRO responses.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _PollingType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _PollingTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Snippet.LroResponseHandling._PollingType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + UNTIL_COMPLETION: Snippet.LroResponseHandling._PollingType.ValueType # 0 + """Poll until completion. Default value.""" + ONCE: Snippet.LroResponseHandling._PollingType.ValueType # 1 + """Poll just once.""" + NONE: Snippet.LroResponseHandling._PollingType.ValueType # 2 + """Do not poll.""" + + class PollingType(_PollingType, metaclass=_PollingTypeEnumTypeWrapper): ... + UNTIL_COMPLETION: Snippet.LroResponseHandling.PollingType.ValueType # 0 + """Poll until completion. 
Default value.""" + ONCE: Snippet.LroResponseHandling.PollingType.ValueType # 1 + """Poll just once.""" + NONE: Snippet.LroResponseHandling.PollingType.ValueType # 2 + """Do not poll.""" + + RESPONSE_NAME_FIELD_NUMBER: builtins.int + POLLING_TYPE_FIELD_NUMBER: builtins.int + POLLING_RESPONSE_NAME_FIELD_NUMBER: builtins.int + POLLING_CALL_FIELD_NUMBER: builtins.int + response_name: builtins.str + """The name of the variable to capture the LRO response in. Required. + This will capture the response to the LRO operaion call and not to + polling operations. + """ + polling_type: global___Snippet.LroResponseHandling.PollingType.ValueType + """How to perform polling. Required.""" + polling_response_name: builtins.str + """The name of the variable to capture the polling response in. Optional if + the polling result should not be captured. Should be unset if + PollingType.NONE is specified. + """ + @property + def polling_call(self) -> global___Snippet.ClientCall: + """Configures the polling call. Optional. Should be unset if + PollingType.NONE is specified. + """ + def __init__( + self, + *, + response_name: builtins.str = ..., + polling_type: global___Snippet.LroResponseHandling.PollingType.ValueType = ..., + polling_response_name: builtins.str = ..., + polling_call: global___Snippet.ClientCall | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["polling_call", b"polling_call"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["polling_call", b"polling_call", "polling_response_name", b"polling_response_name", "polling_type", b"polling_type", "response_name", b"response_name"]) -> None: ... 
+ + @typing_extensions.final + class StreamingResponseHandling(google.protobuf.message.Message): + """Describes how to handle streaming responses.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CURRENT_RESPONSE_NAME_FIELD_NUMBER: builtins.int + PER_STREAM_RESPONSE_STATEMENTS_FIELD_NUMBER: builtins.int + current_response_name: builtins.str + """The name of the variable to capture the current response in the stream. + Required. + """ + @property + def per_stream_response_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to execute for each stream response. Optional.""" + def __init__( + self, + *, + current_response_name: builtins.str = ..., + per_stream_response_statements: collections.abc.Iterable[global___Statement] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["current_response_name", b"current_response_name", "per_stream_response_statements", b"per_stream_response_statements"]) -> None: ... + + SERVICE_CLIENT_INITIALIZATION_FIELD_NUMBER: builtins.int + STANDARD_FIELD_NUMBER: builtins.int + PAGINATED_FIELD_NUMBER: builtins.int + LRO_FIELD_NUMBER: builtins.int + CLIENT_STREAMING_FIELD_NUMBER: builtins.int + SERVER_STREAMING_FIELD_NUMBER: builtins.int + BIDI_STREAMING_FIELD_NUMBER: builtins.int + FINAL_STATEMENTS_FIELD_NUMBER: builtins.int + @property + def service_client_initialization(self) -> global___Snippet.ClientInitialization: + """Service client initialization. Optional. If unset language-specific + defaults will be applied. 
+ """ + @property + def standard(self) -> global___Snippet.Standard: + """A standard RPC operation.""" + @property + def paginated(self) -> global___Snippet.Paginated: + """A paginated RPC operation.""" + @property + def lro(self) -> global___Snippet.Lro: + """A long-running RPC operation.""" + @property + def client_streaming(self) -> global___Snippet.ClientStreaming: + """A client streaming RPC operation.""" + @property + def server_streaming(self) -> global___Snippet.ServerStreaming: + """A server streaming RPC operation.""" + @property + def bidi_streaming(self) -> global___Snippet.BidiStreaming: + """A bidirectional streaming RPC operation.""" + @property + def final_statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """Statements to be executed before the snippet ends. For instance, some + Statement.StandardOutput statements and the Statement.Return statement. + May be empty. If any, statements should appear on generated code in the + same order as they appear on this field. + """ + def __init__( + self, + *, + service_client_initialization: global___Snippet.ClientInitialization | None = ..., + standard: global___Snippet.Standard | None = ..., + paginated: global___Snippet.Paginated | None = ..., + lro: global___Snippet.Lro | None = ..., + client_streaming: global___Snippet.ClientStreaming | None = ..., + server_streaming: global___Snippet.ServerStreaming | None = ..., + bidi_streaming: global___Snippet.BidiStreaming | None = ..., + final_statements: collections.abc.Iterable[global___Statement] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bidi_streaming", b"bidi_streaming", "call", b"call", "client_streaming", b"client_streaming", "lro", b"lro", "paginated", b"paginated", "server_streaming", b"server_streaming", "service_client_initialization", b"service_client_initialization", "standard", b"standard"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["bidi_streaming", b"bidi_streaming", "call", b"call", "client_streaming", b"client_streaming", "final_statements", b"final_statements", "lro", b"lro", "paginated", b"paginated", "server_streaming", b"server_streaming", "service_client_initialization", b"service_client_initialization", "standard", b"standard"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["call", b"call"]) -> typing_extensions.Literal["standard", "paginated", "lro", "client_streaming", "server_streaming", "bidi_streaming"] | None: ... + +global___Snippet = Snippet + +@typing_extensions.final +class Statement(google.protobuf.message.Message): + """A stament that will translate into equivalent language-specific statements.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class Declaration(google.protobuf.message.Message): + """A declaration which may be used for parameters or variables. + Note that the only form of assignment supported is on declaration. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + @property + def type(self) -> global___Type: + """The type of this declaration. Required unless the snippet is to be + generated in non-statically typed languages only. + """ + name: builtins.str + """The name of the variable or parameter. Required.""" + @property + def value(self) -> global___Expression: + """The value to assign to the variable or parameter. + Optional for parameters. + Required for variables as assignment outside of a declaration is not + supported. + Should resolve to a type that is assignable to this Declaration type. + """ + description: builtins.str + """An optional description that will be included alongside the declaration + likely as a code comment. 
+ """ + def __init__( + self, + *, + type: global___Type | None = ..., + name: builtins.str = ..., + value: global___Expression | None = ..., + description: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["type", b"type", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "type", b"type", "value", b"value"]) -> None: ... + + @typing_extensions.final + class StandardOutput(google.protobuf.message.Message): + """A statement to write information to sdtout.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUE_FIELD_NUMBER: builtins.int + @property + def value(self) -> global___Expression: + """The value to write to sdtout. + Should evaluate to a string value or resolve to a type that all languages + are able to convert to string without specific code, for instance numeric + values, and possibly protobuf messages as they may be converted to their + JSON representation. + """ + def __init__( + self, + *, + value: global___Expression | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ... + + @typing_extensions.final + class Return(google.protobuf.message.Message): + """A return statement.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RESULT_FIELD_NUMBER: builtins.int + @property + def result(self) -> global___Expression: + """The value to return. Should resolve to a type that is assignable to + SnippetSignature.return_type. + """ + def __init__( + self, + *, + result: global___Expression | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["result", b"result"]) -> None: ... 
+ + @typing_extensions.final + class Conditional(google.protobuf.message.Message): + """A conditional statement. One of two given sets of statements will be + executed depending on the result of evaluating a given condition. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CONDITION_FIELD_NUMBER: builtins.int + ON_TRUE_FIELD_NUMBER: builtins.int + ON_FALSE_FIELD_NUMBER: builtins.int + @property + def condition(self) -> global___Expression: + """The condition to evaluate. Should evaluate to a bolean value.""" + @property + def on_true(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """The set of statements to execute if condition evaluates to true. + The statements should be executed in the order that they appear. + """ + @property + def on_false(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """The set of statements to execute if condition evaluates to false. + The statements should be executed in the order that they appear. + """ + def __init__( + self, + *, + condition: global___Expression | None = ..., + on_true: collections.abc.Iterable[global___Statement] | None = ..., + on_false: collections.abc.Iterable[global___Statement] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["condition", b"condition"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["condition", b"condition", "on_false", b"on_false", "on_true", b"on_true"]) -> None: ... + + @typing_extensions.final + class Iteration(google.protobuf.message.Message): + """An iteration statement. A given Statement set will be executed + repeatedly according to the iteration definition. + Each iteration type will make a per-step Expression.NameValue set + available that may be used withing the given Statement set. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class NumericSequenceIteration(google.protobuf.message.Message): + """Represents a numeric sequence iteration. + A numeric sequence is defined over which to iterate making the current + element of the sequence available in a variable. + It's ultimately the responsability of the user to define a finite + sequence, although tooling may be provided to help. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + START_AT_FIELD_NUMBER: builtins.int + INCREMENT_FIELD_NUMBER: builtins.int + MULTIPLIER_FIELD_NUMBER: builtins.int + LESS_THAN_OR_EQUAL_FIELD_NUMBER: builtins.int + LESS_THAN_FIELD_NUMBER: builtins.int + GREATER_THAN_OR_EQUAL_FIELD_NUMBER: builtins.int + GREATER_THAN_FIELD_NUMBER: builtins.int + TOTAL_STEPS_FIELD_NUMBER: builtins.int + @property + def start_at(self) -> global___Statement.Declaration: + """Where to start the sequence at, ie. the first element of the iteration. + Required. + The Statement.Declaration.type should be a numeric type. + The Statement.Declaration.value is required. + The Statement.Declaration.name will be the name used to make the + current element of the iteration available. + """ + @property + def increment(self) -> global___Expression: + """An increment, which may be a positive or negative value. + Should resolve to a numeric type. + """ + @property + def multiplier(self) -> global___Expression: + """A multiplier, which may be less than or greater than 1. + Should resolve to a numeric type. + """ + @property + def less_than_or_equal(self) -> global___Expression: + """When the current value is less than or equal to this value. + Should resolve to a numeric type. + """ + @property + def less_than(self) -> global___Expression: + """When the current value is less than this value. + Should resolve to a numeric type. 
+ """ + @property + def greater_than_or_equal(self) -> global___Expression: + """When the current value is greater than or equal to this value. + Should resolve to a numeric type. + """ + @property + def greater_than(self) -> global___Expression: + """When the current value is greater than this value. + Should resolve to a numeric type. + """ + @property + def total_steps(self) -> global___Expression: + """After a set number of steps. Must be non-negative. + Should resolve to an integer type. + """ + def __init__( + self, + *, + start_at: global___Statement.Declaration | None = ..., + increment: global___Expression | None = ..., + multiplier: global___Expression | None = ..., + less_than_or_equal: global___Expression | None = ..., + less_than: global___Expression | None = ..., + greater_than_or_equal: global___Expression | None = ..., + greater_than: global___Expression | None = ..., + total_steps: global___Expression | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end", b"end", "greater_than", b"greater_than", "greater_than_or_equal", b"greater_than_or_equal", "increment", b"increment", "less_than", b"less_than", "less_than_or_equal", b"less_than_or_equal", "multiplier", b"multiplier", "start_at", b"start_at", "step", b"step", "total_steps", b"total_steps"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end", b"end", "greater_than", b"greater_than", "greater_than_or_equal", b"greater_than_or_equal", "increment", b"increment", "less_than", b"less_than", "less_than_or_equal", b"less_than_or_equal", "multiplier", b"multiplier", "start_at", b"start_at", "step", b"step", "total_steps", b"total_steps"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["end", b"end"]) -> typing_extensions.Literal["less_than_or_equal", "less_than", "greater_than_or_equal", "greater_than", "total_steps"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing_extensions.Literal["step", b"step"]) -> typing_extensions.Literal["increment", "multiplier"] | None: ... + + @typing_extensions.final + class RepeatedIteration(google.protobuf.message.Message): + """Represents an iteration over repeated elements. + A repeated value is provided over which the iteration will occur making + the current element of the sequence available in a variable. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REPEATED_ELEMENTS_FIELD_NUMBER: builtins.int + CURRENT_NAME_FIELD_NUMBER: builtins.int + @property + def repeated_elements(self) -> global___Statement.Declaration: + """The repeated elements to iterate over. Required. + The Statement.Declaration.type should be Type.RepeatedType. + The Statement.Declaration.value is required. + TODO: Consider this to be a oneof Declaration or Expression.NameValue. + """ + current_name: builtins.str + """The name of the variable that will hold the value of the current + element on each iteration. For example "item". Required. The type of + this variable will be the same as that of the elements in + repeated_elements. + """ + def __init__( + self, + *, + repeated_elements: global___Statement.Declaration | None = ..., + current_name: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["repeated_elements", b"repeated_elements"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["current_name", b"current_name", "repeated_elements", b"repeated_elements"]) -> None: ... + + @typing_extensions.final + class MapIteration(google.protobuf.message.Message): + """Represents an iteration over a map. + A map value is provided over which the iteration will occur making + the current key and element of the map availables in variables. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MAP_FIELD_NUMBER: builtins.int + CURRENT_KEY_NAME_FIELD_NUMBER: builtins.int + CURRENT_VALUE_NAME_FIELD_NUMBER: builtins.int + @property + def map(self) -> global___Statement.Declaration: + """The map to iterate over. Required. + The Statement.Declaration.type should be Type.MapType. + The Statement.Declaration.value is required. + TODO: Consider this to be a oneof Declaration or Expression.NameValue. + """ + current_key_name: builtins.str + """The name of the variable that will hold the value of the current key + on each iteration. For example "key". Required. + The type of this variable will be the same as that of the keys in map. + """ + current_value_name: builtins.str + """The name of the variable that will hold the value associated to the + current key on each iteration. For example "value". Required. + The type of this variable will be the same as that of the values in + map. + """ + def __init__( + self, + *, + map: global___Statement.Declaration | None = ..., + current_key_name: builtins.str = ..., + current_value_name: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["map", b"map"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["current_key_name", b"current_key_name", "current_value_name", b"current_value_name", "map", b"map"]) -> None: ... + + @typing_extensions.final + class BytesIteration(google.protobuf.message.Message): + """Represents an iteration over a byte sequence. + A byte sequence is provided over which the iteration will occur making + the current chunk of bytes available in a variable. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + BYTE_SEQUENCE_FIELD_NUMBER: builtins.int + CHUNK_SIZE_FIELD_NUMBER: builtins.int + TOTAL_CHUNKS_FIELD_NUMBER: builtins.int + CHUNK_TYPE_FIELD_NUMBER: builtins.int + CURRENT_NAME_FIELD_NUMBER: builtins.int + @property + def byte_sequence(self) -> global___Statement.Declaration: + """The byte sequence to iterate over. Required. + The Statement.Declaration.type should be Type.BytesType. + The Statement.Declaration.value is required. + """ + @property + def chunk_size(self) -> global___Expression: + """The size of the chuncks to split the byte sequence in. The last chunk + will be at most chunk_size. Must be positive. + Should resolve to an integer type. + """ + @property + def total_chunks(self) -> global___Expression: + """The total amount of chunks to split the byte sequence into. Note that + this is only possible when the byte sequence has a known length so it + might not be possible with certain streams, for instance, with + network streams. Must be positive. + Should resolve to an integer type. + """ + @property + def chunk_type(self) -> global___Type.BytesType: + """The type in which the chunk will be made available.""" + current_name: builtins.str + """The name of the variable that will hold the value of the current chunk + on each iteration. For example "chunk". Required. + Will be of type chunk_type. + """ + def __init__( + self, + *, + byte_sequence: global___Statement.Declaration | None = ..., + chunk_size: global___Expression | None = ..., + total_chunks: global___Expression | None = ..., + chunk_type: global___Type.BytesType | None = ..., + current_name: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["byte_sequence", b"byte_sequence", "chunk", b"chunk", "chunk_size", b"chunk_size", "chunk_type", b"chunk_type", "total_chunks", b"total_chunks"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["byte_sequence", b"byte_sequence", "chunk", b"chunk", "chunk_size", b"chunk_size", "chunk_type", b"chunk_type", "current_name", b"current_name", "total_chunks", b"total_chunks"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["chunk", b"chunk"]) -> typing_extensions.Literal["chunk_size", "total_chunks"] | None: ... + + NUMERIC_SEQUENCE_ITERATION_FIELD_NUMBER: builtins.int + REPEATED_ITERATION_FIELD_NUMBER: builtins.int + MAP_ITERATION_FIELD_NUMBER: builtins.int + BYTES_ITERATION_FIELD_NUMBER: builtins.int + STATEMENTS_FIELD_NUMBER: builtins.int + @property + def numeric_sequence_iteration(self) -> global___Statement.Iteration.NumericSequenceIteration: + """A numeric sequence iteration.""" + @property + def repeated_iteration(self) -> global___Statement.Iteration.RepeatedIteration: + """An iteration over repeated elements, i.e. an iteration over a list.""" + @property + def map_iteration(self) -> global___Statement.Iteration.MapIteration: + """A map iteration.""" + @property + def bytes_iteration(self) -> global___Statement.Iteration.BytesIteration: + """A bytes sequence iteration.""" + @property + def statements(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Statement]: + """The set of statements to execute on each step of the iteration. + The statements should be executed in the order that they appear. + May be empty as iterations will also be used for streaming request + initialization or streaming response handling. 
+ """ + def __init__( + self, + *, + numeric_sequence_iteration: global___Statement.Iteration.NumericSequenceIteration | None = ..., + repeated_iteration: global___Statement.Iteration.RepeatedIteration | None = ..., + map_iteration: global___Statement.Iteration.MapIteration | None = ..., + bytes_iteration: global___Statement.Iteration.BytesIteration | None = ..., + statements: collections.abc.Iterable[global___Statement] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bytes_iteration", b"bytes_iteration", "iteration_type", b"iteration_type", "map_iteration", b"map_iteration", "numeric_sequence_iteration", b"numeric_sequence_iteration", "repeated_iteration", b"repeated_iteration"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["bytes_iteration", b"bytes_iteration", "iteration_type", b"iteration_type", "map_iteration", b"map_iteration", "numeric_sequence_iteration", b"numeric_sequence_iteration", "repeated_iteration", b"repeated_iteration", "statements", b"statements"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["iteration_type", b"iteration_type"]) -> typing_extensions.Literal["numeric_sequence_iteration", "repeated_iteration", "map_iteration", "bytes_iteration"] | None: ... + + DECLARATION_FIELD_NUMBER: builtins.int + STANDARD_OUTPUT_FIELD_NUMBER: builtins.int + RETURN_FIELD_NUMBER: builtins.int + CONDITIONAL_FIELD_NUMBER: builtins.int + ITERATION_FIELD_NUMBER: builtins.int + @property + def declaration(self) -> global___Statement.Declaration: + """A declaration which may be used for parameters or variables. Note that + the only form of assignment supported is on declaration. + Note that a declaration has scope depending on whether it's included as + a top level statement in any of the snippet sections or is a nested + statement. 
+ """ + @property + def standard_output(self) -> global___Statement.StandardOutput: + """A statement to write information to sdtout.""" + @property + def conditional(self) -> global___Statement.Conditional: + """A conditional statement. One of two given sets of statements will be + executed depending on the result of evaluating a given condition. + """ + @property + def iteration(self) -> global___Statement.Iteration: + """An iteration statement. A given Statement set will be executed + repeatedly according to the iteration definition. + Each iteration type will make a per-step Expression.NameValue set + available that may be used withing the given Statement set. + """ + def __init__( + self, + *, + declaration: global___Statement.Declaration | None = ..., + standard_output: global___Statement.StandardOutput | None = ..., + conditional: global___Statement.Conditional | None = ..., + iteration: global___Statement.Iteration | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["conditional", b"conditional", "declaration", b"declaration", "iteration", b"iteration", "return", b"return", "standard_output", b"standard_output", "statement_type", b"statement_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["conditional", b"conditional", "declaration", b"declaration", "iteration", b"iteration", "return", b"return", "standard_output", b"standard_output", "statement_type", b"statement_type"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["statement_type", b"statement_type"]) -> typing_extensions.Literal["declaration", "standard_output", "return", "conditional", "iteration"] | None: ... + +global___Statement = Statement + +@typing_extensions.final +class Type(google.protobuf.message.Message): + """Represents type of values. To be used, for instance, for + Statement.Declaration or for specifying the return type of the snippet. 
+ """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _ScalarType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _ScalarTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type._ScalarType.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SCALAR_TYPE_UNDEFINED: Type._ScalarType.ValueType # 0 + """The scalar type has not been specified. Consumers should not see this + value. + """ + TYPE_DOUBLE: Type._ScalarType.ValueType # 1 + TYPE_FLOAT: Type._ScalarType.ValueType # 2 + TYPE_INT64: Type._ScalarType.ValueType # 3 + TYPE_UINT64: Type._ScalarType.ValueType # 4 + TYPE_INT32: Type._ScalarType.ValueType # 5 + TYPE_FIXED64: Type._ScalarType.ValueType # 6 + TYPE_FIXED32: Type._ScalarType.ValueType # 7 + TYPE_BOOL: Type._ScalarType.ValueType # 8 + TYPE_STRING: Type._ScalarType.ValueType # 9 + TYPE_UINT32: Type._ScalarType.ValueType # 13 + TYPE_SFIXED32: Type._ScalarType.ValueType # 15 + TYPE_SFIXED64: Type._ScalarType.ValueType # 16 + TYPE_SINT32: Type._ScalarType.ValueType # 17 + TYPE_SINT64: Type._ScalarType.ValueType # 18 + + class ScalarType(_ScalarType, metaclass=_ScalarTypeEnumTypeWrapper): + """Represents protobuf scalar types that should be translated to the usual + language-specific types. + https://developers.google.com/protocol-buffers/docs/proto3#scalar + This could have been FieldDescriptorProto.Type except that proto2 enums + cannot be used in proto3. Also, a few of the types in + FieldDescriptorsProto.Type are individually supported in SnippetGen Config. + Values allowed for this type are: + - Expression.default_value, + - Expression.name_value as long as the identifier resolves to a type that + is assignable to this one. + - Expression.number_value for the numeric FieldDescriptorProto.Type(s). + - Expression.boolean_value for FieldDescriptorProto.Type.TYPE_BOOL. 
+ - Expression.string_value for FieldDescriptorProto.Type.TYPE_STRING. + - Any other value that resolves to a type that is assignable to this one. + """ + + SCALAR_TYPE_UNDEFINED: Type.ScalarType.ValueType # 0 + """The scalar type has not been specified. Consumers should not see this + value. + """ + TYPE_DOUBLE: Type.ScalarType.ValueType # 1 + TYPE_FLOAT: Type.ScalarType.ValueType # 2 + TYPE_INT64: Type.ScalarType.ValueType # 3 + TYPE_UINT64: Type.ScalarType.ValueType # 4 + TYPE_INT32: Type.ScalarType.ValueType # 5 + TYPE_FIXED64: Type.ScalarType.ValueType # 6 + TYPE_FIXED32: Type.ScalarType.ValueType # 7 + TYPE_BOOL: Type.ScalarType.ValueType # 8 + TYPE_STRING: Type.ScalarType.ValueType # 9 + TYPE_UINT32: Type.ScalarType.ValueType # 13 + TYPE_SFIXED32: Type.ScalarType.ValueType # 15 + TYPE_SFIXED64: Type.ScalarType.ValueType # 16 + TYPE_SINT32: Type.ScalarType.ValueType # 17 + TYPE_SINT64: Type.ScalarType.ValueType # 18 + + @typing_extensions.final + class EnumType(google.protobuf.message.Message): + """Represents protobuf enum types. These should be known by the + microgenerators on generation time, so they'll probably be enums defined + within the API being generated or one of its mixins. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENUM_FULL_NAME_FIELD_NUMBER: builtins.int + enum_full_name: builtins.str + """The protobuf full enum name, including the protobuf package.""" + def __init__( + self, + *, + enum_full_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["enum_full_name", b"enum_full_name"]) -> None: ... 
+ + @typing_extensions.final + class BytesType(google.protobuf.message.Message): + """Represents a bytes type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _LanguageEquivalent: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _LanguageEquivalentEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type.BytesType._LanguageEquivalent.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PROTOBUF_BYTES: Type.BytesType._LanguageEquivalent.ValueType # 0 + """The same type used for representing protobuf bytes fields.""" + BASE64: Type.BytesType._LanguageEquivalent.ValueType # 1 + """Language-specific string type, whose value will be understood as a + base64 string representation of the bytes. + """ + BYTE_ARRAY: Type.BytesType._LanguageEquivalent.ValueType # 2 + """Language-specific byte array type.""" + STREAM: Type.BytesType._LanguageEquivalent.ValueType # 3 + """Language-specific stream type.""" + + class LanguageEquivalent(_LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper): + """Possible language-specific equivalents to a bytes type.""" + + PROTOBUF_BYTES: Type.BytesType.LanguageEquivalent.ValueType # 0 + """The same type used for representing protobuf bytes fields.""" + BASE64: Type.BytesType.LanguageEquivalent.ValueType # 1 + """Language-specific string type, whose value will be understood as a + base64 string representation of the bytes. 
+ """ + BYTE_ARRAY: Type.BytesType.LanguageEquivalent.ValueType # 2 + """Language-specific byte array type.""" + STREAM: Type.BytesType.LanguageEquivalent.ValueType # 3 + """Language-specific stream type.""" + + LANGUAGE_EQUIVALENT_FIELD_NUMBER: builtins.int + language_equivalent: global___Type.BytesType.LanguageEquivalent.ValueType + """The language-specific type that this bytes type should be generated as.""" + def __init__( + self, + *, + language_equivalent: global___Type.BytesType.LanguageEquivalent.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["language_equivalent", b"language_equivalent"]) -> None: ... + + @typing_extensions.final + class MessageType(google.protobuf.message.Message): + """Represents protobuf message types. These should be known by the + microgenerators at generation time, so they'll usually be well known types, + messaged defined within the API being generated or one of its mixins. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + MESSAGE_FULL_NAME_FIELD_NUMBER: builtins.int + message_full_name: builtins.str + """The protobuf full message name, including the protobuf package.""" + def __init__( + self, + *, + message_full_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["message_full_name", b"message_full_name"]) -> None: ... 
+ + @typing_extensions.final + class RepeatedType(google.protobuf.message.Message): + """Represents a repeated type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _LanguageEquivalent: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _LanguageEquivalentEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type.RepeatedType._LanguageEquivalent.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PROTOBUF_REPEATED: Type.RepeatedType._LanguageEquivalent.ValueType # 0 + """The same type used for representing protobuf repeated fields.""" + ARRAY: Type.RepeatedType._LanguageEquivalent.ValueType # 1 + """Language-specific array type.""" + LIST: Type.RepeatedType._LanguageEquivalent.ValueType # 2 + """Language-specific list type.""" + + class LanguageEquivalent(_LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper): + """Possible language-specific equivalents to a repeated type.""" + + PROTOBUF_REPEATED: Type.RepeatedType.LanguageEquivalent.ValueType # 0 + """The same type used for representing protobuf repeated fields.""" + ARRAY: Type.RepeatedType.LanguageEquivalent.ValueType # 1 + """Language-specific array type.""" + LIST: Type.RepeatedType.LanguageEquivalent.ValueType # 2 + """Language-specific list type.""" + + ELEMENT_TYPE_FIELD_NUMBER: builtins.int + LANGUAGE_EQUIVALENT_FIELD_NUMBER: builtins.int + @property + def element_type(self) -> global___Type: + """The type of the elements.""" + language_equivalent: global___Type.RepeatedType.LanguageEquivalent.ValueType + """The language-specific type that this repeated type should be generated + as. + """ + def __init__( + self, + *, + element_type: global___Type | None = ..., + language_equivalent: global___Type.RepeatedType.LanguageEquivalent.ValueType = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["element_type", b"element_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["element_type", b"element_type", "language_equivalent", b"language_equivalent"]) -> None: ... + + @typing_extensions.final + class MapType(google.protobuf.message.Message): + """Represents a map type.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _LanguageEquivalent: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _LanguageEquivalentEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Type.MapType._LanguageEquivalent.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + PROTOBUF_MAP: Type.MapType._LanguageEquivalent.ValueType # 0 + """The same type used for representing protobuf map fields.""" + DICTIONARY: Type.MapType._LanguageEquivalent.ValueType # 1 + """Language-specific dictionary or map type.""" + + class LanguageEquivalent(_LanguageEquivalent, metaclass=_LanguageEquivalentEnumTypeWrapper): + """Possible language-specific equivalents to a map type.""" + + PROTOBUF_MAP: Type.MapType.LanguageEquivalent.ValueType # 0 + """The same type used for representing protobuf map fields.""" + DICTIONARY: Type.MapType.LanguageEquivalent.ValueType # 1 + """Language-specific dictionary or map type.""" + + KEY_TYPE_FIELD_NUMBER: builtins.int + VALUE_TYPE_FIELD_NUMBER: builtins.int + LANGUAGE_EQUIVALENT_FIELD_NUMBER: builtins.int + @property + def key_type(self) -> global___Type: + """The type of the keys.""" + @property + def value_type(self) -> global___Type: + """The type of the values.""" + language_equivalent: global___Type.MapType.LanguageEquivalent.ValueType + """The language-specific type that this map type should be generated as.""" + def __init__( + self, + *, + key_type: global___Type | None = ..., + value_type: global___Type | None = ..., + 
language_equivalent: global___Type.MapType.LanguageEquivalent.ValueType = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["key_type", b"key_type", "value_type", b"value_type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key_type", b"key_type", "language_equivalent", b"language_equivalent", "value_type", b"value_type"]) -> None: ... + + SCALAR_TYPE_FIELD_NUMBER: builtins.int + ENUM_TYPE_FIELD_NUMBER: builtins.int + BYTES_TYPE_FIELD_NUMBER: builtins.int + MESSAGE_TYPE_FIELD_NUMBER: builtins.int + REPEATED_TYPE_FIELD_NUMBER: builtins.int + MAP_TYPE_FIELD_NUMBER: builtins.int + scalar_type: global___Type.ScalarType.ValueType + """Protobuf scalar types.""" + @property + def enum_type(self) -> global___Type.EnumType: + """Protobuf enum types.""" + @property + def bytes_type(self) -> global___Type.BytesType: + """The bytes type""" + @property + def message_type(self) -> global___Type.MessageType: + """Protobuf message types.""" + @property + def repeated_type(self) -> global___Type.RepeatedType: + """The repeated type.""" + @property + def map_type(self) -> global___Type.MapType: + """The map type.""" + def __init__( + self, + *, + scalar_type: global___Type.ScalarType.ValueType = ..., + enum_type: global___Type.EnumType | None = ..., + bytes_type: global___Type.BytesType | None = ..., + message_type: global___Type.MessageType | None = ..., + repeated_type: global___Type.RepeatedType | None = ..., + map_type: global___Type.MapType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["bytes_type", b"bytes_type", "enum_type", b"enum_type", "map_type", b"map_type", "message_type", b"message_type", "repeated_type", b"repeated_type", "scalar_type", b"scalar_type", "type_kind", b"type_kind"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["bytes_type", b"bytes_type", "enum_type", b"enum_type", "map_type", b"map_type", "message_type", b"message_type", "repeated_type", b"repeated_type", "scalar_type", b"scalar_type", "type_kind", b"type_kind"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["type_kind", b"type_kind"]) -> typing_extensions.Literal["scalar_type", "enum_type", "bytes_type", "message_type", "repeated_type", "map_type"] | None: ... + +global___Type = Type + +@typing_extensions.final +class Expression(google.protobuf.message.Message): + """An expression to be used, for instance, for parameter, variable and request + initialization. + On type inference: + - When used for parameter or variable initialization, that is, when used in a + Statement.Declaration, we have Statement.Declaration.Type. + - When used for request initialization, we know the request type. + - When used to initialize other elements, like client options, etc. we will + know which types may be infer on a case by case basis as specified by each + of these elements documentation. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + class _NullValue: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _NullValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Expression._NullValue.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NULL_VALUE: Expression._NullValue.ValueType # 0 + """Null value.""" + + class NullValue(_NullValue, metaclass=_NullValueEnumTypeWrapper): + """The null value. 
Might not be accepted by all types.""" + + NULL_VALUE: Expression.NullValue.ValueType # 0 + """Null value.""" + + class _DefaultValue: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + + class _DefaultValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Expression._DefaultValue.ValueType], builtins.type): # noqa: F821 + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + DEFAULT_VALUE: Expression._DefaultValue.ValueType # 0 + """Default value.""" + + class DefaultValue(_DefaultValue, metaclass=_DefaultValueEnumTypeWrapper): + """The default value. + Note that when a value is used, the type is always known/inferred. + Supported types are currently protobuf scalar types and protobuf message + types so all types should have a default value. + For protobuf message types, the default value should be the empty message. + """ + + DEFAULT_VALUE: Expression.DefaultValue.ValueType # 0 + """Default value.""" + + @typing_extensions.final + class NameValue(google.protobuf.message.Message): + """A variable or parameter name.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + PATH_FIELD_NUMBER: builtins.int + name: builtins.str + """The name of the variable or parameter name. Required.""" + @property + def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: + """A path within name that refers to a nested value. Optional. + Note that this path must be valid across all languages, so, the following + rules apply. + - A path in a NameValue is only allowed if name refers to a value of + google.cloud.tools.snippetgen.configlanguage.v1.Type.MessageType, i.e. + a protobuf message. + - The path segments should be named as they appear on the proto + definition and not as they are on any specific language. 
+ For instance, if the following message is defined: + ``` + message SampleMessage { + int one_field = 0; + SampleMessage nested_field = 1; + } + ``` + and a Declaration of + - Declaration.type => SampleMessage and + - Declaration.name => sample_value + then posible NameValues that refer to the variable declared are: + - NameValue.name => sample_value and NameValue.path => unset to reference + the value of the sample_value variable, i.e. using the variable + sample_value + - NameValue.name => sample_value and NameValue.path => one_field to + reference the value of the one_field value of the message instance + stored in the sample_value variable, i.e. using the variable + sample_value.one_field. + - NameValue.sample_value and NameValue.path => nested_field, one_field to + reference the one_field value of the nested_field value of the message + instance stored in the sample_value variable, i.e. using the variable + sample_value.nested_field.one_field. + """ + def __init__( + self, + *, + name: builtins.str = ..., + path: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "path", b"path"]) -> None: ... + + @typing_extensions.final + class BytesValue(google.protobuf.message.Message): + """A bytes value. This represents initialization of objects from which + arbitrary byte sequences may be ontained. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class FileStream(google.protobuf.message.Message): + """A language-specific file stream.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + FILE_PATH_FIELD_NUMBER: builtins.int + @property + def file_path(self) -> global___Expression: + """The path of the file to build the stream from. + This expression should evaluate to a string value. + """ + def __init__( + self, + *, + file_path: global___Expression | None = ..., + ) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["file_path", b"file_path"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["file_path", b"file_path"]) -> None: ... + + BASE64_STRING_FIELD_NUMBER: builtins.int + FILE_STREAM_FIELD_NUMBER: builtins.int + @property + def base64_string(self) -> global___Expression: + """A Base64 encoded string. + This expression should resolve to a string value. + """ + @property + def file_stream(self) -> global___Expression.BytesValue.FileStream: + """A file stream.""" + def __init__( + self, + *, + base64_string: global___Expression | None = ..., + file_stream: global___Expression.BytesValue.FileStream | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["base64_string", b"base64_string", "file_stream", b"file_stream", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["base64_string", b"base64_string", "file_stream", b"file_stream", "value", b"value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["base64_string", "file_stream"] | None: ... + + @typing_extensions.final + class ComplexValue(google.protobuf.message.Message): + """A complex value. This represents initialization of complex objects, most + likely of protobuf messages. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing_extensions.final + class PropertiesEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + @property + def value(self) -> global___Expression: ... + def __init__( + self, + *, + key: builtins.str = ..., + value: global___Expression | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ... + + PROPERTIES_FIELD_NUMBER: builtins.int + @property + def properties(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Expression]: + """This is a simple map from message property name to Expression. + - All keys in the map should correspond to top level properties of the + protobuf message. + - Nested properties may be initialized thanks to the recursive nature of + ComplexValue. + - Each Expression in the map should resolve to the type of the property + whose name is the associated key. + """ + def __init__( + self, + *, + properties: collections.abc.Mapping[builtins.str, global___Expression] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["properties", b"properties"]) -> None: ... + + @typing_extensions.final + class RepeatedValue(google.protobuf.message.Message): + """A list value. This represens initialization of collections, list, arrays + and similar values. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + VALUES_FIELD_NUMBER: builtins.int + @property + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]: + """The values that should be used to initialize a language-specific + collection, list, array or similar. + - The values should be used in the same order as they appear in values, + regardless of whether the target collection type represents an ordered + collection or not. + - Each Expression should resolve to a type that is assignable to the type + of the elements in the target collection. + """ + def __init__( + self, + *, + values: collections.abc.Iterable[global___Expression] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ... + + @typing_extensions.final + class MapValue(google.protobuf.message.Message): + """A map value. 
This represents initialization of maps, dictionaries and + similar values. + Note that we cannot use a protobuf map for the definition of MapValue + because protobuf map do not accept message types as the key type. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEYS_FIELD_NUMBER: builtins.int + VALUES_FIELD_NUMBER: builtins.int + @property + def keys(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]: + """The keys to use for initializing a language-specific map, dictionary or + similar. + - Each key Expression should resolve to a type that is assignable to + the key type of the target map. + """ + @property + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]: + """The values to use for initializing a language-specific map, dictionary or + similar. + - Each value Expression should resolve to a type that is assignable to + the value type of the target map. + - Each value should be present in the same order as the corresponding key + is in keys. + """ + def __init__( + self, + *, + keys: collections.abc.Iterable[global___Expression] | None = ..., + values: collections.abc.Iterable[global___Expression] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["keys", b"keys", "values", b"values"]) -> None: ... + + @typing_extensions.final + class ConditionalOperator(google.protobuf.message.Message): + """A conditional value. This expression has one of two given values + depending on the result of evaluating a given condition. + """ + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CONDITION_FIELD_NUMBER: builtins.int + ON_TRUE_FIELD_NUMBER: builtins.int + ON_FALSE_FIELD_NUMBER: builtins.int + @property + def condition(self) -> global___Expression: + """The condition to evaluate. 
Should resolve to a Boolean value.""" + @property + def on_true(self) -> global___Expression: + """The value of this expression if condition evaluates to true. on_true + should resolve to a type that is assignable to the target type of this + expression. + """ + @property + def on_false(self) -> global___Expression: + """The value of this expression if condition evaluates to false. on_false + should resolve to a type that is assignable to the target type of this + expression. + """ + def __init__( + self, + *, + condition: global___Expression | None = ..., + on_true: global___Expression | None = ..., + on_false: global___Expression | None = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["condition", b"condition", "on_false", b"on_false", "on_true", b"on_true"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["condition", b"condition", "on_false", b"on_false", "on_true", b"on_true"]) -> None: ... + + NULL_VALUE_FIELD_NUMBER: builtins.int + DEFAULT_VALUE_FIELD_NUMBER: builtins.int + NAME_VALUE_FIELD_NUMBER: builtins.int + NUMBER_VALUE_FIELD_NUMBER: builtins.int + BOOLEAN_VALUE_FIELD_NUMBER: builtins.int + STRING_VALUE_FIELD_NUMBER: builtins.int + ENUM_VALUE_FIELD_NUMBER: builtins.int + BYTES_VALUE_FIELD_NUMBER: builtins.int + COMPLEX_VALUE_FIELD_NUMBER: builtins.int + LIST_VALUE_FIELD_NUMBER: builtins.int + MAP_VALUE_FIELD_NUMBER: builtins.int + CONDITIONAL_VALUE_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + null_value: global___Expression.NullValue.ValueType + """The null value. Might not be accepted by all types.""" + default_value: global___Expression.DefaultValue.ValueType + """The default value. + Note that when a value is used, type is always known/inferred. Supported + types are currently protobuf scalar types and protobuf message types so + all types should have a default value. 
+ """ + @property + def name_value(self) -> global___Expression.NameValue: + """A variable or parameter name, and possibly a path within.""" + number_value: builtins.float + """A number literal.""" + boolean_value: builtins.bool + """A Boolean literal.""" + string_value: builtins.str + """A string literal.""" + enum_value: builtins.str + """An enum value. The text is to be the enum value name as defined in + protobuf. For instance, for a protobuf enum + `enum SampleEnum { VALUE_0 = 0; VALUE_1 = 1; }` + the accepted values here would be VALUE_0 and VALUE_1. + """ + @property + def bytes_value(self) -> global___Expression.BytesValue: + """A bytes value. This represents initialization of objects from which + arbitrary byte sequences may be obtained. + """ + @property + def complex_value(self) -> global___Expression.ComplexValue: + """A complex value. This represents initialization of complex objects, most + likely of protobuf messages. + """ + @property + def list_value(self) -> global___Expression.RepeatedValue: + """A list value. This represents initialization of collections, list, arrays + and similar values. + """ + @property + def map_value(self) -> global___Expression.MapValue: + """A map value. This represents initialization of maps, dictionaries and + similar values. + """ + @property + def conditional_value(self) -> global___Expression.ConditionalOperator: + """A conditional value. This expression has one of two given values + depending on the result of evaluating a given condition. + """ + description: builtins.str + """An optional description that will be included in the snippet alongside the + value, likely as a code comment. 
+ """ + def __init__( + self, + *, + null_value: global___Expression.NullValue.ValueType = ..., + default_value: global___Expression.DefaultValue.ValueType = ..., + name_value: global___Expression.NameValue | None = ..., + number_value: builtins.float = ..., + boolean_value: builtins.bool = ..., + string_value: builtins.str = ..., + enum_value: builtins.str = ..., + bytes_value: global___Expression.BytesValue | None = ..., + complex_value: global___Expression.ComplexValue | None = ..., + list_value: global___Expression.RepeatedValue | None = ..., + map_value: global___Expression.MapValue | None = ..., + conditional_value: global___Expression.ConditionalOperator | None = ..., + description: builtins.str = ..., + ) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["boolean_value", b"boolean_value", "bytes_value", b"bytes_value", "complex_value", b"complex_value", "conditional_value", b"conditional_value", "default_value", b"default_value", "enum_value", b"enum_value", "list_value", b"list_value", "map_value", b"map_value", "name_value", b"name_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "value", b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["boolean_value", b"boolean_value", "bytes_value", b"bytes_value", "complex_value", b"complex_value", "conditional_value", b"conditional_value", "default_value", b"default_value", "description", b"description", "enum_value", b"enum_value", "list_value", b"list_value", "map_value", b"map_value", "name_value", b"name_value", "null_value", b"null_value", "number_value", b"number_value", "string_value", b"string_value", "value", b"value"]) -> None: ... 
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["null_value", "default_value", "name_value", "number_value", "boolean_value", "string_value", "enum_value", "bytes_value", "complex_value", "list_value", "map_value", "conditional_value"] | None: ... + +global___Expression = Expression diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 068aa6233547..6a02ecb9af04 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,4 +9,5 @@ PyYAML==6.0 setuptools==65.6.3 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 -pytest-asyncio==0.20.2 \ No newline at end of file +pytest-asyncio==0.20.2 +libcst==0.4.9 \ No newline at end of file diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0ac668bec3d0..2407627b7d1a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -34,6 +34,7 @@ "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", + "libcst >= 0.4.9", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/README.md b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/README.md similarity index 100% rename from packages/gapic-generator/tests/configurable_snippetgen/resources/README.md rename to packages/gapic-generator/tests/unit/configurable_snippetgen/resources/README.md diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/request.desc b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/request.desc similarity index 100% rename from packages/gapic-generator/tests/configurable_snippetgen/resources/speech/request.desc rename to packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/request.desc diff --git 
a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_createCustomClass.json b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_createCustomClass.json similarity index 100% rename from packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_createCustomClass.json rename to packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_createCustomClass.json diff --git a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py similarity index 92% rename from packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py rename to packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py index 82c3010f3a0c..52d4563c8f34 100644 --- a/packages/gapic-generator/tests/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py @@ -20,11 +20,11 @@ # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-speech -# [START speech_v1_generated_Adaptation_CreateCustomClass_Basic_async] +# [START speech_v1_config_Adaptation_CreateCustomClass_Basic_async] from google.cloud import speech_v1 -async def sample_create_custom_class_basic( +async def sample_create_custom_class_Basic( parent: str = "projects/[PROJECT]/locations/us", custom_class_id: str = "passengerships", ) -> speech_v1.CustomClass: @@ -69,4 +69,4 @@ async def 
sample_create_custom_class_basic( return created_custom_class -# [END speech_v1_generated_Adaptation_CreateCustomClass_Basic_async] +# [END speech_v1_config_Adaptation_CreateCustomClass_Basic_async] diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py new file mode 100644 index 000000000000..4d015eec81d7 --- /dev/null +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pathlib import Path + +from google.protobuf import json_format +from google.protobuf.compiler import plugin_pb2 +import pytest + +from gapic import utils +from gapic.configurable_snippetgen import configured_snippet +from gapic.configurable_snippetgen import snippet_config_language_pb2 +from gapic.schema import api + + +CURRENT_DIRECTORY = Path(__file__).parent.absolute() +SPEECH_V1_REQUEST_PATH = (CURRENT_DIRECTORY / + "resources" / "speech" / "request.desc") +CONFIG_JSON_PATH = ( + CURRENT_DIRECTORY / + "resources" / + "speech" / + "speech_createCustomClass.json") + + +def _load_api_schema(request_path: Path) -> api.API: + with open(request_path, "rb") as f: + request_bytes = f.read() + + req = plugin_pb2.CodeGeneratorRequest.FromString(request_bytes) + + # From gapic/cli/generator.py. 
+ opts = utils.Options.build(req.parameter) + api_schema = api.API.build( + req.proto_file, + opts=opts, + package="google.cloud.speech.v1", + ) + + return api_schema + + +def _load_snippet_config( + config_path: Path, +) -> snippet_config_language_pb2.SnippetConfig: + with open(config_path, "r") as f: + config_json = f.read() + + snippet_config = json_format.Parse( + config_json, snippet_config_language_pb2.SnippetConfig() + ) + + return snippet_config + + +def _make_configured_snippet( + request_path: Path, config_path: Path, api_version: str, is_sync: bool +) -> configured_snippet.ConfiguredSnippet: + api_schema = _load_api_schema(request_path) + snippet_config = _load_snippet_config(config_path) + + return configured_snippet.ConfiguredSnippet( + api_schema, snippet_config, api_version, is_sync + ) + + +def test_region_tag(): + snippet = _make_configured_snippet( + SPEECH_V1_REQUEST_PATH, + CONFIG_JSON_PATH, + api_version="v1", + is_sync=True) + + assert ( + snippet.region_tag + == "speech_v1_config_Adaptation_CreateCustomClass_Basic_sync" + ) + + +def test_sample_function_name(): + snippet = _make_configured_snippet( + SPEECH_V1_REQUEST_PATH, + CONFIG_JSON_PATH, + api_version="v1", + is_sync=True) + + assert snippet.sample_function_name == "sample_create_custom_class_Basic" + + +def test_code(): + snippet = _make_configured_snippet( + SPEECH_V1_REQUEST_PATH, + CONFIG_JSON_PATH, + api_version="v1", + is_sync=True) + + # https://github.com/googleapis/gapic-generator-python/issues/1522 + # Placeholder code. We will gradually add to the ConfiguredSnippet class + # until the generated code is the same as that of the golden file. 
+ expected_code = "\n" + + assert snippet.code == expected_code diff --git a/packages/gapic-generator/tests/configurable_snippetgen/test_resources.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py similarity index 100% rename from packages/gapic-generator/tests/configurable_snippetgen/test_resources.py rename to packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py From c7747d2881680faf0daf6fd25221e8437dfcf520 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Thu, 8 Dec 2022 07:05:01 -0800 Subject: [PATCH 0939/1339] chore: add ConfiguredSnippet.filename (#1523) * chore: add ConfiguredSnippet.filename * add inflection to dependency * set upperbound on dependencies in setup.py --- .../configured_snippet.py | 17 ++++++++++++- packages/gapic-generator/requirements.txt | 3 ++- packages/gapic-generator/setup.py | 3 ++- .../test_configured_snippet.py | 25 ++++++++----------- 4 files changed, 30 insertions(+), 18 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index c9ff39adb83a..b2960fdcf9e7 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -14,6 +14,7 @@ import dataclasses +import inflection import libcst from gapic.configurable_snippetgen import snippet_config_language_pb2 @@ -58,8 +59,22 @@ def sample_function_name(self) -> str: """The sample function's name. For example: - "sample_create_custom_class_basic" + "sample_create_custom_class_Basic" """ snippet_method_name = self.config.signature.snippet_method_name config_id = self.config.metadata.config_id return f"sample_{snippet_method_name}_{config_id}" + + @property + def filename(self) -> str: + """The snippet's file name. 
+ + For example: + "speech_v1_generated_Adaptation_create_custom_class_Basic_async.py" + """ + module_name = self.config.rpc.proto_package.split(".")[-1] + service_name = self.config.rpc.service_name + snake_case_rpc_name = inflection.underscore(self.config.rpc.rpc_name) + config_id = self.config.metadata.config_id + sync_or_async = "sync" if self.is_sync else "async" + return f"{module_name}_{self.api_version}_generated_{service_name}_{snake_case_rpc_name}_{config_id}_{sync_or_async}.py" diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 6a02ecb9af04..aa540b7dcba7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -10,4 +10,5 @@ setuptools==65.6.3 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 pytest-asyncio==0.20.2 -libcst==0.4.9 \ No newline at end of file +libcst==0.4.9 +inflection==0.5.1 \ No newline at end of file diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2407627b7d1a..9772e569e1d2 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -34,7 +34,8 @@ "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", - "libcst >= 0.4.9", + "libcst >= 0.4.9, < 1.0.0dev", + "inflection >= 0.5.1, < 1.0.0dev", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index 4d015eec81d7..e7b44fa7fe76 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -76,36 +76,31 @@ def _make_configured_snippet( ) -def test_region_tag(): - snippet = _make_configured_snippet( +@pytest.fixture +def snippet(): + return _make_configured_snippet( 
SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=True) + +def test_region_tag(snippet): assert ( snippet.region_tag == "speech_v1_config_Adaptation_CreateCustomClass_Basic_sync" ) -def test_sample_function_name(): - snippet = _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, - CONFIG_JSON_PATH, - api_version="v1", - is_sync=True) - +def test_sample_function_name(snippet): assert snippet.sample_function_name == "sample_create_custom_class_Basic" -def test_code(): - snippet = _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, - CONFIG_JSON_PATH, - api_version="v1", - is_sync=True) +def test_filename(snippet): + assert snippet.filename == "speech_v1_generated_Adaptation_create_custom_class_Basic_sync.py" + +def test_code(snippet): # https://github.com/googleapis/gapic-generator-python/issues/1522 # Placeholder code. We will gradually add to the ConfiguredSnippet class # until the generated code is the same as that of the golden file. From 4ebe0964f0a2c95947927b95f36bd4fc68560dc2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 14:53:23 -0500 Subject: [PATCH 0940/1339] build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro (#1528) Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 5 +- .../.kokoro/docker/docs/Dockerfile | 12 +- packages/gapic-generator/.kokoro/release.sh | 5 +- .../gapic-generator/.kokoro/requirements.in | 4 +- .../gapic-generator/.kokoro/requirements.txt | 304 ++++++++++-------- 5 files changed, 178 insertions(+), 152 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml 
b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 368c491fdbfe..fccaa8e84449 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -4,14 +4,13 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile index 238b87b9d1c9..f8137d0ae497 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git 
a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh index c78b93700e13..336f0eca1e1f 100755 --- a/packages/gapic-generator/.kokoro/release.sh +++ b/packages/gapic-generator/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r github/gapic-generator-python/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 diff --git a/packages/gapic-generator/.kokoro/requirements.in b/packages/gapic-generator/.kokoro/requirements.in index 7718391a34d7..cbd7e77f44db 100644 --- a/packages/gapic-generator/.kokoro/requirements.in +++ b/packages/gapic-generator/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 7f3eedec957c..05dc4672edaa 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + 
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -152,23 +155,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + 
--hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -178,76 +181,102 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - 
--hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - 
--hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + 
--hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + 
--hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + 
--hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - 
--hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -259,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -307,9 +336,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + 
--hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -325,34 +354,33 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - 
--hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + 
--hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool @@ -381,9 +409,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -396,9 +424,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ 
--hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -409,17 +437,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -441,9 +469,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -451,25 +479,25 @@ urllib3==1.26.12 \ # via 
# requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in From 3b38b452060a6758401b393be4e04ff4ebd425fb Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Mon, 12 Dec 2022 10:46:06 -0800 Subject: [PATCH 0941/1339] 
chore: configured snippet generates empty sample function (#1525) * chore: configured snippet generates empty sample function --- .../configured_snippet.py | 39 +++++++++++++---- .../configurable_snippetgen/libcst_utils.py | 36 ++++++++++++++++ .../test_configured_snippet.py | 31 ++++++------- .../test_libcst_utils.py | 43 +++++++++++++++++++ .../configurable_snippetgen/test_resources.py | 4 +- 5 files changed, 128 insertions(+), 25 deletions(-) create mode 100644 packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py create mode 100644 packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index b2960fdcf9e7..46bc79f82896 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -17,23 +17,23 @@ import inflection import libcst +from gapic.configurable_snippetgen import libcst_utils from gapic.configurable_snippetgen import snippet_config_language_pb2 from gapic.schema import api -def _make_empty_module() -> libcst.Module: - return libcst.Module(body=[]) - - -@dataclasses.dataclass(frozen=True) +@dataclasses.dataclass class ConfiguredSnippet: api_schema: api.API config: snippet_config_language_pb2.SnippetConfig api_version: str is_sync: bool - _module: libcst.Module = dataclasses.field( - default_factory=_make_empty_module, init=False - ) + + def __post_init__(self): + self._module: libcst.Module = libcst_utils.empty_module() + self._sample_function_def: libcst.FunctionDef = libcst_utils.base_function_def( + function_name=self.sample_function_name, is_sync=self.is_sync + ) @property def code(self) -> str: @@ -78,3 +78,26 @@ def filename(self) -> str: config_id = self.config.metadata.config_id sync_or_async = "sync" if self.is_sync else 
"async" return f"{module_name}_{self.api_version}_generated_{service_name}_{snake_case_rpc_name}_{config_id}_{sync_or_async}.py" + + def _build_sample_function(self) -> None: + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1536, add return type. + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1537, add sample function parameters. + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1538, add docstring. + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. + pass + + def _add_sample_function(self) -> None: + self._module = self._module.with_changes( + body=[self._sample_function_def]) + + def generate(self) -> None: + """Generates the snippet. + + This is the main entrypoint of a ConfiguredSnippet instance, calling + other methods to update self._module. + """ + self._build_sample_function() + self._add_sample_function() + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1535, add imports. + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1534, add region tag. + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1533, add header. diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py new file mode 100644 index 000000000000..f60a579f26b3 --- /dev/null +++ b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py @@ -0,0 +1,36 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import libcst + +from gapic.configurable_snippetgen import snippet_config_language_pb2 + + +def empty_module() -> libcst.Module: + return libcst.Module(body=[]) + + +def base_function_def(function_name: str, is_sync: bool) -> libcst.FunctionDef: + """Returns a FunctionDef node with a placeholder docstring.""" + params = libcst.Parameters(params=[]) + body = libcst.IndentedBlock(body=[libcst.parse_statement('""')]) + asynchronous = None if is_sync else libcst.Asynchronous() + function_def = libcst.FunctionDef( + name=libcst.Name(value=function_name), + params=params, + body=body, + asynchronous=asynchronous, + ) + + return function_def diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index e7b44fa7fe76..1804f11e94be 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -26,13 +26,11 @@ CURRENT_DIRECTORY = Path(__file__).parent.absolute() -SPEECH_V1_REQUEST_PATH = (CURRENT_DIRECTORY / - "resources" / "speech" / "request.desc") +SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / \ + "resources" / "speech" / "request.desc" CONFIG_JSON_PATH = ( - CURRENT_DIRECTORY / - "resources" / - "speech" / - "speech_createCustomClass.json") + CURRENT_DIRECTORY / "resources" / "speech" / "speech_createCustomClass.json" +) def _load_api_schema(request_path: Path) -> api.API: @@ -79,16 +77,13 @@ def 
_make_configured_snippet( @pytest.fixture def snippet(): return _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, - CONFIG_JSON_PATH, - api_version="v1", - is_sync=True) + SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=True + ) def test_region_tag(snippet): assert ( - snippet.region_tag - == "speech_v1_config_Adaptation_CreateCustomClass_Basic_sync" + snippet.region_tag == "speech_v1_config_Adaptation_CreateCustomClass_Basic_sync" ) @@ -97,13 +92,19 @@ def test_sample_function_name(snippet): def test_filename(snippet): - assert snippet.filename == "speech_v1_generated_Adaptation_create_custom_class_Basic_sync.py" + assert ( + snippet.filename + == "speech_v1_generated_Adaptation_create_custom_class_Basic_sync.py" + ) def test_code(snippet): + snippet.generate() + # https://github.com/googleapis/gapic-generator-python/issues/1522 # Placeholder code. We will gradually add to the ConfiguredSnippet class # until the generated code is the same as that of the golden file. - expected_code = "\n" - + expected_code = """def sample_create_custom_class_Basic(): + \"\" +""" assert snippet.code == expected_code diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py new file mode 100644 index 000000000000..03844f329df0 --- /dev/null +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import libcst +import pytest + +from gapic.configurable_snippetgen import libcst_utils + + +def _assert_code_equal(node: libcst.CSTNode, code: str) -> str: + assert libcst.Module(body=[node]).code == code + + +@pytest.mark.parametrize( + "is_sync,expected_code", + [ + (True, 'def some_function():\n ""\n'), + (False, 'async def some_function():\n ""\n'), + ], +) +def test_base_function_def(is_sync, expected_code): + node = libcst_utils.base_function_def("some_function", is_sync) + + expected_node = libcst.parse_statement(expected_code) + + # Whenever possible we try to control the shape of the nodes, + # because we will be manipulating them during snippet generation. + assert node.deep_equals(expected_node), (node, expected_node) + + # Sometimes it is more convenient to just verify the code. + _assert_code_equal(node, expected_code) diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py index ce0720574c07..d48304b8d7d3 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py @@ -23,8 +23,8 @@ CURRENT_DIRECTORY = Path(__file__).parent.absolute() -SPEECH_V1_REQUEST_PATH = (CURRENT_DIRECTORY / - "resources" / "speech" / "request.desc") +SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / \ + "resources" / "speech" / "request.desc" def test_request(): From f7a78f7d0d1656327afbae41c6f3dfc7f47c2aa9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 13 Dec 2022 11:35:36 -0500 Subject: [PATCH 0942/1339] fix: fix unit test with float comparison (#1541) --- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 4 ++++ 1 file changed, 4 insertions(+) diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index a76c758334d0..6a898e47e825 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -402,6 +402,8 @@ def test_{{ method_name }}_flattened(): assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% elif (field.ident|string()).startswith('wrappers_pb2.') %} assert wrappers.{{ (field.ident|string())[13:] }}Rule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.field_pb.type == 2 %}{# Use approx eq for floats #} + assert math.isclose(args[0].{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% else %} arg = args[0].{{ key }} mock_val = {{ field.mock_value }} @@ -497,6 +499,8 @@ async def test_{{ method_name }}_flattened_async(): assert DurationRule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} {% elif (field.ident|string()).startswith('wrappers_pb2.') %} assert wrappers.{{ (field.ident|string())[13:] }}Rule().to_proto(args[0].{{ key }}) == {{ field.mock_value }} + {% elif field.field_pb.type == 2 %}{# Use approx eq for floats #} + assert math.isclose(args[0].{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) {% else %} arg = args[0].{{ key }} mock_val = {{ field.mock_value }} From 1177f87e35bf070321517129678884bbc90d1079 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Tue, 13 Dec 2022 08:40:39 -0800 Subject: [PATCH 0943/1339] chore: tidy up RestInterceptor comments (#1524) * chore: Tidy up RestInterceptor comments A manual check showed that REST interceptors work fine with numeric enums. This just cleans up some of the surrounding documentation, including the in-code sample. 
* Update golden files Co-authored-by: Anthonios Partheniou --- .../services/%service/transports/rest.py.j2 | 5 +- .../services/%service/transports/rest.py.j2 | 11 +- .../integration/goldens/asset/.coveragerc | 0 .../tests/integration/goldens/asset/.flake8 | 0 .../integration/goldens/asset/MANIFEST.in | 0 .../integration/goldens/asset/README.rst | 0 .../asset/docs/asset_v1/asset_service.rst | 0 .../goldens/asset/docs/asset_v1/services.rst | 0 .../goldens/asset/docs/asset_v1/types.rst | 0 .../integration/goldens/asset/docs/conf.py | 0 .../integration/goldens/asset/docs/index.rst | 0 .../asset/google/cloud/asset/__init__.py | 0 .../asset/google/cloud/asset/gapic_version.py | 0 .../goldens/asset/google/cloud/asset/py.typed | 0 .../asset/google/cloud/asset_v1/__init__.py | 0 .../google/cloud/asset_v1/gapic_version.py | 0 .../asset/google/cloud/asset_v1/py.typed | 0 .../cloud/asset_v1/services/__init__.py | 0 .../services/asset_service/__init__.py | 0 .../asset_v1/services/asset_service/client.py | 0 .../asset_v1/services/asset_service/pagers.py | 0 .../services/asset_service/transports/base.py | 0 .../services/asset_service/transports/grpc.py | 0 .../asset_service/transports/grpc_asyncio.py | 0 .../services/asset_service/transports/rest.py | 57 ++++++---- .../google/cloud/asset_v1/types/__init__.py | 0 .../cloud/asset_v1/types/asset_service.py | 0 .../google/cloud/asset_v1/types/assets.py | 0 .../tests/integration/goldens/asset/mypy.ini | 0 .../integration/goldens/asset/noxfile.py | 0 ...nippet_metadata_google.cloud.asset.v1.json | 0 .../asset/scripts/fixup_asset_v1_keywords.py | 0 .../tests/integration/goldens/asset/setup.py | 0 .../asset/testing/constraints-3.10.txt | 0 .../asset/testing/constraints-3.11.txt | 0 .../goldens/asset/testing/constraints-3.7.txt | 0 .../goldens/asset/testing/constraints-3.8.txt | 0 .../goldens/asset/testing/constraints-3.9.txt | 0 .../goldens/asset/tests/__init__.py | 0 .../goldens/asset/tests/unit/__init__.py | 0 
.../asset/tests/unit/gapic/__init__.py | 0 .../tests/unit/gapic/asset_v1/__init__.py | 0 .../goldens/credentials/.coveragerc | 0 .../integration/goldens/credentials/.flake8 | 0 .../goldens/credentials/MANIFEST.in | 0 .../goldens/credentials/README.rst | 0 .../goldens/credentials/docs/conf.py | 0 .../docs/credentials_v1/iam_credentials.rst | 0 .../docs/credentials_v1/services.rst | 0 .../credentials/docs/credentials_v1/types.rst | 0 .../goldens/credentials/docs/index.rst | 0 .../google/iam/credentials/__init__.py | 0 .../google/iam/credentials/gapic_version.py | 0 .../google/iam/credentials/py.typed | 0 .../google/iam/credentials_v1/__init__.py | 0 .../iam/credentials_v1/gapic_version.py | 0 .../google/iam/credentials_v1/py.typed | 0 .../iam/credentials_v1/services/__init__.py | 0 .../services/iam_credentials/__init__.py | 0 .../services/iam_credentials/client.py | 0 .../iam_credentials/transports/base.py | 0 .../iam_credentials/transports/grpc.py | 0 .../transports/grpc_asyncio.py | 0 .../iam_credentials/transports/rest.py | 20 ++-- .../iam/credentials_v1/types/__init__.py | 0 .../google/iam/credentials_v1/types/common.py | 0 .../credentials_v1/types/iamcredentials.py | 0 .../integration/goldens/credentials/mypy.ini | 0 .../goldens/credentials/noxfile.py | 0 ...et_metadata_google.iam.credentials.v1.json | 0 .../scripts/fixup_credentials_v1_keywords.py | 0 .../integration/goldens/credentials/setup.py | 0 .../credentials/testing/constraints-3.10.txt | 0 .../credentials/testing/constraints-3.11.txt | 0 .../credentials/testing/constraints-3.7.txt | 0 .../credentials/testing/constraints-3.8.txt | 0 .../credentials/testing/constraints-3.9.txt | 0 .../goldens/credentials/tests/__init__.py | 0 .../credentials/tests/unit/__init__.py | 0 .../credentials/tests/unit/gapic/__init__.py | 0 .../unit/gapic/credentials_v1/__init__.py | 0 .../integration/goldens/eventarc/.coveragerc | 0 .../integration/goldens/eventarc/.flake8 | 0 .../integration/goldens/eventarc/MANIFEST.in | 0 
.../integration/goldens/eventarc/README.rst | 0 .../integration/goldens/eventarc/docs/conf.py | 0 .../eventarc/docs/eventarc_v1/eventarc.rst | 0 .../eventarc/docs/eventarc_v1/services.rst | 0 .../eventarc/docs/eventarc_v1/types.rst | 0 .../goldens/eventarc/docs/index.rst | 0 .../google/cloud/eventarc/__init__.py | 0 .../google/cloud/eventarc/gapic_version.py | 0 .../eventarc/google/cloud/eventarc/py.typed | 0 .../google/cloud/eventarc_v1/__init__.py | 0 .../google/cloud/eventarc_v1/gapic_version.py | 0 .../google/cloud/eventarc_v1/py.typed | 0 .../cloud/eventarc_v1/services/__init__.py | 0 .../eventarc_v1/services/eventarc/__init__.py | 0 .../eventarc_v1/services/eventarc/client.py | 0 .../eventarc_v1/services/eventarc/pagers.py | 0 .../services/eventarc/transports/base.py | 0 .../services/eventarc/transports/grpc.py | 0 .../eventarc/transports/grpc_asyncio.py | 0 .../services/eventarc/transports/rest.py | 25 +++-- .../cloud/eventarc_v1/types/__init__.py | 0 .../cloud/eventarc_v1/types/eventarc.py | 0 .../google/cloud/eventarc_v1/types/trigger.py | 0 .../integration/goldens/eventarc/mypy.ini | 0 .../integration/goldens/eventarc/noxfile.py | 0 ...pet_metadata_google.cloud.eventarc.v1.json | 0 .../scripts/fixup_eventarc_v1_keywords.py | 0 .../integration/goldens/eventarc/setup.py | 0 .../eventarc/testing/constraints-3.10.txt | 0 .../eventarc/testing/constraints-3.11.txt | 0 .../eventarc/testing/constraints-3.7.txt | 0 .../eventarc/testing/constraints-3.8.txt | 0 .../eventarc/testing/constraints-3.9.txt | 0 .../goldens/eventarc/tests/__init__.py | 0 .../goldens/eventarc/tests/unit/__init__.py | 0 .../eventarc/tests/unit/gapic/__init__.py | 0 .../tests/unit/gapic/eventarc_v1/__init__.py | 0 .../integration/goldens/logging/.coveragerc | 0 .../tests/integration/goldens/logging/.flake8 | 0 .../integration/goldens/logging/MANIFEST.in | 0 .../integration/goldens/logging/README.rst | 0 .../integration/goldens/logging/docs/conf.py | 0 .../goldens/logging/docs/index.rst | 0 
.../docs/logging_v2/config_service_v2.rst | 0 .../docs/logging_v2/logging_service_v2.rst | 0 .../docs/logging_v2/metrics_service_v2.rst | 0 .../logging/docs/logging_v2/services.rst | 0 .../goldens/logging/docs/logging_v2/types.rst | 0 .../logging/google/cloud/logging/__init__.py | 0 .../google/cloud/logging/gapic_version.py | 0 .../logging/google/cloud/logging/py.typed | 0 .../google/cloud/logging_v2/__init__.py | 0 .../google/cloud/logging_v2/gapic_version.py | 0 .../logging/google/cloud/logging_v2/py.typed | 0 .../cloud/logging_v2/services/__init__.py | 0 .../services/config_service_v2/__init__.py | 0 .../services/config_service_v2/pagers.py | 0 .../config_service_v2/transports/base.py | 0 .../config_service_v2/transports/grpc.py | 0 .../transports/grpc_asyncio.py | 0 .../config_service_v2/transports/rest.py | 100 +++++++++++------- .../services/logging_service_v2/__init__.py | 0 .../services/logging_service_v2/pagers.py | 0 .../logging_service_v2/transports/base.py | 0 .../logging_service_v2/transports/grpc.py | 0 .../transports/grpc_asyncio.py | 0 .../logging_service_v2/transports/rest.py | 22 ++-- .../services/metrics_service_v2/__init__.py | 0 .../services/metrics_service_v2/pagers.py | 0 .../metrics_service_v2/transports/base.py | 0 .../metrics_service_v2/transports/grpc.py | 0 .../transports/grpc_asyncio.py | 0 .../metrics_service_v2/transports/rest.py | 22 ++-- .../google/cloud/logging_v2/types/__init__.py | 0 .../cloud/logging_v2/types/log_entry.py | 0 .../google/cloud/logging_v2/types/logging.py | 0 .../cloud/logging_v2/types/logging_config.py | 0 .../cloud/logging_v2/types/logging_metrics.py | 0 .../integration/goldens/logging/mypy.ini | 0 .../integration/goldens/logging/noxfile.py | 0 .../snippet_metadata_google.logging.v2.json | 0 .../scripts/fixup_logging_v2_keywords.py | 0 .../integration/goldens/logging/setup.py | 0 .../logging/testing/constraints-3.10.txt | 0 .../logging/testing/constraints-3.11.txt | 0 .../logging/testing/constraints-3.7.txt | 0 
.../logging/testing/constraints-3.8.txt | 0 .../logging/testing/constraints-3.9.txt | 0 .../goldens/logging/tests/__init__.py | 0 .../goldens/logging/tests/unit/__init__.py | 0 .../logging/tests/unit/gapic/__init__.py | 0 .../tests/unit/gapic/logging_v2/__init__.py | 0 .../logging_v2/test_metrics_service_v2.py | 0 .../integration/goldens/redis/.coveragerc | 0 .../tests/integration/goldens/redis/.flake8 | 0 .../integration/goldens/redis/MANIFEST.in | 0 .../integration/goldens/redis/README.rst | 0 .../integration/goldens/redis/docs/conf.py | 0 .../integration/goldens/redis/docs/index.rst | 0 .../redis/docs/redis_v1/cloud_redis.rst | 0 .../goldens/redis/docs/redis_v1/services.rst | 0 .../goldens/redis/docs/redis_v1/types.rst | 0 .../redis/google/cloud/redis/__init__.py | 0 .../redis/google/cloud/redis/gapic_version.py | 0 .../goldens/redis/google/cloud/redis/py.typed | 0 .../redis/google/cloud/redis_v1/__init__.py | 0 .../google/cloud/redis_v1/gapic_version.py | 0 .../cloud/redis_v1/services/__init__.py | 0 .../redis_v1/services/cloud_redis/__init__.py | 0 .../redis_v1/services/cloud_redis/pagers.py | 0 .../services/cloud_redis/transports/rest.py | 45 ++++---- .../google/cloud/redis_v1/types/__init__.py | 0 .../cloud/redis_v1/types/cloud_redis.py | 0 .../tests/integration/goldens/redis/mypy.ini | 0 .../integration/goldens/redis/noxfile.py | 0 ...nippet_metadata_google.cloud.redis.v1.json | 0 .../redis/scripts/fixup_redis_v1_keywords.py | 0 .../tests/integration/goldens/redis/setup.py | 0 .../redis/testing/constraints-3.10.txt | 0 .../redis/testing/constraints-3.11.txt | 0 .../goldens/redis/testing/constraints-3.7.txt | 0 .../goldens/redis/testing/constraints-3.8.txt | 0 .../goldens/redis/testing/constraints-3.9.txt | 0 .../goldens/redis/tests/__init__.py | 0 .../goldens/redis/tests/unit/__init__.py | 0 .../redis/tests/unit/gapic/__init__.py | 0 .../tests/unit/gapic/redis_v1/__init__.py | 0 .../unit/gapic/redis_v1/test_cloud_redis.py | 0 212 files changed, 179 
insertions(+), 128 deletions(-) mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/.coveragerc mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/.flake8 mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/README.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/mypy.ini mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/noxfile.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/setup.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt 
mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/.flake8 mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/README.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/setup.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/README.rst mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py 
mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/setup.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py mode change 
100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/.coveragerc mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/.flake8 mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/README.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/mypy.ini mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/noxfile.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/setup.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/.coveragerc mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/.flake8 mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/README.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/mypy.ini mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/noxfile.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/setup.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 98ba58df17ae..ec4246a784b4 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -69,13 +69,14 @@ class {{ service.name }}RestInterceptor: .. 
code-block:: python class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): {% for _, method in service.methods|dictsort if not method.client_streaming %} - def pre_{{ method.name|snake_case }}(request, metadata): + def pre_{{ method.name|snake_case }}(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata {% if not method.void %} - def post_{{ method.name|snake_case }}(response): + def post_{{ method.name|snake_case }}(self, response): logging.log(f"Received response: {response}") + return response {% endif %} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 5cb1667063ec..c2cbbe889fa2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -62,12 +62,6 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=requests_version, ) -{% if opts.rest_numeric_enums %} -# TODO (numeric enums): This file was generated with the option to -# request that the server respond with enums JSON-encoded as -# numbers. The code below does not implement that functionality yet. - -{% endif %} class {{ service.name }}RestInterceptor: """Interceptor for {{ service.name }}. @@ -85,13 +79,14 @@ class {{ service.name }}RestInterceptor: .. 
code-block:: python class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): {% for _, method in service.methods|dictsort if not method.client_streaming %} - def pre_{{ method.name|snake_case }}(request, metadata): + def pre_{{ method.name|snake_case }}(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata {% if not method.void %} - def post_{{ method.name|snake_case }}(response): + def post_{{ method.name|snake_case }}(self, response): logging.log(f"Received response: {response}") + return response {% endif %} {% endfor %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.flake8 b/packages/gapic-generator/tests/integration/goldens/asset/.flake8 old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/README.rst b/packages/gapic-generator/tests/integration/goldens/asset/README.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/asset_service.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index b46a0a4ce0a3..529d6614a351 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -69,86 +69,97 @@ class AssetServiceRestInterceptor: .. code-block:: python class MyCustomAssetServiceInterceptor(AssetServiceRestInterceptor): - def pre_analyze_iam_policy(request, metadata): + def pre_analyze_iam_policy(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_analyze_iam_policy(response): + def post_analyze_iam_policy(self, response): logging.log(f"Received response: {response}") + return response - def pre_analyze_iam_policy_longrunning(request, metadata): + def pre_analyze_iam_policy_longrunning(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_analyze_iam_policy_longrunning(response): + def post_analyze_iam_policy_longrunning(self, response): logging.log(f"Received response: {response}") + return response - def pre_batch_get_assets_history(request, metadata): + def pre_batch_get_assets_history(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_batch_get_assets_history(response): + def post_batch_get_assets_history(self, response): logging.log(f"Received response: {response}") + return response - def pre_create_feed(request, metadata): + def pre_create_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_feed(response): + def post_create_feed(self, response): logging.log(f"Received response: {response}") + return response - def pre_delete_feed(request, metadata): + def pre_delete_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_export_assets(request, metadata): + def pre_export_assets(self, request, metadata): 
logging.log(f"Received request: {request}") return request, metadata - def post_export_assets(response): + def post_export_assets(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_feed(request, metadata): + def pre_get_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_feed(response): + def post_get_feed(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_assets(request, metadata): + def pre_list_assets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_assets(response): + def post_list_assets(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_feeds(request, metadata): + def pre_list_feeds(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_feeds(response): + def post_list_feeds(self, response): logging.log(f"Received response: {response}") + return response - def pre_search_all_iam_policies(request, metadata): + def pre_search_all_iam_policies(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_search_all_iam_policies(response): + def post_search_all_iam_policies(self, response): logging.log(f"Received response: {response}") + return response - def pre_search_all_resources(request, metadata): + def pre_search_all_resources(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_search_all_resources(response): + def post_search_all_resources(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_feed(request, metadata): + def pre_update_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_feed(response): + def post_update_feed(self, response): 
logging.log(f"Received response: {response}") + return response transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) client = AssetServiceClient(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini b/packages/gapic-generator/tests/integration/goldens/asset/mypy.ini old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py old mode 100644 new mode 100755 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 b/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/README.rst b/packages/gapic-generator/tests/integration/goldens/credentials/README.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/iam_credentials.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index f805bbff13d5..6bb11e7d4845 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -66,33 +66,37 @@ class IAMCredentialsRestInterceptor: .. code-block:: python class MyCustomIAMCredentialsInterceptor(IAMCredentialsRestInterceptor): - def pre_generate_access_token(request, metadata): + def pre_generate_access_token(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_generate_access_token(response): + def post_generate_access_token(self, response): logging.log(f"Received response: {response}") + return response - def pre_generate_id_token(request, metadata): + def pre_generate_id_token(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_generate_id_token(response): + def post_generate_id_token(self, response): logging.log(f"Received response: {response}") + return response - def pre_sign_blob(request, metadata): + def pre_sign_blob(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_sign_blob(response): + def post_sign_blob(self, response): logging.log(f"Received response: {response}") + return response - def pre_sign_jwt(request, metadata): + def pre_sign_jwt(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_sign_jwt(response): + def post_sign_jwt(self, response): logging.log(f"Received response: {response}") + return response transport = IAMCredentialsRestTransport(interceptor=MyCustomIAMCredentialsInterceptor()) client = IAMCredentialsClient(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini b/packages/gapic-generator/tests/integration/goldens/credentials/mypy.ini old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc old mode 
100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 b/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/eventarc.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index c212e44aff56..d107065ba70b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -69,40 +69,45 @@ class EventarcRestInterceptor: .. 
code-block:: python class MyCustomEventarcInterceptor(EventarcRestInterceptor): - def pre_create_trigger(request, metadata): + def pre_create_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_trigger(response): + def post_create_trigger(self, response): logging.log(f"Received response: {response}") + return response - def pre_delete_trigger(request, metadata): + def pre_delete_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_delete_trigger(response): + def post_delete_trigger(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_trigger(request, metadata): + def pre_get_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_trigger(response): + def post_get_trigger(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_triggers(request, metadata): + def pre_list_triggers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_triggers(response): + def post_list_triggers(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_trigger(request, metadata): + def pre_update_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_trigger(response): + def post_update_trigger(self, response): logging.log(f"Received response: {response}") + return response transport = EventarcRestTransport(interceptor=MyCustomEventarcInterceptor()) client = EventarcClient(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini b/packages/gapic-generator/tests/integration/goldens/eventarc/mypy.ini old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt 
b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.flake8 
b/packages/gapic-generator/tests/integration/goldens/logging/.flake8 old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/README.rst b/packages/gapic-generator/tests/integration/goldens/logging/README.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/config_service_v2.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/logging_service_v2.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/metrics_service_v2.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst 
old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py old mode 100644 new mode 100755 diff 
--git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py index 2a4a22284c94..d414a966e501 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -67,151 +67,169 @@ class ConfigServiceV2RestInterceptor: .. 
code-block:: python class MyCustomConfigServiceV2Interceptor(ConfigServiceV2RestInterceptor): - def pre_create_bucket(request, metadata): + def pre_create_bucket(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_bucket(response): + def post_create_bucket(self, response): logging.log(f"Received response: {response}") + return response - def pre_create_exclusion(request, metadata): + def pre_create_exclusion(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_exclusion(response): + def post_create_exclusion(self, response): logging.log(f"Received response: {response}") + return response - def pre_create_sink(request, metadata): + def pre_create_sink(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_sink(response): + def post_create_sink(self, response): logging.log(f"Received response: {response}") + return response - def pre_create_view(request, metadata): + def pre_create_view(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_view(response): + def post_create_view(self, response): logging.log(f"Received response: {response}") + return response - def pre_delete_bucket(request, metadata): + def pre_delete_bucket(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_delete_exclusion(request, metadata): + def pre_delete_exclusion(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_delete_sink(request, metadata): + def pre_delete_sink(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_delete_view(request, metadata): + def pre_delete_view(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_get_bucket(request, 
metadata): + def pre_get_bucket(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_bucket(response): + def post_get_bucket(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_cmek_settings(request, metadata): + def pre_get_cmek_settings(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_cmek_settings(response): + def post_get_cmek_settings(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_exclusion(request, metadata): + def pre_get_exclusion(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_exclusion(response): + def post_get_exclusion(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_sink(request, metadata): + def pre_get_sink(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_sink(response): + def post_get_sink(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_view(request, metadata): + def pre_get_view(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_view(response): + def post_get_view(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_buckets(request, metadata): + def pre_list_buckets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_buckets(response): + def post_list_buckets(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_exclusions(request, metadata): + def pre_list_exclusions(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_exclusions(response): + def post_list_exclusions(self, 
response): logging.log(f"Received response: {response}") + return response - def pre_list_sinks(request, metadata): + def pre_list_sinks(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_sinks(response): + def post_list_sinks(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_views(request, metadata): + def pre_list_views(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_views(response): + def post_list_views(self, response): logging.log(f"Received response: {response}") + return response - def pre_undelete_bucket(request, metadata): + def pre_undelete_bucket(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_update_bucket(request, metadata): + def pre_update_bucket(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_bucket(response): + def post_update_bucket(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_cmek_settings(request, metadata): + def pre_update_cmek_settings(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_cmek_settings(response): + def post_update_cmek_settings(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_exclusion(request, metadata): + def pre_update_exclusion(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_exclusion(response): + def post_update_exclusion(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_sink(request, metadata): + def pre_update_sink(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_sink(response): + def 
post_update_sink(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_view(request, metadata): + def pre_update_view(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_view(response): + def post_update_view(self, response): logging.log(f"Received response: {response}") + return response transport = ConfigServiceV2RestTransport(interceptor=MyCustomConfigServiceV2Interceptor()) client = ConfigServiceV2Client(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index 7f52c18e9de2..38ad43e2b104 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -67,37 +67,41 @@ class LoggingServiceV2RestInterceptor: .. code-block:: python class MyCustomLoggingServiceV2Interceptor(LoggingServiceV2RestInterceptor): - def pre_delete_log(request, metadata): + def pre_delete_log(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_list_log_entries(request, metadata): + def pre_list_log_entries(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_log_entries(response): + def post_list_log_entries(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_logs(request, metadata): + def pre_list_logs(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_logs(response): + def post_list_logs(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_monitored_resource_descriptors(request, metadata): + def pre_list_monitored_resource_descriptors(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_monitored_resource_descriptors(response): + def post_list_monitored_resource_descriptors(self, 
response): logging.log(f"Received response: {response}") + return response - def pre_write_log_entries(request, metadata): + def pre_write_log_entries(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_write_log_entries(response): + def post_write_log_entries(self, response): logging.log(f"Received response: {response}") + return response transport = LoggingServiceV2RestTransport(interceptor=MyCustomLoggingServiceV2Interceptor()) client = LoggingServiceV2Client(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py index 6bdc187c8155..a7d2de532b86 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -67,37 +67,41 @@ class MetricsServiceV2RestInterceptor: .. code-block:: python class MyCustomMetricsServiceV2Interceptor(MetricsServiceV2RestInterceptor): - def pre_create_log_metric(request, metadata): + def pre_create_log_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_log_metric(response): + def post_create_log_metric(self, response): logging.log(f"Received response: {response}") + return response - def pre_delete_log_metric(request, metadata): + def pre_delete_log_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def pre_get_log_metric(request, metadata): + def pre_get_log_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_log_metric(response): + def post_get_log_metric(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_log_metrics(request, metadata): + def pre_list_log_metrics(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_log_metrics(response): + def post_list_log_metrics(self, response): logging.log(f"Received 
response: {response}") + return response - def pre_update_log_metric(request, metadata): + def pre_update_log_metric(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_log_metric(response): + def post_update_log_metric(self, response): logging.log(f"Received response: {response}") + return response transport = MetricsServiceV2RestTransport(interceptor=MyCustomMetricsServiceV2Interceptor()) client = MetricsServiceV2Client(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini b/packages/gapic-generator/tests/integration/goldens/logging/mypy.ini old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt old mode 100644 new mode 
100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.flake8 b/packages/gapic-generator/tests/integration/goldens/redis/.flake8 old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/README.rst b/packages/gapic-generator/tests/integration/goldens/redis/README.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py 
b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/cloud_redis.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/py.typed old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 38e7567054a4..c9bda09ab8ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -68,68 +68,77 @@ class CloudRedisRestInterceptor: .. 
code-block:: python class MyCustomCloudRedisInterceptor(CloudRedisRestInterceptor): - def pre_create_instance(request, metadata): + def pre_create_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_create_instance(response): + def post_create_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_delete_instance(request, metadata): + def pre_delete_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_delete_instance(response): + def post_delete_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_export_instance(request, metadata): + def pre_export_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_export_instance(response): + def post_export_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_failover_instance(request, metadata): + def pre_failover_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_failover_instance(response): + def post_failover_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_get_instance(request, metadata): + def pre_get_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_get_instance(response): + def post_get_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_import_instance(request, metadata): + def pre_import_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_import_instance(response): + def post_import_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_list_instances(request, 
metadata): + def pre_list_instances(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_list_instances(response): + def post_list_instances(self, response): logging.log(f"Received response: {response}") + return response - def pre_update_instance(request, metadata): + def pre_update_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_update_instance(response): + def post_update_instance(self, response): logging.log(f"Received response: {response}") + return response - def pre_upgrade_instance(request, metadata): + def pre_upgrade_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_upgrade_instance(response): + def post_upgrade_instance(self, response): logging.log(f"Received response: {response}") + return response transport = CloudRedisRestTransport(interceptor=MyCustomCloudRedisInterceptor()) client = CloudRedisClient(transport=transport) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini b/packages/gapic-generator/tests/integration/goldens/redis/mypy.ini old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py old mode 100644 new mode 100755 diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py old mode 100644 new mode 100755 From 2085e8d8fa62215d88891fc4976a730f3ce9661f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 13 Dec 2022 17:07:12 +0000 Subject: [PATCH 0944/1339] chore(main): release 1.7.1 (#1542) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8c24ece7fae1..501acfa66ad8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.7.1](https://github.com/googleapis/gapic-generator-python/compare/v1.7.0...v1.7.1) (2022-12-13) + + +### Bug Fixes + +* Fix unit test with float comparison ([#1541](https://github.com/googleapis/gapic-generator-python/issues/1541)) 
([c5741ff](https://github.com/googleapis/gapic-generator-python/commit/c5741ff2f0658800210060392727016d1a5b4a8b)) + ## [1.7.0](https://github.com/googleapis/gapic-generator-python/compare/v1.6.2...v1.7.0) (2022-12-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9772e569e1d2..67f4bad92779 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.7.0" +version = "1.7.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 3ef1aad88e33ad94a0f688d137a1df53edae7c67 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Mon, 19 Dec 2022 15:56:23 -0800 Subject: [PATCH 0945/1339] chore: add sample function parameters, without type annotations (#1540) * chore: add sample function parameters, without type annotations --- .../configured_snippet.py | 13 +++++-- .../configurable_snippetgen/libcst_utils.py | 23 +++++++++++++ .../test_configured_snippet.py | 2 +- .../test_libcst_utils.py | 34 +++++++++++++++++++ 4 files changed, 69 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index 46bc79f82896..f609e06ac32b 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -79,12 +79,21 @@ def filename(self) -> str: sync_or_async = "sync" if self.is_sync else "async" return f"{module_name}_{self.api_version}_generated_{service_name}_{snake_case_rpc_name}_{config_id}_{sync_or_async}.py" + def _add_sample_function_parameters(self) -> None: + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1537, add typing annotation in 
sample function parameters. + params = [] + for config_parameter in self.config.signature.parameters: + params.append(libcst_utils.convert_parameter(config_parameter)) + parameters = libcst.Parameters(params=params) + self._sample_function_def = self._sample_function_def.with_changes( + params=parameters + ) + def _build_sample_function(self) -> None: # TODO: https://github.com/googleapis/gapic-generator-python/issues/1536, add return type. - # TODO: https://github.com/googleapis/gapic-generator-python/issues/1537, add sample function parameters. # TODO: https://github.com/googleapis/gapic-generator-python/issues/1538, add docstring. # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. - pass + self._add_sample_function_parameters() def _add_sample_function(self) -> None: self._module = self._module.with_changes( diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py index f60a579f26b3..871f65088c08 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py @@ -34,3 +34,26 @@ def base_function_def(function_name: str, is_sync: bool) -> libcst.FunctionDef: ) return function_def + + +def convert_expression( + config_expression: snippet_config_language_pb2.Expression, +) -> libcst.BaseExpression: + value_name = config_expression.WhichOneof("value") + if value_name == "string_value": + string_value = config_expression.string_value + return libcst.SimpleString(value=f'"{string_value}"') + else: + raise ValueError( + f"Conversion from Expression value {value_name} unsupported.") + + +def convert_parameter( + config_parameter: snippet_config_language_pb2.Statement.Declaration, +) -> libcst.Param: + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1537, add typing annotation in sample function parameters. 
+ param = libcst.Param( + name=libcst.Name(value=config_parameter.name), + default=convert_expression(config_parameter.value), + ) + return param diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index 1804f11e94be..522b976630fb 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -104,7 +104,7 @@ def test_code(snippet): # https://github.com/googleapis/gapic-generator-python/issues/1522 # Placeholder code. We will gradually add to the ConfiguredSnippet class # until the generated code is the same as that of the golden file. - expected_code = """def sample_create_custom_class_Basic(): + expected_code = """def sample_create_custom_class_Basic(parent = "projects/[PROJECT]/locations/us", custom_class_id = "passengerships"): \"\" """ assert snippet.code == expected_code diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py index 03844f329df0..3696c9023823 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py @@ -17,6 +17,7 @@ import pytest from gapic.configurable_snippetgen import libcst_utils +from gapic.configurable_snippetgen import snippet_config_language_pb2 def _assert_code_equal(node: libcst.CSTNode, code: str) -> str: @@ -41,3 +42,36 @@ def test_base_function_def(is_sync, expected_code): # Sometimes it is more convenient to just verify the code. 
_assert_code_equal(node, expected_code) + + +def test_convert_expression_string_value(): + config_expression = snippet_config_language_pb2.Expression( + string_value="hello world" + ) + node = libcst_utils.convert_expression(config_expression) + expected_node = libcst.SimpleString(value='"hello world"') + + assert node.deep_equals(expected_node), (node, expected_node) + + +def test_convert_expression_should_raise_error_if_unsupported(): + config_expression = snippet_config_language_pb2.Expression( + default_value=snippet_config_language_pb2.Expression.DefaultValue.DEFAULT_VALUE + ) + with pytest.raises(ValueError): + libcst_utils.convert_expression(config_expression) + + +def test_convert_parameter(): + config_parameter = snippet_config_language_pb2.Statement.Declaration( + name="some_variable", + value=snippet_config_language_pb2.Expression( + string_value="hello world"), + ) + node = libcst_utils.convert_parameter(config_parameter) + expected_node = libcst.Param( + name=libcst.Name(value="some_variable"), + default=libcst.SimpleString(value='"hello world"'), + ) + + assert node.deep_equals(expected_node), (node, expected_node) From f1dc0dcd82486db1fc114abe2ad67cbd72d9a421 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Sat, 24 Dec 2022 21:41:23 -0800 Subject: [PATCH 0946/1339] chore: add service client initialization (#1546) * chore: add service client initialization --- .../configured_snippet.py | 76 +++++++++++++++++-- .../test_configured_snippet.py | 61 +++++++++++++-- 2 files changed, 124 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index f609e06ac32b..d4664e174084 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -22,6 +22,24 @@ from gapic.schema import api +class 
_AppendToSampleFunctionBody(libcst.CSTTransformer): + def __init__(self, statement: libcst.BaseStatement): + self.statement = statement + + def visit_IndentedBlock(self, node: libcst.IndentedBlock) -> bool: + # Do not visit any nested indented blocks. + return False + + def leave_IndentedBlock( + self, original_node: libcst.IndentedBlock, updated_node: libcst.IndentedBlock + ) -> libcst.IndentedBlock: + del original_node + # FunctionDef.body is an IndentedBlock, and IndentedBlock.body + # is the actual sequence of statements. + new_body = list(updated_node.body) + [self.statement] + return updated_node.with_changes(body=new_body) + + @dataclasses.dataclass class ConfiguredSnippet: api_schema: api.API @@ -29,7 +47,7 @@ class ConfiguredSnippet: api_version: str is_sync: bool - def __post_init__(self): + def __post_init__(self) -> None: self._module: libcst.Module = libcst_utils.empty_module() self._sample_function_def: libcst.FunctionDef = libcst_utils.base_function_def( function_name=self.sample_function_name, is_sync=self.is_sync @@ -40,6 +58,16 @@ def code(self) -> str: """The code of the configured snippet.""" return self._module.code + @property + def gapic_module_name(self) -> str: + """The GAPIC module name. + + For example: + "speech_v1" + """ + module_name = self.config.rpc.proto_package.split(".")[-1] + return f"{module_name}_{self.api_version}" + @property def region_tag(self) -> str: """The region tag of the snippet. 
@@ -47,12 +75,11 @@ def region_tag(self) -> str: For example: "speech_v1_config_Adaptation_CreateCustomClass_Basic_async" """ - module_name = self.config.rpc.proto_package.split(".")[-1] service_name = self.config.rpc.service_name rpc_name = self.config.rpc.rpc_name config_id = self.config.metadata.config_id sync_or_async = "sync" if self.is_sync else "async" - return f"{module_name}_{self.api_version}_config_{service_name}_{rpc_name}_{config_id}_{sync_or_async}" + return f"{self.gapic_module_name}_config_{service_name}_{rpc_name}_{config_id}_{sync_or_async}" @property def sample_function_name(self) -> str: @@ -65,6 +92,20 @@ def sample_function_name(self) -> str: config_id = self.config.metadata.config_id return f"sample_{snippet_method_name}_{config_id}" + @property + def client_class_name(self) -> str: + """The service client's class name. + + For example: + "AdaptationClient" + "AdaptationAsyncClient" + """ + if self.is_sync: + client_class_name = f"{self.config.rpc.service_name}Client" + else: + client_class_name = f"{self.config.rpc.service_name}AsyncClient" + return client_class_name + @property def filename(self) -> str: """The snippet's file name. 
@@ -72,12 +113,23 @@ def filename(self) -> str: For example: "speech_v1_generated_Adaptation_create_custom_class_Basic_async.py" """ - module_name = self.config.rpc.proto_package.split(".")[-1] service_name = self.config.rpc.service_name snake_case_rpc_name = inflection.underscore(self.config.rpc.rpc_name) config_id = self.config.metadata.config_id sync_or_async = "sync" if self.is_sync else "async" - return f"{module_name}_{self.api_version}_generated_{service_name}_{snake_case_rpc_name}_{config_id}_{sync_or_async}.py" + return f"{self.gapic_module_name}_generated_{service_name}_{snake_case_rpc_name}_{config_id}_{sync_or_async}.py" + + def _append_to_sample_function_def_body( + self, statement: libcst.BaseStatement + ) -> None: + """Appends the statement node to the current sample function def.""" + transformer = _AppendToSampleFunctionBody(statement) + + # The result of applying a transformer could be of a different type + # in general, but we will only update the sample function def here. + self._sample_function_def = self._sample_function_def.visit( + transformer + ) # type: ignore def _add_sample_function_parameters(self) -> None: # TODO: https://github.com/googleapis/gapic-generator-python/issues/1537, add typing annotation in sample function parameters. @@ -89,11 +141,23 @@ def _add_sample_function_parameters(self) -> None: params=parameters ) + def _append_service_client_initialization(self) -> None: + initialization_call = libcst.parse_statement( + f"client = {self.gapic_module_name}.{self.client_class_name}()" + ) + self._append_to_sample_function_def_body(initialization_call) + + def _add_sample_function_body(self) -> None: + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. + # Each call below appends one or more statements to the sample + # function's body. 
+ self._append_service_client_initialization() + def _build_sample_function(self) -> None: # TODO: https://github.com/googleapis/gapic-generator-python/issues/1536, add return type. # TODO: https://github.com/googleapis/gapic-generator-python/issues/1538, add docstring. - # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. self._add_sample_function_parameters() + self._add_sample_function_body() def _add_sample_function(self) -> None: self._module = self._module.with_changes( diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index 522b976630fb..e4562db30e6a 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -17,6 +17,7 @@ from google.protobuf import json_format from google.protobuf.compiler import plugin_pb2 +import libcst import pytest from gapic import utils @@ -81,21 +82,66 @@ def snippet(): ) -def test_region_tag(snippet): - assert ( - snippet.region_tag == "speech_v1_config_Adaptation_CreateCustomClass_Basic_sync" +def test_gapic_module_name(snippet): + assert snippet.gapic_module_name == "speech_v1" + + +@pytest.mark.parametrize( + "is_sync,expected", + [ + (True, "speech_v1_config_Adaptation_CreateCustomClass_Basic_sync"), + (False, "speech_v1_config_Adaptation_CreateCustomClass_Basic_async"), + ], +) +def test_region_tag(is_sync, expected): + snippet = _make_configured_snippet( + SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=is_sync ) + assert snippet.region_tag == expected def test_sample_function_name(snippet): assert snippet.sample_function_name == "sample_create_custom_class_Basic" -def test_filename(snippet): - assert ( - snippet.filename - == 
"speech_v1_generated_Adaptation_create_custom_class_Basic_sync.py" +@pytest.mark.parametrize( + "is_sync,expected", + [ + (True, "AdaptationClient"), + (False, "AdaptationAsyncClient"), + ], +) +def test_client_class_name(is_sync, expected): + snippet = _make_configured_snippet( + SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=is_sync + ) + assert snippet.client_class_name == expected + + +@pytest.mark.parametrize( + "is_sync,expected", + [ + (True, "speech_v1_generated_Adaptation_create_custom_class_Basic_sync.py"), + (False, "speech_v1_generated_Adaptation_create_custom_class_Basic_async.py"), + ], +) +def test_filename(is_sync, expected): + snippet = _make_configured_snippet( + SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=is_sync ) + assert snippet.filename == expected + + +def test_AppendToSampleFunctionBody(): + # Start with a function def with nonempty body to we can be sure the + # transformer appends the statement. + function_def = libcst.parse_statement("def f():\n 'hello'") + statement = libcst.parse_statement("'world'") + transformer = configured_snippet._AppendToSampleFunctionBody(statement) + updated_function_def = function_def.visit(transformer) + expected_function_def = libcst.parse_statement( + "def f():\n 'hello'\n 'world'") + assert updated_function_def.deep_equals(expected_function_def) def test_code(snippet): @@ -106,5 +152,6 @@ def test_code(snippet): # until the generated code is the same as that of the golden file. 
expected_code = """def sample_create_custom_class_Basic(parent = "projects/[PROJECT]/locations/us", custom_class_id = "passengerships"): \"\" + client = speech_v1.AdaptationClient() """ assert snippet.code == expected_code From 69cdcc7fe8d35ec19d0f18661046aebee2eac167 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 4 Jan 2023 14:02:29 -0500 Subject: [PATCH 0947/1339] chore: resolve docstring issue where oneof reference is missing (#1548) --- .../%namespace/%name/%version/%sub/types/_message.py.j2 | 4 +++- .../%namespace/%name_%version/%sub/types/_message.py.j2 | 6 ++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index 320498ba217b..ea4d5eb25535 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -1,7 +1,6 @@ class {{ message.name }}({{ p }}.Message): r"""{{ message.meta.doc|rst(indent=4) }} {% if message.fields|length %} - {# Only include note if a oneof has more than one member field. #} {% if message.oneof_fields() %} {% if message.oneof_fields().values() | map('length') | max > 1 %} @@ -11,6 +10,9 @@ class {{ message.name }}({{ p }}.Message): members. {% endif %} + {% endif %} + {# Use select filter to capture nested values. See https://github.com/googleapis/gapic-generator-python/issues/1083 #} + {%- if message.fields.values() | map(attribute="oneof") | select | list %} .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index 31b6e795aa4e..eb2dc2399830 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -1,8 +1,7 @@ class {{ message.name }}({{ p }}.Message): r"""{{ message.meta.doc|rst(indent=4) }} {% if message.fields|length %} - - {# Only include note if a oneof has more than one member field. #} + {# Only include note if a oneof has more than one member field. #} {% if message.oneof_fields() %} {% if message.oneof_fields().values() | map('length') | max > 1 %} This message has `oneof`_ fields (mutually exclusive fields). @@ -11,6 +10,9 @@ class {{ message.name }}({{ p }}.Message): members. {% endif %} + {% endif %} + {# Use select filter to capture nested values. See https://github.com/googleapis/gapic-generator-python/issues/1083 #} + {%- if message.fields.values() | map(attribute="oneof") | select | list %} .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields {% endif %} From 783e38a3074ca8aa1d9ddd36e2c8490c53347aac Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 4 Jan 2023 21:45:41 +0100 Subject: [PATCH 0948/1339] chore: update renovate config (#1532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update dependency importlib-metadata to v5.1.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: update renovate config Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/renovate.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/renovate.json b/packages/gapic-generator/renovate.json index 4de84a027424..5568d37c0400 100644 --- a/packages/gapic-generator/renovate.json +++ b/packages/gapic-generator/renovate.json @@ -1,7 +1,11 @@ { "extends": [ "config:base", - "docker:disable", - ":disableDependencyDashboard" + "group:all", + ":disableDependencyDashboard", + "schedule:weekly" + ], + "ignorePaths": [ + ".kokoro/requirements.txt" ] } From 4e9e35e86c33442972069922d0fa5ab5f76f51ab Mon Sep 17 00:00:00 2001 From: "Bob \"Wombat\" Hogg" Date: Wed, 4 Jan 2023 19:17:59 -0500 Subject: [PATCH 0949/1339] docs(utils): Fix typos in nth function docstring (#1553) --- packages/gapic-generator/gapic/utils/code.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 15f327983c85..836b5676b09b 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -58,8 +58,8 @@ def nth(iterable: Iterable[T], n: int, default: Optional[T] = None) -> Optional[ Args iterable (Iterable(T)): An iterable on any type. 
- n (int): The 'index' of the lement to retrieve. - default (Optional(T)): An optional default elemnt if the iterable has + n (int): The 'index' of the element to retrieve. + default (Optional(T)): An optional default element if the iterable has fewer than n elements. """ return next(itertools.islice(iterable, n, None), default) From 5854cdcb8e5718b2ef65e27a6dd2d9d77cf5d5c2 Mon Sep 17 00:00:00 2001 From: "Bob \"Wombat\" Hogg" Date: Thu, 5 Jan 2023 12:35:52 -0500 Subject: [PATCH 0950/1339] docs(client): Fix typo in get_mtls_endpoint_and_cert_source doc (#1552) --- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/async_client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index ba5c54d86792..3a1df2b857d1 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -113,7 +113,7 @@ class {{ service.async_client_name }}: The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index b7e485a3db2f..2ae661bb49ad 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -230,7 +230,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 69b3a4dcbfb4..8c3355ca8951 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -113,7 +113,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 23d576cd103a..8695e53c6728 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -257,7 +257,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index cf5c77907604..7b4ecc386b7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -118,7 +118,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index f1b895a7e97a..91e8d7a9ae99 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -253,7 +253,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index c522df549bfe..2a5ea08d65fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -120,7 +120,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 3b2ff888aa91..5d344f9721ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -273,7 +273,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 070dfee7a20e..85c6dfdfbd21 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -117,7 +117,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 7a50bf7e6280..ddb921837f53 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -288,7 +288,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 1e92b2865739..e03941e09995 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -109,7 +109,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 03c62abdaaa5..2047d73eb5a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -244,7 +244,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 415d708a5e55..891f40ddab53 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -110,7 +110,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index c206dc3f1bef..8986dadb9416 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -245,7 +245,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index bd3f2c590fd8..f49d7d0f9402 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -133,7 +133,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 83507eb4d325..d2c77834e122 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -268,7 +268,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. From 64d5586d37795f0ae12d8fd111e42ee856dbbe92 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 9 Jan 2023 15:59:07 +0000 Subject: [PATCH 0951/1339] chore(deps): update all dependencies (#1556) * chore(deps): update all dependencies * revert Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/Dockerfile | 2 +- packages/gapic-generator/requirements.txt | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index 750c79a1c115..840b88f6ae30 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.7-slim +FROM python:3.11-slim # Install system packages. 
RUN apt-get update \ diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index aa540b7dcba7..89d668d9ec02 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,14 +1,14 @@ click==8.1.3 google-api-core==2.11.0 -googleapis-common-protos==1.57.0 +googleapis-common-protos==1.57.1 jinja2==3.1.2 MarkupSafe==2.1.1 -protobuf==3.20.3 +protobuf==4.21.12 pypandoc==1.10 PyYAML==6.0 setuptools==65.6.3 grpc-google-iam-v1==0.12.4 proto-plus==1.22.1 -pytest-asyncio==0.20.2 +pytest-asyncio==0.20.3 libcst==0.4.9 inflection==0.5.1 \ No newline at end of file From db0c7cc74b545284307a9bb243d8210f31f9dd3b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 9 Jan 2023 12:12:48 -0500 Subject: [PATCH 0952/1339] feat: add support for python 3.11 (#1555) * feat: add support for python 3.11 * update setup.py template * update depedency proto-plus --- .../.github/sync-repo-settings.yaml | 7 ++++ .../.github/workflows/tests.yaml | 40 +++++++++---------- .../gapic/ads-templates/noxfile.py.j2 | 1 + .../gapic/ads-templates/setup.py.j2 | 12 +++--- .../gapic/templates/noxfile.py.j2 | 3 +- .../gapic/templates/setup.py.j2 | 2 + .../templates/testing/constraints-3.12.txt.j2 | 4 ++ packages/gapic-generator/noxfile.py | 3 +- packages/gapic-generator/requirements.txt | 2 +- packages/gapic-generator/setup.py | 1 + .../integration/goldens/asset/noxfile.py | 3 +- .../tests/integration/goldens/asset/setup.py | 2 + .../asset/testing/constraints-3.12.txt | 7 ++++ .../goldens/credentials/noxfile.py | 3 +- .../integration/goldens/credentials/setup.py | 2 + .../credentials/testing/constraints-3.12.txt | 6 +++ .../integration/goldens/eventarc/noxfile.py | 3 +- .../integration/goldens/eventarc/setup.py | 2 + .../eventarc/testing/constraints-3.12.txt | 6 +++ .../integration/goldens/logging/noxfile.py | 3 +- .../integration/goldens/logging/setup.py | 2 + .../logging/testing/constraints-3.12.txt | 6 +++ 
.../integration/goldens/redis/noxfile.py | 3 +- .../tests/integration/goldens/redis/setup.py | 2 + .../redis/testing/constraints-3.12.txt | 6 +++ 25 files changed, 98 insertions(+), 33 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.12.txt.j2 create mode 100644 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt create mode 100644 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 7e3f77b03772..bae183f3b068 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -20,18 +20,22 @@ branchProtectionRules: - 'showcase-unit (3.8)' - 'showcase-unit (3.9)' - 'showcase-unit (3.10)' + - 'showcase-unit (3.11)' - 'showcase-unit (3.7, _alternative_templates)' - 'showcase-unit (3.8, _alternative_templates)' - 'showcase-unit (3.9, _alternative_templates)' - 'showcase-unit (3.10, _alternative_templates)' + - 'showcase-unit (3.11, _alternative_templates)' - 'showcase-unit (3.7, _alternative_templates_mixins)' - 'showcase-unit (3.8, _alternative_templates_mixins)' - 'showcase-unit (3.9, _alternative_templates_mixins)' - 'showcase-unit (3.10, _alternative_templates_mixins)' + - 'showcase-unit (3.11, _alternative_templates_mixins)' - 'showcase-unit (3.7, _mixins)' - 'showcase-unit (3.8, _mixins)' - 'showcase-unit (3.9, _mixins)' - 'showcase-unit (3.10, _mixins)' + - 'showcase-unit (3.11, _mixins)' - 'showcase-unit-add-iam-methods' - 
'integration' - 'goldens-lint' @@ -41,14 +45,17 @@ branchProtectionRules: - 'unit (3.8)' - 'unit (3.9)' - 'unit (3.10)' + - 'unit (3.11)' - 'fragment (3.7)' - 'fragment (3.8)' - 'fragment (3.9)' - 'fragment (3.10)' + - 'fragment (3.11)' - 'fragment (3.7, _alternative_templates)' - 'fragment (3.8, _alternative_templates)' - 'fragment (3.9, _alternative_templates)' - 'fragment (3.10, _alternative_templates)' + - 'fragment (3.11, _alternative_templates)' - 'OwlBot Post Processor' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 31a40e06aed2..9d853ddc1ccf 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -26,7 +26,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v4 with: - python-version: "3.8" + python-version: "3.9" cache: 'pip' - name: Install nox. run: python -m pip install nox @@ -36,10 +36,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install nox. run: python -m pip install nox @@ -52,10 +52,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install system dependencies. 
run: | @@ -95,10 +95,10 @@ jobs: run: | sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ @@ -131,7 +131,7 @@ jobs: showcase-unit: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11"] variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] runs-on: ubuntu-latest steps: @@ -161,10 +161,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install system dependencies. run: | @@ -189,10 +189,10 @@ jobs: variant: ['', _alternative_templates] steps: - uses: actions/checkout@v3 - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install system dependencies. run: | @@ -214,10 +214,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install system dependencies. 
run: | @@ -230,7 +230,7 @@ jobs: unit: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -251,7 +251,7 @@ jobs: fragment: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11"] variant: ['', _alternative_templates] runs-on: ubuntu-latest steps: @@ -303,10 +303,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python 3.10 + - name: Set up Python 3.11 uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install nox. run: | @@ -322,10 +322,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up Python "3.10" + - name: Set up Python "3.11" uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.11" cache: 'pip' - name: Install autopep8 run: | diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 6d9d548323d2..91380ededde6 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -12,6 +12,7 @@ ALL_PYTHON = [ "3.8", "3.9", "3.10", + "3.11", ] @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 7b983cb273c7..584d3c53cf89 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -19,13 +19,14 @@ setuptools.setup( install_requires=( {# TODO(dovs): remove when 1.x deprecation is complete #} {% if 'rest' in opts.transport %} - 'google-api-core[grpc] >= 2.10.0, < 3.0.0dev', + "google-api-core[grpc] >= 2.10.0, < 3.0.0dev", {% else %} - 'google-api-core[grpc] >= 1.28.0, < 3.0.0dev', + "google-api-core[grpc] >= 
1.28.0, < 3.0.0dev", {% endif %} - 'googleapis-common-protos >= 1.53.0', - 'grpcio >= 1.10.0', - 'proto-plus >= 1.19.4', + "googleapis-common-protos >= 1.53.0", + "grpcio >= 1.10.0", + "proto-plus >= 1.19.4, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", {% if api.requires_package(('google', 'iam', 'v1')) %} 'grpc-google-iam-v1', {% endif %} @@ -45,6 +46,7 @@ setuptools.setup( 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index b78cd5a4b53e..eec54b8adea7 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -16,6 +16,7 @@ ALL_PYTHON = [ "3.8", "3.9", "3.10", + "3.11", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -25,7 +26,7 @@ PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], e BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.11" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 1c01e8f74897..af3ff4f668ba 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -29,6 +29,7 @@ else: dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", 
"protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. #} {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} @@ -75,6 +76,7 @@ setuptools.setup( "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.12.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.12.txt.j2 new file mode 100644 index 000000000000..615c99518a63 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.12.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 04ca408d8679..afe77cfd9ac0 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -38,6 +38,7 @@ "3.8", "3.9", "3.10", + "3.11", ) NEWEST_PYTHON = ALL_PYTHON[-1] @@ -411,7 +412,7 @@ def snippetgen(session): session.run("py.test", "-vv", "tests/snippetgen") -@nox.session(python="3.8") +@nox.session(python="3.9") def docs(session): """Build the docs.""" diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 89d668d9ec02..fd91391522d7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,7 +8,7 @@ pypandoc==1.10 PyYAML==6.0 setuptools==65.6.3 grpc-google-iam-v1==0.12.4 -proto-plus==1.22.1 +proto-plus==1.22.2 pytest-asyncio==0.20.3 libcst==0.4.9 inflection==0.5.1 \ No newline at end of file diff --git a/packages/gapic-generator/setup.py 
b/packages/gapic-generator/setup.py index 67f4bad92779..f321fc181820 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -69,6 +69,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index b66ae298136c..3da412a665ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -27,6 +27,7 @@ "3.8", "3.9", "3.10", + "3.11", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -36,7 +37,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.11" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 1bff80e4a911..08b8055cfaa7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -38,6 +38,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', ] @@ -78,6 +79,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: 
Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index ce9e5ee5518d..a70485abca6a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -27,6 +27,7 @@ "3.8", "3.9", "3.10", + "3.11", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -36,7 +37,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.11" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index c28f750303fc..fa13d9d813a4 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -38,6 +38,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 
1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-iam-credentials" @@ -77,6 +78,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 10a122e19b17..8e13edbf420d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -27,6 +27,7 @@ "3.8", "3.9", "3.10", + "3.11", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -36,7 +37,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.11" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 3be22cba3ed8..5f0d060219b5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -38,6 +38,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-eventarc" @@ -77,6 +78,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 79baa30d769b..9ce7df50a39c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -27,6 +27,7 @@ "3.8", "3.9", "3.10", + "3.11", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -36,7 +37,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.11" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 370f18029b24..1dadcc698a12 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -38,6 +38,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" @@ -77,6 +78,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 0e2d8b6d6f4e..ab743ed4ff1a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -27,6 +27,7 @@ "3.8", "3.9", "3.10", + "3.11", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -36,7 +37,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.11" nox.sessions = [ "unit", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index b9ba0c50948e..8ca327d9dc95 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -38,6 +38,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-redis" @@ -77,6 +78,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf From 128734b7c8176f0b4bc992cccf4dce14a69a3a0c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 12:57:23 -0500 Subject: [PATCH 0953/1339] chore(main): release 1.8.0 (#1554) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 501acfa66ad8..62adf6bd2a73 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.8.0](https://github.com/googleapis/gapic-generator-python/compare/v1.7.1...v1.8.0) (2023-01-09) + + +### Features + +* Add support for python 3.11 ([#1555](https://github.com/googleapis/gapic-generator-python/issues/1555)) ([ece3686](https://github.com/googleapis/gapic-generator-python/commit/ece3686bb8dfd65686209723d9a35501bbde5773)) + + +### Documentation + +* **client:** Fix typo in get_mtls_endpoint_and_cert_source doc ([#1552](https://github.com/googleapis/gapic-generator-python/issues/1552)) ([57e8abc](https://github.com/googleapis/gapic-generator-python/commit/57e8abc5ec7e2ad430e88e68d98a56a5713acf71)) +* **utils:** Fix typos in nth function docstring 
([#1553](https://github.com/googleapis/gapic-generator-python/issues/1553)) ([e4be9ae](https://github.com/googleapis/gapic-generator-python/commit/e4be9aefc3ad08a6708524b1483f3abf6ce68a05)) + ## [1.7.1](https://github.com/googleapis/gapic-generator-python/compare/v1.7.0...v1.7.1) (2022-12-13) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index f321fc181820..6aaf0e478d55 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.7.1" +version = "1.8.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From a62b6764943e4fc01ae1e8c73591b1779549510d Mon Sep 17 00:00:00 2001 From: henribru <6639509+henribru@users.noreply.github.com> Date: Mon, 9 Jan 2023 19:19:56 +0100 Subject: [PATCH 0954/1339] fix: add context manager return types (#1468) Co-authored-by: Anthonios Partheniou --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index ea09cad6a716..c5233c2703c8 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -175,7 +175,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ return self._transport - def __enter__(self): + def __enter__(self) -> "{{ service.client_name }}": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 2ae661bb49ad..bf6e34da7bc2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -379,7 +379,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} {% endfor %} - def __enter__(self): + def __enter__(self) -> "{{ service.client_name }}": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 8695e53c6728..7aff28b059a2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1833,7 +1833,7 @@ def sample_analyze_iam_policy_longrunning(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "AssetServiceClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 91e8d7a9ae99..1d9015f8c4c9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -958,7 +958,7 @@ def sample_sign_jwt(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "IAMCredentialsClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 5d344f9721ad..bfb002a3c4a1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -1034,7 +1034,7 @@ def sample_delete_trigger(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "EventarcClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index ddb921837f53..603e2caab746 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -2831,7 +2831,7 @@ def sample_update_cmek_settings(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "ConfigServiceV2Client": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 2047d73eb5a7..e55c65a0534a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1113,7 +1113,7 @@ def request_generator(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "LoggingServiceV2Client": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 8986dadb9416..536fa0952248 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -961,7 +961,7 @@ def sample_delete_log_metric(): metadata=metadata, ) - def __enter__(self): + def __enter__(self) -> "MetricsServiceV2Client": return self def __exit__(self, type, value, traceback): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index d2c77834e122..8b2f05fcd84f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1596,7 +1596,7 @@ def sample_delete_instance(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "CloudRedisClient": return self def __exit__(self, type, value, traceback): From 92794c2a72ce6083a0c974477f8b952e6d4682ca Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 16 Jan 2023 20:12:07 +0000 Subject: [PATCH 0955/1339] chore(deps): update all dependencies (#1560) * chore(deps): update all dependencies * revert Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index fd91391522d7..ddc0eedaf439 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,13 +1,13 @@ click==8.1.3 google-api-core==2.11.0 -googleapis-common-protos==1.57.1 +googleapis-common-protos==1.58.0 jinja2==3.1.2 MarkupSafe==2.1.1 protobuf==4.21.12 pypandoc==1.10 PyYAML==6.0 -setuptools==65.6.3 -grpc-google-iam-v1==0.12.4 +setuptools==66.0.0 +grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 libcst==0.4.9 From 9639f8dd3b914a62aba99ef69f92fb9f3bf02d7c Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Wed, 18 Jan 2023 10:20:21 -0800 Subject: [PATCH 0956/1339] chore: add client_options to service client initialization (#1547) * chore: add client_options --- .../configured_snippet.py | 58 ++++++++-- .../configurable_snippetgen/libcst_utils.py | 17 +++ ...ptation_create_custom_class_basic_async.py | 2 +- .../test_configured_snippet.py | 104 +++++++++++++++--- .../test_libcst_utils.py | 23 ++++ 5 files changed, 178 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index d4664e174084..f2f6affaa822 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ 
b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -13,6 +13,7 @@ # limitations under the License. import dataclasses +from typing import Optional import inflection import libcst @@ -119,6 +120,31 @@ def filename(self) -> str: sync_or_async = "sync" if self.is_sync else "async" return f"{self.gapic_module_name}_generated_{service_name}_{snake_case_rpc_name}_{config_id}_{sync_or_async}.py" + @property + def api_endpoint(self) -> Optional[str]: + """The api_endpoint in client_options.""" + service_endpoint = ( + self.config.snippet.service_client_initialization.custom_service_endpoint + ) + + if not service_endpoint.host: + return None + + # GAPIC Python libraries do not require the schema to be specified. + host = service_endpoint.host + region = service_endpoint.region + port = service_endpoint.port + + if port: + host_maybe_with_port = f"{host}:{port}" + else: + host_maybe_with_port = host + + if region: + return f"{region}-{host_maybe_with_port}" + else: + return host_maybe_with_port + def _append_to_sample_function_def_body( self, statement: libcst.BaseStatement ) -> None: @@ -141,23 +167,33 @@ def _add_sample_function_parameters(self) -> None: params=parameters ) - def _append_service_client_initialization(self) -> None: - initialization_call = libcst.parse_statement( - f"client = {self.gapic_module_name}.{self.client_class_name}()" - ) - self._append_to_sample_function_def_body(initialization_call) + def _get_service_client_initialization(self) -> libcst.BaseStatement: + if self.api_endpoint is not None: + client_options_arg = libcst.Arg( + keyword=libcst.Name("client_options"), + value=libcst_utils.convert_py_dict( + [("api_endpoint", self.api_endpoint)] + ), + ) + service_client_initialization = libcst.helpers.parse_template_statement( + f"client = {self.gapic_module_name}.{self.client_class_name}({{arg}})", + arg=client_options_arg, + ) + else: + service_client_initialization = libcst.parse_statement( + f"client = 
{self.gapic_module_name}.{self.client_class_name}()" + ) - def _add_sample_function_body(self) -> None: - # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. - # Each call below appends one or more statements to the sample - # function's body. - self._append_service_client_initialization() + return service_client_initialization def _build_sample_function(self) -> None: # TODO: https://github.com/googleapis/gapic-generator-python/issues/1536, add return type. # TODO: https://github.com/googleapis/gapic-generator-python/issues/1538, add docstring. + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. self._add_sample_function_parameters() - self._add_sample_function_body() + self._append_to_sample_function_def_body( + self._get_service_client_initialization() + ) def _add_sample_function(self) -> None: self._module = self._module.with_changes( diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py index 871f65088c08..6124b262a70f 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Sequence, Tuple + import libcst from gapic.configurable_snippetgen import snippet_config_language_pb2 @@ -57,3 +59,18 @@ def convert_parameter( default=convert_expression(config_parameter.value), ) return param + + +def convert_py_dict(key_value_pairs: Sequence[Tuple[str, str]]) -> libcst.Dict: + elements = [] + for key, value in key_value_pairs: + if not (isinstance(key, str) and isinstance(value, str)): + raise ValueError( + f"convert_py_dict supports only string keys and values.") + elements.append( + libcst.DictElement( + libcst.SimpleString( + f'"{key}"'), libcst.SimpleString(f'"{value}"') + ) + ) + return libcst.Dict(elements=elements) diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py index 52d4563c8f34..e453701f8cf9 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/resources/speech/speech_v1_generated_adaptation_create_custom_class_basic_async.py @@ -40,7 +40,7 @@ async def sample_create_custom_class_Basic( a CustomClass """ client = speech_v1.AdaptationAsyncClient( - client_options={"api_endpoint": "https://us-speech.googleapis.com"} + client_options={"api_endpoint": "us-speech.googleapis.com"} ) request = speech_v1.CreateCustomClassRequest( diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index e4562db30e6a..69fed656cdef 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ 
b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -65,11 +65,13 @@ def _load_snippet_config( def _make_configured_snippet( - request_path: Path, config_path: Path, api_version: str, is_sync: bool + request_path: Path = SPEECH_V1_REQUEST_PATH, + config_path: Path = CONFIG_JSON_PATH, + api_version: str = "v1", + is_sync: bool = True, ) -> configured_snippet.ConfiguredSnippet: api_schema = _load_api_schema(request_path) snippet_config = _load_snippet_config(config_path) - return configured_snippet.ConfiguredSnippet( api_schema, snippet_config, api_version, is_sync ) @@ -77,9 +79,16 @@ def _make_configured_snippet( @pytest.fixture def snippet(): - return _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=True + return _make_configured_snippet() + + +@pytest.fixture +def snippet_without_endpoint(): + snippet = _make_configured_snippet() + snippet.config.snippet.service_client_initialization.ClearField( + "custom_service_endpoint" ) + return snippet def test_gapic_module_name(snippet): @@ -94,9 +103,7 @@ def test_gapic_module_name(snippet): ], ) def test_region_tag(is_sync, expected): - snippet = _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=is_sync - ) + snippet = _make_configured_snippet(is_sync=is_sync) assert snippet.region_tag == expected @@ -112,9 +119,7 @@ def test_sample_function_name(snippet): ], ) def test_client_class_name(is_sync, expected): - snippet = _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=is_sync - ) + snippet = _make_configured_snippet(is_sync=is_sync) assert snippet.client_class_name == expected @@ -126,12 +131,72 @@ def test_client_class_name(is_sync, expected): ], ) def test_filename(is_sync, expected): - snippet = _make_configured_snippet( - SPEECH_V1_REQUEST_PATH, CONFIG_JSON_PATH, api_version="v1", is_sync=is_sync - ) + snippet = 
_make_configured_snippet(is_sync=is_sync) assert snippet.filename == expected +@pytest.mark.parametrize( + "snippet_config_dict,expected", + [ + ({}, None), + ( + { + "snippet": { + "serviceClientInitialization": { + "customServiceEndpoint": { + "host": "speech.googleapis.com", + } + }, + } + }, + "speech.googleapis.com", + ), + ( + { + "snippet": { + "serviceClientInitialization": { + "customServiceEndpoint": { + "host": "speech.googleapis.com", + "region": "us", + } + }, + } + }, + "us-speech.googleapis.com", + ), + ( + { + "snippet": { + "serviceClientInitialization": { + "customServiceEndpoint": { + "host": "speech.googleapis.com", + "region": "us", + "port": 123, + } + }, + } + }, + "us-speech.googleapis.com:123", + ), + ], +) +def test_api_endpoint(snippet_config_dict, expected): + # api_schema, api_version, and is_sync do not matter here. + api_schema = _load_api_schema(SPEECH_V1_REQUEST_PATH) + api_version = "v1" + is_sync = True + + snippet_config = json_format.ParseDict( + snippet_config_dict, snippet_config_language_pb2.SnippetConfig() + ) + + snippet = configured_snippet.ConfiguredSnippet( + api_schema, snippet_config, api_version, is_sync + ) + + assert snippet.api_endpoint == expected + + def test_AppendToSampleFunctionBody(): # Start with a function def with nonempty body to we can be sure the # transformer appends the statement. @@ -152,6 +217,17 @@ def test_code(snippet): # until the generated code is the same as that of the golden file. 
expected_code = """def sample_create_custom_class_Basic(parent = "projects/[PROJECT]/locations/us", custom_class_id = "passengerships"): \"\" - client = speech_v1.AdaptationClient() + client = speech_v1.AdaptationClient(client_options = {"api_endpoint": "us-speech.googleapis.com"}) """ assert snippet.code == expected_code + + +def test_code_without_endpoint(snippet_without_endpoint): + snippet_without_endpoint.generate() + + # https://github.com/googleapis/gapic-generator-python/issues/1522 + expected_code = """def sample_create_custom_class_Basic(parent = "projects/[PROJECT]/locations/us", custom_class_id = "passengerships"): + \"\" + client = speech_v1.AdaptationClient() +""" + assert snippet_without_endpoint.code == expected_code diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py index 3696c9023823..6d2e62036b6a 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py @@ -75,3 +75,26 @@ def test_convert_parameter(): ) assert node.deep_equals(expected_node), (node, expected_node) + + +def test_convert_py_dict(): + key_value_pairs = [("key1", "value1"), ("key2", "value2")] + node = libcst_utils.convert_py_dict(key_value_pairs) + expected_node = libcst.Dict( + [ + libcst.DictElement( + libcst.SimpleString('"key1"'), libcst.SimpleString('"value1"') + ), + libcst.DictElement( + libcst.SimpleString('"key2"'), libcst.SimpleString('"value2"') + ), + ] + ) + + assert node.deep_equals(expected_node), (node, expected_node) + + +def test_convert_py_dict_should_raise_error_if_unsupported(): + key_value_pairs = [("key1", 5)] + with pytest.raises(ValueError): + libcst_utils.convert_py_dict(key_value_pairs) From deeee04e653720e64b8eb11c6d768cc2395e8b76 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 19 Jan 2023 
12:13:49 -0500 Subject: [PATCH 0957/1339] docs: add documentation for enums (#1568) * docs: add documentation for enums * add enum values --- .../%name_%version/%sub/types/_enum.py.j2 | 9 ++- .../cloud/asset_v1/types/asset_service.py | 49 ++++++++++++- .../google/cloud/asset_v1/types/assets.py | 30 +++++++- .../google/cloud/logging_v2/types/logging.py | 15 +++- .../cloud/logging_v2/types/logging_config.py | 25 ++++++- .../cloud/logging_v2/types/logging_metrics.py | 9 ++- .../cloud/redis_v1/types/cloud_redis.py | 71 ++++++++++++++++++- 7 files changed, 197 insertions(+), 11 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 index cf82d19ea163..cac30e87bb26 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 @@ -1,5 +1,12 @@ class {{ enum.name }}({{ p }}.Enum): - r"""{{ enum.meta.doc|rst(indent=4) }}""" + r"""{{ enum.meta.doc|rst(indent=4) }} + + Values: + {% for enum_value in enum.values %} + {{ enum_value.name }} ({{ enum_value.number }}): + {{ enum_value.meta.doc|rst(width=72, indent=12, nl=False) }} + {% endfor %} + """ {% if enum.enum_pb.HasField("options") %} _pb_options = {{ enum.options_dict }} {% endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 047032e1a0dc..78ce93397f43 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -64,7 +64,24 @@ class ContentType(proto.Enum): - r"""Asset content type.""" + r"""Asset 
content type. + + Values: + CONTENT_TYPE_UNSPECIFIED (0): + Unspecified content type. + RESOURCE (1): + Resource metadata. + IAM_POLICY (2): + The actual IAM policy set on a resource. + ORG_POLICY (4): + The Cloud Organization Policy set on an + asset. + ACCESS_POLICY (5): + The Cloud Access context manager Policy set + on an asset. + OS_INVENTORY (6): + The runtime OS Inventory information. + """ CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 IAM_POLICY = 2 @@ -749,6 +766,25 @@ class PartitionKey(proto.Enum): partition key is a timestamp column, the actual partition is based on its date value (expressed in UTC. see details in https://cloud.google.com/bigquery/docs/partitioned-tables#date_timestamp_partitioned_tables). + + Values: + PARTITION_KEY_UNSPECIFIED (0): + Unspecified partition key. If used, it means + using non-partitioned table. + READ_TIME (1): + The time when the snapshot is taken. If specified as + partition key, the result table(s) is partitoned by the + additional timestamp column, readTime. If [read_time] in + ExportAssetsRequest is specified, the readTime column's + value will be the same as it. Otherwise, its value will be + the current time that is used to take the snapshot. + REQUEST_TIME (2): + The time when the request is received and + started to be processed. If specified as + partition key, the result table(s) is partitoned + by the requestTime column, an additional + timestamp column representing when the request + was received. """ PARTITION_KEY_UNSPECIFIED = 0 READ_TIME = 1 @@ -1706,6 +1742,17 @@ class PartitionKey(proto.Enum): reduce query cost by filtering partitions. Refer to https://cloud.google.com/bigquery/docs/partitioned-tables for details. + + Values: + PARTITION_KEY_UNSPECIFIED (0): + Unspecified partition key. Tables won't be + partitioned using this option. + REQUEST_TIME (1): + The time when the request is received. 
If + specified as partition key, the result table(s) + is partitoned by the RequestTime column, an + additional timestamp column representing when + the request was received. """ PARTITION_KEY_UNSPECIFIED = 0 REQUEST_TIME = 1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 55770d261e04..b14b05ed75d7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -65,7 +65,20 @@ class TemporalAsset(proto.Message): Real-Time Feed. """ class PriorAssetState(proto.Enum): - r"""State of prior asset.""" + r"""State of prior asset. + + Values: + PRIOR_ASSET_STATE_UNSPECIFIED (0): + prior_asset is not applicable for the current asset. + PRESENT (1): + prior_asset is populated correctly. + INVALID (2): + Failed to set prior_asset. + DOES_NOT_EXIST (3): + Current asset is the first known state. + DELETED (4): + prior_asset is a deletion. + """ PRIOR_ASSET_STATE_UNSPECIFIED = 0 PRESENT = 1 INVALID = 2 @@ -831,7 +844,20 @@ class ConditionEvaluation(proto.Message): The evaluation result. """ class EvaluationValue(proto.Enum): - r"""Value of this expression.""" + r"""Value of this expression. + + Values: + EVALUATION_VALUE_UNSPECIFIED (0): + Reserved for future use. + TRUE (1): + The evaluation result is ``true``. + FALSE (2): + The evaluation result is ``false``. + CONDITIONAL (3): + The evaluation result is ``conditional`` when the condition + expression contains variables that are either missing input + values or have not been supported by Analyzer yet. 
+ """ EVALUATION_VALUE_UNSPECIFIED = 0 TRUE = 1 FALSE = 2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index 0d1b9779ac1f..b130ad088483 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -558,7 +558,20 @@ class SuppressionInfo(proto.Message): ``reason``. """ class Reason(proto.Enum): - r"""An indicator of why entries were omitted.""" + r"""An indicator of why entries were omitted. + + Values: + REASON_UNSPECIFIED (0): + Unexpected default. + RATE_LIMIT (1): + Indicates suppression occurred due to relevant entries being + received in excess of rate limits. For quotas and limits, + see `Logging API quotas and + limits `__. + NOT_CONSUMED (2): + Indicates suppression occurred due to the + client not consuming responses quickly enough. + """ REASON_UNSPECIFIED = 0 RATE_LIMIT = 1 NOT_CONSUMED = 2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 4eb3b13afdca..fcd8a6e666c0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -63,7 +63,18 @@ class LifecycleState(proto.Enum): - r"""LogBucket lifecycle states.""" + r"""LogBucket lifecycle states. + + Values: + LIFECYCLE_STATE_UNSPECIFIED (0): + Unspecified state. This is only used/useful + for distinguishing unset values. + ACTIVE (1): + The normal and active state. 
+ DELETE_REQUESTED (2): + The bucket has been marked for deletion by + the user. + """ LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 @@ -295,7 +306,17 @@ class LogSink(proto.Message): This field may not be present for older sinks. """ class VersionFormat(proto.Enum): - r"""Deprecated. This is unused.""" + r"""Deprecated. This is unused. + + Values: + VERSION_FORMAT_UNSPECIFIED (0): + An unspecified format version that will + default to V2. + V2 (1): + ``LogEntry`` version 2 format. + V1 (2): + ``LogEntry`` version 1 format. + """ VERSION_FORMAT_UNSPECIFIED = 0 V2 = 1 V1 = 2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 6b34228d58e5..6baeab1db568 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -164,7 +164,14 @@ class LogMetric(proto.Message): default and cannot be changed. """ class ApiVersion(proto.Enum): - r"""Logging API version.""" + r"""Logging API version. + + Values: + V2 (0): + Logging API v2. + V1 (1): + Logging API v1. + """ V2 = 0 V1 = 1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index c05c497a1995..7d8a289aa841 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -167,7 +167,36 @@ class Instance(proto.Message): not provided, the connect mode defaults to DIRECT_PEERING. 
""" class State(proto.Enum): - r"""Represents the different states of a Redis instance.""" + r"""Represents the different states of a Redis instance. + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Redis instance is being created. + READY (2): + Redis instance has been created and is fully + usable. + UPDATING (3): + Redis instance configuration is being + updated. Certain kinds of updates may cause the + instance to become unusable while the update is + in progress. + DELETING (4): + Redis instance is being deleted. + REPAIRING (5): + Redis instance is being repaired and may be + unusable. + MAINTENANCE (6): + Maintenance is being performed on this Redis + instance. + IMPORTING (8): + Redis instance is importing data + (availability may be affected). + FAILING_OVER (9): + Redis instance is failing over (availability + may be affected). + """ STATE_UNSPECIFIED = 0 CREATING = 1 READY = 2 @@ -179,13 +208,35 @@ class State(proto.Enum): FAILING_OVER = 9 class Tier(proto.Enum): - r"""Available service tiers to choose from""" + r"""Available service tiers to choose from + + Values: + TIER_UNSPECIFIED (0): + Not set. + BASIC (1): + BASIC tier: standalone instance + STANDARD_HA (3): + STANDARD_HA tier: highly available primary/replica instances + """ TIER_UNSPECIFIED = 0 BASIC = 1 STANDARD_HA = 3 class ConnectMode(proto.Enum): - r"""Available connection modes.""" + r"""Available connection modes. + + Values: + CONNECT_MODE_UNSPECIFIED (0): + Not set. + DIRECT_PEERING (1): + Connect via direct peering to the Memorystore + for Redis hosted service. + PRIVATE_SERVICE_ACCESS (2): + Connect your Memorystore for Redis instance + using Private Service Access. Private services + access provides an IP address range for multiple + Google Cloud services, including Memorystore. 
+ """ CONNECT_MODE_UNSPECIFIED = 0 DIRECT_PEERING = 1 PRIVATE_SERVICE_ACCESS = 2 @@ -623,6 +674,20 @@ class FailoverInstanceRequest(proto.Message): class DataProtectionMode(proto.Enum): r"""Specifies different modes of operation in relation to the data retention. + + Values: + DATA_PROTECTION_MODE_UNSPECIFIED (0): + Defaults to LIMITED_DATA_LOSS if a data protection mode is + not specified. + LIMITED_DATA_LOSS (1): + Instance failover will be protected with data + loss control. More specifically, the failover + will only be performed if the current + replication offset diff between master and + replica is under a certain threshold. + FORCE_DATA_LOSS (2): + Instance failover will be performed without + data loss control. """ DATA_PROTECTION_MODE_UNSPECIFIED = 0 LIMITED_DATA_LOSS = 1 From 6e52bd52e3c8aa44c06f6077e8ac543915363071 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 Jan 2023 17:27:30 +0000 Subject: [PATCH 0958/1339] chore(main): release 1.8.1 (#1557) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 62adf6bd2a73..fbd57a242017 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog +## [1.8.1](https://github.com/googleapis/gapic-generator-python/compare/v1.8.0...v1.8.1) (2023-01-19) + + +### Bug Fixes + +* Add context manager return types ([#1468](https://github.com/googleapis/gapic-generator-python/issues/1468)) ([e0b38d3](https://github.com/googleapis/gapic-generator-python/commit/e0b38d35b168dabacfa6d841b29149523e9e34ca)) + + +### Documentation + +* Add documentation for enums 
([#1568](https://github.com/googleapis/gapic-generator-python/issues/1568)) ([69097ac](https://github.com/googleapis/gapic-generator-python/commit/69097acb5d102eb14579d16235a9b6b901ba14a1)) + ## [1.8.0](https://github.com/googleapis/gapic-generator-python/compare/v1.7.1...v1.8.0) (2023-01-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 6aaf0e478d55..0d252032886d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.0" +version = "1.8.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 3db399173dcbf0760f0b3bd99f29d66b06dc07db Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 20 Jan 2023 14:25:02 -0500 Subject: [PATCH 0959/1339] ci: attempt to speed up ci (#1569) * ci: attempt to speed up ci * revert --- .../gapic-generator/.github/workflows/tests.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 9d853ddc1ccf..f5d93d6fe2a8 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -60,7 +60,7 @@ jobs: - name: Install system dependencies. run: | sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc + sudo apt-get install -y curl pandoc unzip - name: Install showcase run: | sudo mkdir -p /usr/src/showcase @@ -105,7 +105,7 @@ jobs: - name: Install system dependencies. run: | sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc + sudo apt-get install -y curl pandoc unzip - name: Install nox. run: python -m pip install nox - name: Install protoc. @@ -144,7 +144,7 @@ jobs: - name: Install system dependencies. 
run: | sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc + sudo apt-get install -y curl pandoc unzip - name: Install protoc. run: | sudo mkdir -p /usr/src/protoc/ @@ -169,7 +169,7 @@ jobs: - name: Install system dependencies. run: | sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc + sudo apt-get install -y curl pandoc unzip - name: Install protoc. run: | sudo mkdir -p /usr/src/protoc/ @@ -197,7 +197,7 @@ jobs: - name: Install system dependencies. run: | sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc + sudo apt-get install -y curl pandoc unzip - name: Install protoc. run: | sudo mkdir -p /usr/src/protoc/ @@ -222,7 +222,7 @@ jobs: - name: Install system dependencies. run: | sudo apt-get update - sudo apt-get install -y curl pandoc unzip gcc + sudo apt-get install -y curl pandoc unzip - name: Install nox. run: python -m pip install nox - name: Check autogenerated snippets. From 2470c2ff6ef50996e0738bd778ceddc83ee20050 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 20 Jan 2023 14:56:20 -0500 Subject: [PATCH 0960/1339] chore: add default description for enums (#1570) --- .../%namespace/%name_%version/%sub/types/_enum.py.j2 | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 index cac30e87bb26..ad761301463b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_enum.py.j2 @@ -4,8 +4,12 @@ class {{ enum.name }}({{ p }}.Enum): Values: {% for enum_value in enum.values %} {{ enum_value.name }} ({{ enum_value.number }}): + {% if enum_value.meta.doc|length > 0 %} {{ enum_value.meta.doc|rst(width=72, indent=12, nl=False) }} - {% endfor %} + {% else %} + No description 
available. + {% endif %} + {%- endfor %} """ {% if enum.enum_pb.HasField("options") %} _pb_options = {{ enum.options_dict }} From 6e8ef10c6d285ce78e8ae09c503827a482791570 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Mon, 23 Jan 2023 08:29:58 -0800 Subject: [PATCH 0961/1339] chore: support url schema in api endpoint (#1566) * chore: support HTTP url schema in api endpoint in snippetgen --- .../configured_snippet.py | 15 ++++- .../test_configured_snippet.py | 67 +++++++++++-------- 2 files changed, 52 insertions(+), 30 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index f2f6affaa822..47e9bb6e821e 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -130,7 +130,7 @@ def api_endpoint(self) -> Optional[str]: if not service_endpoint.host: return None - # GAPIC Python libraries do not require the schema to be specified. + schema = service_endpoint.schema host = service_endpoint.host region = service_endpoint.region port = service_endpoint.port @@ -141,9 +141,18 @@ def api_endpoint(self) -> Optional[str]: host_maybe_with_port = host if region: - return f"{region}-{host_maybe_with_port}" + host_maybe_with_port_and_region = f"{region}-{host_maybe_with_port}" else: - return host_maybe_with_port + host_maybe_with_port_and_region = host_maybe_with_port + + if ( + schema + == snippet_config_language_pb2.Snippet.ClientInitialization.ServiceEndpoint.HTTP + ): + return f"http://{host_maybe_with_port_and_region}" + else: + # Either the default or HTTPS, in which case the schema is not needed. 
+ return host_maybe_with_port_and_region def _append_to_sample_function_def_body( self, statement: libcst.BaseStatement diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index 69fed656cdef..213398bb94cd 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -136,56 +136,69 @@ def test_filename(is_sync, expected): @pytest.mark.parametrize( - "snippet_config_dict,expected", + "custom_service_endpoint_dict,expected", [ - ({}, None), ( { - "snippet": { - "serviceClientInitialization": { - "customServiceEndpoint": { - "host": "speech.googleapis.com", - } - }, - } + "region": "us", + "port": 123, + "schema": "HTTP", + }, + None, # host is missing. + ), + ( + { + "host": "speech.googleapis.com", }, "speech.googleapis.com", ), ( { - "snippet": { - "serviceClientInitialization": { - "customServiceEndpoint": { - "host": "speech.googleapis.com", - "region": "us", - } - }, - } + "host": "speech.googleapis.com", + "region": "us", }, "us-speech.googleapis.com", ), ( { - "snippet": { - "serviceClientInitialization": { - "customServiceEndpoint": { - "host": "speech.googleapis.com", - "region": "us", - "port": 123, - } - }, - } + "host": "speech.googleapis.com", + "region": "us", + "port": 123, }, "us-speech.googleapis.com:123", ), + ( + { + "host": "speech.googleapis.com", + "region": "us", + "port": 123, + "schema": "HTTP", + }, + "http://us-speech.googleapis.com:123", + ), + ( + { + "host": "speech.googleapis.com", + "schema": "HTTPS", + }, + "speech.googleapis.com", + ), ], ) -def test_api_endpoint(snippet_config_dict, expected): +def test_api_endpoint(custom_service_endpoint_dict, expected): # api_schema, api_version, and is_sync do not matter here. 
api_schema = _load_api_schema(SPEECH_V1_REQUEST_PATH) api_version = "v1" is_sync = True + snippet_config_dict = { + "snippet": { + "serviceClientInitialization": { + "customServiceEndpoint": custom_service_endpoint_dict + }, + } + } + snippet_config = json_format.ParseDict( snippet_config_dict, snippet_config_language_pb2.SnippetConfig() ) From 0dabd4e99de9ce3533592bfa858d5df946eaebd9 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 23 Jan 2023 18:46:42 +0000 Subject: [PATCH 0962/1339] chore(deps): update all dependencies (#1571) * chore(deps): update all dependencies * revert Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ddc0eedaf439..72fe299f1c33 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,11 +2,11 @@ click==8.1.3 google-api-core==2.11.0 googleapis-common-protos==1.58.0 jinja2==3.1.2 -MarkupSafe==2.1.1 +MarkupSafe==2.1.2 protobuf==4.21.12 pypandoc==1.10 PyYAML==6.0 -setuptools==66.0.0 +setuptools==66.1.1 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 From 4c097b5bc3952c0b343a8c31df501c8a7f2d7d1c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 23 Jan 2023 15:51:17 -0500 Subject: [PATCH 0963/1339] fix: use gapic_version from versioned module (#1573) * fix: use gapic_version from versioned module * omit gapic_version.py from coverage * update golden files --- .../templates/%namespace/%name_%version/%sub/__init__.py.j2 | 2 +- packages/gapic-generator/gapic/templates/.coveragerc.j2 | 1 + .../gapic-generator/tests/integration/goldens/asset/.coveragerc | 1 + .../integration/goldens/asset/google/cloud/asset_v1/__init__.py | 2 +- .../tests/integration/goldens/credentials/.coveragerc | 1 + .../goldens/credentials/google/iam/credentials_v1/__init__.py | 2 +- 
.../tests/integration/goldens/eventarc/.coveragerc | 1 + .../goldens/eventarc/google/cloud/eventarc_v1/__init__.py | 2 +- .../tests/integration/goldens/logging/.coveragerc | 1 + .../goldens/logging/google/cloud/logging_v2/__init__.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/.coveragerc | 1 + .../integration/goldens/redis/google/cloud/redis_v1/__init__.py | 2 +- 12 files changed, 12 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index c2aefdd7805f..8ee16704313e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -2,7 +2,7 @@ {% block content %} -{% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.module_name %} +{% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} from {{package_path}} import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/gapic/templates/.coveragerc.j2 b/packages/gapic-generator/gapic/templates/.coveragerc.j2 index cabb94bb9cf1..3a41208b3f9b 100644 --- a/packages/gapic-generator/gapic/templates/.coveragerc.j2 +++ b/packages/gapic-generator/gapic/templates/.coveragerc.j2 @@ -5,6 +5,7 @@ branch = True show_missing = True omit = {{ api.naming.module_namespace|join("/") }}/{{ api.naming.module_name }}/__init__.py + {{ api.naming.module_namespace|join("/") }}/{{ api.naming.module_name }}/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc index f5ee43d5b135..801f6d8a1da7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/asset/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/asset/__init__.py + google/cloud/asset/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 98cbb2d2eed1..efe650f89632 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.asset import gapic_version as package_version +from google.cloud.asset_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc index 0f45bde5469b..2071975b8211 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/credentials/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/iam/credentials/__init__.py + google/iam/credentials/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 7ed4efa745e8..79bfe836a877 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.iam.credentials import gapic_version as package_version +from google.iam.credentials_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc index 04f855c37f0e..a0c8f972f201 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/eventarc/__init__.py + google/cloud/eventarc/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 407198da720e..ae4e2a1cba77 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.eventarc import gapic_version as package_version +from google.cloud.eventarc_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc index c0f6e82dff6e..5ae4d709b8ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/logging/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/logging/__init__.py + google/cloud/logging/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 3d439a7a298d..5c5e6494844f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.logging import gapic_version as package_version +from google.cloud.logging_v2 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc index d36404f34db3..5aa9171cc8a2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc +++ b/packages/gapic-generator/tests/integration/goldens/redis/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/redis/__init__.py + google/cloud/redis/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index de56a8982f36..4cc6efcb549e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.cloud.redis import gapic_version as package_version +from google.cloud.redis_v1 import gapic_version as package_version __version__ = package_version.__version__ From bbc028cc53f4bc21d3a62718208b06ed72abced8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 21:19:43 +0000 Subject: [PATCH 0964/1339] chore(main): release 1.8.2 (#1574) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index fbd57a242017..fae91795e756 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.8.2](https://github.com/googleapis/gapic-generator-python/compare/v1.8.1...v1.8.2) (2023-01-23) + + +### Bug Fixes + +* Use gapic_version from versioned module ([#1573](https://github.com/googleapis/gapic-generator-python/issues/1573)) ([dfba51a](https://github.com/googleapis/gapic-generator-python/commit/dfba51a587072a6624496d93e8ff2b5e105284f9)) + ## [1.8.1](https://github.com/googleapis/gapic-generator-python/compare/v1.8.0...v1.8.1) (2023-01-19) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0d252032886d..87a7db869151 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.1" +version = "1.8.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From d5ed1f1668c30e2883914ac12d70b01d12824ce3 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 3 Feb 2023 
12:59:52 -0500 Subject: [PATCH 0965/1339] fix: raise not implemented error when REST transport is not supported (#1578) * fix: use not implemented error when REST transport is not supported * lint * revert local changes * update error message and combine not implemented errors * fix typo * fix typo * update ads templates * formatting --- .../services/%service/transports/rest.py.j2 | 17 ++++------------- .../%name_%version/%sub/test_%service.py.j2 | 18 +++++++++++------- .../services/%service/transports/rest.py.j2 | 18 ++++-------------- .../%name_%version/%sub/test_%service.py.j2 | 8 +++++--- .../gapic/%name_%version/%sub/test_macros.j2 | 10 ++++++---- .../logging_service_v2/transports/rest.py | 4 ++-- 6 files changed, 32 insertions(+), 43 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index ec4246a784b4..bf8946c5292a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -396,21 +396,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): return resp {% endif %}{# method.void #} - {% else %}{# method.http_options and not method.client_streaming #} - {% if not method.http_options %} - raise RuntimeError( - "Cannot define a method without a valid 'google.api.http' annotation.") - - {% elif method.client_streaming %} + {% else %}{# method.http_options and not method.client_streaming #} raise NotImplementedError( - "Client streaming over REST is not yet defined for python client") - - {% else %} - raise NotImplementedError() - - {% endif %}{# method.http_options #} + "Method {{ method.name }} is not available over REST transport" + ) - {%- endif %}{# 
unary method #} + {% endif %}{# method.http_options and not method.client_streaming #} {% endfor %} {% for method in service.methods.values()|sort(attribute="name") %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3c95d6296627..a025cc586e55 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1558,12 +1558,14 @@ def test_{{ method_name }}_rest_error(): {%- if not method.http_options %} # Since a `google.api.http` annotation is required for using a rest transport # method, this should error. - with pytest.raises(RuntimeError) as runtime_error: + with pytest.raises(NotImplementedError) as not_implemented_error: client.{{ method_name }}({}) - assert ('Cannot define a method without a valid `google.api.http` annotation.' - in str(runtime_error.value)) + assert ( + "Method {{ method.name }} is not available over REST transport" + in str(not_implemented_error.value) + ) - {%- endif %} + {%- endif %}{# not method.http_options #} {% endif %}{# flattened_fields #} {% else %}{# this is an lro or streaming method #} @@ -1608,10 +1610,12 @@ def test_{{ method_name }}_rest_error(): ) # Since a `google.api.http` annotation is required for using a rest transport # method, this should error. - with pytest.raises(RuntimeError) as runtime_error: + with pytest.raises(NotImplementedError) as not_implemented_error: client.{{ method_name }}({}) - assert ("Cannot define a method without a valid 'google.api.http' annotation." 
- in str(runtime_error.value)) + assert ( + "Method {{ method.name }} is not available over REST transport" + in str(not_implemented_error.value) + ) {% endwith %}{# method_name #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index c2cbbe889fa2..8d3661c0fcd1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -426,21 +426,11 @@ class {{service.name}}RestTransport({{service.name}}Transport): return resp {% endif %}{# method.void #} - {% else %}{# method.http_options and not method.client_streaming #} - {% if not method.http_options %} - raise RuntimeError( - "Cannot define a method without a valid 'google.api.http' annotation.") - - {% elif method.client_streaming %} + {% else %}{# method.http_options and not method.client_streaming #} raise NotImplementedError( - "Client streaming over REST is not yet defined for python client") - - {% else %} - raise NotImplementedError() - - {% endif %}{# method.http_options #} - - {%- endif %}{# unary method #} + "Method {{ method.name }} is not available over REST transport" + ) + {% endif %}{# method.http_options and not method.client_streaming #} {% endfor %} {% for method in service.methods.values()|sort(attribute="name") %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 0b2c3b373694..68508abc9205 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -642,10 +642,12 @@ def test_{{ method_name }}_rest_error(): ) # Since a `google.api.http` annotation is required for using a rest transport # method, this should error. - with pytest.raises(RuntimeError) as runtime_error: + with pytest.raises(NotImplementedError) as not_implemented_error: client.{{ method_name }}({}) - assert ("Cannot define a method without a valid 'google.api.http' annotation." - in str(runtime_error.value)) + assert ( + "Method {{ method.name }} is not available over REST transport" + in str(not_implemented_error.value) + ) {% endwith %}{# method_name #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 6a898e47e825..6d6245d63499 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1446,12 +1446,14 @@ def test_{{ method_name }}_rest_error(): {%- if not method.http_options %} # Since a `google.api.http` annotation is required for using a rest transport # method, this should error. - with pytest.raises(RuntimeError) as runtime_error: + with pytest.raises(NotImplementedError) as not_implemented_error: client.{{ method_name }}({}) - assert ('Cannot define a method without a valid `google.api.http` annotation.' 
- in str(runtime_error.value)) + assert ( + "Method {{ method.name }} is not available over REST transport" + in str(not_implemented_error.value) + ) - {%- endif %} + {%- endif %}{# not method.http_options #} {% endif %}{# flattened_fields #} {% else %}{# this is an lro or streaming method #} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index 38ad43e2b104..6dca3bcbb91e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -622,8 +622,8 @@ def __call__(self, metadata: Sequence[Tuple[str, str]]=(), ) -> rest_streaming.ResponseIterator: raise NotImplementedError( - "Client streaming over REST is not yet defined for python client") - + "Method TailLogEntries is not available over REST transport" + ) class _WriteLogEntries(LoggingServiceV2RestStub): def __hash__(self): return hash("WriteLogEntries") From 01984437dbfaa48be7c5810f57fab099f5dd7614 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Fri, 3 Feb 2023 11:50:13 -0800 Subject: [PATCH 0966/1339] chore: prepare to implement snippetgen for standard call (#1572) * chore: prepare to implement request initialization Co-authored-by: Anthonios Partheniou --- .../configured_snippet.py | 62 ++++++++++++++----- .../test_configured_snippet.py | 41 +++++++++++- 2 files changed, 85 insertions(+), 18 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index 47e9bb6e821e..6f13043b8fe6 100644 --- 
a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -13,7 +13,7 @@ # limitations under the License. import dataclasses -from typing import Optional +from typing import List, Optional import inflection import libcst @@ -154,19 +154,29 @@ def api_endpoint(self) -> Optional[str]: # Either the default or HTTPS, in which case the schema is not needed. return host_maybe_with_port_and_region - def _append_to_sample_function_def_body( - self, statement: libcst.BaseStatement + def _extend_sample_function_def_body( + self, statements: List[libcst.BaseStatement] ) -> None: - """Appends the statement node to the current sample function def.""" - transformer = _AppendToSampleFunctionBody(statement) + """Appends the statements to the current sample function def.""" + for statement in statements: + transformer = _AppendToSampleFunctionBody(statement) - # The result of applying a transformer could be of a different type - # in general, but we will only update the sample function def here. - self._sample_function_def = self._sample_function_def.visit( - transformer - ) # type: ignore + # The result of applying a transformer could be of a different type + # in general, but we will only update the sample function def here. + self._sample_function_def = self._sample_function_def.visit( + transformer + ) # type: ignore def _add_sample_function_parameters(self) -> None: + """Adds sample function parameters. + + Before: + def sample_create_custom_class_Basic(): + ... + After: + def sample_create_custom_class_Basic(parent = "projects/..."): + ... + """ # TODO: https://github.com/googleapis/gapic-generator-python/issues/1537, add typing annotation in sample function parameters. 
params = [] for config_parameter in self.config.signature.parameters: @@ -176,7 +186,14 @@ def _add_sample_function_parameters(self) -> None: params=parameters ) - def _get_service_client_initialization(self) -> libcst.BaseStatement: + def _get_service_client_initialization(self) -> List[libcst.BaseStatement]: + """Returns the service client initialization statements. + + Examples: + client = speech_v1.AdaptationClient() + + client = speech_v1.AdaptationClient(client_options = {"api_endpoint": "us-speech.googleapis.com"}) + """ if self.api_endpoint is not None: client_options_arg = libcst.Arg( keyword=libcst.Name("client_options"), @@ -193,16 +210,29 @@ def _get_service_client_initialization(self) -> libcst.BaseStatement: f"client = {self.gapic_module_name}.{self.client_class_name}()" ) - return service_client_initialization + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, support pre_client_initialization statements. + return [service_client_initialization] + + def _get_standard_call(self) -> List[libcst.BaseStatement]: + """Returns the standard call statements.""" + # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, support standard call statements. + return [] + + def _get_call(self) -> List[libcst.BaseStatement]: + """Returns the snippet call statements.""" + call_type = self.config.snippet.WhichOneof("call") + if call_type == "standard": + return self._get_standard_call() + else: + raise ValueError(f"Snippet call type {call_type} not supported.") def _build_sample_function(self) -> None: # TODO: https://github.com/googleapis/gapic-generator-python/issues/1536, add return type. # TODO: https://github.com/googleapis/gapic-generator-python/issues/1538, add docstring. - # TODO: https://github.com/googleapis/gapic-generator-python/issues/1539, add sample function body. 
self._add_sample_function_parameters() - self._append_to_sample_function_def_body( - self._get_service_client_initialization() - ) + self._extend_sample_function_def_body( + self._get_service_client_initialization()) + self._extend_sample_function_def_body(self._get_call()) def _add_sample_function(self) -> None: self._module = self._module.with_changes( diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index 213398bb94cd..6ba39640462b 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -91,6 +91,16 @@ def snippet_without_endpoint(): return snippet +@pytest.fixture +def snippet_bidi_streaming(): + snippet = _make_configured_snippet() + snippet.config.snippet.ClearField("standard") + snippet.config.snippet.bidi_streaming.CopyFrom( + snippet_config_language_pb2.Snippet.BidiStreaming() + ) + return snippet + + def test_gapic_module_name(snippet): assert snippet.gapic_module_name == "speech_v1" @@ -202,14 +212,36 @@ def test_api_endpoint(custom_service_endpoint_dict, expected): snippet_config = json_format.ParseDict( snippet_config_dict, snippet_config_language_pb2.SnippetConfig() ) - snippet = configured_snippet.ConfiguredSnippet( api_schema, snippet_config, api_version, is_sync ) - assert snippet.api_endpoint == expected +@pytest.mark.parametrize( + "is_sync,expected", + [ + (True, "speech_v1_generated_Adaptation_create_custom_class_Basic_sync.py"), + (False, "speech_v1_generated_Adaptation_create_custom_class_Basic_async.py"), + ], +) +def test_filename(is_sync, expected): + snippet = _make_configured_snippet(is_sync=is_sync) + assert snippet.filename == expected + + +def test_AppendToSampleFunctionBody(): + # Start with a function def with nonempty body to we can be sure the + # 
transformer appends the statement. + function_def = libcst.parse_statement("def f():\n 'hello'") + statement = libcst.parse_statement("'world'") + transformer = configured_snippet._AppendToSampleFunctionBody(statement) + updated_function_def = function_def.visit(transformer) + expected_function_def = libcst.parse_statement( + "def f():\n 'hello'\n 'world'") + assert updated_function_def.deep_equals(expected_function_def) + + def test_AppendToSampleFunctionBody(): # Start with a function def with nonempty body to we can be sure the # transformer appends the statement. @@ -244,3 +276,8 @@ def test_code_without_endpoint(snippet_without_endpoint): client = speech_v1.AdaptationClient() """ assert snippet_without_endpoint.code == expected_code + + +def test_generate_should_raise_error_if_unsupported(snippet_bidi_streaming): + with pytest.raises(ValueError): + snippet_bidi_streaming.generate() From aaa4b71ddfc5daffd2dc6f4c59b192d9ca1386e7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 6 Feb 2023 10:57:13 +0000 Subject: [PATCH 0967/1339] chore(deps): update all dependencies (#1580) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 72fe299f1c33..42141f9a588d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,7 +6,7 @@ MarkupSafe==2.1.2 protobuf==4.21.12 pypandoc==1.10 PyYAML==6.0 -setuptools==66.1.1 +setuptools==67.1.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 From 4ea91a8f4c30c44cd1064d4580ef51170c55d786 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Feb 2023 13:47:52 -0500 Subject: [PATCH 0968/1339] chore(main): release 1.8.3 (#1579) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index fae91795e756..1083f6527ffc 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.8.3](https://github.com/googleapis/gapic-generator-python/compare/v1.8.2...v1.8.3) (2023-02-06) + + +### Bug Fixes + +* Raise not implemented error when REST transport is not supported ([#1578](https://github.com/googleapis/gapic-generator-python/issues/1578)) ([af6e77c](https://github.com/googleapis/gapic-generator-python/commit/af6e77c3dacaef039b1eb07ea3c9b348074d5448)) + ## [1.8.2](https://github.com/googleapis/gapic-generator-python/compare/v1.8.1...v1.8.2) (2023-01-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 87a7db869151..abbc07f10300 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.2" +version = "1.8.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 954a3240d806e78ce205e6178ead34f2a36ee33c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 6 Feb 2023 19:13:55 -0500 Subject: [PATCH 0969/1339] fix: use protobuf 3.21.12 in bazel build rules (#1584) --- packages/gapic-generator/BUILD.bazel | 8 ++++---- packages/gapic-generator/repositories.bzl | 6 +++--- .../cloud/redis_v1/services/cloud_redis/async_client.py | 3 --- .../google/cloud/redis_v1/services/cloud_redis/client.py | 3 --- 4 files changed, 7 insertions(+), 13 deletions(-) diff --git 
a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 2a0a225e3316..8d98b4264d93 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -9,16 +9,16 @@ toolchain_type( pandoc_toolchain( exec_compatible_with = [ - "@bazel_tools//platforms:linux", - "@bazel_tools//platforms:x86_64", + "@platforms//os:linux", + "@platforms//cpu:x86_64", ], platform = "linux", ) pandoc_toolchain( exec_compatible_with = [ - "@bazel_tools//platforms:osx", - "@bazel_tools//platforms:x86_64", + "@platforms//os:osx", + "@platforms//cpu:x86_64", ], platform = "macOS", ) diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index c374fc751888..772ad5400ee9 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -15,14 +15,14 @@ def gapic_generator_python(): requirements = "@gapic_generator_python//:requirements.txt", ) - _protobuf_version = "3.19.2" - _protobuf_sha256 = "9ceef0daf7e8be16cd99ac759271eb08021b53b1c7b6edd399953a76390234cd" + _protobuf_version = "3.21.12" + _protobuf_sha256 = "930c2c3b5ecc6c9c12615cf5ad93f1cd6e12d0aba862b572e076259970ac3a53" _protobuf_version_in_link = "v{}".format(_protobuf_version) _maybe( http_archive, name = "com_google_protobuf", sha256 = _protobuf_sha256, - url = "https://github.com/protocolbuffers/protobuf/archive/refs/tags/{}.zip".format(_protobuf_version_in_link), + url = "https://github.com/protocolbuffers/protobuf/archive/refs/tags/{}.tar.gz".format(_protobuf_version_in_link), strip_prefix = "protobuf-{}".format(_protobuf_version), ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index f49d7d0f9402..eb6b14d8b3e1 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1348,9 +1348,6 @@ async def sample_delete_instance(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 8b2f05fcd84f..66bca156fd72 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1542,9 +1542,6 @@ def sample_delete_instance(): } - The JSON representation for Empty is empty JSON - object {}. - """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have From 7ae3f64fc9d5a60b51a628eefd88fa95efd43d64 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 7 Feb 2023 06:25:53 -0500 Subject: [PATCH 0970/1339] fix: fix install issue for packages without google namespace (#1576) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 6 ++---- .../tests/integration/goldens/asset/setup.py | 4 +--- .../tests/integration/goldens/credentials/setup.py | 4 +--- .../tests/integration/goldens/eventarc/setup.py | 4 +--- .../tests/integration/goldens/logging/setup.py | 4 +--- .../tests/integration/goldens/redis/setup.py | 4 +--- 6 files changed, 7 insertions(+), 19 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index af3ff4f668ba..4e39c141b473 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -50,12 +50,10 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: packages = [ package for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") + if package.startswith("{{ api.naming.namespace_packages|first }}") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = [{% for namespace_packages in api.naming.namespace_packages %}"{{ namespace_packages }}"{% if not loop.last %}, {% endif %}{% endfor %}] setuptools.setup( name=name, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 08b8055cfaa7..fb0f7e544c1a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -56,9 +56,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = 
["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index fa13d9d813a4..6f128c3927cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.iam"] setuptools.setup( name=name, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 5f0d060219b5..718ff3fd68cd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 1dadcc698a12..034ef836e59f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 8ca327d9dc95..08dd761fb213 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -55,9 +55,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, From b815fbe352b0ea4239f967bcd58ace83b5249564 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 7 Feb 2023 07:18:14 -0500 Subject: [PATCH 0971/1339] fix: fix generated unit tests with repeated double (#1582) * chore: fix generated unit tests that fail with repeated double * apply fix in ads templates --- .../%name_%version/%sub/test_%service.py.j2 | 18 +++++++++ .../gapic/%name_%version/%sub/test_macros.j2 | 27 +++++++++++++ .../fragments/test_repeated_double.proto | 38 +++++++++++++++++++ 3 files changed, 83 insertions(+) create mode 100644 packages/gapic-generator/tests/fragments/test_repeated_double.proto diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a025cc586e55..254fa5e801b6 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -587,7 +587,16 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% for field in method.output.fields.values() | rejectattr('message') %} {% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %}{# 
field.repeated #} {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} {% else %} @@ -1076,7 +1085,16 @@ def test_{{ method.name|snake_case }}_rest(request_type): {% for field in method.output.fields.values() | rejectattr('message') %} {% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %} {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} {% else %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 6d6245d63499..eeda2a3863c0 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -76,7 +76,16 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% for field in method_output.fields.values() | rejectattr('message') %} {% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %}{# field.repeated #} {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} 
assert response.{{ field.name }} is {{ field.mock_value }} {% else %} @@ -187,7 +196,16 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ {% for field in method_output.fields.values() | rejectattr('message') %} {% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %}{# field.repeated #} {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} {% else %} @@ -958,7 +976,16 @@ def test_{{ method_name }}_rest(request_type): {% for field in method_output.fields.values() | rejectattr('message') %} {% if not field.oneof or field.proto3_optional %} {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %}{# field.repeated #} {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} assert response.{{ field.name }} is {{ field.mock_value }} {% else %} diff --git a/packages/gapic-generator/tests/fragments/test_repeated_double.proto b/packages/gapic-generator/tests/fragments/test_repeated_double.proto new file mode 100644 index 000000000000..4deed3271958 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_repeated_double.proto @@ -0,0 +1,38 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may 
not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/protobuf/struct.proto"; +import "google/api/client.proto"; + +service MyServiceRepeatedDouble { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequestWithRepeatedDouble) returns (MethodResponseWithRepeatedDouble) { + option (google.api.method_signature) = "parameter,items"; + } +} + +message MethodRequestWithRepeatedDouble { + google.protobuf.Value parameter = 1; + repeated google.protobuf.Value items = 2; + repeated double repeated_items = 3; +} + +message MethodResponseWithRepeatedDouble { + repeated double result = 1; +} From d3aa6ead5c7301391b7fe454753243af8f9395b9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Feb 2023 10:26:57 -0500 Subject: [PATCH 0972/1339] chore(main): release 1.8.4 (#1591) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1083f6527ffc..3562eee3e67b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.8.4](https://github.com/googleapis/gapic-generator-python/compare/v1.8.3...v1.8.4) (2023-02-07) + + +### Bug Fixes + +* Fix generated unit 
tests with repeated double ([#1582](https://github.com/googleapis/gapic-generator-python/issues/1582)) ([4c7dd53](https://github.com/googleapis/gapic-generator-python/commit/4c7dd53c9507f37b6c885c2b65b4f2cb98aad5fe)) +* Fix install issue for packages without google namespace ([#1576](https://github.com/googleapis/gapic-generator-python/issues/1576)) ([862cc64](https://github.com/googleapis/gapic-generator-python/commit/862cc646c4e6d50654201fe2d7c48d5ca5bf427b)) +* Use protobuf 3.21.12 in bazel build rules ([#1584](https://github.com/googleapis/gapic-generator-python/issues/1584)) ([550a5f1](https://github.com/googleapis/gapic-generator-python/commit/550a5f1b1841dd90c143466a42058b6f3c583dd2)) + ## [1.8.3](https://github.com/googleapis/gapic-generator-python/compare/v1.8.2...v1.8.3) (2023-02-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index abbc07f10300..633f6ba2bab4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.3" +version = "1.8.4" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 451940cb0dbb6c059370cc46c5b659814ee78553 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 10:16:56 -0500 Subject: [PATCH 0973/1339] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#1594) Source-Link: https://github.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt 
| 49 +++++++++---------- 2 files changed, 24 insertions(+), 29 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index fccaa8e84449..894fb6bc9b47 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 05dc4672edaa..096e4800a9ac 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - 
--hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + 
--hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From ca2f20afcc636eee6a851e5f80578c1e1d32034b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 16 Feb 2023 11:57:24 +0000 Subject: [PATCH 0974/1339] chore(deps): update all dependencies (#1595) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 42141f9a588d..025828805574 100644 --- 
a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,7 +6,7 @@ MarkupSafe==2.1.2 protobuf==4.21.12 pypandoc==1.10 PyYAML==6.0 -setuptools==67.1.0 +setuptools==67.3.2 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 From f1ba1717fb095cafbd7af9535f4e7553a0415f0c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 17 Feb 2023 09:17:53 -0500 Subject: [PATCH 0975/1339] fix: fix mypy errors in rest.py (#1599) * fix: run mypy on golden files * fix mypy error * update golden files * use type Any * update goldens * fix test * fix mypy error in types * remove typing Any * Revert "remove typing Any" This reverts commit 2f074630b7d892e103c6598bf0bb7b68c86ced53. --- .../.github/workflows/tests.yaml | 10 ++-- .../services/%service/transports/rest.py.j2 | 4 +- .../%name/%version/%sub/types/%proto.py.j2 | 2 +- .../services/%service/transports/rest.py.j2 | 4 +- .../%name_%version/%sub/types/%proto.py.j2 | 2 +- .../services/asset_service/transports/rest.py | 26 +++++----- .../iam_credentials/transports/rest.py | 10 ++-- .../credentials_v1/types/iamcredentials.py | 1 + .../services/eventarc/transports/rest.py | 12 ++--- .../config_service_v2/transports/rest.py | 48 +++++++++---------- .../logging_service_v2/transports/rest.py | 10 ++-- .../metrics_service_v2/transports/rest.py | 12 ++--- .../services/cloud_redis/transports/rest.py | 20 ++++---- 13 files changed, 81 insertions(+), 80 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index f5d93d6fe2a8..775861986c1b 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -313,11 +313,11 @@ jobs: python -m pip install nox - name: Run blacken and lint on the generated output. 
run: | - nox -f tests/integration/goldens/asset/noxfile.py -s blacken lint_setup_py lint - nox -f tests/integration/goldens/credentials/noxfile.py -s blacken lint_setup_py lint - nox -f tests/integration/goldens/eventarc/noxfile.py -s blacken lint_setup_py lint - nox -f tests/integration/goldens/logging/noxfile.py -s blacken lint_setup_py lint - nox -f tests/integration/goldens/redis/noxfile.py -s blacken lint_setup_py lint + nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.11 blacken lint_setup_py lint + nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.11 blacken lint_setup_py lint + nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.11 blacken lint_setup_py lint + nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.11 blacken lint_setup_py lint + nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.11 blacken lint_setup_py lint style-check: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index bf8946c5292a..5c2d102f48b7 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -22,7 +22,7 @@ from google.api_core import operations_v1 from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -269,7 +269,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.client_streaming %} {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { {% for req_field in method.input.required_fields if req_field.name in method.query_params %} "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 index 81a0755195ba..8418d42959bf 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 @@ -6,8 +6,8 @@ {% if proto.messages|length or proto.all_enums|length %} from typing import MutableMapping, MutableSequence -import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} +import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% filter sort_lines %} {% for import_ in proto.python_modules %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 8d3661c0fcd1..7d8e72846e19 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -32,7 +32,7 @@ from google.longrunning import operations_pb2 from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, 
Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -299,7 +299,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% if not method.client_streaming %} {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { {% for req_field in method.input.required_fields if req_field.name in method.query_params %} "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 index f5aebf7364fb..1d94e5a135ba 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 @@ -6,8 +6,8 @@ {% if proto.messages|length or proto.all_enums|length %} from typing import MutableMapping, MutableSequence -import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% endif %} +import proto{% if p != 'proto' %} as {{ p }}{% endif %} # type: ignore {% filter sort_lines %} {% for import_ in proto.python_modules %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 529d6614a351..ef684a1424b3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -31,7 +31,7 @@ from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -481,7 +481,7 @@ class _AnalyzeIamPolicy(AssetServiceRestStub): def __hash__(self): return hash("AnalyzeIamPolicy") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "analysisQuery" : {}, } @classmethod @@ -561,7 +561,7 @@ class _AnalyzeIamPolicyLongrunning(AssetServiceRestStub): def __hash__(self): return hash("AnalyzeIamPolicyLongrunning") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -650,7 +650,7 @@ class _BatchGetAssetsHistory(AssetServiceRestStub): def __hash__(self): return hash("BatchGetAssetsHistory") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -726,7 +726,7 @@ class _CreateFeed(AssetServiceRestStub): def __hash__(self): return hash("CreateFeed") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -818,7 +818,7 @@ class _DeleteFeed(AssetServiceRestStub): def __hash__(self): return hash("DeleteFeed") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -882,7 +882,7 @@ class _ExportAssets(AssetServiceRestStub): def __hash__(self): return hash("ExportAssets") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -968,7 +968,7 @@ class _GetFeed(AssetServiceRestStub): def __hash__(self): return hash("GetFeed") - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1051,7 +1051,7 @@ class _ListAssets(AssetServiceRestStub): def __hash__(self): return hash("ListAssets") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1127,7 +1127,7 @@ class _ListFeeds(AssetServiceRestStub): def __hash__(self): return hash("ListFeeds") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1203,7 +1203,7 @@ class _SearchAllIamPolicies(AssetServiceRestStub): def __hash__(self): return hash("SearchAllIamPolicies") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1279,7 +1279,7 @@ class _SearchAllResources(AssetServiceRestStub): def __hash__(self): return hash("SearchAllResources") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1355,7 +1355,7 @@ class _UpdateFeed(AssetServiceRestStub): def __hash__(self): return hash("UpdateFeed") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 6bb11e7d4845..32eff78924f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -30,7 +30,7 @@ from requests import __version__ as requests_version 
import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -281,7 +281,7 @@ class _GenerateAccessToken(IAMCredentialsRestStub): def __hash__(self): return hash("GenerateAccessToken") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -366,7 +366,7 @@ class _GenerateIdToken(IAMCredentialsRestStub): def __hash__(self): return hash("GenerateIdToken") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -451,7 +451,7 @@ class _SignBlob(IAMCredentialsRestStub): def __hash__(self): return hash("SignBlob") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -536,7 +536,7 @@ class _SignJwt(IAMCredentialsRestStub): def __hash__(self): return hash("SignJwt") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index 3cdecabdc9d8..ade7a6e16e9d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import proto # type: ignore __protobuf__ = proto.module( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index d107065ba70b..956df809561f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -31,7 +31,7 @@ from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -327,7 +327,7 @@ class _CreateTrigger(EventarcRestStub): def __hash__(self): return hash("CreateTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "triggerId" : "", "validateOnly" : False, } @classmethod @@ -415,7 +415,7 @@ class _DeleteTrigger(EventarcRestStub): def __hash__(self): return hash("DeleteTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "validateOnly" : False, } @classmethod @@ -494,7 +494,7 @@ class _GetTrigger(EventarcRestStub): def __hash__(self): return hash("GetTrigger") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -574,7 +574,7 @@ class _ListTriggers(EventarcRestStub): def __hash__(self): return hash("ListTriggers") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -654,7 +654,7 @@ class _UpdateTrigger(EventarcRestStub): def __hash__(self): return hash("UpdateTrigger") - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "validateOnly" : False, } @classmethod diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py index d414a966e501..9fb4a17a303a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -30,7 +30,7 @@ from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -669,7 +669,7 @@ class _CreateBucket(ConfigServiceV2RestStub): def __hash__(self): return hash("CreateBucket") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "bucketId" : "", } @classmethod @@ -774,7 +774,7 @@ class _CreateExclusion(ConfigServiceV2RestStub): def __hash__(self): return hash("CreateExclusion") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -890,7 +890,7 @@ class _CreateSink(ConfigServiceV2RestStub): def __hash__(self): return hash("CreateSink") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1004,7 +1004,7 @@ class _CreateView(ConfigServiceV2RestStub): def __hash__(self): return hash("CreateView") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "viewId" : "", } @classmethod @@ 
-1111,7 +1111,7 @@ class _DeleteBucket(ConfigServiceV2RestStub): def __hash__(self): return hash("DeleteBucket") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1191,7 +1191,7 @@ class _DeleteExclusion(ConfigServiceV2RestStub): def __hash__(self): return hash("DeleteExclusion") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1271,7 +1271,7 @@ class _DeleteSink(ConfigServiceV2RestStub): def __hash__(self): return hash("DeleteSink") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1351,7 +1351,7 @@ class _DeleteView(ConfigServiceV2RestStub): def __hash__(self): return hash("DeleteView") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1431,7 +1431,7 @@ class _GetBucket(ConfigServiceV2RestStub): def __hash__(self): return hash("GetBucket") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1523,7 +1523,7 @@ class _GetCmekSettings(ConfigServiceV2RestStub): def __hash__(self): return hash("GetCmekSettings") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1621,7 +1621,7 @@ class _GetExclusion(ConfigServiceV2RestStub): def __hash__(self): return hash("GetExclusion") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1724,7 +1724,7 @@ class _GetSink(ConfigServiceV2RestStub): def __hash__(self): return hash("GetSink") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1825,7 +1825,7 @@ class _GetView(ConfigServiceV2RestStub): def __hash__(self): 
return hash("GetView") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1919,7 +1919,7 @@ class _ListBuckets(ConfigServiceV2RestStub): def __hash__(self): return hash("ListBuckets") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2011,7 +2011,7 @@ class _ListExclusions(ConfigServiceV2RestStub): def __hash__(self): return hash("ListExclusions") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2103,7 +2103,7 @@ class _ListSinks(ConfigServiceV2RestStub): def __hash__(self): return hash("ListSinks") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2195,7 +2195,7 @@ class _ListViews(ConfigServiceV2RestStub): def __hash__(self): return hash("ListViews") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2287,7 +2287,7 @@ class _UndeleteBucket(ConfigServiceV2RestStub): def __hash__(self): return hash("UndeleteBucket") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2380,7 +2380,7 @@ class _UpdateBucket(ConfigServiceV2RestStub): def __hash__(self): return hash("UpdateBucket") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask" : {}, } @classmethod @@ -2485,7 +2485,7 @@ class _UpdateCmekSettings(ConfigServiceV2RestStub): def __hash__(self): return hash("UpdateCmekSettings") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2593,7 +2593,7 @@ class _UpdateExclusion(ConfigServiceV2RestStub): def __hash__(self): return hash("UpdateExclusion") - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask" : {}, } @classmethod @@ -2709,7 +2709,7 @@ class _UpdateSink(ConfigServiceV2RestStub): def __hash__(self): return hash("UpdateSink") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -2843,7 +2843,7 @@ class _UpdateView(ConfigServiceV2RestStub): def __hash__(self): return hash("UpdateView") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index 6dca3bcbb91e..309e5179fc04 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -30,7 +30,7 @@ from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -285,7 +285,7 @@ class _DeleteLog(LoggingServiceV2RestStub): def __hash__(self): return hash("DeleteLog") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -365,7 +365,7 @@ class _ListLogEntries(LoggingServiceV2RestStub): def __hash__(self): return hash("ListLogEntries") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -450,7 +450,7 @@ class 
_ListLogs(LoggingServiceV2RestStub): def __hash__(self): return hash("ListLogs") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -628,7 +628,7 @@ class _WriteLogEntries(LoggingServiceV2RestStub): def __hash__(self): return hash("WriteLogEntries") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py index a7d2de532b86..059ddf19d45d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -30,7 +30,7 @@ from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -285,7 +285,7 @@ class _CreateLogMetric(MetricsServiceV2RestStub): def __hash__(self): return hash("CreateLogMetric") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -381,7 +381,7 @@ class _DeleteLogMetric(MetricsServiceV2RestStub): def __hash__(self): return hash("DeleteLogMetric") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -445,7 +445,7 @@ class _GetLogMetric(MetricsServiceV2RestStub): def __hash__(self): return hash("GetLogMetric") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -532,7 +532,7 @@ class _ListLogMetrics(MetricsServiceV2RestStub): def __hash__(self): return hash("ListLogMetrics") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -608,7 +608,7 @@ class _UpdateLogMetric(MetricsServiceV2RestStub): def __hash__(self): return hash("UpdateLogMetric") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index c9bda09ab8ba..e27a5b53743d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -31,7 +31,7 @@ from requests import __version__ as requests_version import dataclasses import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: @@ -440,7 +440,7 @@ class _CreateInstance(CloudRedisRestStub): def __hash__(self): return hash("CreateInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "instanceId" : "", } @classmethod @@ -528,7 +528,7 @@ class _DeleteInstance(CloudRedisRestStub): def __hash__(self): return hash("DeleteInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -607,7 +607,7 @@ class _ExportInstance(CloudRedisRestStub): def __hash__(self): return hash("ExportInstance") - 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -695,7 +695,7 @@ class _FailoverInstance(CloudRedisRestStub): def __hash__(self): return hash("FailoverInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -783,7 +783,7 @@ class _GetInstance(CloudRedisRestStub): def __hash__(self): return hash("GetInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -861,7 +861,7 @@ class _ImportInstance(CloudRedisRestStub): def __hash__(self): return hash("ImportInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -949,7 +949,7 @@ class _ListInstances(CloudRedisRestStub): def __hash__(self): return hash("ListInstances") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod @@ -1029,7 +1029,7 @@ class _UpdateInstance(CloudRedisRestStub): def __hash__(self): return hash("UpdateInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { "updateMask" : {}, } @classmethod @@ -1117,7 +1117,7 @@ class _UpgradeInstance(CloudRedisRestStub): def __hash__(self): return hash("UpgradeInstance") - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @classmethod From ee89e277d713cf4cc42631a2611ce7df554cc803 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 21 Feb 2023 14:59:16 -0500 Subject: [PATCH 0976/1339] fix: fix mypy error with rest interceptors (#1603) --- .../%sub/services/%service/transports/rest.py.j2 | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 7d8e72846e19..95ce8eec6c8a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -121,7 +121,9 @@ class {{ service.name }}RestInterceptor: {% endfor %} {% for name, signature in api.mixin_api_signatures.items() %} - def pre_{{ name|snake_case }}(self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]]) -> {{signature.response_type}}: + def pre_{{ name|snake_case }}( + self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for {{ name|snake_case }} Override in a subclass to manipulate the request or metadata @@ -129,7 +131,9 @@ class {{ service.name }}RestInterceptor: """ return request, metadata - def post_{{ name|snake_case }}(self, response: {{signature.request_type}}) -> {{signature.response_type}}: + def post_{{ name|snake_case }}( + self, response: {{signature.response_type}} + ) -> {{signature.response_type}}: """Post-rpc interceptor for {{ name|snake_case }} Override in a subclass to manipulate the response From 935434b14839591eaff2e5aeaf1a9bff97f437dc Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Thu, 23 Feb 2023 12:30:29 -0800 Subject: [PATCH 0977/1339] fix: Resolve errors from annotating containers with non-local enums (#1608) The key was `from __future__ import annotations`, implementing PEP 563 (https://www.python.org/dev/peps/pep-0563/), which, though approved in 2017, is now planned for inclusion in Python 3.11 (see this Python.org announcement: 
https://mail.python.org/archives/list/python-dev@python.org/thread/CLVXXPQ2T2LQ5MP2Y53VVQFCXYWQJHKZ/)/ This fixes https://github.com/googleapis/gapic-generator-python/issues/1607. --- .../%name/%version/%sub/types/%proto.py.j2 | 2 + .../%name_%version/%sub/types/%proto.py.j2 | 2 + .../fragments/google/type/dayofweek.proto | 50 +++++++++++ .../fragments/test_enum_indexed_types.proto | 86 +++++++++++++++++++ .../test_enum_indexed_types_nonlocal.proto | 54 ++++++++++++ .../fragments/test_repeated_double.proto | 2 +- .../cloud/asset_v1/types/asset_service.py | 2 + .../google/cloud/asset_v1/types/assets.py | 2 + .../asset/testing/constraints-3.12.txt | 0 .../google/iam/credentials_v1/types/common.py | 2 + .../credentials/testing/constraints-3.12.txt | 0 .../cloud/eventarc_v1/types/eventarc.py | 2 + .../google/cloud/eventarc_v1/types/trigger.py | 2 + .../eventarc/testing/constraints-3.12.txt | 0 .../cloud/logging_v2/types/log_entry.py | 2 + .../google/cloud/logging_v2/types/logging.py | 2 + .../cloud/logging_v2/types/logging_config.py | 2 + .../cloud/logging_v2/types/logging_metrics.py | 2 + .../logging/testing/constraints-3.12.txt | 0 .../cloud/redis_v1/types/cloud_redis.py | 2 + .../redis/testing/constraints-3.12.txt | 0 21 files changed, 215 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/tests/fragments/google/type/dayofweek.proto create mode 100644 packages/gapic-generator/tests/fragments/test_enum_indexed_types.proto create mode 100644 packages/gapic-generator/tests/fragments/test_enum_indexed_types_nonlocal.proto mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt mode change 100644 => 100755 
packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt mode change 100644 => 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 index 8418d42959bf..579739beb9d0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/%proto.py.j2 @@ -4,6 +4,8 @@ {% with p = proto.disambiguate('proto') %} {% if proto.messages|length or proto.all_enums|length %} +from __future__ import annotations + from typing import MutableMapping, MutableSequence {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 index 1d94e5a135ba..01f529129346 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/%proto.py.j2 @@ -4,6 +4,8 @@ {% with p = proto.disambiguate('proto') %} {% if proto.messages|length or proto.all_enums|length %} +from __future__ import annotations + from typing import MutableMapping, MutableSequence {% endif %} diff --git a/packages/gapic-generator/tests/fragments/google/type/dayofweek.proto b/packages/gapic-generator/tests/fragments/google/type/dayofweek.proto new file mode 100644 index 000000000000..f56677dc112a --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/type/dayofweek.proto @@ -0,0 +1,50 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.type; + +option go_package = "google.golang.org/genproto/googleapis/type/dayofweek;dayofweek"; +option java_multiple_files = true; +option java_outer_classname = "DayOfWeekProto"; +option java_package = "com.google.type"; +option objc_class_prefix = "GTP"; + +// Represents a day of the week. +enum DayOfWeek { + // The day of the week is unspecified. + DAY_OF_WEEK_UNSPECIFIED = 0; + + // Monday + MONDAY = 1; + + // Tuesday + TUESDAY = 2; + + // Wednesday + WEDNESDAY = 3; + + // Thursday + THURSDAY = 4; + + // Friday + FRIDAY = 5; + + // Saturday + SATURDAY = 6; + + // Sunday + SUNDAY = 7; +} diff --git a/packages/gapic-generator/tests/fragments/test_enum_indexed_types.proto b/packages/gapic-generator/tests/fragments/test_enum_indexed_types.proto new file mode 100644 index 000000000000..3fb60b5e1377 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_enum_indexed_types.proto @@ -0,0 +1,86 @@ +// Copyright (C) 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/type/dayofweek.proto"; + +service EnumIndexedTypes { + option (google.api.default_host) = "my.example.com"; + + rpc MethodWithRepeatedEnums(MethodRequestWithRepeatedEnums) returns (MethodResponseWithRepeatedEnums) { + } +} + +// Represents a day of the week. Copied from "google/type/dayofweek.proto" +enum Weekday { + // The day of the week is unspecified. + DAY_OF_WEEK_UNSPECIFIED = 0; + + // Monday + MONDAY = 1; + + // Tuesday + TUESDAY = 2; + + // Wednesday + WEDNESDAY = 3; + + // Thursday + THURSDAY = 4; + + // Friday + FRIDAY = 5; + + // Saturday + SATURDAY = 6; + + // Sunday + SUNDAY = 7; +} + + +message MethodRequestWithRepeatedEnums { + // Without proper handling, the two fields below referencing + // google.type.DayOfWeek trigger errors like: + // + // TypeError: Parameters to generic types must be types. + // Got + // + // Interestingly, the fields referencing the Weekday type defined in this file + // do not trigger that error. + // + // The generated code needs to either quote the type, or `from __future__ + // import annotations`. This may be solved by PEP 563, currently scheduled for + // Python 3.11 + // (cf. https://mail.python.org/archives/list/python-dev@python.org/message/CLVXXPQ2T2LQ5MP2Y53VVQFCXYWQJHKZ/) + + repeated Weekday which_days = 1; + repeated google.type.DayOfWeek holidays = 2; + map string_days = 3; + map string_holidays = 4; +} + +message MethodResponseWithRepeatedEnums { + // The commentary in MethodRequestWithRepeatedEnums describes what is tested + // below. 
+ + repeated Weekday which_days = 1; + repeated google.type.DayOfWeek holidays = 2; + map string_days = 3; + map string_holidays = 4; +} diff --git a/packages/gapic-generator/tests/fragments/test_enum_indexed_types_nonlocal.proto b/packages/gapic-generator/tests/fragments/test_enum_indexed_types_nonlocal.proto new file mode 100644 index 000000000000..09af4b4abf2e --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_enum_indexed_types_nonlocal.proto @@ -0,0 +1,54 @@ +// Copyright (C) 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/type/dayofweek.proto"; + +// This test differs from the one implemented by service EnumIndexedTypes in +// that this file does not define any enum types, which may lead to a +// slightly distinct template generation path. +service EnumIndexedTypesNonLocal { + option (google.api.default_host) = "my.example.com"; + + rpc MethodWithRepeatedEnums(MethodRequestWithRepeatedEnums) returns (MethodResponseWithRepeatedEnums) { + } +} + +message MethodRequestWithRepeatedEnums { + // Without proper handling, the two fields below referencing + // google.type.DayOfWeek trigger errors like: + // + // TypeError: Parameters to generic types must be types. + // Got + // + // The generated code needs to either quote the type, or `from __future__ + // import annotations`. 
This may be solved by PEP 563, currently scheduled for + // Python 3.11 + // (cf. https://mail.python.org/archives/list/python-dev@python.org/message/CLVXXPQ2T2LQ5MP2Y53VVQFCXYWQJHKZ/) + + repeated google.type.DayOfWeek holidays = 2; + map string_holidays = 4; +} + +message MethodResponseWithRepeatedEnums { + // The commentary in MethodRequestWithRepeatedEnums describes what is tested + // below. + + repeated google.type.DayOfWeek holidays = 2; + map string_holidays = 4; +} diff --git a/packages/gapic-generator/tests/fragments/test_repeated_double.proto b/packages/gapic-generator/tests/fragments/test_repeated_double.proto index 4deed3271958..63db0371b63e 100644 --- a/packages/gapic-generator/tests/fragments/test_repeated_double.proto +++ b/packages/gapic-generator/tests/fragments/test_repeated_double.proto @@ -1,4 +1,4 @@ -// Copyright (C) 2021 Google LLC +// Copyright (C) 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 78ce93397f43..d010a66232df 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index b14b05ed75d7..84e819663539 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index e7c9076d39bd..f7a056998add 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index 9a1cc43bb4a4..b6ea5be2623d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 77b1cb69c83d..6d9658ec6c40 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index a0a5cc98a931..e5f3dcf7d6d2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index b130ad088483..39904f8f8a03 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index fcd8a6e666c0..e32b8e0d3c48 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 6baeab1db568..738077d4401b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt old mode 100644 new mode 100755 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 7d8a289aa841..d045c7196340 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt old mode 100644 new mode 100755 From 2c9b4b9533e91a8e300ea65c33cb7bc21be3e9c7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 23 Feb 2023 14:21:33 -0800 Subject: [PATCH 0978/1339] chore(main): release 1.8.5 (#1602) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3562eee3e67b..ac26df93abb6 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # 
Changelog +## [1.8.5](https://github.com/googleapis/gapic-generator-python/compare/v1.8.4...v1.8.5) (2023-02-23) + + +### Bug Fixes + +* Fix mypy error with rest interceptors ([#1603](https://github.com/googleapis/gapic-generator-python/issues/1603)) ([c36876f](https://github.com/googleapis/gapic-generator-python/commit/c36876f65cc6105cb38282ca1a8680f9d220d0bd)) +* Fix mypy errors in rest.py ([#1599](https://github.com/googleapis/gapic-generator-python/issues/1599)) ([120f19e](https://github.com/googleapis/gapic-generator-python/commit/120f19eef34ca3d722f079683222be99931363da)) +* Resolve errors from annotating containers with non-local enums ([#1608](https://github.com/googleapis/gapic-generator-python/issues/1608)) ([73652e3](https://github.com/googleapis/gapic-generator-python/commit/73652e3b88fe30f2fca21a900b8eacbf8822d2ce)) + ## [1.8.4](https://github.com/googleapis/gapic-generator-python/compare/v1.8.3...v1.8.4) (2023-02-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 633f6ba2bab4..2e943cd88dfd 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.4" +version = "1.8.5" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From aa9d3da9a6443319ae5289040b924340b7eb6912 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 12:18:21 -0500 Subject: [PATCH 0979/1339] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#1612) Source-Link: https://github.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 Co-authored-by: Owl Bot 
--- packages/gapic-generator/.github/.OwlBot.lock.yaml | 2 +- packages/gapic-generator/.kokoro/requirements.in | 2 +- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 894fb6bc9b47..5fc5daa31783 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/packages/gapic-generator/.kokoro/requirements.in b/packages/gapic-generator/.kokoro/requirements.in index cbd7e77f44db..882178ce6001 100644 --- a/packages/gapic-generator/.kokoro/requirements.in +++ b/packages/gapic-generator/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 096e4800a9ac..fa99c12908f0 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + 
--hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From 4a04bb439f2be32b46d98d8ed4489bfb26f3b461 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 28 Feb 2023 20:37:24 +0000 Subject: [PATCH 0980/1339] chore(deps): update all dependencies (#1606) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 025828805574..505a03449364 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,10 +3,10 @@ google-api-core==2.11.0 googleapis-common-protos==1.58.0 jinja2==3.1.2 MarkupSafe==2.1.2 -protobuf==4.21.12 +protobuf==4.22.0 pypandoc==1.10 PyYAML==6.0 -setuptools==67.3.2 +setuptools==67.4.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 From 32bfcdb07e749f480142ecc6f9894b01f9e067ad Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 6 Mar 2023 11:47:49 +0000 Subject: [PATCH 0981/1339] chore(deps): update all dependencies (#1614) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 505a03449364..f4502b511e2e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,9 +4,9 @@ googleapis-common-protos==1.58.0 jinja2==3.1.2 MarkupSafe==2.1.2 protobuf==4.22.0 -pypandoc==1.10 +pypandoc==1.11 PyYAML==6.0 
-setuptools==67.4.0 +setuptools==67.5.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 From cca6aa45417187590be4a3f383424091ca0498aa Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Tue, 14 Mar 2023 04:29:36 -0700 Subject: [PATCH 0982/1339] fix: ignore_unknown_fields parsing service config (#1618) * fix: ignore_unknown_fields parsing service config * oops capital True * reformat args Co-authored-by: Alexander Fenster --------- Co-authored-by: Alexander Fenster --- packages/gapic-generator/gapic/schema/api.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index b9af9e348ab4..9b5c29439d3b 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -341,7 +341,11 @@ def disambiguate_keyword_sanitize_fname( # Parse the google.api.Service proto from the service_yaml data. service_yaml_config = service_pb2.Service() - ParseDict(opts.service_yaml_config, service_yaml_config) + ParseDict( + opts.service_yaml_config, + service_yaml_config, + ignore_unknown_fields=True + ) # Done; return the API. 
return cls(naming=naming, From aa8b0d448e120c819a2ae15c86562b28f344c299 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 14 Mar 2023 14:41:13 +0000 Subject: [PATCH 0983/1339] chore(deps): update all dependencies (#1619) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f4502b511e2e..ea8ef8900097 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,10 +3,10 @@ google-api-core==2.11.0 googleapis-common-protos==1.58.0 jinja2==3.1.2 MarkupSafe==2.1.2 -protobuf==4.22.0 +protobuf==4.22.1 pypandoc==1.11 PyYAML==6.0 -setuptools==67.5.0 +setuptools==67.6.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.20.3 From e05d88c115816ffb5d924c99a4032cec998813de Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 15 Mar 2023 17:28:11 +0000 Subject: [PATCH 0984/1339] chore(main): release 1.8.6 (#1621) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ac26df93abb6..c9afdd4026d4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.8.6](https://github.com/googleapis/gapic-generator-python/compare/v1.8.5...v1.8.6) (2023-03-14) + + +### Bug Fixes + +* Ignore_unknown_fields parsing service config ([#1618](https://github.com/googleapis/gapic-generator-python/issues/1618)) 
([de6386f](https://github.com/googleapis/gapic-generator-python/commit/de6386fc0115e5a05bfc25b9a06e22eeccc091ce)) + ## [1.8.5](https://github.com/googleapis/gapic-generator-python/compare/v1.8.4...v1.8.5) (2023-02-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2e943cd88dfd..67b82194db25 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.5" +version = "1.8.6" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 3e5f87c1b6867aa33c4e637d921b96c97ee80794 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 08:00:02 -0400 Subject: [PATCH 0985/1339] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#1622) Source-Link: https://github.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 2 +- packages/gapic-generator/.kokoro/requirements.in | 2 +- packages/gapic-generator/.kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 5fc5daa31783..b8edda51cf46 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/packages/gapic-generator/.kokoro/requirements.in b/packages/gapic-generator/.kokoro/requirements.in index 882178ce6001..ec867d9fd65a 100644 --- a/packages/gapic-generator/.kokoro/requirements.in +++ b/packages/gapic-generator/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index fa99c12908f0..66a2172a76a8 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ + --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - 
--hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via nox pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 412cc667e952abb0dea42c60b8037dc3636779b1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 17 Mar 2023 06:57:27 -0400 Subject: [PATCH 0986/1339] feat: add generator option proto-plus-dep (#1596) * feat: add generator option proto-plus-dep * fix mypy * fix style * update goldens * fix tests * clean up * clean up * add support for dependency google-cloud-kms * Address review comments * add missing file * run formatting tool * style * address review comments * formatting * address review feedback * style * add pypi package google-geo-type * bump google-cloud-documentai dependency to 2.0.0 * address review comment * add docstring * address review comment * fix docs --- .../services/%service/transports/rest.py.j2 | 12 +- .../%name_%version/%sub/test_%service.py.j2 | 24 +- .../gapic-generator/gapic/schema/metadata.py | 49 ++- .../gapic-generator/gapic/schema/naming.py | 6 + .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../services/%service/transports/rest.py.j2 | 12 +- .../gapic/templates/_pypi_packages.j2 | 15 + .../gapic/templates/setup.py.j2 | 13 +- .../templates/testing/_default_constraints.j2 | 13 +- .../templates/testing/constraints-3.7.txt.j2 | 11 +- .../gapic/%name_%version/%sub/test_macros.j2 | 36 +-- .../gapic-generator/gapic/utils/options.py | 9 + .../tests/integration/goldens/asset/setup.py | 2 +- .../tests/unit/generator/test_options.py | 28 ++ .../tests/unit/schema/test_api.py | 283 ++++++++++++++++-- .../tests/unit/schema/test_naming.py | 11 + 16 files changed, 432 insertions(+), 94 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/_pypi_packages.j2 diff --git 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 5c2d102f48b7..e10fa77c2945 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -322,10 +322,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %}{# rule in method.http_options #} ] request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - {% if method.input.ident.is_external_type %} - pb_request = request - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_request = {{method.input.ident}}.pb(request) + {% else %} + pb_request = request {% endif %} transcoded_request = path_template.transcode(http_options, pb_request) @@ -384,10 +384,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): resp = rest_streaming.ResponseIterator(response, {{method.output.ident}}) {% else %} resp = {{method.output.ident}}() - {% if method.output.ident.is_external_type %} - pb_resp = resp - {% else %} + {% if method.output.ident.is_proto_plus_type %} pb_resp = {{method.output.ident}}.pb(resp) + {% else %} + pb_resp = resp {% endif %} json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 254fa5e801b6..c174d37e8849 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1118,10 +1118,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %}{# default is str #} {% endfor %} request = request_type(**request_init) - {% if method.input.ident.is_external_type %} - pb_request = request - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_request = request_type.pb(request) + {% else %} + pb_request = request {% endif %} jsonified_request = json.loads(json_format.MessageToJson( pb_request, @@ -1189,10 +1189,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide with mock.patch.object(path_template, 'transcode') as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - {% if method.input.ident.is_external_type %} - pb_request = request - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_request = request_type.pb(request) + {% else %} + pb_request = request {% endif %} transcode_result = { 'uri': 'v1/sample_method', @@ -1212,10 +1212,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide json_return_value = json_format.MessageToJson(return_value) {% else %} - {% if method.output.ident.is_external_type %} - pb_return_value = return_value - {% else %} + {% if method.output.ident.is_proto_plus_type %} pb_return_value = {{ method.output.ident }}.pb(return_value) + {% else %} + pb_return_value = return_value {% endif %} json_return_value = json_format.MessageToJson(pb_return_value) {% endif %} @@ -1287,10 +1287,10 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): {% if not method.void %} post.assert_not_called() {% endif %} - {% if method.input.ident.is_external_type %} - pb_message = {{ method.input.ident }}() - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_message = {{ method.input.ident }}.pb({{ 
method.input.ident }}()) + {% else %} + pb_message = {{ method.input.ident }}() {% endif %} transcode.return_value = { "method": "post", diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 8dc4d5dc67f8..f4ec88e7cba6 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -27,6 +27,7 @@ """ import dataclasses +import re from typing import FrozenSet, Tuple, Optional from google.protobuf import descriptor_pb2 @@ -89,10 +90,8 @@ def __str__(self) -> str: if self.module_alias: module_name = self.module_alias - # This module is from a different proto package - # Most commonly happens for a common proto - # https://pypi.org/project/googleapis-common-protos/ - if self.is_external_type: + # Add _pb2 suffix except when it is a proto-plus type + if not self.is_proto_plus_type: module_name = f'{self.module}_pb2' # Return the dot-separated Python identifier. @@ -103,8 +102,23 @@ def __str__(self) -> str: return '.'.join(self.parent + (self.name,)) @property - def is_external_type(self): - return not self.proto_package.startswith(self.api_naming.proto_package) + def is_proto_plus_type(self) -> bool: + """This function is used to determine whether a given package `self.proto_package` + is using proto-plus types or protobuf types. There are 2 scenarios where the package + is expected to use proto-plus types: + 1) When `self.proto_package` starts with `self.api_naming.proto_package`, then + the given package has the same namespace as the one that is being generated. It is assumed + that the gapic generator always generates packages with proto-plus types. + 2) When `self.proto_package` is explicitly in `self.api_naming.proto_plus_deps` which is + populated via the generator option `proto-plus-deps`. + + Returns: + bool: Whether the given package uses proto-plus types or not. 
+ """ + return self.proto_package.startswith(self.api_naming.proto_package) or ( + hasattr(self.api_naming, "proto_plus_deps") + and self.proto_package in self.api_naming.proto_plus_deps + ) @cached_property def __cached_string_repr(self): @@ -188,6 +202,29 @@ def python_import(self) -> imp.Import: alias=self.module_alias, ) + if self.is_proto_plus_type: + # We need to change the import statement to use an + # underscore between the module and the version. For example, + # change google.cloud.documentai.v1 to google.cloud.documentai_v1. + # Check if the package name contains a version. + version_regex = "^v\d[^/]*$" + regex_match = re.match(version_regex, self.package[-1]) + + if regex_match and len(self.package) > 1: + versioned_module = f"{self.package[-2]}_{regex_match[0]}" + return imp.Import( + package=self.package[:-2] + + (versioned_module, 'types'), + module=self.module, + alias=self.module_alias, + ) + else: + return imp.Import( + package=self.package + ('types',), + module=self.module, + alias=self.module_alias, + ) + # Return the standard import. return imp.Import( package=self.package, diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index fa6000f88a67..1d055490ff4a 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -43,6 +43,7 @@ class Naming(abc.ABC): product_name: str = '' proto_package: str = '' _warehouse_package_name: str = '' + proto_plus_deps: Tuple[str, ...] = dataclasses.field(default_factory=tuple) def __post_init__(self): if not self.product_name: @@ -146,6 +147,11 @@ def build( package_info = dataclasses.replace(package_info, _warehouse_package_name=opts.warehouse_package_name ) + if opts.proto_plus_deps: + package_info = dataclasses.replace( + package_info, + proto_plus_deps=opts.proto_plus_deps, + ) # Done; return the naming information. 
return package_info diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index b32af50b9467..50127732aa5d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -77,7 +77,7 @@ def __hash__(self): def name(self) -> str: """Used to prevent collisions with python keywords""" name = self.field_pb.name - return name + "_" if name in utils.RESERVED_NAMES and not self.meta.address.is_external_type else name + return name + "_" if name in utils.RESERVED_NAMES and self.meta.address.is_proto_plus_type else name @utils.cached_property def ident(self) -> metadata.FieldIdentifier: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 95ce8eec6c8a..566b93ccda47 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -356,10 +356,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %}{# rule in method.http_options #} ] request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - {% if method.input.ident.is_external_type %} - pb_request = request - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_request = {{method.input.ident}}.pb(request) + {% else %} + pb_request = request {% endif %} transcoded_request = path_template.transcode(http_options, pb_request) @@ -418,10 +418,10 @@ class {{service.name}}RestTransport({{service.name}}Transport): resp = rest_streaming.ResponseIterator(response, {{method.output.ident}}) {% else %} resp = {{method.output.ident}}() - {% if method.output.ident.is_external_type %} - 
pb_resp = resp - {% else %} + {% if method.output.ident.is_proto_plus_type %} pb_resp = {{method.output.ident}}.pb(resp) + {% else %} + pb_resp = resp {% endif %} json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 new file mode 100644 index 000000000000..af25d77b3d55 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -0,0 +1,15 @@ + +{% set pypi_packages = { + ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, + ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, + ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "3.0.0dev"}, + ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "3.0.0dev"}, + ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"} +} +%} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 4e39c141b473..72b5c44a8781 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -1,5 +1,5 @@ {% extends '_base.py.j2' %} - +{% from '_pypi_packages.j2' import pypi_packages %} {% block content %} import io @@ -31,13 +31,14 @@ dependencies = [ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - {# TODO: Remove after https://github.com/googleapis/gapic-generator-python/pull/1240 is merged. 
#} - {% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} - 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', + {% for package_tuple, package_info in pypi_packages.items() %} + {# Quick check to make sure the package is different from this setup.py #} + {% if api.naming.warehouse_package_name != package_info.package_name %} + {% if api.requires_package(package_tuple) %} + "{{ package_info.package_name }} >= {{ package_info.lower_bound }}, <{{ package_info.upper_bound }}", {% endif %} - {% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} - 'google-cloud-documentai >= 1.2.1, < 3.0.0dev', {% endif %} + {% endfor %} ] url = "https://github.com/googleapis/python-{{ api.naming.warehouse_package_name|replace("google-cloud-", "")|replace("google-", "") }}" diff --git a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 index fe0a64d4b542..6a62cfc52d5e 100644 --- a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 +++ b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 @@ -1,11 +1,14 @@ +{% from '_pypi_packages.j2' import pypi_packages %} # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core proto-plus protobuf -{% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} -grpc-google-iam-v1 +{% for package_tuple, package_info in pypi_packages.items() %} +{# Quick check to make sure the package is different from this setup.py #} +{% if api.naming.warehouse_package_name != package_info.package_name %} +{% if api.requires_package(package_tuple) %} +{{ package_info.package_name }} {% endif %} -{% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} -google-cloud-documentai -{% endif %} \ No newline at end of file +{% endif %} +{% endfor %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 73c04e56c368..ae6c992c406c 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -1,3 +1,4 @@ +{% from '_pypi_packages.j2' import pypi_packages %} # This constraints file is used to check that lower bounds # are correct in setup.py # List all library dependencies and extras in this file. 
@@ -7,9 +8,11 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 -{% if api.requires_package(('google', 'iam', 'v1')) or opts.add_iam_methods or api.has_iam_mixin %} -grpc-google-iam-v1==0.12.4 +{% for package_tuple, package_info in pypi_packages.items() %} +{# Quick check to make sure the package is different from this setup.py #} +{% if api.naming.warehouse_package_name != package_info.package_name %} +{% if api.requires_package(package_tuple) %} +{{ package_info.package_name }}=={{ package_info.lower_bound }} {% endif %} -{% if api.requires_package(('google', 'cloud', 'documentai', 'v1')) %} -google-cloud-documentai==1.2.1 {% endif %} +{% endfor %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index eeda2a3863c0..8c10abdf2082 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -929,10 +929,10 @@ def test_{{ method_name }}_rest(request_type): {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) {% else %} - {% if method.output.ident.is_external_type %} - pb_return_value = return_value - {% else %} + {% if method.output.ident.is_proto_plus_type %} pb_return_value = {{ method.output.ident }}.pb(return_value) + {% else %} + pb_return_value = return_value {% endif %} json_return_value = json_format.MessageToJson(pb_return_value) {% endif %} @@ -1009,10 +1009,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %}{# default is str #} {% endfor %} request = request_type(**request_init) - {% if method.input.ident.is_external_type %} - pb_request = request - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_request = request_type.pb(request) + {% else %} + pb_request = 
request {% endif %} jsonified_request = json.loads(json_format.MessageToJson( pb_request, @@ -1080,10 +1080,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide with mock.patch.object(path_template, 'transcode') as transcode: # A uri without fields and an empty body will force all the # request fields to show up in the query_params. - {% if method.input.ident.is_external_type %} - pb_request = request - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_request = request_type.pb(request) + {% else %} + pb_request = request {% endif %} transcode_result = { 'uri': 'v1/sample_method', @@ -1103,10 +1103,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide json_return_value = json_format.MessageToJson(return_value) {% else %} - {% if method.output.ident.is_external_type %} - pb_return_value = return_value - {% else %} + {% if method.output.ident.is_proto_plus_type %} pb_return_value = {{ method.output.ident }}.pb(return_value) + {% else %} + pb_return_value = return_value {% endif %} json_return_value = json_format.MessageToJson(pb_return_value) {% endif %} @@ -1178,10 +1178,10 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): {% if not method.void %} post.assert_not_called() {% endif %} - {% if method.input.ident.is_external_type %} - pb_message = {{ method.input.ident }}() - {% else %} + {% if method.input.ident.is_proto_plus_type %} pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) + {% else %} + pb_message = {{ method.input.ident }}() {% endif %} transcode.return_value = { "method": "post", @@ -1295,10 +1295,10 @@ def test_{{ method_name }}_rest_flattened(): {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) {% else %} - {% if method.output.ident.is_external_type %} - pb_return_value = return_value - {% else %} + {% if method.output.ident.is_proto_plus_type %} pb_return_value = {{ method.output.ident }}.pb(return_value) + 
{% else %} + pb_return_value = return_value {% endif %} json_return_value = json_format.MessageToJson(pb_return_value) {% endif %} diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index 1785e93a55e9..daa77ff6b59f 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -49,6 +49,7 @@ class Options: service_yaml_config: Dict[str, Any] = dataclasses.field( default_factory=dict) rest_numeric_enums: bool = False + proto_plus_deps: Tuple[str, ...] = dataclasses.field(default=('',)) # Class constants PYTHON_GAPIC_PREFIX: str = 'python-gapic-' @@ -66,6 +67,9 @@ class Options: 'warehouse-package-name', # change the package name on PyPI # when transport includes "rest", request that response enums be JSON-encoded as numbers 'rest-numeric-enums', + # proto plus dependencies delineated by '+' + # For example, 'google.cloud.api.v1+google.cloud.anotherapi.v2' + 'proto-plus-deps', )) @classmethod @@ -161,6 +165,10 @@ def tweak_path(p): if old_naming: autogen_snippets = False + proto_plus_deps = tuple(opts.pop('proto-plus-deps', '')) + if len(proto_plus_deps): + proto_plus_deps = tuple(proto_plus_deps[0].split('+')) + answer = Options( name=opts.pop('name', ['']).pop(), namespace=tuple(opts.pop('namespace', [])), @@ -182,6 +190,7 @@ def tweak_path(p): transport=opts.pop('transport', ['grpc'])[0].split('+'), service_yaml_config=service_yaml_config, rest_numeric_enums=bool(opts.pop('rest-numeric-enums', False)), + proto_plus_deps=proto_plus_deps, ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index fb0f7e544c1a..ccbd7491c3ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -40,7 +40,7 @@ 
"proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - 'grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev', + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/python-asset" diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 91da98de5188..6716e21fe6a0 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -223,3 +223,31 @@ def test_options_autogen_snippets_false_for_old_naming(): # Even if autogen-snippets is set to True, do not enable snippetgen options = Options.build("old-naming,autogen-snippets=True") assert not options.autogen_snippets + + +def test_options_proto_plus_deps(): + opts = Options.build("proto-plus-deps=") + assert opts.proto_plus_deps == ('',) + + opts = Options.build("proto-plus-deps=google.apps.script.type.calendar") + assert opts.proto_plus_deps == ('google.apps.script.type.calendar',) + + opts = Options.build( + "proto-plus-deps=\ +google.apps.script.type.calendar+\ +google.apps.script.type.docs+\ +google.apps.script.type.drive+\ +google.apps.script.type.gmail+\ +google.apps.script.type.sheets+\ +google.apps.script.type.slides+\ +google.apps.script.type" + ) + assert opts.proto_plus_deps == ( + "google.apps.script.type.calendar", + "google.apps.script.type.docs", + "google.apps.script.type.drive", + "google.apps.script.type.gmail", + "google.apps.script.type.sheets", + "google.apps.script.type.slides", + "google.apps.script.type" + ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 607d4858bbf4..d5d3ca4e65e3 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ 
b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -109,6 +109,7 @@ def test_api_build(): ) assert api_schema.requires_package(('google', 'example', 'v1')) + assert not api_schema.requires_package(('elgoog', 'example', 'v1')) # Establish that the subpackages work. @@ -718,70 +719,294 @@ def test_undefined_type(): def test_python_modules_nested(): fd = ( make_file_pb2( - name='dep.proto', - package='google.dep', - messages=(make_message_pb2(name='ImportedMessage', fields=()),), + name="dep.proto", + package="google.dep", + messages=(make_message_pb2(name="ImportedMessage", fields=()),), ), make_file_pb2( - name='common.proto', - package='google.example.v1.common', - messages=(make_message_pb2(name='Bar'),), + name="baa.proto", + package="google.baa", + messages=(make_message_pb2(name="ImportedMessageBaa", fields=()),), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="bab.v1.proto", + package="google.bab.v1", + messages=(make_message_pb2(name="ImportedMessageBab", fields=()),), + ), + make_file_pb2( + name="bac.v10.proto", + package="google.bac.v10", + messages=(make_message_pb2(name="ImportedMessageBac", fields=()),), + ), + make_file_pb2( + name="bad.v2beta.proto", + package="google.bad.v2beta", + messages=(make_message_pb2(name="ImportedMessageBad", fields=()),), + ), + make_file_pb2( + name="bae.v2beta20.proto", + package="google.bae.v2beta20", + messages=(make_message_pb2(name="ImportedMessageBae", fields=()),), + ), + make_file_pb2( + name="baf.v20beta.proto", + package="google.baf.v20beta", + messages=(make_message_pb2(name="ImportedMessageBaf", fields=()),), + ), + make_file_pb2( + name="bag.v20p1.proto", + package="google.bag.v20p1", + messages=(make_message_pb2(name="ImportedMessageBag", fields=()),), + ), + make_file_pb2( + name="bah.v20p1alpha3p5.proto", + package="google.bah.v20p1alpha3p5", + messages=(make_message_pb2(name="ImportedMessageBah", fields=()),), + ), + make_file_pb2( + name="bah.v20p1.bai.proto", + 
package="google.bah.v20p1.bai", + messages=(make_message_pb2(name="ImportedMessageBai", fields=()),), + ), + make_file_pb2( + name="bah.v20p1.baj.v3.proto", + package="google.bah.v20p1.baj.v3", + messages=(make_message_pb2(name="ImportedMessageBaj", fields=()),), + ), + make_file_pb2( + name="common.proto", + package="google.example.v1.common", + messages=(make_message_pb2(name="Bar"),), + ), + make_file_pb2( + name="foo.proto", + package="google.example.v1", messages=( make_message_pb2( - name='GetFooRequest', + name="GetFooRequest", fields=( - make_field_pb2(name='primitive', number=2, type=1), + make_field_pb2(name="primitive", number=2, type=1), make_field_pb2( - name='foo', + name="foo", number=3, type=1, - type_name='.google.example.v1.GetFooRequest.Foo', + type_name=".google.example.v1.GetFooRequest.Foo", ), ), nested_type=( make_message_pb2( - name='Foo', + name="Foo", fields=( make_field_pb2( - name='imported_message', + name="imported_message", number=1, - type_name='.google.dep.ImportedMessage'), + type_name=".google.dep.ImportedMessage", + ), + ), + ), + make_message_pb2( + name="Baa", + fields=( + make_field_pb2( + name="imported_message_baa", + number=1, + type_name=".google.baa.ImportedMessageBaa", + ), + ), + ), + make_message_pb2( + name="Bab", + fields=( + make_field_pb2( + name="imported_message_bab", + number=1, + type_name=".google.bab.v1.ImportedMessageBab", + ), + ), + ), + make_message_pb2( + name="Bac", + fields=( + make_field_pb2( + name="imported_message_bac", + number=1, + type_name=".google.bac.v10.ImportedMessageBac", + ), + ), + ), + make_message_pb2( + name="Bad", + fields=( + make_field_pb2( + name="imported_message_bad", + number=1, + type_name=".google.bad.v2beta.ImportedMessageBad", + ), + ), + ), + make_message_pb2( + name="Bae", + fields=( + make_field_pb2( + name="imported_message_bae", + number=1, + type_name=".google.bae.v2beta20.ImportedMessageBae", + ), + ), + ), + make_message_pb2( + name="Baf", + fields=( + 
make_field_pb2( + name="imported_message_baf", + number=1, + type_name=".google.baf.v20beta.ImportedMessageBaf", + ), + ), + ), + make_message_pb2( + name="Bag", + fields=( + make_field_pb2( + name="imported_message_bag", + number=1, + type_name=".google.bag.v20p1.ImportedMessageBag", + ), + ), + ), + make_message_pb2( + name="Bah", + fields=( + make_field_pb2( + name="imported_message_bah", + number=1, + type_name=".google.bah.v20p1alpha3p5.ImportedMessageBah", + ), + ), + ), + make_message_pb2( + name="Bai", + fields=( + make_field_pb2( + name="imported_message_bai", + number=1, + type_name=".google.bah.v20p1.bai.ImportedMessageBai", + ), + ), + ), + make_message_pb2( + name="Baj", + fields=( + make_field_pb2( + name="imported_message_baj", + number=1, + type_name=".google.bah.v20p1.baj.v3.ImportedMessageBaj", + ), ), ), ), ), make_message_pb2( - name='GetFooResponse', + name="GetFooResponse", fields=( make_field_pb2( - name='foo', + name="foo", number=1, - type_name='.google.example.v1.GetFooRequest.Foo', + type_name=".google.example.v1.GetFooRequest.Foo", ), ), ), ), - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", + ), ), ), - ),), + ), ), ) - api_schema = api.API.build(fd, package='google.example.v1') + api_schema = api.API.build(fd, package="google.example.v1") - assert api_schema.protos['foo.proto'].python_modules == ( - imp.Import(package=('google', 'dep'), module='dep_pb2'), + assert api_schema.protos["foo.proto"].python_modules == ( + imp.Import(package=("google", "baa"), module="baa_pb2"), + imp.Import(package=("google", "bab", 
"v1"), module="bab_v1_pb2"), + imp.Import(package=("google", "bac", "v10"), module="bac_v10_pb2"), + imp.Import(package=("google", "bad", "v2beta"), + module="bad_v2beta_pb2"), + imp.Import(package=("google", "bae", "v2beta20"), + module="bae_v2beta20_pb2"), + imp.Import(package=("google", "baf", "v20beta"), + module="baf_v20beta_pb2"), + imp.Import(package=("google", "bag", "v20p1"), module="bag_v20p1_pb2"), + imp.Import( + package=("google", "bah", "v20p1", "bai"), + module="bah_v20p1_bai_pb2" + ), + imp.Import( + package=("google", "bah", "v20p1", "baj", "v3"), + module="bah_v20p1_baj_v3_pb2", + ), + imp.Import( + package=("google", "bah", "v20p1alpha3p5"), module="bah_v20p1alpha3p5_pb2" + ), + imp.Import(package=("google", "dep"), module="dep_pb2"), + ) + + # Ensure that we can change the import statements to cater for a + # dependency that uses proto-plus types. + # For example, + # `from google.bar import bar_pb2` becomes `from google.bar.types import bar`` + # `from google.baz.v2 import baz_pb2` becomes `from google.baz_v2.types improt baz_v2` + api_schema = api.API.build( + fd, + package="google.example.v1", + opts=Options( + proto_plus_deps="+".join( + ( + "google.baa", + "google.bab.v1", + "google.bac.v10", + "google.bad.v2beta", + "google.bae.v2beta20", + "google.baf.v20beta", + "google.bag.v20p1", + "google.bah.v20p1alpha3p5", + "google.bah.v20p1.bai", + "google.bah.v20p1.baj.v3", + ) + ) + ), + ) + assert api_schema.protos["foo.proto"].python_modules == ( + imp.Import(package=("google", "baa", "types"), module="baa"), + imp.Import(package=("google", "bab_v1", "types"), module="bab_v1"), + imp.Import(package=("google", "bac_v10", "types"), module="bac_v10"), + imp.Import(package=("google", "bad_v2beta", "types"), + module="bad_v2beta"), + imp.Import(package=("google", "bae_v2beta20", "types"), + module="bae_v2beta20"), + imp.Import(package=("google", "baf_v20beta", "types"), + module="baf_v20beta"), + imp.Import(package=("google", "bag_v20p1", "types"), 
+ module="bag_v20p1"), + imp.Import( + package=("google", "bah", "v20p1", "bai", "types"), module="bah_v20p1_bai" + ), + imp.Import( + package=("google", "bah", "v20p1", "baj_v3", "types"), + module="bah_v20p1_baj_v3", + ), + imp.Import( + package=("google", "bah_v20p1alpha3p5", "types"), module="bah_v20p1alpha3p5" + ), + imp.Import(package=("google", "dep"), module="dep_pb2"), ) diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index c0487b7d2620..5a839e4c8c80 100644 --- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -228,6 +228,17 @@ def test_cli_override_warehouse_package_name(): assert n.warehouse_package_name == "google-cloud-foo" +def test_cli_override_proto_plus_deps(): + FileDesc = descriptor_pb2.FileDescriptorProto + proto1 = FileDesc(package='google.translation') + n = naming.Naming.build( + proto1, + opts=Options( + proto_plus_deps=('google.dep1', 'google.dep2')), + ) + assert n.proto_plus_deps == ('google.dep1', 'google.dep2') + + def test_build_factory(): proto = descriptor_pb2.FileDescriptorProto( package='google.mollusc.v1alpha1' From 27f859ef2653c156ac10d3079bcaf9ad4e32b2bf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 17 Mar 2023 07:23:24 -0400 Subject: [PATCH 0987/1339] chore(main): release 1.9.0 (#1623) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c9afdd4026d4..2a329dc19ca9 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## 
[1.9.0](https://github.com/googleapis/gapic-generator-python/compare/v1.8.6...v1.9.0) (2023-03-17) + + +### Features + +* Add generator option proto-plus-dep ([#1596](https://github.com/googleapis/gapic-generator-python/issues/1596)) ([1c4de67](https://github.com/googleapis/gapic-generator-python/commit/1c4de6728e02d2fa1aa2c48e1a86def6aef5c563)) + ## [1.8.6](https://github.com/googleapis/gapic-generator-python/compare/v1.8.5...v1.8.6) (2023-03-14) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 67b82194db25..b3a2d5f6d76b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.8.6" +version = "1.9.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 39e8ab6efe2783d94b1b992deee3817850c4d5a9 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 20 Mar 2023 15:41:29 +0000 Subject: [PATCH 0988/1339] chore(deps): update all dependencies (#1626) * chore(deps): update all dependencies * Update WORKSPACE --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ea8ef8900097..8b102fbc54fc 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -9,6 +9,6 @@ PyYAML==6.0 setuptools==67.6.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 -pytest-asyncio==0.20.3 +pytest-asyncio==0.21.0 libcst==0.4.9 inflection==0.5.1 \ No newline at end of file From cd28aa3213a974506db0111fa45ed26454baae67 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 21 Mar 2023 09:21:53 -0400 Subject: [PATCH 0989/1339] docs: fix formatting of request arg in 
docstring (#1628) --- .../%sub/services/%service/_client_macros.j2 | 4 +- .../%sub/services/%service/async_client.py.j2 | 4 +- .../services/%service/transports/rest.py.j2 | 2 +- .../services/asset_service/transports/rest.py | 2 - .../services/eventarc/transports/rest.py | 5 -- .../config_service_v2/async_client.py | 52 ++++++++++--------- .../services/config_service_v2/client.py | 52 ++++++++++--------- .../config_service_v2/transports/rest.py | 2 - .../logging_service_v2/async_client.py | 4 +- .../services/logging_service_v2/client.py | 4 +- .../logging_service_v2/transports/rest.py | 1 - .../services/cloud_redis/transports/rest.py | 9 ---- 12 files changed, 63 insertions(+), 78 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 1502f8fcc39c..23bef26fe810 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -50,7 +50,7 @@ {% if not method.client_streaming %} request (Union[{{ method.input.ident.sphinx }}, dict]): The request object.{{ " " }} - {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} ({{ field.ident.sphinx }}): {{ field.meta.doc|rst(width=72, indent=16) }} @@ -61,7 +61,7 @@ {% else %} requests (Iterator[{{ method.input.ident.sphinx }}]): The request object iterator.{{ " " }} - {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 3a1df2b857d1..8f10ab7edec5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -228,7 +228,7 @@ class {{ service.async_client_name }}: {% if not method.client_streaming %} request (Optional[Union[{{ method.input.ident.sphinx }}, dict]]): The request object.{{ " " }} - {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} {% for key, field in method.flattened_fields.items() %} {{ field.name }} (:class:`{{ field.ident.sphinx }}`): {{ field.meta.doc|rst(width=72, indent=16) }} @@ -239,7 +239,7 @@ class {{ service.async_client_name }}: {% else %} requests (AsyncIterator[`{{ method.input.ident.sphinx }}`]): The request object AsyncIterator.{{ " " }} - {{- method.input.meta.doc|wrap(width=72, offset=36, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} {% endif %} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 566b93ccda47..3b93a8b80a3f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -331,7 +331,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): Args: request (~.{{ method.input.ident }}): The request object.{{ ' ' }} - {{- method.input.meta.doc|rst(width=72, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index ef684a1424b3..8818b94e6c69 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -500,7 +500,6 @@ def __call__(self, request (~.asset_service.AnalyzeIamPolicyRequest): The request object. A request message for [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -581,7 +580,6 @@ def __call__(self, request (~.asset_service.AnalyzeIamPolicyLongrunningRequest): The request object. 
A request message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 956df809561f..d6a879a22912 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -346,7 +346,6 @@ def __call__(self, request (~.eventarc.CreateTriggerRequest): The request object. The request message for the CreateTrigger method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -434,7 +433,6 @@ def __call__(self, request (~.eventarc.DeleteTriggerRequest): The request object. The request message for the DeleteTrigger method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -513,7 +511,6 @@ def __call__(self, request (~.eventarc.GetTriggerRequest): The request object. The request message for the GetTrigger method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -593,7 +590,6 @@ def __call__(self, request (~.eventarc.ListTriggersRequest): The request object. The request message for the ListTriggers method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -673,7 +669,6 @@ def __call__(self, request (~.eventarc.UpdateTriggerRequest): The request object. The request message for the UpdateTrigger method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 85c6dfdfbd21..aa607edd4043 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -233,7 +233,7 @@ async def sample_list_buckets(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]]): - The request object. The parameters to `ListBuckets`. + The request object. The parameters to ``ListBuckets``. parent (:class:`str`): Required. The parent resource whose buckets are to be listed: @@ -354,7 +354,7 @@ async def sample_get_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): - The request object. The parameters to `GetBucket`. + The request object. The parameters to ``GetBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -435,7 +435,7 @@ async def sample_create_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): - The request object. The parameters to `CreateBucket`. + The request object. The parameters to ``CreateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -523,7 +523,7 @@ async def sample_update_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): - The request object. The parameters to `UpdateBucket`. + The request object. The parameters to ``UpdateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -600,7 +600,7 @@ async def sample_delete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): - The request object. The parameters to `DeleteBucket`. + The request object. The parameters to ``DeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -669,7 +669,7 @@ async def sample_undelete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): - The request object. The parameters to `UndeleteBucket`. + The request object. The parameters to ``UndeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -742,7 +742,7 @@ async def sample_list_views(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListViewsRequest, dict]]): - The request object. The parameters to `ListViews`. + The request object. The parameters to ``ListViews``. parent (:class:`str`): Required. The bucket whose views are to be listed: @@ -855,7 +855,7 @@ async def sample_get_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): - The request object. The parameters to `GetView`. + The request object. The parameters to ``GetView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -937,7 +937,7 @@ async def sample_create_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): - The request object. The parameters to `CreateView`. + The request object. The parameters to ``CreateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1018,7 +1018,7 @@ async def sample_update_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): - The request object. The parameters to `UpdateView`. + The request object. The parameters to ``UpdateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1095,7 +1095,7 @@ async def sample_delete_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): - The request object. The parameters to `DeleteView`. + The request object. The parameters to ``DeleteView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1168,7 +1168,7 @@ async def sample_list_sinks(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListSinksRequest, dict]]): - The request object. The parameters to `ListSinks`. + The request object. The parameters to ``ListSinks``. parent (:class:`str`): Required. The parent resource whose sinks are to be listed: @@ -1294,7 +1294,7 @@ async def sample_get_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetSinkRequest, dict]]): - The request object. The parameters to `GetSink`. + The request object. The parameters to ``GetSink``. sink_name (:class:`str`): Required. The resource name of the sink: @@ -1427,7 +1427,7 @@ async def sample_create_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]]): - The request object. 
The parameters to `CreateSink`. + The request object. The parameters to ``CreateSink``. parent (:class:`str`): Required. The resource in which to create the sink: @@ -1564,7 +1564,7 @@ async def sample_update_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]]): - The request object. The parameters to `UpdateSink`. + The request object. The parameters to ``UpdateSink``. sink_name (:class:`str`): Required. The full resource name of the sink to update, including the parent resource and the sink identifier: @@ -1717,7 +1717,7 @@ async def sample_delete_sink(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]]): - The request object. The parameters to `DeleteSink`. + The request object. The parameters to ``DeleteSink``. sink_name (:class:`str`): Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: @@ -1826,7 +1826,7 @@ async def sample_list_exclusions(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): - The request object. The parameters to `ListExclusions`. + The request object. The parameters to ``ListExclusions``. parent (:class:`str`): Required. The parent resource whose exclusions are to be listed. @@ -1952,7 +1952,7 @@ async def sample_get_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): - The request object. The parameters to `GetExclusion`. + The request object. The parameters to ``GetExclusion``. name (:class:`str`): Required. The resource name of an existing exclusion: @@ -2087,7 +2087,7 @@ async def sample_create_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): - The request object. The parameters to `CreateExclusion`. + The request object. The parameters to ``CreateExclusion``. parent (:class:`str`): Required. 
The parent resource in which to create the exclusion: @@ -2224,7 +2224,7 @@ async def sample_update_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): - The request object. The parameters to `UpdateExclusion`. + The request object. The parameters to ``UpdateExclusion``. name (:class:`str`): Required. The resource name of the exclusion to update: @@ -2365,7 +2365,7 @@ async def sample_delete_exclusion(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): - The request object. The parameters to `DeleteExclusion`. + The request object. The parameters to ``DeleteExclusion``. name (:class:`str`): Required. The resource name of an existing exclusion to delete: @@ -2483,8 +2483,9 @@ async def sample_get_cmek_settings(): request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): The request object. The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Logs + Router `__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -2591,8 +2592,9 @@ async def sample_update_cmek_settings(): request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Logs + Router `__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 603e2caab746..947cafee32da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -463,7 +463,7 @@ def sample_list_buckets(): Args: request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]): - The request object. The parameters to `ListBuckets`. + The request object. The parameters to ``ListBuckets``. parent (str): Required. The parent resource whose buckets are to be listed: @@ -584,7 +584,7 @@ def sample_get_bucket(): Args: request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]): - The request object. The parameters to `GetBucket`. + The request object. The parameters to ``GetBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -666,7 +666,7 @@ def sample_create_bucket(): Args: request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): - The request object. The parameters to `CreateBucket`. + The request object. The parameters to ``CreateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -755,7 +755,7 @@ def sample_update_bucket(): Args: request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): - The request object. The parameters to `UpdateBucket`. + The request object. The parameters to ``UpdateBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -833,7 +833,7 @@ def sample_delete_bucket(): Args: request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): - The request object. The parameters to `DeleteBucket`. + The request object. The parameters to ``DeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -903,7 +903,7 @@ def sample_undelete_bucket(): Args: request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): - The request object. The parameters to `UndeleteBucket`. + The request object. The parameters to ``UndeleteBucket``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -977,7 +977,7 @@ def sample_list_views(): Args: request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): - The request object. The parameters to `ListViews`. + The request object. The parameters to ``ListViews``. parent (str): Required. The bucket whose views are to be listed: @@ -1090,7 +1090,7 @@ def sample_get_view(): Args: request (Union[google.cloud.logging_v2.types.GetViewRequest, dict]): - The request object. The parameters to `GetView`. + The request object. The parameters to ``GetView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1173,7 +1173,7 @@ def sample_create_view(): Args: request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): - The request object. The parameters to `CreateView`. + The request object. The parameters to ``CreateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1255,7 +1255,7 @@ def sample_update_view(): Args: request (Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]): - The request object. The parameters to `UpdateView`. 
+ The request object. The parameters to ``UpdateView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1333,7 +1333,7 @@ def sample_delete_view(): Args: request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): - The request object. The parameters to `DeleteView`. + The request object. The parameters to ``DeleteView``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1407,7 +1407,7 @@ def sample_list_sinks(): Args: request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): - The request object. The parameters to `ListSinks`. + The request object. The parameters to ``ListSinks``. parent (str): Required. The parent resource whose sinks are to be listed: @@ -1525,7 +1525,7 @@ def sample_get_sink(): Args: request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): - The request object. The parameters to `GetSink`. + The request object. The parameters to ``GetSink``. sink_name (str): Required. The resource name of the sink: @@ -1650,7 +1650,7 @@ def sample_create_sink(): Args: request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]): - The request object. The parameters to `CreateSink`. + The request object. The parameters to ``CreateSink``. parent (str): Required. The resource in which to create the sink: @@ -1787,7 +1787,7 @@ def sample_update_sink(): Args: request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]): - The request object. The parameters to `UpdateSink`. + The request object. The parameters to ``UpdateSink``. sink_name (str): Required. The full resource name of the sink to update, including the parent resource and the sink identifier: @@ -1932,7 +1932,7 @@ def sample_delete_sink(): Args: request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]): - The request object. 
The parameters to `DeleteSink`. + The request object. The parameters to ``DeleteSink``. sink_name (str): Required. The full resource name of the sink to delete, including the parent resource and the sink identifier: @@ -2033,7 +2033,7 @@ def sample_list_exclusions(): Args: request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): - The request object. The parameters to `ListExclusions`. + The request object. The parameters to ``ListExclusions``. parent (str): Required. The parent resource whose exclusions are to be listed. @@ -2151,7 +2151,7 @@ def sample_get_exclusion(): Args: request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): - The request object. The parameters to `GetExclusion`. + The request object. The parameters to ``GetExclusion``. name (str): Required. The resource name of an existing exclusion: @@ -2278,7 +2278,7 @@ def sample_create_exclusion(): Args: request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): - The request object. The parameters to `CreateExclusion`. + The request object. The parameters to ``CreateExclusion``. parent (str): Required. The parent resource in which to create the exclusion: @@ -2415,7 +2415,7 @@ def sample_update_exclusion(): Args: request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): - The request object. The parameters to `UpdateExclusion`. + The request object. The parameters to ``UpdateExclusion``. name (str): Required. The resource name of the exclusion to update: @@ -2556,7 +2556,7 @@ def sample_delete_exclusion(): Args: request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): - The request object. The parameters to `DeleteExclusion`. + The request object. The parameters to ``DeleteExclusion``. name (str): Required. The resource name of an existing exclusion to delete: @@ -2666,8 +2666,9 @@ def sample_get_cmek_settings(): request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): The request object. 
The parameters to [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Logs + Router `__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -2775,8 +2776,9 @@ def sample_update_cmek_settings(): request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): The request object. The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See [Enabling CMEK for Logs - Router](https://cloud.google.com/logging/docs/routing/managed-encryption) + + See `Enabling CMEK for Logs + Router `__ for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py index 9fb4a17a303a..48446d14e3fe 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -1546,7 +1546,6 @@ def __call__(self, See `Enabling CMEK for Logs Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2508,7 +2507,6 @@ def __call__(self, See `Enabling CMEK for Logs Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index e03941e09995..3c38c790b7a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -540,7 +540,7 @@ async def sample_list_log_entries(): Args: request (Optional[Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]]): - The request object. The parameters to `ListLogEntries`. + The request object. The parameters to ``ListLogEntries``. resource_names (:class:`MutableSequence[str]`): Required. Names of one or more parent resources from which to retrieve log entries: @@ -932,7 +932,7 @@ def request_generator(): Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): - The request object AsyncIterator. The parameters to `TailLogEntries`. + The request object AsyncIterator. The parameters to ``TailLogEntries``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index e55c65a0534a..c9fa648737da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -717,7 +717,7 @@ def sample_list_log_entries(): Args: request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): - The request object. The parameters to `ListLogEntries`. + The request object. The parameters to ``ListLogEntries``. resource_names (MutableSequence[str]): Required. Names of one or more parent resources from which to retrieve log entries: @@ -1086,7 +1086,7 @@ def request_generator(): Args: requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): - The request object iterator. The parameters to `TailLogEntries`. + The request object iterator. The parameters to ``TailLogEntries``. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index 309e5179fc04..ee4b6e9a7444 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -555,7 +555,6 @@ def __call__(self, request (~.logging.ListMonitoredResourceDescriptorsRequest): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index e27a5b53743d..2f514a9d286e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -459,7 +459,6 @@ def __call__(self, request (~.cloud_redis.CreateInstanceRequest): The request object. Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -547,7 +546,6 @@ def __call__(self, request (~.cloud_redis.DeleteInstanceRequest): The request object. Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -626,7 +624,6 @@ def __call__(self, request (~.cloud_redis.ExportInstanceRequest): The request object. Request for [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -714,7 +711,6 @@ def __call__(self, request (~.cloud_redis.FailoverInstanceRequest): The request object. Request for [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -802,7 +798,6 @@ def __call__(self, request (~.cloud_redis.GetInstanceRequest): The request object. Request for [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -880,7 +875,6 @@ def __call__(self, request (~.cloud_redis.ImportInstanceRequest): The request object. Request for [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -968,7 +962,6 @@ def __call__(self, request (~.cloud_redis.ListInstancesRequest): The request object. Request for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1048,7 +1041,6 @@ def __call__(self, request (~.cloud_redis.UpdateInstanceRequest): The request object. Request for [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1136,7 +1128,6 @@ def __call__(self, request (~.cloud_redis.UpgradeInstanceRequest): The request object. Request for [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. From 1af60369d17adfd9fcfaa379d164c766a1e080bc Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 21 Mar 2023 11:24:32 -0400 Subject: [PATCH 0990/1339] docs: fix docstring with proto-plus dependency (#1624) * docs: fix docstring with proto-plus dependency * fix mypy * add tests * style * address offline feedback to reduce the size of tests * run black --- .../gapic-generator/gapic/schema/metadata.py | 46 +++-- .../tests/unit/schema/test_api.py | 181 ++---------------- .../tests/unit/schema/test_metadata.py | 32 ++++ 3 files changed, 72 insertions(+), 187 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index f4ec88e7cba6..3dbc7389663a 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -173,6 +173,19 @@ def proto_package(self) -> str: """Return the proto package for this type.""" return '.'.join(self.package) + def convert_to_versioned_package(self) -> Tuple[str, ...]: + # We need to change the import statement to use an + # underscore between the module and the version. For example, + # change google.cloud.documentai.v1 to google.cloud.documentai_v1. + # Check if the package name contains a version. 
+ version_regex = "^v\d[^/]*$" + regex_match = re.match(version_regex, self.package[-1]) + if regex_match and len(self.package) > 1: + versioned_module = f"{self.package[-2]}_{regex_match[0]}" + return self.package[:-2] + (versioned_module,) + else: + return self.package + @cached_property def python_import(self) -> imp.Import: """Return the Python import for this type.""" @@ -203,27 +216,11 @@ def python_import(self) -> imp.Import: ) if self.is_proto_plus_type: - # We need to change the import statement to use an - # underscore between the module and the version. For example, - # change google.cloud.documentai.v1 to google.cloud.documentai_v1. - # Check if the package name contains a version. - version_regex = "^v\d[^/]*$" - regex_match = re.match(version_regex, self.package[-1]) - - if regex_match and len(self.package) > 1: - versioned_module = f"{self.package[-2]}_{regex_match[0]}" - return imp.Import( - package=self.package[:-2] + - (versioned_module, 'types'), - module=self.module, - alias=self.module_alias, - ) - else: - return imp.Import( - package=self.package + ('types',), - module=self.module, - alias=self.module_alias, - ) + return imp.Import( + package=self.convert_to_versioned_package() + ('types',), + module=self.module, + alias=self.module_alias, + ) # Return the standard import. 
return imp.Import( @@ -247,6 +244,13 @@ def sphinx(self) -> str: return '.'.join(self.api_naming.module_namespace + ( self.api_naming.versioned_module_name, ) + self.subpackage + ('types',) + self.parent + (self.name, )) + elif self.is_proto_plus_type: + return ".".join( + self.convert_to_versioned_package() + + ("types",) + + self.parent + + (self.name,) + ) # Anything left is a standard _pb2 type return f'{self.proto_package}.{self.module}_pb2.{self.name}' diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index d5d3ca4e65e3..7f26fc46d7ca 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -733,46 +733,6 @@ def test_python_modules_nested(): package="google.bab.v1", messages=(make_message_pb2(name="ImportedMessageBab", fields=()),), ), - make_file_pb2( - name="bac.v10.proto", - package="google.bac.v10", - messages=(make_message_pb2(name="ImportedMessageBac", fields=()),), - ), - make_file_pb2( - name="bad.v2beta.proto", - package="google.bad.v2beta", - messages=(make_message_pb2(name="ImportedMessageBad", fields=()),), - ), - make_file_pb2( - name="bae.v2beta20.proto", - package="google.bae.v2beta20", - messages=(make_message_pb2(name="ImportedMessageBae", fields=()),), - ), - make_file_pb2( - name="baf.v20beta.proto", - package="google.baf.v20beta", - messages=(make_message_pb2(name="ImportedMessageBaf", fields=()),), - ), - make_file_pb2( - name="bag.v20p1.proto", - package="google.bag.v20p1", - messages=(make_message_pb2(name="ImportedMessageBag", fields=()),), - ), - make_file_pb2( - name="bah.v20p1alpha3p5.proto", - package="google.bah.v20p1alpha3p5", - messages=(make_message_pb2(name="ImportedMessageBah", fields=()),), - ), - make_file_pb2( - name="bah.v20p1.bai.proto", - package="google.bah.v20p1.bai", - messages=(make_message_pb2(name="ImportedMessageBai", fields=()),), - ), - make_file_pb2( - 
name="bah.v20p1.baj.v3.proto", - package="google.bah.v20p1.baj.v3", - messages=(make_message_pb2(name="ImportedMessageBaj", fields=()),), - ), make_file_pb2( name="common.proto", package="google.example.v1.common", @@ -824,86 +784,6 @@ def test_python_modules_nested(): ), ), ), - make_message_pb2( - name="Bac", - fields=( - make_field_pb2( - name="imported_message_bac", - number=1, - type_name=".google.bac.v10.ImportedMessageBac", - ), - ), - ), - make_message_pb2( - name="Bad", - fields=( - make_field_pb2( - name="imported_message_bad", - number=1, - type_name=".google.bad.v2beta.ImportedMessageBad", - ), - ), - ), - make_message_pb2( - name="Bae", - fields=( - make_field_pb2( - name="imported_message_bae", - number=1, - type_name=".google.bae.v2beta20.ImportedMessageBae", - ), - ), - ), - make_message_pb2( - name="Baf", - fields=( - make_field_pb2( - name="imported_message_baf", - number=1, - type_name=".google.baf.v20beta.ImportedMessageBaf", - ), - ), - ), - make_message_pb2( - name="Bag", - fields=( - make_field_pb2( - name="imported_message_bag", - number=1, - type_name=".google.bag.v20p1.ImportedMessageBag", - ), - ), - ), - make_message_pb2( - name="Bah", - fields=( - make_field_pb2( - name="imported_message_bah", - number=1, - type_name=".google.bah.v20p1alpha3p5.ImportedMessageBah", - ), - ), - ), - make_message_pb2( - name="Bai", - fields=( - make_field_pb2( - name="imported_message_bai", - number=1, - type_name=".google.bah.v20p1.bai.ImportedMessageBai", - ), - ), - ), - make_message_pb2( - name="Baj", - fields=( - make_field_pb2( - name="imported_message_baj", - number=1, - type_name=".google.bah.v20p1.baj.v3.ImportedMessageBaj", - ), - ), - ), ), ), make_message_pb2( @@ -937,27 +817,15 @@ def test_python_modules_nested(): assert api_schema.protos["foo.proto"].python_modules == ( imp.Import(package=("google", "baa"), module="baa_pb2"), imp.Import(package=("google", "bab", "v1"), module="bab_v1_pb2"), - imp.Import(package=("google", "bac", "v10"), 
module="bac_v10_pb2"), - imp.Import(package=("google", "bad", "v2beta"), - module="bad_v2beta_pb2"), - imp.Import(package=("google", "bae", "v2beta20"), - module="bae_v2beta20_pb2"), - imp.Import(package=("google", "baf", "v20beta"), - module="baf_v20beta_pb2"), - imp.Import(package=("google", "bag", "v20p1"), module="bag_v20p1_pb2"), - imp.Import( - package=("google", "bah", "v20p1", "bai"), - module="bah_v20p1_bai_pb2" - ), - imp.Import( - package=("google", "bah", "v20p1", "baj", "v3"), - module="bah_v20p1_baj_v3_pb2", - ), - imp.Import( - package=("google", "bah", "v20p1alpha3p5"), module="bah_v20p1alpha3p5_pb2" - ), imp.Import(package=("google", "dep"), module="dep_pb2"), ) + assert ( + api_schema.protos["foo.proto"] + .all_messages["google.example.v1.GetFooRequest.Bab"] + .fields["imported_message_bab"] + .ident.sphinx + == "google.bab.v1.bab_v1_pb2.ImportedMessageBab" + ) # Ensure that we can change the import statements to cater for a # dependency that uses proto-plus types. @@ -972,14 +840,6 @@ def test_python_modules_nested(): ( "google.baa", "google.bab.v1", - "google.bac.v10", - "google.bad.v2beta", - "google.bae.v2beta20", - "google.baf.v20beta", - "google.bag.v20p1", - "google.bah.v20p1alpha3p5", - "google.bah.v20p1.bai", - "google.bah.v20p1.baj.v3", ) ) ), @@ -987,28 +847,17 @@ def test_python_modules_nested(): assert api_schema.protos["foo.proto"].python_modules == ( imp.Import(package=("google", "baa", "types"), module="baa"), imp.Import(package=("google", "bab_v1", "types"), module="bab_v1"), - imp.Import(package=("google", "bac_v10", "types"), module="bac_v10"), - imp.Import(package=("google", "bad_v2beta", "types"), - module="bad_v2beta"), - imp.Import(package=("google", "bae_v2beta20", "types"), - module="bae_v2beta20"), - imp.Import(package=("google", "baf_v20beta", "types"), - module="baf_v20beta"), - imp.Import(package=("google", "bag_v20p1", "types"), - module="bag_v20p1"), - imp.Import( - package=("google", "bah", "v20p1", "bai", "types"), 
module="bah_v20p1_bai" - ), - imp.Import( - package=("google", "bah", "v20p1", "baj_v3", "types"), - module="bah_v20p1_baj_v3", - ), - imp.Import( - package=("google", "bah_v20p1alpha3p5", "types"), module="bah_v20p1alpha3p5" - ), imp.Import(package=("google", "dep"), module="dep_pb2"), ) + assert ( + api_schema.protos["foo.proto"] + .all_messages["google.example.v1.GetFooRequest.Bab"] + .fields["imported_message_bab"] + .ident.sphinx + == "google.bab_v1.types.ImportedMessageBab" + ) + def test_services(): L = descriptor_pb2.SourceCodeInfo.Location diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 179361e039f4..df8967da71b4 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -66,6 +66,38 @@ def test_address_proto(): assert addr.proto_package == 'foo.bar' +def test_proto_package_version_parsing(): + addr = metadata.Address(package=("baa")) + assert addr.convert_to_versioned_package() == ("baa") + + addr = metadata.Address(package=("bab", "v1")) + assert addr.convert_to_versioned_package() == ("bab_v1",) + + addr = metadata.Address(package=("bac", "v10")) + assert addr.convert_to_versioned_package() == ("bac_v10",) + + addr = metadata.Address(package=("bad", "v2beta")) + assert addr.convert_to_versioned_package() == ("bad_v2beta",) + + addr = metadata.Address(package=("bae", "v2beta20")) + assert addr.convert_to_versioned_package() == ("bae_v2beta20",) + + addr = metadata.Address(package=("baf", "v20beta")) + assert addr.convert_to_versioned_package() == ("baf_v20beta",) + + addr = metadata.Address(package=("bag", "v20p1")) + assert addr.convert_to_versioned_package() == ("bag_v20p1",) + + addr = metadata.Address(package=("bah", "v20p1alpha3p5")) + assert addr.convert_to_versioned_package() == ("bah_v20p1alpha3p5",) + + addr = metadata.Address(package=("bai", "v20p1")) + assert 
addr.convert_to_versioned_package() == ("bai_v20p1",) + + addr = metadata.Address(package=("bah", "v20p1", "baj", "v3")) + assert addr.convert_to_versioned_package() == ("bah", "v20p1", "baj_v3") + + def test_address_child_no_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') child = addr.child('Bacon', path=(4, 0)) From b59a5ee41b9db9d51cc93a0ca2cd9bea93a46c9d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 22 Mar 2023 12:31:14 -0400 Subject: [PATCH 0991/1339] fix: restore grpc-google-iam-v1 dependency when api.has_iam_mixin is True (#1631) * fix: restore grpc-google-iam-v1 dependency when api.has_iam_mixin is True * add IAM dependency to eventarc golden files * address review comments * clean up * style * style --- packages/gapic-generator/gapic/schema/api.py | 4 +++- .../tests/integration/BUILD.bazel | 12 +++++++++++ .../tests/integration/eventarc_v1.yaml | 9 +++++++++ .../services/eventarc/async_client.py | 3 +++ .../eventarc_v1/services/eventarc/client.py | 6 ++++++ .../services/eventarc/transports/base.py | 3 +++ .../services/eventarc/transports/grpc.py | 3 +++ .../eventarc/transports/grpc_asyncio.py | 3 +++ .../services/eventarc/transports/rest.py | 3 +++ .../integration/goldens/eventarc/setup.py | 1 + .../eventarc/testing/constraints-3.10.txt | 1 + .../eventarc/testing/constraints-3.11.txt | 1 + .../eventarc/testing/constraints-3.12.txt | 1 + .../eventarc/testing/constraints-3.7.txt | 1 + .../eventarc/testing/constraints-3.8.txt | 1 + .../eventarc/testing/constraints-3.9.txt | 1 + .../unit/gapic/eventarc_v1/test_eventarc.py | 4 ++++ .../tests/unit/schema/test_api.py | 20 +++++++++++++++++++ 18 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 packages/gapic-generator/tests/integration/eventarc_v1.yaml diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 9b5c29439d3b..c20d33a750b8 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ 
b/packages/gapic-generator/gapic/schema/api.py @@ -489,7 +489,9 @@ def gapic_metadata_json(self, options: Options) -> str: return MessageToJson(self.gapic_metadata(options), sort_keys=True) def requires_package(self, pkg: Tuple[str, ...]) -> bool: - return any( + pkg_has_iam_mixin = self.has_iam_mixin and \ + pkg == ('google', 'iam', 'v1') + return pkg_has_iam_mixin or any( message.ident.package == pkg for proto in self.all_protos.values() for message in proto.all_messages.values() diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 4e151c3146ef..347510d83311 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -11,6 +11,11 @@ load( "golden_update", "integration_test", ) +load( + "@com_github_grpc_grpc//bazel:python_rules.bzl", + "py_proto_library", +) + load("@rules_proto//proto:defs.bzl", "proto_library") package(default_visibility = ["//visibility:public"]) @@ -81,6 +86,11 @@ py_test( ], ) +py_proto_library( + name = "iam_policy_py_proto", + deps = ["@com_google_googleapis//google/iam/v1:iam_policy_proto"], +) + # Eventarc. 
py_gapic_library( name = "eventarc_py_gapic", @@ -91,7 +101,9 @@ py_gapic_library( "python-gapic-name=eventarc", "autogen-snippets", ], + service_yaml = "eventarc_v1.yaml", transport = "grpc+rest", + deps = [":iam_policy_py_proto"] ) py_test( diff --git a/packages/gapic-generator/tests/integration/eventarc_v1.yaml b/packages/gapic-generator/tests/integration/eventarc_v1.yaml new file mode 100644 index 000000000000..afd816204495 --- /dev/null +++ b/packages/gapic-generator/tests/integration/eventarc_v1.yaml @@ -0,0 +1,9 @@ +type: google.api.Service +config_version: 3 +name: eventarc.googleapis.com +title: Eventarc API + +apis: +- name: google.cloud.eventarc.v1.Eventarc +- name: google.cloud.location.Locations +- name: google.iam.v1.IAMPolicy diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 2a5ea08d65fa..a6aa557c768e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -38,6 +38,9 @@ from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index bfb002a3c4a1..63915520d9d2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -41,6 +41,9 @@ from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -1053,6 +1056,9 @@ def __exit__(self, type, value, traceback): + + + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index e86f94857286..655845217600 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -29,6 +29,9 @@ from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import 
operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index fd8dae846b57..adf068fcf91f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -27,6 +27,9 @@ from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 20982c46e3b8..644a6f7f6c51 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -27,6 +27,9 @@ from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from 
google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO from .grpc import EventarcGrpcTransport diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index d6a879a22912..bd5c2e0ab41b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -28,6 +28,9 @@ from google.protobuf import json_format from google.api_core import operations_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 718ff3fd68cd..f0ba7a537edd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -40,6 +40,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/python-eventarc" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt index ed7f9aed2559..ad3f0fa58e2d 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt index ed7f9aed2559..ad3f0fa58e2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt index ed7f9aed2559..ad3f0fa58e2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 6c44adfea7ee..2beecf99e0be 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -7,3 +7,4 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt index ed7f9aed2559..ad3f0fa58e2d 
100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt index ed7f9aed2559..ad3f0fa58e2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 6c692beb9d2c..178df760f509 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -54,6 +54,10 @@ from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 7f26fc46d7ca..e493e68d0c11 100644 --- 
a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2582,3 +2582,23 @@ def test_mixin_api_methods_lro(): assert api_schema.mixin_api_methods == { 'CancelOperation': m1, 'DeleteOperation': m2, 'WaitOperation': m3, 'GetOperation': m4} + + +def test_has_iam_mixin(): + # Check that has_iam_mixin() property of API returns True when the + # service YAML contains `google.iam.v1.IAMPolicy`. + fd = ( + make_file_pb2( + name='example.proto', + package='google.example.v1', + messages=(make_message_pb2(name='ExampleRequest', fields=()),), + ),) + opts = Options(service_yaml_config={ + 'apis': [ + { + 'name': 'google.iam.v1.IAMPolicy' + } + ], + }) + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + assert api_schema.has_iam_mixin From b4b65a83fae2388b71c3ebb29c278d51ae09f46d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 22 Mar 2023 16:36:49 +0000 Subject: [PATCH 0992/1339] chore(main): release 1.9.1 (#1629) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 2a329dc19ca9..ec479fb7b094 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.9.1](https://github.com/googleapis/gapic-generator-python/compare/v1.9.0...v1.9.1) (2023-03-22) + + +### Bug Fixes + +* Restore grpc-google-iam-v1 dependency when api.has_iam_mixin is True ([#1631](https://github.com/googleapis/gapic-generator-python/issues/1631)) ([a7ed16d](https://github.com/googleapis/gapic-generator-python/commit/a7ed16dbc1098772a5d9ff1f48f0651cc90fe5b4)) + + +### Documentation + +* Fix docstring with 
proto-plus dependency ([#1624](https://github.com/googleapis/gapic-generator-python/issues/1624)) ([dce071d](https://github.com/googleapis/gapic-generator-python/commit/dce071d71c66c12cf001a66d238f6f6ea3c67c04)) +* Fix formatting of request arg in docstring ([#1628](https://github.com/googleapis/gapic-generator-python/issues/1628)) ([8b4f5ca](https://github.com/googleapis/gapic-generator-python/commit/8b4f5caf1f496b36a02ed26b5af65b0d58ab1b6e)) + ## [1.9.0](https://github.com/googleapis/gapic-generator-python/compare/v1.8.6...v1.9.0) (2023-03-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index b3a2d5f6d76b..cce276faa93d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.9.0" +version = "1.9.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 5a0328db5f6e5795f4e49a78f4ec567c2718b637 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Tue, 28 Mar 2023 14:02:34 -0700 Subject: [PATCH 0993/1339] feat: freezes reserved names list. (#1575) * feat: freezes reserved names list. * chore: fix bugs. * chore: fix bug in async client. * chore: fix bug in test. * chore: update goldens. * chore: revert some files. 
--------- Co-authored-by: Anthonios Partheniou --- .../gapic/utils/reserved_names.py | 91 +++++++++++++++---- .../tests/unit/utils/test_uri_conv.py | 4 +- 2 files changed, 77 insertions(+), 18 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 6ab813223c09..c17e5ea3240c 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -12,22 +12,81 @@ # See the License for the specific language governing permissions and # limitations under the License. -import builtins -import itertools -import keyword - +# DO NOT CHANGE this file, except when you need to add a new reserved keyword +# from Python's new major release. +# In an unforseen case if you have to make changes, please follow the process: +# 1. Run the internal script to check if any of the existing Google APIs use the +# item to be added/removed. For external contributors, ask a Googler to do that +# during code review. +# 2. If no APIs are using it, it's safe to add. Otherwise, consult with your TL. +# +# Changing this list will lead to breaking changes. This is happening because +# GAPIC will add "_" to field names from that list. This will change the generated +# client library surface (i.e. breaking change). Example of when this happened: +# https://github.com/googleapis/gapic-generator-python/issues/835. -# The exceptions to builtins are frequent and useful. -# They are explicitly allowed message, module, and field names. +# Each item in the list belongs to one of the following categories: +# 1. Python keyword +# 2. Used in Google APIs at the time of writing this PR +# 3. Reserved word from Protoplus. RESERVED_NAMES = frozenset( - itertools.chain( - # We CANNOT make exceptions for keywords. - keyword.kwlist, - # We make SOME exceptions for certain names that collide with builtins. 
- set(dir(builtins)) - {"filter", "map", "id", - "input", "property", "vars", "set"}, - # "mapping" and "ignore_unknown_fields" have special uses - # in the constructor of proto.Message - {"mapping", "ignore_unknown_fields"}, - ) + [ + "any", + "format", + "yield", + "await", + "False", + "return", + "continue", + "as", + "pass", + "next", + "class", + "list", + "breakpoint", + "import", + "mapping", + "zip", + "locals", + "max", + "and", + "finally", + "dir", + "def", + "elif", + "from", + "nonlocal", + "min", + "not", + "object", + "global", + "with", + "else", + "__peg_parser__", + "del", + "range", + "open", + "assert", + "all", + "except", + "while", + "license", + "raise", + "True", + "lambda", + "for", + "or", + "if", + "in", + "async", + "slice", + "is", + "break", + "hash", + "None", + "try", + "type", + # Comes from Protoplus + "ignore_unknown_fields" + ] ) diff --git a/packages/gapic-generator/tests/unit/utils/test_uri_conv.py b/packages/gapic-generator/tests/unit/utils/test_uri_conv.py index ccb7d4f35936..eeb6d253486e 100644 --- a/packages/gapic-generator/tests/unit/utils/test_uri_conv.py +++ b/packages/gapic-generator/tests/unit/utils/test_uri_conv.py @@ -19,8 +19,8 @@ def test_convert_uri_fieldname(): - uri = "abc/*/license/{license}/{xyz.reversed=reversed/*}" - expected_uri = "abc/*/license/{license_}/{xyz.reversed_=reversed/*}" + uri = "abc/*/license/{license}/{xyz.class=class/*}" + expected_uri = "abc/*/license/{license_}/{xyz.class_=class/*}" assert utils.convert_uri_fieldnames(uri) == expected_uri From 082d68f8d5437b38ca3e7467823268a6d59727d7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 28 Mar 2023 21:20:42 +0000 Subject: [PATCH 0994/1339] chore(main): release 1.10.0 (#1636) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files 
changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ec479fb7b094..4005e32b35c0 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.10.0](https://github.com/googleapis/gapic-generator-python/compare/v1.9.1...v1.10.0) (2023-03-28) + + +### Features + +* Freezes reserved names list. ([#1575](https://github.com/googleapis/gapic-generator-python/issues/1575)) ([b1b56ab](https://github.com/googleapis/gapic-generator-python/commit/b1b56ab22442d6d00ad2381ea14ce46aeda96e6e)) + ## [1.9.1](https://github.com/googleapis/gapic-generator-python/compare/v1.9.0...v1.9.1) (2023-03-22) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index cce276faa93d..d1f5c4259850 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.9.1" +version = "1.10.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 6273abe02d2e09c568752e35f87491262d91ec81 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Thu, 30 Mar 2023 11:04:18 -0700 Subject: [PATCH 0995/1339] fix: fix bug with quote replacement. 
(#1613) Co-authored-by: Anthonios Partheniou --- .../%sub/services/%service/transports/_rest_mixins.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 index 4a3726b1d880..460df244cc79 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -51,7 +51,7 @@ {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} {%- if body_spec %} - body = json.loads(json.dumps(transcoded_request['body'])) + body = json.dumps(transcoded_request['body']) {%- endif %} uri = transcoded_request['uri'] From 3dec983bee494deb931ed901cacf8890c2cf08e5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 13 Apr 2023 15:44:22 +0100 Subject: [PATCH 0996/1339] chore(deps): update all dependencies (#1634) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8b102fbc54fc..9e6b67a73af4 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,12 +1,12 @@ click==8.1.3 google-api-core==2.11.0 -googleapis-common-protos==1.58.0 +googleapis-common-protos==1.59.0 jinja2==3.1.2 MarkupSafe==2.1.2 protobuf==4.22.1 pypandoc==1.11 PyYAML==6.0 -setuptools==67.6.0 +setuptools==67.6.1 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.21.0 From 7097c0f2d1dee19ad2b90eb2aaa63ebdb2162f3c Mon Sep 17 00:00:00 2001 From: Anthonios 
Partheniou Date: Fri, 14 Apr 2023 13:58:13 -0400 Subject: [PATCH 0997/1339] fix: fix code coverage in async test (#1646) * fix: fix code coverage * update goldens * update comment * renable code formatting --- .../gapic/%name_%version/%sub/test_macros.j2 | 6 ++++- .../unit/gapic/asset_v1/test_asset_service.py | 18 +++++++++++--- .../unit/gapic/eventarc_v1/test_eventarc.py | 6 ++++- .../logging_v2/test_config_service_v2.py | 24 +++++++++++++++---- .../logging_v2/test_logging_service_v2.py | 18 +++++++++++--- .../logging_v2/test_metrics_service_v2.py | 6 ++++- .../unit/gapic/redis_v1/test_cloud_redis.py | 6 ++++- 7 files changed, 70 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 8c10abdf2082..952751323b12 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -848,7 +848,11 @@ async def test_{{ method_name }}_async_pages(): {% endif %} ) pages = [] - async for page_ in (await client.{{ method_name }}(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.{{ method_name }}(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 0ffce3d9bdcd..fdd2ffb030b1 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1110,7 +1110,11 @@ async def test_list_assets_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_assets(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_assets(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -2882,7 +2886,11 @@ async def test_search_all_resources_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.search_all_resources(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_all_resources(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -3312,7 +3320,11 @@ async def test_search_all_iam_policies_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.search_all_iam_policies(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_all_iam_policies(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert 
page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 178df760f509..d9a15d9a1dcd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1215,7 +1215,11 @@ async def test_list_triggers_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_triggers(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_triggers(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 9492117b61a5..a11636125c6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -959,7 +959,11 @@ async def test_list_buckets_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_buckets(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See 
https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_buckets(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -2165,7 +2169,11 @@ async def test_list_views_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_views(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_views(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -3203,7 +3211,11 @@ async def test_list_sinks_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_sinks(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_sinks(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -4662,7 +4674,11 @@ async def test_list_exclusions_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_exclusions(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_exclusions(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, 
['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index cdbd0fafc78f..c42c689fd3f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1339,7 +1339,11 @@ async def test_list_log_entries_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_entries(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_log_entries(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -1609,7 +1613,11 @@ async def test_list_monitored_resource_descriptors_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_monitored_resource_descriptors(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_monitored_resource_descriptors(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @@ -2033,7 +2041,11 @@ async def test_list_logs_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await 
client.list_logs(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_logs(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 0e79159169e6..70df8f2d2640 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -964,7 +964,11 @@ async def test_list_log_metrics_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_log_metrics(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_log_metrics(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 913c4397b6eb..6c31c2cdc8be 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -966,7 +966,11 @@ async def test_list_instances_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_instances(request={})).pages: # pragma: no branch + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: pages.append(page_) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token From d5f2f040f51157facfd311a95d9604521129aace Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 14 Apr 2023 14:30:00 -0400 Subject: [PATCH 0998/1339] chore: exclude rest code for rpcs without http annotations (#1645) --- .../%name/%version/%sub/services/%service/transports/rest.py.j2 | 2 +- .../%name_%version/%sub/services/%service/transports/rest.py.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index e10fa77c2945..adc150a4487c 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -267,7 +267,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __hash__(self): return hash("{{method.name}}") - {% if not method.client_streaming %} + {% if method.http_options and not method.client_streaming %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { {% for req_field in 
method.input.required_fields if req_field.name in method.query_params %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 3b93a8b80a3f..bf9b5b36e208 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -301,7 +301,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): def __hash__(self): return hash("{{method.name}}") - {% if not method.client_streaming %} + {% if method.http_options and not method.client_streaming %} {% if method.input.required_fields %} __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { {% for req_field in method.input.required_fields if req_field.name in method.query_params %} From 996fd29ed9b44db7ba919d1dc55bb40057eb0627 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 18 Apr 2023 17:59:33 +0200 Subject: [PATCH 0999/1339] chore(deps): update all dependencies (#1648) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 9e6b67a73af4..496dc72aa5fa 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.11.0 googleapis-common-protos==1.59.0 jinja2==3.1.2 MarkupSafe==2.1.2 -protobuf==4.22.1 +protobuf==4.22.3 pypandoc==1.11 PyYAML==6.0 setuptools==67.6.1 From 584442568f1644590508e53f736d45b89e4de485 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Mon, 8 May 2023 11:32:48 -0700 Subject: [PATCH 1000/1339] feat: support snippet 
generation for services that only support REST transport (#1656) * feat: support snippet generation for services that only support REST transport * formatting --- .../gapic/generator/generator.py | 6 - .../gapic/samplegen/samplegen.py | 23 +- .../tests/unit/samplegen/test_samplegen.py | 1072 +++++++++-------- 3 files changed, 603 insertions(+), 498 deletions(-) diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 541e1ab61b18..73accd9a22ee 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -153,12 +153,6 @@ def _generate_samples_and_manifest( autogen_specs = list( samplegen.generate_sample_specs(api_schema, opts=opts)) - # TODO: Support the generation of REST snippets. - autogen_specs = [ - spec for spec in autogen_specs - if spec["transport"] != api.TRANSPORT_REST - ] - # Also process any handwritten sample specs handwritten_specs = samplegen.parse_handwritten_specs( self._sample_configs) diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 0ed54be530ae..ebc70936a292 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -997,18 +997,24 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess return request -def _transport_type_from_transport(transport: str) -> str: - if transport == api.TRANSPORT_GRPC: +def _sync_or_async_from_transport(transport: str) -> str: + if transport in (api.TRANSPORT_GRPC, api.TRANSPORT_REST): return "sync" - elif transport == api.TRANSPORT_GRPC_ASYNC: + else: # transport is api.TRANSPORT_GRPC_ASYNC + # Currently the REST transport does not support async. 
return "async" - else: # api.TRANSPORT_REST - return "rest" + + +def _supports_grpc(service) -> bool: + return api.TRANSPORT_GRPC in service.clients.keys() def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, Any], None, None]: """Given an API, generate basic sample specs for each method. + If a service supports gRPC transport, we do not generate + spec for REST even if it also supports REST transport. + Args: api_schema (api.API): The schema that defines the API. @@ -1021,12 +1027,15 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A for service_name, service in gapic_metadata.services.items(): api_short_name = api_schema.services[f"{api_schema.naming.proto_package}.{service_name}"].shortname api_version = api_schema.naming.version + supports_grpc = _supports_grpc(service) for transport, client in service.clients.items(): - transport_type = _transport_type_from_transport(transport) + if supports_grpc and transport == api.TRANSPORT_REST: + continue + sync_or_async = _sync_or_async_from_transport(transport) for rpc_name, method_list in client.rpcs.items(): # Region Tag Format: # [{START|END} ${apishortname}_${apiVersion}_generated_${serviceName}_${rpcName}_{sync|async|rest}] - region_tag = f"{api_short_name}_{api_version}_generated_{service_name}_{rpc_name}_{transport_type}" + region_tag = f"{api_short_name}_{api_version}_generated_{service_name}_{rpc_name}_{sync_or_async}" spec = { "rpc": rpc_name, "transport": transport, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 0ae9f3d1dbe4..7665c831aef8 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -16,8 +16,8 @@ import pytest from textwrap import dedent -from typing import (TypeVar, Sequence) -from collections import (OrderedDict, namedtuple) +from typing import 
TypeVar, Sequence +from collections import OrderedDict, namedtuple from google.api import client_pb2 from google.api import resource_pb2 from google.protobuf import descriptor_pb2 @@ -27,12 +27,22 @@ import gapic.samplegen.samplegen as samplegen import gapic.samplegen_utils.types as types import gapic.samplegen_utils.yaml as gapic_yaml -from gapic.schema import (api, metadata, naming) +from gapic.schema import api, metadata, naming import gapic.schema.wrappers as wrappers from gapic.utils import Options -from ..common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, DummyMessageTypePB, - DummyService, DummyMethod, message_factory, enum_factory) +from ..common_types import ( + DummyApiSchema, + DummyField, + DummyIdent, + DummyNaming, + DummyMessage, + DummyMessageTypePB, + DummyService, + DummyMethod, + message_factory, + enum_factory, +) from gapic.samplegen_utils import utils @@ -42,7 +52,7 @@ def api_naming(): warehouse_package_name="mollusc-cephalopod-teuthida-", versioned_module_name="teuthida_v1", module_namespace=("mollusc", "cephalopod"), - proto_package="mollusc.cephalopod" + proto_package="mollusc.cephalopod", ) @@ -50,10 +60,12 @@ def api_naming(): def request_message(): return DummyMessage( fields={ - "parent": DummyField(is_primitive=True, type=str, required=True, name="parent"), - }, + "parent": DummyField( + is_primitive=True, type=str, required=True, name="parent" + ), + }, type=DummyMessageTypePB(name="ClassifyRequest"), - ident=DummyIdent(name="ClassifyRequest") + ident=DummyIdent(name="ClassifyRequest"), ) @@ -61,27 +73,33 @@ def request_message(): def request_message_from_another_package(api_naming): return DummyMessage( fields={ - "parent": DummyField(is_primitive=True, type=str, required=True, name="parent"), - }, + "parent": DummyField( + is_primitive=True, type=str, required=True, name="parent" + ), + }, type=DummyMessageTypePB(name="ClassifyRequest"), ident=DummyIdent(name="ClassifyRequest"), 
meta=metadata.Metadata( address=metadata.Address( api_naming=api_naming, - package=('a', 'b',), - module='c' + package=( + "a", + "b", + ), + module="c", ) - ) + ), ) @pytest.fixture(scope="module") def dummy_api_schema(request_message, api_naming): return DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient", - resource_messages_dict={} - )}, + services={ + "Mollusc": DummyService( + methods={}, client_name="MolluscClient", resource_messages_dict={} + ) + }, naming=api_naming, messages=request_message, ) @@ -89,12 +107,14 @@ def dummy_api_schema(request_message, api_naming): @pytest.fixture(scope="module") def dummy_api_schema_with_request_from_another_package( - request_message_from_another_package, api_naming): + request_message_from_another_package, api_naming +): return DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient", - resource_messages_dict={} - )}, + services={ + "Mollusc": DummyService( + methods={}, client_name="MolluscClient", resource_messages_dict={} + ) + }, naming=api_naming, messages=request_message_from_another_package, ) @@ -102,39 +122,43 @@ def dummy_api_schema_with_request_from_another_package( def test_define(dummy_api_schema): define = {"define": "squid=$resp"} - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc")), api_schema=dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("mollusc")), api_schema=dummy_api_schema + ) v.validate_response([define]) def test_define_undefined_var(dummy_api_schema): define = {"define": "squid=humboldt"} - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc")), api_schema=dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("mollusc")), api_schema=dummy_api_schema + ) with pytest.raises(types.UndefinedVariableReference): v.validate_response([define]) def test_define_reserved_varname(dummy_api_schema): define = {"define": 
"class=$resp"} - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc")), api_schema=dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("mollusc")), api_schema=dummy_api_schema + ) with pytest.raises(types.ReservedVariableName): v.validate_response([define]) def test_define_add_var(dummy_api_schema): - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc.name")), - api_schema=dummy_api_schema) - v.validate_response([{"define": "squid=$resp"}, - {"define": "name=squid.name"}]) + v = samplegen.Validator( + DummyMethod(output=message_factory("mollusc.name")), api_schema=dummy_api_schema + ) + v.validate_response([{"define": "squid=$resp"}, { + "define": "name=squid.name"}]) def test_define_bad_form(dummy_api_schema): define = {"define": "mollusc=$resp.squid=$resp.clam"} - v = samplegen.Validator(DummyMethod( - output=message_factory("mollusc")), api_schema=dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("mollusc")), api_schema=dummy_api_schema + ) with pytest.raises(types.BadAssignment): v.validate_response([define]) @@ -144,9 +168,11 @@ def test_define_redefinition(dummy_api_schema): {"define": "molluscs=$resp.molluscs"}, {"define": "molluscs=$resp.molluscs"}, ] - v = samplegen.Validator(DummyMethod(output=message_factory("$resp.molluscs", - repeated_iter=[True])), - api_schema=dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory( + "$resp.molluscs", repeated_iter=[True])), + api_schema=dummy_api_schema, + ) with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -157,19 +183,26 @@ def test_preprocess_sample(): classify_request_message = DummyMessage( fields={ - "parent": DummyField(is_primitive=True, type=str, required=True, name="parent"), - }, + "parent": DummyField( + is_primitive=True, type=str, required=True, name="parent" + ), + }, type=DummyMessageTypePB(name="ClassifyRequest"), - 
ident=DummyIdent(name="ClassifyRequest") - ) + ident=DummyIdent(name="ClassifyRequest"), + ) api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient", - resource_messages_dict={})}, - naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), - messages=classify_request_message + services={ + "Mollusc": DummyService( + methods={}, client_name="MolluscClient", resource_messages_dict={} + ) + }, + naming=DummyNaming( + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace="mollusc.cephalopod", + ), + messages=classify_request_message, ) rpc = DummyMethod(input=classify_request_message) @@ -195,16 +228,12 @@ def test_preprocess_sample(): assert request_type.ident.name == "ClassifyRequest" # assert mock request is created - assert sample["request"] == [ - { - "field": "parent", - "value": "mock_value" - } - ] + assert sample["request"] == [{"field": "parent", "value": "mock_value"}] @pytest.mark.parametrize( - 'repeated_enum,expected', [(False, "TYPE_2"), (True, ["TYPE_2"])]) + "repeated_enum,expected", [(False, "TYPE_2"), (True, ["TYPE_2"])] +) def test_preprocess_sample_with_enum_field(repeated_enum, expected): # Verify that the default response is added. 
sample = {"service": "Mollusc", "rpc": "Classify"} @@ -216,20 +245,25 @@ def test_preprocess_sample_with_enum_field(repeated_enum, expected): required=True, repeated=repeated_enum, type=enum_factory("type", ["TYPE_1", "TYPE_2"]), - enum=enum_factory("type", ["TYPE_1", "TYPE_2"]) - ) - }, + enum=enum_factory("type", ["TYPE_1", "TYPE_2"]), + ) + }, type=DummyMessageTypePB(name="ClassifyRequest"), - ident=DummyIdent(name="ClassifyRequest") - ) + ident=DummyIdent(name="ClassifyRequest"), + ) api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient", - resource_messages_dict={})}, - naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), - messages=classify_request_message + services={ + "Mollusc": DummyService( + methods={}, client_name="MolluscClient", resource_messages_dict={} + ) + }, + naming=DummyNaming( + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace="mollusc.cephalopod", + ), + messages=classify_request_message, ) rpc = DummyMethod(input=classify_request_message) @@ -255,12 +289,7 @@ def test_preprocess_sample_with_enum_field(repeated_enum, expected): assert request_type.ident.name == "ClassifyRequest" # assert mock request is created - assert sample["request"] == [ - { - "field": "type", - "value": expected - } - ] + assert sample["request"] == [{"field": "type", "value": expected}] def test_preprocess_sample_nested_message_field(): @@ -269,24 +298,40 @@ def test_preprocess_sample_nested_message_field(): classify_request_message = DummyMessage( fields={ - "config": DummyField(name="config", is_primitive=False, required=True, oneof=False, type=DummyMessage( - fields={"name": DummyField( - is_primitive=True, type=str, name="name", required=True, oneof=False)}, - )) - }, + "config": DummyField( + name="config", + is_primitive=False, + required=True, + 
oneof=False, + type=DummyMessage( + fields={ + "name": DummyField( + is_primitive=True, + type=str, + name="name", + required=True, + oneof=False, + ) + }, + ), + ) + }, type=DummyMessageTypePB(name="ClassifyRequest"), - ident=DummyIdent(name="ClassifyRequest") + ident=DummyIdent(name="ClassifyRequest"), ) api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient", - resource_messages_dict={} - )}, - naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + services={ + "Mollusc": DummyService( + methods={}, client_name="MolluscClient", resource_messages_dict={} + ) + }, + naming=DummyNaming( + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace="mollusc.cephalopod", + ), messages=classify_request_message, - ) rpc = DummyMethod(input=classify_request_message) @@ -295,11 +340,7 @@ def test_preprocess_sample_nested_message_field(): # assert mock request is created assert sample["request"] == [ - { - "field": "config.name", - "value": "mock_value" - }, - + {"field": "config.name", "value": "mock_value"}, ] @@ -308,12 +349,16 @@ def test_preprocess_sample_void_method(): api_schema = DummyApiSchema( services={"Mollusc": DummyService( methods={}, client_name="MolluscClient")}, - naming=DummyNaming(warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod"), + naming=DummyNaming( + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace="mollusc.cephalopod", + ), ) - rpc = DummyMethod(void=True, input=DummyMessage( - ident=DummyIdent(name="ClassifyRequest"))) + rpc = DummyMethod( + void=True, input=DummyMessage(ident=DummyIdent(name="ClassifyRequest")) + ) samplegen.Validator.preprocess_sample(sample, api_schema, rpc) @@ -321,7 +366,8 @@ def 
test_preprocess_sample_void_method(): def test_preprocess_sample_with_request_module_name( - dummy_api_schema_with_request_from_another_package): + dummy_api_schema_with_request_from_another_package, +): sample = {"service": "Mollusc", "rpc": "Classify"} api_schema = dummy_api_schema_with_request_from_another_package rpc = DummyMethod(input=api_schema.messages) @@ -329,7 +375,7 @@ def test_preprocess_sample_with_request_module_name( samplegen.Validator.preprocess_sample(sample, api_schema, rpc) request_module_name = sample.get("request_module_name") - assert request_module_name == 'c_pb2' + assert request_module_name == "c_pb2" def test_get_sample_imports(dummy_api_schema): @@ -344,7 +390,8 @@ def test_get_sample_imports(dummy_api_schema): def test_get_sample_imports_with_request_from_another_package( - dummy_api_schema_with_request_from_another_package): + dummy_api_schema_with_request_from_another_package, +): sample = {"service": "Mollusc", "rpc": "Classify"} api_schema = dummy_api_schema_with_request_from_another_package rpc = DummyMethod(input=api_schema.messages) @@ -354,14 +401,15 @@ def test_get_sample_imports_with_request_from_another_package( assert imports == [ "from a.b import c_pb2 # type: ignore", - "from mollusc.cephalopod import teuthida_v1" + "from mollusc.cephalopod import teuthida_v1", ] def test_define_input_param(dummy_api_schema): v = samplegen.Validator( DummyMethod(input=message_factory("mollusc.squid.mantle_length")), - dummy_api_schema) + dummy_api_schema, + ) v.validate_and_transform_request( types.CallingForm.Request, [ @@ -376,9 +424,10 @@ def test_define_input_param(dummy_api_schema): def test_define_input_param_redefinition(dummy_api_schema): - v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid.mantle_length")), - dummy_api_schema) + v = samplegen.Validator( + DummyMethod(input=message_factory("mollusc.squid.mantle_length")), + dummy_api_schema, + ) v.validate_and_transform_request( types.CallingForm.Request, 
[ @@ -390,43 +439,47 @@ def test_define_input_param_redefinition(dummy_api_schema): ], ) with pytest.raises(types.RedefinedVariable): - v.validate_response( - [{"define": "mantle_length=mantle_length"}]) + v.validate_response([{"define": "mantle_length=mantle_length"}]) def test_print_basic(dummy_api_schema): print_statement = {"print": ["This is a squid"]} samplegen.Validator(DummyMethod(), dummy_api_schema).validate_response( - [print_statement]) + [print_statement] + ) def test_print_fmt_str(dummy_api_schema): print_statement = {"print": ["This is a squid named %s", "$resp.name"]} - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.name")), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("$resp.name")), dummy_api_schema + ) v.validate_response([print_statement]) def test_print_fmt_mismatch(dummy_api_schema): print_statement = {"print": ["This is a squid named %s"]} - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.name")), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("$resp.name")), dummy_api_schema + ) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([print_statement]) def test_print_fmt_mismatch2(dummy_api_schema): print_statement = {"print": ["This is a squid", "$resp.name"]} - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.name")), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("$resp.name")), dummy_api_schema + ) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response([print_statement]) def test_print_undefined_var(dummy_api_schema): print_statement = {"print": ["This mollusc is a %s", "mollusc.type"]} - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.type")), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("$resp.type")), dummy_api_schema + ) with pytest.raises(types.UndefinedVariableReference): 
v.validate_response([print_statement]) @@ -439,8 +492,9 @@ def test_comment(dummy_api_schema): def test_comment_fmt_str(dummy_api_schema): comment = {"comment": ["This is a mollusc of class %s", "$resp.klass"]} - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.klass")), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory("$resp.klass")), dummy_api_schema + ) v.validate_response([comment]) @@ -491,7 +545,10 @@ def test_loop_collection_redefinition(dummy_api_schema): }, ] v = samplegen.Validator( - DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema) + DummyMethod(output=message_factory( + "$resp.molluscs", repeated_iter=[True])), + dummy_api_schema, + ) with pytest.raises(types.RedefinedVariable): v.validate_response(statements) @@ -543,8 +600,11 @@ def test_loop_collection_reserved_loop_var(dummy_api_schema): "body": [{"print": ["Mollusc: %s", "class.name"]}], } } - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory( + "$resp.molluscs", repeated_iter=[True])), + dummy_api_schema, + ) with pytest.raises(types.ReservedVariableName): v.validate_response([loop]) @@ -566,18 +626,16 @@ def test_loop_map(dummy_api_schema): "key": DummyField(), "value": DummyField( message=DummyMessage( - fields={}, - type="MOLLUSC_TYPE" - ) - ) + fields={}, type="MOLLUSC_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response([loop]) @@ -594,9 +652,11 @@ def test_collection_loop_lexical_scope_variable(dummy_api_schema): }, {"define": "cephalopod=m"}, ] - v = 
samplegen.Validator(DummyMethod( - output=message_factory("$resp.molluscs", repeated_iter=[True])), - dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory( + "$resp.molluscs", repeated_iter=[True])), + dummy_api_schema, + ) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -612,9 +672,11 @@ def test_collection_loop_lexical_scope_inline(dummy_api_schema): }, {"define": "cephalopod=squid"}, ] - v = samplegen.Validator(DummyMethod( - output=message_factory("$resp.molluscs", repeated_iter=[True])), - dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory( + "$resp.molluscs", repeated_iter=[True])), + dummy_api_schema, + ) with pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -641,18 +703,16 @@ def test_map_loop_lexical_scope_key(dummy_api_schema): "key": DummyField(), "value": DummyField( message=DummyMessage( - fields={}, - type="MOLLUSC_TYPE" - ) - ) + fields={}, type="MOLLUSC_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -682,18 +742,16 @@ def test_map_loop_lexical_scope_value(dummy_api_schema): "key": DummyField(), "value": DummyField( message=DummyMessage( - fields={}, - type="MOLLUSC_TYPE" - ) - ) + fields={}, type="MOLLUSC_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -723,18 +781,16 @@ def test_map_loop_lexical_scope_inline(dummy_api_schema): "key": DummyField(), "value": DummyField( 
message=DummyMessage( - fields={}, - type="MOLLUSC_TYPE" - ) - ) + fields={}, type="MOLLUSC_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) with pytest.raises(types.UndefinedVariableReference): @@ -759,18 +815,16 @@ def test_loop_map_reserved_key(dummy_api_schema): "key": DummyField(), "value": DummyField( message=DummyMessage( - fields={}, - type="MOLLUSC_TYPE" - ) - ) + fields={}, type="MOLLUSC_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -795,19 +849,16 @@ def test_loop_map_reserved_val(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, - type="CLASS_TYPE" - ) - ) + message=DummyMessage(fields={}, type="CLASS_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -844,19 +895,16 @@ def test_loop_map_no_key(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, - type="CLASS_TYPE" - ) - ) + message=DummyMessage(fields={}, type="CLASS_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - 
type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -878,19 +926,16 @@ def test_loop_map_no_value(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, - type="CLASS_TYPE" - ) - ) + message=DummyMessage(fields={}, type="CLASS_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -898,9 +943,13 @@ def test_loop_map_no_value(dummy_api_schema): def test_loop_map_no_key_or_value(dummy_api_schema): - loop = {"loop": {"map": "$resp.molluscs", - # Need at least one of 'key' or 'value' - "body": [{"print": ["Dead loop"]}]}} + loop = { + "loop": { + "map": "$resp.molluscs", + # Need at least one of 'key' or 'value' + "body": [{"print": ["Dead loop"]}], + } + } OutputType = DummyMessage( fields={ "molluscs": DummyField( @@ -908,19 +957,16 @@ def test_loop_map_no_key_or_value(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, - type="CLASS_TYPE" - ) - ) + message=DummyMessage(fields={}, type="CLASS_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -982,19 +1028,16 @@ def test_loop_map_redefined_key(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, - type="CLASS_TYPE" - ) - ) + message=DummyMessage(fields={}, type="CLASS_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", 
["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -1021,19 +1064,16 @@ def test_loop_map_redefined_value(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, - type="CLASS_TYPE" - ) - ) + message=DummyMessage(fields={}, type="CLASS_TYPE") + ), }, type="MOLLUSCS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True) + options=namedtuple("MessageOptions", ["map_field"])(True), ), - repeated=True + repeated=True, ), }, - type="RESPONSE_TYPE" + type="RESPONSE_TYPE", ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -1053,7 +1093,7 @@ def test_validate_write_file(dummy_api_schema): OutputType = DummyMessage( fields={ "species": DummyField(message=DummyMessage(fields={})), - "photo": DummyField(message=DummyMessage(fields={})) + "photo": DummyField(message=DummyMessage(fields={})), } ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -1061,20 +1101,24 @@ def test_validate_write_file(dummy_api_schema): def test_validate_write_file_fname_fmt(dummy_api_schema): - statements = [{"write_file": - {"filename": ["specimen-%s"], "contents": "$resp.photo"}}] + statements = [ + {"write_file": {"filename": [ + "specimen-%s"], "contents": "$resp.photo"}} + ] v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): v.validate_response(statements) def test_validate_write_file_fname_bad_var(dummy_api_schema): - statements = [{ - "write_file": { - "filename": ["specimen-%s", "squid.species"], - "contents": "$resp.photo", + statements = [ + { + "write_file": { + "filename": ["specimen-%s", "squid.species"], + "contents": "$resp.photo", + } } - }] + ] v = samplegen.Validator(DummyMethod(), dummy_api_schema) with 
pytest.raises(types.UndefinedVariableReference): v.validate_response(statements) @@ -1085,7 +1129,7 @@ def test_validate_write_file_missing_fname(dummy_api_schema): OutputType = DummyMessage( fields={ "filename": DummyField(message=DummyMessage(fields={})), - "photo": DummyField(message=DummyMessage(fields={})) + "photo": DummyField(message=DummyMessage(fields={})), } ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -1094,12 +1138,12 @@ def test_validate_write_file_missing_fname(dummy_api_schema): def test_validate_write_file_missing_contents(dummy_api_schema): - statements = [{"write_file": {"filename": ["specimen-%s", - "$resp.species"]}}] + statements = [ + {"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] OutputType = DummyMessage( fields={ "species": DummyField(message=DummyMessage(fields={})), - "photo": DummyField(message=DummyMessage(fields={})) + "photo": DummyField(message=DummyMessage(fields={})), } ) @@ -1109,16 +1153,18 @@ def test_validate_write_file_missing_contents(dummy_api_schema): def test_validate_write_file_bad_contents_var(dummy_api_schema): - statements = [{ - "write_file": { - "filename": ["specimen-%s", "$resp.species"], - "contents": "squid.photo", + statements = [ + { + "write_file": { + "filename": ["specimen-%s", "$resp.species"], + "contents": "squid.photo", + } } - }] + ] OutputType = DummyMessage( fields={ "species": DummyField(message=DummyMessage(fields={})), - "photo": DummyField(message=DummyMessage(fields={})) + "photo": DummyField(message=DummyMessage(fields={})), } ) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) @@ -1148,17 +1194,20 @@ def test_validate_request_basic(dummy_api_schema): message=DummyMessage( fields={ "mantle_length": DummyField( - message=DummyMessage(type="LENGTH_TYPE")), + message=DummyMessage(type="LENGTH_TYPE") + ), "mantle_mass": DummyField( - message=DummyMessage(type="MASS_TYPE")), + message=DummyMessage(type="MASS_TYPE") + ), 
"num_tentacles": DummyField( - message=DummyMessage(type="MASS_TYPE")) + message=DummyMessage(type="MASS_TYPE") + ), }, - type="SQUID_TYPE" + type="SQUID_TYPE", ) ) }, - type="REQUEST_TYPE" + type="REQUEST_TYPE", ) v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) @@ -1175,14 +1224,16 @@ def test_validate_request_basic(dummy_api_schema): samplegen.TransformedRequest( base="squid", body=[ - samplegen.AttributeRequestSetup(field="mantle_length", - value='"100 \\"cm"'), - samplegen.AttributeRequestSetup(field="mantle_mass", - value='"10 kg"'), - samplegen.AttributeRequestSetup(field="num_tentacles", - value=10) + samplegen.AttributeRequestSetup( + field="mantle_length", value='"100 \\"cm"' + ), + samplegen.AttributeRequestSetup( + field="mantle_mass", value='"10 kg"' + ), + samplegen.AttributeRequestSetup( + field="num_tentacles", value=10), ], - single=None + single=None, ) ] ) @@ -1195,38 +1246,37 @@ def test_validate_request_no_field_parameter(dummy_api_schema): v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - types.CallingForm.Request, [{"squid": "humboldt", - "value": "teuthida"}] + types.CallingForm.Request, [ + {"squid": "humboldt", "value": "teuthida"}] ) def test_validate_request_no_such_attribute(dummy_api_schema): - v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid.mantle")), - dummy_api_schema) + v = samplegen.Validator( + DummyMethod(input=message_factory( + "mollusc.squid.mantle")), dummy_api_schema + ) with pytest.raises(types.BadAttributeLookup): v.validate_and_transform_request( - types.CallingForm.Request, - [{"field": "clam.shell", "value": "20"}] + types.CallingForm.Request, [{"field": "clam.shell", "value": "20"}] ) def test_validate_request_top_level_field(dummy_api_schema): - v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid")), - dummy_api_schema) + v = samplegen.Validator( + 
DummyMethod(input=message_factory("mollusc.squid")), dummy_api_schema + ) actual = v.validate_and_transform_request( - types.CallingForm.Request, - [{"field": "squid", "value": "humboldt"}] + types.CallingForm.Request, [{"field": "squid", "value": "humboldt"}] ) expected = samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="squid", - body=None, - single=samplegen.AttributeRequestSetup( - value='"humboldt"' - )) + samplegen.TransformedRequest( + base="squid", + body=None, + single=samplegen.AttributeRequestSetup(value='"humboldt"'), + ) ] ) @@ -1234,14 +1284,12 @@ def test_validate_request_top_level_field(dummy_api_schema): def test_validate_request_missing_keyword(dummy_api_schema, kword="field"): - v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid")), - dummy_api_schema) + v = samplegen.Validator( + DummyMethod(input=message_factory("mollusc.squid")), dummy_api_schema + ) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - types.CallingForm.Request, - [{kword: "squid"}] - ) + types.CallingForm.Request, [{kword: "squid"}]) def test_validate_request_missing_value(dummy_api_schema): @@ -1250,33 +1298,38 @@ def test_validate_request_missing_value(dummy_api_schema): def test_validate_request_spurious_kword(dummy_api_schema): v = samplegen.Validator( - DummyMethod(input=message_factory("mollusc.squid")), - dummy_api_schema) + DummyMethod(input=message_factory("mollusc.squid")), dummy_api_schema + ) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.Request, - [{"field": "mollusc.squid", "value": "humboldt", "order": "teuthida"}] + [{"field": "mollusc.squid", "value": "humboldt", "order": "teuthida"}], ) def test_validate_request_unknown_field_type(dummy_api_schema): - v = samplegen.Validator(DummyMethod( - input=DummyMessage(fields={"squid": DummyField()})), dummy_api_schema) + v = samplegen.Validator( + 
DummyMethod(input=DummyMessage(fields={"squid": DummyField()})), + dummy_api_schema, + ) with pytest.raises(TypeError): v.validate_and_transform_request( - types.CallingForm.Request, - [{"field": "squid", "value": "humboldt"}] + types.CallingForm.Request, [ + {"field": "squid", "value": "humboldt"}] ) def test_validate_request_duplicate_top_level_fields(dummy_api_schema): - v = samplegen.Validator(DummyMethod( - input=message_factory("mollusc.squid")), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(input=message_factory("mollusc.squid")), dummy_api_schema + ) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( types.CallingForm.Request, - [{"field": "squid", "value": "humboldt"}, - {"field": "squid", "value": "bobtail"}] + [ + {"field": "squid", "value": "humboldt"}, + {"field": "squid", "value": "bobtail"}, + ], ) @@ -1285,30 +1338,32 @@ def test_validate_request_multiple_arguments(dummy_api_schema): fields={ "squid": DummyField( message=DummyMessage( - fields={"mantle_length": DummyField( - message=DummyMessage(type="LENGTH_TYPE"))}, - type="SQUID_TYPE" + fields={ + "mantle_length": DummyField( + message=DummyMessage(type="LENGTH_TYPE") + ) + }, + type="SQUID_TYPE", ) ), "clam": DummyField( message=DummyMessage( - fields={"shell_mass": DummyField( - message=DummyMessage(type="MASS_TYPE"))}, - type="CLAM_TYPE" + fields={ + "shell_mass": DummyField(message=DummyMessage(type="MASS_TYPE")) + }, + type="CLAM_TYPE", ) ), }, - type="REQUEST_TYPE" + type="REQUEST_TYPE", ) v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, [ - { - "field": "squid.mantle_length", - "value": "100 cm", "value_is_file": True - }, + {"field": "squid.mantle_length", + "value": "100 cm", "value_is_file": True}, { "field": "clam.shell_mass", "value": "100 kg", @@ -1320,19 +1375,23 @@ def test_validate_request_multiple_arguments(dummy_api_schema): request_list=[ 
samplegen.TransformedRequest( base="squid", - body=[samplegen.AttributeRequestSetup( - field="mantle_length", - value='"100 cm"', - value_is_file=True)], - single=None + body=[ + samplegen.AttributeRequestSetup( + field="mantle_length", value='"100 cm"', value_is_file=True + ) + ], + single=None, ), samplegen.TransformedRequest( base="clam", - body=[samplegen.AttributeRequestSetup( - field="shell_mass", - value='"100 kg"', - comment="Clams can be large")], - single=None + body=[ + samplegen.AttributeRequestSetup( + field="shell_mass", + value='"100 kg"', + comment="Clams can be large", + ) + ], + single=None, ), ] ) @@ -1345,20 +1404,26 @@ def test_validate_request_duplicate_input_param(dummy_api_schema): fields={ "squid": DummyField( message=DummyMessage( - fields={"mantle_mass": DummyField( - message=DummyMessage(type="MASS_TYPE"))}, - type="SQUID_TYPE" + fields={ + "mantle_mass": DummyField( + message=DummyMessage(type="MASS_TYPE") + ) + }, + type="SQUID_TYPE", ) ), "clam": DummyField( message=DummyMessage( - fields={"mantle_mass": DummyField( - message=DummyMessage(type="MASS_TYPE"))}, - type="CLAM_TYPE" + fields={ + "mantle_mass": DummyField( + message=DummyMessage(type="MASS_TYPE") + ) + }, + type="CLAM_TYPE", ) ), }, - type="REQUEST_TYPE" + type="REQUEST_TYPE", ) v = samplegen.Validator(DummyMethod(input=input_type), dummy_api_schema) @@ -1592,20 +1657,22 @@ def test_validate_expression_map_lookup(dummy_api_schema): message=DummyMessage( fields={ "mantle": DummyField( - message=DummyMessage(type="MANTLE_TYPE", - fields={}), + message=DummyMessage( + type="MANTLE_TYPE", fields={} + ), ) }, - type="CEPHALOPOD_TYPE" + type="CEPHALOPOD_TYPE", ) ), }, type="CEPHALOPODS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True)), + options=namedtuple("MessageOptions", ["map_field"])(True), + ), repeated=True, ) }, - type="MOLLUSC_TYPE" + type="MOLLUSC_TYPE", ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method, dummy_api_schema) @@ 
-1623,20 +1690,22 @@ def test_validate_expression_map_lookup_terminal_lookup(dummy_api_schema): message=DummyMessage( fields={ "mantle": DummyField( - message=DummyMessage(type="MANTLE_TYPE", - fields={}), + message=DummyMessage( + type="MANTLE_TYPE", fields={} + ), ) }, - type="CEPHALOPOD_TYPE" + type="CEPHALOPOD_TYPE", ) ), }, type="CEPHALOPODS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True)), + options=namedtuple("MessageOptions", ["map_field"])(True), + ), repeated=True, ) }, - type="MOLLUSC_TYPE" + type="MOLLUSC_TYPE", ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method, dummy_api_schema) @@ -1654,21 +1723,24 @@ def test_validate_expression_mapped_no_map_field(dummy_api_schema): message=DummyMessage( fields={ "mantle": DummyField( - message=DummyMessage(type="MANTLE_TYPE", - fields={}), + message=DummyMessage( + type="MANTLE_TYPE", fields={} + ), ) }, - type="CEPHALOPOD_TYPE" + type="CEPHALOPOD_TYPE", ) - )}, + ), + }, type="CEPHALOPODS_TYPE", # The map_field attribute in the options indicates whether # a message type is 'really' a map or just looks like one. - options=namedtuple("MessageOptions", ["map_field"])(False)), + options=namedtuple("MessageOptions", ["map_field"])(False), + ), repeated=True, ) }, - type="MOLLUSC_TYPE" + type="MOLLUSC_TYPE", ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method, dummy_api_schema) @@ -1684,11 +1756,12 @@ def test_validate_expression_mapped_no_value(dummy_api_schema): # Maps need 'key' AND 'value' attributes. 
fields={"key": DummyField()}, type="CEPHALOPODS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True)), + options=namedtuple("MessageOptions", ["map_field"])(True), + ), repeated=True, ) }, - type="MOLLUSC_TYPE" + type="MOLLUSC_TYPE", ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method, dummy_api_schema) @@ -1707,11 +1780,12 @@ def test_validate_expression_mapped_no_message(dummy_api_schema): "value": DummyField(), }, type="CEPHALOPODS_TYPE", - options=namedtuple("MessageOptions", ["map_field"])(True)), + options=namedtuple("MessageOptions", ["map_field"])(True), + ), repeated=True, ) }, - type="MOLLUSC_TYPE" + type="MOLLUSC_TYPE", ) method = DummyMethod(output=OutputType) v = samplegen.Validator(method, dummy_api_schema) @@ -1755,15 +1829,18 @@ def test_validate_request_enum(dummy_api_schema): v = samplegen.Validator(DummyMethod(input=request_type), dummy_api_schema) actual = v.validate_and_transform_request( types.CallingForm.Request, - [{"field": "cephalopod.subclass", "value": "COLEOIDEA"}] + [{"field": "cephalopod.subclass", "value": "COLEOIDEA"}], ) expected = samplegen.FullRequest( request_list=[ samplegen.TransformedRequest( "cephalopod", - body=[samplegen.AttributeRequestSetup(field="subclass", - value='"COLEOIDEA"')], - single=None + body=[ + samplegen.AttributeRequestSetup( + field="subclass", value='"COLEOIDEA"' + ) + ], + single=None, ) ] ) @@ -1776,26 +1853,34 @@ def test_validate_request_enum_top_level(dummy_api_schema): v = samplegen.Validator(DummyMethod(input=request_type), dummy_api_schema) actual = v.validate_and_transform_request( - types.CallingForm.Request, - [{"field": "subclass", "value": "COLEOIDEA"}] + types.CallingForm.Request, [ + {"field": "subclass", "value": "COLEOIDEA"}] + ) + expected = samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest( + "subclass", + single=samplegen.AttributeRequestSetup(value='"COLEOIDEA"'), + body=None, + ) + ] ) - expected = 
samplegen.FullRequest(request_list=[samplegen.TransformedRequest( - "subclass", - single=samplegen.AttributeRequestSetup(value='"COLEOIDEA"'), - body=None)]) assert actual == expected def test_validate_request_enum_invalid_value(dummy_api_schema): enum = enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) - v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), - input=request_type), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory( + "mollusc_result"), input=request_type), + dummy_api_schema, + ) with pytest.raises(types.InvalidEnumVariant): v.validate_and_transform_request( types.CallingForm.Request, # Heterodonta are bivalves, not cephalopods - [{"field": "cephalopod.subclass", "value": "HETERODONTA"}] + [{"field": "cephalopod.subclass", "value": "HETERODONTA"}], ) @@ -1809,25 +1894,28 @@ def test_validate_request_enum_not_last_attr(dummy_api_schema): name="subclass", enum=enum_factory( "subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"] - ) + ), ) - ] + ], ) # request_type = message_factory("mollusc.subclass", enum=enum) - v = samplegen.Validator(DummyMethod(output=message_factory("mollusc_result"), - input=request_type), dummy_api_schema) + v = samplegen.Validator( + DummyMethod(output=message_factory( + "mollusc_result"), input=request_type), + dummy_api_schema, + ) with pytest.raises(types.NonTerminalPrimitiveOrEnum): v.validate_and_transform_request( types.CallingForm.Request, - [{"field": "subclass.order", "value": "COLEOIDEA"}] + [{"field": "subclass.order", "value": "COLEOIDEA"}], ) def test_validate_request_resource_name(): request = [ {"field": "taxon%kingdom", "value": "animalia"}, - {"field": "taxon%phylum", "value": "mollusca", "input_parameter": "phylum"} + {"field": "taxon%phylum", "value": "mollusca", "input_parameter": "phylum"}, ] resource_type = "taxonomy.google.com/Linnaean" @@ -1851,18 +1939,18 
@@ def test_validate_request_resource_name(): api_schema = DummyApiSchema( messages={ k: v - for k, v in enumerate([ - request_descriptor, - phylum_descriptor, - ]) + for k, v in enumerate( + [ + request_descriptor, + phylum_descriptor, + ] + ) }, services={ "Mollusc": DummyService( methods={}, client_name="MolluscClient", - resource_messages_dict={ - resource_type: phylum_descriptor - } + resource_messages_dict={resource_type: phylum_descriptor}, ) }, ) @@ -1870,9 +1958,7 @@ def test_validate_request_resource_name(): v = samplegen.Validator(method=method, api_schema=api_schema) actual = v.validate_and_transform_request( - types.CallingForm.Request, - request - ) + types.CallingForm.Request, request) expected = samplegen.FullRequest( request_list=[ @@ -1890,7 +1976,7 @@ def test_validate_request_resource_name(): value="mollusca", input_parameter="phylum", ), - ] + ], ) ] ) @@ -1904,23 +1990,20 @@ def test_validate_request_primitive_field(dummy_api_schema): request = [{"field": "species", "value": "Architeuthis dux"}] v = samplegen.Validator( - DummyMethod( - output=message_factory("mollusc_result"), - input=request_type - ), - dummy_api_schema + DummyMethod(output=message_factory( + "mollusc_result"), input=request_type), + dummy_api_schema, ) - actual = v.validate_and_transform_request(types.CallingForm.Request, - request) + actual = v.validate_and_transform_request( + types.CallingForm.Request, request) expected = samplegen.FullRequest( request_list=[ samplegen.TransformedRequest( base="species", body=None, single=samplegen.AttributeRequestSetup( - value='"Architeuthis dux"' - ) + value='"Architeuthis dux"'), ) ] ) @@ -1945,23 +2028,19 @@ def test_validate_request_resource_name_mixed(request=None): name="Taxon", fields=[ make_field( - name="domain", - message=make_message(name="Domain") + name="domain", message=make_message(name="Domain") ) - ] - ) + ], + ), ) - ] + ], ), ), - api_schema=None + api_schema=None, ) with pytest.raises(types.ResourceRequestMismatch): 
- v.validate_and_transform_request( - types.CallingForm.Request, - request - ) + v.validate_and_transform_request(types.CallingForm.Request, request) def test_validate_request_resource_name_mixed_reversed(): @@ -1974,9 +2053,7 @@ def test_validate_request_resource_name_mixed_reversed(): def test_validate_request_no_such_attr(dummy_api_schema): - request = [ - {"field": "taxon%kingdom", "value": "animalia"} - ] + request = [{"field": "taxon%kingdom", "value": "animalia"}] method = DummyMethod(input=make_message(name="Request")) v = samplegen.Validator(method, dummy_api_schema) @@ -1985,9 +2062,7 @@ def test_validate_request_no_such_attr(dummy_api_schema): def test_validate_request_no_such_resource(): - request = [ - {"field": "taxon%kingdom", "value": "animalia"} - ] + request = [{"field": "taxon%kingdom", "value": "animalia"}] resource_type = "taxonomy.google.com/Linnaean" taxon_field = make_field(name="taxon") rr = taxon_field.options.Extensions[resource_pb2.resource_reference] @@ -1999,9 +2074,7 @@ def test_validate_request_no_such_resource(): messages={k: v for k, v in enumerate([request_descriptor])}, services={ "Mollusc": DummyService( - methods={}, - client_name="MolluscClient", - resource_messages_dict={} + methods={}, client_name="MolluscClient", resource_messages_dict={} ) }, ) @@ -2034,18 +2107,18 @@ def test_validate_request_no_such_pattern(): api_schema = DummyApiSchema( messages={ k: v - for k, v in enumerate([ - request_descriptor, - phylum_descriptor, - ]) + for k, v in enumerate( + [ + request_descriptor, + phylum_descriptor, + ] + ) }, services={ "Mollusc": DummyService( methods={}, client_name="MolluscClient", - resource_messages_dict={ - resource_type: phylum_descriptor - } + resource_messages_dict={resource_type: phylum_descriptor}, ) }, ) @@ -2061,16 +2134,13 @@ def test_validate_request_non_terminal_primitive_field(dummy_api_schema): request = [{"field": "species.nomenclature", "value": "Architeuthis dux"}] v = samplegen.Validator( - 
DummyMethod( - output=message_factory("mollusc_result"), - input=request_type - ), - dummy_api_schema + DummyMethod(output=message_factory( + "mollusc_result"), input=request_type), + dummy_api_schema, ) with pytest.raises(types.NonTerminalPrimitiveOrEnum): - v.validate_and_transform_request(types.CallingForm.Request, - request) + v.validate_and_transform_request(types.CallingForm.Request, request) def test_parse_invalid_handwritten_spec(fs): @@ -2092,7 +2162,43 @@ def test_parse_invalid_handwritten_spec(fs): list(samplegen.parse_handwritten_specs(sample_configs=[fpath])) -def test_generate_sample_spec_basic(): +@pytest.mark.parametrize( + "opts_transport,expected", + [ + ( + "transport=grpc+rest", + [ + { + "rpc": "Ramshorn", + "transport": "grpc", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn", + }, + { + "rpc": "Ramshorn", + "transport": "grpc-async", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_async", + "description": "Snippet for ramshorn", + }, + ], + ), + ( + "transport=rest", + [ + { + "rpc": "Ramshorn", + "transport": "rest", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn", + } + ], + ), + ], +) +def test_generate_sample_spec_basic(opts_transport, expected): service_options = descriptor_pb2.ServiceOptions() service_options.Extensions[client_pb2.default_host] = "example.googleapis.com" @@ -2125,35 +2231,13 @@ def test_generate_sample_spec_basic(): ) ] ) - opts = Options.build("transport=grpc+rest") - specs = sorted(samplegen.generate_sample_specs( - api_schema, opts=opts), key=lambda x: x["transport"]) + opts = Options.build(opts_transport) + specs = sorted( + samplegen.generate_sample_specs(api_schema, opts=opts), + key=lambda x: x["transport"], + ) specs.sort(key=lambda x: x["transport"]) - assert len(specs) == 3 - - 
assert specs[0] == { - "rpc": "Ramshorn", - "transport": "grpc", - "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_v1_generated_Squid_Ramshorn_sync", - "description": "Snippet for ramshorn" - } - - assert specs[1] == { - "rpc": "Ramshorn", - "transport": "grpc-async", - "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_v1_generated_Squid_Ramshorn_async", - "description": "Snippet for ramshorn" - } - - assert specs[2] == { - "rpc": "Ramshorn", - "transport": "rest", - "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_v1_generated_Squid_Ramshorn_rest", - "description": "Snippet for ramshorn" - } + assert specs == expected def test__set_sample_metadata_server_streaming(): @@ -2164,7 +2248,7 @@ def test__set_sample_metadata_server_streaming(): "region_tag": "example_v1_generated_Squid_Ramshorn_sync", "description": "Snippet for ramshorn", "module_namespace": ["animalia"], - "module_name": "mollusca_v1" + "module_name": "mollusca_v1", } service_options = descriptor_pb2.ServiceOptions() @@ -2204,31 +2288,37 @@ def test__set_sample_metadata_server_streaming(): snippet_metadata = samplegen._fill_sample_metadata(sample, api_schema) assert json_format.MessageToDict(snippet_metadata) == { - 'regionTag': 'example_v1_generated_Squid_Ramshorn_sync', - 'description': 'Sample for Ramshorn', - 'language': 'PYTHON', - 'clientMethod': { - 'shortName': 'ramshorn', - 'fullName': 'animalia.mollusca_v1.SquidClient.ramshorn', - 'parameters': [ - {'type': 'animalia.mollusca_v1.types.MolluscRequest', 'name': 'request'}, - {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, - {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + "regionTag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Sample for Ramshorn", + "language": "PYTHON", + "clientMethod": { + "shortName": "ramshorn", + "fullName": "animalia.mollusca_v1.SquidClient.ramshorn", + "parameters": [ + { + "type": 
"animalia.mollusca_v1.types.MolluscRequest", + "name": "request", + }, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, str]", "name": "metadata"}, ], - 'resultType': 'Iterable[animalia.mollusca_v1.types.Mollusc]', - 'client': { - 'shortName': 'SquidClient', - 'fullName': 'animalia.mollusca_v1.SquidClient' # FIX THE FULL NAME + "resultType": "Iterable[animalia.mollusca_v1.types.Mollusc]", + "client": { + "shortName": "SquidClient", + "fullName": "animalia.mollusca_v1.SquidClient", # FIX THE FULL NAME + }, + "method": { + "shortName": "Ramshorn", + "fullName": "animalia.mollusca.v1.Squid.Ramshorn", + "service": { + "shortName": "Squid", + "fullName": "animalia.mollusca.v1.Squid", + }, }, - 'method': { - 'shortName': 'Ramshorn', - 'fullName': 'animalia.mollusca.v1.Squid.Ramshorn', - 'service': {'shortName': 'Squid', 'fullName': 'animalia.mollusca.v1.Squid'} - } }, - 'canonical': True, - 'origin': 'API_DEFINITION' + "canonical": True, + "origin": "API_DEFINITION", } @@ -2240,7 +2330,7 @@ def test__set_sample_metadata_client_streaming(): "region_tag": "example_v1_generated_Squid_Ramshorn_sync", "description": "Snippet for ramshorn", "module_namespace": ["animalia"], - "module_name": "mollusca_v1" + "module_name": "mollusca_v1", } service_options = descriptor_pb2.ServiceOptions() @@ -2282,39 +2372,48 @@ def test__set_sample_metadata_client_streaming(): print(json_format.MessageToDict(snippet_metadata)) assert json_format.MessageToDict(snippet_metadata) == { - 'regionTag': 'example_v1_generated_Squid_Ramshorn_sync', - 'description': 'Sample for Ramshorn', - 'language': 'PYTHON', - 'clientMethod': { - 'shortName': 'ramshorn', - 'fullName': 'animalia.mollusca_v1.SquidClient.ramshorn', - 'parameters': [ - {'type': 'Iterator[animalia.mollusca_v1.types.MolluscRequest]', - 'name': 'requests'}, - {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, - {'type': 'float', 'name': 'timeout'}, - 
{'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + "regionTag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Sample for Ramshorn", + "language": "PYTHON", + "clientMethod": { + "shortName": "ramshorn", + "fullName": "animalia.mollusca_v1.SquidClient.ramshorn", + "parameters": [ + { + "type": "Iterator[animalia.mollusca_v1.types.MolluscRequest]", + "name": "requests", + }, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, str]", "name": "metadata"}, ], - 'resultType': 'animalia.mollusca_v1.types.Mollusc', - 'client': { - 'shortName': 'SquidClient', - 'fullName': 'animalia.mollusca_v1.SquidClient' + "resultType": "animalia.mollusca_v1.types.Mollusc", + "client": { + "shortName": "SquidClient", + "fullName": "animalia.mollusca_v1.SquidClient", + }, + "method": { + "shortName": "Ramshorn", + "fullName": "animalia.mollusca.v1.Squid.Ramshorn", + "service": { + "shortName": "Squid", + "fullName": "animalia.mollusca.v1.Squid", + }, }, - 'method': { - 'shortName': 'Ramshorn', - 'fullName': 'animalia.mollusca.v1.Squid.Ramshorn', - 'service': {'shortName': 'Squid', 'fullName': 'animalia.mollusca.v1.Squid'} - } }, - 'canonical': True, - 'origin': 'API_DEFINITION' + "canonical": True, + "origin": "API_DEFINITION", } -def make_message(name: str, package: str = 'animalia.mollusca.v1', module: str = 'cephalopoda', - fields: Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, - options: descriptor_pb2.MethodOptions = None, - ) -> wrappers.MessageType: +def make_message( + name: str, + package: str = "animalia.mollusca.v1", + module: str = "cephalopoda", + fields: Sequence[wrappers.Field] = (), + meta: metadata.Metadata = None, + options: descriptor_pb2.MethodOptions = None, +) -> wrappers.MessageType: message_pb = descriptor_pb2.DescriptorProto( name=name, field=[i.field_pb for i in fields], @@ -2325,21 +2424,24 @@ def make_message(name: str, package: str = 
'animalia.mollusca.v1', module: str = fields=OrderedDict((i.name, i) for i in fields), nested_messages={}, nested_enums={}, - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), + meta=meta + or metadata.Metadata( + address=metadata.Address( + name=name, + package=tuple(package.split(".")), + module=module, + ) + ), ) # Borrowed from test_field.py def make_field(*, message=None, enum=None, **kwargs) -> wrappers.Field: T = descriptor_pb2.FieldDescriptorProto.Type - kwargs.setdefault('name', 'my_field') - kwargs.setdefault('number', 1) - kwargs.setdefault('type', T.Value('TYPE_BOOL')) - if isinstance(kwargs['type'], str): - kwargs['type'] = T.Value(kwargs['type']) + kwargs.setdefault("name", "my_field") + kwargs.setdefault("number", 1) + kwargs.setdefault("type", T.Value("TYPE_BOOL")) + if isinstance(kwargs["type"], str): + kwargs["type"] = T.Value(kwargs["type"]) field_pb = descriptor_pb2.FieldDescriptorProto(**kwargs) return wrappers.Field(field_pb=field_pb, message=message, enum=enum) From 6a0ab64aac4b92b639a2c85ca5dbcc1095588499 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 13:11:55 -0400 Subject: [PATCH 1001/1339] build(deps): bump requests from 2.28.1 to 2.31.0 in /synthtool/gcp/templates/python_library/.kokoro (#1662) Source-Link: https://github.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 3 ++- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index b8edda51cf46..32b3c486591a 
100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 66a2172a76a8..3b8d7ee81848 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From a5e5dbc1230417678f32828f48498121bdec2516 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Jun 2023 13:30:47 +0200 Subject: [PATCH 1002/1339] chore(deps): update all dependencies (#1654) * chore(deps): update all dependencies * revert * allow newer versions of libcst --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- packages/gapic-generator/setup.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 496dc72aa5fa..d114ba068342 100644 --- 
a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,12 +3,12 @@ google-api-core==2.11.0 googleapis-common-protos==1.59.0 jinja2==3.1.2 MarkupSafe==2.1.2 -protobuf==4.22.3 +protobuf==4.23.2 pypandoc==1.11 PyYAML==6.0 -setuptools==67.6.1 +setuptools==67.8.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.21.0 -libcst==0.4.9 +libcst==1.0.0 inflection==0.5.1 \ No newline at end of file diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d1f5c4259850..2dcd3fcdc8fd 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -34,7 +34,7 @@ "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", - "libcst >= 0.4.9, < 1.0.0dev", + "libcst >= 0.4.9, < 2.0.0dev", "inflection >= 0.5.1, < 1.0.0dev", ] From 414f310e173e83da98eb292e545fbfa83b4e59db Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 18:14:03 -0400 Subject: [PATCH 1003/1339] build(deps): bump cryptography from 39.0.1 to 41.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#1669) Source-Link: https://github.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 42 +++++++++---------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 32b3c486591a..02a4dedced74 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 3b8d7ee81848..c7929db6d152 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - 
--hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + 
--hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From b887c7336a6d1c7146c755d49582fbade1d5004f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 13 Jun 2023 19:56:15 +0200 Subject: [PATCH 1004/1339] chore(deps): update all dependencies (#1670) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d114ba068342..7f4136d1eeaf 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,8 +1,8 @@ click==8.1.3 google-api-core==2.11.0 -googleapis-common-protos==1.59.0 +googleapis-common-protos==1.59.1 jinja2==3.1.2 -MarkupSafe==2.1.2 +MarkupSafe==2.1.3 protobuf==4.23.2 pypandoc==1.11 PyYAML==6.0 @@ -10,5 +10,5 @@ setuptools==67.8.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.2 pytest-asyncio==0.21.0 -libcst==1.0.0 +libcst==1.0.1 inflection==0.5.1 \ No newline at end of file From 1d287750caf46888ac45c33abd9a23983f706807 Mon Sep 17 00:00:00 2001 From: Yu-Han Liu Date: Thu, 15 Jun 2023 12:18:44 -0700 Subject: [PATCH 1005/1339] fix: mock return_value should not populate oneof message fields (#1668) * fix: mock return_value should not populate oneof message fields --- .../%name_%version/%sub/test_%service.py.j2 | 20 ++++++--- .../gapic/%name_%version/%sub/test_macros.j2 | 34 +++++++++------ .../test_oneof_imported_response.proto | 41 +++++++++++++++++++ .../logging_v2/test_config_service_v2.py | 6 --- 4 files changed, 76 insertions(+), 25 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/test_oneof_imported_response.proto 
diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c174d37e8849..3ad8ef76891c 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -545,11 +545,15 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} - {# This is a hack to only pick one field #} + {# This is a hack to only pick one field #} {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} + {# Take the first non-message oneof field. In the corner case when all the fields of a oneof are messages, the oneof will not be populated #} + {# Use an outer if-statement here because `first` raises an error if called on an empty sequence #} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} {{ field.name }}={{ field.mock_value }}, {% endwith %} + {% endif %} {% endfor %} ) {% endif %} @@ -959,11 +963,13 @@ def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} - {# This is a hack to only pick one field #} + {# This is a hack to only pick one field #} {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} {{ field.name }}={{ field.mock_value }}, {% endwith %} + {% endif %} {% endfor %} ) {% endif %} @@ -1029,11 +1035,13 @@ def test_{{ 
method.name|snake_case }}_rest(request_type): {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} - {# This is a hack to only pick one field #} + {# This is a hack to only pick one field #} {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} {{ field.name }}={{ field.mock_value }}, {% endwith %} + {% endif %} {% endfor %} ) {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 952751323b12..2d895cab03e4 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -34,11 +34,15 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} - {# This is a hack to only pick one field #} + {# This is a hack to only pick one field #} {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} + {# Take the first non-message oneof field. 
In the corner case when all the fields of a oneof are messages, the oneof will not be populated #} + {# Use an outer if-statement here because `first` raises an error if called on an empty sequence #} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} {{ field.name }}={{ field.mock_value }}, {% endwith %} + {% endif %} {% endfor %} ) {% endif %} @@ -903,11 +907,13 @@ def test_{{ method_name }}_rest(request_type): {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.extended_lro.operation_type.oneof_fields().values() %} - {% with field = oneof_fields[0] %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} {% endfor %} ) {% else %} @@ -916,12 +922,14 @@ def test_{{ method_name }}_rest(request_type): {% if not field.oneof or field.proto3_optional %} {{ field.name }}={{ field.mock_value }}, {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% with field = oneof_fields[0] %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} ) {% endif %} diff --git 
a/packages/gapic-generator/tests/fragments/test_oneof_imported_response.proto b/packages/gapic-generator/tests/fragments/test_oneof_imported_response.proto new file mode 100644 index 000000000000..53f2a2cbd2e7 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_oneof_imported_response.proto @@ -0,0 +1,41 @@ +// Copyright (C) 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "import.proto"; + +service MyService { + option (google.api.default_host) = "my.example.com"; + rpc MyMethod(MethodRequest) returns (MethodResponse) {} +} + +message MethodRequest { + string input = 1; +} + +message Container { + Import import = 1; +} + +message MethodResponse { + string parent = 1; + oneof format { + Container container = 2; + string name = 3; + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index a11636125c6e..079e9cd542dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -3248,7 +3248,6 @@ def test_get_sink(request_type, transport: str = 'grpc'): 
output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity='writer_identity_value', include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), ) response = client.get_sink(request) @@ -3509,7 +3508,6 @@ def test_create_sink(request_type, transport: str = 'grpc'): output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity='writer_identity_value', include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), ) response = client.create_sink(request) @@ -3780,7 +3778,6 @@ def test_update_sink(request_type, transport: str = 'grpc'): output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity='writer_identity_value', include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), ) response = client.update_sink(request) @@ -8546,7 +8543,6 @@ def test_get_sink_rest(request_type): output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity='writer_identity_value', include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), ) # Wrap the value into a proper Response obj @@ -8794,7 +8790,6 @@ def test_create_sink_rest(request_type): output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity='writer_identity_value', include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), ) # Wrap the value into a proper Response obj @@ -9048,7 +9043,6 @@ def test_update_sink_rest(request_type): output_version_format=logging_config.LogSink.VersionFormat.V2, writer_identity='writer_identity_value', include_children=True, - bigquery_options=logging_config.BigQueryOptions(use_partitioned_tables=True), ) # Wrap the value into a proper Response obj From 37c98e13fb954cfe45bcf005dd7718d431eb74e8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 15 Jun 2023 16:20:06 -0400 Subject: 
[PATCH 1006/1339] fix: add `exec` as a reserved word (#1673) `exec` was removed in #1575 --- packages/gapic-generator/gapic/utils/reserved_names.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index c17e5ea3240c..fe7b2218bba4 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -86,6 +86,7 @@ "None", "try", "type", + "exec", # Comes from Protoplus "ignore_unknown_fields" ] From c09bc76f08d6e289ac8f301fdb8f4f872e33a278 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 15 Jun 2023 17:16:02 -0400 Subject: [PATCH 1007/1339] chore: set initial gapic version to 0.0.0 (#1665) Co-authored-by: Victor Chudnovsky --- .../gapic/templates/%namespace/%name/gapic_version.py.j2 | 4 ++-- .../templates/%namespace/%name_%version/gapic_version.py.j2 | 4 ++-- .../goldens/asset/google/cloud/asset/gapic_version.py | 2 +- .../goldens/asset/google/cloud/asset_v1/gapic_version.py | 2 +- .../credentials/google/iam/credentials/gapic_version.py | 2 +- .../credentials/google/iam/credentials_v1/gapic_version.py | 2 +- .../goldens/eventarc/google/cloud/eventarc/gapic_version.py | 2 +- .../eventarc/google/cloud/eventarc_v1/gapic_version.py | 2 +- .../goldens/logging/google/cloud/logging/gapic_version.py | 2 +- .../goldens/logging/google/cloud/logging_v2/gapic_version.py | 2 +- .../goldens/redis/google/cloud/redis/gapic_version.py | 2 +- .../goldens/redis/google/cloud/redis_v1/gapic_version.py | 2 +- 12 files changed, 14 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 index d6db9873846b..b3243d8eff10 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 @@ -1,5 +1,5 @@ {% extends '_base.py.j2' %} {% block content %} -__version__ = "0.1.0" # {x-release-please-version} -{% endblock %} \ No newline at end of file +__version__ = "0.0.0" # {x-release-please-version} +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 index d6db9873846b..b3243d8eff10 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 @@ -1,5 +1,5 @@ {% extends '_base.py.j2' %} {% block content %} -__version__ = "0.1.0" # {x-release-please-version} -{% endblock %} \ No newline at end of file +__version__ = "0.0.0" # {x-release-please-version} +{% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py index 405b1cebcf15..30274cc6e9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.1.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} From 247b768df12ba4573be5c1c94cf33184f1056860 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 19 Jun 2023 20:05:11 +0200 Subject: [PATCH 1008/1339] chore(deps): update all dependencies (#1674) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7f4136d1eeaf..4389d88fd5b8 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,9 +1,9 @@ click==8.1.3 -google-api-core==2.11.0 +google-api-core==2.11.1 googleapis-common-protos==1.59.1 jinja2==3.1.2 MarkupSafe==2.1.3 -protobuf==4.23.2 +protobuf==4.23.3 pypandoc==1.11 PyYAML==6.0 setuptools==67.8.0 From 338ecbaea63a4fe06bc6419bc14f786c6e204f22 Mon Sep 17 00:00:00 2001 From: henribru <6639509+henribru@users.noreply.github.com> Date: Mon, 19 Jun 2023 20:29:48 +0200 Subject: [PATCH 1009/1339] fix: add async context manager return types (#1660) Co-authored-by: Anthonios Partheniou --- .../%name_%version/%sub/services/%service/async_client.py.j2 | 2 +- 
.../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/async_client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 8f10ab7edec5..c2fd3d57d483 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -672,7 +672,7 @@ class {{ service.async_client_name }}: return response {% endif %} - async def __aenter__(self): + async def __aenter__(self) -> "{{ service.async_client_name }}": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 8c3355ca8951..594106688782 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1674,7 +1674,7 @@ async def sample_analyze_iam_policy_longrunning(): # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "AssetServiceAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 7b4ecc386b7f..e1550e3665b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -792,7 +792,7 @@ async def sample_sign_jwt(): # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "IAMCredentialsAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index a6aa557c768e..6febf4f1cc94 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -825,7 +825,7 @@ async def sample_delete_trigger(): # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "EventarcAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index aa607edd4043..10966d07a302 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -2648,7 +2648,7 @@ async def sample_update_cmek_settings(): # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "ConfigServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 3c38c790b7a3..d8ff79b4b5f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -971,7 +971,7 @@ def request_generator(): # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 891f40ddab53..931dccbb1558 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -799,7 +799,7 @@ async def sample_delete_log_metric(): metadata=metadata, ) - async def __aenter__(self): + async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index eb6b14d8b3e1..fb4688580e18 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1399,7 +1399,7 @@ async def sample_delete_instance(): # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "CloudRedisAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): From c2a8a71d01ff0c39be57124c5d2c664e2e31b9e3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 27 Jun 2023 15:47:21 +0200 Subject: [PATCH 1010/1339] chore(deps): update all dependencies (#1676) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4389d88fd5b8..cb79d3a4e5fa 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -6,9 +6,9 @@ MarkupSafe==2.1.3 protobuf==4.23.3 pypandoc==1.11 PyYAML==6.0 -setuptools==67.8.0 +setuptools==68.0.0 grpc-google-iam-v1==0.12.6 -proto-plus==1.22.2 +proto-plus==1.22.3 pytest-asyncio==0.21.0 libcst==1.0.1 inflection==0.5.1 \ No newline at end of file From dd90be6bf29a5f46368d07d1090377b1603cacbf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 14:11:02 +0000 Subject: [PATCH 1011/1339] chore(main): release 1.11.0 (#1638) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 16 ++++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4005e32b35c0..6767fa3d0068 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,22 @@ # Changelog +## [1.11.0](https://github.com/googleapis/gapic-generator-python/compare/v1.10.0...v1.11.0) (2023-06-27) + + +### Features + +* Support snippet 
generation for services that only support REST transport ([#1656](https://github.com/googleapis/gapic-generator-python/issues/1656)) ([bb60a3d](https://github.com/googleapis/gapic-generator-python/commit/bb60a3d87b252f276f07f9ab21a17cd279b702c1)) + + +### Bug Fixes + +* Add `exec` as a reserved word ([#1673](https://github.com/googleapis/gapic-generator-python/issues/1673)) ([90af1e6](https://github.com/googleapis/gapic-generator-python/commit/90af1e6a97297c8bfbf8951e09922f3727b7074f)) +* Add async context manager return types ([#1660](https://github.com/googleapis/gapic-generator-python/issues/1660)) ([7f58100](https://github.com/googleapis/gapic-generator-python/commit/7f58100e645d9f868953deeac50992a44f6048e2)) +* Fix bug with quote replacement. ([#1613](https://github.com/googleapis/gapic-generator-python/issues/1613)) ([5268045](https://github.com/googleapis/gapic-generator-python/commit/5268045c34b36f4a89e00559bedde04d2a35bdd7)) +* Fix code coverage in async test ([#1646](https://github.com/googleapis/gapic-generator-python/issues/1646)) ([ccada98](https://github.com/googleapis/gapic-generator-python/commit/ccada9880890d6fb10ee02dad14b960d77335b82)) +* Mock return_value should not populate oneof message fields ([#1668](https://github.com/googleapis/gapic-generator-python/issues/1668)) ([34d1a5d](https://github.com/googleapis/gapic-generator-python/commit/34d1a5d6455af7ee5bd57743e55e1ada16985372)) + ## [1.10.0](https://github.com/googleapis/gapic-generator-python/compare/v1.9.1...v1.10.0) (2023-03-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2dcd3fcdc8fd..abe5f630f9fc 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.10.0" +version = "1.11.0" release_status = "Development Status :: 5 - Production/Stable" 
dependencies = [ "click >= 6.7", From cf5d7cc257be284aca2b8d7f71ad484a3ad07bd8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Jun 2023 12:39:55 -0400 Subject: [PATCH 1012/1339] chore: store artifacts in placer (#1678) Source-Link: https://github.com/googleapis/synthtool/commit/cb960373d12d20f8dc38beee2bf884d49627165e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/release/common.cfg | 9 +++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 02a4dedced74..98994f474104 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/packages/gapic-generator/.kokoro/release/common.cfg b/packages/gapic-generator/.kokoro/release/common.cfg index 72000839ce3c..1f5dced3c103 100644 --- a/packages/gapic-generator/.kokoro/release/common.cfg +++ b/packages/gapic-generator/.kokoro/release/common.cfg @@ -38,3 +38,12 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + +# Store the packages we uploaded to PyPI. 
That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. +action { + define_artifacts { + regex: "github/gapic-generator-python/**/*.tar.gz" + strip_prefix: "github/gapic-generator-python" + } +} From 01251dfd3f87f398d4f5d59afb6b2b72dd6227f0 Mon Sep 17 00:00:00 2001 From: Aza Tulepbergenov Date: Tue, 4 Jul 2023 09:57:54 -0700 Subject: [PATCH 1013/1339] fix: fix issue with reserved names and http body. (#1657) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/gapic/schema/wrappers.py | 3 +++ .../tests/unit/schema/wrappers/test_method.py | 13 +++++++++++++ 2 files changed, 16 insertions(+) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 50127732aa5d..7f6a0c0ce05d 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1091,6 +1091,9 @@ def try_parse_http_rule(cls, http_rule) -> Optional['HttpRule']: uri = utils.convert_uri_fieldnames(uri) body = http_rule.body or None + # Ensure body doesn't conflict with reserved names. 
+ if body in utils.RESERVED_NAMES and not body.endswith("_"): + body += "_" return cls(method, uri, body) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 3e27fed5756b..f6cf2e881876 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -511,6 +511,19 @@ def test_method_http_options_reserved_name_in_url(): }] +def test_method_http_options_reserved_name_in_body(): + http_rule = http_pb2.HttpRule( + post='/v1/license/{license=lic/*}', + body='breakpoint' + ) + method = make_method('DoSomething', http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [{ + 'method': 'post', + 'uri': '/v1/license/{license_=lic/*}', + 'body': 'breakpoint_' + }] + + def test_method_http_options_generate_sample(): http_rule = http_pb2.HttpRule( get='/v1/{resource.id=projects/*/regions/*/id/**}/stuff', From 2a2c73c028d9e70e32c056579cebe71f91d80be1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 5 Jul 2023 10:39:28 -0400 Subject: [PATCH 1014/1339] fix: add `help` as a reserved word (#1682) --- packages/gapic-generator/gapic/utils/reserved_names.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index fe7b2218bba4..0914cb02e3ce 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -87,6 +87,7 @@ "try", "type", "exec", + "help", # Comes from Protoplus "ignore_unknown_fields" ] From 456e34b26f759178c1295bfc1695fd03861279fc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 10:48:03 -0400 Subject: [PATCH 1015/1339] chore(main): release 1.11.1 (#1680) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 6767fa3d0068..5d4a739cbd50 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.11.1](https://github.com/googleapis/gapic-generator-python/compare/v1.11.0...v1.11.1) (2023-07-05) + + +### Bug Fixes + +* Add `help` as a reserved word ([#1682](https://github.com/googleapis/gapic-generator-python/issues/1682)) ([23fe890](https://github.com/googleapis/gapic-generator-python/commit/23fe890136144567e153d257faa31086f18544df)) +* Fix issue with reserved names and http body. ([#1657](https://github.com/googleapis/gapic-generator-python/issues/1657)) ([e51109d](https://github.com/googleapis/gapic-generator-python/commit/e51109da85bbd53e5b2af90fbce031b0dd7e2be5)) + ## [1.11.0](https://github.com/googleapis/gapic-generator-python/compare/v1.10.0...v1.11.0) (2023-06-27) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index abe5f630f9fc..08af668bc0c7 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.0" +version = "1.11.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 17e6065657d094cf2d08cd0165523b00a65fd9a4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 5 Jul 2023 16:29:36 -0400 Subject: [PATCH 1016/1339] test: fix issue in tests related to json serialization (#1684) --- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 2 +- 
.../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3ad8ef76891c..186e5b4e04ea 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1311,7 +1311,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() {% if not method.void %} - req.return_value._content = {% if method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + req.return_value._content = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} {% if method.server_streaming %} req.return_value._content = "[{}]".format(req.return_value._content) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 2d895cab03e4..91140618017f 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1206,7 +1206,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() {% if not method.void %} - req.return_value._content = {% if 
method.output.ident.package == method.ident.package %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + req.return_value._content = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} {% if method.server_streaming %} req.return_value._content = "[{}]".format(req.return_value._content) From 52cb0f8d3e97ee8fcaaf07f81777351698b64107 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 6 Jul 2023 13:32:32 -0400 Subject: [PATCH 1017/1339] docs: update copyright year (#1685) * docs: update copyright year * update copyright year in tests/unit/samplegen/golden_snippets/ * update copyright year --- packages/gapic-generator/gapic/ads-templates/_license.j2 | 2 +- packages/gapic-generator/gapic/templates/_license.j2 | 2 +- packages/gapic-generator/gapic/templates/docs/conf.py.j2 | 2 +- .../tests/integration/goldens/asset/docs/conf.py | 4 ++-- .../integration/goldens/asset/google/cloud/asset/__init__.py | 2 +- .../goldens/asset/google/cloud/asset/gapic_version.py | 2 +- .../goldens/asset/google/cloud/asset_v1/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/gapic_version.py | 2 +- .../goldens/asset/google/cloud/asset_v1/services/__init__.py | 2 +- .../google/cloud/asset_v1/services/asset_service/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/pagers.py | 2 +- .../asset_v1/services/asset_service/transports/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/base.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/grpc.py | 2 +- .../services/asset_service/transports/grpc_asyncio.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/rest.py | 2 +- 
.../goldens/asset/google/cloud/asset_v1/types/__init__.py | 2 +- .../asset/google/cloud/asset_v1/types/asset_service.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/assets.py | 2 +- .../tests/integration/goldens/asset/noxfile.py | 2 +- ...set_v1_generated_asset_service_analyze_iam_policy_async.py | 2 +- ...ated_asset_service_analyze_iam_policy_longrunning_async.py | 2 +- ...rated_asset_service_analyze_iam_policy_longrunning_sync.py | 2 +- ...sset_v1_generated_asset_service_analyze_iam_policy_sync.py | 2 +- ..._generated_asset_service_batch_get_assets_history_async.py | 2 +- ...1_generated_asset_service_batch_get_assets_history_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_create_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_create_feed_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_delete_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_delete_feed_sync.py | 2 +- ...oudasset_v1_generated_asset_service_export_assets_async.py | 2 +- ...loudasset_v1_generated_asset_service_export_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_list_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_sync.py | 2 +- ...1_generated_asset_service_search_all_iam_policies_async.py | 2 +- ...v1_generated_asset_service_search_all_iam_policies_sync.py | 2 +- ...t_v1_generated_asset_service_search_all_resources_async.py | 2 +- ...et_v1_generated_asset_service_search_all_resources_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_update_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_update_feed_sync.py | 2 +- .../goldens/asset/scripts/fixup_asset_v1_keywords.py | 2 +- 
.../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/asset/tests/__init__.py | 2 +- .../tests/integration/goldens/asset/tests/unit/__init__.py | 2 +- .../integration/goldens/asset/tests/unit/gapic/__init__.py | 2 +- .../goldens/asset/tests/unit/gapic/asset_v1/__init__.py | 2 +- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 2 +- .../tests/integration/goldens/credentials/docs/conf.py | 4 ++-- .../goldens/credentials/google/iam/credentials/__init__.py | 2 +- .../credentials/google/iam/credentials/gapic_version.py | 2 +- .../goldens/credentials/google/iam/credentials_v1/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/gapic_version.py | 2 +- .../google/iam/credentials_v1/services/__init__.py | 2 +- .../iam/credentials_v1/services/iam_credentials/__init__.py | 2 +- .../credentials_v1/services/iam_credentials/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../services/iam_credentials/transports/__init__.py | 2 +- .../services/iam_credentials/transports/base.py | 2 +- .../services/iam_credentials/transports/grpc.py | 2 +- .../services/iam_credentials/transports/grpc_asyncio.py | 2 +- .../services/iam_credentials/transports/rest.py | 2 +- .../credentials/google/iam/credentials_v1/types/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/types/common.py | 2 +- .../google/iam/credentials_v1/types/iamcredentials.py | 2 +- .../tests/integration/goldens/credentials/noxfile.py | 2 +- ...1_generated_iam_credentials_generate_access_token_async.py | 2 +- ...v1_generated_iam_credentials_generate_access_token_sync.py | 2 +- ...ls_v1_generated_iam_credentials_generate_id_token_async.py | 2 +- ...als_v1_generated_iam_credentials_generate_id_token_sync.py | 2 +- ...redentials_v1_generated_iam_credentials_sign_blob_async.py | 2 +- ...credentials_v1_generated_iam_credentials_sign_blob_sync.py | 2 +- ...credentials_v1_generated_iam_credentials_sign_jwt_async.py | 2 +- 
...mcredentials_v1_generated_iam_credentials_sign_jwt_sync.py | 2 +- .../credentials/scripts/fixup_credentials_v1_keywords.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../tests/integration/goldens/credentials/tests/__init__.py | 2 +- .../integration/goldens/credentials/tests/unit/__init__.py | 2 +- .../goldens/credentials/tests/unit/gapic/__init__.py | 2 +- .../credentials/tests/unit/gapic/credentials_v1/__init__.py | 2 +- .../tests/unit/gapic/credentials_v1/test_iam_credentials.py | 2 +- .../tests/integration/goldens/eventarc/docs/conf.py | 4 ++-- .../goldens/eventarc/google/cloud/eventarc/__init__.py | 2 +- .../goldens/eventarc/google/cloud/eventarc/gapic_version.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/__init__.py | 2 +- .../eventarc/google/cloud/eventarc_v1/gapic_version.py | 2 +- .../eventarc/google/cloud/eventarc_v1/services/__init__.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/__init__.py | 2 +- .../cloud/eventarc_v1/services/eventarc/async_client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/pagers.py | 2 +- .../eventarc_v1/services/eventarc/transports/__init__.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/base.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/grpc.py | 2 +- .../eventarc_v1/services/eventarc/transports/grpc_asyncio.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/rest.py | 2 +- .../eventarc/google/cloud/eventarc_v1/types/__init__.py | 2 +- .../eventarc/google/cloud/eventarc_v1/types/eventarc.py | 2 +- .../eventarc/google/cloud/eventarc_v1/types/trigger.py | 2 +- .../tests/integration/goldens/eventarc/noxfile.py | 2 +- .../eventarc_v1_generated_eventarc_create_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_create_trigger_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_delete_trigger_sync.py | 2 +- 
.../eventarc_v1_generated_eventarc_get_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_get_trigger_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_triggers_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_triggers_sync.py | 2 +- .../eventarc_v1_generated_eventarc_update_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_update_trigger_sync.py | 2 +- .../goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py | 2 +- .../tests/integration/goldens/eventarc/setup.py | 2 +- .../tests/integration/goldens/eventarc/tests/__init__.py | 2 +- .../tests/integration/goldens/eventarc/tests/unit/__init__.py | 2 +- .../integration/goldens/eventarc/tests/unit/gapic/__init__.py | 2 +- .../goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py | 2 +- .../eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py | 2 +- .../tests/integration/goldens/logging/docs/conf.py | 4 ++-- .../goldens/logging/google/cloud/logging/__init__.py | 2 +- .../goldens/logging/google/cloud/logging/gapic_version.py | 2 +- .../goldens/logging/google/cloud/logging_v2/__init__.py | 2 +- .../goldens/logging/google/cloud/logging_v2/gapic_version.py | 2 +- .../logging/google/cloud/logging_v2/services/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/__init__.py | 2 +- .../logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/pagers.py | 2 +- .../services/config_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/config_service_v2/transports/base.py | 2 +- .../logging_v2/services/config_service_v2/transports/grpc.py | 2 +- .../services/config_service_v2/transports/grpc_asyncio.py | 2 +- .../logging_v2/services/config_service_v2/transports/rest.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- 
.../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 2 +- .../services/logging_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/transports/base.py | 2 +- .../logging_v2/services/logging_service_v2/transports/grpc.py | 2 +- .../services/logging_service_v2/transports/grpc_asyncio.py | 2 +- .../logging_v2/services/logging_service_v2/transports/rest.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 2 +- .../services/metrics_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/base.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/grpc.py | 2 +- .../services/metrics_service_v2/transports/grpc_asyncio.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/rest.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/__init__.py | 2 +- .../logging/google/cloud/logging_v2/types/log_entry.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/logging.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_config.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_metrics.py | 2 +- .../tests/integration/goldens/logging/noxfile.py | 2 +- ...ging_v2_generated_config_service_v2_create_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_create_bucket_sync.py | 2 +- ...g_v2_generated_config_service_v2_create_exclusion_async.py | 2 +- ...ng_v2_generated_config_service_v2_create_exclusion_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_sink_async.py | 2 +- ...logging_v2_generated_config_service_v2_create_sink_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_view_async.py | 2 +- 
...logging_v2_generated_config_service_v2_create_view_sync.py | 2 +- ...ging_v2_generated_config_service_v2_delete_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_delete_bucket_sync.py | 2 +- ...g_v2_generated_config_service_v2_delete_exclusion_async.py | 2 +- ...ng_v2_generated_config_service_v2_delete_exclusion_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_sink_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_sink_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_view_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_view_sync.py | 2 +- ...logging_v2_generated_config_service_v2_get_bucket_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_sync.py | 2 +- ..._v2_generated_config_service_v2_get_cmek_settings_async.py | 2 +- ...g_v2_generated_config_service_v2_get_cmek_settings_sync.py | 2 +- ...ging_v2_generated_config_service_v2_get_exclusion_async.py | 2 +- ...gging_v2_generated_config_service_v2_get_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_sync.py | 2 +- ...gging_v2_generated_config_service_v2_list_buckets_async.py | 2 +- ...ogging_v2_generated_config_service_v2_list_buckets_sync.py | 2 +- ...ng_v2_generated_config_service_v2_list_exclusions_async.py | 2 +- ...ing_v2_generated_config_service_v2_list_exclusions_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_sinks_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_views_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_sync.py | 2 +- ...ng_v2_generated_config_service_v2_undelete_bucket_async.py | 2 +- ...ing_v2_generated_config_service_v2_undelete_bucket_sync.py | 2 +- 
...ging_v2_generated_config_service_v2_update_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_update_bucket_sync.py | 2 +- ..._generated_config_service_v2_update_cmek_settings_async.py | 2 +- ...2_generated_config_service_v2_update_cmek_settings_sync.py | 2 +- ...g_v2_generated_config_service_v2_update_exclusion_async.py | 2 +- ...ng_v2_generated_config_service_v2_update_exclusion_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_sink_async.py | 2 +- ...logging_v2_generated_config_service_v2_update_sink_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_view_async.py | 2 +- ...logging_v2_generated_config_service_v2_update_view_sync.py | 2 +- ...ogging_v2_generated_logging_service_v2_delete_log_async.py | 2 +- ...logging_v2_generated_logging_service_v2_delete_log_sync.py | 2 +- ..._v2_generated_logging_service_v2_list_log_entries_async.py | 2 +- ...g_v2_generated_logging_service_v2_list_log_entries_sync.py | 2 +- ...logging_v2_generated_logging_service_v2_list_logs_async.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_sync.py | 2 +- ...ng_service_v2_list_monitored_resource_descriptors_async.py | 2 +- ...ing_service_v2_list_monitored_resource_descriptors_sync.py | 2 +- ..._v2_generated_logging_service_v2_tail_log_entries_async.py | 2 +- ...g_v2_generated_logging_service_v2_tail_log_entries_sync.py | 2 +- ...v2_generated_logging_service_v2_write_log_entries_async.py | 2 +- ..._v2_generated_logging_service_v2_write_log_entries_sync.py | 2 +- ...v2_generated_metrics_service_v2_create_log_metric_async.py | 2 +- ..._v2_generated_metrics_service_v2_create_log_metric_sync.py | 2 +- ...v2_generated_metrics_service_v2_delete_log_metric_async.py | 2 +- ..._v2_generated_metrics_service_v2_delete_log_metric_sync.py | 2 +- ...ng_v2_generated_metrics_service_v2_get_log_metric_async.py | 2 +- ...ing_v2_generated_metrics_service_v2_get_log_metric_sync.py | 2 +- ..._v2_generated_metrics_service_v2_list_log_metrics_async.py | 2 
+- ...g_v2_generated_metrics_service_v2_list_log_metrics_sync.py | 2 +- ...v2_generated_metrics_service_v2_update_log_metric_async.py | 2 +- ..._v2_generated_metrics_service_v2_update_log_metric_sync.py | 2 +- .../goldens/logging/scripts/fixup_logging_v2_keywords.py | 2 +- .../tests/integration/goldens/logging/setup.py | 2 +- .../tests/integration/goldens/logging/tests/__init__.py | 2 +- .../tests/integration/goldens/logging/tests/unit/__init__.py | 2 +- .../integration/goldens/logging/tests/unit/gapic/__init__.py | 2 +- .../goldens/logging/tests/unit/gapic/logging_v2/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../tests/integration/goldens/redis/docs/conf.py | 4 ++-- .../integration/goldens/redis/google/cloud/redis/__init__.py | 2 +- .../goldens/redis/google/cloud/redis/gapic_version.py | 2 +- .../goldens/redis/google/cloud/redis_v1/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/gapic_version.py | 2 +- .../goldens/redis/google/cloud/redis_v1/services/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/pagers.py | 2 +- .../redis_v1/services/cloud_redis/transports/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/base.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 2 +- .../redis_v1/services/cloud_redis/transports/grpc_asyncio.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/cloud_redis.py | 2 +- .../tests/integration/goldens/redis/noxfile.py | 2 +- 
.../redis_v1_generated_cloud_redis_create_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_async.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_sync.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_sync.py | 2 +- .../goldens/redis/scripts/fixup_redis_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- .../tests/integration/goldens/redis/tests/__init__.py | 2 +- .../tests/integration/goldens/redis/tests/unit/__init__.py | 2 +- .../integration/goldens/redis/tests/unit/gapic/__init__.py | 2 +- .../goldens/redis/tests/unit/gapic/redis_v1/__init__.py | 2 +- .../redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_async.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_sync.py | 2 +- ...lusca_v1_generated_snippets_method_bidi_streaming_async.py | 2 +- ...llusca_v1_generated_snippets_method_bidi_streaming_sync.py | 2 +- ...lusca_v1_generated_snippets_method_lro_signatures_async.py | 2 +- 
...llusca_v1_generated_snippets_method_lro_signatures_sync.py | 2 +- ...llusca_v1_generated_snippets_method_one_signature_async.py | 2 +- ...ollusca_v1_generated_snippets_method_one_signature_sync.py | 2 +- ...sca_v1_generated_snippets_method_server_streaming_async.py | 2 +- ...usca_v1_generated_snippets_method_server_streaming_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_async.py | 2 +- ...1_generated_snippets_one_of_method_required_field_async.py | 2 +- ...v1_generated_snippets_one_of_method_required_field_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_sync.py | 2 +- .../tests/unit/samplegen/golden_snippets/sample_basic.py | 2 +- .../unit/samplegen/golden_snippets/sample_basic_async.py | 2 +- .../samplegen/golden_snippets/sample_basic_unflattenable.py | 2 +- .../samplegen/golden_snippets/sample_basic_void_method.py | 2 +- 299 files changed, 304 insertions(+), 304 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/_license.j2 b/packages/gapic-generator/gapic/ads-templates/_license.j2 index b00563f3379b..eee20db57924 100644 --- a/packages/gapic-generator/gapic/ads-templates/_license.j2 +++ b/packages/gapic-generator/gapic/ads-templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/gapic/templates/_license.j2 b/packages/gapic-generator/gapic/templates/_license.j2 index b00563f3379b..eee20db57924 100644 --- a/packages/gapic-generator/gapic/templates/_license.j2 +++ b/packages/gapic-generator/gapic/templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index 3dbdeeeeade8..5c7c4b999870 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -68,7 +68,7 @@ root_doc = "index" # General information about the project. project = u"{{ api.naming.warehouse_package_name }}" -copyright = u"2022, Google, LLC" +copyright = u"2023, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 991fc2c82590..9e78282ded9a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-cloud-asset" -copyright = u"2022, Google, LLC" +copyright = u"2023, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index 00f87944327e..f8aff0bc41f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index efe650f89632..78280c4b82b8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index 5f66b6251136..542f923f08c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 594106688782..e5af385f5713 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 7aff28b059a2..73875148e27b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 1c01392cc3d7..8f7f9706d4a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index 81433d87a9a7..c34a5f8f6e44 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 7bcf53ed9e1b..55b9835f017f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 845421d5e2d8..ab3a577ca8e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index d579bca88a69..bd9417a8d230 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 8818b94e6c69..d3e95dc93d53 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index dd04dee8c92e..893bf916368b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index d010a66232df..23bb830fbf21 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 84e819663539..fca415244b79 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 3da412a665ac..e8c4449425fe 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index f9caabe8e6e1..4a13b28117ea 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index 0fbd794418ab..47fb7196c09f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index dd4fb419dad4..5f29caec5251 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index 3d592e390b7c..c2aa70280658 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index a54a5e1daafc..6dd1e1dc4e5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index 5df919fe4d72..87e233fb64d7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index 668903c5ee28..f5aaf9a07c86 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index ce7b974b87fd..4ef06854bf6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py index 0dcea55aade1..cd3fab7c0047 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py index 735baf2c1d80..b6edad9d3957 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index 36815d3c7c29..bc5930773f55 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 60816961b550..0e402bbff7ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index b5b9a6e1a391..75bdd2fdd431 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 41d119391094..01cd552cf69f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index 65ef19c875f5..91bc6e83d934 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index f2e41b183ba1..cbd0599118be 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index 88f0613f33b1..52059178faaf 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index 3ee6b48bdb12..2c516167b319 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index 367eae93c841..f99c45036215 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index ca656c399840..2095ee02945d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index 1757ab26d5b5..6e22841996ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index 770ee7670ac6..4a82b7eb50de 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index fe93ab55ce55..80d4dca4dea6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index 320e04de983b..da3483f24a55 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index a978950855af..631ec8e32a32 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index ccbd7491c3ab..166120b041d5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index fdd2ffb030b1..4a12e28e3cdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 7eb953881c25..8ab9f3cbcbc9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-iam-credentials" -copyright = u"2022, Google, LLC" +copyright = u"2023, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 6427ccb12a74..6344d9b69f5e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 79bfe836a877..10dccc1f3672 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py index ff711c8a5c7b..308f0419c2e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index e1550e3665b9..54189ad74743 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 1d9015f8c4c9..c2414fe97d31 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py index c0a6b189dc4b..fcc2e07495e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 11ba17c52235..f7b9b4714fb9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index c6f3288e9134..bb2fe56cfab9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 524245c8c1d0..4cd9e1f2b9b7 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 32eff78924f4..817fe8928950 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py index 67117910ad4f..5f6aa27a12bd 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index f7a056998add..fb3a30054371 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index ade7a6e16e9d..3ee0123b6eec 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index a70485abca6a..a9202ef71b7c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index 0650407c09a2..4fb685e783f9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index cea70e026d40..f5cde6216918 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py index fca37957ef11..4de35e89a022 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py index 982dfd05b051..761870af59c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py index 315a82abde5f..42f0d1197050 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py index 83f5193c443c..16cb3f41c415 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py index 9d897adc4a68..3d5c88eb756f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py index ca9f76b3592f..0ff30f162316 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py index 55a3779b1e28..90c337555a5b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 6f128c3927cf..98a42b7ba219 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index d0d236c8b6da..38b124448418 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py index d3b29380b0a1..5a1bf0a7424b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-cloud-eventarc" -copyright = u"2022, Google, LLC" +copyright = u"2023, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py index b1e28383519a..db48ba5fdfe8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index ae4e2a1cba77..a9881629c6ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py index fbe8bc381042..0f441a7b5f7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 6febf4f1cc94..77bf35afad16 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 63915520d9d2..0049fbba676e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index f1121f3037bb..1956f7bb1ade 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py index ceddbaa0edbd..2a8db61b7d48 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 655845217600..7a2e60bc0b3c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index adf068fcf91f..26f3ff79e742 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 644a6f7f6c51..2d8824845bd6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index bd5c2e0ab41b..028e474937a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 5b82d00a1937..09b1dcca725c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index b6ea5be2623d..cb07e3c99e2f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 6d9658ec6c40..e7f7acdd0bc8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 8e13edbf420d..08024a91125c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py index 711417033982..601c9defba1a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py index 998fdb703976..a0bd61ac12c6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py index d5063eff520f..cf8e3997acd0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py index 880f2bad4a9b..5a33153b0b3d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py index 228e7e550a79..ea3d734762c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py index ef1277a73fc6..7bc98b9cdeea 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py index a7a99b5a0f6a..1a2962e813ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py index ea6b79848847..3d9f79df1f2f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py index a0e2a98f606f..a142df42f5cb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py index b17b34a63b52..c09e035c9f73 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py index ba8400a43a57..2b6ac4b47971 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index f0ba7a537edd..6cba716beac8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index d9a15d9a1dcd..e87ed68bde0b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index f7bb3f4c94e1..6291cb766d25 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-cloud-logging" -copyright = u"2022, Google, LLC" +copyright = u"2023, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 71b274bca5ee..d8b1e1bfee36 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 5c5e6494844f..c40cb2b0d9be 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 24dd59314313..2e513bad23c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 10966d07a302..746d2e487861 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 947cafee32da..4a515374cdf8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index f08c20f2d43c..fb524efd8bb9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 1fc7dd79c53a..8aaccee9e983 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 94d16a4d38c2..e30689cd0309 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8b912be15423..8a58c15f81c7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 7808861720ca..6e208e9a7535 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py index 48446d14e3fe..0a90ea99c0eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 8180e704b8d8..fde7d503251d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index d8ff79b4b5f0..698b72276c31 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index c9fa648737da..a77eb555cefd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 490d11ca7c44..18997810173d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 1ffa88e973e0..c1d66e378abf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 0cf5ac2edab1..637083ce9004 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 6f6b9a4fe67e..fdaa104c76d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 8d9c613a671e..0f1aaa222348 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py index ee4b6e9a7444..b77514b574cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 293e70fbb71c..03275e543107 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 931dccbb1558..ffcebb38ae3e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 536fa0952248..09e173e72d04 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index d92cf40d47cc..6f8bc8e01f74 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 0b03495bfd7f..7f421d905965 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 01cd0d37cfbc..a67ba7fd8997 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 8ce93312c74c..11116216065e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index bdc115947558..3ddcc80df579 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py index 059ddf19d45d..c68edad36d43 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index b9b3f21a5f07..29636d30ebdc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index e5f3dcf7d6d2..040a12549b99 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index 39904f8f8a03..fffc74c192cc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index e32b8e0d3c48..a7ef42ec824c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 738077d4401b..8d39eb807f72 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 9ce7df50a39c..e87b6c033b8c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index f399b226f588..c1f028fb0464 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 374173f52b55..dc73253f4897 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 8f14d777f8cb..17490c61ef37 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 11bd92ad58fa..75ec32f48eeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 0fc007986813..277e83055225 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index d6d2a0d7db7a..a4df0299426a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 016f3e70b081..5cd201276977 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 3ee9567f1a8c..cd3ca94e19c6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index 6633aa3c4344..fcffb6db861d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index ec39ca1a09b7..a8f902116832 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index 9d6146c57c8b..b426d4703e1c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index bc051e4e025e..5d98f782bde2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 62a27ea1d1a8..11d91947e3bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index fa7d6f6e7b91..bf9875b0aa94 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index 2fed68bbcbe9..fe9c7031ef46 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 53bda04c937e..b5539c04f291 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 1a91ecdd63d9..c29c35a4e213 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 337050c45a7e..818ab646fdf3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 6998c4b4af97..3ae13401d749 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index a91ec6042d7a..499d68bdbf44 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 83a5bcda4016..03b7dc7d8f71 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index 913ec9968d8c..a445ed396c8f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 0739e175be61..b6fe5b11e08f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index ff34156f3826..ecebaf1194ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5de975ecc9cd..5992e53ee4aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index a9818b572059..14d8679bce7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index 614e9ec66b6c..dfbc3b411480 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 0c7912f7f038..8626f7cae9fd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index a1aa5ed6f4dc..ad42edff534e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 8cdac9d12438..727723a7a559 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index ea81f5c8b29c..b642d38eec23 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 05a35323d6bb..b4fc92452254 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index c39fb2d9e0a1..1542a5a387f2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index 270e5c14578f..b273c465d3ec 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index eeca015d1b41..d2695708ddd3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 6355e2a62226..8d25c7d33f73 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 450f5662a3fc..e1c741b67075 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 81aa0fb920f4..7b4a3c597f2b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index e1a2421fec56..96fc8ff9788c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 1828ce84c3a7..9bbc7dcb1c5d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 873fecb737b1..d8b8d7f7bca5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 958572b9301a..36d5776e36dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index 400d57897c58..aef8473798e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index cc3a1be435c6..e84230202450 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index 8ccc9f3c3319..f143a772c3d3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 33014bf23658..3867d3702391 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 209dd510d93c..c6469053baa4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 86f6c9ccb3aa..1e4e28abc08c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index d968835f75a2..d5cfe190c8dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d200793fa90c..d24923cb1e75 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index eebad0bfd01e..71859024dbb7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 8d132377e740..5a5ff140c42a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 4b99bc6f038d..519a2498ac3a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index c54a2201fc14..ca97be4b3d86 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index f1afa6fae199..24e9e200951f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 29461c6f8613..dc9a545e7c4b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index bd7954828811..1a0d48664303 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index d28fbe7a037c..de4bfe6c822b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index d351980e9679..557d3229302a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index bb9a56579e6f..b9e9cade9e94 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index 54a73b14122e..fea40e7a4957 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index 73276ef182fb..ccf2983be6d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index d6ef03c486b6..4b6984171895 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 6ab2bb57fd83..abb071c655da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 92c92395a253..f280ec9dea62 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 5a3e646926f7..bffbe10a8eac 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 9a794a4ccb6d..59bfeeaaaa3f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 39a6e72e3072..ed4dd0126e75 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index c24078ce4df5..a8da3c59cb52 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 034ef836e59f..96a1235326b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 079e9cd542dc..ad8daf514f55 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index c42c689fd3f8..e477e51adcb8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 70df8f2d2640..a28f2b1a2542 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index 72426508f412..f4c7ab4631b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -79,7 +79,7 @@ # General information about the project. 
project = u"google-cloud-redis" -copyright = u"2022, Google, LLC" +copyright = u"2023, Google, LLC" author = u"Google APIs" # TODO: autogenerate this bit # The version info for the project you're documenting, acts as replacement for diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 5b74fd732e02..71f3599d0ace 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index 4cc6efcb549e..a3273e7a5f4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py index 30274cc6e9dd..360a0d13ebdd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py index acb9d69179d1..bdef6fef167e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index fb4688580e18..976bb3bd7173 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 66bca156fd72..414d578bfeaa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index b601a7a057f6..2efaf76cfe5d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index bfee7cc29940..ec40765ecc00 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index d00e009be978..19c96cc87f8b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index da987a8f865c..3c22cd06830e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 0495dbc63921..806b87287fb1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 2f514a9d286e..0908104c606c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index 5d35c41cc999..f616df1f68c0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index d045c7196340..d0e12a5796f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index ab743ed4ff1a..5308e1ac4ba7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index d8dadeb17b0d..33f13c2311dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index 7eed2cf2e0a8..28b2a638cb6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 5db2cb522ae4..361dbe4f4b3d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index 625687593833..3cc2c500d2d3 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index 0a8be9007b5f..b1080a1af5ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py index 39eaa613b3b4..5c4ffd0cef26 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index a92223c78562..59321ba49ec2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py index 1c735ff82f1e..31564909c30a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index 93294bca1eb2..9ca737687131 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index 69cb243a5069..96f25ba4b399 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index 168b60a07ab8..6c092f684b1a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py index 47995a43b688..5a88ac2b0744 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 087145fca3da..93173fc27de1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index ef788e11feec..e7ee022f94d9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index fc085a4783e4..b54b18108137 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index a805e2417bfd..fbcabb81e6ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index 00c64cbebb7a..cb82a9b6eb1b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py index fc02e45b9313..d21c2dc08db9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index 6df69450aa37..b83d29e1c49b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 08dd761fb213..4ccd159d1211 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py index 231bc125017b..1b4db446eb8d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 6c31c2cdc8be..ca2466444ceb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index 31979bd6cfc6..02c96869da19 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index b5063387739f..d7434c0776ae 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index aa07573e88b0..b9cdaa1f7c7f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index ff38c6742da3..229f7cb9d784 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index cc387195b745..6ef4757d3f3d 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index 4fa56bee0609..fcfb29df413a 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index ae8ffbd1e074..94f25ad6a056 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index 37ff6c9eefc0..196bb80d44d7 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index 508e54665437..ccf631119e6e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index 5c4e056dc256..236efda92ec7 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index 9fd7592c7a1f..ce315bc224a0 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index 0677e4f5b889..0408577440af 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index bf709e649969..a91e3e54e4ed 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index b59e3369fe6b..e7ac3c078827 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index d8f4cb53cbad..422b1250e00f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 2b6d88360b03..f42269db19dc 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index d8f4cb53cbad..422b1250e00f 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index d9f3269019c7..ea15a2fa37c9 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From db1633d1a7511203ac5dc1e44b5ad324cc8806d0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 6 Jul 2023 13:52:01 -0400 Subject: [PATCH 1018/1339] chore(main): release 1.11.2 (#1686) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5d4a739cbd50..142d4504e6cf 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.11.2](https://github.com/googleapis/gapic-generator-python/compare/v1.11.1...v1.11.2) (2023-07-06) + + +### Documentation + +* Update copyright year ([#1685](https://github.com/googleapis/gapic-generator-python/issues/1685)) ([6e1bcde](https://github.com/googleapis/gapic-generator-python/commit/6e1bcde56f232789d356ca5617aa668d5d4cf37f)) + ## [1.11.1](https://github.com/googleapis/gapic-generator-python/compare/v1.11.0...v1.11.1) (2023-07-05) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 08af668bc0c7..42a7e1f69fa3 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.1" +version = "1.11.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 899605659ca7b9568e5e493df110d593ce450dd4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 7 Jul 2023 13:02:47 -0400 Subject: [PATCH 1019/1339] docs: fix formatting of docstring with lists (#1687) * add test to show issue with lists * apply fix * work around click issue with mypy * style * 
apply more fixes for lists * formatting * add comment * style * coverage * move tests to test_lines * coverage * style * add test --- packages/gapic-generator/gapic/utils/lines.py | 16 ++++- packages/gapic-generator/noxfile.py | 10 ++- .../tests/unit/utils/test_lines.py | 68 +++++++++++++++++++ 3 files changed, 89 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 9e66a88ae98a..fffa125e80a5 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -86,8 +86,12 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 break_on_hyphens=False, ) # Strip the first \n from the text so it is not misidentified as an - # intentionally short line below. - text = text.replace('\n', ' ', 1) + # intentionally short line below, except when the text contains `:` + # as the new line is required for lists. + if '\n' in text: + initial_text = text.split('\n')[0] + if ":" not in initial_text: + text = text.replace('\n', ' ', 1) # Save the new `first` line. 
first = f'{initial[0]}\n' @@ -100,6 +104,10 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 tokens = [] token = '' for line in text.split('\n'): + # Ensure that lines that start with a hyphen are always on a new line + if line.strip().startswith('-') and token: + tokens.append(token) + token = '' token += line + '\n' if len(line) < width * 0.75: tokens.append(token) @@ -115,7 +123,9 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 text='\n'.join([textwrap.fill( break_long_words=False, initial_indent=' ' * indent, - subsequent_indent=' ' * indent, + # ensure that subsequent lines for lists are indented 2 spaces + subsequent_indent=' ' * indent + \ + (' ' if token.strip().startswith('-') else ''), text=token, width=width, break_on_hyphens=False, diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index afe77cfd9ac0..3786be6c1708 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -436,7 +436,13 @@ def docs(session): @nox.session(python=NEWEST_PYTHON) def mypy(session): """Perform typecheck analysis.""" - - session.install("mypy", "types-protobuf<=3.19.7", "types-PyYAML", "types-dataclasses") + # Pin to click==8.1.3 to workaround https://github.com/pallets/click/issues/2558 + session.install( + "mypy", + "types-protobuf<=3.19.7", + "types-PyYAML", + "types-dataclasses", + "click==8.1.3", + ) session.install(".") session.run("mypy", "gapic") diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index c471cb677bf6..51dccf5c397d 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -91,3 +91,71 @@ def test_wrap_short_line_preserved(): def test_wrap_does_not_break_hyphenated_word(): assert lines.wrap('do-not-break', width=5) == 'do-not-break' + + +def 
test_wrap_with_short_lines(): + input = """The hail in Wales falls mainly on the snails. The hail in Wales falls mainly +on the snails.""" + expected = """The hail in Wales falls mainly on the snails. The hail in +Wales falls mainly on the snails.""" + assert lines.wrap(input, width=60) == expected + + +def test_list_each_item_in_list_has_new_line(): + s = """Type of weather: +- Hail +- Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain +- Snow""" + assert lines.wrap(s, width=80) == s + + +def test_list_items_are_indented(): + input = """Type of weather. +Some types of weather: + +- A mix of hail and snow, followed by rain clouds, then finally clear sky +- Rain +- Snow""" + expected = """Type of weather. +Some types of weather: + +- A mix of hail and snow, followed by rain clouds, then + finally clear sky +- Rain +- Snow""" + assert lines.wrap(input, width=60) == expected + + +def test_list_new_line_preserved_after_colon(): + input = """Today's forecast will have different types of weather: + +- A mix of hail and snow, followed by rain clouds, then finally clear sky +- Rain +- Snow""" + expected = """Today's forecast will have different types + of weather: + + - A mix of hail and snow, followed by rain + clouds, then finally clear sky + - Rain + - Snow""" + assert lines.wrap(input, width=60, indent=16) == expected + + +def test_list_items_longer_text_before_list(): + input = """Weather Weather Weather Weather Weather Weather Weather +Weather Weather Weather Weather Weather Weather Weather +Type of weather: + +- Hail +- Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain +- Snow""" + expected = """Weather Weather Weather Weather Weather Weather Weather +Weather Weather Weather Weather Weather Weather Weather Type +of weather: + +- Hail +- Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain + Rain +- Snow""" + assert lines.wrap(input, width=60) == expected From 0bf17d314bfdf8919973d9973ba0157b246e5479 Mon Sep 17 00:00:00 2001 From: 
"release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 08:47:59 -0400 Subject: [PATCH 1020/1339] chore(main): release 1.11.3 (#1688) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 142d4504e6cf..715482c64309 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.11.3](https://github.com/googleapis/gapic-generator-python/compare/v1.11.2...v1.11.3) (2023-07-07) + + +### Documentation + +* Fix formatting of docstring with lists ([#1687](https://github.com/googleapis/gapic-generator-python/issues/1687)) ([abe0f3f](https://github.com/googleapis/gapic-generator-python/commit/abe0f3f3dc444fd12faf017e8ad73c941608e120)) + ## [1.11.2](https://github.com/googleapis/gapic-generator-python/compare/v1.11.1...v1.11.2) (2023-07-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 42a7e1f69fa3..2bc04d9c2552 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.2" +version = "1.11.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From 234946517cdc95927bfb5db4fc03c5fefbfbfa29 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 10 Jul 2023 17:09:59 +0200 Subject: [PATCH 1021/1339] chore(deps): update all dependencies (#1689) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index cb79d3a4e5fa..ebc2313f4f66 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -3,7 +3,7 @@ google-api-core==2.11.1 googleapis-common-protos==1.59.1 jinja2==3.1.2 MarkupSafe==2.1.3 -protobuf==4.23.3 +protobuf==4.23.4 pypandoc==1.11 PyYAML==6.0 setuptools==68.0.0 From 21cdaa06af9b0d16a56b6468711dacb82efc2230 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 11 Jul 2023 12:59:21 -0400 Subject: [PATCH 1022/1339] docs: ensure new line after colon (#1691) * docs: ensure new line after colon * add comment * update goldens * add another test case * add another test case * remove test code --- packages/gapic-generator/gapic/utils/lines.py | 16 ++++++++- .../services/asset_service/async_client.py | 3 ++ .../asset_v1/services/asset_service/client.py | 3 ++ .../services/asset_service/transports/rest.py | 3 ++ .../cloud/asset_v1/types/asset_service.py | 1 + .../tests/unit/utils/test_lines.py | 34 ++++++++++++++----- 6 files changed, 51 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index fffa125e80a5..2c5cd4021ca5 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import re import textwrap from typing import Iterable, Optional @@ -77,6 +78,12 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 # Break off the first line of the string to address non-zero offsets. first = text.split('\n')[0] + '\n' + + # Ensure that there are 2 new lines after a colon, otherwise + # the sphinx docs build will fail. 
+ if first.endswith(":\n"): + first += "\n" + if len(first) > width - offset: # Ensure `break_on_hyphens` is set to `False` when using # `textwrap.wrap` to avoid breaking hyperlinks with hyphens. @@ -95,6 +102,11 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 # Save the new `first` line. first = f'{initial[0]}\n' + + # Ensure that there are 2 new lines after a colon, otherwise + # the sphinx docs build will fail. + text = re.sub(r':\n([^\n])', r':\n\n\1', text) + text = text[len(first):].strip() if not text: return first.strip() @@ -109,7 +121,9 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 tokens.append(token) token = '' token += line + '\n' - if len(line) < width * 0.75: + + # Preserve line breaks for lines that are short or end with colon. + if len(line) < width * 0.75 or line.endswith(':'): tokens.append(token) token = '' if token: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index e5af385f5713..c5106ca73b2e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -584,6 +584,7 @@ async def sample_create_feed(): exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ @@ -691,6 +692,7 @@ async def sample_get_feed(): exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ @@ -919,6 +921,7 @@ async def sample_update_feed(): exported. The asset feed must be created within a project, organization, or folder. 
Supported destinations are: + Pub/Sub topics. """ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 73875148e27b..aeccfe63ae3f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -782,6 +782,7 @@ def sample_create_feed(): exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ @@ -889,6 +890,7 @@ def sample_get_feed(): exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ @@ -1103,6 +1105,7 @@ def sample_update_feed(): exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index d3e95dc93d53..6d2777d96d02 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -756,6 +756,7 @@ def __call__(self, exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ @@ -998,6 +999,7 @@ def __call__(self, exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. 
""" @@ -1385,6 +1387,7 @@ def __call__(self, exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. """ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 23bb830fbf21..af29c724b9a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -839,6 +839,7 @@ class Feed(proto.Message): An asset feed filter controls what updates are exported. The asset feed must be created within a project, organization, or folder. Supported destinations are: + Pub/Sub topics. Attributes: diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 51dccf5c397d..934f2ad66689 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -102,11 +102,16 @@ def test_wrap_with_short_lines(): def test_list_each_item_in_list_has_new_line(): - s = """Type of weather: + input = """Type of weather: - Hail - Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain - Snow""" - assert lines.wrap(s, width=80) == s + expected = """Type of weather: + +- Hail +- Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain +- Snow""" + assert lines.wrap(input, width=80) == expected def test_list_items_are_indented(): @@ -126,7 +131,7 @@ def test_list_items_are_indented(): assert lines.wrap(input, width=60) == expected -def test_list_new_line_preserved_after_colon(): +def test_list_items_short_text_before_list_with_new_line_preserved(): input = """Today's forecast will have different types of weather: - A mix of hail and snow, followed by rain clouds, 
then finally clear sky @@ -142,20 +147,33 @@ def test_list_new_line_preserved_after_colon(): assert lines.wrap(input, width=60, indent=16) == expected -def test_list_items_longer_text_before_list(): +def test_list_items_long_text_before_list_with_new_line_preserved(): input = """Weather Weather Weather Weather Weather Weather Weather -Weather Weather Weather Weather Weather Weather Weather -Type of weather: +Weather Weather Weather Weather Weather Weather Type of weather: - Hail - Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain - Snow""" expected = """Weather Weather Weather Weather Weather Weather Weather -Weather Weather Weather Weather Weather Weather Weather Type -of weather: +Weather Weather Weather Weather Weather Weather Type of +weather: - Hail - Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain Rain - Snow""" assert lines.wrap(input, width=60) == expected + + +def test_new_line_added_short_text_before_list(): + input = """Today's forecast will have different weather: +- A mix of hail and snow, followed by rain clouds, then finally clear sky +- Rain +- Snow""" + expected = """Today's forecast will have different weather: + +- A mix of hail and snow, followed by rain clouds, then + finally clear sky +- Rain +- Snow""" + assert lines.wrap(input, width=60) == expected From c9bbbea9f237b034360bcce632047bae7ec44851 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 13 Jul 2023 15:00:56 -0400 Subject: [PATCH 1023/1339] chore(main): release 1.11.4 (#1692) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 715482c64309..8a746b6f0d91 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.11.4](https://github.com/googleapis/gapic-generator-python/compare/v1.11.3...v1.11.4) (2023-07-11) + + +### Documentation + +* Ensure new line after colon ([#1691](https://github.com/googleapis/gapic-generator-python/issues/1691)) ([e400fba](https://github.com/googleapis/gapic-generator-python/commit/e400fba0e75f3f8789e9b6ba00b949a012b17496)) + ## [1.11.3](https://github.com/googleapis/gapic-generator-python/compare/v1.11.2...v1.11.3) (2023-07-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2bc04d9c2552..3840434c3997 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.3" +version = "1.11.4" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From c932c24261afe2e06444d72cf5e7c8cbc71bd757 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 17 Jul 2023 16:17:36 +0200 Subject: [PATCH 1024/1339] chore(deps): update all dependencies (#1697) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ebc2313f4f66..30d519285b30 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,4 +1,4 @@ -click==8.1.3 +click==8.1.5 google-api-core==2.11.1 googleapis-common-protos==1.59.1 jinja2==3.1.2 @@ -9,6 +9,6 @@ PyYAML==6.0 setuptools==68.0.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.3 -pytest-asyncio==0.21.0 +pytest-asyncio==0.21.1 libcst==1.0.1 inflection==0.5.1 \ No newline at end of file From 
2e0ac5b088b015c5936d2565befa1037cfa16457 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 11:45:25 -0400 Subject: [PATCH 1025/1339] build(deps): [autoapprove] bump cryptography from 41.0.0 to 41.0.2 (#1698) Source-Link: https://github.com/googleapis/synthtool/commit/d6103f4a3540ba60f633a9e25c37ec5fe7e6286d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.github/auto-label.yaml | 2 +- packages/gapic-generator/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- packages/gapic-generator/.kokoro/release.sh | 2 +- .../gapic-generator/.kokoro/requirements.txt | 44 ++++++++++--------- .../gapic-generator/.kokoro/trampoline.sh | 2 +- .../gapic-generator/.kokoro/trampoline_v2.sh | 2 +- packages/gapic-generator/.trampolinerc | 4 +- 10 files changed, 34 insertions(+), 32 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 98994f474104..ae4a522b9e5f 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb +# created: 2023-07-17T15:20:13.819193964Z diff --git a/packages/gapic-generator/.github/auto-label.yaml b/packages/gapic-generator/.github/auto-label.yaml index 41bff0b5375a..b2016d119b40 100644 --- a/packages/gapic-generator/.github/auto-label.yaml +++ b/packages/gapic-generator/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/build.sh b/packages/gapic-generator/.kokoro/build.sh index a8340f3a586e..ecf29aa67f54 100755 --- a/packages/gapic-generator/.kokoro/build.sh +++ b/packages/gapic-generator/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile index f8137d0ae497..8e39a2cc438d 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/.kokoro/populate-secrets.sh b/packages/gapic-generator/.kokoro/populate-secrets.sh index f52514257ef0..6f3972140e80 100755 --- a/packages/gapic-generator/.kokoro/populate-secrets.sh +++ b/packages/gapic-generator/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh index 336f0eca1e1f..ea4f0153bf97 100755 --- a/packages/gapic-generator/.kokoro/release.sh +++ b/packages/gapic-generator/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index c7929db6d152..67d70a110897 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - 
--hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + 
--hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # gcp-releasetool # secretstorage diff --git a/packages/gapic-generator/.kokoro/trampoline.sh b/packages/gapic-generator/.kokoro/trampoline.sh index f39236e943a8..d85b1f267693 100755 --- a/packages/gapic-generator/.kokoro/trampoline.sh +++ b/packages/gapic-generator/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/.kokoro/trampoline_v2.sh b/packages/gapic-generator/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..59a7cf3a9373 100755 --- a/packages/gapic-generator/.kokoro/trampoline_v2.sh +++ b/packages/gapic-generator/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.trampolinerc b/packages/gapic-generator/.trampolinerc index 0eee72ab62aa..a7dfeb42c6d0 100644 --- a/packages/gapic-generator/.trampolinerc +++ b/packages/gapic-generator/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. 
required_envvars+=( ) From d61a92b863f38e39d3157befb388eed7ecb4ff53 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 11:32:26 -0400 Subject: [PATCH 1026/1339] build(deps): [autoapprove] bump pygments from 2.13.0 to 2.15.0 (#1701) Source-Link: https://github.com/googleapis/synthtool/commit/eaef28efd179e6eeb9f4e9bf697530d074a6f3b9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index ae4a522b9e5f..17c21d96d654 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb -# created: 2023-07-17T15:20:13.819193964Z + digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e +# created: 2023-07-21T02:12:46.49799314Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 67d70a110897..b563eb284459 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -396,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich From 62625b355852197740d7aef16ac3f3ef014be4f5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 25 Jul 2023 17:38:00 +0200 Subject: [PATCH 1027/1339] chore(deps): update all dependencies (#1702) * chore(deps): update all dependencies * revert --------- Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 30d519285b30..ac70dddfee79 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,11 +1,11 @@ -click==8.1.5 +click==8.1.6 google-api-core==2.11.1 googleapis-common-protos==1.59.1 jinja2==3.1.2 MarkupSafe==2.1.3 protobuf==4.23.4 pypandoc==1.11 
-PyYAML==6.0 +PyYAML==6.0.1 setuptools==68.0.0 grpc-google-iam-v1==0.12.6 proto-plus==1.22.3 From 4634b1c4ee8b071260797c6b5e4c058e21676377 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 07:14:08 -0400 Subject: [PATCH 1028/1339] build(deps): [autoapprove] bump certifi from 2022.12.7 to 2023.7.22 (#1704) Source-Link: https://github.com/googleapis/synthtool/commit/395d53adeeacfca00b73abf197f65f3c17c8f1e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 17c21d96d654..0ddd0e4d1873 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e -# created: 2023-07-21T02:12:46.49799314Z + digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 +# created: 2023-07-25T21:01:10.396410762Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index b563eb284459..76d9bba0f7d0 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From 745ffa63630dcbf75c5e54ad06c7605ac4033a03 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 28 Jul 2023 13:09:43 -0400 Subject: [PATCH 1029/1339] chore: set CODEOWNER to @googleapis/python-core-client-libraries (#1652) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: set CODEOWNER to @googleapis/python-core-client-libraries * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- 
packages/gapic-generator/.github/CODEOWNERS | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS index 279eedf06a67..993a4580d773 100644 --- a/packages/gapic-generator/.github/CODEOWNERS +++ b/packages/gapic-generator/.github/CODEOWNERS @@ -1,8 +1,4 @@ # Code owners file. # This file controls who is tagged for review for any given pull request. -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax - -* @googleapis/actools-python @googleapis/yoshi-python -*.yaml @googleapis/yoshi-python @googleapis/actools-python +* @googleapis/python-core-client-libraries From 8d3a572ce5d6a04772c43dfa48599325a4ae475c Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 1 Aug 2023 12:24:46 -0400 Subject: [PATCH 1030/1339] chore: fix whitespace (#1710) Co-authored-by: Omair Naveed --- packages/gapic-generator/tests/unit/generator/test_options.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 6716e21fe6a0..3a12bf474e81 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -133,7 +133,7 @@ def test_options_service_config(fs): "UNKNOWN" ] }, - "timeout":"5s" + "timeout": "5s" } ] } From 79a1680f8fe2732113999ad2c8a631b0c4a27383 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 2 Aug 2023 12:07:37 -0400 Subject: [PATCH 1031/1339] build: include error information when integration test fails (#1712) --- packages/gapic-generator/.github/workflows/tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 
775861986c1b..8a7211a685a6 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -297,7 +297,7 @@ jobs: echo "and it will start over with a clean cache." echo "The old one will disappear after 7 days." - name: Integration Tests - run: bazel test //tests/integration/... + run: bazel test //tests/integration/... --test_output=errors goldens-lint: runs-on: ubuntu-latest From c5560389131f6a052f42e683022080521136876b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Aug 2023 10:04:57 -0400 Subject: [PATCH 1032/1339] build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 (#1717) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 48 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 0ddd0e4d1873..a3da1b0d4cd3 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 -# created: 2023-07-25T21:01:10.396410762Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 76d9bba0f7d0..029bd342de94 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - 
--hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + 
--hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage From 6d4abf2b2641851840a4062aa4628a90eb3bd770 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 3 Aug 2023 13:27:47 -0400 Subject: [PATCH 1033/1339] fix: fix docs build for generated clients (#1715) --- .../ads-templates/docs/_static/{custom.css => custom.css.j2} | 0 packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 | 2 +- .../gapic/templates/docs/_static/{custom.css => custom.css.j2} | 0 packages/gapic-generator/gapic/templates/docs/conf.py.j2 | 2 +- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 2 +- .../tests/integration/goldens/asset/docs/_static/custom.css | 3 +++ .../tests/integration/goldens/asset/docs/conf.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/noxfile.py | 2 +- .../integration/goldens/credentials/docs/_static/custom.css | 3 +++ .../tests/integration/goldens/credentials/docs/conf.py | 2 +- .../tests/integration/goldens/credentials/noxfile.py | 2 +- .../tests/integration/goldens/eventarc/docs/_static/custom.css | 3 +++ .../tests/integration/goldens/eventarc/docs/conf.py | 2 +- .../tests/integration/goldens/eventarc/noxfile.py | 2 +- .../tests/integration/goldens/logging/docs/_static/custom.css | 3 +++ 
.../tests/integration/goldens/logging/docs/conf.py | 2 +- .../tests/integration/goldens/logging/noxfile.py | 2 +- .../tests/integration/goldens/redis/docs/_static/custom.css | 3 +++ .../tests/integration/goldens/redis/docs/conf.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/noxfile.py | 2 +- 20 files changed, 28 insertions(+), 13 deletions(-) rename packages/gapic-generator/gapic/ads-templates/docs/_static/{custom.css => custom.css.j2} (100%) rename packages/gapic-generator/gapic/templates/docs/_static/{custom.css => custom.css.j2} (100%) create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css diff --git a/packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css b/packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css.j2 similarity index 100% rename from packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css rename to packages/gapic-generator/gapic/ads-templates/docs/_static/custom.css.j2 diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 index 5b3946301db4..80e398ddf7ab 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -85,7 +85,7 @@ version = ".".join(release.split(".")[0:2]) # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/gapic/templates/docs/_static/custom.css b/packages/gapic-generator/gapic/templates/docs/_static/custom.css.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/docs/_static/custom.css rename to packages/gapic-generator/gapic/templates/docs/_static/custom.css.j2 diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index 5c7c4b999870..b1eb707e8c69 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -85,7 +85,7 @@ version = ".".join(release.split(".")[0:2]) # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index eec54b8adea7..c592f651466e 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -127,7 +127,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css new file mode 100755 index 000000000000..06423be0b592 --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 9e78282ded9a..b6b4f0941d46 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index e8c4449425fe..c333de24aff6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css new file mode 100755 index 000000000000..06423be0b592 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 8ab9f3cbcbc9..760611c76f53 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index a9202ef71b7c..b4f8b235098e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css new file mode 100755 index 000000000000..06423be0b592 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py index 5a1bf0a7424b..3859f04c4dc9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 08024a91125c..6d840f965896 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css new file mode 100755 index 000000000000..06423be0b592 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index 6291cb766d25..c78410beee73 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index e87b6c033b8c..db2905b4a388 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css new file mode 100755 index 000000000000..06423be0b592 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index f4c7ab4631b0..b95d38e026c2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -96,7 +96,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. 
-language = None +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 5308e1ac4ba7..0b02ca125e95 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -134,7 +134,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( From de76f6803ade838cf4789fe1a23a773184012c8a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 8 Aug 2023 19:16:40 -0400 Subject: [PATCH 1034/1339] fix: preserve new lines (#1721) --- packages/gapic-generator/gapic/utils/lines.py | 10 ++- .../services/asset_service/async_client.py | 3 + .../asset_v1/services/asset_service/client.py | 3 + .../google/cloud/eventarc_v1/types/trigger.py | 2 + .../config_service_v2/async_client.py | 2 + .../services/config_service_v2/client.py | 2 + .../logging_service_v2/async_client.py | 1 + .../services/logging_service_v2/client.py | 1 + .../metrics_service_v2/async_client.py | 4 ++ .../services/metrics_service_v2/client.py | 4 ++ .../metrics_service_v2/transports/rest.py | 3 + .../cloud/logging_v2/types/log_entry.py | 1 + .../cloud/logging_v2/types/logging_metrics.py | 1 + .../services/cloud_redis/async_client.py | 1 + .../redis_v1/services/cloud_redis/client.py | 1 + .../services/cloud_redis/transports/grpc.py | 1 + .../cloud_redis/transports/grpc_asyncio.py | 1 + .../tests/unit/utils/test_lines.py | 69 +++++++++++++++++++ 18 files changed, 108 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/lines.py 
b/packages/gapic-generator/gapic/utils/lines.py index 2c5cd4021ca5..358759b558f0 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -107,17 +107,23 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 # the sphinx docs build will fail. text = re.sub(r':\n([^\n])', r':\n\n\1', text) - text = text[len(first):].strip() + text = text[len(first):] if not text: return first.strip() + # Strip leading and ending whitespace. + # Preserve new line at the beginning. + new_line = '\n' if text[0] == '\n' else '' + text = new_line + text.strip() + # Tokenize the rest of the text to try to preserve line breaks # that semantically matter. tokens = [] token = '' for line in text.split('\n'): # Ensure that lines that start with a hyphen are always on a new line - if line.strip().startswith('-') and token: + # Ensure that blank lines are preserved + if (line.strip().startswith('-') or not len(line)) and token: tokens.append(token) token = '' token += line + '\n' diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index c5106ca73b2e..9d0193c98c4f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -363,6 +363,7 @@ async def sample_list_assets(): Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager: ListAssets response. + Iterating over this object will yield results and resolve additional pages automatically. 
@@ -1211,6 +1212,7 @@ async def sample_search_all_resources(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager: Search all resources response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1406,6 +1408,7 @@ async def sample_search_all_iam_policies(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: Search all IAM policies response. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index aeccfe63ae3f..42b9b727b2d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -567,6 +567,7 @@ def sample_list_assets(): Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager: ListAssets response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1388,6 +1389,7 @@ def sample_search_all_resources(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager: Search all resources response. + Iterating over this object will yield results and resolve additional pages automatically. @@ -1576,6 +1578,7 @@ def sample_search_all_iam_policies(): Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager: Search all IAM policies response. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index e7f7acdd0bc8..504afb5fe6ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -153,6 +153,7 @@ class EventFilter(proto.Message): Required. The name of a CloudEvents attribute. Currently, only a subset of attributes are supported for filtering. + All triggers MUST provide a filter for the 'type' attribute. value (str): @@ -222,6 +223,7 @@ class CloudRun(proto.Message): Required. The name of the Cloud Run service being addressed. See https://cloud.google.com/run/docs/reference/rest/v1/namespaces.services. + Only services located in the same project of the trigger object can be addressed. path (str): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 746d2e487861..e42f50769483 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -261,6 +261,7 @@ async def sample_list_buckets(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. 
@@ -762,6 +763,7 @@ async def sample_list_views(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 4a515374cdf8..419f7094486f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -491,6 +491,7 @@ def sample_list_buckets(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: The response from ListBuckets. + Iterating over this object will yield results and resolve additional pages automatically. @@ -997,6 +998,7 @@ def sample_list_views(): Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: The response from ListViews. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 698b72276c31..efc2a54185b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -818,6 +818,7 @@ async def sample_list_logs(): Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a77eb555cefd..c9ad88c97eeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -980,6 +980,7 @@ def sample_list_logs(): Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: Result returned from ListLogs. + Iterating over this object will yield results and resolve additional pages automatically. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index ffcebb38ae3e..f6f7e55ff827 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -247,6 +247,7 @@ async def sample_list_log_metrics(): Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. @@ -372,6 +373,7 @@ async def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -509,6 +511,7 @@ async def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -639,6 +642,7 @@ async def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. 
The diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 09e173e72d04..7d74c4f61307 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -441,6 +441,7 @@ def sample_list_log_metrics(): Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: Result returned from ListLogMetrics. + Iterating over this object will yield results and resolve additional pages automatically. @@ -558,6 +559,7 @@ def sample_get_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -687,6 +689,7 @@ def sample_create_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -817,6 +820,7 @@ def sample_update_log_metric(): value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. 
The diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py index c68edad36d43..9a834ebfadfe 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py @@ -315,6 +315,7 @@ def __call__(self, value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -475,6 +476,7 @@ def __call__(self, value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The @@ -638,6 +640,7 @@ def __call__(self, value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 040a12549b99..21f6b0cd67d3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -90,6 +90,7 @@ class LogEntry(proto.Message): protocol buffer. Some Google Cloud Platform services use this field for their log entry payloads. 
+ The following protocol buffer types are supported; user-defined types are not supported: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 8d39eb807f72..1259e001a861 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -42,6 +42,7 @@ class LogMetric(proto.Message): r"""Describes a logs-based metric. The value of the metric is the number of log entries that match a logs filter in a given time interval. + Logs-based metrics can also be used to extract values from logs and create a distribution of the values. The distribution records the statistics of the extracted values along with an diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 976bb3bd7173..5babf20b1037 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1021,6 +1021,7 @@ async def export_instance(self, r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 414d578bfeaa..5f804c725a7c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1215,6 +1215,7 @@ def export_instance(self, r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 3c22cd06830e..0c162e8bb15b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -465,6 +465,7 @@ def export_instance(self) -> Callable[ Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 806b87287fb1..082123d4651e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -468,6 +468,7 @@ def export_instance(self) -> Callable[ Export Redis instance data into a Redis RDB format file in Cloud Storage. Redis will continue serving during this operation. + The returned operation is automatically deleted after a few hours, so there is no need to call DeleteOperation. diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 934f2ad66689..7a0638b71add 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -101,6 +101,15 @@ def test_wrap_with_short_lines(): assert lines.wrap(input, width=60) == expected +def test_lines_which_have_2_spaces_following_period(): + input = """Information related to the a standard versioned package. This includes +package info for APT, Yum, Zypper, and Googet package managers.""" + expected = """Information related to the a standard versioned package. 
+This includes package info for APT, Yum, Zypper, and Googet +package managers.""" + assert lines.wrap(input, width=60) == expected + + def test_list_each_item_in_list_has_new_line(): input = """Type of weather: - Hail @@ -177,3 +186,63 @@ def test_new_line_added_short_text_before_list(): - Rain - Snow""" assert lines.wrap(input, width=60) == expected + + +def test_new_line_preserved_short_text_before_list_without_colon(): + input = """Today's forecast will have different weather. + +- A mix of hail and snow, followed by rain clouds, then finally clear sky +- Rain +- Snow""" + expected = """Today's forecast will have different weather. + +- A mix of hail and snow, followed by rain clouds, then + finally clear sky +- Rain +- Snow""" + assert lines.wrap(input, width=60) == expected + + +def test_list_with_multiple_paragraphs(): + input = """Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec porta euismod est a viverra. Integer vulputate ipsum id lacus tincidunt, id tincidunt tortor ullamcorper. Vestibulum facilisis at nulla nec lobortis. Nunc consectetur suscipit lacus id aliquam. + +Donec et urna aliquam, efficitur mauris et, consectetur enim. Aliquam aliquet turpis eget erat gravida condimentum. Sed vel feugiat risus. + +Sed interdum. + +Convallis turpis nec congue. Integer vulputate sed urna eu mollis. Mauris in congue nisi, sed pellentesque ex. + +- Ut vestibulum +- consequat imperdiet +- Integer rhoncus varius. Ante, ac tempus augue +finibus sit amet. Integer ac fermentum neque, a sodales nibh. Mauris et dictum ipsum. Integer sit amet posuere urna. Nullam cursus molestie posuere. Praesent imperdiet cursus purus, in posuere odio. +- Orci varius natoque penatibus et + +Aagnis dis parturient montes, nascetur ridiculus mus. Mauris mattis turpis quis hendrerit gravida. Curabitur nec diam erat. In nec est nisl. Quisque ut orci efficitur, vestibulum ante non, vestibulum erat. 
Donec mollis ultricies nisl.""" + expected = """Lorem ipsum dolor sit amet, consectetur adipiscing elit. +Donec porta euismod est a viverra. Integer vulputate ipsum +id lacus tincidunt, id tincidunt tortor ullamcorper. +Vestibulum facilisis at nulla nec lobortis. Nunc consectetur +suscipit lacus id aliquam. Donec et urna aliquam, efficitur +mauris et, consectetur enim. Aliquam aliquet turpis eget +erat gravida condimentum. Sed vel feugiat risus. + +Sed interdum. + +Convallis turpis nec congue. Integer vulputate sed urna eu +mollis. Mauris in congue nisi, sed pellentesque ex. + +- Ut vestibulum +- consequat imperdiet +- Integer rhoncus varius. Ante, ac tempus augue finibus sit + amet. Integer ac fermentum neque, a sodales nibh. Mauris + et dictum ipsum. Integer sit amet posuere urna. Nullam + cursus molestie posuere. Praesent imperdiet cursus purus, + in posuere odio. +- Orci varius natoque penatibus et + +Aagnis dis parturient montes, nascetur ridiculus mus. Mauris +mattis turpis quis hendrerit gravida. Curabitur nec diam +erat. In nec est nisl. Quisque ut orci efficitur, vestibulum +ante non, vestibulum erat. 
Donec mollis ultricies nisl.""" + assert lines.wrap(input, width=60) == expected From 7a3a6f126aafe6faf5d80430a7d3febc6bd834bc Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Aug 2023 15:34:58 +0200 Subject: [PATCH 1035/1339] chore(deps): update all dependencies (#1707) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ac70dddfee79..396d1b8c4b52 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,9 +1,9 @@ click==8.1.6 google-api-core==2.11.1 -googleapis-common-protos==1.59.1 +googleapis-common-protos==1.60.0 jinja2==3.1.2 MarkupSafe==2.1.3 -protobuf==4.23.4 +protobuf==4.24.0 pypandoc==1.11 PyYAML==6.0.1 setuptools==68.0.0 From 69fe6ca0e4ad4246f9997cdc11aca266f945fd22 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 10 Aug 2023 13:04:12 -0400 Subject: [PATCH 1036/1339] chore: remove experimental note from README (#1719) Co-authored-by: Omair Naveed --- packages/gapic-generator/README.rst | 75 ++++++++++++++++++++++++++++- 1 file changed, 73 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index d5888edc5d52..ef24297730a6 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -1,3 +1,5 @@ +.. _codingstyle: + API Client Generator for Python =============================== @@ -15,12 +17,81 @@ to generate a client library. Purpose ------- -This library primarily exists to facilitate experimentation, particularly -regarding: +This library replaces the `monolithic generator`_ +with some improvements: - An explicit normalized format for specifying APIs. - Light weight, in-language code generators. +.. 
_monolithic generator: https://github.com/googleapis/gapic-generator + + +Bazel +------------- +This generator can be called from Bazel, which is a recommended way of using it inside a continuous integration build or any other automated pipeline. + +Clone the googleapis repository +$ git clone https://github.com/googleapis/googleapis.git + +Create the targets +------------------ +You need to add the following targets to your BUILD.bazel file. + +.. code-block:: c + + load( + "@gapic_generator_python//rules_python_gapic:py_gapic.bzl", + "py_gapic_library" + ) + + load( + "@gapic_generator_python//rules_python_gapic:py_gapic_pkg.bzl", + "py_gapic_assembly_pkg" + ) + + py_gapic_library( + name = "documentai_py_gapic", + srcs = [":documentai_proto"], + ) + + py_gapic_assembly_pkg( + name = "documentai-v1beta2-py", + deps = [ + ":documentai_py_gapic", + ], + ) + + +Compiling an API +---------------- + +Using Bazel: + +.. code-block:: c + + bazel build //google/cloud/documentai/v1beta2:documentai-v1beta2-py + +Using Protoc: + +.. code-block:: c + + # This is assumed to be in the `googleapis` project root. + $ protoc google/cloud/vision/v1/*.proto \ + --python_gapic_out=/dest/ + +Development +------------- +`Development`_ + +.. _Development: https://github.com/googleapis/gapic-generator-python/blob/main/DEVELOPMENT.md + +Contributing +------------- +If you are looking to contribute to the project, please see `Contributing`_ +for guidelines. + +.. 
_Contributing: https://github.com/googleapis/gapic-generator-python/blob/main/CONTRIBUTING.md + Documentation ------------- From 9a5cbc09d0c2152338d078b51067d3545aea4fc6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 10 Aug 2023 19:37:15 -0400 Subject: [PATCH 1037/1339] build: upgrade rules_python to 0.24.0 (#1731) --- packages/gapic-generator/WORKSPACE | 17 +- packages/gapic-generator/repositories.bzl | 6 - packages/gapic-generator/requirements.in | 14 + packages/gapic-generator/requirements.txt | 423 +++++++++++++++++++++- 4 files changed, 438 insertions(+), 22 deletions(-) create mode 100644 packages/gapic-generator/requirements.in diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 3c08a04e7ad3..6748cf6cae71 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -22,9 +22,9 @@ http_archive( ], ) -_rules_python_version = "0.9.0" +_rules_python_version = "0.24.0" -_rules_python_sha256 = "5fa3c738d33acca3b97622a13a741129f67ef43f5fdfcec63b29374cc0574c29" +_rules_python_sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578" http_archive( name = "rules_python", @@ -33,6 +33,19 @@ http_archive( url = "https://github.com/bazelbuild/rules_python/archive/{}.tar.gz".format(_rules_python_version), ) +load("@rules_python//python:repositories.bzl", "py_repositories") + +load("@rules_python//python:pip.bzl", "pip_parse") + +py_repositories() + +pip_parse( + name = "gapic_generator_python_pip_deps", + requirements_lock = "//:requirements.txt", +) +load("@gapic_generator_python_pip_deps//:requirements.bzl", "install_deps") + +install_deps() # # Import gapic-generator-python specific dependencies # diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 772ad5400ee9..0c8394275dfe 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -1,5 +1,4 @@ 
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") -load("@rules_python//python:pip.bzl", "pip_install") _PANDOC_BUILD_FILE = """ filegroup( @@ -9,11 +8,6 @@ filegroup( )""" def gapic_generator_python(): - _maybe( - pip_install, - name = "gapic_generator_python_pip_deps", - requirements = "@gapic_generator_python//:requirements.txt", - ) _protobuf_version = "3.21.12" _protobuf_sha256 = "930c2c3b5ecc6c9c12615cf5ad93f1cd6e12d0aba862b572e076259970ac3a53" diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in new file mode 100644 index 000000000000..8dc2b43cf304 --- /dev/null +++ b/packages/gapic-generator/requirements.in @@ -0,0 +1,14 @@ +click +google-api-core +googleapis-common-protos +jinja2 +MarkupSafe +protobuf +pypandoc +PyYAML +setuptools +grpc-google-iam-v1 +proto-plus +pytest-asyncio +libcst +inflection diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 396d1b8c4b52..4d9e2607bb65 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,14 +1,409 @@ -click==8.1.6 -google-api-core==2.11.1 -googleapis-common-protos==1.60.0 -jinja2==3.1.2 -MarkupSafe==2.1.3 -protobuf==4.24.0 -pypandoc==1.11 -PyYAML==6.0.1 -setuptools==68.0.0 -grpc-google-iam-v1==0.12.6 -proto-plus==1.22.3 -pytest-asyncio==0.21.1 -libcst==1.0.1 -inflection==0.5.1 \ No newline at end of file +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +cachetools==5.3.1 \ + --hash=sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590 \ + --hash=sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b + # via google-auth +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 
+ # via requests +charset-normalizer==3.2.0 \ + --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ + --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ + --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ + --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ + --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ + --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ + --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ + --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ + --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ + --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ + --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ + --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ + --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ + --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ + --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ + --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ + --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace \ + --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ + --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ + --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ + --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ + --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ + --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ + 
--hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ + --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ + --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ + --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ + --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ + --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ + --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ + --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ + --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ + --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ + --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ + --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ + --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ + --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ + --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ + --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ + --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ + --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ + --hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ + --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ + --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ + --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ + --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ + --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ + 
--hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ + --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ + --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ + --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ + --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ + --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ + --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ + --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ + --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ + --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ + --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ + --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ + --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ + --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ + --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ + --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ + --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ + --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ + --hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ + --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ + --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ + --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ + --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ + --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ + 
--hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ + --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ + --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ + --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa + # via requests +click==8.1.6 \ + --hash=sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd \ + --hash=sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5 + # via -r requirements.in +exceptiongroup==1.1.2 \ + --hash=sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5 \ + --hash=sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f + # via pytest +google-api-core==2.11.1 \ + --hash=sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a \ + --hash=sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a + # via -r requirements.in +google-auth==2.22.0 \ + --hash=sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce \ + --hash=sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873 + # via google-api-core +googleapis-common-protos[grpc]==1.60.0 \ + --hash=sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918 \ + --hash=sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708 + # via + # -r requirements.in + # google-api-core + # grpc-google-iam-v1 +grpc-google-iam-v1==0.12.6 \ + --hash=sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f \ + --hash=sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c + # via -r requirements.in +grpcio==1.56.2 \ + --hash=sha256:06e84ad9ae7668a109e970c7411e7992751a116494cba7c4fb877656527f9a57 \ + --hash=sha256:0ff789ae7d8ddd76d2ac02e7d13bfef6fc4928ac01e1dcaa182be51b6bcc0aaa \ + --hash=sha256:10954662f77dc36c9a1fb5cc4a537f746580d6b5734803be1e587252682cda8d \ + 
--hash=sha256:139f66656a762572ae718fa0d1f2dce47c05e9fbf7a16acd704c354405b97df9 \ + --hash=sha256:1c31e52a04e62c8577a7bf772b3e7bed4df9c9e0dd90f92b6ffa07c16cab63c9 \ + --hash=sha256:33971197c47965cc1d97d78d842163c283e998223b151bab0499b951fd2c0b12 \ + --hash=sha256:345356b307cce5d14355e8e055b4ca5f99bc857c33a3dc1ddbc544fca9cd0475 \ + --hash=sha256:373b48f210f43327a41e397391715cd11cfce9ded2fe76a5068f9bacf91cc226 \ + --hash=sha256:3ccb621749a81dc7755243665a70ce45536ec413ef5818e013fe8dfbf5aa497b \ + --hash=sha256:42a3bbb2bc07aef72a7d97e71aabecaf3e4eb616d39e5211e2cfe3689de860ca \ + --hash=sha256:42e63904ee37ae46aa23de50dac8b145b3596f43598fa33fe1098ab2cbda6ff5 \ + --hash=sha256:4eb37dd8dd1aa40d601212afa27ca5be255ba792e2e0b24d67b8af5e012cdb7d \ + --hash=sha256:51173e8fa6d9a2d85c14426bdee5f5c4a0654fd5fddcc21fe9d09ab0f6eb8b35 \ + --hash=sha256:5144feb20fe76e73e60c7d73ec3bf54f320247d1ebe737d10672480371878b48 \ + --hash=sha256:5344be476ac37eb9c9ad09c22f4ea193c1316bf074f1daf85bddb1b31fda5116 \ + --hash=sha256:6108e5933eb8c22cd3646e72d5b54772c29f57482fd4c41a0640aab99eb5071d \ + --hash=sha256:6a007a541dff984264981fbafeb052bfe361db63578948d857907df9488d8774 \ + --hash=sha256:6ee26e9dfb3996aff7c870f09dc7ad44a5f6732b8bdb5a5f9905737ac6fd4ef1 \ + --hash=sha256:750de923b456ca8c0f1354d6befca45d1f3b3a789e76efc16741bd4132752d95 \ + --hash=sha256:7c5ede2e2558f088c49a1ddda19080e4c23fb5d171de80a726b61b567e3766ed \ + --hash=sha256:830215173ad45d670140ff99aac3b461f9be9a6b11bee1a17265aaaa746a641a \ + --hash=sha256:8391cea5ce72f4a12368afd17799474015d5d3dc00c936a907eb7c7eaaea98a5 \ + --hash=sha256:8940d6de7068af018dfa9a959a3510e9b7b543f4c405e88463a1cbaa3b2b379a \ + --hash=sha256:89a49cc5ad08a38b6141af17e00d1dd482dc927c7605bc77af457b5a0fca807c \ + --hash=sha256:900bc0096c2ca2d53f2e5cebf98293a7c32f532c4aeb926345e9747452233950 \ + --hash=sha256:97e0efaebbfd222bcaac2f1735c010c1d3b167112d9d237daebbeedaaccf3d1d \ + --hash=sha256:9e04d4e4cfafa7c5264e535b5d28e786f0571bea609c3f0aaab13e891e933e9c \ + 
--hash=sha256:a4c60abd950d6de3e4f1ddbc318075654d275c29c846ab6a043d6ed2c52e4c8c \ + --hash=sha256:a6ff459dac39541e6a2763a4439c4ca6bc9ecb4acc05a99b79246751f9894756 \ + --hash=sha256:a72797549935c9e0b9bc1def1768c8b5a709538fa6ab0678e671aec47ebfd55e \ + --hash=sha256:af4063ef2b11b96d949dccbc5a987272f38d55c23c4c01841ea65a517906397f \ + --hash=sha256:b975b85d1d5efc36cf8b237c5f3849b64d1ba33d6282f5e991f28751317504a1 \ + --hash=sha256:bf0b9959e673505ee5869950642428046edb91f99942607c2ecf635f8a4b31c9 \ + --hash=sha256:c0c85c5cbe8b30a32fa6d802588d55ffabf720e985abe9590c7c886919d875d4 \ + --hash=sha256:c3f3237a57e42f79f1e560726576aedb3a7ef931f4e3accb84ebf6acc485d316 \ + --hash=sha256:c3fa3ab0fb200a2c66493828ed06ccd1a94b12eddbfb985e7fd3e5723ff156c6 \ + --hash=sha256:c435f5ce1705de48e08fcbcfaf8aee660d199c90536e3e06f2016af7d6a938dd \ + --hash=sha256:c90da4b124647547a68cf2f197174ada30c7bb9523cb976665dfd26a9963d328 \ + --hash=sha256:cbdf2c498e077282cd427cfd88bdce4668019791deef0be8155385ab2ba7837f \ + --hash=sha256:d1fbad1f9077372b6587ec589c1fc120b417b6c8ad72d3e3cc86bbbd0a3cee93 \ + --hash=sha256:d39f5d4af48c138cb146763eda14eb7d8b3ccbbec9fe86fb724cd16e0e914c64 \ + --hash=sha256:ddb4a6061933bd9332b74eac0da25f17f32afa7145a33a0f9711ad74f924b1b8 \ + --hash=sha256:ded637176addc1d3eef35331c39acc598bac550d213f0a1bedabfceaa2244c87 \ + --hash=sha256:f20fd21f7538f8107451156dd1fe203300b79a9ddceba1ee0ac8132521a008ed \ + --hash=sha256:fda2783c12f553cdca11c08e5af6eecbd717280dc8fbe28a110897af1c15a88c + # via + # googleapis-common-protos + # grpc-google-iam-v1 +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 + # via requests +inflection==0.5.1 \ + --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ + --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 + # via -r requirements.in +iniconfig==2.0.0 \ + 
--hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ + --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 + # via pytest +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via -r requirements.in +libcst==1.0.1 \ + --hash=sha256:0138068baf09561268c7f079373bda45f0e2b606d2d19df1307ca8a5134fc465 \ + --hash=sha256:119ba709f1dcb785a4458cf36cedb51d6f9cb2eec0acd7bb171f730eac7cb6ce \ + --hash=sha256:1adcfa7cafb6a0d39a1a0bec541355608038b45815e0c5019c95f91921d42884 \ + --hash=sha256:37187337f979ba426d8bfefc08008c3c1b09b9e9f9387050804ed2da88107570 \ + --hash=sha256:414350df5e334ddf0db1732d63da44e81b734d45abe1c597b5e5c0dd46aa4156 \ + --hash=sha256:440887e5f82efb299f2e98d4bfa5663851a878cfc0efed652ab8c50205191436 \ + --hash=sha256:47dba43855e9c7b06d8b256ee81f0ebec6a4f43605456519577e09dfe4b4288c \ + --hash=sha256:4840a3de701778f0a19582bb3085c61591329153f801dc25da84689a3733960b \ + --hash=sha256:4b4e336f6d68456017671cdda8ddebf9caebce8052cc21a3f494b03d7bd28386 \ + --hash=sha256:5599166d5fec40e18601fb8868519dde99f77b6e4ad6074958018f9545da7abd \ + --hash=sha256:5e3293e77657ba62533553bb9f0c5fb173780e164c65db1ea2a3e0d03944a284 \ + --hash=sha256:600c4d3a9a2f75d5a055fed713a5a4d812709947909610aa6527abe08a31896f \ + --hash=sha256:6caa33430c0c7a0fcad921b0deeec61ddb96796b6f88dca94966f6db62065f4f \ + --hash=sha256:80423311f09fc5fc3270ede44d30d9d8d3c2d3dd50dbf703a581ca7346949fa6 \ + --hash=sha256:8420926791b0b6206cb831a7ec73d26ae820e65bdf07ce9813c7754c7722c07a \ + --hash=sha256:8c50541c3fd6b1d5a3765c4bb5ee8ecbba9d0e798e48f79fd5adf3b6752de4d0 \ + --hash=sha256:8d31ce2790eab59c1bd8e33fe72d09cfc78635c145bdc3f08296b360abb5f443 \ + --hash=sha256:967c66fabd52102954207bf1541312b467afc210fdf7033f32da992fb6c2372c \ + --hash=sha256:9a4931feceab171e6fce73de94e13880424367247dad6ff2b49cabfec733e144 \ + 
--hash=sha256:9d6dec2a3c443792e6af7c36fadc256e4ea586214c76b52f0d18118811dbe351 \ + --hash=sha256:a6b5aea04c35e13109edad3cf83bc6dcd74309b150a781d2189eecb288b73a87 \ + --hash=sha256:ae49dcbfadefb82e830d41d9f0a1db0af3b771224768f431f1b7b3a9803ed7e3 \ + --hash=sha256:ae7f4e71d714f256b5f2ff98b5a9effba0f9dff4d779d8f35d7eb157bef78f59 \ + --hash=sha256:b0533de4e35396c61aeb3a6266ac30369a855910c2385aaa902ff4aabd60d409 \ + --hash=sha256:b666a605f4205c8357696f3b6571a38f6a8537cdcbb8f357587d35168298af34 \ + --hash=sha256:b97f652b15c50e91df411a9c8d5e6f75882b30743a49b387dcedd3f68ed94d75 \ + --hash=sha256:c90c74a8a314f0774f045122323fb60bacba79cbf5f71883c0848ecd67179541 \ + --hash=sha256:d237e9164a43caa7d6765ee560412264484e7620c546a2ee10a8d01bd56884e0 \ + --hash=sha256:ddd4e0eeec499d1c824ab545e62e957dbbd69a16bc4273208817638eb7d6b3c6 \ + --hash=sha256:f2cb687e1514625e91024e50a5d2e485c0ad3be24f199874ebf32b5de0346150 + # via -r requirements.in +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + 
--hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + 
--hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 + # via + # -r requirements.in + # jinja2 +mypy-extensions==1.0.0 \ + --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ + --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 + # via typing-inspect +packaging==23.1 \ + --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ + --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f + # via pytest +pluggy==1.2.0 \ + --hash=sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849 \ + --hash=sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3 + # via pytest +proto-plus==1.22.3 \ + --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ + --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b + # 
via -r requirements.in +protobuf==4.24.0 \ + --hash=sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf \ + --hash=sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d \ + --hash=sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61 \ + --hash=sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85 \ + --hash=sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3 \ + --hash=sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52 \ + --hash=sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201 \ + --hash=sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109 \ + --hash=sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04 \ + --hash=sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7 \ + --hash=sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e \ + --hash=sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5 \ + --hash=sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653 + # via + # -r requirements.in + # google-api-core + # googleapis-common-protos + # grpc-google-iam-v1 + # proto-plus +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + # via google-auth +pypandoc==1.11 \ + --hash=sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5 \ + --hash=sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007 + # via -r requirements.in +pytest==7.4.0 \ + --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ + 
--hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a + # via pytest-asyncio +pytest-asyncio==0.21.1 \ + --hash=sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d \ + --hash=sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b + # via -r requirements.in +pyyaml==6.0.1 \ + --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ + --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ + --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ + --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ + --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \ + --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ + --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ + --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ + --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ + --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ + --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ + --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ + --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \ + --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ + --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ + --hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ + --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ + --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ + --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ + --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ + 
--hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ + --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ + --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ + --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ + --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \ + --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \ + --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \ + --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \ + --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \ + --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \ + --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ + --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ + --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ + --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ + --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ + --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ + --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ + --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \ + --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \ + --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f + # via + # -r requirements.in + # libcst +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 + # via google-api-core +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 
\ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via google-auth +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via pytest +typing-extensions==4.7.1 \ + --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ + --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 + # via + # libcst + # typing-inspect +typing-inspect==0.9.0 \ + --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ + --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 + # via libcst +urllib3==1.26.16 \ + --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ + --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 + # via + # google-auth + # requests + +# The following packages are considered to be unsafe in a requirements file: +setuptools==68.0.0 \ + --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ + --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 + # via -r requirements.in From 47c24ec6d986f6b8bbd390aace5ba71ea665a704 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 14 Aug 2023 19:00:51 +0200 Subject: [PATCH 1038/1339] chore(deps): update all dependencies (#1739) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 104 +++++++++++----------- 1 file changed, 52 insertions(+), 52 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4d9e2607bb65..6b69ecc88291 100644 --- a/packages/gapic-generator/requirements.txt +++ 
b/packages/gapic-generator/requirements.txt @@ -93,9 +93,9 @@ click==8.1.6 \ --hash=sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd \ --hash=sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5 # via -r requirements.in -exceptiongroup==1.1.2 \ - --hash=sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5 \ - --hash=sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f +exceptiongroup==1.1.3 \ + --hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \ + --hash=sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3 # via pytest google-api-core==2.11.1 \ --hash=sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a \ @@ -116,52 +116,52 @@ grpc-google-iam-v1==0.12.6 \ --hash=sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f \ --hash=sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c # via -r requirements.in -grpcio==1.56.2 \ - --hash=sha256:06e84ad9ae7668a109e970c7411e7992751a116494cba7c4fb877656527f9a57 \ - --hash=sha256:0ff789ae7d8ddd76d2ac02e7d13bfef6fc4928ac01e1dcaa182be51b6bcc0aaa \ - --hash=sha256:10954662f77dc36c9a1fb5cc4a537f746580d6b5734803be1e587252682cda8d \ - --hash=sha256:139f66656a762572ae718fa0d1f2dce47c05e9fbf7a16acd704c354405b97df9 \ - --hash=sha256:1c31e52a04e62c8577a7bf772b3e7bed4df9c9e0dd90f92b6ffa07c16cab63c9 \ - --hash=sha256:33971197c47965cc1d97d78d842163c283e998223b151bab0499b951fd2c0b12 \ - --hash=sha256:345356b307cce5d14355e8e055b4ca5f99bc857c33a3dc1ddbc544fca9cd0475 \ - --hash=sha256:373b48f210f43327a41e397391715cd11cfce9ded2fe76a5068f9bacf91cc226 \ - --hash=sha256:3ccb621749a81dc7755243665a70ce45536ec413ef5818e013fe8dfbf5aa497b \ - --hash=sha256:42a3bbb2bc07aef72a7d97e71aabecaf3e4eb616d39e5211e2cfe3689de860ca \ - --hash=sha256:42e63904ee37ae46aa23de50dac8b145b3596f43598fa33fe1098ab2cbda6ff5 \ - 
--hash=sha256:4eb37dd8dd1aa40d601212afa27ca5be255ba792e2e0b24d67b8af5e012cdb7d \ - --hash=sha256:51173e8fa6d9a2d85c14426bdee5f5c4a0654fd5fddcc21fe9d09ab0f6eb8b35 \ - --hash=sha256:5144feb20fe76e73e60c7d73ec3bf54f320247d1ebe737d10672480371878b48 \ - --hash=sha256:5344be476ac37eb9c9ad09c22f4ea193c1316bf074f1daf85bddb1b31fda5116 \ - --hash=sha256:6108e5933eb8c22cd3646e72d5b54772c29f57482fd4c41a0640aab99eb5071d \ - --hash=sha256:6a007a541dff984264981fbafeb052bfe361db63578948d857907df9488d8774 \ - --hash=sha256:6ee26e9dfb3996aff7c870f09dc7ad44a5f6732b8bdb5a5f9905737ac6fd4ef1 \ - --hash=sha256:750de923b456ca8c0f1354d6befca45d1f3b3a789e76efc16741bd4132752d95 \ - --hash=sha256:7c5ede2e2558f088c49a1ddda19080e4c23fb5d171de80a726b61b567e3766ed \ - --hash=sha256:830215173ad45d670140ff99aac3b461f9be9a6b11bee1a17265aaaa746a641a \ - --hash=sha256:8391cea5ce72f4a12368afd17799474015d5d3dc00c936a907eb7c7eaaea98a5 \ - --hash=sha256:8940d6de7068af018dfa9a959a3510e9b7b543f4c405e88463a1cbaa3b2b379a \ - --hash=sha256:89a49cc5ad08a38b6141af17e00d1dd482dc927c7605bc77af457b5a0fca807c \ - --hash=sha256:900bc0096c2ca2d53f2e5cebf98293a7c32f532c4aeb926345e9747452233950 \ - --hash=sha256:97e0efaebbfd222bcaac2f1735c010c1d3b167112d9d237daebbeedaaccf3d1d \ - --hash=sha256:9e04d4e4cfafa7c5264e535b5d28e786f0571bea609c3f0aaab13e891e933e9c \ - --hash=sha256:a4c60abd950d6de3e4f1ddbc318075654d275c29c846ab6a043d6ed2c52e4c8c \ - --hash=sha256:a6ff459dac39541e6a2763a4439c4ca6bc9ecb4acc05a99b79246751f9894756 \ - --hash=sha256:a72797549935c9e0b9bc1def1768c8b5a709538fa6ab0678e671aec47ebfd55e \ - --hash=sha256:af4063ef2b11b96d949dccbc5a987272f38d55c23c4c01841ea65a517906397f \ - --hash=sha256:b975b85d1d5efc36cf8b237c5f3849b64d1ba33d6282f5e991f28751317504a1 \ - --hash=sha256:bf0b9959e673505ee5869950642428046edb91f99942607c2ecf635f8a4b31c9 \ - --hash=sha256:c0c85c5cbe8b30a32fa6d802588d55ffabf720e985abe9590c7c886919d875d4 \ - --hash=sha256:c3f3237a57e42f79f1e560726576aedb3a7ef931f4e3accb84ebf6acc485d316 \ - 
--hash=sha256:c3fa3ab0fb200a2c66493828ed06ccd1a94b12eddbfb985e7fd3e5723ff156c6 \ - --hash=sha256:c435f5ce1705de48e08fcbcfaf8aee660d199c90536e3e06f2016af7d6a938dd \ - --hash=sha256:c90da4b124647547a68cf2f197174ada30c7bb9523cb976665dfd26a9963d328 \ - --hash=sha256:cbdf2c498e077282cd427cfd88bdce4668019791deef0be8155385ab2ba7837f \ - --hash=sha256:d1fbad1f9077372b6587ec589c1fc120b417b6c8ad72d3e3cc86bbbd0a3cee93 \ - --hash=sha256:d39f5d4af48c138cb146763eda14eb7d8b3ccbbec9fe86fb724cd16e0e914c64 \ - --hash=sha256:ddb4a6061933bd9332b74eac0da25f17f32afa7145a33a0f9711ad74f924b1b8 \ - --hash=sha256:ded637176addc1d3eef35331c39acc598bac550d213f0a1bedabfceaa2244c87 \ - --hash=sha256:f20fd21f7538f8107451156dd1fe203300b79a9ddceba1ee0ac8132521a008ed \ - --hash=sha256:fda2783c12f553cdca11c08e5af6eecbd717280dc8fbe28a110897af1c15a88c +grpcio==1.57.0 \ + --hash=sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e \ + --hash=sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6 \ + --hash=sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06 \ + --hash=sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056 \ + --hash=sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6 \ + --hash=sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e \ + --hash=sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019 \ + --hash=sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649 \ + --hash=sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98 \ + --hash=sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a \ + --hash=sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613 \ + --hash=sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73 \ + --hash=sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b \ + 
--hash=sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5 \ + --hash=sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb \ + --hash=sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79 \ + --hash=sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f \ + --hash=sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527 \ + --hash=sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca \ + --hash=sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2 \ + --hash=sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37 \ + --hash=sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56 \ + --hash=sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f \ + --hash=sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51 \ + --hash=sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb \ + --hash=sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0 \ + --hash=sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e \ + --hash=sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc \ + --hash=sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941 \ + --hash=sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6 \ + --hash=sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9 \ + --hash=sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe \ + --hash=sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0 \ + --hash=sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652 \ + --hash=sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766 \ + --hash=sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5 \ + --hash=sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16 \ + 
--hash=sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e \ + --hash=sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0 \ + --hash=sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b \ + --hash=sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2 \ + --hash=sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a \ + --hash=sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf \ + --hash=sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d \ + --hash=sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b # via # googleapis-common-protos # grpc-google-iam-v1 @@ -395,9 +395,9 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==1.26.16 \ - --hash=sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f \ - --hash=sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14 +urllib3==2.0.4 \ + --hash=sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11 \ + --hash=sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4 # via # google-auth # requests From d1a4e5b823675d78eee7264bb7dc161e43e8b843 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 14 Aug 2023 13:07:01 -0400 Subject: [PATCH 1039/1339] build: Update googleapis version (#1736) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/BUILD.bazel | 3 +- packages/gapic-generator/repositories.bzl | 6 +- .../rules_python_gapic/BUILD.bazel | 5 +- .../tests/integration/BUILD.bazel | 14 +- .../asset/google/cloud/asset/__init__.py | 72 + .../asset/google/cloud/asset_v1/__init__.py | 72 + .../google/cloud/asset_v1/gapic_metadata.json | 165 + .../services/asset_service/async_client.py | 1465 ++- .../asset_v1/services/asset_service/client.py | 1513 
+++- .../asset_v1/services/asset_service/pagers.py | 484 + .../services/asset_service/transports/base.py | 154 + .../services/asset_service/transports/grpc.py | 349 +- .../asset_service/transports/grpc_asyncio.py | 349 +- .../services/asset_service/transports/rest.py | 1495 +++- .../google/cloud/asset_v1/types/__init__.py | 72 + .../cloud/asset_v1/types/asset_service.py | 2397 ++++- .../google/cloud/asset_v1/types/assets.py | 525 +- ...erated_asset_service_analyze_move_async.py | 53 + ...nerated_asset_service_analyze_move_sync.py | 53 + ...sset_service_analyze_org_policies_async.py | 54 + ...asset_service_analyze_org_policies_sync.py | 54 + ...nalyze_org_policy_governed_assets_async.py | 54 + ...analyze_org_policy_governed_assets_sync.py | 54 + ...ze_org_policy_governed_containers_async.py | 54 + ...yze_org_policy_governed_containers_sync.py | 54 + ..._batch_get_effective_iam_policies_async.py | 53 + ...e_batch_get_effective_iam_policies_sync.py | 53 + ..._asset_service_create_saved_query_async.py | 53 + ...d_asset_service_create_saved_query_sync.py | 53 + ..._asset_service_delete_saved_query_async.py | 50 + ...d_asset_service_delete_saved_query_sync.py | 50 + ...ted_asset_service_get_saved_query_async.py | 52 + ...ated_asset_service_get_saved_query_sync.py | 52 + ..._asset_service_list_saved_queries_async.py | 53 + ...d_asset_service_list_saved_queries_sync.py | 53 + ...erated_asset_service_query_assets_async.py | 53 + ...nerated_asset_service_query_assets_sync.py | 53 + ..._asset_service_update_saved_query_async.py | 51 + ...d_asset_service_update_saved_query_sync.py | 51 + ...nippet_metadata_google.cloud.asset.v1.json | 2231 ++++- .../asset/scripts/fixup_asset_v1_keywords.py | 23 +- .../unit/gapic/asset_v1/test_asset_service.py | 7427 ++++++++++++++-- .../google/cloud/eventarc/__init__.py | 48 + .../google/cloud/eventarc_v1/__init__.py | 48 + .../cloud/eventarc_v1/gapic_metadata.json | 195 + .../services/eventarc/async_client.py | 1574 +++- 
.../eventarc_v1/services/eventarc/client.py | 1637 +++- .../eventarc_v1/services/eventarc/pagers.py | 366 + .../services/eventarc/transports/base.py | 187 + .../services/eventarc/transports/grpc.py | 345 + .../eventarc/transports/grpc_asyncio.py | 345 + .../services/eventarc/transports/rest.py | 1739 +++- .../cloud/eventarc_v1/types/__init__.py | 56 + .../google/cloud/eventarc_v1/types/channel.py | 160 + .../eventarc_v1/types/channel_connection.py | 92 + .../cloud/eventarc_v1/types/discovery.py | 149 + .../cloud/eventarc_v1/types/eventarc.py | 501 +- .../types/google_channel_config.py | 70 + .../google/cloud/eventarc_v1/types/trigger.py | 202 +- ...generated_eventarc_create_channel_async.py | 63 + ...ventarc_create_channel_connection_async.py | 62 + ...eventarc_create_channel_connection_sync.py | 62 + ..._generated_eventarc_create_channel_sync.py | 63 + ...generated_eventarc_delete_channel_async.py | 57 + ...ventarc_delete_channel_connection_async.py | 56 + ...eventarc_delete_channel_connection_sync.py | 56 + ..._generated_eventarc_delete_channel_sync.py | 57 + ...v1_generated_eventarc_get_channel_async.py | 52 + ...d_eventarc_get_channel_connection_async.py | 52 + ...ed_eventarc_get_channel_connection_sync.py | 52 + ..._v1_generated_eventarc_get_channel_sync.py | 52 + ...ventarc_get_google_channel_config_async.py | 52 + ...eventarc_get_google_channel_config_sync.py | 52 + ...1_generated_eventarc_get_provider_async.py | 52 + ...v1_generated_eventarc_get_provider_sync.py | 52 + ...eventarc_list_channel_connections_async.py | 53 + ..._eventarc_list_channel_connections_sync.py | 53 + ..._generated_eventarc_list_channels_async.py | 53 + ...1_generated_eventarc_list_channels_sync.py | 53 + ...generated_eventarc_list_providers_async.py | 53 + ..._generated_eventarc_list_providers_sync.py | 53 + ...generated_eventarc_update_channel_async.py | 56 + ..._generated_eventarc_update_channel_sync.py | 56 + ...tarc_update_google_channel_config_async.py | 55 + 
...ntarc_update_google_channel_config_sync.py | 55 + ...pet_metadata_google.cloud.eventarc.v1.json | 2429 ++++- .../scripts/fixup_eventarc_v1_keywords.py | 15 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 7872 ++++++++++++++++- .../logging/google/cloud/logging/__init__.py | 40 + .../google/cloud/logging_v2/__init__.py | 40 + .../cloud/logging_v2/gapic_metadata.json | 263 +- .../config_service_v2/async_client.py | 1858 +++- .../services/config_service_v2/client.py | 1856 +++- .../services/config_service_v2/pagers.py | 121 + .../config_service_v2/transports/__init__.py | 5 - .../config_service_v2/transports/base.py | 133 + .../config_service_v2/transports/grpc.py | 391 +- .../transports/grpc_asyncio.py | 391 +- .../config_service_v2/transports/rest.py | 3141 ------- .../logging_service_v2/async_client.py | 100 +- .../services/logging_service_v2/client.py | 105 +- .../logging_service_v2/transports/__init__.py | 5 - .../logging_service_v2/transports/grpc.py | 10 +- .../transports/grpc_asyncio.py | 10 +- .../logging_service_v2/transports/rest.py | 769 -- .../services/metrics_service_v2/client.py | 5 - .../metrics_service_v2/transports/__init__.py | 5 - .../metrics_service_v2/transports/rest.py | 756 -- .../google/cloud/logging_v2/types/__init__.py | 40 + .../cloud/logging_v2/types/log_entry.py | 125 +- .../google/cloud/logging_v2/types/logging.py | 169 +- .../cloud/logging_v2/types/logging_config.py | 1178 ++- .../cloud/logging_v2/types/logging_metrics.py | 38 +- ...onfig_service_v2_copy_log_entries_async.py | 57 + ...config_service_v2_copy_log_entries_sync.py | 57 + ...ig_service_v2_create_bucket_async_async.py | 57 + ...fig_service_v2_create_bucket_async_sync.py | 57 + ...ted_config_service_v2_create_link_async.py | 57 + ...ated_config_service_v2_create_link_sync.py | 57 + ...ted_config_service_v2_delete_link_async.py | 56 + ...ated_config_service_v2_delete_link_sync.py | 56 + ...erated_config_service_v2_get_link_async.py | 52 + 
...nerated_config_service_v2_get_link_sync.py | 52 + ...ed_config_service_v2_get_settings_async.py | 52 + ...ted_config_service_v2_get_settings_sync.py | 52 + ...ated_config_service_v2_list_links_async.py | 53 + ...rated_config_service_v2_list_links_sync.py | 53 + ...ig_service_v2_update_bucket_async_async.py | 56 + ...fig_service_v2_update_bucket_async_sync.py | 56 + ...config_service_v2_update_settings_async.py | 52 + ..._config_service_v2_update_settings_sync.py | 52 + .../snippet_metadata_google.logging.v2.json | 2451 +++-- .../scripts/fixup_logging_v2_keywords.py | 11 +- .../logging_v2/test_config_service_v2.py | 7843 +++++----------- .../logging_v2/test_logging_service_v2.py | 1278 --- .../logging_v2/test_metrics_service_v2.py | 1334 +-- .../redis/google/cloud/redis/__init__.py | 18 + .../redis/google/cloud/redis_v1/__init__.py | 18 + .../google/cloud/redis_v1/gapic_metadata.json | 30 + .../services/cloud_redis/async_client.py | 272 +- .../redis_v1/services/cloud_redis/client.py | 272 +- .../services/cloud_redis/transports/base.py | 28 + .../services/cloud_redis/transports/grpc.py | 58 +- .../cloud_redis/transports/grpc_asyncio.py | 58 +- .../services/cloud_redis/transports/rest.py | 230 +- .../google/cloud/redis_v1/types/__init__.py | 18 + .../cloud/redis_v1/types/cloud_redis.py | 617 +- ...ud_redis_get_instance_auth_string_async.py | 52 + ...oud_redis_get_instance_auth_string_sync.py | 52 + ...loud_redis_reschedule_maintenance_async.py | 57 + ...cloud_redis_reschedule_maintenance_sync.py | 57 + ...nippet_metadata_google.cloud.redis.v1.json | 338 + .../redis/scripts/fixup_redis_v1_keywords.py | 2 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1202 ++- 154 files changed, 51425 insertions(+), 16852 deletions(-) create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 8d98b4264d93..7dab7fa25386 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -65,6 +65,5 @@ py_binary( requirement("pypandoc"), requirement("PyYAML"), requirement("grpc-google-iam-v1"), - ] + ], ) - diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 0c8394275dfe..b79bc62f98ee 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -60,12 +60,12 @@ def gapic_generator_python(): strip_prefix = "rules_gapic-%s" % _rules_gapic_version, urls = ["https://github.com/googleapis/rules_gapic/archive/v%s.tar.gz" % _rules_gapic_version], ) - + _commit_sha = "fae3e6e091418d6343902debaf545cfc8f32c3ff" _maybe( http_archive, name = "com_google_googleapis", - strip_prefix = "googleapis-ffc531383747ebb702dad3db237ef5fdea796363", - urls = ["https://github.com/googleapis/googleapis/archive/ffc531383747ebb702dad3db237ef5fdea796363.zip"], + strip_prefix = "googleapis-{}".format(_commit_sha), + urls = 
["https://github.com/googleapis/googleapis/archive/{}.zip".format(_commit_sha)], ) def gapic_generator_register_toolchains(): diff --git a/packages/gapic-generator/rules_python_gapic/BUILD.bazel b/packages/gapic-generator/rules_python_gapic/BUILD.bazel index 41f2ac97c6df..9c6e7a3d618a 100644 --- a/packages/gapic-generator/rules_python_gapic/BUILD.bazel +++ b/packages/gapic-generator/rules_python_gapic/BUILD.bazel @@ -1 +1,4 @@ -exports_files(["test.py", "pytest.py"]) +exports_files([ + "test.py", + "pytest.py", +]) diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 347510d83311..d9ce330dc650 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -2,10 +2,6 @@ load( "@gapic_generator_python//rules_python_gapic:py_gapic.bzl", "py_gapic_library", ) -load( - "@gapic_generator_python//rules_python_gapic:py_gapic_pkg.bzl", - "py_gapic_assembly_pkg", -) load( "@gapic_generator_python//rules_python_gapic/test:integration_test.bzl", "golden_update", @@ -16,8 +12,6 @@ load( "py_proto_library", ) -load("@rules_proto//proto:defs.bzl", "proto_library") - package(default_visibility = ["//visibility:public"]) #################################################### @@ -103,7 +97,7 @@ py_gapic_library( ], service_yaml = "eventarc_v1.yaml", transport = "grpc+rest", - deps = [":iam_policy_py_proto"] + deps = [":iam_policy_py_proto"], ) py_test( @@ -128,7 +122,11 @@ py_gapic_library( "python-gapic-name=logging", "autogen-snippets", ], - transport = "grpc+rest", + # REST is not generated because of the following issues: + # - REST unit test `test_update_settings_rest_flattened` in logging_v2 fails. 
See #1728 + # - REST is not generated in the public `BUILD.bazel` + # https://github.com/googleapis/googleapis/blob/e85662e798a0a9495a035839f66d0c037c481e2c/google/logging/v2/BUILD.bazel#L201 + transport = "grpc", ) py_test( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index f8aff0bc41f5..366ca0861def 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -21,15 +21,30 @@ from google.cloud.asset_v1.services.asset_service.client import AssetServiceClient from google.cloud.asset_v1.services.asset_service.async_client import AssetServiceAsyncClient +from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningMetadata from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningRequest from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyLongrunningResponse from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyRequest from google.cloud.asset_v1.types.asset_service import AnalyzeIamPolicyResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeMoveRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeMoveResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPoliciesResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest +from google.cloud.asset_v1.types.asset_service import AnalyzeOrgPolicyGovernedContainersResponse +from 
google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicy +from google.cloud.asset_v1.types.asset_service import AnalyzerOrgPolicyConstraint from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryRequest from google.cloud.asset_v1.types.asset_service import BatchGetAssetsHistoryResponse +from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesRequest +from google.cloud.asset_v1.types.asset_service import BatchGetEffectiveIamPoliciesResponse from google.cloud.asset_v1.types.asset_service import BigQueryDestination from google.cloud.asset_v1.types.asset_service import CreateFeedRequest +from google.cloud.asset_v1.types.asset_service import CreateSavedQueryRequest from google.cloud.asset_v1.types.asset_service import DeleteFeedRequest +from google.cloud.asset_v1.types.asset_service import DeleteSavedQueryRequest from google.cloud.asset_v1.types.asset_service import ExportAssetsRequest from google.cloud.asset_v1.types.asset_service import ExportAssetsResponse from google.cloud.asset_v1.types.asset_service import Feed @@ -37,43 +52,79 @@ from google.cloud.asset_v1.types.asset_service import GcsDestination from google.cloud.asset_v1.types.asset_service import GcsOutputResult from google.cloud.asset_v1.types.asset_service import GetFeedRequest +from google.cloud.asset_v1.types.asset_service import GetSavedQueryRequest from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisOutputConfig from google.cloud.asset_v1.types.asset_service import IamPolicyAnalysisQuery from google.cloud.asset_v1.types.asset_service import ListAssetsRequest from google.cloud.asset_v1.types.asset_service import ListAssetsResponse from google.cloud.asset_v1.types.asset_service import ListFeedsRequest from google.cloud.asset_v1.types.asset_service import ListFeedsResponse +from google.cloud.asset_v1.types.asset_service import ListSavedQueriesRequest +from google.cloud.asset_v1.types.asset_service import ListSavedQueriesResponse 
+from google.cloud.asset_v1.types.asset_service import MoveAnalysis +from google.cloud.asset_v1.types.asset_service import MoveAnalysisResult +from google.cloud.asset_v1.types.asset_service import MoveImpact from google.cloud.asset_v1.types.asset_service import OutputConfig from google.cloud.asset_v1.types.asset_service import OutputResult from google.cloud.asset_v1.types.asset_service import PartitionSpec from google.cloud.asset_v1.types.asset_service import PubsubDestination +from google.cloud.asset_v1.types.asset_service import QueryAssetsOutputConfig +from google.cloud.asset_v1.types.asset_service import QueryAssetsRequest +from google.cloud.asset_v1.types.asset_service import QueryAssetsResponse +from google.cloud.asset_v1.types.asset_service import QueryResult +from google.cloud.asset_v1.types.asset_service import SavedQuery from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesRequest from google.cloud.asset_v1.types.asset_service import SearchAllIamPoliciesResponse from google.cloud.asset_v1.types.asset_service import SearchAllResourcesRequest from google.cloud.asset_v1.types.asset_service import SearchAllResourcesResponse +from google.cloud.asset_v1.types.asset_service import TableFieldSchema +from google.cloud.asset_v1.types.asset_service import TableSchema from google.cloud.asset_v1.types.asset_service import UpdateFeedRequest +from google.cloud.asset_v1.types.asset_service import UpdateSavedQueryRequest from google.cloud.asset_v1.types.asset_service import ContentType from google.cloud.asset_v1.types.assets import Asset +from google.cloud.asset_v1.types.assets import AttachedResource from google.cloud.asset_v1.types.assets import ConditionEvaluation from google.cloud.asset_v1.types.assets import IamPolicyAnalysisResult from google.cloud.asset_v1.types.assets import IamPolicyAnalysisState from google.cloud.asset_v1.types.assets import IamPolicySearchResult +from google.cloud.asset_v1.types.assets import RelatedAsset +from 
google.cloud.asset_v1.types.assets import RelatedAssets +from google.cloud.asset_v1.types.assets import RelatedResource +from google.cloud.asset_v1.types.assets import RelatedResources +from google.cloud.asset_v1.types.assets import RelationshipAttributes from google.cloud.asset_v1.types.assets import Resource from google.cloud.asset_v1.types.assets import ResourceSearchResult from google.cloud.asset_v1.types.assets import TemporalAsset from google.cloud.asset_v1.types.assets import TimeWindow +from google.cloud.asset_v1.types.assets import VersionedResource __all__ = ('AssetServiceClient', 'AssetServiceAsyncClient', + 'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', 'AnalyzeIamPolicyRequest', 'AnalyzeIamPolicyResponse', + 'AnalyzeMoveRequest', + 'AnalyzeMoveResponse', + 'AnalyzeOrgPoliciesRequest', + 'AnalyzeOrgPoliciesResponse', + 'AnalyzeOrgPolicyGovernedAssetsRequest', + 'AnalyzeOrgPolicyGovernedAssetsResponse', + 'AnalyzeOrgPolicyGovernedContainersRequest', + 'AnalyzeOrgPolicyGovernedContainersResponse', + 'AnalyzerOrgPolicy', + 'AnalyzerOrgPolicyConstraint', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', + 'BatchGetEffectiveIamPoliciesRequest', + 'BatchGetEffectiveIamPoliciesResponse', 'BigQueryDestination', 'CreateFeedRequest', + 'CreateSavedQueryRequest', 'DeleteFeedRequest', + 'DeleteSavedQueryRequest', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -81,29 +132,50 @@ 'GcsDestination', 'GcsOutputResult', 'GetFeedRequest', + 'GetSavedQueryRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', 'ListAssetsRequest', 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', + 'ListSavedQueriesRequest', + 'ListSavedQueriesResponse', + 'MoveAnalysis', + 'MoveAnalysisResult', + 'MoveImpact', 'OutputConfig', 'OutputResult', 'PartitionSpec', 'PubsubDestination', + 'QueryAssetsOutputConfig', + 'QueryAssetsRequest', + 'QueryAssetsResponse', + 
'QueryResult', + 'SavedQuery', 'SearchAllIamPoliciesRequest', 'SearchAllIamPoliciesResponse', 'SearchAllResourcesRequest', 'SearchAllResourcesResponse', + 'TableFieldSchema', + 'TableSchema', 'UpdateFeedRequest', + 'UpdateSavedQueryRequest', 'ContentType', 'Asset', + 'AttachedResource', 'ConditionEvaluation', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', + 'RelatedAsset', + 'RelatedAssets', + 'RelatedResource', + 'RelatedResources', + 'RelationshipAttributes', 'Resource', 'ResourceSearchResult', 'TemporalAsset', 'TimeWindow', + 'VersionedResource', ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 78280c4b82b8..8fbe3ed54f62 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -21,15 +21,30 @@ from .services.asset_service import AssetServiceClient from .services.asset_service import AssetServiceAsyncClient +from .types.asset_service import AnalyzeIamPolicyLongrunningMetadata from .types.asset_service import AnalyzeIamPolicyLongrunningRequest from .types.asset_service import AnalyzeIamPolicyLongrunningResponse from .types.asset_service import AnalyzeIamPolicyRequest from .types.asset_service import AnalyzeIamPolicyResponse +from .types.asset_service import AnalyzeMoveRequest +from .types.asset_service import AnalyzeMoveResponse +from .types.asset_service import AnalyzeOrgPoliciesRequest +from .types.asset_service import AnalyzeOrgPoliciesResponse +from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsRequest +from .types.asset_service import AnalyzeOrgPolicyGovernedAssetsResponse +from .types.asset_service import AnalyzeOrgPolicyGovernedContainersRequest +from .types.asset_service import 
AnalyzeOrgPolicyGovernedContainersResponse +from .types.asset_service import AnalyzerOrgPolicy +from .types.asset_service import AnalyzerOrgPolicyConstraint from .types.asset_service import BatchGetAssetsHistoryRequest from .types.asset_service import BatchGetAssetsHistoryResponse +from .types.asset_service import BatchGetEffectiveIamPoliciesRequest +from .types.asset_service import BatchGetEffectiveIamPoliciesResponse from .types.asset_service import BigQueryDestination from .types.asset_service import CreateFeedRequest +from .types.asset_service import CreateSavedQueryRequest from .types.asset_service import DeleteFeedRequest +from .types.asset_service import DeleteSavedQueryRequest from .types.asset_service import ExportAssetsRequest from .types.asset_service import ExportAssetsResponse from .types.asset_service import Feed @@ -37,47 +52,84 @@ from .types.asset_service import GcsDestination from .types.asset_service import GcsOutputResult from .types.asset_service import GetFeedRequest +from .types.asset_service import GetSavedQueryRequest from .types.asset_service import IamPolicyAnalysisOutputConfig from .types.asset_service import IamPolicyAnalysisQuery from .types.asset_service import ListAssetsRequest from .types.asset_service import ListAssetsResponse from .types.asset_service import ListFeedsRequest from .types.asset_service import ListFeedsResponse +from .types.asset_service import ListSavedQueriesRequest +from .types.asset_service import ListSavedQueriesResponse +from .types.asset_service import MoveAnalysis +from .types.asset_service import MoveAnalysisResult +from .types.asset_service import MoveImpact from .types.asset_service import OutputConfig from .types.asset_service import OutputResult from .types.asset_service import PartitionSpec from .types.asset_service import PubsubDestination +from .types.asset_service import QueryAssetsOutputConfig +from .types.asset_service import QueryAssetsRequest +from .types.asset_service import QueryAssetsResponse 
+from .types.asset_service import QueryResult +from .types.asset_service import SavedQuery from .types.asset_service import SearchAllIamPoliciesRequest from .types.asset_service import SearchAllIamPoliciesResponse from .types.asset_service import SearchAllResourcesRequest from .types.asset_service import SearchAllResourcesResponse +from .types.asset_service import TableFieldSchema +from .types.asset_service import TableSchema from .types.asset_service import UpdateFeedRequest +from .types.asset_service import UpdateSavedQueryRequest from .types.asset_service import ContentType from .types.assets import Asset +from .types.assets import AttachedResource from .types.assets import ConditionEvaluation from .types.assets import IamPolicyAnalysisResult from .types.assets import IamPolicyAnalysisState from .types.assets import IamPolicySearchResult +from .types.assets import RelatedAsset +from .types.assets import RelatedAssets +from .types.assets import RelatedResource +from .types.assets import RelatedResources +from .types.assets import RelationshipAttributes from .types.assets import Resource from .types.assets import ResourceSearchResult from .types.assets import TemporalAsset from .types.assets import TimeWindow +from .types.assets import VersionedResource __all__ = ( 'AssetServiceAsyncClient', +'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', 'AnalyzeIamPolicyRequest', 'AnalyzeIamPolicyResponse', +'AnalyzeMoveRequest', +'AnalyzeMoveResponse', +'AnalyzeOrgPoliciesRequest', +'AnalyzeOrgPoliciesResponse', +'AnalyzeOrgPolicyGovernedAssetsRequest', +'AnalyzeOrgPolicyGovernedAssetsResponse', +'AnalyzeOrgPolicyGovernedContainersRequest', +'AnalyzeOrgPolicyGovernedContainersResponse', +'AnalyzerOrgPolicy', +'AnalyzerOrgPolicyConstraint', 'Asset', 'AssetServiceClient', +'AttachedResource', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', +'BatchGetEffectiveIamPoliciesRequest', 
+'BatchGetEffectiveIamPoliciesResponse', 'BigQueryDestination', 'ConditionEvaluation', 'ContentType', 'CreateFeedRequest', +'CreateSavedQueryRequest', 'DeleteFeedRequest', +'DeleteSavedQueryRequest', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -85,6 +137,7 @@ 'GcsDestination', 'GcsOutputResult', 'GetFeedRequest', +'GetSavedQueryRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', 'IamPolicyAnalysisResult', @@ -94,17 +147,36 @@ 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', +'ListSavedQueriesRequest', +'ListSavedQueriesResponse', +'MoveAnalysis', +'MoveAnalysisResult', +'MoveImpact', 'OutputConfig', 'OutputResult', 'PartitionSpec', 'PubsubDestination', +'QueryAssetsOutputConfig', +'QueryAssetsRequest', +'QueryAssetsResponse', +'QueryResult', +'RelatedAsset', +'RelatedAssets', +'RelatedResource', +'RelatedResources', +'RelationshipAttributes', 'Resource', 'ResourceSearchResult', +'SavedQuery', 'SearchAllIamPoliciesRequest', 'SearchAllIamPoliciesResponse', 'SearchAllResourcesRequest', 'SearchAllResourcesResponse', +'TableFieldSchema', +'TableSchema', 'TemporalAsset', 'TimeWindow', 'UpdateFeedRequest', +'UpdateSavedQueryRequest', +'VersionedResource', ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json index c87ac115e961..e39e5043139f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_metadata.json @@ -20,21 +20,56 @@ "analyze_iam_policy_longrunning" ] }, + "AnalyzeMove": { + "methods": [ + "analyze_move" + ] + }, + "AnalyzeOrgPolicies": { + "methods": [ + "analyze_org_policies" + ] + }, + "AnalyzeOrgPolicyGovernedAssets": { + "methods": [ + "analyze_org_policy_governed_assets" + ] + }, + 
"AnalyzeOrgPolicyGovernedContainers": { + "methods": [ + "analyze_org_policy_governed_containers" + ] + }, "BatchGetAssetsHistory": { "methods": [ "batch_get_assets_history" ] }, + "BatchGetEffectiveIamPolicies": { + "methods": [ + "batch_get_effective_iam_policies" + ] + }, "CreateFeed": { "methods": [ "create_feed" ] }, + "CreateSavedQuery": { + "methods": [ + "create_saved_query" + ] + }, "DeleteFeed": { "methods": [ "delete_feed" ] }, + "DeleteSavedQuery": { + "methods": [ + "delete_saved_query" + ] + }, "ExportAssets": { "methods": [ "export_assets" @@ -45,6 +80,11 @@ "get_feed" ] }, + "GetSavedQuery": { + "methods": [ + "get_saved_query" + ] + }, "ListAssets": { "methods": [ "list_assets" @@ -55,6 +95,16 @@ "list_feeds" ] }, + "ListSavedQueries": { + "methods": [ + "list_saved_queries" + ] + }, + "QueryAssets": { + "methods": [ + "query_assets" + ] + }, "SearchAllIamPolicies": { "methods": [ "search_all_iam_policies" @@ -69,6 +119,11 @@ "methods": [ "update_feed" ] + }, + "UpdateSavedQuery": { + "methods": [ + "update_saved_query" + ] } } }, @@ -85,21 +140,56 @@ "analyze_iam_policy_longrunning" ] }, + "AnalyzeMove": { + "methods": [ + "analyze_move" + ] + }, + "AnalyzeOrgPolicies": { + "methods": [ + "analyze_org_policies" + ] + }, + "AnalyzeOrgPolicyGovernedAssets": { + "methods": [ + "analyze_org_policy_governed_assets" + ] + }, + "AnalyzeOrgPolicyGovernedContainers": { + "methods": [ + "analyze_org_policy_governed_containers" + ] + }, "BatchGetAssetsHistory": { "methods": [ "batch_get_assets_history" ] }, + "BatchGetEffectiveIamPolicies": { + "methods": [ + "batch_get_effective_iam_policies" + ] + }, "CreateFeed": { "methods": [ "create_feed" ] }, + "CreateSavedQuery": { + "methods": [ + "create_saved_query" + ] + }, "DeleteFeed": { "methods": [ "delete_feed" ] }, + "DeleteSavedQuery": { + "methods": [ + "delete_saved_query" + ] + }, "ExportAssets": { "methods": [ "export_assets" @@ -110,6 +200,11 @@ "get_feed" ] }, + "GetSavedQuery": { + "methods": [ + 
"get_saved_query" + ] + }, "ListAssets": { "methods": [ "list_assets" @@ -120,6 +215,16 @@ "list_feeds" ] }, + "ListSavedQueries": { + "methods": [ + "list_saved_queries" + ] + }, + "QueryAssets": { + "methods": [ + "query_assets" + ] + }, "SearchAllIamPolicies": { "methods": [ "search_all_iam_policies" @@ -134,6 +239,11 @@ "methods": [ "update_feed" ] + }, + "UpdateSavedQuery": { + "methods": [ + "update_saved_query" + ] } } }, @@ -150,21 +260,56 @@ "analyze_iam_policy_longrunning" ] }, + "AnalyzeMove": { + "methods": [ + "analyze_move" + ] + }, + "AnalyzeOrgPolicies": { + "methods": [ + "analyze_org_policies" + ] + }, + "AnalyzeOrgPolicyGovernedAssets": { + "methods": [ + "analyze_org_policy_governed_assets" + ] + }, + "AnalyzeOrgPolicyGovernedContainers": { + "methods": [ + "analyze_org_policy_governed_containers" + ] + }, "BatchGetAssetsHistory": { "methods": [ "batch_get_assets_history" ] }, + "BatchGetEffectiveIamPolicies": { + "methods": [ + "batch_get_effective_iam_policies" + ] + }, "CreateFeed": { "methods": [ "create_feed" ] }, + "CreateSavedQuery": { + "methods": [ + "create_saved_query" + ] + }, "DeleteFeed": { "methods": [ "delete_feed" ] }, + "DeleteSavedQuery": { + "methods": [ + "delete_saved_query" + ] + }, "ExportAssets": { "methods": [ "export_assets" @@ -175,6 +320,11 @@ "get_feed" ] }, + "GetSavedQuery": { + "methods": [ + "get_saved_query" + ] + }, "ListAssets": { "methods": [ "list_assets" @@ -185,6 +335,16 @@ "list_feeds" ] }, + "ListSavedQueries": { + "methods": [ + "list_saved_queries" + ] + }, + "QueryAssets": { + "methods": [ + "query_assets" + ] + }, "SearchAllIamPolicies": { "methods": [ "search_all_iam_policies" @@ -199,6 +359,11 @@ "methods": [ "update_feed" ] + }, + "UpdateSavedQuery": { + "methods": [ + "update_saved_query" + ] } } } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 9d0193c98c4f..113bbdea7682 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -37,6 +37,9 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport @@ -51,10 +54,20 @@ class AssetServiceAsyncClient: DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT + access_level_path = staticmethod(AssetServiceClient.access_level_path) + parse_access_level_path = staticmethod(AssetServiceClient.parse_access_level_path) + access_policy_path = staticmethod(AssetServiceClient.access_policy_path) + parse_access_policy_path = staticmethod(AssetServiceClient.parse_access_policy_path) asset_path = staticmethod(AssetServiceClient.asset_path) parse_asset_path = staticmethod(AssetServiceClient.parse_asset_path) feed_path = staticmethod(AssetServiceClient.feed_path) parse_feed_path = staticmethod(AssetServiceClient.parse_feed_path) + inventory_path = staticmethod(AssetServiceClient.inventory_path) + parse_inventory_path = staticmethod(AssetServiceClient.parse_inventory_path) + saved_query_path = staticmethod(AssetServiceClient.saved_query_path) + parse_saved_query_path = staticmethod(AssetServiceClient.parse_saved_query_path) + 
service_perimeter_path = staticmethod(AssetServiceClient.service_perimeter_path) + parse_service_perimeter_path = staticmethod(AssetServiceClient.parse_service_perimeter_path) common_billing_account_path = staticmethod(AssetServiceClient.common_billing_account_path) parse_common_billing_account_path = staticmethod(AssetServiceClient.parse_common_billing_account_path) common_folder_path = staticmethod(AssetServiceClient.common_folder_path) @@ -203,10 +216,10 @@ async def export_assets(self, line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -345,11 +358,13 @@ async def sample_list_assets(): request (Optional[Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]]): The request object. ListAssets request. parent (:class:`str`): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" - (such as "projects/my-project-id"), or - "projects/[project-number]" (such as "projects/12345"). + Required. Name of the organization, folder, or project + the assets belong to. 
Format: + "organizations/[organization-number]" (such as + "organizations/123"), "projects/[project-id]" (such as + "projects/my-project-id"), "projects/[project-number]" + (such as "projects/12345"), or "folders/[folder-number]" + (such as "folders/12345"). This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -564,8 +579,8 @@ async def sample_create_feed(): be an organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID - (such as "projects/my-project-id")", or - a project number (such as + (such as "projects/my-project-id"), or a + project number (such as "projects/12345"). This corresponds to the ``parent`` field @@ -1075,10 +1090,10 @@ async def search_all_resources(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesAsyncPager: - r"""Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + r"""Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. .. code-block:: python @@ -1136,46 +1151,64 @@ async def sample_search_all_resources(): Examples: - - ``name:Important`` to find Cloud resources whose name - contains "Important" as a word. - - ``name=Important`` to find the Cloud resource whose - name is exactly "Important". - - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix of any word - in the field. - - ``location:us-west*`` to find Cloud resources whose - location contains both "us" and "west" as prefixes. 
- - ``labels:prod`` to find Cloud resources whose labels - contain "prod" as a key or value. - - ``labels.env:prod`` to find Cloud resources that have - a label "env" and its value is "prod". - - ``labels.env:*`` to find Cloud resources that have a - label "env". - - ``kmsKey:key`` to find Cloud resources encrypted with - a customer-managed encryption key whose name contains - the word "key". - - ``state:ACTIVE`` to find Cloud resources whose state - contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find {{gcp_name}} resources + - ``name:Important`` to find Google Cloud resources + whose name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of + any word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as + prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources + that have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that + have a label "env". + - ``kmsKey:key`` to find Google Cloud resources + encrypted with a customer-managed encryption key + whose name contains "key" as a word. This field is + deprecated. Please use the ``kmsKeys`` field to + retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources + encrypted with customer-managed encryption keys whose + name contains the word "key". + - ``relationships:instance-group-1`` to find Google + Cloud resources that have relationships with + "instance-group-1" in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of + type "INSTANCE_TO_INSTANCEGROUP". 
+ - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have + relationships with "instance-group-1" in the Compute + Engine instance group resource name, for relationship + type "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Cloud resources - that were created before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``updateTime>1609459200`` to find Cloud resources - that were updated after "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``Important`` to find Cloud resources that contain - "Important" as a word in any of the searchable - fields. - - ``Impor*`` to find Cloud resources that contain - "Impor" as a prefix of any word in any of the + - ``createTime<1609459200`` to find Google Cloud + resources that were created before "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud + resources that were updated after "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``Important`` to find Google Cloud resources that + contain "Important" as a word in any of the + searchable fields. + - ``Impor*`` to find Google Cloud resources that + contain "Impor" as a prefix of any word in any of the searchable fields. - ``Important location:(us-west1 OR global)`` to find - Cloud resources that contain "Important" as a word in - any of the searchable fields and are also located in - the "us-west1" region or the "global" location. 
+ Google Cloud resources that contain "Important" as a + word in any of the searchable fields and are also + located in the "us-west1" region or the "global" + location. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1348,12 +1381,12 @@ async def sample_search_all_iam_policies(): for more information. If not specified or empty, it will search all the IAM policies within the specified ``scope``. Note that the query string is compared - against each Cloud IAM policy binding, including its - members, roles, and Cloud IAM conditions. The returned - Cloud IAM policies will only contain the bindings that - match your query. To learn more about the IAM policy - structure, see `IAM policy - doc `__. + against each IAM policy binding, including its + principals, roles, and IAM conditions. The returned IAM + policies will only contain the bindings that match your + query. To learn more about the IAM policy structure, see + the `IAM policy + documentation `__. Examples: @@ -1394,7 +1427,7 @@ async def sample_search_all_iam_policies(): - ``roles:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. - ``memberTypes:user`` to find IAM policy bindings that - contain the "user" member type. + contain the principal type "user". This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1526,7 +1559,7 @@ async def sample_analyze_iam_policy(): Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. """ # Create or coerce a protobuf request object. @@ -1582,8 +1615,8 @@ async def analyze_iam_policy_longrunning(self, [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. 
We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. .. code-block:: python @@ -1636,10 +1669,8 @@ async def sample_analyze_iam_policy_longrunning(): google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` - A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + The result type for the operation will be :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. """ # Create or coerce a protobuf request object. @@ -1674,7 +1705,1309 @@ async def sample_analyze_iam_policy_longrunning(): response, self._client._transport.operations_client, asset_service.AnalyzeIamPolicyLongrunningResponse, - metadata_type=asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata_type=asset_service.AnalyzeIamPolicyLongrunningMetadata, + ) + + # Done; return the response. + return response + + async def analyze_move(self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.AnalyzeMoveResponse: + r"""Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. 
The policies and configuration are + subject to change before the actual resource migration + takes place. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = await client.analyze_move(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]]): + The request object. The request message for performing + resource move analysis. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.AnalyzeMoveResponse: + The response message for resource + move analysis. + + """ + # Create or coerce a protobuf request object. + request = asset_service.AnalyzeMoveRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_move, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def query_assets(self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.QueryAssetsResponse: + r"""Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL `__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. + + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from the a previous + ``QueryAssets`` call. + + Note, the query result has approximately 10 GB limitation + enforced by BigQuery + https://cloud.google.com/bigquery/docs/best-practices-performance-output, + queries return larger results will result in errors. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = await client.query_assets(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]]): + The request object. QueryAssets request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.QueryAssetsResponse: + QueryAssets response. + """ + # Create or coerce a protobuf request object. + request = asset_service.QueryAssetsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.query_assets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_saved_query(self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Creates a saved query in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = await client.create_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.CreateSavedQueryRequest, dict]]): + The request object. Request to create a saved query. + parent (:class:`str`): + Required. The name of the project/folder/organization + where this saved_query should be created in. It can only + be an organization number (such as "organizations/123"), + a folder number (such as "folders/123"), a project ID + (such as "projects/my-project-id"), or a project number + (such as "projects/12345"). 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + saved_query (:class:`google.cloud.asset_v1.types.SavedQuery`): + Required. The saved_query details. The ``name`` field + must be empty as it will be generated based on the + parent and saved_query_id. + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + saved_query_id (:class:`str`): + Required. The ID to use for the saved query, which must + be unique in the specified parent. It will become the + final component of the saved query's resource name. + + This value should be 4-63 characters, and valid + characters are ``[a-z][0-9]-``. + + Notice that this field is required in the saved query + creation, and the ``name`` field of the ``saved_query`` + will be ignored. + + This corresponds to the ``saved_query_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, saved_query, saved_query_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.CreateSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if saved_query is not None: + request.saved_query = saved_query + if saved_query_id is not None: + request.saved_query_id = saved_query_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_saved_query(self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Gets details about a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.GetSavedQueryRequest, dict]]): + The request object. Request to get a saved query. + name (:class:`str`): + Required. The name of the saved query and it must be in + the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.GetSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_saved_queries(self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSavedQueriesAsyncPager: + r"""Lists all saved queries in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_list_saved_queries(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.ListSavedQueriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_saved_queries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.ListSavedQueriesRequest, dict]]): + The request object. Request to list saved queries. + parent (:class:`str`): + Required. The parent + project/folder/organization whose + savedQueries are to be listed. It can + only be using + project/folder/organization number (such + as "folders/12345")", or a project ID + (such as "projects/my-project-id"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager: + Response of listing saved queries. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.ListSavedQueriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_saved_queries, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSavedQueriesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_saved_query(self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Updates a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = await client.update_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.UpdateSavedQueryRequest, dict]]): + The request object. Request to update a saved query. + saved_query (:class:`google.cloud.asset_v1.types.SavedQuery`): + Required. The saved query to update. + + The saved query's ``name`` field is used to identify the + one to update, which has format as below: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([saved_query, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.UpdateSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if saved_query is not None: + request.saved_query = saved_query + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("saved_query.name", request.saved_query.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_saved_query(self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + await client.delete_saved_query(request=request) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.DeleteSavedQueryRequest, dict]]): + The request object. Request to delete a saved query. + name (:class:`str`): + Required. The name of the saved query to delete. It must + be in the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.DeleteSavedQueryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_saved_query, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def batch_get_effective_iam_policies(self, + request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Gets effective IAM policies for a batch of resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = await client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]]): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + """ + # Create or coerce a protobuf request object. + request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.batch_get_effective_iam_policies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_org_policies(self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPoliciesAsyncPager: + r"""Analyzes organization policies under a scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_analyze_org_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPoliciesRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest, dict]]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. + scope (:class:`str`): + Required. The organization to scope the request. 
Only + organization policies within the scope will be analyzed. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (:class:`str`): + Required. The name of the constraint + to analyze organization policies for. + The response only contains analyzed + organization policies for the provided + constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + The expression to filter + [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. + The only supported field is + ``consolidated_policy.attached_resource``, and the only + supported operator is ``=``. + + Example: + consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" + will return the org policy results of"folders/001". + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager: + The response message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.AnalyzeOrgPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_org_policies, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AnalyzeOrgPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_org_policy_governed_containers(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: + r"""Analyzes organization policies governed containers + (projects, folders or organization) under a scope. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest, dict]]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + scope (:class:`str`): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + The output containers will also be limited to the ones + governed by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (:class:`str`): + Required. The name of the constraint + to analyze governed containers for. The + analysis only contains organization + policies for the provided constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ filter (:class:`str`): + The expression to filter the governed containers in + result. The only supported field is ``parent``, and the + only supported operator is ``=``. + + Example: + parent="//cloudresourcemanager.googleapis.com/folders/001" + will return all containers under "folders/001". + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_org_policy_governed_containers, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def analyze_org_policy_governed_assets(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + r"""Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + async def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest, dict]]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + scope (:class:`str`): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + The output assets will also be limited to the ones + governed by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (:class:`str`): + Required. The name of the constraint + to analyze governed assets for. The + analysis only contains analyzed + organization policies for the provided + constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ filter (:class:`str`): + The expression to filter the governed assets in result. + The only supported fields for governed resources are + ``governed_resource.project`` and + ``governed_resource.folders``. The only supported fields + for governed iam policies are + ``governed_iam_policy.project`` and + ``governed_iam_policy.folders``. The only supported + operator is ``=``. + + Example 1: governed_resource.project="projects/12345678" + filter will return all governed resources under + projects/12345678 including the project ifself, if + applicable. + + Example 2: + governed_iam_policy.folders="folders/12345678" filter + will return all governed iam policies under + folders/12345678, if applicable. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.analyze_org_policy_governed_assets, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 42b9b727b2d8..63c80bcea508 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -40,6 +40,9 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore from .transports.base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import AssetServiceGrpcTransport @@ -166,6 +169,28 @@ def transport(self) -> AssetServiceTransport: """ return self._transport + + @staticmethod + def access_level_path(access_policy: str,access_level: str,) -> str: + """Returns a fully-qualified access_level string.""" + return "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + + @staticmethod + def parse_access_level_path(path: str) -> Dict[str,str]: + """Parses a access_level path into its component segments.""" + m = re.match(r"^accessPolicies/(?P<access_policy>.+?)/accessLevels/(?P<access_level>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def access_policy_path(access_policy: str,) -> str: + """Returns a fully-qualified access_policy string.""" + return "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + + @staticmethod + def parse_access_policy_path(path: str) -> Dict[str,str]: + """Parses a access_policy path into its component
segments.""" + m = re.match(r"^accessPolicies/(?P<access_policy>.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def asset_path() -> str: """Returns a fully-qualified asset string.""" @@ -188,6 +213,39 @@ def parse_feed_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P<project>.+?)/feeds/(?P<feed>.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def inventory_path(project: str,location: str,instance: str,) -> str: + """Returns a fully-qualified inventory string.""" + return "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + + @staticmethod + def parse_inventory_path(path: str) -> Dict[str,str]: + """Parses a inventory path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/instances/(?P<instance>.+?)/inventory$", path) + return m.groupdict() if m else {} + + @staticmethod + def saved_query_path(project: str,saved_query: str,) -> str: + """Returns a fully-qualified saved_query string.""" + return "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + + @staticmethod + def parse_saved_query_path(path: str) -> Dict[str,str]: + """Parses a saved_query path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/savedQueries/(?P<saved_query>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def service_perimeter_path(access_policy: str,service_perimeter: str,) -> str: + """Returns a fully-qualified service_perimeter string.""" + return "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + + @staticmethod + def parse_service_perimeter_path(path: str) -> Dict[str,str]: + """Parses a service_perimeter path into its component segments.""" + m = re.match(r"^accessPolicies/(?P<access_policy>.+?)/servicePerimeters/(?P<service_perimeter>.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def
common_billing_account_path(billing_account: str, ) -> str: """Returns a fully-qualified billing_account string.""" @@ -406,10 +464,10 @@ def export_assets(self, line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -549,11 +607,13 @@ def sample_list_assets(): request (Union[google.cloud.asset_v1.types.ListAssetsRequest, dict]): The request object. ListAssets request. parent (str): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" - (such as "projects/my-project-id"), or - "projects/[project-number]" (such as "projects/12345"). + Required. Name of the organization, folder, or project + the assets belong to. Format: + "organizations/[organization-number]" (such as + "organizations/123"), "projects/[project-id]" (such as + "projects/my-project-id"), "projects/[project-number]" + (such as "projects/12345"), or "folders/[folder-number]" + (such as "folders/12345"). 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -762,8 +822,8 @@ def sample_create_feed(): be an organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID - (such as "projects/my-project-id")", or - a project number (such as + (such as "projects/my-project-id"), or a + project number (such as "projects/12345"). This corresponds to the ``parent`` field @@ -1252,10 +1312,10 @@ def search_all_resources(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.SearchAllResourcesPager: - r"""Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + r"""Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. .. code-block:: python @@ -1313,46 +1373,64 @@ def sample_search_all_resources(): Examples: - - ``name:Important`` to find Cloud resources whose name - contains "Important" as a word. - - ``name=Important`` to find the Cloud resource whose - name is exactly "Important". - - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix of any word - in the field. - - ``location:us-west*`` to find Cloud resources whose - location contains both "us" and "west" as prefixes. - - ``labels:prod`` to find Cloud resources whose labels - contain "prod" as a key or value. - - ``labels.env:prod`` to find Cloud resources that have - a label "env" and its value is "prod". - - ``labels.env:*`` to find Cloud resources that have a - label "env". 
- - ``kmsKey:key`` to find Cloud resources encrypted with - a customer-managed encryption key whose name contains - the word "key". - - ``state:ACTIVE`` to find Cloud resources whose state - contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find {{gcp_name}} resources + - ``name:Important`` to find Google Cloud resources + whose name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of + any word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as + prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources + that have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that + have a label "env". + - ``kmsKey:key`` to find Google Cloud resources + encrypted with a customer-managed encryption key + whose name contains "key" as a word. This field is + deprecated. Please use the ``kmsKeys`` field to + retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources + encrypted with customer-managed encryption keys whose + name contains the word "key". + - ``relationships:instance-group-1`` to find Google + Cloud resources that have relationships with + "instance-group-1" in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of + type "INSTANCE_TO_INSTANCEGROUP". + - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have + relationships with "instance-group-1" in the Compute + Engine instance group resource name, for relationship + type "INSTANCE_TO_INSTANCEGROUP". 
+ - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Cloud resources - that were created before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``updateTime>1609459200`` to find Cloud resources - that were updated after "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 - 00:00:00 UTC" in seconds. - - ``Important`` to find Cloud resources that contain - "Important" as a word in any of the searchable - fields. - - ``Impor*`` to find Cloud resources that contain - "Impor" as a prefix of any word in any of the + - ``createTime<1609459200`` to find Google Cloud + resources that were created before "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud + resources that were updated after "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``Important`` to find Google Cloud resources that + contain "Important" as a word in any of the + searchable fields. + - ``Impor*`` to find Google Cloud resources that + contain "Impor" as a prefix of any word in any of the searchable fields. - ``Important location:(us-west1 OR global)`` to find - Cloud resources that contain "Important" as a word in - any of the searchable fields and are also located in - the "us-west1" region or the "global" location. + Google Cloud resources that contain "Important" as a + word in any of the searchable fields and are also + located in the "us-west1" region or the "global" + location. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1518,12 +1596,12 @@ def sample_search_all_iam_policies(): for more information. 
If not specified or empty, it will search all the IAM policies within the specified ``scope``. Note that the query string is compared - against each Cloud IAM policy binding, including its - members, roles, and Cloud IAM conditions. The returned - Cloud IAM policies will only contain the bindings that - match your query. To learn more about the IAM policy - structure, see `IAM policy - doc `__. + against each IAM policy binding, including its + principals, roles, and IAM conditions. The returned IAM + policies will only contain the bindings that match your + query. To learn more about the IAM policy structure, see + the `IAM policy + documentation `__. Examples: @@ -1564,7 +1642,7 @@ def sample_search_all_iam_policies(): - ``roles:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. - ``memberTypes:user`` to find IAM policy bindings that - contain the "user" member type. + contain the principal type "user". This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1689,7 +1767,7 @@ def sample_analyze_iam_policy(): Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: A response message for - [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. """ # Create or coerce a protobuf request object. @@ -1740,8 +1818,8 @@ def analyze_iam_policy_longrunning(self, [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. .. 
code-block:: python @@ -1794,10 +1872,8 @@ def sample_analyze_iam_policy_longrunning(): google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` - A response message for - [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. + The result type for the operation will be :class:`google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningResponse` A response message for + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. """ # Create or coerce a protobuf request object. @@ -1833,7 +1909,1312 @@ def sample_analyze_iam_policy_longrunning(): response, self._transport.operations_client, asset_service.AnalyzeIamPolicyLongrunningResponse, - metadata_type=asset_service.AnalyzeIamPolicyLongrunningRequest, + metadata_type=asset_service.AnalyzeIamPolicyLongrunningMetadata, + ) + + # Done; return the response. + return response + + def analyze_move(self, + request: Optional[Union[asset_service.AnalyzeMoveRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.AnalyzeMoveResponse: + r"""Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. The policies and configuration are + subject to change before the actual resource migration + takes place. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = client.analyze_move(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]): + The request object. The request message for performing + resource move analysis. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.AnalyzeMoveResponse: + The response message for resource + move analysis. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeMoveRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeMoveRequest): + request = asset_service.AnalyzeMoveRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_move] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("resource", request.resource), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def query_assets(self, + request: Optional[Union[asset_service.QueryAssetsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.QueryAssetsResponse: + r"""Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL <http://cloud.google.com/bigquery/docs/reference/standard-sql/enabling-standard-sql>`__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. + + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from a previous + ``QueryAssets`` call. + + Note, the query result has approximately 10 GB limitation + enforced by BigQuery + https://cloud.google.com/bigquery/docs/best-practices-performance-output, + queries that return larger results will result in errors. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = client.query_assets(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]): + The request object.
QueryAssets request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.QueryAssetsResponse: + QueryAssets response. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.QueryAssetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.QueryAssetsRequest): + request = asset_service.QueryAssetsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.query_assets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_saved_query(self, + request: Optional[Union[asset_service.CreateSavedQueryRequest, dict]] = None, + *, + parent: Optional[str] = None, + saved_query: Optional[asset_service.SavedQuery] = None, + saved_query_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Creates a saved query in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = client.create_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.CreateSavedQueryRequest, dict]): + The request object. Request to create a saved query. + parent (str): + Required. The name of the project/folder/organization + where this saved_query should be created in. It can only + be an organization number (such as "organizations/123"), + a folder number (such as "folders/123"), a project ID + (such as "projects/my-project-id"), or a project number + (such as "projects/12345"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved_query details. The ``name`` field + must be empty as it will be generated based on the + parent and saved_query_id. + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + saved_query_id (str): + Required. The ID to use for the saved query, which must + be unique in the specified parent. It will become the + final component of the saved query's resource name. + + This value should be 4-63 characters, and valid + characters are ``[a-z][0-9]-``. 
+ + Notice that this field is required in the saved query + creation, and the ``name`` field of the ``saved_query`` + will be ignored. + + This corresponds to the ``saved_query_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, saved_query, saved_query_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.CreateSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.CreateSavedQueryRequest): + request = asset_service.CreateSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if saved_query is not None: + request.saved_query = saved_query + if saved_query_id is not None: + request.saved_query_id = saved_query_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_saved_query(self, + request: Optional[Union[asset_service.GetSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Gets details about a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = client.get_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.GetSavedQueryRequest, dict]): + The request object. Request to get a saved query. + name (str): + Required. The name of the saved query and it must be in + the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.GetSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.GetSavedQueryRequest): + request = asset_service.GetSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_saved_queries(self, + request: Optional[Union[asset_service.ListSavedQueriesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSavedQueriesPager: + r"""Lists all saved queries in a parent + project/folder/organization. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_list_saved_queries(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListSavedQueriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_saved_queries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.ListSavedQueriesRequest, dict]): + The request object. Request to list saved queries. + parent (str): + Required. The parent + project/folder/organization whose + savedQueries are to be listed. It can + only be using + project/folder/organization number (such + as "folders/12345")", or a project ID + (such as "projects/my-project-id"). + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager: + Response of listing saved queries. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.ListSavedQueriesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.ListSavedQueriesRequest): + request = asset_service.ListSavedQueriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_saved_queries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSavedQueriesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_saved_query(self, + request: Optional[Union[asset_service.UpdateSavedQueryRequest, dict]] = None, + *, + saved_query: Optional[asset_service.SavedQuery] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.SavedQuery: + r"""Updates a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = client.update_saved_query(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.UpdateSavedQueryRequest, dict]): + The request object. Request to update a saved query. + saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved query to update. + + The saved query's ``name`` field is used to identify the + one to update, which has format as below: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``saved_query`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.SavedQuery: + A saved query which can be shared + with others or used later. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([saved_query, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.UpdateSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.UpdateSavedQueryRequest): + request = asset_service.UpdateSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if saved_query is not None: + request.saved_query = saved_query + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("saved_query.name", request.saved_query.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_saved_query(self, + request: Optional[Union[asset_service.DeleteSavedQueryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a saved query. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + client.delete_saved_query(request=request) + + Args: + request (Union[google.cloud.asset_v1.types.DeleteSavedQueryRequest, dict]): + The request object. Request to delete a saved query. + name (str): + Required. The name of the saved query to delete. It must + be in the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.DeleteSavedQueryRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.DeleteSavedQueryRequest): + request = asset_service.DeleteSavedQueryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_saved_query] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def batch_get_effective_iam_policies(self, + request: Optional[Union[asset_service.BatchGetEffectiveIamPoliciesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Gets effective IAM policies for a batch of resources. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.BatchGetEffectiveIamPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, asset_service.BatchGetEffectiveIamPoliciesRequest): + request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_org_policies(self, + request: Optional[Union[asset_service.AnalyzeOrgPoliciesRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPoliciesPager: + r"""Analyzes organization policies under a scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_org_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPoliciesRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest, dict]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (str): + Required. The name of the constraint + to analyze organization policies for. + The response only contains analyzed + organization policies for the provided + constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The expression to filter + [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. + The only supported field is + ``consolidated_policy.attached_resource``, and the only + supported operator is ``=``. + + Example: + consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" + will return the org policy results of"folders/001". 
+ + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager: + The response message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeOrgPoliciesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeOrgPoliciesRequest): + request = asset_service.AnalyzeOrgPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.analyze_org_policies] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AnalyzeOrgPoliciesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_org_policy_governed_containers(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: + r"""Analyzes organization policies governed containers + (projects, folders or organization) under a scope. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest, dict]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + The output containers will also be limited to the ones + governed by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (str): + Required. The name of the constraint + to analyze governed containers for. The + analysis only contains organization + policies for the provided constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The expression to filter the governed containers in + result. The only supported field is ``parent``, and the + only supported operator is ``=``. + + Example: + parent="//cloudresourcemanager.googleapis.com/folders/001" + will return all containers under "folders/001". 
+ + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeOrgPolicyGovernedContainersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if scope is not None: + request.scope = scope + if constraint is not None: + request.constraint = constraint + if filter is not None: + request.filter = filter + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("scope", request.scope), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.AnalyzeOrgPolicyGovernedContainersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def analyze_org_policy_governed_assets(self, + request: Optional[Union[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict]] = None, + *, + scope: Optional[str] = None, + constraint: Optional[str] = None, + filter: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: + r"""Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import asset_v1 + + def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest, dict]): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + The output assets will also be limited to the ones + governed by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + + This corresponds to the ``scope`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + constraint (str): + Required. The name of the constraint + to analyze governed assets for. The + analysis only contains analyzed + organization policies for the provided + constraint. + + This corresponds to the ``constraint`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + The expression to filter the governed assets in result. + The only supported fields for governed resources are + ``governed_resource.project`` and + ``governed_resource.folders``. 
The only supported fields + for governed iam policies are + ``governed_iam_policy.project`` and + ``governed_iam_policy.folders``. The only supported + operator is ``=``. + + Example 1: governed_resource.project="projects/12345678" + filter will return all governed resources under + projects/12345678 including the project ifself, if + applicable. + + Example 2: + governed_iam_policy.folders="folders/12345678" filter + will return all governed iam policies under + folders/12345678, if applicable. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([scope, constraint, filter]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a asset_service.AnalyzeOrgPolicyGovernedAssetsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
class ListSavedQueriesPager:
    """Synchronous pager for ``list_saved_queries`` requests.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` and exposes
    ``__iter__`` over its ``saved_queries`` items, transparently issuing
    further ``ListSavedQueries`` requests as more pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` attributes
    remain available on the pager (only the latest response is retained).
    """
    def __init__(self,
            method: Callable[..., asset_service.ListSavedQueriesResponse],
            request: asset_service.ListSavedQueriesRequest,
            response: asset_service.ListSavedQueriesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC that was originally called and which
                produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.ListSavedQueriesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.ListSavedQueriesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.ListSavedQueriesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[asset_service.ListSavedQueriesResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[asset_service.SavedQuery]:
        for page in self.pages:
            for item in page.saved_queries:
                yield item

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class ListSavedQueriesAsyncPager:
    """Asynchronous pager for ``list_saved_queries`` requests.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` and exposes
    ``__aiter__`` over its ``saved_queries`` items, transparently issuing
    further ``ListSavedQueries`` requests as more pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.ListSavedQueriesResponse` attributes
    remain available on the pager (only the latest response is retained).
    """
    def __init__(self,
            method: Callable[..., Awaitable[asset_service.ListSavedQueriesResponse]],
            request: asset_service.ListSavedQueriesRequest,
            response: asset_service.ListSavedQueriesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The awaitable RPC that was originally called
                and which produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.ListSavedQueriesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.ListSavedQueriesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.ListSavedQueriesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]:
        async def _gen():
            async for page in self.pages:
                for item in page.saved_queries:
                    yield item

        return _gen()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class AnalyzeOrgPoliciesPager:
    """Synchronous pager for ``analyze_org_policies`` requests.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` and
    exposes ``__iter__`` over its ``org_policy_results`` items, transparently
    issuing further ``AnalyzeOrgPolicies`` requests as more pages are
    available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse`
    attributes remain available on the pager (only the latest response is
    retained).
    """
    def __init__(self,
            method: Callable[..., asset_service.AnalyzeOrgPoliciesResponse],
            request: asset_service.AnalyzeOrgPoliciesRequest,
            response: asset_service.AnalyzeOrgPoliciesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC that was originally called and which
                produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.AnalyzeOrgPoliciesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]:
        for page in self.pages:
            for item in page.org_policy_results:
                yield item

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class AnalyzeOrgPoliciesAsyncPager:
    """Asynchronous pager for ``analyze_org_policies`` requests.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse` and
    exposes ``__aiter__`` over its ``org_policy_results`` items,
    transparently issuing further ``AnalyzeOrgPolicies`` requests as more
    pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse`
    attributes remain available on the pager (only the latest response is
    retained).
    """
    def __init__(self,
            method: Callable[..., Awaitable[asset_service.AnalyzeOrgPoliciesResponse]],
            request: asset_service.AnalyzeOrgPoliciesRequest,
            response: asset_service.AnalyzeOrgPoliciesResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The awaitable RPC that was originally called
                and which produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.AnalyzeOrgPoliciesRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]:
        async def _gen():
            async for page in self.pages:
                for item in page.org_policy_results:
                    yield item

        return _gen()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class AnalyzeOrgPolicyGovernedContainersPager:
    """Synchronous pager for ``analyze_org_policy_governed_containers``.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse`
    and exposes ``__iter__`` over its ``governed_containers`` items,
    transparently issuing further ``AnalyzeOrgPolicyGovernedContainers``
    requests as more pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse`
    attributes remain available on the pager (only the latest response is
    retained).
    """
    def __init__(self,
            method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedContainersResponse],
            request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest,
            response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC that was originally called and which
                produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]:
        for page in self.pages:
            for item in page.governed_containers:
                yield item

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class AnalyzeOrgPolicyGovernedContainersAsyncPager:
    """Asynchronous pager for ``analyze_org_policy_governed_containers``.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse`
    and exposes ``__aiter__`` over its ``governed_containers`` items,
    transparently issuing further ``AnalyzeOrgPolicyGovernedContainers``
    requests as more pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse`
    attributes remain available on the pager (only the latest response is
    retained).
    """
    def __init__(self,
            method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]],
            request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest,
            response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The awaitable RPC that was originally called
                and which produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]:
        async def _gen():
            async for page in self.pages:
                for item in page.governed_containers:
                    yield item

        return _gen()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class AnalyzeOrgPolicyGovernedAssetsPager:
    """Synchronous pager for ``analyze_org_policy_governed_assets``.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse`
    and exposes ``__iter__`` over its ``governed_assets`` items,
    transparently issuing further ``AnalyzeOrgPolicyGovernedAssets`` requests
    as more pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse`
    attributes remain available on the pager (only the latest response is
    retained).
    """
    def __init__(self,
            method: Callable[..., asset_service.AnalyzeOrgPolicyGovernedAssetsResponse],
            request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest,
            response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The RPC that was originally called and which
                produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]:
        for page in self.pages:
            for item in page.governed_assets:
                yield item

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'


class AnalyzeOrgPolicyGovernedAssetsAsyncPager:
    """Asynchronous pager for ``analyze_org_policy_governed_assets``.

    Wraps an initial
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse`
    and exposes ``__aiter__`` over its ``governed_assets`` items,
    transparently issuing further ``AnalyzeOrgPolicyGovernedAssets`` requests
    as more pages are available.

    Attribute access falls through to the most recent response, so the usual
    :class:`google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse`
    attributes remain available on the pager (only the latest response is
    retained).
    """
    def __init__(self,
            method: Callable[..., Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]],
            request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest,
            response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse,
            *,
            metadata: Sequence[Tuple[str, str]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The awaitable RPC that was originally called
                and which produced ``response``; reused for subsequent pages.
            request (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest):
                The initial request object.
            response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings sent along with
                every follow-up page request as metadata.
        """
        self._method = method
        # Copy the request so page_token mutation does not affect the caller.
        self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response message.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, metadata=self._metadata)
            yield self._response

    def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]:
        async def _gen():
            async for page in self.pages:
                for item in page.governed_assets:
                    yield item

        return _gen()

    def __repr__(self) -> str:
        return f'{self.__class__.__name__}<{self._response!r}>'
), + self.get_saved_query: gapic_v1.method.wrap_method( + self.get_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.list_saved_queries: gapic_v1.method.wrap_method( + self.list_saved_queries, + default_timeout=None, + client_info=client_info, + ), + self.update_saved_query: gapic_v1.method.wrap_method( + self.update_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.delete_saved_query: gapic_v1.method.wrap_method( + self.delete_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.batch_get_effective_iam_policies: gapic_v1.method.wrap_method( + self.batch_get_effective_iam_policies, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policies: gapic_v1.method.wrap_method( + self.analyze_org_policies, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policy_governed_containers: gapic_v1.method.wrap_method( + self.analyze_org_policy_governed_containers, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policy_governed_assets: gapic_v1.method.wrap_method( + self.analyze_org_policy_governed_assets, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -348,6 +403,105 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ]]: raise NotImplementedError() + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Union[ + asset_service.AnalyzeMoveResponse, + Awaitable[asset_service.AnalyzeMoveResponse] + ]]: + raise NotImplementedError() + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + Union[ + asset_service.QueryAssetsResponse, + Awaitable[asset_service.QueryAssetsResponse] + ]]: + raise NotImplementedError() + + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ]]: + raise NotImplementedError() + + 
@property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ]]: + raise NotImplementedError() + + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Union[ + asset_service.ListSavedQueriesResponse, + Awaitable[asset_service.ListSavedQueriesResponse] + ]]: + raise NotImplementedError() + + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + Union[ + asset_service.SavedQuery, + Awaitable[asset_service.SavedQuery] + ]]: + raise NotImplementedError() + + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Union[ + asset_service.BatchGetEffectiveIamPoliciesResponse, + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Union[ + asset_service.AnalyzeOrgPoliciesResponse, + Awaitable[asset_service.AnalyzeOrgPoliciesResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse] + ]]: + raise NotImplementedError() + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Union[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse] + ]]: + raise NotImplementedError() + 
@property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index ab3a577ca8e4..a565057b1d1c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -257,10 +257,10 @@ def export_assets(self) -> Callable[ line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -481,10 +481,10 @@ def search_all_resources(self) -> Callable[ asset_service.SearchAllResourcesResponse]: r"""Return a callable for the search all resources method over gRPC. - Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. 
The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. Returns: Callable[[~.SearchAllResourcesRequest], @@ -576,8 +576,8 @@ def analyze_iam_policy_longrunning(self) -> Callable[ [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. Returns: Callable[[~.AnalyzeIamPolicyLongrunningRequest], @@ -597,6 +597,337 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ) return self._stubs['analyze_iam_policy_longrunning'] + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + asset_service.AnalyzeMoveResponse]: + r"""Return a callable for the analyze move method over gRPC. + + Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. The policies and configuration are + subject to change before the actual resource migration + takes place. + + Returns: + Callable[[~.AnalyzeMoveRequest], + ~.AnalyzeMoveResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'analyze_move' not in self._stubs: + self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeMove', + request_serializer=asset_service.AnalyzeMoveRequest.serialize, + response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, + ) + return self._stubs['analyze_move'] + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + asset_service.QueryAssetsResponse]: + r"""Return a callable for the query assets method over gRPC. + + Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL `__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. + + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from the a previous + ``QueryAssets`` call. + + Note, the query result has approximately 10 GB limitation + enforced by BigQuery + https://cloud.google.com/bigquery/docs/best-practices-performance-output, + queries return larger results will result in errors. + + Returns: + Callable[[~.QueryAssetsRequest], + ~.QueryAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'query_assets' not in self._stubs: + self._stubs['query_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/QueryAssets', + request_serializer=asset_service.QueryAssetsRequest.serialize, + response_deserializer=asset_service.QueryAssetsResponse.deserialize, + ) + return self._stubs['query_assets'] + + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + asset_service.SavedQuery]: + r"""Return a callable for the create saved query method over gRPC. + + Creates a saved query in a parent + project/folder/organization. + + Returns: + Callable[[~.CreateSavedQueryRequest], + ~.SavedQuery]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_saved_query' not in self._stubs: + self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + request_serializer=asset_service.CreateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['create_saved_query'] + + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + asset_service.SavedQuery]: + r"""Return a callable for the get saved query method over gRPC. + + Gets details about a saved query. + + Returns: + Callable[[~.GetSavedQueryRequest], + ~.SavedQuery]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_saved_query' not in self._stubs: + self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/GetSavedQuery', + request_serializer=asset_service.GetSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['get_saved_query'] + + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + asset_service.ListSavedQueriesResponse]: + r"""Return a callable for the list saved queries method over gRPC. + + Lists all saved queries in a parent + project/folder/organization. + + Returns: + Callable[[~.ListSavedQueriesRequest], + ~.ListSavedQueriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_saved_queries' not in self._stubs: + self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListSavedQueries', + request_serializer=asset_service.ListSavedQueriesRequest.serialize, + response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, + ) + return self._stubs['list_saved_queries'] + + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + asset_service.SavedQuery]: + r"""Return a callable for the update saved query method over gRPC. + + Updates a saved query. + + Returns: + Callable[[~.UpdateSavedQueryRequest], + ~.SavedQuery]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_saved_query' not in self._stubs: + self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + request_serializer=asset_service.UpdateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['update_saved_query'] + + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete saved query method over gRPC. + + Deletes a saved query. + + Returns: + Callable[[~.DeleteSavedQueryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_saved_query' not in self._stubs: + self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + request_serializer=asset_service.DeleteSavedQueryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_saved_query'] + + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse]: + r"""Return a callable for the batch get effective iam + policies method over gRPC. + + Gets effective IAM policies for a batch of resources. + + Returns: + Callable[[~.BatchGetEffectiveIamPoliciesRequest], + ~.BatchGetEffectiveIamPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'batch_get_effective_iam_policies' not in self._stubs: + self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, + response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + ) + return self._stubs['batch_get_effective_iam_policies'] + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse]: + r"""Return a callable for the analyze org policies method over gRPC. + + Analyzes organization policies under a scope. + + Returns: + Callable[[~.AnalyzeOrgPoliciesRequest], + ~.AnalyzeOrgPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policies' not in self._stubs: + self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, + ) + return self._stubs['analyze_org_policies'] + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + r"""Return a callable for the analyze org policy governed + containers method over gRPC. + + Analyzes organization policies governed containers + (projects, folders or organization) under a scope. 
+ + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedContainersRequest], + ~.AnalyzeOrgPolicyGovernedContainersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_containers' not in self._stubs: + self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_containers'] + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + r"""Return a callable for the analyze org policy governed + assets method over gRPC. + + Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. + + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedAssetsRequest], + ~.AnalyzeOrgPolicyGovernedAssetsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_assets' not in self._stubs: + self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_assets'] + def close(self): self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index bd9417a8d230..d018d120c725 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -260,10 +260,10 @@ def export_assets(self) -> Callable[ line represents a [google.cloud.asset.v1.Asset][google.cloud.asset.v1.Asset] in the JSON format; for BigQuery table destinations, the output - table stores the fields in asset proto as columns. This API + table stores the fields in asset Protobuf as columns. This API implements the - [google.longrunning.Operation][google.longrunning.Operation] API - , which allows you to keep track of the export. We recommend + [google.longrunning.Operation][google.longrunning.Operation] + API, which allows you to keep track of the export. We recommend intervals of at least 2 seconds with exponential retry to poll the export operation result. 
For regular-size resource parent, the export operation usually finishes within 5 minutes. @@ -484,10 +484,10 @@ def search_all_resources(self) -> Callable[ Awaitable[asset_service.SearchAllResourcesResponse]]: r"""Return a callable for the search all resources method over gRPC. - Searches all Cloud resources within the specified scope, such as - a project, folder, or organization. The caller must be granted - the ``cloudasset.assets.searchAllResources`` permission on the - desired scope, otherwise the request will be rejected. + Searches all Google Cloud resources within the specified scope, + such as a project, folder, or organization. The caller must be + granted the ``cloudasset.assets.searchAllResources`` permission + on the desired scope, otherwise the request will be rejected. Returns: Callable[[~.SearchAllResourcesRequest], @@ -579,8 +579,8 @@ def analyze_iam_policy_longrunning(self) -> Callable[ [google.longrunning.Operation][google.longrunning.Operation], which allows you to track the operation status. We recommend intervals of at least 2 seconds with exponential backoff retry - to poll the operation result. The metadata contains the request - to help callers to map responses to requests. + to poll the operation result. The metadata contains the metadata + for the long-running operation. Returns: Callable[[~.AnalyzeIamPolicyLongrunningRequest], @@ -600,6 +600,337 @@ def analyze_iam_policy_longrunning(self) -> Callable[ ) return self._stubs['analyze_iam_policy_longrunning'] + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + Awaitable[asset_service.AnalyzeMoveResponse]]: + r"""Return a callable for the analyze move method over gRPC. + + Analyze moving a resource to a specified destination + without kicking off the actual move. The analysis is + best effort depending on the user's permissions of + viewing different hierarchical policies and + configurations. 
The policies and configuration are + subject to change before the actual resource migration + takes place. + + Returns: + Callable[[~.AnalyzeMoveRequest], + Awaitable[~.AnalyzeMoveResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_move' not in self._stubs: + self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeMove', + request_serializer=asset_service.AnalyzeMoveRequest.serialize, + response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, + ) + return self._stubs['analyze_move'] + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + Awaitable[asset_service.QueryAssetsResponse]]: + r"""Return a callable for the query assets method over gRPC. + + Issue a job that queries assets using a SQL statement compatible + with `BigQuery Standard + SQL `__. + + If the query execution finishes within timeout and there's no + pagination, the full query results will be returned in the + ``QueryAssetsResponse``. + + Otherwise, full query results can be obtained by issuing extra + requests with the ``job_reference`` from the a previous + ``QueryAssets`` call. + + Note, the query result has approximately 10 GB limitation + enforced by BigQuery + https://cloud.google.com/bigquery/docs/best-practices-performance-output, + queries return larger results will result in errors. + + Returns: + Callable[[~.QueryAssetsRequest], + Awaitable[~.QueryAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'query_assets' not in self._stubs: + self._stubs['query_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/QueryAssets', + request_serializer=asset_service.QueryAssetsRequest.serialize, + response_deserializer=asset_service.QueryAssetsResponse.deserialize, + ) + return self._stubs['query_assets'] + + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + Awaitable[asset_service.SavedQuery]]: + r"""Return a callable for the create saved query method over gRPC. + + Creates a saved query in a parent + project/folder/organization. + + Returns: + Callable[[~.CreateSavedQueryRequest], + Awaitable[~.SavedQuery]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_saved_query' not in self._stubs: + self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/CreateSavedQuery', + request_serializer=asset_service.CreateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['create_saved_query'] + + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + Awaitable[asset_service.SavedQuery]]: + r"""Return a callable for the get saved query method over gRPC. + + Gets details about a saved query. + + Returns: + Callable[[~.GetSavedQueryRequest], + Awaitable[~.SavedQuery]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_saved_query' not in self._stubs: + self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/GetSavedQuery', + request_serializer=asset_service.GetSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['get_saved_query'] + + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + Awaitable[asset_service.ListSavedQueriesResponse]]: + r"""Return a callable for the list saved queries method over gRPC. + + Lists all saved queries in a parent + project/folder/organization. + + Returns: + Callable[[~.ListSavedQueriesRequest], + Awaitable[~.ListSavedQueriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_saved_queries' not in self._stubs: + self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/ListSavedQueries', + request_serializer=asset_service.ListSavedQueriesRequest.serialize, + response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, + ) + return self._stubs['list_saved_queries'] + + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + Awaitable[asset_service.SavedQuery]]: + r"""Return a callable for the update saved query method over gRPC. + + Updates a saved query. + + Returns: + Callable[[~.UpdateSavedQueryRequest], + Awaitable[~.SavedQuery]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_saved_query' not in self._stubs: + self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', + request_serializer=asset_service.UpdateSavedQueryRequest.serialize, + response_deserializer=asset_service.SavedQuery.deserialize, + ) + return self._stubs['update_saved_query'] + + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete saved query method over gRPC. + + Deletes a saved query. + + Returns: + Callable[[~.DeleteSavedQueryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_saved_query' not in self._stubs: + self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', + request_serializer=asset_service.DeleteSavedQueryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_saved_query'] + + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + Awaitable[asset_service.BatchGetEffectiveIamPoliciesResponse]]: + r"""Return a callable for the batch get effective iam + policies method over gRPC. + + Gets effective IAM policies for a batch of resources. + + Returns: + Callable[[~.BatchGetEffectiveIamPoliciesRequest], + Awaitable[~.BatchGetEffectiveIamPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'batch_get_effective_iam_policies' not in self._stubs: + self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', + request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, + response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, + ) + return self._stubs['batch_get_effective_iam_policies'] + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + Awaitable[asset_service.AnalyzeOrgPoliciesResponse]]: + r"""Return a callable for the analyze org policies method over gRPC. + + Analyzes organization policies under a scope. + + Returns: + Callable[[~.AnalyzeOrgPoliciesRequest], + Awaitable[~.AnalyzeOrgPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policies' not in self._stubs: + self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', + request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, + ) + return self._stubs['analyze_org_policies'] + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedContainersResponse]]: + r"""Return a callable for the analyze org policy governed + containers method over gRPC. + + Analyzes organization policies governed containers + (projects, folders or organization) under a scope. 
+ + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedContainersRequest], + Awaitable[~.AnalyzeOrgPolicyGovernedContainersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_containers' not in self._stubs: + self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_containers'] + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + Awaitable[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]]: + r"""Return a callable for the analyze org policy governed + assets method over gRPC. + + Analyzes organization policies governed assets (Google Cloud + resources or policies) under a scope. This RPC supports custom + constraints and the following 10 canned constraints: + + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization + + This RPC only returns either resources of types supported by + `searchable asset + types `__, + or IAM policies. 
+ + Returns: + Callable[[~.AnalyzeOrgPolicyGovernedAssetsRequest], + Awaitable[~.AnalyzeOrgPolicyGovernedAssetsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'analyze_org_policy_governed_assets' not in self._stubs: + self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', + request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, + response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, + ) + return self._stubs['analyze_org_policy_governed_assets'] + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 6d2777d96d02..d68de1ab528a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -85,6 +85,38 @@ def post_analyze_iam_policy_longrunning(self, response): logging.log(f"Received response: {response}") return response + def pre_analyze_move(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_move(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_org_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_org_policies(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_org_policy_governed_assets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_org_policy_governed_assets(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_analyze_org_policy_governed_containers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_analyze_org_policy_governed_containers(self, response): + logging.log(f"Received response: {response}") + return response + def pre_batch_get_assets_history(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -93,6 +125,14 @@ def post_batch_get_assets_history(self, response): logging.log(f"Received response: {response}") return response + def pre_batch_get_effective_iam_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_batch_get_effective_iam_policies(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -101,10 +141,22 @@ def post_create_feed(self, response): logging.log(f"Received response: {response}") return response + def pre_create_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_saved_query(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_feed(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_delete_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + def pre_export_assets(self, request, metadata): logging.log(f"Received request: {request}") 
return request, metadata @@ -121,6 +173,14 @@ def post_get_feed(self, response): logging.log(f"Received response: {response}") return response + def pre_get_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_saved_query(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_assets(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -137,6 +197,22 @@ def post_list_feeds(self, response): logging.log(f"Received response: {response}") return response + def pre_list_saved_queries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_saved_queries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_query_assets(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_query_assets(self, response): + logging.log(f"Received response: {response}") + return response + def pre_search_all_iam_policies(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -161,6 +237,14 @@ def post_update_feed(self, response): logging.log(f"Received response: {response}") return response + def pre_update_saved_query(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_saved_query(self, response): + logging.log(f"Received response: {response}") + return response + transport = AssetServiceRestTransport(interceptor=MyCustomAssetServiceInterceptor()) client = AssetServiceClient(transport=transport) @@ -193,6 +277,70 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning + 
Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_move + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: + """Post-rpc interceptor for analyze_move + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_org_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: + """Post-rpc interceptor for analyze_org_policies + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + """Post-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for analyze_org_policy_governed_containers + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + """Post-rpc interceptor for analyze_org_policy_governed_containers + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. 
@@ -209,6 +357,22 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for batch_get_effective_iam_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + """Post-rpc interceptor for batch_get_effective_iam_policies + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -225,6 +389,22 @@ def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Se def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for create_feed + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + """Post-rpc interceptor for create_saved_query + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -238,6 +418,14 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata + def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for export_assets @@ -265,6 +453,22 @@ def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for get_feed + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + """Post-rpc interceptor for get_saved_query + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -297,6 +501,38 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_saved_queries + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: + """Post-rpc interceptor for list_saved_queries + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response + def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for query_assets + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: + """Post-rpc interceptor for query_assets + Override in a subclass to manipulate the response after it is returned by the AssetService server but before it is returned to user code. @@ -350,6 +586,22 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: it is returned to user code. """ return response + def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_saved_query + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: + """Post-rpc interceptor for update_saved_query + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. 
+ """ + return response @dataclasses.dataclass @@ -644,28 +896,29 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) return resp - class _BatchGetAssetsHistory(AssetServiceRestStub): + class _AnalyzeMove(AssetServiceRestStub): def __hash__(self): - return hash("BatchGetAssetsHistory") + return hash("AnalyzeMove") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + "destinationParent" : "", } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.BatchGetAssetsHistoryRequest, *, + request: asset_service.AnalyzeMoveRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.BatchGetAssetsHistoryResponse: - r"""Call the batch get assets history method over HTTP. + ) -> asset_service.AnalyzeMoveResponse: + r"""Call the analyze move method over HTTP. Args: - request (~.asset_service.BatchGetAssetsHistoryRequest): - The request object. Batch get assets history request. + request (~.asset_service.AnalyzeMoveRequest): + The request object. The request message for performing + resource move analysis. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -673,17 +926,19 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.BatchGetAssetsHistoryResponse: - Batch get assets history response. + ~.asset_service.AnalyzeMoveResponse: + The response message for resource + move analysis. 
+ """ http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', + 'uri': '/v1/{resource=*/*}:analyzeMove', }, ] - request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) - pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) + request, metadata = self._interceptor.pre_analyze_move(request, metadata) + pb_request = asset_service.AnalyzeMoveRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -713,35 +968,36 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.BatchGetAssetsHistoryResponse() - pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) + resp = asset_service.AnalyzeMoveResponse() + pb_resp = asset_service.AnalyzeMoveResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_batch_get_assets_history(resp) + resp = self._interceptor.post_analyze_move(resp) return resp - class _CreateFeed(AssetServiceRestStub): + class _AnalyzeOrgPolicies(AssetServiceRestStub): def __hash__(self): - return hash("CreateFeed") + return hash("AnalyzeOrgPolicies") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + "constraint" : "", } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.CreateFeedRequest, *, + request: asset_service.AnalyzeOrgPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.Feed: - r"""Call the create feed method over HTTP. + ) -> asset_service.AnalyzeOrgPoliciesResponse: + r"""Call the analyze org policies method over HTTP. Args: - request (~.asset_service.CreateFeedRequest): - The request object. 
Create asset feed request. + request (~.asset_service.AnalyzeOrgPoliciesRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -749,35 +1005,21 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. + ~.asset_service.AnalyzeOrgPoliciesResponse: + The response message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. """ http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', }, ] - request, metadata = self._interceptor.pre_create_feed(request, metadata) - pb_request = asset_service.CreateFeedRequest.pb(request) + request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) + pb_request = asset_service.AnalyzeOrgPoliciesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -797,7 +1039,6 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -806,45 +1047,541 @@ def __call__(self, raise 
core_exceptions.from_http_response(response) # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) + resp = asset_service.AnalyzeOrgPoliciesResponse() + pb_resp = asset_service.AnalyzeOrgPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_feed(resp) + resp = self._interceptor.post_analyze_org_policies(resp) return resp - class _DeleteFeed(AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedAssets(AssetServiceRestStub): def __hash__(self): - return hash("DeleteFeed") + return hash("AnalyzeOrgPolicyGovernedAssets") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + "constraint" : "", } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.DeleteFeedRequest, *, + request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete feed method over HTTP. + ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + r"""Call the analyze org policy + governed assets method over HTTP. Args: - request (~.asset_service.DeleteFeedRequest): - The request object. + request (~.asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
+ + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + """ http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', + }, + ] + request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) + pb_request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + pb_resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) + return resp + + class _AnalyzeOrgPolicyGovernedContainers(AssetServiceRestStub): + def __hash__(self): + return hash("AnalyzeOrgPolicyGovernedContainers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + r"""Call the analyze org policy + governed containers method over HTTP. + + Args: + request (~.asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + The request object. A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: + The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', + }, + ] + request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) + pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + pb_resp = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) + return resp + + class _BatchGetAssetsHistory(AssetServiceRestStub): + def __hash__(self): + return hash("BatchGetAssetsHistory") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.BatchGetAssetsHistoryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.BatchGetAssetsHistoryResponse: + r"""Call the batch get assets history method over HTTP. + + Args: + request (~.asset_service.BatchGetAssetsHistoryRequest): + The request object. Batch get assets history request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.BatchGetAssetsHistoryResponse: + Batch get assets history response. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', + }, + ] + request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) + pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.BatchGetAssetsHistoryResponse() + pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_assets_history(resp) + return resp + + class _BatchGetEffectiveIamPolicies(AssetServiceRestStub): + def __hash__(self): + return hash("BatchGetEffectiveIamPolicies") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "names" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: + r"""Call the batch get effective iam + policies method over HTTP. + + Args: + request (~.asset_service.BatchGetEffectiveIamPoliciesRequest): + The request object. A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.BatchGetEffectiveIamPoliciesResponse: + A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', + }, + ] + request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) + pb_request = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.BatchGetEffectiveIamPoliciesResponse() + pb_resp = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_effective_iam_policies(resp) + return resp + + class _CreateFeed(AssetServiceRestStub): + def __hash__(self): + return hash("CreateFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.CreateFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the create feed method over HTTP. + + Args: + request (~.asset_service.CreateFeedRequest): + The request object. Create asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}/feeds', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_create_feed(request, metadata) + pb_request = asset_service.CreateFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_feed(resp) + return resp + + class _CreateSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("CreateSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "savedQueryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.CreateSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SavedQuery: + r"""Call the create saved query method over HTTP. + + Args: + request (~.asset_service.CreateSavedQueryRequest): + The request object. Request to create a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SavedQuery: + A saved query which can be shared + with others or used later. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}/savedQueries', + 'body': 'saved_query', + }, + ] + request, metadata = self._interceptor.pre_create_saved_query(request, metadata) + pb_request = asset_service.CreateSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SavedQuery() + pb_resp = asset_service.SavedQuery.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_saved_query(resp) + return resp + + class _DeleteFeed(AssetServiceRestStub): + def __hash__(self): + return hash("DeleteFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.DeleteFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete feed method over HTTP. + + Args: + request (~.asset_service.DeleteFeedRequest): + The request object. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=*/*/feeds/*}', }, ] request, metadata = self._interceptor.pre_delete_feed(request, metadata) @@ -877,9 +1614,321 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExportAssets(AssetServiceRestStub): + class _DeleteSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("DeleteSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.DeleteSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ): + r"""Call the delete saved query method over HTTP. + + Args: + request (~.asset_service.DeleteSavedQueryRequest): + The request object. Request to delete a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=*/*/savedQueries/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) + pb_request = asset_service.DeleteSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ExportAssets(AssetServiceRestStub): + def __hash__(self): + return hash("ExportAssets") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.ExportAssetsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the export assets method over HTTP. + + Args: + request (~.asset_service.ExportAssetsRequest): + The request object. Export asset request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}:exportAssets', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_export_assets(request, metadata) + pb_request = asset_service.ExportAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_assets(resp) + return resp + + class _GetFeed(AssetServiceRestStub): + def __hash__(self): + return hash("GetFeed") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.GetFeedRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.Feed: + r"""Call the get feed method over HTTP. + + Args: + request (~.asset_service.GetFeedRequest): + The request object. Get asset feed request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.Feed: + An asset feed used to export asset + updates to a destinations. An asset feed + filter controls what updates are + exported. The asset feed must be created + within a project, organization, or + folder. Supported destinations are: + + Pub/Sub topics. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + request, metadata = self._interceptor.pre_get_feed(request, metadata) + pb_request = asset_service.GetFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.Feed() + pb_resp = asset_service.Feed.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_feed(resp) + return resp + + class _GetSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("GetSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.GetSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SavedQuery: + r"""Call the get saved query method over HTTP. + + Args: + request (~.asset_service.GetSavedQueryRequest): + The request object. Request to get a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SavedQuery: + A saved query which can be shared + with others or used later. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/savedQueries/*}', + }, + ] + request, metadata = self._interceptor.pre_get_saved_query(request, metadata) + pb_request = asset_service.GetSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SavedQuery() + pb_resp = asset_service.SavedQuery.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_saved_query(resp) + return resp + + class _ListAssets(AssetServiceRestStub): def __hash__(self): - return hash("ExportAssets") + return hash("ListAssets") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -889,16 +1938,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.ExportAssetsRequest, *, + request: asset_service.ListAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the export assets method over HTTP. + ) -> asset_service.ListAssetsResponse: + r"""Call the list assets method over HTTP. Args: - request (~.asset_service.ExportAssetsRequest): - The request object. Export asset request. + request (~.asset_service.ListAssetsRequest): + The request object. ListAssets request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -906,30 +1955,19 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - + ~.asset_service.ListAssetsResponse: + ListAssets response. 
""" http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:exportAssets', - 'body': '*', + 'method': 'get', + 'uri': '/v1/{parent=*/*}/assets', }, ] - request, metadata = self._interceptor.pre_export_assets(request, metadata) - pb_request = asset_service.ExportAssetsRequest.pb(request) + request, metadata = self._interceptor.pre_list_assets(request, metadata) + pb_request = asset_service.ListAssetsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -949,7 +1987,6 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -958,14 +1995,16 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_assets(resp) + resp = asset_service.ListAssetsResponse() + pb_resp = asset_service.ListAssetsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_assets(resp) return resp - class _GetFeed(AssetServiceRestStub): + class _ListFeeds(AssetServiceRestStub): def __hash__(self): - return hash("GetFeed") + return hash("ListFeeds") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -975,16 +2014,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.GetFeedRequest, *, + request: 
asset_service.ListFeedsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.Feed: - r"""Call the get feed method over HTTP. + ) -> asset_service.ListFeedsResponse: + r"""Call the list feeds method over HTTP. Args: - request (~.asset_service.GetFeedRequest): - The request object. Get asset feed request. + request (~.asset_service.ListFeedsRequest): + The request object. List asset feeds request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -992,25 +2031,17 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.Feed: - An asset feed used to export asset - updates to a destinations. An asset feed - filter controls what updates are - exported. The asset feed must be created - within a project, organization, or - folder. Supported destinations are: - - Pub/Sub topics. 
+ ~.asset_service.ListFeedsResponse: """ http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', + 'uri': '/v1/{parent=*/*}/feeds', }, ] - request, metadata = self._interceptor.pre_get_feed(request, metadata) - pb_request = asset_service.GetFeedRequest.pb(request) + request, metadata = self._interceptor.pre_list_feeds(request, metadata) + pb_request = asset_service.ListFeedsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -1040,16 +2071,16 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.Feed() - pb_resp = asset_service.Feed.pb(resp) + resp = asset_service.ListFeedsResponse() + pb_resp = asset_service.ListFeedsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_feed(resp) + resp = self._interceptor.post_list_feeds(resp) return resp - class _ListAssets(AssetServiceRestStub): + class _ListSavedQueries(AssetServiceRestStub): def __hash__(self): - return hash("ListAssets") + return hash("ListSavedQueries") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -1059,16 +2090,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.ListAssetsRequest, *, + request: asset_service.ListSavedQueriesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.ListAssetsResponse: - r"""Call the list assets method over HTTP. + ) -> asset_service.ListSavedQueriesResponse: + r"""Call the list saved queries method over HTTP. Args: - request (~.asset_service.ListAssetsRequest): - The request object. ListAssets request. + request (~.asset_service.ListSavedQueriesRequest): + The request object. 
Request to list saved queries. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1076,17 +2107,17 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.ListAssetsResponse: - ListAssets response. + ~.asset_service.ListSavedQueriesResponse: + Response of listing saved queries. """ http_options: List[Dict[str, str]] = [{ 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', + 'uri': '/v1/{parent=*/*}/savedQueries', }, ] - request, metadata = self._interceptor.pre_list_assets(request, metadata) - pb_request = asset_service.ListAssetsRequest.pb(request) + request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) + pb_request = asset_service.ListSavedQueriesRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -1116,16 +2147,16 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.ListAssetsResponse() - pb_resp = asset_service.ListAssetsResponse.pb(resp) + resp = asset_service.ListSavedQueriesResponse() + pb_resp = asset_service.ListSavedQueriesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_assets(resp) + resp = self._interceptor.post_list_saved_queries(resp) return resp - class _ListFeeds(AssetServiceRestStub): + class _QueryAssets(AssetServiceRestStub): def __hash__(self): - return hash("ListFeeds") + return hash("QueryAssets") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -1135,16 +2166,16 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: asset_service.ListFeedsRequest, *, + request: asset_service.QueryAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, 
timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> asset_service.ListFeedsResponse: - r"""Call the list feeds method over HTTP. + ) -> asset_service.QueryAssetsResponse: + r"""Call the query assets method over HTTP. Args: - request (~.asset_service.ListFeedsRequest): - The request object. List asset feeds request. + request (~.asset_service.QueryAssetsRequest): + The request object. QueryAssets request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1152,19 +2183,27 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.asset_service.ListFeedsResponse: - + ~.asset_service.QueryAssetsResponse: + QueryAssets response. """ http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', + 'method': 'post', + 'uri': '/v1/{parent=*/*}:queryAssets', + 'body': '*', }, ] - request, metadata = self._interceptor.pre_list_feeds(request, metadata) - pb_request = asset_service.ListFeedsRequest.pb(request) + request, metadata = self._interceptor.pre_query_assets(request, metadata) + pb_request = asset_service.QueryAssetsRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -1184,6 +2223,7 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1192,11 +2232,11 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = asset_service.ListFeedsResponse() - pb_resp = asset_service.ListFeedsResponse.pb(resp) + resp = 
asset_service.QueryAssetsResponse() + pb_resp = asset_service.QueryAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_feeds(resp) + resp = self._interceptor.post_query_assets(resp) return resp class _SearchAllIamPolicies(AssetServiceRestStub): @@ -1444,6 +2484,93 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) return resp + class _UpdateSavedQuery(AssetServiceRestStub): + def __hash__(self): + return hash("UpdateSavedQuery") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: asset_service.UpdateSavedQueryRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> asset_service.SavedQuery: + r"""Call the update saved query method over HTTP. + + Args: + request (~.asset_service.UpdateSavedQueryRequest): + The request object. Request to update a saved query. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.asset_service.SavedQuery: + A saved query which can be shared + with others or used later. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', + 'body': 'saved_query', + }, + ] + request, metadata = self._interceptor.pre_update_saved_query(request, metadata) + pb_request = asset_service.UpdateSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = asset_service.SavedQuery() + pb_resp = asset_service.SavedQuery.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_saved_query(resp) + return resp + @property def analyze_iam_policy(self) -> Callable[ [asset_service.AnalyzeIamPolicyRequest], @@ -1460,6 +2587,38 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # In C++ this would require a dynamic_cast return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + @property + def analyze_move(self) -> Callable[ + [asset_service.AnalyzeMoveRequest], + asset_service.AnalyzeMoveResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_org_policies(self) -> Callable[ + [asset_service.AnalyzeOrgPoliciesRequest], + asset_service.AnalyzeOrgPoliciesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_org_policy_governed_assets(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedAssetsRequest], + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + + @property + def analyze_org_policy_governed_containers(self) -> Callable[ + [asset_service.AnalyzeOrgPolicyGovernedContainersRequest], + asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + @property def batch_get_assets_history(self) -> Callable[ [asset_service.BatchGetAssetsHistoryRequest], @@ -1468,6 +2627,14 @@ def batch_get_assets_history(self) -> Callable[ # In C++ this would require a dynamic_cast return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + @property + def batch_get_effective_iam_policies(self) -> Callable[ + [asset_service.BatchGetEffectiveIamPoliciesRequest], + asset_service.BatchGetEffectiveIamPoliciesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + @property def create_feed(self) -> Callable[ [asset_service.CreateFeedRequest], @@ -1476,6 +2643,14 @@ def create_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def create_saved_query(self) -> Callable[ + [asset_service.CreateSavedQueryRequest], + asset_service.SavedQuery]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def delete_feed(self) -> Callable[ [asset_service.DeleteFeedRequest], @@ -1484,6 +2659,14 @@ def delete_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_saved_query(self) -> Callable[ + [asset_service.DeleteSavedQueryRequest], + empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def export_assets(self) -> Callable[ [asset_service.ExportAssetsRequest], @@ -1500,6 +2683,14 @@ def get_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def get_saved_query(self) -> Callable[ + [asset_service.GetSavedQueryRequest], + asset_service.SavedQuery]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def list_assets(self) -> Callable[ [asset_service.ListAssetsRequest], @@ -1516,6 +2707,22 @@ def list_feeds(self) -> Callable[ # In C++ this would require a dynamic_cast return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + @property + def list_saved_queries(self) -> Callable[ + [asset_service.ListSavedQueriesRequest], + asset_service.ListSavedQueriesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + + @property + def query_assets(self) -> Callable[ + [asset_service.QueryAssetsRequest], + asset_service.QueryAssetsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + @property def search_all_iam_policies(self) -> Callable[ [asset_service.SearchAllIamPoliciesRequest], @@ -1540,6 +2747,14 @@ def update_feed(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + @property + def update_saved_query(self) -> Callable[ + [asset_service.UpdateSavedQueryRequest], + asset_service.SavedQuery]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index 893bf916368b..24f59fb0cc06 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -14,15 +14,30 @@ # limitations under the License. 
# from .asset_service import ( + AnalyzeIamPolicyLongrunningMetadata, AnalyzeIamPolicyLongrunningRequest, AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyRequest, AnalyzeIamPolicyResponse, + AnalyzeMoveRequest, + AnalyzeMoveResponse, + AnalyzeOrgPoliciesRequest, + AnalyzeOrgPoliciesResponse, + AnalyzeOrgPolicyGovernedAssetsRequest, + AnalyzeOrgPolicyGovernedAssetsResponse, + AnalyzeOrgPolicyGovernedContainersRequest, + AnalyzeOrgPolicyGovernedContainersResponse, + AnalyzerOrgPolicy, + AnalyzerOrgPolicyConstraint, BatchGetAssetsHistoryRequest, BatchGetAssetsHistoryResponse, + BatchGetEffectiveIamPoliciesRequest, + BatchGetEffectiveIamPoliciesResponse, BigQueryDestination, CreateFeedRequest, + CreateSavedQueryRequest, DeleteFeedRequest, + DeleteSavedQueryRequest, ExportAssetsRequest, ExportAssetsResponse, Feed, @@ -30,45 +45,81 @@ GcsDestination, GcsOutputResult, GetFeedRequest, + GetSavedQueryRequest, IamPolicyAnalysisOutputConfig, IamPolicyAnalysisQuery, ListAssetsRequest, ListAssetsResponse, ListFeedsRequest, ListFeedsResponse, + ListSavedQueriesRequest, + ListSavedQueriesResponse, + MoveAnalysis, + MoveAnalysisResult, + MoveImpact, OutputConfig, OutputResult, PartitionSpec, PubsubDestination, + QueryAssetsOutputConfig, + QueryAssetsRequest, + QueryAssetsResponse, + QueryResult, + SavedQuery, SearchAllIamPoliciesRequest, SearchAllIamPoliciesResponse, SearchAllResourcesRequest, SearchAllResourcesResponse, + TableFieldSchema, + TableSchema, UpdateFeedRequest, + UpdateSavedQueryRequest, ContentType, ) from .assets import ( Asset, + AttachedResource, ConditionEvaluation, IamPolicyAnalysisResult, IamPolicyAnalysisState, IamPolicySearchResult, + RelatedAsset, + RelatedAssets, + RelatedResource, + RelatedResources, + RelationshipAttributes, Resource, ResourceSearchResult, TemporalAsset, TimeWindow, + VersionedResource, ) __all__ = ( + 'AnalyzeIamPolicyLongrunningMetadata', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', 
'AnalyzeIamPolicyRequest', 'AnalyzeIamPolicyResponse', + 'AnalyzeMoveRequest', + 'AnalyzeMoveResponse', + 'AnalyzeOrgPoliciesRequest', + 'AnalyzeOrgPoliciesResponse', + 'AnalyzeOrgPolicyGovernedAssetsRequest', + 'AnalyzeOrgPolicyGovernedAssetsResponse', + 'AnalyzeOrgPolicyGovernedContainersRequest', + 'AnalyzeOrgPolicyGovernedContainersResponse', + 'AnalyzerOrgPolicy', + 'AnalyzerOrgPolicyConstraint', 'BatchGetAssetsHistoryRequest', 'BatchGetAssetsHistoryResponse', + 'BatchGetEffectiveIamPoliciesRequest', + 'BatchGetEffectiveIamPoliciesResponse', 'BigQueryDestination', 'CreateFeedRequest', + 'CreateSavedQueryRequest', 'DeleteFeedRequest', + 'DeleteSavedQueryRequest', 'ExportAssetsRequest', 'ExportAssetsResponse', 'Feed', @@ -76,29 +127,50 @@ 'GcsDestination', 'GcsOutputResult', 'GetFeedRequest', + 'GetSavedQueryRequest', 'IamPolicyAnalysisOutputConfig', 'IamPolicyAnalysisQuery', 'ListAssetsRequest', 'ListAssetsResponse', 'ListFeedsRequest', 'ListFeedsResponse', + 'ListSavedQueriesRequest', + 'ListSavedQueriesResponse', + 'MoveAnalysis', + 'MoveAnalysisResult', + 'MoveImpact', 'OutputConfig', 'OutputResult', 'PartitionSpec', 'PubsubDestination', + 'QueryAssetsOutputConfig', + 'QueryAssetsRequest', + 'QueryAssetsResponse', + 'QueryResult', + 'SavedQuery', 'SearchAllIamPoliciesRequest', 'SearchAllIamPoliciesResponse', 'SearchAllResourcesRequest', 'SearchAllResourcesResponse', + 'TableFieldSchema', + 'TableSchema', 'UpdateFeedRequest', + 'UpdateSavedQueryRequest', 'ContentType', 'Asset', + 'AttachedResource', 'ConditionEvaluation', 'IamPolicyAnalysisResult', 'IamPolicyAnalysisState', 'IamPolicySearchResult', + 'RelatedAsset', + 'RelatedAssets', + 'RelatedResource', + 'RelatedResources', + 'RelationshipAttributes', 'Resource', 'ResourceSearchResult', 'TemporalAsset', 'TimeWindow', + 'VersionedResource', ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index af29c724b9a7..00ef45dfcea0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -20,9 +20,12 @@ import proto # type: ignore from google.cloud.asset_v1.types import assets as gca_assets +from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore @@ -30,6 +33,7 @@ package='google.cloud.asset.v1', manifest={ 'ContentType', + 'AnalyzeIamPolicyLongrunningMetadata', 'ExportAssetsRequest', 'ExportAssetsResponse', 'ListAssetsRequest', @@ -61,6 +65,34 @@ 'IamPolicyAnalysisOutputConfig', 'AnalyzeIamPolicyLongrunningRequest', 'AnalyzeIamPolicyLongrunningResponse', + 'SavedQuery', + 'CreateSavedQueryRequest', + 'GetSavedQueryRequest', + 'ListSavedQueriesRequest', + 'ListSavedQueriesResponse', + 'UpdateSavedQueryRequest', + 'DeleteSavedQueryRequest', + 'AnalyzeMoveRequest', + 'AnalyzeMoveResponse', + 'MoveAnalysis', + 'MoveAnalysisResult', + 'MoveImpact', + 'QueryAssetsOutputConfig', + 'QueryAssetsRequest', + 'QueryAssetsResponse', + 'QueryResult', + 'TableSchema', + 'TableFieldSchema', + 'BatchGetEffectiveIamPoliciesRequest', + 'BatchGetEffectiveIamPoliciesResponse', + 'AnalyzerOrgPolicy', + 'AnalyzerOrgPolicyConstraint', + 'AnalyzeOrgPoliciesRequest', + 'AnalyzeOrgPoliciesResponse', + 'AnalyzeOrgPolicyGovernedContainersRequest', + 'AnalyzeOrgPolicyGovernedContainersResponse', + 'AnalyzeOrgPolicyGovernedAssetsRequest', + 'AnalyzeOrgPolicyGovernedAssetsResponse', }, ) @@ -76,13 +108,14 @@ 
class ContentType(proto.Enum): IAM_POLICY (2): The actual IAM policy set on a resource. ORG_POLICY (4): - The Cloud Organization Policy set on an - asset. + The organization policy set on an asset. ACCESS_POLICY (5): - The Cloud Access context manager Policy set - on an asset. + The Access Context Manager policy set on an + asset. OS_INVENTORY (6): The runtime OS Inventory information. + RELATIONSHIP (7): + The related resources. """ CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 @@ -90,6 +123,24 @@ class ContentType(proto.Enum): ORG_POLICY = 4 ACCESS_POLICY = 5 OS_INVENTORY = 6 + RELATIONSHIP = 7 + + +class AnalyzeIamPolicyLongrunningMetadata(proto.Message): + r"""Represents the metadata of the longrunning operation for the + AnalyzeIamPolicyLongrunning RPC. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the operation was + created. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) class ExportAssetsRequest(proto.Message): @@ -141,6 +192,23 @@ class ExportAssetsRequest(proto.Message): output_config (google.cloud.asset_v1.types.OutputConfig): Required. Output configuration indicating where the results will be output to. + relationship_types (MutableSequence[str]): + A list of relationship types to export, for example: + ``INSTANCE_TO_INSTANCEGROUP``. This field should only be + specified if content_type=RELATIONSHIP. + + - If specified: it snapshots specified relationships. It + returns an error if any of the [relationship_types] + doesn't belong to the supported relationship types of the + [asset_types] or if any of the [asset_types] doesn't + belong to the source types of the [relationship_types]. + - Otherwise: it snapshots the supported relationships for + all [asset_types] or returns an error if any of the + [asset_types] has no relationship support. An unspecified + asset types field means all supported asset_types. 
See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -166,6 +234,10 @@ class ExportAssetsRequest(proto.Message): number=5, message='OutputConfig', ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) class ExportAssetsResponse(proto.Message): @@ -183,11 +255,11 @@ class ExportAssetsResponse(proto.Message): results were output to. output_result (google.cloud.asset_v1.types.OutputResult): Output result indicating where the assets were exported to. - For example, a set of actual Google Cloud Storage object - uris where the assets are exported to. The uris can be - different from what [output_config] has specified, as the - service will split the output object into multiple ones once - it exceeds a single Google Cloud Storage object limit. + For example, a set of actual Cloud Storage object URIs where + the assets are exported to. The URIs can be different from + what [output_config] has specified, as the service will + split the output object into multiple ones once it exceeds a + single Cloud Storage object limit. """ read_time: timestamp_pb2.Timestamp = proto.Field( @@ -212,11 +284,13 @@ class ListAssetsRequest(proto.Message): Attributes: parent (str): - Required. Name of the organization or project the assets - belong to. Format: "organizations/[organization-number]" - (such as "organizations/123"), "projects/[project-id]" (such - as "projects/my-project-id"), or "projects/[project-number]" - (such as "projects/12345"). + Required. Name of the organization, folder, or project the + assets belong to. Format: + "organizations/[organization-number]" (such as + "organizations/123"), "projects/[project-id]" (such as + "projects/my-project-id"), "projects/[project-number]" (such + as "projects/12345"), or "folders/[folder-number]" (such as + "folders/12345"). 
read_time (google.protobuf.timestamp_pb2.Timestamp): Timestamp to take an asset snapshot. This can only be set to a timestamp between the current @@ -262,6 +336,23 @@ class ListAssetsRequest(proto.Message): ``ListAssetsRequest``. It is a continuation of a prior ``ListAssets`` call, and the API should return the next page of assets. + relationship_types (MutableSequence[str]): + A list of relationship types to output, for example: + ``INSTANCE_TO_INSTANCEGROUP``. This field should only be + specified if content_type=RELATIONSHIP. + + - If specified: it snapshots specified relationships. It + returns an error if any of the [relationship_types] + doesn't belong to the supported relationship types of the + [asset_types] or if any of the [asset_types] doesn't + belong to the source types of the [relationship_types]. + - Otherwise: it snapshots the supported relationships for + all [asset_types] or returns an error if any of the + [asset_types] has no relationship support. An unspecified + asset types field means all supported asset_types. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -290,6 +381,10 @@ class ListAssetsRequest(proto.Message): proto.STRING, number=6, ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) class ListAssetsResponse(proto.Message): @@ -357,6 +452,23 @@ class BatchGetAssetsHistoryRequest(proto.Message): not set, the snapshot of the assets at end_time will be returned. The returned results contain all temporal assets whose time window overlap with read_time_window. + relationship_types (MutableSequence[str]): + Optional. A list of relationship types to output, for + example: ``INSTANCE_TO_INSTANCEGROUP``. This field should + only be specified if content_type=RELATIONSHIP. + + - If specified: it outputs specified relationships' history + on the [asset_names]. 
It returns an error if any of the + [relationship_types] doesn't belong to the supported + relationship types of the [asset_names] or if any of the + [asset_names]'s types doesn't belong to the source types + of the [relationship_types]. + - Otherwise: it outputs the supported relationships' + history on the [asset_names] or returns an error if any + of the [asset_names]'s types has no relationship support. + See `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -377,6 +489,10 @@ class BatchGetAssetsHistoryRequest(proto.Message): number=4, message=gca_assets.TimeWindow, ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) class BatchGetAssetsHistoryResponse(proto.Message): @@ -405,7 +521,7 @@ class CreateFeedRequest(proto.Message): organization number (such as "organizations/123"), a folder number (such as "folders/123"), a project ID (such as - "projects/my-project-id")", or a project number + "projects/my-project-id"), or a project number (such as "projects/12345"). feed_id (str): Required. This is the client-assigned asset @@ -547,8 +663,8 @@ class OutputConfig(proto.Message): This field is a member of `oneof`_ ``destination``. bigquery_destination (google.cloud.asset_v1.types.BigQueryDestination): Destination on BigQuery. The output table - stores the fields in asset proto as columns in - BigQuery. + stores the fields in asset Protobuf as columns + in BigQuery. This field is a member of `oneof`_ ``destination``. """ @@ -592,7 +708,7 @@ class GcsOutputResult(proto.Message): Attributes: uris (MutableSequence[str]): - List of uris of the Cloud Storage objects. Example: + List of URIs of the Cloud Storage objects. Example: "gs://bucket_name/object_name". """ @@ -614,7 +730,7 @@ class GcsDestination(proto.Message): Attributes: uri (str): - The uri of the Cloud Storage object. It's the same uri that + The URI of the Cloud Storage object. 
It's the same URI that is used by gsutil. Example: "gs://bucket_name/object_name". See `Viewing and Editing Object Metadata `__ @@ -627,9 +743,9 @@ class GcsDestination(proto.Message): This field is a member of `oneof`_ ``object_uri``. uri_prefix (str): - The uri prefix of all generated Cloud Storage objects. + The URI prefix of all generated Cloud Storage objects. Example: "gs://bucket_name/object_name_prefix". Each object - uri is in format: "gs://bucket_name/object_name_prefix// and + URI is in format: "gs://bucket_name/object_name_prefix// and only contains assets for that type. starts from 0. Example: "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is the first shard of output objects containing all @@ -661,6 +777,12 @@ class BigQueryDestination(proto.Message): "projects/projectId/datasets/datasetId", to which the snapshot result should be exported. If this dataset does not exist, the export call returns an INVALID_ARGUMENT error. + Setting the ``contentType`` for ``exportAssets`` determines + the + `schema `__ + of the BigQuery table. Setting + ``separateTablesPerAssetType`` to ``TRUE`` also influences + the schema. table (str): Required. The BigQuery table to which the snapshot result should be written. If this table @@ -860,9 +982,9 @@ class Feed(proto.Message): asset_names or asset_types are exported to the feed. Example: ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. - See `Resource - Names `__ - for more info. + For a list of the full names for supported asset types, see + `Resource name + format `__. asset_types (MutableSequence[str]): A list of types of the assets to receive updates. You must specify either or both of asset_names and asset_types. Only @@ -870,9 +992,9 @@ class Feed(proto.Message): are exported to the feed. Example: ``"compute.googleapis.com/Disk"`` - See `this - topic `__ - for a list of all supported asset types. 
+ For a list of all supported asset types, see `Supported + asset + types `__. content_type (google.cloud.asset_v1.types.ContentType): Asset content type. If not specified, no content but the asset name and type will be @@ -892,8 +1014,27 @@ class Feed(proto.Message): optional. See our `user - guide `__ + guide `__ for detailed instructions. + relationship_types (MutableSequence[str]): + A list of relationship types to output, for example: + ``INSTANCE_TO_INSTANCEGROUP``. This field should only be + specified if content_type=RELATIONSHIP. + + - If specified: it outputs specified relationship updates + on the [asset_names] or the [asset_types]. It returns an + error if any of the [relationship_types] doesn't belong + to the supported relationship types of the [asset_names] + or [asset_types], or any of the [asset_names] or the + [asset_types] doesn't belong to the source types of the + [relationship_types]. + - Otherwise: it outputs the supported relationships of the + types of [asset_names] and [asset_types] or returns an + error if any of the [asset_names] or the [asset_types] + has no replationship support. See `Introduction to Cloud + Asset + Inventory `__ + for all supported asset types and relationship types. """ name: str = proto.Field( @@ -923,6 +1064,10 @@ class Feed(proto.Message): number=6, message=expr_pb2.Expr, ) + relationship_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=7, + ) class SearchAllResourcesRequest(proto.Message): @@ -951,44 +1096,61 @@ class SearchAllResourcesRequest(proto.Message): Examples: - - ``name:Important`` to find Cloud resources whose name - contains "Important" as a word. - - ``name=Important`` to find the Cloud resource whose name - is exactly "Important". - - ``displayName:Impor*`` to find Cloud resources whose - display name contains "Impor" as a prefix of any word in - the field. - - ``location:us-west*`` to find Cloud resources whose - location contains both "us" and "west" as prefixes. 
- - ``labels:prod`` to find Cloud resources whose labels - contain "prod" as a key or value. - - ``labels.env:prod`` to find Cloud resources that have a - label "env" and its value is "prod". - - ``labels.env:*`` to find Cloud resources that have a - label "env". - - ``kmsKey:key`` to find Cloud resources encrypted with a - customer-managed encryption key whose name contains the - word "key". - - ``state:ACTIVE`` to find Cloud resources whose state - contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find {{gcp_name}} resources whose + - ``name:Important`` to find Google Cloud resources whose + name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of any + word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources that + have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that have + a label "env". + - ``kmsKey:key`` to find Google Cloud resources encrypted + with a customer-managed encryption key whose name + contains "key" as a word. This field is deprecated. + Please use the ``kmsKeys`` field to retrieve Cloud KMS + key information. + - ``kmsKeys:key`` to find Google Cloud resources encrypted + with customer-managed encryption keys whose name contains + the word "key". + - ``relationships:instance-group-1`` to find Google Cloud + resources that have relationships with "instance-group-1" + in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of type + "INSTANCE_TO_INSTANCEGROUP". 
+ - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have relationships + with "instance-group-1" in the Compute Engine instance + group resource name, for relationship type + "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Cloud resources that - were created before "2021-01-01 00:00:00 UTC". 1609459200 - is the epoch timestamp of "2021-01-01 00:00:00 UTC" in - seconds. - - ``updateTime>1609459200`` to find Cloud resources that - were updated after "2021-01-01 00:00:00 UTC". 1609459200 - is the epoch timestamp of "2021-01-01 00:00:00 UTC" in - seconds. - - ``Important`` to find Cloud resources that contain + - ``createTime<1609459200`` to find Google Cloud resources + that were created before "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 + UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud resources + that were updated after "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 + UTC" in seconds. + - ``Important`` to find Google Cloud resources that contain "Important" as a word in any of the searchable fields. - - ``Impor*`` to find Cloud resources that contain "Impor" - as a prefix of any word in any of the searchable fields. - - ``Important location:(us-west1 OR global)`` to find Cloud - resources that contain "Important" as a word in any of - the searchable fields and are also located in the - "us-west1" region or the "global" location. + - ``Impor*`` to find Google Cloud resources that contain + "Impor" as a prefix of any word in any of the searchable + fields. 
+ - ``Important location:(us-west1 OR global)`` to find + Google Cloud resources that contain "Important" as a word + in any of the searchable fields and are also located in + the "us-west1" region or the "global" location. asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it will search all the `searchable asset @@ -1034,15 +1196,52 @@ class SearchAllResourcesRequest(proto.Message): - displayName - description - location - - kmsKey - createTime - updateTime - state - parentFullResourceName - - parentAssetType All the other fields such as repeated - fields (e.g., ``networkTags``), map fields (e.g., - ``labels``) and struct fields (e.g., - ``additionalAttributes``) are not supported. + - parentAssetType + + All the other fields such as repeated fields (e.g., + ``networkTags``, ``kmsKeys``), map fields (e.g., ``labels``) + and struct fields (e.g., ``additionalAttributes``) are not + supported. + read_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. A comma-separated list of fields specifying which + fields to be returned in ResourceSearchResult. Only '*' or + combination of top level fields can be specified. Field + names of both snake_case and camelCase are supported. + Examples: ``"*"``, ``"name,location"``, + ``"name,versionedResources"``. + + The read_mask paths must be valid field paths listed but not + limited to (both snake_case and camelCase are supported): + + - name + - assetType + - project + - displayName + - description + - location + - tagKeys + - tagValues + - tagValueIds + - labels + - networkTags + - kmsKey (This field is deprecated. Please use the + ``kmsKeys`` field to retrieve Cloud KMS key information.) + - kmsKeys + - createTime + - updateTime + - state + - additionalAttributes + - versionedResources + + If read_mask is not specified, all fields except + versionedResources will be returned. 
If only '*' is + specified, all fields including versionedResources will be + returned. Any invalid field path will trigger + INVALID_ARGUMENT error. """ scope: str = proto.Field( @@ -1069,6 +1268,11 @@ class SearchAllResourcesRequest(proto.Message): proto.STRING, number=6, ) + read_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=8, + message=field_mask_pb2.FieldMask, + ) class SearchAllResourcesResponse(proto.Message): @@ -1124,12 +1328,12 @@ class SearchAllIamPoliciesRequest(proto.Message): query `__ for more information. If not specified or empty, it will search all the IAM policies within the specified ``scope``. - Note that the query string is compared against each Cloud - IAM policy binding, including its members, roles, and Cloud - IAM conditions. The returned Cloud IAM policies will only - contain the bindings that match your query. To learn more - about the IAM policy structure, see `IAM policy - doc `__. + Note that the query string is compared against each IAM + policy binding, including its principals, roles, and IAM + conditions. The returned IAM policies will only contain the + bindings that match your query. To learn more about the IAM + policy structure, see the `IAM policy + documentation `__. Examples: @@ -1165,7 +1369,7 @@ class SearchAllIamPoliciesRequest(proto.Message): - ``roles:roles/compute.admin`` to find IAM policy bindings that specify the Compute Admin role. - ``memberTypes:user`` to find IAM policy bindings that - contain the "user" member type. + contain the principal type "user". page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a larger value is given. If @@ -1245,7 +1449,7 @@ class SearchAllIamPoliciesResponse(proto.Message): Attributes: results (MutableSequence[google.cloud.asset_v1.types.IamPolicySearchResult]): - A list of IamPolicy that match the search + A list of IAM policies that match the search query. 
Related information such as the associated resource is returned along with the policy. @@ -1271,7 +1475,7 @@ def raw_page(self): class IamPolicyAnalysisQuery(proto.Message): - r"""## IAM policy analysis query message. + r"""IAM policy analysis query message. Attributes: scope (str): @@ -1329,8 +1533,8 @@ class IdentitySelector(proto.Message): Attributes: identity (str): - Required. The identity appear in the form of members in `IAM - policy + Required. The identity appear in the form of principals in + `IAM policy binding `__. The examples of supported forms are: @@ -1384,6 +1588,9 @@ class Options(proto.Message): is specified, the identity in the result will be determined by the selector, and this flag is not allowed to set. + If true, the default max expansion per group is 1000 for + AssetService.AnalyzeIamPolicy][]. + Default is false. expand_roles (bool): Optional. If true, the access section of result will expand @@ -1406,33 +1613,36 @@ class Options(proto.Message): For example, if the request analyzes for which resources user A has permission P, and the results include an IAM - policy with P on a GCP folder, the results will also include - resources in that folder with permission P. + policy with P on a Google Cloud folder, the results will + also include resources in that folder with permission P. If true and [IamPolicyAnalysisQuery.resource_selector][google.cloud.asset.v1.IamPolicyAnalysisQuery.resource_selector] is specified, the resource section of the result will expand the specified resource to include resources lower in the resource hierarchy. Only project or lower resources are - supported. Folder and organization resource cannot be used + supported. Folder and organization resources cannot be used together with this option. For example, if the request analyzes for which users have - permission P on a GCP project with this option enabled, the - results will include all users who have permission P on that - project or any lower resource. 
+ permission P on a Google Cloud project with this option + enabled, the results will include all users who have + permission P on that project or any lower resource. + + If true, the default max expansion per resource is 1000 for + AssetService.AnalyzeIamPolicy][] and 100000 for + AssetService.AnalyzeIamPolicyLongrunning][]. Default is false. output_resource_edges (bool): - Optional. If true, the result will output - resource edges, starting from the policy - attached resource, to any expanded resources. - Default is false. + Optional. If true, the result will output the + relevant parent/child relationships between + resources. Default is false. output_group_edges (bool): - Optional. If true, the result will output - group identity edges, starting from the - binding's group members, to any expanded - identities. Default is false. + Optional. If true, the result will output the + relevant membership relationships between groups + and other groups, and between groups and + principals. Default is false. analyze_service_account_impersonation (bool): Optional. If true, the response will include access analysis from identities to resources via service account @@ -1440,26 +1650,38 @@ class Options(proto.Message): many derived queries will be executed. We highly recommend you use [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] - rpc instead. + RPC instead. For example, if the request analyzes for which resources user A has permission P, and there's an IAM policy states user A has iam.serviceAccounts.getAccessToken permission to a service account SA, and there's another IAM policy states - service account SA has permission P to a GCP folder F, then - user A potentially has access to the GCP folder F. And those - advanced analysis results will be included in + service account SA has permission P to a Google Cloud folder + F, then user A potentially has access to the Google Cloud + folder F. 
And those advanced analysis results will be + included in [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. Another example, if the request analyzes for who has - permission P to a GCP folder F, and there's an IAM policy - states user A has iam.serviceAccounts.actAs permission to a - service account SA, and there's another IAM policy states - service account SA has permission P to the GCP folder F, - then user A potentially has access to the GCP folder F. And - those advanced analysis results will be included in + permission P to a Google Cloud folder F, and there's an IAM + policy states user A has iam.serviceAccounts.actAs + permission to a service account SA, and there's another IAM + policy states service account SA has permission P to the + Google Cloud folder F, then user A potentially has access to + the Google Cloud folder F. And those advanced analysis + results will be included in [AnalyzeIamPolicyResponse.service_account_impersonation_analysis][google.cloud.asset.v1.AnalyzeIamPolicyResponse.service_account_impersonation_analysis]. + Only the following permissions are considered in this + analysis: + + - ``iam.serviceAccounts.actAs`` + - ``iam.serviceAccounts.signBlob`` + - ``iam.serviceAccounts.signJwt`` + - ``iam.serviceAccounts.getAccessToken`` + - ``iam.serviceAccounts.getOpenIdToken`` + - ``iam.serviceAccounts.implicitDelegation`` + Default is false. """ @@ -1548,6 +1770,25 @@ class AnalyzeIamPolicyRequest(proto.Message): Attributes: analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): Required. The request query. + saved_analysis_query (str): + Optional. 
The name of a saved query, which must be in the + format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + If both ``analysis_query`` and ``saved_analysis_query`` are + provided, they will be merged together with the + ``saved_analysis_query`` as base and the ``analysis_query`` + as overrides. For more details of the merge behavior, please + refer to the + `MergeFrom `__ + page. + + Note that you cannot override primitive fields with default + value, such as 0 or empty string, etc., because we use + proto3, which doesn't support field presence yet. execution_timeout (google.protobuf.duration_pb2.Duration): Optional. Amount of time executable has to complete. See JSON representation of @@ -1568,6 +1809,10 @@ class AnalyzeIamPolicyRequest(proto.Message): number=1, message='IamPolicyAnalysisQuery', ) + saved_analysis_query: str = proto.Field( + proto.STRING, + number=3, + ) execution_timeout: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, @@ -1678,8 +1923,8 @@ class GcsDestination(proto.Message): Attributes: uri (str): - Required. The uri of the Cloud Storage object. It's the same - uri that is used by gsutil. Example: + Required. The URI of the Cloud Storage object. It's the same + URI that is used by gsutil. Example: "gs://bucket_name/object_name". See `Viewing and Editing Object Metadata `__ @@ -1799,6 +2044,25 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): Attributes: analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): Required. The request query. + saved_analysis_query (str): + Optional. 
The name of a saved query, which must be in the + format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + + If both ``analysis_query`` and ``saved_analysis_query`` are + provided, they will be merged together with the + ``saved_analysis_query`` as base and the ``analysis_query`` + as overrides. For more details of the merge behavior, please + refer to the + `MergeFrom `__ + doc. + + Note that you cannot override primitive fields with default + value, such as 0 or empty string, etc., because we use + proto3, which doesn't support field presence yet. output_config (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig): Required. Output configuration indicating where the results will be output to. @@ -1809,6 +2073,10 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): number=1, message='IamPolicyAnalysisQuery', ) + saved_analysis_query: str = proto.Field( + proto.STRING, + number=3, + ) output_config: 'IamPolicyAnalysisOutputConfig' = proto.Field( proto.MESSAGE, number=2, @@ -1823,4 +2091,1921 @@ class AnalyzeIamPolicyLongrunningResponse(proto.Message): """ +class SavedQuery(proto.Message): + r"""A saved query which can be shared with others or used later. + + Attributes: + name (str): + The resource name of the saved query. The format must be: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + description (str): + The description of this saved query. This + value should be fewer than 255 characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time of this saved + query. + creator (str): + Output only. The account's email address who + has created this saved query. + last_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The last update time of this + saved query. + last_updater (str): + Output only. The account's email address who + has updated this saved query most recently. + labels (MutableMapping[str, str]): + Labels applied on the resource. + This value should not contain more than 10 + entries. The key and value of each entry must be + non-empty and fewer than 64 characters. + content (google.cloud.asset_v1.types.SavedQuery.QueryContent): + The query content. + """ + + class QueryContent(proto.Message): + r"""The query content. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + iam_policy_analysis_query (google.cloud.asset_v1.types.IamPolicyAnalysisQuery): + An IAM Policy Analysis query, which could be used in the + [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy] + RPC or the + [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning] + RPC. + + This field is a member of `oneof`_ ``query_content``. 
+ """ + + iam_policy_analysis_query: 'IamPolicyAnalysisQuery' = proto.Field( + proto.MESSAGE, + number=1, + oneof='query_content', + message='IamPolicyAnalysisQuery', + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + creator: str = proto.Field( + proto.STRING, + number=4, + ) + last_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + last_updater: str = proto.Field( + proto.STRING, + number=6, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + content: QueryContent = proto.Field( + proto.MESSAGE, + number=8, + message=QueryContent, + ) + + +class CreateSavedQueryRequest(proto.Message): + r"""Request to create a saved query. + + Attributes: + parent (str): + Required. The name of the project/folder/organization where + this saved_query should be created in. It can only be an + organization number (such as "organizations/123"), a folder + number (such as "folders/123"), a project ID (such as + "projects/my-project-id"), or a project number (such as + "projects/12345"). + saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved_query details. The ``name`` field must + be empty as it will be generated based on the parent and + saved_query_id. + saved_query_id (str): + Required. The ID to use for the saved query, which must be + unique in the specified parent. It will become the final + component of the saved query's resource name. + + This value should be 4-63 characters, and valid characters + are ``[a-z][0-9]-``. + + Notice that this field is required in the saved query + creation, and the ``name`` field of the ``saved_query`` will + be ignored. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + saved_query: 'SavedQuery' = proto.Field( + proto.MESSAGE, + number=2, + message='SavedQuery', + ) + saved_query_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetSavedQueryRequest(proto.Message): + r"""Request to get a saved query. + + Attributes: + name (str): + Required. The name of the saved query and it must be in the + format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSavedQueriesRequest(proto.Message): + r"""Request to list saved queries. + + Attributes: + parent (str): + Required. The parent + project/folder/organization whose savedQueries + are to be listed. It can only be using + project/folder/organization number (such as + "folders/12345")", or a project ID (such as + "projects/my-project-id"). + filter (str): + Optional. The expression to filter resources. The expression + is a list of zero or more restrictions combined via logical + operators ``AND`` and ``OR``. When ``AND`` and ``OR`` are + both used in the expression, parentheses must be + appropriately used to group the combinations. The expression + may also contain regular expressions. + + See https://google.aip.dev/160 for more information on the + grammar. + page_size (int): + Optional. The maximum number of saved queries + to return per page. The service may return fewer + than this value. If unspecified, at most 50 will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListSavedQueries`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListSavedQueries`` must match the call that provided the + page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSavedQueriesResponse(proto.Message): + r"""Response of listing saved queries. + + Attributes: + saved_queries (MutableSequence[google.cloud.asset_v1.types.SavedQuery]): + A list of savedQueries. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + saved_queries: MutableSequence['SavedQuery'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='SavedQuery', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class UpdateSavedQueryRequest(proto.Message): + r"""Request to update a saved query. + + Attributes: + saved_query (google.cloud.asset_v1.types.SavedQuery): + Required. The saved query to update. + + The saved query's ``name`` field is used to identify the one + to update, which has format as below: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + saved_query: 'SavedQuery' = proto.Field( + proto.MESSAGE, + number=1, + message='SavedQuery', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSavedQueryRequest(proto.Message): + r"""Request to delete a saved query. + + Attributes: + name (str): + Required. The name of the saved query to delete. 
It must be + in the format of: + + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class AnalyzeMoveRequest(proto.Message): + r"""The request message for performing resource move analysis. + + Attributes: + resource (str): + Required. Name of the resource to perform the + analysis against. Only Google Cloud projects are + supported as of today. Hence, this can only be a + project ID (such as "projects/my-project-id") or + a project number (such as "projects/12345"). + destination_parent (str): + Required. Name of the Google Cloud folder or + organization to reparent the target resource. + The analysis will be performed against + hypothetically moving the resource to this + specified desitination parent. This can only be + a folder number (such as "folders/123") or an + organization number (such as + "organizations/123"). + view (google.cloud.asset_v1.types.AnalyzeMoveRequest.AnalysisView): + Analysis view indicating what information + should be included in the analysis response. If + unspecified, the default view is FULL. + """ + class AnalysisView(proto.Enum): + r"""View enum for supporting partial analysis responses. + + Values: + ANALYSIS_VIEW_UNSPECIFIED (0): + The default/unset value. + The API will default to the FULL view. + FULL (1): + Full analysis including all level of impacts + of the specified resource move. + BASIC (2): + Basic analysis only including blockers which + will prevent the specified resource move at + runtime. 
+ """ + ANALYSIS_VIEW_UNSPECIFIED = 0 + FULL = 1 + BASIC = 2 + + resource: str = proto.Field( + proto.STRING, + number=1, + ) + destination_parent: str = proto.Field( + proto.STRING, + number=2, + ) + view: AnalysisView = proto.Field( + proto.ENUM, + number=3, + enum=AnalysisView, + ) + + +class AnalyzeMoveResponse(proto.Message): + r"""The response message for resource move analysis. + + Attributes: + move_analysis (MutableSequence[google.cloud.asset_v1.types.MoveAnalysis]): + The list of analyses returned from performing + the intended resource move analysis. The + analysis is grouped by different Google Cloud + services. + """ + + move_analysis: MutableSequence['MoveAnalysis'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MoveAnalysis', + ) + + +class MoveAnalysis(proto.Message): + r"""A message to group the analysis information. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + display_name (str): + The user friendly display name of the + analysis. E.g. IAM, organization policy etc. + analysis (google.cloud.asset_v1.types.MoveAnalysisResult): + Analysis result of moving the target + resource. + + This field is a member of `oneof`_ ``result``. + error (google.rpc.status_pb2.Status): + Description of error encountered when + performing the analysis. + + This field is a member of `oneof`_ ``result``. 
+ """ + + display_name: str = proto.Field( + proto.STRING, + number=1, + ) + analysis: 'MoveAnalysisResult' = proto.Field( + proto.MESSAGE, + number=2, + oneof='result', + message='MoveAnalysisResult', + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=status_pb2.Status, + ) + + +class MoveAnalysisResult(proto.Message): + r"""An analysis result including blockers and warnings. + + Attributes: + blockers (MutableSequence[google.cloud.asset_v1.types.MoveImpact]): + Blocking information that would prevent the + target resource from moving to the specified + destination at runtime. + warnings (MutableSequence[google.cloud.asset_v1.types.MoveImpact]): + Warning information indicating that moving + the target resource to the specified destination + might be unsafe. This can include important + policy information and configuration changes, + but will not block moves at runtime. + """ + + blockers: MutableSequence['MoveImpact'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='MoveImpact', + ) + warnings: MutableSequence['MoveImpact'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='MoveImpact', + ) + + +class MoveImpact(proto.Message): + r"""A message to group impacts of moving the target resource. + + Attributes: + detail (str): + User friendly impact detail in a free form + message. + """ + + detail: str = proto.Field( + proto.STRING, + number=1, + ) + + +class QueryAssetsOutputConfig(proto.Message): + r"""Output configuration query assets. + + Attributes: + bigquery_destination (google.cloud.asset_v1.types.QueryAssetsOutputConfig.BigQueryDestination): + BigQuery destination where the query results + will be saved. + """ + + class BigQueryDestination(proto.Message): + r"""BigQuery destination. + + Attributes: + dataset (str): + Required. The BigQuery dataset where the + query results will be saved. It has the format + of "projects/{projectId}/datasets/{datasetId}". + table (str): + Required. 
The BigQuery table where the query + results will be saved. If this table does not + exist, a new table with the given name will be + created. + write_disposition (str): + Specifies the action that occurs if the destination table or + partition already exists. The following values are + supported: + + - WRITE_TRUNCATE: If the table or partition already exists, + BigQuery overwrites the entire table or all the + partitions data. + - WRITE_APPEND: If the table or partition already exists, + BigQuery appends the data to the table or the latest + partition. + - WRITE_EMPTY: If the table already exists and contains + data, a 'duplicate' error is returned in the job result. + + The default value is WRITE_EMPTY. + """ + + dataset: str = proto.Field( + proto.STRING, + number=1, + ) + table: str = proto.Field( + proto.STRING, + number=2, + ) + write_disposition: str = proto.Field( + proto.STRING, + number=3, + ) + + bigquery_destination: BigQueryDestination = proto.Field( + proto.MESSAGE, + number=1, + message=BigQueryDestination, + ) + + +class QueryAssetsRequest(proto.Message): + r"""QueryAssets request. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The relative name of the root asset. This can only + be an organization number (such as "organizations/123"), a + project ID (such as "projects/my-project-id"), or a project + number (such as "projects/12345"), or a folder number (such + as "folders/123"). + + Only assets belonging to the ``parent`` will be returned. + statement (str): + Optional. A SQL statement that's compatible with `BigQuery + Standard + SQL `__. + + This field is a member of `oneof`_ ``query``. + job_reference (str): + Optional. 
Reference to the query job, which is from the + ``QueryAssetsResponse`` of previous ``QueryAssets`` call. + + This field is a member of `oneof`_ ``query``. + page_size (int): + Optional. The maximum number of rows to return in the + results. Responses are limited to 10 MB and 1000 rows. + + By default, the maximum row count is 1000. When the byte or + row count limit is reached, the rest of the query results + will be paginated. + + The field will be ignored when [output_config] is specified. + page_token (str): + Optional. A page token received from previous + ``QueryAssets``. + + The field will be ignored when [output_config] is specified. + timeout (google.protobuf.duration_pb2.Duration): + Optional. Specifies the maximum amount of time that the + client is willing to wait for the query to complete. By + default, this limit is 5 min for the first query, and 1 + minute for the following queries. If the query is complete, + the ``done`` field in the ``QueryAssetsResponse`` is true, + otherwise false. + + Like BigQuery `jobs.query + API `__ + The call is not guaranteed to wait for the specified + timeout; it typically returns after around 200 seconds + (200,000 milliseconds), even if the query is not complete. + + The field will be ignored when [output_config] is specified. + read_time_window (google.cloud.asset_v1.types.TimeWindow): + Optional. [start_time] is required. [start_time] must be + less than [end_time] Defaults [end_time] to now if + [start_time] is set and [end_time] isn't. Maximum permitted + time range is 7 days. + + This field is a member of `oneof`_ ``time``. + read_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Queries cloud assets as they + appeared at the specified point in time. + + This field is a member of `oneof`_ ``time``. + output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): + Optional. Destination where the query results will be saved. 
+ + When this field is specified, the query results won't be + saved in the [QueryAssetsResponse.query_result]. Instead + [QueryAssetsResponse.output_config] will be set. + + Meanwhile, [QueryAssetsResponse.job_reference] will be set + and can be used to check the status of the query job when + passed to a following [QueryAssets] API call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + statement: str = proto.Field( + proto.STRING, + number=2, + oneof='query', + ) + job_reference: str = proto.Field( + proto.STRING, + number=3, + oneof='query', + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + timeout: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + read_time_window: gca_assets.TimeWindow = proto.Field( + proto.MESSAGE, + number=7, + oneof='time', + message=gca_assets.TimeWindow, + ) + read_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=8, + oneof='time', + message=timestamp_pb2.Timestamp, + ) + output_config: 'QueryAssetsOutputConfig' = proto.Field( + proto.MESSAGE, + number=9, + message='QueryAssetsOutputConfig', + ) + + +class QueryAssetsResponse(proto.Message): + r"""QueryAssets response. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + job_reference (str): + Reference to a query job. + done (bool): + The query response, which can be either an ``error`` or a + valid ``response``. + + If ``done`` == ``false`` and the query result is being saved + in a output, the output_config field will be set. 
If + ``done`` == ``true``, exactly one of ``error``, + ``query_result`` or ``output_config`` will be set. + error (google.rpc.status_pb2.Status): + Error status. + + This field is a member of `oneof`_ ``response``. + query_result (google.cloud.asset_v1.types.QueryResult): + Result of the query. + + This field is a member of `oneof`_ ``response``. + output_config (google.cloud.asset_v1.types.QueryAssetsOutputConfig): + Output configuration which indicates instead + of being returned in API response on the fly, + the query result will be saved in a specific + output. + + This field is a member of `oneof`_ ``response``. + """ + + job_reference: str = proto.Field( + proto.STRING, + number=1, + ) + done: bool = proto.Field( + proto.BOOL, + number=2, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=3, + oneof='response', + message=status_pb2.Status, + ) + query_result: 'QueryResult' = proto.Field( + proto.MESSAGE, + number=4, + oneof='response', + message='QueryResult', + ) + output_config: 'QueryAssetsOutputConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='response', + message='QueryAssetsOutputConfig', + ) + + +class QueryResult(proto.Message): + r"""Execution results of the query. + + The result is formatted as rows represented by BigQuery compatible + [schema]. When pagination is necessary, it will contains the page + token to retrieve the results of following pages. + + Attributes: + rows (MutableSequence[google.protobuf.struct_pb2.Struct]): + Each row hold a query result in the format of ``Struct``. + schema (google.cloud.asset_v1.types.TableSchema): + Describes the format of the [rows]. + next_page_token (str): + Token to retrieve the next page of the + results. + total_rows (int): + Total rows of the whole query results. 
+ """ + + @property + def raw_page(self): + return self + + rows: MutableSequence[struct_pb2.Struct] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + schema: 'TableSchema' = proto.Field( + proto.MESSAGE, + number=2, + message='TableSchema', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + total_rows: int = proto.Field( + proto.INT64, + number=4, + ) + + +class TableSchema(proto.Message): + r"""BigQuery Compatible table schema. + + Attributes: + fields (MutableSequence[google.cloud.asset_v1.types.TableFieldSchema]): + Describes the fields in a table. + """ + + fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='TableFieldSchema', + ) + + +class TableFieldSchema(proto.Message): + r"""A field in TableSchema. + + Attributes: + field (str): + The field name. The name must contain only letters (a-z, + A-Z), numbers (0-9), or underscores (_), and must start with + a letter or underscore. The maximum length is 128 + characters. + type_ (str): + The field data type. Possible values include + + - STRING + - BYTES + - INTEGER + - FLOAT + - BOOLEAN + - TIMESTAMP + - DATE + - TIME + - DATETIME + - GEOGRAPHY, + - NUMERIC, + - BIGNUMERIC, + - RECORD (where RECORD indicates that the field contains a + nested schema). + mode (str): + The field mode. Possible values include + NULLABLE, REQUIRED and REPEATED. The default + value is NULLABLE. + fields (MutableSequence[google.cloud.asset_v1.types.TableFieldSchema]): + Describes the nested schema fields if the + type property is set to RECORD. 
+ """ + + field: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + mode: str = proto.Field( + proto.STRING, + number=3, + ) + fields: MutableSequence['TableFieldSchema'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='TableFieldSchema', + ) + + +class BatchGetEffectiveIamPoliciesRequest(proto.Message): + r"""A request message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + Attributes: + scope (str): + Required. Only IAM policies on or below the scope will be + returned. + + This can only be an organization number (such as + "organizations/123"), a folder number (such as + "folders/123"), a project ID (such as + "projects/my-project-id"), or a project number (such as + "projects/12345"). + + To know how to get organization id, visit + `here `__. + + To know how to get folder or project id, visit + `here `__. + names (MutableSequence[str]): + Required. The names refer to the [full_resource_names] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of `searchable asset + types `__. + A maximum of 20 resources' effective policies can be + retrieved in a batch. + """ + + scope: str = proto.Field( + proto.STRING, + number=1, + ) + names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class BatchGetEffectiveIamPoliciesResponse(proto.Message): + r"""A response message for + [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. + + Attributes: + policy_results (MutableSequence[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy]): + The effective policies for a batch of resources. Note that + the results order is the same as the order of + [BatchGetEffectiveIamPoliciesRequest.names][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.names]. 
+ When a resource does not have any effective IAM policies, + its corresponding policy_result will contain empty + [EffectiveIamPolicy.policies][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.policies]. + """ + + class EffectiveIamPolicy(proto.Message): + r"""The effective IAM policies on one resource. + + Attributes: + full_resource_name (str): + The [full_resource_name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + for which the + [policies][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.policies] + are computed. This is one of the + [BatchGetEffectiveIamPoliciesRequest.names][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.names] + the caller provides in the request. + policies (MutableSequence[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo]): + The effective policies for the + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name]. + + These policies include the policy set on the + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name] + and those set on its parents and ancestors up to the + [BatchGetEffectiveIamPoliciesRequest.scope][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesRequest.scope]. + Note that these policies are not filtered according to the + resource type of the + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name]. 
+ + These policies are hierarchically ordered by + [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource] + starting from + [full_resource_name][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.full_resource_name] + itself to its parents and ancestors, such that policies[i]'s + [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource] + is the child of policies[i+1]'s + [PolicyInfo.attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource], + if policies[i+1] exists. + """ + + class PolicyInfo(proto.Message): + r"""The IAM policy and its attached resource. + + Attributes: + attached_resource (str): + The full resource name the + [policy][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.policy] + is directly attached to. + policy (google.iam.v1.policy_pb2.Policy): + The IAM policy that's directly attached to the + [attached_resource][google.cloud.asset.v1.BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo.attached_resource]. 
+ """ + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + ) + policy: policy_pb2.Policy = proto.Field( + proto.MESSAGE, + number=2, + message=policy_pb2.Policy, + ) + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + policies: MutableSequence['BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='BatchGetEffectiveIamPoliciesResponse.EffectiveIamPolicy.PolicyInfo', + ) + + policy_results: MutableSequence[EffectiveIamPolicy] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=EffectiveIamPolicy, + ) + + +class AnalyzerOrgPolicy(proto.Message): + r"""This organization policy message is a modified version of the + one defined in the Organization Policy system. This message + contains several fields defined in the original organization + policy with some new fields for analysis purpose. + + Attributes: + attached_resource (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of an organization/folder/project resource where this + organization policy is set. + + Notice that some type of constraints are defined with + default policy. This field will be empty for them. + applied_resource (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of an organization/folder/project resource where this + organization policy applies to. + + For any user defined org policies, this field has the same + value as the [attached_resource] field. Only for default + policy, this field has the different value. + rules (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule]): + List of rules for this organization policy. + inherit_from_parent (bool): + If ``inherit_from_parent`` is true, Rules set higher up in + the hierarchy (up to the closest root) are inherited and + present in the effective policy. 
If it is false, then no + rules are inherited, and this policy becomes the effective + root for evaluation. + reset (bool): + Ignores policies set above this resource and restores the + default behavior of the constraint at this resource. This + field can be set in policies for either list or boolean + constraints. If set, ``rules`` must be empty and + ``inherit_from_parent`` must be set to false. + """ + + class Rule(proto.Message): + r"""Represents a rule defined in an organization policy + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + values (google.cloud.asset_v1.types.AnalyzerOrgPolicy.Rule.StringValues): + List of values to be used for this + PolicyRule. This field can be set only in + Policies for list constraints. + + This field is a member of `oneof`_ ``kind``. + allow_all (bool): + Setting this to true means that all values + are allowed. This field can be set only in + Policies for list constraints. + + This field is a member of `oneof`_ ``kind``. + deny_all (bool): + Setting this to true means that all values + are denied. This field can be set only in + Policies for list constraints. + + This field is a member of `oneof`_ ``kind``. + enforce (bool): + If ``true``, then the ``Policy`` is enforced. If ``false``, + then any configuration is acceptable. This field can be set + only in Policies for boolean constraints. + + This field is a member of `oneof`_ ``kind``. + condition (google.type.expr_pb2.Expr): + The evaluating condition for this rule. + """ + + class StringValues(proto.Message): + r"""The string values for the list constraints. + + Attributes: + allowed_values (MutableSequence[str]): + List of values allowed at this resource. 
+ denied_values (MutableSequence[str]): + List of values denied at this resource. + """ + + allowed_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + denied_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + values: 'AnalyzerOrgPolicy.Rule.StringValues' = proto.Field( + proto.MESSAGE, + number=3, + oneof='kind', + message='AnalyzerOrgPolicy.Rule.StringValues', + ) + allow_all: bool = proto.Field( + proto.BOOL, + number=4, + oneof='kind', + ) + deny_all: bool = proto.Field( + proto.BOOL, + number=5, + oneof='kind', + ) + enforce: bool = proto.Field( + proto.BOOL, + number=6, + oneof='kind', + ) + condition: expr_pb2.Expr = proto.Field( + proto.MESSAGE, + number=7, + message=expr_pb2.Expr, + ) + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + ) + applied_resource: str = proto.Field( + proto.STRING, + number=5, + ) + rules: MutableSequence[Rule] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Rule, + ) + inherit_from_parent: bool = proto.Field( + proto.BOOL, + number=3, + ) + reset: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class AnalyzerOrgPolicyConstraint(proto.Message): + r"""The organization policy constraint definition. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + google_defined_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint): + The definition of the canned constraint + defined by Google. + + This field is a member of `oneof`_ ``constraint_definition``. + custom_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint): + The definition of the custom constraint. 
+ + This field is a member of `oneof`_ ``constraint_definition``. + """ + + class Constraint(proto.Message): + r"""The definition of a constraint. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The unique name of the constraint. Format of the name should + be + + - ``constraints/{constraint_name}`` + + For example, + ``constraints/compute.disableSerialPortAccess``. + display_name (str): + The human readable name of the constraint. + description (str): + Detailed description of what this ``Constraint`` controls as + well as how and where it is enforced. + constraint_default (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault): + The evaluation behavior of this constraint in + the absence of 'Policy'. + list_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.ListConstraint): + Defines this constraint as being a + ListConstraint. + + This field is a member of `oneof`_ ``constraint_type``. + boolean_constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint): + Defines this constraint as being a + BooleanConstraint. + + This field is a member of `oneof`_ ``constraint_type``. + """ + class ConstraintDefault(proto.Enum): + r"""Specifies the default behavior in the absence of any ``Policy`` for + the ``Constraint``. This must not be + ``CONSTRAINT_DEFAULT_UNSPECIFIED``. + + Values: + CONSTRAINT_DEFAULT_UNSPECIFIED (0): + This is only used for distinguishing unset + values and should never be used. + ALLOW (1): + Indicate that all values are allowed for list + constraints. Indicate that enforcement is off + for boolean constraints. 
+ DENY (2): + Indicate that all values are denied for list + constraints. Indicate that enforcement is on for + boolean constraints. + """ + CONSTRAINT_DEFAULT_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + class ListConstraint(proto.Message): + r"""A ``Constraint`` that allows or disallows a list of string values, + which are configured by an organization's policy administrator with + a ``Policy``. + + Attributes: + supports_in (bool): + Indicates whether values grouped into categories can be used + in ``Policy.allowed_values`` and ``Policy.denied_values``. + For example, ``"in:Python"`` would match any value in the + 'Python' group. + supports_under (bool): + Indicates whether subtrees of Cloud Resource Manager + resource hierarchy can be used in ``Policy.allowed_values`` + and ``Policy.denied_values``. For example, + ``"under:folders/123"`` would match any resource under the + 'folders/123' folder. + """ + + supports_in: bool = proto.Field( + proto.BOOL, + number=1, + ) + supports_under: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class BooleanConstraint(proto.Message): + r"""A ``Constraint`` that is either enforced or not. + + For example a constraint + ``constraints/compute.disableSerialPortAccess``. If it is enforced + on a VM instance, serial port connections will not be opened to that + instance. 
+ + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + constraint_default: 'AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault' = proto.Field( + proto.ENUM, + number=4, + enum='AnalyzerOrgPolicyConstraint.Constraint.ConstraintDefault', + ) + list_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.ListConstraint' = proto.Field( + proto.MESSAGE, + number=5, + oneof='constraint_type', + message='AnalyzerOrgPolicyConstraint.Constraint.ListConstraint', + ) + boolean_constraint: 'AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint' = proto.Field( + proto.MESSAGE, + number=6, + oneof='constraint_type', + message='AnalyzerOrgPolicyConstraint.Constraint.BooleanConstraint', + ) + + class CustomConstraint(proto.Message): + r"""The definition of a custom constraint. + + Attributes: + name (str): + Name of the constraint. This is unique within the + organization. Format of the name should be + + - ``organizations/{organization_id}/customConstraints/{custom_constraint_id}`` + + Example : + "organizations/123/customConstraints/custom.createOnlyE2TypeVms". + resource_types (MutableSequence[str]): + The Resource Instance type on which this policy applies to. + Format will be of the form : "/" Example: + + - ``compute.googleapis.com/Instance``. + method_types (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType]): + All the operations being applied for this + constraint. + condition (str): + Organization Policy condition/expression. For example: + ``resource.instanceName.matches("[production|test]_.*_(\d)+")'`` + or, ``resource.management.auto_upgrade == true`` + action_type (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType): + Allow or deny type. + display_name (str): + One line display name for the UI. 
+ description (str): + Detailed information about this custom policy + constraint. + """ + class MethodType(proto.Enum): + r"""The operation in which this constraint will be applied. For example: + If the constraint applies only when create VMs, the method_types + will be "CREATE" only. If the constraint applied when create or + delete VMs, the method_types will be "CREATE" and "DELETE". + + Values: + METHOD_TYPE_UNSPECIFIED (0): + Unspecified. Will results in user error. + CREATE (1): + Constraint applied when creating the + resource. + UPDATE (2): + Constraint applied when updating the + resource. + DELETE (3): + Constraint applied when deleting the + resource. + """ + METHOD_TYPE_UNSPECIFIED = 0 + CREATE = 1 + UPDATE = 2 + DELETE = 3 + + class ActionType(proto.Enum): + r"""Allow or deny type. + + Values: + ACTION_TYPE_UNSPECIFIED (0): + Unspecified. Will results in user error. + ALLOW (1): + Allowed action type. + DENY (2): + Deny action type. + """ + ACTION_TYPE_UNSPECIFIED = 0 + ALLOW = 1 + DENY = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + resource_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + method_types: MutableSequence['AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType'] = proto.RepeatedField( + proto.ENUM, + number=3, + enum='AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType', + ) + condition: str = proto.Field( + proto.STRING, + number=4, + ) + action_type: 'AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType' = proto.Field( + proto.ENUM, + number=5, + enum='AnalyzerOrgPolicyConstraint.CustomConstraint.ActionType', + ) + display_name: str = proto.Field( + proto.STRING, + number=6, + ) + description: str = proto.Field( + proto.STRING, + number=7, + ) + + google_defined_constraint: Constraint = proto.Field( + proto.MESSAGE, + number=1, + oneof='constraint_definition', + message=Constraint, + ) + custom_constraint: CustomConstraint = proto.Field( + proto.MESSAGE, + number=2, + 
oneof='constraint_definition', + message=CustomConstraint, + ) + + +class AnalyzeOrgPoliciesRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + constraint (str): + Required. The name of the constraint to + analyze organization policies for. The response + only contains analyzed organization policies for + the provided constraint. + filter (str): + The expression to filter + [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. + The only supported field is + ``consolidated_policy.attached_resource``, and the only + supported operator is ``=``. + + Example: + consolidated_policy.attached_resource="//cloudresourcemanager.googleapis.com/folders/001" + will return the org policy results of"folders/001". + page_size (int): + The maximum number of items to return per page. If + unspecified, + [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results] + will contain 20 items with a maximum of 200. + + This field is a member of `oneof`_ ``_page_size``. + page_token (str): + The pagination token to retrieve the next + page. 
+ """ + + scope: str = proto.Field( + proto.STRING, + number=1, + ) + constraint: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AnalyzeOrgPoliciesResponse(proto.Message): + r"""The response message for + [AssetService.AnalyzeOrgPolicies][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies]. + + Attributes: + org_policy_results (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse.OrgPolicyResult]): + The organization policies under the + [AnalyzeOrgPoliciesRequest.scope][google.cloud.asset.v1.AnalyzeOrgPoliciesRequest.scope] + with the + [AnalyzeOrgPoliciesRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPoliciesRequest.constraint]. + constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint): + The definition of the constraint in the + request. + next_page_token (str): + The page token to fetch the next page for + [AnalyzeOrgPoliciesResponse.org_policy_results][google.cloud.asset.v1.AnalyzeOrgPoliciesResponse.org_policy_results]. + """ + + class OrgPolicyResult(proto.Message): + r"""The organization policy result to the query. + + Attributes: + consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy): + The consolidated organization policy for the analyzed + resource. The consolidated organization policy is computed + by merging and evaluating + [AnalyzeOrgPoliciesResponse.policy_bundle][]. The evaluation + will respect the organization policy `hierarchy + rules `__. + policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): + The ordered list of all organization policies from the + [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]. + to the scope specified in the request. 
+ + If the constraint is defined with default policy, it will + also appear in the list. + """ + + consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + proto.MESSAGE, + number=1, + message='AnalyzerOrgPolicy', + ) + policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicy', + ) + + @property + def raw_page(self): + return self + + org_policy_results: MutableSequence[OrgPolicyResult] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=OrgPolicyResult, + ) + constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicyConstraint', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AnalyzeOrgPolicyGovernedContainersRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. The + output containers will also be limited to the ones governed + by those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + constraint (str): + Required. The name of the constraint to + analyze governed containers for. The analysis + only contains organization policies for the + provided constraint. + filter (str): + The expression to filter the governed containers in result. + The only supported field is ``parent``, and the only + supported operator is ``=``. + + Example: + parent="//cloudresourcemanager.googleapis.com/folders/001" + will return all containers under "folders/001". + page_size (int): + The maximum number of items to return per page. 
If + unspecified, + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers] + will contain 100 items with a maximum of 200. + + This field is a member of `oneof`_ ``_page_size``. + page_token (str): + The pagination token to retrieve the next + page. + """ + + scope: str = proto.Field( + proto.STRING, + number=1, + ) + constraint: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AnalyzeOrgPolicyGovernedContainersResponse(proto.Message): + r"""The response message for + [AssetService.AnalyzeOrgPolicyGovernedContainers][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers]. + + Attributes: + governed_containers (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]): + The list of the analyzed governed containers. + constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint): + The definition of the constraint in the + request. + next_page_token (str): + The page token to fetch the next page for + [AnalyzeOrgPolicyGovernedContainersResponse.governed_containers][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.governed_containers]. + """ + + class GovernedContainer(proto.Message): + r"""The organization/folder/project resource governed by organization + policies of + [AnalyzeOrgPolicyGovernedContainersRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersRequest.constraint]. + + Attributes: + full_resource_name (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of an organization/folder/project resource. 
+ parent (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of the parent of + [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.full_resource_name]. + consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy): + The consolidated organization policy for the analyzed + resource. The consolidated organization policy is computed + by merging and evaluating + [AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.policy_bundle][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer.policy_bundle]. + The evaluation will respect the organization policy + `hierarchy + rules `__. + policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): + The ordered list of all organization policies from the + [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][]. + to the scope specified in the request. + + If the constraint is defined with default policy, it will + also appear in the list. 
+ """ + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent: str = proto.Field( + proto.STRING, + number=2, + ) + consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + proto.MESSAGE, + number=3, + message='AnalyzerOrgPolicy', + ) + policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzerOrgPolicy', + ) + + @property + def raw_page(self): + return self + + governed_containers: MutableSequence[GovernedContainer] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=GovernedContainer, + ) + constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicyConstraint', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class AnalyzeOrgPolicyGovernedAssetsRequest(proto.Message): + r"""A request message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + scope (str): + Required. The organization to scope the request. Only + organization policies within the scope will be analyzed. The + output assets will also be limited to the ones governed by + those in-scope organization policies. + + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") + constraint (str): + Required. The name of the constraint to + analyze governed assets for. The analysis only + contains analyzed organization policies for the + provided constraint. + filter (str): + The expression to filter the governed assets in result. The + only supported fields for governed resources are + ``governed_resource.project`` and + ``governed_resource.folders``. The only supported fields for + governed iam policies are ``governed_iam_policy.project`` + and ``governed_iam_policy.folders``. 
The only supported + operator is ``=``. + + Example 1: governed_resource.project="projects/12345678" + filter will return all governed resources under + projects/12345678 including the project ifself, if + applicable. + + Example 2: governed_iam_policy.folders="folders/12345678" + filter will return all governed iam policies under + folders/12345678, if applicable. + page_size (int): + The maximum number of items to return per page. If + unspecified, + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets] + will contain 100 items with a maximum of 200. + + This field is a member of `oneof`_ ``_page_size``. + page_token (str): + The pagination token to retrieve the next + page. + """ + + scope: str = proto.Field( + proto.STRING, + number=1, + ) + constraint: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AnalyzeOrgPolicyGovernedAssetsResponse(proto.Message): + r"""The response message for + [AssetService.AnalyzeOrgPolicyGovernedAssets][google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets]. + + Attributes: + governed_assets (MutableSequence[google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]): + The list of the analyzed governed assets. + constraint (google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint): + The definition of the constraint in the + request. + next_page_token (str): + The page token to fetch the next page for + [AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.governed_assets]. 
+ """ + + class GovernedResource(proto.Message): + r"""The Google Cloud resources governed by the organization policies of + the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + Attributes: + full_resource_name (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of the Google Cloud resource. + parent (str): + The [full resource name] + (https://cloud.google.com/asset-inventory/docs/resource-name-format) + of the parent of + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource.full_resource_name]. + project (str): + The project that this resource belongs to, in the format of + projects/{PROJECT_NUMBER}. This field is available when the + resource belongs to a project. + folders (MutableSequence[str]): + The folder(s) that this resource belongs to, in the format + of folders/{FOLDER_NUMBER}. This field is available when the + resource belongs (directly or cascadingly) to one or more + folders. + organization (str): + The organization that this resource belongs to, in the + format of organizations/{ORGANIZATION_NUMBER}. This field is + available when the resource belongs (directly or + cascadingly) to an organization. + """ + + full_resource_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent: str = proto.Field( + proto.STRING, + number=2, + ) + project: str = proto.Field( + proto.STRING, + number=5, + ) + folders: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + organization: str = proto.Field( + proto.STRING, + number=7, + ) + + class GovernedIamPolicy(proto.Message): + r"""The IAM policies governed by the organization policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. 
+ + Attributes: + attached_resource (str): + The full resource name of the resource associated with this + IAM policy. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1``. + See `Cloud Asset Inventory Resource Name + Format `__ + for more information. + policy (google.iam.v1.policy_pb2.Policy): + The IAM policy directly set on the given + resource. + project (str): + The project that this IAM policy belongs to, in the format + of projects/{PROJECT_NUMBER}. This field is available when + the IAM policy belongs to a project. + folders (MutableSequence[str]): + The folder(s) that this IAM policy belongs to, in the format + of folders/{FOLDER_NUMBER}. This field is available when the + IAM policy belongs (directly or cascadingly) to one or more + folders. + organization (str): + The organization that this IAM policy belongs to, in the + format of organizations/{ORGANIZATION_NUMBER}. This field is + available when the IAM policy belongs (directly or + cascadingly) to an organization. + """ + + attached_resource: str = proto.Field( + proto.STRING, + number=1, + ) + policy: policy_pb2.Policy = proto.Field( + proto.MESSAGE, + number=2, + message=policy_pb2.Policy, + ) + project: str = proto.Field( + proto.STRING, + number=5, + ) + folders: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) + organization: str = proto.Field( + proto.STRING, + number=7, + ) + + class GovernedAsset(proto.Message): + r"""Represents a Google Cloud asset(resource or IAM policy) governed by + the organization policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + governed_resource (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource): + A Google Cloud resource governed by the organization + policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + This field is a member of `oneof`_ ``governed_asset``. + governed_iam_policy (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy): + An IAM policy governed by the organization policies of the + [AnalyzeOrgPolicyGovernedAssetsRequest.constraint][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsRequest.constraint]. + + This field is a member of `oneof`_ ``governed_asset``. + consolidated_policy (google.cloud.asset_v1.types.AnalyzerOrgPolicy): + The consolidated policy for the analyzed asset. The + consolidated policy is computed by merging and evaluating + [AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.policy_bundle][google.cloud.asset.v1.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset.policy_bundle]. + The evaluation will respect the organization policy + `hierarchy + rules `__. + policy_bundle (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicy]): + The ordered list of all organization policies from the + [AnalyzeOrgPoliciesResponse.OrgPolicyResult.consolidated_policy.attached_resource][] + to the scope specified in the request. + + If the constraint is defined with default policy, it will + also appear in the list. 
+ """ + + governed_resource: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource' = proto.Field( + proto.MESSAGE, + number=1, + oneof='governed_asset', + message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedResource', + ) + governed_iam_policy: 'AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy' = proto.Field( + proto.MESSAGE, + number=2, + oneof='governed_asset', + message='AnalyzeOrgPolicyGovernedAssetsResponse.GovernedIamPolicy', + ) + consolidated_policy: 'AnalyzerOrgPolicy' = proto.Field( + proto.MESSAGE, + number=3, + message='AnalyzerOrgPolicy', + ) + policy_bundle: MutableSequence['AnalyzerOrgPolicy'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzerOrgPolicy', + ) + + @property + def raw_page(self): + return self + + governed_assets: MutableSequence[GovernedAsset] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=GovernedAsset, + ) + constraint: 'AnalyzerOrgPolicyConstraint' = proto.Field( + proto.MESSAGE, + number=2, + message='AnalyzerOrgPolicyConstraint', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index fca415244b79..41a578401e64 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -37,7 +37,14 @@ 'TimeWindow', 'Asset', 'Resource', + 'RelatedAssets', + 'RelationshipAttributes', + 'RelatedAsset', 'ResourceSearchResult', + 'VersionedResource', + 'AttachedResource', + 'RelatedResources', + 'RelatedResource', 'IamPolicySearchResult', 'IamPolicyAnalysisState', 'ConditionEvaluation', @@ -143,8 +150,8 @@ class Asset(proto.Message): hierarchy `__, a resource outside the 
Google Cloud resource hierarchy (such as Google Kubernetes Engine clusters and objects), or a policy (e.g. - Cloud IAM policy), or a relationship (e.g. an - INSTANCE_TO_INSTANCEGROUP relationship). See `Supported asset + IAM policy), or a relationship (e.g. an INSTANCE_TO_INSTANCEGROUP + relationship). See `Supported asset types `__ for more information. @@ -176,16 +183,15 @@ class Asset(proto.Message): resource (google.cloud.asset_v1.types.Resource): A representation of the resource. iam_policy (google.iam.v1.policy_pb2.Policy): - A representation of the Cloud IAM policy set on a Google - Cloud resource. There can be a maximum of one Cloud IAM - policy set on any given resource. In addition, Cloud IAM - policies inherit their granted access scope from any - policies set on parent resources in the resource hierarchy. - Therefore, the effectively policy is the union of both the - policy set on this resource and each policy set on all of - the resource's ancestry resource levels in the hierarchy. - See `this - topic `__ + A representation of the IAM policy set on a Google Cloud + resource. There can be a maximum of one IAM policy set on + any given resource. In addition, IAM policies inherit their + granted access scope from any policies set on parent + resources in the resource hierarchy. Therefore, the + effectively policy is the union of both the policy set on + this resource and each policy set on all of the resource's + ancestry resource levels in the hierarchy. See `this + topic `__ for more information. org_policy (MutableSequence[google.cloud.orgpolicy.v1.orgpolicy_pb2.Policy]): A representation of an `organization @@ -212,6 +218,15 @@ class Asset(proto.Message): `this topic `__ for more information. + related_assets (google.cloud.asset_v1.types.RelatedAssets): + DEPRECATED. This field only presents for the + purpose of backward-compatibility. The server + will never generate responses with this field. 
+ The related assets of the asset of one + relationship type. One asset only represents one + type of relationship. + related_asset (google.cloud.asset_v1.types.RelatedAsset): + One related asset of the current asset. ancestors (MutableSequence[str]): The ancestry path of an asset in Google Cloud `resource hierarchy `__, @@ -276,6 +291,16 @@ class Asset(proto.Message): number=12, message=inventory_pb2.Inventory, ) + related_assets: 'RelatedAssets' = proto.Field( + proto.MESSAGE, + number=13, + message='RelatedAssets', + ) + related_asset: 'RelatedAsset' = proto.Field( + proto.MESSAGE, + number=15, + message='RelatedAsset', + ) ancestors: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=10, @@ -314,7 +339,7 @@ class Resource(proto.Message): for more information. For Google Cloud assets, this value is the parent resource - defined in the `Cloud IAM policy + defined in the `IAM policy hierarchy `__. Example: ``//cloudresourcemanager.googleapis.com/projects/my_project_123`` @@ -362,9 +387,131 @@ class Resource(proto.Message): ) +class RelatedAssets(proto.Message): + r"""DEPRECATED. This message only presents for the purpose of + backward-compatibility. The server will never populate this message + in responses. The detailed related assets with the + ``relationship_type``. + + Attributes: + relationship_attributes (google.cloud.asset_v1.types.RelationshipAttributes): + The detailed relationship attributes. + assets (MutableSequence[google.cloud.asset_v1.types.RelatedAsset]): + The peer resources of the relationship. + """ + + relationship_attributes: 'RelationshipAttributes' = proto.Field( + proto.MESSAGE, + number=1, + message='RelationshipAttributes', + ) + assets: MutableSequence['RelatedAsset'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RelatedAsset', + ) + + +class RelationshipAttributes(proto.Message): + r"""DEPRECATED. This message only presents for the purpose of + backward-compatibility. 
The server will never populate this message + in responses. The relationship attributes which include ``type``, + ``source_resource_type``, ``target_resource_type`` and ``action``. + + Attributes: + type_ (str): + The unique identifier of the relationship type. Example: + ``INSTANCE_TO_INSTANCEGROUP`` + source_resource_type (str): + The source asset type. Example: + ``compute.googleapis.com/Instance`` + target_resource_type (str): + The target asset type. Example: + ``compute.googleapis.com/Disk`` + action (str): + The detail of the relationship, e.g. ``contains``, + ``attaches`` + """ + + type_: str = proto.Field( + proto.STRING, + number=4, + ) + source_resource_type: str = proto.Field( + proto.STRING, + number=1, + ) + target_resource_type: str = proto.Field( + proto.STRING, + number=2, + ) + action: str = proto.Field( + proto.STRING, + number=3, + ) + + +class RelatedAsset(proto.Message): + r"""An asset identifier in Google Cloud which contains its name, type + and ancestors. An asset can be any resource in the Google Cloud + `resource + hierarchy `__, + a resource outside the Google Cloud resource hierarchy (such as + Google Kubernetes Engine clusters and objects), or a policy (e.g. + IAM policy). See `Supported asset + types `__ + for more information. + + Attributes: + asset (str): + The full name of the asset. Example: + ``//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`` + + See `Resource + names `__ + for more information. + asset_type (str): + The type of the asset. Example: + ``compute.googleapis.com/Disk`` + + See `Supported asset + types `__ + for more information. + ancestors (MutableSequence[str]): + The ancestors of an asset in Google Cloud `resource + hierarchy `__, + represented as a list of relative resource names. An + ancestry path starts with the closest ancestor in the + hierarchy and ends at root. 
+ + Example: + ``["projects/123456789", "folders/5432", "organizations/1234"]`` + relationship_type (str): + The unique identifier of the relationship type. Example: + ``INSTANCE_TO_INSTANCEGROUP`` + """ + + asset: str = proto.Field( + proto.STRING, + number=1, + ) + asset_type: str = proto.Field( + proto.STRING, + number=2, + ) + ancestors: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + relationship_type: str = proto.Field( + proto.STRING, + number=4, + ) + + class ResourceSearchResult(proto.Message): r"""A result of Resource Search, containing information of a - cloud resource. + cloud resource. Next ID: 32 Attributes: name (str): @@ -376,15 +523,15 @@ class ResourceSearchResult(proto.Message): To search against the ``name``: - - use a field query. Example: ``name:instance1`` - - use a free text query. Example: ``instance1`` + - Use a field query. Example: ``name:instance1`` + - Use a free text query. Example: ``instance1`` asset_type (str): The type of this resource. Example: ``compute.googleapis.com/Disk``. To search against the ``asset_type``: - - specify the ``asset_type`` field in your search request. + - Specify the ``asset_type`` field in your search request. project (str): The project that this resource belongs to, in the form of projects/{PROJECT_NUMBER}. This field is available when the @@ -392,9 +539,9 @@ class ResourceSearchResult(proto.Message): To search against ``project``: - - use a field query. Example: ``project:12345`` - - use a free text query. Example: ``12345`` - - specify the ``scope`` field as this project in your + - Use a field query. Example: ``project:12345`` + - Use a free text query. Example: ``12345`` + - Specify the ``scope`` field as this project in your search request. folders (MutableSequence[str]): The folder(s) that this resource belongs to, in the form of @@ -403,9 +550,9 @@ class ResourceSearchResult(proto.Message): To search against ``folders``: - - use a field query. 
Example: ``folders:(123 OR 456)`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this folder in your search + - Use a field query. Example: ``folders:(123 OR 456)`` + - Use a free text query. Example: ``123`` + - Specify the ``scope`` field as this folder in your search request. organization (str): The organization that this resource belongs to, in the form @@ -414,47 +561,47 @@ class ResourceSearchResult(proto.Message): To search against ``organization``: - - use a field query. Example: ``organization:123`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this organization in your + - Use a field query. Example: ``organization:123`` + - Use a free text query. Example: ``123`` + - Specify the ``scope`` field as this organization in your search request. display_name (str): The display name of this resource. This field is available - only when the resource's proto contains it. + only when the resource's Protobuf contains it. To search against the ``display_name``: - - use a field query. Example: ``displayName:"My Instance"`` - - use a free text query. Example: ``"My Instance"`` + - Use a field query. Example: ``displayName:"My Instance"`` + - Use a free text query. Example: ``"My Instance"`` description (str): One or more paragraphs of text description of this resource. Maximum length could be up to 1M bytes. This field is - available only when the resource's proto contains it. + available only when the resource's Protobuf contains it. To search against the ``description``: - - use a field query. Example: + - Use a field query. Example: ``description:"important instance"`` - - use a free text query. Example: ``"important instance"`` + - Use a free text query. Example: ``"important instance"`` location (str): Location can be ``global``, regional like ``us-east1``, or zonal like ``us-west1-b``. This field is available only when - the resource's proto contains it. + the resource's Protobuf contains it. 
To search against the ``location``: - - use a field query. Example: ``location:us-west*`` - - use a free text query. Example: ``us-west*`` + - Use a field query. Example: ``location:us-west*`` + - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): Labels associated with this resource. See `Labelling and - grouping GCP + grouping Google Cloud resources `__ for more information. This field is available only when the - resource's proto contains it. + resource's Protobuf contains it. To search against the ``labels``: - - use a field query: + - Use a field query: - query on any label's key or value. Example: ``labels:prod`` @@ -462,40 +609,59 @@ class ResourceSearchResult(proto.Message): - query by a given label's existence. Example: ``labels.env:*`` - - use a free text query. Example: ``prod`` + - Use a free text query. Example: ``prod`` network_tags (MutableSequence[str]): Network tags associated with this resource. Like labels, - network tags are a type of annotations used to group GCP - resources. See `Labelling GCP + network tags are a type of annotations used to group Google + Cloud resources. See `Labelling Google Cloud resources `__ for more information. This field is available only when the - resource's proto contains it. + resource's Protobuf contains it. To search against the ``network_tags``: - - use a field query. Example: ``networkTags:internal`` - - use a free text query. Example: ``internal`` + - Use a field query. Example: ``networkTags:internal`` + - Use a free text query. Example: ``internal`` kms_key (str): The Cloud KMS - `CryptoKey `__ + `CryptoKey `__ name or - `CryptoKeyVersion `__ - name. This field is available only when the resource's proto - contains it. + `CryptoKeyVersion `__ + name. + + This field only presents for the purpose of backward + compatibility. Please use the ``kms_keys`` field to retrieve + Cloud KMS key information. 
This field is available only when + the resource's Protobuf contains it and will only be + populated for `these resource + types `__ + for backward compatible purposes. To search against the ``kms_key``: - - use a field query. Example: ``kmsKey:key`` - - use a free text query. Example: ``key`` + - Use a field query. Example: ``kmsKey:key`` + - Use a free text query. Example: ``key`` + kms_keys (MutableSequence[str]): + The Cloud KMS + `CryptoKey `__ + names or + `CryptoKeyVersion `__ + names. This field is available only when the resource's + Protobuf contains it. + + To search against the ``kms_keys``: + + - Use a field query. Example: ``kmsKeys:key`` + - Use a free text query. Example: ``key`` create_time (google.protobuf.timestamp_pb2.Timestamp): The create timestamp of this resource, at which the resource was created. The granularity is in seconds. Timestamp.nanos will always be 0. This field is available only when the - resource's proto contains it. + resource's Protobuf contains it. To search against ``create_time``: - - use a field query. + - Use a field query. - value in seconds since unix epoch. Example: ``createTime > 1609459200`` @@ -507,11 +673,11 @@ class ResourceSearchResult(proto.Message): The last update timestamp of this resource, at which the resource was last modified or deleted. The granularity is in seconds. Timestamp.nanos will always be 0. This field is - available only when the resource's proto contains it. + available only when the resource's Protobuf contains it. To search against ``update_time``: - - use a field query. + - Use a field query. - value in seconds since unix epoch. Example: ``updateTime < 1609459200`` @@ -523,42 +689,44 @@ class ResourceSearchResult(proto.Message): The state of this resource. Different resources types have different state definitions that are mapped from various fields of different resource types. This field is available - only when the resource's proto contains it. 
+ only when the resource's Protobuf contains it. Example: If the resource is an instance provided by Compute Engine, its state will include PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. See ``status`` definition in `API Reference `__. - If the resource is a project provided by Cloud Resource - Manager, its state will include LIFECYCLE_STATE_UNSPECIFIED, - ACTIVE, DELETE_REQUESTED and DELETE_IN_PROGRESS. See + If the resource is a project provided by Resource Manager, + its state will include LIFECYCLE_STATE_UNSPECIFIED, ACTIVE, + DELETE_REQUESTED and DELETE_IN_PROGRESS. See ``lifecycleState`` definition in `API Reference `__. To search against the ``state``: - - use a field query. Example: ``state:RUNNING`` - - use a free text query. Example: ``RUNNING`` + - Use a field query. Example: ``state:RUNNING`` + - Use a free text query. Example: ``RUNNING`` additional_attributes (google.protobuf.struct_pb2.Struct): The additional searchable attributes of this resource. The attributes may vary from one resource type to another. Examples: ``projectId`` for Project, ``dnsName`` for DNS ManagedZone. This field contains a subset of the resource metadata fields that are returned by the List or Get APIs - provided by the corresponding GCP service (e.g., Compute - Engine). see `API references and supported searchable + provided by the corresponding Google Cloud service (e.g., + Compute Engine). see `API references and supported + searchable attributes `__ to see which fields are included. You can search values of these fields through free text search. However, you should not consume the field programically as the field names and values may change as - the GCP service updates to a new incompatible API version. + the Google Cloud service updates to a new incompatible API + version. To search against the ``additional_attributes``: - - use a free text query to match the attributes values. 
+ - Use a free text query to match the attributes values. Example: to search ``additional_attributes = { dnsName: "foobar" }``, you can issue a query ``foobar``. @@ -566,18 +734,85 @@ class ResourceSearchResult(proto.Message): The full resource name of this resource's parent, if it has one. To search against the ``parent_full_resource_name``: - - use a field query. Example: + - Use a field query. Example: ``parentFullResourceName:"project-name"`` - - use a free text query. Example: ``project-name`` + - Use a free text query. Example: ``project-name`` + versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): + Versioned resource representations of this resource. This is + repeated because there could be multiple versions of + resource representations during version migration. + + This ``versioned_resources`` field is not searchable. Some + attributes of the resource representations are exposed in + ``additional_attributes`` field, so as to allow users to + search on them. + attached_resources (MutableSequence[google.cloud.asset_v1.types.AttachedResource]): + Attached resources of this resource. For example, an + OSConfig Inventory is an attached resource of a Compute + Instance. This field is repeated because a resource could + have multiple attached resources. + + This ``attached_resources`` field is not searchable. Some + attributes of the attached resources are exposed in + ``additional_attributes`` field, so as to allow users to + search on them. + relationships (MutableMapping[str, google.cloud.asset_v1.types.RelatedResources]): + A map of related resources of this resource, keyed by the + relationship type. A relationship type is in the format of + {SourceType}*{ACTION}*\ {DestType}. Example: + ``DISK_TO_INSTANCE``, ``DISK_TO_NETWORK``, + ``INSTANCE_TO_INSTANCEGROUP``. See `supported relationship + types `__. + tag_keys (MutableSequence[str]): + TagKey namespaced names, in the format of + {ORG_ID}/{TAG_KEY_SHORT_NAME}. 
To search against the + ``tagKeys``: + + - Use a field query. Example: + + - ``tagKeys:"123456789/env*"`` + - ``tagKeys="123456789/env"`` + - ``tagKeys:"env"`` + + - Use a free text query. Example: + + - ``env`` + tag_values (MutableSequence[str]): + TagValue namespaced names, in the format of + {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. To + search against the ``tagValues``: + + - Use a field query. Example: + + - ``tagValues:"env"`` + - ``tagValues:"env/prod"`` + - ``tagValues:"123456789/env/prod*"`` + - ``tagValues="123456789/env/prod"`` + + - Use a free text query. Example: + + - ``prod`` + tag_value_ids (MutableSequence[str]): + TagValue IDs, in the format of tagValues/{TAG_VALUE_ID}. To + search against the ``tagValueIds``: + + - Use a field query. Example: + + - ``tagValueIds:"456"`` + - ``tagValueIds="tagValues/456"`` + + - Use a free text query. Example: + + - ``456`` parent_asset_type (str): The type of this resource's immediate parent, if there is one. To search against the ``parent_asset_type``: - - use a field query. Example: + - Use a field query. Example: ``parentAssetType:"cloudresourcemanager.googleapis.com/Project"`` - - use a free text query. Example: + - Use a free text query. 
Example: ``cloudresourcemanager.googleapis.com/Project`` """ @@ -626,6 +861,10 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=10, ) + kms_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=28, + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=11, @@ -649,12 +888,148 @@ class ResourceSearchResult(proto.Message): proto.STRING, number=19, ) + versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message='VersionedResource', + ) + attached_resources: MutableSequence['AttachedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message='AttachedResource', + ) + relationships: MutableMapping[str, 'RelatedResources'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=21, + message='RelatedResources', + ) + tag_keys: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=23, + ) + tag_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=25, + ) + tag_value_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=26, + ) parent_asset_type: str = proto.Field( proto.STRING, number=103, ) +class VersionedResource(proto.Message): + r"""Resource representation as defined by the corresponding + service providing the resource for a given API version. + + Attributes: + version (str): + API version of the resource. + + Example: If the resource is an instance provided by Compute + Engine v1 API as defined in + ``https://cloud.google.com/compute/docs/reference/rest/v1/instances``, + version will be "v1". + resource (google.protobuf.struct_pb2.Struct): + JSON representation of the resource as defined by the + corresponding service providing this resource. 
+ + Example: If the resource is an instance provided by Compute + Engine, this field will contain the JSON representation of + the instance as defined by Compute Engine: + ``https://cloud.google.com/compute/docs/reference/rest/v1/instances``. + + You can find the resource definition for each supported + resource type in this table: + ``https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types`` + """ + + version: str = proto.Field( + proto.STRING, + number=1, + ) + resource: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + + +class AttachedResource(proto.Message): + r"""Attached resource representation, which is defined by the + corresponding service provider. It represents an attached + resource's payload. + + Attributes: + asset_type (str): + The type of this attached resource. + + Example: ``osconfig.googleapis.com/Inventory`` + + You can find the supported attached asset types of each + resource in this table: + ``https://cloud.google.com/asset-inventory/docs/supported-asset-types#searchable_asset_types`` + versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): + Versioned resource representations of this + attached resource. This is repeated because + there could be multiple versions of the attached + resource representations during version + migration. + """ + + asset_type: str = proto.Field( + proto.STRING, + number=1, + ) + versioned_resources: MutableSequence['VersionedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='VersionedResource', + ) + + +class RelatedResources(proto.Message): + r"""The related resources of the primary resource. + + Attributes: + related_resources (MutableSequence[google.cloud.asset_v1.types.RelatedResource]): + The detailed related resources of the primary + resource. 
+ """ + + related_resources: MutableSequence['RelatedResource'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='RelatedResource', + ) + + +class RelatedResource(proto.Message): + r"""The detailed related resource. + + Attributes: + asset_type (str): + The type of the asset. Example: + ``compute.googleapis.com/Instance`` + full_resource_name (str): + The full resource name of the related resource. Example: + ``//compute.googleapis.com/projects/my_proj_123/zones/instance/instance123`` + """ + + asset_type: str = proto.Field( + proto.STRING, + number=1, + ) + full_resource_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class IamPolicySearchResult(proto.Message): r"""A result of IAM Policy search, containing information of an IAM policy. @@ -680,12 +1055,12 @@ class IamPolicySearchResult(proto.Message): - specify the ``asset_types`` field in your search request. project (str): - The project that the associated GCP resource belongs to, in - the form of projects/{PROJECT_NUMBER}. If an IAM policy is - set on a resource (like VM instance, Cloud Storage bucket), - the project field will indicate the project that contains - the resource. If an IAM policy is set on a folder or - orgnization, this field will be empty. + The project that the associated Google Cloud resource + belongs to, in the form of projects/{PROJECT_NUMBER}. If an + IAM policy is set on a resource (like VM instance, Cloud + Storage bucket), the project field will indicate the project + that contains the resource. If an IAM policy is set on a + folder or orgnization, this field will be empty. To search against the ``project``: @@ -884,7 +1259,7 @@ class IamPolicyAnalysisResult(proto.Message): [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] policy attaches. iam_binding (google.iam.v1.policy_pb2.Binding): - The Cloud IAM policy binding under analysis. + The IAM policy binding under analysis. 
access_control_lists (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.AccessControlList]): The access control lists derived from the [iam_binding][google.cloud.asset.v1.IamPolicyAnalysisResult.iam_binding] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py new file mode 100755 index 000000000000..377f2e12b2a8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeMove +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeMove_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = await client.analyze_move(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeMove_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py new file mode 100755 index 000000000000..5ec453c16dc9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeMove +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeMove_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_move(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeMoveRequest( + resource="resource_value", + destination_parent="destination_parent_value", + ) + + # Make the request + response = client.analyze_move(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeMove_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py new file mode 100755 index 000000000000..625f3dec46e1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_org_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPoliciesRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py new file mode 100755 index 000000000000..892f5a15b3a2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for AnalyzeOrgPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_org_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPoliciesRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py new file mode 100755 index 000000000000..81b67efb9d02 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py new file mode 100755 index 000000000000..86aca87da3e7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for AnalyzeOrgPolicyGovernedAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_org_policy_governed_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedAssetsRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_assets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py new file mode 100755 index 000000000000..3a24500e39ca --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 
-*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedContainers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py new file mode 100755 index 000000000000..4aa243c92f3d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for AnalyzeOrgPolicyGovernedContainers +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_analyze_org_policy_governed_containers(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.AnalyzeOrgPolicyGovernedContainersRequest( + scope="scope_value", + constraint="constraint_value", + ) + + # Make the request + page_result = client.analyze_org_policy_governed_containers(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py new file mode 100755 index 000000000000..30ef0c4e5388 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py 
@@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BatchGetEffectiveIamPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = await client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py new file mode 100755 index 000000000000..f64e953a894c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BatchGetEffectiveIamPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_batch_get_effective_iam_policies(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.BatchGetEffectiveIamPoliciesRequest( + scope="scope_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + response = client.batch_get_effective_iam_policies(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py new file mode 100755 index 000000000000..b495af664e8c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_CreateSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = await client.create_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_CreateSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py new file mode 100755 index 000000000000..bdebace2df7c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_CreateSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_create_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.CreateSavedQueryRequest( + parent="parent_value", + saved_query_id="saved_query_id_value", + ) + + # Make the request + response = client.create_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_CreateSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py new file mode 100755 index 000000000000..40cdf7f6ffa9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_DeleteSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + await client.delete_saved_query(request=request) + + +# [END cloudasset_v1_generated_AssetService_DeleteSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py new file mode 100755 index 000000000000..29d2ed16f12f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_delete_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.DeleteSavedQueryRequest( + name="name_value", + ) + + # Make the request + client.delete_saved_query(request=request) + + +# [END cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py new file mode 100755 index 000000000000..7f2185105f81 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_GetSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_GetSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py new file mode 100755 index 000000000000..947c761d88b6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_GetSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_get_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.GetSavedQueryRequest( + name="name_value", + ) + + # Make the request + response = client.get_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_GetSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py new file mode 100755 index 000000000000..c4be56a4f015 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSavedQueries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_ListSavedQueries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import asset_v1
+
+
+async def sample_list_saved_queries():
+    # Create a client
+    client = asset_v1.AssetServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = asset_v1.ListSavedQueriesRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_saved_queries(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END cloudasset_v1_generated_AssetService_ListSavedQueries_async]
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py
new file mode 100755
index 000000000000..9825502a6044
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSavedQueries
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_ListSavedQueries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_list_saved_queries(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.ListSavedQueriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_saved_queries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END cloudasset_v1_generated_AssetService_ListSavedQueries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py new file mode 100755 index 000000000000..bd68125cafa6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_QueryAssets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = await client.query_assets(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_QueryAssets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py new file mode 100755 index 000000000000..e64414fdbcc0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for QueryAssets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_QueryAssets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_query_assets(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.QueryAssetsRequest( + statement="statement_value", + parent="parent_value", + ) + + # Make the request + response = client.query_assets(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_QueryAssets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py new file mode 100755 index 000000000000..56aaab9fdc4a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +async def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceAsyncClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = await client.update_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_async] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py new file mode 100755 index 000000000000..ef6a45d6374a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSavedQuery +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-asset + + +# [START cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import asset_v1 + + +def sample_update_saved_query(): + # Create a client + client = asset_v1.AssetServiceClient() + + # Initialize request argument(s) + request = asset_v1.UpdateSavedQueryRequest( + ) + + # Make the request + response = client.update_saved_query(request=request) + + # Handle the response + print(response) + +# [END cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5db3da586776..5a90dfa88b31 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -317,6 +317,690 @@ ], "title": "cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_move", + "method": { + "fullName": 
"google.cloud.asset.v1.AssetService.AnalyzeMove", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeMove" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", + "shortName": "analyze_move" + }, + "description": "Sample for AnalyzeMove", + "file": "cloudasset_v1_generated_asset_service_analyze_move_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeMove_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_move_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_move", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeMove", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeMove" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeMoveRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", + "shortName": "analyze_move" + }, + "description": "Sample for AnalyzeMove", + "file": "cloudasset_v1_generated_asset_service_analyze_move_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeMove_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_move_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policies", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager", + "shortName": "analyze_org_policies" + }, + "description": "Sample for 
AnalyzeOrgPolicies", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPoliciesRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager", + "shortName": "analyze_org_policies" + }, + "description": "Sample for AnalyzeOrgPolicies", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudasset_v1_generated_AssetService_AnalyzeOrgPolicies_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policy_governed_assets", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager", + "shortName": "analyze_org_policy_governed_assets" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedAssets", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedAssets", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedAssets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager", + "shortName": "analyze_org_policy_governed_assets" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedAssets", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedAssets_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.analyze_org_policy_governed_containers", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedContainers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager", + "shortName": "analyze_org_policy_governed_containers" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedContainers", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.AnalyzeOrgPolicyGovernedContainers", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "AnalyzeOrgPolicyGovernedContainers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersRequest" + }, + { + "name": "scope", + "type": "str" + }, + { + "name": "constraint", + "type": "str" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager", + "shortName": "analyze_org_policy_governed_containers" + }, + "description": "Sample for AnalyzeOrgPolicyGovernedContainers", + "file": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudasset_v1_generated_AssetService_AnalyzeOrgPolicyGovernedContainers_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py" + }, { "canonical": true, "clientMethod": { @@ -327,17 +1011,971 @@ }, "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_assets_history", "method": { - "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetAssetsHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", + "shortName": "batch_get_assets_history" + }, + "description": "Sample for BatchGetAssetsHistory", + "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetAssetsHistory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", + "shortName": "batch_get_assets_history" + }, + "description": "Sample for BatchGetAssetsHistory", + "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.batch_get_effective_iam_policies", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetEffectiveIamPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", + "shortName": "batch_get_effective_iam_policies" + }, + "description": "Sample for BatchGetEffectiveIamPolicies", + "file": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", + "method": { + "fullName": 
"google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "BatchGetEffectiveIamPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", + "shortName": "batch_get_effective_iam_policies" + }, + "description": "Sample for BatchGetEffectiveIamPolicies", + "file": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_BatchGetEffectiveIamPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateFeed" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.asset_v1.types.CreateFeedRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "create_feed" + }, + "description": "Sample for CreateFeed", + "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_feed_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.create_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateFeed" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateFeedRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.Feed", + "shortName": "create_feed" + }, + "description": "Sample for 
CreateFeed", + "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_feed_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateSavedQueryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "saved_query_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "create_saved_query" + }, + "description": "Sample for CreateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_create_saved_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"cloudasset_v1_generated_AssetService_CreateSavedQuery_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_saved_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.create_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.CreateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "CreateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.CreateSavedQueryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "saved_query_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "create_saved_query" + }, + "description": "Sample for CreateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_create_saved_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_CreateSavedQuery_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + 
"start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_create_saved_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteFeed" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_feed" + }, + "description": "Sample for DeleteFeed", + "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_feed_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", 
+ "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_feed", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteFeed" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_feed" + }, + "description": "Sample for DeleteFeed", + "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_feed_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteSavedQueryRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_saved_query" + }, + "description": "Sample for DeleteSavedQuery", + "file": "cloudasset_v1_generated_asset_service_delete_saved_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteSavedQuery_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_saved_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.DeleteSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "DeleteSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.DeleteSavedQueryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_saved_query" + }, + "description": "Sample for DeleteSavedQuery", + "file": "cloudasset_v1_generated_asset_service_delete_saved_query_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_DeleteSavedQuery_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_delete_saved_query_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.export_assets", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "BatchGetAssetsHistory" + "shortName": "ExportAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + "type": "google.cloud.asset_v1.types.ExportAssetsRequest" }, { "name": "retry", @@ -352,22 +1990,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", - "shortName": "batch_get_assets_history" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_assets" }, - "description": "Sample for BatchGetAssetsHistory", - "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py", + "description": "Sample for ExportAssets", + "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_async", + "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", 
"segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -377,22 +2015,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py" + "title": "cloudasset_v1_generated_asset_service_export_assets_async.py" }, { "canonical": true, @@ -401,19 +2039,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", + "fullName": "google.cloud.asset_v1.AssetServiceClient.export_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.BatchGetAssetsHistory", + "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "BatchGetAssetsHistory" + "shortName": "ExportAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest" + "type": "google.cloud.asset_v1.types.ExportAssetsRequest" }, { "name": "retry", @@ -428,22 +2066,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", - "shortName": "batch_get_assets_history" + "resultType": "google.api_core.operation.Operation", + "shortName": "export_assets" }, - "description": "Sample for BatchGetAssetsHistory", - "file": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py", + "description": "Sample for ExportAssets", + "file": "cloudasset_v1_generated_asset_service_export_assets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"cloudasset_v1_generated_AssetService_BatchGetAssetsHistory_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", "segments": [ { - "end": 51, + "end": 59, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 59, "start": 27, "type": "SHORT" }, @@ -453,22 +2091,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 56, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 60, + "start": 57, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py" + "title": "cloudasset_v1_generated_asset_service_export_assets_sync.py" }, { "canonical": true, @@ -478,22 +2116,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.create_feed", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_feed", "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetFeed", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "CreateFeed" + "shortName": "GetFeed" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.CreateFeedRequest" + "type": "google.cloud.asset_v1.types.GetFeedRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -510,21 +2148,21 @@ } ], "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "create_feed" + "shortName": "get_feed" }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1_generated_asset_service_create_feed_async.py", + "description": "Sample for GetFeed", + "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"cloudasset_v1_generated_AssetService_CreateFeed_async", + "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_async", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -534,22 +2172,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_create_feed_async.py" + "title": "cloudasset_v1_generated_asset_service_get_feed_async.py" }, { "canonical": true, @@ -558,22 +2196,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.create_feed", + "fullName": "google.cloud.asset_v1.AssetServiceClient.get_feed", "method": { - "fullName": "google.cloud.asset.v1.AssetService.CreateFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetFeed", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "CreateFeed" + "shortName": "GetFeed" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.CreateFeedRequest" + "type": "google.cloud.asset_v1.types.GetFeedRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -590,21 +2228,21 @@ } ], "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "create_feed" + "shortName": "get_feed" }, - "description": "Sample for CreateFeed", - "file": "cloudasset_v1_generated_asset_service_create_feed_sync.py", + "description": "Sample for GetFeed", + "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_CreateFeed_sync", + "regionTag": 
"cloudasset_v1_generated_AssetService_GetFeed_sync", "segments": [ { - "end": 56, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 51, "start": 27, "type": "SHORT" }, @@ -614,22 +2252,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_create_feed_sync.py" + "title": "cloudasset_v1_generated_asset_service_get_feed_sync.py" }, { "canonical": true, @@ -639,19 +2277,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.delete_feed", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_saved_query", "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetSavedQuery", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "DeleteFeed" + "shortName": "GetSavedQuery" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + "type": "google.cloud.asset_v1.types.GetSavedQueryRequest" }, { "name": "name", @@ -670,21 +2308,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_feed" + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "get_saved_query" }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1_generated_asset_service_delete_feed_async.py", + "description": "Sample for GetSavedQuery", + "file": "cloudasset_v1_generated_asset_service_get_saved_query_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_async", + "regionTag": 
"cloudasset_v1_generated_AssetService_GetSavedQuery_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -699,15 +2338,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_delete_feed_async.py" + "title": "cloudasset_v1_generated_asset_service_get_saved_query_async.py" }, { "canonical": true, @@ -716,19 +2357,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.delete_feed", + "fullName": "google.cloud.asset_v1.AssetServiceClient.get_saved_query", "method": { - "fullName": "google.cloud.asset.v1.AssetService.DeleteFeed", + "fullName": "google.cloud.asset.v1.AssetService.GetSavedQuery", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "DeleteFeed" + "shortName": "GetSavedQuery" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.DeleteFeedRequest" + "type": "google.cloud.asset_v1.types.GetSavedQueryRequest" }, { "name": "name", @@ -747,21 +2388,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_feed" + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "get_saved_query" }, - "description": "Sample for DeleteFeed", - "file": "cloudasset_v1_generated_asset_service_delete_feed_sync.py", + "description": "Sample for GetSavedQuery", + "file": "cloudasset_v1_generated_asset_service_get_saved_query_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_DeleteFeed_sync", + "regionTag": "cloudasset_v1_generated_AssetService_GetSavedQuery_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + 
"end": 51, "start": 27, "type": "SHORT" }, @@ -776,15 +2418,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_delete_feed_sync.py" + "title": "cloudasset_v1_generated_asset_service_get_saved_query_sync.py" }, { "canonical": true, @@ -794,19 +2438,23 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.export_assets", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ExportAssets" + "shortName": "ListAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ExportAssetsRequest" + "type": "google.cloud.asset_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -821,22 +2469,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "export_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", + "shortName": "list_assets" }, - "description": "Sample for ExportAssets", - "file": "cloudasset_v1_generated_asset_service_export_assets_async.py", + "description": "Sample for ListAssets", + "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_async", + "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", "segments": [ { - "end": 59, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 
59, + "end": 52, "start": 27, "type": "SHORT" }, @@ -846,22 +2494,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_export_assets_async.py" + "title": "cloudasset_v1_generated_asset_service_list_assets_async.py" }, { "canonical": true, @@ -870,19 +2518,23 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.export_assets", + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ExportAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ExportAssets" + "shortName": "ListAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ExportAssetsRequest" + "type": "google.cloud.asset_v1.types.ListAssetsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -897,22 +2549,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "export_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", + "shortName": "list_assets" }, - "description": "Sample for ExportAssets", - "file": "cloudasset_v1_generated_asset_service_export_assets_sync.py", + "description": "Sample for ListAssets", + "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ExportAssets_sync", + "regionTag": 
"cloudasset_v1_generated_AssetService_ListAssets_sync", "segments": [ { - "end": 59, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 59, + "end": 52, "start": 27, "type": "SHORT" }, @@ -922,22 +2574,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 56, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 60, - "start": 57, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_export_assets_sync.py" + "title": "cloudasset_v1_generated_asset_service_list_assets_sync.py" }, { "canonical": true, @@ -947,22 +2599,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.get_feed", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_feeds", "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetFeed", + "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "GetFeed" + "shortName": "ListFeeds" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.GetFeedRequest" + "type": "google.cloud.asset_v1.types.ListFeedsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -978,14 +2630,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "get_feed" + "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", + "shortName": "list_feeds" }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1_generated_asset_service_get_feed_async.py", + "description": "Sample for ListFeeds", + "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"cloudasset_v1_generated_AssetService_GetFeed_async", + "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", "segments": [ { "end": 51, @@ -1018,7 +2670,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_get_feed_async.py" + "title": "cloudasset_v1_generated_asset_service_list_feeds_async.py" }, { "canonical": true, @@ -1027,22 +2679,22 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.get_feed", + "fullName": "google.cloud.asset_v1.AssetServiceClient.list_feeds", "method": { - "fullName": "google.cloud.asset.v1.AssetService.GetFeed", + "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "GetFeed" + "shortName": "ListFeeds" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.GetFeedRequest" + "type": "google.cloud.asset_v1.types.ListFeedsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -1058,14 +2710,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.Feed", - "shortName": "get_feed" + "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", + "shortName": "list_feeds" }, - "description": "Sample for GetFeed", - "file": "cloudasset_v1_generated_asset_service_get_feed_sync.py", + "description": "Sample for ListFeeds", + "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_GetFeed_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", "segments": [ { "end": 51, @@ -1098,7 +2750,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_get_feed_sync.py" + "title": "cloudasset_v1_generated_asset_service_list_feeds_sync.py" }, { "canonical": 
true, @@ -1108,19 +2760,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_assets", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_saved_queries", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListSavedQueries", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListAssets" + "shortName": "ListSavedQueries" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListAssetsRequest" + "type": "google.cloud.asset_v1.types.ListSavedQueriesRequest" }, { "name": "parent", @@ -1139,14 +2791,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", - "shortName": "list_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager", + "shortName": "list_saved_queries" }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1_generated_asset_service_list_assets_async.py", + "description": "Sample for ListSavedQueries", + "file": "cloudasset_v1_generated_asset_service_list_saved_queries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_async", + "regionTag": "cloudasset_v1_generated_AssetService_ListSavedQueries_async", "segments": [ { "end": 52, @@ -1179,7 +2831,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_assets_async.py" + "title": "cloudasset_v1_generated_asset_service_list_saved_queries_async.py" }, { "canonical": true, @@ -1188,19 +2840,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_assets", + "fullName": 
"google.cloud.asset_v1.AssetServiceClient.list_saved_queries", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListAssets", + "fullName": "google.cloud.asset.v1.AssetService.ListSavedQueries", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListAssets" + "shortName": "ListSavedQueries" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListAssetsRequest" + "type": "google.cloud.asset_v1.types.ListSavedQueriesRequest" }, { "name": "parent", @@ -1219,14 +2871,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", - "shortName": "list_assets" + "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager", + "shortName": "list_saved_queries" }, - "description": "Sample for ListAssets", - "file": "cloudasset_v1_generated_asset_service_list_assets_sync.py", + "description": "Sample for ListSavedQueries", + "file": "cloudasset_v1_generated_asset_service_list_saved_queries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListAssets_sync", + "regionTag": "cloudasset_v1_generated_AssetService_ListSavedQueries_sync", "segments": [ { "end": 52, @@ -1259,7 +2911,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_assets_sync.py" + "title": "cloudasset_v1_generated_asset_service_list_saved_queries_sync.py" }, { "canonical": true, @@ -1269,23 +2921,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", "shortName": "AssetServiceAsyncClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.list_feeds", + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.query_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", + "fullName": "google.cloud.asset.v1.AssetService.QueryAssets", "service": { "fullName": 
"google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListFeeds" + "shortName": "QueryAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.asset_v1.types.QueryAssetsRequest" }, { "name": "retry", @@ -1300,22 +2948,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", - "shortName": "list_feeds" + "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", + "shortName": "query_assets" }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1_generated_asset_service_list_feeds_async.py", + "description": "Sample for QueryAssets", + "file": "cloudasset_v1_generated_asset_service_query_assets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_async", + "regionTag": "cloudasset_v1_generated_AssetService_QueryAssets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1325,22 +2973,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_feeds_async.py" + "title": "cloudasset_v1_generated_asset_service_query_assets_async.py" }, { "canonical": true, @@ -1349,23 +2997,19 @@ "fullName": "google.cloud.asset_v1.AssetServiceClient", "shortName": "AssetServiceClient" }, - "fullName": "google.cloud.asset_v1.AssetServiceClient.list_feeds", + "fullName": "google.cloud.asset_v1.AssetServiceClient.query_assets", "method": { - "fullName": "google.cloud.asset.v1.AssetService.ListFeeds", + "fullName": 
"google.cloud.asset.v1.AssetService.QueryAssets", "service": { "fullName": "google.cloud.asset.v1.AssetService", "shortName": "AssetService" }, - "shortName": "ListFeeds" + "shortName": "QueryAssets" }, "parameters": [ { "name": "request", - "type": "google.cloud.asset_v1.types.ListFeedsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.asset_v1.types.QueryAssetsRequest" }, { "name": "retry", @@ -1380,22 +3024,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", - "shortName": "list_feeds" + "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", + "shortName": "query_assets" }, - "description": "Sample for ListFeeds", - "file": "cloudasset_v1_generated_asset_service_list_feeds_sync.py", + "description": "Sample for QueryAssets", + "file": "cloudasset_v1_generated_asset_service_query_assets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "cloudasset_v1_generated_AssetService_ListFeeds_sync", + "regionTag": "cloudasset_v1_generated_AssetService_QueryAssets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1405,22 +3049,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "cloudasset_v1_generated_asset_service_list_feeds_sync.py" + "title": "cloudasset_v1_generated_asset_service_query_assets_sync.py" }, { "canonical": true, @@ -1928,6 +3572,175 @@ } ], "title": "cloudasset_v1_generated_asset_service_update_feed_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient", + "shortName": "AssetServiceAsyncClient" + 
}, + "fullName": "google.cloud.asset_v1.AssetServiceAsyncClient.update_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.UpdateSavedQuery", + "service": { + "fullName": "google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "UpdateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.UpdateSavedQueryRequest" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "update_saved_query" + }, + "description": "Sample for UpdateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_update_saved_query_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_UpdateSavedQuery_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_update_saved_query_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.asset_v1.AssetServiceClient", + "shortName": "AssetServiceClient" + }, + "fullName": "google.cloud.asset_v1.AssetServiceClient.update_saved_query", + "method": { + "fullName": "google.cloud.asset.v1.AssetService.UpdateSavedQuery", + "service": { + "fullName": 
"google.cloud.asset.v1.AssetService", + "shortName": "AssetService" + }, + "shortName": "UpdateSavedQuery" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.asset_v1.types.UpdateSavedQueryRequest" + }, + { + "name": "saved_query", + "type": "google.cloud.asset_v1.types.SavedQuery" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.asset_v1.types.SavedQuery", + "shortName": "update_saved_query" + }, + "description": "Sample for UpdateSavedQuery", + "file": "cloudasset_v1_generated_asset_service_update_saved_query_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "cloudasset_v1_generated_AssetService_UpdateSavedQuery_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "cloudasset_v1_generated_asset_service_update_saved_query_sync.py" } ] } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index 631ec8e32a32..979373d9256e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -39,18 +39,29 @@ def partition( class assetCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 
'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_iam_policy': ('analysis_query', 'execution_timeout', ), - 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', ), - 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', ), + 'analyze_iam_policy': ('analysis_query', 'saved_analysis_query', 'execution_timeout', ), + 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', 'saved_analysis_query', ), + 'analyze_move': ('resource', 'destination_parent', 'view', ), + 'analyze_org_policies': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), + 'analyze_org_policy_governed_assets': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), + 'analyze_org_policy_governed_containers': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), + 'batch_get_assets_history': ('parent', 'asset_names', 'content_type', 'read_time_window', 'relationship_types', ), + 'batch_get_effective_iam_policies': ('scope', 'names', ), 'create_feed': ('parent', 'feed_id', 'feed', ), + 'create_saved_query': ('parent', 'saved_query', 'saved_query_id', ), 'delete_feed': ('name', ), - 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', ), + 'delete_saved_query': ('name', ), + 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', 'relationship_types', ), 'get_feed': ('name', ), - 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', ), + 'get_saved_query': ('name', ), + 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', 'relationship_types', ), 'list_feeds': ('parent', ), + 'list_saved_queries': ('parent', 'filter', 'page_size', 'page_token', ), + 'query_assets': ('parent', 'statement', 'job_reference', 'page_size', 'page_token', 'timeout', 'read_time_window', 'read_time', 'output_config', ), 'search_all_iam_policies': ('scope', 'query', 
'page_size', 'page_token', 'asset_types', 'order_by', ), - 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', ), + 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', 'read_mask', ), 'update_feed': ('feed', 'update_mask', ), + 'update_saved_query': ('saved_query', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 4a12e28e3cdd..e4947013d9dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -58,6 +58,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore from google.type import expr_pb2 # type: ignore import google.auth @@ -1289,6 +1290,7 @@ def test_create_feed(request_type, transport: str = 'grpc'): asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], ) response = client.create_feed(request) @@ -1303,6 +1305,7 @@ def test_create_feed(request_type, transport: str = 'grpc'): assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] def test_create_feed_empty_call(): @@ -1343,6 +1346,7 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', 
request_type=a asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], )) response = await client.create_feed(request) @@ -1357,6 +1361,7 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] @pytest.mark.asyncio @@ -1533,6 +1538,7 @@ def test_get_feed(request_type, transport: str = 'grpc'): asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], ) response = client.get_feed(request) @@ -1547,6 +1553,7 @@ def test_get_feed(request_type, transport: str = 'grpc'): assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] def test_get_feed_empty_call(): @@ -1587,6 +1594,7 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], )) response = await client.get_feed(request) @@ -1601,6 +1609,7 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] @pytest.mark.asyncio @@ -2005,6 +2014,7 @@ def test_update_feed(request_type, transport: str = 
'grpc'): asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], ) response = client.update_feed(request) @@ -2019,6 +2029,7 @@ def test_update_feed(request_type, transport: str = 'grpc'): assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] def test_update_feed_empty_call(): @@ -2059,6 +2070,7 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a asset_names=['asset_names_value'], asset_types=['asset_types_value'], content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], )) response = await client.update_feed(request) @@ -2073,6 +2085,7 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a assert response.asset_names == ['asset_names_value'] assert response.asset_types == ['asset_types_value'] assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] @pytest.mark.asyncio @@ -3625,18 +3638,5786 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): ) in kw['metadata'] +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeMoveRequest, + dict, +]) +def test_analyze_move(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeMoveResponse( + ) + response = client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +def test_analyze_move_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + client.analyze_move() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + +@pytest.mark.asyncio +async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( + )) + response = await client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +@pytest.mark.asyncio +async def test_analyze_move_async_from_dict(): + await test_analyze_move_async(request_type=dict) + + +def test_analyze_move_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeMoveRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + call.return_value = asset_service.AnalyzeMoveResponse() + client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_move_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeMoveRequest() + + request.resource = 'resource_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse()) + await client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'resource=resource_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + asset_service.QueryAssetsRequest, + dict, +]) +def test_query_assets(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + ) + response = client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +def test_query_assets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + client.query_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + +@pytest.mark.asyncio +async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + )) + response = await client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +@pytest.mark.asyncio +async def test_query_assets_async_from_dict(): + await test_query_assets_async(request_type=dict) + + +def test_query_assets_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = asset_service.QueryAssetsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + call.return_value = asset_service.QueryAssetsResponse() + client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_query_assets_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.QueryAssetsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse()) + await client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateSavedQueryRequest, + dict, +]) +def test_create_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + response = client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_create_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + client.create_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + +@pytest.mark.asyncio +async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.asyncio +async def test_create_saved_query_async_from_dict(): + await test_create_saved_query_async(request_type=dict) + + +def test_create_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.CreateSavedQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.CreateSavedQueryRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + await client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_saved_query( + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].saved_query_id + mock_val = 'saved_query_id_value' + assert arg == mock_val + + +def test_create_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_saved_query( + asset_service.CreateSavedQueryRequest(), + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + +@pytest.mark.asyncio +async def test_create_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_saved_query( + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].saved_query_id + mock_val = 'saved_query_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_saved_query( + asset_service.CreateSavedQueryRequest(), + parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetSavedQueryRequest, + dict, +]) +def test_get_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + response = client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_get_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + client.get_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + +@pytest.mark.asyncio +async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.asyncio +async def test_get_saved_query_async_from_dict(): + await test_get_saved_query_async(request_type=dict) + + +def test_get_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = asset_service.GetSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.GetSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + await client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_saved_query( + asset_service.GetSavedQueryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_saved_query( + asset_service.GetSavedQueryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListSavedQueriesRequest, + dict, +]) +def test_list_saved_queries(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSavedQueriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_saved_queries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + client.list_saved_queries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + +@pytest.mark.asyncio +async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSavedQueriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_saved_queries_async_from_dict(): + await test_list_saved_queries_async(request_type=dict) + + +def test_list_saved_queries_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListSavedQueriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + call.return_value = asset_service.ListSavedQueriesResponse() + client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_saved_queries_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.ListSavedQueriesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + await client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_saved_queries_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.ListSavedQueriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_saved_queries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_saved_queries_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_saved_queries( + asset_service.ListSavedQueriesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_saved_queries_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = asset_service.ListSavedQueriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_saved_queries( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_saved_queries_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_saved_queries( + asset_service.ListSavedQueriesRequest(), + parent='parent_value', + ) + + +def test_list_saved_queries_pager(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_saved_queries(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.SavedQuery) + for i in results) +def test_list_saved_queries_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + pages = list(client.list_saved_queries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_saved_queries_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_saved_queries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.SavedQuery) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_saved_queries_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_saved_queries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateSavedQueryRequest, + dict, +]) +def test_update_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + response = client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +def test_update_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + client.update_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + +@pytest.mark.asyncio +async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.asyncio +async def test_update_saved_query_async_from_dict(): + await test_update_saved_query_async(request_type=dict) + + +def test_update_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.UpdateSavedQueryRequest() + + request.saved_query.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'saved_query.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.UpdateSavedQueryRequest() + + request.saved_query.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + await client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'saved_query.name=name_value', + ) in kw['metadata'] + + +def test_update_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_saved_query( + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_saved_query( + asset_service.UpdateSavedQueryRequest(), + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.SavedQuery() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_saved_query( + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].saved_query + mock_val = asset_service.SavedQuery(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_saved_query( + asset_service.UpdateSavedQueryRequest(), + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteSavedQueryRequest, + dict, +]) +def test_delete_saved_query(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_saved_query_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + client.delete_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + +@pytest.mark.asyncio +async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_saved_query_async_from_dict(): + await test_delete_saved_query_async(request_type=dict) + + +def test_delete_saved_query_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = asset_service.DeleteSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + call.return_value = None + client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_saved_query_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.DeleteSavedQueryRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_saved_query_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_saved_query_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_saved_query( + asset_service.DeleteSavedQueryRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_saved_query_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_saved_query( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_saved_query_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_saved_query( + asset_service.DeleteSavedQueryRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetEffectiveIamPoliciesRequest, + dict, +]) +def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( + ) + response = client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) + + +def test_batch_get_effective_iam_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + client.batch_get_effective_iam_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( + )) + response = await client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) + + +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_async_from_dict(): + await test_batch_get_effective_iam_policies_async(request_type=dict) + + +def test_batch_get_effective_iam_policies_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = asset_service.BatchGetEffectiveIamPoliciesRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() + client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.BatchGetEffectiveIamPoliciesRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse()) + await client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPoliciesRequest, + dict, +]) +def test_analyze_org_policies(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + ) + response = client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_analyze_org_policies_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + client.analyze_org_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + +@pytest.mark.asyncio +async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + )) + response = await client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_analyze_org_policies_async_from_dict(): + await test_analyze_org_policies_async(request_type=dict) + + +def test_analyze_org_policies_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = asset_service.AnalyzeOrgPoliciesRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_org_policies_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPoliciesRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + await client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +def test_analyze_org_policies_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.analyze_org_policies( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_analyze_org_policies_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policies( + asset_service.AnalyzeOrgPoliciesRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_analyze_org_policies_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.analyze_org_policies( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_org_policies_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_org_policies( + asset_service.AnalyzeOrgPoliciesRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + +def test_analyze_org_policies_pager(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.analyze_org_policies(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in results) +def test_analyze_org_policies_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + pages = list(client.analyze_org_policies(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_analyze_org_policies_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + async_pager = await client.analyze_org_policies(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) + for i in responses) + + +@pytest.mark.asyncio +async def test_analyze_org_policies_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.analyze_org_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, + dict, +]) +def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + ) + response = client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_analyze_org_policy_governed_containers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + client.analyze_org_policy_governed_containers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + )) + response = await client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async_from_dict(): + await test_analyze_org_policy_governed_containers_async(request_type=dict) + + +def test_analyze_org_policy_governed_containers_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + await client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +def test_analyze_org_policy_governed_containers_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.analyze_org_policy_governed_containers( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_analyze_org_policy_governed_containers_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policy_governed_containers( + asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.analyze_org_policy_governed_containers( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_org_policy_governed_containers( + asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + +def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.analyze_org_policy_governed_containers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) + for i in results) +def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + pages = list(client.analyze_org_policy_governed_containers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + async_pager = await client.analyze_org_policy_governed_containers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) + for i in responses) + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.analyze_org_policy_governed_containers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, + dict, +]) +def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + ) + response = client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_analyze_org_policy_governed_assets_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + client.analyze_org_policy_governed_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + )) + response = await client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async_from_dict(): + await test_analyze_org_policy_governed_assets_async(request_type=dict) + + +def test_analyze_org_policy_governed_assets_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + request.scope = 'scope_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + await client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'scope=scope_value', + ) in kw['metadata'] + + +def test_analyze_org_policy_governed_assets_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.analyze_org_policy_governed_assets( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + + +def test_analyze_org_policy_governed_assets_flattened_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policy_governed_assets( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_flattened_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.analyze_org_policy_governed_assets( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].scope + mock_val = 'scope_value' + assert arg == mock_val + arg = args[0].constraint + mock_val = 'constraint_value' + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_flattened_error_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.analyze_org_policy_governed_assets( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + +def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('scope', ''), + )), + ) + pager = client.analyze_org_policy_governed_assets(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in results) +def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + pages = list(client.analyze_org_policy_governed_assets(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async_pager(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + async_pager = await client.analyze_org_policy_governed_assets(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in responses) + + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async_pages(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.analyze_org_policy_governed_assets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize("request_type", [ asset_service.ExportAssetsRequest, dict, ]) -def test_export_assets_rest(request_type): +def test_export_assets_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.export_assets(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.export_assets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_export_assets_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.export_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, 
"post_export_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = asset_service.ExportAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.export_assets(request) + + +def test_export_assets_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListAssetsRequest, + dict, +]) +def test_list_assets_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_assets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_assets(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_assets_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + 
mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + + request = asset_service.ListAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListAssetsResponse() + + client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_assets(request) + + +def test_list_assets_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.ListAssetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_assets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) + + +def test_list_assets_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_assets( + asset_service.ListAssetsRequest(), + parent='parent_value', + ) + + +def test_list_assets_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + assets.Asset(), + ], + next_page_token='abc', + ), + asset_service.ListAssetsResponse( + assets=[], + next_page_token='def', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + ], + next_page_token='ghi', + ), + asset_service.ListAssetsResponse( + assets=[ + assets.Asset(), + assets.Asset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_assets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.Asset) + for i in results) + + pages = list(client.list_assets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+        return_value = asset_service.BatchGetAssetsHistoryResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.batch_get_assets_history(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse)
+
+
+def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest):
+    """Required-field handling: defaults are dropped, then re-populated, then preserved."""
+    transport_class = transports.AssetServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = asset_service.BatchGetAssetsHistoryResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.batch_get_assets_history(request)
+
+            # No default-valued required fields should leak into query params.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_batch_get_assets_history_rest_unset_required_fields():
+    """The unset-required-fields set is the intersection of body and required fields."""
+    # NOTE(review): AnonymousCredentials is passed uninstantiated (no parens).
+    # This test never sends a request, so it happens to work — but it looks
+    # unintentional; confirm against the generator's other transports.
+    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.batch_get_assets_history._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_batch_get_assets_history_rest_interceptors(null_interceptor):
+    """pre/post interceptor hooks fire exactly once around the call."""
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+        )
+    client = AssetServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse())
+
+        request = asset_service.BatchGetAssetsHistoryRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.BatchGetAssetsHistoryResponse()
+
+        client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest):
+    """An HTTP 400 surfaces as core_exceptions.BadRequest."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.batch_get_assets_history(request)
+
+
+def test_batch_get_assets_history_rest_error():
+    """Smoke test: a REST client can be constructed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.CreateFeedRequest,
+    dict,
+])
+def test_create_feed_rest(request_type):
+    """create_feed round-trips a mocked REST response into an asset_service.Feed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.Feed(
+              name='name_value',
+              asset_names=['asset_names_value'],
+              asset_types=['asset_types_value'],
+              content_type=asset_service.ContentType.RESOURCE,
+              relationship_types=['relationship_types_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.Feed.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.create_feed(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.Feed)
+    assert response.name == 'name_value'
+    assert response.asset_names == ['asset_names_value']
+    assert response.asset_types == ['asset_types_value']
+    assert response.content_type == asset_service.ContentType.RESOURCE
+    assert response.relationship_types == ['relationship_types_value']
+
+
+def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest):
+    """Required-field handling for create_feed: parent and feed_id must survive transcoding."""
+    transport_class = transports.AssetServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request_init["feed_id"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+    jsonified_request["feedId"] = 'feed_id_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+    assert "feedId" in jsonified_request
+    assert jsonified_request["feedId"] == 'feed_id_value'
+
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = asset_service.Feed()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = asset_service.Feed.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_feed(request)
+
+            # No default-valued required fields should leak into query params.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_feed_rest_unset_required_fields():
+    """The unset-required-fields set is the intersection of body and required fields."""
+    # NOTE(review): AnonymousCredentials passed uninstantiated (no parens);
+    # works here because no request is sent — confirm intent.
+    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.create_feed._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_feed_rest_interceptors(null_interceptor):
+    """pre/post interceptor hooks fire exactly once around create_feed."""
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+        )
+    client = AssetServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = asset_service.Feed.to_json(asset_service.Feed())
+
+        request = asset_service.CreateFeedRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.Feed()
+
+        client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest):
+    """An HTTP 400 surfaces as core_exceptions.BadRequest."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.create_feed(request)
+
+
+def test_create_feed_rest_flattened():
+    """Flattened arguments are mapped onto the request and hit the expected URL."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.Feed()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'sample1/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.Feed.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.create_feed(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1])
+
+
+def test_create_feed_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields must raise ValueError."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_feed(
+            asset_service.CreateFeedRequest(),
+            parent='parent_value',
+        )
+
+
+def test_create_feed_rest_error():
+    """Smoke test: a REST client can be constructed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.GetFeedRequest,
+    dict,
+])
+def test_get_feed_rest(request_type):
+    """get_feed round-trips a mocked REST response into an asset_service.Feed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/feeds/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.Feed(
+              name='name_value',
+              asset_names=['asset_names_value'],
+              asset_types=['asset_types_value'],
+              content_type=asset_service.ContentType.RESOURCE,
+              relationship_types=['relationship_types_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.Feed.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.get_feed(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.Feed)
+    assert response.name == 'name_value'
+    assert response.asset_names == ['asset_names_value']
+    assert response.asset_types == ['asset_types_value']
+    assert response.content_type == asset_service.ContentType.RESOURCE
+    assert response.relationship_types == ['relationship_types_value']
+
+
+def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest):
+    """Required-field handling for get_feed: name must survive transcoding."""
+    transport_class = transports.AssetServiceRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = 'name_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == 'name_value'
+
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = asset_service.Feed()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = asset_service.Feed.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.get_feed(request)
+
+            # No default-valued required fields should leak into query params.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_get_feed_rest_unset_required_fields():
+    """The unset-required-fields set is the intersection of body and required fields."""
+    # NOTE(review): AnonymousCredentials passed uninstantiated (no parens);
+    # works here because no request is sent — confirm intent.
+    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.get_feed._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_get_feed_rest_interceptors(null_interceptor):
+    """pre/post interceptor hooks fire exactly once around get_feed."""
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+        )
+    client = AssetServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = asset_service.Feed.to_json(asset_service.Feed())
+
+        request = asset_service.GetFeedRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.Feed()
+
+        client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest):
+    """An HTTP 400 surfaces as core_exceptions.BadRequest."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'name': 'sample1/sample2/feeds/sample3'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.get_feed(request)
+
+
+def test_get_feed_rest_flattened():
+    """Flattened arguments are mapped onto the request and hit the expected URL."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.Feed()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'name': 'sample1/sample2/feeds/sample3'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            name='name_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.Feed.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.get_feed(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1])
+
+
+def test_get_feed_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields must raise ValueError."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_feed(
+            asset_service.GetFeedRequest(),
+            name='name_value',
+        )
+
+
+def test_get_feed_rest_error():
+    """Smoke test: a REST client can be constructed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.ListFeedsRequest,
+    dict,
+])
+def test_list_feeds_rest(request_type):
+    """list_feeds round-trips a mocked REST response into the expected type."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.ListFeedsResponse(
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.ListFeedsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.list_feeds(request)
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, asset_service.ListFeedsResponse)
+
+
+def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest):
+    """Required-field handling for list_feeds: parent must survive transcoding."""
+    transport_class = transports.AssetServiceRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = asset_service.ListFeedsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "get",
+                'query_params': pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            pb_return_value = asset_service.ListFeedsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(pb_return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.list_feeds(request)
+
+            # No default-valued required fields should leak into query params.
+            expected_params = [
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_list_feeds_rest_unset_required_fields():
+    """The unset-required-fields set is the intersection of body and required fields."""
+    # NOTE(review): AnonymousCredentials passed uninstantiated (no parens);
+    # works here because no request is sent — confirm intent.
+    transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials)
+
+    unset_fields = transport.list_feeds._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("parent", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_list_feeds_rest_interceptors(null_interceptor):
+    """pre/post interceptor hooks fire exactly once around list_feeds."""
+    transport = transports.AssetServiceRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
+        )
+    client = AssetServiceClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \
+         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest())
+        transcode.return_value = {
+            "method": "post",
+            "uri": "my_uri",
+            "body": pb_message,
+            "query_params": pb_message,
+        }
+
+        req.return_value = Response()
+        req.return_value.status_code = 200
+        req.return_value.request = PreparedRequest()
+        req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse())
+
+        request = asset_service.ListFeedsRequest()
+        metadata =[
+            ("key", "val"),
+            ("cephalopod", "squid"),
+        ]
+        pre.return_value = request, metadata
+        post.return_value = asset_service.ListFeedsResponse()
+
+        client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),])
+
+        pre.assert_called_once()
+        post.assert_called_once()
+
+
+def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest):
+    """An HTTP 400 surfaces as core_exceptions.BadRequest."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # send a request that will satisfy transcoding
+    request_init = {'parent': 'sample1/sample2'}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a BadRequest error.
+    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 400
+        response_value.request = Request()
+        req.return_value = response_value
+        client.list_feeds(request)
+
+
+def test_list_feeds_rest_flattened():
+    """Flattened arguments are mapped onto the request and hit the expected URL."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.ListFeedsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {'parent': 'sample1/sample2'}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            parent='parent_value',
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.ListFeedsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+
+        client.list_feeds(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1])
+
+
+def test_list_feeds_rest_flattened_error(transport: str = 'rest'):
+    """Passing both a request object and flattened fields must raise ValueError."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_feeds(
+            asset_service.ListFeedsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_feeds_rest_error():
+    """Smoke test: a REST client can be constructed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest'
+    )
+
+
+@pytest.mark.parametrize("request_type", [
+    asset_service.UpdateFeedRequest,
+    dict,
+])
+def test_update_feed_rest(request_type):
+    """update_feed round-trips a mocked REST response into an asset_service.Feed."""
+    client = AssetServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # send a request that will satisfy transcoding
+    # (update_feed's path parameter lives inside the nested feed.name)
+    request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}}
+    request = request_type(**request_init)
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), 'request') as req:
+        # Designate an appropriate value for the returned response.
+        return_value = asset_service.Feed(
+              name='name_value',
+              asset_names=['asset_names_value'],
+              asset_types=['asset_types_value'],
+              content_type=asset_service.ContentType.RESOURCE,
+              relationship_types=['relationship_types_value'],
+        )
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        pb_return_value = asset_service.Feed.pb(return_value)
+        json_return_value = json_format.MessageToJson(pb_return_value)
+
+        response_value._content = json_return_value.encode('UTF-8')
+        req.return_value = response_value
+        response = client.update_feed(request)
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.Feed() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + + request = asset_service.UpdateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_feed(request) + + +def test_update_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed() + + # get arguments that satisfy an http rule for this method + sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + feed=asset_service.Feed(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_update_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_feed( + asset_service.UpdateFeedRequest(), + feed=asset_service.Feed(name='name_value'), + ) + + +def test_update_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_feed(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_feed(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_feed_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_feed._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + pre.assert_not_called() + pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = asset_service.DeleteFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_feed(request) + + +def test_delete_feed_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'sample1/sample2/feeds/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '' + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_feed(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + + +def test_delete_feed_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_feed( + asset_service.DeleteFeedRequest(), + name='name_value', + ) + + +def test_delete_feed_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllResourcesRequest, + dict, +]) +def test_search_all_resources_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_resources(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllResourcesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllResourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.search_all_resources(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_all_resources_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_all_resources._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_resources_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": 
pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + + request = asset_service.SearchAllResourcesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllResourcesResponse() + + client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_resources(request) + + +def test_search_all_resources_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllResourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.search_all_resources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) + + +def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_resources( + asset_service.SearchAllResourcesRequest(), + scope='scope_value', + query='query_value', + asset_types=['asset_types_value'], + ) + + +def test_search_all_resources_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllResourcesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllResourcesResponse( + results=[ + assets.ResourceSearchResult(), + assets.ResourceSearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.search_all_resources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.ResourceSearchResult) + for i in results) + + pages = list(client.search_all_resources(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllIamPoliciesRequest, + dict, +]) +def test_search_all_iam_policies_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_iam_policies(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["scope"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["scope"] = 'scope_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.search_all_iam_policies(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_search_all_iam_policies_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_iam_policies_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", 
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + + request = asset_service.SearchAllIamPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllIamPoliciesResponse() + + client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.search_all_iam_policies(request) + + +def test_search_all_iam_policies_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.SearchAllIamPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + query='query_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.search_all_iam_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + + +def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_all_iam_policies( + asset_service.SearchAllIamPoliciesRequest(), + scope='scope_value', + query='query_value', + ) + + +def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + next_page_token='abc', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[], + next_page_token='def', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + ], + next_page_token='ghi', + ), + asset_service.SearchAllIamPoliciesResponse( + results=[ + assets.IamPolicySearchResult(), + assets.IamPolicySearchResult(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.search_all_iam_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, assets.IamPolicySearchResult) + for i in results) + + pages = list(client.search_all_iam_policies(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True + + +def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.analyze_iam_policy(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_analyze_iam_policy_rest_unset_required_fields(): + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", 
))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + + request = asset_service.AnalyzeIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeIamPolicyResponse() + + client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_iam_policy(request) + + +def test_analyze_iam_policy_rest_error(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, +]) +def test_analyze_iam_policy_longrunning_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3651,17 +9432,16 @@ def test_export_assets_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.export_assets(request) + response = client.analyze_iam_policy_longrunning(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): +def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): transport_class = transports.AssetServiceRestTransport request_init = {} - request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -3672,19 +9452,15 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3718,7 +9494,7 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.export_assets(request) + response = client.analyze_iam_policy_longrunning(request) 
expected_params = [ ] @@ -3726,15 +9502,15 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss assert expected_params == actual_params -def test_export_assets_rest_unset_required_fields(): +def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.export_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) + unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_assets_rest_interceptors(null_interceptor): +def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -3743,11 +9519,11 @@ def test_export_assets_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + pb_message = 
asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3760,7 +9536,7 @@ def test_export_assets_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - request = asset_service.ExportAssetsRequest() + request = asset_service.AnalyzeIamPolicyLongrunningRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -3768,20 +9544,20 @@ def test_export_assets_rest_interceptors(null_interceptor): pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): +def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -3791,10 +9567,10 @@ def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=as response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.export_assets(request) + client.analyze_iam_policy_longrunning(request) -def test_export_assets_rest_error(): +def test_analyze_iam_policy_longrunning_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -3802,46 +9578,45 @@ def test_export_assets_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, + asset_service.AnalyzeMoveRequest, dict, ]) -def test_list_assets_rest(request_type): +def test_analyze_move_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} + request_init = {'resource': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', + return_value = asset_service.AnalyzeMoveResponse( ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_assets(request) + response = client.analyze_move(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, asset_service.AnalyzeMoveResponse) -def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): +def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMoveRequest): transport_class = transports.AssetServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["resource"] = "" + request_init["destination_parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -3851,22 +9626,28 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR )) # verify fields with default values are dropped + assert "destinationParent" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "destinationParent" in jsonified_request + assert jsonified_request["destinationParent"] == request_init["destination_parent"] - jsonified_request["parent"] = 'parent_value' + jsonified_request["resource"] = 'resource_value' + jsonified_request["destinationParent"] = 'destination_parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", )) + assert not set(unset_fields) - set(("destination_parent", "view", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 'resource_value' + assert "destinationParent" in jsonified_request + assert jsonified_request["destinationParent"] == 'destination_parent_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3875,7 +9656,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse() + return_value = asset_service.AnalyzeMoveResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -3895,29 +9676,33 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeMoveResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_assets(request) + response = client.analyze_move(request) expected_params = [ + ( + "destinationParent", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_list_assets_rest_unset_required_fields(): +def test_analyze_move_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_assets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", )) & set(("parent", ))) + unset_fields = transport.analyze_move._get_unset_required_fields({}) + assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): +def test_analyze_move_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -3925,11 +9710,11 @@ def test_list_assets_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, 
"transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3940,30 +9725,30 @@ def test_list_assets_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + req.return_value._content = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) - request = asset_service.ListAssetsRequest() + request = asset_service.AnalyzeMoveRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.ListAssetsResponse() + post.return_value = asset_service.AnalyzeMoveResponse() - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): +def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 
'sample1/sample2'} + request_init = {'resource': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3973,128 +9758,21 @@ def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asse response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_assets(request) - - -def test_list_assets_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListAssetsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_assets(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/assets" % client.transport._host, args[1]) - - -def test_list_assets_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_assets( - asset_service.ListAssetsRequest(), - parent='parent_value', - ) + client.analyze_move(request) -def test_list_assets_rest_pager(transport: str = 'rest'): +def test_analyze_move_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='rest' ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - assets.Asset(), - ], - next_page_token='abc', - ), - asset_service.ListAssetsResponse( - assets=[], - next_page_token='def', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - ], - next_page_token='ghi', - ), - asset_service.ListAssetsResponse( - assets=[ - assets.Asset(), - assets.Asset(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.ListAssetsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_assets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.Asset) - for i in results) - - pages = list(client.list_assets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, + asset_service.QueryAssetsRequest, dict, ]) -def test_batch_get_assets_history_rest(request_type): +def test_query_assets_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4107,24 +9785,28 @@ def test_batch_get_assets_history_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.BatchGetAssetsHistoryResponse( + return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + pb_return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.batch_get_assets_history(request) + response = client.query_assets(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True -def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -4139,16 +9821,14 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) - # 
Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", )) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4162,7 +9842,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse() + return_value = asset_service.QueryAssetsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -4174,21 +9854,22 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "get", + 'method': "post", 'query_params': pb_request, } + transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + pb_return_value = asset_service.QueryAssetsResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.batch_get_assets_history(request) + response = client.query_assets(request) expected_params = [ ] @@ -4196,15 +9877,15 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic assert expected_params == actual_params -def test_batch_get_assets_history_rest_unset_required_fields(): +def test_query_assets_rest_unset_required_fields(): 
transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", )) & set(("parent", ))) + unset_fields = transport.query_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_assets_history_rest_interceptors(null_interceptor): +def test_query_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -4212,11 +9893,11 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4227,23 +9908,23 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - 
req.return_value._content = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + req.return_value._content = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) - request = asset_service.BatchGetAssetsHistoryRequest() + request = asset_service.QueryAssetsRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.BatchGetAssetsHistoryResponse() + post.return_value = asset_service.QueryAssetsResponse() - client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest): +def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.QueryAssetsRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4260,10 +9941,10 @@ def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', requ response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.batch_get_assets_history(request) + client.query_assets(request) -def test_batch_get_assets_history_rest_error(): +def test_query_assets_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -4271,10 +9952,10 @@ def test_batch_get_assets_history_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, + asset_service.CreateSavedQueryRequest, dict, ]) -def test_create_feed_rest(request_type): +def test_create_saved_query_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ 
-4282,42 +9963,43 @@ def test_create_feed_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'sample1/sample2'} + request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed( + return_value = asset_service.SavedQuery( name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, + description='description_value', + creator='creator_value', + last_updater='last_updater_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.create_feed(request) + response = client.create_saved_query(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) + assert isinstance(response, asset_service.SavedQuery) assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' -def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest): +def test_create_saved_query_rest_required_fields(request_type=asset_service.CreateSavedQueryRequest): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["parent"] = "" - request_init["feed_id"] = "" + request_init["saved_query_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -4327,23 +10009,28 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR )) # verify fields with default values are dropped + assert 
"savedQueryId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "savedQueryId" in jsonified_request + assert jsonified_request["savedQueryId"] == request_init["saved_query_id"] jsonified_request["parent"] = 'parent_value' - jsonified_request["feedId"] = 'feed_id_value' + jsonified_request["savedQueryId"] = 'saved_query_id_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("saved_query_id", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == 'parent_value' - assert "feedId" in jsonified_request - assert jsonified_request["feedId"] == 'feed_id_value' + assert "savedQueryId" in jsonified_request + assert jsonified_request["savedQueryId"] == 'saved_query_id_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4352,7 +10039,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() + return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -4373,29 +10060,33 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.create_feed(request) + response = client.create_saved_query(request) expected_params = [ + ( + "savedQueryId", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_create_feed_rest_unset_required_fields(): +def test_create_saved_query_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.create_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) + unset_fields = transport.create_saved_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_feed_rest_interceptors(null_interceptor): +def test_create_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -4403,11 +10094,11 @@ def test_create_feed_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4418,23 +10109,23 @@ def test_create_feed_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - request = asset_service.CreateFeedRequest() + request = asset_service.CreateSavedQueryRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.Feed() + post.return_value = asset_service.SavedQuery() - client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): +def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4442,6 +10133,7 @@ def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asse # send a request that will 
satisfy transcoding request_init = {'parent': 'sample1/sample2'} + request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4451,10 +10143,10 @@ def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asse response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_feed(request) + client.create_saved_query(request) -def test_create_feed_rest_flattened(): +def test_create_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4463,7 +10155,7 @@ def test_create_feed_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed() + return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method sample_request = {'parent': 'sample1/sample2'} @@ -4471,27 +10163,29 @@ def test_create_feed_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.create_feed(**mock_args) + client.create_saved_query(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) -def test_create_feed_rest_flattened_error(transport: str = 'rest'): +def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4500,13 +10194,15 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_feed( - asset_service.CreateFeedRequest(), + client.create_saved_query( + asset_service.CreateSavedQueryRequest(), parent='parent_value', + saved_query=asset_service.SavedQuery(name='name_value'), + saved_query_id='saved_query_id_value', ) -def test_create_feed_rest_error(): +def test_create_saved_query_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -4514,48 +10210,48 @@ def test_create_feed_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, + asset_service.GetSavedQueryRequest, dict, ]) -def test_get_feed_rest(request_type): +def test_get_saved_query_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed( + return_value = asset_service.SavedQuery( name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, + description='description_value', + creator='creator_value', + last_updater='last_updater_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.get_feed(request) + response = client.get_saved_query(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) + assert isinstance(response, asset_service.SavedQuery) assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' -def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest): +def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSavedQueryRequest): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -4570,14 +10266,14 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4591,7 +10287,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() + return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -4611,13 +10307,13 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.get_feed(request) + response = client.get_saved_query(request) expected_params = [ ] @@ -4625,15 +10321,15 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest assert expected_params == actual_params -def test_get_feed_rest_unset_required_fields(): +def test_get_saved_query_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.get_feed._get_unset_required_fields({}) + unset_fields = transport.get_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_feed_rest_interceptors(null_interceptor): +def test_get_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -4641,11 +10337,11 @@ def test_get_feed_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, 
"pre_get_feed") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) + pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4656,30 +10352,30 @@ def test_get_feed_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - request = asset_service.GetFeedRequest() + request = asset_service.GetSavedQueryRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.Feed() + post.return_value = asset_service.SavedQuery() - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): +def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -4689,10 +10385,10 @@ def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_s response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_feed(request) + client.get_saved_query(request) -def test_get_feed_rest_flattened(): +def test_get_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4701,10 +10397,10 @@ def test_get_feed_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() + return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} # get truthy value for each flattened field mock_args = dict( @@ -4715,21 +10411,21 @@ def test_get_feed_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.get_feed(**mock_args) + client.get_saved_query(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) -def test_get_feed_rest_flattened_error(transport: str = 'rest'): +def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4738,13 +10434,13 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_feed( - asset_service.GetFeedRequest(), + client.get_saved_query( + asset_service.GetSavedQueryRequest(), name='name_value', ) -def test_get_feed_rest_error(): +def test_get_saved_query_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -4752,10 +10448,10 @@ def test_get_feed_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, + asset_service.ListSavedQueriesRequest, dict, ]) -def test_list_feeds_rest(request_type): +def test_list_saved_queries_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4768,24 +10464,26 @@ def test_list_feeds_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.ListFeedsResponse( + return_value = asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_feeds(request) + response = client.list_saved_queries(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.ListFeedsResponse) + assert isinstance(response, pagers.ListSavedQueriesPager) + assert response.next_page_token == 'next_page_token_value' -def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest): +def test_list_saved_queries_rest_required_fields(request_type=asset_service.ListSavedQueriesRequest): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -4800,14 +10498,16 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4821,7 +10521,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse() + return_value = asset_service.ListSavedQueriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -4841,13 +10541,13 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_feeds(request) + response = client.list_saved_queries(request) expected_params = [ ] @@ -4855,15 +10555,15 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq assert expected_params == actual_params -def test_list_feeds_rest_unset_required_fields(): +def test_list_saved_queries_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_feeds._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", ))) + unset_fields = 
transport.list_saved_queries._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_feeds_rest_interceptors(null_interceptor): +def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -4871,11 +10571,11 @@ def test_list_feeds_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) + pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -4886,23 +10586,23 @@ def test_list_feeds_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + req.return_value._content = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse()) - request = asset_service.ListFeedsRequest() + request = asset_service.ListSavedQueriesRequest() metadata =[ ("key", "val"), 
("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.ListFeedsResponse() + post.return_value = asset_service.ListSavedQueriesResponse() - client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): +def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4919,10 +10619,10 @@ def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_feeds(request) + client.list_saved_queries(request) -def test_list_feeds_rest_flattened(): +def test_list_saved_queries_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -4931,7 +10631,7 @@ def test_list_feeds_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.ListFeedsResponse() + return_value = asset_service.ListSavedQueriesResponse() # get arguments that satisfy an http rule for this method sample_request = {'parent': 'sample1/sample2'} @@ -4945,21 +10645,21 @@ def test_list_feeds_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListFeedsResponse.pb(return_value) + pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.list_feeds(**mock_args) + client.list_saved_queries(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=*/*}/feeds" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{parent=*/*}/savedQueries" % client.transport._host, args[1]) -def test_list_feeds_rest_flattened_error(transport: str = 'rest'): +def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4968,62 +10668,118 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_feeds( - asset_service.ListFeedsRequest(), + client.list_saved_queries( + asset_service.ListSavedQueriesRequest(), parent='parent_value', ) -def test_list_feeds_rest_error(): +def test_list_saved_queries_rest_pager(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + next_page_token='abc', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[], + next_page_token='def', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + ], + next_page_token='ghi', + ), + asset_service.ListSavedQueriesResponse( + saved_queries=[ + asset_service.SavedQuery(), + asset_service.SavedQuery(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.ListSavedQueriesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'sample1/sample2'} + + pager = client.list_saved_queries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.SavedQuery) + for i in results) + + pages = 
list(client.list_saved_queries(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, + asset_service.UpdateSavedQueryRequest, dict, ]) -def test_update_feed_rest(request_type): +def test_update_saved_query_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed( + return_value = asset_service.SavedQuery( name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, + description='description_value', + creator='creator_value', + last_updater='last_updater_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.update_feed(request) + response = client.update_saved_query(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) + assert isinstance(response, asset_service.SavedQuery) assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' -def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): +def test_update_saved_query_rest_required_fields(request_type=asset_service.UpdateSavedQueryRequest): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -5037,12 +10793,14 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5054,7 +10812,7 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.Feed() + return_value = asset_service.SavedQuery() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -5075,13 +10833,13 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.update_feed(request) + response = client.update_saved_query(request) expected_params = [ ] @@ -5089,15 +10847,15 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR assert expected_params == actual_params -def test_update_feed_rest_unset_required_fields(): +def test_update_saved_query_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.update_feed._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) + unset_fields = transport.update_saved_query._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_feed_rest_interceptors(null_interceptor): +def test_update_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -5105,11 +10863,11 @@ def test_update_feed_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5120,30 +10878,31 @@ def test_update_feed_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - request = asset_service.UpdateFeedRequest() + request = asset_service.UpdateSavedQueryRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.Feed() + post.return_value = asset_service.SavedQuery() - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): +def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request_init 
= {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} + request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5153,10 +10912,10 @@ def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asse response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_feed(request) + client.update_saved_query(request) -def test_update_feed_rest_flattened(): +def test_update_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5165,35 +10924,36 @@ def test_update_feed_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed() + return_value = asset_service.SavedQuery() # get arguments that satisfy an http rule for this method - sample_request = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + sample_request = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} # get truthy value for each flattened field mock_args = dict( - feed=asset_service.Feed(name='name_value'), + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) + pb_return_value = asset_service.SavedQuery.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.update_feed(**mock_args) + client.update_saved_query(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{feed.name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{saved_query.name=*/*/savedQueries/*}" % client.transport._host, args[1]) -def test_update_feed_rest_flattened_error(transport: str = 'rest'): +def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5202,13 +10962,14 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_feed( - asset_service.UpdateFeedRequest(), - feed=asset_service.Feed(name='name_value'), + client.update_saved_query( + asset_service.UpdateSavedQueryRequest(), + saved_query=asset_service.SavedQuery(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) -def test_update_feed_rest_error(): +def test_update_saved_query_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -5216,17 +10977,17 @@ def test_update_feed_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, + asset_service.DeleteSavedQueryRequest, dict, ]) -def test_delete_feed_rest(request_type): +def test_delete_saved_query_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -5241,13 +11002,13 @@ def test_delete_feed_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.delete_feed(request) + response = client.delete_saved_query(request) # Establish that the response is the type that we expect. 
assert response is None -def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): +def test_delete_saved_query_rest_required_fields(request_type=asset_service.DeleteSavedQueryRequest): transport_class = transports.AssetServiceRestTransport request_init = {} @@ -5262,14 +11023,14 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5307,7 +11068,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.delete_feed(request) + response = client.delete_saved_query(request) expected_params = [ ] @@ -5315,15 +11076,15 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR assert expected_params == actual_params -def test_delete_feed_rest_unset_required_fields(): +def test_delete_saved_query_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = 
transport.delete_feed._get_unset_required_fields({}) + unset_fields = transport.delete_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_feed_rest_interceptors(null_interceptor): +def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -5331,9 +11092,9 @@ def test_delete_feed_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre: pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5345,26 +11106,26 @@ def test_delete_feed_rest_interceptors(null_interceptor): req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - request = asset_service.DeleteFeedRequest() + request = asset_service.DeleteSavedQueryRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() -def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): +def test_delete_saved_query_rest_bad_request(transport: str = 
'rest', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5374,10 +11135,10 @@ def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asse response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_feed(request) + client.delete_saved_query(request) -def test_delete_feed_rest_flattened(): +def test_delete_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5389,7 +11150,7 @@ def test_delete_feed_rest_flattened(): return_value = None # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/feeds/sample3'} + sample_request = {'name': 'sample1/sample2/savedQueries/sample3'} # get truthy value for each flattened field mock_args = dict( @@ -5404,16 +11165,16 @@ def test_delete_feed_rest_flattened(): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.delete_feed(**mock_args) + client.delete_saved_query(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=*/*/feeds/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{name=*/*/savedQueries/*}" % client.transport._host, args[1]) -def test_delete_feed_rest_flattened_error(transport: str = 'rest'): +def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5422,13 +11183,13 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_feed( - asset_service.DeleteFeedRequest(), + client.delete_saved_query( + asset_service.DeleteSavedQueryRequest(), name='name_value', ) -def test_delete_feed_rest_error(): +def test_delete_saved_query_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -5436,10 +11197,10 @@ def test_delete_feed_rest_error(): @pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, + asset_service.BatchGetEffectiveIamPoliciesRequest, dict, ]) -def test_search_all_resources_rest(request_type): +def test_batch_get_effective_iam_policies_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5452,30 +11213,29 @@ def test_search_all_resources_rest(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + pb_return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_resources(request) + response = client.batch_get_effective_iam_policies(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) -def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): +def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" + request_init["names"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -5485,22 +11245,28 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se )) # verify fields with default values are dropped + assert "names" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "names" in jsonified_request + assert jsonified_request["names"] == request_init["names"] jsonified_request["scope"] = 'scope_value' + jsonified_request["names"] = 'names_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + assert not set(unset_fields) - set(("names", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request assert jsonified_request["scope"] == 'scope_value' + assert "names" in jsonified_request + assert jsonified_request["names"] == 'names_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5509,7 +11275,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse() + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -5529,29 +11295,33 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + pb_return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_resources(request) + response = client.batch_get_effective_iam_policies(request) expected_params = [ + ( + "names", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_search_all_resources_rest_unset_required_fields(): +def test_batch_get_effective_iam_policies_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.search_all_resources._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) - + unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) + @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_resources_rest_interceptors(null_interceptor): +def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -5559,11 +11329,11 @@ def test_search_all_resources_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with 
mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5574,23 +11344,23 @@ def test_search_all_resources_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + req.return_value._content = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) - request = asset_service.SearchAllResourcesRequest() + request = asset_service.BatchGetEffectiveIamPoliciesRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.SearchAllResourcesResponse() + post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def 
test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): +def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5607,132 +11377,21 @@ def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_ response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.search_all_resources(request) - - -def test_search_all_resources_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.search_all_resources(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllResources" % client.transport._host, args[1]) - - -def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.search_all_resources( - asset_service.SearchAllResourcesRequest(), - scope='scope_value', - query='query_value', - asset_types=['asset_types_value'], - ) + client.batch_get_effective_iam_policies(request) -def test_search_all_resources_rest_pager(transport: str = 'rest'): +def test_batch_get_effective_iam_policies_rest_error(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='rest' ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - next_page_token='abc', - ), - asset_service.SearchAllResourcesResponse( - results=[], - next_page_token='def', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - ], - next_page_token='ghi', - ), - asset_service.SearchAllResourcesResponse( - results=[ - assets.ResourceSearchResult(), - assets.ResourceSearchResult(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllResourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'scope': 'sample1/sample2'} - - pager = client.search_all_resources(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, assets.ResourceSearchResult) - for i in results) - - pages = list(client.search_all_resources(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, + asset_service.AnalyzeOrgPoliciesRequest, dict, ]) -def test_search_all_iam_policies_rest(request_type): +def test_analyze_org_policies_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5745,30 +11404,31 @@ def test_search_all_iam_policies_rest(request_type): # Mock the http request call within the method 
and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse( + return_value = asset_service.AnalyzeOrgPoliciesResponse( next_page_token='next_page_token_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_iam_policies(request) + response = client.analyze_org_policies(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) assert response.next_page_token == 'next_page_token_value' -def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): +def test_analyze_org_policies_rest_required_fields(request_type=asset_service.AnalyzeOrgPoliciesRequest): transport_class = transports.AssetServiceRestTransport request_init = {} request_init["scope"] = "" + request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -5778,22 +11438,28 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service )) # verify fields with default values are dropped + assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == request_init["constraint"] jsonified_request["scope"] = 'scope_value' + jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) + assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "scope" in jsonified_request assert jsonified_request["scope"] == 'scope_value' + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5802,7 +11468,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse() + return_value = asset_service.AnalyzeOrgPoliciesResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -5822,29 +11488,33 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.search_all_iam_policies(request) + response = client.analyze_org_policies(request) expected_params = [ + ( + "constraint", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_search_all_iam_policies_rest_unset_required_fields(): +def test_analyze_org_policies_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) - assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) + unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) + assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_iam_policies_rest_interceptors(null_interceptor): +def test_analyze_org_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -5852,11 +11522,11 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with 
mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -5867,23 +11537,23 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + req.return_value._content = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) - request = asset_service.SearchAllIamPoliciesRequest() + request = asset_service.AnalyzeOrgPoliciesRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.SearchAllIamPoliciesResponse() + post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', 
request_type=asset_service.SearchAllIamPoliciesRequest): +def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5900,10 +11570,10 @@ def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', reque response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.search_all_iam_policies(request) + client.analyze_org_policies(request) -def test_search_all_iam_policies_rest_flattened(): +def test_analyze_org_policies_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -5912,7 +11582,7 @@ def test_search_all_iam_policies_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.SearchAllIamPoliciesResponse() + return_value = asset_service.AnalyzeOrgPoliciesResponse() # get arguments that satisfy an http rule for this method sample_request = {'scope': 'sample1/sample2'} @@ -5920,28 +11590,29 @@ def test_search_all_iam_policies_rest_flattened(): # get truthy value for each flattened field mock_args = dict( scope='scope_value', - query='query_value', + constraint='constraint_value', + filter='filter_value', ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.search_all_iam_policies(**mock_args) + client.analyze_org_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{scope=*/*}:searchAllIamPolicies" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicies" % client.transport._host, args[1]) -def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): +def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5950,14 +11621,15 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.search_all_iam_policies( - asset_service.SearchAllIamPoliciesRequest(), + client.analyze_org_policies( + asset_service.AnalyzeOrgPoliciesRequest(), scope='scope_value', - query='query_value', + constraint='constraint_value', + filter='filter_value', ) -def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): +def test_analyze_org_policies_rest_pager(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5969,28 +11641,28 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): #with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], next_page_token='abc', ), - asset_service.SearchAllIamPoliciesResponse( - results=[], + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[], next_page_token='def', ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], next_page_token='ghi', ), - asset_service.SearchAllIamPoliciesResponse( - results=[ - assets.IamPolicySearchResult(), - assets.IamPolicySearchResult(), + asset_service.AnalyzeOrgPoliciesResponse( + org_policy_results=[ + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), + asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult(), ], ), ) @@ -5998,7 +11670,7 @@ def test_search_all_iam_policies_rest_pager(transport: 
str = 'rest'): response = response + response # Wrap the values into proper Response objs - response = tuple(asset_service.SearchAllIamPoliciesResponse.to_json(x) for x in response) + response = tuple(asset_service.AnalyzeOrgPoliciesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode('UTF-8') @@ -6007,58 +11679,60 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): sample_request = {'scope': 'sample1/sample2'} - pager = client.search_all_iam_policies(request=sample_request) + pager = client.analyze_org_policies(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, assets.IamPolicySearchResult) + assert all(isinstance(i, asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult) for i in results) - pages = list(client.search_all_iam_policies(request=sample_request).pages) + pages = list(client.analyze_org_policies(request=sample_request).pages) for page_, token in zip(pages, ['abc','def','ghi', '']): assert page_.raw_page.next_page_token == token @pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, + asset_service.AnalyzeOrgPolicyGovernedContainersRequest, dict, ]) -def test_analyze_iam_policy_rest(request_type): +def test_analyze_org_policy_governed_containers_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy(request) + response = client.analyze_org_policy_governed_containers(request) # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) + assert response.next_page_token == 'next_page_token_value' -def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_org_policy_governed_containers_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): transport_class = transports.AssetServiceRestTransport request_init = {} + request_init["scope"] = "" + request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -6068,18 +11742,28 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal )) # verify fields with default values are dropped + assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == request_init["constraint"] - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request["scope"] = 'scope_value' + jsonified_request["constraint"] = 'constraint_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("analysis_query", "execution_timeout", )) + assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6088,7 +11772,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeIamPolicyResponse() + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -6108,29 +11792,33 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy(request) + response = client.analyze_org_policy_governed_containers(request) expected_params = [ + ( + "constraint", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_analyze_iam_policy_rest_unset_required_fields(): +def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) - assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", )) & set(("analysisQuery", ))) + unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_rest_interceptors(null_interceptor): +def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -6138,11 +11826,11 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): client = 
AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6153,85 +11841,202 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) - request = asset_service.AnalyzeIamPolicyRequest() + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeIamPolicyResponse() + post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + + client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def 
test_analyze_org_policy_governed_containers_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.analyze_org_policy_governed_containers(request) + + +def test_analyze_org_policy_governed_containers_rest_flattened(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_containers(**mock_args) - pre.assert_called_once() - post.assert_called_once() + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers" % client.transport._host, args[1]) -def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): +def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_iam_policy(request) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policy_governed_containers( + asset_service.AnalyzeOrgPolicyGovernedContainersRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) -def test_analyze_iam_policy_rest_error(): +def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + governed_containers=[ + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.analyze_org_policy_governed_containers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer) + for i in results) + + pages = list(client.analyze_org_policy_governed_containers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert 
page_.raw_page.next_page_token == token + @pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, dict, ]) -def test_analyze_iam_policy_longrunning_rest(request_type): +def test_analyze_org_policy_governed_assets_rest(request_type): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy_longrunning(request) + response = client.analyze_org_policy_governed_assets(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) + assert response.next_page_token == 'next_page_token_value' -def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): transport_class = transports.AssetServiceRestTransport request_init = {} + request_init["scope"] = "" + request_init["constraint"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -6241,16 +12046,28 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ )) # verify fields with default values are dropped + assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == request_init["constraint"] - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + jsonified_request["scope"] = 'scope_value' + jsonified_request["constraint"] = 'constraint_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "scope" in jsonified_request + assert jsonified_request["scope"] == 'scope_value' + assert "constraint" in jsonified_request + assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6259,7 +12076,7 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -6271,36 +12088,41 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "post", + 'method': "get", 'query_params': pb_request, } - transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.analyze_iam_policy_longrunning(request) + response = client.analyze_org_policy_governed_assets(request) expected_params = [ + ( + "constraint", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def 
test_analyze_iam_policy_longrunning_rest_unset_required_fields(): +def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) + unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) + assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): +def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), @@ -6308,12 +12130,11 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): client = AssetServiceClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = 
asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -6324,30 +12145,30 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) - request = asset_service.AnalyzeIamPolicyLongrunningRequest() + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): +def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request_init = {'scope': 'sample1/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a 
BadRequest error. @@ -6357,15 +12178,126 @@ def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest' response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.analyze_iam_policy_longrunning(request) + client.analyze_org_policy_governed_assets(request) -def test_analyze_iam_policy_longrunning_rest_error(): +def test_analyze_org_policy_governed_assets_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.analyze_org_policy_governed_assets(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets" % client.transport._host, args[1]) + + +def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.analyze_org_policy_governed_assets( + asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(), + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + ) + + +def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='abc', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[], + next_page_token='def', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + next_page_token='ghi', + ), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + governed_assets=[ + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'scope': 'sample1/sample2'} + + pager = client.analyze_org_policy_governed_assets(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset) + for i in results) + + pages = list(client.analyze_org_policy_governed_assets(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + def test_credentials_transport_error(): # It is an error to provide 
credentials and a transport instance. @@ -6506,6 +12438,17 @@ def test_asset_service_base_transport(): 'search_all_iam_policies', 'analyze_iam_policy', 'analyze_iam_policy_longrunning', + 'analyze_move', + 'query_assets', + 'create_saved_query', + 'get_saved_query', + 'list_saved_queries', + 'update_saved_query', + 'delete_saved_query', + 'batch_get_effective_iam_policies', + 'analyze_org_policies', + 'analyze_org_policy_governed_containers', + 'analyze_org_policy_governed_assets', ) for method in methods: with pytest.raises(NotImplementedError): @@ -6802,6 +12745,39 @@ def test_asset_service_client_transport_session_collision(transport_name): session1 = client1.transport.analyze_iam_policy_longrunning._session session2 = client2.transport.analyze_iam_policy_longrunning._session assert session1 != session2 + session1 = client1.transport.analyze_move._session + session2 = client2.transport.analyze_move._session + assert session1 != session2 + session1 = client1.transport.query_assets._session + session2 = client2.transport.query_assets._session + assert session1 != session2 + session1 = client1.transport.create_saved_query._session + session2 = client2.transport.create_saved_query._session + assert session1 != session2 + session1 = client1.transport.get_saved_query._session + session2 = client2.transport.get_saved_query._session + assert session1 != session2 + session1 = client1.transport.list_saved_queries._session + session2 = client2.transport.list_saved_queries._session + assert session1 != session2 + session1 = client1.transport.update_saved_query._session + session2 = client2.transport.update_saved_query._session + assert session1 != session2 + session1 = client1.transport.delete_saved_query._session + session2 = client2.transport.delete_saved_query._session + assert session1 != session2 + session1 = client1.transport.batch_get_effective_iam_policies._session + session2 = client2.transport.batch_get_effective_iam_policies._session + assert session1 != session2 
+ session1 = client1.transport.analyze_org_policies._session + session2 = client2.transport.analyze_org_policies._session + assert session1 != session2 + session1 = client1.transport.analyze_org_policy_governed_containers._session + session2 = client2.transport.analyze_org_policy_governed_containers._session + assert session1 != session2 + session1 = client1.transport.analyze_org_policy_governed_assets._session + session2 = client2.transport.analyze_org_policy_governed_assets._session + assert session1 != session2 def test_asset_service_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -6946,6 +12922,42 @@ def test_asset_service_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_access_level_path(): + access_policy = "squid" + access_level = "clam" + expected = "accessPolicies/{access_policy}/accessLevels/{access_level}".format(access_policy=access_policy, access_level=access_level, ) + actual = AssetServiceClient.access_level_path(access_policy, access_level) + assert expected == actual + + +def test_parse_access_level_path(): + expected = { + "access_policy": "whelk", + "access_level": "octopus", + } + path = AssetServiceClient.access_level_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_access_level_path(path) + assert expected == actual + +def test_access_policy_path(): + access_policy = "oyster" + expected = "accessPolicies/{access_policy}".format(access_policy=access_policy, ) + actual = AssetServiceClient.access_policy_path(access_policy) + assert expected == actual + + +def test_parse_access_policy_path(): + expected = { + "access_policy": "nudibranch", + } + path = AssetServiceClient.access_policy_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_access_policy_path(path) + assert expected == actual + def test_asset_path(): expected = "*".format() actual = AssetServiceClient.asset_path() @@ -6962,8 +12974,8 @@ def test_parse_asset_path(): assert expected == actual def test_feed_path(): - project = "squid" - feed = "clam" + project = "cuttlefish" + feed = "mussel" expected = "projects/{project}/feeds/{feed}".format(project=project, feed=feed, ) actual = AssetServiceClient.feed_path(project, feed) assert expected == actual @@ -6971,8 +12983,8 @@ def test_feed_path(): def test_parse_feed_path(): expected = { - "project": "whelk", - "feed": "octopus", + "project": "winkle", + "feed": "nautilus", } path = AssetServiceClient.feed_path(**expected) @@ -6980,8 +12992,67 @@ def test_parse_feed_path(): actual = AssetServiceClient.parse_feed_path(path) assert expected == actual +def test_inventory_path(): + project = "scallop" + location = "abalone" + instance = "squid" + expected = "projects/{project}/locations/{location}/instances/{instance}/inventory".format(project=project, location=location, instance=instance, ) + actual = AssetServiceClient.inventory_path(project, location, instance) + assert expected == actual + + +def test_parse_inventory_path(): + expected = { + "project": "clam", + "location": "whelk", + "instance": "octopus", + } + path = AssetServiceClient.inventory_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_inventory_path(path) + assert expected == actual + +def test_saved_query_path(): + project = "oyster" + saved_query = "nudibranch" + expected = "projects/{project}/savedQueries/{saved_query}".format(project=project, saved_query=saved_query, ) + actual = AssetServiceClient.saved_query_path(project, saved_query) + assert expected == actual + + +def test_parse_saved_query_path(): + expected = { + "project": "cuttlefish", + "saved_query": "mussel", + } + path = AssetServiceClient.saved_query_path(**expected) + + # Check that the path construction is reversible. + actual = AssetServiceClient.parse_saved_query_path(path) + assert expected == actual + +def test_service_perimeter_path(): + access_policy = "winkle" + service_perimeter = "nautilus" + expected = "accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}".format(access_policy=access_policy, service_perimeter=service_perimeter, ) + actual = AssetServiceClient.service_perimeter_path(access_policy, service_perimeter) + assert expected == actual + + +def test_parse_service_perimeter_path(): + expected = { + "access_policy": "scallop", + "service_perimeter": "abalone", + } + path = AssetServiceClient.service_perimeter_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AssetServiceClient.parse_service_perimeter_path(path) + assert expected == actual + def test_common_billing_account_path(): - billing_account = "oyster" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) actual = AssetServiceClient.common_billing_account_path(billing_account) assert expected == actual @@ -6989,7 +13060,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nudibranch", + "billing_account": "clam", } path = AssetServiceClient.common_billing_account_path(**expected) @@ -6998,7 +13069,7 @@ def test_parse_common_billing_account_path(): assert expected == actual def test_common_folder_path(): - folder = "cuttlefish" + folder = "whelk" expected = "folders/{folder}".format(folder=folder, ) actual = AssetServiceClient.common_folder_path(folder) assert expected == actual @@ -7006,7 +13077,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "mussel", + "folder": "octopus", } path = AssetServiceClient.common_folder_path(**expected) @@ -7015,7 +13086,7 @@ def test_parse_common_folder_path(): assert expected == actual def test_common_organization_path(): - organization = "winkle" + organization = "oyster" expected = "organizations/{organization}".format(organization=organization, ) actual = AssetServiceClient.common_organization_path(organization) assert expected == actual @@ -7023,7 +13094,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nautilus", + "organization": "nudibranch", } path = AssetServiceClient.common_organization_path(**expected) @@ -7032,7 +13103,7 @@ def test_parse_common_organization_path(): assert expected == actual def test_common_project_path(): - project = "scallop" + project = "cuttlefish" expected = "projects/{project}".format(project=project, ) actual = 
AssetServiceClient.common_project_path(project) assert expected == actual @@ -7040,7 +13111,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "abalone", + "project": "mussel", } path = AssetServiceClient.common_project_path(**expected) @@ -7049,8 +13120,8 @@ def test_parse_common_project_path(): assert expected == actual def test_common_location_path(): - project = "squid" - location = "clam" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) actual = AssetServiceClient.common_location_path(project, location) assert expected == actual @@ -7058,8 +13129,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "whelk", - "location": "octopus", + "project": "scallop", + "location": "abalone", } path = AssetServiceClient.common_location_path(**expected) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py index db48ba5fdfe8..76921a7ee06c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -21,33 +21,81 @@ from google.cloud.eventarc_v1.services.eventarc.client import EventarcClient from google.cloud.eventarc_v1.services.eventarc.async_client import EventarcAsyncClient +from google.cloud.eventarc_v1.types.channel import Channel +from google.cloud.eventarc_v1.types.channel_connection import ChannelConnection +from google.cloud.eventarc_v1.types.discovery import EventType +from google.cloud.eventarc_v1.types.discovery import FilteringAttribute +from google.cloud.eventarc_v1.types.discovery import Provider +from google.cloud.eventarc_v1.types.eventarc import 
CreateChannelConnectionRequest +from google.cloud.eventarc_v1.types.eventarc import CreateChannelRequest from google.cloud.eventarc_v1.types.eventarc import CreateTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import DeleteChannelConnectionRequest +from google.cloud.eventarc_v1.types.eventarc import DeleteChannelRequest from google.cloud.eventarc_v1.types.eventarc import DeleteTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import GetChannelConnectionRequest +from google.cloud.eventarc_v1.types.eventarc import GetChannelRequest +from google.cloud.eventarc_v1.types.eventarc import GetGoogleChannelConfigRequest +from google.cloud.eventarc_v1.types.eventarc import GetProviderRequest from google.cloud.eventarc_v1.types.eventarc import GetTriggerRequest +from google.cloud.eventarc_v1.types.eventarc import ListChannelConnectionsRequest +from google.cloud.eventarc_v1.types.eventarc import ListChannelConnectionsResponse +from google.cloud.eventarc_v1.types.eventarc import ListChannelsRequest +from google.cloud.eventarc_v1.types.eventarc import ListChannelsResponse +from google.cloud.eventarc_v1.types.eventarc import ListProvidersRequest +from google.cloud.eventarc_v1.types.eventarc import ListProvidersResponse from google.cloud.eventarc_v1.types.eventarc import ListTriggersRequest from google.cloud.eventarc_v1.types.eventarc import ListTriggersResponse from google.cloud.eventarc_v1.types.eventarc import OperationMetadata +from google.cloud.eventarc_v1.types.eventarc import UpdateChannelRequest +from google.cloud.eventarc_v1.types.eventarc import UpdateGoogleChannelConfigRequest from google.cloud.eventarc_v1.types.eventarc import UpdateTriggerRequest +from google.cloud.eventarc_v1.types.google_channel_config import GoogleChannelConfig from google.cloud.eventarc_v1.types.trigger import CloudRun from google.cloud.eventarc_v1.types.trigger import Destination from google.cloud.eventarc_v1.types.trigger import EventFilter +from 
google.cloud.eventarc_v1.types.trigger import GKE from google.cloud.eventarc_v1.types.trigger import Pubsub +from google.cloud.eventarc_v1.types.trigger import StateCondition from google.cloud.eventarc_v1.types.trigger import Transport from google.cloud.eventarc_v1.types.trigger import Trigger __all__ = ('EventarcClient', 'EventarcAsyncClient', + 'Channel', + 'ChannelConnection', + 'EventType', + 'FilteringAttribute', + 'Provider', + 'CreateChannelConnectionRequest', + 'CreateChannelRequest', 'CreateTriggerRequest', + 'DeleteChannelConnectionRequest', + 'DeleteChannelRequest', 'DeleteTriggerRequest', + 'GetChannelConnectionRequest', + 'GetChannelRequest', + 'GetGoogleChannelConfigRequest', + 'GetProviderRequest', 'GetTriggerRequest', + 'ListChannelConnectionsRequest', + 'ListChannelConnectionsResponse', + 'ListChannelsRequest', + 'ListChannelsResponse', + 'ListProvidersRequest', + 'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', + 'UpdateChannelRequest', + 'UpdateGoogleChannelConfigRequest', 'UpdateTriggerRequest', + 'GoogleChannelConfig', 'CloudRun', 'Destination', 'EventFilter', + 'GKE', 'Pubsub', + 'StateCondition', 'Transport', 'Trigger', ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index a9881629c6ad..5b322f29c5d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -21,34 +21,82 @@ from .services.eventarc import EventarcClient from .services.eventarc import EventarcAsyncClient +from .types.channel import Channel +from .types.channel_connection import ChannelConnection +from .types.discovery import EventType +from .types.discovery import FilteringAttribute +from .types.discovery import Provider 
+from .types.eventarc import CreateChannelConnectionRequest +from .types.eventarc import CreateChannelRequest from .types.eventarc import CreateTriggerRequest +from .types.eventarc import DeleteChannelConnectionRequest +from .types.eventarc import DeleteChannelRequest from .types.eventarc import DeleteTriggerRequest +from .types.eventarc import GetChannelConnectionRequest +from .types.eventarc import GetChannelRequest +from .types.eventarc import GetGoogleChannelConfigRequest +from .types.eventarc import GetProviderRequest from .types.eventarc import GetTriggerRequest +from .types.eventarc import ListChannelConnectionsRequest +from .types.eventarc import ListChannelConnectionsResponse +from .types.eventarc import ListChannelsRequest +from .types.eventarc import ListChannelsResponse +from .types.eventarc import ListProvidersRequest +from .types.eventarc import ListProvidersResponse from .types.eventarc import ListTriggersRequest from .types.eventarc import ListTriggersResponse from .types.eventarc import OperationMetadata +from .types.eventarc import UpdateChannelRequest +from .types.eventarc import UpdateGoogleChannelConfigRequest from .types.eventarc import UpdateTriggerRequest +from .types.google_channel_config import GoogleChannelConfig from .types.trigger import CloudRun from .types.trigger import Destination from .types.trigger import EventFilter +from .types.trigger import GKE from .types.trigger import Pubsub +from .types.trigger import StateCondition from .types.trigger import Transport from .types.trigger import Trigger __all__ = ( 'EventarcAsyncClient', +'Channel', +'ChannelConnection', 'CloudRun', +'CreateChannelConnectionRequest', +'CreateChannelRequest', 'CreateTriggerRequest', +'DeleteChannelConnectionRequest', +'DeleteChannelRequest', 'DeleteTriggerRequest', 'Destination', 'EventFilter', +'EventType', 'EventarcClient', +'FilteringAttribute', +'GKE', +'GetChannelConnectionRequest', +'GetChannelRequest', +'GetGoogleChannelConfigRequest', 
+'GetProviderRequest', 'GetTriggerRequest', +'GoogleChannelConfig', +'ListChannelConnectionsRequest', +'ListChannelConnectionsResponse', +'ListChannelsRequest', +'ListChannelsResponse', +'ListProvidersRequest', +'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', +'Provider', 'Pubsub', +'StateCondition', 'Transport', 'Trigger', +'UpdateChannelRequest', +'UpdateGoogleChannelConfigRequest', 'UpdateTriggerRequest', ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json index f9d69749721b..e56055316014 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_metadata.json @@ -10,26 +10,91 @@ "grpc": { "libraryClient": "EventarcClient", "rpcs": { + "CreateChannel": { + "methods": [ + "create_channel" + ] + }, + "CreateChannelConnection": { + "methods": [ + "create_channel_connection" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" ] }, + "DeleteChannel": { + "methods": [ + "delete_channel" + ] + }, + "DeleteChannelConnection": { + "methods": [ + "delete_channel_connection" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" ] }, + "GetChannel": { + "methods": [ + "get_channel" + ] + }, + "GetChannelConnection": { + "methods": [ + "get_channel_connection" + ] + }, + "GetGoogleChannelConfig": { + "methods": [ + "get_google_channel_config" + ] + }, + "GetProvider": { + "methods": [ + "get_provider" + ] + }, "GetTrigger": { "methods": [ "get_trigger" ] }, + "ListChannelConnections": { + "methods": [ + "list_channel_connections" + ] + }, + "ListChannels": { + "methods": [ + "list_channels" + ] + }, + "ListProviders": { + "methods": [ + "list_providers" + ] + }, "ListTriggers": { "methods": [ 
"list_triggers" ] }, + "UpdateChannel": { + "methods": [ + "update_channel" + ] + }, + "UpdateGoogleChannelConfig": { + "methods": [ + "update_google_channel_config" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" @@ -40,26 +105,91 @@ "grpc-async": { "libraryClient": "EventarcAsyncClient", "rpcs": { + "CreateChannel": { + "methods": [ + "create_channel" + ] + }, + "CreateChannelConnection": { + "methods": [ + "create_channel_connection" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" ] }, + "DeleteChannel": { + "methods": [ + "delete_channel" + ] + }, + "DeleteChannelConnection": { + "methods": [ + "delete_channel_connection" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" ] }, + "GetChannel": { + "methods": [ + "get_channel" + ] + }, + "GetChannelConnection": { + "methods": [ + "get_channel_connection" + ] + }, + "GetGoogleChannelConfig": { + "methods": [ + "get_google_channel_config" + ] + }, + "GetProvider": { + "methods": [ + "get_provider" + ] + }, "GetTrigger": { "methods": [ "get_trigger" ] }, + "ListChannelConnections": { + "methods": [ + "list_channel_connections" + ] + }, + "ListChannels": { + "methods": [ + "list_channels" + ] + }, + "ListProviders": { + "methods": [ + "list_providers" + ] + }, "ListTriggers": { "methods": [ "list_triggers" ] }, + "UpdateChannel": { + "methods": [ + "update_channel" + ] + }, + "UpdateGoogleChannelConfig": { + "methods": [ + "update_google_channel_config" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" @@ -70,26 +200,91 @@ "rest": { "libraryClient": "EventarcClient", "rpcs": { + "CreateChannel": { + "methods": [ + "create_channel" + ] + }, + "CreateChannelConnection": { + "methods": [ + "create_channel_connection" + ] + }, "CreateTrigger": { "methods": [ "create_trigger" ] }, + "DeleteChannel": { + "methods": [ + "delete_channel" + ] + }, + "DeleteChannelConnection": { + "methods": [ + "delete_channel_connection" + ] + }, "DeleteTrigger": { "methods": [ "delete_trigger" ] }, + 
"GetChannel": { + "methods": [ + "get_channel" + ] + }, + "GetChannelConnection": { + "methods": [ + "get_channel_connection" + ] + }, + "GetGoogleChannelConfig": { + "methods": [ + "get_google_channel_config" + ] + }, + "GetProvider": { + "methods": [ + "get_provider" + ] + }, "GetTrigger": { "methods": [ "get_trigger" ] }, + "ListChannelConnections": { + "methods": [ + "list_channel_connections" + ] + }, + "ListChannels": { + "methods": [ + "list_channels" + ] + }, + "ListProviders": { + "methods": [ + "list_providers" + ] + }, "ListTriggers": { "methods": [ "list_triggers" ] }, + "UpdateChannel": { + "methods": [ + "update_channel" + ] + }, + "UpdateGoogleChannelConfig": { + "methods": [ + "update_google_channel_config" + ] + }, "UpdateTrigger": { "methods": [ "update_trigger" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 77bf35afad16..aae8f9df411f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -35,7 +35,14 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from 
google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 # type: ignore @@ -59,12 +66,26 @@ class EventarcAsyncClient: DEFAULT_ENDPOINT = EventarcClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = EventarcClient.DEFAULT_MTLS_ENDPOINT + channel_path = staticmethod(EventarcClient.channel_path) + parse_channel_path = staticmethod(EventarcClient.parse_channel_path) + channel_connection_path = staticmethod(EventarcClient.channel_connection_path) + parse_channel_connection_path = staticmethod(EventarcClient.parse_channel_connection_path) + cloud_function_path = staticmethod(EventarcClient.cloud_function_path) + parse_cloud_function_path = staticmethod(EventarcClient.parse_cloud_function_path) + crypto_key_path = staticmethod(EventarcClient.crypto_key_path) + parse_crypto_key_path = staticmethod(EventarcClient.parse_crypto_key_path) + google_channel_config_path = staticmethod(EventarcClient.google_channel_config_path) + parse_google_channel_config_path = staticmethod(EventarcClient.parse_google_channel_config_path) + provider_path = staticmethod(EventarcClient.provider_path) + parse_provider_path = staticmethod(EventarcClient.parse_provider_path) service_path = staticmethod(EventarcClient.service_path) parse_service_path = staticmethod(EventarcClient.parse_service_path) service_account_path = staticmethod(EventarcClient.service_account_path) parse_service_account_path = staticmethod(EventarcClient.parse_service_account_path) trigger_path = staticmethod(EventarcClient.trigger_path) parse_trigger_path = staticmethod(EventarcClient.parse_trigger_path) + workflow_path = staticmethod(EventarcClient.workflow_path) + parse_workflow_path = staticmethod(EventarcClient.parse_workflow_path) common_billing_account_path = staticmethod(EventarcClient.common_billing_account_path) 
parse_common_billing_account_path = staticmethod(EventarcClient.parse_common_billing_account_path) common_folder_path = staticmethod(EventarcClient.common_folder_path) @@ -357,11 +378,10 @@ async def sample_list_triggers(): Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager: - The response message for the - ListTriggers method. - Iterating over this object will yield - results and resolve additional pages - automatically. + The response message for the ListTriggers method. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -613,9 +633,9 @@ async def sample_update_trigger(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): The fields to be updated; only fields explicitly - provided will be updated. If no field mask is provided, - all provided fields in the request will be updated. To - update all fields, provide a field mask of "*". + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -825,6 +845,1544 @@ async def sample_delete_trigger(): # Done; return the response. return response + async def get_channel(self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> channel.Channel: + r"""Get a single Channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetChannelRequest, dict]]): + The request object. The request message for the + GetChannel method. + name (:class:`str`): + Required. The name of the channel to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. Note that a + channel is associated with exactly one + event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_channel, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_channels(self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelsAsyncPager: + r"""List channels. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListChannelsRequest, dict]]): + The request object. The request message for the + ListChannels method. + parent (:class:`str`): + Required. The parent collection to + list channels on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager: + The response message for the ListChannels method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.ListChannelsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_channels, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChannelsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_channel(self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new channel in a particular project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateChannelRequest, dict]]): + The request object. The request message for the + CreateChannel method. + parent (:class:`str`): + Required. The parent collection in + which to add this channel. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel (:class:`google.cloud.eventarc_v1.types.Channel`): + Required. The channel to create. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_id (:class:`str`): + Required. The user-provided ID to be + assigned to the channel. + + This corresponds to the ``channel_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, channel, channel_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.CreateChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel is not None: + request.channel = channel + if channel_id is not None: + request.channel_id = channel_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_channel_, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_channel(self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Update a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateChannelRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateChannelRequest, dict]]): + The request object. The request message for the + UpdateChannel method. + channel (:class:`google.cloud.eventarc_v1.types.Channel`): + The channel to be updated. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([channel, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.UpdateChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if channel is not None: + request.channel = channel + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_channel, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("channel.name", request.channel.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_channel(self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteChannelRequest, dict]]): + The request object. The request message for the + DeleteChannel method. + name (:class:`str`): + Required. The name of the channel to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.DeleteChannelRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_channel, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_provider(self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discovery.Provider: + r"""Get a single Provider. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = await client.get_provider(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetProviderRequest, dict]]): + The request object. The request message for the + GetProvider method. + name (:class:`str`): + Required. The name of the provider to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Provider: + A representation of the Provider + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetProviderRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_provider, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_providers(self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProvidersAsyncPager: + r"""List providers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_providers(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListProvidersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_providers(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListProvidersRequest, dict]]): + The request object. The request message for the + ListProviders method. + parent (:class:`str`): + Required. The parent of the provider + to get. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager: + The response message for the ListProviders method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.ListProvidersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_providers, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListProvidersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_channel_connection(self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> channel_connection.ChannelConnection: + r"""Get a single ChannelConnection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetChannelConnectionRequest, dict]]): + The request object. The request message for the + GetChannelConnection method. + name (:class:`str`): + Required. The name of the channel + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetChannelConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_channel_connection, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_channel_connections(self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelConnectionsAsyncPager: + r"""List channel connections. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.ListChannelConnectionsRequest, dict]]): + The request object. The request message for the + ListChannelConnections method. + parent (:class:`str`): + Required. The parent collection from + which to list channel connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager: + The response message for the ListChannelConnections + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.ListChannelConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_channel_connections, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListChannelConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_channel_connection(self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Create a new ChannelConnection in a particular + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.CreateChannelConnectionRequest, dict]]): + The request object. The request message for the + CreateChannelConnection method. + parent (:class:`str`): + Required. 
The parent collection in + which to add this channel connection. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection (:class:`google.cloud.eventarc_v1.types.ChannelConnection`): + Required. Channel connection to + create. + + This corresponds to the ``channel_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection_id (:class:`str`): + Required. The user-provided ID to be + assigned to the channel connection. + + This corresponds to the ``channel_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, channel_connection, channel_connection_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.CreateChannelConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel_connection is not None: + request.channel_connection = channel_connection + if channel_connection_id is not None: + request.channel_connection_id = channel_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_channel_connection, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + gce_channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_channel_connection(self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Delete a single ChannelConnection. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest, dict]]): + The request object. The request message for the + DeleteChannelConnection method. + name (:class:`str`): + Required. The name of the channel + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. 
+ A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.DeleteChannelConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_channel_connection, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_google_channel_config(self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> google_channel_config.GoogleChannelConfig: + r"""Get a GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest, dict]]): + The request object. The request message for the + GetGoogleChannelConfig method. + name (:class:`str`): + Required. The name of the config to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.GetGoogleChannelConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_google_channel_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_google_channel_config(self, + request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, + *, + google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Update a single GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + async def sample_update_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + google_channel_config = eventarc_v1.GoogleChannelConfig() + google_channel_config.name = "name_value" + + request = eventarc_v1.UpdateGoogleChannelConfigRequest( + google_channel_config=google_channel_config, + ) + + # Make the request + response = await client.update_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest, dict]]): + The request object. The request message for the + UpdateGoogleChannelConfig method. + google_channel_config (:class:`google.cloud.eventarc_v1.types.GoogleChannelConfig`): + Required. The config to be updated. + This corresponds to the ``google_channel_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([google_channel_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = eventarc.UpdateGoogleChannelConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if google_channel_config is not None: + request.google_channel_config = google_channel_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_google_channel_config, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("google_channel_config.name", request.google_channel_config.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def __aenter__(self) -> "EventarcAsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 0049fbba676e..404f72b0bee1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -38,7 +38,14 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.eventarc_v1.services.eventarc import pagers +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as 
gce_trigger from google.cloud.location import locations_pb2 # type: ignore @@ -174,6 +181,72 @@ def transport(self) -> EventarcTransport: """ return self._transport + @staticmethod + def channel_path(project: str,location: str,channel: str,) -> str: + """Returns a fully-qualified channel string.""" + return "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + + @staticmethod + def parse_channel_path(path: str) -> Dict[str,str]: + """Parses a channel path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/channels/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def channel_connection_path(project: str,location: str,channel_connection: str,) -> str: + """Returns a fully-qualified channel_connection string.""" + return "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + + @staticmethod + def parse_channel_connection_path(path: str) -> Dict[str,str]: + """Parses a channel_connection path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/channelConnections/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def cloud_function_path(project: str,location: str,function: str,) -> str: + """Returns a fully-qualified cloud_function string.""" + return "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + + @staticmethod + def parse_cloud_function_path(path: str) -> Dict[str,str]: + """Parses a cloud_function path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/functions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def crypto_key_path(project: str,location: str,key_ring: str,crypto_key: str,) -> str: + """Returns a fully-qualified crypto_key 
string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str,str]: + """Parses a crypto_key path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def google_channel_config_path(project: str,location: str,) -> str: + """Returns a fully-qualified google_channel_config string.""" + return "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + + @staticmethod + def parse_google_channel_config_path(path: str) -> Dict[str,str]: + """Parses a google_channel_config path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/googleChannelConfig$", path) + return m.groupdict() if m else {} + + @staticmethod + def provider_path(project: str,location: str,provider: str,) -> str: + """Returns a fully-qualified provider string.""" + return "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + + @staticmethod + def parse_provider_path(path: str) -> Dict[str,str]: + """Parses a provider path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/providers/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def service_path() -> str: """Returns a fully-qualified service string.""" @@ -207,6 +280,17 @@ def parse_trigger_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/triggers/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def workflow_path(project: str,location: str,workflow: str,) -> str: + """Returns a fully-qualified workflow string.""" + return 
"projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + + @staticmethod + def parse_workflow_path(path: str) -> Dict[str,str]: + """Parses a workflow path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/workflows/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str, ) -> str: """Returns a fully-qualified billing_account string.""" @@ -569,11 +653,10 @@ def sample_list_triggers(): Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager: - The response message for the - ListTriggers method. - Iterating over this object will yield - results and resolve additional pages - automatically. + The response message for the ListTriggers method. + + Iterating over this object will yield results and + resolve additional pages automatically. """ # Create or coerce a protobuf request object. @@ -825,9 +908,9 @@ def sample_update_trigger(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): The fields to be updated; only fields explicitly - provided will be updated. If no field mask is provided, - all provided fields in the request will be updated. To - update all fields, provide a field mask of "*". + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1037,6 +1120,1544 @@ def sample_delete_trigger(): # Done; return the response. 
return response + def get_channel(self, + request: Optional[Union[eventarc.GetChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> channel.Channel: + r"""Get a single Channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetChannelRequest, dict]): + The request object. The request message for the + GetChannel method. + name (str): + Required. The name of the channel to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. 
Note that a + channel is associated with exactly one + event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetChannelRequest): + request = eventarc.GetChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_channels(self, + request: Optional[Union[eventarc.ListChannelsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelsPager: + r"""List channels. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListChannelsRequest, dict]): + The request object. The request message for the + ListChannels method. + parent (str): + Required. The parent collection to + list channels on. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager: + The response message for the ListChannels method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListChannelsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.ListChannelsRequest): + request = eventarc.ListChannelsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_channels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChannelsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_channel(self, + request: Optional[Union[eventarc.CreateChannelRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel: Optional[gce_channel.Channel] = None, + channel_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new channel in a particular project and + location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateChannelRequest, dict]): + The request object. The request message for the + CreateChannel method. + parent (str): + Required. The parent collection in + which to add this channel. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel (google.cloud.eventarc_v1.types.Channel): + Required. The channel to create. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_id (str): + Required. The user-provided ID to be + assigned to the channel. + + This corresponds to the ``channel_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, channel, channel_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.CreateChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.CreateChannelRequest): + request = eventarc.CreateChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel is not None: + request.channel = channel + if channel_id is not None: + request.channel_id = channel_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_channel_] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_channel(self, + request: Optional[Union[eventarc.UpdateChannelRequest, dict]] = None, + *, + channel: Optional[gce_channel.Channel] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Update a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.UpdateChannelRequest( + validate_only=True, + ) + + # Make the request + operation = client.update_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateChannelRequest, dict]): + The request object. The request message for the + UpdateChannel method. 
+ channel (google.cloud.eventarc_v1.types.Channel): + The channel to be updated. + This corresponds to the ``channel`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([channel, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.UpdateChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, eventarc.UpdateChannelRequest): + request = eventarc.UpdateChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if channel is not None: + request.channel = channel + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("channel.name", request.channel.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_channel(self, + request: Optional[Union[eventarc.DeleteChannelRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete a single channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteChannelRequest, dict]): + The request object. The request message for the + DeleteChannel method. + name (str): + Required. The name of the channel to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.Channel` A representation of the Channel resource. + A Channel is a resource on which event providers + publish their events. The published events are + delivered through the transport associated with the + channel. Note that a channel is associated with + exactly one event provider. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.DeleteChannelRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.DeleteChannelRequest): + request = eventarc.DeleteChannelRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_channel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + channel.Channel, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_provider(self, + request: Optional[Union[eventarc.GetProviderRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> discovery.Provider: + r"""Get a single Provider. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = client.get_provider(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetProviderRequest, dict]): + The request object. The request message for the + GetProvider method. + name (str): + Required. The name of the provider to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.Provider: + A representation of the Provider + resource. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetProviderRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, eventarc.GetProviderRequest): + request = eventarc.GetProviderRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_provider] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_providers(self, + request: Optional[Union[eventarc.ListProvidersRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListProvidersPager: + r"""List providers. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_providers(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListProvidersRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_providers(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListProvidersRequest, dict]): + The request object. The request message for the + ListProviders method. + parent (str): + Required. The parent of the provider + to get. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager: + The response message for the ListProviders method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListProvidersRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.ListProvidersRequest): + request = eventarc.ListProvidersRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_providers] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListProvidersPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_channel_connection(self, + request: Optional[Union[eventarc.GetChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> channel_connection.ChannelConnection: + r"""Get a single ChannelConnection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetChannelConnectionRequest, dict]): + The request object. The request message for the + GetChannelConnection method. + name (str): + Required. The name of the channel + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetChannelConnectionRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetChannelConnectionRequest): + request = eventarc.GetChannelConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_channel_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_channel_connections(self, + request: Optional[Union[eventarc.ListChannelConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListChannelConnectionsPager: + r"""List channel connections. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.ListChannelConnectionsRequest, dict]): + The request object. The request message for the + ListChannelConnections method. + parent (str): + Required. The parent collection from + which to list channel connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager: + The response message for the ListChannelConnections + method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.ListChannelConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.ListChannelConnectionsRequest): + request = eventarc.ListChannelConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_channel_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListChannelConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_channel_connection(self, + request: Optional[Union[eventarc.CreateChannelConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + channel_connection: Optional[gce_channel_connection.ChannelConnection] = None, + channel_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Create a new ChannelConnection in a particular + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.CreateChannelConnectionRequest, dict]): + The request object. The request message for the + CreateChannelConnection method. + parent (str): + Required. The parent collection in + which to add this channel connection. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection (google.cloud.eventarc_v1.types.ChannelConnection): + Required. Channel connection to + create. + + This corresponds to the ``channel_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + channel_connection_id (str): + Required. The user-provided ID to be + assigned to the channel connection. + + This corresponds to the ``channel_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, channel_connection, channel_connection_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.CreateChannelConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, eventarc.CreateChannelConnectionRequest): + request = eventarc.CreateChannelConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if channel_connection is not None: + request.channel_connection = channel_connection + if channel_connection_id is not None: + request.channel_connection_id = channel_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_channel_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + gce_channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_channel_connection(self, + request: Optional[Union[eventarc.DeleteChannelConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Delete a single ChannelConnection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest, dict]): + The request object. The request message for the + DeleteChannelConnection method. + name (str): + Required. The name of the channel + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.eventarc_v1.types.ChannelConnection` A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event + providers create during the activation process to + establish a connection between the provider and the + subscriber channel. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.DeleteChannelConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.DeleteChannelConnectionRequest): + request = eventarc.DeleteChannelConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_channel_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + channel_connection.ChannelConnection, + metadata_type=eventarc.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_google_channel_config(self, + request: Optional[Union[eventarc.GetGoogleChannelConfigRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> google_channel_config.GoogleChannelConfig: + r"""Get a GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest, dict]): + The request object. The request message for the + GetGoogleChannelConfig method. + name (str): + Required. The name of the config to + get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.GetGoogleChannelConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.GetGoogleChannelConfigRequest): + request = eventarc.GetGoogleChannelConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_google_channel_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_google_channel_config(self, + request: Optional[Union[eventarc.UpdateGoogleChannelConfigRequest, dict]] = None, + *, + google_channel_config: Optional[gce_google_channel_config.GoogleChannelConfig] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Update a single GoogleChannelConfig + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import eventarc_v1 + + def sample_update_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + google_channel_config = eventarc_v1.GoogleChannelConfig() + google_channel_config.name = "name_value" + + request = eventarc_v1.UpdateGoogleChannelConfigRequest( + google_channel_config=google_channel_config, + ) + + # Make the request + response = client.update_google_channel_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest, dict]): + The request object. The request message for the + UpdateGoogleChannelConfig method. + google_channel_config (google.cloud.eventarc_v1.types.GoogleChannelConfig): + Required. The config to be updated. + This corresponds to the ``google_channel_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly + provided are updated. If no field mask is provided, all + provided fields in the request are updated. To update + all fields, provide a field mask of "*". + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.eventarc_v1.types.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([google_channel_config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a eventarc.UpdateGoogleChannelConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, eventarc.UpdateGoogleChannelConfigRequest): + request = eventarc.UpdateGoogleChannelConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if google_channel_config is not None: + request.google_channel_config = google_channel_config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_google_channel_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("google_channel_config.name", request.google_channel_config.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "EventarcClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 1956f7bb1ade..773f83d8d86c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -15,6 +15,9 @@ # from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc from google.cloud.eventarc_v1.types import trigger @@ -138,3 +141,366 @@ async def async_generator(): def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelsPager: + """A pager for iterating through ``list_channels`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``channels`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChannels`` requests and continue to iterate + through the ``channels`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., eventarc.ListChannelsResponse], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListChannelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[channel.Channel]: + for page in self.pages: + yield from page.channels + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelsAsyncPager: + """A pager for iterating through ``list_channels`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``channels`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListChannels`` requests and continue to iterate + through the ``channels`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListChannelsResponse]], + request: eventarc.ListChannelsRequest, + response: eventarc.ListChannelsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListChannelsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[channel.Channel]: + async def async_generator(): + async for page in self.pages: + for response in page.channels: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListProvidersPager: + """A pager for iterating through ``list_providers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``providers`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListProviders`` requests and continue to iterate + through the ``providers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListProvidersResponse], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListProvidersRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListProvidersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListProvidersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListProvidersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[discovery.Provider]: + for page in self.pages: + yield from page.providers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListProvidersAsyncPager: + """A pager for iterating through ``list_providers`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``providers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListProviders`` requests and continue to iterate + through the ``providers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListProvidersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListProvidersResponse]], + request: eventarc.ListProvidersRequest, + response: eventarc.ListProvidersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListProvidersRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListProvidersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = eventarc.ListProvidersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListProvidersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[discovery.Provider]: + async def async_generator(): + async for page in self.pages: + for response in page.providers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelConnectionsPager: + """A pager for iterating through ``list_channel_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``channel_connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListChannelConnections`` requests and continue to iterate + through the ``channel_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., eventarc.ListChannelConnectionsResponse], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelConnectionsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[eventarc.ListChannelConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[channel_connection.ChannelConnection]: + for page in self.pages: + yield from page.channel_connections + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListChannelConnectionsAsyncPager: + """A pager for iterating through ``list_channel_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``channel_connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListChannelConnections`` requests and continue to iterate + through the ``channel_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.eventarc_v1.types.ListChannelConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[eventarc.ListChannelConnectionsResponse]], + request: eventarc.ListChannelConnectionsRequest, + response: eventarc.ListChannelConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.eventarc_v1.types.ListChannelConnectionsRequest): + The initial request object. + response (google.cloud.eventarc_v1.types.ListChannelConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = eventarc.ListChannelConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[eventarc.ListChannelConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[channel_connection.ChannelConnection]: + async def async_generator(): + async for page in self.pages: + for response in page.channel_connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 7a2e60bc0b3c..4f519f422c6c 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -27,7 +27,12 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -144,6 +149,71 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_channel: gapic_v1.method.wrap_method( + self.get_channel, + default_timeout=None, + client_info=client_info, + ), + self.list_channels: gapic_v1.method.wrap_method( + self.list_channels, + default_timeout=None, + client_info=client_info, + ), + self.create_channel_: gapic_v1.method.wrap_method( + self.create_channel_, + default_timeout=None, + client_info=client_info, + ), + self.update_channel: gapic_v1.method.wrap_method( + self.update_channel, + default_timeout=None, + client_info=client_info, + ), + self.delete_channel: gapic_v1.method.wrap_method( + self.delete_channel, + default_timeout=None, + client_info=client_info, + ), + self.get_provider: gapic_v1.method.wrap_method( + self.get_provider, + default_timeout=None, + client_info=client_info, + ), + self.list_providers: gapic_v1.method.wrap_method( + self.list_providers, + default_timeout=None, + client_info=client_info, + ), + 
self.get_channel_connection: gapic_v1.method.wrap_method( + self.get_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.list_channel_connections: gapic_v1.method.wrap_method( + self.list_channel_connections, + default_timeout=None, + client_info=client_info, + ), + self.create_channel_connection: gapic_v1.method.wrap_method( + self.create_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.delete_channel_connection: gapic_v1.method.wrap_method( + self.delete_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.get_google_channel_config: gapic_v1.method.wrap_method( + self.get_google_channel_config, + default_timeout=None, + client_info=client_info, + ), + self.update_google_channel_config: gapic_v1.method.wrap_method( + self.update_google_channel_config, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -205,6 +275,123 @@ def delete_trigger(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + Union[ + channel.Channel, + Awaitable[channel.Channel] + ]]: + raise NotImplementedError() + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + Union[ + eventarc.ListChannelsResponse, + Awaitable[eventarc.ListChannelsResponse] + ]]: + raise NotImplementedError() + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise 
NotImplementedError() + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + Union[ + discovery.Provider, + Awaitable[discovery.Provider] + ]]: + raise NotImplementedError() + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + Union[ + eventarc.ListProvidersResponse, + Awaitable[eventarc.ListProvidersResponse] + ]]: + raise NotImplementedError() + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Union[ + channel_connection.ChannelConnection, + Awaitable[channel_connection.ChannelConnection] + ]]: + raise NotImplementedError() + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Union[ + eventarc.ListChannelConnectionsResponse, + Awaitable[eventarc.ListChannelConnectionsResponse] + ]]: + raise NotImplementedError() + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Union[ + google_channel_config.GoogleChannelConfig, + Awaitable[google_channel_config.GoogleChannelConfig] + ]]: + raise NotImplementedError() + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Union[ + gce_google_channel_config.GoogleChannelConfig, + Awaitable[gce_google_channel_config.GoogleChannelConfig] + ]]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 26f3ff79e742..71712a1d6d3e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -25,7 +25,12 @@ import grpc # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -381,6 +386,346 @@ def delete_trigger(self) -> Callable[ ) return self._stubs['delete_trigger'] + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + channel.Channel]: + r"""Return a callable for the get channel method over gRPC. + + Get a single Channel. + + Returns: + Callable[[~.GetChannelRequest], + ~.Channel]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_channel' not in self._stubs: + self._stubs['get_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannel', + request_serializer=eventarc.GetChannelRequest.serialize, + response_deserializer=channel.Channel.deserialize, + ) + return self._stubs['get_channel'] + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + eventarc.ListChannelsResponse]: + r"""Return a callable for the list channels method over gRPC. + + List channels. + + Returns: + Callable[[~.ListChannelsRequest], + ~.ListChannelsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channels' not in self._stubs: + self._stubs['list_channels'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannels', + request_serializer=eventarc.ListChannelsRequest.serialize, + response_deserializer=eventarc.ListChannelsResponse.deserialize, + ) + return self._stubs['list_channels'] + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + operations_pb2.Operation]: + r"""Return a callable for the create channel method over gRPC. + + Create a new channel in a particular project and + location. + + Returns: + Callable[[~.CreateChannelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_channel_' not in self._stubs: + self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + request_serializer=eventarc.CreateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_'] + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + operations_pb2.Operation]: + r"""Return a callable for the update channel method over gRPC. + + Update a single channel. + + Returns: + Callable[[~.UpdateChannelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_channel' not in self._stubs: + self._stubs['update_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + request_serializer=eventarc.UpdateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_channel'] + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete channel method over gRPC. + + Delete a single channel. + + Returns: + Callable[[~.DeleteChannelRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_channel' not in self._stubs: + self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + request_serializer=eventarc.DeleteChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel'] + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + discovery.Provider]: + r"""Return a callable for the get provider method over gRPC. + + Get a single Provider. + + Returns: + Callable[[~.GetProviderRequest], + ~.Provider]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_provider' not in self._stubs: + self._stubs['get_provider'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetProvider', + request_serializer=eventarc.GetProviderRequest.serialize, + response_deserializer=discovery.Provider.deserialize, + ) + return self._stubs['get_provider'] + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + eventarc.ListProvidersResponse]: + r"""Return a callable for the list providers method over gRPC. + + List providers. + + Returns: + Callable[[~.ListProvidersRequest], + ~.ListProvidersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_providers' not in self._stubs: + self._stubs['list_providers'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListProviders', + request_serializer=eventarc.ListProvidersRequest.serialize, + response_deserializer=eventarc.ListProvidersResponse.deserialize, + ) + return self._stubs['list_providers'] + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + channel_connection.ChannelConnection]: + r"""Return a callable for the get channel connection method over gRPC. + + Get a single ChannelConnection. + + Returns: + Callable[[~.GetChannelConnectionRequest], + ~.ChannelConnection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel_connection' not in self._stubs: + self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + request_serializer=eventarc.GetChannelConnectionRequest.serialize, + response_deserializer=channel_connection.ChannelConnection.deserialize, + ) + return self._stubs['get_channel_connection'] + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + eventarc.ListChannelConnectionsResponse]: + r"""Return a callable for the list channel connections method over gRPC. + + List channel connections. + + Returns: + Callable[[~.ListChannelConnectionsRequest], + ~.ListChannelConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_channel_connections' not in self._stubs: + self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + request_serializer=eventarc.ListChannelConnectionsRequest.serialize, + response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, + ) + return self._stubs['list_channel_connections'] + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the create channel connection method over gRPC. + + Create a new ChannelConnection in a particular + project and location. + + Returns: + Callable[[~.CreateChannelConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_channel_connection' not in self._stubs: + self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + request_serializer=eventarc.CreateChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_connection'] + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete channel connection method over gRPC. + + Delete a single ChannelConnection. + + Returns: + Callable[[~.DeleteChannelConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_channel_connection' not in self._stubs: + self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel_connection'] + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + google_channel_config.GoogleChannelConfig]: + r"""Return a callable for the get google channel config method over gRPC. + + Get a GoogleChannelConfig + + Returns: + Callable[[~.GetGoogleChannelConfigRequest], + ~.GoogleChannelConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_google_channel_config' not in self._stubs: + self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, + response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['get_google_channel_config'] + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + gce_google_channel_config.GoogleChannelConfig]: + r"""Return a callable for the update google channel config method over gRPC. + + Update a single GoogleChannelConfig + + Returns: + Callable[[~.UpdateGoogleChannelConfigRequest], + ~.GoogleChannelConfig]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_google_channel_config' not in self._stubs: + self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', + request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, + response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['update_google_channel_config'] + def close(self): self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 2d8824845bd6..cee7bc068b8c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -25,7 +25,12 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -384,6 +389,346 @@ def delete_trigger(self) -> Callable[ ) return self._stubs['delete_trigger'] + 
@property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + Awaitable[channel.Channel]]: + r"""Return a callable for the get channel method over gRPC. + + Get a single Channel. + + Returns: + Callable[[~.GetChannelRequest], + Awaitable[~.Channel]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel' not in self._stubs: + self._stubs['get_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannel', + request_serializer=eventarc.GetChannelRequest.serialize, + response_deserializer=channel.Channel.deserialize, + ) + return self._stubs['get_channel'] + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + Awaitable[eventarc.ListChannelsResponse]]: + r"""Return a callable for the list channels method over gRPC. + + List channels. + + Returns: + Callable[[~.ListChannelsRequest], + Awaitable[~.ListChannelsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channels' not in self._stubs: + self._stubs['list_channels'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannels', + request_serializer=eventarc.ListChannelsRequest.serialize, + response_deserializer=eventarc.ListChannelsResponse.deserialize, + ) + return self._stubs['list_channels'] + + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create channel method over gRPC. 
+ + Create a new channel in a particular project and + location. + + Returns: + Callable[[~.CreateChannelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_channel_' not in self._stubs: + self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannel', + request_serializer=eventarc.CreateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_'] + + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update channel method over gRPC. + + Update a single channel. + + Returns: + Callable[[~.UpdateChannelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_channel' not in self._stubs: + self._stubs['update_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', + request_serializer=eventarc.UpdateChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_channel'] + + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete channel method over gRPC. + + Delete a single channel. 
+ + Returns: + Callable[[~.DeleteChannelRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_channel' not in self._stubs: + self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', + request_serializer=eventarc.DeleteChannelRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel'] + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + Awaitable[discovery.Provider]]: + r"""Return a callable for the get provider method over gRPC. + + Get a single Provider. + + Returns: + Callable[[~.GetProviderRequest], + Awaitable[~.Provider]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_provider' not in self._stubs: + self._stubs['get_provider'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetProvider', + request_serializer=eventarc.GetProviderRequest.serialize, + response_deserializer=discovery.Provider.deserialize, + ) + return self._stubs['get_provider'] + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + Awaitable[eventarc.ListProvidersResponse]]: + r"""Return a callable for the list providers method over gRPC. + + List providers. + + Returns: + Callable[[~.ListProvidersRequest], + Awaitable[~.ListProvidersResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_providers' not in self._stubs: + self._stubs['list_providers'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListProviders', + request_serializer=eventarc.ListProvidersRequest.serialize, + response_deserializer=eventarc.ListProvidersResponse.deserialize, + ) + return self._stubs['list_providers'] + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + Awaitable[channel_connection.ChannelConnection]]: + r"""Return a callable for the get channel connection method over gRPC. + + Get a single ChannelConnection. + + Returns: + Callable[[~.GetChannelConnectionRequest], + Awaitable[~.ChannelConnection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_channel_connection' not in self._stubs: + self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', + request_serializer=eventarc.GetChannelConnectionRequest.serialize, + response_deserializer=channel_connection.ChannelConnection.deserialize, + ) + return self._stubs['get_channel_connection'] + + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + Awaitable[eventarc.ListChannelConnectionsResponse]]: + r"""Return a callable for the list channel connections method over gRPC. + + List channel connections. + + Returns: + Callable[[~.ListChannelConnectionsRequest], + Awaitable[~.ListChannelConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_channel_connections' not in self._stubs: + self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', + request_serializer=eventarc.ListChannelConnectionsRequest.serialize, + response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, + ) + return self._stubs['list_channel_connections'] + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create channel connection method over gRPC. + + Create a new ChannelConnection in a particular + project and location. + + Returns: + Callable[[~.CreateChannelConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_channel_connection' not in self._stubs: + self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', + request_serializer=eventarc.CreateChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_channel_connection'] + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete channel connection method over gRPC. + + Delete a single ChannelConnection. 
+ + Returns: + Callable[[~.DeleteChannelConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_channel_connection' not in self._stubs: + self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', + request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_channel_connection'] + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + Awaitable[google_channel_config.GoogleChannelConfig]]: + r"""Return a callable for the get google channel config method over gRPC. + + Get a GoogleChannelConfig + + Returns: + Callable[[~.GetGoogleChannelConfigRequest], + Awaitable[~.GoogleChannelConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_google_channel_config' not in self._stubs: + self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', + request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, + response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['get_google_channel_config'] + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + Awaitable[gce_google_channel_config.GoogleChannelConfig]]: + r"""Return a callable for the update google channel config method over gRPC. + + Update a single GoogleChannelConfig + + Returns: + Callable[[~.UpdateGoogleChannelConfigRequest], + Awaitable[~.GoogleChannelConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_google_channel_config' not in self._stubs: + self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', + request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, + response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, + ) + return self._stubs['update_google_channel_config'] + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 028e474937a8..6e8a1e6e4561 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -43,7 +43,12 @@ OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.longrunning import operations_pb2 # type: ignore @@ -72,6 +77,22 @@ class EventarcRestInterceptor: .. 
code-block:: python class MyCustomEventarcInterceptor(EventarcRestInterceptor): + def pre_create_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_channel_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_channel_connection(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -80,6 +101,22 @@ def post_create_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_channel_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_channel_connection(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -88,6 +125,38 @@ def post_delete_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_get_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_channel_connection(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_channel_connection(self, response): + logging.log(f"Received 
response: {response}") + return response + + def pre_get_google_channel_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_google_channel_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_provider(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_provider(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -96,6 +165,30 @@ def post_get_trigger(self, response): logging.log(f"Received response: {response}") return response + def pre_list_channel_connections(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_channel_connections(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_channels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_channels(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_providers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_providers(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_triggers(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -104,6 +197,22 @@ def post_list_triggers(self, response): logging.log(f"Received response: {response}") return response + def pre_update_channel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_channel(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_update_google_channel_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_google_channel_config(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_trigger(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -117,6 +226,38 @@ def post_update_trigger(self, response): """ + def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_create_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_channel_connection + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_trigger @@ -128,6 +269,38 @@ def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: S def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
+ """ + return request, metadata + + def post_delete_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_channel_connection + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. @@ -144,6 +317,70 @@ def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: S def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_channel(self, response: channel.Channel) -> channel.Channel: + """Post-rpc interceptor for get_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_channel_connection + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
+ """ + return request, metadata + + def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: + """Post-rpc interceptor for get_channel_connection + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_google_channel_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_google_channel_config(self, response: google_channel_config.GoogleChannelConfig) -> google_channel_config.GoogleChannelConfig: + """Post-rpc interceptor for get_google_channel_config + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_provider + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: + """Post-rpc interceptor for get_provider + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. 
@@ -160,6 +397,54 @@ def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequenc def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """Post-rpc interceptor for get_trigger + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_channel_connections + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_channel_connections(self, response: eventarc.ListChannelConnectionsResponse) -> eventarc.ListChannelConnectionsResponse: + """Post-rpc interceptor for list_channel_connections + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_channels + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventarc.ListChannelsResponse: + """Post-rpc interceptor for list_channels + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_providers + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: + """Post-rpc interceptor for list_providers + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. @@ -176,6 +461,38 @@ def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Seq def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_channel + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_channel + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_google_channel_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_update_google_channel_config(self, response: gce_google_channel_config.GoogleChannelConfig) -> gce_google_channel_config.GoogleChannelConfig: + """Post-rpc interceptor for update_google_channel_config + Override in a subclass to manipulate the response after it is returned by the Eventarc server but before it is returned to user code. @@ -284,71 +601,1134 @@ def __init__(self, *, url_match_items = maybe_url_match.groupdict() - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or EventarcRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateChannel(EventarcRestStub): + def __hash__(self): + return hash("CreateChannel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelId" : "", "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.CreateChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create channel method over HTTP. + + Args: + request (~.eventarc.CreateChannelRequest): + The request object. The request message for the + CreateChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', + 'body': 'channel', + }, + ] + request, metadata = self._interceptor.pre_create_channel(request, metadata) + pb_request = eventarc.CreateChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel(resp) + return resp + + class _CreateChannelConnection(EventarcRestStub): + def __hash__(self): + return hash("CreateChannelConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelConnectionId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.CreateChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create channel connection method over HTTP. + + Args: + request (~.eventarc.CreateChannelConnectionRequest): + The request object. The request message for the + CreateChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + 'body': 'channel_connection', + }, + ] + request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) + pb_request = eventarc.CreateChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel_connection(resp) + return resp + + class _CreateTrigger(EventarcRestStub): + def __hash__(self): + return hash("CreateTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "triggerId" : "", "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.CreateTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create trigger method over HTTP. + + Args: + request (~.eventarc.CreateTriggerRequest): + The request object. The request message for the + CreateTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + request, metadata = self._interceptor.pre_create_trigger(request, metadata) + pb_request = eventarc.CreateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_trigger(resp) + return resp + + class _DeleteChannel(EventarcRestStub): + def __hash__(self): + return hash("DeleteChannel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete channel method over HTTP. + + Args: + request (~.eventarc.DeleteChannelRequest): + The request object. The request message for the + DeleteChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_channel(request, metadata) + pb_request = eventarc.DeleteChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel(resp) + return resp + + class _DeleteChannelConnection(EventarcRestStub): + def __hash__(self): + return hash("DeleteChannelConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete channel connection method over HTTP. + + Args: + request (~.eventarc.DeleteChannelConnectionRequest): + The request object. The request message for the + DeleteChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) + pb_request = eventarc.DeleteChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel_connection(resp) + return resp + + class _DeleteTrigger(EventarcRestStub): + def __hash__(self): + return hash("DeleteTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.DeleteTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete trigger method over HTTP. + + Args: + request (~.eventarc.DeleteTriggerRequest): + The request object. The request message for the + DeleteTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_delete_trigger(request, metadata) + pb_request = eventarc.DeleteTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_trigger(resp) + return resp + + class _GetChannel(EventarcRestStub): + def __hash__(self): + return hash("GetChannel") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetChannelRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> channel.Channel: + r"""Call the get channel method over HTTP. 
+ + Args: + request (~.eventarc.GetChannelRequest): + The request object. The request message for the + GetChannel method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.channel.Channel: + A representation of the Channel + resource. A Channel is a resource on + which event providers publish their + events. The published events are + delivered through the transport + associated with the channel. Note that a + channel is associated with exactly one + event provider. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + request, metadata = self._interceptor.pre_get_channel(request, metadata) + pb_request = eventarc.GetChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = channel.Channel() + pb_resp = channel.Channel.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel(resp) + return resp + + class _GetChannelConnection(EventarcRestStub): + def __hash__(self): + return hash("GetChannelConnection") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetChannelConnectionRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> channel_connection.ChannelConnection: + r"""Call the get channel connection method over HTTP. + + Args: + request (~.eventarc.GetChannelConnectionRequest): + The request object. The request message for the + GetChannelConnection method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.channel_connection.ChannelConnection: + A representation of the + ChannelConnection resource. A + ChannelConnection is a resource which + event providers create during the + activation process to establish a + connection between the provider and the + subscriber channel. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) + pb_request = eventarc.GetChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = channel_connection.ChannelConnection() + pb_resp = channel_connection.ChannelConnection.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel_connection(resp) + return resp + + class _GetGoogleChannelConfig(EventarcRestStub): + def __hash__(self): + return hash("GetGoogleChannelConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetGoogleChannelConfigRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> google_channel_config.GoogleChannelConfig: + r"""Call the get google channel config method over HTTP. + + Args: + request (~.eventarc.GetGoogleChannelConfigRequest): + The request object. The request message for the + GetGoogleChannelConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', + }, + ] + request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) + pb_request = eventarc.GetGoogleChannelConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = google_channel_config.GoogleChannelConfig() + pb_resp = google_channel_config.GoogleChannelConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_google_channel_config(resp) + return resp + + class _GetProvider(EventarcRestStub): + def __hash__(self): + return hash("GetProvider") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetProviderRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> discovery.Provider: + r"""Call the get provider method over HTTP. + + Args: + request (~.eventarc.GetProviderRequest): + The request object. The request message for the + GetProvider method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.discovery.Provider: + A representation of the Provider + resource. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/providers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_provider(request, metadata) + pb_request = eventarc.GetProviderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = discovery.Provider() + pb_resp = discovery.Provider.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_provider(resp) + return resp + + class _GetTrigger(EventarcRestStub): + def __hash__(self): + return hash("GetTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.GetTriggerRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> trigger.Trigger: + r"""Call the get trigger method over HTTP. 
+ + Args: + request (~.eventarc.GetTriggerRequest): + The request object. The request message for the + GetTrigger method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.trigger.Trigger: + A representation of the trigger + resource. + + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + request, metadata = self._interceptor.pre_get_trigger(request, metadata) + pb_request = eventarc.GetTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = trigger.Trigger() + pb_resp = trigger.Trigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_trigger(resp) + return resp + + class _ListChannelConnections(EventarcRestStub): + def __hash__(self): + return hash("ListChannelConnections") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListChannelConnectionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListChannelConnectionsResponse: + r"""Call the list channel connections method over HTTP. + + Args: + request (~.eventarc.ListChannelConnectionsRequest): + The request object. The request message for the + ListChannelConnections method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListChannelConnectionsResponse: + The response message for the ``ListChannelConnections`` + method. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + }, + ] + request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) + pb_request = eventarc.ListChannelConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = eventarc.ListChannelConnectionsResponse() + pb_resp = eventarc.ListChannelConnectionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channel_connections(resp) + return resp + + class _ListChannels(EventarcRestStub): + def __hash__(self): + return hash("ListChannels") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: eventarc.ListChannelsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> eventarc.ListChannelsResponse: + r"""Call the list channels method over HTTP. + + Args: + request (~.eventarc.ListChannelsRequest): + The request object. The request message for the + ListChannels method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.eventarc.ListChannelsResponse: + The response message for the ``ListChannels`` method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', + }, + ] + request, metadata = self._interceptor.pre_list_channels(request, metadata) + pb_request = eventarc.ListChannelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or EventarcRestInterceptor() - self._prep_wrapped_messages(client_info) + uri = transcoded_request['uri'] + method = transcoded_request['method'] - @property - def operations_client(self) -> operations_v1.AbstractOperationsClient: - """Create the client designed to process long-running operations. + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) - This property caches on the instance; repeated calls return the same - client. - """ - # Only create a new client if we do not already have one. 
- if self._operations_client is None: - http_options: Dict[str, List[Dict[str, str]]] = { - } + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) - rest_transport = operations_v1.OperationsRestTransport( - host=self._host, - # use the credentials which are saved - credentials=self._credentials, - scopes=self._scopes, - http_options=http_options, - path_prefix="v1") + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) - self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + # Return the response + resp = eventarc.ListChannelsResponse() + pb_resp = eventarc.ListChannelsResponse.pb(resp) - # Return the client from cache. - return self._operations_client + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channels(resp) + return resp - class _CreateTrigger(EventarcRestStub): + class _ListProviders(EventarcRestStub): def __hash__(self): - return hash("CreateTrigger") + return hash("ListProviders") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "triggerId" : "", "validateOnly" : False, } + } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: eventarc.CreateTriggerRequest, *, + request: eventarc.ListProvidersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the create trigger method over HTTP. 
+ ) -> eventarc.ListProvidersResponse: + r"""Call the list providers method over HTTP. Args: - request (~.eventarc.CreateTriggerRequest): + request (~.eventarc.ListProvidersRequest): The request object. The request message for the - CreateTrigger method. + ListProviders method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -356,30 +1736,19 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - + ~.eventarc.ListProvidersResponse: + The response message for the ``ListProviders`` method. """ http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/providers', }, ] - request, metadata = self._interceptor.pre_create_trigger(request, metadata) - pb_request = eventarc.CreateTriggerRequest.pb(request) + request, metadata = self._interceptor.pre_list_providers(request, metadata) + pb_request = eventarc.ListProvidersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -399,7 +1768,6 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -408,34 +1776,36 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, 
ignore_unknown_fields=True) - resp = self._interceptor.post_create_trigger(resp) + resp = eventarc.ListProvidersResponse() + pb_resp = eventarc.ListProvidersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_providers(resp) return resp - class _DeleteTrigger(EventarcRestStub): + class _ListTriggers(EventarcRestStub): def __hash__(self): - return hash("DeleteTrigger") + return hash("ListTriggers") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: eventarc.DeleteTriggerRequest, *, + request: eventarc.ListTriggersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> operations_pb2.Operation: - r"""Call the delete trigger method over HTTP. + ) -> eventarc.ListTriggersResponse: + r"""Call the list triggers method over HTTP. Args: - request (~.eventarc.DeleteTriggerRequest): + request (~.eventarc.ListTriggersRequest): The request object. The request message for the - DeleteTrigger method. + ListTriggers method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -443,20 +1813,17 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. - + ~.eventarc.ListTriggersResponse: + The response message for the ``ListTriggers`` method. 
""" http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', }, ] - request, metadata = self._interceptor.pre_delete_trigger(request, metadata) - pb_request = eventarc.DeleteTriggerRequest.pb(request) + request, metadata = self._interceptor.pre_list_triggers(request, metadata) + pb_request = eventarc.ListTriggersRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) uri = transcoded_request['uri'] @@ -486,34 +1853,36 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_trigger(resp) + resp = eventarc.ListTriggersResponse() + pb_resp = eventarc.ListTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_triggers(resp) return resp - class _GetTrigger(EventarcRestStub): + class _UpdateChannel(EventarcRestStub): def __hash__(self): - return hash("GetTrigger") + return hash("UpdateChannel") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + "validateOnly" : False, } @classmethod def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: eventarc.GetTriggerRequest, *, + request: eventarc.UpdateChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> trigger.Trigger: - r"""Call the get trigger method over HTTP. + ) -> operations_pb2.Operation: + r"""Call the update channel method over HTTP. Args: - request (~.eventarc.GetTriggerRequest): + request (~.eventarc.UpdateChannelRequest): The request object. 
The request message for the - GetTrigger method. + UpdateChannel method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -521,21 +1890,30 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.trigger.Trigger: - A representation of the trigger - resource. + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. """ http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + 'method': 'patch', + 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', + 'body': 'channel', }, ] - request, metadata = self._interceptor.pre_get_trigger(request, metadata) - pb_request = eventarc.GetTriggerRequest.pb(request) + request, metadata = self._interceptor.pre_update_channel(request, metadata) + pb_request = eventarc.UpdateChannelRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -555,6 +1933,7 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -563,16 +1942,14 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = trigger.Trigger() - pb_resp = trigger.Trigger.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_trigger(resp) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = 
self._interceptor.post_update_channel(resp) return resp - class _ListTriggers(EventarcRestStub): + class _UpdateGoogleChannelConfig(EventarcRestStub): def __hash__(self): - return hash("ListTriggers") + return hash("UpdateGoogleChannelConfig") __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { } @@ -582,17 +1959,18 @@ def _get_unset_required_fields(cls, message_dict): return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} def __call__(self, - request: eventarc.ListTriggersRequest, *, + request: eventarc.UpdateGoogleChannelConfigRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, metadata: Sequence[Tuple[str, str]]=(), - ) -> eventarc.ListTriggersResponse: - r"""Call the list triggers method over HTTP. + ) -> gce_google_channel_config.GoogleChannelConfig: + r"""Call the update google channel + config method over HTTP. Args: - request (~.eventarc.ListTriggersRequest): + request (~.eventarc.UpdateGoogleChannelConfigRequest): The request object. The request message for the - ListTriggers method. + UpdateGoogleChannelConfig method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -600,21 +1978,35 @@ def __call__(self, sent along with the request as metadata. Returns: - ~.eventarc.ListTriggersResponse: - The response message for the - ListTriggers method. + ~.gce_google_channel_config.GoogleChannelConfig: + A GoogleChannelConfig is a resource + that stores the custom settings + respected by Eventarc first-party + triggers in the matching region. Once + configured, first-party event data will + be protected using the specified custom + managed encryption key instead of + Google-managed encryption keys. 
""" http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'method': 'patch', + 'uri': '/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}', + 'body': 'google_channel_config', }, ] - request, metadata = self._interceptor.pre_list_triggers(request, metadata) - pb_request = eventarc.ListTriggersRequest.pb(request) + request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) + pb_request = eventarc.UpdateGoogleChannelConfigRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) uri = transcoded_request['uri'] method = transcoded_request['method'] @@ -634,6 +2026,7 @@ def __call__(self, timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -642,11 +2035,11 @@ def __call__(self, raise core_exceptions.from_http_response(response) # Return the response - resp = eventarc.ListTriggersResponse() - pb_resp = eventarc.ListTriggersResponse.pb(resp) + resp = gce_google_channel_config.GoogleChannelConfig() + pb_resp = gce_google_channel_config.GoogleChannelConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_triggers(resp) + resp = self._interceptor.post_update_google_channel_config(resp) return resp class _UpdateTrigger(EventarcRestStub): @@ -736,6 +2129,22 @@ def __call__(self, resp = self._interceptor.post_update_trigger(resp) return resp + @property + def create_channel_(self) -> Callable[ + [eventarc.CreateChannelRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine 
what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_channel_connection(self) -> Callable[ + [eventarc.CreateChannelConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + @property def create_trigger(self) -> Callable[ [eventarc.CreateTriggerRequest], @@ -744,6 +2153,22 @@ def create_trigger(self) -> Callable[ # In C++ this would require a dynamic_cast return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_channel(self) -> Callable[ + [eventarc.DeleteChannelRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_channel_connection(self) -> Callable[ + [eventarc.DeleteChannelConnectionRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + @property def delete_trigger(self) -> Callable[ [eventarc.DeleteTriggerRequest], @@ -752,6 +2177,38 @@ def delete_trigger(self) -> Callable[ # In C++ this would require a dynamic_cast return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + @property + def get_channel(self) -> Callable[ + [eventarc.GetChannelRequest], + channel.Channel]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_channel_connection(self) -> Callable[ + [eventarc.GetChannelConnectionRequest], + channel_connection.ChannelConnection]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_google_channel_config(self) -> Callable[ + [eventarc.GetGoogleChannelConfigRequest], + google_channel_config.GoogleChannelConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_provider(self) -> Callable[ + [eventarc.GetProviderRequest], + discovery.Provider]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore + @property def get_trigger(self) -> Callable[ [eventarc.GetTriggerRequest], @@ -760,6 +2217,30 @@ def get_trigger(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + @property + def list_channel_connections(self) -> Callable[ + [eventarc.ListChannelConnectionsRequest], + eventarc.ListChannelConnectionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_channels(self) -> Callable[ + [eventarc.ListChannelsRequest], + eventarc.ListChannelsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_providers(self) -> Callable[ + [eventarc.ListProvidersRequest], + eventarc.ListProvidersResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore + @property def list_triggers(self) -> Callable[ [eventarc.ListTriggersRequest], @@ -768,6 +2249,22 @@ def list_triggers(self) -> Callable[ # In C++ this would require a dynamic_cast return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + @property + def update_channel(self) -> Callable[ + [eventarc.UpdateChannelRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_google_channel_config(self) -> Callable[ + [eventarc.UpdateGoogleChannelConfigRequest], + gce_google_channel_config.GoogleChannelConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + @property def update_trigger(self) -> Callable[ [eventarc.UpdateTriggerRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 09b1dcca725c..66303876a1cc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -13,36 +13,92 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .channel import ( + Channel, +) +from .channel_connection import ( + ChannelConnection, +) +from .discovery import ( + EventType, + FilteringAttribute, + Provider, +) from .eventarc import ( + CreateChannelConnectionRequest, + CreateChannelRequest, CreateTriggerRequest, + DeleteChannelConnectionRequest, + DeleteChannelRequest, DeleteTriggerRequest, + GetChannelConnectionRequest, + GetChannelRequest, + GetGoogleChannelConfigRequest, + GetProviderRequest, GetTriggerRequest, + ListChannelConnectionsRequest, + ListChannelConnectionsResponse, + ListChannelsRequest, + ListChannelsResponse, + ListProvidersRequest, + ListProvidersResponse, ListTriggersRequest, ListTriggersResponse, OperationMetadata, + UpdateChannelRequest, + UpdateGoogleChannelConfigRequest, UpdateTriggerRequest, ) +from .google_channel_config import ( + GoogleChannelConfig, +) from .trigger import ( CloudRun, Destination, EventFilter, + GKE, Pubsub, + StateCondition, Transport, Trigger, ) __all__ = ( + 'Channel', + 'ChannelConnection', + 'EventType', + 'FilteringAttribute', + 'Provider', + 'CreateChannelConnectionRequest', + 'CreateChannelRequest', 'CreateTriggerRequest', + 'DeleteChannelConnectionRequest', + 'DeleteChannelRequest', 'DeleteTriggerRequest', + 'GetChannelConnectionRequest', + 'GetChannelRequest', + 'GetGoogleChannelConfigRequest', + 'GetProviderRequest', 'GetTriggerRequest', + 'ListChannelConnectionsRequest', + 'ListChannelConnectionsResponse', + 'ListChannelsRequest', + 'ListChannelsResponse', + 'ListProvidersRequest', + 'ListProvidersResponse', 'ListTriggersRequest', 'ListTriggersResponse', 'OperationMetadata', + 'UpdateChannelRequest', + 'UpdateGoogleChannelConfigRequest', 'UpdateTriggerRequest', + 'GoogleChannelConfig', 'CloudRun', 'Destination', 'EventFilter', + 'GKE', 'Pubsub', + 'StateCondition', 'Transport', 'Trigger', ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py new file mode 100755 index 000000000000..10b33b682336 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'Channel', + }, +) + + +class Channel(proto.Message): + r"""A representation of the Channel resource. + A Channel is a resource on which event providers publish their + events. The published events are delivered through the transport + associated with the channel. Note that a channel is associated + with exactly one event provider. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The resource name of the channel. Must be unique + within the location on the project and must be in + ``projects/{project}/locations/{location}/channels/{channel_id}`` + format. + uid (str): + Output only. Server assigned unique + identifier for the channel. 
The value is a UUID4 + string and guaranteed to remain unchanged until + the resource is deleted. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + provider (str): + The name of the event provider (e.g. Eventarc SaaS partner) + associated with the channel. This provider will be granted + permissions to publish events to the channel. Format: + ``projects/{project}/locations/{location}/providers/{provider_id}``. + pubsub_topic (str): + Output only. The name of the Pub/Sub topic created and + managed by Eventarc system as a transport for the event + delivery. Format: ``projects/{project}/topics/{topic_id}``. + + This field is a member of `oneof`_ ``transport``. + state (google.cloud.eventarc_v1.types.Channel.State): + Output only. The state of a Channel. + activation_token (str): + Output only. The activation token for the + channel. The token must be used by the provider + to register the channel for publishing. + crypto_key_name (str): + Optional. Resource name of a KMS crypto key (managed by the + user) used to encrypt/decrypt their event data. + + It must match the pattern + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + """ + class State(proto.Enum): + r"""State lists all the possible states of a Channel + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + PENDING (1): + The PENDING state indicates that a Channel + has been created successfully and there is a new + activation token available for the subscriber to + use to convey the Channel to the provider in + order to create a Connection. + ACTIVE (2): + The ACTIVE state indicates that a Channel has + been successfully connected with the event + provider. An ACTIVE Channel is ready to receive + and route events from the event provider. + INACTIVE (3): + The INACTIVE state indicates that the Channel + cannot receive events permanently. 
There are two + possible cases this state can happen: + + 1. The SaaS provider disconnected from this + Channel. 2. The Channel activation token has + expired but the SaaS provider wasn't + connected. + + To re-establish a Connection with a provider, + the subscriber should create a new Channel and + give it to the provider. + """ + STATE_UNSPECIFIED = 0 + PENDING = 1 + ACTIVE = 2 + INACTIVE = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + provider: str = proto.Field( + proto.STRING, + number=7, + ) + pubsub_topic: str = proto.Field( + proto.STRING, + number=8, + oneof='transport', + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + activation_token: str = proto.Field( + proto.STRING, + number=10, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=11, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py new file mode 100755 index 000000000000..301d8832c118 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'ChannelConnection', + }, +) + + +class ChannelConnection(proto.Message): + r"""A representation of the ChannelConnection resource. + A ChannelConnection is a resource which event providers create + during the activation process to establish a connection between + the provider and the subscriber channel. + + Attributes: + name (str): + Required. The name of the connection. + uid (str): + Output only. Server assigned ID of the + resource. The server guarantees uniqueness and + immutability until deleted. + channel (str): + Required. The name of the connected subscriber Channel. This + is a weak reference to avoid cross project and cross + accounts references. This must be in + ``projects/{project}/location/{location}/channels/{channel_id}`` + format. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation time. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. + activation_token (str): + Input only. Activation token for the channel. + The token will be used during the creation of + ChannelConnection to bind the channel with the + provider project. This field will not be stored + in the provider resource. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + channel: str = proto.Field( + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + activation_token: str = proto.Field( + proto.STRING, + number=8, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py new file mode 100755 index 000000000000..36a152e7df8a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'Provider', + 'EventType', + 'FilteringAttribute', + }, +) + + +class Provider(proto.Message): + r"""A representation of the Provider resource. + + Attributes: + name (str): + Output only. 
In + ``projects/{project}/locations/{location}/providers/{provider_id}`` + format. + display_name (str): + Output only. Human friendly name for the + Provider. For example "Cloud Storage". + event_types (MutableSequence[google.cloud.eventarc_v1.types.EventType]): + Output only. Event types for this provider. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + event_types: MutableSequence['EventType'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='EventType', + ) + + +class EventType(proto.Message): + r"""A representation of the event type resource. + + Attributes: + type_ (str): + Output only. The full name of the event type + (for example, + "google.cloud.storage.object.v1.finalized"). In + the form of + {provider-specific-prefix}.{resource}.{version}.{verb}. + Types MUST be versioned and event schemas are + guaranteed to remain backward compatible within + one version. Note that event type versions and + API versions do not need to match. + description (str): + Output only. Human friendly description of + what the event type is about. For example + "Bucket created in Cloud Storage". + filtering_attributes (MutableSequence[google.cloud.eventarc_v1.types.FilteringAttribute]): + Output only. Filtering attributes for the + event type. + event_schema_uri (str): + Output only. URI for the event schema. + For example + "https://github.com/googleapis/google-cloudevents/blob/master/proto/google/events/cloud/storage/v1/events.proto". 
+ """ + + type_: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + filtering_attributes: MutableSequence['FilteringAttribute'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='FilteringAttribute', + ) + event_schema_uri: str = proto.Field( + proto.STRING, + number=4, + ) + + +class FilteringAttribute(proto.Message): + r"""A representation of the FilteringAttribute resource. + Filtering attributes are per event type. + + Attributes: + attribute (str): + Output only. Attribute used for filtering the + event type. + description (str): + Output only. Description of the purpose of + the attribute. + required (bool): + Output only. If true, the triggers for this + provider should always specify a filter on these + attributes. Trigger creation will fail + otherwise. + path_pattern_supported (bool): + Output only. If true, the attribute accepts + matching expressions in the Eventarc PathPattern + format. + """ + + attribute: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + required: bool = proto.Field( + proto.BOOL, + number=3, + ) + path_pattern_supported: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index cb07e3c99e2f..0e737707c81e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -19,6 +19,10 @@ import proto # type: ignore +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection as 
gce_channel_connection +from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -33,6 +37,22 @@ 'CreateTriggerRequest', 'UpdateTriggerRequest', 'DeleteTriggerRequest', + 'GetChannelRequest', + 'ListChannelsRequest', + 'ListChannelsResponse', + 'CreateChannelRequest', + 'UpdateChannelRequest', + 'DeleteChannelRequest', + 'GetProviderRequest', + 'ListProvidersRequest', + 'ListProvidersResponse', + 'GetChannelConnectionRequest', + 'ListChannelConnectionsRequest', + 'ListChannelConnectionsResponse', + 'CreateChannelConnectionRequest', + 'DeleteChannelConnectionRequest', + 'UpdateGoogleChannelConfigRequest', + 'GetGoogleChannelConfigRequest', 'OperationMetadata', }, ) @@ -61,7 +81,8 @@ class ListTriggersRequest(proto.Message): triggers on. page_size (int): The maximum number of triggers to return on - each page. Note: The service may send fewer. + each page. + Note: The service may send fewer. page_token (str): The page token; provide the value from the ``next_page_token`` field in a previous ``ListTriggers`` @@ -72,10 +93,16 @@ class ListTriggersRequest(proto.Message): token. order_by (str): The sorting order of the resources returned. Value should be - a comma separated list of fields. The default sorting oder + a comma-separated list of fields. The default sorting order is ascending. To specify descending order for a field, append a ``desc`` suffix; for example: ``name desc, trigger_id``. + filter (str): + Filter field. Used to filter the Triggers to + be listed. Possible filters are described in + https://google.aip.dev/160. For example, using + "?filter=destination:gke" would list only + Triggers with a gke destination. 
""" parent: str = proto.Field( @@ -94,19 +121,23 @@ class ListTriggersRequest(proto.Message): proto.STRING, number=4, ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) class ListTriggersResponse(proto.Message): - r"""The response message for the ListTriggers method. + r"""The response message for the ``ListTriggers`` method. Attributes: triggers (MutableSequence[google.cloud.eventarc_v1.types.Trigger]): The requested triggers, up to the number specified in ``page_size``. next_page_token (str): - A page token that can be sent to ListTriggers - to request the next page. If this is empty, then - there are no more pages. + A page token that can be sent to ``ListTriggers`` to request + the next page. If this is empty, then there are no more + pages. unreachable (MutableSequence[str]): Unreachable resources, if any. """ @@ -144,7 +175,7 @@ class CreateTriggerRequest(proto.Message): to the trigger. validate_only (bool): Required. If set, validate the request and - preview the review, but do not actually post it. + preview the review, but do not post it. """ parent: str = proto.Field( @@ -174,8 +205,8 @@ class UpdateTriggerRequest(proto.Message): The trigger to be updated. update_mask (google.protobuf.field_mask_pb2.FieldMask): The fields to be updated; only fields explicitly provided - will be updated. If no field mask is provided, all provided - fields in the request will be updated. To update all fields, + are updated. If no field mask is provided, all provided + fields in the request are updated. To update all fields, provide a field mask of "*". allow_missing (bool): If set to true, and the trigger is not found, a new trigger @@ -183,7 +214,7 @@ class UpdateTriggerRequest(proto.Message): ignored. validate_only (bool): Required. If set, validate the request and - preview the review, but do not actually post it. + preview the review, but do not post it. 
""" trigger: gce_trigger.Trigger = proto.Field( @@ -223,7 +254,7 @@ class DeleteTriggerRequest(proto.Message): taken on the server. validate_only (bool): Required. If set, validate the request and - preview the review, but do not actually post it. + preview the review, but do not post it. """ name: str = proto.Field( @@ -244,6 +275,454 @@ class DeleteTriggerRequest(proto.Message): ) +class GetChannelRequest(proto.Message): + r"""The request message for the GetChannel method. + + Attributes: + name (str): + Required. The name of the channel to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListChannelsRequest(proto.Message): + r"""The request message for the ListChannels method. + + Attributes: + parent (str): + Required. The parent collection to list + channels on. + page_size (int): + The maximum number of channels to return on + each page. + Note: The service may send fewer. + page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous ``ListChannels`` + call to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListChannels`` must match the call that provided the page + token. + order_by (str): + The sorting order of the resources returned. Value should be + a comma-separated list of fields. The default sorting order + is ascending. To specify descending order for a field, + append a ``desc`` suffix; for example: + ``name desc, channel_id``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListChannelsResponse(proto.Message): + r"""The response message for the ``ListChannels`` method. 
+ + Attributes: + channels (MutableSequence[google.cloud.eventarc_v1.types.Channel]): + The requested channels, up to the number specified in + ``page_size``. + next_page_token (str): + A page token that can be sent to ``ListChannels`` to request + the next page. If this is empty, then there are no more + pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + channels: MutableSequence[gce_channel.Channel] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_channel.Channel, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateChannelRequest(proto.Message): + r"""The request message for the CreateChannel method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this channel. + channel (google.cloud.eventarc_v1.types.Channel): + Required. The channel to create. + channel_id (str): + Required. The user-provided ID to be assigned + to the channel. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not post it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + channel: gce_channel.Channel = proto.Field( + proto.MESSAGE, + number=2, + message=gce_channel.Channel, + ) + channel_id: str = proto.Field( + proto.STRING, + number=3, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateChannelRequest(proto.Message): + r"""The request message for the UpdateChannel method. + + Attributes: + channel (google.cloud.eventarc_v1.types.Channel): + The channel to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly provided + are updated. If no field mask is provided, all provided + fields in the request are updated. 
To update all fields, + provide a field mask of "*". + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not post it. + """ + + channel: gce_channel.Channel = proto.Field( + proto.MESSAGE, + number=1, + message=gce_channel.Channel, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteChannelRequest(proto.Message): + r"""The request message for the DeleteChannel method. + + Attributes: + name (str): + Required. The name of the channel to be + deleted. + validate_only (bool): + Required. If set, validate the request and + preview the review, but do not post it. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class GetProviderRequest(proto.Message): + r"""The request message for the GetProvider method. + + Attributes: + name (str): + Required. The name of the provider to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListProvidersRequest(proto.Message): + r"""The request message for the ListProviders method. + + Attributes: + parent (str): + Required. The parent of the provider to get. + page_size (int): + The maximum number of providers to return on + each page. + page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous ``ListProviders`` + call to retrieve the subsequent page. + + When paginating, all other parameters provided to + ``ListProviders`` must match the call that provided the page + token. + order_by (str): + The sorting order of the resources returned. Value should be + a comma-separated list of fields. The default sorting oder + is ascending. To specify descending order for a field, + append a ``desc`` suffix; for example: ``name desc, _id``. 
+ filter (str): + The filter field that the list request will + filter on. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListProvidersResponse(proto.Message): + r"""The response message for the ``ListProviders`` method. + + Attributes: + providers (MutableSequence[google.cloud.eventarc_v1.types.Provider]): + The requested providers, up to the number specified in + ``page_size``. + next_page_token (str): + A page token that can be sent to ``ListProviders`` to + request the next page. If this is empty, then there are no + more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + providers: MutableSequence[discovery.Provider] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=discovery.Provider, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetChannelConnectionRequest(proto.Message): + r"""The request message for the GetChannelConnection method. + + Attributes: + name (str): + Required. The name of the channel connection + to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListChannelConnectionsRequest(proto.Message): + r"""The request message for the ListChannelConnections method. + + Attributes: + parent (str): + Required. The parent collection from which to + list channel connections. + page_size (int): + The maximum number of channel connections to + return on each page. + Note: The service may send fewer responses. 
+ page_token (str): + The page token; provide the value from the + ``next_page_token`` field in a previous + ``ListChannelConnections`` call to retrieve the subsequent + page. + + When paginating, all other parameters provided to + ``ListChannelConnections`` match the call that provided the + page token. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListChannelConnectionsResponse(proto.Message): + r"""The response message for the ``ListChannelConnections`` method. + + Attributes: + channel_connections (MutableSequence[google.cloud.eventarc_v1.types.ChannelConnection]): + The requested channel connections, up to the number + specified in ``page_size``. + next_page_token (str): + A page token that can be sent to ``ListChannelConnections`` + to request the next page. If this is empty, then there are + no more pages. + unreachable (MutableSequence[str]): + Unreachable resources, if any. + """ + + @property + def raw_page(self): + return self + + channel_connections: MutableSequence[gce_channel_connection.ChannelConnection] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=gce_channel_connection.ChannelConnection, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateChannelConnectionRequest(proto.Message): + r"""The request message for the CreateChannelConnection method. + + Attributes: + parent (str): + Required. The parent collection in which to + add this channel connection. + channel_connection (google.cloud.eventarc_v1.types.ChannelConnection): + Required. Channel connection to create. + channel_connection_id (str): + Required. The user-provided ID to be assigned + to the channel connection. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + channel_connection: gce_channel_connection.ChannelConnection = proto.Field( + proto.MESSAGE, + number=2, + message=gce_channel_connection.ChannelConnection, + ) + channel_connection_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteChannelConnectionRequest(proto.Message): + r"""The request message for the DeleteChannelConnection method. + + Attributes: + name (str): + Required. The name of the channel connection + to delete. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateGoogleChannelConfigRequest(proto.Message): + r"""The request message for the UpdateGoogleChannelConfig method. + + Attributes: + google_channel_config (google.cloud.eventarc_v1.types.GoogleChannelConfig): + Required. The config to be updated. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + The fields to be updated; only fields explicitly provided + are updated. If no field mask is provided, all provided + fields in the request are updated. To update all fields, + provide a field mask of "*". + """ + + google_channel_config: gce_google_channel_config.GoogleChannelConfig = proto.Field( + proto.MESSAGE, + number=1, + message=gce_google_channel_config.GoogleChannelConfig, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class GetGoogleChannelConfigRequest(proto.Message): + r"""The request message for the GetGoogleChannelConfig method. + + Attributes: + name (str): + Required. The name of the config to get. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class OperationMetadata(proto.Message): r"""Represents the metadata of the long-running operation. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py new file mode 100755 index 000000000000..291ebd01bd46 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.eventarc.v1', + manifest={ + 'GoogleChannelConfig', + }, +) + + +class GoogleChannelConfig(proto.Message): + r"""A GoogleChannelConfig is a resource that stores the custom + settings respected by Eventarc first-party triggers in the + matching region. Once configured, first-party event data will be + protected using the specified custom managed encryption key + instead of Google-managed encryption keys. + + Attributes: + name (str): + Required. The resource name of the config. Must be in the + format of, + ``projects/{project}/locations/{location}/googleChannelConfig``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last-modified time. 
+ crypto_key_name (str): + Optional. Resource name of a KMS crypto key (managed by the + user) used to encrypt/decrypt their event data. + + It must match the pattern + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + crypto_key_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 504afb5fe6ee..86ba6a41cae3 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -20,6 +20,7 @@ import proto # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore __protobuf__ = proto.module( @@ -27,9 +28,11 @@ manifest={ 'Trigger', 'EventFilter', + 'StateCondition', 'Destination', 'Transport', 'CloudRun', + 'GKE', 'Pubsub', }, ) @@ -41,11 +44,11 @@ class Trigger(proto.Message): Attributes: name (str): Required. The resource name of the trigger. Must be unique - within the location on the project and must be in + within the location of the project and must be in ``projects/{project}/locations/{location}/triggers/{trigger}`` format. uid (str): - Output only. Server assigned unique + Output only. Server-assigned unique identifier for the trigger. The value is a UUID4 string and guaranteed to remain unchanged until the resource is deleted. @@ -54,16 +57,16 @@ class Trigger(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last-modified time. 
event_filters (MutableSequence[google.cloud.eventarc_v1.types.EventFilter]): - Required. null The list of filters that - applies to event attributes. Only events that - match all the provided filters will be sent to + Required. Unordered list. The list of filters + that applies to event attributes. Only events + that match all the provided filters are sent to the destination. service_account (str): Optional. The IAM service account email associated with the trigger. The service account represents the identity of the trigger. - The principal who calls this API must have + The principal who calls this API must have the ``iam.serviceAccounts.actAs`` permission in the service account. See https://cloud.google.com/iam/docs/understanding-service-accounts?hl=en#sa_common @@ -73,26 +76,35 @@ class Trigger(proto.Message): generate identity tokens when invoking the service. See https://cloud.google.com/run/docs/triggering/pubsub-push#create-service-account for information on how to invoke authenticated Cloud Run - services. In order to create Audit Log triggers, the service - account should also have ``roles/eventarc.eventReceiver`` - IAM role. + services. To create Audit Log triggers, the service account + should also have the ``roles/eventarc.eventReceiver`` IAM + role. destination (google.cloud.eventarc_v1.types.Destination): Required. Destination specifies where the events should be sent to. transport (google.cloud.eventarc_v1.types.Transport): - Optional. In order to deliver messages, - Eventarc may use other GCP products as transport + Optional. To deliver messages, Eventarc might + use other GCP products as a transport intermediary. This field contains a reference to that transport intermediary. This information can be used for debugging purposes. labels (MutableMapping[str, str]): Optional. User labels attached to the triggers that can be used to group resources. + channel (str): + Optional. 
The name of the channel associated with the + trigger in + ``projects/{project}/locations/{location}/channels/{channel}`` + format. You must provide a channel to receive events from + Eventarc SaaS partners. + conditions (MutableMapping[str, google.cloud.eventarc_v1.types.StateCondition]): + Output only. The reason(s) why a trigger is + in FAILED state. etag (str): Output only. This checksum is computed by the server based on the value of other fields, and - may be sent only on create requests to ensure - the client has an up-to-date value before + might be sent only on create requests to ensure + that the client has an up-to-date value before proceeding. """ @@ -138,6 +150,16 @@ class Trigger(proto.Message): proto.STRING, number=12, ) + channel: str = proto.Field( + proto.STRING, + number=13, + ) + conditions: MutableMapping[str, 'StateCondition'] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=15, + message='StateCondition', + ) etag: str = proto.Field( proto.STRING, number=99, @@ -158,6 +180,11 @@ class EventFilter(proto.Message): 'type' attribute. value (str): Required. The value for the attribute. + operator (str): + Optional. The operator used for matching the events with the + value of the filter. If not specified, only events that have + an exact key-value pair specified in the filter are matched. + The only allowed value is ``match-path-pattern``. """ attribute: str = proto.Field( @@ -168,18 +195,67 @@ class EventFilter(proto.Message): proto.STRING, number=2, ) + operator: str = proto.Field( + proto.STRING, + number=3, + ) + + +class StateCondition(proto.Message): + r"""A condition that is part of the trigger state computation. + + Attributes: + code (google.rpc.code_pb2.Code): + The canonical code of the condition. + message (str): + Human-readable message. 
+ """ + + code: code_pb2.Code = proto.Field( + proto.ENUM, + number=1, + enum=code_pb2.Code, + ) + message: str = proto.Field( + proto.STRING, + number=2, + ) class Destination(proto.Message): r"""Represents a target of an invocation over HTTP. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: cloud_run (google.cloud.eventarc_v1.types.CloudRun): - Cloud Run fully-managed service that receives - the events. The service should be running in the - same project of the trigger. + Cloud Run fully-managed resource that + receives the events. The resource should be in + the same project as the trigger. + + This field is a member of `oneof`_ ``descriptor``. + cloud_function (str): + The Cloud Function resource name. Only Cloud Functions V2 is + supported. Format: + ``projects/{project}/locations/{location}/functions/{function}`` + + This field is a member of `oneof`_ ``descriptor``. + gke (google.cloud.eventarc_v1.types.GKE): + A GKE service capable of receiving events. + The service should be running in the same + project as the trigger. + + This field is a member of `oneof`_ ``descriptor``. + workflow (str): + The resource name of the Workflow whose Executions are + triggered by the events. The Workflow resource should be + deployed in the same project as the trigger. Format: + ``projects/{project}/locations/{location}/workflows/{workflow}`` This field is a member of `oneof`_ ``descriptor``. 
""" @@ -190,11 +266,27 @@ class Destination(proto.Message): oneof='descriptor', message='CloudRun', ) + cloud_function: str = proto.Field( + proto.STRING, + number=2, + oneof='descriptor', + ) + gke: 'GKE' = proto.Field( + proto.MESSAGE, + number=3, + oneof='descriptor', + message='GKE', + ) + workflow: str = proto.Field( + proto.STRING, + number=4, + oneof='descriptor', + ) class Transport(proto.Message): r"""Represents the transport intermediaries created for the - trigger in order to deliver events. + trigger to deliver events. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -202,7 +294,7 @@ class Transport(proto.Message): Attributes: pubsub (google.cloud.eventarc_v1.types.Pubsub): The Pub/Sub topic and subscription used by - Eventarc as delivery intermediary. + Eventarc as a transport intermediary. This field is a member of `oneof`_ ``intermediary``. """ @@ -224,15 +316,14 @@ class CloudRun(proto.Message): being addressed. See https://cloud.google.com/run/docs/reference/rest/v1/namespaces.services. - Only services located in the same project of the + Only services located in the same project as the trigger object can be addressed. path (str): Optional. The relative path on the Cloud Run service the events should be sent to. - - The value must conform to the definition of URI - path segment (section 3.3 of RFC2396). Examples: - "/route", "route", "route/subroute". + The value must conform to the definition of a + URI path segment (section 3.3 of RFC2396). + Examples: "/route", "route", "route/subroute". region (str): Required. The region the Cloud Run service is deployed in. @@ -252,22 +343,73 @@ class CloudRun(proto.Message): ) +class GKE(proto.Message): + r"""Represents a GKE destination. + + Attributes: + cluster (str): + Required. The name of the cluster the GKE + service is running in. The cluster must be + running in the same project as the trigger being + created. + location (str): + Required. 
The name of the Google Compute + Engine in which the cluster resides, which can + either be compute zone (for example, + us-central1-a) for the zonal clusters or region + (for example, us-central1) for regional + clusters. + namespace (str): + Required. The namespace the GKE service is + running in. + service (str): + Required. Name of the GKE service. + path (str): + Optional. The relative path on the GKE + service the events should be sent to. + The value must conform to the definition of a + URI path segment (section 3.3 of RFC2396). + Examples: "/route", "route", "route/subroute". + """ + + cluster: str = proto.Field( + proto.STRING, + number=1, + ) + location: str = proto.Field( + proto.STRING, + number=2, + ) + namespace: str = proto.Field( + proto.STRING, + number=3, + ) + service: str = proto.Field( + proto.STRING, + number=4, + ) + path: str = proto.Field( + proto.STRING, + number=5, + ) + + class Pubsub(proto.Message): r"""Represents a Pub/Sub transport. Attributes: topic (str): Optional. The name of the Pub/Sub topic created and managed - by Eventarc system as a transport for the event delivery. - Format: ``projects/{PROJECT_ID}/topics/{TOPIC_NAME}``. + by Eventarc as a transport for the event delivery. Format: + ``projects/{PROJECT_ID}/topics/{TOPIC_NAME}``. - You may set an existing topic for triggers of the type - ``google.cloud.pubsub.topic.v1.messagePublished`` only. The - topic you provide here will not be deleted by Eventarc at - trigger deletion. + You can set an existing topic for triggers of the type + ``google.cloud.pubsub.topic.v1.messagePublished``. The topic + you provide here is not deleted by Eventarc at trigger + deletion. subscription (str): Output only. The name of the Pub/Sub subscription created - and managed by Eventarc system as a transport for the event + and managed by Eventarc as a transport for the event delivery. Format: ``projects/{PROJECT_ID}/subscriptions/{SUBSCRIPTION_NAME}``. 
""" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py new file mode 100755 index 000000000000..4dbb67d6ad9e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py new file mode 100755 index 000000000000..d740fe73e578 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannelConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannelConnection_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py new file mode 100755 index 000000000000..1743fe90e282 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannelConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel_connection = eventarc_v1.ChannelConnection() + channel_connection.name = "name_value" + channel_connection.channel = "channel_value" + + request = eventarc_v1.CreateChannelConnectionRequest( + parent="parent_value", + channel_connection=channel_connection, + channel_connection_id="channel_connection_id_value", + ) + + # Make the request + operation = client.create_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannelConnection_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py new file mode 100755 index 000000000000..899225b0e4cb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_CreateChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_create_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + channel = eventarc_v1.Channel() + channel.pubsub_topic = "pubsub_topic_value" + channel.name = "name_value" + + request = eventarc_v1.CreateChannelRequest( + parent="parent_value", + channel=channel, + channel_id="channel_id_value", + validate_only=True, + ) + + # Make the request + operation = client.create_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_CreateChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py new file mode 100755 index 000000000000..8cffd31dc159 --- /dev/null +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py new file mode 100755 index 000000000000..4de62a3619c8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannelConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannelConnection_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py new file mode 100755 index 000000000000..b549b4daf80d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_channel_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py new file mode 100755 index 000000000000..d640d9872629 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_DeleteChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_delete_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.DeleteChannelRequest( + name="name_value", + validate_only=True, + ) + + # Make the request + operation = client.delete_channel(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_DeleteChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py new file mode 100755 index 000000000000..482e80511e3f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannel_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py new file mode 100755 index 000000000000..e3300768b66b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannelConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_channel_connection(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannelConnection_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py new file mode 100755 index 000000000000..23fe5839bd31 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannelConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannelConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_channel_connection(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel_connection(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannelConnection_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py new file mode 100755 index 000000000000..f56a7c44e50f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetChannel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_channel(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetChannelRequest( + name="name_value", + ) + + # Make the request + response = client.get_channel(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetChannel_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py new file mode 100755 index 000000000000..fd03d024ccb9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.get_google_channel_config(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py new file mode 100755 index 000000000000..273ee21a7aec --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_google_channel_config(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetGoogleChannelConfigRequest( + name="name_value", + ) + + # Make the request + response = client.get_google_channel_config(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py new file mode 100755 index 000000000000..477e2b47508f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProvider +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetProvider_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = await client.get_provider(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetProvider_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py new file mode 100755 index 000000000000..121fe759f9d8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetProvider +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_GetProvider_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_get_provider(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.GetProviderRequest( + name="name_value", + ) + + # Make the request + response = client.get_provider(request=request) + + # Handle the response + print(response) + +# [END eventarc_v1_generated_Eventarc_GetProvider_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py new file mode 100755 index 000000000000..a9f93081bc3d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannelConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannelConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannelConnections_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py new file mode 100755 index 000000000000..0f6bbfb23c02 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListChannelConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannelConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +def sample_list_channel_connections(): + # Create a client + client = eventarc_v1.EventarcClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channel_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannelConnections_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py new file mode 100755 index 000000000000..4cfb527880bc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListChannels_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import eventarc_v1 + + +async def sample_list_channels(): + # Create a client + client = eventarc_v1.EventarcAsyncClient() + + # Initialize request argument(s) + request = eventarc_v1.ListChannelsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_channels(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END eventarc_v1_generated_Eventarc_ListChannels_async] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py new file mode 100755 index 000000000000..6caa5f6e86f5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListChannels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
from google.cloud import eventarc_v1


def sample_list_channels():
    """Page through every Channel under a parent location and print each one."""
    # Instantiate the synchronous Eventarc client.
    eventarc_client = eventarc_v1.EventarcClient()

    # Build the list request for the target parent resource.
    list_request = eventarc_v1.ListChannelsRequest(parent="parent_value")

    # The returned pager transparently fetches successive pages; iterating
    # it yields individual Channel messages.
    for channel in eventarc_client.list_channels(request=list_request):
        print(channel)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListProviders +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_ListProviders_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
from google.cloud import eventarc_v1


async def sample_list_providers():
    """List event Providers under a parent location using the async client.

    Illustrative generated sample: values such as ``"parent_value"`` are
    placeholders that must be replaced before running.
    """
    # Create a client
    client = eventarc_v1.EventarcAsyncClient()

    # Initialize request argument(s)
    request = eventarc_v1.ListProvidersRequest(
        parent="parent_value",
    )

    # Make the request.
    # NOTE(review): EventarcAsyncClient.list_providers is an ``async def``
    # returning an AsyncPager, so the call must be awaited; the original
    # sample iterated ``async for`` over the bare coroutine, which raises
    # TypeError at runtime.
    page_result = await client.list_providers(request=request)

    # Handle the response
    async for response in page_result:
        print(response)
from google.cloud import eventarc_v1


def sample_list_providers():
    """Page through every event Provider under a parent and print each one."""
    # Instantiate the synchronous Eventarc client.
    eventarc_client = eventarc_v1.EventarcClient()

    # Build the list request for the target parent resource.
    list_request = eventarc_v1.ListProvidersRequest(parent="parent_value")

    # The returned pager transparently fetches successive pages; iterating
    # it yields individual Provider messages.
    for provider in eventarc_client.list_providers(request=list_request):
        print(provider)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateChannel +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateChannel_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
from google.cloud import eventarc_v1


async def sample_update_channel():
    """Update a Channel with the async client and wait for the LRO result.

    Illustrative generated sample; the request here only sets
    ``validate_only`` and must be adapted before real use.
    """
    # Create a client
    client = eventarc_v1.EventarcAsyncClient()

    # Initialize request argument(s)
    request = eventarc_v1.UpdateChannelRequest(
        validate_only=True,
    )

    # Make the request. The async method must be awaited to obtain the
    # AsyncOperation handle for the long-running operation.
    operation = client.update_channel(request=request)

    print("Waiting for operation to complete...")

    # NOTE(review): AsyncOperation.result() is itself a coroutine; the
    # original sample assigned the un-awaited coroutine to ``response``
    # and printed that instead of the operation's Channel result.
    response = await (await operation).result()

    # Handle the response
    print(response)
from google.cloud import eventarc_v1


def sample_update_channel():
    """Update a Channel and block until the long-running operation finishes."""
    # Instantiate the synchronous Eventarc client.
    eventarc_client = eventarc_v1.EventarcClient()

    # Only validate the (empty) update in this illustrative request.
    update_request = eventarc_v1.UpdateChannelRequest(validate_only=True)

    # Start the long-running operation.
    lro = eventarc_client.update_channel(request=update_request)

    print("Waiting for operation to complete...")

    # result() blocks until the operation resolves, then yields the Channel.
    result = lro.result()

    print(result)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGoogleChannelConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-eventarc + + +# [START eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
from google.cloud import eventarc_v1


async def sample_update_google_channel_config():
    """Update the GoogleChannelConfig via the asynchronous Eventarc client."""
    # Instantiate the asynchronous Eventarc client.
    eventarc_client = eventarc_v1.EventarcAsyncClient()

    # Populate the config message that will replace the current one.
    config = eventarc_v1.GoogleChannelConfig()
    config.name = "name_value"

    update_request = eventarc_v1.UpdateGoogleChannelConfigRequest(
        google_channel_config=config,
    )

    # Unary call: awaiting it yields the updated GoogleChannelConfig.
    updated = await eventarc_client.update_google_channel_config(
        request=update_request,
    )

    print(updated)
from google.cloud import eventarc_v1


def sample_update_google_channel_config():
    """Update the GoogleChannelConfig via the synchronous Eventarc client."""
    # Instantiate the synchronous Eventarc client.
    eventarc_client = eventarc_v1.EventarcClient()

    # Populate the config message that will replace the current one.
    config = eventarc_v1.GoogleChannelConfig()
    config.name = "name_value"

    update_request = eventarc_v1.UpdateGoogleChannelConfigRequest(
        google_channel_config=config,
    )

    # Unary call: returns the updated GoogleChannelConfig directly.
    updated = eventarc_client.update_google_channel_config(
        request=update_request,
    )

    print(updated)
b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -19,30 +19,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "CreateTrigger" + "shortName": "CreateChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelConnectionRequest" }, { "name": "parent", "type": "str" }, { - "name": "trigger", - "type": "google.cloud.eventarc_v1.types.Trigger" + "name": "channel_connection", + "type": "google.cloud.eventarc_v1.types.ChannelConnection" }, { - "name": "trigger_id", + "name": "channel_connection_id", "type": "str" }, { @@ -59,21 +59,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_trigger" + "shortName": "create_channel_connection" }, - "description": "Sample for CreateTrigger", - "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", + "description": "Sample for CreateChannelConnection", + "file": "eventarc_v1_generated_eventarc_create_channel_connection_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannelConnection_async", "segments": [ { - "end": 65, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 61, "start": 27, "type": "SHORT" }, @@ -83,22 +83,22 @@ "type": "CLIENT_INITIALIZATION" }, 
{ - "end": 55, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_channel_connection_async.py" }, { "canonical": true, @@ -107,30 +107,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.create_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "CreateTrigger" + "shortName": "CreateChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelConnectionRequest" }, { "name": "parent", "type": "str" }, { - "name": "trigger", - "type": "google.cloud.eventarc_v1.types.Trigger" + "name": "channel_connection", + "type": "google.cloud.eventarc_v1.types.ChannelConnection" }, { - "name": "trigger_id", + "name": "channel_connection_id", "type": "str" }, { @@ -147,21 +147,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_trigger" + "shortName": "create_channel_connection" }, - "description": "Sample for CreateTrigger", - "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", + "description": "Sample for CreateChannelConnection", + "file": "eventarc_v1_generated_eventarc_create_channel_connection_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", + 
"regionTag": "eventarc_v1_generated_Eventarc_CreateChannelConnection_sync", "segments": [ { - "end": 65, + "end": 61, "start": 27, "type": "FULL" }, { - "end": 65, + "end": 61, "start": 27, "type": "SHORT" }, @@ -171,22 +171,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 55, + "end": 51, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 62, - "start": 56, + "end": 58, + "start": 52, "type": "REQUEST_EXECUTION" }, { - "end": 66, - "start": 63, + "end": 62, + "start": 59, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_channel_connection_sync.py" }, { "canonical": true, @@ -196,27 +196,31 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteTrigger" + "shortName": "CreateChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "allow_missing", - "type": "bool" + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "channel_id", + "type": "str" }, { "name": "retry", @@ -232,21 +236,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_trigger" + "shortName": "create_channel" }, - "description": "Sample for DeleteTrigger", - "file": "eventarc_v1_generated_eventarc_delete_trigger_async.py", + "description": "Sample for CreateChannel", + 
"file": "eventarc_v1_generated_eventarc_create_channel_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannel_async", "segments": [ { - "end": 56, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 62, "start": 27, "type": "SHORT" }, @@ -256,22 +260,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_channel_async.py" }, { "canonical": true, @@ -280,27 +284,31 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_channel", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateChannel", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "DeleteTrigger" + "shortName": "CreateChannel" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateChannelRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { - "name": "allow_missing", - "type": "bool" + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "channel_id", + "type": "str" }, { "name": "retry", @@ -316,21 +324,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_trigger" + "shortName": "create_channel" }, - "description": "Sample 
for DeleteTrigger", - "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "description": "Sample for CreateChannel", + "file": "eventarc_v1_generated_eventarc_create_channel_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateChannel_sync", "segments": [ { - "end": 56, + "end": 62, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 62, "start": 27, "type": "SHORT" }, @@ -340,22 +348,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 59, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 63, + "start": 60, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_delete_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_channel_sync.py" }, { "canonical": true, @@ -365,22 +373,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.create_trigger", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetTrigger" + "shortName": "CreateTrigger" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": "google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", "type": "str" }, { @@ -396,22 +412,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.eventarc_v1.types.Trigger", - "shortName": "get_trigger" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_trigger" }, - "description": "Sample for GetTrigger", - "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_async", "segments": [ { - "end": 51, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 65, "start": 27, "type": "SHORT" }, @@ -421,22 +437,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + "title": "eventarc_v1_generated_eventarc_create_trigger_async.py" }, { "canonical": true, @@ -445,22 +461,30 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "fullName": "google.cloud.eventarc_v1.EventarcClient.create_trigger", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "fullName": "google.cloud.eventarc.v1.Eventarc.CreateTrigger", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "GetTrigger" + "shortName": "CreateTrigger" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + "type": "google.cloud.eventarc_v1.types.CreateTriggerRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "trigger", + "type": 
"google.cloud.eventarc_v1.types.Trigger" + }, + { + "name": "trigger_id", "type": "str" }, { @@ -476,22 +500,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.types.Trigger", - "shortName": "get_trigger" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_trigger" }, - "description": "Sample for GetTrigger", - "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "description": "Sample for CreateTrigger", + "file": "eventarc_v1_generated_eventarc_create_trigger_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "regionTag": "eventarc_v1_generated_Eventarc_CreateTrigger_sync", "segments": [ { - "end": 51, + "end": 65, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 65, "start": 27, "type": "SHORT" }, @@ -501,22 +525,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 55, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 62, + "start": 56, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 66, + "start": 63, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_get_trigger_sync.py" + "title": "eventarc_v1_generated_eventarc_create_trigger_sync.py" }, { "canonical": true, @@ -526,22 +550,22 @@ "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", "shortName": "EventarcAsyncClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListTriggers" + "shortName": "DeleteChannelConnection" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.eventarc_v1.types.ListTriggersRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -557,22 +581,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", - "shortName": "list_triggers" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_channel_connection" }, - "description": "Sample for ListTriggers", - "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", + "description": "Sample for DeleteChannelConnection", + "file": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -587,17 +611,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_connection_async.py" }, { "canonical": true, @@ -606,22 +630,22 @@ "fullName": "google.cloud.eventarc_v1.EventarcClient", "shortName": "EventarcClient" }, - "fullName": "google.cloud.eventarc_v1.EventarcClient.list_triggers", + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", "method": { - "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannelConnection", "service": { "fullName": "google.cloud.eventarc.v1.Eventarc", "shortName": "Eventarc" }, - "shortName": "ListTriggers" + 
"shortName": "DeleteChannelConnection" }, "parameters": [ { "name": "request", - "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + "type": "google.cloud.eventarc_v1.types.DeleteChannelConnectionRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -637,22 +661,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", - "shortName": "list_triggers" + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_channel_connection" }, - "description": "Sample for ListTriggers", - "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", + "description": "Sample for DeleteChannelConnection", + "file": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannelConnection_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -667,17 +691,2134 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" + "title": "eventarc_v1_generated_eventarc_delete_channel_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteChannel" + }, + "parameters": [ + { + 
"name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_channel" + }, + "description": "Sample for DeleteChannel", + "file": "eventarc_v1_generated_eventarc_delete_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": 
"delete_channel" + }, + "description": "Sample for DeleteChannel", + "file": "eventarc_v1_generated_eventarc_delete_channel_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteChannel_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": 
"FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_delete_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.delete_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.DeleteTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "DeleteTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.DeleteTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "allow_missing", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_trigger" + }, + "description": "Sample for DeleteTrigger", + "file": "eventarc_v1_generated_eventarc_delete_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_DeleteTrigger_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"eventarc_v1_generated_eventarc_delete_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel_connection", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannelConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", + "shortName": "get_channel_connection" + }, + "description": "Sample for GetChannelConnection", + "file": "eventarc_v1_generated_eventarc_get_channel_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + 
"method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannelConnection", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannelConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", + "shortName": "get_channel_connection" + }, + "description": "Sample for GetChannelConnection", + "file": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannelConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + }, + 
{ + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Channel", + "shortName": "get_channel" + }, + "description": "Sample for GetChannel", + "file": "eventarc_v1_generated_eventarc_get_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetChannelRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Channel", + "shortName": "get_channel" + }, + "description": "Sample for GetChannel", + "file": "eventarc_v1_generated_eventarc_get_channel_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetChannel_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "get_google_channel_config" + }, + "description": "Sample for GetGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_google_channel_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetGoogleChannelConfigRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "get_google_channel_config" + }, + "description": "Sample for GetGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_get_google_channel_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetGoogleChannelConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"eventarc_v1_generated_eventarc_get_google_channel_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_provider", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetProvider" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Provider", + "shortName": "get_provider" + }, + "description": "Sample for GetProvider", + "file": "eventarc_v1_generated_eventarc_get_provider_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_provider_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_provider", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetProvider", + "service": { + 
"fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetProvider" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetProviderRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Provider", + "shortName": "get_provider" + }, + "description": "Sample for GetProvider", + "file": "eventarc_v1_generated_eventarc_get_provider_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetProvider_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_provider_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.get_trigger", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.GetTrigger", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "GetTrigger" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.GetTriggerRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.Trigger", + "shortName": "get_trigger" + }, + "description": "Sample for GetTrigger", + "file": "eventarc_v1_generated_eventarc_get_trigger_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_GetTrigger_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_get_trigger_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channel_connections", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannelConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager", + "shortName": "list_channel_connections" + }, + "description": "Sample for ListChannelConnections", + "file": "eventarc_v1_generated_eventarc_list_channel_connections_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channel_connections_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channel_connections", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannelConnections", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannelConnections" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelConnectionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager", + "shortName": "list_channel_connections" + }, + "description": "Sample for ListChannelConnections", + "file": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannelConnections_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channel_connections_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_channels", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannels" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager", + "shortName": "list_channels" + }, + "description": "Sample for ListChannels", + "file": "eventarc_v1_generated_eventarc_list_channels_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channels_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_channels", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListChannels", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListChannels" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListChannelsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager", + "shortName": "list_channels" + }, + "description": "Sample for ListChannels", + "file": "eventarc_v1_generated_eventarc_list_channels_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListChannels_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_channels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_providers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListProviders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager", + "shortName": "list_providers" + }, + "description": "Sample for ListProviders", + "file": "eventarc_v1_generated_eventarc_list_providers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_providers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_providers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListProviders", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListProviders" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListProvidersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager", + "shortName": "list_providers" + }, + "description": "Sample for ListProviders", + "file": "eventarc_v1_generated_eventarc_list_providers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListProviders_sync", + "segments": [ 
+ { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_providers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.list_triggers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", + "shortName": "list_triggers" + }, + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_triggers_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.list_triggers", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.ListTriggers", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "ListTriggers" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.ListTriggersRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", + "shortName": "list_triggers" + }, + "description": "Sample for ListTriggers", + "file": "eventarc_v1_generated_eventarc_list_triggers_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_ListTriggers_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_list_triggers_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_channel", + "method": { + 
"fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateChannel" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateChannelRequest" + }, + { + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_channel" + }, + "description": "Sample for UpdateChannel", + "file": "eventarc_v1_generated_eventarc_update_channel_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_channel_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_channel", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateChannel", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateChannel" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.eventarc_v1.types.UpdateChannelRequest" + }, + { + "name": "channel", + "type": "google.cloud.eventarc_v1.types.Channel" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_channel" + }, + "description": "Sample for UpdateChannel", + "file": "eventarc_v1_generated_eventarc_update_channel_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateChannel_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_channel_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient", + "shortName": "EventarcAsyncClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcAsyncClient.update_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" + }, + { + "name": "google_channel_config", + "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "update_google_channel_config" + }, + "description": "Sample for UpdateGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_google_channel_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.eventarc_v1.EventarcClient", + "shortName": "EventarcClient" + }, + "fullName": "google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", + "method": { + "fullName": "google.cloud.eventarc.v1.Eventarc.UpdateGoogleChannelConfig", + "service": { + "fullName": "google.cloud.eventarc.v1.Eventarc", + "shortName": "Eventarc" + }, + "shortName": "UpdateGoogleChannelConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.eventarc_v1.types.UpdateGoogleChannelConfigRequest" + }, + { + "name": "google_channel_config", + "type": "google.cloud.eventarc_v1.types.GoogleChannelConfig" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", + "shortName": "update_google_channel_config" + }, + "description": "Sample for UpdateGoogleChannelConfig", + "file": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "eventarc_v1_generated_Eventarc_UpdateGoogleChannelConfig_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "eventarc_v1_generated_eventarc_update_google_channel_config_sync.py" }, { "canonical": true, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py index 2b6ac4b47971..bdd67ffcdc7c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -39,10 +39,23 @@ def partition( class eventarcCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_channel': ('parent', 'channel', 'channel_id', 'validate_only', ), + 'create_channel_connection': ('parent', 'channel_connection', 'channel_connection_id', ), 'create_trigger': ('parent', 'trigger', 'trigger_id', 'validate_only', ), + 'delete_channel': ('name', 'validate_only', ), + 'delete_channel_connection': ('name', ), 
'delete_trigger': ('name', 'validate_only', 'etag', 'allow_missing', ), + 'get_channel': ('name', ), + 'get_channel_connection': ('name', ), + 'get_google_channel_config': ('name', ), + 'get_provider': ('name', ), 'get_trigger': ('name', ), - 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', ), + 'list_channel_connections': ('parent', 'page_size', 'page_token', ), + 'list_channels': ('parent', 'page_size', 'page_token', 'order_by', ), + 'list_providers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), + 'update_channel': ('validate_only', 'channel', 'update_mask', ), + 'update_google_channel_config': ('google_channel_config', 'update_mask', ), 'update_trigger': ('validate_only', 'trigger', 'update_mask', 'allow_missing', ), } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index e87ed68bde0b..c61f8327b218 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -51,7 +51,14 @@ from google.cloud.eventarc_v1.services.eventarc import EventarcClient from google.cloud.eventarc_v1.services.eventarc import pagers from google.cloud.eventarc_v1.services.eventarc import transports +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel as gce_channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import channel_connection as gce_channel_connection +from google.cloud.eventarc_v1.types import discovery from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import 
google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config from google.cloud.eventarc_v1.types import trigger from google.cloud.eventarc_v1.types import trigger as gce_trigger from google.cloud.location import locations_pb2 @@ -62,6 +69,7 @@ from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import code_pb2 # type: ignore import google.auth @@ -575,6 +583,7 @@ def test_get_trigger(request_type, transport: str = 'grpc'): name='name_value', uid='uid_value', service_account='service_account_value', + channel='channel_value', etag='etag_value', ) response = client.get_trigger(request) @@ -589,6 +598,7 @@ def test_get_trigger(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.uid == 'uid_value' assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' assert response.etag == 'etag_value' @@ -629,6 +639,7 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e name='name_value', uid='uid_value', service_account='service_account_value', + channel='channel_value', etag='etag_value', )) response = await client.get_trigger(request) @@ -643,6 +654,7 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e assert response.name == 'name_value' assert response.uid == 'uid_value' assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' assert response.etag == 'etag_value' @@ -1964,53 +1976,7052 @@ async def test_delete_trigger_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelRequest, + dict, +]) +def test_get_channel(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # 
Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + pubsub_topic='pubsub_topic_value', + ) + response = client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_get_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + client.get_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() + +@pytest.mark.asyncio +async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.asyncio +async def test_get_channel_async_from_dict(): + await test_get_channel_async(request_type=dict) + + +def test_get_channel_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetChannelRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = channel.Channel() + client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_channel_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetChannelRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel()) + await client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_channel_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel.Channel() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_channel( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_channel_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel( + eventarc.GetChannelRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel.Channel() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_channel( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_channel_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_channel( + eventarc.GetChannelRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelsRequest, + dict, +]) +def test_list_channels(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_channels_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + client.list_channels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() + +@pytest.mark.asyncio +async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_channels_async_from_dict(): + await test_list_channels_async(request_type=dict) + + +def test_list_channels_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListChannelsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = eventarc.ListChannelsResponse() + client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_channels_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListChannelsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse()) + await client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_channels_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_channels( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_channels_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channels( + eventarc.ListChannelsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_channels_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_channels( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_channels_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_channels( + eventarc.ListChannelsRequest(), + parent='parent_value', + ) + + +def test_list_channels_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_channels(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel.Channel) + for i in results) +def test_list_channels_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + pages = list(client.list_channels(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_channels_async_pager(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_channels(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, channel.Channel) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_channels_async_pages(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_channels(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelRequest, + dict, +]) +def test_create_channel(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test_create_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + client.create_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + +@pytest.mark.asyncio +async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_channel_async_from_dict(): + await test_create_channel_async(request_type=dict) + + +def test_create_channel_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateChannelRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_channel_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateChannelRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_channel_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_channel( + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].channel_id + mock_val = 'channel_id_value' + assert arg == mock_val + + +def test_create_channel_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_channel( + eventarc.CreateChannelRequest(), + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + +@pytest.mark.asyncio +async def test_create_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_channel( + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].channel_id + mock_val = 'channel_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_channel_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_channel( + eventarc.CreateChannelRequest(), + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + client.update_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + +@pytest.mark.asyncio +async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_channel_async_from_dict(): + await test_update_channel_async(request_type=dict) + + +def test_update_channel_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateChannelRequest() + + request.channel.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'channel.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_channel_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateChannelRequest() + + request.channel.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'channel.name=name_value', + ) in kw['metadata'] + + +def test_update_channel_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_channel( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_channel_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_channel( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].channel + mock_val = gce_channel.Channel(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_channel_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelRequest, + dict, +]) +def test_delete_channel(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_channel_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + client.delete_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() + +@pytest.mark.asyncio +async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_channel_async_from_dict(): + await test_delete_channel_async(request_type=dict) + + +def test_delete_channel_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_channel_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_channel_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_channel( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_channel_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_channel( + eventarc.DeleteChannelRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_channel_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_channel( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_channel_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_channel( + eventarc.DeleteChannelRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + response = client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_get_provider_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + client.get_provider() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() + +@pytest.mark.asyncio +async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( + name='name_value', + display_name='display_name_value', + )) + response = await client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.asyncio +async def test_get_provider_async_from_dict(): + await test_get_provider_async(request_type=dict) + + +def test_get_provider_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = eventarc.GetProviderRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + call.return_value = discovery.Provider() + client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_provider_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetProviderRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider()) + await client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_provider_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = discovery.Provider() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_provider( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_provider_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_provider_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = discovery.Provider() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_provider( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_provider_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProvidersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_providers_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + client.list_providers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + +@pytest.mark.asyncio +async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProvidersAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_providers_async_from_dict(): + await test_list_providers_async(request_type=dict) + + +def test_list_providers_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = eventarc.ListProvidersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + call.return_value = eventarc.ListProvidersResponse() + client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_providers_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListProvidersRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse()) + await client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_providers_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = eventarc.ListProvidersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_providers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_providers_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_providers( + eventarc.ListProvidersRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_providers_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListProvidersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_providers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_providers_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_providers( + eventarc.ListProvidersRequest(), + parent='parent_value', + ) + + +def test_list_providers_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_providers(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, discovery.Provider) + for i in results) +def test_list_providers_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + pages = list(client.list_providers(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_providers_async_pager(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_providers(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, discovery.Provider) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_providers_async_pages(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_providers(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + ) + response = client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +def test_get_channel_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + client.get_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + +@pytest.mark.asyncio +async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + )) + response = await client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +@pytest.mark.asyncio +async def test_get_channel_connection_async_from_dict(): + await test_get_channel_connection_async(request_type=dict) + + +def test_get_channel_connection_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetChannelConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + call.return_value = channel_connection.ChannelConnection() + client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_channel_connection_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetChannelConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection()) + await client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_channel_connection_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel_connection.ChannelConnection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_channel_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_channel_connection_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel_connection( + eventarc.GetChannelConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_channel_connection_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = channel_connection.ChannelConnection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_channel_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_channel_connection_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_channel_connection( + eventarc.GetChannelConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelConnectionsRequest, + dict, +]) +def test_list_channel_connections(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_channel_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + client.list_channel_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest() + +@pytest.mark.asyncio +async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelConnectionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_channel_connections_async_from_dict(): + await test_list_channel_connections_async(request_type=dict) + + +def test_list_channel_connections_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = eventarc.ListChannelConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + call.return_value = eventarc.ListChannelConnectionsResponse() + client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_channel_connections_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.ListChannelConnectionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse()) + await client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_channel_connections_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_channel_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_channel_connections_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channel_connections( + eventarc.ListChannelConnectionsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_channel_connections_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = eventarc.ListChannelConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_channel_connections( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_channel_connections_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_channel_connections( + eventarc.ListChannelConnectionsRequest(), + parent='parent_value', + ) + + +def test_list_channel_connections_pager(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_channel_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel_connection.ChannelConnection) + for i in results) 
+def test_list_channel_connections_pages(transport_name: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_channel_connections(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_channel_connections_async_pager(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_channel_connections(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, channel_connection.ChannelConnection) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_channel_connections_async_pages(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_channel_connections(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelConnectionRequest, + dict, +]) +def test_create_channel_connection(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_channel_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + client.create_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + +@pytest.mark.asyncio +async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_channel_connection_async_from_dict(): + await test_create_channel_connection_async(request_type=dict) + + +def test_create_channel_connection_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateChannelConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_channel_connection_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.CreateChannelConnectionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_channel_connection_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_channel_connection( + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel_connection + mock_val = gce_channel_connection.ChannelConnection(name='name_value') + assert arg == mock_val + arg = args[0].channel_connection_id + mock_val = 'channel_connection_id_value' + assert arg == mock_val + + +def test_create_channel_connection_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_channel_connection( + eventarc.CreateChannelConnectionRequest(), + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + +@pytest.mark.asyncio +async def test_create_channel_connection_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_channel_connection( + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].channel_connection + mock_val = gce_channel_connection.ChannelConnection(name='name_value') + assert arg == mock_val + arg = args[0].channel_connection_id + mock_val = 'channel_connection_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_channel_connection_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_channel_connection( + eventarc.CreateChannelConnectionRequest(), + parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelConnectionRequest, + dict, +]) +def test_delete_channel_connection(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_channel_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + client.delete_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + +@pytest.mark.asyncio +async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_channel_connection_async_from_dict(): + await test_delete_channel_connection_async(request_type=dict) + + +def test_delete_channel_connection_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_channel_connection_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.DeleteChannelConnectionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_channel_connection_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_channel_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_channel_connection_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_channel_connection( + eventarc.DeleteChannelConnectionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_channel_connection_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_channel_connection( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_channel_connection_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_channel_connection( + eventarc.DeleteChannelConnectionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleChannelConfigRequest, + dict, +]) +def test_get_google_channel_config(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + response = client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_get_google_channel_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + client.get_google_channel_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest() + +@pytest.mark.asyncio +async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.asyncio +async def test_get_google_channel_config_async_from_dict(): + await test_get_google_channel_config_async(request_type=dict) + + +def test_get_google_channel_config_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetGoogleChannelConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + call.return_value = google_channel_config.GoogleChannelConfig() + client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_google_channel_config_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.GetGoogleChannelConfigRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig()) + await client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_google_channel_config_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = google_channel_config.GoogleChannelConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_google_channel_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_google_channel_config_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_google_channel_config( + eventarc.GetGoogleChannelConfigRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_google_channel_config_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = google_channel_config.GoogleChannelConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_google_channel_config( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_google_channel_config_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_google_channel_config( + eventarc.GetGoogleChannelConfigRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, +]) +def test_update_google_channel_config(request_type, transport: str = 'grpc'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + response = client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +def test_update_google_channel_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + client.update_google_channel_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + +@pytest.mark.asyncio +async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.asyncio +async def test_update_google_channel_config_async_from_dict(): + await test_update_google_channel_config_async(request_type=dict) + + +def test_update_google_channel_config_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateGoogleChannelConfigRequest() + + request.google_channel_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = gce_google_channel_config.GoogleChannelConfig() + client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'google_channel_config.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_google_channel_config_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = eventarc.UpdateGoogleChannelConfigRequest() + + request.google_channel_config.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig()) + await client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'google_channel_config.name=name_value', + ) in kw['metadata'] + + +def test_update_google_channel_config_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gce_google_channel_config.GoogleChannelConfig() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_google_channel_config( + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].google_channel_config + mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test_update_google_channel_config_flattened_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_google_channel_config( + eventarc.UpdateGoogleChannelConfigRequest(), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test_update_google_channel_config_flattened_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gce_google_channel_config.GoogleChannelConfig() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_google_channel_config( + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].google_channel_config + mock_val = gce_google_channel_config.GoogleChannelConfig(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_google_channel_config_flattened_error_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_google_channel_config( + eventarc.UpdateGoogleChannelConfigRequest(), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + @pytest.mark.parametrize("request_type", [ eventarc.GetTriggerRequest, dict, ]) -def test_get_trigger_rest(request_type): +def test_get_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.etag == 'etag_value' + + +def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = 
EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_trigger(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with 
mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = trigger.Trigger.to_json(trigger.Trigger()) + + request = eventarc.GetTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = trigger.Trigger() + + client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_trigger(request) + + +def test_get_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_get_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_trigger( + eventarc.GetTriggerRequest(), + name='name_value', + ) + + +def test_get_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListTriggersRequest, + dict, +]) +def test_list_triggers_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_triggers(request) + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, pagers.ListTriggersPager)
+    assert response.next_page_token == 'next_page_token_value'
+    assert response.unreachable == ['unreachable_value']
+
+
+def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest):
+    transport_class = transports.EventarcRestTransport
+
+    request_init = {}
+    request_init["parent"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(json_format.MessageToJson(
+        pb_request,
+        including_default_value_fields=False,
+        use_integers_for_enums=False
+    ))
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["parent"] = 'parent_value'
+
+    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request)
+    # Check that path parameters and body parameters are not mixing in.
+    assert not set(unset_fields) - set(("filter", "orderBy", "pageSize", "pageToken", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = eventarc.ListTriggersResponse()
+    # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_triggers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_triggers_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_triggers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_triggers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ + 
mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + + request = eventarc.ListTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListTriggersResponse() + + client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_triggers(request) + + +def test_list_triggers_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_list_triggers_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_triggers( + eventarc.ListTriggersRequest(), + parent='parent_value', + ) + + +def test_list_triggers_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + trigger.Trigger(), + ], + next_page_token='abc', + ), + eventarc.ListTriggersResponse( + triggers=[], + next_page_token='def', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + ], + next_page_token='ghi', + ), + eventarc.ListTriggersResponse( + triggers=[ + trigger.Trigger(), + trigger.Trigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, trigger.Trigger) + for i in results) + + pages = list(client.list_triggers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateTriggerRequest, + dict, +]) +def test_create_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 
'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["trigger_id"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "triggerId" not in jsonified_request + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == request_init["trigger_id"] + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["triggerId"] = 'trigger_id_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+    assert not set(unset_fields) - set(("triggerId", "validateOnly", ))
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "parent" in jsonified_request
+    assert jsonified_request["parent"] == 'parent_value'
+    assert "triggerId" in jsonified_request
+    assert jsonified_request["triggerId"] == 'trigger_id_value'
+    assert "validateOnly" in jsonified_request
+    assert jsonified_request["validateOnly"] is True
+
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='rest',
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = operations_pb2.Operation(name='operations/spam')
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, 'request') as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, 'transcode') as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                'uri': 'v1/sample_method',
+                'method': "post",
+                'query_params': pb_request,
+            }
+            transcode_result['body'] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode('UTF-8')
+            req.return_value = response_value
+
+            response = client.create_trigger(request)
+
+            expected_params = [
+                (
+                    "triggerId",
+                    "",
+                ),
+                (
+                    "validateOnly",
+                    str(False).lower(),
+                ),
+            ]
+            actual_params = req.call_args.kwargs['params']
+            assert expected_params == actual_params
+
+
+def test_create_trigger_rest_unset_required_fields():
+    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials())
+
+    unset_fields = transport.create_trigger._get_unset_required_fields({})
+    assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", )))
+
+
+@pytest.mark.parametrize("null_interceptor", [True, False])
+def test_create_trigger_rest_interceptors(null_interceptor):
+    transport = transports.EventarcRestTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+        interceptor=None if null_interceptor else transports.EventarcRestInterceptor(),
+    )
+    client = EventarcClient(transport=transport)
+    with mock.patch.object(type(client.transport._session), "request") as req, \
+         mock.patch.object(path_template, "transcode") as transcode, \
+         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
+         mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \
+         mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre:
+        pre.assert_not_called()
+        post.assert_not_called()
+        pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest())
+        transcode.return_value = {
+            "method": "post",
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.CreateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_trigger(request) + + +def test_create_trigger_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + + +def test_create_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_trigger( + eventarc.CreateTriggerRequest(), + parent='parent_value', + trigger=gce_trigger.Trigger(name='name_value'), + trigger_id='trigger_id_value', + ) + + +def test_create_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateTriggerRequest, + dict, +]) +def test_update_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_trigger(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_trigger(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_trigger_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_trigger._get_unset_required_fields({}) + assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = EventarcClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Build a request whose trigger name satisfies the URL transcoding rules.
    request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}}
    request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'}
    request = request_type(**request_init)

    # Hand the session a 400 response and expect the translated exception.
    with mock.patch.object(Session, 'request') as mocked_call:
        with pytest.raises(core_exceptions.BadRequest):
            bad_response = Response()
            bad_response.status_code = 400
            bad_response.request = Request()
            mocked_call.return_value = bad_response
            client.update_trigger(request)
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + + +def test_update_trigger_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_trigger( + eventarc.UpdateTriggerRequest(), + trigger=gce_trigger.Trigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + allow_missing=True, + ) + + +def test_update_trigger_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_trigger(request) + + # Establish that the response is the type that we expect. 
def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest):
    """Exercise required-field handling for delete_trigger over REST.

    Phase 1 round-trips the request through JSON and checks that
    ``_get_unset_required_fields`` re-adds required fields whose values are
    protobuf defaults (dropped by MessageToJson), while leaving explicitly
    set values untouched.  Phase 2 issues the call with ``transcode`` mocked
    so all request fields land in ``query_params``, then inspects the query
    string the transport actually sent.
    """
    transport_class = transports.EventarcRestTransport

    request_init = {}
    request_init["name"] = ""
    request_init["validate_only"] = False
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    # Serialize without defaults: default-valued required fields disappear here.
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped
    assert "validateOnly" not in jsonified_request

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "validateOnly" in jsonified_request
    assert jsonified_request["validateOnly"] == request_init["validate_only"]

    # Now set non-default values and confirm they survive a second pass.
    jsonified_request["name"] = 'name_value'
    jsonified_request["validateOnly"] = True

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'
    assert "validateOnly" in jsonified_request
    assert jsonified_request["validateOnly"] == True

    client = EventarcClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = operations_pb2.Operation(name='operations/spam')
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = json_format.MessageToJson(return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_trigger(request)

            # The default-valued required field must still be sent on the wire,
            # as a lower-cased string ("false").
            expected_params = [
                (
                    "validateOnly",
                    str(False).lower(),
                ),
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_trigger_rest_unset_required_fields():
    """The unset-required set is the intersection of optional query params
    and required fields for delete_trigger (camelCase wire names)."""
    # NOTE(review): AnonymousCredentials is passed as the class object, not an
    # instance; the transport never exercises it on this path, but this looks
    # like an oversight -- compare with other constructions in this file.
    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.delete_trigger._get_unset_required_fields({})
    assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", )))
def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest):
    """An HTTP 400 from the server must surface as core_exceptions.BadRequest."""
    client = EventarcClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # A trigger name that satisfies the URL transcoding rules for this method.
    request = request_type(**{'name': 'projects/sample1/locations/sample2/triggers/sample3'})

    # Hand the session a 400 response and expect the translated exception.
    with mock.patch.object(Session, 'request') as mocked_call:
        with pytest.raises(core_exceptions.BadRequest):
            bad_response = Response()
            bad_response.status_code = 400
            bad_response.request = Request()
            mocked_call.return_value = bad_response
            client.delete_trigger(request)
@pytest.mark.parametrize("request_type", [
    eventarc.GetChannelRequest,
    dict,
])
def test_get_channel_rest(request_type):
    """Round-trip a GetChannel call over REST and verify that every field set
    on the canned response is echoed back on the returned Channel."""
    client = EventarcClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = channel.Channel(
            name='name_value',
            uid='uid_value',
            provider='provider_value',
            state=channel.Channel.State.PENDING,
            activation_token='activation_token_value',
            crypto_key_name='crypto_key_name_value',
            pubsub_topic='pubsub_topic_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = channel.Channel.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get_channel(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, channel.Channel)
    assert response.name == 'name_value'
    assert response.uid == 'uid_value'
    assert response.provider == 'provider_value'
    assert response.state == channel.Channel.State.PENDING
    assert response.activation_token == 'activation_token_value'
    assert response.crypto_key_name == 'crypto_key_name_value'
    # Fix: the canned response also sets pubsub_topic, but the original test
    # never asserted it, so a regression in that field would go unnoticed.
    assert response.pubsub_topic == 'pubsub_topic_value'
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_channel(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as 
pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = channel.Channel.to_json(channel.Channel()) + + request = eventarc.GetChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel.Channel() + + client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_channel(request) + + +def test_get_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = channel.Channel() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_get_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_channel( + eventarc.GetChannelRequest(), + name='name_value', + ) + + +def test_get_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelsRequest, + dict, +]) +def test_list_channels_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRequest):
    """Exercise required-field handling for list_channels over REST.

    Phase 1 round-trips the request through JSON and checks that
    ``_get_unset_required_fields`` leaves the explicitly-set ``parent``
    untouched (list_channels has no default-valued required fields).
    Phase 2 issues the call with ``transcode`` mocked so all request fields
    land in ``query_params``, then checks the query string actually sent.
    """
    transport_class = transports.EventarcRestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    # Serialize without defaults: default-valued fields disappear here.
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("order_by", "page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = EventarcClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = eventarc.ListChannelsResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = eventarc.ListChannelsResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_channels(request)

            # No default-valued required fields exist, so nothing is forced
            # onto the query string.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_channels_rest_unset_required_fields():
    """The unset-required set is the intersection of optional query params
    and required fields for list_channels (camelCase wire names)."""
    # NOTE(review): AnonymousCredentials is passed as the class object, not an
    # instance; the transport never exercises it on this path, but this looks
    # like an oversight -- compare with other constructions in this file.
    transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list_channels._get_unset_required_fields({})
    assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", )))
req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + + request = eventarc.ListChannelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelsResponse() + + client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_channels(request) + + +def test_list_channels_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = eventarc.ListChannelsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_channels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + + +def test_list_channels_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_channels( + eventarc.ListChannelsRequest(), + parent='parent_value', + ) + + +def test_list_channels_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + channel.Channel(), + ], + next_page_token='abc', + ), + eventarc.ListChannelsResponse( + channels=[], + next_page_token='def', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelsResponse( + channels=[ + channel.Channel(), + channel.Channel(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListChannelsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_channels(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, channel.Channel) + for i in results) + + pages = list(client.list_channels(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelRequest, + dict, +]) +def test_create_channel_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 
'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_channel(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["channel_id"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "channelId" not in jsonified_request + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "channelId" in jsonified_request + assert jsonified_request["channelId"] == request_init["channel_id"] + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["parent"] = 
'parent_value' + jsonified_request["channelId"] = 'channel_id_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("channel_id", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "channelId" in jsonified_request + assert jsonified_request["channelId"] == 'channel_id_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.create_channel(request) + + expected_params = [ + ( + "channelId", + "", + ), + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_channel_._get_unset_required_fields({}) + assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.CreateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_channel(request) + + +def test_create_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.create_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channels" % client.transport._host, args[1]) + + +def test_create_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_channel( + eventarc.CreateChannelRequest(), + parent='parent_value', + channel=gce_channel.Channel(name='name_value'), + channel_id='channel_id_value', + ) + + +def test_create_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_channel(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", "validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.update_channel(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + 
mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.UpdateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_channel(request) + + +def test_update_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.update_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{channel.name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_update_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_channel( + eventarc.UpdateChannelRequest(), + channel=gce_channel.Channel(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_update_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelRequest, + dict, +]) +def test_delete_channel_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_channel(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannelRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request_init["validate_only"] = False + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "validateOnly" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == request_init["validate_only"] + + jsonified_request["name"] = 'name_value' + jsonified_request["validateOnly"] = True + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + assert "validateOnly" in jsonified_request + assert jsonified_request["validateOnly"] == True + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_channel(request) + + expected_params = [ + ( + "validateOnly", + str(False).lower(), + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_channel_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_channel._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as 
transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = eventarc.DeleteChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_channel(request) + + +def test_delete_channel_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.delete_channel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channels/*}" % client.transport._host, args[1]) + + +def test_delete_channel_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_channel( + eventarc.DeleteChannelRequest(), + name='name_value', + ) + + +def test_delete_channel_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_provider(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = discovery.Provider() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_provider(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_provider_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_provider._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_provider_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content 
= discovery.Provider.to_json(discovery.Provider()) + + request = eventarc.GetProviderRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discovery.Provider() + + client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetProviderRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_provider(request) + + +def test_get_provider_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = discovery.Provider() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_provider(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/providers/*}" % client.transport._host, args[1]) + + +def test_get_provider_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_provider( + eventarc.GetProviderRequest(), + name='name_value', + ) + + +def test_get_provider_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_providers(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListProvidersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_providers_rest_required_fields(request_type=eventarc.ListProvidersRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_providers(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_providers_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_providers._get_unset_required_fields({}) + assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_providers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 
200 + req.return_value.request = PreparedRequest() + req.return_value._content = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + + request = eventarc.ListProvidersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListProvidersResponse() + + client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListProvidersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_providers(request) + + +def test_list_providers_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = eventarc.ListProvidersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.list_providers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/providers" % client.transport._host, args[1]) + + +def test_list_providers_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_providers( + eventarc.ListProvidersRequest(), + parent='parent_value', + ) + + +def test_list_providers_rest_pager(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + discovery.Provider(), + ], + next_page_token='abc', + ), + eventarc.ListProvidersResponse( + providers=[], + next_page_token='def', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + ], + next_page_token='ghi', + ), + eventarc.ListProvidersResponse( + providers=[ + discovery.Provider(), + discovery.Provider(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListProvidersResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_providers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, discovery.Provider) + for i in results) + + pages = list(client.list_providers(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a 
response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = trigger.Trigger( + return_value = channel_connection.ChannelConnection( name='name_value', uid='uid_value', - service_account='service_account_value', - etag='etag_value', + channel='channel_value', + activation_token='activation_token_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) + pb_return_value = channel_connection.ChannelConnection.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.get_trigger(request) + response = client.get_channel_connection(request) # Establish that the response is the type that we expect. - assert isinstance(response, trigger.Trigger) + assert isinstance(response, channel_connection.ChannelConnection) assert response.name == 'name_value' assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.etag == 'etag_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetChannelConnectionRequest): + transport_class = transports.EventarcRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + 
jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_channel_connection(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_channel_connection_rest_unset_required_fields(): + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) + + request = eventarc.GetChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel_connection.ChannelConnection() + + client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_channel_connection(request) + + +def test_get_channel_connection_rest_flattened(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = channel_connection.ChannelConnection() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_channel_connection(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) + + +def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_channel_connection( + eventarc.GetChannelConnectionRequest(), + name='name_value', + ) + + +def test_get_channel_connection_rest_error(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelConnectionsRequest, + dict, +]) +def test_list_channel_connections_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_channel_connections(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChannelConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] -def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): +def test_list_channel_connections_rest_required_fields(request_type=eventarc.ListChannelConnectionsRequest): transport_class = transports.EventarcRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -2021,19 +9032,21 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = 'name_value' + jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("page_size", "page_token", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2042,7 +9055,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = trigger.Trigger() + return_value = eventarc.ListChannelConnectionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -2062,13 +9075,13 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques response_value = Response() response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) + pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.get_trigger(request) + response = client.list_channel_connections(request) expected_params = [ ] @@ -2076,15 +9089,15 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques assert expected_params == actual_params -def test_get_trigger_rest_unset_required_fields(): +def test_list_channel_connections_rest_unset_required_fields(): transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.get_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + unset_fields = 
transport.list_channel_connections._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_trigger_rest_interceptors(null_interceptor): +def test_list_channel_connections_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -2092,11 +9105,11 @@ def test_get_trigger_rest_interceptors(null_interceptor): client = EventarcClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2107,30 +9120,30 @@ def test_get_trigger_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = trigger.Trigger.to_json(trigger.Trigger()) + req.return_value._content = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) - request = eventarc.GetTriggerRequest() + request = eventarc.ListChannelConnectionsRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, 
metadata - post.return_value = trigger.Trigger() + post.return_value = eventarc.ListChannelConnectionsResponse() - client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): +def test_list_channel_connections_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'parent': 'projects/sample1/locations/sample2'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2140,10 +9153,10 @@ def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=even response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_trigger(request) + client.list_channel_connections(request) -def test_get_trigger_rest_flattened(): +def test_list_channel_connections_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2152,35 +9165,35 @@ def test_get_trigger_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = trigger.Trigger() + return_value = eventarc.ListChannelConnectionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + sample_request = {'parent': 'projects/sample1/locations/sample2'} # get truthy value for each flattened field mock_args = dict( - name='name_value', + parent='parent_value', ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) + pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.get_trigger(**mock_args) + client.list_channel_connections(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) -def test_get_trigger_rest_flattened_error(transport: str = 'rest'): +def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2189,24 +9202,79 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_trigger( - eventarc.GetTriggerRequest(), - name='name_value', + client.list_channel_connections( + eventarc.ListChannelConnectionsRequest(), + parent='parent_value', ) -def test_get_trigger_rest_error(): +def test_list_channel_connections_rest_pager(transport: str = 'rest'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport=transport, ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + next_page_token='abc', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[], + next_page_token='def', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + ], + next_page_token='ghi', + ), + eventarc.ListChannelConnectionsResponse( + channel_connections=[ + channel_connection.ChannelConnection(), + channel_connection.ChannelConnection(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(eventarc.ListChannelConnectionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_channel_connections(request=sample_request) + + results = list(pager) 
+ assert len(results) == 6 + assert all(isinstance(i, channel_connection.ChannelConnection) + for i in results) + + pages = list(client.list_channel_connections(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, + eventarc.CreateChannelConnectionRequest, dict, ]) -def test_list_triggers_rest(request_type): +def test_create_channel_connection_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2214,37 +9282,33 @@ def test_list_triggers_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) + return_value = operations_pb2.Operation(name='operations/spam') # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_triggers(request) + response = client.create_channel_connection(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTriggersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] + assert response.operation.name == "operations/spam" -def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): +def test_create_channel_connection_rest_required_fields(request_type=eventarc.CreateChannelConnectionRequest): transport_class = transports.EventarcRestTransport request_init = {} request_init["parent"] = "" + request_init["channel_connection_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -2254,22 +9318,28 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe )) # verify fields with default values are dropped + assert "channelConnectionId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "channelConnectionId" in jsonified_request + assert jsonified_request["channelConnectionId"] == request_init["channel_connection_id"] jsonified_request["parent"] = 'parent_value' + jsonified_request["channelConnectionId"] = 'channel_connection_id_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) + assert not set(unset_fields) - set(("channel_connection_id", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == 'parent_value' + assert "channelConnectionId" in jsonified_request + assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2278,7 +9348,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = eventarc.ListTriggersResponse() + return_value = operations_pb2.Operation(name='operations/spam') # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -2290,37 +9360,40 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "get", + 'method': "post", 'query_params': pb_request, } + transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.list_triggers(request) + response = client.create_channel_connection(request) expected_params = [ + ( + "channelConnectionId", + "", + ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_list_triggers_rest_unset_required_fields(): +def test_create_channel_connection_rest_unset_required_fields(): transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.list_triggers._get_unset_required_fields({}) - assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) + unset_fields = transport.create_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_triggers_rest_interceptors(null_interceptor): +def test_create_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else 
transports.EventarcRestInterceptor(), @@ -2328,11 +9401,12 @@ def test_list_triggers_rest_interceptors(null_interceptor): client = EventarcClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2343,23 +9417,23 @@ def test_list_triggers_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - request = eventarc.ListTriggersRequest() + request = eventarc.CreateChannelConnectionRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = eventarc.ListTriggersResponse() + post.return_value = operations_pb2.Operation() - client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_list_triggers_rest_bad_request(transport: 
str = 'rest', request_type=eventarc.ListTriggersRequest): +def test_create_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2367,6 +9441,7 @@ def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=ev # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2376,10 +9451,10 @@ def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=ev response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.list_triggers(request) + client.create_channel_connection(request) -def test_list_triggers_rest_flattened(): +def test_create_channel_connection_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2388,7 +9463,7 @@ def test_list_triggers_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = eventarc.ListTriggersResponse() + return_value = operations_pb2.Operation(name='operations/spam') # get arguments that satisfy an http rule for this method sample_request = {'parent': 'projects/sample1/locations/sample2'} @@ -2396,27 +9471,28 @@ def test_list_triggers_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.list_triggers(**mock_args) + client.create_channel_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/channelConnections" % client.transport._host, args[1]) -def test_list_triggers_rest_flattened_error(transport: str = 'rest'): +def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2425,87 +9501,33 @@ def test_list_triggers_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_triggers( - eventarc.ListTriggersRequest(), + client.create_channel_connection( + eventarc.CreateChannelConnectionRequest(), parent='parent_value', + channel_connection=gce_channel_connection.ChannelConnection(name='name_value'), + channel_connection_id='channel_connection_id_value', ) -def test_list_triggers_rest_pager(transport: str = 'rest'): +def test_create_channel_connection_rest_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='rest' ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - trigger.Trigger(), - trigger.Trigger(), - ], - next_page_token='abc', - ), - eventarc.ListTriggersResponse( - triggers=[], - next_page_token='def', - ), - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - ], - next_page_token='ghi', - ), - eventarc.ListTriggersResponse( - triggers=[ - trigger.Trigger(), - trigger.Trigger(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(eventarc.ListTriggersResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1/locations/sample2'} - - pager = client.list_triggers(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, trigger.Trigger) - for i in results) - - pages = 
list(client.list_triggers(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, + eventarc.DeleteChannelConnectionRequest, dict, ]) -def test_create_trigger_rest(request_type): +def test_delete_channel_connection_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -2520,19 +9542,17 @@ def test_create_trigger_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.create_trigger(request) + response = client.delete_channel_connection(request) # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" -def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): +def test_delete_channel_connection_rest_required_fields(request_type=eventarc.DeleteChannelConnectionRequest): transport_class = transports.EventarcRestTransport request_init = {} - request_init["parent"] = "" - request_init["trigger_id"] = "" - request_init["validate_only"] = False + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -2542,34 +9562,20 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger )) # verify fields with default values are dropped - assert "triggerId" not in jsonified_request - assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == request_init["trigger_id"] - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["parent"] = 'parent_value' - jsonified_request["triggerId"] = 'trigger_id_value' - jsonified_request["validateOnly"] = True + jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("trigger_id", "validate_only", )) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "triggerId" in jsonified_request - assert jsonified_request["triggerId"] == 'trigger_id_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2590,10 +9596,9 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "post", + 'method': "delete", 'query_params': pb_request, } - transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -2603,31 +9608,23 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.create_trigger(request) + response = client.delete_channel_connection(request) expected_params = [ - ( - "triggerId", - "", - ), - ( - "validateOnly", - str(False).lower(), - ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_create_trigger_rest_unset_required_fields(): +def test_delete_channel_connection_rest_unset_required_fields(): transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.create_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("triggerId", "validateOnly", 
)) & set(("parent", "trigger", "triggerId", "validateOnly", ))) + unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_trigger_rest_interceptors(null_interceptor): +def test_delete_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ -2636,11 +9633,11 @@ def test_create_trigger_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) + pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2653,7 +9650,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): req.return_value.request = PreparedRequest() req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - request = eventarc.CreateTriggerRequest() + request = eventarc.DeleteChannelConnectionRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), @@ -2661,21 +9658,20 @@ def test_create_trigger_rest_interceptors(null_interceptor): 
pre.return_value = request, metadata post.return_value = operations_pb2.Operation() - client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): +def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -2685,10 +9681,10 @@ def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=e response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.create_trigger(request) + client.delete_channel_connection(request) -def test_create_trigger_rest_flattened(): +def test_delete_channel_connection_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2700,13 +9696,11 @@ def test_create_trigger_rest_flattened(): return_value = operations_pb2.Operation(name='operations/spam') # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1/locations/sample2'} + sample_request = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} # get truthy value for each flattened field mock_args = dict( - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + name='name_value', ) mock_args.update(sample_request) @@ -2717,16 +9711,16 @@ def test_create_trigger_rest_flattened(): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.create_trigger(**mock_args) + client.delete_channel_connection(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/triggers" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{name=projects/*/locations/*/channelConnections/*}" % client.transport._host, args[1]) -def test_create_trigger_rest_flattened_error(transport: str = 'rest'): +def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2735,15 +9729,13 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_trigger( - eventarc.CreateTriggerRequest(), - parent='parent_value', - trigger=gce_trigger.Trigger(name='name_value'), - trigger_id='trigger_id_value', + client.delete_channel_connection( + eventarc.DeleteChannelConnectionRequest(), + name='name_value', ) -def test_create_trigger_rest_error(): +def test_delete_channel_connection_rest_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -2751,43 +9743,48 @@ def test_create_trigger_rest_error(): @pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, + eventarc.GetGoogleChannelConfigRequest, dict, ]) -def test_update_trigger_rest(request_type): +def test_get_google_channel_config_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 
'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.update_trigger(request) + response = client.get_google_channel_config(request) # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" + assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' -def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): +def test_get_google_channel_config_rest_required_fields(request_type=eventarc.GetGoogleChannelConfigRequest): transport_class = transports.EventarcRestTransport request_init = {} - request_init["validate_only"] = False + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -2797,25 +9794,20 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger )) # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] - jsonified_request["validateOnly"] = True + jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2824,7 +9816,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = google_channel_config.GoogleChannelConfig() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -2836,40 +9828,37 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "patch", + 'method': "get", 'query_params': pb_request, } - transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.update_trigger(request) + response = client.get_google_channel_config(request) expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_update_trigger_rest_unset_required_fields(): +def test_get_google_channel_config_rest_unset_required_fields(): transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.update_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) + unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_trigger_rest_interceptors(null_interceptor): +def test_get_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), @@ 
-2877,12 +9866,11 @@ def test_update_trigger_rest_interceptors(null_interceptor): client = EventarcClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -2893,31 +9881,30 @@ def test_update_trigger_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) - request = eventarc.UpdateTriggerRequest() + request = eventarc.GetGoogleChannelConfigRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = google_channel_config.GoogleChannelConfig() - client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_update_trigger_rest_bad_request(transport: 
str = 'rest', request_type=eventarc.UpdateTriggerRequest): +def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'etag': 'etag_value'} + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -2927,10 +9914,10 @@ def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=e response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.update_trigger(request) + client.get_google_channel_config(request) -def test_update_trigger_rest_flattened(): +def test_get_google_channel_config_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -2939,36 +9926,35 @@ def test_update_trigger_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = google_channel_config.GoogleChannelConfig() # get arguments that satisfy an http rule for this method - sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + sample_request = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} # get truthy value for each flattened field mock_args = dict( - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - allow_missing=True, + name='name_value', ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.update_trigger(**mock_args) + client.get_google_channel_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{trigger.name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) -def test_update_trigger_rest_flattened_error(transport: str = 'rest'): +def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2977,15 +9963,13 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_trigger( - eventarc.UpdateTriggerRequest(), - trigger=gce_trigger.Trigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - allow_missing=True, + client.get_google_channel_config( + eventarc.GetGoogleChannelConfigRequest(), + name='name_value', ) -def test_update_trigger_rest_error(): +def test_get_google_channel_config_rest_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -2993,43 +9977,48 @@ def test_update_trigger_rest_error(): @pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, + eventarc.UpdateGoogleChannelConfigRequest, dict, ]) -def test_delete_trigger_rest(request_type): +def test_update_google_channel_config_rest(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.delete_trigger(request) + response = client.update_google_channel_config(request) # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' -def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): +def test_update_google_channel_config_rest_required_fields(request_type=eventarc.UpdateGoogleChannelConfigRequest): transport_class = transports.EventarcRestTransport request_init = {} - request_init["name"] = "" - request_init["validate_only"] = False request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( @@ -3039,28 +10028,18 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger )) # verify fields with default values are dropped - assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == request_init["validate_only"] - - jsonified_request["name"] = 'name_value' - jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) + assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - assert "validateOnly" in jsonified_request - assert jsonified_request["validateOnly"] == True client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3069,7 +10048,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + return_value = gce_google_channel_config.GoogleChannelConfig() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -3081,39 +10060,38 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger pb_request = request_type.pb(request) transcode_result = { 'uri': 'v1/sample_method', - 'method': "delete", + 'method': "patch", 'query_params': pb_request, } + transcode_result['body'] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + + pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.delete_trigger(request) + response = client.update_google_channel_config(request) expected_params = [ - ( - "validateOnly", - str(False).lower(), - ), ] actual_params = req.call_args.kwargs['params'] assert expected_params == actual_params -def test_delete_trigger_rest_unset_required_fields(): +def test_update_google_channel_config_rest_unset_required_fields(): transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.delete_trigger._get_unset_required_fields({}) - assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) + unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_trigger_rest_interceptors(null_interceptor): +def test_update_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor 
else transports.EventarcRestInterceptor(), @@ -3121,12 +10099,11 @@ def test_delete_trigger_rest_interceptors(null_interceptor): client = EventarcClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) + pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3137,30 +10114,31 @@ def test_delete_trigger_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value._content = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) - request = eventarc.DeleteTriggerRequest() + request = eventarc.UpdateGoogleChannelConfigRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() + post.return_value = gce_google_channel_config.GoogleChannelConfig() - client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() 
post.assert_called_once() -def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): +def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3170,10 +10148,10 @@ def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=e response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.delete_trigger(request) + client.update_google_channel_config(request) -def test_delete_trigger_rest_flattened(): +def test_update_google_channel_config_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3182,35 +10160,36 @@ def test_delete_trigger_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + return_value = gce_google_channel_config.GoogleChannelConfig() # get arguments that satisfy an http rule for this method - sample_request = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + sample_request = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} # get truthy value for each flattened field mock_args = dict( - name='name_value', - allow_missing=True, + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.delete_trigger(**mock_args) + client.update_google_channel_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/triggers/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}" % client.transport._host, args[1]) -def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): +def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3219,14 +10198,14 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_trigger( - eventarc.DeleteTriggerRequest(), - name='name_value', - allow_missing=True, + client.update_google_channel_config( + eventarc.UpdateGoogleChannelConfigRequest(), + google_channel_config=gce_google_channel_config.GoogleChannelConfig(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) -def test_delete_trigger_rest_error(): +def test_update_google_channel_config_rest_error(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -3365,6 +10344,19 @@ def test_eventarc_base_transport(): 'create_trigger', 'update_trigger', 'delete_trigger', + 'get_channel', + 'list_channels', + 'create_channel_', + 'update_channel', + 'delete_channel', + 'get_provider', + 'list_providers', + 'get_channel_connection', + 'list_channel_connections', + 'create_channel_connection', + 'delete_channel_connection', + 'get_google_channel_config', + 'update_google_channel_config', ) for method in methods: with pytest.raises(NotImplementedError): @@ -3640,6 +10632,45 @@ def test_eventarc_client_transport_session_collision(transport_name): session1 = client1.transport.delete_trigger._session session2 = 
client2.transport.delete_trigger._session assert session1 != session2 + session1 = client1.transport.get_channel._session + session2 = client2.transport.get_channel._session + assert session1 != session2 + session1 = client1.transport.list_channels._session + session2 = client2.transport.list_channels._session + assert session1 != session2 + session1 = client1.transport.create_channel_._session + session2 = client2.transport.create_channel_._session + assert session1 != session2 + session1 = client1.transport.update_channel._session + session2 = client2.transport.update_channel._session + assert session1 != session2 + session1 = client1.transport.delete_channel._session + session2 = client2.transport.delete_channel._session + assert session1 != session2 + session1 = client1.transport.get_provider._session + session2 = client2.transport.get_provider._session + assert session1 != session2 + session1 = client1.transport.list_providers._session + session2 = client2.transport.list_providers._session + assert session1 != session2 + session1 = client1.transport.get_channel_connection._session + session2 = client2.transport.get_channel_connection._session + assert session1 != session2 + session1 = client1.transport.list_channel_connections._session + session2 = client2.transport.list_channel_connections._session + assert session1 != session2 + session1 = client1.transport.create_channel_connection._session + session2 = client2.transport.create_channel_connection._session + assert session1 != session2 + session1 = client1.transport.delete_channel_connection._session + session2 = client2.transport.delete_channel_connection._session + assert session1 != session2 + session1 = client1.transport.get_google_channel_config._session + session2 = client2.transport.get_google_channel_config._session + assert session1 != session2 + session1 = client1.transport.update_google_channel_config._session + session2 = client2.transport.update_google_channel_config._session + assert session1 
!= session2 def test_eventarc_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -3784,6 +10815,132 @@ def test_eventarc_grpc_lro_async_client(): assert transport.operations_client is transport.operations_client +def test_channel_path(): + project = "squid" + location = "clam" + channel = "whelk" + expected = "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + actual = EventarcClient.channel_path(project, location, channel) + assert expected == actual + + +def test_parse_channel_path(): + expected = { + "project": "octopus", + "location": "oyster", + "channel": "nudibranch", + } + path = EventarcClient.channel_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_path(path) + assert expected == actual + +def test_channel_connection_path(): + project = "cuttlefish" + location = "mussel" + channel_connection = "winkle" + expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + actual = EventarcClient.channel_connection_path(project, location, channel_connection) + assert expected == actual + + +def test_parse_channel_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "channel_connection": "abalone", + } + path = EventarcClient.channel_connection_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_channel_connection_path(path) + assert expected == actual + +def test_cloud_function_path(): + project = "squid" + location = "clam" + function = "whelk" + expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + actual = EventarcClient.cloud_function_path(project, location, function) + assert expected == actual + + +def test_parse_cloud_function_path(): + expected = { + "project": "octopus", + "location": "oyster", + "function": "nudibranch", + } + path = EventarcClient.cloud_function_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_cloud_function_path(path) + assert expected == actual + +def test_crypto_key_path(): + project = "cuttlefish" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = EventarcClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_crypto_key_path(path) + assert expected == actual + +def test_google_channel_config_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + actual = EventarcClient.google_channel_config_path(project, location) + assert expected == actual + + +def test_parse_google_channel_config_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = EventarcClient.google_channel_config_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_google_channel_config_path(path) + assert expected == actual + +def test_provider_path(): + project = "cuttlefish" + location = "mussel" + provider = "winkle" + expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + actual = EventarcClient.provider_path(project, location, provider) + assert expected == actual + + +def test_parse_provider_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "provider": "abalone", + } + path = EventarcClient.provider_path(**expected) + + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_provider_path(path) + assert expected == actual + def test_service_path(): expected = "*".format() actual = EventarcClient.service_path() @@ -3839,8 +10996,29 @@ def test_parse_trigger_path(): actual = EventarcClient.parse_trigger_path(path) assert expected == actual +def test_workflow_path(): + project = "scallop" + location = "abalone" + workflow = "squid" + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + actual = EventarcClient.workflow_path(project, location, workflow) + assert expected == actual + + +def test_parse_workflow_path(): + expected = { + "project": "clam", + "location": "whelk", + "workflow": "octopus", + } + path = EventarcClient.workflow_path(**expected) + + # Check that the path construction is reversible. + actual = EventarcClient.parse_workflow_path(path) + assert expected == actual + def test_common_billing_account_path(): - billing_account = "scallop" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) actual = EventarcClient.common_billing_account_path(billing_account) assert expected == actual @@ -3848,7 +11026,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "abalone", + "billing_account": "nudibranch", } path = EventarcClient.common_billing_account_path(**expected) @@ -3857,7 +11035,7 @@ def test_parse_common_billing_account_path(): assert expected == actual def test_common_folder_path(): - folder = "squid" + folder = "cuttlefish" expected = "folders/{folder}".format(folder=folder, ) actual = EventarcClient.common_folder_path(folder) assert expected == actual @@ -3865,7 +11043,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "clam", + "folder": "mussel", } path = EventarcClient.common_folder_path(**expected) @@ -3874,7 +11052,7 
@@ def test_parse_common_folder_path(): assert expected == actual def test_common_organization_path(): - organization = "whelk" + organization = "winkle" expected = "organizations/{organization}".format(organization=organization, ) actual = EventarcClient.common_organization_path(organization) assert expected == actual @@ -3882,7 +11060,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "octopus", + "organization": "nautilus", } path = EventarcClient.common_organization_path(**expected) @@ -3891,7 +11069,7 @@ def test_parse_common_organization_path(): assert expected == actual def test_common_project_path(): - project = "oyster" + project = "scallop" expected = "projects/{project}".format(project=project, ) actual = EventarcClient.common_project_path(project) assert expected == actual @@ -3899,7 +11077,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nudibranch", + "project": "abalone", } path = EventarcClient.common_project_path(**expected) @@ -3908,8 +11086,8 @@ def test_parse_common_project_path(): assert expected == actual def test_common_location_path(): - project = "cuttlefish" - location = "mussel" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) actual = EventarcClient.common_location_path(project, location) assert expected == actual @@ -3917,8 +11095,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "winkle", - "location": "nautilus", + "project": "whelk", + "location": "octopus", } path = EventarcClient.common_location_path(**expected) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index d8b1e1bfee36..fb661a83ae24 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -28,6 +28,7 @@ from google.cloud.logging_v2.types.log_entry import LogEntry from google.cloud.logging_v2.types.log_entry import LogEntryOperation from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation +from google.cloud.logging_v2.types.log_entry import LogSplit from google.cloud.logging_v2.types.logging import DeleteLogRequest from google.cloud.logging_v2.types.logging import ListLogEntriesRequest from google.cloud.logging_v2.types.logging import ListLogEntriesResponse @@ -40,40 +41,59 @@ from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse +from google.cloud.logging_v2.types.logging_config import BigQueryDataset from google.cloud.logging_v2.types.logging_config import BigQueryOptions +from google.cloud.logging_v2.types.logging_config import BucketMetadata from google.cloud.logging_v2.types.logging_config import CmekSettings +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesMetadata +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesRequest +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesResponse from google.cloud.logging_v2.types.logging_config import CreateBucketRequest from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest +from google.cloud.logging_v2.types.logging_config import CreateLinkRequest from google.cloud.logging_v2.types.logging_config import CreateSinkRequest from google.cloud.logging_v2.types.logging_config import CreateViewRequest from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest from google.cloud.logging_v2.types.logging_config import 
DeleteExclusionRequest +from google.cloud.logging_v2.types.logging_config import DeleteLinkRequest from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest from google.cloud.logging_v2.types.logging_config import DeleteViewRequest from google.cloud.logging_v2.types.logging_config import GetBucketRequest from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest from google.cloud.logging_v2.types.logging_config import GetExclusionRequest +from google.cloud.logging_v2.types.logging_config import GetLinkRequest +from google.cloud.logging_v2.types.logging_config import GetSettingsRequest from google.cloud.logging_v2.types.logging_config import GetSinkRequest from google.cloud.logging_v2.types.logging_config import GetViewRequest +from google.cloud.logging_v2.types.logging_config import IndexConfig +from google.cloud.logging_v2.types.logging_config import Link +from google.cloud.logging_v2.types.logging_config import LinkMetadata from google.cloud.logging_v2.types.logging_config import ListBucketsRequest from google.cloud.logging_v2.types.logging_config import ListBucketsResponse from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse +from google.cloud.logging_v2.types.logging_config import ListLinksRequest +from google.cloud.logging_v2.types.logging_config import ListLinksResponse from google.cloud.logging_v2.types.logging_config import ListSinksRequest from google.cloud.logging_v2.types.logging_config import ListSinksResponse from google.cloud.logging_v2.types.logging_config import ListViewsRequest from google.cloud.logging_v2.types.logging_config import ListViewsResponse +from google.cloud.logging_v2.types.logging_config import LocationMetadata from google.cloud.logging_v2.types.logging_config import LogBucket from google.cloud.logging_v2.types.logging_config import LogExclusion from 
google.cloud.logging_v2.types.logging_config import LogSink from google.cloud.logging_v2.types.logging_config import LogView +from google.cloud.logging_v2.types.logging_config import Settings from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest +from google.cloud.logging_v2.types.logging_config import UpdateSettingsRequest from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest from google.cloud.logging_v2.types.logging_config import UpdateViewRequest +from google.cloud.logging_v2.types.logging_config import IndexType from google.cloud.logging_v2.types.logging_config import LifecycleState +from google.cloud.logging_v2.types.logging_config import OperationState from google.cloud.logging_v2.types.logging_metrics import CreateLogMetricRequest from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest @@ -91,6 +111,7 @@ 'LogEntry', 'LogEntryOperation', 'LogEntrySourceLocation', + 'LogSplit', 'DeleteLogRequest', 'ListLogEntriesRequest', 'ListLogEntriesResponse', @@ -103,40 +124,59 @@ 'WriteLogEntriesPartialErrors', 'WriteLogEntriesRequest', 'WriteLogEntriesResponse', + 'BigQueryDataset', 'BigQueryOptions', + 'BucketMetadata', 'CmekSettings', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesRequest', + 'CopyLogEntriesResponse', 'CreateBucketRequest', 'CreateExclusionRequest', + 'CreateLinkRequest', 'CreateSinkRequest', 'CreateViewRequest', 'DeleteBucketRequest', 'DeleteExclusionRequest', + 'DeleteLinkRequest', 'DeleteSinkRequest', 'DeleteViewRequest', 'GetBucketRequest', 'GetCmekSettingsRequest', 'GetExclusionRequest', + 'GetLinkRequest', + 'GetSettingsRequest', 'GetSinkRequest', 'GetViewRequest', 
+ 'IndexConfig', + 'Link', + 'LinkMetadata', 'ListBucketsRequest', 'ListBucketsResponse', 'ListExclusionsRequest', 'ListExclusionsResponse', + 'ListLinksRequest', + 'ListLinksResponse', 'ListSinksRequest', 'ListSinksResponse', 'ListViewsRequest', 'ListViewsResponse', + 'LocationMetadata', 'LogBucket', 'LogExclusion', 'LogSink', 'LogView', + 'Settings', 'UndeleteBucketRequest', 'UpdateBucketRequest', 'UpdateCmekSettingsRequest', 'UpdateExclusionRequest', + 'UpdateSettingsRequest', 'UpdateSinkRequest', 'UpdateViewRequest', + 'IndexType', 'LifecycleState', + 'OperationState', 'CreateLogMetricRequest', 'DeleteLogMetricRequest', 'GetLogMetricRequest', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index c40cb2b0d9be..b6bfee061fd4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -28,6 +28,7 @@ from .types.log_entry import LogEntry from .types.log_entry import LogEntryOperation from .types.log_entry import LogEntrySourceLocation +from .types.log_entry import LogSplit from .types.logging import DeleteLogRequest from .types.logging import ListLogEntriesRequest from .types.logging import ListLogEntriesResponse @@ -40,40 +41,59 @@ from .types.logging import WriteLogEntriesPartialErrors from .types.logging import WriteLogEntriesRequest from .types.logging import WriteLogEntriesResponse +from .types.logging_config import BigQueryDataset from .types.logging_config import BigQueryOptions +from .types.logging_config import BucketMetadata from .types.logging_config import CmekSettings +from .types.logging_config import CopyLogEntriesMetadata +from .types.logging_config import CopyLogEntriesRequest +from .types.logging_config import CopyLogEntriesResponse from 
.types.logging_config import CreateBucketRequest from .types.logging_config import CreateExclusionRequest +from .types.logging_config import CreateLinkRequest from .types.logging_config import CreateSinkRequest from .types.logging_config import CreateViewRequest from .types.logging_config import DeleteBucketRequest from .types.logging_config import DeleteExclusionRequest +from .types.logging_config import DeleteLinkRequest from .types.logging_config import DeleteSinkRequest from .types.logging_config import DeleteViewRequest from .types.logging_config import GetBucketRequest from .types.logging_config import GetCmekSettingsRequest from .types.logging_config import GetExclusionRequest +from .types.logging_config import GetLinkRequest +from .types.logging_config import GetSettingsRequest from .types.logging_config import GetSinkRequest from .types.logging_config import GetViewRequest +from .types.logging_config import IndexConfig +from .types.logging_config import Link +from .types.logging_config import LinkMetadata from .types.logging_config import ListBucketsRequest from .types.logging_config import ListBucketsResponse from .types.logging_config import ListExclusionsRequest from .types.logging_config import ListExclusionsResponse +from .types.logging_config import ListLinksRequest +from .types.logging_config import ListLinksResponse from .types.logging_config import ListSinksRequest from .types.logging_config import ListSinksResponse from .types.logging_config import ListViewsRequest from .types.logging_config import ListViewsResponse +from .types.logging_config import LocationMetadata from .types.logging_config import LogBucket from .types.logging_config import LogExclusion from .types.logging_config import LogSink from .types.logging_config import LogView +from .types.logging_config import Settings from .types.logging_config import UndeleteBucketRequest from .types.logging_config import UpdateBucketRequest from .types.logging_config import 
UpdateCmekSettingsRequest from .types.logging_config import UpdateExclusionRequest +from .types.logging_config import UpdateSettingsRequest from .types.logging_config import UpdateSinkRequest from .types.logging_config import UpdateViewRequest +from .types.logging_config import IndexType from .types.logging_config import LifecycleState +from .types.logging_config import OperationState from .types.logging_metrics import CreateLogMetricRequest from .types.logging_metrics import DeleteLogMetricRequest from .types.logging_metrics import GetLogMetricRequest @@ -86,16 +106,23 @@ 'ConfigServiceV2AsyncClient', 'LoggingServiceV2AsyncClient', 'MetricsServiceV2AsyncClient', +'BigQueryDataset', 'BigQueryOptions', +'BucketMetadata', 'CmekSettings', 'ConfigServiceV2Client', +'CopyLogEntriesMetadata', +'CopyLogEntriesRequest', +'CopyLogEntriesResponse', 'CreateBucketRequest', 'CreateExclusionRequest', +'CreateLinkRequest', 'CreateLogMetricRequest', 'CreateSinkRequest', 'CreateViewRequest', 'DeleteBucketRequest', 'DeleteExclusionRequest', +'DeleteLinkRequest', 'DeleteLogMetricRequest', 'DeleteLogRequest', 'DeleteSinkRequest', @@ -103,14 +130,22 @@ 'GetBucketRequest', 'GetCmekSettingsRequest', 'GetExclusionRequest', +'GetLinkRequest', 'GetLogMetricRequest', +'GetSettingsRequest', 'GetSinkRequest', 'GetViewRequest', +'IndexConfig', +'IndexType', 'LifecycleState', +'Link', +'LinkMetadata', 'ListBucketsRequest', 'ListBucketsResponse', 'ListExclusionsRequest', 'ListExclusionsResponse', +'ListLinksRequest', +'ListLinksResponse', 'ListLogEntriesRequest', 'ListLogEntriesResponse', 'ListLogMetricsRequest', @@ -123,6 +158,7 @@ 'ListSinksResponse', 'ListViewsRequest', 'ListViewsResponse', +'LocationMetadata', 'LogBucket', 'LogEntry', 'LogEntryOperation', @@ -130,9 +166,12 @@ 'LogExclusion', 'LogMetric', 'LogSink', +'LogSplit', 'LogView', 'LoggingServiceV2Client', 'MetricsServiceV2Client', +'OperationState', +'Settings', 'TailLogEntriesRequest', 'TailLogEntriesResponse', 
'UndeleteBucketRequest', @@ -140,6 +179,7 @@ 'UpdateCmekSettingsRequest', 'UpdateExclusionRequest', 'UpdateLogMetricRequest', +'UpdateSettingsRequest', 'UpdateSinkRequest', 'UpdateViewRequest', 'WriteLogEntriesPartialErrors', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json index 69112af60b31..8d2b1297a0d7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_metadata.json @@ -10,16 +10,31 @@ "grpc": { "libraryClient": "ConfigServiceV2Client", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -40,6 +55,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -65,6 +85,16 @@ "get_exclusion" ] }, + "GetLink": { + "methods": [ + "get_link" + ] + }, + "GetSettings": { + "methods": [ + "get_settings" + ] + }, "GetSink": { "methods": [ "get_sink" @@ -85,6 +115,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -105,6 +140,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" @@ -115,6 +155,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" @@ -130,16 +175,31 @@ "grpc-async": { "libraryClient": 
"ConfigServiceV2AsyncClient", "rpcs": { + "CopyLogEntries": { + "methods": [ + "copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" ] }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, "CreateExclusion": { "methods": [ "create_exclusion" ] }, + "CreateLink": { + "methods": [ + "create_link" + ] + }, "CreateSink": { "methods": [ "create_sink" @@ -160,6 +220,11 @@ "delete_exclusion" ] }, + "DeleteLink": { + "methods": [ + "delete_link" + ] + }, "DeleteSink": { "methods": [ "delete_sink" @@ -185,124 +250,14 @@ "get_exclusion" ] }, - "GetSink": { - "methods": [ - "get_sink" - ] - }, - "GetView": { - "methods": [ - "get_view" - ] - }, - "ListBuckets": { - "methods": [ - "list_buckets" - ] - }, - "ListExclusions": { - "methods": [ - "list_exclusions" - ] - }, - "ListSinks": { - "methods": [ - "list_sinks" - ] - }, - "ListViews": { - "methods": [ - "list_views" - ] - }, - "UndeleteBucket": { - "methods": [ - "undelete_bucket" - ] - }, - "UpdateBucket": { - "methods": [ - "update_bucket" - ] - }, - "UpdateCmekSettings": { - "methods": [ - "update_cmek_settings" - ] - }, - "UpdateExclusion": { - "methods": [ - "update_exclusion" - ] - }, - "UpdateSink": { - "methods": [ - "update_sink" - ] - }, - "UpdateView": { - "methods": [ - "update_view" - ] - } - } - }, - "rest": { - "libraryClient": "ConfigServiceV2Client", - "rpcs": { - "CreateBucket": { - "methods": [ - "create_bucket" - ] - }, - "CreateExclusion": { - "methods": [ - "create_exclusion" - ] - }, - "CreateSink": { - "methods": [ - "create_sink" - ] - }, - "CreateView": { - "methods": [ - "create_view" - ] - }, - "DeleteBucket": { - "methods": [ - "delete_bucket" - ] - }, - "DeleteExclusion": { - "methods": [ - "delete_exclusion" - ] - }, - "DeleteSink": { - "methods": [ - "delete_sink" - ] - }, - "DeleteView": { - "methods": [ - "delete_view" - ] - }, - "GetBucket": { - "methods": [ - "get_bucket" - ] - }, - "GetCmekSettings": { + "GetLink": { "methods": [ - 
"get_cmek_settings" + "get_link" ] }, - "GetExclusion": { + "GetSettings": { "methods": [ - "get_exclusion" + "get_settings" ] }, "GetSink": { @@ -325,6 +280,11 @@ "list_exclusions" ] }, + "ListLinks": { + "methods": [ + "list_links" + ] + }, "ListSinks": { "methods": [ "list_sinks" @@ -345,6 +305,11 @@ "update_bucket" ] }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, "UpdateCmekSettings": { "methods": [ "update_cmek_settings" @@ -355,6 +320,11 @@ "update_exclusion" ] }, + "UpdateSettings": { + "methods": [ + "update_settings" + ] + }, "UpdateSink": { "methods": [ "update_sink" @@ -440,41 +410,6 @@ ] } } - }, - "rest": { - "libraryClient": "LoggingServiceV2Client", - "rpcs": { - "DeleteLog": { - "methods": [ - "delete_log" - ] - }, - "ListLogEntries": { - "methods": [ - "list_log_entries" - ] - }, - "ListLogs": { - "methods": [ - "list_logs" - ] - }, - "ListMonitoredResourceDescriptors": { - "methods": [ - "list_monitored_resource_descriptors" - ] - }, - "TailLogEntries": { - "methods": [ - "tail_log_entries" - ] - }, - "WriteLogEntries": { - "methods": [ - "write_log_entries" - ] - } - } } } }, @@ -539,36 +474,6 @@ ] } } - }, - "rest": { - "libraryClient": "MetricsServiceV2Client", - "rpcs": { - "CreateLogMetric": { - "methods": [ - "create_log_metric" - ] - }, - "DeleteLogMetric": { - "methods": [ - "delete_log_metric" - ] - }, - "GetLogMetric": { - "methods": [ - "get_log_metric" - ] - }, - "ListLogMetrics": { - "methods": [ - "list_log_metrics" - ] - }, - "UpdateLogMetric": { - "methods": [ - "update_log_metric" - ] - } - } } } } diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index e42f50769483..fca70d962c17 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -32,8 +32,11 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -51,6 +54,8 @@ class ConfigServiceV2AsyncClient: cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) + link_path = staticmethod(ConfigServiceV2Client.link_path) + parse_link_path = staticmethod(ConfigServiceV2Client.parse_link_path) log_bucket_path = staticmethod(ConfigServiceV2Client.log_bucket_path) parse_log_bucket_path = staticmethod(ConfigServiceV2Client.parse_log_bucket_path) log_exclusion_path = staticmethod(ConfigServiceV2Client.log_exclusion_path) @@ -59,6 +64,8 @@ class ConfigServiceV2AsyncClient: parse_log_sink_path = staticmethod(ConfigServiceV2Client.parse_log_sink_path) log_view_path = staticmethod(ConfigServiceV2Client.log_view_path) parse_log_view_path = staticmethod(ConfigServiceV2Client.parse_log_view_path) + settings_path = staticmethod(ConfigServiceV2Client.settings_path) + parse_settings_path = staticmethod(ConfigServiceV2Client.parse_settings_path) common_billing_account_path = staticmethod(ConfigServiceV2Client.common_billing_account_path) parse_common_billing_account_path = 
staticmethod(ConfigServiceV2Client.parse_common_billing_account_path) common_folder_path = staticmethod(ConfigServiceV2Client.common_folder_path) @@ -202,7 +209,7 @@ async def list_buckets(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsAsyncPager: - r"""Lists buckets. + r"""Lists log buckets. .. code-block:: python @@ -325,7 +332,7 @@ async def get_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. .. code-block:: python @@ -364,7 +371,9 @@ async def sample_get_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.GetBucketRequest(request) @@ -396,6 +405,206 @@ async def sample_get_bucket(): # Done; return the response. return response + async def create_bucket_async(self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_bucket_async, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + async def update_bucket_async(self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): + The request object. The parameters to ``UpdateBucket``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_bucket_async, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + async def create_bucket(self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, @@ -403,9 +612,9 @@ async def create_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + r"""Creates a log bucket that can be used to store log + entries. 
After a bucket has been created, the bucket's + location cannot be changed. .. code-block:: python @@ -445,7 +654,9 @@ async def sample_create_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.CreateBucketRequest(request) @@ -484,17 +695,13 @@ async def update_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + r"""Updates a log bucket. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. .. code-block:: python @@ -533,7 +740,9 @@ async def sample_update_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. request = logging_config.UpdateBucketRequest(request) @@ -572,9 +781,12 @@ async def delete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. 
After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. .. code-block:: python @@ -642,8 +854,9 @@ async def undelete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. .. code-block:: python @@ -712,7 +925,7 @@ async def list_views(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsAsyncPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. .. code-block:: python @@ -827,7 +1040,7 @@ async def get_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. .. code-block:: python @@ -866,8 +1079,8 @@ async def sample_get_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -907,8 +1120,8 @@ async def create_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. .. code-block:: python @@ -948,8 +1161,8 @@ async def sample_create_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. 
@@ -989,8 +1202,11 @@ async def update_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. .. code-block:: python @@ -1029,8 +1245,8 @@ async def sample_update_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1070,7 +1286,10 @@ async def delete_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. .. code-block:: python @@ -1307,7 +1526,9 @@ async def sample_get_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1323,12 +1544,12 @@ async def sample_get_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. 
- The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1440,8 +1661,9 @@ async def sample_create_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1464,12 +1686,12 @@ async def sample_create_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1578,7 +1800,9 @@ async def sample_update_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1596,16 +1820,18 @@ async def sample_update_sink(): overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. 
- An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1621,12 +1847,12 @@ async def sample_update_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1731,7 +1957,9 @@ async def sample_delete_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. 
+ For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1789,15 +2017,20 @@ async def sample_delete_sink(): metadata=metadata, ) - async def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + async def create_link(self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsAsyncPager: - r"""Lists all the exclusions in a parent resource. + ) -> operation_async.AsyncOperation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. code-block:: python @@ -1810,39 +2043,56 @@ async def list_exclusions(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_list_exclusions(): + async def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response - async for response in page_result: - print(response) + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): - The request object. 
The parameters to ``ListExclusions``. + request (Optional[Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]]): + The request object. The parameters to CreateLink. parent (:class:`str`): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + link (:class:`google.cloud.logging_v2.types.Link`): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (:class:`str`): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1850,41 +2100,38 @@ async def sample_list_exclusions(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: - Result returned from ListExclusions. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. 
+ The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListExclusionsRequest(request) + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self._client._transport.create_link, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -1904,27 +2151,27 @@ async def sample_list_exclusions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListExclusionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, ) # Done; return the response. return response - async def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + async def delete_link(self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + ) -> operation_async.AsyncOperation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. .. code-block:: python @@ -1937,36 +2184,35 @@ async def get_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_get_exclusion(): + async def sample_delete_link(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.DeleteLinkRequest( name="name_value", ) # Make the request - response = await client.get_exclusion(request=request) + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): - The request object. The parameters to ``GetExclusion``. + request (Optional[Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]]): + The request object. The parameters to DeleteLink. name (:class:`str`): - Required. 
The resource name of an existing exclusion: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + Required. The full resource name of the link to delete. - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1978,18 +2224,19 @@ async def sample_get_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. @@ -2000,7 +2247,7 @@ async def sample_get_exclusion(): raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.GetExclusionRequest(request) + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2010,16 +2257,8 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_exclusion, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self._client._transport.delete_link, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2039,22 +2278,637 @@ async def sample_get_exclusion(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + # Done; return the response. return response - async def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + async def list_links(self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, *, parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListLinksAsyncPager: + r"""Lists links. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLinksRequest, dict]]): + The request object. The parameters to ListLinks. + parent (:class:`str`): + Required. The parent resource whose links are to be + listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListLinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_links, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLinksAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_link(self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Link: + r"""Gets a link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetLinkRequest, dict]]): + The request object. The parameters to GetLink. + name (:class:`str`): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_link, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_exclusions(self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsAsyncPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): + The request object. The parameters to ``ListExclusions``. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.ListExclusionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_exclusions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListExclusionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_exclusion(self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): + The request object. The parameters to ``GetExclusion``. + name (:class:`str`): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. 
Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_exclusion(self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = await client.create_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): + The request object. The parameters to ``CreateExclusion``. + parent (:class:`str`): + Required. 
The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_exclusion(self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. .. 
code-block:: python @@ -2067,7 +2921,7 @@ async def create_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_create_exclusion(): + async def sample_update_exclusion(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() @@ -2076,45 +2930,59 @@ async def sample_create_exclusion(): exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( - parent="parent_value", + request = logging_v2.UpdateExclusionRequest( + name="name_value", exclusion=exclusion, ) # Make the request - response = await client.create_exclusion(request=request) + response = await client.update_exclusion(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): - The request object. The parameters to ``CreateExclusion``. - parent (:class:`str`): - Required. The parent resource in which to create the - exclusion: + request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): + The request object. The parameters to ``UpdateExclusion``. + name (:class:`str`): + Required. The resource name of the exclusion to update: :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For example: - This corresponds to the ``parent`` field + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): - Required. The new exclusion, whose ``name`` parameter is - an exclusion name that is not already used in the parent - resource. + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2123,41 +2991,250 @@ async def sample_create_exclusion(): Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. 
Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.CreateExclusionRequest(request) + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_exclusion, + default_timeout=120.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_exclusion(self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion in the \_Default sink. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + await client.delete_exclusion(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): + The request object. The parameters to ``DeleteExclusion``. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.DeleteExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_exclusion, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_cmek_settings(self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if exclusion is not None: - request.exclusion = exclusion + """ + # Create or coerce a protobuf request object. + request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, + self._client._transport.get_cmek_settings, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2165,7 +3242,7 @@ async def sample_create_exclusion(): # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), + ("name", request.name), )), ) @@ -2180,18 +3257,28 @@ async def sample_create_exclusion(): # Done; return the response. return response - async def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + async def update_cmek_settings(self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + ) -> logging_config.CmekSettings: + r"""Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. 
+ + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Log + Router `__ + for more information. .. code-block:: python @@ -2204,67 +3291,29 @@ async def update_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_update_exclusion(): + async def sample_update_cmek_settings(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() - exclusion.name = "name_value" - exclusion.filter = "filter_value" - - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.UpdateCmekSettingsRequest( name="name_value", - exclusion=exclusion, ) # Make the request - response = await client.update_exclusion(request=request) + response = await client.update_cmek_settings(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): - The request object. The parameters to ``UpdateExclusion``. - name (:class:`str`): - Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): - Required. New values for the existing exclusion. 
Only - the fields specified in ``update_mask`` are relevant. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Required. A non-empty list of fields to change in the - existing exclusion. New values for the fields are taken - from the corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included - in this request. Fields not mentioned in ``update_mask`` - are not changed and are ignored in the request. - - For example, to change the filter and description of an - exclusion, specify an ``update_mask`` of - ``"filter,description"``. + request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + See `Enabling CMEK for Log + Router `__ + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2272,44 +3321,29 @@ async def sample_update_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. 
+ google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. - request = logging_config.UpdateExclusionRequest(request) + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if exclusion is not None: - request.exclusion = exclusion - if update_mask is not None: - request.update_mask = update_mask + """ + # Create or coerce a protobuf request object. + request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, + self._client._transport.update_cmek_settings, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2332,15 +3366,25 @@ async def sample_update_exclusion(): # Done; return the response. 
return response - async def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + async def get_settings(self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an exclusion. + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. .. code-block:: python @@ -2353,34 +3397,49 @@ async def delete_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_delete_exclusion(): + async def sample_get_settings(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.GetSettingsRequest( name="name_value", ) # Make the request - await client.delete_exclusion(request=request) + response = await client.get_settings(request=request) + + # Handle the response + print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): - The request object. The parameters to ``DeleteExclusion``. + request (Optional[Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. name (:class:`str`): - Required. 
The resource name of an existing exclusion to - delete: + Required. The resource for which to retrieve settings. :: - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2390,6 +3449,13 @@ async def sample_delete_exclusion(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2399,7 +3465,7 @@ async def sample_delete_exclusion(): raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.DeleteExclusionRequest(request) + request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -2409,16 +3475,8 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, + self._client._transport.get_settings, + default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2431,27 +3489,41 @@ async def sample_delete_exclusion(): ) # Send the request. - await rpc( + response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - async def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + # Done; return the response. + return response + + async def update_settings(self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. 
+ + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -2466,29 +3538,53 @@ async def get_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_get_cmek_settings(): + async def sample_update_settings(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.UpdateSettingsRequest( name="name_value", ) # Make the request - response = await client.get_cmek_settings(request=request) + response = await client.update_settings(request=request) # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): + request (Optional[Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]]): The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + settings (:class:`google.cloud.logging_v2.types.Settings`): + Required. The settings to update. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. 
Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2496,28 +3592,33 @@ async def sample_get_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = logging_config.UpdateSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, + self._client._transport.update_settings, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) @@ -2541,28 +3642,15 @@ async def sample_get_cmek_settings(): # Done; return the response. return response - async def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + async def copy_log_entries(self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) ``kms_key_name`` is invalid, or 2) the - associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. + ) -> operation_async.AsyncOperation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. .. 
code-block:: python @@ -2575,29 +3663,29 @@ async def update_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample_update_cmek_settings(): + async def sample_copy_log_entries(): # Create a client client = logging_v2.ConfigServiceV2AsyncClient() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.CopyLogEntriesRequest( name="name_value", + destination="destination_value", ) # Make the request - response = await client.update_cmek_settings(request=request) + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() # Handle the response print(response) Args: - request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): - The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. + request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): + The request object. The parameters to CopyLogEntries. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2605,40 +3693,26 @@ async def sample_update_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
- See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) + request = logging_config.CopyLogEntriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, + self._client._transport.copy_log_entries, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - # Send the request. response = await rpc( request, @@ -2647,6 +3721,14 @@ async def sample_update_cmek_settings(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + # Done; return the response. 
return response diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 419f7094486f..1752be078912 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -35,14 +35,16 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import ConfigServiceV2GrpcTransport from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport -from .transports.rest import ConfigServiceV2RestTransport class ConfigServiceV2ClientMeta(type): @@ -55,7 +57,6 @@ class ConfigServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry["grpc"] = ConfigServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport - _transport_registry["rest"] = ConfigServiceV2RestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -175,6 +176,17 @@ def parse_cmek_settings_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) return m.groupdict() if m else {} + @staticmethod + def 
link_path(project: str,location: str,bucket: str,link: str,) -> str: + """Returns a fully-qualified link string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + + @staticmethod + def parse_link_path(path: str) -> Dict[str,str]: + """Parses a link path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def log_bucket_path(project: str,location: str,bucket: str,) -> str: """Returns a fully-qualified log_bucket string.""" @@ -219,6 +231,17 @@ def parse_log_view_path(path: str) -> Dict[str,str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def settings_path(project: str,) -> str: + """Returns a fully-qualified settings string.""" + return "projects/{project}/settings".format(project=project, ) + + @staticmethod + def parse_settings_path(path: str) -> Dict[str,str]: + """Parses a settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/settings$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path(billing_account: str, ) -> str: """Returns a fully-qualified billing_account string.""" @@ -350,9 +373,6 @@ def __init__(self, *, transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. 
(1) The ``api_endpoint`` property can be used to override the @@ -432,7 +452,7 @@ def list_buckets(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListBucketsPager: - r"""Lists buckets. + r"""Lists log buckets. .. code-block:: python @@ -555,7 +575,7 @@ def get_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Gets a bucket. + r"""Gets a log bucket. .. code-block:: python @@ -594,7 +614,9 @@ def sample_get_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -627,6 +649,208 @@ def sample_get_bucket(): # Done; return the response. return response + def create_bucket_async(self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + def update_bucket_async(self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateBucketRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + def create_bucket(self, request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, *, @@ -634,9 +858,9 @@ def create_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. .. code-block:: python @@ -676,7 +900,9 @@ def sample_create_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -716,17 +942,13 @@ def update_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogBucket: - r"""Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + r"""Updates a log bucket. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. 
+ If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. .. code-block:: python @@ -765,7 +987,9 @@ def sample_update_bucket(): Returns: google.cloud.logging_v2.types.LogBucket: - Describes a repository of logs. + Describes a repository in which log + entries are stored. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes @@ -805,9 +1029,12 @@ def delete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. .. code-block:: python @@ -876,8 +1103,9 @@ def undelete_bucket(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. .. code-block:: python @@ -947,7 +1175,7 @@ def list_views(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListViewsPager: - r"""Lists views on a bucket. + r"""Lists views on a log bucket. .. 
code-block:: python @@ -1062,7 +1290,7 @@ def get_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Gets a view. + r"""Gets a view on a log bucket.. .. code-block:: python @@ -1101,8 +1329,8 @@ def sample_get_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1143,8 +1371,8 @@ def create_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. .. code-block:: python @@ -1184,8 +1412,8 @@ def sample_create_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. """ # Create or coerce a protobuf request object. @@ -1226,8 +1454,11 @@ def update_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> logging_config.LogView: - r"""Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. .. code-block:: python @@ -1266,8 +1497,8 @@ def sample_update_view(): Returns: google.cloud.logging_v2.types.LogView: - Describes a view over logs in a - bucket. + Describes a view over log entries in + a bucket. 
""" # Create or coerce a protobuf request object. @@ -1308,7 +1539,10 @@ def delete_view(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes a view from a bucket. + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. .. code-block:: python @@ -1538,7 +1772,9 @@ def sample_get_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1554,12 +1790,12 @@ def sample_get_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1663,8 +1899,9 @@ def sample_create_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. 
+ For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1687,12 +1924,12 @@ def sample_create_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1801,7 +2038,9 @@ def sample_update_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1819,16 +2058,18 @@ def sample_update_sink(): overwritten if, and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the - following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. 
For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1844,12 +2085,12 @@ def sample_update_sink(): Describes a sink used to export log entries to one of the following destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. """ # Create or coerce a protobuf request object. @@ -1946,7 +2187,9 @@ def sample_delete_sink(): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1996,15 +2239,20 @@ def sample_delete_sink(): metadata=metadata, ) - def list_exclusions(self, - request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + def create_link(self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, *, parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListExclusionsPager: - r"""Lists all the exclusions in a parent resource. 
+ ) -> operation.Operation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. .. code-block:: python @@ -2017,39 +2265,56 @@ def list_exclusions(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_list_exclusions(): + def sample_create_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.ListExclusionsRequest( + request = logging_v2.CreateLinkRequest( parent="parent_value", + link_id="link_id_value", ) # Make the request - page_result = client.list_exclusions(request=request) + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response - for response in page_result: - print(response) + print(response) Args: - request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): - The request object. The parameters to ``ListExclusions``. + request (Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]): + The request object. The parameters to CreateLink. parent (str): - Required. The parent resource whose exclusions are to be - listed. + Required. The full resource name of the bucket to create + a link for. :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
+ link (google.cloud.logging_v2.types.Link): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (str): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. + + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2057,35 +2322,40 @@ def sample_list_exclusions(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: - Result returned from ListExclusions. + google.api_core.operation.Operation: + An object representing a long-running operation. - Iterating over this object will yield results and - resolve additional pages automatically. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. + # in a logging_config.CreateLinkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, logging_config.ListExclusionsRequest): - request = logging_config.ListExclusionsRequest(request) + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + rpc = self._transport._wrapped_methods[self._transport.create_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2103,27 +2373,27 @@ def sample_list_exclusions(): metadata=metadata, ) - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListExclusionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, ) # Done; return the response. return response - def get_exclusion(self, - request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + def delete_link(self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Gets the description of an exclusion. + ) -> operation.Operation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. .. 
code-block:: python @@ -2136,36 +2406,35 @@ def get_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_get_exclusion(): + def sample_delete_link(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetExclusionRequest( + request = logging_v2.DeleteLinkRequest( name="name_value", ) # Make the request - response = client.get_exclusion(request=request) + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): - The request object. The parameters to ``GetExclusion``. + request (Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]): + The request object. The parameters to DeleteLink. name (str): - Required. The resource name of an existing exclusion: + Required. The full resource name of the link to delete. - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2177,18 +2446,19 @@ def sample_get_exclusion(): sent along with the request as metadata. 
Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } """ # Create or coerce a protobuf request object. @@ -2200,11 +2470,11 @@ def sample_get_exclusion(): 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. + # in a logging_config.DeleteLinkRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.GetExclusionRequest): - request = logging_config.GetExclusionRequest(request) + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2212,7 +2482,7 @@ def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.get_exclusion] + rpc = self._transport._wrapped_methods[self._transport.delete_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2230,22 +2500,26 @@ def sample_get_exclusion(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + # Done; return the response. return response - def create_exclusion(self, - request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + def list_links(self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, *, parent: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + ) -> pagers.ListLinksPager: + r"""Lists links. .. code-block:: python @@ -2258,54 +2532,663 @@ def create_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_create_exclusion(): + def sample_list_links(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListLinksRequest, dict]): + The request object. The parameters to ListLinks. 
+ parent (str): + Required. The parent resource whose links are to be + listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: + The response from ListLinks. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListLinksRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_links] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLinksPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_link(self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.Link: + r"""Gets a link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetLinkRequest, dict]): + The request object. The parameters to GetLink. + name (str): + Required. 
The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetLinkRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_link] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_exclusions(self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListExclusionsPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_list_exclusions(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]): + The request object. The parameters to ``ListExclusions``. + parent (str): + Required. The parent resource whose exclusions are to be + listed. 
+ + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.ListExclusionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListExclusionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_exclusion(self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_get_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client.get_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]): + The request object. The parameters to ``GetExclusion``. + name (str): + Required. 
The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_exclusion(self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_create_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client.create_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): + The request object. The parameters to ``CreateExclusion``. + parent (str): + Required. The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. 
If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.CreateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_exclusion(self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_update_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() exclusion.name = "name_value" exclusion.filter = "filter_value" - request = logging_v2.CreateExclusionRequest( - parent="parent_value", + request = logging_v2.UpdateExclusionRequest( + name="name_value", exclusion=exclusion, ) # Make the request - response = client.create_exclusion(request=request) + response = client.update_exclusion(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): - The request object. The parameters to ``CreateExclusion``. - parent (str): - Required. The parent resource in which to create the - exclusion: + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): + The request object. The parameters to ``UpdateExclusion``. + name (str): + Required. 
The resource name of the exclusion to update: :: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For example: - This corresponds to the ``parent`` field + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. exclusion (google.cloud.logging_v2.types.LogExclusion): - Required. The new exclusion, whose ``name`` parameter is - an exclusion name that is not already used in the parent - resource. + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2314,49 +3197,251 @@ def sample_create_exclusion(): Returns: google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.UpdateExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_exclusion(self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_exclusion(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client.delete_exclusion(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): + The request object. The parameters to ``DeleteExclusion``. + name (str): + Required. 
The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. + # in a logging_config.DeleteExclusionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_cmek_settings(self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_get_cmek_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a logging_config.GetCmekSettingsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.CreateExclusionRequest): - request = logging_config.CreateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if exclusion is not None: - request.exclusion = exclusion + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_exclusion] + rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), + ("name", request.name), )), ) @@ -2371,18 +3456,28 @@ def sample_create_exclusion(): # Done; return the response. return response - def update_exclusion(self, - request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + def update_cmek_settings(self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, *, - name: Optional[str] = None, - exclusion: Optional[logging_config.LogExclusion] = None, - update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.LogExclusion: - r"""Changes one or more properties of an existing - exclusion. + ) -> logging_config.CmekSettings: + r"""Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Log + Router `__ + for more information. .. 
code-block:: python @@ -2395,67 +3490,29 @@ def update_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_update_exclusion(): + def sample_update_cmek_settings(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - exclusion = logging_v2.LogExclusion() - exclusion.name = "name_value" - exclusion.filter = "filter_value" - - request = logging_v2.UpdateExclusionRequest( + request = logging_v2.UpdateCmekSettingsRequest( name="name_value", - exclusion=exclusion, ) # Make the request - response = client.update_exclusion(request=request) + response = client.update_cmek_settings(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): - The request object. The parameters to ``UpdateExclusion``. - name (str): - Required. The resource name of the exclusion to update: - - :: - - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" - - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - exclusion (google.cloud.logging_v2.types.LogExclusion): - Required. New values for the existing exclusion. Only - the fields specified in ``update_mask`` are relevant. - - This corresponds to the ``exclusion`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. A non-empty list of fields to change in the - existing exclusion. 
New values for the fields are taken - from the corresponding fields in the - [LogExclusion][google.logging.v2.LogExclusion] included - in this request. Fields not mentioned in ``update_mask`` - are not changed and are ignored in the request. - - For example, to change the filter and description of an - exclusion, specify an ``update_mask`` of - ``"filter,description"``. + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. + See `Enabling CMEK for Log + Router `__ + for more information. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2463,46 +3520,32 @@ def sample_update_exclusion(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. 
+ + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. + # in a logging_config.UpdateCmekSettingsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.UpdateExclusionRequest): - request = logging_config.UpdateExclusionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if exclusion is not None: - request.exclusion = exclusion - if update_mask is not None: - request.update_mask = update_mask + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_exclusion] + rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -2523,15 +3566,25 @@ def sample_update_exclusion(): # Done; return the response. 
return response - def delete_exclusion(self, - request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + def get_settings(self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an exclusion. + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. .. code-block:: python @@ -2544,34 +3597,49 @@ def delete_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_delete_exclusion(): + def sample_get_settings(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.DeleteExclusionRequest( + request = logging_v2.GetSettingsRequest( name="name_value", ) # Make the request - client.delete_exclusion(request=request) + response = client.get_settings(request=request) + + # Handle the response + print(response) Args: - request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): - The request object. The parameters to ``DeleteExclusion``. + request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. name (str): - Required. The resource name of an existing exclusion to - delete: + Required. 
The resource for which to retrieve settings. :: - "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" - "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" - "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: - Example: - ``"projects/my-project-id/exclusions/my-exclusion-id"``. + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2581,6 +3649,13 @@ def sample_delete_exclusion(): timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -2591,11 +3666,11 @@ def sample_delete_exclusion(): 'the individual field arguments should be set.') # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. + # in a logging_config.GetSettingsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, logging_config.DeleteExclusionRequest): - request = logging_config.DeleteExclusionRequest(request) + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: @@ -2603,7 +3678,7 @@ def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] + rpc = self._transport._wrapped_methods[self._transport.get_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -2614,27 +3689,41 @@ def sample_delete_exclusion(): ) # Send the request. - rpc( + response = rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) - def get_cmek_settings(self, - request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + # Done; return the response. + return response + + def update_settings(self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Gets the Logs Router CMEK settings for the given resource. + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. 
+ [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -2649,29 +3738,53 @@ def get_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_get_cmek_settings(): + def sample_update_settings(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.GetCmekSettingsRequest( + request = logging_v2.UpdateSettingsRequest( name="name_value", ) # Make the request - response = client.get_cmek_settings(request=request) + response = client.update_settings(request=request) # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): + request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. 
A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2679,32 +3792,36 @@ def sample_get_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. + # in a logging_config.UpdateSettingsRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. 
- if not isinstance(request, logging_config.GetCmekSettingsRequest): - request = logging_config.GetCmekSettingsRequest(request) + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] + rpc = self._transport._wrapped_methods[self._transport.update_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -2725,28 +3842,15 @@ def sample_get_cmek_settings(): # Done; return the response. return response - def update_cmek_settings(self, - request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + def copy_log_entries(self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> logging_config.CmekSettings: - r"""Updates the Logs Router CMEK settings for the given resource. - - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. - - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] - will fail if 1) ``kms_key_name`` is invalid, or 2) the - associated service account does not have the required - ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for - the key, or 3) access to the key is disabled. - - See `Enabling CMEK for Logs - Router `__ - for more information. 
+ ) -> operation.Operation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. .. code-block:: python @@ -2759,29 +3863,29 @@ def update_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample_update_cmek_settings(): + def sample_copy_log_entries(): # Create a client client = logging_v2.ConfigServiceV2Client() # Initialize request argument(s) - request = logging_v2.UpdateCmekSettingsRequest( + request = logging_v2.CopyLogEntriesRequest( name="name_value", + destination="destination_value", ) # Make the request - response = client.update_cmek_settings(request=request) + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() # Handle the response print(response) Args: - request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): - The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2789,40 +3893,26 @@ def sample_update_cmek_settings(): sent along with the request as metadata. Returns: - google.cloud.logging_v2.types.CmekSettings: - Describes the customer-managed encryption key (CMEK) settings associated with - a project, folder, organization, billing account, or - flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. + google.api_core.operation.Operation: + An object representing a long-running operation. 
- See [Enabling CMEK for Logs - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) - for more information. + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. + # in a logging_config.CopyLogEntriesRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. - if not isinstance(request, logging_config.UpdateCmekSettingsRequest): - request = logging_config.UpdateCmekSettingsRequest(request) + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) + rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] # Send the request. response = rpc( @@ -2832,6 +3922,14 @@ def sample_update_cmek_settings(): metadata=metadata, ) + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + # Done; return the response. 
return response diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index fb524efd8bb9..444519d150d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -381,6 +381,127 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) +class ListLinksPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__iter__`` method to iterate through its + ``links`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging_config.ListLinksResponse], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[logging_config.Link]: + for page in self.pages: + yield from page.links + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLinksAsyncPager: + """A pager for iterating through ``list_links`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``links`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLinks`` requests and continue to iterate + through the ``links`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLinksResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListLinksResponse]], + request: logging_config.ListLinksRequest, + response: logging_config.ListLinksResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLinksRequest): + The initial request object. 
+ response (google.cloud.logging_v2.types.ListLinksResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = logging_config.ListLinksRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.Link]: + async def async_generator(): + async for page in self.pages: + for response in page.links: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + class ListExclusionsPager: """A pager for iterating through ``list_exclusions`` requests. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 8aaccee9e983..1ba655878dc9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -19,20 +19,15 @@ from .base import ConfigServiceV2Transport from .grpc import ConfigServiceV2GrpcTransport from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport -from .rest import ConfigServiceV2RestTransport -from .rest import ConfigServiceV2RestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] _transport_registry['grpc'] = ConfigServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport -_transport_registry['rest'] = ConfigServiceV2RestTransport __all__ = ( 'ConfigServiceV2Transport', 'ConfigServiceV2GrpcTransport', 'ConfigServiceV2GrpcAsyncIOTransport', - 'ConfigServiceV2RestTransport', - 'ConfigServiceV2RestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e30689cd0309..ab0c7ff98361 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -23,10 +23,12 @@ from google.api_core import 
exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -127,6 +129,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_bucket_async: gapic_v1.method.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), self.create_bucket: gapic_v1.method.wrap_method( self.create_bucket, default_timeout=None, @@ -229,6 +241,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_link: gapic_v1.method.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: gapic_v1.method.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), self.list_exclusions: gapic_v1.method.wrap_method( self.list_exclusions, default_retry=retries.Retry( @@ -288,6 +320,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_settings: gapic_v1.method.wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + 
self.update_settings: gapic_v1.method.wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method.wrap_method( + self.copy_log_entries, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -299,6 +346,11 @@ def close(self): """ raise NotImplementedError() + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + @property def list_buckets(self) -> Callable[ [logging_config.ListBucketsRequest], @@ -317,6 +369,24 @@ def get_bucket(self) -> Callable[ ]]: raise NotImplementedError() + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + @property def create_bucket(self) -> Callable[ [logging_config.CreateBucketRequest], @@ -443,6 +513,42 @@ def delete_sink(self) -> Callable[ ]]: raise NotImplementedError() + @property + def create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse] + ]]: + raise NotImplementedError() + + @property + def get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + Union[ + logging_config.Link, + 
Awaitable[logging_config.Link] + ]]: + raise NotImplementedError() + @property def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], @@ -506,6 +612,33 @@ def update_cmek_settings(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ]]: + raise NotImplementedError() + + @property + def update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ]]: + raise NotImplementedError() + + @property + def copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8a58c15f81c7..783cd0ced90d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -17,6 +17,7 @@ from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers +from google.api_core import operations_v1 from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore @@ -25,6 +26,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from 
google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -109,6 +111,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -226,13 +229,29 @@ def grpc_channel(self) -> grpc.Channel: """ return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_buckets(self) -> Callable[ [logging_config.ListBucketsRequest], logging_config.ListBucketsResponse]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -258,7 +277,7 @@ def get_bucket(self) -> Callable[ logging_config.LogBucket]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -278,15 +297,76 @@ def get_bucket(self) -> Callable[ ) return self._stubs['get_bucket'] + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + operations_pb2.Operation]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. 
+ + Returns: + Callable[[~.CreateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket_async' not in self._stubs: + self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_bucket_async'] + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + operations_pb2.Operation]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket_async' not in self._stubs: + self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_bucket_async'] + @property def create_bucket(self) -> Callable[ [logging_config.CreateBucketRequest], logging_config.LogBucket]: r"""Return a callable for the create bucket method over gRPC. 
- Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -312,17 +392,13 @@ def update_bucket(self) -> Callable[ logging_config.LogBucket]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + Updates a log bucket. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -348,9 +424,12 @@ def delete_bucket(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -376,8 +455,9 @@ def undelete_bucket(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. 
A bucket that has been + deleted can be undeleted within the grace period of 7 + days. Returns: Callable[[~.UndeleteBucketRequest], @@ -403,7 +483,7 @@ def list_views(self) -> Callable[ logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. - Lists views on a bucket. + Lists views on a log bucket. Returns: Callable[[~.ListViewsRequest], @@ -429,7 +509,7 @@ def get_view(self) -> Callable[ logging_config.LogView]: r"""Return a callable for the get view method over gRPC. - Gets a view. + Gets a view on a log bucket.. Returns: Callable[[~.GetViewRequest], @@ -455,8 +535,8 @@ def create_view(self) -> Callable[ logging_config.LogView]: r"""Return a callable for the create view method over gRPC. - Creates a view over logs in a bucket. A bucket may - contain a maximum of 50 views. + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. Returns: Callable[[~.CreateViewRequest], @@ -482,8 +562,11 @@ def update_view(self) -> Callable[ logging_config.LogView]: r"""Return a callable for the update view method over gRPC. - Updates a view. This method replaces the following fields in the - existing view with values from the new view: ``filter``. + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. Returns: Callable[[~.UpdateViewRequest], @@ -509,7 +592,10 @@ def delete_view(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. - Deletes a view from a bucket. + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. 
Returns: Callable[[~.DeleteViewRequest], @@ -669,13 +755,122 @@ def delete_sink(self) -> Callable[ ) return self._stubs['delete_sink'] + @property + def create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + operations_pb2.Operation]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_link' not in self._stubs: + self._stubs['create_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateLink', + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_link'] + + @property + def delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_link' not in self._stubs: + self._stubs['delete_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteLink', + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_link'] + + @property + def list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + logging_config.ListLinksResponse]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + ~.ListLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_links' not in self._stubs: + self._stubs['list_links'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListLinks', + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs['list_links'] + + @property + def get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + logging_config.Link]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + ~.Link]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_link' not in self._stubs: + self._stubs['get_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetLink', + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs['get_link'] + @property def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -701,7 +896,7 @@ def get_exclusion(self) -> Callable[ logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -727,10 +922,9 @@ def create_exclusion(self) -> Callable[ logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -756,8 +950,8 @@ def update_exclusion(self) -> Callable[ logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. 
Returns: Callable[[~.UpdateExclusionRequest], @@ -783,7 +977,7 @@ def delete_exclusion(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -809,13 +1003,14 @@ def get_cmek_settings(self) -> Callable[ logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -843,11 +1038,11 @@ def update_cmek_settings(self) -> Callable[ logging_config.CmekSettings]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. 
[UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] will fail if 1) ``kms_key_name`` is invalid, or 2) the @@ -855,7 +1050,7 @@ def update_cmek_settings(self) -> Callable[ ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key, or 3) access to the key is disabled. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -877,6 +1072,112 @@ def update_cmek_settings(self) -> Callable[ ) return self._stubs['update_cmek_settings'] + @property + def get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + logging_config.Settings]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_settings' not in self._stubs: + self._stubs['get_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSettings', + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['get_settings'] + + @property + def update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + logging_config.Settings]: + r"""Return a callable for the update settings method over gRPC. 
+ + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_settings' not in self._stubs: + self._stubs['update_settings'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSettings', + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['update_settings'] + + @property + def copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + operations_pb2.Operation]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'copy_log_entries' not in self._stubs: + self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['copy_log_entries'] + def close(self): self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 6e208e9a7535..22a7adfee489 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -25,6 +26,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -154,6 +156,7 @@ def __init__(self, *, self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None if 
api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -229,13 +232,29 @@ def grpc_channel(self) -> aio.Channel: # Return the channel from cache. return self._grpc_channel + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self.grpc_channel + ) + + # Return the client from cache. + return self._operations_client + @property def list_buckets(self) -> Callable[ [logging_config.ListBucketsRequest], Awaitable[logging_config.ListBucketsResponse]]: r"""Return a callable for the list buckets method over gRPC. - Lists buckets. + Lists log buckets. Returns: Callable[[~.ListBucketsRequest], @@ -261,7 +280,7 @@ def get_bucket(self) -> Callable[ Awaitable[logging_config.LogBucket]]: r"""Return a callable for the get bucket method over gRPC. - Gets a bucket. + Gets a log bucket. Returns: Callable[[~.GetBucketRequest], @@ -281,15 +300,76 @@ def get_bucket(self) -> Callable[ ) return self._stubs['get_bucket'] + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket_async' not in self._stubs: + self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_bucket_async'] + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket_async' not in self._stubs: + self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_bucket_async'] + @property def create_bucket(self) -> Callable[ [logging_config.CreateBucketRequest], Awaitable[logging_config.LogBucket]]: r"""Return a callable for the create bucket method over gRPC. - Creates a bucket that can be used to store log - entries. Once a bucket has been created, the region - cannot be changed. + Creates a log bucket that can be used to store log + entries. 
After a bucket has been created, the bucket's + location cannot be changed. Returns: Callable[[~.CreateBucketRequest], @@ -315,17 +395,13 @@ def update_bucket(self) -> Callable[ Awaitable[logging_config.LogBucket]]: r"""Return a callable for the update bucket method over gRPC. - Updates a bucket. This method replaces the following fields in - the existing bucket with values from the new bucket: - ``retention_period`` - - If the retention period is decreased and the bucket is locked, - FAILED_PRECONDITION will be returned. + Updates a log bucket. - If the bucket has a LifecycleState of DELETE_REQUESTED, - FAILED_PRECONDITION will be returned. + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. - A buckets region may not be modified after it is created. + After a bucket has been created, the bucket's location cannot be + changed. Returns: Callable[[~.UpdateBucketRequest], @@ -351,9 +427,12 @@ def delete_bucket(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete bucket method over gRPC. - Deletes a bucket. Moves the bucket to the DELETE_REQUESTED - state. After 7 days, the bucket will be purged and all logs in - the bucket will be permanently deleted. + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. Returns: Callable[[~.DeleteBucketRequest], @@ -379,8 +458,9 @@ def undelete_bucket(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the undelete bucket method over gRPC. - Undeletes a bucket. A bucket that has been deleted - may be undeleted within the grace period of 7 days. + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. 
         Returns:
             Callable[[~.UndeleteBucketRequest],
@@ -406,7 +486,7 @@ def list_views(self) -> Callable[
             Awaitable[logging_config.ListViewsResponse]]:
         r"""Return a callable for the list views method over gRPC.
 
-        Lists views on a bucket.
+        Lists views on a log bucket.
 
         Returns:
             Callable[[~.ListViewsRequest],
@@ -432,7 +512,7 @@ def get_view(self) -> Callable[
             Awaitable[logging_config.LogView]]:
         r"""Return a callable for the get view method over gRPC.
 
-        Gets a view.
+        Gets a view on a log bucket.
 
         Returns:
             Callable[[~.GetViewRequest],
@@ -458,8 +538,8 @@ def create_view(self) -> Callable[
             Awaitable[logging_config.LogView]]:
         r"""Return a callable for the create view method over gRPC.
 
-        Creates a view over logs in a bucket. A bucket may
-        contain a maximum of 50 views.
+        Creates a view over log entries in a log bucket. A
+        bucket may contain a maximum of 30 views.
 
         Returns:
             Callable[[~.CreateViewRequest],
@@ -485,8 +565,11 @@ def update_view(self) -> Callable[
             Awaitable[logging_config.LogView]]:
         r"""Return a callable for the update view method over gRPC.
 
-        Updates a view. This method replaces the following fields in the
-        existing view with values from the new view: ``filter``.
+        Updates a view on a log bucket. This method replaces the
+        following fields in the existing view with values from the new
+        view: ``filter``. If an ``UNAVAILABLE`` error is returned, this
+        indicates that the system is not in a state where it can update
+        the view. If this occurs, please try again in a few minutes.
 
         Returns:
             Callable[[~.UpdateViewRequest],
@@ -512,7 +595,10 @@ def delete_view(self) -> Callable[
             Awaitable[empty_pb2.Empty]]:
         r"""Return a callable for the delete view method over gRPC.
 
-        Deletes a view from a bucket.
+        Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is
+        returned, this indicates that the system is not in a state where
+        it can delete the view. If this occurs, please try again in a
+        few minutes.
Returns: Callable[[~.DeleteViewRequest], @@ -672,13 +758,122 @@ def delete_sink(self) -> Callable[ ) return self._stubs['delete_sink'] + @property + def create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_link' not in self._stubs: + self._stubs['create_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateLink', + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_link'] + + @property + def delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_link' not in self._stubs: + self._stubs['delete_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteLink', + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_link'] + + @property + def list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + Awaitable[logging_config.ListLinksResponse]]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + Awaitable[~.ListLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_links' not in self._stubs: + self._stubs['list_links'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListLinks', + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs['list_links'] + + @property + def get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + Awaitable[logging_config.Link]]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + Awaitable[~.Link]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_link' not in self._stubs: + self._stubs['get_link'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetLink', + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs['get_link'] + @property def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], Awaitable[logging_config.ListExclusionsResponse]]: r"""Return a callable for the list exclusions method over gRPC. - Lists all the exclusions in a parent resource. + Lists all the exclusions on the \_Default sink in a parent + resource. Returns: Callable[[~.ListExclusionsRequest], @@ -704,7 +899,7 @@ def get_exclusion(self) -> Callable[ Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the get exclusion method over gRPC. - Gets the description of an exclusion. + Gets the description of an exclusion in the \_Default sink. Returns: Callable[[~.GetExclusionRequest], @@ -730,10 +925,9 @@ def create_exclusion(self) -> Callable[ Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the create exclusion method over gRPC. - Creates a new exclusion in a specified parent - resource. Only log entries belonging to that resource - can be excluded. You can have up to 10 exclusions in a - resource. + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. Returns: Callable[[~.CreateExclusionRequest], @@ -759,8 +953,8 @@ def update_exclusion(self) -> Callable[ Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the update exclusion method over gRPC. - Changes one or more properties of an existing - exclusion. + Changes one or more properties of an existing exclusion in the + \_Default sink. 
Returns: Callable[[~.UpdateExclusionRequest], @@ -786,7 +980,7 @@ def delete_exclusion(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. - Deletes an exclusion. + Deletes an exclusion in the \_Default sink. Returns: Callable[[~.DeleteExclusionRequest], @@ -812,13 +1006,14 @@ def get_cmek_settings(self) -> Callable[ Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the get cmek settings method over gRPC. - Gets the Logs Router CMEK settings for the given resource. + Gets the Logging CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -846,11 +1041,11 @@ def update_cmek_settings(self) -> Callable[ Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the update cmek settings method over gRPC. - Updates the Logs Router CMEK settings for the given resource. + Updates the Log Router CMEK settings for the given resource. - Note: CMEK for the Logs Router can currently only be configured - for GCP organizations. Once configured, it applies to all - projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. 
         [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]
         will fail if 1) ``kms_key_name`` is invalid, or 2) the
@@ -858,7 +1053,7 @@ def update_cmek_settings(self) -> Callable[
         ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
         the key, or 3) access to the key is disabled.
 
-        See `Enabling CMEK for Logs
+        See `Enabling CMEK for Log
         Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
         for more information.
 
@@ -880,6 +1075,112 @@ def update_cmek_settings(self) -> Callable[
             )
         return self._stubs['update_cmek_settings']
 
+    @property
+    def get_settings(self) -> Callable[
+            [logging_config.GetSettingsRequest],
+            Awaitable[logging_config.Settings]]:
+        r"""Return a callable for the get settings method over gRPC.
+
+        Gets the Log Router settings for the given resource.
+
+        Note: Settings for the Log Router can be retrieved for Google
+        Cloud projects, folders, organizations and billing accounts.
+        Currently it can only be configured for organizations. Once
+        configured for an organization, it applies to all projects and
+        folders in the Google Cloud organization.
+
+        See `Enabling CMEK for Log
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Returns:
+            Callable[[~.GetSettingsRequest],
+                    Awaitable[~.Settings]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'get_settings' not in self._stubs:
+            self._stubs['get_settings'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.ConfigServiceV2/GetSettings',
+                request_serializer=logging_config.GetSettingsRequest.serialize,
+                response_deserializer=logging_config.Settings.deserialize,
+            )
+        return self._stubs['get_settings']
+
+    @property
+    def update_settings(self) -> Callable[
+            [logging_config.UpdateSettingsRequest],
+            Awaitable[logging_config.Settings]]:
+        r"""Return a callable for the update settings method over gRPC.
+
+        Updates the Log Router settings for the given resource.
+
+        Note: Settings for the Log Router can currently only be
+        configured for Google Cloud organizations. Once configured, it
+        applies to all projects and folders in the Google Cloud
+        organization.
+
+        [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]
+        will fail if 1) ``kms_key_name`` is invalid, or 2) the
+        associated service account does not have the required
+        ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for
+        the key, or 3) access to the key is disabled. 4) ``location_id``
+        is not supported by Logging. 5) ``location_id`` violates
+        OrgPolicy.
+
+        See `Enabling CMEK for Log
+        Router <https://cloud.google.com/logging/docs/routing/managed-encryption>`__
+        for more information.
+
+        Returns:
+            Callable[[~.UpdateSettingsRequest],
+                    Awaitable[~.Settings]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'update_settings' not in self._stubs:
+            self._stubs['update_settings'] = self.grpc_channel.unary_unary(
+                '/google.logging.v2.ConfigServiceV2/UpdateSettings',
+                request_serializer=logging_config.UpdateSettingsRequest.serialize,
+                response_deserializer=logging_config.Settings.deserialize,
+            )
+        return self._stubs['update_settings']
+
+    @property
+    def copy_log_entries(self) -> Callable[
+            [logging_config.CopyLogEntriesRequest],
+            Awaitable[operations_pb2.Operation]]:
+        r"""Return a callable for the copy log entries method over gRPC.
+
+        Copies a set of log entries from a log bucket to a
+        Cloud Storage bucket.
+
+        Returns:
+            Callable[[~.CopyLogEntriesRequest],
+                Awaitable[~.Operation]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'copy_log_entries' not in self._stubs: + self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CopyLogEntries', + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['copy_log_entries'] + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py deleted file mode 100755 index 0a90ea99c0eb..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/rest.py +++ /dev/null @@ -1,3141 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.logging_v2.types import logging_config -from google.protobuf import empty_pb2 # type: ignore - -from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class ConfigServiceV2RestInterceptor: - """Interceptor for ConfigServiceV2. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the ConfigServiceV2RestTransport. - - .. 
code-block:: python - class MyCustomConfigServiceV2Interceptor(ConfigServiceV2RestInterceptor): - def pre_create_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_bucket(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_exclusion(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_sink(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_create_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_view(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_delete_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_bucket(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_cmek_settings(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_cmek_settings(self, response): - logging.log(f"Received response: {response}") - return 
response - - def pre_get_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_exclusion(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_sink(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_get_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_view(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_buckets(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_buckets(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_exclusions(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_exclusions(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_sinks(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_sinks(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_views(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_views(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_undelete_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_update_bucket(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_bucket(self, response): - logging.log(f"Received response: {response}") - return response - - def 
pre_update_cmek_settings(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_cmek_settings(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_exclusion(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_exclusion(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_sink(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_sink(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_view(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_view(self, response): - logging.log(f"Received response: {response}") - return response - - transport = ConfigServiceV2RestTransport(interceptor=MyCustomConfigServiceV2Interceptor()) - client = ConfigServiceV2Client(transport=transport) - - - """ - def pre_create_bucket(self, request: logging_config.CreateBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_create_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: - """Post-rpc interceptor for create_bucket - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. 
- """ - return response - def pre_create_exclusion(self, request: logging_config.CreateExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_create_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: - """Post-rpc interceptor for create_exclusion - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_create_sink(self, request: logging_config.CreateSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_create_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: - """Post-rpc interceptor for create_sink - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_create_view(self, request: logging_config.CreateViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.CreateViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. 
- """ - return request, metadata - - def post_create_view(self, response: logging_config.LogView) -> logging_config.LogView: - """Post-rpc interceptor for create_view - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_delete_bucket(self, request: logging_config.DeleteBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_delete_exclusion(self, request: logging_config.DeleteExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_delete_sink(self, request: logging_config.DeleteSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_delete_view(self, request: logging_config.DeleteViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.DeleteViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. 
- """ - return request, metadata - - def pre_get_bucket(self, request: logging_config.GetBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: - """Post-rpc interceptor for get_bucket - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_cmek_settings(self, request: logging_config.GetCmekSettingsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetCmekSettingsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_cmek_settings - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_cmek_settings(self, response: logging_config.CmekSettings) -> logging_config.CmekSettings: - """Post-rpc interceptor for get_cmek_settings - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_exclusion(self, request: logging_config.GetExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. 
- """ - return request, metadata - - def post_get_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: - """Post-rpc interceptor for get_exclusion - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_sink(self, request: logging_config.GetSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: - """Post-rpc interceptor for get_sink - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_get_view(self, request: logging_config.GetViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.GetViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_get_view(self, response: logging_config.LogView) -> logging_config.LogView: - """Post-rpc interceptor for get_view - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. 
- """ - return response - def pre_list_buckets(self, request: logging_config.ListBucketsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListBucketsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_buckets - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_buckets(self, response: logging_config.ListBucketsResponse) -> logging_config.ListBucketsResponse: - """Post-rpc interceptor for list_buckets - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_exclusions(self, request: logging_config.ListExclusionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListExclusionsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_exclusions - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_exclusions(self, response: logging_config.ListExclusionsResponse) -> logging_config.ListExclusionsResponse: - """Post-rpc interceptor for list_exclusions - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_sinks(self, request: logging_config.ListSinksRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListSinksRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_sinks - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. 
- """ - return request, metadata - - def post_list_sinks(self, response: logging_config.ListSinksResponse) -> logging_config.ListSinksResponse: - """Post-rpc interceptor for list_sinks - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_views(self, request: logging_config.ListViewsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.ListViewsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_views - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_list_views(self, response: logging_config.ListViewsResponse) -> logging_config.ListViewsResponse: - """Post-rpc interceptor for list_views - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_undelete_bucket(self, request: logging_config.UndeleteBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UndeleteBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for undelete_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def pre_update_bucket(self, request: logging_config.UpdateBucketRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateBucketRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_bucket - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. 
- """ - return request, metadata - - def post_update_bucket(self, response: logging_config.LogBucket) -> logging_config.LogBucket: - """Post-rpc interceptor for update_bucket - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_cmek_settings(self, request: logging_config.UpdateCmekSettingsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateCmekSettingsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_cmek_settings - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_cmek_settings(self, response: logging_config.CmekSettings) -> logging_config.CmekSettings: - """Post-rpc interceptor for update_cmek_settings - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_exclusion(self, request: logging_config.UpdateExclusionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateExclusionRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_exclusion - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_exclusion(self, response: logging_config.LogExclusion) -> logging_config.LogExclusion: - """Post-rpc interceptor for update_exclusion - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. 
- """ - return response - def pre_update_sink(self, request: logging_config.UpdateSinkRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateSinkRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_sink - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_sink(self, response: logging_config.LogSink) -> logging_config.LogSink: - """Post-rpc interceptor for update_sink - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_view(self, request: logging_config.UpdateViewRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_config.UpdateViewRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_view - - Override in a subclass to manipulate the request or metadata - before they are sent to the ConfigServiceV2 server. - """ - return request, metadata - - def post_update_view(self, response: logging_config.LogView) -> logging_config.LogView: - """Post-rpc interceptor for update_view - - Override in a subclass to manipulate the response - after it is returned by the ConfigServiceV2 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class ConfigServiceV2RestStub: - _session: AuthorizedSession - _host: str - _interceptor: ConfigServiceV2RestInterceptor - - -class ConfigServiceV2RestTransport(ConfigServiceV2Transport): - """REST backend transport for ConfigServiceV2. - - Service for configuring sinks used to route log entries. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! - """ - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[ConfigServiceV2RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or ConfigServiceV2RestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "bucketId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: 
Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogBucket: - r"""Call the create bucket method over HTTP. - - Args: - request (~.logging_config.CreateBucketRequest): - The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogBucket: - Describes a repository of logs. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*/locations/*}/buckets', - 'body': 'bucket', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*/locations/*}/buckets', - 'body': 'bucket', - }, - ] - request, metadata = self._interceptor.pre_create_bucket(request, metadata) - pb_request = logging_config.CreateBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - 
response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogBucket() - pb_resp = logging_config.LogBucket.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_bucket(resp) - return resp - - class _CreateExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogExclusion: - r"""Call the create exclusion method over HTTP. - - Args: - request (~.logging_config.CreateExclusionRequest): - The request object. The parameters to ``CreateExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. 
Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*}/exclusions', - 'body': 'exclusion', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*}/exclusions', - 'body': 'exclusion', - }, - ] - request, metadata = self._interceptor.pre_create_exclusion(request, metadata) - pb_request = logging_config.CreateExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogExclusion() - pb_resp = logging_config.LogExclusion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_exclusion(resp) - return resp - - class _CreateSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogSink: - r"""Call the create sink method over HTTP. - - Args: - request (~.logging_config.CreateSinkRequest): - The request object. The parameters to ``CreateSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*}/sinks', - 'body': 'sink', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*}/sinks', - 'body': 'sink', - }, - ] - request, metadata = self._interceptor.pre_create_sink(request, metadata) - pb_request = logging_config.CreateSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogSink() - pb_resp = logging_config.LogSink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_sink(resp) - return resp - - class _CreateView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("CreateView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "viewId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.CreateViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogView: - r"""Call the create view method over HTTP. - - Args: - request (~.logging_config.CreateViewRequest): - The request object. The parameters to ``CreateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogView: - Describes a view over logs in a - bucket. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=*/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=organizations/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=folders/*/locations/*/buckets/*}/views', - 'body': 'view', - }, -{ - 'method': 'post', - 'uri': '/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views', - 'body': 'view', - }, - ] - request, metadata = self._interceptor.pre_create_view(request, metadata) - pb_request = logging_config.CreateViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogView() - pb_resp = logging_config.LogView.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_view(resp) - return resp - - class _DeleteBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete bucket method over HTTP. - - Args: - request (~.logging_config.DeleteBucketRequest): - The request object. The parameters to ``DeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_bucket(request, metadata) - pb_request = logging_config.DeleteBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete exclusion method over HTTP. - - Args: - request (~.logging_config.DeleteExclusionRequest): - The request object. The parameters to ``DeleteExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=*/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=folders/*/exclusions/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_exclusion(request, metadata) - pb_request = logging_config.DeleteExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete sink method over HTTP. - - Args: - request (~.logging_config.DeleteSinkRequest): - The request object. The parameters to ``DeleteSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{sink_name=*/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_sink(request, metadata) - pb_request = logging_config.DeleteSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _DeleteView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("DeleteView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.DeleteViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete view method over HTTP. - - Args: - request (~.logging_config.DeleteViewRequest): - The request object. The parameters to ``DeleteView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_view(request, metadata) - pb_request = logging_config.DeleteViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogBucket: - r"""Call the get bucket method over HTTP. - - Args: - request (~.logging_config.GetBucketRequest): - The request object. The parameters to ``GetBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogBucket: - Describes a repository of logs. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=billingAccounts/*/buckets/*}', - }, - ] - request, metadata = self._interceptor.pre_get_bucket(request, metadata) - pb_request = logging_config.GetBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogBucket() - pb_resp = logging_config.LogBucket.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_bucket(resp) - return resp - - class _GetCmekSettings(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetCmekSettings") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetCmekSettingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.CmekSettings: - r"""Call the get cmek settings method over HTTP. - - Args: - request (~.logging_config.GetCmekSettingsRequest): - The request object. The parameters to - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*}/cmekSettings', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*}/cmekSettings', - }, - ] - request, metadata = self._interceptor.pre_get_cmek_settings(request, metadata) - pb_request = logging_config.GetCmekSettingsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.CmekSettings() - pb_resp = logging_config.CmekSettings.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_cmek_settings(resp) - return resp - - class _GetExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogExclusion: - r"""Call the get exclusion method over HTTP. - - Args: - request (~.logging_config.GetExclusionRequest): - The request object. The parameters to ``GetExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=folders/*/exclusions/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', - }, - ] - request, metadata = self._interceptor.pre_get_exclusion(request, metadata) - pb_request = logging_config.GetExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogExclusion() - pb_resp = logging_config.LogExclusion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_exclusion(resp) - return resp - - class _GetSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogSink: - r"""Call the get sink method over HTTP. - - Args: - request (~.logging_config.GetSinkRequest): - The request object. The parameters to ``GetSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{sink_name=*/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - }, - ] - request, metadata = self._interceptor.pre_get_sink(request, metadata) - pb_request = logging_config.GetSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogSink() - pb_resp = logging_config.LogSink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_sink(resp) - return resp - - class _GetView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("GetView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.GetViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogView: - r"""Call the get view method over HTTP. - - Args: - request (~.logging_config.GetViewRequest): - The request object. The parameters to ``GetView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogView: - Describes a view over logs in a - bucket. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', - }, -{ - 'method': 'get', - 'uri': '/v2/{name=billingAccounts/*/buckets/*/views/*}', - }, - ] - request, metadata = self._interceptor.pre_get_view(request, metadata) - pb_request = logging_config.GetViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogView() - pb_resp = logging_config.LogView.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_view(resp) - return resp - - class _ListBuckets(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListBuckets") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListBucketsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListBucketsResponse: - r"""Call the list buckets method over HTTP. - - Args: - request (~.logging_config.ListBucketsRequest): - The request object. The parameters to ``ListBuckets``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListBucketsResponse: - The response from ListBuckets. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*/locations/*}/buckets', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*/locations/*}/buckets', - }, - ] - request, metadata = self._interceptor.pre_list_buckets(request, metadata) - pb_request = logging_config.ListBucketsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListBucketsResponse() - pb_resp = logging_config.ListBucketsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_buckets(resp) - return resp - - class _ListExclusions(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListExclusions") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListExclusionsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListExclusionsResponse: - r"""Call the list exclusions method over HTTP. - - Args: - request (~.logging_config.ListExclusionsRequest): - The request object. The parameters to ``ListExclusions``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListExclusionsResponse: - Result returned from ``ListExclusions``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*}/exclusions', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*}/exclusions', - }, - ] - request, metadata = self._interceptor.pre_list_exclusions(request, metadata) - pb_request = logging_config.ListExclusionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListExclusionsResponse() - pb_resp = logging_config.ListExclusionsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_exclusions(resp) - return resp - - class _ListSinks(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListSinks") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListSinksRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListSinksResponse: - r"""Call the list sinks method over HTTP. - - Args: - request (~.logging_config.ListSinksRequest): - The request object. The parameters to ``ListSinks``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListSinksResponse: - Result returned from ``ListSinks``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*}/sinks', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*}/sinks', - }, - ] - request, metadata = self._interceptor.pre_list_sinks(request, metadata) - pb_request = logging_config.ListSinksRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListSinksResponse() - pb_resp = logging_config.ListSinksResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_sinks(resp) - return resp - - class _ListViews(ConfigServiceV2RestStub): - def __hash__(self): - return hash("ListViews") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.ListViewsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.ListViewsResponse: - r"""Call the list views method over HTTP. - - Args: - request (~.logging_config.ListViewsRequest): - The request object. The parameters to ``ListViews``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.ListViewsResponse: - The response from ListViews. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*/locations/*/buckets/*}/views', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*/locations/*/buckets/*}/views', - }, - ] - request, metadata = self._interceptor.pre_list_views(request, metadata) - pb_request = logging_config.ListViewsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.ListViewsResponse() - pb_resp = logging_config.ListViewsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_views(resp) - return resp - - class _UndeleteBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UndeleteBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UndeleteBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the undelete bucket method over HTTP. - - Args: - request (~.logging_config.UndeleteBucketRequest): - The request object. The parameters to ``UndeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}:undelete', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_undelete_bucket(request, metadata) - pb_request = logging_config.UndeleteBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _UpdateBucket(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateBucket") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateBucketRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogBucket: - r"""Call the update bucket method over HTTP. - - Args: - request (~.logging_config.UpdateBucketRequest): - The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogBucket: - Describes a repository of logs. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*}', - 'body': 'bucket', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*}', - 'body': 'bucket', - }, - ] - request, metadata = self._interceptor.pre_update_bucket(request, metadata) - pb_request = logging_config.UpdateBucketRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogBucket() - pb_resp = logging_config.LogBucket.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_bucket(resp) - return resp - - class _UpdateCmekSettings(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateCmekSettings") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateCmekSettingsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.CmekSettings: - r"""Call the update cmek settings method over HTTP. - - Args: - request (~.logging_config.UpdateCmekSettingsRequest): - The request object. The parameters to - [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - - See `Enabling CMEK for Logs - Router `__ - for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.CmekSettings: - Describes the customer-managed encryption key (CMEK) - settings associated with a project, folder, - organization, billing account, or flexible resource. - - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP - organization. - - See `Enabling CMEK for Logs - Router `__ - for more information. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*}/cmekSettings', - 'body': 'cmek_settings', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*}/cmekSettings', - 'body': 'cmek_settings', - }, - ] - request, metadata = self._interceptor.pre_update_cmek_settings(request, metadata) - pb_request = logging_config.UpdateCmekSettingsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.CmekSettings() - pb_resp = logging_config.CmekSettings.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_cmek_settings(resp) - return resp - - class _UpdateExclusion(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateExclusion") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateExclusionRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogExclusion: - r"""Call the update exclusion method over HTTP. - - Args: - request (~.logging_config.UpdateExclusionRequest): - The request object. The parameters to ``UpdateExclusion``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogExclusion: - Specifies a set of log entries that - are not to be stored in Logging. If your - GCP resource receives a large volume of - logs, you can use exclusions to reduce - your chargeable logs. Exclusions are - processed after log sinks, so you can - export log entries before they are - excluded. Note that organization-level - and folder-level exclusions don't apply - to child resources, and that you can't - exclude audit log entries. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=folders/*/exclusions/*}', - 'body': 'exclusion', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=billingAccounts/*/exclusions/*}', - 'body': 'exclusion', - }, - ] - request, metadata = self._interceptor.pre_update_exclusion(request, metadata) - pb_request = logging_config.UpdateExclusionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogExclusion() - pb_resp = logging_config.LogExclusion.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_exclusion(resp) - return resp - - class _UpdateSink(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateSink") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateSinkRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogSink: - r"""Call the update sink method over HTTP. - - Args: - request (~.logging_config.UpdateSinkRequest): - The request object. The parameters to ``UpdateSink``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogSink: - Describes a sink used to export log - entries to one of the following - destinations in any project: a Cloud - Storage bucket, a BigQuery dataset, or a - Cloud Pub/Sub topic. A logs filter - controls which log entries are exported. - The sink must be created within a - project, organization, billing account, - or folder. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v2/{sink_name=*/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'put', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=projects/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=organizations/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=folders/*/sinks/*}', - 'body': 'sink', - }, -{ - 'method': 'patch', - 'uri': '/v2/{sink_name=billingAccounts/*/sinks/*}', - 'body': 'sink', - }, - ] - request, metadata = self._interceptor.pre_update_sink(request, metadata) - pb_request = logging_config.UpdateSinkRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogSink() - pb_resp = logging_config.LogSink.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_sink(resp) - return resp - - class _UpdateView(ConfigServiceV2RestStub): - def __hash__(self): - return hash("UpdateView") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_config.UpdateViewRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_config.LogView: - r"""Call the update view method over HTTP. - - Args: - request (~.logging_config.UpdateViewRequest): - The request object. The parameters to ``UpdateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_config.LogView: - Describes a view over logs in a - bucket. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v2/{name=*/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=projects/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=organizations/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=folders/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, -{ - 'method': 'patch', - 'uri': '/v2/{name=billingAccounts/*/locations/*/buckets/*/views/*}', - 'body': 'view', - }, - ] - request, metadata = self._interceptor.pre_update_view(request, metadata) - pb_request = logging_config.UpdateViewRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_config.LogView() - pb_resp = logging_config.LogView.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_view(resp) - return resp - - @property - def create_bucket(self) -> Callable[ - [logging_config.CreateBucketRequest], - logging_config.LogBucket]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_exclusion(self) -> Callable[ - [logging_config.CreateExclusionRequest], - logging_config.LogExclusion]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_sink(self) -> Callable[ - [logging_config.CreateSinkRequest], - logging_config.LogSink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def create_view(self) -> Callable[ - [logging_config.CreateViewRequest], - logging_config.LogView]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._CreateView(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_bucket(self) -> Callable[ - [logging_config.DeleteBucketRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_exclusion(self) -> Callable[ - [logging_config.DeleteExclusionRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_sink(self) -> Callable[ - [logging_config.DeleteSinkRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_view(self) -> Callable[ - [logging_config.DeleteViewRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteView(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_bucket(self) -> Callable[ - [logging_config.GetBucketRequest], - logging_config.LogBucket]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_cmek_settings(self) -> Callable[ - [logging_config.GetCmekSettingsRequest], - logging_config.CmekSettings]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._GetCmekSettings(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_exclusion(self) -> Callable[ - [logging_config.GetExclusionRequest], - logging_config.LogExclusion]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_sink(self) -> Callable[ - [logging_config.GetSinkRequest], - logging_config.LogSink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_view(self) -> Callable[ - [logging_config.GetViewRequest], - logging_config.LogView]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetView(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_buckets(self) -> Callable[ - [logging_config.ListBucketsRequest], - logging_config.ListBucketsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListBuckets(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_exclusions(self) -> Callable[ - [logging_config.ListExclusionsRequest], - logging_config.ListExclusionsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._ListExclusions(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_sinks(self) -> Callable[ - [logging_config.ListSinksRequest], - logging_config.ListSinksResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListSinks(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_views(self) -> Callable[ - [logging_config.ListViewsRequest], - logging_config.ListViewsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListViews(self._session, self._host, self._interceptor) # type: ignore - - @property - def undelete_bucket(self) -> Callable[ - [logging_config.UndeleteBucketRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UndeleteBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_bucket(self) -> Callable[ - [logging_config.UpdateBucketRequest], - logging_config.LogBucket]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateBucket(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_cmek_settings(self) -> Callable[ - [logging_config.UpdateCmekSettingsRequest], - logging_config.CmekSettings]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateCmekSettings(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_exclusion(self) -> Callable[ - [logging_config.UpdateExclusionRequest], - logging_config.LogExclusion]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateExclusion(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_sink(self) -> Callable[ - [logging_config.UpdateSinkRequest], - logging_config.LogSink]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._UpdateSink(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_view(self) -> Callable[ - [logging_config.UpdateViewRequest], - logging_config.LogView]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateView(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'ConfigServiceV2RestTransport', -) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index efc2a54185b4..8ccc6353ccca 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -194,11 +194,11 @@ async def delete_log(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. .. code-block:: python @@ -229,16 +229,15 @@ async def sample_delete_log(): log_name (:class:`str`): Required. 
The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. @@ -354,19 +353,17 @@ async def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -422,17 +419,17 @@ async def sample_write_log_entries(): Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. 
However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -545,35 +542,32 @@ async def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` 
Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (:class:`str`): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -797,14 +791,12 @@ async def sample_list_logs(): request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): The request object. The parameters to ListLogs. parent (:class:`str`): - Required. The resource name that owns the logs: - - :: + Required. 
The resource name to list logs for: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index c9ad88c97eeb..2aa1ff6e5804 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -42,7 +42,6 @@ from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport -from .transports.rest import LoggingServiceV2RestTransport class LoggingServiceV2ClientMeta(type): @@ -55,7 +54,6 @@ class LoggingServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry["grpc"] = LoggingServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport - _transport_registry["rest"] = LoggingServiceV2RestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -306,9 +304,6 @@ def __init__(self, *, transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). 
We welcome your feedback via an - issue in this library's source repository. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the @@ -388,11 +383,11 @@ def delete_log(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: - r"""Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. .. code-block:: python @@ -423,16 +418,15 @@ def sample_delete_log(): log_name (str): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. 
@@ -540,19 +534,17 @@ def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or @@ -608,17 +600,17 @@ def sample_write_log_entries(): Log entries with timestamps that are more than the `logs retention - period `__ - in the past or more than 24 hours in the future will not - be available when calling ``entries.list``. However, - those log entries can still be `exported with + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ - for calls to ``entries.write``, you should try to - include several log entries in this list, rather than - calling this method for each individual log entry. + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this @@ -722,35 +714,32 @@ def sample_list_log_entries(): Required. 
Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added - to this list. + to this list. A maximum of 100 resources may be + specified in a single request. This corresponds to the ``resource_names`` field on the ``request`` instance; if ``request`` is provided, this should not be set. filter (str): - Optional. A filter that chooses which log entries to - return. See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources - listed in ``resource_names``. Referencing a parent - resource that is not listed in ``resource_names`` will - cause the filter to return no results. The maximum - length of the filter is 20000 characters. + Optional. Only log entries that match the filter are + returned. 
An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this @@ -959,14 +948,12 @@ def sample_list_logs(): request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): The request object. The parameters to ListLogs. parent (str): - Required. The resource name that owns the logs: - - :: + Required. The resource name to list logs for: - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index c1d66e378abf..e1fb42a46005 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -19,20 +19,15 @@ from .base import LoggingServiceV2Transport from .grpc import LoggingServiceV2GrpcTransport from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport -from .rest import LoggingServiceV2RestTransport -from .rest import LoggingServiceV2RestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] _transport_registry['grpc'] = LoggingServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport -_transport_registry['rest'] = LoggingServiceV2RestTransport __all__ = ( 'LoggingServiceV2Transport', 'LoggingServiceV2GrpcTransport', 'LoggingServiceV2GrpcAsyncIOTransport', - 'LoggingServiceV2RestTransport', - 'LoggingServiceV2RestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index fdaa104c76d0..5559071ddcef 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -232,11 +232,11 @@ def delete_log(self) -> Callable[ empty_pb2.Empty]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. 
Returns: Callable[[~.DeleteLogRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 0f1aaa222348..f60b01417d6f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -235,11 +235,11 @@ def delete_log(self) -> Callable[ Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log method over gRPC. - Deletes all the log entries in a log. The log - reappears if it receives new entries. Log entries - written shortly before the delete operation might not be - deleted. Entries received after the delete operation - with a timestamp before the operation will be deleted. + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. 
Returns: Callable[[~.DeleteLogRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py deleted file mode 100755 index b77514b574cf..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/rest.py +++ /dev/null @@ -1,769 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.logging_v2.types import logging -from google.protobuf import empty_pb2 # type: ignore - -from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class LoggingServiceV2RestInterceptor: - """Interceptor for LoggingServiceV2. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the LoggingServiceV2RestTransport. - - .. 
code-block:: python - class MyCustomLoggingServiceV2Interceptor(LoggingServiceV2RestInterceptor): - def pre_delete_log(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_list_log_entries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_log_entries(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_logs(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_logs(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_monitored_resource_descriptors(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_monitored_resource_descriptors(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_write_log_entries(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_write_log_entries(self, response): - logging.log(f"Received response: {response}") - return response - - transport = LoggingServiceV2RestTransport(interceptor=MyCustomLoggingServiceV2Interceptor()) - client = LoggingServiceV2Client(transport=transport) - - - """ - def pre_delete_log(self, request: logging.DeleteLogRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.DeleteLogRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_log - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. 
- """ - return request, metadata - - def pre_list_log_entries(self, request: logging.ListLogEntriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListLogEntriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_log_entries - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_list_log_entries(self, response: logging.ListLogEntriesResponse) -> logging.ListLogEntriesResponse: - """Post-rpc interceptor for list_log_entries - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_logs(self, request: logging.ListLogsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListLogsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_logs - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_list_logs(self, response: logging.ListLogsResponse) -> logging.ListLogsResponse: - """Post-rpc interceptor for list_logs - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_monitored_resource_descriptors(self, request: logging.ListMonitoredResourceDescriptorsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.ListMonitoredResourceDescriptorsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_monitored_resource_descriptors - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. 
- """ - return request, metadata - - def post_list_monitored_resource_descriptors(self, response: logging.ListMonitoredResourceDescriptorsResponse) -> logging.ListMonitoredResourceDescriptorsResponse: - """Post-rpc interceptor for list_monitored_resource_descriptors - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - def pre_write_log_entries(self, request: logging.WriteLogEntriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging.WriteLogEntriesRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for write_log_entries - - Override in a subclass to manipulate the request or metadata - before they are sent to the LoggingServiceV2 server. - """ - return request, metadata - - def post_write_log_entries(self, response: logging.WriteLogEntriesResponse) -> logging.WriteLogEntriesResponse: - """Post-rpc interceptor for write_log_entries - - Override in a subclass to manipulate the response - after it is returned by the LoggingServiceV2 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class LoggingServiceV2RestStub: - _session: AuthorizedSession - _host: str - _interceptor: LoggingServiceV2RestInterceptor - - -class LoggingServiceV2RestTransport(LoggingServiceV2Transport): - """REST backend transport for LoggingServiceV2. - - Service for ingesting and querying logs. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! 
- """ - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[LoggingServiceV2RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or LoggingServiceV2RestInterceptor() - self._prep_wrapped_messages(client_info) - - class _DeleteLog(LoggingServiceV2RestStub): - def __hash__(self): - return hash("DeleteLog") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.DeleteLogRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete log method over HTTP. - - Args: - request (~.logging.DeleteLogRequest): - The request object. The parameters to DeleteLog. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{log_name=projects/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=*/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=organizations/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=folders/*/logs/*}', - }, -{ - 'method': 'delete', - 'uri': '/v2/{log_name=billingAccounts/*/logs/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_log(request, metadata) - pb_request = logging.DeleteLogRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _ListLogEntries(LoggingServiceV2RestStub): - def __hash__(self): - return hash("ListLogEntries") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.ListLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.ListLogEntriesResponse: - r"""Call the list log entries method over HTTP. - - Args: - request (~.logging.ListLogEntriesRequest): - The request object. The parameters to ``ListLogEntries``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.ListLogEntriesResponse: - Result returned from ``ListLogEntries``. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/entries:list', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_list_log_entries(request, metadata) - pb_request = logging.ListLogEntriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.ListLogEntriesResponse() - pb_resp = logging.ListLogEntriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_log_entries(resp) - return resp - - class _ListLogs(LoggingServiceV2RestStub): - def __hash__(self): - return hash("ListLogs") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.ListLogsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.ListLogsResponse: - r"""Call the list logs method over HTTP. - - Args: - request (~.logging.ListLogsRequest): - The request object. The parameters to ListLogs. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.ListLogsResponse: - Result returned from ListLogs. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=*/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=organizations/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=folders/*}/logs', - }, -{ - 'method': 'get', - 'uri': '/v2/{parent=billingAccounts/*}/logs', - }, - ] - request, metadata = self._interceptor.pre_list_logs(request, metadata) - pb_request = logging.ListLogsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.ListLogsResponse() - pb_resp = logging.ListLogsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_logs(resp) - return resp - - class _ListMonitoredResourceDescriptors(LoggingServiceV2RestStub): - def __hash__(self): - return hash("ListMonitoredResourceDescriptors") - - def __call__(self, - request: logging.ListMonitoredResourceDescriptorsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.ListMonitoredResourceDescriptorsResponse: - r"""Call the list monitored resource - descriptors method over HTTP. - - Args: - request (~.logging.ListMonitoredResourceDescriptorsRequest): - The request object. The parameters to - ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.ListMonitoredResourceDescriptorsResponse: - Result returned from - ListMonitoredResourceDescriptors. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/monitoredResourceDescriptors', - }, - ] - request, metadata = self._interceptor.pre_list_monitored_resource_descriptors(request, metadata) - pb_request = logging.ListMonitoredResourceDescriptorsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.ListMonitoredResourceDescriptorsResponse() - pb_resp = logging.ListMonitoredResourceDescriptorsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_monitored_resource_descriptors(resp) - return resp - - class _TailLogEntries(LoggingServiceV2RestStub): - def __hash__(self): - return hash("TailLogEntries") - - def __call__(self, - request: logging.TailLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> rest_streaming.ResponseIterator: - raise NotImplementedError( - "Method TailLogEntries is not available over REST transport" - ) - class _WriteLogEntries(LoggingServiceV2RestStub): - def __hash__(self): - return hash("WriteLogEntries") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging.WriteLogEntriesRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging.WriteLogEntriesResponse: - r"""Call the write log entries method over HTTP. - - Args: - request (~.logging.WriteLogEntriesRequest): - The request object. The parameters to WriteLogEntries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging.WriteLogEntriesResponse: - Result returned from WriteLogEntries. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/entries:write', - 'body': '*', - }, - ] - request, metadata = self._interceptor.pre_write_log_entries(request, metadata) - pb_request = logging.WriteLogEntriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging.WriteLogEntriesResponse() - pb_resp = logging.WriteLogEntriesResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_write_log_entries(resp) - return resp - - @property - def delete_log(self) -> Callable[ - [logging.DeleteLogRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._DeleteLog(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_log_entries(self) -> Callable[ - [logging.ListLogEntriesRequest], - logging.ListLogEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLogEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_logs(self) -> Callable[ - [logging.ListLogsRequest], - logging.ListLogsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLogs(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_monitored_resource_descriptors(self) -> Callable[ - [logging.ListMonitoredResourceDescriptorsRequest], - logging.ListMonitoredResourceDescriptorsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListMonitoredResourceDescriptors(self._session, self._host, self._interceptor) # type: ignore - - @property - def tail_log_entries(self) -> Callable[ - [logging.TailLogEntriesRequest], - logging.TailLogEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._TailLogEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def write_log_entries(self) -> Callable[ - [logging.WriteLogEntriesRequest], - logging.WriteLogEntriesResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._WriteLogEntries(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'LoggingServiceV2RestTransport', -) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 7d74c4f61307..b82ec16c3658 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -43,7 +43,6 @@ from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport -from .transports.rest import MetricsServiceV2RestTransport class MetricsServiceV2ClientMeta(type): @@ -56,7 +55,6 @@ class MetricsServiceV2ClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry["grpc"] = MetricsServiceV2GrpcTransport _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport - _transport_registry["rest"] = MetricsServiceV2RestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -307,9 +305,6 @@ def __init__(self, *, transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - NOTE: "rest" transport functionality is currently in a - beta state (preview). We welcome your feedback via an - issue in this library's source repository. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 7f421d905965..07d010436ac9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -19,20 +19,15 @@ from .base import MetricsServiceV2Transport from .grpc import MetricsServiceV2GrpcTransport from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport -from .rest import MetricsServiceV2RestTransport -from .rest import MetricsServiceV2RestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[MetricsServiceV2Transport]] _transport_registry['grpc'] = MetricsServiceV2GrpcTransport _transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport -_transport_registry['rest'] = MetricsServiceV2RestTransport __all__ = ( 'MetricsServiceV2Transport', 'MetricsServiceV2GrpcTransport', 'MetricsServiceV2GrpcAsyncIOTransport', - 'MetricsServiceV2RestTransport', - 'MetricsServiceV2RestInterceptor', ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py deleted file mode 100755 index 9a834ebfadfe..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/rest.py +++ /dev/null @@ -1,756 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from google.cloud.logging_v2.types import logging_metrics -from google.protobuf import empty_pb2 # type: ignore - -from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class MetricsServiceV2RestInterceptor: - """Interceptor for MetricsServiceV2. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the MetricsServiceV2RestTransport. - - .. 
code-block:: python - class MyCustomMetricsServiceV2Interceptor(MetricsServiceV2RestInterceptor): - def pre_create_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_create_log_metric(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_delete_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def pre_get_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_get_log_metric(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_list_log_metrics(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_list_log_metrics(self, response): - logging.log(f"Received response: {response}") - return response - - def pre_update_log_metric(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - def post_update_log_metric(self, response): - logging.log(f"Received response: {response}") - return response - - transport = MetricsServiceV2RestTransport(interceptor=MyCustomMetricsServiceV2Interceptor()) - client = MetricsServiceV2Client(transport=transport) - - - """ - def pre_create_log_metric(self, request: logging_metrics.CreateLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.CreateLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for create_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. 
- """ - return request, metadata - - def post_create_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: - """Post-rpc interceptor for create_log_metric - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - def pre_delete_log_metric(self, request: logging_metrics.DeleteLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.DeleteLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for delete_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def pre_get_log_metric(self, request: logging_metrics.GetLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.GetLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for get_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def post_get_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: - """Post-rpc interceptor for get_log_metric - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - def pre_list_log_metrics(self, request: logging_metrics.ListLogMetricsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.ListLogMetricsRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for list_log_metrics - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. 
- """ - return request, metadata - - def post_list_log_metrics(self, response: logging_metrics.ListLogMetricsResponse) -> logging_metrics.ListLogMetricsResponse: - """Post-rpc interceptor for list_log_metrics - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - def pre_update_log_metric(self, request: logging_metrics.UpdateLogMetricRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[logging_metrics.UpdateLogMetricRequest, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for update_log_metric - - Override in a subclass to manipulate the request or metadata - before they are sent to the MetricsServiceV2 server. - """ - return request, metadata - - def post_update_log_metric(self, response: logging_metrics.LogMetric) -> logging_metrics.LogMetric: - """Post-rpc interceptor for update_log_metric - - Override in a subclass to manipulate the response - after it is returned by the MetricsServiceV2 server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class MetricsServiceV2RestStub: - _session: AuthorizedSession - _host: str - _interceptor: MetricsServiceV2RestInterceptor - - -class MetricsServiceV2RestTransport(MetricsServiceV2Transport): - """REST backend transport for MetricsServiceV2. - - Service for configuring logs-based metrics. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! 
- """ - - def __init__(self, *, - host: str = 'logging.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[ - ], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = 'https', - interceptor: Optional[MetricsServiceV2RestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via a GitHub issue in - this library's repository. Thank you! - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or MetricsServiceV2RestInterceptor() - self._prep_wrapped_messages(client_info) - - class _CreateLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("CreateLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.CreateLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.LogMetric: - r"""Call the create log metric method over HTTP. - - Args: - request (~.logging_metrics.CreateLogMetricRequest): - The request object. The parameters to CreateLogMetric. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v2/{parent=projects/*}/metrics', - 'body': 'metric', - }, - ] - request, metadata = self._interceptor.pre_create_log_metric(request, metadata) - pb_request = logging_metrics.CreateLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception 
- # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.LogMetric() - pb_resp = logging_metrics.LogMetric.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_log_metric(resp) - return resp - - class _DeleteLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("DeleteLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.DeleteLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ): - r"""Call the delete log metric method over HTTP. - - Args: - request (~.logging_metrics.DeleteLogMetricRequest): - The request object. The parameters to DeleteLogMetric. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v2/{metric_name=projects/*/metrics/*}', - }, - ] - request, metadata = self._interceptor.pre_delete_log_metric(request, metadata) - pb_request = logging_metrics.DeleteLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - class _GetLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("GetLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.GetLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.LogMetric: - r"""Call the get log metric method over HTTP. - - Args: - request (~.logging_metrics.GetLogMetricRequest): - The request object. The parameters to GetLogMetric. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. - - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{metric_name=projects/*/metrics/*}', - }, - ] - request, metadata = self._interceptor.pre_get_log_metric(request, metadata) - pb_request = logging_metrics.GetLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.LogMetric() - pb_resp = logging_metrics.LogMetric.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_log_metric(resp) - return resp - - class _ListLogMetrics(MetricsServiceV2RestStub): - def __hash__(self): - return hash("ListLogMetrics") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.ListLogMetricsRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.ListLogMetricsResponse: - r"""Call the list log metrics method over HTTP. - - Args: - request (~.logging_metrics.ListLogMetricsRequest): - The request object. The parameters to ListLogMetrics. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.ListLogMetricsResponse: - Result returned from ListLogMetrics. 
- """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v2/{parent=projects/*}/metrics', - }, - ] - request, metadata = self._interceptor.pre_list_log_metrics(request, metadata) - pb_request = logging_metrics.ListLogMetricsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.ListLogMetricsResponse() - pb_resp = logging_metrics.ListLogMetricsResponse.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_list_log_metrics(resp) - return resp - - class _UpdateLogMetric(MetricsServiceV2RestStub): - def __hash__(self): - return hash("UpdateLogMetric") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - - def __call__(self, - request: logging_metrics.UpdateLogMetricRequest, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> logging_metrics.LogMetric: - r"""Call the update log metric method over HTTP. - - Args: - request (~.logging_metrics.UpdateLogMetricRequest): - The request object. The parameters to UpdateLogMetric. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - ~.logging_metrics.LogMetric: - Describes a logs-based metric. The - value of the metric is the number of log - entries that match a logs filter in a - given time interval. - - Logs-based metrics can also be used to - extract values from logs and create a - distribution of the values. The - distribution records the statistics of - the extracted values along with an - optional histogram of the values as - specified by the bucket options. 
- - """ - - http_options: List[Dict[str, str]] = [{ - 'method': 'put', - 'uri': '/v2/{metric_name=projects/*/metrics/*}', - 'body': 'metric', - }, - ] - request, metadata = self._interceptor.pre_update_log_metric(request, metadata) - pb_request = logging_metrics.UpdateLogMetricRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - including_default_value_fields=False, - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - including_default_value_fields=False, - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - # Return the response - resp = logging_metrics.LogMetric() - pb_resp = logging_metrics.LogMetric.pb(resp) - - json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_update_log_metric(resp) - return resp - - @property - def create_log_metric(self) -> Callable[ - [logging_metrics.CreateLogMetricRequest], - logging_metrics.LogMetric]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._CreateLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def delete_log_metric(self) -> Callable[ - [logging_metrics.DeleteLogMetricRequest], - empty_pb2.Empty]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._DeleteLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def get_log_metric(self) -> Callable[ - [logging_metrics.GetLogMetricRequest], - logging_metrics.LogMetric]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._GetLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def list_log_metrics(self) -> Callable[ - [logging_metrics.ListLogMetricsRequest], - logging_metrics.ListLogMetricsResponse]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. - # In C++ this would require a dynamic_cast - return self._ListLogMetrics(self._session, self._host, self._interceptor) # type: ignore - - @property - def update_log_metric(self) -> Callable[ - [logging_metrics.UpdateLogMetricRequest], - logging_metrics.LogMetric]: - # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
- # In C++ this would require a dynamic_cast - return self._UpdateLogMetric(self._session, self._host, self._interceptor) # type: ignore - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__=( - 'MetricsServiceV2RestTransport', -) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index 29636d30ebdc..64298ee56e04 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -17,6 +17,7 @@ LogEntry, LogEntryOperation, LogEntrySourceLocation, + LogSplit, ) from .logging import ( DeleteLogRequest, @@ -33,40 +34,59 @@ WriteLogEntriesResponse, ) from .logging_config import ( + BigQueryDataset, BigQueryOptions, + BucketMetadata, CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, CreateBucketRequest, CreateExclusionRequest, + CreateLinkRequest, CreateSinkRequest, CreateViewRequest, DeleteBucketRequest, DeleteExclusionRequest, + DeleteLinkRequest, DeleteSinkRequest, DeleteViewRequest, GetBucketRequest, GetCmekSettingsRequest, GetExclusionRequest, + GetLinkRequest, + GetSettingsRequest, GetSinkRequest, GetViewRequest, + IndexConfig, + Link, + LinkMetadata, ListBucketsRequest, ListBucketsResponse, ListExclusionsRequest, ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, ListSinksRequest, ListSinksResponse, ListViewsRequest, ListViewsResponse, + LocationMetadata, LogBucket, LogExclusion, LogSink, LogView, + Settings, UndeleteBucketRequest, UpdateBucketRequest, UpdateCmekSettingsRequest, UpdateExclusionRequest, + UpdateSettingsRequest, UpdateSinkRequest, UpdateViewRequest, + IndexType, LifecycleState, + OperationState, ) from .logging_metrics 
import ( CreateLogMetricRequest, @@ -82,6 +102,7 @@ 'LogEntry', 'LogEntryOperation', 'LogEntrySourceLocation', + 'LogSplit', 'DeleteLogRequest', 'ListLogEntriesRequest', 'ListLogEntriesResponse', @@ -94,40 +115,59 @@ 'WriteLogEntriesPartialErrors', 'WriteLogEntriesRequest', 'WriteLogEntriesResponse', + 'BigQueryDataset', 'BigQueryOptions', + 'BucketMetadata', 'CmekSettings', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesRequest', + 'CopyLogEntriesResponse', 'CreateBucketRequest', 'CreateExclusionRequest', + 'CreateLinkRequest', 'CreateSinkRequest', 'CreateViewRequest', 'DeleteBucketRequest', 'DeleteExclusionRequest', + 'DeleteLinkRequest', 'DeleteSinkRequest', 'DeleteViewRequest', 'GetBucketRequest', 'GetCmekSettingsRequest', 'GetExclusionRequest', + 'GetLinkRequest', + 'GetSettingsRequest', 'GetSinkRequest', 'GetViewRequest', + 'IndexConfig', + 'Link', + 'LinkMetadata', 'ListBucketsRequest', 'ListBucketsResponse', 'ListExclusionsRequest', 'ListExclusionsResponse', + 'ListLinksRequest', + 'ListLinksResponse', 'ListSinksRequest', 'ListSinksResponse', 'ListViewsRequest', 'ListViewsResponse', + 'LocationMetadata', 'LogBucket', 'LogExclusion', 'LogSink', 'LogView', + 'Settings', 'UndeleteBucketRequest', 'UpdateBucketRequest', 'UpdateCmekSettingsRequest', 'UpdateExclusionRequest', + 'UpdateSettingsRequest', 'UpdateSinkRequest', 'UpdateViewRequest', + 'IndexType', 'LifecycleState', + 'OperationState', 'CreateLogMetricRequest', 'DeleteLogMetricRequest', 'GetLogMetricRequest', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index 21f6b0cd67d3..f6063ad00adb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -33,6 +33,7 @@ 'LogEntry', 
'LogEntryOperation', 'LogEntrySourceLocation', + 'LogSplit', }, ) @@ -67,6 +68,7 @@ class LogEntry(proto.Message): ``[LOG_ID]`` must be URL-encoded within ``log_name``. Example: ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``[LOG_ID]`` must be less than 512 characters long and can only include the following characters: upper and lower case alphanumeric characters, forward-slash, underscore, hyphen, @@ -74,7 +76,7 @@ class LogEntry(proto.Message): For backward compatibility, if ``log_name`` begins with a forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. + is ingested as usual, but the forward-slash is removed. Listing the log entry will not show the leading slash and filtering for a log name with a leading slash will never return any results. @@ -148,25 +150,74 @@ class LogEntry(proto.Message): Optional. Information about the HTTP request associated with this log entry, if applicable. labels (MutableMapping[str, str]): - Optional. A set of user-defined (key, value) - data that provides additional information about - the log entry. + Optional. A map of key, value pairs that provides additional + information about the log entry. The labels can be + user-defined or system-defined. + + User-defined labels are arbitrary key, value pairs that you + can use to classify logs. + + System-defined labels are defined by GCP services for + platform logs. They have two components - a service + namespace component and the attribute name. For example: + ``compute.googleapis.com/resource_name``. + + Cloud Logging truncates label keys that exceed 512 B and + label values that exceed 64 KB upon their associated log + entry being written. The truncation is indicated by an + ellipsis at the end of the character string. operation (google.cloud.logging_v2.types.LogEntryOperation): Optional. Information about an operation associated with the log entry, if applicable. 
trace (str): - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. Example: - ``projects/my-projectid/traces/06796866738c859f2f19b7cfb3214824`` + Optional. The REST resource name of the trace being written + to `Cloud Trace `__ in + association with this log entry. For example, if your trace + data is stored in the Cloud project "my-trace-project" and + if the service that is creating the log entry receives a + trace header that includes the trace ID "12345", then the + service should use + "projects/my-tracing-project/traces/12345". + + The ``trace`` field provides the link between logs and + traces. By using this field, you can navigate from a log + entry to a trace. span_id (str): - Optional. The span ID within the trace associated with the - log entry. - - For Trace spans, this is the same format that the Trace API - v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as ``000000000000004a``. + Optional. The ID of the `Cloud + Trace `__ span associated + with the current operation in which the log is being + written. For example, if a span has the REST resource name + of + "projects/some-project/traces/some-trace/spans/some-span-id", + then the ``span_id`` field is "some-span-id". + + A + `Span `__ + represents a single operation within a trace. Whereas a + trace may involve multiple different microservices running + on multiple different machines, a span generally corresponds + to a single logical operation being performed in a single + instance of a microservice on one specific machine. Spans + are the nodes within the tree that is a trace. + + Applications that are `instrumented for + tracing `__ will + generally assign a new, unique span ID on each incoming + request. 
It is also common to create and record additional + spans corresponding to internal processing elements as well + as issuing requests to dependencies. + + The span ID is expected to be a 16-character, hexadecimal + encoding of an 8-byte array and should not be zero. It + should be unique within the trace and should, ideally, be + generated in a manner that is uniformly random. + + Example values: + + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. @@ -180,6 +231,10 @@ class LogEntry(proto.Message): source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): Optional. Source code location information associated with the log entry, if any. + split (google.cloud.logging_v2.types.LogSplit): + Optional. Information indicating this + LogEntry is part of a sequence of multiple log + entries split from a single LogEntry. """ log_name: str = proto.Field( @@ -259,6 +314,11 @@ class LogEntry(proto.Message): number=23, message='LogEntrySourceLocation', ) + split: 'LogSplit' = proto.Field( + proto.MESSAGE, + number=35, + message='LogSplit', + ) class LogEntryOperation(proto.Message): @@ -337,4 +397,39 @@ class LogEntrySourceLocation(proto.Message): ) +class LogSplit(proto.Message): + r"""Additional information used to correlate multiple log + entries. Used when a single LogEntry would exceed the Google + Cloud Logging size limit and is split across multiple log + entries. + + Attributes: + uid (str): + A globally unique identifier for all log entries in a + sequence of split log entries. All log entries with the same + \|LogSplit.uid\| are assumed to be part of the same sequence + of split log entries. + index (int): + The index of this LogEntry in the sequence of split log + entries. Log entries are given \|index\| values 0, 1, ..., + n-1 for a sequence of n log entries. 
+ total_splits (int): + The total number of log entries that the + original LogEntry was split into. + """ + + uid: str = proto.Field( + proto.STRING, + number=1, + ) + index: int = proto.Field( + proto.INT32, + number=2, + ) + total_splits: int = proto.Field( + proto.INT32, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index fffc74c192cc..715909e87a4e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -51,16 +51,15 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. The resource name of the log to delete: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, - ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + For more information about log names, see [LogEntry][google.logging.v2.LogEntry]. 
""" @@ -80,19 +79,17 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - :: - - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: :: "projects/my-project-id/logs/syslog" - "organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" The permission ``logging.logEntries.create`` is needed on each project, organization, billing account, or folder that @@ -135,25 +132,27 @@ class WriteLogEntriesRequest(proto.Message): Log entries with timestamps that are more than the `logs retention - period `__ in - the past or more than 24 hours in the future will not be + period `__ in the + past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still be `exported with LogSinks `__. To improve throughput and to avoid exceeding the `quota - limit `__ for - calls to ``entries.write``, you should try to include - several log entries in this list, rather than calling this - method for each individual log entry. + limit `__ for calls + to ``entries.write``, you should try to include several log + entries in this list, rather than calling this method for + each individual log entry. partial_success (bool): - Optional. Whether valid entries should be written even if - some other entries fail due to INVALID_ARGUMENT or - PERMISSION_DENIED errors. 
If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. + Optional. Whether a batch's valid entries should be written + even if some other entry failed due to a permanent error + such as INVALID_ARGUMENT or PERMISSION_DENIED. If any entry + failed, then the response status is the response status of + one of the failed entries. The response will include error + details in ``WriteLogEntriesPartialErrors.log_entry_errors`` + keyed by the entries' zero-based index in the ``entries``. + Failed requests for which no entries are written will not + include per-entry errors. dry_run (bool): Optional. If true, the request should expect normal response, but the entries won't be @@ -226,31 +225,28 @@ class ListLogEntriesRequest(proto.Message): Required. Names of one or more parent resources from which to retrieve log entries: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + May alternatively be one or more views: - May alternatively be one or more views - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - 
``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to - this list. + this list. A maximum of 100 resources may be specified in a + single request. filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Queries `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not listed in ``resource_names`` will cause the filter to - return no results. The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. order_by (str): Optional. How the results should be sorted. Presently, the only permitted values are ``"timestamp asc"`` (default) and @@ -396,14 +392,29 @@ class ListLogsRequest(proto.Message): Attributes: parent (str): - Required. The resource name that owns the logs: + Required. The resource name to list logs for: - :: + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + resource_names (MutableSequence[str]): + Optional. 
List of resource names to list logs for: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + + To support legacy queries, it could also be: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]". + The resource name in the ``parent`` field is added to this + list. page_size (int): Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of @@ -415,22 +426,16 @@ class ListLogsRequest(proto.Message): ``pageToken`` must be the value of ``nextPageToken`` from the previous response. The values of other method parameters should be identical to those in the previous call. - resource_names (MutableSequence[str]): - Optional. The resource name that owns the logs: - projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID] - - To support legacy queries, it could also be: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". 
""" parent: str = proto.Field( proto.STRING, number=1, ) + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) page_size: int = proto.Field( proto.INT32, number=2, @@ -439,10 +444,6 @@ class ListLogsRequest(proto.Message): proto.STRING, number=3, ) - resource_names: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=8, - ) class ListLogsResponse(proto.Message): @@ -482,28 +483,24 @@ class TailLogEntriesRequest(proto.Message): Required. Name of a parent resource from which to retrieve log entries: - :: - - "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "organization/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]". + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): - Optional. A filter that chooses which log entries to return. - See `Advanced Logs - Filters `__. - Only log entries that match the filter are returned. An - empty filter matches all log entries in the resources listed - in ``resource_names``. Referencing a parent resource that is - not in ``resource_names`` will cause the filter to return no - results. 
The maximum length of the filter is 20000 - characters. + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. buffer_window (google.protobuf.duration_pb2.Duration): Optional. The amount of time to buffer log entries at the server before being returned to diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index a7ef42ec824c..0df028c0b93e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -26,10 +26,15 @@ __protobuf__ = proto.module( package='google.logging.v2', manifest={ + 'OperationState', 'LifecycleState', + 'IndexType', + 'IndexConfig', 'LogBucket', 'LogView', 'LogSink', + 'BigQueryDataset', + 'Link', 'BigQueryOptions', 'ListBucketsRequest', 'ListBucketsResponse', @@ -50,6 +55,11 @@ 'CreateSinkRequest', 'UpdateSinkRequest', 'DeleteSinkRequest', + 'CreateLinkRequest', + 'DeleteLinkRequest', + 'ListLinksRequest', + 'ListLinksResponse', + 'GetLinkRequest', 'LogExclusion', 'ListExclusionsRequest', 'ListExclusionsResponse', @@ -60,40 +70,158 @@ 'GetCmekSettingsRequest', 'UpdateCmekSettingsRequest', 'CmekSettings', + 'GetSettingsRequest', + 'UpdateSettingsRequest', + 'Settings', + 'CopyLogEntriesRequest', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesResponse', + 'BucketMetadata', + 'LinkMetadata', + 'LocationMetadata', }, ) +class OperationState(proto.Enum): + r"""List of different operation states. + High level state of the operation. 
This is used to report the + job's current state to the user. Once a long running operation + is created, the current state of the operation can be queried + even before the operation is finished and the final result is + available. + + Values: + OPERATION_STATE_UNSPECIFIED (0): + Should not be used. + OPERATION_STATE_SCHEDULED (1): + The operation is scheduled. + OPERATION_STATE_WAITING_FOR_PERMISSIONS (2): + Waiting for necessary permissions. + OPERATION_STATE_RUNNING (3): + The operation is running. + OPERATION_STATE_SUCCEEDED (4): + The operation was completed successfully. + OPERATION_STATE_FAILED (5): + The operation failed. + OPERATION_STATE_CANCELLED (6): + The operation was cancelled by the user. + """ + OPERATION_STATE_UNSPECIFIED = 0 + OPERATION_STATE_SCHEDULED = 1 + OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 + OPERATION_STATE_RUNNING = 3 + OPERATION_STATE_SUCCEEDED = 4 + OPERATION_STATE_FAILED = 5 + OPERATION_STATE_CANCELLED = 6 + + class LifecycleState(proto.Enum): r"""LogBucket lifecycle states. Values: LIFECYCLE_STATE_UNSPECIFIED (0): - Unspecified state. This is only used/useful + Unspecified state. This is only used/useful for distinguishing unset values. ACTIVE (1): The normal and active state. DELETE_REQUESTED (2): - The bucket has been marked for deletion by - the user. + The resource has been marked for deletion by + the user. For some resources (e.g. buckets), + this can be reversed by an un-delete operation. + UPDATING (3): + The resource has been marked for an update by + the user. It will remain in this state until the + update is complete. + CREATING (4): + The resource has been marked for creation by + the user. It will remain in this state until the + creation is complete. + FAILED (5): + The resource is in an INTERNAL error state. """ LIFECYCLE_STATE_UNSPECIFIED = 0 ACTIVE = 1 DELETE_REQUESTED = 2 + UPDATING = 3 + CREATING = 4 + FAILED = 5 + + +class IndexType(proto.Enum): + r"""IndexType is used for custom indexing. 
It describes the type + of an indexed field. + + Values: + INDEX_TYPE_UNSPECIFIED (0): + The index's type is unspecified. + INDEX_TYPE_STRING (1): + The index is a string-type index. + INDEX_TYPE_INTEGER (2): + The index is a integer-type index. + """ + INDEX_TYPE_UNSPECIFIED = 0 + INDEX_TYPE_STRING = 1 + INDEX_TYPE_INTEGER = 2 + + +class IndexConfig(proto.Message): + r"""Configuration for an indexed field. + + Attributes: + field_path (str): + Required. The LogEntry field path to index. + + Note that some paths are automatically indexed, and other + paths are not eligible for indexing. See `indexing + documentation `__ + for details. + + For example: ``jsonPayload.request.status`` + type_ (google.cloud.logging_v2.types.IndexType): + Required. The type of data in this index. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the index was + last modified. + This is used to return the timestamp, and will + be ignored if supplied during update. + """ + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'IndexType' = proto.Field( + proto.ENUM, + number=2, + enum='IndexType', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) class LogBucket(proto.Message): - r"""Describes a repository of logs. + r"""Describes a repository in which log entries are stored. Attributes: name (str): - The resource name of the bucket. For example: - "projects/my-project-id/locations/my-location/buckets/my-bucket-id - The supported locations are: "global" + Output only. The resource name of the bucket. + + For example: - For the location of ``global`` it is unspecified where logs - are actually stored. Once a bucket has been created, the - location can not be changed. 
+ ``projects/my-project/locations/global/buckets/my-bucket`` + + For a list of supported locations, see `Supported + Regions `__ + + For the location of ``global`` it is unspecified where log + entries are actually stored. + + After a bucket has been created, the location cannot be + changed. description (str): Describes this bucket. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -111,12 +239,39 @@ class LogBucket(proto.Message): bucket creation time, the default time of 30 days will be used. locked (bool): - Whether the bucket has been locked. - The retention period on a locked bucket may not + Whether the bucket is locked. + + The retention period on a locked bucket cannot be changed. Locked buckets may only be deleted if they are empty. lifecycle_state (google.cloud.logging_v2.types.LifecycleState): Output only. The bucket lifecycle state. + analytics_enabled (bool): + Whether log analytics is enabled for this + bucket. + Once enabled, log analytics features cannot be + disabled. + restricted_fields (MutableSequence[str]): + Log entry field paths that are denied access in this bucket. + + The following fields and their children are eligible: + ``textPayload``, ``jsonPayload``, ``protoPayload``, + ``httpRequest``, ``labels``, ``sourceLocation``. + + Restricting a repeated field will restrict all values. + Adding a parent will block all child fields. (e.g. + ``foo.bar`` will block ``foo.bar.baz``) + index_configs (MutableSequence[google.cloud.logging_v2.types.IndexConfig]): + A list of indexed fields and related + configuration data. + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + The CMEK settings of the log bucket. If + present, new log entries written to this log + bucket are encrypted using the CMEK key provided + in this configuration. If a log bucket has CMEK + settings, the CMEK settings cannot be disabled + later by updating the log bucket. Changing the + KMS key is allowed. 
""" name: str = proto.Field( @@ -150,16 +305,36 @@ class LogBucket(proto.Message): number=12, enum='LifecycleState', ) + analytics_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + restricted_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message='IndexConfig', + ) + cmek_settings: 'CmekSettings' = proto.Field( + proto.MESSAGE, + number=19, + message='CmekSettings', + ) class LogView(proto.Message): - r"""Describes a view over logs in a bucket. + r"""Describes a view over log entries in a bucket. Attributes: name (str): The resource name of the view. - For example - "projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket/views/my-view`` description (str): Describes this view. create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -170,11 +345,19 @@ class LogView(proto.Message): view. filter (str): Filter that restricts which log entries in a bucket are - visible in this view. Filters are restricted to be a logical - AND of ==/!= of any of the following: originating - project/folder/organization/billing account. resource type - log id Example: SOURCE("projects/myproject") AND - resource.type = "gce_instance" AND LOG_ID("stdout") + visible in this view. + + Filters are restricted to be a logical AND of ==/!= of any + of the following: + + - originating project/folder/organization/billing account. + - resource type + - log id + + For example: + + SOURCE("projects/myproject") AND resource.type = + "gce_instance" AND LOG_ID("stdout") """ name: str = proto.Field( @@ -204,10 +387,10 @@ class LogView(proto.Message): class LogSink(proto.Message): r"""Describes a sink used to export log entries to one of the following destinations in any project: a Cloud Storage bucket, a - BigQuery dataset, or a Cloud Pub/Sub topic. 
A logs filter - controls which log entries are exported. The sink must be - created within a project, organization, billing account, or - folder. + BigQuery dataset, a Pub/Sub topic or a Cloud Logging log bucket. + A logs filter controls which log entries are exported. The sink + must be created within a project, organization, billing account, + or folder. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -215,7 +398,9 @@ class LogSink(proto.Message): Attributes: name (str): Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink + the project. + + For example: ``"my-syslog-errors-to-pubsub"``. Sink identifiers are limited to 100 characters and can include only the following characters: upper and lower-case alphanumeric characters, underscores, hyphens, and periods. @@ -238,30 +423,33 @@ class LogSink(proto.Message): Optional. An `advanced logs filter `__. The only exported log entries are those that are in the - resource owning the sink and that match the filter. For - example: + resource owning the sink and that match the filter. - :: + For example: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR + ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` description (str): Optional. A description of this sink. + The maximum length of the description is 8000 characters. disabled (bool): - Optional. If set to True, then this sink is + Optional. If set to true, then this sink is disabled and it does not export any log entries. exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): - Optional. Log entries that match any of the exclusion - filters will not be exported. If a log entry is matched by - both ``filter`` and one of ``exclusion_filters`` it will not - be exported. + Optional. Log entries that match any of these exclusion + filters will not be exported. 
+ + If a log entry is matched by both ``filter`` and one of + ``exclusion_filters`` it will not be exported. output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): Deprecated. This field is unused. writer_identity (str): Output only. An IAM identity—a service account or - group—under which Logging writes the exported log entries to - the sink's destination. This field is set by + group—under which Cloud Logging writes the exported log + entries to the sink's destination. This field is either set + by specifying ``custom_writer_identity`` or set + automatically by [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] and [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] @@ -274,25 +462,30 @@ class LogSink(proto.Message): Resource `__. Consult the destination service's documentation to determine the appropriate IAM roles to assign to the identity. + + Sinks that have a destination that is a log bucket in the + same project as the sink cannot have a writer_identity and + no additional permissions are required. include_children (bool): Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, only the logs owned by the sink's parent resource - are available for export. If the field is true, then logs - from all the projects, folders, and billing accounts + are available for export. If the field is true, then log + entries from all the projects, folders, and billing accounts contained in the sink's parent resource are also available for export. Whether a particular log entry from the children - is exported depends on the sink's filter expression. For - example, if this field is true, then the filter + is exported depends on the sink's filter expression. + + For example, if this field is true, then the filter ``resource.type=gce_instance`` would export all Compute Engine VM instance log entries from all projects in the - sink's parent. 
To only export entries from certain child - projects, filter on the project part of the log name: + sink's parent. - :: + To only export entries from certain child projects, filter + on the project part of the log name: - logName:("projects/test-project1/" OR "projects/test-project2/") AND - resource.type=gce_instance + logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. @@ -379,6 +572,90 @@ class VersionFormat(proto.Enum): ) +class BigQueryDataset(proto.Message): + r"""Describes a BigQuery dataset that was created by a link. + + Attributes: + dataset_id (str): + Output only. The full resource name of the BigQuery dataset. + The DATASET_ID will match the ID of the link, so the link + must match the naming restrictions of BigQuery datasets + (alphanumeric characters and underscores only). + + The dataset will have a resource path of + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET_ID]". + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Link(proto.Message): + r"""Describes a link connected to an analytics enabled bucket. + + Attributes: + name (str): + The resource name of the link. The name can have up to 100 + characters. A valid link id (at the end of the link name) + must only have alphanumeric characters and underscores + within it. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + For example: + + \`projects/my-project/locations/global/buckets/my-bucket/links/my_link + description (str): + Describes this link. 
+ + The maximum length of the description is 8000 + characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + link. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The resource lifecycle state. + bigquery_dataset (google.cloud.logging_v2.types.BigQueryDataset): + The information of a BigQuery Dataset. When a + link is created, a BigQuery dataset is created + along with it, in the same project as the + LogBucket it's linked to. This dataset will also + have BigQuery Views corresponding to the + LogViews in the bucket. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + lifecycle_state: 'LifecycleState' = proto.Field( + proto.ENUM, + number=4, + enum='LifecycleState', + ) + bigquery_dataset: 'BigQueryDataset' = proto.Field( + proto.MESSAGE, + number=5, + message='BigQueryDataset', + ) + + class BigQueryOptions(proto.Message): r"""Options that change functionality of a sink exporting data to BigQuery. @@ -387,18 +664,20 @@ class BigQueryOptions(proto.Message): use_partitioned_tables (bool): Optional. Whether to use `BigQuery's partition tables `__. - By default, Logging creates dated tables based on the log - entries' timestamps, e.g. syslog_20170523. With partitioned - tables the date suffix is no longer present and `special - query + By default, Cloud Logging creates dated tables based on the + log entries' timestamps, e.g. syslog_20170523. With + partitioned tables the date suffix is no longer present and + `special query syntax `__ has to be used instead. In both cases, tables are sharded based on UTC timezone. uses_timestamp_column_partitioning (bool): Output only. 
True if new timestamp column based partitioning is in use, false if legacy ingestion-time partitioning is in - use. All new sinks will have this field set true and will - use timestamp column based partitioning. If + use. + + All new sinks will have this field set true and will use + timestamp column based partitioning. If use_partitioned_tables is false, this value has no meaning and will be false. Legacy sinks using partitioned tables will have this field set to false. @@ -492,13 +771,15 @@ class CreateBucketRequest(proto.Message): Attributes: parent (str): - Required. The resource in which to create the bucket: + Required. The resource in which to create the log bucket: :: "projects/[PROJECT_ID]/locations/[LOCATION_ID]" - Example: ``"projects/my-logging-project/locations/global"`` + For example: + + ``"projects/my-project/locations/global"`` bucket_id (str): Required. A client-assigned identifier such as ``"my-bucket"``. Identifiers are limited to 100 characters @@ -540,11 +821,9 @@ class UpdateBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. - Also requires permission - "resourcemanager.projects.updateLiens" to set the locked - property + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` bucket (google.cloud.logging_v2.types.LogBucket): Required. The updated bucket. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -553,10 +832,10 @@ class UpdateBucketRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - For a detailed ``FieldMask`` definition, see + For a detailed ``FieldMask`` definition, see: https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=retention_days``. 
+ For example: ``updateMask=retention_days`` """ name: str = proto.Field( @@ -589,8 +868,9 @@ class GetBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name: str = proto.Field( @@ -613,8 +893,9 @@ class DeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name: str = proto.Field( @@ -637,8 +918,9 @@ class UndeleteBucketRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` """ name: str = proto.Field( @@ -665,7 +947,9 @@ class ListViewsRequest(proto.Message): should be identical to those in the previous call. page_size (int): Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of + request. + + Non-positive values are ignored. The presence of ``nextPageToken`` in the response indicates that more results might be available. 
""" @@ -721,12 +1005,16 @@ class CreateViewRequest(proto.Message): :: - "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + `"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"` + + For example: - Example: - ``"projects/my-logging-project/locations/my-location/buckets/my-bucket"`` + ``"projects/my-project/locations/global/buckets/my-bucket"`` view_id (str): - Required. The id to use for this view. + Required. A client-assigned identifier such as + ``"my-view"``. Identifiers are limited to 100 characters and + can include only letters, digits, underscores, hyphens, and + periods. view (google.cloud.logging_v2.types.LogView): Required. The new view. """ @@ -757,8 +1045,9 @@ class UpdateViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` view (google.cloud.logging_v2.types.LogView): Required. The updated view. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -770,7 +1059,7 @@ class UpdateViewRequest(proto.Message): For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ name: str = proto.Field( @@ -800,8 +1089,9 @@ class GetViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. 
+ For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` """ name: str = proto.Field( @@ -821,8 +1111,11 @@ class DeleteViewRequest(proto.Message): "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" - Example: - ``"projects/my-project-id/locations/my-location/buckets/my-bucket-id/views/my-view-id"``. + For example: + + :: + + `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` """ name: str = proto.Field( @@ -913,7 +1206,9 @@ class GetSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name: str = proto.Field( @@ -936,8 +1231,9 @@ class CreateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" - Examples: ``"projects/my-logging-project"``, - ``"organizations/123456789"``. + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` sink (google.cloud.logging_v2.types.LogSink): Required. The new sink, whose ``name`` parameter is a sink identifier that is not already in use. @@ -946,9 +1242,10 @@ class CreateSinkRequest(proto.Message): ``writer_identity`` in the new sink. If this value is omitted or set to false, and if the sink's parent is a project, then the value returned as ``writer_identity`` is - the same group or service account used by Logging before the - addition of writer identities to this API. The sink's - destination must be in the same project as the sink itself. + the same group or service account used by Cloud Logging + before the addition of writer identities to this API. The + sink's destination must be in the same project as the sink + itself. 
If this field is set to true, or if the sink is owned by a non-project resource such as an organization, then the value @@ -988,7 +1285,9 @@ class UpdateSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` sink (google.cloud.logging_v2.types.LogSink): Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. @@ -1014,16 +1313,18 @@ class UpdateSinkRequest(proto.Message): and only if, it is in the update mask. ``name`` and output only fields cannot be updated. - An empty updateMask is temporarily treated as using the + An empty ``updateMask`` is temporarily treated as using the following mask for backwards compatibility purposes: - destination,filter,includeChildren At some point in the - future, behavior will be removed and specifying an empty - updateMask will be an error. + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed and + specifying an empty ``updateMask`` will be an error. For a detailed ``FieldMask`` definition, see https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask - Example: ``updateMask=filter``. + For example: ``updateMask=filter`` """ sink_name: str = proto.Field( @@ -1061,7 +1362,9 @@ class DeleteSinkRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" "folders/[FOLDER_ID]/sinks/[SINK_ID]" - Example: ``"projects/my-project-id/sinks/my-sink-id"``. + For example: + + ``"projects/my-project/sinks/my-sink"`` """ sink_name: str = proto.Field( @@ -1070,14 +1373,151 @@ class DeleteSinkRequest(proto.Message): ) +class CreateLinkRequest(proto.Message): + r"""The parameters to CreateLink. + + Attributes: + parent (str): + Required. The full resource name of the bucket to create a + link for. 
+ + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + link (google.cloud.logging_v2.types.Link): + Required. The new link. + link_id (str): + Required. The ID to use for the link. The link_id can have + up to 100 characters. A valid link_id must only have + alphanumeric characters and underscores within it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + link: 'Link' = proto.Field( + proto.MESSAGE, + number=2, + message='Link', + ) + link_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteLinkRequest(proto.Message): + r"""The parameters to DeleteLink. + + Attributes: + name (str): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLinksRequest(proto.Message): + r"""The parameters to ListLinks. + + Attributes: + parent (str): + Required. The parent resource whose links are to be listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + page_token (str): + Optional. 
If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. + page_size (int): + Optional. The maximum number of results to + return from this request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLinksResponse(proto.Message): + r"""The response from ListLinks. + + Attributes: + links (MutableSequence[google.cloud.logging_v2.types.Link]): + A list of links. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call the same method again using the + value of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + links: MutableSequence['Link'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Link', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLinkRequest(proto.Message): + r"""The parameters to GetLink. + + Attributes: + name (str): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + class LogExclusion(proto.Message): - r"""Specifies a set of log entries that are not to be stored in - Logging. If your GCP resource receives a large volume of logs, - you can use exclusions to reduce your chargeable logs. 
- Exclusions are processed after log sinks, so you can export log - entries before they are excluded. Note that organization-level - and folder-level exclusions don't apply to child resources, and - that you can't exclude audit log entries. + r"""Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of log entries, + you can use exclusions to reduce your chargeable logs. Note that + exclusions on organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify the \_Required + sink or exclude logs from it. Attributes: name (str): @@ -1095,10 +1535,11 @@ class LogExclusion(proto.Message): `sample function `__, you can exclude less than 100% of the matching log entries. + For example, the following query matches 99% of low-severity log entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity`__ for more information. @@ -1363,11 +1809,14 @@ class GetCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can be configured for Google + Cloud projects, folders, organizations and billing accounts. + Once configured for an organization, it applies to all + projects and folders in the Google Cloud organization. """ name: str = proto.Field( @@ -1380,7 +1829,7 @@ class UpdateCmekSettingsRequest(proto.Message): r"""The parameters to [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. 
@@ -1395,15 +1844,18 @@ class UpdateCmekSettingsRequest(proto.Message): "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" "folders/[FOLDER_ID]/cmekSettings" - Example: ``"organizations/12345/cmekSettings"``. + For example: + + ``"organizations/12345/cmekSettings"`` - Note: CMEK for the Logs Router can currently only be - configured for GCP organizations. Once configured, it - applies to all projects and folders in the GCP organization. + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. cmek_settings (google.cloud.logging_v2.types.CmekSettings): Required. The CMEK settings to update. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. update_mask (google.protobuf.field_mask_pb2.FieldMask): @@ -1415,7 +1867,7 @@ class UpdateCmekSettingsRequest(proto.Message): See [FieldMask][google.protobuf.FieldMask] for more information. - Example: ``"updateMask=kmsKeyName"`` + For example: ``"updateMask=kmsKeyName"`` """ name: str = proto.Field( @@ -1439,11 +1891,11 @@ class CmekSettings(proto.Message): associated with a project, folder, organization, billing account, or flexible resource. - Note: CMEK for the Logs Router can currently only be configured for - GCP organizations. Once configured, it applies to all projects and - folders in the GCP organization. + Note: CMEK for the Log Router can currently only be configured for + Google Cloud organizations. Once configured, it applies to all + projects and folders in the Google Cloud organization. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. @@ -1455,14 +1907,209 @@ class CmekSettings(proto.Message): The resource name for the configured Cloud KMS key. 
KMS key name format: - "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" For example: - ``"projects/my-project-id/locations/my-region/keyRings/key-ring-name/cryptoKeys/key-name"`` - To enable CMEK for the Logs Router, set this field to a - valid ``kms_key_name`` for which the associated service - account has the required + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required cloudkms.cryptoKeyEncrypterDecrypter roles + assigned for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name or + disabled by setting the key name to an empty string. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Log Router, set this field to an + empty string. + + See `Enabling CMEK for Log + Router `__ + for more information. + kms_key_version_name (str): + The CryptoKeyVersion resource name for the configured Cloud + KMS key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]/cryptoKeyVersions/[VERSION]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key/cryptoKeyVersions/1"`` + + This is a read-only field used to convey the specific + configured CryptoKeyVersion of ``kms_key`` that has been + configured. It will be populated in cases where the CMEK + settings are bound to a single key version. 
+ + If this field is populated, the ``kms_key`` is tied to a + specific CryptoKeyVersion. + service_account_id (str): + Output only. The service account that will be used by the + Log Router to access your Cloud KMS key. + + Before enabling CMEK for Log Router, you must first assign + the cloudkms.cryptoKeyEncrypterDecrypter role to the service + account that the Log Router will use to access your Cloud + KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Log + Router `__ + for more information. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_version_name: str = proto.Field( + proto.STRING, + number=4, + ) + service_account_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetSettingsRequest(proto.Message): + r"""The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing accounts. + Currently it can only be configured for organizations. Once + configured for an organization, it applies to all projects + and folders in the Google Cloud organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSettingsRequest(proto.Message): + r"""The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. 
+ + Attributes: + name (str): + Required. The resource name for the settings to update. + + :: + + "organizations/[ORGANIZATION_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be overwritten + if and only if it is in the update mask. Output only fields + cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'Settings' = proto.Field( + proto.MESSAGE, + number=2, + message='Settings', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class Settings(proto.Message): + r"""Describes the settings associated with a project, folder, + organization, billing account, or flexible resource. + + Attributes: + name (str): + Output only. The resource name of the + settings. + kms_key_name (str): + Optional. The resource name for the configured Cloud KMS + key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for the key. 
@@ -1474,26 +2121,38 @@ class CmekSettings(proto.Message): the time of encryption unless access to that key has been revoked. - To disable CMEK for the Logs Router, set this field to an + To disable CMEK for the Log Router, set this field to an empty string. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. - service_account_id (str): + kms_service_account_id (str): Output only. The service account that will be used by the - Logs Router to access your Cloud KMS key. + Log Router to access your Cloud KMS key. - Before enabling CMEK for Logs Router, you must first assign + Before enabling CMEK for Log Router, you must first assign the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to - the service account that the Logs Router will use to access + the service account that the Log Router will use to access your Cloud KMS key. Use - [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings] to obtain the service account ID. - See `Enabling CMEK for Logs + See `Enabling CMEK for Log Router `__ for more information. + storage_location (str): + Optional. The Cloud region that will be used for \_Default + and \_Required log buckets for newly created projects and + folders. For example ``europe-west1``. This setting does not + affect the location of custom log buckets. + disable_default_sink (bool): + Optional. If set to true, the \_Default sink in newly + created projects and folders will created in a disabled + state. This can be used to automatically disable log + ingestion if there is already an aggregated sink configured + in the hierarchy. The \_Default sink can be re-enabled + manually if needed. 
""" name: str = proto.Field( @@ -1504,9 +2163,254 @@ class CmekSettings(proto.Message): proto.STRING, number=2, ) - service_account_id: str = proto.Field( + kms_service_account_id: str = proto.Field( + proto.STRING, + number=3, + ) + storage_location: str = proto.Field( proto.STRING, + number=4, + ) + disable_default_sink: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CopyLogEntriesRequest(proto.Message): + r"""The parameters to CopyLogEntries. + + Attributes: + name (str): + Required. Log bucket from which to copy log entries. + + For example: + + ``"projects/my-project/locations/global/buckets/my-source-bucket"`` + filter (str): + Optional. A filter specifying which log + entries to copy. The filter must be no more than + 20k characters. An empty filter matches all log + entries. + destination (str): + Required. Destination to which to copy log + entries. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + destination: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CopyLogEntriesMetadata(proto.Message): + r"""Metadata for CopyLogEntries long running operations. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + cancellation_requested (bool): + Identifies whether the user has requested + cancellation of the operation. + request (google.cloud.logging_v2.types.CopyLogEntriesRequest): + CopyLogEntries RPC request. + progress (int): + Estimated progress of the operation (0 - + 100%). + writer_identity (str): + The IAM identity of a service account that must be granted + access to the destination. + + If the service account is not granted permission to the + destination within an hour, the operation will be cancelled. 
+ + For example: ``"serviceAccount:foo@bar.com"`` + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + cancellation_requested: bool = proto.Field( + proto.BOOL, + number=4, + ) + request: 'CopyLogEntriesRequest' = proto.Field( + proto.MESSAGE, + number=5, + message='CopyLogEntriesRequest', + ) + progress: int = proto.Field( + proto.INT32, + number=6, + ) + writer_identity: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CopyLogEntriesResponse(proto.Message): + r"""Response type for CopyLogEntries long running operations. + + Attributes: + log_entries_copied_count (int): + Number of log entries copied. + """ + + log_entries_copied_count: int = proto.Field( + proto.INT64, + number=1, + ) + + +class BucketMetadata(proto.Message): + r"""Metadata for LongRunningUpdateBucket Operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_bucket_request (google.cloud.logging_v2.types.CreateBucketRequest): + LongRunningCreateBucket RPC request. + + This field is a member of `oneof`_ ``request``. + update_bucket_request (google.cloud.logging_v2.types.UpdateBucketRequest): + LongRunningUpdateBucket RPC request. 
+ + This field is a member of `oneof`_ ``request``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, number=3, + enum='OperationState', + ) + create_bucket_request: 'CreateBucketRequest' = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='CreateBucketRequest', + ) + update_bucket_request: 'UpdateBucketRequest' = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='UpdateBucketRequest', + ) + + +class LinkMetadata(proto.Message): + r"""Metadata for long running Link operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_link_request (google.cloud.logging_v2.types.CreateLinkRequest): + CreateLink RPC request. + + This field is a member of `oneof`_ ``request``. + delete_link_request (google.cloud.logging_v2.types.DeleteLinkRequest): + DeleteLink RPC request. + + This field is a member of `oneof`_ ``request``. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + create_link_request: 'CreateLinkRequest' = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='CreateLinkRequest', + ) + delete_link_request: 'DeleteLinkRequest' = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='DeleteLinkRequest', + ) + + +class LocationMetadata(proto.Message): + r"""Cloud Logging specific location metadata. + + Attributes: + log_analytics_enabled (bool): + Indicates whether or not Log Analytics + features are supported in the given location. + """ + + log_analytics_enabled: bool = proto.Field( + proto.BOOL, + number=1, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 1259e001a861..9a485ee8f922 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -60,12 +60,12 @@ class LogMetric(proto.Message): forward-slash character (``/``) denotes a hierarchy of name pieces, and it cannot be the first character of the name. - The metric identifier in this field must not be - `URL-encoded `__. - However, when the metric identifier appears as the - ``[METRIC_ID]`` part of a ``metric_name`` API parameter, - then the metric identifier must be URL-encoded. Example: - ``"projects/my-project/metrics/nginx%2Frequests"``. 
+ This field is the ``[METRIC_ID]`` part of a metric resource + name in the format + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". Example: If the + resource name of a metric is + ``"projects/my-project/metrics/nginx%2Frequests"``, this + field's value is ``"nginx/requests"``. description (str): Optional. A description of this metric, which is used in documentation. The maximum length of @@ -80,6 +80,20 @@ class LogMetric(proto.Message): "resource.type=gae_app AND severity>=ERROR" The maximum length of the filter is 20000 characters. + bucket_name (str): + Optional. The resource name of the Log Bucket that owns the + Log Metric. Only Log Buckets in projects are supported. The + bucket has to be in the same project as the metric. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + If empty, then the Log Metric is considered a non-Bucket Log + Metric. + disabled (bool): + Optional. If set to True, then this metric is + disabled and it does not generate any points. metric_descriptor (google.api.metric_pb2.MetricDescriptor): Optional. The metric descriptor associated with the logs-based metric. If unspecified, it uses a default metric @@ -111,7 +125,7 @@ class LogMetric(proto.Message): distribution logs-based metric to extract the values to record from a log entry. Two functions are supported for value extraction: ``EXTRACT(field)`` or - ``REGEXP_EXTRACT(field, regex)``. The argument are: + ``REGEXP_EXTRACT(field, regex)``. The arguments are: 1. field: The name of the log entry field from which the value is to be extracted. @@ -140,7 +154,7 @@ class LogMetric(proto.Message): ``value_extractor`` field. The extracted value is converted to the type defined in the - label descriptor. If the either the extraction or the type + label descriptor. If either the extraction or the type conversion fails, the label will have a default value. 
The default value for a string label is an empty string, for an integer label its 0, and for a boolean label its ``false``. @@ -190,6 +204,14 @@ class ApiVersion(proto.Enum): proto.STRING, number=3, ) + bucket_name: str = proto.Field( + proto.STRING, + number=13, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=12, + ) metric_descriptor: metric_pb2.MetricDescriptor = proto.Field( proto.MESSAGE, number=5, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py new file mode 100755 index 000000000000..806e937ddae0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py new file mode 100755 index 000000000000..ca0209f00fcb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_copy_log_entries(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client.copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py new file mode 100755 index 000000000000..8fe42df3c81a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py new file mode 100755 index 000000000000..1ce698784552 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py new file mode 100755 index 000000000000..8ceb5298553a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py new file mode 100755 index 000000000000..604ff66269c8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client.create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateLink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py new file mode 100755 index 000000000000..8c7a934a735d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py new file mode 100755 index 000000000000..dfa59b30742b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_delete_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py new file mode 100755 index 000000000000..ddc3d131f4c7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py new file mode 100755 index 000000000000..3a7643b3a273 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_link(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetLink_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py new file mode 100755 index 000000000000..4ee968e8155d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py new file mode 100755 index 000000000000..a3e018440c2a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py new file mode 100755 index 000000000000..7eccffaa6bae --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py new file mode 100755 index 000000000000..a2f98d69d320 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListLinks_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_links(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListLinks_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py new file mode 100755 index 000000000000..7dde59dcdd4f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py new file mode 100755 index 000000000000..2ecaf8df26dd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_bucket_async(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py new file mode 100755 index 000000000000..b51dd81cc946 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py new file mode 100755 index 000000000000..1e7aefce8f6a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_settings(): + # Create a client + client = logging_v2.ConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client.update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index cf16dded69b9..b62675ba6439 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -19,19 +19,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.copy_log_entries", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucket" + "shortName": "CopyLogEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateBucketRequest" + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" }, { "name": "retry", @@ -46,22 +46,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "create_bucket" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "copy_log_entries" }, - "description": "Sample for CreateBucket", - "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "description": "Sample for CopyLogEntries", + "file": "logging_v2_generated_config_service_v2_copy_log_entries_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -76,17 +76,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 49, + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_copy_log_entries_async.py" }, { "canonical": true, @@ -95,19 +95,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.copy_log_entries", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucket" + "shortName": "CopyLogEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateBucketRequest" + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" }, { "name": "retry", @@ -122,22 +122,22 @@ "type": 
"Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "create_bucket" + "resultType": "google.api_core.operation.Operation", + "shortName": "copy_log_entries" }, - "description": "Sample for CreateBucket", - "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "description": "Sample for CopyLogEntries", + "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -152,17 +152,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 49, + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync.py" }, { "canonical": true, @@ -172,27 +172,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateExclusion" + "shortName": "CreateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateExclusionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "exclusion", - "type": 
"google.cloud.logging_v2.types.LogExclusion" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -207,14 +199,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "create_exclusion" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_bucket_async" }, - "description": "Sample for CreateExclusion", - "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", "segments": [ { "end": 56, @@ -232,13 +224,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 53, - "start": 51, + "start": 47, "type": "REQUEST_EXECUTION" }, { @@ -247,7 +239,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" }, { "canonical": true, @@ -256,27 +248,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateExclusion" + "shortName": "CreateBucketAsync" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.logging_v2.types.CreateExclusionRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "exclusion", - "type": "google.cloud.logging_v2.types.LogExclusion" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -291,14 +275,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "create_exclusion" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_bucket_async" }, - "description": "Sample for CreateExclusion", - "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", "segments": [ { "end": 56, @@ -316,13 +300,13 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { "end": 53, - "start": 51, + "start": 47, "type": "REQUEST_EXECUTION" }, { @@ -331,7 +315,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" }, { "canonical": true, @@ -341,27 +325,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": 
"CreateSink" + "shortName": "CreateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -376,22 +352,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -401,22 +377,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" }, { "canonical": true, @@ -425,27 +401,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + 
"fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateSink" + "shortName": "CreateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -460,22 +428,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "create_sink" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, - "description": "Sample for CreateSink", - "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { - "end": 56, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 52, "start": 27, "type": "SHORT" }, @@ -485,22 +453,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" }, { "canonical": true, @@ -510,19 +478,27 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "CreateExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" }, { "name": "retry", @@ -537,22 +513,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "description": "Sample for CreateExclusion", + "file": "logging_v2_generated_config_service_v2_create_exclusion_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -562,22 +538,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_async.py" + "title": 
"logging_v2_generated_config_service_v2_create_exclusion_async.py" }, { "canonical": true, @@ -586,19 +562,27 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateView" + "shortName": "CreateExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" }, { "name": "retry", @@ -613,22 +597,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "create_view" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "create_exclusion" }, - "description": "Sample for CreateView", - "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "description": "Sample for CreateExclusion", + "file": "logging_v2_generated_config_service_v2_create_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -638,22 +622,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 53, + "start": 51, 
"type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + "title": "logging_v2_generated_config_service_v2_create_exclusion_sync.py" }, { "canonical": true, @@ -663,9 +647,581 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_link" + }, + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_link" + }, + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", + "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "create_sink" + }, + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_sink_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "create_view" + }, + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_CreateView_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" @@ -675,7 +1231,478 @@ "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" + }, + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_exclusion" + }, + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_link" + }, + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async", + "segments": [ + { + "end": 55, + "start": 
27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_link" + }, + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"logging_v2_generated_config_service_v2_delete_link_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_sink" + }, + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -690,13 +1717,13 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_bucket" + "shortName": "delete_sink" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", "segments": [ { "end": 49, @@ -727,7 +1754,81 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" + }, + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_async.py", 
+ "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_view_async.py" }, { "canonical": true, @@ -736,19 +1837,169 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_view" + }, + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" + }, + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + "shortName": "ConfigServiceV2Client" + }, + "fullName": 
"google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -763,21 +2014,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_bucket" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -792,15 +2044,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { "canonical": true, @@ -810,23 +2064,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": 
"google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -841,21 +2091,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_exclusion" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_async.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -870,15 +2121,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" }, { "canonical": true, @@ -887,23 +2140,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", + "fullName": 
"google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteExclusion" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -918,21 +2167,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_exclusion" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "get_cmek_settings" }, - "description": "Sample for DeleteExclusion", - "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -947,15 +2197,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" }, { "canonical": true, @@ -965,22 +2217,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": 
"google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" }, { - "name": "sink_name", + "name": "name", "type": "str" }, { @@ -996,21 +2248,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_async.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1025,15 +2278,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_async.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" }, { "canonical": true, @@ -1042,22 +2297,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteSink" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" }, { - "name": "sink_name", + "name": "name", "type": "str" }, { @@ -1073,21 +2328,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_sink" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "get_exclusion" }, - "description": "Sample for DeleteSink", - "file": "logging_v2_generated_config_service_v2_delete_sink_sync.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1102,15 +2358,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" }, { "canonical": true, @@ -1120,19 +2378,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - 
"shortName": "DeleteView" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1147,21 +2409,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_async.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1176,15 +2439,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_async.py" + "title": "logging_v2_generated_config_service_v2_get_link_async.py" }, { "canonical": true, @@ -1193,19 +2458,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.delete_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteView" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1220,21 +2489,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_view" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "get_link" }, - "description": "Sample for DeleteView", - "file": "logging_v2_generated_config_service_v2_delete_view_sync.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_sync", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1249,15 +2519,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_view_sync.py" + "title": "logging_v2_generated_config_service_v2_get_link_sync.py" }, { "canonical": true, @@ -1267,19 +2539,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, 
+ { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1294,14 +2570,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "get_settings" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async", "segments": [ { "end": 51, @@ -1334,7 +2610,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_get_settings_async.py" }, { "canonical": true, @@ -1343,19 +2619,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1370,14 +2650,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": 
"get_settings" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync", "segments": [ { "end": 51, @@ -1410,7 +2690,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_get_settings_sync.py" }, { "canonical": true, @@ -1420,19 +2700,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -1447,14 +2731,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_async.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", "segments": [ { "end": 51, @@ -1487,7 +2771,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async.py" + "title": "logging_v2_generated_config_service_v2_get_sink_async.py" }, { "canonical": true, @@ -1496,19 +2780,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_cmek_settings", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetCmekSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -1523,14 +2811,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "get_cmek_settings" - }, - "description": "Sample for GetCmekSettings", - "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py", + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "get_sink" + }, + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", "segments": [ { "end": 51, @@ -1563,7 +2851,7 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync.py" + "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" }, { "canonical": true, @@ -1573,23 +2861,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -1604,14 +2888,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_async.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", "segments": [ { "end": 51, @@ -1644,7 +2928,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_async.py" + "title": "logging_v2_generated_config_service_v2_get_view_async.py" }, { "canonical": true, @@ -1653,23 +2937,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", 
"shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_exclusion", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetExclusion" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -1684,14 +2964,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "get_exclusion" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "get_view" }, - "description": "Sample for GetExclusion", - "file": "logging_v2_generated_config_service_v2_get_exclusion_sync.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", "segments": [ { "end": 51, @@ -1724,7 +3004,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_exclusion_sync.py" + "title": "logging_v2_generated_config_service_v2_get_view_sync.py" }, { "canonical": true, @@ -1734,22 +3014,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": 
"google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" }, { - "name": "sink_name", + "name": "parent", "type": "str" }, { @@ -1765,22 +3045,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_async.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1800,12 +3080,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_async.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { "canonical": true, @@ -1814,22 +3094,22 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_sink", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": 
"google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetSink" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" }, { - "name": "sink_name", + "name": "parent", "type": "str" }, { @@ -1845,22 +3125,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "get_sink" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, - "description": "Sample for GetSink", - "file": "logging_v2_generated_config_service_v2_get_sink_sync.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1880,12 +3160,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_sink_sync.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { "canonical": true, @@ -1895,19 +3175,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - 
"shortName": "GetView" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -1922,22 +3206,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "list_exclusions" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_async.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1957,12 +3241,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_async.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" }, { "canonical": true, @@ -1971,19 +3255,23 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.get_view", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetView" + "shortName": "ListExclusions" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -1998,22 +3286,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "list_exclusions" }, - "description": "Sample for GetView", - "file": "logging_v2_generated_config_service_v2_get_view_sync.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2033,12 +3321,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_view_sync.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" }, { "canonical": true, @@ -2048,19 +3336,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2079,14 +3367,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", + "shortName": "list_links" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async", "segments": [ { "end": 52, @@ -2119,7 +3407,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" + "title": "logging_v2_generated_config_service_v2_list_links_async.py" }, { "canonical": true, @@ -2128,19 +3416,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_buckets", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { "name": "parent", @@ -2159,14 +3447,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": 
"google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", - "shortName": "list_buckets" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", + "shortName": "list_links" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_sync", "segments": [ { "end": 52, @@ -2199,7 +3487,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" + "title": "logging_v2_generated_config_service_v2_list_links_sync.py" }, { "canonical": true, @@ -2209,19 +3497,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListSinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListSinksRequest" }, { "name": "parent", @@ -2240,14 +3528,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", + "shortName": 
"list_sinks" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_async.py", + "description": "Sample for ListSinks", + "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", "segments": [ { "end": 52, @@ -2280,7 +3568,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_async.py" + "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" }, { "canonical": true, @@ -2289,19 +3577,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_exclusions", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListExclusions" + "shortName": "ListSinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListSinksRequest" }, { "name": "parent", @@ -2320,14 +3608,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", - "shortName": "list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", + "shortName": "list_sinks" }, - "description": "Sample for ListExclusions", - "file": "logging_v2_generated_config_service_v2_list_exclusions_sync.py", + "description": "Sample for ListSinks", + "file": 
"logging_v2_generated_config_service_v2_list_sinks_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", "segments": [ { "end": 52, @@ -2360,7 +3648,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_exclusions_sync.py" + "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" }, { "canonical": true, @@ -2370,19 +3658,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_sinks", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListSinks" + "shortName": "ListViews" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListSinksRequest" + "type": "google.cloud.logging_v2.types.ListViewsRequest" }, { "name": "parent", @@ -2401,14 +3689,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", - "shortName": "list_sinks" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", + "shortName": "list_views" }, - "description": "Sample for ListSinks", - "file": "logging_v2_generated_config_service_v2_list_sinks_async.py", + "description": "Sample for ListViews", + "file": "logging_v2_generated_config_service_v2_list_views_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListViews_async", "segments": [ { "end": 52, @@ -2441,7 +3729,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_sinks_async.py" + "title": "logging_v2_generated_config_service_v2_list_views_async.py" }, { "canonical": true, @@ -2450,19 +3738,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_sinks", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListSinks" + "shortName": "ListViews" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListSinksRequest" + "type": "google.cloud.logging_v2.types.ListViewsRequest" }, { "name": "parent", @@ -2481,14 +3769,14 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", - "shortName": "list_sinks" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", + "shortName": "list_views" }, - "description": "Sample for ListSinks", - "file": "logging_v2_generated_config_service_v2_list_sinks_sync.py", + "description": "Sample for ListViews", + "file": "logging_v2_generated_config_service_v2_list_views_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", "segments": [ { "end": 52, @@ -2521,7 +3809,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_sinks_sync.py" + "title": "logging_v2_generated_config_service_v2_list_views_sync.py" }, { 
"canonical": true, @@ -2531,23 +3819,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.list_views", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListViews", + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListViews" + "shortName": "UndeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListViewsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" }, { "name": "retry", @@ -2562,22 +3846,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", - "shortName": "list_views" + "shortName": "undelete_bucket" }, - "description": "Sample for ListViews", - "file": "logging_v2_generated_config_service_v2_list_views_async.py", + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2592,17 +3875,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_views_async.py" + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" }, { "canonical": true, 
@@ -2611,23 +3892,19 @@ "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.list_views", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListViews", + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListViews" + "shortName": "UndeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListViewsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" }, { "name": "retry", @@ -2642,22 +3919,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", - "shortName": "list_views" + "shortName": "undelete_bucket" }, - "description": "Sample for ListViews", - "file": "logging_v2_generated_config_service_v2_list_views_sync.py", + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2672,17 +3948,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_views_sync.py" + "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, { "canonical": true, @@ -2692,19 +3966,19 @@ "fullName": 
"google.cloud.logging_v2.ConfigServiceV2AsyncClient", "shortName": "ConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.undelete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UndeleteBucket" + "shortName": "UpdateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -2719,21 +3993,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "undelete_bucket" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_bucket_async" }, - "description": "Sample for UndeleteBucket", - "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2748,15 +4023,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" }, { "canonical": true, @@ -2765,19 +4042,19 @@ "fullName": 
"google.cloud.logging_v2.ConfigServiceV2Client", "shortName": "ConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.undelete_bucket", + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UndeleteBucket" + "shortName": "UpdateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -2792,21 +4069,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "undelete_bucket" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_bucket_async" }, - "description": "Sample for UndeleteBucket", - "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -2821,15 +4099,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" }, { "canonical": true, @@ -3314,6 +4594,175 @@ ], "title": 
"logging_v2_generated_config_service_v2_update_exclusion_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient", + "shortName": "ConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2AsyncClient.update_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" + }, + "description": "Sample for UpdateSettings", + "file": "logging_v2_generated_config_service_v2_update_settings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_settings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client", + 
"shortName": "ConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.ConfigServiceV2Client.update_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "update_settings" + }, + "description": "Sample for UpdateSettings", + "file": "logging_v2_generated_config_service_v2_update_settings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_settings_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index a8da3c59cb52..1654590d1074 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -39,13 +39,17 @@ def partition( class loggingCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'copy_log_entries': ('name', 'destination', 'filter', ), 'create_bucket': ('parent', 'bucket_id', 'bucket', ), + 'create_bucket_async': ('parent', 'bucket_id', 'bucket', ), 'create_exclusion': ('parent', 'exclusion', ), + 'create_link': ('parent', 'link', 'link_id', ), 'create_log_metric': ('parent', 'metric', ), 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), 'create_view': ('parent', 'view_id', 'view', ), 'delete_bucket': ('name', ), 'delete_exclusion': ('name', ), + 'delete_link': ('name', ), 'delete_log': ('log_name', ), 'delete_log_metric': ('metric_name', ), 'delete_sink': ('sink_name', ), @@ -53,23 +57,28 @@ class loggingCallTransformer(cst.CSTTransformer): 'get_bucket': ('name', ), 'get_cmek_settings': ('name', ), 'get_exclusion': ('name', ), + 'get_link': ('name', ), 'get_log_metric': ('metric_name', ), + 'get_settings': ('name', ), 'get_sink': ('sink_name', ), 'get_view': ('name', ), 'list_buckets': ('parent', 'page_token', 'page_size', ), 'list_exclusions': ('parent', 'page_token', 'page_size', ), + 'list_links': ('parent', 'page_token', 'page_size', ), 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), 'list_log_metrics': ('parent', 'page_token', 'page_size', ), - 'list_logs': ('parent', 'page_size', 'page_token', 'resource_names', ), + 'list_logs': ('parent', 'resource_names', 'page_size', 'page_token', ), 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), 'list_sinks': ('parent', 'page_token', 'page_size', ), 'list_views': ('parent', 'page_token', 'page_size', ), 'tail_log_entries': 
('resource_names', 'filter', 'buffer_window', ), 'undelete_bucket': ('name', ), 'update_bucket': ('name', 'bucket', 'update_mask', ), + 'update_bucket_async': ('name', 'bucket', 'update_mask', ), 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), 'update_exclusion': ('name', 'exclusion', 'update_mask', ), 'update_log_metric': ('metric_name', 'metric', ), + 'update_settings': ('name', 'settings', 'update_mask', ), 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), 'update_view': ('name', 'view', 'update_mask', ), 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index ad8daf514f55..289a13b94d31 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -23,23 +23,20 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions +from google.api_core import future from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from 
google.api_core import operations_v1 from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError @@ -48,7 +45,9 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore import google.auth @@ -83,7 +82,6 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), - (ConfigServiceV2Client, "rest"), ]) def test_config_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -96,16 +94,12 @@ def test_config_service_v2_client_from_service_account_info(client_class, transp assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.ConfigServiceV2GrpcTransport, "grpc"), (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.ConfigServiceV2RestTransport, "rest"), ]) def test_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -122,7 +116,6 @@ def test_config_service_v2_client_service_account_always_use_jwt(transport_class @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), - 
(ConfigServiceV2Client, "rest"), ]) def test_config_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -138,9 +131,6 @@ def test_config_service_v2_client_from_service_account_file(client_class, transp assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @@ -148,7 +138,6 @@ def test_config_service_v2_client_get_transport_class(): transport = ConfigServiceV2Client.get_transport_class() available_transports = [ transports.ConfigServiceV2GrpcTransport, - transports.ConfigServiceV2RestTransport, ] assert transport in available_transports @@ -159,7 +148,6 @@ def test_config_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest"), ]) @mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) @mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) @@ -279,8 +267,6 @@ def test_config_service_v2_client_client_options(client_class, transport_class, (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", "true"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", 
modify_default_endpoint(ConfigServiceV2Client)) @mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) @@ -418,7 +404,6 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest"), ]) def test_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -443,7 +428,6 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (ConfigServiceV2Client, transports.ConfigServiceV2RestTransport, "rest", None), ]) def test_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -993,6 +977,8 @@ def test_get_bucket(request_type, transport: str = 'grpc'): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], ) response = client.get_bucket(request) @@ -1008,6 +994,8 @@ def test_get_bucket(request_type, transport: str = 'grpc'): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] def test_get_bucket_empty_call(): @@ -1049,6 +1037,8 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], )) response = await client.get_bucket(request) @@ -1064,6 +1054,8 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] @pytest.mark.asyncio @@ -1134,6 +1126,298 @@ async def test_get_bucket_field_headers_async(): ) in kw['metadata'] +@pytest.mark.parametrize("request_type", [ + logging_config.CreateBucketRequest, + dict, +]) +def test_create_bucket_async(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_bucket_async_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + client.create_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + +@pytest.mark.asyncio +async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_bucket_async_async_from_dict(): + await test_create_bucket_async_async(request_type=dict) + + +def test_create_bucket_async_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_bucket_async_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.CreateBucketRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateBucketRequest, + dict, +]) +def test_update_bucket_async(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_bucket_async_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + client.update_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + +@pytest.mark.asyncio +async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_bucket_async_async_from_dict(): + await test_update_bucket_async_async(request_type=dict) + + +def test_update_bucket_async_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_bucket_async_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + @pytest.mark.parametrize("request_type", [ logging_config.CreateBucketRequest, dict, @@ -1159,6 +1443,8 @@ def test_create_bucket(request_type, transport: str = 'grpc'): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], ) response = client.create_bucket(request) @@ -1174,6 +1460,8 @@ def test_create_bucket(request_type, transport: str = 'grpc'): assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] def test_create_bucket_empty_call(): @@ -1215,6 +1503,8 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], )) response = await client.create_bucket(request) @@ -1230,6 +1520,8 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] @pytest.mark.asyncio @@ -1325,6 +1617,8 @@ def test_update_bucket(request_type, transport: str = 'grpc'): retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], ) response = client.update_bucket(request) @@ -1340,6 +1634,8 @@ def test_update_bucket(request_type, transport: str = 'grpc'): assert response.retention_days == 1512 assert response.locked 
is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] def test_update_bucket_empty_call(): @@ -1381,6 +1677,8 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type retention_days=1512, locked=True, lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], )) response = await client.update_bucket(request) @@ -1396,6 +1694,8 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type assert response.retention_days == 1512 assert response.locked is True assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] @pytest.mark.asyncio @@ -4257,10 +4557,10 @@ async def test_delete_sink_flattened_error_async(): @pytest.mark.parametrize("request_type", [ - logging_config.ListExclusionsRequest, + logging_config.CreateLinkRequest, dict, ]) -def test_list_exclusions(request_type, transport: str = 'grpc'): +def test_create_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4272,25 +4572,22 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_exclusions(request) + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.CreateLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, future.Future) -def test_list_exclusions_empty_call(): +def test_create_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4300,15 +4597,15 @@ def test_list_exclusions_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: - client.list_exclusions() + client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.CreateLinkRequest() @pytest.mark.asyncio -async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): +async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4320,46 +4617,45 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_exclusions(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + assert args[0] == logging_config.CreateLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_exclusions_async_from_dict(): - await test_list_exclusions_async(request_type=dict) +async def test_create_link_async_from_dict(): + await test_create_link_async(request_type=dict) -def test_list_exclusions_field_headers(): +def test_create_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.CreateLinkRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: - call.return_value = logging_config.ListExclusionsResponse() - client.list_exclusions(request) + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4375,23 +4671,23 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio -async def test_list_exclusions_field_headers_async(): +async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.ListExclusionsRequest() + request = logging_config.CreateLinkRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) - await client.list_exclusions(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4406,21 +4702,23 @@ async def test_list_exclusions_field_headers_async(): ) in kw['metadata'] -def test_list_exclusions_flattened(): +def test_create_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.ListExclusionsResponse() + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_exclusions( + client.create_link( parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) # Establish that the underlying call was made with the expected @@ -4430,9 +4728,15 @@ def test_list_exclusions_flattened(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name='name_value') + assert arg == mock_val + arg = args[0].link_id + mock_val = 'link_id_value' + assert arg == mock_val -def test_list_exclusions_flattened_error(): +def test_create_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4440,29 +4744,35 @@ def test_list_exclusions_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_exclusions( - logging_config.ListExclusionsRequest(), + client.create_link( + logging_config.CreateLinkRequest(), parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) @pytest.mark.asyncio -async def test_list_exclusions_flattened_async(): +async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_exclusions), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.ListExclusionsResponse() + call.return_value = operations_pb2.Operation(name='operations/op') - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_exclusions( + response = await client.create_link( parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) # Establish that the underlying call was made with the expected @@ -4472,9 +4782,15 @@ async def test_list_exclusions_flattened_async(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name='name_value') + assert arg == mock_val + arg = args[0].link_id + mock_val = 'link_id_value' + assert arg == mock_val @pytest.mark.asyncio -async def test_list_exclusions_flattened_error_async(): +async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4482,209 +4798,19 @@ async def test_list_exclusions_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.list_exclusions( - logging_config.ListExclusionsRequest(), + await client.create_link( + logging_config.CreateLinkRequest(), parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', ) -def test_list_exclusions_pager(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_exclusions(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) -def test_list_exclusions_pages(transport_name: str = "grpc"): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = list(client.list_exclusions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_exclusions_async_pager(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_exclusions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_exclusions_async_pages(): - client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - RuntimeError, - ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_exclusions(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - @pytest.mark.parametrize("request_type", [ - logging_config.GetExclusionRequest, + logging_config.DeleteLinkRequest, dict, ]) -def test_get_exclusion(request_type, transport: str = 'grpc'): +def test_delete_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4696,31 +4822,22 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) - response = client.get_exclusion(request) + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, future.Future) -def test_get_exclusion_empty_call(): +def test_delete_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4730,15 +4847,15 @@ def test_get_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: - client.get_exclusion() + client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() @pytest.mark.asyncio -async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): +async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4750,52 +4867,45 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - response = await client.get_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + assert args[0] == logging_config.DeleteLinkRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_get_exclusion_async_from_dict(): - await test_get_exclusion_async(request_type=dict) +async def test_delete_link_async_from_dict(): + await test_delete_link_async(request_type=dict) -def test_get_exclusion_field_headers(): +def test_delete_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.DeleteLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.get_exclusion(request) + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4811,23 +4921,23 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio -async def test_get_exclusion_field_headers_async(): +async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetExclusionRequest() + request = logging_config.DeleteLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.get_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4842,20 +4952,20 @@ async def test_get_exclusion_field_headers_async(): ) in kw['metadata'] -def test_get_exclusion_flattened(): +def test_delete_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name='operations/op') # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_exclusion( + client.delete_link( name='name_value', ) @@ -4868,7 +4978,7 @@ def test_get_exclusion_flattened(): assert arg == mock_val -def test_get_exclusion_flattened_error(): +def test_delete_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4876,28 +4986,30 @@ def test_get_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), + client.delete_link( + logging_config.DeleteLinkRequest(), name='name_value', ) @pytest.mark.asyncio -async def test_get_exclusion_flattened_async(): +async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_exclusion), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion() + call.return_value = operations_pb2.Operation(name='operations/op') - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_exclusion( + response = await client.delete_link( name='name_value', ) @@ -4910,7 +5022,7 @@ async def test_get_exclusion_flattened_async(): assert arg == mock_val @pytest.mark.asyncio -async def test_get_exclusion_flattened_error_async(): +async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4918,17 +5030,17 @@ async def test_get_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_exclusion( - logging_config.GetExclusionRequest(), + await client.delete_link( + logging_config.DeleteLinkRequest(), name='name_value', ) @pytest.mark.parametrize("request_type", [ - logging_config.CreateExclusionRequest, + logging_config.ListLinksRequest, dict, ]) -def test_create_exclusion(request_type, transport: str = 'grpc'): +def test_list_links(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4940,31 +5052,25 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + call.return_value = logging_config.ListLinksResponse( + next_page_token='next_page_token_value', ) - response = client.create_exclusion(request) + response = client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, pagers.ListLinksPager) + assert response.next_page_token == 'next_page_token_value' -def test_create_exclusion_empty_call(): +def test_list_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -4974,15 +5080,15 @@ def test_create_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: - client.create_exclusion() + client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() @pytest.mark.asyncio -async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): +async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4994,52 +5100,46 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( + next_page_token='next_page_token_value', )) - response = await client.create_exclusion(request) + response = await client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + assert args[0] == logging_config.ListLinksRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert isinstance(response, pagers.ListLinksAsyncPager) + assert response.next_page_token == 'next_page_token_value' @pytest.mark.asyncio -async def test_create_exclusion_async_from_dict(): - await test_create_exclusion_async(request_type=dict) +async def test_list_links_async_from_dict(): + await test_list_links_async(request_type=dict) -def test_create_exclusion_field_headers(): +def test_list_links_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + request = logging_config.ListLinksRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.create_exclusion(request) + call.return_value = logging_config.ListLinksResponse() + client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5055,23 +5155,23 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio -async def test_create_exclusion_field_headers_async(): +async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.CreateExclusionRequest() + request = logging_config.ListLinksRequest() request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.create_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + await client.list_links(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5086,22 +5186,21 @@ async def test_create_exclusion_field_headers_async(): ) in kw['metadata'] -def test_create_exclusion_flattened(): +def test_list_links_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.ListLinksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_exclusion( + client.list_links( parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) # Establish that the underlying call was made with the expected @@ -5111,12 +5210,9 @@ def test_create_exclusion_flattened(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val -def test_create_exclusion_flattened_error(): +def test_list_links_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5124,31 +5220,29 @@ def test_create_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), + client.list_links( + logging_config.ListLinksRequest(), parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) @pytest.mark.asyncio -async def test_create_exclusion_flattened_async(): +async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_exclusion), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.ListLinksResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_exclusion( + response = await client.list_links( parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) # Establish that the underlying call was made with the expected @@ -5158,12 +5252,9 @@ async def test_create_exclusion_flattened_async(): arg = args[0].parent mock_val = 'parent_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val @pytest.mark.asyncio -async def test_create_exclusion_flattened_error_async(): +async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5171,18 +5262,209 @@ async def test_create_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_exclusion( - logging_config.CreateExclusionRequest(), + await client.list_links( + logging_config.ListLinksRequest(), parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), ) +def test_list_links_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_links(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.Link) + for i in results) +def test_list_links_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = list(client.list_links(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_links_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_links(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.Link) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_links_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + @pytest.mark.parametrize("request_type", [ - logging_config.UpdateExclusionRequest, + logging_config.GetLinkRequest, dict, ]) -def test_update_exclusion(request_type, transport: str = 'grpc'): +def test_get_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5194,31 +5476,29 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.LogExclusion( + call.return_value = logging_config.Link( name='name_value', description='description_value', - filter='filter_value', - disabled=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, ) - response = client.update_exclusion(request) + response = client.get_link(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.GetLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.Link) assert response.name == 'name_value' assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_update_exclusion_empty_call(): +def test_get_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5228,15 +5508,15 @@ def test_update_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: - client.update_exclusion() + client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.GetLinkRequest() @pytest.mark.asyncio -async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): +async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5248,52 +5528,50 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( name='name_value', description='description_value', - filter='filter_value', - disabled=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, )) - response = await client.update_exclusion(request) + response = await client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + assert args[0] == logging_config.GetLinkRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.Link) assert response.name == 'name_value' assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE @pytest.mark.asyncio -async def test_update_exclusion_async_from_dict(): - await test_update_exclusion_async(request_type=dict) +async def test_get_link_async_from_dict(): + await test_get_link_async(request_type=dict) -def test_update_exclusion_field_headers(): +def test_get_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + request = logging_config.GetLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: - call.return_value = logging_config.LogExclusion() - client.update_exclusion(request) + call.return_value = logging_config.Link() + client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5309,23 +5587,23 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio -async def test_update_exclusion_field_headers_async(): +async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateExclusionRequest() + request = logging_config.GetLinkRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) - await client.update_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + await client.get_link(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5340,23 +5618,21 @@ async def test_update_exclusion_field_headers_async(): ) in kw['metadata'] -def test_update_exclusion_flattened(): +def test_get_link_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.Link() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.update_exclusion( + client.get_link( name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) # Establish that the underlying call was made with the expected @@ -5366,15 +5642,9 @@ def test_update_exclusion_flattened(): arg = args[0].name mock_val = 'name_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val -def test_update_exclusion_flattened_error(): +def test_get_link_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5382,33 +5652,29 @@ def test_update_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), + client.get_link( + logging_config.GetLinkRequest(), name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) @pytest.mark.asyncio -async def test_update_exclusion_flattened_async(): +async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_exclusion), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value = logging_config.LogExclusion() + call.return_value = logging_config.Link() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.update_exclusion( + response = await client.get_link( name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) # Establish that the underlying call was made with the expected @@ -5418,15 +5684,9 @@ async def test_update_exclusion_flattened_async(): arg = args[0].name mock_val = 'name_value' assert arg == mock_val - arg = args[0].exclusion - mock_val = logging_config.LogExclusion(name='name_value') - assert arg == mock_val - arg = args[0].update_mask - mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) - assert arg == mock_val @pytest.mark.asyncio -async def test_update_exclusion_flattened_error_async(): +async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5434,19 +5694,17 @@ async def test_update_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.update_exclusion( - logging_config.UpdateExclusionRequest(), + await client.get_link( + logging_config.GetLinkRequest(), name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) @pytest.mark.parametrize("request_type", [ - logging_config.DeleteExclusionRequest, + logging_config.ListExclusionsRequest, dict, ]) -def test_delete_exclusion(request_type, transport: str = 'grpc'): +def test_list_exclusions(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5458,22 +5716,25 @@ def test_delete_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_exclusion(request) + call.return_value = logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, pagers.ListExclusionsPager) + assert response.next_page_token == 'next_page_token_value' -def test_delete_exclusion_empty_call(): +def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( @@ -5483,15 +5744,15 @@ def test_delete_exclusion_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: - client.delete_exclusion() + client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListExclusionsRequest() @pytest.mark.asyncio -async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): +async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5503,43 +5764,46 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion(request) + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + assert args[0] == logging_config.ListExclusionsRequest() # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, pagers.ListExclusionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' @pytest.mark.asyncio -async def test_delete_exclusion_async_from_dict(): - await test_delete_exclusion_async(request_type=dict) +async def test_list_exclusions_async_from_dict(): + await test_list_exclusions_async(request_type=dict) -def test_delete_exclusion_field_headers(): +def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.DeleteExclusionRequest() + request = logging_config.ListExclusionsRequest() - request.name = 'name_value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: - call.return_value = None - client.delete_exclusion(request) + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5550,28 +5814,28 @@ def test_delete_exclusion_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] @pytest.mark.asyncio -async def test_delete_exclusion_field_headers_async(): +async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = logging_config.DeleteExclusionRequest() + request = logging_config.ListExclusionsRequest() - request.name = 'name_value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5582,37 +5846,37 @@ async def test_delete_exclusion_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] -def test_delete_exclusion_flattened(): +def test_list_exclusions_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.ListExclusionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_exclusion( - name='name_value', + client.list_exclusions( + parent='parent_value', ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' + arg = args[0].parent + mock_val = 'parent_value' assert arg == mock_val -def test_delete_exclusion_flattened_error(): +def test_list_exclusions_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5620,41 +5884,41 @@ def test_delete_exclusion_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name='name_value', + client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', ) @pytest.mark.asyncio -async def test_delete_exclusion_flattened_async(): +async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.delete_exclusion), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = None + call.return_value = logging_config.ListExclusionsResponse() - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_exclusion( - name='name_value', + response = await client.list_exclusions( + parent='parent_value', ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' + arg = args[0].parent + mock_val = 'parent_value' assert arg == mock_val @pytest.mark.asyncio -async def test_delete_exclusion_flattened_error_async(): +async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5662,69 +5926,263 @@ async def test_delete_exclusion_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.delete_exclusion( - logging_config.DeleteExclusionRequest(), - name='name_value', + await client.list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', ) -@pytest.mark.parametrize("request_type", [ - logging_config.GetCmekSettingsRequest, - dict, -]) -def test_get_cmek_settings(request_type, transport: str = 'grpc'): +def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.list_exclusions), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, ) - response = client.get_cmek_settings(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_exclusions(request={}) + assert pager._metadata == metadata -def test_get_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in results) +def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.list_exclusions), '__call__') as call: - client.get_cmek_settings() + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = list(client.list_exclusions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_exclusions_async_pager(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_exclusions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_exclusions_async_pages(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_exclusions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetExclusionRequest, + dict, +]) +def test_get_exclusion(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test_get_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + assert args[0] == logging_config.GetExclusionRequest() @pytest.mark.asyncio -async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): +async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5736,50 +6194,52 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', + description='description_value', + filter='filter_value', + disabled=True, )) - response = await client.get_cmek_settings(request) + response = await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + assert args[0] == logging_config.GetExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.LogExclusion) assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True @pytest.mark.asyncio -async def test_get_cmek_settings_async_from_dict(): - await test_get_cmek_settings_async(request_type=dict) +async def test_get_exclusion_async_from_dict(): + await test_get_exclusion_async(request_type=dict) -def test_get_cmek_settings_field_headers(): +def test_get_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() + request = logging_config.GetExclusionRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.get_exclusion), '__call__') as call: - call.return_value = logging_config.CmekSettings() - client.get_cmek_settings(request) + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5795,23 +6255,23 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio -async def test_get_cmek_settings_field_headers_async(): +async def test_get_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.GetCmekSettingsRequest() + request = logging_config.GetExclusionRequest() request.name = 'name_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_cmek_settings), + type(client.transport.get_exclusion), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) - await client.get_cmek_settings(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) @@ -5826,45 +6286,129 @@ async def test_get_cmek_settings_field_headers_async(): ) in kw['metadata'] -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateCmekSettingsRequest, - dict, -]) -def test_update_cmek_settings(request_type, transport: str = 'grpc'): +def test_get_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value = logging_config.CmekSettings( + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_exclusion( name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', ) - response = client.update_cmek_settings(request) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_exclusion_flattened_error(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateExclusionRequest, + dict, +]) +def test_create_exclusion(request_type, transport: str = 'grpc'): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.LogExclusion) assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True -def test_update_cmek_settings_empty_call(): +def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( @@ -5874,15 +6418,15 @@ def test_update_cmek_settings_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: - client.update_cmek_settings() + client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + assert args[0] == logging_config.CreateExclusionRequest() @pytest.mark.asyncio -async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): +async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5894,50 +6438,52 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', + description='description_value', + filter='filter_value', + disabled=True, )) - response = await client.update_cmek_settings(request) + response = await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + assert args[0] == logging_config.CreateExclusionRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.LogExclusion) assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True @pytest.mark.asyncio -async def test_update_cmek_settings_async_from_dict(): - await test_update_cmek_settings_async(request_type=dict) +async def test_create_exclusion_async_from_dict(): + await test_create_exclusion_async(request_type=dict) -def test_update_cmek_settings_field_headers(): +def test_create_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() + request = logging_config.CreateExclusionRequest() - request.name = 'name_value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: - call.return_value = logging_config.CmekSettings() - client.update_cmek_settings(request) + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -5948,28 +6494,28 @@ def test_update_cmek_settings_field_headers(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] @pytest.mark.asyncio -async def test_update_cmek_settings_field_headers_async(): +async def test_create_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = logging_config.UpdateCmekSettingsRequest() + request = logging_config.CreateExclusionRequest() - request.name = 'name_value' + request.parent = 'parent_value' # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.update_cmek_settings), + type(client.transport.create_exclusion), '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) - await client.update_cmek_settings(request) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5980,5125 +6526,1503 @@ async def test_update_cmek_settings_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( 'x-goog-request-params', - 'name=name_value', + 'parent=parent_value', ) in kw['metadata'] -@pytest.mark.parametrize("request_type", [ - logging_config.ListBucketsRequest, - dict, -]) -def test_list_buckets_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListBucketsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_buckets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListBucketsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_buckets_rest_required_fields(request_type=logging_config.ListBucketsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_buckets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_buckets._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.ListBucketsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.ListBucketsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_buckets(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_buckets_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_buckets._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_buckets_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_buckets") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_buckets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListBucketsRequest.pb(logging_config.ListBucketsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.ListBucketsResponse.to_json(logging_config.ListBucketsResponse()) - - request = logging_config.ListBucketsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListBucketsResponse() - - client.list_buckets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_buckets_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListBucketsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_buckets(request) - - -def test_list_buckets_rest_flattened(): +def test_create_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_config.ListBucketsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2/locations/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_exclusion( parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListBucketsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_buckets(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*/locations/*}/buckets" % client.transport._host, args[1]) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val -def test_list_buckets_rest_flattened_error(transport: str = 'rest'): +def test_create_exclusion_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_buckets( - logging_config.ListBucketsRequest(), + client.create_exclusion( + logging_config.CreateExclusionRequest(), parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), ) - -def test_list_buckets_rest_pager(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_create_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - next_page_token='abc', - ), - logging_config.ListBucketsResponse( - buckets=[], - next_page_token='def', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - ], - next_page_token='ghi', - ), - logging_config.ListBucketsResponse( - buckets=[ - logging_config.LogBucket(), - logging_config.LogBucket(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListBucketsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2/locations/sample3'} - - pager = client.list_buckets(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogBucket) - for i in results) - - pages = list(client.list_buckets(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging_config.GetBucketRequest, - dict, -]) -def test_get_bucket_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_bucket(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_get_bucket_rest_required_fields(request_type=logging_config.GetBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_bucket._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_bucket(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_bucket_rest_interceptors(null_interceptor): - 
transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_bucket") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_bucket") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetBucketRequest.pb(logging_config.GetBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) - - request = logging_config.GetBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogBucket() - - client.get_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_bucket(request) - - -def test_get_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.CreateBucketRequest, - dict, -]) -def test_create_bucket_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_bucket(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_create_bucket_rest_required_fields(request_type=logging_config.CreateBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request_init["bucket_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - assert "bucketId" not in jsonified_request - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - assert "bucketId" in jsonified_request - assert jsonified_request["bucketId"] == request_init["bucket_id"] - - jsonified_request["parent"] = 'parent_value' - jsonified_request["bucketId"] = 'bucket_id_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_bucket._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("bucket_id", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - assert "bucketId" in jsonified_request - assert jsonified_request["bucketId"] == 'bucket_id_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_bucket(request) - - expected_params = [ - ( - "bucketId", - "", - ), - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(("bucketId", )) & set(("parent", "bucketId", "bucket", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_bucket") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_bucket") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateBucketRequest.pb(logging_config.CreateBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) - - request = logging_config.CreateBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogBucket() - - client.create_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_bucket(request) - - -def test_create_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateBucketRequest, - dict, -]) -def test_update_bucket_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_bucket(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogBucket) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.retention_days == 1512 - assert response.locked is True - assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE - - -def test_update_bucket_rest_required_fields(request_type=logging_config.UpdateBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_bucket._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogBucket() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogBucket.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_bucket(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "bucket", "updateMask", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - 
mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_bucket") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_bucket") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateBucketRequest.pb(logging_config.UpdateBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogBucket.to_json(logging_config.LogBucket()) - - request = logging_config.UpdateBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogBucket() - - client.update_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["bucket"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'retention_days': 1512, 'locked': True, 'lifecycle_state': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_bucket(request) - - -def test_update_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteBucketRequest, - dict, -]) -def test_delete_bucket_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_bucket(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_bucket_rest_required_fields(request_type=logging_config.DeleteBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_bucket(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_bucket") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteBucketRequest.pb(logging_config.DeleteBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_config.DeleteBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - 
client.delete_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_bucket(request) - - -def test_delete_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.UndeleteBucketRequest, - dict, -]) -def test_undelete_bucket_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.undelete_bucket(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_undelete_bucket_rest_required_fields(request_type=logging_config.UndeleteBucketRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).undelete_bucket._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.undelete_bucket(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_undelete_bucket_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.undelete_bucket._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_undelete_bucket_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_undelete_bucket") as pre: - pre.assert_not_called() - pb_message = 
logging_config.UndeleteBucketRequest.pb(logging_config.UndeleteBucketRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_config.UndeleteBucketRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.undelete_bucket(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_undelete_bucket_rest_bad_request(transport: str = 'rest', request_type=logging_config.UndeleteBucketRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.undelete_bucket(request) - - -def test_undelete_bucket_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.ListViewsRequest, - dict, -]) -def test_list_views_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListViewsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_views(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListViewsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_views_rest_required_fields(request_type=logging_config.ListViewsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_views._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_views._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.ListViewsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.ListViewsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_views(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_views_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_views._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_views_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_views") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_views") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListViewsRequest.pb(logging_config.ListViewsRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.ListViewsResponse.to_json(logging_config.ListViewsResponse()) - - request = logging_config.ListViewsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListViewsResponse() - - client.list_views(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_views_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListViewsRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_views(request) - - -def test_list_views_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_config.ListViewsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListViewsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_views(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*/locations/*/buckets/*}/views" % client.transport._host, args[1]) - - -def test_list_views_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_views( - logging_config.ListViewsRequest(), - parent='parent_value', - ) - - -def test_list_views_rest_pager(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - logging_config.LogView(), - ], - next_page_token='abc', - ), - logging_config.ListViewsResponse( - views=[], - next_page_token='def', - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - ], - next_page_token='ghi', - ), - logging_config.ListViewsResponse( - views=[ - logging_config.LogView(), - logging_config.LogView(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListViewsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - - pager = client.list_views(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogView) - for i in results) - - pages = list(client.list_views(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging_config.GetViewRequest, - dict, -]) -def test_get_view_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_view(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogView) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - - -def test_get_view_rest_required_fields(request_type=logging_config.GetViewRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_view._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 
'name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_view(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_view_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_view._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_view_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - 
) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_view") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_view") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetViewRequest.pb(logging_config.GetViewRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogView.to_json(logging_config.LogView()) - - request = logging_config.GetViewRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogView() - - client.get_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetViewRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_view(request) - - -def test_get_view_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.CreateViewRequest, - dict, -]) -def test_create_view_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'} - request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogView.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_view(request) - - # Establish that the response is the type that we expect. 
    assert isinstance(response, logging_config.LogView)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.filter == 'filter_value'


def test_create_view_rest_required_fields(request_type=logging_config.CreateViewRequest):
    """Verify create_view REST required-field handling: defaults dropped, re-populated, and sent as query params."""
    transport_class = transports.ConfigServiceV2RestTransport

    request_init = {}
    request_init["parent"] = ""
    request_init["view_id"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped
    assert "viewId" not in jsonified_request

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_view._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present
    assert "viewId" in jsonified_request
    assert jsonified_request["viewId"] == request_init["view_id"]

    jsonified_request["parent"] = 'parent_value'
    jsonified_request["viewId"] = 'view_id_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_view._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("view_id", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'
    assert "viewId" in jsonified_request
    assert jsonified_request["viewId"] == 'view_id_value'

    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = logging_config.LogView()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "post",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = logging_config.LogView.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.create_view(request)

            # Required field with default value must still appear in the query params.
            expected_params = [
                (
                    "viewId",
                    "",
                ),
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_create_view_rest_unset_required_fields():
    """Verify the set of required fields create_view reports when none are provided."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance,
    # here (no parentheses) — presumably accepted by the transport ctor; confirm intended.
    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.create_view._get_unset_required_fields({})
    assert set(unset_fields) == (set(("viewId", )) & set(("parent", "viewId", "view", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_view_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around create_view."""
    transport = transports.ConfigServiceV2RestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
    )
    client = ConfigServiceV2Client(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
            mock.patch.object(path_template, "transcode") as transcode, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_view") as post, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_view") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = logging_config.CreateViewRequest.pb(logging_config.CreateViewRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = logging_config.LogView.to_json(logging_config.LogView())

        request = logging_config.CreateViewRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = logging_config.LogView()

        client.create_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_create_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateViewRequest):
    """Verify an HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'sample1/sample2/locations/sample3/buckets/sample4'}
    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.create_view(request)


def test_create_view_rest_error():
    """Smoke test: constructing a REST client for create_view raises nothing."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    logging_config.UpdateViewRequest,
    dict,
])
def test_update_view_rest(request_type):
    """Verify update_view over REST returns a LogView built from the mocked JSON body."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = logging_config.LogView(
            name='name_value',
            description='description_value',
            filter='filter_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = logging_config.LogView.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.update_view(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, logging_config.LogView)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.filter == 'filter_value'


def test_update_view_rest_required_fields(request_type=logging_config.UpdateViewRequest):
    """Verify update_view REST required-field handling ('name' required, no default-valued params)."""
    transport_class = transports.ConfigServiceV2RestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_view._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_view._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("update_mask", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = logging_config.LogView()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "patch",
                'query_params': pb_request,
            }
            transcode_result['body'] = pb_request
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = logging_config.LogView.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.update_view(request)

            # No required field with a default value, so no expected query params.
            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_update_view_rest_unset_required_fields():
    """Verify the set of required fields update_view reports when none are provided."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance — confirm intended.
    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.update_view._get_unset_required_fields({})
    assert set(unset_fields) == (set(("updateMask", )) & set(("name", "view", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_view_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around update_view."""
    transport = transports.ConfigServiceV2RestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
    )
    client = ConfigServiceV2Client(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
            mock.patch.object(path_template, "transcode") as transcode, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_view") as post, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_view") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = logging_config.UpdateViewRequest.pb(logging_config.UpdateViewRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = logging_config.LogView.to_json(logging_config.LogView())

        request = logging_config.UpdateViewRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = logging_config.LogView()

        client.update_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_update_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateViewRequest):
    """Verify an HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
    request_init["view"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'filter': 'filter_value'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.update_view(request)


def test_update_view_rest_error():
    """Smoke test: constructing a REST client for update_view raises nothing."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    logging_config.DeleteViewRequest,
    dict,
])
def test_delete_view_rest(request_type):
    """Verify delete_view over REST returns None on an empty 200 response."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        json_return_value = ''

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_view(request)

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_view_rest_required_fields(request_type=logging_config.DeleteViewRequest):
    """Verify delete_view REST required-field handling ('name' required, empty body)."""
    transport_class = transports.ConfigServiceV2RestTransport

    request_init = {}
    request_init["name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_view._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["name"] = 'name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_view._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "name" in jsonified_request
    assert jsonified_request["name"] == 'name_value'

    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = None
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "delete",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200
            json_return_value = ''

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.delete_view(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_delete_view_rest_unset_required_fields():
    """Verify the set of required fields delete_view reports when none are provided."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance — confirm intended.
    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.delete_view._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("name", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_view_rest_interceptors(null_interceptor):
    """Verify the pre interceptor hook fires exactly once around delete_view (no post hook: empty response)."""
    transport = transports.ConfigServiceV2RestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
    )
    client = ConfigServiceV2Client(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
            mock.patch.object(path_template, "transcode") as transcode, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_view") as pre:
        pre.assert_not_called()
        pb_message = logging_config.DeleteViewRequest.pb(logging_config.DeleteViewRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()

        request = logging_config.DeleteViewRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata

        client.delete_view(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()


def test_delete_view_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteViewRequest):
    """Verify an HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'sample1/sample2/locations/sample3/buckets/sample4/views/sample5'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_view(request)


def test_delete_view_rest_error():
    """Smoke test: constructing a REST client for delete_view raises nothing."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    logging_config.ListSinksRequest,
    dict,
])
def test_list_sinks_rest(request_type):
    """Verify list_sinks over REST returns a pager exposing the mocked page token."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = logging_config.ListSinksResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = logging_config.ListSinksResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list_sinks(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListSinksPager)
    assert response.next_page_token == 'next_page_token_value'


def test_list_sinks_rest_required_fields(request_type=logging_config.ListSinksRequest):
    """Verify list_sinks REST required-field handling ('parent' required; paging params optional)."""
    transport_class = transports.ConfigServiceV2RestTransport

    request_init = {}
    request_init["parent"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sinks._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["parent"] = 'parent_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_sinks._get_unset_required_fields(jsonified_request)
    # Check that path parameters and body parameters are not mixing in.
    assert not set(unset_fields) - set(("page_size", "page_token", ))
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "parent" in jsonified_request
    assert jsonified_request["parent"] == 'parent_value'

    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = logging_config.ListSinksResponse()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = logging_config.ListSinksResponse.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.list_sinks(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_list_sinks_rest_unset_required_fields():
    """Verify the set of required fields list_sinks reports when none are provided."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance — confirm intended.
    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.list_sinks._get_unset_required_fields({})
    assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_sinks_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around list_sinks."""
    transport = transports.ConfigServiceV2RestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
    )
    client = ConfigServiceV2Client(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
            mock.patch.object(path_template, "transcode") as transcode, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_sinks") as post, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_sinks") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = logging_config.ListSinksRequest.pb(logging_config.ListSinksRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = logging_config.ListSinksResponse.to_json(logging_config.ListSinksResponse())

        request = logging_config.ListSinksRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = logging_config.ListSinksResponse()

        client.list_sinks(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_list_sinks_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListSinksRequest):
    """Verify an HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_sinks(request)


def test_list_sinks_rest_flattened():
    """Verify the flattened list_sinks call hits the expected URL template."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = logging_config.ListSinksResponse()

        # get arguments that satisfy an http rule for this method
        sample_request = {'parent': 'sample1/sample2'}

        # get truthy value for each flattened field
        mock_args = dict(
            parent='parent_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = logging_config.ListSinksResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.list_sinks(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v2/{parent=*/*}/sinks" % client.transport._host, args[1])


def test_list_sinks_rest_flattened_error(transport: str = 'rest'):
    """Verify passing both a request object and flattened fields raises ValueError."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_sinks(
            logging_config.ListSinksRequest(),
            parent='parent_value',
        )


def test_list_sinks_rest_pager(transport: str = 'rest'):
    """Verify list_sinks pagination walks all pages and exposes per-page tokens."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # TODO(kbandes): remove this mock unless there's a good reason for it.
        #with mock.patch.object(path_template, 'transcode') as transcode:
        # Set the response as a series of pages
        response = (
            logging_config.ListSinksResponse(
                sinks=[
                    logging_config.LogSink(),
                    logging_config.LogSink(),
                    logging_config.LogSink(),
                ],
                next_page_token='abc',
            ),
            logging_config.ListSinksResponse(
                sinks=[],
                next_page_token='def',
            ),
            logging_config.ListSinksResponse(
                sinks=[
                    logging_config.LogSink(),
                ],
                next_page_token='ghi',
            ),
            logging_config.ListSinksResponse(
                sinks=[
                    logging_config.LogSink(),
                    logging_config.LogSink(),
                ],
            ),
        )
        # Two responses for two calls
        response = response + response

        # Wrap the values into proper Response objs
        response = tuple(logging_config.ListSinksResponse.to_json(x) for x in response)
        return_values = tuple(Response() for i in response)
        for return_val, response_val in zip(return_values, response):
            return_val._content = response_val.encode('UTF-8')
            return_val.status_code = 200
        req.side_effect = return_values

        sample_request = {'parent': 'sample1/sample2'}

        pager = client.list_sinks(request=sample_request)

        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, logging_config.LogSink)
                   for i in results)

        pages = list(client.list_sinks(request=sample_request).pages)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token


@pytest.mark.parametrize("request_type", [
    logging_config.GetSinkRequest,
    dict,
])
def test_get_sink_rest(request_type):
    """Verify get_sink over REST returns a LogSink built from the mocked JSON body."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'sink_name': 'sample1/sample2/sinks/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = logging_config.LogSink(
            name='name_value',
            destination='destination_value',
            filter='filter_value',
            description='description_value',
            disabled=True,
            output_version_format=logging_config.LogSink.VersionFormat.V2,
            writer_identity='writer_identity_value',
            include_children=True,
        )

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = logging_config.LogSink.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)

        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.get_sink(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, logging_config.LogSink)
    assert response.name == 'name_value'
    assert response.destination == 'destination_value'
    assert response.filter == 'filter_value'
    assert response.description == 'description_value'
    assert response.disabled is True
    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
    assert response.writer_identity == 'writer_identity_value'
    assert response.include_children is True


def test_get_sink_rest_required_fields(request_type=logging_config.GetSinkRequest):
    """Verify get_sink REST required-field handling ('sink_name' required)."""
    transport_class = transports.ConfigServiceV2RestTransport

    request_init = {}
    request_init["sink_name"] = ""
    request = request_type(**request_init)
    pb_request = request_type.pb(request)
    jsonified_request = json.loads(json_format.MessageToJson(
        pb_request,
        including_default_value_fields=False,
        use_integers_for_enums=False
    ))

    # verify fields with default values are dropped

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_sink._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with default values are now present

    jsonified_request["sinkName"] = 'sink_name_value'

    unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_sink._get_unset_required_fields(jsonified_request)
    jsonified_request.update(unset_fields)

    # verify required fields with non-default values are left alone
    assert "sinkName" in jsonified_request
    assert jsonified_request["sinkName"] == 'sink_name_value'

    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest',
    )
    request = request_type(**request_init)

    # Designate an appropriate value for the returned response.
    return_value = logging_config.LogSink()
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # We need to mock transcode() because providing default values
        # for required fields will fail the real version if the http_options
        # expect actual values for those fields.
        with mock.patch.object(path_template, 'transcode') as transcode:
            # A uri without fields and an empty body will force all the
            # request fields to show up in the query_params.
            pb_request = request_type.pb(request)
            transcode_result = {
                'uri': 'v1/sample_method',
                'method': "get",
                'query_params': pb_request,
            }
            transcode.return_value = transcode_result

            response_value = Response()
            response_value.status_code = 200

            pb_return_value = logging_config.LogSink.pb(return_value)
            json_return_value = json_format.MessageToJson(pb_return_value)

            response_value._content = json_return_value.encode('UTF-8')
            req.return_value = response_value

            response = client.get_sink(request)

            expected_params = [
            ]
            actual_params = req.call_args.kwargs['params']
            assert expected_params == actual_params


def test_get_sink_rest_unset_required_fields():
    """Verify the set of required fields get_sink reports when none are provided."""
    # NOTE(review): AnonymousCredentials is passed as a class, not an instance — confirm intended.
    transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials)

    unset_fields = transport.get_sink._get_unset_required_fields({})
    assert set(unset_fields) == (set(()) & set(("sinkName", )))


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_sink_rest_interceptors(null_interceptor):
    """Verify pre/post interceptor hooks fire exactly once around get_sink."""
    transport = transports.ConfigServiceV2RestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(),
    )
    client = ConfigServiceV2Client(transport=transport)
    with mock.patch.object(type(client.transport._session), "request") as req, \
            mock.patch.object(path_template, "transcode") as transcode, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_sink") as post, \
            mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_sink") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = logging_config.GetSinkRequest.pb(logging_config.GetSinkRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = Response()
        req.return_value.status_code = 200
        req.return_value.request = PreparedRequest()
        req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink())

        request = logging_config.GetSinkRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = logging_config.LogSink()

        client.get_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetSinkRequest):
    """Verify an HTTP 400 response surfaces as core_exceptions.BadRequest."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # send a request that will satisfy transcoding
    request_init = {'sink_name': 'sample1/sample2/sinks/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_sink(request)


def test_get_sink_rest_flattened():
    """Verify the flattened get_sink call hits the expected URL template."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = logging_config.LogSink()

        # get arguments that satisfy an http rule for this method
        sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'}

        # get truthy value for each flattened field
        mock_args = dict(
            sink_name='sink_name_value',
        )
        mock_args.update(sample_request)

        # Wrap the value into a proper Response obj
        response_value = Response()
        response_value.status_code = 200
        pb_return_value = logging_config.LogSink.pb(return_value)
        json_return_value = json_format.MessageToJson(pb_return_value)
        response_value._content = json_return_value.encode('UTF-8')
        req.return_value = response_value

        client.get_sink(**mock_args)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(req.mock_calls) == 1
        _, args, _ = req.mock_calls[0]
        assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1])


def test_get_sink_rest_flattened_error(transport: str = 'rest'):
    """Verify passing both a request object and flattened fields raises ValueError."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.get_sink(
            logging_config.GetSinkRequest(),
            sink_name='sink_name_value',
        )


def test_get_sink_rest_error():
    """Smoke test: constructing a REST client for get_sink raises nothing."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='rest'
    )


@pytest.mark.parametrize("request_type", [
    logging_config.CreateSinkRequest,
    dict,
])
def test_create_sink_rest(request_type):
    """Verify create_sink over REST (body continues in the next section of the file)."""
    client = ConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'sample1/sample2'}
    request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
- return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_sink(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_create_sink_rest_required_fields(request_type=logging_config.CreateSinkRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 
'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_sink._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("unique_writer_identity", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_sink(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(("uniqueWriterIdentity", )) & set(("parent", "sink", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_sink") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_sink") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateSinkRequest.pb(logging_config.CreateSinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value 
= Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) - - request = logging_config.CreateSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogSink() - - client.create_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_sink(request) - - -def test_create_sink_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_sink(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/sinks" % client.transport._host, args[1]) - - -def test_create_sink_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_sink( - logging_config.CreateSinkRequest(), + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_exclusion( parent='parent_value', - sink=logging_config.LogSink(name='name_value'), - ) - - -def test_create_sink_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateSinkRequest, - dict, -]) -def test_update_sink_rest(request_type): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_sink(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogSink) - assert response.name == 'name_value' - assert response.destination == 'destination_value' - assert response.filter == 'filter_value' - assert response.description == 'description_value' - assert response.disabled is True - assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 - assert response.writer_identity == 'writer_identity_value' - assert response.include_children is True - - -def test_update_sink_rest_required_fields(request_type=logging_config.UpdateSinkRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["sink_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_sink._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["sinkName"] = 'sink_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_sink._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("unique_writer_identity", "update_mask", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "sinkName" in jsonified_request - assert jsonified_request["sinkName"] == 'sink_name_value' - - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "put", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_sink(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(("uniqueWriterIdentity", "updateMask", )) & set(("sinkName", "sink", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_sink") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_sink") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateSinkRequest.pb(logging_config.UpdateSinkRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogSink.to_json(logging_config.LogSink()) - - request = logging_config.UpdateSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogSink() - - client.update_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateSinkRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request_init["sink"] = {'name': 'name_value', 'destination': 'destination_value', 'filter': 'filter_value', 'description': 'description_value', 'disabled': True, 'exclusions': [{'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}}], 'output_version_format': 1, 'writer_identity': 'writer_identity_value', 'include_children': True, 'bigquery_options': {'use_partitioned_tables': True, 'uses_timestamp_column_partitioning': True}, 'create_time': {}, 'update_time': {}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_sink(request) - - -def test_update_sink_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogSink() - - # get arguments that satisfy an http rule for this method - sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + exclusion=logging_config.LogExclusion(name='name_value'), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogSink.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_sink(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val -def test_update_sink_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_create_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_sink( - logging_config.UpdateSinkRequest(), - sink_name='sink_name_value', - sink=logging_config.LogSink(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + await client.create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), ) -def test_update_sink_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - @pytest.mark.parametrize("request_type", [ - logging_config.DeleteSinkRequest, - dict, + logging_config.UpdateExclusionRequest, + dict, ]) -def test_delete_sink_rest(request_type): +def test_update_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client.update_exclusion(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_sink(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_sink_rest_required_fields(request_type=logging_config.DeleteSinkRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["sink_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["sinkName"] = 'sink_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_sink._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True - # verify required fields with non-default values are left alone - assert "sinkName" in jsonified_request - assert jsonified_request["sinkName"] == 'sink_name_value' +def test_update_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_sink(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_sink_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_sink._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("sinkName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_sink_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_sink") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteSinkRequest.pb(logging_config.DeleteSinkRequest()) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_config.DeleteSinkRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_sink(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() -def test_delete_sink_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteSinkRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'sink_name': 'sample1/sample2/sinks/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_sink(request) - - -def test_delete_sink_rest_flattened(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'sink_name': 'sample1/sample2/sinks/sample3'} - - # get truthy value for each flattened field - mock_args = dict( - sink_name='sink_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_sink(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{sink_name=*/*/sinks/*}" % client.transport._host, args[1]) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.update_exclusion(request) -def test_delete_sink_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_sink( - logging_config.DeleteSinkRequest(), - sink_name='sink_name_value', - ) + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True -def test_delete_sink_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +@pytest.mark.asyncio +async def test_update_exclusion_async_from_dict(): + await test_update_exclusion_async(request_type=dict) -@pytest.mark.parametrize("request_type", [ - logging_config.ListExclusionsRequest, - dict, -]) -def test_list_exclusions_rest(request_type): +def test_update_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.UpdateExclusionRequest() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + request.name = 'name_value' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_exclusions(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request) - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListExclusionsPager) - assert response.next_page_token == 'next_page_token_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_list_exclusions_rest_required_fields(request_type=logging_config.ListExclusionsRequest): - transport_class = transports.ConfigServiceV2RestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +@pytest.mark.asyncio +async def test_update_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # verify fields with default values are dropped + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_exclusions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request.name = 'name_value' - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client.update_exclusion(request) - jsonified_request["parent"] = 'parent_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_exclusions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' +def test_update_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.ListExclusionsResponse() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_exclusions(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - -def test_list_exclusions_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - unset_fields = transport.list_exclusions._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_exclusions_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_list_exclusions") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_list_exclusions") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.ListExclusionsRequest.pb(logging_config.ListExclusionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = 
logging_config.ListExclusionsResponse.to_json(logging_config.ListExclusionsResponse()) - - request = logging_config.ListExclusionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.ListExclusionsResponse() - - client.list_exclusions(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_exclusions_rest_bad_request(transport: str = 'rest', request_type=logging_config.ListExclusionsRequest): +def test_update_exclusion_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_exclusions(request) - + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) -def test_list_exclusions_rest_flattened(): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.ListExclusionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.ListExclusionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_exclusions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/exclusions" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -def test_list_exclusions_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_exclusions( - logging_config.ListExclusionsRequest(), - parent='parent_value', + await client.update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) -def test_list_exclusions_rest_pager(transport: str = 'rest'): +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteExclusionRequest, + dict, +]) +def test_delete_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - next_page_token='abc', - ), - logging_config.ListExclusionsResponse( - exclusions=[], - next_page_token='def', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - ], - next_page_token='ghi', - ), - logging_config.ListExclusionsResponse( - exclusions=[ - logging_config.LogExclusion(), - logging_config.LogExclusion(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_config.ListExclusionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - pager = client.list_exclusions(request=sample_request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_exclusion(request) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_config.LogExclusion) - for i in results) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() - pages = list(client.list_exclusions(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token + # Establish that the response is the type that we expect. + assert response is None -@pytest.mark.parametrize("request_type", [ - logging_config.GetExclusionRequest, - dict, -]) -def test_get_exclusion_rest(request_type): +def test_delete_exclusion_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc', ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - ) +@pytest.mark.asyncio +async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_exclusion(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.LogExclusion) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True + assert response is None + +@pytest.mark.asyncio +async def test_delete_exclusion_async_from_dict(): + await test_delete_exclusion_async(request_type=dict) -def test_get_exclusion_rest_required_fields(request_type=logging_config.GetExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +def test_delete_exclusion_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) - # verify fields with default values are dropped + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request.name = 'name_value' - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = None + client.delete_exclusion(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_delete_exclusion_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_exclusion(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request) -def test_get_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.get_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_exclusion") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_exclusion") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetExclusionRequest.pb(logging_config.GetExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) - - request = logging_config.GetExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogExclusion() - - client.get_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetExclusionRequest): +def test_delete_exclusion_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 
'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_exclusion( + name='name_value', + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_exclusion(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_exclusion_rest_flattened(): +def test_delete_exclusion_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/exclusions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_exclusion( + logging_config.DeleteExclusionRequest(), name='name_value', ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None - client.get_exclusion(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_exclusion( + name='name_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_get_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_delete_exclusion_flattened_error_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_exclusion( - logging_config.GetExclusionRequest(), + await client.delete_exclusion( + logging_config.DeleteExclusionRequest(), name='name_value', ) -def test_get_exclusion_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - @pytest.mark.parametrize("request_type", [ - logging_config.CreateExclusionRequest, - dict, + logging_config.GetCmekSettingsRequest, + dict, ]) -def test_create_exclusion_rest(request_type): +def test_get_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', ) + response = client.get_cmek_settings(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_exclusion(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.CmekSettings) assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_create_exclusion_rest_required_fields(request_type=logging_config.CreateExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' +def test_get_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_exclusion(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + client.get_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() +@pytest.mark.asyncio +async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -def test_create_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport.create_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "exclusion", ))) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + response = await client.get_cmek_settings(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_create_exclusion") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_create_exclusion") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.CreateExclusionRequest.pb(logging_config.CreateExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) - - request = logging_config.CreateExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogExclusion() - - client.create_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - 
- pre.assert_called_once() - post.assert_called_once() - - -def test_create_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.CreateExclusionRequest): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_exclusion(request) +@pytest.mark.asyncio +async def test_get_cmek_settings_async_from_dict(): + await test_get_cmek_settings_async(request_type=dict) -def test_create_exclusion_rest_flattened(): +def test_get_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_config.LogExclusion() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) - mock_args.update(sample_request) + request.name = 'name_value' - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request) - client.create_exclusion(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/exclusions" % client.transport._host, args[1]) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_create_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_get_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_exclusion( - logging_config.CreateExclusionRequest(), - parent='parent_value', - exclusion=logging_config.LogExclusion(name='name_value'), - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + request.name = 'name_value' -def test_create_exclusion_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] @pytest.mark.parametrize("request_type", [ - logging_config.UpdateExclusionRequest, - dict, + logging_config.UpdateCmekSettingsRequest, + dict, ]) -def test_update_exclusion_rest(request_type): +def test_update_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', ) + response = client.update_cmek_settings(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_exclusion(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.LogExclusion) + assert isinstance(response, logging_config.CmekSettings) assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.disabled is True - - -def test_update_exclusion_rest_required_fields(request_type=logging_config.UpdateExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = 'name_value' - - 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_exclusion._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_update_cmek_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport='grpc', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + client.update_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) +@pytest.mark.asyncio +async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.update_exclusion(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + response = await client.update_cmek_settings(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' -def test_update_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.update_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "exclusion", "updateMask", ))) +@pytest.mark.asyncio +async def test_update_cmek_settings_async_from_dict(): + await test_update_cmek_settings_async(request_type=dict) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_exclusion") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_exclusion") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateExclusionRequest.pb(logging_config.UpdateExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.LogExclusion.to_json(logging_config.LogExclusion()) - - request = 
logging_config.UpdateExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.LogExclusion() - - client.update_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateExclusionRequest): +def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request_init["exclusion"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'disabled': True, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}} - request = request_type(**request_init) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = 'name_value' - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_exclusion(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_update_exclusion_rest_flattened(): - client = ConfigServiceV2Client( + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_cmek_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.LogExclusion() - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/exclusions/sample3'} + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() - # get truthy value for each flattened field - mock_args = dict( - name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - mock_args.update(sample_request) + request.name = 'name_value' - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.LogExclusion.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client.update_cmek_settings(request) - client.update_exclusion(**mock_args) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_update_exclusion_rest_flattened_error(transport: str = 'rest'): +@pytest.mark.parametrize("request_type", [ + logging_config.GetSettingsRequest, + dict, +]) +def test_get_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_exclusion( - logging_config.UpdateExclusionRequest(), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.Settings( name='name_value', - exclusion=logging_config.LogExclusion(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, ) + response = client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True -def test_update_exclusion_rest_error(): +def test_get_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport='grpc', ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() -@pytest.mark.parametrize("request_type", [ - logging_config.DeleteExclusionRequest, - dict, -]) -def test_delete_exclusion_rest(request_type): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client.get_settings(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_exclusion(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_exclusion_rest_required_fields(request_type=logging_config.DeleteExclusionRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True - jsonified_request["name"] = 'name_value' - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_exclusion._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_get_settings_async_from_dict(): + await test_get_settings_async(request_type=dict) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +def test_get_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_exclusion(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.GetSettingsRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.get_settings(request) -def test_delete_exclusion_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.delete_exclusion._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_exclusion_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( +@pytest.mark.asyncio +async def test_get_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_delete_exclusion") as pre: - pre.assert_not_called() - pb_message = logging_config.DeleteExclusionRequest.pb(logging_config.DeleteExclusionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - 
"query_params": pb_message, - } + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = 'name_value' - request = logging_config.DeleteExclusionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + await client.get_settings(request) - client.delete_exclusion(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - pre.assert_called_once() + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -def test_delete_exclusion_rest_bad_request(transport: str = 'rest', request_type=logging_config.DeleteExclusionRequest): +def test_get_settings_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/exclusions/sample3'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_settings( + name='name_value', + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_exclusion(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_delete_exclusion_rest_flattened(): +def test_get_settings_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'name': 'sample1/sample2/exclusions/sample3'} - - # get truthy value for each flattened field - mock_args = dict( + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_settings( + logging_config.GetSettingsRequest(), name='name_value', ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +@pytest.mark.asyncio +async def test_get_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() - client.delete_exclusion(**mock_args) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_settings( + name='name_value', + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{name=*/*/exclusions/*}" % client.transport._host, args[1]) - + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val -def test_delete_exclusion_rest_flattened_error(transport: str = 'rest'): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_get_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_exclusion( - logging_config.DeleteExclusionRequest(), + await client.get_settings( + logging_config.GetSettingsRequest(), name='name_value', ) -def test_delete_exclusion_rest_error(): +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateSettingsRequest, + dict, +]) +def test_update_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize("request_type", [ - logging_config.GetCmekSettingsRequest, - dict, -]) -def test_get_cmek_settings_rest(request_type): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + ) + response = client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True + + +def test_update_settings_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc', ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + +@pytest.mark.asyncio +async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client.update_settings(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_cmek_settings(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, logging_config.CmekSettings) + assert isinstance(response, logging_config.Settings) assert response.name == 'name_value' assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True + +@pytest.mark.asyncio +async def test_update_settings_async_from_dict(): + await test_update_settings_async(request_type=dict) -def test_get_cmek_settings_rest_required_fields(request_type=logging_config.GetCmekSettingsRequest): - transport_class = transports.ConfigServiceV2RestTransport - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +def test_update_settings_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) - # verify fields with default values are dropped + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cmek_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + request.name = 'name_value' - # verify required fields with default values are now present + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.update_settings(request) - jsonified_request["name"] = 'name_value' + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_cmek_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_settings_field_headers_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_cmek_settings(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + request.name = 'name_value' + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + await client.update_settings(request) -def test_get_cmek_settings_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.get_cmek_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name", ))) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_cmek_settings_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_get_cmek_settings") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_get_cmek_settings") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.GetCmekSettingsRequest.pb(logging_config.GetCmekSettingsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.CmekSettings.to_json(logging_config.CmekSettings()) - - request = logging_config.GetCmekSettingsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.CmekSettings() - - client.get_cmek_settings(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_cmek_settings_rest_bad_request(transport: str = 'rest', request_type=logging_config.GetCmekSettingsRequest): +def test_update_settings_flattened(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) - # send a request that will satisfy 
transcoding - request_init = {'name': 'sample1/sample2'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_settings( + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_cmek_settings(request) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val -def test_get_cmek_settings_rest_error(): +def test_update_settings_flattened_error(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest' ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) -@pytest.mark.parametrize("request_type", [ - logging_config.UpdateCmekSettingsRequest, - dict, -]) -def test_update_cmek_settings_rest(request_type): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_update_settings_flattened_async(): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request_init["cmek_settings"] = {'name': 'name_value', 'kms_key_name': 'kms_key_name_value', 'service_account_id': 'service_account_id_value'} - request = request_type(**request_init) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - service_account_id='service_account_id_value', + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_settings( + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), ) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_cmek_settings(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_config.CmekSettings) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.service_account_id == 'service_account_id_value' - - -def test_update_cmek_settings_rest_required_fields(request_type=logging_config.UpdateCmekSettingsRequest): - transport_class = transports.ConfigServiceV2RestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cmek_settings._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val - jsonified_request["name"] = 'name_value' +@pytest.mark.asyncio +async def test_update_settings_flattened_error_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_cmek_settings._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("update_mask", )) - jsonified_request.update(unset_fields) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == 'name_value' +@pytest.mark.parametrize("request_type", [ + logging_config.CopyLogEntriesRequest, + dict, +]) +def test_copy_log_entries(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', + transport=transport, ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_config.CmekSettings() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "patch", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_config.CmekSettings.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.update_cmek_settings(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.copy_log_entries(request) - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) -def test_update_cmek_settings_rest_unset_required_fields(): - transport = transports.ConfigServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.update_cmek_settings._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask", )) & set(("name", "cmekSettings", ))) +def test_copy_log_entries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + client.copy_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_cmek_settings_rest_interceptors(null_interceptor): - transport = transports.ConfigServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.ConfigServiceV2RestInterceptor(), - ) - client = ConfigServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "post_update_cmek_settings") as post, \ - mock.patch.object(transports.ConfigServiceV2RestInterceptor, "pre_update_cmek_settings") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_config.UpdateCmekSettingsRequest.pb(logging_config.UpdateCmekSettingsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_config.CmekSettings.to_json(logging_config.CmekSettings()) - - request = logging_config.UpdateCmekSettingsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_config.CmekSettings() - - client.update_cmek_settings(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_cmek_settings_rest_bad_request(transport: str = 'rest', request_type=logging_config.UpdateCmekSettingsRequest): - client = ConfigServiceV2Client( +@pytest.mark.asyncio +async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2'} - request_init["cmek_settings"] = {'name': 'name_value', 'kms_key_name': 'kms_key_name_value', 'service_account_id': 'service_account_id_value'} - request = request_type(**request_init) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.copy_log_entries(request) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_cmek_settings(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_update_cmek_settings_rest_error(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + +@pytest.mark.asyncio +async def test_copy_log_entries_async_from_dict(): + await test_copy_log_entries_async(request_type=dict) def test_credentials_transport_error(): @@ -11179,7 +8103,6 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport, - transports.ConfigServiceV2RestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
@@ -11190,7 +8113,6 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", - "rest", ]) def test_transport_kind(transport_name): transport = ConfigServiceV2Client.get_transport_class(transport_name)( @@ -11230,6 +8152,8 @@ def test_config_service_v2_base_transport(): methods = ( 'list_buckets', 'get_bucket', + 'create_bucket_async', + 'update_bucket_async', 'create_bucket', 'update_bucket', 'delete_bucket', @@ -11244,6 +8168,10 @@ def test_config_service_v2_base_transport(): 'create_sink', 'update_sink', 'delete_sink', + 'create_link', + 'delete_link', + 'list_links', + 'get_link', 'list_exclusions', 'get_exclusion', 'create_exclusion', @@ -11251,6 +8179,9 @@ def test_config_service_v2_base_transport(): 'delete_exclusion', 'get_cmek_settings', 'update_cmek_settings', + 'get_settings', + 'update_settings', + 'copy_log_entries', ) for method in methods: with pytest.raises(NotImplementedError): @@ -11259,6 +8190,11 @@ def test_config_service_v2_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties remainder = [ 'kind', @@ -11340,7 +8276,6 @@ def test_config_service_v2_transport_auth_adc(transport_class): [ transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport, - transports.ConfigServiceV2RestTransport, ], ) def test_config_service_v2_transport_auth_gdch_credentials(transport_class): @@ -11440,20 +8375,10 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) -def test_config_service_v2_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - 
transports.ConfigServiceV2RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( @@ -11463,14 +8388,11 @@ def test_config_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( @@ -11480,93 +8402,8 @@ def test_config_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com:8000' ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_config_service_v2_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = ConfigServiceV2Client( - credentials=creds1, - transport=transport_name, - ) - client2 = ConfigServiceV2Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_buckets._session - session2 = client2.transport.list_buckets._session - assert session1 != session2 - session1 = client1.transport.get_bucket._session - session2 = client2.transport.get_bucket._session - assert session1 != session2 - session1 = client1.transport.create_bucket._session - session2 = client2.transport.create_bucket._session - assert session1 != session2 - session1 = client1.transport.update_bucket._session - session2 = client2.transport.update_bucket._session - assert session1 != session2 - 
session1 = client1.transport.delete_bucket._session - session2 = client2.transport.delete_bucket._session - assert session1 != session2 - session1 = client1.transport.undelete_bucket._session - session2 = client2.transport.undelete_bucket._session - assert session1 != session2 - session1 = client1.transport.list_views._session - session2 = client2.transport.list_views._session - assert session1 != session2 - session1 = client1.transport.get_view._session - session2 = client2.transport.get_view._session - assert session1 != session2 - session1 = client1.transport.create_view._session - session2 = client2.transport.create_view._session - assert session1 != session2 - session1 = client1.transport.update_view._session - session2 = client2.transport.update_view._session - assert session1 != session2 - session1 = client1.transport.delete_view._session - session2 = client2.transport.delete_view._session - assert session1 != session2 - session1 = client1.transport.list_sinks._session - session2 = client2.transport.list_sinks._session - assert session1 != session2 - session1 = client1.transport.get_sink._session - session2 = client2.transport.get_sink._session - assert session1 != session2 - session1 = client1.transport.create_sink._session - session2 = client2.transport.create_sink._session - assert session1 != session2 - session1 = client1.transport.update_sink._session - session2 = client2.transport.update_sink._session - assert session1 != session2 - session1 = client1.transport.delete_sink._session - session2 = client2.transport.delete_sink._session - assert session1 != session2 - session1 = client1.transport.list_exclusions._session - session2 = client2.transport.list_exclusions._session - assert session1 != session2 - session1 = client1.transport.get_exclusion._session - session2 = client2.transport.get_exclusion._session - assert session1 != session2 - session1 = client1.transport.create_exclusion._session - session2 = client2.transport.create_exclusion._session - 
assert session1 != session2 - session1 = client1.transport.update_exclusion._session - session2 = client2.transport.update_exclusion._session - assert session1 != session2 - session1 = client1.transport.delete_exclusion._session - session2 = client2.transport.delete_exclusion._session - assert session1 != session2 - session1 = client1.transport.get_cmek_settings._session - session2 = client2.transport.get_cmek_settings._session - assert session1 != session2 - session1 = client1.transport.update_cmek_settings._session - session2 = client2.transport.update_cmek_settings._session - assert session1 != session2 def test_config_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -11677,6 +8514,40 @@ def test_config_service_v2_transport_channel_mtls_with_adc( assert transport.grpc_channel == mock_grpc_channel +def test_config_service_v2_grpc_lro_client(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_config_service_v2_grpc_lro_async_client(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + def test_cmek_settings_path(): project = "squid" expected = "projects/{project}/cmekSettings".format(project=project, ) @@ -11694,10 +8565,33 @@ def test_parse_cmek_settings_path(): actual = ConfigServiceV2Client.parse_cmek_settings_path(path) assert expected == actual -def test_log_bucket_path(): +def test_link_path(): project = "whelk" location = "octopus" bucket = "oyster" + link = "nudibranch" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + actual = ConfigServiceV2Client.link_path(project, location, bucket, link) + assert expected == actual + + +def test_parse_link_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "bucket": "winkle", + "link": "nautilus", + } + path = ConfigServiceV2Client.link_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_link_path(path) + assert expected == actual + +def test_log_bucket_path(): + project = "scallop" + location = "abalone" + bucket = "squid" expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) actual = ConfigServiceV2Client.log_bucket_path(project, location, bucket) assert expected == actual @@ -11705,9 +8599,9 @@ def test_log_bucket_path(): def test_parse_log_bucket_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "bucket": "mussel", + "project": "clam", + "location": "whelk", + "bucket": "octopus", } path = ConfigServiceV2Client.log_bucket_path(**expected) @@ -11716,8 +8610,8 @@ def test_parse_log_bucket_path(): assert expected == actual def test_log_exclusion_path(): - project = "winkle" - exclusion = "nautilus" + project = "oyster" + exclusion = "nudibranch" expected = "projects/{project}/exclusions/{exclusion}".format(project=project, 
exclusion=exclusion, ) actual = ConfigServiceV2Client.log_exclusion_path(project, exclusion) assert expected == actual @@ -11725,8 +8619,8 @@ def test_log_exclusion_path(): def test_parse_log_exclusion_path(): expected = { - "project": "scallop", - "exclusion": "abalone", + "project": "cuttlefish", + "exclusion": "mussel", } path = ConfigServiceV2Client.log_exclusion_path(**expected) @@ -11735,8 +8629,8 @@ def test_parse_log_exclusion_path(): assert expected == actual def test_log_sink_path(): - project = "squid" - sink = "clam" + project = "winkle" + sink = "nautilus" expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) actual = ConfigServiceV2Client.log_sink_path(project, sink) assert expected == actual @@ -11744,8 +8638,8 @@ def test_log_sink_path(): def test_parse_log_sink_path(): expected = { - "project": "whelk", - "sink": "octopus", + "project": "scallop", + "sink": "abalone", } path = ConfigServiceV2Client.log_sink_path(**expected) @@ -11754,10 +8648,10 @@ def test_parse_log_sink_path(): assert expected == actual def test_log_view_path(): - project = "oyster" - location = "nudibranch" - bucket = "cuttlefish" - view = "mussel" + project = "squid" + location = "clam" + bucket = "whelk" + view = "octopus" expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) actual = ConfigServiceV2Client.log_view_path(project, location, bucket, view) assert expected == actual @@ -11765,10 +8659,10 @@ def test_log_view_path(): def test_parse_log_view_path(): expected = { - "project": "winkle", - "location": "nautilus", - "bucket": "scallop", - "view": "abalone", + "project": "oyster", + "location": "nudibranch", + "bucket": "cuttlefish", + "view": "mussel", } path = ConfigServiceV2Client.log_view_path(**expected) @@ -11776,8 +8670,25 @@ def test_parse_log_view_path(): actual = ConfigServiceV2Client.parse_log_view_path(path) assert expected == actual 
+def test_settings_path(): + project = "winkle" + expected = "projects/{project}/settings".format(project=project, ) + actual = ConfigServiceV2Client.settings_path(project) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "project": "nautilus", + } + path = ConfigServiceV2Client.settings_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigServiceV2Client.parse_settings_path(path) + assert expected == actual + def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) actual = ConfigServiceV2Client.common_billing_account_path(billing_account) assert expected == actual @@ -11785,7 +8696,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "abalone", } path = ConfigServiceV2Client.common_billing_account_path(**expected) @@ -11794,7 +8705,7 @@ def test_parse_common_billing_account_path(): assert expected == actual def test_common_folder_path(): - folder = "whelk" + folder = "squid" expected = "folders/{folder}".format(folder=folder, ) actual = ConfigServiceV2Client.common_folder_path(folder) assert expected == actual @@ -11802,7 +8713,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "clam", } path = ConfigServiceV2Client.common_folder_path(**expected) @@ -11811,7 +8722,7 @@ def test_parse_common_folder_path(): assert expected == actual def test_common_organization_path(): - organization = "oyster" + organization = "whelk" expected = "organizations/{organization}".format(organization=organization, ) actual = ConfigServiceV2Client.common_organization_path(organization) assert expected == actual @@ -11819,7 +8730,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { 
- "organization": "nudibranch", + "organization": "octopus", } path = ConfigServiceV2Client.common_organization_path(**expected) @@ -11828,7 +8739,7 @@ def test_parse_common_organization_path(): assert expected == actual def test_common_project_path(): - project = "cuttlefish" + project = "oyster" expected = "projects/{project}".format(project=project, ) actual = ConfigServiceV2Client.common_project_path(project) assert expected == actual @@ -11836,7 +8747,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "nudibranch", } path = ConfigServiceV2Client.common_project_path(**expected) @@ -11845,8 +8756,8 @@ def test_parse_common_project_path(): assert expected == actual def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) actual = ConfigServiceV2Client.common_location_path(project, location) assert expected == actual @@ -11854,8 +8765,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "winkle", + "location": "nautilus", } path = ConfigServiceV2Client.common_location_path(**expected) @@ -11896,7 +8807,6 @@ async def test_transport_close_async(): def test_transport_close(): transports = { - "rest": "_session", "grpc": "_grpc_channel", } @@ -11912,7 +8822,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ - 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e477e51adcb8..a648ed79bc86 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -23,17 +23,10 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options @@ -89,7 +82,6 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), - (LoggingServiceV2Client, "rest"), ]) def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -102,16 +94,12 @@ def test_logging_service_v2_client_from_service_account_info(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.LoggingServiceV2GrpcTransport, "grpc"), (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (transports.LoggingServiceV2RestTransport, "rest"), ]) def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -128,7 +116,6 @@ def test_logging_service_v2_client_service_account_always_use_jwt(transport_clas 
@pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), - (LoggingServiceV2Client, "rest"), ]) def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -144,9 +131,6 @@ def test_logging_service_v2_client_from_service_account_file(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @@ -154,7 +138,6 @@ def test_logging_service_v2_client_get_transport_class(): transport = LoggingServiceV2Client.get_transport_class() available_transports = [ transports.LoggingServiceV2GrpcTransport, - transports.LoggingServiceV2RestTransport, ] assert transport in available_transports @@ -165,7 +148,6 @@ def test_logging_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest"), ]) @mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) @mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) @@ -285,8 +267,6 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", "true"), - 
(LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) @mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) @@ -424,7 +404,6 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest"), ]) def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -449,7 +428,6 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (LoggingServiceV2Client, transports.LoggingServiceV2RestTransport, "rest", None), ]) def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -2119,1210 +2097,6 @@ async def test_tail_log_entries_async_from_dict(): await test_tail_log_entries_async(request_type=dict) -@pytest.mark.parametrize("request_type", [ - logging.DeleteLogRequest, - dict, -]) -def test_delete_log_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'log_name': 'projects/sample1/logs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_log(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_log_rest_required_fields(request_type=logging.DeleteLogRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request_init["log_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["logName"] = 'log_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "logName" in jsonified_request - assert jsonified_request["logName"] == 'log_name_value' - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_log(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_log_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_log._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("logName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_log_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_delete_log") as pre: - pre.assert_not_called() - pb_message = logging.DeleteLogRequest.pb(logging.DeleteLogRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging.DeleteLogRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_log(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_log_rest_bad_request(transport: str = 'rest', request_type=logging.DeleteLogRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'log_name': 'projects/sample1/logs/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_log(request) - - -def test_delete_log_rest_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'log_name': 'projects/sample1/logs/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - log_name='log_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_log(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{log_name=projects/*/logs/*}" % client.transport._host, args[1]) - - -def test_delete_log_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_log( - logging.DeleteLogRequest(), - log_name='log_name_value', - ) - - -def test_delete_log_rest_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging.WriteLogEntriesRequest, - dict, -]) -def test_write_log_entries_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.WriteLogEntriesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.write_log_entries(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, logging.WriteLogEntriesResponse) - - -def test_write_log_entries_rest_required_fields(request_type=logging.WriteLogEntriesRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).write_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).write_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging.WriteLogEntriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.write_log_entries(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_write_log_entries_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.write_log_entries._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("entries", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_write_log_entries_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_write_log_entries") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_write_log_entries") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.WriteLogEntriesRequest.pb(logging.WriteLogEntriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging.WriteLogEntriesResponse.to_json(logging.WriteLogEntriesResponse()) - - request = logging.WriteLogEntriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.WriteLogEntriesResponse() - - client.write_log_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_write_log_entries_rest_bad_request(transport: str = 'rest', request_type=logging.WriteLogEntriesRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.write_log_entries(request) - - -def test_write_log_entries_rest_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging.WriteLogEntriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.WriteLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.write_log_entries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/entries:write" % client.transport._host, args[1]) - - -def test_write_log_entries_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.write_log_entries( - logging.WriteLogEntriesRequest(), - log_name='log_name_value', - resource=monitored_resource_pb2.MonitoredResource(type='type_value'), - labels={'key_value': 'value_value'}, - entries=[log_entry.LogEntry(log_name='log_name_value')], - ) - - -def test_write_log_entries_rest_error(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging.ListLogEntriesRequest, - dict, -]) -def test_list_log_entries_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_log_entries(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLogEntriesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_log_entries_rest_required_fields(request_type=logging.ListLogEntriesRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request_init["resource_names"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["resourceNames"] = 'resource_names_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_entries._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "resourceNames" in jsonified_request - assert jsonified_request["resourceNames"] == 'resource_names_value' - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging.ListLogEntriesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging.ListLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_log_entries(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_log_entries_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_log_entries._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("resourceNames", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_log_entries_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_log_entries") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_log_entries") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
logging.ListLogEntriesRequest.pb(logging.ListLogEntriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging.ListLogEntriesResponse.to_json(logging.ListLogEntriesResponse()) - - request = logging.ListLogEntriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.ListLogEntriesResponse() - - client.list_log_entries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_log_entries_rest_bad_request(transport: str = 'rest', request_type=logging.ListLogEntriesRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_log_entries(request) - - -def test_list_log_entries_rest_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging.ListLogEntriesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {} - - # get truthy value for each flattened field - mock_args = dict( - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogEntriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_log_entries(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/entries:list" % client.transport._host, args[1]) - - -def test_list_log_entries_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_log_entries( - logging.ListLogEntriesRequest(), - resource_names=['resource_names_value'], - filter='filter_value', - order_by='order_by_value', - ) - - -def test_list_log_entries_rest_pager(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - next_page_token='abc', - ), - logging.ListLogEntriesResponse( - entries=[], - next_page_token='def', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - ], - next_page_token='ghi', - ), - logging.ListLogEntriesResponse( - entries=[ - log_entry.LogEntry(), - log_entry.LogEntry(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging.ListLogEntriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {} - - pager = client.list_log_entries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, log_entry.LogEntry) - for i in results) - - pages = list(client.list_log_entries(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging.ListMonitoredResourceDescriptorsRequest, - dict, -]) -def test_list_monitored_resource_descriptors_rest(request_type): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListMonitoredResourceDescriptorsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_monitored_resource_descriptors(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_monitored_resource_descriptors_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_monitored_resource_descriptors") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_monitored_resource_descriptors") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.ListMonitoredResourceDescriptorsRequest.pb(logging.ListMonitoredResourceDescriptorsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = 
logging.ListMonitoredResourceDescriptorsResponse.to_json(logging.ListMonitoredResourceDescriptorsResponse()) - - request = logging.ListMonitoredResourceDescriptorsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.ListMonitoredResourceDescriptorsResponse() - - client.list_monitored_resource_descriptors(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_monitored_resource_descriptors_rest_bad_request(transport: str = 'rest', request_type=logging.ListMonitoredResourceDescriptorsRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_monitored_resource_descriptors(request) - - -def test_list_monitored_resource_descriptors_rest_pager(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='abc', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[], - next_page_token='def', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - next_page_token='ghi', - ), - logging.ListMonitoredResourceDescriptorsResponse( - resource_descriptors=[ - monitored_resource_pb2.MonitoredResourceDescriptor(), - monitored_resource_pb2.MonitoredResourceDescriptor(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging.ListMonitoredResourceDescriptorsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {} - - pager = client.list_monitored_resource_descriptors(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) - for i in results) - - pages = list(client.list_monitored_resource_descriptors(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging.ListLogsRequest, - dict, -]) -def test_list_logs_rest(request_type): - client = LoggingServiceV2Client( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_logs(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListLogsPager) - assert response.log_names == ['log_names_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_list_logs_rest_required_fields(request_type=logging.ListLogsRequest): - transport_class = transports.LoggingServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_logs._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_logs._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", "resource_names", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging.ListLogsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging.ListLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_logs(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_logs_rest_unset_required_fields(): - transport = transports.LoggingServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_logs._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", "resourceNames", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_logs_rest_interceptors(null_interceptor): - transport = transports.LoggingServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.LoggingServiceV2RestInterceptor(), - ) - client = LoggingServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "post_list_logs") as post, \ - mock.patch.object(transports.LoggingServiceV2RestInterceptor, "pre_list_logs") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging.ListLogsRequest.pb(logging.ListLogsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = logging.ListLogsResponse.to_json(logging.ListLogsResponse()) - - request = logging.ListLogsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging.ListLogsResponse() - - client.list_logs(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_logs_rest_bad_request(transport: str = 'rest', request_type=logging.ListLogsRequest): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_logs(request) - - -def test_list_logs_rest_flattened(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging.ListLogsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'sample1/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging.ListLogsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_logs(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=*/*}/logs" % client.transport._host, args[1]) - - -def test_list_logs_rest_flattened_error(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_logs( - logging.ListLogsRequest(), - parent='parent_value', - ) - - -def test_list_logs_rest_pager(transport: str = 'rest'): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging.ListLogsResponse( - log_names=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - logging.ListLogsResponse( - log_names=[], - next_page_token='def', - ), - logging.ListLogsResponse( - log_names=[ - str(), - ], - next_page_token='ghi', - ), - logging.ListLogsResponse( - log_names=[ - str(), - str(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging.ListLogsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'sample1/sample2'} - - pager = client.list_logs(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) - - pages = list(client.list_logs(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -def test_tail_log_entries_rest_unimplemented(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = logging.TailLogEntriesRequest() - requests = [request] - with pytest.raises(NotImplementedError): - client.tail_log_entries(requests) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( @@ -3401,7 +2175,6 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport, - transports.LoggingServiceV2RestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -3412,7 +2185,6 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", - "rest", ]) def test_transport_kind(transport_name): transport = LoggingServiceV2Client.get_transport_class(transport_name)( @@ -3547,7 +2319,6 @@ def test_logging_service_v2_transport_auth_adc(transport_class): [ transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport, - transports.LoggingServiceV2RestTransport, ], ) def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): @@ -3648,20 +2419,10 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) -def test_logging_service_v2_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.LoggingServiceV2RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( @@ -3671,14 +2432,11 @@ def test_logging_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", 
- "rest", ]) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( @@ -3688,42 +2446,8 @@ def test_logging_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com:8000' ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_logging_service_v2_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = LoggingServiceV2Client( - credentials=creds1, - transport=transport_name, - ) - client2 = LoggingServiceV2Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.delete_log._session - session2 = client2.transport.delete_log._session - assert session1 != session2 - session1 = client1.transport.write_log_entries._session - session2 = client2.transport.write_log_entries._session - assert session1 != session2 - session1 = client1.transport.list_log_entries._session - session2 = client2.transport.list_log_entries._session - assert session1 != session2 - session1 = client1.transport.list_monitored_resource_descriptors._session - session2 = client2.transport.list_monitored_resource_descriptors._session - assert session1 != session2 - session1 = client1.transport.list_logs._session - session2 = client2.transport.list_logs._session - assert session1 != session2 - session1 = client1.transport.tail_log_entries._session - session2 = client2.transport.tail_log_entries._session - assert session1 != session2 def test_logging_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -3973,7 +2697,6 @@ async def test_transport_close_async(): def test_transport_close(): transports = { - "rest": "_session", "grpc": "_grpc_channel", } @@ -3989,7 +2712,6 @@ def test_transport_close(): def 
test_client_ctx(): transports = [ - 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a28f2b1a2542..8045f5d8dc88 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -23,17 +23,10 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore @@ -87,7 +80,6 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), - (MetricsServiceV2Client, "rest"), ]) def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -100,16 +92,12 @@ def test_metrics_service_v2_client_from_service_account_info(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_class,transport_name", [ (transports.MetricsServiceV2GrpcTransport, "grpc"), (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - 
(transports.MetricsServiceV2RestTransport, "rest"), ]) def test_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: @@ -126,7 +114,6 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(transport_clas @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), - (MetricsServiceV2Client, "rest"), ]) def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() @@ -142,9 +129,6 @@ def test_metrics_service_v2_client_from_service_account_file(client_class, trans assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else - 'https://logging.googleapis.com' ) @@ -152,7 +136,6 @@ def test_metrics_service_v2_client_get_transport_class(): transport = MetricsServiceV2Client.get_transport_class() available_transports = [ transports.MetricsServiceV2GrpcTransport, - transports.MetricsServiceV2RestTransport, ] assert transport in available_transports @@ -163,7 +146,6 @@ def test_metrics_service_v2_client_get_transport_class(): @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest"), ]) @mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) @mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) @@ -283,8 +265,6 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, (MetricsServiceV2AsyncClient, 
transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", "true"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", "false"), ]) @mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) @mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) @@ -422,7 +402,6 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest"), ]) def test_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): # Check the case scopes are provided. @@ -447,7 +426,6 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport @pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), - (MetricsServiceV2Client, transports.MetricsServiceV2RestTransport, "rest", None), ]) def test_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): # Check the case credentials file is provided. 
@@ -996,6 +974,8 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1011,6 +991,8 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1052,6 +1034,8 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) @@ -1067,6 +1051,8 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1244,6 +1230,8 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1259,6 +1247,8 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.description == 'description_value' assert 
response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1300,6 +1290,8 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) @@ -1315,6 +1307,8 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1502,6 +1496,8 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, ) @@ -1517,6 +1513,8 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1558,6 +1556,8 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ name='name_value', description='description_value', filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, 
value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) @@ -1573,6 +1573,8 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ assert response.name == 'name_value' assert response.description == 'description_value' assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True assert response.value_extractor == 'value_extractor_value' assert response.version == logging_metrics.LogMetric.ApiVersion.V1 @@ -1963,1245 +1965,6 @@ async def test_delete_log_metric_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - logging_metrics.ListLogMetricsRequest, - dict, -]) -def test_list_log_metrics_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_log_metrics(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListLogMetricsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_log_metrics_rest_required_fields(request_type=logging_metrics.ListLogMetricsRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_metrics._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_log_metrics._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("page_size", "page_token", )) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.ListLogMetricsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.list_log_metrics(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_list_log_metrics_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.list_log_metrics._get_unset_required_fields({}) - assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_log_metrics_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_list_log_metrics") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_list_log_metrics") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
logging_metrics.ListLogMetricsRequest.pb(logging_metrics.ListLogMetricsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.ListLogMetricsResponse.to_json(logging_metrics.ListLogMetricsResponse()) - - request = logging_metrics.ListLogMetricsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.ListLogMetricsResponse() - - client.list_log_metrics(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_log_metrics_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.ListLogMetricsRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_log_metrics(request) - - -def test_list_log_metrics_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_metrics.ListLogMetricsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.ListLogMetricsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.list_log_metrics(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/metrics" % client.transport._host, args[1]) - - -def test_list_log_metrics_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_log_metrics( - logging_metrics.ListLogMetricsRequest(), - parent='parent_value', - ) - - -def test_list_log_metrics_rest_pager(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- #with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - next_page_token='abc', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[], - next_page_token='def', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - ], - next_page_token='ghi', - ), - logging_metrics.ListLogMetricsResponse( - metrics=[ - logging_metrics.LogMetric(), - logging_metrics.LogMetric(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(logging_metrics.ListLogMetricsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode('UTF-8') - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {'parent': 'projects/sample1'} - - pager = client.list_log_metrics(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, logging_metrics.LogMetric) - for i in results) - - pages = list(client.list_log_metrics(request=sample_request).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.GetLogMetricRequest, - dict, -]) -def test_get_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_log_metric(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_get_log_metric_rest_required_fields(request_type=logging_metrics.GetLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["metric_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["metricName"] = 'metric_name_value' - - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "metricName" in jsonified_request - assert jsonified_request["metricName"] == 'metric_name_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "get", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_get_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.get_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("metricName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_get_log_metric") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_get_log_metric") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.GetLogMetricRequest.pb(logging_metrics.GetLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) - - request = logging_metrics.GetLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.LogMetric() - - client.get_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.GetLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_log_metric(request) - - -def test_get_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_metrics.LogMetric() - - # get arguments that satisfy an http rule for this method - sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - metric_name='metric_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.get_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) - - -def test_get_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_log_metric( - logging_metrics.GetLogMetricRequest(), - metric_name='metric_name_value', - ) - - -def test_get_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.CreateLogMetricRequest, - dict, -]) -def test_create_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_log_metric(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_create_log_metric_rest_required_fields(request_type=logging_metrics.CreateLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # 
verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == 'parent_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "post", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.create_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_create_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.create_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent", "metric", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def 
test_create_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_create_log_metric") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_create_log_metric") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.CreateLogMetricRequest.pb(logging_metrics.CreateLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) - - request = logging_metrics.CreateLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.LogMetric() - - client.create_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.CreateLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 
'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_log_metric(request) - - -def test_create_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_metrics.LogMetric() - - # get arguments that satisfy an http rule for this method - sample_request = {'parent': 'projects/sample1'} - - # get truthy value for each flattened field - mock_args = dict( - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.create_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{parent=projects/*}/metrics" % client.transport._host, args[1]) - - -def test_create_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_log_metric( - logging_metrics.CreateLogMetricRequest(), - parent='parent_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -def test_create_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.UpdateLogMetricRequest, - dict, -]) -def test_update_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request_init["metric"] = {'name': 'name_value', 'description': 'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_log_metric(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, logging_metrics.LogMetric) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.filter == 'filter_value' - assert response.value_extractor == 'value_extractor_value' - assert response.version == logging_metrics.LogMetric.ApiVersion.V1 - - -def test_update_log_metric_rest_required_fields(request_type=logging_metrics.UpdateLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["metric_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["metricName"] = 'metric_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_log_metric._get_unset_required_fields(jsonified_request) - 
jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "metricName" in jsonified_request - assert jsonified_request["metricName"] == 'metric_name_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = logging_metrics.LogMetric() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "put", - 'query_params': pb_request, - } - transcode_result['body'] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.update_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_update_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.update_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("metricName", "metric", ))) - - 
-@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "post_update_log_metric") as post, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_update_log_metric") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = logging_metrics.UpdateLogMetricRequest.pb(logging_metrics.UpdateLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = logging_metrics.LogMetric.to_json(logging_metrics.LogMetric()) - - request = logging_metrics.UpdateLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = logging_metrics.LogMetric() - - client.update_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.UpdateLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request_init["metric"] = {'name': 'name_value', 'description': 
'description_value', 'filter': 'filter_value', 'metric_descriptor': {'name': 'name_value', 'type': 'type_value', 'labels': [{'key': 'key_value', 'value_type': 1, 'description': 'description_value'}], 'metric_kind': 1, 'value_type': 1, 'unit': 'unit_value', 'description': 'description_value', 'display_name': 'display_name_value', 'metadata': {'launch_stage': 6, 'sample_period': {'seconds': 751, 'nanos': 543}, 'ingest_delay': {}}, 'launch_stage': 6, 'monitored_resource_types': ['monitored_resource_types_value1', 'monitored_resource_types_value2']}, 'value_extractor': 'value_extractor_value', 'label_extractors': {}, 'bucket_options': {'linear_buckets': {'num_finite_buckets': 1918, 'width': 0.544, 'offset': 0.647}, 'exponential_buckets': {'num_finite_buckets': 1918, 'growth_factor': 0.1401, 'scale': 0.52}, 'explicit_buckets': {'bounds': [0.652, 0.653]}}, 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'version': 1} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_log_metric(request) - - -def test_update_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = logging_metrics.LogMetric() - - # get arguments that satisfy an http rule for this method - sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = logging_metrics.LogMetric.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.update_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) - - -def test_update_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_log_metric( - logging_metrics.UpdateLogMetricRequest(), - metric_name='metric_name_value', - metric=logging_metrics.LogMetric(name='name_value'), - ) - - -def test_update_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - logging_metrics.DeleteLogMetricRequest, - dict, -]) -def test_delete_log_metric_rest(request_type): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_log_metric(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_log_metric_rest_required_fields(request_type=logging_metrics.DeleteLogMetricRequest): - transport_class = transports.MetricsServiceV2RestTransport - - request_init = {} - request_init["metric_name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["metricName"] = 'metric_name_value' - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_log_metric._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "metricName" in jsonified_request - assert jsonified_request["metricName"] == 'metric_name_value' - - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = None - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, 'request') as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, 'transcode') as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - 'uri': 'v1/sample_method', - 'method': "delete", - 'query_params': pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.delete_log_metric(request) - - expected_params = [ - ] - actual_params = req.call_args.kwargs['params'] - assert expected_params == actual_params - - -def test_delete_log_metric_rest_unset_required_fields(): - transport = transports.MetricsServiceV2RestTransport(credentials=ga_credentials.AnonymousCredentials) - - unset_fields = transport.delete_log_metric._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("metricName", ))) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_log_metric_rest_interceptors(null_interceptor): - transport = transports.MetricsServiceV2RestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.MetricsServiceV2RestInterceptor(), - ) - client = MetricsServiceV2Client(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.MetricsServiceV2RestInterceptor, "pre_delete_log_metric") as pre: - pre.assert_not_called() - pb_message = logging_metrics.DeleteLogMetricRequest.pb(logging_metrics.DeleteLogMetricRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = logging_metrics.DeleteLogMetricRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = 
request, metadata - - client.delete_log_metric(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_log_metric_rest_bad_request(transport: str = 'rest', request_type=logging_metrics.DeleteLogMetricRequest): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'metric_name': 'projects/sample1/metrics/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_log_metric(request) - - -def test_delete_log_metric_rest_flattened(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {'metric_name': 'projects/sample1/metrics/sample2'} - - # get truthy value for each flattened field - mock_args = dict( - metric_name='metric_name_value', - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - client.delete_log_metric(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v2/{metric_name=projects/*/metrics/*}" % client.transport._host, args[1]) - - -def test_delete_log_metric_rest_flattened_error(transport: str = 'rest'): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_log_metric( - logging_metrics.DeleteLogMetricRequest(), - metric_name='metric_name_value', - ) - - -def test_delete_log_metric_rest_error(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( @@ -3280,7 +2043,6 @@ def test_transport_get_channel(): @pytest.mark.parametrize("transport_class", [ transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport, - transports.MetricsServiceV2RestTransport, ]) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
@@ -3291,7 +2053,6 @@ def test_transport_adc(transport_class): @pytest.mark.parametrize("transport_name", [ "grpc", - "rest", ]) def test_transport_kind(transport_name): transport = MetricsServiceV2Client.get_transport_class(transport_name)( @@ -3425,7 +2186,6 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): [ transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport, - transports.MetricsServiceV2RestTransport, ], ) def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): @@ -3526,20 +2286,10 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( private_key=expected_key ) -def test_metrics_service_v2_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.MetricsServiceV2RestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( @@ -3549,14 +2299,11 @@ def test_metrics_service_v2_host_no_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com' ) @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", - "rest", ]) def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( @@ -3566,39 +2313,8 @@ def test_metrics_service_v2_host_with_port(transport_name): ) assert client.transport._host == ( 'logging.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://logging.googleapis.com:8000' ) 
-@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_metrics_service_v2_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = MetricsServiceV2Client( - credentials=creds1, - transport=transport_name, - ) - client2 = MetricsServiceV2Client( - credentials=creds2, - transport=transport_name, - ) - session1 = client1.transport.list_log_metrics._session - session2 = client2.transport.list_log_metrics._session - assert session1 != session2 - session1 = client1.transport.get_log_metric._session - session2 = client2.transport.get_log_metric._session - assert session1 != session2 - session1 = client1.transport.create_log_metric._session - session2 = client2.transport.create_log_metric._session - assert session1 != session2 - session1 = client1.transport.update_log_metric._session - session2 = client2.transport.update_log_metric._session - assert session1 != session2 - session1 = client1.transport.delete_log_metric._session - session2 = client2.transport.delete_log_metric._session - assert session1 != session2 def test_metrics_service_v2_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) @@ -3848,7 +2564,6 @@ async def test_transport_close_async(): def test_transport_close(): transports = { - "rest": "_session", "grpc": "_grpc_channel", } @@ -3864,7 +2579,6 @@ def test_transport_close(): def test_client_ctx(): transports = [ - 'rest', 'grpc', ] for transport in transports: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 71f3599d0ace..92a9215362b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -27,17 
+27,26 @@ from google.cloud.redis_v1.types.cloud_redis import FailoverInstanceRequest from google.cloud.redis_v1.types.cloud_redis import GcsDestination from google.cloud.redis_v1.types.cloud_redis import GcsSource +from google.cloud.redis_v1.types.cloud_redis import GetInstanceAuthStringRequest from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest from google.cloud.redis_v1.types.cloud_redis import ImportInstanceRequest from google.cloud.redis_v1.types.cloud_redis import InputConfig from google.cloud.redis_v1.types.cloud_redis import Instance +from google.cloud.redis_v1.types.cloud_redis import InstanceAuthString from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse from google.cloud.redis_v1.types.cloud_redis import LocationMetadata +from google.cloud.redis_v1.types.cloud_redis import MaintenancePolicy +from google.cloud.redis_v1.types.cloud_redis import MaintenanceSchedule +from google.cloud.redis_v1.types.cloud_redis import NodeInfo from google.cloud.redis_v1.types.cloud_redis import OperationMetadata from google.cloud.redis_v1.types.cloud_redis import OutputConfig +from google.cloud.redis_v1.types.cloud_redis import PersistenceConfig +from google.cloud.redis_v1.types.cloud_redis import RescheduleMaintenanceRequest +from google.cloud.redis_v1.types.cloud_redis import TlsCertificate from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest from google.cloud.redis_v1.types.cloud_redis import UpgradeInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow from google.cloud.redis_v1.types.cloud_redis import ZoneMetadata __all__ = ('CloudRedisClient', @@ -48,16 +57,25 @@ 'FailoverInstanceRequest', 'GcsDestination', 'GcsSource', + 'GetInstanceAuthStringRequest', 'GetInstanceRequest', 'ImportInstanceRequest', 'InputConfig', 'Instance', + 'InstanceAuthString', 'ListInstancesRequest', 'ListInstancesResponse', 
'LocationMetadata', + 'MaintenancePolicy', + 'MaintenanceSchedule', + 'NodeInfo', 'OperationMetadata', 'OutputConfig', + 'PersistenceConfig', + 'RescheduleMaintenanceRequest', + 'TlsCertificate', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', + 'WeeklyMaintenanceWindow', 'ZoneMetadata', ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index a3273e7a5f4d..0d4ccb8a3510 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -27,17 +27,26 @@ from .types.cloud_redis import FailoverInstanceRequest from .types.cloud_redis import GcsDestination from .types.cloud_redis import GcsSource +from .types.cloud_redis import GetInstanceAuthStringRequest from .types.cloud_redis import GetInstanceRequest from .types.cloud_redis import ImportInstanceRequest from .types.cloud_redis import InputConfig from .types.cloud_redis import Instance +from .types.cloud_redis import InstanceAuthString from .types.cloud_redis import ListInstancesRequest from .types.cloud_redis import ListInstancesResponse from .types.cloud_redis import LocationMetadata +from .types.cloud_redis import MaintenancePolicy +from .types.cloud_redis import MaintenanceSchedule +from .types.cloud_redis import NodeInfo from .types.cloud_redis import OperationMetadata from .types.cloud_redis import OutputConfig +from .types.cloud_redis import PersistenceConfig +from .types.cloud_redis import RescheduleMaintenanceRequest +from .types.cloud_redis import TlsCertificate from .types.cloud_redis import UpdateInstanceRequest from .types.cloud_redis import UpgradeInstanceRequest +from .types.cloud_redis import WeeklyMaintenanceWindow from .types.cloud_redis import ZoneMetadata __all__ = ( @@ -49,16 +58,25 @@ 'FailoverInstanceRequest', 
'GcsDestination', 'GcsSource', +'GetInstanceAuthStringRequest', 'GetInstanceRequest', 'ImportInstanceRequest', 'InputConfig', 'Instance', +'InstanceAuthString', 'ListInstancesRequest', 'ListInstancesResponse', 'LocationMetadata', +'MaintenancePolicy', +'MaintenanceSchedule', +'NodeInfo', 'OperationMetadata', 'OutputConfig', +'PersistenceConfig', +'RescheduleMaintenanceRequest', +'TlsCertificate', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', +'WeeklyMaintenanceWindow', 'ZoneMetadata', ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json index 804956f47760..202306de2ca8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_metadata.json @@ -35,6 +35,11 @@ "get_instance" ] }, + "GetInstanceAuthString": { + "methods": [ + "get_instance_auth_string" + ] + }, "ImportInstance": { "methods": [ "import_instance" @@ -45,6 +50,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -85,6 +95,11 @@ "get_instance" ] }, + "GetInstanceAuthString": { + "methods": [ + "get_instance_auth_string" + ] + }, "ImportInstance": { "methods": [ "import_instance" @@ -95,6 +110,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" @@ -135,6 +155,11 @@ "get_instance" ] }, + "GetInstanceAuthString": { + "methods": [ + "get_instance_auth_string" + ] + }, "ImportInstance": { "methods": [ "import_instance" @@ -145,6 +170,11 @@ "list_instances" ] }, + "RescheduleMaintenance": { + "methods": [ + "reschedule_maintenance" + ] + }, "UpdateInstance": { "methods": [ "update_instance" 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 5babf20b1037..dab7a5457086 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -391,7 +391,7 @@ async def sample_get_instance(): Returns: google.cloud.redis_v1.types.Instance: - A Google Cloud Redis instance. + A Memorystore for Redis instance. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -435,6 +435,109 @@ async def sample_get_instance(): # Done; return the response. return response + async def get_instance_auth_string(self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis.InstanceAuthString: + r"""Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.GetInstanceAuthStringRequest, dict]]): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.types.InstanceAuthString: + Instance AUTH string details. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.GetInstanceAuthStringRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_instance_auth_string, + default_timeout=600.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_instance(self, request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, *, @@ -540,8 +643,8 @@ async def sample_create_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -663,6 +766,7 @@ async def sample_update_instance(): - ``labels`` - ``memorySizeGb`` - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -685,8 +789,8 @@ async def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. 
+ :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -814,8 +918,8 @@ async def sample_upgrade_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -953,8 +1057,8 @@ async def sample_import_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1089,8 +1193,8 @@ async def sample_export_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1154,7 +1258,7 @@ async def failover_instance(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation_async.AsyncOperation: - r"""Initiates a failover of the master node to current + r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1219,8 +1323,8 @@ async def sample_failover_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. 
@@ -1400,6 +1504,146 @@ async def sample_delete_instance(): # Done; return the response. return response + async def reschedule_maintenance(self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Reschedule maintenance for a given instance in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.RescheduleMaintenanceRequest, dict]]): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + name (:class:`str`): + Required. 
Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (:class:`google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType`): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC + 3339 format, for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = cloud_redis.RescheduleMaintenanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reschedule_maintenance, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "CloudRedisAsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 5f804c725a7c..9c4721889f56 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -585,7 +585,7 @@ def sample_get_instance(): Returns: google.cloud.redis_v1.types.Instance: - A Google Cloud Redis instance. + A Memorystore for Redis instance. """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -629,6 +629,109 @@ def sample_get_instance(): # Done; return the response. return response + def get_instance_auth_string(self, + request: Optional[Union[cloud_redis.GetInstanceAuthStringRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloud_redis.InstanceAuthString: + r"""Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.GetInstanceAuthStringRequest, dict]): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.redis_v1.types.InstanceAuthString: + Instance AUTH string details. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.GetInstanceAuthStringRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloud_redis.GetInstanceAuthStringRequest): + request = cloud_redis.GetInstanceAuthStringRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance_auth_string] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_instance(self, request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, *, @@ -734,8 +837,8 @@ def sample_create_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -857,6 +960,7 @@ def sample_update_instance(): - ``labels`` - ``memorySizeGb`` - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -879,8 +983,8 @@ def sample_update_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. 
@@ -1008,8 +1112,8 @@ def sample_upgrade_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1147,8 +1251,8 @@ def sample_import_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1283,8 +1387,8 @@ def sample_export_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1348,7 +1452,7 @@ def failover_instance(self, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> operation.Operation: - r"""Initiates a failover of the master node to current + r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -1413,8 +1517,8 @@ def sample_failover_instance(): An object representing a long-running operation. The result type for the operation will be - :class:`google.cloud.redis_v1.types.Instance` A Google - Cloud Redis instance. + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. """ # Create or coerce a protobuf request object. @@ -1594,6 +1698,146 @@ def sample_delete_instance(): # Done; return the response. 
return response + def reschedule_maintenance(self, + request: Optional[Union[cloud_redis.RescheduleMaintenanceRequest, dict]] = None, + *, + name: Optional[str] = None, + reschedule_type: Optional[cloud_redis.RescheduleMaintenanceRequest.RescheduleType] = None, + schedule_time: Optional[timestamp_pb2.Timestamp] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Reschedule maintenance for a given instance in a + given project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.RescheduleMaintenanceRequest, dict]): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + reschedule_type (google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set + up schedule_time as well. + + This corresponds to the ``reschedule_type`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC + 3339 format, for example ``2012-11-15T16:19:00.094Z``. + + This corresponds to the ``schedule_time`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, reschedule_type, schedule_time]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a cloud_redis.RescheduleMaintenanceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloud_redis.RescheduleMaintenanceRequest): + request = cloud_redis.RescheduleMaintenanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if reschedule_type is not None: + request.reschedule_type = reschedule_type + if schedule_time is not None: + request.schedule_time = schedule_time + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CloudRedisClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 19c96cc87f8b..c99e4e883ef1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -125,6 +125,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_instance_auth_string: gapic_v1.method.wrap_method( + self.get_instance_auth_string, + default_timeout=600.0, + client_info=client_info, + ), self.create_instance: gapic_v1.method.wrap_method( self.create_instance, default_timeout=600.0, @@ -160,6 +165,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.reschedule_maintenance: gapic_v1.method.wrap_method( + self.reschedule_maintenance, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -194,6 +204,15 @@ def get_instance(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + Union[ + cloud_redis.InstanceAuthString, + Awaitable[cloud_redis.InstanceAuthString] + ]]: + raise NotImplementedError() + @property def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], @@ -257,6 +276,15 @@ def delete_instance(self) -> Callable[ ]]: raise NotImplementedError() + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + @property 
def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 0c162e8bb15b..c53d62f31dac 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -325,6 +325,35 @@ def get_instance(self) -> Callable[ ) return self._stubs['get_instance'] + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + cloud_redis.InstanceAuthString]: + r"""Return a callable for the get instance auth string method over gRPC. + + Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + Returns: + Callable[[~.GetInstanceAuthStringRequest], + ~.InstanceAuthString]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_instance_auth_string' not in self._stubs: + self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, + response_deserializer=cloud_redis.InstanceAuthString.deserialize, + ) + return self._stubs['get_instance_auth_string'] + @property def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], @@ -493,7 +522,7 @@ def failover_instance(self) -> Callable[ operations_pb2.Operation]: r"""Return a callable for the failover instance method over gRPC. - Initiates a failover of the master node to current + Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -542,6 +571,33 @@ def delete_instance(self) -> Callable[ ) return self._stubs['delete_instance'] + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedule maintenance for a given instance in a + given project and location. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'reschedule_maintenance' not in self._stubs: + self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['reschedule_maintenance'] + def close(self): self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 082123d4651e..f93b258ed99e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -328,6 +328,35 @@ def get_instance(self) -> Callable[ ) return self._stubs['get_instance'] + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + Awaitable[cloud_redis.InstanceAuthString]]: + r"""Return a callable for the get instance auth string method over gRPC. + + Gets the AUTH string for a Redis instance. If AUTH is + not enabled for the instance the response will be empty. + This information is not included in the details returned + to GetInstance. + + Returns: + Callable[[~.GetInstanceAuthStringRequest], + Awaitable[~.InstanceAuthString]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_instance_auth_string' not in self._stubs: + self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', + request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, + response_deserializer=cloud_redis.InstanceAuthString.deserialize, + ) + return self._stubs['get_instance_auth_string'] + @property def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], @@ -496,7 +525,7 @@ def failover_instance(self) -> Callable[ Awaitable[operations_pb2.Operation]]: r"""Return a callable for the failover instance method over gRPC. - Initiates a failover of the master node to current + Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud Memorystore for Redis instance. @@ -545,6 +574,33 @@ def delete_instance(self) -> Callable[ ) return self._stubs['delete_instance'] + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the reschedule maintenance method over gRPC. + + Reschedule maintenance for a given instance in a + given project and location. + + Returns: + Callable[[~.RescheduleMaintenanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'reschedule_maintenance' not in self._stubs: + self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', + request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['reschedule_maintenance'] + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 0908104c606c..b79fd4b0c979 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -108,6 +108,14 @@ def post_get_instance(self, response): logging.log(f"Received response: {response}") return response + def pre_get_instance_auth_string(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance_auth_string(self, response): + logging.log(f"Received response: {response}") + return response + def pre_import_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -124,6 +132,14 @@ def post_list_instances(self, response): logging.log(f"Received response: {response}") return response + def pre_reschedule_maintenance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_reschedule_maintenance(self, response): + logging.log(f"Received response: {response}") + return response + def pre_update_instance(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -220,6 +236,22 @@ def 
pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Se def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance_auth_string + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + """Post-rpc interceptor for get_instance_auth_string + Override in a subclass to manipulate the response after it is returned by the CloudRedis server but before it is returned to user code. @@ -252,6 +284,22 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + Override in a subclass to manipulate the response after it is returned by the CloudRedis server but before it is returned to user code. @@ -806,7 +854,7 @@ def __call__(self, Returns: ~.cloud_redis.Instance: - A Google Cloud Redis instance. + A Memorystore for Redis instance. """ http_options: List[Dict[str, str]] = [{ @@ -852,6 +900,83 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp + class _GetInstanceAuthString(CloudRedisRestStub): + def __hash__(self): + return hash("GetInstanceAuthString") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.GetInstanceAuthStringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.InstanceAuthString: + r"""Call the get instance auth string method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceAuthStringRequest): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis.InstanceAuthString: + Instance AUTH string details. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}/authString', + }, + ] + request, metadata = self._interceptor.pre_get_instance_auth_string(request, metadata) + pb_request = cloud_redis.GetInstanceAuthStringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis.InstanceAuthString() + pb_resp = cloud_redis.InstanceAuthString.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance_auth_string(resp) + return resp + class _ImportInstance(CloudRedisRestStub): def __hash__(self): return hash("ImportInstance") @@ -1018,6 +1143,93 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp + class _RescheduleMaintenance(CloudRedisRestStub): + def __hash__(self): + return hash("RescheduleMaintenance") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + def __call__(self, + request: cloud_redis.RescheduleMaintenanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. + + Args: + request (~.cloud_redis.RescheduleMaintenanceRequest): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance', + 'body': '*', + }, + ] + request, metadata = self._interceptor.pre_reschedule_maintenance(request, metadata) + pb_request = cloud_redis.RescheduleMaintenanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + including_default_value_fields=False, + use_integers_for_enums=False + ) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + including_default_value_fields=False, + use_integers_for_enums=False, + )) + query_params.update(self._get_unset_required_fields(query_params)) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reschedule_maintenance(resp) + return resp + class _UpdateInstance(CloudRedisRestStub): def __hash__(self): return hash("UpdateInstance") @@ -1232,6 +1444,14 @@ def get_instance(self) -> Callable[ # In C++ this would require a dynamic_cast return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + cloud_redis.InstanceAuthString]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + @property def import_instance(self) -> Callable[ [cloud_redis.ImportInstanceRequest], @@ -1248,6 +1468,14 @@ def list_instances(self) -> Callable[ # In C++ this would require a dynamic_cast return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + @property def update_instance(self) -> Callable[ [cloud_redis.UpdateInstanceRequest], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index f616df1f68c0..a0b7fdcdc4a1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -20,17 +20,26 @@ FailoverInstanceRequest, GcsDestination, GcsSource, + GetInstanceAuthStringRequest, GetInstanceRequest, ImportInstanceRequest, InputConfig, Instance, + InstanceAuthString, ListInstancesRequest, ListInstancesResponse, LocationMetadata, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, OperationMetadata, OutputConfig, + PersistenceConfig, + RescheduleMaintenanceRequest, + TlsCertificate, UpdateInstanceRequest, UpgradeInstanceRequest, + WeeklyMaintenanceWindow, ZoneMetadata, ) @@ -41,16 +50,25 @@ 'FailoverInstanceRequest', 'GcsDestination', 'GcsSource', + 'GetInstanceAuthStringRequest', 'GetInstanceRequest', 'ImportInstanceRequest', 'InputConfig', 'Instance', + 'InstanceAuthString', 'ListInstancesRequest', 'ListInstancesResponse', 'LocationMetadata', + 'MaintenancePolicy', + 'MaintenanceSchedule', + 'NodeInfo', 'OperationMetadata', 'OutputConfig', + 'PersistenceConfig', + 'RescheduleMaintenanceRequest', + 'TlsCertificate', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', + 'WeeklyMaintenanceWindow', 'ZoneMetadata', ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index d0e12a5796f8..234065b45bae 100755 
--- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -19,17 +19,28 @@ import proto # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore __protobuf__ = proto.module( package='google.cloud.redis.v1', manifest={ + 'NodeInfo', 'Instance', + 'PersistenceConfig', + 'RescheduleMaintenanceRequest', + 'MaintenancePolicy', + 'WeeklyMaintenanceWindow', + 'MaintenanceSchedule', 'ListInstancesRequest', 'ListInstancesResponse', 'GetInstanceRequest', + 'GetInstanceAuthStringRequest', + 'InstanceAuthString', 'CreateInstanceRequest', 'UpdateInstanceRequest', 'UpgradeInstanceRequest', @@ -44,12 +55,34 @@ 'OperationMetadata', 'LocationMetadata', 'ZoneMetadata', + 'TlsCertificate', }, ) +class NodeInfo(proto.Message): + r"""Node specific properties. + + Attributes: + id (str): + Output only. Node identifying string. e.g. + 'node-0', 'node-1' + zone (str): + Output only. Location of the node. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + class Instance(proto.Message): - r"""A Google Cloud Redis instance. + r"""A Memorystore for Redis instance. Attributes: name (str): @@ -73,19 +106,21 @@ class Instance(proto.Message): Resource labels to represent user provided metadata location_id (str): - Optional. The zone where the instance will be provisioned. - If not provided, the service will choose a zone for the - instance. For STANDARD_HA tier, instances will be created - across two zones for protection against zonal failures. 
If - [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] - is also provided, it must be different from - [location_id][google.cloud.redis.v1.Instance.location_id]. + Optional. The zone where the instance will be + provisioned. If not provided, the service will + choose a zone from the specified region for the + instance. For standard tier, additional nodes + will be added across multiple zones for + protection against zonal failures. If specified, + at least one node will be provisioned in this + zone. alternative_location_id (str): - Optional. Only applicable to STANDARD_HA tier which protects - the instance against zonal failures by provisioning it - across two zones. If provided, it must be a different zone - from the one provided in - [location_id][google.cloud.redis.v1.Instance.location_id]. + Optional. If specified, at least one node will be + provisioned in this zone in addition to the zone specified + in location_id. Only applicable to standard tier. If + provided, it must be a different zone from the one provided + in [location_id]. Additional nodes beyond the first 2 will + be placed in zones selected by the service. redis_version (str): Optional. The version of Redis software. If not provided, latest supported version will be used. Currently, the @@ -94,14 +129,24 @@ class Instance(proto.Message): - ``REDIS_3_2`` for Redis 3.2 compatibility - ``REDIS_4_0`` for Redis 4.0 compatibility (default) - ``REDIS_5_0`` for Redis 5.0 compatibility + - ``REDIS_6_X`` for Redis 6.x compatibility reserved_ip_range (str): - Optional. The CIDR range of internal - addresses that are reserved for this instance. - If not provided, the service will choose an - unused /29 block, for example, 10.0.0.0/29 or - 192.168.0.0/29. Ranges must be unique and - non-overlapping with existing subnets in an - authorized network. + Optional. For DIRECT_PEERING mode, the CIDR range of + internal addresses that are reserved for this instance. 
+ Range must be unique and non-overlapping with existing + subnets in an authorized network. For PRIVATE_SERVICE_ACCESS + mode, the name of one allocated IP address ranges associated + with this private service access connection. If not + provided, the service will choose an unused /29 block, for + example, 10.0.0.0/29 or 192.168.0.0/29. For + READ_REPLICAS_ENABLED the default block size is /28. + secondary_ip_range (str): + Optional. Additional IP range for node placement. Required + when enabling read replicas on an existing instance. For + DIRECT_PEERING mode value must be a CIDR range of size /28, + or "auto". For PRIVATE_SERVICE_ACCESS mode value must be the + name of an allocated address range associated with the + private service access connection, or "auto". host (str): Output only. Hostname or IP address of the exposed Redis endpoint used by clients to @@ -110,15 +155,10 @@ class Instance(proto.Message): Output only. The port number of the exposed Redis endpoint. current_location_id (str): - Output only. The current zone where the Redis endpoint is - placed. For Basic Tier instances, this will always be the - same as the - [location_id][google.cloud.redis.v1.Instance.location_id] - provided by the user at creation time. For Standard Tier - instances, this can be either - [location_id][google.cloud.redis.v1.Instance.location_id] or - [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] - and can change after a failover event. + Output only. The current zone where the Redis primary node + is located. In basic tier, this will always be the same as + [location_id]. In standard tier, this can be the zone of any + node in the instance. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the instance was created. @@ -167,6 +207,64 @@ class Instance(proto.Message): connect_mode (google.cloud.redis_v1.types.Instance.ConnectMode): Optional. The network connect mode of the Redis instance. 
If not provided, the connect mode defaults to DIRECT_PEERING. + auth_enabled (bool): + Optional. Indicates whether OSS Redis AUTH is + enabled for the instance. If set to "true" AUTH + is enabled on the instance. Default value is + "false" meaning AUTH is disabled. + server_ca_certs (MutableSequence[google.cloud.redis_v1.types.TlsCertificate]): + Output only. List of server CA certificates + for the instance. + transit_encryption_mode (google.cloud.redis_v1.types.Instance.TransitEncryptionMode): + Optional. The TLS mode of the Redis instance. + If not provided, TLS is disabled for the + instance. + maintenance_policy (google.cloud.redis_v1.types.MaintenancePolicy): + Optional. The maintenance policy for the + instance. If not provided, maintenance events + can be performed at any time. + maintenance_schedule (google.cloud.redis_v1.types.MaintenanceSchedule): + Output only. Date and time of upcoming + maintenance events which have been scheduled. + replica_count (int): + Optional. The number of replica nodes. The valid range for + the Standard Tier with read replicas enabled is [1-5] and + defaults to 2. If read replicas are not enabled for a + Standard Tier instance, the only valid value is 1 and the + default is 1. The valid value for basic tier is 0 and the + default is also 0. + nodes (MutableSequence[google.cloud.redis_v1.types.NodeInfo]): + Output only. Info per node. + read_endpoint (str): + Output only. Hostname or IP address of the + exposed readonly Redis endpoint. Standard tier + only. Targets all healthy replica nodes in + instance. Replication is asynchronous and + replica nodes will exhibit some lag behind the + primary. Write requests must target 'host'. + read_endpoint_port (int): + Output only. The port number of the exposed + readonly redis endpoint. Standard tier only. + Write requests should target 'port'. + read_replicas_mode (google.cloud.redis_v1.types.Instance.ReadReplicasMode): + Optional. Read replicas mode for the instance. 
Defaults to + READ_REPLICAS_DISABLED. + customer_managed_key (str): + Optional. The KMS key reference that the + customer provides when trying to create the + instance. + persistence_config (google.cloud.redis_v1.types.PersistenceConfig): + Optional. Persistence configuration + parameters + suspension_reasons (MutableSequence[google.cloud.redis_v1.types.Instance.SuspensionReason]): + Optional. reasons that causes instance in + "SUSPENDED" state. + maintenance_version (str): + Optional. The self service update maintenance version. The + version is date based such as "20210712_00_00". + available_maintenance_versions (MutableSequence[str]): + Optional. The available maintenance versions + that an instance could update to. """ class State(proto.Enum): r"""Represents the different states of a Redis instance. @@ -243,6 +341,56 @@ class ConnectMode(proto.Enum): DIRECT_PEERING = 1 PRIVATE_SERVICE_ACCESS = 2 + class TransitEncryptionMode(proto.Enum): + r"""Available TLS modes. + + Values: + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED (0): + Not set. + SERVER_AUTHENTICATION (1): + Client to Server traffic encryption enabled + with server authentication. + DISABLED (2): + TLS is disabled for the instance. + """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 + SERVER_AUTHENTICATION = 1 + DISABLED = 2 + + class ReadReplicasMode(proto.Enum): + r"""Read replicas mode. + + Values: + READ_REPLICAS_MODE_UNSPECIFIED (0): + If not set, Memorystore Redis backend will default to + READ_REPLICAS_DISABLED. + READ_REPLICAS_DISABLED (1): + If disabled, read endpoint will not be + provided and the instance cannot scale up or + down the number of replicas. + READ_REPLICAS_ENABLED (2): + If enabled, read endpoint will be provided + and the instance can scale up and down the + number of replicas. Not valid for basic tier. 
+ """ + READ_REPLICAS_MODE_UNSPECIFIED = 0 + READ_REPLICAS_DISABLED = 1 + READ_REPLICAS_ENABLED = 2 + + class SuspensionReason(proto.Enum): + r"""Possible reasons for the instance to be in a "SUSPENDED" + state. + + Values: + SUSPENSION_REASON_UNSPECIFIED (0): + Not set. + CUSTOMER_MANAGED_KEY_ISSUE (1): + Something wrong with the CMEK key provided by + customer. + """ + SUSPENSION_REASON_UNSPECIFIED = 0 + CUSTOMER_MANAGED_KEY_ISSUE = 1 + name: str = proto.Field( proto.STRING, number=1, @@ -272,6 +420,10 @@ class ConnectMode(proto.Enum): proto.STRING, number=9, ) + secondary_ip_range: str = proto.Field( + proto.STRING, + number=30, + ) host: str = proto.Field( proto.STRING, number=10, @@ -325,6 +477,328 @@ class ConnectMode(proto.Enum): number=22, enum=ConnectMode, ) + auth_enabled: bool = proto.Field( + proto.BOOL, + number=23, + ) + server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message='TlsCertificate', + ) + transit_encryption_mode: TransitEncryptionMode = proto.Field( + proto.ENUM, + number=26, + enum=TransitEncryptionMode, + ) + maintenance_policy: 'MaintenancePolicy' = proto.Field( + proto.MESSAGE, + number=27, + message='MaintenancePolicy', + ) + maintenance_schedule: 'MaintenanceSchedule' = proto.Field( + proto.MESSAGE, + number=28, + message='MaintenanceSchedule', + ) + replica_count: int = proto.Field( + proto.INT32, + number=31, + ) + nodes: MutableSequence['NodeInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=32, + message='NodeInfo', + ) + read_endpoint: str = proto.Field( + proto.STRING, + number=33, + ) + read_endpoint_port: int = proto.Field( + proto.INT32, + number=34, + ) + read_replicas_mode: ReadReplicasMode = proto.Field( + proto.ENUM, + number=35, + enum=ReadReplicasMode, + ) + customer_managed_key: str = proto.Field( + proto.STRING, + number=36, + ) + persistence_config: 'PersistenceConfig' = proto.Field( + proto.MESSAGE, + number=37, + message='PersistenceConfig', + ) + 
suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField( + proto.ENUM, + number=38, + enum=SuspensionReason, + ) + maintenance_version: str = proto.Field( + proto.STRING, + number=39, + ) + available_maintenance_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=40, + ) + + +class PersistenceConfig(proto.Message): + r"""Configuration of the persistence functionality. + + Attributes: + persistence_mode (google.cloud.redis_v1.types.PersistenceConfig.PersistenceMode): + Optional. Controls whether Persistence + features are enabled. If not provided, the + existing value will be used. + rdb_snapshot_period (google.cloud.redis_v1.types.PersistenceConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. Snapshots will be + attempted every period starting from the provided snapshot + start time. For example, a start time of 01/01/2033 06:45 + and SIX_HOURS snapshot period will do nothing until + 01/01/2033, and then trigger snapshots every day at 06:45, + 12:45, 18:45, and 00:45 the next day, and so on. If not + provided, TWENTY_FOUR_HOURS will be used as default. + rdb_next_snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The next time that a snapshot + attempt is scheduled to occur. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Date and time that the first + snapshot was/will be attempted, and to which + future snapshots will be aligned. If not + provided, the current time will be used. + """ + class PersistenceMode(proto.Enum): + r"""Available Persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled for the instance, + and any existing snapshots are deleted. + RDB (2): + RDB based Persistence is enabled. + """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + + class SnapshotPeriod(proto.Enum): + r"""Available snapshot periods for scheduling. 
+ + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (3): + Snapshot every 1 hour. + SIX_HOURS (4): + Snapshot every 6 hours. + TWELVE_HOURS (5): + Snapshot every 12 hours. + TWENTY_FOUR_HOURS (6): + Snapshot every 24 hours. + """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 3 + SIX_HOURS = 4 + TWELVE_HOURS = 5 + TWENTY_FOUR_HOURS = 6 + + persistence_mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_snapshot_period: SnapshotPeriod = proto.Field( + proto.ENUM, + number=2, + enum=SnapshotPeriod, + ) + rdb_next_snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class RescheduleMaintenanceRequest(proto.Message): + r"""Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + reschedule_type (google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType): + Required. If reschedule type is SPECIFIC_TIME, must set up + schedule_time as well. + schedule_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Timestamp when the maintenance shall be + rescheduled to if reschedule_type=SPECIFIC_TIME, in RFC 3339 + format, for example ``2012-11-15T16:19:00.094Z``. + """ + class RescheduleType(proto.Enum): + r"""Reschedule options. + + Values: + RESCHEDULE_TYPE_UNSPECIFIED (0): + Not set. + IMMEDIATE (1): + If the user wants to schedule the maintenance + to happen now. + NEXT_AVAILABLE_WINDOW (2): + If the user wants to use the existing + maintenance policy to find the next available + window. 
+ SPECIFIC_TIME (3): + If the user wants to reschedule the + maintenance to a specific time. + """ + RESCHEDULE_TYPE_UNSPECIFIED = 0 + IMMEDIATE = 1 + NEXT_AVAILABLE_WINDOW = 2 + SPECIFIC_TIME = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + reschedule_type: RescheduleType = proto.Field( + proto.ENUM, + number=2, + enum=RescheduleType, + ) + schedule_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy for an instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + last updated. + description (str): + Optional. Description of what this policy is for. + Create/Update methods return INVALID_ARGUMENT if the length + is greater than 512. + weekly_maintenance_window (MutableSequence[google.cloud.redis_v1.types.WeeklyMaintenanceWindow]): + Optional. Maintenance window that is applied to resources + covered by this policy. Minimum 1. For the current version, + the maximum number of weekly_window is expected to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='WeeklyMaintenanceWindow', + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window in which disruptive maintenance updates occur. + Non-disruptive updates can occur inside or outside this window. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. 
The day of week that maintenance + updates occur. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC + time. + duration (google.protobuf.duration_pb2.Duration): + Output only. Duration of the maintenance + window. The current window is fixed at 1 hour. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. If no maintenance is + scheduled, fields are not populated. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. + can_reschedule (bool): + If the scheduled maintenance can be + rescheduled, default is true. + schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + can_reschedule: bool = proto.Field( + proto.BOOL, + number=3, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) class ListInstancesRequest(proto.Message): @@ -377,9 +851,9 @@ class ListInstancesResponse(proto.Message): If the ``location_id`` in the parent field of the request is "-", all regions available to the project are queried, and the results aggregated. If in such an aggregated query a - location is unavailable, a dummy Redis entry is included in - the response with the ``name`` field set to a value of the - form + location is unavailable, a placeholder Redis entry is + included in the response with the ``name`` field set to a + value of the form ``projects/{project_id}/locations/{location_id}/instances/``- and the ``status`` field set to ERROR and ``status_message`` field set to "location not available for ListInstances". @@ -427,6 +901,37 @@ class GetInstanceRequest(proto.Message): ) +class GetInstanceAuthStringRequest(proto.Message): + r"""Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class InstanceAuthString(proto.Message): + r"""Instance AUTH string details. + + Attributes: + auth_string (str): + AUTH string set on the instance. 
+ """ + + auth_string: str = proto.Field( + proto.STRING, + number=1, + ) + + class CreateInstanceRequest(proto.Message): r"""Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. @@ -480,6 +985,7 @@ class UpdateInstanceRequest(proto.Message): - ``labels`` - ``memorySizeGb`` - ``redisConfig`` + - ``replica_count`` instance (google.cloud.redis_v1.types.Instance): Required. Update description. Only fields specified in update_mask are updated. @@ -685,7 +1191,7 @@ class DataProtectionMode(proto.Enum): Instance failover will be protected with data loss control. More specifically, the failover will only be performed if the current - replication offset diff between master and + replication offset diff between primary and replica is under a certain threshold. FORCE_DATA_LOSS (2): Instance failover will be performed without @@ -789,4 +1295,49 @@ class ZoneMetadata(proto.Message): """ +class TlsCertificate(proto.Message): + r"""TlsCertificate Resource + + Attributes: + serial_number (str): + Serial number, as extracted from the + certificate. + cert (str): + PEM representation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the certificate was created in + `RFC 3339 `__ format, + for example ``2020-05-18T00:00:00.094Z``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the certificate expires in `RFC + 3339 `__ format, for + example ``2020-05-18T00:00:00.094Z``. + sha1_fingerprint (str): + Sha1 Fingerprint of the certificate. 
+ """ + + serial_number: str = proto.Field( + proto.STRING, + number=1, + ) + cert: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + sha1_fingerprint: str = proto.Field( + proto.STRING, + number=5, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py new file mode 100755 index 000000000000..a8e1b9147d26 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceAuthString +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_GetInstanceAuthString_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_GetInstanceAuthString_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py new file mode 100755 index 000000000000..a872f6a64a5e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstanceAuthString +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_GetInstanceAuthString_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_get_instance_auth_string(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceAuthStringRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance_auth_string(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_GetInstanceAuthString_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py new file mode 100755 index 000000000000..564a1c0277e0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_RescheduleMaintenance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_RescheduleMaintenance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py new file mode 100755 index 000000000000..825fc37aa04c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RescheduleMaintenance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_RescheduleMaintenance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_reschedule_maintenance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.RescheduleMaintenanceRequest( + name="name_value", + reschedule_type="SPECIFIC_TIME", + ) + + # Make the request + operation = client.reschedule_maintenance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_RescheduleMaintenance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 18c8da345d71..174d9dff49b1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -687,6 +687,167 @@ ], "title": "redis_v1_generated_cloud_redis_failover_instance_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.get_instance_auth_string", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstanceAuthString", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "GetInstanceAuthString" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.redis_v1.types.GetInstanceAuthStringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.redis_v1.types.InstanceAuthString", + "shortName": "get_instance_auth_string" + }, + "description": "Sample for GetInstanceAuthString", + "file": "redis_v1_generated_cloud_redis_get_instance_auth_string_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_GetInstanceAuthString_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_get_instance_auth_string_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.get_instance_auth_string", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstanceAuthString", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "GetInstanceAuthString" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.GetInstanceAuthStringRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.redis_v1.types.InstanceAuthString", + "shortName": "get_instance_auth_string" + }, + "description": "Sample for GetInstanceAuthString", + "file": "redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_GetInstanceAuthString_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py" + }, { "canonical": true, "clientMethod": { @@ -1178,6 +1339,183 @@ ], "title": "redis_v1_generated_cloud_redis_list_instances_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "redis_v1_generated_cloud_redis_reschedule_maintenance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_RescheduleMaintenance_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_reschedule_maintenance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.reschedule_maintenance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.RescheduleMaintenance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "RescheduleMaintenance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "reschedule_type", + "type": "google.cloud.redis_v1.types.RescheduleMaintenanceRequest.RescheduleType" + }, + { + "name": "schedule_time", + "type": "google.protobuf.timestamp_pb2.Timestamp" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": 
"reschedule_maintenance" + }, + "description": "Sample for RescheduleMaintenance", + "file": "redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_RescheduleMaintenance_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index b83d29e1c49b..4304e380e491 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -44,8 +44,10 @@ class redisCallTransformer(cst.CSTTransformer): 'export_instance': ('name', 'output_config', ), 'failover_instance': ('name', 'data_protection_mode', ), 'get_instance': ('name', ), + 'get_instance_auth_string': ('name', ), 'import_instance': ('name', 'input_config', ), 'list_instances': ('parent', 'page_size', 'page_token', ), + 'reschedule_maintenance': ('name', 'reschedule_type', 'schedule_time', ), 'update_instance': ('update_mask', 'instance', ), 'upgrade_instance': ('name', 'redis_version', ), } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 
ca2466444ceb..851e099c8b07 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -54,9 +54,12 @@ from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore import google.auth @@ -1001,6 +1004,7 @@ def test_get_instance(request_type, transport: str = 'grpc'): alternative_location_id='alternative_location_id_value', redis_version='redis_version_value', reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', host='host_value', port=453, current_location_id='current_location_id_value', @@ -1011,6 +1015,16 @@ def test_get_instance(request_type, transport: str = 'grpc'): authorized_network='authorized_network_value', persistence_iam_identity='persistence_iam_identity_value', connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], ) response = client.get_instance(request) @@ -1027,6 +1041,7 @@ def 
test_get_instance(request_type, transport: str = 'grpc'): assert response.alternative_location_id == 'alternative_location_id_value' assert response.redis_version == 'redis_version_value' assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' assert response.host == 'host_value' assert response.port == 453 assert response.current_location_id == 'current_location_id_value' @@ -1037,6 +1052,16 @@ def test_get_instance(request_type, transport: str = 'grpc'): assert response.authorized_network == 'authorized_network_value' assert response.persistence_iam_identity == 'persistence_iam_identity_value' assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] def test_get_instance_empty_call(): @@ -1079,6 +1104,7 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= alternative_location_id='alternative_location_id_value', redis_version='redis_version_value', reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', host='host_value', port=453, current_location_id='current_location_id_value', @@ -1089,6 +1115,16 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= 
authorized_network='authorized_network_value', persistence_iam_identity='persistence_iam_identity_value', connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], )) response = await client.get_instance(request) @@ -1105,6 +1141,7 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= assert response.alternative_location_id == 'alternative_location_id_value' assert response.redis_version == 'redis_version_value' assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' assert response.host == 'host_value' assert response.port == 453 assert response.current_location_id == 'current_location_id_value' @@ -1115,6 +1152,16 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= assert response.authorized_network == 'authorized_network_value' assert response.persistence_iam_identity == 'persistence_iam_identity_value' assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert 
response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] @pytest.mark.asyncio @@ -1267,6 +1314,238 @@ async def test_get_instance_flattened_error_async(): ) +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +def test_get_instance_auth_string(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + response = client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +def test_get_instance_auth_string_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + client.get_instance_auth_string() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + +@pytest.mark.asyncio +async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + )) + response = await client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +@pytest.mark.asyncio +async def test_get_instance_auth_string_async_from_dict(): + await test_get_instance_auth_string_async(request_type=dict) + + +def test_get_instance_auth_string_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceAuthStringRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + call.return_value = cloud_redis.InstanceAuthString() + client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_auth_string_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceAuthStringRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString()) + await client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_auth_string_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.InstanceAuthString() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance_auth_string( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_instance_auth_string_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance_auth_string( + cloud_redis.GetInstanceAuthStringRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_instance_auth_string_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_redis.InstanceAuthString() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance_auth_string( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_instance_auth_string_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance_auth_string( + cloud_redis.GetInstanceAuthStringRequest(), + name='name_value', + ) + + @pytest.mark.parametrize("request_type", [ cloud_redis.CreateInstanceRequest, dict, @@ -2948,62 +3227,308 @@ async def test_delete_instance_flattened_error_async(): @pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, + cloud_redis.RescheduleMaintenanceRequest, + dict, ]) -def test_list_instances_rest(request_type): +def test_reschedule_maintenance(request_type, transport: str = 'grpc'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport, ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.reschedule_maintenance(request) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_instances(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - + assert isinstance(response, future.Future) -def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): - transport_class = transports.CloudRedisRestTransport - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False - )) +def test_reschedule_maintenance_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) +@pytest.mark.asyncio +async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # verify required fields with default values are now present + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_from_dict(): + await test_reschedule_maintenance_async(request_type=dict) + + +def test_reschedule_maintenance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.RescheduleMaintenanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reschedule_maintenance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.RescheduleMaintenanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_reschedule_maintenance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.reschedule_maintenance( + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + + +def test_reschedule_maintenance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_redis.RescheduleMaintenanceRequest(), + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.reschedule_maintenance( + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].reschedule_type + mock_val = cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE + assert arg == mock_val + assert TimestampRule().to_proto(args[0].schedule_time) == timestamp_pb2.Timestamp(seconds=751) + +@pytest.mark.asyncio +async def test_reschedule_maintenance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.reschedule_maintenance( + cloud_redis.RescheduleMaintenanceRequest(), + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' @@ -3262,6 +3787,7 @@ def test_get_instance_rest(request_type): alternative_location_id='alternative_location_id_value', redis_version='redis_version_value', reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', host='host_value', port=453, current_location_id='current_location_id_value', @@ -3272,39 +3798,292 @@ def test_get_instance_rest(request_type): authorized_network='authorized_network_value', 
persistence_iam_identity='persistence_iam_identity_value', connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = 
request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_redis.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + 
req.return_value._content = cloud_redis.Instance.to_json(cloud_redis.Instance()) + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_instance(request) + + +def test_get_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_get_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_instance( + cloud_redis.GetInstanceRequest(), + name='name_value', + ) + + +def test_get_instance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +def test_get_instance_auth_string_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', ) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.Instance.pb(return_value) + pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.get_instance(request) + response = client.get_instance_auth_string(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.host == 'host_value' - assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' - assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' - assert response.tier == cloud_redis.Instance.Tier.BASIC - assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' - assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' -def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceRequest): +def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis.GetInstanceAuthStringRequest): transport_class = transports.CloudRedisRestTransport request_init = {} @@ -3319,14 +4098,14 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -3340,7 +4119,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = cloud_redis.Instance() + return_value = cloud_redis.InstanceAuthString() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, 'request') as req: # We need to mock transcode() because providing default values @@ -3360,13 +4139,13 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.Instance.pb(return_value) + pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - response = client.get_instance(request) + response = client.get_instance_auth_string(request) expected_params = [ ] @@ -3374,15 +4153,15 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR assert expected_params == actual_params -def test_get_instance_rest_unset_required_fields(): +def test_get_instance_auth_string_rest_unset_required_fields(): transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) - unset_fields = transport.get_instance._get_unset_required_fields({}) + unset_fields = transport.get_instance_auth_string._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) 
@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): +def test_get_instance_auth_string_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), @@ -3390,11 +4169,11 @@ def test_get_instance_rest_interceptors(null_interceptor): client = CloudRedisClient(transport=transport) with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: pre.assert_not_called() post.assert_not_called() - pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -3405,23 +4184,23 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value = Response() req.return_value.status_code = 200 req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value._content = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) - request = cloud_redis.GetInstanceRequest() + request = cloud_redis.GetInstanceAuthStringRequest() metadata =[ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = cloud_redis.Instance() + post.return_value = cloud_redis.InstanceAuthString() - 
client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() -def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): +def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3438,10 +4217,10 @@ def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=clo response_value.status_code = 400 response_value.request = Request() req.return_value = response_value - client.get_instance(request) + client.get_instance_auth_string(request) -def test_get_instance_rest_flattened(): +def test_get_instance_auth_string_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3450,7 +4229,7 @@ def test_get_instance_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), 'request') as req: # Designate an appropriate value for the returned response. 
- return_value = cloud_redis.Instance() + return_value = cloud_redis.InstanceAuthString() # get arguments that satisfy an http rule for this method sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} @@ -3464,21 +4243,21 @@ def test_get_instance_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.Instance.pb(return_value) + pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) json_return_value = json_format.MessageToJson(pb_return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value - client.get_instance(**mock_args) + client.get_instance_auth_string(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] - assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}/authString" % client.transport._host, args[1]) -def test_get_instance_rest_flattened_error(transport: str = 'rest'): +def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3487,13 +4266,13 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_instance( - cloud_redis.GetInstanceRequest(), + client.get_instance_auth_string( + cloud_redis.GetInstanceAuthStringRequest(), name='name_value', ) -def test_get_instance_rest_error(): +def test_get_instance_auth_string_rest_error(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), transport='rest' @@ -3512,7 +4291,7 @@ def test_create_instance_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 
'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3671,7 +4450,7 @@ def test_create_instance_rest_bad_request(transport: str = 'rest', request_type= # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 
'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3758,7 +4537,7 @@ def test_update_instance_rest(request_type): # send a request that will satisfy transcoding request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 
'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -3901,7 +4680,7 @@ def test_update_instance_rest_bad_request(transport: str = 'rest', request_type= # send a request that will satisfy transcoding request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 
'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -5120,6 +5899,237 @@ def test_delete_instance_rest_error(): ) +@pytest.mark.parametrize("request_type", [ + cloud_redis.RescheduleMaintenanceRequest, + dict, +]) +def test_reschedule_maintenance_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.reschedule_maintenance(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.RescheduleMaintenanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.reschedule_maintenance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_reschedule_maintenance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", ))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_reschedule_maintenance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, 
"transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) + + request = cloud_redis.RescheduleMaintenanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.RescheduleMaintenanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reschedule_maintenance(request) + + +def test_reschedule_maintenance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + client.reschedule_maintenance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance" % client.transport._host, args[1]) + + +def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.reschedule_maintenance( + cloud_redis.RescheduleMaintenanceRequest(), + name='name_value', + reschedule_type=cloud_redis.RescheduleMaintenanceRequest.RescheduleType.IMMEDIATE, + schedule_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_reschedule_maintenance_rest_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest' + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudRedisGrpcTransport( @@ -5249,6 +6259,7 @@ def test_cloud_redis_base_transport(): methods = ( 'list_instances', 'get_instance', + 'get_instance_auth_string', 'create_instance', 'update_instance', 'upgrade_instance', @@ -5256,6 +6267,7 @@ def test_cloud_redis_base_transport(): 'export_instance', 'failover_instance', 'delete_instance', + 'reschedule_maintenance', ) for method in methods: with pytest.raises(NotImplementedError): @@ -5522,6 +6534,9 @@ def test_cloud_redis_client_transport_session_collision(transport_name): session1 = client1.transport.get_instance._session session2 = client2.transport.get_instance._session assert session1 != session2 + session1 = client1.transport.get_instance_auth_string._session + session2 = client2.transport.get_instance_auth_string._session + assert session1 != session2 session1 = client1.transport.create_instance._session session2 = client2.transport.create_instance._session assert session1 != session2 @@ -5543,6 +6558,9 @@ def test_cloud_redis_client_transport_session_collision(transport_name): session1 = client1.transport.delete_instance._session session2 = client2.transport.delete_instance._session assert session1 != session2 + session1 = client1.transport.reschedule_maintenance._session + session2 = client2.transport.reschedule_maintenance._session + assert session1 != session2 def test_cloud_redis_grpc_transport_channel(): channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) From ab973dfe2caae4317e2683e36904f195636d8a8d Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 16 Aug 2023 18:56:39 -0400 Subject: [PATCH 1040/1339] test: add service yaml files to integration (#1742) --- .../%sub/services/%service/client.py.j2 | 2 +- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 4 +- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 2 
+- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/grpc.py.j2 | 2 +- .../%service/transports/grpc_asyncio.py.j2 | 2 +- .../services/%service/transports/rest.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 4 +- .../tests/integration/BUILD.bazel | 6 +- .../tests/integration/cloudasset_v1.yaml | 47 + .../tests/integration/eventarc_v1.yaml | 109 ++ .../services/asset_service/async_client.py | 52 + .../asset_v1/services/asset_service/client.py | 54 + .../services/asset_service/transports/base.py | 10 + .../services/asset_service/transports/grpc.py | 19 + .../asset_service/transports/grpc_asyncio.py | 19 + .../services/asset_service/transports/rest.py | 93 + .../unit/gapic/asset_v1/test_asset_service.py | 180 +- .../services/eventarc/async_client.py | 597 ++++++ .../eventarc_v1/services/eventarc/client.py | 589 ++++++ .../services/eventarc/transports/base.py | 83 + .../services/eventarc/transports/grpc.py | 188 ++ .../eventarc/transports/grpc_asyncio.py | 188 ++ .../services/eventarc/transports/rest.py | 817 ++++++++ .../unit/gapic/eventarc_v1/test_eventarc.py | 1675 ++++++++++++++++- .../config_service_v2/async_client.py | 153 ++ .../services/config_service_v2/client.py | 153 ++ .../config_service_v2/transports/base.py | 28 + .../config_service_v2/transports/grpc.py | 55 + .../transports/grpc_asyncio.py | 55 + .../logging_service_v2/async_client.py | 153 ++ .../services/logging_service_v2/client.py | 153 ++ .../logging_service_v2/transports/base.py | 28 + .../logging_service_v2/transports/grpc.py | 55 + .../transports/grpc_asyncio.py | 55 + .../metrics_service_v2/async_client.py | 153 ++ .../services/metrics_service_v2/client.py | 153 ++ .../metrics_service_v2/transports/base.py | 28 + .../metrics_service_v2/transports/grpc.py | 55 + .../transports/grpc_asyncio.py | 55 + .../logging_v2/test_config_service_v2.py | 392 +++- .../logging_v2/test_logging_service_v2.py | 391 ++++ .../logging_v2/test_metrics_service_v2.py | 391 
++++ .../services/cloud_redis/async_client.py | 307 +++ .../redis_v1/services/cloud_redis/client.py | 302 +++ .../services/cloud_redis/transports/base.py | 54 + .../services/cloud_redis/transports/grpc.py | 110 ++ .../cloud_redis/transports/grpc_asyncio.py | 110 ++ .../services/cloud_redis/transports/rest.py | 525 ++++++ .../unit/gapic/redis_v1/test_cloud_redis.py | 1065 ++++++++++- .../tests/integration/iamcredentials_v1.yaml | 17 + .../tests/integration/logging_v2.yaml | 221 +++ .../tests/integration/redis_v1.yaml | 70 + 56 files changed, 10021 insertions(+), 18 deletions(-) create mode 100644 packages/gapic-generator/tests/integration/cloudasset_v1.yaml create mode 100644 packages/gapic-generator/tests/integration/iamcredentials_v1.yaml create mode 100644 packages/gapic-generator/tests/integration/logging_v2.yaml create mode 100644 packages/gapic-generator/tests/integration/redis_v1.yaml diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index c5233c2703c8..6c98bed39447 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -40,7 +40,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index f22a5dbe032e..3a4118b28201 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -30,7 +30,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 374e6ddbc2cc..a4fecf9c3e34 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -29,7 +29,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 186e5b4e04ea..6bb8a3124164 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
+++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -44,13 +44,13 @@ from google.api_core import path_template from google.api_core import future from google.api_core import operation from google.api_core import operations_v1 -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index c2fd3d57d483..5abd583cacac 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -40,7 +40,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index bf6e34da7bc2..03cc953dbeaa 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -50,7 +50,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 1043178b5021..b31e5fd85deb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -32,7 +32,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} {% filter sort_lines %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 96923c5dbd11..668330cd2bd5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -29,7 +29,7 @@ 
from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 25b7b4db5c15..0288b5cd641f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -29,7 +29,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index bf9b5b36e208..4e5d9e45cbee 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -27,7 +27,7 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} -from 
google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} from requests import __version__ as requests_version import dataclasses diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 68508abc9205..5c4074868622 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -52,13 +52,13 @@ from google.api_core import future {% if service.has_lro %} from google.api_core import operation from google.api_core import operations_v1 -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore {% endif %} from google.api_core import gapic_v1 {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index d9ce330dc650..4d700c938fb0 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -54,6 +54,7 @@ py_gapic_library( opt_args = [ "autogen-snippets", ], + service_yaml = "cloudasset_v1.yaml", transport = "grpc+rest", ) @@ -65,6 +66,7 @@ py_gapic_library( opt_args = [ "autogen-snippets", ], + service_yaml = "iamcredentials_v1.yaml", transport = "grpc+rest", ) @@ -124,8 +126,9 @@ py_gapic_library( ], # REST is not generated because of the following issues: # - REST unit test `test_update_settings_rest_flattened` in 
logging_v2 fails. See #1728 - # - REST is not generated in the public `BUILD.bazel` + # - REST is not generated in the public `BUILD.bazel` # https://github.com/googleapis/googleapis/blob/e85662e798a0a9495a035839f66d0c037c481e2c/google/logging/v2/BUILD.bazel#L201 + service_yaml = "logging_v2.yaml", transport = "grpc", ) @@ -148,6 +151,7 @@ py_gapic_library( opt_args = [ "autogen-snippets", ], + service_yaml = "redis_v1.yaml", transport = "grpc+rest", ) diff --git a/packages/gapic-generator/tests/integration/cloudasset_v1.yaml b/packages/gapic-generator/tests/integration/cloudasset_v1.yaml new file mode 100644 index 000000000000..65bc90f6e2fe --- /dev/null +++ b/packages/gapic-generator/tests/integration/cloudasset_v1.yaml @@ -0,0 +1,47 @@ +type: google.api.Service +config_version: 3 +name: cloudasset.googleapis.com +title: Cloud Asset API + +apis: +- name: google.cloud.asset.v1.AssetService +- name: google.longrunning.Operations + +types: +- name: google.cloud.asset.v1.AnalyzeIamPolicyLongrunningMetadata +- name: google.cloud.asset.v1.AnalyzeIamPolicyLongrunningResponse + +documentation: + summary: |- + The Cloud Asset API manages the history and inventory of Google Cloud + resources. + overview: |- + # Cloud Asset API + The Cloud Asset API keeps a history of Google Cloud asset metadata, and + allows Google Cloud users to download a dump of all asset metadata for the + resource types listed below within an organization or a project at a given + timestamp. 
+ Read more documents here: + https://cloud.google.com/asset-inventory/docs +backend: + rules: + - selector: 'google.cloud.asset.v1.AssetService.*' + deadline: 600.0 + - selector: google.longrunning.Operations.GetOperation + deadline: 60.0 + +http: + rules: + - selector: google.longrunning.Operations.GetOperation + get: '/v1/{name=*/*/operations/*/**}' + +authentication: + rules: + - selector: 'google.cloud.asset.v1.AssetService.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: google.longrunning.Operations.GetOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/eventarc_v1.yaml b/packages/gapic-generator/tests/integration/eventarc_v1.yaml index afd816204495..26d8875e5ba4 100644 --- a/packages/gapic-generator/tests/integration/eventarc_v1.yaml +++ b/packages/gapic-generator/tests/integration/eventarc_v1.yaml @@ -7,3 +7,112 @@ apis: - name: google.cloud.eventarc.v1.Eventarc - name: google.cloud.location.Locations - name: google.iam.v1.IAMPolicy +- name: google.longrunning.Operations + +types: +- name: google.cloud.eventarc.v1.OperationMetadata + +documentation: + summary: 'Build event-driven applications on Google Cloud Platform.' + overview: |- + Eventarc lets you asynchronously deliver events from Google services, SaaS, + and your own apps using loosely coupled services that react to state + changes. Eventarc requires no infrastructure management — you can optimize + productivity and costs while building an event-driven solution. + rules: + - selector: google.cloud.location.Locations.GetLocation + description: Gets information about a location. + + - selector: google.cloud.location.Locations.ListLocations + description: Lists information about the supported locations for this service. 
+ + - selector: google.iam.v1.IAMPolicy.GetIamPolicy + description: |- + Gets the access control policy for a resource. Returns an empty policy + if the resource exists and does not have a policy set. + - selector: google.iam.v1.IAMPolicy.SetIamPolicy + description: |- + Sets the access control policy on the specified resource. Replaces + any existing policy. + Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` + errors. + - selector: google.iam.v1.IAMPolicy.TestIamPermissions + description: |- + Returns permissions that a caller has on the specified resource. If the + resource does not exist, this will return an empty set of + permissions, not a `NOT_FOUND` error. + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for authorization + checking. This operation may "fail open" without warning. +backend: + rules: + - selector: 'google.cloud.eventarc.v1.Eventarc.*' + deadline: 60.0 + - selector: google.cloud.location.Locations.GetLocation + deadline: 60.0 + - selector: google.cloud.location.Locations.ListLocations + deadline: 60.0 + - selector: 'google.iam.v1.IAMPolicy.*' + deadline: 60.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.cloud.location.Locations.GetLocation + get: '/v1/{name=projects/*/locations/*}' + - selector: google.cloud.location.Locations.ListLocations + get: '/v1/{name=projects/*}/locations' + - selector: google.iam.v1.IAMPolicy.GetIamPolicy + get: '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy' + additional_bindings: + - get: '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy' + - get: '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy' + - selector: google.iam.v1.IAMPolicy.SetIamPolicy + post: '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy' + body: '*' + additional_bindings: + - post: '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy' + 
body: '*' + - post: '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy' + body: '*' + - selector: google.iam.v1.IAMPolicy.TestIamPermissions + post: '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions' + body: '*' + additional_bindings: + - post: '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions' + body: '*' + - post: '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions' + body: '*' + - selector: google.longrunning.Operations.CancelOperation + post: '/v1/{name=projects/*/locations/*/operations/*}:cancel' + body: '*' + - selector: google.longrunning.Operations.DeleteOperation + delete: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.GetOperation + get: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v1/{name=projects/*/locations/*}/operations' + +authentication: + rules: + - selector: 'google.cloud.eventarc.v1.Eventarc.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: google.cloud.location.Locations.GetLocation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: google.cloud.location.Locations.ListLocations + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.iam.v1.IAMPolicy.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.longrunning.Operations.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 113bbdea7682..9a4309d1bb6a 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -37,6 +37,7 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -3013,6 +3014,57 @@ async def sample_analyze_org_policy_governed_assets(): # Done; return the response. return response + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def __aenter__(self) -> "AssetServiceAsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 63c80bcea508..65f82d2fef0e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -40,6 +40,7 @@ from google.cloud.asset_v1.services.asset_service import pagers from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -3233,6 +3234,59 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + + diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index dcf9370a2fb2..c1008068adbf 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -29,6 +29,7 @@ from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -502,6 +503,15 @@ def analyze_org_policy_governed_assets(self) -> Callable[ ]]: raise NotImplementedError() + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index a565057b1d1c..46d1f503b82a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -27,6 +27,7 @@ from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore +from 
google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO @@ -931,6 +932,24 @@ def analyze_org_policy_governed_assets(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index d018d120c725..49f4fe4e5eb2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -27,6 +27,7 @@ from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AssetServiceGrpcTransport @@ 
-934,6 +935,24 @@ def analyze_org_policy_governed_assets(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + __all__ = ( 'AssetServiceGrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index d68de1ab528a..ed446c9cf842 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -28,6 +28,7 @@ from google.protobuf import json_format from google.api_core import operations_v1 +from google.longrunning import operations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re @@ -603,6 +604,27 @@ def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: 
+ """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AssetService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AssetService server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class AssetServiceRestStub: @@ -714,6 +736,12 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=*/*/operations/*/**}', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -2755,6 +2783,71 @@ def update_saved_query(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(AssetServiceRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/operations/*/**}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index e4947013d9dd..6bb3ed9ed912 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -53,7 +53,7 @@ from google.cloud.asset_v1.services.asset_service import transports from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets -from google.longrunning 
import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -12449,6 +12449,7 @@ def test_asset_service_base_transport(): 'analyze_org_policies', 'analyze_org_policy_governed_containers', 'analyze_org_policy_governed_assets', + 'get_operation', ) for method in methods: with pytest.raises(NotImplementedError): @@ -13169,6 +13170,183 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation(transport: str = "grpc"): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index aae8f9df411f..e247bcbc109c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -48,6 +48,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -2383,6 +2384,602 @@ async def sample_update_google_channel_config(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "EventarcAsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 404f72b0bee1..e4f83e706fd8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -51,6 +51,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -2671,13 +2672,601 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = rpc(
+            request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + # Done; return the response. 
+ return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. 
+ + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 4f519f422c6c..72b2cc0974e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -38,6 +38,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -392,6 +393,88 @@ def update_google_channel_config(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, 
Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 71712a1d6d3e..ae473c3b75eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -36,6 +36,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO @@ -729,6 +730,193 @@ def update_google_channel_config(self) -> Callable[ def close(self): self.grpc_channel.close() + @property 
+ def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "test_iam_permissions" not in self._stubs:
+            self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/TestIamPermissions",
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs["test_iam_permissions"]
+
     @property
     def kind(self) -> str:
         return "grpc"
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py
index cee7bc068b8c..e37c3401b12f 100755
--- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py
+++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py
@@ -36,6 +36,7 @@
 from google.iam.v1 import iam_policy_pb2  # type: ignore
 from google.iam.v1 import policy_pb2  # type: ignore
 from google.longrunning import operations_pb2  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
 from .base import EventarcTransport, DEFAULT_CLIENT_INFO
 from .grpc import EventarcGrpcTransport
 
@@ -732,6 +733,193 @@ def update_google_channel_config(self) -> Callable[
     def close(self):
         return self.grpc_channel.close()
 
+    @property
+    def delete_operation(
+        self,
+    ) -> Callable[[operations_pb2.DeleteOperationRequest], None]:
+        r"""Return a callable for the delete_operation method over gRPC.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + __all__ = ( 'EventarcGrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 6e8a1e6e4561..13a42d7eec02 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -31,6 +31,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re @@ -515,6 +516,187 @@ def post_update_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_iam_policy( + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_set_iam_policy( + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_set_iam_policy( + self, response: policy_pb2.Policy + ) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_test_iam_permissions( + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. 
+ """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. 
+ """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the Eventarc server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the Eventarc server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class EventarcRestStub: @@ -628,6 +810,31 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -2273,6 +2480,616 @@ def update_trigger(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + @property + def get_location(self): + return self._GetLocation(self._session, self._host, 
self._interceptor) # type: ignore + + class _GetLocation(EventarcRestStub): + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(EventarcRestStub): + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(EventarcRestStub): + def __call__(self, + request: iam_policy_pb2.GetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy', + }, +{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy', + }, +{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy', + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(EventarcRestStub): + def __call__(self, + request: iam_policy_pb2.SetIamPolicyRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. 
+ + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(EventarcRestStub): + def __call__(self, + request: iam_policy_pb2.TestIamPermissionsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions', + 'body': '*', + }, +{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(EventarcRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + body = json.dumps(transcoded_request['body']) + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(EventarcRestStub): + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(EventarcRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(EventarcRestStub): + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index c61f8327b218..771528a988c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -65,7 +65,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -10357,6 +10357,15 @@ def test_eventarc_base_transport(): 'delete_channel_connection', 'get_google_channel_config', 'update_google_channel_config', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', ) for method in methods: with pytest.raises(NotImplementedError): @@ -11135,6 +11144,1670 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = 
request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + +def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # NOTE(review): mock get_location (the RPC under test), not list_locations;
+    # the original patched the wrong transport method, so the stub was bypassed.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # NOTE(review): mock get_location (the RPC under test), not list_locations;
+    # the original patched the wrong transport method, so the stub was bypassed.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = EventarcClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        response = client.set_iam_policy(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = EventarcAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + +def test_get_iam_policy(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +def test_test_iam_permissions(transport: str = "grpc"): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index fca70d962c17..1c9d95910463 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -36,6 +36,7 @@ from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore 
from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3732,6 +3733,158 @@ async def sample_copy_log_entries(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def __aenter__(self) -> "ConfigServiceV2AsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 1752be078912..62f41d97a56e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -39,6 +39,7 @@ from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3946,6 +3947,158 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index ab0c7ff98361..e967588ea825 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -29,6 +29,7 @@ from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -639,6 +640,33 @@ def copy_log_entries(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + 
None, + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 783cd0ced90d..f2ba4a37f07f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -27,6 +27,7 @@ from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO @@ -1181,6 +1182,60 @@ def copy_log_entries(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 22a7adfee489..4cdb5ffe7c55 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -27,6 +27,7 @@ from 
google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport @@ -1184,6 +1185,60 @@ def copy_log_entries(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ( 'ConfigServiceV2GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 8ccc6353ccca..8f947ba54726 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -36,6 +36,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base 
import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client @@ -964,6 +965,158 @@ def request_generator(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def __aenter__(self) -> "LoggingServiceV2AsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 2aa1ff6e5804..c90848e28ccc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -39,6 +39,7 @@ from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import LoggingServiceV2GrpcTransport from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport @@ -1114,6 +1115,158 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 637083ce9004..649b606217dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -27,6 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -261,6 +262,33 @@ def tail_log_entries(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 5559071ddcef..b24dd885af16 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO @@ -404,6 +405,60 @@ def tail_log_entries(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index f60b01417d6f..9454a1f27b52 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport @@ -407,6 +408,60 @@ def tail_log_entries(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ( 'LoggingServiceV2GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index f6f7e55ff827..5d160dc58bb8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -36,6 +36,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport @@ -803,6 +804,158 @@ async def sample_delete_log_metric(): metadata=metadata, ) + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def __aenter__(self) -> "MetricsServiceV2AsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index b82ec16c3658..50ec7c399526 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -39,6 +39,7 @@ from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .transports.grpc import MetricsServiceV2GrpcTransport @@ -973,6 +974,158 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + 
self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index a67ba7fd8997..5ad58024fb86 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -27,6 +27,7 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -231,6 +232,33 @@ def delete_log_metric(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 11116216065e..d6cf4e25225f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -25,6 +25,7 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO @@ -359,6 +360,60 @@ def delete_log_metric(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 3ddcc80df579..1b81c982e60e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -25,6 +25,7 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore 
from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport @@ -362,6 +363,60 @@ def delete_log_metric(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + __all__ = ( 'MetricsServiceV2GrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 289a13b94d31..a482c9cbf715 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -45,7 +45,7 @@ from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.services.config_service_v2 import transports from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -8182,6 +8182,9 @@ def test_config_service_v2_base_transport(): 'get_settings', 'update_settings', 'copy_log_entries', + 'get_operation', + 'cancel_operation', + 'list_operations', ) for method in methods: with pytest.raises(NotImplementedError): @@ -8805,6 +8808,393 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index a648ed79bc86..4005872a7559 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -45,6 +45,7 @@ from google.cloud.logging_v2.types import logging from google.logging.type import http_request_pb2 # type: ignore from google.logging.type import log_severity_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -2228,6 +2229,9 @@ def test_logging_service_v2_base_transport(): 'list_monitored_resource_descriptors', 'list_logs', 'tail_log_entries', + 'get_operation', + 'cancel_operation', + 'list_operations', ) for method in methods: with pytest.raises(NotImplementedError): @@ -2695,6 +2699,393 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 8045f5d8dc88..e7a0798abcfb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -45,6 +45,7 @@ from google.cloud.logging_v2.services.metrics_service_v2 import pagers from google.cloud.logging_v2.services.metrics_service_v2 import transports from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -2095,6 +2096,9 @@ def test_metrics_service_v2_base_transport(): 'create_log_metric', 'update_log_metric', 'delete_log_metric', + 'get_operation', + 'cancel_operation', + 'list_operations', ) for method in methods: with pytest.raises(NotImplementedError): @@ -2562,6 +2566,393 @@ async def test_transport_close_async(): close.assert_called_once() +def test_cancel_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index dab7a5457086..909c48d2db82 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -34,8 +34,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1644,6 +1646,311 @@ async def sample_reschedule_maintenance(): # Done; return the response. return response + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "CloudRedisAsyncClient": return self diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 9c4721889f56..452651485c11 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -37,8 +37,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -1851,10 +1853,310 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + # Done; return the response. + return response + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index c99e4e883ef1..021036407e58 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -27,8 +27,10 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) @@ -285,6 +287,58 @@ def reschedule_maintenance(self) -> Callable[ ]]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def 
get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index c53d62f31dac..c797c13edfed 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -25,8 +25,10 @@ import grpc # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO @@ -601,6 +603,114 @@ def reschedule_maintenance(self) -> Callable[ def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index f93b258ed99e..e1f7ed8f42fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -25,8 +25,10 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport @@ -604,6 +606,114 @@ def reschedule_maintenance(self) -> Callable[ def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ( 'CloudRedisGrpcAsyncIOTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index b79fd4b0c979..45a69fe587df 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -28,6 +28,8 @@ from google.protobuf import json_format from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re @@ -338,6 +340,127 @@ def post_upgrade_instance(self, response: operations_pb2.Operation) -> operation """ return response + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class CloudRedisRestStub: @@ -469,6 +592,30 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Only create a new client if we do not already have one. if self._operations_client is None: http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -1492,6 +1639,384 @@ def upgrade_instance(self) -> Callable[ # In C++ this would require a dynamic_cast return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(CloudRedisRestStub): + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. 
+ + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(CloudRedisRestStub): + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ] + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.Operation() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(CloudRedisRestStub): + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + + # Send the request + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_operations(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 851e099c8b07..bc58a6c8eab9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -47,12 +47,13 @@ from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient from google.cloud.redis_v1.services.cloud_redis import pagers from google.cloud.redis_v1.services.cloud_redis import transports from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -6268,6 +6269,12 @@ def test_cloud_redis_base_transport(): 'failover_instance', 'delete_instance', 'reschedule_maintenance', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', ) for method in methods: with pytest.raises(NotImplementedError): @@ -6844,6 +6851,1062 @@ async def test_transport_close_async(): 
close.assert_called_once() +def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + +def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + +def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = '{}' + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + +def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + +def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_delete_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + +def test_delete_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"] + +def test_get_location_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + + def test_transport_close(): transports = { "rest": "_session", diff --git a/packages/gapic-generator/tests/integration/iamcredentials_v1.yaml b/packages/gapic-generator/tests/integration/iamcredentials_v1.yaml new file mode 100644 index 000000000000..6f3f05ab34a8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/iamcredentials_v1.yaml @@ -0,0 +1,17 @@ +type: google.api.Service +config_version: 3 +name: iamcredentials.googleapis.com +title: IAM Service Account Credentials API + +apis: +- name: google.iam.credentials.v1.IAMCredentials + +documentation: + summary: 'Creates short-lived, limited-privilege credentials for IAM service accounts.' 
+ +authentication: + rules: + - selector: 'google.iam.credentials.v1.IAMCredentials.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/logging_v2.yaml b/packages/gapic-generator/tests/integration/logging_v2.yaml new file mode 100644 index 000000000000..7f10ee0bd13a --- /dev/null +++ b/packages/gapic-generator/tests/integration/logging_v2.yaml @@ -0,0 +1,221 @@ +type: google.api.Service +config_version: 3 +name: logging.googleapis.com +title: Cloud Logging API + +apis: +- name: google.logging.v2.ConfigServiceV2 +- name: google.logging.v2.LoggingServiceV2 +- name: google.logging.v2.MetricsServiceV2 +- name: google.longrunning.Operations + +types: +- name: google.logging.v2.BucketMetadata +- name: google.logging.v2.CopyLogEntriesMetadata +- name: google.logging.v2.CopyLogEntriesResponse +- name: google.logging.v2.LinkMetadata +- name: google.logging.v2.LocationMetadata + +documentation: + summary: Writes log entries and manages your Cloud Logging configuration. + overview: |- + # Introduction + The Cloud Logging service. 
+backend: + rules: + - selector: 'google.logging.v2.ConfigServiceV2.*' + deadline: 60.0 + - selector: google.logging.v2.ConfigServiceV2.CreateBucket + deadline: 600.0 + - selector: google.logging.v2.ConfigServiceV2.UpdateBucket + deadline: 600.0 + - selector: 'google.logging.v2.LoggingServiceV2.*' + deadline: 60.0 + - selector: google.logging.v2.LoggingServiceV2.ListLogEntries + deadline: 10.0 + - selector: google.logging.v2.LoggingServiceV2.TailLogEntries + deadline: 3600.0 + - selector: 'google.logging.v2.MetricsServiceV2.*' + deadline: 60.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.longrunning.Operations.CancelOperation + post: '/v2/{name=*/*/locations/*/operations/*}:cancel' + body: '*' + additional_bindings: + - post: '/v2/{name=projects/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=organizations/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=folders/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=billingAccounts/*/locations/*/operations/*}:cancel' + body: '*' + - selector: google.longrunning.Operations.GetOperation + get: '/v2/{name=*/*/locations/*/operations/*}' + additional_bindings: + - get: '/v2/{name=projects/*/locations/*/operations/*}' + - get: '/v2/{name=organizations/*/locations/*/operations/*}' + - get: '/v2/{name=folders/*/locations/*/operations/*}' + - get: '/v2/{name=billingAccounts/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v2/{name=*/*/locations/*}/operations' + additional_bindings: + - get: '/v2/{name=projects/*/locations/*}/operations' + - get: '/v2/{name=organizations/*/locations/*}/operations' + - get: '/v2/{name=folders/*/locations/*}/operations' + - get: '/v2/{name=billingAccounts/*/locations/*}/operations' + +authentication: + rules: + - selector: 'google.logging.v2.ConfigServiceV2.*' + oauth: + canonical_scopes: |- + 
https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.logging.v2.ConfigServiceV2.GetBucket + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetCmekSettings + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetExclusion + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetLink + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetSettings + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetSink + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetView + oauth: + canonical_scopes: |- + 
https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListBuckets + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListExclusions + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListLinks + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListSinks + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListViews + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: 'google.logging.v2.LoggingServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: 
google.logging.v2.LoggingServiceV2.DeleteLog + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.logging.v2.LoggingServiceV2.WriteLogEntries + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.write + - selector: 'google.logging.v2.MetricsServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.write + - selector: google.logging.v2.MetricsServiceV2.GetLogMetric + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.MetricsServiceV2.ListLogMetrics + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.longrunning.Operations.CancelOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.longrunning.Operations.GetOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.longrunning.Operations.ListOperations + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + 
https://www.googleapis.com/auth/logging.read +publishing: + documentation_uri: https://cloud.google.com/logging/docs/ \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/redis_v1.yaml b/packages/gapic-generator/tests/integration/redis_v1.yaml new file mode 100644 index 000000000000..499c13d4e44f --- /dev/null +++ b/packages/gapic-generator/tests/integration/redis_v1.yaml @@ -0,0 +1,70 @@ +type: google.api.Service +config_version: 3 +name: redis.googleapis.com +title: Google Cloud Memorystore for Redis API + +apis: +- name: google.cloud.location.Locations +- name: google.cloud.redis.v1.CloudRedis +- name: google.longrunning.Operations + +types: +- name: google.cloud.redis.v1.LocationMetadata +- name: google.cloud.redis.v1.OperationMetadata +- name: google.cloud.redis.v1.ZoneMetadata + +documentation: + summary: Creates and manages Redis instances on the Google Cloud Platform. + rules: + - selector: google.cloud.location.Locations.GetLocation + description: Gets information about a location. + + - selector: google.cloud.location.Locations.ListLocations + description: Lists information about the supported locations for this service. 
+ +backend: + rules: + - selector: google.cloud.location.Locations.GetLocation + deadline: 60.0 + - selector: google.cloud.location.Locations.ListLocations + deadline: 60.0 + - selector: 'google.cloud.redis.v1.CloudRedis.*' + deadline: 60.0 + - selector: google.cloud.redis.v1.CloudRedis.ListInstances + deadline: 20.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.cloud.location.Locations.GetLocation + get: '/v1/{name=projects/*/locations/*}' + - selector: google.cloud.location.Locations.ListLocations + get: '/v1/{name=projects/*}/locations' + - selector: google.longrunning.Operations.CancelOperation + post: '/v1/{name=projects/*/locations/*/operations/*}:cancel' + - selector: google.longrunning.Operations.DeleteOperation + delete: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.GetOperation + get: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v1/{name=projects/*/locations/*}/operations' + +authentication: + rules: + - selector: google.cloud.location.Locations.GetLocation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: google.cloud.location.Locations.ListLocations + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.cloud.redis.v1.CloudRedis.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.longrunning.Operations.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform \ No newline at end of file From b4146db2ab7f54434d81c6acf553d346aa6898fd Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 23 Aug 2023 19:26:39 +0200 Subject: [PATCH 1041/1339] chore(deps): update all dependencies (#1744) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 40 +++++++++++------------ 1 file changed, 
20 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 6b69ecc88291..052fbcb6ae23 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -89,9 +89,9 @@ charset-normalizer==3.2.0 \ --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa # via requests -click==8.1.6 \ - --hash=sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd \ - --hash=sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5 +click==8.1.7 \ + --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ + --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via -r requirements.in exceptiongroup==1.1.3 \ --hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \ @@ -283,20 +283,20 @@ proto-plus==1.22.3 \ --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b # via -r requirements.in -protobuf==4.24.0 \ - --hash=sha256:44825e963008f8ea0d26c51911c30d3e82e122997c3c4568fd0385dd7bacaedf \ - --hash=sha256:567fe6b0647494845d0849e3d5b260bfdd75692bf452cdc9cb660d12457c055d \ - --hash=sha256:5ab19ee50037d4b663c02218a811a5e1e7bb30940c79aac385b96e7a4f9daa61 \ - --hash=sha256:5d0ceb9de6e08311832169e601d1fc71bd8e8c779f3ee38a97a78554945ecb85 \ - --hash=sha256:6c817cf4a26334625a1904b38523d1b343ff8b637d75d2c8790189a4064e51c3 \ - --hash=sha256:81cb9c4621d2abfe181154354f63af1c41b00a4882fb230b4425cbaed65e8f52 \ - --hash=sha256:82e6e9ebdd15b8200e8423676eab38b774624d6a1ad696a60d86a2ac93f18201 \ - --hash=sha256:8bb52a2be32db82ddc623aefcedfe1e0eb51da60e18fcc908fb8885c81d72109 \ - --hash=sha256:a38400a692fd0c6944c3c58837d112f135eb1ed6cdad5ca6c5763336e74f1a04 \ - 
--hash=sha256:a6b1ca92ccabfd9903c0c7dde8876221dc7d8d87ad5c42e095cc11b15d3569c7 \ - --hash=sha256:ae7a1835721086013de193311df858bc12cd247abe4ef9710b715d930b95b33e \ - --hash=sha256:ae97b5de10f25b7a443b40427033e545a32b0e9dda17bcd8330d70033379b3e5 \ - --hash=sha256:e8834ef0b4c88666ebb7c7ec18045aa0f4325481d724daa624a4cf9f28134653 +protobuf==4.24.1 \ + --hash=sha256:06437f0d4bb0d5f29e3d392aba69600188d4be5ad1e0a3370e581a9bf75a3081 \ + --hash=sha256:0b2b224e9541fe9f046dd7317d05f08769c332b7e4c54d93c7f0f372dedb0b1a \ + --hash=sha256:302e8752c760549ed4c7a508abc86b25d46553c81989343782809e1a062a2ef9 \ + --hash=sha256:44837a5ed9c9418ad5d502f89f28ba102e9cd172b6668bc813f21716f9273348 \ + --hash=sha256:55dd644adc27d2a624339332755fe077c7f26971045b469ebb9732a69ce1f2ca \ + --hash=sha256:5906c5e79ff50fe38b2d49d37db5874e3c8010826f2362f79996d83128a8ed9b \ + --hash=sha256:5d32363d14aca6e5c9e9d5918ad8fb65b091b6df66740ae9de50ac3916055e43 \ + --hash=sha256:970c701ee16788d74f3de20938520d7a0aebc7e4fff37096a48804c80d2908cf \ + --hash=sha256:bd39b9094a4cc003a1f911b847ab379f89059f478c0b611ba1215053e295132e \ + --hash=sha256:d414199ca605eeb498adc4d2ba82aedc0379dca4a7c364ff9bc9a179aa28e71b \ + --hash=sha256:d4af4fd9e9418e819be30f8df2a16e72fbad546a7576ac7f3653be92a6966d30 \ + --hash=sha256:df015c47d6855b8efa0b9be706c70bf7f050a4d5ac6d37fb043fbd95157a0e25 \ + --hash=sha256:fc361148e902949dcb953bbcb148c99fe8f8854291ad01107e4120361849fd0e # via # -r requirements.in # google-api-core @@ -403,7 +403,7 @@ urllib3==2.0.4 \ # requests # The following packages are considered to be unsafe in a requirements file: -setuptools==68.0.0 \ - --hash=sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f \ - --hash=sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235 +setuptools==68.1.2 \ + --hash=sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d \ + --hash=sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b # via -r requirements.in From 
231da4613996955cabcde7a94b9096eac13934bc Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 23 Aug 2023 15:48:14 -0400 Subject: [PATCH 1042/1339] fix: remove duplicate import statement for `google.longrunning.operations_pb2` (#1726) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #1727 🦕 --- .../%sub/services/%service/transports/base.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/grpc.py.j2 | 8 ++++++++ .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 3 +-- .../%sub/services/%service/transports/base.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/grpc.py.j2 | 8 ++++++++ .../services/%service/transports/grpc_asyncio.py.j2 | 8 ++++++++ .../%sub/services/%service/transports/rest.py.j2 | 10 +++++++++- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 3 +-- .../asset_v1/services/asset_service/transports/base.py | 1 - .../asset_v1/services/asset_service/transports/grpc.py | 1 - .../services/asset_service/transports/grpc_asyncio.py | 1 - .../asset_v1/services/asset_service/transports/rest.py | 3 +-- .../eventarc_v1/services/eventarc/transports/base.py | 1 - .../eventarc_v1/services/eventarc/transports/grpc.py | 1 - .../services/eventarc/transports/grpc_asyncio.py | 1 - .../eventarc_v1/services/eventarc/transports/rest.py | 1 - .../services/config_service_v2/transports/base.py | 1 - .../services/config_service_v2/transports/grpc.py | 1 - .../config_service_v2/transports/grpc_asyncio.py | 1 - .../redis_v1/services/cloud_redis/transports/base.py | 1 - .../redis_v1/services/cloud_redis/transports/grpc.py | 1 - .../services/cloud_redis/transports/grpc_asyncio.py | 1 - .../redis_v1/services/cloud_redis/transports/rest.py | 1 - 23 files changed, 52 insertions(+), 21 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 3a4118b28201..9c3bb8fa6399 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -18,9 +18,14 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -30,6 +35,9 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index a4fecf9c3e34..9d4a91aa032a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -17,9 +17,14 @@ from google.auth.transport.grpc import SslCredentials # 
type: ignore import grpc # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -29,6 +34,9 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6bb8a3124164..77974cf5ed10 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -44,12 +44,11 @@ from google.api_core import path_template from google.api_core import future from google.api_core import operation from google.api_core import operations_v1 -from google.longrunning import operations_pb2 # type: ignore {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} -{% if api.has_operations_mixin %} +{% if api.has_operations_mixin or service.has_lro %} from google.longrunning import operations_pb2 # type: ignore {% endif %} from google.api_core import gapic_v1 diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index b31e5fd85deb..b2469654df1a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -20,9 +20,14 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -32,6 +37,9 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 668330cd2bd5..2e4f26a8a716 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -17,9 +17,14 @@ from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -29,6 +34,9 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 0288b5cd641f..94e2952fcc06 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -17,9 +17,14 @@ import grpc # type: ignore from grpc.experimental import aio # type: ignore {% filter sort_lines %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% for method in service.methods.values() %} {{ method.input.ident.python_import }} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True 
%} +{% else %} {{ method.output.ident.python_import }} +{% endif %} {% endfor %} {% if opts.add_iam_methods or api.has_iam_mixin %} from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -29,6 +34,9 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} {% if api.has_operations_mixin %} +{% set import_ns.has_operations_mixin = True %} +{% endif %} +{% if import_ns.has_operations_mixin %} from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 4e5d9e45cbee..52493d180e3f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -26,8 +26,9 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} +{% set import_ns = namespace(has_operations_mixin=false) %} {% if api.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore +{% set import_ns.has_operations_mixin = True %} {% endif %} from requests import __version__ as requests_version import dataclasses @@ -45,13 +46,20 @@ except AttributeError: # pragma: NO COVER {% filter sort_lines %} {% for method in service.methods.values() %} {{method.input.ident.python_import}} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} {{method.output.ident.python_import}} +{% endif %} {% endfor %} {% if opts.add_iam_methods %} from google.iam.v1 import iam_policy_pb2 # type: ignore from 
google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore +{% endif %} from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5c4074868622..ceb92c806bc7 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -52,12 +52,11 @@ from google.api_core import future {% if service.has_lro %} from google.api_core import operation from google.api_core import operations_v1 -from google.longrunning import operations_pb2 # type: ignore {% endif %}{# lro #} {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 {% endif %} -{% if api.has_operations_mixin %} +{% if api.has_operations_mixin or service.has_lro %} from google.longrunning import operations_pb2 # type: ignore {% endif %} from google.api_core import gapic_v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index c1008068adbf..d46bee37dcca 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -28,7 +28,6 @@ from google.oauth2 import service_account # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import 
operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 46d1f503b82a..53958bcf81a0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -26,7 +26,6 @@ import grpc # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 49f4fe4e5eb2..c8f62d768b49 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -26,7 +26,6 @@ from grpc.experimental import aio # type: ignore from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index ed446c9cf842..662dc3800236 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -28,7 +28,6 @@ from google.protobuf import json_format from google.api_core import operations_v1 -from google.longrunning import operations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re @@ -42,8 +41,8 @@ from google.cloud.asset_v1.types import asset_service -from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 72b2cc0974e6..7096c28dcc47 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -37,7 +37,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore 
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index ae473c3b75eb..1b00e781069f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index e37c3401b12f..b6e2874c4cd2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -35,7 +35,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO from .grpc import 
EventarcGrpcTransport diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 13a42d7eec02..32a4100f5fbb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -31,7 +31,6 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index e967588ea825..3b1f9f96fcfd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -28,7 +28,6 @@ from google.oauth2 import service_account # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index f2ba4a37f07f..fc7344593060 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -26,7 +26,6 @@ import grpc # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 4cdb5ffe7c55..e68ff981640c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -26,7 +26,6 @@ from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 021036407e58..c0a9d6bc0949 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -29,7 +29,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index c797c13edfed..7e07797c707f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -27,7 +27,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index e1f7ed8f42fb..354d27092a62 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -27,7 +27,6 @@ from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis -from google.longrunning import operations_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 45a69fe587df..57b93dcbc8f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -29,7 +29,6 @@ from google.protobuf import json_format from google.api_core import operations_v1 from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 # type: ignore from requests import __version__ as requests_version import dataclasses import re From 234bc4c8769998fefe72e14d55f3bca4fc825b8c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 23 Aug 2023 16:02:14 -0400 Subject: [PATCH 1043/1339] tests: fix ModuleNotFoundError when running py_test (#1735) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes #1734 🦕 --- .../rules_python_gapic/pytest.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/rules_python_gapic/pytest.py 
b/packages/gapic-generator/rules_python_gapic/pytest.py index 934fdfa68096..72095851636e 100644 --- a/packages/gapic-generator/rules_python_gapic/pytest.py +++ b/packages/gapic-generator/rules_python_gapic/pytest.py @@ -3,9 +3,14 @@ import os -if __name__ == '__main__': - sys.exit(pytest.main([ - '--disable-pytest-warnings', - '--quiet', - os.path.dirname(os.path.abspath(__file__)) - ])) +if __name__ == "__main__": + # The generated file name will be of the form `<name>_pytest.py`. + # The generated gapic will be in a directory `<name>_srcjar.py`. + # Extract the `<name>` from this file, and use it to determine the + # directory of the generated gapic. + # Only run `pytest` on the `tests` directory. + module_name = os.path.abspath(__file__).replace("_pytest.py", "") + src_directory = f"{module_name}_srcjar.py" + sys.exit( + pytest.main(["--disable-pytest-warnings", "--quiet", f"{src_directory}/tests"]) + ) From 729ae56033f0e33c5be897593abbead5fad7c6ed Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 28 Aug 2023 15:43:16 +0200 Subject: [PATCH 1044/1339] chore(deps): update all dependencies (#1747) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 34 +++++++++++------------ 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 052fbcb6ae23..e60c66f23f27 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -275,28 +275,28 @@ packaging==23.1 \ --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f # via pytest -pluggy==1.2.0 \ --hash=sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849 \ --hash=sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3 +pluggy==1.3.0 \ --hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \ 
--hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 # via pytest proto-plus==1.22.3 \ --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b # via -r requirements.in -protobuf==4.24.1 \ - --hash=sha256:06437f0d4bb0d5f29e3d392aba69600188d4be5ad1e0a3370e581a9bf75a3081 \ - --hash=sha256:0b2b224e9541fe9f046dd7317d05f08769c332b7e4c54d93c7f0f372dedb0b1a \ - --hash=sha256:302e8752c760549ed4c7a508abc86b25d46553c81989343782809e1a062a2ef9 \ - --hash=sha256:44837a5ed9c9418ad5d502f89f28ba102e9cd172b6668bc813f21716f9273348 \ - --hash=sha256:55dd644adc27d2a624339332755fe077c7f26971045b469ebb9732a69ce1f2ca \ - --hash=sha256:5906c5e79ff50fe38b2d49d37db5874e3c8010826f2362f79996d83128a8ed9b \ - --hash=sha256:5d32363d14aca6e5c9e9d5918ad8fb65b091b6df66740ae9de50ac3916055e43 \ - --hash=sha256:970c701ee16788d74f3de20938520d7a0aebc7e4fff37096a48804c80d2908cf \ - --hash=sha256:bd39b9094a4cc003a1f911b847ab379f89059f478c0b611ba1215053e295132e \ - --hash=sha256:d414199ca605eeb498adc4d2ba82aedc0379dca4a7c364ff9bc9a179aa28e71b \ - --hash=sha256:d4af4fd9e9418e819be30f8df2a16e72fbad546a7576ac7f3653be92a6966d30 \ - --hash=sha256:df015c47d6855b8efa0b9be706c70bf7f050a4d5ac6d37fb043fbd95157a0e25 \ - --hash=sha256:fc361148e902949dcb953bbcb148c99fe8f8854291ad01107e4120361849fd0e +protobuf==4.24.2 \ + --hash=sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880 \ + --hash=sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c \ + --hash=sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b \ + --hash=sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e \ + --hash=sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1 \ + --hash=sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924 \ + --hash=sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e \ + 
--hash=sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3 \ + --hash=sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e \ + --hash=sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd \ + --hash=sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16 \ + --hash=sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74 \ + --hash=sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099 # via # -r requirements.in # google-api-core From b4559568843d32d2e7d7ca6e32964cd2069534b8 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Sat, 2 Sep 2023 03:34:44 -0700 Subject: [PATCH 1045/1339] fix: Resolve some Showcase test errors (#1353) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/tests/system/test_retry.py | 3 ++- packages/gapic-generator/tests/system/test_unary.py | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 0d2f94109034..d649cbabd3dd 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -20,7 +20,8 @@ def test_retry_bubble(echo): - with pytest.raises(exceptions.GatewayTimeout): + # Note: DeadlineExceeded is from gRPC, GatewayTimeout from http + with pytest.raises((exceptions.DeadlineExceeded, exceptions.GatewayTimeout)): echo.echo({ 'error': { 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index bc72f352af9d..076771ed3035 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -37,8 +37,9 @@ def test_unary_with_dict(echo): def test_unary_error(echo): message = 'Bad things! Bad things!' 
- # Note: InvalidArgument is from gRPC, InternalServerError from http - with pytest.raises(exceptions.BadRequest) as exc: + # Note: InvalidArgument is from gRPC, BadRequest from http (no MTLS), InternalServerError from http (MTLS) + # TODO: Reduce number of different exception types here. + with pytest.raises((exceptions.InvalidArgument, exceptions.BadRequest, exceptions.InternalServerError)) as exc: echo.echo({ 'error': { 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), From b1e1fe9f1acad74d79d02dfcd99b66b8b96838d3 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 6 Sep 2023 12:58:22 -0400 Subject: [PATCH 1046/1339] fix: fix docs build for numbered lists (#1740) Co-authored-by: Victor Chudnovsky --- packages/gapic-generator/gapic/utils/lines.py | 61 ++++++++++++++++--- .../google/cloud/eventarc_v1/types/channel.py | 6 +- .../tests/unit/utils/test_lines.py | 35 +++++++++++ 3 files changed, 92 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 358759b558f0..fb24e19351ce 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -17,6 +17,9 @@ from typing import Iterable, Optional +NUMBERED_LIST_REGEX = r"^\d+\. " + + def sort_lines(text: str, dedupe: bool = True) -> str: """Sort the individual lines of a block of text. @@ -40,6 +43,49 @@ def sort_lines(text: str, dedupe: bool = True) -> str: return f'{leading}{answer}{trailing}' +def get_subsequent_line_indentation_level(list_item: str) -> int: + """ + Given a list item return the indentation level for subsequent lines. + For example, if it is a numbered list, the indentation level should be 3 + as shown below. + + Here subsequent lines should be indented by 2 + + - The quick brown fox jumps over the lazy dog. The quick brown fox jumps + over the lazy dog + + Here subsequent lines should be indented by 2 + + + The quick brown fox jumps over the lazy dog. 
The quick brown fox jumps + over the lazy dog + + Here subsequent lines should be indented by 4 to cater for double digits + + 1. The quick brown fox jumps over the lazy dog. The quick brown fox jumps + over the lazy dog + + 22. The quick brown fox jumps over the lazy dog. The quick brown fox jumps + over the lazy dog + """ + if len(list_item) >= 2 and list_item[0:2] in ['- ', '+ ']: + indentation_level = 2 + elif len(list_item) >= 4 and re.match(NUMBERED_LIST_REGEX, list_item): + indentation_level = 4 + else: + # Don't use any indentation level if the list item marker is not known + indentation_level = 0 + return indentation_level + + +def is_list_item(list_item: str) -> bool: + """ + Given a string return a boolean indicating whether a list is identified. + """ + if len(list_item) < 3: + return False + return list_item.startswith('- ') or list_item.startswith('+ ') or bool(re.match(NUMBERED_LIST_REGEX, list_item)) + + + def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0) -> str: """Wrap the given string to the given width. @@ -93,11 +139,12 @@ break_on_hyphens=False, ) # Strip the first \n from the text so it is not misidentified as an - # intentionally short line below, except when the text contains `:` - # as the new line is required for lists. + # intentionally short line below, except when the text contains a list, + # as the new line is required for lists. Look for a list item marker in + # the remaining text which indicates that a list is present. if '\n' in text: - initial_text = text.split('\n')[0] - if ":" not in initial_text: + remaining_text = "".join(text.split('\n')[1:]) + if not is_list_item(remaining_text.strip()): text = text.replace('\n', ' ', 1) # Save the new `first` line. 
@@ -121,9 +168,9 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 tokens = [] token = '' for line in text.split('\n'): - # Ensure that lines that start with a hyphen are always on a new line + # Ensure that lines that start with a list item marker are always on a new line # Ensure that blank lines are preserved - if (line.strip().startswith('-') or not len(line)) and token: + if (is_list_item(line.strip()) or not len(line)) and token: tokens.append(token) token = '' token += line + '\n' @@ -145,7 +192,7 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 initial_indent=' ' * indent, # ensure that subsequent lines for lists are indented 2 spaces subsequent_indent=' ' * indent + \ - (' ' if token.strip().startswith('-') else ''), + ' ' * get_subsequent_line_indentation_level(token.strip()), text=token, width=width, break_on_hyphens=False, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py index 10b33b682336..e864f6554108 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -102,9 +102,9 @@ class State(proto.Enum): possible cases this state can happen: 1. The SaaS provider disconnected from this - Channel. 2. The Channel activation token has - expired but the SaaS provider wasn't - connected. + Channel. + 2. The Channel activation token has expired but + the SaaS provider wasn't connected. 
To re-establish a Connection with a provider, the subscriber should create a new Channel and diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 7a0638b71add..9642b0f01c5e 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -246,3 +246,38 @@ def test_list_with_multiple_paragraphs(): erat. In nec est nisl. Quisque ut orci efficitur, vestibulum ante non, vestibulum erat. Donec mollis ultricies nisl.""" assert lines.wrap(input, width=60) == expected + + +def test_list_with_numbered_list(): + input = """Config for video classification human labeling task. +Currently two types of video classification are supported: +1. Assign labels on the entire video. Assign labels on the entire video. +22. Split the video into multiple video clips based on camera shot, and +assign labels on each video clip.""" + expected = """Config for video classification human labeling task. +Currently two types of video classification are supported: + +1. Assign labels on the entire video. Assign labels on the + entire video. +22. Split the video into multiple video clips based on + camera shot, and assign labels on each video clip.""" + assert lines.wrap(input, width=60) == expected + + +def test_list_with_plus_list_item_marker(): + input = """User-assigned name of the trigger. Must be unique within the project. +Trigger names must meet the following requirements: ++ They must contain only alphanumeric characters and dashes. ++ They can be 1-64 characters long. ++ They must begin and end with an alphanumeric character.""" + expected = """User-assigned name of the trigger. Must +be unique within the project. Trigger +names must meet the following +requirements: + ++ They must contain only alphanumeric + characters and dashes. ++ They can be 1-64 characters long. 
++ They must begin and end with an + alphanumeric character.""" + assert lines.wrap(input, width=40) == expected From 1b50ca31fde19938c6985fc7ddfb31f419807491 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 6 Sep 2023 19:17:47 +0200 Subject: [PATCH 1047/1339] chore(deps): update all dependencies (#1753) Co-authored-by: Anthonios Partheniou --- .../.github/workflows/tests.yaml | 26 +++++++++---------- packages/gapic-generator/requirements.txt | 6 ++--- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 8a7211a685a6..1d464c51c997 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -22,7 +22,7 @@ jobs: # Don't upgrade python version; there's a bug in 3.10 sphinx runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 with: @@ -35,7 +35,7 @@ jobs: mypy: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -51,7 +51,7 @@ jobs: target: [showcase, showcase_alternative_templates] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -90,7 +90,7 @@ jobs: max-parallel: 1 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup temp directory run: | sudo mkdir -p /tmp/workspace/tests/cert/ @@ -135,7 +135,7 @@ jobs: variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v4 with: @@ -160,7 +160,7 @@ jobs: showcase-unit-add-iam-methods: runs-on: 
ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -188,7 +188,7 @@ jobs: matrix: variant: ['', _alternative_templates] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -213,7 +213,7 @@ jobs: snippetgen: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: @@ -233,7 +233,7 @@ jobs: python: ["3.7", "3.8", "3.9", "3.10", "3.11"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v4 with: @@ -255,7 +255,7 @@ jobs: variant: ['', _alternative_templates] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v4 with: @@ -274,7 +274,7 @@ jobs: runs-on: ubuntu-latest container: gcr.io/gapic-images/googleapis steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Cache Bazel files id: cache-bazel uses: actions/cache@v3 @@ -302,7 +302,7 @@ jobs: goldens-lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.11 uses: actions/setup-python@v4 with: @@ -321,7 +321,7 @@ jobs: style-check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python "3.11" uses: actions/setup-python@v4 with: diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e60c66f23f27..933f5c216241 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -317,9 +317,9 @@ pypandoc==1.11 \ --hash=sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5 \ 
--hash=sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007 # via -r requirements.in -pytest==7.4.0 \ - --hash=sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32 \ - --hash=sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a +pytest==7.4.1 \ + --hash=sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab \ + --hash=sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f # via pytest-asyncio pytest-asyncio==0.21.1 \ --hash=sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d \ From 225dd95e18184e8ddfe820e9e8a8b2a885b75a1a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 17:22:49 +0000 Subject: [PATCH 1048/1339] chore(main): release 1.11.5 (#1718) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 11 +++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8a746b6f0d91..7abf51515ffe 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,17 @@ # Changelog +## [1.11.5](https://github.com/googleapis/gapic-generator-python/compare/v1.11.4...v1.11.5) (2023-09-06) + + +### Bug Fixes + +* Fix docs build for generated clients ([#1715](https://github.com/googleapis/gapic-generator-python/issues/1715)) ([e4db994](https://github.com/googleapis/gapic-generator-python/commit/e4db9941078fe417e0d7b30bcd937e6c4dc0e6ba)) +* Fix docs build for numbered lists ([#1740](https://github.com/googleapis/gapic-generator-python/issues/1740)) ([19cc5b3](https://github.com/googleapis/gapic-generator-python/commit/19cc5b36348c1406d2c84fc65e44dbe45a2bdd1c)) +* Preserve new lines 
([#1721](https://github.com/googleapis/gapic-generator-python/issues/1721)) ([baa136f](https://github.com/googleapis/gapic-generator-python/commit/baa136fd4fa94cfb6638c3074f10033dcc4f9da1)) +* Remove duplicate import statement for `google.longrunning.operations_pb2` ([#1726](https://github.com/googleapis/gapic-generator-python/issues/1726)) ([e3f08cd](https://github.com/googleapis/gapic-generator-python/commit/e3f08cd48bdf93e668be1b4b117190383ce2c022)) +* Resolve some Showcase test errors ([#1353](https://github.com/googleapis/gapic-generator-python/issues/1353)) ([4eee261](https://github.com/googleapis/gapic-generator-python/commit/4eee26181e8db9fb5144eef5a76f178c1594e48a)) + ## [1.11.4](https://github.com/googleapis/gapic-generator-python/compare/v1.11.3...v1.11.4) (2023-07-11) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3840434c3997..3d5d85e96150 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.4" +version = "1.11.5" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "click >= 6.7", From d375dfb8de234d7a8f6a3f010b32925ff776b9bd Mon Sep 17 00:00:00 2001 From: Travis Cline Date: Thu, 7 Sep 2023 12:46:38 -0700 Subject: [PATCH 1049/1339] fix: Change to Set vs FrozenSet and thread the same set through (#1125) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/gapic/schema/api.py | 11 ++- .../gapic-generator/gapic/schema/metadata.py | 8 +-- .../gapic-generator/gapic/schema/wrappers.py | 69 +++++++++++++------ 3 files changed, 61 insertions(+), 27 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index c20d33a750b8..084349d4885f 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ 
b/packages/gapic-generator/gapic/schema/api.py @@ -746,6 +746,7 @@ def proto(self) -> Proto: if not self.file_to_generate: return naive + visited_messages: Set[wrappers.MessageType] = set() # Return a context-aware proto object. return dataclasses.replace( naive, @@ -754,13 +755,19 @@ def proto(self) -> Proto: for k, v in naive.all_enums.items() ), all_messages=collections.OrderedDict( - (k, v.with_context(collisions=naive.names)) + (k, v.with_context( + collisions=naive.names, + visited_messages=visited_messages, + )) for k, v in naive.all_messages.items() ), services=collections.OrderedDict( # Note: services bind to themselves because services get their # own output files. - (k, v.with_context(collisions=v.names)) + (k, v.with_context( + collisions=v.names, + visited_messages=visited_messages, + )) for k, v in naive.services.items() ), meta=naive.meta.with_context(collisions=naive.names), diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 3dbc7389663a..2cbba5a1592e 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -28,7 +28,7 @@ import dataclasses import re -from typing import FrozenSet, Tuple, Optional +from typing import FrozenSet, Set, Tuple, Optional from google.protobuf import descriptor_pb2 @@ -54,7 +54,7 @@ class Address(BaseAddress): api_naming: naming.Naming = dataclasses.field( default_factory=naming.NewNaming, ) - collisions: FrozenSet[str] = dataclasses.field(default_factory=frozenset) + collisions: Set[str] = dataclasses.field(default_factory=set) def __eq__(self, other) -> bool: # We don't want to use api_naming or collisions to determine equality, @@ -351,7 +351,7 @@ def resolve(self, selector: str) -> str: return f'{".".join(self.package)}.{selector}' return selector - def with_context(self, *, collisions: FrozenSet[str]) -> 'Address': + def with_context(self, *, collisions: Set[str]) -> 'Address': """Return 
a derivative of this address with the provided context. This method is used to address naming collisions. The returned @@ -390,7 +390,7 @@ def doc(self): return '\n\n'.join(self.documentation.leading_detached_comments) return '' - def with_context(self, *, collisions: FrozenSet[str]) -> 'Metadata': + def with_context(self, *, collisions: Set[str]) -> 'Metadata': """Return a derivative of this metadata with the provided context. This method is used to address naming collisions. The returned diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 7f6a0c0ce05d..3b49db1ff127 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -355,8 +355,8 @@ def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: def with_context( self, *, - collisions: FrozenSet[str], - visited_messages: FrozenSet["MessageType"], + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, ) -> 'Field': """Return a derivative of this field with the provided context. @@ -368,7 +368,7 @@ def with_context( self, message=self.message.with_context( collisions=collisions, - skip_fields=self.message in visited_messages, + skip_fields=self.message in visited_messages if visited_messages else False, visited_messages=visited_messages, ) if self.message else None, enum=self.enum.with_context(collisions=collisions) @@ -631,7 +631,7 @@ def path_regex_str(self) -> str: return parsing_regex_str def get_field(self, *field_path: str, - collisions: FrozenSet[str] = frozenset()) -> Field: + collisions: Optional[Set[str]] = None) -> Field: """Return a field arbitrarily deep in this message's structure. 
This method recursively traverses the message tree to return the @@ -672,7 +672,7 @@ def get_field(self, *field_path: str, if len(field_path) == 1: return cursor.with_context( collisions=collisions, - visited_messages=frozenset({self}), + visited_messages=set({self}), ) # Quick check: If cursor is a repeated field, then raise an exception. @@ -698,9 +698,9 @@ def get_field(self, *field_path: str, return cursor.message.get_field(*field_path[1:], collisions=collisions) def with_context(self, *, - collisions: FrozenSet[str], + collisions: Set[str], skip_fields: bool = False, - visited_messages: FrozenSet["MessageType"] = frozenset(), + visited_messages: Optional[Set["MessageType"]] = None, ) -> 'MessageType': """Return a derivative of this message with the provided context. @@ -712,7 +712,8 @@ def with_context(self, *, underlying fields. This provides for an "exit" in the case of circular references. """ - visited_messages = visited_messages | {self} + visited_messages = visited_messages or set() + visited_messages.add(self) return dataclasses.replace( self, fields={ @@ -777,7 +778,7 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address - def with_context(self, *, collisions: FrozenSet[str]) -> 'EnumType': + def with_context(self, *, collisions: Set[str]) -> 'EnumType': """Return a derivative of this enum with the provided context. This method is used to address naming collisions. The returned @@ -871,7 +872,10 @@ class ExtendedOperationInfo: request_type: MessageType operation_type: MessageType - def with_context(self, *, collisions: FrozenSet[str]) -> 'ExtendedOperationInfo': + def with_context(self, *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> 'ExtendedOperationInfo': """Return a derivative of this OperationInfo with the provided context. This method is used to address naming collisions. 
The returned @@ -881,10 +885,12 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'ExtendedOperationInfo' return self if not collisions else dataclasses.replace( self, request_type=self.request_type.with_context( - collisions=collisions + collisions=collisions, + visited_messages=visited_messages, ), operation_type=self.operation_type.with_context( collisions=collisions, + visited_messages=visited_messages, ), ) @@ -895,7 +901,10 @@ class OperationInfo: response_type: MessageType metadata_type: MessageType - def with_context(self, *, collisions: FrozenSet[str]) -> 'OperationInfo': + def with_context(self, *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> 'OperationInfo': """Return a derivative of this OperationInfo with the provided context. This method is used to address naming collisions. The returned @@ -905,10 +914,12 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'OperationInfo': return dataclasses.replace( self, response_type=self.response_type.with_context( - collisions=collisions + collisions=collisions, + visited_messages=visited_messages, ), metadata_type=self.metadata_type.with_context( - collisions=collisions + collisions=collisions, + visited_messages=visited_messages, ), ) @@ -1533,7 +1544,10 @@ def void(self) -> bool: """Return True if this method has no return value, False otherwise.""" return self.output.ident.proto == 'google.protobuf.Empty' - def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': + def with_context(self, *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> 'Method': """Return a derivative of this method with the provided context. This method is used to address naming collisions. 
The returned @@ -1543,12 +1557,14 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': maybe_lro = None if self.lro: maybe_lro = self.lro.with_context( - collisions=collisions + collisions=collisions, + visited_messages=visited_messages, ) if collisions else self.lro maybe_extended_lro = ( self.extended_lro.with_context( - collisions=collisions + collisions=collisions, + visited_messages=visited_messages, ) if self.extended_lro else None ) @@ -1556,8 +1572,14 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Method': self, lro=maybe_lro, extended_lro=maybe_extended_lro, - input=self.input.with_context(collisions=collisions), - output=self.output.with_context(collisions=collisions), + input=self.input.with_context( + collisions=collisions, + visited_messages=visited_messages, + ), + output=self.output.with_context( + collisions=collisions, + visited_messages=visited_messages, + ), meta=self.meta.with_context(collisions=collisions), ) @@ -1842,7 +1864,10 @@ def operation_polling_method(self) -> Optional[Method]: None ) - def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': + def with_context(self, *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> 'Service': """Return a derivative of this service with the provided context. This method is used to address naming collisions. The returned @@ -1855,7 +1880,9 @@ def with_context(self, *, collisions: FrozenSet[str]) -> 'Service': k: v.with_context( # A method's flattened fields create additional names # that may conflict with module imports. 
- collisions=collisions | frozenset(v.flattened_fields.keys())) + collisions=collisions | set(v.flattened_fields.keys()), + visited_messages=visited_messages, + ) for k, v in self.methods.items() }, meta=self.meta.with_context(collisions=collisions), From 00fdca0db6a9cf059e11f27750f4d0e1a6a9bb6d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Oct 2023 00:52:15 +0200 Subject: [PATCH 1050/1339] chore(deps): update all dependencies (#1760) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---|---|---| | boringssl | http_archive | digest | `bb41bc0` -> `bd20800` | [![age](https://developer.mend.io/api/mc/badges/age/github-tags/boringssl/?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/github-tags/boringssl/?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/github-tags/boringssl//?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/github-tags/boringssl//?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [charset-normalizer](https://togithub.com/Ousret/charset_normalizer) | | minor | `==3.2.0` -> `==3.3.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/charset-normalizer/3.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/charset-normalizer/3.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/charset-normalizer/3.2.0/3.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/charset-normalizer/3.2.0/3.3.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [google-api-core](https://togithub.com/googleapis/python-api-core) | | minor | `==2.11.1` -> `==2.12.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/google-api-core/2.12.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/google-api-core/2.12.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/google-api-core/2.11.1/2.12.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/google-api-core/2.11.1/2.12.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [google-auth](https://togithub.com/googleapis/google-auth-library-python) | | minor | `==2.22.0` -> `==2.23.2` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/google-auth/2.23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/google-auth/2.23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/google-auth/2.22.0/2.23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/google-auth/2.22.0/2.23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [grpcio](https://grpc.io) ([source](https://togithub.com/grpc/grpc)) | | minor | `==1.57.0` -> `==1.59.0` | 
[![age](https://developer.mend.io/api/mc/badges/age/pypi/grpcio/1.59.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/grpcio/1.59.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/grpcio/1.57.0/1.59.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/grpcio/1.57.0/1.59.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [packaging](https://togithub.com/pypa/packaging) | | minor | `==23.1` -> `==23.2` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/packaging/23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/packaging/23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/packaging/23.1/23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/packaging/23.1/23.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [protobuf](https://developers.google.com/protocol-buffers/) | | patch | `==4.24.2` -> `==4.24.3` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/protobuf/4.24.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/protobuf/4.24.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/protobuf/4.24.2/4.24.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/protobuf/4.24.2/4.24.3?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [pytest](https://docs.pytest.org/en/latest/) 
([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | | patch | `==7.4.1` -> `==7.4.2` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/pytest/7.4.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/pytest/7.4.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/pytest/7.4.1/7.4.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/pytest/7.4.1/7.4.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [setuptools](https://togithub.com/pypa/setuptools) ([changelog](https://setuptools.pypa.io/en/stable/history.html)) | | minor | `==68.1.2` -> `==68.2.2` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/setuptools/68.2.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/setuptools/68.2.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/setuptools/68.1.2/68.2.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/setuptools/68.1.2/68.2.2?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [typing-extensions](https://togithub.com/python/typing_extensions) ([changelog](https://togithub.com/python/typing_extensions/blob/main/CHANGELOG.md)) | | minor | `==4.7.1` -> `==4.8.0` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/typing-extensions/4.8.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/typing-extensions/4.8.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | 
[![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/typing-extensions/4.7.1/4.8.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/typing-extensions/4.7.1/4.8.0?slim=true)](https://docs.renovatebot.com/merge-confidence/) | | [urllib3](https://togithub.com/urllib3/urllib3) ([changelog](https://togithub.com/urllib3/urllib3/blob/main/CHANGES.rst)) | | patch | `==2.0.4` -> `==2.0.5` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/urllib3/2.0.5?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/urllib3/2.0.5?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/urllib3/2.0.4/2.0.5?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/urllib3/2.0.4/2.0.5?slim=true)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes

Ousret/charset_normalizer (charset-normalizer) ### [`v3.3.0`](https://togithub.com/Ousret/charset_normalizer/blob/HEAD/CHANGELOG.md#330-2023-09-30) [Compare Source](https://togithub.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) ##### Added - Allow to execute the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer` - Support for 9 forgotten encoding that are supported by Python but unlisted in `encoding.aliases` as they have no alias ([#​323](https://togithub.com/Ousret/charset_normalizer/issues/323)) ##### Removed - (internal) Redundant utils.is_ascii function and unused function is_private_use_only - (internal) charset_normalizer.assets is moved inside charset_normalizer.constant ##### Changed - (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection - Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.7 ##### Fixed - Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \__lt\_\_ ([#​350](https://togithub.com/Ousret/charset_normalizer/issues/350))
googleapis/python-api-core (google-api-core) ### [`v2.12.0`](https://togithub.com/googleapis/python-api-core/blob/HEAD/CHANGELOG.md#2120-2023-09-07) [Compare Source](https://togithub.com/googleapis/python-api-core/compare/v2.11.1...v2.12.0) ##### Features - Add a little bit of typing to google.api_core.retry ([#​453](https://togithub.com/googleapis/python-api-core/issues/453)) ([2477ab9](https://togithub.com/googleapis/python-api-core/commit/2477ab9ea5c2e863a493fb7ebebaa429a44ea096)) - Add grpc Compression argument to channels and methods ([#​451](https://togithub.com/googleapis/python-api-core/issues/451)) ([bdebd63](https://togithub.com/googleapis/python-api-core/commit/bdebd6331f9c0d3d1a8ceaf274f07d2ed75bfe92)) ##### Documentation - Fix a typo in google/api_core/page_iterator.py ([#​511](https://togithub.com/googleapis/python-api-core/issues/511)) ([c0ce73c](https://togithub.com/googleapis/python-api-core/commit/c0ce73c4de53ad694fe36d17408998aa1230398f))
googleapis/google-auth-library-python (google-auth) ### [`v2.23.2`](https://togithub.com/googleapis/google-auth-library-python/blob/HEAD/CHANGELOG.md#2232-2023-09-28) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.23.1...v2.23.2) ##### Bug Fixes - Support urllib3<2.0 versions ([#​1390](https://togithub.com/googleapis/google-auth-library-python/issues/1390)) ([07c464a](https://togithub.com/googleapis/google-auth-library-python/commit/07c464a75fd873f23ca78016e7754697f4511f59)) ### [`v2.23.1`](https://togithub.com/googleapis/google-auth-library-python/blob/HEAD/CHANGELOG.md#2231-2023-09-26) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.23.0...v2.23.1) ##### Bug Fixes - Less restrictive content-type header check for google authentication (ignores charset) ([#​1382](https://togithub.com/googleapis/google-auth-library-python/issues/1382)) ([7039beb](https://togithub.com/googleapis/google-auth-library-python/commit/7039beb63b8644be748cfc2fc79a2b8b643cda9f)) - Trust boundary meta header renaming and using the schema from backend team. 
([#​1384](https://togithub.com/googleapis/google-auth-library-python/issues/1384)) ([2503d4a](https://togithub.com/googleapis/google-auth-library-python/commit/2503d4a50995d4f2756846a17b33997273ace5f1)) - Update urllib3 to >= 2.0.5 ([#​1389](https://togithub.com/googleapis/google-auth-library-python/issues/1389)) ([a99f3bb](https://togithub.com/googleapis/google-auth-library-python/commit/a99f3bbf97c07a87203b7471817cfb2a1662293d)) ### [`v2.23.0`](https://togithub.com/googleapis/google-auth-library-python/blob/HEAD/CHANGELOG.md#2230-2023-09-11) [Compare Source](https://togithub.com/googleapis/google-auth-library-python/compare/v2.22.0...v2.23.0) ##### Features - Add get_bq_config_path() to \_cloud_sdk.py ([9f52f66](https://togithub.com/googleapis/google-auth-library-python/commit/9f52f665247ada59278ffddaaef3ada9e419154c)) - Add get_bq_config_path() to \_cloud_sdk.py ([#​1358](https://togithub.com/googleapis/google-auth-library-python/issues/1358)) ([9f52f66](https://togithub.com/googleapis/google-auth-library-python/commit/9f52f665247ada59278ffddaaef3ada9e419154c)) ##### Bug Fixes - Expose universe domain in credentials ([#​1380](https://togithub.com/googleapis/google-auth-library-python/issues/1380)) ([8b8fce6](https://togithub.com/googleapis/google-auth-library-python/commit/8b8fce6a1e1ca6e0199cb5f15a90af477bf1c853)) - Make external_account resistant to string type 'expires_in' responses from non-compliant services ([#​1379](https://togithub.com/googleapis/google-auth-library-python/issues/1379)) ([01d3770](https://togithub.com/googleapis/google-auth-library-python/commit/01d37706d6750c20952cf01b6a616b23aafa5be9)) - Missing ssj for impersonate cred ([#​1377](https://togithub.com/googleapis/google-auth-library-python/issues/1377)) ([7d453dc](https://togithub.com/googleapis/google-auth-library-python/commit/7d453dc6408b908e24312a7bd3dc380ad43220be)) - Skip checking projectid on cred if env var is set 
([#​1349](https://togithub.com/googleapis/google-auth-library-python/issues/1349)) ([a4135a3](https://togithub.com/googleapis/google-auth-library-python/commit/a4135a3e9620a2cbf99957858c13780b92ff707c))
grpc/grpc (grpcio) ### [`v1.59.0`](https://togithub.com/grpc/grpc/releases/tag/v1.59.0) [Compare Source](https://togithub.com/grpc/grpc/compare/v1.58.0...v1.59.0) This is release 1.59.0 ([generative](https://togithub.com/grpc/grpc/blob/master/doc/g_stands_for.md)) of gRPC Core. For gRPC documentation, see [grpc.io](https://grpc.io/). For previous releases, see [Releases](https://togithub.com/grpc/grpc/releases). This release contains refinements, improvements, and bug fixes, with highlights listed below. ## Core - \[Release] Bump core version [`2023092`](https://togithub.com/grpc/grpc/commit/202309251628). ([#​34476](https://togithub.com/grpc/grpc/pull/34476)) - \[xds ssa] Remove environment variable protection for stateful affinity. ([#​34435](https://togithub.com/grpc/grpc/pull/34435)) - \[windows DNS] Simplify c-ares Windows code. ([#​33965](https://togithub.com/grpc/grpc/pull/33965)) - \[c-ares] fix spin loop bug when c-ares gives up on a socket that still has data left in its read buffer. ([#​34185](https://togithub.com/grpc/grpc/pull/34185)) - \[Deps] Adding upb as a submodule. ([#​34199](https://togithub.com/grpc/grpc/pull/34199)) - \[EventEngine] Update Cancel contract on closure deletion timeline. ([#​34167](https://togithub.com/grpc/grpc/pull/34167)) ## C\# - \[csharp codegen] Handle empty base_namespace option value to fix [#​34113](https://togithub.com/grpc/grpc/issues/34113). ([#​34137](https://togithub.com/grpc/grpc/pull/34137)) ## Python - \[Python 3.12] Support Python 3.12. ([#​34398](https://togithub.com/grpc/grpc/pull/34398)) - \[Python 3.12] Deprecate distutil. ([#​34186](https://togithub.com/grpc/grpc/pull/34186)) ## Ruby - \[ruby] Fix linking errors on x86-darwin. ([#​34134](https://togithub.com/grpc/grpc/pull/34134)) - \[Ruby] replace strdup with gpr_strdup. ([#​34177](https://togithub.com/grpc/grpc/pull/34177)) - \[ruby] drop ruby 2.6 support. 
([#​34198](https://togithub.com/grpc/grpc/pull/34198)) ### [`v1.58.0`](https://togithub.com/grpc/grpc/compare/v1.57.0...v1.58.0) [Compare Source](https://togithub.com/grpc/grpc/compare/v1.57.0...v1.58.0)
pypa/packaging (packaging) ### [`v23.2`](https://togithub.com/pypa/packaging/releases/tag/23.2) [Compare Source](https://togithub.com/pypa/packaging/compare/23.1...23.2) #### What's Changed - parse_marker should consume the entire source string by [@​mwerschy](https://togithub.com/mwerschy) in [https://togithub.com/pypa/packaging/pull/687](https://togithub.com/pypa/packaging/pull/687) - Create a Security Policy file by [@​joycebrum](https://togithub.com/joycebrum) in [https://togithub.com/pypa/packaging/pull/695](https://togithub.com/pypa/packaging/pull/695) - Add python 3.12 to CI by [@​mayeut](https://togithub.com/mayeut) in [https://togithub.com/pypa/packaging/pull/689](https://togithub.com/pypa/packaging/pull/689) - Remove URL validation from requirement parsing by [@​uranusjr](https://togithub.com/uranusjr) in [https://togithub.com/pypa/packaging/pull/684](https://togithub.com/pypa/packaging/pull/684) - Add types for packaging.version.\_Version by [@​hauntsaninja](https://togithub.com/hauntsaninja) in [https://togithub.com/pypa/packaging/pull/665](https://togithub.com/pypa/packaging/pull/665) - Add PyPy 3.10 to CI by [@​mayeut](https://togithub.com/mayeut) in [https://togithub.com/pypa/packaging/pull/699](https://togithub.com/pypa/packaging/pull/699) - Remove unused argument in `_manylinux._is_compatible` by [@​mayeut](https://togithub.com/mayeut) in [https://togithub.com/pypa/packaging/pull/700](https://togithub.com/pypa/packaging/pull/700) - Canonicalize names for requirements comparison by [@​astrojuanlu](https://togithub.com/astrojuanlu) in [https://togithub.com/pypa/packaging/pull/696](https://togithub.com/pypa/packaging/pull/696) - Add platform tag support for LoongArch by [@​loongson-zn](https://togithub.com/loongson-zn) in [https://togithub.com/pypa/packaging/pull/693](https://togithub.com/pypa/packaging/pull/693) - Ability to install `armv7l manylinux/musllinux` wheels on `armv8l` by [@​mayeut](https://togithub.com/mayeut) in 
[https://togithub.com/pypa/packaging/pull/690](https://togithub.com/pypa/packaging/pull/690) - Include CHANGELOG.rst in sdist by [@​astrojuanlu](https://togithub.com/astrojuanlu) in [https://togithub.com/pypa/packaging/pull/704](https://togithub.com/pypa/packaging/pull/704) - Update pyupgrade to Python 3.7+ by [@​fangchenli](https://togithub.com/fangchenli) in [https://togithub.com/pypa/packaging/pull/580](https://togithub.com/pypa/packaging/pull/580) - Fix version pattern pre-releases by [@​deathaxe](https://togithub.com/deathaxe) in [https://togithub.com/pypa/packaging/pull/705](https://togithub.com/pypa/packaging/pull/705) - Fix typos found by codespell by [@​DimitriPapadopoulos](https://togithub.com/DimitriPapadopoulos) in [https://togithub.com/pypa/packaging/pull/706](https://togithub.com/pypa/packaging/pull/706) - Support enriched metadata by [@​brettcannon](https://togithub.com/brettcannon) in [https://togithub.com/pypa/packaging/pull/686](https://togithub.com/pypa/packaging/pull/686) - Correct rST syntax in CHANGELOG.rst by [@​atugushev](https://togithub.com/atugushev) in [https://togithub.com/pypa/packaging/pull/709](https://togithub.com/pypa/packaging/pull/709) - fix: platform tag for GraalPy by [@​mayeut](https://togithub.com/mayeut) in [https://togithub.com/pypa/packaging/pull/711](https://togithub.com/pypa/packaging/pull/711) - Document that this library uses a calendar-based versioning scheme by [@​faph](https://togithub.com/faph) in [https://togithub.com/pypa/packaging/pull/717](https://togithub.com/pypa/packaging/pull/717) - fix: Update copyright date for docs by [@​garrypolley](https://togithub.com/garrypolley) in [https://togithub.com/pypa/packaging/pull/713](https://togithub.com/pypa/packaging/pull/713) - Bump pip version to avoid known vulnerabilities by [@​joycebrum](https://togithub.com/joycebrum) in [https://togithub.com/pypa/packaging/pull/720](https://togithub.com/pypa/packaging/pull/720) - Typing annotations fixed in version.py by 
[@​jolaf](https://togithub.com/jolaf) in [https://togithub.com/pypa/packaging/pull/723](https://togithub.com/pypa/packaging/pull/723) - parse\_{sdist,wheel}\_filename: don't raise InvalidVersion by [@​SpecLad](https://togithub.com/SpecLad) in [https://togithub.com/pypa/packaging/pull/721](https://togithub.com/pypa/packaging/pull/721) - Fix code blocks in CHANGELOG.md by [@​edmorley](https://togithub.com/edmorley) in [https://togithub.com/pypa/packaging/pull/724](https://togithub.com/pypa/packaging/pull/724) #### New Contributors - [@​mwerschy](https://togithub.com/mwerschy) made their first contribution in [https://togithub.com/pypa/packaging/pull/687](https://togithub.com/pypa/packaging/pull/687) - [@​joycebrum](https://togithub.com/joycebrum) made their first contribution in [https://togithub.com/pypa/packaging/pull/695](https://togithub.com/pypa/packaging/pull/695) - [@​astrojuanlu](https://togithub.com/astrojuanlu) made their first contribution in [https://togithub.com/pypa/packaging/pull/696](https://togithub.com/pypa/packaging/pull/696) - [@​loongson-zn](https://togithub.com/loongson-zn) made their first contribution in [https://togithub.com/pypa/packaging/pull/693](https://togithub.com/pypa/packaging/pull/693) - [@​fangchenli](https://togithub.com/fangchenli) made their first contribution in [https://togithub.com/pypa/packaging/pull/580](https://togithub.com/pypa/packaging/pull/580) - [@​deathaxe](https://togithub.com/deathaxe) made their first contribution in [https://togithub.com/pypa/packaging/pull/705](https://togithub.com/pypa/packaging/pull/705) - [@​DimitriPapadopoulos](https://togithub.com/DimitriPapadopoulos) made their first contribution in [https://togithub.com/pypa/packaging/pull/706](https://togithub.com/pypa/packaging/pull/706) - [@​atugushev](https://togithub.com/atugushev) made their first contribution in [https://togithub.com/pypa/packaging/pull/709](https://togithub.com/pypa/packaging/pull/709) - [@​faph](https://togithub.com/faph) made 
their first contribution in [https://togithub.com/pypa/packaging/pull/717](https://togithub.com/pypa/packaging/pull/717) - [@​garrypolley](https://togithub.com/garrypolley) made their first contribution in [https://togithub.com/pypa/packaging/pull/713](https://togithub.com/pypa/packaging/pull/713) - [@​jolaf](https://togithub.com/jolaf) made their first contribution in [https://togithub.com/pypa/packaging/pull/723](https://togithub.com/pypa/packaging/pull/723) - [@​SpecLad](https://togithub.com/SpecLad) made their first contribution in [https://togithub.com/pypa/packaging/pull/721](https://togithub.com/pypa/packaging/pull/721) - [@​edmorley](https://togithub.com/edmorley) made their first contribution in [https://togithub.com/pypa/packaging/pull/724](https://togithub.com/pypa/packaging/pull/724) **Full Changelog**: https://togithub.com/pypa/packaging/compare/23.1...23.2
pytest-dev/pytest (pytest) ### [`v7.4.2`](https://togithub.com/pytest-dev/pytest/releases/tag/7.4.2): pytest 7.4.2 (2023-09-07) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/7.4.1...7.4.2) ##### Bug Fixes - [#​11237](https://togithub.com/pytest-dev/pytest/issues/11237): Fix doctest collection of `functools.cached_property` objects. - [#​11306](https://togithub.com/pytest-dev/pytest/issues/11306): Fixed bug using `--importmode=importlib` which would cause package `__init__.py` files to be imported more than once in some cases. - [#​11367](https://togithub.com/pytest-dev/pytest/issues/11367): Fixed bug where `user_properties` were not being saved in the JUnit XML file if a fixture failed during teardown. - [#​11394](https://togithub.com/pytest-dev/pytest/issues/11394): Fixed crash when parsing long command line arguments that might be interpreted as files. ##### Improved Documentation - [#​11391](https://togithub.com/pytest-dev/pytest/issues/11391): Improved disclaimer on pytest plugin reference page to better indicate this is an automated, non-curated listing.
pypa/setuptools (setuptools) ### [`v68.2.2`](https://togithub.com/pypa/setuptools/compare/v68.2.1...v68.2.2) [Compare Source](https://togithub.com/pypa/setuptools/compare/v68.2.1...v68.2.2) ### [`v68.2.1`](https://togithub.com/pypa/setuptools/compare/v68.2.0...v68.2.1) [Compare Source](https://togithub.com/pypa/setuptools/compare/v68.2.0...v68.2.1) ### [`v68.2.0`](https://togithub.com/pypa/setuptools/compare/v68.1.2...v68.2.0) [Compare Source](https://togithub.com/pypa/setuptools/compare/v68.1.2...v68.2.0)
python/typing_extensions (typing-extensions) ### [`v4.8.0`](https://togithub.com/python/typing_extensions/blob/HEAD/CHANGELOG.md#Release-480-September-17-2023) [Compare Source](https://togithub.com/python/typing_extensions/compare/4.7.1...4.8.0) No changes since 4.8.0rc1.
urllib3/urllib3 (urllib3) ### [`v2.0.5`](https://togithub.com/urllib3/urllib3/blob/HEAD/CHANGES.rst#205-2023-09-20) [Compare Source](https://togithub.com/urllib3/urllib3/compare/2.0.4...v2.0.5) - Allowed pyOpenSSL third-party module without any deprecation warning. ([#​3126](https://togithub.com/urllib3/urllib3/issues/3126)) - Fixed default `blocksize` of `HTTPConnection` classes to match high-level classes. Previously was 8KiB, now 16KiB. ([#​3066](https://togithub.com/urllib3/urllib3/issues/3066))
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 👻 **Immortal**: This PR will be recreated if closed unmerged. Get [config help](https://togithub.com/renovatebot/renovate/discussions) if that's undesired. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://developer.mend.io/github/googleapis/gapic-generator-python). --- packages/gapic-generator/WORKSPACE | 12 +- packages/gapic-generator/requirements.txt | 338 ++++++++++++---------- 2 files changed, 189 insertions(+), 161 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 6748cf6cae71..6820894328c0 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -63,11 +63,15 @@ load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") protobuf_deps() -# Import boringssl explicitly to override what gRPC imports as its dependency. -# Boringssl build fails on gcc12 without this fix: +# TODO(https://github.com/googleapis/gapic-generator-python/issues/1781): +# Remove this import once gRPC depends on a newer version. +# +# Background: Import boringssl explicitly to override what gRPC +# imports as its dependency. Boringssl build fails on gcc12 without +# this fix: # https://github.com/google/boringssl/commit/8462a367bb57e9524c3d8eca9c62733c63a63cf4, -# which is present only in the newest version of boringssl, not the one imported -# by gRPC. Remove this import once gRPC depends on a newer version. +# which is present only in the newest version of boringssl, not the +# one imported by gRPC. 
http_archive( name = "boringssl", sha256 = "b460f8673f3393e58ce506e9cdde7f2c3b2575b075f214cb819fb57d809f052b", diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 933f5c216241..7baff2d9f64b 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -12,82 +12,97 @@ certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -charset-normalizer==3.2.0 \ - --hash=sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96 \ - --hash=sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c \ - --hash=sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710 \ - --hash=sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 \ - --hash=sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020 \ - --hash=sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252 \ - --hash=sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad \ - --hash=sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 \ - --hash=sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a \ - --hash=sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f \ - --hash=sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6 \ - --hash=sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4 \ - --hash=sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a \ - --hash=sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 \ - --hash=sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2 \ - --hash=sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23 \ - --hash=sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace 
\ - --hash=sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd \ - --hash=sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982 \ - --hash=sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 \ - --hash=sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2 \ - --hash=sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea \ - --hash=sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09 \ - --hash=sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5 \ - --hash=sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 \ - --hash=sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489 \ - --hash=sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9 \ - --hash=sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80 \ - --hash=sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592 \ - --hash=sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3 \ - --hash=sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6 \ - --hash=sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed \ - --hash=sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c \ - --hash=sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200 \ - --hash=sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a \ - --hash=sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e \ - --hash=sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d \ - --hash=sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 \ - --hash=sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623 \ - --hash=sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 \ - --hash=sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3 \ - 
--hash=sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa \ - --hash=sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9 \ - --hash=sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 \ - --hash=sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f \ - --hash=sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1 \ - --hash=sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4 \ - --hash=sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a \ - --hash=sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8 \ - --hash=sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3 \ - --hash=sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029 \ - --hash=sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f \ - --hash=sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959 \ - --hash=sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22 \ - --hash=sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7 \ - --hash=sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952 \ - --hash=sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346 \ - --hash=sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e \ - --hash=sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d \ - --hash=sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299 \ - --hash=sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd \ - --hash=sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a \ - --hash=sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3 \ - --hash=sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 \ - --hash=sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 \ - 
--hash=sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c \ - --hash=sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858 \ - --hash=sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a \ - --hash=sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449 \ - --hash=sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c \ - --hash=sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918 \ - --hash=sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1 \ - --hash=sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c \ - --hash=sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac \ - --hash=sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa +charset-normalizer==3.3.0 \ + --hash=sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843 \ + --hash=sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786 \ + --hash=sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e \ + --hash=sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8 \ + --hash=sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4 \ + --hash=sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa \ + --hash=sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d \ + --hash=sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82 \ + --hash=sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7 \ + --hash=sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895 \ + --hash=sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d \ + --hash=sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a \ + --hash=sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382 \ + 
--hash=sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678 \ + --hash=sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b \ + --hash=sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e \ + --hash=sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741 \ + --hash=sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4 \ + --hash=sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596 \ + --hash=sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9 \ + --hash=sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69 \ + --hash=sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c \ + --hash=sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77 \ + --hash=sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13 \ + --hash=sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459 \ + --hash=sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e \ + --hash=sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7 \ + --hash=sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908 \ + --hash=sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a \ + --hash=sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f \ + --hash=sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8 \ + --hash=sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482 \ + --hash=sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d \ + --hash=sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d \ + --hash=sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545 \ + --hash=sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34 \ + --hash=sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86 \ + 
--hash=sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6 \ + --hash=sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe \ + --hash=sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e \ + --hash=sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc \ + --hash=sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7 \ + --hash=sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd \ + --hash=sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c \ + --hash=sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557 \ + --hash=sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a \ + --hash=sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89 \ + --hash=sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078 \ + --hash=sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e \ + --hash=sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4 \ + --hash=sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403 \ + --hash=sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0 \ + --hash=sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89 \ + --hash=sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115 \ + --hash=sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9 \ + --hash=sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05 \ + --hash=sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a \ + --hash=sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec \ + --hash=sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56 \ + --hash=sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38 \ + --hash=sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479 \ + 
--hash=sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c \ + --hash=sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e \ + --hash=sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd \ + --hash=sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186 \ + --hash=sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455 \ + --hash=sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c \ + --hash=sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65 \ + --hash=sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78 \ + --hash=sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287 \ + --hash=sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df \ + --hash=sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43 \ + --hash=sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1 \ + --hash=sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7 \ + --hash=sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989 \ + --hash=sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a \ + --hash=sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63 \ + --hash=sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884 \ + --hash=sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649 \ + --hash=sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810 \ + --hash=sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828 \ + --hash=sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4 \ + --hash=sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2 \ + --hash=sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd \ + --hash=sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5 \ + 
--hash=sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe \ + --hash=sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293 \ + --hash=sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e \ + --hash=sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e \ + --hash=sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8 # via requests click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ @@ -97,13 +112,13 @@ exceptiongroup==1.1.3 \ --hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \ --hash=sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3 # via pytest -google-api-core==2.11.1 \ - --hash=sha256:25d29e05a0058ed5f19c61c0a78b1b53adea4d9364b464d014fbda941f6d1c9a \ - --hash=sha256:d92a5a92dc36dd4f4b9ee4e55528a90e432b059f93aee6ad857f9de8cc7ae94a +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via -r requirements.in -google-auth==2.22.0 \ - --hash=sha256:164cba9af4e6e4e40c3a4f90a1a6c12ee56f14c0b4868d1ca91b32826ab334ce \ - --hash=sha256:d61d1b40897407b574da67da1a833bdc10d5a11642566e506565d1b1a46ba873 +google-auth==2.23.2 \ + --hash=sha256:5a9af4be520ba33651471a0264eead312521566f44631cbb621164bc30c8fd40 \ + --hash=sha256:c2e253347579d483004f17c3bd0bf92e611ef6c7ba24d41c5c59f2e7aeeaf088 # via google-api-core googleapis-common-protos[grpc]==1.60.0 \ --hash=sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918 \ @@ -116,52 +131,61 @@ grpc-google-iam-v1==0.12.6 \ --hash=sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f \ --hash=sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c # via -r requirements.in -grpcio==1.57.0 \ - --hash=sha256:00258cbe3f5188629828363ae8ff78477ce976a6f63fb2bb5e90088396faa82e 
\ - --hash=sha256:092fa155b945015754bdf988be47793c377b52b88d546e45c6a9f9579ac7f7b6 \ - --hash=sha256:0f80bf37f09e1caba6a8063e56e2b87fa335add314cf2b78ebf7cb45aa7e3d06 \ - --hash=sha256:20ec6fc4ad47d1b6e12deec5045ec3cd5402d9a1597f738263e98f490fe07056 \ - --hash=sha256:2313b124e475aa9017a9844bdc5eafb2d5abdda9d456af16fc4535408c7d6da6 \ - --hash=sha256:23e7d8849a0e58b806253fd206ac105b328171e01b8f18c7d5922274958cc87e \ - --hash=sha256:2f708a6a17868ad8bf586598bee69abded4996b18adf26fd2d91191383b79019 \ - --hash=sha256:2f7349786da979a94690cc5c2b804cab4e8774a3cf59be40d037c4342c906649 \ - --hash=sha256:34950353539e7d93f61c6796a007c705d663f3be41166358e3d88c45760c7d98 \ - --hash=sha256:40b72effd4c789de94ce1be2b5f88d7b9b5f7379fe9645f198854112a6567d9a \ - --hash=sha256:4b089f7ad1eb00a104078bab8015b0ed0ebcb3b589e527ab009c53893fd4e613 \ - --hash=sha256:4faea2cfdf762a664ab90589b66f416274887641ae17817de510b8178356bf73 \ - --hash=sha256:5371bcd861e679d63b8274f73ac281751d34bd54eccdbfcd6aa00e692a82cd7b \ - --hash=sha256:5613a2fecc82f95d6c51d15b9a72705553aa0d7c932fad7aed7afb51dc982ee5 \ - --hash=sha256:57b183e8b252825c4dd29114d6c13559be95387aafc10a7be645462a0fc98bbb \ - --hash=sha256:5b7a4ce8f862fe32b2a10b57752cf3169f5fe2915acfe7e6a1e155db3da99e79 \ - --hash=sha256:5e5b58e32ae14658085c16986d11e99abd002ddbf51c8daae8a0671fffb3467f \ - --hash=sha256:60fe15288a0a65d5c1cb5b4a62b1850d07336e3ba728257a810317be14f0c527 \ - --hash=sha256:6907b1cf8bb29b058081d2aad677b15757a44ef2d4d8d9130271d2ad5e33efca \ - --hash=sha256:76c44efa4ede1f42a9d5b2fed1fe9377e73a109bef8675fb0728eb80b0b8e8f2 \ - --hash=sha256:7a635589201b18510ff988161b7b573f50c6a48fae9cb567657920ca82022b37 \ - --hash=sha256:7b400807fa749a9eb286e2cd893e501b110b4d356a218426cb9c825a0474ca56 \ - --hash=sha256:82640e57fb86ea1d71ea9ab54f7e942502cf98a429a200b2e743d8672171734f \ - --hash=sha256:871f9999e0211f9551f368612460442a5436d9444606184652117d6a688c9f51 \ - --hash=sha256:9338bacf172e942e62e5889b6364e56657fbf8ac68062e8b25c48843e7b202bb \ - 
--hash=sha256:a8a8e560e8dbbdf29288872e91efd22af71e88b0e5736b0daf7773c1fecd99f0 \ - --hash=sha256:aed90d93b731929e742967e236f842a4a2174dc5db077c8f9ad2c5996f89f63e \ - --hash=sha256:b363bbb5253e5f9c23d8a0a034dfdf1b7c9e7f12e602fc788c435171e96daccc \ - --hash=sha256:b4098b6b638d9e0ca839a81656a2fd4bc26c9486ea707e8b1437d6f9d61c3941 \ - --hash=sha256:b53333627283e7241fcc217323f225c37783b5f0472316edcaa4479a213abfa6 \ - --hash=sha256:b670c2faa92124b7397b42303e4d8eb64a4cd0b7a77e35a9e865a55d61c57ef9 \ - --hash=sha256:bb396952cfa7ad2f01061fbc7dc1ad91dd9d69243bcb8110cf4e36924785a0fe \ - --hash=sha256:c60b83c43faeb6d0a9831f0351d7787a0753f5087cc6fa218d78fdf38e5acef0 \ - --hash=sha256:c6ebecfb7a31385393203eb04ed8b6a08f5002f53df3d59e5e795edb80999652 \ - --hash=sha256:d78d8b86fcdfa1e4c21f8896614b6cc7ee01a2a758ec0c4382d662f2a62cf766 \ - --hash=sha256:d7f8df114d6b4cf5a916b98389aeaf1e3132035420a88beea4e3d977e5f267a5 \ - --hash=sha256:e1cb52fa2d67d7f7fab310b600f22ce1ff04d562d46e9e0ac3e3403c2bb4cc16 \ - --hash=sha256:e3fdf04e402f12e1de8074458549337febb3b45f21076cc02ef4ff786aff687e \ - --hash=sha256:e503cb45ed12b924b5b988ba9576dc9949b2f5283b8e33b21dcb6be74a7c58d0 \ - --hash=sha256:f19ac6ac0a256cf77d3cc926ef0b4e64a9725cc612f97228cd5dc4bd9dbab03b \ - --hash=sha256:f1fb0fd4a1e9b11ac21c30c169d169ef434c6e9344ee0ab27cfa6f605f6387b2 \ - --hash=sha256:fada6b07ec4f0befe05218181f4b85176f11d531911b64c715d1875c4736d73a \ - --hash=sha256:fd173b4cf02b20f60860dc2ffe30115c18972d7d6d2d69df97ac38dee03be5bf \ - --hash=sha256:fe752639919aad9ffb0dee0d87f29a6467d1ef764f13c4644d212a9a853a078d \ - --hash=sha256:fee387d2fab144e8a34e0e9c5ca0f45c9376b99de45628265cfa9886b1dbe62b +grpcio==1.59.0 \ + --hash=sha256:0ae444221b2c16d8211b55326f8ba173ba8f8c76349bfc1768198ba592b58f74 \ + --hash=sha256:0b84445fa94d59e6806c10266b977f92fa997db3585f125d6b751af02ff8b9fe \ + --hash=sha256:14890da86a0c0e9dc1ea8e90101d7a3e0e7b1e71f4487fab36e2bfd2ecadd13c \ + 
--hash=sha256:15f03bd714f987d48ae57fe092cf81960ae36da4e520e729392a59a75cda4f29 \ + --hash=sha256:1a839ba86764cc48226f50b924216000c79779c563a301586a107bda9cbe9dcf \ + --hash=sha256:225e5fa61c35eeaebb4e7491cd2d768cd8eb6ed00f2664fa83a58f29418b39fd \ + --hash=sha256:228b91ce454876d7eed74041aff24a8f04c0306b7250a2da99d35dd25e2a1211 \ + --hash=sha256:2ea95cd6abbe20138b8df965b4a8674ec312aaef3147c0f46a0bac661f09e8d0 \ + --hash=sha256:2f120d27051e4c59db2f267b71b833796770d3ea36ca712befa8c5fff5da6ebd \ + --hash=sha256:34341d9e81a4b669a5f5dca3b2a760b6798e95cdda2b173e65d29d0b16692857 \ + --hash=sha256:3859917de234a0a2a52132489c4425a73669de9c458b01c9a83687f1f31b5b10 \ + --hash=sha256:38823bd088c69f59966f594d087d3a929d1ef310506bee9e3648317660d65b81 \ + --hash=sha256:38da5310ef84e16d638ad89550b5b9424df508fd5c7b968b90eb9629ca9be4b9 \ + --hash=sha256:3b8ff795d35a93d1df6531f31c1502673d1cebeeba93d0f9bd74617381507e3f \ + --hash=sha256:50eff97397e29eeee5df106ea1afce3ee134d567aa2c8e04fabab05c79d791a7 \ + --hash=sha256:5711c51e204dc52065f4a3327dca46e69636a0b76d3e98c2c28c4ccef9b04c52 \ + --hash=sha256:598f3530231cf10ae03f4ab92d48c3be1fee0c52213a1d5958df1a90957e6a88 \ + --hash=sha256:611d9aa0017fa386809bddcb76653a5ab18c264faf4d9ff35cb904d44745f575 \ + --hash=sha256:61bc72a00ecc2b79d9695220b4d02e8ba53b702b42411397e831c9b0589f08a3 \ + --hash=sha256:63982150a7d598281fa1d7ffead6096e543ff8be189d3235dd2b5604f2c553e5 \ + --hash=sha256:6c4b1cc3a9dc1924d2eb26eec8792fedd4b3fcd10111e26c1d551f2e4eda79ce \ + --hash=sha256:81d86a096ccd24a57fa5772a544c9e566218bc4de49e8c909882dae9d73392df \ + --hash=sha256:849c47ef42424c86af069a9c5e691a765e304079755d5c29eff511263fad9c2a \ + --hash=sha256:871371ce0c0055d3db2a86fdebd1e1d647cf21a8912acc30052660297a5a6901 \ + --hash=sha256:8cd2d38c2d52f607d75a74143113174c36d8a416d9472415eab834f837580cf7 \ + --hash=sha256:936b2e04663660c600d5173bc2cc84e15adbad9c8f71946eb833b0afc205b996 \ + --hash=sha256:93e9cb546e610829e462147ce724a9cb108e61647a3454500438a6deef610be1 \ + 
--hash=sha256:956f0b7cb465a65de1bd90d5a7475b4dc55089b25042fe0f6c870707e9aabb1d \ + --hash=sha256:986de4aa75646e963466b386a8c5055c8b23a26a36a6c99052385d6fe8aaf180 \ + --hash=sha256:aca8a24fef80bef73f83eb8153f5f5a0134d9539b4c436a716256b311dda90a6 \ + --hash=sha256:acf70a63cf09dd494000007b798aff88a436e1c03b394995ce450be437b8e54f \ + --hash=sha256:b34c7a4c31841a2ea27246a05eed8a80c319bfc0d3e644412ec9ce437105ff6c \ + --hash=sha256:b95ec8ecc4f703f5caaa8d96e93e40c7f589bad299a2617bdb8becbcce525539 \ + --hash=sha256:ba0ca727a173ee093f49ead932c051af463258b4b493b956a2c099696f38aa66 \ + --hash=sha256:c041a91712bf23b2a910f61e16565a05869e505dc5a5c025d429ca6de5de842c \ + --hash=sha256:c0488c2b0528e6072010182075615620071371701733c63ab5be49140ed8f7f0 \ + --hash=sha256:c173a87d622ea074ce79be33b952f0b424fa92182063c3bda8625c11d3585d09 \ + --hash=sha256:c251d22de8f9f5cca9ee47e4bade7c5c853e6e40743f47f5cc02288ee7a87252 \ + --hash=sha256:c4dfdb49f4997dc664f30116af2d34751b91aa031f8c8ee251ce4dcfc11277b0 \ + --hash=sha256:ca87ee6183421b7cea3544190061f6c1c3dfc959e0b57a5286b108511fd34ff4 \ + --hash=sha256:ceb1e68135788c3fce2211de86a7597591f0b9a0d2bb80e8401fd1d915991bac \ + --hash=sha256:d09bd2a4e9f5a44d36bb8684f284835c14d30c22d8ec92ce796655af12163588 \ + --hash=sha256:d0fcf53df684fcc0154b1e61f6b4a8c4cf5f49d98a63511e3f30966feff39cd0 \ + --hash=sha256:d74f7d2d7c242a6af9d4d069552ec3669965b74fed6b92946e0e13b4168374f9 \ + --hash=sha256:de2599985b7c1b4ce7526e15c969d66b93687571aa008ca749d6235d056b7205 \ + --hash=sha256:e5378785dce2b91eb2e5b857ec7602305a3b5cf78311767146464bfa365fc897 \ + --hash=sha256:ec78aebb9b6771d6a1de7b6ca2f779a2f6113b9108d486e904bde323d51f5589 \ + --hash=sha256:f1feb034321ae2f718172d86b8276c03599846dc7bb1792ae370af02718f91c5 \ + --hash=sha256:f21917aa50b40842b51aff2de6ebf9e2f6af3fe0971c31960ad6a3a2b24988f4 \ + --hash=sha256:f367e4b524cb319e50acbdea57bb63c3b717c5d561974ace0b065a648bb3bad3 \ + --hash=sha256:f6cfe44a5d7c7d5f1017a7da1c8160304091ca5dc64a0f85bca0d63008c3137a \ + 
--hash=sha256:fa66cac32861500f280bb60fe7d5b3e22d68c51e18e65367e38f8669b78cea3b \ + --hash=sha256:fc8bf2e7bc725e76c0c11e474634a08c8f24bcf7426c0c6d60c8f9c6e70e4d4a \ + --hash=sha256:fe976910de34d21057bcb53b2c5e667843588b48bf11339da2a75f5c4c5b4055 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -271,9 +295,9 @@ mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via typing-inspect -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via pytest pluggy==1.3.0 \ --hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \ @@ -283,20 +307,20 @@ proto-plus==1.22.3 \ --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b # via -r requirements.in -protobuf==4.24.2 \ - --hash=sha256:237b9a50bd3b7307d0d834c1b0eb1a6cd47d3f4c2da840802cd03ea288ae8880 \ - --hash=sha256:25ae91d21e3ce8d874211110c2f7edd6384816fb44e06b2867afe35139e1fd1c \ - --hash=sha256:2b23bd6e06445699b12f525f3e92a916f2dcf45ffba441026357dea7fa46f42b \ - --hash=sha256:3b7b170d3491ceed33f723bbf2d5a260f8a4e23843799a3906f16ef736ef251e \ - --hash=sha256:4e69965e7e54de4db989289a9b971a099e626f6167a9351e9d112221fc691bc1 \ - --hash=sha256:58e12d2c1aa428ece2281cef09bbaa6938b083bcda606db3da4e02e991a0d924 \ - --hash=sha256:6bd26c1fa9038b26c5c044ee77e0ecb18463e957fefbaeb81a3feb419313a54e \ - --hash=sha256:77700b55ba41144fc64828e02afb41901b42497b8217b558e4a001f18a85f2e3 \ - --hash=sha256:7fda70797ddec31ddfa3576cbdcc3ddbb6b3078b737a1a87ab9136af0570cd6e \ - 
--hash=sha256:839952e759fc40b5d46be319a265cf94920174d88de31657d5622b5d8d6be5cd \ - --hash=sha256:bb7aa97c252279da65584af0456f802bd4b2de429eb945bbc9b3d61a42a8cd16 \ - --hash=sha256:c00c3c7eb9ad3833806e21e86dca448f46035242a680f81c3fe068ff65e79c74 \ - --hash=sha256:c5cdd486af081bf752225b26809d2d0a85e575b80a84cde5172a05bbb1990099 +protobuf==4.24.3 \ + --hash=sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719 \ + --hash=sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d \ + --hash=sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2 \ + --hash=sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4 \ + --hash=sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1 \ + --hash=sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3 \ + --hash=sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b \ + --hash=sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76 \ + --hash=sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959 \ + --hash=sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52 \ + --hash=sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b \ + --hash=sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675 \ + --hash=sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a # via # -r requirements.in # google-api-core @@ -317,9 +341,9 @@ pypandoc==1.11 \ --hash=sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5 \ --hash=sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007 # via -r requirements.in -pytest==7.4.1 \ - --hash=sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab \ - --hash=sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f +pytest==7.4.2 \ + --hash=sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002 \ + 
--hash=sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069 # via pytest-asyncio pytest-asyncio==0.21.1 \ --hash=sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d \ @@ -385,9 +409,9 @@ tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via pytest -typing-extensions==4.7.1 \ - --hash=sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36 \ - --hash=sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2 +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via # libcst # typing-inspect @@ -395,15 +419,15 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.0.4 \ - --hash=sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11 \ - --hash=sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4 +urllib3==2.0.5 \ + --hash=sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594 \ + --hash=sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e # via # google-auth # requests # The following packages are considered to be unsafe in a requirements file: -setuptools==68.1.2 \ - --hash=sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d \ - --hash=sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b +setuptools==68.2.2 \ + --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r requirements.in From 6cdba1664bb87b3e1d61284a1ea0575fc27b35fe Mon Sep 17 00:00:00 2001 From: 
"gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Oct 2023 22:38:15 +0000 Subject: [PATCH 1051/1339] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#1780) Source-Link: https://togithub.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 49 ++++++++++--------- 2 files changed, 27 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index a3da1b0d4cd3..a9bdb1b7ac0f 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 029bd342de94..96d593c8c82a 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - 
--hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + 
--hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ 
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 18440080ed1663e9c8c740d64a1a8ce636410963 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 6 Oct 2023 21:29:26 -0400 Subject: [PATCH 1052/1339] fix: resolve unit test failure caused by differences in protobuf runtimes (#1749) Co-authored-by: Victor Chudnovsky --- .../%name_%version/%sub/test_%service.py.j2 | 101 +++- .../gapic/templates/setup.py.j2 | 1 + .../gapic/%name_%version/%sub/test_macros.j2 | 79 ++- packages/gapic-generator/noxfile.py | 17 +- packages/gapic-generator/setup.py | 6 +- .../tests/fragments/google/README.rst | 3 + .../fragments/test_google_protobuf_type.proto | 55 ++ .../unit/gapic/asset_v1/test_asset_service.py | 366 ++++++++++---- .../credentials_v1/test_iam_credentials.py | 60 ++- .../unit/gapic/eventarc_v1/test_eventarc.py | 468 +++++++++++++++--- .../unit/gapic/redis_v1/test_cloud_redis.py | 151 +++++- 11 files changed, 1046 insertions(+), 261 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/google/README.rst create mode 100644 packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 77974cf5ed10..1583fb8c7720 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1014,6 +1014,58 @@ def test_{{ method.name|snake_case }}_rest(request_type): {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} + # The version of a generated 
dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr({{ method.input.ident }}.meta.fields["{{ field.name }}"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.meta.fields + else: + message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["{{ field.name }}"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["{{ field.name }}"][field])): + del 
request_init["{{ field.name }}"][field][i][subfield] + else: + del request_init["{{ field.name }}"][field][subfield] {% endif %} {% endfor %} request = request_type(request_init) @@ -1048,15 +1100,22 @@ def test_{{ method.name|snake_case }}_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - {% if method.void %} + {% if method.void %} json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) - {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} + {% else %} + + {% if method.output.ident.is_proto_plus_type %} + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) + {% endif %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) + {% endif %} + response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} @@ -1220,11 +1279,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% else %} {% if method.output.ident.is_proto_plus_type %} - pb_return_value = {{ method.output.ident }}.pb(return_value) - {% else %} - pb_return_value = return_value + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) {% endif %} - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) {% endif %} {% if method.server_streaming %} json_return_value = "[{}]".format(json_return_value) @@ -1345,12 +1403,6 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ # send a request that will satisfy transcoding 
request_init = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} - {% endif %} - {% endfor %} request = request_type(request_init) {% if method.client_streaming %} requests = [request] @@ -1391,16 +1443,21 @@ def test_{{ method.name|snake_case }}_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - {% if method.void %} + {% if method.void %} json_return_value = '' {% elif method.lro %} json_return_value = json_format.MessageToJson(return_value) - {% elif method.server_streaming %} - json_return_value = "[{}]".format({{ method.output.ident }}.to_json(return_value)) - {% else %} - json_return_value = {{ method.output.ident }}.to_json(return_value) - {% endif %} + {% else %} + {% if method.output.ident.is_proto_plus_type %} + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) + {% endif %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) + {% endif %} response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 72b5c44a8781..16041ab4e9ca 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -30,6 +30,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 
1.22.2, <2.0.0dev; python_version>='3.11'", + {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} {# Quick check to make sure the package is different from this setup.py #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 91140618017f..102da8dec11b 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -887,6 +887,58 @@ def test_{{ method_name }}_rest(request_type): {% if not field.oneof or field.proto3_optional %} {# ignore oneof fields that might conflict with sample_request #} request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr({{ method.input.ident }}.meta.fields["{{ field.name }}"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.meta.fields + else: + message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["{{ field.name }}"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["{{ field.name }}"][field])): + del request_init["{{ field.name }}"][field][i][subfield] + else: + del request_init["{{ 
field.name }}"][field][subfield] {% endif %} {% endfor %} request = request_type(**request_init) @@ -942,11 +994,10 @@ def test_{{ method_name }}_rest(request_type): json_return_value = json_format.MessageToJson(return_value) {% else %} {% if method.output.ident.is_proto_plus_type %} - pb_return_value = {{ method.output.ident }}.pb(return_value) - {% else %} - pb_return_value = return_value + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) {% endif %} - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) {% endif %} {% if method.server_streaming %} @@ -1116,11 +1167,10 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% else %} {% if method.output.ident.is_proto_plus_type %} - pb_return_value = {{ method.output.ident }}.pb(return_value) - {% else %} - pb_return_value = return_value + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) {% endif %} - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) {% endif %} {% if method.server_streaming %} json_return_value = "[{}]".format(json_return_value) @@ -1242,12 +1292,6 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ # send a request that will satisfy transcoding request_init = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} - {% endif %} - {% endfor %} request = request_type(**request_init) {% if method.client_streaming %} requests = [request] @@ -1308,11 +1352,10 @@ def test_{{ method_name }}_rest_flattened(): 
json_return_value = json_format.MessageToJson(return_value) {% else %} {% if method.output.ident.is_proto_plus_type %} - pb_return_value = {{ method.output.ident }}.pb(return_value) - {% else %} - pb_return_value = return_value + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) {% endif %} - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) {% endif %} {% if method.server_streaming %} json_return_value = "[{}]".format(json_return_value) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 3786be6c1708..152f9f68f7a1 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -110,9 +110,20 @@ def __call__(self, frag): ) # Install the generated fragment library. - # Note: install into the tempdir to prevent issues - # with running pip concurrently. - self.session.install(tmp_dir, "-e", ".", "-t", tmp_dir, "-qqq") + if self.use_ads_templates: + self.session.install(tmp_dir, "-e", ".", "-qqq") + else: + # Use the constraints file for the specific python runtime version. + # We do this to make sure that we're testing against the lowest + # supported version of a dependency. + # This is needed to recreate the issue reported in + # https://github.com/googleapis/gapic-generator-python/issues/1748 + # The ads templates do not have constraints files. + constraints_path = str( + f"{tmp_dir}/testing/constraints-{self.session.python}.txt" + ) + self.session.install(tmp_dir, "-e", ".", "-qqq", "-r", constraints_path) + # Run the fragment's generated unit tests. # Don't bother parallelizing them: we already parallelize # the fragments, and there usually aren't too many tests per fragment. 
diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3d5d85e96150..6b7a248048c2 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -25,12 +25,14 @@ version = "1.11.5" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ + # Ensure that the lower bounds of these dependencies match what we have in the + # templated setup.py.j2: https://github.com/googleapis/gapic-generator-python/blob/main/gapic/templates/setup.py.j2 "click >= 6.7", - "google-api-core >= 2.8.0", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", - "protobuf >= 3.18.0, < 5.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", diff --git a/packages/gapic-generator/tests/fragments/google/README.rst b/packages/gapic-generator/tests/fragments/google/README.rst new file mode 100644 index 000000000000..a697aa88ea18 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/README.rst @@ -0,0 +1,3 @@ +The protos in this folder were copied directly from `googleapis/googleapis`_ and are needed for the purposes of running fragment tests. + +.. 
_googleapis/googleapis: https://github.com/googleapis/googleapis/tree/master/google \ No newline at end of file diff --git a/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto b/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto new file mode 100644 index 000000000000..7e8697f95701 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto @@ -0,0 +1,55 @@ +// Copyright (C) 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + + +// The purpose of this fragment test is to test for an issue found in the generated +// client for `google/api/servicemanagement/v1` where the version of a generated +// dependency (google.protobuf.type) at runtime differs from the version used during +// generation. See https://github.com/googleapis/gapic-generator-python/issues/1748. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/type.proto"; + +service MyServiceWithProtobufType { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequestWithProtobufType) + returns (MethodResponseWithProtobufType) { + option (google.api.http) = { + post: "/v1/services/{service_name}/configs" + body: "test_message" + }; + option (google.api.method_signature) = "service_name,test_message"; + } +} + +message MethodRequestWithProtobufType { + string service_name = 1 [(google.api.field_behavior) = REQUIRED]; + TestMessage test_message = 2 [(google.api.field_behavior) = REQUIRED]; +} + +message TestMessage { + repeated google.protobuf.Type types = 2 [(google.api.field_behavior) = REQUIRED]; +} + +message MethodResponseWithProtobufType { + google.protobuf.Value result = 1; +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 6bb3ed9ed912..14df45cff61d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -7029,8 +7029,9 @@ def test_list_assets_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode('UTF-8') req.return_value = response_value @@ -7099,8 +7100,9 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7203,8 +7205,9 @@ def test_list_assets_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7317,8 +7320,9 @@ def test_batch_get_assets_history_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7386,8 +7390,9 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic response_value = Response() response_value.status_code = 200 - pb_return_value = 
asset_service.BatchGetAssetsHistoryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7502,8 +7507,9 @@ def test_create_feed_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7579,8 +7585,9 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7683,8 +7690,9 @@ def test_create_feed_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value 
= response_value @@ -7747,8 +7755,9 @@ def test_get_feed_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7819,8 +7828,9 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7923,8 +7933,9 @@ def test_get_feed_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7982,8 +7993,9 @@ def test_list_feeds_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8049,8 +8061,9 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8153,8 +8166,9 @@ def test_list_feeds_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8217,8 +8231,9 @@ def test_update_feed_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8285,8 +8300,9 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR response_value = 
Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8389,8 +8405,9 @@ def test_update_feed_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8669,8 +8686,9 @@ def test_search_all_resources_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8739,8 +8757,9 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8845,8 +8864,9 @@ def test_search_all_resources_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8962,8 +8982,9 @@ def test_search_all_iam_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9032,8 +9053,9 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9137,8 +9159,9 @@ def test_search_all_iam_policies_rest_flattened(): # Wrap the value 
into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9253,8 +9276,9 @@ def test_analyze_iam_policy_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9318,8 +9342,9 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9600,8 +9625,9 @@ def test_analyze_move_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeMoveResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return 
value to protobuf type + return_value = asset_service.AnalyzeMoveResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9676,8 +9702,9 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeMoveResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeMoveResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9793,8 +9820,9 @@ def test_query_assets_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.QueryAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9863,8 +9891,9 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.QueryAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9964,6 +9993,58 @@ def 
test_create_saved_query_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'sample1/sample2'} request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(asset_service.CreateSavedQueryRequest.meta.fields["saved_query"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"].message.meta.fields + else: + message_fields = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["saved_query"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del 
request_init["saved_query"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -9979,8 +10060,9 @@ def test_create_saved_query_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10060,8 +10142,9 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10133,7 +10216,6 @@ def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_ty # send a request that will satisfy transcoding request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 
'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10171,8 +10253,9 @@ def test_create_saved_query_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10236,8 +10319,9 @@ def test_get_saved_query_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10307,8 +10391,9 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode('UTF-8') req.return_value = response_value @@ -10411,8 +10496,9 @@ def test_get_saved_query_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10471,8 +10557,9 @@ def test_list_saved_queries_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListSavedQueriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10541,8 +10628,9 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListSavedQueriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10645,8 +10733,9 @@ def test_list_saved_queries_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.ListSavedQueriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10749,6 +10838,58 @@ def test_update_saved_query_rest(request_type): # send a request that will satisfy transcoding request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"].message.meta.fields + else: + message_fields = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["saved_query"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del 
request_init["saved_query"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -10764,8 +10905,9 @@ def test_update_saved_query_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10833,8 +10975,9 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10902,7 +11045,6 @@ def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_ty # send a request that will satisfy transcoding request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': 
['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10939,8 +11081,9 @@ def test_update_saved_query_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11219,8 +11362,9 @@ def test_batch_get_effective_iam_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11295,8 +11439,9 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11411,8 +11556,9 @@ def test_analyze_org_policies_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11488,8 +11634,9 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11598,8 +11745,9 @@ def test_analyze_org_policies_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = 
response_value @@ -11715,8 +11863,9 @@ def test_analyze_org_policy_governed_containers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11792,8 +11941,9 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -11902,8 +12052,9 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -12019,8 +12170,9 @@ def 
test_analyze_org_policy_governed_assets_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -12096,8 +12248,9 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -12206,8 +12359,9 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 38b124448418..8ebd8b0ac381 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1596,8 +1596,9 @@ def test_generate_access_token_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.GenerateAccessTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1669,8 +1670,9 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate response_value = Response() response_value.status_code = 200 - pb_return_value = common.GenerateAccessTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1776,8 +1778,9 @@ def test_generate_access_token_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.GenerateAccessTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.GenerateAccessTokenResponse.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1839,8 +1842,9 @@ def test_generate_id_token_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.GenerateIdTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -1912,8 +1916,9 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo response_value = Response() response_value.status_code = 200 - pb_return_value = common.GenerateIdTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -2019,8 +2024,9 @@ def test_generate_id_token_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.GenerateIdTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -2083,8 +2089,9 @@ def test_sign_blob_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = common.SignBlobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -2157,8 +2164,9 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = common.SignBlobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -2263,8 +2271,9 @@ def test_sign_blob_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.SignBlobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -2326,8 +2335,9 @@ def test_sign_jwt_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.SignJwtResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode('UTF-8') req.return_value = response_value @@ -2400,8 +2410,9 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = common.SignJwtResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -2506,8 +2517,9 @@ def test_sign_jwt_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.SignJwtResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 771528a988c5..656b4e83fb13 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -5703,8 +5703,9 @@ def test_get_trigger_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -5775,8 +5776,9 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques response_value = Response() response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -5879,8 +5881,9 @@ def test_get_trigger_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -5940,8 +5943,9 @@ def test_list_triggers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -6011,8 +6015,9 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe response_value = Response() response_value.status_code = 200 - pb_return_value = 
eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -6115,8 +6120,9 @@ def test_list_triggers_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -6219,6 +6225,58 @@ def test_create_trigger_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during 
generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(eventarc.CreateTriggerRequest.meta.fields["trigger"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = eventarc.CreateTriggerRequest.meta.fields["trigger"].message.meta.fields + else: + message_fields = eventarc.CreateTriggerRequest.meta.fields["trigger"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["trigger"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -6388,7 +6446,6 @@ def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=e # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6476,6 +6533,58 @@ def test_update_trigger_rest(request_type): # send a request that will satisfy transcoding request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(eventarc.UpdateTriggerRequest.meta.fields["trigger"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = eventarc.UpdateTriggerRequest.meta.fields["trigger"].message.meta.fields + else: + message_fields = eventarc.UpdateTriggerRequest.meta.fields["trigger"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["trigger"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -6630,7 +6739,6 @@ def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=e # send a request that will satisfy transcoding request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -6976,8 +7084,9 @@ def test_get_channel_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7049,8 +7158,9 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques response_value = Response() response_value.status_code = 200 - pb_return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7153,8 +7263,9 @@ def test_get_channel_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7214,8 +7325,9 @@ def test_list_channels_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7285,8 +7397,9 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7389,8 +7502,9 @@ def test_list_channels_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -7493,6 +7607,58 @@ def test_create_channel_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(eventarc.CreateChannelRequest.meta.fields["channel"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = eventarc.CreateChannelRequest.meta.fields["channel"].message.meta.fields + else: + message_fields = eventarc.CreateChannelRequest.meta.fields["channel"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["channel"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -7662,7 +7828,6 @@ def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=e # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -7750,6 +7915,58 @@ def test_update_channel_rest(request_type): # send a request that will satisfy transcoding request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(eventarc.UpdateChannelRequest.meta.fields["channel"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = eventarc.UpdateChannelRequest.meta.fields["channel"].message.meta.fields + else: + message_fields = eventarc.UpdateChannelRequest.meta.fields["channel"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["channel"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] request = 
request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -7904,7 +8121,6 @@ def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=e # send a request that will satisfy transcoding request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -8241,8 +8457,9 @@ def test_get_provider_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8310,8 +8527,9 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ response_value = Response() response_value.status_code = 200 - pb_return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8414,8 +8632,9 @@ def test_get_provider_rest_flattened(): # Wrap the value into a proper Response obj response_value 
= Response() response_value.status_code = 200 - pb_return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8475,8 +8694,9 @@ def test_list_providers_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8546,8 +8766,9 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8650,8 +8871,9 @@ def test_list_providers_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8768,8 +8990,9 @@ def test_get_channel_connection_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8839,8 +9062,9 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh response_value = Response() response_value.status_code = 200 - pb_return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -8943,8 +9167,9 @@ def test_get_channel_connection_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9004,8 +9229,9 @@ def test_list_channel_connections_rest(request_type): # Wrap the value into a proper Response obj 
response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9075,8 +9301,9 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9179,8 +9406,9 @@ def test_list_channel_connections_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9283,6 +9511,58 @@ def test_create_channel_connection_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 
'update_time': {}, 'activation_token': 'activation_token_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"].message.meta.fields + else: + message_fields = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["channel_connection"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = 
subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel_connection"][field])): + del request_init["channel_connection"][field][i][subfield] + else: + del request_init["channel_connection"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -9441,7 +9721,6 @@ def test_create_channel_connection_rest_bad_request(transport: str = 'rest', req # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -9767,8 +10046,9 @@ def test_get_google_channel_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9836,8 +10116,9 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge response_value = Response() response_value.status_code = 200 - pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9940,8 +10221,9 @@ def test_get_google_channel_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -9989,6 +10271,58 @@ def test_update_google_channel_config_rest(request_type): # send a request that will satisfy transcoding request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"].message.meta.fields + else: + message_fields = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["google_channel_config"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_channel_config"][field])): + del 
request_init["google_channel_config"][field][i][subfield] + else: + del request_init["google_channel_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -10002,8 +10336,9 @@ def test_update_google_channel_config_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10069,8 +10404,9 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc response_value = Response() response_value.status_code = 200 - pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -10138,7 +10474,6 @@ def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', # send a request that will satisfy transcoding request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} request = request_type(**request_init) # Mock the http request call within the method and fake a 
BadRequest error. @@ -10175,8 +10510,9 @@ def test_update_google_channel_config_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index bc58a6c8eab9..e69f52a9bfc4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -3498,8 +3498,9 @@ def test_list_instances_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -3569,8 +3570,9 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return 
value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -3673,8 +3675,9 @@ def test_list_instances_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -3814,8 +3817,9 @@ def test_get_instance_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -3908,8 +3912,9 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -4012,8 +4017,9 @@ def test_get_instance_rest_flattened(): # Wrap the value into a proper 
Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -4072,8 +4078,9 @@ def test_get_instance_auth_string_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -4140,8 +4147,9 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -4244,8 +4252,9 @@ def test_get_instance_auth_string_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = cloud_redis.InstanceAuthString.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value @@ -4293,6 +4302,58 @@ def test_create_instance_rest(request_type): # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 
'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(cloud_redis.CreateInstanceRequest.meta.fields["instance"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = cloud_redis.CreateInstanceRequest.meta.fields["instance"].message.meta.fields + else: + message_fields = cloud_redis.CreateInstanceRequest.meta.fields["instance"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["instance"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] 
request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4451,7 +4512,6 @@ def test_create_instance_rest_bad_request(transport: str = 'rest', request_type= # send a request that will satisfy transcoding request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 
'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -4539,6 +4599,58 @@ def test_update_instance_rest(request_type): # send a request that will satisfy transcoding request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 
'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + is_message_proto_plus_type = not hasattr(cloud_redis.UpdateInstanceRequest.meta.fields["instance"].message, "DESCRIPTOR") + + if is_message_proto_plus_type: + message_fields = cloud_redis.UpdateInstanceRequest.meta.fields["instance"].message.meta.fields + else: + message_fields = cloud_redis.UpdateInstanceRequest.meta.fields["instance"].message.DESCRIPTOR.fields + + subfields_not_in_runtime = [] + + # Get all subfields for the message + runtime_nested_fields = [ + (field.name, subfield.name) + for field in message_fields + if hasattr(field, "message_type") and field.message_type + for subfield in field.message_type.fields + ] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + for field, value in request_init["instance"].items(): + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": 
subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + for subfield_to_delete in subfields_not_in_runtime: + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -4681,7 +4793,6 @@ def test_update_instance_rest_bad_request(transport: str = 'rest', request_type= # send a request that will satisfy transcoding request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 
'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. From bcf28a5ce70984f80397a1dd25f797b1bbe5789f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 10:42:17 -0400 Subject: [PATCH 1053/1339] chore: [autoapprove] Update `black` and `isort` to latest versions (#1790) Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index a9bdb1b7ac0f..dd98abbdeebe 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 96d593c8c82a..0332d3267e15 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine From 7a6b659c63bc41e8b7ea366f6d91392d4163c60e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 09:56:54 -0700 Subject: [PATCH 1054/1339] chore(main): release 1.11.6 (#1758) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 7abf51515ffe..eb97c40e7afb 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## 
[1.11.6](https://github.com/googleapis/gapic-generator-python/compare/v1.11.5...v1.11.6) (2023-10-09) + + +### Bug Fixes + +* Change to Set vs FrozenSet and thread the same set through ([#1125](https://github.com/googleapis/gapic-generator-python/issues/1125)) ([723efca](https://github.com/googleapis/gapic-generator-python/commit/723efca3f909527c48e8070eff61511293888626)) +* Resolve unit test failure caused by differences in protobuf runtimes ([#1749](https://github.com/googleapis/gapic-generator-python/issues/1749)) ([812abce](https://github.com/googleapis/gapic-generator-python/commit/812abceeb68d86f3902974aaeef8a58cfd6e7515)) + ## [1.11.5](https://github.com/googleapis/gapic-generator-python/compare/v1.11.4...v1.11.5) (2023-09-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 6b7a248048c2..872fb40451ea 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.5" +version = "1.11.6" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 9510916154572f82b8b8dfe33b621a4b25520358 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 10 Oct 2023 16:39:46 -0400 Subject: [PATCH 1055/1339] fix: add google-cloud-iam to dependencies (#1792) Co-authored-by: omair --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index af25d77b3d55..3159c52a330b 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -10,6 +10,7 @@ Note: Set the minimum version for 
google-cloud-documentai to 2.0.0 which has sup ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "3.0.0dev"}, ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "3.0.0dev"}, - ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"} + ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, + ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"} } %} From e3b75b3bbdd0e274687db96c8fce1da71bd63a9b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 12 Oct 2023 11:58:07 -0400 Subject: [PATCH 1056/1339] fix(revert): partial revert of #1125 (#1799) --- packages/gapic-generator/gapic/schema/wrappers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 3b49db1ff127..7b967ac6aca3 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -713,7 +713,7 @@ def with_context(self, *, references. 
""" visited_messages = visited_messages or set() - visited_messages.add(self) + visited_messages = visited_messages | {self} return dataclasses.replace( self, fields={ From 66dd9848ab5e50b74756c85812c97f7ec5acbef1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 12 Oct 2023 15:42:21 -0400 Subject: [PATCH 1057/1339] fix: fix regression in REST unit test (#1798) --- .../%name_%version/%sub/test_%service.py.j2 | 30 +-- .../gapic/%name_%version/%sub/test_macros.j2 | 30 +-- .../fragments/test_google_protobuf_type.proto | 11 +- .../unit/gapic/asset_v1/test_asset_service.py | 60 +++--- .../unit/gapic/eventarc_v1/test_eventarc.py | 180 +++++++++++------- .../unit/gapic/redis_v1/test_cloud_redis.py | 60 +++--- 6 files changed, 237 insertions(+), 134 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 1583fb8c7720..ed5bc534cd59 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1019,23 +1019,31 @@ def test_{{ method.name|snake_case }}_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr({{ method.input.ident }}.meta.fields["{{ field.name }}"].message, "DESCRIPTOR") + test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] - if is_message_proto_plus_type: - message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.meta.fields - else: - message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list 
with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["{{ field.name }}"].items(): result = None diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 102da8dec11b..a996b757b92d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -892,23 +892,31 @@ def test_{{ method_name }}_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr({{ method.input.ident }}.meta.fields["{{ field.name }}"].message, "DESCRIPTOR") + test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] - if is_message_proto_plus_type: - message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.meta.fields - else: - 
message_fields = {{ method.input.ident }}.meta.fields["{{ field.name }}"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["{{ field.name }}"].items(): result = None diff --git a/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto b/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto index 7e8697f95701..e9a47a12932f 100644 --- a/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto +++ b/packages/gapic-generator/tests/fragments/test_google_protobuf_type.proto @@ -34,20 +34,27 @@ service MyServiceWithProtobufType { rpc MyMethod(MethodRequestWithProtobufType) returns (MethodResponseWithProtobufType) { option (google.api.http) = { - post: "/v1/services/{service_name}/configs" + post: "/v1/services/{service_name}/configs/{test_message.another_message.another_field}" body: "test_message" }; - option (google.api.method_signature) = "service_name,test_message"; + option 
(google.api.method_signature) = "service_name,test_message,another_string"; } } message MethodRequestWithProtobufType { string service_name = 1 [(google.api.field_behavior) = REQUIRED]; TestMessage test_message = 2 [(google.api.field_behavior) = REQUIRED]; + string another_string = 3 [(google.api.field_behavior) = REQUIRED]; } message TestMessage { + string name = 1 [(google.api.field_behavior) = REQUIRED]; repeated google.protobuf.Type types = 2 [(google.api.field_behavior) = REQUIRED]; + AnotherTestMessage another_message = 3 [(google.api.field_behavior) = REQUIRED]; +} + +message AnotherTestMessage { + string another_field = 1; } message MethodResponseWithProtobufType { diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 14df45cff61d..a38f051dadc1 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -9998,23 +9998,31 @@ def test_create_saved_query_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(asset_service.CreateSavedQueryRequest.meta.fields["saved_query"].message, "DESCRIPTOR") + test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] - if is_message_proto_plus_type: - message_fields = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"].message.meta.fields - else: - message_fields = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["saved_query"].items(): result = None @@ -10843,23 +10851,31 @@ def test_update_saved_query_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"].message, "DESCRIPTOR") + test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"] - if is_message_proto_plus_type: - message_fields = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"].message.meta.fields - else: - message_fields = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["saved_query"].items(): result = None diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 656b4e83fb13..cb5be11f6b93 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -6230,23 +6230,31 @@ def test_create_trigger_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(eventarc.CreateTriggerRequest.meta.fields["trigger"].message, "DESCRIPTOR") + test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] - if is_message_proto_plus_type: - message_fields = eventarc.CreateTriggerRequest.meta.fields["trigger"].message.meta.fields - else: - message_fields = 
eventarc.CreateTriggerRequest.meta.fields["trigger"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["trigger"].items(): result = None @@ -6538,23 +6546,31 @@ def test_update_trigger_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(eventarc.UpdateTriggerRequest.meta.fields["trigger"].message, "DESCRIPTOR") + test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] - if is_message_proto_plus_type: - message_fields = eventarc.UpdateTriggerRequest.meta.fields["trigger"].message.meta.fields - else: - message_fields = eventarc.UpdateTriggerRequest.meta.fields["trigger"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["trigger"].items(): result = None @@ -7612,23 +7628,31 @@ def test_create_channel_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(eventarc.CreateChannelRequest.meta.fields["channel"].message, "DESCRIPTOR") + test_field = eventarc.CreateChannelRequest.meta.fields["channel"] - if is_message_proto_plus_type: - message_fields = eventarc.CreateChannelRequest.meta.fields["channel"].message.meta.fields - else: - message_fields = eventarc.CreateChannelRequest.meta.fields["channel"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["channel"].items(): result = None @@ -7920,23 +7944,31 @@ def test_update_channel_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(eventarc.UpdateChannelRequest.meta.fields["channel"].message, "DESCRIPTOR") + test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] - if is_message_proto_plus_type: - message_fields = eventarc.UpdateChannelRequest.meta.fields["channel"].message.meta.fields - else: - message_fields = eventarc.UpdateChannelRequest.meta.fields["channel"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["channel"].items(): result = None @@ -9516,23 +9548,31 @@ def test_create_channel_connection_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"].message, "DESCRIPTOR") + test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] - if is_message_proto_plus_type: - message_fields = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"].message.meta.fields - else: - message_fields = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["channel_connection"].items(): result = None @@ -10276,23 +10316,31 @@ def test_update_google_channel_config_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"].message, "DESCRIPTOR") + test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] - if is_message_proto_plus_type: - message_fields = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"].message.meta.fields - else: - message_fields = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["google_channel_config"].items(): result = None diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index e69f52a9bfc4..918dcf3ffa40 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -4307,23 +4307,31 @@ def test_create_instance_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(cloud_redis.CreateInstanceRequest.meta.fields["instance"].message, "DESCRIPTOR") + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] - if is_message_proto_plus_type: - message_fields = cloud_redis.CreateInstanceRequest.meta.fields["instance"].message.meta.fields - else: - message_fields = 
cloud_redis.CreateInstanceRequest.meta.fields["instance"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["instance"].items(): result = None @@ -4604,23 +4612,31 @@ def test_update_instance_rest(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - is_message_proto_plus_type = not hasattr(cloud_redis.UpdateInstanceRequest.meta.fields["instance"].message, "DESCRIPTOR") + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] - if is_message_proto_plus_type: - message_fields = cloud_redis.UpdateInstanceRequest.meta.fields["instance"].message.meta.fields - else: - message_fields = cloud_redis.UpdateInstanceRequest.meta.fields["instance"].message.DESCRIPTOR.fields + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] - subfields_not_in_runtime = [] + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + else: + message_fields = field.message.DESCRIPTOR.fields + return message_fields - # Get all subfields for the message runtime_nested_fields = [ - (field.name, subfield.name) - for field in message_fields - if hasattr(field, "message_type") and field.message_type - for subfield in field.message_type.fields + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) ] + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime for field, value in request_init["instance"].items(): result = None From 409087ba7c6609db1679a3092474dd8016ea70a5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 12 Oct 2023 19:48:00 +0000 Subject: [PATCH 1058/1339] chore(main): release 1.11.7 (#1793) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index eb97c40e7afb..70ab8148fd7c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.11.7](https://github.com/googleapis/gapic-generator-python/compare/v1.11.6...v1.11.7) (2023-10-12) + + +### Bug Fixes + +* Add google-cloud-iam to dependencies ([#1792](https://github.com/googleapis/gapic-generator-python/issues/1792)) 
([c5ed152](https://github.com/googleapis/gapic-generator-python/commit/c5ed1529f1d61ea3d0990b662a3d979d05192ab5)) +* Fix regression in REST unit test ([#1798](https://github.com/googleapis/gapic-generator-python/issues/1798)) ([0cee3c2](https://github.com/googleapis/gapic-generator-python/commit/0cee3c26bcd428ce6eec57c3616423905ec21781)) +* **revert:** Partial revert of [#1125](https://github.com/googleapis/gapic-generator-python/issues/1125) ([#1799](https://github.com/googleapis/gapic-generator-python/issues/1799)) ([14eec93](https://github.com/googleapis/gapic-generator-python/commit/14eec937fc4eddf3d6bd32f7ae35969bdc6f8bb5)) + ## [1.11.6](https://github.com/googleapis/gapic-generator-python/compare/v1.11.5...v1.11.6) (2023-10-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 872fb40451ea..fe8aad092517 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.6" +version = "1.11.7" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 3cb96a3735fbe6bfd52ae90d84dee2b619fa718e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 16 Oct 2023 11:34:57 -0400 Subject: [PATCH 1059/1339] fix: Fix coverage gap in tests (#1802) --- .../%name_%version/%sub/test_%service.py.j2 | 10 ++-- .../gapic/%name_%version/%sub/test_macros.j2 | 9 ++-- .../unit/gapic/asset_v1/test_asset_service.py | 18 ++++--- .../unit/gapic/eventarc_v1/test_eventarc.py | 54 ++++++++++++------- .../unit/gapic/redis_v1/test_cloud_redis.py | 18 ++++--- 5 files changed, 72 insertions(+), 37 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ed5bc534cd59..fdcf2b3351a8 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1032,7 +1032,8 @@ def test_{{ method.name|snake_case }}_rest(request_type): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -1044,8 +1045,8 @@ def test_{{ method.name|snake_case }}_rest(request_type): subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["{{ field.name }}"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -1064,7 +1065,8 @@ def test_{{ method.name|snake_case }}_rest(request_type): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 
a996b757b92d..6c00659c5a98 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -905,7 +905,8 @@ def test_{{ method_name }}_rest(request_type): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -918,7 +919,8 @@ def test_{{ method_name }}_rest(request_type): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["{{ field.name }}"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -937,7 +939,8 @@ def test_{{ method_name }}_rest(request_type): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index a38f051dadc1..da7e9293edf9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -10011,7 +10011,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10024,7 +10025,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["saved_query"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10043,7 +10045,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10864,7 +10867,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10877,7 +10881,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in 
request_init["saved_query"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10896,7 +10901,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index cb5be11f6b93..da0e006f4f57 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -6243,7 +6243,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -6256,7 +6257,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["trigger"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in 
request_init["trigger"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -6275,7 +6277,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -6559,7 +6562,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -6572,7 +6576,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["trigger"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -6591,7 +6596,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -7641,7 +7647,8 @@ def 
get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -7654,7 +7661,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["channel"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -7673,7 +7681,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -7957,7 +7966,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -7970,7 +7980,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["channel"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in 
request_init["channel"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -7989,7 +8000,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -9561,7 +9573,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -9574,7 +9587,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["channel_connection"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel_connection"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -9593,7 +9607,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10329,7 
+10344,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10342,7 +10358,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["google_channel_config"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -10361,7 +10378,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 918dcf3ffa40..795084128731 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -4320,7 +4320,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO 
COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -4333,7 +4334,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["instance"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -4352,7 +4354,8 @@ def get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -4625,7 +4628,8 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() - else: + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -4638,7 +4642,8 @@ def get_message_fields(field): subfields_not_in_runtime = [] # For each item in the sample request, create a list of sub fields which are not present at runtime - for field, value in request_init["instance"].items(): + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -4657,7 +4662,8 @@ def 
get_message_fields(field): ) # Remove fields from the sample request which are not present in the runtime version of the dependency - for subfield_to_delete in subfields_not_in_runtime: + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") From 29306e723db005b97b79bbb13dbb7817961f99cf Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 16 Oct 2023 18:27:47 +0200 Subject: [PATCH 1060/1339] chore(deps): update dependency urllib3 to v2.0.6 [security] (#1783) --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7baff2d9f64b..f3f50c2ac63f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -419,9 +419,9 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.0.5 \ - --hash=sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594 \ - --hash=sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e +urllib3==2.0.6 \ + --hash=sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2 \ + --hash=sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564 # via # google-auth # requests From e3fb617fd0c94350f86c19bba8ea21734ec25dfd Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 16 Oct 2023 16:19:48 -0400 Subject: [PATCH 1061/1339] fix: add missing dependencies (#1804) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 ++ 
.../gapic-generator/tests/integration/goldens/asset/setup.py | 2 ++ .../integration/goldens/asset/testing/constraints-3.10.txt | 2 ++ .../integration/goldens/asset/testing/constraints-3.11.txt | 2 ++ .../integration/goldens/asset/testing/constraints-3.12.txt | 2 ++ .../tests/integration/goldens/asset/testing/constraints-3.7.txt | 2 ++ .../tests/integration/goldens/asset/testing/constraints-3.8.txt | 2 ++ .../tests/integration/goldens/asset/testing/constraints-3.9.txt | 2 ++ 8 files changed, 16 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 3159c52a330b..487f1b478b77 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -8,8 +8,10 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup {% set pypi_packages = { ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, + ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "3.0.0dev"}, ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "3.0.0dev"}, + ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"} } diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 166120b041d5..4353b691d2c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -40,6 +40,8 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", + "google-cloud-os-config >= 1.0.0, <2.0.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/python-asset" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt index ad3f0fa58e2d..70744e58974a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt @@ -4,4 +4,6 @@ google-api-core proto-plus protobuf +google-cloud-access-context-manager +google-cloud-os-config grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt index ad3f0fa58e2d..70744e58974a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt @@ -4,4 +4,6 @@ google-api-core proto-plus protobuf +google-cloud-access-context-manager +google-cloud-os-config grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt 
b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt index ad3f0fa58e2d..70744e58974a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt @@ -4,4 +4,6 @@ google-api-core proto-plus protobuf +google-cloud-access-context-manager +google-cloud-os-config grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index 2beecf99e0be..a77838fd956a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -7,4 +7,6 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 +google-cloud-access-context-manager==0.1.2 +google-cloud-os-config==1.0.0 grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt index ad3f0fa58e2d..70744e58974a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt @@ -4,4 +4,6 @@ google-api-core proto-plus protobuf +google-cloud-access-context-manager +google-cloud-os-config grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt index ad3f0fa58e2d..70744e58974a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt @@ -4,4 +4,6 @@ 
google-api-core proto-plus protobuf +google-cloud-access-context-manager +google-cloud-os-config grpc-google-iam-v1 From e06e5cf46d3f8226d8a286302a54cae0bb308b35 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 17:14:16 -0400 Subject: [PATCH 1062/1339] chore(main): release 1.11.8 (#1803) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 70ab8148fd7c..c26b40a5abf4 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.11.8](https://github.com/googleapis/gapic-generator-python/compare/v1.11.7...v1.11.8) (2023-10-16) + + +### Bug Fixes + +* Add missing dependencies ([#1804](https://github.com/googleapis/gapic-generator-python/issues/1804)) ([3e020cd](https://github.com/googleapis/gapic-generator-python/commit/3e020cd338339e311f825c7ea77dd473b660bcc5)) +* Fix coverage gap in tests ([#1802](https://github.com/googleapis/gapic-generator-python/issues/1802)) ([2fe0df1](https://github.com/googleapis/gapic-generator-python/commit/2fe0df19261bf5e6e7589a7cd77de03dd1c5c5fc)) + ## [1.11.7](https://github.com/googleapis/gapic-generator-python/compare/v1.11.6...v1.11.7) (2023-10-12) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index fe8aad092517..2369464d3081 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.7" +version = "1.11.8" release_status = "Development Status :: 5 - Production/Stable" 
dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 1b2f7cac620f6c8019f3b0b9b7b0437cd5966230 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 18 Oct 2023 11:31:43 -0400 Subject: [PATCH 1063/1339] fix: rename rst files to avoid conflict with service names (#1706) Co-authored-by: Yu-Han Liu --- .../docs/%name_%version/{services.rst.j2 => services_.rst.j2} | 0 .../templates/docs/%name_%version/{types.rst.j2 => types_.rst.j2} | 0 .../goldens/asset/docs/asset_v1/{services.rst => services_.rst} | 0 .../goldens/asset/docs/asset_v1/{types.rst => types_.rst} | 0 .../docs/credentials_v1/{services.rst => services_.rst} | 0 .../credentials/docs/credentials_v1/{types.rst => types_.rst} | 0 .../eventarc/docs/eventarc_v1/{services.rst => services_.rst} | 0 .../goldens/eventarc/docs/eventarc_v1/{types.rst => types_.rst} | 0 .../logging/docs/logging_v2/{services.rst => services_.rst} | 0 .../goldens/logging/docs/logging_v2/{types.rst => types_.rst} | 0 .../goldens/redis/docs/redis_v1/{services.rst => services_.rst} | 0 .../goldens/redis/docs/redis_v1/{types.rst => types_.rst} | 0 12 files changed, 0 insertions(+), 0 deletions(-) rename packages/gapic-generator/gapic/templates/docs/%name_%version/{services.rst.j2 => services_.rst.j2} (100%) rename packages/gapic-generator/gapic/templates/docs/%name_%version/{types.rst.j2 => types_.rst.j2} (100%) rename packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/{services.rst => services_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/{types.rst => types_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/{services.rst => services_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/{types.rst => types_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/{services.rst => 
services_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/{types.rst => types_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/{services.rst => services_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/{types.rst => types_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/{services.rst => services_.rst} (100%) rename packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/{types.rst => types_.rst} (100%) diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/services_.rst.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/docs/%name_%version/services.rst.j2 rename to packages/gapic-generator/gapic/templates/docs/%name_%version/services_.rst.j2 diff --git a/packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 b/packages/gapic-generator/gapic/templates/docs/%name_%version/types_.rst.j2 similarity index 100% rename from packages/gapic-generator/gapic/templates/docs/%name_%version/types.rst.j2 rename to packages/gapic-generator/gapic/templates/docs/%name_%version/types_.rst.j2 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services.rst rename to packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/services_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types_.rst similarity index 100% rename from 
packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types.rst rename to packages/gapic-generator/tests/integration/goldens/asset/docs/asset_v1/types_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services.rst rename to packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/services_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types.rst rename to packages/gapic-generator/tests/integration/goldens/credentials/docs/credentials_v1/types_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services.rst rename to packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/services_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types.rst rename to packages/gapic-generator/tests/integration/goldens/eventarc/docs/eventarc_v1/types_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst 
b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services.rst rename to packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/services_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types.rst rename to packages/gapic-generator/tests/integration/goldens/logging/docs/logging_v2/types_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services.rst rename to packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/services_.rst diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types_.rst similarity index 100% rename from packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types.rst rename to packages/gapic-generator/tests/integration/goldens/redis/docs/redis_v1/types_.rst From 4227ff56b2fdf10579bf27d1e9eedbe2277a165c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 15:54:17 +0000 Subject: [PATCH 1064/1339] chore(main): release 1.11.9 (#1811) :robot: I have created a release *beep* *boop* --- ## [1.11.9](https://togithub.com/googleapis/gapic-generator-python/compare/v1.11.8...v1.11.9) (2023-10-18) ### Bug Fixes * Rename rst files to avoid conflict with service names 
([#1706](https://togithub.com/googleapis/gapic-generator-python/issues/1706)) ([70c3db5](https://togithub.com/googleapis/gapic-generator-python/commit/70c3db5ae1bac69e8ade13e4608af60c824e7870)) --- This PR was generated with [Release Please](https://togithub.com/googleapis/release-please). See [documentation](https://togithub.com/googleapis/release-please#release-please). --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c26b40a5abf4..f8e62caf38c2 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.11.9](https://github.com/googleapis/gapic-generator-python/compare/v1.11.8...v1.11.9) (2023-10-18) + + +### Bug Fixes + +* Rename rst files to avoid conflict with service names ([#1706](https://github.com/googleapis/gapic-generator-python/issues/1706)) ([70c3db5](https://github.com/googleapis/gapic-generator-python/commit/70c3db5ae1bac69e8ade13e4608af60c824e7870)) + ## [1.11.8](https://github.com/googleapis/gapic-generator-python/compare/v1.11.7...v1.11.8) (2023-10-16) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2369464d3081..afd01d365ee2 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.8" +version = "1.11.9" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 1d7c77f236806c90abfbfd5a33f0fef17f9c2d97 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 24 Oct 2023 15:55:59 -0400 Subject: [PATCH 1065/1339] test: Enable service yaml in showcase 
(#1819) Co-authored-by: omair --- packages/gapic-generator/noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 152f9f68f7a1..d4aeef1b083b 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -177,7 +177,7 @@ def fragment_alternative_templates(session): @contextmanager def showcase_library( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), - include_service_yaml=False, + include_service_yaml=True, ): """Install the generated library into the session for showcase tests.""" From c99ed8b4f0e1d2d519b996ae1eda44f9085912f4 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 24 Oct 2023 15:59:29 -0400 Subject: [PATCH 1066/1339] build: Update WORKSPACE to align with googleapis/googleapis (#1818) Co-authored-by: omair Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 42 +++++++++++++++++++++--------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 6820894328c0..d3ea4ba87ed1 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -2,14 +2,14 @@ workspace(name = "gapic_generator_python") load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") -_bazel_skylib_version = "0.9.0" +_bazel_skylib_version = "1.4.0" -_bazel_skylib_sha256 = "1dde365491125a3db70731e25658dfdd3bc5dbdfd11b840b3e987ecf043c7ca0" +_bazel_skylib_sha256 = "f24ab666394232f834f74d19e2ff142b0af17466ea0c69a3f4c276ee75f6efce" http_archive( name = "bazel_skylib", sha256 = _bazel_skylib_sha256, - url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{0}/bazel_skylib-{0}.tar.gz".format(_bazel_skylib_version), + url = "https://github.com/bazelbuild/bazel-skylib/releases/download/{0}/bazel-skylib-{0}.tar.gz".format(_bazel_skylib_version), ) _io_bazel_rules_go_version = "0.33.0" @@ 
-59,10 +59,6 @@ gapic_generator_python() gapic_generator_register_toolchains() -load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps") - -protobuf_deps() - # TODO(https://github.com/googleapis/gapic-generator-python/issues/1781): # Remove this import once gRPC depends on a newer version. # @@ -81,16 +77,36 @@ http_archive( ], ) -# -# Import grpc as a native bazel dependency. This avoids duplication and also -# speeds up loading phase a lot (otherwise python_rules will be building grpcio -# from sources in a single-core speed, which takes around 5 minutes on a regular -# workstation) -# +_grpc_version = "1.55.1" + +_grpc_sha256 = "17c0685da231917a7b3be2671a7b13b550a85fdda5e475313264c5f51c4da3f8" + +http_archive( + name = "com_github_grpc_grpc", + sha256 = _grpc_sha256, + strip_prefix = "grpc-%s" % _grpc_version, + urls = ["https://github.com/grpc/grpc/archive/v%s.zip" % _grpc_version], +) +# instantiated in grpc_deps(). +http_archive( + name = "com_google_protobuf", + sha256 = "0b0395d34e000f1229679e10d984ed7913078f3dd7f26cf0476467f5e65716f4", + strip_prefix = "protobuf-23.2", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v23.2.tar.gz"], +) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") grpc_deps() +load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps", "PROTOBUF_MAVEN_ARTIFACTS") +# This is actually already done within grpc_deps but calling this for Bazel convention. +protobuf_deps() + +# gRPC enforces a specific version of Go toolchain which conflicts with our build. +# All the relevant parts of grpc_extra_deps() are imported in this WORKSPACE file +# explicitly, that is why we do not call grpc_extra_deps() here and call +# apple_rules_dependencies and apple_support_dependencies macros explicitly. 
+ load("@build_bazel_rules_apple//apple:repositories.bzl", "apple_rules_dependencies") apple_rules_dependencies() From f9ccd8672fa2b18f7a70ba9f08d054220d8f5c14 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 24 Oct 2023 16:39:22 -0400 Subject: [PATCH 1067/1339] test: Add service config to showcase (#1820) Co-authored-by: omair --- packages/gapic-generator/noxfile.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index d4aeef1b083b..8836ec09d12b 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -178,6 +178,7 @@ def fragment_alternative_templates(session): def showcase_library( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), include_service_yaml=True, + retry_config=True, ): """Install the generated library into the session for showcase tests.""" @@ -218,11 +219,23 @@ def showcase_library( external=True, silent=True, ) + if retry_config: + session.run( + "curl", + "https://github.com/googleapis/gapic-showcase/releases/" + f"download/v{showcase_version}/" + f"showcase_grpc_service_config.json", + "-L", + "--output", + path.join(tmp_dir, "showcase_grpc_service_config.json"), + external=True, + silent=True, + ) # Write out a client library for Showcase. 
template_opt = f"python-gapic-templates={templates}" opts = "--python_gapic_opt=" - if include_service_yaml: - opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest", f"service-yaml={tmp_dir}/showcase_v1beta1.yaml")) + if include_service_yaml and retry_config: + opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest", f"service-yaml={tmp_dir}/showcase_v1beta1.yaml", f"retry-config={tmp_dir}/showcase_grpc_service_config.json")) else: opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest",)) cmd_tup = ( From 9ccec7c7ab4551964201e7185fe9dfa192a9066d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 26 Oct 2023 11:04:41 -0400 Subject: [PATCH 1068/1339] fix: upgrade rules_python to 0.26.0 (#1825) --- packages/gapic-generator/WORKSPACE | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index d3ea4ba87ed1..27bb62c49009 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -22,9 +22,9 @@ http_archive( ], ) -_rules_python_version = "0.24.0" +_rules_python_version = "0.26.0" -_rules_python_sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578" +_rules_python_sha256 = "9d04041ac92a0985e344235f5d946f71ac543f1b1565f2cdbc9a2aaee8adf55b" http_archive( name = "rules_python", @@ -35,9 +35,10 @@ http_archive( load("@rules_python//python:repositories.bzl", "py_repositories") +py_repositories() + load("@rules_python//python:pip.bzl", "pip_parse") -py_repositories() pip_parse( name = "gapic_generator_python_pip_deps", From d0ea096aad7b49ffb1f26878c61caa6411e4fbaa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 12:11:29 -0400 Subject: [PATCH 1069/1339] chore(main): release 1.11.10 (#1826) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- 
packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f8e62caf38c2..8494143ecd35 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.11.10](https://github.com/googleapis/gapic-generator-python/compare/v1.11.9...v1.11.10) (2023-10-26) + + +### Bug Fixes + +* Upgrade rules_python to 0.26.0 ([#1825](https://github.com/googleapis/gapic-generator-python/issues/1825)) ([5d66387](https://github.com/googleapis/gapic-generator-python/commit/5d66387cd42a38d54dde5e8f0153446fee194b27)) + ## [1.11.9](https://github.com/googleapis/gapic-generator-python/compare/v1.11.8...v1.11.9) (2023-10-18) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index afd01d365ee2..61e66bae684c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.9" +version = "1.11.10" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 60869d368d9639c596be2c281c311cdb56e32c1d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 21:10:17 +0000 Subject: [PATCH 1070/1339] chore: rename rst files to avoid conflict with service names (#1812) Source-Link: https://togithub.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 
++-- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index dd98abbdeebe..7f291dbd5f9b 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 0332d3267e15..16170d0ca7b8 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine From c83d67acd99c0ba33b60fb5031f10a7db35091aa Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 2 Nov 2023 17:11:56 -0400 Subject: [PATCH 1071/1339] fix: add missing dependency google-shopping-type (#1842) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 487f1b478b77..3e8345d1fd8e 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -13,6 +13,7 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "3.0.0dev"}, ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, - ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"} + ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"}, + ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"} } %} From 331a608932698aca25dcc17b73f13c6e8262a2bb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 18:59:28 -0400 Subject: [PATCH 1072/1339] chore(main): release 1.11.11 (#1843) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8494143ecd35..017f28a0505b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.11.11](https://github.com/googleapis/gapic-generator-python/compare/v1.11.10...v1.11.11) (2023-11-02) + + +### Bug Fixes + 
+* Add missing dependency google-shopping-type ([#1842](https://github.com/googleapis/gapic-generator-python/issues/1842)) ([b1eabd7](https://github.com/googleapis/gapic-generator-python/commit/b1eabd7f439d032edd3fb6d0b65415a2c6b737ab)) + ## [1.11.10](https://github.com/googleapis/gapic-generator-python/compare/v1.11.9...v1.11.10) (2023-10-26) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 61e66bae684c..43376f420af1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.10" +version = "1.11.11" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 19f323ef9f9f00dcbcb5efb7658643496dece343 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Fri, 3 Nov 2023 12:57:51 -0400 Subject: [PATCH 1073/1339] Fix: Implement Async Client to use Async Retry to work as expected (#1823) Co-authored-by: omair --- .../services/%service/_async_mixins.py.j2 | 20 ++--- .../%sub/services/%service/async_client.py.j2 | 16 ++-- .../services/asset_service/async_client.py | 68 +++++++------- .../services/iam_credentials/async_client.py | 22 ++--- .../services/eventarc/async_client.py | 60 ++++++------- .../config_service_v2/async_client.py | 90 +++++++++---------- .../logging_service_v2/async_client.py | 36 ++++---- .../metrics_service_v2/async_client.py | 30 +++---- .../services/cloud_redis/async_client.py | 40 ++++----- .../tests/system/test_retry.py | 6 +- 10 files changed, 194 insertions(+), 194 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 
9fc240a34c29..71f1c1aeb4e0 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -17,7 +17,7 @@ request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -71,7 +71,7 @@ request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -129,7 +129,7 @@ request (:class:`~.operations_pb2.DeleteOperationRequest`): The request object. Request message for `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -181,7 +181,7 @@ request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -235,7 +235,7 @@ request (:class:`~.operations_pb2.WaitOperationRequest`): The request object. Request message for `WaitOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -295,7 +295,7 @@ request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -415,7 +415,7 @@ request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -536,7 +536,7 @@ request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -594,7 +594,7 @@ request (:class:`~.location_pb2.GetLocationRequest`): The request object. Request message for `GetLocation` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -647,7 +647,7 @@ request (:class:`~.location_pb2.ListLocationsRequest`): The request object. Request message for `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 5abd583cacac..c7cfd1ff8429 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -16,14 +16,14 @@ from {{package_path}} import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore {% filter 
sort_lines %} {% for method in service.methods.values() %} @@ -241,7 +241,7 @@ class {{ service.async_client_name }}: The request object AsyncIterator.{{ " " }} {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} {% endif %} - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -313,7 +313,7 @@ class {{ service.async_client_name }}: rpc = gapic_v1.method_async.wrap_method( self._client._transport.{{ method.transport_safe_name|snake_case }}, {% if method.retry %} - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} @@ -403,7 +403,7 @@ class {{ service.async_client_name }}: request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -518,7 +518,7 @@ class {{ service.async_client_name }}: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -635,7 +635,7 @@ class {{ service.async_client_name }}: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 9a4309d1bb6a..d3daf1aaab71 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -262,7 +262,7 @@ async def sample_export_assets(): Args: request 
(Optional[Union[google.cloud.asset_v1.types.ExportAssetsRequest, dict]]): The request object. Export asset request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -370,7 +370,7 @@ async def sample_list_assets(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -480,7 +480,7 @@ async def sample_batch_get_assets_history(): Args: request (Optional[Union[google.cloud.asset_v1.types.BatchGetAssetsHistoryRequest, dict]]): The request object. Batch get assets history request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -497,7 +497,7 @@ async def sample_batch_get_assets_history(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.batch_get_assets_history, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -587,7 +587,7 @@ async def sample_create_feed(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -695,7 +695,7 @@ async def sample_get_feed(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -732,7 +732,7 @@ async def sample_get_feed(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_feed, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -813,7 +813,7 @@ async def sample_list_feeds(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -842,7 +842,7 @@ async def sample_list_feeds(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_feeds, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -924,7 +924,7 @@ async def sample_update_feed(): This corresponds to the ``feed`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1029,7 +1029,7 @@ async def sample_delete_feed(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1054,7 +1054,7 @@ async def sample_delete_feed(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_feed, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -1237,7 +1237,7 @@ async def sample_search_all_resources(): This corresponds to the ``asset_types`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1275,7 +1275,7 @@ async def sample_search_all_resources(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.search_all_resources, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -1433,7 +1433,7 @@ async def sample_search_all_iam_policies(): This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1469,7 +1469,7 @@ async def sample_search_all_iam_policies(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.search_all_iam_policies, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -1551,7 +1551,7 @@ async def sample_analyze_iam_policy(): request (Optional[Union[google.cloud.asset_v1.types.AnalyzeIamPolicyRequest, dict]]): The request object. A request message for [AssetService.AnalyzeIamPolicy][google.cloud.asset.v1.AssetService.AnalyzeIamPolicy]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1570,7 +1570,7 @@ async def sample_analyze_iam_policy(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.analyze_iam_policy, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), @@ -1660,7 +1660,7 @@ async def sample_analyze_iam_policy_longrunning(): request (Optional[Union[google.cloud.asset_v1.types.AnalyzeIamPolicyLongrunningRequest, dict]]): The request object. A request message for [AssetService.AnalyzeIamPolicyLongrunning][google.cloud.asset.v1.AssetService.AnalyzeIamPolicyLongrunning]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1758,7 +1758,7 @@ async def sample_analyze_move(): request (Optional[Union[google.cloud.asset_v1.types.AnalyzeMoveRequest, dict]]): The request object. The request message for performing resource move analysis. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1854,7 +1854,7 @@ async def sample_query_assets(): Args: request (Optional[Union[google.cloud.asset_v1.types.QueryAssetsRequest, dict]]): The request object. QueryAssets request. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1971,7 +1971,7 @@ async def sample_create_saved_query(): This corresponds to the ``saved_query_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2079,7 +2079,7 @@ async def sample_get_saved_query(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2186,7 +2186,7 @@ async def sample_list_saved_queries(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2311,7 +2311,7 @@ async def sample_update_saved_query(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2414,7 +2414,7 @@ async def sample_delete_saved_query(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2499,7 +2499,7 @@ async def sample_batch_get_effective_iam_policies(): request (Optional[Union[google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesRequest, dict]]): The request object. A request message for [AssetService.BatchGetEffectiveIamPolicies][google.cloud.asset.v1.AssetService.BatchGetEffectiveIamPolicies]. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2619,7 +2619,7 @@ async def sample_analyze_org_policies(): This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2767,7 +2767,7 @@ async def sample_analyze_org_policy_governed_containers(): This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2944,7 +2944,7 @@ async def sample_analyze_org_policy_governed_assets(): This corresponds to the ``filter`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3028,7 +3028,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 54189ad74743..4ae76357b909 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore @@ -290,7 +290,7 @@ async def sample_generate_access_token(): This corresponds to the ``lifetime`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -325,7 +325,7 @@ async def sample_generate_access_token(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.generate_access_token, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -444,7 +444,7 @@ async def sample_generate_id_token(): This corresponds to the ``include_email`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -479,7 +479,7 @@ async def sample_generate_id_token(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.generate_id_token, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -586,7 +586,7 @@ async def sample_sign_blob(): This corresponds to the ``payload`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -619,7 +619,7 @@ async def sample_sign_blob(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.sign_blob, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, @@ -729,7 +729,7 @@ async def sample_sign_jwt(): This corresponds to the ``payload`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -762,7 +762,7 @@ async def sample_sign_jwt(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.sign_jwt, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index e247bcbc109c..54a22eae6f77 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: 
ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -269,7 +269,7 @@ async def sample_get_trigger(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -371,7 +371,7 @@ async def sample_list_triggers(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -512,7 +512,7 @@ async def sample_create_trigger(): This corresponds to the ``trigger_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -649,7 +649,7 @@ async def sample_update_trigger(): This corresponds to the ``allow_missing`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -779,7 +779,7 @@ async def sample_delete_trigger(): This corresponds to the ``allow_missing`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -893,7 +893,7 @@ async def sample_get_channel(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1001,7 +1001,7 @@ async def sample_list_channels(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1139,7 +1139,7 @@ async def sample_create_channel(): This corresponds to the ``channel_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1270,7 +1270,7 @@ async def sample_update_channel(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1392,7 +1392,7 @@ async def sample_delete_channel(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1507,7 +1507,7 @@ async def sample_get_provider(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1609,7 +1609,7 @@ async def sample_list_providers(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1721,7 +1721,7 @@ async def sample_get_channel_connection(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1828,7 +1828,7 @@ async def sample_list_channel_connections(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1968,7 +1968,7 @@ async def sample_create_channel_connection(): This corresponds to the ``channel_connection_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2090,7 +2090,7 @@ async def sample_delete_channel_connection(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2204,7 +2204,7 @@ async def sample_get_google_channel_config(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2322,7 +2322,7 @@ async def sample_update_google_channel_config(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2398,7 +2398,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2449,7 +2449,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2505,7 +2505,7 @@ async def delete_operation( request (:class:`~.operations_pb2.DeleteOperationRequest`): The request object. Request message for `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2555,7 +2555,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2603,7 +2603,7 @@ async def set_iam_policy( request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2721,7 +2721,7 @@ async def get_iam_policy( request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2840,7 +2840,7 @@ async def test_iam_permissions( request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2892,7 +2892,7 @@ async def get_location( request (:class:`~.location_pb2.GetLocationRequest`): The request object. Request message for `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2943,7 +2943,7 @@ async def list_locations( request (:class:`~.location_pb2.ListLocationsRequest`): The request object. Request message for `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 1c9d95910463..156d523f66dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -260,7 +260,7 @@ async def sample_list_buckets(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -364,7 +364,7 @@ async def sample_get_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): The request object. The parameters to ``GetBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -452,7 +452,7 @@ async def sample_create_bucket_async(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -553,7 +553,7 @@ async def sample_update_bucket_async(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -647,7 +647,7 @@ async def sample_create_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): The request object. The parameters to ``CreateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -733,7 +733,7 @@ async def sample_update_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): The request object. The parameters to ``UpdateBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -815,7 +815,7 @@ async def sample_delete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): The request object. The parameters to ``DeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -885,7 +885,7 @@ async def sample_undelete_bucket(): Args: request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): The request object. The parameters to ``UndeleteBucket``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -968,7 +968,7 @@ async def sample_list_views(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1072,7 +1072,7 @@ async def sample_get_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): The request object. The parameters to ``GetView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1154,7 +1154,7 @@ async def sample_create_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): The request object. The parameters to ``CreateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1238,7 +1238,7 @@ async def sample_update_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): The request object. The parameters to ``UpdateView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1318,7 +1318,7 @@ async def sample_delete_view(): Args: request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): The request object. The parameters to ``DeleteView``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1405,7 +1405,7 @@ async def sample_list_sinks(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1438,7 +1438,7 @@ async def sample_list_sinks(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_sinks, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -1534,7 +1534,7 @@ async def sample_get_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1572,7 +1572,7 @@ async def sample_get_sink(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -1676,7 +1676,7 @@ async def sample_create_sink(): This corresponds to the ``sink`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1837,7 +1837,7 @@ async def sample_update_sink(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1879,7 +1879,7 @@ async def sample_update_sink(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -1965,7 +1965,7 @@ async def sample_delete_sink(): This corresponds to the ``sink_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1990,7 +1990,7 @@ async def sample_delete_sink(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_sink, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -2094,7 +2094,7 @@ async def sample_create_link(): This corresponds to the ``link_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2218,7 +2218,7 @@ async def sample_delete_link(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2342,7 +2342,7 @@ async def sample_list_links(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2458,7 +2458,7 @@ async def sample_get_link(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2567,7 +2567,7 @@ async def sample_list_exclusions(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2600,7 +2600,7 @@ async def sample_list_exclusions(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_exclusions, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -2696,7 +2696,7 @@ async def sample_get_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2732,7 +2732,7 @@ async def sample_get_exclusion(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_exclusion, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -2837,7 +2837,7 @@ async def sample_create_exclusion(): This corresponds to the ``exclusion`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2984,7 +2984,7 @@ async def sample_update_exclusion(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3101,7 +3101,7 @@ async def sample_delete_exclusion(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3126,7 +3126,7 @@ async def sample_delete_exclusion(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_exclusion, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -3206,7 +3206,7 @@ async def sample_get_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3315,7 +3315,7 @@ async def sample_update_cmek_settings(): See `Enabling CMEK for Log Router `__ for more information. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3445,7 +3445,7 @@ async def sample_get_settings(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3586,7 +3586,7 @@ async def sample_update_settings(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3687,7 +3687,7 @@ async def sample_copy_log_entries(): Args: request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): The request object. The parameters to CopyLogEntries. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3747,7 +3747,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3798,7 +3798,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -3853,7 +3853,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 8f947ba54726..34e386834685 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -245,7 +245,7 @@ async def sample_delete_log(): This corresponds to the ``log_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -270,7 +270,7 @@ async def sample_delete_log(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_log, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -435,7 +435,7 @@ async def sample_write_log_entries(): This corresponds to the ``entries`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -471,7 +471,7 @@ async def sample_write_log_entries(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.write_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -586,7 +586,7 @@ async def sample_list_log_entries(): This corresponds to the ``order_by`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -623,7 +623,7 @@ async def sample_list_log_entries(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -695,7 +695,7 @@ async def sample_list_monitored_resource_descriptors(): request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): The request object. The parameters to ListMonitoredResourceDescriptors - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -717,7 +717,7 @@ async def sample_list_monitored_resource_descriptors(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -802,7 +802,7 @@ async def sample_list_logs(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -836,7 +836,7 @@ async def sample_list_logs(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_logs, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -927,7 +927,7 @@ def request_generator(): Args: requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): The request object AsyncIterator. The parameters to ``TailLogEntries``. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -942,7 +942,7 @@ def request_generator(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.tail_log_entries, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -979,7 +979,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1030,7 +1030,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1085,7 +1085,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 5d160dc58bb8..4f423f073a83 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -239,7 +239,7 @@ async def sample_list_log_metrics(): This corresponds to the 
``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -273,7 +273,7 @@ async def sample_list_log_metrics(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_log_metrics, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -362,7 +362,7 @@ async def sample_get_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -403,7 +403,7 @@ async def sample_get_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -500,7 +500,7 @@ async def sample_create_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -631,7 +631,7 @@ async def sample_update_log_metric(): This corresponds to the ``metric`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -674,7 +674,7 @@ async def sample_update_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -751,7 +751,7 @@ async def sample_delete_log_metric(): This corresponds to the ``metric_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -776,7 +776,7 @@ async def sample_delete_log_metric(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_log_metric, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, @@ -818,7 +818,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. 
Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -869,7 +869,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -924,7 +924,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 909c48d2db82..27510d6fd325 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -23,14 +23,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -271,7 +271,7 @@ async def sample_list_instances(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -385,7 +385,7 @@ async def sample_get_instance(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -488,7 +488,7 @@ async def sample_get_instance_auth_string(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -634,7 +634,7 @@ async def sample_create_instance(): This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -780,7 +780,7 @@ async def sample_update_instance(): This corresponds to the ``instance`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -909,7 +909,7 @@ async def sample_upgrade_instance(): This corresponds to the ``redis_version`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1048,7 +1048,7 @@ async def sample_import_instance(): This corresponds to the ``input_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1184,7 +1184,7 @@ async def sample_export_instance(): This corresponds to the ``output_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1314,7 +1314,7 @@ async def sample_failover_instance(): This corresponds to the ``data_protection_mode`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1434,7 +1434,7 @@ async def sample_delete_instance(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1577,7 +1577,7 @@ async def sample_reschedule_maintenance(): This corresponds to the ``schedule_time`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1660,7 +1660,7 @@ async def list_operations( request (:class:`~.operations_pb2.ListOperationsRequest`): The request object. Request message for `ListOperations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1711,7 +1711,7 @@ async def get_operation( request (:class:`~.operations_pb2.GetOperationRequest`): The request object. Request message for `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1767,7 +1767,7 @@ async def delete_operation( request (:class:`~.operations_pb2.DeleteOperationRequest`): The request object. Request message for `DeleteOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1817,7 +1817,7 @@ async def cancel_operation( request (:class:`~.operations_pb2.CancelOperationRequest`): The request object. Request message for `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1863,7 +1863,7 @@ async def get_location( request (:class:`~.location_pb2.GetLocationRequest`): The request object. Request message for `GetLocation` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1914,7 +1914,7 @@ async def list_locations( request (:class:`~.location_pb2.ListLocationsRequest`): The request object. Request message for `ListLocations` method. - retry (google.api_core.retry.Retry): Designation of what errors, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index d649cbabd3dd..8e1c3ee383bf 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -45,10 +45,10 @@ def test_retry_bubble(echo): @pytest.mark.asyncio async def test_retry_bubble_async(async_echo): - with pytest.raises(exceptions.DeadlineExceeded): + with pytest.raises(exceptions.RetryError): await async_echo.echo({ 'error': { - 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), - 'message': 'This took longer than you said it should.', + 'code': code_pb2.Code.Value('UNAVAILABLE'), + 'message': 'This service is not available.', }, }) From 3d79f994ffdc67b2a5d25fe52ee4c338d7f1f191 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 8 Nov 2023 15:09:01 -0500 Subject: [PATCH 1074/1339] feat: Introduce compatibility with native namespace packages (#1852) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 3 +-- .../gapic-generator/tests/integration/goldens/asset/setup.py | 3 +-- .../tests/integration/goldens/credentials/setup.py | 3 +-- .../tests/integration/goldens/eventarc/setup.py | 3 +-- .../gapic-generator/tests/integration/goldens/logging/setup.py | 3 +-- .../gapic-generator/tests/integration/goldens/redis/setup.py | 3 +-- 6 files changed, 6 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 16041ab4e9ca..fe58518a3018 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -51,7 +51,7 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("{{ 
api.naming.namespace_packages|first }}") ] @@ -83,7 +83,6 @@ setuptools.setup( platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 4353b691d2c3..3c09a1ca6a89 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -54,7 +54,7 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] @@ -86,7 +86,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 98a42b7ba219..e4ab84289d68 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -51,7 +51,7 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] @@ -83,7 +83,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 6cba716beac8..f07bbc2d340c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py 
+++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -52,7 +52,7 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] @@ -84,7 +84,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 96a1235326b6..2dc29ddaaea2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -51,7 +51,7 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] @@ -83,7 +83,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 4ccd159d1211..66ea605b434e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -51,7 +51,7 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] @@ -83,7 +83,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, From 0a36d667d746ad5b969c771edf826abfcd19b328 Mon Sep 17 00:00:00 2001 From: ohmayr Date: 
Wed, 8 Nov 2023 17:24:21 -0500 Subject: [PATCH 1075/1339] Fix: wrap method in async client (#1834) Co-authored-by: omair --- .../services/%service/_async_mixins.py.j2 | 20 +++++++++---------- .../%name_%version/%sub/_test_mixins.py.j2 | 12 +++++------ .../services/asset_service/async_client.py | 2 +- .../unit/gapic/asset_v1/test_asset_service.py | 2 +- .../services/eventarc/async_client.py | 18 ++++++++--------- .../unit/gapic/eventarc_v1/test_eventarc.py | 10 +++++----- .../config_service_v2/async_client.py | 6 +++--- .../logging_service_v2/async_client.py | 6 +++--- .../metrics_service_v2/async_client.py | 6 +++--- .../logging_v2/test_config_service_v2.py | 6 +++--- .../logging_v2/test_logging_service_v2.py | 6 +++--- .../logging_v2/test_metrics_service_v2.py | 6 +++--- .../services/cloud_redis/async_client.py | 12 +++++------ .../unit/gapic/redis_v1/test_cloud_redis.py | 10 +++++----- .../tests/system/test_retry.py | 11 ++++++++++ 15 files changed, 72 insertions(+), 61 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 71f1c1aeb4e0..8dacd96b1e65 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -34,7 +34,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -88,7 +88,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -145,7 +145,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -197,7 +197,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -252,7 +252,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.wait_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -376,7 +376,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.set_iam_policy, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -496,7 +496,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_iam_policy, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -554,7 +554,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.test_iam_permissions, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -611,7 +611,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_location, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -664,7 +664,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_locations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index f0a0256211cc..eaa40e24dc2d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -68,7 +68,7 @@ def test_{{ name|snake_case }}_rest(request_type): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): +async def test_delete_operation_async(transport: str = "grpc_asyncio"): {% else %} def test_delete_operation(transport: str = "grpc"): {% endif %} @@ -197,7 +197,7 @@ def test_delete_operation_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): {% else %} def test_cancel_operation(transport: str = "grpc"): {% endif %} @@ -325,7 +325,7 @@ def test_cancel_operation_from_dict(): {% for mode in 
["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_wait_operation(transport: str = "grpc"): +async def test_wait_operation(transport: str = "grpc_asyncio"): {% else %} def test_wait_operation(transport: str = "grpc"): {% endif %} @@ -454,7 +454,7 @@ def test_wait_operation_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): {% else %} def test_get_operation(transport: str = "grpc"): {% endif %} @@ -583,7 +583,7 @@ def test_get_operation_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): {% else %} def test_list_operations(transport: str = "grpc"): {% endif %} @@ -718,7 +718,7 @@ def test_list_operations_from_dict(): {% for mode in ["", "async"] %} {% if mode == "async" %} @pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc"): +async def test_list_locations_async(transport: str = "grpc_asyncio"): {% else %} def test_list_locations(transport: str = "grpc"): {% endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index d3daf1aaab71..39871843efa2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -3045,7 +3045,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index da7e9293edf9..0ea3a40d3466 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -13416,7 +13416,7 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = AssetServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 54a22eae6f77..01d0a70d66ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -2415,7 +2415,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2466,7 +2466,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2521,7 +2521,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2571,7 +2571,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2684,7 +2684,7 @@ async def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.set_iam_policy, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2802,7 +2802,7 @@ async def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_iam_policy, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2858,7 +2858,7 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.test_iam_permissions, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2909,7 +2909,7 @@ async def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_location, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -2960,7 +2960,7 @@ async def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_locations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index da0e006f4f57..8d3d25bad750 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -11992,7 +11992,7 @@ def test_delete_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): +async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12121,7 +12121,7 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12250,7 +12250,7 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12379,7 +12379,7 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12508,7 +12508,7 @@ def test_list_locations(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) @pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc"): +async def test_list_locations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 156d523f66dc..755d2b11b991 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -3764,7 +3764,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -3815,7 +3815,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -3869,7 +3869,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 34e386834685..bbb16469c236 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -996,7 +996,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1047,7 +1047,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1101,7 +1101,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 4f423f073a83..03630110d299 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -835,7 +835,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -886,7 +886,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -940,7 +940,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index a482c9cbf715..f7c8e0802458 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -8830,7 +8830,7 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8959,7 +8959,7 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9088,7 +9088,7 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.ListOperationsResponse) @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 4005872a7559..8bb644e170b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2721,7 +2721,7 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2850,7 +2850,7 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2979,7 +2979,7 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.ListOperationsResponse) @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e7a0798abcfb..c19f8e31c8db 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2588,7 +2588,7 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2717,7 +2717,7 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2846,7 +2846,7 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.ListOperationsResponse) @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 27510d6fd325..49404a6513a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1677,7 +1677,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_operations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1728,7 +1728,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1783,7 +1783,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1833,7 +1833,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_operation, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1880,7 +1880,7 @@ async def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_location, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, @@ -1931,7 +1931,7 @@ async def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( + rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_locations, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 795084128731..59deff1f6610 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -7289,7 +7289,7 @@ def test_delete_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert response is None @pytest.mark.asyncio -async def test_delete_operation_async(transport: str = "grpc"): +async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7418,7 +7418,7 @@ def test_cancel_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert response is None @pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7547,7 +7547,7 @@ def test_get_operation(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.Operation) @pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): +async def test_get_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7676,7 +7676,7 @@ def test_list_operations(transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) @pytest.mark.asyncio -async def test_list_operations_async(transport: str = "grpc"): +async def test_list_operations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7805,7 +7805,7 @@ def test_list_locations(transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, locations_pb2.ListLocationsResponse) @pytest.mark.asyncio -async def test_list_locations_async(transport: str = "grpc"): +async def test_list_locations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 8e1c3ee383bf..6ec707cd509a 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -52,3 +52,14 @@ async def test_retry_bubble_async(async_echo): 'message': 'This service is not available.', }, }) + + # Note: This test verifies that: + # Using gapic_v1.method.wrap_method in *AsyncClient raises a RPCError (Incorrect behaviour). + # Using gapic_v1.method_async.wrap_method in *AsyncClient raises a google.api_core.exceptions.GoogleAPIError. + + @pytest.mark.asyncio + async def test_method_async_wrapper_for_async_client(async_echo): + with pytest.raises(exceptions.NotFound): + await async_echo.get_operation({ + 'name': "operations/echo" + }) From cb1174ea68c46f72ef0a208e9bbb8f278dc06458 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 9 Nov 2023 10:28:09 -0500 Subject: [PATCH 1076/1339] fix: allow pb2 files to be included in the output of py_gapic_assembly_pkg (#1855) --- .../rules_python_gapic/py_gapic_pkg.bzl | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl index 55ba8fde6467..3898836d2586 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl @@ -17,12 +17,16 @@ load("@rules_gapic//:gapic_pkg.bzl", "construct_package_dir_paths") def _py_gapic_src_pkg_impl(ctx): srcjar_srcs = [] dir_srcs = [] + py_srcs = [] for 
dep in ctx.attr.deps: for f in dep.files.to_list(): if f.is_directory: dir_srcs.append(f) elif f.extension in ("srcjar", "jar", "zip"): srcjar_srcs.append(f) + # Exclude source files and files for external packages + elif f.extension in ("py") and not f.is_source and 'external' not in f.path: + py_srcs.append(f) paths = construct_package_dir_paths(ctx.attr.package_dir, ctx.outputs.pkg, ctx.label.name) @@ -34,6 +38,9 @@ def _py_gapic_src_pkg_impl(ctx): for dir_src in {dir_srcs}; do cp -rT -L $dir_src {package_dir_path} done + for py_src in {py_srcs}; do + cp $py_src {package_dir_path} + done # Replace 555 (forced by Bazel) permissions with 644 find {package_dir_path} -type f -exec chmod 644 {{}} \\; cd {package_dir_path}/.. @@ -44,6 +51,7 @@ def _py_gapic_src_pkg_impl(ctx): """.format( srcjar_srcs = " ".join(["'%s'" % f.path for f in srcjar_srcs]), dir_srcs = " ".join(["'%s'" % f.path for f in dir_srcs]), + py_srcs = " ".join(["'%s'" % f.path for f in py_srcs]), package_dir_path = paths.package_dir_path, package_dir = paths.package_dir, pkg = ctx.outputs.pkg.path, @@ -51,7 +59,7 @@ def _py_gapic_src_pkg_impl(ctx): ) ctx.actions.run_shell( - inputs = srcjar_srcs + dir_srcs, + inputs = srcjar_srcs + dir_srcs + py_srcs, command = script, outputs = [ctx.outputs.pkg], ) From f884ef0fb8c0325425d6e1c4b14e93bcfff5b7db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 9 Nov 2023 11:10:02 -0500 Subject: [PATCH 1077/1339] chore(main): release 1.12.0 (#1844) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 14 ++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 017f28a0505b..ea9f87f635e9 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,20 @@ # Changelog +## [1.12.0](https://github.com/googleapis/gapic-generator-python/compare/v1.11.11...v1.12.0) (2023-11-09) + + +### Features + +* Introduce compatibility with native namespace packages ([#1852](https://github.com/googleapis/gapic-generator-python/issues/1852)) ([ef2094a](https://github.com/googleapis/gapic-generator-python/commit/ef2094ae15cd8b4be5e18f1c6b4d62a6b8f0d416)) + + +### Bug Fixes + +* Allow pb2 files to be included in the output of py_gapic_assembly_pkg ([#1855](https://github.com/googleapis/gapic-generator-python/issues/1855)) ([e374734](https://github.com/googleapis/gapic-generator-python/commit/e37473494002de855d63498f731c26cec5c49b9c)) +* Implement Async Client to use Async Retry to work as expected ([#1823](https://github.com/googleapis/gapic-generator-python/issues/1823)) ([8ede788](https://github.com/googleapis/gapic-generator-python/commit/8ede788616b5b73f52bc7a8aca831c37b054b81f)) +* Wrap method in async client ([#1834](https://github.com/googleapis/gapic-generator-python/issues/1834)) ([8e1b5e0](https://github.com/googleapis/gapic-generator-python/commit/8e1b5e077a78904710d350b7dc97cdd2d6034fa9)) + ## [1.11.11](https://github.com/googleapis/gapic-generator-python/compare/v1.11.10...v1.11.11) (2023-11-02) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 43376f420af1..3661b22ad03a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.11.11" +version = "1.12.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From f72fd694a8fe7aea4fddea6d1d325c4a0f3d2cdd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" 
<78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 9 Nov 2023 19:48:15 +0000 Subject: [PATCH 1078/1339] build: bump dependencies in .kokoro/requirements.txt (#1853) Source-Link: https://togithub.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 532 +++++++++--------- 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 7f291dbd5f9b..453b540c1e58 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 16170d0ca7b8..8957e21104e2 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox 
-attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - 
--hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - 
--hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - 
--hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + 
--hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + 
--hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - 
--hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + 
--hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + 
--hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 
+google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a +google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage 
-googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + 
--hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - 
--hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + 
--hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + 
--hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + 
--hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + 
--hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine 
-platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 +411,30 @@ protobuf==3.20.3 \ # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - 
--hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ 
--hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 +rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # 
twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r requirements.in From 4b18f029e3fad87bce28035bccde47bce0ca302e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 9 Nov 2023 22:24:16 +0100 Subject: [PATCH 1079/1339] 
chore(deps): update dependency urllib3 to v2.0.7 [security] (#1827) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![Mend Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [urllib3](https://togithub.com/urllib3/urllib3) ([changelog](https://togithub.com/urllib3/urllib3/blob/main/CHANGES.rst)) | `==2.0.6` -> `==2.0.7` | [![age](https://developer.mend.io/api/mc/badges/age/pypi/urllib3/2.0.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://developer.mend.io/api/mc/badges/adoption/pypi/urllib3/2.0.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://developer.mend.io/api/mc/badges/compatibility/pypi/urllib3/2.0.6/2.0.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://developer.mend.io/api/mc/badges/confidence/pypi/urllib3/2.0.6/2.0.7?slim=true)](https://docs.renovatebot.com/merge-confidence/) | ### GitHub Vulnerability Alerts #### [CVE-2023-45803](https://togithub.com/urllib3/urllib3/security/advisories/GHSA-g4mx-q9vg-27p4) urllib3 previously wouldn't remove the HTTP request body when an HTTP redirect response using status 303 "See Other" after the request had its method changed from one that could accept a request body (like `POST`) to `GET` as is required by HTTP RFCs. Although the behavior of removing the request body is not specified in the section for redirects, it can be inferred by piecing together information from different sections and we have observed the behavior in other major HTTP client implementations like curl and web browsers. 
From [RFC 9110 Section 9.3.1](https://www.rfc-editor.org/rfc/rfc9110.html#name-get): > A client SHOULD NOT generate content in a GET request unless it is made directly to an origin server that has previously indicated, in or out of band, that such a request has a purpose and will be adequately supported. ## Affected usages Because the vulnerability requires a previously trusted service to become compromised in order to have an impact on confidentiality we believe the exploitability of this vulnerability is low. Additionally, many users aren't putting sensitive data in HTTP request bodies, if this is the case then this vulnerability isn't exploitable. Both of the following conditions must be true to be affected by this vulnerability: * If you're using urllib3 and submitting sensitive information in the HTTP request body (such as form data or JSON) * The origin service is compromised and starts redirecting using 303 to a malicious peer or the redirected-to service becomes compromised. ## Remediation You can remediate this vulnerability with any of the following steps: * Upgrade to a patched version of urllib3 (v1.26.18 or v2.0.7) * Disable redirects for services that you aren't expecting to respond with redirects with `redirects=False`. * Disable automatic redirects with `redirects=False` and handle 303 redirects manually by stripping the HTTP request body. --- ### Release Notes
urllib3/urllib3 (urllib3) ### [`v2.0.7`](https://togithub.com/urllib3/urllib3/blob/HEAD/CHANGES.rst#207-2023-10-17) [Compare Source](https://togithub.com/urllib3/urllib3/compare/2.0.6...2.0.7) \================== - Made body stripped from HTTP requests changing the request method to GET after HTTP 303 "See Other" redirect responses.
--- ### Configuration 📅 **Schedule**: Branch creation - "" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Mend Renovate](https://www.mend.io/free-developer-tools/renovate/). View repository job log [here](https://developer.mend.io/github/googleapis/gapic-generator-python). --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f3f50c2ac63f..2e878ac95621 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -419,9 +419,9 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.0.6 \ - --hash=sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2 \ - --hash=sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # google-auth # requests From 0f9f80bcdc7482bf2082b26dc107563dae3bea41 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Fri, 10 Nov 2023 17:23:17 -0500 Subject: [PATCH 1080/1339] feat: add support for python 3.12 (#1816) Co-authored-by: omair Co-authored-by: Anthonios Partheniou --- .../.github/sync-repo-settings.yaml | 19 +++++- .../.github/workflows/tests.yaml | 62 +++++++++++-------- .../gapic/ads-templates/noxfile.py.j2 | 1 + 
.../gapic/ads-templates/setup.py.j2 | 1 + .../gapic-generator/gapic/schema/metadata.py | 2 +- .../gapic/templates/noxfile.py.j2 | 15 ++--- .../gapic/templates/setup.py.j2 | 3 +- packages/gapic-generator/noxfile.py | 15 ++--- packages/gapic-generator/setup.py | 3 +- .../{gapic => test_utils}/__init__.py | 2 +- .../integration/goldens/asset/noxfile.py | 15 ++--- .../tests/integration/goldens/asset/setup.py | 3 +- .../goldens/credentials/noxfile.py | 15 ++--- .../integration/goldens/credentials/setup.py | 3 +- .../integration/goldens/eventarc/noxfile.py | 15 ++--- .../integration/goldens/eventarc/setup.py | 3 +- .../integration/goldens/logging/noxfile.py | 15 ++--- .../integration/goldens/logging/setup.py | 3 +- .../integration/goldens/redis/noxfile.py | 15 ++--- .../tests/integration/goldens/redis/setup.py | 3 +- 20 files changed, 96 insertions(+), 117 deletions(-) rename packages/gapic-generator/{gapic => test_utils}/__init__.py (95%) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index bae183f3b068..0b661a220f3c 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -7,9 +7,15 @@ branchProtectionRules: requiredStatusCheckContexts: - 'cla/google' - 'docs' - - 'mypy' - - 'showcase (showcase)' - - 'showcase (showcase_alternative_templates)' + - 'mypy (3.8)' + - 'mypy (3.9)' + - 'mypy (3.10)' + - 'mypy (3.11)' + - 'mypy (3.12)' + - 'showcase (3.7, showcase)' + - 'showcase (3.12, showcase)' + - 'showcase (3.7, showcase_alternative_templates)' + - 'showcase (3.12, showcase_alternative_templates)' # TODO(dovs): reenable these when the mtls tests have been debugged and fixed # See #1218 for details # - 'showcase-mtls (showcase_mtls)' @@ -21,21 +27,25 @@ branchProtectionRules: - 'showcase-unit (3.9)' - 'showcase-unit (3.10)' - 'showcase-unit (3.11)' + - 'showcase-unit (3.12)' - 'showcase-unit (3.7, 
_alternative_templates)' - 'showcase-unit (3.8, _alternative_templates)' - 'showcase-unit (3.9, _alternative_templates)' - 'showcase-unit (3.10, _alternative_templates)' - 'showcase-unit (3.11, _alternative_templates)' + - 'showcase-unit (3.12, _alternative_templates)' - 'showcase-unit (3.7, _alternative_templates_mixins)' - 'showcase-unit (3.8, _alternative_templates_mixins)' - 'showcase-unit (3.9, _alternative_templates_mixins)' - 'showcase-unit (3.10, _alternative_templates_mixins)' - 'showcase-unit (3.11, _alternative_templates_mixins)' + - 'showcase-unit (3.12, _alternative_templates_mixins)' - 'showcase-unit (3.7, _mixins)' - 'showcase-unit (3.8, _mixins)' - 'showcase-unit (3.9, _mixins)' - 'showcase-unit (3.10, _mixins)' - 'showcase-unit (3.11, _mixins)' + - 'showcase-unit (3.12, _mixins)' - 'showcase-unit-add-iam-methods' - 'integration' - 'goldens-lint' @@ -46,16 +56,19 @@ branchProtectionRules: - 'unit (3.9)' - 'unit (3.10)' - 'unit (3.11)' + - 'unit (3.12)' - 'fragment (3.7)' - 'fragment (3.8)' - 'fragment (3.9)' - 'fragment (3.10)' - 'fragment (3.11)' + - 'fragment (3.12)' - 'fragment (3.7, _alternative_templates)' - 'fragment (3.8, _alternative_templates)' - 'fragment (3.9, _alternative_templates)' - 'fragment (3.10, _alternative_templates)' - 'fragment (3.11, _alternative_templates)' + - 'fragment (3.12, _alternative_templates)' - 'OwlBot Post Processor' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 1d464c51c997..a611be53063f 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -19,43 +19,51 @@ env: jobs: docs: - # Don't upgrade python version; there's a bug in 3.10 sphinx runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 + # Use python 3.10 for docs to match the version 
for the sphinx plugin + # https://github.com/googleapis/synthtool/pull/1891 with: - python-version: "3.9" + python-version: "3.10" cache: 'pip' - name: Install nox. run: python -m pip install nox - name: Build the documentation. run: nox -s docs mypy: + strategy: + matrix: + # Run mypy on all of the supported python versions listed in setup.py + # https://github.com/python/mypy/blob/master/setup.py + python: ["3.8", "3.9", "3.10", "3.11", "3.12"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.11" + - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "${{ matrix.python }}" cache: 'pip' - name: Install nox. run: python -m pip install nox - name: Check type annotations. - run: nox -s mypy + run: nox -s mypy-${{ matrix.python }} showcase: strategy: + # Run showcase tests on the lowest and highest supported runtimes matrix: + python: ["3.7", "3.12"] target: [showcase, showcase_alternative_templates] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.11" + - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "${{ matrix.python }}" cache: 'pip' - name: Install system dependencies. run: | @@ -81,7 +89,7 @@ jobs: unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Run showcase tests. 
- run: nox -s ${{ matrix.target }} + run: nox -s ${{ matrix.target }}-${{ matrix.python }} showcase-mtls: if: ${{ false }} # TODO(dovs): reenable when #1218 is fixed strategy: @@ -95,10 +103,10 @@ jobs: run: | sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python "3.11" + - name: Set up Python "3.12" uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.12" cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ @@ -131,7 +139,7 @@ jobs: showcase-unit: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] runs-on: ubuntu-latest steps: @@ -161,10 +169,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.11" + - name: Set up Python "3.12" uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.12" cache: 'pip' - name: Install system dependencies. run: | @@ -189,10 +197,10 @@ jobs: variant: ['', _alternative_templates] steps: - uses: actions/checkout@v4 - - name: Set up Python "3.11" + - name: Set up Python "3.12" uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.12" cache: 'pip' - name: Install system dependencies. run: | @@ -214,10 +222,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.11" + - name: Set up Python "3.12" uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.12" cache: 'pip' - name: Install system dependencies. 
run: | @@ -230,7 +238,7 @@ jobs: unit: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -251,7 +259,7 @@ jobs: fragment: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] variant: ['', _alternative_templates] runs-on: ubuntu-latest steps: @@ -303,21 +311,21 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python 3.11 + - name: Set up Python 3.12 uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.12" cache: 'pip' - name: Install nox. run: | python -m pip install nox - name: Run blacken and lint on the generated output. run: | - nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.11 blacken lint_setup_py lint - nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.11 blacken lint_setup_py lint - nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.11 blacken lint_setup_py lint - nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.11 blacken lint_setup_py lint - nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.11 blacken lint_setup_py lint + nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.12 blacken lint + nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.12 blacken lint + nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.12 blacken lint + nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.12 blacken lint + nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.12 blacken lint style-check: runs-on: ubuntu-latest steps: @@ -325,6 +333,8 @@ jobs: - name: Set up Python "3.11" uses: actions/setup-python@v4 with: + # Do not upgrade this check to python 3.12 until + # https://github.com/hhatto/autopep8/issues/712 is fixed python-version: "3.11" cache: 'pip' - name: Install autopep8 diff --git 
a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 91380ededde6..f10310fdfbba 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -13,6 +13,7 @@ ALL_PYTHON = [ "3.9", "3.10", "3.11", + "3.12", ] @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 584d3c53cf89..b3a8b6d77a2e 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -47,6 +47,7 @@ setuptools.setup( 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Topic :: Internet', 'Topic :: Software Development :: Libraries :: Python Modules', ], diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 2cbba5a1592e..dc9389e8f566 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -178,7 +178,7 @@ def convert_to_versioned_package(self) -> Tuple[str, ...]: # underscore between the module and the version. For example, # change google.cloud.documentai.v1 to google.cloud.documentai_v1. # Check if the package name contains a version. 
- version_regex = "^v\d[^/]*$" + version_regex = r"^v\d[^/]*$" regex_match = re.match(version_regex, self.package[-1]) if regex_match and len(self.package) > 1: versioned_module = f"{self.package[-2]}_{regex_match[0]}" diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index c592f651466e..63958b72e182 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -17,16 +17,17 @@ ALL_PYTHON = [ "3.9", "3.10", "3.11", + "3.12" ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +PACKAGE_NAME = '{{ api.naming.warehouse_package_name }}' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" +DEFAULT_PYTHON_VERSION = "3.12" nox.sessions = [ "unit", @@ -37,7 +38,6 @@ nox.sessions = [ "docs", "blacken", "lint", - "lint_setup_py", ] @nox.session(python=ALL_PYTHON) @@ -82,7 +82,7 @@ def mypy(session): session.install('.') session.run( 'mypy', - '--explicit-package-bases', + '-p', {% if api.naming.module_namespace %} '{{ api.naming.module_namespace[0] }}', {% else %} @@ -169,11 +169,4 @@ def blacken(session): *BLACK_PATHS, ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") - {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index fe58518a3018..374f9e6fdc34 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ 
-55,8 +55,6 @@ packages = [ if package.startswith("{{ api.naming.namespace_packages|first }}") ] -namespaces = [{% for namespace_packages in api.naming.namespace_packages %}"{{ namespace_packages }}"{% if not loop.last %}, {% endif %}{% endfor %}] - setuptools.setup( name=name, version=version, @@ -77,6 +75,7 @@ setuptools.setup( "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 8836ec09d12b..8024af73515a 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -39,6 +39,7 @@ "3.9", "3.10", "3.11", + "3.12", ) NEWEST_PYTHON = ALL_PYTHON[-1] @@ -260,7 +261,7 @@ def showcase_library( yield tmp_dir -@nox.session(python=NEWEST_PYTHON) +@nox.session(python=ALL_PYTHON) def showcase( session, templates="DEFAULT", @@ -299,7 +300,7 @@ def showcase_mtls( ) -@nox.session(python=NEWEST_PYTHON) +@nox.session(python=ALL_PYTHON) def showcase_alternative_templates(session): templates = path.join(path.dirname(__file__), "gapic", "ads-templates") showcase( @@ -404,7 +405,7 @@ def showcase_mypy( session.chdir(lib) # Run the tests. - session.run("mypy", "--explicit-package-bases", "google") + session.run("mypy", "-p", "google") @nox.session(python=NEWEST_PYTHON) @@ -436,11 +437,11 @@ def snippetgen(session): session.run("py.test", "-vv", "tests/snippetgen") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs.""" - session.install("sphinx==4.0.1", "sphinx_rtd_theme") + session.install("sphinx==4.5.0", "sphinx_rtd_theme") session.install(".") # Build the docs! 
@@ -457,7 +458,7 @@ def docs(session): ) -@nox.session(python=NEWEST_PYTHON) +@nox.session(python=ALL_PYTHON) def mypy(session): """Perform typecheck analysis.""" # Pin to click==8.1.3 to workaround https://github.com/pallets/click/issues/2558 @@ -469,4 +470,4 @@ def mypy(session): "click==8.1.3", ) session.install(".") - session.run("mypy", "gapic") + session.run("mypy", "-p", "gapic") diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3661b22ad03a..396877666a6c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -58,7 +58,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - packages=setuptools.find_packages(exclude=["docs", "tests"]), + packages=setuptools.find_namespace_packages(exclude=["docs", "tests"]), url=url, classifiers=[ release_status, @@ -72,6 +72,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/packages/gapic-generator/gapic/__init__.py b/packages/gapic-generator/test_utils/__init__.py similarity index 95% rename from packages/gapic-generator/gapic/__init__.py rename to packages/gapic-generator/test_utils/__init__.py index b0c7da3d7725..5678014ca955 100644 --- a/packages/gapic-generator/gapic/__init__.py +++ b/packages/gapic-generator/test_utils/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index c333de24aff6..3069bdfad130 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -28,16 +28,17 @@ "3.9", "3.10", "3.11", + "3.12" ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +PACKAGE_NAME = 'google-cloud-asset' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" +DEFAULT_PYTHON_VERSION = "3.12" nox.sessions = [ "unit", @@ -48,7 +49,6 @@ "docs", "blacken", "lint", - "lint_setup_py", ] @nox.session(python=ALL_PYTHON) @@ -93,7 +93,7 @@ def mypy(session): session.install('.') session.run( 'mypy', - '--explicit-package-bases', + '-p', 'google', ) @@ -175,10 +175,3 @@ def blacken(session): "black", *BLACK_PATHS, ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 3c09a1ca6a89..4fba230046d4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -58,8 +58,6 @@ if package.startswith("google") ] -namespaces = ["google", "google.cloud"] - setuptools.setup( name=name, version=version, @@ -80,6 +78,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", 
"Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index b4f8b235098e..4fcb174b02aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -28,16 +28,17 @@ "3.9", "3.10", "3.11", + "3.12" ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +PACKAGE_NAME = 'google-iam-credentials' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" +DEFAULT_PYTHON_VERSION = "3.12" nox.sessions = [ "unit", @@ -48,7 +49,6 @@ "docs", "blacken", "lint", - "lint_setup_py", ] @nox.session(python=ALL_PYTHON) @@ -93,7 +93,7 @@ def mypy(session): session.install('.') session.run( 'mypy', - '--explicit-package-bases', + '-p', 'google', ) @@ -175,10 +175,3 @@ def blacken(session): "black", *BLACK_PATHS, ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index e4ab84289d68..4e5b752ff3a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -55,8 +55,6 @@ if package.startswith("google") ] 
-namespaces = ["google", "google.iam"] - setuptools.setup( name=name, version=version, @@ -77,6 +75,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 6d840f965896..2c53e9a6d5de 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -28,16 +28,17 @@ "3.9", "3.10", "3.11", + "3.12" ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +PACKAGE_NAME = 'google-cloud-eventarc' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" +DEFAULT_PYTHON_VERSION = "3.12" nox.sessions = [ "unit", @@ -48,7 +49,6 @@ "docs", "blacken", "lint", - "lint_setup_py", ] @nox.session(python=ALL_PYTHON) @@ -93,7 +93,7 @@ def mypy(session): session.install('.') session.run( 'mypy', - '--explicit-package-bases', + '-p', 'google', ) @@ -175,10 +175,3 @@ def blacken(session): "black", *BLACK_PATHS, ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index f07bbc2d340c..2d804f70c977 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -56,8 +56,6 @@ if package.startswith("google") ] -namespaces = ["google", "google.cloud"] - setuptools.setup( name=name, version=version, @@ -78,6 +76,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index db2905b4a388..dc4b148f1bba 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -28,16 +28,17 @@ "3.9", "3.10", "3.11", + "3.12" ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +PACKAGE_NAME = 'google-cloud-logging' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" +DEFAULT_PYTHON_VERSION = "3.12" nox.sessions = [ "unit", @@ -48,7 +49,6 @@ "docs", "blacken", "lint", - "lint_setup_py", ] @nox.session(python=ALL_PYTHON) @@ -93,7 +93,7 @@ def mypy(session): session.install('.') session.run( 'mypy', - '--explicit-package-bases', + '-p', 'google', ) @@ -175,10 +175,3 @@ def blacken(session): "black", *BLACK_PATHS, ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 2dc29ddaaea2..09251f4724b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -55,8 +55,6 @@ if package.startswith("google") ] -namespaces = ["google", "google.cloud"] - setuptools.setup( name=name, version=version, @@ -77,6 +75,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 0b02ca125e95..21d26879fa2b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -28,16 +28,17 @@ "3.9", "3.10", "3.11", + "3.12" ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") +PACKAGE_NAME = 'google-cloud-redis' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" +DEFAULT_PYTHON_VERSION = "3.12" nox.sessions = [ "unit", @@ -48,7 +49,6 @@ "docs", "blacken", "lint", - "lint_setup_py", ] @nox.session(python=ALL_PYTHON) @@ -93,7 +93,7 @@ def mypy(session): session.install('.') session.run( 'mypy', - '--explicit-package-bases', + '-p', 'google', ) @@ -175,10 +175,3 @@ def blacken(session): "black", *BLACK_PATHS, ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including 
RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 66ea605b434e..4138dae863a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -55,8 +55,6 @@ if package.startswith("google") ] -namespaces = ["google", "google.cloud"] - setuptools.setup( name=name, version=version, @@ -77,6 +75,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], From 9f6dd42a2d161dd2cddabf63b72315540aa528eb Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 13 Nov 2023 14:33:00 -0500 Subject: [PATCH 1081/1339] feat: Add support for python 312 (ads templates) (#1861) --- .../%name/%version/%sub/gapic_version.py.j2 | 5 + .../%sub/services/%service/client.py.j2 | 13 +-- .../services/%service/transports/base.py.j2 | 13 +-- .../%namespace/%name/%version/__init__.py.j2 | 5 + .../gapic/ads-templates/.coveragerc.j2 | 5 - .../gapic/ads-templates/setup.py.j2 | 101 +++++++++++------- .../gapic/templates/setup.py.j2 | 9 +- .../tests/integration/goldens/asset/setup.py | 9 +- .../integration/goldens/credentials/setup.py | 9 +- .../integration/goldens/eventarc/setup.py | 9 +- .../integration/goldens/logging/setup.py | 9 +- .../tests/integration/goldens/redis/setup.py | 9 +- 12 files changed, 119 insertions(+), 77 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/gapic_version.py.j2 diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/gapic_version.py.j2 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/gapic_version.py.j2 new file mode 100644 index 000000000000..ef63e0d03bf7 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/gapic_version.py.j2 @@ -0,0 +1,5 @@ +{% extends '_base.py.j2' %} +{% block content %} + +__version__ = "0.0.0" +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 6c98bed39447..abde0499e53d 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -6,7 +6,6 @@ from collections import OrderedDict import os import re from typing import Callable, Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast -import pkg_resources {% if service.any_deprecated %} import warnings {% endif %} @@ -21,6 +20,9 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +{% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} +from {{package_path}} import gapic_version as package_version + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER @@ -806,14 +808,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% endif %} -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "{{ api.naming.warehouse_package_name }}", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) __all__ = ( diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index 9c3bb8fa6399..fd7832ef81f1 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -4,7 +4,6 @@ import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources import google.auth # type: ignore import google.api_core # type: ignore @@ -42,14 +41,10 @@ from google.longrunning import operations_pb2 # type: ignore {% endif %} {% endfilter %} -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - '{{ api.naming.warehouse_package_name }}', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +{% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} +from {{package_path}} import gapic_version as package_version + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) class {{ service.name }}Transport(abc.ABC): diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index 8ec595555f13..a7383b72e675 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -5,6 +5,11 @@ import importlib import sys +{% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.versioned_module_name %} +from {{package_path}} import gapic_version as package_version + +__version__ = package_version.__version__ + if sys.version_info < (3, 7): raise ImportError('This module requires Python 3.7 or later.') diff --git a/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 b/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 index 9d0b8441dff7..240282108674 100644 --- a/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 +++ b/packages/gapic-generator/gapic/ads-templates/.coveragerc.j2 @@ -11,11 +11,6 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound # This is used to indicate a python version mismatch, # which is not easily tested in unit tests. 
raise ImportError diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index b3a8b6d77a2e..f848eeb825a0 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -2,36 +2,58 @@ {% block content %} +import os +import re + import setuptools # type: ignore +package_root = os.path.abspath(os.path.dirname(__file__)) -setuptools.setup( - name='{{ api.naming.warehouse_package_name }}', - version='0.0.1', - {% if api.naming.namespace %} - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages={{ api.naming.namespace_packages }}, - {% else %} - packages=setuptools.find_packages(), - {% endif %} - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - {# TODO(dovs): remove when 1.x deprecation is complete #} - {% if 'rest' in opts.transport %} - "google-api-core[grpc] >= 2.10.0, < 3.0.0dev", - {% else %} - "google-api-core[grpc] >= 1.28.0, < 3.0.0dev", - {% endif %} - "googleapis-common-protos >= 1.53.0", - "grpcio >= 1.10.0", - "proto-plus >= 1.19.4, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", +name = '{{ api.naming.warehouse_package_name }}' + +{% set warehouse_description = api.naming.warehouse_package_name.replace('-',' ')|title %} +{% set package_path = api.naming.module_namespace|join('/') + "/" + api.naming.module_name + "/" + api.naming.version %} + +description = "{{ warehouse_description }} API client library" + +version = None + +with open(os.path.join(package_root, '{{ package_path }}/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + 
"google-api-core[grpc] >= 2.10.0, < 3.0.0dev", + "googleapis-common-protos >= 1.53.0", + "grpcio >= 1.10.0", + "proto-plus >= 1.19.4, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", {% if api.requires_package(('google', 'iam', 'v1')) %} - 'grpc-google-iam-v1', + "grpc-google-iam-v1", {% endif %} - ), - python_requires='>=3.7',{# Lazy import requires module-level getattr #} +] + +package_root = os.path.abspath(os.path.dirname(__file__)) + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("{{ api.naming.namespace_packages|first }}") +] + +setuptools.setup( + name=name, + version=version, + description=description, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", setup_requires=[ 'libcst >= 0.2.5', ], @@ -39,18 +61,25 @@ setuptools.setup( 'scripts/fixup_{{ api.naming.versioned_module_name }}_keywords.py', ], classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Topic :: Internet", ], + platforms="Posix; MacOS X; Windows", + packages=packages, + 
python_requires=">=3.7", + install_requires=dependencies, + include_package_data=True, zip_safe=False, ) {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 374f9e6fdc34..7237ffdabc1e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -4,6 +4,7 @@ import io import os +import re import setuptools # type: ignore @@ -16,10 +17,12 @@ name = '{{ api.naming.warehouse_package_name }}' description = "{{ warehouse_description }} API client library" -version = {} +version = None + with open(os.path.join(package_root, '{{ package_path }}/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] if version[0] == "0": release_status = "Development Status :: 4 - Beta" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 4fba230046d4..022655f4439c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -15,6 +15,7 @@ # import io import os +import re import setuptools # type: ignore @@ -25,10 +26,12 @@ description = "Google Cloud Asset API client library" -version = {} +version = None + with open(os.path.join(package_root, 'google/cloud/asset/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] if version[0] == "0": release_status = "Development Status :: 4 - Beta" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 4e5b752ff3a6..4a285746ad4f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -15,6 +15,7 @@ # import io import os +import re import setuptools # type: ignore @@ -25,10 +26,12 @@ description = "Google Iam Credentials API client library" -version = {} +version = None + with open(os.path.join(package_root, 'google/iam/credentials/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] if version[0] == "0": release_status = "Development Status :: 4 - Beta" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 2d804f70c977..cc83573f7bc7 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -15,6 +15,7 @@ # import io import os +import re import setuptools # type: ignore @@ -25,10 +26,12 @@ description = "Google Cloud Eventarc API client library" -version = {} +version = None + with open(os.path.join(package_root, 'google/cloud/eventarc/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] if version[0] == "0": release_status = "Development Status :: 4 - Beta" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 09251f4724b0..4e7788ed227d 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -15,6 +15,7 @@ # import io import os +import re import setuptools # type: ignore @@ -25,10 +26,12 @@ description = "Google Cloud Logging API client library" -version = {} +version = None + with open(os.path.join(package_root, 'google/cloud/logging/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] if version[0] == "0": release_status = "Development Status :: 4 - Beta" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 4138dae863a8..d197c4a22e8b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -15,6 +15,7 @@ # import io import os +import re import setuptools # type: ignore @@ -25,10 +26,12 @@ description = "Google Cloud Redis API client library" -version = {} +version = None + with open(os.path.join(package_root, 'google/cloud/redis/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] if version[0] == "0": release_status = "Development Status :: 4 - Beta" From 13f4f36c70da4f178065abf783820ef0f466fe5f Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 16 Nov 2023 15:00:19 -0500 Subject: [PATCH 1082/1339] fix: bump proto-plus version to 1.22.3 (#1863) Co-authored-by: omair --- packages/gapic-generator/gapic/ads-templates/setup.py.j2 | 3 +-- packages/gapic-generator/gapic/templates/setup.py.j2 | 3 +-- .../gapic/templates/testing/constraints-3.7.txt.j2 | 2 +- 
.../gapic-generator/tests/integration/goldens/asset/setup.py | 3 +-- .../integration/goldens/asset/testing/constraints-3.7.txt | 2 +- .../tests/integration/goldens/credentials/setup.py | 3 +-- .../goldens/credentials/testing/constraints-3.7.txt | 2 +- .../tests/integration/goldens/eventarc/setup.py | 3 +-- .../integration/goldens/eventarc/testing/constraints-3.7.txt | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 3 +-- .../integration/goldens/logging/testing/constraints-3.7.txt | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 3 +-- .../integration/goldens/redis/testing/constraints-3.7.txt | 2 +- 13 files changed, 13 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index f848eeb825a0..68a55721f296 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -32,8 +32,7 @@ dependencies = [ "google-api-core[grpc] >= 2.10.0, < 3.0.0dev", "googleapis-common-protos >= 1.53.0", "grpcio >= 1.10.0", - "proto-plus >= 1.19.4, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", {% if api.requires_package(('google', 'iam', 'v1')) %} "grpc-google-iam-v1", {% endif %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 7237ffdabc1e..1e3b78d0be3e 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -31,8 +31,7 @@ else: dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", {# Explicitly exclude protobuf versions mentioned 
in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index ae6c992c406c..7c6c2ad05d1b 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -6,7 +6,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -proto-plus==1.22.0 +proto-plus==1.22.3 protobuf==3.19.5 {% for package_tuple, package_info in pypi_packages.items() %} {# Quick check to make sure the package is different from this setup.py #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 022655f4439c..18e8931237a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -40,8 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", "google-cloud-os-config >= 1.0.0, <2.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index a77838fd956a..7a1dfde768ed 
100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -proto-plus==1.22.0 +proto-plus==1.22.3 protobuf==3.19.5 google-cloud-access-context-manager==0.1.2 google-cloud-os-config==1.0.0 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 4a285746ad4f..545b49ba7600 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -40,8 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-iam-credentials" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index 6c44adfea7ee..185f7d366c2f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -5,5 +5,5 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -proto-plus==1.22.0 +proto-plus==1.22.3 protobuf==3.19.5 diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index cc83573f7bc7..71b39cbc2abf 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -40,8 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 2beecf99e0be..44ffd04543a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -5,6 +5,6 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -proto-plus==1.22.0 +proto-plus==1.22.3 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 4e7788ed227d..f07fc0881f57 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -40,8 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - 
"proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-logging" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index 6c44adfea7ee..185f7d366c2f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -5,5 +5,5 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -proto-plus==1.22.0 +proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index d197c4a22e8b..96caa9d6cf27 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -40,8 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-redis" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index 6c44adfea7ee..185f7d366c2f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -5,5 +5,5 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 -proto-plus==1.22.0 +proto-plus==1.22.3 protobuf==3.19.5 From 73587f34ca64d45b840689ff017ed58f0e24907f Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 20 Nov 2023 10:53:17 -0500 Subject: [PATCH 1083/1339] chore: update url in setup.py (#1867) Co-authored-by: ohmayr --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 1e3b78d0be3e..1d5ee4f43c52 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -43,7 +43,7 @@ dependencies = [ {% endif %} {% endfor %} ] -url = "https://github.com/googleapis/python-{{ api.naming.warehouse_package_name|replace("google-cloud-", "")|replace("google-", "") }}" +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/{{ api.naming.warehouse_package_name }}" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 18e8931237a8..7c6f39008ab2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -46,7 +46,7 @@ "google-cloud-os-config >= 1.0.0, 
<2.0.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] -url = "https://github.com/googleapis/python-asset" +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 545b49ba7600..78b4f48770b3 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -43,7 +43,7 @@ "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -url = "https://github.com/googleapis/python-iam-credentials" +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-iam-credentials" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 71b39cbc2abf..bd6939bce7a9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -44,7 +44,7 @@ "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] -url = "https://github.com/googleapis/python-eventarc" +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index f07fc0881f57..31b6686b9be2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -43,7 +43,7 @@ "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -url = "https://github.com/googleapis/python-logging" +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 96caa9d6cf27..e13752192174 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -43,7 +43,7 @@ "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -url = "https://github.com/googleapis/python-redis" +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" package_root = os.path.abspath(os.path.dirname(__file__)) From 2b018a288b9c1fb1c30cf52c0675fee357c3cfb8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 13:57:15 -0500 Subject: [PATCH 1084/1339] chore(main): release 1.13.0 (#1860) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ea9f87f635e9..1a1c1701d4ea 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.13.0](https://github.com/googleapis/gapic-generator-python/compare/v1.12.0...v1.13.0) (2023-11-20) + + +### 
Features + +* Add support for python 3.12 ([#1816](https://github.com/googleapis/gapic-generator-python/issues/1816)) ([b65898e](https://github.com/googleapis/gapic-generator-python/commit/b65898e11f6b7210a0614eb1ece4b139541f17cd)) +* Add support for python 312 (ads templates) ([#1861](https://github.com/googleapis/gapic-generator-python/issues/1861)) ([d65a540](https://github.com/googleapis/gapic-generator-python/commit/d65a54045abb9b25364e8cfb0ad4c7e5d95917d2)) + + +### Bug Fixes + +* Bump proto-plus version to 1.22.3 ([#1863](https://github.com/googleapis/gapic-generator-python/issues/1863)) ([2ad9be2](https://github.com/googleapis/gapic-generator-python/commit/2ad9be28500972777cec69097be6581eaefb89e4)) + ## [1.12.0](https://github.com/googleapis/gapic-generator-python/compare/v1.11.11...v1.12.0) (2023-11-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 396877666a6c..473cd9abd26a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.12.0" +version = "1.13.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From f7a563852575049f17d211fbc52bc855664839cb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 15:49:39 -0500 Subject: [PATCH 1085/1339] chore: bump cryptography from 41.0.5 to 41.0.6 in /synthtool/gcp/templates/python_library/.kokoro (#1879) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 48 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml 
b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 453b540c1e58..773c1dfd2146 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 8957e21104e2..e5c1ffca94b7 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - 
--hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + 
--hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From 562dc873e858b5cc31b17dbcc660428ebe83e71c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 8 Dec 2023 21:40:50 +0100 Subject: [PATCH 1086/1339] chore(deps): update all dependencies (#1789) Co-authored-by: Victor Chudnovsky Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/Dockerfile | 2 +- packages/gapic-generator/requirements.txt | 417 +++++++++++----------- 2 files changed, 209 insertions(+), 210 deletions(-) diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index 840b88f6ae30..beda35897ba6 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.11-slim +FROM python:3.12-slim # Install system packages. 
RUN apt-get update \ diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2e878ac95621..d8f1788c5570 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,105 +4,105 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -cachetools==5.3.1 \ - --hash=sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590 \ - --hash=sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -charset-normalizer==3.3.0 \ - --hash=sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843 \ - --hash=sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786 \ - --hash=sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e \ - --hash=sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8 \ - --hash=sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4 \ - --hash=sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa \ - --hash=sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d \ - --hash=sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82 \ - --hash=sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7 \ - --hash=sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895 \ - --hash=sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d \ - --hash=sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a \ - 
--hash=sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382 \ - --hash=sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678 \ - --hash=sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b \ - --hash=sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e \ - --hash=sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741 \ - --hash=sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4 \ - --hash=sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596 \ - --hash=sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9 \ - --hash=sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69 \ - --hash=sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c \ - --hash=sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77 \ - --hash=sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13 \ - --hash=sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459 \ - --hash=sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e \ - --hash=sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7 \ - --hash=sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908 \ - --hash=sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a \ - --hash=sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f \ - --hash=sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8 \ - --hash=sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482 \ - --hash=sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d \ - --hash=sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d \ - --hash=sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545 \ - --hash=sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34 \ - 
--hash=sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86 \ - --hash=sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6 \ - --hash=sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe \ - --hash=sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e \ - --hash=sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc \ - --hash=sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7 \ - --hash=sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd \ - --hash=sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c \ - --hash=sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557 \ - --hash=sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a \ - --hash=sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89 \ - --hash=sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078 \ - --hash=sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e \ - --hash=sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4 \ - --hash=sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403 \ - --hash=sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0 \ - --hash=sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89 \ - --hash=sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115 \ - --hash=sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9 \ - --hash=sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05 \ - --hash=sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a \ - --hash=sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec \ - --hash=sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56 \ - --hash=sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38 \ - 
--hash=sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479 \ - --hash=sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c \ - --hash=sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e \ - --hash=sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd \ - --hash=sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186 \ - --hash=sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455 \ - --hash=sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c \ - --hash=sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65 \ - --hash=sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78 \ - --hash=sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287 \ - --hash=sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df \ - --hash=sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43 \ - --hash=sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1 \ - --hash=sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7 \ - --hash=sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989 \ - --hash=sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a \ - --hash=sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63 \ - --hash=sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884 \ - --hash=sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649 \ - --hash=sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810 \ - --hash=sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828 \ - --hash=sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4 \ - --hash=sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2 \ - --hash=sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd \ - 
--hash=sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5 \ - --hash=sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe \ - --hash=sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293 \ - --hash=sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e \ - --hash=sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e \ - --hash=sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8 +charset-normalizer==3.3.2 \ + --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ + --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ + --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ + --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ + --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ + --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ + --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ + --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ + --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ + --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ + --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ + --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ + --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ + --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ + --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ + --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ + --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ + 
--hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ + --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ + --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ + --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ + --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ + --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ + --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ + --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ + --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ + --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ + --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ + --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ + --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ + --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ + --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ + --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ + --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ + --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ + --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ + --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ + --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ + --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ + --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ + --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ + 
--hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ + --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ + --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ + --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ + --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ + --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ + --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ + --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ + --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ + --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ + --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ + --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ + --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ + --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ + --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ + --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ + --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ + --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ + --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ + --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ + --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ + --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ + --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ + --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ + 
--hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ + --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ + --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ + --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ + --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ + --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ + --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ + --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ + --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ + --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ + --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ + --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ + --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ + --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ + --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ + --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ + --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ + --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ + --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ + --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ + --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ + --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ + --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ + --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ + 
--hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 # via requests click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ @@ -112,17 +112,17 @@ exceptiongroup==1.1.3 \ --hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \ --hash=sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3 # via pytest -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.13.0 \ + --hash=sha256:44ed591f6c3a0c1ac7a91867d2b3841f92839f860f3d3fe26c464dbd50f97094 \ + --hash=sha256:abc1da067c9026c6cd15dfbd4f6ad07735a62eeadc541d1cc296314447fc3aad # via -r requirements.in -google-auth==2.23.2 \ - --hash=sha256:5a9af4be520ba33651471a0264eead312521566f44631cbb621164bc30c8fd40 \ - --hash=sha256:c2e253347579d483004f17c3bd0bf92e611ef6c7ba24d41c5c59f2e7aeeaf088 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via google-api-core -googleapis-common-protos[grpc]==1.60.0 \ - --hash=sha256:69f9bbcc6acde92cab2db95ce30a70bd2b81d20b12eff3f1aabaffcbe8a93918 \ - --hash=sha256:e73ebb404098db405ba95d1e1ae0aa91c3e15a71da031a2eeb6b2e23e7bc3708 +googleapis-common-protos[grpc]==1.61.0 \ + --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via # -r requirements.in # google-api-core @@ -131,61 +131,61 @@ grpc-google-iam-v1==0.12.6 \ --hash=sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f \ --hash=sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c # via -r requirements.in -grpcio==1.59.0 \ - 
--hash=sha256:0ae444221b2c16d8211b55326f8ba173ba8f8c76349bfc1768198ba592b58f74 \ - --hash=sha256:0b84445fa94d59e6806c10266b977f92fa997db3585f125d6b751af02ff8b9fe \ - --hash=sha256:14890da86a0c0e9dc1ea8e90101d7a3e0e7b1e71f4487fab36e2bfd2ecadd13c \ - --hash=sha256:15f03bd714f987d48ae57fe092cf81960ae36da4e520e729392a59a75cda4f29 \ - --hash=sha256:1a839ba86764cc48226f50b924216000c79779c563a301586a107bda9cbe9dcf \ - --hash=sha256:225e5fa61c35eeaebb4e7491cd2d768cd8eb6ed00f2664fa83a58f29418b39fd \ - --hash=sha256:228b91ce454876d7eed74041aff24a8f04c0306b7250a2da99d35dd25e2a1211 \ - --hash=sha256:2ea95cd6abbe20138b8df965b4a8674ec312aaef3147c0f46a0bac661f09e8d0 \ - --hash=sha256:2f120d27051e4c59db2f267b71b833796770d3ea36ca712befa8c5fff5da6ebd \ - --hash=sha256:34341d9e81a4b669a5f5dca3b2a760b6798e95cdda2b173e65d29d0b16692857 \ - --hash=sha256:3859917de234a0a2a52132489c4425a73669de9c458b01c9a83687f1f31b5b10 \ - --hash=sha256:38823bd088c69f59966f594d087d3a929d1ef310506bee9e3648317660d65b81 \ - --hash=sha256:38da5310ef84e16d638ad89550b5b9424df508fd5c7b968b90eb9629ca9be4b9 \ - --hash=sha256:3b8ff795d35a93d1df6531f31c1502673d1cebeeba93d0f9bd74617381507e3f \ - --hash=sha256:50eff97397e29eeee5df106ea1afce3ee134d567aa2c8e04fabab05c79d791a7 \ - --hash=sha256:5711c51e204dc52065f4a3327dca46e69636a0b76d3e98c2c28c4ccef9b04c52 \ - --hash=sha256:598f3530231cf10ae03f4ab92d48c3be1fee0c52213a1d5958df1a90957e6a88 \ - --hash=sha256:611d9aa0017fa386809bddcb76653a5ab18c264faf4d9ff35cb904d44745f575 \ - --hash=sha256:61bc72a00ecc2b79d9695220b4d02e8ba53b702b42411397e831c9b0589f08a3 \ - --hash=sha256:63982150a7d598281fa1d7ffead6096e543ff8be189d3235dd2b5604f2c553e5 \ - --hash=sha256:6c4b1cc3a9dc1924d2eb26eec8792fedd4b3fcd10111e26c1d551f2e4eda79ce \ - --hash=sha256:81d86a096ccd24a57fa5772a544c9e566218bc4de49e8c909882dae9d73392df \ - --hash=sha256:849c47ef42424c86af069a9c5e691a765e304079755d5c29eff511263fad9c2a \ - --hash=sha256:871371ce0c0055d3db2a86fdebd1e1d647cf21a8912acc30052660297a5a6901 \ - 
--hash=sha256:8cd2d38c2d52f607d75a74143113174c36d8a416d9472415eab834f837580cf7 \ - --hash=sha256:936b2e04663660c600d5173bc2cc84e15adbad9c8f71946eb833b0afc205b996 \ - --hash=sha256:93e9cb546e610829e462147ce724a9cb108e61647a3454500438a6deef610be1 \ - --hash=sha256:956f0b7cb465a65de1bd90d5a7475b4dc55089b25042fe0f6c870707e9aabb1d \ - --hash=sha256:986de4aa75646e963466b386a8c5055c8b23a26a36a6c99052385d6fe8aaf180 \ - --hash=sha256:aca8a24fef80bef73f83eb8153f5f5a0134d9539b4c436a716256b311dda90a6 \ - --hash=sha256:acf70a63cf09dd494000007b798aff88a436e1c03b394995ce450be437b8e54f \ - --hash=sha256:b34c7a4c31841a2ea27246a05eed8a80c319bfc0d3e644412ec9ce437105ff6c \ - --hash=sha256:b95ec8ecc4f703f5caaa8d96e93e40c7f589bad299a2617bdb8becbcce525539 \ - --hash=sha256:ba0ca727a173ee093f49ead932c051af463258b4b493b956a2c099696f38aa66 \ - --hash=sha256:c041a91712bf23b2a910f61e16565a05869e505dc5a5c025d429ca6de5de842c \ - --hash=sha256:c0488c2b0528e6072010182075615620071371701733c63ab5be49140ed8f7f0 \ - --hash=sha256:c173a87d622ea074ce79be33b952f0b424fa92182063c3bda8625c11d3585d09 \ - --hash=sha256:c251d22de8f9f5cca9ee47e4bade7c5c853e6e40743f47f5cc02288ee7a87252 \ - --hash=sha256:c4dfdb49f4997dc664f30116af2d34751b91aa031f8c8ee251ce4dcfc11277b0 \ - --hash=sha256:ca87ee6183421b7cea3544190061f6c1c3dfc959e0b57a5286b108511fd34ff4 \ - --hash=sha256:ceb1e68135788c3fce2211de86a7597591f0b9a0d2bb80e8401fd1d915991bac \ - --hash=sha256:d09bd2a4e9f5a44d36bb8684f284835c14d30c22d8ec92ce796655af12163588 \ - --hash=sha256:d0fcf53df684fcc0154b1e61f6b4a8c4cf5f49d98a63511e3f30966feff39cd0 \ - --hash=sha256:d74f7d2d7c242a6af9d4d069552ec3669965b74fed6b92946e0e13b4168374f9 \ - --hash=sha256:de2599985b7c1b4ce7526e15c969d66b93687571aa008ca749d6235d056b7205 \ - --hash=sha256:e5378785dce2b91eb2e5b857ec7602305a3b5cf78311767146464bfa365fc897 \ - --hash=sha256:ec78aebb9b6771d6a1de7b6ca2f779a2f6113b9108d486e904bde323d51f5589 \ - --hash=sha256:f1feb034321ae2f718172d86b8276c03599846dc7bb1792ae370af02718f91c5 \ - 
--hash=sha256:f21917aa50b40842b51aff2de6ebf9e2f6af3fe0971c31960ad6a3a2b24988f4 \ - --hash=sha256:f367e4b524cb319e50acbdea57bb63c3b717c5d561974ace0b065a648bb3bad3 \ - --hash=sha256:f6cfe44a5d7c7d5f1017a7da1c8160304091ca5dc64a0f85bca0d63008c3137a \ - --hash=sha256:fa66cac32861500f280bb60fe7d5b3e22d68c51e18e65367e38f8669b78cea3b \ - --hash=sha256:fc8bf2e7bc725e76c0c11e474634a08c8f24bcf7426c0c6d60c8f9c6e70e4d4a \ - --hash=sha256:fe976910de34d21057bcb53b2c5e667843588b48bf11339da2a75f5c4c5b4055 +grpcio==1.59.2 \ + --hash=sha256:023088764012411affe7db183d1ada3ad9daf2e23ddc719ff46d7061de661340 \ + --hash=sha256:08d77e682f2bf730a4961eea330e56d2f423c6a9b91ca222e5b1eb24a357b19f \ + --hash=sha256:0a4a3833c0e067f3558538727235cd8a49709bff1003200bbdefa2f09334e4b1 \ + --hash=sha256:0a754aff9e3af63bdc4c75c234b86b9d14e14a28a30c4e324aed1a9b873d755f \ + --hash=sha256:11168ef43e4a43ff1b1a65859f3e0ef1a173e277349e7fb16923ff108160a8cd \ + --hash=sha256:128e20f57c5f27cb0157e73756d1586b83c1b513ebecc83ea0ac37e4b0e4e758 \ + --hash=sha256:1f9524d1d701e399462d2c90ba7c193e49d1711cf429c0d3d97c966856e03d00 \ + --hash=sha256:1ff16d68bf453275466a9a46739061a63584d92f18a0f5b33d19fc97eb69867c \ + --hash=sha256:2067274c88bc6de89c278a672a652b4247d088811ece781a4858b09bdf8448e3 \ + --hash=sha256:2171c39f355ba5b551c5d5928d65aa6c69807fae195b86ef4a7d125bcdb860a9 \ + --hash=sha256:242adc47725b9a499ee77c6a2e36688fa6c96484611f33b1be4c57ab075a92dd \ + --hash=sha256:27f879ae604a7fcf371e59fba6f3ff4635a4c2a64768bd83ff0cac503142fef4 \ + --hash=sha256:2b230028a008ae1d0f430acb227d323ff8a619017415cf334c38b457f814119f \ + --hash=sha256:3059668df17627f0e0fa680e9ef8c995c946c792612e9518f5cc1503be14e90b \ + --hash=sha256:31176aa88f36020055ace9adff2405a33c8bdbfa72a9c4980e25d91b2f196873 \ + --hash=sha256:36f53c2b3449c015880e7d55a89c992c357f176327b0d2873cdaaf9628a37c69 \ + --hash=sha256:3b4368b33908f683a363f376dfb747d40af3463a6e5044afee07cf9436addf96 \ + 
--hash=sha256:3c61d641d4f409c5ae46bfdd89ea42ce5ea233dcf69e74ce9ba32b503c727e29 \ + --hash=sha256:4abb717e320e74959517dc8e84a9f48fbe90e9abe19c248541e9418b1ce60acd \ + --hash=sha256:4c93f4abbb54321ee6471e04a00139c80c754eda51064187963ddf98f5cf36a4 \ + --hash=sha256:535561990e075fa6bd4b16c4c3c1096b9581b7bb35d96fac4650f1181e428268 \ + --hash=sha256:53c9aa5ddd6857c0a1cd0287225a2a25873a8e09727c2e95c4aebb1be83a766a \ + --hash=sha256:5d573e70a6fe77555fb6143c12d3a7d3fa306632a3034b4e7c59ca09721546f8 \ + --hash=sha256:6009386a2df66159f64ac9f20425ae25229b29b9dd0e1d3dd60043f037e2ad7e \ + --hash=sha256:686e975a5d16602dc0982c7c703948d17184bd1397e16c8ee03511ecb8c4cdda \ + --hash=sha256:6959fb07e8351e20501ffb8cc4074c39a0b7ef123e1c850a7f8f3afdc3a3da01 \ + --hash=sha256:6b25ed37c27e652db01be341af93fbcea03d296c024d8a0e680017a268eb85dd \ + --hash=sha256:6da6dea3a1bacf99b3c2187e296db9a83029ed9c38fd4c52b7c9b7326d13c828 \ + --hash=sha256:72ca2399097c0b758198f2ff30f7178d680de8a5cfcf3d9b73a63cf87455532e \ + --hash=sha256:73abb8584b0cf74d37f5ef61c10722adc7275502ab71789a8fe3cb7ef04cf6e2 \ + --hash=sha256:74100fecaec8a535e380cf5f2fb556ff84957d481c13e54051c52e5baac70541 \ + --hash=sha256:75c6ecb70e809cf1504465174343113f51f24bc61e22a80ae1c859f3f7034c6d \ + --hash=sha256:7cf05053242f61ba94014dd3a986e11a083400a32664058f80bf4cf817c0b3a1 \ + --hash=sha256:9411e24328a2302e279e70cae6e479f1fddde79629fcb14e03e6d94b3956eabf \ + --hash=sha256:a213acfbf186b9f35803b52e4ca9addb153fc0b67f82a48f961be7000ecf6721 \ + --hash=sha256:bb7e0fe6ad73b7f06d7e2b689c19a71cf5cc48f0c2bf8608469e51ffe0bd2867 \ + --hash=sha256:c2504eed520958a5b77cc99458297cb7906308cb92327f35fb7fbbad4e9b2188 \ + --hash=sha256:c35aa9657f5d5116d23b934568e0956bd50c615127810fffe3ac356a914c176a \ + --hash=sha256:c5f09cffa619adfb44799fa4a81c2a1ad77c887187613fb0a8f201ab38d89ba1 \ + --hash=sha256:c978f864b35f2261e0819f5cd88b9830b04dc51bcf055aac3c601e525a10d2ba \ + --hash=sha256:cbe946b3e6e60a7b4618f091e62a029cb082b109a9d6b53962dd305087c6e4fd \ + 
--hash=sha256:cc3e4cd087f07758b16bef8f31d88dbb1b5da5671d2f03685ab52dece3d7a16e \ + --hash=sha256:cf0dead5a2c5a3347af2cfec7131d4f2a2e03c934af28989c9078f8241a491fa \ + --hash=sha256:d2794f0e68b3085d99b4f6ff9c089f6fdd02b32b9d3efdfbb55beac1bf22d516 \ + --hash=sha256:d2fa68a96a30dd240be80bbad838a0ac81a61770611ff7952b889485970c4c71 \ + --hash=sha256:d6f70406695e3220f09cd7a2f879333279d91aa4a8a1d34303b56d61a8180137 \ + --hash=sha256:d8f9cd4ad1be90b0cf350a2f04a38a36e44a026cac1e036ac593dc48efe91d52 \ + --hash=sha256:da2d94c15f88cd40d7e67f7919d4f60110d2b9d5b1e08cf354c2be773ab13479 \ + --hash=sha256:e1727c1c0e394096bb9af185c6923e8ea55a5095b8af44f06903bcc0e06800a2 \ + --hash=sha256:e420ced29b5904cdf9ee5545e23f9406189d8acb6750916c2db4793dada065c6 \ + --hash=sha256:e82c5cf1495244adf5252f925ac5932e5fd288b3e5ab6b70bec5593074b7236c \ + --hash=sha256:f1ef0d39bc1feb420caf549b3c657c871cad4ebbcf0580c4d03816b0590de0cf \ + --hash=sha256:f8753a6c88d1d0ba64302309eecf20f70d2770f65ca02d83c2452279085bfcd3 \ + --hash=sha256:f93dbf58f03146164048be5426ffde298b237a5e059144847e4940f5b80172c3 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -205,37 +205,38 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via -r requirements.in -libcst==1.0.1 \ - --hash=sha256:0138068baf09561268c7f079373bda45f0e2b606d2d19df1307ca8a5134fc465 \ - --hash=sha256:119ba709f1dcb785a4458cf36cedb51d6f9cb2eec0acd7bb171f730eac7cb6ce \ - --hash=sha256:1adcfa7cafb6a0d39a1a0bec541355608038b45815e0c5019c95f91921d42884 \ - --hash=sha256:37187337f979ba426d8bfefc08008c3c1b09b9e9f9387050804ed2da88107570 \ - --hash=sha256:414350df5e334ddf0db1732d63da44e81b734d45abe1c597b5e5c0dd46aa4156 \ - --hash=sha256:440887e5f82efb299f2e98d4bfa5663851a878cfc0efed652ab8c50205191436 \ - --hash=sha256:47dba43855e9c7b06d8b256ee81f0ebec6a4f43605456519577e09dfe4b4288c \ - 
--hash=sha256:4840a3de701778f0a19582bb3085c61591329153f801dc25da84689a3733960b \ - --hash=sha256:4b4e336f6d68456017671cdda8ddebf9caebce8052cc21a3f494b03d7bd28386 \ - --hash=sha256:5599166d5fec40e18601fb8868519dde99f77b6e4ad6074958018f9545da7abd \ - --hash=sha256:5e3293e77657ba62533553bb9f0c5fb173780e164c65db1ea2a3e0d03944a284 \ - --hash=sha256:600c4d3a9a2f75d5a055fed713a5a4d812709947909610aa6527abe08a31896f \ - --hash=sha256:6caa33430c0c7a0fcad921b0deeec61ddb96796b6f88dca94966f6db62065f4f \ - --hash=sha256:80423311f09fc5fc3270ede44d30d9d8d3c2d3dd50dbf703a581ca7346949fa6 \ - --hash=sha256:8420926791b0b6206cb831a7ec73d26ae820e65bdf07ce9813c7754c7722c07a \ - --hash=sha256:8c50541c3fd6b1d5a3765c4bb5ee8ecbba9d0e798e48f79fd5adf3b6752de4d0 \ - --hash=sha256:8d31ce2790eab59c1bd8e33fe72d09cfc78635c145bdc3f08296b360abb5f443 \ - --hash=sha256:967c66fabd52102954207bf1541312b467afc210fdf7033f32da992fb6c2372c \ - --hash=sha256:9a4931feceab171e6fce73de94e13880424367247dad6ff2b49cabfec733e144 \ - --hash=sha256:9d6dec2a3c443792e6af7c36fadc256e4ea586214c76b52f0d18118811dbe351 \ - --hash=sha256:a6b5aea04c35e13109edad3cf83bc6dcd74309b150a781d2189eecb288b73a87 \ - --hash=sha256:ae49dcbfadefb82e830d41d9f0a1db0af3b771224768f431f1b7b3a9803ed7e3 \ - --hash=sha256:ae7f4e71d714f256b5f2ff98b5a9effba0f9dff4d779d8f35d7eb157bef78f59 \ - --hash=sha256:b0533de4e35396c61aeb3a6266ac30369a855910c2385aaa902ff4aabd60d409 \ - --hash=sha256:b666a605f4205c8357696f3b6571a38f6a8537cdcbb8f357587d35168298af34 \ - --hash=sha256:b97f652b15c50e91df411a9c8d5e6f75882b30743a49b387dcedd3f68ed94d75 \ - --hash=sha256:c90c74a8a314f0774f045122323fb60bacba79cbf5f71883c0848ecd67179541 \ - --hash=sha256:d237e9164a43caa7d6765ee560412264484e7620c546a2ee10a8d01bd56884e0 \ - --hash=sha256:ddd4e0eeec499d1c824ab545e62e957dbbd69a16bc4273208817638eb7d6b3c6 \ - --hash=sha256:f2cb687e1514625e91024e50a5d2e485c0ad3be24f199874ebf32b5de0346150 +libcst==1.1.0 \ + 
--hash=sha256:003e5e83a12eed23542c4ea20fdc8de830887cc03662432bb36f84f8c4841b81 \ + --hash=sha256:0acbacb9a170455701845b7e940e2d7b9519db35a86768d86330a0b0deae1086 \ + --hash=sha256:0bf69cbbab5016d938aac4d3ae70ba9ccb3f90363c588b3b97be434e6ba95403 \ + --hash=sha256:2d37326bd6f379c64190a28947a586b949de3a76be00176b0732c8ee87d67ebe \ + --hash=sha256:3a07ecfabbbb8b93209f952a365549e65e658831e9231649f4f4e4263cad24b1 \ + --hash=sha256:3ebbb9732ae3cc4ae7a0e97890bed0a57c11d6df28790c2b9c869f7da653c7c7 \ + --hash=sha256:4bc745d0c06420fe2644c28d6ddccea9474fb68a2135904043676deb4fa1e6bc \ + --hash=sha256:5297a16e575be8173185e936b7765c89a3ca69d4ae217a4af161814a0f9745a7 \ + --hash=sha256:5f1cd308a4c2f71d5e4eec6ee693819933a03b78edb2e4cc5e3ad1afd5fb3f07 \ + --hash=sha256:63f75656fd733dc20354c46253fde3cf155613e37643c3eaf6f8818e95b7a3d1 \ + --hash=sha256:73c086705ed34dbad16c62c9adca4249a556c1b022993d511da70ea85feaf669 \ + --hash=sha256:75816647736f7e09c6120bdbf408456f99b248d6272277eed9a58cf50fb8bc7d \ + --hash=sha256:78b7a38ec4c1c009ac39027d51558b52851fb9234669ba5ba62283185963a31c \ + --hash=sha256:7ccaf53925f81118aeaadb068a911fac8abaff608817d7343da280616a5ca9c1 \ + --hash=sha256:82d1271403509b0a4ee6ff7917c2d33b5a015f44d1e208abb1da06ba93b2a378 \ + --hash=sha256:8ae11eb1ea55a16dc0cdc61b41b29ac347da70fec14cc4381248e141ee2fbe6c \ + --hash=sha256:8afb6101b8b3c86c5f9cec6b90ab4da16c3c236fe7396f88e8b93542bb341f7c \ + --hash=sha256:8c1f2da45f1c45634090fd8672c15e0159fdc46853336686959b2d093b6e10fa \ + --hash=sha256:97fbc73c87e9040e148881041fd5ffa2a6ebf11f64b4ccb5b52e574b95df1a15 \ + --hash=sha256:99fdc1929703fd9e7408aed2e03f58701c5280b05c8911753a8d8619f7dfdda5 \ + --hash=sha256:9dffa1795c2804d183efb01c0f1efd20a7831db6a21a0311edf90b4100d67436 \ + --hash=sha256:bca1841693941fdd18371824bb19a9702d5784cd347cb8231317dbdc7062c5bc \ + --hash=sha256:c653d9121d6572d8b7f8abf20f88b0a41aab77ff5a6a36e5a0ec0f19af0072e8 \ + --hash=sha256:c8f26250f87ca849a7303ed7a4fd6b2c7ac4dec16b7d7e68ca6a476d7c9bfcdb \ + 
--hash=sha256:cc9b6ac36d7ec9db2f053014ea488086ca2ed9c322be104fbe2c71ca759da4bb \ + --hash=sha256:d22d1abfe49aa60fc61fa867e10875a9b3024ba5a801112f4d7ba42d8d53242e \ + --hash=sha256:d68c34e3038d3d1d6324eb47744cbf13f2c65e1214cf49db6ff2a6603c1cd838 \ + --hash=sha256:e3d8cf974cfa2487b28f23f56c4bff90d550ef16505e58b0dca0493d5293784b \ + --hash=sha256:f36f592e035ef84f312a12b75989dde6a5f6767fe99146cdae6a9ee9aff40dd0 \ + --hash=sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c \ + --hash=sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90 # via -r requirements.in markupsafe==2.1.3 \ --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ @@ -307,20 +308,18 @@ proto-plus==1.22.3 \ --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b # via -r requirements.in -protobuf==4.24.3 \ - --hash=sha256:067f750169bc644da2e1ef18c785e85071b7c296f14ac53e0900e605da588719 \ - --hash=sha256:12e9ad2ec079b833176d2921be2cb24281fa591f0b119b208b788adc48c2561d \ - --hash=sha256:1b182c7181a2891e8f7f3a1b5242e4ec54d1f42582485a896e4de81aa17540c2 \ - --hash=sha256:20651f11b6adc70c0f29efbe8f4a94a74caf61b6200472a9aea6e19898f9fcf4 \ - --hash=sha256:2da777d34b4f4f7613cdf85c70eb9a90b1fbef9d36ae4a0ccfe014b0b07906f1 \ - --hash=sha256:3d42e9e4796a811478c783ef63dc85b5a104b44aaaca85d4864d5b886e4b05e3 \ - --hash=sha256:6e514e8af0045be2b56e56ae1bb14f43ce7ffa0f68b1c793670ccbe2c4fc7d2b \ - --hash=sha256:b0271a701e6782880d65a308ba42bc43874dabd1a0a0f41f72d2dac3b57f8e76 \ - --hash=sha256:ba53c2f04798a326774f0e53b9c759eaef4f6a568ea7072ec6629851c8435959 \ - --hash=sha256:e29d79c913f17a60cf17c626f1041e5288e9885c8579832580209de8b75f2a52 \ - --hash=sha256:f631bb982c5478e0c1c70eab383af74a84be66945ebf5dd6b06fc90079668d0b \ - --hash=sha256:f6ccbcf027761a2978c1406070c3788f6de4a4b2cc20800cc03d52df716ad675 \ - 
--hash=sha256:f6f8dc65625dadaad0c8545319c2e2f0424fede988368893ca3844261342c11a +protobuf==4.25.0 \ + --hash=sha256:1a3ba712877e6d37013cdc3476040ea1e313a6c2e1580836a94f76b3c176d575 \ + --hash=sha256:1a53d6f64b00eecf53b65ff4a8c23dc95df1fa1e97bb06b8122e5a64f49fc90a \ + --hash=sha256:32ac2100b0e23412413d948c03060184d34a7c50b3e5d7524ee96ac2b10acf51 \ + --hash=sha256:5c1203ac9f50e4853b0a0bfffd32c67118ef552a33942982eeab543f5c634395 \ + --hash=sha256:63714e79b761a37048c9701a37438aa29945cd2417a97076048232c1df07b701 \ + --hash=sha256:683dc44c61f2620b32ce4927de2108f3ebe8ccf2fd716e1e684e5a50da154054 \ + --hash=sha256:68f7caf0d4f012fd194a301420cf6aa258366144d814f358c5b32558228afa7c \ + --hash=sha256:b2cf8b5d381f9378afe84618288b239e75665fe58d0f3fd5db400959274296e9 \ + --hash=sha256:c40ff8f00aa737938c5378d461637d15c442a12275a81019cc2fef06d81c9419 \ + --hash=sha256:cf21faba64cd2c9a3ed92b7a67f226296b10159dbb8fbc5e854fc90657d908e4 \ + --hash=sha256:d94a33db8b7ddbd0af7c467475fb9fde0c705fb315a8433c0e2020942b863a1f # via # -r requirements.in # google-api-core @@ -337,13 +336,13 @@ pyasn1-modules==0.3.0 \ --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth -pypandoc==1.11 \ - --hash=sha256:7f6d68db0e57e0f6961bec2190897118c4d305fc2d31c22cd16037f22ee084a5 \ - --hash=sha256:b260596934e9cfc6513056110a7c8600171d414f90558bf4407e68b209be8007 +pypandoc==1.12 \ + --hash=sha256:8f44740a9f074e121d81b489f073160421611d4ead62d1b306aeb11aab3c32df \ + --hash=sha256:efb4f7d68ead8bec32e22b62f02d5608a1700978b51bfc4af286fd6acfe9d218 # via -r requirements.in -pytest==7.4.2 \ - --hash=sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002 \ - --hash=sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069 +pytest==7.4.3 \ + --hash=sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac \ + 
--hash=sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5 # via pytest-asyncio pytest-asyncio==0.21.1 \ --hash=sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d \ From 2d962b3cf5e51a503b13ea950b4b470bf6219fcd Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 13:55:30 +0100 Subject: [PATCH 1087/1339] chore(deps): update all dependencies (#1891) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.bazeliskrc | 2 + .../.github/workflows/tests.yaml | 26 +-- packages/gapic-generator/WORKSPACE | 28 +-- packages/gapic-generator/requirements.txt | 212 +++++++++--------- 4 files changed, 126 insertions(+), 142 deletions(-) create mode 100644 packages/gapic-generator/.bazeliskrc diff --git a/packages/gapic-generator/.bazeliskrc b/packages/gapic-generator/.bazeliskrc new file mode 100644 index 000000000000..74cd900fbd3c --- /dev/null +++ b/packages/gapic-generator/.bazeliskrc @@ -0,0 +1,2 @@ +# See https://github.com/bazelbuild/bazelisk +USE_BAZEL_VERSION=6.3.0 diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index a611be53063f..d209e3830fc6 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 # Use python 3.10 for docs to match the version for the sphinx plugin # https://github.com/googleapis/synthtool/pull/1891 with: @@ -43,7 +43,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -61,7 +61,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: 
python-version: "${{ matrix.python }}" cache: 'pip' @@ -104,7 +104,7 @@ jobs: sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - name: Set up Python "3.12" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.12" cache: 'pip' @@ -145,7 +145,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -170,7 +170,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "3.12" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.12" cache: 'pip' @@ -198,7 +198,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "3.12" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.12" cache: 'pip' @@ -223,7 +223,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "3.12" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.12" cache: 'pip' @@ -243,7 +243,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -265,7 +265,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -305,14 +305,14 @@ jobs: echo "and it will start over with a clean cache." echo "The old one will disappear after 7 days." - name: Integration Tests - run: bazel test //tests/integration/... --test_output=errors + run: bazelisk test //tests/integration/... 
--test_output=errors goldens-lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python 3.12 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.12" cache: 'pip' @@ -331,7 +331,7 @@ jobs: steps: - uses: actions/checkout@v4 - name: Set up Python "3.11" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: # Do not upgrade this check to python 3.12 until # https://github.com/hhatto/autopep8/issues/712 is fixed diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 27bb62c49009..1f82e3d1c554 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -60,27 +60,9 @@ gapic_generator_python() gapic_generator_register_toolchains() -# TODO(https://github.com/googleapis/gapic-generator-python/issues/1781): -# Remove this import once gRPC depends on a newer version. -# -# Background: Import boringssl explicitly to override what gRPC -# imports as its dependency. Boringssl build fails on gcc12 without -# this fix: -# https://github.com/google/boringssl/commit/8462a367bb57e9524c3d8eca9c62733c63a63cf4, -# which is present only in the newest version of boringssl, not the -# one imported by gRPC. -http_archive( - name = "boringssl", - sha256 = "b460f8673f3393e58ce506e9cdde7f2c3b2575b075f214cb819fb57d809f052b", - strip_prefix = "boringssl-bb41bc007079982da419c0ec3186e510cbcf09d0", - urls = [ - "https://github.com/google/boringssl/archive/bb41bc007079982da419c0ec3186e510cbcf09d0.zip", - ], -) - -_grpc_version = "1.55.1" +_grpc_version = "1.60.0" -_grpc_sha256 = "17c0685da231917a7b3be2671a7b13b550a85fdda5e475313264c5f51c4da3f8" +_grpc_sha256 = "09640607a340ff0d97407ed22fe4adb177e5bb85329821122084359cd57c3dea" http_archive( name = "com_github_grpc_grpc", @@ -91,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). 
http_archive( name = "com_google_protobuf", - sha256 = "0b0395d34e000f1229679e10d984ed7913078f3dd7f26cf0476467f5e65716f4", - strip_prefix = "protobuf-23.2", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v23.2.tar.gz"], + sha256 = "9bd87b8280ef720d3240514f884e56a712f2218f0d693b48050c836028940a42", + strip_prefix = "protobuf-25.1", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v25.1.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d8f1788c5570..ac1f53dfe826 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,9 +8,9 @@ cachetools==5.3.2 \ --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2023.11.17 \ + --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \ + --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474 # via requests charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ @@ -108,90 +108,90 @@ click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via -r requirements.in -exceptiongroup==1.1.3 \ - --hash=sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9 \ - --hash=sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3 +exceptiongroup==1.2.0 \ + --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \ + 
--hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 # via pytest -google-api-core==2.13.0 \ - --hash=sha256:44ed591f6c3a0c1ac7a91867d2b3841f92839f860f3d3fe26c464dbd50f97094 \ - --hash=sha256:abc1da067c9026c6cd15dfbd4f6ad07735a62eeadc541d1cc296314447fc3aad +google-api-core==2.15.0 \ + --hash=sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a \ + --hash=sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca # via -r requirements.in -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.25.2 \ + --hash=sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40 \ + --hash=sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256 # via google-api-core -googleapis-common-protos[grpc]==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos[grpc]==1.62.0 \ + --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ + --hash=sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277 # via # -r requirements.in # google-api-core # grpc-google-iam-v1 -grpc-google-iam-v1==0.12.6 \ - --hash=sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f \ - --hash=sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c +grpc-google-iam-v1==0.13.0 \ + --hash=sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89 \ + --hash=sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e # via -r requirements.in -grpcio==1.59.2 \ - --hash=sha256:023088764012411affe7db183d1ada3ad9daf2e23ddc719ff46d7061de661340 \ - --hash=sha256:08d77e682f2bf730a4961eea330e56d2f423c6a9b91ca222e5b1eb24a357b19f \ - 
--hash=sha256:0a4a3833c0e067f3558538727235cd8a49709bff1003200bbdefa2f09334e4b1 \ - --hash=sha256:0a754aff9e3af63bdc4c75c234b86b9d14e14a28a30c4e324aed1a9b873d755f \ - --hash=sha256:11168ef43e4a43ff1b1a65859f3e0ef1a173e277349e7fb16923ff108160a8cd \ - --hash=sha256:128e20f57c5f27cb0157e73756d1586b83c1b513ebecc83ea0ac37e4b0e4e758 \ - --hash=sha256:1f9524d1d701e399462d2c90ba7c193e49d1711cf429c0d3d97c966856e03d00 \ - --hash=sha256:1ff16d68bf453275466a9a46739061a63584d92f18a0f5b33d19fc97eb69867c \ - --hash=sha256:2067274c88bc6de89c278a672a652b4247d088811ece781a4858b09bdf8448e3 \ - --hash=sha256:2171c39f355ba5b551c5d5928d65aa6c69807fae195b86ef4a7d125bcdb860a9 \ - --hash=sha256:242adc47725b9a499ee77c6a2e36688fa6c96484611f33b1be4c57ab075a92dd \ - --hash=sha256:27f879ae604a7fcf371e59fba6f3ff4635a4c2a64768bd83ff0cac503142fef4 \ - --hash=sha256:2b230028a008ae1d0f430acb227d323ff8a619017415cf334c38b457f814119f \ - --hash=sha256:3059668df17627f0e0fa680e9ef8c995c946c792612e9518f5cc1503be14e90b \ - --hash=sha256:31176aa88f36020055ace9adff2405a33c8bdbfa72a9c4980e25d91b2f196873 \ - --hash=sha256:36f53c2b3449c015880e7d55a89c992c357f176327b0d2873cdaaf9628a37c69 \ - --hash=sha256:3b4368b33908f683a363f376dfb747d40af3463a6e5044afee07cf9436addf96 \ - --hash=sha256:3c61d641d4f409c5ae46bfdd89ea42ce5ea233dcf69e74ce9ba32b503c727e29 \ - --hash=sha256:4abb717e320e74959517dc8e84a9f48fbe90e9abe19c248541e9418b1ce60acd \ - --hash=sha256:4c93f4abbb54321ee6471e04a00139c80c754eda51064187963ddf98f5cf36a4 \ - --hash=sha256:535561990e075fa6bd4b16c4c3c1096b9581b7bb35d96fac4650f1181e428268 \ - --hash=sha256:53c9aa5ddd6857c0a1cd0287225a2a25873a8e09727c2e95c4aebb1be83a766a \ - --hash=sha256:5d573e70a6fe77555fb6143c12d3a7d3fa306632a3034b4e7c59ca09721546f8 \ - --hash=sha256:6009386a2df66159f64ac9f20425ae25229b29b9dd0e1d3dd60043f037e2ad7e \ - --hash=sha256:686e975a5d16602dc0982c7c703948d17184bd1397e16c8ee03511ecb8c4cdda \ - --hash=sha256:6959fb07e8351e20501ffb8cc4074c39a0b7ef123e1c850a7f8f3afdc3a3da01 \ - 
--hash=sha256:6b25ed37c27e652db01be341af93fbcea03d296c024d8a0e680017a268eb85dd \ - --hash=sha256:6da6dea3a1bacf99b3c2187e296db9a83029ed9c38fd4c52b7c9b7326d13c828 \ - --hash=sha256:72ca2399097c0b758198f2ff30f7178d680de8a5cfcf3d9b73a63cf87455532e \ - --hash=sha256:73abb8584b0cf74d37f5ef61c10722adc7275502ab71789a8fe3cb7ef04cf6e2 \ - --hash=sha256:74100fecaec8a535e380cf5f2fb556ff84957d481c13e54051c52e5baac70541 \ - --hash=sha256:75c6ecb70e809cf1504465174343113f51f24bc61e22a80ae1c859f3f7034c6d \ - --hash=sha256:7cf05053242f61ba94014dd3a986e11a083400a32664058f80bf4cf817c0b3a1 \ - --hash=sha256:9411e24328a2302e279e70cae6e479f1fddde79629fcb14e03e6d94b3956eabf \ - --hash=sha256:a213acfbf186b9f35803b52e4ca9addb153fc0b67f82a48f961be7000ecf6721 \ - --hash=sha256:bb7e0fe6ad73b7f06d7e2b689c19a71cf5cc48f0c2bf8608469e51ffe0bd2867 \ - --hash=sha256:c2504eed520958a5b77cc99458297cb7906308cb92327f35fb7fbbad4e9b2188 \ - --hash=sha256:c35aa9657f5d5116d23b934568e0956bd50c615127810fffe3ac356a914c176a \ - --hash=sha256:c5f09cffa619adfb44799fa4a81c2a1ad77c887187613fb0a8f201ab38d89ba1 \ - --hash=sha256:c978f864b35f2261e0819f5cd88b9830b04dc51bcf055aac3c601e525a10d2ba \ - --hash=sha256:cbe946b3e6e60a7b4618f091e62a029cb082b109a9d6b53962dd305087c6e4fd \ - --hash=sha256:cc3e4cd087f07758b16bef8f31d88dbb1b5da5671d2f03685ab52dece3d7a16e \ - --hash=sha256:cf0dead5a2c5a3347af2cfec7131d4f2a2e03c934af28989c9078f8241a491fa \ - --hash=sha256:d2794f0e68b3085d99b4f6ff9c089f6fdd02b32b9d3efdfbb55beac1bf22d516 \ - --hash=sha256:d2fa68a96a30dd240be80bbad838a0ac81a61770611ff7952b889485970c4c71 \ - --hash=sha256:d6f70406695e3220f09cd7a2f879333279d91aa4a8a1d34303b56d61a8180137 \ - --hash=sha256:d8f9cd4ad1be90b0cf350a2f04a38a36e44a026cac1e036ac593dc48efe91d52 \ - --hash=sha256:da2d94c15f88cd40d7e67f7919d4f60110d2b9d5b1e08cf354c2be773ab13479 \ - --hash=sha256:e1727c1c0e394096bb9af185c6923e8ea55a5095b8af44f06903bcc0e06800a2 \ - --hash=sha256:e420ced29b5904cdf9ee5545e23f9406189d8acb6750916c2db4793dada065c6 \ - 
--hash=sha256:e82c5cf1495244adf5252f925ac5932e5fd288b3e5ab6b70bec5593074b7236c \ - --hash=sha256:f1ef0d39bc1feb420caf549b3c657c871cad4ebbcf0580c4d03816b0590de0cf \ - --hash=sha256:f8753a6c88d1d0ba64302309eecf20f70d2770f65ca02d83c2452279085bfcd3 \ - --hash=sha256:f93dbf58f03146164048be5426ffde298b237a5e059144847e4940f5b80172c3 +grpcio==1.60.0 \ + --hash=sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6 \ + --hash=sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328 \ + --hash=sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead \ + --hash=sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5 \ + --hash=sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491 \ + --hash=sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96 \ + --hash=sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444 \ + --hash=sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951 \ + --hash=sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf \ + --hash=sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253 \ + --hash=sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629 \ + --hash=sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae \ + --hash=sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43 \ + --hash=sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b \ + --hash=sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14 \ + --hash=sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab \ + --hash=sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390 \ + --hash=sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2 \ + --hash=sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0 \ + 
--hash=sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590 \ + --hash=sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508 \ + --hash=sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b \ + --hash=sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08 \ + --hash=sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13 \ + --hash=sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca \ + --hash=sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03 \ + --hash=sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748 \ + --hash=sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860 \ + --hash=sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d \ + --hash=sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353 \ + --hash=sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e \ + --hash=sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c \ + --hash=sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134 \ + --hash=sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415 \ + --hash=sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320 \ + --hash=sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179 \ + --hash=sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324 \ + --hash=sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18 \ + --hash=sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df \ + --hash=sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e \ + --hash=sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b \ + --hash=sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6 \ + --hash=sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d \ + 
--hash=sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff \ + --hash=sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968 \ + --hash=sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619 \ + --hash=sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139 \ + --hash=sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55 \ + --hash=sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454 \ + --hash=sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65 \ + --hash=sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a \ + --hash=sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19 \ + --hash=sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b \ + --hash=sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd # via # googleapis-common-protos # grpc-google-iam-v1 -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.6 \ + --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ + --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f # via requests inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ @@ -304,31 +304,31 @@ pluggy==1.3.0 \ --hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \ --hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 # via pytest -proto-plus==1.22.3 \ - --hash=sha256:a49cd903bc0b6ab41f76bf65510439d56ca76f868adf0274e738bfdd096894df \ - --hash=sha256:fdcd09713cbd42480740d2fe29c990f7fbd885a67efc328aa8be6ee3e9f76a6b +proto-plus==1.23.0 \ + --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ + 
--hash=sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c # via -r requirements.in -protobuf==4.25.0 \ - --hash=sha256:1a3ba712877e6d37013cdc3476040ea1e313a6c2e1580836a94f76b3c176d575 \ - --hash=sha256:1a53d6f64b00eecf53b65ff4a8c23dc95df1fa1e97bb06b8122e5a64f49fc90a \ - --hash=sha256:32ac2100b0e23412413d948c03060184d34a7c50b3e5d7524ee96ac2b10acf51 \ - --hash=sha256:5c1203ac9f50e4853b0a0bfffd32c67118ef552a33942982eeab543f5c634395 \ - --hash=sha256:63714e79b761a37048c9701a37438aa29945cd2417a97076048232c1df07b701 \ - --hash=sha256:683dc44c61f2620b32ce4927de2108f3ebe8ccf2fd716e1e684e5a50da154054 \ - --hash=sha256:68f7caf0d4f012fd194a301420cf6aa258366144d814f358c5b32558228afa7c \ - --hash=sha256:b2cf8b5d381f9378afe84618288b239e75665fe58d0f3fd5db400959274296e9 \ - --hash=sha256:c40ff8f00aa737938c5378d461637d15c442a12275a81019cc2fef06d81c9419 \ - --hash=sha256:cf21faba64cd2c9a3ed92b7a67f226296b10159dbb8fbc5e854fc90657d908e4 \ - --hash=sha256:d94a33db8b7ddbd0af7c467475fb9fde0c705fb315a8433c0e2020942b863a1f +protobuf==4.25.1 \ + --hash=sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd \ + --hash=sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb \ + --hash=sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0 \ + --hash=sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7 \ + --hash=sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b \ + --hash=sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2 \ + --hash=sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510 \ + --hash=sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6 \ + --hash=sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd \ + --hash=sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10 \ + --hash=sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7 # via # -r requirements.in # 
google-api-core # googleapis-common-protos # grpc-google-iam-v1 # proto-plus -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde +pyasn1==0.5.1 \ + --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \ + --hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c # via # pyasn1-modules # rsa @@ -344,9 +344,9 @@ pytest==7.4.3 \ --hash=sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac \ --hash=sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5 # via pytest-asyncio -pytest-asyncio==0.21.1 \ - --hash=sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d \ - --hash=sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b +pytest-asyncio==0.23.2 \ + --hash=sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc \ + --hash=sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f # via -r requirements.in pyyaml==6.0.1 \ --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ @@ -408,9 +408,9 @@ tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via pytest -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.9.0 \ + --hash=sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783 \ + --hash=sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd # via # libcst # typing-inspect @@ -418,15 +418,15 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ 
--hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.1.0 \ + --hash=sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 \ + --hash=sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54 # via # google-auth # requests # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.0.2 \ + --hash=sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2 \ + --hash=sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6 # via -r requirements.in From 2cfd807a05b07f187a91dbe3d6b64e5fed7135e9 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 18 Dec 2023 17:30:56 -0500 Subject: [PATCH 1088/1339] refactor: reading environment variables; setting client cert source, api endpoints (#1873) Co-authored-by: gcf-owl-bot[bot] <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- .../%sub/services/%service/async_client.py.j2 | 10 ++ .../%sub/services/%service/client.py.j2 | 115 ++++++++++++--- .../%name_%version/%sub/test_%service.py.j2 | 138 +++++++++++++++++- .../services/asset_service/async_client.py | 10 ++ .../asset_v1/services/asset_service/client.py | 114 ++++++++++++--- .../unit/gapic/asset_v1/test_asset_service.py | 128 +++++++++++++++- .../services/iam_credentials/async_client.py | 10 ++ .../services/iam_credentials/client.py | 114 ++++++++++++--- .../credentials_v1/test_iam_credentials.py | 128 +++++++++++++++- .../services/eventarc/async_client.py | 10 ++ .../eventarc_v1/services/eventarc/client.py | 114 ++++++++++++--- 
.../unit/gapic/eventarc_v1/test_eventarc.py | 128 +++++++++++++++- .../config_service_v2/async_client.py | 10 ++ .../services/config_service_v2/client.py | 114 ++++++++++++--- .../logging_service_v2/async_client.py | 10 ++ .../services/logging_service_v2/client.py | 114 ++++++++++++--- .../metrics_service_v2/async_client.py | 10 ++ .../services/metrics_service_v2/client.py | 114 ++++++++++++--- .../logging_v2/test_config_service_v2.py | 128 +++++++++++++++- .../logging_v2/test_logging_service_v2.py | 128 +++++++++++++++- .../logging_v2/test_metrics_service_v2.py | 128 +++++++++++++++- .../services/cloud_redis/async_client.py | 10 ++ .../redis_v1/services/cloud_redis/client.py | 114 ++++++++++++--- .../unit/gapic/redis_v1/test_cloud_redis.py | 128 +++++++++++++++- 24 files changed, 1865 insertions(+), 162 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index c7cfd1ff8429..750b1b24906b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -141,6 +141,16 @@ class {{ service.async_client_name }}: {{ service.name }}Transport: The transport used by the client instance. """ return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint get_transport_class = functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 03cc953dbeaa..23fe548afd24 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -10,9 +10,7 @@ import functools import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast -{% if service.any_deprecated %} import warnings -{% endif %} {% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.versioned_module_name %} from {{package_path}} import gapic_version as package_version @@ -218,7 +216,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -248,6 +246,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -275,6 +276,77 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = {{ service.client_name }}.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, {{ service.name }}Transport]] = None, @@ -323,15 +395,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = {{ service.client_name }}._read_environment_variables() + self._client_cert_source = {{ service.client_name }}._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = {{ service.client_name }}._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -340,10 +415,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # instance provides an extensibility point for unusual situations. if isinstance(transport, {{ service.name }}Transport): # transport is a {{ service.name }}Transport instance. 
- if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." @@ -358,14 +433,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ceb92c806bc7..7166676ac71c 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -100,6 +100,64 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert {{ service.client_name 
}}._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert {{ service.client_name }}._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert {{ service.client_name }}._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + {{ service.client_name }}._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert {{ service.client_name }}._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert {{ service.client_name }}._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert {{ service.client_name }}._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + {{ service.client_name }}._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert {{ service.client_name }}._get_client_cert_source(None, False) is None + assert {{ service.client_name }}._get_client_cert_source(mock_provided_cert_source, False) is None + assert {{ service.client_name }}._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with 
mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert {{ service.client_name }}._get_client_cert_source(None, True) is mock_default_cert_source + assert {{ service.client_name }}._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +{% if 'grpc' in opts.transport %} +@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +{% endif %} +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert {{ service.client_name }}._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, "auto") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, None, "auto") == {{ service.client_name }}.DEFAULT_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, None, "always") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, "always") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, None, "never") == {{ service.client_name }}.DEFAULT_ENDPOINT + + @pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} @@ -281,13 +339,15 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -474,6 +534,80 @@ def test_{{ service.client_name|snake_case }}_get_mtls_endpoint_and_cert_source( assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + {% if 'grpc' in opts.transport %} + {{ service.client_name }}, {{ service.async_client_name }} + {% elif 'rest' in opts.transport %} + {{ service.client_name }} + {% endif %} +]) +@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +{% if 'grpc' in opts.transport %} +@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +{% endif %} +def test_{{ service.client_name|snake_case }}_client_api_endpoint(client_class): + {# TODO(clean-up): remove redundant tests that are already covered by the smaller functions tests #} + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 39871843efa2..5d1ce25abdf1 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -156,6 +156,16 @@ def transport(self) -> AssetServiceTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(AssetServiceClient).get_transport_class, type(AssetServiceClient)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 65f82d2fef0e..46504a5907be 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings from google.cloud.asset_v1 import gapic_version as package_version @@ -304,7 +305,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -334,6 +335,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -361,6 +365,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AssetServiceClient.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, AssetServiceTransport]] = None, @@ -407,15 +482,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = AssetServiceClient._read_environment_variables() + self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = AssetServiceClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -424,10 +502,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, AssetServiceTransport): # transport is a AssetServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -442,14 +520,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def export_assets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 0ea3a40d3466..48fea1841d11 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -88,6 +88,61 @@ def test__get_default_mtls_endpoint(): assert AssetServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert AssetServiceClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AssetServiceClient._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AssetServiceClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + AssetServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AssetServiceClient._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AssetServiceClient._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AssetServiceClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AssetServiceClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AssetServiceClient._get_client_cert_source(None, False) is None + assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source + assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, None, "auto") == AssetServiceClient.DEFAULT_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, None, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, None, "never") == AssetServiceClient.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), @@ -242,13 +297,15 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -423,6 +480,73 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + AssetServiceClient, AssetServiceAsyncClient +]) +@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +def test_asset_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 4ae76357b909..9085a6e7db5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -147,6 +147,16 @@ def transport(self) -> IAMCredentialsTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(IAMCredentialsClient).get_transport_class, type(IAMCredentialsClient)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index c2414fe97d31..357f0fc39c00 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings from google.iam.credentials_v1 import gapic_version as package_version @@ -241,7 +242,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -271,6 +272,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -298,6 +302,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = IAMCredentialsClient.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, IAMCredentialsTransport]] = None, @@ -344,15 +419,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = IAMCredentialsClient._read_environment_variables() + self._client_cert_source = IAMCredentialsClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = IAMCredentialsClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -361,10 +439,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, IAMCredentialsTransport): # transport is a IAMCredentialsTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -379,14 +457,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def generate_access_token(self, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 8ebd8b0ac381..32588a319d5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -78,6 +78,61 @@ def test__get_default_mtls_endpoint(): assert IAMCredentialsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert IAMCredentialsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert IAMCredentialsClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert IAMCredentialsClient._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert 
IAMCredentialsClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + IAMCredentialsClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert IAMCredentialsClient._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert IAMCredentialsClient._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert IAMCredentialsClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + IAMCredentialsClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert IAMCredentialsClient._get_client_cert_source(None, False) is None + assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert IAMCredentialsClient._get_client_cert_source(None, True) is mock_default_cert_source + assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, "true") is 
mock_provided_cert_source + +@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert IAMCredentialsClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, "auto") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, None, "auto") == IAMCredentialsClient.DEFAULT_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, None, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, None, "never") == IAMCredentialsClient.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), @@ -232,13 +287,15 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -413,6 +470,73 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + IAMCredentialsClient, IAMCredentialsAsyncClient +]) +@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +def test_iam_credentials_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 01d0a70d66ff..67ed7101bcd7 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -174,6 +174,16 @@ def transport(self) -> EventarcTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(EventarcClient).get_transport_class, type(EventarcClient)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index e4f83e706fd8..5000844dc2be 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings from google.cloud.eventarc_v1 import gapic_version as package_version @@ -349,7 +350,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -379,6 +380,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -406,6 +410,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = EventarcClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = EventarcClient.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, EventarcTransport]] = None, @@ -452,15 +527,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = EventarcClient._read_environment_variables() + self._client_cert_source = EventarcClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = EventarcClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -469,10 +547,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, EventarcTransport): # transport is a EventarcTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -487,14 +565,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def get_trigger(self, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 8d3d25bad750..889f2f175325 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -98,6 +98,61 @@ def test__get_default_mtls_endpoint(): assert EventarcClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert EventarcClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert EventarcClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EventarcClient._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert EventarcClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + EventarcClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert EventarcClient._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert EventarcClient._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert EventarcClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + EventarcClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert EventarcClient._get_client_cert_source(None, False) is None + assert EventarcClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert EventarcClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert EventarcClient._get_client_cert_source(None, True) is mock_default_cert_source + assert EventarcClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert EventarcClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, "auto") == EventarcClient.DEFAULT_MTLS_ENDPOINT + assert EventarcClient._get_api_endpoint(None, None, "auto") == EventarcClient.DEFAULT_ENDPOINT + assert EventarcClient._get_api_endpoint(None, None, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT + assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT + assert EventarcClient._get_api_endpoint(None, None, "never") == EventarcClient.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), @@ -252,13 +307,15 @@ def test_eventarc_client_client_options(client_class, transport_class, transport # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -433,6 +490,73 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + EventarcClient, EventarcAsyncClient +]) +@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +def test_eventarc_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (EventarcClient, transports.EventarcGrpcTransport, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 755d2b11b991..2b839a0307d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -154,6 +154,16 @@ def transport(self) -> ConfigServiceV2Transport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 62f41d97a56e..23b8d3087283 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -300,7 +301,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -330,6 +331,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -357,6 +361,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ConfigServiceV2Client.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, ConfigServiceV2Transport]] = None, @@ -400,15 +475,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = ConfigServiceV2Client._read_environment_variables() + self._client_cert_source = ConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = ConfigServiceV2Client._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -417,10 +495,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, ConfigServiceV2Transport): # transport is a ConfigServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -435,14 +513,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def list_buckets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index bbb16469c236..4dd252e8d2f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -139,6 +139,16 @@ def transport(self) -> LoggingServiceV2Transport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index c90848e28ccc..a734f1448b7b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -231,7 +232,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -261,6 +262,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -288,6 +292,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = LoggingServiceV2Client.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, LoggingServiceV2Transport]] = None, @@ -331,15 +406,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = LoggingServiceV2Client._read_environment_variables() + self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = LoggingServiceV2Client._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -348,10 +426,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, LoggingServiceV2Transport): # transport is a LoggingServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -366,14 +444,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def delete_log(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 03630110d299..fee35ec66f53 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -140,6 +140,16 @@ def transport(self) -> MetricsServiceV2Transport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 50ec7c399526..155df4fb3e52 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -232,7 +233,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -262,6 +263,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -289,6 +293,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = MetricsServiceV2Client.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, MetricsServiceV2Transport]] = None, @@ -332,15 +407,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = MetricsServiceV2Client._read_environment_variables() + self._client_cert_source = MetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = MetricsServiceV2Client._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -349,10 +427,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, MetricsServiceV2Transport): # transport is a MetricsServiceV2Transport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -367,14 +445,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def list_log_metrics(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index f7c8e0802458..044a642d07f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -78,6 +78,61 @@ def test__get_default_mtls_endpoint(): assert ConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ConfigServiceV2Client._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto") 
+ + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + ConfigServiceV2Client._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ConfigServiceV2Client._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ConfigServiceV2Client._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ConfigServiceV2Client._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ConfigServiceV2Client._get_client_cert_source(None, False) is None + assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None + assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert ConfigServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source + assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + 
+@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert ConfigServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "auto") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, None, "auto") == ConfigServiceV2Client.DEFAULT_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, None, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, None, "never") == ConfigServiceV2Client.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), @@ -221,13 +276,15 @@ def test_config_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -400,6 +457,73 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + ConfigServiceV2Client, ConfigServiceV2AsyncClient +]) +@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +def test_config_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 8bb644e170b9..af7e482f1b40 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -79,6 +79,61 @@ def test__get_default_mtls_endpoint(): assert LoggingServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LoggingServiceV2Client._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + 
LoggingServiceV2Client._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert LoggingServiceV2Client._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert LoggingServiceV2Client._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert LoggingServiceV2Client._get_client_cert_source(None, False) is None + assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None + assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert LoggingServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source + assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert LoggingServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "auto") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, None, "auto") == LoggingServiceV2Client.DEFAULT_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, None, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, None, "never") == LoggingServiceV2Client.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), @@ -222,13 +277,15 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -401,6 +458,73 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, LoggingServiceV2AsyncClient +]) +@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +def test_logging_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index c19f8e31c8db..58423012cf8e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -77,6 +77,61 @@ def test__get_default_mtls_endpoint(): assert MetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MetricsServiceV2Client._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + 
MetricsServiceV2Client._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert MetricsServiceV2Client._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert MetricsServiceV2Client._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + MetricsServiceV2Client._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert MetricsServiceV2Client._get_client_cert_source(None, False) is None + assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None + assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert MetricsServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source + assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert MetricsServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "auto") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, None, "auto") == MetricsServiceV2Client.DEFAULT_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, None, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, None, "never") == MetricsServiceV2Client.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), @@ -220,13 +275,15 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -399,6 +456,73 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + MetricsServiceV2Client, MetricsServiceV2AsyncClient +]) +@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +def test_metrics_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 49404a6513a8..708d8845b742 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -164,6 +164,16 @@ def transport(self) -> CloudRedisTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. 
+ """ + return self._client._api_endpoint + get_transport_class = functools.partial(type(CloudRedisClient).get_transport_class, type(CloudRedisClient)) def __init__(self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 452651485c11..cc1fc5f528e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -17,6 +17,7 @@ import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings from google.cloud.redis_v1 import gapic_version as package_version @@ -258,7 +259,7 @@ def parse_common_location_path(path: str) -> Dict[str,str]: @classmethod def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -288,6 +289,9 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -315,6 +319,77 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE + and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint + + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always the return value of this function. + client_cert_source (bytes): The client certificate source used by the client. + use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = CloudRedisClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CloudRedisClient.DEFAULT_ENDPOINT + return api_endpoint + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used + by the client instance. + """ + return self._api_endpoint + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, CloudRedisTransport]] = None, @@ -361,15 +436,18 @@ def __init__(self, *, google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + self._use_client_cert, self._use_mtls_endpoint = CloudRedisClient._read_environment_variables() + self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._api_endpoint = CloudRedisClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -378,10 +456,10 @@ def __init__(self, *, # instance provides an extensibility point for unusual situations. if isinstance(transport, CloudRedisTransport): # transport is a CloudRedisTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " "provide its credentials directly.") - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." 
@@ -396,14 +474,14 @@ def __init__(self, *, Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def list_instances(self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 59deff1f6610..1135b01f4825 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -89,6 +89,61 @@ def test__get_default_mtls_endpoint(): assert CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + + assert CloudRedisClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CloudRedisClient._read_environment_variables() == (True, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CloudRedisClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + CloudRedisClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert CloudRedisClient._read_environment_variables() == (False, "never") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert CloudRedisClient._read_environment_variables() == (False, "always") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert CloudRedisClient._read_environment_variables() == (False, "auto") + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + CloudRedisClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert CloudRedisClient._get_client_cert_source(None, False) is None + assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert CloudRedisClient._get_client_cert_source(None, True) is mock_default_cert_source + assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) 
+@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + + assert CloudRedisClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override + assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, "auto") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, "auto") == CloudRedisClient.DEFAULT_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, "never") == CloudRedisClient.DEFAULT_ENDPOINT + @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), @@ -243,13 +298,15 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -424,6 +481,73 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, CloudRedisAsyncClient +]) +@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +def test_cloud_redis_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", + # use ClientOptions.api_endpoint as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == mock_api_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the DEFAULT_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, + # use the DEFAULT_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), From e6187dd38a51458daa5f6183aa296d940c4ca35a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 5 Jan 2024 15:59:36 +0100 Subject: [PATCH 1089/1339] chore(deps): update all dependencies (#1896) --- packages/gapic-generator/requirements.txt | 24 +++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ac1f53dfe826..e8ec932b3b9d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -116,9 +116,9 @@ google-api-core==2.15.0 \ --hash=sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a \ --hash=sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca # via -r requirements.in -google-auth==2.25.2 \ - --hash=sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40 \ - --hash=sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256 +google-auth==2.26.1 \ + --hash=sha256:2c8b55e3e564f298122a02ab7b97458ccfcc5617840beb5d0ac757ada92c9780 \ + --hash=sha256:54385acca5c0fbdda510cd8585ba6f3fcb06eeecf8a6ecca39d3ee148b092590 # via google-api-core googleapis-common-protos[grpc]==1.62.0 \ --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ @@ -340,13 +340,13 @@ pypandoc==1.12 \ 
--hash=sha256:8f44740a9f074e121d81b489f073160421611d4ead62d1b306aeb11aab3c32df \ --hash=sha256:efb4f7d68ead8bec32e22b62f02d5608a1700978b51bfc4af286fd6acfe9d218 # via -r requirements.in -pytest==7.4.3 \ - --hash=sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac \ - --hash=sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5 +pytest==7.4.4 \ + --hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \ + --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 # via pytest-asyncio -pytest-asyncio==0.23.2 \ - --hash=sha256:c16052382554c7b22d48782ab3438d5b10f8cf7a4bdcae7f0f67f097d95beecc \ - --hash=sha256:ea9021364e32d58f0be43b91c6233fb8d2224ccef2398d6837559e587682808f +pytest-asyncio==0.23.3 \ + --hash=sha256:37a9d912e8338ee7b4a3e917381d1c95bfc8682048cb0fbc35baba316ec1faba \ + --hash=sha256:af313ce900a62fbe2b1aed18e37ad757f1ef9940c6b6a88e2954de38d6b1fb9f # via -r requirements.in pyyaml==6.0.1 \ --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ @@ -426,7 +426,7 @@ urllib3==2.1.0 \ # requests # The following packages are considered to be unsafe in a requirements file: -setuptools==69.0.2 \ - --hash=sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2 \ - --hash=sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6 +setuptools==69.0.3 \ + --hash=sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05 \ + --hash=sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78 # via -r requirements.in From 3704e24f2f60a119217cd27c948737e23c817ebb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 09:30:30 -0500 Subject: [PATCH 1090/1339] chore(deps): bump jinja2 from 3.1.2 to 3.1.3 (#1902) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- 
packages/gapic-generator/requirements.txt | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e8ec932b3b9d..d21080658837 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -201,9 +201,9 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via -r requirements.in libcst==1.1.0 \ --hash=sha256:003e5e83a12eed23542c4ea20fdc8de830887cc03662432bb36f84f8c4841b81 \ @@ -400,10 +400,6 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via google-auth tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f @@ -421,9 +417,7 @@ typing-inspect==0.9.0 \ urllib3==2.1.0 \ --hash=sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 \ --hash=sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54 - # via - # google-auth - # requests + # via requests # The following packages are considered to be unsafe in a requirements file: setuptools==69.0.3 \ 
From b01cf51fbf8338ede87a234382fbdfb98a4a4dd8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 16 Jan 2024 17:55:51 +0000 Subject: [PATCH 1091/1339] build(python): fix `docs` and `docfx` builds (#1907) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 6 +++--- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- packages/gapic-generator/noxfile.py | 14 +++++++++++++- 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 773c1dfd2146..d8a1bbca7179 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c -# created: 2023-11-29T14:54:29.548172703Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index e5c1ffca94b7..bb3d6ca38b14 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 8024af73515a..958f40af8d09 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -441,7 +441,19 @@ def snippetgen(session): def docs(session): """Build the docs.""" - session.install("sphinx==4.5.0", "sphinx_rtd_theme") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "sphinx_rtd_theme" + ) session.install(".") # Build the docs! From 9114a7e656c583a5e9fb0c97e987b492cb108af1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 16 Jan 2024 18:59:52 +0100 Subject: [PATCH 1092/1339] chore(deps): update all dependencies (#1906) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 6 ++--- packages/gapic-generator/requirements.txt | 30 +++++++++++------------ 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 1f82e3d1c554..6983103dd710 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "9bd87b8280ef720d3240514f884e56a712f2218f0d693b48050c836028940a42", - strip_prefix = "protobuf-25.1", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v25.1.tar.gz"], + sha256 = "8ff511a64fc46ee792d3fe49a5a1bcad6f7dc50dfbba5a28b0e5b979c17f9871", + strip_prefix = "protobuf-25.2", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v25.2.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d21080658837..96dace63d770 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -116,9 +116,9 @@ google-api-core==2.15.0 \ --hash=sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a \ --hash=sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca # via -r requirements.in -google-auth==2.26.1 \ - 
--hash=sha256:2c8b55e3e564f298122a02ab7b97458ccfcc5617840beb5d0ac757ada92c9780 \ - --hash=sha256:54385acca5c0fbdda510cd8585ba6f3fcb06eeecf8a6ecca39d3ee148b092590 +google-auth==2.26.2 \ + --hash=sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424 \ + --hash=sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81 # via google-api-core googleapis-common-protos[grpc]==1.62.0 \ --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ @@ -308,18 +308,18 @@ proto-plus==1.23.0 \ --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ --hash=sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c # via -r requirements.in -protobuf==4.25.1 \ - --hash=sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd \ - --hash=sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb \ - --hash=sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0 \ - --hash=sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7 \ - --hash=sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b \ - --hash=sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2 \ - --hash=sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510 \ - --hash=sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6 \ - --hash=sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd \ - --hash=sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10 \ - --hash=sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7 +protobuf==4.25.2 \ + --hash=sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62 \ + --hash=sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d \ + --hash=sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61 \ + 
--hash=sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62 \ + --hash=sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3 \ + --hash=sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9 \ + --hash=sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830 \ + --hash=sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6 \ + --hash=sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0 \ + --hash=sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020 \ + --hash=sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e # via # -r requirements.in # google-api-core From 1e6600c8fc2f24d5c130f9d8939ccf298624df0b Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 17 Jan 2024 09:17:05 -0500 Subject: [PATCH 1093/1339] fix: Add None as a type to OptionalRetry (#1901) Co-authored-by: Anthonios Partheniou --- .../%name/%version/%sub/services/%service/client.py.j2 | 4 ++-- .../%version/%sub/services/%service/transports/rest.py.j2 | 4 ++-- .../%name_%version/%sub/services/%service/async_client.py.j2 | 4 ++-- .../%name_%version/%sub/services/%service/client.py.j2 | 4 ++-- .../%sub/services/%service/transports/rest.py.j2 | 4 ++-- .../cloud/asset_v1/services/asset_service/async_client.py | 4 ++-- .../google/cloud/asset_v1/services/asset_service/client.py | 4 ++-- .../cloud/asset_v1/services/asset_service/transports/rest.py | 4 ++-- .../credentials_v1/services/iam_credentials/async_client.py | 4 ++-- .../iam/credentials_v1/services/iam_credentials/client.py | 4 ++-- .../services/iam_credentials/transports/rest.py | 4 ++-- .../cloud/eventarc_v1/services/eventarc/async_client.py | 4 ++-- .../google/cloud/eventarc_v1/services/eventarc/client.py | 4 ++-- .../cloud/eventarc_v1/services/eventarc/transports/rest.py | 4 ++-- .../logging_v2/services/config_service_v2/async_client.py | 4 ++-- .../cloud/logging_v2/services/config_service_v2/client.py | 4 ++-- 
.../logging_v2/services/logging_service_v2/async_client.py | 4 ++-- .../cloud/logging_v2/services/logging_service_v2/client.py | 4 ++-- .../logging_v2/services/metrics_service_v2/async_client.py | 4 ++-- .../cloud/logging_v2/services/metrics_service_v2/client.py | 4 ++-- .../cloud/redis_v1/services/cloud_redis/async_client.py | 4 ++-- .../google/cloud/redis_v1/services/cloud_redis/client.py | 4 ++-- .../cloud/redis_v1/services/cloud_redis/transports/rest.py | 4 ++-- 23 files changed, 46 insertions(+), 46 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index abde0499e53d..0dd30f65ce13 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -24,9 +24,9 @@ from google.oauth2 import service_account # type: ignore from {{package_path}} import gapic_version as package_version try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index adc150a4487c..3fb7f35be08b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -26,9 +26,9 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore {# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 750b1b24906b..f87ac3f66034 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -21,9 +21,9 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 23fe548afd24..e4d2019b462a 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -29,9 +29,9 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 52493d180e3f..18fcf3b34658 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -37,9 +37,9 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore {# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 5d1ce25abdf1..4119e9f305e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 46504a5907be..3c9d1fd4d8b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 662dc3800236..5f9d93bc1b81 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -35,9 +35,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.asset_v1.types import asset_service diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 9085a6e7db5f..2d7d38410dbe 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.iam.credentials_v1.types 
import common from google.protobuf import duration_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 357f0fc39c00..ee7c57cbf68f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 817fe8928950..0b8f029df97a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -34,9 +34,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = 
Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.credentials_v1.types import common diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 67ed7101bcd7..1f1ae062d765 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 5000844dc2be..5da6e3bf190e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except 
AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 32a4100f5fbb..301d6c5f2b97 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -38,9 +38,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.eventarc_v1.types import channel diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 2b839a0307d1..fbeb65b8a85b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = 
Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 23b8d3087283..cc5b7e0e028d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 4dd252e8d2f4..ebb83ee03e19 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ 
-28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a734f1448b7b..0b1dcb34e77b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index fee35ec66f53..873108dee869 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 155df4fb3e52..d8b072132913 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 708d8845b742..559636664225 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -28,9 +28,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index cc1fc5f528e9..f3af09495120 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -32,9 +32,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 57b93dcbc8f0..4726c326caf6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -36,9 +36,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.redis_v1.types import cloud_redis From df2a075e15bcfbe5d257706606189195846aca21 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 18 Jan 2024 14:45:34 -0800 Subject: [PATCH 1094/1339] chore(main): release 1.13.1 (#1908) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1a1c1701d4ea..93c878955a2c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.13.1](https://github.com/googleapis/gapic-generator-python/compare/v1.13.0...v1.13.1) (2024-01-17) + + +### Bug Fixes + +* Add None as a type to OptionalRetry ([#1901](https://github.com/googleapis/gapic-generator-python/issues/1901)) 
([011475c](https://github.com/googleapis/gapic-generator-python/commit/011475c525cda4245803e197c77fa2a1c37fa98d)) + ## [1.13.0](https://github.com/googleapis/gapic-generator-python/compare/v1.12.0...v1.13.0) (2023-11-20) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 473cd9abd26a..1c63a79e263f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.13.0" +version = "1.13.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 1d0c49d9140498884f19b7d5c20c8d75c24a3dc5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 26 Jan 2024 17:22:20 +0100 Subject: [PATCH 1095/1339] chore(deps): update all dependencies (#1913) --- .../.github/workflows/tests.yaml | 2 +- packages/gapic-generator/requirements.txt | 124 ++++++++++-------- 2 files changed, 68 insertions(+), 58 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index d209e3830fc6..7d7c6a5c8b90 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -285,7 +285,7 @@ jobs: - uses: actions/checkout@v4 - name: Cache Bazel files id: cache-bazel - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cache/bazel # Note: if the container is updated, the key needs to be updated as well. 
diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 96dace63d770..3e984bb784db 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -116,9 +116,9 @@ google-api-core==2.15.0 \ --hash=sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a \ --hash=sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca # via -r requirements.in -google-auth==2.26.2 \ - --hash=sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424 \ - --hash=sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81 +google-auth==2.27.0 \ + --hash=sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245 \ + --hash=sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821 # via google-api-core googleapis-common-protos[grpc]==1.62.0 \ --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ @@ -238,57 +238,67 @@ libcst==1.1.0 \ --hash=sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c \ --hash=sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90 # via -r requirements.in -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - 
--hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - 
--hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 +MarkupSafe==2.1.4 \ + --hash=sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69 \ + --hash=sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0 \ + --hash=sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d \ + --hash=sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec \ + --hash=sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5 \ + --hash=sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411 \ + 
--hash=sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3 \ + --hash=sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74 \ + --hash=sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0 \ + --hash=sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949 \ + --hash=sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d \ + --hash=sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279 \ + --hash=sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f \ + --hash=sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6 \ + --hash=sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc \ + --hash=sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e \ + --hash=sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954 \ + --hash=sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656 \ + --hash=sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc \ + --hash=sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518 \ + --hash=sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56 \ + --hash=sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc \ + --hash=sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa \ + --hash=sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565 \ + --hash=sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4 \ + --hash=sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb \ + --hash=sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250 \ + --hash=sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4 \ + --hash=sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959 \ + --hash=sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc \ + 
--hash=sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474 \ + --hash=sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863 \ + --hash=sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8 \ + --hash=sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f \ + --hash=sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2 \ + --hash=sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e \ + --hash=sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e \ + --hash=sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb \ + --hash=sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f \ + --hash=sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a \ + --hash=sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26 \ + --hash=sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d \ + --hash=sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2 \ + --hash=sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131 \ + --hash=sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789 \ + --hash=sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6 \ + --hash=sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a \ + --hash=sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858 \ + --hash=sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e \ + --hash=sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb \ + --hash=sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e \ + --hash=sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84 \ + --hash=sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7 \ + --hash=sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea \ + 
--hash=sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b \ + --hash=sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6 \ + --hash=sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475 \ + --hash=sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74 \ + --hash=sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a \ + --hash=sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00 # via # -r requirements.in # jinja2 @@ -300,9 +310,9 @@ packaging==23.2 \ --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via pytest -pluggy==1.3.0 \ - --hash=sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12 \ - --hash=sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7 +pluggy==1.4.0 \ + --hash=sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981 \ + --hash=sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be # via pytest proto-plus==1.23.0 \ --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ From 5614db04f88a2b8353fe93bc1ee4d7cf04d36e14 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 30 Jan 2024 16:43:25 -0500 Subject: [PATCH 1096/1339] feat: Allow users to explicitly configure universe domain. 
(#1898) Co-authored-by: Victor Chudnovsky --- .../services/%service/_async_mixins.py.j2 | 30 + .../%sub/services/%service/_client_macros.j2 | 3 + .../%sub/services/%service/_mixins.py.j2 | 30 + .../%sub/services/%service/async_client.py.j2 | 75 +- .../%sub/services/%service/client.py.j2 | 172 ++- .../services/%service/transports/base.py.j2 | 5 +- .../services/%service/transports/grpc.py.j2 | 2 +- .../%service/transports/grpc_asyncio.py.j2 | 2 +- .../services/%service/transports/rest.py.j2 | 2 +- .../%name_%version/%sub/_test_mixins.py.j2 | 160 +-- .../%name_%version/%sub/test_%service.py.j2 | 309 +++-- .../gapic/%name_%version/%sub/test_macros.j2 | 56 +- packages/gapic-generator/noxfile.py | 34 +- .../services/asset_service/async_client.py | 131 +- .../asset_v1/services/asset_service/client.py | 231 +++- .../services/asset_service/transports/base.py | 6 +- .../services/asset_service/transports/grpc.py | 2 +- .../asset_service/transports/grpc_asyncio.py | 2 +- .../services/asset_service/transports/rest.py | 2 +- .../unit/gapic/asset_v1/test_asset_service.py | 1161 +++++++++-------- .../services/iam_credentials/async_client.py | 71 +- .../services/iam_credentials/client.py | 171 ++- .../iam_credentials/transports/base.py | 6 +- .../iam_credentials/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../iam_credentials/transports/rest.py | 2 +- .../credentials_v1/test_iam_credentials.py | 445 ++++--- .../services/eventarc/async_client.py | 140 +- .../eventarc_v1/services/eventarc/client.py | 240 +++- .../services/eventarc/transports/base.py | 6 +- .../services/eventarc/transports/grpc.py | 2 +- .../eventarc/transports/grpc_asyncio.py | 2 +- .../services/eventarc/transports/rest.py | 2 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 1159 ++++++++-------- .../config_service_v2/async_client.py | 161 ++- .../services/config_service_v2/client.py | 264 +++- .../config_service_v2/transports/base.py | 6 +- .../config_service_v2/transports/grpc.py | 2 +- 
.../transports/grpc_asyncio.py | 2 +- .../logging_service_v2/async_client.py | 83 +- .../services/logging_service_v2/client.py | 186 ++- .../logging_service_v2/transports/base.py | 6 +- .../logging_service_v2/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../metrics_service_v2/async_client.py | 80 +- .../services/metrics_service_v2/client.py | 183 ++- .../metrics_service_v2/transports/base.py | 6 +- .../metrics_service_v2/transports/grpc.py | 2 +- .../transports/grpc_asyncio.py | 2 +- .../logging_v2/test_config_service_v2.py | 826 ++++++------ .../logging_v2/test_logging_service_v2.py | 420 +++--- .../logging_v2/test_metrics_service_v2.py | 420 +++--- .../services/cloud_redis/async_client.py | 110 +- .../redis_v1/services/cloud_redis/client.py | 210 ++- .../services/cloud_redis/transports/base.py | 6 +- .../services/cloud_redis/transports/grpc.py | 2 +- .../cloud_redis/transports/grpc_asyncio.py | 2 +- .../services/cloud_redis/transports/rest.py | 2 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 821 ++++++------ .../gapic-generator/tests/system/conftest.py | 57 +- .../tests/system/test_universe_domain.py | 77 ++ 61 files changed, 5705 insertions(+), 2900 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_universe_domain.py diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 8dacd96b1e65..750bd734abcc 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -47,6 +47,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -101,6 +104,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -158,6 +164,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) {% endif %} @@ -210,6 +219,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) {% endif %} @@ -265,6 +277,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -389,6 +404,9 @@ (("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -509,6 +527,9 @@ (("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -567,6 +588,9 @@ (("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -624,6 +648,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -677,6 +704,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 23bef26fe810..9b0d07629e4b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -183,6 +183,9 @@ ) {% endif %} {# method.explicit_routing #} + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. {%+ if not method.void %}response = {% endif %}rpc( {% if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index 8287ecf05965..3417068a9ff2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -45,6 +45,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -99,6 +102,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -156,6 +162,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) {% endif %} @@ -208,6 +217,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) {% endif %} @@ -263,6 +275,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -387,6 +402,9 @@ (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -507,6 +525,9 @@ (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -565,6 +586,9 @@ (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -622,6 +646,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -675,6 +702,9 @@ (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index f87ac3f66034..79ca0a1b78aa 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -54,8 +54,12 @@ class {{ service.async_client_name }}: _client: {{ service.client_name }} + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = {{ service.client_name }}.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = {{ service.client_name }}._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = {{ service.client_name }}._DEFAULT_UNIVERSE {% for message in service.resource_messages|sort(attribute="resource_type") %} {{ message.resource_type|snake_case }}_path = staticmethod({{ service.client_name }}.{{ message.resource_type|snake_case }}_path) @@ -147,11 +151,19 @@ class {{ service.async_client_name }}: """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. 
""" - return self._client._api_endpoint - + return self._client._universe_domain get_transport_class = functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) @@ -161,7 +173,7 @@ class {{ service.async_client_name }}: client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the {{ (service.client_name|snake_case).replace("_", " ") }}. + """Instantiates the {{ (service.async_client_name|snake_case).replace("_", " ") }}. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -172,22 +184,43 @@ class {{ service.async_client_name }}: transport (Union[str, ~.{{ service.name }}Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + {% if 'rest' in opts.transport and not opts.rest_numeric_enums %} + {# TODO (gapic-generator-python/issues/1918): Remove the beta preview comment. #} + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + {% endif %} + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -353,6 +386,9 @@ class {{ service.async_client_name }}: ) {% endif %} + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
{%+ if not method.void %}response = {% endif %} {% if not method.server_streaming %}await {% endif %}rpc( @@ -505,6 +541,9 @@ class {{ service.async_client_name }}: gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -621,6 +660,9 @@ class {{ service.async_client_name }}: gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -675,6 +717,9 @@ class {{ service.async_client_name }}: gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index e4d2019b462a..6bf6b0fa5aba 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -131,12 +131,17 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = {% if service.host %}"{{ service.host }}"{% else %}None{% endif %} DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = {% if service.host %}"{{ service.host.replace("googleapis.com", "{UNIVERSE_DOMAIN}") }}"{% else %}None{% endif %} + + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -281,8 +286,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -292,11 +297,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. 
@@ -316,36 +322,110 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = {{ service.client_name }}.DEFAULT_ENDPOINT + api_endpoint = {{ service.client_name }}._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. 
+ + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = {{ service.client_name }}._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. 
+ + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + {{ service.client_name }}._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. """ - return self._api_endpoint + return self._universe_domain def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, @@ -365,26 +445,37 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): transport to use. If set to None, a transport is chosen automatically. {% if 'rest' in opts.transport and not opts.rest_numeric_enums %} + {# TODO (gapic-generator-python/issues/1918): Remove the beta preview comment. #} NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. {% endif %} - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. 
@@ -402,9 +493,15 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = {{ service.client_name }}._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = {{ service.client_name }}._read_environment_variables() self._client_cert_source = {{ service.client_name }}._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = {{ service.client_name }}._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = {{ service.client_name }}._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -413,7 +510,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, {{ service.name }}Transport): + transport_provided = isinstance(transport, {{ service.name }}Transport) + if transport_provided: # transport is a {{ service.name }}Transport instance. 
if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -423,14 +521,23 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast({{ service.name }}Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + {{ service.client_name }}._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -581,6 +688,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -698,6 +808,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): gapic_v1.routing_header.to_grpc_metadata( (("resource", request.resource),)), ) + + # Validate the universe domain. + self._validate_universe_domain() # Send the request. response = rpc( @@ -755,6 +868,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index b2469654df1a..e6a77080d7bd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -79,7 +79,7 @@ class {{ service.name }}Transport(abc.ABC): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -137,6 +137,9 @@ class {{ service.name }}Transport(abc.ABC): host += ':443' self._host = host + @property + def host(self): + return self._host def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 2e4f26a8a716..64958b9e4a6d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -76,7 +76,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to. 
+ {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 94e2952fcc06..f88b6f85cf2d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -120,7 +120,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 18fcf3b34658..2303760fb17a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -203,7 +203,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index eaa40e24dc2d..1c3699f1aed1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -3,7 +3,7 @@ def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -25,7 +25,7 @@ def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request ]) def test_{{ name|snake_case }}_rest(request_type): client = {{ service.client_name 
}}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} @@ -74,11 +74,11 @@ def test_delete_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -116,11 +116,11 @@ def test_delete_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -160,11 +160,11 @@ def test_delete_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -203,11 +203,11 @@ def test_cancel_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -245,11 +245,11 @@ def test_cancel_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -289,11 +289,11 @@ def test_cancel_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -331,11 +331,11 @@ def test_wait_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -373,11 +373,11 @@ def test_wait_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -417,11 +417,11 @@ def test_wait_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -460,11 +460,11 @@ def test_get_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -502,11 +502,11 @@ def test_get_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -546,11 +546,11 @@ def test_get_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -589,11 +589,11 @@ def test_list_operations(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -631,11 +631,11 @@ def test_list_operations_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -675,11 +675,11 @@ def test_list_operations_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -724,11 +724,11 @@ def test_list_locations(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -766,11 +766,11 @@ def test_list_locations_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -810,11 +810,11 @@ def test_list_locations_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -855,11 +855,11 @@ def test_get_location(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -897,10 +897,10 @@ def test_get_location_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials()) + credentials=_AnonymousCredentialsWithUniverseDomain()) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) {% endif %} @@ -940,11 +940,11 @@ def test_get_location_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -986,11 +986,11 @@ def test_set_iam_policy(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -1034,11 +1034,11 @@ def test_set_iam_policy_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -1071,7 +1071,7 @@ def test_set_iam_policy_field_headers(): def test_set_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1090,7 +1090,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1110,7 +1110,7 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1141,7 +1141,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1175,7 +1175,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1202,7 +1202,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1230,7 +1230,7 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1248,7 +1248,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1268,7 +1268,7 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1301,7 +1301,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1333,7 +1333,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1362,7 +1362,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1392,7 +1392,7 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1412,7 +1412,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1445,11 +1445,11 @@ def test_set_iam_policy(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) {% endif %} @@ -1493,11 +1493,11 @@ def test_set_iam_policy_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% endif %} @@ -1530,7 +1530,7 @@ def test_set_iam_policy_field_headers(): def test_set_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1549,7 +1549,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1570,7 +1570,7 @@ async def test_set_iam_policy_from_dict_async(): {% if "GetIamPolicy" in api.mixin_api_methods %} def test_get_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1601,7 +1601,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1635,7 +1635,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1662,7 +1662,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ 
service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1690,7 +1690,7 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1708,7 +1708,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1729,7 +1729,7 @@ async def test_get_iam_policy_from_dict_async(): {% if "TestIamPermissions" in api.mixin_api_methods %} def test_test_iam_permissions(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1762,7 +1762,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1794,7 +1794,7 @@ async def 
test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1823,7 +1823,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1853,7 +1853,7 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1873,7 +1873,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 7166676ac71c..eed35fd0dd4f 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -20,6 +20,7 @@ import json {% endif %} import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers {% if 'rest' in opts.transport %} @@ -78,13 +79,29 @@ from google.iam.v1 import policy_pb2 # type: ignore def client_cert_source_callback(): return b"cert bytes", b"key bytes" - +{#TODO(https://github.com/googleapis/gapic-generator-python/issues/1894): Remove this function as part of cleanup when DEFAULT_ENDPOINT is no longer used.#} # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". 
+class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -101,14 +118,13 @@ def test__get_default_mtls_endpoint(): assert {{ service.client_name }}._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert {{ service.client_name }}._read_environment_variables() == (False, "auto") + assert {{ service.client_name }}._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert {{ service.client_name }}._read_environment_variables() == (True, "auto") + assert {{ service.client_name }}._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert {{ service.client_name }}._read_environment_variables() == (False, "auto") + assert {{ service.client_name }}._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -116,19 +132,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert {{ service.client_name }}._read_environment_variables() == (False, "never") + assert {{ service.client_name }}._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert {{ service.client_name }}._read_environment_variables() == 
(False, "always") + assert {{ service.client_name }}._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert {{ service.client_name }}._read_environment_variables() == (False, "auto") + assert {{ service.client_name }}._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: {{ service.client_name }}._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert {{ service.client_name }}._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -142,22 +161,95 @@ def test__get_client_cert_source(): assert {{ service.client_name }}._get_client_cert_source(None, True) is mock_default_cert_source assert {{ service.client_name }}._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +@mock.patch.object({{ service.client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.client_name }})) {% if 'grpc' in opts.transport %} -@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +@mock.patch.object({{ service.async_client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.async_client_name }})) {% endif %} def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE + 
default_endpoint = {{ service.client_name }}._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = {{ service.client_name }}._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert {{ service.client_name }}._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert {{ service.client_name }}._get_api_endpoint(None, None, default_universe, "always") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT + assert {{ service.client_name }}._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert {{ service.client_name }}._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert {{ service.client_name }}._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert {{ service.client_name }}._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert {{ service.client_name }}._get_universe_domain(None, None) == {{ service.client_name }}._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + {{ service.client_name }}._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." - assert {{ service.client_name }}._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, "auto") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT - assert {{ service.client_name }}._get_api_endpoint(None, None, "auto") == {{ service.client_name }}.DEFAULT_ENDPOINT - assert {{ service.client_name }}._get_api_endpoint(None, None, "always") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT - assert {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, "always") == {{ service.client_name }}.DEFAULT_MTLS_ENDPOINT - assert {{ service.client_name }}._get_api_endpoint(None, None, "never") == {{ service.client_name }}.DEFAULT_ENDPOINT - - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + {% if 'grpc' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), + {% endif %} +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert 
client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." @pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} @@ -169,7 +261,7 @@ def test__get_api_endpoint(): {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -220,7 +312,7 @@ def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(tra {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -265,15 +357,15 @@ def test_{{ service.client_name|snake_case }}_get_transport_class(): ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), {% endif %} ]) -@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ 
service.client_name }})) +@mock.patch.object({{ service.client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.client_name }})) {% if 'grpc' in opts.transport %} -@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +@mock.patch.object({{ service.async_client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.async_client_name }})) {% endif %} def test_{{ service.client_name|snake_case }}_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -309,7 +401,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -357,7 +449,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -373,7 +465,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), 
scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -394,9 +486,9 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest", "false"), {% endif %} ]) -@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +@mock.patch.object({{ service.client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.client_name }})) {% if 'grpc' in opts.transport %} -@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +@mock.patch.object({{ service.async_client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.async_client_name }})) {% endif %} @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): @@ -413,7 +505,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -437,7 +529,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) 
expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -466,7 +558,7 @@ def test_{{ service.client_name|snake_case }}_mtls_env_auto(client_class, transp patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -556,58 +648,61 @@ def test_{{ service.client_name|snake_case }}_get_mtls_endpoint_and_cert_source( {{ service.client_name }} {% endif %} ]) -@mock.patch.object({{ service.client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.client_name }})) +@mock.patch.object({{ service.client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.client_name }})) {% if 'grpc' in opts.transport %} -@mock.patch.object({{ service.async_client_name }}, "DEFAULT_ENDPOINT", modify_default_endpoint({{ service.async_client_name }})) +@mock.patch.object({{ service.async_client_name }}, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template({{ service.async_client_name }})) {% endif %} def test_{{ service.client_name|snake_case }}_client_api_endpoint(client_class): {# TODO(clean-up): remove redundant tests that are already covered by the smaller functions tests #} mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE + default_endpoint = {{ service.client_name }}._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = {{ service.client_name }}._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT - - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize("client_class,transport_class,transport_name", [ {% if 'grpc' in opts.transport %} @@ -629,7 +724,7 @@ def test_{{ service.client_name|snake_case }}_client_options_scopes(client_class patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -659,7 +754,7 @@ def test_{{ service.client_name|snake_case }}_client_options_credentials_file(cl patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ 
-705,7 +800,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -722,8 +817,8 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -770,7 +865,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) # Since a `google.api.http` annotation is required for using a rest transport @@ -789,17 +884,17 @@ def test_{{ method_name }}_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( @@ -809,7 +904,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -820,17 +915,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = {{ service.client_name }}( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. 
transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( @@ -842,7 +937,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = {{ service.client_name }}(transport=transport) assert client.transport is transport @@ -852,13 +947,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.{{ service.grpc_asyncio_transport_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -876,7 +971,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -890,7 +985,7 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = {{ service.client_name }}.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name @@ -898,7 +993,7 @@ def test_transport_kind(transport_name): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -910,7 +1005,7 @@ def test_{{ service.name|snake_case }}_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.{{ service.name }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -920,7 +1015,7 @@ def test_{{ service.name|snake_case }}_base_transport(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: Transport.return_value = None transport = transports.{{ service.name }}Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -999,7 +1094,7 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): # 
Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -1018,7 +1113,7 @@ def test_{{ service.name|snake_case }}_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.{{ service.name }}Transport() adc.assert_called_once() @@ -1026,7 +1121,7 @@ def test_{{ service.name|snake_case }}_base_transport_with_adc(): def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) {{ service.client_name }}() adc.assert_called_once_with( scopes=None, @@ -1050,7 +1145,7 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -1115,7 +1210,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -1149,7 +1244,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1189,7 +1284,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtl {% if 'rest' in opts.transport %} def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.{{ service.rest_transport_name }} ( credentials=cred, @@ -1201,7 +1296,7 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl {% if service.has_lro -%} def test_{{ service.name|snake_case }}_rest_lro_client(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) transport = client.transport @@ -1231,7 +1326,7 @@ def test_{{ service.name|snake_case }}_rest_lro_client(): def test_{{ service.name|snake_case }}_host_no_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), transport=transport_name, ) @@ -1256,7 +1351,7 @@ def test_{{ service.name|snake_case }}_host_no_port(transport_name): def test_{{ service.name|snake_case }}_host_with_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), transport=transport_name, ) @@ -1274,8 +1369,8 @@ def test_{{ service.name|snake_case 
}}_host_with_port(transport_name): "rest", ]) def test_{{ service.name|snake_case }}_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() + creds1 = _AnonymousCredentialsWithUniverseDomain() + creds2 = _AnonymousCredentialsWithUniverseDomain() client1 = {{ service.client_name }}( credentials=creds1, transport=transport_name, @@ -1332,7 +1427,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -1405,7 +1500,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( {% if service.has_lro %} def test_{{ service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) transport = client.transport @@ -1422,7 +1517,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): def test_{{ service.name|snake_case }}_grpc_lro_async_client(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc_asyncio', ) transport = client.transport @@ -1494,7 +1589,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1502,7 +1597,7 @@ def 
test_client_with_default_client_info(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: transport_class = {{ service.client_name }}.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1511,7 +1606,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -1534,7 +1629,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -1553,7 +1648,7 @@ def test_client_ctx(): ] for transport in transports: client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -1585,7 +1680,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 6c00659c5a98..164987ea93ca 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -6,7 +6,7 @@ ]) def test_{{ method_name }}(request_type, transport: str = 'grpc'): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -105,7 +105,7 @@ def test_{{ method_name }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -127,7 +127,7 @@ def test_{{ method_name }}_empty_call(): @pytest.mark.asyncio async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -228,7 +228,7 @@ async def test_{{ method_name }}_async_from_dict(): {% if method.explicit_routing %} def test_{{ method.name|snake_case }}_routing_parameters(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) {% for routing_param in method.routing_rule.routing_parameters %} @@ -266,7 +266,7 @@ def test_{{ method.name|snake_case }}_routing_parameters(): {% if method.field_headers and not method.client_streaming and not method.explicit_routing %} def test_{{ method_name }}_field_headers(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -312,7 +312,7 @@ def test_{{ method_name }}_field_headers(): @pytest.mark.asyncio async def test_{{ method_name }}_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -359,7 +359,7 @@ async def test_{{ method_name }}_field_headers_async(): {% if method.ident.package != method.input.ident.package %} def test_{{ method_name }}_from_dict_foreign(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -388,7 +388,7 @@ def test_{{ method_name }}_from_dict_foreign(): {% if method.flattened_fields and not method.client_streaming %} def test_{{ method_name }}_flattened(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -448,7 +448,7 @@ def test_{{ method_name }}_flattened(): def test_{{ method_name }}_flattened_error(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -465,7 +465,7 @@ def test_{{ method_name }}_flattened_error(): @pytest.mark.asyncio async def test_{{ method_name }}_flattened_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -544,7 +544,7 @@ async def test_{{ method_name }}_flattened_async(): @pytest.mark.asyncio async def test_{{ method_name }}_flattened_error_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -564,7 +564,7 @@ async def test_{{ method_name }}_flattened_error_async(): {% if not method.paged_result_field.map %} def test_{{ method_name }}_pager(transport_name: str = "grpc"): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -624,7 +624,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): {% endif %} def test_{{ method_name }}_pages(transport_name: str = "grpc"): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -698,7 +698,7 @@ def test_{{ method_name }}_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_{{ method_name }}_async_pager(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -788,7 +788,7 @@ async def test_{{ method_name }}_async_pager(): @pytest.mark.asyncio async def test_{{ method_name }}_async_pages(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -877,7 +877,7 @@ def test_{{ method_name }}_raw_page_lro(): ]) def test_{{ method_name }}_rest(request_type): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -1100,7 +1100,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert "{{ field_name }}" not in jsonified_request {% endfor %} - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1116,7 +1116,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) {% if method.query_params %} # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) @@ -1133,7 +1133,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -1222,7 +1222,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide def test_{{ method_name }}_rest_unset_required_fields(): - transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) + transport = transports.{{ service.rest_transport_name }}(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.{{ method.transport_safe_name|snake_case }}._get_unset_required_fields({}) assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) @@ -1234,7 +1234,7 @@ def test_{{ method_name }}_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_{{ method_name }}_rest_interceptors(null_interceptor): transport = transports.{{ service.name }}RestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), ) client = {{ service.client_name }}(transport=transport) @@ -1297,7 +1297,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ 
-1325,7 +1325,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ {% if method.flattened_fields and not method.client_streaming %} def test_{{ method_name }}_rest_flattened(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -1394,7 +1394,7 @@ def test_{{ method_name }}_rest_flattened(): def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1413,7 +1413,7 @@ def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): {% if method.paged_result_field %} def test_{{ method_name }}_rest_pager(transport: str = 'rest'): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1533,7 +1533,7 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) {%- if not method.http_options %} @@ -1552,7 +1552,7 @@ def test_{{ method_name }}_rest_error(): {% else %}{# this is an lro or streaming method #} def test_{{ method_name }}_rest_unimplemented(): client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request = {{ method.input.ident }}() @@ -1565,7 +1565,7 @@ def test_{{ method_name }}_rest_unimplemented(): {% endif %}{# not lro and not streaming #}{% else %}{# not method.http_options #} def test_{{ method_name }}_rest_no_http_options(): client = {{ service.client_name }}( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request = {{ method.input.ident }}() diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 958f40af8d09..a344eeb10ef6 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -266,16 +266,25 @@ def showcase( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), - env: typing.Optional[typing.Dict[str, str]] = None, + env: typing.Optional[typing.Dict[str, str]] = {}, ): """Run the Showcase test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): session.install("pytest", "pytest-asyncio") - session.run( + test_directory = Path("tests", "system") + ignore_file = env.get("IGNORE_FILE") + pytest_command = [ "py.test", "--quiet", - *(session.posargs or [path.join("tests", "system")]), + *(session.posargs or [str(test_directory)]), + ] + if ignore_file: + ignore_path = test_directory / ignore_file + pytest_command.extend(["--ignore", str(ignore_path)]) + + session.run( + *pytest_command, env=env, ) @@ -285,17 +294,26 @@ def showcase_mtls( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), - env: typing.Optional[typing.Dict[str, str]] = None, + env: typing.Optional[typing.Dict[str, str]] = {}, ): """Run the Showcase mtls test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): session.install("pytest", "pytest-asyncio") - session.run( + test_directory = Path("tests", "system") + ignore_file = env.get("IGNORE_FILE") + pytest_command = [ "py.test", "--quiet", "--mtls", - *(session.posargs or [path.join("tests", "system")]), + *(session.posargs or [str(test_directory)]), + ] + if ignore_file: + ignore_path = test_directory / ignore_file + pytest_command.extend(["--ignore", str(ignore_path)]) + + session.run( + *pytest_command, env=env, ) @@ -307,7 +325,7 @@ def 
showcase_alternative_templates(session): session, templates=templates, other_opts=("old-naming",), - env={"GAPIC_PYTHON_ASYNC": "False"}, + env={"GAPIC_PYTHON_ASYNC": "False", "IGNORE_FILE": "test_universe_domain.py"}, ) @@ -318,7 +336,7 @@ def showcase_mtls_alternative_templates(session): session, templates=templates, other_opts=("old-naming",), - env={"GAPIC_PYTHON_ASYNC": "False"}, + env={"GAPIC_PYTHON_ASYNC": "False", "IGNORE_FILE": "test_universe_domain.py"}, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 4119e9f305e8..fd6b4f6cb482 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -52,8 +52,12 @@ class AssetServiceAsyncClient: _client: AssetServiceClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = AssetServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = AssetServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AssetServiceClient._DEFAULT_UNIVERSE access_level_path = staticmethod(AssetServiceClient.access_level_path) parse_access_level_path = staticmethod(AssetServiceClient.parse_access_level_path) @@ -161,11 +165,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. 
""" return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial(type(AssetServiceClient).get_transport_class, type(AssetServiceClient)) def __init__(self, *, @@ -174,7 +187,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the asset service client. + """Instantiates the asset service async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -185,23 +198,41 @@ def __init__(self, *, transport (Union[str, ~.AssetServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -308,6 +339,9 @@ async def sample_export_assets(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -426,6 +460,9 @@ async def sample_list_assets(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -526,6 +563,9 @@ async def sample_batch_get_assets_history(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -646,6 +686,9 @@ async def sample_create_feed(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -761,6 +804,9 @@ async def sample_get_feed(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -871,6 +917,9 @@ async def sample_list_feeds(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -983,6 +1032,9 @@ async def sample_update_feed(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1083,6 +1135,9 @@ async def sample_delete_feed(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1304,6 +1359,9 @@ async def sample_search_all_resources(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1498,6 +1556,9 @@ async def sample_search_all_iam_policies(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1598,6 +1659,9 @@ async def sample_analyze_iam_policy(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -1703,6 +1767,9 @@ async def sample_analyze_iam_policy_longrunning(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1799,6 +1866,9 @@ async def sample_analyze_move(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1893,6 +1963,9 @@ async def sample_query_assets(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2028,6 +2101,9 @@ async def sample_create_saved_query(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2132,6 +2208,9 @@ async def sample_get_saved_query(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2242,6 +2321,9 @@ async def sample_list_saved_queries(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2366,6 +2448,9 @@ async def sample_update_saved_query(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2461,6 +2546,9 @@ async def sample_delete_saved_query(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -2540,6 +2628,9 @@ async def sample_batch_get_effective_iam_policies(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2679,6 +2770,9 @@ async def sample_analyze_org_policies(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -2827,6 +2921,9 @@ async def sample_analyze_org_policy_governed_containers(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3004,6 +3101,9 @@ async def sample_analyze_org_policy_governed_assets(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3068,6 +3168,9 @@ async def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 3c9d1fd4d8b4..6779ee8783ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -118,11 +118,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "cloudasset.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -370,8 +374,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. 
Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -381,11 +385,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -405,37 +410,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. 
- use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = AssetServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = AssetServiceClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = AssetServiceClient.DEFAULT_ENDPOINT + api_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = AssetServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = AssetServiceClient._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + AssetServiceClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, AssetServiceTransport]] = None, @@ -456,22 +535,32 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. 
@@ -489,9 +578,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = AssetServiceClient._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = AssetServiceClient._read_environment_variables() self._client_cert_source = AssetServiceClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = AssetServiceClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = AssetServiceClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -500,7 +595,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, AssetServiceTransport): + transport_provided = isinstance(transport, AssetServiceTransport) + if transport_provided: # transport is a AssetServiceTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -510,14 +606,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(AssetServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + AssetServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -625,6 +730,9 @@ def sample_export_assets(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -743,6 +851,9 @@ def sample_list_assets(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -837,6 +948,9 @@ def sample_batch_get_assets_history(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -957,6 +1071,9 @@ def sample_create_feed(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1065,6 +1182,9 @@ def sample_get_feed(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1168,6 +1288,9 @@ def sample_list_feeds(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1280,6 +1403,9 @@ def sample_update_feed(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1373,6 +1499,9 @@ def sample_delete_feed(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1587,6 +1716,9 @@ def sample_search_all_resources(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1774,6 +1906,9 @@ def sample_search_all_iam_policies(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1869,6 +2004,9 @@ def sample_analyze_iam_policy(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1975,6 +2113,9 @@ def sample_analyze_iam_policy_longrunning(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2072,6 +2213,9 @@ def sample_analyze_move(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2167,6 +2311,9 @@ def sample_query_assets(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2302,6 +2449,9 @@ def sample_create_saved_query(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2406,6 +2556,9 @@ def sample_get_saved_query(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2516,6 +2669,9 @@ def sample_list_saved_queries(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2640,6 +2796,9 @@ def sample_update_saved_query(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -2735,6 +2894,9 @@ def sample_delete_saved_query(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -2815,6 +2977,9 @@ def sample_batch_get_effective_iam_policies(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2954,6 +3119,9 @@ def sample_analyze_org_policies(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3102,6 +3270,9 @@ def sample_analyze_org_policy_governed_containers(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3279,6 +3450,9 @@ def sample_analyze_org_policy_governed_assets(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3356,6 +3530,9 @@ def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index d46bee37dcca..446fc27632de 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -58,7 +58,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'cloudasset.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -113,6 +113,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 53958bcf81a0..195d47849773 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -64,7 +64,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'cloudasset.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index c8f62d768b49..1228475d0563 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -108,7 +108,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. 
+ The hostname to connect to (default: 'cloudasset.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 5f9d93bc1b81..ea64ef9d1c75 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -670,7 +670,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'cloudasset.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 48fea1841d11..b8825c96b3cd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -28,6 +28,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -66,13 +67,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". 
+class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -89,14 +105,13 @@ def test__get_default_mtls_endpoint(): assert AssetServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert AssetServiceClient._read_environment_variables() == (False, "auto") + assert AssetServiceClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert AssetServiceClient._read_environment_variables() == (True, "auto") + assert AssetServiceClient._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto") + assert AssetServiceClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -104,19 +119,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert AssetServiceClient._read_environment_variables() == (False, "never") + assert AssetServiceClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert AssetServiceClient._read_environment_variables() == (False, "always") + assert AssetServiceClient._read_environment_variables() 
== (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert AssetServiceClient._read_environment_variables() == (False, "auto") + assert AssetServiceClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: AssetServiceClient._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -130,19 +148,89 @@ def test__get_client_cert_source(): assert AssetServiceClient._get_client_cert_source(None, True) is mock_default_cert_source assert AssetServiceClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = AssetServiceClient._DEFAULT_UNIVERSE + default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert 
AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT + assert AssetServiceClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert AssetServiceClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert AssetServiceClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert AssetServiceClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert AssetServiceClient._get_universe_domain(None, None) == AssetServiceClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + AssetServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- assert AssetServiceClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, "auto") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, "auto") == AssetServiceClient.DEFAULT_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, "always") == AssetServiceClient.DEFAULT_MTLS_ENDPOINT - assert AssetServiceClient._get_api_endpoint(None, None, "never") == AssetServiceClient.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), + (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
@pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), @@ -150,7 +238,7 @@ def test__get_api_endpoint(): (AssetServiceClient, "rest"), ]) def test_asset_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -189,7 +277,7 @@ def test_asset_service_client_service_account_always_use_jwt(transport_class, tr (AssetServiceClient, "rest"), ]) def test_asset_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -225,13 +313,13 @@ def test_asset_service_client_get_transport_class(): (AssetServiceAsyncClient, transports.AssetServiceGrpcAsyncIOTransport, "grpc_asyncio"), (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), ]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) def test_asset_service_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -267,7 +355,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -315,7 +403,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -331,7 +419,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -348,8 +436,8 @@ def test_asset_service_client_client_options(client_class, transport_class, tran (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "true"), (AssetServiceClient, transports.AssetServiceRestTransport, "rest", "false"), ]) -@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) 
+@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_asset_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -365,7 +453,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -389,7 +477,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -418,7 +506,7 @@ def test_asset_service_client_mtls_env_auto(client_class, transport_class, trans patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -498,55 +586,58 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class", [ AssetServiceClient, AssetServiceAsyncClient ]) 
-@mock.patch.object(AssetServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceClient)) -@mock.patch.object(AssetServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(AssetServiceAsyncClient)) +@mock.patch.object(AssetServiceClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceClient)) +@mock.patch.object(AssetServiceAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(AssetServiceAsyncClient)) def test_asset_service_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AssetServiceClient._DEFAULT_UNIVERSE + default_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = AssetServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), @@ -564,7 +655,7 @@ def test_asset_service_client_client_options_scopes(client_class, transport_clas patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -590,7 +681,7 @@ def test_asset_service_client_client_options_credentials_file(client_class, tran patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -634,7 +725,7 @@ def 
test_asset_service_client_create_channel_credentials_file(client_class, tran patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -651,8 +742,8 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -680,7 +771,7 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ]) def test_export_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -709,7 +800,7 @@ def test_export_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -725,7 +816,7 @@ def test_export_assets_empty_call(): @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -759,7 +850,7 @@ async def test_export_assets_async_from_dict(): def test_export_assets_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -791,7 +882,7 @@ def test_export_assets_field_headers(): @pytest.mark.asyncio async def test_export_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -826,7 +917,7 @@ async def test_export_assets_field_headers_async(): ]) def test_list_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -858,7 +949,7 @@ def test_list_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -874,7 +965,7 @@ def test_list_assets_empty_call(): @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -909,7 +1000,7 @@ async def test_list_assets_async_from_dict(): def test_list_assets_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -941,7 +1032,7 @@ def test_list_assets_field_headers(): @pytest.mark.asyncio async def test_list_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -972,7 +1063,7 @@ async def test_list_assets_field_headers_async(): def test_list_assets_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -998,7 +1089,7 @@ def test_list_assets_flattened(): def test_list_assets_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1012,7 +1103,7 @@ def test_list_assets_flattened_error(): @pytest.mark.asyncio async def test_list_assets_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1040,7 +1131,7 @@ async def test_list_assets_flattened_async(): @pytest.mark.asyncio async def test_list_assets_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1054,7 +1145,7 @@ async def test_list_assets_flattened_error_async(): def test_list_assets_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1107,7 +1198,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): for i in results) def test_list_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1150,7 +1241,7 @@ def test_list_assets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_assets_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1199,7 +1290,7 @@ async def test_list_assets_async_pager(): @pytest.mark.asyncio async def test_list_assets_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1250,7 +1341,7 @@ async def test_list_assets_async_pages(): ]) def test_batch_get_assets_history(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1280,7 +1371,7 @@ def test_batch_get_assets_history_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1296,7 +1387,7 @@ def test_batch_get_assets_history_empty_call(): @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1329,7 +1420,7 @@ async def test_batch_get_assets_history_async_from_dict(): def test_batch_get_assets_history_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1361,7 +1452,7 @@ def test_batch_get_assets_history_field_headers(): @pytest.mark.asyncio async def test_batch_get_assets_history_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1396,7 +1487,7 @@ async def test_batch_get_assets_history_field_headers_async(): ]) def test_create_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1436,7 +1527,7 @@ def test_create_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1452,7 +1543,7 @@ def test_create_feed_empty_call(): @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1495,7 +1586,7 @@ async def test_create_feed_async_from_dict(): def test_create_feed_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1527,7 +1618,7 @@ def test_create_feed_field_headers(): @pytest.mark.asyncio async def test_create_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1558,7 +1649,7 @@ async def test_create_feed_field_headers_async(): def test_create_feed_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1584,7 +1675,7 @@ def test_create_feed_flattened(): def test_create_feed_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1598,7 +1689,7 @@ def test_create_feed_flattened_error(): @pytest.mark.asyncio async def test_create_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1626,7 +1717,7 @@ async def test_create_feed_flattened_async(): @pytest.mark.asyncio async def test_create_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1644,7 +1735,7 @@ async def test_create_feed_flattened_error_async(): ]) def test_get_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1684,7 +1775,7 @@ def test_get_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1700,7 +1791,7 @@ def test_get_feed_empty_call(): @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1743,7 +1834,7 @@ async def test_get_feed_async_from_dict(): def test_get_feed_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1775,7 +1866,7 @@ def test_get_feed_field_headers(): @pytest.mark.asyncio async def test_get_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1806,7 +1897,7 @@ async def test_get_feed_field_headers_async(): def test_get_feed_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1832,7 +1923,7 @@ def test_get_feed_flattened(): def test_get_feed_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1846,7 +1937,7 @@ def test_get_feed_flattened_error(): @pytest.mark.asyncio async def test_get_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1874,7 +1965,7 @@ async def test_get_feed_flattened_async(): @pytest.mark.asyncio async def test_get_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1892,7 +1983,7 @@ async def test_get_feed_flattened_error_async(): ]) def test_list_feeds(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1922,7 +2013,7 @@ def test_list_feeds_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1938,7 +2029,7 @@ def test_list_feeds_empty_call(): @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1971,7 +2062,7 @@ async def test_list_feeds_async_from_dict(): def test_list_feeds_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2003,7 +2094,7 @@ def test_list_feeds_field_headers(): @pytest.mark.asyncio async def test_list_feeds_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2034,7 +2125,7 @@ async def test_list_feeds_field_headers_async(): def test_list_feeds_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2060,7 +2151,7 @@ def test_list_feeds_flattened(): def test_list_feeds_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2074,7 +2165,7 @@ def test_list_feeds_flattened_error(): @pytest.mark.asyncio async def test_list_feeds_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2102,7 +2193,7 @@ async def test_list_feeds_flattened_async(): @pytest.mark.asyncio async def test_list_feeds_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2120,7 +2211,7 @@ async def test_list_feeds_flattened_error_async(): ]) def test_update_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2160,7 +2251,7 @@ def test_update_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2176,7 +2267,7 @@ def test_update_feed_empty_call(): @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2219,7 +2310,7 @@ async def test_update_feed_async_from_dict(): def test_update_feed_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2251,7 +2342,7 @@ def test_update_feed_field_headers(): @pytest.mark.asyncio async def test_update_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2282,7 +2373,7 @@ async def test_update_feed_field_headers_async(): def test_update_feed_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2308,7 +2399,7 @@ def test_update_feed_flattened(): def test_update_feed_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2322,7 +2413,7 @@ def test_update_feed_flattened_error(): @pytest.mark.asyncio async def test_update_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2350,7 +2441,7 @@ async def test_update_feed_flattened_async(): @pytest.mark.asyncio async def test_update_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2368,7 +2459,7 @@ async def test_update_feed_flattened_error_async(): ]) def test_delete_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2397,7 +2488,7 @@ def test_delete_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2413,7 +2504,7 @@ def test_delete_feed_empty_call(): @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2445,7 +2536,7 @@ async def test_delete_feed_async_from_dict(): def test_delete_feed_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2477,7 +2568,7 @@ def test_delete_feed_field_headers(): @pytest.mark.asyncio async def test_delete_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2508,7 +2599,7 @@ async def test_delete_feed_field_headers_async(): def test_delete_feed_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2534,7 +2625,7 @@ def test_delete_feed_flattened(): def test_delete_feed_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2548,7 +2639,7 @@ def test_delete_feed_flattened_error(): @pytest.mark.asyncio async def test_delete_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2576,7 +2667,7 @@ async def test_delete_feed_flattened_async(): @pytest.mark.asyncio async def test_delete_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2594,7 +2685,7 @@ async def test_delete_feed_flattened_error_async(): ]) def test_search_all_resources(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2626,7 +2717,7 @@ def test_search_all_resources_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2642,7 +2733,7 @@ def test_search_all_resources_empty_call(): @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2677,7 +2768,7 @@ async def test_search_all_resources_async_from_dict(): def test_search_all_resources_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2709,7 +2800,7 @@ def test_search_all_resources_field_headers(): @pytest.mark.asyncio async def test_search_all_resources_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2740,7 +2831,7 @@ async def test_search_all_resources_field_headers_async(): def test_search_all_resources_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2774,7 +2865,7 @@ def test_search_all_resources_flattened(): def test_search_all_resources_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2790,7 +2881,7 @@ def test_search_all_resources_flattened_error(): @pytest.mark.asyncio async def test_search_all_resources_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2826,7 +2917,7 @@ async def test_search_all_resources_flattened_async(): @pytest.mark.asyncio async def test_search_all_resources_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2842,7 +2933,7 @@ async def test_search_all_resources_flattened_error_async(): def test_search_all_resources_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2895,7 +2986,7 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): for i in results) def test_search_all_resources_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2938,7 +3029,7 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_search_all_resources_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2987,7 +3078,7 @@ async def test_search_all_resources_async_pager(): @pytest.mark.asyncio async def test_search_all_resources_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3038,7 +3129,7 @@ async def test_search_all_resources_async_pages(): ]) def test_search_all_iam_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3070,7 +3161,7 @@ def test_search_all_iam_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3086,7 +3177,7 @@ def test_search_all_iam_policies_empty_call(): @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3121,7 +3212,7 @@ async def test_search_all_iam_policies_async_from_dict(): def test_search_all_iam_policies_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3153,7 +3244,7 @@ def test_search_all_iam_policies_field_headers(): @pytest.mark.asyncio async def 
test_search_all_iam_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3184,7 +3275,7 @@ async def test_search_all_iam_policies_field_headers_async(): def test_search_all_iam_policies_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3214,7 +3305,7 @@ def test_search_all_iam_policies_flattened(): def test_search_all_iam_policies_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3229,7 +3320,7 @@ def test_search_all_iam_policies_flattened_error(): @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3261,7 +3352,7 @@ async def test_search_all_iam_policies_flattened_async(): @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3276,7 +3367,7 @@ async def test_search_all_iam_policies_flattened_error_async(): def test_search_all_iam_policies_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -3329,7 +3420,7 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): for i in results) def test_search_all_iam_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -3372,7 +3463,7 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_search_all_iam_policies_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3421,7 +3512,7 @@ async def test_search_all_iam_policies_async_pager(): @pytest.mark.asyncio async def test_search_all_iam_policies_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3472,7 +3563,7 @@ async def test_search_all_iam_policies_async_pages(): ]) def test_analyze_iam_policy(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3504,7 +3595,7 @@ def test_analyze_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3520,7 +3611,7 @@ def test_analyze_iam_policy_empty_call(): @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3555,7 +3646,7 @@ async def test_analyze_iam_policy_async_from_dict(): def test_analyze_iam_policy_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3587,7 +3678,7 @@ def test_analyze_iam_policy_field_headers(): @pytest.mark.asyncio async def test_analyze_iam_policy_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3622,7 +3713,7 @@ async def test_analyze_iam_policy_field_headers_async(): ]) def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3651,7 +3742,7 @@ def test_analyze_iam_policy_longrunning_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3667,7 +3758,7 @@ def test_analyze_iam_policy_longrunning_empty_call(): @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3701,7 +3792,7 @@ async def test_analyze_iam_policy_longrunning_async_from_dict(): def test_analyze_iam_policy_longrunning_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3733,7 +3824,7 @@ def test_analyze_iam_policy_longrunning_field_headers(): @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3768,7 +3859,7 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): ]) def test_analyze_move(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3798,7 +3889,7 @@ def test_analyze_move_empty_call(): # This test is a coverage failsafe 
to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3814,7 +3905,7 @@ def test_analyze_move_empty_call(): @pytest.mark.asyncio async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3847,7 +3938,7 @@ async def test_analyze_move_async_from_dict(): def test_analyze_move_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3879,7 +3970,7 @@ def test_analyze_move_field_headers(): @pytest.mark.asyncio async def test_analyze_move_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3914,7 +4005,7 @@ async def test_analyze_move_field_headers_async(): ]) def test_query_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3948,7 +4039,7 @@ def test_query_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3964,7 +4055,7 @@ def test_query_assets_empty_call(): @pytest.mark.asyncio async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4001,7 +4092,7 @@ async def test_query_assets_async_from_dict(): def test_query_assets_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4033,7 +4124,7 @@ def test_query_assets_field_headers(): @pytest.mark.asyncio async def test_query_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4068,7 +4159,7 @@ async def test_query_assets_field_headers_async(): ]) def test_create_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4106,7 +4197,7 @@ def test_create_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4122,7 +4213,7 @@ def test_create_saved_query_empty_call(): @pytest.mark.asyncio async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4163,7 +4254,7 @@ async def test_create_saved_query_async_from_dict(): def test_create_saved_query_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4195,7 +4286,7 @@ def test_create_saved_query_field_headers(): @pytest.mark.asyncio async def test_create_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4226,7 +4317,7 @@ async def test_create_saved_query_field_headers_async(): def test_create_saved_query_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4260,7 +4351,7 @@ def test_create_saved_query_flattened(): def test_create_saved_query_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4276,7 +4367,7 @@ def test_create_saved_query_flattened_error(): @pytest.mark.asyncio async def test_create_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4312,7 +4403,7 @@ async def test_create_saved_query_flattened_async(): @pytest.mark.asyncio async def test_create_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4332,7 +4423,7 @@ async def test_create_saved_query_flattened_error_async(): ]) def test_get_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4370,7 +4461,7 @@ def test_get_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4386,7 +4477,7 @@ def test_get_saved_query_empty_call(): @pytest.mark.asyncio async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4427,7 +4518,7 @@ async def test_get_saved_query_async_from_dict(): def test_get_saved_query_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4459,7 +4550,7 @@ def test_get_saved_query_field_headers(): @pytest.mark.asyncio async def test_get_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4490,7 +4581,7 @@ async def test_get_saved_query_field_headers_async(): def test_get_saved_query_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4516,7 +4607,7 @@ def test_get_saved_query_flattened(): def test_get_saved_query_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4530,7 +4621,7 @@ def test_get_saved_query_flattened_error(): @pytest.mark.asyncio async def test_get_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4558,7 +4649,7 @@ async def test_get_saved_query_flattened_async(): @pytest.mark.asyncio async def test_get_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4576,7 +4667,7 @@ async def test_get_saved_query_flattened_error_async(): ]) def test_list_saved_queries(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4608,7 +4699,7 @@ def test_list_saved_queries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4624,7 +4715,7 @@ def test_list_saved_queries_empty_call(): @pytest.mark.asyncio async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4659,7 +4750,7 @@ async def test_list_saved_queries_async_from_dict(): def test_list_saved_queries_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4691,7 +4782,7 @@ def test_list_saved_queries_field_headers(): @pytest.mark.asyncio async def test_list_saved_queries_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4722,7 +4813,7 @@ async def test_list_saved_queries_field_headers_async(): def test_list_saved_queries_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4748,7 +4839,7 @@ def test_list_saved_queries_flattened(): def test_list_saved_queries_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4762,7 +4853,7 @@ def test_list_saved_queries_flattened_error(): @pytest.mark.asyncio async def test_list_saved_queries_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4790,7 +4881,7 @@ async def test_list_saved_queries_flattened_async(): @pytest.mark.asyncio async def test_list_saved_queries_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4804,7 +4895,7 @@ async def test_list_saved_queries_flattened_error_async(): def test_list_saved_queries_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -4857,7 +4948,7 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): for i in results) def test_list_saved_queries_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -4900,7 +4991,7 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_saved_queries_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # 
Mock the actual call within the gRPC stub, and fake the request. @@ -4949,7 +5040,7 @@ async def test_list_saved_queries_async_pager(): @pytest.mark.asyncio async def test_list_saved_queries_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5000,7 +5091,7 @@ async def test_list_saved_queries_async_pages(): ]) def test_update_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5038,7 +5129,7 @@ def test_update_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5054,7 +5145,7 @@ def test_update_saved_query_empty_call(): @pytest.mark.asyncio async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5095,7 +5186,7 @@ async def test_update_saved_query_async_from_dict(): def test_update_saved_query_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5127,7 +5218,7 @@ def test_update_saved_query_field_headers(): @pytest.mark.asyncio async def test_update_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5158,7 +5249,7 @@ async def test_update_saved_query_field_headers_async(): def test_update_saved_query_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5188,7 +5279,7 @@ def test_update_saved_query_flattened(): def test_update_saved_query_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5203,7 +5294,7 @@ def test_update_saved_query_flattened_error(): @pytest.mark.asyncio async def test_update_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5235,7 +5326,7 @@ async def test_update_saved_query_flattened_async(): @pytest.mark.asyncio async def test_update_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5254,7 +5345,7 @@ async def test_update_saved_query_flattened_error_async(): ]) def test_delete_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5283,7 +5374,7 @@ def test_delete_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5299,7 +5390,7 @@ def test_delete_saved_query_empty_call(): @pytest.mark.asyncio async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5331,7 +5422,7 @@ async def test_delete_saved_query_async_from_dict(): def test_delete_saved_query_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5363,7 +5454,7 @@ def test_delete_saved_query_field_headers(): @pytest.mark.asyncio async def test_delete_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5394,7 +5485,7 @@ async def test_delete_saved_query_field_headers_async(): def test_delete_saved_query_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5420,7 +5511,7 @@ def test_delete_saved_query_flattened(): def test_delete_saved_query_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5434,7 +5525,7 @@ def test_delete_saved_query_flattened_error(): @pytest.mark.asyncio async def test_delete_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5462,7 +5553,7 @@ async def test_delete_saved_query_flattened_async(): @pytest.mark.asyncio async def test_delete_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5480,7 +5571,7 @@ async def test_delete_saved_query_flattened_error_async(): ]) def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5510,7 +5601,7 @@ def test_batch_get_effective_iam_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5526,7 +5617,7 @@ def test_batch_get_effective_iam_policies_empty_call(): @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5559,7 +5650,7 @@ async def test_batch_get_effective_iam_policies_async_from_dict(): def test_batch_get_effective_iam_policies_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5591,7 +5682,7 @@ def test_batch_get_effective_iam_policies_field_headers(): @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5626,7 +5717,7 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): ]) def test_analyze_org_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5658,7 +5749,7 @@ def test_analyze_org_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5674,7 +5765,7 @@ def test_analyze_org_policies_empty_call(): @pytest.mark.asyncio async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5709,7 +5800,7 @@ async def test_analyze_org_policies_async_from_dict(): def test_analyze_org_policies_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5741,7 +5832,7 @@ def test_analyze_org_policies_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5772,7 +5863,7 @@ async def test_analyze_org_policies_field_headers_async(): def test_analyze_org_policies_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5806,7 +5897,7 @@ def test_analyze_org_policies_flattened(): def test_analyze_org_policies_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5822,7 +5913,7 @@ def test_analyze_org_policies_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policies_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5858,7 +5949,7 @@ async def test_analyze_org_policies_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policies_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5874,7 +5965,7 @@ async def test_analyze_org_policies_flattened_error_async(): def test_analyze_org_policies_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -5927,7 +6018,7 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): for i in results) def test_analyze_org_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -5970,7 +6061,7 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_analyze_org_policies_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6019,7 +6110,7 @@ async def test_analyze_org_policies_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policies_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6070,7 +6161,7 @@ async def test_analyze_org_policies_async_pages(): ]) def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6102,7 +6193,7 @@ def test_analyze_org_policy_governed_containers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -6118,7 +6209,7 @@ def test_analyze_org_policy_governed_containers_empty_call(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6153,7 +6244,7 @@ async def test_analyze_org_policy_governed_containers_async_from_dict(): def test_analyze_org_policy_governed_containers_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6185,7 +6276,7 @@ def test_analyze_org_policy_governed_containers_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6216,7 +6307,7 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): def test_analyze_org_policy_governed_containers_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6250,7 +6341,7 @@ def test_analyze_org_policy_governed_containers_flattened(): def test_analyze_org_policy_governed_containers_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6266,7 +6357,7 @@ def test_analyze_org_policy_governed_containers_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6302,7 +6393,7 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6318,7 +6409,7 @@ async def test_analyze_org_policy_governed_containers_flattened_error_async(): def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -6371,7 +6462,7 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp for i in results) def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -6414,7 +6505,7 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp 
@pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6463,7 +6554,7 @@ async def test_analyze_org_policy_governed_containers_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6514,7 +6605,7 @@ async def test_analyze_org_policy_governed_containers_async_pages(): ]) def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6546,7 +6637,7 @@ def test_analyze_org_policy_governed_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -6562,7 +6653,7 @@ def test_analyze_org_policy_governed_assets_empty_call(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6597,7 +6688,7 @@ async def test_analyze_org_policy_governed_assets_async_from_dict(): def test_analyze_org_policy_governed_assets_field_headers(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6629,7 +6720,7 @@ def test_analyze_org_policy_governed_assets_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6660,7 +6751,7 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): def test_analyze_org_policy_governed_assets_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6694,7 +6785,7 @@ def test_analyze_org_policy_governed_assets_flattened(): def test_analyze_org_policy_governed_assets_flattened_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6710,7 +6801,7 @@ def test_analyze_org_policy_governed_assets_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6746,7 +6837,7 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6762,7 +6853,7 @@ async def test_analyze_org_policy_governed_assets_flattened_error_async(): def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -6815,7 +6906,7 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): for i in results) def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -6858,7 +6949,7 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def 
test_analyze_org_policy_governed_assets_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6907,7 +6998,7 @@ async def test_analyze_org_policy_governed_assets_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6959,7 +7050,7 @@ async def test_analyze_org_policy_governed_assets_async_pages(): ]) def test_export_assets_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7000,14 +7091,14 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7015,7 +7106,7 @@ def 
test_export_assets_rest_required_fields(request_type=asset_service.ExportAss assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7055,7 +7146,7 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss def test_export_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.export_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) @@ -7064,7 +7155,7 @@ def test_export_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_export_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7104,7 +7195,7 @@ def test_export_assets_rest_interceptors(null_interceptor): def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7124,7 +7215,7 @@ def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=as def test_export_assets_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -7135,7 +7226,7 @@ 
def test_export_assets_rest_error(): ]) def test_list_assets_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7181,14 +7272,14 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", )) jsonified_request.update(unset_fields) @@ -7198,7 +7289,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7240,7 +7331,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR def test_list_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) @@ -7249,7 +7340,7 @@ def test_list_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7288,7 +7379,7 @@ def test_list_assets_rest_interceptors(null_interceptor): def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7308,7 +7399,7 @@ def 
test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asse def test_list_assets_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7346,7 +7437,7 @@ def test_list_assets_rest_flattened(): def test_list_assets_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7361,7 +7452,7 @@ def test_list_assets_rest_flattened_error(transport: str = 'rest'): def test_list_assets_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7427,7 +7518,7 @@ def test_list_assets_rest_pager(transport: str = 'rest'): ]) def test_batch_get_assets_history_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7471,14 +7562,14 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_assets_history._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_assets_history._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", )) jsonified_request.update(unset_fields) @@ -7488,7 +7579,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7530,7 +7621,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic def test_batch_get_assets_history_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) @@ -7539,7 +7630,7 @@ def test_batch_get_assets_history_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_batch_get_assets_history_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7578,7 +7669,7 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', 
request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7598,7 +7689,7 @@ def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', requ def test_batch_get_assets_history_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -7609,7 +7700,7 @@ def test_batch_get_assets_history_rest_error(): ]) def test_create_feed_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7664,7 +7755,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7672,7 +7763,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR jsonified_request["parent"] = 'parent_value' jsonified_request["feedId"] = 'feed_id_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7682,7 +7773,7 @@ def 
test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR assert jsonified_request["feedId"] == 'feed_id_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7725,7 +7816,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR def test_create_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.create_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) @@ -7734,7 +7825,7 @@ def test_create_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7773,7 +7864,7 @@ def test_create_feed_rest_interceptors(null_interceptor): def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7793,7 +7884,7 @@ def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asse def test_create_feed_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7831,7 +7922,7 @@ def 
test_create_feed_rest_flattened(): def test_create_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7846,7 +7937,7 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): def test_create_feed_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -7857,7 +7948,7 @@ def test_create_feed_rest_error(): ]) def test_get_feed_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7911,14 +8002,14 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7926,7 +8017,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), 
+ credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7968,7 +8059,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest def test_get_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -7977,7 +8068,7 @@ def test_get_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8016,7 +8107,7 @@ def test_get_feed_rest_interceptors(null_interceptor): def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8036,7 +8127,7 @@ def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_s def test_get_feed_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8074,7 +8165,7 @@ def test_get_feed_rest_flattened(): def test_get_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) 
@@ -8089,7 +8180,7 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): def test_get_feed_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8100,7 +8191,7 @@ def test_get_feed_rest_error(): ]) def test_list_feeds_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8144,14 +8235,14 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8159,7 +8250,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8201,7 +8292,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq def 
test_list_feeds_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_feeds._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", ))) @@ -8210,7 +8301,7 @@ def test_list_feeds_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_feeds_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8249,7 +8340,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8269,7 +8360,7 @@ def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset def test_list_feeds_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8307,7 +8398,7 @@ def test_list_feeds_rest_flattened(): def test_list_feeds_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8322,7 +8413,7 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): def test_list_feeds_rest_error(): client = AssetServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8333,7 +8424,7 @@ def test_list_feeds_rest_error(): ]) def test_update_feed_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8386,18 +8477,18 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8440,7 +8531,7 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR def test_update_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.update_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("feed", 
"updateMask", ))) @@ -8449,7 +8540,7 @@ def test_update_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8488,7 +8579,7 @@ def test_update_feed_rest_interceptors(null_interceptor): def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8508,7 +8599,7 @@ def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asse def test_update_feed_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8546,7 +8637,7 @@ def test_update_feed_rest_flattened(): def test_update_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8561,7 +8652,7 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): def test_update_feed_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8572,7 +8663,7 @@ def test_update_feed_rest_error(): ]) def test_delete_feed_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ 
-8613,14 +8704,14 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8628,7 +8719,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8667,7 +8758,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR def test_delete_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.delete_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -8676,7 +8767,7 @@ def test_delete_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_feed_rest_interceptors(null_interceptor): 
transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8710,7 +8801,7 @@ def test_delete_feed_rest_interceptors(null_interceptor): def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8730,7 +8821,7 @@ def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asse def test_delete_feed_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8766,7 +8857,7 @@ def test_delete_feed_rest_flattened(): def test_delete_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8781,7 +8872,7 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): def test_delete_feed_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8792,7 +8883,7 @@ def test_delete_feed_rest_error(): ]) def test_search_all_resources_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8838,14 +8929,14 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se # verify fields with default values are dropped - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["scope"] = 'scope_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) jsonified_request.update(unset_fields) @@ -8855,7 +8946,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se assert jsonified_request["scope"] == 'scope_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8897,7 +8988,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se def test_search_all_resources_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.search_all_resources._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) @@ -8906,7 +8997,7 @@ def test_search_all_resources_rest_unset_required_fields(): 
@pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_resources_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8945,7 +9036,7 @@ def test_search_all_resources_rest_interceptors(null_interceptor): def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8965,7 +9056,7 @@ def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_ def test_search_all_resources_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9005,7 +9096,7 @@ def test_search_all_resources_rest_flattened(): def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9022,7 +9113,7 @@ def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): def test_search_all_resources_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9088,7 +9179,7 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): ]) def test_search_all_iam_policies_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9134,14 +9225,14 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["scope"] = 'scope_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) jsonified_request.update(unset_fields) @@ -9151,7 +9242,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service assert jsonified_request["scope"] == 'scope_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9193,7 +9284,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service def test_search_all_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) @@ -9202,7 +9293,7 @@ def test_search_all_iam_policies_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9241,7 +9332,7 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9261,7 +9352,7 @@ def 
test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', reque def test_search_all_iam_policies_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9300,7 +9391,7 @@ def test_search_all_iam_policies_rest_flattened(): def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9316,7 +9407,7 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9382,7 +9473,7 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): ]) def test_analyze_iam_policy_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9427,12 +9518,12 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) jsonified_request.update(unset_fields) @@ -9440,7 +9531,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9482,7 +9573,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal def test_analyze_iam_policy_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) @@ -9491,7 +9582,7 @@ def test_analyze_iam_policy_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_iam_policy_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9530,7 +9621,7 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): client = 
AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9550,7 +9641,7 @@ def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_ty def test_analyze_iam_policy_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -9561,7 +9652,7 @@ def test_analyze_iam_policy_rest_error(): ]) def test_analyze_iam_policy_longrunning_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9601,18 +9692,18 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9652,7 +9743,7 @@ def 
test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) @@ -9661,7 +9752,7 @@ def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9701,7 +9792,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9721,7 +9812,7 @@ def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest' def test_analyze_iam_policy_longrunning_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -9732,7 +9823,7 @@ def test_analyze_iam_policy_longrunning_rest_error(): ]) def test_analyze_move_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9778,7 +9869,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov # verify fields with default values are dropped assert "destinationParent" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_move._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9788,7 +9879,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov jsonified_request["resource"] = 'resource_value' jsonified_request["destinationParent"] = 'destination_parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_move._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("destination_parent", "view", )) jsonified_request.update(unset_fields) @@ -9800,7 +9891,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov assert jsonified_request["destinationParent"] == 'destination_parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9846,7 +9937,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov def test_analyze_move_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.analyze_move._get_unset_required_fields({}) assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) @@ -9855,7 +9946,7 @@ def test_analyze_move_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_move_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9894,7 +9985,7 @@ def test_analyze_move_rest_interceptors(null_interceptor): def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9914,7 +10005,7 @@ def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=ass def 
test_analyze_move_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -9925,7 +10016,7 @@ def test_analyze_move_rest_error(): ]) def test_query_assets_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9973,14 +10064,14 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9988,7 +10079,7 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10031,7 +10122,7 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset def test_query_assets_rest_unset_required_fields(): - transport = 
transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.query_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", ))) @@ -10040,7 +10131,7 @@ def test_query_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_query_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10079,7 +10170,7 @@ def test_query_assets_rest_interceptors(null_interceptor): def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.QueryAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10099,7 +10190,7 @@ def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=ass def test_query_assets_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -10110,7 +10201,7 @@ def test_query_assets_rest_error(): ]) def test_create_saved_query_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10228,7 +10319,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea # verify fields with default values are dropped assert "savedQueryId" not in jsonified_request - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -10238,7 +10329,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea jsonified_request["parent"] = 'parent_value' jsonified_request["savedQueryId"] = 'saved_query_id_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("saved_query_id", )) jsonified_request.update(unset_fields) @@ -10250,7 +10341,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea assert jsonified_request["savedQueryId"] == 'saved_query_id_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10297,7 +10388,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea def test_create_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.create_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) @@ -10306,7 +10397,7 @@ 
def test_create_saved_query_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10345,7 +10436,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10365,7 +10456,7 @@ def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_ty def test_create_saved_query_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10405,7 +10496,7 @@ def test_create_saved_query_rest_flattened(): def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10422,7 +10513,7 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): def test_create_saved_query_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -10433,7 +10524,7 @@ def test_create_saved_query_rest_error(): ]) def test_get_saved_query_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10485,14 +10576,14 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10500,7 +10591,7 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10542,7 +10633,7 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave def test_get_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -10551,7 +10642,7 @@ def test_get_saved_query_rest_unset_required_fields(): 
@pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10590,7 +10681,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10610,7 +10701,7 @@ def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type= def test_get_saved_query_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10648,7 +10739,7 @@ def test_get_saved_query_rest_flattened(): def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10663,7 +10754,7 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): def test_get_saved_query_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -10674,7 +10765,7 @@ def test_get_saved_query_rest_error(): ]) def test_list_saved_queries_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10720,14 +10811,14 @@ def 
test_list_saved_queries_rest_required_fields(request_type=asset_service.List # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_saved_queries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_saved_queries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -10737,7 +10828,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10779,7 +10870,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List def test_list_saved_queries_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_saved_queries._get_unset_required_fields({}) assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) @@ -10788,7 +10879,7 @@ def 
test_list_saved_queries_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10827,7 +10918,7 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10847,7 +10938,7 @@ def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_ty def test_list_saved_queries_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10885,7 +10976,7 @@ def test_list_saved_queries_rest_flattened(): def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10900,7 +10991,7 @@ def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): def test_list_saved_queries_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10966,7 +11057,7 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): ]) def test_update_saved_query_rest(request_type): client = AssetServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11081,12 +11172,12 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) @@ -11094,7 +11185,7 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -11137,7 +11228,7 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda def test_update_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.update_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) @@ -11146,7 +11237,7 @@ def test_update_saved_query_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11185,7 +11276,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11205,7 +11296,7 @@ def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_ty def 
test_update_saved_query_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11244,7 +11335,7 @@ def test_update_saved_query_rest_flattened(): def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11260,7 +11351,7 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): def test_update_saved_query_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -11271,7 +11362,7 @@ def test_update_saved_query_rest_error(): ]) def test_delete_saved_query_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11312,14 +11403,14 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_saved_query._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11327,7 +11418,7 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -11366,7 +11457,7 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele def test_delete_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.delete_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -11375,7 +11466,7 @@ def test_delete_saved_query_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11409,7 +11500,7 @@ def test_delete_saved_query_rest_interceptors(null_interceptor): def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11429,7 +11520,7 @@ def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_ty def test_delete_saved_query_rest_flattened(): client = 
AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11465,7 +11556,7 @@ def test_delete_saved_query_rest_flattened(): def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11480,7 +11571,7 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): def test_delete_saved_query_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -11491,7 +11582,7 @@ def test_delete_saved_query_rest_error(): ]) def test_batch_get_effective_iam_policies_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11537,7 +11628,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse # verify fields with default values are dropped assert "names" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11547,7 +11638,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse jsonified_request["scope"] = 'scope_value' jsonified_request["names"] = 'names_value' - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("names", )) jsonified_request.update(unset_fields) @@ -11559,7 +11650,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse assert jsonified_request["names"] == 'names_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -11605,7 +11696,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse def test_batch_get_effective_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) @@ -11614,7 +11705,7 @@ def test_batch_get_effective_iam_policies_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11653,7 +11744,7 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): 
def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11673,7 +11764,7 @@ def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'res def test_batch_get_effective_iam_policies_rest_error(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -11684,7 +11775,7 @@ def test_batch_get_effective_iam_policies_rest_error(): ]) def test_analyze_org_policies_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11732,7 +11823,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11742,7 +11833,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An jsonified_request["scope"] = 'scope_value' jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -11754,7 +11845,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -11800,7 +11891,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An def test_analyze_org_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @@ -11809,7 +11900,7 @@ def test_analyze_org_policies_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11848,7 +11939,7 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = 
AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11868,7 +11959,7 @@ def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_ def test_analyze_org_policies_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -11908,7 +11999,7 @@ def test_analyze_org_policies_rest_flattened(): def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11925,7 +12016,7 @@ def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): def test_analyze_org_policies_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11991,7 +12082,7 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): ]) def test_analyze_org_policy_governed_containers_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -12039,7 +12130,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12049,7 +12140,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ jsonified_request["scope"] = 'scope_value' jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -12061,7 +12152,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -12107,7 +12198,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @@ -12116,7 +12207,7 @@ def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def 
test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -12155,7 +12246,7 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12175,7 +12266,7 @@ def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str def test_analyze_org_policy_governed_containers_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -12215,7 +12306,7 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12232,7 +12323,7 @@ def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12298,7 +12389,7 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res ]) def 
test_analyze_org_policy_governed_assets_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -12346,7 +12437,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12356,7 +12447,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as jsonified_request["scope"] = 'scope_value' jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -12368,7 +12459,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -12414,7 +12505,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @@ -12423,7 +12514,7 @@ def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -12462,7 +12553,7 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12482,7 +12573,7 @@ def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'r def test_analyze_org_policy_governed_assets_rest_flattened(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -12522,7 +12613,7 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12539,7 +12630,7 @@ def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12602,17 +12693,17 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = AssetServiceClient( @@ -12622,7 +12713,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -12633,17 +12724,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = AssetServiceClient( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = AssetServiceClient( @@ -12655,7 +12746,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = AssetServiceClient(transport=transport) assert client.transport is transport @@ -12663,13 +12754,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.AssetServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.AssetServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -12682,7 +12773,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -12692,14 +12783,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = AssetServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -12710,7 +12801,7 @@ def test_asset_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -12720,7 +12811,7 @@ def test_asset_service_base_transport(): with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: Transport.return_value = None transport = transports.AssetServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -12776,7 +12867,7 @@ def test_asset_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.AssetServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -12794,7 +12885,7 @@ def test_asset_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.AssetServiceTransport() adc.assert_called_once() @@ -12802,7 +12893,7 @@ def test_asset_service_base_transport_with_adc(): def test_asset_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) AssetServiceClient() adc.assert_called_once_with( scopes=None, @@ -12824,7 +12915,7 @@ def test_asset_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -12869,7 +12960,7 @@ def test_asset_service_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -12898,7 +12989,7 @@ def test_asset_service_transport_create_channel(transport_class, grpc_helpers): def test_asset_service_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -12936,7 +13027,7 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( ) def test_asset_service_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.AssetServiceRestTransport ( credentials=cred, @@ -12947,7 +13038,7 @@ def test_asset_service_http_transport_client_cert_source_for_mtls(): def test_asset_service_rest_lro_client(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) transport = client.transport @@ -12969,7 +13060,7 @@ def test_asset_service_rest_lro_client(): ]) def test_asset_service_host_no_port(transport_name): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), transport=transport_name, ) @@ -12986,7 +13077,7 @@ def test_asset_service_host_no_port(transport_name): ]) def test_asset_service_host_with_port(transport_name): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), transport=transport_name, ) @@ -13000,8 +13091,8 @@ def test_asset_service_host_with_port(transport_name): "rest", ]) def test_asset_service_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() + creds1 = _AnonymousCredentialsWithUniverseDomain() + creds2 = _AnonymousCredentialsWithUniverseDomain() client1 = 
AssetServiceClient( credentials=creds1, transport=transport_name, @@ -13119,7 +13210,7 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -13191,7 +13282,7 @@ def test_asset_service_transport_channel_mtls_with_adc( def test_asset_service_grpc_lro_client(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) transport = client.transport @@ -13208,7 +13299,7 @@ def test_asset_service_grpc_lro_client(): def test_asset_service_grpc_lro_async_client(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc_asyncio', ) transport = client.transport @@ -13445,7 +13536,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -13453,7 +13544,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: transport_class = AssetServiceClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -13461,7 +13552,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def 
test_transport_close_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -13472,7 +13563,7 @@ async def test_transport_close_async(): def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -13494,7 +13585,7 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op ]) def test_get_operation_rest(request_type): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} @@ -13520,7 +13611,7 @@ def test_get_operation_rest(request_type): def test_get_operation(transport: str = "grpc"): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13542,7 +13633,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13566,7 +13657,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = AssetServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13590,7 +13681,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13615,7 +13706,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -13631,7 +13722,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -13655,7 +13746,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -13670,7 +13761,7 @@ def test_client_ctx(): ] for transport in transports: client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -13698,7 +13789,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 2d7d38410dbe..977ea7533428 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -55,8 +55,12 @@ class IAMCredentialsAsyncClient: _client: IAMCredentialsClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = IAMCredentialsClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = IAMCredentialsClient._DEFAULT_UNIVERSE service_account_path = staticmethod(IAMCredentialsClient.service_account_path) parse_service_account_path = staticmethod(IAMCredentialsClient.parse_service_account_path) @@ -152,11 +156,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial(type(IAMCredentialsClient).get_transport_class, type(IAMCredentialsClient)) def __init__(self, *, @@ -165,7 +178,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the iam credentials client. + """Instantiates the iam credentials async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -176,23 +189,41 @@ def __init__(self, *, transport (Union[str, ~.IAMCredentialsTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). 
However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -354,6 +385,9 @@ async def sample_generate_access_token(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -508,6 +542,9 @@ async def sample_generate_id_token(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -648,6 +685,9 @@ async def sample_sign_blob(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -791,6 +831,9 @@ async def sample_sign_jwt(): )), ) + # Validate the universe domain. 
+ self._client._validate_universe_domain() + # Send the request. response = await rpc( request, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index ee7c57cbf68f..c2f06015714a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -121,11 +121,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "iamcredentials.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "iamcredentials.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -307,8 +311,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -318,11 +322,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -342,37 +347,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". 
Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = IAMCredentialsClient.DEFAULT_ENDPOINT + api_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = IAMCredentialsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + IAMCredentialsClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, IAMCredentialsTransport]] = None, @@ -393,22 +472,32 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -426,9 +515,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = IAMCredentialsClient._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = IAMCredentialsClient._read_environment_variables() self._client_cert_source = IAMCredentialsClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = IAMCredentialsClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = IAMCredentialsClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -437,7 +532,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, IAMCredentialsTransport): + transport_provided = isinstance(transport, IAMCredentialsTransport) + if transport_provided: # transport is a IAMCredentialsTransport instance. 
if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -447,14 +543,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(IAMCredentialsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + IAMCredentialsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -609,6 +714,9 @@ def sample_generate_access_token(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -756,6 +864,9 @@ def sample_generate_id_token(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -889,6 +1000,9 @@ def sample_sign_blob(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1025,6 +1139,9 @@ def sample_sign_jwt(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index f7b9b4714fb9..451a5fed6a1e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -55,7 +55,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'iamcredentials.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -110,6 +110,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index bb2fe56cfab9..e9864f159a13 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -70,7 +70,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'iamcredentials.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 4cd9e1f2b9b7..2ea028c0b641 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -114,7 +114,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'iamcredentials.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 0b8f029df97a..d48dc503440f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -223,7 +223,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'iamcredentials.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 32588a319d5f..f8b5556ffa4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -28,6 +28,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -56,13 +57,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". 
+class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -79,14 +95,13 @@ def test__get_default_mtls_endpoint(): assert IAMCredentialsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert IAMCredentialsClient._read_environment_variables() == (False, "auto") + assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert IAMCredentialsClient._read_environment_variables() == (True, "auto") + assert IAMCredentialsClient._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "auto") + assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -94,19 +109,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "never") + assert IAMCredentialsClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "always") + assert 
IAMCredentialsClient._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert IAMCredentialsClient._read_environment_variables() == (False, "auto") + assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: IAMCredentialsClient._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert IAMCredentialsClient._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -120,19 +138,89 @@ def test__get_client_cert_source(): assert IAMCredentialsClient._get_client_cert_source(None, True) is mock_default_cert_source assert IAMCredentialsClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE + default_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = 
IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert IAMCredentialsClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT + assert IAMCredentialsClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert IAMCredentialsClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert IAMCredentialsClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert IAMCredentialsClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert IAMCredentialsClient._get_universe_domain(None, None) == IAMCredentialsClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + IAMCredentialsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- assert IAMCredentialsClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, "auto") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, None, "auto") == IAMCredentialsClient.DEFAULT_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, None, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, "always") == IAMCredentialsClient.DEFAULT_MTLS_ENDPOINT - assert IAMCredentialsClient._get_api_endpoint(None, None, "never") == IAMCredentialsClient.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), + (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
@pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), @@ -140,7 +228,7 @@ def test__get_api_endpoint(): (IAMCredentialsClient, "rest"), ]) def test_iam_credentials_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -179,7 +267,7 @@ def test_iam_credentials_client_service_account_always_use_jwt(transport_class, (IAMCredentialsClient, "rest"), ]) def test_iam_credentials_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -215,13 +303,13 @@ def test_iam_credentials_client_get_transport_class(): (IAMCredentialsAsyncClient, transports.IAMCredentialsGrpcAsyncIOTransport, "grpc_asyncio"), (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), ]) -@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) def test_iam_credentials_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(IAMCredentialsClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -257,7 +345,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -305,7 +393,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -321,7 +409,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -338,8 +426,8 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "true"), (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest", "false"), ]) -@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -355,7 +443,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -379,7 +467,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -408,7 +496,7 @@ def test_iam_credentials_client_mtls_env_auto(client_class, transport_class, tra patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -488,55 +576,58 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class", [ 
IAMCredentialsClient, IAMCredentialsAsyncClient ]) -@mock.patch.object(IAMCredentialsClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsClient)) -@mock.patch.object(IAMCredentialsAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(IAMCredentialsAsyncClient)) +@mock.patch.object(IAMCredentialsClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsClient)) +@mock.patch.object(IAMCredentialsAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(IAMCredentialsAsyncClient)) def test_iam_credentials_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE + default_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = IAMCredentialsClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), @@ -554,7 +645,7 @@ def test_iam_credentials_client_client_options_scopes(client_class, transport_cl patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -580,7 +671,7 @@ def test_iam_credentials_client_client_options_credentials_file(client_class, tr patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -624,7 +715,7 @@ def 
test_iam_credentials_client_create_channel_credentials_file(client_class, tr patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -641,8 +732,8 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -670,7 +761,7 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr ]) def test_generate_access_token(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -702,7 +793,7 @@ def test_generate_access_token_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -718,7 +809,7 @@ def test_generate_access_token_empty_call(): @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -753,7 +844,7 @@ async def test_generate_access_token_async_from_dict(): def test_generate_access_token_field_headers(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -785,7 +876,7 @@ def test_generate_access_token_field_headers(): @pytest.mark.asyncio async def test_generate_access_token_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -816,7 +907,7 @@ async def test_generate_access_token_field_headers_async(): def test_generate_access_token_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -852,7 +943,7 @@ def test_generate_access_token_flattened(): def test_generate_access_token_flattened_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -869,7 +960,7 @@ def test_generate_access_token_flattened_error(): @pytest.mark.asyncio async def test_generate_access_token_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -907,7 +998,7 @@ async def test_generate_access_token_flattened_async(): @pytest.mark.asyncio async def test_generate_access_token_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -928,7 +1019,7 @@ async def test_generate_access_token_flattened_error_async(): ]) def test_generate_id_token(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -960,7 +1051,7 @@ def test_generate_id_token_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -976,7 +1067,7 @@ def test_generate_id_token_empty_call(): @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1011,7 +1102,7 @@ async def test_generate_id_token_async_from_dict(): def test_generate_id_token_field_headers(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1043,7 +1134,7 @@ def test_generate_id_token_field_headers(): @pytest.mark.asyncio async def test_generate_id_token_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1074,7 +1165,7 @@ async def test_generate_id_token_field_headers_async(): def test_generate_id_token_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1112,7 +1203,7 @@ def test_generate_id_token_flattened(): def test_generate_id_token_flattened_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1129,7 +1220,7 @@ def test_generate_id_token_flattened_error(): @pytest.mark.asyncio async def test_generate_id_token_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1169,7 +1260,7 @@ async def test_generate_id_token_flattened_async(): @pytest.mark.asyncio async def test_generate_id_token_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1190,7 +1281,7 @@ async def test_generate_id_token_flattened_error_async(): ]) def test_sign_blob(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1224,7 +1315,7 @@ def test_sign_blob_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1240,7 +1331,7 @@ def test_sign_blob_empty_call(): @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1277,7 +1368,7 @@ async def test_sign_blob_async_from_dict(): def test_sign_blob_field_headers(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1309,7 +1400,7 @@ def test_sign_blob_field_headers(): @pytest.mark.asyncio async def test_sign_blob_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1340,7 +1431,7 @@ async def test_sign_blob_field_headers_async(): def test_sign_blob_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1374,7 +1465,7 @@ def test_sign_blob_flattened(): def test_sign_blob_flattened_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1390,7 +1481,7 @@ def test_sign_blob_flattened_error(): @pytest.mark.asyncio async def test_sign_blob_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1426,7 +1517,7 @@ async def test_sign_blob_flattened_async(): @pytest.mark.asyncio async def test_sign_blob_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1446,7 +1537,7 @@ async def test_sign_blob_flattened_error_async(): ]) def test_sign_jwt(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1480,7 +1571,7 @@ def test_sign_jwt_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1496,7 +1587,7 @@ def test_sign_jwt_empty_call(): @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1533,7 +1624,7 @@ async def test_sign_jwt_async_from_dict(): def test_sign_jwt_field_headers(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1565,7 +1656,7 @@ def test_sign_jwt_field_headers(): @pytest.mark.asyncio async def test_sign_jwt_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1596,7 +1687,7 @@ async def test_sign_jwt_field_headers_async(): def test_sign_jwt_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1630,7 +1721,7 @@ def test_sign_jwt_flattened(): def test_sign_jwt_flattened_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1646,7 +1737,7 @@ def test_sign_jwt_flattened_error(): @pytest.mark.asyncio async def test_sign_jwt_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1682,7 +1773,7 @@ async def test_sign_jwt_flattened_async(): @pytest.mark.asyncio async def test_sign_jwt_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1702,7 +1793,7 @@ async def test_sign_jwt_flattened_error_async(): ]) def test_generate_access_token_rest(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -1749,7 +1840,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1757,7 +1848,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate jsonified_request["name"] = 'name_value' 
jsonified_request["scope"] = 'scope_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1767,7 +1858,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate assert jsonified_request["scope"] == 'scope_value' client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -1810,7 +1901,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate def test_generate_access_token_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.generate_access_token._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "scope", ))) @@ -1819,7 +1910,7 @@ def test_generate_access_token_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_access_token_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -1858,7 +1949,7 @@ def test_generate_access_token_rest_interceptors(null_interceptor): def test_generate_access_token_rest_bad_request(transport: str = 'rest', 
request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1878,7 +1969,7 @@ def test_generate_access_token_rest_bad_request(transport: str = 'rest', request def test_generate_access_token_rest_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -1919,7 +2010,7 @@ def test_generate_access_token_rest_flattened(): def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1937,7 +2028,7 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): def test_generate_access_token_rest_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -1948,7 +2039,7 @@ def test_generate_access_token_rest_error(): ]) def test_generate_id_token_rest(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -1995,7 +2086,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2003,7 +2094,7 @@ def 
test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo jsonified_request["name"] = 'name_value' jsonified_request["audience"] = 'audience_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2013,7 +2104,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo assert jsonified_request["audience"] == 'audience_value' client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -2056,7 +2147,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo def test_generate_id_token_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.generate_id_token._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "audience", ))) @@ -2065,7 +2156,7 @@ def test_generate_id_token_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_id_token_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -2104,7 +2195,7 @@ def 
test_generate_id_token_rest_interceptors(null_interceptor): def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2124,7 +2215,7 @@ def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_typ def test_generate_id_token_rest_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -2165,7 +2256,7 @@ def test_generate_id_token_rest_flattened(): def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2183,7 +2274,7 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): def test_generate_id_token_rest_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -2194,7 +2285,7 @@ def test_generate_id_token_rest_error(): ]) def test_sign_blob_rest(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -2243,7 +2334,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_blob._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify 
required fields with default values are now present @@ -2251,7 +2342,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): jsonified_request["name"] = 'name_value' jsonified_request["payload"] = b'payload_blob' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_blob._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2261,7 +2352,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): assert jsonified_request["payload"] == b'payload_blob' client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -2304,7 +2395,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): def test_sign_blob_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.sign_blob._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "payload", ))) @@ -2313,7 +2404,7 @@ def test_sign_blob_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_sign_blob_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -2352,7 +2443,7 @@ def 
test_sign_blob_rest_interceptors(null_interceptor): def test_sign_blob_rest_bad_request(transport: str = 'rest', request_type=common.SignBlobRequest): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2372,7 +2463,7 @@ def test_sign_blob_rest_bad_request(transport: str = 'rest', request_type=common def test_sign_blob_rest_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -2412,7 +2503,7 @@ def test_sign_blob_rest_flattened(): def test_sign_blob_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2429,7 +2520,7 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): def test_sign_blob_rest_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -2440,7 +2531,7 @@ def test_sign_blob_rest_error(): ]) def test_sign_jwt_rest(request_type): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -2489,7 +2580,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2497,7 +2588,7 @@ def 
test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): jsonified_request["name"] = 'name_value' jsonified_request["payload"] = 'payload_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2507,7 +2598,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): assert jsonified_request["payload"] == 'payload_value' client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -2550,7 +2641,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): def test_sign_jwt_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.sign_jwt._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "payload", ))) @@ -2559,7 +2650,7 @@ def test_sign_jwt_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_sign_jwt_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -2598,7 +2689,7 @@ def test_sign_jwt_rest_interceptors(null_interceptor): def test_sign_jwt_rest_bad_request(transport: str = 
'rest', request_type=common.SignJwtRequest): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2618,7 +2709,7 @@ def test_sign_jwt_rest_bad_request(transport: str = 'rest', request_type=common. def test_sign_jwt_rest_flattened(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -2658,7 +2749,7 @@ def test_sign_jwt_rest_flattened(): def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2675,7 +2766,7 @@ def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): def test_sign_jwt_rest_error(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -2683,17 +2774,17 @@ def test_sign_jwt_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = IAMCredentialsClient( @@ -2703,7 +2794,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -2714,17 +2805,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = IAMCredentialsClient( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = IAMCredentialsClient( @@ -2736,7 +2827,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = IAMCredentialsClient(transport=transport) assert client.transport is transport @@ -2744,13 +2835,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.IAMCredentialsGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.IAMCredentialsGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -2763,7 +2854,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -2773,14 +2864,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = IAMCredentialsClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -2791,7 +2882,7 @@ def test_iam_credentials_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.IAMCredentialsTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -2801,7 +2892,7 @@ def test_iam_credentials_base_transport(): with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport.__init__') as Transport: Transport.return_value = None transport = transports.IAMCredentialsTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -2832,7 +2923,7 @@ def test_iam_credentials_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.IAMCredentialsTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2850,7 +2941,7 @@ def test_iam_credentials_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.IAMCredentialsTransport() adc.assert_called_once() @@ -2858,7 +2949,7 @@ def test_iam_credentials_base_transport_with_adc(): def test_iam_credentials_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) IAMCredentialsClient() adc.assert_called_once_with( scopes=None, @@ -2880,7 +2971,7 @@ def test_iam_credentials_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -2925,7 +3016,7 @@ def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers) with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -2954,7 +3045,7 @@ def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers) def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2992,7 +3083,7 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( ) def test_iam_credentials_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.IAMCredentialsRestTransport ( credentials=cred, @@ -3008,7 +3099,7 @@ def test_iam_credentials_http_transport_client_cert_source_for_mtls(): ]) def test_iam_credentials_host_no_port(transport_name): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com'), transport=transport_name, ) @@ -3025,7 +3116,7 @@ def test_iam_credentials_host_no_port(transport_name): ]) def test_iam_credentials_host_with_port(transport_name): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com:8000'), transport=transport_name, ) @@ -3039,8 +3130,8 @@ def test_iam_credentials_host_with_port(transport_name): "rest", ]) def test_iam_credentials_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() + creds1 = _AnonymousCredentialsWithUniverseDomain() + creds2 = _AnonymousCredentialsWithUniverseDomain() client1 = IAMCredentialsClient( credentials=creds1, transport=transport_name, @@ -3101,7 +3192,7 @@ def test_iam_credentials_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = 
ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -3283,7 +3374,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3291,7 +3382,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: transport_class = IAMCredentialsClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3299,7 +3390,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -3316,7 +3407,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -3331,7 +3422,7 @@ def test_client_ctx(): ] for transport in transports: client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # 
Test client calls underlying transport. @@ -3359,7 +3450,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 1f1ae062d765..c5446b631c63 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -64,8 +64,12 @@ class EventarcAsyncClient: _client: EventarcClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = EventarcClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = EventarcClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = EventarcClient._DEFAULT_UNIVERSE channel_path = staticmethod(EventarcClient.channel_path) parse_channel_path = staticmethod(EventarcClient.parse_channel_path) @@ -179,11 +183,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + get_transport_class = functools.partial(type(EventarcClient).get_transport_class, type(EventarcClient)) def __init__(self, *, @@ -192,7 +205,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the eventarc client. + """Instantiates the eventarc async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -203,23 +216,41 @@ def __init__(self, *, transport (Union[str, ~.EventarcTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -322,6 +353,9 @@ async def sample_get_trigger(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -426,6 +460,9 @@ async def sample_list_triggers(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -572,6 +609,9 @@ async def sample_create_trigger(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -709,6 +749,9 @@ async def sample_update_trigger(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -837,6 +880,9 @@ async def sample_delete_trigger(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -952,6 +998,9 @@ async def sample_get_channel(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1056,6 +1105,9 @@ async def sample_list_channels(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1202,6 +1254,9 @@ async def sample_create_channel(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1331,6 +1386,9 @@ async def sample_update_channel(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1451,6 +1509,9 @@ async def sample_delete_channel(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1560,6 +1621,9 @@ async def sample_get_provider(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1664,6 +1728,9 @@ async def sample_list_providers(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1779,6 +1846,9 @@ async def sample_get_channel_connection(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1884,6 +1954,9 @@ async def sample_list_channel_connections(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -2030,6 +2103,9 @@ async def sample_create_channel_connection(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2148,6 +2224,9 @@ async def sample_delete_channel_connection(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2263,6 +2342,9 @@ async def sample_get_google_channel_config(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2383,6 +2465,9 @@ async def sample_update_google_channel_config(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2438,6 +2523,9 @@ async def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2489,6 +2577,9 @@ async def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2544,6 +2635,9 @@ async def delete_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2594,6 +2688,9 @@ async def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2707,6 +2804,9 @@ async def set_iam_policy( (("resource", request.resource),)), ) + # Validate the universe domain. 
+ self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2825,6 +2925,9 @@ async def get_iam_policy( (("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2881,6 +2984,9 @@ async def test_iam_permissions( (("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2932,6 +3038,9 @@ async def get_location( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2983,6 +3092,9 @@ async def list_locations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 5da6e3bf190e..fd747140bcd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -130,11 +130,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = "eventarc.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "eventarc.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -415,8 +419,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -426,11 +430,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. 
@@ -450,37 +455,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = EventarcClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = EventarcClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = EventarcClient.DEFAULT_ENDPOINT + api_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. 
+ universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = EventarcClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = EventarcClient._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + EventarcClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, EventarcTransport]] = None, @@ -501,22 +580,32 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. 
@@ -534,9 +623,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = EventarcClient._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = EventarcClient._read_environment_variables() self._client_cert_source = EventarcClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = EventarcClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = EventarcClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -545,7 +640,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, EventarcTransport): + transport_provided = isinstance(transport, EventarcTransport) + if transport_provided: # transport is a EventarcTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -555,14 +651,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(EventarcTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + EventarcClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -665,6 +770,9 @@ def sample_get_trigger(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -769,6 +877,9 @@ def sample_list_triggers(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -915,6 +1026,9 @@ def sample_create_trigger(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1052,6 +1166,9 @@ def sample_update_trigger(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1180,6 +1297,9 @@ def sample_delete_trigger(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1295,6 +1415,9 @@ def sample_get_channel(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1399,6 +1522,9 @@ def sample_list_channels(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1545,6 +1671,9 @@ def sample_create_channel(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1674,6 +1803,9 @@ def sample_update_channel(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1794,6 +1926,9 @@ def sample_delete_channel(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1903,6 +2038,9 @@ def sample_get_provider(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2007,6 +2145,9 @@ def sample_list_providers(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2122,6 +2263,9 @@ def sample_get_channel_connection(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2227,6 +2371,9 @@ def sample_list_channel_connections(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2373,6 +2520,9 @@ def sample_create_channel_connection(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2491,6 +2641,9 @@ def sample_delete_channel_connection(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2606,6 +2759,9 @@ def sample_get_google_channel_config(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2726,6 +2882,9 @@ def sample_update_google_channel_config(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -2794,6 +2953,9 @@ def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2845,6 +3007,9 @@ def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2900,6 +3065,9 @@ def delete_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2950,6 +3118,9 @@ def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3063,6 +3234,9 @@ def set_iam_policy( (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3181,6 +3355,9 @@ def get_iam_policy( (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3237,6 +3414,9 @@ def test_iam_permissions( (("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3288,6 +3468,9 @@ def get_location( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3339,6 +3522,9 @@ def list_locations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 7096c28dcc47..a225d7dff0a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -66,7 +66,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'eventarc.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -121,6 +121,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 1b00e781069f..768321d9bb89 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -74,7 +74,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'eventarc.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index b6e2874c4cd2..e94825227abd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -118,7 +118,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'eventarc.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 301d6c5f2b97..b48f92031749 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -744,7 +744,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'eventarc.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 889f2f175325..1b06b0209d32 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -28,6 +28,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -76,13 +77,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". +class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -99,14 +115,13 @@ def test__get_default_mtls_endpoint(): assert EventarcClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert EventarcClient._read_environment_variables() == (False, "auto") + assert EventarcClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert EventarcClient._read_environment_variables() == (True, "auto") + assert EventarcClient._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert EventarcClient._read_environment_variables() == (False, "auto") + assert EventarcClient._read_environment_variables() 
== (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -114,19 +129,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert EventarcClient._read_environment_variables() == (False, "never") + assert EventarcClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert EventarcClient._read_environment_variables() == (False, "always") + assert EventarcClient._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert EventarcClient._read_environment_variables() == (False, "auto") + assert EventarcClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: EventarcClient._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert EventarcClient._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -140,19 +158,89 @@ def test__get_client_cert_source(): assert EventarcClient._get_client_cert_source(None, True) is mock_default_cert_source assert EventarcClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) 
-@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = EventarcClient._DEFAULT_UNIVERSE + default_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert EventarcClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == EventarcClient.DEFAULT_MTLS_ENDPOINT + assert EventarcClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert EventarcClient._get_api_endpoint(None, None, default_universe, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT + assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT + assert EventarcClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert EventarcClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + EventarcClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert EventarcClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert EventarcClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert EventarcClient._get_universe_domain(None, None) == EventarcClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + EventarcClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." - assert EventarcClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, "auto") == EventarcClient.DEFAULT_MTLS_ENDPOINT - assert EventarcClient._get_api_endpoint(None, None, "auto") == EventarcClient.DEFAULT_ENDPOINT - assert EventarcClient._get_api_endpoint(None, None, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT - assert EventarcClient._get_api_endpoint(None, mock_client_cert_source, "always") == EventarcClient.DEFAULT_MTLS_ENDPOINT - assert EventarcClient._get_api_endpoint(None, None, "never") == EventarcClient.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (EventarcClient, transports.EventarcGrpcTransport, "grpc"), + (EventarcClient, transports.EventarcRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), @@ -160,7 +248,7 @@ def test__get_api_endpoint(): (EventarcClient, "rest"), ]) def test_eventarc_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -199,7 +287,7 @@ def test_eventarc_client_service_account_always_use_jwt(transport_class, transpo (EventarcClient, "rest"), ]) def test_eventarc_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -235,13 +323,13 @@ def test_eventarc_client_get_transport_class(): (EventarcAsyncClient, transports.EventarcGrpcAsyncIOTransport, "grpc_asyncio"), (EventarcClient, transports.EventarcRestTransport, "rest"), ]) -@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) -@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) def test_eventarc_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(EventarcClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -277,7 +365,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -325,7 +413,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -341,7 +429,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -358,8 +446,8 @@ def test_eventarc_client_client_options(client_class, transport_class, transport (EventarcClient, transports.EventarcRestTransport, "rest", "true"), (EventarcClient, transports.EventarcRestTransport, "rest", "false"), ]) -@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) -@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, 
"_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -375,7 +463,7 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -399,7 +487,7 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -428,7 +516,7 @@ def test_eventarc_client_mtls_env_auto(client_class, transport_class, transport_ patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -508,55 +596,58 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class", [ EventarcClient, EventarcAsyncClient ]) -@mock.patch.object(EventarcClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcClient)) 
-@mock.patch.object(EventarcAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(EventarcAsyncClient)) +@mock.patch.object(EventarcClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcClient)) +@mock.patch.object(EventarcAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(EventarcAsyncClient)) def test_eventarc_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = EventarcClient._DEFAULT_UNIVERSE + default_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = EventarcClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (EventarcClient, transports.EventarcGrpcTransport, "grpc"), @@ -574,7 +665,7 @@ def test_eventarc_client_client_options_scopes(client_class, transport_class, tr patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -600,7 +691,7 @@ def test_eventarc_client_client_options_credentials_file(client_class, transport patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -644,7 +735,7 @@ def 
test_eventarc_client_create_channel_credentials_file(client_class, transport patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -661,8 +752,8 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -690,7 +781,7 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport ]) def test_get_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -730,7 +821,7 @@ def test_get_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -746,7 +837,7 @@ def test_get_trigger_empty_call(): @pytest.mark.asyncio async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -789,7 +880,7 @@ async def test_get_trigger_async_from_dict(): def test_get_trigger_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -821,7 +912,7 @@ def test_get_trigger_field_headers(): @pytest.mark.asyncio async def test_get_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -852,7 +943,7 @@ async def test_get_trigger_field_headers_async(): def test_get_trigger_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -878,7 +969,7 @@ def test_get_trigger_flattened(): def test_get_trigger_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -892,7 +983,7 @@ def test_get_trigger_flattened_error(): @pytest.mark.asyncio async def test_get_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -920,7 +1011,7 @@ async def test_get_trigger_flattened_async(): @pytest.mark.asyncio async def test_get_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -938,7 +1029,7 @@ async def test_get_trigger_flattened_error_async(): ]) def test_list_triggers(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -972,7 +1063,7 @@ def test_list_triggers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -988,7 +1079,7 @@ def test_list_triggers_empty_call(): @pytest.mark.asyncio async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1025,7 +1116,7 @@ async def test_list_triggers_async_from_dict(): def test_list_triggers_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1057,7 +1148,7 @@ def test_list_triggers_field_headers(): @pytest.mark.asyncio async def test_list_triggers_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1088,7 +1179,7 @@ async def test_list_triggers_field_headers_async(): def test_list_triggers_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1114,7 +1205,7 @@ def test_list_triggers_flattened(): def test_list_triggers_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1128,7 +1219,7 @@ def test_list_triggers_flattened_error(): @pytest.mark.asyncio async def test_list_triggers_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1156,7 +1247,7 @@ async def test_list_triggers_flattened_async(): @pytest.mark.asyncio async def test_list_triggers_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1170,7 +1261,7 @@ async def test_list_triggers_flattened_error_async(): def test_list_triggers_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1223,7 +1314,7 @@ def test_list_triggers_pager(transport_name: str = "grpc"): for i in results) def test_list_triggers_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1266,7 +1357,7 @@ def test_list_triggers_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_triggers_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1315,7 +1406,7 @@ async def test_list_triggers_async_pager(): @pytest.mark.asyncio async def test_list_triggers_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1366,7 +1457,7 @@ async def test_list_triggers_async_pages(): ]) def test_create_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1395,7 +1486,7 @@ def test_create_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1411,7 +1502,7 @@ def test_create_trigger_empty_call(): @pytest.mark.asyncio async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1445,7 +1536,7 @@ async def test_create_trigger_async_from_dict(): def test_create_trigger_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1477,7 +1568,7 @@ def test_create_trigger_field_headers(): @pytest.mark.asyncio async def test_create_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-1508,7 +1599,7 @@ async def test_create_trigger_field_headers_async(): def test_create_trigger_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1542,7 +1633,7 @@ def test_create_trigger_flattened(): def test_create_trigger_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1558,7 +1649,7 @@ def test_create_trigger_flattened_error(): @pytest.mark.asyncio async def test_create_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1596,7 +1687,7 @@ async def test_create_trigger_flattened_async(): @pytest.mark.asyncio async def test_create_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1616,7 +1707,7 @@ async def test_create_trigger_flattened_error_async(): ]) def test_update_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1645,7 +1736,7 @@ def test_update_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1661,7 +1752,7 @@ def test_update_trigger_empty_call(): @pytest.mark.asyncio async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1695,7 +1786,7 @@ async def test_update_trigger_async_from_dict(): def test_update_trigger_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1727,7 +1818,7 @@ def test_update_trigger_field_headers(): @pytest.mark.asyncio async def test_update_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1758,7 +1849,7 @@ async def test_update_trigger_field_headers_async(): def test_update_trigger_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1792,7 +1883,7 @@ def test_update_trigger_flattened(): def test_update_trigger_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1808,7 +1899,7 @@ def test_update_trigger_flattened_error(): @pytest.mark.asyncio async def test_update_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1846,7 +1937,7 @@ async def test_update_trigger_flattened_async(): @pytest.mark.asyncio async def test_update_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1866,7 +1957,7 @@ async def test_update_trigger_flattened_error_async(): ]) def test_delete_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1895,7 +1986,7 @@ def test_delete_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1911,7 +2002,7 @@ def test_delete_trigger_empty_call(): @pytest.mark.asyncio async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1945,7 +2036,7 @@ async def test_delete_trigger_async_from_dict(): def test_delete_trigger_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1977,7 +2068,7 @@ def test_delete_trigger_field_headers(): @pytest.mark.asyncio async def test_delete_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2008,7 +2099,7 @@ async def test_delete_trigger_field_headers_async(): def test_delete_trigger_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2038,7 +2129,7 @@ def test_delete_trigger_flattened(): def test_delete_trigger_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2053,7 +2144,7 @@ def test_delete_trigger_flattened_error(): @pytest.mark.asyncio async def test_delete_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2087,7 +2178,7 @@ async def test_delete_trigger_flattened_async(): @pytest.mark.asyncio async def test_delete_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2106,7 +2197,7 @@ async def test_delete_trigger_flattened_error_async(): ]) def test_get_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2149,7 +2240,7 @@ def test_get_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2165,7 +2256,7 @@ def test_get_channel_empty_call(): @pytest.mark.asyncio async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2210,7 +2301,7 @@ async def test_get_channel_async_from_dict(): def test_get_channel_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2242,7 +2333,7 @@ def test_get_channel_field_headers(): @pytest.mark.asyncio async def test_get_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2273,7 +2364,7 @@ async def test_get_channel_field_headers_async(): def test_get_channel_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2299,7 +2390,7 @@ def test_get_channel_flattened(): def test_get_channel_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2313,7 +2404,7 @@ def test_get_channel_flattened_error(): @pytest.mark.asyncio async def test_get_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2341,7 +2432,7 @@ async def test_get_channel_flattened_async(): @pytest.mark.asyncio async def test_get_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2359,7 +2450,7 @@ async def test_get_channel_flattened_error_async(): ]) def test_list_channels(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2393,7 +2484,7 @@ def test_list_channels_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2409,7 +2500,7 @@ def test_list_channels_empty_call(): @pytest.mark.asyncio async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2446,7 +2537,7 @@ async def test_list_channels_async_from_dict(): def test_list_channels_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2478,7 +2569,7 @@ def test_list_channels_field_headers(): @pytest.mark.asyncio async def test_list_channels_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2509,7 +2600,7 @@ async def test_list_channels_field_headers_async(): def test_list_channels_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2535,7 +2626,7 @@ def test_list_channels_flattened(): def test_list_channels_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2549,7 +2640,7 @@ def test_list_channels_flattened_error(): @pytest.mark.asyncio async def test_list_channels_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2577,7 +2668,7 @@ async def test_list_channels_flattened_async(): @pytest.mark.asyncio async def test_list_channels_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2591,7 +2682,7 @@ async def test_list_channels_flattened_error_async(): def test_list_channels_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2644,7 +2735,7 @@ def test_list_channels_pager(transport_name: str = "grpc"): for i in results) def test_list_channels_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2687,7 +2778,7 @@ def test_list_channels_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_channels_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2736,7 +2827,7 @@ async def test_list_channels_async_pager(): @pytest.mark.asyncio async def test_list_channels_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2787,7 +2878,7 @@ async def test_list_channels_async_pages(): ]) def test_create_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2816,7 +2907,7 @@ def test_create_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2832,7 +2923,7 @@ def test_create_channel_empty_call(): @pytest.mark.asyncio async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2866,7 +2957,7 @@ async def test_create_channel_async_from_dict(): def test_create_channel_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2898,7 +2989,7 @@ def test_create_channel_field_headers(): @pytest.mark.asyncio async def test_create_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-2929,7 +3020,7 @@ async def test_create_channel_field_headers_async(): def test_create_channel_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2963,7 +3054,7 @@ def test_create_channel_flattened(): def test_create_channel_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2979,7 +3070,7 @@ def test_create_channel_flattened_error(): @pytest.mark.asyncio async def test_create_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3017,7 +3108,7 @@ async def test_create_channel_flattened_async(): @pytest.mark.asyncio async def test_create_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3037,7 +3128,7 @@ async def test_create_channel_flattened_error_async(): ]) def test_update_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3066,7 +3157,7 @@ def test_update_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3082,7 +3173,7 @@ def test_update_channel_empty_call(): @pytest.mark.asyncio async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3116,7 +3207,7 @@ async def test_update_channel_async_from_dict(): def test_update_channel_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3148,7 +3239,7 @@ def test_update_channel_field_headers(): @pytest.mark.asyncio async def test_update_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3179,7 +3270,7 @@ async def test_update_channel_field_headers_async(): def test_update_channel_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3209,7 +3300,7 @@ def test_update_channel_flattened(): def test_update_channel_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3224,7 +3315,7 @@ def test_update_channel_flattened_error(): @pytest.mark.asyncio async def test_update_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3258,7 +3349,7 @@ async def test_update_channel_flattened_async(): @pytest.mark.asyncio async def test_update_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3277,7 +3368,7 @@ async def test_update_channel_flattened_error_async(): ]) def test_delete_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3306,7 +3397,7 @@ def test_delete_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3322,7 +3413,7 @@ def test_delete_channel_empty_call(): @pytest.mark.asyncio async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3356,7 +3447,7 @@ async def test_delete_channel_async_from_dict(): def test_delete_channel_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3388,7 +3479,7 @@ def test_delete_channel_field_headers(): @pytest.mark.asyncio async def test_delete_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3419,7 +3510,7 @@ async def test_delete_channel_field_headers_async(): def test_delete_channel_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3445,7 +3536,7 @@ def test_delete_channel_flattened(): def test_delete_channel_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3459,7 +3550,7 @@ def test_delete_channel_flattened_error(): @pytest.mark.asyncio async def test_delete_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3489,7 +3580,7 @@ async def test_delete_channel_flattened_async(): @pytest.mark.asyncio async def test_delete_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3507,7 +3598,7 @@ async def test_delete_channel_flattened_error_async(): ]) def test_get_provider(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3541,7 +3632,7 @@ def test_get_provider_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3557,7 +3648,7 @@ def test_get_provider_empty_call(): @pytest.mark.asyncio async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3594,7 +3685,7 @@ async def test_get_provider_async_from_dict(): def test_get_provider_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3626,7 +3717,7 @@ def test_get_provider_field_headers(): @pytest.mark.asyncio async def test_get_provider_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3657,7 +3748,7 @@ async def test_get_provider_field_headers_async(): def test_get_provider_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3683,7 +3774,7 @@ def test_get_provider_flattened(): def test_get_provider_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3697,7 +3788,7 @@ def test_get_provider_flattened_error(): @pytest.mark.asyncio async def test_get_provider_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3725,7 +3816,7 @@ async def test_get_provider_flattened_async(): @pytest.mark.asyncio async def test_get_provider_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3743,7 +3834,7 @@ async def test_get_provider_flattened_error_async(): ]) def test_list_providers(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3777,7 +3868,7 @@ def test_list_providers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3793,7 +3884,7 @@ def test_list_providers_empty_call(): @pytest.mark.asyncio async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3830,7 +3921,7 @@ async def test_list_providers_async_from_dict(): def test_list_providers_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3862,7 +3953,7 @@ def test_list_providers_field_headers(): @pytest.mark.asyncio async def test_list_providers_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3893,7 +3984,7 @@ async def test_list_providers_field_headers_async(): def test_list_providers_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3919,7 +4010,7 @@ def test_list_providers_flattened(): def test_list_providers_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3933,7 +4024,7 @@ def test_list_providers_flattened_error(): @pytest.mark.asyncio async def test_list_providers_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3961,7 +4052,7 @@ async def test_list_providers_flattened_async(): @pytest.mark.asyncio async def test_list_providers_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3975,7 +4066,7 @@ async def test_list_providers_flattened_error_async(): def test_list_providers_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -4028,7 +4119,7 @@ def test_list_providers_pager(transport_name: str = "grpc"): for i in results) def test_list_providers_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -4071,7 +4162,7 @@ def test_list_providers_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_providers_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4120,7 +4211,7 @@ async def test_list_providers_async_pager(): @pytest.mark.asyncio async def test_list_providers_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4171,7 +4262,7 @@ async def test_list_providers_async_pages(): ]) def test_get_channel_connection(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4209,7 +4300,7 @@ def test_get_channel_connection_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4225,7 +4316,7 @@ def test_get_channel_connection_empty_call(): @pytest.mark.asyncio async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4266,7 +4357,7 @@ async def test_get_channel_connection_async_from_dict(): def test_get_channel_connection_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4298,7 +4389,7 @@ def test_get_channel_connection_field_headers(): @pytest.mark.asyncio async def test_get_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), 
) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4329,7 +4420,7 @@ async def test_get_channel_connection_field_headers_async(): def test_get_channel_connection_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4355,7 +4446,7 @@ def test_get_channel_connection_flattened(): def test_get_channel_connection_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4369,7 +4460,7 @@ def test_get_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_get_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4397,7 +4488,7 @@ async def test_get_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_get_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4415,7 +4506,7 @@ async def test_get_channel_connection_flattened_error_async(): ]) def test_list_channel_connections(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4449,7 +4540,7 @@ def test_list_channel_connections_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4465,7 +4556,7 @@ def test_list_channel_connections_empty_call(): @pytest.mark.asyncio async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4502,7 +4593,7 @@ async def test_list_channel_connections_async_from_dict(): def test_list_channel_connections_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4534,7 +4625,7 @@ def test_list_channel_connections_field_headers(): @pytest.mark.asyncio async def test_list_channel_connections_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4565,7 +4656,7 @@ async def test_list_channel_connections_field_headers_async(): def test_list_channel_connections_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4591,7 +4682,7 @@ def test_list_channel_connections_flattened(): def test_list_channel_connections_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4605,7 +4696,7 @@ def test_list_channel_connections_flattened_error(): @pytest.mark.asyncio async def test_list_channel_connections_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4633,7 +4724,7 @@ async def test_list_channel_connections_flattened_async(): @pytest.mark.asyncio async def test_list_channel_connections_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4647,7 +4738,7 @@ async def test_list_channel_connections_flattened_error_async(): def test_list_channel_connections_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -4700,7 +4791,7 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): for i in results) def test_list_channel_connections_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -4743,7 +4834,7 @@ def test_list_channel_connections_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_channel_connections_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4792,7 +4883,7 @@ async def test_list_channel_connections_async_pager(): @pytest.mark.asyncio async def test_list_channel_connections_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4843,7 +4934,7 @@ async def test_list_channel_connections_async_pages(): ]) def test_create_channel_connection(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4872,7 +4963,7 @@ def test_create_channel_connection_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4888,7 +4979,7 @@ def test_create_channel_connection_empty_call(): @pytest.mark.asyncio async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4922,7 +5013,7 @@ async def test_create_channel_connection_async_from_dict(): def test_create_channel_connection_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4954,7 +5045,7 @@ def test_create_channel_connection_field_headers(): @pytest.mark.asyncio async def test_create_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4985,7 +5076,7 @@ async def test_create_channel_connection_field_headers_async(): def test_create_channel_connection_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5019,7 +5110,7 @@ def test_create_channel_connection_flattened(): def test_create_channel_connection_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5035,7 +5126,7 @@ def test_create_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_create_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5073,7 +5164,7 @@ async def test_create_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_create_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5093,7 +5184,7 @@ async def test_create_channel_connection_flattened_error_async(): ]) def test_delete_channel_connection(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5122,7 +5213,7 @@ def test_delete_channel_connection_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5138,7 +5229,7 @@ def test_delete_channel_connection_empty_call(): @pytest.mark.asyncio async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5172,7 +5263,7 @@ async def test_delete_channel_connection_async_from_dict(): def test_delete_channel_connection_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5204,7 +5295,7 @@ def test_delete_channel_connection_field_headers(): @pytest.mark.asyncio async def test_delete_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5235,7 +5326,7 @@ async def test_delete_channel_connection_field_headers_async(): def test_delete_channel_connection_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5261,7 +5352,7 @@ def test_delete_channel_connection_flattened(): def test_delete_channel_connection_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5275,7 +5366,7 @@ def test_delete_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_delete_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5305,7 +5396,7 @@ async def test_delete_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_delete_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5323,7 +5414,7 @@ async def test_delete_channel_connection_flattened_error_async(): ]) def test_get_google_channel_config(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5357,7 +5448,7 @@ def test_get_google_channel_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5373,7 +5464,7 @@ def test_get_google_channel_config_empty_call(): @pytest.mark.asyncio async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5410,7 +5501,7 @@ async def test_get_google_channel_config_async_from_dict(): def test_get_google_channel_config_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5442,7 +5533,7 @@ def test_get_google_channel_config_field_headers(): @pytest.mark.asyncio async def test_get_google_channel_config_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5473,7 +5564,7 @@ async def test_get_google_channel_config_field_headers_async(): def test_get_google_channel_config_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5499,7 +5590,7 @@ def test_get_google_channel_config_flattened(): def test_get_google_channel_config_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5513,7 +5604,7 @@ def test_get_google_channel_config_flattened_error(): @pytest.mark.asyncio async def test_get_google_channel_config_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5541,7 +5632,7 @@ async def test_get_google_channel_config_flattened_async(): @pytest.mark.asyncio async def test_get_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5559,7 +5650,7 @@ async def test_get_google_channel_config_flattened_error_async(): ]) def test_update_google_channel_config(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5593,7 +5684,7 @@ def test_update_google_channel_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5609,7 +5700,7 @@ def test_update_google_channel_config_empty_call(): @pytest.mark.asyncio async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5646,7 +5737,7 @@ async def test_update_google_channel_config_async_from_dict(): def test_update_google_channel_config_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5678,7 +5769,7 @@ def test_update_google_channel_config_field_headers(): @pytest.mark.asyncio async def test_update_google_channel_config_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5709,7 +5800,7 @@ async def test_update_google_channel_config_field_headers_async(): def test_update_google_channel_config_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5739,7 +5830,7 @@ def test_update_google_channel_config_flattened(): def test_update_google_channel_config_flattened_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5754,7 +5845,7 @@ def test_update_google_channel_config_flattened_error(): @pytest.mark.asyncio async def test_update_google_channel_config_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5786,7 +5877,7 @@ async def test_update_google_channel_config_flattened_async(): @pytest.mark.asyncio async def test_update_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5805,7 +5896,7 @@ async def test_update_google_channel_config_flattened_error_async(): ]) def test_get_trigger_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5859,14 +5950,14 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5874,7 +5965,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -5916,7 +6007,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques def test_get_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -5925,7 +6016,7 @@ def test_get_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -5964,7 +6055,7 @@ def test_get_trigger_rest_interceptors(null_interceptor): def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), 
transport=transport, ) @@ -5984,7 +6075,7 @@ def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=even def test_get_trigger_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6022,7 +6113,7 @@ def test_get_trigger_rest_flattened(): def test_get_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6037,7 +6128,7 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): def test_get_trigger_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -6048,7 +6139,7 @@ def test_get_trigger_rest_error(): ]) def test_list_triggers_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6096,14 +6187,14 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_triggers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_triggers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -6113,7 +6204,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -6155,7 +6246,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe def test_list_triggers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_triggers._get_unset_required_fields({}) assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) @@ -6164,7 +6255,7 @@ def test_list_triggers_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_triggers_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -6203,7 +6294,7 @@ def test_list_triggers_rest_interceptors(null_interceptor): def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6223,7 +6314,7 @@ def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=ev def test_list_triggers_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6261,7 +6352,7 @@ def test_list_triggers_rest_flattened(): def test_list_triggers_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6276,7 +6367,7 @@ def test_list_triggers_rest_flattened_error(transport: str = 'rest'): def test_list_triggers_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6342,7 +6433,7 @@ def test_list_triggers_rest_pager(transport: str = 'rest'): ]) def test_create_trigger_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6451,7 +6542,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger assert "triggerId" not in jsonified_request assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6464,7 +6555,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger jsonified_request["triggerId"] = 
'trigger_id_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("trigger_id", "validate_only", )) jsonified_request.update(unset_fields) @@ -6478,7 +6569,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -6526,7 +6617,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger def test_create_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.create_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) @@ -6535,7 +6626,7 @@ def test_create_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -6575,7 +6666,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): def 
test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6595,7 +6686,7 @@ def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=e def test_create_trigger_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6633,7 +6724,7 @@ def test_create_trigger_rest_flattened(): def test_create_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6650,7 +6741,7 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): def test_create_trigger_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -6661,7 +6752,7 @@ def test_create_trigger_rest_error(): ]) def test_update_trigger_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6767,7 +6858,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ 
-6776,7 +6867,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) jsonified_request.update(unset_fields) @@ -6786,7 +6877,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -6830,7 +6921,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger def test_update_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.update_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) @@ -6839,7 +6930,7 @@ def test_update_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ 
-6879,7 +6970,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6899,7 +6990,7 @@ def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=e def test_update_trigger_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6937,7 +7028,7 @@ def test_update_trigger_rest_flattened(): def test_update_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6954,7 +7045,7 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): def test_update_trigger_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -6965,7 +7056,7 @@ def test_update_trigger_rest_error(): ]) def test_delete_trigger_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7008,7 +7099,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_trigger._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7018,7 +7109,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger jsonified_request["name"] = 'name_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) jsonified_request.update(unset_fields) @@ -7030,7 +7121,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7073,7 +7164,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger def test_delete_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.delete_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) @@ -7082,7 +7173,7 @@ def test_delete_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -7122,7 +7213,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7142,7 +7233,7 @@ def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=e def test_delete_trigger_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7179,7 +7270,7 @@ def test_delete_trigger_rest_flattened(): def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7195,7 +7286,7 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): def test_delete_trigger_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -7206,7 +7297,7 @@ def test_delete_trigger_rest_error(): ]) def test_get_channel_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7263,14 +7354,14 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7278,7 +7369,7 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7320,7 +7411,7 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques def test_get_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_channel._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -7329,7 +7420,7 @@ def test_get_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -7368,7 +7459,7 @@ def 
test_get_channel_rest_interceptors(null_interceptor): def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7388,7 +7479,7 @@ def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=even def test_get_channel_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7426,7 +7517,7 @@ def test_get_channel_rest_flattened(): def test_get_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7441,7 +7532,7 @@ def test_get_channel_rest_flattened_error(transport: str = 'rest'): def test_get_channel_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -7452,7 +7543,7 @@ def test_get_channel_rest_error(): ]) def test_list_channels_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7500,14 +7591,14 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channels._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present 
jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channels._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -7517,7 +7608,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7559,7 +7650,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe def test_list_channels_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_channels._get_unset_required_fields({}) assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) @@ -7568,7 +7659,7 @@ def test_list_channels_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_channels_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -7607,7 +7698,7 @@ def test_list_channels_rest_interceptors(null_interceptor): def test_list_channels_rest_bad_request(transport: 
str = 'rest', request_type=eventarc.ListChannelsRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7627,7 +7718,7 @@ def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=ev def test_list_channels_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7665,7 +7756,7 @@ def test_list_channels_rest_flattened(): def test_list_channels_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7680,7 +7771,7 @@ def test_list_channels_rest_flattened_error(transport: str = 'rest'): def test_list_channels_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7746,7 +7837,7 @@ def test_list_channels_rest_pager(transport: str = 'rest'): ]) def test_create_channel_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -7855,7 +7946,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel assert "channelId" not in jsonified_request assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ 
-7868,7 +7959,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel jsonified_request["channelId"] = 'channel_id_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("channel_id", "validate_only", )) jsonified_request.update(unset_fields) @@ -7882,7 +7973,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -7930,7 +8021,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel def test_create_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.create_channel_._get_unset_required_fields({}) assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) @@ -7939,7 +8030,7 @@ def test_create_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) 
client = EventarcClient(transport=transport) @@ -7979,7 +8070,7 @@ def test_create_channel_rest_interceptors(null_interceptor): def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7999,7 +8090,7 @@ def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=e def test_create_channel_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8037,7 +8128,7 @@ def test_create_channel_rest_flattened(): def test_create_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8054,7 +8145,7 @@ def test_create_channel_rest_flattened_error(transport: str = 'rest'): def test_create_channel_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8065,7 +8156,7 @@ def test_create_channel_rest_error(): ]) def test_update_channel_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8171,7 +8262,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8180,7 +8271,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_channel._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask", "validate_only", )) jsonified_request.update(unset_fields) @@ -8190,7 +8281,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8234,7 +8325,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel def test_update_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.update_channel._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) @@ -8243,7 +8334,7 @@ def test_update_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8283,7 +8374,7 @@ def test_update_channel_rest_interceptors(null_interceptor): def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8303,7 +8394,7 @@ def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=e def test_update_channel_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8340,7 +8431,7 @@ def test_update_channel_rest_flattened(): def test_update_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8356,7 +8447,7 @@ def test_update_channel_rest_flattened_error(transport: str = 'rest'): def test_update_channel_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8367,7 +8458,7 @@ def test_update_channel_rest_error(): ]) def test_delete_channel_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8410,7 +8501,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8420,7 +8511,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel jsonified_request["name"] = 'name_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("validate_only", )) jsonified_request.update(unset_fields) @@ -8432,7 +8523,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8475,7 +8566,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel def test_delete_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.delete_channel._get_unset_required_fields({}) assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) @@ -8484,7 +8575,7 @@ def test_delete_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", 
[True, False]) def test_delete_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8524,7 +8615,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8544,7 +8635,7 @@ def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=e def test_delete_channel_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8580,7 +8671,7 @@ def test_delete_channel_rest_flattened(): def test_delete_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8595,7 +8686,7 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): def test_delete_channel_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8606,7 +8697,7 @@ def test_delete_channel_rest_error(): ]) def test_get_provider_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8654,14 +8745,14 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ # verify fields with default values are 
dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8669,7 +8760,7 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8711,7 +8802,7 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ def test_get_provider_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_provider._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -8720,7 +8811,7 @@ def test_get_provider_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_provider_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8759,7 +8850,7 @@ def test_get_provider_rest_interceptors(null_interceptor): def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetProviderRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8779,7 +8870,7 @@ def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eve def test_get_provider_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8817,7 +8908,7 @@ def test_get_provider_rest_flattened(): def test_get_provider_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8832,7 +8923,7 @@ def test_get_provider_rest_flattened_error(transport: str = 'rest'): def test_get_provider_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -8843,7 +8934,7 @@ def test_get_provider_rest_error(): ]) def test_list_providers_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -8891,14 +8982,14 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_providers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_providers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -8908,7 +8999,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -8950,7 +9041,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders def test_list_providers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_providers._get_unset_required_fields({}) assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) @@ -8959,7 +9050,7 @@ def test_list_providers_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_providers_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), 
interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8998,7 +9089,7 @@ def test_list_providers_rest_interceptors(null_interceptor): def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListProvidersRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9018,7 +9109,7 @@ def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=e def test_list_providers_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9056,7 +9147,7 @@ def test_list_providers_rest_flattened(): def test_list_providers_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9071,7 +9162,7 @@ def test_list_providers_rest_flattened_error(transport: str = 'rest'): def test_list_providers_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9137,7 +9228,7 @@ def test_list_providers_rest_pager(transport: str = 'rest'): ]) def test_get_channel_connection_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9189,14 +9280,14 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9204,7 +9295,7 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9246,7 +9337,7 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh def test_get_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_channel_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -9255,7 +9346,7 @@ def test_get_channel_connection_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = 
EventarcClient(transport=transport) @@ -9294,7 +9385,7 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): def test_get_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelConnectionRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9314,7 +9405,7 @@ def test_get_channel_connection_rest_bad_request(transport: str = 'rest', reques def test_get_channel_connection_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9352,7 +9443,7 @@ def test_get_channel_connection_rest_flattened(): def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9367,7 +9458,7 @@ def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): def test_get_channel_connection_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -9378,7 +9469,7 @@ def test_get_channel_connection_rest_error(): ]) def test_list_channel_connections_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9426,14 +9517,14 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channel_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channel_connections._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -9443,7 +9534,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9485,7 +9576,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis def test_list_channel_connections_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_channel_connections._get_unset_required_fields({}) assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) @@ -9494,7 +9585,7 @@ def test_list_channel_connections_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_channel_connections_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -9533,7 +9624,7 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): def test_list_channel_connections_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9553,7 +9644,7 @@ def test_list_channel_connections_rest_bad_request(transport: str = 'rest', requ def test_list_channel_connections_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9591,7 +9682,7 @@ def test_list_channel_connections_rest_flattened(): def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9606,7 +9697,7 @@ def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): def test_list_channel_connections_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9672,7 +9763,7 @@ def test_list_channel_connections_rest_pager(transport: str = 'rest'): ]) def test_create_channel_connection_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9779,7 +9870,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr # verify fields with default values are dropped assert 
"channelConnectionId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9789,7 +9880,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr jsonified_request["parent"] = 'parent_value' jsonified_request["channelConnectionId"] = 'channel_connection_id_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("channel_connection_id", )) jsonified_request.update(unset_fields) @@ -9801,7 +9892,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -9845,7 +9936,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr def test_create_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.create_channel_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) @@ -9854,7 +9945,7 @@ def test_create_channel_connection_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -9894,7 +9985,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): def test_create_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9914,7 +10005,7 @@ def 
test_create_channel_connection_rest_bad_request(transport: str = 'rest', req def test_create_channel_connection_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -9952,7 +10043,7 @@ def test_create_channel_connection_rest_flattened(): def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -9969,7 +10060,7 @@ def test_create_channel_connection_rest_flattened_error(transport: str = 'rest') def test_create_channel_connection_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -9980,7 +10071,7 @@ def test_create_channel_connection_rest_error(): ]) def test_delete_channel_connection_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10021,14 +10112,14 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10036,7 +10127,7 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10075,7 +10166,7 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De def test_delete_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -10084,7 +10175,7 @@ def test_delete_channel_connection_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -10124,7 +10215,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10144,7 
+10235,7 @@ def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', req def test_delete_channel_connection_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10180,7 +10271,7 @@ def test_delete_channel_connection_rest_flattened(): def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10195,7 +10286,7 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') def test_delete_channel_connection_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -10206,7 +10297,7 @@ def test_delete_channel_connection_rest_error(): ]) def test_get_google_channel_config_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10254,14 +10345,14 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10269,7 +10360,7 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10311,7 +10402,7 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge def test_get_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -10320,7 +10411,7 @@ def test_get_google_channel_config_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -10359,7 +10450,7 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10379,7 
+10470,7 @@ def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', req def test_get_google_channel_config_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10417,7 +10508,7 @@ def test_get_google_channel_config_rest_flattened(): def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10432,7 +10523,7 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') def test_get_google_channel_config_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -10443,7 +10534,7 @@ def test_get_google_channel_config_rest_error(): ]) def test_update_google_channel_config_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10554,12 +10645,12 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_google_channel_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) @@ -10567,7 +10658,7 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc # verify required fields with non-default values are left alone client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -10610,7 +10701,7 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc def test_update_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) @@ -10619,7 +10710,7 @@ def test_update_google_channel_config_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -10658,7 +10749,7 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10678,7 +10769,7 @@ def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', def test_update_google_channel_config_rest_flattened(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -10717,7 +10808,7 @@ def test_update_google_channel_config_rest_flattened(): def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -10733,7 +10824,7 @@ def test_update_google_channel_config_rest_flattened_error(transport: str = 'res def test_update_google_channel_config_rest_error(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -10741,17 +10832,17 @@ def test_update_google_channel_config_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = EventarcClient( @@ -10761,7 +10852,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -10772,17 +10863,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = EventarcClient( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = EventarcClient( @@ -10794,7 +10885,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = EventarcClient(transport=transport) assert client.transport is transport @@ -10802,13 +10893,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.EventarcGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.EventarcGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -10821,7 +10912,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -10831,14 +10922,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = EventarcClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -10849,7 +10940,7 @@ def test_eventarc_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.EventarcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -10859,7 +10950,7 @@ def test_eventarc_base_transport(): with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: Transport.return_value = None transport = transports.EventarcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -10918,7 +11009,7 @@ def test_eventarc_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.EventarcTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -10936,7 +11027,7 @@ def test_eventarc_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.EventarcTransport() adc.assert_called_once() @@ -10944,7 +11035,7 @@ def test_eventarc_base_transport_with_adc(): def test_eventarc_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) EventarcClient() adc.assert_called_once_with( scopes=None, @@ -10966,7 +11057,7 @@ def test_eventarc_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -11011,7 +11102,7 @@ def test_eventarc_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -11040,7 +11131,7 @@ def test_eventarc_transport_create_channel(transport_class, grpc_helpers): def test_eventarc_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -11078,7 +11169,7 @@ def test_eventarc_grpc_transport_client_cert_source_for_mtls( ) def test_eventarc_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.EventarcRestTransport ( credentials=cred, @@ -11089,7 +11180,7 @@ def test_eventarc_http_transport_client_cert_source_for_mtls(): def test_eventarc_rest_lro_client(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) transport = client.transport @@ -11111,7 +11202,7 @@ def test_eventarc_rest_lro_client(): ]) def test_eventarc_host_no_port(transport_name): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), transport=transport_name, ) @@ -11128,7 +11219,7 @@ def test_eventarc_host_no_port(transport_name): ]) def test_eventarc_host_with_port(transport_name): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), transport=transport_name, ) @@ -11142,8 +11233,8 @@ def test_eventarc_host_with_port(transport_name): "rest", ]) def test_eventarc_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() + creds1 = _AnonymousCredentialsWithUniverseDomain() + creds2 = _AnonymousCredentialsWithUniverseDomain() client1 = EventarcClient( credentials=creds1, transport=transport_name, @@ -11246,7 
+11337,7 @@ def test_eventarc_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -11318,7 +11409,7 @@ def test_eventarc_transport_channel_mtls_with_adc( def test_eventarc_grpc_lro_client(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) transport = client.transport @@ -11335,7 +11426,7 @@ def test_eventarc_grpc_lro_client(): def test_eventarc_grpc_lro_async_client(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc_asyncio', ) transport = client.transport @@ -11645,7 +11736,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -11653,7 +11744,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: transport_class = EventarcClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -11661,7 +11752,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -11672,7 +11763,7 @@ async def test_transport_close_async(): def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11694,7 +11785,7 @@ def test_get_location_rest_bad_request(transport: str = 'rest', request_type=loc ]) def test_get_location_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -11719,7 +11810,7 @@ def test_get_location_rest(request_type): def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11741,7 +11832,7 @@ def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=l ]) def test_list_locations_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1'} @@ -11766,7 +11857,7 @@ def test_list_locations_rest(request_type): def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11788,7 +11879,7 @@ def test_get_iam_policy_rest_bad_request(transport: str = 'rest', 
request_type=i ]) def test_get_iam_policy_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} @@ -11813,7 +11904,7 @@ def test_get_iam_policy_rest(request_type): def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11835,7 +11926,7 @@ def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=i ]) def test_set_iam_policy_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} @@ -11860,7 +11951,7 @@ def test_set_iam_policy_rest(request_type): def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11882,7 +11973,7 @@ def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_ ]) def test_test_iam_permissions_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} @@ -11907,7 +11998,7 @@ def test_test_iam_permissions_rest(request_type): def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): client = 
EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11929,7 +12020,7 @@ def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_cancel_operation_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -11954,7 +12045,7 @@ def test_cancel_operation_rest(request_type): def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -11976,7 +12067,7 @@ def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_delete_operation_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -12001,7 +12092,7 @@ def test_delete_operation_rest(request_type): def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12023,7 +12114,7 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op ]) def test_get_operation_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 
'projects/sample1/locations/sample2/operations/sample3'} @@ -12048,7 +12139,7 @@ def test_get_operation_rest(request_type): def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -12070,7 +12161,7 @@ def test_list_operations_rest_bad_request(transport: str = 'rest', request_type= ]) def test_list_operations_rest(request_type): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -12096,7 +12187,7 @@ def test_list_operations_rest(request_type): def test_delete_operation(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12118,7 +12209,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12142,7 +12233,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): def test_delete_operation_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12166,7 +12257,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async 
def test_delete_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12191,7 +12282,7 @@ async def test_delete_operation_field_headers_async(): def test_delete_operation_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -12207,7 +12298,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -12225,7 +12316,7 @@ async def test_delete_operation_from_dict_async(): def test_cancel_operation(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12247,7 +12338,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12271,7 +12362,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12295,7 +12386,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12320,7 +12411,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -12336,7 +12427,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -12354,7 +12445,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12376,7 +12467,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12400,7 +12491,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12424,7 +12515,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12449,7 
+12540,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -12465,7 +12556,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -12483,7 +12574,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12505,7 +12596,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12529,7 +12620,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12553,7 +12644,7 @@ def 
test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12578,7 +12669,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -12594,7 +12685,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -12612,7 +12703,7 @@ async def test_list_operations_from_dict_async(): def test_list_locations(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12634,7 +12725,7 @@ def test_list_locations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12658,7 +12749,7 @@ async def test_list_locations_async(transport: str = "grpc_asyncio"): def test_list_locations_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12682,7 +12773,7 @@ def test_list_locations_field_headers(): @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12707,7 +12798,7 @@ async def test_list_locations_field_headers_async(): def test_list_locations_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12723,7 +12814,7 @@ def test_list_locations_from_dict(): @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12741,7 +12832,7 @@ async def test_list_locations_from_dict_async(): def test_get_location(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12763,7 +12854,7 @@ def test_get_location(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12787,7 +12878,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials()) + credentials=_AnonymousCredentialsWithUniverseDomain()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -12810,7 +12901,7 @@ def test_get_location_field_headers(): @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12835,7 +12926,7 @@ async def test_get_location_field_headers_async(): def test_get_location_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12851,7 +12942,7 @@ def test_get_location_from_dict(): @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12869,7 +12960,7 @@ async def test_get_location_from_dict_async(): def test_set_iam_policy(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12896,7 +12987,7 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12926,7 +13017,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12951,7 +13042,7 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12976,7 +13067,7 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -12995,7 +13086,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -13014,7 +13105,7 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13045,7 +13136,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13079,7 +13170,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13106,7 +13197,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13134,7 +13225,7 
@@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -13152,7 +13243,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -13171,7 +13262,7 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13204,7 +13295,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13236,7 +13327,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13265,7 +13356,7 @@ def 
test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13295,7 +13386,7 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -13315,7 +13406,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -13342,7 +13433,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -13357,7 +13448,7 @@ def test_client_ctx(): ] for transport in transports: client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -13385,7 +13476,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index fbeb65b8a85b..f13822bd2e6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -50,8 +50,12 @@ class ConfigServiceV2AsyncClient: _client: ConfigServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = ConfigServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ConfigServiceV2Client._DEFAULT_UNIVERSE cmek_settings_path = staticmethod(ConfigServiceV2Client.cmek_settings_path) parse_cmek_settings_path = staticmethod(ConfigServiceV2Client.parse_cmek_settings_path) @@ -159,11 +163,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)) def __init__(self, *, @@ -172,7 +185,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the config service v2 client. + """Instantiates the config service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -183,23 +196,38 @@ def __init__(self, *, transport (Union[str, ~.ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -316,6 +344,9 @@ async def sample_list_buckets(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -405,6 +436,9 @@ async def sample_get_bucket(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -496,6 +530,9 @@ async def sample_create_bucket_async(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -597,6 +634,9 @@ async def sample_update_bucket_async(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -688,6 +728,9 @@ async def sample_create_bucket(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -774,6 +817,9 @@ async def sample_update_bucket(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -850,6 +896,9 @@ async def sample_delete_bucket(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -920,6 +969,9 @@ async def sample_undelete_bucket(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1024,6 +1076,9 @@ async def sample_list_views(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1113,6 +1168,9 @@ async def sample_get_view(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1195,6 +1253,9 @@ async def sample_create_view(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1279,6 +1340,9 @@ async def sample_update_view(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1353,6 +1417,9 @@ async def sample_delete_view(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1468,6 +1535,9 @@ async def sample_list_sinks(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1602,6 +1672,9 @@ async def sample_get_sink(): )), ) + # Validate the universe domain. 
+ self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1738,6 +1811,9 @@ async def sample_create_sink(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1909,6 +1985,9 @@ async def sample_update_sink(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2020,6 +2099,9 @@ async def sample_delete_sink(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -2154,6 +2236,9 @@ async def sample_create_link(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2281,6 +2366,9 @@ async def sample_delete_link(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2398,6 +2486,9 @@ async def sample_list_links(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2511,6 +2602,9 @@ async def sample_get_link(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2630,6 +2724,9 @@ async def sample_list_exclusions(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2762,6 +2859,9 @@ async def sample_get_exclusion(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2897,6 +2997,9 @@ async def sample_create_exclusion(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -3046,6 +3149,9 @@ async def sample_update_exclusion(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3156,6 +3262,9 @@ async def sample_delete_exclusion(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -3257,6 +3366,9 @@ async def sample_get_cmek_settings(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3366,6 +3478,9 @@ async def sample_update_cmek_settings(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3499,6 +3614,9 @@ async def sample_get_settings(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3642,6 +3760,9 @@ async def sample_update_settings(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3724,6 +3845,9 @@ async def sample_copy_log_entries(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -3787,6 +3911,9 @@ async def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3838,6 +3965,9 @@ async def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -3892,6 +4022,9 @@ async def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index cc5b7e0e028d..df1aa1434a36 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -114,11 +114,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -366,8 +370,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -377,11 +381,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -401,37 +406,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". 
Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = ConfigServiceV2Client.DEFAULT_ENDPOINT + api_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ConfigServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + ConfigServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, ConfigServiceV2Transport]] = None, @@ -449,22 +528,32 @@ def __init__(self, *, transport (Union[str, ConfigServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -482,9 +571,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = ConfigServiceV2Client._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ConfigServiceV2Client._read_environment_variables() self._client_cert_source = ConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = ConfigServiceV2Client._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = ConfigServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -493,7 +588,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, ConfigServiceV2Transport): + transport_provided = isinstance(transport, ConfigServiceV2Transport) + if transport_provided: # transport is a ConfigServiceV2Transport instance. 
if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -503,14 +599,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(ConfigServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + ConfigServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -627,6 +732,9 @@ def sample_list_buckets(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -717,6 +825,9 @@ def sample_get_bucket(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -809,6 +920,9 @@ def sample_create_bucket_async(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -911,6 +1025,9 @@ def sample_update_bucket_async(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1003,6 +1120,9 @@ def sample_create_bucket(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1090,6 +1210,9 @@ def sample_update_bucket(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1167,6 +1290,9 @@ def sample_delete_bucket(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1238,6 +1364,9 @@ def sample_undelete_bucket(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1342,6 +1471,9 @@ def sample_list_views(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1432,6 +1564,9 @@ def sample_get_view(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1515,6 +1650,9 @@ def sample_create_view(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1600,6 +1738,9 @@ def sample_update_view(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1675,6 +1816,9 @@ def sample_delete_view(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1782,6 +1926,9 @@ def sample_list_sinks(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1908,6 +2055,9 @@ def sample_get_sink(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2044,6 +2194,9 @@ def sample_create_sink(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2207,6 +2360,9 @@ def sample_update_sink(): )), ) + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2310,6 +2466,9 @@ def sample_delete_sink(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -2444,6 +2603,9 @@ def sample_create_link(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2571,6 +2733,9 @@ def sample_delete_link(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2688,6 +2853,9 @@ def sample_list_links(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2801,6 +2969,9 @@ def sample_get_link(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2912,6 +3083,9 @@ def sample_list_exclusions(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3036,6 +3210,9 @@ def sample_get_exclusion(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3171,6 +3348,9 @@ def sample_create_exclusion(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3320,6 +3500,9 @@ def sample_update_exclusion(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3422,6 +3605,9 @@ def sample_delete_exclusion(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -3524,6 +3710,9 @@ def sample_get_cmek_settings(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -3634,6 +3823,9 @@ def sample_update_cmek_settings(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3767,6 +3959,9 @@ def sample_get_settings(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3910,6 +4105,9 @@ def sample_update_settings(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -3993,6 +4191,9 @@ def sample_copy_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -4069,6 +4270,9 @@ def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -4120,6 +4324,9 @@ def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -4174,6 +4381,9 @@ def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 3b1f9f96fcfd..f25c29123807 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -61,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -116,6 +116,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index fc7344593060..7251763c30bd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -64,7 +64,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index e68ff981640c..b618ba9a8e99 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -108,7 +108,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index ebb83ee03e19..113699c5055f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -47,8 +47,12 @@ class LoggingServiceV2AsyncClient: _client: LoggingServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = LoggingServiceV2Client._DEFAULT_UNIVERSE log_path = staticmethod(LoggingServiceV2Client.log_path) parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) @@ -144,11 +148,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) def __init__(self, *, @@ -157,7 +170,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the logging service v2 client. + """Instantiates the logging service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -168,23 +181,38 @@ def __init__(self, *, transport (Union[str, ~.LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -300,6 +328,9 @@ async def sample_delete_log(): )), ) + # Validate the universe domain. 
+ self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -493,6 +524,9 @@ async def sample_write_log_entries(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -645,6 +679,9 @@ async def sample_list_log_entries(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -739,6 +776,9 @@ async def sample_list_monitored_resource_descriptors(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -866,6 +906,9 @@ async def sample_list_logs(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -964,6 +1007,9 @@ def request_generator(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = rpc( requests, @@ -1019,6 +1065,9 @@ async def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1070,6 +1119,9 @@ async def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1124,6 +1176,9 @@ async def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 0b1dcb34e77b..77699f5aa5d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -111,11 +111,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -297,8 +301,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -308,11 +312,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -332,37 +337,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". 
Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = LoggingServiceV2Client.DEFAULT_ENDPOINT + api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = LoggingServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + LoggingServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, LoggingServiceV2Transport]] = None, @@ -380,22 +459,32 @@ def __init__(self, *, transport (Union[str, LoggingServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -413,9 +502,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = LoggingServiceV2Client._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = LoggingServiceV2Client._read_environment_variables() self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = LoggingServiceV2Client._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = LoggingServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -424,7 +519,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, LoggingServiceV2Transport): + transport_provided = isinstance(transport, LoggingServiceV2Transport) + if transport_provided: # transport is a LoggingServiceV2Transport instance. 
if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -434,14 +530,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(LoggingServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + LoggingServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -549,6 +654,9 @@ def sample_delete_log(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -733,6 +841,9 @@ def sample_write_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -877,6 +988,9 @@ def sample_list_log_entries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -964,6 +1078,9 @@ def sample_list_monitored_resource_descriptors(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1083,6 +1200,9 @@ def sample_list_logs(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1169,6 +1289,9 @@ def request_generator(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( requests, @@ -1237,6 +1360,9 @@ def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1288,6 +1414,9 @@ def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1342,6 +1471,9 @@ def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 649b606217dc..2b871e92600d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -61,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -116,6 +116,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index b24dd885af16..4010a645d9f7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -63,7 +63,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 9454a1f27b52..6d00bd7e344c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -107,7 +107,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 873108dee869..7eaed7beeffa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -48,8 +48,12 @@ class MetricsServiceV2AsyncClient: _client: MetricsServiceV2Client + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = MetricsServiceV2Client.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = MetricsServiceV2Client._DEFAULT_UNIVERSE log_metric_path = staticmethod(MetricsServiceV2Client.log_metric_path) parse_log_metric_path = staticmethod(MetricsServiceV2Client.parse_log_metric_path) @@ -145,11 +149,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) def __init__(self, *, @@ -158,7 +171,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the metrics service v2 client. + """Instantiates the metrics service v2 async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -169,23 +182,38 @@ def __init__(self, *, transport (Union[str, ~.MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -303,6 +331,9 @@ async def sample_list_log_metrics(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -433,6 +464,9 @@ async def sample_get_log_metric(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -565,6 +599,9 @@ async def sample_create_log_metric(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -704,6 +741,9 @@ async def sample_update_log_metric(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -806,6 +846,9 @@ async def sample_delete_log_metric(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -858,6 +901,9 @@ async def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -909,6 +955,9 @@ async def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -963,6 +1012,9 @@ async def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index d8b072132913..80981fee4b28 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -112,11 +112,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -298,8 +302,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -309,11 +313,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -333,37 +338,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". 
Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = MetricsServiceV2Client.DEFAULT_ENDPOINT + api_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = MetricsServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. 
+ + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + MetricsServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, MetricsServiceV2Transport]] = None, @@ -381,22 +460,32 @@ def __init__(self, *, transport (Union[str, MetricsServiceV2Transport]): The transport to use. If set to None, a transport is chosen automatically. 
- client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -414,9 +503,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = MetricsServiceV2Client._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = MetricsServiceV2Client._read_environment_variables() self._client_cert_source = MetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = MetricsServiceV2Client._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = MetricsServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -425,7 +520,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, MetricsServiceV2Transport): + transport_provided = isinstance(transport, MetricsServiceV2Transport) + if transport_provided: # transport is a MetricsServiceV2Transport instance. 
if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -435,14 +531,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(MetricsServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + MetricsServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -552,6 +657,9 @@ def sample_list_log_metrics(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -674,6 +782,9 @@ def sample_get_log_metric(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -806,6 +917,9 @@ def sample_create_log_metric(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -937,6 +1051,9 @@ def sample_update_log_metric(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1031,6 +1148,9 @@ def sample_delete_log_metric(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
rpc( request, @@ -1096,6 +1216,9 @@ def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1147,6 +1270,9 @@ def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1201,6 +1327,9 @@ def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 5ad58024fb86..9fe00adae387 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -61,7 +61,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -116,6 +116,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index d6cf4e25225f..fb39e573bba8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -63,7 +63,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 1b81c982e60e..e59f926ca139 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -107,7 +107,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'logging.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 044a642d07f5..42795193ecd3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -25,6 +25,7 @@ from grpc.experimental import aio import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers @@ -56,13 +57,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". 
+class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -79,14 +95,13 @@ def test__get_default_mtls_endpoint(): assert ConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert ConfigServiceV2Client._read_environment_variables() == (False, "auto") + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert ConfigServiceV2Client._read_environment_variables() == (True, "auto") + assert ConfigServiceV2Client._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "auto") + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -94,19 +109,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "never") + assert ConfigServiceV2Client._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "always") + assert 
ConfigServiceV2Client._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert ConfigServiceV2Client._read_environment_variables() == (False, "auto") + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: ConfigServiceV2Client._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -120,26 +138,95 @@ def test__get_client_cert_source(): assert ConfigServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source assert ConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = 
ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert ConfigServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert ConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert ConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ConfigServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert ConfigServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert ConfigServiceV2Client._get_universe_domain(None, None) == ConfigServiceV2Client._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + ConfigServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
- assert ConfigServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "auto") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, None, "auto") == ConfigServiceV2Client.DEFAULT_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, None, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "always") == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert ConfigServiceV2Client._get_api_endpoint(None, None, "never") == ConfigServiceV2Client.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
@pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), ]) def test_config_service_v2_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -173,7 +260,7 @@ def test_config_service_v2_client_service_account_always_use_jwt(transport_class (ConfigServiceV2AsyncClient, "grpc_asyncio"), ]) def test_config_service_v2_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -204,13 +291,13 @@ def test_config_service_v2_client_get_transport_class(): (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ]) -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) def test_config_service_v2_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -246,7 +333,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -294,7 +381,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -310,7 +397,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -325,8 +412,8 @@ def test_config_service_v2_client_client_options(client_class, transport_class, (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "false"), (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), ]) -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -342,7 +429,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -366,7 +453,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -395,7 +482,7 @@ def test_config_service_v2_client_mtls_env_auto(client_class, transport_class, t patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -475,55 +562,58 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class @pytest.mark.parametrize("client_class", [ 
ConfigServiceV2Client, ConfigServiceV2AsyncClient ]) -@mock.patch.object(ConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2Client)) -@mock.patch.object(ConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ConfigServiceV2AsyncClient)) +@mock.patch.object(ConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2Client)) +@mock.patch.object(ConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(ConfigServiceV2AsyncClient)) def test_config_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = ConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), @@ -540,7 +630,7 @@ def test_config_service_v2_client_client_options_scopes(client_class, transport_ patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -565,7 +655,7 @@ def test_config_service_v2_client_client_options_credentials_file(client_class, patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -609,7 +699,7 @@ def 
test_config_service_v2_client_create_channel_credentials_file(client_class, patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -626,8 +716,8 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -658,7 +748,7 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, ]) def test_list_buckets(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -690,7 +780,7 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -706,7 +796,7 @@ def test_list_buckets_empty_call(): @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -741,7 +831,7 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -773,7 +863,7 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -804,7 +894,7 @@ async def test_list_buckets_field_headers_async(): def test_list_buckets_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -830,7 +920,7 @@ def test_list_buckets_flattened(): def test_list_buckets_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -844,7 +934,7 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -872,7 +962,7 @@ async def test_list_buckets_flattened_async(): @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -886,7 +976,7 @@ async def test_list_buckets_flattened_error_async(): def test_list_buckets_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -939,7 +1029,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): for i in results) def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -982,7 +1072,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_buckets_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1031,7 +1121,7 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1082,7 +1172,7 @@ async def test_list_buckets_async_pages(): ]) def test_get_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1126,7 +1216,7 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1142,7 +1232,7 @@ def test_get_bucket_empty_call(): @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1189,7 +1279,7 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1221,7 +1311,7 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as 
@@ -1256,7 +1346,7 @@ async def test_get_bucket_field_headers_async(): ]) def test_create_bucket_async(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1285,7 +1375,7 @@ def test_create_bucket_async_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1301,7 +1391,7 @@ def test_create_bucket_async_empty_call(): @pytest.mark.asyncio async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1335,7 +1425,7 @@ async def test_create_bucket_async_async_from_dict(): def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1367,7 +1457,7 @@ def test_create_bucket_async_field_headers(): @pytest.mark.asyncio async def test_create_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1402,7 +1492,7 @@ async def test_create_bucket_async_field_headers_async(): ]) def test_update_bucket_async(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1431,7 +1521,7 @@ def test_update_bucket_async_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1447,7 +1537,7 @@ def test_update_bucket_async_empty_call(): @pytest.mark.asyncio async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1481,7 +1571,7 @@ async def test_update_bucket_async_async_from_dict(): def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1513,7 +1603,7 @@ def test_update_bucket_async_field_headers(): @pytest.mark.asyncio async def test_update_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1548,7 +1638,7 @@ async def test_update_bucket_async_field_headers_async(): ]) def test_create_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1592,7 +1682,7 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1608,7 +1698,7 @@ def test_create_bucket_empty_call(): @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1655,7 +1745,7 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1687,7 +1777,7 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio async def test_create_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1722,7 +1812,7 @@ async def test_create_bucket_field_headers_async(): ]) def test_update_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1766,7 +1856,7 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1782,7 +1872,7 @@ def test_update_bucket_empty_call(): @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1829,7 +1919,7 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1861,7 +1951,7 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio async def test_update_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1896,7 +1986,7 @@ async def test_update_bucket_field_headers_async(): ]) def test_delete_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1925,7 +2015,7 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1941,7 +2031,7 @@ def test_delete_bucket_empty_call(): @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1973,7 +2063,7 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2005,7 +2095,7 @@ def test_delete_bucket_field_headers(): @pytest.mark.asyncio async def test_delete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2040,7 +2130,7 @@ async def test_delete_bucket_field_headers_async(): ]) def test_undelete_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2069,7 +2159,7 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2085,7 +2175,7 @@ def test_undelete_bucket_empty_call(): @pytest.mark.asyncio async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2117,7 +2207,7 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2149,7 +2239,7 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio async def test_undelete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2184,7 +2274,7 @@ async def test_undelete_bucket_field_headers_async(): ]) def test_list_views(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2216,7 +2306,7 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2232,7 +2322,7 @@ def test_list_views_empty_call(): @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2267,7 +2357,7 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2299,7 +2389,7 @@ def test_list_views_field_headers(): @pytest.mark.asyncio async def test_list_views_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2330,7 +2420,7 @@ async def test_list_views_field_headers_async(): def test_list_views_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2356,7 +2446,7 @@ def test_list_views_flattened(): def test_list_views_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2370,7 +2460,7 @@ def test_list_views_flattened_error(): @pytest.mark.asyncio async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2398,7 +2488,7 @@ async def test_list_views_flattened_async(): @pytest.mark.asyncio async def test_list_views_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2412,7 +2502,7 @@ async def test_list_views_flattened_error_async(): def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2465,7 +2555,7 @@ def test_list_views_pager(transport_name: str = "grpc"): for i in results) def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2508,7 +2598,7 @@ def test_list_views_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_views_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2557,7 +2647,7 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2608,7 +2698,7 @@ async def test_list_views_async_pages(): ]) def test_get_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2644,7 +2734,7 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2660,7 +2750,7 @@ def test_get_view_empty_call(): @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2699,7 +2789,7 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2731,7 +2821,7 @@ def test_get_view_field_headers(): @pytest.mark.asyncio async def test_get_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2766,7 +2856,7 @@ 
async def test_get_view_field_headers_async(): ]) def test_create_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2802,7 +2892,7 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2818,7 +2908,7 @@ def test_create_view_empty_call(): @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2857,7 +2947,7 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2889,7 +2979,7 @@ def test_create_view_field_headers(): @pytest.mark.asyncio async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2924,7 +3014,7 @@ async def test_create_view_field_headers_async(): ]) def test_update_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2960,7 +3050,7 @@ def 
test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2976,7 +3066,7 @@ def test_update_view_empty_call(): @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3015,7 +3105,7 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3047,7 +3137,7 @@ def test_update_view_field_headers(): @pytest.mark.asyncio async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3082,7 +3172,7 @@ async def test_update_view_field_headers_async(): ]) def test_delete_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3111,7 +3201,7 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3127,7 +3217,7 @@ def test_delete_view_empty_call(): @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3159,7 +3249,7 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3191,7 +3281,7 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio async def test_delete_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3226,7 +3316,7 @@ async def test_delete_view_field_headers_async(): ]) def test_list_sinks(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3258,7 +3348,7 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3274,7 +3364,7 @@ def test_list_sinks_empty_call(): @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3309,7 +3399,7 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3341,7 +3431,7 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3372,7 +3462,7 @@ async def test_list_sinks_field_headers_async(): def test_list_sinks_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3398,7 +3488,7 @@ def test_list_sinks_flattened(): def test_list_sinks_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3412,7 +3502,7 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3440,7 +3530,7 @@ async def test_list_sinks_flattened_async(): @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3454,7 +3544,7 @@ async def test_list_sinks_flattened_error_async(): def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -3507,7 +3597,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): for i in results) def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -3550,7 +3640,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3599,7 +3689,7 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3650,7 +3740,7 @@ async def test_list_sinks_async_pages(): ]) def test_get_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3696,7 +3786,7 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3712,7 +3802,7 @@ def test_get_sink_empty_call(): @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3761,7 +3851,7 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3793,7 +3883,7 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3824,7 +3914,7 @@ 
async def test_get_sink_field_headers_async(): def test_get_sink_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3850,7 +3940,7 @@ def test_get_sink_flattened(): def test_get_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3864,7 +3954,7 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3892,7 +3982,7 @@ async def test_get_sink_flattened_async(): @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3910,7 +4000,7 @@ async def test_get_sink_flattened_error_async(): ]) def test_create_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3956,7 +4046,7 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3972,7 +4062,7 @@ def test_create_sink_empty_call(): @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4021,7 +4111,7 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4053,7 +4143,7 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4084,7 +4174,7 @@ async def test_create_sink_field_headers_async(): def test_create_sink_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4114,7 +4204,7 @@ def test_create_sink_flattened(): def test_create_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4129,7 +4219,7 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4161,7 +4251,7 @@ async def test_create_sink_flattened_async(): @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4180,7 +4270,7 @@ async def test_create_sink_flattened_error_async(): ]) def test_update_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4226,7 +4316,7 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4242,7 +4332,7 @@ def test_update_sink_empty_call(): @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4291,7 +4381,7 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4323,7 +4413,7 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4354,7 +4444,7 @@ async def test_update_sink_field_headers_async(): def test_update_sink_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4388,7 +4478,7 @@ def test_update_sink_flattened(): def test_update_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4404,7 +4494,7 @@ def test_update_sink_flattened_error(): @pytest.mark.asyncio async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4440,7 +4530,7 @@ async def test_update_sink_flattened_async(): @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4460,7 +4550,7 @@ async def test_update_sink_flattened_error_async(): ]) def test_delete_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4489,7 +4579,7 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4505,7 +4595,7 @@ def test_delete_sink_empty_call(): @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4537,7 +4627,7 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4569,7 +4659,7 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4600,7 +4690,7 @@ async def test_delete_sink_field_headers_async(): def test_delete_sink_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4626,7 +4716,7 @@ def test_delete_sink_flattened(): def test_delete_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4640,7 +4730,7 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4668,7 +4758,7 @@ async def test_delete_sink_flattened_async(): @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4686,7 +4776,7 @@ async def test_delete_sink_flattened_error_async(): ]) def test_create_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4715,7 +4805,7 @@ def test_create_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4731,7 +4821,7 @@ def test_create_link_empty_call(): @pytest.mark.asyncio async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4765,7 +4855,7 @@ async def test_create_link_async_from_dict(): def test_create_link_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4797,7 +4887,7 @@ def test_create_link_field_headers(): @pytest.mark.asyncio async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4828,7 +4918,7 @@ async def test_create_link_field_headers_async(): def test_create_link_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4862,7 +4952,7 @@ def test_create_link_flattened(): def test_create_link_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4878,7 +4968,7 @@ def test_create_link_flattened_error(): @pytest.mark.asyncio async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4916,7 +5006,7 @@ async def test_create_link_flattened_async(): @pytest.mark.asyncio async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -4936,7 +5026,7 @@ async def test_create_link_flattened_error_async(): ]) def test_delete_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4965,7 +5055,7 @@ def test_delete_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -4981,7 +5071,7 @@ def test_delete_link_empty_call(): @pytest.mark.asyncio async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5015,7 +5105,7 @@ async def test_delete_link_async_from_dict(): def test_delete_link_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5047,7 +5137,7 @@ def test_delete_link_field_headers(): @pytest.mark.asyncio async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5078,7 +5168,7 @@ async def test_delete_link_field_headers_async(): def test_delete_link_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5104,7 +5194,7 @@ def test_delete_link_flattened(): def test_delete_link_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5118,7 +5208,7 @@ def test_delete_link_flattened_error(): @pytest.mark.asyncio async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5148,7 +5238,7 @@ async def test_delete_link_flattened_async(): @pytest.mark.asyncio async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5166,7 +5256,7 @@ async def test_delete_link_flattened_error_async(): ]) def test_list_links(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5198,7 +5288,7 @@ def test_list_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5214,7 +5304,7 @@ def test_list_links_empty_call(): @pytest.mark.asyncio async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5249,7 +5339,7 @@ async def test_list_links_async_from_dict(): def test_list_links_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5281,7 +5371,7 @@ def test_list_links_field_headers(): @pytest.mark.asyncio async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5312,7 +5402,7 @@ async def test_list_links_field_headers_async(): def test_list_links_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5338,7 +5428,7 @@ def test_list_links_flattened(): def test_list_links_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5352,7 +5442,7 @@ def test_list_links_flattened_error(): @pytest.mark.asyncio async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5380,7 +5470,7 @@ async def test_list_links_flattened_async(): @pytest.mark.asyncio async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5394,7 +5484,7 @@ async def test_list_links_flattened_error_async(): def test_list_links_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -5447,7 +5537,7 @@ def test_list_links_pager(transport_name: str = "grpc"): for i in results) def test_list_links_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -5490,7 +5580,7 @@ def test_list_links_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_links_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5539,7 +5629,7 @@ async def test_list_links_async_pager(): @pytest.mark.asyncio async def test_list_links_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5590,7 +5680,7 @@ async def test_list_links_async_pages(): ]) def test_get_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5626,7 +5716,7 @@ def test_get_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5642,7 +5732,7 @@ def test_get_link_empty_call(): @pytest.mark.asyncio async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5681,7 +5771,7 @@ async def test_get_link_async_from_dict(): def test_get_link_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5713,7 +5803,7 @@ def test_get_link_field_headers(): @pytest.mark.asyncio async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5744,7 +5834,7 @@ 
async def test_get_link_field_headers_async(): def test_get_link_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5770,7 +5860,7 @@ def test_get_link_flattened(): def test_get_link_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5784,7 +5874,7 @@ def test_get_link_flattened_error(): @pytest.mark.asyncio async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5812,7 +5902,7 @@ async def test_get_link_flattened_async(): @pytest.mark.asyncio async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -5830,7 +5920,7 @@ async def test_get_link_flattened_error_async(): ]) def test_list_exclusions(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5862,7 +5952,7 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -5878,7 +5968,7 @@ def test_list_exclusions_empty_call(): @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5913,7 +6003,7 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5945,7 +6035,7 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5976,7 +6066,7 @@ async def test_list_exclusions_field_headers_async(): def test_list_exclusions_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6002,7 +6092,7 @@ def test_list_exclusions_flattened(): def test_list_exclusions_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6016,7 +6106,7 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6044,7 +6134,7 @@ async def test_list_exclusions_flattened_async(): @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6058,7 +6148,7 @@ async def test_list_exclusions_flattened_error_async(): def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -6111,7 +6201,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): for i in results) def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -6154,7 +6244,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual 
call within the gRPC stub, and fake the request. @@ -6203,7 +6293,7 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6254,7 +6344,7 @@ async def test_list_exclusions_async_pages(): ]) def test_get_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6292,7 +6382,7 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -6308,7 +6398,7 @@ def test_get_exclusion_empty_call(): @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6349,7 +6439,7 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6381,7 +6471,7 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6412,7 +6502,7 @@ async def test_get_exclusion_field_headers_async(): def test_get_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6438,7 +6528,7 @@ def test_get_exclusion_flattened(): def test_get_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6452,7 +6542,7 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6480,7 +6570,7 @@ async def test_get_exclusion_flattened_async(): @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6498,7 +6588,7 @@ async def test_get_exclusion_flattened_error_async(): ]) def test_create_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6536,7 +6626,7 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -6552,7 +6642,7 @@ def test_create_exclusion_empty_call(): @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6593,7 +6683,7 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6625,7 +6715,7 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6656,7 +6746,7 @@ async def test_create_exclusion_field_headers_async(): def test_create_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6686,7 +6776,7 @@ def test_create_exclusion_flattened(): def test_create_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6701,7 +6791,7 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6733,7 +6823,7 @@ async def test_create_exclusion_flattened_async(): @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6752,7 +6842,7 @@ async def test_create_exclusion_flattened_error_async(): ]) def test_update_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6790,7 +6880,7 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -6806,7 +6896,7 @@ def test_update_exclusion_empty_call(): @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6847,7 +6937,7 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6879,7 +6969,7 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6910,7 +7000,7 @@ async def test_update_exclusion_field_headers_async(): def test_update_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6944,7 +7034,7 @@ def test_update_exclusion_flattened(): def test_update_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -6960,7 +7050,7 @@ def test_update_exclusion_flattened_error(): @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6996,7 +7086,7 @@ async def test_update_exclusion_flattened_async(): @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -7016,7 +7106,7 @@ async def test_update_exclusion_flattened_error_async(): ]) def test_delete_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7045,7 +7135,7 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -7061,7 +7151,7 @@ def test_delete_exclusion_empty_call(): @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7093,7 +7183,7 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7125,7 +7215,7 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7156,7 +7246,7 @@ async def test_delete_exclusion_field_headers_async(): def test_delete_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7182,7 +7272,7 @@ def test_delete_exclusion_flattened(): def test_delete_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -7196,7 +7286,7 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7224,7 +7314,7 @@ async def test_delete_exclusion_flattened_async(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -7242,7 +7332,7 @@ async def test_delete_exclusion_flattened_error_async(): ]) def test_get_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7280,7 +7370,7 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -7296,7 +7386,7 @@ def test_get_cmek_settings_empty_call(): @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7337,7 +7427,7 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7369,7 +7459,7 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7404,7 +7494,7 @@ async def test_get_cmek_settings_field_headers_async(): ]) def test_update_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7442,7 +7532,7 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -7458,7 +7548,7 @@ def test_update_cmek_settings_empty_call(): @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7499,7 +7589,7 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7531,7 +7621,7 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7566,7 +7656,7 @@ async def test_update_cmek_settings_field_headers_async(): ]) def test_get_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7606,7 +7696,7 @@ def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -7622,7 +7712,7 @@ def test_get_settings_empty_call(): @pytest.mark.asyncio async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7665,7 +7755,7 @@ async def test_get_settings_async_from_dict(): def test_get_settings_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7697,7 +7787,7 @@ def test_get_settings_field_headers(): @pytest.mark.asyncio async def test_get_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7728,7 +7818,7 @@ async def test_get_settings_field_headers_async(): def test_get_settings_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7754,7 +7844,7 @@ def test_get_settings_flattened(): def test_get_settings_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -7768,7 +7858,7 @@ def test_get_settings_flattened_error(): @pytest.mark.asyncio async def test_get_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7796,7 +7886,7 @@ async def test_get_settings_flattened_async(): @pytest.mark.asyncio async def test_get_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -7814,7 +7904,7 @@ async def test_get_settings_flattened_error_async(): ]) def test_update_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7854,7 +7944,7 @@ def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -7870,7 +7960,7 @@ def test_update_settings_empty_call(): @pytest.mark.asyncio async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7913,7 +8003,7 @@ async def test_update_settings_async_from_dict(): def test_update_settings_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7945,7 +8035,7 @@ def test_update_settings_field_headers(): @pytest.mark.asyncio async def test_update_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7976,7 +8066,7 @@ async def test_update_settings_field_headers_async(): def test_update_settings_flattened(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -8006,7 +8096,7 @@ def test_update_settings_flattened(): def test_update_settings_flattened_error(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -8021,7 +8111,7 @@ def test_update_settings_flattened_error(): @pytest.mark.asyncio async def test_update_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8053,7 +8143,7 @@ async def test_update_settings_flattened_async(): @pytest.mark.asyncio async def test_update_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -8072,7 +8162,7 @@ async def test_update_settings_flattened_error_async(): ]) def test_copy_log_entries(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8101,7 +8191,7 @@ def test_copy_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -8117,7 +8207,7 @@ def test_copy_log_entries_empty_call(): @pytest.mark.asyncio async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -8152,17 +8242,17 @@ async def test_copy_log_entries_async_from_dict(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -8172,7 +8262,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -8183,17 +8273,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = ConfigServiceV2Client( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -8205,7 +8295,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = ConfigServiceV2Client(transport=transport) assert client.transport is transport @@ -8213,13 +8303,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -8231,7 +8321,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -8240,14 +8330,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = ConfigServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -8258,7 +8348,7 @@ def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -8268,7 +8358,7 @@ def test_config_service_v2_base_transport(): with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -8335,7 +8425,7 @@ def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -8356,7 +8446,7 @@ def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.ConfigServiceV2Transport() adc.assert_called_once() @@ -8364,7 +8454,7 @@ def test_config_service_v2_base_transport_with_adc(): def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) ConfigServiceV2Client() adc.assert_called_once_with( scopes=None, @@ -8389,7 +8479,7 @@ def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -8433,7 +8523,7 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -8465,7 +8555,7 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -8509,7 +8599,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( ]) def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), transport=transport_name, ) @@ -8523,7 +8613,7 @@ def test_config_service_v2_host_no_port(transport_name): ]) def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), transport=transport_name, ) @@ -8571,7 +8661,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -8643,7 +8733,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc( def test_config_service_v2_grpc_lro_client(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) transport = client.transport @@ -8660,7 +8750,7 @@ def test_config_service_v2_grpc_lro_client(): def test_config_service_v2_grpc_lro_async_client(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc_asyncio', ) transport = client.transport @@ -8907,7 +8997,7 @@ def 
test_client_with_default_client_info(): with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -8915,7 +9005,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -8923,7 +9013,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -8934,7 +9024,7 @@ async def test_transport_close_async(): def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8956,7 +9046,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8980,7 +9070,7 @@ async 
def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9004,7 +9094,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9029,7 +9119,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -9045,7 +9135,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -9063,7 +9153,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9085,7 +9175,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9109,7 +9199,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9133,7 +9223,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9158,7 +9248,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -9174,7 +9264,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -9192,7 +9282,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9214,7 +9304,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9238,7 +9328,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9262,7 +9352,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be 
sent as @@ -9287,7 +9377,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -9303,7 +9393,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -9326,7 +9416,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -9340,7 +9430,7 @@ def test_client_ctx(): ] for transport in transports: client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -9368,7 +9458,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index af7e482f1b40..e8b7788e9522 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -25,6 +25,7 @@ from grpc.experimental import aio import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers @@ -57,13 +58,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". +class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -80,14 +96,13 @@ def test__get_default_mtls_endpoint(): assert LoggingServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto") + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert LoggingServiceV2Client._read_environment_variables() == (True, "auto") + assert LoggingServiceV2Client._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto") + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -95,19 +110,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert 
LoggingServiceV2Client._read_environment_variables() == (False, "never") + assert LoggingServiceV2Client._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "always") + assert LoggingServiceV2Client._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert LoggingServiceV2Client._read_environment_variables() == (False, "auto") + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: LoggingServiceV2Client._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -121,26 +139,95 @@ def test__get_client_cert_source(): assert LoggingServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source assert LoggingServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(LoggingServiceV2AsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert LoggingServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert LoggingServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert LoggingServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert LoggingServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert LoggingServiceV2Client._get_universe_domain(None, None) == LoggingServiceV2Client._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." - assert LoggingServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "auto") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, "auto") == LoggingServiceV2Client.DEFAULT_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert LoggingServiceV2Client._get_api_endpoint(None, None, "never") == LoggingServiceV2Client.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), ]) def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -174,7 +261,7 @@ def test_logging_service_v2_client_service_account_always_use_jwt(transport_clas (LoggingServiceV2AsyncClient, "grpc_asyncio"), ]) def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -205,13 +292,13 @@ def test_logging_service_v2_client_get_transport_class(): (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ]) -@mock.patch.object(LoggingServiceV2Client, 
"DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) def test_logging_service_v2_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -247,7 +334,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -295,7 +382,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -311,7 +398,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, 
quota_project_id=None, @@ -326,8 +413,8 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), ]) -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -343,7 +430,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -367,7 +454,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -396,7 +483,7 @@ def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -476,55 +563,58 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class", [ LoggingServiceV2Client, LoggingServiceV2AsyncClient ]) -@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) -@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) 
+@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) def test_logging_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), @@ -541,7 +631,7 @@ def test_logging_service_v2_client_client_options_scopes(client_class, transport patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -566,7 +656,7 @@ def test_logging_service_v2_client_client_options_credentials_file(client_class, patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -610,7 +700,7 @@ def 
test_logging_service_v2_client_create_channel_credentials_file(client_class, patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -627,8 +717,8 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -660,7 +750,7 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ]) def test_delete_log(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -689,7 +779,7 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -705,7 +795,7 @@ def test_delete_log_empty_call(): @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -737,7 +827,7 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -769,7 +859,7 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -800,7 +890,7 @@ async def test_delete_log_field_headers_async(): def test_delete_log_flattened(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -826,7 +916,7 @@ def test_delete_log_flattened(): def test_delete_log_flattened_error(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -840,7 +930,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -868,7 +958,7 @@ async def test_delete_log_flattened_async(): @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -886,7 +976,7 @@ async def test_delete_log_flattened_error_async(): ]) def test_write_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -916,7 +1006,7 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -932,7 +1022,7 @@ def test_write_log_entries_empty_call(): @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -965,7 +1055,7 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1003,7 +1093,7 @@ def test_write_log_entries_flattened(): def test_write_log_entries_flattened_error(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1020,7 +1110,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1060,7 +1150,7 @@ async def test_write_log_entries_flattened_async(): @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1081,7 +1171,7 @@ async def test_write_log_entries_flattened_error_async(): ]) def test_list_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1113,7 +1203,7 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1129,7 +1219,7 @@ def test_list_log_entries_empty_call(): @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1164,7 +1254,7 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1198,7 +1288,7 @@ def test_list_log_entries_flattened(): def test_list_log_entries_flattened_error(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1214,7 +1304,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1250,7 +1340,7 @@ async def test_list_log_entries_flattened_async(): @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1266,7 +1356,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1314,7 +1404,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): for i in results) def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1357,7 +1447,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # 
Mock the actual call within the gRPC stub, and fake the request. @@ -1406,7 +1496,7 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1457,7 +1547,7 @@ async def test_list_log_entries_async_pages(): ]) def test_list_monitored_resource_descriptors(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1489,7 +1579,7 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1505,7 +1595,7 @@ def test_list_monitored_resource_descriptors_empty_call(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1540,7 +1630,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1588,7 +1678,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") for i in 
results) def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1631,7 +1721,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1680,7 +1770,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1731,7 +1821,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): ]) def test_list_logs(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1765,7 +1855,7 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1781,7 +1871,7 @@ def test_list_logs_empty_call(): @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1818,7 +1908,7 @@ async def test_list_logs_async_from_dict(): def test_list_logs_field_headers(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1850,7 +1940,7 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1881,7 +1971,7 @@ async def test_list_logs_field_headers_async(): def test_list_logs_flattened(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1907,7 +1997,7 @@ def test_list_logs_flattened(): def test_list_logs_flattened_error(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1921,7 +2011,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1949,7 +2039,7 @@ async def test_list_logs_flattened_async(): @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1963,7 +2053,7 @@ async def test_list_logs_flattened_error_async(): def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2016,7 +2106,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): for i in results) def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -2059,7 +2149,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2108,7 +2198,7 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2159,7 +2249,7 @@ async def test_list_logs_async_pages(): ]) def test_tail_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2189,7 +2279,7 @@ def test_tail_log_entries(request_type, transport: str = 'grpc'): @pytest.mark.asyncio async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2225,17 +2315,17 @@ async def test_tail_log_entries_async_from_dict(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -2245,7 +2335,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -2256,17 +2346,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = LoggingServiceV2Client( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -2278,7 +2368,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = LoggingServiceV2Client(transport=transport) assert client.transport is transport @@ -2286,13 +2376,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -2304,7 +2394,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -2313,14 +2403,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = LoggingServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -2331,7 +2421,7 @@ def test_logging_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -2341,7 +2431,7 @@ def test_logging_service_v2_base_transport(): with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: Transport.return_value = None transport = transports.LoggingServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -2377,7 +2467,7 @@ def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.LoggingServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2399,7 +2489,7 @@ def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.LoggingServiceV2Transport() adc.assert_called_once() @@ -2407,7 +2497,7 @@ def test_logging_service_v2_base_transport_with_adc(): def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) LoggingServiceV2Client() adc.assert_called_once_with( scopes=None, @@ -2433,7 +2523,7 @@ def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -2477,7 +2567,7 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -2510,7 +2600,7 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2554,7 +2644,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( ]) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), transport=transport_name, ) @@ -2568,7 +2658,7 @@ def test_logging_service_v2_host_no_port(transport_name): ]) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), transport=transport_name, ) @@ -2616,7 +2706,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -2798,7 +2888,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2806,7 +2896,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2814,7 +2904,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -2825,7 +2915,7 @@ async def test_transport_close_async(): def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2847,7 +2937,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2871,7 +2961,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2895,7 +2985,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 
URI should be sent as @@ -2920,7 +3010,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -2936,7 +3026,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -2954,7 +3044,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2976,7 +3066,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3000,7 +3090,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI 
should be sent as @@ -3024,7 +3114,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3049,7 +3139,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3065,7 +3155,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3083,7 +3173,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3105,7 +3195,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3129,7 +3219,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3153,7 +3243,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3178,7 +3268,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3194,7 +3284,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3217,7 +3307,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -3231,7 +3321,7 @@ def test_client_ctx(): ] for transport in transports: client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -3259,7 +3349,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 58423012cf8e..d1234f80bda3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -25,6 +25,7 @@ from grpc.experimental import aio import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers @@ -55,13 +56,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". +class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -78,14 +94,13 @@ def test__get_default_mtls_endpoint(): assert MetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert MetricsServiceV2Client._read_environment_variables() == (False, "auto") + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert MetricsServiceV2Client._read_environment_variables() == (True, "auto") + assert MetricsServiceV2Client._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "auto") + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): with pytest.raises(ValueError) as excinfo: @@ -93,19 +108,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert 
MetricsServiceV2Client._read_environment_variables() == (False, "never") + assert MetricsServiceV2Client._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "always") + assert MetricsServiceV2Client._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert MetricsServiceV2Client._read_environment_variables() == (False, "auto") + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: MetricsServiceV2Client._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -119,26 +137,95 @@ def test__get_client_cert_source(): assert MetricsServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source assert MetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", 
modify_default_endpoint_template(MetricsServiceV2AsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert MetricsServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + assert MetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert MetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert MetricsServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert MetricsServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert MetricsServiceV2Client._get_universe_domain(None, None) == MetricsServiceV2Client._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + MetricsServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." - assert MetricsServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "auto") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, None, "auto") == MetricsServiceV2Client.DEFAULT_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, None, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, "always") == MetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT - assert MetricsServiceV2Client._get_api_endpoint(None, None, "never") == MetricsServiceV2Client.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. 
+ assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), ]) def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -172,7 +259,7 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(transport_clas (MetricsServiceV2AsyncClient, "grpc_asyncio"), ]) def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -203,13 +290,13 @@ def test_metrics_service_v2_client_get_transport_class(): (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), ]) -@mock.patch.object(MetricsServiceV2Client, 
"DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) def test_metrics_service_v2_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -245,7 +332,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -293,7 +380,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -309,7 +396,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, 
quota_project_id=None, @@ -324,8 +411,8 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"), (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), ]) -@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2Client)) +@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. 
Endpoint is autoswitched to the default @@ -341,7 +428,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -365,7 +452,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -394,7 +481,7 @@ def test_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -474,55 +561,58 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas @pytest.mark.parametrize("client_class", [ MetricsServiceV2Client, MetricsServiceV2AsyncClient ]) -@mock.patch.object(MetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2Client)) -@mock.patch.object(MetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(MetricsServiceV2AsyncClient)) +@mock.patch.object(MetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2Client)) 
+@mock.patch.object(MetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(MetricsServiceV2AsyncClient)) def test_metrics_service_v2_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = MetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), @@ -539,7 +629,7 @@ def test_metrics_service_v2_client_client_options_scopes(client_class, transport patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -564,7 +654,7 @@ def test_metrics_service_v2_client_client_options_credentials_file(client_class, patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -608,7 +698,7 @@ def 
test_metrics_service_v2_client_create_channel_credentials_file(client_class, patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -625,8 +715,8 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -658,7 +748,7 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, ]) def test_list_log_metrics(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -690,7 +780,7 @@ def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -706,7 +796,7 @@ def test_list_log_metrics_empty_call(): @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -741,7 +831,7 @@ async def test_list_log_metrics_async_from_dict(): def test_list_log_metrics_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -773,7 +863,7 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -804,7 +894,7 @@ async def test_list_log_metrics_field_headers_async(): def test_list_log_metrics_flattened(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -830,7 +920,7 @@ def test_list_log_metrics_flattened(): def test_list_log_metrics_flattened_error(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -844,7 +934,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -872,7 +962,7 @@ async def test_list_log_metrics_flattened_async(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -886,7 +976,7 @@ async def test_list_log_metrics_flattened_error_async(): def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -939,7 +1029,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): for i in results) def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -982,7 +1072,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the 
actual call within the gRPC stub, and fake the request. @@ -1031,7 +1121,7 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1082,7 +1172,7 @@ async def test_list_log_metrics_async_pages(): ]) def test_get_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1126,7 +1216,7 @@ def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1142,7 +1232,7 @@ def test_get_log_metric_empty_call(): @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1189,7 +1279,7 @@ async def test_get_log_metric_async_from_dict(): def test_get_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1221,7 +1311,7 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1252,7 +1342,7 @@ async def test_get_log_metric_field_headers_async(): def test_get_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1278,7 +1368,7 @@ def test_get_log_metric_flattened(): def test_get_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1292,7 +1382,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1320,7 +1410,7 @@ async def test_get_log_metric_flattened_async(): @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1338,7 +1428,7 @@ async def test_get_log_metric_flattened_error_async(): ]) def test_create_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1382,7 +1472,7 @@ def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1398,7 +1488,7 @@ def test_create_log_metric_empty_call(): @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1445,7 +1535,7 @@ async def test_create_log_metric_async_from_dict(): def test_create_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1477,7 +1567,7 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1508,7 +1598,7 @@ async def test_create_log_metric_field_headers_async(): def test_create_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1538,7 +1628,7 @@ def test_create_log_metric_flattened(): def test_create_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1553,7 +1643,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1585,7 +1675,7 @@ async def test_create_log_metric_flattened_async(): @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1604,7 +1694,7 @@ async def test_create_log_metric_flattened_error_async(): ]) def test_update_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1648,7 +1738,7 @@ def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1664,7 +1754,7 @@ def test_update_log_metric_empty_call(): @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1711,7 +1801,7 @@ async def test_update_log_metric_async_from_dict(): def test_update_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1743,7 +1833,7 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1774,7 +1864,7 @@ async def test_update_log_metric_field_headers_async(): def test_update_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1804,7 +1894,7 @@ def test_update_log_metric_flattened(): def test_update_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1819,7 +1909,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1851,7 +1941,7 @@ async def test_update_log_metric_flattened_async(): @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1870,7 +1960,7 @@ async def test_update_log_metric_flattened_error_async(): ]) def test_delete_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1899,7 +1989,7 @@ def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1915,7 +2005,7 @@ def test_delete_log_metric_empty_call(): @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1947,7 +2037,7 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1979,7 +2069,7 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2010,7 +2100,7 @@ async def test_delete_log_metric_field_headers_async(): def test_delete_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2036,7 +2126,7 @@ def test_delete_log_metric_flattened(): def test_delete_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2050,7 +2140,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2078,7 +2168,7 @@ async def test_delete_log_metric_flattened_async(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2093,17 +2183,17 @@ async def test_delete_log_metric_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -2113,7 +2203,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -2124,17 +2214,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = MetricsServiceV2Client( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -2146,7 +2236,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = MetricsServiceV2Client(transport=transport) assert client.transport is transport @@ -2154,13 +2244,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -2172,7 +2262,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -2181,14 +2271,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = MetricsServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -2199,7 +2289,7 @@ def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -2209,7 +2299,7 @@ def test_metrics_service_v2_base_transport(): with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -2244,7 +2334,7 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2266,7 +2356,7 @@ def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.MetricsServiceV2Transport() adc.assert_called_once() @@ -2274,7 +2364,7 @@ def test_metrics_service_v2_base_transport_with_adc(): def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) MetricsServiceV2Client() adc.assert_called_once_with( scopes=None, @@ -2300,7 +2390,7 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -2344,7 +2434,7 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -2377,7 +2467,7 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2421,7 +2511,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( ]) def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), transport=transport_name, ) @@ -2435,7 +2525,7 @@ def test_metrics_service_v2_host_no_port(transport_name): ]) def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), transport=transport_name, ) @@ -2483,7 +2573,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -2665,7 +2755,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2673,7 +2763,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2681,7 +2771,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -2692,7 +2782,7 @@ async def test_transport_close_async(): def test_cancel_operation(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2714,7 +2804,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2738,7 +2828,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2762,7 +2852,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 
URI should be sent as @@ -2787,7 +2877,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -2803,7 +2893,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -2821,7 +2911,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2843,7 +2933,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2867,7 +2957,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI 
should be sent as @@ -2891,7 +2981,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2916,7 +3006,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -2932,7 +3022,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -2950,7 +3040,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2972,7 +3062,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2996,7 +3086,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3020,7 +3110,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3045,7 +3135,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3061,7 +3151,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3084,7 +3174,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -3098,7 +3188,7 @@ def test_client_ctx(): ] for transport in transports: client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -3126,7 +3216,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 559636664225..8fdc04b6e624 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -72,8 +72,12 @@ class CloudRedisAsyncClient: _client: CloudRedisClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = CloudRedisClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = CloudRedisClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CloudRedisClient._DEFAULT_UNIVERSE instance_path = staticmethod(CloudRedisClient.instance_path) parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) @@ -169,11 +173,20 @@ def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._client._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + get_transport_class = functools.partial(type(CloudRedisClient).get_transport_class, type(CloudRedisClient)) def __init__(self, *, @@ -182,7 +195,7 @@ def __init__(self, *, client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the cloud redis client. + """Instantiates the cloud redis async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -193,23 +206,41 @@ def __init__(self, *, transport (Union[str, ~.CloudRedisTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -327,6 +358,9 @@ async def sample_list_instances(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -436,6 +470,9 @@ async def sample_get_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -539,6 +576,9 @@ async def sample_get_instance_auth_string(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -694,6 +734,9 @@ async def sample_create_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -838,6 +881,9 @@ async def sample_update_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -967,6 +1013,9 @@ async def sample_upgrade_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1106,6 +1155,9 @@ async def sample_import_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1242,6 +1294,9 @@ async def sample_export_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1372,6 +1427,9 @@ async def sample_failover_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1497,6 +1555,9 @@ async def sample_delete_instance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1637,6 +1698,9 @@ async def sample_reschedule_maintenance(): )), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1700,6 +1764,9 @@ async def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1751,6 +1818,9 @@ async def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1806,6 +1876,9 @@ async def delete_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1856,6 +1929,9 @@ async def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1903,6 +1979,9 @@ async def get_location( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1954,6 +2033,9 @@ async def list_locations( (("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index f3af09495120..1f1080604a33 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -138,11 +138,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = "redis.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "redis.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -324,8 +328,8 @@ def _read_environment_variables(): """Returns the environment variables used by the client. Returns: - Tuple[bool, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE - and the GOOGLE_API_USE_MTLS_ENDPOINT environment variables. + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. Raises: ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not @@ -335,11 +339,12 @@ def _read_environment_variables(): """ use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") if use_client_cert not in ("true", "false"): raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. 
@@ -359,37 +364,111 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source - def _get_api_endpoint(api_override, client_cert_source, use_mtls_endpoint): + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. Args: - api_override (str): The API endpoint override. If specified, this is always the return value of this function. + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. client_cert_source (bytes): The client certificate source used by the client. - use_mtls_endpoint (str): How to use the MTLS endpoint, which depends also on the other parameters. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. Possible values are "always", "auto", or "never". Returns: str: The API endpoint to be used by the client. """ - if api_override is not None: api_endpoint = api_override elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = CloudRedisClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") api_endpoint = CloudRedisClient.DEFAULT_MTLS_ENDPOINT else: - api_endpoint = CloudRedisClient.DEFAULT_ENDPOINT + api_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) return api_endpoint + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. 
+ universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = CloudRedisClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes(client_universe: str, + credentials: ga_credentials.Credentials) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + if credentials: + credentials_universe = credentials.universe_domain + if client_universe != credentials_universe: + default_universe = CloudRedisClient._DEFAULT_UNIVERSE + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + self._is_universe_domain_valid = (self._is_universe_domain_valid or + CloudRedisClient._compare_universes(self.universe_domain, self.transport._credentials)) + return self._is_universe_domain_valid + @property def api_endpoint(self): """Return the API endpoint used by the client instance. Returns: - str: The API endpoint used - by the client instance. + str: The API endpoint used by the client instance. """ return self._api_endpoint + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, transport: Optional[Union[str, CloudRedisTransport]] = None, @@ -410,22 +489,32 @@ def __init__(self, *, NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. 
@@ -443,9 +532,15 @@ def __init__(self, *, self._client_options = client_options_lib.ClientOptions() self._client_options = cast(client_options_lib.ClientOptions, self._client_options) - self._use_client_cert, self._use_mtls_endpoint = CloudRedisClient._read_environment_variables() + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudRedisClient._read_environment_variables() self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) - self._api_endpoint = CloudRedisClient._get_api_endpoint(self._client_options.api_endpoint, self._client_cert_source, self._use_mtls_endpoint) + self._universe_domain = CloudRedisClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: @@ -454,7 +549,8 @@ def __init__(self, *, # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudRedisTransport): + transport_provided = isinstance(transport, CloudRedisTransport) + if transport_provided: # transport is a CloudRedisTransport instance. if credentials or self._client_options.credentials_file or api_key_value: raise ValueError("When providing a transport instance, " @@ -464,14 +560,23 @@ def __init__(self, *, "When providing a transport instance, provide its scopes " "directly." 
) - self._transport = transport - else: + self._transport = cast(CloudRedisTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + CloudRedisClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, credentials_file=self._client_options.credentials_file, @@ -589,6 +694,9 @@ def sample_list_instances(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -698,6 +806,9 @@ def sample_get_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -801,6 +912,9 @@ def sample_get_instance_auth_string(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -956,6 +1070,9 @@ def sample_create_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1100,6 +1217,9 @@ def sample_update_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1229,6 +1349,9 @@ def sample_upgrade_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1368,6 +1491,9 @@ def sample_import_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1504,6 +1630,9 @@ def sample_export_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1634,6 +1763,9 @@ def sample_failover_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1759,6 +1891,9 @@ def sample_delete_instance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1899,6 +2034,9 @@ def sample_reschedule_maintenance(): )), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1975,6 +2113,9 @@ def list_operations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2026,6 +2167,9 @@ def get_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2081,6 +2225,9 @@ def delete_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2131,6 +2278,9 @@ def cancel_operation( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2178,6 +2328,9 @@ def get_location( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) @@ -2229,6 +2382,9 @@ def list_locations( (("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, retry=retry, timeout=timeout, metadata=metadata,) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index c0a9d6bc0949..f54cb911e7ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -58,7 +58,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'redis.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -113,6 +113,10 @@ def __init__( host += ':443' self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 7e07797c707f..26328c6f6dd7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -84,7 +84,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'redis.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 354d27092a62..f9f3df6a6398 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -128,7 +128,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'redis.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 4726c326caf6..450507cf954c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -526,7 +526,7 @@ def __init__(self, *, Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'redis.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 1135b01f4825..37d9cc11c1c0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -28,6 +28,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -67,13 +68,28 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" - # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint(client): return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + +# Anonymous Credentials with universe domain property. If no universe domain is provided, then +# the default universe domain is "googleapis.com". +class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + @property + def universe_domain(self): + return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -90,14 +106,13 @@ def test__get_default_mtls_endpoint(): assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi def test__read_environment_variables(): - - assert CloudRedisClient._read_environment_variables() == (False, "auto") + assert CloudRedisClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - assert CloudRedisClient._read_environment_variables() == (True, "auto") + assert CloudRedisClient._read_environment_variables() == (True, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - assert CloudRedisClient._read_environment_variables() == (False, "auto") + assert CloudRedisClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): 
with pytest.raises(ValueError) as excinfo: @@ -105,19 +120,22 @@ def test__read_environment_variables(): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - assert CloudRedisClient._read_environment_variables() == (False, "never") + assert CloudRedisClient._read_environment_variables() == (False, "never", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - assert CloudRedisClient._read_environment_variables() == (False, "always") + assert CloudRedisClient._read_environment_variables() == (False, "always", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): - assert CloudRedisClient._read_environment_variables() == (False, "auto") + assert CloudRedisClient._read_environment_variables() == (False, "auto", None) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError) as excinfo: CloudRedisClient._read_environment_variables() assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert CloudRedisClient._read_environment_variables() == (False, "auto", "foo.com") + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -131,19 +149,89 @@ def test__get_client_cert_source(): assert CloudRedisClient._get_client_cert_source(None, True) is mock_default_cert_source assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source -@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) 
+@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) def test__get_api_endpoint(): api_override = "foo.com" mock_client_cert_source = mock.Mock() + default_universe = CloudRedisClient._DEFAULT_UNIVERSE + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert CloudRedisClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert CloudRedisClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert CloudRedisClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert CloudRedisClient._get_universe_domain(None, None) == CloudRedisClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + CloudRedisClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." - assert CloudRedisClient._get_api_endpoint(api_override, mock_client_cert_source, "always") == api_override - assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, "auto") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, "auto") == CloudRedisClient.DEFAULT_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT - assert CloudRedisClient._get_api_endpoint(None, None, "never") == CloudRedisClient.DEFAULT_ENDPOINT - +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), +]) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class( + credentials=_AnonymousCredentialsWithUniverseDomain() + ) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. 
+ channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + transport=transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), @@ -151,7 +239,7 @@ def test__get_api_endpoint(): (CloudRedisClient, "rest"), ]) def test_cloud_redis_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -190,7 +278,7 @@ def test_cloud_redis_client_service_account_always_use_jwt(transport_class, tran (CloudRedisClient, "rest"), ]) def test_cloud_redis_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -226,13 +314,13 @@ def test_cloud_redis_client_get_transport_class(): (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), ]) -@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) def test_cloud_redis_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -268,7 +356,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -316,7 +404,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -332,7 +420,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -349,8 +437,8 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), ]) -@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) 
+@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default @@ -366,7 +454,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -390,7 +478,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -419,7 +507,7 @@ def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transpo patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -499,55 +587,58 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): @pytest.mark.parametrize("client_class", [ CloudRedisClient, CloudRedisAsyncClient ]) -@mock.patch.object(CloudRedisClient, 
"DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) -@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) def test_cloud_redis_client_client_api_endpoint(client_class): mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CloudRedisClient._DEFAULT_UNIVERSE + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint - - # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="false", - # use ClientOptions.api_endpoint as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == mock_api_endpoint + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", - # use the DEFAULT_ENDPOINT as the api endpoint. + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source doesn't exist, - # use the DEFAULT_ENDPOINT as the api endpoint. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_ENDPOINT + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + else: + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == default_endpoint - # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (by default), - # GOOGLE_API_USE_CLIENT_CERTIFICATE="true" and default cert source exists, - # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - client = client_class(credentials=ga_credentials.AnonymousCredentials()) - assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT @pytest.mark.parametrize("client_class,transport_class,transport_name", [ (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), @@ -565,7 +656,7 @@ def test_cloud_redis_client_client_options_scopes(client_class, transport_class, patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -591,7 +682,7 @@ def test_cloud_redis_client_client_options_credentials_file(client_class, transp patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -635,7 +726,7 @@ def 
test_cloud_redis_client_create_channel_credentials_file(client_class, transp patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -652,8 +743,8 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() + file_creds = _AnonymousCredentialsWithUniverseDomain() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -681,7 +772,7 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ]) def test_list_instances(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -715,7 +806,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -731,7 +822,7 @@ def test_list_instances_empty_call(): @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -768,7 +859,7 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -800,7 +891,7 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -831,7 +922,7 @@ async def test_list_instances_field_headers_async(): def test_list_instances_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -857,7 +948,7 @@ def test_list_instances_flattened(): def test_list_instances_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -871,7 +962,7 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -899,7 +990,7 @@ async def test_list_instances_flattened_async(): @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -913,7 +1004,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(transport_name: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -966,7 +1057,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): for i in results) def test_list_instances_pages(transport_name: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport_name, ) @@ -1009,7 +1100,7 @@ def test_list_instances_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_instances_async_pager(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1058,7 +1149,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1109,7 +1200,7 @@ async def test_list_instances_async_pages(): ]) def test_get_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1193,7 +1284,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1209,7 +1300,7 @@ def test_get_instance_empty_call(): @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1296,7 +1387,7 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1328,7 +1419,7 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-1359,7 +1450,7 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1385,7 +1476,7 @@ def test_get_instance_flattened(): def test_get_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1399,7 +1490,7 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1427,7 +1518,7 @@ async def test_get_instance_flattened_async(): @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1445,7 +1536,7 @@ async def test_get_instance_flattened_error_async(): ]) def test_get_instance_auth_string(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1477,7 +1568,7 @@ def test_get_instance_auth_string_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1493,7 +1584,7 @@ def test_get_instance_auth_string_empty_call(): @pytest.mark.asyncio async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1528,7 +1619,7 @@ async def test_get_instance_auth_string_async_from_dict(): def test_get_instance_auth_string_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1560,7 +1651,7 @@ def test_get_instance_auth_string_field_headers(): @pytest.mark.asyncio async def test_get_instance_auth_string_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1591,7 +1682,7 @@ async def test_get_instance_auth_string_field_headers_async(): def test_get_instance_auth_string_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1617,7 +1708,7 @@ def test_get_instance_auth_string_flattened(): def test_get_instance_auth_string_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1631,7 +1722,7 @@ def test_get_instance_auth_string_flattened_error(): @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1659,7 +1750,7 @@ async def test_get_instance_auth_string_flattened_async(): @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1677,7 +1768,7 @@ async def test_get_instance_auth_string_flattened_error_async(): ]) def test_create_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1706,7 +1797,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1722,7 +1813,7 @@ def test_create_instance_empty_call(): @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1756,7 +1847,7 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1788,7 +1879,7 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1819,7 +1910,7 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1853,7 +1944,7 @@ def test_create_instance_flattened(): def test_create_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1869,7 +1960,7 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1907,7 +1998,7 @@ async def test_create_instance_flattened_async(): @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -1927,7 +2018,7 @@ async def test_create_instance_flattened_error_async(): ]) def test_update_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -1956,7 +2047,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -1972,7 +2063,7 @@ def test_update_instance_empty_call(): @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2006,7 +2097,7 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2038,7 +2129,7 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2069,7 +2160,7 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2099,7 +2190,7 @@ def test_update_instance_flattened(): def test_update_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2114,7 +2205,7 @@ def test_update_instance_flattened_error(): @pytest.mark.asyncio async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2148,7 +2239,7 @@ async def test_update_instance_flattened_async(): @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2167,7 +2258,7 @@ async def test_update_instance_flattened_error_async(): ]) def test_upgrade_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2196,7 +2287,7 @@ def test_upgrade_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2212,7 +2303,7 @@ def test_upgrade_instance_empty_call(): @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2246,7 +2337,7 @@ async def test_upgrade_instance_async_from_dict(): def test_upgrade_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2278,7 +2369,7 @@ def test_upgrade_instance_field_headers(): @pytest.mark.asyncio async def test_upgrade_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2309,7 +2400,7 @@ async def test_upgrade_instance_field_headers_async(): def test_upgrade_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2339,7 +2430,7 @@ def test_upgrade_instance_flattened(): def test_upgrade_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2354,7 +2445,7 @@ def test_upgrade_instance_flattened_error(): @pytest.mark.asyncio async def test_upgrade_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2388,7 +2479,7 @@ async def test_upgrade_instance_flattened_async(): @pytest.mark.asyncio async def test_upgrade_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2407,7 +2498,7 @@ async def test_upgrade_instance_flattened_error_async(): ]) def test_import_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2436,7 +2527,7 @@ def test_import_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2452,7 +2543,7 @@ def test_import_instance_empty_call(): @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2486,7 +2577,7 @@ async def test_import_instance_async_from_dict(): def test_import_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2518,7 +2609,7 @@ def test_import_instance_field_headers(): @pytest.mark.asyncio async def test_import_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2549,7 +2640,7 @@ async def test_import_instance_field_headers_async(): def test_import_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2579,7 +2670,7 @@ def test_import_instance_flattened(): def test_import_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2594,7 +2685,7 @@ def test_import_instance_flattened_error(): @pytest.mark.asyncio async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2628,7 +2719,7 @@ async def test_import_instance_flattened_async(): @pytest.mark.asyncio async def test_import_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2647,7 +2738,7 @@ async def test_import_instance_flattened_error_async(): ]) def test_export_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2676,7 +2767,7 @@ def test_export_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2692,7 +2783,7 @@ def test_export_instance_empty_call(): @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2726,7 +2817,7 @@ async def test_export_instance_async_from_dict(): def test_export_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2758,7 +2849,7 @@ def test_export_instance_field_headers(): @pytest.mark.asyncio async def test_export_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2789,7 +2880,7 @@ async def test_export_instance_field_headers_async(): def test_export_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2819,7 +2910,7 @@ def test_export_instance_flattened(): def test_export_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2834,7 +2925,7 @@ def test_export_instance_flattened_error(): @pytest.mark.asyncio async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2868,7 +2959,7 @@ async def test_export_instance_flattened_async(): @pytest.mark.asyncio async def test_export_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -2887,7 +2978,7 @@ async def test_export_instance_flattened_error_async(): ]) def test_failover_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2916,7 +3007,7 @@ def test_failover_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -2932,7 +3023,7 @@ def test_failover_instance_empty_call(): @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -2966,7 +3057,7 @@ async def test_failover_instance_async_from_dict(): def test_failover_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2998,7 +3089,7 @@ def test_failover_instance_field_headers(): @pytest.mark.asyncio async def test_failover_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3029,7 +3120,7 @@ async def test_failover_instance_field_headers_async(): def test_failover_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3059,7 +3150,7 @@ def test_failover_instance_flattened(): def test_failover_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3074,7 +3165,7 @@ def test_failover_instance_flattened_error(): @pytest.mark.asyncio async def test_failover_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3108,7 +3199,7 @@ async def test_failover_instance_flattened_async(): @pytest.mark.asyncio async def test_failover_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3127,7 +3218,7 @@ async def test_failover_instance_flattened_error_async(): ]) def test_delete_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3156,7 +3247,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3172,7 +3263,7 @@ def test_delete_instance_empty_call(): @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3206,7 +3297,7 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3238,7 +3329,7 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3269,7 +3360,7 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3295,7 +3386,7 @@ def test_delete_instance_flattened(): def test_delete_instance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3309,7 +3400,7 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3339,7 +3430,7 @@ async def test_delete_instance_flattened_async(): @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3357,7 +3448,7 @@ async def test_delete_instance_flattened_error_async(): ]) def test_reschedule_maintenance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3386,7 +3477,7 @@ def test_reschedule_maintenance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) @@ -3402,7 +3493,7 @@ def test_reschedule_maintenance_empty_call(): @pytest.mark.asyncio async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3436,7 +3527,7 @@ async def test_reschedule_maintenance_async_from_dict(): def test_reschedule_maintenance_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3468,7 +3559,7 @@ def test_reschedule_maintenance_field_headers(): @pytest.mark.asyncio async def test_reschedule_maintenance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3499,7 +3590,7 @@ async def test_reschedule_maintenance_field_headers_async(): def test_reschedule_maintenance_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3531,7 +3622,7 @@ def test_reschedule_maintenance_flattened(): def test_reschedule_maintenance_flattened_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3547,7 +3638,7 @@ def test_reschedule_maintenance_flattened_error(): @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3583,7 +3674,7 @@ async def test_reschedule_maintenance_flattened_async(): @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Attempting to call a method with both a request object and flattened @@ -3603,7 +3694,7 @@ async def test_reschedule_maintenance_flattened_error_async(): ]) def test_list_instances_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -3651,14 +3742,14 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -3668,7 +3759,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan assert jsonified_request["parent"] == 'parent_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -3710,7 +3801,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan def test_list_instances_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.list_instances._get_unset_required_fields({}) assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) @@ -3719,7 +3810,7 @@ def test_list_instances_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -3758,7 +3849,7 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ListInstancesRequest): 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3778,7 +3869,7 @@ def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=c def test_list_instances_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -3816,7 +3907,7 @@ def test_list_instances_rest_flattened(): def test_list_instances_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3831,7 +3922,7 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): def test_list_instances_rest_pager(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -3897,7 +3988,7 @@ def test_list_instances_rest_pager(transport: str = 'rest'): ]) def test_get_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -3995,14 +4086,14 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4010,7 +4101,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -4052,7 +4143,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR def test_get_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -4061,7 +4152,7 @@ def test_get_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4100,7 +4191,7 @@ def test_get_instance_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4120,7 +4211,7 @@ def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=clo def test_get_instance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4158,7 +4249,7 @@ def test_get_instance_rest_flattened(): def test_get_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4173,7 +4264,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): def test_get_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -4184,7 +4275,7 @@ def test_get_instance_rest_error(): ]) def test_get_instance_auth_string_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4230,14 +4321,14 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
# verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4245,7 +4336,7 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -4287,7 +4378,7 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
def test_get_instance_auth_string_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.get_instance_auth_string._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -4296,7 +4387,7 @@ def test_get_instance_auth_string_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_auth_string_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4335,7 +4426,7 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4355,7 +4446,7 @@ def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', requ def test_get_instance_auth_string_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4393,7 +4484,7 @@ def test_get_instance_auth_string_rest_flattened(): def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4408,7 +4499,7 @@ def 
test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): def test_get_instance_auth_string_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -4419,7 +4510,7 @@ def test_get_instance_auth_string_rest_error(): ]) def test_create_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4526,7 +4617,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4536,7 +4627,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns jsonified_request["parent"] = 'parent_value' jsonified_request["instanceId"] = 'instance_id_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("instance_id", )) jsonified_request.update(unset_fields) @@ -4548,7 +4639,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns assert jsonified_request["instanceId"] == 'instance_id_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -4592,7 +4683,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns def test_create_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.create_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) @@ -4601,7 +4692,7 @@ def test_create_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4641,7 +4732,7 @@ def test_create_instance_rest_interceptors(null_interceptor): def test_create_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4661,7 +4752,7 @@ def test_create_instance_rest_bad_request(transport: str = 'rest', request_type= def test_create_instance_rest_flattened(): client = 
CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4699,7 +4790,7 @@ def test_create_instance_rest_flattened(): def test_create_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4716,7 +4807,7 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): def test_create_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -4727,7 +4818,7 @@ def test_create_instance_rest_error(): ]) def test_update_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4831,12 +4922,12 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) @@ -4844,7 +4935,7 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns # verify required fields with non-default values are left alone client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -4884,7 +4975,7 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns def test_update_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.update_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) @@ -4893,7 +4984,7 @@ def test_update_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4933,7 +5024,7 @@ def test_update_instance_rest_interceptors(null_interceptor): def test_update_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -4953,7 +5044,7 @@ def test_update_instance_rest_bad_request(transport: str = 'rest', request_type= def test_update_instance_rest_flattened(): client = CloudRedisClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -4990,7 +5081,7 @@ def test_update_instance_rest_flattened(): def test_update_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5006,7 +5097,7 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): def test_update_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -5017,7 +5108,7 @@ def test_update_instance_rest_error(): ]) def test_upgrade_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5059,7 +5150,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).upgrade_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5067,7 +5158,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI jsonified_request["name"] = 'name_value' jsonified_request["redisVersion"] = 'redis_version_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).upgrade_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5077,7 +5168,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI assert jsonified_request["redisVersion"] == 'redis_version_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -5117,7 +5208,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI def test_upgrade_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.upgrade_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "redisVersion", ))) @@ -5126,7 +5217,7 @@ def test_upgrade_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_upgrade_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5166,7 +5257,7 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): def test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5186,7 +5277,7 @@ def 
test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type def test_upgrade_instance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5223,7 +5314,7 @@ def test_upgrade_instance_rest_flattened(): def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5239,7 +5330,7 @@ def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): def test_upgrade_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -5250,7 +5341,7 @@ def test_upgrade_instance_rest_error(): ]) def test_import_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5291,14 +5382,14 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).import_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).import_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5306,7 +5397,7 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -5346,7 +5437,7 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns def test_import_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.import_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) @@ -5355,7 +5446,7 @@ def test_import_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_import_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5395,7 +5486,7 @@ def test_import_instance_rest_interceptors(null_interceptor): def test_import_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5415,7 +5506,7 @@ def 
test_import_instance_rest_bad_request(transport: str = 'rest', request_type= def test_import_instance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5452,7 +5543,7 @@ def test_import_instance_rest_flattened(): def test_import_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5468,7 +5559,7 @@ def test_import_instance_rest_flattened_error(transport: str = 'rest'): def test_import_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -5479,7 +5570,7 @@ def test_import_instance_rest_error(): ]) def test_export_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5520,14 +5611,14 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5535,7 +5626,7 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -5575,7 +5666,7 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns def test_export_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.export_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) @@ -5584,7 +5675,7 @@ def test_export_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_export_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5624,7 +5715,7 @@ def test_export_instance_rest_interceptors(null_interceptor): def test_export_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5644,7 +5735,7 @@ def 
test_export_instance_rest_bad_request(transport: str = 'rest', request_type= def test_export_instance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5681,7 +5772,7 @@ def test_export_instance_rest_flattened(): def test_export_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5697,7 +5788,7 @@ def test_export_instance_rest_flattened_error(transport: str = 'rest'): def test_export_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -5708,7 +5799,7 @@ def test_export_instance_rest_error(): ]) def test_failover_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5749,14 +5840,14 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5764,7 +5855,7 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -5804,7 +5895,7 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove def test_failover_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.failover_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -5813,7 +5904,7 @@ def test_failover_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_failover_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5853,7 +5944,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): def test_failover_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5873,7 +5964,7 @@ def 
test_failover_instance_rest_bad_request(transport: str = 'rest', request_typ def test_failover_instance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5910,7 +6001,7 @@ def test_failover_instance_rest_flattened(): def test_failover_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -5926,7 +6017,7 @@ def test_failover_instance_rest_flattened_error(transport: str = 'rest'): def test_failover_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -5937,7 +6028,7 @@ def test_failover_instance_rest_error(): ]) def test_delete_instance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -5978,14 +6069,14 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5993,7 +6084,7 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -6032,7 +6123,7 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns def test_delete_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.delete_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -6041,7 +6132,7 @@ def test_delete_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -6081,7 +6172,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): def test_delete_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6101,7 +6192,7 @@ def test_delete_instance_rest_bad_request(transport: str = 
'rest', request_type= def test_delete_instance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6137,7 +6228,7 @@ def test_delete_instance_rest_flattened(): def test_delete_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6152,7 +6243,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): def test_delete_instance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -6163,7 +6254,7 @@ def test_delete_instance_rest_error(): ]) def test_reschedule_maintenance_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6204,14 +6295,14 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re # verify fields with default values are dropped - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).reschedule_maintenance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).reschedule_maintenance._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6219,7 +6310,7 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) request = request_type(**request_init) @@ -6259,7 +6350,7 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re def test_reschedule_maintenance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", ))) @@ -6268,7 +6359,7 @@ def test_reschedule_maintenance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_reschedule_maintenance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -6308,7 +6399,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6328,7 +6419,7 @@ def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', reques def 
test_reschedule_maintenance_rest_flattened(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) @@ -6366,7 +6457,7 @@ def test_reschedule_maintenance_rest_flattened(): def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -6383,7 +6474,7 @@ def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): def test_reschedule_maintenance_rest_error(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest' ) @@ -6391,17 +6482,17 @@ def test_reschedule_maintenance_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = CloudRedisClient( @@ -6411,7 +6502,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. 
transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -6422,17 +6513,17 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = CloudRedisClient( client_options=options, - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # It is an error to provide scopes and a transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) with pytest.raises(ValueError): client = CloudRedisClient( @@ -6444,7 +6535,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) client = CloudRedisClient(transport=transport) assert client.transport is transport @@ -6452,13 +6543,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.CloudRedisGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel transport = transports.CloudRedisGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) channel = transport.grpc_channel assert channel @@ -6471,7 +6562,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class() adc.assert_called_once() @@ -6481,14 +6572,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = CloudRedisClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) assert isinstance( client.transport, @@ -6499,7 +6590,7 @@ def test_cloud_redis_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudRedisTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), credentials_file="credentials.json" ) @@ -6509,7 +6600,7 @@ def test_cloud_redis_base_transport(): with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: Transport.return_value = None transport = transports.CloudRedisTransport( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Every method on the transport should just blindly @@ -6558,7 +6649,7 @@ def test_cloud_redis_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.CloudRedisTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -6576,7 +6667,7 @@ def test_cloud_redis_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport = transports.CloudRedisTransport() adc.assert_called_once() @@ -6584,7 +6675,7 @@ def test_cloud_redis_base_transport_with_adc(): def test_cloud_redis_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) CloudRedisClient() adc.assert_called_once_with( scopes=None, @@ -6606,7 +6697,7 @@ def test_cloud_redis_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -6651,7 +6742,7 @@ def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = ga_credentials.AnonymousCredentials() + creds = _AnonymousCredentialsWithUniverseDomain() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -6680,7 +6771,7 @@ def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -6718,7 +6809,7 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( ) def test_cloud_redis_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.CloudRedisRestTransport ( credentials=cred, @@ -6729,7 +6820,7 @@ def test_cloud_redis_http_transport_client_cert_source_for_mtls(): def test_cloud_redis_rest_lro_client(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='rest', ) transport = client.transport @@ -6751,7 +6842,7 @@ def test_cloud_redis_rest_lro_client(): ]) def test_cloud_redis_host_no_port(transport_name): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), transport=transport_name, ) @@ -6768,7 +6859,7 @@ def test_cloud_redis_host_no_port(transport_name): ]) def test_cloud_redis_host_with_port(transport_name): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), transport=transport_name, ) @@ -6782,8 +6873,8 @@ def test_cloud_redis_host_with_port(transport_name): "rest", ]) def test_cloud_redis_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() + creds1 = _AnonymousCredentialsWithUniverseDomain() + creds2 = _AnonymousCredentialsWithUniverseDomain() client1 = CloudRedisClient( credentials=creds1, 
transport=transport_name, @@ -6865,7 +6956,7 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() + cred = _AnonymousCredentialsWithUniverseDomain() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -6937,7 +7028,7 @@ def test_cloud_redis_transport_channel_mtls_with_adc( def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc', ) transport = client.transport @@ -6954,7 +7045,7 @@ def test_cloud_redis_grpc_lro_client(): def test_cloud_redis_grpc_lro_async_client(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport='grpc_asyncio', ) transport = client.transport @@ -7083,7 +7174,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -7091,7 +7182,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: transport_class = CloudRedisClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -7099,7 +7190,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = CloudRedisAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -7110,7 +7201,7 @@ async def test_transport_close_async(): def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7132,7 +7223,7 @@ def test_get_location_rest_bad_request(transport: str = 'rest', request_type=loc ]) def test_get_location_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -7157,7 +7248,7 @@ def test_get_location_rest(request_type): def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7179,7 +7270,7 @@ def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=l ]) def test_list_locations_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1'} @@ -7204,7 +7295,7 @@ def test_list_locations_rest(request_type): def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7226,7 +7317,7 @@ def 
test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_cancel_operation_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -7251,7 +7342,7 @@ def test_cancel_operation_rest(request_type): def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7273,7 +7364,7 @@ def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_delete_operation_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -7298,7 +7389,7 @@ def test_delete_operation_rest(request_type): def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7320,7 +7411,7 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op ]) def test_get_operation_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -7345,7 +7436,7 @@ def test_get_operation_rest(request_type): def test_list_operations_rest_bad_request(transport: str = 'rest', 
request_type=operations_pb2.ListOperationsRequest): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) @@ -7367,7 +7458,7 @@ def test_list_operations_rest_bad_request(transport: str = 'rest', request_type= ]) def test_list_operations_rest(request_type): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -7393,7 +7484,7 @@ def test_list_operations_rest(request_type): def test_delete_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7415,7 +7506,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7439,7 +7530,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): def test_delete_operation_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7463,7 +7554,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), 
) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7488,7 +7579,7 @@ async def test_delete_operation_field_headers_async(): def test_delete_operation_from_dict(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -7504,7 +7595,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -7522,7 +7613,7 @@ async def test_delete_operation_from_dict_async(): def test_cancel_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7544,7 +7635,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7568,7 +7659,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is 
part of the HTTP/1.1 URI should be sent as @@ -7592,7 +7683,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7617,7 +7708,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -7633,7 +7724,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -7651,7 +7742,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7673,7 +7764,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7697,7 +7788,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7721,7 +7812,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7746,7 +7837,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -7762,7 +7853,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -7780,7 +7871,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7802,7 +7893,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7826,7 +7917,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7850,7 +7941,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7875,7 +7966,7 
@@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -7891,7 +7982,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -7909,7 +8000,7 @@ async def test_list_operations_from_dict_async(): def test_list_locations(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7931,7 +8022,7 @@ def test_list_locations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7955,7 +8046,7 @@ async def test_list_locations_async(transport: str = "grpc_asyncio"): def test_list_locations_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7979,7 +8070,7 @@ def 
test_list_locations_field_headers(): @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8004,7 +8095,7 @@ async def test_list_locations_field_headers_async(): def test_list_locations_from_dict(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8020,7 +8111,7 @@ def test_list_locations_from_dict(): @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8038,7 +8129,7 @@ async def test_list_locations_from_dict_async(): def test_get_location(transport: str = "grpc"): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8060,7 +8151,7 @@ def test_get_location(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8084,7 +8175,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials()) + credentials=_AnonymousCredentialsWithUniverseDomain()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -8107,7 +8198,7 @@ def test_get_location_field_headers(): @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=_AnonymousCredentialsWithUniverseDomain() ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8132,7 +8223,7 @@ async def test_get_location_field_headers_async(): def test_get_location_from_dict(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8148,7 +8239,7 @@ def test_get_location_from_dict(): @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8172,7 +8263,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -8187,7 +8278,7 @@ def test_client_ctx(): ] for transport in transports: client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport ) # Test client calls underlying transport. 
@@ -8215,7 +8306,7 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 15b4f4c300f5..ec2fb6ab5b55 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -19,7 +19,7 @@ import pytest from google.api_core.client_options import ClientOptions # type: ignore -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.showcase import EchoClient from google.showcase import IdentityClient from google.showcase import MessagingClient @@ -86,18 +86,27 @@ def pytest_addoption(parser): ) +class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): + def __init__(self, universe_domain="googleapis.com"): + super(_AnonymousCredentialsWithUniverseDomain, self).__init__() + self._universe_domain = universe_domain + + +# TODO: Need to test without passing in a transport class def construct_client( client_class, use_mtls, transport_name="grpc", - channel_creator=grpc.insecure_channel, + channel_creator=grpc.insecure_channel, # for grpc,grpc_asyncio only + credentials=_AnonymousCredentialsWithUniverseDomain(), + transport_endpoint="localhost:7469" ): if use_mtls: with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: mock_ssl_cred.return_value = ssl_credentials client = client_class( - credentials=credentials.AnonymousCredentials(), + credentials=credentials, client_options=client_options, ) mock_ssl_cred.assert_called_once_with( @@ -107,21 +116,24 @@ 
def construct_client( else: transport_cls = client_class.get_transport_class(transport_name) if transport_name in ["grpc", "grpc_asyncio"]: + # TODO(gapic-generator-python/issues/1914): Need to test grpc transports without a channel_creator + assert channel_creator transport = transport_cls( - credentials=credentials.AnonymousCredentials(), - channel=channel_creator("localhost:7469"), + credentials=credentials, + channel=channel_creator(transport_endpoint), ) elif transport_name == "rest": # The custom host explicitly bypasses https. transport = transport_cls( - credentials=credentials.AnonymousCredentials(), - host="localhost:7469", + credentials=credentials, + host=transport_endpoint, url_scheme="http", ) else: raise RuntimeError(f"Unexpected transport type: {transport_name}") - return client_class(transport=transport) + client = client_class(transport=transport) + return client @pytest.fixture @@ -129,11 +141,37 @@ def use_mtls(request): return request.config.getoption("--mtls") +@pytest.fixture +def parametrized_echo(use_mtls, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): + print( + f"test_params: {channel_creator, transport_name, transport_endpoint, credential_universe, client_universe}") + credentials = _AnonymousCredentialsWithUniverseDomain( + universe_domain=credential_universe) + client = construct_client(EchoClient, use_mtls, + transport_endpoint=transport_endpoint, + transport_name=transport_name, + channel_creator=channel_creator, + credentials=credentials) + # Since `channel_creator` does not take credentials, we set them + # explicitly in the client for test purposes. + # + # TODO: verify that the transport gets the correct credentials + # from the client. 
+ if credential_universe: + client.transport._credentials = credentials + return client + + @pytest.fixture(params=["grpc", "rest"]) def echo(use_mtls, request): return construct_client(EchoClient, use_mtls, transport_name=request.param) +@pytest.fixture(params=["grpc", "rest"]) +def echo_with_universe_credentials_localhost(use_mtls, request): + return construct_client(EchoClient, use_mtls, transport_name=request.param, credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="localhost:7469")) + + @pytest.fixture(params=["grpc", "rest"]) def identity(use_mtls, request): return construct_client(IdentityClient, use_mtls, transport_name=request.param) @@ -197,6 +235,7 @@ def intercepted_echo(use_mtls): ) intercept_channel = grpc.intercept_channel(channel, interceptor) transport = EchoClient.get_transport_class("grpc")( - channel=intercept_channel + credentials=_AnonymousCredentialsWithUniverseDomain(), + channel=intercept_channel, ) return EchoClient(transport=transport) diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py new file mode 100644 index 000000000000..a29ddc481dce --- /dev/null +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -0,0 +1,77 @@ +import pytest + +import grpc + +# Define the parametrized data +vary_transport = [ + (grpc.insecure_channel, "grpc", "localhost:7469", + "googleapis.com", "googleapis.com"), + (grpc.insecure_channel, "rest", "localhost:7469", + "googleapis.com", "googleapis.com"), +] + +vary_channel_transport_endpoints_universes = [ + (grpc.insecure_channel, "grpc", "showcase.googleapis.com", + "showcase.googleapis.com", "googleapis.com"), + (grpc.insecure_channel, "grpc", "showcase.googleapis.com", + "localhost:7469", "googleapis.com"), + (grpc.insecure_channel, "grpc", "localhost:7469", + "showcase.googleapis.com", "googleapis.com"), + (grpc.insecure_channel, "grpc", "localhost:7469", + "localhost:7469", 
"googleapis.com"), + (grpc.insecure_channel, "rest", "showcase.googleapis.com", + "showcase.googleapis.com", "googleapis.com"), + (grpc.insecure_channel, "rest", "showcase.googleapis.com", + "localhost:7469", "googleapis.com"), + (grpc.insecure_channel, "rest", "localhost:7469", + "showcase.googleapis.com", "googleapis.com"), + (grpc.insecure_channel, "rest", "localhost:7469", + "localhost:7469", "googleapis.com"), +] + + +@pytest.mark.parametrize( + "channel_creator, transport_name, transport_endpoint, credential_universe, client_universe", + vary_transport +) +def test_universe_domain_validation_pass(parametrized_echo, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): + # Test that only the configured client universe and credentials universe are used for validation + assert parametrized_echo.universe_domain == client_universe + assert parametrized_echo.transport._credentials._universe_domain == credential_universe + if transport_name == "rest": + assert parametrized_echo.api_endpoint == "http://" + transport_endpoint + else: + assert parametrized_echo.api_endpoint == transport_endpoint + response = parametrized_echo.echo({ + 'content': 'Universe validation succeeded!' + }) + assert response.content == "Universe validation succeeded!" + + +# TODO: Test without passing a channel to gRPC transports in the test fixture +# TODO: Test without creating a transport in the test fixture +# TODO: Test asynchronous client as well. 
+# @pytest.mark.parametrize("channel_creator", [grpc.insecure_channel, None]) + + +@pytest.mark.parametrize( + "channel_creator, transport_name, transport_endpoint, credential_universe, client_universe", + vary_channel_transport_endpoints_universes +) +def test_universe_domain_validation_fail(parametrized_echo, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): + """Test that only the client and credentials universes are used for validation, and not the endpoint.""" + assert parametrized_echo.universe_domain == client_universe + assert parametrized_echo.transport._credentials._universe_domain == credential_universe + if transport_name == "rest": + assert parametrized_echo.api_endpoint == "http://" + transport_endpoint + elif channel_creator == grpc.insecure_channel: + # TODO: Investigate where this endpoint override is coming from + assert parametrized_echo.api_endpoint == "localhost:7469" + else: + assert parametrized_echo.api_endpoint == transport_endpoint + with pytest.raises(ValueError) as err: + parametrized_echo.echo({ + 'content': 'Universe validation failed!' + }) + assert str( + err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
From 127618922c6f71a81df376b90c9f45698a6b57d8 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 31 Jan 2024 13:35:05 -0500 Subject: [PATCH 1097/1339] fix: mock out configure_mtls_channel in rest transport for testing (#1920) --- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 7 ++++--- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 7 ++++--- .../unit/gapic/credentials_v1/test_iam_credentials.py | 7 ++++--- .../eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py | 7 ++++--- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 7 ++++--- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 7 ++++--- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 7 ++++--- .../redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 7 ++++--- 8 files changed, 32 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index eed35fd0dd4f..9ae5d23fb7a8 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -664,9 +664,10 @@ def test_{{ service.client_name|snake_case }}_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index b8825c96b3cd..4ae533f0bd77 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -599,9 +599,10 @@ def test_asset_service_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index f8b5556ffa4d..954bc6bef7b3 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -589,9 +589,10 @@ def test_iam_credentials_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 1b06b0209d32..cd024521907b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -609,9 +609,10 @@ def test_eventarc_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 42795193ecd3..bf3b8f5b3ecf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -575,9 +575,10 @@ def test_config_service_v2_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e8b7788e9522..e821729dfe70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -576,9 +576,10 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index d1234f80bda3..300b7e27eccb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -574,9 +574,10 @@ def test_metrics_service_v2_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 37d9cc11c1c0..ce363b93d469 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -600,9 +600,10 @@ def test_cloud_redis_client_client_api_endpoint(client_class): # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", # use ClientOptions.api_endpoint as the api endpoint regardless. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) - assert client.api_endpoint == api_override + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. From 40be3358eb6071ef3f046fbbf11d897392011086 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 31 Jan 2024 13:40:19 -0500 Subject: [PATCH 1098/1339] chore(main): release 1.14.0 (#1919) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 93c878955a2c..7a26092d8b26 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog +## [1.14.0](https://github.com/googleapis/gapic-generator-python/compare/v1.13.1...v1.14.0) (2024-01-31) + + +### Features + +* Allow users to explicitly configure universe domain. 
([#1898](https://github.com/googleapis/gapic-generator-python/issues/1898)) ([e5a55c1](https://github.com/googleapis/gapic-generator-python/commit/e5a55c1c01f0e1b3739927640c713057cd17b4ae)) + + +### Bug Fixes + +* Mock out configure_mtls_channel in rest transport for testing ([#1920](https://github.com/googleapis/gapic-generator-python/issues/1920)) ([9ade50d](https://github.com/googleapis/gapic-generator-python/commit/9ade50d2dfff0bb15fbe1fb2b00644047f6d2d38)) + ## [1.13.1](https://github.com/googleapis/gapic-generator-python/compare/v1.13.0...v1.13.1) (2024-01-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1c63a79e263f..91759b01bc4a 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.13.1" +version = "1.14.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 56cca5e46a1f3cfd07ba7eda32aceba59c1d601e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 2 Feb 2024 11:17:45 -0500 Subject: [PATCH 1099/1339] fix: add google-auth as a direct dependency (#1923) --- .../gapic/ads-templates/setup.py.j2 | 1 + .../%sub/services/%service/client.py.j2 | 21 +- .../gapic/templates/setup.py.j2 | 1 + .../templates/testing/constraints-3.7.txt.j2 | 1 + .../%name_%version/%sub/_test_mixins.py.j2 | 160 +-- .../%name_%version/%sub/test_%service.py.j2 | 150 +-- .../gapic/%name_%version/%sub/test_macros.j2 | 56 +- packages/gapic-generator/noxfile.py | 20 +- .../asset_v1/services/asset_service/client.py | 21 +- .../tests/integration/goldens/asset/setup.py | 1 + .../goldens/asset/testing/constraints-3.7.txt | 1 + .../unit/gapic/asset_v1/test_asset_service.py | 1008 +++++++++-------- 
.../services/iam_credentials/client.py | 21 +- .../integration/goldens/credentials/setup.py | 1 + .../credentials/testing/constraints-3.7.txt | 1 + .../credentials_v1/test_iam_credentials.py | 292 ++--- .../eventarc_v1/services/eventarc/client.py | 21 +- .../integration/goldens/eventarc/setup.py | 1 + .../eventarc/testing/constraints-3.7.txt | 1 + .../unit/gapic/eventarc_v1/test_eventarc.py | 1006 ++++++++-------- .../services/config_service_v2/client.py | 21 +- .../services/logging_service_v2/client.py | 21 +- .../services/metrics_service_v2/client.py | 21 +- .../integration/goldens/logging/setup.py | 1 + .../logging/testing/constraints-3.7.txt | 1 + .../logging_v2/test_config_service_v2.py | 674 +++++------ .../logging_v2/test_logging_service_v2.py | 268 ++--- .../logging_v2/test_metrics_service_v2.py | 268 ++--- .../redis_v1/services/cloud_redis/client.py | 21 +- .../tests/integration/goldens/redis/setup.py | 1 + .../goldens/redis/testing/constraints-3.7.txt | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 668 +++++------ .../gapic-generator/tests/system/conftest.py | 24 +- .../tests/system/test_universe_domain.py | 45 +- 34 files changed, 2452 insertions(+), 2368 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 68a55721f296..8bc504f57a88 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -30,6 +30,7 @@ else: dependencies = [ "google-api-core[grpc] >= 2.10.0, < 3.0.0dev", + "google-auth >= 2.14.1, <3.0.0dev", "googleapis-common-protos >= 1.53.0", "grpcio >= 1.10.0", "proto-plus >= 1.22.3, <2.0.0dev", diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 6bf6b0fa5aba..741c03afe0f1 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -304,6 +304,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -322,6 +323,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -385,15 +387,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). 
" + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 1d5ee4f43c52..551c611d63ff 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -31,6 +31,7 @@ else: dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 7c6c2ad05d1b..abc9e8ae92e0 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -6,6 +6,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 {% for package_tuple, package_info in pypi_packages.items() %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 1c3699f1aed1..eaa40e24dc2d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -3,7 
+3,7 @@ def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -25,7 +25,7 @@ def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request ]) def test_{{ name|snake_case }}_rest(request_type): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} @@ -74,11 +74,11 @@ def test_delete_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -116,11 +116,11 @@ def test_delete_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -160,11 +160,11 @@ def test_delete_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. @@ -203,11 +203,11 @@ def test_cancel_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -245,11 +245,11 @@ def test_cancel_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -289,11 +289,11 @@ def test_cancel_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -331,11 +331,11 @@ def test_wait_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -373,11 +373,11 @@ def test_wait_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -417,11 +417,11 @@ def test_wait_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -460,11 +460,11 @@ def test_get_operation(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -502,11 +502,11 @@ def test_get_operation_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -546,11 +546,11 @@ def test_get_operation_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -589,11 +589,11 @@ def test_list_operations(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -631,11 +631,11 @@ def test_list_operations_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -675,11 +675,11 @@ def test_list_operations_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -724,11 +724,11 @@ def test_list_locations(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -766,11 +766,11 @@ def test_list_locations_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -810,11 +810,11 @@ def test_list_locations_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -855,11 +855,11 @@ def test_get_location(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -897,10 +897,10 @@ def test_get_location_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain()) + credentials=ga_credentials.AnonymousCredentials()) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) {% endif %} @@ -940,11 +940,11 @@ def test_get_location_from_dict(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -986,11 +986,11 @@ def test_set_iam_policy(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -1034,11 +1034,11 @@ def test_set_iam_policy_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -1071,7 +1071,7 @@ def test_set_iam_policy_field_headers(): def test_set_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1090,7 +1090,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1110,7 +1110,7 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1141,7 +1141,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1175,7 +1175,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1202,7 +1202,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1230,7 +1230,7 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1248,7 +1248,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1268,7 +1268,7 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1301,7 +1301,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1333,7 +1333,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1362,7 +1362,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1392,7 +1392,7 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1412,7 +1412,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1445,11 +1445,11 @@ def test_set_iam_policy(transport: str = "grpc"): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) {% endif %} @@ -1493,11 +1493,11 @@ def test_set_iam_policy_field_headers(): {% endif %} {% if mode == "" %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% else %} client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% endif %} @@ -1530,7 +1530,7 @@ def test_set_iam_policy_field_headers(): def test_set_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1549,7 +1549,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1570,7 +1570,7 @@ async def test_set_iam_policy_from_dict_async(): {% if "GetIamPolicy" in api.mixin_api_methods %} def test_get_iam_policy(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1601,7 +1601,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1635,7 +1635,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1662,7 +1662,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ 
service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1690,7 +1690,7 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1708,7 +1708,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1729,7 +1729,7 @@ async def test_get_iam_policy_from_dict_async(): {% if "TestIamPermissions" in api.mixin_api_methods %} def test_test_iam_permissions(transport: str = "grpc"): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1762,7 +1762,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1794,7 +1794,7 @@ async def 
test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1823,7 +1823,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1853,7 +1853,7 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1873,7 +1873,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9ae5d23fb7a8..ef7fa51bef0e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -92,16 +92,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -208,7 +198,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -231,25 +221,37 @@ def test__validate_universe_domain(client_class, transport_class, transport_name transport._credentials = None client = client_class(transport=transport) assert client._validate_universe_domain() == True - - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} @@ -261,7 +263,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -312,7 +314,7 @@ def test_{{ service.client_name|snake_case }}_service_account_always_use_jwt(tra {% endif %} ]) def test_{{ service.client_name|snake_case }}_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -365,7 +367,7 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans # Check that if channel is provided we won't create a new one. 
with mock.patch.object({{ service.client_name }}, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -666,19 +668,19 @@ def test_{{ service.client_name|snake_case }}_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -689,9 +691,9 @@ def test_{{ service.client_name|snake_case }}_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -701,7 +703,7 @@ def test_{{ service.client_name|snake_case }}_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -818,8 +820,8 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = 
_AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -866,7 +868,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) # Since a `google.api.http` annotation is required for using a rest transport @@ -885,17 +887,17 @@ def test_{{ method_name }}_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( @@ -905,7 +907,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. 
transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -921,12 +923,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = {{ service.client_name }}( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = {{ service.client_name }}( @@ -938,7 +940,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.{{ service.name }}{{ opts.transport[0].capitalize() }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = {{ service.client_name }}(transport=transport) assert client.transport is transport @@ -948,13 +950,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.{{ service.name }}GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.{{ service.grpc_asyncio_transport_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -972,7 +974,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -986,7 +988,7 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = {{ service.client_name }}.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name @@ -994,7 +996,7 @@ def test_transport_kind(transport_name): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -1006,7 +1008,7 @@ def test_{{ service.name|snake_case }}_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.{{ service.name }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -1016,7 +1018,7 @@ def test_{{ service.name|snake_case }}_base_transport(): with mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport.__init__') as Transport: Transport.return_value = None transport = transports.{{ service.name }}Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1095,7 +1097,7 @@ def test_{{ service.name|snake_case }}_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -1114,7 +1116,7 @@ def 
test_{{ service.name|snake_case }}_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('{{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }}.transports.{{ service.name }}Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.{{ service.name }}Transport() adc.assert_called_once() @@ -1122,7 +1124,7 @@ def test_{{ service.name|snake_case }}_base_transport_with_adc(): def test_{{ service.name|snake_case }}_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) {{ service.client_name }}() adc.assert_called_once_with( scopes=None, @@ -1146,7 +1148,7 @@ def test_{{ service.name|snake_case }}_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -1211,7 +1213,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -1245,7 +1247,7 @@ def test_{{ service.name|snake_case }}_transport_create_channel(transport_class, def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1285,7 +1287,7 @@ def test_{{ service.name|snake_case }}_grpc_transport_client_cert_source_for_mtl {% if 'rest' in opts.transport %} def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtls(): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.{{ service.rest_transport_name }} ( credentials=cred, @@ -1297,7 +1299,7 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl {% if service.has_lro -%} def test_{{ service.name|snake_case }}_rest_lro_client(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) transport = client.transport @@ -1327,7 +1329,7 @@ def test_{{ service.name|snake_case }}_rest_lro_client(): def test_{{ service.name|snake_case }}_host_no_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}'), transport=transport_name, ) @@ -1352,7 +1354,7 @@ def test_{{ service.name|snake_case }}_host_no_port(transport_name): def test_{{ service.name|snake_case }}_host_with_port(transport_name): {% with host = (service.host|default('localhost', true)).split(':')[0] %} client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='{{ host }}:8000'), transport=transport_name, ) @@ -1370,8 +1372,8 @@ def test_{{ service.name|snake_case 
}}_host_with_port(transport_name): "rest", ]) def test_{{ service.name|snake_case }}_client_transport_session_collision(transport_name): - creds1 = _AnonymousCredentialsWithUniverseDomain() - creds2 = _AnonymousCredentialsWithUniverseDomain() + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() client1 = {{ service.client_name }}( credentials=creds1, transport=transport_name, @@ -1428,7 +1430,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_s mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -1501,7 +1503,7 @@ def test_{{ service.name|snake_case }}_transport_channel_mtls_with_adc( {% if service.has_lro %} def test_{{ service.name|snake_case }}_grpc_lro_client(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -1518,7 +1520,7 @@ def test_{{ service.name|snake_case }}_grpc_lro_client(): def test_{{ service.name|snake_case }}_grpc_lro_async_client(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc_asyncio', ) transport = client.transport @@ -1590,7 +1592,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1598,7 +1600,7 @@ def 
test_client_with_default_client_info(): with mock.patch.object(transports.{{ service.name }}Transport, '_prep_wrapped_messages') as prep: transport_class = {{ service.client_name }}.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -1607,7 +1609,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -1630,7 +1632,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -1649,7 +1651,7 @@ def test_client_ctx(): ] for transport in transports: client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 164987ea93ca..96ee8639b40e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -6,7 +6,7 @@ ]) def test_{{ method_name }}(request_type, transport: str = 'grpc'): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -105,7 +105,7 @@ def test_{{ method_name }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -127,7 +127,7 @@ def test_{{ method_name }}_empty_call(): @pytest.mark.asyncio async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -228,7 +228,7 @@ async def test_{{ method_name }}_async_from_dict(): {% if method.explicit_routing %} def test_{{ method.name|snake_case }}_routing_parameters(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) {% for routing_param in method.routing_rule.routing_parameters %} @@ -266,7 +266,7 @@ def test_{{ method.name|snake_case }}_routing_parameters(): {% if method.field_headers and not method.client_streaming and not method.explicit_routing %} def test_{{ 
method_name }}_field_headers(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -312,7 +312,7 @@ def test_{{ method_name }}_field_headers(): @pytest.mark.asyncio async def test_{{ method_name }}_field_headers_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -359,7 +359,7 @@ async def test_{{ method_name }}_field_headers_async(): {% if method.ident.package != method.input.ident.package %} def test_{{ method_name }}_from_dict_foreign(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -388,7 +388,7 @@ def test_{{ method_name }}_from_dict_foreign(): {% if method.flattened_fields and not method.client_streaming %} def test_{{ method_name }}_flattened(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -448,7 +448,7 @@ def test_{{ method_name }}_flattened(): def test_{{ method_name }}_flattened_error(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -465,7 +465,7 @@ def test_{{ method_name }}_flattened_error(): @pytest.mark.asyncio async def test_{{ method_name }}_flattened_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -544,7 +544,7 @@ async def test_{{ method_name }}_flattened_async(): @pytest.mark.asyncio async def test_{{ method_name }}_flattened_error_async(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -564,7 +564,7 @@ async def test_{{ method_name }}_flattened_error_async(): {% if not method.paged_result_field.map %} def test_{{ method_name }}_pager(transport_name: str = "grpc"): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -624,7 +624,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): {% endif %} def test_{{ method_name }}_pages(transport_name: str = "grpc"): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -698,7 +698,7 @@ def test_{{ method_name }}_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_{{ method_name }}_async_pager(): client = {{ service.async_client_name }}( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -788,7 +788,7 @@ async def test_{{ method_name }}_async_pager(): @pytest.mark.asyncio async def test_{{ method_name }}_async_pages(): client = {{ service.async_client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -877,7 +877,7 @@ def test_{{ method_name }}_raw_page_lro(): ]) def test_{{ method_name }}_rest(request_type): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1100,7 +1100,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide assert "{{ field_name }}" not in jsonified_request {% endfor %} - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1116,7 +1116,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide jsonified_request["{{ field_name }}"] = {{ mock_value }} {% endfor %} - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).{{ method.transport_safe_name | snake_case }}._get_unset_required_fields(jsonified_request) {% if method.query_params %} # 
Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(({% for param in method.query_params|sort %}"{{param}}", {% endfor %})) @@ -1133,7 +1133,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -1222,7 +1222,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide def test_{{ method_name }}_rest_unset_required_fields(): - transport = transports.{{ service.rest_transport_name }}(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.{{ service.rest_transport_name }}(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.{{ method.transport_safe_name|snake_case }}._get_unset_required_fields({}) assert set(unset_fields) == (set(({% for param in method.query_params|sort %}"{{ param|camel_case }}", {% endfor %})) & set(({% for param in method.input.required_fields %}"{{ param.name|camel_case }}", {% endfor %}))) @@ -1234,7 +1234,7 @@ def test_{{ method_name }}_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_{{ method_name }}_rest_interceptors(null_interceptor): transport = transports.{{ service.name }}RestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), ) client = {{ service.client_name }}(transport=transport) @@ -1297,7 +1297,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1325,7 +1325,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ {% if method.flattened_fields and not method.client_streaming %} def test_{{ method_name }}_rest_flattened(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1394,7 +1394,7 @@ def test_{{ method_name }}_rest_flattened(): def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1413,7 +1413,7 @@ def test_{{ method_name }}_rest_flattened_error(transport: str = 'rest'): {% if method.paged_result_field %} def test_{{ method_name }}_rest_pager(transport: str = 'rest'): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1533,7 +1533,7 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) {%- if not method.http_options %} @@ -1552,7 +1552,7 @@ def test_{{ method_name }}_rest_error(): {% else %}{# this is an lro or streaming method #} def test_{{ method_name }}_rest_unimplemented(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = {{ method.input.ident }}() @@ -1565,7 +1565,7 @@ def test_{{ method_name }}_rest_unimplemented(): {% endif %}{# not lro and not streaming #}{% else %}{# not method.http_options #} def test_{{ method_name 
}}_rest_no_http_options(): client = {{ service.client_name }}( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request = {{ method.input.ident }}() diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index a344eeb10ef6..2567626e256f 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -255,9 +255,23 @@ def showcase_library( *cmd_tup, external=True, ) - # Install the library. - session.install("-e", tmp_dir) - + # Install the generated showcase library. + if templates == "DEFAULT": + # Use the constraints file for the specific python runtime version. + # We do this to make sure that we're testing against the lowest + # supported version of a dependency. + # This is needed to recreate the issue reported in + # https://github.com/googleapis/google-cloud-python/issues/12254 + constraints_path = str( + f"{tmp_dir}/testing/constraints-{session.python}.txt" + ) + # Install the library with a constraints file. + session.install("-e", tmp_dir, "-r", constraints_path) + else: + # The ads templates do not have constraints files. + # See https://github.com/googleapis/gapic-generator-python/issues/1788 + # Install the library without a constraints file. 
+ session.install("-e", tmp_dir) yield tmp_dir diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 6779ee8783ff..4bf1188d11b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -392,6 +392,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -410,6 +411,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -473,15 +475,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = AssetServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 7c6f39008ab2..221455ac7949 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -40,6 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index 7a1dfde768ed..04f6609fa953 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -5,6 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 
google-cloud-access-context-manager==0.1.2 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 4ae533f0bd77..af160eb6d5a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -79,16 +79,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -189,7 +179,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -213,24 +203,36 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), @@ -238,7 +240,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name (AssetServiceClient, "rest"), ]) def test_asset_service_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -277,7 +279,7 @@ def test_asset_service_client_service_account_always_use_jwt(transport_class, tr (AssetServiceClient, "rest"), ]) def test_asset_service_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -319,7 +321,7 @@ def test_asset_service_client_client_options(client_class, transport_class, tran # Check that if channel is provided we won't create a new one. 
with mock.patch.object(AssetServiceClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -601,19 +603,19 @@ def test_asset_service_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -624,9 +626,9 @@ def test_asset_service_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -636,7 +638,7 @@ def test_asset_service_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -743,8 +745,8 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds = 
ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -772,7 +774,7 @@ def test_asset_service_client_create_channel_credentials_file(client_class, tran ]) def test_export_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -801,7 +803,7 @@ def test_export_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -817,7 +819,7 @@ def test_export_assets_empty_call(): @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -851,7 +853,7 @@ async def test_export_assets_async_from_dict(): def test_export_assets_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -883,7 +885,7 @@ def test_export_assets_field_headers(): @pytest.mark.asyncio async def test_export_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -918,7 +920,7 @@ async def 
test_export_assets_field_headers_async(): ]) def test_list_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -950,7 +952,7 @@ def test_list_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -966,7 +968,7 @@ def test_list_assets_empty_call(): @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1001,7 +1003,7 @@ async def test_list_assets_async_from_dict(): def test_list_assets_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1033,7 +1035,7 @@ def test_list_assets_field_headers(): @pytest.mark.asyncio async def test_list_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1064,7 +1066,7 @@ async def test_list_assets_field_headers_async(): def test_list_assets_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1090,7 +1092,7 @@ def test_list_assets_flattened(): def test_list_assets_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1104,7 +1106,7 @@ def test_list_assets_flattened_error(): @pytest.mark.asyncio async def test_list_assets_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1132,7 +1134,7 @@ async def test_list_assets_flattened_async(): @pytest.mark.asyncio async def test_list_assets_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1146,7 +1148,7 @@ async def test_list_assets_flattened_error_async(): def test_list_assets_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1199,7 +1201,7 @@ def test_list_assets_pager(transport_name: str = "grpc"): for i in results) def test_list_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1242,7 +1244,7 @@ def test_list_assets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_assets_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1291,7 +1293,7 @@ async def test_list_assets_async_pager(): @pytest.mark.asyncio async def test_list_assets_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1342,7 +1344,7 @@ async def test_list_assets_async_pages(): ]) def test_batch_get_assets_history(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1372,7 +1374,7 @@ def test_batch_get_assets_history_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1388,7 +1390,7 @@ def test_batch_get_assets_history_empty_call(): @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1421,7 +1423,7 @@ async def test_batch_get_assets_history_async_from_dict(): def test_batch_get_assets_history_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1453,7 +1455,7 @@ def test_batch_get_assets_history_field_headers(): @pytest.mark.asyncio async def test_batch_get_assets_history_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1488,7 +1490,7 @@ async def test_batch_get_assets_history_field_headers_async(): ]) def test_create_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1528,7 +1530,7 @@ def test_create_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1544,7 +1546,7 @@ def test_create_feed_empty_call(): @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1587,7 +1589,7 @@ async def test_create_feed_async_from_dict(): def test_create_feed_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1619,7 +1621,7 @@ def test_create_feed_field_headers(): @pytest.mark.asyncio async def test_create_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1650,7 +1652,7 @@ async def test_create_feed_field_headers_async(): def test_create_feed_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1676,7 +1678,7 @@ def test_create_feed_flattened(): def test_create_feed_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1690,7 +1692,7 @@ def test_create_feed_flattened_error(): @pytest.mark.asyncio async def test_create_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1718,7 +1720,7 @@ async def test_create_feed_flattened_async(): @pytest.mark.asyncio async def test_create_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1736,7 +1738,7 @@ async def test_create_feed_flattened_error_async(): ]) def test_get_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1776,7 +1778,7 @@ def test_get_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1792,7 +1794,7 @@ def test_get_feed_empty_call(): @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1835,7 +1837,7 @@ async def test_get_feed_async_from_dict(): def test_get_feed_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1867,7 +1869,7 @@ def test_get_feed_field_headers(): @pytest.mark.asyncio async def test_get_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1898,7 +1900,7 @@ async def test_get_feed_field_headers_async(): def test_get_feed_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1924,7 +1926,7 @@ def test_get_feed_flattened(): def test_get_feed_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1938,7 +1940,7 @@ def test_get_feed_flattened_error(): @pytest.mark.asyncio async def test_get_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1966,7 +1968,7 @@ async def test_get_feed_flattened_async(): @pytest.mark.asyncio async def test_get_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1984,7 +1986,7 @@ async def test_get_feed_flattened_error_async(): ]) def test_list_feeds(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2014,7 +2016,7 @@ def test_list_feeds_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2030,7 +2032,7 @@ def test_list_feeds_empty_call(): @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2063,7 +2065,7 @@ async def test_list_feeds_async_from_dict(): def test_list_feeds_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2095,7 +2097,7 @@ def test_list_feeds_field_headers(): @pytest.mark.asyncio async def test_list_feeds_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2126,7 +2128,7 @@ async def test_list_feeds_field_headers_async(): def test_list_feeds_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2152,7 +2154,7 @@ def test_list_feeds_flattened(): def test_list_feeds_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2166,7 +2168,7 @@ def test_list_feeds_flattened_error(): @pytest.mark.asyncio async def test_list_feeds_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2194,7 +2196,7 @@ async def test_list_feeds_flattened_async(): @pytest.mark.asyncio async def test_list_feeds_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2212,7 +2214,7 @@ async def test_list_feeds_flattened_error_async(): ]) def test_update_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2252,7 +2254,7 @@ def test_update_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2268,7 +2270,7 @@ def test_update_feed_empty_call(): @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2311,7 +2313,7 @@ async def test_update_feed_async_from_dict(): def test_update_feed_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2343,7 +2345,7 @@ def test_update_feed_field_headers(): @pytest.mark.asyncio async def test_update_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2374,7 +2376,7 @@ async def test_update_feed_field_headers_async(): def test_update_feed_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2400,7 +2402,7 @@ def test_update_feed_flattened(): def test_update_feed_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2414,7 +2416,7 @@ def test_update_feed_flattened_error(): @pytest.mark.asyncio async def test_update_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2442,7 +2444,7 @@ async def test_update_feed_flattened_async(): @pytest.mark.asyncio async def test_update_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2460,7 +2462,7 @@ async def test_update_feed_flattened_error_async(): ]) def test_delete_feed(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2489,7 +2491,7 @@ def test_delete_feed_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2505,7 +2507,7 @@ def test_delete_feed_empty_call(): @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2537,7 +2539,7 @@ async def test_delete_feed_async_from_dict(): def test_delete_feed_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2569,7 +2571,7 @@ def test_delete_feed_field_headers(): @pytest.mark.asyncio async def test_delete_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2600,7 +2602,7 @@ async def test_delete_feed_field_headers_async(): def test_delete_feed_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2626,7 +2628,7 @@ def test_delete_feed_flattened(): def test_delete_feed_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2640,7 +2642,7 @@ def test_delete_feed_flattened_error(): @pytest.mark.asyncio async def test_delete_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2668,7 +2670,7 @@ async def test_delete_feed_flattened_async(): @pytest.mark.asyncio async def test_delete_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2686,7 +2688,7 @@ async def test_delete_feed_flattened_error_async(): ]) def test_search_all_resources(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2718,7 +2720,7 @@ def test_search_all_resources_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2734,7 +2736,7 @@ def test_search_all_resources_empty_call(): @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2769,7 +2771,7 @@ async def test_search_all_resources_async_from_dict(): def test_search_all_resources_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2801,7 +2803,7 @@ def test_search_all_resources_field_headers(): @pytest.mark.asyncio async def test_search_all_resources_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2832,7 +2834,7 @@ async def test_search_all_resources_field_headers_async(): def test_search_all_resources_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2866,7 +2868,7 @@ def test_search_all_resources_flattened(): def test_search_all_resources_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2882,7 +2884,7 @@ def test_search_all_resources_flattened_error(): @pytest.mark.asyncio async def test_search_all_resources_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2918,7 +2920,7 @@ async def test_search_all_resources_flattened_async(): @pytest.mark.asyncio async def test_search_all_resources_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2934,7 +2936,7 @@ async def test_search_all_resources_flattened_error_async(): def test_search_all_resources_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2987,7 +2989,7 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): for i in results) def test_search_all_resources_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3030,7 +3032,7 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_search_all_resources_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3079,7 +3081,7 @@ async def test_search_all_resources_async_pager(): @pytest.mark.asyncio async def test_search_all_resources_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3130,7 +3132,7 @@ async def test_search_all_resources_async_pages(): ]) def test_search_all_iam_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3162,7 +3164,7 @@ def test_search_all_iam_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3178,7 +3180,7 @@ def test_search_all_iam_policies_empty_call(): @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3213,7 +3215,7 @@ async def test_search_all_iam_policies_async_from_dict(): def test_search_all_iam_policies_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3245,7 +3247,7 @@ def test_search_all_iam_policies_field_headers(): @pytest.mark.asyncio async def 
test_search_all_iam_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3276,7 +3278,7 @@ async def test_search_all_iam_policies_field_headers_async(): def test_search_all_iam_policies_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3306,7 +3308,7 @@ def test_search_all_iam_policies_flattened(): def test_search_all_iam_policies_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3321,7 +3323,7 @@ def test_search_all_iam_policies_flattened_error(): @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3353,7 +3355,7 @@ async def test_search_all_iam_policies_flattened_async(): @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3368,7 +3370,7 @@ async def test_search_all_iam_policies_flattened_error_async(): def test_search_all_iam_policies_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3421,7 +3423,7 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): for i in results) def test_search_all_iam_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3464,7 +3466,7 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_search_all_iam_policies_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3513,7 +3515,7 @@ async def test_search_all_iam_policies_async_pager(): @pytest.mark.asyncio async def test_search_all_iam_policies_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3564,7 +3566,7 @@ async def test_search_all_iam_policies_async_pages(): ]) def test_analyze_iam_policy(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3596,7 +3598,7 @@ def test_analyze_iam_policy_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3612,7 +3614,7 @@ def test_analyze_iam_policy_empty_call(): @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3647,7 +3649,7 @@ async def test_analyze_iam_policy_async_from_dict(): def test_analyze_iam_policy_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3679,7 +3681,7 @@ def test_analyze_iam_policy_field_headers(): @pytest.mark.asyncio async def test_analyze_iam_policy_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3714,7 +3716,7 @@ async def test_analyze_iam_policy_field_headers_async(): ]) def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3743,7 +3745,7 @@ def test_analyze_iam_policy_longrunning_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3759,7 +3761,7 @@ def test_analyze_iam_policy_longrunning_empty_call(): @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3793,7 +3795,7 @@ async def test_analyze_iam_policy_longrunning_async_from_dict(): def test_analyze_iam_policy_longrunning_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3825,7 +3827,7 @@ def test_analyze_iam_policy_longrunning_field_headers(): @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3860,7 +3862,7 @@ async def test_analyze_iam_policy_longrunning_field_headers_async(): ]) def test_analyze_move(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3890,7 +3892,7 @@ def test_analyze_move_empty_call(): # This test is a coverage failsafe to 
make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3906,7 +3908,7 @@ def test_analyze_move_empty_call(): @pytest.mark.asyncio async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3939,7 +3941,7 @@ async def test_analyze_move_async_from_dict(): def test_analyze_move_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3971,7 +3973,7 @@ def test_analyze_move_field_headers(): @pytest.mark.asyncio async def test_analyze_move_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4006,7 +4008,7 @@ async def test_analyze_move_field_headers_async(): ]) def test_query_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4040,7 +4042,7 @@ def test_query_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4056,7 +4058,7 @@ def test_query_assets_empty_call(): @pytest.mark.asyncio async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4093,7 +4095,7 @@ async def test_query_assets_async_from_dict(): def test_query_assets_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4125,7 +4127,7 @@ def test_query_assets_field_headers(): @pytest.mark.asyncio async def test_query_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4160,7 +4162,7 @@ async def test_query_assets_field_headers_async(): ]) def test_create_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4198,7 +4200,7 @@ def test_create_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4214,7 +4216,7 @@ def test_create_saved_query_empty_call(): @pytest.mark.asyncio async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4255,7 +4257,7 @@ async def test_create_saved_query_async_from_dict(): def test_create_saved_query_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4287,7 +4289,7 @@ def test_create_saved_query_field_headers(): @pytest.mark.asyncio async def test_create_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4318,7 +4320,7 @@ async def test_create_saved_query_field_headers_async(): def test_create_saved_query_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4352,7 +4354,7 @@ def test_create_saved_query_flattened(): def test_create_saved_query_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4368,7 +4370,7 @@ def test_create_saved_query_flattened_error(): @pytest.mark.asyncio async def test_create_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4404,7 +4406,7 @@ async def test_create_saved_query_flattened_async(): @pytest.mark.asyncio async def test_create_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4424,7 +4426,7 @@ async def test_create_saved_query_flattened_error_async(): ]) def test_get_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4462,7 +4464,7 @@ def test_get_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4478,7 +4480,7 @@ def test_get_saved_query_empty_call(): @pytest.mark.asyncio async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4519,7 +4521,7 @@ async def test_get_saved_query_async_from_dict(): def test_get_saved_query_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4551,7 +4553,7 @@ def test_get_saved_query_field_headers(): @pytest.mark.asyncio async def test_get_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4582,7 +4584,7 @@ async def test_get_saved_query_field_headers_async(): def test_get_saved_query_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4608,7 +4610,7 @@ def test_get_saved_query_flattened(): def test_get_saved_query_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4622,7 +4624,7 @@ def test_get_saved_query_flattened_error(): @pytest.mark.asyncio async def test_get_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4650,7 +4652,7 @@ async def test_get_saved_query_flattened_async(): @pytest.mark.asyncio async def test_get_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4668,7 +4670,7 @@ async def test_get_saved_query_flattened_error_async(): ]) def test_list_saved_queries(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4700,7 +4702,7 @@ def test_list_saved_queries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4716,7 +4718,7 @@ def test_list_saved_queries_empty_call(): @pytest.mark.asyncio async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4751,7 +4753,7 @@ async def test_list_saved_queries_async_from_dict(): def test_list_saved_queries_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4783,7 +4785,7 @@ def test_list_saved_queries_field_headers(): @pytest.mark.asyncio async def test_list_saved_queries_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4814,7 +4816,7 @@ async def test_list_saved_queries_field_headers_async(): def test_list_saved_queries_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4840,7 +4842,7 @@ def test_list_saved_queries_flattened(): def test_list_saved_queries_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4854,7 +4856,7 @@ def test_list_saved_queries_flattened_error(): @pytest.mark.asyncio async def test_list_saved_queries_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4882,7 +4884,7 @@ async def test_list_saved_queries_flattened_async(): @pytest.mark.asyncio async def test_list_saved_queries_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4896,7 +4898,7 @@ async def test_list_saved_queries_flattened_error_async(): def test_list_saved_queries_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -4949,7 +4951,7 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): for i in results) def test_list_saved_queries_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -4992,7 +4994,7 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_saved_queries_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5041,7 +5043,7 @@ async def test_list_saved_queries_async_pager(): @pytest.mark.asyncio async def test_list_saved_queries_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5092,7 +5094,7 @@ async def test_list_saved_queries_async_pages(): ]) def test_update_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5130,7 +5132,7 @@ def test_update_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5146,7 +5148,7 @@ def test_update_saved_query_empty_call(): @pytest.mark.asyncio async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5187,7 +5189,7 @@ async def test_update_saved_query_async_from_dict(): def test_update_saved_query_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5219,7 +5221,7 @@ def test_update_saved_query_field_headers(): @pytest.mark.asyncio async def test_update_saved_query_field_headers_async(): client = 
AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5250,7 +5252,7 @@ async def test_update_saved_query_field_headers_async(): def test_update_saved_query_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5280,7 +5282,7 @@ def test_update_saved_query_flattened(): def test_update_saved_query_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5295,7 +5297,7 @@ def test_update_saved_query_flattened_error(): @pytest.mark.asyncio async def test_update_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5327,7 +5329,7 @@ async def test_update_saved_query_flattened_async(): @pytest.mark.asyncio async def test_update_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5346,7 +5348,7 @@ async def test_update_saved_query_flattened_error_async(): ]) def test_delete_saved_query(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5375,7 +5377,7 @@ def test_delete_saved_query_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5391,7 +5393,7 @@ def test_delete_saved_query_empty_call(): @pytest.mark.asyncio async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5423,7 +5425,7 @@ async def test_delete_saved_query_async_from_dict(): def test_delete_saved_query_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5455,7 +5457,7 @@ def test_delete_saved_query_field_headers(): @pytest.mark.asyncio async def test_delete_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5486,7 +5488,7 @@ async def test_delete_saved_query_field_headers_async(): def test_delete_saved_query_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5512,7 +5514,7 @@ def test_delete_saved_query_flattened(): def test_delete_saved_query_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5526,7 +5528,7 @@ def test_delete_saved_query_flattened_error(): @pytest.mark.asyncio async def test_delete_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5554,7 +5556,7 @@ async def test_delete_saved_query_flattened_async(): @pytest.mark.asyncio async def test_delete_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5572,7 +5574,7 @@ async def test_delete_saved_query_flattened_error_async(): ]) def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5602,7 +5604,7 @@ def test_batch_get_effective_iam_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5618,7 +5620,7 @@ def test_batch_get_effective_iam_policies_empty_call(): @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5651,7 +5653,7 @@ async def test_batch_get_effective_iam_policies_async_from_dict(): def test_batch_get_effective_iam_policies_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5683,7 +5685,7 @@ def test_batch_get_effective_iam_policies_field_headers(): @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5718,7 +5720,7 @@ async def test_batch_get_effective_iam_policies_field_headers_async(): ]) def test_analyze_org_policies(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5750,7 +5752,7 @@ def test_analyze_org_policies_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5766,7 +5768,7 @@ def test_analyze_org_policies_empty_call(): @pytest.mark.asyncio async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5801,7 +5803,7 @@ async def test_analyze_org_policies_async_from_dict(): def test_analyze_org_policies_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5833,7 +5835,7 @@ def test_analyze_org_policies_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5864,7 +5866,7 @@ async def test_analyze_org_policies_field_headers_async(): def test_analyze_org_policies_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5898,7 +5900,7 @@ def test_analyze_org_policies_flattened(): def test_analyze_org_policies_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5914,7 +5916,7 @@ def test_analyze_org_policies_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policies_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5950,7 +5952,7 @@ async def test_analyze_org_policies_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policies_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5966,7 +5968,7 @@ async def test_analyze_org_policies_flattened_error_async(): def test_analyze_org_policies_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6019,7 +6021,7 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): for i in results) def test_analyze_org_policies_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6062,7 +6064,7 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_analyze_org_policies_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6111,7 +6113,7 @@ async def test_analyze_org_policies_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policies_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6162,7 +6164,7 @@ async def test_analyze_org_policies_async_pages(): ]) def test_analyze_org_policy_governed_containers(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6194,7 +6196,7 @@ def test_analyze_org_policy_governed_containers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -6210,7 +6212,7 @@ def test_analyze_org_policy_governed_containers_empty_call(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6245,7 +6247,7 @@ async def test_analyze_org_policy_governed_containers_async_from_dict(): def test_analyze_org_policy_governed_containers_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6277,7 +6279,7 @@ def test_analyze_org_policy_governed_containers_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6308,7 +6310,7 @@ async def test_analyze_org_policy_governed_containers_field_headers_async(): def test_analyze_org_policy_governed_containers_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6342,7 +6344,7 @@ def test_analyze_org_policy_governed_containers_flattened(): def test_analyze_org_policy_governed_containers_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6358,7 +6360,7 @@ def test_analyze_org_policy_governed_containers_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6394,7 +6396,7 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6410,7 +6412,7 @@ async def test_analyze_org_policy_governed_containers_flattened_error_async(): def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6463,7 +6465,7 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp for i in results) def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6506,7 +6508,7 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp 
@pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6555,7 +6557,7 @@ async def test_analyze_org_policy_governed_containers_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6606,7 +6608,7 @@ async def test_analyze_org_policy_governed_containers_async_pages(): ]) def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6638,7 +6640,7 @@ def test_analyze_org_policy_governed_assets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -6654,7 +6656,7 @@ def test_analyze_org_policy_governed_assets_empty_call(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6689,7 +6691,7 @@ async def test_analyze_org_policy_governed_assets_async_from_dict(): def test_analyze_org_policy_governed_assets_field_headers(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6721,7 +6723,7 @@ def test_analyze_org_policy_governed_assets_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6752,7 +6754,7 @@ async def test_analyze_org_policy_governed_assets_field_headers_async(): def test_analyze_org_policy_governed_assets_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6786,7 +6788,7 @@ def test_analyze_org_policy_governed_assets_flattened(): def test_analyze_org_policy_governed_assets_flattened_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6802,7 +6804,7 @@ def test_analyze_org_policy_governed_assets_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6838,7 +6840,7 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6854,7 +6856,7 @@ async def test_analyze_org_policy_governed_assets_flattened_error_async(): def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6907,7 +6909,7 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): for i in results) def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6950,7 +6952,7 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def 
test_analyze_org_policy_governed_assets_async_pager(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6999,7 +7001,7 @@ async def test_analyze_org_policy_governed_assets_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_pages(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7051,7 +7053,7 @@ async def test_analyze_org_policy_governed_assets_async_pages(): ]) def test_export_assets_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7092,14 +7094,14 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7107,7 +7109,7 @@ def 
test_export_assets_rest_required_fields(request_type=asset_service.ExportAss assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7147,7 +7149,7 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss def test_export_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.export_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) @@ -7156,7 +7158,7 @@ def test_export_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_export_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7196,7 +7198,7 @@ def test_export_assets_rest_interceptors(null_interceptor): def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7216,7 +7218,7 @@ def test_export_assets_rest_bad_request(transport: str = 'rest', request_type=as def test_export_assets_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -7227,7 +7229,7 @@ 
def test_export_assets_rest_error(): ]) def test_list_assets_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7273,14 +7275,14 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("asset_types", "content_type", "page_size", "page_token", "read_time", "relationship_types", )) jsonified_request.update(unset_fields) @@ -7290,7 +7292,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7332,7 +7334,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR def test_list_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) @@ -7341,7 +7343,7 @@ def test_list_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7380,7 +7382,7 @@ def test_list_assets_rest_interceptors(null_interceptor): def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7400,7 +7402,7 @@ def 
test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asse def test_list_assets_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7438,7 +7440,7 @@ def test_list_assets_rest_flattened(): def test_list_assets_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7453,7 +7455,7 @@ def test_list_assets_rest_flattened_error(transport: str = 'rest'): def test_list_assets_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7519,7 +7521,7 @@ def test_list_assets_rest_pager(transport: str = 'rest'): ]) def test_batch_get_assets_history_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7563,14 +7565,14 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_assets_history._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_assets_history._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("asset_names", "content_type", "read_time_window", "relationship_types", )) jsonified_request.update(unset_fields) @@ -7580,7 +7582,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7622,7 +7624,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic def test_batch_get_assets_history_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.batch_get_assets_history._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) @@ -7631,7 +7633,7 @@ def test_batch_get_assets_history_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_batch_get_assets_history_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7670,7 +7672,7 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', 
request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7690,7 +7692,7 @@ def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', requ def test_batch_get_assets_history_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -7701,7 +7703,7 @@ def test_batch_get_assets_history_rest_error(): ]) def test_create_feed_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7756,7 +7758,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7764,7 +7766,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR jsonified_request["parent"] = 'parent_value' jsonified_request["feedId"] = 'feed_id_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7774,7 +7776,7 @@ def 
test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR assert jsonified_request["feedId"] == 'feed_id_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7817,7 +7819,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR def test_create_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.create_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) @@ -7826,7 +7828,7 @@ def test_create_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -7865,7 +7867,7 @@ def test_create_feed_rest_interceptors(null_interceptor): def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7885,7 +7887,7 @@ def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asse def test_create_feed_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7923,7 +7925,7 @@ def 
test_create_feed_rest_flattened(): def test_create_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7938,7 +7940,7 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): def test_create_feed_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -7949,7 +7951,7 @@ def test_create_feed_rest_error(): ]) def test_get_feed_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8003,14 +8005,14 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8018,7 +8020,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8060,7 +8062,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest def test_get_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -8069,7 +8071,7 @@ def test_get_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8108,7 +8110,7 @@ def test_get_feed_rest_interceptors(null_interceptor): def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8128,7 +8130,7 @@ def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_s def test_get_feed_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8166,7 +8168,7 @@ def test_get_feed_rest_flattened(): def test_get_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8181,7 +8183,7 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): def test_get_feed_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8192,7 +8194,7 @@ def test_get_feed_rest_error(): ]) def test_list_feeds_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8236,14 +8238,14 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_feeds._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_feeds._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8251,7 +8253,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8293,7 +8295,7 @@ def 
test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq def test_list_feeds_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_feeds._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", ))) @@ -8302,7 +8304,7 @@ def test_list_feeds_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_feeds_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8341,7 +8343,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8361,7 +8363,7 @@ def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset def test_list_feeds_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8399,7 +8401,7 @@ def test_list_feeds_rest_flattened(): def test_list_feeds_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8414,7 +8416,7 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): def 
test_list_feeds_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8425,7 +8427,7 @@ def test_list_feeds_rest_error(): ]) def test_update_feed_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8478,18 +8480,18 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8532,7 +8534,7 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR def test_update_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = 
transport.update_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) @@ -8541,7 +8543,7 @@ def test_update_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8580,7 +8582,7 @@ def test_update_feed_rest_interceptors(null_interceptor): def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8600,7 +8602,7 @@ def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asse def test_update_feed_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8638,7 +8640,7 @@ def test_update_feed_rest_flattened(): def test_update_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8653,7 +8655,7 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): def test_update_feed_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8664,7 +8666,7 @@ def test_update_feed_rest_error(): ]) def test_delete_feed_rest(request_type): client = AssetServiceClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8705,14 +8707,14 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_feed._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_feed._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8720,7 +8722,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8759,7 +8761,7 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR def test_delete_feed_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.delete_feed._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -8768,7 +8770,7 @@ def 
test_delete_feed_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_feed_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -8802,7 +8804,7 @@ def test_delete_feed_rest_interceptors(null_interceptor): def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8822,7 +8824,7 @@ def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asse def test_delete_feed_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8858,7 +8860,7 @@ def test_delete_feed_rest_flattened(): def test_delete_feed_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8873,7 +8875,7 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): def test_delete_feed_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8884,7 +8886,7 @@ def test_delete_feed_rest_error(): ]) def test_search_all_resources_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8930,14 +8932,14 @@ def 
test_search_all_resources_rest_required_fields(request_type=asset_service.Se # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["scope"] = 'scope_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_resources._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", "read_mask", )) jsonified_request.update(unset_fields) @@ -8947,7 +8949,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se assert jsonified_request["scope"] == 'scope_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8989,7 +8991,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se def test_search_all_resources_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.search_all_resources._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", 
"readMask", )) & set(("scope", ))) @@ -8998,7 +9000,7 @@ def test_search_all_resources_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_resources_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9037,7 +9039,7 @@ def test_search_all_resources_rest_interceptors(null_interceptor): def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9057,7 +9059,7 @@ def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_ def test_search_all_resources_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9097,7 +9099,7 @@ def test_search_all_resources_rest_flattened(): def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9114,7 +9116,7 @@ def test_search_all_resources_rest_flattened_error(transport: str = 'rest'): def test_search_all_resources_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9180,7 +9182,7 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): ]) def 
test_search_all_iam_policies_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9226,14 +9228,14 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["scope"] = 'scope_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).search_all_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).search_all_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("asset_types", "order_by", "page_size", "page_token", "query", )) jsonified_request.update(unset_fields) @@ -9243,7 +9245,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service assert jsonified_request["scope"] == 'scope_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9285,7 +9287,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service def test_search_all_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.search_all_iam_policies._get_unset_required_fields({}) assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) @@ -9294,7 +9296,7 @@ def test_search_all_iam_policies_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_search_all_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9333,7 +9335,7 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9353,7 +9355,7 @@ def 
test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', reque def test_search_all_iam_policies_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9392,7 +9394,7 @@ def test_search_all_iam_policies_rest_flattened(): def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9408,7 +9410,7 @@ def test_search_all_iam_policies_rest_flattened_error(transport: str = 'rest'): def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9474,7 +9476,7 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): ]) def test_analyze_iam_policy_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9519,12 +9521,12 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("analysis_query", "execution_timeout", "saved_analysis_query", )) jsonified_request.update(unset_fields) @@ -9532,7 +9534,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9574,7 +9576,7 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal def test_analyze_iam_policy_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.analyze_iam_policy._get_unset_required_fields({}) assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) @@ -9583,7 +9585,7 @@ def test_analyze_iam_policy_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_iam_policy_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9622,7 +9624,7 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): client = 
AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9642,7 +9644,7 @@ def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_ty def test_analyze_iam_policy_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -9653,7 +9655,7 @@ def test_analyze_iam_policy_rest_error(): ]) def test_analyze_iam_policy_longrunning_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9693,18 +9695,18 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_iam_policy_longrunning._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9744,7 +9746,7 @@ def 
test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.analyze_iam_policy_longrunning._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) @@ -9753,7 +9755,7 @@ def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9793,7 +9795,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9813,7 +9815,7 @@ def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest' def test_analyze_iam_policy_longrunning_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -9824,7 +9826,7 @@ def test_analyze_iam_policy_longrunning_rest_error(): ]) def test_analyze_move_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9870,7 +9872,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov # verify fields with default values are dropped assert "destinationParent" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9880,7 +9882,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov jsonified_request["resource"] = 'resource_value' jsonified_request["destinationParent"] = 'destination_parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_move._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_move._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("destination_parent", "view", )) jsonified_request.update(unset_fields) @@ -9892,7 +9894,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov assert jsonified_request["destinationParent"] == 'destination_parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9938,7 +9940,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov def test_analyze_move_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.analyze_move._get_unset_required_fields({}) assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) @@ -9947,7 +9949,7 @@ def test_analyze_move_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_move_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -9986,7 +9988,7 @@ def test_analyze_move_rest_interceptors(null_interceptor): def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10006,7 +10008,7 @@ def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=ass def 
test_analyze_move_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10017,7 +10019,7 @@ def test_analyze_move_rest_error(): ]) def test_query_assets_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10065,14 +10067,14 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).query_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10080,7 +10082,7 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10123,7 +10125,7 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset def test_query_assets_rest_unset_required_fields(): - transport = 
transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.query_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("parent", ))) @@ -10132,7 +10134,7 @@ def test_query_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_query_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10171,7 +10173,7 @@ def test_query_assets_rest_interceptors(null_interceptor): def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.QueryAssetsRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10191,7 +10193,7 @@ def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=ass def test_query_assets_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10202,7 +10204,7 @@ def test_query_assets_rest_error(): ]) def test_create_saved_query_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10320,7 +10322,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea # verify fields with default values are dropped assert "savedQueryId" not in jsonified_request - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -10330,7 +10332,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea jsonified_request["parent"] = 'parent_value' jsonified_request["savedQueryId"] = 'saved_query_id_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("saved_query_id", )) jsonified_request.update(unset_fields) @@ -10342,7 +10344,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea assert jsonified_request["savedQueryId"] == 'saved_query_id_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10389,7 +10391,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea def test_create_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.create_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) @@ -10398,7 +10400,7 @@ 
def test_create_saved_query_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10437,7 +10439,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10457,7 +10459,7 @@ def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_ty def test_create_saved_query_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10497,7 +10499,7 @@ def test_create_saved_query_rest_flattened(): def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10514,7 +10516,7 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): def test_create_saved_query_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10525,7 +10527,7 @@ def test_create_saved_query_rest_error(): ]) def test_get_saved_query_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10577,14 +10579,14 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10592,7 +10594,7 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10634,7 +10636,7 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave def test_get_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -10643,7 +10645,7 @@ def test_get_saved_query_rest_unset_required_fields(): 
@pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10682,7 +10684,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10702,7 +10704,7 @@ def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type= def test_get_saved_query_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10740,7 +10742,7 @@ def test_get_saved_query_rest_flattened(): def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10755,7 +10757,7 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): def test_get_saved_query_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10766,7 +10768,7 @@ def test_get_saved_query_rest_error(): ]) def test_list_saved_queries_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10812,14 +10814,14 @@ def 
test_list_saved_queries_rest_required_fields(request_type=asset_service.List # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_saved_queries._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_saved_queries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -10829,7 +10831,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List assert jsonified_request["parent"] == 'parent_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10871,7 +10873,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List def test_list_saved_queries_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_saved_queries._get_unset_required_fields({}) assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) @@ -10880,7 +10882,7 @@ def 
test_list_saved_queries_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_saved_queries_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -10919,7 +10921,7 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10939,7 +10941,7 @@ def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_ty def test_list_saved_queries_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10977,7 +10979,7 @@ def test_list_saved_queries_rest_flattened(): def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10992,7 +10994,7 @@ def test_list_saved_queries_rest_flattened_error(transport: str = 'rest'): def test_list_saved_queries_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11058,7 +11060,7 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): ]) def test_update_saved_query_rest(request_type): client = AssetServiceClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -11173,12 +11175,12 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_saved_query._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) @@ -11186,7 +11188,7 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda # verify required fields with non-default values are left alone client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -11229,7 +11231,7 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda def test_update_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.update_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) @@ -11238,7 +11240,7 @@ def test_update_saved_query_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11277,7 +11279,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11297,7 +11299,7 @@ def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_ty def 
test_update_saved_query_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -11336,7 +11338,7 @@ def test_update_saved_query_rest_flattened(): def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11352,7 +11354,7 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): def test_update_saved_query_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -11363,7 +11365,7 @@ def test_update_saved_query_rest_error(): ]) def test_delete_saved_query_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -11404,14 +11406,14 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_saved_query._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_saved_query._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -11419,7 +11421,7 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele assert jsonified_request["name"] == 'name_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -11458,7 +11460,7 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele def test_delete_saved_query_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.delete_saved_query._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -11467,7 +11469,7 @@ def test_delete_saved_query_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_saved_query_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11501,7 +11503,7 @@ def test_delete_saved_query_rest_interceptors(null_interceptor): def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11521,7 +11523,7 @@ def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_ty def test_delete_saved_query_rest_flattened(): client = 
AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -11557,7 +11559,7 @@ def test_delete_saved_query_rest_flattened(): def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11572,7 +11574,7 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): def test_delete_saved_query_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -11583,7 +11585,7 @@ def test_delete_saved_query_rest_error(): ]) def test_batch_get_effective_iam_policies_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -11629,7 +11631,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse # verify fields with default values are dropped assert "names" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11639,7 +11641,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse jsonified_request["scope"] = 'scope_value' jsonified_request["names"] = 'names_value' - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).batch_get_effective_iam_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("names", )) jsonified_request.update(unset_fields) @@ -11651,7 +11653,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse assert jsonified_request["names"] == 'names_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -11697,7 +11699,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse def test_batch_get_effective_iam_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.batch_get_effective_iam_policies._get_unset_required_fields({}) assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) @@ -11706,7 +11708,7 @@ def test_batch_get_effective_iam_policies_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11745,7 +11747,7 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): 
def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11765,7 +11767,7 @@ def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'res def test_batch_get_effective_iam_policies_rest_error(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -11776,7 +11778,7 @@ def test_batch_get_effective_iam_policies_rest_error(): ]) def test_analyze_org_policies_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -11824,7 +11826,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -11834,7 +11836,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An jsonified_request["scope"] = 'scope_value' jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policies._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policies._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -11846,7 +11848,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -11892,7 +11894,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An def test_analyze_org_policies_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.analyze_org_policies._get_unset_required_fields({}) assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @@ -11901,7 +11903,7 @@ def test_analyze_org_policies_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policies_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -11940,7 +11942,7 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = 
AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11960,7 +11962,7 @@ def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_ def test_analyze_org_policies_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -12000,7 +12002,7 @@ def test_analyze_org_policies_rest_flattened(): def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12017,7 +12019,7 @@ def test_analyze_org_policies_rest_flattened_error(transport: str = 'rest'): def test_analyze_org_policies_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12083,7 +12085,7 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): ]) def test_analyze_org_policy_governed_containers_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -12131,7 +12133,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12141,7 +12143,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ jsonified_request["scope"] = 'scope_value' jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_containers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -12153,7 +12155,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -12199,7 +12201,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.analyze_org_policy_governed_containers._get_unset_required_fields({}) assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @@ -12208,7 +12210,7 @@ def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def 
test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -12247,7 +12249,7 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12267,7 +12269,7 @@ def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str def test_analyze_org_policy_governed_containers_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -12307,7 +12309,7 @@ def test_analyze_org_policy_governed_containers_rest_flattened(): def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12324,7 +12326,7 @@ def test_analyze_org_policy_governed_containers_rest_flattened_error(transport: def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12390,7 +12392,7 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res ]) def 
test_analyze_org_policy_governed_assets_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -12438,7 +12440,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as # verify fields with default values are dropped assert "constraint" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12448,7 +12450,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as jsonified_request["scope"] = 'scope_value' jsonified_request["constraint"] = 'constraint_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).analyze_org_policy_governed_assets._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("constraint", "filter", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -12460,7 +12462,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as assert jsonified_request["constraint"] == 'constraint_value' client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -12506,7 +12508,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): - transport = transports.AssetServiceRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.AssetServiceRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.analyze_org_policy_governed_assets._get_unset_required_fields({}) assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) @@ -12515,7 +12517,7 @@ def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): transport = transports.AssetServiceRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), ) client = AssetServiceClient(transport=transport) @@ -12554,7 +12556,7 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12574,7 +12576,7 @@ def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'r def test_analyze_org_policy_governed_assets_rest_flattened(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -12614,7 +12616,7 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12631,7 +12633,7 @@ def test_analyze_org_policy_governed_assets_rest_flattened_error(transport: str def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12694,17 +12696,17 @@ def test_analyze_org_policy_governed_assets_rest_pager(transport: str = 'rest'): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.AssetServiceGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AssetServiceClient( @@ -12714,7 +12716,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -12730,12 +12732,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = AssetServiceClient( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AssetServiceClient( @@ -12747,7 +12749,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.AssetServiceGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = AssetServiceClient(transport=transport) assert client.transport is transport @@ -12755,13 +12757,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.AssetServiceGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.AssetServiceGrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -12774,7 +12776,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -12784,14 +12786,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = AssetServiceClient.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -12802,7 +12804,7 @@ def test_asset_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AssetServiceTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -12812,7 +12814,7 @@ def test_asset_service_base_transport(): with mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport.__init__') as Transport: Transport.return_value = None transport = transports.AssetServiceTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -12868,7 +12870,7 @@ def test_asset_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AssetServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -12886,7 +12888,7 @@ def test_asset_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.asset_v1.services.asset_service.transports.AssetServiceTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AssetServiceTransport() adc.assert_called_once() @@ -12894,7 +12896,7 @@ def test_asset_service_base_transport_with_adc(): def test_asset_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) AssetServiceClient() adc.assert_called_once_with( scopes=None, @@ -12916,7 +12918,7 @@ def test_asset_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -12961,7 +12963,7 @@ def test_asset_service_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -12990,7 +12992,7 @@ def test_asset_service_transport_create_channel(transport_class, grpc_helpers): def test_asset_service_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -13028,7 +13030,7 @@ def test_asset_service_grpc_transport_client_cert_source_for_mtls( ) def test_asset_service_http_transport_client_cert_source_for_mtls(): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.AssetServiceRestTransport ( credentials=cred, @@ -13039,7 +13041,7 @@ def test_asset_service_http_transport_client_cert_source_for_mtls(): def test_asset_service_rest_lro_client(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) transport = client.transport @@ -13061,7 +13063,7 @@ def test_asset_service_rest_lro_client(): ]) def test_asset_service_host_no_port(transport_name): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com'), transport=transport_name, ) @@ -13078,7 +13080,7 @@ def test_asset_service_host_no_port(transport_name): ]) def test_asset_service_host_with_port(transport_name): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='cloudasset.googleapis.com:8000'), transport=transport_name, ) @@ -13092,8 +13094,8 @@ def test_asset_service_host_with_port(transport_name): "rest", ]) def test_asset_service_client_transport_session_collision(transport_name): - creds1 = _AnonymousCredentialsWithUniverseDomain() - creds2 = _AnonymousCredentialsWithUniverseDomain() + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() client1 = 
AssetServiceClient( credentials=creds1, transport=transport_name, @@ -13211,7 +13213,7 @@ def test_asset_service_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -13283,7 +13285,7 @@ def test_asset_service_transport_channel_mtls_with_adc( def test_asset_service_grpc_lro_client(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -13300,7 +13302,7 @@ def test_asset_service_grpc_lro_client(): def test_asset_service_grpc_lro_async_client(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc_asyncio', ) transport = client.transport @@ -13537,7 +13539,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -13545,7 +13547,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.AssetServiceTransport, '_prep_wrapped_messages') as prep: transport_class = AssetServiceClient.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -13553,7 +13555,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def 
test_transport_close_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -13564,7 +13566,7 @@ async def test_transport_close_async(): def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -13586,7 +13588,7 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op ]) def test_get_operation_rest(request_type): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} @@ -13612,7 +13614,7 @@ def test_get_operation_rest(request_type): def test_get_operation(transport: str = "grpc"): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13634,7 +13636,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13658,7 +13660,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = AssetServiceClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13682,7 +13684,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13707,7 +13709,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -13723,7 +13725,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = AssetServiceAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -13747,7 +13749,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -13762,7 +13764,7 @@ def test_client_ctx(): ] for transport in transports: client = AssetServiceClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index c2f06015714a..08f630f9d2ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -329,6 +329,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -347,6 +348,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -410,15 +412,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 78b4f48770b3..5e5cbf584aa2 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -40,6 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index 185f7d366c2f..2c74b9860b39 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -5,5 +5,6 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 954bc6bef7b3..6b47190ddc36 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -69,16 +69,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -179,7 +169,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -203,24 +193,36 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), @@ -228,7 +230,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name (IAMCredentialsClient, "rest"), ]) def test_iam_credentials_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -267,7 +269,7 @@ def test_iam_credentials_client_service_account_always_use_jwt(transport_class, (IAMCredentialsClient, "rest"), ]) def test_iam_credentials_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -309,7 +311,7 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr # Check that if channel is provided we won't create a new one. 
with mock.patch.object(IAMCredentialsClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -591,19 +593,19 @@ def test_iam_credentials_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -614,9 +616,9 @@ def test_iam_credentials_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -626,7 +628,7 @@ def test_iam_credentials_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -733,8 +735,8 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds = 
ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -762,7 +764,7 @@ def test_iam_credentials_client_create_channel_credentials_file(client_class, tr ]) def test_generate_access_token(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -794,7 +796,7 @@ def test_generate_access_token_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -810,7 +812,7 @@ def test_generate_access_token_empty_call(): @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -845,7 +847,7 @@ async def test_generate_access_token_async_from_dict(): def test_generate_access_token_field_headers(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -877,7 +879,7 @@ def test_generate_access_token_field_headers(): @pytest.mark.asyncio async def test_generate_access_token_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should 
be sent as @@ -908,7 +910,7 @@ async def test_generate_access_token_field_headers_async(): def test_generate_access_token_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -944,7 +946,7 @@ def test_generate_access_token_flattened(): def test_generate_access_token_flattened_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -961,7 +963,7 @@ def test_generate_access_token_flattened_error(): @pytest.mark.asyncio async def test_generate_access_token_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -999,7 +1001,7 @@ async def test_generate_access_token_flattened_async(): @pytest.mark.asyncio async def test_generate_access_token_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1020,7 +1022,7 @@ async def test_generate_access_token_flattened_error_async(): ]) def test_generate_id_token(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1052,7 +1054,7 @@ def test_generate_id_token_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1068,7 +1070,7 @@ def test_generate_id_token_empty_call(): @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1103,7 +1105,7 @@ async def test_generate_id_token_async_from_dict(): def test_generate_id_token_field_headers(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1135,7 +1137,7 @@ def test_generate_id_token_field_headers(): @pytest.mark.asyncio async def test_generate_id_token_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1166,7 +1168,7 @@ async def test_generate_id_token_field_headers_async(): def test_generate_id_token_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1204,7 +1206,7 @@ def test_generate_id_token_flattened(): def test_generate_id_token_flattened_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1221,7 +1223,7 @@ def test_generate_id_token_flattened_error(): @pytest.mark.asyncio async def test_generate_id_token_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1261,7 +1263,7 @@ async def test_generate_id_token_flattened_async(): @pytest.mark.asyncio async def test_generate_id_token_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1282,7 +1284,7 @@ async def test_generate_id_token_flattened_error_async(): ]) def test_sign_blob(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1316,7 +1318,7 @@ def test_sign_blob_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1332,7 +1334,7 @@ def test_sign_blob_empty_call(): @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1369,7 +1371,7 @@ async def test_sign_blob_async_from_dict(): def test_sign_blob_field_headers(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1401,7 +1403,7 @@ def test_sign_blob_field_headers(): @pytest.mark.asyncio async def test_sign_blob_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1432,7 +1434,7 @@ async def test_sign_blob_field_headers_async(): def test_sign_blob_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1466,7 +1468,7 @@ def test_sign_blob_flattened(): def test_sign_blob_flattened_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1482,7 +1484,7 @@ def test_sign_blob_flattened_error(): @pytest.mark.asyncio async def test_sign_blob_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1518,7 +1520,7 @@ async def test_sign_blob_flattened_async(): @pytest.mark.asyncio async def test_sign_blob_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1538,7 +1540,7 @@ async def test_sign_blob_flattened_error_async(): ]) def test_sign_jwt(request_type, transport: str = 'grpc'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1572,7 +1574,7 @@ def test_sign_jwt_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1588,7 +1590,7 @@ def test_sign_jwt_empty_call(): @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1625,7 +1627,7 @@ async def test_sign_jwt_async_from_dict(): def test_sign_jwt_field_headers(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1657,7 +1659,7 @@ def test_sign_jwt_field_headers(): @pytest.mark.asyncio async def test_sign_jwt_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1688,7 +1690,7 @@ async def test_sign_jwt_field_headers_async(): def test_sign_jwt_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1722,7 +1724,7 @@ def test_sign_jwt_flattened(): def test_sign_jwt_flattened_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1738,7 +1740,7 @@ def test_sign_jwt_flattened_error(): @pytest.mark.asyncio async def test_sign_jwt_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1774,7 +1776,7 @@ async def test_sign_jwt_flattened_async(): @pytest.mark.asyncio async def test_sign_jwt_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1794,7 +1796,7 @@ async def test_sign_jwt_flattened_error_async(): ]) def test_generate_access_token_rest(request_type): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -1841,7 +1843,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_access_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -1849,7 +1851,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate jsonified_request["name"] = 'name_value' 
jsonified_request["scope"] = 'scope_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_access_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_access_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -1859,7 +1861,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate assert jsonified_request["scope"] == 'scope_value' client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -1902,7 +1904,7 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate def test_generate_access_token_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.generate_access_token._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "scope", ))) @@ -1911,7 +1913,7 @@ def test_generate_access_token_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_access_token_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -1950,7 +1952,7 @@ def test_generate_access_token_rest_interceptors(null_interceptor): def test_generate_access_token_rest_bad_request(transport: str = 'rest', 
request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1970,7 +1972,7 @@ def test_generate_access_token_rest_bad_request(transport: str = 'rest', request def test_generate_access_token_rest_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2011,7 +2013,7 @@ def test_generate_access_token_rest_flattened(): def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2029,7 +2031,7 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): def test_generate_access_token_rest_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -2040,7 +2042,7 @@ def test_generate_access_token_rest_error(): ]) def test_generate_id_token_rest(request_type): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2087,7 +2089,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_id_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2095,7 +2097,7 @@ def 
test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo jsonified_request["name"] = 'name_value' jsonified_request["audience"] = 'audience_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).generate_id_token._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).generate_id_token._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2105,7 +2107,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo assert jsonified_request["audience"] == 'audience_value' client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -2148,7 +2150,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo def test_generate_id_token_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.generate_id_token._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "audience", ))) @@ -2157,7 +2159,7 @@ def test_generate_id_token_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_generate_id_token_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -2196,7 +2198,7 @@ def 
test_generate_id_token_rest_interceptors(null_interceptor): def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2216,7 +2218,7 @@ def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_typ def test_generate_id_token_rest_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2257,7 +2259,7 @@ def test_generate_id_token_rest_flattened(): def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2275,7 +2277,7 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): def test_generate_id_token_rest_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -2286,7 +2288,7 @@ def test_generate_id_token_rest_error(): ]) def test_sign_blob_rest(request_type): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2335,7 +2337,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_blob._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify 
required fields with default values are now present @@ -2343,7 +2345,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): jsonified_request["name"] = 'name_value' jsonified_request["payload"] = b'payload_blob' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_blob._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_blob._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2353,7 +2355,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): assert jsonified_request["payload"] == b'payload_blob' client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -2396,7 +2398,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): def test_sign_blob_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.sign_blob._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "payload", ))) @@ -2405,7 +2407,7 @@ def test_sign_blob_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_sign_blob_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -2444,7 +2446,7 @@ def 
test_sign_blob_rest_interceptors(null_interceptor): def test_sign_blob_rest_bad_request(transport: str = 'rest', request_type=common.SignBlobRequest): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2464,7 +2466,7 @@ def test_sign_blob_rest_bad_request(transport: str = 'rest', request_type=common def test_sign_blob_rest_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2504,7 +2506,7 @@ def test_sign_blob_rest_flattened(): def test_sign_blob_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2521,7 +2523,7 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): def test_sign_blob_rest_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -2532,7 +2534,7 @@ def test_sign_blob_rest_error(): ]) def test_sign_jwt_rest(request_type): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2581,7 +2583,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_jwt._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -2589,7 +2591,7 @@ def 
test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): jsonified_request["name"] = 'name_value' jsonified_request["payload"] = 'payload_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).sign_jwt._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).sign_jwt._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -2599,7 +2601,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): assert jsonified_request["payload"] == 'payload_value' client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -2642,7 +2644,7 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): def test_sign_jwt_rest_unset_required_fields(): - transport = transports.IAMCredentialsRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.IAMCredentialsRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.sign_jwt._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "payload", ))) @@ -2651,7 +2653,7 @@ def test_sign_jwt_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_sign_jwt_rest_interceptors(null_interceptor): transport = transports.IAMCredentialsRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), ) client = IAMCredentialsClient(transport=transport) @@ -2690,7 +2692,7 @@ def test_sign_jwt_rest_interceptors(null_interceptor): def test_sign_jwt_rest_bad_request(transport: str = 
'rest', request_type=common.SignJwtRequest): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2710,7 +2712,7 @@ def test_sign_jwt_rest_bad_request(transport: str = 'rest', request_type=common. def test_sign_jwt_rest_flattened(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -2750,7 +2752,7 @@ def test_sign_jwt_rest_flattened(): def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2767,7 +2769,7 @@ def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): def test_sign_jwt_rest_error(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -2775,17 +2777,17 @@ def test_sign_jwt_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.IAMCredentialsGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = IAMCredentialsClient( @@ -2795,7 +2797,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -2811,12 +2813,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = IAMCredentialsClient( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = IAMCredentialsClient( @@ -2828,7 +2830,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.IAMCredentialsGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = IAMCredentialsClient(transport=transport) assert client.transport is transport @@ -2836,13 +2838,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.IAMCredentialsGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.IAMCredentialsGrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2855,7 +2857,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -2865,14 +2867,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = IAMCredentialsClient.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -2883,7 +2885,7 @@ def test_iam_credentials_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.IAMCredentialsTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -2893,7 +2895,7 @@ def test_iam_credentials_base_transport(): with mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport.__init__') as Transport: Transport.return_value = None transport = transports.IAMCredentialsTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2924,7 +2926,7 @@ def test_iam_credentials_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.IAMCredentialsTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2942,7 +2944,7 @@ def test_iam_credentials_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.iam.credentials_v1.services.iam_credentials.transports.IAMCredentialsTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.IAMCredentialsTransport() adc.assert_called_once() @@ -2950,7 +2952,7 @@ def test_iam_credentials_base_transport_with_adc(): def test_iam_credentials_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) IAMCredentialsClient() adc.assert_called_once_with( scopes=None, @@ -2972,7 +2974,7 @@ def test_iam_credentials_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -3017,7 +3019,7 @@ def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers) with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -3046,7 +3048,7 @@ def test_iam_credentials_transport_create_channel(transport_class, grpc_helpers) def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -3084,7 +3086,7 @@ def test_iam_credentials_grpc_transport_client_cert_source_for_mtls( ) def test_iam_credentials_http_transport_client_cert_source_for_mtls(): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.IAMCredentialsRestTransport ( credentials=cred, @@ -3100,7 +3102,7 @@ def test_iam_credentials_http_transport_client_cert_source_for_mtls(): ]) def test_iam_credentials_host_no_port(transport_name): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com'), transport=transport_name, ) @@ -3117,7 +3119,7 @@ def test_iam_credentials_host_no_port(transport_name): ]) def test_iam_credentials_host_with_port(transport_name): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='iamcredentials.googleapis.com:8000'), transport=transport_name, ) @@ -3131,8 +3133,8 @@ def test_iam_credentials_host_with_port(transport_name): "rest", ]) def test_iam_credentials_client_transport_session_collision(transport_name): - creds1 = _AnonymousCredentialsWithUniverseDomain() - creds2 = _AnonymousCredentialsWithUniverseDomain() + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() client1 = IAMCredentialsClient( credentials=creds1, transport=transport_name, @@ -3193,7 +3195,7 @@ def test_iam_credentials_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = 
_AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -3375,7 +3377,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3383,7 +3385,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.IAMCredentialsTransport, '_prep_wrapped_messages') as prep: transport_class = IAMCredentialsClient.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3391,7 +3393,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = IAMCredentialsAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -3408,7 +3410,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -3423,7 +3425,7 @@ def test_client_ctx(): ] for transport in transports: client = IAMCredentialsClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # 
Test client calls underlying transport. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index fd747140bcd9..98a3cebb3cac 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -437,6 +437,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -455,6 +456,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -518,15 +520,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = EventarcClient._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = EventarcClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index bd6939bce7a9..c699f1b11999 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -40,6 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 44ffd04543a8..712d45b4c5ea 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -5,6 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 
grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index cd024521907b..e2b0887d5208 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -89,16 +89,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -199,7 +189,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -223,24 +213,36 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), @@ -248,7 +250,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name (EventarcClient, "rest"), ]) def test_eventarc_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -287,7 +289,7 @@ def test_eventarc_client_service_account_always_use_jwt(transport_class, transpo (EventarcClient, "rest"), ]) def test_eventarc_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -329,7 +331,7 @@ def test_eventarc_client_client_options(client_class, transport_class, transport # Check that if channel is provided we won't create a new one. 
with mock.patch.object(EventarcClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -611,19 +613,19 @@ def test_eventarc_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -634,9 +636,9 @@ def test_eventarc_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -646,7 +648,7 @@ def test_eventarc_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -753,8 +755,8 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds = 
ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -782,7 +784,7 @@ def test_eventarc_client_create_channel_credentials_file(client_class, transport ]) def test_get_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -822,7 +824,7 @@ def test_get_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -838,7 +840,7 @@ def test_get_trigger_empty_call(): @pytest.mark.asyncio async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -881,7 +883,7 @@ async def test_get_trigger_async_from_dict(): def test_get_trigger_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -913,7 +915,7 @@ def test_get_trigger_field_headers(): @pytest.mark.asyncio async def test_get_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -944,7 +946,7 @@ async def test_get_trigger_field_headers_async(): def test_get_trigger_flattened(): 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -970,7 +972,7 @@ def test_get_trigger_flattened(): def test_get_trigger_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -984,7 +986,7 @@ def test_get_trigger_flattened_error(): @pytest.mark.asyncio async def test_get_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1012,7 +1014,7 @@ async def test_get_trigger_flattened_async(): @pytest.mark.asyncio async def test_get_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1030,7 +1032,7 @@ async def test_get_trigger_flattened_error_async(): ]) def test_list_triggers(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1064,7 +1066,7 @@ def test_list_triggers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1080,7 +1082,7 @@ def test_list_triggers_empty_call(): @pytest.mark.asyncio async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1117,7 +1119,7 @@ async def test_list_triggers_async_from_dict(): def test_list_triggers_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1149,7 +1151,7 @@ def test_list_triggers_field_headers(): @pytest.mark.asyncio async def test_list_triggers_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1180,7 +1182,7 @@ async def test_list_triggers_field_headers_async(): def test_list_triggers_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1206,7 +1208,7 @@ def test_list_triggers_flattened(): def test_list_triggers_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1220,7 +1222,7 @@ def test_list_triggers_flattened_error(): @pytest.mark.asyncio async def test_list_triggers_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1248,7 +1250,7 @@ async def test_list_triggers_flattened_async(): @pytest.mark.asyncio async def test_list_triggers_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1262,7 +1264,7 @@ async def test_list_triggers_flattened_error_async(): def test_list_triggers_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1315,7 +1317,7 @@ def test_list_triggers_pager(transport_name: str = "grpc"): for i in results) def test_list_triggers_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1358,7 +1360,7 @@ def test_list_triggers_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_triggers_async_pager(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1407,7 +1409,7 @@ async def test_list_triggers_async_pager(): @pytest.mark.asyncio async def test_list_triggers_async_pages(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1458,7 +1460,7 @@ async def test_list_triggers_async_pages(): ]) def test_create_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1487,7 +1489,7 @@ def test_create_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1503,7 +1505,7 @@ def test_create_trigger_empty_call(): @pytest.mark.asyncio async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1537,7 +1539,7 @@ async def test_create_trigger_async_from_dict(): def test_create_trigger_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1569,7 +1571,7 @@ def test_create_trigger_field_headers(): @pytest.mark.asyncio async def test_create_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-1600,7 +1602,7 @@ async def test_create_trigger_field_headers_async(): def test_create_trigger_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1634,7 +1636,7 @@ def test_create_trigger_flattened(): def test_create_trigger_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1650,7 +1652,7 @@ def test_create_trigger_flattened_error(): @pytest.mark.asyncio async def test_create_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1688,7 +1690,7 @@ async def test_create_trigger_flattened_async(): @pytest.mark.asyncio async def test_create_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1708,7 +1710,7 @@ async def test_create_trigger_flattened_error_async(): ]) def test_update_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1737,7 +1739,7 @@ def test_update_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1753,7 +1755,7 @@ def test_update_trigger_empty_call(): @pytest.mark.asyncio async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1787,7 +1789,7 @@ async def test_update_trigger_async_from_dict(): def test_update_trigger_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1819,7 +1821,7 @@ def test_update_trigger_field_headers(): @pytest.mark.asyncio async def test_update_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1850,7 +1852,7 @@ async def test_update_trigger_field_headers_async(): def test_update_trigger_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1884,7 +1886,7 @@ def test_update_trigger_flattened(): def test_update_trigger_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1900,7 +1902,7 @@ def test_update_trigger_flattened_error(): @pytest.mark.asyncio async def test_update_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1938,7 +1940,7 @@ async def test_update_trigger_flattened_async(): @pytest.mark.asyncio async def test_update_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1958,7 +1960,7 @@ async def test_update_trigger_flattened_error_async(): ]) def test_delete_trigger(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1987,7 +1989,7 @@ def test_delete_trigger_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2003,7 +2005,7 @@ def test_delete_trigger_empty_call(): @pytest.mark.asyncio async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2037,7 +2039,7 @@ async def test_delete_trigger_async_from_dict(): def test_delete_trigger_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2069,7 +2071,7 @@ def test_delete_trigger_field_headers(): @pytest.mark.asyncio async def test_delete_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2100,7 +2102,7 @@ async def test_delete_trigger_field_headers_async(): def test_delete_trigger_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2130,7 +2132,7 @@ def test_delete_trigger_flattened(): def test_delete_trigger_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2145,7 +2147,7 @@ def test_delete_trigger_flattened_error(): @pytest.mark.asyncio async def test_delete_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2179,7 +2181,7 @@ async def test_delete_trigger_flattened_async(): @pytest.mark.asyncio async def test_delete_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2198,7 +2200,7 @@ async def test_delete_trigger_flattened_error_async(): ]) def test_get_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2241,7 +2243,7 @@ def test_get_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2257,7 +2259,7 @@ def test_get_channel_empty_call(): @pytest.mark.asyncio async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2302,7 +2304,7 @@ async def test_get_channel_async_from_dict(): def test_get_channel_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2334,7 +2336,7 @@ def test_get_channel_field_headers(): @pytest.mark.asyncio async def test_get_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2365,7 +2367,7 @@ async def test_get_channel_field_headers_async(): def test_get_channel_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2391,7 +2393,7 @@ def test_get_channel_flattened(): def test_get_channel_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2405,7 +2407,7 @@ def test_get_channel_flattened_error(): @pytest.mark.asyncio async def test_get_channel_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2433,7 +2435,7 @@ async def test_get_channel_flattened_async(): @pytest.mark.asyncio async def test_get_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2451,7 +2453,7 @@ async def test_get_channel_flattened_error_async(): ]) def test_list_channels(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2485,7 +2487,7 @@ def test_list_channels_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2501,7 +2503,7 @@ def test_list_channels_empty_call(): @pytest.mark.asyncio async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2538,7 +2540,7 @@ async def test_list_channels_async_from_dict(): def test_list_channels_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2570,7 +2572,7 @@ def test_list_channels_field_headers(): @pytest.mark.asyncio async def test_list_channels_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2601,7 +2603,7 @@ async def test_list_channels_field_headers_async(): def test_list_channels_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2627,7 +2629,7 @@ def test_list_channels_flattened(): def test_list_channels_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2641,7 +2643,7 @@ def test_list_channels_flattened_error(): @pytest.mark.asyncio async def test_list_channels_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2669,7 +2671,7 @@ async def test_list_channels_flattened_async(): @pytest.mark.asyncio async def test_list_channels_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2683,7 +2685,7 @@ async def test_list_channels_flattened_error_async(): def test_list_channels_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2736,7 +2738,7 @@ def test_list_channels_pager(transport_name: str = "grpc"): for i in results) def test_list_channels_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2779,7 +2781,7 @@ def test_list_channels_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_channels_async_pager(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2828,7 +2830,7 @@ async def test_list_channels_async_pager(): @pytest.mark.asyncio async def test_list_channels_async_pages(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2879,7 +2881,7 @@ async def test_list_channels_async_pages(): ]) def test_create_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2908,7 +2910,7 @@ def test_create_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2924,7 +2926,7 @@ def test_create_channel_empty_call(): @pytest.mark.asyncio async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2958,7 +2960,7 @@ async def test_create_channel_async_from_dict(): def test_create_channel_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2990,7 +2992,7 @@ def test_create_channel_field_headers(): @pytest.mark.asyncio async def test_create_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-3021,7 +3023,7 @@ async def test_create_channel_field_headers_async(): def test_create_channel_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3055,7 +3057,7 @@ def test_create_channel_flattened(): def test_create_channel_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3071,7 +3073,7 @@ def test_create_channel_flattened_error(): @pytest.mark.asyncio async def test_create_channel_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3109,7 +3111,7 @@ async def test_create_channel_flattened_async(): @pytest.mark.asyncio async def test_create_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3129,7 +3131,7 @@ async def test_create_channel_flattened_error_async(): ]) def test_update_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3158,7 +3160,7 @@ def test_update_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3174,7 +3176,7 @@ def test_update_channel_empty_call(): @pytest.mark.asyncio async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3208,7 +3210,7 @@ async def test_update_channel_async_from_dict(): def test_update_channel_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3240,7 +3242,7 @@ def test_update_channel_field_headers(): @pytest.mark.asyncio async def test_update_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3271,7 +3273,7 @@ async def test_update_channel_field_headers_async(): def test_update_channel_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3301,7 +3303,7 @@ def test_update_channel_flattened(): def test_update_channel_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3316,7 +3318,7 @@ def test_update_channel_flattened_error(): @pytest.mark.asyncio async def test_update_channel_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3350,7 +3352,7 @@ async def test_update_channel_flattened_async(): @pytest.mark.asyncio async def test_update_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3369,7 +3371,7 @@ async def test_update_channel_flattened_error_async(): ]) def test_delete_channel(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3398,7 +3400,7 @@ def test_delete_channel_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3414,7 +3416,7 @@ def test_delete_channel_empty_call(): @pytest.mark.asyncio async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3448,7 +3450,7 @@ async def test_delete_channel_async_from_dict(): def test_delete_channel_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3480,7 +3482,7 @@ def test_delete_channel_field_headers(): @pytest.mark.asyncio async def test_delete_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3511,7 +3513,7 @@ async def test_delete_channel_field_headers_async(): def test_delete_channel_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3537,7 +3539,7 @@ def test_delete_channel_flattened(): def test_delete_channel_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3551,7 +3553,7 @@ def test_delete_channel_flattened_error(): @pytest.mark.asyncio async def test_delete_channel_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3581,7 +3583,7 @@ async def test_delete_channel_flattened_async(): @pytest.mark.asyncio async def test_delete_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3599,7 +3601,7 @@ async def test_delete_channel_flattened_error_async(): ]) def test_get_provider(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3633,7 +3635,7 @@ def test_get_provider_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3649,7 +3651,7 @@ def test_get_provider_empty_call(): @pytest.mark.asyncio async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3686,7 +3688,7 @@ async def test_get_provider_async_from_dict(): def test_get_provider_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3718,7 +3720,7 @@ def test_get_provider_field_headers(): @pytest.mark.asyncio async def test_get_provider_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3749,7 +3751,7 @@ async def test_get_provider_field_headers_async(): def test_get_provider_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3775,7 +3777,7 @@ def test_get_provider_flattened(): def test_get_provider_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3789,7 +3791,7 @@ def test_get_provider_flattened_error(): @pytest.mark.asyncio async def test_get_provider_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3817,7 +3819,7 @@ async def test_get_provider_flattened_async(): @pytest.mark.asyncio async def test_get_provider_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3835,7 +3837,7 @@ async def test_get_provider_flattened_error_async(): ]) def test_list_providers(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3869,7 +3871,7 @@ def test_list_providers_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3885,7 +3887,7 @@ def test_list_providers_empty_call(): @pytest.mark.asyncio async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3922,7 +3924,7 @@ async def test_list_providers_async_from_dict(): def test_list_providers_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3954,7 +3956,7 @@ def test_list_providers_field_headers(): @pytest.mark.asyncio async def test_list_providers_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3985,7 +3987,7 @@ async def test_list_providers_field_headers_async(): def test_list_providers_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4011,7 +4013,7 @@ def test_list_providers_flattened(): def test_list_providers_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4025,7 +4027,7 @@ def test_list_providers_flattened_error(): @pytest.mark.asyncio async def test_list_providers_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4053,7 +4055,7 @@ async def test_list_providers_flattened_async(): @pytest.mark.asyncio async def test_list_providers_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4067,7 +4069,7 @@ async def test_list_providers_flattened_error_async(): def test_list_providers_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -4120,7 +4122,7 @@ def test_list_providers_pager(transport_name: str = "grpc"): for i in results) def test_list_providers_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -4163,7 +4165,7 @@ def test_list_providers_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_providers_async_pager(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4212,7 +4214,7 @@ async def test_list_providers_async_pager(): @pytest.mark.asyncio async def test_list_providers_async_pages(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4263,7 +4265,7 @@ async def test_list_providers_async_pages(): ]) def test_get_channel_connection(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4301,7 +4303,7 @@ def test_get_channel_connection_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4317,7 +4319,7 @@ def test_get_channel_connection_empty_call(): @pytest.mark.asyncio async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4358,7 +4360,7 @@ async def test_get_channel_connection_async_from_dict(): def test_get_channel_connection_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4390,7 +4392,7 @@ def test_get_channel_connection_field_headers(): @pytest.mark.asyncio async def test_get_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4421,7 +4423,7 @@ async def test_get_channel_connection_field_headers_async(): def test_get_channel_connection_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4447,7 +4449,7 @@ def test_get_channel_connection_flattened(): def test_get_channel_connection_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4461,7 +4463,7 @@ def test_get_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_get_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4489,7 +4491,7 @@ async def test_get_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_get_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4507,7 +4509,7 @@ async def test_get_channel_connection_flattened_error_async(): ]) def test_list_channel_connections(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4541,7 +4543,7 @@ def test_list_channel_connections_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4557,7 +4559,7 @@ def test_list_channel_connections_empty_call(): @pytest.mark.asyncio async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4594,7 +4596,7 @@ async def test_list_channel_connections_async_from_dict(): def test_list_channel_connections_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4626,7 +4628,7 @@ def test_list_channel_connections_field_headers(): @pytest.mark.asyncio async def test_list_channel_connections_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4657,7 +4659,7 @@ async def test_list_channel_connections_field_headers_async(): def test_list_channel_connections_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4683,7 +4685,7 @@ def test_list_channel_connections_flattened(): def test_list_channel_connections_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4697,7 +4699,7 @@ def test_list_channel_connections_flattened_error(): @pytest.mark.asyncio async def test_list_channel_connections_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4725,7 +4727,7 @@ async def test_list_channel_connections_flattened_async(): @pytest.mark.asyncio async def test_list_channel_connections_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4739,7 +4741,7 @@ async def test_list_channel_connections_flattened_error_async(): def test_list_channel_connections_pager(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -4792,7 +4794,7 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): for i in results) def test_list_channel_connections_pages(transport_name: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -4835,7 +4837,7 @@ def test_list_channel_connections_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_channel_connections_async_pager(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4884,7 +4886,7 @@ async def test_list_channel_connections_async_pager(): @pytest.mark.asyncio async def test_list_channel_connections_async_pages(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4935,7 +4937,7 @@ async def test_list_channel_connections_async_pages(): ]) def test_create_channel_connection(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4964,7 +4966,7 @@ def test_create_channel_connection_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4980,7 +4982,7 @@ def test_create_channel_connection_empty_call(): @pytest.mark.asyncio async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5014,7 +5016,7 @@ async def test_create_channel_connection_async_from_dict(): def test_create_channel_connection_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5046,7 +5048,7 @@ def test_create_channel_connection_field_headers(): @pytest.mark.asyncio async def 
test_create_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5077,7 +5079,7 @@ async def test_create_channel_connection_field_headers_async(): def test_create_channel_connection_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5111,7 +5113,7 @@ def test_create_channel_connection_flattened(): def test_create_channel_connection_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5127,7 +5129,7 @@ def test_create_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_create_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5165,7 +5167,7 @@ async def test_create_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_create_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5185,7 +5187,7 @@ async def test_create_channel_connection_flattened_error_async(): ]) def test_delete_channel_connection(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5214,7 +5216,7 @@ def test_delete_channel_connection_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5230,7 +5232,7 @@ def test_delete_channel_connection_empty_call(): @pytest.mark.asyncio async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5264,7 +5266,7 @@ async def test_delete_channel_connection_async_from_dict(): def test_delete_channel_connection_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5296,7 +5298,7 @@ def test_delete_channel_connection_field_headers(): @pytest.mark.asyncio async def test_delete_channel_connection_field_headers_async(): client = EventarcAsyncClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5327,7 +5329,7 @@ async def test_delete_channel_connection_field_headers_async(): def test_delete_channel_connection_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5353,7 +5355,7 @@ def test_delete_channel_connection_flattened(): def test_delete_channel_connection_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5367,7 +5369,7 @@ def test_delete_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_delete_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5397,7 +5399,7 @@ async def test_delete_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_delete_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5415,7 +5417,7 @@ async def test_delete_channel_connection_flattened_error_async(): ]) def test_get_google_channel_config(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5449,7 +5451,7 @@ def test_get_google_channel_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5465,7 +5467,7 @@ def test_get_google_channel_config_empty_call(): @pytest.mark.asyncio async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5502,7 +5504,7 @@ async def test_get_google_channel_config_async_from_dict(): def test_get_google_channel_config_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5534,7 +5536,7 @@ def test_get_google_channel_config_field_headers(): @pytest.mark.asyncio async def test_get_google_channel_config_field_headers_async(): client = EventarcAsyncClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5565,7 +5567,7 @@ async def test_get_google_channel_config_field_headers_async(): def test_get_google_channel_config_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5591,7 +5593,7 @@ def test_get_google_channel_config_flattened(): def test_get_google_channel_config_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5605,7 +5607,7 @@ def test_get_google_channel_config_flattened_error(): @pytest.mark.asyncio async def test_get_google_channel_config_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5633,7 +5635,7 @@ async def test_get_google_channel_config_flattened_async(): @pytest.mark.asyncio async def test_get_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5651,7 +5653,7 @@ async def test_get_google_channel_config_flattened_error_async(): ]) def test_update_google_channel_config(request_type, transport: str = 'grpc'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5685,7 +5687,7 @@ def test_update_google_channel_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5701,7 +5703,7 @@ def test_update_google_channel_config_empty_call(): @pytest.mark.asyncio async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5738,7 +5740,7 @@ async def test_update_google_channel_config_async_from_dict(): def test_update_google_channel_config_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5770,7 +5772,7 @@ def test_update_google_channel_config_field_headers(): @pytest.mark.asyncio async def test_update_google_channel_config_field_headers_async(): client = 
EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5801,7 +5803,7 @@ async def test_update_google_channel_config_field_headers_async(): def test_update_google_channel_config_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5831,7 +5833,7 @@ def test_update_google_channel_config_flattened(): def test_update_google_channel_config_flattened_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5846,7 +5848,7 @@ def test_update_google_channel_config_flattened_error(): @pytest.mark.asyncio async def test_update_google_channel_config_flattened_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5878,7 +5880,7 @@ async def test_update_google_channel_config_flattened_async(): @pytest.mark.asyncio async def test_update_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5897,7 +5899,7 @@ async def test_update_google_channel_config_flattened_error_async(): ]) def test_get_trigger_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5951,14 +5953,14 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5966,7 +5968,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ 
-6008,7 +6010,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques def test_get_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -6017,7 +6019,7 @@ def test_get_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -6056,7 +6058,7 @@ def test_get_trigger_rest_interceptors(null_interceptor): def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6076,7 +6078,7 @@ def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=even def test_get_trigger_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6114,7 +6116,7 @@ def test_get_trigger_rest_flattened(): def test_get_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6129,7 +6131,7 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): def test_get_trigger_rest_error(): 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -6140,7 +6142,7 @@ def test_get_trigger_rest_error(): ]) def test_list_triggers_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6188,14 +6190,14 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_triggers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_triggers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -6205,7 +6207,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -6247,7 +6249,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe def test_list_triggers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_triggers._get_unset_required_fields({}) assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) @@ -6256,7 +6258,7 @@ def test_list_triggers_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_triggers_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -6295,7 +6297,7 @@ def test_list_triggers_rest_interceptors(null_interceptor): def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6315,7 +6317,7 @@ def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=ev def test_list_triggers_rest_flattened(): client = EventarcClient( 
- credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6353,7 +6355,7 @@ def test_list_triggers_rest_flattened(): def test_list_triggers_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6368,7 +6370,7 @@ def test_list_triggers_rest_flattened_error(transport: str = 'rest'): def test_list_triggers_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6434,7 +6436,7 @@ def test_list_triggers_rest_pager(transport: str = 'rest'): ]) def test_create_trigger_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6543,7 +6545,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger assert "triggerId" not in jsonified_request assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6556,7 +6558,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger jsonified_request["triggerId"] = 'trigger_id_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_trigger._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).create_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("trigger_id", "validate_only", )) jsonified_request.update(unset_fields) @@ -6570,7 +6572,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -6618,7 +6620,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger def test_create_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.create_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) @@ -6627,7 +6629,7 @@ def test_create_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -6667,7 +6669,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6687,7 +6689,7 @@ def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=e def test_create_trigger_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6725,7 +6727,7 @@ def test_create_trigger_rest_flattened(): def test_create_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6742,7 +6744,7 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): def test_create_trigger_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -6753,7 +6755,7 @@ def test_create_trigger_rest_error(): ]) def test_update_trigger_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6859,7 +6861,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -6868,7 +6870,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger jsonified_request["validateOnly"] = True - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("allow_missing", "update_mask", "validate_only", )) jsonified_request.update(unset_fields) @@ -6878,7 +6880,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -6922,7 +6924,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger def test_update_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.update_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) @@ -6931,7 +6933,7 @@ def test_update_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -6971,7 +6973,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): def test_update_trigger_rest_bad_request(transport: str = 'rest', 
request_type=eventarc.UpdateTriggerRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6991,7 +6993,7 @@ def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=e def test_update_trigger_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7029,7 +7031,7 @@ def test_update_trigger_rest_flattened(): def test_update_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7046,7 +7048,7 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): def test_update_trigger_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -7057,7 +7059,7 @@ def test_update_trigger_rest_error(): ]) def test_delete_trigger_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7100,7 +7102,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7110,7 +7112,7 @@ def 
test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger jsonified_request["name"] = 'name_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_trigger._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_trigger._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("allow_missing", "etag", "validate_only", )) jsonified_request.update(unset_fields) @@ -7122,7 +7124,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7165,7 +7167,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger def test_delete_trigger_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.delete_trigger._get_unset_required_fields({}) assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) @@ -7174,7 +7176,7 @@ def test_delete_trigger_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_trigger_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = 
EventarcClient(transport=transport) @@ -7214,7 +7216,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7234,7 +7236,7 @@ def test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=e def test_delete_trigger_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7271,7 +7273,7 @@ def test_delete_trigger_rest_flattened(): def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7287,7 +7289,7 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): def test_delete_trigger_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -7298,7 +7300,7 @@ def test_delete_trigger_rest_error(): ]) def test_get_channel_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7355,14 +7357,14 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -7370,7 +7372,7 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7412,7 +7414,7 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques def test_get_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_channel._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -7421,7 +7423,7 @@ def test_get_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -7460,7 +7462,7 @@ def test_get_channel_rest_interceptors(null_interceptor): def test_get_channel_rest_bad_request(transport: str = 'rest', 
request_type=eventarc.GetChannelRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7480,7 +7482,7 @@ def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=even def test_get_channel_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7518,7 +7520,7 @@ def test_get_channel_rest_flattened(): def test_get_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7533,7 +7535,7 @@ def test_get_channel_rest_flattened_error(transport: str = 'rest'): def test_get_channel_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -7544,7 +7546,7 @@ def test_get_channel_rest_error(): ]) def test_list_channels_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7592,14 +7594,14 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channels._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channels._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channels._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("order_by", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -7609,7 +7611,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -7651,7 +7653,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe def test_list_channels_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_channels._get_unset_required_fields({}) assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) @@ -7660,7 +7662,7 @@ def test_list_channels_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_channels_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -7699,7 +7701,7 @@ def test_list_channels_rest_interceptors(null_interceptor): def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelsRequest): 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7719,7 +7721,7 @@ def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=ev def test_list_channels_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7757,7 +7759,7 @@ def test_list_channels_rest_flattened(): def test_list_channels_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7772,7 +7774,7 @@ def test_list_channels_rest_flattened_error(transport: str = 'rest'): def test_list_channels_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7838,7 +7840,7 @@ def test_list_channels_rest_pager(transport: str = 'rest'): ]) def test_create_channel_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -7947,7 +7949,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel assert "channelId" not in jsonified_request assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -7960,7 +7962,7 @@ def 
test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel jsonified_request["channelId"] = 'channel_id_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("channel_id", "validate_only", )) jsonified_request.update(unset_fields) @@ -7974,7 +7976,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8022,7 +8024,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel def test_create_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.create_channel_._get_unset_required_fields({}) assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) @@ -8031,7 +8033,7 @@ def test_create_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = 
EventarcClient(transport=transport) @@ -8071,7 +8073,7 @@ def test_create_channel_rest_interceptors(null_interceptor): def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8091,7 +8093,7 @@ def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=e def test_create_channel_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8129,7 +8131,7 @@ def test_create_channel_rest_flattened(): def test_create_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8146,7 +8148,7 @@ def test_create_channel_rest_flattened_error(transport: str = 'rest'): def test_create_channel_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8157,7 +8159,7 @@ def test_create_channel_rest_error(): ]) def test_update_channel_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8263,7 +8265,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_channel._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8272,7 +8274,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_channel._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask", "validate_only", )) jsonified_request.update(unset_fields) @@ -8282,7 +8284,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8326,7 +8328,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel def test_update_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.update_channel._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) @@ -8335,7 +8337,7 @@ def test_update_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8375,7 +8377,7 @@ def test_update_channel_rest_interceptors(null_interceptor): def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateChannelRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8395,7 +8397,7 @@ def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=e def test_update_channel_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8432,7 +8434,7 @@ def test_update_channel_rest_flattened(): def test_update_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8448,7 +8450,7 @@ def test_update_channel_rest_flattened_error(transport: str = 'rest'): def test_update_channel_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8459,7 +8461,7 @@ def test_update_channel_rest_error(): ]) def test_delete_channel_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8502,7 +8504,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel # verify fields with default values are dropped assert "validateOnly" not in jsonified_request - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -8512,7 +8514,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel jsonified_request["name"] = 'name_value' jsonified_request["validateOnly"] = True - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("validate_only", )) jsonified_request.update(unset_fields) @@ -8524,7 +8526,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel assert jsonified_request["validateOnly"] == True client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8567,7 +8569,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel def test_delete_channel_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.delete_channel._get_unset_required_fields({}) assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) @@ -8576,7 +8578,7 @@ def test_delete_channel_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", 
[True, False]) def test_delete_channel_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8616,7 +8618,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8636,7 +8638,7 @@ def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=e def test_delete_channel_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8672,7 +8674,7 @@ def test_delete_channel_rest_flattened(): def test_delete_channel_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8687,7 +8689,7 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): def test_delete_channel_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8698,7 +8700,7 @@ def test_delete_channel_rest_error(): ]) def test_get_provider_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8746,14 +8748,14 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ # verify fields with default values are 
dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_provider._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_provider._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_provider._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -8761,7 +8763,7 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -8803,7 +8805,7 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ def test_get_provider_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_provider._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -8812,7 +8814,7 @@ def test_get_provider_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_provider_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -8851,7 +8853,7 @@ def test_get_provider_rest_interceptors(null_interceptor): def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetProviderRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8871,7 +8873,7 @@ def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eve def test_get_provider_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8909,7 +8911,7 @@ def test_get_provider_rest_flattened(): def test_get_provider_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8924,7 +8926,7 @@ def test_get_provider_rest_flattened_error(transport: str = 'rest'): def test_get_provider_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -8935,7 +8937,7 @@ def test_get_provider_rest_error(): ]) def test_list_providers_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -8983,14 +8985,14 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_providers._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_providers._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_providers._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("filter", "order_by", "page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -9000,7 +9002,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9042,7 +9044,7 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders def test_list_providers_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_providers._get_unset_required_fields({}) assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) @@ -9051,7 +9053,7 @@ def test_list_providers_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_providers_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), 
interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -9090,7 +9092,7 @@ def test_list_providers_rest_interceptors(null_interceptor): def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListProvidersRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9110,7 +9112,7 @@ def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=e def test_list_providers_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9148,7 +9150,7 @@ def test_list_providers_rest_flattened(): def test_list_providers_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9163,7 +9165,7 @@ def test_list_providers_rest_flattened_error(transport: str = 'rest'): def test_list_providers_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9229,7 +9231,7 @@ def test_list_providers_rest_pager(transport: str = 'rest'): ]) def test_get_channel_connection_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9281,14 +9283,14 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel_connection._get_unset_required_fields(jsonified_request) + 
unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -9296,7 +9298,7 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9338,7 +9340,7 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh def test_get_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_channel_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -9347,7 +9349,7 @@ def test_get_channel_connection_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) 
client = EventarcClient(transport=transport) @@ -9386,7 +9388,7 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): def test_get_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelConnectionRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9406,7 +9408,7 @@ def test_get_channel_connection_rest_bad_request(transport: str = 'rest', reques def test_get_channel_connection_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9444,7 +9446,7 @@ def test_get_channel_connection_rest_flattened(): def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9459,7 +9461,7 @@ def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): def test_get_channel_connection_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -9470,7 +9472,7 @@ def test_get_channel_connection_rest_error(): ]) def test_list_channel_connections_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9518,14 +9520,14 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channel_connections._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_channel_connections._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_channel_connections._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -9535,7 +9537,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis assert jsonified_request["parent"] == 'parent_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9577,7 +9579,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis def test_list_channel_connections_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_channel_connections._get_unset_required_fields({}) assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) @@ -9586,7 +9588,7 @@ def test_list_channel_connections_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_channel_connections_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -9625,7 +9627,7 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): def test_list_channel_connections_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9645,7 +9647,7 @@ def test_list_channel_connections_rest_bad_request(transport: str = 'rest', requ def test_list_channel_connections_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9683,7 +9685,7 @@ def test_list_channel_connections_rest_flattened(): def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9698,7 +9700,7 @@ def test_list_channel_connections_rest_flattened_error(transport: str = 'rest'): def test_list_channel_connections_rest_pager(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -9764,7 +9766,7 @@ def test_list_channel_connections_rest_pager(transport: str = 'rest'): ]) def test_create_channel_connection_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -9871,7 +9873,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr # verify fields with default values are dropped assert "channelConnectionId" 
not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -9881,7 +9883,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr jsonified_request["parent"] = 'parent_value' jsonified_request["channelConnectionId"] = 'channel_connection_id_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_channel_connection._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("channel_connection_id", )) jsonified_request.update(unset_fields) @@ -9893,7 +9895,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr assert jsonified_request["channelConnectionId"] == 'channel_connection_id_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -9937,7 +9939,7 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr def test_create_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.create_channel_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) @@ -9946,7 +9948,7 @@ def test_create_channel_connection_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -9986,7 +9988,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): def test_create_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10006,7 +10008,7 @@ def 
test_create_channel_connection_rest_bad_request(transport: str = 'rest', req def test_create_channel_connection_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10044,7 +10046,7 @@ def test_create_channel_connection_rest_flattened(): def test_create_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10061,7 +10063,7 @@ def test_create_channel_connection_rest_flattened_error(transport: str = 'rest') def test_create_channel_connection_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10072,7 +10074,7 @@ def test_create_channel_connection_rest_error(): ]) def test_delete_channel_connection_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10113,14 +10115,14 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_channel_connection._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_channel_connection._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10128,7 +10130,7 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10167,7 +10169,7 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De def test_delete_channel_connection_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.delete_channel_connection._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -10176,7 +10178,7 @@ def test_delete_channel_connection_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_channel_connection_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -10216,7 +10218,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10236,7 
+10238,7 @@ def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', req def test_delete_channel_connection_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10272,7 +10274,7 @@ def test_delete_channel_connection_rest_flattened(): def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10287,7 +10289,7 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') def test_delete_channel_connection_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10298,7 +10300,7 @@ def test_delete_channel_connection_rest_error(): ]) def test_get_google_channel_config_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10346,14 +10348,14 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields 
= transport_class(credentials=ga_credentials.AnonymousCredentials()).get_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -10361,7 +10363,7 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge assert jsonified_request["name"] == 'name_value' client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10403,7 +10405,7 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge def test_get_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_google_channel_config._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -10412,7 +10414,7 @@ def test_get_google_channel_config_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -10451,7 +10453,7 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10471,7 
+10473,7 @@ def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', req def test_get_google_channel_config_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10509,7 +10511,7 @@ def test_get_google_channel_config_rest_flattened(): def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10524,7 +10526,7 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') def test_get_google_channel_config_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10535,7 +10537,7 @@ def test_get_google_channel_config_rest_error(): ]) def test_update_google_channel_config_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10646,12 +10648,12 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_google_channel_config._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).update_google_channel_config._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) @@ -10659,7 +10661,7 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc # verify required fields with non-default values are left alone client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -10702,7 +10704,7 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc def test_update_google_channel_config_rest_unset_required_fields(): - transport = transports.EventarcRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.EventarcRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.update_google_channel_config._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) @@ -10711,7 +10713,7 @@ def test_update_google_channel_config_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_google_channel_config_rest_interceptors(null_interceptor): transport = transports.EventarcRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) client = EventarcClient(transport=transport) @@ -10750,7 +10752,7 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10770,7 +10772,7 @@ def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', def test_update_google_channel_config_rest_flattened(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -10809,7 +10811,7 @@ def test_update_google_channel_config_rest_flattened(): def test_update_google_channel_config_rest_flattened_error(transport: str = 'rest'): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -10825,7 +10827,7 @@ def test_update_google_channel_config_rest_flattened_error(transport: str = 'res def test_update_google_channel_config_rest_error(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -10833,17 +10835,17 @@ def test_update_google_channel_config_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EventarcGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.EventarcGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = EventarcClient( @@ -10853,7 +10855,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.EventarcGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -10869,12 +10871,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = EventarcClient( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.EventarcGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = EventarcClient( @@ -10886,7 +10888,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.EventarcGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = EventarcClient(transport=transport) assert client.transport is transport @@ -10894,13 +10896,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.EventarcGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.EventarcGrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -10913,7 +10915,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -10923,14 +10925,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = EventarcClient.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -10941,7 +10943,7 @@ def test_eventarc_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.EventarcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -10951,7 +10953,7 @@ def test_eventarc_base_transport(): with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: Transport.return_value = None transport = transports.EventarcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -11010,7 +11012,7 @@ def test_eventarc_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.EventarcTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -11028,7 +11030,7 @@ def test_eventarc_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.EventarcTransport() adc.assert_called_once() @@ -11036,7 +11038,7 @@ def test_eventarc_base_transport_with_adc(): def test_eventarc_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) EventarcClient() adc.assert_called_once_with( scopes=None, @@ -11058,7 +11060,7 @@ def test_eventarc_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -11103,7 +11105,7 @@ def test_eventarc_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -11132,7 +11134,7 @@ def test_eventarc_transport_create_channel(transport_class, grpc_helpers): def test_eventarc_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -11170,7 +11172,7 @@ def test_eventarc_grpc_transport_client_cert_source_for_mtls( ) def test_eventarc_http_transport_client_cert_source_for_mtls(): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.EventarcRestTransport ( credentials=cred, @@ -11181,7 +11183,7 @@ def test_eventarc_http_transport_client_cert_source_for_mtls(): def test_eventarc_rest_lro_client(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) transport = client.transport @@ -11203,7 +11205,7 @@ def test_eventarc_rest_lro_client(): ]) def test_eventarc_host_no_port(transport_name): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), transport=transport_name, ) @@ -11220,7 +11222,7 @@ def test_eventarc_host_no_port(transport_name): ]) def test_eventarc_host_with_port(transport_name): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), transport=transport_name, ) @@ -11234,8 +11236,8 @@ def test_eventarc_host_with_port(transport_name): "rest", ]) def test_eventarc_client_transport_session_collision(transport_name): - creds1 = _AnonymousCredentialsWithUniverseDomain() - creds2 = _AnonymousCredentialsWithUniverseDomain() + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() client1 = EventarcClient( credentials=creds1, transport=transport_name, @@ -11338,7 
+11340,7 @@ def test_eventarc_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -11410,7 +11412,7 @@ def test_eventarc_transport_channel_mtls_with_adc( def test_eventarc_grpc_lro_client(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -11427,7 +11429,7 @@ def test_eventarc_grpc_lro_client(): def test_eventarc_grpc_lro_async_client(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc_asyncio', ) transport = client.transport @@ -11737,7 +11739,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -11745,7 +11747,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: transport_class = EventarcClient.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -11753,7 +11755,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -11764,7 +11766,7 @@ async def test_transport_close_async(): def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11786,7 +11788,7 @@ def test_get_location_rest_bad_request(transport: str = 'rest', request_type=loc ]) def test_get_location_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -11811,7 +11813,7 @@ def test_get_location_rest(request_type): def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11833,7 +11835,7 @@ def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=l ]) def test_list_locations_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1'} @@ -11858,7 +11860,7 @@ def test_list_locations_rest(request_type): def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11880,7 +11882,7 @@ def test_get_iam_policy_rest_bad_request(transport: str = 'rest', 
request_type=i ]) def test_get_iam_policy_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} @@ -11905,7 +11907,7 @@ def test_get_iam_policy_rest(request_type): def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11927,7 +11929,7 @@ def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=i ]) def test_set_iam_policy_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} @@ -11952,7 +11954,7 @@ def test_set_iam_policy_rest(request_type): def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -11974,7 +11976,7 @@ def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_ ]) def test_test_iam_permissions_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} @@ -11999,7 +12001,7 @@ def test_test_iam_permissions_rest(request_type): def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): client = 
EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12021,7 +12023,7 @@ def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_cancel_operation_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -12046,7 +12048,7 @@ def test_cancel_operation_rest(request_type): def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12068,7 +12070,7 @@ def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_delete_operation_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -12093,7 +12095,7 @@ def test_delete_operation_rest(request_type): def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12115,7 +12117,7 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op ]) def test_get_operation_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 
'projects/sample1/locations/sample2/operations/sample3'} @@ -12140,7 +12142,7 @@ def test_get_operation_rest(request_type): def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -12162,7 +12164,7 @@ def test_list_operations_rest_bad_request(transport: str = 'rest', request_type= ]) def test_list_operations_rest(request_type): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -12188,7 +12190,7 @@ def test_list_operations_rest(request_type): def test_delete_operation(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12210,7 +12212,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12234,7 +12236,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): def test_delete_operation_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12258,7 +12260,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async 
def test_delete_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12283,7 +12285,7 @@ async def test_delete_operation_field_headers_async(): def test_delete_operation_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -12299,7 +12301,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -12317,7 +12319,7 @@ async def test_delete_operation_from_dict_async(): def test_cancel_operation(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12339,7 +12341,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12363,7 +12365,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12387,7 +12389,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12412,7 +12414,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -12428,7 +12430,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -12446,7 +12448,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12468,7 +12470,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12492,7 +12494,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12516,7 +12518,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12541,7 
+12543,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -12557,7 +12559,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -12575,7 +12577,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12597,7 +12599,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12621,7 +12623,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12645,7 +12647,7 @@ def 
test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12670,7 +12672,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -12686,7 +12688,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -12704,7 +12706,7 @@ async def test_list_operations_from_dict_async(): def test_list_locations(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12726,7 +12728,7 @@ def test_list_locations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12750,7 +12752,7 @@ async def test_list_locations_async(transport: str = "grpc_asyncio"): def test_list_locations_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12774,7 +12776,7 @@ def test_list_locations_field_headers(): @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12799,7 +12801,7 @@ async def test_list_locations_field_headers_async(): def test_list_locations_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12815,7 +12817,7 @@ def test_list_locations_from_dict(): @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12833,7 +12835,7 @@ async def test_list_locations_from_dict_async(): def test_get_location(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12855,7 +12857,7 @@ def test_get_location(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12879,7 +12881,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain()) + credentials=ga_credentials.AnonymousCredentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -12902,7 +12904,7 @@ def test_get_location_field_headers(): @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -12927,7 +12929,7 @@ async def test_get_location_field_headers_async(): def test_get_location_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12943,7 +12945,7 @@ def test_get_location_from_dict(): @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -12961,7 +12963,7 @@ async def test_get_location_from_dict_async(): def test_set_iam_policy(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -12988,7 +12990,7 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13018,7 +13020,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13043,7 +13045,7 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13068,7 +13070,7 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -13087,7 +13089,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -13106,7 +13108,7 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13137,7 +13139,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13171,7 +13173,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13198,7 +13200,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13226,7 +13228,7 
@@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -13244,7 +13246,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -13263,7 +13265,7 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13296,7 +13298,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13328,7 +13330,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13357,7 +13359,7 @@ def 
test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13387,7 +13389,7 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -13407,7 +13409,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = EventarcAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -13434,7 +13436,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -13449,7 +13451,7 @@ def test_client_ctx(): ] for transport in transports: client = EventarcClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index df1aa1434a36..bbe8fe1cb9df 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -388,6 +388,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -406,6 +407,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -469,15 +471,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 77699f5aa5d1..46f79d64cbbb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -319,6 +319,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -337,6 +338,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. 
@@ -400,15 +402,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 80981fee4b28..a538953a4acf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -320,6 +320,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, 
use_cert_flag): """Return the client cert source to be used by the client. @@ -338,6 +339,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -401,15 +403,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). 
" + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 31b6686b9be2..459119e8e5be 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -40,6 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index 185f7d366c2f..2c74b9860b39 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -5,5 +5,6 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index bf3b8f5b3ecf..fa144856eb29 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ 
-69,16 +69,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -178,7 +168,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -202,31 +192,43 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), (ConfigServiceV2AsyncClient, "grpc_asyncio"), ]) def test_config_service_v2_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -260,7 +262,7 @@ def test_config_service_v2_client_service_account_always_use_jwt(transport_class (ConfigServiceV2AsyncClient, "grpc_asyncio"), ]) def test_config_service_v2_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -297,7 +299,7 @@ def test_config_service_v2_client_client_options(client_class, transport_class, # Check that if 
channel is provided we won't create a new one. with mock.patch.object(ConfigServiceV2Client, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -577,19 +579,19 @@ def test_config_service_v2_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -600,9 +602,9 @@ def test_config_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -612,7 +614,7 @@ def test_config_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -717,8 +719,8 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds = 
ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -749,7 +751,7 @@ def test_config_service_v2_client_create_channel_credentials_file(client_class, ]) def test_list_buckets(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -781,7 +783,7 @@ def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -797,7 +799,7 @@ def test_list_buckets_empty_call(): @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -832,7 +834,7 @@ async def test_list_buckets_async_from_dict(): def test_list_buckets_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -864,7 +866,7 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -895,7 +897,7 @@ async def 
test_list_buckets_field_headers_async(): def test_list_buckets_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -921,7 +923,7 @@ def test_list_buckets_flattened(): def test_list_buckets_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -935,7 +937,7 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -963,7 +965,7 @@ async def test_list_buckets_flattened_async(): @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -977,7 +979,7 @@ async def test_list_buckets_flattened_error_async(): def test_list_buckets_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1030,7 +1032,7 @@ def test_list_buckets_pager(transport_name: str = "grpc"): for i in results) def test_list_buckets_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1073,7 +1075,7 @@ def test_list_buckets_pages(transport_name: 
str = "grpc"): @pytest.mark.asyncio async def test_list_buckets_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1122,7 +1124,7 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1173,7 +1175,7 @@ async def test_list_buckets_async_pages(): ]) def test_get_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1217,7 +1219,7 @@ def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1233,7 +1235,7 @@ def test_get_bucket_empty_call(): @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1280,7 +1282,7 @@ async def test_get_bucket_async_from_dict(): def test_get_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1312,7 +1314,7 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1347,7 +1349,7 @@ async def test_get_bucket_field_headers_async(): ]) def test_create_bucket_async(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1376,7 +1378,7 @@ def test_create_bucket_async_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1392,7 +1394,7 @@ def test_create_bucket_async_empty_call(): @pytest.mark.asyncio async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1426,7 +1428,7 @@ async def test_create_bucket_async_async_from_dict(): def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1458,7 +1460,7 @@ def test_create_bucket_async_field_headers(): @pytest.mark.asyncio async def test_create_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1493,7 +1495,7 @@ async def test_create_bucket_async_field_headers_async(): ]) def test_update_bucket_async(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1522,7 +1524,7 @@ def test_update_bucket_async_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1538,7 +1540,7 @@ def test_update_bucket_async_empty_call(): @pytest.mark.asyncio async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1572,7 +1574,7 @@ async def test_update_bucket_async_async_from_dict(): def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1604,7 +1606,7 @@ def test_update_bucket_async_field_headers(): @pytest.mark.asyncio async def test_update_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1639,7 +1641,7 @@ async def test_update_bucket_async_field_headers_async(): ]) def test_create_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1683,7 +1685,7 @@ def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1699,7 +1701,7 @@ def test_create_bucket_empty_call(): @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1746,7 +1748,7 @@ async def test_create_bucket_async_from_dict(): def test_create_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1778,7 +1780,7 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio async def test_create_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1813,7 +1815,7 @@ async def test_create_bucket_field_headers_async(): ]) def test_update_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1857,7 +1859,7 @@ def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1873,7 +1875,7 @@ def test_update_bucket_empty_call(): @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1920,7 +1922,7 @@ async def test_update_bucket_async_from_dict(): def test_update_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1952,7 +1954,7 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio async def test_update_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1987,7 +1989,7 @@ async def test_update_bucket_field_headers_async(): ]) def test_delete_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2016,7 +2018,7 @@ def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2032,7 +2034,7 @@ def test_delete_bucket_empty_call(): @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2064,7 +2066,7 @@ async def test_delete_bucket_async_from_dict(): def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2096,7 +2098,7 @@ def test_delete_bucket_field_headers(): @pytest.mark.asyncio async def test_delete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2131,7 +2133,7 @@ async def test_delete_bucket_field_headers_async(): ]) def test_undelete_bucket(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2160,7 +2162,7 @@ def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2176,7 +2178,7 @@ def test_undelete_bucket_empty_call(): @pytest.mark.asyncio async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2208,7 +2210,7 @@ async def test_undelete_bucket_async_from_dict(): def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2240,7 +2242,7 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio async def test_undelete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2275,7 +2277,7 @@ async def test_undelete_bucket_field_headers_async(): ]) def test_list_views(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2307,7 +2309,7 @@ def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2323,7 +2325,7 @@ def test_list_views_empty_call(): @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2358,7 +2360,7 @@ async def test_list_views_async_from_dict(): def test_list_views_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2390,7 +2392,7 @@ def test_list_views_field_headers(): @pytest.mark.asyncio async def test_list_views_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2421,7 +2423,7 @@ async def test_list_views_field_headers_async(): def test_list_views_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2447,7 +2449,7 @@ def test_list_views_flattened(): def test_list_views_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2461,7 +2463,7 @@ def test_list_views_flattened_error(): @pytest.mark.asyncio async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2489,7 +2491,7 @@ async def test_list_views_flattened_async(): @pytest.mark.asyncio async def test_list_views_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2503,7 +2505,7 @@ async def test_list_views_flattened_error_async(): def test_list_views_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2556,7 +2558,7 @@ def test_list_views_pager(transport_name: str = "grpc"): for i in results) def test_list_views_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2599,7 +2601,7 @@ def test_list_views_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_views_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2648,7 +2650,7 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2699,7 +2701,7 @@ async def test_list_views_async_pages(): ]) def test_get_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2735,7 +2737,7 @@ def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2751,7 +2753,7 @@ def test_get_view_empty_call(): @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2790,7 +2792,7 @@ async def test_get_view_async_from_dict(): def test_get_view_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2822,7 +2824,7 @@ def test_get_view_field_headers(): @pytest.mark.asyncio async def test_get_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2857,7 +2859,7 @@ 
async def test_get_view_field_headers_async(): ]) def test_create_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2893,7 +2895,7 @@ def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2909,7 +2911,7 @@ def test_create_view_empty_call(): @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2948,7 +2950,7 @@ async def test_create_view_async_from_dict(): def test_create_view_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2980,7 +2982,7 @@ def test_create_view_field_headers(): @pytest.mark.asyncio async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3015,7 +3017,7 @@ async def test_create_view_field_headers_async(): ]) def test_update_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3051,7 +3053,7 @@ def 
test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3067,7 +3069,7 @@ def test_update_view_empty_call(): @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3106,7 +3108,7 @@ async def test_update_view_async_from_dict(): def test_update_view_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3138,7 +3140,7 @@ def test_update_view_field_headers(): @pytest.mark.asyncio async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3173,7 +3175,7 @@ async def test_update_view_field_headers_async(): ]) def test_delete_view(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3202,7 +3204,7 @@ def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3218,7 +3220,7 @@ def test_delete_view_empty_call(): @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3250,7 +3252,7 @@ async def test_delete_view_async_from_dict(): def test_delete_view_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3282,7 +3284,7 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio async def test_delete_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3317,7 +3319,7 @@ async def test_delete_view_field_headers_async(): ]) def test_list_sinks(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3349,7 +3351,7 @@ def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3365,7 +3367,7 @@ def test_list_sinks_empty_call(): @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3400,7 +3402,7 @@ async def test_list_sinks_async_from_dict(): def test_list_sinks_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3432,7 +3434,7 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3463,7 +3465,7 @@ async def test_list_sinks_field_headers_async(): def test_list_sinks_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3489,7 +3491,7 @@ def test_list_sinks_flattened(): def test_list_sinks_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3503,7 +3505,7 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3531,7 +3533,7 @@ async def test_list_sinks_flattened_async(): @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3545,7 +3547,7 @@ async def test_list_sinks_flattened_error_async(): def test_list_sinks_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3598,7 +3600,7 @@ def test_list_sinks_pager(transport_name: str = "grpc"): for i in results) def test_list_sinks_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3641,7 +3643,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3690,7 +3692,7 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3741,7 +3743,7 @@ async def test_list_sinks_async_pages(): ]) def test_get_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3787,7 +3789,7 @@ def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3803,7 +3805,7 @@ def test_get_sink_empty_call(): @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3852,7 +3854,7 @@ async def test_get_sink_async_from_dict(): def test_get_sink_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3884,7 +3886,7 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3915,7 +3917,7 @@ 
async def test_get_sink_field_headers_async(): def test_get_sink_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3941,7 +3943,7 @@ def test_get_sink_flattened(): def test_get_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3955,7 +3957,7 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3983,7 +3985,7 @@ async def test_get_sink_flattened_async(): @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4001,7 +4003,7 @@ async def test_get_sink_flattened_error_async(): ]) def test_create_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4047,7 +4049,7 @@ def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4063,7 +4065,7 @@ def test_create_sink_empty_call(): @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4112,7 +4114,7 @@ async def test_create_sink_async_from_dict(): def test_create_sink_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4144,7 +4146,7 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4175,7 +4177,7 @@ async def test_create_sink_field_headers_async(): def test_create_sink_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4205,7 +4207,7 @@ def test_create_sink_flattened(): def test_create_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4220,7 +4222,7 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4252,7 +4254,7 @@ async def test_create_sink_flattened_async(): @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4271,7 +4273,7 @@ async def test_create_sink_flattened_error_async(): ]) def test_update_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4317,7 +4319,7 @@ def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4333,7 +4335,7 @@ def test_update_sink_empty_call(): @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4382,7 +4384,7 @@ async def test_update_sink_async_from_dict(): def test_update_sink_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4414,7 +4416,7 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4445,7 +4447,7 @@ async def test_update_sink_field_headers_async(): def test_update_sink_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4479,7 +4481,7 @@ def test_update_sink_flattened(): def test_update_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4495,7 +4497,7 @@ def test_update_sink_flattened_error(): @pytest.mark.asyncio async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4531,7 +4533,7 @@ async def test_update_sink_flattened_async(): @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4551,7 +4553,7 @@ async def test_update_sink_flattened_error_async(): ]) def test_delete_sink(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4580,7 +4582,7 @@ def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4596,7 +4598,7 @@ def test_delete_sink_empty_call(): @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4628,7 +4630,7 @@ async def test_delete_sink_async_from_dict(): def test_delete_sink_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4660,7 +4662,7 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4691,7 +4693,7 @@ async def test_delete_sink_field_headers_async(): def test_delete_sink_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4717,7 +4719,7 @@ def test_delete_sink_flattened(): def test_delete_sink_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4731,7 +4733,7 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4759,7 +4761,7 @@ async def test_delete_sink_flattened_async(): @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4777,7 +4779,7 @@ async def test_delete_sink_flattened_error_async(): ]) def test_create_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4806,7 +4808,7 @@ def test_create_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -4822,7 +4824,7 @@ def test_create_link_empty_call(): @pytest.mark.asyncio async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4856,7 +4858,7 @@ async def test_create_link_async_from_dict(): def test_create_link_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4888,7 +4890,7 @@ def test_create_link_field_headers(): @pytest.mark.asyncio async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4919,7 +4921,7 @@ async def test_create_link_field_headers_async(): def test_create_link_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4953,7 +4955,7 @@ def test_create_link_flattened(): def test_create_link_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -4969,7 +4971,7 @@ def test_create_link_flattened_error(): @pytest.mark.asyncio async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5007,7 +5009,7 @@ async def test_create_link_flattened_async(): @pytest.mark.asyncio async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5027,7 +5029,7 @@ async def test_create_link_flattened_error_async(): ]) def test_delete_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5056,7 +5058,7 @@ def test_delete_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5072,7 +5074,7 @@ def test_delete_link_empty_call(): @pytest.mark.asyncio async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5106,7 +5108,7 @@ async def test_delete_link_async_from_dict(): def test_delete_link_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5138,7 +5140,7 @@ def test_delete_link_field_headers(): @pytest.mark.asyncio async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5169,7 +5171,7 @@ async def test_delete_link_field_headers_async(): def test_delete_link_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5195,7 +5197,7 @@ def test_delete_link_flattened(): def test_delete_link_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5209,7 +5211,7 @@ def test_delete_link_flattened_error(): @pytest.mark.asyncio async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5239,7 +5241,7 @@ async def test_delete_link_flattened_async(): @pytest.mark.asyncio async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5257,7 +5259,7 @@ async def test_delete_link_flattened_error_async(): ]) def test_list_links(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5289,7 +5291,7 @@ def test_list_links_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5305,7 +5307,7 @@ def test_list_links_empty_call(): @pytest.mark.asyncio async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5340,7 +5342,7 @@ async def test_list_links_async_from_dict(): def test_list_links_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5372,7 +5374,7 @@ def test_list_links_field_headers(): @pytest.mark.asyncio async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5403,7 +5405,7 @@ async def test_list_links_field_headers_async(): def test_list_links_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5429,7 +5431,7 @@ def test_list_links_flattened(): def test_list_links_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5443,7 +5445,7 @@ def test_list_links_flattened_error(): @pytest.mark.asyncio async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5471,7 +5473,7 @@ async def test_list_links_flattened_async(): @pytest.mark.asyncio async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5485,7 +5487,7 @@ async def test_list_links_flattened_error_async(): def test_list_links_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -5538,7 +5540,7 @@ def test_list_links_pager(transport_name: str = "grpc"): for i in results) def test_list_links_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -5581,7 +5583,7 @@ def test_list_links_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_links_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5630,7 +5632,7 @@ async def test_list_links_async_pager(): @pytest.mark.asyncio async def test_list_links_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5681,7 +5683,7 @@ async def test_list_links_async_pages(): ]) def test_get_link(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5717,7 +5719,7 @@ def test_get_link_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5733,7 +5735,7 @@ def test_get_link_empty_call(): @pytest.mark.asyncio async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5772,7 +5774,7 @@ async def test_get_link_async_from_dict(): def test_get_link_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5804,7 +5806,7 @@ def test_get_link_field_headers(): @pytest.mark.asyncio async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5835,7 +5837,7 @@ 
async def test_get_link_field_headers_async(): def test_get_link_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5861,7 +5863,7 @@ def test_get_link_flattened(): def test_get_link_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5875,7 +5877,7 @@ def test_get_link_flattened_error(): @pytest.mark.asyncio async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5903,7 +5905,7 @@ async def test_get_link_flattened_async(): @pytest.mark.asyncio async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -5921,7 +5923,7 @@ async def test_get_link_flattened_error_async(): ]) def test_list_exclusions(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5953,7 +5955,7 @@ def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -5969,7 +5971,7 @@ def test_list_exclusions_empty_call(): @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6004,7 +6006,7 @@ async def test_list_exclusions_async_from_dict(): def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6036,7 +6038,7 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6067,7 +6069,7 @@ async def test_list_exclusions_field_headers_async(): def test_list_exclusions_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6093,7 +6095,7 @@ def test_list_exclusions_flattened(): def test_list_exclusions_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6107,7 +6109,7 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6135,7 +6137,7 @@ async def test_list_exclusions_flattened_async(): @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6149,7 +6151,7 @@ async def test_list_exclusions_flattened_error_async(): def test_list_exclusions_pager(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6202,7 +6204,7 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): for i in results) def test_list_exclusions_pages(transport_name: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -6245,7 +6247,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the 
actual call within the gRPC stub, and fake the request. @@ -6294,7 +6296,7 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6345,7 +6347,7 @@ async def test_list_exclusions_async_pages(): ]) def test_get_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6383,7 +6385,7 @@ def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -6399,7 +6401,7 @@ def test_get_exclusion_empty_call(): @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6440,7 +6442,7 @@ async def test_get_exclusion_async_from_dict(): def test_get_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6472,7 +6474,7 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6503,7 +6505,7 @@ async def test_get_exclusion_field_headers_async(): def test_get_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6529,7 +6531,7 @@ def test_get_exclusion_flattened(): def test_get_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6543,7 +6545,7 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6571,7 +6573,7 @@ async def test_get_exclusion_flattened_async(): @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6589,7 +6591,7 @@ async def test_get_exclusion_flattened_error_async(): ]) def test_create_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6627,7 +6629,7 @@ def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -6643,7 +6645,7 @@ def test_create_exclusion_empty_call(): @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6684,7 +6686,7 @@ async def test_create_exclusion_async_from_dict(): def test_create_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6716,7 +6718,7 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6747,7 +6749,7 @@ async def test_create_exclusion_field_headers_async(): def test_create_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6777,7 +6779,7 @@ def test_create_exclusion_flattened(): def test_create_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6792,7 +6794,7 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6824,7 +6826,7 @@ async def test_create_exclusion_flattened_async(): @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -6843,7 +6845,7 @@ async def test_create_exclusion_flattened_error_async(): ]) def test_update_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6881,7 +6883,7 @@ def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -6897,7 +6899,7 @@ def test_update_exclusion_empty_call(): @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6938,7 +6940,7 @@ async def test_update_exclusion_async_from_dict(): def test_update_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6970,7 +6972,7 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7001,7 +7003,7 @@ async def test_update_exclusion_field_headers_async(): def test_update_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7035,7 +7037,7 @@ def test_update_exclusion_flattened(): def test_update_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -7051,7 +7053,7 @@ def test_update_exclusion_flattened_error(): @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7087,7 +7089,7 @@ async def test_update_exclusion_flattened_async(): @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -7107,7 +7109,7 @@ async def test_update_exclusion_flattened_error_async(): ]) def test_delete_exclusion(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7136,7 +7138,7 @@ def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -7152,7 +7154,7 @@ def test_delete_exclusion_empty_call(): @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7184,7 +7186,7 @@ async def test_delete_exclusion_async_from_dict(): def test_delete_exclusion_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7216,7 +7218,7 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7247,7 +7249,7 @@ async def test_delete_exclusion_field_headers_async(): def test_delete_exclusion_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7273,7 +7275,7 @@ def test_delete_exclusion_flattened(): def test_delete_exclusion_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -7287,7 +7289,7 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7315,7 +7317,7 @@ async def test_delete_exclusion_flattened_async(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -7333,7 +7335,7 @@ async def test_delete_exclusion_flattened_error_async(): ]) def test_get_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7371,7 +7373,7 @@ def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -7387,7 +7389,7 @@ def test_get_cmek_settings_empty_call(): @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7428,7 +7430,7 @@ async def test_get_cmek_settings_async_from_dict(): def test_get_cmek_settings_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7460,7 +7462,7 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7495,7 +7497,7 @@ async def test_get_cmek_settings_field_headers_async(): ]) def test_update_cmek_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7533,7 +7535,7 @@ def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -7549,7 +7551,7 @@ def test_update_cmek_settings_empty_call(): @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7590,7 +7592,7 @@ async def test_update_cmek_settings_async_from_dict(): def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7622,7 +7624,7 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7657,7 +7659,7 @@ async def test_update_cmek_settings_field_headers_async(): ]) def test_get_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7697,7 +7699,7 @@ def test_get_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -7713,7 +7715,7 @@ def test_get_settings_empty_call(): @pytest.mark.asyncio async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7756,7 +7758,7 @@ async def test_get_settings_async_from_dict(): def test_get_settings_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7788,7 +7790,7 @@ def test_get_settings_field_headers(): @pytest.mark.asyncio async def test_get_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7819,7 +7821,7 @@ async def test_get_settings_field_headers_async(): def test_get_settings_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7845,7 +7847,7 @@ def test_get_settings_flattened(): def test_get_settings_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -7859,7 +7861,7 @@ def test_get_settings_flattened_error(): @pytest.mark.asyncio async def test_get_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7887,7 +7889,7 @@ async def test_get_settings_flattened_async(): @pytest.mark.asyncio async def test_get_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -7905,7 +7907,7 @@ async def test_get_settings_flattened_error_async(): ]) def test_update_settings(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7945,7 +7947,7 @@ def test_update_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -7961,7 +7963,7 @@ def test_update_settings_empty_call(): @pytest.mark.asyncio async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8004,7 +8006,7 @@ async def test_update_settings_async_from_dict(): def test_update_settings_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8036,7 +8038,7 @@ def test_update_settings_field_headers(): @pytest.mark.asyncio async def test_update_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8067,7 +8069,7 @@ async def test_update_settings_field_headers_async(): def test_update_settings_flattened(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -8097,7 +8099,7 @@ def test_update_settings_flattened(): def test_update_settings_flattened_error(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -8112,7 +8114,7 @@ def test_update_settings_flattened_error(): @pytest.mark.asyncio async def test_update_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8144,7 +8146,7 @@ async def test_update_settings_flattened_async(): @pytest.mark.asyncio async def test_update_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -8163,7 +8165,7 @@ async def test_update_settings_flattened_error_async(): ]) def test_copy_log_entries(request_type, transport: str = 'grpc'): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8192,7 +8194,7 @@ def test_copy_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -8208,7 +8210,7 @@ def test_copy_log_entries_empty_call(): @pytest.mark.asyncio async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -8243,17 +8245,17 @@ async def test_copy_log_entries_async_from_dict(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -8263,7 +8265,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. 
transport = transports.ConfigServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -8279,12 +8281,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = ConfigServiceV2Client( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = ConfigServiceV2Client( @@ -8296,7 +8298,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = ConfigServiceV2Client(transport=transport) assert client.transport is transport @@ -8304,13 +8306,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.ConfigServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.ConfigServiceV2GrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -8322,7 +8324,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -8331,14 +8333,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = ConfigServiceV2Client.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -8349,7 +8351,7 @@ def test_config_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.ConfigServiceV2Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -8359,7 +8361,7 @@ def test_config_service_v2_base_transport(): with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: Transport.return_value = None transport = transports.ConfigServiceV2Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -8426,7 +8428,7 @@ def test_config_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -8447,7 +8449,7 @@ def test_config_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.ConfigServiceV2Transport() adc.assert_called_once() @@ -8455,7 +8457,7 @@ def test_config_service_v2_base_transport_with_adc(): def test_config_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) ConfigServiceV2Client() adc.assert_called_once_with( scopes=None, @@ -8480,7 +8482,7 @@ def test_config_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -8524,7 +8526,7 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -8556,7 +8558,7 @@ def test_config_service_v2_transport_create_channel(transport_class, grpc_helper def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -8600,7 +8602,7 @@ def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( ]) def test_config_service_v2_host_no_port(transport_name): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), transport=transport_name, ) @@ -8614,7 +8616,7 @@ def test_config_service_v2_host_no_port(transport_name): ]) def test_config_service_v2_host_with_port(transport_name): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), transport=transport_name, ) @@ -8662,7 +8664,7 @@ def test_config_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -8734,7 +8736,7 @@ def test_config_service_v2_transport_channel_mtls_with_adc( def test_config_service_v2_grpc_lro_client(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -8751,7 +8753,7 @@ def test_config_service_v2_grpc_lro_client(): def test_config_service_v2_grpc_lro_async_client(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc_asyncio', ) transport = client.transport @@ -8998,7 +9000,7 @@ def 
test_client_with_default_client_info(): with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -9006,7 +9008,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: transport_class = ConfigServiceV2Client.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -9014,7 +9016,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -9025,7 +9027,7 @@ async def test_transport_close_async(): def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9047,7 +9049,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9071,7 +9073,7 @@ async 
def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9095,7 +9097,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9120,7 +9122,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -9136,7 +9138,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -9154,7 +9156,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9176,7 +9178,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9200,7 +9202,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9224,7 +9226,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9249,7 +9251,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -9265,7 +9267,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -9283,7 +9285,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9305,7 +9307,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9329,7 +9331,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9353,7 +9355,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be 
sent as @@ -9378,7 +9380,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -9394,7 +9396,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -9417,7 +9419,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -9431,7 +9433,7 @@ def test_client_ctx(): ] for transport in transports: client = ConfigServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e821729dfe70..74014d4b19f7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -70,16 +70,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -179,7 +169,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -203,31 +193,43 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), (LoggingServiceV2AsyncClient, "grpc_asyncio"), ]) def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -261,7 +263,7 @@ def test_logging_service_v2_client_service_account_always_use_jwt(transport_clas (LoggingServiceV2AsyncClient, "grpc_asyncio"), ]) def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -298,7 +300,7 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, # Check that if channel is provided we won't create a new one. 
with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -578,19 +580,19 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -601,9 +603,9 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -613,7 +615,7 @@ def test_logging_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -718,8 +720,8 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds 
= ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -751,7 +753,7 @@ def test_logging_service_v2_client_create_channel_credentials_file(client_class, ]) def test_delete_log(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -780,7 +782,7 @@ def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -796,7 +798,7 @@ def test_delete_log_empty_call(): @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -828,7 +830,7 @@ async def test_delete_log_async_from_dict(): def test_delete_log_field_headers(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -860,7 +862,7 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -891,7 +893,7 @@ async def test_delete_log_field_headers_async(): def 
test_delete_log_flattened(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -917,7 +919,7 @@ def test_delete_log_flattened(): def test_delete_log_flattened_error(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -931,7 +933,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -959,7 +961,7 @@ async def test_delete_log_flattened_async(): @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -977,7 +979,7 @@ async def test_delete_log_flattened_error_async(): ]) def test_write_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1007,7 +1009,7 @@ def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1023,7 +1025,7 @@ def test_write_log_entries_empty_call(): @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1056,7 +1058,7 @@ async def test_write_log_entries_async_from_dict(): def test_write_log_entries_flattened(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1094,7 +1096,7 @@ def test_write_log_entries_flattened(): def test_write_log_entries_flattened_error(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1111,7 +1113,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1151,7 +1153,7 @@ async def test_write_log_entries_flattened_async(): @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1172,7 +1174,7 @@ async def test_write_log_entries_flattened_error_async(): ]) def test_list_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1204,7 +1206,7 @@ def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1220,7 +1222,7 @@ def test_list_log_entries_empty_call(): @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1255,7 +1257,7 @@ async def test_list_log_entries_async_from_dict(): def test_list_log_entries_flattened(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1289,7 +1291,7 @@ def test_list_log_entries_flattened(): def test_list_log_entries_flattened_error(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1305,7 +1307,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1341,7 +1343,7 @@ async def test_list_log_entries_flattened_async(): @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1357,7 +1359,7 @@ async def test_list_log_entries_flattened_error_async(): def test_list_log_entries_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1405,7 +1407,7 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): for i in results) def test_list_log_entries_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1448,7 +1450,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1497,7 +1499,7 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1548,7 +1550,7 @@ async def test_list_log_entries_async_pages(): ]) def test_list_monitored_resource_descriptors(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1580,7 +1582,7 @@ def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1596,7 +1598,7 @@ def test_list_monitored_resource_descriptors_empty_call(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1631,7 +1633,7 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1679,7 +1681,7 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") for i in results) def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1722,7 +1724,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1771,7 +1773,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1822,7 +1824,7 @@ async def test_list_monitored_resource_descriptors_async_pages(): ]) def test_list_logs(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1856,7 +1858,7 @@ def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1872,7 +1874,7 @@ def test_list_logs_empty_call(): @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1909,7 +1911,7 @@ async def test_list_logs_async_from_dict(): def test_list_logs_field_headers(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1941,7 +1943,7 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # 
Any value that is part of the HTTP/1.1 URI should be sent as @@ -1972,7 +1974,7 @@ async def test_list_logs_field_headers_async(): def test_list_logs_flattened(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1998,7 +2000,7 @@ def test_list_logs_flattened(): def test_list_logs_flattened_error(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2012,7 +2014,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2040,7 +2042,7 @@ async def test_list_logs_flattened_async(): @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2054,7 +2056,7 @@ async def test_list_logs_flattened_error_async(): def test_list_logs_pager(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2107,7 +2109,7 @@ def test_list_logs_pager(transport_name: str = "grpc"): for i in results) def test_list_logs_pages(transport_name: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2150,7 +2152,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2199,7 +2201,7 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2250,7 +2252,7 @@ async def test_list_logs_async_pages(): ]) def test_tail_log_entries(request_type, transport: str = 'grpc'): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2280,7 +2282,7 @@ def test_tail_log_entries(request_type, transport: str = 'grpc'): @pytest.mark.asyncio async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2316,17 +2318,17 @@ async def test_tail_log_entries_async_from_dict(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -2336,7 +2338,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -2352,12 +2354,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = LoggingServiceV2Client( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = LoggingServiceV2Client( @@ -2369,7 +2371,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = LoggingServiceV2Client(transport=transport) assert client.transport is transport @@ -2377,13 +2379,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.LoggingServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.LoggingServiceV2GrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2395,7 +2397,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -2404,14 +2406,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = LoggingServiceV2Client.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -2422,7 +2424,7 @@ def test_logging_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.LoggingServiceV2Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -2432,7 +2434,7 @@ def test_logging_service_v2_base_transport(): with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: Transport.return_value = None transport = transports.LoggingServiceV2Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2468,7 +2470,7 @@ def test_logging_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, 
mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2490,7 +2492,7 @@ def test_logging_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.LoggingServiceV2Transport() adc.assert_called_once() @@ -2498,7 +2500,7 @@ def test_logging_service_v2_base_transport_with_adc(): def test_logging_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) LoggingServiceV2Client() adc.assert_called_once_with( scopes=None, @@ -2524,7 +2526,7 @@ def test_logging_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -2568,7 +2570,7 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -2601,7 +2603,7 @@ def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpe def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2645,7 +2647,7 @@ def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( ]) def test_logging_service_v2_host_no_port(transport_name): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), transport=transport_name, ) @@ -2659,7 +2661,7 @@ def test_logging_service_v2_host_no_port(transport_name): ]) def test_logging_service_v2_host_with_port(transport_name): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), transport=transport_name, ) @@ -2707,7 +2709,7 @@ def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -2889,7 +2891,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2897,7 +2899,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: transport_class = LoggingServiceV2Client.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2905,7 +2907,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -2916,7 +2918,7 @@ async def test_transport_close_async(): def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2938,7 +2940,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2962,7 +2964,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2986,7 +2988,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI 
should be sent as @@ -3011,7 +3013,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -3027,7 +3029,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -3045,7 +3047,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3067,7 +3069,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3091,7 +3093,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should 
be sent as @@ -3115,7 +3117,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3140,7 +3142,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3156,7 +3158,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3174,7 +3176,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3196,7 +3198,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3220,7 +3222,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3244,7 +3246,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3269,7 +3271,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3285,7 +3287,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3308,7 +3310,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -3322,7 +3324,7 @@ def test_client_ctx(): ] for transport in transports: client = LoggingServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 300b7e27eccb..798b85cfb501 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -68,16 +68,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. 
If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -177,7 +167,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -201,31 +191,43 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), (MetricsServiceV2AsyncClient, "grpc_asyncio"), ]) def test_metrics_service_v2_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -259,7 +261,7 @@ def test_metrics_service_v2_client_service_account_always_use_jwt(transport_clas (MetricsServiceV2AsyncClient, "grpc_asyncio"), ]) def test_metrics_service_v2_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -296,7 +298,7 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, # Check that 
if channel is provided we won't create a new one. with mock.patch.object(MetricsServiceV2Client, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -576,19 +578,19 @@ def test_metrics_service_v2_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -599,9 +601,9 @@ def test_metrics_service_v2_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -611,7 +613,7 @@ def test_metrics_service_v2_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -716,8 +718,8 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds 
= ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -749,7 +751,7 @@ def test_metrics_service_v2_client_create_channel_credentials_file(client_class, ]) def test_list_log_metrics(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -781,7 +783,7 @@ def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -797,7 +799,7 @@ def test_list_log_metrics_empty_call(): @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -832,7 +834,7 @@ async def test_list_log_metrics_async_from_dict(): def test_list_log_metrics_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -864,7 +866,7 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -895,7 
+897,7 @@ async def test_list_log_metrics_field_headers_async(): def test_list_log_metrics_flattened(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -921,7 +923,7 @@ def test_list_log_metrics_flattened(): def test_list_log_metrics_flattened_error(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -935,7 +937,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -963,7 +965,7 @@ async def test_list_log_metrics_flattened_async(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -977,7 +979,7 @@ async def test_list_log_metrics_flattened_error_async(): def test_list_log_metrics_pager(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1030,7 +1032,7 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): for i in results) def test_list_log_metrics_pages(transport_name: str = "grpc"): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), 
transport=transport_name, ) @@ -1073,7 +1075,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1122,7 +1124,7 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1173,7 +1175,7 @@ async def test_list_log_metrics_async_pages(): ]) def test_get_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1217,7 +1219,7 @@ def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1233,7 +1235,7 @@ def test_get_log_metric_empty_call(): @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1280,7 +1282,7 @@ async def test_get_log_metric_async_from_dict(): def test_get_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1312,7 +1314,7 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1343,7 +1345,7 @@ async def test_get_log_metric_field_headers_async(): def test_get_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1369,7 +1371,7 @@ def test_get_log_metric_flattened(): def test_get_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1383,7 +1385,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1411,7 +1413,7 @@ async def test_get_log_metric_flattened_async(): @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1429,7 +1431,7 @@ async def test_get_log_metric_flattened_error_async(): ]) def test_create_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1473,7 +1475,7 @@ def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1489,7 +1491,7 @@ def test_create_log_metric_empty_call(): @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1536,7 +1538,7 @@ async def test_create_log_metric_async_from_dict(): def test_create_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1568,7 +1570,7 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1599,7 +1601,7 @@ async def test_create_log_metric_field_headers_async(): def test_create_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1629,7 +1631,7 @@ def test_create_log_metric_flattened(): def test_create_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1644,7 +1646,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1676,7 +1678,7 @@ async def test_create_log_metric_flattened_async(): @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1695,7 +1697,7 @@ async def test_create_log_metric_flattened_error_async(): ]) def test_update_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1739,7 +1741,7 @@ def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1755,7 +1757,7 @@ def test_update_log_metric_empty_call(): @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1802,7 +1804,7 @@ async def test_update_log_metric_async_from_dict(): def test_update_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1834,7 +1836,7 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1865,7 +1867,7 @@ async def test_update_log_metric_field_headers_async(): def test_update_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1895,7 +1897,7 @@ def test_update_log_metric_flattened(): def test_update_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1910,7 +1912,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1942,7 +1944,7 @@ async def test_update_log_metric_flattened_async(): @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1961,7 +1963,7 @@ async def test_update_log_metric_flattened_error_async(): ]) def test_delete_log_metric(request_type, transport: str = 'grpc'): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1990,7 +1992,7 @@ def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2006,7 +2008,7 @@ def test_delete_log_metric_empty_call(): @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2038,7 +2040,7 @@ async def test_delete_log_metric_async_from_dict(): def test_delete_log_metric_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2070,7 +2072,7 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2101,7 +2103,7 @@ async def test_delete_log_metric_field_headers_async(): def test_delete_log_metric_flattened(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2127,7 +2129,7 @@ def test_delete_log_metric_flattened(): def test_delete_log_metric_flattened_error(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2141,7 +2143,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2169,7 +2171,7 @@ async def test_delete_log_metric_flattened_async(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2184,17 +2186,17 @@ async def test_delete_log_metric_flattened_error_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -2204,7 +2206,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -2220,12 +2222,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = MetricsServiceV2Client( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = MetricsServiceV2Client( @@ -2237,7 +2239,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.MetricsServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = MetricsServiceV2Client(transport=transport) assert client.transport is transport @@ -2245,13 +2247,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.MetricsServiceV2GrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2263,7 +2265,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -2272,14 +2274,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = MetricsServiceV2Client.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -2290,7 +2292,7 @@ def test_metrics_service_v2_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.MetricsServiceV2Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -2300,7 +2302,7 @@ def test_metrics_service_v2_base_transport(): with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: Transport.return_value = None transport = transports.MetricsServiceV2Transport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2335,7 +2337,7 @@ def test_metrics_service_v2_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport( credentials_file="credentials.json", quota_project_id="octopus", @@ -2357,7 +2359,7 @@ def test_metrics_service_v2_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.MetricsServiceV2Transport() adc.assert_called_once() @@ -2365,7 +2367,7 @@ def test_metrics_service_v2_base_transport_with_adc(): def test_metrics_service_v2_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) MetricsServiceV2Client() adc.assert_called_once_with( scopes=None, @@ -2391,7 +2393,7 @@ def test_metrics_service_v2_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -2435,7 +2437,7 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -2468,7 +2470,7 @@ def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpe def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2512,7 +2514,7 @@ def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( ]) def test_metrics_service_v2_host_no_port(transport_name): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), transport=transport_name, ) @@ -2526,7 +2528,7 @@ def test_metrics_service_v2_host_no_port(transport_name): ]) def test_metrics_service_v2_host_with_port(transport_name): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), transport=transport_name, ) @@ -2574,7 +2576,7 @@ def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -2756,7 +2758,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2764,7 +2766,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: transport_class = MetricsServiceV2Client.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2772,7 +2774,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -2783,7 +2785,7 @@ async def test_transport_close_async(): def test_cancel_operation(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2805,7 +2807,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2829,7 +2831,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2853,7 +2855,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI 
should be sent as @@ -2878,7 +2880,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -2894,7 +2896,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -2912,7 +2914,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2934,7 +2936,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2958,7 +2960,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should 
be sent as @@ -2982,7 +2984,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3007,7 +3009,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3023,7 +3025,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3041,7 +3043,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3063,7 +3065,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3087,7 +3089,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3111,7 +3113,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3136,7 +3138,7 @@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3152,7 +3154,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -3175,7 +3177,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -3189,7 +3191,7 @@ def test_client_ctx(): ] for transport in transports: client = MetricsServiceV2Client( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 1f1080604a33..668dd5b97e62 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -346,6 +346,7 @@ def _read_environment_variables(): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): """Return the client cert source to be used by the client. @@ -364,6 +365,7 @@ def _get_client_cert_source(provided_cert_source, use_cert_flag): client_cert_source = mtls.default_client_cert_source() return client_cert_source + @staticmethod def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): """Return the API endpoint used by the client. @@ -427,15 +429,16 @@ def _compare_universes(client_universe: str, Raises: ValueError: when client_universe does not match the universe in credentials. """ - if credentials: - credentials_universe = credentials.universe_domain - if client_universe != credentials_universe: - default_universe = CloudRedisClient._DEFAULT_UNIVERSE - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") + + default_universe = CloudRedisClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError("The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default.") return True def _validate_universe_domain(self): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index e13752192174..970ef113f31e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -40,6 +40,7 @@ dependencies = [ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index 185f7d366c2f..2c74b9860b39 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -5,5 +5,6 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ce363b93d469..a74c1dc425d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -80,16 +80,6 @@ def modify_default_endpoint(client): def modify_default_endpoint_template(client): return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE -# Anonymous Credentials with universe domain property. If no universe domain is provided, then -# the default universe domain is "googleapis.com". -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - @property - def universe_domain(self): - return self._universe_domain def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" @@ -190,7 +180,7 @@ def test__get_universe_domain(): def test__validate_universe_domain(client_class, transport_class, transport_name): client = client_class( transport=transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) ) assert client._validate_universe_domain() == True @@ -214,24 +204,36 @@ def test__validate_universe_domain(client_class, transport_class, transport_name client = client_class(transport=transport) assert client._validate_universe_domain() == True - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="foo.com")) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=_AnonymousCredentialsWithUniverseDomain(),)) + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class( + transport=transport_class(credentials=credentials) + ) with pytest.raises(ValueError) as excinfo: client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), @@ -239,7 +241,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name (CloudRedisClient, "rest"), ]) def test_cloud_redis_client_from_service_account_info(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: factory.return_value = creds info = {"valid": True} @@ -278,7 +280,7 @@ def test_cloud_redis_client_service_account_always_use_jwt(transport_class, tran (CloudRedisClient, "rest"), ]) def test_cloud_redis_client_from_service_account_file(client_class, transport_name): - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) @@ -320,7 +322,7 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp # Check that if channel is provided we won't create a new one. 
with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) client = client_class(transport=transport) gtc.assert_not_called() @@ -602,19 +604,19 @@ def test_cloud_redis_client_client_api_endpoint(client_class): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == api_override # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - client = client_class(credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), @@ -625,9 +627,9 @@ def test_cloud_redis_client_client_api_endpoint(client_class): universe_exists = hasattr(options, "universe_domain") if universe_exists: options = client_options.ClientOptions(universe_domain=mock_universe) - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) else: - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) assert client.universe_domain == (mock_universe if universe_exists else default_universe) @@ -637,7 +639,7 @@ def test_cloud_redis_client_client_api_endpoint(client_class): if hasattr(options, "universe_domain"): delattr(options, "universe_domain") with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - client = client_class(client_options=options, credentials=_AnonymousCredentialsWithUniverseDomain()) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) assert client.api_endpoint == default_endpoint @@ -744,8 +746,8 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ) as adc, mock.patch.object( grpc_helpers, "create_channel" ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() - file_creds = _AnonymousCredentialsWithUniverseDomain() + creds = 
ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() load_creds.return_value = (file_creds, None) adc.return_value = (creds, None) client = client_class(client_options=options, transport=transport_name) @@ -773,7 +775,7 @@ def test_cloud_redis_client_create_channel_credentials_file(client_class, transp ]) def test_list_instances(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -807,7 +809,7 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -823,7 +825,7 @@ def test_list_instances_empty_call(): @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -860,7 +862,7 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -892,7 +894,7 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -923,7 +925,7 @@ async def 
test_list_instances_field_headers_async(): def test_list_instances_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -949,7 +951,7 @@ def test_list_instances_flattened(): def test_list_instances_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -963,7 +965,7 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -991,7 +993,7 @@ async def test_list_instances_flattened_async(): @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1005,7 +1007,7 @@ async def test_list_instances_flattened_error_async(): def test_list_instances_pager(transport_name: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1058,7 +1060,7 @@ def test_list_instances_pager(transport_name: str = "grpc"): for i in results) def test_list_instances_pages(transport_name: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1101,7 +1103,7 @@ def test_list_instances_pages(transport_name: 
str = "grpc"): @pytest.mark.asyncio async def test_list_instances_async_pager(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1150,7 +1152,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1201,7 +1203,7 @@ async def test_list_instances_async_pages(): ]) def test_get_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1285,7 +1287,7 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1301,7 +1303,7 @@ def test_get_instance_empty_call(): @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1388,7 +1390,7 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1420,7 +1422,7 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1451,7 +1453,7 @@ async def test_get_instance_field_headers_async(): def test_get_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1477,7 +1479,7 @@ def test_get_instance_flattened(): def test_get_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1491,7 +1493,7 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1519,7 +1521,7 @@ async def test_get_instance_flattened_async(): @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1537,7 +1539,7 @@ async def test_get_instance_flattened_error_async(): ]) def test_get_instance_auth_string(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1569,7 +1571,7 @@ def test_get_instance_auth_string_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1585,7 +1587,7 @@ def test_get_instance_auth_string_empty_call(): @pytest.mark.asyncio async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1620,7 +1622,7 @@ async def test_get_instance_auth_string_async_from_dict(): def test_get_instance_auth_string_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1652,7 +1654,7 @@ def test_get_instance_auth_string_field_headers(): @pytest.mark.asyncio async def test_get_instance_auth_string_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1683,7 +1685,7 @@ async def test_get_instance_auth_string_field_headers_async(): def test_get_instance_auth_string_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1709,7 +1711,7 @@ def test_get_instance_auth_string_flattened(): def test_get_instance_auth_string_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1723,7 +1725,7 @@ def test_get_instance_auth_string_flattened_error(): @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1751,7 +1753,7 @@ async def test_get_instance_auth_string_flattened_async(): @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1769,7 +1771,7 @@ async def test_get_instance_auth_string_flattened_error_async(): ]) def test_create_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1798,7 +1800,7 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -1814,7 +1816,7 @@ def test_create_instance_empty_call(): @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1848,7 +1850,7 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1880,7 +1882,7 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1911,7 +1913,7 @@ async def test_create_instance_field_headers_async(): def test_create_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1945,7 +1947,7 @@ def test_create_instance_flattened(): def test_create_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -1961,7 +1963,7 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1999,7 +2001,7 @@ async def test_create_instance_flattened_async(): @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2019,7 +2021,7 @@ async def test_create_instance_flattened_error_async(): ]) def test_update_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2048,7 +2050,7 @@ def test_update_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2064,7 +2066,7 @@ def test_update_instance_empty_call(): @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2098,7 +2100,7 @@ async def test_update_instance_async_from_dict(): def test_update_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2130,7 +2132,7 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2161,7 +2163,7 @@ async def test_update_instance_field_headers_async(): def test_update_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2191,7 +2193,7 @@ def test_update_instance_flattened(): def test_update_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2206,7 +2208,7 @@ def test_update_instance_flattened_error(): @pytest.mark.asyncio async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2240,7 +2242,7 @@ async def test_update_instance_flattened_async(): @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2259,7 +2261,7 @@ async def test_update_instance_flattened_error_async(): ]) def test_upgrade_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2288,7 +2290,7 @@ def test_upgrade_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2304,7 +2306,7 @@ def test_upgrade_instance_empty_call(): @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2338,7 +2340,7 @@ async def test_upgrade_instance_async_from_dict(): def test_upgrade_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2370,7 +2372,7 @@ def test_upgrade_instance_field_headers(): @pytest.mark.asyncio async def test_upgrade_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2401,7 +2403,7 @@ async def test_upgrade_instance_field_headers_async(): def test_upgrade_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2431,7 +2433,7 @@ def test_upgrade_instance_flattened(): def test_upgrade_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2446,7 +2448,7 @@ def test_upgrade_instance_flattened_error(): @pytest.mark.asyncio async def test_upgrade_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2480,7 +2482,7 @@ async def test_upgrade_instance_flattened_async(): @pytest.mark.asyncio async def test_upgrade_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2499,7 +2501,7 @@ async def test_upgrade_instance_flattened_error_async(): ]) def test_import_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2528,7 +2530,7 @@ def test_import_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2544,7 +2546,7 @@ def test_import_instance_empty_call(): @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2578,7 +2580,7 @@ async def test_import_instance_async_from_dict(): def test_import_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2610,7 +2612,7 @@ def test_import_instance_field_headers(): @pytest.mark.asyncio async def test_import_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2641,7 +2643,7 @@ async def test_import_instance_field_headers_async(): def test_import_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2671,7 +2673,7 @@ def test_import_instance_flattened(): def test_import_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2686,7 +2688,7 @@ def test_import_instance_flattened_error(): @pytest.mark.asyncio async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2720,7 +2722,7 @@ async def test_import_instance_flattened_async(): @pytest.mark.asyncio async def test_import_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2739,7 +2741,7 @@ async def test_import_instance_flattened_error_async(): ]) def test_export_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2768,7 +2770,7 @@ def test_export_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -2784,7 +2786,7 @@ def test_export_instance_empty_call(): @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2818,7 +2820,7 @@ async def test_export_instance_async_from_dict(): def test_export_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2850,7 +2852,7 @@ def test_export_instance_field_headers(): @pytest.mark.asyncio async def test_export_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2881,7 +2883,7 @@ async def test_export_instance_field_headers_async(): def test_export_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2911,7 +2913,7 @@ def test_export_instance_flattened(): def test_export_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2926,7 +2928,7 @@ def test_export_instance_flattened_error(): @pytest.mark.asyncio async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2960,7 +2962,7 @@ async def test_export_instance_flattened_async(): @pytest.mark.asyncio async def test_export_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -2979,7 +2981,7 @@ async def test_export_instance_flattened_error_async(): ]) def test_failover_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3008,7 +3010,7 @@ def test_failover_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3024,7 +3026,7 @@ def test_failover_instance_empty_call(): @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3058,7 +3060,7 @@ async def test_failover_instance_async_from_dict(): def test_failover_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3090,7 +3092,7 @@ def test_failover_instance_field_headers(): @pytest.mark.asyncio async def test_failover_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3121,7 +3123,7 @@ async def test_failover_instance_field_headers_async(): def test_failover_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3151,7 +3153,7 @@ def test_failover_instance_flattened(): def test_failover_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3166,7 +3168,7 @@ def test_failover_instance_flattened_error(): @pytest.mark.asyncio async def test_failover_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3200,7 +3202,7 @@ async def test_failover_instance_flattened_async(): @pytest.mark.asyncio async def test_failover_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3219,7 +3221,7 @@ async def test_failover_instance_flattened_error_async(): ]) def test_delete_instance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3248,7 +3250,7 @@ def test_delete_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3264,7 +3266,7 @@ def test_delete_instance_empty_call(): @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3298,7 +3300,7 @@ async def test_delete_instance_async_from_dict(): def test_delete_instance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3330,7 +3332,7 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3361,7 +3363,7 @@ async def test_delete_instance_field_headers_async(): def test_delete_instance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3387,7 +3389,7 @@ def test_delete_instance_flattened(): def test_delete_instance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3401,7 +3403,7 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3431,7 +3433,7 @@ async def test_delete_instance_flattened_async(): @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3449,7 +3451,7 @@ async def test_delete_instance_flattened_error_async(): ]) def test_reschedule_maintenance(request_type, transport: str = 'grpc'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3478,7 +3480,7 @@ def test_reschedule_maintenance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) @@ -3494,7 +3496,7 @@ def test_reschedule_maintenance_empty_call(): @pytest.mark.asyncio async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3528,7 +3530,7 @@ async def test_reschedule_maintenance_async_from_dict(): def test_reschedule_maintenance_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3560,7 +3562,7 @@ def test_reschedule_maintenance_field_headers(): @pytest.mark.asyncio async def test_reschedule_maintenance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3591,7 +3593,7 @@ async def test_reschedule_maintenance_field_headers_async(): def test_reschedule_maintenance_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3623,7 +3625,7 @@ def test_reschedule_maintenance_flattened(): def test_reschedule_maintenance_flattened_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3639,7 +3641,7 @@ def test_reschedule_maintenance_flattened_error(): @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3675,7 +3677,7 @@ async def test_reschedule_maintenance_flattened_async(): @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Attempting to call a method with both a request object and flattened @@ -3695,7 +3697,7 @@ async def test_reschedule_maintenance_flattened_error_async(): ]) def test_list_instances_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -3743,14 +3745,14 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = 'parent_value' - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).list_instances._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set(("page_size", "page_token", )) jsonified_request.update(unset_fields) @@ -3760,7 +3762,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan assert jsonified_request["parent"] == 'parent_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -3802,7 +3804,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan def test_list_instances_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.list_instances._get_unset_required_fields({}) assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) @@ -3811,7 +3813,7 @@ def test_list_instances_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_list_instances_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -3850,7 +3852,7 @@ def test_list_instances_rest_interceptors(null_interceptor): def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ListInstancesRequest): 
client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3870,7 +3872,7 @@ def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=c def test_list_instances_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -3908,7 +3910,7 @@ def test_list_instances_rest_flattened(): def test_list_instances_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3923,7 +3925,7 @@ def test_list_instances_rest_flattened_error(transport: str = 'rest'): def test_list_instances_rest_pager(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3989,7 +3991,7 @@ def test_list_instances_rest_pager(transport: str = 'rest'): ]) def test_get_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4087,14 +4089,14 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = 
transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4102,7 +4104,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -4144,7 +4146,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR def test_get_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -4153,7 +4155,7 @@ def test_get_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4192,7 +4194,7 @@ def test_get_instance_rest_interceptors(null_interceptor): def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4212,7 +4214,7 @@ def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=clo def test_get_instance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4250,7 +4252,7 @@ def test_get_instance_rest_flattened(): def test_get_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4265,7 +4267,7 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): def test_get_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -4276,7 +4278,7 @@ def test_get_instance_rest_error(): ]) def test_get_instance_auth_string_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4322,14 +4324,14 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
# verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance_auth_string._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).get_instance_auth_string._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance_auth_string._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4337,7 +4339,7 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -4379,7 +4381,7 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
def test_get_instance_auth_string_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.get_instance_auth_string._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -4388,7 +4390,7 @@ def test_get_instance_auth_string_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_get_instance_auth_string_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4427,7 +4429,7 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4447,7 +4449,7 @@ def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', requ def test_get_instance_auth_string_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4485,7 +4487,7 @@ def test_get_instance_auth_string_rest_flattened(): def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4500,7 +4502,7 @@ def 
test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): def test_get_instance_auth_string_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -4511,7 +4513,7 @@ def test_get_instance_auth_string_rest_error(): ]) def test_create_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4618,7 +4620,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns # verify fields with default values are dropped assert "instanceId" not in jsonified_request - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -4628,7 +4630,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns jsonified_request["parent"] = 'parent_value' jsonified_request["instanceId"] = 'instance_id_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).create_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("instance_id", )) jsonified_request.update(unset_fields) @@ -4640,7 +4642,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns assert jsonified_request["instanceId"] == 'instance_id_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -4684,7 +4686,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns def test_create_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.create_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) @@ -4693,7 +4695,7 @@ def test_create_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_create_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -4733,7 +4735,7 @@ def test_create_instance_rest_interceptors(null_interceptor): def test_create_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4753,7 +4755,7 @@ def test_create_instance_rest_bad_request(transport: str = 'rest', request_type= def test_create_instance_rest_flattened(): client = 
CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4791,7 +4793,7 @@ def test_create_instance_rest_flattened(): def test_create_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4808,7 +4810,7 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): def test_create_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -4819,7 +4821,7 @@ def test_create_instance_rest_error(): ]) def test_update_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -4923,12 +4925,12 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).update_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set(("update_mask", )) jsonified_request.update(unset_fields) @@ -4936,7 +4938,7 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns # verify required fields with non-default values are left alone client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -4976,7 +4978,7 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns def test_update_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.update_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) @@ -4985,7 +4987,7 @@ def test_update_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_update_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5025,7 +5027,7 @@ def test_update_instance_rest_interceptors(null_interceptor): def test_update_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5045,7 +5047,7 @@ def test_update_instance_rest_bad_request(transport: str = 'rest', request_type= def test_update_instance_rest_flattened(): client = CloudRedisClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5082,7 +5084,7 @@ def test_update_instance_rest_flattened(): def test_update_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5098,7 +5100,7 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): def test_update_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -5109,7 +5111,7 @@ def test_update_instance_rest_error(): ]) def test_upgrade_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5151,7 +5153,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).upgrade_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -5159,7 +5161,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI jsonified_request["name"] = 'name_value' jsonified_request["redisVersion"] = 'redis_version_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).upgrade_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).upgrade_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5169,7 +5171,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI assert jsonified_request["redisVersion"] == 'redis_version_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -5209,7 +5211,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI def test_upgrade_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.upgrade_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "redisVersion", ))) @@ -5218,7 +5220,7 @@ def test_upgrade_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_upgrade_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5258,7 +5260,7 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): def test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5278,7 +5280,7 @@ def 
test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type def test_upgrade_instance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5315,7 +5317,7 @@ def test_upgrade_instance_rest_flattened(): def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5331,7 +5333,7 @@ def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): def test_upgrade_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -5342,7 +5344,7 @@ def test_upgrade_instance_rest_error(): ]) def test_import_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5383,14 +5385,14 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).import_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).import_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).import_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5398,7 +5400,7 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -5438,7 +5440,7 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns def test_import_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.import_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) @@ -5447,7 +5449,7 @@ def test_import_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_import_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5487,7 +5489,7 @@ def test_import_instance_rest_interceptors(null_interceptor): def test_import_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5507,7 +5509,7 @@ def 
test_import_instance_rest_bad_request(transport: str = 'rest', request_type= def test_import_instance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5544,7 +5546,7 @@ def test_import_instance_rest_flattened(): def test_import_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5560,7 +5562,7 @@ def test_import_instance_rest_flattened_error(transport: str = 'rest'): def test_import_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -5571,7 +5573,7 @@ def test_import_instance_rest_error(): ]) def test_export_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5612,14 +5614,14 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).export_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).export_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5627,7 +5629,7 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -5667,7 +5669,7 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns def test_export_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.export_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) @@ -5676,7 +5678,7 @@ def test_export_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_export_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5716,7 +5718,7 @@ def test_export_instance_rest_interceptors(null_interceptor): def test_export_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5736,7 +5738,7 @@ def 
test_export_instance_rest_bad_request(transport: str = 'rest', request_type= def test_export_instance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5773,7 +5775,7 @@ def test_export_instance_rest_flattened(): def test_export_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5789,7 +5791,7 @@ def test_export_instance_rest_flattened_error(transport: str = 'rest'): def test_export_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -5800,7 +5802,7 @@ def test_export_instance_rest_error(): ]) def test_failover_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -5841,14 +5843,14 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).failover_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).failover_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).failover_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -5856,7 +5858,7 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -5896,7 +5898,7 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove def test_failover_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.failover_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -5905,7 +5907,7 @@ def test_failover_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_failover_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -5945,7 +5947,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): def test_failover_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5965,7 +5967,7 @@ def 
test_failover_instance_rest_bad_request(transport: str = 'rest', request_typ def test_failover_instance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6002,7 +6004,7 @@ def test_failover_instance_rest_flattened(): def test_failover_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6018,7 +6020,7 @@ def test_failover_instance_rest_flattened_error(transport: str = 'rest'): def test_failover_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -6029,7 +6031,7 @@ def test_failover_instance_rest_error(): ]) def test_delete_instance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6070,14 +6072,14 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).delete_instance._get_unset_required_fields(jsonified_request) + unset_fields = 
transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6085,7 +6087,7 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -6124,7 +6126,7 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns def test_delete_instance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.delete_instance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", ))) @@ -6133,7 +6135,7 @@ def test_delete_instance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_delete_instance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -6173,7 +6175,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): def test_delete_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6193,7 +6195,7 @@ def test_delete_instance_rest_bad_request(transport: str = 
'rest', request_type= def test_delete_instance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6229,7 +6231,7 @@ def test_delete_instance_rest_flattened(): def test_delete_instance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6244,7 +6246,7 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): def test_delete_instance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -6255,7 +6257,7 @@ def test_delete_instance_rest_error(): ]) def test_reschedule_maintenance_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6296,14 +6298,14 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re # verify fields with default values are dropped - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = 'name_value' - unset_fields = transport_class(credentials=_AnonymousCredentialsWithUniverseDomain()).reschedule_maintenance._get_unset_required_fields(jsonified_request) + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).reschedule_maintenance._get_unset_required_fields(jsonified_request) 
jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -6311,7 +6313,7 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re assert jsonified_request["name"] == 'name_value' client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) request = request_type(**request_init) @@ -6351,7 +6353,7 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re def test_reschedule_maintenance_rest_unset_required_fields(): - transport = transports.CloudRedisRestTransport(credentials=_AnonymousCredentialsWithUniverseDomain) + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) unset_fields = transport.reschedule_maintenance._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", ))) @@ -6360,7 +6362,7 @@ def test_reschedule_maintenance_rest_unset_required_fields(): @pytest.mark.parametrize("null_interceptor", [True, False]) def test_reschedule_maintenance_rest_interceptors(null_interceptor): transport = transports.CloudRedisRestTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), ) client = CloudRedisClient(transport=transport) @@ -6400,7 +6402,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6420,7 +6422,7 @@ def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', reques def 
test_reschedule_maintenance_rest_flattened(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) @@ -6458,7 +6460,7 @@ def test_reschedule_maintenance_rest_flattened(): def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -6475,7 +6477,7 @@ def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): def test_reschedule_maintenance_rest_error(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest' ) @@ -6483,17 +6485,17 @@ def test_reschedule_maintenance_rest_error(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudRedisClient( @@ -6503,7 +6505,7 @@ def test_credentials_transport_error(): # It is an error to provide an api_key and a transport instance. 
transport = transports.CloudRedisGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) options = client_options.ClientOptions() options.api_key = "api_key" @@ -6519,12 +6521,12 @@ def test_credentials_transport_error(): with pytest.raises(ValueError): client = CloudRedisClient( client_options=options, - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # It is an error to provide scopes and a transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = CloudRedisClient( @@ -6536,7 +6538,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) client = CloudRedisClient(transport=transport) assert client.transport is transport @@ -6544,13 +6546,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.CloudRedisGrpcTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.CloudRedisGrpcAsyncIOTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -6563,7 +6565,7 @@ def test_transport_get_channel(): def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -6573,14 +6575,14 @@ def test_transport_adc(transport_class): ]) def test_transport_kind(transport_name): transport = CloudRedisClient.get_transport_class(transport_name)( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert transport.kind == transport_name def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance( client.transport, @@ -6591,7 +6593,7 @@ def test_cloud_redis_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.CloudRedisTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json" ) @@ -6601,7 +6603,7 @@ def test_cloud_redis_base_transport(): with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: Transport.return_value = None transport = transports.CloudRedisTransport( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -6650,7 +6652,7 @@ def test_cloud_redis_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as 
Transport: Transport.return_value = None - load_creds.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudRedisTransport( credentials_file="credentials.json", quota_project_id="octopus", @@ -6668,7 +6670,7 @@ def test_cloud_redis_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: Transport.return_value = None - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.CloudRedisTransport() adc.assert_called_once() @@ -6676,7 +6678,7 @@ def test_cloud_redis_base_transport_with_adc(): def test_cloud_redis_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) CloudRedisClient() adc.assert_called_once_with( scopes=None, @@ -6698,7 +6700,7 @@ def test_cloud_redis_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (_AnonymousCredentialsWithUniverseDomain(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class(quota_project_id="octopus", scopes=["1", "2"]) adc.assert_called_once_with( scopes=["1", "2"], @@ -6743,7 +6745,7 @@ def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( grpc_helpers, "create_channel", autospec=True ) as create_channel: - creds = _AnonymousCredentialsWithUniverseDomain() + creds = ga_credentials.AnonymousCredentials() adc.return_value = (creds, None) transport_class( quota_project_id="octopus", @@ -6772,7 +6774,7 @@ def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( transport_class ): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -6810,7 +6812,7 @@ def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( ) def test_cloud_redis_http_transport_client_cert_source_for_mtls(): - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: transports.CloudRedisRestTransport ( credentials=cred, @@ -6821,7 +6823,7 @@ def test_cloud_redis_http_transport_client_cert_source_for_mtls(): def test_cloud_redis_rest_lro_client(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='rest', ) transport = client.transport @@ -6843,7 +6845,7 @@ def test_cloud_redis_rest_lro_client(): ]) def test_cloud_redis_host_no_port(transport_name): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), transport=transport_name, ) @@ -6860,7 +6862,7 @@ def test_cloud_redis_host_no_port(transport_name): ]) def test_cloud_redis_host_with_port(transport_name): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), transport=transport_name, ) @@ -6874,8 +6876,8 @@ def test_cloud_redis_host_with_port(transport_name): "rest", ]) def test_cloud_redis_client_transport_session_collision(transport_name): - creds1 = _AnonymousCredentialsWithUniverseDomain() - creds2 = _AnonymousCredentialsWithUniverseDomain() + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() client1 = CloudRedisClient( credentials=creds1, 
transport=transport_name, @@ -6957,7 +6959,7 @@ def test_cloud_redis_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = _AnonymousCredentialsWithUniverseDomain() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): with mock.patch.object(google.auth, 'default') as adc: adc.return_value = (cred, None) @@ -7029,7 +7031,7 @@ def test_cloud_redis_transport_channel_mtls_with_adc( def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) transport = client.transport @@ -7046,7 +7048,7 @@ def test_cloud_redis_grpc_lro_client(): def test_cloud_redis_grpc_lro_async_client(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport='grpc_asyncio', ) transport = client.transport @@ -7175,7 +7177,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -7183,7 +7185,7 @@ def test_client_with_default_client_info(): with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: transport_class = CloudRedisClient.get_transport_class() transport = transport_class( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -7191,7 +7193,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = CloudRedisAsyncClient( - 
credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -7202,7 +7204,7 @@ async def test_transport_close_async(): def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7224,7 +7226,7 @@ def test_get_location_rest_bad_request(transport: str = 'rest', request_type=loc ]) def test_get_location_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -7249,7 +7251,7 @@ def test_get_location_rest(request_type): def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7271,7 +7273,7 @@ def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=l ]) def test_list_locations_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1'} @@ -7296,7 +7298,7 @@ def test_list_locations_rest(request_type): def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7318,7 +7320,7 @@ def 
test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_cancel_operation_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -7343,7 +7345,7 @@ def test_cancel_operation_rest(request_type): def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7365,7 +7367,7 @@ def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type ]) def test_delete_operation_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -7390,7 +7392,7 @@ def test_delete_operation_rest(request_type): def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7412,7 +7414,7 @@ def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=op ]) def test_get_operation_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} @@ -7437,7 +7439,7 @@ def test_get_operation_rest(request_type): def test_list_operations_rest_bad_request(transport: str = 'rest', 
request_type=operations_pb2.ListOperationsRequest): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -7459,7 +7461,7 @@ def test_list_operations_rest_bad_request(transport: str = 'rest', request_type= ]) def test_list_operations_rest(request_type): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) request_init = {'name': 'projects/sample1/locations/sample2'} @@ -7485,7 +7487,7 @@ def test_list_operations_rest(request_type): def test_delete_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7507,7 +7509,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7531,7 +7533,7 @@ async def test_delete_operation_async(transport: str = "grpc_asyncio"): def test_delete_operation_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7555,7 +7557,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), 
) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7580,7 +7582,7 @@ async def test_delete_operation_field_headers_async(): def test_delete_operation_from_dict(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -7596,7 +7598,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -7614,7 +7616,7 @@ async def test_delete_operation_from_dict_async(): def test_cancel_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7636,7 +7638,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7660,7 +7662,7 @@ async def test_cancel_operation_async(transport: str = "grpc_asyncio"): def test_cancel_operation_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is 
part of the HTTP/1.1 URI should be sent as @@ -7684,7 +7686,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7709,7 +7711,7 @@ async def test_cancel_operation_field_headers_async(): def test_cancel_operation_from_dict(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -7725,7 +7727,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -7743,7 +7745,7 @@ async def test_cancel_operation_from_dict_async(): def test_get_operation(transport: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7765,7 +7767,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7789,7 +7791,7 @@ async def test_get_operation_async(transport: str = "grpc_asyncio"): def test_get_operation_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7813,7 +7815,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7838,7 +7840,7 @@ async def test_get_operation_field_headers_async(): def test_get_operation_from_dict(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -7854,7 +7856,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -7872,7 +7874,7 @@ async def test_get_operation_from_dict_async(): def test_list_operations(transport: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7894,7 +7896,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -7918,7 +7920,7 @@ async def test_list_operations_async(transport: str = "grpc_asyncio"): def test_list_operations_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7942,7 +7944,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7967,7 +7969,7 
@@ async def test_list_operations_field_headers_async(): def test_list_operations_from_dict(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -7983,7 +7985,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -8001,7 +8003,7 @@ async def test_list_operations_from_dict_async(): def test_list_locations(transport: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8023,7 +8025,7 @@ def test_list_locations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8047,7 +8049,7 @@ async def test_list_locations_async(transport: str = "grpc_asyncio"): def test_list_locations_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8071,7 +8073,7 @@ def 
test_list_locations_field_headers(): @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8096,7 +8098,7 @@ async def test_list_locations_field_headers_async(): def test_list_locations_from_dict(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8112,7 +8114,7 @@ def test_list_locations_from_dict(): @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8130,7 +8132,7 @@ async def test_list_locations_from_dict_async(): def test_get_location(transport: str = "grpc"): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8152,7 +8154,7 @@ def test_get_location(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -8176,7 +8178,7 @@ async def test_get_location_async(transport: str = "grpc_asyncio"): def test_get_location_field_headers(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain()) + credentials=ga_credentials.AnonymousCredentials()) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -8199,7 +8201,7 @@ def test_get_location_field_headers(): @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain() + credentials=ga_credentials.AnonymousCredentials() ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8224,7 +8226,7 @@ async def test_get_location_field_headers_async(): def test_get_location_from_dict(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8240,7 +8242,7 @@ def test_get_location_from_dict(): @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = CloudRedisAsyncClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -8264,7 +8266,7 @@ def test_transport_close(): for transport, close_name in transports.items(): client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: @@ -8279,7 +8281,7 @@ def test_client_ctx(): ] for transport in transports: client = CloudRedisClient( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport ) # Test client calls underlying transport. 
diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index ec2fb6ab5b55..f8fdee76d112 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -19,6 +19,7 @@ import pytest from google.api_core.client_options import ClientOptions # type: ignore +import google.auth from google.auth import credentials as ga_credentials from google.showcase import EchoClient from google.showcase import IdentityClient @@ -86,19 +87,13 @@ def pytest_addoption(parser): ) -class _AnonymousCredentialsWithUniverseDomain(ga_credentials.AnonymousCredentials): - def __init__(self, universe_domain="googleapis.com"): - super(_AnonymousCredentialsWithUniverseDomain, self).__init__() - self._universe_domain = universe_domain - - # TODO: Need to test without passing in a transport class def construct_client( client_class, use_mtls, transport_name="grpc", channel_creator=grpc.insecure_channel, # for grpc,grpc_asyncio only - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), transport_endpoint="localhost:7469" ): if use_mtls: @@ -145,8 +140,15 @@ def use_mtls(request): def parametrized_echo(use_mtls, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): print( f"test_params: {channel_creator, transport_name, transport_endpoint, credential_universe, client_universe}") - credentials = _AnonymousCredentialsWithUniverseDomain( - universe_domain=credential_universe) + credentials = ga_credentials.AnonymousCredentials() + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. 
+ google_auth_major, google_auth_minor, _ = [ + int(part) for part in google.auth.__version__.split(".") + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials._universe_domain = credential_universe client = construct_client(EchoClient, use_mtls, transport_endpoint=transport_endpoint, transport_name=transport_name, @@ -169,7 +171,7 @@ def echo(use_mtls, request): @pytest.fixture(params=["grpc", "rest"]) def echo_with_universe_credentials_localhost(use_mtls, request): - return construct_client(EchoClient, use_mtls, transport_name=request.param, credentials=_AnonymousCredentialsWithUniverseDomain(universe_domain="localhost:7469")) + return construct_client(EchoClient, use_mtls, transport_name=request.param, credentials=ga_credentials.AnonymousCredentials(universe_domain="localhost:7469")) @pytest.fixture(params=["grpc", "rest"]) @@ -235,7 +237,7 @@ def intercepted_echo(use_mtls): ) intercept_channel = grpc.intercept_channel(channel, interceptor) transport = EchoClient.get_transport_class("grpc")( - credentials=_AnonymousCredentialsWithUniverseDomain(), + credentials=ga_credentials.AnonymousCredentials(), channel=intercept_channel, ) return EchoClient(transport=transport) diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py index a29ddc481dce..75a957ebdc52 100644 --- a/packages/gapic-generator/tests/system/test_universe_domain.py +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -1,5 +1,6 @@ import pytest +import google.auth import grpc # Define the parametrized data @@ -37,7 +38,14 @@ def test_universe_domain_validation_pass(parametrized_echo, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): # Test that only the configured client universe and credentials universe are used for validation assert parametrized_echo.universe_domain == client_universe - assert 
parametrized_echo.transport._credentials._universe_domain == credential_universe + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [ + int(part) for part in google.auth.__version__.split(".") + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + assert parametrized_echo.transport._credentials.universe_domain == credential_universe if transport_name == "rest": assert parametrized_echo.api_endpoint == "http://" + transport_endpoint else: @@ -61,17 +69,24 @@ def test_universe_domain_validation_pass(parametrized_echo, channel_creator, tra def test_universe_domain_validation_fail(parametrized_echo, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): """Test that only the client and credentials universes are used for validation, and not the endpoint.""" assert parametrized_echo.universe_domain == client_universe - assert parametrized_echo.transport._credentials._universe_domain == credential_universe - if transport_name == "rest": - assert parametrized_echo.api_endpoint == "http://" + transport_endpoint - elif channel_creator == grpc.insecure_channel: - # TODO: Investigate where this endpoint override is coming from - assert parametrized_echo.api_endpoint == "localhost:7469" - else: - assert parametrized_echo.api_endpoint == transport_endpoint - with pytest.raises(ValueError) as err: - parametrized_echo.echo({ - 'content': 'Universe validation failed!' - }) - assert str( - err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor, _ = [ + int(part) for part in google.auth.__version__.split(".") + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + assert parametrized_echo.transport._credentials.universe_domain == credential_universe + if transport_name == "rest": + assert parametrized_echo.api_endpoint == "http://" + transport_endpoint + elif channel_creator == grpc.insecure_channel: + # TODO: Investigate where this endpoint override is coming from + assert parametrized_echo.api_endpoint == "localhost:7469" + else: + assert parametrized_echo.api_endpoint == transport_endpoint + with pytest.raises(ValueError) as err: + parametrized_echo.echo({ + 'content': 'Universe validation failed!' + }) + assert str( + err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
From 93d8d608a5661c4e334e9757cadc0011c60aefec Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 11:48:10 -0500 Subject: [PATCH 1100/1339] chore(main): release 1.14.1 (#1925) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 7a26092d8b26..ab9a8bf8feef 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.14.1](https://github.com/googleapis/gapic-generator-python/compare/v1.14.0...v1.14.1) (2024-02-02) + + +### Bug Fixes + +* Add google-auth as a direct dependency ([2154924](https://github.com/googleapis/gapic-generator-python/commit/2154924c6ef9df6e2727b433ec7d2fee762ccde8)) +* Add staticmethod decorator to methods added in [#1873](https://github.com/googleapis/gapic-generator-python/issues/1873) ([2154924](https://github.com/googleapis/gapic-generator-python/commit/2154924c6ef9df6e2727b433ec7d2fee762ccde8)) +* Resolve AttributeError 'Credentials' object has no attribute 'universe_domain' ([2154924](https://github.com/googleapis/gapic-generator-python/commit/2154924c6ef9df6e2727b433ec7d2fee762ccde8)) + ## [1.14.0](https://github.com/googleapis/gapic-generator-python/compare/v1.13.1...v1.14.0) (2024-01-31) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 91759b01bc4a..6ca6850f621d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.14.0" +version = "1.14.1" release_status = "Development 
Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From b4854887e33bd9551a5187198da4aea9d4ec0aa7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 5 Feb 2024 18:14:07 +0100 Subject: [PATCH 1101/1339] chore(deps): update all dependencies (#1927) --- packages/gapic-generator/requirements.txt | 262 +++++++++++----------- 1 file changed, 131 insertions(+), 131 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3e984bb784db..831da9778db1 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,9 +8,9 @@ cachetools==5.3.2 \ --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth -certifi==2023.11.17 \ - --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \ - --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474 +certifi==2024.2.2 \ + --hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \ + --hash=sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1 # via requests charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ @@ -112,9 +112,9 @@ exceptiongroup==1.2.0 \ --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \ --hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 # via pytest -google-api-core==2.15.0 \ - --hash=sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a \ - --hash=sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca +google-api-core==2.16.2 \ + --hash=sha256:032d37b45d1d6bdaf68fb11ff621e2593263a239fa9246e2e94325f9c47876d2 \ + --hash=sha256:449ca0e3f14c179b4165b664256066c7861610f70b6ffe54bb01a04e9b466929 
# via -r requirements.in google-auth==2.27.0 \ --hash=sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245 \ @@ -131,61 +131,61 @@ grpc-google-iam-v1==0.13.0 \ --hash=sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89 \ --hash=sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e # via -r requirements.in -grpcio==1.60.0 \ - --hash=sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6 \ - --hash=sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328 \ - --hash=sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead \ - --hash=sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5 \ - --hash=sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491 \ - --hash=sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96 \ - --hash=sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444 \ - --hash=sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951 \ - --hash=sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf \ - --hash=sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253 \ - --hash=sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629 \ - --hash=sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae \ - --hash=sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43 \ - --hash=sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b \ - --hash=sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14 \ - --hash=sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab \ - --hash=sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390 \ - --hash=sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2 \ - --hash=sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0 \ - 
--hash=sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590 \ - --hash=sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508 \ - --hash=sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b \ - --hash=sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08 \ - --hash=sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13 \ - --hash=sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca \ - --hash=sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03 \ - --hash=sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748 \ - --hash=sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860 \ - --hash=sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d \ - --hash=sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353 \ - --hash=sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e \ - --hash=sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c \ - --hash=sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134 \ - --hash=sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415 \ - --hash=sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320 \ - --hash=sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179 \ - --hash=sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324 \ - --hash=sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18 \ - --hash=sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df \ - --hash=sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e \ - --hash=sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b \ - --hash=sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6 \ - --hash=sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d \ - 
--hash=sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff \ - --hash=sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968 \ - --hash=sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619 \ - --hash=sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139 \ - --hash=sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55 \ - --hash=sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454 \ - --hash=sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65 \ - --hash=sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a \ - --hash=sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19 \ - --hash=sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b \ - --hash=sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd +grpcio==1.60.1 \ + --hash=sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8 \ + --hash=sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8 \ + --hash=sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104 \ + --hash=sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc \ + --hash=sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092 \ + --hash=sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1 \ + --hash=sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180 \ + --hash=sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05 \ + --hash=sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287 \ + --hash=sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216 \ + --hash=sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c \ + --hash=sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d \ + 
--hash=sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594 \ + --hash=sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c \ + --hash=sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87 \ + --hash=sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de \ + --hash=sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9 \ + --hash=sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2 \ + --hash=sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff \ + --hash=sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904 \ + --hash=sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73 \ + --hash=sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549 \ + --hash=sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f \ + --hash=sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c \ + --hash=sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0 \ + --hash=sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e \ + --hash=sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367 \ + --hash=sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc \ + --hash=sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303 \ + --hash=sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb \ + --hash=sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c \ + --hash=sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804 \ + --hash=sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03 \ + --hash=sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6 \ + --hash=sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f \ + --hash=sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0 \ + 
--hash=sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd \ + --hash=sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21 \ + --hash=sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858 \ + --hash=sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce \ + --hash=sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9 \ + --hash=sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23 \ + --hash=sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073 \ + --hash=sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2 \ + --hash=sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872 \ + --hash=sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe \ + --hash=sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7 \ + --hash=sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6 \ + --hash=sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929 \ + --hash=sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962 \ + --hash=sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a \ + --hash=sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a \ + --hash=sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177 \ + --hash=sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -238,67 +238,67 @@ libcst==1.1.0 \ --hash=sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c \ --hash=sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90 # via -r requirements.in -MarkupSafe==2.1.4 \ - --hash=sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69 \ - --hash=sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0 \ - 
--hash=sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d \ - --hash=sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec \ - --hash=sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5 \ - --hash=sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411 \ - --hash=sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3 \ - --hash=sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74 \ - --hash=sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0 \ - --hash=sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949 \ - --hash=sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d \ - --hash=sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279 \ - --hash=sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f \ - --hash=sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6 \ - --hash=sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc \ - --hash=sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e \ - --hash=sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954 \ - --hash=sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656 \ - --hash=sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc \ - --hash=sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518 \ - --hash=sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56 \ - --hash=sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc \ - --hash=sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa \ - --hash=sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565 \ - --hash=sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4 \ - --hash=sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb \ - 
--hash=sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250 \ - --hash=sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4 \ - --hash=sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959 \ - --hash=sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc \ - --hash=sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474 \ - --hash=sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863 \ - --hash=sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8 \ - --hash=sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f \ - --hash=sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2 \ - --hash=sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e \ - --hash=sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e \ - --hash=sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb \ - --hash=sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f \ - --hash=sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a \ - --hash=sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26 \ - --hash=sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d \ - --hash=sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2 \ - --hash=sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131 \ - --hash=sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789 \ - --hash=sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6 \ - --hash=sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a \ - --hash=sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858 \ - --hash=sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e \ - --hash=sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb \ - 
--hash=sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e \ - --hash=sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84 \ - --hash=sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7 \ - --hash=sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea \ - --hash=sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b \ - --hash=sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6 \ - --hash=sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475 \ - --hash=sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74 \ - --hash=sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a \ - --hash=sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00 +MarkupSafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + 
--hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + 
--hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via # -r requirements.in # jinja2 @@ -350,13 +350,13 @@ pypandoc==1.12 \ 
--hash=sha256:8f44740a9f074e121d81b489f073160421611d4ead62d1b306aeb11aab3c32df \ --hash=sha256:efb4f7d68ead8bec32e22b62f02d5608a1700978b51bfc4af286fd6acfe9d218 # via -r requirements.in -pytest==7.4.4 \ - --hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \ - --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 +pytest==8.0.0 \ + --hash=sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c \ + --hash=sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6 # via pytest-asyncio -pytest-asyncio==0.23.3 \ - --hash=sha256:37a9d912e8338ee7b4a3e917381d1c95bfc8682048cb0fbc35baba316ec1faba \ - --hash=sha256:af313ce900a62fbe2b1aed18e37ad757f1ef9940c6b6a88e2954de38d6b1fb9f +pytest-asyncio==0.23.4 \ + --hash=sha256:2143d9d9375bf372a73260e4114541485e84fca350b0b6b92674ca56ff5f7ea2 \ + --hash=sha256:b0079dfac14b60cd1ce4691fbfb1748fe939db7d0234b5aba97197d10fbe0fef # via -r requirements.in pyyaml==6.0.1 \ --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ @@ -424,9 +424,9 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.1.0 \ - --hash=sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 \ - --hash=sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54 +urllib3==2.2.0 \ + --hash=sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20 \ + --hash=sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224 # via requests # The following packages are considered to be unsafe in a requirements file: From 420797677446fbca7bed13c20dc14bd6a57befcc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 07:31:35 -0500 Subject: [PATCH 1102/1339] build(deps): bump cryptography from 41.0.6 
to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#1929) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 57 +++++++++++-------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index d8a1bbca7179..2aefd0e91175 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index bb3d6ca38b14..8c11c9f3e9b6 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - 
--hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + 
--hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # 
gcp-releasetool # secretstorage From 4bbf7f4bde4790f849ba612e9111c565080635db Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 6 Feb 2024 10:45:29 -0500 Subject: [PATCH 1103/1339] fix: fix `ValueError` in `test__validate_universe_domain` (#1931) --- .../.github/sync-repo-settings.yaml | 1 + .../.github/workflows/tests.yaml | 21 +++++ .../gapic/templates/noxfile.py.j2 | 76 +++++++++++++++++++ .../%name_%version/%sub/test_%service.py.j2 | 4 +- .../integration/goldens/asset/noxfile.py | 76 +++++++++++++++++++ .../unit/gapic/asset_v1/test_asset_service.py | 4 +- .../goldens/credentials/noxfile.py | 76 +++++++++++++++++++ .../credentials_v1/test_iam_credentials.py | 4 +- .../integration/goldens/eventarc/noxfile.py | 76 +++++++++++++++++++ .../unit/gapic/eventarc_v1/test_eventarc.py | 4 +- .../integration/goldens/logging/noxfile.py | 76 +++++++++++++++++++ .../logging_v2/test_config_service_v2.py | 4 +- .../logging_v2/test_logging_service_v2.py | 4 +- .../logging_v2/test_metrics_service_v2.py | 4 +- .../integration/goldens/redis/noxfile.py | 76 +++++++++++++++++++ .../unit/gapic/redis_v1/test_cloud_redis.py | 4 +- .../gapic-generator/tests/system/conftest.py | 4 +- .../tests/system/test_universe_domain.py | 4 +- 18 files changed, 498 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 0b661a220f3c..e04213881e68 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -49,6 +49,7 @@ branchProtectionRules: - 'showcase-unit-add-iam-methods' - 'integration' - 'goldens-lint' + - 'goldens-prerelease' - 'style-check' - 'snippetgen' - 'unit (3.7)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 7d7c6a5c8b90..34da448d4d0d 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ 
b/packages/gapic-generator/.github/workflows/tests.yaml @@ -326,6 +326,27 @@ jobs: nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.12 blacken lint nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.12 blacken lint nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.12 blacken lint + goldens-prerelease: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: 'pip' + - name: Install nox. + run: | + python -m pip install nox + - name: Run the prerelease_deps nox session + # Exclude testing for asset which requires dependency google-cloud-org-policy + # in order to run unit tests + # See https://github.com/googleapis/gapic-generator-python/issues/1806 + run: | + nox -f tests/integration/goldens/credentials/noxfile.py -s prerelease_deps + nox -f tests/integration/goldens/eventarc/noxfile.py -s prerelease_deps + nox -f tests/integration/goldens/logging/noxfile.py -s prerelease_deps + nox -f tests/integration/goldens/redis/noxfile.py -s prerelease_deps style-check: runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 63958b72e182..db669133511f 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -4,6 +4,7 @@ import os import pathlib +import re import shutil import subprocess import sys @@ -38,6 +39,7 @@ nox.sessions = [ "docs", "blacken", "lint", + "prerelease_deps", ] @nox.session(python=ALL_PYTHON) @@ -58,6 +60,80 @@ def unit(session): os.path.join('tests', 'unit', ''.join(session.posargs)) ) +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 
'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): diff --git 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ef7fa51bef0e..66605a9c3e8a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -225,7 +225,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -241,7 +241,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 3069bdfad130..1035f1d3a23a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -15,6 +15,7 @@ # import os import pathlib +import re import shutil import subprocess import sys @@ -49,6 +50,7 @@ "docs", "blacken", "lint", + "prerelease_deps", ] @nox.session(python=ALL_PYTHON) @@ -69,6 +71,80 @@ def unit(session): os.path.join('tests', 'unit', ''.join(session.posargs)) ) +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/asset_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index af160eb6d5a8..d92af5254182 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -206,7 +206,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version 
of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -222,7 +222,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 4fcb174b02aa..cad74f01894d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -15,6 +15,7 @@ # import os import pathlib +import re import shutil import subprocess import sys @@ -49,6 +50,7 @@ "docs", "blacken", "lint", + "prerelease_deps", ] @nox.session(python=ALL_PYTHON) @@ -69,6 +71,80 @@ def unit(session): os.path.join('tests', 'unit', ''.join(session.posargs)) ) +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 
'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/iam/credentials_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 6b47190ddc36..60fd57cb64e0 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -196,7 +196,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -212,7 +212,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 2c53e9a6d5de..18c77ff7722b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -15,6 +15,7 @@ # import os import pathlib +import re import shutil import subprocess import sys @@ -49,6 +50,7 @@ "docs", "blacken", "lint", + "prerelease_deps", ] @nox.session(python=ALL_PYTHON) @@ -69,6 +71,80 @@ def unit(session): os.path.join('tests', 'unit', ''.join(session.posargs)) ) +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/eventarc_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index e2b0887d5208..9af49b1f2177 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -216,7 +216,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported 
version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -232,7 +232,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index dc4b148f1bba..c8c26c4f29f6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -15,6 +15,7 @@ # import os import pathlib +import re import shutil import subprocess import sys @@ -49,6 +50,7 @@ "docs", "blacken", "lint", + "prerelease_deps", ] @nox.session(python=ALL_PYTHON) @@ -69,6 +71,80 @@ def unit(session): os.path.join('tests', 'unit', ''.join(session.posargs)) ) +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 
'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/logging_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index fa144856eb29..7449687209ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -195,7 +195,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -211,7 +211,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 74014d4b19f7..0707912ba636 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -196,7 +196,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -212,7 +212,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 798b85cfb501..e32ddadc8ae8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -194,7 +194,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -210,7 +210,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 21d26879fa2b..37e6969d24cb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -15,6 +15,7 @@ # import os import pathlib +import re import shutil import subprocess import sys @@ -49,6 +50,7 @@ "docs", "blacken", "lint", + "prerelease_deps", ] @nox.session(python=ALL_PYTHON) @@ -69,6 +71,80 @@ def unit(session): os.path.join('tests', 'unit', ''.join(session.posargs)) ) +@nox.session(python=ALL_PYTHON[-1]) +def prerelease_deps(session): + """Run the unit test suite against pre-release versions of dependencies.""" + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/redis_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index a74c1dc425d0..a752b6b27d78 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -207,7 +207,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # 
google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [int(part) for part in google.auth.__version__.split(".")] + google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials = ga_credentials.AnonymousCredentials() credentials._universe_domain = "foo.com" @@ -223,7 +223,7 @@ def test__validate_universe_domain(client_class, transport_class, transport_name # # TODO: Make this test unconditional once the minimum supported version of # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor, _ = [int(part) for part in api_core_version.__version__.split(".")] + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) with pytest.raises(ValueError) as excinfo: diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index f8fdee76d112..3fe3c7b0d25f 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -144,8 +144,8 @@ def parametrized_echo(use_mtls, channel_creator, transport_name, transport_endpo # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor, _ = [ - int(part) for part in google.auth.__version__.split(".") + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] ] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials._universe_domain = credential_universe diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py index 75a957ebdc52..9690085876ba 100644 --- a/packages/gapic-generator/tests/system/test_universe_domain.py +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -41,8 +41,8 @@ def test_universe_domain_validation_pass(parametrized_echo, channel_creator, tra # TODO: This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor, _ = [ - int(part) for part in google.auth.__version__.split(".") + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] ] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): assert parametrized_echo.transport._credentials.universe_domain == credential_universe From 611e82e6fa3c2494c8d7be58e14a2034f6058185 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 16:04:02 +0000 Subject: [PATCH 1104/1339] chore(main): release 1.14.2 (#1933) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ab9a8bf8feef..a74fb9a25de3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.14.2](https://github.com/googleapis/gapic-generator-python/compare/v1.14.1...v1.14.2) (2024-02-06) + + +### Bug Fixes + +* Fix `ValueError` in `test__validate_universe_domain` ([#1931](https://github.com/googleapis/gapic-generator-python/issues/1931)) ([ce855a8](https://github.com/googleapis/gapic-generator-python/commit/ce855a866c43e80fb3f9ba3b295c83afcd6297b7)) + ## [1.14.1](https://github.com/googleapis/gapic-generator-python/compare/v1.14.0...v1.14.1) (2024-02-02) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 6ca6850f621d..31896f2e9cb1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.14.1" +version = "1.14.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 95218c2e66e77be008e69324c1e0f9715f796e03 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 14 Feb 2024 11:28:40 -0500 Subject: [PATCH 1105/1339] fix: fix TypeError: MessageToJson() got an unexpected keyword argument 'including_default_value_fields' (#1936) --- .../services/%service/transports/rest.py.j2 | 2 -- .../%name_%version/%sub/test_%service.py.j2 | 1 - .../services/%service/transports/rest.py.j2 | 2 -- .../gapic/%name_%version/%sub/test_macros.j2 | 1 - .../services/asset_service/transports/rest.py | 30 ------------------- .../unit/gapic/asset_v1/test_asset_service.py | 23 -------------- .../iam_credentials/transports/rest.py | 8 ----- .../credentials_v1/test_iam_credentials.py | 4 --- .../services/eventarc/transports/rest.py | 24 --------------- .../unit/gapic/eventarc_v1/test_eventarc.py | 18 ----------- .../services/cloud_redis/transports/rest.py | 
18 ----------- .../unit/gapic/redis_v1/test_cloud_redis.py | 11 ------- 12 files changed, 142 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 3fb7f35be08b..5a2d2f5b3480 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -335,7 +335,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }} ) {%- endif %} @@ -346,7 +345,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }}, )) {% if method.input.required_fields %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index fdcf2b3351a8..f49cac457926 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1201,7 +1201,6 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 2303760fb17a..da7308589224 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -377,7 +377,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }} ) {%- endif %} @@ -388,7 +387,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums={{ opts.rest_numeric_enums }}, )) {% if method.input.required_fields %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 96ee8639b40e..40238952b9a3 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1090,7 +1090,6 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide {% endif %} jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index ea64ef9d1c75..d3f32dce5148 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -807,7 +807,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -887,7 +886,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -896,7 +894,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -974,7 +971,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1053,7 +1049,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1133,7 +1128,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, 
use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1213,7 +1207,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1289,7 +1282,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1369,7 +1361,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1452,7 +1443,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1461,7 +1451,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1539,7 +1528,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1548,7 +1536,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ 
-1621,7 +1608,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1685,7 +1671,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1755,7 +1740,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1764,7 +1748,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1847,7 +1830,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1925,7 +1907,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2001,7 +1982,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2077,7 
+2057,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2153,7 +2132,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2228,7 +2206,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -2237,7 +2214,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2314,7 +2290,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2390,7 +2365,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2473,7 +2447,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -2482,7 +2455,6 @@ def __call__(self, # Jsonify the query params query_params = 
json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2560,7 +2532,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -2569,7 +2540,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index d92af5254182..2590301a34e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -7088,7 +7088,6 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7269,7 +7268,6 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7559,7 +7557,6 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - 
including_default_value_fields=False, use_integers_for_enums=False )) @@ -7752,7 +7749,6 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7999,7 +7995,6 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8232,7 +8227,6 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8474,7 +8468,6 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8701,7 +8694,6 @@ def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8926,7 +8918,6 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -9222,7 +9213,6 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - 
including_default_value_fields=False, use_integers_for_enums=False )) @@ -9515,7 +9505,6 @@ def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -9689,7 +9678,6 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -9865,7 +9853,6 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -10061,7 +10048,6 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -10315,7 +10301,6 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -10573,7 +10558,6 @@ def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -10808,7 +10792,6 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - 
including_default_value_fields=False, use_integers_for_enums=False )) @@ -11169,7 +11152,6 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -11400,7 +11382,6 @@ def test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -11624,7 +11605,6 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -11819,7 +11799,6 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -12126,7 +12105,6 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -12433,7 +12411,6 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index d48dc503440f..ca0b0c649e4b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -324,7 +324,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -333,7 +332,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -409,7 +407,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -418,7 +415,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -494,7 +490,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -503,7 +498,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -579,7 +573,6 @@ def __call__(self, body = 
json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -588,7 +581,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 60fd57cb64e0..f57329dfaf90 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1837,7 +1837,6 @@ def test_generate_access_token_rest_required_fields(request_type=common.Generate pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -2083,7 +2082,6 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -2331,7 +2329,6 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -2577,7 +2574,6 @@ def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): pb_request = request_type.pb(request) jsonified_request = 
json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index b48f92031749..8a51ab2e8f3e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -900,7 +900,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -909,7 +908,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -987,7 +985,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -996,7 +993,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1074,7 +1070,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1083,7 +1078,6 @@ def __call__(self, # Jsonify the query params query_params = 
json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1162,7 +1156,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1240,7 +1233,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1318,7 +1310,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1401,7 +1392,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1485,7 +1475,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1570,7 +1559,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ 
-1649,7 +1637,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1728,7 +1715,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1807,7 +1793,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1884,7 +1869,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1961,7 +1945,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2038,7 +2021,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2117,7 +2099,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -2126,7 
+2107,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2210,7 +2190,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -2219,7 +2198,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -2299,7 +2277,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -2308,7 +2285,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 9af49b1f2177..51fba7e54336 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -5947,7 +5947,6 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - 
including_default_value_fields=False, use_integers_for_enums=False )) @@ -6184,7 +6183,6 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -6537,7 +6535,6 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -6854,7 +6851,6 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7095,7 +7091,6 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7351,7 +7346,6 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7588,7 +7582,6 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -7941,7 +7934,6 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - 
including_default_value_fields=False, use_integers_for_enums=False )) @@ -8258,7 +8250,6 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8497,7 +8488,6 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8742,7 +8732,6 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -8979,7 +8968,6 @@ def test_list_providers_rest_required_fields(request_type=eventarc.ListProviders pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -9277,7 +9265,6 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -9514,7 +9501,6 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -9866,7 +9852,6 @@ def test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - 
including_default_value_fields=False, use_integers_for_enums=False )) @@ -10109,7 +10094,6 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -10342,7 +10326,6 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -10642,7 +10625,6 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 450507cf954c..a02c1b609996 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -681,7 +681,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -690,7 +689,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -769,7 +767,6 @@ def 
__call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -846,7 +843,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -855,7 +851,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -933,7 +928,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -942,7 +936,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1018,7 +1011,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1095,7 +1087,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1174,7 +1165,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - 
including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1183,7 +1173,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1261,7 +1250,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1340,7 +1328,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1349,7 +1336,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1427,7 +1413,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1436,7 +1421,6 @@ def __call__(self, # Jsonify the query params query_params = json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) @@ -1514,7 +1498,6 @@ def __call__(self, body = json_format.MessageToJson( transcoded_request['body'], - including_default_value_fields=False, use_integers_for_enums=False ) uri = transcoded_request['uri'] @@ -1523,7 +1506,6 @@ def __call__(self, # Jsonify the query params query_params = 
json.loads(json_format.MessageToJson( transcoded_request['query_params'], - including_default_value_fields=False, use_integers_for_enums=False, )) query_params.update(self._get_unset_required_fields(query_params)) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index a752b6b27d78..a990edad8b6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -3739,7 +3739,6 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -4083,7 +4082,6 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -4318,7 +4316,6 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -4613,7 +4610,6 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -4919,7 +4915,6 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -5147,7 +5142,6 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -5379,7 +5373,6 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -5608,7 +5601,6 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -5837,7 +5829,6 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -6066,7 +6057,6 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns pb_request = 
request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) @@ -6292,7 +6282,6 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re pb_request = request_type.pb(request) jsonified_request = json.loads(json_format.MessageToJson( pb_request, - including_default_value_fields=False, use_integers_for_enums=False )) From 14e95e98f8d5cf0c13fefa7d383a9b03636c792b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 14 Feb 2024 12:54:51 -0500 Subject: [PATCH 1106/1339] fix: require google-api-core 1.34.1 (#1942) --- .../gapic/templates/setup.py.j2 | 2 +- .../templates/testing/constraints-3.7.txt.j2 | 2 +- packages/gapic-generator/requirements.in | 1 - packages/gapic-generator/requirements.txt | 39 +++++---- packages/gapic-generator/setup.py | 2 +- .../tests/fragments/google/api/httpbody.proto | 81 +++++++++++++++++++ .../tests/fragments/test_rest_streaming.proto | 17 +++- .../tests/integration/goldens/asset/setup.py | 2 +- .../goldens/asset/testing/constraints-3.7.txt | 2 +- .../integration/goldens/credentials/setup.py | 2 +- .../credentials/testing/constraints-3.7.txt | 2 +- .../integration/goldens/eventarc/setup.py | 2 +- .../eventarc/testing/constraints-3.7.txt | 2 +- .../integration/goldens/logging/setup.py | 2 +- .../logging/testing/constraints-3.7.txt | 2 +- .../tests/integration/goldens/redis/setup.py | 2 +- .../goldens/redis/testing/constraints-3.7.txt | 2 +- 17 files changed, 131 insertions(+), 33 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/google/api/httpbody.proto diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 551c611d63ff..9bd523e37444 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -30,7 +30,7 @@ else: release_status = 
"Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index abc9e8ae92e0..6975b517a21d 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -5,7 +5,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 +google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in index 8dc2b43cf304..db955dc7ec22 100644 --- a/packages/gapic-generator/requirements.in +++ b/packages/gapic-generator/requirements.in @@ -6,7 +6,6 @@ MarkupSafe protobuf pypandoc PyYAML -setuptools grpc-google-iam-v1 proto-plus pytest-asyncio diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 831da9778db1..402414931277 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --generate-hashes requirements.in # cachetools==5.3.2 \ 
--hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ @@ -112,9 +112,9 @@ exceptiongroup==1.2.0 \ --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \ --hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 # via pytest -google-api-core==2.16.2 \ - --hash=sha256:032d37b45d1d6bdaf68fb11ff621e2593263a239fa9246e2e94325f9c47876d2 \ - --hash=sha256:449ca0e3f14c179b4165b664256066c7861610f70b6ffe54bb01a04e9b466929 +google-api-core==2.17.1 \ + --hash=sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e \ + --hash=sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95 # via -r requirements.in google-auth==2.27.0 \ --hash=sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245 \ @@ -238,7 +238,7 @@ libcst==1.1.0 \ --hash=sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c \ --hash=sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90 # via -r requirements.in -MarkupSafe==2.1.5 \ +markupsafe==2.1.5 \ --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ @@ -350,16 +350,18 @@ pypandoc==1.12 \ --hash=sha256:8f44740a9f074e121d81b489f073160421611d4ead62d1b306aeb11aab3c32df \ --hash=sha256:efb4f7d68ead8bec32e22b62f02d5608a1700978b51bfc4af286fd6acfe9d218 # via -r requirements.in -pytest==8.0.0 \ - --hash=sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c \ - --hash=sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6 +pytest==7.4.4 \ + --hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \ + --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 # via pytest-asyncio -pytest-asyncio==0.23.4 \ - 
--hash=sha256:2143d9d9375bf372a73260e4114541485e84fca350b0b6b92674ca56ff5f7ea2 \ - --hash=sha256:b0079dfac14b60cd1ce4691fbfb1748fe939db7d0234b5aba97197d10fbe0fef +pytest-asyncio==0.23.5 \ + --hash=sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675 \ + --hash=sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac # via -r requirements.in pyyaml==6.0.1 \ + --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ + --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \ --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ @@ -367,7 +369,10 @@ pyyaml==6.0.1 \ --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ + --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \ + --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \ --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ + --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \ --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ @@ -375,11 +380,15 @@ pyyaml==6.0.1 \ --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ 
--hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ + --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \ --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ + --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \ + --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \ --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ + --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \ --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ @@ -392,7 +401,9 @@ pyyaml==6.0.1 \ --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ + --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \ --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ + --hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \ --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ @@ -428,9 +439,3 @@ urllib3==2.2.0 \ --hash=sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20 \ 
--hash=sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224 # via requests - -# The following packages are considered to be unsafe in a requirements file: -setuptools==69.0.3 \ - --hash=sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05 \ - --hash=sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78 - # via -r requirements.in diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 31896f2e9cb1..8fb3e5c3acf1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -28,7 +28,7 @@ # Ensure that the lower bounds of these dependencies match what we have in the # templated setup.py.j2: https://github.com/googleapis/gapic-generator-python/blob/main/gapic/templates/setup.py.j2 "click >= 6.7", - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", "jinja2 >= 2.10", diff --git a/packages/gapic-generator/tests/fragments/google/api/httpbody.proto b/packages/gapic-generator/tests/fragments/google/api/httpbody.proto new file mode 100644 index 000000000000..7f1685e8089c --- /dev/null +++ b/packages/gapic-generator/tests/fragments/google/api/httpbody.proto @@ -0,0 +1,81 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/httpbody;httpbody"; +option java_multiple_files = true; +option java_outer_classname = "HttpBodyProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Message that represents an arbitrary HTTP body. It should only be used for +// payload formats that can't be represented as JSON, such as raw binary or +// an HTML page. +// +// +// This message can be used both in streaming and non-streaming API methods in +// the request as well as the response. +// +// It can be used as a top-level request field, which is convenient if one +// wants to extract parameters from either the URL or HTTP template into the +// request fields and also want access to the raw HTTP body. +// +// Example: +// +// message GetResourceRequest { +// // A unique request id. +// string request_id = 1; +// +// // The raw HTTP body is bound to this field. +// google.api.HttpBody http_body = 2; +// +// } +// +// service ResourceService { +// rpc GetResource(GetResourceRequest) +// returns (google.api.HttpBody); +// rpc UpdateResource(google.api.HttpBody) +// returns (google.protobuf.Empty); +// +// } +// +// Example with streaming methods: +// +// service CaldavService { +// rpc GetCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// rpc UpdateCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// +// } +// +// Use of this type only changes how the request and response bodies are +// handled, all other features will continue to work unchanged. +message HttpBody { + // The HTTP Content-Type header value specifying the content type of the body. + string content_type = 1; + + // The HTTP request/response body as raw binary. 
+ bytes data = 2; + + // Application specific response metadata. Must be set in the first response + // for streaming APIs. + repeated google.protobuf.Any extensions = 3; +} diff --git a/packages/gapic-generator/tests/fragments/test_rest_streaming.proto b/packages/gapic-generator/tests/fragments/test_rest_streaming.proto index b47d2030b94d..2055144005cb 100644 --- a/packages/gapic-generator/tests/fragments/test_rest_streaming.proto +++ b/packages/gapic-generator/tests/fragments/test_rest_streaming.proto @@ -16,12 +16,26 @@ syntax = "proto3"; package google.fragment; +import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/httpbody.proto"; service MyService { option (google.api.default_host) = "my.example.com"; - rpc MyMethod(MethodRequest) returns (stream MethodResponse) { + rpc MyMethodApiSpecificResponse(MethodRequest) returns (stream MethodResponse) { + option (google.api.http) = { + post: "/v1/{from=projects/*}" + body: "*" + }; + option (google.api.method_signature) = "from,class,import,any,license,type"; + } + + rpc MyMethodCommonResponse(MethodRequest) returns (stream google.api.HttpBody) { + option (google.api.http) = { + post: "/v1/{from=projects/*}" + body: "*" + }; option (google.api.method_signature) = "from,class,import,any,license,type"; } } @@ -39,5 +53,4 @@ message MethodRequest { message MethodResponse { string result = 1; - string next_page_token = 2; } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 221455ac7949..58de46c7e096 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -39,7 +39,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + 
"google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index 04f6609fa953..fa6caa7b094e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 +google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 5e5cbf584aa2..ee7848a52185 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -39,7 +39,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt 
b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index 2c74b9860b39..b8a550c73855 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 +google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index c699f1b11999..20038709ee67 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -39,7 +39,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 712d45b4c5ea..4cd2782277d4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 +google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 459119e8e5be..c4ee0f3d6399 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -39,7 +39,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index 2c74b9860b39..b8a550c73855 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 +google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 970ef113f31e..e841c91d823c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -39,7 +39,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "google-auth >= 2.14.1, <3.0.0dev", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index 2c74b9860b39..b8a550c73855 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 +google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.19.5 From 92dc8af16b2e5ac713ee5be0d00c19c7e9bc5820 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 14 Feb 2024 13:00:22 -0500 Subject: [PATCH 1107/1339] ci: update `gapic-showcase` version to 0.30.0 (#1932) --- packages/gapic-generator/.github/workflows/tests.yaml | 2 +- packages/gapic-generator/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 34da448d4d0d..da3eaf29c53a 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -14,7 +14,7 @@ concurrency: cancel-in-progress: true env: - SHOWCASE_VERSION: 0.25.0 + SHOWCASE_VERSION: 0.30.0 PROTOC_VERSION: 3.20.2 jobs: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 2567626e256f..4c283e47f0c7 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.25.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.30.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") From 3e5b0b5cc04191b36ca04be9c721590d8aaa3f04 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 14 Feb 2024 19:00:41 +0100 Subject: [PATCH 1108/1339] chore(deps): update dependency pytest to v8 (#1941) --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 402414931277..58ee28e6f484 100644 --- 
a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -350,9 +350,9 @@ pypandoc==1.12 \ --hash=sha256:8f44740a9f074e121d81b489f073160421611d4ead62d1b306aeb11aab3c32df \ --hash=sha256:efb4f7d68ead8bec32e22b62f02d5608a1700978b51bfc4af286fd6acfe9d218 # via -r requirements.in -pytest==7.4.4 \ - --hash=sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280 \ - --hash=sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8 +pytest==8.0.0 \ + --hash=sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c \ + --hash=sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6 # via pytest-asyncio pytest-asyncio==0.23.5 \ --hash=sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675 \ From 57ad742bbc5793bb67196c8382b68b547e112aa4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 14 Feb 2024 13:00:48 -0500 Subject: [PATCH 1109/1339] =?UTF-8?q?fix:=20resolve=20issue=20with=20missi?= =?UTF-8?q?ng=20import=20for=20certain=20enums=20in=20**/types/=E2=80=A6?= =?UTF-8?q?=20(#1944)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../%namespace/%name_%version/%sub/types/__init__.py.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 index d5c74e888424..bec74ebf9077 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/__init__.py.j2 @@ -2,7 +2,7 @@ {% block content %} -{% for _, proto in api.protos|dictsort if proto.file_to_generate and proto.messages %} +{% for _, proto in api.protos|dictsort if proto.file_to_generate and proto.messages or proto.enums %} from 
.{{proto.module_name }} import ( {% for _, message in proto.messages|dictsort %} {{message.name }}, From a88d262268f620f3b50869484be211530eb5e735 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 14 Feb 2024 18:04:52 +0000 Subject: [PATCH 1110/1339] chore(main): release 1.14.3 (#1947) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a74fb9a25de3..186217aa395c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.14.3](https://github.com/googleapis/gapic-generator-python/compare/v1.14.2...v1.14.3) (2024-02-14) + + +### Bug Fixes + +* Fix TypeError: MessageToJson() got an unexpected keyword argument 'including_default_value_fields' ([#1936](https://github.com/googleapis/gapic-generator-python/issues/1936)) ([12734ff](https://github.com/googleapis/gapic-generator-python/commit/12734ffb37ab005d556f7b7a23219701d404487d)) +* Require google-api-core 1.34.1 ([#1942](https://github.com/googleapis/gapic-generator-python/issues/1942)) ([6c176a3](https://github.com/googleapis/gapic-generator-python/commit/6c176a3277c935bf5b02ce2d1a8b1ef7503c338a)) +* Resolve issue with missing import for certain enums in **/types/… ([#1944](https://github.com/googleapis/gapic-generator-python/issues/1944)) ([97a4eed](https://github.com/googleapis/gapic-generator-python/commit/97a4eedeee196568008ce6612abbce566d9ad5a0)) + ## [1.14.2](https://github.com/googleapis/gapic-generator-python/compare/v1.14.1...v1.14.2) (2024-02-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 8fb3e5c3acf1..7c9c97521f41 100644 --- 
a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.14.2" +version = "1.14.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 687b16a7ff54fd0ae3ab36a82d3f74df3741f190 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 18:50:33 -0500 Subject: [PATCH 1111/1339] build(deps): bump cryptography from 42.0.0 to 42.0.2 in .kokoro (#1950) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 66 +++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 2aefd0e91175..51213ca00ee3 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 +# created: 2024-02-17T12:21:23.177926195Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 8c11c9f3e9b6..f80bdcd62981 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - 
--hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.2 \ + --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ + --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ + --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ + --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ + --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ + --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ + 
--hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ + --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ + --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ + --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ + --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ + --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ + --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ + --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ + --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ + --hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ + --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ + --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ + --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ + --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ + --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ + --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ + --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ + --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ + --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ + --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ + --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ + --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ + --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ + --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ + 
--hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ + --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f # via # gcp-releasetool # secretstorage From 7379fd305ecdc827895643831d8954c322093d45 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Feb 2024 09:32:19 -0800 Subject: [PATCH 1112/1339] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#1958) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.kokoro/requirements.txt | 66 +++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 51213ca00ee3..e4e943e0259a 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 -# created: 2024-02-17T12:21:23.177926195Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index f80bdcd62981..bda8e38c4f31 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.2 \ - --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ - --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ - --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ - --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ - --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ - --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ - --hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ - --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ - --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ - --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ - --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ - --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ - --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ - --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ - --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ - 
--hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ - --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ - --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ - --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ - --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ - --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ - --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ - --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ - --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ - --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ - --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ - --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ - --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ - --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ - --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ - --hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ - --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + 
--hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + 
--hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From 9ee787125bf122732a95f6ce26c3a4218f50eb2b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 29 Feb 2024 12:38:34 -0500 Subject: [PATCH 1113/1339] fix: exclude google-auth 2.24.0 and 2.25.0 (#1957) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 4 +++- .../gapic-generator/tests/integration/goldens/asset/setup.py | 4 +++- .../tests/integration/goldens/credentials/setup.py | 4 +++- .../tests/integration/goldens/eventarc/setup.py | 4 +++- .../tests/integration/goldens/logging/setup.py | 4 +++- .../gapic-generator/tests/integration/goldens/redis/setup.py | 4 +++- 6 files changed, 18 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 9bd523e37444..667e166d5e03 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -31,7 +31,9 @@ else: dependencies = [ "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-auth >= 2.14.1, <3.0.0dev", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 58de46c7e096..7433d7afe293 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -40,7 +40,9 @@ dependencies = [ "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-auth >= 2.14.1, <3.0.0dev", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index ee7848a52185..6aa26da126e0 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -40,7 +40,9 @@ dependencies = [ "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-auth >= 2.14.1, <3.0.0dev", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 20038709ee67..fdf2488ed7b2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -40,7 +40,9 @@ dependencies = [ 
"google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-auth >= 2.14.1, <3.0.0dev", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index c4ee0f3d6399..c7c8d2b65b65 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -40,7 +40,9 @@ dependencies = [ "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-auth >= 2.14.1, <3.0.0dev", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index e841c91d823c..10ab2419a611 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -40,7 +40,9 @@ dependencies = [ "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "google-auth >= 2.14.1, <3.0.0dev", + # Exclude incompatible versions of `google-auth` + # See 
https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] From 02733a1678cb2cce155ca9e66537af15436cbe1b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 29 Feb 2024 14:46:50 -0500 Subject: [PATCH 1114/1339] fix: add `google-apps-card` dependency (#1964) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 3e8345d1fd8e..b5e420632a5b 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -6,6 +6,7 @@ PyPI package name, the minimum allowed version and the maximum allowed version. Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has support for `barcode` in `google.cloud.documentai.types` --> {% set pypi_packages = { + ("google", "apps", "card"): {"package_name": "google-apps-card", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, From 5c0fbbd2251479f1eede222b40b319b289839a87 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 29 Feb 2024 20:47:40 +0100 Subject: [PATCH 1115/1339] chore(deps): update all dependencies (#1951) --- packages/gapic-generator/WORKSPACE | 6 +- packages/gapic-generator/requirements.txt | 228 +++++++++++----------- 2 files changed, 114 
insertions(+), 120 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 6983103dd710..1663b1e3499f 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "8ff511a64fc46ee792d3fe49a5a1bcad6f7dc50dfbba5a28b0e5b979c17f9871", - strip_prefix = "protobuf-25.2", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v25.2.tar.gz"], + sha256 = "d19643d265b978383352b3143f04c0641eea75a75235c111cc01a1350173180e", + strip_prefix = "protobuf-25.3", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v25.3.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 58ee28e6f484..2661e6d21eaa 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --generate-hashes requirements.in # -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth certifi==2024.2.2 \ --hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \ @@ -116,9 +116,9 @@ google-api-core==2.17.1 \ --hash=sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e \ --hash=sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95 # via -r requirements.in -google-auth==2.27.0 \ - --hash=sha256:8e4bad367015430ff253fe49d500fdc3396c1a434db5740828c728e45bcce245 \ - 
--hash=sha256:e863a56ccc2d8efa83df7a80272601e43487fa9a728a376205c86c26aaefa821 +google-auth==2.28.1 \ + --hash=sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72 \ + --hash=sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885 # via google-api-core googleapis-common-protos[grpc]==1.62.0 \ --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ @@ -131,61 +131,61 @@ grpc-google-iam-v1==0.13.0 \ --hash=sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89 \ --hash=sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e # via -r requirements.in -grpcio==1.60.1 \ - --hash=sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8 \ - --hash=sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8 \ - --hash=sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104 \ - --hash=sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc \ - --hash=sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092 \ - --hash=sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1 \ - --hash=sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180 \ - --hash=sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05 \ - --hash=sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287 \ - --hash=sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216 \ - --hash=sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c \ - --hash=sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d \ - --hash=sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594 \ - --hash=sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c \ - --hash=sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87 \ - --hash=sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de \ - 
--hash=sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9 \ - --hash=sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2 \ - --hash=sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff \ - --hash=sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904 \ - --hash=sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73 \ - --hash=sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549 \ - --hash=sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f \ - --hash=sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c \ - --hash=sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0 \ - --hash=sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e \ - --hash=sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367 \ - --hash=sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc \ - --hash=sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303 \ - --hash=sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb \ - --hash=sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c \ - --hash=sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804 \ - --hash=sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03 \ - --hash=sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6 \ - --hash=sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f \ - --hash=sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0 \ - --hash=sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd \ - --hash=sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21 \ - --hash=sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858 \ - --hash=sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce \ - 
--hash=sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9 \ - --hash=sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23 \ - --hash=sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073 \ - --hash=sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2 \ - --hash=sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872 \ - --hash=sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe \ - --hash=sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7 \ - --hash=sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6 \ - --hash=sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929 \ - --hash=sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962 \ - --hash=sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a \ - --hash=sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a \ - --hash=sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177 \ - --hash=sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525 +grpcio==1.62.0 \ + --hash=sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701 \ + --hash=sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e \ + --hash=sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532 \ + --hash=sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a \ + --hash=sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271 \ + --hash=sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f \ + --hash=sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7 \ + --hash=sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b \ + --hash=sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4 \ + 
--hash=sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839 \ + --hash=sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6 \ + --hash=sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb \ + --hash=sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e \ + --hash=sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93 \ + --hash=sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f \ + --hash=sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6 \ + --hash=sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1 \ + --hash=sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35 \ + --hash=sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842 \ + --hash=sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6 \ + --hash=sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c \ + --hash=sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873 \ + --hash=sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7 \ + --hash=sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c \ + --hash=sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928 \ + --hash=sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9 \ + --hash=sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c \ + --hash=sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021 \ + --hash=sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa \ + --hash=sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38 \ + --hash=sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc \ + --hash=sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe \ + --hash=sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8 \ + 
--hash=sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402 \ + --hash=sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388 \ + --hash=sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0 \ + --hash=sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b \ + --hash=sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b \ + --hash=sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2 \ + --hash=sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b \ + --hash=sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5 \ + --hash=sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829 \ + --hash=sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2 \ + --hash=sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6 \ + --hash=sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd \ + --hash=sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0 \ + --hash=sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8 \ + --hash=sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334 \ + --hash=sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee \ + --hash=sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c \ + --hash=sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72 \ + --hash=sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4 \ + --hash=sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270 \ + --hash=sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -205,38 +205,32 @@ jinja2==3.1.3 \ --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via -r 
requirements.in -libcst==1.1.0 \ - --hash=sha256:003e5e83a12eed23542c4ea20fdc8de830887cc03662432bb36f84f8c4841b81 \ - --hash=sha256:0acbacb9a170455701845b7e940e2d7b9519db35a86768d86330a0b0deae1086 \ - --hash=sha256:0bf69cbbab5016d938aac4d3ae70ba9ccb3f90363c588b3b97be434e6ba95403 \ - --hash=sha256:2d37326bd6f379c64190a28947a586b949de3a76be00176b0732c8ee87d67ebe \ - --hash=sha256:3a07ecfabbbb8b93209f952a365549e65e658831e9231649f4f4e4263cad24b1 \ - --hash=sha256:3ebbb9732ae3cc4ae7a0e97890bed0a57c11d6df28790c2b9c869f7da653c7c7 \ - --hash=sha256:4bc745d0c06420fe2644c28d6ddccea9474fb68a2135904043676deb4fa1e6bc \ - --hash=sha256:5297a16e575be8173185e936b7765c89a3ca69d4ae217a4af161814a0f9745a7 \ - --hash=sha256:5f1cd308a4c2f71d5e4eec6ee693819933a03b78edb2e4cc5e3ad1afd5fb3f07 \ - --hash=sha256:63f75656fd733dc20354c46253fde3cf155613e37643c3eaf6f8818e95b7a3d1 \ - --hash=sha256:73c086705ed34dbad16c62c9adca4249a556c1b022993d511da70ea85feaf669 \ - --hash=sha256:75816647736f7e09c6120bdbf408456f99b248d6272277eed9a58cf50fb8bc7d \ - --hash=sha256:78b7a38ec4c1c009ac39027d51558b52851fb9234669ba5ba62283185963a31c \ - --hash=sha256:7ccaf53925f81118aeaadb068a911fac8abaff608817d7343da280616a5ca9c1 \ - --hash=sha256:82d1271403509b0a4ee6ff7917c2d33b5a015f44d1e208abb1da06ba93b2a378 \ - --hash=sha256:8ae11eb1ea55a16dc0cdc61b41b29ac347da70fec14cc4381248e141ee2fbe6c \ - --hash=sha256:8afb6101b8b3c86c5f9cec6b90ab4da16c3c236fe7396f88e8b93542bb341f7c \ - --hash=sha256:8c1f2da45f1c45634090fd8672c15e0159fdc46853336686959b2d093b6e10fa \ - --hash=sha256:97fbc73c87e9040e148881041fd5ffa2a6ebf11f64b4ccb5b52e574b95df1a15 \ - --hash=sha256:99fdc1929703fd9e7408aed2e03f58701c5280b05c8911753a8d8619f7dfdda5 \ - --hash=sha256:9dffa1795c2804d183efb01c0f1efd20a7831db6a21a0311edf90b4100d67436 \ - --hash=sha256:bca1841693941fdd18371824bb19a9702d5784cd347cb8231317dbdc7062c5bc \ - --hash=sha256:c653d9121d6572d8b7f8abf20f88b0a41aab77ff5a6a36e5a0ec0f19af0072e8 \ - 
--hash=sha256:c8f26250f87ca849a7303ed7a4fd6b2c7ac4dec16b7d7e68ca6a476d7c9bfcdb \ - --hash=sha256:cc9b6ac36d7ec9db2f053014ea488086ca2ed9c322be104fbe2c71ca759da4bb \ - --hash=sha256:d22d1abfe49aa60fc61fa867e10875a9b3024ba5a801112f4d7ba42d8d53242e \ - --hash=sha256:d68c34e3038d3d1d6324eb47744cbf13f2c65e1214cf49db6ff2a6603c1cd838 \ - --hash=sha256:e3d8cf974cfa2487b28f23f56c4bff90d550ef16505e58b0dca0493d5293784b \ - --hash=sha256:f36f592e035ef84f312a12b75989dde6a5f6767fe99146cdae6a9ee9aff40dd0 \ - --hash=sha256:f561c9a84eca18be92f4ad90aa9bd873111efbea995449301719a1a7805dbc5c \ - --hash=sha256:fe41b33aa73635b1651f64633f429f7aa21f86d2db5748659a99d9b7b1ed2a90 +libcst==1.2.0 \ + --hash=sha256:0cb92398236566f0b73a0c73f8a41a9c4906c793e8f7c2745f30e3fb141a34b5 \ + --hash=sha256:13ca9fe82326d82feb2c7b0f5a320ce7ed0d707c32919dd36e1f40792459bf6f \ + --hash=sha256:1b5fecb2b26fa3c1efe6e05ef1420522bd31bb4dae239e4c41fdf3ddbd853aeb \ + --hash=sha256:1d45718f7e7a1405a16fd8e7fc75c365120001b6928bfa3c4112f7e533990b9a \ + --hash=sha256:2bbb4e442224da46b59a248d7d632ed335eae023a921dea1f5c72d2a059f6be9 \ + --hash=sha256:38fbd56f885e1f77383a6d1d798a917ffbc6d28dc6b1271eddbf8511c194213e \ + --hash=sha256:3c7c0edfe3b878d64877671261c7b3ffe9d23181774bfad5d8fcbdbbbde9f064 \ + --hash=sha256:4973a9d509cf1a59e07fac55a98f70bc4fd35e09781dffb3ec93ee32fc0de7af \ + --hash=sha256:5c0d548d92c6704bb07ce35d78c0e054cdff365def0645c1b57c856c8e112bb4 \ + --hash=sha256:5e54389abdea995b39ee96ad736ed1b0b8402ed30a7956b7a279c10baf0c0294 \ + --hash=sha256:6dd388c74c04434b41e3b25fc4a0fafa3e6abf91f97181df55e8f8327fd903cc \ + --hash=sha256:71dd69fff76e7edaf8fae0f63ffcdbf5016e8cd83165b1d0688d6856aa48186a \ + --hash=sha256:7f4919978c2b395079b64d8a654357854767adbabab13998b39c1f0bc67da8a7 \ + --hash=sha256:82373a35711a8bb2a664dba2b7aeb20bbcce92a4db40af964e9cb2b976f989e7 \ + --hash=sha256:8b56130f18aca9a98b3bcaf5962b2b26c2dcdd6d5132decf3f0b0b635f4403ba \ + 
--hash=sha256:968b93400e66e6711a29793291365e312d206dbafd3fc80219cfa717f0f01ad5 \ + --hash=sha256:b4066dcadf92b183706f81ae0b4342e7624fc1d9c5ca2bf2b44066cb74bf863f \ + --hash=sha256:ba24b8cf789db6b87c6e23a6c6365f5f73cb7306d929397581d5680149e9990c \ + --hash=sha256:c0149d24a455536ff2e41b3a48b16d3ebb245e28035013c91bd868def16592a0 \ + --hash=sha256:c80f36f4a02d530e28eac7073aabdea7c6795fc820773a02224021d79d164e8b \ + --hash=sha256:dded0e4f2e18150c4b07fedd7ef84a9abc7f9bd2d47cc1c485248ee1ec58e5cc \ + --hash=sha256:dece0362540abfc39cd2cf5c98cde238b35fd74a1b0167e2563e4b8bb5f47489 \ + --hash=sha256:e01879aa8cd478bb8b1e4285cfd0607e64047116f7ab52bc2a787cde584cd686 \ + --hash=sha256:f080e9af843ff609f8f35fc7275c8bf08b02c31115e7cd5b77ca3b6a56c75096 \ + --hash=sha256:f2342634f6c61fc9076dc0baf21e9cf5ef0195a06e1e95c0c9dc583ba3a30d00 # via -r requirements.in markupsafe==2.1.5 \ --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ @@ -318,18 +312,18 @@ proto-plus==1.23.0 \ --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ --hash=sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c # via -r requirements.in -protobuf==4.25.2 \ - --hash=sha256:10894a2885b7175d3984f2be8d9850712c57d5e7587a2410720af8be56cdaf62 \ - --hash=sha256:2db9f8fa64fbdcdc93767d3cf81e0f2aef176284071507e3ede160811502fd3d \ - --hash=sha256:33a1aeef4b1927431d1be780e87b641e322b88d654203a9e9d93f218ee359e61 \ - --hash=sha256:47f3de503fe7c1245f6f03bea7e8d3ec11c6c4a2ea9ef910e3221c8a15516d62 \ - --hash=sha256:5e5c933b4c30a988b52e0b7c02641760a5ba046edc5e43d3b94a74c9fc57c1b3 \ - --hash=sha256:8f62574857ee1de9f770baf04dde4165e30b15ad97ba03ceac65f760ff018ac9 \ - --hash=sha256:a8b7a98d4ce823303145bf3c1a8bdb0f2f4642a414b196f04ad9853ed0c8f830 \ - --hash=sha256:b50c949608682b12efb0b2717f53256f03636af5f60ac0c1d900df6213910fd6 \ - --hash=sha256:d66a769b8d687df9024f2985d5137a337f957a0916cf5464d1513eee96a63ff0 \ - 
--hash=sha256:fc381d1dd0516343f1440019cedf08a7405f791cd49eef4ae1ea06520bc1c020 \ - --hash=sha256:fe599e175cb347efc8ee524bcd4b902d11f7262c0e569ececcb89995c15f0a5e +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # -r requirements.in # google-api-core @@ -346,13 +340,13 @@ pyasn1-modules==0.3.0 \ --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth -pypandoc==1.12 \ - --hash=sha256:8f44740a9f074e121d81b489f073160421611d4ead62d1b306aeb11aab3c32df \ - --hash=sha256:efb4f7d68ead8bec32e22b62f02d5608a1700978b51bfc4af286fd6acfe9d218 +pypandoc==1.13 \ + --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ + --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 # via -r requirements.in -pytest==8.0.0 \ - --hash=sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c \ - --hash=sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6 +pytest==8.0.2 \ + 
--hash=sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd \ + --hash=sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096 # via pytest-asyncio pytest-asyncio==0.23.5 \ --hash=sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675 \ @@ -425,9 +419,9 @@ tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via pytest -typing-extensions==4.9.0 \ - --hash=sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783 \ - --hash=sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd +typing-extensions==4.10.0 \ + --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \ + --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb # via # libcst # typing-inspect @@ -435,7 +429,7 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.2.0 \ - --hash=sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20 \ - --hash=sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224 +urllib3==2.2.1 \ + --hash=sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d \ + --hash=sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19 # via requests From 050647b195ca62e6cd3fc6d54e1bd06012c37519 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 29 Feb 2024 19:53:41 +0000 Subject: [PATCH 1116/1339] chore(main): release 1.14.4 (#1963) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files 
changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 186217aa395c..4816ae39bacf 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.14.4](https://github.com/googleapis/gapic-generator-python/compare/v1.14.3...v1.14.4) (2024-02-29) + + +### Bug Fixes + +* Add `google-apps-card` dependency ([#1964](https://github.com/googleapis/gapic-generator-python/issues/1964)) ([d4d51d4](https://github.com/googleapis/gapic-generator-python/commit/d4d51d4e84342065f34ef584abbe4b99404de574)) +* Exclude google-auth 2.24.0 and 2.25.0 ([#1957](https://github.com/googleapis/gapic-generator-python/issues/1957)) ([abe8de3](https://github.com/googleapis/gapic-generator-python/commit/abe8de30086506f4694c5c5e4bae08fae8525e4b)) + ## [1.14.3](https://github.com/googleapis/gapic-generator-python/compare/v1.14.2...v1.14.3) (2024-02-14) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 7c9c97521f41..001aa1f122e9 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.14.3" +version = "1.14.4" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 6f346a6cdb5bbccbb244697b77dae643d026c36d Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Thu, 29 Feb 2024 15:14:21 -0800 Subject: [PATCH 1117/1339] chore: update copyright year (#1959) --- packages/gapic-generator/gapic/ads-templates/_license.j2 | 2 +- packages/gapic-generator/gapic/templates/_license.j2 | 2 +- .../tests/integration/goldens/asset/docs/conf.py | 2 +- .../integration/goldens/asset/google/cloud/asset/__init__.py | 2 +- 
.../goldens/asset/google/cloud/asset/gapic_version.py | 2 +- .../integration/goldens/asset/google/cloud/asset_v1/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/gapic_version.py | 2 +- .../goldens/asset/google/cloud/asset_v1/services/__init__.py | 2 +- .../google/cloud/asset_v1/services/asset_service/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/pagers.py | 2 +- .../asset_v1/services/asset_service/transports/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/base.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/grpc.py | 2 +- .../asset_v1/services/asset_service/transports/grpc_asyncio.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/rest.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/asset_service.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/assets.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/noxfile.py | 2 +- ...asset_v1_generated_asset_service_analyze_iam_policy_async.py | 2 +- ...erated_asset_service_analyze_iam_policy_longrunning_async.py | 2 +- ...nerated_asset_service_analyze_iam_policy_longrunning_sync.py | 2 +- ...dasset_v1_generated_asset_service_analyze_iam_policy_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_analyze_move_async.py | 2 +- .../cloudasset_v1_generated_asset_service_analyze_move_sync.py | 2 +- ...set_v1_generated_asset_service_analyze_org_policies_async.py | 2 +- ...sset_v1_generated_asset_service_analyze_org_policies_sync.py | 2 +- ...ed_asset_service_analyze_org_policy_governed_assets_async.py | 2 +- ...ted_asset_service_analyze_org_policy_governed_assets_sync.py | 2 +- ...sset_service_analyze_org_policy_governed_containers_async.py | 2 +- ...asset_service_analyze_org_policy_governed_containers_sync.py | 2 +- 
...v1_generated_asset_service_batch_get_assets_history_async.py | 2 +- ..._v1_generated_asset_service_batch_get_assets_history_sync.py | 2 +- ...ated_asset_service_batch_get_effective_iam_policies_async.py | 2 +- ...rated_asset_service_batch_get_effective_iam_policies_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_create_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_create_feed_sync.py | 2 +- ...asset_v1_generated_asset_service_create_saved_query_async.py | 2 +- ...dasset_v1_generated_asset_service_create_saved_query_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_delete_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_delete_feed_sync.py | 2 +- ...asset_v1_generated_asset_service_delete_saved_query_async.py | 2 +- ...dasset_v1_generated_asset_service_delete_saved_query_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_export_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_export_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_sync.py | 2 +- ...oudasset_v1_generated_asset_service_get_saved_query_async.py | 2 +- ...loudasset_v1_generated_asset_service_get_saved_query_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_list_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_sync.py | 2 +- ...asset_v1_generated_asset_service_list_saved_queries_async.py | 2 +- ...dasset_v1_generated_asset_service_list_saved_queries_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_query_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_query_assets_sync.py | 2 +- ..._v1_generated_asset_service_search_all_iam_policies_async.py | 2 +- ...t_v1_generated_asset_service_search_all_iam_policies_sync.py | 2 +- 
...set_v1_generated_asset_service_search_all_resources_async.py | 2 +- ...sset_v1_generated_asset_service_search_all_resources_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_update_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_update_feed_sync.py | 2 +- ...asset_v1_generated_asset_service_update_saved_query_async.py | 2 +- ...dasset_v1_generated_asset_service_update_saved_query_sync.py | 2 +- .../goldens/asset/scripts/fixup_asset_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/asset/tests/__init__.py | 2 +- .../tests/integration/goldens/asset/tests/unit/__init__.py | 2 +- .../integration/goldens/asset/tests/unit/gapic/__init__.py | 2 +- .../goldens/asset/tests/unit/gapic/asset_v1/__init__.py | 2 +- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 2 +- .../tests/integration/goldens/credentials/docs/conf.py | 2 +- .../goldens/credentials/google/iam/credentials/__init__.py | 2 +- .../goldens/credentials/google/iam/credentials/gapic_version.py | 2 +- .../goldens/credentials/google/iam/credentials_v1/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/gapic_version.py | 2 +- .../credentials/google/iam/credentials_v1/services/__init__.py | 2 +- .../iam/credentials_v1/services/iam_credentials/__init__.py | 2 +- .../iam/credentials_v1/services/iam_credentials/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../services/iam_credentials/transports/__init__.py | 2 +- .../credentials_v1/services/iam_credentials/transports/base.py | 2 +- .../credentials_v1/services/iam_credentials/transports/grpc.py | 2 +- .../services/iam_credentials/transports/grpc_asyncio.py | 2 +- .../credentials_v1/services/iam_credentials/transports/rest.py | 2 +- .../credentials/google/iam/credentials_v1/types/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/types/common.py | 2 +- .../google/iam/credentials_v1/types/iamcredentials.py | 
2 +- .../tests/integration/goldens/credentials/noxfile.py | 2 +- ..._v1_generated_iam_credentials_generate_access_token_async.py | 2 +- ...s_v1_generated_iam_credentials_generate_access_token_sync.py | 2 +- ...ials_v1_generated_iam_credentials_generate_id_token_async.py | 2 +- ...tials_v1_generated_iam_credentials_generate_id_token_sync.py | 2 +- ...mcredentials_v1_generated_iam_credentials_sign_blob_async.py | 2 +- ...amcredentials_v1_generated_iam_credentials_sign_blob_sync.py | 2 +- ...amcredentials_v1_generated_iam_credentials_sign_jwt_async.py | 2 +- ...iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py | 2 +- .../credentials/scripts/fixup_credentials_v1_keywords.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../tests/integration/goldens/credentials/tests/__init__.py | 2 +- .../integration/goldens/credentials/tests/unit/__init__.py | 2 +- .../goldens/credentials/tests/unit/gapic/__init__.py | 2 +- .../credentials/tests/unit/gapic/credentials_v1/__init__.py | 2 +- .../tests/unit/gapic/credentials_v1/test_iam_credentials.py | 2 +- .../tests/integration/goldens/eventarc/docs/conf.py | 2 +- .../goldens/eventarc/google/cloud/eventarc/__init__.py | 2 +- .../goldens/eventarc/google/cloud/eventarc/gapic_version.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/__init__.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py | 2 +- .../eventarc/google/cloud/eventarc_v1/services/__init__.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/__init__.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/async_client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/pagers.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/__init__.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/base.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/grpc.py | 2 +- .../eventarc_v1/services/eventarc/transports/grpc_asyncio.py | 
2 +- .../cloud/eventarc_v1/services/eventarc/transports/rest.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/channel.py | 2 +- .../google/cloud/eventarc_v1/types/channel_connection.py | 2 +- .../eventarc/google/cloud/eventarc_v1/types/discovery.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py | 2 +- .../google/cloud/eventarc_v1/types/google_channel_config.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py | 2 +- .../tests/integration/goldens/eventarc/noxfile.py | 2 +- .../eventarc_v1_generated_eventarc_create_channel_async.py | 2 +- ...arc_v1_generated_eventarc_create_channel_connection_async.py | 2 +- ...tarc_v1_generated_eventarc_create_channel_connection_sync.py | 2 +- .../eventarc_v1_generated_eventarc_create_channel_sync.py | 2 +- .../eventarc_v1_generated_eventarc_create_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_create_trigger_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_channel_async.py | 2 +- ...arc_v1_generated_eventarc_delete_channel_connection_async.py | 2 +- ...tarc_v1_generated_eventarc_delete_channel_connection_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_channel_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_delete_trigger_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_channel_async.py | 2 +- ...entarc_v1_generated_eventarc_get_channel_connection_async.py | 2 +- ...ventarc_v1_generated_eventarc_get_channel_connection_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_channel_sync.py | 2 +- ...arc_v1_generated_eventarc_get_google_channel_config_async.py | 2 +- ...tarc_v1_generated_eventarc_get_google_channel_config_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_provider_async.py | 2 +- .../eventarc_v1_generated_eventarc_get_provider_sync.py | 2 +- 
.../eventarc_v1_generated_eventarc_get_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_get_trigger_sync.py | 2 +- ...tarc_v1_generated_eventarc_list_channel_connections_async.py | 2 +- ...ntarc_v1_generated_eventarc_list_channel_connections_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_channels_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_channels_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_providers_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_providers_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_triggers_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_triggers_sync.py | 2 +- .../eventarc_v1_generated_eventarc_update_channel_async.py | 2 +- .../eventarc_v1_generated_eventarc_update_channel_sync.py | 2 +- ..._v1_generated_eventarc_update_google_channel_config_async.py | 2 +- ...c_v1_generated_eventarc_update_google_channel_config_sync.py | 2 +- .../eventarc_v1_generated_eventarc_update_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_update_trigger_sync.py | 2 +- .../goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 +- .../tests/integration/goldens/eventarc/tests/__init__.py | 2 +- .../tests/integration/goldens/eventarc/tests/unit/__init__.py | 2 +- .../integration/goldens/eventarc/tests/unit/gapic/__init__.py | 2 +- .../goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py | 2 +- .../eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py | 2 +- .../tests/integration/goldens/logging/docs/conf.py | 2 +- .../goldens/logging/google/cloud/logging/__init__.py | 2 +- .../goldens/logging/google/cloud/logging/gapic_version.py | 2 +- .../goldens/logging/google/cloud/logging_v2/__init__.py | 2 +- .../goldens/logging/google/cloud/logging_v2/gapic_version.py | 2 +- .../logging/google/cloud/logging_v2/services/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/__init__.py | 2 +- 
.../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/pagers.py | 2 +- .../services/config_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/config_service_v2/transports/base.py | 2 +- .../logging_v2/services/config_service_v2/transports/grpc.py | 2 +- .../services/config_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 2 +- .../services/logging_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/transports/base.py | 2 +- .../logging_v2/services/logging_service_v2/transports/grpc.py | 2 +- .../services/logging_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 2 +- .../services/metrics_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/base.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/grpc.py | 2 +- .../services/metrics_service_v2/transports/grpc_asyncio.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/__init__.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/log_entry.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/logging.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_config.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_metrics.py | 2 +- .../tests/integration/goldens/logging/noxfile.py | 2 +- ...ing_v2_generated_config_service_v2_copy_log_entries_async.py | 2 +- 
...ging_v2_generated_config_service_v2_copy_log_entries_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_create_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_create_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_create_bucket_sync.py | 2 +- ...ing_v2_generated_config_service_v2_create_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_create_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_link_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_view_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_bucket_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_bucket_sync.py | 2 +- ...ing_v2_generated_config_service_v2_delete_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_delete_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_link_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_view_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_sync.py | 2 +- ...ng_v2_generated_config_service_v2_get_cmek_settings_async.py | 2 +- ...ing_v2_generated_config_service_v2_get_cmek_settings_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_get_exclusion_async.py | 2 +- 
...logging_v2_generated_config_service_v2_get_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_link_sync.py | 2 +- ...logging_v2_generated_config_service_v2_get_settings_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_settings_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_buckets_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_buckets_sync.py | 2 +- ...ging_v2_generated_config_service_v2_list_exclusions_async.py | 2 +- ...gging_v2_generated_config_service_v2_list_exclusions_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_links_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_links_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_sync.py | 2 +- ...ging_v2_generated_config_service_v2_undelete_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_undelete_bucket_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_update_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_update_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_update_bucket_sync.py | 2 +- ...v2_generated_config_service_v2_update_cmek_settings_async.py | 2 +- ..._v2_generated_config_service_v2_update_cmek_settings_sync.py | 2 +- ...ing_v2_generated_config_service_v2_update_exclusion_async.py | 2 +- 
...ging_v2_generated_config_service_v2_update_exclusion_sync.py | 2 +- ...ging_v2_generated_config_service_v2_update_settings_async.py | 2 +- ...gging_v2_generated_config_service_v2_update_settings_sync.py | 2 +- .../logging_v2_generated_config_service_v2_update_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_update_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_update_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_update_view_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_async.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_list_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_list_log_entries_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_async.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_sync.py | 2 +- ...ging_service_v2_list_monitored_resource_descriptors_async.py | 2 +- ...gging_service_v2_list_monitored_resource_descriptors_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_tail_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_tail_log_entries_sync.py | 2 +- ...g_v2_generated_logging_service_v2_write_log_entries_async.py | 2 +- ...ng_v2_generated_logging_service_v2_write_log_entries_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_create_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_create_log_metric_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_delete_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_delete_log_metric_sync.py | 2 +- ...ging_v2_generated_metrics_service_v2_get_log_metric_async.py | 2 +- ...gging_v2_generated_metrics_service_v2_get_log_metric_sync.py | 2 +- ...ng_v2_generated_metrics_service_v2_list_log_metrics_async.py | 2 +- ...ing_v2_generated_metrics_service_v2_list_log_metrics_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_update_log_metric_async.py | 2 +- 
...ng_v2_generated_metrics_service_v2_update_log_metric_sync.py | 2 +- .../goldens/logging/scripts/fixup_logging_v2_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 2 +- .../tests/integration/goldens/logging/tests/__init__.py | 2 +- .../tests/integration/goldens/logging/tests/unit/__init__.py | 2 +- .../integration/goldens/logging/tests/unit/gapic/__init__.py | 2 +- .../goldens/logging/tests/unit/gapic/logging_v2/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../tests/integration/goldens/redis/docs/conf.py | 2 +- .../integration/goldens/redis/google/cloud/redis/__init__.py | 2 +- .../goldens/redis/google/cloud/redis/gapic_version.py | 2 +- .../integration/goldens/redis/google/cloud/redis_v1/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/gapic_version.py | 2 +- .../goldens/redis/google/cloud/redis_v1/services/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/pagers.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/base.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 2 +- .../redis_v1/services/cloud_redis/transports/grpc_asyncio.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/cloud_redis.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/noxfile.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_sync.py | 
2 +- .../redis_v1_generated_cloud_redis_delete_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_async.py | 2 +- ...s_v1_generated_cloud_redis_get_instance_auth_string_async.py | 2 +- ...is_v1_generated_cloud_redis_get_instance_auth_string_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_async.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_sync.py | 2 +- ...dis_v1_generated_cloud_redis_reschedule_maintenance_async.py | 2 +- ...edis_v1_generated_cloud_redis_reschedule_maintenance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_sync.py | 2 +- .../goldens/redis/scripts/fixup_redis_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- .../tests/integration/goldens/redis/tests/__init__.py | 2 +- .../tests/integration/goldens/redis/tests/unit/__init__.py | 2 +- .../integration/goldens/redis/tests/unit/gapic/__init__.py | 2 +- .../goldens/redis/tests/unit/gapic/redis_v1/__init__.py | 2 +- .../goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_async.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_sync.py | 2 +- 
...ollusca_v1_generated_snippets_method_bidi_streaming_async.py | 2 +- ...mollusca_v1_generated_snippets_method_bidi_streaming_sync.py | 2 +- ...ollusca_v1_generated_snippets_method_lro_signatures_async.py | 2 +- ...mollusca_v1_generated_snippets_method_lro_signatures_sync.py | 2 +- ...mollusca_v1_generated_snippets_method_one_signature_async.py | 2 +- .../mollusca_v1_generated_snippets_method_one_signature_sync.py | 2 +- ...lusca_v1_generated_snippets_method_server_streaming_async.py | 2 +- ...llusca_v1_generated_snippets_method_server_streaming_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_async.py | 2 +- ..._v1_generated_snippets_one_of_method_required_field_async.py | 2 +- ...a_v1_generated_snippets_one_of_method_required_field_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_sync.py | 2 +- .../tests/unit/samplegen/golden_snippets/sample_basic.py | 2 +- .../tests/unit/samplegen/golden_snippets/sample_basic_async.py | 2 +- .../samplegen/golden_snippets/sample_basic_unflattenable.py | 2 +- .../unit/samplegen/golden_snippets/sample_basic_void_method.py | 2 +- 369 files changed, 369 insertions(+), 369 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/_license.j2 b/packages/gapic-generator/gapic/ads-templates/_license.j2 index eee20db57924..1ee14b782014 100644 --- a/packages/gapic-generator/gapic/ads-templates/_license.j2 +++ b/packages/gapic-generator/gapic/ads-templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/gapic/templates/_license.j2 b/packages/gapic-generator/gapic/templates/_license.j2 index eee20db57924..1ee14b782014 100644 --- a/packages/gapic-generator/gapic/templates/_license.j2 +++ b/packages/gapic-generator/gapic/templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index b6b4f0941d46..fa7647914fb5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index 366ca0861def..de2660efe3aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 8fbe3ed54f62..f189f4430667 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index 542f923f08c5..1ad75a011889 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index fd6b4f6cb482..d82fb5015cee 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 4bf1188d11b6..e38dcc22c3bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 8f71ac0df30c..626c1ea04f4a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index c34a5f8f6e44..315eb22bd6cb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 446fc27632de..9bc218b63bce 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 195d47849773..7dc4a81d9d38 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 1228475d0563..8f2ea898f870 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index d3f32dce5148..064d43528c4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index 24f59fb0cc06..ccb88282b582 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 00ef45dfcea0..312683d50eeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 41a578401e64..15cfb5b14265 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 1035f1d3a23a..0872592be30d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index 4a13b28117ea..8369402d5e70 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index 47fb7196c09f..90a5878ac05a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index 5f29caec5251..1f246a28937e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index c2aa70280658..73dd37f5cab0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py index 377f2e12b2a8..ab5d6f2a7560 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py index 5ec453c16dc9..e886b920c49a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py index 625f3dec46e1..552d1ecc6eb2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py index 892f5a15b3a2..b56d918846fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py index 81b67efb9d02..adb76c21c6a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py index 86aca87da3e7..63efeb54057e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py index 3a24500e39ca..66dc1dab9063 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py index 4aa243c92f3d..edd19ed69fbb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index 6dd1e1dc4e5f..9655b814eb65 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index 87e233fb64d7..954d23bf7dce 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py index 30ef0c4e5388..7bf645687401 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py index f64e953a894c..8e173d62bda9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index f5aaf9a07c86..6cc4b2f9521a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index 4ef06854bf6e..f5237ca5e998 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py index b495af664e8c..7c9bf95a48d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py index bdebace2df7c..746fcbda3656 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py index cd3fab7c0047..67a09ebd009a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py index b6edad9d3957..f86f14507e30 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py index 40cdf7f6ffa9..daf5c52ac19c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py index 29d2ed16f12f..f556d9389662 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index bc5930773f55..d6ce3ef29c0a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 0e402bbff7ba..3d1227c3717a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index 75bdd2fdd431..8f28ebec4b74 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 01cd552cf69f..64a8156dd3e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py index 7f2185105f81..bd5835b75eba 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py index 947c761d88b6..68c585047a7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index 91bc6e83d934..36af8779f4ea 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index cbd0599118be..19e076231458 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index 52059178faaf..b0c43ba61881 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index 2c516167b319..cb5da42cf621 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py index c4be56a4f015..98079450807b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py index 9825502a6044..09b57384c4da 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py index bd68125cafa6..7616115c304b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py index e64414fdbcc0..177f021b5382 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index f99c45036215..4417191b56ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index 2095ee02945d..23e8b1f9fecc 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index 6e22841996ac..6ff7c119df12 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index 4a82b7eb50de..c6ef7ed258ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index 80d4dca4dea6..2c15208140d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index da3483f24a55..f867fd26d768 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py index 56aaab9fdc4a..b55c69cba951 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py index ef6a45d6374a..f187066a055a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index 979373d9256e..113392c7d9b5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 7433d7afe293..3399343f4786 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 2590301a34e6..69d0c6c12f5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 760611c76f53..ddea8370c741 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 6344d9b69f5e..2041446f63fc 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 10dccc1f3672..dc7f393a7568 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py index 308f0419c2e6..1e7e3fa347f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 977ea7533428..560b4e92524a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 08f630f9d2ac..4ea9050cc4a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py index fcc2e07495e7..cc75211251f1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 451a5fed6a1e..33e9637cb0d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index e9864f159a13..7d873e3be7d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 2ea028c0b641..7477486eb132 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index ca0b0c649e4b..f205888bb85b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py index 5f6aa27a12bd..c93152d4ac27 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index fb3a30054371..a275bf02a07e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index 3ee0123b6eec..f17a81e5542c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index cad74f01894d..93241d052f48 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index 4fb685e783f9..21e2f06dad40 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index f5cde6216918..b29ff5b52e70 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py index 4de35e89a022..3bbb2348993d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py index 761870af59c8..4d7e1050a689 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py index 42f0d1197050..7003b8f58fd3 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py index 16cb3f41c415..b35aaaa4d908 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py index 3d5c88eb756f..2e6958a96efe 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py index 0ff30f162316..86ecac2a308e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py index 90c337555a5b..40cebee0e91b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 6aa26da126e0..86e0eac8568e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index f57329dfaf90..1d705928de01 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py index 3859f04c4dc9..4545505bf3da 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py index 76921a7ee06c..0f2890f68071 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 5b322f29c5d6..1f990ec5d30b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py index 0f441a7b5f7f..6c6ea026d4bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index c5446b631c63..c71895f6aa73 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 98a3cebb3cac..a6b43a56fdc8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 773f83d8d86c..8689f7365d5b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py index 2a8db61b7d48..afed78b17c2c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index a225d7dff0a6..6113e4beaa74 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 768321d9bb89..7898699b4805 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index e94825227abd..9ea7acdc34df 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 8a51ab2e8f3e..dbf62e74e652 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 66303876a1cc..02e5a64aa84d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py index e864f6554108..13ab06e15858 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py index 301d8832c118..59d79edf4331 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py index 36a152e7df8a..d8424067300d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index 0e737707c81e..ec6bce60dfbf 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py index 291ebd01bd46..c99e4bc09554 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 86ba6a41cae3..562f0b4d59b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 18c77ff7722b..0e5ae5f08614 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py index 4dbb67d6ad9e..609801b7ede9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py index d740fe73e578..27908ff206e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py index 1743fe90e282..a4d3d5321f73 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py index 899225b0e4cb..36a0696246ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py index 601c9defba1a..a962abf7b2ec 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py index a0bd61ac12c6..1255f73e32f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py index 8cffd31dc159..9dad97f48420 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py index 4de62a3619c8..92f3c64550fd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py index b549b4daf80d..c9a0a1e35252 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py index d640d9872629..f2efb782d664 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py index cf8e3997acd0..c028d40e01ce 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py index 5a33153b0b3d..df0bd0efe944 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py index 482e80511e3f..a2bdc034a30d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py index e3300768b66b..d1f83e2be2ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py index 23fe5839bd31..e91dfb82ba94 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py index f56a7c44e50f..24f6e592ab2a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py index fd03d024ccb9..43968fed3093 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py index 273ee21a7aec..87cb63ab4e1f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py index 477e2b47508f..ee87d696f717 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py index 121fe759f9d8..9c19d1923ae4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py index ea3d734762c8..36655bfbf3b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py index 7bc98b9cdeea..8354eb167ebc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py index a9f93081bc3d..24a4ac3b6b03 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py index 0f6bbfb23c02..a8a5ec2ab3fd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py index 4cfb527880bc..cb70db05a623 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py index 6caa5f6e86f5..405dd6d39da9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py index af9e9b81bcf6..9b1a282ece1f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py index 6f24cdd55718..bcaaa019d697 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py index 1a2962e813ff..9878997a7732 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py index 3d9f79df1f2f..31558bea7a97 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py index 83c308256a42..909bc26c904e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py index 9f2001c813d2..c478b155f896 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py index 59d955d63169..ef74cb1afd0b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py index d39449bc2556..d20d7c495395 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py index a142df42f5cb..c237b16bf935 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py index c09e035c9f73..bed421304638 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py index bdd67ffcdc7c..afc415fb519a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index fdf2488ed7b2..766fba234f09 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 51fba7e54336..a21c154713ed 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index c78410beee73..e1d58773a736 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index fb661a83ae24..8a740d4fffe6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index b6bfee061fd4..3ad08618f71f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 2e513bad23c3..7e81edbc6394 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index f13822bd2e6e..d0b6d13a6f2e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index bbe8fe1cb9df..520303c420c2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 444519d150d8..c87a5cc6397b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 1ba655878dc9..1239a292b25a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index f25c29123807..b353b67be76d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 7251763c30bd..6176b2bc40bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index b618ba9a8e99..fab08309e0c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index fde7d503251d..3533e0cda4ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 113699c5055f..eb40f1771d55 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 46f79d64cbbb..a64066a8039f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 18997810173d..da84fac6f90f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index e1fb42a46005..4e814dcca94b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 2b871e92600d..90f4018d18e0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 4010a645d9f7..679c1a859808 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 6d00bd7e344c..8bf5fb4587ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 03275e543107..4068a59662bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 7eaed7beeffa..5e3b8965423d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index a538953a4acf..92c4511844ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 6f8bc8e01f74..5be809849b43 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 07d010436ac9..994b47887efd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 9fe00adae387..864a110564d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index fb39e573bba8..b27a13b8a07f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index e59f926ca139..d57e42b08afd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index 64298ee56e04..ce693111d295 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index f6063ad00adb..e9dac2e5ebcb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index 715909e87a4e..b294739dddea 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 0df028c0b93e..3115109aa03d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index 9a485ee8f922..d31f7e39732a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index c8c26c4f29f6..0292d4feb399 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index 806e937ddae0..d7a99dde2eec 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index ca0209f00fcb..dfb1d6649a9b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index c1f028fb0464..33fbfdc82062 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 8fe42df3c81a..63cfbe6809a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index 1ce698784552..7aa41cc72357 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index dc73253f4897..45eb3008c868 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 17490c61ef37..4e6a7a4b65c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index 75ec32f48eeb..aaf80170866b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index 8ceb5298553a..ad37d01172a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index 604ff66269c8..c76b96582041 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 277e83055225..3a1905617454 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index a4df0299426a..bae648ae4a9c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 5cd201276977..316cffd4e0a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index cd3ca94e19c6..8f2ce706d64a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index fcffb6db861d..22c65b7c831d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index a8f902116832..df954b6733ed 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index b426d4703e1c..cf42f9ea10dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 5d98f782bde2..8ffd1c8e0837 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index 8c7a934a735d..8e9546500ea7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index dfa59b30742b..542d327dad1e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 11d91947e3bc..2ccbdea9bb04 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index bf9875b0aa94..5bc0e2a9cdb5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index fe9c7031ef46..2ed78b67996b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index b5539c04f291..1683711af60a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index c29c35a4e213..65bac322829d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 818ab646fdf3..e540e3c2bde7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 3ae13401d749..6a725158774a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 499d68bdbf44..5c259791926a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index 03b7dc7d8f71..d9561edfed3b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index a445ed396c8f..a83707e78240 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index ddc3d131f4c7..6ebb21533096 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index 3a7643b3a273..9ea61a18c813 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index 4ee968e8155d..e3f3eb215a5c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index a3e018440c2a..c5c2dbdbdb22 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index b6fe5b11e08f..6117f303c91e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index ecebaf1194ff..5e368652d475 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 5992e53ee4aa..3c73ceb7b686 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index 14d8679bce7f..4c85e51bb0dd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index dfbc3b411480..fd1c75ee2644 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 8626f7cae9fd..1100c49463e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index ad42edff534e..8a84e02dc9fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 727723a7a559..36c2ef9b238b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index 7eccffaa6bae..7357700c8b10 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index a2f98d69d320..35758efbfee1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index b642d38eec23..08e56ea67e0b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index b4fc92452254..3c343f13400c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 1542a5a387f2..08c9f5ec4bc0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index b273c465d3ec..f04009b8b7d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index d2695708ddd3..00ba706111f3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index 8d25c7d33f73..c85952a369a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index e1c741b67075..88042ee699a4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 7dde59dcdd4f..8e615977c75a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index 2ecaf8df26dd..847e83ef1295 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index 7b4a3c597f2b..fdf74e7d8679 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index 96fc8ff9788c..f8dd087b9b31 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 9bbc7dcb1c5d..42a511d0d5e5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index d8b8d7f7bca5..20301b70581a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index 36d5776e36dc..a3ac5db82a51 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index b51dd81cc946..486ff4f0905e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 1e7aefce8f6a..57b588e71725 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index aef8473798e3..f820c78a88ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index e84230202450..d10840436a40 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index f143a772c3d3..cc2d8a998cde 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 3867d3702391..5f5c2b231d27 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index c6469053baa4..4f6f42664dd5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 1e4e28abc08c..056be6957768 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index d5cfe190c8dd..2d3e52ebe184 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d24923cb1e75..d1a86427f185 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 71859024dbb7..278a2c47012e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index 5a5ff140c42a..bc2071cf4784 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index 519a2498ac3a..b9224990329b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index ca97be4b3d86..78f5bc605553 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 24e9e200951f..3047a4f49db7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index dc9a545e7c4b..2080458cfaa0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index 1a0d48664303..bdc98630dac8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index de4bfe6c822b..884ef7a296cd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 557d3229302a..23c4ebf2461f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index b9e9cade9e94..a7954dddedae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index fea40e7a4957..fda8be673408 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index ccf2983be6d8..d57818dff81a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 4b6984171895..232cd5ee4944 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index abb071c655da..62458a437a6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index f280ec9dea62..3655223bae09 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index bffbe10a8eac..780f276ec59d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 59bfeeaaaa3f..09a259a2705f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index ed4dd0126e75..159576050279 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index 1654590d1074..8f01527b8d4a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index c7c8d2b65b65..976935414a44 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 7449687209ab..34b89a6a4a29 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 0707912ba636..d4c9a1168651 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index e32ddadc8ae8..70a02b72fdb8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index b95d38e026c2..0764f832c6f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index 92a9215362b6..ededc2d374b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index 0d4ccb8a3510..e89fe0cc8a10 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py index 360a0d13ebdd..558c8aab67c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py index bdef6fef167e..17e93eea6bfe 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 8fdc04b6e624..3d2a03c068ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 668dd5b97e62..6bc325838c82 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 2efaf76cfe5d..1c389c8f1343 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index ec40765ecc00..889648d305d4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index f54cb911e7ff..7c916ad9daa9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 26328c6f6dd7..5512c3073690 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index f9f3df6a6398..43afea8584ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index a02c1b609996..354a41b5da78 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index a0b7fdcdc4a1..6afcaca198a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 234065b45bae..50ca82ff0299 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 37e6969d24cb..eee3aad5cd31 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 33f13c2311dc..3385bd5f99e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index 28b2a638cb6e..4ea53ed516c6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 361dbe4f4b3d..462ec0e4ee0d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index 3cc2c500d2d3..d933dd8a4521 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index b1080a1af5ef..bc2a58ff39b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py index 5c4ffd0cef26..a3e059a90f56 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index 59321ba49ec2..bc5eb5bf71e1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py index 31564909c30a..221da9018e87 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index 9ca737687131..e13bffbd78cc 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py index a8e1b9147d26..e79a81d7b704 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py index a872f6a64a5e..440b4db35ba4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index 96f25ba4b399..4a6053e1d95d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index 6c092f684b1a..8384b9c1870e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py index 5a88ac2b0744..43e5fb5a1798 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 93173fc27de1..19396f14baeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index e7ee022f94d9..ec07ceff62c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py index 564a1c0277e0..9ff86bb0c7a9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py index 825fc37aa04c..be55eed88654 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index b54b18108137..9a6d69923d3f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index fbcabb81e6ff..2429f106358d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index cb82a9b6eb1b..d72856938573 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py index d21c2dc08db9..2682878bbb74 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index 4304e380e491..5ea0edd0793a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 10ab2419a611..55e1fd55736c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py index 1b4db446eb8d..7b3de3117f38 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index a990edad8b6e..10552af1470e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index 02c96869da19..63c508e4b039 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index d7434c0776ae..d557c056c521 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index b9cdaa1f7c7f..619f1e00b638 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index 229f7cb9d784..903281ab4c46 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index 6ef4757d3f3d..678e3a60927f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index fcfb29df413a..c878afbe7cb8 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index 94f25ad6a056..29a2ee485323 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index 196bb80d44d7..b840b45536a8 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index ccf631119e6e..76ad17bb6a83 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index 236efda92ec7..f148f2c53a9e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index ce315bc224a0..faedde72a35e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index 0408577440af..352996bd1c77 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index a91e3e54e4ed..84b972a47089 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index e7ac3c078827..5474658577d8 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index 422b1250e00f..c02a3dd584b1 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index f42269db19dc..7fff3fed8772 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index 422b1250e00f..c02a3dd584b1 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index ea15a2fa37c9..7f679080dc20 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 6a9ea41967293ed76973c4e218819f31a3df4156 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:11:31 +0000 Subject: [PATCH 1118/1339] chore(main): release 1.14.5 (#1968) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4816ae39bacf..7ae2e6247c96 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.14.5](https://github.com/googleapis/gapic-generator-python/compare/v1.14.4...v1.14.5) (2024-03-04) + + +### Bug Fixes + +* Update copyright year ([dde240b](https://github.com/googleapis/gapic-generator-python/commit/dde240bc7e63396416709b7f3dd7b50b355bc829)) + ## [1.14.4](https://github.com/googleapis/gapic-generator-python/compare/v1.14.3...v1.14.4) (2024-02-29) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 001aa1f122e9..a1fcd15c5a8d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.14.4" +version = "1.14.5" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 58185c15b49e6b99bf55e3bff05bd2df1b1ca43b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 12 Mar 2024 12:31:11 -0400 Subject: [PATCH 1119/1339] feat: add support for checking uuid4 fields (#1972) --- packages/gapic-generator/gapic/schema/wrappers.py | 12 ++++++++++++ 
.../tests/unit/schema/wrappers/test_field.py | 14 ++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 7b967ac6aca3..9d2443e89409 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -39,6 +39,7 @@ from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 +from google.api import field_info_pb2 from google.api import http_pb2 from google.api import resource_pb2 from google.api import routing_pb2 @@ -307,6 +308,17 @@ def required(self) -> bool: return (field_behavior_pb2.FieldBehavior.Value('REQUIRED') in self.options.Extensions[field_behavior_pb2.field_behavior]) + @property + def uuid4(self) -> bool: + """ + Return True if the format of this field is a Universally + Unique Identifier, version 4 field, False otherwise. + + Returns: + bool: Whether this field is UUID4. + """ + return self.options.Extensions[field_info_pb2.field_info].format == field_info_pb2.FieldInfo.Format.Value("UUID4") + @property def resource_reference(self) -> Optional[str]: """Return a resource reference type if it exists. 
diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index 7bded119c5a0..efaf595d946b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -17,6 +17,7 @@ import pytest from google.api import field_behavior_pb2 +from google.api import field_info_pb2 from google.api import resource_pb2 from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 @@ -140,6 +141,19 @@ def test_not_required(): assert not field.required +def test_uuid4(): + field = make_field() + field.options.Extensions[field_info_pb2.field_info].format = field_info_pb2.FieldInfo.Format.Value( + "UUID4" + ) + assert field.uuid4 + + +def test_not_uuid4(): + field = make_field() + assert not field.uuid4 + + def test_ident_sphinx(): field = make_field(type='TYPE_BOOL') assert field.ident.sphinx == 'bool' From b3baf14fb2caf8e2d3525342c524d9e4cb9a65b6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 15 Mar 2024 07:24:49 -0400 Subject: [PATCH 1120/1339] fix: fix resource path helpers for paths with =** (#1976) --- .../%name/%version/%sub/services/%service/client.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- packages/gapic-generator/gapic/schema/wrappers.py | 10 ++++++++++ .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../tests/unit/schema/wrappers/test_message.py | 1 + 6 files changed, 17 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 0dd30f65ce13..02e97a85fef9 100644 --- 
a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -194,7 +194,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: """Returns a fully-qualified {{ message.resource_type|snake_case }} string.""" - return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + return "{{ message.resource_path_formatted }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @staticmethod diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index f49cac457926..6e5469c08713 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -2259,7 +2259,7 @@ def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} - expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + expected = "{{ message.resource_path_formatted }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) assert expected == actual @@ -2282,7 +2282,7 @@ def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): {% for arg in 
resource_msg.message_type.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} - expected = "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + expected = "{{ resource_msg.message_type.resource_path_formatted }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) actual = {{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path({{ resource_msg.message_type.resource_path_args|join(", ") }}) assert expected == actual diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 9d2443e89409..c7396ca5e910 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -613,6 +613,16 @@ def resource_type_full_path(self) -> Optional[str]: def resource_path_args(self) -> Sequence[str]: return self.PATH_ARG_RE.findall(self.resource_path or '') + @property + def resource_path_formatted(self) -> str: + """ + Returns a formatted version of `resource_path`. This re-writes + patterns like: 'projects/{project}/metricDescriptors/{metric_descriptor=**}' + to 'projects/{project}/metricDescriptors/{metric_descriptor} + so it can be used in an f-string. 
+ """ + return self.PATH_ARG_RE.sub(r"{\g<1>}", self.resource_path or '') + @utils.cached_property def path_regex_str(self) -> str: # The indirection here is a little confusing: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 741c03afe0f1..80bd73bdaccb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -195,7 +195,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): @staticmethod def {{ message.resource_type|snake_case }}_path({% for arg in message.resource_path_args %}{{ arg }}: str,{% endfor %}) -> str: """Returns a fully-qualified {{ message.resource_type|snake_case }} string.""" - return "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + return "{{ message.resource_path_formatted }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) @staticmethod diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 66605a9c3e8a..3830c93d7bb2 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1543,7 +1543,7 @@ def test_{{ message.resource_type|snake_case }}_path(): {% for arg in message.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} - expected = "{{ message.resource_path }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor 
%}) + expected = "{{ message.resource_path_formatted }}".format({% for arg in message.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) actual = {{ service.client_name }}.{{ message.resource_type|snake_case }}_path({{message.resource_path_args|join(", ") }}) assert expected == actual @@ -1566,7 +1566,7 @@ def test_common_{{ resource_msg.message_type.resource_type|snake_case }}_path(): {% for arg in resource_msg.message_type.resource_path_args %} {{ arg }} = "{{ molluscs.next() }}" {% endfor %} - expected = "{{ resource_msg.message_type.resource_path }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) + expected = "{{ resource_msg.message_type.resource_path_formatted }}".format({% for arg in resource_msg.message_type.resource_path_args %}{{ arg }}={{ arg }}, {% endfor %}) actual = {{ service.client_name }}.common_{{ resource_msg.message_type.resource_type|snake_case }}_path({{ resource_msg.message_type.resource_path_args|join(", ") }}) assert expected == actual diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 8eea36e5d031..00f91b65f7a8 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -221,6 +221,7 @@ def test_resource_path_with_wildcard(): "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment") assert re.match(message.path_regex_str, "kingdoms/my-kingdom/phyla/my-phylum/classes/") is None + assert message.resource_path_formatted == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" def test_resource_path_pure_wildcard(): From b6172e951108c969b1e871b4d5c0d05eaf05fb47 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 14:48:46 +0000 Subject: [PATCH 1121/1339] chore: remove nox uninstall/reinstall from 
python build.sh template (#1977) Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/build.sh | 7 ------- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index e4e943e0259a..af879fdecde7 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 +# created: 2024-03-15T14:27:15.879623611Z diff --git a/packages/gapic-generator/.kokoro/build.sh b/packages/gapic-generator/.kokoro/build.sh index ecf29aa67f54..a2d75e376ba5 100755 --- a/packages/gapic-generator/.kokoro/build.sh +++ b/packages/gapic-generator/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then From fc11c292aff8e853acf62d3a671e6f9beff7db0e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 13:33:54 -0400 Subject: [PATCH 1122/1339] chore(main): release 1.15.0 (#1973) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 12 ++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 7ae2e6247c96..f80bad40a72c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog +## [1.15.0](https://github.com/googleapis/gapic-generator-python/compare/v1.14.5...v1.15.0) (2024-03-15) + + +### Features + +* Add support for checking uuid4 fields ([#1972](https://github.com/googleapis/gapic-generator-python/issues/1972)) ([d5f90a2](https://github.com/googleapis/gapic-generator-python/commit/d5f90a2455cb671e67e2cd30f3eb470092e4f889)) + + +### Bug Fixes + +* Fix resource path helpers for paths with =** ([#1976](https://github.com/googleapis/gapic-generator-python/issues/1976)) ([08c01e9](https://github.com/googleapis/gapic-generator-python/commit/08c01e9d92adeb492a7b526b1b5267931750ee61)) + ## [1.14.5](https://github.com/googleapis/gapic-generator-python/compare/v1.14.4...v1.14.5) (2024-03-04) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a1fcd15c5a8d..2b243db84219 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.14.5" +version = "1.15.0" release_status = "Development Status :: 5 - Production/Stable" 
dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 27975a0788b9de91cfd4dd4271a1b960a8d76181 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 13:34:49 -0400 Subject: [PATCH 1123/1339] chore(python): add requirements for docs build (#1979) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 4 ++ .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 38 +++++++++++++++++++ 4 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 packages/gapic-generator/.kokoro/docker/docs/requirements.in create mode 100644 packages/gapic-generator/.kokoro/docker/docs/requirements.txt diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index af879fdecde7..5d9542b1cb21 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 -# created: 2024-03-15T14:27:15.879623611Z + digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f +# created: 2024-03-15T16:26:15.743347415Z diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile index 8e39a2cc438d..bdaf39fe22d0 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.in b/packages/gapic-generator/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000000..816817c672a1 --- /dev/null +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000000..0e5d70f20f83 --- /dev/null +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + 
--hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox From f1aef7fdeb3d14aff5e117908380ea6c8ea87c5c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 15 Mar 2024 14:06:30 -0400 Subject: [PATCH 1124/1339] fix: fix dependency `google-apps-card` (#1971) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index b5e420632a5b..be4cff6aee8f 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -1,12 +1,13 @@ {% set pypi_packages = { - ("google", 
"apps", "card"): {"package_name": "google-apps-card", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, + ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, From 98776b1f775c090c0fdbc8765550002101fef1de Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 18 Mar 2024 16:37:27 -0400 Subject: [PATCH 1125/1339] ci: Update showcase version to 0.31.0 (#1983) --- packages/gapic-generator/.github/workflows/tests.yaml | 2 +- packages/gapic-generator/noxfile.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index da3eaf29c53a..354aff4a577f 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -14,7 +14,7 @@ concurrency: cancel-in-progress: true env: - SHOWCASE_VERSION: 0.30.0 + SHOWCASE_VERSION: 0.31.0 PROTOC_VERSION: 3.20.2 jobs: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 4c283e47f0c7..8aa70a80215e 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.30.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.31.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") From 6c16db27cc8388f3c82fe2eb6c753ce0192570c8 Mon Sep 17 00:00:00 2001 From: 
Anthonios Partheniou Date: Mon, 18 Mar 2024 22:10:22 -0400 Subject: [PATCH 1126/1339] feat: Add support for reading MethodSettings from service configuration YAML (#1975) Co-authored-by: Victor Chudnovsky --- packages/gapic-generator/gapic/schema/api.py | 139 +++++- .../tests/unit/schema/test_api.py | 441 +++++++++++++++++- 2 files changed, 577 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 084349d4885f..d3aa9f07ca21 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -23,10 +23,12 @@ import keyword import os import sys -from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple from types import MappingProxyType +from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple +import yaml from google.api_core import exceptions +from google.api import client_pb2 # type: ignore from google.api import http_pb2 # type: ignore from google.api import resource_pb2 # type: ignore from google.api import service_pb2 # type: ignore @@ -58,6 +60,14 @@ TRANSPORT_REST = "rest" +class MethodSettingsError(ValueError): + """ + Raised when `google.api.client_pb2.MethodSettings` contains + an invalid value. + """ + pass + + @dataclasses.dataclass(frozen=True) class Proto: """A representation of a particular proto file within an API.""" @@ -560,6 +570,133 @@ def mixin_http_options(self): res[s] = [rule for rule in opt_gen if rule] return res + @cached_property + def all_methods(self) -> Mapping[str, MethodDescriptorProto]: + """Return a map of all methods for the API. + + Return: + Mapping[str, MethodDescriptorProto]: A mapping of MethodDescriptorProto + values for the API. 
+ """ + return { + f"{service_key}.{method_key}": method_value + for service_key, service_value in self.services.items() + for method_key, method_value in service_value.methods.items() + } + + def enforce_valid_method_settings( + self, service_method_settings: Sequence[client_pb2.MethodSettings] + ) -> None: + """ + Checks each `google.api.client.MethodSettings` provided for validity and + raises an exception if invalid values are found. If + `google.api.client.MethodSettings.auto_populated_fields` + is set, verify each field against the criteria of AIP-4235 + (https://google.aip.dev/client-libraries/4235). All of the conditions + below must be true: + + - The field must be of type string + - The field must be at the top-level of the request message + - The RPC must be a unary RPC (i.e. streaming RPCs are not supported) + - The field must not be annotated with google.api.field_behavior = REQUIRED. + - The field must be annotated with google.api.field_info.format = UUID4. + + Note that the field presence requirements in AIP-4235 should be checked at run + time. + + Args: + service_method_settings (Sequence[client_pb2.MethodSettings]): Method + settings to be used when generating API methods. + Return: + None + Raises: + MethodSettingsError: if fields in `method_settings.auto_populated_fields` + cannot be automatically populated. + """ + + all_errors: dict = {} + selectors_seen = [] + for method_settings in service_method_settings: + # Check if this selector is defind more than once + if method_settings.selector in selectors_seen: + all_errors[method_settings.selector] = ["Duplicate selector"] + continue + selectors_seen.append(method_settings.selector) + + method_descriptor = self.all_methods.get(method_settings.selector) + # Check if this selector can be mapped to a method in the API. + if not method_descriptor: + all_errors[method_settings.selector] = [ + "Method was not found." 
+ ] + continue + + if method_settings.auto_populated_fields: + # Check if the selector maps to a streaming method + if ( + method_descriptor.client_streaming + or method_descriptor.server_streaming + ): + all_errors[method_settings.selector] = [ + "Method is not a unary method." + ] + continue + top_level_request_message = self.messages[ + method_descriptor.input_type.lstrip(".") + ] + selector_errors = [] + for field_str in method_settings.auto_populated_fields: + if field_str not in top_level_request_message.fields: + selector_errors.append( + f"Field `{field_str}` was not found" + ) + else: + field = top_level_request_message.fields[field_str] + if field.type != wrappers.PrimitiveType.build(str): + selector_errors.append( + f"Field `{field_str}` is not of type string." + ) + if field.required: + selector_errors.append( + f"Field `{field_str}` is a required field." + ) + if not field.uuid4: + selector_errors.append( + f"Field `{field_str}` is not annotated with " + "`google.api.field_info.format = \"UUID4\"." + ) + if selector_errors: + all_errors[method_settings.selector] = selector_errors + if all_errors: + raise MethodSettingsError(yaml.dump(all_errors)) + + @cached_property + def all_method_settings(self) -> Mapping[str, Sequence[client_pb2.MethodSettings]]: + """Return a map of all `google.api.client.MethodSettings` to be used + when generating methods. + https://github.com/googleapis/googleapis/blob/7dab3de7ec79098bb367b6b2ac3815512a49dd56/google/api/client.proto#L325 + + Return: + Mapping[str, Sequence[client_pb2.MethodSettings]]: A mapping of all method + settings read from the service YAML. + + Raises: + gapic.schema.api.MethodSettingsError: if the method settings do not + meet the requirements of https://google.aip.dev/client-libraries/4235. 
+ """ + self.enforce_valid_method_settings( + self.service_yaml_config.publishing.method_settings + ) + + return { + method_setting.selector: client_pb2.MethodSettings( + selector=method_setting.selector, + long_running=method_setting.long_running, + auto_populated_fields=method_setting.auto_populated_fields, + ) + for method_setting in self.service_yaml_config.publishing.method_settings + } + @cached_property def has_location_mixin(self) -> bool: return len(list(filter(lambda api: api.name == "google.cloud.location.Locations", self.service_yaml_config.apis))) > 0 diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index e493e68d0c11..b242fd9e811c 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -13,13 +13,17 @@ # limitations under the License. import collections - +import re +from typing import Sequence from unittest import mock +import yaml import pytest from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 +from google.api import field_behavior_pb2 +from google.api import field_info_pb2 from google.api import resource_pb2 from google.api_core import exceptions from google.cloud import extended_operations_pb2 as ex_ops_pb2 @@ -39,10 +43,10 @@ from test_utils.test_utils import ( make_enum_pb2, + make_field, make_field_pb2, make_file_pb2, make_message_pb2, - make_method, make_naming, make_oneof_pb2, ) @@ -2602,3 +2606,436 @@ def test_has_iam_mixin(): }) api_schema = api.API.build(fd, 'google.example.v1', opts=opts) assert api_schema.has_iam_mixin + + +def get_file_descriptor_proto_for_method_settings_tests( + fields: Sequence[descriptor_pb2.FieldDescriptorProto] = None, + client_streaming: bool = False, + server_streaming: bool = False, +) -> descriptor_pb2.FileDescriptorProto: + """ + Args: + fields (Sequence[descriptor_pb2.FieldDescriptorProto]): Fields to include + in 
messages in the return object `descriptor_pb2.FileDescriptorProto`. + client_streaming (bool): Whether the methods in the return object + `descriptor_pb2.FileDescriptorProto` should use client streaming. + server_streaming (bool): Whether the methods in the return object + `descriptor_pb2.FileDescriptorProto` should use server streaming. + Return: + descriptor_pb2.FileDescriptorProto: Returns an object describing the API. + """ + + field_options = descriptor_pb2.FieldOptions() + field_options.Extensions[ + field_info_pb2.field_info + ].format = field_info_pb2.FieldInfo.Format.Value("UUID4") + + fd = ( + make_file_pb2( + name="someexample.proto", + package="google.example.v1beta1", + messages=( + make_message_pb2(name="ExampleRequest", fields=fields), + make_message_pb2(name="ExampleResponse", fields=()), + make_message_pb2( + name='NestedMessage', + fields=( + make_field_pb2( + name="squid", + options=field_options, + type="TYPE_STRING", + number=1 + ), + ), + options=descriptor_pb2.MessageOptions(map_entry=True), + ) + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="ServiceOne", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Example1", + input_type="google.example.v1beta1.ExampleRequest", + output_type="google.example.v1beta1.ExampleResponse", + client_streaming=client_streaming, + server_streaming=server_streaming, + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name="ServiceTwo", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Example1", + input_type="google.example.v1beta1.ExampleRequest", + output_type="google.example.v1beta1.ExampleResponse", + client_streaming=client_streaming, + server_streaming=server_streaming, + ), + ), + ), + ), + ), + ) + return fd + + +def test_api_all_methods(): + """ + Tests the `all_methods` method of `gapic.schema.api` method which returns a map of + all methods for the API. 
+ """ + fd = get_file_descriptor_proto_for_method_settings_tests() + api_schema = api.API.build(fd, "google.example.v1beta1") + assert len(api_schema.all_methods) == 2 + assert list(api_schema.all_methods.keys()) == [ + "google.example.v1beta1.ServiceOne.Example1", + "google.example.v1beta1.ServiceTwo.Example1", + ] + + +def test_read_method_settings_from_service_yaml(): + """ + Tests the `gapic.schema.api.all_method_settings` method which reads + `MethodSettings` from the service config YAML. + https://github.com/googleapis/googleapis/blob/7dab3de7ec79098bb367b6b2ac3815512a49dd56/google/api/client.proto#L325 + """ + service_yaml_config = { + "apis": [ + {"name": "google.example.v1beta1.ServiceOne.Example1"}, + ], + "publishing": { + "method_settings": [ + { + "selector": "google.example.v1beta1.ServiceOne.Example1", + "auto_populated_fields": [ + "squid", + "mollusc", + ], + }, + ] + }, + } + cli_options = Options(service_yaml_config=service_yaml_config) + field_options = descriptor_pb2.FieldOptions() + field_options.Extensions[ + field_info_pb2.field_info + ].format = field_info_pb2.FieldInfo.Format.Value("UUID4") + + squid = make_field_pb2( + name="squid", type="TYPE_STRING", options=field_options, number=1 + ) + mollusc = make_field_pb2( + name="mollusc", type="TYPE_STRING", options=field_options, number=2 + ) + fields = [squid, mollusc] + fd = get_file_descriptor_proto_for_method_settings_tests(fields=fields) + api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) + assert api_schema.all_method_settings == { + "google.example.v1beta1.ServiceOne.Example1": client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["squid", "mollusc"], + long_running=client_pb2.MethodSettings.LongRunning(), + ) + } + + +def test_method_settings_duplicate_selector_raises_error(): + """ + Test that `MethodSettingsError` is raised when there are duplicate selectors in + `client_pb2.MethodSettings`. 
+ """ + fd = get_file_descriptor_proto_for_method_settings_tests() + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + ), + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + ), + ] + with pytest.raises( + api.MethodSettingsError, match="(?i)duplicate selector" + ): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_invalid_selector_raises_error(): + """ + Test that `MethodSettingsError` when `client_pb2.MethodSettings.selector` + cannot be mapped to a method in the API. + """ + method_example1 = "google.example.v1beta1.DoesNotExist.Example1" + method_example2 = "google.example.v1beta1.ServiceOne.DoesNotExist" + + fd = get_file_descriptor_proto_for_method_settings_tests() + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector=method_example1, + ), + client_pb2.MethodSettings( + selector=method_example2, + ), + ] + + with pytest.raises(api.MethodSettingsError) as ex: + api_schema.enforce_valid_method_settings(methodsettings) + + error_yaml = yaml.safe_load(ex.value.args[0]) + + assert re.match( + ".*not found.*", + error_yaml[method_example1][0].lower() + ) + assert re.match( + ".*not found.*", + error_yaml[method_example2][0].lower() + ) + + +def test_method_settings_unsupported_auto_populated_field_type_raises_error(): + """ + Test that `MethodSettingsError` is raised when a field in + `client_pb2.MethodSettings.auto_populated_fields` is not of type string. 
+ """ + squid = make_field_pb2(name="squid", type="TYPE_INT32", number=1) + fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises(api.MethodSettingsError, match="(?i)type string"): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_auto_populated_field_not_found_raises_error(): + """ + Test that `MethodSettingsError` is raised when a field in + `client_pb2.MethodSettings.auto_populated_fields` is not found in the top-level + request message of the selector. + """ + fd = get_file_descriptor_proto_for_method_settings_tests() + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["whelk"], + ), + ] + with pytest.raises(api.MethodSettingsError, match="(?i)not found"): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_auto_populated_nested_field_raises_error(): + """ + Test that `MethodSettingsError` is raised when a field in + `client_pb2.MethodSettings.auto_populated_fields` is not found in the top-level + request message of the selector. Instead, the field exists in a nested message. 
+ """ + + octopus = make_field( + name='octopus', + type_name='google.example.v1beta1.NestedMessage', + label=3, + type='TYPE_MESSAGE', + ) + + fd = get_file_descriptor_proto_for_method_settings_tests( + fields=[octopus.field_pb] + ) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises(api.MethodSettingsError, match="(?i)not found"): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_auto_populated_field_client_streaming_rpc_raises_error(): + """ + Test that `MethodSettingsError` is raised when the selector in + `client_pb2.MethodSettings.selector` maps to a method which uses client streaming. + """ + fd = get_file_descriptor_proto_for_method_settings_tests( + client_streaming=True + ) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises( + api.MethodSettingsError, match="(?i)not a unary method" + ): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_auto_populated_field_server_streaming_rpc_raises_error(): + """ + Test that `MethodSettingsError` is raised when the selector in + `client_pb2.MethodSettings.selector` maps to a method which uses server streaming. 
+ """ + fd = get_file_descriptor_proto_for_method_settings_tests( + server_streaming=True + ) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises( + api.MethodSettingsError, match="(?i)not a unary method" + ): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_unsupported_auto_populated_field_behavior_raises_error(): + """ + Test that `MethodSettingsError` is raised when a field in + `client_pb2.MethodSettings.auto_populated_fields` is a required field. + """ + field_options = descriptor_pb2.FieldOptions() + field_options.Extensions[field_behavior_pb2.field_behavior].append( + field_behavior_pb2.FieldBehavior.Value("REQUIRED") + ) + squid = make_field_pb2( + name="squid", type="TYPE_STRING", options=field_options, number=1 + ) + fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.ServiceOne.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises( + api.MethodSettingsError, match="(?i)required field" + ): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_auto_populated_field_field_info_format_not_specified_raises_error(): + """ + Test that `MethodSettingsError` is raised when a field in + `client_pb2.MethodSettings.auto_populated_fields` is not annotated with + `google.api.field_info.format = UUID4`. For this test case, + the format of the field is not specified. 
+ """ + squid = make_field_pb2(name="squid", type="TYPE_STRING", number=1) + fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.SomeExample.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises(api.MethodSettingsError): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_unsupported_auto_populated_field_field_info_format_raises_error(): + """ + Test that `MethodSettingsError` is raised when a field in + `client_pb2.MethodSettings.auto_populated_fields` is not annotated with + `google.api.field_info.format = UUID4`.For this test case, + the format of the field is `IPV4`. + """ + field_options = descriptor_pb2.FieldOptions() + field_options.Extensions[ + field_info_pb2.field_info + ].format = field_info_pb2.FieldInfo.Format.Value("IPV4") + squid = make_field_pb2( + name="squid", type="TYPE_STRING", options=field_options, number=1 + ) + fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector="google.example.v1beta1.SomeExample.Example1", + auto_populated_fields=["squid"], + ), + ] + with pytest.raises(api.MethodSettingsError): + api_schema.enforce_valid_method_settings(methodsettings) + + +def test_method_settings_invalid_multiple_issues(): + """ + A kitchen sink type of test to ensure `MethodSettingsError` is raised and the contents + of the exception includes sufficient detail describing each issue. 
+ """ + method_example1 = "google.example.v1beta1.ServiceTwo.Example1" + method_example2 = "google.example.v1beta1.ServiceThree.Example2" + field_options = descriptor_pb2.FieldOptions() + + # Field Squid Errors + # - Not annotated with google.api.field_info.format = UUID4 + # - Not of type string + # - Required field + field_options.Extensions[ + field_info_pb2.field_info + ].format = field_info_pb2.FieldInfo.Format.Value("IPV4") + squid = make_field_pb2( + name="squid", type="TYPE_INT32", options=field_options, number=1 + ) + field_options = descriptor_pb2.FieldOptions() + field_options.Extensions[field_behavior_pb2.field_behavior].append( + field_behavior_pb2.FieldBehavior.Value("REQUIRED") + ) + + # Field Octopus Errors + # - Not annotated with google.api.field_info.format = UUID4 + octopus = make_field_pb2(name="octopus", type="TYPE_STRING", number=1) + fd = get_file_descriptor_proto_for_method_settings_tests( + fields=[squid, octopus] + ) + api_schema = api.API.build(fd, "google.example.v1beta1") + methodsettings = [ + client_pb2.MethodSettings( + selector=method_example1, + auto_populated_fields=[ + "squid", + "octopus", + ], + ), + client_pb2.MethodSettings( + selector=method_example2, + auto_populated_fields=["squid", "octopus"], + ), + ] + with pytest.raises(api.MethodSettingsError) as ex: + api_schema.enforce_valid_method_settings(methodsettings) + + error_yaml = yaml.safe_load(ex.value.args[0]) + + assert re.match( + ".*squid.*not.*string.*", + error_yaml[method_example1][0].lower() + ) + assert re.match( + ".*squid.*not.*uuid4.*", + error_yaml[method_example1][1].lower() + ) + assert re.match( + ".*octopus.*not.*uuid4.*", + error_yaml[method_example1][2].lower() + ) + assert re.match( + ".*method.*not found.*", + error_yaml[method_example2][0].lower() + ) From 086cda30659b5a12432a2d7703a83272b2ace6a1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 22 Mar 2024 06:43:14 -0400 Subject: [PATCH 1127/1339] feat: Automatically populate uuid4 
fields (#1985) Co-authored-by: Victor Chudnovsky --- .../.github/workflows/tests.yaml | 2 +- .../%sub/services/%service/client.py.j2 | 24 + .../%name_%version/%sub/test_%service.py.j2 | 80 +- .../%sub/services/%service/_client_macros.j2 | 26 + .../%sub/services/%service/async_client.py.j2 | 6 + .../%sub/services/%service/client.py.j2 | 3 + .../%name_%version/%sub/test_%service.py.j2 | 7 +- .../gapic/%name_%version/%sub/test_macros.j2 | 157 +- packages/gapic-generator/noxfile.py | 2 +- packages/gapic-generator/setup.py | 5 +- .../unit/gapic/asset_v1/test_asset_service.py | 1359 +++++++++++- .../credentials_v1/test_iam_credentials.py | 226 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 1061 +++++++++- .../logging_v2/test_config_service_v2.py | 1848 ++++++++++++++++- .../logging_v2/test_logging_service_v2.py | 279 ++- .../logging_v2/test_metrics_service_v2.py | 293 ++- .../unit/gapic/redis_v1/test_cloud_redis.py | 636 +++++- .../tests/system/test_unary.py | 40 + 18 files changed, 5812 insertions(+), 242 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 354aff4a577f..e8a914f0e731 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -14,7 +14,7 @@ concurrency: cancel-in-progress: true env: - SHOWCASE_VERSION: 0.31.0 + SHOWCASE_VERSION: 0.32.0 PROTOC_VERSION: 3.20.2 jobs: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 02e97a85fef9..aff47ad91302 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -6,6 +6,9 @@ from collections import 
OrderedDict import os import re from typing import Callable, Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast +{% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} +import uuid +{% endif %} {% if service.any_deprecated %} import warnings {% endif %} @@ -473,6 +476,27 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) {% endif %} +{# + Automatically populate UUID4 fields according to + https://google.aip.dev/client-libraries/4235 when the + field satisfies either of: + - The field supports explicit presence and has not been set by the user. + - The field doesn't support explicit presence, and its value is the empty + string (i.e. the default value). +#} +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + {% if method.input.fields[auto_populated_field].proto3_optional %} + if '{{ auto_populated_field }}' not in request: + {% else %} + if not request.{{ auto_populated_field }}: + {% endif %} + request.{{ auto_populated_field }} = str(uuid.uuid4()) +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} + # Send the request. 
{%+ if not method.void %}response = {% endif %}rpc( {% if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6e5469c08713..5cc8a9b80f29 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -3,6 +3,9 @@ {% block content %} import os +{% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} +import re +{% endif %} # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -67,6 +70,7 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} +{% with uuid4_re = "[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" %} def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -513,6 +517,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl dict, ]) def test_{{ method_name }}(request_type, transport: str = 'grpc'): + {% with auto_populated_field_sample_value = "explicit value for autopopulate-able field" %} client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -521,6 +526,18 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() + + {# Set UUID4 fields so that they are not automatically populated. 
#} + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + if isinstance(request, dict): + request['{{ auto_populated_field }}'] = "{{ auto_populated_field_sample_value }}" + else: + request.{{ auto_populated_field }} = "{{ auto_populated_field_sample_value }}" + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} {% if method.client_streaming %} requests = [request] {% endif %} @@ -568,7 +585,15 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if method.client_streaming %} assert next(args[0]) == request {% else %} - assert args[0] == {{ method.input.ident }}() + request = {{ method.input.ident }}() + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + request.{{ auto_populated_field }} = "{{ auto_populated_field_sample_value }}" + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} + assert args[0] == request {% endif %} # Establish that the response is the type that we expect. 
@@ -608,6 +633,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% endif %}{# end oneof/optional #} {% endfor %} {% endif %} + {% endwith %}{# auto_populated_field_sample_value #} {% if not method.client_streaming %} @@ -629,8 +655,59 @@ def test_{{ method_name }}_empty_call(): {% if method.client_streaming %} assert next(args[0]) == request {% else %} +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + # clear UUID field so that the check below succeeds + args[0].{{ auto_populated_field }} = None +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} assert args[0] == {{ method.input.ident }}() {% endif %} + + +def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = {{ method.input.ident }}( + {% for field in method.input.fields.values() if field.ident|string() == "str" and not field.uuid4 %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + client.{{ method_name }}(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + # clear UUID field so that the check below succeeds + args[0].{{ auto_populated_field }} = None +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} + assert args[0] == {{ method.input.ident }}( + {% for field in method.input.fields.values() if field.ident|string() == "str" and not field.uuid4 %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) {% endif %} @@ -2364,4 +2441,5 @@ def test_client_ctx(): pass close.assert_called() +{% endwith %}{# uuid4_re #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 9b0d07629e4b..8d08f0ed423d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -183,6 +183,8 @@ ) {% endif %} {# method.explicit_routing #} +{{ auto_populate_uuid4_fields(api, method) }} + # Validate the universe domain. 
self._validate_universe_domain() @@ -265,3 +267,27 @@ {% macro define_extended_operation_subclass(extended_operation) %} {% endmacro %} + +{% macro auto_populate_uuid4_fields(api, method) %} +{# + Automatically populate UUID4 fields according to + https://google.aip.dev/client-libraries/4235 when the + field satisfies either of: + - The field supports explicit presence and has not been set by the user. + - The field doesn't support explicit presence, and its value is the empty + string (i.e. the default value). + When using this macro, ensure the calling template generates a line `import uuid` +#} +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + {% if method.input.fields[auto_populated_field].proto3_optional %} + if '{{ auto_populated_field }}' not in request: + {% else %} + if not request.{{ auto_populated_field }}: + {% endif %} + request.{{ auto_populated_field }} = str(uuid.uuid4()) +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} +{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 79ca0a1b78aa..de18fb5c2a64 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -1,11 +1,15 @@ {% extends "_base.py.j2" %} {% block content %} +{% import "%namespace/%name_%version/%sub/services/%service/_client_macros.j2" as macros %} from collections import OrderedDict import functools import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if 
service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +{% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} +import uuid +{% endif %} {% if service.any_deprecated %} import warnings {% endif %} @@ -386,6 +390,8 @@ class {{ service.async_client_name }}: ) {% endif %} +{{ macros.auto_populate_uuid4_fields(api, method) }} + # Validate the universe domain. self._client._validate_universe_domain() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 80bd73bdaccb..ddeca3862704 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -10,6 +10,9 @@ import functools import os import re from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast +{% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} +import uuid +{% endif %} import warnings {% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3830c93d7bb2..ebc96487a1a3 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -4,6 +4,9 @@ {% import "tests/unit/gapic/%name_%version/%sub/test_macros.j2" as test_macros %} import os +{% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} +import re +{% endif %} # try/except added for compatibility with python < 3.8 try: from unittest import mock @@ -849,10 +852,10 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% for method in service.methods.values() if 'grpc' in opts.transport %}{# method_name #} {% if method.extended_lro %} -{{ test_macros.grpc_required_tests(method, service, full_extended_lro=True) }} +{{ test_macros.grpc_required_tests(method, service, api, full_extended_lro=True) }} {% endif %} -{{ test_macros.grpc_required_tests(method, service) }} +{{ test_macros.grpc_required_tests(method, service, api) }} {% endfor %} {# method in methods for grpc #} {% for method in service.methods.values() if 'rest' in opts.transport %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 40238952b9a3..769f49383d02 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1,10 +1,12 @@ -{% macro grpc_required_tests(method, service, full_extended_lro=False) %} +{% macro 
grpc_required_tests(method, service, api, full_extended_lro=False) %} {% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} +{% with uuid4_re = "[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, ]) def test_{{ method_name }}(request_type, transport: str = 'grpc'): + {% with auto_populated_field_sample_value = "explicit value for autopopulate-able field" %} client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13,6 +15,17 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() + {# Set UUID4 fields so that they are not automatically populated. 
#} + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + if isinstance(request, dict): + request['{{ auto_populated_field }}'] = "{{ auto_populated_field_sample_value }}" + else: + request.{{ auto_populated_field }} = "{{ auto_populated_field_sample_value }}" + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} {% if method.client_streaming %} requests = [request] {% endif %} @@ -58,7 +71,15 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if method.client_streaming %} assert next(args[0]) == request {% else %} - assert args[0] == {{ method.input.ident }}() + request = {{ method.input.ident }}() + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + request.{{ auto_populated_field }} = "{{ auto_populated_field_sample_value }}" + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} + assert args[0] == request {% endif %} # Establish that the response is the type that we expect. 
@@ -98,6 +119,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% endif %}{# end oneof/optional #} {% endfor %} {% endif %} + {% endwith %}{# auto_populated_field_sample_value #} {% if not method.client_streaming %} @@ -119,13 +141,121 @@ def test_{{ method_name }}_empty_call(): {% if method.client_streaming %} assert next(args[0]) == request {% else %} + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + # clear UUID field so that the check below succeeds + args[0].{{ auto_populated_field }} = None + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} assert args[0] == {{ method.input.ident }}() {% endif %} + + +def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = {{ method.input.ident }}( + {% for field in method.input.fields.values() if field.ident|string() == "str" and not field.uuid4 %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + client.{{ method_name }}(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + # clear UUID field so that the check below succeeds + args[0].{{ auto_populated_field }} = None +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} + assert args[0] == {{ method.input.ident }}( + {% for field in method.input.fields.values() if field.ident|string() == "str" and not field.uuid4 %} + {{ field.name }}={{ field.mock_value }}, + {% endfor %} + ) {% endif %} {% if not full_extended_lro %} +{% if not method.client_streaming %} +@pytest.mark.asyncio +async def test_{{ method_name }}_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ {% if method.void %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + {% elif not method.client_streaming and method.server_streaming %} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% elif method.client_streaming and method.server_streaming %} + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ '' }} + {%- if not method.client_streaming and not method.server_streaming -%} + grpc_helpers_async.FakeUnaryUnaryCall + {%- else -%} + grpc_helpers_async.FakeStreamUnaryCall + {%- endif -%}({{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + )) + {% endif %} + response = await client.{{ method_name }}() + call.assert_called() + _, args, _ = call.mock_calls[0] + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + # clear UUID field so that the check below succeeds + args[0].{{ auto_populated_field }} = None + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} + assert args[0] == {{ method.input.ident }}() +{% endif %} + @pytest.mark.asyncio async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): + {% with 
auto_populated_field_sample_value = "explicit value for autopopulate-able field" %} client = {{ service.async_client_name }}( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -134,6 +264,17 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() + {# Set UUID4 fields so that they are not automatically populated. #} + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + if isinstance(request, dict): + request['{{ auto_populated_field }}'] = "{{ auto_populated_field_sample_value }}" + else: + request.{{ auto_populated_field }} = "{{ auto_populated_field_sample_value }}" + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} {% if method.client_streaming %} requests = [request] {% endif %} @@ -182,7 +323,15 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ {% if method.client_streaming %} assert next(args[0]) == request {% else %} - assert args[0] == {{ method.input.ident }}() + request = {{ method.input.ident }}() + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + request.{{ auto_populated_field }} = "{{ auto_populated_field_sample_value }}" + {% endfor %} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} + assert args[0] == request {% endif %} # Establish that the response is the type that we expect. 
@@ -218,6 +367,7 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ {% endif %}{# oneof/optional #} {% endfor %} {% endif %} + {% endwith %}{# auto_populated_field_sample_value #} @pytest.mark.asyncio @@ -865,6 +1015,7 @@ def test_{{ method_name }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() assert response.raw_page is response {% endif %}{# method.paged_result_field #}{% endwith %} +{% endwith %}{# uuid4_re #} {% endmacro %} {% macro rest_required_tests(method, service, numeric_enums=False, full_extended_lro=False) %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 8aa70a80215e..a7e49ac43278 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.31.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.32.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2b243db84219..a1e65f71b71c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -31,7 +31,10 @@ "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", - "jinja2 >= 2.10", + # 2.11.0 is required which adds the `default` argument to `jinja-filters.map()` + # https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.map + # https://jinja.palletsprojects.com/en/2.11.x/changelog/#version-2-11-0 + "jinja2 >= 2.11", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pypandoc >= 1.4", "PyYAML >= 5.1.1", diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 69d0c6c12f5f..ec79f660bca0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -793,7 +793,8 @@ def test_export_assets(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() + request = asset_service.ExportAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -816,6 +817,55 @@ def test_export_assets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest() + +def test_export_assets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.ExportAssetsRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + client.export_assets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_export_assets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.export_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest() + @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): client = AssetServiceAsyncClient( @@ -840,7 +890,8 @@ async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() + request = asset_service.ExportAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -941,7 +992,8 @@ def test_list_assets(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() + request = asset_service.ListAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsPager) @@ -965,6 +1017,57 @@ def test_list_assets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest() + +def test_list_assets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.ListAssetsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + client.list_assets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_assets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest() + @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): client = AssetServiceAsyncClient( @@ -989,7 +1092,8 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() + request = asset_service.ListAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListAssetsAsyncPager) @@ -1364,7 +1468,8 @@ def test_batch_get_assets_history(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + request = asset_service.BatchGetAssetsHistoryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) @@ -1387,6 +1492,54 @@ def test_batch_get_assets_history_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + +def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.BatchGetAssetsHistoryRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + client.batch_get_assets_history(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_batch_get_assets_history_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( + )) + response = await client.batch_get_assets_history() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceAsyncClient( @@ -1410,7 +1563,8 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + request = asset_service.BatchGetAssetsHistoryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) @@ -1515,7 +1669,8 @@ def test_create_feed(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() + request = asset_service.CreateFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) @@ -1543,6 +1698,61 @@ def test_create_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest() + +def test_create_feed_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.CreateFeedRequest( + parent='parent_value', + feed_id='feed_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + client.create_feed(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest( + parent='parent_value', + feed_id='feed_id_value', + ) + +@pytest.mark.asyncio +async def test_create_feed_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + response = await client.create_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest() + @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): client = AssetServiceAsyncClient( @@ -1571,7 +1781,8 @@ async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() + request = asset_service.CreateFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.Feed) @@ -1763,7 +1974,8 @@ def test_get_feed(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() + request = asset_service.GetFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) @@ -1791,6 +2003,59 @@ def test_get_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest() + +def test_get_feed_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.GetFeedRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + client.get_feed(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_feed_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + response = await client.get_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest() + @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): client = AssetServiceAsyncClient( @@ -1819,7 +2084,8 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() + request = asset_service.GetFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) @@ -2006,7 +2272,8 @@ def test_list_feeds(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() + request = asset_service.ListFeedsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.ListFeedsResponse) @@ -2029,6 +2296,54 @@ def test_list_feeds_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest() + +def test_list_feeds_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.ListFeedsRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + client.list_feeds(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_feeds_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( + )) + response = await client.list_feeds() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest() + @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): client = AssetServiceAsyncClient( @@ -2052,7 +2367,8 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() + request = asset_service.ListFeedsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.ListFeedsResponse) @@ -2239,7 +2555,8 @@ def test_update_feed(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() + request = asset_service.UpdateFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) @@ -2267,6 +2584,57 @@ def test_update_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateFeedRequest() + +def test_update_feed_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.UpdateFeedRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + client.update_feed(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest( + ) + +@pytest.mark.asyncio +async def test_update_feed_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + response = await client.update_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest() + @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): client = AssetServiceAsyncClient( @@ -2295,7 +2663,8 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() + request = asset_service.UpdateFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.Feed) @@ -2481,7 +2850,8 @@ def test_delete_feed(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() + request = asset_service.DeleteFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None @@ -2504,6 +2874,53 @@ def test_delete_feed_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest() + +def test_delete_feed_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.DeleteFeedRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + client.delete_feed(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_feed_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_feed() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest() + @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): client = AssetServiceAsyncClient( @@ -2526,7 +2943,8 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=a # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() + request = asset_service.DeleteFeedRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2709,7 +3127,8 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() + request = asset_service.SearchAllResourcesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesPager) @@ -2733,6 +3152,61 @@ def test_search_all_resources_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest() + +def test_search_all_resources_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = asset_service.SearchAllResourcesRequest( + scope='scope_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + client.search_all_resources(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest( + scope='scope_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + ) + +@pytest.mark.asyncio +async def test_search_all_resources_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + )) + response = await client.search_all_resources() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest() + @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceAsyncClient( @@ -2757,7 +3231,8 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() + request = asset_service.SearchAllResourcesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllResourcesAsyncPager) @@ -3153,7 +3628,8 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() + request = asset_service.SearchAllIamPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.SearchAllIamPoliciesPager) @@ -3177,6 +3653,61 @@ def test_search_all_iam_policies_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest() + +def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.SearchAllIamPoliciesRequest( + scope='scope_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + client.search_all_iam_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllIamPoliciesRequest( + scope='scope_value', + query='query_value', + page_token='page_token_value', + order_by='order_by_value', + ) + +@pytest.mark.asyncio +async def test_search_all_iam_policies_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + )) + response = await client.search_all_iam_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllIamPoliciesRequest() + @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceAsyncClient( @@ -3201,7 +3732,8 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() + request = asset_service.SearchAllIamPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.SearchAllIamPoliciesAsyncPager) @@ -3587,7 +4119,8 @@ def test_analyze_iam_policy(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() + request = asset_service.AnalyzeIamPolicyRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) @@ -3611,6 +4144,55 @@ def test_analyze_iam_policy_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest() + +def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.AnalyzeIamPolicyRequest( + saved_analysis_query='saved_analysis_query_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + client.analyze_iam_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyRequest( + saved_analysis_query='saved_analysis_query_value', + ) + +@pytest.mark.asyncio +async def test_analyze_iam_policy_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + )) + response = await client.analyze_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyRequest() + @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceAsyncClient( @@ -3635,7 +4217,8 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() + request = asset_service.AnalyzeIamPolicyRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) @@ -3717,43 +4300,93 @@ async def test_analyze_iam_policy_field_headers_async(): def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_analyze_iam_policy_longrunning_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + client.analyze_iam_policy_longrunning() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + + +def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = asset_service.AnalyzeIamPolicyLongrunningRequest( + saved_analysis_query='saved_analysis_query_value', + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.analyze_iam_policy_longrunning), '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name='operations/spam') - response = client.analyze_iam_policy_longrunning(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 + client.analyze_iam_policy_longrunning(request=request) + call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) - + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest( + saved_analysis_query='saved_analysis_query_value', + ) -def test_analyze_iam_policy_longrunning_empty_call(): +@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( + client = AssetServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport='grpc_asyncio', ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.analyze_iam_policy_longrunning), '__call__') as call: - client.analyze_iam_policy_longrunning() + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.analyze_iam_policy_longrunning() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() @@ -3782,7 +4415,8 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3882,7 +4516,8 @@ def test_analyze_move(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() + request = asset_service.AnalyzeMoveRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.AnalyzeMoveResponse) @@ -3905,6 +4540,56 @@ def test_analyze_move_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeMoveRequest() + +def test_analyze_move_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = asset_service.AnalyzeMoveRequest( + resource='resource_value', + destination_parent='destination_parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + client.analyze_move(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest( + resource='resource_value', + destination_parent='destination_parent_value', + ) + +@pytest.mark.asyncio +async def test_analyze_move_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( + )) + response = await client.analyze_move() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + @pytest.mark.asyncio async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceAsyncClient( @@ -3928,7 +4613,8 @@ async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type= # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() + request = asset_service.AnalyzeMoveRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.AnalyzeMoveResponse) @@ -4030,7 +4716,8 @@ def test_query_assets(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() + request = asset_service.QueryAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) @@ -4055,6 +4742,62 @@ def test_query_assets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.QueryAssetsRequest() + +def test_query_assets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.QueryAssetsRequest( + parent='parent_value', + statement='statement_value', + job_reference='job_reference_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + client.query_assets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest( + parent='parent_value', + statement='statement_value', + job_reference='job_reference_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_query_assets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + )) + response = await client.query_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + @pytest.mark.asyncio async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): client = AssetServiceAsyncClient( @@ -4080,7 +4823,8 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() + request = asset_service.QueryAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.QueryAssetsResponse) @@ -4186,7 +4930,8 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() + request = asset_service.CreateSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.SavedQuery) @@ -4213,6 +4958,60 @@ def test_create_saved_query_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateSavedQueryRequest() + +def test_create_saved_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.CreateSavedQueryRequest( + parent='parent_value', + saved_query_id='saved_query_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + client.create_saved_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest( + parent='parent_value', + saved_query_id='saved_query_id_value', + ) + +@pytest.mark.asyncio +async def test_create_saved_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.create_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + @pytest.mark.asyncio async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceAsyncClient( @@ -4240,7 +5039,8 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() + request = asset_service.CreateSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) @@ -4450,7 +5250,8 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() + request = asset_service.GetSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) @@ -4477,6 +5278,58 @@ def test_get_saved_query_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetSavedQueryRequest() + +def test_get_saved_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.GetSavedQueryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + client.get_saved_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_saved_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.get_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + @pytest.mark.asyncio async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceAsyncClient( @@ -4504,7 +5357,8 @@ async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() + request = asset_service.GetSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) @@ -4691,7 +5545,8 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() + request = asset_service.ListSavedQueriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSavedQueriesPager) @@ -4715,6 +5570,59 @@ def test_list_saved_queries_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListSavedQueriesRequest() + +def test_list_saved_queries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.ListSavedQueriesRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + client.list_saved_queries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest( + parent='parent_value', + filter='filter_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_saved_queries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_saved_queries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + @pytest.mark.asyncio async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceAsyncClient( @@ -4739,7 +5647,8 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() + request = asset_service.ListSavedQueriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSavedQueriesAsyncPager) @@ -5118,7 +6027,8 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() + request = asset_service.UpdateSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) @@ -5145,6 +6055,56 @@ def test_update_saved_query_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateSavedQueryRequest() + +def test_update_saved_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.UpdateSavedQueryRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + client.update_saved_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest( + ) + +@pytest.mark.asyncio +async def test_update_saved_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + response = await client.update_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + @pytest.mark.asyncio async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceAsyncClient( @@ -5172,7 +6132,8 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() + request = asset_service.UpdateSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.SavedQuery) @@ -5367,7 +6328,8 @@ def test_delete_saved_query(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() + request = asset_service.DeleteSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None @@ -5390,6 +6352,53 @@ def test_delete_saved_query_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteSavedQueryRequest() + +def test_delete_saved_query_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.DeleteSavedQueryRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + client.delete_saved_query(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_saved_query_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_saved_query() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + @pytest.mark.asyncio async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceAsyncClient( @@ -5412,7 +6421,8 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() + request = asset_service.DeleteSavedQueryRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -5594,7 +6604,8 @@ def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc') # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + request = asset_service.BatchGetEffectiveIamPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) @@ -5617,6 +6628,54 @@ def test_batch_get_effective_iam_policies_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + +def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.BatchGetEffectiveIamPoliciesRequest( + scope='scope_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + client.batch_get_effective_iam_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest( + scope='scope_value', + ) + +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( + )) + response = await client.batch_get_effective_iam_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceAsyncClient( @@ -5640,7 +6699,8 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + request = asset_service.BatchGetEffectiveIamPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) @@ -5741,7 +6801,8 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + request = asset_service.AnalyzeOrgPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) @@ -5765,6 +6826,61 @@ def test_analyze_org_policies_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + +def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.AnalyzeOrgPoliciesRequest( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + client.analyze_org_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_analyze_org_policies_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + )) + response = await client.analyze_org_policies() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + @pytest.mark.asyncio async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceAsyncClient( @@ -5789,7 +6905,8 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + request = asset_service.AnalyzeOrgPoliciesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPoliciesAsyncPager) @@ -6185,7 +7302,8 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) @@ -6209,6 +7327,61 @@ def test_analyze_org_policy_governed_containers_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + +def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + client.analyze_org_policy_governed_containers(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + )) + response = await client.analyze_org_policy_governed_containers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceAsyncClient( @@ -6233,7 +7406,8 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager) @@ -6629,7 +7803,8 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) @@ -6653,6 +7828,61 @@ def test_analyze_org_policy_governed_assets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + +def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + client.analyze_org_policy_governed_assets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest( + scope='scope_value', + constraint='constraint_value', + filter='filter_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + )) + response = await client.analyze_org_policy_governed_assets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceAsyncClient( @@ -6677,7 +7907,8 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 1d705928de01..f12e4689fd11 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -785,7 +785,8 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() + request = common.GenerateAccessTokenRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) @@ -809,6 +810,55 @@ def test_generate_access_token_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateAccessTokenRequest() + +def test_generate_access_token_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = common.GenerateAccessTokenRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + client.generate_access_token(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_generate_access_token_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( + access_token='access_token_value', + )) + response = await client.generate_access_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest() + @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsAsyncClient( @@ -833,7 +883,8 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() + request = common.GenerateAccessTokenRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateAccessTokenResponse) @@ -1043,7 +1094,8 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() + request = common.GenerateIdTokenRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) @@ -1067,6 +1119,57 @@ def test_generate_id_token_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateIdTokenRequest() + +def test_generate_id_token_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = common.GenerateIdTokenRequest( + name='name_value', + audience='audience_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + client.generate_id_token(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest( + name='name_value', + audience='audience_value', + ) + +@pytest.mark.asyncio +async def test_generate_id_token_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( + token='token_value', + )) + response = await client.generate_id_token() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest() + @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsAsyncClient( @@ -1091,7 +1194,8 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() + request = common.GenerateIdTokenRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.GenerateIdTokenResponse) @@ -1306,7 +1410,8 @@ def test_sign_blob(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() + request = common.SignBlobRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) @@ -1331,6 +1436,56 @@ def test_sign_blob_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.SignBlobRequest() + +def test_sign_blob_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = common.SignBlobRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + client.sign_blob(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_sign_blob_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + )) + response = await client.sign_blob() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest() + @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): client = IAMCredentialsAsyncClient( @@ -1356,7 +1511,8 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() + request = common.SignBlobRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.SignBlobResponse) @@ -1562,7 +1718,8 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() + request = common.SignJwtRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, common.SignJwtResponse) @@ -1587,6 +1744,58 @@ def test_sign_jwt_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == common.SignJwtRequest() + +def test_sign_jwt_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = common.SignJwtRequest( + name='name_value', + payload='payload_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + client.sign_jwt(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest( + name='name_value', + payload='payload_value', + ) + +@pytest.mark.asyncio +async def test_sign_jwt_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + )) + response = await client.sign_jwt() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest() + @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): client = IAMCredentialsAsyncClient( @@ -1612,7 +1821,8 @@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() + request = common.SignJwtRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.SignJwtResponse) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index a21c154713ed..5729663c9cd0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -809,7 +809,8 @@ def test_get_trigger(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() + request = eventarc.GetTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, trigger.Trigger) @@ -837,6 +838,59 @@ def test_get_trigger_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetTriggerRequest() + +def test_get_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetTriggerRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + client.get_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_trigger_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + )) + response = await client.get_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest() + @pytest.mark.asyncio async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): client = EventarcAsyncClient( @@ -865,7 +919,8 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() + request = eventarc.GetTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, trigger.Trigger) @@ -1054,7 +1109,8 @@ def test_list_triggers(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() + request = eventarc.ListTriggersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTriggersPager) @@ -1079,6 +1135,62 @@ def test_list_triggers_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListTriggersRequest() + +def test_list_triggers_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListTriggersRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + client.list_triggers(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_list_triggers_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_triggers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest() + @pytest.mark.asyncio async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): client = EventarcAsyncClient( @@ -1104,7 +1216,8 @@ async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() + request = eventarc.ListTriggersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTriggersAsyncPager) @@ -1479,7 +1592,8 @@ def test_create_trigger(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() + request = eventarc.CreateTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1502,6 +1616,57 @@ def test_create_trigger_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateTriggerRequest() + +def test_create_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateTriggerRequest( + parent='parent_value', + trigger_id='trigger_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + client.create_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest( + parent='parent_value', + trigger_id='trigger_id_value', + ) + +@pytest.mark.asyncio +async def test_create_trigger_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest() + @pytest.mark.asyncio async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): client = EventarcAsyncClient( @@ -1526,7 +1691,8 @@ async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() + request = eventarc.CreateTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1729,7 +1895,8 @@ def test_update_trigger(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() + request = eventarc.UpdateTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1752,6 +1919,53 @@ def test_update_trigger_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateTriggerRequest() + +def test_update_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdateTriggerRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + client.update_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest( + ) + +@pytest.mark.asyncio +async def test_update_trigger_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest() + @pytest.mark.asyncio async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): client = EventarcAsyncClient( @@ -1776,7 +1990,8 @@ async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() + request = eventarc.UpdateTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1979,7 +2194,8 @@ def test_delete_trigger(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() + request = eventarc.DeleteTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -2002,6 +2218,57 @@ def test_delete_trigger_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteTriggerRequest() + +def test_delete_trigger_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeleteTriggerRequest( + name='name_value', + etag='etag_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + client.delete_trigger(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest( + name='name_value', + etag='etag_value', + ) + +@pytest.mark.asyncio +async def test_delete_trigger_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest() + @pytest.mark.asyncio async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): client = EventarcAsyncClient( @@ -2026,7 +2293,8 @@ async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() + request = eventarc.DeleteTriggerRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2227,7 +2495,8 @@ def test_get_channel(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() + request = eventarc.GetChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, channel.Channel) @@ -2256,23 +2525,48 @@ def test_get_channel_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelRequest() + +def test_get_channel_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = eventarc.GetChannelRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + client.get_channel(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest( + name='name_value', + ) + @pytest.mark.asyncio -async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): +async def test_get_channel_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc_asyncio', ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.get_channel), '__call__') as call: # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( name='name_value', uid='uid_value', provider='provider_value', @@ -2280,15 +2574,45 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e activation_token='activation_token_value', crypto_key_name='crypto_key_name_value', )) - response = await client.get_channel(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + response = await client.get_channel() + call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelRequest() - # Establish that the response is the type that we expect. 
- assert isinstance(response, channel.Channel) +@pytest.mark.asyncio +async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = eventarc.GetChannelRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, channel.Channel) assert response.name == 'name_value' assert response.uid == 'uid_value' assert response.provider == 'provider_value' @@ -2475,7 +2799,8 @@ def test_list_channels(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() + request = eventarc.ListChannelsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListChannelsPager) @@ -2500,6 +2825,60 @@ def test_list_channels_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelsRequest() + +def test_list_channels_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListChannelsRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + client.list_channels(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + ) + +@pytest.mark.asyncio +async def test_list_channels_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_channels() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() + @pytest.mark.asyncio async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): client = EventarcAsyncClient( @@ -2525,7 +2904,8 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() + request = eventarc.ListChannelsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelsAsyncPager) @@ -2900,7 +3280,8 @@ def test_create_channel(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() + request = eventarc.CreateChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2923,6 +3304,57 @@ def test_create_channel_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelRequest() + +def test_create_channel_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateChannelRequest( + parent='parent_value', + channel_id='channel_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + client.create_channel(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest( + parent='parent_value', + channel_id='channel_id_value', + ) + +@pytest.mark.asyncio +async def test_create_channel_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + @pytest.mark.asyncio async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): client = EventarcAsyncClient( @@ -2947,7 +3379,8 @@ async def test_create_channel_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() + request = eventarc.CreateChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3150,7 +3583,8 @@ def test_update_channel(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() + request = eventarc.UpdateChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3173,6 +3607,53 @@ def test_update_channel_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateChannelRequest() + +def test_update_channel_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdateChannelRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + client.update_channel(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest( + ) + +@pytest.mark.asyncio +async def test_update_channel_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + @pytest.mark.asyncio async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): client = EventarcAsyncClient( @@ -3197,7 +3678,8 @@ async def test_update_channel_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() + request = eventarc.UpdateChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3390,7 +3872,8 @@ def test_delete_channel(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() + request = eventarc.DeleteChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -3413,6 +3896,55 @@ def test_delete_channel_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelRequest() + +def test_delete_channel_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeleteChannelRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + client.delete_channel(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_channel_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_channel() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() + @pytest.mark.asyncio async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): client = EventarcAsyncClient( @@ -3437,7 +3969,8 @@ async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() + request = eventarc.DeleteChannelRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3623,7 +4156,8 @@ def test_get_provider(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() + request = eventarc.GetProviderRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, discovery.Provider) @@ -3648,6 +4182,56 @@ def test_get_provider_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetProviderRequest() + +def test_get_provider_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = eventarc.GetProviderRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + client.get_provider(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_provider_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( + name='name_value', + display_name='display_name_value', + )) + response = await client.get_provider() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() + @pytest.mark.asyncio async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): client = EventarcAsyncClient( @@ -3673,7 +4257,8 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type= # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() + request = eventarc.GetProviderRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, discovery.Provider) @@ -3859,7 +4444,8 @@ def test_list_providers(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() + request = eventarc.ListProvidersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListProvidersPager) @@ -3884,10 +4470,66 @@ def test_list_providers_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListProvidersRequest() -@pytest.mark.asyncio -async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): - client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + +def test_list_providers_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListProvidersRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + client.list_providers(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest( + parent='parent_value', + page_token='page_token_value', + order_by='order_by_value', + filter='filter_value', + ) + +@pytest.mark.asyncio +async def test_list_providers_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_providers() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + +@pytest.mark.asyncio +async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3909,7 +4551,8 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() + request = eventarc.ListProvidersRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListProvidersAsyncPager) @@ -4289,7 +4932,8 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() + request = eventarc.GetChannelConnectionRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, channel_connection.ChannelConnection) @@ -4316,6 +4960,58 @@ def test_get_channel_connection_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelConnectionRequest() + +def test_get_channel_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetChannelConnectionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + client.get_channel_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_channel_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + )) + response = await client.get_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + @pytest.mark.asyncio async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): client = EventarcAsyncClient( @@ -4343,7 +5039,8 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() + request = eventarc.GetChannelConnectionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, channel_connection.ChannelConnection) @@ -4531,7 +5228,8 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() + request = eventarc.ListChannelConnectionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelConnectionsPager) @@ -4556,6 +5254,58 @@ def test_list_channel_connections_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelConnectionsRequest() + +def test_list_channel_connections_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.ListChannelConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + client.list_channel_connections(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_channel_connections_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_channel_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest() + @pytest.mark.asyncio async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcAsyncClient( @@ -4581,7 +5331,8 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() + request = eventarc.ListChannelConnectionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListChannelConnectionsAsyncPager) @@ -4956,7 +5707,8 @@ def test_create_channel_connection(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() + request = eventarc.CreateChannelConnectionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -4979,6 +5731,57 @@ def test_create_channel_connection_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelConnectionRequest() + +def test_create_channel_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateChannelConnectionRequest( + parent='parent_value', + channel_connection_id='channel_connection_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + client.create_channel_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest( + parent='parent_value', + channel_connection_id='channel_connection_id_value', + ) + +@pytest.mark.asyncio +async def test_create_channel_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + @pytest.mark.asyncio async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcAsyncClient( @@ -5003,7 +5806,8 @@ async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() + request = eventarc.CreateChannelConnectionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5206,7 +6010,8 @@ def test_delete_channel_connection(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() + request = eventarc.DeleteChannelConnectionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5229,6 +6034,55 @@ def test_delete_channel_connection_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelConnectionRequest() + +def test_delete_channel_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.DeleteChannelConnectionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + client.delete_channel_connection(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_channel_connection_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_channel_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + @pytest.mark.asyncio async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcAsyncClient( @@ -5253,7 +6107,8 @@ async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() + request = eventarc.DeleteChannelConnectionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5439,7 +6294,8 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() + request = eventarc.GetGoogleChannelConfigRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, google_channel_config.GoogleChannelConfig) @@ -5464,6 +6320,56 @@ def test_get_google_channel_config_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetGoogleChannelConfigRequest() + +def test_get_google_channel_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.GetGoogleChannelConfigRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + client.get_google_channel_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_google_channel_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.get_google_channel_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest() + @pytest.mark.asyncio async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcAsyncClient( @@ -5489,7 +6395,8 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() + request = eventarc.GetGoogleChannelConfigRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, google_channel_config.GoogleChannelConfig) @@ -5675,7 +6582,8 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + request = eventarc.UpdateGoogleChannelConfigRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) @@ -5700,6 +6608,54 @@ def test_update_google_channel_config_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + +def test_update_google_channel_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.UpdateGoogleChannelConfigRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + client.update_google_channel_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest( + ) + +@pytest.mark.asyncio +async def test_update_google_channel_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + response = await client.update_google_channel_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + @pytest.mark.asyncio async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcAsyncClient( @@ -5725,7 +6681,8 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + request = eventarc.UpdateGoogleChannelConfigRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 34b89a6a4a29..e70efd4e43af 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -772,7 +772,8 @@ def test_list_buckets(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request = logging_config.ListBucketsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsPager) @@ -796,6 +797,57 @@ def test_list_buckets_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest() + +def test_list_buckets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListBucketsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + client.list_buckets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_buckets_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_buckets() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest() + @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): client = ConfigServiceV2AsyncClient( @@ -820,7 +872,8 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request = logging_config.ListBucketsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListBucketsAsyncPager) @@ -1202,7 +1255,8 @@ def test_get_bucket(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request = logging_config.GetBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1232,6 +1286,61 @@ def test_get_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest() + +def test_get_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + client.get_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + response = await client.get_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest() + @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1262,7 +1371,8 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request = logging_config.GetBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1368,7 +1478,8 @@ def test_create_bucket_async(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1391,6 +1502,57 @@ def test_create_bucket_async_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() + +def test_create_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent='parent_value', + bucket_id='bucket_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + client.create_bucket_async(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest( + parent='parent_value', + bucket_id='bucket_id_value', + ) + +@pytest.mark.asyncio +async def test_create_bucket_async_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + @pytest.mark.asyncio async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1415,7 +1577,8 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1514,7 +1677,8 @@ def test_update_bucket_async(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1537,6 +1701,55 @@ def test_update_bucket_async_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() + +def test_update_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + client.update_bucket_async(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_bucket_async_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_bucket_async() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + @pytest.mark.asyncio async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1561,7 +1774,8 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1668,7 +1882,8 @@ def test_create_bucket(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1698,6 +1913,63 @@ def test_create_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() + +def test_create_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent='parent_value', + bucket_id='bucket_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + client.create_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest( + parent='parent_value', + bucket_id='bucket_id_value', + ) + +@pytest.mark.asyncio +async def test_create_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + response = await client.create_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1728,7 +2000,8 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request = logging_config.CreateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -1842,7 +2115,8 @@ def test_update_bucket(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogBucket) @@ -1872,6 +2146,61 @@ def test_update_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() + +def test_update_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + client.update_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + response = await client.update_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1902,7 +2231,8 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request = logging_config.UpdateBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogBucket) @@ -2008,7 +2338,8 @@ def test_delete_bucket(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request = logging_config.DeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2031,6 +2362,53 @@ def test_delete_bucket_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest() + +def test_delete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + client.delete_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): client = ConfigServiceV2AsyncClient( @@ -2053,7 +2431,8 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request = logging_config.DeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2152,7 +2531,8 @@ def test_undelete_bucket(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request = logging_config.UndeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2161,16 +2541,63 @@ def test_undelete_bucket(request_type, transport: str = 'grpc'): def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + client.undelete_bucket() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + + +def test_undelete_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.UndeleteBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + client.undelete_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_undelete_bucket_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', + transport='grpc_asyncio', ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.undelete_bucket), '__call__') as call: - client.undelete_bucket() + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest() @@ -2197,7 +2624,8 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request = logging_config.UndeleteBucketRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2298,7 +2726,8 @@ def test_list_views(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request = logging_config.ListViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsPager) @@ -2322,6 +2751,57 @@ def test_list_views_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest() + +def test_list_views_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListViewsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + client.list_views(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_views_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_views() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest() + @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): client = ConfigServiceV2AsyncClient( @@ -2346,7 +2826,8 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request = logging_config.ListViewsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListViewsAsyncPager) @@ -2724,7 +3205,8 @@ def test_get_view(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request = logging_config.GetViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -2750,6 +3232,57 @@ def test_get_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest() + +def test_get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetViewRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + client.get_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client.get_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): client = ConfigServiceV2AsyncClient( @@ -2776,7 +3309,8 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request = logging_config.GetViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -2882,7 +3416,8 @@ def test_create_view(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request = logging_config.CreateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -2908,6 +3443,59 @@ def test_create_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest() + +def test_create_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateViewRequest( + parent='parent_value', + view_id='view_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + client.create_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest( + parent='parent_value', + view_id='view_id_value', + ) + +@pytest.mark.asyncio +async def test_create_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client.create_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): client = ConfigServiceV2AsyncClient( @@ -2934,7 +3522,8 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request = logging_config.CreateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3040,7 +3629,8 @@ def test_update_view(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + request = logging_config.UpdateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3066,6 +3656,57 @@ def test_update_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest() + +def test_update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateViewRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + client.update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client.update_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): client = ConfigServiceV2AsyncClient( @@ -3092,7 +3733,8 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + request = logging_config.UpdateViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogView) @@ -3194,7 +3836,8 @@ def test_delete_view(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request = logging_config.DeleteViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3217,6 +3860,53 @@ def test_delete_view_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest() + +def test_delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteViewRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + client.delete_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_view_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_view() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): client = ConfigServiceV2AsyncClient( @@ -3239,7 +3929,8 @@ async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request = logging_config.DeleteViewRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3340,7 +4031,8 @@ def test_list_sinks(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request = logging_config.ListSinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksPager) @@ -3364,6 +4056,57 @@ def test_list_sinks_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest() + +def test_list_sinks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListSinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + client.list_sinks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_sinks_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_sinks() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): client = ConfigServiceV2AsyncClient( @@ -3388,7 +4131,8 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request = logging_config.ListSinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSinksAsyncPager) @@ -3771,7 +4515,8 @@ def test_get_sink(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request = logging_config.GetSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_config.LogSink) @@ -3802,6 +4547,62 @@ def test_get_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest() + +def test_get_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSinkRequest( + sink_name='sink_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + client.get_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest( + sink_name='sink_name_value', + ) + +@pytest.mark.asyncio +async def test_get_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + response = await client.get_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): client = ConfigServiceV2AsyncClient( @@ -3833,7 +4634,8 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request = logging_config.GetSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4031,7 +4833,8 @@ def test_create_sink(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request = logging_config.CreateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4062,6 +4865,62 @@ def test_create_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest() + +def test_create_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateSinkRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + client.create_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_create_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + response = await client.create_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): client = ConfigServiceV2AsyncClient( @@ -4093,7 +4952,8 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request = logging_config.CreateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4301,7 +5161,8 @@ def test_update_sink(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + request = logging_config.UpdateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4332,6 +5193,62 @@ def test_update_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest() + +def test_update_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSinkRequest( + sink_name='sink_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + client.update_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest( + sink_name='sink_name_value', + ) + +@pytest.mark.asyncio +async def test_update_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + response = await client.update_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest() + @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): client = ConfigServiceV2AsyncClient( @@ -4363,7 +5280,8 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + request = logging_config.UpdateSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogSink) @@ -4572,7 +5490,8 @@ def test_delete_sink(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request = logging_config.DeleteSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -4595,6 +5514,53 @@ def test_delete_sink_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest() + +def test_delete_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteSinkRequest( + sink_name='sink_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + client.delete_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest( + sink_name='sink_name_value', + ) + +@pytest.mark.asyncio +async def test_delete_sink_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_sink() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): client = ConfigServiceV2AsyncClient( @@ -4617,7 +5583,8 @@ async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request = logging_config.DeleteSinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -4798,7 +5765,8 @@ def test_create_link(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + request = logging_config.CreateLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -4821,6 +5789,57 @@ def test_create_link_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateLinkRequest() + +def test_create_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateLinkRequest( + parent='parent_value', + link_id='link_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_link), + '__call__') as call: + client.create_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest( + parent='parent_value', + link_id='link_id_value', + ) + +@pytest.mark.asyncio +async def test_create_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest() + @pytest.mark.asyncio async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( @@ -4845,7 +5864,8 @@ async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + request = logging_config.CreateLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5048,7 +6068,8 @@ def test_delete_link(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() + request = logging_config.DeleteLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -5071,6 +6092,55 @@ def test_delete_link_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteLinkRequest() + +def test_delete_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteLinkRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_link), + '__call__') as call: + client.delete_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest() + @pytest.mark.asyncio async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( @@ -5095,7 +6165,8 @@ async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=l # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() + request = logging_config.DeleteLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -5280,7 +6351,8 @@ def test_list_links(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() + request = logging_config.ListLinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLinksPager) @@ -5304,6 +6376,57 @@ def test_list_links_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListLinksRequest() + +def test_list_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.ListLinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + client.list_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_links_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_links() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest() + @pytest.mark.asyncio async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( @@ -5328,7 +6451,8 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() + request = logging_config.ListLinksRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLinksAsyncPager) @@ -5706,7 +6830,8 @@ def test_get_link(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() + request = logging_config.GetLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) @@ -5732,6 +6857,57 @@ def test_get_link_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetLinkRequest() + +def test_get_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetLinkRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_link), + '__call__') as call: + client.get_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_link_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( + name='name_value', + description='description_value', + lifecycle_state=logging_config.LifecycleState.ACTIVE, + )) + response = await client.get_link() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest() + @pytest.mark.asyncio async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( @@ -5758,7 +6934,8 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() + request = logging_config.GetLinkRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Link) @@ -5944,7 +7121,8 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request = logging_config.ListExclusionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsPager) @@ -5968,6 +7146,57 @@ def test_list_exclusions_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest() + +def test_list_exclusions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListExclusionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + client.list_exclusions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_exclusions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_exclusions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( @@ -5992,7 +7221,8 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request = logging_config.ListExclusionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListExclusionsAsyncPager) @@ -6371,7 +7601,8 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request = logging_config.GetExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6398,6 +7629,58 @@ def test_get_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest() + +def test_get_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetExclusionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + client.get_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.get_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -6425,7 +7708,8 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request = logging_config.GetExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6615,7 +7899,8 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request = logging_config.CreateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6642,6 +7927,58 @@ def test_create_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest() + +def test_create_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateExclusionRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + client.create_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_create_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.create_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -6669,7 +8006,8 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request = logging_config.CreateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6869,7 +8207,8 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request = logging_config.UpdateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -6896,6 +8235,58 @@ def test_update_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest() + +def test_update_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateExclusionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + client.update_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client.update_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -6923,7 +8314,8 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request = logging_config.UpdateExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.LogExclusion) @@ -7128,7 +8520,8 @@ def test_delete_exclusion(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request = logging_config.DeleteExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -7151,6 +8544,53 @@ def test_delete_exclusion_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest() + +def test_delete_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteExclusionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + client.delete_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_exclusion_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_exclusion() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -7173,7 +8613,8 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request = logging_config.DeleteExclusionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -7359,7 +8800,8 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7386,6 +8828,58 @@ def test_get_cmek_settings_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest() + +def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetCmekSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + client.get_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_cmek_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + response = await client.get_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -7413,7 +8907,8 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7521,7 +9016,8 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7548,6 +9044,58 @@ def test_update_cmek_settings_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest() + +def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateCmekSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + client.update_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_cmek_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + response = await client.update_cmek_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -7575,7 +9123,8 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.CmekSettings) @@ -7684,7 +9233,8 @@ def test_get_settings(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request = logging_config.GetSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -7712,6 +9262,59 @@ def test_get_settings_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSettingsRequest() + +def test_get_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + client.get_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client.get_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + @pytest.mark.asyncio async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -7740,7 +9343,8 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request = logging_config.GetSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -7932,7 +9536,8 @@ def test_update_settings(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request = logging_config.UpdateSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -7960,6 +9565,59 @@ def test_update_settings_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSettingsRequest() + +def test_update_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + client.update_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_update_settings_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client.update_settings() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + @pytest.mark.asyncio async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -7988,7 +9646,8 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request = logging_config.UpdateSettingsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_config.Settings) @@ -8184,7 +9843,8 @@ def test_copy_log_entries(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -8207,6 +9867,59 @@ def test_copy_log_entries_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CopyLogEntriesRequest() + +def test_copy_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CopyLogEntriesRequest( + name='name_value', + filter='filter_value', + destination='destination_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + client.copy_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest( + name='name_value', + filter='filter_value', + destination='destination_value', + ) + +@pytest.mark.asyncio +async def test_copy_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.copy_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + @pytest.mark.asyncio async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): client = ConfigServiceV2AsyncClient( @@ -8231,7 +9944,8 @@ async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index d4c9a1168651..b6c611d634e2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -772,7 +772,8 @@ def test_delete_log(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request = logging.DeleteLogRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None @@ -795,6 +796,53 @@ def test_delete_log_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest() + +def test_delete_log_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.DeleteLogRequest( + log_name='log_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + client.delete_log(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest( + log_name='log_name_value', + ) + +@pytest.mark.asyncio +async def test_delete_log_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): client = LoggingServiceV2AsyncClient( @@ -817,7 +865,8 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request = logging.DeleteLogRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -999,7 +1048,8 @@ def test_write_log_entries(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request = logging.WriteLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging.WriteLogEntriesResponse) @@ -1022,6 +1072,54 @@ def test_write_log_entries_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest() + +def test_write_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging.WriteLogEntriesRequest( + log_name='log_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + client.write_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest( + log_name='log_name_value', + ) + +@pytest.mark.asyncio +async def test_write_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( + )) + response = await client.write_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): client = LoggingServiceV2AsyncClient( @@ -1045,7 +1143,8 @@ async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request = logging.WriteLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging.WriteLogEntriesResponse) @@ -1195,7 +1294,8 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request = logging.ListLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesPager) @@ -1219,6 +1319,59 @@ def test_list_log_entries_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest() + +def test_list_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogEntriesRequest( + filter='filter_value', + order_by='order_by_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + client.list_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest( + filter='filter_value', + order_by='order_by_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_log_entries_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_log_entries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): client = LoggingServiceV2AsyncClient( @@ -1243,7 +1396,8 @@ async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request = logging.ListLogEntriesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogEntriesAsyncPager) @@ -1571,7 +1725,8 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) @@ -1595,6 +1750,55 @@ def test_list_monitored_resource_descriptors_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + +def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListMonitoredResourceDescriptorsRequest( + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + client.list_monitored_resource_descriptors(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_monitored_resource_descriptors() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): client = LoggingServiceV2AsyncClient( @@ -1619,7 +1823,8 @@ async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager) @@ -1846,7 +2051,8 @@ def test_list_logs(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request = logging.ListLogsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsPager) @@ -1871,6 +2077,58 @@ def test_list_logs_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest() + +def test_list_logs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + client.list_logs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_logs_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + )) + response = await client.list_logs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): client = LoggingServiceV2AsyncClient( @@ -1896,7 +2154,8 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request = logging.ListLogsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogsAsyncPager) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 70a02b72fdb8..d1543607c906 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -772,7 +772,8 @@ def test_list_log_metrics(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListLogMetricsPager) @@ -796,6 +797,57 @@ def test_list_log_metrics_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest() + +def test_list_log_metrics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_metrics.ListLogMetricsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + client.list_log_metrics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_log_metrics_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_log_metrics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): client = MetricsServiceV2AsyncClient( @@ -820,7 +872,8 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListLogMetricsAsyncPager) @@ -1202,7 +1255,8 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1232,6 +1286,61 @@ def test_get_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest() + +def test_get_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.GetLogMetricRequest( + metric_name='metric_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + client.get_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest( + metric_name='metric_name_value', + ) + +@pytest.mark.asyncio +async def test_get_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client.get_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1262,7 +1371,8 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1458,7 +1568,8 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, logging_metrics.LogMetric) @@ -1488,6 +1599,61 @@ def test_create_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest() + +def test_create_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.CreateLogMetricRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + client.create_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest( + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_create_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client.create_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1518,7 +1684,8 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1724,7 +1891,8 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1754,6 +1922,61 @@ def test_update_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest() + +def test_update_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.UpdateLogMetricRequest( + metric_name='metric_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + client.update_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest( + metric_name='metric_name_value', + ) + +@pytest.mark.asyncio +async def test_update_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client.update_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1784,7 +2007,8 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, logging_metrics.LogMetric) @@ -1982,7 +2206,8 @@ def test_delete_log_metric(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2005,6 +2230,53 @@ def test_delete_log_metric_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest() + +def test_delete_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.DeleteLogMetricRequest( + metric_name='metric_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + client.delete_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest( + metric_name='metric_name_value', + ) + +@pytest.mark.asyncio +async def test_delete_log_metric_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log_metric() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -2027,7 +2299,8 @@ async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 10552af1470e..edb68db57562 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -797,7 +797,8 @@ def test_list_instances(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() + request = cloud_redis.ListInstancesRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInstancesPager) @@ -822,6 +823,58 @@ def test_list_instances_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest() + +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = cloud_redis.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +@pytest.mark.asyncio +async def test_list_instances_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_instances() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest() + @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): client = CloudRedisAsyncClient( @@ -847,7 +900,8 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() + request = cloud_redis.ListInstancesRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesAsyncPager) @@ -1250,7 +1304,8 @@ def test_get_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() + request = cloud_redis.GetInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.Instance) @@ -1300,6 +1355,81 @@ def test_get_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest() + +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.GetInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + )) + response = await client.get_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest() + @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisAsyncClient( @@ -1350,7 +1480,8 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() + request = cloud_redis.GetInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.Instance) @@ -1560,7 +1691,8 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + request = cloud_redis.GetInstanceAuthStringRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.InstanceAuthString) @@ -1584,6 +1716,55 @@ def test_get_instance_auth_string_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + +def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.GetInstanceAuthStringRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + client.get_instance_auth_string(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_instance_auth_string_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + )) + response = await client.get_instance_auth_string() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + @pytest.mark.asyncio async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisAsyncClient( @@ -1608,7 +1789,8 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + request = cloud_redis.GetInstanceAuthStringRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, cloud_redis.InstanceAuthString) @@ -1790,7 +1972,8 @@ def test_create_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() + request = cloud_redis.CreateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -1813,6 +1996,57 @@ def test_create_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest() + +def test_create_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + +@pytest.mark.asyncio +async def test_create_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest() + @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisAsyncClient( @@ -1837,7 +2071,8 @@ async def test_create_instance_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() + request = cloud_redis.CreateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2040,7 +2275,8 @@ def test_update_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() + request = cloud_redis.UpdateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2063,6 +2299,53 @@ def test_update_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpdateInstanceRequest() + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.UpdateInstanceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest( + ) + +@pytest.mark.asyncio +async def test_update_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest() + @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisAsyncClient( @@ -2087,7 +2370,8 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() + request = cloud_redis.UpdateInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2280,7 +2564,8 @@ def test_upgrade_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() + request = cloud_redis.UpgradeInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2303,6 +2588,57 @@ def test_upgrade_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpgradeInstanceRequest() + +def test_upgrade_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.UpgradeInstanceRequest( + name='name_value', + redis_version='redis_version_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + client.upgrade_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest( + name='name_value', + redis_version='redis_version_value', + ) + +@pytest.mark.asyncio +async def test_upgrade_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.upgrade_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest() + @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisAsyncClient( @@ -2327,7 +2663,8 @@ async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_t # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() + request = cloud_redis.UpgradeInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2520,7 +2857,8 @@ def test_import_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() + request = cloud_redis.ImportInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2543,6 +2881,55 @@ def test_import_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ImportInstanceRequest() + +def test_import_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.ImportInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + client.import_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_import_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.import_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest() + @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisAsyncClient( @@ -2567,7 +2954,8 @@ async def test_import_instance_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() + request = cloud_redis.ImportInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2760,7 +3148,8 @@ def test_export_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() + request = cloud_redis.ExportInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -2783,6 +3172,55 @@ def test_export_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ExportInstanceRequest() + +def test_export_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.ExportInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + client.export_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_export_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.export_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest() + @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisAsyncClient( @@ -2807,7 +3245,8 @@ async def test_export_instance_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() + request = cloud_redis.ExportInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3000,7 +3439,8 @@ def test_failover_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() + request = cloud_redis.FailoverInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3023,6 +3463,55 @@ def test_failover_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.FailoverInstanceRequest() + +def test_failover_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.FailoverInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + client.failover_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_failover_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.failover_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest() + @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisAsyncClient( @@ -3047,7 +3536,8 @@ async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_ # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() + request = cloud_redis.FailoverInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3240,7 +3730,8 @@ def test_delete_instance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() + request = cloud_redis.DeleteInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, future.Future) @@ -3263,6 +3754,55 @@ def test_delete_instance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest() + +def test_delete_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.DeleteInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_instance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_instance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest() + @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisAsyncClient( @@ -3287,7 +3827,8 @@ async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() + request = cloud_redis.DeleteInstanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3470,7 +4011,8 @@ def test_reschedule_maintenance(request_type, transport: str = 'grpc'): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + request = cloud_redis.RescheduleMaintenanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) @@ -3493,6 +4035,55 @@ def test_reschedule_maintenance_empty_call(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + +def test_reschedule_maintenance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.RescheduleMaintenanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + client.reschedule_maintenance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest( + name='name_value', + ) + +@pytest.mark.asyncio +async def test_reschedule_maintenance_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.reschedule_maintenance() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + @pytest.mark.asyncio async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisAsyncClient( @@ -3517,7 +4108,8 @@ async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', req # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + request = cloud_redis.RescheduleMaintenanceRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, future.Future) diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index 076771ed3035..52c03df2206d 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -14,25 +14,65 @@ import os import pytest +import re from google.api_core import exceptions from google.rpc import code_pb2 from google import showcase +UUID4_RE = r"[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" + def test_unary_with_request_object(echo): + response = echo.echo(showcase.EchoRequest( + content='The hail in Wales falls mainly on the snails.', + request_id='some_value', + other_request_id='', + )) + assert response.content == 'The hail in Wales falls mainly on the snails.' + assert response.request_id == 'some_value' + assert response.other_request_id == '' + + # Repeat the same test but this time without `request_id`` set + # The `request_id` field should be automatically populated with + # a UUID4 value if it is not set. + # See https://google.aip.dev/client-libraries/4235 response = echo.echo(showcase.EchoRequest( content='The hail in Wales falls mainly on the snails.', )) assert response.content == 'The hail in Wales falls mainly on the snails.' 
+ # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(UUID4_RE, response.request_id) + assert len(response.request_id) == 36 + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(UUID4_RE, response.other_request_id) + assert len(response.other_request_id) == 36 def test_unary_with_dict(echo): + response = echo.echo({ + 'content': 'The hail in Wales falls mainly on the snails.', + 'request_id': 'some_value', + 'other_request_id': '', + }) + assert response.content == 'The hail in Wales falls mainly on the snails.' + assert response.request_id == 'some_value' + assert response.other_request_id == '' + + # Repeat the same test but this time without `request_id`` set + # The `request_id` field should be automatically populated with + # a UUID4 value if it is not set. + # See https://google.aip.dev/client-libraries/4235 response = echo.echo({ 'content': 'The hail in Wales falls mainly on the snails.', }) assert response.content == 'The hail in Wales falls mainly on the snails.' 
+ assert re.match(UUID4_RE, response.request_id) + assert len(response.request_id) == 36 + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(UUID4_RE, response.other_request_id) + assert len(response.other_request_id) == 36 def test_unary_error(echo): From 0d2a806ae12406c6ed09ceae6fc07ceb53ac0097 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 10:47:34 +0000 Subject: [PATCH 1128/1339] chore(main): release 1.16.0 (#1980) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f80bad40a72c..f0d19dfab2fb 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.16.0](https://github.com/googleapis/gapic-generator-python/compare/v1.15.0...v1.16.0) (2024-03-22) + + +### Features + +* Add support for reading MethodSettings from service configuration YAML ([#1975](https://github.com/googleapis/gapic-generator-python/issues/1975)) ([24a23a1](https://github.com/googleapis/gapic-generator-python/commit/24a23a1ab885246e447c0010b2e5602209dfbb8d)) +* Automatically populate uuid4 fields ([#1985](https://github.com/googleapis/gapic-generator-python/issues/1985)) ([eb57e4f](https://github.com/googleapis/gapic-generator-python/commit/eb57e4f2e6b339f89aa3b7d55e4b4c0dfdfd002e)) + + +### Bug Fixes + +* Fix dependency `google-apps-card` ([#1971](https://github.com/googleapis/gapic-generator-python/issues/1971)) ([9a49cb0](https://github.com/googleapis/gapic-generator-python/commit/9a49cb07de8bd54d8601b742367887bdde854643)) + ## [1.15.0](https://github.com/googleapis/gapic-generator-python/compare/v1.14.5...v1.15.0) (2024-03-15) 
diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a1e65f71b71c..e5190356f420 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.15.0" +version = "1.16.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From eeb08fbfabf24f69d347a0f5f1b399e65649b212 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 22 Mar 2024 11:46:58 -0400 Subject: [PATCH 1129/1339] fix: cater for empty async call (#1997) --- .../%name_%version/%sub/services/%service/async_client.py.j2 | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index de18fb5c2a64..ffcb136fa616 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -325,10 +325,8 @@ class {{ service.async_client_name }}: # so it must be constructed via keyword expansion. 
if isinstance(request, dict): request = {{ method.input.ident }}(**request) - {% if method.flattened_fields %}{# Cross-package req and flattened fields #} elif not request: - request = {{ method.input.ident }}({% if method.input.ident.package != method.ident.package %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}, {% endfor %}{% endif %}) - {% endif %}{# Cross-package req and flattened fields #} + request = {{ method.input.ident }}({% if method.flattened_fields %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% endif %}) {% else %} request = {{ method.input.ident }}(request) {% endif %} {# different request package #} From 2ac4bf3f0ecd76bd8cdbefb3deb80bb56aa10f91 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 16:06:20 +0000 Subject: [PATCH 1130/1339] chore(main): release 1.16.1 (#1998) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f0d19dfab2fb..17934635e2f8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.16.1](https://github.com/googleapis/gapic-generator-python/compare/v1.16.0...v1.16.1) (2024-03-22) + + +### Bug Fixes + +* Cater for empty async call ([#1997](https://github.com/googleapis/gapic-generator-python/issues/1997)) ([801eedb](https://github.com/googleapis/gapic-generator-python/commit/801eedbb8986516ab354da6bccd4289a9c6d5362)) + ## [1.16.0](https://github.com/googleapis/gapic-generator-python/compare/v1.15.0...v1.16.0) (2024-03-22) diff --git a/packages/gapic-generator/setup.py 
b/packages/gapic-generator/setup.py index e5190356f420..947348fba2e8 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.16.0" +version = "1.16.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From f50f93eb0770b2c50f9815f8824018b8ed9f89ae Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 23 Mar 2024 17:19:14 +0100 Subject: [PATCH 1131/1339] chore(deps): update all dependencies (#1967) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.in | 2 +- packages/gapic-generator/requirements.txt | 150 +++++++++++----------- 2 files changed, 77 insertions(+), 75 deletions(-) diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in index db955dc7ec22..1c391632e542 100644 --- a/packages/gapic-generator/requirements.in +++ b/packages/gapic-generator/requirements.in @@ -3,7 +3,7 @@ google-api-core googleapis-common-protos jinja2 MarkupSafe -protobuf +protobuf<5 # See https://github.com/googleapis/gapic-generator-python/issues/1996 pypandoc PyYAML grpc-google-iam-v1 diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 2661e6d21eaa..1c9dea2d241c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -112,17 +112,17 @@ exceptiongroup==1.2.0 \ --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \ --hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 # via pytest -google-api-core==2.17.1 \ - --hash=sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e \ - 
--hash=sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95 +google-api-core==2.18.0 \ + --hash=sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6 \ + --hash=sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9 # via -r requirements.in -google-auth==2.28.1 \ - --hash=sha256:25141e2d7a14bfcba945f5e9827f98092716e99482562f15306e5b026e21aa72 \ - --hash=sha256:34fc3046c257cedcf1622fc4b31fc2be7923d9b4d44973d481125ecc50d83885 +google-auth==2.29.0 \ + --hash=sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360 \ + --hash=sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415 # via google-api-core -googleapis-common-protos[grpc]==1.62.0 \ - --hash=sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07 \ - --hash=sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277 +googleapis-common-protos[grpc]==1.63.0 \ + --hash=sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e \ + --hash=sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632 # via # -r requirements.in # google-api-core @@ -131,61 +131,61 @@ grpc-google-iam-v1==0.13.0 \ --hash=sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89 \ --hash=sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e # via -r requirements.in -grpcio==1.62.0 \ - --hash=sha256:0b9179478b09ee22f4a36b40ca87ad43376acdccc816ce7c2193a9061bf35701 \ - --hash=sha256:0d3dee701e48ee76b7d6fbbba18ba8bc142e5b231ef7d3d97065204702224e0e \ - --hash=sha256:0d7ae7fc7dbbf2d78d6323641ded767d9ec6d121aaf931ec4a5c50797b886532 \ - --hash=sha256:0e97f37a3b7c89f9125b92d22e9c8323f4e76e7993ba7049b9f4ccbe8bae958a \ - --hash=sha256:136ffd79791b1eddda8d827b607a6285474ff8a1a5735c4947b58c481e5e4271 \ - --hash=sha256:1bc8449084fe395575ed24809752e1dc4592bb70900a03ca42bf236ed5bf008f \ - --hash=sha256:1eda79574aec8ec4d00768dcb07daba60ed08ef32583b62b90bbf274b3c279f7 \ - 
--hash=sha256:29cb592c4ce64a023712875368bcae13938c7f03e99f080407e20ffe0a9aa33b \ - --hash=sha256:2c1488b31a521fbba50ae86423f5306668d6f3a46d124f7819c603979fc538c4 \ - --hash=sha256:2e84bfb2a734e4a234b116be208d6f0214e68dcf7804306f97962f93c22a1839 \ - --hash=sha256:2f3d9a4d0abb57e5f49ed5039d3ed375826c2635751ab89dcc25932ff683bbb6 \ - --hash=sha256:36df33080cd7897623feff57831eb83c98b84640b016ce443305977fac7566fb \ - --hash=sha256:38f69de9c28c1e7a8fd24e4af4264726637b72f27c2099eaea6e513e7142b47e \ - --hash=sha256:39cd45bd82a2e510e591ca2ddbe22352e8413378852ae814549c162cf3992a93 \ - --hash=sha256:3fa15850a6aba230eed06b236287c50d65a98f05054a0f01ccedf8e1cc89d57f \ - --hash=sha256:4cd356211579043fce9f52acc861e519316fff93980a212c8109cca8f47366b6 \ - --hash=sha256:56ca7ba0b51ed0de1646f1735154143dcbdf9ec2dbe8cc6645def299bb527ca1 \ - --hash=sha256:5e709f7c8028ce0443bddc290fb9c967c1e0e9159ef7a030e8c21cac1feabd35 \ - --hash=sha256:614c3ed234208e76991992342bab725f379cc81c7dd5035ee1de2f7e3f7a9842 \ - --hash=sha256:62aa1659d8b6aad7329ede5d5b077e3d71bf488d85795db517118c390358d5f6 \ - --hash=sha256:62ccb92f594d3d9fcd00064b149a0187c246b11e46ff1b7935191f169227f04c \ - --hash=sha256:662d3df5314ecde3184cf87ddd2c3a66095b3acbb2d57a8cada571747af03873 \ - --hash=sha256:748496af9238ac78dcd98cce65421f1adce28c3979393e3609683fcd7f3880d7 \ - --hash=sha256:77d48e5b1f8f4204889f1acf30bb57c30378e17c8d20df5acbe8029e985f735c \ - --hash=sha256:7a195531828b46ea9c4623c47e1dc45650fc7206f8a71825898dd4c9004b0928 \ - --hash=sha256:7e1f51e2a460b7394670fdb615e26d31d3260015154ea4f1501a45047abe06c9 \ - --hash=sha256:7eea57444a354ee217fda23f4b479a4cdfea35fb918ca0d8a0e73c271e52c09c \ - --hash=sha256:7f9d6c3223914abb51ac564dc9c3782d23ca445d2864321b9059d62d47144021 \ - --hash=sha256:81531632f93fece32b2762247c4c169021177e58e725494f9a746ca62c83acaa \ - --hash=sha256:81d444e5e182be4c7856cd33a610154fe9ea1726bd071d07e7ba13fafd202e38 \ - --hash=sha256:821a44bd63d0f04e33cf4ddf33c14cae176346486b0df08b41a6132b976de5fc \ - 
--hash=sha256:88f41f33da3840b4a9bbec68079096d4caf629e2c6ed3a72112159d570d98ebe \ - --hash=sha256:8aab8f90b2a41208c0a071ec39a6e5dbba16fd827455aaa070fec241624ccef8 \ - --hash=sha256:921148f57c2e4b076af59a815467d399b7447f6e0ee10ef6d2601eb1e9c7f402 \ - --hash=sha256:92cdb616be44c8ac23a57cce0243af0137a10aa82234f23cd46e69e115071388 \ - --hash=sha256:95370c71b8c9062f9ea033a0867c4c73d6f0ff35113ebd2618171ec1f1e903e0 \ - --hash=sha256:98d8f4eb91f1ce0735bf0b67c3b2a4fea68b52b2fd13dc4318583181f9219b4b \ - --hash=sha256:a33f2bfd8a58a02aab93f94f6c61279be0f48f99fcca20ebaee67576cd57307b \ - --hash=sha256:ab140a3542bbcea37162bdfc12ce0d47a3cda3f2d91b752a124cc9fe6776a9e2 \ - --hash=sha256:b3d3d755cfa331d6090e13aac276d4a3fb828bf935449dc16c3d554bf366136b \ - --hash=sha256:b71c65427bf0ec6a8b48c68c17356cb9fbfc96b1130d20a07cb462f4e4dcdcd5 \ - --hash=sha256:b7a6be562dd18e5d5bec146ae9537f20ae1253beb971c0164f1e8a2f5a27e829 \ - --hash=sha256:bcff647e7fe25495e7719f779cc219bbb90b9e79fbd1ce5bda6aae2567f469f2 \ - --hash=sha256:c912688acc05e4ff012c8891803659d6a8a8b5106f0f66e0aed3fb7e77898fa6 \ - --hash=sha256:ce1aafdf8d3f58cb67664f42a617af0e34555fe955450d42c19e4a6ad41c84bd \ - --hash=sha256:d6a56ba703be6b6267bf19423d888600c3f574ac7c2cc5e6220af90662a4d6b0 \ - --hash=sha256:e803e9b58d8f9b4ff0ea991611a8d51b31c68d2e24572cd1fe85e99e8cc1b4f8 \ - --hash=sha256:eef1d16ac26c5325e7d39f5452ea98d6988c700c427c52cbc7ce3201e6d93334 \ - --hash=sha256:f359d635ee9428f0294bea062bb60c478a8ddc44b0b6f8e1f42997e5dc12e2ee \ - --hash=sha256:f4c04fe33039b35b97c02d2901a164bbbb2f21fb9c4e2a45a959f0b044c3512c \ - --hash=sha256:f897b16190b46bc4d4aaf0a32a4b819d559a37a756d7c6b571e9562c360eed72 \ - --hash=sha256:fbe0c20ce9a1cff75cfb828b21f08d0a1ca527b67f2443174af6626798a754a4 \ - --hash=sha256:fc2836cb829895ee190813446dce63df67e6ed7b9bf76060262c55fcd097d270 \ - --hash=sha256:fcc98cff4084467839d0a20d16abc2a76005f3d1b38062464d088c07f500d170 +grpcio==1.62.1 \ + 
--hash=sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4 \ + --hash=sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505 \ + --hash=sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e \ + --hash=sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49 \ + --hash=sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c \ + --hash=sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362 \ + --hash=sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f \ + --hash=sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b \ + --hash=sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31 \ + --hash=sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41 \ + --hash=sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de \ + --hash=sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f \ + --hash=sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db \ + --hash=sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea \ + --hash=sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660 \ + --hash=sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f \ + --hash=sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243 \ + --hash=sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc \ + --hash=sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd \ + --hash=sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d \ + --hash=sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947 \ + --hash=sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a \ + --hash=sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483 \ + --hash=sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3 \ + 
--hash=sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2 \ + --hash=sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f \ + --hash=sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22 \ + --hash=sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66 \ + --hash=sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec \ + --hash=sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9 \ + --hash=sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407 \ + --hash=sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9 \ + --hash=sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585 \ + --hash=sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7 \ + --hash=sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369 \ + --hash=sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1 \ + --hash=sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9 \ + --hash=sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4 \ + --hash=sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b \ + --hash=sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d \ + --hash=sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1 \ + --hash=sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70 \ + --hash=sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332 \ + --hash=sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06 \ + --hash=sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f \ + --hash=sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7 \ + --hash=sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d \ + --hash=sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037 \ + 
--hash=sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd \ + --hash=sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a \ + --hash=sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b \ + --hash=sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de \ + --hash=sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698 \ + --hash=sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -300,9 +300,9 @@ mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via typing-inspect -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 # via pytest pluggy==1.4.0 \ --hash=sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981 \ @@ -311,7 +311,9 @@ pluggy==1.4.0 \ proto-plus==1.23.0 \ --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ --hash=sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c - # via -r requirements.in + # via + # -r requirements.in + # google-api-core protobuf==4.25.3 \ --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ @@ -344,13 +346,13 @@ pypandoc==1.13 \ --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 # via -r requirements.in -pytest==8.0.2 \ - 
--hash=sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd \ - --hash=sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096 +pytest==8.1.1 \ + --hash=sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7 \ + --hash=sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044 # via pytest-asyncio -pytest-asyncio==0.23.5 \ - --hash=sha256:3a048872a9c4ba14c3e90cc1aa20cbc2def7d01c7c8db3777ec281ba9c057675 \ - --hash=sha256:4e7093259ba018d58ede7d5315131d21923a60f8a6e9ee266ce1589685c89eac +pytest-asyncio==0.23.6 \ + --hash=sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a \ + --hash=sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f # via -r requirements.in pyyaml==6.0.1 \ --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ From 7de4b96a1409f8b7eab072bee1486d71529ba790 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 7 Apr 2024 23:54:16 -0400 Subject: [PATCH 1132/1339] docs: fix docs session (#2003) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.github/auto-label.yaml | 5 + .../gapic-generator/.github/blunderbuss.yml | 17 +++ .../gapic-generator/.kokoro/requirements.in | 3 +- .../gapic-generator/.kokoro/requirements.txt | 114 ++++++++---------- 5 files changed, 78 insertions(+), 65 deletions(-) create mode 100644 packages/gapic-generator/.github/blunderbuss.yml diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 5d9542b1cb21..ee2c6d1f3b94 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f -# created: 2024-03-15T16:26:15.743347415Z + digest: sha256:8244c1253becbaa533f48724a6348e4b92a10df4b4dfb66d87e615e633059bdf +# created: 2024-04-07T11:43:40.730565127Z diff --git a/packages/gapic-generator/.github/auto-label.yaml b/packages/gapic-generator/.github/auto-label.yaml index b2016d119b40..8b37ee89711f 100644 --- a/packages/gapic-generator/.github/auto-label.yaml +++ b/packages/gapic-generator/.github/auto-label.yaml @@ -13,3 +13,8 @@ # limitations under the License. requestsize: enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/packages/gapic-generator/.github/blunderbuss.yml b/packages/gapic-generator/.github/blunderbuss.yml new file mode 100644 index 000000000000..1618464d1073 --- /dev/null +++ b/packages/gapic-generator/.github/blunderbuss.yml @@ -0,0 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. 
+assign_issues: + - googleapis/actools-python + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/actools-python + +assign_prs: + - googleapis/actools-python diff --git a/packages/gapic-generator/.kokoro/requirements.in b/packages/gapic-generator/.kokoro/requirements.in index ec867d9fd65a..fff4d9ce0d0a 100644 --- a/packages/gapic-generator/.kokoro/requirements.in +++ b/packages/gapic-generator/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index bda8e38c4f31..dd61f5f32018 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - 
--hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + 
--hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + 
--hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - 
--hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + 
--hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in From a60a211c2279cbc3dc6cbf50f49119fbf7682cbc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 Apr 2024 10:58:04 -0700 Subject: [PATCH 1133/1339] feat: allow Callables for transport and channel init (#1699) --- .../%sub/services/%service/async_client.py.j2 | 16 ++++++------ .../%sub/services/%service/client.py.j2 | 21 ++++++++++------ .../services/%service/transports/grpc.py.j2 | 25 +++++++++++-------- .../%service/transports/grpc_asyncio.py.j2 | 24 ++++++++++-------- .../services/asset_service/async_client.py | 12 +++++---- .../asset_v1/services/asset_service/client.py | 21 ++++++++++------ .../services/asset_service/transports/grpc.py | 25 +++++++++++-------- .../asset_service/transports/grpc_asyncio.py | 24 ++++++++++-------- .../services/iam_credentials/async_client.py | 12 +++++---- 
.../services/iam_credentials/client.py | 21 ++++++++++------ .../iam_credentials/transports/grpc.py | 25 +++++++++++-------- .../transports/grpc_asyncio.py | 24 ++++++++++-------- .../services/eventarc/async_client.py | 12 +++++---- .../eventarc_v1/services/eventarc/client.py | 21 ++++++++++------ .../services/eventarc/transports/grpc.py | 25 +++++++++++-------- .../eventarc/transports/grpc_asyncio.py | 24 ++++++++++-------- .../config_service_v2/async_client.py | 12 +++++---- .../services/config_service_v2/client.py | 21 ++++++++++------ .../config_service_v2/transports/grpc.py | 25 +++++++++++-------- .../transports/grpc_asyncio.py | 24 ++++++++++-------- .../logging_service_v2/async_client.py | 12 +++++---- .../services/logging_service_v2/client.py | 21 ++++++++++------ .../logging_service_v2/transports/grpc.py | 25 +++++++++++-------- .../transports/grpc_asyncio.py | 24 ++++++++++-------- .../metrics_service_v2/async_client.py | 12 +++++---- .../services/metrics_service_v2/client.py | 21 ++++++++++------ .../metrics_service_v2/transports/grpc.py | 25 +++++++++++-------- .../transports/grpc_asyncio.py | 24 ++++++++++-------- .../services/cloud_redis/async_client.py | 12 +++++---- .../redis_v1/services/cloud_redis/client.py | 21 ++++++++++------ .../services/cloud_redis/transports/grpc.py | 25 +++++++++++-------- .../cloud_redis/transports/grpc_asyncio.py | 24 ++++++++++-------- 32 files changed, 402 insertions(+), 258 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index ffcb136fa616..2c4ade4636c2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -6,7 +6,7 @@ from collections 
import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} import uuid {% endif %} @@ -173,7 +173,7 @@ class {{ service.async_client_name }}: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, {{ service.name }}Transport] = "grpc_asyncio", + transport: Optional[Union[str, {{ service.name }}Transport, Callable[..., {{ service.name }}Transport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -185,9 +185,11 @@ class {{ service.async_client_name }}: credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.{{ service.name }}Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,{{ service.name }}Transport,Callable[..., {{ service.name }}Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the {{ service.name }}Transport constructor. + If set to None, a transport is chosen automatically. 
{% if 'rest' in opts.transport and not opts.rest_numeric_enums %} {# TODO (gapic-generator-python/issues/1918): Remove the beta preview comment. #} NOTE: "rest" transport functionality is currently in a @@ -196,8 +198,8 @@ class {{ service.async_client_name }}: {% endif %} client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. - - 1. The ``api_endpoint`` property can be used to override the + + 1. The ``api_endpoint`` property can be used to override the default endpoint provided by the client when ``transport`` is not explicitly provided. Only if this property is not set and ``transport`` was not explicitly provided, the endpoint is diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index ddeca3862704..e047b0d55a67 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -9,7 +9,7 @@ import functools {% endif %} import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} import uuid {% endif %} @@ -435,7 +435,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): def __init__(self, *, credentials: 
Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, {{ service.name }}Transport]] = None, + transport: Optional[Union[str, {{ service.name }}Transport, Callable[..., {{ service.name }}Transport]]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -447,9 +447,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, {{ service.name }}Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,{{ service.name }}Transport,Callable[..., {{ service.name }}Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the {{ service.name }}Transport constructor. + If set to None, a transport is chosen automatically. {% if 'rest' in opts.transport and not opts.rest_numeric_enums %} {# TODO (gapic-generator-python/issues/1918): Remove the beta preview comment. 
#} NOTE: "rest" transport functionality is currently in a @@ -543,8 +545,13 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[{{ service.name }}Transport], Callable[..., {{ service.name }}Transport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., {{ service.name }}Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 64958b9e4a6d..268ea2ea5f23 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -62,7 +62,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -82,14 +82,17 @@ class {{ service.name }}GrpcTransport({{ service.name 
}}Transport): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -99,11 +102,11 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. 
It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -132,7 +135,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. @@ -173,7 +176,9 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index f88b6f85cf2d..f27991a4778b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -78,7 +78,6 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -106,7 +105,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -126,15 +125,18 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -144,11 +146,11 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -177,7 +179,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -217,7 +219,9 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index d82fb5015cee..806c702c11f6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union from google.cloud.asset_v1 import gapic_version as package_version @@ -183,7 +183,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, AssetServiceTransport] = "grpc_asyncio", + transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -195,9 +195,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ~.AssetServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AssetServiceTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index e38dcc22c3bf..584308510741 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings from google.cloud.asset_v1 import gapic_version as package_version @@ -520,7 +520,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, AssetServiceTransport]] = None, + transport: Optional[Union[str, AssetServiceTransport, Callable[..., AssetServiceTransport]]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo 
= DEFAULT_CLIENT_INFO, ) -> None: @@ -532,9 +532,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, AssetServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,AssetServiceTransport,Callable[..., AssetServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AssetServiceTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -625,8 +627,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AssetServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 7dc4a81d9d38..a9862888e2c7 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -50,7 +50,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -70,14 +70,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -87,11 +90,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,7 +121,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -159,7 +162,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 8f2ea898f870..0183a01e2e20 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -66,7 +66,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -94,7 +93,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -114,15 +113,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -132,11 +134,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -163,7 +165,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -203,7 +205,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 560b4e92524a..0fee3e065c38 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union from google.iam.credentials_v1 import gapic_version as package_version @@ -174,7 +174,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, IAMCredentialsTransport] = "grpc_asyncio", + transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -186,9 +186,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ~.IAMCredentialsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,IAMCredentialsTransport,Callable[..., IAMCredentialsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the IAMCredentialsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 4ea9050cc4a8..4397f37900b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings from google.iam.credentials_v1 import gapic_version as package_version @@ -457,7 +457,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, IAMCredentialsTransport]] = None, + transport: Optional[Union[str, IAMCredentialsTransport, Callable[..., IAMCredentialsTransport]]] = None, client_options: 
Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -469,9 +469,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, IAMCredentialsTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,IAMCredentialsTransport,Callable[..., IAMCredentialsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the IAMCredentialsTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -562,8 +564,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[IAMCredentialsTransport], Callable[..., IAMCredentialsTransport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., IAMCredentialsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 7d873e3be7d1..d07061919a29 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -56,7 +56,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -76,14 +76,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -93,11 +96,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -123,7 +126,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -164,7 +167,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 7477486eb132..f3f074d5766e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -72,7 +72,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -100,7 +99,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -120,15 +119,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -138,11 +140,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -168,7 +170,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -208,7 +210,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index c71895f6aa73..cfefe93ceaa3 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union from google.cloud.eventarc_v1 import gapic_version as package_version @@ -201,7 +201,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, EventarcTransport] = "grpc_asyncio", + transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -213,9 +213,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.EventarcTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,EventarcTransport,Callable[..., EventarcTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EventarcTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index a6b43a56fdc8..6b6f1fbd1e5e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings from google.cloud.eventarc_v1 import gapic_version as package_version @@ -565,7 +565,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, EventarcTransport]] = None, + transport: Optional[Union[str, EventarcTransport, Callable[..., EventarcTransport]]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -577,9 +577,11 @@ def __init__(self, *, credentials 
identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, EventarcTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,EventarcTransport,Callable[..., EventarcTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the EventarcTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -670,8 +672,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[EventarcTransport], Callable[..., EventarcTransport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., EventarcTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 7898699b4805..bf51d459464c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -60,7 +60,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -80,14 +80,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -97,11 +100,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -128,7 +131,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -169,7 +172,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 9ea7acdc34df..6da6805298d3 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -76,7 +76,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -104,7 +103,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -124,15 +123,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -142,11 +144,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -173,7 +175,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -213,7 +215,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index d0b6d13a6f2e..be07b22b2152 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union from google.cloud.logging_v2 import gapic_version as package_version @@ -181,7 +181,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ConfigServiceV2Transport] = "grpc_asyncio", + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -193,9 +193,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ~.ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 520303c420c2..32b5be71af7d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -516,7 +516,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ConfigServiceV2Transport]] = None, + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -528,9 +528,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ConfigServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -618,8 +620,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ConfigServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 6176b2bc40bc..81818e1ca6d9 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -50,7 +50,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -70,14 +70,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -87,11 +90,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,7 +121,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -159,7 +162,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index fab08309e0c1..27a9675d1c39 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -66,7 +66,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -94,7 +93,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -114,15 +113,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -132,11 +134,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -163,7 +165,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -203,7 +205,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index eb40f1771d55..5b08d1efb8a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union from google.cloud.logging_v2 import gapic_version as package_version @@ -166,7 +166,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, LoggingServiceV2Transport] = "grpc_asyncio", + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -178,9 +178,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the 
credentials from the environment. - transport (Union[str, ~.LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a64066a8039f..cb4004bb8bf7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -447,7 +447,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, LoggingServiceV2Transport]] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, client_options: 
Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -459,9 +459,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, LoggingServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -549,8 +551,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[LoggingServiceV2Transport], Callable[..., LoggingServiceV2Transport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., LoggingServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 679c1a859808..87498f9bc131 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -49,7 +49,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -69,14 +69,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -86,11 +89,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -116,7 +119,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -157,7 +160,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 8bf5fb4587ae..2a083c46a0eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -65,7 +65,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -93,7 +92,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -113,15 +112,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -131,11 +133,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -161,7 +163,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -201,7 +203,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 5e3b8965423d..43963989ea78 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union from google.cloud.logging_v2 import gapic_version as package_version @@ -167,7 +167,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, MetricsServiceV2Transport] = "grpc_asyncio", + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -179,9 +179,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. 
- transport (Union[str, ~.MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 92c4511844ba..36decf7ec151 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings from google.cloud.logging_v2 import gapic_version as package_version @@ -448,7 +448,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, MetricsServiceV2Transport]] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -460,9 +460,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, MetricsServiceV2Transport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -550,8 +552,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[MetricsServiceV2Transport], Callable[..., MetricsServiceV2Transport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetricsServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index b27a13b8a07f..ffe49e97d4e9 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -49,7 +49,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -69,14 +69,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -86,11 +89,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -116,7 +119,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -157,7 +160,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index d57e42b08afd..2e66658c60c2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -65,7 +65,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -93,7 +92,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -113,15 +112,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -131,11 +133,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -161,7 +163,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -201,7 +203,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 3d2a03c068ef..bd46e2b0ec33 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union from google.cloud.redis_v1 import gapic_version as package_version @@ -191,7 +191,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudRedisTransport] = "grpc_asyncio", + transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -203,9 +203,11 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.CloudRedisTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,CloudRedisTransport,Callable[..., CloudRedisTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CloudRedisTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 6bc325838c82..25454d4a36c2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast import warnings from google.cloud.redis_v1 import gapic_version as package_version @@ -474,7 +474,7 @@ def universe_domain(self) -> str: def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudRedisTransport]] = None, + transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -486,9 +486,11 @@ def __init__(self, *, credentials 
identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, CloudRedisTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,CloudRedisTransport,Callable[..., CloudRedisTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CloudRedisTransport constructor. + If set to None, a transport is chosen automatically. NOTE: "rest" transport functionality is currently in a beta state (preview). We welcome your feedback via an issue in this library's source repository. @@ -579,8 +581,13 @@ def __init__(self, *, if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CloudRedisTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 5512c3073690..916d2cc75c57 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -70,7 +70,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -90,14 +90,17 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -107,11 +110,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -138,7 +141,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -179,7 +182,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 43afea8584ab..1fe6599f7777 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -86,7 +86,6 @@ def create_channel(cls, the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -114,7 +113,7 @@ def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -134,15 +133,18 @@ def __init__(self, *, credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -152,11 +154,11 @@ def __init__(self, *, private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -183,7 +185,7 @@ def __init__(self, *, if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. credentials = False # If a channel was explicitly provided, set it. 
@@ -223,7 +225,9 @@ def __init__(self, *, ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, From 6263c23e9da8c6f7b500a0c41077e1c5b6568e79 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 Apr 2024 11:02:31 -0700 Subject: [PATCH 1134/1339] chore: increase async rpc performance (#1755) --- .../%sub/services/%service/_client_macros.j2 | 14 +- .../%sub/services/%service/async_client.py.j2 | 34 +- .../services/%service/transports/base.py.j2 | 12 +- .../%service/transports/grpc_asyncio.py.j2 | 33 + .../gapic/%name_%version/%sub/test_macros.j2 | 147 + .../services/asset_service/async_client.py | 365 +-- .../asset_v1/services/asset_service/client.py | 202 +- .../services/asset_service/transports/base.py | 35 +- .../asset_service/transports/grpc_asyncio.py | 191 ++ .../unit/gapic/asset_v1/test_asset_service.py | 2371 ++++++++++++++++- .../services/iam_credentials/async_client.py | 88 +- .../services/iam_credentials/client.py | 40 +- .../iam_credentials/transports/base.py | 20 +- .../transports/grpc_asyncio.py | 67 + .../credentials_v1/test_iam_credentials.py | 404 +++ .../services/eventarc/async_client.py | 270 +- .../eventarc_v1/services/eventarc/client.py | 180 +- .../eventarc/transports/grpc_asyncio.py | 97 + .../unit/gapic/eventarc_v1/test_eventarc.py | 1928 +++++++++++++- .../config_service_v2/async_client.py | 480 ++-- .../services/config_service_v2/client.py | 264 +- .../config_service_v2/transports/base.py | 35 +- .../transports/grpc_asyncio.py | 244 ++ .../logging_service_v2/async_client.py | 125 +- .../services/logging_service_v2/client.py | 46 +- .../logging_service_v2/transports/base.py | 30 +- .../transports/grpc_asyncio.py | 103 + .../metrics_service_v2/async_client.py | 107 +- 
.../services/metrics_service_v2/client.py | 50 +- .../metrics_service_v2/transports/base.py | 20 +- .../transports/grpc_asyncio.py | 76 + .../logging_v2/test_config_service_v2.py | 2248 ++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 415 +++ .../logging_v2/test_metrics_service_v2.py | 345 +++ .../services/cloud_redis/async_client.py | 165 +- .../redis_v1/services/cloud_redis/client.py | 110 +- .../cloud_redis/transports/grpc_asyncio.py | 62 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1207 +++++++++ 38 files changed, 11047 insertions(+), 1583 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 8d08f0ed423d..25b3ef6fcaa3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -87,8 +87,8 @@ {% if not method.client_streaming %} # Create or coerce a protobuf request object. {% if method.flattened_fields %} - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' @@ -97,17 +97,15 @@ {% endif %} {% if method.input.ident.package != method.ident.package %}{# request lives in a different package, so there is no proto wrapper #} if isinstance(request, dict): - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
+ # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = {{ method.input.ident }}(**request) elif not request: # Null request, just make one. request = {{ method.input.ident }}() {% else %} - # Minor optimization to avoid making a copy if the user passes - # in a {{ method.input.ident }}. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, {{ method.input.ident }}): request = {{ method.input.ident }}(request) {% endif %}{# different request package #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 2c4ade4636c2..66ac6a70b51c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -314,8 +314,8 @@ class {{ service.async_client_name }}: {% if not method.client_streaming %} # Create or coerce a protobuf request object. {% if method.flattened_fields %} - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " @@ -323,14 +323,17 @@ class {{ service.async_client_name }}: {% endif %} {% if method.input.ident.package != method.ident.package %} {# request lives in a different package, so there is no proto wrapper #} - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. + # - The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. if isinstance(request, dict): request = {{ method.input.ident }}(**request) elif not request: request = {{ method.input.ident }}({% if method.flattened_fields %}{% for f in method.flattened_fields.values() %}{{ f.name }}={{ f.name }}{% if not loop.last %}, {% endif %}{% endfor %}{% endif %}) {% else %} - request = {{ method.input.ident }}(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, {{ method.input.ident }}): + request = {{ method.input.ident }}(request) {% endif %} {# different request package #} {# Vanilla python protobuf wrapper types cannot _set_ repeated fields #} @@ -357,26 +360,9 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.{{ method.transport_safe_name|snake_case }}, - {% if method.retry %} - default_retry=retries.AsyncRetry( - {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} - {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} - {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} - predicate=retries.if_exception_type( - {% for ex in method.retry.retryable_exceptions|sort(attribute="__name__") %} - core_exceptions.{{ ex.__name__ }}, - {% endfor %} - ), - deadline={{ method.timeout }}, - ), - {% endif %} - default_timeout={{ method.timeout }}, - client_info=DEFAULT_CLIENT_INFO, - ) - {% if method.field_headers %} + rpc = self._client._transport._wrapped_methods[self._client._transport.{{ method.transport_safe_name|snake_case }}] + {% if method.field_headers %} # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index e6a77080d7bd..316eb84f9675 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -149,9 +149,15 @@ class {{ service.name }}Transport(abc.ABC): self.{{ method.transport_safe_name|snake_case }}, {% if method.retry %} default_retry=retries.Retry( - {% if method.retry.initial_backoff %}initial={{ method.retry.initial_backoff }},{% endif %} - {% if method.retry.max_backoff %}maximum={{ method.retry.max_backoff }},{% endif %} - {% if method.retry.backoff_multiplier %}multiplier={{ method.retry.backoff_multiplier }},{% endif %} + {% if method.retry.initial_backoff %} + initial={{ method.retry.initial_backoff }}, + {% endif %} + {% if method.retry.max_backoff %} + maximum={{ method.retry.max_backoff }}, + {% endif %} + {% if method.retry.backoff_multiplier %} + multiplier={{ method.retry.backoff_multiplier }}, + {% endif %} predicate=retries.if_exception_type( {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} core_exceptions.{{ ex.__name__ }}, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index f27991a4778b..ab14820095f2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -7,6 
+7,8 @@ from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries {% if service.has_lro %} from google.api_core import operations_v1 {% endif %} @@ -382,6 +384,37 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs["test_iam_permissions"] {% endif %} + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + {% for method in service.methods.values() %} + self.{{ method.transport_safe_name|snake_case }}: gapic_v1.method_async.wrap_method( + self.{{ method.transport_safe_name|snake_case }}, + {% if method.retry %} + default_retry=retries.AsyncRetry( + {% if method.retry.initial_backoff %} + initial={{ method.retry.initial_backoff }}, + {% endif %} + {% if method.retry.max_backoff %} + maximum={{ method.retry.max_backoff }}, + {% endif %} + {% if method.retry.backoff_multiplier %} + multiplier={{ method.retry.backoff_multiplier }}, + {% endif %} + predicate=retries.if_exception_type( + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + core_exceptions.{{ ex.__name__ }}, + {% endfor %} + ), + deadline={{ method.timeout }}, + ), + {% endif %} + default_timeout={{ method.timeout }}, + client_info=client_info, + ), + {% endfor %} {# service.methods.values() #} + } + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 769f49383d02..bae87184fc33 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -196,6 +196,54 @@ def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): ) {% endif %} +def test_{{ method_name }}_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.{{method.transport_safe_name|snake_case}} in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.{{method.transport_safe_name|snake_case}}] = mock_rpc + + {% if method.client_streaming %} + request = [{}] + client.{{ method.safe_name|snake_case }}(request) + {% else %} + request = {} + client.{{ method_name }}(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + {% if method.lro or method.extended_lro %} + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + {% endif %} + + {% if method.client_streaming %} + client.{{ method.safe_name|snake_case }}(request) + {% else %} + client.{{ method_name }}(request) + {% endif %} + + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + {% if not full_extended_lro %} {% if not method.client_streaming %} @pytest.mark.asyncio @@ -253,6 +301,58 @@ async def test_{{ method_name }}_empty_call_async(): assert args[0] == {{ method.input.ident }}() {% endif %} +@pytest.mark.asyncio +async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = {{ service.async_client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.{{method.transport_safe_name|snake_case}} in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.{{method.transport_safe_name|snake_case}}] = mock_object + + {% if method.client_streaming %} + request = [{}] + await client.{{ method.name|snake_case }}(request) + {% else %} + request = {} + await client.{{ method_name }}(request) + {% endif %} + + # Establish that the 
underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + {% if method.lro or method.extended_lro %} + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + {% endif %} + + {% if method.client_streaming %} + await client.{{ method.name|snake_case }}(request) + {% else %} + await client.{{ method_name }}(request) + {% endif %} + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): {% with auto_populated_field_sample_value = "explicit value for autopopulate-able field" %} @@ -1220,6 +1320,53 @@ def test_{{ method_name }}_rest(request_type): {% endfor %} {% endif %} +def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.{{method.transport_safe_name|snake_case}} in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.{{method.transport_safe_name|snake_case}}] = mock_rpc + + {% if method.client_streaming %} + request = [{}] + client.{{ method.safe_name|snake_case }}(request) + {% else %} + request = {} + client.{{ method_name }}(request) + {% endif %} + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + {% if method.lro or method.extended_lro %} + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + {% endif %} + + {% if method.client_streaming %} + client.{{ method.safe_name|snake_case }}(request) + {% else %} + client.{{ method_name }}(request) + {% endif %} + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + {% if method.input.required_fields %} def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ident }}): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 806c702c11f6..dc47b46ca899 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -323,15 +323,14 @@ async def sample_export_assets(): """ # Create or coerce a protobuf request object. - request = asset_service.ExportAssetsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.ExportAssetsRequest): + request = asset_service.ExportAssetsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_assets, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.export_assets] # Certain fields should be provided within the metadata header; # add these here. @@ -432,14 +431,17 @@ async def sample_list_assets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.ListAssetsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.ListAssetsRequest): + request = asset_service.ListAssetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -448,11 +450,7 @@ async def sample_list_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_assets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_assets] # Certain fields should be provided within the metadata header; # add these here. @@ -540,22 +538,14 @@ async def sample_batch_get_assets_history(): Batch get assets history response. """ # Create or coerce a protobuf request object. 
- request = asset_service.BatchGetAssetsHistoryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.BatchGetAssetsHistoryRequest): + request = asset_service.BatchGetAssetsHistoryRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_get_assets_history, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_assets_history] # Certain fields should be provided within the metadata header; # add these here. @@ -658,14 +648,17 @@ async def sample_create_feed(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.CreateFeedRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.CreateFeedRequest): + request = asset_service.CreateFeedRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -674,11 +667,7 @@ async def sample_create_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_feed, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_feed] # Certain fields should be provided within the metadata header; # add these here. @@ -769,14 +758,17 @@ async def sample_get_feed(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.GetFeedRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.GetFeedRequest): + request = asset_service.GetFeedRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -785,18 +777,7 @@ async def sample_get_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_feed, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_feed] # Certain fields should be provided within the metadata header; # add these here. @@ -882,14 +863,17 @@ async def sample_list_feeds(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.ListFeedsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.ListFeedsRequest): + request = asset_service.ListFeedsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -898,18 +882,7 @@ async def sample_list_feeds(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_feeds, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_feeds] # Certain fields should be provided within the metadata header; # add these here. @@ -1004,14 +977,17 @@ async def sample_update_feed(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([feed]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.UpdateFeedRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.UpdateFeedRequest): + request = asset_service.UpdateFeedRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1020,11 +996,7 @@ async def sample_update_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_feed, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_feed] # Certain fields should be provided within the metadata header; # add these here. 
@@ -1100,14 +1072,17 @@ async def sample_delete_feed(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.DeleteFeedRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.DeleteFeedRequest): + request = asset_service.DeleteFeedRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1116,18 +1091,7 @@ async def sample_delete_feed(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_feed, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_feed] # Certain fields should be provided within the metadata header; # add these here. @@ -1320,14 +1284,17 @@ async def sample_search_all_resources(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, query, asset_types]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.SearchAllResourcesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.SearchAllResourcesRequest): + request = asset_service.SearchAllResourcesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1340,18 +1307,7 @@ async def sample_search_all_resources(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.search_all_resources, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=15.0, - ), - default_timeout=15.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_resources] # Certain fields should be provided within the metadata header; # add these here. @@ -1519,14 +1475,17 @@ async def sample_search_all_iam_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([scope, query]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.SearchAllIamPoliciesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): + request = asset_service.SearchAllIamPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1537,18 +1496,7 @@ async def sample_search_all_iam_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.search_all_iam_policies, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=15.0, - ), - default_timeout=15.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.search_all_iam_policies] # Certain fields should be provided within the metadata header; # add these here. @@ -1637,21 +1585,14 @@ async def sample_analyze_iam_policy(): """ # Create or coerce a protobuf request object. - request = asset_service.AnalyzeIamPolicyRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.AnalyzeIamPolicyRequest): + request = asset_service.AnalyzeIamPolicyRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_iam_policy, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1751,15 +1692,14 @@ async def sample_analyze_iam_policy_longrunning(): """ # Create or coerce a protobuf request object. - request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.AnalyzeIamPolicyLongrunningRequest): + request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_iam_policy_longrunning, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_iam_policy_longrunning] # Certain fields should be provided within the metadata header; # add these here. @@ -1850,15 +1790,14 @@ async def sample_analyze_move(): """ # Create or coerce a protobuf request object. - request = asset_service.AnalyzeMoveRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.AnalyzeMoveRequest): + request = asset_service.AnalyzeMoveRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_move, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_move] # Certain fields should be provided within the metadata header; # add these here. @@ -1947,15 +1886,14 @@ async def sample_query_assets(): QueryAssets response. """ # Create or coerce a protobuf request object. - request = asset_service.QueryAssetsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.QueryAssetsRequest): + request = asset_service.QueryAssetsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.query_assets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.query_assets] # Certain fields should be provided within the metadata header; # add these here. @@ -2069,14 +2007,17 @@ async def sample_create_saved_query(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, saved_query, saved_query_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.CreateSavedQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, asset_service.CreateSavedQueryRequest): + request = asset_service.CreateSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2089,11 +2030,7 @@ async def sample_create_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_saved_query, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_saved_query] # Certain fields should be provided within the metadata header; # add these here. @@ -2180,14 +2117,17 @@ async def sample_get_saved_query(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.GetSavedQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.GetSavedQueryRequest): + request = asset_service.GetSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2196,11 +2136,7 @@ async def sample_get_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_saved_query, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_saved_query] # Certain fields should be provided within the metadata header; # add these here. @@ -2293,14 +2229,17 @@ async def sample_list_saved_queries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.ListSavedQueriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.ListSavedQueriesRequest): + request = asset_service.ListSavedQueriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2309,11 +2248,7 @@ async def sample_list_saved_queries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_saved_queries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_saved_queries] # Certain fields should be provided within the metadata header; # add these here. @@ -2418,14 +2353,17 @@ async def sample_update_saved_query(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([saved_query, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.UpdateSavedQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.UpdateSavedQueryRequest): + request = asset_service.UpdateSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2436,11 +2374,7 @@ async def sample_update_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_saved_query, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_saved_query] # Certain fields should be provided within the metadata header; # add these here. @@ -2518,14 +2452,17 @@ async def sample_delete_saved_query(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.DeleteSavedQueryRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.DeleteSavedQueryRequest): + request = asset_service.DeleteSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2534,11 +2471,7 @@ async def sample_delete_saved_query(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_saved_query, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_saved_query] # Certain fields should be provided within the metadata header; # add these here. @@ -2612,15 +2545,14 @@ async def sample_batch_get_effective_iam_policies(): """ # Create or coerce a protobuf request object. - request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.BatchGetEffectiveIamPoliciesRequest): + request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.batch_get_effective_iam_policies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.batch_get_effective_iam_policies] # Certain fields should be provided within the metadata header; # add these here. @@ -2738,14 +2670,17 @@ async def sample_analyze_org_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, constraint, filter]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.AnalyzeOrgPoliciesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.AnalyzeOrgPoliciesRequest): + request = asset_service.AnalyzeOrgPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2758,11 +2693,7 @@ async def sample_analyze_org_policies(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_org_policies, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policies] # Certain fields should be provided within the metadata header; # add these here. @@ -2889,14 +2820,17 @@ async def sample_analyze_org_policy_governed_containers(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, constraint, filter]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): + request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2909,11 +2843,7 @@ async def sample_analyze_org_policy_governed_containers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_org_policy_governed_containers, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_containers] # Certain fields should be provided within the metadata header; # add these here. @@ -3069,14 +2999,17 @@ async def sample_analyze_org_policy_governed_assets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([scope, constraint, filter]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): + request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3089,11 +3022,7 @@ async def sample_analyze_org_policy_governed_assets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.analyze_org_policy_governed_assets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.analyze_org_policy_governed_assets] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 584308510741..5e42e3917cbd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -721,10 +721,8 @@ def sample_export_assets(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.ExportAssetsRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.ExportAssetsRequest): request = asset_service.ExportAssetsRequest(request) @@ -831,17 +829,15 @@ def sample_list_assets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.ListAssetsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.ListAssetsRequest): request = asset_service.ListAssetsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -939,10 +935,8 @@ def sample_batch_get_assets_history(): Batch get assets history response. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.BatchGetAssetsHistoryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, asset_service.BatchGetAssetsHistoryRequest): request = asset_service.BatchGetAssetsHistoryRequest(request) @@ -1051,17 +1045,15 @@ def sample_create_feed(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.CreateFeedRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.CreateFeedRequest): request = asset_service.CreateFeedRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1162,17 +1154,15 @@ def sample_get_feed(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.GetFeedRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.GetFeedRequest): request = asset_service.GetFeedRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1268,17 +1258,15 @@ def sample_list_feeds(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.ListFeedsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.ListFeedsRequest): request = asset_service.ListFeedsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1383,17 +1371,15 @@ def sample_update_feed(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([feed]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.UpdateFeedRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.UpdateFeedRequest): request = asset_service.UpdateFeedRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1479,17 +1465,15 @@ def sample_delete_feed(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.DeleteFeedRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.DeleteFeedRequest): request = asset_service.DeleteFeedRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1692,17 +1676,15 @@ def sample_search_all_resources(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, query, asset_types]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.SearchAllResourcesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.SearchAllResourcesRequest): request = asset_service.SearchAllResourcesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1884,17 +1866,15 @@ def sample_search_all_iam_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, query]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.SearchAllIamPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, asset_service.SearchAllIamPoliciesRequest): request = asset_service.SearchAllIamPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1995,10 +1975,8 @@ def sample_analyze_iam_policy(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.AnalyzeIamPolicyRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.AnalyzeIamPolicyRequest): request = asset_service.AnalyzeIamPolicyRequest(request) @@ -2104,10 +2082,8 @@ def sample_analyze_iam_policy_longrunning(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.AnalyzeIamPolicyLongrunningRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.AnalyzeIamPolicyLongrunningRequest): request = asset_service.AnalyzeIamPolicyLongrunningRequest(request) @@ -2204,10 +2180,8 @@ def sample_analyze_move(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.AnalyzeMoveRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, asset_service.AnalyzeMoveRequest): request = asset_service.AnalyzeMoveRequest(request) @@ -2302,10 +2276,8 @@ def sample_query_assets(): QueryAssets response. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.QueryAssetsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.QueryAssetsRequest): request = asset_service.QueryAssetsRequest(request) @@ -2425,17 +2397,15 @@ def sample_create_saved_query(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, saved_query, saved_query_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.CreateSavedQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.CreateSavedQueryRequest): request = asset_service.CreateSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2536,17 +2506,15 @@ def sample_get_saved_query(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.GetSavedQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.GetSavedQueryRequest): request = asset_service.GetSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2649,17 +2617,15 @@ def sample_list_saved_queries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.ListSavedQueriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, asset_service.ListSavedQueriesRequest): request = asset_service.ListSavedQueriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2774,17 +2740,15 @@ def sample_update_saved_query(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([saved_query, update_mask]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.UpdateSavedQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.UpdateSavedQueryRequest): request = asset_service.UpdateSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2874,17 +2838,15 @@ def sample_delete_saved_query(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.DeleteSavedQueryRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.DeleteSavedQueryRequest): request = asset_service.DeleteSavedQueryRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2968,10 +2930,8 @@ def sample_batch_get_effective_iam_policies(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.BatchGetEffectiveIamPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.BatchGetEffectiveIamPoliciesRequest): request = asset_service.BatchGetEffectiveIamPoliciesRequest(request) @@ -3095,17 +3055,15 @@ def sample_analyze_org_policies(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([scope, constraint, filter]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.AnalyzeOrgPoliciesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, asset_service.AnalyzeOrgPoliciesRequest): request = asset_service.AnalyzeOrgPoliciesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3246,17 +3204,15 @@ def sample_analyze_org_policy_governed_containers(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, constraint, filter]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.AnalyzeOrgPolicyGovernedContainersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedContainersRequest): request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3426,17 +3382,15 @@ def sample_analyze_org_policy_governed_assets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([scope, constraint, filter]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a asset_service.AnalyzeOrgPolicyGovernedAssetsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 9bc218b63bce..5e4aa9aab846 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -133,7 +133,10 @@ def _prep_wrapped_messages(self, client_info): self.batch_get_assets_history: gapic_v1.method.wrap_method( self.batch_get_assets_history, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -150,7 +153,10 @@ def _prep_wrapped_messages(self, client_info): self.get_feed: gapic_v1.method.wrap_method( self.get_feed, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -162,7 +168,10 @@ def _prep_wrapped_messages(self, client_info): self.list_feeds: gapic_v1.method.wrap_method( self.list_feeds, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -179,7 
+188,10 @@ def _prep_wrapped_messages(self, client_info): self.delete_feed: gapic_v1.method.wrap_method( self.delete_feed, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -191,7 +203,10 @@ def _prep_wrapped_messages(self, client_info): self.search_all_resources: gapic_v1.method.wrap_method( self.search_all_resources, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -203,7 +218,10 @@ def _prep_wrapped_messages(self, client_info): self.search_all_iam_policies: gapic_v1.method.wrap_method( self.search_all_iam_policies, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -215,7 +233,10 @@ def _prep_wrapped_messages(self, client_info): self.analyze_iam_policy: gapic_v1.method.wrap_method( self.analyze_iam_policy, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), deadline=300.0, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 0183a01e2e20..46532f8549b1 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -935,6 +937,195 @@ def analyze_org_policy_governed_assets(self) -> Callable[ ) return self._stubs['analyze_org_policy_governed_assets'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.export_assets: gapic_v1.method_async.wrap_method( + self.export_assets, + default_timeout=60.0, + client_info=client_info, + ), + self.list_assets: gapic_v1.method_async.wrap_method( + self.list_assets, + default_timeout=None, + client_info=client_info, + ), + self.batch_get_assets_history: gapic_v1.method_async.wrap_method( + self.batch_get_assets_history, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_feed: gapic_v1.method_async.wrap_method( + self.create_feed, + default_timeout=60.0, + client_info=client_info, + ), + self.get_feed: gapic_v1.method_async.wrap_method( + self.get_feed, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_feeds: gapic_v1.method_async.wrap_method( + self.list_feeds, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_feed: gapic_v1.method_async.wrap_method( + self.update_feed, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_feed: gapic_v1.method_async.wrap_method( + self.delete_feed, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.search_all_resources: gapic_v1.method_async.wrap_method( + self.search_all_resources, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=15.0, + ), + default_timeout=15.0, + client_info=client_info, + ), + self.search_all_iam_policies: gapic_v1.method_async.wrap_method( + self.search_all_iam_policies, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=15.0, + ), + default_timeout=15.0, + client_info=client_info, + ), + self.analyze_iam_policy: gapic_v1.method_async.wrap_method( + self.analyze_iam_policy, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + 
default_timeout=300.0, + client_info=client_info, + ), + self.analyze_iam_policy_longrunning: gapic_v1.method_async.wrap_method( + self.analyze_iam_policy_longrunning, + default_timeout=60.0, + client_info=client_info, + ), + self.analyze_move: gapic_v1.method_async.wrap_method( + self.analyze_move, + default_timeout=None, + client_info=client_info, + ), + self.query_assets: gapic_v1.method_async.wrap_method( + self.query_assets, + default_timeout=None, + client_info=client_info, + ), + self.create_saved_query: gapic_v1.method_async.wrap_method( + self.create_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.get_saved_query: gapic_v1.method_async.wrap_method( + self.get_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.list_saved_queries: gapic_v1.method_async.wrap_method( + self.list_saved_queries, + default_timeout=None, + client_info=client_info, + ), + self.update_saved_query: gapic_v1.method_async.wrap_method( + self.update_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.delete_saved_query: gapic_v1.method_async.wrap_method( + self.delete_saved_query, + default_timeout=None, + client_info=client_info, + ), + self.batch_get_effective_iam_policies: gapic_v1.method_async.wrap_method( + self.batch_get_effective_iam_policies, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policies: gapic_v1.method_async.wrap_method( + self.analyze_org_policies, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policy_governed_containers: gapic_v1.method_async.wrap_method( + self.analyze_org_policy_governed_containers, + default_timeout=None, + client_info=client_info, + ), + self.analyze_org_policy_governed_assets: gapic_v1.method_async.wrap_method( + self.analyze_org_policy_governed_assets, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index ec79f660bca0..a3de204fbe45 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -844,6 +844,42 @@ def test_export_assets_non_empty_request_with_auto_populated_field(): parent='parent_value', ) +def test_export_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc + + request = {} + client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_export_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -866,6 +902,47 @@ async def test_export_assets_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest() +@pytest.mark.asyncio +async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.export_assets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_object + + request = {} + await client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): client = AssetServiceAsyncClient( @@ -1046,6 +1123,38 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc + + request = {} + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1068,6 +1177,43 @@ async def test_list_assets_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest() +@pytest.mark.asyncio +async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_assets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_object + + request = {} + await client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): client = AssetServiceAsyncClient( @@ -1519,6 +1665,38 @@ def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): parent='parent_value', ) +def test_batch_get_assets_history_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_get_assets_history in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc + + request = {} + client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_assets_history(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_batch_get_assets_history_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1540,6 +1718,43 @@ async def test_batch_get_assets_history_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest() +@pytest.mark.asyncio +async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_get_assets_history in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_object + + request = {} + await client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.batch_get_assets_history(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceAsyncClient( @@ -1727,6 +1942,38 @@ def test_create_feed_non_empty_request_with_auto_populated_field(): feed_id='feed_id_value', ) +def test_create_feed_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc + + request = {} + client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1753,6 +2000,43 @@ async def test_create_feed_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest() +@pytest.mark.asyncio +async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_feed in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_object + + request = {} + await client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): client = AssetServiceAsyncClient( @@ -2030,6 +2314,38 @@ def test_get_feed_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_feed_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc + + request = {} + client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2056,6 +2372,43 @@ async def test_get_feed_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest() +@pytest.mark.asyncio +async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_feed in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_object + + request = {} + await client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): client = AssetServiceAsyncClient( @@ -2323,6 +2676,38 @@ def test_list_feeds_non_empty_request_with_auto_populated_field(): parent='parent_value', ) +def test_list_feeds_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_feeds in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc + + request = {} + client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_feeds(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_feeds_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2344,6 +2729,43 @@ async def test_list_feeds_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest() +@pytest.mark.asyncio +async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_feeds in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_object + + request = {} + await client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_feeds(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): client = AssetServiceAsyncClient( @@ -2609,6 +3031,38 @@ def test_update_feed_non_empty_request_with_auto_populated_field(): assert args[0] == asset_service.UpdateFeedRequest( ) +def test_update_feed_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc + + request = {} + client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2635,6 +3089,43 @@ async def test_update_feed_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateFeedRequest() +@pytest.mark.asyncio +async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_feed in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_object + + request = {} + await client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): client = AssetServiceAsyncClient( @@ -2901,6 +3392,38 @@ def test_delete_feed_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_feed_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc + + request = {} + client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2921,6 +3444,43 @@ async def test_delete_feed_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest() +@pytest.mark.asyncio +async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_feed in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_object + + request = {} + await client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): client = AssetServiceAsyncClient( @@ -3185,6 +3745,38 @@ def test_search_all_resources_non_empty_request_with_auto_populated_field(): order_by='order_by_value', ) +def test_search_all_resources_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_all_resources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc + + request = {} + client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_all_resources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_search_all_resources_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3207,6 +3799,43 @@ async def test_search_all_resources_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest() +@pytest.mark.asyncio +async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_object + + request = {} + await client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.search_all_resources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceAsyncClient( @@ -3686,6 +4315,38 @@ def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): order_by='order_by_value', ) +def test_search_all_iam_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_all_iam_policies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc + + request = {} + client.search_all_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_all_iam_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_search_all_iam_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3708,6 +4369,43 @@ async def test_search_all_iam_policies_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest() +@pytest.mark.asyncio +async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_object + + request = {} + await client.search_all_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.search_all_iam_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceAsyncClient( @@ -4171,6 +4869,38 @@ def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): saved_analysis_query='saved_analysis_query_value', ) +def test_analyze_iam_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc + + request = {} + client.analyze_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_analyze_iam_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4193,6 +4923,43 @@ async def test_analyze_iam_policy_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest() +@pytest.mark.asyncio +async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.analyze_iam_policy in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_object + + request = {} + await client.analyze_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.analyze_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceAsyncClient( @@ -4369,6 +5136,42 @@ def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_fi saved_analysis_query='saved_analysis_query_value', ) +def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc + + request = {} + client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.analyze_iam_policy_longrunning(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4391,6 +5194,47 @@ async def test_analyze_iam_policy_longrunning_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() +@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.analyze_iam_policy_longrunning in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_object + + request = {} + await client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.analyze_iam_policy_longrunning(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceAsyncClient( @@ -4569,6 +5413,38 @@ def test_analyze_move_non_empty_request_with_auto_populated_field(): destination_parent='destination_parent_value', ) +def test_analyze_move_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc + + request = {} + client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_move(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_analyze_move_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4590,6 +5466,43 @@ async def test_analyze_move_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeMoveRequest() +@pytest.mark.asyncio +async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.analyze_move in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_object + + request = {} + await client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.analyze_move(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceAsyncClient( @@ -4775,6 +5688,38 @@ def test_query_assets_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_query_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc + + request = {} + client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.query_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_query_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4798,6 +5743,43 @@ async def test_query_assets_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.QueryAssetsRequest() +@pytest.mark.asyncio +async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.query_assets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_object + + request = {} + await client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.query_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): client = AssetServiceAsyncClient( @@ -4987,6 +5969,38 @@ def test_create_saved_query_non_empty_request_with_auto_populated_field(): saved_query_id='saved_query_id_value', ) +def test_create_saved_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc + + request = {} + client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5012,6 +6026,43 @@ async def test_create_saved_query_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateSavedQueryRequest() +@pytest.mark.asyncio +async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_saved_query in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_object + + request = {} + await client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceAsyncClient( @@ -5305,6 +6356,38 @@ def test_get_saved_query_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_saved_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc + + request = {} + client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5330,6 +6413,43 @@ async def test_get_saved_query_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetSavedQueryRequest() +@pytest.mark.asyncio +async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_saved_query in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_object + + request = {} + await client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceAsyncClient( @@ -5601,6 +6721,38 @@ def test_list_saved_queries_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_saved_queries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_saved_queries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc + + request = {} + client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_saved_queries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_saved_queries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5610,18 +6762,55 @@ async def test_list_saved_queries_empty_call_async(): transport='grpc_asyncio', ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_saved_queries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_saved_queries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + +@pytest.mark.asyncio +async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_saved_queries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_object + + request = {} + await 
client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_saved_queries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 @pytest.mark.asyncio async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): @@ -6080,6 +7269,38 @@ def test_update_saved_query_non_empty_request_with_auto_populated_field(): assert args[0] == asset_service.UpdateSavedQueryRequest( ) +def test_update_saved_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc + + request = {} + client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6105,6 +7326,43 @@ async def test_update_saved_query_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateSavedQueryRequest() +@pytest.mark.asyncio +async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_saved_query in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_object + + request = {} + await client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceAsyncClient( @@ -6379,6 +7637,38 @@ def test_delete_saved_query_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_saved_query_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc + + request = {} + client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6399,6 +7689,43 @@ async def test_delete_saved_query_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteSavedQueryRequest() +@pytest.mark.asyncio +async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_saved_query in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_object + + request = {} + await client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceAsyncClient( @@ -6655,6 +7982,38 @@ def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_ scope='scope_value', ) +def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc + + request = {} + client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_effective_iam_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6676,6 +8035,43 @@ async def test_batch_get_effective_iam_policies_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.batch_get_effective_iam_policies in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_object + + request = {} + await client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.batch_get_effective_iam_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceAsyncClient( @@ -6859,6 +8255,38 @@ def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_analyze_org_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_org_policies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc + + request = {} + client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_org_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_analyze_org_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6881,6 +8309,43 @@ async def test_analyze_org_policies_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() +@pytest.mark.asyncio +async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.analyze_org_policies in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_object + + request = {} + await client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.analyze_org_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceAsyncClient( @@ -7360,6 +8825,38 @@ def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_popu page_token='page_token_value', ) +def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc + + request = {} + client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_org_policy_governed_containers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7382,6 +8879,43 @@ async def test_analyze_org_policy_governed_containers_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.analyze_org_policy_governed_containers in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_object + + request = {} + await client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.analyze_org_policy_governed_containers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceAsyncClient( @@ -7861,6 +9395,38 @@ def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populate page_token='page_token_value', ) +def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc + + request = {} + client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_org_policy_governed_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7883,6 +9449,43 @@ async def test_analyze_org_policy_governed_assets_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AssetServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.analyze_org_policy_governed_assets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_object + + request = {} + await client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.analyze_org_policy_governed_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceAsyncClient( @@ -8309,6 +9912,42 @@ def test_export_assets_rest(request_type): # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" +def test_export_assets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc + + request = {} + client.export_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_export_assets_rest_required_fields(request_type=asset_service.ExportAssetsRequest): transport_class = transports.AssetServiceRestTransport @@ -8489,6 +10128,38 @@ def test_list_assets_rest(request_type): assert isinstance(response, pagers.ListAssetsPager) assert response.next_page_token == 'next_page_token_value' +def test_list_assets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc + + request = {} + client.list_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsRequest): transport_class = transports.AssetServiceRestTransport @@ -8778,6 +10449,38 @@ def test_batch_get_assets_history_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) +def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_get_assets_history in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc + + request = {} + client.batch_get_assets_history(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_assets_history(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_batch_get_assets_history_rest_required_fields(request_type=asset_service.BatchGetAssetsHistoryRequest): transport_class = transports.AssetServiceRestTransport @@ -8969,6 +10672,38 @@ def test_create_feed_rest(request_type): assert response.content_type == asset_service.ContentType.RESOURCE assert response.relationship_types == ['relationship_types_value'] +def test_create_feed_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc + + request = {} + client.create_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedRequest): transport_class = transports.AssetServiceRestTransport @@ -9216,6 +10951,38 @@ def test_get_feed_rest(request_type): assert response.content_type == asset_service.ContentType.RESOURCE assert response.relationship_types == ['relationship_types_value'] +def test_get_feed_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc + + request = {} + client.get_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest): transport_class = transports.AssetServiceRestTransport @@ -9448,6 +11215,38 @@ def test_list_feeds_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.ListFeedsResponse) +def test_list_feeds_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_feeds in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc + + request = {} + client.list_feeds(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_feeds(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsRequest): transport_class = transports.AssetServiceRestTransport @@ -9690,6 +11489,38 @@ def test_update_feed_rest(request_type): assert response.content_type == asset_service.ContentType.RESOURCE assert response.relationship_types == ['relationship_types_value'] +def test_update_feed_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_feed in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc + + request = {} + client.update_feed(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedRequest): transport_class = transports.AssetServiceRestTransport @@ -9915,6 +11746,38 @@ def test_delete_feed_rest(request_type): # Establish that the response is the type that we expect. assert response is None +def test_delete_feed_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_feed in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc + + request = {} + client.delete_feed(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_feed(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedRequest): transport_class = transports.AssetServiceRestTransport @@ -10139,6 +12002,38 @@ def test_search_all_resources_rest(request_type): assert isinstance(response, pagers.SearchAllResourcesPager) assert response.next_page_token == 'next_page_token_value' +def test_search_all_resources_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_all_resources in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc + + request = {} + client.search_all_resources(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_all_resources(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_search_all_resources_rest_required_fields(request_type=asset_service.SearchAllResourcesRequest): transport_class = transports.AssetServiceRestTransport @@ -10434,6 +12329,38 @@ def test_search_all_iam_policies_rest(request_type): assert isinstance(response, pagers.SearchAllIamPoliciesPager) assert response.next_page_token == 'next_page_token_value' +def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.search_all_iam_policies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc + + request = {} + client.search_all_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_all_iam_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_search_all_iam_policies_rest_required_fields(request_type=asset_service.SearchAllIamPoliciesRequest): transport_class = transports.AssetServiceRestTransport @@ -10727,6 +12654,38 @@ def test_analyze_iam_policy_rest(request_type): assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) assert response.fully_explored is True +def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc + + request = {} + client.analyze_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.analyze_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_analyze_iam_policy_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyRequest): transport_class = transports.AssetServiceRestTransport @@ -10900,6 +12859,42 @@ def test_analyze_iam_policy_longrunning_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_iam_policy_longrunning in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc + + request = {} + client.analyze_iam_policy_longrunning(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.analyze_iam_policy_longrunning(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): transport_class = transports.AssetServiceRestTransport @@ -11073,6 +13068,38 @@ def test_analyze_move_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.AnalyzeMoveResponse) +def test_analyze_move_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_move in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc + + request = {} + client.analyze_move(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.analyze_move(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMoveRequest): transport_class = transports.AssetServiceRestTransport @@ -11269,6 +13296,38 @@ def test_query_assets_rest(request_type): assert response.job_reference == 'job_reference_value' assert response.done is True +def test_query_assets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.query_assets in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc + + request = {} + client.query_assets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.query_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): transport_class = transports.AssetServiceRestTransport @@ -11521,6 +13580,38 @@ def get_message_fields(field): assert response.creator == 'creator_value' assert response.last_updater == 'last_updater_value' +def test_create_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc + + request = {} + client.create_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_create_saved_query_rest_required_fields(request_type=asset_service.CreateSavedQueryRequest): transport_class = transports.AssetServiceRestTransport @@ -11779,6 +13870,38 @@ def test_get_saved_query_rest(request_type): assert response.creator == 'creator_value' assert response.last_updater == 'last_updater_value' +def test_get_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc + + request = {} + client.get_saved_query(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_saved_query_rest_required_fields(request_type=asset_service.GetSavedQueryRequest): transport_class = transports.AssetServiceRestTransport @@ -12013,6 +14136,38 @@ def test_list_saved_queries_rest(request_type): assert isinstance(response, pagers.ListSavedQueriesPager) assert response.next_page_token == 'next_page_token_value' +def test_list_saved_queries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_saved_queries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc + + request = {} + client.list_saved_queries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_saved_queries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_saved_queries_rest_required_fields(request_type=asset_service.ListSavedQueriesRequest): transport_class = transports.AssetServiceRestTransport @@ -12374,6 +14529,38 @@ def get_message_fields(field): assert response.creator == 'creator_value' assert response.last_updater == 'last_updater_value' +def test_update_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc + + request = {} + client.update_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_update_saved_query_rest_required_fields(request_type=asset_service.UpdateSavedQueryRequest): transport_class = transports.AssetServiceRestTransport @@ -12603,6 +14790,38 @@ def test_delete_saved_query_rest(request_type): # Establish that the response is the type that we expect. 
assert response is None +def test_delete_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_saved_query in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc + + request = {} + client.delete_saved_query(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_saved_query(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_delete_saved_query_rest_required_fields(request_type=asset_service.DeleteSavedQueryRequest): transport_class = transports.AssetServiceRestTransport @@ -12825,6 +15044,38 @@ def test_batch_get_effective_iam_policies_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) +def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc + + request = {} + client.batch_get_effective_iam_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.batch_get_effective_iam_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): transport_class = transports.AssetServiceRestTransport @@ -13019,6 +15270,38 @@ def test_analyze_org_policies_rest(request_type): assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) assert response.next_page_token == 'next_page_token_value' +def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_org_policies in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc + + request = {} + client.analyze_org_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_org_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_analyze_org_policies_rest_required_fields(request_type=asset_service.AnalyzeOrgPoliciesRequest): transport_class = transports.AssetServiceRestTransport @@ -13325,6 +15608,38 @@ def test_analyze_org_policy_governed_containers_rest(request_type): assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) assert response.next_page_token == 'next_page_token_value' +def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_org_policy_governed_containers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc + + request = {} + client.analyze_org_policy_governed_containers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_org_policy_governed_containers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_analyze_org_policy_governed_containers_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): transport_class = transports.AssetServiceRestTransport @@ -13631,6 +15946,38 @@ def test_analyze_org_policy_governed_assets_rest(request_type): assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) assert response.next_page_token == 'next_page_token_value' +def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.analyze_org_policy_governed_assets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc + + request = {} + client.analyze_org_policy_governed_assets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.analyze_org_policy_governed_assets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): transport_class = transports.AssetServiceRestTransport diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 0fee3e065c38..151f89ed2234 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -344,14 +344,17 @@ async def sample_generate_access_token(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, scope, lifetime]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = common.GenerateAccessTokenRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, common.GenerateAccessTokenRequest): + request = common.GenerateAccessTokenRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -366,18 +369,7 @@ async def sample_generate_access_token(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.generate_access_token, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_access_token] # Certain fields should be provided within the metadata header; # add these here. @@ -501,14 +493,17 @@ async def sample_generate_id_token(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, audience, include_email]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = common.GenerateIdTokenRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, common.GenerateIdTokenRequest): + request = common.GenerateIdTokenRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -523,18 +518,7 @@ async def sample_generate_id_token(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.generate_id_token, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.generate_id_token] # Certain fields should be provided within the metadata header; # add these here. @@ -646,14 +630,17 @@ async def sample_sign_blob(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = common.SignBlobRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, common.SignBlobRequest): + request = common.SignBlobRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -666,18 +653,7 @@ async def sample_sign_blob(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.sign_blob, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.sign_blob] # Certain fields should be provided within the metadata header; # add these here. @@ -792,14 +768,17 @@ async def sample_sign_jwt(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = common.SignJwtRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, common.SignJwtRequest): + request = common.SignJwtRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -812,18 +791,7 @@ async def sample_sign_jwt(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.sign_jwt, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.sign_jwt] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 4397f37900b9..9126bd1da6cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -688,17 +688,15 @@ def sample_generate_access_token(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, scope, lifetime]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a common.GenerateAccessTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, common.GenerateAccessTokenRequest): request = common.GenerateAccessTokenRequest(request) # If we have keyword arguments corresponding to fields on the @@ -838,17 +836,15 @@ def sample_generate_id_token(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, audience, include_email]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a common.GenerateIdTokenRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, common.GenerateIdTokenRequest): request = common.GenerateIdTokenRequest(request) # If we have keyword arguments corresponding to fields on the @@ -976,17 +972,15 @@ def sample_sign_blob(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a common.SignBlobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, common.SignBlobRequest): request = common.SignBlobRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1115,17 +1109,15 @@ def sample_sign_jwt(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, delegates, payload]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a common.SignJwtRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, common.SignJwtRequest): request = common.SignJwtRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 33e9637cb0d0..2f1aa66aaccc 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -120,7 +120,10 @@ def _prep_wrapped_messages(self, client_info): self.generate_access_token: gapic_v1.method.wrap_method( self.generate_access_token, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -132,7 +135,10 @@ def _prep_wrapped_messages(self, client_info): self.generate_id_token: gapic_v1.method.wrap_method( self.generate_id_token, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -144,7 +150,10 @@ def _prep_wrapped_messages(self, client_info): self.sign_blob: gapic_v1.method.wrap_method( self.sign_blob, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), @@ -156,7 
+165,10 @@ def _prep_wrapped_messages(self, client_info): self.sign_jwt: gapic_v1.method.wrap_method( self.sign_jwt, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.ServiceUnavailable, ), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index f3f074d5766e..4a4809a095cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -349,6 +351,71 @@ def sign_jwt(self) -> Callable[ ) return self._stubs['sign_jwt'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.generate_access_token: gapic_v1.method_async.wrap_method( + self.generate_access_token, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, 
+ ), + self.generate_id_token: gapic_v1.method_async.wrap_method( + self.generate_id_token, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.sign_blob: gapic_v1.method_async.wrap_method( + self.sign_blob, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.sign_jwt: gapic_v1.method_async.wrap_method( + self.sign_jwt, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index f12e4689fd11..63a1da8acbfa 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -837,6 +837,38 @@ def test_generate_access_token_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_generate_access_token_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with 
mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_access_token in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.generate_access_token] = mock_rpc + + request = {} + client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.generate_access_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_generate_access_token_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -859,6 +891,43 @@ async def test_generate_access_token_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateAccessTokenRequest() +@pytest.mark.asyncio +async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.generate_access_token in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class 
AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.generate_access_token] = mock_object + + request = {} + await client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.generate_access_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsAsyncClient( @@ -1148,6 +1217,38 @@ def test_generate_id_token_non_empty_request_with_auto_populated_field(): audience='audience_value', ) +def test_generate_id_token_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_id_token in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc + + request = {} + client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_id_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_generate_id_token_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1170,6 +1271,43 @@ async def test_generate_id_token_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateIdTokenRequest() +@pytest.mark.asyncio +async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.generate_id_token in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.generate_id_token] = mock_object + + request = {} + await client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.generate_id_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsAsyncClient( @@ -1463,6 +1601,38 @@ def test_sign_blob_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_sign_blob_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sign_blob in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.sign_blob] = mock_rpc + + request = {} + client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sign_blob(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_sign_blob_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1486,6 +1656,43 @@ async def test_sign_blob_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == common.SignBlobRequest() +@pytest.mark.asyncio +async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.sign_blob in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.sign_blob] = mock_object + + request = {} + await client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.sign_blob(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): client = IAMCredentialsAsyncClient( @@ -1773,6 +1980,38 @@ def test_sign_jwt_non_empty_request_with_auto_populated_field(): payload='payload_value', ) +def test_sign_jwt_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sign_jwt in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.sign_jwt] = mock_rpc + + request = {} + client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sign_jwt(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_sign_jwt_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1796,6 +2035,43 @@ async def test_sign_jwt_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == common.SignJwtRequest() +@pytest.mark.asyncio +async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = IAMCredentialsAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.sign_jwt in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.sign_jwt] = mock_object + + request = {} + await client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.sign_jwt(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): client = IAMCredentialsAsyncClient( @@ -2036,6 +2312,38 @@ def test_generate_access_token_rest(request_type): assert isinstance(response, common.GenerateAccessTokenResponse) assert response.access_token == 'access_token_value' +def test_generate_access_token_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_access_token in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.generate_access_token] = mock_rpc + + request = {} + client.generate_access_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_access_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_generate_access_token_rest_required_fields(request_type=common.GenerateAccessTokenRequest): transport_class = transports.IAMCredentialsRestTransport @@ -2281,6 +2589,38 @@ def test_generate_id_token_rest(request_type): assert isinstance(response, common.GenerateIdTokenResponse) assert response.token == 'token_value' +def test_generate_id_token_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.generate_id_token in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc + + request = {} + client.generate_id_token(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.generate_id_token(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTokenRequest): transport_class = transports.IAMCredentialsRestTransport @@ -2528,6 +2868,38 @@ def test_sign_blob_rest(request_type): assert response.key_id == 'key_id_value' assert response.signed_blob == b'signed_blob_blob' +def test_sign_blob_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sign_blob in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.sign_blob] = mock_rpc + + request = {} + client.sign_blob(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sign_blob(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): transport_class = transports.IAMCredentialsRestTransport @@ -2773,6 +3145,38 @@ def test_sign_jwt_rest(request_type): assert response.key_id == 'key_id_value' assert response.signed_jwt == 'signed_jwt_value' +def test_sign_jwt_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.sign_jwt in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.sign_jwt] = mock_rpc + + request = {} + client.sign_jwt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.sign_jwt(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): transport_class = transports.IAMCredentialsRestTransport diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index cfefe93ceaa3..9fc21688adab 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -325,14 +325,17 @@ async def sample_get_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.GetTriggerRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetTriggerRequest): + request = eventarc.GetTriggerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -341,11 +344,7 @@ async def sample_get_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_trigger, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_trigger] # Certain fields should be provided within the metadata header; # add these here. @@ -432,14 +431,17 @@ async def sample_list_triggers(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.ListTriggersRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListTriggersRequest): + request = eventarc.ListTriggersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -448,11 +450,7 @@ async def sample_list_triggers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_triggers, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_triggers] # Certain fields should be provided within the metadata header; # add these here. @@ -577,14 +575,17 @@ async def sample_create_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, trigger, trigger_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.CreateTriggerRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateTriggerRequest): + request = eventarc.CreateTriggerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -597,11 +598,7 @@ async def sample_create_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_trigger, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_trigger] # Certain fields should be provided within the metadata header; # add these here. @@ -717,14 +714,17 @@ async def sample_update_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([trigger, update_mask, allow_missing]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.UpdateTriggerRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, eventarc.UpdateTriggerRequest): + request = eventarc.UpdateTriggerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -737,11 +737,7 @@ async def sample_update_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_trigger, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_trigger] # Certain fields should be provided within the metadata header; # add these here. @@ -850,14 +846,17 @@ async def sample_delete_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, allow_missing]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.DeleteTriggerRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteTriggerRequest): + request = eventarc.DeleteTriggerRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -868,11 +867,7 @@ async def sample_delete_trigger(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_trigger, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_trigger] # Certain fields should be provided within the metadata header; # add these here. @@ -970,14 +965,17 @@ async def sample_get_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.GetChannelRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetChannelRequest): + request = eventarc.GetChannelRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -986,11 +984,7 @@ async def sample_get_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_channel, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_channel] # Certain fields should be provided within the metadata header; # add these here. @@ -1077,14 +1071,17 @@ async def sample_list_channels(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.ListChannelsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListChannelsRequest): + request = eventarc.ListChannelsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1093,11 +1090,7 @@ async def sample_list_channels(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_channels, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_channels] # Certain fields should be provided within the metadata header; # add these here. @@ -1222,14 +1215,17 @@ async def sample_create_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, channel, channel_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.CreateChannelRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, eventarc.CreateChannelRequest): + request = eventarc.CreateChannelRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1242,11 +1238,7 @@ async def sample_create_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_channel_, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_channel_] # Certain fields should be provided within the metadata header; # add these here. @@ -1356,14 +1348,17 @@ async def sample_update_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([channel, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.UpdateChannelRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateChannelRequest): + request = eventarc.UpdateChannelRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1374,11 +1369,7 @@ async def sample_update_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_channel, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_channel] # Certain fields should be provided within the metadata header; # add these here. @@ -1481,14 +1472,17 @@ async def sample_delete_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.DeleteChannelRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteChannelRequest): + request = eventarc.DeleteChannelRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1497,11 +1491,7 @@ async def sample_delete_channel(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_channel, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_channel] # Certain fields should be provided within the metadata header; # add these here. @@ -1593,14 +1583,17 @@ async def sample_get_provider(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.GetProviderRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetProviderRequest): + request = eventarc.GetProviderRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1609,11 +1602,7 @@ async def sample_get_provider(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_provider, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_provider] # Certain fields should be provided within the metadata header; # add these here. @@ -1700,14 +1689,17 @@ async def sample_list_providers(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.ListProvidersRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, eventarc.ListProvidersRequest): + request = eventarc.ListProvidersRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1716,11 +1708,7 @@ async def sample_list_providers(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_providers, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_providers] # Certain fields should be provided within the metadata header; # add these here. @@ -1818,14 +1806,17 @@ async def sample_get_channel_connection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.GetChannelConnectionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.GetChannelConnectionRequest): + request = eventarc.GetChannelConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1834,11 +1825,7 @@ async def sample_get_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_channel_connection, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_channel_connection] # Certain fields should be provided within the metadata header; # add these here. @@ -1926,14 +1913,17 @@ async def sample_list_channel_connections(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.ListChannelConnectionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.ListChannelConnectionsRequest): + request = eventarc.ListChannelConnectionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1942,11 +1932,7 @@ async def sample_list_channel_connections(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_channel_connections, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_channel_connections] # Certain fields should be provided within the metadata header; # add these here. @@ -2071,14 +2057,17 @@ async def sample_create_channel_connection(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, channel_connection, channel_connection_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.CreateChannelConnectionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.CreateChannelConnectionRequest): + request = eventarc.CreateChannelConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2091,11 +2080,7 @@ async def sample_create_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_channel_connection, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_channel_connection] # Certain fields should be provided within the metadata header; # add these here. @@ -2196,14 +2181,17 @@ async def sample_delete_channel_connection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.DeleteChannelConnectionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.DeleteChannelConnectionRequest): + request = eventarc.DeleteChannelConnectionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2212,11 +2200,7 @@ async def sample_delete_channel_connection(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_channel_connection, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_channel_connection] # Certain fields should be provided within the metadata header; # add these here. @@ -2314,14 +2298,17 @@ async def sample_get_google_channel_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.GetGoogleChannelConfigRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, eventarc.GetGoogleChannelConfigRequest): + request = eventarc.GetGoogleChannelConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2330,11 +2317,7 @@ async def sample_get_google_channel_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_google_channel_config, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_google_channel_config] # Certain fields should be provided within the metadata header; # add these here. @@ -2435,14 +2418,17 @@ async def sample_update_google_channel_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([google_channel_config, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = eventarc.UpdateGoogleChannelConfigRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, eventarc.UpdateGoogleChannelConfigRequest): + request = eventarc.UpdateGoogleChannelConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2453,11 +2439,7 @@ async def sample_update_google_channel_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_google_channel_config, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_google_channel_config] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 6b6f1fbd1e5e..590f85a28245 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -750,17 +750,15 @@ def sample_get_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.GetTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, eventarc.GetTriggerRequest): request = eventarc.GetTriggerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -857,17 +855,15 @@ def sample_list_triggers(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.ListTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.ListTriggersRequest): request = eventarc.ListTriggersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1002,17 +998,15 @@ def sample_create_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, trigger, trigger_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.CreateTriggerRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.CreateTriggerRequest): request = eventarc.CreateTriggerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1142,17 +1136,15 @@ def sample_update_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([trigger, update_mask, allow_missing]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.UpdateTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.UpdateTriggerRequest): request = eventarc.UpdateTriggerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1275,17 +1267,15 @@ def sample_delete_trigger(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, allow_missing]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.DeleteTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.DeleteTriggerRequest): request = eventarc.DeleteTriggerRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1395,17 +1385,15 @@ def sample_get_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.GetChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.GetChannelRequest): request = eventarc.GetChannelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1502,17 +1490,15 @@ def sample_list_channels(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.ListChannelsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.ListChannelsRequest): request = eventarc.ListChannelsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1647,17 +1633,15 @@ def sample_create_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, channel, channel_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.CreateChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, eventarc.CreateChannelRequest): request = eventarc.CreateChannelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1781,17 +1765,15 @@ def sample_update_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([channel, update_mask]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.UpdateChannelRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.UpdateChannelRequest): request = eventarc.UpdateChannelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1906,17 +1888,15 @@ def sample_delete_channel(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.DeleteChannelRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.DeleteChannelRequest): request = eventarc.DeleteChannelRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2018,17 +1998,15 @@ def sample_get_provider(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.GetProviderRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.GetProviderRequest): request = eventarc.GetProviderRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2125,17 +2103,15 @@ def sample_list_providers(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.ListProvidersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.ListProvidersRequest): request = eventarc.ListProvidersRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2243,17 +2219,15 @@ def sample_get_channel_connection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.GetChannelConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.GetChannelConnectionRequest): request = eventarc.GetChannelConnectionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2351,17 +2325,15 @@ def sample_list_channel_connections(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.ListChannelConnectionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.ListChannelConnectionsRequest): request = eventarc.ListChannelConnectionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2496,17 +2468,15 @@ def sample_create_channel_connection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, channel_connection, channel_connection_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.CreateChannelConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, eventarc.CreateChannelConnectionRequest): request = eventarc.CreateChannelConnectionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2621,17 +2591,15 @@ def sample_delete_channel_connection(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.DeleteChannelConnectionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.DeleteChannelConnectionRequest): request = eventarc.DeleteChannelConnectionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2739,17 +2707,15 @@ def sample_get_google_channel_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.GetGoogleChannelConfigRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, eventarc.GetGoogleChannelConfigRequest): request = eventarc.GetGoogleChannelConfigRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2860,17 +2826,15 @@ def sample_update_google_channel_config(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([google_channel_config, update_mask]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a eventarc.UpdateGoogleChannelConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, eventarc.UpdateGoogleChannelConfigRequest): request = eventarc.UpdateGoogleChannelConfigRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 6da6805298d3..271fbf8ada0d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -733,6 +735,101 @@ def update_google_channel_config(self) -> Callable[ ) return self._stubs['update_google_channel_config'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_trigger: gapic_v1.method_async.wrap_method( + self.get_trigger, + default_timeout=None, + client_info=client_info, + ), + self.list_triggers: gapic_v1.method_async.wrap_method( + self.list_triggers, + default_timeout=None, + client_info=client_info, + ), + self.create_trigger: gapic_v1.method_async.wrap_method( + self.create_trigger, + default_timeout=None, + client_info=client_info, + ), + self.update_trigger: gapic_v1.method_async.wrap_method( + self.update_trigger, + default_timeout=None, + 
client_info=client_info, + ), + self.delete_trigger: gapic_v1.method_async.wrap_method( + self.delete_trigger, + default_timeout=None, + client_info=client_info, + ), + self.get_channel: gapic_v1.method_async.wrap_method( + self.get_channel, + default_timeout=None, + client_info=client_info, + ), + self.list_channels: gapic_v1.method_async.wrap_method( + self.list_channels, + default_timeout=None, + client_info=client_info, + ), + self.create_channel_: gapic_v1.method_async.wrap_method( + self.create_channel_, + default_timeout=None, + client_info=client_info, + ), + self.update_channel: gapic_v1.method_async.wrap_method( + self.update_channel, + default_timeout=None, + client_info=client_info, + ), + self.delete_channel: gapic_v1.method_async.wrap_method( + self.delete_channel, + default_timeout=None, + client_info=client_info, + ), + self.get_provider: gapic_v1.method_async.wrap_method( + self.get_provider, + default_timeout=None, + client_info=client_info, + ), + self.list_providers: gapic_v1.method_async.wrap_method( + self.list_providers, + default_timeout=None, + client_info=client_info, + ), + self.get_channel_connection: gapic_v1.method_async.wrap_method( + self.get_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.list_channel_connections: gapic_v1.method_async.wrap_method( + self.list_channel_connections, + default_timeout=None, + client_info=client_info, + ), + self.create_channel_connection: gapic_v1.method_async.wrap_method( + self.create_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.delete_channel_connection: gapic_v1.method_async.wrap_method( + self.delete_channel_connection, + default_timeout=None, + client_info=client_info, + ), + self.get_google_channel_config: gapic_v1.method_async.wrap_method( + self.get_google_channel_config, + default_timeout=None, + client_info=client_info, + ), + self.update_google_channel_config: gapic_v1.method_async.wrap_method( + 
self.update_google_channel_config, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 5729663c9cd0..8774c4b5d355 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -865,6 +865,38 @@ def test_get_trigger_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc + + request = {} + client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -891,6 +923,43 @@ async def test_get_trigger_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetTriggerRequest() +@pytest.mark.asyncio +async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_trigger] = mock_object + + request = {} + await client.get_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): client = EventarcAsyncClient( @@ -1168,6 +1237,38 @@ def test_list_triggers_non_empty_request_with_auto_populated_field(): filter='filter_value', ) +def test_list_triggers_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_triggers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc + + request = {} + client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_triggers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1191,6 +1292,43 @@ async def test_list_triggers_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListTriggersRequest() +@pytest.mark.asyncio +async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_triggers in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_triggers] = mock_object + + request = {} + await client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): client = EventarcAsyncClient( @@ -1645,6 +1783,42 @@ def test_create_trigger_non_empty_request_with_auto_populated_field(): trigger_id='trigger_id_value', ) +def test_create_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc + + request = {} + client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1667,6 +1841,47 @@ async def test_create_trigger_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateTriggerRequest() +@pytest.mark.asyncio +async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_trigger] = mock_object + + request = {} + await client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): client = EventarcAsyncClient( @@ -1944,6 +2159,42 @@ def test_update_trigger_non_empty_request_with_auto_populated_field(): assert args[0] == eventarc.UpdateTriggerRequest( ) +def test_update_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc + + request = {} + client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1966,6 +2217,47 @@ async def test_update_trigger_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateTriggerRequest() +@pytest.mark.asyncio +async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_trigger] = mock_object + + request = {} + await client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): client = EventarcAsyncClient( @@ -2247,6 +2539,42 @@ def test_delete_trigger_non_empty_request_with_auto_populated_field(): etag='etag_value', ) +def test_delete_trigger_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc + + request = {} + client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2269,6 +2597,47 @@ async def test_delete_trigger_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteTriggerRequest() +@pytest.mark.asyncio +async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_trigger in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_trigger] = mock_object + + request = {} + await client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): client = EventarcAsyncClient( @@ -2552,6 +2921,38 @@ def test_get_channel_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_channel_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc + + request = {} + client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2579,6 +2980,43 @@ async def test_get_channel_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelRequest() +@pytest.mark.asyncio +async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_channel in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_channel] = mock_object + + request = {} + await client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): client = EventarcAsyncClient( @@ -2856,6 +3294,38 @@ def test_list_channels_non_empty_request_with_auto_populated_field(): order_by='order_by_value', ) +def test_list_channels_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_channels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc + + request = {} + client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_channels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_channels_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2879,6 +3349,43 @@ async def test_list_channels_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelsRequest() +@pytest.mark.asyncio +async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_channels in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_channels] = mock_object + + request = {} + await client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_channels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): client = EventarcAsyncClient( @@ -3333,6 +3840,42 @@ def test_create_channel_non_empty_request_with_auto_populated_field(): channel_id='channel_id_value', ) +def test_create_channel_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_channel_ in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc + + request = {} + client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3355,6 +3898,47 @@ async def test_create_channel_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelRequest() +@pytest.mark.asyncio +async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_channel_ in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_channel_] = mock_object + + request = {} + await client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): client = EventarcAsyncClient( @@ -3632,6 +4216,42 @@ def test_update_channel_non_empty_request_with_auto_populated_field(): assert args[0] == eventarc.UpdateChannelRequest( ) +def test_update_channel_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc + + request = {} + client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3654,6 +4274,47 @@ async def test_update_channel_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateChannelRequest() +@pytest.mark.asyncio +async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_channel in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_channel] = mock_object + + request = {} + await client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): client = EventarcAsyncClient( @@ -3923,6 +4584,42 @@ def test_delete_channel_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_channel_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc + + request = {} + client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3945,6 +4642,47 @@ async def test_delete_channel_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelRequest() +@pytest.mark.asyncio +async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_channel in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_channel] = mock_object + + request = {} + await client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): client = EventarcAsyncClient( @@ -4209,6 +4947,38 @@ def test_get_provider_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_provider_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_provider in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc + + request = {} + client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_provider(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_provider_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4232,6 +5002,43 @@ async def test_get_provider_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetProviderRequest() +@pytest.mark.asyncio +async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_provider in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_provider] = mock_object + + request = {} + await client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_provider(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): client = EventarcAsyncClient( @@ -4503,6 +5310,38 @@ def test_list_providers_non_empty_request_with_auto_populated_field(): filter='filter_value', ) +def test_list_providers_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_providers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc + + request = {} + client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_providers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_providers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4526,6 +5365,43 @@ async def test_list_providers_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListProvidersRequest() +@pytest.mark.asyncio +async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_providers in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_providers] = mock_object + + request = {} + await client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_providers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): client = EventarcAsyncClient( @@ -4987,6 +5863,38 @@ def test_get_channel_connection_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_channel_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc + + request = {} + client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_channel_connection_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5012,6 +5920,43 @@ async def test_get_channel_connection_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelConnectionRequest() +@pytest.mark.asyncio +async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_channel_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_channel_connection] = mock_object + + request = {} + await client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): client = EventarcAsyncClient( @@ -5283,6 +6228,38 @@ def test_list_channel_connections_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_channel_connections_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_channel_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc + + request = {} + client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_channel_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_channel_connections_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5306,6 +6283,43 @@ async def test_list_channel_connections_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelConnectionsRequest() +@pytest.mark.asyncio +async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_channel_connections in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_channel_connections] = mock_object + + request = {} + await client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_channel_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcAsyncClient( @@ -5760,6 +6774,42 @@ def test_create_channel_connection_non_empty_request_with_auto_populated_field() channel_connection_id='channel_connection_id_value', ) +def test_create_channel_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc + + request = {} + client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_channel_connection_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5782,6 +6832,47 @@ async def test_create_channel_connection_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelConnectionRequest() +@pytest.mark.asyncio +async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_channel_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_channel_connection] = mock_object + + request = {} + await client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcAsyncClient( @@ -6061,6 +7152,42 @@ def test_delete_channel_connection_non_empty_request_with_auto_populated_field() name='name_value', ) +def test_delete_channel_connection_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc + + request = {} + client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_channel_connection_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6083,6 +7210,47 @@ async def test_delete_channel_connection_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelConnectionRequest() +@pytest.mark.asyncio +async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_channel_connection in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_channel_connection] = mock_object + + request = {} + await client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcAsyncClient( @@ -6347,6 +7515,38 @@ def test_get_google_channel_config_non_empty_request_with_auto_populated_field() name='name_value', ) +def test_get_google_channel_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_google_channel_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc + + request = {} + client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_google_channel_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6370,6 +7570,43 @@ async def test_get_google_channel_config_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetGoogleChannelConfigRequest() +@pytest.mark.asyncio +async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_google_channel_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_google_channel_config] = mock_object + + request = {} + await client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcAsyncClient( @@ -6633,6 +7870,38 @@ def test_update_google_channel_config_non_empty_request_with_auto_populated_fiel assert args[0] == eventarc.UpdateGoogleChannelConfigRequest( ) +def test_update_google_channel_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_google_channel_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc + + request = {} + client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_google_channel_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6656,6 +7925,43 @@ async def test_update_google_channel_config_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() +@pytest.mark.asyncio +async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EventarcAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_google_channel_config in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_google_channel_config] = mock_object + + request = {} + await client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcAsyncClient( @@ -6886,13 +8192,45 @@ def test_get_trigger_rest(request_type): req.return_value = response_value response = client.get_trigger(request) - # Establish that the response is the type that we expect. - assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' + # Establish that the response is the type that we expect. + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.etag == 'etag_value' + +def test_get_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc + + request = {} + client.get_trigger(request) 
+ + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerRequest): @@ -7130,6 +8468,38 @@ def test_list_triggers_rest(request_type): assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] +def test_list_triggers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_triggers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc + + request = {} + client.list_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_triggers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRequest): transport_class = transports.EventarcRestTransport @@ -7480,6 +8850,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_create_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc + + request = {} + client.create_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTriggerRequest): transport_class = transports.EventarcRestTransport @@ -7798,6 +9204,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_update_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc + + request = {} + client.update_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTriggerRequest): transport_class = transports.EventarcRestTransport @@ -8037,6 +9479,42 @@ def test_delete_trigger_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_delete_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_trigger in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc + + request = {} + client.delete_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_trigger(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTriggerRequest): transport_class = transports.EventarcRestTransport @@ -8293,6 +9771,38 @@ def test_get_channel_rest(request_type): assert response.activation_token == 'activation_token_value' assert response.crypto_key_name == 'crypto_key_name_value' +def test_get_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert 
wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc + + request = {} + client.get_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelRequest): transport_class = transports.EventarcRestTransport @@ -8529,6 +10039,38 @@ def test_list_channels_rest(request_type): assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] +def test_list_channels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_channels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc + + request = {} + client.list_channels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_channels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRequest): transport_class = transports.EventarcRestTransport @@ -8879,6 +10421,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert response.operation.name == "operations/spam" +def test_create_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_channel_ in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc + + request = {} + client.create_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannelRequest): transport_class = transports.EventarcRestTransport @@ -9197,6 +10775,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_update_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc + + request = {} + client.update_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannelRequest): transport_class = transports.EventarcRestTransport @@ -9434,6 +11048,42 @@ def test_delete_channel_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_delete_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_channel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc + + request = {} + client.delete_channel(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_channel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannelRequest): transport_class = transports.EventarcRestTransport @@ -9679,6 +11329,38 @@ def test_get_provider_rest(request_type): assert response.name == 'name_value' assert response.display_name == 'display_name_value' +def test_get_provider_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + 
wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_provider in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc + + request = {} + client.get_provider(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_provider(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequest): transport_class = transports.EventarcRestTransport @@ -9915,6 +11597,38 @@ def test_list_providers_rest(request_type): assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] +def test_list_providers_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_providers in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc + + request = {} + client.list_providers(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_providers(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_providers_rest_required_fields(request_type=eventarc.ListProvidersRequest): transport_class = transports.EventarcRestTransport @@ -10212,6 +11926,38 @@ def test_get_channel_connection_rest(request_type): assert response.channel == 'channel_value' assert response.activation_token == 'activation_token_value' +def test_get_channel_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc + + request = {} + client.get_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetChannelConnectionRequest): transport_class = transports.EventarcRestTransport @@ -10448,6 +12194,38 @@ def test_list_channel_connections_rest(request_type): assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] +def test_list_channel_connections_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_channel_connections in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc + + request = {} + client.list_channel_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_channel_connections(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_channel_connections_rest_required_fields(request_type=eventarc.ListChannelConnectionsRequest): transport_class = transports.EventarcRestTransport @@ -10798,6 +12576,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_create_channel_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc + + request = {} + client.create_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_create_channel_connection_rest_required_fields(request_type=eventarc.CreateChannelConnectionRequest): transport_class = transports.EventarcRestTransport @@ -11041,6 +12855,42 @@ def test_delete_channel_connection_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_channel_connection in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc + + request = {} + client.delete_channel_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_channel_connection(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_delete_channel_connection_rest_required_fields(request_type=eventarc.DeleteChannelConnectionRequest): transport_class = transports.EventarcRestTransport @@ -11273,6 +13123,38 @@ def test_get_google_channel_config_rest(request_type): assert response.name == 'name_value' assert response.crypto_key_name == 'crypto_key_name_value' +def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), 
+ transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_google_channel_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc + + request = {} + client.get_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_google_channel_config_rest_required_fields(request_type=eventarc.GetGoogleChannelConfigRequest): transport_class = transports.EventarcRestTransport @@ -11573,6 +13455,38 @@ def get_message_fields(field): assert response.name == 'name_value' assert response.crypto_key_name == 'crypto_key_name_value' +def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_google_channel_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc + + request = {} + client.update_google_channel_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_google_channel_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_update_google_channel_config_rest_required_fields(request_type=eventarc.UpdateGoogleChannelConfigRequest): transport_class = transports.EventarcRestTransport diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index be07b22b2152..e93c0db60340 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -316,14 +316,17 @@ async def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListBucketsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -332,11 +335,7 @@ async def sample_list_buckets(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_buckets, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_buckets] # Certain fields should be provided within the metadata header; # add these here. @@ -420,15 +419,14 @@ async def sample_get_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.GetBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_bucket] # Certain fields should be provided within the metadata header; # add these here. @@ -514,15 +512,14 @@ async def sample_create_bucket_async(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket_async, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket_async] # Certain fields should be provided within the metadata header; # add these here. @@ -618,15 +615,14 @@ async def sample_update_bucket_async(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket_async, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket_async] # Certain fields should be provided within the metadata header; # add these here. @@ -712,15 +708,14 @@ async def sample_create_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket] # Certain fields should be provided within the metadata header; # add these here. @@ -801,15 +796,14 @@ async def sample_update_bucket(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket] # Certain fields should be provided within the metadata header; # add these here. @@ -880,15 +874,14 @@ async def sample_delete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_bucket] # Certain fields should be provided within the metadata header; # add these here. @@ -953,15 +946,14 @@ async def sample_undelete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.UndeleteBucketRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.undelete_bucket, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.undelete_bucket] # Certain fields should be provided within the metadata header; # add these here. @@ -1048,14 +1040,17 @@ async def sample_list_views(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListViewsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1064,11 +1059,7 @@ async def sample_list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_views, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_views] # Certain fields should be provided within the metadata header; # add these here. @@ -1152,15 +1143,14 @@ async def sample_get_view(): """ # Create or coerce a protobuf request object. - request = logging_config.GetViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1237,15 +1227,14 @@ async def sample_create_view(): """ # Create or coerce a protobuf request object. - request = logging_config.CreateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1324,15 +1313,14 @@ async def sample_update_view(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1401,15 +1389,14 @@ async def sample_delete_view(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - request = logging_config.DeleteViewRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_view, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_view] # Certain fields should be provided within the metadata header; # add these here. 
@@ -1499,14 +1486,17 @@ async def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListSinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1515,19 +1505,7 @@ async def sample_list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_sinks, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_sinks] # Certain fields should be provided within the metadata header; # add these here. @@ -1636,14 +1614,17 @@ async def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.GetSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1652,19 +1633,7 @@ async def sample_get_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_sink, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -1781,14 +1750,17 @@ async def sample_create_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.CreateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1799,11 +1771,7 @@ async def sample_create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_sink, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -1945,14 +1913,17 @@ async def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.UpdateSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1965,19 +1936,7 @@ async def sample_update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_sink, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -2063,14 +2022,17 @@ async def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.DeleteSinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -2079,19 +2041,7 @@ async def sample_delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_sink, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -2204,14 +2154,17 @@ async def sample_create_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.CreateLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2224,11 +2177,7 @@ async def sample_create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2338,14 +2287,17 @@ async def sample_delete_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.DeleteLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2354,11 +2306,7 @@ async def sample_delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2458,14 +2406,17 @@ async def sample_list_links(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListLinksRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListLinksRequest): + request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2474,11 +2425,7 @@ async def sample_list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_links, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_links] # Certain fields should be provided within the metadata header; # add these here. @@ -2574,14 +2521,17 @@ async def sample_get_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.GetLinkRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2590,11 +2540,7 @@ async def sample_get_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_link, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2688,14 +2634,17 @@ async def sample_list_exclusions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.ListExclusionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2704,19 +2653,7 @@ async def sample_list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_exclusions, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_exclusions] # Certain fields should be provided within the metadata header; # add these here. @@ -2823,14 +2760,17 @@ async def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.GetExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2839,19 +2779,7 @@ async def sample_get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_exclusion, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -2967,14 +2895,17 @@ async def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.CreateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2985,11 +2916,7 @@ async def sample_create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3117,14 +3044,17 @@ async def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.UpdateExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3137,11 +3067,7 @@ async def sample_update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_exclusion, - default_timeout=120.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3226,14 +3152,17 @@ async def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.DeleteExclusionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3242,19 +3171,7 @@ async def sample_delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_exclusion, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3350,15 +3267,14 @@ async def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.GetCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3462,15 +3378,14 @@ async def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. - request = logging_config.UpdateCmekSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_cmek_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3586,14 +3501,17 @@ async def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.GetSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3602,11 +3520,7 @@ async def sample_get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3730,14 +3644,17 @@ async def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([settings, update_mask]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_config.UpdateSettingsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -3748,11 +3665,7 @@ async def sample_update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_settings, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3837,15 +3750,14 @@ async def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - request = logging_config.CopyLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.copy_log_entries, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.copy_log_entries] # Validate the universe domain. 
self._client._validate_universe_domain() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 32b5be71af7d..2c3165381359 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -712,17 +712,15 @@ def sample_list_buckets(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListBucketsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListBucketsRequest): request = logging_config.ListBucketsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -816,10 +814,8 @@ def sample_get_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetBucketRequest): request = logging_config.GetBucketRequest(request) @@ -911,10 +907,8 @@ def sample_create_bucket_async(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) @@ -1016,10 +1010,8 @@ def sample_update_bucket_async(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) @@ -1111,10 +1103,8 @@ def sample_create_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.CreateBucketRequest): request = logging_config.CreateBucketRequest(request) @@ -1201,10 +1191,8 @@ def sample_update_bucket(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateBucketRequest): request = logging_config.UpdateBucketRequest(request) @@ -1281,10 +1269,8 @@ def sample_delete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteBucketRequest): request = logging_config.DeleteBucketRequest(request) @@ -1355,10 +1341,8 @@ def sample_undelete_bucket(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UndeleteBucketRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.UndeleteBucketRequest): request = logging_config.UndeleteBucketRequest(request) @@ -1451,17 +1435,15 @@ def sample_list_views(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListViewsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListViewsRequest): request = logging_config.ListViewsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1555,10 +1537,8 @@ def sample_get_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetViewRequest): request = logging_config.GetViewRequest(request) @@ -1641,10 +1621,8 @@ def sample_create_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateViewRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateViewRequest): request = logging_config.CreateViewRequest(request) @@ -1729,10 +1707,8 @@ def sample_update_view(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateViewRequest): request = logging_config.UpdateViewRequest(request) @@ -1807,10 +1783,8 @@ def sample_delete_view(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteViewRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteViewRequest): request = logging_config.DeleteViewRequest(request) @@ -1906,17 +1880,15 @@ def sample_list_sinks(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListSinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListSinksRequest): request = logging_config.ListSinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2035,17 +2007,15 @@ def sample_get_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetSinkRequest): request = logging_config.GetSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2172,17 +2142,15 @@ def sample_create_sink(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, sink]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateSinkRequest): request = logging_config.CreateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2336,17 +2304,15 @@ def sample_update_sink(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name, sink, update_mask]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.UpdateSinkRequest): request = logging_config.UpdateSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2446,17 +2412,15 @@ def sample_delete_sink(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([sink_name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteSinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteSinkRequest): request = logging_config.DeleteSinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2579,17 +2543,15 @@ def sample_create_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, link, link_id]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateLinkRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateLinkRequest): request = logging_config.CreateLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2713,17 +2675,15 @@ def sample_delete_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteLinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteLinkRequest): request = logging_config.DeleteLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2833,17 +2793,15 @@ def sample_list_links(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListLinksRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListLinksRequest): request = logging_config.ListLinksRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2949,17 +2907,15 @@ def sample_get_link(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetLinkRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetLinkRequest): request = logging_config.GetLinkRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3063,17 +3019,15 @@ def sample_list_exclusions(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.ListExclusionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.ListExclusionsRequest): request = logging_config.ListExclusionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3190,17 +3144,15 @@ def sample_get_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.GetExclusionRequest): request = logging_config.GetExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3326,17 +3278,15 @@ def sample_create_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, exclusion]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CreateExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.CreateExclusionRequest): request = logging_config.CreateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3476,17 +3426,15 @@ def sample_update_exclusion(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, exclusion, update_mask]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateExclusionRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateExclusionRequest): request = logging_config.UpdateExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3585,17 +3533,15 @@ def sample_delete_exclusion(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.DeleteExclusionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.DeleteExclusionRequest): request = logging_config.DeleteExclusionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -3701,10 +3647,8 @@ def sample_get_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.GetCmekSettingsRequest): request = logging_config.GetCmekSettingsRequest(request) @@ -3814,10 +3758,8 @@ def sample_update_cmek_settings(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateCmekSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateCmekSettingsRequest): request = logging_config.UpdateCmekSettingsRequest(request) @@ -3939,17 +3881,15 @@ def sample_get_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.GetSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.GetSettingsRequest): request = logging_config.GetSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4083,17 +4023,15 @@ def sample_update_settings(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([settings, update_mask]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.UpdateSettingsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_config.UpdateSettingsRequest): request = logging_config.UpdateSettingsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -4190,10 +4128,8 @@ def sample_copy_log_entries(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging_config.CopyLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging_config.CopyLogEntriesRequest): request = logging_config.CopyLogEntriesRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index b353b67be76d..9f1f2ab45f05 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -191,7 +191,10 @@ def _prep_wrapped_messages(self, client_info): self.list_sinks: gapic_v1.method.wrap_method( self.list_sinks, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -204,7 +207,10 @@ def _prep_wrapped_messages(self, client_info): self.get_sink: gapic_v1.method.wrap_method( self.get_sink, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -222,7 +228,10 @@ def _prep_wrapped_messages(self, client_info): self.update_sink: gapic_v1.method.wrap_method( self.update_sink, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, 
core_exceptions.ServiceUnavailable, @@ -235,7 +244,10 @@ def _prep_wrapped_messages(self, client_info): self.delete_sink: gapic_v1.method.wrap_method( self.delete_sink, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -268,7 +280,10 @@ def _prep_wrapped_messages(self, client_info): self.list_exclusions: gapic_v1.method.wrap_method( self.list_exclusions, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -281,7 +296,10 @@ def _prep_wrapped_messages(self, client_info): self.get_exclusion: gapic_v1.method.wrap_method( self.get_exclusion, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -304,7 +322,10 @@ def _prep_wrapped_messages(self, client_info): self.delete_exclusion: gapic_v1.method.wrap_method( self.delete_exclusion, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 27a9675d1c39..0a8dca763fb7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -1185,6 +1187,248 @@ def copy_log_entries(self) -> Callable[ ) return self._stubs['copy_log_entries'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_buckets: gapic_v1.method_async.wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: gapic_v1.method_async.wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket_async: gapic_v1.method_async.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method_async.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: gapic_v1.method_async.wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: gapic_v1.method_async.wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: 
gapic_v1.method_async.wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method_async.wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.list_views: gapic_v1.method_async.wrap_method( + self.list_views, + default_timeout=None, + client_info=client_info, + ), + self.get_view: gapic_v1.method_async.wrap_method( + self.get_view, + default_timeout=None, + client_info=client_info, + ), + self.create_view: gapic_v1.method_async.wrap_method( + self.create_view, + default_timeout=None, + client_info=client_info, + ), + self.update_view: gapic_v1.method_async.wrap_method( + self.update_view, + default_timeout=None, + client_info=client_info, + ), + self.delete_view: gapic_v1.method_async.wrap_method( + self.delete_view, + default_timeout=None, + client_info=client_info, + ), + self.list_sinks: gapic_v1.method_async.wrap_method( + self.list_sinks, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_sink: gapic_v1.method_async.wrap_method( + self.get_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_sink: gapic_v1.method_async.wrap_method( + self.create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self.update_sink: gapic_v1.method_async.wrap_method( + self.update_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_sink: gapic_v1.method_async.wrap_method( + self.delete_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_link: gapic_v1.method_async.wrap_method( + self.create_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_link: gapic_v1.method_async.wrap_method( + self.delete_link, + default_timeout=None, + client_info=client_info, + ), + self.list_links: gapic_v1.method_async.wrap_method( + self.list_links, + default_timeout=None, + client_info=client_info, + ), + self.get_link: gapic_v1.method_async.wrap_method( + self.get_link, + default_timeout=None, + client_info=client_info, + ), + self.list_exclusions: gapic_v1.method_async.wrap_method( + self.list_exclusions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_exclusion: gapic_v1.method_async.wrap_method( + self.get_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_exclusion: 
gapic_v1.method_async.wrap_method( + self.create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.update_exclusion: gapic_v1.method_async.wrap_method( + self.update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self.delete_exclusion: gapic_v1.method_async.wrap_method( + self.delete_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_cmek_settings: gapic_v1.method_async.wrap_method( + self.get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_cmek_settings: gapic_v1.method_async.wrap_method( + self.update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self.get_settings: gapic_v1.method_async.wrap_method( + self.get_settings, + default_timeout=None, + client_info=client_info, + ), + self.update_settings: gapic_v1.method_async.wrap_method( + self.update_settings, + default_timeout=None, + client_info=client_info, + ), + self.copy_log_entries: gapic_v1.method_async.wrap_method( + self.copy_log_entries, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 5b08d1efb8a5..1c844f9ee0f3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py 
@@ -292,14 +292,17 @@ async def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging.DeleteLogRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -308,19 +311,7 @@ async def sample_delete_log(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log] # Certain fields should be provided within the metadata header; # add these here. @@ -489,14 +480,17 @@ async def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging.WriteLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -512,19 +506,7 @@ async def sample_write_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.write_log_entries, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.write_log_entries] # Validate the universe domain. self._client._validate_universe_domain() @@ -647,14 +629,17 @@ async def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging.ListLogEntriesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -667,19 +652,7 @@ async def sample_list_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_entries, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_entries] # Validate the universe domain. self._client._validate_universe_domain() @@ -760,23 +733,14 @@ async def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - request = logging.ListMonitoredResourceDescriptorsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_monitored_resource_descriptors, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_monitored_resource_descriptors] # Validate the universe domain. self._client._validate_universe_domain() @@ -870,14 +834,17 @@ async def sample_list_logs(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging.ListLogsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -886,19 +853,7 @@ async def sample_list_logs(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_logs, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_logs] # Certain fields should be provided within the metadata header; # add these here. @@ -995,19 +950,7 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.tail_log_entries, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=3600.0, - ), - default_timeout=3600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.tail_log_entries] # Validate the universe domain. self._client._validate_universe_domain() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index cb4004bb8bf7..139156e5b2e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -634,17 +634,15 @@ def sample_delete_log(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging.DeleteLogRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.DeleteLogRequest): request = logging.DeleteLogRequest(request) # If we have keyword arguments corresponding to fields on the @@ -823,17 +821,15 @@ def sample_write_log_entries(): Result returned from WriteLogEntries. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([log_name, resource, labels, entries]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging.WriteLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, logging.WriteLogEntriesRequest): request = logging.WriteLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -972,17 +968,15 @@ def sample_list_log_entries(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([resource_names, filter, order_by]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogEntriesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogEntriesRequest): request = logging.ListLogEntriesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1077,10 +1071,8 @@ def sample_list_monitored_resource_descriptors(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListMonitoredResourceDescriptorsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -1180,17 +1172,15 @@ def sample_list_logs(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging.ListLogsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging.ListLogsRequest): request = logging.ListLogsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 90f4018d18e0..c9850fac55f2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -126,7 +126,10 @@ def _prep_wrapped_messages(self, client_info): self.delete_log: gapic_v1.method.wrap_method( self.delete_log, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -139,7 +142,10 @@ def 
_prep_wrapped_messages(self, client_info): self.write_log_entries: gapic_v1.method.wrap_method( self.write_log_entries, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -152,7 +158,10 @@ def _prep_wrapped_messages(self, client_info): self.list_log_entries: gapic_v1.method.wrap_method( self.list_log_entries, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -165,7 +174,10 @@ def _prep_wrapped_messages(self, client_info): self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method( self.list_monitored_resource_descriptors, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -178,7 +190,10 @@ def _prep_wrapped_messages(self, client_info): self.list_logs: gapic_v1.method.wrap_method( self.list_logs, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -191,7 +206,10 @@ def _prep_wrapped_messages(self, client_info): self.tail_log_entries: gapic_v1.method.wrap_method( self.tail_log_entries, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, 
predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 2a083c46a0eb..337b490b1117 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -409,6 +411,107 @@ def tail_log_entries(self) -> Callable[ ) return self._stubs['tail_log_entries'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.delete_log: gapic_v1.method_async.wrap_method( + self.delete_log, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: gapic_v1.method_async.wrap_method( + self.write_log_entries, + default_retry=retries.AsyncRetry( + 
initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: gapic_v1.method_async.wrap_method( + self.list_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: gapic_v1.method_async.wrap_method( + self.list_monitored_resource_descriptors, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: gapic_v1.method_async.wrap_method( + self.list_logs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.tail_log_entries: gapic_v1.method_async.wrap_method( + self.tail_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 43963989ea78..681c965ed934 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -295,14 +295,17 @@ async def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_metrics.ListLogMetricsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -311,19 +314,7 @@ async def sample_list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_log_metrics, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_metrics] # Certain fields should be provided within the metadata header; # add these here. @@ -428,14 +419,17 @@ async def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_metrics.GetLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -444,19 +438,7 @@ async def sample_get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_log_metric, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -569,14 +551,17 @@ async def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_metrics.CreateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -587,11 +572,7 @@ async def sample_create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_log_metric, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -703,14 +684,17 @@ async def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_metrics.UpdateLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -721,19 +705,7 @@ async def sample_update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_log_metric, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -810,14 +782,17 @@ async def sample_delete_log_metric(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = logging_metrics.DeleteLogMetricRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -826,19 +801,7 @@ async def sample_delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_log_metric, - default_retry=retries.AsyncRetry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log_metric] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 36decf7ec151..bebfbf98f469 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -637,17 +637,15 @@ def sample_list_log_metrics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.ListLogMetricsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.ListLogMetricsRequest): request = logging_metrics.ListLogMetricsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -762,17 +760,15 @@ def sample_get_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.GetLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.GetLogMetricRequest): request = logging_metrics.GetLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -895,17 +891,15 @@ def sample_create_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, metric]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.CreateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.CreateLogMetricRequest): request = logging_metrics.CreateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1029,17 +1023,15 @@ def sample_update_log_metric(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name, metric]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.UpdateLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.UpdateLogMetricRequest): request = logging_metrics.UpdateLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1128,17 +1120,15 @@ def sample_delete_log_metric(): sent along with the request as metadata. 
""" # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([metric_name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a logging_metrics.DeleteLogMetricRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, logging_metrics.DeleteLogMetricRequest): request = logging_metrics.DeleteLogMetricRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 864a110564d0..c347164816f9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -126,7 +126,10 @@ def _prep_wrapped_messages(self, client_info): self.list_log_metrics: gapic_v1.method.wrap_method( self.list_log_metrics, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( 
core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -139,7 +142,10 @@ def _prep_wrapped_messages(self, client_info): self.get_log_metric: gapic_v1.method.wrap_method( self.get_log_metric, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -157,7 +163,10 @@ def _prep_wrapped_messages(self, client_info): self.update_log_metric: gapic_v1.method.wrap_method( self.update_log_metric, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, @@ -170,7 +179,10 @@ def _prep_wrapped_messages(self, client_info): self.delete_log_metric: gapic_v1.method.wrap_method( self.delete_log_metric, default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 2e66658c60c2..4715a47d0d16 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -364,6 +366,80 @@ def delete_log_metric(self) -> Callable[ ) return self._stubs['delete_log_metric'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_log_metrics: gapic_v1.method_async.wrap_method( + self.list_log_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_log_metric: gapic_v1.method_async.wrap_method( + self.get_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_log_metric: gapic_v1.method_async.wrap_method( + self.create_log_metric, + default_timeout=60.0, + client_info=client_info, + ), + self.update_log_metric: gapic_v1.method_async.wrap_method( + self.update_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + 
core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_log_metric: gapic_v1.method_async.wrap_method( + self.delete_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index e70efd4e43af..027deec8533b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -826,6 +826,38 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_buckets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_buckets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_buckets] = 
mock_rpc + + request = {} + client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_buckets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -848,6 +880,43 @@ async def test_list_buckets_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest() +@pytest.mark.asyncio +async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_buckets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_object + + request = {} + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): client = ConfigServiceV2AsyncClient( @@ -1313,6 +1382,38 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc + + request = {} + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1341,6 +1442,43 @@ async def test_get_bucket_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest() +@pytest.mark.asyncio +async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_bucket in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_object + + request = {} + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1531,6 +1669,42 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): bucket_id='bucket_id_value', ) +def test_create_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_bucket_async in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_bucket_async] = mock_rpc + + request = {} + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1553,6 +1727,47 @@ async def test_create_bucket_async_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() +@pytest.mark.asyncio +async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_bucket_async in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_object + + request = {} + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1728,6 +1943,42 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_update_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_bucket_async in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_bucket_async] = mock_rpc + + request = {} + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1750,6 +2001,47 @@ async def test_update_bucket_async_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() +@pytest.mark.asyncio +async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_bucket_async in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_object + + request = {} + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -1942,6 +2234,38 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): bucket_id='bucket_id_value', ) +def test_create_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc + + request = {} + client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1970,6 +2294,43 @@ async def test_create_bucket_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() +@pytest.mark.asyncio +async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_bucket in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_object + + request = {} + await client.create_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -2173,6 +2534,38 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_update_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc + + request = {} + client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2201,6 +2594,43 @@ async def test_update_bucket_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() +@pytest.mark.asyncio +async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_bucket in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_object + + request = {} + await client.update_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( @@ -2389,6 +2819,38 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc + + request = {} + client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2409,6 +2871,43 @@ async def test_delete_bucket_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest() +@pytest.mark.asyncio +async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_bucket in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_object + + request = {} + await client.delete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): client = ConfigServiceV2AsyncClient( @@ -2582,6 +3081,38 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_undelete_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.undelete_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc + + request = {} + client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_undelete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2602,6 +3133,43 @@ async def test_undelete_bucket_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest() +@pytest.mark.asyncio +async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.undelete_bucket in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_object + + request = {} + await client.undelete_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.undelete_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): client = ConfigServiceV2AsyncClient( @@ -2780,6 +3348,38 @@ def test_list_views_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_views_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_views in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc + + request = {} + client.list_views(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_views_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2802,6 +3402,43 @@ async def test_list_views_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest() +@pytest.mark.asyncio +async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_views in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_object + + request = {} + await client.list_views(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_views(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): client = ConfigServiceV2AsyncClient( @@ -3259,6 +3896,38 @@ def test_get_view_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc + + request = {} + client.get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3283,6 +3952,43 @@ async def test_get_view_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest() +@pytest.mark.asyncio +async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_object + + request = {} + await client.get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): client = ConfigServiceV2AsyncClient( @@ -3472,6 +4178,38 @@ def test_create_view_non_empty_request_with_auto_populated_field(): view_id='view_id_value', ) +def test_create_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc + + request = {} + client.create_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3496,6 +4234,43 @@ async def test_create_view_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest() +@pytest.mark.asyncio +async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_view] = mock_object + + request = {} + await client.create_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): client = ConfigServiceV2AsyncClient( @@ -3683,6 +4458,38 @@ def test_update_view_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc + + request = {} + client.update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3707,6 +4514,43 @@ async def test_update_view_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest() +@pytest.mark.asyncio +async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_object + + request = {} + await client.update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): client = ConfigServiceV2AsyncClient( @@ -3887,6 +4731,38 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc + + request = {} + client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3907,6 +4783,43 @@ async def test_delete_view_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest() +@pytest.mark.asyncio +async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_object + + request = {} + await client.delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): client = ConfigServiceV2AsyncClient( @@ -4085,6 +4998,38 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_sinks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_sinks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc + + request = {} + client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_sinks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4107,6 +5052,43 @@ async def test_list_sinks_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest() +@pytest.mark.asyncio +async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_sinks in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_sinks] = mock_object + + request = {} + await client.list_sinks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): client = ConfigServiceV2AsyncClient( @@ -4574,6 +5556,38 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): sink_name='sink_name_value', ) +def test_get_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc + + request = {} + client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4603,6 +5617,43 @@ async def test_get_sink_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest() +@pytest.mark.asyncio +async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_object + + request = {} + await client.get_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): client = ConfigServiceV2AsyncClient( @@ -4892,6 +5943,38 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): parent='parent_value', ) +def test_create_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc + + request = {} + client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4921,6 +6004,43 @@ async def test_create_sink_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest() +@pytest.mark.asyncio +async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_object + + request = {} + await client.create_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): client = ConfigServiceV2AsyncClient( @@ -5220,6 +6340,38 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): sink_name='sink_name_value', ) +def test_update_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc + + request = {} + client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5249,6 +6401,43 @@ async def test_update_sink_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest() +@pytest.mark.asyncio +async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_object + + request = {} + await client.update_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): client = ConfigServiceV2AsyncClient( @@ -5541,6 +6730,38 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): sink_name='sink_name_value', ) +def test_delete_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc + + request = {} + client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5561,6 +6782,43 @@ async def test_delete_sink_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest() +@pytest.mark.asyncio +async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_object + + request = {} + await client.delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): client = ConfigServiceV2AsyncClient( @@ -5818,6 +7076,42 @@ def test_create_link_non_empty_request_with_auto_populated_field(): link_id='link_id_value', ) +def test_create_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_link] = mock_rpc + + request = {} + client.create_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5840,6 +7134,47 @@ async def test_create_link_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateLinkRequest() +@pytest.mark.asyncio +async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_object + + request = {} + await client.create_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( @@ -6119,6 +7454,42 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc + + request = {} + client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6141,6 +7512,47 @@ async def test_delete_link_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteLinkRequest() +@pytest.mark.asyncio +async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_object + + request = {} + await client.delete_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( @@ -6405,6 +7817,38 @@ def test_list_links_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_links in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_links] = mock_rpc + + request = {} + client.list_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6427,6 +7871,43 @@ async def test_list_links_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListLinksRequest() +@pytest.mark.asyncio +async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_links in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_object + + request = {} + await client.list_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( @@ -6884,6 +8365,38 @@ def test_get_link_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_link] = mock_rpc + + request = {} + client.get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -6908,6 +8421,43 @@ async def test_get_link_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetLinkRequest() +@pytest.mark.asyncio +async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_object + + request = {} + await client.get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( @@ -7175,6 +8725,38 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_exclusions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_exclusions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc + + request = {} + client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_exclusions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7197,6 +8779,43 @@ async def test_list_exclusions_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest() +@pytest.mark.asyncio +async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_exclusions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_object + + request = {} + await client.list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( @@ -7656,6 +9275,38 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc + + request = {} + client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7681,6 +9332,43 @@ async def test_get_exclusion_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest() +@pytest.mark.asyncio +async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_object + + request = {} + await client.get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -7954,6 +9642,38 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): parent='parent_value', ) +def test_create_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_exclusion] = mock_rpc + + request = {} + client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -7979,6 +9699,43 @@ async def test_create_exclusion_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest() +@pytest.mark.asyncio +async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_object + + request = {} + await client.create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -8262,6 +10019,38 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_update_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_exclusion] = mock_rpc + + request = {} + client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8287,6 +10076,43 @@ async def test_update_exclusion_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest() +@pytest.mark.asyncio +async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_object + + request = {} + await client.update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -8571,6 +10397,38 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_exclusion] = mock_rpc + + request = {} + client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8591,6 +10449,43 @@ async def test_delete_exclusion_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest() +@pytest.mark.asyncio +async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_object + + request = {} + await client.delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): client = ConfigServiceV2AsyncClient( @@ -8855,6 +10750,38 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_cmek_settings] = mock_rpc + + request = {} + client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -8880,6 +10807,43 @@ async def test_get_cmek_settings_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest() +@pytest.mark.asyncio +async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_cmek_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_object + + request = {} + await client.get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -9071,6 +11035,38 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_update_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_cmek_settings] = mock_rpc + + request = {} + client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9096,6 +11092,43 @@ async def test_update_cmek_settings_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest() +@pytest.mark.asyncio +async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_cmek_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_object + + request = {} + await client.update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -9289,6 +11322,38 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc + + request = {} + client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9315,6 +11380,43 @@ async def test_get_settings_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSettingsRequest() +@pytest.mark.asyncio +async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_object + + request = {} + await client.get_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -9592,6 +11694,38 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_update_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc + + request = {} + client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9618,6 +11752,43 @@ async def test_update_settings_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSettingsRequest() +@pytest.mark.asyncio +async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_object + + request = {} + await client.update_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): client = ConfigServiceV2AsyncClient( @@ -9898,6 +12069,42 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): destination='destination_value', ) +def test_copy_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.copy_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.copy_log_entries] = mock_rpc + + request = {} + client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_copy_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -9920,6 +12127,47 @@ async def test_copy_log_entries_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CopyLogEntriesRequest() +@pytest.mark.asyncio +async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_object + + request = {} + await client.copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): client = ConfigServiceV2AsyncClient( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index b6c611d634e2..f27f07e668c6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -823,6 +823,38 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): log_name='log_name_value', ) +def test_delete_log_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc + + request = {} + client.delete_log(request) + + # Establish that the underlying 
gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_log_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -843,6 +875,43 @@ async def test_delete_log_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest() +@pytest.mark.asyncio +async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_log in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_object + + request = {} + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): client = LoggingServiceV2AsyncClient( @@ -1099,6 +1168,38 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): log_name='log_name_value', ) +def test_write_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.write_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.write_log_entries] = mock_rpc + + request = {} + client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_write_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1120,6 +1221,43 @@ async def test_write_log_entries_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest() +@pytest.mark.asyncio +async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.write_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_object + + request = {} + await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): client = LoggingServiceV2AsyncClient( @@ -1350,6 +1488,38 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_log_entries] = mock_rpc + + request = {} + client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1372,6 +1542,43 @@ async def test_list_log_entries_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest() +@pytest.mark.asyncio +async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_object + + request = {} + await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): client = LoggingServiceV2AsyncClient( @@ -1777,6 +1984,38 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat page_token='page_token_value', ) +def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_monitored_resource_descriptors in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_monitored_resource_descriptors] = mock_rpc + + request = {} + client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1799,6 +2038,43 @@ async def test_list_monitored_resource_descriptors_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_monitored_resource_descriptors in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_object + + request = {} + await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): client = LoggingServiceV2AsyncClient( @@ -2106,6 +2382,38 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_logs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_logs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc + + request = {} + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_logs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2129,6 +2437,43 @@ async def test_list_logs_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest() +@pytest.mark.asyncio +async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_logs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_object + + request = {} + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): client = LoggingServiceV2AsyncClient( @@ -2538,6 +2883,76 @@ def test_tail_log_entries(request_type, transport: str = 'grpc'): assert isinstance(message, logging.TailLogEntriesResponse) +def test_tail_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.tail_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.tail_log_entries] = mock_rpc + + request = [{}] + client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.tail_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_object + + request = [{}] + await client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): client = LoggingServiceV2AsyncClient( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index d1543607c906..7cdd396a89bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -826,6 +826,38 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_log_metrics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_metrics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_log_metrics] = mock_rpc + + request = {} + client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_log_metrics_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -848,6 +880,43 @@ async def test_list_log_metrics_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest() +@pytest.mark.asyncio +async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_log_metrics in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_object + + request = {} + await client.list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): client = MetricsServiceV2AsyncClient( @@ -1313,6 +1382,38 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): metric_name='metric_name_value', ) +def test_get_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc + + request = {} + client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1341,6 +1442,43 @@ async def test_get_log_metric_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest() +@pytest.mark.asyncio +async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_object + + request = {} + await client.get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1626,6 +1764,38 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): parent='parent_value', ) +def test_create_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_log_metric] = mock_rpc + + request = {} + client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1654,6 +1824,43 @@ async def test_create_log_metric_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest() +@pytest.mark.asyncio +async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_object + + request = {} + await client.create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -1949,6 +2156,38 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): metric_name='metric_name_value', ) +def test_update_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_log_metric] = mock_rpc + + request = {} + client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1977,6 +2216,43 @@ async def test_update_log_metric_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest() +@pytest.mark.asyncio +async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_object + + request = {} + await client.update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): client = MetricsServiceV2AsyncClient( @@ -2257,6 +2533,38 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): metric_name='metric_name_value', ) +def test_delete_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_log_metric] = mock_rpc + + request = {} + client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2277,6 +2585,43 @@ async def test_delete_log_metric_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest() +@pytest.mark.asyncio +async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = MetricsServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_object + + request = {} + await client.delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): client = MetricsServiceV2AsyncClient( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index bd46e2b0ec33..cd02bb551ffd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -330,14 +330,17 @@ async def sample_list_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.ListInstancesRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.ListInstancesRequest): + request = cloud_redis.ListInstancesRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -346,11 +349,7 @@ async def sample_list_instances(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_instances, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] # Certain fields should be provided within the metadata header; # add these here. @@ -442,14 +441,17 @@ async def sample_get_instance(): A Memorystore for Redis instance. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.GetInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.GetInstanceRequest): + request = cloud_redis.GetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -458,11 +460,7 @@ async def sample_get_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] # Certain fields should be provided within the metadata header; # add these here. 
@@ -548,14 +546,17 @@ async def sample_get_instance_auth_string(): Instance AUTH string details. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.GetInstanceAuthStringRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.GetInstanceAuthStringRequest): + request = cloud_redis.GetInstanceAuthStringRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -564,11 +565,7 @@ async def sample_get_instance_auth_string(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_instance_auth_string, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance_auth_string] # Certain fields should be provided within the metadata header; # add these here. @@ -702,14 +699,17 @@ async def sample_create_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, instance_id, instance]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.CreateInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.CreateInstanceRequest): + request = cloud_redis.CreateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -722,11 +722,7 @@ async def sample_create_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -851,14 +847,17 @@ async def sample_update_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([update_mask, instance]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.UpdateInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cloud_redis.UpdateInstanceRequest): + request = cloud_redis.UpdateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -869,11 +868,7 @@ async def sample_update_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -983,14 +978,17 @@ async def sample_upgrade_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, redis_version]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.UpgradeInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.UpgradeInstanceRequest): + request = cloud_redis.UpgradeInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1001,11 +999,7 @@ async def sample_upgrade_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.upgrade_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.upgrade_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -1125,14 +1119,17 @@ async def sample_import_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, input_config]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.ImportInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.ImportInstanceRequest): + request = cloud_redis.ImportInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1143,11 +1140,7 @@ async def sample_import_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.import_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.import_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -1264,14 +1257,17 @@ async def sample_export_instance(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, output_config]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.ExportInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.ExportInstanceRequest): + request = cloud_redis.ExportInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1282,11 +1278,7 @@ async def sample_export_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.export_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.export_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -1397,14 +1389,17 @@ async def sample_failover_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, data_protection_mode]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.FailoverInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.FailoverInstanceRequest): + request = cloud_redis.FailoverInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1415,11 +1410,7 @@ async def sample_failover_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.failover_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.failover_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -1527,14 +1518,17 @@ async def sample_delete_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.DeleteInstanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, cloud_redis.DeleteInstanceRequest): + request = cloud_redis.DeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1543,11 +1537,7 @@ async def sample_delete_instance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_instance, - default_timeout=600.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] # Certain fields should be provided within the metadata header; # add these here. @@ -1666,14 +1656,17 @@ async def sample_reschedule_maintenance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, reschedule_type, schedule_time]) if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") - request = cloud_redis.RescheduleMaintenanceRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.RescheduleMaintenanceRequest): + request = cloud_redis.RescheduleMaintenanceRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1686,11 +1679,7 @@ async def sample_reschedule_maintenance(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reschedule_maintenance, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.reschedule_maintenance] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 25454d4a36c2..421ff472abd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -674,17 +674,15 @@ def sample_list_instances(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.ListInstancesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, cloud_redis.ListInstancesRequest): request = cloud_redis.ListInstancesRequest(request) # If we have keyword arguments corresponding to fields on the @@ -786,17 +784,15 @@ def sample_get_instance(): A Memorystore for Redis instance. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.GetInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.GetInstanceRequest): request = cloud_redis.GetInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -892,17 +888,15 @@ def sample_get_instance_auth_string(): Instance AUTH string details. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.GetInstanceAuthStringRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.GetInstanceAuthStringRequest): request = cloud_redis.GetInstanceAuthStringRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1046,17 +1040,15 @@ def sample_create_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, instance_id, instance]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.CreateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.CreateInstanceRequest): request = cloud_redis.CreateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1195,17 +1187,15 @@ def sample_update_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([update_mask, instance]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.UpdateInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.UpdateInstanceRequest): request = cloud_redis.UpdateInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1327,17 +1317,15 @@ def sample_upgrade_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, redis_version]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.UpgradeInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.UpgradeInstanceRequest): request = cloud_redis.UpgradeInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1469,17 +1457,15 @@ def sample_import_instance(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, input_config]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.ImportInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.ImportInstanceRequest): request = cloud_redis.ImportInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1608,17 +1594,15 @@ def sample_export_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, output_config]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.ExportInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, cloud_redis.ExportInstanceRequest): request = cloud_redis.ExportInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1741,17 +1725,15 @@ def sample_failover_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, data_protection_mode]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.FailoverInstanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.FailoverInstanceRequest): request = cloud_redis.FailoverInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1871,17 +1853,15 @@ def sample_delete_instance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.DeleteInstanceRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, cloud_redis.DeleteInstanceRequest): request = cloud_redis.DeleteInstanceRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2010,17 +1990,15 @@ def sample_reschedule_maintenance(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, reschedule_type, schedule_time]) if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') - # Minor optimization to avoid making a copy if the user passes - # in a cloud_redis.RescheduleMaintenanceRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, cloud_redis.RescheduleMaintenanceRequest): request = cloud_redis.RescheduleMaintenanceRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 1fe6599f7777..b60d1965524f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -606,6 +608,66 @@ def reschedule_maintenance(self) -> Callable[ ) return self._stubs['reschedule_maintenance'] + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: gapic_v1.method_async.wrap_method( + self.list_instances, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method_async.wrap_method( + self.get_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance_auth_string: gapic_v1.method_async.wrap_method( + self.get_instance_auth_string, + default_timeout=600.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method_async.wrap_method( + self.create_instance, + default_timeout=600.0, + 
client_info=client_info, + ), + self.update_instance: gapic_v1.method_async.wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.upgrade_instance: gapic_v1.method_async.wrap_method( + self.upgrade_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.import_instance: gapic_v1.method_async.wrap_method( + self.import_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.export_instance: gapic_v1.method_async.wrap_method( + self.export_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.failover_instance: gapic_v1.method_async.wrap_method( + self.failover_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method_async.wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.reschedule_maintenance: gapic_v1.method_async.wrap_method( + self.reschedule_maintenance, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index edb68db57562..679bd4533c20 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -852,6 +852,38 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): page_token='page_token_value', ) +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_list_instances_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -875,6 +907,43 @@ async def test_list_instances_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest() +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instances in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + 
client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_object + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): client = CloudRedisAsyncClient( @@ -1382,6 +1451,38 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1430,6 +1531,43 @@ async def test_get_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest() +@pytest.mark.asyncio +async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_object + + request = {} + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisAsyncClient( @@ -1743,6 +1881,38 @@ def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_get_instance_auth_string_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_auth_string in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_instance_auth_string] = mock_rpc + + request = {} + client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_instance_auth_string(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_get_instance_auth_string_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1765,6 +1935,43 @@ async def test_get_instance_auth_string_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceAuthStringRequest() +@pytest.mark.asyncio +async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_instance_auth_string in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.get_instance_auth_string] = mock_object + + request = {} + await client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_instance_auth_string(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisAsyncClient( @@ -2025,6 +2232,42 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): instance_id='instance_id_value', ) +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_create_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2047,6 +2290,47 @@ async def test_create_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest() +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_object + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisAsyncClient( @@ -2324,6 +2608,42 @@ def test_update_instance_non_empty_request_with_auto_populated_field(): assert args[0] == cloud_redis.UpdateInstanceRequest( ) +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_update_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2346,6 +2666,47 @@ async def test_update_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpdateInstanceRequest() +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_object + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisAsyncClient( @@ -2617,6 +2978,42 @@ def test_upgrade_instance_non_empty_request_with_auto_populated_field(): redis_version='redis_version_value', ) +def test_upgrade_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.upgrade_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.upgrade_instance] = mock_rpc + + request = {} + client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.upgrade_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_upgrade_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2639,6 +3036,47 @@ async def test_upgrade_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpgradeInstanceRequest() +@pytest.mark.asyncio +async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.upgrade_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.upgrade_instance] = mock_object + + request = {} + await client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.upgrade_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisAsyncClient( @@ -2908,6 +3346,42 @@ def test_import_instance_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_import_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.import_instance] = mock_rpc + + request = {} + client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_import_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2930,6 +3404,47 @@ async def test_import_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ImportInstanceRequest() +@pytest.mark.asyncio +async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.import_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.import_instance] = mock_object + + request = {} + await client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.import_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisAsyncClient( @@ -3199,6 +3714,42 @@ def test_export_instance_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_export_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.export_instance] = mock_rpc + + request = {} + client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_export_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3221,6 +3772,47 @@ async def test_export_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ExportInstanceRequest() +@pytest.mark.asyncio +async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.export_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.export_instance] = mock_object + + request = {} + await client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisAsyncClient( @@ -3490,6 +4082,42 @@ def test_failover_instance_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_failover_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.failover_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.failover_instance] = mock_rpc + + request = {} + client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.failover_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_failover_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3512,6 +4140,47 @@ async def test_failover_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.FailoverInstanceRequest() +@pytest.mark.asyncio +async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.failover_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.failover_instance] = mock_object + + request = {} + await client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.failover_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisAsyncClient( @@ -3781,6 +4450,42 @@ def test_delete_instance_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_delete_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3803,6 +4508,47 @@ async def test_delete_instance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest() +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_object + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisAsyncClient( @@ -4062,6 +4808,42 @@ def test_reschedule_maintenance_non_empty_request_with_auto_populated_field(): name='name_value', ) +def test_reschedule_maintenance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reschedule_maintenance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.reschedule_maintenance] = mock_rpc + + request = {} + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.reschedule_maintenance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + @pytest.mark.asyncio async def test_reschedule_maintenance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4084,6 +4866,47 @@ async def test_reschedule_maintenance_empty_call_async(): _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.RescheduleMaintenanceRequest() +@pytest.mark.asyncio +async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.reschedule_maintenance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + class AwaitableMock(mock.AsyncMock): + def __await__(self): + self.await_count += 1 + return iter([]) + mock_object = AwaitableMock() + client._client._transport._wrapped_methods[client._client._transport.reschedule_maintenance] = mock_object + + request = {} + await client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.reschedule_maintenance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + @pytest.mark.asyncio async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisAsyncClient( @@ -4321,6 +5144,38 @@ def test_list_instances_rest(request_type): assert response.next_page_token == 'next_page_token_value' assert response.unreachable == ['unreachable_value'] +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): transport_class = transports.CloudRedisRestTransport @@ -4664,6 +5519,38 @@ def test_get_instance_rest(request_type): assert response.maintenance_version == 'maintenance_version_value' assert response.available_maintenance_versions == ['available_maintenance_versions_value'] +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -4898,6 +5785,38 @@ def test_get_instance_auth_string_rest(request_type): assert isinstance(response, cloud_redis.InstanceAuthString) assert response.auth_string == 'auth_string_value' +def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance_auth_string in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.get_instance_auth_string] = mock_rpc + + request = {} + client.get_instance_auth_string(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance_auth_string(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis.GetInstanceAuthStringRequest): transport_class = transports.CloudRedisRestTransport @@ -5191,6 +6110,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_create_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -5498,6 +6453,42 @@ def get_message_fields(field): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -5723,6 +6714,42 @@ def test_upgrade_instance_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_upgrade_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.upgrade_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.upgrade_instance] = mock_rpc + + request = {} + client.upgrade_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.upgrade_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -5955,6 +6982,42 @@ def test_import_instance_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_import_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.import_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.import_instance] = mock_rpc + + request = {} + client.import_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.import_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -6183,6 +7246,42 @@ def test_export_instance_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_export_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.export_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.export_instance] = mock_rpc + + request = {} + client.export_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -6411,6 +7510,42 @@ def test_failover_instance_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_failover_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.failover_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.failover_instance] = mock_rpc + + request = {} + client.failover_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.failover_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_failover_instance_rest_required_fields(request_type=cloud_redis.FailoverInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -6639,6 +7774,42 @@ def test_delete_instance_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_delete_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteInstanceRequest): transport_class = transports.CloudRedisRestTransport @@ -6864,6 +8035,42 @@ def test_reschedule_maintenance_rest(request_type): # Establish that the response is the type that we expect. 
assert response.operation.name == "operations/spam" +def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.reschedule_maintenance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + client._transport._wrapped_methods[client._transport.reschedule_maintenance] = mock_rpc + + request = {} + client.reschedule_maintenance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.reschedule_maintenance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.RescheduleMaintenanceRequest): transport_class = transports.CloudRedisRestTransport From e525807956851e6262a93119259f0547cfafd8d5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 07:43:29 -0400 Subject: [PATCH 1135/1339] chore(deps): bump idna from 3.6 to 3.7 (#2007) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Owl Bot --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 1c9dea2d241c..0a8cb6f8ae87 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -189,9 +189,9 @@ grpcio==1.62.1 \ # via # googleapis-common-protos # grpc-google-iam-v1 -idna==3.6 \ - --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ - --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ From 8b9247f32cc71a8ae5ad84fb726a546394127153 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 12 Apr 2024 14:30:42 -0400 Subject: [PATCH 1136/1339] fix: set `default` argument of `jinja-filters.map()` when looking up attributes (#1989) --- .../%namespace/%name/%version/%sub/types/_message.py.j2 | 4 ++-- .../tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 | 4 ++-- .../%namespace/%name_%version/%sub/types/_message.py.j2 | 4 ++-- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 index ea4d5eb25535..86b128925fa0 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/types/_message.py.j2 @@ -12,7 +12,7 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {% endif %} {# Use select filter to capture nested values. 
See https://github.com/googleapis/gapic-generator-python/issues/1083 #} - {%- if message.fields.values() | map(attribute="oneof") | select | list %} + {%- if message.fields.values() | map(attribute="oneof", default="") | select | list %} .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields {% endif %} @@ -43,7 +43,7 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {% endfor %} - {% if "next_page_token" in message.fields.values()|map(attribute='name') %} + {% if "next_page_token" in message.fields.values()|map(attribute='name', default="") %} @property def raw_page(self): return self diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 5cc8a9b80f29..af148882f4c9 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -607,7 +607,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): for message in response: assert isinstance(message, {{ method.output.ident }}) {% else %} - {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {% if "next_page_token" in method.output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response {% endif %} @@ -1215,7 +1215,7 @@ def test_{{ method.name|snake_case }}_rest(request_type): response = client.{{ method_name }}(request) {% endif %} - {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {% if "next_page_token" in 
method.output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 index eb2dc2399830..714b9eead1f6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/types/_message.py.j2 @@ -12,7 +12,7 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {% endif %} {# Use select filter to capture nested values. See https://github.com/googleapis/gapic-generator-python/issues/1083 #} - {%- if message.fields.values() | map(attribute="oneof") | select | list %} + {%- if message.fields.values() | map(attribute="oneof", default="") | select | list %} .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields {% endif %} @@ -43,7 +43,7 @@ class {{ message.name }}({{ p }}.Message): {% endif %} {% endfor %} - {% if "next_page_token" in message.fields.values()|map(attribute='name') %} + {% if "next_page_token" in message.fields.values()|map(attribute='name', default="") %} @property def raw_page(self): return self diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index bae87184fc33..2b3c3a382047 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -93,7 +93,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): for message in response: assert isinstance(message, {{ method.output.ident }}) {% else %} - {% if "next_page_token" in method.output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {% if "next_page_token" in method.output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response {% endif %} @@ -1278,7 +1278,7 @@ def test_{{ method_name }}_rest(request_type): response = client.{{ method_name }}(request) {% endif %} - {% if "next_page_token" in method_output.fields.values()|map(attribute='name') and not method.paged_result_field %} + {% if "next_page_token" in method_output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} {# Cheeser assertion to force code coverage for bad paginated methods #} assert response.raw_page is response From 192446afb353aba1e19f39524236f250e19fbf48 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" 
<78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 14:31:14 -0400 Subject: [PATCH 1137/1339] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#2009) Co-authored-by: Owl Bot --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index ee2c6d1f3b94..81f87c56917d 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8244c1253becbaa533f48724a6348e4b92a10df4b4dfb66d87e615e633059bdf -# created: 2024-04-07T11:43:40.730565127Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index dd61f5f32018..51f92b8e12f1 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ 
--hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ From 444d85e40b092c0ff87b50547ff81d6e85041a36 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 17 Apr 2024 20:11:29 -0400 Subject: [PATCH 1138/1339] fix: Update the lower bound for `google-apps-card` (#2012) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index be4cff6aee8f..395b2262935c 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -7,7 +7,7 @@ allowed version. Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has support for `barcode` in `google.cloud.documentai.types` --> {% set pypi_packages = { - ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, + ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, From 89667831729402cfcc90b9de402f21105e5fb27b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 19 Apr 2024 14:20:24 -0400 Subject: [PATCH 1139/1339] chore(main): release 1.17.0 (#2005) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ 
packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 17934635e2f8..f6eac1c55ec3 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.17.0](https://github.com/googleapis/gapic-generator-python/compare/v1.16.1...v1.17.0) (2024-04-18) + + +### Features + +* Allow Callables for transport and channel init ([#1699](https://github.com/googleapis/gapic-generator-python/issues/1699)) ([62855c1](https://github.com/googleapis/gapic-generator-python/commit/62855c11570bb3a42cebee43cecd0e59ffb01573)) + + +### Bug Fixes + +* Set `default` argument of `jinja-filters.map()` when looking up attributes ([#1989](https://github.com/googleapis/gapic-generator-python/issues/1989)) ([3e74a0a](https://github.com/googleapis/gapic-generator-python/commit/3e74a0a3f7f94cf21aaf198ecd55cca419cedbd2)) +* Update the lower bound for `google-apps-card` ([#2012](https://github.com/googleapis/gapic-generator-python/issues/2012)) ([9027a5f](https://github.com/googleapis/gapic-generator-python/commit/9027a5fc83218241981a326984ed1aad3a162a4b)) + ## [1.16.1](https://github.com/googleapis/gapic-generator-python/compare/v1.16.0...v1.16.1) (2024-03-22) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 947348fba2e8..4afb61797237 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.16.1" +version = "1.17.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 4027b46ae3f93a25be74239b4af33a1fb259e13f Mon Sep 17 00:00:00 2001 From: ohmayr Date: Fri, 26 
Apr 2024 05:56:58 -0400 Subject: [PATCH 1140/1339] fix: type error for compute client(s) tests (#2014) --- .../gapic/%name_%version/%sub/test_macros.j2 | 5 +- .../fragments/test_compute_operation.proto | 108 +++++++++++++++ .../unit/gapic/asset_v1/test_asset_service.py | 115 ++++++++++++---- .../credentials_v1/test_iam_credentials.py | 20 ++- .../unit/gapic/eventarc_v1/test_eventarc.py | 90 +++++++++--- .../logging_v2/test_config_service_v2.py | 128 +++++++++++++----- .../logging_v2/test_logging_service_v2.py | 22 ++- .../logging_v2/test_metrics_service_v2.py | 20 ++- .../unit/gapic/redis_v1/test_cloud_redis.py | 55 ++++++-- 9 files changed, 463 insertions(+), 100 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/test_compute_operation.proto diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 2b3c3a382047..9cb64a3d2992 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -135,6 +135,7 @@ def test_{{ method_name }}_empty_call(): with mock.patch.object( type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.{{ method_name }}() call.assert_called() _, args, _ = call.mock_calls[0] @@ -176,6 +177,7 @@ def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.{{ method_name }}(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -214,8 +216,8 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.{{method.transport_safe_name|snake_case}}] = mock_rpc - {% if method.client_streaming %} request = [{}] client.{{ method.safe_name|snake_case }}(request) @@ -1338,6 +1340,7 @@ def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.{{method.transport_safe_name|snake_case}}] = mock_rpc {% if method.client_streaming %} diff --git a/packages/gapic-generator/tests/fragments/test_compute_operation.proto b/packages/gapic-generator/tests/fragments/test_compute_operation.proto new file mode 100644 index 000000000000..d471a5ebd823 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_compute_operation.proto @@ -0,0 +1,108 @@ +// Copyright (C) 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/annotations.proto"; +import "google/cloud/extended_operations.proto"; +import "google/protobuf/struct.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; + + +message GetRegionOperationRequest { + // Name of the Operations resource to return. + string operation = 52090215 [ + (google.api.field_behavior) = REQUIRED, + (google.cloud.operation_response_field) = "name" + ]; + + // Project ID for this request. + string project = 227560217 [(google.api.field_behavior) = REQUIRED]; + + // Name of the region for this request. + string region = 138946292 [(google.api.field_behavior) = REQUIRED]; +} + +// A request message for Addresses.Insert. See the method description for details. +message InsertAddressRequest { + // The body resource for this request + Address address_resource = 483888121; + + // Project ID for this request. + string project = 227560217; + + // Name of the region for this request. + string region = 138946292; + +} + +message Address { + // The static IP address represented by this resource. + optional string address = 462920692; +} + +message Operation { + + // [Output Only] The status of the operation, which can be one of the following: `PENDING`, `RUNNING`, or `DONE`. + enum Status { + DONE = 0; + } + + // [Output Only] Name of the operation. + optional string name = 3373707 [(google.cloud.operation_field) = NAME]; + + // [Output Only] If the operation fails, this field contains the HTTP error message that was returned, such as `NOT FOUND`. + optional string http_error_message = 202521945 [(google.cloud.operation_field) = ERROR_MESSAGE]; + + // [Output Only] If the operation fails, this field contains the HTTP error status code that was returned. For example, a `404` means the resource was not found. 
+ optional int32 http_error_status_code = 312345196 [(google.cloud.operation_field) = ERROR_CODE]; + + // [Output Only] The status of the operation, which can be one of the following: `PENDING`, `RUNNING`, or `DONE`. + optional Status status = 181260274 [(google.cloud.operation_field) = STATUS]; +} + +// The RegionOperations API. +service RegionOperations { + option (google.api.default_host) = + "compute.googleapis.com"; + + // Retrieves the specified region-specific Operations resource. + rpc Get(GetRegionOperationRequest) returns (Operation) { + option (google.api.http) = { + get: "/compute/v1/projects/{project}/regions/{region}/operations/{operation}" + }; + option (google.api.method_signature) = "project,region,operation"; + option (google.cloud.operation_polling_method) = true; + } +} + +// The Addresses API. +service Addresses { + option (google.api.default_host) = + "compute.googleapis.com"; + + // Creates an address resource in the specified project by using the data included in the request. 
+ rpc Insert(InsertAddressRequest) returns (Operation) { + option (google.api.http) = { + body: "address_resource" + post: "/compute/v1/projects/{project}/regions/{region}/addresses" + }; + option (google.api.method_signature) = "project,region,address_resource"; + option (google.cloud.operation_service) = "RegionOperations"; + } +} + diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index a3de204fbe45..22f07d036865 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -812,6 +812,7 @@ def test_export_assets_empty_call(): with mock.patch.object( type(client.transport.export_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.export_assets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -837,6 +838,7 @@ def test_export_assets_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.export_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.export_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -862,8 +864,8 @@ def test_export_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc - request = {} client.export_assets(request) @@ -1089,6 +1091,7 @@ def test_list_assets_empty_call(): with mock.patch.object( type(client.transport.list_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_assets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1115,6 +1118,7 @@ def test_list_assets_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1141,8 +1145,8 @@ def test_list_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc - request = {} client.list_assets(request) @@ -1633,6 +1637,7 @@ def test_batch_get_assets_history_empty_call(): with mock.patch.object( type(client.transport.batch_get_assets_history), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.batch_get_assets_history() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1658,6 +1663,7 @@ def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.batch_get_assets_history), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.batch_get_assets_history(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1683,8 +1689,8 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc - request = {} client.batch_get_assets_history(request) @@ -1908,6 +1914,7 @@ def test_create_feed_empty_call(): with mock.patch.object( type(client.transport.create_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_feed() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1934,6 +1941,7 @@ def test_create_feed_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1960,8 +1968,8 @@ def test_create_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc - request = {} client.create_feed(request) @@ -2282,6 +2290,7 @@ def test_get_feed_empty_call(): with mock.patch.object( type(client.transport.get_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_feed() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2307,6 +2316,7 @@ def test_get_feed_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2332,8 +2342,8 @@ def test_get_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc - request = {} client.get_feed(request) @@ -2644,6 +2654,7 @@ def test_list_feeds_empty_call(): with mock.patch.object( type(client.transport.list_feeds), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_feeds() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2669,6 +2680,7 @@ def test_list_feeds_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_feeds), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_feeds(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2694,8 +2706,8 @@ def test_list_feeds_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc - request = {} client.list_feeds(request) @@ -3001,6 +3013,7 @@ def test_update_feed_empty_call(): with mock.patch.object( type(client.transport.update_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_feed() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3025,6 +3038,7 @@ def test_update_feed_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3049,8 +3063,8 @@ def test_update_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc - request = {} client.update_feed(request) @@ -3360,6 +3374,7 @@ def test_delete_feed_empty_call(): with mock.patch.object( type(client.transport.delete_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_feed() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3385,6 +3400,7 @@ def test_delete_feed_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_feed), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_feed(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3410,8 +3426,8 @@ def test_delete_feed_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc - request = {} client.delete_feed(request) @@ -3707,6 +3723,7 @@ def test_search_all_resources_empty_call(): with mock.patch.object( type(client.transport.search_all_resources), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.search_all_resources() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3735,6 +3752,7 @@ def test_search_all_resources_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.search_all_resources), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.search_all_resources(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3763,8 +3781,8 @@ def test_search_all_resources_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc - request = {} client.search_all_resources(request) @@ -4277,6 +4295,7 @@ def test_search_all_iam_policies_empty_call(): with mock.patch.object( type(client.transport.search_all_iam_policies), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.search_all_iam_policies() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4305,6 +4324,7 @@ def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.search_all_iam_policies), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.search_all_iam_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4333,8 +4353,8 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc - request = {} client.search_all_iam_policies(request) @@ -4837,6 +4857,7 @@ def test_analyze_iam_policy_empty_call(): with mock.patch.object( type(client.transport.analyze_iam_policy), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_iam_policy() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4862,6 +4883,7 @@ def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.analyze_iam_policy), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_iam_policy(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4887,8 +4909,8 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc - request = {} client.analyze_iam_policy(request) @@ -5104,6 +5126,7 @@ def test_analyze_iam_policy_longrunning_empty_call(): with mock.patch.object( type(client.transport.analyze_iam_policy_longrunning), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_iam_policy_longrunning() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5129,6 +5152,7 @@ def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_fi with mock.patch.object( type(client.transport.analyze_iam_policy_longrunning), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_iam_policy_longrunning(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5154,8 +5178,8 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc - request = {} client.analyze_iam_policy_longrunning(request) @@ -5379,6 +5403,7 @@ def test_analyze_move_empty_call(): with mock.patch.object( type(client.transport.analyze_move), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_move() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5405,6 +5430,7 @@ def test_analyze_move_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.analyze_move), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.analyze_move(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5431,8 +5457,8 @@ def test_analyze_move_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc - request = {} client.analyze_move(request) @@ -5650,6 +5676,7 @@ def test_query_assets_empty_call(): with mock.patch.object( type(client.transport.query_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.query_assets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5678,6 +5705,7 @@ def test_query_assets_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.query_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.query_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5706,8 +5734,8 @@ def test_query_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - request = {} client.query_assets(request) @@ -5935,6 +5963,7 @@ def test_create_saved_query_empty_call(): with mock.patch.object( type(client.transport.create_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.create_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5961,6 +5990,7 @@ def test_create_saved_query_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5987,8 +6017,8 @@ def test_create_saved_query_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc - request = {} client.create_saved_query(request) @@ -6324,6 +6354,7 @@ def test_get_saved_query_empty_call(): with mock.patch.object( type(client.transport.get_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6349,6 +6380,7 @@ def test_get_saved_query_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6374,8 +6406,8 @@ def test_get_saved_query_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc - request = {} client.get_saved_query(request) @@ -6685,6 +6717,7 @@ def test_list_saved_queries_empty_call(): with mock.patch.object( type(client.transport.list_saved_queries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_saved_queries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6712,6 +6745,7 @@ def test_list_saved_queries_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_saved_queries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_saved_queries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6739,8 +6773,8 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc - request = {} client.list_saved_queries(request) @@ -7239,6 +7273,7 @@ def test_update_saved_query_empty_call(): with mock.patch.object( type(client.transport.update_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7263,6 +7298,7 @@ def test_update_saved_query_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.update_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7287,8 +7323,8 @@ def test_update_saved_query_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc - request = {} client.update_saved_query(request) @@ -7605,6 +7641,7 @@ def test_delete_saved_query_empty_call(): with mock.patch.object( type(client.transport.delete_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7630,6 +7667,7 @@ def test_delete_saved_query_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_saved_query), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_saved_query(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7655,8 +7693,8 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc - request = {} client.delete_saved_query(request) @@ -7950,6 +7988,7 @@ def test_batch_get_effective_iam_policies_empty_call(): with mock.patch.object( type(client.transport.batch_get_effective_iam_policies), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.batch_get_effective_iam_policies() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7975,6 +8014,7 @@ def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_ with mock.patch.object( type(client.transport.batch_get_effective_iam_policies), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.batch_get_effective_iam_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8000,8 +8040,8 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc - request = {} client.batch_get_effective_iam_policies(request) @@ -8217,6 +8257,7 @@ def test_analyze_org_policies_empty_call(): with mock.patch.object( type(client.transport.analyze_org_policies), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_org_policies() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8245,6 +8286,7 @@ def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.analyze_org_policies), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_org_policies(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8273,8 +8315,8 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc - request = {} client.analyze_org_policies(request) @@ -8787,6 +8829,7 @@ def test_analyze_org_policy_governed_containers_empty_call(): with mock.patch.object( type(client.transport.analyze_org_policy_governed_containers), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_org_policy_governed_containers() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8815,6 +8858,7 @@ def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_popu with mock.patch.object( type(client.transport.analyze_org_policy_governed_containers), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_org_policy_governed_containers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8843,8 +8887,8 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc - request = {} client.analyze_org_policy_governed_containers(request) @@ -9357,6 +9401,7 @@ def test_analyze_org_policy_governed_assets_empty_call(): with mock.patch.object( type(client.transport.analyze_org_policy_governed_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.analyze_org_policy_governed_assets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9385,6 +9430,7 @@ def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populate with mock.patch.object( type(client.transport.analyze_org_policy_governed_assets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.analyze_org_policy_governed_assets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9413,8 +9459,8 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc - request = {} client.analyze_org_policy_governed_assets(request) @@ -9930,6 +9976,7 @@ def test_export_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.export_assets] = mock_rpc request = {} @@ -10146,6 +10193,7 @@ def test_list_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_assets] = mock_rpc request = {} @@ -10467,6 +10515,7 @@ def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.batch_get_assets_history] = mock_rpc request = {} @@ -10690,6 +10739,7 @@ def test_create_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc request = {} @@ -10969,6 +11019,7 @@ def test_get_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_feed] = mock_rpc request = {} @@ -11233,6 +11284,7 @@ def test_list_feeds_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_feeds] = mock_rpc request = {} @@ -11507,6 +11559,7 @@ def test_update_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_feed] = mock_rpc request = {} @@ -11764,6 +11817,7 @@ def test_delete_feed_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.delete_feed] = mock_rpc request = {} @@ -12020,6 +12074,7 @@ def test_search_all_resources_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.search_all_resources] = mock_rpc request = {} @@ -12347,6 +12402,7 @@ def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.search_all_iam_policies] = mock_rpc request = {} @@ -12672,6 +12728,7 @@ def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_iam_policy] = mock_rpc request = {} @@ -12877,6 +12934,7 @@ def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_iam_policy_longrunning] = mock_rpc request = {} @@ -13086,6 +13144,7 @@ def test_analyze_move_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.analyze_move] = mock_rpc request = {} @@ -13314,6 +13373,7 @@ def test_query_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc request = {} @@ -13598,6 +13658,7 @@ def test_create_saved_query_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_saved_query] = mock_rpc request = {} @@ -13888,6 +13949,7 @@ def test_get_saved_query_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_saved_query] = mock_rpc request = {} @@ -14154,6 +14216,7 @@ def test_list_saved_queries_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_saved_queries] = mock_rpc request = {} @@ -14547,6 +14610,7 @@ def test_update_saved_query_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.update_saved_query] = mock_rpc request = {} @@ -14808,6 +14872,7 @@ def test_delete_saved_query_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_saved_query] = mock_rpc request = {} @@ -15062,6 +15127,7 @@ def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.batch_get_effective_iam_policies] = mock_rpc request = {} @@ -15288,6 +15354,7 @@ def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_org_policies] = mock_rpc request = {} @@ -15626,6 +15693,7 @@ def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_containers] = mock_rpc request = {} @@ -15964,6 +16032,7 @@ def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.analyze_org_policy_governed_assets] = mock_rpc request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 63a1da8acbfa..9e86be34700f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -805,6 +805,7 @@ def test_generate_access_token_empty_call(): with mock.patch.object( type(client.transport.generate_access_token), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.generate_access_token() call.assert_called() _, args, _ = call.mock_calls[0] @@ -830,6 +831,7 @@ def test_generate_access_token_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.generate_access_token), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.generate_access_token(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -855,8 +857,8 @@ def test_generate_access_token_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.generate_access_token] = mock_rpc - request = {} client.generate_access_token(request) @@ -1183,6 +1185,7 @@ def test_generate_id_token_empty_call(): with mock.patch.object( type(client.transport.generate_id_token), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.generate_id_token() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1209,6 +1212,7 @@ def test_generate_id_token_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.generate_id_token), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.generate_id_token(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1235,8 +1239,8 @@ def test_generate_id_token_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc - request = {} client.generate_id_token(request) @@ -1569,6 +1573,7 @@ def test_sign_blob_empty_call(): with mock.patch.object( type(client.transport.sign_blob), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.sign_blob() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1594,6 +1599,7 @@ def test_sign_blob_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.sign_blob), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.sign_blob(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1619,8 +1625,8 @@ def test_sign_blob_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.sign_blob] = mock_rpc - request = {} client.sign_blob(request) @@ -1946,6 +1952,7 @@ def test_sign_jwt_empty_call(): with mock.patch.object( type(client.transport.sign_jwt), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.sign_jwt() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1972,6 +1979,7 @@ def test_sign_jwt_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.sign_jwt), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.sign_jwt(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1998,8 +2006,8 @@ def test_sign_jwt_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.sign_jwt] = mock_rpc - request = {} client.sign_jwt(request) @@ -2330,6 +2338,7 @@ def test_generate_access_token_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.generate_access_token] = mock_rpc request = {} @@ -2607,6 +2616,7 @@ def test_generate_id_token_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.generate_id_token] = mock_rpc request = {} @@ -2886,6 +2896,7 @@ def test_sign_blob_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.sign_blob] = mock_rpc request = {} @@ -3163,6 +3174,7 @@ def test_sign_jwt_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.sign_jwt] = mock_rpc request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 8774c4b5d355..06b205c1d133 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -833,6 +833,7 @@ def test_get_trigger_empty_call(): with mock.patch.object( type(client.transport.get_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_trigger() call.assert_called() _, args, _ = call.mock_calls[0] @@ -858,6 +859,7 @@ def test_get_trigger_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -883,8 +885,8 @@ def test_get_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc - request = {} client.get_trigger(request) @@ -1199,6 +1201,7 @@ def test_list_triggers_empty_call(): with mock.patch.object( type(client.transport.list_triggers), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_triggers() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1227,6 +1230,7 @@ def test_list_triggers_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_triggers), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_triggers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1255,8 +1259,8 @@ def test_list_triggers_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc - request = {} client.list_triggers(request) @@ -1749,6 +1753,7 @@ def test_create_trigger_empty_call(): with mock.patch.object( type(client.transport.create_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_trigger() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1775,6 +1780,7 @@ def test_create_trigger_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1801,8 +1807,8 @@ def test_create_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc - request = {} client.create_trigger(request) @@ -2129,6 +2135,7 @@ def test_update_trigger_empty_call(): with mock.patch.object( type(client.transport.update_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_trigger() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2153,6 +2160,7 @@ def test_update_trigger_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.update_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2177,8 +2185,8 @@ def test_update_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc - request = {} client.update_trigger(request) @@ -2505,6 +2513,7 @@ def test_delete_trigger_empty_call(): with mock.patch.object( type(client.transport.delete_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_trigger() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2531,6 +2540,7 @@ def test_delete_trigger_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_trigger), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_trigger(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2557,8 +2567,8 @@ def test_delete_trigger_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc - request = {} client.delete_trigger(request) @@ -2889,6 +2899,7 @@ def test_get_channel_empty_call(): with mock.patch.object( type(client.transport.get_channel), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_channel() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2914,6 +2925,7 @@ def test_get_channel_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_channel), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2939,8 +2951,8 @@ def test_get_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc - request = {} client.get_channel(request) @@ -3258,6 +3270,7 @@ def test_list_channels_empty_call(): with mock.patch.object( type(client.transport.list_channels), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_channels() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3285,6 +3298,7 @@ def test_list_channels_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_channels), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_channels(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3312,8 +3326,8 @@ def test_list_channels_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc - request = {} client.list_channels(request) @@ -3806,6 +3820,7 @@ def test_create_channel_empty_call(): with mock.patch.object( type(client.transport.create_channel_), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_channel() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3832,6 +3847,7 @@ def test_create_channel_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_channel_), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3858,8 +3874,8 @@ def test_create_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc - request = {} client.create_channel(request) @@ -4186,6 +4202,7 @@ def test_update_channel_empty_call(): with mock.patch.object( type(client.transport.update_channel), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_channel() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4210,6 +4227,7 @@ def test_update_channel_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_channel), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.update_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4234,8 +4252,8 @@ def test_update_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc - request = {} client.update_channel(request) @@ -4552,6 +4570,7 @@ def test_delete_channel_empty_call(): with mock.patch.object( type(client.transport.delete_channel), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_channel() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4577,6 +4596,7 @@ def test_delete_channel_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_channel), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_channel(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4602,8 +4622,8 @@ def test_delete_channel_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc - request = {} client.delete_channel(request) @@ -4915,6 +4935,7 @@ def test_get_provider_empty_call(): with mock.patch.object( type(client.transport.get_provider), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_provider() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4940,6 +4961,7 @@ def test_get_provider_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_provider), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_provider(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4965,8 +4987,8 @@ def test_get_provider_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc - request = {} client.get_provider(request) @@ -5272,6 +5294,7 @@ def test_list_providers_empty_call(): with mock.patch.object( type(client.transport.list_providers), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_providers() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5300,6 +5323,7 @@ def test_list_providers_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_providers), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_providers(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5328,8 +5352,8 @@ def test_list_providers_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc - request = {} client.list_providers(request) @@ -5831,6 +5855,7 @@ def test_get_channel_connection_empty_call(): with mock.patch.object( type(client.transport.get_channel_connection), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_channel_connection() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5856,6 +5881,7 @@ def test_get_channel_connection_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_channel_connection), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5881,8 +5907,8 @@ def test_get_channel_connection_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc - request = {} client.get_channel_connection(request) @@ -6194,6 +6220,7 @@ def test_list_channel_connections_empty_call(): with mock.patch.object( type(client.transport.list_channel_connections), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_channel_connections() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6220,6 +6247,7 @@ def test_list_channel_connections_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_channel_connections), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.list_channel_connections(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6246,8 +6274,8 @@ def test_list_channel_connections_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc - request = {} client.list_channel_connections(request) @@ -6740,6 +6768,7 @@ def test_create_channel_connection_empty_call(): with mock.patch.object( type(client.transport.create_channel_connection), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_channel_connection() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6766,6 +6795,7 @@ def test_create_channel_connection_non_empty_request_with_auto_populated_field() with mock.patch.object( type(client.transport.create_channel_connection), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6792,8 +6822,8 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc - request = {} client.create_channel_connection(request) @@ -7120,6 +7150,7 @@ def test_delete_channel_connection_empty_call(): with mock.patch.object( type(client.transport.delete_channel_connection), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_channel_connection() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7145,6 +7176,7 @@ def test_delete_channel_connection_non_empty_request_with_auto_populated_field() with mock.patch.object( type(client.transport.delete_channel_connection), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7170,8 +7202,8 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc - request = {} client.delete_channel_connection(request) @@ -7483,6 +7515,7 @@ def test_get_google_channel_config_empty_call(): with mock.patch.object( type(client.transport.get_google_channel_config), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_google_channel_config() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7508,6 +7541,7 @@ def test_get_google_channel_config_non_empty_request_with_auto_populated_field() with mock.patch.object( type(client.transport.get_google_channel_config), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_google_channel_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7533,8 +7567,8 @@ def test_get_google_channel_config_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc - request = {} client.get_google_channel_config(request) @@ -7840,6 +7874,7 @@ def test_update_google_channel_config_empty_call(): with mock.patch.object( type(client.transport.update_google_channel_config), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_google_channel_config() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7864,6 +7899,7 @@ def test_update_google_channel_config_non_empty_request_with_auto_populated_fiel with mock.patch.object( type(client.transport.update_google_channel_config), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_google_channel_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7888,8 +7924,8 @@ def test_update_google_channel_config_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc - request = {} client.update_google_channel_config(request) @@ -8218,6 +8254,7 @@ def test_get_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_trigger] = mock_rpc request = {} @@ -8486,6 +8523,7 @@ def test_list_triggers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_triggers] = mock_rpc request = {} @@ -8868,6 +8906,7 @@ def test_create_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc request = {} @@ -9222,6 +9261,7 @@ def test_update_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_trigger] = mock_rpc request = {} @@ -9497,6 +9537,7 @@ def test_delete_trigger_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_trigger] = mock_rpc request = {} @@ -9789,6 +9830,7 @@ def test_get_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_channel] = mock_rpc request = {} @@ -10057,6 +10099,7 @@ def test_list_channels_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_channels] = mock_rpc request = {} @@ -10439,6 +10482,7 @@ def test_create_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_channel_] = mock_rpc request = {} @@ -10793,6 +10837,7 @@ def test_update_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_channel] = mock_rpc request = {} @@ -11066,6 +11111,7 @@ def test_delete_channel_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_channel] = mock_rpc request = {} @@ -11347,6 +11393,7 @@ def test_get_provider_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_provider] = mock_rpc request = {} @@ -11615,6 +11662,7 @@ def test_list_providers_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_providers] = mock_rpc request = {} @@ -11944,6 +11992,7 @@ def test_get_channel_connection_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_channel_connection] = mock_rpc request = {} @@ -12212,6 +12261,7 @@ def test_list_channel_connections_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_channel_connections] = mock_rpc request = {} @@ -12594,6 +12644,7 @@ def test_create_channel_connection_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_channel_connection] = mock_rpc request = {} @@ -12873,6 +12924,7 @@ def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_channel_connection] = mock_rpc request = {} @@ -13141,6 +13193,7 @@ def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.get_google_channel_config] = mock_rpc request = {} @@ -13473,6 +13526,7 @@ def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 027deec8533b..138133b22d81 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -792,6 +792,7 @@ def test_list_buckets_empty_call(): with mock.patch.object( type(client.transport.list_buckets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] @@ -818,6 +819,7 @@ def test_list_buckets_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_buckets), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_buckets(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -844,8 +846,8 @@ def test_list_buckets_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc - request = {} client.list_buckets(request) @@ -1350,6 +1352,7 @@ def test_get_bucket_empty_call(): with mock.patch.object( type(client.transport.get_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1375,6 +1378,7 @@ def test_get_bucket_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1400,8 +1404,8 @@ def test_get_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc - request = {} client.get_bucket(request) @@ -1635,6 +1639,7 @@ def test_create_bucket_async_empty_call(): with mock.patch.object( type(client.transport.create_bucket_async), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1661,6 +1666,7 @@ def test_create_bucket_async_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_bucket_async), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.create_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1687,8 +1693,8 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_bucket_async] = mock_rpc - request = {} client.create_bucket_async(request) @@ -1911,6 +1917,7 @@ def test_update_bucket_async_empty_call(): with mock.patch.object( type(client.transport.update_bucket_async), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1936,6 +1943,7 @@ def test_update_bucket_async_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_bucket_async), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_bucket_async(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1961,8 +1969,8 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_bucket_async] = mock_rpc - request = {} client.update_bucket_async(request) @@ -2200,6 +2208,7 @@ def test_create_bucket_empty_call(): with mock.patch.object( type(client.transport.create_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2226,6 +2235,7 @@ def test_create_bucket_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2252,8 +2262,8 @@ def test_create_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc - request = {} client.create_bucket(request) @@ -2502,6 +2512,7 @@ def test_update_bucket_empty_call(): with mock.patch.object( type(client.transport.update_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2527,6 +2538,7 @@ def test_update_bucket_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2552,8 +2564,8 @@ def test_update_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc - request = {} client.update_bucket(request) @@ -2787,6 +2799,7 @@ def test_delete_bucket_empty_call(): with mock.patch.object( type(client.transport.delete_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2812,6 +2825,7 @@ def test_delete_bucket_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2837,8 +2851,8 @@ def test_delete_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc - request = {} client.delete_bucket(request) @@ -3049,6 +3063,7 @@ def test_undelete_bucket_empty_call(): with mock.patch.object( type(client.transport.undelete_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3074,6 +3089,7 @@ def test_undelete_bucket_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.undelete_bucket), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.undelete_bucket(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3099,8 +3115,8 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc - request = {} client.undelete_bucket(request) @@ -3314,6 +3330,7 @@ def test_list_views_empty_call(): with mock.patch.object( type(client.transport.list_views), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3340,6 +3357,7 @@ def test_list_views_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_views), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_views(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3366,8 +3384,8 @@ def test_list_views_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_views] = mock_rpc - request = {} client.list_views(request) @@ -3864,6 +3882,7 @@ def test_get_view_empty_call(): with mock.patch.object( type(client.transport.get_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3889,6 +3908,7 @@ def test_get_view_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3914,8 +3934,8 @@ def test_get_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_view] = mock_rpc - request = {} client.get_view(request) @@ -4144,6 +4164,7 @@ def test_create_view_empty_call(): with mock.patch.object( type(client.transport.create_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4170,6 +4191,7 @@ def test_create_view_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4196,8 +4218,8 @@ def test_create_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.create_view] = mock_rpc - request = {} client.create_view(request) @@ -4426,6 +4448,7 @@ def test_update_view_empty_call(): with mock.patch.object( type(client.transport.update_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4451,6 +4474,7 @@ def test_update_view_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4476,8 +4500,8 @@ def test_update_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_view] = mock_rpc - request = {} client.update_view(request) @@ -4699,6 +4723,7 @@ def test_delete_view_empty_call(): with mock.patch.object( type(client.transport.delete_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4724,6 +4749,7 @@ def test_delete_view_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_view), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_view(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4749,8 +4775,8 @@ def test_delete_view_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc - request = {} client.delete_view(request) @@ -4964,6 +4990,7 @@ def test_list_sinks_empty_call(): with mock.patch.object( type(client.transport.list_sinks), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4990,6 +5017,7 @@ def test_list_sinks_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_sinks), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_sinks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5016,8 +5044,8 @@ def test_list_sinks_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc - request = {} client.list_sinks(request) @@ -5524,6 +5552,7 @@ def test_get_sink_empty_call(): with mock.patch.object( type(client.transport.get_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5549,6 +5578,7 @@ def test_get_sink_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5574,8 +5604,8 @@ def test_get_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc - request = {} client.get_sink(request) @@ -5911,6 +5941,7 @@ def test_create_sink_empty_call(): with mock.patch.object( type(client.transport.create_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5936,6 +5967,7 @@ def test_create_sink_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5961,8 +5993,8 @@ def test_create_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc - request = {} client.create_sink(request) @@ -6308,6 +6340,7 @@ def test_update_sink_empty_call(): with mock.patch.object( type(client.transport.update_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6333,6 +6366,7 @@ def test_update_sink_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6358,8 +6392,8 @@ def test_update_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc - request = {} client.update_sink(request) @@ -6698,6 +6732,7 @@ def test_delete_sink_empty_call(): with mock.patch.object( type(client.transport.delete_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] @@ -6723,6 +6758,7 @@ def test_delete_sink_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_sink), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_sink(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -6748,8 +6784,8 @@ def test_delete_sink_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc - request = {} client.delete_sink(request) @@ -7042,6 +7078,7 @@ def test_create_link_empty_call(): with mock.patch.object( type(client.transport.create_link), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7068,6 +7105,7 @@ def test_create_link_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_link), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7094,8 +7132,8 @@ def test_create_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_link] = mock_rpc - request = {} client.create_link(request) @@ -7422,6 +7460,7 @@ def test_delete_link_empty_call(): with mock.patch.object( type(client.transport.delete_link), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7447,6 +7486,7 @@ def test_delete_link_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_link), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7472,8 +7512,8 @@ def test_delete_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc - request = {} client.delete_link(request) @@ -7783,6 +7823,7 @@ def test_list_links_empty_call(): with mock.patch.object( type(client.transport.list_links), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] @@ -7809,6 +7850,7 @@ def test_list_links_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_links), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_links(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -7835,8 +7877,8 @@ def test_list_links_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_links] = mock_rpc - request = {} client.list_links(request) @@ -8333,6 +8375,7 @@ def test_get_link_empty_call(): with mock.patch.object( type(client.transport.get_link), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8358,6 +8401,7 @@ def test_get_link_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_link), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_link(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8383,8 +8427,8 @@ def test_get_link_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_link] = mock_rpc - request = {} client.get_link(request) @@ -8691,6 +8735,7 @@ def test_list_exclusions_empty_call(): with mock.patch.object( type(client.transport.list_exclusions), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -8717,6 +8762,7 @@ def test_list_exclusions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_exclusions), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.list_exclusions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -8743,8 +8789,8 @@ def test_list_exclusions_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc - request = {} client.list_exclusions(request) @@ -9243,6 +9289,7 @@ def test_get_exclusion_empty_call(): with mock.patch.object( type(client.transport.get_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9268,6 +9315,7 @@ def test_get_exclusion_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9293,8 +9341,8 @@ def test_get_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc - request = {} client.get_exclusion(request) @@ -9610,6 +9658,7 @@ def test_create_exclusion_empty_call(): with mock.patch.object( type(client.transport.create_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -9635,6 +9684,7 @@ def test_create_exclusion_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -9660,8 +9710,8 @@ def test_create_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_exclusion] = mock_rpc - request = {} client.create_exclusion(request) @@ -9987,6 +10037,7 @@ def test_update_exclusion_empty_call(): with mock.patch.object( type(client.transport.update_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10012,6 +10063,7 @@ def test_update_exclusion_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10037,8 +10089,8 @@ def test_update_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.update_exclusion] = mock_rpc - request = {} client.update_exclusion(request) @@ -10365,6 +10417,7 @@ def test_delete_exclusion_empty_call(): with mock.patch.object( type(client.transport.delete_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10390,6 +10443,7 @@ def test_delete_exclusion_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_exclusion), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_exclusion(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10415,8 +10469,8 @@ def test_delete_exclusion_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_exclusion] = mock_rpc - request = {} client.delete_exclusion(request) @@ -10718,6 +10772,7 @@ def test_get_cmek_settings_empty_call(): with mock.patch.object( type(client.transport.get_cmek_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -10743,6 +10798,7 @@ def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_cmek_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -10768,8 +10824,8 @@ def test_get_cmek_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_cmek_settings] = mock_rpc - request = {} client.get_cmek_settings(request) @@ -11003,6 +11059,7 @@ def test_update_cmek_settings_empty_call(): with mock.patch.object( type(client.transport.update_cmek_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -11028,6 +11085,7 @@ def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_cmek_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_cmek_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -11053,8 +11111,8 @@ def test_update_cmek_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_cmek_settings] = mock_rpc - request = {} client.update_cmek_settings(request) @@ -11290,6 +11348,7 @@ def test_get_settings_empty_call(): with mock.patch.object( type(client.transport.get_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -11315,6 +11374,7 @@ def test_get_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -11340,8 +11400,8 @@ def test_get_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc - request = {} client.get_settings(request) @@ -11662,6 +11722,7 @@ def test_update_settings_empty_call(): with mock.patch.object( type(client.transport.update_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] @@ -11687,6 +11748,7 @@ def test_update_settings_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_settings), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_settings(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -11712,8 +11774,8 @@ def test_update_settings_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc - request = {} client.update_settings(request) @@ -12033,6 +12095,7 @@ def test_copy_log_entries_empty_call(): with mock.patch.object( type(client.transport.copy_log_entries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.copy_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -12060,6 +12123,7 @@ def test_copy_log_entries_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.copy_log_entries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.copy_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -12087,8 +12151,8 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.copy_log_entries] = mock_rpc - request = {} client.copy_log_entries(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f27f07e668c6..73f543d55de3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -791,6 +791,7 @@ def test_delete_log_empty_call(): with mock.patch.object( type(client.transport.delete_log), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] @@ -816,6 +817,7 @@ def test_delete_log_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_log), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_log(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -841,8 +843,8 @@ def test_delete_log_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc - request = {} client.delete_log(request) @@ -1136,6 +1138,7 @@ def test_write_log_entries_empty_call(): with mock.patch.object( type(client.transport.write_log_entries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1161,6 +1164,7 @@ def test_write_log_entries_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.write_log_entries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.write_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1186,8 +1190,8 @@ def test_write_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.write_log_entries] = mock_rpc - request = {} client.write_log_entries(request) @@ -1452,6 +1456,7 @@ def test_list_log_entries_empty_call(): with mock.patch.object( type(client.transport.list_log_entries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1479,6 +1484,7 @@ def test_list_log_entries_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_log_entries), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_log_entries(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1506,8 +1512,8 @@ def test_list_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_log_entries] = mock_rpc - request = {} client.list_log_entries(request) @@ -1952,6 +1958,7 @@ def test_list_monitored_resource_descriptors_empty_call(): with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_monitored_resource_descriptors() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1977,6 +1984,7 @@ def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populat with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.list_monitored_resource_descriptors(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2002,8 +2010,8 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_monitored_resource_descriptors] = mock_rpc - request = {} client.list_monitored_resource_descriptors(request) @@ -2348,6 +2356,7 @@ def test_list_logs_empty_call(): with mock.patch.object( type(client.transport.list_logs), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2374,6 +2383,7 @@ def test_list_logs_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_logs), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_logs(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2400,8 +2410,8 @@ def test_list_logs_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc - request = {} client.list_logs(request) @@ -2901,8 +2911,8 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.tail_log_entries] = mock_rpc - request = [{}] client.tail_log_entries(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7cdd396a89bf..be4325c6782f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -792,6 +792,7 @@ def test_list_log_metrics_empty_call(): with mock.patch.object( type(client.transport.list_log_metrics), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_log_metrics() call.assert_called() _, args, _ = call.mock_calls[0] @@ -818,6 +819,7 @@ def test_list_log_metrics_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_log_metrics), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_log_metrics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -844,8 +846,8 @@ def test_list_log_metrics_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_log_metrics] = mock_rpc - request = {} client.list_log_metrics(request) @@ -1350,6 +1352,7 @@ def test_get_log_metric_empty_call(): with mock.patch.object( type(client.transport.get_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1375,6 +1378,7 @@ def test_get_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1400,8 +1404,8 @@ def test_get_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc - request = {} client.get_log_metric(request) @@ -1732,6 +1736,7 @@ def test_create_log_metric_empty_call(): with mock.patch.object( type(client.transport.create_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1757,6 +1762,7 @@ def test_create_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1782,8 +1788,8 @@ def test_create_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.create_log_metric] = mock_rpc - request = {} client.create_log_metric(request) @@ -2124,6 +2130,7 @@ def test_update_log_metric_empty_call(): with mock.patch.object( type(client.transport.update_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2149,6 +2156,7 @@ def test_update_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2174,8 +2182,8 @@ def test_update_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_log_metric] = mock_rpc - request = {} client.update_log_metric(request) @@ -2501,6 +2509,7 @@ def test_delete_log_metric_empty_call(): with mock.patch.object( type(client.transport.delete_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2526,6 +2535,7 @@ def test_delete_log_metric_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_log_metric), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_log_metric(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2551,8 +2561,8 @@ def test_delete_log_metric_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_log_metric] = mock_rpc - request = {} client.delete_log_metric(request) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 679bd4533c20..0441f71b1219 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -818,6 +818,7 @@ def test_list_instances_empty_call(): with mock.patch.object( type(client.transport.list_instances), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] @@ -844,6 +845,7 @@ def test_list_instances_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_instances), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.list_instances(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -870,8 +872,8 @@ def test_list_instances_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc - request = {} client.list_instances(request) @@ -1419,6 +1421,7 @@ def test_get_instance_empty_call(): with mock.patch.object( type(client.transport.get_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1444,6 +1447,7 @@ def test_get_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1469,8 +1473,8 @@ def test_get_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc - request = {} client.get_instance(request) @@ -1849,6 +1853,7 @@ def test_get_instance_auth_string_empty_call(): with mock.patch.object( type(client.transport.get_instance_auth_string), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.get_instance_auth_string() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1874,6 +1879,7 @@ def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.get_instance_auth_string), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.get_instance_auth_string(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1899,8 +1905,8 @@ def test_get_instance_auth_string_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_instance_auth_string] = mock_rpc - request = {} client.get_instance_auth_string(request) @@ -2198,6 +2204,7 @@ def test_create_instance_empty_call(): with mock.patch.object( type(client.transport.create_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2224,6 +2231,7 @@ def test_create_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.create_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2250,8 +2258,8 @@ def test_create_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc - request = {} client.create_instance(request) @@ -2578,6 +2586,7 @@ def test_update_instance_empty_call(): with mock.patch.object( type(client.transport.update_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2602,6 +2611,7 @@ def test_update_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.update_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2626,8 +2636,8 @@ def test_update_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc - request = {} client.update_instance(request) @@ -2944,6 +2954,7 @@ def test_upgrade_instance_empty_call(): with mock.patch.object( type(client.transport.upgrade_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.upgrade_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2970,6 +2981,7 @@ def test_upgrade_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.upgrade_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.upgrade_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2996,8 +3008,8 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.upgrade_instance] = mock_rpc - request = {} client.upgrade_instance(request) @@ -3314,6 +3326,7 @@ def test_import_instance_empty_call(): with mock.patch.object( type(client.transport.import_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.import_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3339,6 +3352,7 @@ def test_import_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.import_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.import_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3364,8 +3378,8 @@ def test_import_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.import_instance] = mock_rpc - request = {} client.import_instance(request) @@ -3682,6 +3696,7 @@ def test_export_instance_empty_call(): with mock.patch.object( type(client.transport.export_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.export_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3707,6 +3722,7 @@ def test_export_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.export_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.export_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3732,8 +3748,8 @@ def test_export_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.export_instance] = mock_rpc - request = {} client.export_instance(request) @@ -4050,6 +4066,7 @@ def test_failover_instance_empty_call(): with mock.patch.object( type(client.transport.failover_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.failover_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4075,6 +4092,7 @@ def test_failover_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.failover_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.failover_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4100,8 +4118,8 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.failover_instance] = mock_rpc - request = {} client.failover_instance(request) @@ -4418,6 +4436,7 @@ def test_delete_instance_empty_call(): with mock.patch.object( type(client.transport.delete_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4443,6 +4462,7 @@ def test_delete_instance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_instance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.delete_instance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4468,8 +4488,8 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc - request = {} client.delete_instance(request) @@ -4776,6 +4796,7 @@ def test_reschedule_maintenance_empty_call(): with mock.patch.object( type(client.transport.reschedule_maintenance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.reschedule_maintenance() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4801,6 +4822,7 @@ def test_reschedule_maintenance_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.reschedule_maintenance), '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client.reschedule_maintenance(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4826,8 +4848,8 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.reschedule_maintenance] = mock_rpc - request = {} client.reschedule_maintenance(request) @@ -5162,6 +5184,7 @@ def test_list_instances_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc request = {} @@ -5537,6 +5560,7 @@ def test_get_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc request = {} @@ -5803,6 +5827,7 @@ def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.get_instance_auth_string] = mock_rpc request = {} @@ -6128,6 +6153,7 @@ def test_create_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc request = {} @@ -6471,6 +6497,7 @@ def test_update_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc request = {} @@ -6732,6 +6759,7 @@ def test_upgrade_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.upgrade_instance] = mock_rpc request = {} @@ -7000,6 +7028,7 @@ def test_import_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.import_instance] = mock_rpc request = {} @@ -7264,6 +7293,7 @@ def test_export_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.export_instance] = mock_rpc request = {} @@ -7528,6 +7558,7 @@ def test_failover_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.failover_instance] = mock_rpc request = {} @@ -7792,6 +7823,7 @@ def test_delete_instance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc request = {} @@ -8053,6 +8085,7 @@ def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): # Replace cached wrapped function with mock mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._transport._wrapped_methods[client._transport.reschedule_maintenance] = mock_rpc request = {} From 8d81ae0ba409b473b2e0b510906050ad6d519998 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 26 Apr 2024 12:30:55 -0400 Subject: [PATCH 1141/1339] chore(main): release 1.17.1 (#2018) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f6eac1c55ec3..48501fc0d83b 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.17.1](https://github.com/googleapis/gapic-generator-python/compare/v1.17.0...v1.17.1) (2024-04-26) + + +### Bug Fixes + +* Type error for compute client(s) tests ([#2014](https://github.com/googleapis/gapic-generator-python/issues/2014)) ([61f50f7](https://github.com/googleapis/gapic-generator-python/commit/61f50f76fcbc47ef2d0fd4686f9187d22c2e698e)) + ## [1.17.0](https://github.com/googleapis/gapic-generator-python/compare/v1.16.1...v1.17.0) (2024-04-18) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 4afb61797237..93ea88b1e6c4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = 
"https://github.com/googleapis/gapic-generator-python" -version = "1.17.0" +version = "1.17.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 1f5660423f5fec3d2a248fe5b03817f418a2cccb Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 8 May 2024 13:48:38 -0400 Subject: [PATCH 1142/1339] feat: add support for reading google.api.api_version (#1999) Co-authored-by: Victor Chudnovsky --- .../.github/workflows/tests.yaml | 2 +- .../%sub/services/%service/_shared_macros.j2 | 76 +++++++++++++++++++ .../%sub/services/%service/client.py.j2 | 28 ++----- .../%name_%version/%sub/test_%service.py.j2 | 45 ++++++++++- .../gapic-generator/gapic/schema/wrappers.py | 11 +++ .../%sub/services/%service/_client_macros.j2 | 29 +------ .../%sub/services/%service/_shared_macros.j2 | 70 +++++++++++++++++ .../%sub/services/%service/async_client.py.j2 | 8 +- .../%sub/services/%service/client.py.j2 | 6 +- .../%name_%version/%sub/test_%service.py.j2 | 46 ++++++++++- .../gapic/%name_%version/%sub/test_macros.j2 | 12 ++- packages/gapic-generator/noxfile.py | 2 +- .../gapic-generator/test_utils/test_utils.py | 3 + .../tests/fragments/google/api/client.proto | 16 ++++ .../tests/fragments/test_api_version.proto | 37 +++++++++ .../services/asset_service/async_client.py | 1 + .../unit/gapic/asset_v1/test_asset_service.py | 43 ++++++----- .../services/iam_credentials/async_client.py | 1 + .../credentials_v1/test_iam_credentials.py | 1 + .../services/eventarc/async_client.py | 1 + .../unit/gapic/eventarc_v1/test_eventarc.py | 25 +++--- .../config_service_v2/async_client.py | 1 + .../logging_service_v2/async_client.py | 1 + .../metrics_service_v2/async_client.py | 1 + .../logging_v2/test_config_service_v2.py | 31 ++++---- .../logging_v2/test_logging_service_v2.py | 15 ++-- .../logging_v2/test_metrics_service_v2.py | 7 +- .../services/cloud_redis/async_client.py | 1 + 
.../unit/gapic/redis_v1/test_cloud_redis.py | 7 +- .../tests/system/test_api_version_header.py | 42 ++++++++++ .../system/test_grpc_interceptor_streams.py | 12 ++- .../tests/system/test_streams.py | 50 +++++++----- .../unit/schema/wrappers/test_service.py | 10 +++ 33 files changed, 501 insertions(+), 140 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 create mode 100644 packages/gapic-generator/tests/fragments/test_api_version.proto create mode 100644 packages/gapic-generator/tests/system/test_api_version_header.py diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index e8a914f0e731..8be4f041d2e3 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -14,7 +14,7 @@ concurrency: cancel-in-progress: true env: - SHOWCASE_VERSION: 0.32.0 + SHOWCASE_VERSION: 0.35.0 PROTOC_VERSION: 3.20.2 jobs: diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 new file mode 100644 index 000000000000..e7f623cfd214 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 @@ -0,0 +1,76 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + # This file is a copy of `_shared_macros.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2` + # It is intended to be a symlink. + # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `_shared_macros.j2` in standard templates. +#} + +{% macro auto_populate_uuid4_fields(api, method) %} +{# + Automatically populate UUID4 fields according to + https://google.aip.dev/client-libraries/4235 when the + field satisfies either of: + - The field supports explicit presence and has not been set by the user. + - The field doesn't support explicit presence, and its value is the empty + string (i.e. the default value). 
+ When using this macro, ensure the calling template generates a line `import uuid` +#} +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + {% if method.input.fields[auto_populated_field].proto3_optional %} + if '{{ auto_populated_field }}' not in request: + {% else %} + if not request.{{ auto_populated_field }}: + {% endif %} + request.{{ auto_populated_field }} = str(uuid.uuid4()) +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} +{% endmacro %} + +{% macro add_google_api_core_version_header_import(service_version) %} +{# +The `version_header` module was added to `google-api-core` +in version 2.19.0. +https://github.com/googleapis/python-api-core/releases/tag/v2.19.0 +The `try/except` below can be removed once the minimum version of +`google-api-core` is 2.19.0 or newer. +#} +{% if service_version %} +try: + from google.api_core import version_header + HAS_GOOGLE_API_CORE_VERSION_HEADER = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + HAS_GOOGLE_API_CORE_VERSION_HEADER = False +{% endif %}{# service_version #} +{% endmacro %} +{% macro add_api_version_header_to_metadata(service_version) %} +{# + Add API Version to metadata as per https://github.com/aip-dev/google.aip.dev/pull/1331. + When using this macro, ensure the calling template also calls macro + `add_google_api_core_version_header_import` to add the necessary import statements. 
+#} + {% if service_version %} + if HAS_GOOGLE_API_CORE_VERSION_HEADER: # pragma: NO COVER + metadata = tuple(metadata) + ( + version_header.to_api_version_header("{{ service_version }}"), + ) + {% endif %}{# service_version #} +{% endmacro %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index aff47ad91302..e4b0af6dec6b 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -1,6 +1,7 @@ {% extends '_base.py.j2' %} {% block content %} +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} from collections import OrderedDict import os @@ -23,6 +24,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +{{ shared_macros.add_google_api_core_version_header_import(service.version) }} {% set package_path = api.naming.module_namespace|join('.') + "." 
+ api.naming.versioned_module_name %} from {{package_path}} import gapic_version as package_version @@ -94,7 +96,8 @@ class {{ service.client_name }}Meta(type): class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): - """{{ service.meta.doc|rst(width=72, indent=4) }}""" + """{{ service.meta.doc|rst(width=72, indent=4) }}{% if service.version|length %} + This class implements API version {{ service.version }}.{% endif %}""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): @@ -475,27 +478,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): )), ) {% endif %} - -{# - Automatically populate UUID4 fields according to - https://google.aip.dev/client-libraries/4235 when the - field satisfies either of: - - The field supports explicit presence and has not been set by the user. - - The field doesn't support explicit presence, and its value is the empty - string (i.e. the default value). -#} -{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} -{% if method_settings is not none %} -{% for auto_populated_field in method_settings.auto_populated_fields %} - {% if method.input.fields[auto_populated_field].proto3_optional %} - if '{{ auto_populated_field }}' not in request: - {% else %} - if not request.{{ auto_populated_field }}: - {% endif %} - request.{{ auto_populated_field }} = str(uuid.uuid4()) -{% endfor %} -{% endif %}{# if method_settings is not none #} -{% endwith %}{# method_settings #} +{{ shared_macros.add_api_version_header_to_metadata(service.version) }} +{{ shared_macros.auto_populate_uuid4_fields(api, method) }} # Send the request. 
{%+ if not method.void %}response = {% endif %}rpc( diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index af148882f4c9..2a6af5ac1efa 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1,6 +1,7 @@ {% extends "_base.py.j2" %} {% block content %} +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} import os {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} @@ -39,6 +40,7 @@ from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import transports +from google.api_core import api_core_version from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers @@ -69,6 +71,8 @@ from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} +{{ shared_macros.add_google_api_core_version_header_import(service.version) }} + {% with uuid4_re = "[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" %} @@ -636,6 +640,35 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% endwith %}{# auto_populated_field_sample_value #} +{% if service.version %} +@pytest.mark.parametrize("transport_name", [ + {% if 'grpc' in opts.transport %} 
+ ("grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ("rest"), + {% endif %} +]) +def test_{{ method_name }}_api_version_header(transport_name): + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.19.0 or higher. + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 19): + client = {{ service.client_name }}(credentials=ga_credentials.AnonymousCredentials(), transport=transport_name) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__' + ) as call: + client.{{ method_name }}() + + # Establish that the api version header was sent. + _, _, kw = call.mock_calls[0] + assert kw['metadata'][0] == (version_header.API_VERSION_METADATA_KEY, "{{ service.version }}") + else: + pytest.skip("google-api-core>=2.19.0 is required for `google.api_core.version_header`") +{% endif %}{# service.version #} + {% if not method.client_streaming %} def test_{{ method_name }}_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -904,9 +937,9 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () {% if method.field_headers %} - metadata = tuple(metadata) + ( + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} @@ -918,7 +951,13 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): {% endif %} pager = client.{{ method_name }}(request={}) - assert pager._metadata == metadata + {% if service.version %} + if HAS_GOOGLE_API_CORE_VERSION_HEADER: + expected_metadata = tuple(expected_metadata) + ( + version_header.to_api_version_header("{{ service.version 
}}"), + ) + {% endif %} + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index c7396ca5e910..fd75e8fd5ec0 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1734,6 +1734,17 @@ def host(self) -> str: return self.options.Extensions[client_pb2.default_host] return '' + @property + def version(self) -> str: + """Return the API version for this service, if specified. + + Returns: + str: The API version for this service. + """ + if self.options.Extensions[client_pb2.api_version]: + return self.options.Extensions[client_pb2.api_version] + return '' + @property def shortname(self) -> str: """Return the API short name. DRIFT uses this to identify diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 25b3ef6fcaa3..128421655e08 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -14,6 +14,8 @@ # limitations under the License. #} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + {% macro client_method(method, name, snippet_index, api, service, full_extended_lro=False) %} def {{ name }}(self, {% if not method.client_streaming %} @@ -181,7 +183,8 @@ ) {% endif %} {# method.explicit_routing #} -{{ auto_populate_uuid4_fields(api, method) }} +{{ shared_macros.add_api_version_header_to_metadata(service.version) }} +{{ shared_macros.auto_populate_uuid4_fields(api, method) }} # Validate the universe domain. 
self._validate_universe_domain() @@ -265,27 +268,3 @@ {% macro define_extended_operation_subclass(extended_operation) %} {% endmacro %} - -{% macro auto_populate_uuid4_fields(api, method) %} -{# - Automatically populate UUID4 fields according to - https://google.aip.dev/client-libraries/4235 when the - field satisfies either of: - - The field supports explicit presence and has not been set by the user. - - The field doesn't support explicit presence, and its value is the empty - string (i.e. the default value). - When using this macro, ensure the calling template generates a line `import uuid` -#} -{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} -{% if method_settings is not none %} -{% for auto_populated_field in method_settings.auto_populated_fields %} - {% if method.input.fields[auto_populated_field].proto3_optional %} - if '{{ auto_populated_field }}' not in request: - {% else %} - if not request.{{ auto_populated_field }}: - {% endif %} - request.{{ auto_populated_field }} = str(uuid.uuid4()) -{% endfor %} -{% endif %}{# if method_settings is not none #} -{% endwith %}{# method_settings #} -{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 new file mode 100644 index 000000000000..14764da17ed5 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -0,0 +1,70 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +#} + +{% macro auto_populate_uuid4_fields(api, method) %} +{# + Automatically populate UUID4 fields according to + https://google.aip.dev/client-libraries/4235 when the + field satisfies either of: + - The field supports explicit presence and has not been set by the user. + - The field doesn't support explicit presence, and its value is the empty + string (i.e. the default value). + When using this macro, ensure the calling template generates a line `import uuid` +#} +{% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} +{% if method_settings is not none %} +{% for auto_populated_field in method_settings.auto_populated_fields %} + {% if method.input.fields[auto_populated_field].proto3_optional %} + if '{{ auto_populated_field }}' not in request: + {% else %} + if not request.{{ auto_populated_field }}: + {% endif %} + request.{{ auto_populated_field }} = str(uuid.uuid4()) +{% endfor %} +{% endif %}{# if method_settings is not none #} +{% endwith %}{# method_settings #} +{% endmacro %} + +{% macro add_google_api_core_version_header_import(service_version) %} +{# +The `version_header` module was added to `google-api-core` +in version 2.19.0. +https://github.com/googleapis/python-api-core/releases/tag/v2.19.0 +The `try/except` below can be removed once the minimum version of +`google-api-core` is 2.19.0 or newer. 
+#} +{% if service_version %} +try: + from google.api_core import version_header + HAS_GOOGLE_API_CORE_VERSION_HEADER = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + HAS_GOOGLE_API_CORE_VERSION_HEADER = False +{% endif %}{# service_version #} +{% endmacro %} + +{% macro add_api_version_header_to_metadata(service_version) %} +{# + Add API Version to metadata as per https://github.com/aip-dev/google.aip.dev/pull/1331. + When using this macro, ensure the calling template also calls macro + `add_google_api_core_version_header_import` to add the necessary import statements. +#} + {% if service_version %} + if HAS_GOOGLE_API_CORE_VERSION_HEADER: # pragma: NO COVER + metadata = tuple(metadata) + ( + version_header.to_api_version_header("{{ service_version }}"), + ) + {% endif %}{# service_version #} +{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 66ac6a70b51c..b63cb1e99f4f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -2,6 +2,7 @@ {% block content %} {% import "%namespace/%name_%version/%sub/services/%service/_client_macros.j2" as macros %} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} from collections import OrderedDict import functools @@ -24,6 +25,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +{{ shared_macros.add_google_api_core_version_header_import(service.version) }} try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except 
AttributeError: # pragma: NO COVER @@ -54,7 +56,8 @@ from .client import {{ service.client_name }} {# TODO(yon-mg): handle rest transport async client interaction #} class {{ service.async_client_name }}: - """{{ service.meta.doc|rst(width=72, indent=4) }}""" + """{{ service.meta.doc|rst(width=72, indent=4) }}{% if service.version|length %} + This class implements API version {{ service.version }}.{% endif %}""" _client: {{ service.client_name }} @@ -376,7 +379,8 @@ class {{ service.async_client_name }}: ) {% endif %} -{{ macros.auto_populate_uuid4_fields(api, method) }} +{{ shared_macros.add_api_version_header_to_metadata(service.version) }} +{{ shared_macros.auto_populate_uuid4_fields(api, method) }} # Validate the universe domain. self._client._validate_universe_domain() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index e047b0d55a67..eaa572f6e31b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -2,6 +2,7 @@ {% block content %} {% import "%namespace/%name_%version/%sub/services/%service/_client_macros.j2" as macros %} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} from collections import OrderedDict {% if service.any_extended_operations_methods %} @@ -30,7 +31,7 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore - +{{ shared_macros.add_google_api_core_version_header_import(service.version) }} try: OptionalRetry = Union[retries.Retry, 
gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -102,7 +103,8 @@ class {{ service.client_name }}Meta(type): class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): - """{{ service.meta.doc|rst(width=72, indent=4) }}""" + """{{ service.meta.doc|rst(width=72, indent=4) }}{% if service.version|length %} + This class implements API version {{ service.version }}.{% endif %}""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index ebc96487a1a3..edf2588ecd89 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -2,6 +2,7 @@ {% block content %} {% import "tests/unit/gapic/%name_%version/%sub/test_macros.j2" as test_macros %} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} import os {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} @@ -77,7 +78,7 @@ from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore {% endif %} {% endfilter %} - +{{ shared_macros.add_google_api_core_version_header_import(service.version) }} def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -178,6 +179,49 @@ def test__get_api_endpoint(): {{ service.client_name }}._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+{% if service.version %} +{% for method in service.methods.values() %}{% with method_name = method.name|snake_case %} +{% for mode in ["", "async"] %} +{% if mode == "async" %} +async def test_{{ method_name }}_api_version_header_async(transport_name="grpc"): + client = {{ service.async_client_name }}(credentials=ga_credentials.AnonymousCredentials(), transport=transport_name) +{% else %} +@pytest.mark.parametrize("transport_name", [ + {% if 'grpc' in opts.transport %} + ("grpc"), + {% endif %} + {% if 'rest' in opts.transport %} + ("rest"), + {% endif %} +]) +def test_{{ method_name }}_api_version_header(transport_name): + client = {{ service.client_name }}(credentials=ga_credentials.AnonymousCredentials(), transport=transport_name) +{% endif %} + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.19.0 or higher. + api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 19): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__' + ) as call: + {% if mode == "async" %} + await client.{{ method_name }}() + {% else %} + client.{{ method_name }}() + {% endif %} + + # Establish that the api version header was sent. 
+ _, _, kw = call.mock_calls[0] + assert kw['metadata'][0] == (version_header.API_VERSION_METADATA_KEY, "{{ service.version }}") + else: + pytest.skip("google-api-core>=2.19.0 is required for `google.api_core.version_header`") +{% endfor %}{# mode #} +{% endwith %} +{% endfor %} +{% endif %}{# service.version #} + def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 9cb64a3d2992..3d6fb7fd7d58 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -853,9 +853,9 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () {% if not method.explicit_routing and method.field_headers %} - metadata = tuple(metadata) + ( + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( {% for field_header in method.field_headers %} {% if not method.client_streaming %} @@ -865,9 +865,15 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): )), ) {% endif %} + {% if service.version %} + if HAS_GOOGLE_API_CORE_VERSION_HEADER: + expected_metadata = tuple(expected_metadata) + ( + version_header.to_api_version_header("{{ service.version }}"), + ) + {% endif %} pager = client.{{ method_name }}(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index a7e49ac43278..9eba5d7c796f 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -29,7 +29,7 @@ 
nox.options.error_on_missing_interpreters = True -showcase_version = os.environ.get("SHOWCASE_VERSION", "0.32.0") +showcase_version = os.environ.get("SHOWCASE_VERSION", "0.35.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index ac186d293e78..f23a7cec41d0 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -32,6 +32,7 @@ def make_service( visible_resources: typing.Optional[ typing.Mapping[str, wrappers.CommonResource] ] = None, + version: str = "", ) -> wrappers.Service: visible_resources = visible_resources or {} # Define a service descriptor, and set a host and oauth scopes if @@ -40,6 +41,8 @@ def make_service( if host: service_pb.options.Extensions[client_pb2.default_host] = host service_pb.options.Extensions[client_pb2.oauth_scopes] = ','.join(scopes) + if version: + service_pb.options.Extensions[client_pb2.api_version] = version # Return a service object to test. return wrappers.Service( diff --git a/packages/gapic-generator/tests/fragments/google/api/client.proto b/packages/gapic-generator/tests/fragments/google/api/client.proto index 2102623d305f..f7781254b6e4 100644 --- a/packages/gapic-generator/tests/fragments/google/api/client.proto +++ b/packages/gapic-generator/tests/fragments/google/api/client.proto @@ -96,4 +96,20 @@ extend google.protobuf.ServiceOptions { // ... // } string oauth_scopes = 1050; + + // The API version of this service, which should be sent by version-aware + // clients to the service. This allows services to abide by the schema and + // behavior of the service at the time this API version was deployed. + // The format of the API version must be treated as opaque by clients. 
+ // Services may use a format with an apparent structure, but clients must + // not rely on this to determine components within an API version, or attempt + // to construct other valid API versions. Note that this is for upcoming + // functionality and may not be implemented for all services. + // + // Example: + // + // service Foo { + // option (google.api.api_version) = "v1_20230821_preview"; + // } + string api_version = 525000001; } diff --git a/packages/gapic-generator/tests/fragments/test_api_version.proto b/packages/gapic-generator/tests/fragments/test_api_version.proto new file mode 100644 index 000000000000..d75134d8457b --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_api_version.proto @@ -0,0 +1,37 @@ +// Copyright (C) 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/protobuf/struct.proto"; +import "google/api/client.proto"; + +service MyServiceWithVersion { + option (google.api.api_version) = "v1_20230601"; + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.method_signature) = "parameter"; + } +} + +message MethodRequest { + google.protobuf.Value parameter = 1; +} + +message MethodResponse { + google.protobuf.Value result = 1; +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index dc47b46ca899..2985521eab84 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 22f07d036865..a304fe8da464 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -160,6 +160,7 @@ def test__get_api_endpoint(): AssetServiceClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS 
is not supported in any universe other than googleapis.com." + def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" @@ -1439,15 +1440,15 @@ def test_list_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4095,15 +4096,15 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) pager = client.search_all_resources(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4657,15 +4658,15 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) pager = client.search_all_iam_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -7067,15 +7068,15 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_saved_queries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata 
results = list(pager) assert len(results) == 6 @@ -8629,15 +8630,15 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) pager = client.analyze_org_policies(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9201,15 +9202,15 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) pager = client.analyze_org_policy_governed_containers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9773,15 +9774,15 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) pager = client.analyze_org_policy_governed_assets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 151f89ed2234..d61293a8bea4 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 9e86be34700f..cc8db8a42c8f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -150,6 +150,7 @@ def test__get_api_endpoint(): IAMCredentialsClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 9fc21688adab..b2b3a07b7957 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 06b205c1d133..ae832fb35ec2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -170,6 +170,7 @@ def test__get_api_endpoint(): EventarcClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" @@ -1556,15 +1557,15 @@ def test_list_triggers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_triggers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3623,15 +3624,15 @@ def test_list_channels_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_channels(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5649,15 +5650,15 @@ def test_list_providers_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_providers(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -6571,15 +6572,15 @@ def test_list_channel_connections_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_channel_connections(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index e93c0db60340..8c73903d7c87 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 1c844f9ee0f3..97bd907c9321 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 681c965ed934..4b937e15358e 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 138133b22d81..2219af1173b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -150,6 +150,7 @@ def test__get_api_endpoint(): ConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" @@ -1140,15 +1141,15 @@ def test_list_buckets_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_buckets(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3678,15 +3679,15 @@ def test_list_views_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_views(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -5338,15 +5339,15 @@ def test_list_sinks_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_sinks(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -8171,15 +8172,15 @@ def test_list_links_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_links(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -9083,15 +9084,15 @@ def test_list_exclusions_pager(transport_name: 
str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_exclusions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 73f543d55de3..992c8836f759 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -151,6 +151,7 @@ def test__get_api_endpoint(): LoggingServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" @@ -1763,10 +1764,10 @@ def test_list_log_entries_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_log_entries(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2159,10 +2160,10 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") RuntimeError, ) - metadata = () + expected_metadata = () pager = client.list_monitored_resource_descriptors(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2707,15 +2708,15 @@ def test_list_logs_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_logs(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index be4325c6782f..8acc9a12e81d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -149,6 +149,7 @@ def test__get_api_endpoint(): MetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" @@ -1140,15 +1141,15 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_log_metrics(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index cd02bb551ffd..38b770944870 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -27,6 +27,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 0441f71b1219..f22a2c227edd 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -161,6 +161,7 @@ def test__get_api_endpoint(): CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") assert 
str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." + def test__get_universe_domain(): client_universe_domain = "foo.com" universe_domain_env = "bar.com" @@ -1169,15 +1170,15 @@ def test_list_instances_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) pager = client.list_instances(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/system/test_api_version_header.py b/packages/gapic-generator/tests/system/test_api_version_header.py new file mode 100644 index 000000000000..5fcb4be517af --- /dev/null +++ b/packages/gapic-generator/tests/system/test_api_version_header.py @@ -0,0 +1,42 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import pytest + +try: + from google.api_core import version_header +except ImportError: + version_header = None + + +def test_api_version_in_grpc_trailing_metadata(echo): + if not version_header: + pytest.skip( + "google-api-core>=2.19.0 is required for `google.api_core.version_header`" + ) + + # This feature requires version 0.35.0 of `gapic-showcase` or newer which has the + # ability to echo request headers + content = 'The hail in Wales falls mainly on the snails.' + responses = echo.expand({ + 'content': content, + }) + if isinstance(echo.transport, type(echo).get_transport_class("grpc")): + response_metadata = [ + (metadata.key, metadata.value) + for metadata in responses.trailing_metadata() + ] + assert ("x-goog-api-version", "v1_20240408") in response_metadata + else: + assert "X-Showcase-Request-X-Goog-Api-Version" in responses._response.headers + assert responses._response.headers["X-Showcase-Request-X-Goog-Api-Version"] == "v1_20240408" diff --git a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py index 4b40b7611fca..cbfa72d2ce58 100644 --- a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py @@ -31,7 +31,11 @@ def test_unary_stream(intercepted_echo): assert response.content == ground_truth assert ground_truth == 'snails.' 
- assert responses.trailing_metadata() == intercepted_metadata + response_metadata = [ + (metadata.key, metadata.value) + for metadata in responses.trailing_metadata() + ] + assert intercepted_metadata[0] in response_metadata def test_stream_stream(intercepted_echo): @@ -43,4 +47,8 @@ def test_stream_stream(intercepted_echo): contents = [response.content for response in responses] assert contents == ['hello', 'world!'] - assert responses.trailing_metadata() == intercepted_metadata + response_metadata = [ + (metadata.key, metadata.value) + for metadata in responses.trailing_metadata() + ] + assert intercepted_metadata[0] in response_metadata diff --git a/packages/gapic-generator/tests/system/test_streams.py b/packages/gapic-generator/tests/system/test_streams.py index 8294e6795836..aa8c84c84c7e 100644 --- a/packages/gapic-generator/tests/system/test_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -19,14 +19,14 @@ from google import showcase -metadata = (("showcase-trailer", "hello world"),) +_METADATA = (("showcase-trailer", "hello world"),) def test_unary_stream(echo): content = 'The hail in Wales falls mainly on the snails.' responses = echo.expand({ 'content': content, - }, metadata=metadata) + }, metadata=_METADATA) # Consume the response and ensure it matches what we expect. # with pytest.raises(exceptions.NotFound) as exc: @@ -34,7 +34,15 @@ def test_unary_stream(echo): assert response.content == ground_truth assert ground_truth == 'snails.' 
if isinstance(echo.transport, type(echo).get_transport_class("grpc")): - assert responses.trailing_metadata() == metadata + response_metadata = [ + (metadata.key, metadata.value) + for metadata in responses.trailing_metadata() + ] + assert _METADATA[0] in response_metadata + else: + showcase_header = f"X-Showcase-Request-{_METADATA[0][0]}" + assert showcase_header in responses._response.headers + assert responses._response.headers[showcase_header] == _METADATA[0][1] def test_stream_unary(echo): @@ -67,14 +75,18 @@ def test_stream_stream(echo): requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) - responses = echo.chat(iter(requests), metadata=metadata) + responses = echo.chat(iter(requests), metadata=_METADATA) contents = [] for response in responses: contents.append(response.content) assert contents == ['hello', 'world!'] - assert responses.trailing_metadata() == metadata + response_metadata = [ + (metadata.key, metadata.value) + for metadata in responses.trailing_metadata() + ] + assert _METADATA[0] in response_metadata def test_stream_stream_passing_dict(echo): @@ -83,14 +95,18 @@ def test_stream_stream_passing_dict(echo): return requests = [{'content': 'hello'}, {'content': 'world!'}] - responses = echo.chat(iter(requests), metadata=metadata) + responses = echo.chat(iter(requests), metadata=_METADATA) contents = [] for response in responses: contents.append(response.content) assert contents == ['hello', 'world!'] - assert responses.trailing_metadata() == metadata + response_metadata = [ + (metadata.key, metadata.value) + for metadata in responses.trailing_metadata() + ] + assert _METADATA[0] in response_metadata if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @@ -101,7 +117,7 @@ async def test_async_unary_stream_reader(async_echo): content = 'The hail in Wales falls mainly on the snails.' 
call = await async_echo.expand({ 'content': content, - }, metadata=metadata) + }, metadata=_METADATA) # Consume the response and ensure it matches what we expect. # with pytest.raises(exceptions.NotFound) as exc: @@ -111,14 +127,14 @@ async def test_async_unary_stream_reader(async_echo): assert ground_truth == 'snails.' trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_unary_stream_async_generator(async_echo): content = 'The hail in Wales falls mainly on the snails.' call = await async_echo.expand({ 'content': content, - }, metadata=metadata) + }, metadata=_METADATA) # Consume the response and ensure it matches what we expect. # with pytest.raises(exceptions.NotFound) as exc: @@ -129,7 +145,7 @@ async def test_async_unary_stream_async_generator(async_echo): assert ground_truth == 'snails.' trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_stream_unary_iterable(async_echo): @@ -171,7 +187,7 @@ async def test_async_stream_unary_passing_dict(async_echo): @pytest.mark.asyncio async def test_async_stream_stream_reader_writier(async_echo): - call = await async_echo.chat(metadata=metadata) + call = await async_echo.chat(metadata=_METADATA) await call.write(showcase.EchoRequest(content="hello")) await call.write(showcase.EchoRequest(content="world!")) await call.done_writing() @@ -183,7 +199,7 @@ async def test_async_stream_stream_reader_writier(async_echo): assert contents == ['hello', 'world!'] trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_stream_stream_async_generator(async_echo): @@ -192,7 +208,7 @@ async def async_generator(): yield showcase.EchoRequest(content="hello") 
yield showcase.EchoRequest(content="world!") - call = await async_echo.chat(async_generator(), metadata=metadata) + call = await async_echo.chat(async_generator(), metadata=_METADATA) contents = [] async for response in call: @@ -200,12 +216,12 @@ async def async_generator(): assert contents == ['hello', 'world!'] trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_stream_stream_passing_dict(async_echo): requests = [{'content': 'hello'}, {'content': 'world!'}] - call = await async_echo.chat(iter(requests), metadata=metadata) + call = await async_echo.chat(iter(requests), metadata=_METADATA) contents = [] async for response in call: @@ -213,4 +229,4 @@ async def test_async_stream_stream_passing_dict(async_echo): assert contents == ['hello', 'world!'] trailing_metadata = await call.trailing_metadata() - assert trailing_metadata == metadata + assert _METADATA[0] in trailing_metadata.items() diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 2664167c7264..98a5ce1f3e96 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -71,6 +71,16 @@ def test_service_host(): assert service.host == 'thingdoer.googleapis.com' +def test_service_api_version_not_specified(): + service = make_service(host='thingdoer.googleapis.com') + assert not service.version + + +def test_service_api_version_exists(): + service = make_service(host='thingdoer.googleapis.com', version="goose") + assert service.version == "goose" + + def test_service_no_host(): service = make_service() assert not service.host From ae1cf6f91c6ba74ef21cbc39792730951165372f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 8 May 2024 19:49:30 +0200 Subject: [PATCH 1143/1339] 
chore(deps): update dependency jinja2 to v3.1.4 [security] (#2026) --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0a8cb6f8ae87..bd5a418ec50e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -201,9 +201,9 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +Jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via -r requirements.in libcst==1.2.0 \ --hash=sha256:0cb92398236566f0b73a0c73f8a41a9c4906c793e8f7c2745f30e3fb141a34b5 \ From 7e1b198ea139f85038441f086a3a97597e647132 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 8 May 2024 18:09:19 +0000 Subject: [PATCH 1144/1339] chore(main): release 1.18.0 (#2029) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 48501fc0d83b..37402bc9971e 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.18.0](https://github.com/googleapis/gapic-generator-python/compare/v1.17.1...v1.18.0) (2024-05-08) + + +### 
Features + +* Add support for reading google.api.api_version ([#1999](https://github.com/googleapis/gapic-generator-python/issues/1999)) ([b2486e5](https://github.com/googleapis/gapic-generator-python/commit/b2486e5630312fb01b9eb5ffb09c9f0328fbce20)) + ## [1.17.1](https://github.com/googleapis/gapic-generator-python/compare/v1.17.0...v1.17.1) (2024-04-26) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 93ea88b1e6c4..1f6927c07026 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.17.1" +version = "1.18.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 035f79c641b28284ac3b44fe160238575dc34d1e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 20 Jun 2024 12:03:50 -0400 Subject: [PATCH 1145/1339] fix: fix credentials typing issue in transport layer (#2043) --- .../%version/%sub/services/%service/transports/base.py.j2 | 5 ++++- .../%version/%sub/services/%service/transports/grpc.py.j2 | 3 ++- .../%sub/services/%service/transports/base.py.j2 | 4 +++- .../%sub/services/%service/transports/grpc.py.j2 | 3 ++- .../%sub/services/%service/transports/grpc_asyncio.py.j2 | 3 ++- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 2 ++ packages/gapic-generator/gapic/utils/lines.py | 2 +- packages/gapic-generator/noxfile.py | 3 +++ .../cloud/asset_v1/services/asset_service/transports/base.py | 4 +++- .../cloud/asset_v1/services/asset_service/transports/grpc.py | 3 ++- .../services/asset_service/transports/grpc_asyncio.py | 3 ++- .../tests/integration/goldens/asset/noxfile.py | 2 ++ .../services/iam_credentials/transports/base.py | 4 +++- .../services/iam_credentials/transports/grpc.py | 3 ++- 
.../services/iam_credentials/transports/grpc_asyncio.py | 3 ++- .../tests/integration/goldens/credentials/noxfile.py | 2 ++ .../cloud/eventarc_v1/services/eventarc/transports/base.py | 4 +++- .../cloud/eventarc_v1/services/eventarc/transports/grpc.py | 3 ++- .../eventarc_v1/services/eventarc/transports/grpc_asyncio.py | 3 ++- .../tests/integration/goldens/eventarc/noxfile.py | 2 ++ .../logging_v2/services/config_service_v2/transports/base.py | 4 +++- .../logging_v2/services/config_service_v2/transports/grpc.py | 3 ++- .../services/config_service_v2/transports/grpc_asyncio.py | 3 ++- .../services/logging_service_v2/transports/base.py | 4 +++- .../services/logging_service_v2/transports/grpc.py | 3 ++- .../services/logging_service_v2/transports/grpc_asyncio.py | 3 ++- .../services/metrics_service_v2/transports/base.py | 4 +++- .../services/metrics_service_v2/transports/grpc.py | 3 ++- .../services/metrics_service_v2/transports/grpc_asyncio.py | 3 ++- .../tests/integration/goldens/logging/noxfile.py | 2 ++ .../cloud/redis_v1/services/cloud_redis/transports/base.py | 4 +++- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 3 ++- .../redis_v1/services/cloud_redis/transports/grpc_asyncio.py | 3 ++- .../tests/integration/goldens/redis/noxfile.py | 2 ++ 34 files changed, 78 insertions(+), 27 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index fd7832ef81f1..d4364e44f3e3 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -103,6 +103,9 @@ class {{ service.name }}Transport(abc.ABC): # Save the scopes. 
self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -114,7 +117,7 @@ class {{ service.name }}Transport(abc.ABC): **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # If the credentials are service account credentials, then always try to use self signed JWT. diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 index 9d4a91aa032a..02d75444ffda 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/grpc.py.j2 @@ -133,7 +133,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if channel: # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 316eb84f9675..fc4b16f6c8ec 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -107,6 +107,8 @@ class {{ service.name }}Transport(abc.ABC): # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -119,7 +121,7 @@ class {{ service.name }}Transport(abc.ABC): **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 268ea2ea5f23..69b92f162e98 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -137,7 +137,8 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
- credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index ab14820095f2..8a3bc140bce5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -183,7 +183,8 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index db669133511f..271c7226370a 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -156,6 +156,8 @@ def mypy(session): 'types-protobuf' ) session.install('.') + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index fb24e19351ce..1a5b5ce8e372 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -191,7 +191,7 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 break_long_words=False, initial_indent=' ' * indent, # ensure that subsequent lines for lists are indented 2 spaces - subsequent_indent=' ' * indent + \ + subsequent_indent=' ' * indent + ' ' * get_subsequent_line_indentation_level(token.strip()), text=token, width=width, diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 9eba5d7c796f..41fec240bc3e 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -272,6 +272,9 @@ def showcase_library( # See https://github.com/googleapis/gapic-generator-python/issues/1788 # Install the library without a constraints file. 
session.install("-e", tmp_dir) + + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") yield tmp_dir diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 5e4aa9aab846..40735ff6b593 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -83,6 +83,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -95,7 +97,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index a9862888e2c7..63790c16d115 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -123,7 +123,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 46532f8549b1..2ff6022256c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -169,7 +169,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 0872592be30d..383128c9f679 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -167,6 +167,8 @@ def mypy(session): 'types-protobuf' ) session.install('.') + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 2f1aa66aaccc..1c7ac928006f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -80,6 +80,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -92,7 +94,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index d07061919a29..b0dd5bb66940 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -128,7 +128,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 4a4809a095cf..447c2e3bafe0 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -174,7 +174,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 93241d052f48..bb492879f52f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -167,6 +167,8 @@ def mypy(session): 'types-protobuf' ) session.install('.') + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 6113e4beaa74..86ab6167d49b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -91,6 +91,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -103,7 +105,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index bf51d459464c..54dc68143560 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -133,7 +133,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 271fbf8ada0d..864708c68f6b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -179,7 +179,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 0e5ae5f08614..6821d9d1b263 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -167,6 +167,8 @@ def mypy(session): 'types-protobuf' ) session.install('.') + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 9f1f2ab45f05..26cbb2b350ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -86,6 +86,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -98,7 +100,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. 
if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 81818e1ca6d9..08c0c4707bb5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -123,7 +123,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 0a8dca763fb7..f804863e9048 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -169,7 +169,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index c9850fac55f2..3ec63b61486a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -86,6 +86,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -98,7 +100,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 87498f9bc131..aff5aad6e120 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -121,7 +121,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. 
- credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 337b490b1117..9817b9fd82ec 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -167,7 +167,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index c347164816f9..941b7050f7fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -86,6 +86,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -98,7 +100,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index ffe49e97d4e9..04c8ab051225 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -121,7 +121,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 4715a47d0d16..e5ff12d01339 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -167,7 +167,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 0292d4feb399..54f31479ec3d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -167,6 +167,8 @@ def mypy(session): 'types-protobuf' ) session.install('.') + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 7c916ad9daa9..b8a9fa84b5c4 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -83,6 +83,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -95,7 +97,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) # Don't apply audience if the credentials file passed from user. if hasattr(credentials, "with_gdch_audience"): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 916d2cc75c57..87b5e7e0c06a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -143,7 +143,8 @@ def __init__(self, *, if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index b60d1965524f..e5c8f727eb8e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -189,7 +189,8 @@ def __init__(self, *, if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index eee3aad5cd31..f16c08211def 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -167,6 +167,8 @@ def mypy(session): 'types-protobuf' ) session.install('.') + # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged + session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', From e5e5d26a0a98139c4ca447ba740d88f7eba7eb25 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 20 Jun 2024 12:50:17 -0400 Subject: [PATCH 1146/1339] fix: allow protobuf 5.x (#2042) --- .../gapic/templates/noxfile.py.j2 | 35 ++++++++++++++++--- .../gapic/templates/setup.py.j2 | 2 +- .../templates/testing/constraints-3.7.txt.j2 | 2 +- packages/gapic-generator/requirements.in | 2 +- packages/gapic-generator/requirements.txt | 2 +- 
packages/gapic-generator/setup.py | 2 +- .../integration/goldens/asset/noxfile.py | 35 ++++++++++++++++--- .../tests/integration/goldens/asset/setup.py | 2 +- .../goldens/asset/testing/constraints-3.7.txt | 2 +- .../goldens/credentials/noxfile.py | 35 ++++++++++++++++--- .../integration/goldens/credentials/setup.py | 2 +- .../credentials/testing/constraints-3.7.txt | 2 +- .../integration/goldens/eventarc/noxfile.py | 35 ++++++++++++++++--- .../integration/goldens/eventarc/setup.py | 2 +- .../eventarc/testing/constraints-3.7.txt | 2 +- .../integration/goldens/logging/noxfile.py | 35 ++++++++++++++++--- .../integration/goldens/logging/setup.py | 2 +- .../logging/testing/constraints-3.7.txt | 2 +- .../integration/goldens/redis/noxfile.py | 35 ++++++++++++++++--- .../tests/integration/goldens/redis/setup.py | 2 +- .../goldens/redis/testing/constraints-3.7.txt | 2 +- 21 files changed, 195 insertions(+), 45 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 271c7226370a..2c2b7921c223 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -43,11 +43,23 @@ nox.sessions = [ ] @nox.session(python=ALL_PYTHON) -def unit(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): """Run the unit test suite.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") session.run( 'py.test', @@ -57,13 +69,23 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) @nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install test environment dependencies session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -131,7 +153,10 @@ def prerelease_deps(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 667e166d5e03..48427184f958 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -36,7 +36,7 @@ dependencies = [ "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} {# Quick check to make sure the package is different from this setup.py #} {% if api.naming.warehouse_package_name != package_info.package_name %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 6975b517a21d..3dbaa6f3825b 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -8,7 +8,7 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 -protobuf==3.19.5 +protobuf==3.20.2 {% for package_tuple, package_info in pypi_packages.items() %} {# Quick check to make sure the package is different from this setup.py #} {% if api.naming.warehouse_package_name != package_info.package_name %} diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in index 1c391632e542..db955dc7ec22 100644 --- a/packages/gapic-generator/requirements.in +++ b/packages/gapic-generator/requirements.in @@ -3,7 +3,7 @@ google-api-core googleapis-common-protos jinja2 MarkupSafe -protobuf<5 # See https://github.com/googleapis/gapic-generator-python/issues/1996 +protobuf pypandoc PyYAML grpc-google-iam-v1 diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index bd5a418ec50e..3747c0916e7b 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -201,7 +201,7 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -Jinja2==3.1.4 \ +jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ 
--hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via -r requirements.in diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1f6927c07026..887b6b94feda 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -35,7 +35,7 @@ # https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.map # https://jinja.palletsprojects.com/en/2.11.x/changelog/#version-2-11-0 "jinja2 >= 2.11", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 383128c9f679..d6472c1d9812 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -54,11 +54,23 @@ ] @nox.session(python=ALL_PYTHON) -def unit(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): """Run the unit test suite.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") session.run( 'py.test', @@ -68,13 +80,23 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) @nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install test environment dependencies session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -142,7 +164,10 @@ def prerelease_deps(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 3399343f4786..931351be5fa8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -44,7 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", "google-cloud-os-config >= 1.0.0, <2.0.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index fa6caa7b094e..aa4420733c50 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -7,7 +7,7 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 -protobuf==3.19.5 +protobuf==3.20.2 google-cloud-access-context-manager==0.1.2 google-cloud-os-config==1.0.0 grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index bb492879f52f..5aeea54e57d5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -54,11 +54,23 @@ ] @nox.session(python=ALL_PYTHON) -def unit(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): """Run the unit test suite.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") session.run( 'py.test', @@ -68,13 +80,23 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) @nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install test environment dependencies session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -142,7 +164,10 @@ def prerelease_deps(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 86e0eac8568e..28f72ad6962c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -44,7 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-iam-credentials" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index b8a550c73855..fc812592b0ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -7,4 +7,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 -protobuf==3.19.5 +protobuf==3.20.2 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 6821d9d1b263..d909cd230899 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -54,11 +54,23 @@ ] @nox.session(python=ALL_PYTHON) -def unit(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): """Run the unit test suite.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") session.run( 'py.test', @@ -68,13 +80,23 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) @nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install test environment dependencies session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -142,7 +164,10 @@ def prerelease_deps(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 766fba234f09..b930a775e478 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -44,7 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 4cd2782277d4..a81fb6bcd05c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -7,5 +7,5 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 -protobuf==3.19.5 +protobuf==3.20.2 grpc-google-iam-v1==0.12.4 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 54f31479ec3d..728f36ac124e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -54,11 +54,23 @@ ] @nox.session(python=ALL_PYTHON) -def unit(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): """Run the unit test suite.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") session.run( 'py.test', @@ -68,13 +80,23 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) @nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install test environment dependencies session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -142,7 +164,10 @@ def prerelease_deps(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 976935414a44..8bfa1824327f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -44,7 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index b8a550c73855..fc812592b0ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -7,4 +7,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 -protobuf==3.19.5 +protobuf==3.20.2 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index f16c08211def..264b051a2590 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -54,11 +54,23 @@ ] @nox.session(python=ALL_PYTHON) -def unit(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): """Run the unit test suite.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") session.run( 'py.test', @@ -68,13 +80,23 @@ def unit(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) @nox.session(python=ALL_PYTHON[-1]) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install test environment dependencies session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -142,7 +164,10 @@ def prerelease_deps(session): '--cov-config=.coveragerc', '--cov-report=term', '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 55e1fd55736c..5e8332354d54 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -44,7 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index b8a550c73855..fc812592b0ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -7,4 +7,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 -protobuf==3.19.5 +protobuf==3.20.2 From deabb4dc86a80355c4e624bf7d73897f9548929c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 20 Jun 2024 13:49:43 -0400 Subject: [PATCH 1147/1339] chore(main): release 1.18.1 (#2045) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 37402bc9971e..3976dc9d0603 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.18.1](https://github.com/googleapis/gapic-generator-python/compare/v1.18.0...v1.18.1) (2024-06-20) + + +### Bug Fixes + +* Allow protobuf 5.x ([#2042](https://github.com/googleapis/gapic-generator-python/issues/2042)) ([1998b81](https://github.com/googleapis/gapic-generator-python/commit/1998b813d2525cef8e46d606de494f0847e156bf)) +* Fix credentials typing issue in transport layer ([#2043](https://github.com/googleapis/gapic-generator-python/issues/2043)) 
([205fe5e](https://github.com/googleapis/gapic-generator-python/commit/205fe5e9445eb2f6811f022c68003d23c95f186b)) + ## [1.18.0](https://github.com/googleapis/gapic-generator-python/compare/v1.17.1...v1.18.0) (2024-05-08) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 887b6b94feda..79812de07a1c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.0" +version = "1.18.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 74e15051fe0a35bc0fc332e72586d53f8a1d7c25 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 20 Jun 2024 21:47:57 +0200 Subject: [PATCH 1148/1339] chore(deps): update dependency urllib3 to v2.2.2 [security] (#2041) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3747c0916e7b..e24177593543 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -431,7 +431,7 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.2.1 \ - --hash=sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d \ - --hash=sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19 +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via requests From 
148d2724f4a7ea32fd3fbf9889262d7c77d9a405 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 20 Jun 2024 21:55:15 +0200 Subject: [PATCH 1149/1339] chore(deps): update dependency requests to v2.32.2 [security] (#2039) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e24177593543..eedba8c56044 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -409,9 +409,9 @@ pyyaml==6.0.1 \ # via # -r requirements.in # libcst -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.2 \ + --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ + --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c # via google-api-core rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ From b1f8440bc87d137388f09acece6c7c347ef4fe23 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 25 Jun 2024 10:55:41 -0700 Subject: [PATCH 1150/1339] chore: fix AwaitableMock test coverage (#2050) --- .../gapic/%name_%version/%sub/test_macros.j2 | 6 +- .../unit/gapic/asset_v1/test_asset_service.py | 138 +++---------- .../credentials_v1/test_iam_credentials.py | 24 +-- .../unit/gapic/eventarc_v1/test_eventarc.py | 108 ++-------- .../logging_v2/test_config_service_v2.py | 192 +++--------------- .../logging_v2/test_logging_service_v2.py | 36 +--- .../logging_v2/test_metrics_service_v2.py | 30 +-- .../unit/gapic/redis_v1/test_cloud_redis.py | 66 +----- 8 files changed, 100 insertions(+), 500 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 3d6fb7fd7d58..627c2b9a6bcc 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -321,11 +321,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.{{method.transport_safe_name|snake_case}} in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.{{method.transport_safe_name|snake_case}}] = mock_object {% if method.client_streaming %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index a304fe8da464..59ef0f59bcab 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -923,11 +923,7 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.export_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_object request = {} @@ -1200,11 +1196,7 @@ async def 
test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.list_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_object request = {} @@ -1743,11 +1735,7 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: assert client._client._transport.batch_get_assets_history in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_object request = {} @@ -2027,11 +2015,7 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_object request = {} @@ -2401,11 +2385,7 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_object request = {} @@ -2760,11 +2740,7 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_feeds in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_object request = {} @@ -3122,11 +3098,7 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.update_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_object request = {} @@ -3479,11 +3451,7 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_object request = {} @@ -3836,11 +3804,7 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return 
iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_object request = {} @@ -4408,11 +4372,7 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_object request = {} @@ -4964,11 +4924,7 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.analyze_iam_policy in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_object request = {} @@ -5237,11 +5193,7 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans assert client._client._transport.analyze_iam_policy_longrunning in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_object request = {} @@ -5511,11 +5463,7 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.analyze_move in 
client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_object request = {} @@ -5790,11 +5738,7 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.query_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_object request = {} @@ -6075,11 +6019,7 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.create_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_object request = {} @@ -6464,11 +6404,7 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.get_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_object request = {} @@ -6828,11 +6764,7 @@ async def 
test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.list_saved_queries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_object request = {} @@ -7381,11 +7313,7 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.update_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_object request = {} @@ -7746,11 +7674,7 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.delete_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_object request = {} @@ -8094,11 +8018,7 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra assert client._client._transport.batch_get_effective_iam_policies in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_object request = {} @@ -8370,11 +8290,7 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.analyze_org_policies in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_object request = {} @@ -8942,11 +8858,7 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r assert client._client._transport.analyze_org_policy_governed_containers in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_object request = {} @@ -9514,11 +9426,7 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t assert client._client._transport.analyze_org_policy_governed_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_object request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index cc8db8a42c8f..bfc6ac5c6972 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -912,11 +912,7 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.generate_access_token in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.generate_access_token] = mock_object request = {} @@ -1294,11 +1290,7 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.generate_id_token in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.generate_id_token] = mock_object request = {} @@ -1681,11 +1673,7 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert client._client._transport.sign_blob in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.sign_blob] = mock_object request = {} @@ -2062,11 
+2050,7 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.sign_jwt in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.sign_jwt] = mock_object request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index ae832fb35ec2..545acf28bb85 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -944,11 +944,7 @@ async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.get_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_trigger] = mock_object request = {} @@ -1315,11 +1311,7 @@ async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.list_triggers in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_triggers] 
= mock_object request = {} @@ -1866,11 +1858,7 @@ async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.create_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_trigger] = mock_object request = {} @@ -2244,11 +2232,7 @@ async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.update_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_trigger] = mock_object request = {} @@ -2626,11 +2610,7 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.delete_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_trigger] = mock_object request = {} @@ -3011,11 +2991,7 @@ async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.get_channel in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = 
mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_channel] = mock_object request = {} @@ -3382,11 +3358,7 @@ async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.list_channels in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_channels] = mock_object request = {} @@ -3933,11 +3905,7 @@ async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.create_channel_ in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_channel_] = mock_object request = {} @@ -4311,11 +4279,7 @@ async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.update_channel in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_channel] = mock_object request = {} @@ -4681,11 +4645,7 @@ async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.delete_channel in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): 
- self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_channel] = mock_object request = {} @@ -5043,11 +5003,7 @@ async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.get_provider in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_provider] = mock_object request = {} @@ -5408,11 +5364,7 @@ async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.list_providers in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_providers] = mock_object request = {} @@ -5965,11 +5917,7 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: st assert client._client._transport.get_channel_connection in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_channel_connection] = mock_object request = {} @@ -6330,11 +6278,7 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: assert client._client._transport.list_channel_connections in 
client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_channel_connections] = mock_object request = {} @@ -6881,11 +6825,7 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: assert client._client._transport.create_channel_connection in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_channel_connection] = mock_object request = {} @@ -7261,11 +7201,7 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: assert client._client._transport.delete_channel_connection in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_channel_connection] = mock_object request = {} @@ -7623,11 +7559,7 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: assert client._client._transport.get_google_channel_config in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_google_channel_config] = mock_object 
request = {} @@ -7980,11 +7912,7 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transpo assert client._client._transport.update_google_channel_config in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_google_channel_config] = mock_object request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 2219af1173b6..491b0237bcaa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -901,11 +901,7 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.list_buckets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_object request = {} @@ -1465,11 +1461,7 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.get_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = 
mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_object request = {} @@ -1752,11 +1744,7 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.create_bucket_async in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_object request = {} @@ -2028,11 +2016,7 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.update_bucket_async in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_object request = {} @@ -2323,11 +2307,7 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.create_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_object request = {} @@ -2625,11 +2605,7 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.update_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def 
__await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_object request = {} @@ -2904,11 +2880,7 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.delete_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_object request = {} @@ -3168,11 +3140,7 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.undelete_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_object request = {} @@ -3439,11 +3407,7 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_views in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_object request = {} @@ -3991,11 +3955,7 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_view in client._client._transport._wrapped_methods # Replace 
cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_object request = {} @@ -4275,11 +4235,7 @@ async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_view] = mock_object request = {} @@ -4557,11 +4513,7 @@ async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.update_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_object request = {} @@ -4828,11 +4780,7 @@ async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_object request = {} @@ -5099,11 +5047,7 @@ async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert 
client._client._transport.list_sinks in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_sinks] = mock_object request = {} @@ -5666,11 +5610,7 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_object request = {} @@ -6055,11 +5995,7 @@ async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_object request = {} @@ -6454,11 +6390,7 @@ async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.update_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_object request = {} @@ -6837,11 +6769,7 @@ async def 
test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_object request = {} @@ -7191,11 +7119,7 @@ async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_object request = {} @@ -7571,11 +7495,7 @@ async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_object request = {} @@ -7932,11 +7852,7 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_links in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_object request = {} @@ -8484,11 +8400,7 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_object request = {} @@ -8844,11 +8756,7 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.list_exclusions in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_object request = {} @@ -9399,11 +9307,7 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.get_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_object request = {} @@ -9768,11 +9672,7 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.create_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - 
return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_object request = {} @@ -10147,11 +10047,7 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.update_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_object request = {} @@ -10522,11 +10418,7 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.delete_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_object request = {} @@ -10882,11 +10774,7 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.get_cmek_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_object request = {} @@ -11169,11 +11057,7 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.update_cmek_settings in client._client._transport._wrapped_methods # Replace 
cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_object request = {} @@ -11459,11 +11343,7 @@ async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.get_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_object request = {} @@ -11833,11 +11713,7 @@ async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.update_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_object request = {} @@ -12210,11 +12086,7 @@ async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_object request = {} diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 992c8836f759..24376f879ab7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -896,11 +896,7 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.delete_log in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_object request = {} @@ -1244,11 +1240,7 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.write_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_object request = {} @@ -1567,11 +1559,7 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.list_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() 
client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_object request = {} @@ -2065,11 +2053,7 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( assert client._client._transport.list_monitored_resource_descriptors in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_object request = {} @@ -2466,11 +2450,7 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert client._client._transport.list_logs in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_object request = {} @@ -2945,11 +2925,7 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.tail_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_object request = [{}] diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 8acc9a12e81d..1d35d05a716a 
100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -901,11 +901,7 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.list_log_metrics in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_object request = {} @@ -1465,11 +1461,7 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.get_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_object request = {} @@ -1849,11 +1841,7 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.create_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_object request = {} @@ -2243,11 +2231,7 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.update_log_metric in 
client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_object request = {} @@ -2614,11 +2598,7 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.delete_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_object request = {} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index f22a2c227edd..b1d02f31f348 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -928,11 +928,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.list_instances in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_object request = {} @@ -1554,11 +1550,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = 
"grpc_ assert client._client._transport.get_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_object request = {} @@ -1960,11 +1952,7 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: assert client._client._transport.get_instance_auth_string in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.get_instance_auth_string] = mock_object request = {} @@ -2317,11 +2305,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.create_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_object request = {} @@ -2695,11 +2679,7 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.update_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.update_instance] = 
mock_object request = {} @@ -3067,11 +3047,7 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.upgrade_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.upgrade_instance] = mock_object request = {} @@ -3437,11 +3413,7 @@ async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.import_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.import_instance] = mock_object request = {} @@ -3807,11 +3779,7 @@ async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.export_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.export_instance] = mock_object request = {} @@ -4177,11 +4145,7 @@ async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.failover_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = 
mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.failover_instance] = mock_object request = {} @@ -4547,11 +4511,7 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.delete_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_object request = {} @@ -4907,11 +4867,7 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: st assert client._client._transport.reschedule_maintenance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - class AwaitableMock(mock.AsyncMock): - def __await__(self): - self.await_count += 1 - return iter([]) - mock_object = AwaitableMock() + mock_object = mock.AsyncMock() client._client._transport._wrapped_methods[client._client._transport.reschedule_maintenance] = mock_object request = {} From 0d0859d651fb975249f9c98c805bd94075b49d55 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Jul 2024 17:28:48 +0000 Subject: [PATCH 1151/1339] chore(main): release 1.18.2 (#2052) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3976dc9d0603..f510c23fc0d8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## 
[1.18.2](https://github.com/googleapis/gapic-generator-python/compare/v1.18.1...v1.18.2) (2024-07-02) + + +### Bug Fixes + +* Fix AwaitableMock test coverage ([b48c935](https://github.com/googleapis/gapic-generator-python/commit/b48c935b55b840ad2deff451c5229f32bc386e9c)) + ## [1.18.1](https://github.com/googleapis/gapic-generator-python/compare/v1.18.0...v1.18.1) (2024-06-20) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 79812de07a1c..ace0550caf64 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.1" +version = "1.18.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From b6ab1e253862f8ede7249b7e61501c8cdaf480a0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 3 Jul 2024 15:10:38 -0400 Subject: [PATCH 1152/1339] chore: update templated files (#2056) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../gapic-generator/.github/auto-label.yaml | 2 +- packages/gapic-generator/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- packages/gapic-generator/.kokoro/release.sh | 2 +- .../gapic-generator/.kokoro/requirements.txt | 509 +++++++++--------- .../gapic-generator/.kokoro/trampoline.sh | 2 +- .../gapic-generator/.kokoro/trampoline_v2.sh | 2 +- packages/gapic-generator/.trampolinerc | 2 +- 10 files changed, 276 insertions(+), 253 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 81f87c56917d..91d742b5b9fe 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ 
b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 +# created: 2024-07-03T17:43:00.77142528Z diff --git a/packages/gapic-generator/.github/auto-label.yaml b/packages/gapic-generator/.github/auto-label.yaml index 8b37ee89711f..21786a4eb085 100644 --- a/packages/gapic-generator/.github/auto-label.yaml +++ b/packages/gapic-generator/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/build.sh b/packages/gapic-generator/.kokoro/build.sh index a2d75e376ba5..1cebf75e7312 100755 --- a/packages/gapic-generator/.kokoro/build.sh +++ b/packages/gapic-generator/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile index bdaf39fe22d0..a26ce61930f5 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/.kokoro/populate-secrets.sh b/packages/gapic-generator/.kokoro/populate-secrets.sh index 6f3972140e80..c435402f473e 100755 --- a/packages/gapic-generator/.kokoro/populate-secrets.sh +++ b/packages/gapic-generator/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh index ea4f0153bf97..b9f654279fe1 100755 --- a/packages/gapic-generator/.kokoro/release.sh +++ b/packages/gapic-generator/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 51f92b8e12f1..35ece0e4d2e9 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + 
--hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - 
--hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - 
--hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + 
--hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer 
-filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb 
\ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - 
--hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + 
--hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - 
--hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - 
--hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + 
--hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + 
--hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + 
--hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - 
--hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via 
readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - 
--hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # 
proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - 
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - 
--hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + 
--hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/packages/gapic-generator/.kokoro/trampoline.sh b/packages/gapic-generator/.kokoro/trampoline.sh index d85b1f267693..48f79699706e 100755 --- a/packages/gapic-generator/.kokoro/trampoline.sh +++ b/packages/gapic-generator/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/.kokoro/trampoline_v2.sh b/packages/gapic-generator/.kokoro/trampoline_v2.sh index 59a7cf3a9373..35fa529231dc 100755 --- a/packages/gapic-generator/.kokoro/trampoline_v2.sh +++ b/packages/gapic-generator/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/.trampolinerc b/packages/gapic-generator/.trampolinerc index a7dfeb42c6d0..0080152373d5 100644 --- a/packages/gapic-generator/.trampolinerc +++ b/packages/gapic-generator/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 75294fd41d57f1dbe66602f632a2c286599264ab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Jul 2024 15:49:19 -0400 Subject: [PATCH 1153/1339] chore(python): use python 3.10 for docs build (#2062) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 21 +++++---- .../.kokoro/docker/docs/requirements.txt | 40 ++++++++-------- .../gapic-generator/.kokoro/requirements.txt | 46 +++++++++---------- 4 files changed, 59 insertions(+), 52 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 91d742b5b9fe..f30cb3775afc 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 -# created: 2024-07-03T17:43:00.77142528Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e +# created: 2024-07-08T19:25:35.862283192Z diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile index a26ce61930f5..5205308b334d 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD 
["python3.8"] +CMD ["python3.10"] diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index 0e5d70f20f83..7129c7715594 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 
\ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 35ece0e4d2e9..9622baf0ba38 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.6.2 \ - --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ - --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 +certifi==2024.7.4 \ + 
--hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -371,23 +371,23 @@ more-itertools==10.3.0 \ # via # jaraco-classes # jaraco-functools -nh3==0.2.17 \ - --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ - --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ - --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ - --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ - --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ - --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ - --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ - --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ - --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ - --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ - --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ - --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ - --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ - --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ - --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ - --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + 
--hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer nox==2024.4.15 \ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ @@ -460,9 +460,9 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==43.0 \ - --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ - --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ From 89d2a45cb5dc03daca0bc9af3c78736733dae7ea Mon Sep 17 00:00:00 2001 From: Mend 
Renovate Date: Tue, 9 Jul 2024 21:50:32 +0200 Subject: [PATCH 1154/1339] chore(deps): update dependency certifi to v2024.7.4 [security] (#2058) --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index eedba8c56044..0e45c3df3f24 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,9 +8,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.2.2 \ - --hash=sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f \ - --hash=sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ From bb4a9be9c3d1743bcd73d7f87aecb2b14b5660d6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 15 Jul 2024 22:12:55 +0200 Subject: [PATCH 1155/1339] chore(deps): update all dependencies (#2000) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.bazeliskrc | 2 +- packages/gapic-generator/requirements.txt | 256 +++++++++++----------- 2 files changed, 125 insertions(+), 133 deletions(-) diff --git a/packages/gapic-generator/.bazeliskrc b/packages/gapic-generator/.bazeliskrc index 74cd900fbd3c..63f820260fa9 100644 --- a/packages/gapic-generator/.bazeliskrc +++ b/packages/gapic-generator/.bazeliskrc @@ -1,2 +1,2 @@ # See https://github.com/bazelbuild/bazelisk -USE_BAZEL_VERSION=6.3.0 +USE_BAZEL_VERSION=6.5.0 diff --git 
a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0e45c3df3f24..0d1f28ec4735 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -108,21 +108,21 @@ click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via -r requirements.in -exceptiongroup==1.2.0 \ - --hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \ - --hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68 +exceptiongroup==1.2.1 \ + --hash=sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad \ + --hash=sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16 # via pytest -google-api-core==2.18.0 \ - --hash=sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6 \ - --hash=sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9 +google-api-core==2.19.0 \ + --hash=sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251 \ + --hash=sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10 # via -r requirements.in -google-auth==2.29.0 \ - --hash=sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360 \ - --hash=sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415 +google-auth==2.30.0 \ + --hash=sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5 \ + --hash=sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688 # via google-api-core -googleapis-common-protos[grpc]==1.63.0 \ - --hash=sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e \ - --hash=sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632 +googleapis-common-protos[grpc]==1.63.1 \ + --hash=sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877 \ + 
--hash=sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a # via # -r requirements.in # google-api-core @@ -131,61 +131,53 @@ grpc-google-iam-v1==0.13.0 \ --hash=sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89 \ --hash=sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e # via -r requirements.in -grpcio==1.62.1 \ - --hash=sha256:12859468e8918d3bd243d213cd6fd6ab07208195dc140763c00dfe901ce1e1b4 \ - --hash=sha256:1714e7bc935780bc3de1b3fcbc7674209adf5208ff825799d579ffd6cd0bd505 \ - --hash=sha256:179bee6f5ed7b5f618844f760b6acf7e910988de77a4f75b95bbfaa8106f3c1e \ - --hash=sha256:1f1e7b36bdff50103af95a80923bf1853f6823dd62f2d2a2524b66ed74103e49 \ - --hash=sha256:1faa02530b6c7426404372515fe5ddf66e199c2ee613f88f025c6f3bd816450c \ - --hash=sha256:22bccdd7b23c420a27fd28540fb5dcbc97dc6be105f7698cb0e7d7a420d0e362 \ - --hash=sha256:23e2e04b83f347d0aadde0c9b616f4726c3d76db04b438fd3904b289a725267f \ - --hash=sha256:3227c667dccbe38f2c4d943238b887bac588d97c104815aecc62d2fd976e014b \ - --hash=sha256:359f821d4578f80f41909b9ee9b76fb249a21035a061a327f91c953493782c31 \ - --hash=sha256:3952b581eb121324853ce2b191dae08badb75cd493cb4e0243368aa9e61cfd41 \ - --hash=sha256:407b26b7f7bbd4f4751dbc9767a1f0716f9fe72d3d7e96bb3ccfc4aace07c8de \ - --hash=sha256:4187201a53f8561c015bc745b81a1b2d278967b8de35f3399b84b0695e281d5f \ - --hash=sha256:482ae2ae78679ba9ed5752099b32e5fe580443b4f798e1b71df412abf43375db \ - --hash=sha256:48611e4fa010e823ba2de8fd3f77c1322dd60cb0d180dc6630a7e157b205f7ea \ - --hash=sha256:48f7135c3de2f298b833be8b4ae20cafe37091634e91f61f5a7eb3d61ec6f660 \ - --hash=sha256:4b49fd8fe9f9ac23b78437da94c54aa7e9996fbb220bac024a67469ce5d0825f \ - --hash=sha256:58f6c693d446964e3292425e1d16e21a97a48ba9172f2d0df9d7b640acb99243 \ - --hash=sha256:5bd90b8c395f39bc82a5fb32a0173e220e3f401ff697840f4003e15b96d1befc \ - --hash=sha256:60dcd824df166ba266ee0cfaf35a31406cd16ef602b49f5d4dfb21f014b0dedd \ - 
--hash=sha256:6696ffe440333a19d8d128e88d440f91fb92c75a80ce4b44d55800e656a3ef1d \ - --hash=sha256:6c455e008fa86d9e9a9d85bb76da4277c0d7d9668a3bfa70dbe86e9f3c759947 \ - --hash=sha256:71f11fd63365ade276c9d4a7b7df5c136f9030e3457107e1791b3737a9b9ed6a \ - --hash=sha256:73db2dc1b201d20ab7083e7041946910bb991e7e9761a0394bbc3c2632326483 \ - --hash=sha256:77c339403db5a20ef4fed02e4d1a9a3d9866bf9c0afc77a42234677313ea22f3 \ - --hash=sha256:833379943d1728a005e44103f17ecd73d058d37d95783eb8f0b28ddc1f54d7b2 \ - --hash=sha256:83a17b303425104d6329c10eb34bba186ffa67161e63fa6cdae7776ff76df73f \ - --hash=sha256:83e7ccb85a74beaeae2634f10eb858a0ed1a63081172649ff4261f929bacfd22 \ - --hash=sha256:844d1f3fb11bd1ed362d3fdc495d0770cfab75761836193af166fee113421d66 \ - --hash=sha256:882020c87999d54667a284c7ddf065b359bd00251fcd70279ac486776dbf84ec \ - --hash=sha256:8999bf1b57172dbc7c3e4bb3c732658e918f5c333b2942243f10d0d653953ba9 \ - --hash=sha256:9084086190cc6d628f282e5615f987288b95457292e969b9205e45b442276407 \ - --hash=sha256:960edebedc6b9ada1ef58e1c71156f28689978188cd8cff3b646b57288a927d9 \ - --hash=sha256:973c49086cabab773525f6077f95e5a993bfc03ba8fc32e32f2c279497780585 \ - --hash=sha256:978121758711916d34fe57c1f75b79cdfc73952f1481bb9583399331682d36f7 \ - --hash=sha256:9bd5c8a1af40ec305d001c60236308a67e25419003e9bb3ebfab5695a8d0b369 \ - --hash=sha256:a10383035e864f386fe096fed5c47d27a2bf7173c56a6e26cffaaa5a361addb1 \ - --hash=sha256:a485f0c2010c696be269184bdb5ae72781344cb4e60db976c59d84dd6354fac9 \ - --hash=sha256:a7f615270fe534548112a74e790cd9d4f5509d744dd718cd442bf016626c22e4 \ - --hash=sha256:b134d5d71b4e0837fff574c00e49176051a1c532d26c052a1e43231f252d813b \ - --hash=sha256:b2a0e71b0a2158aa4bce48be9f8f9eb45cbd17c78c7443616d00abbe2a509f6d \ - --hash=sha256:b50b09b4dc01767163d67e1532f948264167cd27f49e9377e3556c3cba1268e1 \ - --hash=sha256:b5a4ea906db7dec694098435d84bf2854fe158eb3cd51e1107e571246d4d1d70 \ - --hash=sha256:b7209117bbeebdfa5d898205cc55153a51285757902dd73c47de498ad4d11332 \ - 
--hash=sha256:bba97b8e8883a8038606480d6b6772289f4c907f6ba780fa1f7b7da7dfd76f06 \ - --hash=sha256:be0477cb31da67846a33b1a75c611f88bfbcd427fe17701b6317aefceee1b96f \ - --hash=sha256:c7fcc6a32e7b7b58f5a7d27530669337a5d587d4066060bcb9dee7a8c833dfb7 \ - --hash=sha256:c8842ccbd8c0e253c1f189088228f9b433f7a93b7196b9e5b6f87dba393f5d5d \ - --hash=sha256:d1f6c96573dc09d50dbcbd91dbf71d5cf97640c9427c32584010fbbd4c0e0037 \ - --hash=sha256:d9e52558b8b8c2f4ac05ac86344a7417ccdd2b460a59616de49eb6933b07a0bd \ - --hash=sha256:e3393b0823f938253370ebef033c9fd23d27f3eae8eb9a8f6264900c7ea3fb5a \ - --hash=sha256:e6c8c8693df718c5ecbc7babb12c69a4e3677fd11de8886f05ab22d4e6b1c43b \ - --hash=sha256:f8de7c8cef9261a2d0a62edf2ccea3d741a523c6b8a6477a340a1f2e417658de \ - --hash=sha256:fa7d28eb4d50b7cbe75bb8b45ed0da9a1dc5b219a0af59449676a29c2eed9698 \ - --hash=sha256:fbe80577c7880911d3ad65e5ecc997416c98f354efeba2f8d0f9112a67ed65a5 +grpcio==1.64.1 \ + --hash=sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040 \ + --hash=sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122 \ + --hash=sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9 \ + --hash=sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f \ + --hash=sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd \ + --hash=sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d \ + --hash=sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33 \ + --hash=sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762 \ + --hash=sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294 \ + --hash=sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650 \ + --hash=sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b \ + --hash=sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad \ + 
--hash=sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1 \ + --hash=sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff \ + --hash=sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59 \ + --hash=sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4 \ + --hash=sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027 \ + --hash=sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502 \ + --hash=sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae \ + --hash=sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61 \ + --hash=sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb \ + --hash=sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa \ + --hash=sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5 \ + --hash=sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1 \ + --hash=sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9 \ + --hash=sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90 \ + --hash=sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b \ + --hash=sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179 \ + --hash=sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e \ + --hash=sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a \ + --hash=sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489 \ + --hash=sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d \ + --hash=sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a \ + --hash=sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2 \ + --hash=sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd \ + --hash=sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb \ + 
--hash=sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61 \ + --hash=sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca \ + --hash=sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6 \ + --hash=sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602 \ + --hash=sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367 \ + --hash=sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62 \ + --hash=sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d \ + --hash=sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd \ + --hash=sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22 \ + --hash=sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -205,32 +197,32 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via -r requirements.in -libcst==1.2.0 \ - --hash=sha256:0cb92398236566f0b73a0c73f8a41a9c4906c793e8f7c2745f30e3fb141a34b5 \ - --hash=sha256:13ca9fe82326d82feb2c7b0f5a320ce7ed0d707c32919dd36e1f40792459bf6f \ - --hash=sha256:1b5fecb2b26fa3c1efe6e05ef1420522bd31bb4dae239e4c41fdf3ddbd853aeb \ - --hash=sha256:1d45718f7e7a1405a16fd8e7fc75c365120001b6928bfa3c4112f7e533990b9a \ - --hash=sha256:2bbb4e442224da46b59a248d7d632ed335eae023a921dea1f5c72d2a059f6be9 \ - --hash=sha256:38fbd56f885e1f77383a6d1d798a917ffbc6d28dc6b1271eddbf8511c194213e \ - --hash=sha256:3c7c0edfe3b878d64877671261c7b3ffe9d23181774bfad5d8fcbdbbbde9f064 \ - --hash=sha256:4973a9d509cf1a59e07fac55a98f70bc4fd35e09781dffb3ec93ee32fc0de7af \ - --hash=sha256:5c0d548d92c6704bb07ce35d78c0e054cdff365def0645c1b57c856c8e112bb4 \ - --hash=sha256:5e54389abdea995b39ee96ad736ed1b0b8402ed30a7956b7a279c10baf0c0294 \ - 
--hash=sha256:6dd388c74c04434b41e3b25fc4a0fafa3e6abf91f97181df55e8f8327fd903cc \ - --hash=sha256:71dd69fff76e7edaf8fae0f63ffcdbf5016e8cd83165b1d0688d6856aa48186a \ - --hash=sha256:7f4919978c2b395079b64d8a654357854767adbabab13998b39c1f0bc67da8a7 \ - --hash=sha256:82373a35711a8bb2a664dba2b7aeb20bbcce92a4db40af964e9cb2b976f989e7 \ - --hash=sha256:8b56130f18aca9a98b3bcaf5962b2b26c2dcdd6d5132decf3f0b0b635f4403ba \ - --hash=sha256:968b93400e66e6711a29793291365e312d206dbafd3fc80219cfa717f0f01ad5 \ - --hash=sha256:b4066dcadf92b183706f81ae0b4342e7624fc1d9c5ca2bf2b44066cb74bf863f \ - --hash=sha256:ba24b8cf789db6b87c6e23a6c6365f5f73cb7306d929397581d5680149e9990c \ - --hash=sha256:c0149d24a455536ff2e41b3a48b16d3ebb245e28035013c91bd868def16592a0 \ - --hash=sha256:c80f36f4a02d530e28eac7073aabdea7c6795fc820773a02224021d79d164e8b \ - --hash=sha256:dded0e4f2e18150c4b07fedd7ef84a9abc7f9bd2d47cc1c485248ee1ec58e5cc \ - --hash=sha256:dece0362540abfc39cd2cf5c98cde238b35fd74a1b0167e2563e4b8bb5f47489 \ - --hash=sha256:e01879aa8cd478bb8b1e4285cfd0607e64047116f7ab52bc2a787cde584cd686 \ - --hash=sha256:f080e9af843ff609f8f35fc7275c8bf08b02c31115e7cd5b77ca3b6a56c75096 \ - --hash=sha256:f2342634f6c61fc9076dc0baf21e9cf5ef0195a06e1e95c0c9dc583ba3a30d00 +libcst==1.4.0 \ + --hash=sha256:061d6855ef30efe38b8a292b7e5d57c8e820e71fc9ec9846678b60a934b53bbb \ + --hash=sha256:17d71001cb25e94cfe8c3d997095741a8c4aa7a6d234c0f972bc42818c88dfaf \ + --hash=sha256:279b54568ea1f25add50ea4ba3d76d4f5835500c82f24d54daae4c5095b986aa \ + --hash=sha256:2d47de16d105e7dd5f4e01a428d9f4dc1e71efd74f79766daf54528ce37f23c3 \ + --hash=sha256:3399e6c95df89921511b44d8c5bf6a75bcbc2d51f1f6429763609ba005c10f6b \ + --hash=sha256:3401dae41fe24565387a65baee3887e31a44e3e58066b0250bc3f3ccf85b1b5a \ + --hash=sha256:3c6a8faab9da48c5b371557d0999b4ca51f4f2cbd37ee8c2c4df0ac01c781465 \ + --hash=sha256:449e0b16604f054fa7f27c3ffe86ea7ef6c409836fe68fe4e752a1894175db00 \ + 
--hash=sha256:48601e3e590e2d6a7ab8c019cf3937c70511a78d778ab3333764531253acdb33 \ + --hash=sha256:5da9d7dc83801aba3b8d911f82dc1a375db0d508318bad79d9fb245374afe068 \ + --hash=sha256:62e2682ee1567b6a89c91853865372bf34f178bfd237853d84df2b87b446e654 \ + --hash=sha256:7c54aa66c86d8ece9c93156a2cf5ca512b0dce40142fe9e072c86af2bf892411 \ + --hash=sha256:7ece51d935bc9bf60b528473d2e5cc67cbb88e2f8146297e40ee2c7d80be6f13 \ + --hash=sha256:81653dea1cdfa4c6520a7c5ffb95fa4d220cbd242e446c7a06d42d8636bfcbba \ + --hash=sha256:8e54c777b8d27339b70f304d16fc8bc8674ef1bd34ed05ea874bf4921eb5a313 \ + --hash=sha256:9d0cc3c5a2a51fa7e1d579a828c0a2e46b2170024fd8b1a0691c8a52f3abb2d9 \ + --hash=sha256:addc6d585141a7677591868886f6bda0577529401a59d210aa8112114340e129 \ + --hash=sha256:b8ecdba8934632b4dadacb666cd3816627a6ead831b806336972ccc4ba7ca0e9 \ + --hash=sha256:bb0abf627ee14903d05d0ad9b2c6865f1b21eb4081e2c7bea1033f85db2b8bae \ + --hash=sha256:cb4e42ea107a37bff7f9fdbee9532d39f9ea77b89caa5c5112b37057b12e0838 \ + --hash=sha256:d024f44059a853b4b852cfc04fec33e346659d851371e46fc8e7c19de24d3da9 \ + --hash=sha256:d1989fa12d3cd79118ebd29ebe2a6976d23d509b1a4226bc3d66fcb7cb50bd5d \ + --hash=sha256:e6227562fc5c9c1efd15dfe90b0971ae254461b8b6b23c1b617139b6003de1c1 \ + --hash=sha256:f42797309bb725f0f000510d5463175ccd7155395f09b5e7723971b0007a976d \ + --hash=sha256:f6abce0e66bba2babfadc20530fd3688f672d565674336595b4623cd800b91ef # via -r requirements.in markupsafe==2.1.5 \ --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ @@ -300,59 +292,59 @@ mypy-extensions==1.0.0 \ --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 # via typing-inspect -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + 
--hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via pytest -pluggy==1.4.0 \ - --hash=sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981 \ - --hash=sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be +pluggy==1.5.0 \ + --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ + --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 # via pytest -proto-plus==1.23.0 \ - --hash=sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2 \ - --hash=sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 # via # -r requirements.in # google-api-core -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +protobuf==5.27.1 \ + --hash=sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd 
\ + --hash=sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340 \ + --hash=sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333 \ + --hash=sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf \ + --hash=sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17 \ + --hash=sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107 \ + --hash=sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a \ + --hash=sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15 \ + --hash=sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d \ + --hash=sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631 \ + --hash=sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d # via # -r requirements.in # google-api-core # googleapis-common-protos # grpc-google-iam-v1 # proto-plus -pyasn1==0.5.1 \ - --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \ - --hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth pypandoc==1.13 \ --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 # via -r requirements.in -pytest==8.1.1 \ - 
--hash=sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7 \ - --hash=sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044 +pytest==8.2.2 \ + --hash=sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343 \ + --hash=sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977 # via pytest-asyncio -pytest-asyncio==0.23.6 \ - --hash=sha256:68516fdd1018ac57b846c9846b954f0393b26f094764a28c955eabb0536a4e8a \ - --hash=sha256:ffe523a89c1c222598c76856e76852b787504ddb72dd5d9b6617ffa8aa2cde5f +pytest-asyncio==0.23.7 \ + --hash=sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b \ + --hash=sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268 # via -r requirements.in pyyaml==6.0.1 \ --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ @@ -409,9 +401,9 @@ pyyaml==6.0.1 \ # via # -r requirements.in # libcst -requests==2.32.2 \ - --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ - --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via google-api-core rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -421,9 +413,9 @@ tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via pytest -typing-extensions==4.10.0 \ - --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \ - --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + 
--hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via # libcst # typing-inspect From d2ce174590c40c27257b2cedb5f0410e1d3971ee Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 18 Jul 2024 10:06:00 -0400 Subject: [PATCH 1156/1339] fix: allow pyi files to be included in the output of py_gapic_assembly_pkg (#2036) --- packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl index 3898836d2586..e85981653734 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl @@ -25,7 +25,7 @@ def _py_gapic_src_pkg_impl(ctx): elif f.extension in ("srcjar", "jar", "zip"): srcjar_srcs.append(f) # Exclude source files and files for external packages - elif f.extension in ("py") and not f.is_source and 'external' not in f.path: + elif f.extension in ("py", "pyi") and not f.is_source and 'external' not in f.path: py_srcs.append(f) paths = construct_package_dir_paths(ctx.attr.package_dir, ctx.outputs.pkg, ctx.label.name) From e2637372a3ca610a17e37995d303f53167dc267f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 23 Jul 2024 13:39:32 -0400 Subject: [PATCH 1157/1339] fix: retry and timeout values do not propagate in requests during pagination (#2065) --- .../%sub/services/%service/client.py.j2 | 2 + .../%sub/services/%service/pagers.py.j2 | 17 ++- .../%name_%version/%sub/test_%service.py.j2 | 7 +- .../%sub/services/%service/_client_macros.j2 | 2 + .../%sub/services/%service/async_client.py.j2 | 2 + .../%sub/services/%service/pagers.py.j2 | 28 +++- .../gapic/templates/noxfile.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 1 + .../gapic/%name_%version/%sub/test_macros.j2 | 6 +- packages/gapic-generator/noxfile.py | 4 +- 
.../services/asset_service/async_client.py | 14 ++ .../asset_v1/services/asset_service/client.py | 14 ++ .../asset_v1/services/asset_service/pagers.py | 136 ++++++++++++++++-- .../integration/goldens/asset/noxfile.py | 4 +- .../unit/gapic/asset_v1/test_asset_service.py | 43 +++++- .../goldens/credentials/noxfile.py | 4 +- .../credentials_v1/test_iam_credentials.py | 1 + .../services/eventarc/async_client.py | 8 ++ .../eventarc_v1/services/eventarc/client.py | 8 ++ .../eventarc_v1/services/eventarc/pagers.py | 82 +++++++++-- .../integration/goldens/eventarc/noxfile.py | 4 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 25 +++- .../config_service_v2/async_client.py | 10 ++ .../services/config_service_v2/client.py | 10 ++ .../services/config_service_v2/pagers.py | 100 +++++++++++-- .../logging_service_v2/async_client.py | 6 + .../services/logging_service_v2/client.py | 6 + .../services/logging_service_v2/pagers.py | 64 ++++++++- .../metrics_service_v2/async_client.py | 2 + .../services/metrics_service_v2/client.py | 2 + .../services/metrics_service_v2/pagers.py | 28 +++- .../integration/goldens/logging/noxfile.py | 4 +- .../logging_v2/test_config_service_v2.py | 31 +++- .../logging_v2/test_logging_service_v2.py | 19 ++- .../logging_v2/test_metrics_service_v2.py | 7 +- .../services/cloud_redis/async_client.py | 2 + .../redis_v1/services/cloud_redis/client.py | 2 + .../redis_v1/services/cloud_redis/pagers.py | 28 +++- .../integration/goldens/redis/noxfile.py | 4 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 7 +- 40 files changed, 665 insertions(+), 83 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index e4b0af6dec6b..064e17924f8d 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ 
b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -509,6 +509,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) {% endif %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 index 7be0c3a5c5bf..15ca3d3e48a5 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/pagers.py.j2 @@ -7,7 +7,13 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. -#} -from typing import Any, Callable, Iterable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, Callable, Iterable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() | selectattr('paged_result_field') %} @@ -41,6 +47,8 @@ class {{ method.name }}Pager: method: Callable[..., {{ method.output.ident }}], request: {{ method.input.ident }}, response: {{ method.output.ident }}, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -51,12 +59,17 @@ class {{ method.name }}Pager: The initial request object. 
response (:class:`{{ method.output.ident.sphinx }}`): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = {{ method.input.ident }}(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -67,7 +80,7 @@ class {{ method.name }}Pager: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response {% if method.paged_result_field.map %} diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 2a6af5ac1efa..9387a124e2d5 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -45,6 +45,7 @@ from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers from google.api_core import path_template +from google.api_core import retry as retries {% if service.has_lro %} from google.api_core import future from google.api_core import operation @@ -938,6 +939,8 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 {% if method.field_headers %} expected_metadata = 
tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -949,7 +952,7 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): )), ) {% endif %} - pager = client.{{ method_name }}(request={}) + pager = client.{{ method_name }}(request={}, retry=retry, timeout=timeout) {% if service.version %} if HAS_GOOGLE_API_CORE_VERSION_HEADER: @@ -958,6 +961,8 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): ) {% endif %} assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 128421655e08..e741d6d04766 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -217,6 +217,8 @@ method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) {% elif method.extended_lro and full_extended_lro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index b63cb1e99f4f..b001cea8910b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -414,6 +414,8 @@ class {{ service.async_client_name }}: method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) {% endif %} diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index fc3aa4f44c01..175bc34929b3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -7,7 +7,15 @@ {# This lives within the loop in order to ensure that this template is empty if there are no paged methods. #} -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() | selectattr('paged_result_field') %} @@ -43,6 +51,8 @@ class {{ method.name }}Pager: request: {{ method.input.ident }}, response: {{ method.output.ident }}, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -53,12 +63,17 @@ class {{ method.name }}Pager: The initial request object. response ({{ method.output.ident.sphinx }}): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = {{ method.input.ident }}(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -69,7 +84,7 @@ class {{ method.name }}Pager: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response {% if method.paged_result_field.map %} @@ -113,6 +128,8 @@ class {{ method.name }}AsyncPager: request: {{ method.input.ident }}, response: {{ method.output.ident }}, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -123,12 +140,17 @@ class {{ method.name }}AsyncPager: The initial request object. response ({{ method.output.ident.sphinx }}): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = {{ method.input.ident }}(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -139,7 +161,7 @@ class {{ method.name }}AsyncPager: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response {% if method.paged_result_field.map %} def __aiter__(self) -> Iterator[Tuple[str, {{ method.paged_result_field.type.fields.get('value').ident }}]]: diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 2c2b7921c223..430183c6437b 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -176,7 +176,9 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - 'mypy', + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + 'mypy!=1.11.0', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index edf2588ecd89..45bbb2d7e9c1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -51,6 +51,7 @@ from google.api_core import exceptions as core_exceptions from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core 
import path_template +from google.api_core import retry as retries {% if service.has_lro or service.has_extended_lro %} from google.api_core import future {% endif %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 627c2b9a6bcc..b53072887a56 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -850,6 +850,8 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 {% if not method.explicit_routing and method.field_headers %} expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -867,9 +869,11 @@ def test_{{ method_name }}_pager(transport_name: str = "grpc"): version_header.to_api_version_header("{{ service.version }}"), ) {% endif %} - pager = client.{{ method_name }}(request={}) + pager = client.{{ method_name }}(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 41fec240bc3e..42dab63bf4b2 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -434,7 +434,9 @@ def showcase_mypy( """Perform typecheck analysis on the generated Showcase library.""" # Install pytest and gapic-generator-python - session.install("mypy", "types-pkg-resources", "types-protobuf", "types-requests", "types-dataclasses") + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + session.install("mypy!=1.11.0", 
"types-pkg-resources", "types-protobuf", "types-requests", "types-dataclasses") with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 2985521eab84..2fee2a05b846 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -478,6 +478,8 @@ async def sample_list_assets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1335,6 +1337,8 @@ async def sample_search_all_resources(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1524,6 +1528,8 @@ async def sample_search_all_iam_policies(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2276,6 +2282,8 @@ async def sample_list_saved_queries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2721,6 +2729,8 @@ async def sample_analyze_org_policies(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2871,6 +2881,8 @@ async def sample_analyze_org_policy_governed_containers(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3050,6 +3062,8 @@ async def sample_analyze_org_policy_governed_assets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 5e42e3917cbd..23c8a7c27dba 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -874,6 +874,8 @@ def sample_list_assets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1725,6 +1727,8 @@ def sample_search_all_resources(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1913,6 +1917,8 @@ def sample_search_all_iam_policies(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2662,6 +2668,8 @@ def sample_list_saved_queries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3104,6 +3112,8 @@ def sample_analyze_org_policies(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3253,6 +3263,8 @@ def sample_analyze_org_policy_governed_containers(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3431,6 +3443,8 @@ def sample_analyze_org_policy_governed_assets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 626c1ea04f4a..a4f950083cd8 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -13,7 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets @@ -41,6 +49,8 @@ def __init__(self, request: asset_service.ListAssetsRequest, response: asset_service.ListAssetsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -51,12 +61,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.ListAssetsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.ListAssetsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -67,7 +82,7 @@ def pages(self) -> Iterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[assets.Asset]: @@ -100,6 +115,8 @@ def __init__(self, request: asset_service.ListAssetsRequest, response: asset_service.ListAssetsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -110,12 +127,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.ListAssetsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.ListAssetsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -126,7 +148,7 @@ async def pages(self) -> AsyncIterator[asset_service.ListAssetsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[assets.Asset]: async def async_generator(): @@ -162,6 +184,8 @@ def __init__(self, request: asset_service.SearchAllResourcesRequest, response: asset_service.SearchAllResourcesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -172,12 +196,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.SearchAllResourcesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.SearchAllResourcesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -188,7 +217,7 @@ def pages(self) -> Iterator[asset_service.SearchAllResourcesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[assets.ResourceSearchResult]: @@ -221,6 +250,8 @@ def __init__(self, request: asset_service.SearchAllResourcesRequest, response: asset_service.SearchAllResourcesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -231,12 +262,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.SearchAllResourcesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.SearchAllResourcesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -247,7 +283,7 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllResourcesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[assets.ResourceSearchResult]: async def async_generator(): @@ -283,6 +319,8 @@ def __init__(self, request: asset_service.SearchAllIamPoliciesRequest, response: asset_service.SearchAllIamPoliciesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -293,12 +331,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.SearchAllIamPoliciesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -309,7 +352,7 @@ def pages(self) -> Iterator[asset_service.SearchAllIamPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[assets.IamPolicySearchResult]: @@ -342,6 +385,8 @@ def __init__(self, request: asset_service.SearchAllIamPoliciesRequest, response: asset_service.SearchAllIamPoliciesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -352,12 +397,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.SearchAllIamPoliciesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.SearchAllIamPoliciesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -368,7 +418,7 @@ async def pages(self) -> AsyncIterator[asset_service.SearchAllIamPoliciesRespons yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[assets.IamPolicySearchResult]: async def async_generator(): @@ -404,6 +454,8 @@ def __init__(self, request: asset_service.ListSavedQueriesRequest, response: asset_service.ListSavedQueriesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -414,12 +466,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.ListSavedQueriesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.ListSavedQueriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -430,7 +487,7 @@ def pages(self) -> Iterator[asset_service.ListSavedQueriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[asset_service.SavedQuery]: @@ -463,6 +520,8 @@ def __init__(self, request: asset_service.ListSavedQueriesRequest, response: asset_service.ListSavedQueriesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -473,12 +532,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.ListSavedQueriesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.ListSavedQueriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -489,7 +553,7 @@ async def pages(self) -> AsyncIterator[asset_service.ListSavedQueriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[asset_service.SavedQuery]: async def async_generator(): @@ -525,6 +589,8 @@ def __init__(self, request: asset_service.AnalyzeOrgPoliciesRequest, response: asset_service.AnalyzeOrgPoliciesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -535,12 +601,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.AnalyzeOrgPoliciesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -551,7 +622,7 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: @@ -584,6 +655,8 @@ def __init__(self, request: asset_service.AnalyzeOrgPoliciesRequest, response: asset_service.AnalyzeOrgPoliciesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -594,12 +667,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.AnalyzeOrgPoliciesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.AnalyzeOrgPoliciesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -610,7 +688,7 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse] yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPoliciesResponse.OrgPolicyResult]: async def async_generator(): @@ -646,6 +724,8 @@ def __init__(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -656,12 +736,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -672,7 +757,7 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResp yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: @@ -705,6 +790,8 @@ def __init__(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -715,12 +802,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedContainersResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -731,7 +823,7 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedCon yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedContainersResponse.GovernedContainer]: async def async_generator(): @@ -767,6 +859,8 @@ def __init__(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -777,12 +871,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -793,7 +892,7 @@ def pages(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: @@ -826,6 +925,8 @@ def __init__(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -836,12 +937,17 @@ def __init__(self, The initial request object. response (google.cloud.asset_v1.types.AnalyzeOrgPolicyGovernedAssetsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -852,7 +958,7 @@ async def pages(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAss yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.GovernedAsset]: async def async_generator(): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index d6472c1d9812..e4e439309290 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -187,7 +187,9 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - 'mypy', + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + 'mypy!=1.11.0', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 59ef0f59bcab..980beb35d2a1 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -46,6 +46,7 @@ from google.api_core import 
operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.asset_v1.services.asset_service import AssetServiceAsyncClient @@ -1433,14 +1434,18 @@ def test_list_assets_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_assets(request={}) + pager = client.list_assets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4061,14 +4066,18 @@ def test_search_all_resources_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) - pager = client.search_all_resources(request={}) + pager = client.search_all_resources(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4619,14 +4628,18 @@ def test_search_all_iam_policies_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) - pager = client.search_all_iam_policies(request={}) + pager = client.search_all_iam_policies(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) 
== 6 @@ -7001,14 +7014,18 @@ def test_list_saved_queries_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_saved_queries(request={}) + pager = client.list_saved_queries(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -8547,14 +8564,18 @@ def test_analyze_org_policies_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) - pager = client.analyze_org_policies(request={}) + pager = client.analyze_org_policies(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -9115,14 +9136,18 @@ def test_analyze_org_policy_governed_containers_pager(transport_name: str = "grp ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), ) - pager = client.analyze_org_policy_governed_containers(request={}) + pager = client.analyze_org_policy_governed_containers(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -9683,14 +9708,18 @@ def test_analyze_org_policy_governed_assets_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('scope', ''), )), 
) - pager = client.analyze_org_policy_governed_assets(request={}) + pager = client.analyze_org_policy_governed_assets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 5aeea54e57d5..6037ded7bc13 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -187,7 +187,9 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - 'mypy', + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + 'mypy!=1.11.0', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index bfc6ac5c6972..c07b6bdf38d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -42,6 +42,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.credentials_v1.services.iam_credentials import IAMCredentialsAsyncClient diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index b2b3a07b7957..cdf056c7a018 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -478,6 +478,8 @@ async def sample_list_triggers(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1118,6 +1120,8 @@ async def sample_list_channels(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1736,6 +1740,8 @@ async def sample_list_providers(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1960,6 +1966,8 @@ async def sample_list_channel_connections(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 590f85a28245..c82b0a80dc90 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -900,6 +900,8 @@ def sample_list_triggers(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1535,6 +1537,8 @@ def sample_list_channels(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, 
) @@ -2148,6 +2152,8 @@ def sample_list_providers(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2370,6 +2376,8 @@ def sample_list_channel_connections(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 8689f7365d5b..ac39c92068c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -13,7 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection @@ -44,6 +52,8 @@ def __init__(self, request: eventarc.ListTriggersRequest, response: eventarc.ListTriggersResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): 
"""Instantiate the pager. @@ -54,12 +64,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListTriggersResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = eventarc.ListTriggersRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -70,7 +85,7 @@ def pages(self) -> Iterator[eventarc.ListTriggersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[trigger.Trigger]: @@ -103,6 +118,8 @@ def __init__(self, request: eventarc.ListTriggersRequest, response: eventarc.ListTriggersResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -113,12 +130,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListTriggersResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListTriggersRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -129,7 +151,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListTriggersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[trigger.Trigger]: async def async_generator(): @@ -165,6 +187,8 @@ def __init__(self, request: eventarc.ListChannelsRequest, response: eventarc.ListChannelsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -175,12 +199,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListChannelsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListChannelsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -191,7 +220,7 @@ def pages(self) -> Iterator[eventarc.ListChannelsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[channel.Channel]: @@ -224,6 +253,8 @@ def __init__(self, request: eventarc.ListChannelsRequest, response: eventarc.ListChannelsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -234,12 +265,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListChannelsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListChannelsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -250,7 +286,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListChannelsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[channel.Channel]: async def async_generator(): @@ -286,6 +322,8 @@ def __init__(self, request: eventarc.ListProvidersRequest, response: eventarc.ListProvidersResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -296,12 +334,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListProvidersResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListProvidersRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -312,7 +355,7 @@ def pages(self) -> Iterator[eventarc.ListProvidersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[discovery.Provider]: @@ -345,6 +388,8 @@ def __init__(self, request: eventarc.ListProvidersRequest, response: eventarc.ListProvidersResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -355,12 +400,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListProvidersResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListProvidersRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -371,7 +421,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListProvidersResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[discovery.Provider]: async def async_generator(): @@ -407,6 +457,8 @@ def __init__(self, request: eventarc.ListChannelConnectionsRequest, response: eventarc.ListChannelConnectionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -417,12 +469,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListChannelConnectionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListChannelConnectionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -433,7 +490,7 @@ def pages(self) -> Iterator[eventarc.ListChannelConnectionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[channel_connection.ChannelConnection]: @@ -466,6 +523,8 @@ def __init__(self, request: eventarc.ListChannelConnectionsRequest, response: eventarc.ListChannelConnectionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -476,12 +535,17 @@ def __init__(self, The initial request object. response (google.cloud.eventarc_v1.types.ListChannelConnectionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = eventarc.ListChannelConnectionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -492,7 +556,7 @@ async def pages(self) -> AsyncIterator[eventarc.ListChannelConnectionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[channel_connection.ChannelConnection]: async def async_generator(): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index d909cd230899..c85926c9c0b1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -187,7 +187,9 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - 'mypy', + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + 'mypy!=1.11.0', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 545acf28bb85..49890323b453 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -46,6 +46,7 @@ from google.api_core import operation_async # type: ignore 
from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.eventarc_v1.services.eventarc import EventarcAsyncClient @@ -1550,14 +1551,18 @@ def test_list_triggers_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_triggers(request={}) + pager = client.list_triggers(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3597,14 +3602,18 @@ def test_list_channels_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_channels(request={}) + pager = client.list_channels(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -5603,14 +5612,18 @@ def test_list_providers_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_providers(request={}) + pager = client.list_providers(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -6517,14 +6530,18 @@ def 
test_list_channel_connections_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_channel_connections(request={}) + pager = client.list_channel_connections(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 8c73903d7c87..f5ab658fe15d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -363,6 +363,8 @@ async def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1087,6 +1089,8 @@ async def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1533,6 +1537,8 @@ async def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2453,6 +2459,8 @@ async def sample_list_links(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2681,6 +2689,8 @@ async def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 2c3165381359..2593d1ce6688 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -757,6 +757,8 @@ def sample_list_buckets(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1480,6 +1482,8 @@ def sample_list_views(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1925,6 +1929,8 @@ def sample_list_sinks(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2838,6 +2844,8 @@ def sample_list_links(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -3064,6 +3072,8 @@ def sample_list_exclusions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index c87a5cc6397b..9890f2d3415b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -13,7 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.logging_v2.types import logging_config @@ -40,6 +48,8 @@ def __init__(self, request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -50,12 +60,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -66,7 +81,7 @@ def pages(self) -> Iterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[logging_config.LogBucket]: @@ -99,6 +114,8 @@ def __init__(self, request: logging_config.ListBucketsRequest, response: logging_config.ListBucketsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -109,12 +126,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListBucketsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListBucketsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -125,7 +147,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListBucketsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogBucket]: async def async_generator(): @@ -161,6 +183,8 @@ def __init__(self, request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -171,12 +195,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -187,7 +216,7 @@ def pages(self) -> Iterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[logging_config.LogView]: @@ -220,6 +249,8 @@ def __init__(self, request: logging_config.ListViewsRequest, response: logging_config.ListViewsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -230,12 +261,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListViewsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListViewsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -246,7 +282,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListViewsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogView]: async def async_generator(): @@ -282,6 +318,8 @@ def __init__(self, request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -292,12 +330,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -308,7 +351,7 @@ def pages(self) -> Iterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[logging_config.LogSink]: @@ -341,6 +384,8 @@ def __init__(self, request: logging_config.ListSinksRequest, response: logging_config.ListSinksResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -351,12 +396,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListSinksResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListSinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -367,7 +417,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListSinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogSink]: async def async_generator(): @@ -403,6 +453,8 @@ def __init__(self, request: logging_config.ListLinksRequest, response: logging_config.ListLinksResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -413,12 +465,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLinksResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListLinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -429,7 +486,7 @@ def pages(self) -> Iterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[logging_config.Link]: @@ -462,6 +519,8 @@ def __init__(self, request: logging_config.ListLinksRequest, response: logging_config.ListLinksResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -472,12 +531,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLinksResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListLinksRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -488,7 +552,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListLinksResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.Link]: async def async_generator(): @@ -524,6 +588,8 @@ def __init__(self, request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -534,12 +600,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -550,7 +621,7 @@ def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[logging_config.LogExclusion]: @@ -583,6 +654,8 @@ def __init__(self, request: logging_config.ListExclusionsRequest, response: logging_config.ListExclusionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -593,12 +666,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListExclusionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_config.ListExclusionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -609,7 +687,7 @@ async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: async def async_generator(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 97bd907c9321..5695fcf3281c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -672,6 +672,8 @@ async def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -760,6 +762,8 @@ async def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -881,6 +885,8 @@ async def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 139156e5b2e3..d53bce94642f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1009,6 +1009,8 @@ def sample_list_log_entries(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1097,6 +1099,8 @@ def sample_list_monitored_resource_descriptors(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1217,6 +1221,8 @@ def sample_list_logs(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index da84fac6f90f..52391a92857a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -13,7 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry @@ -42,6 +50,8 @@ def __init__(self, request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -52,12 +62,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -68,7 +83,7 @@ def pages(self) -> Iterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[log_entry.LogEntry]: @@ -101,6 +116,8 @@ def __init__(self, request: logging.ListLogEntriesRequest, response: logging.ListLogEntriesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -111,12 +128,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLogEntriesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogEntriesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -127,7 +149,7 @@ async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: async def async_generator(): @@ -163,6 +185,8 @@ def __init__(self, request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -173,12 +197,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -189,7 +218,7 @@ def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: @@ -222,6 +251,8 @@ def __init__(self, request: logging.ListMonitoredResourceDescriptorsRequest, response: logging.ListMonitoredResourceDescriptorsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -232,12 +263,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -248,7 +284,7 @@ async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsR yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: async def async_generator(): @@ -284,6 +320,8 @@ def __init__(self, request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -294,12 +332,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -310,7 +353,7 @@ def pages(self) -> Iterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[str]: @@ -343,6 +386,8 @@ def __init__(self, request: logging.ListLogsRequest, response: logging.ListLogsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -353,12 +398,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLogsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging.ListLogsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -369,7 +419,7 @@ async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 4b937e15358e..0f8961a508d4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -342,6 +342,8 @@ async def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index bebfbf98f469..639b5b2397e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ 
-682,6 +682,8 @@ def sample_list_log_metrics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 5be809849b43..9d2d326a9b60 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -13,7 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.logging_v2.types import logging_metrics @@ -40,6 +48,8 @@ def __init__(self, request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -50,12 +60,17 @@ def __init__(self, The initial request object. 
response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -66,7 +81,7 @@ def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[logging_metrics.LogMetric]: @@ -99,6 +114,8 @@ def __init__(self, request: logging_metrics.ListLogMetricsRequest, response: logging_metrics.ListLogMetricsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -109,12 +126,17 @@ def __init__(self, The initial request object. response (google.cloud.logging_v2.types.ListLogMetricsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -125,7 +147,7 @@ async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]: async def async_generator(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 728f36ac124e..0e32cdbf338f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -187,7 +187,9 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - 'mypy', + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + 'mypy!=1.11.0', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 491b0237bcaa..f8cd2f5efca1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -39,6 +39,7 @@ from google.api_core import operation_async # 
type: ignore from google.api_core import operations_v1 from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2AsyncClient @@ -1138,14 +1139,18 @@ def test_list_buckets_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_buckets(request={}) + pager = client.list_buckets(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3644,14 +3649,18 @@ def test_list_views_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_views(request={}) + pager = client.list_views(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -5284,14 +5293,18 @@ def test_list_sinks_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_sinks(request={}) + pager = client.list_sinks(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -8089,14 +8102,18 @@ def test_list_links_pager(transport_name: 
str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_links(request={}) + pager = client.list_links(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -8993,14 +9010,18 @@ def test_list_exclusions_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_exclusions(request={}) + pager = client.list_exclusions(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 24376f879ab7..7f463657f720 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -36,6 +36,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient @@ -1753,9 +1754,13 @@ def test_list_log_entries_pager(transport_name: str = 
"grpc"): ) expected_metadata = () - pager = client.list_log_entries(request={}) + retry = retries.Retry() + timeout = 5 + pager = client.list_log_entries(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2145,9 +2150,13 @@ def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc") ) expected_metadata = () - pager = client.list_monitored_resource_descriptors(request={}) + retry = retries.Retry() + timeout = 5 + pager = client.list_monitored_resource_descriptors(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2689,14 +2698,18 @@ def test_list_logs_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_logs(request={}) + pager = client.list_logs(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 1d35d05a716a..f144f8729451 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -39,6 +39,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from 
google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.logging_v2.services.metrics_service_v2 import MetricsServiceV2AsyncClient @@ -1138,14 +1139,18 @@ def test_list_log_metrics_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_log_metrics(request={}) + pager = client.list_log_metrics(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 38b770944870..ae9eea6c54d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -377,6 +377,8 @@ async def sample_list_instances(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 421ff472abd9..31b300b5b3da 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -719,6 +719,8 @@ def sample_list_instances(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 1c389c8f1343..0c65b5164073 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -13,7 +13,15 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.redis_v1.types import cloud_redis @@ -40,6 +48,8 @@ def __init__(self, request: cloud_redis.ListInstancesRequest, response: cloud_redis.ListInstancesResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. @@ -50,12 +60,17 @@ def __init__(self, The initial request object. 
response (google.cloud.redis_v1.types.ListInstancesResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = cloud_redis.ListInstancesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -66,7 +81,7 @@ def pages(self) -> Iterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __iter__(self) -> Iterator[cloud_redis.Instance]: @@ -99,6 +114,8 @@ def __init__(self, request: cloud_redis.ListInstancesRequest, response: cloud_redis.ListInstancesResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. @@ -109,12 +126,17 @@ def __init__(self, The initial request object. response (google.cloud.redis_v1.types.ListInstancesResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = cloud_redis.ListInstancesRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -125,7 +147,7 @@ async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: async def async_generator(): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 264b051a2590..69a634c48715 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -187,7 +187,9 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - 'mypy', + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): + # Ignore release of mypy 1.11.0 which may have a regression + 'mypy!=1.11.0', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index b1d02f31f348..224d961e4311 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -46,6 +46,7 @@ from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from 
google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.location import locations_pb2 @@ -1167,14 +1168,18 @@ def test_list_instances_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( ('parent', ''), )), ) - pager = client.list_instances(request={}) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 From 211fc0a4016c86011f340b0a64f2677ed40831bc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 23 Jul 2024 14:34:23 -0400 Subject: [PATCH 1158/1339] chore(main): release 1.18.3 (#2063) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index f510c23fc0d8..90fcf49a7516 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.18.3](https://github.com/googleapis/gapic-generator-python/compare/v1.18.2...v1.18.3) (2024-07-23) + + +### Bug Fixes + +* Allow pyi files to be included in the output of py_gapic_assembly_pkg ([#2036](https://github.com/googleapis/gapic-generator-python/issues/2036)) ([8c517a0](https://github.com/googleapis/gapic-generator-python/commit/8c517a030c88cceb179c6d83ad706b2d7f1eba89)) +* Retry and timeout values do not propagate in requests during pagination 
([#2065](https://github.com/googleapis/gapic-generator-python/issues/2065)) ([76aa98e](https://github.com/googleapis/gapic-generator-python/commit/76aa98eda53cfc5c406fb5ed705e894c5c6c2513)) + ## [1.18.2](https://github.com/googleapis/gapic-generator-python/compare/v1.18.1...v1.18.2) (2024-07-02) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ace0550caf64..292328154975 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.2" +version = "1.18.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From b0755c310537cf37bde8ddd304f3b7ca42502177 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 24 Jul 2024 18:05:23 +0200 Subject: [PATCH 1159/1339] chore(deps): update all dependencies (#2064) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 166 +++++++++++----------- 1 file changed, 83 insertions(+), 83 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0d1f28ec4735..b907b589016e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --generate-hashes requirements.in # -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 +cachetools==5.4.0 \ + --hash=sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474 \ + --hash=sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827 # via google-auth certifi==2024.7.4 \ 
--hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ @@ -108,76 +108,76 @@ click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de # via -r requirements.in -exceptiongroup==1.2.1 \ - --hash=sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad \ - --hash=sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16 +exceptiongroup==1.2.2 \ + --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ + --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest -google-api-core==2.19.0 \ - --hash=sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251 \ - --hash=sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via -r requirements.in -google-auth==2.30.0 \ - --hash=sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5 \ - --hash=sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688 +google-auth==2.32.0 \ + --hash=sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022 \ + --hash=sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b # via google-api-core -googleapis-common-protos[grpc]==1.63.1 \ - --hash=sha256:0e1c2cdfcbc354b76e4a211a35ea35d6926a835cba1377073c4861db904a1877 \ - --hash=sha256:c6442f7a0a6b2a80369457d79e6672bb7dcbaab88e0848302497e3ec80780a6a +googleapis-common-protos[grpc]==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via # -r requirements.in # google-api-core # grpc-google-iam-v1 
-grpc-google-iam-v1==0.13.0 \ - --hash=sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89 \ - --hash=sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e +grpc-google-iam-v1==0.13.1 \ + --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ + --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.64.1 \ - --hash=sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040 \ - --hash=sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122 \ - --hash=sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9 \ - --hash=sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f \ - --hash=sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd \ - --hash=sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d \ - --hash=sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33 \ - --hash=sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762 \ - --hash=sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294 \ - --hash=sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650 \ - --hash=sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b \ - --hash=sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad \ - --hash=sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1 \ - --hash=sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff \ - --hash=sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59 \ - --hash=sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4 \ - --hash=sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027 \ - --hash=sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502 \ - 
--hash=sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae \ - --hash=sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61 \ - --hash=sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb \ - --hash=sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa \ - --hash=sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5 \ - --hash=sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1 \ - --hash=sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9 \ - --hash=sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90 \ - --hash=sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b \ - --hash=sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179 \ - --hash=sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e \ - --hash=sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a \ - --hash=sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489 \ - --hash=sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d \ - --hash=sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a \ - --hash=sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2 \ - --hash=sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd \ - --hash=sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb \ - --hash=sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61 \ - --hash=sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca \ - --hash=sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6 \ - --hash=sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602 \ - --hash=sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367 \ - --hash=sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62 \ - 
--hash=sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d \ - --hash=sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd \ - --hash=sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22 \ - --hash=sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309 +grpcio==1.65.1 \ + --hash=sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2 \ + --hash=sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c \ + --hash=sha256:1faaf7355ceed07ceaef0b9dcefa4c98daf1dd8840ed75c2de128c3f4a4d859d \ + --hash=sha256:1fbd6331f18c3acd7e09d17fd840c096f56eaf0ef830fbd50af45ae9dc8dfd83 \ + --hash=sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a \ + --hash=sha256:2ca684ba331fb249d8a1ce88db5394e70dbcd96e58d8c4b7e0d7b141a453dce9 \ + --hash=sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68 \ + --hash=sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3 \ + --hash=sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875 \ + --hash=sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b \ + --hash=sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062 \ + --hash=sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857 \ + --hash=sha256:54cb822e177374b318b233e54b6856c692c24cdbd5a3ba5335f18a47396bac8f \ + --hash=sha256:557de35bdfbe8bafea0a003dbd0f4da6d89223ac6c4c7549d78e20f92ead95d9 \ + --hash=sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57 \ + --hash=sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57 \ + --hash=sha256:60f1f38eed830488ad2a1b11579ef0f345ff16fffdad1d24d9fbc97ba31804ff \ + --hash=sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719 \ + --hash=sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2 \ + 
--hash=sha256:76e81a86424d6ca1ce7c16b15bdd6a964a42b40544bf796a48da241fdaf61153 \ + --hash=sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8 \ + --hash=sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719 \ + --hash=sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8 \ + --hash=sha256:8558f0083ddaf5de64a59c790bffd7568e353914c0c551eae2955f54ee4b857f \ + --hash=sha256:8bfd95ef3b097f0cc86ade54eafefa1c8ed623aa01a26fbbdcd1a3650494dd11 \ + --hash=sha256:8d8143a3e3966f85dce6c5cc45387ec36552174ba5712c5dc6fcc0898fb324c0 \ + --hash=sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e \ + --hash=sha256:941c4869aa229d88706b78187d60d66aca77fe5c32518b79e3c3e03fc26109a2 \ + --hash=sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462 \ + --hash=sha256:9e6a8f3d6c41e6b642870afe6cafbaf7b61c57317f9ec66d0efdaf19db992b90 \ + --hash=sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364 \ + --hash=sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77 \ + --hash=sha256:aaf3c54419a28d45bd1681372029f40e5bfb58e5265e3882eaf21e4a5f81a119 \ + --hash=sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df \ + --hash=sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8 \ + --hash=sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037 \ + --hash=sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad \ + --hash=sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3 \ + --hash=sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5 \ + --hash=sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c \ + --hash=sha256:de5b6be29116e094c5ef9d9e4252e7eb143e3d5f6bd6d50a78075553ab4930b0 \ + --hash=sha256:e4a3cdba62b2d6aeae6027ae65f350de6dc082b72e6215eccf82628e79efe9ba \ + --hash=sha256:e75acfa52daf5ea0712e8aa82f0003bba964de7ae22c26d208cbd7bc08500177 \ + 
--hash=sha256:f40cebe5edb518d78b8131e87cb83b3ee688984de38a232024b9b44e74ee53d3 \ + --hash=sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8 \ + --hash=sha256:ff5a84907e51924973aa05ed8759210d8cdae7ffcf9e44fd17646cf4a902df59 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -306,18 +306,18 @@ proto-plus==1.24.0 \ # via # -r requirements.in # google-api-core -protobuf==5.27.1 \ - --hash=sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd \ - --hash=sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340 \ - --hash=sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333 \ - --hash=sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf \ - --hash=sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17 \ - --hash=sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107 \ - --hash=sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a \ - --hash=sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15 \ - --hash=sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d \ - --hash=sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631 \ - --hash=sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca 
\ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # -r requirements.in # google-api-core @@ -338,13 +338,13 @@ pypandoc==1.13 \ --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 # via -r requirements.in -pytest==8.2.2 \ - --hash=sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343 \ - --hash=sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977 +pytest==8.3.1 \ + --hash=sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6 \ + --hash=sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c # via pytest-asyncio -pytest-asyncio==0.23.7 \ - --hash=sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b \ - --hash=sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268 +pytest-asyncio==0.23.8 \ + --hash=sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2 \ + --hash=sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3 # via -r requirements.in pyyaml==6.0.1 \ --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ From bd9dff0c34b701646db1b3492faa9d22b1aff1e2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 26 Jul 2024 15:32:10 -0400 Subject: [PATCH 1160/1339] fix: fix AttributeError with AsyncRetry (#2072) --- .../%name_%version/%sub/services/%service/pagers.py.j2 | 5 +++-- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 2 -- packages/gapic-generator/noxfile.py | 2 -- .../google/cloud/asset_v1/services/asset_service/pagers.py | 5 +++-- .../tests/integration/goldens/asset/noxfile.py | 2 -- .../tests/integration/goldens/credentials/noxfile.py | 2 -- 
.../google/cloud/eventarc_v1/services/eventarc/pagers.py | 5 +++-- .../tests/integration/goldens/eventarc/noxfile.py | 2 -- .../cloud/logging_v2/services/config_service_v2/pagers.py | 5 +++-- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 5 +++-- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 5 +++-- .../tests/integration/goldens/logging/noxfile.py | 2 -- .../google/cloud/redis_v1/services/cloud_redis/pagers.py | 5 +++-- .../tests/integration/goldens/redis/noxfile.py | 2 -- 14 files changed, 21 insertions(+), 28 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 175bc34929b3..796cefe48cc5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -9,13 +9,14 @@ #} from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore {% filter sort_lines %} {% for method in service.methods.values() | selectattr('paged_result_field') %} diff --git 
a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 430183c6437b..c88ce57c6998 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -183,8 +183,6 @@ def mypy(session): 'types-protobuf' ) session.install('.') - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 42dab63bf4b2..a5df7f67165f 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -273,8 +273,6 @@ def showcase_library( # Install the library without a constraints file. session.install("-e", tmp_dir) - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") yield tmp_dir diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index a4f950083cd8..1498b1ab970c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -15,13 +15,14 @@ # from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, 
gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.asset_v1.types import asset_service from google.cloud.asset_v1.types import assets diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index e4e439309290..dc7bead94015 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -194,8 +194,6 @@ def mypy(session): 'types-protobuf' ) session.install('.') - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 6037ded7bc13..0a72020a4f3e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -194,8 +194,6 @@ def mypy(session): 'types-protobuf' ) session.install('.') - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index ac39c92068c1..9ef97e16e5dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -15,13 +15,14 @@ # from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index c85926c9c0b1..db601208a437 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -194,8 +194,6 @@ def mypy(session): 'types-protobuf' ) session.install('.') - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 9890f2d3415b..459a76f21307 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -15,13 +15,14 @@ # from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.logging_v2.types import logging_config diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 52391a92857a..13e9963f7934 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -15,13 +15,14 @@ # from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - 
OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.types import log_entry diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 9d2d326a9b60..134bd0dedb17 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -15,13 +15,14 @@ # from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.logging_v2.types import logging_metrics diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 0e32cdbf338f..5a34c0e35294 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -194,8 +194,6 @@ def mypy(session): 'types-protobuf' ) session.install('.') - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 0c65b5164073..55a7a66f9503 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -15,13 +15,14 @@ # from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] - OptionalAsyncRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore - OptionalAsyncRetry = Union[retries.AsyncRetry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore from google.cloud.redis_v1.types import cloud_redis diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 69a634c48715..595f4baa097e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -194,8 +194,6 @@ def mypy(session): 'types-protobuf' ) session.install('.') - # Remove once https://github.com/googleapis/python-api-core/pull/650 is merged - session.install("google-api-core>=2.19.1rc0") session.run( 'mypy', '-p', From 71b80eb6c8100fdd7e66fd93597b049f10a1d1c8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 26 Jul 2024 19:50:47 +0000 Subject: [PATCH 1161/1339] chore(main): release 1.18.4 (#2075) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 90fcf49a7516..a23bb08b5b33 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.18.4](https://github.com/googleapis/gapic-generator-python/compare/v1.18.3...v1.18.4) (2024-07-26) + + +### Bug Fixes + +* Fix AttributeError with AsyncRetry ([#2072](https://github.com/googleapis/gapic-generator-python/issues/2072)) ([dcddac8](https://github.com/googleapis/gapic-generator-python/commit/dcddac803fc5eb58c0d242d88c8b4c419b83fe90)) + ## [1.18.3](https://github.com/googleapis/gapic-generator-python/compare/v1.18.2...v1.18.3) (2024-07-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 292328154975..3dcb12f3672b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = 
"gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.3" +version = "1.18.4" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From b0284335c7094593a417fd1ec62e621d4905c154 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 30 Jul 2024 16:15:18 +0200 Subject: [PATCH 1162/1339] chore(deps): update all dependencies (#2081) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index b907b589016e..258fb718b701 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -338,9 +338,9 @@ pypandoc==1.13 \ --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 # via -r requirements.in -pytest==8.3.1 \ - --hash=sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6 \ - --hash=sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c +pytest==8.3.2 \ + --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \ + --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce # via pytest-asyncio pytest-asyncio==0.23.8 \ --hash=sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2 \ From 69ef46b67e95c3bf67c0cf5c9eb0dc3e33a07621 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 30 Jul 2024 10:34:55 -0400 Subject: [PATCH 1163/1339] fix: require google.shopping.type >= 0.1.6 (#2083) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 395b2262935c..0136654f16a8 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -16,6 +16,6 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"}, - ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"} + ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.6", "upper_bound": "1.0.0dev"} } %} From a698598002a40dac592654dfa9844940f7567547 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 6 Aug 2024 13:39:57 -0600 Subject: [PATCH 1164/1339] fix: mypy types in get_transport (#2088) --- .../%name_%version/%sub/services/%service/async_client.py.j2 | 3 +-- .../%name_%version/%sub/services/%service/client.py.j2 | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 3 +-- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../credentials_v1/services/iam_credentials/async_client.py | 3 +-- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/async_client.py | 3 +-- .../google/cloud/eventarc_v1/services/eventarc/client.py | 2 +- .../logging_v2/services/config_service_v2/async_client.py | 3 +-- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 3 +-- 
.../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 3 +-- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 3 +-- .../redis/google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- 16 files changed, 16 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index b001cea8910b..36a40f4db076 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -5,7 +5,6 @@ {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union {% if api.all_method_settings.values()|map(attribute="auto_populated_fields", default=[])|list %} @@ -172,7 +171,7 @@ class {{ service.async_client_name }}: """ return self._client._universe_domain - get_transport_class = functools.partial(type({{ service.client_name }}).get_transport_class, type({{ service.client_name }})) + get_transport_class = {{ service.client_name }}.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index eaa572f6e31b..6c45fdd728de 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -548,7 +548,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[{{ service.name }}Transport], Callable[..., {{ service.name }}Transport]] = ( - type(self).get_transport_class(transport) + {{ service.client_name }}.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., {{ service.name }}Transport], transport) ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 2fee2a05b846..2bd35d94b709 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -180,7 +179,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(AssetServiceClient).get_transport_class, type(AssetServiceClient)) + get_transport_class = AssetServiceClient.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 23c8a7c27dba..54349918c711 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -628,7 +628,7 @@ def __init__(self, *, credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[AssetServiceTransport], Callable[..., AssetServiceTransport]] = ( - type(self).get_transport_class(transport) + AssetServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., AssetServiceTransport], transport) ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index d61293a8bea4..485c5bdee9e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -171,7 +170,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(IAMCredentialsClient).get_transport_class, type(IAMCredentialsClient)) + get_transport_class = IAMCredentialsClient.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 9126bd1da6cf..b38845b359fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -565,7 +565,7 @@ def __init__(self, *, credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[IAMCredentialsTransport], Callable[..., IAMCredentialsTransport]] = ( - type(self).get_transport_class(transport) + IAMCredentialsClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., IAMCredentialsTransport], transport) ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index cdf056c7a018..85e32e498946 
100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -198,7 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(EventarcClient).get_transport_class, type(EventarcClient)) + get_transport_class = EventarcClient.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index c82b0a80dc90..ad080aa3bad7 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -673,7 +673,7 @@ def __init__(self, *, credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[EventarcTransport], Callable[..., EventarcTransport]] = ( - type(self).get_transport_class(transport) + EventarcClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., EventarcTransport], transport) ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index f5ab658fe15d..a02aedca9f37 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -178,7 +177,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(ConfigServiceV2Client).get_transport_class, type(ConfigServiceV2Client)) + get_transport_class = ConfigServiceV2Client.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 2593d1ce6688..92a2df84cf4b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -621,7 +621,7 @@ def __init__(self, *, credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport]] = ( - type(self).get_transport_class(transport) + ConfigServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ConfigServiceV2Transport], transport) 
) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 5695fcf3281c..4e96eec30d0a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union @@ -163,7 +162,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(LoggingServiceV2Client).get_transport_class, type(LoggingServiceV2Client)) + get_transport_class = LoggingServiceV2Client.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index d53bce94642f..9d085d369e1e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -552,7 +552,7 @@ def __init__(self, *, credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[LoggingServiceV2Transport], Callable[..., LoggingServiceV2Transport]] = ( - 
type(self).get_transport_class(transport) + LoggingServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., LoggingServiceV2Transport], transport) ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 0f8961a508d4..01d94e8ff268 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -164,7 +163,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(MetricsServiceV2Client).get_transport_class, type(MetricsServiceV2Client)) + get_transport_class = MetricsServiceV2Client.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 639b5b2397e7..1fff39e085b2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -553,7 +553,7 @@ def __init__(self, *, credentials = 
google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[MetricsServiceV2Transport], Callable[..., MetricsServiceV2Transport]] = ( - type(self).get_transport_class(transport) + MetricsServiceV2Client.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., MetricsServiceV2Transport], transport) ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index ae9eea6c54d6..6bf97550411b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -188,7 +187,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial(type(CloudRedisClient).get_transport_class, type(CloudRedisClient)) + get_transport_class = CloudRedisClient.get_transport_class def __init__(self, *, credentials: Optional[ga_credentials.Credentials] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 31b300b5b3da..925ff8da9f0c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ 
-582,7 +582,7 @@ def __init__(self, *, credentials = google.auth._default.get_api_key_credentials(api_key_value) transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( - type(self).get_transport_class(transport) + CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) ) From 87a806c29ee316703b8c6d666a4bc456e3b1327c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 6 Aug 2024 16:24:03 -0400 Subject: [PATCH 1165/1339] build: fix mypy (#2092) --- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 4 +--- packages/gapic-generator/noxfile.py | 5 +---- .../tests/integration/goldens/asset/noxfile.py | 4 +--- .../tests/integration/goldens/credentials/noxfile.py | 4 +--- .../tests/integration/goldens/eventarc/noxfile.py | 4 +--- .../tests/integration/goldens/logging/noxfile.py | 4 +--- .../tests/integration/goldens/redis/noxfile.py | 4 +--- 7 files changed, 7 insertions(+), 22 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index c88ce57c6998..452c7061353d 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -176,9 +176,7 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - 'mypy!=1.11.0', + 'mypy', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index a5df7f67165f..3ab6c7b392c4 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -431,10 +431,7 @@ def showcase_mypy( ): """Perform typecheck analysis on the generated Showcase library.""" - # Install pytest and 
gapic-generator-python - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - session.install("mypy!=1.11.0", "types-pkg-resources", "types-protobuf", "types-requests", "types-dataclasses") + session.install("mypy", "types-setuptools", "types-protobuf", "types-requests", "types-dataclasses") with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index dc7bead94015..3e4f7c4fe142 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -187,9 +187,7 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - 'mypy!=1.11.0', + 'mypy', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 0a72020a4f3e..2194befe9c8b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -187,9 +187,7 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - 'mypy!=1.11.0', + 'mypy', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index db601208a437..61f7c5c43dbe 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -187,9 +187,7 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - 'mypy!=1.11.0', + 'mypy', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 5a34c0e35294..6402e148ffc6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -187,9 +187,7 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - 'mypy!=1.11.0', + 'mypy', 'types-requests', 'types-protobuf' ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 595f4baa097e..513250d8fb0c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -187,9 +187,7 @@ def cover(session): def mypy(session): """Run the type checker.""" session.install( - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2066): - # Ignore release of mypy 1.11.0 which may have a regression - 'mypy!=1.11.0', + 'mypy', 'types-requests', 'types-protobuf' ) From a4fcd751b1780e3fcfb83eaea29e4b1244292993 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 6 Aug 2024 22:33:40 +0200 Subject: [PATCH 1166/1339] chore(deps): update all dependencies (#2090) Co-authored-by: Anthonios Partheniou --- 
packages/gapic-generator/requirements.txt | 118 +++++++++++----------- 1 file changed, 59 insertions(+), 59 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 258fb718b701..56e7d6386085 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -131,53 +131,53 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.65.1 \ - --hash=sha256:12e9bdf3b5fd48e5fbe5b3da382ad8f97c08b47969f3cca81dd9b36b86ed39e2 \ - --hash=sha256:1bceeec568372cbebf554eae1b436b06c2ff24cfaf04afade729fb9035408c6c \ - --hash=sha256:1faaf7355ceed07ceaef0b9dcefa4c98daf1dd8840ed75c2de128c3f4a4d859d \ - --hash=sha256:1fbd6331f18c3acd7e09d17fd840c096f56eaf0ef830fbd50af45ae9dc8dfd83 \ - --hash=sha256:27adee2338d697e71143ed147fe286c05810965d5d30ec14dd09c22479bfe48a \ - --hash=sha256:2ca684ba331fb249d8a1ce88db5394e70dbcd96e58d8c4b7e0d7b141a453dce9 \ - --hash=sha256:2f56b5a68fdcf17a0a1d524bf177218c3c69b3947cb239ea222c6f1867c3ab68 \ - --hash=sha256:3019fb50128b21a5e018d89569ffaaaa361680e1346c2f261bb84a91082eb3d3 \ - --hash=sha256:34966cf526ef0ea616e008d40d989463e3db157abb213b2f20c6ce0ae7928875 \ - --hash=sha256:3c492301988cd720cd145d84e17318d45af342e29ef93141228f9cd73222368b \ - --hash=sha256:3dc5f928815b8972fb83b78d8db5039559f39e004ec93ebac316403fe031a062 \ - --hash=sha256:4effc0562b6c65d4add6a873ca132e46ba5e5a46f07c93502c37a9ae7f043857 \ - --hash=sha256:54cb822e177374b318b233e54b6856c692c24cdbd5a3ba5335f18a47396bac8f \ - --hash=sha256:557de35bdfbe8bafea0a003dbd0f4da6d89223ac6c4c7549d78e20f92ead95d9 \ - --hash=sha256:5f096ffb881f37e8d4f958b63c74bfc400c7cebd7a944b027357cd2fb8d91a57 \ - --hash=sha256:5fd7337a823b890215f07d429f4f193d24b80d62a5485cf88ee06648591a0c57 \ - 
--hash=sha256:60f1f38eed830488ad2a1b11579ef0f345ff16fffdad1d24d9fbc97ba31804ff \ - --hash=sha256:6e71aed8835f8d9fbcb84babc93a9da95955d1685021cceb7089f4f1e717d719 \ - --hash=sha256:71a05fd814700dd9cb7d9a507f2f6a1ef85866733ccaf557eedacec32d65e4c2 \ - --hash=sha256:76e81a86424d6ca1ce7c16b15bdd6a964a42b40544bf796a48da241fdaf61153 \ - --hash=sha256:7ae15275ed98ea267f64ee9ddedf8ecd5306a5b5bb87972a48bfe24af24153e8 \ - --hash=sha256:7af64838b6e615fff0ec711960ed9b6ee83086edfa8c32670eafb736f169d719 \ - --hash=sha256:8333ca46053c35484c9f2f7e8d8ec98c1383a8675a449163cea31a2076d93de8 \ - --hash=sha256:8558f0083ddaf5de64a59c790bffd7568e353914c0c551eae2955f54ee4b857f \ - --hash=sha256:8bfd95ef3b097f0cc86ade54eafefa1c8ed623aa01a26fbbdcd1a3650494dd11 \ - --hash=sha256:8d8143a3e3966f85dce6c5cc45387ec36552174ba5712c5dc6fcc0898fb324c0 \ - --hash=sha256:941596d419b9736ab548aa0feb5bbba922f98872668847bf0720b42d1d227b9e \ - --hash=sha256:941c4869aa229d88706b78187d60d66aca77fe5c32518b79e3c3e03fc26109a2 \ - --hash=sha256:9a1c84560b3b2d34695c9ba53ab0264e2802721c530678a8f0a227951f453462 \ - --hash=sha256:9e6a8f3d6c41e6b642870afe6cafbaf7b61c57317f9ec66d0efdaf19db992b90 \ - --hash=sha256:a6c71575a2fedf259724981fd73a18906513d2f306169c46262a5bae956e6364 \ - --hash=sha256:a8422dc13ad93ec8caa2612b5032a2b9cd6421c13ed87f54db4a3a2c93afaf77 \ - --hash=sha256:aaf3c54419a28d45bd1681372029f40e5bfb58e5265e3882eaf21e4a5f81a119 \ - --hash=sha256:b12c1aa7b95abe73b3e04e052c8b362655b41c7798da69f1eaf8d186c7d204df \ - --hash=sha256:b590f1ad056294dfaeac0b7e1b71d3d5ace638d8dd1f1147ce4bd13458783ba8 \ - --hash=sha256:bbb46330cc643ecf10bd9bd4ca8e7419a14b6b9dedd05f671c90fb2c813c6037 \ - --hash=sha256:ca931de5dd6d9eb94ff19a2c9434b23923bce6f767179fef04dfa991f282eaad \ - --hash=sha256:cb5175f45c980ff418998723ea1b3869cce3766d2ab4e4916fbd3cedbc9d0ed3 \ - --hash=sha256:d827a6fb9215b961eb73459ad7977edb9e748b23e3407d21c845d1d8ef6597e5 \ - --hash=sha256:dbb64b4166362d9326f7efbf75b1c72106c1aa87f13a8c8b56a1224fac152f5c \ - 
--hash=sha256:de5b6be29116e094c5ef9d9e4252e7eb143e3d5f6bd6d50a78075553ab4930b0 \ - --hash=sha256:e4a3cdba62b2d6aeae6027ae65f350de6dc082b72e6215eccf82628e79efe9ba \ - --hash=sha256:e75acfa52daf5ea0712e8aa82f0003bba964de7ae22c26d208cbd7bc08500177 \ - --hash=sha256:f40cebe5edb518d78b8131e87cb83b3ee688984de38a232024b9b44e74ee53d3 \ - --hash=sha256:f62652ddcadc75d0e7aa629e96bb61658f85a993e748333715b4ab667192e4e8 \ - --hash=sha256:ff5a84907e51924973aa05ed8759210d8cdae7ffcf9e44fd17646cf4a902df59 +grpcio==1.65.4 \ + --hash=sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257 \ + --hash=sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d \ + --hash=sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52 \ + --hash=sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c \ + --hash=sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e \ + --hash=sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28 \ + --hash=sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a \ + --hash=sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b \ + --hash=sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2 \ + --hash=sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39 \ + --hash=sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18 \ + --hash=sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a \ + --hash=sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74 \ + --hash=sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d \ + --hash=sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021 \ + --hash=sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e \ + --hash=sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7 \ + 
--hash=sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82 \ + --hash=sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1 \ + --hash=sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63 \ + --hash=sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416 \ + --hash=sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef \ + --hash=sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec \ + --hash=sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733 \ + --hash=sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17 \ + --hash=sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1 \ + --hash=sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e \ + --hash=sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2 \ + --hash=sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56 \ + --hash=sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9 \ + --hash=sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385 \ + --hash=sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f \ + --hash=sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8 \ + --hash=sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5 \ + --hash=sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4 \ + --hash=sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe \ + --hash=sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4 \ + --hash=sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b \ + --hash=sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816 \ + --hash=sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864 \ + --hash=sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558 \ + 
--hash=sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0 \ + --hash=sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8 \ + --hash=sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a \ + --hash=sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524 \ + --hash=sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de # via # googleapis-common-protos # grpc-google-iam-v1 @@ -306,18 +306,18 @@ proto-plus==1.24.0 \ # via # -r requirements.in # google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 +protobuf==5.27.3 \ + --hash=sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035 \ + --hash=sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7 \ + --hash=sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f \ + --hash=sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c \ + --hash=sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5 \ + --hash=sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25 
\ + --hash=sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1 \ + --hash=sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce \ + --hash=sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e \ + --hash=sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf \ + --hash=sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b # via # -r requirements.in # google-api-core From cbcf1a71e71e3a25fcb25cd346fa1200b15c41fc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 16:33:53 -0400 Subject: [PATCH 1167/1339] chore(python): fix docs build (#2086) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/docker/docs/Dockerfile | 9 ++++----- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index f30cb3775afc..6d064ddb9b06 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e -# created: 2024-07-08T19:25:35.862283192Z + digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 +# created: 2024-07-31T14:52:44.926548819Z diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile index 5205308b334d..e5410e296bd8 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] From 4951fd334595318c80c6806f8ad7fc1f94e61239 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 6 Aug 2024 14:34:41 -0600 Subject: [PATCH 1168/1339] chore: fix mock runtime warnings (#2085) Co-authored-by: Anthonios Partheniou --- .../gapic/%name_%version/%sub/test_macros.j2 | 19 +- .../unit/gapic/asset_v1/test_asset_service.py | 227 ++++++------ .../credentials_v1/test_iam_credentials.py | 36 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 242 +++++++------ .../logging_v2/test_config_service_v2.py | 338 ++++++++++-------- .../logging_v2/test_logging_service_v2.py | 54 +-- .../logging_v2/test_metrics_service_v2.py | 45 +-- .../unit/gapic/redis_v1/test_cloud_redis.py | 179 
++++++---- 8 files changed, 644 insertions(+), 496 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index b53072887a56..dae3fb4c4327 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -230,8 +230,9 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 1 {% if method.lro or method.extended_lro %} - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() {% endif %} @@ -321,8 +322,9 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.{{method.transport_safe_name|snake_case}} in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.{{method.transport_safe_name|snake_case}}] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.{{method.transport_safe_name|snake_case}}] = mock_rpc {% if method.client_streaming %} request = [{}] @@ -333,11 +335,12 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% endif %} # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 {% if method.lro or method.extended_lro %} - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() {% endif %} @@ -349,7 +352,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 980beb35d2a1..ef0f2259a7ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -874,8 +874,9 @@ def test_export_assets_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_assets(request) @@ -924,24 +925,26 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.export_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.export_assets] = mock_rpc request = {} await client.export_assets(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): @@ -1197,20 +1200,21 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.list_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_assets] = mock_rpc request = {} await client.list_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): @@ -1740,20 +1744,21 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: assert client._client._transport.batch_get_assets_history in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_get_assets_history] = mock_rpc request = {} await client.batch_get_assets_history(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_assets_history(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): @@ -2020,20 +2025,21 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_feed] = mock_rpc request = {} await client.create_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): @@ -2390,20 +2396,21 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_feed] = mock_rpc request = {} await client.get_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): @@ -2745,20 +2752,21 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_feeds in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_feeds] = mock_rpc request = {} await client.list_feeds(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_feeds(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): @@ -3103,20 +3111,21 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.update_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_feed] = mock_rpc request = {} await client.update_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): @@ -3456,20 +3465,21 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_feed in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_feed] = mock_rpc request = {} await client.delete_feed(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_feed(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): @@ -3809,20 +3819,21 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.search_all_resources in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.search_all_resources] = mock_rpc request = {} await client.search_all_resources(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): @@ -4381,20 +4392,21 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s assert client._client._transport.search_all_iam_policies in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.search_all_iam_policies] = mock_rpc request = {} await client.search_all_iam_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.search_all_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): @@ -4937,20 +4949,21 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.analyze_iam_policy in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy] = mock_rpc request = {} await client.analyze_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_iam_policy(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): @@ -5156,8 +5169,9 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.analyze_iam_policy_longrunning(request) @@ -5206,24 +5220,26 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans assert client._client._transport.analyze_iam_policy_longrunning in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.analyze_iam_policy_longrunning] = mock_rpc request = {} await client.analyze_iam_policy_longrunning(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.analyze_iam_policy_longrunning(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): @@ -5476,20 +5492,21 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.analyze_move in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.analyze_move] = mock_rpc request = {} await client.analyze_move(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_move(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): @@ -5751,20 +5768,21 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.query_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.query_assets] = mock_rpc request = {} await client.query_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.query_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): @@ -6032,20 +6050,21 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.create_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_saved_query] = mock_rpc request = {} await client.create_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): @@ -6417,20 +6436,21 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.get_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_saved_query] = mock_rpc request = {} await client.get_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): @@ -6777,20 +6797,21 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.list_saved_queries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_saved_queries] = mock_rpc request = {} await client.list_saved_queries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_saved_queries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): @@ -7330,20 +7351,21 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.update_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_saved_query] = mock_rpc request = {} await client.update_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): @@ -7691,20 +7713,21 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.delete_saved_query in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_saved_query] = mock_rpc request = {} await client.delete_saved_query(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_saved_query(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): @@ -8035,20 +8058,21 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra assert client._client._transport.batch_get_effective_iam_policies in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.batch_get_effective_iam_policies] = mock_rpc request = {} await client.batch_get_effective_iam_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.batch_get_effective_iam_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): @@ -8307,20 +8331,21 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.analyze_org_policies in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.analyze_org_policies] = mock_rpc request = {} await client.analyze_org_policies(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policies(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): @@ -8879,20 +8904,21 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r assert client._client._transport.analyze_org_policy_governed_containers in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_containers] = mock_rpc request = {} await client.analyze_org_policy_governed_containers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policy_governed_containers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): @@ -9451,20 +9477,21 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t assert client._client._transport.analyze_org_policy_governed_assets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.analyze_org_policy_governed_assets] = mock_rpc request = {} await client.analyze_org_policy_governed_assets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.analyze_org_policy_governed_assets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index c07b6bdf38d8..46555f55f6ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -913,20 +913,21 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.generate_access_token in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.generate_access_token] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.generate_access_token] = mock_rpc request = {} await client.generate_access_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_access_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): @@ -1291,20 +1292,21 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.generate_id_token in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.generate_id_token] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.generate_id_token] = mock_rpc request = {} await client.generate_id_token(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.generate_id_token(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): @@ -1674,20 +1676,21 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert client._client._transport.sign_blob in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.sign_blob] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.sign_blob] = mock_rpc request = {} await client.sign_blob(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sign_blob(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): @@ -2051,20 +2054,21 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.sign_jwt in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.sign_jwt] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.sign_jwt] = mock_rpc request = {} await client.sign_jwt(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.sign_jwt(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 49890323b453..99f56b7e7ec8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -945,20 +945,21 @@ async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.get_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_trigger] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_trigger] = mock_rpc request = {} await client.get_trigger(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): @@ -1312,20 +1313,21 @@ async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.list_triggers in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_triggers] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_triggers] = mock_rpc request = {} await client.list_triggers(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_triggers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): @@ -1813,8 +1815,9 @@ def test_create_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_trigger(request) @@ -1863,24 +1866,26 @@ async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.create_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_trigger] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_trigger] = mock_rpc request = {} await client.create_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): @@ -2187,8 +2192,9 @@ def test_update_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_trigger(request) @@ -2237,24 +2243,26 @@ async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.update_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_trigger] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_trigger] = mock_rpc request = {} await client.update_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): @@ -2565,8 +2573,9 @@ def test_delete_trigger_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_trigger(request) @@ -2615,24 +2624,26 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.delete_trigger in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_trigger] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_trigger] = mock_rpc request = {} await client.delete_trigger(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): @@ -2996,20 +3007,21 @@ async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.get_channel in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_channel] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_channel] = mock_rpc request = {} await client.get_channel(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): @@ -3363,20 +3375,21 @@ async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.list_channels in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_channels] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_channels] = mock_rpc request = {} await client.list_channels(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channels(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): @@ -3864,8 +3877,9 @@ def test_create_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_channel(request) @@ -3914,24 +3928,26 @@ async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.create_channel_ in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_channel_] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_channel_] = mock_rpc request = {} await client.create_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): @@ -4238,8 +4254,9 @@ def test_update_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_channel(request) @@ -4288,24 +4305,26 @@ async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.update_channel in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_channel] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_channel] = mock_rpc request = {} await client.update_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): @@ -4604,8 +4623,9 @@ def test_delete_channel_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_channel(request) @@ -4654,24 +4674,26 @@ async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.delete_channel in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_channel] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_channel] = mock_rpc request = {} await client.delete_channel(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_channel(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): @@ -5012,20 +5034,21 @@ async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.get_provider in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_provider] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_provider] = mock_rpc request = {} await client.get_provider(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_provider(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): @@ -5373,20 +5396,21 @@ async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.list_providers in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_providers] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_providers] = mock_rpc request = {} await client.list_providers(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_providers(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): @@ -5930,20 +5954,21 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: st assert client._client._transport.get_channel_connection in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_channel_connection] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_channel_connection] = mock_rpc request = {} await client.get_channel_connection(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_channel_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): @@ -6291,20 +6316,21 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: assert client._client._transport.list_channel_connections in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_channel_connections] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_channel_connections] = mock_rpc request = {} await client.list_channel_connections(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_channel_connections(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): @@ -6792,8 +6818,9 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_channel_connection(request) @@ -6842,24 +6869,26 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: assert client._client._transport.create_channel_connection in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_channel_connection] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_channel_connection] = mock_rpc request = {} await client.create_channel_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_channel_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): @@ -7168,8 +7197,9 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_channel_connection(request) @@ -7218,24 +7248,26 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: assert client._client._transport.delete_channel_connection in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_channel_connection] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_channel_connection] = mock_rpc request = {} await client.delete_channel_connection(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_channel_connection(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): @@ -7576,20 +7608,21 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: assert client._client._transport.get_google_channel_config in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_google_channel_config] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_google_channel_config] = mock_rpc request = {} await client.get_google_channel_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_google_channel_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): @@ -7929,20 +7962,21 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transpo assert client._client._transport.update_google_channel_config in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_google_channel_config] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_google_channel_config] = mock_rpc request = {} await client.update_google_channel_config(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_google_channel_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index f8cd2f5efca1..612b98f70848 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -902,20 +902,21 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.list_buckets in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_rpc request = {} await client.list_buckets(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_buckets(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): @@ -1466,20 +1467,21 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.get_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_rpc request = {} await client.get_bucket(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_bucket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): @@ -1699,8 +1701,9 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_bucket_async(request) @@ -1749,24 +1752,26 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.create_bucket_async in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_rpc request = {} await client.create_bucket_async(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_bucket_async(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): @@ -1971,8 +1976,9 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_bucket_async(request) @@ -2021,24 +2027,26 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = assert client._client._transport.update_bucket_async in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_rpc request = {} await client.update_bucket_async(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_bucket_async(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): @@ -2312,20 +2320,21 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.create_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_rpc request = {} await client.create_bucket(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_bucket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): @@ -2610,20 +2619,21 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.update_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_rpc request = {} await client.update_bucket(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_bucket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): @@ -2885,20 +2895,21 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.delete_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_rpc request = {} await client.delete_bucket(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_bucket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): @@ -3145,20 +3156,21 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.undelete_bucket in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_rpc request = {} await client.undelete_bucket(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.undelete_bucket(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): @@ -3412,20 +3424,21 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_views in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_rpc request = {} await client.list_views(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_views(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): @@ -3964,20 +3977,21 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_rpc request = {} await client.get_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): @@ -4244,20 +4258,21 @@ async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_view] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_view] = mock_rpc request = {} await client.create_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): @@ -4522,20 +4537,21 @@ async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.update_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_rpc request = {} await client.update_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): @@ -4789,20 +4805,21 @@ async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_rpc request = {} await client.delete_view(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_view(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): @@ -5056,20 +5073,21 @@ async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_sinks in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_sinks] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_sinks] = mock_rpc request = {} await client.list_sinks(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_sinks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): @@ -5623,20 +5641,21 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_rpc request = {} await client.get_sink(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): @@ -6008,20 +6027,21 @@ async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_rpc request = {} await client.create_sink(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): @@ -6403,20 +6423,21 @@ async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.update_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_rpc request = {} await client.update_sink(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): @@ -6782,20 +6803,21 @@ async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_rpc request = {} await client.delete_sink(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_sink(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): @@ -7082,8 +7104,9 @@ def test_create_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_link(request) @@ -7132,24 +7155,26 @@ async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.create_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_rpc request = {} await client.create_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): @@ -7458,8 +7483,9 @@ def test_delete_link_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_link(request) @@ -7508,24 +7534,26 @@ async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a assert client._client._transport.delete_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_rpc request = {} await client.delete_link(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): @@ -7865,20 +7893,21 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.list_links in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_rpc request = {} await client.list_links(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_links(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): @@ -8417,20 +8446,21 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn assert client._client._transport.get_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_rpc request = {} await client.get_link(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): @@ -8773,20 +8803,21 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.list_exclusions in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_rpc request = {} await client.list_exclusions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_exclusions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): @@ -9328,20 +9359,21 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc assert client._client._transport.get_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_rpc request = {} await client.get_exclusion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): @@ -9693,20 +9725,21 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.create_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_rpc request = {} await client.create_exclusion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): @@ -10068,20 +10101,21 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.update_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_rpc request = {} await client.update_exclusion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): @@ -10439,20 +10473,21 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.delete_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_rpc request = {} await client.delete_exclusion(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_exclusion(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): @@ -10795,20 +10830,21 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.get_cmek_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_rpc request = {} await client.get_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_cmek_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): @@ -11078,20 +11114,21 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str assert client._client._transport.update_cmek_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_rpc request = {} await client.update_cmek_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_cmek_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): @@ -11364,20 +11401,21 @@ async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.get_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_rpc request = {} await client.get_settings(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): @@ -11734,20 +11772,21 @@ async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.update_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_rpc request = {} await client.update_settings(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_settings(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): @@ -12057,8 +12096,9 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.copy_log_entries(request) @@ -12107,24 +12147,26 @@ async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_rpc request = {} await client.copy_log_entries(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.copy_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 7f463657f720..f3c4bb8c9554 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -897,20 +897,21 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as assert client._client._transport.delete_log in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_rpc request = {} await client.delete_log(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_log(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): @@ -1241,20 +1242,21 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.write_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_rpc request = {} await client.write_log_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.write_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): @@ -1560,20 +1562,21 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.list_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_rpc request = {} await client.list_log_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): @@ -2058,20 +2061,21 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( assert client._client._transport.list_monitored_resource_descriptors in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_rpc request = {} await client.list_monitored_resource_descriptors(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_monitored_resource_descriptors(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): @@ -2459,20 +2463,21 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy assert client._client._transport.list_logs in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_rpc request = {} await client.list_logs(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_logs(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): @@ -2938,20 +2943,21 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.tail_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_rpc request = [{}] await client.tail_log_entries(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.tail_log_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index f144f8729451..c0917891e97d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -902,20 +902,21 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.list_log_metrics in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_rpc request = {} await client.list_log_metrics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_log_metrics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): @@ -1466,20 +1467,21 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.get_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_rpc request = {} await client.get_log_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): @@ -1846,20 +1848,21 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.create_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_rpc request = {} await client.create_log_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): @@ -2236,20 +2239,21 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.update_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_rpc request = {} await client.update_log_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): @@ -2603,20 +2607,21 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.delete_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_rpc request = {} await client.delete_log_metric(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_log_metric(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 224d961e4311..5abdbbe8c2ca 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -929,20 +929,21 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp assert client._client._transport.list_instances in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc request = {} await client.list_instances(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_instances(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): @@ -1555,20 +1556,21 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ assert client._client._transport.get_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc request = {} await client.get_instance(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): @@ -1957,20 +1959,21 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: assert client._client._transport.get_instance_auth_string in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.get_instance_auth_string] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_instance_auth_string] = mock_rpc request = {} await client.get_instance_auth_string(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_instance_auth_string(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): @@ -2260,8 +2263,9 @@ def test_create_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.create_instance(request) @@ -2310,24 +2314,26 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.create_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc request = {} await client.create_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.create_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): @@ -2634,8 +2640,9 @@ def test_update_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.update_instance(request) @@ -2684,24 +2691,26 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.update_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc request = {} await client.update_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.update_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): @@ -3002,8 +3011,9 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.upgrade_instance(request) @@ -3052,24 +3062,26 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "g assert client._client._transport.upgrade_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.upgrade_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.upgrade_instance] = mock_rpc request = {} await client.upgrade_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.upgrade_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): @@ -3368,8 +3380,9 @@ def test_import_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.import_instance(request) @@ -3418,24 +3431,26 @@ async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.import_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.import_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.import_instance] = mock_rpc request = {} await client.import_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.import_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): @@ -3734,8 +3749,9 @@ def test_export_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.export_instance(request) @@ -3784,24 +3800,26 @@ async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.export_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.export_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.export_instance] = mock_rpc request = {} await client.export_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.export_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): @@ -4100,8 +4118,9 @@ def test_failover_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.failover_instance(request) @@ -4150,24 +4169,26 @@ async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = " assert client._client._transport.failover_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.failover_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.failover_instance] = mock_rpc request = {} await client.failover_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.failover_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): @@ -4466,8 +4487,9 @@ def test_delete_instance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.delete_instance(request) @@ -4516,24 +4538,26 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr assert client._client._transport.delete_instance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc request = {} await client.delete_instance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.delete_instance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): @@ -4822,8 +4846,9 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() client.reschedule_maintenance(request) @@ -4872,24 +4897,26 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: st assert client._client._transport.reschedule_maintenance in client._client._transport._wrapped_methods # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() - client._client._transport._wrapped_methods[client._client._transport.reschedule_maintenance] = mock_object + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.reschedule_maintenance] = mock_rpc request = {} await client.reschedule_maintenance(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() await client.reschedule_maintenance(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): From 47324d403a9402b3e47911fd69a45258164a7199 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2024 16:48:27 -0400 Subject: [PATCH 1169/1339] chore(main): release 1.18.5 (#2084) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a23bb08b5b33..3e62651ae50c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.18.5](https://github.com/googleapis/gapic-generator-python/compare/v1.18.4...v1.18.5) (2024-08-06) + + +### Bug Fixes + +* Mypy types in get_transport ([#2088](https://github.com/googleapis/gapic-generator-python/issues/2088)) ([f76fdaf](https://github.com/googleapis/gapic-generator-python/commit/f76fdaf1f717e99caef4c0ce217eef3322df9a30)) +* Require google.shopping.type >= 0.1.6 ([#2083](https://github.com/googleapis/gapic-generator-python/issues/2083)) ([1b63310](https://github.com/googleapis/gapic-generator-python/commit/1b63310378692f478ef29734b3cb5fde3d436b22)) + ## [1.18.4](https://github.com/googleapis/gapic-generator-python/compare/v1.18.3...v1.18.4) (2024-07-26) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 
3dcb12f3672b..5c794454d8a1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.4" +version = "1.18.5" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 7cd4e6fdb1ff470df2b1e4abaa2c8660ee180813 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 26 Aug 2024 11:25:26 -0400 Subject: [PATCH 1170/1339] feat: leverage async anonymous credentials in tests (#2105) --- .../%name_%version/%sub/_test_mixins.py.j2 | 78 ++-- .../%name_%version/%sub/test_%service.py.j2 | 19 +- .../gapic/%name_%version/%sub/test_macros.j2 | 16 +- .../unit/gapic/asset_v1/test_asset_service.py | 297 +++++++------- .../credentials_v1/test_iam_credentials.py | 63 +-- .../unit/gapic/eventarc_v1/test_eventarc.py | 301 +++++++------- .../logging_v2/test_config_service_v2.py | 379 +++++++++--------- .../logging_v2/test_logging_service_v2.py | 99 +++-- .../logging_v2/test_metrics_service_v2.py | 97 +++-- .../unit/gapic/redis_v1/test_cloud_redis.py | 187 +++++---- .../gapic-generator/tests/system/conftest.py | 60 ++- 11 files changed, 869 insertions(+), 727 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index eaa40e24dc2d..33cbd8117d51 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -78,7 +78,7 @@ def test_delete_operation(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -120,7 +120,7 @@ def test_delete_operation_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -164,7 +164,7 @@ def test_delete_operation_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. @@ -207,7 +207,7 @@ def test_cancel_operation(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -249,7 +249,7 @@ def test_cancel_operation_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -293,7 +293,7 @@ def test_cancel_operation_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -335,7 +335,7 @@ def test_wait_operation(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -377,7 +377,7 @@ def test_wait_operation_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -421,7 +421,7 @@ def test_wait_operation_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. @@ -464,7 +464,7 @@ def test_get_operation(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -506,7 +506,7 @@ def test_get_operation_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -550,7 +550,7 @@ def test_get_operation_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -593,7 +593,7 @@ def test_list_operations(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -635,7 +635,7 @@ def test_list_operations_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -679,7 +679,7 @@ def test_list_operations_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. @@ -728,7 +728,7 @@ def test_list_locations(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -770,7 +770,7 @@ def test_list_locations_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -814,7 +814,7 @@ def test_list_locations_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. 
@@ -859,7 +859,7 @@ def test_get_location(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -900,7 +900,7 @@ def test_get_location_field_headers(): credentials=ga_credentials.AnonymousCredentials()) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials() + credentials=async_anonymous_credentials() ) {% endif %} @@ -944,7 +944,7 @@ def test_get_location_from_dict(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} # Mock the actual call within the gRPC stub, and fake the request. @@ -990,7 +990,7 @@ def test_set_iam_policy(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -1038,7 +1038,7 @@ def test_set_iam_policy_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -1090,7 +1090,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1141,7 +1141,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1202,7 +1202,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1248,7 +1248,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1301,7 +1301,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1362,7 +1362,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1412,7 +1412,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1449,7 +1449,7 @@ def test_set_iam_policy(transport: str = "grpc"): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) {% endif %} @@ -1497,7 +1497,7 @@ def test_set_iam_policy_field_headers(): ) {% else %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) {% endif %} @@ -1549,7 +1549,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -1601,7 +1601,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1662,7 +1662,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1708,7 +1708,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the 
actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -1762,7 +1762,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1823,7 +1823,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1873,7 +1873,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 45bbb2d7e9c1..82948346e61d 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -34,10 +34,18 @@ from requests.sessions import Session from google.protobuf import json_format {% endif %} +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + {# Import the service itself as well as every proto module that it imports. #} {% filter sort_lines %} import google.auth from google.auth import credentials as ga_credentials + + from google.auth.exceptions import MutualTLSChannelError from google.oauth2 import service_account from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + service.meta.address.subpackage)|join(".") }}.services.{{ service.name|snake_case }} import {{ service.client_name }} @@ -84,6 +92,13 @@ from google.iam.v1 import policy_pb2 # type: ignore def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + {#TODO(https://github.com/googleapis/gapic-generator-python/issues/1894): Remove this function as part of cleanup when DEFAULT_ENDPOINT is no longer used.#} # If default endpoint is localhost, then default mtls endpoint will be the same. 
# This method modifies the default endpoint so the client can produce a different @@ -185,7 +200,7 @@ def test__get_api_endpoint(): {% for mode in ["", "async"] %} {% if mode == "async" %} async def test_{{ method_name }}_api_version_header_async(transport_name="grpc"): - client = {{ service.async_client_name }}(credentials=ga_credentials.AnonymousCredentials(), transport=transport_name) + client = {{ service.async_client_name }}(credentials=async_anonymous_credentials(), transport=transport_name) {% else %} @pytest.mark.parametrize("transport_name", [ {% if 'grpc' in opts.transport %} @@ -1657,7 +1672,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index dae3fb4c4327..711de9f34de1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -254,7 +254,7 @@ async def test_{{ method_name }}_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -310,7 +310,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -358,7 +358,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_type={{ method.input.ident }}): {% with auto_populated_field_sample_value = "explicit value for autopopulate-able field" %} client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -563,7 +563,7 @@ def test_{{ method_name }}_field_headers(): @pytest.mark.asyncio async def test_{{ method_name }}_field_headers_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -716,7 +716,7 @@ def test_{{ method_name }}_flattened_error(): @pytest.mark.asyncio async def test_{{ method_name }}_flattened_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -795,7 +795,7 @@ async def test_{{ method_name }}_flattened_async(): @pytest.mark.asyncio async def test_{{ method_name }}_flattened_error_async(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -959,7 +959,7 @@ def test_{{ method_name }}_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_{{ method_name }}_async_pager(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1049,7 +1049,7 @@ async def test_{{ method_name }}_async_pager(): @pytest.mark.asyncio async def test_{{ method_name }}_async_pages(): client = {{ service.async_client_name }}( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index ef0f2259a7ee..8735a29496de 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -36,6 +36,12 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -68,6 +74,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -890,7 +903,7 @@ async def test_export_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -913,7 +926,7 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -949,7 +962,7 @@ async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ExportAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1016,7 +1029,7 @@ def test_export_assets_field_headers(): @pytest.mark.asyncio async def test_export_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1165,7 +1178,7 @@ async def test_list_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1188,7 +1201,7 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1219,7 +1232,7 @@ async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1287,7 +1300,7 @@ def test_list_assets_field_headers(): @pytest.mark.asyncio async def test_list_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1358,7 +1371,7 @@ def test_list_assets_flattened_error(): @pytest.mark.asyncio async def test_list_assets_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1386,7 +1399,7 @@ async def test_list_assets_flattened_async(): @pytest.mark.asyncio async def test_list_assets_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1500,7 +1513,7 @@ def test_list_assets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_assets_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1549,7 +1562,7 @@ async def test_list_assets_async_pager(): @pytest.mark.asyncio async def test_list_assets_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1710,7 +1723,7 @@ async def test_batch_get_assets_history_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1732,7 +1745,7 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1763,7 +1776,7 @@ async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: @pytest.mark.asyncio async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetAssetsHistoryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1829,7 +1842,7 @@ def test_batch_get_assets_history_field_headers(): @pytest.mark.asyncio async def test_batch_get_assets_history_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1986,7 +1999,7 @@ async def test_create_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2013,7 +2026,7 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2044,7 +2057,7 @@ async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2120,7 +2133,7 @@ def test_create_feed_field_headers(): @pytest.mark.asyncio async def test_create_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2191,7 +2204,7 @@ def test_create_feed_flattened_error(): @pytest.mark.asyncio async def test_create_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2219,7 +2232,7 @@ async def test_create_feed_flattened_async(): @pytest.mark.asyncio async def test_create_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2357,7 +2370,7 @@ async def test_get_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2384,7 +2397,7 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2415,7 +2428,7 @@ async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2491,7 +2504,7 @@ def test_get_feed_field_headers(): @pytest.mark.asyncio async def test_get_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2562,7 +2575,7 @@ def test_get_feed_flattened_error(): @pytest.mark.asyncio async def test_get_feed_flattened_async(): client = AssetServiceAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2590,7 +2603,7 @@ async def test_get_feed_flattened_async(): @pytest.mark.asyncio async def test_get_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2718,7 +2731,7 @@ async def test_list_feeds_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2740,7 +2753,7 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2771,7 +2784,7 @@ async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListFeedsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2837,7 +2850,7 @@ def test_list_feeds_field_headers(): @pytest.mark.asyncio async def test_list_feeds_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2908,7 +2921,7 @@ def 
test_list_feeds_flattened_error(): @pytest.mark.asyncio async def test_list_feeds_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2936,7 +2949,7 @@ async def test_list_feeds_flattened_async(): @pytest.mark.asyncio async def test_list_feeds_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3072,7 +3085,7 @@ async def test_update_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3099,7 +3112,7 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3130,7 +3143,7 @@ async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3206,7 +3219,7 @@ def test_update_feed_field_headers(): @pytest.mark.asyncio async def test_update_feed_field_headers_async(): client = AssetServiceAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3277,7 +3290,7 @@ def test_update_feed_flattened_error(): @pytest.mark.asyncio async def test_update_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3305,7 +3318,7 @@ async def test_update_feed_flattened_async(): @pytest.mark.asyncio async def test_update_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3432,7 +3445,7 @@ async def test_delete_feed_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3453,7 +3466,7 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3484,7 +3497,7 @@ async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteFeedRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3549,7 +3562,7 @@ def test_delete_feed_field_headers(): @pytest.mark.asyncio async def test_delete_feed_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3620,7 +3633,7 @@ def test_delete_feed_flattened_error(): @pytest.mark.asyncio async def test_delete_feed_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3648,7 +3661,7 @@ async def test_delete_feed_flattened_async(): @pytest.mark.asyncio async def test_delete_feed_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3784,7 +3797,7 @@ async def test_search_all_resources_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3807,7 +3820,7 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3838,7 +3851,7 @@ async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str @pytest.mark.asyncio async def test_search_all_resources_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllResourcesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3906,7 +3919,7 @@ def test_search_all_resources_field_headers(): @pytest.mark.asyncio async def test_search_all_resources_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3987,7 +4000,7 @@ def test_search_all_resources_flattened_error(): @pytest.mark.asyncio async def 
test_search_all_resources_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4023,7 +4036,7 @@ async def test_search_all_resources_flattened_async(): @pytest.mark.asyncio async def test_search_all_resources_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4139,7 +4152,7 @@ def test_search_all_resources_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_search_all_resources_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4188,7 +4201,7 @@ async def test_search_all_resources_async_pager(): @pytest.mark.asyncio async def test_search_all_resources_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4357,7 +4370,7 @@ async def test_search_all_iam_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4380,7 +4393,7 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4411,7 +4424,7 @@ async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: s @pytest.mark.asyncio async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.SearchAllIamPoliciesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4479,7 +4492,7 @@ def test_search_all_iam_policies_field_headers(): @pytest.mark.asyncio async def test_search_all_iam_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4555,7 +4568,7 @@ def test_search_all_iam_policies_flattened_error(): @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4587,7 +4600,7 @@ async def test_search_all_iam_policies_flattened_async(): @pytest.mark.asyncio async def test_search_all_iam_policies_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4702,7 +4715,7 @@ def test_search_all_iam_policies_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_search_all_iam_policies_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4751,7 +4764,7 @@ async def test_search_all_iam_policies_async_pager(): @pytest.mark.asyncio async def test_search_all_iam_policies_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4914,7 +4927,7 @@ async def test_analyze_iam_policy_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4937,7 +4950,7 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4968,7 +4981,7 @@ async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5036,7 +5049,7 @@ def test_analyze_iam_policy_field_headers(): @pytest.mark.asyncio async def test_analyze_iam_policy_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5185,7 +5198,7 @@ async def test_analyze_iam_policy_longrunning_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5208,7 +5221,7 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5244,7 +5257,7 @@ async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(trans @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5311,7 +5324,7 @@ def test_analyze_iam_policy_longrunning_field_headers(): @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5458,7 +5471,7 @@ async def test_analyze_move_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5480,7 +5493,7 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5511,7 +5524,7 @@ async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_ @pytest.mark.asyncio async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeMoveRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5577,7 +5590,7 @@ def test_analyze_move_field_headers(): @pytest.mark.asyncio async def test_analyze_move_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5732,7 +5745,7 @@ async def test_query_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5756,7 +5769,7 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5787,7 +5800,7 @@ async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_ @pytest.mark.asyncio async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.QueryAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5857,7 +5870,7 @@ def test_query_assets_field_headers(): @pytest.mark.asyncio async def test_query_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6012,7 +6025,7 @@ async def test_create_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6038,7 +6051,7 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6069,7 +6082,7 @@ async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.CreateSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6143,7 +6156,7 @@ def test_create_saved_query_field_headers(): @pytest.mark.asyncio async def test_create_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6224,7 +6237,7 @@ def test_create_saved_query_flattened_error(): @pytest.mark.asyncio async def test_create_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6260,7 +6273,7 @@ async def test_create_saved_query_flattened_async(): @pytest.mark.asyncio async def test_create_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6398,7 +6411,7 @@ async def test_get_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6424,7 +6437,7 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6455,7 +6468,7 @@ async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_get_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.GetSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6529,7 +6542,7 @@ def test_get_saved_query_field_headers(): @pytest.mark.asyncio async def test_get_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6600,7 +6613,7 @@ def test_get_saved_query_flattened_error(): @pytest.mark.asyncio async def test_get_saved_query_flattened_async(): client = 
AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6628,7 +6641,7 @@ async def test_get_saved_query_flattened_async(): @pytest.mark.asyncio async def test_get_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6762,7 +6775,7 @@ async def test_list_saved_queries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6785,7 +6798,7 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6816,7 +6829,7 @@ async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request_type=asset_service.ListSavedQueriesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6884,7 +6897,7 @@ def test_list_saved_queries_field_headers(): @pytest.mark.asyncio async def test_list_saved_queries_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value 
that is part of the HTTP/1.1 URI should be sent as @@ -6955,7 +6968,7 @@ def test_list_saved_queries_flattened_error(): @pytest.mark.asyncio async def test_list_saved_queries_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6983,7 +6996,7 @@ async def test_list_saved_queries_flattened_async(): @pytest.mark.asyncio async def test_list_saved_queries_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7097,7 +7110,7 @@ def test_list_saved_queries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_saved_queries_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7146,7 +7159,7 @@ async def test_list_saved_queries_async_pager(): @pytest.mark.asyncio async def test_list_saved_queries_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7313,7 +7326,7 @@ async def test_update_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7339,7 +7352,7 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7370,7 +7383,7 @@ async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.UpdateSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7444,7 +7457,7 @@ def test_update_saved_query_field_headers(): @pytest.mark.asyncio async def test_update_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7520,7 +7533,7 @@ def test_update_saved_query_flattened_error(): @pytest.mark.asyncio async def test_update_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7552,7 +7565,7 @@ async def test_update_saved_query_flattened_async(): @pytest.mark.asyncio async def test_update_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7680,7 +7693,7 @@ async def test_delete_saved_query_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7701,7 +7714,7 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7732,7 +7745,7 @@ async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request_type=asset_service.DeleteSavedQueryRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7797,7 +7810,7 @@ def test_delete_saved_query_field_headers(): @pytest.mark.asyncio async def test_delete_saved_query_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7868,7 +7881,7 @@ def test_delete_saved_query_flattened_error(): @pytest.mark.asyncio async def 
test_delete_saved_query_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7896,7 +7909,7 @@ async def test_delete_saved_query_flattened_async(): @pytest.mark.asyncio async def test_delete_saved_query_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8024,7 +8037,7 @@ async def test_batch_get_effective_iam_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -8046,7 +8059,7 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8077,7 +8090,7 @@ async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(tra @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8143,7 +8156,7 @@ def test_batch_get_effective_iam_policies_field_headers(): @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_field_headers_async(): client = 
AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8296,7 +8309,7 @@ async def test_analyze_org_policies_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -8319,7 +8332,7 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8350,7 +8363,7 @@ async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str @pytest.mark.asyncio async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPoliciesRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8418,7 +8431,7 @@ def test_analyze_org_policies_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policies_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8499,7 +8512,7 @@ def test_analyze_org_policies_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policies_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual 
call within the gRPC stub, and fake the request. @@ -8535,7 +8548,7 @@ async def test_analyze_org_policies_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policies_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8651,7 +8664,7 @@ def test_analyze_org_policies_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_analyze_org_policies_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8700,7 +8713,7 @@ async def test_analyze_org_policies_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policies_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8869,7 +8882,7 @@ async def test_analyze_org_policy_governed_containers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -8892,7 +8905,7 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8923,7 +8936,7 @@ async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_r @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8991,7 +9004,7 @@ def test_analyze_org_policy_governed_containers_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9072,7 +9085,7 @@ def test_analyze_org_policy_governed_containers_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -9108,7 +9121,7 @@ async def test_analyze_org_policy_governed_containers_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9224,7 +9237,7 @@ def test_analyze_org_policy_governed_containers_pages(transport_name: str = "grp @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9273,7 +9286,7 @@ async def test_analyze_org_policy_governed_containers_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9442,7 +9455,7 @@ async def test_analyze_org_policy_governed_assets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -9465,7 +9478,7 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9496,7 +9509,7 @@ async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(t @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_asyncio', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9564,7 +9577,7 @@ def test_analyze_org_policy_governed_assets_field_headers(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9645,7 +9658,7 @@ def test_analyze_org_policy_governed_assets_flattened_error(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -9681,7 +9694,7 @@ async def test_analyze_org_policy_governed_assets_flattened_async(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_flattened_error_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9797,7 +9810,7 @@ def test_analyze_org_policy_governed_assets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_pager(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9846,7 +9859,7 @@ async def test_analyze_org_policy_governed_assets_async_pager(): @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_pages(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -17144,7 +17157,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -17225,7 +17238,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -17273,7 +17286,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -17314,7 +17327,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = AssetServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 46555f55f6ef..61ef5a893fc7 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -36,6 +36,12 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -58,6 +64,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -878,7 +891,7 @@ async def test_generate_access_token_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -901,7 +914,7 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -932,7 +945,7 @@ async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str @pytest.mark.asyncio async def test_generate_access_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateAccessTokenRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1000,7 +1013,7 @@ def test_generate_access_token_field_headers(): @pytest.mark.asyncio async def test_generate_access_token_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1084,7 +1097,7 @@ def test_generate_access_token_flattened_error(): @pytest.mark.asyncio async def test_generate_access_token_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1122,7 +1135,7 @@ async def test_generate_access_token_flattened_async(): @pytest.mark.asyncio async def test_generate_access_token_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1257,7 +1270,7 @@ async def test_generate_id_token_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1280,7 +1293,7 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1311,7 +1324,7 @@ async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_type=common.GenerateIdTokenRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1379,7 +1392,7 @@ def test_generate_id_token_field_headers(): @pytest.mark.asyncio async def test_generate_id_token_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1465,7 +1478,7 @@ def test_generate_id_token_flattened_error(): @pytest.mark.asyncio async def 
test_generate_id_token_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1505,7 +1518,7 @@ async def test_generate_id_token_flattened_async(): @pytest.mark.asyncio async def test_generate_id_token_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1640,7 +1653,7 @@ async def test_sign_blob_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1664,7 +1677,7 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1695,7 +1708,7 @@ async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=common.SignBlobRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1765,7 +1778,7 @@ def test_sign_blob_field_headers(): @pytest.mark.asyncio async def test_sign_blob_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1846,7 +1859,7 @@ def test_sign_blob_flattened_error(): @pytest.mark.asyncio async def test_sign_blob_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1882,7 +1895,7 @@ async def test_sign_blob_flattened_async(): @pytest.mark.asyncio async def test_sign_blob_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2018,7 +2031,7 @@ async def test_sign_jwt_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2042,7 +2055,7 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2073,7 +2086,7 @@ async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=common.SignJwtRequest): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2143,7 +2156,7 @@ def test_sign_jwt_field_headers(): @pytest.mark.asyncio async def 
test_sign_jwt_field_headers_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2224,7 +2237,7 @@ def test_sign_jwt_flattened_error(): @pytest.mark.asyncio async def test_sign_jwt_flattened_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2260,7 +2273,7 @@ async def test_sign_jwt_flattened_async(): @pytest.mark.asyncio async def test_sign_jwt_flattened_error_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4005,7 +4018,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = IAMCredentialsAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 99f56b7e7ec8..b5a233ef0afc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -36,6 +36,12 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # 
pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -78,6 +84,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -906,7 +919,7 @@ async def test_get_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -933,7 +946,7 @@ async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -964,7 +977,7 @@ async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1040,7 +1053,7 @@ def test_get_trigger_field_headers(): @pytest.mark.asyncio async def test_get_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1111,7 +1124,7 @@ def test_get_trigger_flattened_error(): @pytest.mark.asyncio async def test_get_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1139,7 +1152,7 @@ async def test_get_trigger_flattened_async(): @pytest.mark.asyncio async def test_get_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1277,7 +1290,7 @@ async def test_list_triggers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1301,7 +1314,7 @@ async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1332,7 +1345,7 @@ async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListTriggersRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1402,7 +1415,7 @@ def test_list_triggers_field_headers(): @pytest.mark.asyncio async def test_list_triggers_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1473,7 +1486,7 @@ def test_list_triggers_flattened_error(): @pytest.mark.asyncio async def test_list_triggers_flattened_async(): client = EventarcAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1501,7 +1514,7 @@ async def test_list_triggers_flattened_async(): @pytest.mark.asyncio async def test_list_triggers_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1615,7 +1628,7 @@ def test_list_triggers_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_triggers_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1664,7 +1677,7 @@ async def test_list_triggers_async_pager(): @pytest.mark.asyncio async def test_list_triggers_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1831,7 +1844,7 @@ async def test_create_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1854,7 +1867,7 @@ async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1890,7 +1903,7 @@ async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1957,7 +1970,7 @@ def test_create_trigger_field_headers(): @pytest.mark.asyncio async def test_create_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2038,7 +2051,7 @@ def test_create_trigger_flattened_error(): @pytest.mark.asyncio async def test_create_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2076,7 +2089,7 @@ async def test_create_trigger_flattened_async(): @pytest.mark.asyncio async def test_create_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2208,7 +2221,7 @@ async def test_update_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2231,7 +2244,7 @@ async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2267,7 +2280,7 @@ async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2334,7 +2347,7 @@ def test_update_trigger_field_headers(): @pytest.mark.asyncio async def test_update_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2415,7 +2428,7 @@ def test_update_trigger_flattened_error(): @pytest.mark.asyncio async def test_update_trigger_flattened_async(): client = EventarcAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2453,7 +2466,7 @@ async def test_update_trigger_flattened_async(): @pytest.mark.asyncio async def test_update_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2589,7 +2602,7 @@ async def test_delete_trigger_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2612,7 +2625,7 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2648,7 +2661,7 @@ async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteTriggerRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2715,7 +2728,7 @@ def test_delete_trigger_field_headers(): @pytest.mark.asyncio async def test_delete_trigger_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2791,7 +2804,7 @@ 
def test_delete_trigger_flattened_error(): @pytest.mark.asyncio async def test_delete_trigger_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2825,7 +2838,7 @@ async def test_delete_trigger_flattened_async(): @pytest.mark.asyncio async def test_delete_trigger_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2967,7 +2980,7 @@ async def test_get_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2995,7 +3008,7 @@ async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3026,7 +3039,7 @@ async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3104,7 +3117,7 @@ def test_get_channel_field_headers(): @pytest.mark.asyncio async def test_get_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3175,7 +3188,7 @@ def test_get_channel_flattened_error(): @pytest.mark.asyncio async def test_get_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3203,7 +3216,7 @@ async def test_get_channel_flattened_async(): @pytest.mark.asyncio async def test_get_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3339,7 +3352,7 @@ async def test_list_channels_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3363,7 +3376,7 @@ async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3394,7 +3407,7 @@ async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelsRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3464,7 +3477,7 @@ def test_list_channels_field_headers(): @pytest.mark.asyncio async def 
test_list_channels_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3535,7 +3548,7 @@ def test_list_channels_flattened_error(): @pytest.mark.asyncio async def test_list_channels_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3563,7 +3576,7 @@ async def test_list_channels_flattened_async(): @pytest.mark.asyncio async def test_list_channels_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3677,7 +3690,7 @@ def test_list_channels_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_channels_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3726,7 +3739,7 @@ async def test_list_channels_async_pager(): @pytest.mark.asyncio async def test_list_channels_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3893,7 +3906,7 @@ async def test_create_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3916,7 +3929,7 @@ async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3952,7 +3965,7 @@ async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_create_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4019,7 +4032,7 @@ def test_create_channel_field_headers(): @pytest.mark.asyncio async def test_create_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4100,7 +4113,7 @@ def test_create_channel_flattened_error(): @pytest.mark.asyncio async def test_create_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4138,7 +4151,7 @@ async def test_create_channel_flattened_async(): @pytest.mark.asyncio async def test_create_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4270,7 +4283,7 @@ async def test_update_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4293,7 +4306,7 @@ async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4329,7 +4342,7 @@ async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_update_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4396,7 +4409,7 @@ def test_update_channel_field_headers(): @pytest.mark.asyncio async def test_update_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4472,7 +4485,7 @@ def test_update_channel_flattened_error(): @pytest.mark.asyncio async def test_update_channel_flattened_async(): client = EventarcAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4506,7 +4519,7 @@ async def test_update_channel_flattened_async(): @pytest.mark.asyncio async def test_update_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4639,7 +4652,7 @@ async def test_delete_channel_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4662,7 +4675,7 @@ async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4698,7 +4711,7 @@ async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4765,7 +4778,7 @@ def test_delete_channel_field_headers(): @pytest.mark.asyncio async def test_delete_channel_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4836,7 +4849,7 @@ 
def test_delete_channel_flattened_error(): @pytest.mark.asyncio async def test_delete_channel_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4866,7 +4879,7 @@ async def test_delete_channel_flattened_async(): @pytest.mark.asyncio async def test_delete_channel_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4998,7 +5011,7 @@ async def test_get_provider_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5022,7 +5035,7 @@ async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_ # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5053,7 +5066,7 @@ async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_ @pytest.mark.asyncio async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetProviderRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5123,7 +5136,7 @@ def test_get_provider_field_headers(): @pytest.mark.asyncio async def test_get_provider_field_headers_async(): client = EventarcAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5194,7 +5207,7 @@ def test_get_provider_flattened_error(): @pytest.mark.asyncio async def test_get_provider_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5222,7 +5235,7 @@ async def test_get_provider_flattened_async(): @pytest.mark.asyncio async def test_get_provider_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5360,7 +5373,7 @@ async def test_list_providers_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5384,7 +5397,7 @@ async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5415,7 +5428,7 @@ async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_list_providers_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListProvidersRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5485,7 +5498,7 @@ def test_list_providers_field_headers(): @pytest.mark.asyncio async def test_list_providers_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5556,7 +5569,7 @@ def test_list_providers_flattened_error(): @pytest.mark.asyncio async def test_list_providers_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5584,7 +5597,7 @@ async def test_list_providers_flattened_async(): @pytest.mark.asyncio async def test_list_providers_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5698,7 +5711,7 @@ def test_list_providers_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_providers_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5747,7 +5760,7 @@ async def test_list_providers_async_pager(): @pytest.mark.asyncio async def test_list_providers_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5916,7 +5929,7 @@ async def test_get_channel_connection_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5942,7 +5955,7 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: st # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5973,7 +5986,7 @@ async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: st @pytest.mark.asyncio async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetChannelConnectionRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6047,7 +6060,7 @@ def test_get_channel_connection_field_headers(): @pytest.mark.asyncio async def test_get_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6118,7 +6131,7 @@ def test_get_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_get_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6146,7 +6159,7 @@ async def test_get_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_get_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6280,7 +6293,7 @@ async def test_list_channel_connections_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6304,7 +6317,7 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6335,7 +6348,7 @@ async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: @pytest.mark.asyncio async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', request_type=eventarc.ListChannelConnectionsRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6405,7 +6418,7 @@ def test_list_channel_connections_field_headers(): @pytest.mark.asyncio async def test_list_channel_connections_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6476,7 +6489,7 @@ def test_list_channel_connections_flattened_error(): @pytest.mark.asyncio async def 
test_list_channel_connections_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6504,7 +6517,7 @@ async def test_list_channel_connections_flattened_async(): @pytest.mark.asyncio async def test_list_channel_connections_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6618,7 +6631,7 @@ def test_list_channel_connections_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_channel_connections_async_pager(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6667,7 +6680,7 @@ async def test_list_channel_connections_async_pager(): @pytest.mark.asyncio async def test_list_channel_connections_async_pages(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6834,7 +6847,7 @@ async def test_create_channel_connection_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6857,7 +6870,7 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6893,7 +6906,7 @@ async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: @pytest.mark.asyncio async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.CreateChannelConnectionRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6960,7 +6973,7 @@ def test_create_channel_connection_field_headers(): @pytest.mark.asyncio async def test_create_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7041,7 +7054,7 @@ def test_create_channel_connection_flattened_error(): @pytest.mark.asyncio async def test_create_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7079,7 +7092,7 @@ async def test_create_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_create_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7213,7 +7226,7 @@ async def test_delete_channel_connection_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7236,7 +7249,7 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7272,7 +7285,7 @@ async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: @pytest.mark.asyncio async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', request_type=eventarc.DeleteChannelConnectionRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7339,7 +7352,7 @@ def test_delete_channel_connection_field_headers(): @pytest.mark.asyncio async def test_delete_channel_connection_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7410,7 +7423,7 @@ def test_delete_channel_connection_flattened_error(): @pytest.mark.asyncio async def 
test_delete_channel_connection_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7440,7 +7453,7 @@ async def test_delete_channel_connection_flattened_async(): @pytest.mark.asyncio async def test_delete_channel_connection_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7572,7 +7585,7 @@ async def test_get_google_channel_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7596,7 +7609,7 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7627,7 +7640,7 @@ async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: @pytest.mark.asyncio async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.GetGoogleChannelConfigRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7697,7 +7710,7 @@ def test_get_google_channel_config_field_headers(): @pytest.mark.asyncio async def test_get_google_channel_config_field_headers_async(): client = EventarcAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7768,7 +7781,7 @@ def test_get_google_channel_config_flattened_error(): @pytest.mark.asyncio async def test_get_google_channel_config_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7796,7 +7809,7 @@ async def test_get_google_channel_config_flattened_async(): @pytest.mark.asyncio async def test_get_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7926,7 +7939,7 @@ async def test_update_google_channel_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7950,7 +7963,7 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transpo # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7981,7 +7994,7 @@ async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transpo @pytest.mark.asyncio async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio', request_type=eventarc.UpdateGoogleChannelConfigRequest): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8051,7 +8064,7 @@ def test_update_google_channel_config_field_headers(): @pytest.mark.asyncio async def test_update_google_channel_config_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8127,7 +8140,7 @@ def test_update_google_channel_config_flattened_error(): @pytest.mark.asyncio async def test_update_google_channel_config_flattened_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -8159,7 +8172,7 @@ async def test_update_google_channel_config_flattened_async(): @pytest.mark.asyncio async def test_update_google_channel_config_flattened_error_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -14642,7 +14655,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -15099,7 +15112,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15147,7 +15160,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15188,7 +15201,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def test_delete_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -15228,7 +15241,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15276,7 +15289,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15317,7 +15330,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -15357,7 +15370,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15405,7 +15418,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15446,7 +15459,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -15486,7 +15499,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15534,7 +15547,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15575,7 +15588,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -15615,7 +15628,7 @@ def test_list_locations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15663,7 +15676,7 @@ def test_list_locations_field_headers(): @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15704,7 +15717,7 @@ def test_list_locations_from_dict(): @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -15744,7 +15757,7 @@ def test_get_location(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15791,7 +15804,7 @@ def test_get_location_field_headers(): @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=async_anonymous_credentials() ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15832,7 +15845,7 @@ def test_get_location_from_dict(): @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -15877,7 +15890,7 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -15932,7 +15945,7 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -15976,7 +15989,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -16026,7 +16039,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16087,7 +16100,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -16133,7 +16146,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -16185,7 +16198,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -16246,7 +16259,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -16296,7 +16309,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = EventarcAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 612b98f70848..15b2af664e59 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -29,6 +29,12 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -58,6 +64,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -867,7 +880,7 @@ async def test_list_buckets_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -890,7 +903,7 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -921,7 +934,7 @@ async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_ @pytest.mark.asyncio async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -989,7 +1002,7 @@ def test_list_buckets_field_headers(): @pytest.mark.asyncio async def test_list_buckets_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1060,7 +1073,7 @@ def test_list_buckets_flattened_error(): @pytest.mark.asyncio async def test_list_buckets_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1088,7 +1101,7 @@ async def test_list_buckets_flattened_async(): @pytest.mark.asyncio async def test_list_buckets_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1202,7 +1215,7 @@ def test_list_buckets_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_buckets_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1251,7 +1264,7 @@ async def test_list_buckets_async_pager(): @pytest.mark.asyncio async def test_list_buckets_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1426,7 +1439,7 @@ async def test_get_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1455,7 +1468,7 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1486,7 +1499,7 @@ async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1566,7 +1579,7 @@ def test_get_bucket_field_headers(): @pytest.mark.asyncio async def test_get_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1717,7 +1730,7 @@ async def test_create_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1740,7 +1753,7 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1776,7 +1789,7 @@ async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1843,7 +1856,7 @@ def test_create_bucket_async_field_headers(): @pytest.mark.asyncio async def test_create_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1992,7 +2005,7 @@ async def test_update_bucket_async_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2015,7 +2028,7 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2051,7 +2064,7 @@ async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = @pytest.mark.asyncio async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2118,7 +2131,7 @@ def test_update_bucket_async_field_headers(): @pytest.mark.asyncio async def test_update_bucket_async_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2279,7 +2292,7 @@ async def test_create_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2308,7 +2321,7 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2339,7 +2352,7 @@ async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2419,7 +2432,7 @@ def test_create_bucket_field_headers(): @pytest.mark.asyncio async def test_create_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2578,7 +2591,7 @@ async def test_update_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2607,7 +2620,7 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2638,7 +2651,7 @@ async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2718,7 +2731,7 @@ def test_update_bucket_field_headers(): @pytest.mark.asyncio async def test_update_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2862,7 +2875,7 @@ async def test_delete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2883,7 +2896,7 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2914,7 +2927,7 @@ async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2979,7 +2992,7 @@ def test_delete_bucket_field_headers(): @pytest.mark.asyncio async def test_delete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3123,7 +3136,7 @@ async def test_undelete_bucket_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3144,7 +3157,7 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3175,7 +3188,7 @@ async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3240,7 +3253,7 @@ def test_undelete_bucket_field_headers(): @pytest.mark.asyncio async def test_undelete_bucket_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3389,7 +3402,7 @@ async def test_list_views_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3412,7 +3425,7 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3443,7 +3456,7 @@ async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3511,7 +3524,7 @@ def test_list_views_field_headers(): @pytest.mark.asyncio async def test_list_views_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3582,7 +3595,7 @@ def test_list_views_flattened_error(): @pytest.mark.asyncio async def test_list_views_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3610,7 +3623,7 @@ async def test_list_views_flattened_async(): @pytest.mark.asyncio async def test_list_views_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3724,7 +3737,7 @@ def test_list_views_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_views_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3773,7 +3786,7 @@ async def test_list_views_async_pager(): @pytest.mark.asyncio async def test_list_views_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3940,7 +3953,7 @@ async def test_get_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3965,7 +3978,7 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3996,7 +4009,7 @@ async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4068,7 +4081,7 @@ def test_get_view_field_headers(): @pytest.mark.asyncio async def test_get_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4221,7 +4234,7 @@ async def test_create_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4246,7 +4259,7 @@ async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4277,7 +4290,7 @@ async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4349,7 +4362,7 @@ def test_create_view_field_headers(): @pytest.mark.asyncio async def test_create_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4500,7 +4513,7 @@ async def test_update_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4525,7 +4538,7 @@ async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4556,7 +4569,7 @@ async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4628,7 +4641,7 @@ def test_update_view_field_headers(): @pytest.mark.asyncio async def test_update_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4772,7 +4785,7 @@ async def test_delete_view_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4793,7 +4806,7 @@ async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4824,7 +4837,7 @@ async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4889,7 +4902,7 @@ def test_delete_view_field_headers(): @pytest.mark.asyncio async def test_delete_view_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5038,7 +5051,7 @@ async def test_list_sinks_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5061,7 +5074,7 @@ async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5092,7 +5105,7 @@ async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5160,7 +5173,7 @@ def test_list_sinks_field_headers(): @pytest.mark.asyncio async def test_list_sinks_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5231,7 +5244,7 @@ def test_list_sinks_flattened_error(): @pytest.mark.asyncio async def test_list_sinks_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5259,7 +5272,7 @@ async def test_list_sinks_flattened_async(): @pytest.mark.asyncio async def test_list_sinks_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5373,7 +5386,7 @@ def test_list_sinks_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_sinks_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5422,7 +5435,7 @@ async def test_list_sinks_async_pager(): @pytest.mark.asyncio async def test_list_sinks_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5599,7 +5612,7 @@ async def test_get_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -5629,7 +5642,7 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5660,7 +5673,7 @@ async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5742,7 +5755,7 @@ def test_get_sink_field_headers(): @pytest.mark.asyncio async def test_get_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5813,7 +5826,7 @@ def test_get_sink_flattened_error(): @pytest.mark.asyncio async def test_get_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5841,7 +5854,7 @@ async def test_get_sink_flattened_async(): @pytest.mark.asyncio async def test_get_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5985,7 +5998,7 @@ async def test_create_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6015,7 +6028,7 @@ async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6046,7 +6059,7 @@ async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6128,7 +6141,7 @@ def test_create_sink_field_headers(): @pytest.mark.asyncio async def test_create_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6204,7 +6217,7 @@ def test_create_sink_flattened_error(): @pytest.mark.asyncio async def test_create_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6236,7 +6249,7 @@ async def test_create_sink_flattened_async(): @pytest.mark.asyncio async def test_create_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6381,7 +6394,7 @@ async def test_update_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6411,7 +6424,7 @@ async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6442,7 +6455,7 @@ async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6524,7 +6537,7 @@ def test_update_sink_field_headers(): @pytest.mark.asyncio async def test_update_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-6605,7 +6618,7 @@ def test_update_sink_flattened_error(): @pytest.mark.asyncio async def test_update_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6641,7 +6654,7 @@ async def test_update_sink_flattened_async(): @pytest.mark.asyncio async def test_update_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6770,7 +6783,7 @@ async def test_delete_sink_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -6791,7 +6804,7 @@ async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6822,7 +6835,7 @@ async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6887,7 +6900,7 @@ def test_delete_sink_field_headers(): @pytest.mark.asyncio async def test_delete_sink_field_headers_async(): client = ConfigServiceV2AsyncClient( 
- credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6958,7 +6971,7 @@ def test_delete_sink_flattened_error(): @pytest.mark.asyncio async def test_delete_sink_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6986,7 +6999,7 @@ async def test_delete_sink_flattened_async(): @pytest.mark.asyncio async def test_delete_sink_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7120,7 +7133,7 @@ async def test_create_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7143,7 +7156,7 @@ async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7179,7 +7192,7 @@ async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7246,7 +7259,7 @@ def test_create_link_field_headers(): @pytest.mark.asyncio async def test_create_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7327,7 +7340,7 @@ def test_create_link_flattened_error(): @pytest.mark.asyncio async def test_create_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7365,7 +7378,7 @@ async def test_create_link_flattened_async(): @pytest.mark.asyncio async def test_create_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7499,7 +7512,7 @@ async def test_delete_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7522,7 +7535,7 @@ async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7558,7 +7571,7 @@ async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_a @pytest.mark.asyncio async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7625,7 +7638,7 @@ def test_delete_link_field_headers(): @pytest.mark.asyncio async def test_delete_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7696,7 +7709,7 @@ def test_delete_link_flattened_error(): @pytest.mark.asyncio async def test_delete_link_flattened_async(): client = ConfigServiceV2AsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -7726,7 +7739,7 @@ async def test_delete_link_flattened_async(): @pytest.mark.asyncio async def test_delete_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7858,7 +7871,7 @@ async def test_list_links_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -7881,7 +7894,7 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7912,7 +7925,7 @@ async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7980,7 +7993,7 @@ def test_list_links_field_headers(): @pytest.mark.asyncio async def test_list_links_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ 
-8051,7 +8064,7 @@ def test_list_links_flattened_error(): @pytest.mark.asyncio async def test_list_links_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8079,7 +8092,7 @@ async def test_list_links_flattened_async(): @pytest.mark.asyncio async def test_list_links_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8193,7 +8206,7 @@ def test_list_links_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_links_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8242,7 +8255,7 @@ async def test_list_links_async_pager(): @pytest.mark.asyncio async def test_list_links_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8409,7 +8422,7 @@ async def test_get_link_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -8434,7 +8447,7 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8465,7 +8478,7 @@ async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyn @pytest.mark.asyncio async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8537,7 +8550,7 @@ def test_get_link_field_headers(): @pytest.mark.asyncio async def test_get_link_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8608,7 +8621,7 @@ def test_get_link_flattened_error(): @pytest.mark.asyncio async def test_get_link_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -8636,7 +8649,7 @@ async def test_get_link_flattened_async(): @pytest.mark.asyncio async def test_get_link_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -8768,7 +8781,7 @@ async def test_list_exclusions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -8791,7 +8804,7 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8822,7 +8835,7 @@ async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8890,7 +8903,7 @@ def test_list_exclusions_field_headers(): @pytest.mark.asyncio async def test_list_exclusions_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8961,7 +8974,7 @@ def test_list_exclusions_flattened_error(): @pytest.mark.asyncio async def test_list_exclusions_flattened_async(): client = 
ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -8989,7 +9002,7 @@ async def test_list_exclusions_flattened_async(): @pytest.mark.asyncio async def test_list_exclusions_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9103,7 +9116,7 @@ def test_list_exclusions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_exclusions_async_pager(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9152,7 +9165,7 @@ async def test_list_exclusions_async_pager(): @pytest.mark.asyncio async def test_list_exclusions_async_pages(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9321,7 +9334,7 @@ async def test_get_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -9347,7 +9360,7 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9378,7 +9391,7 @@ async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc @pytest.mark.asyncio async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9452,7 +9465,7 @@ def test_get_exclusion_field_headers(): @pytest.mark.asyncio async def test_get_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9523,7 +9536,7 @@ def test_get_exclusion_flattened_error(): @pytest.mark.asyncio async def test_get_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -9551,7 +9564,7 @@ async def test_get_exclusion_flattened_async(): @pytest.mark.asyncio async def test_get_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9687,7 +9700,7 @@ async def test_create_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -9713,7 +9726,7 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9744,7 +9757,7 @@ async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9818,7 +9831,7 @@ def test_create_exclusion_field_headers(): @pytest.mark.asyncio async def test_create_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9894,7 +9907,7 @@ def test_create_exclusion_flattened_error(): @pytest.mark.asyncio async def test_create_exclusion_flattened_async(): 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -9926,7 +9939,7 @@ async def test_create_exclusion_flattened_async(): @pytest.mark.asyncio async def test_create_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10063,7 +10076,7 @@ async def test_update_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -10089,7 +10102,7 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10120,7 +10133,7 @@ async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10194,7 +10207,7 @@ def test_update_exclusion_field_headers(): @pytest.mark.asyncio async def test_update_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10275,7 +10288,7 @@ def test_update_exclusion_flattened_error(): @pytest.mark.asyncio async def test_update_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -10311,7 +10324,7 @@ async def test_update_exclusion_flattened_async(): @pytest.mark.asyncio async def test_update_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10440,7 +10453,7 @@ async def test_delete_exclusion_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -10461,7 +10474,7 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10492,7 +10505,7 @@ async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10557,7 +10570,7 @@ def test_delete_exclusion_field_headers(): @pytest.mark.asyncio async def test_delete_exclusion_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10628,7 +10641,7 @@ def test_delete_exclusion_flattened_error(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -10656,7 +10669,7 @@ async def test_delete_exclusion_flattened_async(): @pytest.mark.asyncio async def test_delete_exclusion_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -10792,7 +10805,7 @@ async def test_get_cmek_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -10818,7 +10831,7 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10849,7 +10862,7 @@ async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -10923,7 +10936,7 @@ def test_get_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_get_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11076,7 +11089,7 @@ async def test_update_cmek_settings_empty_call_async(): # This test is a coverage failsafe to 
make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -11102,7 +11115,7 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11133,7 +11146,7 @@ async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str @pytest.mark.asyncio async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11207,7 +11220,7 @@ def test_update_cmek_settings_field_headers(): @pytest.mark.asyncio async def test_update_cmek_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11362,7 +11375,7 @@ async def test_get_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -11389,7 +11402,7 @@ async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_ # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11420,7 +11433,7 @@ async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_ @pytest.mark.asyncio async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11496,7 +11509,7 @@ def test_get_settings_field_headers(): @pytest.mark.asyncio async def test_get_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11567,7 +11580,7 @@ def test_get_settings_flattened_error(): @pytest.mark.asyncio async def test_get_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -11595,7 +11608,7 @@ async def test_get_settings_flattened_async(): @pytest.mark.asyncio async def test_get_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -11733,7 +11746,7 @@ async def test_update_settings_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -11760,7 +11773,7 @@ async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11791,7 +11804,7 @@ async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -11867,7 +11880,7 @@ def test_update_settings_field_headers(): @pytest.mark.asyncio async def test_update_settings_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -11943,7 +11956,7 @@ def test_update_settings_flattened_error(): @pytest.mark.asyncio async def 
test_update_settings_flattened_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -11975,7 +11988,7 @@ async def test_update_settings_flattened_async(): @pytest.mark.asyncio async def test_update_settings_flattened_error_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -12112,7 +12125,7 @@ async def test_copy_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -12135,7 +12148,7 @@ async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12171,7 +12184,7 @@ async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -12978,7 +12991,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -13011,7 +13024,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13059,7 +13072,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13100,7 +13113,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -13140,7 +13153,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13188,7 +13201,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13229,7 +13242,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -13269,7 +13282,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -13317,7 +13330,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13358,7 +13371,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = ConfigServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f3c4bb8c9554..e307dd2b6528 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -29,6 +29,12 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api import monitored_resource_pb2 # type: ignore from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -59,6 +65,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -864,7 +877,7 @@ async def test_delete_log_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -885,7 +898,7 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -916,7 +929,7 @@ async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_as @pytest.mark.asyncio async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -981,7 +994,7 @@ def test_delete_log_field_headers(): @pytest.mark.asyncio async def test_delete_log_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1052,7 +1065,7 @@ def test_delete_log_flattened_error(): @pytest.mark.asyncio async def test_delete_log_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1080,7 +1093,7 @@ async def test_delete_log_flattened_async(): @pytest.mark.asyncio async def test_delete_log_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1208,7 +1221,7 @@ async def test_write_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1230,7 +1243,7 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1261,7 +1274,7 @@ async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1350,7 +1363,7 @@ def test_write_log_entries_flattened_error(): @pytest.mark.asyncio async def test_write_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1390,7 +1403,7 @@ async def test_write_log_entries_flattened_async(): @pytest.mark.asyncio async def test_write_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1527,7 +1540,7 @@ async def test_list_log_entries_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1550,7 +1563,7 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1581,7 +1594,7 @@ async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1667,7 +1680,7 @@ def test_list_log_entries_flattened_error(): @pytest.mark.asyncio async def test_list_log_entries_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1703,7 +1716,7 @@ async def test_list_log_entries_flattened_async(): @pytest.mark.asyncio async def test_list_log_entries_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1814,7 +1827,7 @@ def test_list_log_entries_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_entries_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1863,7 +1876,7 @@ async def test_list_log_entries_async_pager(): @pytest.mark.asyncio async def test_list_log_entries_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2026,7 +2039,7 @@ async def test_list_monitored_resource_descriptors_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2049,7 +2062,7 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2080,7 +2093,7 @@ async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc( @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2211,7 +2224,7 @@ def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc") @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2260,7 +2273,7 @@ async def test_list_monitored_resource_descriptors_async_pager(): @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2427,7 +2440,7 @@ async def test_list_logs_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2451,7 +2464,7 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2482,7 +2495,7 @@ async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asy @pytest.mark.asyncio async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2552,7 +2565,7 @@ def test_list_logs_field_headers(): @pytest.mark.asyncio async def test_list_logs_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2623,7 +2636,7 @@ def test_list_logs_flattened_error(): @pytest.mark.asyncio async def test_list_logs_flattened_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2651,7 +2664,7 @@ async def test_list_logs_flattened_async(): @pytest.mark.asyncio async def test_list_logs_flattened_error_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2765,7 +2778,7 @@ def test_list_logs_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_logs_async_pager(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2814,7 +2827,7 @@ async def test_list_logs_async_pager(): @pytest.mark.asyncio async def test_list_logs_async_pages(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2931,7 +2944,7 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2962,7 +2975,7 @@ async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3587,7 +3600,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -3620,7 +3633,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3668,7 +3681,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3709,7 +3722,7 @@ def test_cancel_operation_from_dict(): 
@pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -3749,7 +3762,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3797,7 +3810,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3838,7 +3851,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3878,7 +3891,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3926,7 +3939,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3967,7 +3980,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = LoggingServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index c0917891e97d..eac9dd581d9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -29,6 +29,12 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api import distribution_pb2 # type: ignore from google.api import label_pb2 # type: ignore from google.api import launch_stage_pb2 # type: ignore @@ -57,6 +63,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -867,7 +880,7 @@ async def test_list_log_metrics_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -890,7 +903,7 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -921,7 +934,7 @@ async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -989,7 +1002,7 @@ def test_list_log_metrics_field_headers(): @pytest.mark.asyncio async def test_list_log_metrics_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1060,7 +1073,7 @@ def test_list_log_metrics_flattened_error(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1088,7 +1101,7 @@ async def test_list_log_metrics_flattened_async(): @pytest.mark.asyncio async def test_list_log_metrics_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1202,7 +1215,7 @@ def test_list_log_metrics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_log_metrics_async_pager(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1251,7 +1264,7 @@ async def test_list_log_metrics_async_pager(): @pytest.mark.asyncio async def test_list_log_metrics_async_pages(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1426,7 +1439,7 @@ async def test_get_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1455,7 +1468,7 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1486,7 +1499,7 @@ async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1566,7 +1579,7 @@ def test_get_log_metric_field_headers(): @pytest.mark.asyncio async def test_get_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1637,7 +1650,7 @@ def test_get_log_metric_flattened_error(): @pytest.mark.asyncio async def test_get_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1665,7 +1678,7 @@ async def test_get_log_metric_flattened_async(): @pytest.mark.asyncio async def test_get_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1807,7 +1820,7 @@ async def test_create_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1836,7 +1849,7 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1867,7 +1880,7 @@ async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1947,7 +1960,7 @@ def test_create_log_metric_field_headers(): @pytest.mark.asyncio async def test_create_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2023,7 +2036,7 @@ def test_create_log_metric_flattened_error(): @pytest.mark.asyncio async def 
test_create_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2055,7 +2068,7 @@ async def test_create_log_metric_flattened_async(): @pytest.mark.asyncio async def test_create_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2198,7 +2211,7 @@ async def test_update_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2227,7 +2240,7 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2258,7 +2271,7 @@ async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2338,7 +2351,7 @@ def test_update_log_metric_field_headers(): @pytest.mark.asyncio async def test_update_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2414,7 +2427,7 @@ def test_update_log_metric_flattened_error(): @pytest.mark.asyncio async def test_update_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2446,7 +2459,7 @@ async def test_update_log_metric_flattened_async(): @pytest.mark.asyncio async def test_update_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2574,7 +2587,7 @@ async def test_delete_log_metric_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2595,7 +2608,7 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2626,7 +2639,7 @@ async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2691,7 +2704,7 @@ def test_delete_log_metric_field_headers(): @pytest.mark.asyncio async def test_delete_log_metric_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2762,7 +2775,7 @@ def test_delete_log_metric_flattened_error(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2790,7 +2803,7 @@ async def test_delete_log_metric_flattened_async(): @pytest.mark.asyncio async def test_delete_log_metric_flattened_error_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3393,7 +3406,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -3426,7 +3439,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3474,7 +3487,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3515,7 +3528,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -3555,7 +3568,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3603,7 +3616,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3644,7 +3657,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -3684,7 +3697,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3732,7 +3745,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3773,7 +3786,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = MetricsServiceV2AsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 5abdbbe8c2ca..9d8f120f0e66 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -36,6 +36,12 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import future @@ -69,6 +75,13 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -893,7 +906,7 @@ async def test_list_instances_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -917,7 +930,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -948,7 +961,7 @@ async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grp @pytest.mark.asyncio async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1018,7 +1031,7 @@ def test_list_instances_field_headers(): @pytest.mark.asyncio async def test_list_instances_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1089,7 +1102,7 @@ def test_list_instances_flattened_error(): @pytest.mark.asyncio async def test_list_instances_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1117,7 +1130,7 @@ async def test_list_instances_flattened_async(): @pytest.mark.asyncio async def test_list_instances_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1231,7 +1244,7 @@ def test_list_instances_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_instances_async_pager(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1280,7 +1293,7 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1495,7 +1508,7 @@ async def test_get_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1544,7 +1557,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1575,7 +1588,7 @@ async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_ @pytest.mark.asyncio async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1695,7 +1708,7 @@ def test_get_instance_field_headers(): @pytest.mark.asyncio async def test_get_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1766,7 +1779,7 @@ def test_get_instance_flattened_error(): @pytest.mark.asyncio async def test_get_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1794,7 +1807,7 @@ async def test_get_instance_flattened_async(): @pytest.mark.asyncio async def test_get_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1924,7 +1937,7 @@ async def test_get_instance_auth_string_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -1947,7 +1960,7 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1978,7 +1991,7 @@ async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: @pytest.mark.asyncio async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceAuthStringRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2046,7 +2059,7 @@ def test_get_instance_auth_string_field_headers(): @pytest.mark.asyncio async def test_get_instance_auth_string_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2117,7 +2130,7 @@ def test_get_instance_auth_string_flattened_error(): @pytest.mark.asyncio async def 
test_get_instance_auth_string_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2145,7 +2158,7 @@ async def test_get_instance_auth_string_flattened_async(): @pytest.mark.asyncio async def test_get_instance_auth_string_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2279,7 +2292,7 @@ async def test_create_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2302,7 +2315,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2338,7 +2351,7 @@ async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2405,7 +2418,7 @@ def test_create_instance_field_headers(): @pytest.mark.asyncio async def test_create_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2486,7 +2499,7 @@ def test_create_instance_flattened_error(): @pytest.mark.asyncio async def test_create_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2524,7 +2537,7 @@ async def test_create_instance_flattened_async(): @pytest.mark.asyncio async def test_create_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2656,7 +2669,7 @@ async def test_update_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -2679,7 +2692,7 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2715,7 +2728,7 @@ async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2782,7 +2795,7 @@ def test_update_instance_field_headers(): @pytest.mark.asyncio async def test_update_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2858,7 +2871,7 @@ def test_update_instance_flattened_error(): @pytest.mark.asyncio async def test_update_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2892,7 +2905,7 @@ async def test_update_instance_flattened_async(): @pytest.mark.asyncio async def test_update_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3027,7 +3040,7 @@ async def test_upgrade_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3050,7 +3063,7 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "g # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3086,7 +3099,7 @@ async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "g @pytest.mark.asyncio async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpgradeInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3153,7 +3166,7 @@ def test_upgrade_instance_field_headers(): @pytest.mark.asyncio async def test_upgrade_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3229,7 +3242,7 @@ def test_upgrade_instance_flattened_error(): @pytest.mark.asyncio async def test_upgrade_instance_flattened_async(): client = 
CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3263,7 +3276,7 @@ async def test_upgrade_instance_flattened_async(): @pytest.mark.asyncio async def test_upgrade_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3396,7 +3409,7 @@ async def test_import_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3419,7 +3432,7 @@ async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3455,7 +3468,7 @@ async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_import_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ImportInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3522,7 +3535,7 @@ def test_import_instance_field_headers(): @pytest.mark.asyncio async def test_import_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the 
HTTP/1.1 URI should be sent as @@ -3598,7 +3611,7 @@ def test_import_instance_flattened_error(): @pytest.mark.asyncio async def test_import_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3632,7 +3645,7 @@ async def test_import_instance_flattened_async(): @pytest.mark.asyncio async def test_import_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3765,7 +3778,7 @@ async def test_export_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -3788,7 +3801,7 @@ async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3824,7 +3837,7 @@ async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_export_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ExportInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3891,7 +3904,7 @@ def test_export_instance_field_headers(): @pytest.mark.asyncio async def 
test_export_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3967,7 +3980,7 @@ def test_export_instance_flattened_error(): @pytest.mark.asyncio async def test_export_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4001,7 +4014,7 @@ async def test_export_instance_flattened_async(): @pytest.mark.asyncio async def test_export_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4134,7 +4147,7 @@ async def test_failover_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4157,7 +4170,7 @@ async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = " # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4193,7 +4206,7 @@ async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = " @pytest.mark.asyncio async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.FailoverInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4260,7 +4273,7 @@ def test_failover_instance_field_headers(): @pytest.mark.asyncio async def test_failover_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4336,7 +4349,7 @@ def test_failover_instance_flattened_error(): @pytest.mark.asyncio async def test_failover_instance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4370,7 +4383,7 @@ async def test_failover_instance_flattened_async(): @pytest.mark.asyncio async def test_failover_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4503,7 +4516,7 @@ async def test_delete_instance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4526,7 +4539,7 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4562,7 +4575,7 @@ async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "gr @pytest.mark.asyncio async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4629,7 +4642,7 @@ def test_delete_instance_field_headers(): @pytest.mark.asyncio async def test_delete_instance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4700,7 +4713,7 @@ def test_delete_instance_flattened_error(): @pytest.mark.asyncio async def test_delete_instance_flattened_async(): client = 
CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4730,7 +4743,7 @@ async def test_delete_instance_flattened_async(): @pytest.mark.asyncio async def test_delete_instance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4862,7 +4875,7 @@ async def test_reschedule_maintenance_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport='grpc_asyncio', ) @@ -4885,7 +4898,7 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: st # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4921,7 +4934,7 @@ async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: st @pytest.mark.asyncio async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.RescheduleMaintenanceRequest): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4988,7 +5001,7 @@ def test_reschedule_maintenance_field_headers(): @pytest.mark.asyncio async def test_reschedule_maintenance_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any 
value that is part of the HTTP/1.1 URI should be sent as @@ -5067,7 +5080,7 @@ def test_reschedule_maintenance_flattened_error(): @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5103,7 +5116,7 @@ async def test_reschedule_maintenance_flattened_async(): @pytest.mark.asyncio async def test_reschedule_maintenance_flattened_error_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -9003,7 +9016,7 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport="grpc_asyncio", ) with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: @@ -9319,7 +9332,7 @@ def test_delete_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_delete_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9367,7 +9380,7 @@ def test_delete_operation_field_headers(): @pytest.mark.asyncio async def test_delete_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9408,7 +9421,7 @@ def test_delete_operation_from_dict(): @pytest.mark.asyncio async def 
test_delete_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: @@ -9448,7 +9461,7 @@ def test_cancel_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_cancel_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9496,7 +9509,7 @@ def test_cancel_operation_field_headers(): @pytest.mark.asyncio async def test_cancel_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9537,7 +9550,7 @@ def test_cancel_operation_from_dict(): @pytest.mark.asyncio async def test_cancel_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: @@ -9577,7 +9590,7 @@ def test_get_operation(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_operation_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9625,7 +9638,7 @@ def test_get_operation_field_headers(): @pytest.mark.asyncio async def test_get_operation_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9666,7 +9679,7 @@ def test_get_operation_from_dict(): @pytest.mark.asyncio async def test_get_operation_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_operation), "__call__") as call: @@ -9706,7 +9719,7 @@ def test_list_operations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_operations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9754,7 +9767,7 @@ def test_list_operations_field_headers(): @pytest.mark.asyncio async def test_list_operations_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9795,7 +9808,7 @@ def test_list_operations_from_dict(): @pytest.mark.asyncio async def test_list_operations_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_operations), "__call__") as call: @@ -9835,7 +9848,7 @@ def test_list_locations(transport: str = "grpc"): @pytest.mark.asyncio async def test_list_locations_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -9883,7 +9896,7 @@ def test_list_locations_field_headers(): @pytest.mark.asyncio async def test_list_locations_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9924,7 +9937,7 @@ def test_list_locations_from_dict(): @pytest.mark.asyncio async def test_list_locations_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: @@ -9964,7 +9977,7 @@ def test_get_location(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_location_async(transport: str = "grpc_asyncio"): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=async_anonymous_credentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -10011,7 +10024,7 @@ def test_get_location_field_headers(): @pytest.mark.asyncio async def test_get_location_field_headers_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials() + credentials=async_anonymous_credentials() ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -10052,7 +10065,7 @@ def test_get_location_from_dict(): @pytest.mark.asyncio async def test_get_location_from_dict_async(): client = CloudRedisAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_locations), "__call__") as call: diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 3fe3c7b0d25f..7b541976fc22 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -19,6 +19,13 @@ import pytest from google.api_core.client_options import ClientOptions # type: ignore + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False import google.auth from google.auth import credentials as ga_credentials from google.showcase import EchoClient @@ -31,6 +38,13 @@ from google.showcase import EchoAsyncClient from google.showcase import IdentityAsyncClient + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. + # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. 
+ def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + _test_event_loop = asyncio.new_event_loop() asyncio.set_event_loop(_test_event_loop) @@ -49,7 +63,8 @@ def async_echo(use_mtls, event_loop): EchoAsyncClient, use_mtls, transport_name="grpc_asyncio", - channel_creator=aio.insecure_channel + channel_creator=aio.insecure_channel, + credentials=async_anonymous_credentials(), ) @pytest.fixture @@ -58,7 +73,8 @@ def async_identity(use_mtls, event_loop): IdentityAsyncClient, use_mtls, transport_name="grpc_asyncio", - channel_creator=aio.insecure_channel + channel_creator=aio.insecure_channel, + credentials=async_anonymous_credentials(), ) @@ -94,11 +110,13 @@ def construct_client( transport_name="grpc", channel_creator=grpc.insecure_channel, # for grpc,grpc_asyncio only credentials=ga_credentials.AnonymousCredentials(), - transport_endpoint="localhost:7469" + transport_endpoint="localhost:7469", ): if use_mtls: with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as mock_ssl_cred: + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as mock_ssl_cred: mock_ssl_cred.return_value = ssl_credentials client = client_class( credentials=credentials, @@ -137,9 +155,17 @@ def use_mtls(request): @pytest.fixture -def parametrized_echo(use_mtls, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): +def parametrized_echo( + use_mtls, + channel_creator, + transport_name, + transport_endpoint, + credential_universe, + client_universe, +): print( - f"test_params: {channel_creator, transport_name, transport_endpoint, credential_universe, client_universe}") + f"test_params: {channel_creator, transport_name, transport_endpoint, credential_universe, client_universe}" + ) credentials = ga_credentials.AnonymousCredentials() # TODO: 
This is needed to cater for older versions of google-auth # Make this test unconditional once the minimum supported version of @@ -149,11 +175,14 @@ def parametrized_echo(use_mtls, channel_creator, transport_name, transport_endpo ] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): credentials._universe_domain = credential_universe - client = construct_client(EchoClient, use_mtls, - transport_endpoint=transport_endpoint, - transport_name=transport_name, - channel_creator=channel_creator, - credentials=credentials) + client = construct_client( + EchoClient, + use_mtls, + transport_endpoint=transport_endpoint, + transport_name=transport_name, + channel_creator=channel_creator, + credentials=credentials, + ) # Since `channel_creator` does not take credentials, we set them # explicitly in the client for test purposes. # @@ -171,7 +200,14 @@ def echo(use_mtls, request): @pytest.fixture(params=["grpc", "rest"]) def echo_with_universe_credentials_localhost(use_mtls, request): - return construct_client(EchoClient, use_mtls, transport_name=request.param, credentials=ga_credentials.AnonymousCredentials(universe_domain="localhost:7469")) + return construct_client( + EchoClient, + use_mtls, + transport_name=request.param, + credentials=ga_credentials.AnonymousCredentials( + universe_domain="localhost:7469" + ), + ) @pytest.fixture(params=["grpc", "rest"]) From 6b9b839aa82c171ccaa9be87ad5968aa11c24403 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 27 Aug 2024 12:47:20 -0400 Subject: [PATCH 1171/1339] build: update googleapis-common-protos (#2109) --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 56e7d6386085..c57b0648564d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -120,9 +120,9 @@ google-auth==2.32.0 \ 
--hash=sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022 \ --hash=sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b # via google-api-core -googleapis-common-protos[grpc]==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 +googleapis-common-protos[grpc]==1.65.0 \ + --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ + --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 # via # -r requirements.in # google-api-core From f498b2f9ea47e2c46ea74e443ee3d959307b7ee8 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 27 Aug 2024 13:09:23 -0400 Subject: [PATCH 1172/1339] cleanup: refactor rest transport class in gapics (#2099) --- .../gapic/generator/generator.py | 4 +- .../%sub/services/%service/_shared_macros.j2 | 91 + .../%service/transports/README.rst.j2 | 10 + .../%service/transports/_mixins.py.j2 | 18 + .../%service/transports/_rest_mixins.py.j2 | 70 +- .../transports/_rest_mixins_base.py.j2 | 49 + .../services/%service/transports/rest.py.j2 | 139 +- .../%service/transports/rest_base.py.j2 | 173 ++ .../asset_service/transports/README.rst | 9 + .../services/asset_service/transports/rest.py | 1483 +++++++-------- .../asset_service/transports/rest_base.py | 1015 +++++++++++ .../iam_credentials/transports/README.rst | 9 + .../iam_credentials/transports/rest.py | 302 ++- .../iam_credentials/transports/rest_base.py | 276 +++ .../services/eventarc/transports/README.rst | 9 + .../services/eventarc/transports/rest.py | 1624 ++++++++--------- .../services/eventarc/transports/rest_base.py | 1060 +++++++++++ .../config_service_v2/transports/README.rst | 9 + .../logging_service_v2/transports/README.rst | 9 + .../metrics_service_v2/transports/README.rst | 9 + .../cloud_redis/transports/README.rst | 9 + .../services/cloud_redis/transports/rest.py | 1037 +++++------ 
.../cloud_redis/transports/rest_base.py | 698 +++++++ .../tests/unit/generator/test_generator.py | 1 + 24 files changed, 5541 insertions(+), 2572 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/README.rst.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst create mode 100755 
packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 73accd9a22ee..dff8e0ebedf3 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -297,6 +297,8 @@ def _render_template( # TODO(yon-mg) - remove when rest async implementation resolved # temporarily stop async client gen while rest async is unkown ('async' in template_name and 'grpc' not in opts.transport) + or + ('rest_base' in template_name and 'rest' not in opts.transport) ): continue @@ -319,7 +321,7 @@ def _render_template( def _is_desired_transport(self, template_name: str, opts: Options) -> bool: """Returns true if template name contains a desired transport""" - desired_transports = ['__init__', 'base'] + opts.transport + desired_transports = ['__init__', 'base', 'README'] + opts.transport return any(transport in template_name for transport in desired_transports) def _get_file( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 14764da17ed5..baeb630b4faa 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -68,3 +68,94 @@ except ImportError: # pragma: NO COVER ) {% endif %}{# service_version #} {% endmacro %} + +{% macro operations_mixin_imports(api, service, opts) %} +{% if import_ns is not defined %} +{% set import_ns = 
namespace(has_operations_mixin=false) %} +{% endif %}{# import_ns is not defined #} +{% set import_ns.has_operations_mixin = api.has_operations_mixin %} + +{% filter sort_lines %} +{% for method in service.methods.values() %} +{{method.input.ident.python_import}} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} +{{method.output.ident.python_import}} +{% endif %} +{% endfor %} +{% if opts.add_iam_methods %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %}{# opts.add_iam_methods #} +{% endfilter %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore +{% endif %}{# import_ns.has_operations_mixin #} +{% endmacro %} + +{% macro http_options_method(rules) %} +@staticmethod +def _get_http_options(): + http_options: List[Dict[str, str]] = [ + {%- for rule in rules %}{ + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rule in rules #} + ] + return http_options +{% endmacro %} + +{% macro response_method(body_spec) %} +@staticmethod +def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + {% if body_spec %} + data=body, + {% endif %} + ) + return response +{% endmacro %} + +{% macro rest_call_method_common(body_spec, method_name, service_name) %} + + http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() + request, metadata = 
self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) + transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) + + {% if body_spec %} + body = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_request_body_json(transcoded_request) + {% endif %} {# body_spec #} + + # Jsonify the query params + query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) + + # Send the request + response = {{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + +{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/README.rst.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/README.rst.j2 new file mode 100644 index 000000000000..2475c2c3360c --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/README.rst.j2 @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +`{{ service.name }}Transport` is the ABC for all transports. +- public child `{{ service.name }}GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `{{ service.name }}GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_Base{{ service.name }}RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `{{ service.name }}RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). +{# Since the service mixins have a similar structure, we factor out shared code into `_shared_macros.j2` to avoid duplication. #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 index ff573768121f..1867d0ac8c50 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -1,3 +1,21 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+#} + +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + {% if "grpc" in opts.transport %} {% if api.has_operations_mixin %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 index 460df244cc79..f01102a18f50 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -1,3 +1,21 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+#} + +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + {% if "rest" in opts.transport %} {% for name, sig in api.mixin_api_signatures.items() %} @@ -5,7 +23,10 @@ def {{ name|snake_case }}(self): return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore - class _{{ name }}({{service.name}}RestStub): + class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{service.name}}RestStub): + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {{ shared_macros.response_method(body_spec)|indent(8) }} + def __call__(self, request: {{ sig.request_type }}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -32,52 +53,7 @@ {{ sig.response_type }}: Response from {{ name }} method. {% endif %} """ - - http_options: List[Dict[str, str]] = [ - {%- for rule in api.mixin_http_options["{}".format(name)] %}{ - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', - {% if rule.body %} - 'body': '{{ rule.body }}', - {% endif %}{# rule.body #} - }, - {% endfor %} - ] - - request, metadata = self._interceptor.pre_{{ name|snake_case }}(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} - {%- if body_spec %} - body = json.dumps(transcoded_request['body']) - {%- endif %} - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - {% if body_spec %} - data=body, - {% endif %} - ) - - # In 
case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) + {{ shared_macros.rest_call_method_common(body_spec, name, service.name)|indent(8) }} {% if sig.response_type == "None" %} return self._interceptor.post_{{ name|snake_case }}(None) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 new file mode 100644 index 000000000000..2fd6d8bafe63 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 @@ -0,0 +1,49 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+#} + +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + + {% if "rest" in opts.transport %} + + {% for name, sig in api.mixin_api_signatures.items() %} + class _Base{{ name }}: + + {{ shared_macros.http_options_method(api.mixin_http_options["{}".format(name)])|indent(8)}} + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {%- if body_spec %} + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + + {%- endif %} {# body_spec #} + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + {% endfor %} + {% endif %} {# rest in opts.transport #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index da7308589224..d7864e4b1291 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -1,3 +1,4 @@ +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {% extends '_base.py.j2' %} {% block content %} @@ -5,14 +6,11 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials 
# type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format @@ -26,44 +24,23 @@ from google.iam.v1 import policy_pb2 # type: ignore {% if api.has_location_mixin %} from google.cloud.location import locations_pb2 # type: ignore {% endif %} -{% set import_ns = namespace(has_operations_mixin=false) %} -{% if api.has_operations_mixin %} -{% set import_ns.has_operations_mixin = True %} -{% endif %} + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings +{{ shared_macros.operations_mixin_imports(api, service, opts) }} + +from .rest_base import _Base{{ service.name }}RestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -{# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} -{% filter sort_lines %} -{% for method in service.methods.values() %} -{{method.input.ident.python_import}} -{% if method.output.ident|string() == "operations_pb2.Operation" %} -{% set import_ns.has_operations_mixin = True %} -{% else %} -{{method.output.ident.python_import}} -{% endif %} -{% endfor %} -{% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -{% endif %} -{% endfilter %} -{% if import_ns.has_operations_mixin %} -from google.longrunning import operations_pb2 # type: ignore -{% endif %} - -from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as 
BASE_DEFAULT_CLIENT_INFO - - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, @@ -159,8 +136,8 @@ class {{service.name}}RestStub: _interceptor: {{ service.name }}RestInterceptor -class {{service.name}}RestTransport({{service.name}}Transport): - """REST backend transport for {{ service.name }}. +class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): + """REST backend synchronous transport for {{ service.name }}. {{ service.meta.doc|rst(width=72, indent=4) }} @@ -169,15 +146,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - {% if not opts.rest_numeric_enums %} - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! - {% endif %} """ - {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, @@ -235,19 +205,12 @@ class {{service.name}}RestTransport({{service.name}}Transport): # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience ) self._session = AuthorizedSession( @@ -305,24 +268,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# service.has_lro #} {% for method in service.methods.values()|sort(attribute="name") %} - class _{{method.name}}({{service.name}}RestStub): + class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, {{service.name}}RestStub): def __hash__(self): - return hash("{{method.name}}") - + return hash("{{service.name}}RestTransport.{{method.name}}") {% if method.http_options and not method.client_streaming %} - {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - {% for req_field in method.input.required_fields if req_field.name in method.query_params %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} - {% endfor %} - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - {% endif %}{# required fields #} - {% endif %}{# not method.client_streaming #} + {% set body_spec = method.http_options[0].body %} + {{ shared_macros.response_method(body_spec)|indent(8) }} 
+ {% endif %}{# method.http_options and not method.client_streaming #} def __call__(self, request: {{method.input.ident}}, *, @@ -353,67 +306,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} """ - http_options: List[Dict[str, str]] = [ - {%- for rule in method.http_options %}{ - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', - {% if rule.body %} - 'body': '{{ rule.body }}', - {% endif %}{# rule.body #} - }, - {% endfor %}{# rule in method.http_options #} - ] - request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - {% if method.input.ident.is_proto_plus_type %} - pb_request = {{method.input.ident}}.pb(request) - {% else %} - pb_request = request - {% endif %} - transcoded_request = path_template.transcode(http_options, pb_request) - - {% set body_spec = method.http_options[0].body %} - {%- if body_spec %} - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums={{ opts.rest_numeric_enums }} - ) - {%- endif %} - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums={{ opts.rest_numeric_enums }}, - )) - {% if method.input.required_fields %} - query_params.update(self._get_unset_required_fields(query_params)) - {% endif %}{# required fields #} - - {% if opts.rest_numeric_enums %} - query_params["$alt"] = "json;enum-encoding=int" - {% endif %} - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - {% if body_spec %} - data=body, - {% endif %} - ) - - # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) + {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name)|indent(8) }} {% if not method.void %} # Return the response diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 new file mode 100644 index 000000000000..9e4f26fcb30c --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 @@ -0,0 +1,173 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+#} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +{% extends '_base.py.j2' %} + +{% block content %} + +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} +from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO +from google.auth import credentials as ga_credentials # type: ignore + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +{{ shared_macros.operations_mixin_imports(api, service, opts) }} + + +class _Base{{ service.name }}RestTransport({{service.name}}Transport): + """Base REST backend transport for {{ service.name }}. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + + {# TODO: handle mtls stuff if that is relevant for rest transport #} + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: Optional[ga_credentials.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + {% for method in service.methods.values()|sort(attribute="name") %} + class _Base{{method.name}}: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + {% if method.http_options and not method.client_streaming %} + {% if method.input.required_fields %} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ 
req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + {% endfor %} + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + {% endif %}{# required fields #} + + {% set method_http_options = method.http_options %} + + {{ shared_macros.http_options_method(method_http_options)|indent(8) }} + + @staticmethod + def _get_transcoded_request(http_options, request): + {% if method.input.ident.is_proto_plus_type %} + pb_request = {{method.input.ident}}.pb(request) + {% else %} + pb_request = request + {% endif %} + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + {% set body_spec = method.http_options[0].body %} + {%- if body_spec %} + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums={{ opts.rest_numeric_enums }} + ) + return body + + {%- endif %}{# body_spec #} + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums={{ opts.rest_numeric_enums }}, + )) + {% if method.input.required_fields %} + query_params.update(_Base{{ service.name }}RestTransport._Base{{method.name}}._get_unset_required_fields(query_params)) + {% endif %}{# required fields #} + + {% if opts.rest_numeric_enums %} + query_params["$alt"] = "json;enum-encoding=int" + {% endif %} + return query_params + + {% endif %}{# method.http_options and not method.client_streaming #} + {% endfor %} + + {% include '%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2' %} + + +__all__=( + '_Base{{ service.name }}RestTransport', +) +{% endblock %} diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/README.rst new file mode 100755 index 000000000000..f0467812ea79 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`AssetServiceTransport` is the ABC for all transports. +- public child `AssetServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `AssetServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseAssetServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `AssetServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 064d43528c4d..4c86709c4fe0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -16,35 +16,34 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.api_core import operations_v1 + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.asset_v1.types import asset_service from google.protobuf import empty_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore -from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +from .rest_base import _BaseAssetServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, 
gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -632,8 +631,8 @@ class AssetServiceRestStub: _interceptor: AssetServiceRestInterceptor -class AssetServiceRestTransport(AssetServiceTransport): - """REST backend transport for AssetService. +class AssetServiceRestTransport(_BaseAssetServiceRestTransport): + """REST backend synchronous transport for AssetService. Asset service definition. @@ -642,10 +641,6 @@ class AssetServiceRestTransport(AssetServiceTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! """ def __init__(self, *, @@ -702,19 +697,12 @@ def __init__(self, *, # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience ) self._session = AuthorizedSession( @@ -756,16 +744,31 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache.
return self._operations_client - class _AnalyzeIamPolicy(AssetServiceRestStub): + class _AnalyzeIamPolicy(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy, AssetServiceRestStub): def __hash__(self): - return hash("AnalyzeIamPolicy") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "analysisQuery" : {}, } + return hash("AssetServiceRestTransport.AnalyzeIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.AnalyzeIamPolicyRequest, *, @@ -792,34 +795,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) - pb_request = asset_service.AnalyzeIamPolicyRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - 
query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -834,16 +818,32 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy(resp) return resp - class _AnalyzeIamPolicyLongrunning(AssetServiceRestStub): + class _AnalyzeIamPolicyLongrunning(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, AssetServiceRestStub): def __hash__(self): - return hash("AnalyzeIamPolicyLongrunning") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("AssetServiceRestTransport.AnalyzeIamPolicyLongrunning") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, @@ -872,42 +872,17 @@ 
def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', - 'body': '*', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) - pb_request = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -920,16 +895,31 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) return resp - class _AnalyzeMove(AssetServiceRestStub): + class _AnalyzeMove(_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub): def __hash__(self): - return hash("AnalyzeMove") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "destinationParent" : "", } + return hash("AssetServiceRestTransport.AnalyzeMove") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.AnalyzeMoveRequest, *, @@ -956,34 +946,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=*/*}:analyzeMove', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() request, metadata = self._interceptor.pre_analyze_move(request, metadata) - pb_request = asset_service.AnalyzeMoveRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - 
query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -998,16 +969,31 @@ def __call__(self, resp = self._interceptor.post_analyze_move(resp) return resp - class _AnalyzeOrgPolicies(AssetServiceRestStub): + class _AnalyzeOrgPolicies(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub): def __hash__(self): - return hash("AnalyzeOrgPolicies") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + return hash("AssetServiceRestTransport.AnalyzeOrgPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.AnalyzeOrgPoliciesRequest, *, @@ -1034,34 +1020,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = 
[{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) - pb_request = asset_service.AnalyzeOrgPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1076,16 +1043,31 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policies(resp) return resp - class _AnalyzeOrgPolicyGovernedAssets(AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedAssets(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, AssetServiceRestStub): def __hash__(self): - return hash("AnalyzeOrgPolicyGovernedAssets") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedAssets") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, @@ -1113,34 +1095,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) - pb_request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = 
_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1155,16 +1118,31 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) return resp - class _AnalyzeOrgPolicyGovernedContainers(AssetServiceRestStub): + class _AnalyzeOrgPolicyGovernedContainers(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, AssetServiceRestStub): def __hash__(self): - return hash("AnalyzeOrgPolicyGovernedContainers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "constraint" : "", } + return hash("AssetServiceRestTransport.AnalyzeOrgPolicyGovernedContainers") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, @@ -1192,34 +1170,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) - pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = 
_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1234,16 +1193,31 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) return resp - class _BatchGetAssetsHistory(AssetServiceRestStub): + class _BatchGetAssetsHistory(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub): def __hash__(self): - return hash("BatchGetAssetsHistory") + return hash("AssetServiceRestTransport.BatchGetAssetsHistory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.BatchGetAssetsHistoryRequest, *, @@ -1267,34 +1241,15 @@ def __call__(self, Batch get assets history response. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) - pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1309,16 +1264,31 @@ def __call__(self, resp = self._interceptor.post_batch_get_assets_history(resp) return resp - class _BatchGetEffectiveIamPolicies(AssetServiceRestStub): + class _BatchGetEffectiveIamPolicies(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, AssetServiceRestStub): def __hash__(self): - return hash("BatchGetEffectiveIamPolicies") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "names" : "", } + return hash("AssetServiceRestTransport.BatchGetEffectiveIamPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, @@ -1346,34 +1316,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) - pb_request = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request(http_options, request) # 
Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1388,16 +1339,32 @@ def __call__(self, resp = self._interceptor.post_batch_get_effective_iam_policies(resp) return resp - class _CreateFeed(AssetServiceRestStub): + class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): def __hash__(self): - return hash("CreateFeed") + return hash("AssetServiceRestTransport.CreateFeed") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return 
response def __call__(self, request: asset_service.CreateFeedRequest, *, @@ -1429,42 +1396,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/feeds', - 'body': '*', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() request, metadata = self._interceptor.pre_create_feed(request, metadata) - pb_request = asset_service.CreateFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseAssetServiceRestTransport._BaseCreateFeed._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1479,16 +1421,32 @@ def __call__(self, resp = self._interceptor.post_create_feed(resp) return resp - class _CreateSavedQuery(AssetServiceRestStub): + class _CreateSavedQuery(_BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub): def __hash__(self): - return hash("CreateSavedQuery") + return hash("AssetServiceRestTransport.CreateSavedQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "savedQueryId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: asset_service.CreateSavedQueryRequest, *, @@ -1514,42 +1472,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}/savedQueries', - 'body': 'saved_query', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() request, metadata = self._interceptor.pre_create_saved_query(request, metadata) - pb_request = asset_service.CreateSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] + body = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1564,16 +1497,31 @@ def __call__(self, resp = self._interceptor.post_create_saved_query(resp) return resp - class _DeleteFeed(AssetServiceRestStub): + class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): def __hash__(self): - return hash("DeleteFeed") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("AssetServiceRestTransport.DeleteFeed") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.DeleteFeedRequest, *, @@ -1593,50 +1541,46 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/feeds/*}', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() request, metadata = self._interceptor.pre_delete_feed(request, metadata) - pb_request = asset_service.DeleteFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteSavedQuery(AssetServiceRestStub): + class _DeleteSavedQuery(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery, AssetServiceRestStub): def __hash__(self): - return hash("DeleteSavedQuery") + return hash("AssetServiceRestTransport.DeleteSavedQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.DeleteSavedQueryRequest, *, @@ -1656,50 +1600,47 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) - pb_request = asset_service.DeleteSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExportAssets(AssetServiceRestStub): + class _ExportAssets(_BaseAssetServiceRestTransport._BaseExportAssets, AssetServiceRestStub): def __hash__(self): - return hash("ExportAssets") + return hash("AssetServiceRestTransport.ExportAssets") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: asset_service.ExportAssetsRequest, *, @@ -1726,42 +1667,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:exportAssets', - 'body': '*', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() request, metadata = self._interceptor.pre_export_assets(request, metadata) - pb_request = asset_service.ExportAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request(http_options, request) - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseAssetServiceRestTransport._BaseExportAssets._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1774,16 +1690,31 @@ def __call__(self, resp = self._interceptor.post_export_assets(resp) return resp - class _GetFeed(AssetServiceRestStub): + class _GetFeed(_BaseAssetServiceRestTransport._BaseGetFeed, AssetServiceRestStub): def __hash__(self): - return hash("GetFeed") + return hash("AssetServiceRestTransport.GetFeed") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.GetFeedRequest, *, @@ -1815,34 +1746,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/feeds/*}', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() request, metadata = self._interceptor.pre_get_feed(request, metadata) - pb_request = asset_service.GetFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1857,16 +1769,31 @@ def __call__(self, resp = self._interceptor.post_get_feed(resp) return resp - class _GetSavedQuery(AssetServiceRestStub): + class _GetSavedQuery(_BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub): def __hash__(self): - return hash("GetSavedQuery") + return hash("AssetServiceRestTransport.GetSavedQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.GetSavedQueryRequest, *, @@ -1892,34 +1819,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/savedQueries/*}', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() request, metadata = self._interceptor.pre_get_saved_query(request, metadata) - pb_request = asset_service.GetSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - 
query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1934,16 +1842,31 @@ def __call__(self, resp = self._interceptor.post_get_saved_query(resp) return resp - class _ListAssets(AssetServiceRestStub): + class _ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): def __hash__(self): - return hash("ListAssets") + return hash("AssetServiceRestTransport.ListAssets") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.ListAssetsRequest, *, @@ -1967,34 +1890,15 @@ def __call__(self, ListAssets response. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/assets', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() request, metadata = self._interceptor.pre_list_assets(request, metadata) - pb_request = asset_service.ListAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2009,16 +1913,31 @@ def __call__(self, resp = self._interceptor.post_list_assets(resp) return resp - class _ListFeeds(AssetServiceRestStub): + class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): def __hash__(self): - return hash("ListFeeds") + return hash("AssetServiceRestTransport.ListFeeds") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.ListFeedsRequest, *, @@ -2042,34 +1961,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/feeds', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() request, metadata = self._interceptor.pre_list_feeds(request, metadata) - pb_request = asset_service.ListFeedsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = 
_BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2084,16 +1984,31 @@ def __call__(self, resp = self._interceptor.post_list_feeds(resp) return resp - class _ListSavedQueries(AssetServiceRestStub): + class _ListSavedQueries(_BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub): def __hash__(self): - return hash("ListSavedQueries") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("AssetServiceRestTransport.ListSavedQueries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.ListSavedQueriesRequest, *, @@ -2117,34 +2032,15 @@ def __call__(self, Response of listing saved queries. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=*/*}/savedQueries', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) - pb_request = asset_service.ListSavedQueriesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2159,16 +2055,32 @@ def __call__(self, resp = self._interceptor.post_list_saved_queries(resp) return resp - class _QueryAssets(AssetServiceRestStub): + class _QueryAssets(_BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub): def __hash__(self): - return hash("QueryAssets") + return hash("AssetServiceRestTransport.QueryAssets") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: asset_service.QueryAssetsRequest, *, @@ -2192,42 +2104,17 @@ def __call__(self, QueryAssets response. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=*/*}:queryAssets', - 'body': '*', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() request, metadata = self._interceptor.pre_query_assets(request, metadata) - pb_request = asset_service.QueryAssetsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseAssetServiceRestTransport._BaseQueryAssets._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2242,16 +2129,31 @@ def __call__(self, resp = self._interceptor.post_query_assets(resp) return resp - class _SearchAllIamPolicies(AssetServiceRestStub): + class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): def __hash__(self): - return hash("SearchAllIamPolicies") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("AssetServiceRestTransport.SearchAllIamPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.SearchAllIamPoliciesRequest, *, @@ -2275,34 +2177,15 @@ def __call__(self, Search all IAM policies response. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) - pb_request = asset_service.SearchAllIamPoliciesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2317,16 +2200,31 @@ def __call__(self, resp = self._interceptor.post_search_all_iam_policies(resp) return resp - class _SearchAllResources(AssetServiceRestStub): + class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): def __hash__(self): - return hash("SearchAllResources") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("AssetServiceRestTransport.SearchAllResources") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: asset_service.SearchAllResourcesRequest, *, @@ -2350,34 +2248,15 @@ def __call__(self, Search all resources response. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{scope=*/*}:searchAllResources', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() request, metadata = self._interceptor.pre_search_all_resources(request, metadata) - pb_request = asset_service.SearchAllResourcesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2392,16 +2271,32 @@ def __call__(self, resp = self._interceptor.post_search_all_resources(resp) return resp - class _UpdateFeed(AssetServiceRestStub): + class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): def __hash__(self): - return hash("UpdateFeed") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("AssetServiceRestTransport.UpdateFeed") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: asset_service.UpdateFeedRequest, *, @@ -2433,42 +2328,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{feed.name=*/*/feeds/*}', - 'body': '*', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() request, metadata = self._interceptor.pre_update_feed(request, metadata) - pb_request = asset_service.UpdateFeedRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseAssetServiceRestTransport._BaseUpdateFeed._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2483,16 +2353,32 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) return resp - class _UpdateSavedQuery(AssetServiceRestStub): + class _UpdateSavedQuery(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub): def __hash__(self): - return hash("UpdateSavedQuery") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } + return hash("AssetServiceRestTransport.UpdateSavedQuery") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: asset_service.UpdateSavedQueryRequest, *, @@ -2518,42 +2404,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', - 'body': 'saved_query', - }, - ] + http_options = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() request, metadata = self._interceptor.pre_update_saved_query(request, metadata) - pb_request = asset_service.UpdateSavedQueryRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception # subclass. @@ -2756,7 +2617,29 @@ def update_saved_query(self) -> Callable[ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(AssetServiceRestStub): + class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2779,33 +2662,15 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=*/*/operations/*/**}', - }, - ] - + http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py new file mode 100755 index 000000000000..116512120a95 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -0,0 +1,1015 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO +from google.auth import credentials as ga_credentials # type: ignore + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.asset_v1.types import asset_service +from google.protobuf import empty_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseAssetServiceRestTransport(AssetServiceTransport): + """Base REST backend transport for AssetService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'cloudasset.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'cloudasset.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseAnalyzeIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "analysisQuery" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicy', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.AnalyzeIamPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_unset_required_fields(query_params)) + + return query_params + + class _BaseAnalyzeIamPolicyLongrunning: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{analysis_query.scope=*/*}:analyzeIamPolicyLongrunning', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_unset_required_fields(query_params)) + + return query_params + + class _BaseAnalyzeMove: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "destinationParent" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{resource=*/*}:analyzeMove', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.AnalyzeMoveRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeMove._get_unset_required_fields(query_params)) + + return query_params + + class _BaseAnalyzeOrgPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicies', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.AnalyzeOrgPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_unset_required_fields(query_params)) + + return query_params + + class _BaseAnalyzeOrgPolicyGovernedAssets: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedAssets', + }, + ] + return http_options + + 
@staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_unset_required_fields(query_params)) + + return query_params + + class _BaseAnalyzeOrgPolicyGovernedContainers: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "constraint" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:analyzeOrgPolicyGovernedContainers', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_unset_required_fields(query_params)) + + return query_params + + class _BaseBatchGetAssetsHistory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: 
Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}:batchGetAssetsHistory', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.BatchGetAssetsHistoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_unset_required_fields(query_params)) + + return query_params + + class _BaseBatchGetEffectiveIamPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "names" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}/effectiveIamPolicies:batchGet', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) 
+ query_params.update(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreateFeed: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}/feeds', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.CreateFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseCreateFeed._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreateSavedQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "savedQueryId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v1/{parent=*/*}/savedQueries', + 'body': 'saved_query', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.CreateSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteFeed: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.DeleteFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseDeleteFeed._get_unset_required_fields(query_params)) + + return query_params + + class 
_BaseDeleteSavedQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=*/*/savedQueries/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.DeleteSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_unset_required_fields(query_params)) + + return query_params + + class _BaseExportAssets: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}:exportAssets', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.ExportAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request 
body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseExportAssets._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetFeed: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/feeds/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.GetFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseGetFeed._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetSavedQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 
'method': 'get', + 'uri': '/v1/{name=*/*/savedQueries/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.GetSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseGetSavedQuery._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListAssets: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}/assets', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.ListAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseListAssets._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListFeeds: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}/feeds', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.ListFeedsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseListFeeds._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListSavedQueries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=*/*}/savedQueries', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.ListSavedQueriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseListSavedQueries._get_unset_required_fields(query_params)) + + return 
query_params + + class _BaseQueryAssets: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=*/*}:queryAssets', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.QueryAssetsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseQueryAssets._get_unset_required_fields(query_params)) + + return query_params + + class _BaseSearchAllIamPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:searchAllIamPolicies', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
asset_service.SearchAllIamPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_unset_required_fields(query_params)) + + return query_params + + class _BaseSearchAllResources: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{scope=*/*}:searchAllResources', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.SearchAllResourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseSearchAllResources._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateFeed: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + 
@staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{feed.name=*/*/feeds/*}', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.UpdateFeedRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseUpdateFeed._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateSavedQuery: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{saved_query.name=*/*/savedQueries/*}', + 'body': 'saved_query', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = asset_service.UpdateSavedQueryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + 
use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=*/*/operations/*/**}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseAssetServiceRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/README.rst new file mode 100755 index 000000000000..b4c630e83951 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`IAMCredentialsTransport` is the ABC for all transports. +- public child `IAMCredentialsGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `IAMCredentialsGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). 
+- private child `_BaseIAMCredentialsRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `IAMCredentialsRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index f205888bb85b..1dc8b94d616f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -16,34 +16,33 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings + +from google.iam.credentials_v1.types import common + + +from .rest_base import _BaseIAMCredentialsRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO 
COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -from google.iam.credentials_v1.types import common - -from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, @@ -176,8 +175,8 @@ class IAMCredentialsRestStub: _interceptor: IAMCredentialsRestInterceptor -class IAMCredentialsRestTransport(IAMCredentialsTransport): - """REST backend transport for IAMCredentials. +class IAMCredentialsRestTransport(_BaseIAMCredentialsRestTransport): + """REST backend synchronous transport for IAMCredentials. A service account is a special type of Google account that belongs to your application or a virtual machine (VM), instead @@ -195,10 +194,6 @@ class IAMCredentialsRestTransport(IAMCredentialsTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! """ def __init__(self, *, @@ -255,19 +250,12 @@ def __init__(self, *, # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience ) self._session = AuthorizedSession( @@ -277,16 +265,32 @@ def __init__(self, *, self._interceptor = interceptor or IAMCredentialsRestInterceptor() self._prep_wrapped_messages(client_info) - class _GenerateAccessToken(IAMCredentialsRestStub): + class _GenerateAccessToken(_BaseIAMCredentialsRestTransport._BaseGenerateAccessToken, IAMCredentialsRestStub): def __hash__(self): - return hash("GenerateAccessToken") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("IAMCredentialsRestTransport.GenerateAccessToken") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: common.GenerateAccessTokenRequest, *, @@ -310,42 +314,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken', - 'body': '*', - }, - ] + http_options = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_http_options() request, metadata = self._interceptor.pre_generate_access_token(request, metadata) - pb_request = common.GenerateAccessTokenRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = IAMCredentialsRestTransport._GenerateAccessToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -360,16 +339,32 @@ def __call__(self, resp = self._interceptor.post_generate_access_token(resp) return resp - class _GenerateIdToken(IAMCredentialsRestStub): + class _GenerateIdToken(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken, IAMCredentialsRestStub): def __hash__(self): - return hash("GenerateIdToken") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("IAMCredentialsRestTransport.GenerateIdToken") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: common.GenerateIdTokenRequest, *, @@ -393,42 +388,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateIdToken', - 'body': '*', - }, - ] + http_options = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_http_options() request, metadata = self._interceptor.pre_generate_id_token(request, metadata) - pb_request = common.GenerateIdTokenRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_transcoded_request(http_options, request) - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] + body = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = IAMCredentialsRestTransport._GenerateIdToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -443,16 +413,32 @@ def __call__(self, resp = self._interceptor.post_generate_id_token(resp) return resp - class _SignBlob(IAMCredentialsRestStub): + class _SignBlob(_BaseIAMCredentialsRestTransport._BaseSignBlob, IAMCredentialsRestStub): def __hash__(self): - return hash("SignBlob") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("IAMCredentialsRestTransport.SignBlob") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: common.SignBlobRequest, *, @@ -476,42 +462,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signBlob', - 'body': '*', - }, - ] + http_options = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_http_options() request, metadata = self._interceptor.pre_sign_blob(request, metadata) - pb_request = common.SignBlobRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_transcoded_request(http_options, request) - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseIAMCredentialsRestTransport._BaseSignBlob._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = IAMCredentialsRestTransport._SignBlob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -526,16 +487,32 @@ def __call__(self, resp = self._interceptor.post_sign_blob(resp) return resp - class _SignJwt(IAMCredentialsRestStub): + class _SignJwt(_BaseIAMCredentialsRestTransport._BaseSignJwt, IAMCredentialsRestStub): def __hash__(self): - return hash("SignJwt") + return hash("IAMCredentialsRestTransport.SignJwt") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: common.SignJwtRequest, *, @@ -559,42 +536,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signJwt', - 'body': '*', - }, - ] + http_options = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_http_options() request, metadata = self._interceptor.pre_sign_jwt(request, metadata) - pb_request = common.SignJwtRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_transcoded_request(http_options, request) - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = IAMCredentialsRestTransport._SignJwt._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py new file mode 100755 index 000000000000..3ffce85ffd95 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO +from google.auth import credentials as ga_credentials # type: ignore + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.iam.credentials_v1.types import common + + +class _BaseIAMCredentialsRestTransport(IAMCredentialsTransport): + """Base REST backend transport for IAMCredentials. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'iamcredentials.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'iamcredentials.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseGenerateAccessToken: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateAccessToken', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = common.GenerateAccessTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGenerateIdToken: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:generateIdToken', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = common.GenerateIdTokenRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_unset_required_fields(query_params)) + + return query_params + + class _BaseSignBlob: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signBlob', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = common.SignBlobRequest.pb(request) + transcoded_request 
= path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseIAMCredentialsRestTransport._BaseSignBlob._get_unset_required_fields(query_params)) + + return query_params + + class _BaseSignJwt: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/serviceAccounts/*}:signJwt', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = common.SignJwtRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseIAMCredentialsRestTransport._BaseSignJwt._get_unset_required_fields(query_params)) + + return query_params + + +__all__=( + 
'_BaseIAMCredentialsRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/README.rst new file mode 100755 index 000000000000..442d2fb0c1b2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`EventarcTransport` is the ABC for all transports. +- public child `EventarcGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `EventarcGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseEventarcRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `EventarcRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index dbf62e74e652..be3f58cf1d78 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -16,14 +16,11 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format @@ -31,17 +28,12 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection @@ -52,7 +44,14 @@ from google.cloud.eventarc_v1.types import trigger from google.longrunning import operations_pb2 # type: ignore -from .base import 
EventarcTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +from .rest_base import _BaseEventarcRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -704,8 +703,8 @@ class EventarcRestStub: _interceptor: EventarcRestInterceptor -class EventarcRestTransport(EventarcTransport): - """REST backend transport for Eventarc. +class EventarcRestTransport(_BaseEventarcRestTransport): + """REST backend synchronous transport for Eventarc. Eventarc allows users to subscribe to various events that are provided by Google Cloud services and forward them to supported @@ -716,10 +715,6 @@ class EventarcRestTransport(EventarcTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! """ def __init__(self, *, @@ -776,19 +771,12 @@ def __init__(self, *, # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience ) self._session = AuthorizedSession( @@ -849,16 +837,32 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. return self._operations_client - class _CreateChannel(EventarcRestStub): + class _CreateChannel(_BaseEventarcRestTransport._BaseCreateChannel, EventarcRestStub): def __hash__(self): - return hash("CreateChannel") + return hash("EventarcRestTransport.CreateChannel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelId" : "", "validateOnly" : False, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: eventarc.CreateChannelRequest, *, @@ -886,42 +890,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': 
'/v1/{parent=projects/*/locations/*}/channels', - 'body': 'channel', - }, - ] + http_options = _BaseEventarcRestTransport._BaseCreateChannel._get_http_options() request, metadata = self._interceptor.pre_create_channel(request, metadata) - pb_request = eventarc.CreateChannelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseEventarcRestTransport._BaseCreateChannel._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseCreateChannel._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = EventarcRestTransport._CreateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -934,16 +913,32 @@ def __call__(self, resp = self._interceptor.post_create_channel(resp) return resp - class _CreateChannelConnection(EventarcRestStub): + class _CreateChannelConnection(_BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub): def __hash__(self): - return hash("CreateChannelConnection") + return hash("EventarcRestTransport.CreateChannelConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "channelConnectionId" : "", } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: eventarc.CreateChannelConnectionRequest, *, @@ -971,42 +966,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', - 'body': 'channel_connection', - }, - ] + http_options = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_http_options() request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) - pb_request = eventarc.CreateChannelConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - 
use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1019,16 +989,32 @@ def __call__(self, resp = self._interceptor.post_create_channel_connection(resp) return resp - class _CreateTrigger(EventarcRestStub): + class _CreateTrigger(_BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub): def __hash__(self): - return hash("CreateTrigger") + return hash("EventarcRestTransport.CreateTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "triggerId" : "", "validateOnly" : False, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: eventarc.CreateTriggerRequest, *, @@ -1056,42 +1042,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - 'body': 'trigger', - }, - ] + http_options = _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() request, metadata = self._interceptor.pre_create_trigger(request, metadata) - pb_request = eventarc.CreateTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + 
body = _BaseEventarcRestTransport._BaseCreateTrigger._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1104,16 +1065,31 @@ def __call__(self, resp = self._interceptor.post_create_trigger(resp) return resp - class _DeleteChannel(EventarcRestStub): + class _DeleteChannel(_BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub): def __hash__(self): - return hash("DeleteChannel") + return hash("EventarcRestTransport.DeleteChannel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.DeleteChannelRequest, *, @@ -1141,34 +1117,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() request, metadata = self._interceptor.pre_delete_channel(request, metadata) - pb_request = eventarc.DeleteChannelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1181,16 +1138,31 @@ def __call__(self, resp = self._interceptor.post_delete_channel(resp) return resp - class _DeleteChannelConnection(EventarcRestStub): + class _DeleteChannelConnection(_BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub): def __hash__(self): - return hash("DeleteChannelConnection") + return hash("EventarcRestTransport.DeleteChannelConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.DeleteChannelConnectionRequest, *, @@ -1218,34 +1190,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_http_options() request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) - pb_request = eventarc.DeleteChannelConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1258,16 +1211,31 @@ def __call__(self, resp = self._interceptor.post_delete_channel_connection(resp) return resp - class _DeleteTrigger(EventarcRestStub): + class _DeleteTrigger(_BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub): def __hash__(self): - return hash("DeleteTrigger") + return hash("EventarcRestTransport.DeleteTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.DeleteTriggerRequest, *, @@ -1295,34 +1263,15 @@ def 
__call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() request, metadata = self._interceptor.pre_delete_trigger(request, metadata) - pb_request = eventarc.DeleteTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1335,16 +1284,31 @@ def __call__(self, resp = self._interceptor.post_delete_trigger(resp) return resp - class _GetChannel(EventarcRestStub): + class _GetChannel(_BaseEventarcRestTransport._BaseGetChannel, EventarcRestStub): def __hash__(self): - return hash("GetChannel") + return hash("EventarcRestTransport.GetChannel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.GetChannelRequest, *, @@ -1377,34 +1341,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channels/*}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseGetChannel._get_http_options() request, metadata = self._interceptor.pre_get_channel(request, metadata) - pb_request = eventarc.GetChannelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = 
_BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1419,16 +1364,31 @@ def __call__(self, resp = self._interceptor.post_get_channel(resp) return resp - class _GetChannelConnection(EventarcRestStub): + class _GetChannelConnection(_BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub): def __hash__(self): - return hash("GetChannelConnection") + return hash("EventarcRestTransport.GetChannelConnection") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.GetChannelConnectionRequest, *, @@ -1460,34 +1420,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', - }, - ] + http_options = 
_BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) - pb_request = eventarc.GetChannelConnectionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1502,16 +1443,31 @@ def __call__(self, resp = self._interceptor.post_get_channel_connection(resp) return resp - class _GetGoogleChannelConfig(EventarcRestStub): + class _GetGoogleChannelConfig(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub): def __hash__(self): - return hash("GetGoogleChannelConfig") + return hash("EventarcRestTransport.GetGoogleChannelConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.GetGoogleChannelConfigRequest, *, @@ -1544,34 +1500,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_http_options() request, metadata = self._interceptor.pre_get_google_channel_config(request, metadata) - pb_request = eventarc.GetGoogleChannelConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - 
transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1586,16 +1523,31 @@ def __call__(self, resp = self._interceptor.post_get_google_channel_config(resp) return resp - class _GetProvider(EventarcRestStub): + class _GetProvider(_BaseEventarcRestTransport._BaseGetProvider, EventarcRestStub): def __hash__(self): - return hash("GetProvider") + return hash("EventarcRestTransport.GetProvider") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.GetProviderRequest, *, @@ -1622,34 +1574,15 @@ def __call__(self, """ - http_options: 
List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/providers/*}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseGetProvider._get_http_options() request, metadata = self._interceptor.pre_get_provider(request, metadata) - pb_request = eventarc.GetProviderRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1664,16 +1597,31 @@ def __call__(self, resp = self._interceptor.post_get_provider(resp) return resp - class _GetTrigger(EventarcRestStub): + class _GetTrigger(_BaseEventarcRestTransport._BaseGetTrigger, EventarcRestStub): def __hash__(self): - return hash("GetTrigger") + return hash("EventarcRestTransport.GetTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.GetTriggerRequest, *, @@ -1700,34 +1648,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', - }, - ] + http_options = _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() request, metadata = self._interceptor.pre_get_trigger(request, metadata) - pb_request = eventarc.GetTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = 
_BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1742,16 +1671,31 @@ def __call__(self, resp = self._interceptor.post_get_trigger(resp) return resp - class _ListChannelConnections(EventarcRestStub): + class _ListChannelConnections(_BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub): def __hash__(self): - return hash("ListChannelConnections") + return hash("EventarcRestTransport.ListChannelConnections") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.ListChannelConnectionsRequest, *, @@ -1778,34 +1722,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', - }, - ] + 
http_options = _BaseEventarcRestTransport._BaseListChannelConnections._get_http_options() request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) - pb_request = eventarc.ListChannelConnectionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1820,16 +1745,31 @@ def __call__(self, resp = self._interceptor.post_list_channel_connections(resp) return resp - class _ListChannels(EventarcRestStub): + class _ListChannels(_BaseEventarcRestTransport._BaseListChannels, EventarcRestStub): def __hash__(self): - return hash("ListChannels") + return hash("EventarcRestTransport.ListChannels") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.ListChannelsRequest, *, @@ -1854,34 +1794,15 @@ def __call__(self, The response message for the ``ListChannels`` method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/channels', - }, - ] + http_options = _BaseEventarcRestTransport._BaseListChannels._get_http_options() request, metadata = self._interceptor.pre_list_channels(request, metadata) - pb_request = eventarc.ListChannelsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1896,16 +1817,31 @@ def __call__(self, resp = self._interceptor.post_list_channels(resp) return resp - class _ListProviders(EventarcRestStub): + class _ListProviders(_BaseEventarcRestTransport._BaseListProviders, EventarcRestStub): def __hash__(self): - return hash("ListProviders") + return hash("EventarcRestTransport.ListProviders") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.ListProvidersRequest, *, @@ -1930,34 +1866,15 @@ def __call__(self, The response message for the ``ListProviders`` method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/providers', - }, - ] + http_options = _BaseEventarcRestTransport._BaseListProviders._get_http_options() request, metadata = self._interceptor.pre_list_providers(request, metadata) - pb_request = eventarc.ListProvidersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1972,16 +1889,31 @@ def __call__(self, resp = self._interceptor.post_list_providers(resp) return resp - class _ListTriggers(EventarcRestStub): + class _ListTriggers(_BaseEventarcRestTransport._BaseListTriggers, EventarcRestStub): def __hash__(self): - return hash("ListTriggers") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("EventarcRestTransport.ListTriggers") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: eventarc.ListTriggersRequest, *, @@ -2006,34 +1938,15 @@ def __call__(self, The response message for the ``ListTriggers`` method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/triggers', - }, - ] + http_options = _BaseEventarcRestTransport._BaseListTriggers._get_http_options() request, metadata = self._interceptor.pre_list_triggers(request, metadata) - pb_request = eventarc.ListTriggersRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2048,16 +1961,32 @@ def __call__(self, resp = self._interceptor.post_list_triggers(resp) return resp - class _UpdateChannel(EventarcRestStub): + class _UpdateChannel(_BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub): def __hash__(self): - return hash("UpdateChannel") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + return hash("EventarcRestTransport.UpdateChannel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: eventarc.UpdateChannelRequest, *, @@ -2085,42 +2014,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', - 'body': 'channel', - }, - ] + http_options = _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() request, metadata = self._interceptor.pre_update_channel(request, metadata) - pb_request = eventarc.UpdateChannelRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request(http_options, request) - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseEventarcRestTransport._BaseUpdateChannel._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2133,16 +2037,32 @@ def __call__(self, resp = self._interceptor.post_update_channel(resp) return resp - class _UpdateGoogleChannelConfig(EventarcRestStub): + class _UpdateGoogleChannelConfig(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub): def __hash__(self): - return hash("UpdateGoogleChannelConfig") + return hash("EventarcRestTransport.UpdateGoogleChannelConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: eventarc.UpdateGoogleChannelConfigRequest, *, @@ -2176,42 +2096,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}', - 'body': 'google_channel_config', - }, - ] + http_options = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_http_options() request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) - pb_request = eventarc.UpdateGoogleChannelConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, 
timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2226,16 +2121,32 @@ def __call__(self, resp = self._interceptor.post_update_google_channel_config(resp) return resp - class _UpdateTrigger(EventarcRestStub): + class _UpdateTrigger(_BaseEventarcRestTransport._BaseUpdateTrigger, EventarcRestStub): def __hash__(self): - return hash("UpdateTrigger") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "validateOnly" : False, } + return hash("EventarcRestTransport.UpdateTrigger") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: eventarc.UpdateTriggerRequest, *, @@ -2263,42 +2174,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}', - 'body': 'trigger', - }, - ] + http_options = _BaseEventarcRestTransport._BaseUpdateTrigger._get_http_options() request, metadata = self._interceptor.pre_update_trigger(request, metadata) - pb_request = eventarc.UpdateTriggerRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseEventarcRestTransport._BaseUpdateTrigger._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - 
transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseUpdateTrigger._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = EventarcRestTransport._UpdateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2459,7 +2345,29 @@ def update_trigger(self) -> Callable[ def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(EventarcRestStub): + class _GetLocation(_BaseEventarcRestTransport._BaseGetLocation, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2482,33 +2390,15 @@ def __call__(self, locations_pb2.Location: Response from GetLocation method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseGetLocation._get_http_options() request, metadata = self._interceptor.pre_get_location(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = EventarcRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2524,7 +2414,29 @@ def __call__(self, def list_locations(self): return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(EventarcRestStub): + class _ListLocations(_BaseEventarcRestTransport._BaseListLocations, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2547,33 +2459,15 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseListLocations._get_http_options() request, metadata = self._interceptor.pre_list_locations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseListLocations._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = EventarcRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2589,7 +2483,29 @@ def __call__(self, def get_iam_policy(self): return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _GetIamPolicy(EventarcRestStub): + class _GetIamPolicy(_BaseEventarcRestTransport._BaseGetIamPolicy, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: iam_policy_pb2.GetIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2612,41 +2528,15 @@ def __call__(self, policy_pb2.Policy: Response from GetIamPolicy method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy', - }, -{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy', - }, -{ - 'method': 'get', - 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseGetIamPolicy._get_http_options() request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = 
_BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = EventarcRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2662,7 +2552,30 @@ def __call__(self, def set_iam_policy(self): return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _SetIamPolicy(EventarcRestStub): + class _SetIamPolicy(_BaseEventarcRestTransport._BaseSetIamPolicy, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__(self, request: iam_policy_pb2.SetIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2685,46 +2598,17 @@ def __call__(self, policy_pb2.Policy: Response from SetIamPolicy method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy', - 'body': '*', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseSetIamPolicy._get_http_options() request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = _BaseEventarcRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseSetIamPolicy._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) + response = EventarcRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2740,7 +2624,30 @@ def __call__(self, def test_iam_permissions(self): return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - class _TestIamPermissions(EventarcRestStub): + class _TestIamPermissions(_BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__(self, request: iam_policy_pb2.TestIamPermissionsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2763,46 +2670,17 @@ def __call__(self, iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions', - 'body': '*', - }, -{ - 'method': 'post', - 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions', - 'body': '*', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseTestIamPermissions._get_http_options() request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = _BaseEventarcRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseTestIamPermissions._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) + response = EventarcRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2818,7 +2696,30 @@ def __call__(self, def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(EventarcRestStub): + class _CancelOperation(_BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2838,36 +2739,17 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseCancelOperation._get_http_options() request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) + transcoded_request = _BaseEventarcRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) - body = json.dumps(transcoded_request['body']) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseEventarcRestTransport._BaseCancelOperation._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) + response = EventarcRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -2880,7 +2762,29 @@ def __call__(self, def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(EventarcRestStub): + class _DeleteOperation(_BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2900,33 +2804,15 @@ def __call__(self, sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseDeleteOperation._get_http_options() request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - 
"{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = EventarcRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2939,7 +2825,29 @@ def __call__(self, def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(EventarcRestStub): + class _GetOperation(_BaseEventarcRestTransport._BaseGetOperation, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -2962,33 +2870,15 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseGetOperation._get_http_options() request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = EventarcRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -3004,7 +2894,29 @@ def __call__(self, def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(EventarcRestStub): + class _ListOperations(_BaseEventarcRestTransport._BaseListOperations, EventarcRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -3027,33 +2939,15 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - + http_options = _BaseEventarcRestTransport._BaseListOperations._get_http_options() request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseEventarcRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseEventarcRestTransport._BaseListOperations._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = EventarcRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py new file mode 100755 index 000000000000..660024e9a42c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -0,0 +1,1060 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from .base import EventarcTransport, DEFAULT_CLIENT_INFO +from google.auth import credentials as ga_credentials # type: ignore + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.eventarc_v1.types import channel +from google.cloud.eventarc_v1.types import channel_connection +from google.cloud.eventarc_v1.types import discovery +from google.cloud.eventarc_v1.types import eventarc +from google.cloud.eventarc_v1.types import google_channel_config +from google.cloud.eventarc_v1.types import google_channel_config as gce_google_channel_config +from google.cloud.eventarc_v1.types import trigger +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseEventarcRestTransport(EventarcTransport): + """Base REST backend transport for Eventarc. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'eventarc.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'eventarc.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateChannel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelId" : "", "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', + 'body': 'channel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, 
request): + pb_request = eventarc.CreateChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreateChannel._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreateChannelConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "channelConnectionId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + 'body': 'channel_connection', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.CreateChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) 
+ query_params.update(_BaseEventarcRestTransport._BaseCreateChannelConnection._get_unset_required_fields(query_params)) + + return query_params + + class _BaseCreateTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "triggerId" : "", "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + 'body': 'trigger', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.CreateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseCreateTrigger._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteChannel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, 
str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteChannel._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteChannelConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteChannelConnection._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.DeleteTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseDeleteTrigger._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetChannel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channels/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetChannelRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + 
query_params.update(_BaseEventarcRestTransport._BaseGetChannel._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetChannelConnection: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/channelConnections/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetChannelConnectionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetChannelConnection._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetGoogleChannelConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/googleChannelConfig}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetGoogleChannelConfigRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetProvider: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/providers/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetProviderRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetProvider._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ 
+ 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/triggers/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.GetTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseGetTrigger._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListChannelConnections: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/channelConnections', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.ListChannelConnectionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseListChannelConnections._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListChannels: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, 
Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/channels', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.ListChannelsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseListChannels._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListProviders: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/providers', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.ListProvidersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + 
query_params.update(_BaseEventarcRestTransport._BaseListProviders._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListTriggers: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/triggers', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.ListTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseListTriggers._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateChannel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{channel.name=projects/*/locations/*/channels/*}', + 'body': 'channel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdateChannelRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseUpdateChannel._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateGoogleChannelConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{google_channel_config.name=projects/*/locations/*/googleChannelConfig}', + 'body': 'google_channel_config', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdateGoogleChannelConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + 
query_params.update(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateTrigger: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "validateOnly" : False, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{trigger.name=projects/*/locations/*/triggers/*}', + 'body': 'trigger', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = eventarc.UpdateTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseEventarcRestTransport._BaseUpdateTrigger._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetLocation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetIamPolicy: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:getIamPolicy', + }, + { + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:getIamPolicy', + }, + { + 'method': 'get', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:getIamPolicy', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseSetIamPolicy: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:setIamPolicy', + 'body': '*', + }, + { + 'method': 'post', + 'uri': 
'/v1/{resource=projects/*/locations/*/channelConnections/*}:setIamPolicy', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseTestIamPermissions: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/triggers/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channels/*}:testIamPermissions', + 'body': '*', + }, + { + 'method': 'post', + 'uri': '/v1/{resource=projects/*/locations/*/channelConnections/*}:testIamPermissions', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + return http_options + + 
@staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseEventarcRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst new file mode 100755 index 000000000000..4ea84879601d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ConfigServiceV2Transport` is the ABC for all transports. +- public child `ConfigServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ConfigServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseConfigServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ConfigServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst new file mode 100755 index 000000000000..897a4c7bfaec --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`LoggingServiceV2Transport` is the ABC for all transports. +- public child `LoggingServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `LoggingServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseLoggingServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `LoggingServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst new file mode 100755 index 000000000000..00dffa25f329 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`MetricsServiceV2Transport` is the ABC for all transports. +- public child `MetricsServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). 
+- public child `MetricsServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseMetricsServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `MetricsServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/README.rst new file mode 100755 index 000000000000..fce41822b52c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CloudRedisTransport` is the ABC for all transports. +- public child `CloudRedisGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CloudRedisGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCloudRedisRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `CloudRedisRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 354a41b5da78..5d99f7e2c3f7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -16,35 +16,34 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.api_core import operations_v1 from google.cloud.location import locations_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore -from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +from .rest_base import _BaseCloudRedisRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, 
gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -468,8 +467,8 @@ class CloudRedisRestStub: _interceptor: CloudRedisRestInterceptor -class CloudRedisRestTransport(CloudRedisTransport): - """REST backend transport for CloudRedis. +class CloudRedisRestTransport(_BaseCloudRedisRestTransport): + """REST backend synchronous transport for CloudRedis. Configures and manages Cloud Memorystore for Redis instances @@ -498,10 +497,6 @@ class CloudRedisRestTransport(CloudRedisTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! """ def __init__(self, *, @@ -558,19 +553,12 @@ def __init__(self, *, # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience ) self._session = AuthorizedSession( @@ -630,16 +618,32 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: # Return the client from cache. 
return self._operations_client - class _CreateInstance(CloudRedisRestStub): + class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, CloudRedisRestStub): def __hash__(self): - return hash("CreateInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "instanceId" : "", } + return hash("CloudRedisRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.CreateInstanceRequest, *, @@ -667,42 +671,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - 'body': 'instance', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() request, metadata = self._interceptor.pre_create_instance(request, metadata) - pb_request = cloud_redis.CreateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -715,16 +694,31 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) return resp - class _DeleteInstance(CloudRedisRestStub): + class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, CloudRedisRestStub): def __hash__(self): - return hash("DeleteInstance") + return hash("CloudRedisRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: cloud_redis.DeleteInstanceRequest, *, @@ -752,34 +746,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() request, metadata = self._interceptor.pre_delete_instance(request, metadata) - pb_request = cloud_redis.DeleteInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -792,16 +767,32 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) return resp - class _ExportInstance(CloudRedisRestStub): + class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, CloudRedisRestStub): def __hash__(self): - return hash("ExportInstance") + return hash("CloudRedisRestTransport.ExportInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.ExportInstanceRequest, *, @@ -829,42 +820,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:export', - 'body': '*', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() request, metadata = self._interceptor.pre_export_instance(request, metadata) - pb_request = cloud_redis.ExportInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -877,16 +843,32 @@ def __call__(self, resp = self._interceptor.post_export_instance(resp) return resp - class _FailoverInstance(CloudRedisRestStub): + class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, CloudRedisRestStub): def __hash__(self): - return hash("FailoverInstance") + return hash("CloudRedisRestTransport.FailoverInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.FailoverInstanceRequest, *, @@ -914,42 +896,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:failover', - 'body': '*', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() request, metadata = self._interceptor.pre_failover_instance(request, metadata) - pb_request = cloud_redis.FailoverInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # 
subclass. @@ -962,16 +919,31 @@ def __call__(self, resp = self._interceptor.post_failover_instance(resp) return resp - class _GetInstance(CloudRedisRestStub): + class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub): def __hash__(self): - return hash("GetInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("CloudRedisRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: cloud_redis.GetInstanceRequest, *, @@ -996,34 +968,15 @@ def __call__(self, A Memorystore for Redis instance. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() request, metadata = self._interceptor.pre_get_instance(request, metadata) - pb_request = cloud_redis.GetInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1038,16 +991,31 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) return resp - class _GetInstanceAuthString(CloudRedisRestStub): + class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, CloudRedisRestStub): def __hash__(self): - return hash("GetInstanceAuthString") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("CloudRedisRestTransport.GetInstanceAuthString") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: cloud_redis.GetInstanceAuthStringRequest, *, @@ -1072,34 +1040,15 @@ def __call__(self, Instance AUTH string details. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}/authString', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() request, metadata = self._interceptor.pre_get_instance_auth_string(request, metadata) - pb_request = cloud_redis.GetInstanceAuthStringRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = CloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1114,16 +1063,32 @@ def __call__(self, resp = self._interceptor.post_get_instance_auth_string(resp) return resp - class _ImportInstance(CloudRedisRestStub): + class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, CloudRedisRestStub): def __hash__(self): - return hash("ImportInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("CloudRedisRestTransport.ImportInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.ImportInstanceRequest, *, @@ -1151,42 +1116,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:import', - 'body': '*', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() request, metadata = self._interceptor.pre_import_instance(request, metadata) - pb_request = cloud_redis.ImportInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1199,16 +1139,31 @@ def __call__(self, resp = self._interceptor.post_import_instance(resp) return resp - class _ListInstances(CloudRedisRestStub): + class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub): def __hash__(self): - return hash("ListInstances") + return hash("CloudRedisRestTransport.ListInstances") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__(self, request: cloud_redis.ListInstancesRequest, *, @@ -1235,34 +1190,15 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{parent=projects/*/locations/*}/instances', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() request, metadata = self._interceptor.pre_list_instances(request, metadata) - pb_request = cloud_redis.ListInstancesRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - ) + response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1277,16 +1213,32 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) return resp - class _RescheduleMaintenance(CloudRedisRestStub): + class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, CloudRedisRestStub): def __hash__(self): - return hash("RescheduleMaintenance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("CloudRedisRestTransport.RescheduleMaintenance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.RescheduleMaintenanceRequest, *, @@ -1314,42 +1266,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance', - 'body': '*', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() request, metadata = self._interceptor.pre_reschedule_maintenance(request, metadata) - pb_request = cloud_redis.RescheduleMaintenanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = 
transcoded_request['uri'] - method = transcoded_request['method'] + body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1362,16 +1289,32 @@ def __call__(self, resp = self._interceptor.post_reschedule_maintenance(resp) return resp - class _UpdateInstance(CloudRedisRestStub): + class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub): def __hash__(self): - return hash("UpdateInstance") + return hash("CloudRedisRestTransport.UpdateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "updateMask" : {}, } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.UpdateInstanceRequest, *, @@ -1399,42 +1342,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'patch', - 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', - 'body': 'instance', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() request, metadata = self._interceptor.pre_update_instance(request, metadata) - pb_request = cloud_redis.UpdateInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = 
transcoded_request['method'] + body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1447,16 +1365,32 @@ def __call__(self, resp = self._interceptor.post_update_instance(resp) return resp - class _UpgradeInstance(CloudRedisRestStub): + class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, CloudRedisRestStub): def __hash__(self): - return hash("UpgradeInstance") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - } + return hash("CloudRedisRestTransport.UpgradeInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__(self, request: cloud_redis.UpgradeInstanceRequest, *, @@ -1484,42 +1418,17 @@ def __call__(self, """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/instances/*}:upgrade', - 'body': '*', - }, - ] + http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() request, metadata = self._interceptor.pre_upgrade_instance(request, metadata) - pb_request = cloud_redis.UpgradeInstanceRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums=False - ) - uri = transcoded_request['uri'] - method = transcoded_request['method'] + body = 
_BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json(transcoded_request) # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums=False, - )) - query_params.update(self._get_unset_required_fields(query_params)) + query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, - ) + response = CloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1624,7 +1533,29 @@ def upgrade_instance(self) -> Callable[ def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore - class _GetLocation(CloudRedisRestStub): + class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -1647,33 +1578,15 @@ def __call__(self, locations_pb2.Location: Response from 
GetLocation method. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}', - }, - ] - + http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() request, metadata = self._interceptor.pre_get_location(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1689,7 +1602,29 @@ def __call__(self, def list_locations(self): return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore - class _ListLocations(CloudRedisRestStub): + class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -1712,33 +1647,15 @@ def __call__(self, locations_pb2.ListLocationsResponse: Response from ListLocations method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*}/locations', - }, - ] - + http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() request, metadata = self._interceptor.pre_list_locations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1754,7 +1671,29 @@ def __call__(self, def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - class _CancelOperation(CloudRedisRestStub): + class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -1774,33 +1713,15 @@ def __call__(self, sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [{ - 'method': 'post', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', - }, - ] - + http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() request, metadata = self._interceptor.pre_cancel_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - 
"{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1813,7 +1734,29 @@ def __call__(self, def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore - class _DeleteOperation(CloudRedisRestStub): + class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -1833,33 +1776,15 @@ def __call__(self, sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'delete', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - + http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() request, metadata = self._interceptor.pre_delete_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1872,7 +1797,29 @@ def __call__(self, def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - class _GetOperation(CloudRedisRestStub): + class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -1895,33 +1842,15 @@ def __call__(self, operations_pb2.Operation: Response from GetOperation method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*/operations/*}', - }, - ] - + http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -1937,7 +1866,29 @@ def __call__(self, def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore - class _ListOperations(CloudRedisRestStub): + class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, @@ -1960,33 +1911,15 @@ def __call__(self, operations_pb2.ListOperationsResponse: Response from ListOperations method. 
""" - http_options: List[Dict[str, str]] = [{ - 'method': 'get', - 'uri': '/v1/{name=projects/*/locations/*}/operations', - }, - ] - + http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() request, metadata = self._interceptor.pre_list_operations(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode( - http_options, **request_kwargs) - - uri = transcoded_request['uri'] - method = transcoded_request['method'] + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request['query_params'])) + query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) + response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py new file mode 100755 index 000000000000..2ed2456c7b10 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -0,0 +1,698 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from google.auth import credentials as ga_credentials # type: ignore + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseCloudRedisRestTransport(CloudRedisTransport): + """Base REST backend transport for CloudRedis. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ 
must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseExportInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:export', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.ExportInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], 
+ use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseExportInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseFailoverInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:failover', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.FailoverInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseFailoverInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, 
message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetInstanceAuthString: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}/authString', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.GetInstanceAuthStringRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + 
query_params.update(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_unset_required_fields(query_params)) + + return query_params + + class _BaseImportInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:import', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.ImportInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseImportInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListInstances: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': 
'/v1/{parent=projects/*/locations/*}/instances', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + + return query_params + + class _BaseRescheduleMaintenance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:rescheduleMaintenance', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.RescheduleMaintenanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + 
query_params.update(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpgradeInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 
'post', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}:upgrade', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.UpgradeInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseUpgradeInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetLocation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseCloudRedisRestTransport', +) diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 5cac04e6f756..ec589f9df9b6 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -154,6 +154,7 @@ def test_get_response_ignores_unwanted_transports_and_clients(): "foo/%service/transports/grpc.py.j2", "foo/%service/transports/__init__.py.j2", "foo/%service/transports/base.py.j2", + "foo/%service/transports/rest_base.py.j2", "foo/%service/async_client.py.j2", "foo/%service/client.py.j2", "mollusks/squid/sample.py.j2", From 315e30ddea20169f29e687cb0f276af353e1d77d Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 27 Aug 2024 13:57:36 -0400 Subject: [PATCH 1173/1339] process: move assertion outside of `pytest.raises` context in showcase tests (#2101) --- .../tests/system/test_error_details.py | 48 ++++-- .../tests/system/test_unary.py | 144 +++++++++++------- 2 files changed, 118 insertions(+), 74 deletions(-) diff --git a/packages/gapic-generator/tests/system/test_error_details.py b/packages/gapic-generator/tests/system/test_error_details.py index 0561b3bdfd65..35cb21287ca5 100644 
--- a/packages/gapic-generator/tests/system/test_error_details.py +++ b/packages/gapic-generator/tests/system/test_error_details.py @@ -23,9 +23,7 @@ def create_status(error_details=None): status = rpc_status.status_pb2.Status() status.code = 3 - status.message = ( - "test" - ) + status.message = "test" status_detail = any_pb2.Any() if error_details: status_detail.Pack(error_details) @@ -47,14 +45,19 @@ def create_bad_request_details(): field_violation.field = "test field" field_violation.description = "test description" return bad_request_details + bad_request_details = create_bad_request_details() status = create_status(bad_request_details) with pytest.raises(exceptions.GoogleAPICallError) as e: - _ = echo.echo(showcase.EchoRequest( - error=status, - )) - assert e.details == [bad_request_details] + _ = echo.echo( + showcase.EchoRequest( + error=status, + ) + ) + + # Note: error details are exposed as e.value.details. + assert e.value.details == [bad_request_details] def test_precondition_failure_details(echo): @@ -77,16 +80,31 @@ def create_precondition_failure_details(): status = create_status(pf_details) with pytest.raises(exceptions.GoogleAPICallError) as e: - _ = echo.echo(showcase.EchoRequest( - error=status, - )) - assert e.details == [pf_details] + _ = echo.echo( + showcase.EchoRequest( + error=status, + ) + ) + + # Note: error details are exposed as e.value.details. + assert e.value.details == [pf_details] def test_unknown_details(echo): + # TODO(dovs): reenable when transcoding requests with an "Any" + # field is properly handled + # See https://github.com/googleapis/proto-plus-python/issues/285 + # for background and tracking. 
+ if "rest" in str(echo.transport).lower(): + return + status = create_status() with pytest.raises(exceptions.GoogleAPICallError) as e: - _ = echo.echo(showcase.EchoRequest( - error=status, - )) - assert e.details == status.details + _ = echo.echo( + showcase.EchoRequest( + error=status, + ) + ) + + # Note: error details are exposed as e.value.details. + assert e.value.details == list(status.details) diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index 52c03df2206d..59f0ad1c5c1c 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -25,23 +25,27 @@ def test_unary_with_request_object(echo): - response = echo.echo(showcase.EchoRequest( - content='The hail in Wales falls mainly on the snails.', - request_id='some_value', - other_request_id='', - )) - assert response.content == 'The hail in Wales falls mainly on the snails.' - assert response.request_id == 'some_value' - assert response.other_request_id == '' + response = echo.echo( + showcase.EchoRequest( + content="The hail in Wales falls mainly on the snails.", + request_id="some_value", + other_request_id="", + ) + ) + assert response.content == "The hail in Wales falls mainly on the snails." + assert response.request_id == "some_value" + assert response.other_request_id == "" # Repeat the same test but this time without `request_id`` set # The `request_id` field should be automatically populated with # a UUID4 value if it is not set. # See https://google.aip.dev/client-libraries/4235 - response = echo.echo(showcase.EchoRequest( - content='The hail in Wales falls mainly on the snails.', - )) - assert response.content == 'The hail in Wales falls mainly on the snails.' + response = echo.echo( + showcase.EchoRequest( + content="The hail in Wales falls mainly on the snails.", + ) + ) + assert response.content == "The hail in Wales falls mainly on the snails." 
# Ensure that the uuid4 field is set according to AIP 4235 assert re.match(UUID4_RE, response.request_id) assert len(response.request_id) == 36 @@ -51,23 +55,27 @@ def test_unary_with_request_object(echo): def test_unary_with_dict(echo): - response = echo.echo({ - 'content': 'The hail in Wales falls mainly on the snails.', - 'request_id': 'some_value', - 'other_request_id': '', - }) - assert response.content == 'The hail in Wales falls mainly on the snails.' - assert response.request_id == 'some_value' - assert response.other_request_id == '' + response = echo.echo( + { + "content": "The hail in Wales falls mainly on the snails.", + "request_id": "some_value", + "other_request_id": "", + } + ) + assert response.content == "The hail in Wales falls mainly on the snails." + assert response.request_id == "some_value" + assert response.other_request_id == "" # Repeat the same test but this time without `request_id`` set # The `request_id` field should be automatically populated with # a UUID4 value if it is not set. # See https://google.aip.dev/client-libraries/4235 - response = echo.echo({ - 'content': 'The hail in Wales falls mainly on the snails.', - }) - assert response.content == 'The hail in Wales falls mainly on the snails.' + response = echo.echo( + { + "content": "The hail in Wales falls mainly on the snails.", + } + ) + assert response.content == "The hail in Wales falls mainly on the snails." assert re.match(UUID4_RE, response.request_id) assert len(response.request_id) == 36 # Ensure that the uuid4 field is set according to AIP 4235 @@ -76,31 +84,43 @@ def test_unary_with_dict(echo): def test_unary_error(echo): - message = 'Bad things! Bad things!' + message = "Bad things! Bad things!" + http_message = f"POST http://localhost:7469/v1beta1/echo:echo: {message}" # Note: InvalidArgument is from gRPC, BadRequest from http (no MTLS), InternalServerError from http (MTLS) # TODO: Reduce number of different exception types here. 
- with pytest.raises((exceptions.InvalidArgument, exceptions.BadRequest, exceptions.InternalServerError)) as exc: - echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), - 'message': message, - }, - }) - assert exc.value.code == 400 - assert exc.value.message == message + with pytest.raises( + ( + exceptions.InvalidArgument, + exceptions.BadRequest, + exceptions.InternalServerError, + ) + ) as exc: + echo.echo( + { + "error": { + "code": code_pb2.Code.Value("INVALID_ARGUMENT"), + "message": message, + }, + } + ) + err_message = message if "grpc" in str(echo.transport) else http_message + assert exc.value.code == 400 + assert exc.value.message == err_message if isinstance(echo.transport, type(echo).get_transport_class("grpc")): # Under gRPC, we raise exceptions.InvalidArgument, which is a # sub-class of exceptions.BadRequest. with pytest.raises(exceptions.InvalidArgument) as exc: - echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), - 'message': message, - }, - }) - assert exc.value.code == 400 - assert exc.value.message == message + echo.echo( + { + "error": { + "code": code_pb2.Code.Value("INVALID_ARGUMENT"), + "message": message, + }, + } + ) + assert exc.value.code == 400 + assert exc.value.message == message if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @@ -108,27 +128,33 @@ def test_unary_error(echo): @pytest.mark.asyncio async def test_async_unary_with_request_object(async_echo): - response = await async_echo.echo(showcase.EchoRequest( - content='The hail in Wales falls mainly on the snails.', - ), timeout=1) - assert response.content == 'The hail in Wales falls mainly on the snails.' + response = await async_echo.echo( + showcase.EchoRequest( + content="The hail in Wales falls mainly on the snails.", + ), + timeout=1, + ) + assert response.content == "The hail in Wales falls mainly on the snails." 
@pytest.mark.asyncio async def test_async_unary_with_dict(async_echo): - response = await async_echo.echo({ - 'content': 'The hail in Wales falls mainly on the snails.', - }) - assert response.content == 'The hail in Wales falls mainly on the snails.' + response = await async_echo.echo( + { + "content": "The hail in Wales falls mainly on the snails.", + } + ) + assert response.content == "The hail in Wales falls mainly on the snails." @pytest.mark.asyncio async def test_async_unary_error(async_echo): - message = 'Bad things! Bad things!' + message = "Bad things! Bad things!" with pytest.raises(exceptions.InvalidArgument) as exc: - await async_echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('INVALID_ARGUMENT'), - 'message': message, - }, - }) - assert exc.value.code == 400 - assert exc.value.message == message + await async_echo.echo( + { + "error": { + "code": code_pb2.Code.Value("INVALID_ARGUMENT"), + "message": message, + }, + } + ) + assert exc.value.message == message From 52d2ccdae5a44b965c55e0801c519eaf93955e9c Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 28 Aug 2024 10:18:45 -0400 Subject: [PATCH 1174/1339] cleanup: refactor empty call test macro (#2110) --- .../gapic/%name_%version/%sub/test_macros.j2 | 156 ++++++++---------- .../unit/gapic/asset_v1/test_asset_service.py | 69 +++++--- .../credentials_v1/test_iam_credentials.py | 12 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 54 ++++-- .../logging_v2/test_config_service_v2.py | 96 +++++++---- .../logging_v2/test_logging_service_v2.py | 15 +- .../logging_v2/test_metrics_service_v2.py | 15 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 33 ++-- 8 files changed, 268 insertions(+), 182 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 711de9f34de1..82bce1309973 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -123,37 +123,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if not method.client_streaming %} -def test_{{ method_name }}_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.{{ method_name }}() - call.assert_called() - _, args, _ = call.mock_calls[0] - {% if method.client_streaming %} - assert next(args[0]) == request - {% else %} - {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} - {% if method_settings is not none %} - {% for auto_populated_field in method_settings.auto_populated_fields %} - # Ensure that the uuid4 field is set according to AIP 4235 - assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) - # clear UUID field so that the check below succeeds - args[0].{{ auto_populated_field }} = None - {% endfor %} - {% endif %}{# if method_settings is not none #} - {% endwith %}{# method_settings #} - assert args[0] == {{ method.input.ident }}() - {% endif %} +{{ empty_call_test(method, method_name, service, api, uuid4_re)}} def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): @@ -249,59 +219,7 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): {% if not full_extended_lro %} {% if not method.client_streaming %} -@pytest.mark.asyncio -async def test_{{ 
method_name }}_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = {{ service.async_client_name }}( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - # Designate an appropriate return value for the call. - {% if method.void %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - {% elif method.lro %} - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - {% elif not method.client_streaming and method.server_streaming %} - call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% elif method.client_streaming and method.server_streaming %} - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ '' }} - {%- if not method.client_streaming and not method.server_streaming -%} - grpc_helpers_async.FakeUnaryUnaryCall - {%- else -%} - grpc_helpers_async.FakeStreamUnaryCall - {%- endif -%}({{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %} - {% endfor %} - )) - {% endif %} - response = await client.{{ method_name }}() - call.assert_called() - _, args, _ = call.mock_calls[0] - {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} - {% if method_settings is not none %} - {% for auto_populated_field in method_settings.auto_populated_fields %} - # Ensure that 
the uuid4 field is set according to AIP 4235 - assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) - # clear UUID field so that the check below succeeds - args[0].{{ auto_populated_field }} = None - {% endfor %} - {% endif %}{# if method_settings is not none #} - {% endwith %}{# method_settings #} - assert args[0] == {{ method.input.ident }}() +{{ empty_call_test(method, method_name, service, api, uuid4_re, is_async=True) }} {% endif %} @pytest.mark.asyncio @@ -1888,3 +1806,73 @@ def test_{{ method_name }}_rest_no_http_options(): {% endif %}{# not method.http_options #} {% endwith %}{# method_name #} {% endmacro %} + + +{% macro empty_call_test(method, method_name, service, api, uuid4_re, is_async=False) %} +{% if is_async %} +@pytest.mark.asyncio +async def test_{{ method_name }}_empty_call_async(): +{% else %} +def test_{{ method_name }}_empty_call(): +{% endif %}{# if is_async #} + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + {% if is_async %} + client = {{ service.async_client_name }}( + credentials=async_anonymous_credentials(), + transport='grpc_asyncio', + ) + {% else %} + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + {% endif %}{# if is_async #} + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.{{ method.transport_safe_name|snake_case }}), + '__call__') as call: + {% if is_async %} + # Designate an appropriate return value for the call. 
+ {% if method.void %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + {% elif method.lro %} + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + {% elif method.server_streaming %} + call.return_value = mock.Mock(aio.UnaryStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ '' }} + {%- if not method.server_streaming -%} + grpc_helpers_async.FakeUnaryUnaryCall + {%- else -%} + grpc_helpers_async.FakeStreamUnaryCall + {%- endif -%}({{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message') %}{% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + )) + {% endif %}{# method.void #} + await client.{{ method_name }}() + {% else %}{# if not is_async #} + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.{{ method_name }}() + {% endif %}{# is_async #} + call.assert_called() + _, args, _ = call.mock_calls[0] + {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} + {% if method_settings is not none %} + {% for auto_populated_field in method_settings.auto_populated_fields %} + # Ensure that the uuid4 field is set according to AIP 4235 + assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + # clear UUID field so that the check below succeeds + args[0].{{ auto_populated_field }} = None + {% endfor %}{# for auto_populated_field in method_settings.auto_populated_fields #} + {% endif %}{# if method_settings is not none #} + {% endwith %}{# method_settings #} + assert args[0] == {{ method.input.ident }}() +{% endmacro %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 8735a29496de..6e0e9a638128 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -915,11 +915,12 @@ async def test_export_assets_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.export_assets() + await client.export_assets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ExportAssetsRequest() + @pytest.mark.asyncio async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1190,11 +1191,12 @@ async def test_list_assets_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( 
next_page_token='next_page_token_value', )) - response = await client.list_assets() + await client.list_assets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListAssetsRequest() + @pytest.mark.asyncio async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1734,11 +1736,12 @@ async def test_batch_get_assets_history_empty_call_async(): # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( )) - response = await client.batch_get_assets_history() + await client.batch_get_assets_history() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + @pytest.mark.asyncio async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2015,11 +2018,12 @@ async def test_create_feed_empty_call_async(): content_type=asset_service.ContentType.RESOURCE, relationship_types=['relationship_types_value'], )) - response = await client.create_feed() + await client.create_feed() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateFeedRequest() + @pytest.mark.asyncio async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2386,11 +2390,12 @@ async def test_get_feed_empty_call_async(): content_type=asset_service.ContentType.RESOURCE, relationship_types=['relationship_types_value'], )) - response = await client.get_feed() + await client.get_feed() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetFeedRequest() + @pytest.mark.asyncio async def 
test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2742,11 +2747,12 @@ async def test_list_feeds_empty_call_async(): # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( )) - response = await client.list_feeds() + await client.list_feeds() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListFeedsRequest() + @pytest.mark.asyncio async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3101,11 +3107,12 @@ async def test_update_feed_empty_call_async(): content_type=asset_service.ContentType.RESOURCE, relationship_types=['relationship_types_value'], )) - response = await client.update_feed() + await client.update_feed() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateFeedRequest() + @pytest.mark.asyncio async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3455,11 +3462,12 @@ async def test_delete_feed_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_feed() + await client.delete_feed() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteFeedRequest() + @pytest.mark.asyncio async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3809,11 +3817,12 @@ async def test_search_all_resources_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( next_page_token='next_page_token_value', )) - response = await client.search_all_resources() + await client.search_all_resources() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllResourcesRequest() + @pytest.mark.asyncio async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4382,11 +4391,12 @@ async def test_search_all_iam_policies_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( next_page_token='next_page_token_value', )) - response = await client.search_all_iam_policies() + await client.search_all_iam_policies() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.SearchAllIamPoliciesRequest() + @pytest.mark.asyncio async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4939,11 +4949,12 @@ async def test_analyze_iam_policy_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( fully_explored=True, )) - response = await client.analyze_iam_policy() + await client.analyze_iam_policy() call.assert_called() _, args, _ 
= call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyRequest() + @pytest.mark.asyncio async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5210,11 +5221,12 @@ async def test_analyze_iam_policy_longrunning_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.analyze_iam_policy_longrunning() + await client.analyze_iam_policy_longrunning() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5482,11 +5494,12 @@ async def test_analyze_move_empty_call_async(): # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( )) - response = await client.analyze_move() + await client.analyze_move() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeMoveRequest() + @pytest.mark.asyncio async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5758,11 +5771,12 @@ async def test_query_assets_empty_call_async(): job_reference='job_reference_value', done=True, )) - response = await client.query_assets() + await client.query_assets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.QueryAssetsRequest() + @pytest.mark.asyncio async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6040,11 +6054,12 @@ async def test_create_saved_query_empty_call_async(): creator='creator_value', last_updater='last_updater_value', )) - response = await client.create_saved_query() + await client.create_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.CreateSavedQueryRequest() + @pytest.mark.asyncio async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6426,11 +6441,12 @@ async def test_get_saved_query_empty_call_async(): creator='creator_value', last_updater='last_updater_value', )) - response = await client.get_saved_query() + await client.get_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.GetSavedQueryRequest() + @pytest.mark.asyncio async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ 
-6787,11 +6803,12 @@ async def test_list_saved_queries_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( next_page_token='next_page_token_value', )) - response = await client.list_saved_queries() + await client.list_saved_queries() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.ListSavedQueriesRequest() + @pytest.mark.asyncio async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7341,11 +7358,12 @@ async def test_update_saved_query_empty_call_async(): creator='creator_value', last_updater='last_updater_value', )) - response = await client.update_saved_query() + await client.update_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.UpdateSavedQueryRequest() + @pytest.mark.asyncio async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7703,11 +7721,12 @@ async def test_delete_saved_query_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_saved_query() + await client.delete_saved_query() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.DeleteSavedQueryRequest() + @pytest.mark.asyncio async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8048,11 +8067,12 @@ async def test_batch_get_effective_iam_policies_empty_call_async(): # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( )) - response = await client.batch_get_effective_iam_policies() + await client.batch_get_effective_iam_policies() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8321,11 +8341,12 @@ async def test_analyze_org_policies_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( next_page_token='next_page_token_value', )) - response = await client.analyze_org_policies() + await client.analyze_org_policies() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + @pytest.mark.asyncio async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8894,11 +8915,12 @@ async def test_analyze_org_policy_governed_containers_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( next_page_token='next_page_token_value', )) - response = await client.analyze_org_policy_governed_containers() + await client.analyze_org_policy_governed_containers() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9467,11 +9489,12 @@ async def test_analyze_org_policy_governed_assets_empty_call_async(): 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( next_page_token='next_page_token_value', )) - response = await client.analyze_org_policy_governed_assets() + await client.analyze_org_policy_governed_assets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 61ef5a893fc7..2f646744fdad 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -903,11 +903,12 @@ async def test_generate_access_token_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( access_token='access_token_value', )) - response = await client.generate_access_token() + await client.generate_access_token() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateAccessTokenRequest() + @pytest.mark.asyncio async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1282,11 +1283,12 @@ async def test_generate_id_token_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( token='token_value', )) - response = await client.generate_id_token() + await 
client.generate_id_token() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.GenerateIdTokenRequest() + @pytest.mark.asyncio async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1666,11 +1668,12 @@ async def test_sign_blob_empty_call_async(): key_id='key_id_value', signed_blob=b'signed_blob_blob', )) - response = await client.sign_blob() + await client.sign_blob() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.SignBlobRequest() + @pytest.mark.asyncio async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2044,11 +2047,12 @@ async def test_sign_jwt_empty_call_async(): key_id='key_id_value', signed_jwt='signed_jwt_value', )) - response = await client.sign_jwt() + await client.sign_jwt() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == common.SignJwtRequest() + @pytest.mark.asyncio async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index b5a233ef0afc..23ef03067160 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -935,11 +935,12 @@ async def test_get_trigger_empty_call_async(): channel='channel_value', etag='etag_value', )) - response = await client.get_trigger() + await client.get_trigger() call.assert_called() _, args, _ = call.mock_calls[0] 
assert args[0] == eventarc.GetTriggerRequest() + @pytest.mark.asyncio async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1303,11 +1304,12 @@ async def test_list_triggers_empty_call_async(): next_page_token='next_page_token_value', unreachable=['unreachable_value'], )) - response = await client.list_triggers() + await client.list_triggers() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListTriggersRequest() + @pytest.mark.asyncio async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1856,11 +1858,12 @@ async def test_create_trigger_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.create_trigger() + await client.create_trigger() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateTriggerRequest() + @pytest.mark.asyncio async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2233,11 +2236,12 @@ async def test_update_trigger_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.update_trigger() + await client.update_trigger() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateTriggerRequest() + @pytest.mark.asyncio async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2614,11 +2618,12 @@ async def test_delete_trigger_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( 
operations_pb2.Operation(name='operations/spam') ) - response = await client.delete_trigger() + await client.delete_trigger() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteTriggerRequest() + @pytest.mark.asyncio async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2997,11 +3002,12 @@ async def test_get_channel_empty_call_async(): activation_token='activation_token_value', crypto_key_name='crypto_key_name_value', )) - response = await client.get_channel() + await client.get_channel() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelRequest() + @pytest.mark.asyncio async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3365,11 +3371,12 @@ async def test_list_channels_empty_call_async(): next_page_token='next_page_token_value', unreachable=['unreachable_value'], )) - response = await client.list_channels() + await client.list_channels() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelsRequest() + @pytest.mark.asyncio async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3918,11 +3925,12 @@ async def test_create_channel_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.create_channel() + await client.create_channel() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelRequest() + @pytest.mark.asyncio async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped 
rpcs, @@ -4295,11 +4303,12 @@ async def test_update_channel_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.update_channel() + await client.update_channel() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateChannelRequest() + @pytest.mark.asyncio async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4664,11 +4673,12 @@ async def test_delete_channel_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.delete_channel() + await client.delete_channel() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelRequest() + @pytest.mark.asyncio async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5024,11 +5034,12 @@ async def test_get_provider_empty_call_async(): name='name_value', display_name='display_name_value', )) - response = await client.get_provider() + await client.get_provider() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetProviderRequest() + @pytest.mark.asyncio async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5386,11 +5397,12 @@ async def test_list_providers_empty_call_async(): next_page_token='next_page_token_value', unreachable=['unreachable_value'], )) - response = await client.list_providers() + await client.list_providers() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListProvidersRequest() + @pytest.mark.asyncio async def 
test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5944,11 +5956,12 @@ async def test_get_channel_connection_empty_call_async(): channel='channel_value', activation_token='activation_token_value', )) - response = await client.get_channel_connection() + await client.get_channel_connection() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetChannelConnectionRequest() + @pytest.mark.asyncio async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6306,11 +6319,12 @@ async def test_list_channel_connections_empty_call_async(): next_page_token='next_page_token_value', unreachable=['unreachable_value'], )) - response = await client.list_channel_connections() + await client.list_channel_connections() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.ListChannelConnectionsRequest() + @pytest.mark.asyncio async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6859,11 +6873,12 @@ async def test_create_channel_connection_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.create_channel_connection() + await client.create_channel_connection() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelConnectionRequest() + @pytest.mark.asyncio async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7238,11 +7253,12 @@ async def test_delete_channel_connection_empty_call_async(): call.return_value = 
grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.delete_channel_connection() + await client.delete_channel_connection() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.DeleteChannelConnectionRequest() + @pytest.mark.asyncio async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7598,11 +7614,12 @@ async def test_get_google_channel_config_empty_call_async(): name='name_value', crypto_key_name='crypto_key_name_value', )) - response = await client.get_google_channel_config() + await client.get_google_channel_config() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.GetGoogleChannelConfigRequest() + @pytest.mark.asyncio async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7952,11 +7969,12 @@ async def test_update_google_channel_config_empty_call_async(): name='name_value', crypto_key_name='crypto_key_name_value', )) - response = await client.update_google_channel_config() + await client.update_google_channel_config() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + @pytest.mark.asyncio async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 15b2af664e59..62c50348d531 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -892,11 +892,12 @@ async def test_list_buckets_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( next_page_token='next_page_token_value', )) - response = await client.list_buckets() + await client.list_buckets() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListBucketsRequest() + @pytest.mark.asyncio async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1457,11 +1458,12 @@ async def test_get_bucket_empty_call_async(): analytics_enabled=True, restricted_fields=['restricted_fields_value'], )) - response = await client.get_bucket() + await client.get_bucket() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetBucketRequest() + @pytest.mark.asyncio async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1742,11 +1744,12 @@ async def test_create_bucket_async_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.create_bucket_async() + await client.create_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() + @pytest.mark.asyncio async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2017,11 +2020,12 @@ async def test_update_bucket_async_empty_call_async(): call.return_value = 
grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.update_bucket_async() + await client.update_bucket_async() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() + @pytest.mark.asyncio async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2310,11 +2314,12 @@ async def test_create_bucket_empty_call_async(): analytics_enabled=True, restricted_fields=['restricted_fields_value'], )) - response = await client.create_bucket() + await client.create_bucket() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateBucketRequest() + @pytest.mark.asyncio async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2609,11 +2614,12 @@ async def test_update_bucket_empty_call_async(): analytics_enabled=True, restricted_fields=['restricted_fields_value'], )) - response = await client.update_bucket() + await client.update_bucket() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateBucketRequest() + @pytest.mark.asyncio async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2885,11 +2891,12 @@ async def test_delete_bucket_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_bucket() + await client.delete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteBucketRequest() + @pytest.mark.asyncio async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3146,11 +3153,12 @@ async def test_undelete_bucket_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.undelete_bucket() + await client.undelete_bucket() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UndeleteBucketRequest() + @pytest.mark.asyncio async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3414,11 +3422,12 @@ async def test_list_views_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( next_page_token='next_page_token_value', )) - response = await client.list_views() + await client.list_views() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListViewsRequest() + @pytest.mark.asyncio async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3967,11 +3976,12 @@ async def test_get_view_empty_call_async(): description='description_value', filter='filter_value', )) - response = await client.get_view() + await client.get_view() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetViewRequest() + @pytest.mark.asyncio async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # 
Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4248,11 +4258,12 @@ async def test_create_view_empty_call_async(): description='description_value', filter='filter_value', )) - response = await client.create_view() + await client.create_view() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateViewRequest() + @pytest.mark.asyncio async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4527,11 +4538,12 @@ async def test_update_view_empty_call_async(): description='description_value', filter='filter_value', )) - response = await client.update_view() + await client.update_view() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateViewRequest() + @pytest.mark.asyncio async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4795,11 +4807,12 @@ async def test_delete_view_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_view() + await client.delete_view() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteViewRequest() + @pytest.mark.asyncio async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5063,11 +5076,12 @@ async def test_list_sinks_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( next_page_token='next_page_token_value', )) - response = await client.list_sinks() + await client.list_sinks() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListSinksRequest() + @pytest.mark.asyncio async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5631,11 +5645,12 @@ async def test_get_sink_empty_call_async(): writer_identity='writer_identity_value', include_children=True, )) - response = await client.get_sink() + await client.get_sink() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSinkRequest() + @pytest.mark.asyncio async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6017,11 +6032,12 @@ async def test_create_sink_empty_call_async(): writer_identity='writer_identity_value', include_children=True, )) - response = await client.create_sink() + await client.create_sink() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateSinkRequest() + @pytest.mark.asyncio async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6413,11 +6429,12 
@@ async def test_update_sink_empty_call_async(): writer_identity='writer_identity_value', include_children=True, )) - response = await client.update_sink() + await client.update_sink() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSinkRequest() + @pytest.mark.asyncio async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6793,11 +6810,12 @@ async def test_delete_sink_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_sink() + await client.delete_sink() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteSinkRequest() + @pytest.mark.asyncio async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7145,11 +7163,12 @@ async def test_create_link_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.create_link() + await client.create_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateLinkRequest() + @pytest.mark.asyncio async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7524,11 +7543,12 @@ async def test_delete_link_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.delete_link() + await client.delete_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteLinkRequest() + @pytest.mark.asyncio async def 
test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7883,11 +7903,12 @@ async def test_list_links_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( next_page_token='next_page_token_value', )) - response = await client.list_links() + await client.list_links() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListLinksRequest() + @pytest.mark.asyncio async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8436,11 +8457,12 @@ async def test_get_link_empty_call_async(): description='description_value', lifecycle_state=logging_config.LifecycleState.ACTIVE, )) - response = await client.get_link() + await client.get_link() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetLinkRequest() + @pytest.mark.asyncio async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8793,11 +8815,12 @@ async def test_list_exclusions_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( next_page_token='next_page_token_value', )) - response = await client.list_exclusions() + await client.list_exclusions() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.ListExclusionsRequest() + @pytest.mark.asyncio async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9349,11 +9372,12 @@ async def test_get_exclusion_empty_call_async(): filter='filter_value', disabled=True, )) - response = await client.get_exclusion() + await 
client.get_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetExclusionRequest() + @pytest.mark.asyncio async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9715,11 +9739,12 @@ async def test_create_exclusion_empty_call_async(): filter='filter_value', disabled=True, )) - response = await client.create_exclusion() + await client.create_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CreateExclusionRequest() + @pytest.mark.asyncio async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10091,11 +10116,12 @@ async def test_update_exclusion_empty_call_async(): filter='filter_value', disabled=True, )) - response = await client.update_exclusion() + await client.update_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateExclusionRequest() + @pytest.mark.asyncio async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10463,11 +10489,12 @@ async def test_delete_exclusion_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_exclusion() + await client.delete_exclusion() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.DeleteExclusionRequest() + @pytest.mark.asyncio async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10820,11 +10847,12 @@ async def test_get_cmek_settings_empty_call_async(): kms_key_version_name='kms_key_version_name_value', service_account_id='service_account_id_value', )) - response = await client.get_cmek_settings() + await client.get_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetCmekSettingsRequest() + @pytest.mark.asyncio async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11104,11 +11132,12 @@ async def test_update_cmek_settings_empty_call_async(): kms_key_version_name='kms_key_version_name_value', service_account_id='service_account_id_value', )) - response = await client.update_cmek_settings() + await client.update_cmek_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateCmekSettingsRequest() + @pytest.mark.asyncio async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11391,11 +11420,12 @@ async def test_get_settings_empty_call_async(): storage_location='storage_location_value', disable_default_sink=True, )) - response = await client.get_settings() + await client.get_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.GetSettingsRequest() + @pytest.mark.asyncio async def 
test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11762,11 +11792,12 @@ async def test_update_settings_empty_call_async(): storage_location='storage_location_value', disable_default_sink=True, )) - response = await client.update_settings() + await client.update_settings() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.UpdateSettingsRequest() + @pytest.mark.asyncio async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12137,11 +12168,12 @@ async def test_copy_log_entries_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.copy_log_entries() + await client.copy_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_config.CopyLogEntriesRequest() + @pytest.mark.asyncio async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e307dd2b6528..b0fd3c2d5fb7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -887,11 +887,12 @@ async def test_delete_log_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log() + await client.delete_log() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.DeleteLogRequest() + @pytest.mark.asyncio async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1232,11 +1233,12 @@ async def test_write_log_entries_empty_call_async(): # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( )) - response = await client.write_log_entries() + await client.write_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.WriteLogEntriesRequest() + @pytest.mark.asyncio async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1552,11 +1554,12 @@ async def test_list_log_entries_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( next_page_token='next_page_token_value', )) - response = await client.list_log_entries() + await client.list_log_entries() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogEntriesRequest() + @pytest.mark.asyncio async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2051,11 +2054,12 @@ async def test_list_monitored_resource_descriptors_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( next_page_token='next_page_token_value', )) - response = await client.list_monitored_resource_descriptors() + await client.list_monitored_resource_descriptors() call.assert_called() _, args, 
_ = call.mock_calls[0] assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2453,11 +2457,12 @@ async def test_list_logs_empty_call_async(): log_names=['log_names_value'], next_page_token='next_page_token_value', )) - response = await client.list_logs() + await client.list_logs() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging.ListLogsRequest() + @pytest.mark.asyncio async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index eac9dd581d9f..939eb6911f7b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -892,11 +892,12 @@ async def test_list_log_metrics_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( next_page_token='next_page_token_value', )) - response = await client.list_log_metrics() + await client.list_log_metrics() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.ListLogMetricsRequest() + @pytest.mark.asyncio async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1457,11 +1458,12 @@ async def test_get_log_metric_empty_call_async(): 
value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) - response = await client.get_log_metric() + await client.get_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.GetLogMetricRequest() + @pytest.mark.asyncio async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1838,11 +1840,12 @@ async def test_create_log_metric_empty_call_async(): value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) - response = await client.create_log_metric() + await client.create_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.CreateLogMetricRequest() + @pytest.mark.asyncio async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2229,11 +2232,12 @@ async def test_update_log_metric_empty_call_async(): value_extractor='value_extractor_value', version=logging_metrics.LogMetric.ApiVersion.V1, )) - response = await client.update_log_metric() + await client.update_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.UpdateLogMetricRequest() + @pytest.mark.asyncio async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2597,11 +2601,12 @@ async def test_delete_log_metric_empty_call_async(): '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_log_metric() + await client.delete_log_metric() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == logging_metrics.DeleteLogMetricRequest() + @pytest.mark.asyncio async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 9d8f120f0e66..b06145517d4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -919,11 +919,12 @@ async def test_list_instances_empty_call_async(): next_page_token='next_page_token_value', unreachable=['unreachable_value'], )) - response = await client.list_instances() + await client.list_instances() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ListInstancesRequest() + @pytest.mark.asyncio async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1546,11 +1547,12 @@ async def test_get_instance_empty_call_async(): maintenance_version='maintenance_version_value', available_maintenance_versions=['available_maintenance_versions_value'], )) - response = await client.get_instance() + await client.get_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceRequest() + @pytest.mark.asyncio async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached 
wrapped rpcs, @@ -1949,11 +1951,12 @@ async def test_get_instance_auth_string_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( auth_string='auth_string_value', )) - response = await client.get_instance_auth_string() + await client.get_instance_auth_string() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + @pytest.mark.asyncio async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2304,11 +2307,12 @@ async def test_create_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.create_instance() + await client.create_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.CreateInstanceRequest() + @pytest.mark.asyncio async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2681,11 +2685,12 @@ async def test_update_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.update_instance() + await client.update_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpdateInstanceRequest() + @pytest.mark.asyncio async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3052,11 +3057,12 @@ async def test_upgrade_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.upgrade_instance() + await 
client.upgrade_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.UpgradeInstanceRequest() + @pytest.mark.asyncio async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3421,11 +3427,12 @@ async def test_import_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.import_instance() + await client.import_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ImportInstanceRequest() + @pytest.mark.asyncio async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3790,11 +3797,12 @@ async def test_export_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.export_instance() + await client.export_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.ExportInstanceRequest() + @pytest.mark.asyncio async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4159,11 +4167,12 @@ async def test_failover_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.failover_instance() + await client.failover_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.FailoverInstanceRequest() + @pytest.mark.asyncio async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create 
cached wrapped rpcs, @@ -4528,11 +4537,12 @@ async def test_delete_instance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.delete_instance() + await client.delete_instance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.DeleteInstanceRequest() + @pytest.mark.asyncio async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4887,11 +4897,12 @@ async def test_reschedule_maintenance_empty_call_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name='operations/spam') ) - response = await client.reschedule_maintenance() + await client.reschedule_maintenance() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + @pytest.mark.asyncio async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, From f71d69dca260690bd79ca9e02543b4c92382e60b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 29 Aug 2024 11:08:44 -0400 Subject: [PATCH 1175/1339] feat: Add support for reading ClientLibrarySettings from service configuration YAML (#2098) --- packages/gapic-generator/gapic/schema/api.py | 77 +++++++++++++++-- .../tests/unit/schema/test_api.py | 82 +++++++++++++++---- 2 files changed, 138 insertions(+), 21 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index d3aa9f07ca21..2f3992ceb978 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -68,6 +68,14 @@ class MethodSettingsError(ValueError): pass +class ClientLibrarySettingsError(ValueError): + """ + Raised when 
`google.api.client_pb2.ClientLibrarySettings` contains + an invalid value. + """ + pass + + @dataclasses.dataclass(frozen=True) class Proto: """A representation of a particular proto file within an API.""" @@ -574,7 +582,7 @@ def mixin_http_options(self): def all_methods(self) -> Mapping[str, MethodDescriptorProto]: """Return a map of all methods for the API. - Return: + Returns: Mapping[str, MethodDescriptorProto]: A mapping of MethodDescriptorProto values for the API. """ @@ -607,7 +615,7 @@ def enforce_valid_method_settings( Args: service_method_settings (Sequence[client_pb2.MethodSettings]): Method settings to be used when generating API methods. - Return: + Returns: None Raises: MethodSettingsError: if fields in `method_settings.auto_populated_fields` @@ -615,13 +623,13 @@ def enforce_valid_method_settings( """ all_errors: dict = {} - selectors_seen = [] + selectors_seen: set = set() for method_settings in service_method_settings: # Check if this selector is defind more than once if method_settings.selector in selectors_seen: all_errors[method_settings.selector] = ["Duplicate selector"] continue - selectors_seen.append(method_settings.selector) + selectors_seen.add(method_settings.selector) method_descriptor = self.all_methods.get(method_settings.selector) # Check if this selector can be mapped to a method in the API. @@ -670,13 +678,70 @@ def enforce_valid_method_settings( if all_errors: raise MethodSettingsError(yaml.dump(all_errors)) + @cached_property + def all_library_settings( + self, + ) -> Mapping[str, Sequence[client_pb2.ClientLibrarySettings]]: + """Return a map of all `google.api.client.ClientLibrarySettings` to be used + when generating client libraries. + https://github.com/googleapis/googleapis/blob/master/google/api/client.proto#L130 + + Returns: + Mapping[str, Sequence[client_pb2.ClientLibrarySettings]]: A mapping of all library + settings read from the service YAML. 
+ + Raises: + gapic.schema.api.ClientLibrarySettingsError: Raised when `google.api.client_pb2.ClientLibrarySettings` + contains an invalid value. + """ + self.enforce_valid_library_settings( + self.service_yaml_config.publishing.library_settings + ) + + return { + library_setting.version: client_pb2.ClientLibrarySettings( + version=library_setting.version, + python_settings=library_setting.python_settings, + ) + for library_setting in self.service_yaml_config.publishing.library_settings + } + + def enforce_valid_library_settings( + self, client_library_settings: Sequence[client_pb2.ClientLibrarySettings] + ) -> None: + """ + Checks each `google.api.client.ClientLibrarySettings` provided for validity and + raises an exception if invalid values are found. + + Args: + client_library_settings (Sequence[client_pb2.ClientLibrarySettings]): Client + library settings to be used when generating API methods. + Returns: + None + Raises: + ClientLibrarySettingsError: if fields in `client_library_settings.experimental_features` + are not supported. + """ + + all_errors: dict = {} + versions_seen: set = set() + for library_settings in client_library_settings: + # Check if this version is defind more than once + if library_settings.version in versions_seen: + all_errors[library_settings.version] = ["Duplicate version"] + continue + versions_seen.add(library_settings.version) + + if all_errors: + raise ClientLibrarySettingsError(yaml.dump(all_errors)) + @cached_property def all_method_settings(self) -> Mapping[str, Sequence[client_pb2.MethodSettings]]: """Return a map of all `google.api.client.MethodSettings` to be used when generating methods. https://github.com/googleapis/googleapis/blob/7dab3de7ec79098bb367b6b2ac3815512a49dd56/google/api/client.proto#L325 - Return: + Returns: Mapping[str, Sequence[client_pb2.MethodSettings]]: A mapping of all method settings read from the service YAML. 
@@ -953,7 +1018,7 @@ def _load_children(self, used to correspond to documentation in ``SourceCodeInfo.Location`` in ``descriptor.proto``. - Return: + Returns: Mapping[str, Union[~.MessageType, ~.Service, ~.EnumType]]: A sequence of the objects that were loaded. """ diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index b242fd9e811c..78e94949e74a 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2608,7 +2608,7 @@ def test_has_iam_mixin(): assert api_schema.has_iam_mixin -def get_file_descriptor_proto_for_method_settings_tests( +def get_file_descriptor_proto_for_tests( fields: Sequence[descriptor_pb2.FieldDescriptorProto] = None, client_streaming: bool = False, server_streaming: bool = False, @@ -2621,7 +2621,7 @@ def get_file_descriptor_proto_for_method_settings_tests( `descriptor_pb2.FileDescriptorProto` should use client streaming. server_streaming (bool): Whether the methods in the return object `descriptor_pb2.FileDescriptorProto` should use server streaming. - Return: + Returns: descriptor_pb2.FileDescriptorProto: Returns an object describing the API. """ @@ -2686,7 +2686,7 @@ def test_api_all_methods(): Tests the `all_methods` method of `gapic.schema.api` method which returns a map of all methods for the API. 
""" - fd = get_file_descriptor_proto_for_method_settings_tests() + fd = get_file_descriptor_proto_for_tests() api_schema = api.API.build(fd, "google.example.v1beta1") assert len(api_schema.all_methods) == 2 assert list(api_schema.all_methods.keys()) == [ @@ -2695,6 +2695,58 @@ def test_api_all_methods(): ] +def test_read_python_settings_from_service_yaml(): + service_yaml_config = { + "apis": [ + {"name": "google.example.v1beta1.ServiceOne.Example1"}, + ], + "publishing": { + "library_settings": [ + { + "version": "google.example.v1beta1", + "python_settings": { + "experimental_features": {"rest_async_io_enabled": True}, + }, + } + ] + }, + } + cli_options = Options(service_yaml_config=service_yaml_config) + fd = get_file_descriptor_proto_for_tests(fields=[]) + api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) + assert api_schema.all_library_settings == { + "google.example.v1beta1": client_pb2.ClientLibrarySettings( + version="google.example.v1beta1", + python_settings=client_pb2.PythonSettings( + experimental_features=client_pb2.PythonSettings.ExperimentalFeatures( + rest_async_io_enabled=True + ) + ), + ) + } + + +def test_python_settings_duplicate_version_raises_error(): + """ + Test that `ClientLibrarySettingsError` is raised when there are duplicate versions in + `client_pb2.ClientLibrarySettings`. 
+ """ + fd = get_file_descriptor_proto_for_tests() + api_schema = api.API.build(fd, "google.example.v1beta1") + clientlibrarysettings = [ + client_pb2.ClientLibrarySettings( + version="google.example.v1beta1", + ), + client_pb2.ClientLibrarySettings( + version="google.example.v1beta1", + ), + ] + with pytest.raises( + api.ClientLibrarySettingsError, match="(?i)duplicate version" + ): + api_schema.enforce_valid_library_settings(clientlibrarysettings) + + def test_read_method_settings_from_service_yaml(): """ Tests the `gapic.schema.api.all_method_settings` method which reads @@ -2730,7 +2782,7 @@ def test_read_method_settings_from_service_yaml(): name="mollusc", type="TYPE_STRING", options=field_options, number=2 ) fields = [squid, mollusc] - fd = get_file_descriptor_proto_for_method_settings_tests(fields=fields) + fd = get_file_descriptor_proto_for_tests(fields=fields) api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) assert api_schema.all_method_settings == { "google.example.v1beta1.ServiceOne.Example1": client_pb2.MethodSettings( @@ -2746,7 +2798,7 @@ def test_method_settings_duplicate_selector_raises_error(): Test that `MethodSettingsError` is raised when there are duplicate selectors in `client_pb2.MethodSettings`. 
""" - fd = get_file_descriptor_proto_for_method_settings_tests() + fd = get_file_descriptor_proto_for_tests() api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -2770,7 +2822,7 @@ def test_method_settings_invalid_selector_raises_error(): method_example1 = "google.example.v1beta1.DoesNotExist.Example1" method_example2 = "google.example.v1beta1.ServiceOne.DoesNotExist" - fd = get_file_descriptor_proto_for_method_settings_tests() + fd = get_file_descriptor_proto_for_tests() api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -2802,7 +2854,7 @@ def test_method_settings_unsupported_auto_populated_field_type_raises_error(): `client_pb2.MethodSettings.auto_populated_fields` is not of type string. """ squid = make_field_pb2(name="squid", type="TYPE_INT32", number=1) - fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + fd = get_file_descriptor_proto_for_tests(fields=[squid]) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -2820,7 +2872,7 @@ def test_method_settings_auto_populated_field_not_found_raises_error(): `client_pb2.MethodSettings.auto_populated_fields` is not found in the top-level request message of the selector. 
""" - fd = get_file_descriptor_proto_for_method_settings_tests() + fd = get_file_descriptor_proto_for_tests() api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -2846,7 +2898,7 @@ def test_method_settings_auto_populated_nested_field_raises_error(): type='TYPE_MESSAGE', ) - fd = get_file_descriptor_proto_for_method_settings_tests( + fd = get_file_descriptor_proto_for_tests( fields=[octopus.field_pb] ) api_schema = api.API.build(fd, "google.example.v1beta1") @@ -2865,7 +2917,7 @@ def test_method_settings_auto_populated_field_client_streaming_rpc_raises_error( Test that `MethodSettingsError` is raised when the selector in `client_pb2.MethodSettings.selector` maps to a method which uses client streaming. """ - fd = get_file_descriptor_proto_for_method_settings_tests( + fd = get_file_descriptor_proto_for_tests( client_streaming=True ) api_schema = api.API.build(fd, "google.example.v1beta1") @@ -2886,7 +2938,7 @@ def test_method_settings_auto_populated_field_server_streaming_rpc_raises_error( Test that `MethodSettingsError` is raised when the selector in `client_pb2.MethodSettings.selector` maps to a method which uses server streaming. """ - fd = get_file_descriptor_proto_for_method_settings_tests( + fd = get_file_descriptor_proto_for_tests( server_streaming=True ) api_schema = api.API.build(fd, "google.example.v1beta1") @@ -2914,7 +2966,7 @@ def test_method_settings_unsupported_auto_populated_field_behavior_raises_error( squid = make_field_pb2( name="squid", type="TYPE_STRING", options=field_options, number=1 ) - fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + fd = get_file_descriptor_proto_for_tests(fields=[squid]) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -2936,7 +2988,7 @@ def test_method_settings_auto_populated_field_field_info_format_not_specified_ra the format of the field is not specified. 
""" squid = make_field_pb2(name="squid", type="TYPE_STRING", number=1) - fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + fd = get_file_descriptor_proto_for_tests(fields=[squid]) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -2962,7 +3014,7 @@ def test_method_settings_unsupported_auto_populated_field_field_info_format_rais squid = make_field_pb2( name="squid", type="TYPE_STRING", options=field_options, number=1 ) - fd = get_file_descriptor_proto_for_method_settings_tests(fields=[squid]) + fd = get_file_descriptor_proto_for_tests(fields=[squid]) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -3001,7 +3053,7 @@ def test_method_settings_invalid_multiple_issues(): # Field Octopus Errors # - Not annotated with google.api.field_info.format = UUID4 octopus = make_field_pb2(name="octopus", type="TYPE_STRING", number=1) - fd = get_file_descriptor_proto_for_method_settings_tests( + fd = get_file_descriptor_proto_for_tests( fields=[squid, octopus] ) api_schema = api.API.build(fd, "google.example.v1beta1") From 33fc935d2b0484c3cc9d854d1ac882af6463c799 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Fri, 30 Aug 2024 00:13:43 -0400 Subject: [PATCH 1176/1339] chore: refactor wrap method helper into a macro (#2111) --- .../%sub/services/%service/_shared_macros.j2 | 50 ++++++++++++ .../%service/transports/grpc_asyncio.py.j2 | 40 +++------- .../%name_%version/%sub/test_%service.py.j2 | 16 +--- .../gapic/%name_%version/%sub/test_macros.j2 | 31 ++++++++ .../asset_service/transports/grpc_asyncio.py | 59 ++++++++------ .../unit/gapic/asset_v1/test_asset_service.py | 12 +++ .../transports/grpc_asyncio.py | 21 +++-- .../credentials_v1/test_iam_credentials.py | 12 +++ .../eventarc/transports/grpc_asyncio.py | 49 +++++++----- .../unit/gapic/eventarc_v1/test_eventarc.py | 12 +++ .../transports/grpc_asyncio.py | 77 +++++++++++-------- 
.../transports/grpc_asyncio.py | 25 ++++-- .../transports/grpc_asyncio.py | 23 ++++-- .../logging_v2/test_config_service_v2.py | 12 +++ .../logging_v2/test_logging_service_v2.py | 12 +++ .../logging_v2/test_metrics_service_v2.py | 12 +++ .../cloud_redis/transports/grpc_asyncio.py | 35 ++++++--- .../unit/gapic/redis_v1/test_cloud_redis.py | 12 +++ 18 files changed, 361 insertions(+), 149 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index baeb630b4faa..2236475864e8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -159,3 +159,53 @@ def _get_response( raise core_exceptions.from_http_response(response) {% endmacro %} + + +{% macro prep_wrapped_messages_async_method(service) %} +def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + {% for method in service.methods.values() %} + self.{{ method.transport_safe_name|snake_case }}: self._wrap_method( + self.{{ method.transport_safe_name|snake_case }}, + {% if method.retry %} + default_retry=retries.AsyncRetry( + {% if method.retry.initial_backoff %} + initial={{ method.retry.initial_backoff }}, + {% endif %} + {% if method.retry.max_backoff %} + maximum={{ method.retry.max_backoff }}, + {% endif %} + {% if method.retry.backoff_multiplier %} + multiplier={{ method.retry.backoff_multiplier }}, + {% endif %} + predicate=retries.if_exception_type( + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + core_exceptions.{{ ex.__name__ }}, + {% endfor %} + ), + deadline={{ method.timeout }}, + ), + {% endif %} 
+ default_timeout={{ method.timeout }}, + client_info=client_info, + ), + {% endfor %}{# service.methods.values() #} + } +{% endmacro %} + +{# TODO: This helper logic to check whether `kind` needs to be configured in wrap_method +can be removed once we require the correct version of the google-api-core dependency to +avoid having a gRPC code path in an async REST call. +See related issue: https://github.com/googleapis/python-api-core/issues/661. +In the meantime, if an older version of the dependency is installed (which has a wrap_method with +no kind parameter), then an async gRPC call will work correctly and async REST transport +will not be available as a transport. +See related issue: https://github.com/googleapis/gapic-generator-python/issues/2119. #} +{% macro wrap_async_method_macro() %} +def _wrap_method(self, func, *args, **kwargs): + {# TODO: Remove `pragma: NO COVER` once https://github.com/googleapis/python-api-core/pull/688 is merged. #} + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) +{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 8a3bc140bce5..8f26b936b1b6 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -1,7 +1,9 @@ {% extends '_base.py.j2' %} {% block content %} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -241,6 +243,7 @@ class {{ 
service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ) # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -385,39 +388,16 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs["test_iam_permissions"] {% endif %} - def _prep_wrapped_messages(self, client_info): - """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" - self._wrapped_methods = { - {% for method in service.methods.values() %} - self.{{ method.transport_safe_name|snake_case }}: gapic_v1.method_async.wrap_method( - self.{{ method.transport_safe_name|snake_case }}, - {% if method.retry %} - default_retry=retries.AsyncRetry( - {% if method.retry.initial_backoff %} - initial={{ method.retry.initial_backoff }}, - {% endif %} - {% if method.retry.max_backoff %} - maximum={{ method.retry.max_backoff }}, - {% endif %} - {% if method.retry.backoff_multiplier %} - multiplier={{ method.retry.backoff_multiplier }}, - {% endif %} - predicate=retries.if_exception_type( - {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} - core_exceptions.{{ ex.__name__ }}, - {% endfor %} - ), - deadline={{ method.timeout }}, - ), - {% endif %} - default_timeout={{ method.timeout }}, - client_info=client_info, - ), - {% endfor %} {# service.methods.values() #} - } + {{ shared_macros.prep_wrapped_messages_async_method(service)|indent(4) }} + + {{ shared_macros.wrap_async_method_macro()|indent(4) }} def close(self): return self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" {% include '%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2' %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 
b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 82948346e61d..8a64754a42cf 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1041,19 +1041,9 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - {% if "grpc" in opts.transport %} - "grpc", - {% endif %} - {% if "rest" in opts.transport %} - "rest", - {% endif %} -]) -def test_transport_kind(transport_name): - transport = {{ service.client_name }}.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name +{{ test_macros.transport_kind_test(service, opts) }} + +{{ test_macros.transport_kind_test(service, opts, is_async=True) }} {% if 'grpc' in opts.transport %} def test_transport_grpc_default(): diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 82bce1309973..b73e8eac811f 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1876,3 +1876,34 @@ def test_{{ method_name }}_empty_call(): {% endwith %}{# method_settings #} assert args[0] == {{ method.input.ident }}() {% endmacro %} + + +{% macro transport_kind_test(service, opts, is_async=False) %} +@pytest.mark.parametrize("transport_name", [ + {% if is_async %} + {% if "grpc" in opts.transport %} + "grpc_asyncio", + {% endif %} + {% else %}{# if not is_async #} + {% if "grpc" in opts.transport%} + "grpc", + {% endif %} + {% if "rest" in opts.transport %} + "rest", + {% endif %} + {% 
endif %}{# is_async #} +]) +{% if is_async %} +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = {{ service.async_client_name }}.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) +{% else %} +def test_transport_kind(transport_name): + transport = {{ service.client_name }}.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) +{% endif %} + assert transport.kind == transport_name +{% endmacro %} \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 2ff6022256c8..45cafd05fb71 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -227,6 +228,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -941,17 +943,17 @@ def analyze_org_policy_governed_assets(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.export_assets: gapic_v1.method_async.wrap_method( + self.export_assets: self._wrap_method( self.export_assets, default_timeout=60.0, client_info=client_info, ), - self.list_assets: gapic_v1.method_async.wrap_method( + self.list_assets: self._wrap_method( self.list_assets, default_timeout=None, client_info=client_info, ), - self.batch_get_assets_history: gapic_v1.method_async.wrap_method( + self.batch_get_assets_history: self._wrap_method( self.batch_get_assets_history, default_retry=retries.AsyncRetry( initial=0.1, @@ -966,12 +968,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_feed: gapic_v1.method_async.wrap_method( + self.create_feed: self._wrap_method( self.create_feed, default_timeout=60.0, client_info=client_info, ), - self.get_feed: gapic_v1.method_async.wrap_method( + self.get_feed: self._wrap_method( self.get_feed, default_retry=retries.AsyncRetry( initial=0.1, @@ -986,7 +988,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_feeds: gapic_v1.method_async.wrap_method( + self.list_feeds: self._wrap_method( self.list_feeds, default_retry=retries.AsyncRetry( initial=0.1, @@ -1001,12 +1003,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.update_feed: gapic_v1.method_async.wrap_method( + self.update_feed: self._wrap_method( self.update_feed, default_timeout=60.0, client_info=client_info, ), - self.delete_feed: 
gapic_v1.method_async.wrap_method( + self.delete_feed: self._wrap_method( self.delete_feed, default_retry=retries.AsyncRetry( initial=0.1, @@ -1021,7 +1023,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.search_all_resources: gapic_v1.method_async.wrap_method( + self.search_all_resources: self._wrap_method( self.search_all_resources, default_retry=retries.AsyncRetry( initial=0.1, @@ -1036,7 +1038,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=15.0, client_info=client_info, ), - self.search_all_iam_policies: gapic_v1.method_async.wrap_method( + self.search_all_iam_policies: self._wrap_method( self.search_all_iam_policies, default_retry=retries.AsyncRetry( initial=0.1, @@ -1051,7 +1053,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=15.0, client_info=client_info, ), - self.analyze_iam_policy: gapic_v1.method_async.wrap_method( + self.analyze_iam_policy: self._wrap_method( self.analyze_iam_policy, default_retry=retries.AsyncRetry( initial=0.1, @@ -1065,71 +1067,80 @@ def _prep_wrapped_messages(self, client_info): default_timeout=300.0, client_info=client_info, ), - self.analyze_iam_policy_longrunning: gapic_v1.method_async.wrap_method( + self.analyze_iam_policy_longrunning: self._wrap_method( self.analyze_iam_policy_longrunning, default_timeout=60.0, client_info=client_info, ), - self.analyze_move: gapic_v1.method_async.wrap_method( + self.analyze_move: self._wrap_method( self.analyze_move, default_timeout=None, client_info=client_info, ), - self.query_assets: gapic_v1.method_async.wrap_method( + self.query_assets: self._wrap_method( self.query_assets, default_timeout=None, client_info=client_info, ), - self.create_saved_query: gapic_v1.method_async.wrap_method( + self.create_saved_query: self._wrap_method( self.create_saved_query, default_timeout=None, client_info=client_info, ), - self.get_saved_query: gapic_v1.method_async.wrap_method( + self.get_saved_query: 
self._wrap_method( self.get_saved_query, default_timeout=None, client_info=client_info, ), - self.list_saved_queries: gapic_v1.method_async.wrap_method( + self.list_saved_queries: self._wrap_method( self.list_saved_queries, default_timeout=None, client_info=client_info, ), - self.update_saved_query: gapic_v1.method_async.wrap_method( + self.update_saved_query: self._wrap_method( self.update_saved_query, default_timeout=None, client_info=client_info, ), - self.delete_saved_query: gapic_v1.method_async.wrap_method( + self.delete_saved_query: self._wrap_method( self.delete_saved_query, default_timeout=None, client_info=client_info, ), - self.batch_get_effective_iam_policies: gapic_v1.method_async.wrap_method( + self.batch_get_effective_iam_policies: self._wrap_method( self.batch_get_effective_iam_policies, default_timeout=None, client_info=client_info, ), - self.analyze_org_policies: gapic_v1.method_async.wrap_method( + self.analyze_org_policies: self._wrap_method( self.analyze_org_policies, default_timeout=None, client_info=client_info, ), - self.analyze_org_policy_governed_containers: gapic_v1.method_async.wrap_method( + self.analyze_org_policy_governed_containers: self._wrap_method( self.analyze_org_policy_governed_containers, default_timeout=None, client_info=client_info, ), - self.analyze_org_policy_governed_assets: gapic_v1.method_async.wrap_method( + self.analyze_org_policy_governed_assets: self._wrap_method( self.analyze_org_policy_governed_assets, default_timeout=None, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def get_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py 
b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 6e0e9a638128..f952581a0e03 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -16415,6 +16415,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = AssetServiceAsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AssetServiceClient( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 447c2e3bafe0..8589ca45f040 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -232,6 +233,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -355,7 +357,7 @@ def sign_jwt(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.generate_access_token: gapic_v1.method_async.wrap_method( + self.generate_access_token: self._wrap_method( self.generate_access_token, default_retry=retries.AsyncRetry( initial=0.1, @@ -370,7 +372,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.generate_id_token: gapic_v1.method_async.wrap_method( + self.generate_id_token: self._wrap_method( self.generate_id_token, default_retry=retries.AsyncRetry( initial=0.1, @@ -385,7 +387,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.sign_blob: gapic_v1.method_async.wrap_method( + self.sign_blob: self._wrap_method( self.sign_blob, default_retry=retries.AsyncRetry( initial=0.1, @@ -400,7 +402,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.sign_jwt: gapic_v1.method_async.wrap_method( + self.sign_jwt: self._wrap_method( self.sign_jwt, default_retry=retries.AsyncRetry( initial=0.1, @@ -415,11 +417,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + __all__ = ( 'IAMCredentialsGrpcAsyncIOTransport', diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 2f646744fdad..2e96e2aebf64 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -3500,6 +3500,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = IAMCredentialsAsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = IAMCredentialsClient( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 864708c68f6b..3dbf4ebfe692 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -237,6 +238,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -739,101 +741,110 @@ def update_google_channel_config(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.get_trigger: gapic_v1.method_async.wrap_method( + self.get_trigger: self._wrap_method( self.get_trigger, default_timeout=None, client_info=client_info, ), - self.list_triggers: gapic_v1.method_async.wrap_method( + self.list_triggers: self._wrap_method( self.list_triggers, default_timeout=None, client_info=client_info, ), - self.create_trigger: gapic_v1.method_async.wrap_method( + self.create_trigger: self._wrap_method( self.create_trigger, default_timeout=None, client_info=client_info, ), - self.update_trigger: gapic_v1.method_async.wrap_method( + self.update_trigger: self._wrap_method( self.update_trigger, default_timeout=None, client_info=client_info, ), - self.delete_trigger: gapic_v1.method_async.wrap_method( + self.delete_trigger: self._wrap_method( self.delete_trigger, default_timeout=None, client_info=client_info, ), - self.get_channel: gapic_v1.method_async.wrap_method( + self.get_channel: self._wrap_method( self.get_channel, default_timeout=None, client_info=client_info, ), - self.list_channels: gapic_v1.method_async.wrap_method( + self.list_channels: self._wrap_method( self.list_channels, default_timeout=None, client_info=client_info, ), - self.create_channel_: gapic_v1.method_async.wrap_method( + self.create_channel_: self._wrap_method( self.create_channel_, default_timeout=None, client_info=client_info, ), - self.update_channel: gapic_v1.method_async.wrap_method( + self.update_channel: self._wrap_method( self.update_channel, default_timeout=None, client_info=client_info, ), - self.delete_channel: 
gapic_v1.method_async.wrap_method( + self.delete_channel: self._wrap_method( self.delete_channel, default_timeout=None, client_info=client_info, ), - self.get_provider: gapic_v1.method_async.wrap_method( + self.get_provider: self._wrap_method( self.get_provider, default_timeout=None, client_info=client_info, ), - self.list_providers: gapic_v1.method_async.wrap_method( + self.list_providers: self._wrap_method( self.list_providers, default_timeout=None, client_info=client_info, ), - self.get_channel_connection: gapic_v1.method_async.wrap_method( + self.get_channel_connection: self._wrap_method( self.get_channel_connection, default_timeout=None, client_info=client_info, ), - self.list_channel_connections: gapic_v1.method_async.wrap_method( + self.list_channel_connections: self._wrap_method( self.list_channel_connections, default_timeout=None, client_info=client_info, ), - self.create_channel_connection: gapic_v1.method_async.wrap_method( + self.create_channel_connection: self._wrap_method( self.create_channel_connection, default_timeout=None, client_info=client_info, ), - self.delete_channel_connection: gapic_v1.method_async.wrap_method( + self.delete_channel_connection: self._wrap_method( self.delete_channel_connection, default_timeout=None, client_info=client_info, ), - self.get_google_channel_config: gapic_v1.method_async.wrap_method( + self.get_google_channel_config: self._wrap_method( self.get_google_channel_config, default_timeout=None, client_info=client_info, ), - self.update_google_channel_config: gapic_v1.method_async.wrap_method( + self.update_google_channel_config: self._wrap_method( self.update_google_channel_config, default_timeout=None, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + 
return "grpc_asyncio" + @property def delete_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 23ef03067160..225cf30b7af6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -13847,6 +13847,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = EventarcAsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = EventarcClient( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index f804863e9048..4b5df55245d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -227,6 +228,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -1191,72 +1193,72 @@ def copy_log_entries(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.list_buckets: gapic_v1.method_async.wrap_method( + self.list_buckets: self._wrap_method( self.list_buckets, default_timeout=None, client_info=client_info, ), - self.get_bucket: gapic_v1.method_async.wrap_method( + self.get_bucket: self._wrap_method( self.get_bucket, default_timeout=None, client_info=client_info, ), - self.create_bucket_async: gapic_v1.method_async.wrap_method( + self.create_bucket_async: self._wrap_method( self.create_bucket_async, default_timeout=None, client_info=client_info, ), - self.update_bucket_async: gapic_v1.method_async.wrap_method( + self.update_bucket_async: self._wrap_method( self.update_bucket_async, default_timeout=None, client_info=client_info, ), - self.create_bucket: gapic_v1.method_async.wrap_method( + self.create_bucket: self._wrap_method( self.create_bucket, default_timeout=None, client_info=client_info, ), - self.update_bucket: gapic_v1.method_async.wrap_method( + self.update_bucket: self._wrap_method( self.update_bucket, default_timeout=None, client_info=client_info, ), - self.delete_bucket: gapic_v1.method_async.wrap_method( + self.delete_bucket: self._wrap_method( self.delete_bucket, default_timeout=None, client_info=client_info, ), - self.undelete_bucket: gapic_v1.method_async.wrap_method( + self.undelete_bucket: self._wrap_method( self.undelete_bucket, default_timeout=None, client_info=client_info, ), - self.list_views: gapic_v1.method_async.wrap_method( + self.list_views: self._wrap_method( self.list_views, default_timeout=None, client_info=client_info, ), - self.get_view: 
gapic_v1.method_async.wrap_method( + self.get_view: self._wrap_method( self.get_view, default_timeout=None, client_info=client_info, ), - self.create_view: gapic_v1.method_async.wrap_method( + self.create_view: self._wrap_method( self.create_view, default_timeout=None, client_info=client_info, ), - self.update_view: gapic_v1.method_async.wrap_method( + self.update_view: self._wrap_method( self.update_view, default_timeout=None, client_info=client_info, ), - self.delete_view: gapic_v1.method_async.wrap_method( + self.delete_view: self._wrap_method( self.delete_view, default_timeout=None, client_info=client_info, ), - self.list_sinks: gapic_v1.method_async.wrap_method( + self.list_sinks: self._wrap_method( self.list_sinks, default_retry=retries.AsyncRetry( initial=0.1, @@ -1272,7 +1274,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_sink: gapic_v1.method_async.wrap_method( + self.get_sink: self._wrap_method( self.get_sink, default_retry=retries.AsyncRetry( initial=0.1, @@ -1288,12 +1290,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_sink: gapic_v1.method_async.wrap_method( + self.create_sink: self._wrap_method( self.create_sink, default_timeout=120.0, client_info=client_info, ), - self.update_sink: gapic_v1.method_async.wrap_method( + self.update_sink: self._wrap_method( self.update_sink, default_retry=retries.AsyncRetry( initial=0.1, @@ -1309,7 +1311,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_sink: gapic_v1.method_async.wrap_method( + self.delete_sink: self._wrap_method( self.delete_sink, default_retry=retries.AsyncRetry( initial=0.1, @@ -1325,27 +1327,27 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_link: gapic_v1.method_async.wrap_method( + self.create_link: self._wrap_method( 
self.create_link, default_timeout=None, client_info=client_info, ), - self.delete_link: gapic_v1.method_async.wrap_method( + self.delete_link: self._wrap_method( self.delete_link, default_timeout=None, client_info=client_info, ), - self.list_links: gapic_v1.method_async.wrap_method( + self.list_links: self._wrap_method( self.list_links, default_timeout=None, client_info=client_info, ), - self.get_link: gapic_v1.method_async.wrap_method( + self.get_link: self._wrap_method( self.get_link, default_timeout=None, client_info=client_info, ), - self.list_exclusions: gapic_v1.method_async.wrap_method( + self.list_exclusions: self._wrap_method( self.list_exclusions, default_retry=retries.AsyncRetry( initial=0.1, @@ -1361,7 +1363,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_exclusion: gapic_v1.method_async.wrap_method( + self.get_exclusion: self._wrap_method( self.get_exclusion, default_retry=retries.AsyncRetry( initial=0.1, @@ -1377,17 +1379,17 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_exclusion: gapic_v1.method_async.wrap_method( + self.create_exclusion: self._wrap_method( self.create_exclusion, default_timeout=120.0, client_info=client_info, ), - self.update_exclusion: gapic_v1.method_async.wrap_method( + self.update_exclusion: self._wrap_method( self.update_exclusion, default_timeout=120.0, client_info=client_info, ), - self.delete_exclusion: gapic_v1.method_async.wrap_method( + self.delete_exclusion: self._wrap_method( self.delete_exclusion, default_retry=retries.AsyncRetry( initial=0.1, @@ -1403,36 +1405,45 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_cmek_settings: gapic_v1.method_async.wrap_method( + self.get_cmek_settings: self._wrap_method( self.get_cmek_settings, default_timeout=None, client_info=client_info, ), - self.update_cmek_settings: 
gapic_v1.method_async.wrap_method( + self.update_cmek_settings: self._wrap_method( self.update_cmek_settings, default_timeout=None, client_info=client_info, ), - self.get_settings: gapic_v1.method_async.wrap_method( + self.get_settings: self._wrap_method( self.get_settings, default_timeout=None, client_info=client_info, ), - self.update_settings: gapic_v1.method_async.wrap_method( + self.update_settings: self._wrap_method( self.update_settings, default_timeout=None, client_info=client_info, ), - self.copy_log_entries: gapic_v1.method_async.wrap_method( + self.copy_log_entries: self._wrap_method( self.copy_log_entries, default_timeout=None, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def cancel_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 9817b9fd82ec..fa0d3072d4e5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -225,6 +226,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -415,7 +417,7 @@ def tail_log_entries(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.delete_log: gapic_v1.method_async.wrap_method( + self.delete_log: self._wrap_method( self.delete_log, default_retry=retries.AsyncRetry( initial=0.1, @@ -431,7 +433,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.write_log_entries: gapic_v1.method_async.wrap_method( + self.write_log_entries: self._wrap_method( self.write_log_entries, default_retry=retries.AsyncRetry( initial=0.1, @@ -447,7 +449,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_log_entries: gapic_v1.method_async.wrap_method( + self.list_log_entries: self._wrap_method( self.list_log_entries, default_retry=retries.AsyncRetry( initial=0.1, @@ -463,7 +465,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_monitored_resource_descriptors: gapic_v1.method_async.wrap_method( + self.list_monitored_resource_descriptors: self._wrap_method( self.list_monitored_resource_descriptors, default_retry=retries.AsyncRetry( initial=0.1, @@ -479,7 +481,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_logs: gapic_v1.method_async.wrap_method( + self.list_logs: self._wrap_method( self.list_logs, default_retry=retries.AsyncRetry( initial=0.1, @@ -495,7 +497,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.tail_log_entries: gapic_v1.method_async.wrap_method( + self.tail_log_entries: 
self._wrap_method( self.tail_log_entries, default_retry=retries.AsyncRetry( initial=0.1, @@ -511,11 +513,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def cancel_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index e5ff12d01339..91fb8fab4fa1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -225,6 +226,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -370,7 +372,7 @@ def delete_log_metric(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.list_log_metrics: gapic_v1.method_async.wrap_method( + self.list_log_metrics: self._wrap_method( self.list_log_metrics, default_retry=retries.AsyncRetry( initial=0.1, @@ -386,7 +388,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_log_metric: gapic_v1.method_async.wrap_method( + self.get_log_metric: self._wrap_method( self.get_log_metric, default_retry=retries.AsyncRetry( initial=0.1, @@ -402,12 +404,12 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_log_metric: gapic_v1.method_async.wrap_method( + self.create_log_metric: self._wrap_method( self.create_log_metric, default_timeout=60.0, client_info=client_info, ), - self.update_log_metric: gapic_v1.method_async.wrap_method( + self.update_log_metric: self._wrap_method( self.update_log_metric, default_retry=retries.AsyncRetry( initial=0.1, @@ -423,7 +425,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_log_metric: gapic_v1.method_async.wrap_method( + self.delete_log_metric: self._wrap_method( self.delete_log_metric, default_retry=retries.AsyncRetry( initial=0.1, @@ -439,11 +441,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): 
return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def cancel_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 62c50348d531..86f42cc044ee 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -12344,6 +12344,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = ConfigServiceV2AsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = ConfigServiceV2Client( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index b0fd3c2d5fb7..7602beb75d07 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -3108,6 +3108,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = LoggingServiceV2AsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = LoggingServiceV2Client( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 939eb6911f7b..8ac1d16baee0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2915,6 +2915,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = MetricsServiceV2AsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = MetricsServiceV2Client( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index e5c8f727eb8e..364bd7e9503e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -247,6 +248,7 @@ def __init__(self, *, ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters self._prep_wrapped_messages(client_info) @property @@ -612,66 +614,75 @@ def reschedule_maintenance(self) -> Callable[ def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.list_instances: gapic_v1.method_async.wrap_method( + self.list_instances: self._wrap_method( self.list_instances, default_timeout=600.0, client_info=client_info, ), - self.get_instance: gapic_v1.method_async.wrap_method( + self.get_instance: self._wrap_method( self.get_instance, default_timeout=600.0, client_info=client_info, ), - self.get_instance_auth_string: gapic_v1.method_async.wrap_method( + self.get_instance_auth_string: self._wrap_method( self.get_instance_auth_string, default_timeout=600.0, client_info=client_info, ), - self.create_instance: gapic_v1.method_async.wrap_method( + self.create_instance: self._wrap_method( self.create_instance, default_timeout=600.0, client_info=client_info, ), - self.update_instance: gapic_v1.method_async.wrap_method( + self.update_instance: self._wrap_method( self.update_instance, default_timeout=600.0, client_info=client_info, ), - self.upgrade_instance: gapic_v1.method_async.wrap_method( + self.upgrade_instance: self._wrap_method( self.upgrade_instance, default_timeout=600.0, client_info=client_info, ), - self.import_instance: gapic_v1.method_async.wrap_method( + self.import_instance: self._wrap_method( self.import_instance, default_timeout=600.0, client_info=client_info, ), - self.export_instance: gapic_v1.method_async.wrap_method( + self.export_instance: self._wrap_method( self.export_instance, default_timeout=600.0, client_info=client_info, ), - self.failover_instance: gapic_v1.method_async.wrap_method( + self.failover_instance: self._wrap_method( self.failover_instance, 
default_timeout=600.0, client_info=client_info, ), - self.delete_instance: gapic_v1.method_async.wrap_method( + self.delete_instance: self._wrap_method( self.delete_instance, default_timeout=600.0, client_info=client_info, ), - self.reschedule_maintenance: gapic_v1.method_async.wrap_method( + self.reschedule_maintenance: self._wrap_method( self.reschedule_maintenance, default_timeout=None, client_info=client_info, ), - } + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + @property def delete_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index b06145517d4d..d9bacbf441ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -8413,6 +8413,18 @@ def test_transport_kind(transport_name): ) assert transport.kind == transport_name + +@pytest.mark.parametrize("transport_name", [ + "grpc_asyncio", +]) +@pytest.mark.asyncio +async def test_transport_kind_async(transport_name): + transport = CloudRedisAsyncClient.get_transport_class(transport_name)( + credentials=async_anonymous_credentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = CloudRedisClient( From c0ce83f080be1d987d6361e52d76f29ba963f34a Mon Sep 17 00:00:00 2001 From: ohmayr Date: Fri, 30 Aug 2024 00:27:19 -0400 Subject: [PATCH 1177/1339] chore: Update response method macro to support async calls (#2113) --- .../%sub/services/%service/_shared_macros.j2 | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 2236475864e8..c8b052ae0618 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -110,9 +110,11 @@ def _get_http_options(): return http_options {% endmacro %} -{% macro response_method(body_spec) %} +{% macro response_method(body_spec, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} @staticmethod -def _get_response( +{{ async_prefix }}def _get_response( host, metadata, query_params, @@ -125,7 +127,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = {{ await_prefix }}getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, From 7811826f7ea258bb6f9b6e13bcb868a65b2be722 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Fri, 30 Aug 2024 00:36:50 -0400 Subject: [PATCH 1178/1339] chore: support async in rest call common macro (#2114) --- .../%sub/services/%service/_shared_macros.j2 | 32 +++++++++++++++++-- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index c8b052ae0618..b7b223caad3f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -139,26 +139,52 @@ def _get_http_options(): return response {% endmacro %} -{% macro rest_call_method_common(body_spec, method_name, service_name) %} +{# rest_call_method_common includes the common code for a rest __call__ method to be + re-used for sync REST, async REST, and mixins __call__ implementation. + + Args: + body_spec (str): The http options body i.e. method.http_options[0].body + method_name (str): The method name. + service_name (str): The service name. + is_async (bool): Used to determine the code path i.e. whether for sync or async call. #} +{% macro rest_call_method_common(body_spec, method_name, service_name, is_async=False) %} +{% set await_prefix = "await " if is_async else "" %} +{% set async_class_prefix = "Async" if is_async else "" %} http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() + {% if not is_async %} + {# TODO (ohmayr): Make this unconditional once REST interceptors are supported for async. Googlers, + see internal tracking issue: b/362949568. 
#} request, metadata = self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) + {% endif %} transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) {% if body_spec %} body = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_request_body_json(transcoded_request) - {% endif %} {# body_spec #} + {% endif %}{# body_spec #} # Jsonify the query params query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) # Send the request - response = {{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) + response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. if response.status_code >= 400: + {# Note: format_http_response_error takes in more parameters than from_http_response and the + latter only supports a response of type requests.Response. + TODO: Clean up the sync response error handling and use format_http_response_error. + See issue: https://github.com/googleapis/gapic-generator-python/issues/2116. 
#} + {% if is_async %} + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) + {% else %} raise core_exceptions.from_http_response(response) + {% endif %}{# is_async #} {% endmacro %} From 2b9865545abadefba6ef79d8c91ba84d68279544 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 30 Aug 2024 13:30:37 -0400 Subject: [PATCH 1179/1339] chore: add default library settings (#2120) --- packages/gapic-generator/gapic/schema/api.py | 12 +++++++++++- .../tests/unit/schema/test_api.py | 17 +++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 2f3992ceb978..ce2fdc8022e5 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -698,7 +698,7 @@ def all_library_settings( self.service_yaml_config.publishing.library_settings ) - return { + result = { library_setting.version: client_pb2.ClientLibrarySettings( version=library_setting.version, python_settings=library_setting.python_settings, @@ -706,6 +706,16 @@ def all_library_settings( for library_setting in self.service_yaml_config.publishing.library_settings } + # Add default settings for the current proto package + if not result: + result = { + self.naming.proto_package: client_pb2.ClientLibrarySettings( + version=self.naming.proto_package + ) + } + + return result + def enforce_valid_library_settings( self, client_library_settings: Sequence[client_pb2.ClientLibrarySettings] ) -> None: diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 78e94949e74a..c8bf9706d39c 100644 --- 
a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2726,6 +2726,23 @@ def test_read_python_settings_from_service_yaml(): } +def test_read_empty_python_settings_from_service_yaml(): + service_yaml_config = { + "apis": [ + {"name": "google.example.v1beta1.ServiceOne.Example1"}, + ], + } + cli_options = Options(service_yaml_config=service_yaml_config) + fd = get_file_descriptor_proto_for_tests(fields=[]) + api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) + assert api_schema.all_library_settings["google.example.v1beta1"].python_settings \ + == client_pb2.PythonSettings() + assert api_schema.all_library_settings["google.example.v1beta1"].python_settings.experimental_features \ + == client_pb2.PythonSettings.ExperimentalFeatures() + assert api_schema.all_library_settings["google.example.v1beta1"].python_settings.experimental_features.rest_async_io_enabled \ + == False + + def test_python_settings_duplicate_version_raises_error(): """ Test that `ClientLibrarySettingsError` is raised when there are duplicate versions in From dc5b4eff2f99e2a807601f8af34fc8009305c252 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Sep 2024 14:27:39 +0200 Subject: [PATCH 1180/1339] chore(deps): update all dependencies (#2125) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 10 +- packages/gapic-generator/requirements.txt | 260 +++++++++++----------- 2 files changed, 136 insertions(+), 134 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 1663b1e3499f..0828e207172d 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -60,9 +60,9 @@ gapic_generator_python() gapic_generator_register_toolchains() -_grpc_version = "1.60.0" +_grpc_version = "1.64.2" -_grpc_sha256 = "09640607a340ff0d97407ed22fe4adb177e5bb85329821122084359cd57c3dea" +_grpc_sha256 = 
"8579095a31e280d0c5fcc81ea0a2a0efb2900dbfbac0eb018a961a5be22e076e" http_archive( name = "com_github_grpc_grpc", @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "d19643d265b978383352b3143f04c0641eea75a75235c111cc01a1350173180e", - strip_prefix = "protobuf-25.3", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v25.3.tar.gz"], + sha256 = "13e7749c30bc24af6ee93e092422f9dc08491c7097efa69461f88eb5f61805ce", + strip_prefix = "protobuf-28.0", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.0.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index c57b0648564d..ab28e808cc74 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,13 +4,13 @@ # # pip-compile --generate-hashes requirements.in # -cachetools==5.4.0 \ - --hash=sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474 \ - --hash=sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via requests charset-normalizer==3.3.2 \ --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ @@ -112,13 +112,13 @@ exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ 
--hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd +google-api-core==2.19.2 \ + --hash=sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4 \ + --hash=sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f # via -r requirements.in -google-auth==2.32.0 \ - --hash=sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022 \ - --hash=sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b +google-auth==2.34.0 \ + --hash=sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 \ + --hash=sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc # via google-api-core googleapis-common-protos[grpc]==1.65.0 \ --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ @@ -131,59 +131,59 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.65.4 \ - --hash=sha256:075f3903bc1749ace93f2b0664f72964ee5f2da5c15d4b47e0ab68e4f442c257 \ - --hash=sha256:0a0720299bdb2cc7306737295d56e41ce8827d5669d4a3cd870af832e3b17c4d \ - --hash=sha256:0cef8c919a3359847c357cb4314e50ed1f0cca070f828ee8f878d362fd744d52 \ - --hash=sha256:0e85c8766cf7f004ab01aff6a0393935a30d84388fa3c58d77849fcf27f3e98c \ - --hash=sha256:17de4fda50967679677712eec0a5c13e8904b76ec90ac845d83386b65da0ae1e \ - --hash=sha256:18c10f0d054d2dce34dd15855fcca7cc44ec3b811139437543226776730c0f28 \ - --hash=sha256:24a2246e80a059b9eb981e4c2a6d8111b1b5e03a44421adbf2736cc1d4988a8a \ - --hash=sha256:280e93356fba6058cbbfc6f91a18e958062ef1bdaf5b1caf46c615ba1ae71b5b \ - 
--hash=sha256:2a1d4c84d9e657f72bfbab8bedf31bdfc6bfc4a1efb10b8f2d28241efabfaaf2 \ - --hash=sha256:2a4f476209acffec056360d3e647ae0e14ae13dcf3dfb130c227ae1c594cbe39 \ - --hash=sha256:2bd672e005afab8bf0d6aad5ad659e72a06dd713020554182a66d7c0c8f47e18 \ - --hash=sha256:3d1bbf7e1dd1096378bd83c83f554d3b93819b91161deaf63e03b7022a85224a \ - --hash=sha256:3dee50c1b69754a4228e933696408ea87f7e896e8d9797a3ed2aeed8dbd04b74 \ - --hash=sha256:4482a44ce7cf577a1f8082e807a5b909236bce35b3e3897f839f2fbd9ae6982d \ - --hash=sha256:4934077b33aa6fe0b451de8b71dabde96bf2d9b4cb2b3187be86e5adebcba021 \ - --hash=sha256:5764237d751d3031a36fafd57eb7d36fd2c10c658d2b4057c516ccf114849a3e \ - --hash=sha256:626319a156b1f19513156a3b0dbfe977f5f93db63ca673a0703238ebd40670d7 \ - --hash=sha256:644a783ce604a7d7c91412bd51cf9418b942cf71896344b6dc8d55713c71ce82 \ - --hash=sha256:66bb051881c84aa82e4f22d8ebc9d1704b2e35d7867757f0740c6ef7b902f9b1 \ - --hash=sha256:74c34fc7562bdd169b77966068434a93040bfca990e235f7a67cdf26e1bd5c63 \ - --hash=sha256:7656376821fed8c89e68206a522522317787a3d9ed66fb5110b1dff736a5e416 \ - --hash=sha256:85e9c69378af02e483bc626fc19a218451b24a402bdf44c7531e4c9253fb49ef \ - --hash=sha256:870370524eff3144304da4d1bbe901d39bdd24f858ce849b7197e530c8c8f2ec \ - --hash=sha256:874acd010e60a2ec1e30d5e505b0651ab12eb968157cd244f852b27c6dbed733 \ - --hash=sha256:886b45b29f3793b0c2576201947258782d7e54a218fe15d4a0468d9a6e00ce17 \ - --hash=sha256:88fcabc332a4aef8bcefadc34a02e9ab9407ab975d2c7d981a8e12c1aed92aa1 \ - --hash=sha256:8dc9ddc4603ec43f6238a5c95400c9a901b6d079feb824e890623da7194ff11e \ - --hash=sha256:8eb485801957a486bf5de15f2c792d9f9c897a86f2f18db8f3f6795a094b4bb2 \ - --hash=sha256:926a0750a5e6fb002542e80f7fa6cab8b1a2ce5513a1c24641da33e088ca4c56 \ - --hash=sha256:a146bc40fa78769f22e1e9ff4f110ef36ad271b79707577bf2a31e3e931141b9 \ - --hash=sha256:a925446e6aa12ca37114840d8550f308e29026cdc423a73da3043fd1603a6385 \ - --hash=sha256:a99e6dffefd3027b438116f33ed1261c8d360f0dd4f943cb44541a2782eba72f \ - 
--hash=sha256:abccc5d73f5988e8f512eb29341ed9ced923b586bb72e785f265131c160231d8 \ - --hash=sha256:ade1256c98cba5a333ef54636095f2c09e6882c35f76acb04412f3b1aa3c29a5 \ - --hash=sha256:b07f36faf01fca5427d4aa23645e2d492157d56c91fab7e06fe5697d7e171ad4 \ - --hash=sha256:b81711bf4ec08a3710b534e8054c7dcf90f2edc22bebe11c1775a23f145595fe \ - --hash=sha256:be952436571dacc93ccc7796db06b7daf37b3b56bb97e3420e6503dccfe2f1b4 \ - --hash=sha256:c9ba3e63108a8749994f02c7c0e156afb39ba5bdf755337de8e75eb685be244b \ - --hash=sha256:cdb34278e4ceb224c89704cd23db0d902e5e3c1c9687ec9d7c5bb4c150f86816 \ - --hash=sha256:cf53e6247f1e2af93657e62e240e4f12e11ee0b9cef4ddcb37eab03d501ca864 \ - --hash=sha256:d2b819f9ee27ed4e3e737a4f3920e337e00bc53f9e254377dd26fc7027c4d558 \ - --hash=sha256:d72962788b6c22ddbcdb70b10c11fbb37d60ae598c51eb47ec019db66ccfdff0 \ - --hash=sha256:d7b984a8dd975d949c2042b9b5ebcf297d6d5af57dcd47f946849ee15d3c2fb8 \ - --hash=sha256:e4a795c02405c7dfa8affd98c14d980f4acea16ea3b539e7404c645329460e5a \ - --hash=sha256:e6cbdd107e56bde55c565da5fd16f08e1b4e9b0674851d7749e7f32d8645f524 \ - --hash=sha256:ee40d058cf20e1dd4cacec9c39e9bce13fedd38ce32f9ba00f639464fcb757de +grpcio==1.66.1 \ + --hash=sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e \ + --hash=sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce \ + --hash=sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8 \ + --hash=sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d \ + --hash=sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858 \ + --hash=sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0 \ + --hash=sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a \ + --hash=sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45 \ + --hash=sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef \ + 
--hash=sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2 \ + --hash=sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac \ + --hash=sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd \ + --hash=sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1 \ + --hash=sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce \ + --hash=sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492 \ + --hash=sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e \ + --hash=sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb \ + --hash=sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44 \ + --hash=sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb \ + --hash=sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759 \ + --hash=sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e \ + --hash=sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761 \ + --hash=sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26 \ + --hash=sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791 \ + --hash=sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c \ + --hash=sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60 \ + --hash=sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df \ + --hash=sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a \ + --hash=sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3 \ + --hash=sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734 \ + --hash=sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f \ + --hash=sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083 \ + --hash=sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524 \ + 
--hash=sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d \ + --hash=sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a \ + --hash=sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0 \ + --hash=sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb \ + --hash=sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503 \ + --hash=sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815 \ + --hash=sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22 \ + --hash=sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2 \ + --hash=sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c \ + --hash=sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d \ + --hash=sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b \ + --hash=sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c \ + --hash=sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9 # via # googleapis-common-protos # grpc-google-iam-v1 -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 +idna==3.8 \ + --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ + --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 # via requests inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ @@ -306,18 +306,18 @@ proto-plus==1.24.0 \ # via # -r requirements.in # google-api-core -protobuf==5.27.3 \ - --hash=sha256:043853dcb55cc262bf2e116215ad43fa0859caab79bb0b2d31b708f128ece035 \ - --hash=sha256:16ddf3f8c6c41e1e803da7abea17b1793a97ef079a912e42351eabb19b2cffe7 \ - --hash=sha256:68248c60d53f6168f565a8c76dc58ba4fa2ade31c2d1ebdae6d80f969cdc2d4f \ - 
--hash=sha256:82460903e640f2b7e34ee81a947fdaad89de796d324bcbc38ff5430bcdead82c \ - --hash=sha256:8572c6533e544ebf6899c360e91d6bcbbee2549251643d32c52cf8a5de295ba5 \ - --hash=sha256:a55c48f2a2092d8e213bd143474df33a6ae751b781dd1d1f4d953c128a415b25 \ - --hash=sha256:af7c0b7cfbbb649ad26132e53faa348580f844d9ca46fd3ec7ca48a1ea5db8a1 \ - --hash=sha256:b8a994fb3d1c11156e7d1e427186662b64694a62b55936b2b9348f0a7c6625ce \ - --hash=sha256:c2a105c24f08b1e53d6c7ffe69cb09d0031512f0b72f812dd4005b8112dbe91e \ - --hash=sha256:c84eee2c71ed83704f1afbf1a85c3171eab0fd1ade3b399b3fad0884cbcca8bf \ - --hash=sha256:dcb307cd4ef8fec0cf52cb9105a03d06fbb5275ce6d84a6ae33bc6cf84e0a07b +protobuf==5.28.0 \ + --hash=sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd \ + --hash=sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0 \ + --hash=sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681 \ + --hash=sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd \ + --hash=sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0 \ + --hash=sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6 \ + --hash=sha256:853db610214e77ee817ecf0514e0d1d052dff7f63a0c157aa6eabae98db8a8de \ + --hash=sha256:d001a73c8bc2bf5b5c1360d59dd7573744e163b3607fa92788b7f3d5fefbd9a5 \ + --hash=sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add \ + --hash=sha256:dde9fcaa24e7a9654f4baf2a55250b13a5ea701493d904c54069776b99a8216b \ + --hash=sha256:eef7a8a2f4318e2cb2dee8666d26e58eaf437c14788f3a2911d0c3da40405ae8 # via # -r requirements.in # google-api-core @@ -342,62 +342,64 @@ pytest==8.3.2 \ --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \ --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce # via pytest-asyncio -pytest-asyncio==0.23.8 \ - --hash=sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2 \ - 
--hash=sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3 +pytest-asyncio==0.24.0 \ + --hash=sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b \ + --hash=sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276 # via -r requirements.in -pyyaml==6.0.1 \ - --hash=sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5 \ - --hash=sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc \ - --hash=sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df \ - --hash=sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741 \ - --hash=sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206 \ - --hash=sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27 \ - --hash=sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595 \ - --hash=sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62 \ - --hash=sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98 \ - --hash=sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696 \ - --hash=sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290 \ - --hash=sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9 \ - --hash=sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d \ - --hash=sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6 \ - --hash=sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867 \ - --hash=sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47 \ - --hash=sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486 \ - --hash=sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6 \ - --hash=sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3 \ - --hash=sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007 \ - 
--hash=sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938 \ - --hash=sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0 \ - --hash=sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c \ - --hash=sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735 \ - --hash=sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d \ - --hash=sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28 \ - --hash=sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4 \ - --hash=sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba \ - --hash=sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8 \ - --hash=sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef \ - --hash=sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5 \ - --hash=sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd \ - --hash=sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3 \ - --hash=sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0 \ - --hash=sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515 \ - --hash=sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c \ - --hash=sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c \ - --hash=sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924 \ - --hash=sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34 \ - --hash=sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43 \ - --hash=sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859 \ - --hash=sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673 \ - --hash=sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54 \ - --hash=sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a \ - 
--hash=sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b \ - --hash=sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab \ - --hash=sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa \ - --hash=sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c \ - --hash=sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585 \ - --hash=sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d \ - --hash=sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f +PyYAML==6.0.2 \ + --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ + --hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \ + --hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \ + --hash=sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e \ + --hash=sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133 \ + --hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \ + --hash=sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484 \ + --hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \ + --hash=sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 \ + --hash=sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68 \ + --hash=sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a \ + --hash=sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf \ + --hash=sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99 \ + --hash=sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8 \ + --hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \ + --hash=sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19 \ + 
--hash=sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc \ + --hash=sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a \ + --hash=sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1 \ + --hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \ + --hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \ + --hash=sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631 \ + --hash=sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d \ + --hash=sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652 \ + --hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5 \ + --hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \ + --hash=sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b \ + --hash=sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 \ + --hash=sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 \ + --hash=sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706 \ + --hash=sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563 \ + --hash=sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237 \ + --hash=sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b \ + --hash=sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083 \ + --hash=sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180 \ + --hash=sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425 \ + --hash=sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e \ + --hash=sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f \ + --hash=sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725 \ + --hash=sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183 \ + 
--hash=sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab \ + --hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \ + --hash=sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725 \ + --hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \ + --hash=sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5 \ + --hash=sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d \ + --hash=sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290 \ + --hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \ + --hash=sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed \ + --hash=sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4 \ + --hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \ + --hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \ + --hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4 # via # -r requirements.in # libcst From fb2b94e83ca182642acdad2c0567ac6e1199f907 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 4 Sep 2024 17:16:32 -0400 Subject: [PATCH 1181/1339] chore: optionally generate async rest and client files (#2122) --- packages/gapic-generator/gapic/generator/generator.py | 10 +++++++--- .../services/%service/transports/rest_asyncio.py.j2 | 5 +++++ 2 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index dff8e0ebedf3..cb0dff456b0f 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -294,9 +294,13 @@ def _render_template( ('transport' in 
template_name and not self._is_desired_transport(template_name, opts)) or - # TODO(yon-mg) - remove when rest async implementation resolved - # temporarily stop async client gen while rest async is unkown - ('async' in template_name and 'grpc' not in opts.transport) + # TODO: Remove the following conditions once support for async rest transport is GA: + # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2121. + ('async_client' in template_name and 'grpc' not in opts.transport and + not api_schema.all_library_settings[api_schema.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled) + or + ('rest_asyncio' in template_name and + not api_schema.all_library_settings[api_schema.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled) or ('rest_base' in template_name and 'rest' not in opts.transport) ): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 new file mode 100644 index 000000000000..d73591c47269 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -0,0 +1,5 @@ +{# TODO: Remove the following condition for async rest transport once support for it is GA: + # {% if rest_async_io_enabled %} + # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2121. 
+#} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} From 367c14058d173f1859e6948d7705c13741051b8b Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 5 Sep 2024 15:08:36 -0400 Subject: [PATCH 1182/1339] feat: implement async rest transport constructor (#2123) --- .../.github/workflows/tests.yaml | 3 +- .../gapic/generator/generator.py | 3 +- .../%sub/services/%service/client.py.j2 | 10 ++ .../%service/transports/rest_asyncio.py.j2 | 83 +++++++++++++- .../%name_%version/%sub/test_%service.py.j2 | 16 ++- .../gapic/%name_%version/%sub/test_macros.j2 | 47 ++++---- packages/gapic-generator/noxfile.py | 51 +++++++++ .../unit/gapic/asset_v1/test_asset_service.py | 31 +++--- .../credentials_v1/test_iam_credentials.py | 31 +++--- .../unit/gapic/eventarc_v1/test_eventarc.py | 31 +++--- .../logging_v2/test_config_service_v2.py | 23 ++-- .../logging_v2/test_logging_service_v2.py | 23 ++-- .../logging_v2/test_metrics_service_v2.py | 23 ++-- .../redis_v1/services/cloud_redis/client.py | 2 + .../cloud_redis/transports/rest_asyncio.py | 102 ++++++++++++++++++ .../unit/gapic/redis_v1/test_cloud_redis.py | 38 ++++--- .../tests/integration/redis_v1.yaml | 11 +- 17 files changed, 380 insertions(+), 148 deletions(-) create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 8be4f041d2e3..4c00a2bb3237 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -140,7 +140,8 @@ jobs: strategy: matrix: python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] - variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins] + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove 
`_w_rest_async` variant when async rest is GA. + variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index cb0dff456b0f..27ad4b7ff22c 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -294,8 +294,7 @@ def _render_template( ('transport' in template_name and not self._is_desired_transport(template_name, opts)) or - # TODO: Remove the following conditions once support for async rest transport is GA: - # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2121. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. ('async_client' in template_name and 'grpc' not in opts.transport and not api_schema.all_library_settings[api_schema.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled) or diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 6c45fdd728de..168955c8f650 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -1,3 +1,5 @@ +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. 
#} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% block content %} @@ -62,6 +64,10 @@ from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} {% endif %} {% if 'rest' in opts.transport %} from .transports.rest import {{ service.name }}RestTransport +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} +{% if rest_async_io_enabled %} +from .transports.rest_asyncio import Async{{ service.name }}RestTransport +{% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -79,6 +85,10 @@ class {{ service.client_name }}Meta(type): {% endif %} {% if "rest" in opts.transport %} _transport_registry["rest"] = {{ service.name }}RestTransport + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} + {% if rest_async_io_enabled %} + _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport + {% endif %}{# if rest_async_io_enabled #} {% endif %} def get_transport_class(cls, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index d73591c47269..ab87c32c76a7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -1,5 +1,80 @@ -{# TODO: Remove the following condition for async rest transport once support for it is GA: - # {% if rest_async_io_enabled %} - # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2121. 
-#} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. #} {% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} +{% extends '_base.py.j2' %} + +{% block content %} + +from google.api_core import gapic_v1 + +from typing import Any, Optional + +from .rest_base import _Base{{ service.name }}RestTransport + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +{# TODO (https://github.com/googleapis/gapic-generator-python/issues/2128): Update `rest_version` to include the transport dependency version. #} +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=None, +) + +class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): + """Asynchronous REST backend transport for {{ service.name }}. + + {{ service.meta.doc|rst(width=72, indent=4) }} + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + {# TODO (https://github.com/googleapis/gapic-generator-python/issues/2129): Update the default type for credentials. #} + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = 'https', + ) -> None: + """Instantiate the transport. + + {% if not opts.rest_numeric_enums %} + NOTE: This async REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! 
+ {% endif %} + + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {# TODO (https://github.com/googleapis/gapic-generator-python/issues/2129): Update the default type for credentials. #} + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + url_scheme (str): the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=False, + url_scheme=url_scheme, + api_audience=None + ) + + @property + def kind(self) -> str: + return "rest_asyncio" + +{% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 8a64754a42cf..6f467a56a6da 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1,3 +1,5 @@ +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. 
#} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends "_base.py.j2" %} {% block content %} @@ -1041,9 +1043,17 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -{{ test_macros.transport_kind_test(service, opts) }} - -{{ test_macros.transport_kind_test(service, opts, is_async=True) }} +{% set configs = [] %} +{% for transport in opts.transport %} + {% do configs.append({'service':service, 'transport':transport, 'is_async':false}) %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} +{% if 'grpc' in transport or rest_async_io_enabled %} + {% do configs.append({'service':service, 'transport':transport, 'is_async':true}) %} +{% endif %} +{% endfor %} +{% for conf in configs %} +{{ test_macros.transport_kind_test(**conf) }} +{% endfor %} {% if 'grpc' in opts.transport %} def test_transport_grpc_default(): diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index b73e8eac811f..2f2fb3b74f2f 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1877,33 +1877,24 @@ def test_{{ method_name }}_empty_call(): assert args[0] == {{ method.input.ident }}() {% endmacro %} +{% macro get_credentials(is_async=False) %} +{{- 'async_anonymous_credentials()' if is_async else 'ga_credentials.AnonymousCredentials()' -}} +{% endmacro %} -{% macro transport_kind_test(service, opts, is_async=False) %} -@pytest.mark.parametrize("transport_name", [ - {% if is_async %} - {% if "grpc" in opts.transport %} - "grpc_asyncio", - {% endif %} - {% else %}{# if not is_async #} - {% if "grpc" 
in opts.transport%} - "grpc", - {% endif %} - {% if "rest" in opts.transport %} - "rest", - {% endif %} - {% endif %}{# is_async #} -]) -{% if is_async %} -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = {{ service.async_client_name }}.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), - ) -{% else %} -def test_transport_kind(transport_name): - transport = {{ service.client_name }}.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +{% macro get_client(service, is_async) %} +{{- service.async_client_name if is_async else service.client_name -}} +{% endmacro %} + +{% macro get_transport_name(transport, is_async=False)%} +{{- transport + ("_asyncio" if is_async else "") -}} +{% endmacro %} + +{% macro transport_kind_test(service, transport, is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +def test_transport_kind_{{ transport_name }}(): + transport = {{ get_client(service, is_async) }}.get_transport_class("{{ transport_name }}")( + credentials={{get_credentials(is_async)}} ) -{% endif %} - assert transport.kind == transport_name -{% endmacro %} \ No newline at end of file + assert transport.kind == "{{ transport_name }}" + +{% endmacro %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 3ab6c7b392c4..0a28618b4ecd 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -174,12 +174,31 @@ def fragment(session, use_ads_templates=False): def fragment_alternative_templates(session): fragment(session, use_ads_templates=True) +# `_add_python_settings` consumes a path to a temporary directory (str; i.e. tmp_dir) and +# python settings (Dict; i.e. python settings) and modifies the service yaml within +# tmp_dir to include python settings. 
The primary purpose of this function is to modify +# the service yaml and include `rest_async_io_enabled=True` to test the async rest +# optional feature. +def _add_python_settings(tmp_dir, python_settings): + return f""" +import yaml +from pathlib import Path +temp_file_path = Path(f"{tmp_dir}/showcase_v1beta1.yaml") +with temp_file_path.open('r') as file: + data = yaml.safe_load(file) + data['publishing']['library_settings'] = {python_settings} + +with temp_file_path.open('w') as file: + yaml.safe_dump(data, file, default_flow_style=False, sort_keys=False) +""" +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): `rest_async_io_enabled` must be removed once async rest is GA. @contextmanager def showcase_library( session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), include_service_yaml=True, retry_config=True, + rest_async_io_enabled=False ): """Install the generated library into the session for showcase tests.""" @@ -220,6 +239,25 @@ def showcase_library( external=True, silent=True, ) + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): The section below updates the showcase service yaml + # to test experimental async rest transport. It must be removed once support for async rest is GA. + if rest_async_io_enabled: + # Install pyYAML for yaml. + session.install("pyYAML") + + python_settings = [ + { + 'version': 'google.showcase.v1beta1', + 'python_settings': { + 'experimental_features': { + 'rest_async_io_enabled': True + } + } + } + ] + update_service_yaml = _add_python_settings(tmp_dir, python_settings) + session.run("python", "-c" f"{update_service_yaml}") + # END TODO section to remove. if retry_config: session.run( "curl", @@ -392,6 +430,19 @@ def showcase_unit( run_showcase_unit_tests(session) +# TODO: `showcase_unit_w_rest_async` nox session runs showcase unit tests with the +# experimental async rest transport and must be removed once support for async rest is GA. 
+# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2121. +@nox.session(python=ALL_PYTHON) +def showcase_unit_w_rest_async( + session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), +): + """Run the generated unit tests with async rest transport against the Showcase library.""" + with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True) as lib: + session.chdir(lib) + run_showcase_unit_tests(session) + + @nox.session(python=ALL_PYTHON) def showcase_unit_alternative_templates(session): with showcase_library( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index f952581a0e03..b7bb1fad3415 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -16405,26 +16405,25 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = AssetServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = AssetServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = AssetServiceAsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = 
AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_transport_kind_rest(): + transport = AssetServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "rest" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 2e96e2aebf64..37bcaed5d04c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -3490,26 +3490,25 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = IAMCredentialsClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = IAMCredentialsClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = IAMCredentialsAsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = IAMCredentialsAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == 
"grpc_asyncio" + + +def test_transport_kind_rest(): + transport = IAMCredentialsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "rest" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 225cf30b7af6..5e67d9980cfb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -13837,26 +13837,25 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = EventarcClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = EventarcClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = EventarcAsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_transport_kind_rest(): + transport = EventarcClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert 
transport.kind == "rest" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 86f42cc044ee..17c35b4b5bb7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -12335,25 +12335,18 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = ConfigServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = ConfigServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = ConfigServiceV2AsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc_asyncio" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 7602beb75d07..c8b9bfa33388 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -3099,25 +3099,18 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = LoggingServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = LoggingServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = LoggingServiceV2AsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc_asyncio" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 8ac1d16baee0..1c391fcfc39f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2906,25 +2906,18 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() 
-@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = MetricsServiceV2Client.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = MetricsServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = MetricsServiceV2AsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = MetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc_asyncio" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 925ff8da9f0c..2771c207391e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -49,6 +49,7 @@ from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport +from .transports.rest_asyncio import AsyncCloudRedisRestTransport class CloudRedisClientMeta(type): @@ -62,6 +63,7 @@ class CloudRedisClientMeta(type): _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport 
_transport_registry["rest"] = CloudRedisRestTransport + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport def get_transport_class(cls, label: Optional[str] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py new file mode 100755 index 000000000000..5127656c348e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 + +from typing import Any, Optional + +from .rest_base import _BaseCloudRedisRestTransport + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=None, +) + +class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): + """Asynchronous REST backend transport for CloudRedis. 
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = 'https', + ) -> None: + """Instantiate the transport. + + NOTE: This async REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + url_scheme (str): the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=False, + url_scheme=url_scheme, + api_audience=None + ) + + @property + def kind(self) -> str: + return "rest_asyncio" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index d9bacbf441ba..906e41d32740 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -8403,26 +8403,32 @@ def test_transport_adc(transport_class): transport_class() adc.assert_called_once() -@pytest.mark.parametrize("transport_name", [ - "grpc", - "rest", -]) -def test_transport_kind(transport_name): - transport = CloudRedisClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), +def test_transport_kind_grpc(): + transport = CloudRedisClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() ) - assert transport.kind == transport_name + assert transport.kind == "grpc" -@pytest.mark.parametrize("transport_name", [ - "grpc_asyncio", -]) -@pytest.mark.asyncio -async def test_transport_kind_async(transport_name): - transport = CloudRedisAsyncClient.get_transport_class(transport_name)( - credentials=async_anonymous_credentials(), +def test_transport_kind_grpc_asyncio(): + transport = CloudRedisAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == 
"grpc_asyncio" + + +def test_transport_kind_rest(): + transport = CloudRedisClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_transport_kind_rest_asyncio(): + transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( + credentials=async_anonymous_credentials() ) - assert transport.kind == transport_name + assert transport.kind == "rest_asyncio" def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/redis_v1.yaml b/packages/gapic-generator/tests/integration/redis_v1.yaml index 499c13d4e44f..47440ccdad52 100644 --- a/packages/gapic-generator/tests/integration/redis_v1.yaml +++ b/packages/gapic-generator/tests/integration/redis_v1.yaml @@ -67,4 +67,13 @@ authentication: - selector: 'google.longrunning.Operations.*' oauth: canonical_scopes: |- - https://www.googleapis.com/auth/cloud-platform \ No newline at end of file + https://www.googleapis.com/auth/cloud-platform + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this section +# when async rest is GA. 
+publishing: + library_settings: + - version: 'google.cloud.redis.v1' + python_settings: + experimental_features: + rest_async_io_enabled: true From 9243f301ff2155cf619e1ca81a7da20f60993939 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 14 Sep 2024 08:08:24 -0400 Subject: [PATCH 1183/1339] chore: refactor empty_call_test (#2126) --- .../%name_%version/%sub/test_%service.py.j2 | 8 +- .../gapic/%name_%version/%sub/test_macros.j2 | 90 +- .../unit/gapic/asset_v1/test_asset_service.py | 1976 ++++++------ .../credentials_v1/test_iam_credentials.py | 342 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 1590 +++++----- .../logging_v2/test_config_service_v2.py | 2818 +++++++++-------- .../logging_v2/test_logging_service_v2.py | 419 +-- .../logging_v2/test_metrics_service_v2.py | 454 +-- .../unit/gapic/redis_v1/test_cloud_redis.py | 988 +++--- 9 files changed, 4361 insertions(+), 4324 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6f467a56a6da..b159eaa8addb 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -930,7 +930,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} + not method.http_options %}{% with method_name = (method.name + ("_unary" if method.operation_service else "")) | snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), 
@@ -1045,14 +1045,16 @@ def test_transport_adc(transport_class): {% set configs = [] %} {% for transport in opts.transport %} - {% do configs.append({'service':service, 'transport':transport, 'is_async':false}) %} + {% do configs.append({'service':service, 'api':api, 'transport':transport, 'is_async':false}) %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if 'grpc' in transport or rest_async_io_enabled %} - {% do configs.append({'service':service, 'transport':transport, 'is_async':true}) %} + {% do configs.append({'service':service, 'api':api, 'transport':transport, 'is_async':true}) %} {% endif %} {% endfor %} {% for conf in configs %} {{ test_macros.transport_kind_test(**conf) }} + +{{ test_macros.empty_call_test(**conf) }} {% endfor %} {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 2f2fb3b74f2f..d0c1c5c05563 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1,6 +1,5 @@ {% macro grpc_required_tests(method, service, api, full_extended_lro=False) %} {% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} -{% with uuid4_re = "[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}" %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -123,9 +122,6 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): {% if not method.client_streaming %} -{{ empty_call_test(method, 
method_name, service, api, uuid4_re)}} - - def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -155,7 +151,7 @@ def test_{{ method_name }}_non_empty_request_with_auto_populated_field(): {% if method_settings is not none %} {% for auto_populated_field in method_settings.auto_populated_fields %} # Ensure that the uuid4 field is set according to AIP 4235 - assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + assert re.match(r"{{ get_uuid4_re() }}", args[0].{{ auto_populated_field }}) # clear UUID field so that the check below succeeds args[0].{{ auto_populated_field }} = None {% endfor %} @@ -218,10 +214,6 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 {% if not full_extended_lro %} -{% if not method.client_streaming %} -{{ empty_call_test(method, method_name, service, api, uuid4_re, is_async=True) }} -{% endif %} - @pytest.mark.asyncio async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1044,7 +1036,6 @@ def test_{{ method_name }}_raw_page_lro(): response = {{ method.lro.response_type.ident }}() assert response.raw_page is response {% endif %}{# method.paged_result_field #}{% endwith %} -{% endwith %}{# uuid4_re #} {% endmacro %} {% macro rest_required_tests(method, service, numeric_enums=False, full_extended_lro=False) %} @@ -1808,26 +1799,23 @@ def test_{{ method_name }}_rest_no_http_options(): {% endmacro %} -{% macro empty_call_test(method, method_name, service, api, uuid4_re, is_async=False) %} +{# + This is a generic macro for testing method calls. Ideally this macro can be used to avoid duplication + in Jinja templates. 
If this macro cannot be customized for a specific method call test, consider + creating a new macro with the name `method_call_test_` for the macro which supports + a more customized method call. +#} +{% macro method_call_test_generic(test_name, method, service, api, transport, request, is_async=False) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% with method_name = (method.name + ("_unary" if method.operation_service else "")) | snake_case %} +{% set async_method_prefix = "async " if is_async else "" %} {% if is_async %} @pytest.mark.asyncio -async def test_{{ method_name }}_empty_call_async(): -{% else %} -def test_{{ method_name }}_empty_call(): -{% endif %}{# if is_async #} - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - {% if is_async %} - client = {{ service.async_client_name }}( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', +{% endif %}{# is_async #} +{{ async_method_prefix }}def test_{{ method_name }}_{{ test_name }}_{{transport_name}}(): + client = {{ get_client(service=service, is_async=is_async) }}( + credentials={{ get_credentials(is_async=is_async) }}, ) - {% else %} - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - {% endif %}{# if is_async #} # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1857,24 +1845,39 @@ def test_{{ method_name }}_empty_call(): {% endfor %} )) {% endif %}{# method.void #} - await client.{{ method_name }}() + await client.{{ method_name }}(request={{ request }}) {% else %}{# if not is_async #} - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
- client.{{ method_name }}() + {% if method.void %} + call.return_value = None + {% elif method.lro %} + call.return_value = operations_pb2.Operation(name='operations/op') + {% elif method.server_streaming %} + call.return_value = iter([{{ method.output.ident }}()]) + {% else %} + call.return_value = {{ method.output.ident }}() + {% endif %} + client.{{ method_name }}(request={{ request }}) {% endif %}{# is_async #} + + # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} {% if method_settings is not none %} {% for auto_populated_field in method_settings.auto_populated_fields %} # Ensure that the uuid4 field is set according to AIP 4235 - assert re.match(r"{{ uuid4_re }}", args[0].{{ auto_populated_field }}) + assert re.match(r"{{ get_uuid4_re() }}", args[0].{{ auto_populated_field }}) # clear UUID field so that the check below succeeds args[0].{{ auto_populated_field }} = None {% endfor %}{# for auto_populated_field in method_settings.auto_populated_fields #} {% endif %}{# if method_settings is not none #} {% endwith %}{# method_settings #} + {% if request %} + assert args[0] == {{ request }} + {% else %} assert args[0] == {{ method.input.ident }}() + {% endif %}{# request #} +{% endwith %}{# method_name #} {% endmacro %} {% macro get_credentials(is_async=False) %} @@ -1889,12 +1892,31 @@ def test_{{ method_name }}_empty_call(): {{- transport + ("_asyncio" if is_async else "") -}} {% endmacro %} -{% macro transport_kind_test(service, transport, is_async) %} +{% macro transport_kind_test(service, api, transport, is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} def test_transport_kind_{{ transport_name }}(): transport = {{ get_client(service, is_async) }}.get_transport_class("{{ transport_name }}")( credentials={{get_credentials(is_async)}} ) assert transport.kind == "{{ transport_name }}" - 
-{% endmacro %} +{% endmacro %}{# transport_kind_test #} + +{% macro empty_call_test(service, api, transport, is_async) %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2159): + Currently this macro only supports gRPC. It should be updated to support REST + transport as well. +#} +{% if 'rest' not in transport %} +{% for method in service.methods.values() %}{# method #} +{% if not method.client_streaming %} +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +{{ method_call_test_generic("empty_call", method, service, api, transport, request=None, is_async=is_async) }} +{% endif %}{# not method.client_streaming #} +{% endfor %}{# method in service.methods.values() #} +{% endif %}{# 'rest' not in transport #} +{% endmacro %}{# empty_call_test #} + +{% macro get_uuid4_re() -%} +[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12} +{%- endmacro %}{# uuid_re #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index b7bb1fad3415..268e14d432b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -815,25 +815,6 @@ def test_export_assets(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_export_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.export_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() - - def test_export_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -898,29 +879,6 @@ def test_export_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_export_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() - - @pytest.mark.asyncio async def test_export_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1094,25 +1052,6 @@ def test_list_assets(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() - - def test_list_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1174,29 +1113,6 @@ def test_list_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_assets), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.list_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() - - @pytest.mark.asyncio async def test_list_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1642,25 +1558,6 @@ def test_batch_get_assets_history(request_type, transport: str = 'grpc'): assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) -def test_batch_get_assets_history_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_get_assets_history() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() - - def test_batch_get_assets_history_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1720,28 +1617,6 @@ def test_batch_get_assets_history_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_batch_get_assets_history_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_assets_history), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( - )) - await client.batch_get_assets_history() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() - - @pytest.mark.asyncio async def test_batch_get_assets_history_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1917,25 +1792,6 @@ def test_create_feed(request_type, transport: str = 'grpc'): assert response.relationship_types == ['relationship_types_value'] -def test_create_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() - - def test_create_feed_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1997,33 +1853,6 @@ def test_create_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.create_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() - - @pytest.mark.asyncio async def test_create_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2291,25 +2120,6 @@ def test_get_feed(request_type, transport: str = 'grpc'): assert response.relationship_types == ['relationship_types_value'] -def test_get_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() - - def test_get_feed_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2369,33 +2179,6 @@ def test_get_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.get_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() - - @pytest.mark.asyncio async def test_get_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2653,25 +2436,6 @@ def test_list_feeds(request_type, transport: str = 'grpc'): assert isinstance(response, asset_service.ListFeedsResponse) -def test_list_feeds_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_feeds() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() - - def test_list_feeds_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2731,28 +2495,6 @@ def test_list_feeds_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_feeds_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_feeds), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( - )) - await client.list_feeds() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() - - @pytest.mark.asyncio async def test_list_feeds_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3010,25 +2752,6 @@ def test_update_feed(request_type, transport: str = 'grpc'): assert response.relationship_types == ['relationship_types_value'] -def test_update_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() - - def test_update_feed_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3086,33 +2809,6 @@ def test_update_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - )) - await client.update_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() - - @pytest.mark.asyncio async def test_update_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3369,28 +3065,9 @@ def test_delete_feed(request_type, transport: str = 'grpc'): assert response is None -def test_delete_feed_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() - - -def test_delete_feed_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. +def test_delete_feed_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', @@ -3447,27 +3124,6 @@ def test_delete_feed_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_feed_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_feed), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_feed() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() - - @pytest.mark.asyncio async def test_delete_feed_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3716,25 +3372,6 @@ def test_search_all_resources(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_search_all_resources_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.search_all_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() - - def test_search_all_resources_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3800,29 +3437,6 @@ def test_search_all_resources_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_all_resources_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_resources), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_resources() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() - - @pytest.mark.asyncio async def test_search_all_resources_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4290,25 +3904,6 @@ def test_search_all_iam_policies(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_search_all_iam_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.search_all_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() - - def test_search_all_iam_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4374,29 +3969,6 @@ def test_search_all_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_search_all_iam_policies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.search_all_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.search_all_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() - - @pytest.mark.asyncio async def test_search_all_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4854,25 +4426,6 @@ def test_analyze_iam_policy(request_type, transport: str = 'grpc'): assert response.fully_explored is True -def test_analyze_iam_policy_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() - - def test_analyze_iam_policy_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4932,29 +4485,6 @@ def test_analyze_iam_policy_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_iam_policy_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - )) - await client.analyze_iam_policy() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() - - @pytest.mark.asyncio async def test_analyze_iam_policy_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5121,25 +4651,6 @@ def test_analyze_iam_policy_longrunning(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_analyze_iam_policy_longrunning_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_iam_policy_longrunning() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() - - def test_analyze_iam_policy_longrunning_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -5204,29 +4715,6 @@ def test_analyze_iam_policy_longrunning_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_iam_policy_longrunning_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_iam_policy_longrunning), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.analyze_iam_policy_longrunning() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() - - @pytest.mark.asyncio async def test_analyze_iam_policy_longrunning_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5398,25 +4886,6 @@ def test_analyze_move(request_type, transport: str = 'grpc'): assert isinstance(response, asset_service.AnalyzeMoveResponse) -def test_analyze_move_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.analyze_move() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() - - def test_analyze_move_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5478,28 +4947,6 @@ def test_analyze_move_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_move_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_move), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( - )) - await client.analyze_move() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() - - @pytest.mark.asyncio async def test_analyze_move_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5669,25 +5116,6 @@ def test_query_assets(request_type, transport: str = 'grpc'): assert response.done is True -def test_query_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.query_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() - - def test_query_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5753,30 +5181,6 @@ def test_query_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_query_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.query_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - )) - await client.query_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() - - @pytest.mark.asyncio async def test_query_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5954,25 +5358,6 @@ def test_create_saved_query(request_type, transport: str = 'grpc'): assert response.last_updater == 'last_updater_value' -def test_create_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() - - def test_create_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6034,32 +5419,6 @@ def test_create_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.create_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() - - @pytest.mark.asyncio async def test_create_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6343,25 +5702,6 @@ def test_get_saved_query(request_type, transport: str = 'grpc'): assert response.last_updater == 'last_updater_value' -def test_get_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() - - def test_get_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6421,32 +5761,6 @@ def test_get_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.get_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() - - @pytest.mark.asyncio async def test_get_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6704,25 +6018,6 @@ def test_list_saved_queries(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_saved_queries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_saved_queries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() - - def test_list_saved_queries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -6786,29 +6081,6 @@ def test_list_saved_queries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_saved_queries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_saved_queries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_saved_queries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() - - @pytest.mark.asyncio async def test_list_saved_queries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7262,25 +6534,6 @@ def test_update_saved_query(request_type, transport: str = 'grpc'): assert response.last_updater == 'last_updater_value' -def test_update_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() - - def test_update_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7338,32 +6591,6 @@ def test_update_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - )) - await client.update_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() - - @pytest.mark.asyncio async def test_update_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7628,25 +6855,6 @@ def test_delete_saved_query(request_type, transport: str = 'grpc'): assert response is None -def test_delete_saved_query_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() - - def test_delete_saved_query_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7706,27 +6914,6 @@ def test_delete_saved_query_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_saved_query_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_saved_query), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_saved_query() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() - - @pytest.mark.asyncio async def test_delete_saved_query_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7973,25 +7160,6 @@ def test_batch_get_effective_iam_policies(request_type, transport: str = 'grpc') assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) -def test_batch_get_effective_iam_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.batch_get_effective_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() - - def test_batch_get_effective_iam_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8051,28 +7219,6 @@ def test_batch_get_effective_iam_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.batch_get_effective_iam_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( - )) - await client.batch_get_effective_iam_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() - - @pytest.mark.asyncio async def test_batch_get_effective_iam_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8240,25 +7386,6 @@ def test_analyze_org_policies(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_analyze_org_policies_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() - - def test_analyze_org_policies_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -8324,29 +7451,6 @@ def test_analyze_org_policies_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_org_policies_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policies), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policies() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() - - @pytest.mark.asyncio async def test_analyze_org_policies_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8814,25 +7918,6 @@ def test_analyze_org_policy_governed_containers(request_type, transport: str = ' assert response.next_page_token == 'next_page_token_value' -def test_analyze_org_policy_governed_containers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.analyze_org_policy_governed_containers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - def test_analyze_org_policy_governed_containers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8898,29 +7983,6 @@ def test_analyze_org_policy_governed_containers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_containers_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_containers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policy_governed_containers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - - @pytest.mark.asyncio async def test_analyze_org_policy_governed_containers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9388,25 +8450,6 @@ def test_analyze_org_policy_governed_assets(request_type, transport: str = 'grpc assert response.next_page_token == 'next_page_token_value' -def test_analyze_org_policy_governed_assets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.analyze_org_policy_governed_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - def test_analyze_org_policy_governed_assets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -9472,29 +8515,6 @@ def test_analyze_org_policy_governed_assets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_analyze_org_policy_governed_assets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.analyze_org_policy_governed_assets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - )) - await client.analyze_org_policy_governed_assets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - - @pytest.mark.asyncio async def test_analyze_org_policy_governed_assets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -16412,11 +15432,993 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" -def test_transport_kind_grpc_asyncio(): - transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - assert transport.kind == "grpc_asyncio" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_assets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + call.return_value = asset_service.ListAssetsResponse() + client.list_assets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_assets_history_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + call.return_value = asset_service.BatchGetAssetsHistoryResponse() + client.batch_get_assets_history(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.create_feed(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.get_feed(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_feeds_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + call.return_value = asset_service.ListFeedsResponse() + client.list_feeds(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + call.return_value = asset_service.Feed() + client.update_feed(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_feed_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + call.return_value = None + client.delete_feed(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_resources_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + call.return_value = asset_service.SearchAllResourcesResponse() + client.search_all_resources(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_iam_policies_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + call.return_value = asset_service.SearchAllIamPoliciesResponse() + client.search_all_iam_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllIamPoliciesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + call.return_value = asset_service.AnalyzeIamPolicyResponse() + client.analyze_iam_policy(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_longrunning_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.analyze_iam_policy_longrunning(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_move_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + call.return_value = asset_service.AnalyzeMoveResponse() + client.analyze_move(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + call.return_value = asset_service.QueryAssetsResponse() + client.query_assets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.create_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.get_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_saved_queries_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + call.return_value = asset_service.ListSavedQueriesResponse() + client.list_saved_queries(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + call.return_value = asset_service.SavedQuery() + client.update_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_saved_query_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + call.return_value = None + client.delete_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_effective_iam_policies_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() + client.batch_get_effective_iam_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policies_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPoliciesResponse() + client.analyze_org_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policy_governed_containers_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + client.analyze_org_policy_governed_containers(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policy_governed_assets_empty_call_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + client.analyze_org_policy_governed_assets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + +def test_transport_kind_grpc_asyncio(): + transport = AssetServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.export_assets(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ExportAssetsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.list_assets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListAssetsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_get_assets_history_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetAssetsHistoryResponse( + )) + await client.batch_get_assets_history(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + await client.create_feed(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + await client.get_feed(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_feeds_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListFeedsResponse( + )) + await client.list_feeds(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListFeedsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + )) + await client.update_feed(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_feed_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_feed(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteFeedRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_all_resources_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + )) + await client.search_all_resources(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllResourcesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_search_all_iam_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + )) + await client.search_all_iam_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.SearchAllIamPoliciesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_iam_policy_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + )) + await client.analyze_iam_policy(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.analyze_iam_policy_longrunning(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_move_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeMoveResponse( + )) + await client.analyze_move(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeMoveRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_query_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + )) + await client.query_assets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.QueryAssetsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + await client.create_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.CreateSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + await client.get_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.GetSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_saved_queries_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.ListSavedQueriesResponse( + next_page_token='next_page_token_value', + )) + await client.list_saved_queries(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.ListSavedQueriesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + )) + await client.update_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.UpdateSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_saved_query_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_saved_query(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.DeleteSavedQueryRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( + )) + await client.batch_get_effective_iam_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policy_governed_containers(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policy_governed_assets(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() def test_transport_kind_rest(): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 37bcaed5d04c..ee56784e8616 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -808,25 +808,6 @@ def test_generate_access_token(request_type, transport: str = 'grpc'): assert response.access_token == 'access_token_value' -def test_generate_access_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.generate_access_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() - - def test_generate_access_token_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -886,29 +867,6 @@ def test_generate_access_token_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_generate_access_token_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_access_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( - access_token='access_token_value', - )) - await client.generate_access_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() - - @pytest.mark.asyncio async def test_generate_access_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1186,25 +1144,6 @@ def test_generate_id_token(request_type, transport: str = 'grpc'): assert response.token == 'token_value' -def test_generate_id_token_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.generate_id_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() - - def test_generate_id_token_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1266,29 +1205,6 @@ def test_generate_id_token_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_generate_id_token_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.generate_id_token), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( - token='token_value', - )) - await client.generate_id_token() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() - - @pytest.mark.asyncio async def test_generate_id_token_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1572,25 +1488,6 @@ def test_sign_blob(request_type, transport: str = 'grpc'): assert response.signed_blob == b'signed_blob_blob' -def test_sign_blob_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.sign_blob() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() - - def test_sign_blob_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1650,30 +1547,6 @@ def test_sign_blob_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_sign_blob_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_blob), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - )) - await client.sign_blob() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() - - @pytest.mark.asyncio async def test_sign_blob_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1949,25 +1822,6 @@ def test_sign_jwt(request_type, transport: str = 'grpc'): assert response.signed_jwt == 'signed_jwt_value' -def test_sign_jwt_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.sign_jwt() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() - - def test_sign_jwt_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2029,30 +1883,6 @@ def test_sign_jwt_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_sign_jwt_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = IAMCredentialsAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.sign_jwt), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - )) - await client.sign_jwt() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() - - @pytest.mark.asyncio async def test_sign_jwt_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3497,6 +3327,83 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_generate_access_token_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + call.return_value = common.GenerateAccessTokenResponse() + client.generate_access_token(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_id_token_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + call.return_value = common.GenerateIdTokenResponse() + client.generate_id_token(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_blob_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + call.return_value = common.SignBlobResponse() + client.sign_blob(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_jwt_empty_call_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + call.return_value = common.SignJwtResponse() + client.sign_jwt(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest() + + def test_transport_kind_grpc_asyncio(): transport = IAMCredentialsAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -3504,6 +3411,101 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_access_token_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateAccessTokenResponse( + access_token='access_token_value', + )) + await client.generate_access_token(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateAccessTokenRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_generate_id_token_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.GenerateIdTokenResponse( + token='token_value', + )) + await client.generate_id_token(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.GenerateIdTokenRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_sign_blob_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + )) + await client.sign_blob(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignBlobRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_sign_jwt_empty_call_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + )) + await client.sign_jwt(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == common.SignJwtRequest() + + def test_transport_kind_rest(): transport = IAMCredentialsClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 5e67d9980cfb..54f7efa8902c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -836,25 +836,6 @@ def test_get_trigger(request_type, transport: str = 'grpc'): assert response.etag == 'etag_value' -def test_get_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() - - def test_get_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -914,33 +895,6 @@ def test_get_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - )) - await client.get_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() - - @pytest.mark.asyncio async def test_get_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1202,25 +1156,6 @@ def test_list_triggers(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() - - def test_list_triggers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1286,30 +1221,6 @@ def test_list_triggers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_triggers_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() - - @pytest.mark.asyncio async def test_list_triggers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1756,25 +1667,6 @@ def test_create_trigger(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() - - def test_create_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1841,29 +1733,6 @@ def test_create_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() - - @pytest.mark.asyncio async def test_create_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2138,25 +2007,6 @@ def test_update_trigger(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() - - def test_update_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -2219,29 +2069,6 @@ def test_update_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() - - @pytest.mark.asyncio async def test_update_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2516,25 +2343,6 @@ def test_delete_trigger(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.delete_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() - - def test_delete_trigger_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2601,29 +2409,6 @@ def test_delete_trigger_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_trigger_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() - - @pytest.mark.asyncio async def test_delete_trigger_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2902,25 +2687,6 @@ def test_get_channel(request_type, transport: str = 'grpc'): assert response.crypto_key_name == 'crypto_key_name_value' -def test_get_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() - - def test_get_channel_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2980,34 +2746,6 @@ def test_get_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - )) - await client.get_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() - - @pytest.mark.asyncio async def test_get_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3271,25 +3009,6 @@ def test_list_channels(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_channels_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_channels() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() - - def test_list_channels_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3353,30 +3072,6 @@ def test_list_channels_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_channels_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channels), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_channels() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() - - @pytest.mark.asyncio async def test_list_channels_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3823,28 +3518,9 @@ def test_create_channel(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() - - -def test_create_channel_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. 
+def test_create_channel_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', @@ -3908,29 +3584,6 @@ def test_create_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() - - @pytest.mark.asyncio async def test_create_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4205,25 +3858,6 @@ def test_update_channel(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() - - def test_update_channel_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4286,29 +3920,6 @@ def test_update_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_channel), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() - - @pytest.mark.asyncio async def test_update_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4573,25 +4184,6 @@ def test_delete_channel(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_channel_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() - - def test_delete_channel_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4656,29 +4248,6 @@ def test_delete_channel_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_channel_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_channel() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() - - @pytest.mark.asyncio async def test_delete_channel_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4938,25 +4507,6 @@ def test_get_provider(request_type, transport: str = 'grpc'): assert response.display_name == 'display_name_value' -def test_get_provider_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_provider() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() - - def test_get_provider_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5016,30 +4566,6 @@ def test_get_provider_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_provider_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_provider), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( - name='name_value', - display_name='display_name_value', - )) - await client.get_provider() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() - - @pytest.mark.asyncio async def test_get_provider_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5295,25 +4821,6 @@ def test_list_providers(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_providers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_providers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() - - def test_list_providers_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -5379,30 +4886,6 @@ def test_list_providers_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_providers_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_providers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_providers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() - - @pytest.mark.asyncio async def test_list_providers_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5858,25 +5341,6 @@ def test_get_channel_connection(request_type, transport: str = 'grpc'): assert response.activation_token == 'activation_token_value' -def test_get_channel_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() - - def test_get_channel_connection_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5936,32 +5400,6 @@ def test_get_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_channel_connection_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - )) - await client.get_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() - - @pytest.mark.asyncio async def test_get_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6221,25 +5659,6 @@ def test_list_channel_connections(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_channel_connections_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_channel_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() - - def test_list_channel_connections_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6301,30 +5720,6 @@ def test_list_channel_connections_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_channel_connections_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_channel_connections), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_channel_connections() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() - - @pytest.mark.asyncio async def test_list_channel_connections_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6771,47 +6166,28 @@ def test_create_channel_connection(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_channel_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), transport='grpc', ) + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = eventarc.CreateChannelConnectionRequest( + parent='parent_value', + channel_connection_id='channel_connection_id_value', + ) + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.create_channel_connection), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.create_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() - - -def test_create_channel_connection_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = eventarc.CreateChannelConnectionRequest( - parent='parent_value', - channel_connection_id='channel_connection_id_value', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_channel_connection(request=request) + client.create_channel_connection(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == eventarc.CreateChannelConnectionRequest( @@ -6856,29 +6232,6 @@ def test_create_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_channel_connection_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() - - @pytest.mark.asyncio async def test_create_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7153,25 +6506,6 @@ def test_delete_channel_connection(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_channel_connection_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() - - def test_delete_channel_connection_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -7236,29 +6570,6 @@ def test_delete_channel_connection_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_channel_connection_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_channel_connection), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_channel_connection() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() - - @pytest.mark.asyncio async def test_delete_channel_connection_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7518,25 +6829,6 @@ def test_get_google_channel_config(request_type, transport: str = 'grpc'): assert response.crypto_key_name == 'crypto_key_name_value' -def test_get_google_channel_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() - - def test_get_google_channel_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7596,30 +6888,6 @@ def test_get_google_channel_config_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_google_channel_config_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_google_channel_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) - await client.get_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() - - @pytest.mark.asyncio async def test_get_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7875,25 +7143,6 @@ def test_update_google_channel_config(request_type, transport: str = 'grpc'): assert response.crypto_key_name == 'crypto_key_name_value' -def test_update_google_channel_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() - - def test_update_google_channel_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7951,30 +7200,6 @@ def test_update_google_channel_config_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_google_channel_config_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_google_channel_config), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - )) - await client.update_google_channel_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() - - @pytest.mark.asyncio async def test_update_google_channel_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -13844,11 +13069,788 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" -def test_transport_kind_grpc_asyncio(): - transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), ) - assert transport.kind == "grpc_asyncio" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + call.return_value = trigger.Trigger() + client.get_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_triggers_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + call.return_value = eventarc.ListTriggersResponse() + client.list_triggers(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_trigger_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + call.return_value = channel.Channel() + client.get_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channels_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + call.return_value = eventarc.ListChannelsResponse() + client.list_channels(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_provider_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + call.return_value = discovery.Provider() + client.get_provider(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_providers_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + call.return_value = eventarc.ListProvidersResponse() + client.list_providers(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + call.return_value = channel_connection.ChannelConnection() + client.get_channel_connection(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channel_connections_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + call.return_value = eventarc.ListChannelConnectionsResponse() + client.list_channel_connections(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_channel_connection(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_connection_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_channel_connection(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_channel_config_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + call.return_value = google_channel_config.GoogleChannelConfig() + client.get_google_channel_config(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_google_channel_config_empty_call_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + call.return_value = gce_google_channel_config.GoogleChannelConfig() + client.update_google_channel_config(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + + +def test_transport_kind_grpc_asyncio(): + transport = EventarcAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', + )) + await client.get_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_triggers_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_triggers(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListTriggersRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_trigger_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_trigger(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteTriggerRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_channels_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_channels(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_channel(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_channel_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_channel(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_provider_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(discovery.Provider( + name='name_value', + display_name='display_name_value', + )) + await client.get_provider(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetProviderRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_providers_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_providers(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListProvidersRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + )) + await client.get_channel_connection(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetChannelConnectionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_channel_connections_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_channel_connections(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.ListChannelConnectionsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_channel_connection(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.CreateChannelConnectionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_channel_connection_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_channel_connection(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.DeleteChannelConnectionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_google_channel_config_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.get_google_channel_config(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.GetGoogleChannelConfigRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_google_channel_config_empty_call_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + )) + await client.update_google_channel_config(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() def test_transport_kind_rest(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 17c35b4b5bb7..e1709e17ed5f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -795,25 +795,6 @@ def test_list_buckets(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_buckets_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_buckets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - def test_list_buckets_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -875,29 +856,6 @@ def test_list_buckets_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_buckets_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_buckets), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( - next_page_token='next_page_token_value', - )) - await client.list_buckets() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() - - @pytest.mark.asyncio async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1357,25 +1315,6 @@ def test_get_bucket(request_type, transport: str = 'grpc'): assert response.restricted_fields == ['restricted_fields_value'] -def test_get_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - def test_get_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1435,35 +1374,6 @@ def test_get_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) - await client.get_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() - - @pytest.mark.asyncio async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1642,25 +1552,6 @@ def test_create_bucket_async(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_bucket_async_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - def test_create_bucket_async_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1727,29 +1618,6 @@ def test_create_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_bucket_async_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket_async), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - @pytest.mark.asyncio async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1920,25 +1788,6 @@ def test_update_bucket_async(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_bucket_async_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - def test_update_bucket_async_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2003,29 +1852,6 @@ def test_update_bucket_async_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_bucket_async_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket_async), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_bucket_async() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - @pytest.mark.asyncio async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2211,25 +2037,6 @@ def test_create_bucket(request_type, transport: str = 'grpc'): assert response.restricted_fields == ['restricted_fields_value'] -def test_create_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - def test_create_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -2291,35 +2098,6 @@ def test_create_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) - await client.create_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() - - @pytest.mark.asyncio async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2513,25 +2291,6 @@ def test_update_bucket(request_type, transport: str = 'grpc'): assert response.restricted_fields == ['restricted_fields_value'] -def test_update_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - def test_update_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2591,35 +2350,6 @@ def test_update_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( - name='name_value', - description='description_value', - retention_days=1512, - locked=True, - lifecycle_state=logging_config.LifecycleState.ACTIVE, - analytics_enabled=True, - restricted_fields=['restricted_fields_value'], - )) - await client.update_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() - - @pytest.mark.asyncio async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2798,25 +2528,6 @@ def test_delete_bucket(request_type, transport: str = 'grpc'): assert response is None -def test_delete_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() - - def test_delete_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2876,27 +2587,6 @@ def test_delete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() - - @pytest.mark.asyncio async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3060,25 +2750,6 @@ def test_undelete_bucket(request_type, transport: str = 'grpc'): assert response is None -def test_undelete_bucket_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.undelete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() - - def test_undelete_bucket_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -3138,27 +2809,6 @@ def test_undelete_bucket_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_undelete_bucket_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.undelete_bucket), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.undelete_bucket() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() - - @pytest.mark.asyncio async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3325,25 +2975,6 @@ def test_list_views(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_views_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_views() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() - - def test_list_views_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3405,29 +3036,6 @@ def test_list_views_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_views_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_views), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( - next_page_token='next_page_token_value', - )) - await client.list_views() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() - - @pytest.mark.asyncio async def test_list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3879,25 +3487,6 @@ def test_get_view(request_type, transport: str = 'grpc'): assert response.filter == 'filter_value' -def test_get_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() - - def test_get_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3957,31 +3546,6 @@ def test_get_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) - await client.get_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() - - @pytest.mark.asyncio async def test_get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4159,25 +3723,6 @@ def test_create_view(request_type, transport: str = 'grpc'): assert response.filter == 'filter_value' -def test_create_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() - - def test_create_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4239,31 +3784,6 @@ def test_create_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_view), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) - await client.create_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() - - @pytest.mark.asyncio async def test_create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4441,25 +3961,6 @@ def test_update_view(request_type, transport: str = 'grpc'): assert response.filter == 'filter_value' -def test_update_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() - - def test_update_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4519,31 +4020,6 @@ def test_update_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( - name='name_value', - description='description_value', - filter='filter_value', - )) - await client.update_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() - - @pytest.mark.asyncio async def test_update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4714,25 +4190,6 @@ def test_delete_view(request_type, transport: str = 'grpc'): assert response is None -def test_delete_view_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() - - def test_delete_view_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -4792,27 +4249,6 @@ def test_delete_view_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_view_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_view), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_view() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() - - @pytest.mark.asyncio async def test_delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4979,25 +4415,6 @@ def test_list_sinks(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_sinks_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_sinks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() - - def test_list_sinks_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5059,29 +4476,6 @@ def test_list_sinks_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_sinks_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_sinks), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( - next_page_token='next_page_token_value', - )) - await client.list_sinks() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() - - @pytest.mark.asyncio async def test_list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5543,25 +4937,6 @@ def test_get_sink(request_type, transport: str = 'grpc'): assert response.include_children is True -def test_get_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - def test_get_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5621,36 +4996,6 @@ def test_get_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - await client.get_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() - - @pytest.mark.asyncio async def test_get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5930,25 +5275,6 @@ def test_create_sink(request_type, transport: str = 'grpc'): assert response.include_children is True -def test_create_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - def test_create_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6008,36 +5334,6 @@ def test_create_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - await client.create_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() - - @pytest.mark.asyncio async def test_create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6327,25 +5623,6 @@ def test_update_sink(request_type, transport: str = 'grpc'): assert response.include_children is True -def test_update_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - def test_update_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6405,36 +5682,6 @@ def test_update_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_sink), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( - name='name_value', - destination='destination_value', - filter='filter_value', - description='description_value', - disabled=True, - output_version_format=logging_config.LogSink.VersionFormat.V2, - writer_identity='writer_identity_value', - include_children=True, - )) - await client.update_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() - - @pytest.mark.asyncio async def test_update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6717,25 +5964,6 @@ def test_delete_sink(request_type, transport: str = 'grpc'): assert response is None -def test_delete_sink_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - def test_delete_sink_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6795,27 +6023,6 @@ def test_delete_sink_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_sink_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_sink), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_sink() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() - - @pytest.mark.asyncio async def test_delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7061,25 +6268,6 @@ def test_create_link(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() - - def test_create_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7146,29 +6334,6 @@ def test_create_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() - - @pytest.mark.asyncio async def test_create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7443,25 +6608,6 @@ def test_delete_link(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() - - def test_delete_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7526,29 +6672,6 @@ def test_delete_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() - - @pytest.mark.asyncio async def test_delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -7806,25 +6929,6 @@ def test_list_links(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_links_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_links() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() - - def test_list_links_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -7886,29 +6990,6 @@ def test_list_links_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_links_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_links), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( - next_page_token='next_page_token_value', - )) - await client.list_links() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() - - @pytest.mark.asyncio async def test_list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8360,25 +7441,6 @@ def test_get_link(request_type, transport: str = 'grpc'): assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_link_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() - - def test_get_link_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8438,31 +7500,6 @@ def test_get_link_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_link_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_link), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( - name='name_value', - description='description_value', - lifecycle_state=logging_config.LifecycleState.ACTIVE, - )) - await client.get_link() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() - - @pytest.mark.asyncio async def test_get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8718,25 +7755,6 @@ def test_list_exclusions(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_exclusions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - def test_list_exclusions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -8798,29 +7816,6 @@ def test_list_exclusions_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_exclusions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_exclusions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( - next_page_token='next_page_token_value', - )) - await client.list_exclusions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() - - @pytest.mark.asyncio async def test_list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9274,25 +8269,6 @@ def test_get_exclusion(request_type, transport: str = 'grpc'): assert response.disabled is True -def test_get_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - def test_get_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -9352,32 +8328,6 @@ def test_get_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - await client.get_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() - - @pytest.mark.asyncio async def test_get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -9641,25 +8591,6 @@ def test_create_exclusion(request_type, transport: str = 'grpc'): assert response.disabled is True -def test_create_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - def test_create_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -9719,32 +8650,6 @@ def test_create_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - await client.create_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() - - @pytest.mark.asyncio async def test_create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10018,25 +8923,6 @@ def test_update_exclusion(request_type, transport: str = 'grpc'): assert response.disabled is True -def test_update_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - def test_update_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10096,32 +8982,6 @@ def test_update_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( - name='name_value', - description='description_value', - filter='filter_value', - disabled=True, - )) - await client.update_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() - - @pytest.mark.asyncio async def test_update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10396,25 +9256,6 @@ def test_delete_exclusion(request_type, transport: str = 'grpc'): assert response is None -def test_delete_exclusion_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - def test_delete_exclusion_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10474,27 +9315,6 @@ def test_delete_exclusion_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_exclusion_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_exclusion), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_exclusion() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() - - @pytest.mark.asyncio async def test_delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -10749,25 +9569,6 @@ def test_get_cmek_settings(request_type, transport: str = 'grpc'): assert response.service_account_id == 'service_account_id_value' -def test_get_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - def test_get_cmek_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -10827,32 +9628,6 @@ def test_get_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_cmek_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) - await client.get_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() - - @pytest.mark.asyncio async def test_get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11034,25 +9809,6 @@ def test_update_cmek_settings(request_type, transport: str = 'grpc'): assert response.service_account_id == 'service_account_id_value' -def test_update_cmek_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - def test_update_cmek_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11112,32 +9868,6 @@ def test_update_cmek_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_cmek_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_cmek_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_key_version_name='kms_key_version_name_value', - service_account_id='service_account_id_value', - )) - await client.update_cmek_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() - - @pytest.mark.asyncio async def test_update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11321,25 +10051,6 @@ def test_get_settings(request_type, transport: str = 'grpc'): assert response.disable_default_sink is True -def test_get_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() - - def test_get_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11399,33 +10110,6 @@ def test_get_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_settings), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) - await client.get_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() - - @pytest.mark.asyncio async def test_get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -11693,25 +10377,6 @@ def test_update_settings(request_type, transport: str = 'grpc'): assert response.disable_default_sink is True -def test_update_settings_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - def test_update_settings_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -11771,33 +10436,6 @@ def test_update_settings_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_settings_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_settings), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( - name='name_value', - kms_key_name='kms_key_name_value', - kms_service_account_id='kms_service_account_id_value', - storage_location='storage_location_value', - disable_default_sink=True, - )) - await client.update_settings() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() - - @pytest.mark.asyncio async def test_update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12064,25 +10702,6 @@ def test_copy_log_entries(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_copy_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.copy_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() - - def test_copy_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -12151,29 +10770,6 @@ def test_copy_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_copy_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.copy_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.copy_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() - - @pytest.mark.asyncio async def test_copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -12342,11 +10938,1417 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" -def test_transport_kind_grpc_asyncio(): - transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_buckets_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), ) - assert transport.kind == "grpc_asyncio" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + call.return_value = logging_config.ListBucketsResponse() + client.list_buckets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.get_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_bucket_async(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_async_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_bucket_async(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.create_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.update_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + call.return_value = None + client.delete_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_undelete_bucket_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + call.return_value = None + client.undelete_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_views_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + call.return_value = logging_config.ListViewsResponse() + client.list_views(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.get_view(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.create_view(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client.update_view(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_view_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + call.return_value = None + client.delete_view(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_sinks_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + call.return_value = logging_config.ListSinksResponse() + client.list_sinks(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.get_sink(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.create_sink(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client.update_sink(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_sink_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + call.return_value = None + client.delete_sink(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_link), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_link(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_link), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_link(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_links_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + call.return_value = logging_config.ListLinksResponse() + client.list_links(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_link_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_link), + '__call__') as call: + call.return_value = logging_config.Link() + client.get_link(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_exclusions_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + call.return_value = logging_config.ListExclusionsResponse() + client.list_exclusions(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.get_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.create_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client.update_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_exclusion_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + call.return_value = None + client.delete_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_cmek_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.get_cmek_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_cmek_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + call.return_value = logging_config.CmekSettings() + client.update_cmek_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.get_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_settings_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client.update_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_copy_log_entries_empty_call_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.copy_log_entries(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() + + +def test_transport_kind_grpc_asyncio(): + transport = ConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_buckets_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse( + next_page_token='next_page_token_value', + )) + await client.list_buckets(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + await client.get_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_bucket_async(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bucket_async_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_bucket_async(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + await client.create_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + await client.update_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_undelete_bucket_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.undelete_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.undelete_bucket(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UndeleteBucketRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_views_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_views), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + )) + await client.list_views(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListViewsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client.get_view(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client.create_view(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client.update_view(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_view_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_view(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_sinks_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + )) + await client.list_sinks(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client.get_sink(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client.create_sink(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client.update_sink(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_sink_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_sink(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_link(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_link(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_links_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_links), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( + next_page_token='next_page_token_value', + )) + await client.list_links(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_link_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( + name='name_value', + description='description_value', + lifecycle_state=logging_config.LifecycleState.ACTIVE, + )) + await client.get_link(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_exclusions_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + await client.list_exclusions(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client.get_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client.create_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client.update_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_exclusion_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_exclusion(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_cmek_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + await client.get_cmek_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_cmek_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + await client.update_cmek_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + await client.get_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_settings_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + await client.update_settings(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_copy_log_entries_empty_call_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.copy_log_entries(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest() def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index c8b9bfa33388..72d12df2f5ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -794,25 +794,6 @@ def test_delete_log(request_type, transport: str = 'grpc'): assert response is None -def test_delete_log_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - def test_delete_log_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -872,27 +853,6 @@ def test_delete_log_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_log_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() - - @pytest.mark.asyncio async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1139,25 +1099,6 @@ def test_write_log_entries(request_type, transport: str = 'grpc'): assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - def test_write_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1217,28 +1158,6 @@ def test_write_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_write_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.write_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( - )) - await client.write_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() - - @pytest.mark.asyncio async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1455,25 +1374,6 @@ def test_list_log_entries(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_log_entries_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - def test_list_log_entries_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1537,29 +1437,6 @@ def test_list_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_log_entries_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_entries), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( - next_page_token='next_page_token_value', - )) - await client.list_log_entries() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() - - @pytest.mark.asyncio async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1959,25 +1836,6 @@ def test_list_monitored_resource_descriptors(request_type, transport: str = 'grp assert response.next_page_token == 'next_page_token_value' -def test_list_monitored_resource_descriptors_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2037,29 +1895,6 @@ def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_monitored_resource_descriptors_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_monitored_resource_descriptors), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( - next_page_token='next_page_token_value', - )) - await client.list_monitored_resource_descriptors() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() - - @pytest.mark.asyncio async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2359,25 +2194,6 @@ def test_list_logs(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_logs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - def test_list_logs_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2439,30 +2255,6 @@ def test_list_logs_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_logs_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_logs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( - log_names=['log_names_value'], - next_page_token='next_page_token_value', - )) - await client.list_logs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() - - @pytest.mark.asyncio async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2942,7 +2734,6 @@ def test_tail_log_entries_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 - @pytest.mark.asyncio async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3106,6 +2897,102 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = None + client.delete_log(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_write_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + call.return_value = logging.WriteLogEntriesResponse() + client.write_log_entries(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + call.return_value = logging.ListLogEntriesResponse() + client.list_log_entries(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_monitored_resource_descriptors_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + call.return_value = logging.ListMonitoredResourceDescriptorsResponse() + client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_logs_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + call.return_value = logging.ListLogsResponse() + client.list_logs(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + + def test_transport_kind_grpc_asyncio(): transport = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -3113,6 +3000,120 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_log_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_write_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( + )) + await client.write_log_entries(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_log_entries_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + )) + await client.list_log_entries(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + )) + await client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_logs_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + )) + await client.list_logs(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = LoggingServiceV2Client( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 1c391fcfc39f..7555ad73a79e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -795,25 +795,6 @@ def test_list_log_metrics(request_type, transport: str = 'grpc'): assert response.next_page_token == 'next_page_token_value' -def test_list_log_metrics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - def test_list_log_metrics_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -875,29 +856,6 @@ def test_list_log_metrics_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_log_metrics_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_log_metrics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( - next_page_token='next_page_token_value', - )) - await client.list_log_metrics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() - - @pytest.mark.asyncio async def test_list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1357,25 +1315,6 @@ def test_get_log_metric(request_type, transport: str = 'grpc'): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - def test_get_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1435,35 +1374,6 @@ def test_get_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - await client.get_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() - - @pytest.mark.asyncio async def test_get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1739,25 +1649,6 @@ def test_create_log_metric(request_type, transport: str = 'grpc'): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - def test_create_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1817,35 +1708,6 @@ def test_create_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - await client.create_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() - - @pytest.mark.asyncio async def test_create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2131,25 +1993,6 @@ def test_update_log_metric(request_type, transport: str = 'grpc'): assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - def test_update_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -2209,35 +2052,6 @@ def test_update_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( - name='name_value', - description='description_value', - filter='filter_value', - bucket_name='bucket_name_value', - disabled=True, - value_extractor='value_extractor_value', - version=logging_metrics.LogMetric.ApiVersion.V1, - )) - await client.update_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() - - @pytest.mark.asyncio async def test_update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2508,25 +2322,6 @@ def test_delete_log_metric(request_type, transport: str = 'grpc'): assert response is None -def test_delete_log_metric_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - def test_delete_log_metric_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2586,27 +2381,6 @@ def test_delete_log_metric_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_log_metric_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_log_metric), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_log_metric() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() - - @pytest.mark.asyncio async def test_delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2913,6 +2687,102 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_log_metrics_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client.list_log_metrics(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.get_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.create_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client.update_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_metric_empty_call_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + call.return_value = None + client.delete_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + def test_transport_kind_grpc_asyncio(): transport = MetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -2920,6 +2790,138 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_log_metrics_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + )) + await client.list_log_metrics(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client.get_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client.create_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client.update_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_log_metric_empty_call_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log_metric(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = MetricsServiceV2Client( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 906e41d32740..b73b73a73801 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -821,25 +821,6 @@ def test_list_instances(request_type, transport: str = 'grpc'): assert response.unreachable == ['unreachable_value'] -def test_list_instances_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() - - def test_list_instances_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -901,30 +882,6 @@ def test_list_instances_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_instances_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_instances), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - await client.list_instances() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() - - @pytest.mark.asyncio async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -1426,25 +1383,6 @@ def test_get_instance(request_type, transport: str = 'grpc'): assert response.available_maintenance_versions == ['available_maintenance_versions_value'] -def test_get_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() - - def test_get_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1504,55 +1442,6 @@ def test_get_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - )) - await client.get_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() - - @pytest.mark.asyncio async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ 
-1856,25 +1745,6 @@ def test_get_instance_auth_string(request_type, transport: str = 'grpc'): assert response.auth_string == 'auth_string_value' -def test_get_instance_auth_string_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.get_instance_auth_string() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() - - def test_get_instance_auth_string_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1934,29 +1804,6 @@ def test_get_instance_auth_string_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_instance_auth_string_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_instance_auth_string), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - )) - await client.get_instance_auth_string() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() - - @pytest.mark.asyncio async def test_get_instance_auth_string_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2205,25 +2052,6 @@ def test_create_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_create_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() - - def test_create_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2290,29 +2118,6 @@ def test_create_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.create_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() - - @pytest.mark.asyncio async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2587,25 +2392,6 @@ def test_update_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_update_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() - - def test_update_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -2668,29 +2454,6 @@ def test_update_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.update_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() - - @pytest.mark.asyncio async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2955,25 +2718,6 @@ def test_upgrade_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_upgrade_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.upgrade_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() - - def test_upgrade_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3040,29 +2784,6 @@ def test_upgrade_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_upgrade_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.upgrade_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.upgrade_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() - - @pytest.mark.asyncio async def test_upgrade_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3327,25 +3048,6 @@ def test_import_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_import_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.import_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() - - def test_import_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3410,29 +3112,6 @@ def test_import_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_import_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.import_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.import_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() - - @pytest.mark.asyncio async def test_import_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3697,25 +3376,6 @@ def test_export_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_export_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.export_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() - - def test_export_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3780,29 +3440,6 @@ def test_export_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_export_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.export_instance), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.export_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() - - @pytest.mark.asyncio async def test_export_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4067,25 +3704,6 @@ def test_failover_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_failover_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.failover_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() - - def test_failover_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4150,29 +3768,6 @@ def test_failover_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_failover_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.failover_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.failover_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() - - @pytest.mark.asyncio async def test_failover_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4437,25 +4032,6 @@ def test_delete_instance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_delete_instance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() - - def test_delete_instance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -4520,29 +4096,6 @@ def test_delete_instance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_instance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_instance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.delete_instance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() - - @pytest.mark.asyncio async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4797,25 +4350,6 @@ def test_reschedule_maintenance(request_type, transport: str = 'grpc'): assert isinstance(response, future.Future) -def test_reschedule_maintenance_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client.reschedule_maintenance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() - - def test_reschedule_maintenance_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4880,29 +4414,6 @@ def test_reschedule_maintenance_use_cached_wrapped_rpc(): assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_reschedule_maintenance_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport='grpc_asyncio', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reschedule_maintenance), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name='operations/spam') - ) - await client.reschedule_maintenance() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() - - @pytest.mark.asyncio async def test_reschedule_maintenance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -8410,11 +7921,502 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" -def test_transport_kind_grpc_asyncio(): - transport = CloudRedisAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_instances_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), ) - assert transport.kind == "grpc_asyncio" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = cloud_redis.ListInstancesResponse() + client.list_instances(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = cloud_redis.Instance() + client.get_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_auth_string_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + call.return_value = cloud_redis.InstanceAuthString() + client.get_instance_auth_string(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upgrade_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.upgrade_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.import_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.export_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_failover_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.failover_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reschedule_maintenance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.reschedule_maintenance(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + + +def test_transport_kind_grpc_asyncio(): + transport = CloudRedisAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instances_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instances(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + )) + await client.get_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_auth_string_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + )) + await client.get_instance_auth_string(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_upgrade_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.upgrade_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpgradeInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.import_instance(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ImportInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.export_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ExportInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_failover_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.failover_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.FailoverInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_instance(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest() + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_reschedule_maintenance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.reschedule_maintenance(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.RescheduleMaintenanceRequest() def test_transport_kind_rest(): From b1ef9744260f5e917915b8e939841ac997f19b9b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 16 Sep 2024 17:46:42 -0400 Subject: [PATCH 1184/1339] test: improve routing parameter assert (#2131) Co-authored-by: Victor Chudnovsky --- .../gapic-generator/gapic/schema/wrappers.py | 74 +++++++++++++++++++ .../gapic/%name_%version/%sub/test_macros.j2 | 7 +- .../unit/schema/wrappers/test_routing.py | 54 +++++++------- 3 files changed, 102 insertions(+), 33 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index fd75e8fd5ec0..473b56e7eb0a 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -30,6 +30,7 @@ import collections import copy import dataclasses +import functools import json import keyword import re @@ -1035,10 +1036,20 @@ def _to_regex(self, path_template: str) -> Pattern: """ return re.compile(f"^{self._convert_to_regex(path_template)}$") + # Use caching to avoid repeated computation + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2161): + # Use `@functools.cache` instead of `@functools.lru_cache` once python 3.8 is dropped. + # https://docs.python.org/3/library/functools.html#functools.cache + @functools.lru_cache(maxsize=None) def to_regex(self) -> Pattern: return self._to_regex(self.path_template) @property + # Use caching to avoid repeated computation + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2161): + # Use `@functools.cache` instead of `@functools.lru_cache` once python 3.8 is dropped. 
+ # https://docs.python.org/3/library/functools.html#functools.cache + @functools.lru_cache(maxsize=None) def key(self) -> Union[str, None]: if self.path_template == "": return self.field @@ -1067,6 +1078,69 @@ def try_parse_routing_rule(cls, routing_rule: routing_pb2.RoutingRule) -> Option params = [RoutingParameter(x.field, x.path_template) for x in params] return cls(params) + @classmethod + def resolve(cls, routing_rule: routing_pb2.RoutingRule, request: Union[dict, str]) -> dict: + """Resolves the routing header which should be sent along with the request. + The routing header is determined based on the given routing rule and request. + See the following link for more information on explicit routing headers: + https://google.aip.dev/client-libraries/4222#explicit-routing-headers-googleapirouting + + Args: + routing_rule(routing_pb2.RoutingRule): A collection of Routing Parameter specifications + defined by `routing_pb2.RoutingRule`. + See https://github.com/googleapis/googleapis/blob/cb39bdd75da491466f6c92bc73cd46b0fbd6ba9a/google/api/routing.proto#L391 + request(Union[dict, str]): The request for which the routine rule should be resolved. + The format can be either a dictionary or json string representing the request. + + Returns(dict): + A dictionary containing the resolved routing header to the sent along with the given request. + """ + + def _get_field(request, field_path: str): + segments = field_path.split(".") + + # Either json string or dictionary is supported + if isinstance(request, str): + current = json.loads(request) + else: + current = request + + # This is to cater for the case where the `field_path` contains a + # dot-separated path of field names leading to a field in a sub-message. 
+ for x in segments: + current = current.get(x, None) + # Break if the sub-message does not exist + if current is None: + break + return current + + header_params = {} + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2160): Move this logic to + # `google-api-core` so that the shared code can be used in both `wrappers.py` and GAPIC clients + # via Jinja templates. + for routing_param in routing_rule.routing_parameters: + request_field_value = _get_field(request, routing_param.field) + # Only resolve the header for routing parameter fields which are populated in the request + if request_field_value is not None: + # If there is a path_template for a given routing parameter field, the value of the field must match + # If multiple `routing_param`s describe the same key + # (via the `path_template` field or via the `field` field when + # `path_template` is not provided), the "last one wins" rule + # determines which parameter gets used. See https://google.aip.dev/client-libraries/4222. 
+ routing_parameter_key = routing_param.key + if routing_param.path_template: + routing_param_regex = routing_param.to_regex() + regex_match = routing_param_regex.match( + request_field_value + ) + if regex_match: + header_params[routing_parameter_key] = regex_match.group( + routing_parameter_key + ) + else: # No need to match + header_params[routing_parameter_key] = request_field_value + return header_params + @dataclasses.dataclass(frozen=True) class HttpRule: diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index d0c1c5c05563..ea146e0f42ba 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -414,12 +414,11 @@ def test_{{ method.name|snake_case }}_routing_parameters(): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] + _, args, kw = call.mock_calls[0] assert args[0] == request - _, _, kw = call.mock_calls[0] - # This test doesn't assert anything useful. 
- assert kw['metadata'] + expected_headers = {{ method.routing_rule.resolve(method.routing_rule, routing_param.sample_request) }} + assert gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw['metadata'] {% endfor %} {% endif %} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py index f93d6680a0a7..f4e6215bca41 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py @@ -14,6 +14,7 @@ from gapic.schema import wrappers +import json import proto import pytest @@ -23,31 +24,6 @@ class RoutingTestRequest(proto.Message): app_profile_id = proto.Field(proto.STRING, number=2) -def resolve(rule, request): - """This function performs dynamic header resolution, identical to what's in client.py.j2.""" - - def _get_field(request, field_path: str): - segments = field_path.split(".") - cur = request - for x in segments: - cur = getattr(cur, x) - return cur - - header_params = {} - for routing_param in rule.routing_parameters: - # This may raise exception (which we show to clients). 
- request_field_value = _get_field(request, routing_param.field) - if routing_param.path_template: - routing_param_regex = routing_param.to_regex() - regex_match = routing_param_regex.match(request_field_value) - if regex_match: - header_params[routing_param.key] = regex_match.group( - routing_param.key) - else: # No need to match - header_params[routing_param.key] = request_field_value - return header_params - - @pytest.mark.parametrize( "req, expected", [ @@ -63,7 +39,10 @@ def _get_field(request, field_path: str): def test_routing_rule_resolve_simple_extraction(req, expected): rule = wrappers.RoutingRule( [wrappers.RoutingParameter("app_profile_id", "")]) - assert resolve(rule, req) == expected + assert wrappers.RoutingRule.resolve( + rule, + RoutingTestRequest.to_dict(req) + ) == expected @pytest.mark.parametrize( @@ -82,7 +61,10 @@ def test_routing_rule_resolve_rename_extraction(req, expected): rule = wrappers.RoutingRule( [wrappers.RoutingParameter("app_profile_id", "{routing_id=**}")] ) - assert resolve(rule, req) == expected + assert wrappers.RoutingRule.resolve( + rule, + RoutingTestRequest.to_dict(req) + ) == expected @pytest.mark.parametrize( @@ -111,7 +93,10 @@ def test_routing_rule_resolve_field_match(req, expected): ), ] ) - assert resolve(rule, req) == expected + assert wrappers.RoutingRule.resolve( + rule, + RoutingTestRequest.to_dict(req) + ) == expected @pytest.mark.parametrize( @@ -135,6 +120,9 @@ def test_routing_rule_resolve_field_match(req, expected): wrappers.RoutingParameter( "table_name", "projects/*/{instance_id=instances/*}/**" ), + wrappers.RoutingParameter( + "doesnotexist", "projects/*/{instance_id=instances/*}/**" + ), ], RoutingTestRequest( table_name="projects/100/instances/200/tables/300"), @@ -144,7 +132,15 @@ def test_routing_rule_resolve_field_match(req, expected): ) def test_routing_rule_resolve(routing_parameters, req, expected): rule = wrappers.RoutingRule(routing_parameters) - got = resolve(rule, req) + got = 
wrappers.RoutingRule.resolve( + rule, RoutingTestRequest.to_dict(req) + ) + assert got == expected + + rule = wrappers.RoutingRule(routing_parameters) + got = wrappers.RoutingRule.resolve( + rule, json.dumps(RoutingTestRequest.to_dict(req)) + ) assert got == expected From 2ca283e8dfb98659554794818b14912ab4b70153 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 18 Sep 2024 12:21:48 -0400 Subject: [PATCH 1185/1339] fix: resolve issue where explicit routing metadata was not sent in async clients (#2133) Co-authored-by: Victor Chudnovsky --- .../%sub/services/%service/_client_macros.j2 | 40 +- .../%sub/services/%service/_shared_macros.j2 | 41 ++ .../%sub/services/%service/async_client.py.j2 | 15 +- .../%name_%version/%sub/test_%service.py.j2 | 2 + .../gapic/%name_%version/%sub/test_macros.j2 | 80 ++-- .../asset_v1/services/asset_service/client.py | 46 +-- .../unit/gapic/asset_v1/test_asset_service.py | 251 +++++++++---- .../services/iam_credentials/client.py | 8 +- .../credentials_v1/test_iam_credentials.py | 42 ++- .../eventarc_v1/services/eventarc/client.py | 36 +- .../unit/gapic/eventarc_v1/test_eventarc.py | 196 +++++++--- .../services/config_service_v2/client.py | 62 ++-- .../services/logging_service_v2/client.py | 4 +- .../services/metrics_service_v2/client.py | 10 +- .../logging_v2/test_config_service_v2.py | 349 +++++++++++++----- .../logging_v2/test_logging_service_v2.py | 50 ++- .../logging_v2/test_metrics_service_v2.py | 53 ++- .../redis_v1/services/cloud_redis/client.py | 22 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 119 ++++-- 19 files changed, 942 insertions(+), 484 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index e741d6d04766..7eeda037b79f 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -144,45 +144,7 @@ # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.{{ method.transport_safe_name|snake_case}}] - {% if method.explicit_routing %} - header_params = {} - {% if not method.client_streaming %} - {% for routing_param in method.routing_rule.routing_parameters %} - {% if routing_param.path_template %} {# Need to match. #} - - routing_param_regex = {{ routing_param.to_regex() }} - regex_match = routing_param_regex.match(request.{{ routing_param.field }}) - if regex_match and regex_match.group("{{ routing_param.key }}"): - header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") - - {% else %} - - if request.{{ routing_param.field }}: - header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} - - {% endif %} - {% endfor %} {# method.routing_rule.routing_parameters #} - {% endif %} {# if not method.client_streaming #} - - if header_params: - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(header_params), - ) - - {% elif method.field_headers %} {# implicit routing #} - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), - {% endif %} - {% endfor %} - )), - ) - {% endif %} {# method.explicit_routing #} - +{{ shared_macros.create_metadata(method) }} {{ shared_macros.add_api_version_header_to_metadata(service.version) }} {{ shared_macros.auto_populate_uuid4_fields(api, method) }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index b7b223caad3f..8fec390957e2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -55,6 +55,47 @@ except ImportError: # pragma: NO COVER {% endif %}{# service_version #} {% endmacro %} +{% macro create_metadata(method) %} + {% if method.explicit_routing %} + header_params = {} + {% if not method.client_streaming %} + {% for routing_param in method.routing_rule.routing_parameters %} + {% if routing_param.path_template %} {# Need to match. 
#} + + routing_param_regex = {{ routing_param.to_regex() }} + regex_match = routing_param_regex.match(request.{{ routing_param.field }}) + if regex_match and regex_match.group("{{ routing_param.key }}"): + header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") + + {% else %} + + if request.{{ routing_param.field }}: + header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} + + {% endif %} + {% endfor %} {# method.routing_rule.routing_parameters #} + {% endif %} {# if not method.client_streaming #} + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + {% elif method.field_headers %}{# implicit routing #} + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {% if not method.client_streaming %} + {% for field_header in method.field_headers %} + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), + {% endfor %}{# for field_header in method.field_headers #} + {% endif %}{# not method.client_streaming #} + )), + ) + {% endif %}{# method.explicit_routing #} +{% endmacro %}{# create_metadata #} + {% macro add_api_version_header_to_metadata(service_version) %} {# Add API Version to metadata as per https://github.com/aip-dev/google.aip.dev/pull/1331. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 36a40f4db076..11171569bdf3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -364,20 +364,7 @@ class {{ service.async_client_name }}: # and friendly error handling. rpc = self._client._transport._wrapped_methods[self._client._transport.{{ method.transport_safe_name|snake_case }}] - {% if method.field_headers %} - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - {% for field_header in method.field_headers %} - {% if not method.client_streaming %} - ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), - {% endif %} - {% endfor %} - )), - ) - {% endif %} - +{{ shared_macros.create_metadata(method) }} {{ shared_macros.add_api_version_header_to_metadata(service.version) }} {{ shared_macros.auto_populate_uuid4_fields(api, method) }} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index b159eaa8addb..fa9545d2a1a4 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1055,6 +1055,8 @@ def test_transport_adc(transport_class): {{ test_macros.transport_kind_test(**conf) }} {{ test_macros.empty_call_test(**conf) }} + +{{ test_macros.routing_parameter_test(**conf) }} {% endfor %} {% if 'grpc' in 
opts.transport %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index ea146e0f42ba..f3c20ea5dba1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -386,43 +386,6 @@ async def test_{{ method_name }}_async_from_dict(): await test_{{ method_name }}_async(request_type=dict) {% endif %}{# full_extended_lro #} -{% if method.explicit_routing %} -def test_{{ method.name|snake_case }}_routing_parameters(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - ) - - {% for routing_param in method.routing_rule.routing_parameters %} - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = {{ method.input.ident }}(**{{ routing_param.sample_request }}) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.{{ method.transport_safe_name|snake_case }}), - '__call__') as call: - {% if method.void %} - call.return_value = None - {% elif method.lro %} - call.return_value = operations_pb2.Operation(name='operations/op') - {% elif method.server_streaming %} - call.return_value = iter([{{ method.output.ident }}()]) - {% else %} - call.return_value = {{ method.output.ident }}() - {% endif %} - client.{{ method.safe_name|snake_case }}(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, kw = call.mock_calls[0] - assert args[0] == request - - expected_headers = {{ method.routing_rule.resolve(method.routing_rule, routing_param.sample_request) }} - assert gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw['metadata'] - {% endfor %} -{% endif %} - - {% if method.field_headers and not method.client_streaming and not method.explicit_routing %} def test_{{ method_name }}_field_headers(): client = {{ service.client_name }}( @@ -1797,14 +1760,13 @@ def test_{{ method_name }}_rest_no_http_options(): {% endwith %}{# method_name #} {% endmacro %} - {# This is a generic macro for testing method calls. Ideally this macro can be used to avoid duplication in Jinja templates. If this macro cannot be custimized for a specific method call test, consider creating a new macro with the name `method_call_test_` for the macro which supports a more customized method call. #} -{% macro method_call_test_generic(test_name, method, service, api, transport, request, is_async=False) %} +{% macro method_call_test_generic(test_name, method, service, api, transport, request_dict, is_async=False, routing_param=None) %} {% set transport_name = get_transport_name(transport, is_async) %} {% with method_name = (method.name + ("_unary" if method.operation_service else "")) | snake_case %} {% set async_method_prefix = "async " if is_async else "" %} @@ -1844,7 +1806,7 @@ def test_{{ method_name }}_rest_no_http_options(): {% endfor %} )) {% endif %}{# method.void #} - await client.{{ method_name }}(request={{ request }}) + await client.{{ method_name }}(request={{ request_dict }}) {% else %}{# if not is_async #} {% if method.void %} call.return_value = None @@ -1855,12 +1817,12 @@ def test_{{ method_name }}_rest_no_http_options(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} - client.{{ method_name }}(request={{ request }}) + client.{{ method_name }}(request={{ request_dict }}) {% endif %}{# is_async #} 
# Establish that the underlying gRPC stub method was called. call.assert_called() - _, args, _ = call.mock_calls[0] + _, args, {% if routing_param %}kw{% else %}_{% endif %} = call.mock_calls[0] {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} {% if method_settings is not none %} {% for auto_populated_field in method_settings.auto_populated_fields %} @@ -1871,11 +1833,17 @@ def test_{{ method_name }}_rest_no_http_options(): {% endfor %}{# for auto_populated_field in method_settings.auto_populated_fields #} {% endif %}{# if method_settings is not none #} {% endwith %}{# method_settings #} - {% if request %} - assert args[0] == {{ request }} + {% if request_dict %} + request_msg = {{ method.input.ident }}(**{{ request_dict }}) {% else %} - assert args[0] == {{ method.input.ident }}() - {% endif %}{# request #} + request_msg = {{ method.input.ident }}() + {% endif %}{# request_dict #} + assert args[0] == request_msg + + {% if routing_param %} + expected_headers = {{ method.routing_rule.resolve(method.routing_rule, routing_param.sample_request) }} + assert gapic_v1.routing_header.to_grpc_metadata(expected_headers) in kw['metadata'] + {% endif %} {% endwith %}{# method_name #} {% endmacro %} @@ -1910,7 +1878,7 @@ def test_transport_kind_{{ transport_name }}(): {% if not method.client_streaming %} # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
-{{ method_call_test_generic("empty_call", method, service, api, transport, request=None, is_async=is_async) }} +{{ method_call_test_generic("empty_call", method, service, api, transport, request_dict=None, is_async=is_async) }} {% endif %}{# not method.client_streaming #} {% endfor %}{# method in service.methods.values() #} {% endif %}{# 'rest' not in transport #} @@ -1919,3 +1887,21 @@ def test_transport_kind_{{ transport_name }}(): {% macro get_uuid4_re() -%} [a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12} {%- endmacro %}{# uuid_re #} + +{% macro routing_parameter_test(service, api, transport, is_async) %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2159): + Currently this macro only supports gRPC. It should be updated to support REST + transport as well. +#} +{% if 'rest' not in transport %} +{% for method in service.methods.values() %}{# method #} +{% if method.explicit_routing %} +{# Any value that is part of the HTTP/1.1 URI should be sent as #} +{# a field header. Set these to a non-empty value. 
#} +{% for routing_param in method.routing_rule.routing_parameters %} +{{ method_call_test_generic("routing_parameters_request_" + loop.index|string, method, service, api, transport, request_dict=routing_param.sample_request, is_async=is_async, routing_param=routing_param) }} +{% endfor %}{# routing_param in method.routing_rule.routing_parameters #} +{% endif %}{# method.explicit_routing #} +{% endfor %}{# method in service.methods.values() #} +{% endif %} +{% endmacro %}{# routing_parameter_test #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 54349918c711..64b52c14bb88 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -730,7 +730,7 @@ def sample_export_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -849,7 +849,7 @@ def sample_list_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -946,7 +946,7 @@ def sample_batch_get_assets_history(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.batch_get_assets_history] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1067,7 +1067,7 @@ def sample_create_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1176,7 +1176,7 @@ def sample_get_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1280,7 +1280,7 @@ def sample_list_feeds(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_feeds] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1393,7 +1393,7 @@ def sample_update_feed(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1487,7 +1487,7 @@ def sample_delete_feed(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.delete_feed] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1702,7 +1702,7 @@ def sample_search_all_resources(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.search_all_resources] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1892,7 +1892,7 @@ def sample_search_all_iam_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.search_all_iam_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1990,7 +1990,7 @@ def sample_analyze_iam_policy(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2097,7 +2097,7 @@ def sample_analyze_iam_policy_longrunning(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_iam_policy_longrunning] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2195,7 +2195,7 @@ def sample_analyze_move(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.analyze_move] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2291,7 +2291,7 @@ def sample_query_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.query_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2427,7 +2427,7 @@ def sample_create_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2532,7 +2532,7 @@ def sample_get_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2643,7 +2643,7 @@ def sample_list_saved_queries(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_saved_queries] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2770,7 +2770,7 @@ def sample_update_saved_query(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2866,7 +2866,7 @@ def sample_delete_saved_query(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_saved_query] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2947,7 +2947,7 @@ def sample_batch_get_effective_iam_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.batch_get_effective_iam_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3087,7 +3087,7 @@ def sample_analyze_org_policies(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_org_policies] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3238,7 +3238,7 @@ def sample_analyze_org_policy_governed_containers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_containers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3418,7 +3418,7 @@ def sample_analyze_org_policy_governed_assets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.analyze_org_policy_governed_assets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 268e14d432b0..200bcec7e389 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -953,7 +953,6 @@ async def test_export_assets_async(transport: str = 'grpc_asyncio', request_type async def test_export_assets_async_from_dict(): await test_export_assets_async(request_type=dict) - def test_export_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1183,7 +1182,6 @@ async def test_list_assets_async(transport: str = 'grpc_asyncio', request_type=a async def test_list_assets_async_from_dict(): await test_list_assets_async(request_type=dict) - def test_list_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1685,7 +1683,6 @@ async def test_batch_get_assets_history_async(transport: str = 'grpc_asyncio', r async def test_batch_get_assets_history_async_from_dict(): await test_batch_get_assets_history_async(request_type=dict) - def test_batch_get_assets_history_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1931,7 +1928,6 @@ async 
def test_create_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_create_feed_async_from_dict(): await test_create_feed_async(request_type=dict) - def test_create_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2257,7 +2253,6 @@ async def test_get_feed_async(transport: str = 'grpc_asyncio', request_type=asse async def test_get_feed_async_from_dict(): await test_get_feed_async(request_type=dict) - def test_get_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2563,7 +2558,6 @@ async def test_list_feeds_async(transport: str = 'grpc_asyncio', request_type=as async def test_list_feeds_async_from_dict(): await test_list_feeds_async(request_type=dict) - def test_list_feeds_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2887,7 +2881,6 @@ async def test_update_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_update_feed_async_from_dict(): await test_update_feed_async(request_type=dict) - def test_update_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3191,7 +3184,6 @@ async def test_delete_feed_async(transport: str = 'grpc_asyncio', request_type=a async def test_delete_feed_async_from_dict(): await test_delete_feed_async(request_type=dict) - def test_delete_feed_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3507,7 +3499,6 @@ async def test_search_all_resources_async(transport: str = 'grpc_asyncio', reque async def test_search_all_resources_async_from_dict(): await test_search_all_resources_async(request_type=dict) - def test_search_all_resources_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4039,7 +4030,6 @@ async def test_search_all_iam_policies_async(transport: str = 'grpc_asyncio', re async 
def test_search_all_iam_policies_async_from_dict(): await test_search_all_iam_policies_async(request_type=dict) - def test_search_all_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4555,7 +4545,6 @@ async def test_analyze_iam_policy_async(transport: str = 'grpc_asyncio', request async def test_analyze_iam_policy_async_from_dict(): await test_analyze_iam_policy_async(request_type=dict) - def test_analyze_iam_policy_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4789,7 +4778,6 @@ async def test_analyze_iam_policy_longrunning_async(transport: str = 'grpc_async async def test_analyze_iam_policy_longrunning_async_from_dict(): await test_analyze_iam_policy_longrunning_async(request_type=dict) - def test_analyze_iam_policy_longrunning_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5015,7 +5003,6 @@ async def test_analyze_move_async(transport: str = 'grpc_asyncio', request_type= async def test_analyze_move_async_from_dict(): await test_analyze_move_async(request_type=dict) - def test_analyze_move_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5253,7 +5240,6 @@ async def test_query_assets_async(transport: str = 'grpc_asyncio', request_type= async def test_query_assets_async_from_dict(): await test_query_assets_async(request_type=dict) - def test_query_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5495,7 +5481,6 @@ async def test_create_saved_query_async(transport: str = 'grpc_asyncio', request async def test_create_saved_query_async_from_dict(): await test_create_saved_query_async(request_type=dict) - def test_create_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5837,7 +5822,6 @@ async def 
test_get_saved_query_async(transport: str = 'grpc_asyncio', request_ty async def test_get_saved_query_async_from_dict(): await test_get_saved_query_async(request_type=dict) - def test_get_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6151,7 +6135,6 @@ async def test_list_saved_queries_async(transport: str = 'grpc_asyncio', request async def test_list_saved_queries_async_from_dict(): await test_list_saved_queries_async(request_type=dict) - def test_list_saved_queries_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6667,7 +6650,6 @@ async def test_update_saved_query_async(transport: str = 'grpc_asyncio', request async def test_update_saved_query_async_from_dict(): await test_update_saved_query_async(request_type=dict) - def test_update_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6981,7 +6963,6 @@ async def test_delete_saved_query_async(transport: str = 'grpc_asyncio', request async def test_delete_saved_query_async_from_dict(): await test_delete_saved_query_async(request_type=dict) - def test_delete_saved_query_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7287,7 +7268,6 @@ async def test_batch_get_effective_iam_policies_async(transport: str = 'grpc_asy async def test_batch_get_effective_iam_policies_async_from_dict(): await test_batch_get_effective_iam_policies_async(request_type=dict) - def test_batch_get_effective_iam_policies_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7521,7 +7501,6 @@ async def test_analyze_org_policies_async(transport: str = 'grpc_asyncio', reque async def test_analyze_org_policies_async_from_dict(): await test_analyze_org_policies_async(request_type=dict) - def test_analyze_org_policies_field_headers(): client = AssetServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), @@ -8053,7 +8032,6 @@ async def test_analyze_org_policy_governed_containers_async(transport: str = 'gr async def test_analyze_org_policy_governed_containers_async_from_dict(): await test_analyze_org_policy_governed_containers_async(request_type=dict) - def test_analyze_org_policy_governed_containers_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8585,7 +8563,6 @@ async def test_analyze_org_policy_governed_assets_async(transport: str = 'grpc_a async def test_analyze_org_policy_governed_assets_async_from_dict(): await test_analyze_org_policy_governed_assets_async(request_type=dict) - def test_analyze_org_policy_governed_assets_field_headers(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15449,7 +15426,10 @@ def test_export_assets_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() + request_msg = asset_service.ExportAssetsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15468,7 +15448,10 @@ def test_list_assets_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() + request_msg = asset_service.ListAssetsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15487,7 +15470,10 @@ def test_batch_get_assets_history_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + request_msg = asset_service.BatchGetAssetsHistoryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15506,7 +15492,10 @@ def test_create_feed_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() + request_msg = asset_service.CreateFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15525,7 +15514,10 @@ def test_get_feed_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() + request_msg = asset_service.GetFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15544,7 +15536,10 @@ def test_list_feeds_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() + request_msg = asset_service.ListFeedsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15563,7 +15558,10 @@ def test_update_feed_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() + request_msg = asset_service.UpdateFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15582,7 +15580,10 @@ def test_delete_feed_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() + request_msg = asset_service.DeleteFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15601,7 +15602,10 @@ def test_search_all_resources_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() + request_msg = asset_service.SearchAllResourcesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15620,7 +15624,10 @@ def test_search_all_iam_policies_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() + request_msg = asset_service.SearchAllIamPoliciesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15639,7 +15646,10 @@ def test_analyze_iam_policy_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() + request_msg = asset_service.AnalyzeIamPolicyRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15658,7 +15668,10 @@ def test_analyze_iam_policy_longrunning_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15677,7 +15690,10 @@ def test_analyze_move_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() + request_msg = asset_service.AnalyzeMoveRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15696,7 +15712,10 @@ def test_query_assets_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() + request_msg = asset_service.QueryAssetsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15715,7 +15734,10 @@ def test_create_saved_query_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() + request_msg = asset_service.CreateSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15734,7 +15756,10 @@ def test_get_saved_query_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() + request_msg = asset_service.GetSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15753,7 +15778,10 @@ def test_list_saved_queries_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() + request_msg = asset_service.ListSavedQueriesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15772,7 +15800,10 @@ def test_update_saved_query_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() + request_msg = asset_service.UpdateSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15791,7 +15822,10 @@ def test_delete_saved_query_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() + request_msg = asset_service.DeleteSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15810,7 +15844,10 @@ def test_batch_get_effective_iam_policies_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15829,7 +15866,10 @@ def test_analyze_org_policies_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + request_msg = asset_service.AnalyzeOrgPoliciesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15848,7 +15888,10 @@ def test_analyze_org_policy_governed_containers_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -15867,7 +15910,9 @@ def test_analyze_org_policy_governed_assets_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -15898,7 +15943,10 @@ async def test_export_assets_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ExportAssetsRequest() + request_msg = asset_service.ExportAssetsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15921,7 +15969,10 @@ async def test_list_assets_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListAssetsRequest() + request_msg = asset_service.ListAssetsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15943,7 +15994,10 @@ async def test_batch_get_assets_history_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetAssetsHistoryRequest() + request_msg = asset_service.BatchGetAssetsHistoryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -15970,7 +16024,10 @@ async def test_create_feed_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateFeedRequest() + request_msg = asset_service.CreateFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -15997,7 +16054,10 @@ async def test_get_feed_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetFeedRequest() + request_msg = asset_service.GetFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16019,7 +16079,10 @@ async def test_list_feeds_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListFeedsRequest() + request_msg = asset_service.ListFeedsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16046,7 +16109,10 @@ async def test_update_feed_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateFeedRequest() + request_msg = asset_service.UpdateFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16067,7 +16133,10 @@ async def test_delete_feed_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteFeedRequest() + request_msg = asset_service.DeleteFeedRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16090,7 +16159,10 @@ async def test_search_all_resources_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllResourcesRequest() + request_msg = asset_service.SearchAllResourcesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16113,7 +16185,10 @@ async def test_search_all_iam_policies_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.SearchAllIamPoliciesRequest() + request_msg = asset_service.SearchAllIamPoliciesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16136,7 +16211,10 @@ async def test_analyze_iam_policy_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyRequest() + request_msg = asset_service.AnalyzeIamPolicyRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16159,7 +16237,10 @@ async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeIamPolicyLongrunningRequest() + request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16181,7 +16262,10 @@ async def test_analyze_move_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeMoveRequest() + request_msg = asset_service.AnalyzeMoveRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16205,7 +16289,10 @@ async def test_query_assets_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.QueryAssetsRequest() + request_msg = asset_service.QueryAssetsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16231,7 +16318,10 @@ async def test_create_saved_query_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.CreateSavedQueryRequest() + request_msg = asset_service.CreateSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16257,7 +16347,10 @@ async def test_get_saved_query_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.GetSavedQueryRequest() + request_msg = asset_service.GetSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16280,7 +16373,10 @@ async def test_list_saved_queries_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.ListSavedQueriesRequest() + request_msg = asset_service.ListSavedQueriesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16306,7 +16402,10 @@ async def test_update_saved_query_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.UpdateSavedQueryRequest() + request_msg = asset_service.UpdateSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16327,7 +16426,10 @@ async def test_delete_saved_query_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.DeleteSavedQueryRequest() + request_msg = asset_service.DeleteSavedQueryRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16349,7 +16451,10 @@ async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.BatchGetEffectiveIamPoliciesRequest() + request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16372,7 +16477,10 @@ async def test_analyze_org_policies_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPoliciesRequest() + request_msg = asset_service.AnalyzeOrgPoliciesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16395,7 +16503,10 @@ async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -16418,7 +16529,9 @@ async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + assert args[0] == request_msg def test_transport_kind_rest(): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index b38845b359fb..b9597ea22664 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -714,7 +714,7 @@ def sample_generate_access_token(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.generate_access_token] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -862,7 +862,7 @@ def sample_generate_id_token(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.generate_id_token] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -996,7 +996,7 @@ def sample_sign_blob(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.sign_blob] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1133,7 +1133,7 @@ def sample_sign_jwt(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.sign_jwt] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index ee56784e8616..553259e95062 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -937,7 +937,6 @@ async def test_generate_access_token_async(transport: str = 'grpc_asyncio', requ async def test_generate_access_token_async_from_dict(): await test_generate_access_token_async(request_type=dict) - def test_generate_access_token_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1275,7 +1274,6 @@ async def test_generate_id_token_async(transport: str = 'grpc_asyncio', request_ async def test_generate_id_token_async_from_dict(): await test_generate_id_token_async(request_type=dict) - def test_generate_id_token_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1619,7 +1617,6 @@ async def test_sign_blob_async(transport: str = 'grpc_asyncio', request_type=com async def test_sign_blob_async_from_dict(): await test_sign_blob_async(request_type=dict) - def test_sign_blob_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1955,7 +1952,6 
@@ async def test_sign_jwt_async(transport: str = 'grpc_asyncio', request_type=comm async def test_sign_jwt_async_from_dict(): await test_sign_jwt_async(request_type=dict) - def test_sign_jwt_field_headers(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3344,7 +3340,10 @@ def test_generate_access_token_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() + request_msg = common.GenerateAccessTokenRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3363,7 +3362,10 @@ def test_generate_id_token_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() + request_msg = common.GenerateIdTokenRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3382,7 +3384,10 @@ def test_sign_blob_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() + request_msg = common.SignBlobRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3401,7 +3406,9 @@ def test_sign_jwt_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() + request_msg = common.SignJwtRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -3432,7 +3439,10 @@ async def test_generate_access_token_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateAccessTokenRequest() + request_msg = common.GenerateAccessTokenRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3455,7 +3465,10 @@ async def test_generate_id_token_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.GenerateIdTokenRequest() + request_msg = common.GenerateIdTokenRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3479,7 +3492,10 @@ async def test_sign_blob_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.SignBlobRequest() + request_msg = common.SignBlobRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3503,7 +3519,9 @@ async def test_sign_jwt_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == common.SignJwtRequest() + request_msg = common.SignJwtRequest() + + assert args[0] == request_msg def test_transport_kind_rest(): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index ad080aa3bad7..53e54c3e04b5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -770,7 +770,7 @@ def sample_get_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -875,7 +875,7 @@ def sample_list_triggers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_triggers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1024,7 +1024,7 @@ def sample_create_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1162,7 +1162,7 @@ def sample_update_trigger(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1291,7 +1291,7 @@ def sample_delete_trigger(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_trigger] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1407,7 +1407,7 @@ def sample_get_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_channel] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1512,7 +1512,7 @@ def sample_list_channels(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_channels] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1661,7 +1661,7 @@ def sample_create_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_channel_] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1791,7 +1791,7 @@ def sample_update_channel(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_channel] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1912,7 +1912,7 @@ def sample_delete_channel(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_channel] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2022,7 +2022,7 @@ def sample_get_provider(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_provider] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2127,7 +2127,7 @@ def sample_list_providers(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_providers] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2245,7 +2245,7 @@ def sample_get_channel_connection(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_channel_connection] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2351,7 +2351,7 @@ def sample_list_channel_connections(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.list_channel_connections] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2500,7 +2500,7 @@ def sample_create_channel_connection(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_channel_connection] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2619,7 +2619,7 @@ def sample_delete_channel_connection(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_channel_connection] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2735,7 +2735,7 @@ def sample_get_google_channel_config(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_google_channel_config] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2856,7 +2856,7 @@ def sample_update_google_channel_config(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_google_channel_config] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 54f7efa8902c..3770b34748d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -973,7 +973,6 @@ async def test_get_trigger_async(transport: str = 'grpc_asyncio', request_type=e async def test_get_trigger_async_from_dict(): await test_get_trigger_async(request_type=dict) - def test_get_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1293,7 +1292,6 @@ async def test_list_triggers_async(transport: str = 'grpc_asyncio', request_type async def test_list_triggers_async_from_dict(): await test_list_triggers_async(request_type=dict) - def test_list_triggers_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1807,7 +1805,6 @@ async def test_create_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_create_trigger_async_from_dict(): await test_create_trigger_async(request_type=dict) - def test_create_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2143,7 +2140,6 @@ async def test_update_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_update_trigger_async_from_dict(): await test_update_trigger_async(request_type=dict) - def test_update_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2483,7 +2479,6 @@ async def test_delete_trigger_async(transport: str = 'grpc_asyncio', request_typ async def test_delete_trigger_async_from_dict(): await 
test_delete_trigger_async(request_type=dict) - def test_delete_trigger_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2826,7 +2821,6 @@ async def test_get_channel_async(transport: str = 'grpc_asyncio', request_type=e async def test_get_channel_async_from_dict(): await test_get_channel_async(request_type=dict) - def test_get_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3144,7 +3138,6 @@ async def test_list_channels_async(transport: str = 'grpc_asyncio', request_type async def test_list_channels_async_from_dict(): await test_list_channels_async(request_type=dict) - def test_list_channels_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3658,7 +3651,6 @@ async def test_create_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_create_channel_async_from_dict(): await test_create_channel_async(request_type=dict) - def test_create_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3994,7 +3986,6 @@ async def test_update_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_update_channel_async_from_dict(): await test_update_channel_async(request_type=dict) - def test_update_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4322,7 +4313,6 @@ async def test_delete_channel_async(transport: str = 'grpc_asyncio', request_typ async def test_delete_channel_async_from_dict(): await test_delete_channel_async(request_type=dict) - def test_delete_channel_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4638,7 +4628,6 @@ async def test_get_provider_async(transport: str = 'grpc_asyncio', request_type= async def test_get_provider_async_from_dict(): await test_get_provider_async(request_type=dict) - def 
test_get_provider_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4958,7 +4947,6 @@ async def test_list_providers_async(transport: str = 'grpc_asyncio', request_typ async def test_list_providers_async_from_dict(): await test_list_providers_async(request_type=dict) - def test_list_providers_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5476,7 +5464,6 @@ async def test_get_channel_connection_async(transport: str = 'grpc_asyncio', req async def test_get_channel_connection_async_from_dict(): await test_get_channel_connection_async(request_type=dict) - def test_get_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5792,7 +5779,6 @@ async def test_list_channel_connections_async(transport: str = 'grpc_asyncio', r async def test_list_channel_connections_async_from_dict(): await test_list_channel_connections_async(request_type=dict) - def test_list_channel_connections_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6306,7 +6292,6 @@ async def test_create_channel_connection_async(transport: str = 'grpc_asyncio', async def test_create_channel_connection_async_from_dict(): await test_create_channel_connection_async(request_type=dict) - def test_create_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6644,7 +6629,6 @@ async def test_delete_channel_connection_async(transport: str = 'grpc_asyncio', async def test_delete_channel_connection_async_from_dict(): await test_delete_channel_connection_async(request_type=dict) - def test_delete_channel_connection_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6960,7 +6944,6 @@ async def test_get_google_channel_config_async(transport: str = 'grpc_asyncio', async def 
test_get_google_channel_config_async_from_dict(): await test_get_google_channel_config_async(request_type=dict) - def test_get_google_channel_config_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7272,7 +7255,6 @@ async def test_update_google_channel_config_async(transport: str = 'grpc_asyncio async def test_update_google_channel_config_async_from_dict(): await test_update_google_channel_config_async(request_type=dict) - def test_update_google_channel_config_field_headers(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13086,7 +13068,10 @@ def test_get_trigger_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13105,7 +13090,10 @@ def test_list_triggers_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13124,7 +13112,10 @@ def test_create_trigger_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -13143,7 +13134,10 @@ def test_update_trigger_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13162,7 +13156,10 @@ def test_delete_trigger_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13181,7 +13178,10 @@ def test_get_channel_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13200,7 +13200,10 @@ def test_list_channels_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13219,7 +13222,10 @@ def test_create_channel_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13238,7 +13244,10 @@ def test_update_channel_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13257,7 +13266,10 @@ def test_delete_channel_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13276,7 +13288,10 @@ def test_get_provider_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() + request_msg = eventarc.GetProviderRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13295,7 +13310,10 @@ def test_list_providers_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() + request_msg = eventarc.ListProvidersRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13314,7 +13332,10 @@ def test_get_channel_connection_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() + request_msg = eventarc.GetChannelConnectionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13333,7 +13354,10 @@ def test_list_channel_connections_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() + request_msg = eventarc.ListChannelConnectionsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13352,7 +13376,10 @@ def test_create_channel_connection_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13371,7 +13398,10 @@ def test_delete_channel_connection_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13390,7 +13420,10 @@ def test_get_google_channel_config_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13409,7 +13442,9 @@ def test_update_google_channel_config_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -13444,7 +13479,10 @@ async def test_get_trigger_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetTriggerRequest() + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13468,7 +13506,10 @@ async def test_list_triggers_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListTriggersRequest() + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13491,7 +13532,10 @@ async def test_create_trigger_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateTriggerRequest() + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13514,7 +13558,10 @@ async def test_update_trigger_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateTriggerRequest() + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13537,7 +13584,10 @@ async def test_delete_trigger_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteTriggerRequest() + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13565,7 +13615,10 @@ async def test_get_channel_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelRequest() + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13589,7 +13642,10 @@ async def test_list_channels_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelsRequest() + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13612,7 +13668,10 @@ async def test_create_channel_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelRequest() + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13635,7 +13694,10 @@ async def test_update_channel_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateChannelRequest() + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13658,7 +13720,10 @@ async def test_delete_channel_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelRequest() + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13682,7 +13747,10 @@ async def test_get_provider_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetProviderRequest() + request_msg = eventarc.GetProviderRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13706,7 +13774,10 @@ async def test_list_providers_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListProvidersRequest() + request_msg = eventarc.ListProvidersRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13732,7 +13803,10 @@ async def test_get_channel_connection_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetChannelConnectionRequest() + request_msg = eventarc.GetChannelConnectionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13756,7 +13830,10 @@ async def test_list_channel_connections_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.ListChannelConnectionsRequest() + request_msg = eventarc.ListChannelConnectionsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13779,7 +13856,10 @@ async def test_create_channel_connection_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.CreateChannelConnectionRequest() + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13802,7 +13882,10 @@ async def test_delete_channel_connection_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.DeleteChannelConnectionRequest() + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13826,7 +13909,10 @@ async def test_get_google_channel_config_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.GetGoogleChannelConfigRequest() + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -13850,7 +13936,9 @@ async def test_update_google_channel_config_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == eventarc.UpdateGoogleChannelConfigRequest() + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg def test_transport_kind_rest(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 92a2df84cf4b..9ce55ae44f6a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -732,7 +732,7 @@ def sample_list_buckets(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_buckets] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -825,7 +825,7 @@ def sample_get_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -918,7 +918,7 @@ def sample_create_bucket_async(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_bucket_async] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1021,7 +1021,7 @@ def sample_update_bucket_async(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_bucket_async] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1114,7 +1114,7 @@ def sample_create_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1202,7 +1202,7 @@ def sample_update_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1280,7 +1280,7 @@ def sample_delete_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1352,7 +1352,7 @@ def sample_undelete_bucket(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1457,7 +1457,7 @@ def sample_list_views(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.list_views] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1550,7 +1550,7 @@ def sample_get_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1634,7 +1634,7 @@ def sample_create_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1720,7 +1720,7 @@ def sample_update_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1796,7 +1796,7 @@ def sample_delete_view(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_view] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1904,7 +1904,7 @@ def sample_list_sinks(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.list_sinks] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2033,7 +2033,7 @@ def sample_get_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2170,7 +2170,7 @@ def sample_create_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2334,7 +2334,7 @@ def sample_update_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2438,7 +2438,7 @@ def sample_delete_sink(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_sink] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2573,7 +2573,7 @@ def sample_create_link(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.create_link] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2701,7 +2701,7 @@ def sample_delete_link(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_link] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2819,7 +2819,7 @@ def sample_list_links(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_links] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2935,7 +2935,7 @@ def sample_get_link(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_link] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3047,7 +3047,7 @@ def sample_list_exclusions(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_exclusions] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3174,7 +3174,7 @@ def sample_get_exclusion(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.get_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3310,7 +3310,7 @@ def sample_create_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3460,7 +3460,7 @@ def sample_update_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3563,7 +3563,7 @@ def sample_delete_exclusion(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3666,7 +3666,7 @@ def sample_get_cmek_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3777,7 +3777,7 @@ def sample_update_cmek_settings(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -3911,7 +3911,7 @@ def sample_get_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -4055,7 +4055,7 @@ def sample_update_settings(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_settings] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 9d085d369e1e..083eb997b046 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -654,7 +654,7 @@ def sample_delete_log(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_log] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1196,7 +1196,7 @@ def sample_list_logs(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_logs] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 1fff39e085b2..41a23585681b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -657,7 +657,7 @@ def sample_list_log_metrics(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -782,7 +782,7 @@ def sample_get_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -915,7 +915,7 @@ def sample_create_log_metric(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.create_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1047,7 +1047,7 @@ def sample_update_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1142,7 +1142,7 @@ def sample_delete_log_metric(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index e1709e17ed5f..bb77f848fbc6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -926,7 +926,6 @@ async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type= async def test_list_buckets_async_from_dict(): await test_list_buckets_async(request_type=dict) - def test_list_buckets_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1456,7 +1455,6 @@ async def test_get_bucket_async(transport: str = 'grpc_asyncio', 
request_type=lo async def test_get_bucket_async_from_dict(): await test_get_bucket_async(request_type=dict) - def test_get_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1692,7 +1690,6 @@ async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_create_bucket_async_async_from_dict(): await test_create_bucket_async_async(request_type=dict) - def test_create_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1926,7 +1923,6 @@ async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', reques async def test_update_bucket_async_async_from_dict(): await test_update_bucket_async_async(request_type=dict) - def test_update_bucket_async_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2180,7 +2176,6 @@ async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_create_bucket_async_from_dict(): await test_create_bucket_async(request_type=dict) - def test_create_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2432,7 +2427,6 @@ async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_update_bucket_async_from_dict(): await test_update_bucket_async(request_type=dict) - def test_update_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2654,7 +2648,6 @@ async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type async def test_delete_bucket_async_from_dict(): await test_delete_bucket_async(request_type=dict) - def test_delete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2876,7 +2869,6 @@ async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_ty 
async def test_undelete_bucket_async_from_dict(): await test_undelete_bucket_async(request_type=dict) - def test_undelete_bucket_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3106,7 +3098,6 @@ async def test_list_views_async(transport: str = 'grpc_asyncio', request_type=lo async def test_list_views_async_from_dict(): await test_list_views_async(request_type=dict) - def test_list_views_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3620,7 +3611,6 @@ async def test_get_view_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_view_async_from_dict(): await test_get_view_async(request_type=dict) - def test_get_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -3858,7 +3848,6 @@ async def test_create_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_view_async_from_dict(): await test_create_view_async(request_type=dict) - def test_create_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4094,7 +4083,6 @@ async def test_update_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_update_view_async_from_dict(): await test_update_view_async(request_type=dict) - def test_update_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4316,7 +4304,6 @@ async def test_delete_view_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_view_async_from_dict(): await test_delete_view_async(request_type=dict) - def test_delete_view_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -4546,7 +4533,6 @@ async def test_list_sinks_async(transport: str = 'grpc_asyncio', request_type=lo async def test_list_sinks_async_from_dict(): await 
test_list_sinks_async(request_type=dict) - def test_list_sinks_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5080,7 +5066,6 @@ async def test_get_sink_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_sink_async_from_dict(): await test_get_sink_async(request_type=dict) - def test_get_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5418,7 +5403,6 @@ async def test_create_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_sink_async_from_dict(): await test_create_sink_async(request_type=dict) - def test_create_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -5766,7 +5750,6 @@ async def test_update_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_update_sink_async_from_dict(): await test_update_sink_async(request_type=dict) - def test_update_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6090,7 +6073,6 @@ async def test_delete_sink_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_sink_async_from_dict(): await test_delete_sink_async(request_type=dict) - def test_delete_sink_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6408,7 +6390,6 @@ async def test_create_link_async(transport: str = 'grpc_asyncio', request_type=l async def test_create_link_async_from_dict(): await test_create_link_async(request_type=dict) - def test_create_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -6746,7 +6727,6 @@ async def test_delete_link_async(transport: str = 'grpc_asyncio', request_type=l async def test_delete_link_async_from_dict(): await test_delete_link_async(request_type=dict) - def test_delete_link_field_headers(): client = 
ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7060,7 +7040,6 @@ async def test_list_links_async(transport: str = 'grpc_asyncio', request_type=lo async def test_list_links_async_from_dict(): await test_list_links_async(request_type=dict) - def test_list_links_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7574,7 +7553,6 @@ async def test_get_link_async(transport: str = 'grpc_asyncio', request_type=logg async def test_get_link_async_from_dict(): await test_get_link_async(request_type=dict) - def test_get_link_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -7886,7 +7864,6 @@ async def test_list_exclusions_async(transport: str = 'grpc_asyncio', request_ty async def test_list_exclusions_async_from_dict(): await test_list_exclusions_async(request_type=dict) - def test_list_exclusions_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8404,7 +8381,6 @@ async def test_get_exclusion_async(transport: str = 'grpc_asyncio', request_type async def test_get_exclusion_async_from_dict(): await test_get_exclusion_async(request_type=dict) - def test_get_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -8726,7 +8702,6 @@ async def test_create_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_create_exclusion_async_from_dict(): await test_create_exclusion_async(request_type=dict) - def test_create_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9058,7 +9033,6 @@ async def test_update_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_update_exclusion_async_from_dict(): await test_update_exclusion_async(request_type=dict) - def test_update_exclusion_field_headers(): client = ConfigServiceV2Client( 
credentials=ga_credentials.AnonymousCredentials(), @@ -9382,7 +9356,6 @@ async def test_delete_exclusion_async(transport: str = 'grpc_asyncio', request_t async def test_delete_exclusion_async_from_dict(): await test_delete_exclusion_async(request_type=dict) - def test_delete_exclusion_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9704,7 +9677,6 @@ async def test_get_cmek_settings_async(transport: str = 'grpc_asyncio', request_ async def test_get_cmek_settings_async_from_dict(): await test_get_cmek_settings_async(request_type=dict) - def test_get_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -9944,7 +9916,6 @@ async def test_update_cmek_settings_async(transport: str = 'grpc_asyncio', reque async def test_update_cmek_settings_async_from_dict(): await test_update_cmek_settings_async(request_type=dict) - def test_update_cmek_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10188,7 +10159,6 @@ async def test_get_settings_async(transport: str = 'grpc_asyncio', request_type= async def test_get_settings_async_from_dict(): await test_get_settings_async(request_type=dict) - def test_get_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10514,7 +10484,6 @@ async def test_update_settings_async(transport: str = 'grpc_asyncio', request_ty async def test_update_settings_async_from_dict(): await test_update_settings_async(request_type=dict) - def test_update_settings_field_headers(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -10955,7 +10924,10 @@ def test_list_buckets_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -10974,7 +10946,10 @@ def test_get_bucket_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request_msg = logging_config.GetBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -10993,7 +10968,10 @@ def test_create_bucket_async_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11012,7 +10990,10 @@ def test_update_bucket_async_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11031,7 +11012,10 @@ def test_create_bucket_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11050,7 +11034,10 @@ def test_update_bucket_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11069,7 +11056,10 @@ def test_delete_bucket_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request_msg = logging_config.DeleteBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11088,7 +11078,10 @@ def test_undelete_bucket_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11107,7 +11100,10 @@ def test_list_views_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11126,7 +11122,10 @@ def test_get_view_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11145,7 +11144,10 @@ def test_create_view_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11164,7 +11166,10 @@ def test_update_view_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11183,7 +11188,10 @@ def test_delete_view_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11202,7 +11210,10 @@ def test_list_sinks_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11221,7 +11232,10 @@ def test_get_sink_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11240,7 +11254,10 @@ def test_create_sink_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11259,7 +11276,10 @@ def test_update_sink_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11278,7 +11298,10 @@ def test_delete_sink_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11297,7 +11320,10 @@ def test_create_link_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11316,7 +11342,10 @@ def test_delete_link_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11335,7 +11364,10 @@ def test_list_links_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11354,7 +11386,10 @@ def test_get_link_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11373,7 +11408,10 @@ def test_list_exclusions_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11392,7 +11430,10 @@ def test_get_exclusion_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11411,7 +11452,10 @@ def test_create_exclusion_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11430,7 +11474,10 @@ def test_update_exclusion_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11449,7 +11496,10 @@ def test_delete_exclusion_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request_msg = logging_config.DeleteExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11468,7 +11518,10 @@ def test_get_cmek_settings_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request_msg = logging_config.GetCmekSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11487,7 +11540,10 @@ def test_update_cmek_settings_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request_msg = logging_config.UpdateCmekSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11506,7 +11562,10 @@ def test_get_settings_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request_msg = logging_config.GetSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11525,7 +11584,10 @@ def test_update_settings_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11544,7 +11606,9 @@ def test_copy_log_entries_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -11575,7 +11639,10 @@ async def test_list_buckets_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListBucketsRequest() + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11604,7 +11671,10 @@ async def test_get_bucket_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetBucketRequest() + request_msg = logging_config.GetBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11627,7 +11697,10 @@ async def test_create_bucket_async_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11650,7 +11723,10 @@ async def test_update_bucket_async_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11679,7 +11755,10 @@ async def test_create_bucket_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateBucketRequest() + request_msg = logging_config.CreateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11708,7 +11787,10 @@ async def test_update_bucket_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateBucketRequest() + request_msg = logging_config.UpdateBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11729,7 +11811,10 @@ async def test_delete_bucket_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteBucketRequest() + request_msg = logging_config.DeleteBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11750,7 +11835,10 @@ async def test_undelete_bucket_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UndeleteBucketRequest() + request_msg = logging_config.UndeleteBucketRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11773,7 +11861,10 @@ async def test_list_views_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListViewsRequest() + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11798,7 +11889,10 @@ async def test_get_view_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetViewRequest() + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11823,7 +11917,10 @@ async def test_create_view_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateViewRequest() + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11848,7 +11945,10 @@ async def test_update_view_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateViewRequest() + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11869,7 +11969,10 @@ async def test_delete_view_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteViewRequest() + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11892,7 +11995,10 @@ async def test_list_sinks_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListSinksRequest() + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11922,7 +12028,10 @@ async def test_get_sink_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSinkRequest() + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11952,7 +12061,10 @@ async def test_create_sink_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateSinkRequest() + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -11982,7 +12094,10 @@ async def test_update_sink_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSinkRequest() + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12003,7 +12118,10 @@ async def test_delete_sink_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteSinkRequest() + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12026,7 +12144,10 @@ async def test_create_link_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateLinkRequest() + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12049,7 +12170,10 @@ async def test_delete_link_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteLinkRequest() + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12072,7 +12196,10 @@ async def test_list_links_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListLinksRequest() + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12097,7 +12224,10 @@ async def test_get_link_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetLinkRequest() + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12120,7 +12250,10 @@ async def test_list_exclusions_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.ListExclusionsRequest() + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12146,7 +12279,10 @@ async def test_get_exclusion_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetExclusionRequest() + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12172,7 +12308,10 @@ async def test_create_exclusion_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CreateExclusionRequest() + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12198,7 +12337,10 @@ async def test_update_exclusion_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateExclusionRequest() + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12219,7 +12361,10 @@ async def test_delete_exclusion_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.DeleteExclusionRequest() + request_msg = logging_config.DeleteExclusionRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12245,7 +12390,10 @@ async def test_get_cmek_settings_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetCmekSettingsRequest() + request_msg = logging_config.GetCmekSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12271,7 +12419,10 @@ async def test_update_cmek_settings_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateCmekSettingsRequest() + request_msg = logging_config.UpdateCmekSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12298,7 +12449,10 @@ async def test_get_settings_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.GetSettingsRequest() + request_msg = logging_config.GetSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12325,7 +12479,10 @@ async def test_update_settings_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.UpdateSettingsRequest() + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -12348,7 +12505,9 @@ async def test_copy_log_entries_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_config.CopyLogEntriesRequest() + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 72d12df2f5ad..dc5a94a1efa5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -920,7 +920,6 @@ async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=lo async def test_delete_log_async_from_dict(): await test_delete_log_async(request_type=dict) - def test_delete_log_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2327,7 +2326,6 @@ async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=log async def test_list_logs_async_from_dict(): await test_list_logs_async(request_type=dict) - def test_list_logs_field_headers(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2914,7 +2912,10 @@ def test_delete_log_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2933,7 +2934,10 @@ def test_write_log_entries_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2952,7 +2956,10 @@ def test_list_log_entries_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2971,7 +2978,10 @@ def test_list_monitored_resource_descriptors_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2990,7 +3000,9 @@ def test_list_logs_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -3019,7 +3031,10 @@ async def test_delete_log_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.DeleteLogRequest() + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3041,7 +3056,10 @@ async def test_write_log_entries_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.WriteLogEntriesRequest() + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3064,7 +3082,10 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogEntriesRequest() + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3087,7 +3108,10 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListMonitoredResourceDescriptorsRequest() + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3111,7 +3135,9 @@ async def test_list_logs_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging.ListLogsRequest() + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 7555ad73a79e..64dab1eecb85 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -926,7 +926,6 @@ async def test_list_log_metrics_async(transport: str = 'grpc_asyncio', request_t async def test_list_log_metrics_async_from_dict(): await test_list_log_metrics_async(request_type=dict) - def test_list_log_metrics_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1456,7 +1455,6 @@ async def test_get_log_metric_async(transport: str = 'grpc_asyncio', request_typ async def test_get_log_metric_async_from_dict(): await test_get_log_metric_async(request_type=dict) - def test_get_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -1790,7 +1788,6 @@ async def test_create_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_create_log_metric_async_from_dict(): await test_create_log_metric_async(request_type=dict) - def test_create_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2134,7 +2131,6 @@ async def test_update_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_update_log_metric_async_from_dict(): await test_update_log_metric_async(request_type=dict) - def test_update_log_metric_field_headers(): client = 
MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2448,7 +2444,6 @@ async def test_delete_log_metric_async(transport: str = 'grpc_asyncio', request_ async def test_delete_log_metric_async_from_dict(): await test_delete_log_metric_async(request_type=dict) - def test_delete_log_metric_field_headers(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), @@ -2704,7 +2699,10 @@ def test_list_log_metrics_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2723,7 +2721,10 @@ def test_get_log_metric_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2742,7 +2743,10 @@ def test_create_log_metric_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2761,7 +2765,10 @@ def test_update_log_metric_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2780,7 +2787,9 @@ def test_delete_log_metric_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -2811,7 +2820,10 @@ async def test_list_log_metrics_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.ListLogMetricsRequest() + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2840,7 +2852,10 @@ async def test_get_log_metric_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.GetLogMetricRequest() + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2869,7 +2884,10 @@ async def test_create_log_metric_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.CreateLogMetricRequest() + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2898,7 +2916,10 @@ async def test_update_log_metric_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.UpdateLogMetricRequest() + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2919,7 +2940,9 @@ async def test_delete_log_metric_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == logging_metrics.DeleteLogMetricRequest() + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 2771c207391e..d06e6a56eb8e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -696,7 +696,7 @@ def sample_list_instances(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.list_instances] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -808,7 +808,7 @@ def sample_get_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -912,7 +912,7 @@ def sample_get_instance_auth_string(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.get_instance_auth_string] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1068,7 +1068,7 @@ def sample_create_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.create_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1213,7 +1213,7 @@ def sample_update_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.update_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1343,7 +1343,7 @@ def sample_upgrade_instance(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.upgrade_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1483,7 +1483,7 @@ def sample_import_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.import_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1620,7 +1620,7 @@ def sample_export_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.export_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1751,7 +1751,7 @@ def sample_failover_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.failover_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -1877,7 +1877,7 @@ def sample_delete_instance(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.delete_instance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( @@ -2018,7 +2018,7 @@ def sample_reschedule_maintenance(): # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.reschedule_maintenance] - # Certain fields should be provided within the metadata header; + # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata(( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index b73b73a73801..76a68fb5b3ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -954,7 +954,6 @@ async def test_list_instances_async(transport: str = 'grpc_asyncio', request_typ async def test_list_instances_async_from_dict(): await test_list_instances_async(request_type=dict) - def test_list_instances_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1564,7 +1563,6 @@ async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type= async def test_get_instance_async_from_dict(): await test_get_instance_async(request_type=dict) - def test_get_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -1874,7 +1872,6 @@ async def test_get_instance_auth_string_async(transport: str = 'grpc_asyncio', r async def test_get_instance_auth_string_async_from_dict(): await test_get_instance_auth_string_async(request_type=dict) - def test_get_instance_auth_string_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2192,7 +2189,6 @@ async def test_create_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_create_instance_async_from_dict(): await test_create_instance_async(request_type=dict) - def 
test_create_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2528,7 +2524,6 @@ async def test_update_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_update_instance_async_from_dict(): await test_update_instance_async(request_type=dict) - def test_update_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2858,7 +2853,6 @@ async def test_upgrade_instance_async(transport: str = 'grpc_asyncio', request_t async def test_upgrade_instance_async_from_dict(): await test_upgrade_instance_async(request_type=dict) - def test_upgrade_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3186,7 +3180,6 @@ async def test_import_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_import_instance_async_from_dict(): await test_import_instance_async(request_type=dict) - def test_import_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3514,7 +3507,6 @@ async def test_export_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_export_instance_async_from_dict(): await test_export_instance_async(request_type=dict) - def test_export_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3842,7 +3834,6 @@ async def test_failover_instance_async(transport: str = 'grpc_asyncio', request_ async def test_failover_instance_async_from_dict(): await test_failover_instance_async(request_type=dict) - def test_failover_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4170,7 +4161,6 @@ async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_ty async def test_delete_instance_async_from_dict(): await test_delete_instance_async(request_type=dict) - def 
test_delete_instance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4488,7 +4478,6 @@ async def test_reschedule_maintenance_async(transport: str = 'grpc_asyncio', req async def test_reschedule_maintenance_async_from_dict(): await test_reschedule_maintenance_async(request_type=dict) - def test_reschedule_maintenance_field_headers(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7938,7 +7927,10 @@ def test_list_instances_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -7957,7 +7949,10 @@ def test_get_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -7976,7 +7971,10 @@ def test_get_instance_auth_string_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + request_msg = cloud_redis.GetInstanceAuthStringRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -7995,7 +7993,10 @@ def test_create_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8014,7 +8015,10 @@ def test_update_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8033,7 +8037,10 @@ def test_upgrade_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() + request_msg = cloud_redis.UpgradeInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8052,7 +8059,10 @@ def test_import_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() + request_msg = cloud_redis.ImportInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8071,7 +8081,10 @@ def test_export_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() + request_msg = cloud_redis.ExportInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8090,7 +8103,10 @@ def test_failover_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8109,7 +8125,10 @@ def test_delete_instance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8128,7 +8147,9 @@ def test_reschedule_maintenance_empty_call_grpc(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg def test_transport_kind_grpc_asyncio(): @@ -8160,7 +8181,10 @@ async def test_list_instances_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ListInstancesRequest() + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8209,7 +8233,10 @@ async def test_get_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceRequest() + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8232,7 +8259,10 @@ async def test_get_instance_auth_string_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.GetInstanceAuthStringRequest() + request_msg = cloud_redis.GetInstanceAuthStringRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8255,7 +8285,10 @@ async def test_create_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.CreateInstanceRequest() + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8278,7 +8311,10 @@ async def test_update_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpdateInstanceRequest() + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8301,7 +8337,10 @@ async def test_upgrade_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.UpgradeInstanceRequest() + request_msg = cloud_redis.UpgradeInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8324,7 +8363,10 @@ async def test_import_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ImportInstanceRequest() + request_msg = cloud_redis.ImportInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8347,7 +8389,10 @@ async def test_export_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.ExportInstanceRequest() + request_msg = cloud_redis.ExportInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8370,7 +8415,10 @@ async def test_failover_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.FailoverInstanceRequest() + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8393,7 +8441,10 @@ async def test_delete_instance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.DeleteInstanceRequest() + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -8416,7 +8467,9 @@ async def test_reschedule_maintenance_empty_call_grpc_asyncio(): # Establish that the underlying gRPC stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == cloud_redis.RescheduleMaintenanceRequest() + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg def test_transport_kind_rest(): From f87d3c449f8216dbddafb9a06e4da0946b32c44e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 20 Sep 2024 17:19:20 +0200 Subject: [PATCH 1186/1339] chore(deps): update all dependencies (#2162) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 6 +-- packages/gapic-generator/requirements.txt | 54 +++++++++++------------ 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 0828e207172d..d3c8d7faec00 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). 
http_archive( name = "com_google_protobuf", - sha256 = "13e7749c30bc24af6ee93e092422f9dc08491c7097efa69461f88eb5f61805ce", - strip_prefix = "protobuf-28.0", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.0.tar.gz"], + sha256 = "3b8bf6e96499a744bd014c60b58f797715a758093abf859f1d902194b8e1f8c9", + strip_prefix = "protobuf-28.1", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.1.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index ab28e808cc74..09ac6df230c1 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -181,9 +181,9 @@ grpcio==1.66.1 \ # via # googleapis-common-protos # grpc-google-iam-v1 -idna==3.8 \ - --hash=sha256:050b4e5baadcd44d760cedbd2b8e639f2ff89bbc7a5730fcc662954303377aac \ - --hash=sha256:d838c2c0ed6fced7693d5e8ab8e734d5f8fda53a039c0164afb0b82e771e3603 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via requests inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ @@ -306,41 +306,41 @@ proto-plus==1.24.0 \ # via # -r requirements.in # google-api-core -protobuf==5.28.0 \ - --hash=sha256:018db9056b9d75eb93d12a9d35120f97a84d9a919bcab11ed56ad2d399d6e8dd \ - --hash=sha256:510ed78cd0980f6d3218099e874714cdf0d8a95582e7b059b06cabad855ed0a0 \ - --hash=sha256:532627e8fdd825cf8767a2d2b94d77e874d5ddb0adefb04b237f7cc296748681 \ - --hash=sha256:6206afcb2d90181ae8722798dcb56dc76675ab67458ac24c0dd7d75d632ac9bd \ - --hash=sha256:66c3edeedb774a3508ae70d87b3a19786445fe9a068dd3585e0cefa8a77b83d0 \ - --hash=sha256:6d7cc9e60f976cf3e873acb9a40fed04afb5d224608ed5c1a105db4a3f09c5b6 \ - --hash=sha256:853db610214e77ee817ecf0514e0d1d052dff7f63a0c157aa6eabae98db8a8de \ - 
--hash=sha256:d001a73c8bc2bf5b5c1360d59dd7573744e163b3607fa92788b7f3d5fefbd9a5 \ - --hash=sha256:dde74af0fa774fa98892209992295adbfb91da3fa98c8f67a88afe8f5a349add \ - --hash=sha256:dde9fcaa24e7a9654f4baf2a55250b13a5ea701493d904c54069776b99a8216b \ - --hash=sha256:eef7a8a2f4318e2cb2dee8666d26e58eaf437c14788f3a2911d0c3da40405ae8 +protobuf==5.28.1 \ + --hash=sha256:0dfd86d2b5edf03d91ec2a7c15b4e950258150f14f9af5f51c17fa224ee1931f \ + --hash=sha256:1b04bde117a10ff9d906841a89ec326686c48ececeb65690f15b8cabe7149495 \ + --hash=sha256:42597e938f83bb7f3e4b35f03aa45208d49ae8d5bcb4bc10b9fc825e0ab5e423 \ + --hash=sha256:4304e4fceb823d91699e924a1fdf95cde0e066f3b1c28edb665bda762ecde10f \ + --hash=sha256:4b4b9a0562a35773ff47a3df823177ab71a1f5eb1ff56d8f842b7432ecfd7fd2 \ + --hash=sha256:4c7f5cb38c640919791c9f74ea80c5b82314c69a8409ea36f2599617d03989af \ + --hash=sha256:51f09caab818707ab91cf09cc5c156026599cf05a4520779ccbf53c1b352fb25 \ + --hash=sha256:c529535e5c0effcf417682563719e5d8ac8d2b93de07a56108b4c2d436d7a29a \ + --hash=sha256:cabfe43044ee319ad6832b2fda332646f9ef1636b0130186a3ae0a52fc264bb4 \ + --hash=sha256:f24e5d70e6af8ee9672ff605d5503491635f63d5db2fffb6472be78ba62efd8f \ + --hash=sha256:fc063acaf7a3d9ca13146fefb5b42ac94ab943ec6e978f543cd5637da2d57957 # via # -r requirements.in # google-api-core # googleapis-common-protos # grpc-google-iam-v1 # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules==0.4.1 \ + 
--hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth pypandoc==1.13 \ --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ --hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 # via -r requirements.in -pytest==8.3.2 \ - --hash=sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5 \ - --hash=sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce +pytest==8.3.3 \ + --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ + --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 # via pytest-asyncio pytest-asyncio==0.24.0 \ --hash=sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b \ @@ -425,7 +425,7 @@ typing-inspect==0.9.0 \ --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 # via libcst -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests From 3374a6882679c71ac82d9a220afff9411d43a28e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 20 Sep 2024 11:50:44 -0400 Subject: [PATCH 1187/1339] chore: update dependencies (#2185) --- packages/gapic-generator/requirements.txt | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 09ac6df230c1..766016e6acf4 100644 --- a/packages/gapic-generator/requirements.txt +++ 
b/packages/gapic-generator/requirements.txt @@ -112,14 +112,16 @@ exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest -google-api-core==2.19.2 \ - --hash=sha256:53ec0258f2837dd53bbd3d3df50f5359281b3cc13f800c941dd15a9b5a415af4 \ - --hash=sha256:ca07de7e8aa1c98a8bfca9321890ad2340ef7f2eb136e558cee68f24b94b0a8f +google-api-core==2.20.0 \ + --hash=sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a \ + --hash=sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f # via -r requirements.in -google-auth==2.34.0 \ - --hash=sha256:72fd4733b80b6d777dcde515628a9eb4a577339437012874ea286bca7261ee65 \ - --hash=sha256:8eb87396435c19b20d32abd2f984e31c191a15284af72eb922f10e5bde9c04cc - # via google-api-core +google-auth==2.35.0 \ + --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ + --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a + # via + # -r requirements.in + # google-api-core googleapis-common-protos[grpc]==1.65.0 \ --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 From 3307b1a63d7f50886020293974c0498899859deb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 24 Sep 2024 01:19:26 +0200 Subject: [PATCH 1188/1339] chore(deps): update all dependencies (#2190) Co-authored-by: Owl Bot --- packages/gapic-generator/WORKSPACE | 6 +++--- packages/gapic-generator/requirements.txt | 24 +++++++++++------------ 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index d3c8d7faec00..caf3401818d4 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). 
http_archive( name = "com_google_protobuf", - sha256 = "3b8bf6e96499a744bd014c60b58f797715a758093abf859f1d902194b8e1f8c9", - strip_prefix = "protobuf-28.1", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.1.tar.gz"], + sha256 = "b2340aa47faf7ef10a0328190319d3f3bee1b24f426d4ce8f4253b6f27ce16db", + strip_prefix = "protobuf-28.2", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.2.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 766016e6acf4..7bfcade326ce 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -308,18 +308,18 @@ proto-plus==1.24.0 \ # via # -r requirements.in # google-api-core -protobuf==5.28.1 \ - --hash=sha256:0dfd86d2b5edf03d91ec2a7c15b4e950258150f14f9af5f51c17fa224ee1931f \ - --hash=sha256:1b04bde117a10ff9d906841a89ec326686c48ececeb65690f15b8cabe7149495 \ - --hash=sha256:42597e938f83bb7f3e4b35f03aa45208d49ae8d5bcb4bc10b9fc825e0ab5e423 \ - --hash=sha256:4304e4fceb823d91699e924a1fdf95cde0e066f3b1c28edb665bda762ecde10f \ - --hash=sha256:4b4b9a0562a35773ff47a3df823177ab71a1f5eb1ff56d8f842b7432ecfd7fd2 \ - --hash=sha256:4c7f5cb38c640919791c9f74ea80c5b82314c69a8409ea36f2599617d03989af \ - --hash=sha256:51f09caab818707ab91cf09cc5c156026599cf05a4520779ccbf53c1b352fb25 \ - --hash=sha256:c529535e5c0effcf417682563719e5d8ac8d2b93de07a56108b4c2d436d7a29a \ - --hash=sha256:cabfe43044ee319ad6832b2fda332646f9ef1636b0130186a3ae0a52fc264bb4 \ - --hash=sha256:f24e5d70e6af8ee9672ff605d5503491635f63d5db2fffb6472be78ba62efd8f \ - --hash=sha256:fc063acaf7a3d9ca13146fefb5b42ac94ab943ec6e978f543cd5637da2d57957 +protobuf==5.28.2 \ + --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ + --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ + 
--hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ + --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ + --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ + --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ + --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ + --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ + --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ + --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ + --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d # via # -r requirements.in # google-api-core From 4b2e8903627aa4ee402ee3083e15659da4ac8b17 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 8 Oct 2024 11:01:10 -0400 Subject: [PATCH 1189/1339] fix: add support for field with name 'self' (#2205) --- packages/gapic-generator/gapic/utils/reserved_names.py | 4 +++- .../tests/fragments/test_reserved_field_name.proto | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/utils/reserved_names.py b/packages/gapic-generator/gapic/utils/reserved_names.py index 0914cb02e3ce..6aac10742006 100644 --- a/packages/gapic-generator/gapic/utils/reserved_names.py +++ b/packages/gapic-generator/gapic/utils/reserved_names.py @@ -89,6 +89,8 @@ "exec", "help", # Comes from Protoplus - "ignore_unknown_fields" + "ignore_unknown_fields", + "self", # Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments + "cls", # Refer PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments ] ) diff --git a/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto b/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto index 9fee7912c952..f11b3416ce43 100644 --- 
a/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto +++ b/packages/gapic-generator/tests/fragments/test_reserved_field_name.proto @@ -34,6 +34,10 @@ message MethodRequest { string any = 4; string license = 5; string type = 6; + // Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments + string self = 7; + // Refer to PEP8 https://peps.python.org/pep-0008/#function-and-method-arguments + string cls = 8; } message MethodResponse { From a42469e9db44fb9e323b369c9a51da207182a7d9 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 8 Oct 2024 16:59:44 -0400 Subject: [PATCH 1190/1339] feat: add async rest transport support in gapics (#2164) Co-authored-by: Anthonios Partheniou --- .../.github/workflows/tests.yaml | 3 +- .../%sub/services/%service/_mixins.py.j2 | 60 +- .../%sub/services/%service/_shared_macros.j2 | 372 + .../%service/transports/_rest_mixins.py.j2 | 30 + .../transports/_rest_mixins_base.py.j2 | 55 + .../services/%service/transports/base.py.j2 | 9 +- .../services/%service/transports/rest.py.j2 | 215 +- .../%service/transports/rest_base.py.j2 | 186 + .../%namespace/%name/%version/__init__.py.j2 | 12 +- .../%name_%version/%sub/_test_mixins.py.j2 | 64 + .../services/%service/_async_mixins.py.j2 | 60 +- .../%sub/services/%service/_mixins.py.j2 | 60 +- .../%sub/services/%service/_shared_macros.j2 | 180 +- .../%sub/services/%service/async_client.py.j2 | 18 +- .../%sub/services/%service/client.py.j2 | 53 +- .../%service/transports/__init__.py.j2 | 21 +- .../%service/transports/_rest_mixins.py.j2 | 48 +- .../transports/_rest_mixins_base.py.j2 | 4 +- .../services/%service/transports/base.py.j2 | 9 +- .../%service/transports/grpc_asyncio.py.j2 | 2 +- .../services/%service/transports/rest.py.j2 | 80 +- .../%service/transports/rest_asyncio.py.j2 | 221 +- .../%service/transports/rest_base.py.j2 | 10 +- .../gapic/templates/noxfile.py.j2 | 1 + .../gapic/templates/setup.py.j2 | 12 + .../%name_%version/%sub/_test_mixins.py.j2 | 64 - 
.../%name_%version/%sub/test_%service.py.j2 | 83 +- .../gapic/%name_%version/%sub/test_macros.j2 | 1057 ++- packages/gapic-generator/noxfile.py | 90 +- .../services/asset_service/async_client.py | 6 +- .../asset_v1/services/asset_service/client.py | 6 +- .../services/asset_service/transports/base.py | 5 + .../asset_service/transports/grpc_asyncio.py | 5 + .../services/asset_service/transports/rest.py | 26 +- .../asset_service/transports/rest_base.py | 7 +- .../tests/integration/goldens/asset/setup.py | 3 + .../unit/gapic/asset_v1/test_asset_service.py | 5470 +++++++------ .../iam_credentials/transports/rest.py | 3 + .../iam_credentials/transports/rest_base.py | 5 +- .../integration/goldens/credentials/setup.py | 3 + .../credentials_v1/test_iam_credentials.py | 891 +-- .../services/eventarc/async_client.py | 54 +- .../eventarc_v1/services/eventarc/client.py | 54 +- .../services/eventarc/transports/base.py | 45 + .../eventarc/transports/grpc_asyncio.py | 45 + .../services/eventarc/transports/rest.py | 73 +- .../services/eventarc/transports/rest_base.py | 23 +- .../integration/goldens/eventarc/setup.py | 3 + .../unit/gapic/eventarc_v1/test_eventarc.py | 6772 +++++++++-------- .../config_service_v2/async_client.py | 18 +- .../services/config_service_v2/client.py | 18 +- .../config_service_v2/transports/base.py | 15 + .../transports/grpc_asyncio.py | 15 + .../logging_service_v2/async_client.py | 18 +- .../services/logging_service_v2/client.py | 18 +- .../logging_service_v2/transports/base.py | 15 + .../transports/grpc_asyncio.py | 15 + .../metrics_service_v2/async_client.py | 18 +- .../services/metrics_service_v2/client.py | 18 +- .../metrics_service_v2/transports/base.py | 15 + .../transports/grpc_asyncio.py | 15 + .../integration/goldens/logging/setup.py | 3 + .../logging_v2/test_config_service_v2.py | 66 +- .../logging_v2/test_logging_service_v2.py | 66 +- .../logging_v2/test_metrics_service_v2.py | 66 +- .../services/cloud_redis/async_client.py | 36 +- 
.../redis_v1/services/cloud_redis/client.py | 80 +- .../cloud_redis/transports/__init__.py | 15 +- .../services/cloud_redis/transports/base.py | 30 + .../cloud_redis/transports/grpc_asyncio.py | 30 + .../services/cloud_redis/transports/rest.py | 45 +- .../cloud_redis/transports/rest_asyncio.py | 2025 ++++- .../cloud_redis/transports/rest_base.py | 17 +- .../tests/integration/goldens/redis/setup.py | 7 + .../unit/gapic/redis_v1/test_cloud_redis.py | 5861 +++++++++----- .../gapic-generator/tests/system/conftest.py | 35 +- .../system/test_client_context_manager.py | 3 +- .../gapic-generator/tests/system/test_lro.py | 5 +- .../tests/system/test_mixins.py | 158 + .../tests/system/test_streams.py | 73 +- .../tests/system/test_unary.py | 10 +- .../tests/system/test_universe_domain.py | 4 +- 82 files changed, 15410 insertions(+), 10001 deletions(-) create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 create mode 100644 packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 create mode 100644 packages/gapic-generator/tests/system/test_mixins.py diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 4c00a2bb3237..5eaf75d2acb9 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -55,8 +55,9 @@ jobs: strategy: # Run showcase tests on the lowest and highest supported runtimes matrix: + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA. 
python: ["3.7", "3.12"] - target: [showcase, showcase_alternative_templates] + target: [showcase, showcase_alternative_templates, showcase_w_rest_async] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 index f264ce88866f..6dd04dfb186e 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_mixins.py.j2 @@ -32,11 +32,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -86,11 +82,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -143,11 +135,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. 
@@ -195,11 +183,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -250,11 +234,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.wait_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.wait_operation] metadata = tuple(metadata) @@ -369,11 +349,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -489,11 +465,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -547,11 +519,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. 
@@ -604,11 +572,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -657,11 +621,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 index e7f623cfd214..b055b9ca3170 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 @@ -57,10 +57,53 @@ The `try/except` below can be removed once the minimum version of try: from google.api_core import version_header HAS_GOOGLE_API_CORE_VERSION_HEADER = True # pragma: NO COVER +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} except ImportError: # pragma: NO COVER HAS_GOOGLE_API_CORE_VERSION_HEADER = False {% endif %}{# service_version #} {% endmacro %} + +{% macro create_metadata(method) %} + {% if method.explicit_routing %} + header_params = {} + {% if not method.client_streaming %} + {% for routing_param in method.routing_rule.routing_parameters %} + {% if routing_param.path_template %} {# Need to match. 
#} + + routing_param_regex = {{ routing_param.to_regex() }} + regex_match = routing_param_regex.match(request.{{ routing_param.field }}) + if regex_match and regex_match.group("{{ routing_param.key }}"): + header_params["{{ routing_param.key }}"] = regex_match.group("{{ routing_param.key }}") + + {% else %} + + if request.{{ routing_param.field }}: + header_params["{{ routing_param.key }}"] = request.{{ routing_param.field }} + + {% endif %} + {% endfor %} {# method.routing_rule.routing_parameters #} + {% endif %} {# if not method.client_streaming #} + + if header_params: + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(header_params), + ) + + {% elif method.field_headers %}{# implicit routing #} + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + {% if not method.client_streaming %} + {% for field_header in method.field_headers %} + ("{{ field_header.raw }}", request.{{ field_header.disambiguated }}), + {% endfor %}{# for field_header in method.field_headers #} + {% endif %}{# not method.client_streaming #} + )), + ) + {% endif %}{# method.explicit_routing #} +{% endmacro %}{# create_metadata #} + {% macro add_api_version_header_to_metadata(service_version) %} {# Add API Version to metadata as per https://github.com/aip-dev/google.aip.dev/pull/1331. 
@@ -74,3 +117,332 @@ except ImportError: # pragma: NO COVER ) {% endif %}{# service_version #} {% endmacro %} + +{% macro operations_mixin_imports(api, service, opts) %} +{% if import_ns is not defined %} +{% set import_ns = namespace(has_operations_mixin=false) %} +{% endif %}{# import_ns is not defined #} +{% set import_ns.has_operations_mixin = api.has_operations_mixin %} + +{% filter sort_lines %} +{% for method in service.methods.values() %} +{{method.input.ident.python_import}} +{% if method.output.ident|string() == "operations_pb2.Operation" %} +{% set import_ns.has_operations_mixin = True %} +{% else %} +{{method.output.ident.python_import}} +{% endif %} +{% endfor %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %}{# opts.add_iam_methods #} +{% endfilter %} +{% if import_ns.has_operations_mixin %} +from google.longrunning import operations_pb2 # type: ignore +{% endif %}{# import_ns.has_operations_mixin #} +{% endmacro %} + +{% macro http_options_method(rules) %} +@staticmethod +def _get_http_options(): + http_options: List[Dict[str, str]] = [ + {%- for rule in rules %}{ + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rule in rules #} + ] + return http_options +{% endmacro %} + +{% macro response_method(body_spec, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} +@staticmethod +{{ async_prefix }}def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = {{ await_prefix }}getattr(session, method)( + "{host}{uri}".format(host=host, 
uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + {% if body_spec %} + data=body, + {% endif %} + ) + return response +{% endmacro %} + +{# rest_call_method_common includes the common code for a rest __call__ method to be + re-used for sync REST, async REST, and mixins __call__ implementation. + + Args: + body_spec (str): The http options body i.e. method.http_options[0].body + method_name (str): The method name. + service_name (str): The service name. + is_async (bool): Used to determine the code path i.e. whether for sync or async call. #} +{% macro rest_call_method_common(body_spec, method_name, service_name, is_async=False) %} +{% set await_prefix = "await " if is_async else "" %} +{% set async_class_prefix = "Async" if is_async else "" %} + + http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() + request, metadata = {{ await_prefix }}self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) + transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) + + {% if body_spec %} + body = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_request_body_json(transcoded_request) + {% endif %}{# body_spec #} + + # Jsonify the query params + query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) + + # Send the request + response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + {# Note: format_http_response_error takes in more parameters than from_http_response and the + latter only supports a response of type requests.Response. + TODO: Clean up the sync response error handling and use format_http_response_error. + See issue: https://github.com/googleapis/gapic-generator-python/issues/2116. #} + {% if is_async %} + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core. #} + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + {% else %} + raise core_exceptions.from_http_response(response) + {% endif %}{# is_async #} + +{% endmacro %} + + +{% macro prep_wrapped_messages_async_method(api, service) %} +def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + {% for method in service.methods.values() %} + self.{{ method.transport_safe_name|snake_case }}: self._wrap_method( + self.{{ method.transport_safe_name|snake_case }}, + {% if method.retry %} + default_retry=retries.AsyncRetry( + {% if method.retry.initial_backoff %} + initial={{ method.retry.initial_backoff }}, + {% endif %} + {% if method.retry.max_backoff %} + maximum={{ method.retry.max_backoff }}, + {% endif %} + {% if method.retry.backoff_multiplier %} + multiplier={{ method.retry.backoff_multiplier }}, + {% endif %} + predicate=retries.if_exception_type( + {% for ex in method.retry.retryable_exceptions|sort(attribute='__name__') %} + core_exceptions.{{ ex.__name__ }}, + {% endfor %} + ), + deadline={{ method.timeout }}, + ), + {% endif %} + default_timeout={{ method.timeout }}, + 
client_info=client_info, + ), + {% endfor %}{# service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + self.{{ method_name|snake_case }}: self._wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %} {# method_name in api.mixin_api_methods.keys() #} + } +{% endmacro %} + +{# TODO: This helper logic to check whether `kind` needs to be configured in wrap_method +can be removed once we require the correct version of the google-api-core dependency to +avoid having a gRPC code path in an async REST call. +See related issue: https://github.com/googleapis/python-api-core/issues/661. +In the meantime, if an older version of the dependency is installed (which has a wrap_method with +no kind parameter), then an async gRPC call will work correctly and async REST transport +will not be available as a transport. +See related issue: https://github.com/googleapis/gapic-generator-python/issues/2119. #} +{% macro wrap_async_method_macro() %} +def _wrap_method(self, func, *args, **kwargs): + {# TODO: Remove `pragma: NO COVER` once https://github.com/googleapis/python-api-core/pull/688 is merged. #} + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) +{% endmacro %} + +{# `create_interceptor_class` generates an Interceptor class for + # synchronous and asynchronous rest transports +#} +{% macro create_interceptor_class(api, service, method, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_docstring = "Asynchronous " if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} + +class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: + """{{ async_docstring }}Interceptor for {{ service.name }}. 
+ + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the {{ async_method_name_prefix }}{{ service.name }}RestTransport. + + .. code-block:: python + class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): + {% for _, method in service.methods|dictsort if not method.client_streaming %} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + {% if not method.void %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response): + logging.log(f"Received response: {response}") + return response + {% endif %} + + {% endfor %} + transport = {{ async_method_name_prefix }}{{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) + client = {{ async_prefix }}{{ service.client_name }}(transport=transport) + + + """ + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2147): Remove the condition below once async rest transport supports the guarded methods. #} + {% if (not is_async) or (is_async and not method.lro and not method.extended_lro and not method.paged_result_field) %} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. 
+ """ + return request, metadata + + {% if not method.void %} + {% if not method.server_streaming %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + {% else %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator) -> rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator: + {% endif %} + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + {% endif %} + {% endif %}{# if (not is_async) or (is_async and not method.lro and not method.extended_lro and not method.paged_result_field) #} + {% endfor %} + + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2148): Remove the condition below once mixins are supported for async rest transport. #} + {% if not is_async %} + {% for name, signature in api.mixin_api_signatures.items() %} + {{ async_prefix }}def pre_{{ name|snake_case }}( + self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + {{ async_prefix }}def post_{{ name|snake_case }}( + self, response: {{signature.response_type}} + ) -> {{signature.response_type}}: + """Post-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. 
+ """ + return response + {% endfor %} + {% endif %} +{% endmacro %} + +{% macro generate_mixin_call_method(service, api, name, sig, is_async) %} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} + +@property +def {{ name|snake_case }}(self): + return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore + +class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): + def __hash__(self): + return hash("{{ async_method_name_prefix }}{{ service.name }}RestTransport.{{ name }}") + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {{ response_method(body_spec) | indent(4) }} + + {{ async_prefix }}def __call__(self, + request: {{ sig.request_type }}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{ sig.response_type }}: + + r"""Call the {{- ' ' -}} + {{ (name|snake_case).replace('_',' ')|wrap(width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request ({{ sig.request_type }}): + The request object for {{ name }} method. + retry (google.api_core.retry{{ async_suffix }}.{{ async_method_name_prefix }}Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + {% if sig.response_type != 'None' %} + + Returns: + {{ sig.response_type }}: Response from {{ name }} method. 
+ {% endif %} + """ + {{ rest_call_method_common(body_spec, name, service.name, is_async)|indent(4) }} + + {% if sig.response_type == "None" %} + return {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(None) + {% else %} + {% if is_async %} + content = await response.read() + {% else %} + content = response.content.decode("utf-8") + {% endif %} + resp = {{ sig.response_type }}() + resp = json_format.Parse(content, resp) + resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) + return resp + {% endif %} + +{% endmacro %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 new file mode 100644 index 000000000000..2603b19b4829 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -0,0 +1,30 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + # This file is a copy of `_rest_mixins.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2` + # It is intended to be a symlink. 
+ # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `_rest_mixins.py.j2` in standard templates. +#} + +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + + {% if "rest" in opts.transport %} + {% for name, sig in api.mixin_api_signatures.items() %} + {{ shared_macros.generate_mixin_call_method(service, api, name, sig, is_async=False) | indent(4) }} + {% endfor %} + {% endif %} {# rest in opts.transport #} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 new file mode 100644 index 000000000000..b88489d5f140 --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 @@ -0,0 +1,55 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # This file is a copy of `_rest_mixins_base.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2` + # It is intended to be a symlink. 
+ # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `_rest_mixins_base.py.j2` in standard templates. +#} + +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + + {% if "rest" in opts.transport %} + + {% for name, sig in api.mixin_api_signatures.items() %} + class _Base{{ name }}: + + {{ shared_macros.http_options_method(api.mixin_http_options["{}".format(name)])|indent(8)}} + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {%- if body_spec %} + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + + {%- endif %} {# body_spec #} + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + {% endfor %} + {% endif %} {# rest in opts.transport #} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 index d4364e44f3e3..e9789d8a5da3 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/base.py.j2 @@ -150,7 +150,14 @@ class {{ service.name }}Transport(abc.ABC): default_timeout={{ method.timeout }}, client_info=client_info, ), - {% endfor %} {# precomputed wrappers loop #} + {% endfor %}{# 
method in service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + self.{{ method_name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %} {# method_name in api.mixin_api_methods.keys() #} } def close(self): diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index 5a2d2f5b3480..a55ced7c086a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -1,3 +1,12 @@ +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +{# + # This file is a copy of `rest.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2` + # It is intended to be a symlink. + # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `rest.py.j2` in standard templates. 
+#} {% extends '_base.py.j2' %} {% block content %} @@ -5,47 +14,41 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format {% if service.has_lro %} from google.api_core import operations_v1 {% endif %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings +{{ shared_macros.operations_mixin_imports(api, service, opts) }} + +from .rest_base import _Base{{ service.name }}RestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -{# TODO(yon-mg): re-add python_import/ python_modules from removed diff/current grpc template code #} -{% filter sort_lines %} -{% for method in service.methods.values() %} -{{method.input.ident.python_import}} -{{method.output.ident.python_import}} -{% endfor %} -{% if opts.add_iam_methods %} -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -{% endif %} 
-{% endfilter %} - -from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, @@ -53,63 +56,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( ) -class {{ service.name }}RestInterceptor: - """Interceptor for {{ service.name }}. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the {{ service.name }}RestTransport. - - .. code-block:: python - class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): - {% for _, method in service.methods|dictsort if not method.client_streaming %} - def pre_{{ method.name|snake_case }}(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - {% if not method.void %} - def post_{{ method.name|snake_case }}(self, response): - logging.log(f"Received response: {response}") - return response - {% endif %} - -{% endfor %} - transport = {{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) - client = {{ service.client_name }}(transport=transport) - - - """ - {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} - def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the request or metadata - before they are sent to the {{ service.name }} server. 
- """ - return request, metadata - - {% if not method.void %} - {% if not method.server_streaming %} - def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: - {% else %} - def post_{{ method.name|snake_case }}(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - {% endif %} - """Post-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the response - after it is returned by the {{ service.name }} server but before - it is returned to user code. - """ - return response - {% endif %} - - {% endfor %} +{{ shared_macros.create_interceptor_class(api, service, method, is_async=False) }} @dataclasses.dataclass @@ -119,8 +66,8 @@ class {{service.name}}RestStub: _interceptor: {{ service.name }}RestInterceptor -class {{service.name}}RestTransport({{service.name}}Transport): - """REST backend transport for {{ service.name }}. +class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): + """REST backend synchronous transport for {{ service.name }}. {{ service.meta.doc|rst(width=72, indent=4) }} @@ -129,15 +76,8 @@ class {{service.name}}RestTransport({{service.name}}Transport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - - {% if not opts.rest_numeric_enums %} - NOTE: This REST transport functionality is currently in a beta - state (preview). We welcome your feedback via an issue in this - library's source repository. Thank you! - {% endif %} """ - {# TODO(yon-mg): handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, @@ -151,6 +91,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', interceptor: Optional[{{ service.name }}RestInterceptor] = None, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -162,7 +103,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): - {{ ' ' }}The hostname to connect to. + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -194,19 +135,13 @@ class {{service.name}}RestTransport({{service.name}}Transport): # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience ) self._session = AuthorizedSession( self._credentials, default_host=self.DEFAULT_HOST) @@ -263,24 +198,14 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %}{# service.has_lro #} {% for method in service.methods.values()|sort(attribute="name") %} - class _{{method.name}}({{service.name}}RestStub): + class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, {{service.name}}RestStub): def __hash__(self): - return hash("{{method.name}}") - + return hash("{{service.name}}RestTransport.{{method.name}}") {% if method.http_options and not method.client_streaming %} - {% if method.input.required_fields %} - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - {% for req_field in method.input.required_fields if 
req_field.name in method.query_params %} - "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} - {% endfor %} - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} - {% endif %}{# required fields #} - {% endif %}{# not method.client_streaming #} + {% set body_spec = method.http_options[0].body %} + {{ shared_macros.response_method(body_spec)|indent(8) }} + {% endif %}{# method.http_options and not method.client_streaming #} def __call__(self, request: {{method.input.ident}}, *, @@ -297,7 +222,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): Args: request (~.{{ method.input.ident }}): The request object.{{ ' ' }} - {{- method.input.meta.doc|rst(width=72, indent=16) }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -311,67 +236,7 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endif %} """ - http_options: List[Dict[str, str]] = [ - {%- for rule in method.http_options %}{ - 'method': '{{ rule.method }}', - 'uri': '{{ rule.uri }}', - {% if rule.body %} - 'body': '{{ rule.body }}', - {% endif %}{# rule.body #} - }, - {% endfor %}{# rule in method.http_options #} - ] - request, metadata = self._interceptor.pre_{{ method.name|snake_case }}(request, metadata) - {% if method.input.ident.is_proto_plus_type %} - pb_request = {{method.input.ident}}.pb(request) - {% else %} - pb_request = request - {% endif %} - transcoded_request = path_template.transcode(http_options, pb_request) - - {% set body_spec = method.http_options[0].body %} - {%- if body_spec %} - # Jsonify the request body - - body = json_format.MessageToJson( - transcoded_request['body'], - use_integers_for_enums={{ opts.rest_numeric_enums }} - ) - {%- endif %} - - uri = transcoded_request['uri'] - method = transcoded_request['method'] - - # Jsonify the query params - query_params = json.loads(json_format.MessageToJson( - transcoded_request['query_params'], - use_integers_for_enums={{ opts.rest_numeric_enums }}, - )) - {% if method.input.required_fields %} - query_params.update(self._get_unset_required_fields(query_params)) - {% endif %}{# required fields #} - - {% if opts.rest_numeric_enums %} - query_params["$alt"] = "json;enum-encoding=int" - {% endif %} - - # Send the request - headers = dict(metadata) - headers['Content-Type'] = 'application/json' - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - {% if body_spec %} - data=body, - {% endif %} - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) + {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name)|indent(8) }} {% if not method.void %} # Return the response @@ -398,7 +263,6 @@ class {{service.name}}RestTransport({{service.name}}Transport): raise NotImplementedError( "Method {{ method.name }} is not available over REST transport" ) - {% endif %}{# method.http_options and not method.client_streaming #} {% endfor %} {% for method in service.methods.values()|sort(attribute="name") %} @@ -413,6 +277,13 @@ class {{service.name}}RestTransport({{service.name}}Transport): {% endfor %} + {% include '%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins.py.j2' %} + + {# + TODO(https://github.com/googleapis/gapic-generator-python/issues/2183): + Add `kind` property to transport in ads templates + #} + def close(self): self._session.close() diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 new file mode 100644 index 000000000000..29b64f00087c --- /dev/null +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest_base.py.j2 @@ -0,0 +1,186 @@ +{# + # Copyright (C) 2024 Google LLC + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ # See the License for the specific language governing permissions and + # limitations under the License. +#} +{# + # This file is a copy of `rest_base.py.j2` in standard templates located at + # `gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2` + # It is intended to be a symlink. + # See https://github.com/googleapis/gapic-generator-python/issues/2028 + # which contains follow up work to convert it to a symlink. + # Do not diverge from the copy of `rest_base.py.j2` in standard templates. +#} + +{% import "%namespace/%name/%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +{% extends '_base.py.j2' %} + +{% block content %} + +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} +from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +{{ shared_macros.operations_mixin_imports(api, service, opts) }} + + +class _Base{{ service.name }}RestTransport({{service.name}}Transport): + """Base REST backend transport for {{ service.name }}. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + + {# TODO: handle mtls stuff if that is relevant for rest transport #} + def __init__(self, *, + host: str{% if service.host %} = '{{ service.host }}'{% endif %}, + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host ({% if service.host %}Optional[str]{% else %}str{% endif %}): + {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2173): Type hint for credentials is + # set to `Any` to support async and sync credential types in the parent rest transport classes. + # However, we should have a stronger type here such as an abstract base credentials + # class leveraged by sync and async credential classes. + #} + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + {% for method in service.methods.values()|sort(attribute="name") %} + class _Base{{method.name}}: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + {% if method.http_options and not method.client_streaming %} + {% if method.input.required_fields %} + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + {% for req_field in method.input.required_fields if req_field.name in method.query_params %} + "{{ req_field.name | camel_case }}" : {% if req_field.field_pb.type == 9 %}"{{req_field.field_pb.default_value }}"{% elif req_field.field_pb.type in [11, 14] %}{}{% else %}{{ req_field.type.python_type(req_field.field_pb.default_value or 0) }}{% endif %},{# default is str #} + {% endfor %} + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + {% endif %}{# required fields #} + + {% set method_http_options = method.http_options %} + + {{ shared_macros.http_options_method(method_http_options)|indent(8) }} + + @staticmethod + def _get_transcoded_request(http_options, request): + {% if method.input.ident.is_proto_plus_type %} + pb_request = {{method.input.ident}}.pb(request) + {% else %} + pb_request = request + {% endif %} + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + {% set body_spec = method.http_options[0].body %} + {%- if body_spec %} + + 
@staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums={{ opts.rest_numeric_enums }} + ) + return body + + {%- endif %}{# body_spec #} + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums={{ opts.rest_numeric_enums }}, + )) + {% if method.input.required_fields %} + query_params.update(_Base{{ service.name }}RestTransport._Base{{method.name}}._get_unset_required_fields(query_params)) + {% endif %}{# required fields #} + + {% if opts.rest_numeric_enums %} + query_params["$alt"] = "json;enum-encoding=int" + {% endif %} + return query_params + + {% endif %}{# method.http_options and not method.client_streaming #} + {% endfor %} + + {% include '%namespace/%name/%version/%sub/services/%service/transports/_rest_mixins_base.py.j2' %} + + +__all__=( + '_Base{{ service.name }}RestTransport', +) +{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 index a7383b72e675..5a8d6d6bacbc 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/__init__.py.j2 @@ -56,15 +56,13 @@ def __dir__(): {% else %} {# do not use lazy import #} {# Import subpackages. 
-#} {% for subpackage in api.subpackages|dictsort %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }} import {{ subpackage }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }} import {{ subpackage }} {% endfor %} {# Import services for this package. -#} {% for service in api.services.values()|sort(attribute='name') if service.meta.address.subpackage == api.subpackage_view %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }}.services.{{ service.name|snake_case }}.client import {{ service.client_name }} {% endfor %} {# Import messages and enums from each proto. @@ -80,12 +78,10 @@ from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.' 
{% for proto in api.protos.values()|sort(attribute='module_name') if proto.meta.address.subpackage == api.subpackage_view %} {% for message in proto.messages.values()|sort(attribute='name') %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ message.name }} {% endfor %} {% for enum in proto.enums.values()|sort(attribute='name') %} -from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %} - {{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} +from {% if api.naming.module_namespace %}{{ api.naming.module_namespace|join('.') }}.{% endif %}{{ api.naming.versioned_module_name }}.types.{{ proto.module_name }} import {{ enum.name }} {% endfor %}{% endfor %} {# Define __all__. 
This requires the full set of imported names, so we iterate over diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 8175f2e0e5de..73b4a5c0a658 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,3 +1,67 @@ +{% if 'rest' in opts.transport %} +{% for name, sig in api.mixin_api_signatures.items() %} + +def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.{{ name|snake_case }}(request) + +@pytest.mark.parametrize("request_type", [ + {{ sig.request_type }}, + dict, +]) +def test_{{ name|snake_case }}_rest(request_type): + client = {{ service.client_name }}( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if sig.response_type == "None" %} + return_value = None + {% else %} + return_value = {{ sig.response_type }}() + {% endif %} + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if sig.response_type == "None" %} + json_return_value = '{}' + {% else %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + + response = client.{{ name|snake_case }}(request) + + # Establish that the response is the type that we expect. + {% if sig.response_type == "None" %} + assert response is None + {% else %} + assert isinstance(response, {{ sig.response_type }}) + {% endif %} +{% endfor %} +{% endif %} + {% if api.has_operations_mixin and 'grpc' in opts.transport %} {% if "DeleteOperation" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 750bd734abcc..33ecb4d8e08c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -34,11 +34,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -91,11 +87,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -151,11 +143,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -206,11 +194,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -264,11 +248,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.wait_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.wait_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -391,11 +371,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -514,11 +490,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -575,11 +547,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -635,11 +603,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -691,11 +655,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index 3417068a9ff2..8a57c5944cb7 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -32,11 +32,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -89,11 +85,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -149,11 +141,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -204,11 +192,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -262,11 +246,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.wait_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.wait_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -389,11 +369,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -512,11 +488,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. 
@@ -573,11 +545,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -633,11 +601,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -689,11 +653,7 @@ # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 8fec390957e2..c35c8975001e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -50,6 +50,7 @@ The `try/except` below can be removed once the minimum version of try: from google.api_core import version_header HAS_GOOGLE_API_CORE_VERSION_HEADER = True # pragma: NO COVER +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} except ImportError: # pragma: NO COVER HAS_GOOGLE_API_CORE_VERSION_HEADER = False {% endif %}{# service_version #} @@ -193,11 +194,7 @@ def _get_http_options(): {% set async_class_prefix = "Async" if is_async else "" %} http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() - {% if not is_async %} - {# TODO (ohmayr): Make this unconditional once REST interceptors are supported for async. Googlers, - see internal tracking issue: b/362949568. 
#} - request, metadata = self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) - {% endif %} + request, metadata = {{ await_prefix }}self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) {% if body_spec %} @@ -222,7 +219,8 @@ def _get_http_options(): payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core. #} + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore {% else %} raise core_exceptions.from_http_response(response) {% endif %}{# is_async #} @@ -230,7 +228,7 @@ def _get_http_options(): {% endmacro %} -{% macro prep_wrapped_messages_async_method(service) %} +{% macro prep_wrapped_messages_async_method(api, service) %} def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -260,6 +258,16 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), {% endfor %}{# service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2197): Use `transport_safe_name` similar + # to what we do for non-mixin methods above. 
+ #} + self.{{ method_name|snake_case }}: self._wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %}{# method_name in api.mixin_api_methods.keys() #} } {% endmacro %} @@ -278,3 +286,161 @@ def _wrap_method(self, func, *args, **kwargs): kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) {% endmacro %} + +{# `create_interceptor_class` generates an Interceptor class for + # synchronous and asynchronous rest transports +#} +{% macro create_interceptor_class(api, service, method, is_async=False) %} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_docstring = "Asynchronous " if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} + +class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: + """{{ async_docstring }}Interceptor for {{ service.name }}. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the {{ async_method_name_prefix }}{{ service.name }}RestTransport. + + .. 
code-block:: python + class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): + {% for _, method in service.methods|dictsort if not method.client_streaming %} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + {% if not method.void %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response): + logging.log(f"Received response: {response}") + return response + + {% endif %} + + {% endfor %} + transport = {{ async_method_name_prefix }}{{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) + client = {{ async_prefix }}{{ service.client_name }}(transport=transport) + + + """ + {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2147): Remove the condition below once async rest transport supports the guarded methods. #} + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. 
+ """ + return request, metadata + + {% if not method.void %} + {% if not method.server_streaming %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: + {% else %} + {{ async_prefix }}def post_{{ method.name|snake_case }}(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator) -> rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator: + {% endif %} + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + + {% endif %} + {% endfor %} + + {% for name, signature in api.mixin_api_signatures.items() %} + {{ async_prefix }}def pre_{{ name|snake_case }}( + self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the request or metadata + before they are sent to the {{ service.name }} server. + """ + return request, metadata + + {{ async_prefix }}def post_{{ name|snake_case }}( + self, response: {{signature.response_type}} + ) -> {{signature.response_type}}: + """Post-rpc interceptor for {{ name|snake_case }} + + Override in a subclass to manipulate the response + after it is returned by the {{ service.name }} server but before + it is returned to user code. + """ + return response + + {% endfor %} +{% endmacro %} + +{% macro generate_mixin_call_method(service, api, name, sig, is_async) %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2198): generate _Mixin classes + # and @property methods into separate macros so that _Method and _Mixin classes can be defined all + # together and the @property methods for each can be defined after the class definitions. 
+#} +{% set async_prefix = "async " if is_async else "" %} +{% set async_method_name_prefix = "Async" if is_async else "" %} +{% set async_suffix = "_async" if is_async else "" %} +{% set await_prefix = "await " if is_async else "" %} + +@property +def {{ name|snake_case }}(self): + return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore + +class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): + def __hash__(self): + return hash("{{ async_method_name_prefix }}{{ service.name }}RestTransport.{{ name }}") + + {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} + {{ response_method(body_spec) | indent(4) }} + + {{ async_prefix }}def __call__(self, + request: {{ sig.request_type }}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> {{ sig.response_type }}: + + r"""Call the {{- ' ' -}} + {{ (name|snake_case).replace('_',' ')|wrap(width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request ({{ sig.request_type }}): + The request object for {{ name }} method. + retry (google.api_core.retry{{ async_suffix }}.{{ async_method_name_prefix }}Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + {% if sig.response_type != 'None' %} + + Returns: + {{ sig.response_type }}: Response from {{ name }} method. 
+ {% endif %} + """ + {{ rest_call_method_common(body_spec, name, service.name, is_async)|indent(4) }} + + {% if sig.response_type == "None" %} + return {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(None) + {% else %} + {% if is_async %} + content = await response.read() + {% else %} + content = response.content.decode("utf-8") + {% endif %} + resp = {{ sig.response_type }}() + resp = json_format.Parse(content, resp) + resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) + return resp + {% endif %} + +{% endmacro %} \ No newline at end of file diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 11171569bdf3..1abddd198eb9 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -513,11 +513,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -632,11 +628,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -689,11 +681,7 @@ class {{ service.async_client_name }}: # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 168955c8f650..74803f51f30b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -66,7 +66,14 @@ from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .transports.rest import {{ service.name }}RestTransport {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} -from .transports.rest_asyncio import Async{{ service.name }}RestTransport +try: + from .transports.rest_asyncio import Async{{ service.name }}RestTransport + HAS_ASYNC_REST_DEPENDENCIES = True +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. 
#} +except ImportError as e: # pragma: NO COVER + HAS_ASYNC_REST_DEPENDENCIES = False + ASYNC_REST_EXCEPTION = e + {% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -87,7 +94,8 @@ class {{ service.client_name }}Meta(type): _transport_registry["rest"] = {{ service.name }}RestTransport {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} - _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport {% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -104,6 +112,12 @@ class {{ service.client_name }}Meta(type): The transport class to use. """ # If a specific transport is requested, return that one. + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} + {% if rest_async_io_enabled %} + {# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + raise ASYNC_REST_EXCEPTION + {% endif %} if label: return cls._transport_registry[label] @@ -552,16 +566,51 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): self._use_mtls_endpoint)) if not transport_provided: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. 
#} + {% if rest_async_io_enabled %} + transport_init: Union[Type[{{ service.name }}Transport], Callable[..., {{ service.name }}Transport]] = ( + {{ service.client_name }}.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., {{ service.name }}Transport], transport) + ) + + if "rest_asyncio" in str(transport_init): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2136): Support the following parameters in async rest: #} + unsupported_params = { + "google.api_core.client_options.ClientOptions.credentials_file": self._client_options.credentials_file, + "google.api_core.client_options.ClientOptions.scopes": self._client_options.scopes, + "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, + "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, + "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, + + } + provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + if provided_unsupported_params: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once we add a version check for google-api-core. #} + raise core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore + f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" + ) + self._transport = transport_init( + credentials=credentials, + host=self._api_endpoint, + client_info=client_info, + ) + return + + {% endif %} import google.auth._default # type: ignore if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): credentials = google.auth._default.get_api_key_credentials(api_key_value) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. 
#} + {% if not rest_async_io_enabled %} transport_init: Union[Type[{{ service.name }}Transport], Callable[..., {{ service.name }}Transport]] = ( {{ service.client_name }}.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., {{ service.name }}Transport], transport) ) + {% endif %} # initialize with the provided callable or the passed in class self._transport = transport_init( credentials=credentials, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index 66be2e5c29a7..9745b08d789c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -1,9 +1,11 @@ +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. #} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% block content %} from collections import OrderedDict -from typing import Dict, Type +from typing import Dict, Type{% if rest_async_io_enabled %}, Tuple{% endif +%} from .base import {{ service.name }}Transport {% if 'grpc' in opts.transport %} @@ -13,6 +15,17 @@ from .grpc_asyncio import {{ service.name }}GrpcAsyncIOTransport {% if 'rest' in opts.transport %} from .rest import {{ service.name }}RestTransport from .rest import {{ service.name }}RestInterceptor +{% if rest_async_io_enabled %} +ASYNC_REST_CLASSES: Tuple[str, ...] 
+try: + from .rest_asyncio import Async{{ service.name }}RestTransport + from .rest_asyncio import Async{{ service.name }}RestInterceptor + ASYNC_REST_CLASSES = ('Async{{ service.name }}RestTransport', 'Async{{ service.name }}RestInterceptor') + HAS_REST_ASYNC = True +except ImportError: # pragma: NO COVER + ASYNC_REST_CLASSES = () + HAS_REST_ASYNC = False +{% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -25,6 +38,10 @@ _transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport {% endif %} {% if 'rest' in opts.transport %} _transport_registry['rest'] = {{ service.name }}RestTransport +{% if rest_async_io_enabled %} +if HAS_REST_ASYNC: # pragma: NO COVER + _transport_registry['rest_asyncio'] = Async{{ service.name }}RestTransport +{% endif %}{# if rest_async_io_enabled #} {% endif %} __all__ = ( @@ -37,5 +54,5 @@ __all__ = ( '{{ service.name }}RestTransport', '{{ service.name }}RestInterceptor', {% endif %} -) +){% if 'rest' in opts.transport and rest_async_io_enabled%} + ASYNC_REST_CLASSES{%endif%} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 index f01102a18f50..691b98f0ef0d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins.py.j2 @@ -17,53 +17,7 @@ {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {% if "rest" in opts.transport %} - {% for name, sig in api.mixin_api_signatures.items() %} - @property - def {{ name|snake_case }}(self): - return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore - - class _{{ name }}(_Base{{ service.name 
}}RestTransport._Base{{name}}, {{service.name}}RestStub): - {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} - {{ shared_macros.response_method(body_spec)|indent(8) }} - - def __call__(self, - request: {{ sig.request_type }}, *, - retry: OptionalRetry=gapic_v1.method.DEFAULT, - timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), - ) -> {{ sig.response_type }}: - - r"""Call the {{- ' ' -}} - {{ (name|snake_case).replace('_',' ')|wrap( - width=70, offset=45, indent=8) }} - {{- ' ' -}} method over HTTP. - - Args: - request ({{ sig.request_type }}): - The request object for {{ name }} method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - {% if sig.response_type != 'None' %} - - Returns: - {{ sig.response_type }}: Response from {{ name }} method. 
- {% endif %} - """ - {{ shared_macros.rest_call_method_common(body_spec, name, service.name)|indent(8) }} - - {% if sig.response_type == "None" %} - return self._interceptor.post_{{ name|snake_case }}(None) - {% else %} - - resp = {{ sig.response_type }}() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_{{ name|snake_case }}(resp) - return resp - {% endif %} - + {{ shared_macros.generate_mixin_call_method(service, api, name, sig, is_async=False) | indent(4) }} {% endfor %} {% endif %} {# rest in opts.transport #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 index 2fd6d8bafe63..16cc77ea937c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_rest_mixins_base.py.j2 @@ -20,7 +20,9 @@ {% for name, sig in api.mixin_api_signatures.items() %} class _Base{{ name }}: - + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + {{ shared_macros.http_options_method(api.mixin_http_options["{}".format(name)])|indent(8)}} @staticmethod diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index fc4b16f6c8ec..e75d03a761da 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -171,7 +171,14 @@ class {{ 
service.name }}Transport(abc.ABC): default_timeout={{ method.timeout }}, client_info=client_info, ), - {% endfor %} {# precomputed wrappers loop #} + {% endfor %}{# method in service.methods.values() #} + {% for method_name in api.mixin_api_methods.keys() %} + self.{{ method_name|snake_case }}: gapic_v1.method.wrap_method( + self.{{ method_name|snake_case }}, + default_timeout=None, + client_info=client_info, + ), + {% endfor %} {# method_name in api.mixin_api_methods.keys() #} } def close(self): diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 8f26b936b1b6..0bb3126e5f11 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -388,7 +388,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): return self._stubs["test_iam_permissions"] {% endif %} - {{ shared_macros.prep_wrapped_messages_async_method(service)|indent(4) }} + {{ shared_macros.prep_wrapped_messages_async_method(api, service)|indent(4) }} {{ shared_macros.wrap_async_method_macro()|indent(4) }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index d7864e4b1291..796aa9932570 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -48,85 +48,7 @@ DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( ) -class {{ service.name }}RestInterceptor: - """Interceptor for {{ service.name }}. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the {{ service.name }}RestTransport. - - .. code-block:: python - class MyCustom{{ service.name }}Interceptor({{ service.name }}RestInterceptor): - {% for _, method in service.methods|dictsort if not method.client_streaming %} - def pre_{{ method.name|snake_case }}(self, request, metadata): - logging.log(f"Received request: {request}") - return request, metadata - - {% if not method.void %} - def post_{{ method.name|snake_case }}(self, response): - logging.log(f"Received response: {response}") - return response - {% endif %} - -{% endfor %} - transport = {{ service.name }}RestTransport(interceptor=MyCustom{{ service.name }}Interceptor()) - client = {{ service.client_name }}(transport=transport) - - - """ - {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} - def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the request or metadata - before they are sent to the {{ service.name }} server. 
- """ - return request, metadata - - {% if not method.void %} - {% if not method.server_streaming %} - def post_{{ method.name|snake_case }}(self, response: {{method.output.ident}}) -> {{method.output.ident}}: - {% else %} - def post_{{ method.name|snake_case }}(self, response: rest_streaming.ResponseIterator) -> rest_streaming.ResponseIterator: - {% endif %} - """Post-rpc interceptor for {{ method.name|snake_case }} - - Override in a subclass to manipulate the response - after it is returned by the {{ service.name }} server but before - it is returned to user code. - """ - return response - {% endif %} - {% endfor %} - - {% for name, signature in api.mixin_api_signatures.items() %} - def pre_{{ name|snake_case }}( - self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: - """Pre-rpc interceptor for {{ name|snake_case }} - - Override in a subclass to manipulate the request or metadata - before they are sent to the {{ service.name }} server. - """ - return request, metadata - - def post_{{ name|snake_case }}( - self, response: {{signature.response_type}} - ) -> {{signature.response_type}}: - """Post-rpc interceptor for {{ name|snake_case }} - - Override in a subclass to manipulate the response - after it is returned by the {{ service.name }} server but before - it is returned to user code. 
- """ - return response - {% endfor %} +{{ shared_macros.create_interceptor_class(api, service, method, is_async=False) }} @dataclasses.dataclass diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index ab87c32c76a7..3c15b8e88622 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -1,24 +1,82 @@ +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. #} {% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% block content %} + +import google.auth +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2200): Add coverage for ImportError. #} +except ImportError as e: # pragma: NO COVER + raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install {{ api.naming.warehouse_package_name }}[async_rest]`") from e + +from google.auth.aio import credentials as ga_credentials_async # type: ignore + +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 +{% if service.has_lro %} +from google.api_core import operations_v1 +{% endif %}{# service.has_lro #} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} +from google.api_core import retry_async as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming_async # type: ignore + + +from google.protobuf import json_format +{% if service.has_lro %} +from google.api_core import operations_v1 +{% endif %} +{% if opts.add_iam_methods or api.has_iam_mixin %} +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +{% endif %} +{% if api.has_location_mixin %} +from google.cloud.location import locations_pb2 # type: ignore +{% endif %} -from typing import Any, Optional +import json # type: ignore +import dataclasses +from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union + +{{ shared_macros.operations_mixin_imports(api, service, opts) }} from .rest_base import _Base{{ service.name }}RestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -{# TODO (https://github.com/googleapis/gapic-generator-python/issues/2128): Update `rest_version` to include the transport dependency version. 
#} +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=None, + rest_version=google.auth.__version__ ) +{{ shared_macros.create_interceptor_class(api, service, method, is_async=True) }} + +@dataclasses.dataclass +class Async{{service.name}}RestStub: + _session: AsyncAuthorizedSession + _host: str + _interceptor: Async{{service.name}}RestInterceptor + class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): """Asynchronous REST backend transport for {{ service.name }}. @@ -30,12 +88,13 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, + def __init__(self, + *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - {# TODO (https://github.com/googleapis/gapic-generator-python/issues/2129): Update the default type for credentials. #} - credentials: Optional[Any] = None, + credentials: Optional[ga_credentials_async.Credentials] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, url_scheme: str = 'https', + interceptor: Optional[Async{{ service.name }}RestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -48,8 +107,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. - {# TODO (https://github.com/googleapis/gapic-generator-python/issues/2129): Update the default type for credentials. 
#} - credentials (Optional[Any]): The + credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -72,9 +130,156 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): url_scheme=url_scheme, api_audience=None ) + {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once + # we update the type hints for credentials to include asynchronous credentials in the client layer. + #} + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + self._interceptor = interceptor or Async{{ service.name }}RestInterceptor() + self._wrap_with_kind = True + self._prep_wrapped_messages(client_info) + {% if service.has_lro %} + self._operations_client: Optional[operations_v1.OperationsClient] = None + {% endif %} + + + {{ shared_macros.prep_wrapped_messages_async_method(api, service)|indent(4) }} + + {{ shared_macros.wrap_async_method_macro()|indent(4) }} + + {% for method in service.methods.values()|sort(attribute="name") %} + class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, Async{{service.name}}RestStub): + def __hash__(self): + return hash("Async{{service.name}}RestTransport.{{method.name}}") + + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. 
#} + {% if method.http_options and not method.client_streaming %} + {% set body_spec = method.http_options[0].body %} + {{ shared_macros.response_method(body_spec, is_async=True)|indent(8) }} + + {% endif %}{# method.http_options and not method.client_streaming and not method.paged_result_field #} + async def __call__(self, + request: {{method.input.ident}}, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Update return type for client streaming method. #} + ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming_async.AsyncResponseIterator{% endif %}{% endif %}: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. #} + {% if method.http_options and not method.client_streaming %} + r"""Call the {{- ' ' -}} + {{ (method.name|snake_case).replace('_',' ')|wrap( + width=70, offset=45, indent=8) }} + {{- ' ' -}} method over HTTP. + + Args: + request (~.{{ method.input.ident }}): + The request object.{{ ' ' }} + {{- method.input.meta.doc|rst(width=72, indent=16, nl=False) }} + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ {% if not method.void %} + + Returns: + ~.{{ method.output.ident }}: + {{ method.output.meta.doc|rst(width=72, indent=16) }} + {% endif %} + """ + + {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name, is_async=True)|indent(8) }} + + {% if not method.void %} + # Return the response + {% if method.server_streaming %} + resp = rest_streaming_async.AsyncResponseIterator(response, {{method.output.ident}}) + {% else %} + resp = {{method.output.ident}}() + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2189): Investigate if the proto-plus conversion below is needed for a streaming response. #} + {% if method.output.ident.is_proto_plus_type %} + pb_resp = {{method.output.ident}}.pb(resp) + {% else %} + pb_resp = resp + {% endif %}{# if method.output.ident.is_proto_plus_type #} + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + {% endif %}{# if method.server_streaming #} + resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) + return resp + + {% endif %}{# method.void #} + + {% else %} + raise NotImplementedError( + "Method {{ method.name }} is not available over REST transport" + ) + {% endif %}{# method.http_options and not method.client_streaming #} + + {% endfor %} + + {% if service.has_lro %} + + @property + def operations_client(self) -> AsyncOperationsRestClient: + """Create the async client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + {% for selector, rules in api.http_options.items() %} + {% if selector.startswith('google.longrunning.Operations') %} + '{{ selector }}': [ + {% for rule in rules %} + { + 'method': '{{ rule.method }}', + 'uri': '{{ rule.uri }}', + {% if rule.body %} + 'body': '{{ rule.body }}', + {% endif %}{# rule.body #} + }, + {% endfor %}{# rules #} + ], + {% endif %}{# selector.startswith Operations #} + {% endfor %}{# http_options #} + } + + rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + http_options=http_options, + path_prefix="{{ service.client_package_version }}" + ) + + self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + {% endif %} + + {% for method in service.methods.values()|sort(attribute="name") %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2154): Remove `type: ignore`. 
#} + @property + def {{method.transport_safe_name|snake_case}}(self) -> Callable[ + [{{method.input.ident}}], + {{method.output.ident}}]: + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore + + {% endfor %} + {% for name, sig in api.mixin_api_signatures.items() %} + {{ shared_macros.generate_mixin_call_method(service, api, name, sig, is_async=True) | indent(4) }} + {% endfor %} @property def kind(self) -> str: return "rest_asyncio" + async def close(self): + await self._session.close() + {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 index 9e4f26fcb30c..b79785afc517 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 @@ -31,7 +31,6 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore {% endif %} from .base import {{service.name}}Transport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -56,7 +55,7 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): {# TODO: handle mtls stuff if that is relevant for rest transport #} def __init__(self, *, host: str{% if service.host %} = '{{ service.host }}'{% endif %}, - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -66,7 +65,12 @@ class _Base{{ service.name 
}}RestTransport({{service.name}}Transport): Args: host ({% if service.host %}Optional[str]{% else %}str{% endif %}): {{ ' ' }}The hostname to connect to {% if service.host %}(default: '{{ service.host }}'){% endif %}. - credentials (Optional[google.auth.credentials.Credentials]): The + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2173): Type hint for credentials is + # set to `Any` to support async and sync credential types in the parent rest transport classes. + # However, we should have a stronger type here such as an abstract base credentials + # class leveraged by sync and async credential classes. + #} + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 452c7061353d..5a6042e981b8 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -74,6 +74,7 @@ def unit(session, protobuf_implementation): "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2201) Add a `unit_rest_async` nox session to run tests with [async_rest] extra installed. #} @nox.session(python=ALL_PYTHON[-1]) @nox.parametrize( diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 48427184f958..28b217dd7613 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -1,3 +1,5 @@ +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. 
#} +{% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} {% extends '_base.py.j2' %} {% from '_pypi_packages.j2' import pypi_packages %} {% block content %} @@ -46,6 +48,15 @@ dependencies = [ {% endif %} {% endfor %} ] +extras = { +{% if rest_async_io_enabled %} + "async_rest": [ + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2208): Update the minimum supported version of api-core to `2.21.0` when released. #} + "google-api-core[grpc] >= 2.21.0rc0, < 3.0.0dev", + "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + ], +{% endif %} +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/{{ api.naming.warehouse_package_name }}" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -88,6 +99,7 @@ setuptools.setup( packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 33cbd8117d51..169807a9617c 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -1,67 +1,3 @@ -{% if 'rest' in opts.transport %} -{% for name, sig in api.mixin_api_signatures.items() %} - -def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request_type={{ sig.request_type }}): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request) - - # Mock the http request call within the method and fake 
a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.{{ name|snake_case }}(request) - -@pytest.mark.parametrize("request_type", [ - {{ sig.request_type }}, - dict, -]) -def test_{{ name|snake_case }}_rest(request_type): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - {% if sig.response_type == "None" %} - return_value = None - {% else %} - return_value = {{ sig.response_type }}() - {% endif %} - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if sig.response_type == "None" %} - json_return_value = '{}' - {% else %} - json_return_value = json_format.MessageToJson(return_value) - {% endif %} - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.{{ name|snake_case }}(request) - - # Establish that the response is the type that we expect. 
- {% if sig.response_type == "None" %} - assert response is None - {% else %} - assert isinstance(response, {{ sig.response_type }}) - {% endif %} -{% endfor %} -{% endif %} - {% if api.has_operations_mixin and ('grpc' in opts.transport or 'grpc_asyncio' in opts.transport) %} {% if "DeleteOperation" in api.mixin_api_methods %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index fa9545d2a1a4..0a3b1d21f910 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -20,7 +20,7 @@ except ImportError: # pragma: NO COVER import grpc from grpc.experimental import aio {% if "rest" in opts.transport %} -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json {% endif %} @@ -30,6 +30,16 @@ from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers {% if 'rest' in opts.transport %} +{% if rest_async_io_enabled %} +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + HAS_ASYNC_REST_EXTRA = True +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. 
#} +except ImportError: # pragma: NO COVER + HAS_ASYNC_REST_EXTRA = False +{% endif %}{# if rest_async_io_enabled #} from requests import Response from requests import Request, PreparedRequest from requests.sessions import Session @@ -39,6 +49,7 @@ from google.protobuf import json_format try: from google.auth.aio import credentials as ga_credentials_async HAS_GOOGLE_AUTH_AIO = True +{# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False @@ -91,6 +102,11 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endfilter %} {{ shared_macros.add_google_api_core_version_header_import(service.version) }} +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -1053,12 +1069,18 @@ def test_transport_adc(transport_class): {% endfor %} {% for conf in configs %} {{ test_macros.transport_kind_test(**conf) }} - +{{ test_macros.run_transport_tests_for_config(**conf) }} {{ test_macros.empty_call_test(**conf) }} {{ test_macros.routing_parameter_test(**conf) }} +{% if service.has_lro %} +{{ test_macros.lro_client_test(**conf) }} +{% endif %}{# service.has_lro #} {% endfor %} - +{% if rest_async_io_enabled %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the below macro when async rest is GA and supports all required parameters. #} +{{ test_macros.async_rest_unsupported_params_test(service) }} +{% endif %} {% if 'grpc' in opts.transport %} def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
@@ -1362,26 +1384,6 @@ def test_{{ service.name|snake_case }}_http_transport_client_cert_source_for_mtl ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - -{% if service.has_lro -%} -def test_{{ service.name|snake_case }}_rest_lro_client(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, - ) - - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client -{%- endif %} - - {% endif %} {# rest #} @pytest.mark.parametrize("transport_name", [ @@ -1672,40 +1674,11 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -{% if 'grpc' in opts.transport %} -@pytest.mark.asyncio -async def test_transport_close_async(): - client = {{ service.async_client_name }}( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() -{% endif %} - {% include 'tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2' %} -def test_transport_close(): - transports = { - {% if 'rest' in opts.transport %} - "rest": "_session", - {% endif %} - {% if 'grpc' in opts.transport %} - "grpc": "_grpc_channel", - {% endif %} - } - - for transport, close_name in transports.items(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() +{% for conf in configs %} +{{ test_macros.transport_close_test(**conf) 
}} +{% endfor %} def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index f3c20ea5dba1..ccd963ffbda1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1004,204 +1004,6 @@ def test_{{ method_name }}_raw_page_lro(): {% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. #} {% if not method.client_streaming %} -@pytest.mark.parametrize("request_type", [ - {{ method.input.ident }}, - dict, -]) -def test_{{ method_name }}_rest(request_type): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request(method) }} - {% for field in method.body_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add 
`# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["{{ field.name }}"][field])): - del request_init["{{ field.name }}"][field][i][subfield] - else: - del request_init["{{ field.name }}"][field][subfield] - {% endif %} - {% endfor %} - request = request_type(**request_init) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% elif method.extended_lro %} - return_value = {{ method.extended_lro.operation_type.ident }}( - {% for field in method.extended_lro.operation_type.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in method.output.oneof_fields().values() %} - {% if (oneof_fields | rejectattr('message') | list) %} - {% with field = (oneof_fields | rejectattr('message') | first) %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endif %} - {% endfor %} - ) - {% else %} - return_value = {{ method.output.ident }}( - {% for field in method.output.fields.values() | rejectattr('message')%} - {% if not field.oneof or field.proto3_optional %} - {{ field.name }}={{ field.mock_value }}, - {% endif %}{% endfor %} - {# This is a hack to only pick one field #} - {% for oneof_fields in 
method.output.oneof_fields().values() %} - {% if (oneof_fields | rejectattr('message') | list) %} - {% with field = (oneof_fields | rejectattr('message') | first) %} - {{ field.name }}={{ field.mock_value }}, - {% endwith %} - {% endif %} - {% endfor %} - ) - {% endif %} - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if method.void %} - json_return_value = '' - {% elif method.lro %} - json_return_value = json_format.MessageToJson(return_value) - {% else %} - {% if method.output.ident.is_proto_plus_type %} - # Convert return value to protobuf type - return_value = {{ method.output.ident }}.pb(return_value) - {% endif %} - json_return_value = json_format.MessageToJson(return_value) - {% endif %} - - {% if method.server_streaming %} - json_return_value = "[{}]".format(json_return_value) - {% endif %} - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - {% if method.client_streaming %} - response = client.{{ method_name }}(iter(requests)) - {% elif method.server_streaming %} - with mock.patch.object(response_value, 'iter_content') as iter_content: - iter_content.return_value = iter(json_return_value) - response = client.{{ method_name }}(request) - {% else %} - response = client.{{ method_name }}(request) - {% endif %} - - {% if "next_page_token" in method_output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} - {# Cheeser assertion to force code coverage for bad paginated methods #} - assert response.raw_page is response - - {% endif %} - - {% if method.server_streaming %} - assert isinstance(response, Iterable) - response = next(response) - {% endif %} - - # Establish that the response is the type that we expect. 
- {% if method.void %} - assert response is None - {% elif method.lro %} - assert response.operation.name == "operations/spam" - {% elif method.extended_lro and not full_extended_lro %} - assert isinstance(response, {{ method.extended_lro.operation_type.ident }}) - {% else %} - assert isinstance(response, {{ method.client_output.ident }}) - {% for field in method_output.fields.values() | rejectattr('message') %} - {% if not field.oneof or field.proto3_optional %} - {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} - {% if field.repeated %} - for index in range(len(response.{{ field.name }})): - assert math.isclose( - response.{{ field.name }}[index], - {{ field.mock_value }}[index], - rel_tol=1e-6, - ) - {% else %}{# field.repeated #} - assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) - {% endif %}{# field.repeated #} - {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} - assert response.{{ field.name }} is {{ field.mock_value }} - {% else %} - assert response.{{ field.name }} == {{ field.mock_value }} - {% endif %} - {% endif %}{# end oneof/optional #} - {% endfor %} - {% endif %} - def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1410,141 +1212,49 @@ def test_{{ method_name }}_rest_unset_required_fields(): {% endif %}{# required_fields #} -{% if not method.client_streaming %} -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_{{ method_name }}_rest_interceptors(null_interceptor): - transport = transports.{{ service.name }}RestTransport( +{% if method.flattened_fields and not method.client_streaming %} +def test_{{ method_name }}_rest_flattened(): + client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.{{ service.name}}RestInterceptor(), - ) - client = {{ 
service.client_name }}(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - {% if method.lro %} - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - {% endif %} - {% if not method.void %} - mock.patch.object(transports.{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ - {% endif %} - mock.patch.object(transports.{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: - pre.assert_not_called() - {% if not method.void %} - post.assert_not_called() - {% endif %} - {% if method.input.ident.is_proto_plus_type %} - pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') {% else %} - pb_message = {{ method.input.ident }}() + return_value = {{ method.output.ident }}() {% endif %} - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - {% if not method.void %} - req.return_value._content = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + # get arguments that satisfy an http rule for this method + sample_request = {{ method.http_options[0].sample_request(method) }} - {% if method.server_streaming %} - req.return_value._content = "[{}]".format(req.return_value._content) - {% endif %} + # get truthy value for each flattened field + mock_args = dict( + {% for field in method.flattened_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + {{ field.name }}={{ field.mock_value }}, + {% endif %} + {% endfor %} + ) + mock_args.update(sample_request) - {% endif %} - - request = {{ method.input.ident }}() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - {% if not method.void %} - post.return_value = {{ method.output.ident }}() - {% endif %} - - client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - {% if not method.void %} - post.assert_called_once() - {% endif %} - -{% endif %}{# streaming #} - - -def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_type={{ method.input.ident }}): - client = {{ service.client_name }}( - 
credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {{ method.http_options[0].sample_request(method) }} - request = request_type(**request_init) - {% if method.client_streaming %} - requests = [request] - {% endif %} - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - {% if method.client_streaming %} - client.{{ method_name }}(iter(requests)) - {% else %} - client.{{ method_name }}(request) - {% endif %} - - -{% if method.flattened_fields and not method.client_streaming %} -def test_{{ method_name }}_rest_flattened(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- {% if method.void %} - return_value = None - {% elif method.lro %} - return_value = operations_pb2.Operation(name='operations/spam') - {% else %} - return_value = {{ method.output.ident }}() - {% endif %} - - # get arguments that satisfy an http rule for this method - sample_request = {{ method.http_options[0].sample_request(method) }} - - # get truthy value for each flattened field - mock_args = dict( - {% for field in method.flattened_fields.values() %} - {% if not field.oneof or field.proto3_optional %} - {# ignore oneof fields that might conflict with sample_request #} - {{ field.name }}={{ field.mock_value }}, - {% endif %} - {% endfor %} - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - {% if method.void %} - json_return_value = '' - {% elif method.lro %} - json_return_value = json_format.MessageToJson(return_value) - {% else %} - {% if method.output.ident.is_proto_plus_type %} - # Convert return value to protobuf type - return_value = {{ method.output.ident }}.pb(return_value) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} + {% if method.output.ident.is_proto_plus_type %} + # Convert return value to protobuf type + return_value = {{ method.output.ident }}.pb(return_value) {% endif %} json_return_value = json_format.MessageToJson(return_value) {% endif %} @@ -1709,25 +1419,7 @@ def test_{{ method_name }}_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -{%- else %}{# paged_result_field #} - -def test_{{ method_name }}_rest_error(): - client = {{ service.client_name }}( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - {%- if not method.http_options %} - # Since a `google.api.http` 
annotation is required for using a rest transport - # method, this should error. - with pytest.raises(NotImplementedError) as not_implemented_error: - client.{{ method_name }}({}) - assert ( - "Method {{ method.name }} is not available over REST transport" - in str(not_implemented_error.value) - ) - - {%- endif %}{# not method.http_options #} -{% endif %}{# flattened_fields #} +{%- endif %}{# paged_result_field #} {% else %}{# this is an lro or streaming method #} def test_{{ method_name }}_rest_unimplemented(): @@ -1862,12 +1554,593 @@ def test_{{ method_name }}_rest_no_http_options(): {% macro transport_kind_test(service, api, transport, is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} def test_transport_kind_{{ transport_name }}(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} transport = {{ get_client(service, is_async) }}.get_transport_class("{{ transport_name }}")( credentials={{get_credentials(is_async)}} ) assert transport.kind == "{{ transport_name }}" + {% endmacro %}{# transport_kind_test #} +{% macro lro_client_test(service, api, transport, is_async) %} +{% if 'rest' in transport %} +{% set transport_name = get_transport_name(transport, is_async) %} +def test_{{ service.name|snake_case }}_{{ transport_name }}_lro_client(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service=service, is_async=is_async) }}( + credentials={{ get_credentials(is_async=is_async) }}, + transport="{{ transport_name }}", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, + {% if is_async %}operations_v1.AsyncOperationsRestClient{% else %}operations_v1.AbstractOperationsClient{% endif %}, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client +{% endif %}{# 'rest' in transport #} +{% endmacro %}{# lro_client_test #} + + +{% macro transport_close_test(service, api, transport, is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set close_session = { + 'rest': "_session", + 'grpc': "_grpc_channel"} +-%} +{{async_decorator}} +{{async_prefix}}def test_transport_close_{{transport_name}}(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + with mock.patch.object(type(getattr(client.transport, "{{close_session[transport]}}")), "close") as close: + {{async_prefix}}with client: + close.assert_not_called() + close.assert_called_once() + +{% endmacro %} + +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove / Update this test macro when async rest is GA. #} +{% macro async_rest_unsupported_params_test(service) %} +def test_unsupported_parameter_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + options = client_options.ClientOptions(quota_project_id="octopus") + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once we add a version check for google-api-core. 
#} + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + client = {{ get_client(service, True) }}( + credentials={{get_credentials(True)}}, + transport="rest_asyncio", + client_options=options + ) + +{% endmacro %} + +{# get_await_prefix sets an "await" keyword + # to a method call if is_async=True. +#} +{% macro get_await_prefix(is_async) %} +{{- "await " if is_async else "" -}} +{% endmacro %} + +{# get_async_prefix sets an "async" keyword + # to a method definition if is_async=True. +#} +{% macro get_async_prefix(is_async) %} +{{- "async " if is_async else "" -}} +{% endmacro %} + +{# get_async_decorator sets a "@pytest.mark.asyncio" decorator + # to an async test method if is_async=True. +#} +{% macro get_async_decorator(is_async) %} +{{- "@pytest.mark.asyncio " if is_async else "" -}} +{% endmacro %} + +{# is_rest_unsupported_method renders: + # 'True' if transport is REST (sync or async) and method is a client_streaming method. + # 'False' otherwise. + # NOTE: There are no plans to add support for client streaming. +#} +{% macro is_rest_unsupported_method(method, is_async) %} +{%- if method.client_streaming -%} +{{'True'}} +{%- else -%} +{{'False'}} +{%- endif -%} +{% endmacro %} + +{# run_transport_tests_for_config generates all the rest specific tests for both +# sync and async transport. +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2142): Continue migrating the test cases +# in macro::run_transport_tests_for_config into here, and then delete that macro in favor of this one. +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2153): As a follow up, migrate gRPC test cases +# into `run_transport_tests_for_config` and make any of the rest specific specific macros which are called within more generic. 
+#} +{% macro run_transport_tests_for_config(service, api, transport, is_async) %} +{% for method in service.methods.values() %} +{% if is_rest_unsupported_method(method, is_async) == 'True' or not method.http_options %} +{{ rest_method_not_implemented_error(service, method, transport, is_async) }} +{% else %} +{% if 'rest' in transport %} +{{ bad_request_test(service, method, transport, is_async) }} +{{ call_success_test(service, method, transport, is_async) }} +{{ inteceptor_class_test(service, method, transport, is_async) }} +{% endif %}{# if 'rest' in transport #} +{% endif %}{# is_rest_unsupported_method(method, is_async) == 'False' and method.http_options #} +{% endfor %}{# for method in service.methods.values() #} +{% for name, sig in api.mixin_api_signatures.items() %} +{% if 'rest' in transport %} +{{ bad_request_mixins_test(service, api, name, sig, transport, is_async) }} +{{ call_success_mixins_test(service, api, name, sig, transport, is_async) }} +{% endif %}{# if 'rest' in transport #} +{% endfor %} +{{ initialize_client_with_transport_test(service, transport, is_async) }} +{% endmacro %} + +{# rest_method_not_implemented_error generates tests for methods + # which are not supported for rest transport. 
+#} +{% macro rest_method_not_implemented_error(service, method, transport, is_async) %} +{% if 'rest' in transport %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = method.name|snake_case %} +{{async_decorator}} +{{async_prefix}}def test_{{ method_name }}_{{transport_name}}_error(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + {{await_prefix}}client.{{ method_name }}({}) + assert ( + "Method {{ method.name }} is not available over REST transport" + in str(not_implemented_error.value) + ) + +{% endif %}{# if 'rest' in transport #} +{% endmacro %} + +{# initialize_client_with_transport_test adds coverage for transport clients. + # Note: This test case is needed because we aren't unconditionally + # generating the not implemented coverage test for every client. 
+#} +{% macro initialize_client_with_transport_test(service, transport, is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +def test_initialize_client_w_{{transport_name}}(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + assert client is not None + +{% endmacro %} + +{# bad_request_test generates tests for rest methods + # which raise a google.api.core.exceptions.BadRequest error. +#} +{% macro bad_request_test(service, method, transport, is_async) %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = method.name|snake_case %} +{% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %} +{{ async_decorator }} +{{ async_prefix }}def test_{{ method_name }}_{{transport_name}}_bad_request(request_type={{ method.input.ident }}): +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. 
#} +{% if 'grpc' in transport %} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object({{mocked_session}}, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + {% if is_async %} + response_value.read = mock.AsyncMock(return_value=b'{}') + {% else %} + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + {% endif %}{# if is_async #} + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + {{ await_prefix }}client.{{ method_name }}(request) + +{% endif %}{# if 'grpc' in transport #} +{% endmacro %} + +{# bad_request_mixins_test generates tests for rest mixin methods + # which raise a google.api.core.exceptions.BadRequest error. 
+#} +{% macro bad_request_mixins_test(service, api, name, sig, transport, is_async=False) %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = name|snake_case %} +{% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %} +{{ async_decorator }} +{{ async_prefix }}def test_{{ method_name }}_{{ transport_name }}_bad_request(request_type={{ sig.request_type }}): +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #} +{% if 'grpc' in transport %} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}", + ) + request = request_type() + request = json_format.ParseDict({{ api.mixin_http_options["{}".format(name)][0].sample_request }}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object({{mocked_session}}, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + {% if is_async %} + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + {% else %} + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + {% endif %}{# if is_async #} + response_value.status_code = 400 + {% if is_async %} + response_value.request = mock.Mock() + {% else %} + response_value.request = Request() + {% endif %} + req.return_value = response_value + {{ await_prefix }}client.{{ method_name }}(request) +{% endif %}{# if 'grpc' in transport #} +{% endmacro %} + +{# call_success_test generates tests for rest methods + # when they make a successful request. + # NOTE: Currently, this macro does not support the following method + # types: [method.paged_result_field]. + # As support is added for the above methods, the relevant guard can be removed from within the macro + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2142): Clean up `rest_required_tests` as we add support for each of the method types metioned above. +#} +{% macro call_success_test(service, method, transport, is_async) %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = method.name|snake_case %} +{# NOTE: set method_output to method.extended_lro.operation_type for the following method types: + # (method.extended_lro and not full_extended_lro) +#} +{% set method_output = method.output %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2143): Update the guard below as we add support for each method, and keep it in sync with the guard in + # `rest_required_tests`, which should be the exact opposite. 
Remove it once we have all the methods supported in async rest transport that are supported in sync rest transport. + #} +{{ async_decorator }} +@pytest.mark.parametrize("request_type", [ + {{ method.input.ident }}, + dict, +]) +{{async_prefix}}def test_{{method_name}}_{{transport_name}}_call_success(request_type): +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #} +{% if 'grpc' in transport %} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}" + ) + + # send a request that will satisfy transcoding + request_init = {{ method.http_options[0].sample_request(method) }} + {% for field in method.body_fields.values() %} + {% if not field.oneof or field.proto3_optional %} + {# ignore oneof fields that might conflict with sample_request #} + request_init["{{ field.name }}"] = {{ field.merged_mock_value(method.http_options[0].sample_request(method).get(field.name)) }} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = {{ method.input.ident }}.meta.fields["{{ field.name }}"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["{{ field.name }}"][field])): + del request_init["{{ field.name }}"][field][i][subfield] + else: + del 
request_init["{{ field.name }}"][field][subfield] + {% endif %} + {% endfor %} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + {% if method.void %} + return_value = None + {% elif method.lro %} + return_value = operations_pb2.Operation(name='operations/spam') + {% elif method.extended_lro %} + return_value = {{ method.extended_lro.operation_type.ident }}( + {% for field in method.extended_lro.operation_type.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} + ) + {% else %} + return_value = {{ method.output.ident }}( + {% for field in method.output.fields.values() | rejectattr('message')%} + {% if not field.oneof or field.proto3_optional %} + {{ field.name }}={{ field.mock_value }}, + {% endif %}{% endfor %} + {# This is a hack to only pick one field #} + {% for oneof_fields in method.output.oneof_fields().values() %} + {% if (oneof_fields | rejectattr('message') | list) %} + {% with field = (oneof_fields | rejectattr('message') | first) %} + {{ field.name }}={{ field.mock_value }}, + {% endwith %} + {% endif %} + {% endfor %} + ) + {% endif %}{# method.void #} + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + {% if method.void %} + json_return_value = '' + {% else %}{# method.void #} + {% if method.output.ident.is_proto_plus_type %} + + # Convert return value to 
protobuf type + return_value = {{ method.output.ident }}.pb(return_value) + {% endif %}{# method.output.ident.is_proto_plus_type #} + json_return_value = json_format.MessageToJson(return_value) + {% endif %}{# method.void #} + {% if method.server_streaming %} + json_return_value = "[{}]".format(json_return_value) + {% if is_async %} + response_value.content.return_value = mock_async_gen(json_return_value) + {% else %}{# not is_async #} + response_value.iter_content = mock.Mock(return_value=iter(json_return_value)) + {% endif %}{# is_async #} + {% else %}{# not method.streaming #} + {% if is_async %} + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + {% else %}{# not is_async #} + response_value.content = json_return_value.encode('UTF-8') + {% endif %}{# is_async #} + {% endif %}{# method.server_streaming #} + req.return_value = response_value + response = {{ await_prefix }}client.{{ method_name }}(request) + {% if "next_page_token" in method_output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2199): The following assert statement is added to force + # code coverage for bad paginated methods. Investigate what bad paginated methods are, and follow up on why this assertion is required. + #} + + assert response.raw_page is response + {% endif %} + + {% if method.server_streaming %} + {% if is_async %} + assert isinstance(response, AsyncIterable) + response = await response.__anext__() + {% else %} + assert isinstance(response, Iterable) + response = next(response) + {% endif %} + {% endif %} + + # Establish that the response is the type that we expect. 
+ {% if method.void %} + assert response is None + {% elif method.lro %} + json_return_value = json_format.MessageToJson(return_value) + {% else %} + {% if is_async %} + assert isinstance(response, {{ method.client_output_async.ident }}) + {% else %} + assert isinstance(response, {{ method.client_output.ident }}) + {% endif %} + {% for field in method_output.fields.values() | rejectattr('message') %} + {% if not field.oneof or field.proto3_optional %} + {% if field.field_pb.type in [1, 2] %}{# Use approx eq for floats #} + {% if field.repeated %} + for index in range(len(response.{{ field.name }})): + assert math.isclose( + response.{{ field.name }}[index], + {{ field.mock_value }}[index], + rel_tol=1e-6, + ) + {% else %}{# field.repeated #} + assert math.isclose(response.{{ field.name }}, {{ field.mock_value }}, rel_tol=1e-6) + {% endif %}{# field.repeated #} + {% elif field.field_pb.type == 8 %}{# Use 'is' for bools #} + assert response.{{ field.name }} is {{ field.mock_value }} + {% else %} + assert response.{{ field.name }} == {{ field.mock_value }} + {% endif %}{# field.field_pb.type in [1, 2] #} + {% endif %}{# not field.oneof or field.proto3_optional #} + {% endfor %}{# field in method_output.fields.values() | rejectattr('message') #} + {% endif %}{# method.void #} + +{% endif %}{# if 'grpc' in transport #} +{% endmacro %}{# call_success_test #} + +{# call_success_mixins_test generates tests for rest mixin methods + # when they make a successful request. 
+#} +{% macro call_success_mixins_test(service, api, name, sig, transport, is_async=False) %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = name|snake_case %} +{% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %} +{{ async_decorator }} +@pytest.mark.parametrize("request_type", [ + {{ sig.request_type }}, + dict, +]) +{{ async_prefix }}def test_{{ method_name }}_{{ transport_name }}(request_type): +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2157): Refactor this macro to include `gRPC` coverage. #} +{% if 'grpc' in transport %} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + client = {{ get_client(service, is_async) }}( + credentials={{get_credentials(is_async)}}, + transport="{{transport_name}}", + ) + + request_init = {{ api.mixin_http_options["{}".format(name)][0].sample_request }} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object({{mocked_session}}, 'request') as req: + # Designate an appropriate value for the returned response. 
+ {% if sig.response_type == "None" %} + return_value = None + {% else %} + return_value = {{ sig.response_type }}() + {% endif %} + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + {% if sig.response_type == "None" %} + json_return_value = '{}' + {% else %} + json_return_value = json_format.MessageToJson(return_value) + {% endif %} + {% if is_async %} + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + {% else %} + response_value.content = json_return_value.encode('UTF-8') + {% endif %} + + req.return_value = response_value + + response = {{ await_prefix }}client.{{ method_name }}(request) + + # Establish that the response is the type that we expect. + {% if sig.response_type == "None" %} + assert response is None + {% else %} + assert isinstance(response, {{ sig.response_type }}) + {% endif %} +{% endif %}{# if 'grpc' in transport #} +{% endmacro %}{# call_success_mixins_test #} + {% macro empty_call_test(service, api, transport, is_async) %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2159): Currently this macro only supports gRPC. It should be updated to support REST @@ -1905,3 +2178,95 @@ def test_transport_kind_{{ transport_name }}(): {% endfor %}{# method in service.methods.values() #} {% endif %} {% endmacro %}{# routing_parameter_test #} + +{# inteceptor_class_test generates tests for rest interceptors. 
#} +{% macro inteceptor_class_test(service, method, transport, is_async) %} +{% set await_prefix = get_await_prefix(is_async) %} +{% set async_prefix = get_async_prefix(is_async) %} +{% set async_decorator = get_async_decorator(is_async) %} +{% set transport_name = get_transport_name(transport, is_async) %} +{% set method_name = method.name|snake_case %} +{% set async_method_prefix = "Async" if is_async else "" %} +{{async_decorator}} +@pytest.mark.parametrize("null_interceptor", [True, False]) +{{async_prefix}}def test_{{ method_name }}_{{transport_name}}_interceptors(null_interceptor): +{% if 'grpc' in transport %} + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2191): + # Currently this macro only supports REST. It should be updated to support gRPC + # transport as well. + #} + raise NotImplementedError("gRPC is currently not supported for this test case.") +{% else %}{# 'rest' in transport #} + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} + transport = transports.{{async_method_prefix}}{{ service.name }}RestTransport( + credentials={{get_credentials(is_async)}}, + interceptor=None if null_interceptor else transports.{{async_method_prefix}}{{ service.name}}RestInterceptor(), + ) + client = {{ get_client(service, is_async) }}(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + {% if method.lro %} + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + {% endif %} + {% if not method.void %} + mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ + {% endif %} + mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: + 
pre.assert_not_called() + {% if not method.void %} + post.assert_not_called() + {% endif %} + {% if method.input.ident.is_proto_plus_type %} + pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) + {% else %} + pb_message = {{ method.input.ident }}() + {% endif %}{# if method.input.ident.is_proto_plus_type #} + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + {% if not method.void %} + return_value = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} + + {% if method.server_streaming %} + {% if is_async %} + req.return_value.content.return_value = mock_async_gen(return_value) + {% else %}{# not is_async #} + req.return_value.iter_content = mock.Mock(return_value=iter(return_value)) + {% endif %}{# end is_async #} + {% else %}{# not method.server_streaming #} + {% if is_async %} + req.return_value.read = mock.AsyncMock(return_value=return_value) + {% else %}{# not is_async #} + req.return_value.content = return_value + {% endif %}{# end is_async #} + {% endif %}{# end method.server_streaming #} + {% endif %}{# end not method.void #} + + request = {{ method.input.ident }}() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + {% if not method.void %} + post.return_value = {{ method.output.ident }}() + {% endif %} + + {{await_prefix}}client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + {% if not method.void %} + post.assert_called_once() + {% endif %} +{% endif %}{# end 'grpc' in transport #} +{% endmacro%} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 0a28618b4ecd..8875a9907816 100644 --- 
a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -304,7 +304,18 @@ def showcase_library( f"{tmp_dir}/testing/constraints-{session.python}.txt" ) # Install the library with a constraints file. - session.install("-e", tmp_dir, "-r", constraints_path) + if session.python == "3.7": + session.install("-e", tmp_dir, "-r", constraints_path) + if rest_async_io_enabled: + # NOTE: We re-install `google-api-core` and `google-auth` to override the respective + # versions for each specified in constraints-3.7.txt. This is needed because async REST + # is not supported with the minimum version of `google-api-core` and `google-auth`. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2208): Update the minimum supported version of api-core to `2.21.0` when released. + session.install('--no-cache-dir', '--force-reinstall', "google-api-core[grpc, async_rest]==2.21.0rc0") + # session.install('--no-cache-dir', '--force-reinstall', "google-api-core==2.20.0") + session.install('--no-cache-dir', '--force-reinstall', "google-auth[aiohttp]==2.35.0") + else: + session.install("-e", tmp_dir + ("[async_rest]" if rest_async_io_enabled else ""), "-r", constraints_path) else: # The ads templates do not have constraints files. 
# See https://github.com/googleapis/gapic-generator-python/issues/1788 @@ -341,6 +352,33 @@ def showcase( env=env, ) +@nox.session(python=ALL_PYTHON) +def showcase_w_rest_async( + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), + env: typing.Optional[typing.Dict[str, str]] = {}, +): + """Run the Showcase test suite.""" + + with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True): + session.install("pytest", "pytest-asyncio") + test_directory = Path("tests", "system") + ignore_file = env.get("IGNORE_FILE") + pytest_command = [ + "py.test", + "--quiet", + *(session.posargs or [str(test_directory)]), + ] + if ignore_file: + ignore_path = test_directory / ignore_file + pytest_command.extend(["--ignore", str(ignore_path)]) + + session.run( + *pytest_command, + env=env, + ) + @nox.session(python=NEWEST_PYTHON) def showcase_mtls( @@ -393,7 +431,7 @@ def showcase_mtls_alternative_templates(session): ) -def run_showcase_unit_tests(session, fail_under=100): +def run_showcase_unit_tests(session, fail_under=100, rest_async_io_enabled=False): session.install( "coverage", "pytest", @@ -402,22 +440,38 @@ def run_showcase_unit_tests(session, fail_under=100): "asyncmock; python_version < '3.8'", "pytest-asyncio", ) - # Run the tests. - session.run( - "py.test", - *( - session.posargs - or [ - "-n=auto", - "--quiet", - "--cov=google", - "--cov-append", - f"--cov-fail-under={str(fail_under)}", - path.join("tests", "unit"), - ] - ), - ) + # NOTE: async rest is not supported against the minimum supported version of google-api-core. + # Therefore, we ignore the coverage requirement in this case. 
+ if session.python == "3.7" and rest_async_io_enabled: + session.run( + "py.test", + *( + session.posargs + or [ + "-n=auto", + "--quiet", + "--cov=google", + "--cov-append", + path.join("tests", "unit"), + ] + ), + ) + else: + session.run( + "py.test", + *( + session.posargs + or [ + "-n=auto", + "--quiet", + "--cov=google", + "--cov-append", + f"--cov-fail-under={str(fail_under)}", + path.join("tests", "unit"), + ] + ), + ) @nox.session(python=ALL_PYTHON) @@ -440,7 +494,7 @@ def showcase_unit_w_rest_async( """Run the generated unit tests with async rest transport against the Showcase library.""" with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True) as lib: session.chdir(lib) - run_showcase_unit_tests(session) + run_showcase_unit_tests(session, rest_async_io_enabled=True) @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 2bd35d94b709..6aef1e816227 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -3100,11 +3100,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 64b52c14bb88..8b3fc48e5b68 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -3495,11 +3495,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 40735ff6b593..a1690041f744 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -306,6 +306,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 45cafd05fb71..36cdea1121d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -1127,6 +1127,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 4c86709c4fe0..f7e7aaffcbe4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -265,6 +265,7 @@ def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyRespon it is returned to user code. """ return response + def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for analyze_iam_policy_longrunning @@ -281,6 +282,7 @@ def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation it is returned to user code. 
""" return response + def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for analyze_move @@ -297,6 +299,7 @@ def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asse it is returned to user code. """ return response + def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for analyze_org_policies @@ -313,6 +316,7 @@ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesRe it is returned to user code. """ return response + def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for analyze_org_policy_governed_assets @@ -329,6 +333,7 @@ def post_analyze_org_policy_governed_assets(self, response: asset_service.Analyz it is returned to user code. """ return response + def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for analyze_org_policy_governed_containers @@ -345,6 +350,7 @@ def post_analyze_org_policy_governed_containers(self, response: asset_service.An it is returned to user code. 
""" return response + def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for batch_get_assets_history @@ -361,6 +367,7 @@ def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHi it is returned to user code. """ return response + def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for batch_get_effective_iam_policies @@ -377,6 +384,7 @@ def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGet it is returned to user code. """ return response + def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_feed @@ -393,6 +401,7 @@ def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: it is returned to user code. """ return response + def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_saved_query @@ -409,6 +418,7 @@ def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_s it is returned to user code. """ return response + def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_feed @@ -441,6 +451,7 @@ def post_export_assets(self, response: operations_pb2.Operation) -> operations_p it is returned to user code. 
""" return response + def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_feed @@ -457,6 +468,7 @@ def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: it is returned to user code. """ return response + def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_saved_query @@ -473,6 +485,7 @@ def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_serv it is returned to user code. """ return response + def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_assets @@ -489,6 +502,7 @@ def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_ it is returned to user code. """ return response + def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_feeds @@ -505,6 +519,7 @@ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_se it is returned to user code. """ return response + def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_saved_queries @@ -521,6 +536,7 @@ def post_list_saved_queries(self, response: asset_service.ListSavedQueriesRespon it is returned to user code. 
""" return response + def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for query_assets @@ -537,6 +553,7 @@ def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asse it is returned to user code. """ return response + def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for search_all_iam_policies @@ -553,6 +570,7 @@ def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPolic it is returned to user code. """ return response + def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for search_all_resources @@ -569,6 +587,7 @@ def post_search_all_resources(self, response: asset_service.SearchAllResourcesRe it is returned to user code. """ return response + def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_feed @@ -585,6 +604,7 @@ def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: it is returned to user code. 
""" return response + def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_saved_query @@ -2618,6 +2638,9 @@ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): + def __hash__(self): + return hash("AssetServiceRestTransport.GetOperation") + @staticmethod def _get_response( host, @@ -2677,8 +2700,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) return resp diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index 116512120a95..7c4d227f9999 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -19,7 +19,6 @@ from google.protobuf import json_format from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -45,7 +44,7 @@ class _BaseAssetServiceRestTransport(AssetServiceTransport): def __init__(self, *, host: str = 'cloudasset.googleapis.com', - credentials: 
Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -55,7 +54,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'cloudasset.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -987,6 +986,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 931351be5fa8..406380650fee 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -49,6 +49,8 @@ "google-cloud-os-config >= 1.0.0, <2.0.0dev", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-asset" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -91,6 +93,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 200bcec7e389..1e304113db38 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -71,6 +71,11 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -8925,37 +8930,6 @@ async def test_analyze_org_policy_governed_assets_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.ExportAssetsRequest, - dict, -]) -def test_export_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.export_assets(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_export_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9069,110 +9043,6 @@ def test_export_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", "outputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = asset_service.ExportAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def 
test_export_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ExportAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_assets(request) - - -def test_export_assets_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListAssetsRequest, - dict, -]) -def test_list_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.ListAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListAssetsPager) - assert response.next_page_token == 'next_page_token_value' - def test_list_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9286,66 +9156,6 @@ def test_list_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetTypes", "contentType", "pageSize", "pageToken", "readTime", "relationshipTypes", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": 
pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) - - request = asset_service.ListAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListAssetsResponse() - - client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_assets(request) - - def test_list_assets_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9461,40 +9271,6 @@ def test_list_assets_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetAssetsHistoryRequest, - dict, -]) -def test_batch_get_assets_history_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.BatchGetAssetsHistoryResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_get_assets_history(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) - def test_batch_get_assets_history_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9608,143 +9384,32 @@ def test_batch_get_assets_history_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetNames", "contentType", "readTimeWindow", "relationshipTypes", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_assets_history_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), +def test_create_feed_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = 
asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request = asset_service.BatchGetAssetsHistoryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.BatchGetAssetsHistoryResponse() + # Ensure method has been cached + assert client._transport.create_feed in client._transport._wrapped_methods - client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc - pre.assert_called_once() - post.assert_called_once() + request = {} + client.create_feed(request) - -def test_batch_get_assets_history_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetAssetsHistoryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_get_assets_history(request) - - -def test_batch_get_assets_history_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateFeedRequest, - dict, -]) -def test_create_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_feed(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - -def test_create_feed_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_feed in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_feed] = mock_rpc - - request = {} - client.create_feed(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 client.create_feed(request) @@ -9835,66 +9500,6 @@ def test_create_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", "feedId", "feed", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) - - request = asset_service.CreateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - - client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 
'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_feed(request) - - def test_create_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9948,57 +9553,6 @@ def test_create_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_create_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetFeedRequest, - dict, -]) -def test_get_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_get_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10110,66 +9664,6 @@ def test_get_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, 
"pre_get_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) - - request = asset_service.GetFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - - client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_feed(request) - - def test_get_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10223,55 +9717,14 @@ def test_get_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_get_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListFeedsRequest, - dict, -]) -def test_list_feeds_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListFeedsResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListFeedsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_feeds(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.ListFeedsResponse) - -def test_list_feeds_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_feeds_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -10375,66 +9828,6 @@ def test_list_feeds_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_feeds_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) - - request = asset_service.ListFeedsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListFeedsResponse() - - client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_feeds_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListFeedsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_feeds(request) - - def test_list_feeds_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10488,57 +9881,6 @@ def test_list_feeds_rest_flattened_error(transport: str = 'rest'): ) -def test_list_feeds_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateFeedRequest, - dict, -]) -def test_update_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 
'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.Feed( - name='name_value', - asset_names=['asset_names_value'], - asset_types=['asset_types_value'], - content_type=asset_service.ContentType.RESOURCE, - relationship_types=['relationship_types_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.Feed.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_feed(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.Feed) - assert response.name == 'name_value' - assert response.asset_names == ['asset_names_value'] - assert response.asset_types == ['asset_types_value'] - assert response.content_type == asset_service.ContentType.RESOURCE - assert response.relationship_types == ['relationship_types_value'] - def test_update_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10646,66 +9988,6 @@ def test_update_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("feed", "updateMask", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = 
AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.Feed.to_json(asset_service.Feed()) - - request = asset_service.UpdateFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.Feed() - - client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_feed(request) - - def test_update_feed_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10759,44 +10041,6 @@ def test_update_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_update_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteFeedRequest, - dict, -]) -def test_delete_feed_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_feed(request) - - # Establish that the response is the type that we expect. 
- assert response is None - def test_delete_feed_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10905,71 +10149,16 @@ def test_delete_feed_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_feed_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( +def test_delete_feed_rest_flattened(): + client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = asset_service.DeleteFeedRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_feed_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteFeedRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/feeds/sample3'} - request = 
request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_feed(request) - - -def test_delete_feed_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None # get arguments that satisfy an http rule for this method sample_request = {'name': 'sample1/sample2/feeds/sample3'} @@ -11011,49 +10200,6 @@ def test_delete_feed_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_feed_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllResourcesRequest, - dict, -]) -def test_search_all_resources_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllResourcesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SearchAllResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.search_all_resources(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchAllResourcesPager) - assert response.next_page_token == 'next_page_token_value' - def test_search_all_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11167,66 +10313,6 @@ def test_search_all_resources_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", "readMask", )) & set(("scope", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_resources_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) - - request = asset_service.SearchAllResourcesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllResourcesResponse() - - client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_all_resources_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllResourcesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_all_resources(request) - - def test_search_all_resources_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11346,42 +10432,6 @@ def test_search_all_resources_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.SearchAllIamPoliciesRequest, - dict, -]) -def test_search_all_iam_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SearchAllIamPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.search_all_iam_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.SearchAllIamPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - def test_search_all_iam_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11495,66 +10545,6 @@ def test_search_all_iam_policies_rest_unset_required_fields(): assert set(unset_fields) == (set(("assetTypes", "orderBy", "pageSize", "pageToken", "query", )) & set(("scope", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_all_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) - - request = asset_service.SearchAllIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SearchAllIamPoliciesResponse() - - 
client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_search_all_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.SearchAllIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.search_all_iam_policies(request) - - def test_search_all_iam_policies_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11672,42 +10662,6 @@ def test_search_all_iam_policies_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyRequest, - dict, -]) -def test_analyze_iam_policy_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeIamPolicyResponse( - fully_explored=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) - assert response.fully_explored is True - def test_analyze_iam_policy_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11816,104 +10770,6 @@ def test_analyze_iam_policy_rest_unset_required_fields(): assert set(unset_fields) == (set(("analysisQuery", "executionTimeout", "savedAnalysisQuery", )) & set(("analysisQuery", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) - transcode.return_value = { - "method": "post", - "uri": 
"my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) - - request = asset_service.AnalyzeIamPolicyRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeIamPolicyResponse() - - client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_iam_policy_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_iam_policy(request) - - -def test_analyze_iam_policy_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeIamPolicyLongrunningRequest, - dict, -]) -def test_analyze_iam_policy_longrunning_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_iam_policy_longrunning(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_analyze_iam_policy_longrunning_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12022,108 +10878,6 @@ def test_analyze_iam_policy_longrunning_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("analysisQuery", "outputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = asset_service.AnalyzeIamPolicyLongrunningRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - 
client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_iam_policy_longrunning_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'analysis_query': {'scope': 'sample1/sample2'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_iam_policy_longrunning(request) - - -def test_analyze_iam_policy_longrunning_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeMoveRequest, - dict, -]) -def test_analyze_move_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeMoveResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeMoveResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_move(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.AnalyzeMoveResponse) - def test_analyze_move_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12248,165 +11002,60 @@ def test_analyze_move_rest_unset_required_fields(): assert set(unset_fields) == (set(("destinationParent", "view", )) & set(("resource", "destinationParent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_move_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), +def test_query_assets_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, 
"pre_analyze_move") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - request = asset_service.AnalyzeMoveRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeMoveResponse() + # Ensure method has been cached + assert client._transport.query_assets in client._transport._wrapped_methods - client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - pre.assert_called_once() - post.assert_called_once() + request = {} + client.query_assets(request) + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 -def test_analyze_move_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeMoveRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + client.query_assets(request) - # send a request that will satisfy transcoding - request_init = {'resource': 'sample1/sample2'} + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): + transport_class = transports.AssetServiceRestTransport + + request_init = {} + request_init["parent"] = "" request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_move(request) + # verify fields with default values are dropped + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) -def test_analyze_move_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) + # verify required fields with default values are now present - -@pytest.mark.parametrize("request_type", [ - asset_service.QueryAssetsRequest, - dict, -]) -def test_query_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send 
a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.QueryAssetsResponse( - job_reference='job_reference_value', - done=True, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.QueryAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.query_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.QueryAssetsResponse) - assert response.job_reference == 'job_reference_value' - assert response.done is True - -def test_query_assets_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.query_assets in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport.query_assets] = mock_rpc - - request = {} - client.query_assets(request) - - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.query_assets(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_query_assets_rest_required_fields(request_type=asset_service.QueryAssetsRequest): - transport_class = transports.AssetServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads(json_format.MessageToJson( - pb_request, - use_integers_for_enums=False - )) - - # verify fields with default values are dropped - - unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = 'parent_value' + jsonified_request["parent"] = 'parent_value' unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).query_assets._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) @@ -12465,179 +11114,6 @@ def test_query_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") 
as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) - - request = asset_service.QueryAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.QueryAssetsResponse() - - client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_query_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.QueryAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.query_assets(request) - - -def test_query_assets_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.CreateSavedQueryRequest, - dict, -]) -def test_create_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - def test_create_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12763,66 +11239,6 @@ def test_create_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(("savedQueryId", )) & set(("parent", "savedQuery", "savedQueryId", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - - request = asset_service.CreateSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = 
asset_service.SavedQuery() - - client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.CreateSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_saved_query(request) - - def test_create_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12880,55 +11296,6 @@ def test_create_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_create_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.GetSavedQueryRequest, - dict, -]) -def test_get_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_saved_query(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - def test_get_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13040,66 +11407,6 @@ def test_get_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - - request = asset_service.GetSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - - client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.GetSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_saved_query(request) - - def test_get_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13153,49 +11460,6 @@ def test_get_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_get_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.ListSavedQueriesRequest, - dict, -]) -def test_list_saved_queries_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.ListSavedQueriesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.ListSavedQueriesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_saved_queries(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSavedQueriesPager) - assert response.next_page_token == 'next_page_token_value' - def test_list_saved_queries_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13309,66 +11573,6 @@ def test_list_saved_queries_rest_unset_required_fields(): assert set(unset_fields) == (set(("filter", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_saved_queries_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse()) - - request = asset_service.ListSavedQueriesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.ListSavedQueriesResponse() - - client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", 
"squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_saved_queries_rest_bad_request(transport: str = 'rest', request_type=asset_service.ListSavedQueriesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_saved_queries(request) - - def test_list_saved_queries_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13484,120 +11688,14 @@ def test_list_saved_queries_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.UpdateSavedQueryRequest, - dict, -]) -def test_update_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': 
['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # 
For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["saved_query"][field])): - del request_init["saved_query"][field][i][subfield] - else: - del request_init["saved_query"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.SavedQuery( - name='name_value', - description='description_value', - creator='creator_value', - last_updater='last_updater_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.SavedQuery.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, asset_service.SavedQuery) - assert response.name == 'name_value' - assert response.description == 'description_value' - assert response.creator == 'creator_value' - assert response.last_updater == 'last_updater_value' - -def test_update_saved_query_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_update_saved_query_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -13699,66 +11797,6 @@ def test_update_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", )) & set(("savedQuery", "updateMask", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) - - request = asset_service.UpdateSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.SavedQuery() - - client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.UpdateSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_saved_query(request) - - def test_update_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13814,44 +11852,6 @@ def test_update_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_update_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.DeleteSavedQueryRequest, - dict, -]) -def test_delete_saved_query_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '' - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_saved_query(request) - - # Establish that the response is the type that we expect. 
- assert response is None - def test_delete_saved_query_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -13960,61 +11960,6 @@ def test_delete_saved_query_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_saved_query_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre: - pre.assert_not_called() - pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = asset_service.DeleteSavedQueryRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - - -def test_delete_saved_query_rest_bad_request(transport: str = 'rest', request_type=asset_service.DeleteSavedQueryRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'sample1/sample2/savedQueries/sample3'} - request = request_type(**request_init) - - # 
Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_saved_query(request) - - def test_delete_saved_query_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14066,59 +12011,18 @@ def test_delete_saved_query_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_saved_query_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - asset_service.BatchGetEffectiveIamPoliciesRequest, - dict, -]) -def test_batch_get_effective_iam_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.BatchGetEffectiveIamPoliciesResponse( - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.batch_get_effective_iam_policies(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse) - -def test_batch_get_effective_iam_policies_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.batch_get_effective_iam_policies in client._transport._wrapped_methods @@ -14231,109 +12135,6 @@ def test_batch_get_effective_iam_policies_rest_unset_required_fields(): assert set(unset_fields) == (set(("names", )) & set(("scope", "names", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with 
mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) - - request = asset_service.BatchGetEffectiveIamPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() - - client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_batch_get_effective_iam_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.BatchGetEffectiveIamPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.batch_get_effective_iam_policies(request) - - -def test_batch_get_effective_iam_policies_rest_error(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPoliciesRequest, - dict, -]) -def test_analyze_org_policies_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPoliciesResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_org_policies(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.AnalyzeOrgPoliciesPager) - assert response.next_page_token == 'next_page_token_value' - def test_analyze_org_policies_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14458,66 +12259,6 @@ def test_analyze_org_policies_rest_unset_required_fields(): assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policies_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) - - request = asset_service.AnalyzeOrgPoliciesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPoliciesResponse() - - 
client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_org_policies_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPoliciesRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_org_policies(request) - - def test_analyze_org_policies_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14637,42 +12378,6 @@ def test_analyze_org_policies_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedContainersRequest, - dict, -]) -def test_analyze_org_policy_governed_containers_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_org_policy_governed_containers(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager) - assert response.next_page_token == 'next_page_token_value' - def test_analyze_org_policy_governed_containers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -14797,79 +12502,19 @@ def test_analyze_org_policy_governed_containers_rest_unset_required_fields(): assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( +def test_analyze_org_policy_governed_containers_rest_flattened(): + client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ - 
mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - - client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_org_policy_governed_containers_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_org_policy_governed_containers(request) - - -def test_analyze_org_policy_governed_containers_rest_flattened(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {'scope': 'sample1/sample2'} + # get arguments that satisfy an http rule for this method + sample_request = {'scope': 'sample1/sample2'} # get truthy value for each flattened field mock_args = dict( @@ -14976,42 +12621,6 @@ def test_analyze_org_policy_governed_containers_rest_pager(transport: str = 'res assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, - dict, -]) -def test_analyze_org_policy_governed_assets_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( - next_page_token='next_page_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.analyze_org_policy_governed_assets(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager) - assert response.next_page_token == 'next_page_token_value' - def test_analyze_org_policy_governed_assets_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -15136,66 +12745,6 @@ def test_analyze_org_policy_governed_assets_rest_unset_required_fields(): assert set(unset_fields) == (set(("constraint", "filter", "pageSize", "pageToken", )) & set(("scope", "constraint", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): - transport = transports.AssetServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), - ) - client = AssetServiceClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ - mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: - pre.assert_not_called() - 
post.assert_not_called() - pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) - - request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() - - client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_analyze_org_policy_governed_assets_rest_bad_request(transport: str = 'rest', request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'scope': 'sample1/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.analyze_org_policy_governed_assets(request) - - def test_analyze_org_policy_governed_assets_rest_flattened(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15409,6 +12958,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_export_assets_empty_call_grpc(): @@ -15922,6 +13479,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -16431,115 +13996,2596 @@ async def test_delete_saved_query_empty_call_grpc_asyncio(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - ) +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.BatchGetEffectiveIamPoliciesResponse( + )) + await client.batch_get_effective_iam_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policies_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPoliciesResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policies(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedContainersResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policy_governed_containers(request=None) + + # Establish that the underlying gRPC stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse( + next_page_token='next_page_token_value', + )) + await client.analyze_org_policy_governed_assets(request=None) + + # Establish that the underlying gRPC stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = AssetServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.export_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ExportAssetsRequest, + dict, +]) +def test_export_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.export_assets(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = asset_service.ExportAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.export_assets(request, metadata=[("key", "val"), 
("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListAssetsRequest, + dict, +]) +def test_list_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.ListAssetsResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.ListAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_assets(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAssetsPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) + req.return_value.content = return_value + + request = asset_service.ListAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = 
request, metadata + post.return_value = asset_service.ListAssetsResponse() + + client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.BatchGetAssetsHistoryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.batch_get_assets_history(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.BatchGetAssetsHistoryRequest, + dict, +]) +def test_batch_get_assets_history_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.BatchGetAssetsHistoryResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.BatchGetAssetsHistoryResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.batch_get_assets_history(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.BatchGetAssetsHistoryResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_batch_get_assets_history_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) + req.return_value.content = return_value + + request = asset_service.BatchGetAssetsHistoryRequest() + metadata =[ + ("key", "val"), + 
("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.BatchGetAssetsHistoryResponse() + + client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateFeedRequest, + dict, +]) +def test_create_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_feed(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value.content = return_value + + request = asset_service.CreateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetFeedRequest, + dict, +]) +def test_get_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_feed(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value.content = return_value + + request = asset_service.GetFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_feeds(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.ListFeedsRequest, + dict, +]) +def test_list_feeds_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.ListFeedsResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.ListFeedsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_feeds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.ListFeedsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_feeds_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) + req.return_value.content = return_value + + request = asset_service.ListFeedsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.ListFeedsResponse() + + client.list_feeds(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.UpdateFeedRequest, + dict, +]) +def test_update_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'feed': {'name': 'sample1/sample2/feeds/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = asset_service.Feed( + name='name_value', + asset_names=['asset_names_value'], + asset_types=['asset_types_value'], + content_type=asset_service.ContentType.RESOURCE, + relationship_types=['relationship_types_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.Feed.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_feed(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, asset_service.Feed) + assert response.name == 'name_value' + assert response.asset_names == ['asset_names_value'] + assert response.asset_types == ['asset_types_value'] + assert response.content_type == asset_service.ContentType.RESOURCE + assert response.relationship_types == ['relationship_types_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, 
+ } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.Feed.to_json(asset_service.Feed()) + req.return_value.content = return_value + + request = asset_service.UpdateFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.Feed() + + client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_feed(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.DeleteFeedRequest, + dict, +]) +def test_delete_feed_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/feeds/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '' + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_feed(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_feed_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_feed") as pre: + pre.assert_not_called() + pb_message = asset_service.DeleteFeedRequest.pb(asset_service.DeleteFeedRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = asset_service.DeleteFeedRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + + +def test_search_all_resources_rest_bad_request(request_type=asset_service.SearchAllResourcesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.search_all_resources(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllResourcesRequest, + dict, +]) +def test_search_all_resources_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllResourcesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SearchAllResourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_resources(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchAllResourcesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_resources_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) + req.return_value.content = return_value + + request = asset_service.SearchAllResourcesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllResourcesResponse() + + client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + 
request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.search_all_iam_policies(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.SearchAllIamPoliciesRequest, + dict, +]) +def test_search_all_iam_policies_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'scope': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SearchAllIamPoliciesResponse( + next_page_token='next_page_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SearchAllIamPoliciesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.search_all_iam_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.SearchAllIamPoliciesPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_all_iam_policies_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) + req.return_value.content = return_value + + request = asset_service.SearchAllIamPoliciesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SearchAllIamPoliciesResponse() + + client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy 
transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.analyze_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyRequest, + dict, +]) +def test_analyze_iam_policy_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeIamPolicyResponse( + fully_explored=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeIamPolicyResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.AnalyzeIamPolicyResponse) + assert response.fully_explored is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) + req.return_value.content = return_value + + request = asset_service.AnalyzeIamPolicyRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeIamPolicyResponse() + + client.analyze_iam_policy(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = 
{'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.analyze_iam_policy_longrunning(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeIamPolicyLongrunningRequest, + dict, +]) +def test_analyze_iam_policy_longrunning_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'analysis_query': {'scope': 'sample1/sample2'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_iam_policy_longrunning(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = asset_service.AnalyzeIamPolicyLongrunningRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + 
request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.analyze_move(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.AnalyzeMoveRequest, + dict, +]) +def test_analyze_move_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'resource': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.AnalyzeMoveResponse( + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.AnalyzeMoveResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.analyze_move(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.AnalyzeMoveResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_analyze_move_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) + req.return_value.content = return_value + + request = asset_service.AnalyzeMoveRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.AnalyzeMoveResponse() + + client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.query_assets(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.QueryAssetsRequest, + dict, +]) +def test_query_assets_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.QueryAssetsResponse( + job_reference='job_reference_value', + done=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.QueryAssetsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.query_assets(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.QueryAssetsResponse) + assert response.job_reference == 'job_reference_value' + assert response.done is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_query_assets_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) + req.return_value.content = return_value + + request = asset_service.QueryAssetsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.QueryAssetsResponse() + + client.query_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.CreateSavedQueryRequest, + dict, +]) +def test_create_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'sample1/sample2'} + request_init["saved_query"] = {'name': 'name_value', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = asset_service.CreateSavedQueryRequest.meta.fields["saved_query"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["saved_query"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["saved_query"][field])): + del request_init["saved_query"][field][i][subfield] + else: + del request_init["saved_query"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_saved_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_saved_query_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) + req.return_value.content = return_value + + request = asset_service.CreateSavedQueryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SavedQuery() + + client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a 
request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_saved_query(request) + + +@pytest.mark.parametrize("request_type", [ + asset_service.GetSavedQueryRequest, + dict, +]) +def test_get_saved_query_rest_call_success(request_type): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'sample1/sample2/savedQueries/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = asset_service.SavedQuery( + name='name_value', + description='description_value', + creator='creator_value', + last_updater='last_updater_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = asset_service.SavedQuery.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_saved_query(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, asset_service.SavedQuery) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.creator == 'creator_value' + assert response.last_updater == 'last_updater_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_saved_query_rest_interceptors(null_interceptor): + transport = transports.AssetServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(), + ) + client = AssetServiceClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) + req.return_value.content = return_value + + request = asset_service.GetSavedQueryRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = asset_service.SavedQuery() + + client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will 
@pytest.mark.parametrize("request_type", [
    asset_service.ListSavedQueriesRequest,
    dict,
])
def test_list_saved_queries_rest_call_success(request_type):
    """A 200 JSON response is deserialized into a ``ListSavedQueriesPager``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'parent': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = asset_service.ListSavedQueriesResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = asset_service.ListSavedQueriesResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.list_saved_queries(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListSavedQueriesPager)
    assert response.next_page_token == 'next_page_token_value'


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_saved_queries_rest_interceptors(null_interceptor):
    """Verify the pre/post ListSavedQueries REST interceptor hooks each fire exactly once."""
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse())
        req.return_value.content = return_value

        request = asset_service.ListSavedQueriesRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = asset_service.ListSavedQueriesResponse()

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.list_saved_queries(request, metadata=metadata)

        pre.assert_called_once()
        post.assert_called_once()


def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest):
    """An HTTP 400 from the transport surfaces as ``core_exceptions.BadRequest``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.update_saved_query(request)
@pytest.mark.parametrize("request_type", [
    asset_service.UpdateSavedQueryRequest,
    dict,
])
def test_update_saved_query_rest_call_success(request_type):
    """UpdateSavedQuery over REST: a 200 JSON response round-trips into a SavedQuery.

    Before issuing the call, the fully-populated sample request dict is pruned of
    any nested subfields that do not exist in the protobuf/proto-plus runtime
    actually installed, so the test survives dependency-version skew.
    """
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'saved_query': {'name': 'sample1/sample2/savedQueries/sample3'}}
    # Replace the minimal body with a sample that populates every known field.
    request_init["saved_query"] = {'name': 'sample1/sample2/savedQueries/sample3', 'description': 'description_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'creator': 'creator_value', 'last_update_time': {}, 'last_updater': 'last_updater_value', 'labels': {}, 'content': {'iam_policy_analysis_query': {'scope': 'scope_value', 'resource_selector': {'full_resource_name': 'full_resource_name_value'}, 'identity_selector': {'identity': 'identity_value'}, 'access_selector': {'roles': ['roles_value1', 'roles_value2'], 'permissions': ['permissions_value1', 'permissions_value2']}, 'options': {'expand_groups': True, 'expand_roles': True, 'expand_resources': True, 'output_resource_edges': True, 'output_group_edges': True, 'analyze_service_account_impersonation': True}, 'condition_context': {'access_time': {}}}}}
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = asset_service.UpdateSavedQueryRequest.meta.fields["saved_query"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            # proto-plus message types carry their schema in `.meta`; raw
            # protobuf types expose a DESCRIPTOR instead.
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else: # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    # (field, nested_field) name pairs that exist in the installed runtime.
    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["saved_query"].items(): # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {"field": field, "subfield": subfield, "is_repeated": is_repeated}
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                # Repeated fields: delete the subfield from every element.
                for i in range(0, len(request_init["saved_query"][field])):
                    del request_init["saved_query"][field][i][subfield]
            else:
                del request_init["saved_query"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = asset_service.SavedQuery(
            name='name_value',
            description='description_value',
            creator='creator_value',
            last_updater='last_updater_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = asset_service.SavedQuery.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.update_saved_query(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, asset_service.SavedQuery)
    assert response.name == 'name_value'
    assert response.description == 'description_value'
    assert response.creator == 'creator_value'
    assert response.last_updater == 'last_updater_value'
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_update_saved_query_rest_interceptors(null_interceptor):
    """Verify the pre/post UpdateSavedQuery REST interceptor hooks each fire exactly once."""
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery())
        req.return_value.content = return_value

        request = asset_service.UpdateSavedQueryRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = asset_service.SavedQuery()

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.update_saved_query(request, metadata=metadata)

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest):
    """An HTTP 400 from the transport surfaces as ``core_exceptions.BadRequest``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_saved_query(request)


@pytest.mark.parametrize("request_type", [
    asset_service.DeleteSavedQueryRequest,
    dict,
])
def test_delete_saved_query_rest_call_success(request_type):
    """DeleteSavedQuery returns None on a successful (empty-body) 200 response."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'name': 'sample1/sample2/savedQueries/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = ''
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_saved_query(request)

    # Establish that the response is the type that we expect.
    assert response is None
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_saved_query_rest_interceptors(null_interceptor):
    """Verify the pre DeleteSavedQuery REST interceptor hook fires exactly once.

    Delete has no response payload, so only the ``pre`` hook exists for it.
    """
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_delete_saved_query") as pre:
        pre.assert_not_called()
        pb_message = asset_service.DeleteSavedQueryRequest.pb(asset_service.DeleteSavedQueryRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200

        request = asset_service.DeleteSavedQueryRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.delete_saved_query(request, metadata=metadata)

        pre.assert_called_once()


def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_service.BatchGetEffectiveIamPoliciesRequest):
    """An HTTP 400 from the transport surfaces as ``core_exceptions.BadRequest``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.batch_get_effective_iam_policies(request)


@pytest.mark.parametrize("request_type", [
    asset_service.BatchGetEffectiveIamPoliciesRequest,
    dict,
])
def test_batch_get_effective_iam_policies_rest_call_success(request_type):
    """A 200 JSON response deserializes into BatchGetEffectiveIamPoliciesResponse."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.batch_get_effective_iam_policies(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, asset_service.BatchGetEffectiveIamPoliciesResponse)
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor):
    """Verify the pre/post BatchGetEffectiveIamPolicies interceptor hooks each fire once."""
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse())
        req.return_value.content = return_value

        request = asset_service.BatchGetEffectiveIamPoliciesRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse()

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.batch_get_effective_iam_policies(request, metadata=metadata)

        pre.assert_called_once()
        post.assert_called_once()


def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest):
    """An HTTP 400 from the transport surfaces as ``core_exceptions.BadRequest``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.analyze_org_policies(request)


@pytest.mark.parametrize("request_type", [
    asset_service.AnalyzeOrgPoliciesRequest,
    dict,
])
def test_analyze_org_policies_rest_call_success(request_type):
    """A 200 JSON response is deserialized into an ``AnalyzeOrgPoliciesPager``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = asset_service.AnalyzeOrgPoliciesResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = asset_service.AnalyzeOrgPoliciesResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.analyze_org_policies(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AnalyzeOrgPoliciesPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_analyze_org_policies_rest_interceptors(null_interceptor):
    """Verify the pre/post AnalyzeOrgPolicies REST interceptor hooks each fire exactly once."""
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse())
        req.return_value.content = return_value

        request = asset_service.AnalyzeOrgPoliciesRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = asset_service.AnalyzeOrgPoliciesResponse()

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.analyze_org_policies(request, metadata=metadata)

        pre.assert_called_once()
        post.assert_called_once()


def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest):
    """An HTTP 400 from the transport surfaces as ``core_exceptions.BadRequest``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.analyze_org_policy_governed_containers(request)


@pytest.mark.parametrize("request_type", [
    asset_service.AnalyzeOrgPolicyGovernedContainersRequest,
    dict,
])
def test_analyze_org_policy_governed_containers_rest_call_success(request_type):
    """A 200 JSON response deserializes into AnalyzeOrgPolicyGovernedContainersPager."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.analyze_org_policy_governed_containers(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedContainersPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.parametrize("null_interceptor", [True, False])
def test_analyze_org_policy_governed_containers_rest_interceptors(null_interceptor):
    """Verify the pre/post AnalyzeOrgPolicyGovernedContainers hooks each fire exactly once."""
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse())
        req.return_value.content = return_value

        request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse()

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.analyze_org_policy_governed_containers(request, metadata=metadata)

        pre.assert_called_once()
        post.assert_called_once()


def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest):
    """An HTTP 400 from the transport surfaces as ``core_exceptions.BadRequest``."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.analyze_org_policy_governed_assets(request)


@pytest.mark.parametrize("request_type", [
    asset_service.AnalyzeOrgPolicyGovernedAssetsRequest,
    dict,
])
def test_analyze_org_policy_governed_assets_rest_call_success(request_type):
    """A 200 JSON response deserializes into AnalyzeOrgPolicyGovernedAssetsPager."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {'scope': 'sample1/sample2'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(
            next_page_token='next_page_token_value',
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.analyze_org_policy_governed_assets(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.AnalyzeOrgPolicyGovernedAssetsPager)
    assert response.next_page_token == 'next_page_token_value'


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor):
    """Verify the pre/post AnalyzeOrgPolicyGovernedAssets hooks each fire exactly once."""
    transport = transports.AssetServiceRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.AssetServiceRestInterceptor(),
    )
    client = AssetServiceClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \
         mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest())
        # Short-circuit transcoding so no real URI/body resolution is attempted.
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse())
        req.return_value.content = return_value

        request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()

        # Reuse the metadata defined above rather than re-spelling the literal.
        client.analyze_org_policy_governed_assets(request, metadata=metadata)

        pre.assert_called_once()
        post.assert_called_once()
def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest):
    """GetOperation over REST: an HTTP 400 maps to core_exceptions.BadRequest."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request)

    # Fake the underlying HTTP call so it returns a 400 and assert the
    # client-level exception.
    with mock.patch.object(Session, 'request') as http_call, pytest.raises(core_exceptions.BadRequest):
        fake_response = Response()
        json_return_value = ''
        fake_response.json = mock.Mock(return_value={})
        fake_response.status_code = 400
        fake_response.request = Request()
        http_call.return_value = fake_response
        client.get_operation(request)


@pytest.mark.parametrize("request_type", [
    operations_pb2.GetOperationRequest,
    dict,
])
def test_get_operation_rest(request_type):
    """GetOperation over REST: a 200 JSON body deserializes into an Operation."""
    client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type(**{'name': 'sample1/sample2/operations/sample3/sample4'})

    # Fake the underlying HTTP call with a serialized empty Operation.
    with mock.patch.object(Session, 'request') as http_call:
        operation_proto = operations_pb2.Operation()

        fake_response = mock.Mock()
        fake_response.status_code = 200
        fake_response.content = json_format.MessageToJson(operation_proto).encode('UTF-8')
        http_call.return_value = fake_response

        result = client.get_operation(request)

    # The wire payload must round-trip into the expected message type.
    assert isinstance(result, operations_pb2.Operation)


def test_initialize_client_w_rest():
    """The client constructs successfully with the REST transport."""
    rest_client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    assert rest_client is not None


def test_asset_service_rest_lro_client():
    """The REST transport lazily exposes a cached api-core operations client."""
    rest_client = AssetServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    rest_transport = rest_client.transport

    # Ensure that we have an api-core operations client.
    assert isinstance(
        rest_transport.operations_client,
        operations_v1.AbstractOperationsClient,
    )

    # Ensure that subsequent calls to the property send the exact same object.
    assert rest_transport.operations_client is rest_transport.operations_client
- assert transport.operations_client is transport.operations_client - - @pytest.mark.parametrize("transport_name", [ "grpc", "grpc_asyncio", @@ -17303,65 +17332,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = AssetServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict({'name': 'sample1/sample2/operations/sample3/sample4'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'sample1/sample2/operations/sample3/sample4'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - def test_get_operation(transport: str = "grpc"): client = AssetServiceClient( @@ -17492,21 +17462,39 @@ async def test_get_operation_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = AssetServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = AssetServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - 
close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 1dc8b94d616f..819c62f3c5b5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -118,6 +118,7 @@ def post_generate_access_token(self, response: common.GenerateAccessTokenRespons it is returned to user code. """ return response + def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for generate_id_token @@ -134,6 +135,7 @@ def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> co it is returned to user code. """ return response + def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for sign_blob @@ -150,6 +152,7 @@ def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobRe it is returned to user code. 
""" return response + def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for sign_jwt diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py index 3ffce85ffd95..8d1bc465012b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -19,7 +19,6 @@ from google.protobuf import json_format from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -43,7 +42,7 @@ class _BaseIAMCredentialsRestTransport(IAMCredentialsTransport): def __init__(self, *, host: str = 'iamcredentials.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -53,7 +52,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'iamcredentials.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 28f72ad6962c..f9ceb4c0f2b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -46,6 +46,8 @@ "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-iam-credentials" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -88,6 +90,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 553259e95062..c943272ba254 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -61,6 +61,11 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -2117,42 
+2122,6 @@ async def test_sign_jwt_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - common.GenerateAccessTokenRequest, - dict, -]) -def test_generate_access_token_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.GenerateAccessTokenResponse( - access_token='access_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.GenerateAccessTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.generate_access_token(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.GenerateAccessTokenResponse) - assert response.access_token == 'access_token_value' - def test_generate_access_token_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2269,66 +2238,6 @@ def test_generate_access_token_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "scope", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_access_token_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) - - request = common.GenerateAccessTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.GenerateAccessTokenResponse() - - client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_generate_access_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateAccessTokenRequest): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.generate_access_token(request) - - def test_generate_access_token_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2388,49 +2297,6 @@ def test_generate_access_token_rest_flattened_error(transport: str = 'rest'): ) -def test_generate_access_token_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - common.GenerateIdTokenRequest, - dict, -]) -def test_generate_id_token_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = common.GenerateIdTokenResponse( - token='token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.GenerateIdTokenResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.generate_id_token(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.GenerateIdTokenResponse) - assert response.token == 'token_value' - def test_generate_id_token_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2547,66 +2413,6 @@ def test_generate_id_token_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "audience", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_generate_id_token_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = 
Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) - - request = common.GenerateIdTokenRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.GenerateIdTokenResponse() - - client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_generate_id_token_rest_bad_request(transport: str = 'rest', request_type=common.GenerateIdTokenRequest): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.generate_id_token(request) - - def test_generate_id_token_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2666,51 +2472,6 @@ def test_generate_id_token_rest_flattened_error(transport: str = 'rest'): ) -def test_generate_id_token_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - common.SignBlobRequest, - dict, -]) -def test_sign_blob_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = common.SignBlobResponse( - key_id='key_id_value', - signed_blob=b'signed_blob_blob', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.SignBlobResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.sign_blob(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.SignBlobResponse) - assert response.key_id == 'key_id_value' - assert response.signed_blob == b'signed_blob_blob' - def test_sign_blob_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2827,70 +2588,10 @@ def test_sign_blob_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "payload", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sign_blob_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.SignBlobResponse.to_json(common.SignBlobResponse()) - - request = common.SignBlobRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.SignBlobResponse() - - client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_sign_blob_rest_bad_request(transport: str = 'rest', 
request_type=common.SignBlobRequest): +def test_sign_blob_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.sign_blob(request) - - -def test_sign_blob_rest_flattened(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest", ) # Mock the http request call within the method and fake a response. @@ -2944,51 +2645,6 @@ def test_sign_blob_rest_flattened_error(transport: str = 'rest'): ) -def test_sign_blob_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - common.SignJwtRequest, - dict, -]) -def test_sign_jwt_rest(request_type): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = common.SignJwtResponse( - key_id='key_id_value', - signed_jwt='signed_jwt_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.SignJwtResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.sign_jwt(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.SignJwtResponse) - assert response.key_id == 'key_id_value' - assert response.signed_jwt == 'signed_jwt_value' - def test_sign_jwt_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3105,66 +2761,6 @@ def test_sign_jwt_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "payload", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sign_jwt_rest_interceptors(null_interceptor): - transport = transports.IAMCredentialsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), - ) - client = IAMCredentialsClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ - mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.SignJwtResponse.to_json(common.SignJwtResponse()) - - request = common.SignJwtRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.SignJwtResponse() - - client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_sign_jwt_rest_bad_request(transport: str = 'rest', request_type=common.SignJwtRequest): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.sign_jwt(request) - - def test_sign_jwt_rest_flattened(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3222,13 +2818,6 @@ def test_sign_jwt_rest_flattened_error(transport: str = 'rest'): ) -def test_sign_jwt_rest_error(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.IAMCredentialsGrpcTransport( @@ -3323,6 +2912,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_generate_access_token_empty_call_grpc(): @@ -3418,6 +3015,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = IAMCredentialsAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -3531,6 +3136,413 @@ def test_transport_kind_rest(): assert transport.kind == "rest" +def test_generate_access_token_rest_bad_request(request_type=common.GenerateAccessTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.generate_access_token(request) + + +@pytest.mark.parametrize("request_type", [ + common.GenerateAccessTokenRequest, + dict, +]) +def test_generate_access_token_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateAccessTokenResponse( + access_token='access_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GenerateAccessTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_access_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.GenerateAccessTokenResponse) + assert response.access_token == 'access_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_access_token_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) + req.return_value.content = return_value + + request = common.GenerateAccessTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GenerateAccessTokenResponse() + + client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.generate_id_token(request) + + +@pytest.mark.parametrize("request_type", [ + common.GenerateIdTokenRequest, + dict, +]) +def test_generate_id_token_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.GenerateIdTokenResponse( + token='token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.GenerateIdTokenResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.generate_id_token(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.GenerateIdTokenResponse) + assert response.token == 'token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_generate_id_token_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) + req.return_value.content = return_value + + request = common.GenerateIdTokenRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.GenerateIdTokenResponse() + + client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.sign_blob(request) + + +@pytest.mark.parametrize("request_type", [ + common.SignBlobRequest, + dict, +]) +def test_sign_blob_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignBlobResponse( + key_id='key_id_value', + signed_blob=b'signed_blob_blob', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.SignBlobResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sign_blob(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.SignBlobResponse) + assert response.key_id == 'key_id_value' + assert response.signed_blob == b'signed_blob_blob' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sign_blob_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = common.SignBlobResponse.to_json(common.SignBlobResponse()) + req.return_value.content = return_value + + request = common.SignBlobRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.SignBlobResponse() + + client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.sign_jwt(request) + + +@pytest.mark.parametrize("request_type", [ + common.SignJwtRequest, + dict, +]) +def test_sign_jwt_rest_call_success(request_type): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/serviceAccounts/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = common.SignJwtResponse( + key_id='key_id_value', + signed_jwt='signed_jwt_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.SignJwtResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.sign_jwt(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.SignJwtResponse) + assert response.key_id == 'key_id_value' + assert response.signed_jwt == 'signed_jwt_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_sign_jwt_rest_interceptors(null_interceptor): + transport = transports.IAMCredentialsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.IAMCredentialsRestInterceptor(), + ) + client = IAMCredentialsClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = common.SignJwtResponse.to_json(common.SignJwtResponse()) + req.return_value.content = return_value + + request = common.SignJwtRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.SignJwtResponse() + + client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +def test_initialize_client_w_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = IAMCredentialsClient( @@ -4050,33 +4062,40 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) + +def test_transport_close_grpc(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_transport_close_grpc_asyncio(): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + transport="grpc_asyncio" ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: async with client: close.assert_not_called() close.assert_called_once() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = IAMCredentialsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 
85e32e498946..7e51502dda4c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -2502,11 +2502,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -2556,11 +2552,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2614,11 +2606,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2667,11 +2655,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2783,11 +2767,7 @@ async def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2904,11 +2884,7 @@ async def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2963,11 +2939,7 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -3017,11 +2989,7 @@ async def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -3071,11 +3039,7 @@ async def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 53e54c3e04b5..64d7613c6e75 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -2922,11 +2922,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -2976,11 +2972,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3034,11 +3026,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3087,11 +3075,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3203,11 +3187,7 @@ def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -3324,11 +3304,7 @@ def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -3383,11 +3359,7 @@ def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -3437,11 +3409,7 @@ def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -3491,11 +3459,7 @@ def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 86ab6167d49b..4ad308359ab1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -220,6 +220,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 3dbf4ebfe692..5914da06cbaf 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -831,6 +831,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index be3f58cf1d78..1031edb41b1e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -241,6 +241,7 @@ def post_create_channel(self, response: operations_pb2.Operation) -> operations_ it is returned to user code. """ return response + def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_channel_connection @@ -257,6 +258,7 @@ def post_create_channel_connection(self, response: operations_pb2.Operation) -> it is returned to user code. """ return response + def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for create_trigger @@ -273,6 +275,7 @@ def post_create_trigger(self, response: operations_pb2.Operation) -> operations_ it is returned to user code. """ return response + def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_channel @@ -289,6 +292,7 @@ def post_delete_channel(self, response: operations_pb2.Operation) -> operations_ it is returned to user code. 
""" return response + def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_channel_connection @@ -305,6 +309,7 @@ def post_delete_channel_connection(self, response: operations_pb2.Operation) -> it is returned to user code. """ return response + def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_trigger @@ -321,6 +326,7 @@ def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_ it is returned to user code. """ return response + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_channel @@ -337,6 +343,7 @@ def post_get_channel(self, response: channel.Channel) -> channel.Channel: it is returned to user code. """ return response + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_channel_connection @@ -353,6 +360,7 @@ def post_get_channel_connection(self, response: channel_connection.ChannelConnec it is returned to user code. """ return response + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_google_channel_config @@ -369,6 +377,7 @@ def post_get_google_channel_config(self, response: google_channel_config.GoogleC it is returned to user code. 
""" return response + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_provider @@ -385,6 +394,7 @@ def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: it is returned to user code. """ return response + def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_trigger @@ -401,6 +411,7 @@ def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: it is returned to user code. """ return response + def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_channel_connections @@ -417,6 +428,7 @@ def post_list_channel_connections(self, response: eventarc.ListChannelConnection it is returned to user code. """ return response + def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_channels @@ -433,6 +445,7 @@ def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventar it is returned to user code. """ return response + def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_providers @@ -449,6 +462,7 @@ def post_list_providers(self, response: eventarc.ListProvidersResponse) -> event it is returned to user code. 
""" return response + def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_triggers @@ -465,6 +479,7 @@ def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventar it is returned to user code. """ return response + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_channel @@ -481,6 +496,7 @@ def post_update_channel(self, response: operations_pb2.Operation) -> operations_ it is returned to user code. """ return response + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_google_channel_config @@ -497,6 +513,7 @@ def post_update_google_channel_config(self, response: gce_google_channel_config. it is returned to user code. """ return response + def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_trigger @@ -534,6 +551,7 @@ def post_get_location( it is returned to user code. """ return response + def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: @@ -554,6 +572,7 @@ def post_list_locations( it is returned to user code. 
""" return response + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: @@ -574,6 +593,7 @@ def post_get_iam_policy( it is returned to user code. """ return response + def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: @@ -594,6 +614,7 @@ def post_set_iam_policy( it is returned to user code. """ return response + def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: @@ -614,6 +635,7 @@ def post_test_iam_permissions( it is returned to user code. """ return response + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: @@ -634,6 +656,7 @@ def post_cancel_operation( it is returned to user code. """ return response + def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: @@ -654,6 +677,7 @@ def post_delete_operation( it is returned to user code. """ return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: @@ -674,6 +698,7 @@ def post_get_operation( it is returned to user code. 
""" return response + def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: @@ -2346,6 +2371,9 @@ def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseEventarcRestTransport._BaseGetLocation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetLocation") + @staticmethod def _get_response( host, @@ -2405,8 +2433,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.Location() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) return resp @@ -2415,6 +2444,9 @@ def list_locations(self): return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseEventarcRestTransport._BaseListLocations, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.ListLocations") + @staticmethod def _get_response( host, @@ -2474,8 +2506,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) return resp @@ -2484,6 +2517,9 @@ def get_iam_policy(self): return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _GetIamPolicy(_BaseEventarcRestTransport._BaseGetIamPolicy, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetIamPolicy") + @staticmethod def _get_response( host, @@ -2543,8 +2579,9 @@ def 
__call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) return resp @@ -2553,6 +2590,9 @@ def set_iam_policy(self): return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore class _SetIamPolicy(_BaseEventarcRestTransport._BaseSetIamPolicy, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.SetIamPolicy") + @staticmethod def _get_response( host, @@ -2615,8 +2655,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) return resp @@ -2625,6 +2666,9 @@ def test_iam_permissions(self): return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore class _TestIamPermissions(_BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.TestIamPermissions") + @staticmethod def _get_response( host, @@ -2687,8 +2731,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) return resp @@ -2697,6 +2742,9 @@ def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class 
_CancelOperation(_BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.CancelOperation") + @staticmethod def _get_response( host, @@ -2763,6 +2811,9 @@ def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.DeleteOperation") + @staticmethod def _get_response( host, @@ -2826,6 +2877,9 @@ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseEventarcRestTransport._BaseGetOperation, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.GetOperation") + @staticmethod def _get_response( host, @@ -2885,8 +2939,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) return resp @@ -2895,6 +2950,9 @@ def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseEventarcRestTransport._BaseListOperations, EventarcRestStub): + def __hash__(self): + return hash("EventarcRestTransport.ListOperations") + @staticmethod def _get_response( host, @@ -2954,8 +3012,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) return resp diff 
--git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index 660024e9a42c..c2bafca03b57 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -22,7 +22,6 @@ from google.iam.v1 import policy_pb2 # type: ignore from google.cloud.location import locations_pb2 # type: ignore from .base import EventarcTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -53,7 +52,7 @@ class _BaseEventarcRestTransport(EventarcTransport): def __init__(self, *, host: str = 'eventarc.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -63,7 +62,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'eventarc.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -805,6 +804,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -828,6 +829,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -851,6 +854,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -882,6 +887,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -920,6 +927,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -958,6 +967,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -986,6 +997,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -1009,6 +1022,8 @@ def _get_query_params_json(transcoded_request): return 
query_params class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -1032,6 +1047,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index b930a775e478..78e888f6a316 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -47,6 +47,8 @@ "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-eventarc" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -89,6 +91,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 3770b34748d1..d9b3d3d26b5b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -81,6 +81,11 @@ import 
google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -7410,50 +7415,6 @@ async def test_update_google_channel_config_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - eventarc.GetTriggerRequest, - dict, -]) -def test_get_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = trigger.Trigger( - name='name_value', - uid='uid_value', - service_account='service_account_value', - channel='channel_value', - etag='etag_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = trigger.Trigger.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_trigger(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, trigger.Trigger) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.service_account == 'service_account_value' - assert response.channel == 'channel_value' - assert response.etag == 'etag_value' - def test_get_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7565,66 +7526,6 @@ def test_get_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = trigger.Trigger.to_json(trigger.Trigger()) - - request = eventarc.GetTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = trigger.Trigger() - - client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - - -def test_get_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_trigger(request) - - def test_get_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7678,51 +7579,6 @@ def test_get_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_get_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.ListTriggersRequest, - dict, -]) -def test_list_triggers_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = eventarc.ListTriggersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListTriggersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_triggers(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTriggersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_triggers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7836,66 +7692,6 @@ def test_list_triggers_rest_unset_required_fields(): assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_triggers_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) - transcode.return_value = { - "method": "post", 
- "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) - - request = eventarc.ListTriggersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListTriggersResponse() - - client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_triggers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListTriggersRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_triggers(request) - - def test_list_triggers_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8011,133 +7807,38 @@ def test_list_triggers_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateTriggerRequest, - dict, -]) -def test_create_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_create_trigger_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': 
{}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] + # Ensure method has been cached + assert client._transport.create_trigger in client._transport._wrapped_methods - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + request = {} + client.create_trigger(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["trigger"][field])): - del request_init["trigger"][field][i][subfield] - else: - del request_init["trigger"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_trigger(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - -def test_create_trigger_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_trigger in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.create_trigger] = mock_rpc - - request = {} - client.create_trigger(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_trigger(request) + client.create_trigger(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -8243,67 +7944,6 @@ def test_create_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(("triggerId", "validateOnly", )) & set(("parent", "trigger", "triggerId", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.CreateTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_trigger(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_trigger(request) - - def test_create_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8359,108 +7999,6 @@ def test_create_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_create_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateTriggerRequest, - dict, -]) -def test_update_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': 
{'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["trigger"][field])): - del request_init["trigger"][field][i][subfield] - else: - del 
request_init["trigger"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_trigger(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - def test_update_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8583,77 +8121,16 @@ def test_update_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(("allowMissing", "updateMask", "validateOnly", )) & set(("validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_update_trigger_rest_flattened(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.UpdateTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_trigger(request) - - -def test_update_trigger_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') # get arguments that satisfy an http rule for this method sample_request = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} @@ -8699,44 +8176,6 @@ def test_update_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_update_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteTriggerRequest, - dict, -]) -def test_delete_trigger_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_trigger(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_delete_trigger_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8862,67 +8301,6 @@ def test_delete_trigger_rest_unset_required_fields(): assert set(unset_fields) == (set(("allowMissing", "etag", "validateOnly", )) & set(("name", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_trigger_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteTriggerRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def 
test_delete_trigger_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteTriggerRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_trigger(request) - - def test_delete_trigger_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8976,60 +8354,6 @@ def test_delete_trigger_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_trigger_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelRequest, - dict, -]) -def test_get_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = channel.Channel( - name='name_value', - uid='uid_value', - provider='provider_value', - state=channel.Channel.State.PENDING, - activation_token='activation_token_value', - crypto_key_name='crypto_key_name_value', - pubsub_topic='pubsub_topic_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel.Channel.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_channel(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, channel.Channel) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.provider == 'provider_value' - assert response.state == channel.Channel.State.PENDING - assert response.activation_token == 'activation_token_value' - assert response.crypto_key_name == 'crypto_key_name_value' - def test_get_channel_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9141,66 +8465,6 @@ def test_get_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ - 
mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = channel.Channel.to_json(channel.Channel()) - - request = eventarc.GetChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = channel.Channel() - - client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_channel(request) - - def test_get_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9254,66 +8518,21 @@ def test_get_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_get_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - +def test_list_channels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelsRequest, - dict, -]) -def test_list_channels_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = eventarc.ListChannelsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListChannelsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_channels(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListChannelsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - -def test_list_channels_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_channels in client._transport._wrapped_methods + # Ensure method has been cached + assert client._transport.list_channels in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -9412,66 +8631,6 @@ def test_list_channels_rest_unset_required_fields(): assert set(unset_fields) == (set(("orderBy", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channels_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else 
transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) - - request = eventarc.ListChannelsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListChannelsResponse() - - client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_channels_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_channels(request) - - def test_list_channels_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9587,101 +8746,6 @@ def test_list_channels_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelRequest, - dict, -]) -def test_create_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelRequest.meta.fields["channel"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel"][field])): - del request_init["channel"][field][i][subfield] - else: - del 
request_init["channel"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_channel(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - def test_create_channel_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9819,67 +8883,6 @@ def test_create_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(("channelId", "validateOnly", )) & set(("parent", "channel", "channelId", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.CreateChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_channel(request) - - def test_create_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9935,120 +8938,18 @@ def test_create_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_create_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_update_channel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateChannelRequest, - dict, -]) -def test_update_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO 
COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel"][field])): - del request_init["channel"][field][i][subfield] - else: - del request_init["channel"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_channel(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - -def test_update_channel_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.update_channel in client._transport._wrapped_methods @@ -10159,67 +9060,6 @@ def test_update_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", "validateOnly", )) & set(("validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.UpdateChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_channel(request) - - def test_update_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10273,44 +9113,6 @@ def test_update_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_update_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelRequest, - dict, -]) -def test_delete_channel_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 
'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_channel(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - def test_delete_channel_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10436,67 +9238,6 @@ def test_delete_channel_rest_unset_required_fields(): assert set(unset_fields) == (set(("validateOnly", )) & set(("name", "validateOnly", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteChannelRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_channel_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_channel(request) - - def test_delete_channel_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10548,63 +9289,18 @@ def test_delete_channel_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_channel_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_get_provider_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - eventarc.GetProviderRequest, - dict, -]) -def test_get_provider_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = discovery.Provider( - name='name_value', - display_name='display_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = discovery.Provider.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_provider(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, discovery.Provider) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - -def test_get_provider_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_provider in client._transport._wrapped_methods @@ -10704,66 +9400,6 @@ def test_get_provider_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_provider_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - 
mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = discovery.Provider.to_json(discovery.Provider()) - - request = eventarc.GetProviderRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = discovery.Provider() - - client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_provider_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetProviderRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_provider(request) - - def test_get_provider_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10817,51 +9453,6 @@ def test_get_provider_rest_flattened_error(transport: str = 'rest'): ) -def test_get_provider_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.ListProvidersRequest, - dict, -]) -def test_list_providers_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListProvidersResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListProvidersResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_providers(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListProvidersPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_providers_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10975,66 +9566,6 @@ def test_list_providers_rest_unset_required_fields(): assert set(unset_fields) == (set(("filter", "orderBy", "pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_providers_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) - - request = eventarc.ListProvidersRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListProvidersResponse() - - client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_list_providers_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListProvidersRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_providers(request) - - def test_list_providers_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11150,48 +9681,6 @@ def test_list_providers_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.GetChannelConnectionRequest, - dict, -]) -def test_get_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = channel_connection.ChannelConnection( - name='name_value', - uid='uid_value', - channel='channel_value', - activation_token='activation_token_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = channel_connection.ChannelConnection.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_channel_connection(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, channel_connection.ChannelConnection) - assert response.name == 'name_value' - assert response.uid == 'uid_value' - assert response.channel == 'channel_value' - assert response.activation_token == 'activation_token_value' - def test_get_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11303,71 +9792,11 @@ def test_get_channel_connection_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_get_channel_connection_rest_flattened(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, 
"pre_get_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) - - request = eventarc.GetChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = channel_connection.ChannelConnection() - - client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetChannelConnectionRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_channel_connection(request) - - -def test_get_channel_connection_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + transport="rest", + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), 'request') as req: @@ -11416,51 +9845,6 @@ def test_get_channel_connection_rest_flattened_error(transport: str = 'rest'): ) -def test_get_channel_connection_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.ListChannelConnectionsRequest, - dict, -]) -def test_list_channel_connections_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = eventarc.ListChannelConnectionsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_channel_connections(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListChannelConnectionsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_channel_connections_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11574,66 +9958,6 @@ def test_list_channel_connections_rest_unset_required_fields(): assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_channel_connections_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) - - request = eventarc.ListChannelConnectionsRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = eventarc.ListChannelConnectionsResponse() - - 
client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_channel_connections_rest_bad_request(transport: str = 'rest', request_type=eventarc.ListChannelConnectionsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_channel_connections(request) - - def test_list_channel_connections_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11749,101 +10073,6 @@ def test_list_channel_connections_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - eventarc.CreateChannelConnectionRequest, - dict, -]) -def test_create_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_connection"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the 
dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["channel_connection"][field])): - del request_init["channel_connection"][field][i][subfield] - else: - del request_init["channel_connection"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_channel_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_create_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11970,77 +10199,16 @@ def test_create_channel_connection_rest_unset_required_fields(): assert set(unset_fields) == (set(("channelConnectionId", )) & set(("parent", "channelConnection", "channelConnectionId", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( +def test_create_channel_connection_rest_flattened(): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } + transport="rest", + ) - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.CreateChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value 
= operations_pb2.Operation() - - client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.CreateChannelConnectionRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_channel_connection(request) - - -def test_create_channel_connection_rest_flattened(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') # get arguments that satisfy an http rule for this method sample_request = {'parent': 'projects/sample1/locations/sample2'} @@ -12086,44 +10254,6 @@ def test_create_channel_connection_rest_flattened_error(transport: str = 'rest') ) -def test_create_channel_connection_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.DeleteChannelConnectionRequest, - dict, -]) -def test_delete_channel_connection_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_channel_connection(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_delete_channel_connection_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12236,67 +10366,6 @@ def test_delete_channel_connection_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_channel_connection_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = eventarc.DeleteChannelConnectionRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - 
post.assert_called_once() - - -def test_delete_channel_connection_rest_bad_request(transport: str = 'rest', request_type=eventarc.DeleteChannelConnectionRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_channel_connection(request) - - def test_delete_channel_connection_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12348,51 +10417,6 @@ def test_delete_channel_connection_rest_flattened_error(transport: str = 'rest') ) -def test_delete_channel_connection_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - eventarc.GetGoogleChannelConfigRequest, - dict, -]) -def test_get_google_channel_config_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_google_channel_config(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - def test_get_google_channel_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -12504,66 +10528,6 @@ def test_get_google_channel_config_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_google_channel_config_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) - - request = eventarc.GetGoogleChannelConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = google_channel_config.GoogleChannelConfig() - - client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.GetGoogleChannelConfigRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_google_channel_config(request) - - def test_get_google_channel_config_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12617,138 +10581,29 @@ def test_get_google_channel_config_rest_flattened_error(transport: str = 'rest') ) -def test_get_google_channel_config_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize("request_type", [ - eventarc.UpdateGoogleChannelConfigRequest, - dict, -]) -def test_update_google_channel_config_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + # Ensure method has been cached + assert client._transport.update_google_channel_config in client._transport._wrapped_methods - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} - # The version of a generated dependency at 
test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc - # Determine if the message type is proto-plus or protobuf - test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result 
= value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["google_channel_config"][field])): - del request_init["google_channel_config"][field][i][subfield] - else: - del request_init["google_channel_config"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = gce_google_channel_config.GoogleChannelConfig( - name='name_value', - crypto_key_name='crypto_key_name_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_google_channel_config(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) - assert response.name == 'name_value' - assert response.crypto_key_name == 'crypto_key_name_value' - -def test_update_google_channel_config_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_google_channel_config in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport.update_google_channel_config] = mock_rpc - - request = {} - client.update_google_channel_config(request) + request = {} + client.update_google_channel_config(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -12835,66 +10690,6 @@ def test_update_google_channel_config_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", )) & set(("googleChannelConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_google_channel_config_rest_interceptors(null_interceptor): - transport = transports.EventarcRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), - ) - client = EventarcClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ - mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) - - request = eventarc.UpdateGoogleChannelConfigRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gce_google_channel_config.GoogleChannelConfig() - - client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_google_channel_config_rest_bad_request(transport: str = 'rest', request_type=eventarc.UpdateGoogleChannelConfigRequest): - 
client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_google_channel_config(request) - - def test_update_google_channel_config_rest_flattened(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12950,13 +10745,6 @@ def test_update_google_channel_config_rest_flattened_error(transport: str = 'res ) -def test_update_google_channel_config_rest_error(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.EventarcGrpcTransport( @@ -13051,6 +10839,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_get_trigger_empty_call_grpc(): @@ -13454,6 +11250,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -13948,1263 +11752,3458 @@ def test_transport_kind_rest(): assert transport.kind == "rest" -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest" ) - assert isinstance( - client.transport, - transports.EventarcGrpcTransport, + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_trigger(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetTriggerRequest, + dict, +]) +def test_get_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" ) -def test_eventarc_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.EventarcTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = trigger.Trigger( + name='name_value', + uid='uid_value', + service_account='service_account_value', + channel='channel_value', + etag='etag_value', ) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 -def test_eventarc_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.EventarcTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Convert return value to protobuf type + return_value = trigger.Trigger.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, trigger.Trigger) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.service_account == 'service_account_value' + assert response.channel == 'channel_value' + assert response.etag == 'etag_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) + client = EventarcClient(transport=transport) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'get_trigger', - 'list_triggers', - 'create_trigger', - 'update_trigger', - 'delete_trigger', - 'get_channel', - 'list_channels', - 'create_channel_', - 'update_channel', - 'delete_channel', - 'get_provider', - 'list_providers', - 'get_channel_connection', - 'list_channel_connections', - 'create_channel_connection', - 'delete_channel_connection', - 'get_google_channel_config', - 'update_google_channel_config', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - with pytest.raises(NotImplementedError): - transport.close() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = trigger.Trigger.to_json(trigger.Trigger()) + req.return_value.content = return_value - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client + request = eventarc.GetTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = trigger.Trigger() - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with 
pytest.raises(NotImplementedError): - getattr(transport, r)() + client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + pre.assert_called_once() + post.assert_called_once() -def test_eventarc_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.EventarcTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) +def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) -def test_eventarc_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.EventarcTransport() - adc.assert_called_once() + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_triggers(request) -def test_eventarc_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - EventarcClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) +@pytest.mark.parametrize("request_type", [ + eventarc.ListTriggersRequest, + dict, +]) +def test_list_triggers_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) -@pytest.mark.parametrize( - "transport_class", - [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - ], -) -def test_eventarc_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListTriggersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], ) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 -@pytest.mark.parametrize( - "transport_class", - [ - transports.EventarcGrpcTransport, - transports.EventarcGrpcAsyncIOTransport, - transports.EventarcRestTransport, - ], -) -def test_eventarc_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + # Convert return value to protobuf type + return_value = eventarc.ListTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_triggers(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTriggersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.EventarcGrpcTransport, grpc_helpers), - (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_eventarc_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - create_channel.assert_called_with( - "eventarc.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="eventarc.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_triggers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), ) + client = EventarcClient(transport=transport) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } -@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) + req.return_value.content = return_value - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + request = eventarc.ListTriggersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListTriggersResponse() - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) + client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) -def test_eventarc_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.EventarcRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + pre.assert_called_once() + post.assert_called_once() -def test_eventarc_rest_lro_client(): +def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, + transport="rest" ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_trigger(request) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", -]) -def test_eventarc_host_no_port(transport_name): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'eventarc.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://eventarc.googleapis.com' - ) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", +@pytest.mark.parametrize("request_type", [ + eventarc.CreateTriggerRequest, + dict, ]) -def test_eventarc_host_with_port(transport_name): +def test_create_trigger_rest_call_success(request_type): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'eventarc.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://eventarc.googleapis.com:8000' + transport="rest" ) -@pytest.mark.parametrize("transport_name", [ - "rest", -]) -def test_eventarc_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = EventarcClient( - credentials=creds1, - transport=transport_name, - ) - client2 = EventarcClient( - credentials=creds2, - transport=transport_name, - ) - session1 = 
client1.transport.get_trigger._session - session2 = client2.transport.get_trigger._session - assert session1 != session2 - session1 = client1.transport.list_triggers._session - session2 = client2.transport.list_triggers._session - assert session1 != session2 - session1 = client1.transport.create_trigger._session - session2 = client2.transport.create_trigger._session - assert session1 != session2 - session1 = client1.transport.update_trigger._session - session2 = client2.transport.update_trigger._session - assert session1 != session2 - session1 = client1.transport.delete_trigger._session - session2 = client2.transport.delete_trigger._session - assert session1 != session2 - session1 = client1.transport.get_channel._session - session2 = client2.transport.get_channel._session - assert session1 != session2 - session1 = client1.transport.list_channels._session - session2 = client2.transport.list_channels._session - assert session1 != session2 - session1 = client1.transport.create_channel_._session - session2 = client2.transport.create_channel_._session - assert session1 != session2 - session1 = client1.transport.update_channel._session - session2 = client2.transport.update_channel._session - assert session1 != session2 - session1 = client1.transport.delete_channel._session - session2 = client2.transport.delete_channel._session - assert session1 != session2 - session1 = client1.transport.get_provider._session - session2 = client2.transport.get_provider._session - assert session1 != session2 - session1 = client1.transport.list_providers._session - session2 = client2.transport.list_providers._session - assert session1 != session2 - session1 = client1.transport.get_channel_connection._session - session2 = client2.transport.get_channel_connection._session - assert session1 != session2 - session1 = client1.transport.list_channel_connections._session - session2 = client2.transport.list_channel_connections._session - assert session1 != session2 - session1 = 
client1.transport.create_channel_connection._session - session2 = client2.transport.create_channel_connection._session - assert session1 != session2 - session1 = client1.transport.delete_channel_connection._session - session2 = client2.transport.delete_channel_connection._session - assert session1 != session2 - session1 = client1.transport.get_google_channel_config._session - session2 = client2.transport.get_google_channel_config._session - assert session1 != session2 - session1 = client1.transport.update_google_channel_config._session - session2 = client2.transport.update_google_channel_config._session - assert session1 != session2 -def test_eventarc_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["trigger"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - # Check that channel is used if provided. 
- transport = transports.EventarcGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateTriggerRequest.meta.fields["trigger"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] -def test_eventarc_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Check that channel is used if provided. - transport = transports.EventarcGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + subfields_not_in_runtime = [] - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} ) - adc.assert_called_once() - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - 
quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) -def test_eventarc_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_trigger(request) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) -def test_eventarc_grpc_lro_client(): - client = EventarcClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - transport = client.transport + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, - ) + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + request = eventarc.CreateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() -def test_eventarc_grpc_lro_async_client(): - client = EventarcAsyncClient( + client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): + client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', + transport="rest" ) - transport = client.transport + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request = request_type(**request_init) - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_trigger(request) - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateTriggerRequest, + dict, +]) +def test_update_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) -def test_channel_path(): - project = "squid" - location = "clam" - channel = "whelk" - expected = "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) - actual = EventarcClient.channel_path(project, location, channel) - assert expected == actual + # send a request that will satisfy transcoding + request_init = {'trigger': {'name': 'projects/sample1/locations/sample2/triggers/sample3'}} + request_init["trigger"] = {'name': 'projects/sample1/locations/sample2/triggers/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'event_filters': [{'attribute': 'attribute_value', 'value': 'value_value', 'operator': 'operator_value'}], 'service_account': 'service_account_value', 'destination': {'cloud_run': {'service': 'service_value', 'path': 'path_value', 'region': 'region_value'}, 'cloud_function': 'cloud_function_value', 'gke': {'cluster': 'cluster_value', 'location': 'location_value', 'namespace': 'namespace_value', 'service': 'service_value', 'path': 'path_value'}, 'workflow': 'workflow_value'}, 'transport': {'pubsub': {'topic': 'topic_value', 'subscription': 'subscription_value'}}, 'labels': {}, 'channel': 'channel_value', 'conditions': {}, 'etag': 'etag_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateTriggerRequest.meta.fields["trigger"] -def test_parse_channel_path(): - expected = { - "project": "octopus", - "location": "oyster", - "channel": "nudibranch", - } - path = EventarcClient.channel_path(**expected) + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] - # Check that the path construction is reversible. - actual = EventarcClient.parse_channel_path(path) - assert expected == actual + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") -def test_channel_connection_path(): - project = "cuttlefish" - location = "mussel" - channel_connection = "winkle" - expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) - actual = EventarcClient.channel_connection_path(project, location, channel_connection) - assert expected == actual + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -def test_parse_channel_connection_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "channel_connection": "abalone", - } - path = EventarcClient.channel_connection_path(**expected) + 
subfields_not_in_runtime = [] - # Check that the path construction is reversible. - actual = EventarcClient.parse_channel_connection_path(path) - assert expected == actual + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["trigger"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -def test_cloud_function_path(): - project = "squid" - location = "clam" - function = "whelk" - expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) - actual = EventarcClient.cloud_function_path(project, location, function) - assert expected == actual + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["trigger"][field])): + del request_init["trigger"][field][i][subfield] + else: + del request_init["trigger"][field][subfield] + request = request_type(**request_init) -def test_parse_cloud_function_path(): - expected = { - 
"project": "octopus", - "location": "oyster", - "function": "nudibranch", - } - path = EventarcClient.cloud_function_path(**expected) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') - # Check that the path construction is reversible. - actual = EventarcClient.parse_cloud_function_path(path) - assert expected == actual + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_trigger(request) -def test_crypto_key_path(): - project = "cuttlefish" - location = "mussel" - key_ring = "winkle" - crypto_key = "nautilus" - expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) - actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) - assert expected == actual + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) -def test_parse_crypto_key_path(): - expected = { - "project": "scallop", - "location": "abalone", - "key_ring": "squid", - "crypto_key": "clam", - } - path = EventarcClient.crypto_key_path(**expected) +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) - # Check that the path construction is reversible. - actual = EventarcClient.parse_crypto_key_path(path) - assert expected == actual - -def test_google_channel_config_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) - actual = EventarcClient.google_channel_config_path(project, location) - assert expected == actual + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value -def test_parse_google_channel_config_path(): - expected = { - "project": "oyster", - "location": 
"nudibranch", - } - path = EventarcClient.google_channel_config_path(**expected) + request = eventarc.UpdateTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Check that the path construction is reversible. - actual = EventarcClient.parse_google_channel_config_path(path) - assert expected == actual + client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) -def test_provider_path(): - project = "cuttlefish" - location = "mussel" - provider = "winkle" - expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) - actual = EventarcClient.provider_path(project, location, provider) - assert expected == actual + pre.assert_called_once() + post.assert_called_once() -def test_parse_provider_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "provider": "abalone", - } - path = EventarcClient.provider_path(**expected) +def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) - # Check that the path construction is reversible. - actual = EventarcClient.parse_provider_path(path) - assert expected == actual + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_trigger(request) -def test_service_path(): - expected = "*".format() - actual = EventarcClient.service_path() - assert expected == actual +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteTriggerRequest, + dict, +]) +def test_delete_trigger_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) -def test_parse_service_path(): - expected = { - } - path = EventarcClient.service_path(**expected) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) - # Check that the path construction is reversible. - actual = EventarcClient.parse_service_path(path) - assert expected == actual + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') -def test_service_account_path(): - project = "squid" - service_account = "clam" - expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) - actual = EventarcClient.service_account_path(project, service_account) - assert expected == actual + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_trigger(request) + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) -def test_parse_service_account_path(): - expected = { - "project": "whelk", - "service_account": "octopus", - } - path = EventarcClient.service_account_path(**expected) - # Check that the path construction is reversible. 
- actual = EventarcClient.parse_service_account_path(path) - assert expected == actual +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_trigger_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) -def test_trigger_path(): - project = "oyster" - location = "nudibranch" - trigger = "cuttlefish" - expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) - actual = EventarcClient.trigger_path(project, location, trigger) - assert expected == actual + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value -def test_parse_trigger_path(): - expected = { - "project": "mussel", - "location": "winkle", - "trigger": "nautilus", - } - path = EventarcClient.trigger_path(**expected) + request = eventarc.DeleteTriggerRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() - # Check that the path 
construction is reversible. - actual = EventarcClient.parse_trigger_path(path) - assert expected == actual + client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) -def test_workflow_path(): - project = "scallop" - location = "abalone" - workflow = "squid" - expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) - actual = EventarcClient.workflow_path(project, location, workflow) - assert expected == actual + pre.assert_called_once() + post.assert_called_once() -def test_parse_workflow_path(): - expected = { - "project": "clam", - "location": "whelk", - "workflow": "octopus", - } - path = EventarcClient.workflow_path(**expected) +def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) - # Check that the path construction is reversible. - actual = EventarcClient.parse_workflow_path(path) - assert expected == actual + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_channel(request) -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = EventarcClient.common_billing_account_path(billing_account) - assert expected == actual +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelRequest, + dict, +]) +def test_get_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = EventarcClient.common_billing_account_path(**expected) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_billing_account_path(path) - assert expected == actual + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = channel.Channel( + name='name_value', + uid='uid_value', + provider='provider_value', + state=channel.Channel.State.PENDING, + activation_token='activation_token_value', + crypto_key_name='crypto_key_name_value', + pubsub_topic='pubsub_topic_value', + ) -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = EventarcClient.common_folder_path(folder) - assert expected == actual + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = channel.Channel.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_channel(request) -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = EventarcClient.common_folder_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, channel.Channel) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.provider == 'provider_value' + assert response.state == channel.Channel.State.PENDING + assert response.activation_token == 'activation_token_value' + assert response.crypto_key_name == 'crypto_key_name_value' - # Check that the path construction is reversible. 
- actual = EventarcClient.parse_common_folder_path(path) - assert expected == actual -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = EventarcClient.common_organization_path(organization) - assert expected == actual +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = channel.Channel.to_json(channel.Channel()) + req.return_value.content = return_value + + request = eventarc.GetChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel.Channel() + + client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init 
= {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_channels(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelsRequest, + dict, +]) +def test_list_channels_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListChannelsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_channels(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListChannelsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channels_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) + req.return_value.content = return_value + + request = eventarc.ListChannelsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelsResponse() + + client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelRequest, + dict, +]) +def test_create_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel"] = {'name': 'name_value', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateChannelRequest.meta.fields["channel"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del 
request_init["channel"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_channel(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value 
+ + request = eventarc.CreateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateChannelRequest, + dict, +]) +def test_update_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'channel': {'name': 'projects/sample1/locations/sample2/channels/sample3'}} + request_init["channel"] = {'name': 'projects/sample1/locations/sample2/channels/sample3', 'uid': 'uid_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'provider': 'provider_value', 'pubsub_topic': 'pubsub_topic_value', 'state': 1, 'activation_token': 'activation_token_value', 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime 
may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateChannelRequest.meta.fields["channel"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the 
runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel"][field])): + del request_init["channel"][field][i][subfield] + else: + del request_init["channel"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_channel(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.UpdateChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_channel(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelRequest, + dict, +]) +def test_delete_channel_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channels/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_channel(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteChannelRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_provider(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetProviderRequest, + dict, +]) +def test_get_provider_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/providers/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = discovery.Provider( + name='name_value', + display_name='display_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = discovery.Provider.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_provider(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, discovery.Provider) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_provider_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = discovery.Provider.to_json(discovery.Provider()) + req.return_value.content = return_value + + request = eventarc.GetProviderRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = discovery.Provider() + + client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_providers(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListProvidersRequest, + dict, +]) +def test_list_providers_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = eventarc.ListProvidersResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListProvidersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_providers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListProvidersPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_providers_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) + req.return_value.content = return_value + + request = eventarc.ListProvidersRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListProvidersResponse() + + client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetChannelConnectionRequest, + dict, +]) +def test_get_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = channel_connection.ChannelConnection( + name='name_value', + uid='uid_value', + channel='channel_value', + activation_token='activation_token_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = channel_connection.ChannelConnection.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_channel_connection(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, channel_connection.ChannelConnection) + assert response.name == 'name_value' + assert response.uid == 'uid_value' + assert response.channel == 'channel_value' + assert response.activation_token == 'activation_token_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) + req.return_value.content = return_value + + request = eventarc.GetChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = channel_connection.ChannelConnection() + + client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): + client = EventarcClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_channel_connections(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.ListChannelConnectionsRequest, + dict, +]) +def test_list_channel_connections_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = eventarc.ListChannelConnectionsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = eventarc.ListChannelConnectionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_channel_connections(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListChannelConnectionsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_channel_connections_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) 
+ req.return_value.content = return_value + + request = eventarc.ListChannelConnectionsRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = eventarc.ListChannelConnectionsResponse() + + client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_channel_connection_rest_bad_request(request_type=eventarc.CreateChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.CreateChannelConnectionRequest, + dict, +]) +def test_create_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["channel_connection"] = {'name': 'name_value', 'uid': 'uid_value', 'channel': 'channel_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'update_time': {}, 'activation_token': 'activation_token_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.CreateChannelConnectionRequest.meta.fields["channel_connection"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["channel_connection"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["channel_connection"][field])): + del request_init["channel_connection"][field][i][subfield] + else: + del request_init["channel_connection"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_channel_connection(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.CreateChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_channel_connection_rest_bad_request(request_type=eventarc.DeleteChannelConnectionRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_channel_connection(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.DeleteChannelConnectionRequest, + dict, +]) +def test_delete_channel_connection_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/channelConnections/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.delete_channel_connection(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_channel_connection_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = eventarc.DeleteChannelConnectionRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_google_channel_config(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.GetGoogleChannelConfigRequest, + dict, +]) +def test_get_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/googleChannelConfig'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_google_channel_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) + req.return_value.content = return_value + + request = eventarc.GetGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = google_channel_config.GoogleChannelConfig() + + client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a 
request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_google_channel_config(request) + + +@pytest.mark.parametrize("request_type", [ + eventarc.UpdateGoogleChannelConfigRequest, + dict, +]) +def test_update_google_channel_config_rest_call_success(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'google_channel_config': {'name': 'projects/sample1/locations/sample2/googleChannelConfig'}} + request_init["google_channel_config"] = {'name': 'projects/sample1/locations/sample2/googleChannelConfig', 'update_time': {'seconds': 751, 'nanos': 543}, 'crypto_key_name': 'crypto_key_name_value'} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = eventarc.UpdateGoogleChannelConfigRequest.meta.fields["google_channel_config"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["google_channel_config"][field])): + del request_init["google_channel_config"][field][i][subfield] 
+ else: + del request_init["google_channel_config"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = gce_google_channel_config.GoogleChannelConfig( + name='name_value', + crypto_key_name='crypto_key_name_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gce_google_channel_config.GoogleChannelConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_google_channel_config(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gce_google_channel_config.GoogleChannelConfig) + assert response.name == 'name_value' + assert response.crypto_key_name == 'crypto_key_name_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_google_channel_config_rest_interceptors(null_interceptor): + transport = transports.EventarcRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.EventarcRestInterceptor(), + ) + client = EventarcClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) + req.return_value.content = return_value + + request = eventarc.UpdateGoogleChannelConfigRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gce_google_channel_config.GoogleChannelConfig() + + client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.GetIamPolicyRequest, + dict, +]) +def test_get_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolicyRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.SetIamPolicyRequest, + dict, +]) +def test_set_iam_policy_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestIamPermissionsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize("request_type", [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, +]) +def test_test_iam_permissions_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +def test_eventarc_rest_lro_client(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.EventarcGrpcTransport, + ) + +def test_eventarc_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_eventarc_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.EventarcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'get_trigger', + 'list_triggers', + 'create_trigger', + 'update_trigger', + 'delete_trigger', + 'get_channel', + 'list_channels', + 'create_channel_', + 'update_channel', + 'delete_channel', + 'get_provider', + 'list_providers', + 'get_channel_connection', + 'list_channel_connections', + 'create_channel_connection', + 'delete_channel_connection', + 'get_google_channel_config', + 'update_google_channel_config', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_eventarc_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_eventarc_base_transport_with_adc(): + # Test the default credentials are used if credentials 
and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.eventarc_v1.services.eventarc.transports.EventarcTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.EventarcTransport() + adc.assert_called_once() + + +def test_eventarc_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + EventarcClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = EventarcClient.common_organization_path(**expected) +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + ], +) +def test_eventarc_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) - # Check that the path construction is reversible. 
- actual = EventarcClient.parse_common_organization_path(path) - assert expected == actual -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = EventarcClient.common_project_path(project) - assert expected == actual +@pytest.mark.parametrize( + "transport_class", + [ + transports.EventarcGrpcTransport, + transports.EventarcGrpcAsyncIOTransport, + transports.EventarcRestTransport, + ], +) +def test_eventarc_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.EventarcGrpcTransport, grpc_helpers), + (transports.EventarcGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_eventarc_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "eventarc.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="eventarc.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_eventarc_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.EventarcRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_eventarc_host_no_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_eventarc_host_with_port(transport_name): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='eventarc.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'eventarc.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://eventarc.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_eventarc_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = EventarcClient( + credentials=creds1, + transport=transport_name, + ) + client2 = EventarcClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_trigger._session + session2 = client2.transport.get_trigger._session + assert session1 != session2 + session1 = client1.transport.list_triggers._session + session2 = client2.transport.list_triggers._session + assert session1 != session2 + session1 = client1.transport.create_trigger._session + session2 = client2.transport.create_trigger._session + assert session1 != session2 + session1 = client1.transport.update_trigger._session + session2 = client2.transport.update_trigger._session + assert session1 != session2 + session1 = client1.transport.delete_trigger._session + session2 = client2.transport.delete_trigger._session + assert session1 != session2 + session1 = client1.transport.get_channel._session + session2 = client2.transport.get_channel._session + assert session1 != session2 + session1 = client1.transport.list_channels._session + session2 = client2.transport.list_channels._session + assert session1 != session2 + session1 = client1.transport.create_channel_._session + session2 = client2.transport.create_channel_._session + assert session1 != session2 + session1 = client1.transport.update_channel._session + session2 = client2.transport.update_channel._session + assert session1 != session2 + session1 = client1.transport.delete_channel._session + session2 = client2.transport.delete_channel._session + assert session1 != session2 + session1 = client1.transport.get_provider._session + session2 = client2.transport.get_provider._session + assert session1 != session2 + session1 = client1.transport.list_providers._session + session2 = client2.transport.list_providers._session + assert session1 != session2 + 
session1 = client1.transport.get_channel_connection._session + session2 = client2.transport.get_channel_connection._session + assert session1 != session2 + session1 = client1.transport.list_channel_connections._session + session2 = client2.transport.list_channel_connections._session + assert session1 != session2 + session1 = client1.transport.create_channel_connection._session + session2 = client2.transport.create_channel_connection._session + assert session1 != session2 + session1 = client1.transport.delete_channel_connection._session + session2 = client2.transport.delete_channel_connection._session + assert session1 != session2 + session1 = client1.transport.get_google_channel_config._session + session2 = client2.transport.get_google_channel_config._session + assert session1 != session2 + session1 = client1.transport.update_google_channel_config._session + session2 = client2.transport.update_google_channel_config._session + assert session1 != session2 +def test_eventarc_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.EventarcGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = EventarcClient.common_project_path(**expected) +def test_eventarc_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - # Check that the path construction is reversible. - actual = EventarcClient.parse_common_project_path(path) - assert expected == actual + # Check that channel is used if provided. 
+ transport = transports.EventarcGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = EventarcClient.common_location_path(project, location) - assert expected == actual +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = EventarcClient.common_location_path(**expected) + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel - # Check that the path construction is reversible. 
- actual = EventarcClient.parse_common_location_path(path) - assert expected == actual + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) +def test_eventarc_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() - with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: - transport_class = EventarcClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = EventarcAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel -def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): +def test_eventarc_grpc_lro_client(): client = EventarcClient( 
credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc', ) + transport = client.transport - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = EventarcClient( + +def test_eventarc_grpc_lro_async_client(): + client = EventarcAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc_asyncio', ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = locations_pb2.Location() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + transport = client.transport - response = client.get_location(request) + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client -def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) +def test_channel_path(): + project = "squid" + location = "clam" + channel = "whelk" + expected = "projects/{project}/locations/{location}/channels/{channel}".format(project=project, location=location, channel=channel, ) + actual = EventarcClient.channel_path(project, location, channel) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() +def test_parse_channel_path(): + expected = { + "project": "octopus", + "location": "oyster", + "channel": "nudibranch", + } + path = EventarcClient.channel_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_channel_connection_path(): + project = "cuttlefish" + location = "mussel" + channel_connection = "winkle" + expected = "projects/{project}/locations/{location}/channelConnections/{channel_connection}".format(project=project, location=location, channel_connection=channel_connection, ) + actual = EventarcClient.channel_connection_path(project, location, channel_connection) + assert expected == actual - response = client.list_locations(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) +def test_parse_channel_connection_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "channel_connection": "abalone", + } + path = EventarcClient.channel_connection_path(**expected) -def test_get_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.GetIamPolicyRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_channel_connection_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) +def test_cloud_function_path(): + project = "squid" + location = "clam" + function = "whelk" + expected = "projects/{project}/locations/{location}/functions/{function}".format(project=project, location=location, function=function, ) + actual = EventarcClient.cloud_function_path(project, location, function) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.GetIamPolicyRequest, - dict, -]) -def test_get_iam_policy_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() +def test_parse_cloud_function_path(): + expected = { + "project": "octopus", + "location": "oyster", + "function": "nudibranch", + } + path = EventarcClient.cloud_function_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_cloud_function_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_crypto_key_path(): + project = "cuttlefish" + location = "mussel" + key_ring = "winkle" + crypto_key = "nautilus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(project=project, location=location, key_ring=key_ring, crypto_key=crypto_key, ) + actual = EventarcClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual - response = client.get_iam_policy(request) - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) +def test_parse_crypto_key_path(): + expected = { + "project": "scallop", + "location": "abalone", + "key_ring": "squid", + "crypto_key": "clam", + } + path = EventarcClient.crypto_key_path(**expected) -def test_set_iam_policy_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.SetIamPolicyRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_crypto_key_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) +def test_google_channel_config_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}/googleChannelConfig".format(project=project, location=location, ) + actual = EventarcClient.google_channel_config_path(project, location) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.SetIamPolicyRequest, - dict, -]) -def test_set_iam_policy_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() +def test_parse_google_channel_config_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = EventarcClient.google_channel_config_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_google_channel_config_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_provider_path(): + project = "cuttlefish" + location = "mussel" + provider = "winkle" + expected = "projects/{project}/locations/{location}/providers/{provider}".format(project=project, location=location, provider=provider, ) + actual = EventarcClient.provider_path(project, location, provider) + assert expected == actual - response = client.set_iam_policy(request) - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) +def test_parse_provider_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "provider": "abalone", + } + path = EventarcClient.provider_path(**expected) -def test_test_iam_permissions_rest_bad_request(transport: str = 'rest', request_type=iam_policy_pb2.TestIamPermissionsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_provider_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'resource': 'projects/sample1/locations/sample2/triggers/sample3'}, request) +def test_service_path(): + expected = "*".format() + actual = EventarcClient.service_path() + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) -@pytest.mark.parametrize("request_type", [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, -]) -def test_test_iam_permissions_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'resource': 'projects/sample1/locations/sample2/triggers/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() +def test_parse_service_path(): + expected = { + } + path = EventarcClient.service_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_service_account_path(): + project = "squid" + service_account = "clam" + expected = "projects/{project}/serviceAccounts/{service_account}".format(project=project, service_account=service_account, ) + actual = EventarcClient.service_account_path(project, service_account) + assert expected == actual - response = client.test_iam_permissions(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) +def test_parse_service_account_path(): + expected = { + "project": "whelk", + "service_account": "octopus", + } + path = EventarcClient.service_account_path(**expected) -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_service_account_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) +def test_trigger_path(): + project = "oyster" + location = "nudibranch" + trigger = "cuttlefish" + expected = "projects/{project}/locations/{location}/triggers/{trigger}".format(project=project, location=location, trigger=trigger, ) + actual = EventarcClient.trigger_path(project, location, trigger) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_trigger_path(): + expected = { + "project": "mussel", + "location": "winkle", + "trigger": "nautilus", + } + path = EventarcClient.trigger_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' + # Check that the path construction is reversible. + actual = EventarcClient.parse_trigger_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_workflow_path(): + project = "scallop" + location = "abalone" + workflow = "squid" + expected = "projects/{project}/locations/{location}/workflows/{workflow}".format(project=project, location=location, workflow=workflow, ) + actual = EventarcClient.workflow_path(project, location, workflow) + assert expected == actual - response = client.cancel_operation(request) - # Establish that the response is the type that we expect. - assert response is None +def test_parse_workflow_path(): + expected = { + "project": "clam", + "location": "whelk", + "workflow": "octopus", + } + path = EventarcClient.workflow_path(**expected) -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_workflow_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = EventarcClient.common_billing_account_path(billing_account) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = EventarcClient.common_billing_account_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' + # Check that the path construction is reversible. 
+ actual = EventarcClient.parse_common_billing_account_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = EventarcClient.common_folder_path(folder) + assert expected == actual - response = client.delete_operation(request) - # Establish that the response is the type that we expect. - assert response is None +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = EventarcClient.common_folder_path(**expected) -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_folder_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = EventarcClient.common_organization_path(organization) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = EventarcClient.common_organization_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_organization_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = EventarcClient.common_project_path(project) + assert expected == actual - response = client.get_operation(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = EventarcClient.common_project_path(**expected) -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_project_path(path) + assert expected == actual - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = EventarcClient.common_location_path(project, location) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.ListOperationsResponse() +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = EventarcClient.common_location_path(**expected) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + # Check that the path construction is reversible. + actual = EventarcClient.parse_common_location_path(path) + assert expected == actual - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_operations(request) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.ListOperationsResponse) + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.EventarcTransport, '_prep_wrapped_messages') as prep: + transport_class = EventarcClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -16447,21 +16446,40 @@ async def test_test_iam_permissions_from_dict_async(): ) call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - for transport, close_name in transports.items(): - client = EventarcClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - 
close.assert_called_once() +def test_transport_close_grpc(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = EventarcAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index a02aedca9f37..dbe7f7077425 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -3822,11 +3822,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -3876,11 +3872,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -3933,11 +3925,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 9ce55ae44f6a..67b84604b9d5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -4213,11 +4213,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -4267,11 +4263,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -4324,11 +4316,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 26cbb2b350ab..6dbea3b48393 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -362,6 +362,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 4b5df55245d6..9479b4476c08 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1430,6 +1430,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: 
self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 4e96eec30d0a..d8c1d2284de7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1003,11 +1003,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1057,11 +1053,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1114,11 +1106,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 083eb997b046..7cfbc3b0babd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1353,11 +1353,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1407,11 +1403,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1464,11 +1456,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 3ec63b61486a..ff6d8c8dd20a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -221,6 +221,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index fa0d3072d4e5..83a400473ff4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -513,6 +513,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=3600.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 01d94e8ff268..70a383b96779 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -855,11 +855,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -909,11 +905,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -966,11 +958,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 41a23585681b..2680b5970767 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1205,11 +1205,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1259,11 +1255,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1316,11 +1308,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 941b7050f7fa..3fdfd91333d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -194,6 +194,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 91fb8fab4fa1..2707c88cf5e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -441,6 +441,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 8bfa1824327f..2d9c19f37c00 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -46,6 +46,8 @@ "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +extras = { +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -88,6 +90,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index bb77f848fbc6..80b4f75e6ebc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -61,6 +61,11 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -10907,6 +10912,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_buckets_empty_call_grpc(): @@ -11618,6 +11631,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio @@ -13186,17 +13207,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = ConfigServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - def test_cancel_operation(transport: str = "grpc"): client = ConfigServiceV2Client( @@ -13585,20 +13595,28 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = ConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = ConfigServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index dc5a94a1efa5..bb2190376003 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -62,6 +62,11 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -2895,6 +2900,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_delete_log_empty_call_grpc(): @@ -3012,6 +3025,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio @@ -3634,17 +3655,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = LoggingServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - def test_cancel_operation(transport: str = "grpc"): client = LoggingServiceV2Client( @@ -4033,20 +4043,28 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = LoggingServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 64dab1eecb85..a80079b399bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -60,6 +60,11 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -2682,6 +2687,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_log_metrics_empty_call_grpc(): @@ -2799,6 +2812,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio @@ -3438,17 +3459,6 @@ def test_client_with_default_client_info(): ) prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = MetricsServiceV2AsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - def test_cancel_operation(transport: str = "grpc"): client = MetricsServiceV2Client( @@ -3837,20 +3847,28 @@ async def test_list_operations_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = MetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = MetricsServiceV2Client( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 6bf97550411b..ca71e086e398 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1744,11 +1744,7 @@ async def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -1798,11 +1794,7 @@ async def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1856,11 +1848,7 @@ async def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1909,11 +1897,7 @@ async def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -1959,11 +1943,7 @@ async def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -2013,11 +1993,7 @@ async def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index d06e6a56eb8e..cb40c31c34ec 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -49,7 +49,12 @@ from .transports.grpc import CloudRedisGrpcTransport from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .transports.rest import CloudRedisRestTransport -from .transports.rest_asyncio import AsyncCloudRedisRestTransport +try: + from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True +except ImportError as e: # pragma: NO COVER + HAS_ASYNC_REST_DEPENDENCIES = False + ASYNC_REST_EXCEPTION = e class CloudRedisClientMeta(type): @@ -63,7 +68,8 @@ class CloudRedisClientMeta(type): _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport _transport_registry["rest"] = CloudRedisRestTransport - _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport def get_transport_class(cls, label: Optional[str] = None, @@ -78,6 +84,8 @@ def get_transport_class(cls, The transport class to use. """ # If a specific transport is requested, return that one. 
+ if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + raise ASYNC_REST_EXCEPTION if label: return cls._transport_registry[label] @@ -578,16 +586,38 @@ def __init__(self, *, self._use_mtls_endpoint)) if not transport_provided: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( CloudRedisClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., CloudRedisTransport], transport) ) + + if "rest_asyncio" in str(transport_init): + unsupported_params = { + "google.api_core.client_options.ClientOptions.credentials_file": self._client_options.credentials_file, + "google.api_core.client_options.ClientOptions.scopes": self._client_options.scopes, + "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, + "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, + "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, + + } + provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + if provided_unsupported_params: + raise core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore + f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" + ) + self._transport = transport_init( + credentials=credentials, + host=self._api_endpoint, + client_info=client_info, + ) + return + + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + # initialize with 
the provided callable or the passed in class self._transport = transport_init( credentials=credentials, @@ -2092,11 +2122,7 @@ def list_operations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_operations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_operations] # Certain fields should be provided within the metadata header; # add these here. @@ -2146,11 +2172,7 @@ def get_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2204,11 +2226,7 @@ def delete_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2257,11 +2275,7 @@ def cancel_operation( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] # Certain fields should be provided within the metadata header; # add these here. @@ -2307,11 +2321,7 @@ def get_location( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_location, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_location] # Certain fields should be provided within the metadata header; # add these here. @@ -2361,11 +2371,7 @@ def list_locations( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_locations, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.list_locations] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 889648d305d4..563cd5dd7682 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -14,13 +14,22 @@ # limitations under the License. # from collections import OrderedDict -from typing import Dict, Type +from typing import Dict, Type, Tuple from .base import CloudRedisTransport from .grpc import CloudRedisGrpcTransport from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .rest import CloudRedisRestTransport from .rest import CloudRedisRestInterceptor +ASYNC_REST_CLASSES: Tuple[str, ...] 
+try: + from .rest_asyncio import AsyncCloudRedisRestTransport + from .rest_asyncio import AsyncCloudRedisRestInterceptor + ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor') + HAS_REST_ASYNC = True +except ImportError: # pragma: NO COVER + ASYNC_REST_CLASSES = () + HAS_REST_ASYNC = False # Compile a registry of transports. @@ -28,6 +37,8 @@ _transport_registry['grpc'] = CloudRedisGrpcTransport _transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport _transport_registry['rest'] = CloudRedisRestTransport +if HAS_REST_ASYNC: # pragma: NO COVER + _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport __all__ = ( 'CloudRedisTransport', @@ -35,4 +46,4 @@ 'CloudRedisGrpcAsyncIOTransport', 'CloudRedisRestTransport', 'CloudRedisRestInterceptor', -) +) + ASYNC_REST_CLASSES diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index b8a9fa84b5c4..feb12006b170 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -177,6 +177,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + 
client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 364bd7e9503e..385d9f24490a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -669,6 +669,36 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 5d99f7e2c3f7..182ce777148e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -177,6 +177,7 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations it is returned to user code. """ return response + def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for delete_instance @@ -193,6 +194,7 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations it is returned to user code. """ return response + def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for export_instance @@ -209,6 +211,7 @@ def post_export_instance(self, response: operations_pb2.Operation) -> operations it is returned to user code. """ return response + def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for failover_instance @@ -225,6 +228,7 @@ def post_failover_instance(self, response: operations_pb2.Operation) -> operatio it is returned to user code. 
""" return response + def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_instance @@ -241,6 +245,7 @@ def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Insta it is returned to user code. """ return response + def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for get_instance_auth_string @@ -257,6 +262,7 @@ def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString it is returned to user code. """ return response + def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for import_instance @@ -273,6 +279,7 @@ def post_import_instance(self, response: operations_pb2.Operation) -> operations it is returned to user code. """ return response + def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for list_instances @@ -289,6 +296,7 @@ def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cl it is returned to user code. """ return response + def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for reschedule_maintenance @@ -305,6 +313,7 @@ def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> ope it is returned to user code. 
""" return response + def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for update_instance @@ -321,6 +330,7 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations it is returned to user code. """ return response + def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, str]]]: """Pre-rpc interceptor for upgrade_instance @@ -358,6 +368,7 @@ def post_get_location( it is returned to user code. """ return response + def pre_list_locations( self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: @@ -378,6 +389,7 @@ def post_list_locations( it is returned to user code. """ return response + def pre_cancel_operation( self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: @@ -398,6 +410,7 @@ def post_cancel_operation( it is returned to user code. """ return response + def pre_delete_operation( self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: @@ -418,6 +431,7 @@ def post_delete_operation( it is returned to user code. """ return response + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: @@ -438,6 +452,7 @@ def post_get_operation( it is returned to user code. 
""" return response + def pre_list_operations( self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: @@ -1534,6 +1549,9 @@ def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetLocation") + @staticmethod def _get_response( host, @@ -1593,8 +1611,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.Location() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) return resp @@ -1603,6 +1622,9 @@ def list_locations(self): return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListLocations") + @staticmethod def _get_response( host, @@ -1662,8 +1684,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = locations_pb2.ListLocationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) return resp @@ -1672,6 +1695,9 @@ def cancel_operation(self): return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.CancelOperation") + @staticmethod def _get_response( 
host, @@ -1735,6 +1761,9 @@ def delete_operation(self): return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.DeleteOperation") + @staticmethod def _get_response( host, @@ -1798,6 +1827,9 @@ def get_operation(self): return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetOperation") + @staticmethod def _get_response( host, @@ -1857,8 +1889,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) return resp @@ -1867,6 +1900,9 @@ def list_operations(self): return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListOperations") + @staticmethod def _get_response( host, @@ -1926,8 +1962,9 @@ def __call__(self, if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = operations_pb2.ListOperationsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) return resp diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 5127656c348e..9c99985554c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -13,20 +13,484 @@ # See the License for the specific language governing permissions and # limitations under the License. # + +import google.auth +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. 
Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e + +from google.auth.aio import credentials as ga_credentials_async # type: ignore + +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore +from google.api_core import retry_async as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming_async # type: ignore + + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + +import json # type: ignore +import dataclasses +from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union + + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore -from typing import Any, Optional from .rest_base import _BaseCloudRedisRestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=None, + rest_version=google.auth.__version__ ) + +class AsyncCloudRedisRestInterceptor: + """Asynchronous Interceptor for CloudRedis. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AsyncCloudRedisRestTransport. 
+ + .. code-block:: python + class MyCustomCloudRedisInterceptor(CloudRedisRestInterceptor): + async def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_export_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_export_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_failover_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_failover_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_get_instance_auth_string(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_get_instance_auth_string(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_import_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_import_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + 
+            async def post_list_instances(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            async def pre_reschedule_maintenance(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            async def post_reschedule_maintenance(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            async def pre_update_instance(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            async def post_update_instance(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+            async def pre_upgrade_instance(self, request, metadata):
+                logging.log(f"Received request: {request}")
+                return request, metadata
+
+            async def post_upgrade_instance(self, response):
+                logging.log(f"Received response: {response}")
+                return response
+
+        transport = AsyncCloudRedisRestTransport(interceptor=MyCustomCloudRedisInterceptor())
+        client = CloudRedisClient(transport=transport)
+
+
+    """
+    async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, str]]]:
+        """Pre-rpc interceptor for create_instance
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the CloudRedis server.
+        """
+        return request, metadata
+
+    async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation:
+        """Post-rpc interceptor for create_instance
+
+        Override in a subclass to manipulate the response
+        after it is returned by the CloudRedis server but before
+        it is returned to user code.
+ """ + return response + + async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for export_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for export_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for failover_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for failover_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + """Post-rpc interceptor for get_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_instance_auth_string + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: + """Post-rpc interceptor for get_instance_auth_string + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for import_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for import_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for upgrade_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for upgrade_instance + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + async def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + async def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AsyncCloudRedisRestStub: + _session: AsyncAuthorizedSession + _host: str + _interceptor: AsyncCloudRedisRestInterceptor + class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): """Asynchronous REST backend transport for CloudRedis. @@ -58,11 +522,13 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): It sends JSON representations of protocol buffers over HTTP/1.1 """ - def __init__(self, *, + def __init__(self, + *, host: str = 'redis.googleapis.com', - credentials: Optional[Any] = None, + credentials: Optional[ga_credentials_async.Credentials] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, url_scheme: str = 'https', + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, ) -> None: """Instantiate the transport. @@ -73,7 +539,7 @@ def __init__(self, *, Args: host (Optional[str]): The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[Any]): The + credentials (Optional[google.auth.aio.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -96,7 +562,1556 @@ def __init__(self, *, url_scheme=url_scheme, api_audience=None ) + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() + self._wrap_with_kind = True + self._prep_wrapped_messages(client_info) + self._operations_client: Optional[operations_v1.OperationsClient] = None + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: self._wrap_method( + self.list_instances, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance: self._wrap_method( + self.get_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance_auth_string: self._wrap_method( + self.get_instance_auth_string, + default_timeout=600.0, + client_info=client_info, + ), + self.create_instance: self._wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: self._wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.upgrade_instance: self._wrap_method( + self.upgrade_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.import_instance: self._wrap_method( + self.import_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.export_instance: self._wrap_method( + self.export_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.failover_instance: self._wrap_method( + self.failover_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: self._wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.reschedule_maintenance: self._wrap_method( + 
self.reschedule_maintenance, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.CreateInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over 
HTTP. + + Args: + request (~.cloud_redis.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + request, metadata = await self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_create_instance(resp) + return resp + + class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.DeleteInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.cloud_redis.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + request, metadata = await self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_delete_instance(resp) + return resp + + class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ExportInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = 
dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.ExportInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the export instance method over HTTP. + + Args: + request (~.cloud_redis.ExportInstanceRequest): + The request object. Request for + [Export][google.cloud.redis.v1.CloudRedis.ExportInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + request, metadata = await self._interceptor.pre_export_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseExportInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_export_instance(resp) + return resp + + class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.FailoverInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.FailoverInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the failover instance method over HTTP. + + Args: + request (~.cloud_redis.FailoverInstanceRequest): + The request object. Request for + [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + request, metadata = await self._interceptor.pre_failover_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_failover_instance(resp) + return resp + + class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.cloud_redis.Instance: + A Memorystore for Redis instance. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + request, metadata = await self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.Instance() + pb_resp = cloud_redis.Instance.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_get_instance(resp) + return resp + + class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetInstanceAuthString") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.GetInstanceAuthStringRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.InstanceAuthString: + r"""Call the get instance auth string method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceAuthStringRequest): + The request object. Request for + [GetInstanceAuthString][google.cloud.redis.v1.CloudRedis.GetInstanceAuthString]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis.InstanceAuthString: + Instance AUTH string details. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() + request, metadata = await self._interceptor.pre_get_instance_auth_string(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.InstanceAuthString() + pb_resp = cloud_redis.InstanceAuthString.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_get_instance_auth_string(resp) + return resp + + class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ImportInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.ImportInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the import instance method over HTTP. + + Args: + request (~.cloud_redis.ImportInstanceRequest): + The request object. Request for + [Import][google.cloud.redis.v1.CloudRedis.ImportInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + request, metadata = await self._interceptor.pre_import_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseImportInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_import_instance(resp) + return resp + + class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListInstances") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> cloud_redis.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_redis.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloud_redis.ListInstancesResponse: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + request, metadata = await self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.ListInstancesResponse() + pb_resp = cloud_redis.ListInstancesResponse.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_list_instances(resp) + return resp + + class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.RescheduleMaintenance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = 
transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.RescheduleMaintenanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the reschedule maintenance method over HTTP. + + Args: + request (~.cloud_redis.RescheduleMaintenanceRequest): + The request object. Request for + [RescheduleMaintenance][google.cloud.redis.v1.CloudRedis.RescheduleMaintenance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() + request, metadata = await self._interceptor.pre_reschedule_maintenance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_reschedule_maintenance(resp) + return resp + + class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.UpdateInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, 
method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_redis.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + request, metadata = await self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_update_instance(resp) + return resp + + class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.UpgradeInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.UpgradeInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + r"""Call the upgrade instance method over HTTP. + + Args: + request (~.cloud_redis.UpgradeInstanceRequest): + The request object. Request for + [UpgradeInstance][google.cloud.redis.v1.CloudRedis.UpgradeInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + request, metadata = await self._interceptor.pre_upgrade_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_upgrade_instance(resp) + return resp + + @property + def operations_client(self) -> AsyncOperationsRestClient: + """Create the async client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + } + + rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + http_options=http_options, + path_prefix="v1" + ) + + self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + + # Return the client from cache. 
+ return self._operations_client + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def export_instance(self) -> Callable[ + [cloud_redis.ExportInstanceRequest], + operations_pb2.Operation]: + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def failover_instance(self) -> Callable[ + [cloud_redis.FailoverInstanceRequest], + operations_pb2.Operation]: + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance_auth_string(self) -> Callable[ + [cloud_redis.GetInstanceAuthStringRequest], + cloud_redis.InstanceAuthString]: + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + + @property + def import_instance(self) -> Callable[ + [cloud_redis.ImportInstanceRequest], + operations_pb2.Operation]: + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def reschedule_maintenance(self) -> Callable[ + [cloud_redis.RescheduleMaintenanceRequest], + operations_pb2.Operation]: + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # 
type: ignore + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def upgrade_instance(self) -> Callable[ + [cloud_redis.UpgradeInstanceRequest], + operations_pb2.Operation]: + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + request, metadata = await self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + request, metadata = await self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_list_locations(resp) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + request, metadata = await self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return await self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = await self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return await self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + request, metadata = await self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_get_operation(resp) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + request, metadata = await self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_list_operations(resp) + return resp @property def kind(self) -> str: return "rest_asyncio" + + async def close(self): + await self._session.close() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 2ed2456c7b10..dc561b714fd1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -20,7 +20,6 @@ from google.protobuf import json_format from google.cloud.location import locations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO -from google.auth import credentials as ga_credentials # type: ignore import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union @@ -45,7 +44,7 @@ class _BaseCloudRedisRestTransport(CloudRedisTransport): def __init__(self, *, host: str = 'redis.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, + credentials: Optional[Any] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, url_scheme: str = 'https', @@ -55,7 +54,7 @@ def __init__(self, *, Args: host 
(Optional[str]): The hostname to connect to (default: 'redis.googleapis.com'). - credentials (Optional[google.auth.credentials.Credentials]): The + credentials (Optional[Any]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the @@ -555,6 +554,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -578,6 +579,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -601,6 +604,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -624,6 +629,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -647,6 +654,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): @@ -670,6 +679,8 @@ def _get_query_params_json(transcoded_request): return query_params class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") @staticmethod def _get_http_options(): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py 
b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 5e8332354d54..6a98686dcb52 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -46,6 +46,12 @@ "proto-plus >= 1.22.3, <2.0.0dev", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] +extras = { + "async_rest": [ + "google-api-core[grpc] >= 2.21.0rc0, < 3.0.0dev", + "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + ], +} url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -88,6 +94,7 @@ packages=packages, python_requires=">=3.7", install_requires=dependencies, + extras_require=extras, include_package_data=True, zip_safe=False, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 76a68fb5b3ae..9e500de2c003 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -23,7 +23,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -31,6 +31,13 @@ from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + HAS_ASYNC_REST_EXTRA = True +except ImportError: # pragma: NO COVER + HAS_ASYNC_REST_EXTRA = False from 
requests import Response from requests import Request, PreparedRequest from requests.sessions import Session @@ -72,6 +79,11 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -4641,44 +4653,6 @@ async def test_reschedule_maintenance_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [ - cloud_redis.ListInstancesRequest, - dict, -]) -def test_list_instances_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = cloud_redis.ListInstancesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.ListInstancesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.list_instances(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListInstancesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - def test_list_instances_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4792,66 +4766,6 @@ def test_list_instances_rest_unset_required_fields(): assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_instances_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) - - request = cloud_redis.ListInstancesRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_redis.ListInstancesResponse() - - client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_list_instances_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ListInstancesRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_instances(request) - - def test_list_instances_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4967,94 +4881,6 @@ def test_list_instances_rest_pager(transport: str = 'rest'): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceRequest, - dict, -]) -def test_get_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloud_redis.Instance( - name='name_value', - display_name='display_name_value', - location_id='location_id_value', - alternative_location_id='alternative_location_id_value', - redis_version='redis_version_value', - reserved_ip_range='reserved_ip_range_value', - secondary_ip_range='secondary_ip_range_value', - host='host_value', - port=453, - current_location_id='current_location_id_value', - state=cloud_redis.Instance.State.CREATING, - status_message='status_message_value', - tier=cloud_redis.Instance.Tier.BASIC, - memory_size_gb=1499, - authorized_network='authorized_network_value', - persistence_iam_identity='persistence_iam_identity_value', - connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, - auth_enabled=True, - transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, - replica_count=1384, - read_endpoint='read_endpoint_value', - read_endpoint_port=1920, - read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, - customer_managed_key='customer_managed_key_value', - suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], - maintenance_version='maintenance_version_value', - available_maintenance_versions=['available_maintenance_versions_value'], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.Instance.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_instance(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, cloud_redis.Instance) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.location_id == 'location_id_value' - assert response.alternative_location_id == 'alternative_location_id_value' - assert response.redis_version == 'redis_version_value' - assert response.reserved_ip_range == 'reserved_ip_range_value' - assert response.secondary_ip_range == 'secondary_ip_range_value' - assert response.host == 'host_value' - assert response.port == 453 - assert response.current_location_id == 'current_location_id_value' - assert response.state == cloud_redis.Instance.State.CREATING - assert response.status_message == 'status_message_value' - assert response.tier == cloud_redis.Instance.Tier.BASIC - assert response.memory_size_gb == 1499 - assert response.authorized_network == 'authorized_network_value' - assert response.persistence_iam_identity == 'persistence_iam_identity_value' - assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING - assert response.auth_enabled is True - assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION - assert response.replica_count == 1384 - assert response.read_endpoint == 'read_endpoint_value' - assert response.read_endpoint_port == 1920 - assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED - assert response.customer_managed_key == 'customer_managed_key_value' - assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] - assert response.maintenance_version == 'maintenance_version_value' - assert response.available_maintenance_versions == ['available_maintenance_versions_value'] - def test_get_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5166,66 +4992,6 @@ def 
test_get_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.Instance.to_json(cloud_redis.Instance()) - - request = cloud_redis.GetInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_redis.Instance() - - client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http 
request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_instance(request) - - def test_get_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5279,64 +5045,21 @@ def test_get_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_get_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize("request_type", [ - cloud_redis.GetInstanceAuthStringRequest, - dict, -]) -def test_get_instance_auth_string_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = cloud_redis.InstanceAuthString( - auth_string='auth_string_value', - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = cloud_redis.InstanceAuthString.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.get_instance_auth_string(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, cloud_redis.InstanceAuthString) - assert response.auth_string == 'auth_string_value' - -def test_get_instance_auth_string_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.get_instance_auth_string in client._transport._wrapped_methods + # Ensure method has been cached + assert client._transport.get_instance_auth_string in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -5433,66 +5156,6 @@ def test_get_instance_auth_string_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_instance_auth_string_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = 
CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) - - request = cloud_redis.GetInstanceAuthStringRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = cloud_redis.InstanceAuthString() - - client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_instance_auth_string_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.GetInstanceAuthStringRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_instance_auth_string(request) - - def test_get_instance_auth_string_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5546,108 +5209,6 @@ def test_get_instance_auth_string_rest_flattened_error(transport: str = 'rest'): ) -def test_get_instance_auth_string_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.CreateInstanceRequest, - dict, -]) -def test_create_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 
'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del 
request_init["instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.create_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - def test_create_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5774,67 +5335,6 @@ def test_create_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.CreateInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.CreateInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'parent': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_instance(request) - - def test_create_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5890,116 +5390,14 @@ def test_create_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_create_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpdateInstanceRequest, - dict, -]) -def test_update_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 
'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - {"field": field, "subfield": subfield, "is_repeated": is_repeated} - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["instance"][field])): - del request_init["instance"][field][i][subfield] - else: - del 
request_init["instance"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.update_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - -def test_update_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -6102,67 +5500,6 @@ def test_update_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), 
- interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.UpdateInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpdateInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_instance(request) - - def test_update_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6216,44 +5553,6 @@ def test_update_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_update_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.UpgradeInstanceRequest, - dict, -]) -def test_upgrade_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.upgrade_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_upgrade_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6371,67 +5670,6 @@ def test_upgrade_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "redisVersion", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_upgrade_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.UpgradeInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def 
test_upgrade_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.UpgradeInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.upgrade_instance(request) - - def test_upgrade_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6485,56 +5723,18 @@ def test_upgrade_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_upgrade_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) +def test_import_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize("request_type", [ - cloud_redis.ImportInstanceRequest, - dict, -]) -def test_import_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the 
method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.import_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - -def test_import_instance_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.import_instance in client._transport._wrapped_methods @@ -6636,67 +5836,6 @@ def test_import_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "inputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, 
"transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.ImportInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_import_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ImportInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.import_instance(request) - - def test_import_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6750,44 +5889,6 @@ def test_import_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_import_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.ExportInstanceRequest, - dict, -]) -def test_export_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.export_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_export_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6901,67 +6002,6 @@ def test_export_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "outputConfig", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.ExportInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def 
test_export_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.ExportInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.export_instance(request) - - def test_export_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7015,44 +6055,6 @@ def test_export_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_export_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.FailoverInstanceRequest, - dict, -]) -def test_failover_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.failover_instance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - def test_failover_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7166,77 +6168,16 @@ def test_failover_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_failover_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - 
req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.FailoverInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_failover_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.FailoverInstanceRequest): +def test_failover_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.failover_instance(request) - - -def test_failover_instance_rest_flattened(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') # get arguments that satisfy an http rule for this method sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} @@ -7280,44 +6221,6 @@ def test_failover_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_failover_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.DeleteInstanceRequest, - dict, -]) -def test_delete_instance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.delete_instance(request) - - # Establish that the response is the type that we expect. 
- assert response.operation.name == "operations/spam" - def test_delete_instance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7430,67 +6333,6 @@ def test_delete_instance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_instance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.DeleteInstanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def 
test_delete_instance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.DeleteInstanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_instance(request) - - def test_delete_instance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7542,44 +6384,6 @@ def test_delete_instance_rest_flattened_error(transport: str = 'rest'): ) -def test_delete_instance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - -@pytest.mark.parametrize("request_type", [ - cloud_redis.RescheduleMaintenanceRequest, - dict, -]) -def test_reschedule_maintenance_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name='operations/spam') - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.reschedule_maintenance(request) - - # Establish that the response is the type that we expect. - assert response.operation.name == "operations/spam" - def test_reschedule_maintenance_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7693,67 +6497,6 @@ def test_reschedule_maintenance_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name", "rescheduleType", ))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_reschedule_maintenance_rest_interceptors(null_interceptor): - transport = transports.CloudRedisRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), - ) - client = CloudRedisClient(transport=transport) - with mock.patch.object(type(client.transport._session), "request") as req, \ - mock.patch.object(path_template, "transcode") as transcode, \ - mock.patch.object(operation.Operation, "_set_result_from_operation"), \ - mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ - mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = json_format.MessageToJson(operations_pb2.Operation()) - - request = cloud_redis.RescheduleMaintenanceRequest() - metadata =[ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - - client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) - - pre.assert_called_once() - post.assert_called_once() - - -def test_reschedule_maintenance_rest_bad_request(transport: str = 'rest', request_type=cloud_redis.RescheduleMaintenanceRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.reschedule_maintenance(request) - - def test_reschedule_maintenance_rest_flattened(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7809,13 +6552,6 @@ def test_reschedule_maintenance_rest_flattened_error(transport: str = 'rest'): ) -def test_reschedule_maintenance_rest_error(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest' - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudRedisGrpcTransport( @@ -7910,6 +6646,14 @@ def test_transport_kind_grpc(): assert transport.kind == "grpc" +def test_initialize_client_w_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_list_instances_empty_call_grpc(): @@ -8159,6 +6903,14 @@ def test_transport_kind_grpc_asyncio(): assert transport.kind == "grpc_asyncio" +def test_initialize_client_w_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -8479,917 +7231,3840 @@ def test_transport_kind_rest(): assert transport.kind == "rest" -def test_transport_kind_rest_asyncio(): - transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( - credentials=async_anonymous_credentials() +def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" ) - assert transport.kind == "rest_asyncio" + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_instances(request) -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances_rest_call_success(request_type): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest" ) - assert isinstance( - client.transport, - transports.CloudRedisGrpcTransport, - ) - -def test_cloud_redis_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudRedisTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) -def test_cloud_redis_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.CloudRedisTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], ) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'list_instances', - 'get_instance', - 'get_instance_auth_string', - 'create_instance', - 'update_instance', - 'upgrade_instance', - 'import_instance', - 'export_instance', - 'failover_instance', - 'delete_instance', - 'reschedule_maintenance', - 'get_location', - 'list_locations', - 'get_operation', - 'cancel_operation', - 'delete_operation', - 'list_operations', + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + req.return_value.content = return_value + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 
'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value.content = return_value + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_instance_auth_string(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +def test_get_instance_auth_string_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.get_instance_auth_string(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_auth_string_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) + req.return_value.content = return_value + + request = cloud_redis.GetInstanceAuthStringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.InstanceAuthString() + + client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 
'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 
1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + 
req.return_value.content = return_value + + request = cloud_redis.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 
'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# 
pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + response = client.update_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = cloud_redis.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + + +def test_upgrade_instance_rest_bad_request(request_type=cloud_redis.UpgradeInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = 
@pytest.mark.parametrize("request_type", [
    cloud_redis.UpgradeInstanceRequest,
    dict,
])
def test_upgrade_instance_rest_call_success(request_type):
    """UpgradeInstance over REST should succeed against a mocked 200 Operation response."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.upgrade_instance(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): nothing is asserted about `response` here and the statement
    # below has no effect — confirm whether an assertion was intended.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_upgrade_instance_rest_interceptors(null_interceptor):
    """The pre/post UpgradeInstance REST interceptor hooks must each run exactly once."""
    transport = transports.CloudRedisRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
    )
    client = CloudRedisClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
         mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \
         mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = cloud_redis.UpgradeInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_import_instance_rest_bad_request(request_type=cloud_redis.ImportInstanceRequest):
    """ImportInstance over REST must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.import_instance(request)
@pytest.mark.parametrize("request_type", [
    cloud_redis.ImportInstanceRequest,
    dict,
])
def test_import_instance_rest_call_success(request_type):
    """ImportInstance over REST should succeed against a mocked 200 Operation response."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.import_instance(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): nothing is asserted about `response` here and the statement
    # below has no effect — confirm whether an assertion was intended.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_import_instance_rest_interceptors(null_interceptor):
    """The pre/post ImportInstance REST interceptor hooks must each run exactly once."""
    transport = transports.CloudRedisRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
    )
    client = CloudRedisClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
         mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \
         mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = cloud_redis.ImportInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_export_instance_rest_bad_request(request_type=cloud_redis.ExportInstanceRequest):
    """ExportInstance over REST must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.export_instance(request)
@pytest.mark.parametrize("request_type", [
    cloud_redis.ExportInstanceRequest,
    dict,
])
def test_export_instance_rest_call_success(request_type):
    """ExportInstance over REST should succeed against a mocked 200 Operation response."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.export_instance(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): nothing is asserted about `response` here and the statement
    # below has no effect — confirm whether an assertion was intended.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_export_instance_rest_interceptors(null_interceptor):
    """The pre/post ExportInstance REST interceptor hooks must each run exactly once."""
    transport = transports.CloudRedisRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
    )
    client = CloudRedisClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
         mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \
         mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = cloud_redis.ExportInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_failover_instance_rest_bad_request(request_type=cloud_redis.FailoverInstanceRequest):
    """FailoverInstance over REST must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.failover_instance(request)
@pytest.mark.parametrize("request_type", [
    cloud_redis.FailoverInstanceRequest,
    dict,
])
def test_failover_instance_rest_call_success(request_type):
    """FailoverInstance over REST should succeed against a mocked 200 Operation response."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.failover_instance(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): nothing is asserted about `response` here and the statement
    # below has no effect — confirm whether an assertion was intended.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_failover_instance_rest_interceptors(null_interceptor):
    """The pre/post FailoverInstance REST interceptor hooks must each run exactly once."""
    transport = transports.CloudRedisRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
    )
    client = CloudRedisClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
         mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \
         mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = cloud_redis.FailoverInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanceRequest):
    """DeleteInstance over REST must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.delete_instance(request)
@pytest.mark.parametrize("request_type", [
    cloud_redis.DeleteInstanceRequest,
    dict,
])
def test_delete_instance_rest_call_success(request_type):
    """DeleteInstance over REST should succeed against a mocked 200 Operation response."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.delete_instance(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): nothing is asserted about `response` here and the statement
    # below has no effect — confirm whether an assertion was intended.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_delete_instance_rest_interceptors(null_interceptor):
    """The pre/post DeleteInstance REST interceptor hooks must each run exactly once."""
    transport = transports.CloudRedisRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
    )
    client = CloudRedisClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
         mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \
         mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = cloud_redis.DeleteInstanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_reschedule_maintenance_rest_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest):
    """RescheduleMaintenance over REST must surface an HTTP 400 as core_exceptions.BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )
    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        client.reschedule_maintenance(request)
@pytest.mark.parametrize("request_type", [
    cloud_redis.RescheduleMaintenanceRequest,
    dict,
])
def test_reschedule_maintenance_rest_call_success(request_type):
    """RescheduleMaintenance over REST should succeed against a mocked 200 Operation response."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest"
    )

    # Send a request that will satisfy transcoding.
    request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name='operations/spam')

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')
        req.return_value = response_value
        response = client.reschedule_maintenance(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): nothing is asserted about `response` here and the statement
    # below has no effect — confirm whether an assertion was intended.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_reschedule_maintenance_rest_interceptors(null_interceptor):
    """The pre/post RescheduleMaintenance REST interceptor hooks must each run exactly once."""
    transport = transports.CloudRedisRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(),
    )
    client = CloudRedisClient(transport=transport)

    with mock.patch.object(type(client.transport._session), "request") as req, \
         mock.patch.object(path_template, "transcode") as transcode, \
         mock.patch.object(operation.Operation, "_set_result_from_operation"), \
         mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \
         mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre:
        pre.assert_not_called()
        post.assert_not_called()
        pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = cloud_redis.RescheduleMaintenanceRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()

        client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),])

        pre.assert_called_once()
        post.assert_called_once()


def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest):
    """GetLocation (locations mixin) over REST must surface an HTTP 400 as BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = Response()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.get_location(request)
@pytest.mark.parametrize("request_type", [
    locations_pb2.GetLocationRequest,
    dict,
])
def test_get_location_rest(request_type):
    """GetLocation (locations mixin) over REST returns a locations_pb2.Location."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request_init = {'name': 'projects/sample1/locations/sample2'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = locations_pb2.Location()

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')

        req.return_value = response_value

        response = client.get_location(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, locations_pb2.Location)


def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest):
    """ListLocations (locations mixin) over REST must surface an HTTP 400 as BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = Response()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.list_locations(request)
@pytest.mark.parametrize("request_type", [
    locations_pb2.ListLocationsRequest,
    dict,
])
def test_list_locations_rest(request_type):
    """ListLocations (locations mixin) over REST returns a ListLocationsResponse."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request_init = {'name': 'projects/sample1'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = locations_pb2.ListLocationsResponse()

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode('UTF-8')

        req.return_value = response_value

        response = client.list_locations(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, locations_pb2.ListLocationsResponse)


def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest):
    """CancelOperation (operations mixin) over REST must surface an HTTP 400 as BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = Response()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.cancel_operation(request)
@pytest.mark.parametrize("request_type", [
    operations_pb2.CancelOperationRequest,
    dict,
])
def test_cancel_operation_rest(request_type):
    """CancelOperation (operations mixin) over REST returns None on an empty 200 body."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )

    request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'}
    request = request_type(**request_init)
    # Mock the http request call within the method and fake a response.
    with mock.patch.object(Session, 'request') as req:
        # Designate an appropriate value for the returned response.
        return_value = None

        # Wrap the value into a proper Response obj.
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = '{}'
        response_value.content = json_return_value.encode('UTF-8')

        req.return_value = response_value

        response = client.cancel_operation(request)

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest):
    """DeleteOperation (operations mixin) over REST must surface an HTTP 400 as BadRequest."""
    client = CloudRedisClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="rest",
    )
    request = request_type()
    request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request)

    # Mock the http request call within the method and fake a BadRequest error.
    with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest):
        # Wrap the value into a proper Response obj.
        response_value = Response()
        json_return_value = ''
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.delete_operation(request)
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_initialize_client_w_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +def test_cloud_redis_rest_lro_client(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_kind_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "rest_asyncio" + + +@pytest.mark.asyncio +async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis.ListInstancesRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.list_instances(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +async def test_list_instances_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.list_instances(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_list_instances_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + + await client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def 
test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.get_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +async def test_get_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_get_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the 
library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + + await client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_get_instance_auth_string_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.get_instance_auth_string(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceAuthStringRequest, + dict, +]) +async def test_get_instance_auth_string_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = cloud_redis.InstanceAuthString( + auth_string='auth_string_value', + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.InstanceAuthString.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.get_instance_auth_string(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.InstanceAuthString) + assert response.auth_string == 'auth_string_value' + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_get_instance_auth_string_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.GetInstanceAuthStringRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.InstanceAuthString() + + await client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def 
test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis.CreateInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.create_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +async def test_create_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 
'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.create_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_create_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_create_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpdateInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.update_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +async def test_update_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 
'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.update_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_update_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_update_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + 
"body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_upgrade_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpgradeInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.upgrade_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpgradeInstanceRequest, + dict, +]) +async def test_upgrade_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.upgrade_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_upgrade_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.UpgradeInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_import_instance_rest_asyncio_bad_request(request_type=cloud_redis.ImportInstanceRequest): + if not 
HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.import_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.ImportInstanceRequest, + dict, +]) +async def test_import_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.import_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_import_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_import_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_import_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = 
cloud_redis.ImportInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_export_instance_rest_asyncio_bad_request(request_type=cloud_redis.ExportInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.export_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.ExportInstanceRequest, + dict, +]) +async def test_export_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.export_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_export_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_export_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 
+ return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.ExportInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_failover_instance_rest_asyncio_bad_request(request_type=cloud_redis.FailoverInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.failover_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.FailoverInstanceRequest, + dict, +]) +async def test_failover_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.failover_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_failover_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.FailoverInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis.DeleteInstanceRequest): + if not 
HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.delete_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +async def test_delete_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.delete_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = 
cloud_redis.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + await client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + +@pytest.mark.asyncio +async def test_reschedule_maintenance_rest_asyncio_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) - with pytest.raises(NotImplementedError): - transport.close() + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.reschedule_maintenance(request) - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.RescheduleMaintenanceRequest, + dict, +]) +async def test_reschedule_maintenance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) -def test_cloud_redis_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudRedisTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - 
default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + response = await client.reschedule_maintenance(request) -def test_cloud_redis_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudRedisTransport() - adc.assert_called_once() + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) -def test_cloud_redis_auth_adc(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudRedisClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - ], -) -def test_cloud_redis_transport_auth_adc(transport_class): - # If credentials and host are not 
provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) + request = cloud_redis.RescheduleMaintenanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + await client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudRedisGrpcTransport, - transports.CloudRedisGrpcAsyncIOTransport, - transports.CloudRedisRestTransport, - ], -) -def test_cloud_redis_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) + pre.assert_called_once() + post.assert_called_once() +@pytest.mark.asyncio +async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2.GetLocationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = 
json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudRedisGrpcTransport, grpc_helpers), - (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.get_location(request) - create_channel.assert_called_with( - "redis.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="redis.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +async def test_get_location_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + 
credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + + response = await client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +@pytest.mark.asyncio +async def test_list_locations_rest_asyncio_bad_request(request_type=locations_pb2.ListLocationsRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.list_locations(request) -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +async def test_list_locations_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) -def test_cloud_redis_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: - transports.CloudRedisRestTransport ( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + req.return_value = response_value + response = await client.list_locations(request) -def test_cloud_redis_rest_lro_client(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='rest', - ) - transport = client.transport + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.AbstractOperationsClient, +@pytest.mark.asyncio +async def test_cancel_operation_rest_asyncio_bad_request(request_type=operations_pb2.CancelOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client - + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.cancel_operation(request) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, ]) -def test_cloud_redis_host_no_port(transport_name): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), - transport=transport_name, +async def test_cancel_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) - assert 
client.transport._host == ( - 'redis.googleapis.com:443' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com' + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + + response = await client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +@pytest.mark.asyncio +async def test_delete_operation_rest_asyncio_bad_request(request_type=operations_pb2.DeleteOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", - "rest", + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.delete_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, ]) -def test_cloud_redis_host_with_port(transport_name): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), - transport=transport_name, +async def test_delete_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) - assert client.transport._host == ( - 'redis.googleapis.com:8000' - if transport_name in ['grpc', 'grpc_asyncio'] - else 'https://redis.googleapis.com:8000' + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + + response = await client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + +@pytest.mark.asyncio +async def test_get_operation_rest_asyncio_bad_request(request_type=operations_pb2.GetOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) -@pytest.mark.parametrize("transport_name", [ - "rest", + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.get_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, ]) -def test_cloud_redis_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudRedisClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudRedisClient( - credentials=creds2, - transport=transport_name, +async def test_get_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) - session1 = client1.transport.list_instances._session - session2 = client2.transport.list_instances._session - assert session1 != session2 - session1 = client1.transport.get_instance._session 
- session2 = client2.transport.get_instance._session - assert session1 != session2 - session1 = client1.transport.get_instance_auth_string._session - session2 = client2.transport.get_instance_auth_string._session - assert session1 != session2 - session1 = client1.transport.create_instance._session - session2 = client2.transport.create_instance._session - assert session1 != session2 - session1 = client1.transport.update_instance._session - session2 = client2.transport.update_instance._session - assert session1 != session2 - session1 = client1.transport.upgrade_instance._session - session2 = client2.transport.upgrade_instance._session - assert session1 != session2 - session1 = client1.transport.import_instance._session - session2 = client2.transport.import_instance._session - assert session1 != session2 - session1 = client1.transport.export_instance._session - session2 = client2.transport.export_instance._session - assert session1 != session2 - session1 = client1.transport.failover_instance._session - session2 = client2.transport.failover_instance._session - assert session1 != session2 - session1 = client1.transport.delete_instance._session - session2 = client2.transport.delete_instance._session - assert session1 != session2 - session1 = client1.transport.reschedule_maintenance._session - session2 = client2.transport.reschedule_maintenance._session - assert session1 != session2 -def test_cloud_redis_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - # Check that channel is used if provided. 
- transport = transports.CloudRedisGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + response = await client.get_operation(request) -def test_cloud_redis_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) - # Check that channel is used if provided. 
- transport = transports.CloudRedisGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, +@pytest.mark.asyncio +async def test_list_operations_rest_asyncio_bad_request(request_type=operations_pb2.ListOperationsRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.list_operations(request) - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +async def test_list_operations_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse() - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) -def test_cloud_redis_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() + response = await client.list_operations(request) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel +def test_initialize_client_w_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + assert client is not None -def test_cloud_redis_grpc_lro_client(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', +def test_cloud_redis_rest_asyncio_lro_client(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", ) transport = client.transport - # Ensure that we have a api-core operations client. + # Ensure that we have an api-core operations client. assert isinstance( transport.operations_client, - operations_v1.OperationsClient, +operations_v1.AsyncOperationsRestClient, ) # Ensure that subsequent calls to the property send the exact same object. 
assert transport.operations_client is transport.operations_client +def test_unsupported_parameter_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + options = client_options.ClientOptions(quota_project_id="octopus") + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + client_options=options + ) -def test_cloud_redis_grpc_lro_async_client(): - client = CloudRedisAsyncClient( + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport='grpc_asyncio', ) - transport = client.transport - - # Ensure that we have a api-core operations client. assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, + client.transport, + transports.CloudRedisGrpcTransport, ) - # Ensure that subsequent calls to the property send the exact same object. 
- assert transport.operations_client is transport.operations_client - +def test_cloud_redis_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) -def test_instance_path(): - project = "squid" - location = "clam" - instance = "whelk" - expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) - actual = CloudRedisClient.instance_path(project, location, instance) - assert expected == actual +def test_cloud_redis_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) -def test_parse_instance_path(): - expected = { - "project": "octopus", - "location": "oyster", - "instance": "nudibranch", - } - path = CloudRedisClient.instance_path(**expected) + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'list_instances', + 'get_instance', + 'get_instance_auth_string', + 'create_instance', + 'update_instance', + 'upgrade_instance', + 'import_instance', + 'export_instance', + 'failover_instance', + 'delete_instance', + 'reschedule_maintenance', + 'get_location', + 'list_locations', + 'get_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) - # Check that the path construction is reversible. 
- actual = CloudRedisClient.parse_instance_path(path) - assert expected == actual + with pytest.raises(NotImplementedError): + transport.close() -def test_common_billing_account_path(): - billing_account = "cuttlefish" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = CloudRedisClient.common_billing_account_path(billing_account) - assert expected == actual + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "mussel", - } - path = CloudRedisClient.common_billing_account_path(**expected) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_billing_account_path(path) - assert expected == actual +def test_cloud_redis_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) -def test_common_folder_path(): - folder = "winkle" - expected = "folders/{folder}".format(folder=folder, ) - actual = CloudRedisClient.common_folder_path(folder) - assert expected == actual 
+def test_cloud_redis_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport() + adc.assert_called_once() -def test_parse_common_folder_path(): - expected = { - "folder": "nautilus", - } - path = CloudRedisClient.common_folder_path(**expected) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_folder_path(path) - assert expected == actual +def test_cloud_redis_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudRedisClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) -def test_common_organization_path(): - organization = "scallop" - expected = "organizations/{organization}".format(organization=organization, ) - actual = CloudRedisClient.common_organization_path(organization) - assert expected == actual +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + ], +) +def test_cloud_redis_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) -def test_parse_common_organization_path(): - expected = { - "organization": "abalone", - } - path = CloudRedisClient.common_organization_path(**expected) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_organization_path(path) - assert expected == actual +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, + ], +) +def test_cloud_redis_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) -def test_common_project_path(): - project = "squid" - expected = "projects/{project}".format(project=project, ) - actual = CloudRedisClient.common_project_path(project) - assert expected == actual +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudRedisGrpcTransport, grpc_helpers), + (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # 
ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) -def test_parse_common_project_path(): - expected = { - "project": "clam", - } - path = CloudRedisClient.common_project_path(**expected) + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) - # Check that the path construction is reversible. - actual = CloudRedisClient.parse_common_project_path(path) - assert expected == actual -def test_common_location_path(): - project = "whelk" - location = "octopus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = CloudRedisClient.common_location_path(project, location) - assert expected == actual +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) -def test_parse_common_location_path(): - expected = { - "project": "oyster", - "location": "nudibranch", - } - path = CloudRedisClient.common_location_path(**expected) + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) - # Check that the path construction is reversible. 
- actual = CloudRedisClient.parse_common_location_path(path) - assert expected == actual +def test_cloud_redis_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudRedisRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_redis_host_no_port(transport_name): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com' + ) - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_redis_host_with_port(transport_name): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com:8000' + ) - with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: - transport_class = 
CloudRedisClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def test_cloud_redis_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudRedisClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudRedisClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.get_instance_auth_string._session + session2 = client2.transport.get_instance_auth_string._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.upgrade_instance._session + session2 = client2.transport.upgrade_instance._session + assert session1 != session2 + session1 = client1.transport.import_instance._session + session2 = client2.transport.import_instance._session + assert session1 != session2 + session1 = client1.transport.export_instance._session + session2 = client2.transport.export_instance._session + assert session1 != session2 + session1 = client1.transport.failover_instance._session + session2 = client2.transport.failover_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = 
client2.transport.delete_instance._session + assert session1 != session2 + session1 = client1.transport.reschedule_maintenance._session + session2 = client2.transport.reschedule_maintenance._session + assert session1 != session2 +def test_cloud_redis_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudRedisAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Check that channel is used if provided. + transport = transports.CloudRedisGrpcTransport( + host="squid.clam.whelk", + channel=channel, ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None -def test_get_location_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.GetLocationRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_cloud_redis_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudRedisGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_location(request) +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred -@pytest.mark.parametrize("request_type", [ - locations_pb2.GetLocationRequest, - dict, -]) -def test_get_location_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. 
- return_value = locations_pb2.Location() + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred - response = client.get_location(request) - # Establish that the response is the type that we expect. - assert isinstance(response, locations_pb2.Location) +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() -def test_list_locations_rest_bad_request(transport: str = 'rest', request_type=locations_pb2.ListLocationsRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1'}, request) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_locations(request) -@pytest.mark.parametrize("request_type", [ - locations_pb2.ListLocationsRequest, - dict, -]) -def test_list_locations_rest(request_type): +def test_cloud_redis_grpc_lro_client(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport='grpc', ) - request_init = {'name': 'projects/sample1'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = locations_pb2.ListLocationsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + transport = client.transport - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) - response = client.list_locations(request) + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client - # Establish that the response is the type that we expect. 
- assert isinstance(response, locations_pb2.ListLocationsResponse) -def test_cancel_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.CancelOperationRequest): - client = CloudRedisClient( +def test_cloud_redis_grpc_lro_async_client(): + client = CloudRedisAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport='grpc_asyncio', ) + transport = client.transport - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - -@pytest.mark.parametrize("request_type", [ - operations_pb2.CancelOperationRequest, - dict, -]) -def test_cancel_operation_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value - response = client.cancel_operation(request) +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + actual = CloudRedisClient.instance_path(project, location, instance) + assert expected == actual - # Establish that the response is the type that we expect. - assert response is None -def test_delete_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.DeleteOperationRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = CloudRedisClient.instance_path(**expected) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_instance_path(path) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_operation(request) +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudRedisClient.common_billing_account_path(billing_account) + assert expected == actual -@pytest.mark.parametrize("request_type", [ - operations_pb2.DeleteOperationRequest, - dict, -]) -def test_delete_operation_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = None - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = '{}' +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudRedisClient.common_billing_account_path(**expected) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_billing_account_path(path) + assert expected == actual - response = client.delete_operation(request) +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudRedisClient.common_folder_path(folder) + assert expected == actual - # Establish that the response is the type that we expect. - assert response is None -def test_get_operation_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.GetOperationRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudRedisClient.common_folder_path(**expected) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_folder_path(path) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudRedisClient.common_organization_path(organization) + assert expected == actual -@pytest.mark.parametrize("request_type", [ - operations_pb2.GetOperationRequest, - dict, -]) -def test_get_operation_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudRedisClient.common_organization_path(**expected) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_organization_path(path) + assert expected == actual - response = client.get_operation(request) +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = CloudRedisClient.common_project_path(project) + assert expected == actual - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) -def test_list_operations_rest_bad_request(transport: str = 'rest', request_type=operations_pb2.ListOperationsRequest): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudRedisClient.common_project_path(**expected) - request = request_type() - request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_project_path(path) + assert expected == actual - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_operations(request) +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudRedisClient.common_location_path(project, location) + assert expected == actual -@pytest.mark.parametrize("request_type", [ - operations_pb2.ListOperationsRequest, - dict, -]) -def test_list_operations_rest(request_type): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {'name': 'projects/sample1/locations/sample2'} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), 'request') as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.ListOperationsResponse() - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudRedisClient.common_location_path(**expected) - response_value._content = json_return_value.encode('UTF-8') - req.return_value = response_value + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_location_path(path) + assert expected == actual - response = client.list_operations(request) - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.ListOperationsResponse) +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudRedisClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) def test_delete_operation(transport: str = "grpc"): @@ -10165,21 +11840,53 @@ async def test_get_location_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def 
test_transport_close_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = CloudRedisClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() def test_client_ctx(): transports = [ diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 7b541976fc22..395467661c3c 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -24,6 +24,7 @@ from google.auth.aio import credentials as ga_credentials_async HAS_GOOGLE_AUTH_AIO = True +# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. except ImportError: # pragma: NO COVER HAS_GOOGLE_AUTH_AIO = False import google.auth @@ -37,6 +38,16 @@ import asyncio from google.showcase import EchoAsyncClient from google.showcase import IdentityAsyncClient + try: + from google.showcase_v1beta1.services.echo.transports import AsyncEchoRestTransport + HAS_ASYNC_REST_ECHO_TRANSPORT = True + except: + HAS_ASYNC_REST_ECHO_TRANSPORT = False + try: + from google.showcase_v1beta1.services.identity.transports import AsyncIdentityRestTransport + HAS_ASYNC_REST_IDENTITY_TRANSPORT = True + except: + HAS_ASYNC_REST_IDENTITY_TRANSPORT = False # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. 
# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. @@ -57,23 +68,29 @@ def async_anonymous_credentials(): def event_loop(): return asyncio.get_event_loop() - @pytest.fixture - def async_echo(use_mtls, event_loop): + @pytest.fixture(params=["grpc_asyncio", "rest_asyncio"]) + def async_echo(use_mtls, request, event_loop): + transport = request.param + if transport == "rest_asyncio" and not HAS_ASYNC_REST_ECHO_TRANSPORT: + pytest.skip("Skipping test with async rest.") return construct_client( EchoAsyncClient, use_mtls, - transport_name="grpc_asyncio", - channel_creator=aio.insecure_channel, + transport_name=transport, + channel_creator=aio.insecure_channel if request.param == "grpc_asyncio" else None, credentials=async_anonymous_credentials(), ) - @pytest.fixture - def async_identity(use_mtls, event_loop): + @pytest.fixture(params=["grpc_asyncio", "rest_asyncio"]) + def async_identity(use_mtls, request, event_loop): + transport = request.param + if transport == "rest_asyncio" and not HAS_ASYNC_REST_IDENTITY_TRANSPORT: + pytest.skip("Skipping test with async rest.") return construct_client( IdentityAsyncClient, use_mtls, - transport_name="grpc_asyncio", - channel_creator=aio.insecure_channel, + transport_name=transport, + channel_creator=aio.insecure_channel if request.param == "grpc_asyncio" else None, credentials=async_anonymous_credentials(), ) @@ -135,7 +152,7 @@ def construct_client( credentials=credentials, channel=channel_creator(transport_endpoint), ) - elif transport_name == "rest": + elif transport_name in ["rest", "rest_asyncio"]: # The custom host explicitly bypasses https. 
transport = transport_cls( credentials=credentials, diff --git a/packages/gapic-generator/tests/system/test_client_context_manager.py b/packages/gapic-generator/tests/system/test_client_context_manager.py index 0d20292dc6e5..541de4c5b924 100644 --- a/packages/gapic-generator/tests/system/test_client_context_manager.py +++ b/packages/gapic-generator/tests/system/test_client_context_manager.py @@ -15,6 +15,7 @@ import os import pytest import grpc +from google.auth import exceptions def test_client(echo): @@ -50,7 +51,7 @@ async def test_client_async(async_echo): @pytest.mark.asyncio async def test_client_destroyed_async(async_echo): await async_echo.__aexit__(None, None, None) - with pytest.raises(grpc._cython.cygrpc.UsageError): + with pytest.raises((grpc._cython.cygrpc.UsageError, exceptions.TransportError)): await async_echo.echo({ 'content': 'hello' }) diff --git a/packages/gapic-generator/tests/system/test_lro.py b/packages/gapic-generator/tests/system/test_lro.py index 8098519d9e47..99bbba007ca3 100644 --- a/packages/gapic-generator/tests/system/test_lro.py +++ b/packages/gapic-generator/tests/system/test_lro.py @@ -20,10 +20,6 @@ def test_lro(echo): - if isinstance(echo.transport, type(echo).get_transport_class("rest")): - # (TODO: dovs) Temporarily disabling rest - return - future = echo.wait({ 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), 'success': { @@ -39,6 +35,7 @@ def test_lro(echo): @pytest.mark.asyncio async def test_lro_async(async_echo): + future = await async_echo.wait({ 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), 'success': { diff --git a/packages/gapic-generator/tests/system/test_mixins.py b/packages/gapic-generator/tests/system/test_mixins.py new file mode 100644 index 000000000000..87a926e72dd4 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_mixins.py @@ -0,0 +1,158 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import pytest + +from google.api_core import exceptions + + +def test_get_operation(echo): + with pytest.raises(exceptions.NotFound): + echo.get_operation({"name": "operations/**"}) + + +def test_list_operations(echo): + response = echo.list_operations({"name": "operations/name"}) + assert response.operations[0].name == "a/pending/thing" + + +def test_delete_operation(echo): + response = echo.delete_operation({"name": "operations/name"}) + assert response is None + + +def test_cancel_operation(echo): + response = echo.cancel_operation({"name": "operations/name"}) + assert response is None + + +def test_set_iam_policy(echo): + policy = echo.set_iam_policy( + {"resource": "users/user", "policy": {"version": 20240919}} + ) + assert policy.version == 20240919 + + +def test_get_iam_policy(echo): + # First we need to set a policy, before we can get it + echo.set_iam_policy( + {"resource": "users/user", "policy": {"version": 20240920}} + ) + policy = echo.get_iam_policy( + { + "resource": "users/user", + } + ) + assert policy.version == 20240920 + + +def test_test_iam_permissions(echo): + # First we need to set a policy, before we can call test_iam_permissions + echo.set_iam_policy( + {"resource": "users/user", "policy": {"version": 20240920}} + ) + response = echo.test_iam_permissions( + {"resource": "users/user", "permissions": ["test_some_permission"]} + ) + assert response.permissions == ["test_some_permission"] + + +def test_get_location(echo): + response = echo.get_location( + { + "name": 
"projects/some_project/locations/some_location", + } + ) + assert response.name == "projects/some_project/locations/some_location" + + +def test_list_locations(echo): + response = echo.list_locations( + { + "name": "projects/some_project", + } + ) + assert response.locations[0].name == "projects/some_project/locations/us-north" + + +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": + + @pytest.mark.asyncio + async def test_get_operation_async(async_echo): + with pytest.raises(exceptions.NotFound): + await async_echo.get_operation({"name": "operations/**"}) + + @pytest.mark.asyncio + async def test_list_operations_async(async_echo): + response = await async_echo.list_operations({"name": "operations/name"}) + assert response.operations[0].name == "a/pending/thing" + + @pytest.mark.asyncio + async def test_delete_operation_async(async_echo): + await async_echo.delete_operation({"name": "operations/name"}) + + @pytest.mark.asyncio + async def test_cancel_operation_async(async_echo): + await async_echo.cancel_operation({"name": "operations/name"}) + + @pytest.mark.asyncio + async def test_set_iam_policy_async(async_echo): + policy = await async_echo.set_iam_policy( + {"resource": "users/user", "policy": {"version": 20240919}} + ) + assert policy.version == 20240919 + + @pytest.mark.asyncio + async def test_get_iam_policy_async(async_echo): + # First we need to set a policy, before we can get it + await async_echo.set_iam_policy( + {"resource": "users/user", "policy": {"version": 20240920}} + ) + policy = await async_echo.get_iam_policy( + { + "resource": "users/user", + } + ) + assert policy.version == 20240920 + + @pytest.mark.asyncio + async def test_test_iam_permissions_async(async_echo): + # First we need to set a policy, before we can get it + await async_echo.set_iam_policy( + {"resource": "users/user", "policy": {"version": 20240920}} + ) + + response = await async_echo.test_iam_permissions( + {"resource": "users/user", "permissions": 
["test_some_permission"]} + ) + assert response.permissions == ["test_some_permission"] + + @pytest.mark.asyncio + async def test_get_location_async(async_echo): + response = await async_echo.get_location( + { + "name": "projects/some_project/locations/some_location", + } + ) + assert response.name == "projects/some_project/locations/some_location" + + @pytest.mark.asyncio + async def test_list_locations_async(async_echo): + response = await async_echo.list_locations( + { + "name": "projects/some_project", + } + ) + assert response.locations[0].name == "projects/some_project/locations/us-north" diff --git a/packages/gapic-generator/tests/system/test_streams.py b/packages/gapic-generator/tests/system/test_streams.py index aa8c84c84c7e..b4adc6ee5152 100644 --- a/packages/gapic-generator/tests/system/test_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -115,40 +115,54 @@ def test_stream_stream_passing_dict(echo): @pytest.mark.asyncio async def test_async_unary_stream_reader(async_echo): content = 'The hail in Wales falls mainly on the snails.' - call = await async_echo.expand({ + stream = await async_echo.expand({ 'content': content, }, metadata=_METADATA) + # Note: gRPC exposes `read`, REST exposes `__anext__` to read + # a chunk of response from the stream. + response_attr = '__anext__' if "rest" in str( + async_echo.transport).lower() else 'read' + # Consume the response and ensure it matches what we expect. - # with pytest.raises(exceptions.NotFound) as exc: for ground_truth in content.split(' '): - response = await call.read() + response = await getattr(stream, response_attr)() assert response.content == ground_truth assert ground_truth == 'snails.' - trailing_metadata = await call.trailing_metadata() - assert _METADATA[0] in trailing_metadata.items() + # Note: trailing metadata is part of a gRPC response. 
+ if "grpc" in str(async_echo.transport).lower(): + trailing_metadata = await stream.trailing_metadata() + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_unary_stream_async_generator(async_echo): content = 'The hail in Wales falls mainly on the snails.' - call = await async_echo.expand({ + stream = await async_echo.expand({ 'content': content, }, metadata=_METADATA) # Consume the response and ensure it matches what we expect. - # with pytest.raises(exceptions.NotFound) as exc: tokens = iter(content.split(' ')) - async for response in call: + async for response in stream: ground_truth = next(tokens) assert response.content == ground_truth assert ground_truth == 'snails.' - trailing_metadata = await call.trailing_metadata() - assert _METADATA[0] in trailing_metadata.items() + # Note: trailing metadata is part of a gRPC response. + if "grpc" in str(async_echo.transport).lower(): + trailing_metadata = await stream.trailing_metadata() + assert _METADATA[0] in trailing_metadata.items() @pytest.mark.asyncio async def test_async_stream_unary_iterable(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return + requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) @@ -159,6 +173,12 @@ async def test_async_stream_unary_iterable(async_echo): @pytest.mark.asyncio async def test_async_stream_unary_async_generator(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. 
+ if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return async def async_generator(): yield showcase.EchoRequest(content="hello") @@ -170,6 +190,12 @@ async def async_generator(): @pytest.mark.asyncio async def test_async_stream_unary_writer(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return call = await async_echo.collect() await call.write(showcase.EchoRequest(content="hello")) await call.write(showcase.EchoRequest(content="world!")) @@ -180,6 +206,13 @@ async def test_async_stream_unary_writer(async_echo): @pytest.mark.asyncio async def test_async_stream_unary_passing_dict(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.collect() + return + requests = [{'content': 'hello'}, {'content': 'world!'}] call = await async_echo.collect(iter(requests)) response = await call @@ -187,6 +220,13 @@ async def test_async_stream_unary_passing_dict(async_echo): @pytest.mark.asyncio async def test_async_stream_stream_reader_writier(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. 
+ if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.chat(metadata=_METADATA) + return + call = await async_echo.chat(metadata=_METADATA) await call.write(showcase.EchoRequest(content="hello")) await call.write(showcase.EchoRequest(content="world!")) @@ -203,6 +243,12 @@ async def test_async_stream_stream_reader_writier(async_echo): @pytest.mark.asyncio async def test_async_stream_stream_async_generator(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.chat(metadata=_METADATA) + return async def async_generator(): yield showcase.EchoRequest(content="hello") @@ -220,6 +266,13 @@ async def async_generator(): @pytest.mark.asyncio async def test_async_stream_stream_passing_dict(async_echo): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Add test for async rest client-streaming. + # NOTE: There are currently no plans for supporting async rest client-streaming. + if "rest" in str(async_echo.transport).lower(): + with pytest.raises(NotImplementedError): + call = await async_echo.chat(metadata=_METADATA) + return + requests = [{'content': 'hello'}, {'content': 'world!'}] call = await async_echo.chat(iter(requests), metadata=_METADATA) diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index 59f0ad1c5c1c..674919eb648d 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -148,13 +148,17 @@ async def test_async_unary_with_dict(async_echo): @pytest.mark.asyncio async def test_async_unary_error(async_echo): message = "Bad things! Bad things!" 
- with pytest.raises(exceptions.InvalidArgument) as exc: + expected_err_message = message if "grpc_asyncio" in str( + async_echo.transport) else f"POST http://localhost:7469/v1beta1/echo:echo: {message}" + # Note: InvalidArgument is from gRPC, BadRequest from http (no MTLS) + with pytest.raises((exceptions.InvalidArgument, exceptions.BadRequest)) as exc: await async_echo.echo( { "error": { - "code": code_pb2.Code.Value("INVALID_ARGUMENT"), + "code": code_pb2.Code.Value("INVALID_ARGUMENT",), "message": message, }, } ) - assert exc.value.message == message + assert exc.value.code == 400 + assert exc.value.message == expected_err_message diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py index 9690085876ba..55cce87ca3ba 100644 --- a/packages/gapic-generator/tests/system/test_universe_domain.py +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -73,9 +73,9 @@ def test_universe_domain_validation_fail(parametrized_echo, channel_creator, tra # Make this test unconditional once the minimum supported version of # google-auth becomes 2.23.0 or higher. 
google_auth_major, google_auth_minor, _ = [ - int(part) for part in google.auth.__version__.split(".") + part for part in google.auth.__version__.split(".") ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + if int(google_auth_major) > 2 or (int(google_auth_major) == 2 and int(google_auth_minor) >= 23): assert parametrized_echo.transport._credentials.universe_domain == credential_universe if transport_name == "rest": assert parametrized_echo.api_endpoint == "http://" + transport_endpoint From 5670e0494ab67b6842411729def8d7363f5e066b Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 9 Oct 2024 11:47:11 -0400 Subject: [PATCH 1191/1339] chore: update api-core version in async extra (#2210) --- .../%sub/services/%service/transports/rest_asyncio.py.j2 | 8 ++++++-- packages/gapic-generator/gapic/templates/setup.py.j2 | 3 +-- packages/gapic-generator/noxfile.py | 5 +++-- .../services/cloud_redis/transports/rest_asyncio.py | 4 ++-- .../tests/integration/goldens/redis/setup.py | 2 +- 5 files changed, 13 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 3c15b8e88622..be65f9db6812 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -139,7 +139,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): self._wrap_with_kind = True self._prep_wrapped_messages(client_info) {% if service.has_lro %} - self._operations_client: Optional[operations_v1.OperationsClient] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None {% endif %} @@ -251,7 
+251,11 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore host=self._host, # use the credentials which are saved - credentials=self._credentials, + {# Note: Type for creds is ignored because of incorrect type hint for creds in the client layer. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2177): Remove `# type: ignore` once + # we update the type hints for credentials to include asynchronous credentials in the client layer. + #} + credentials=self._credentials, # type: ignore http_options=http_options, path_prefix="{{ service.client_package_version }}" ) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 28b217dd7613..a466758ae624 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -51,8 +51,7 @@ dependencies = [ extras = { {% if rest_async_io_enabled %} "async_rest": [ - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2208): Update the minimum supported version of api-core to `2.21.0` when released. #} - "google-api-core[grpc] >= 2.21.0rc0, < 3.0.0dev", + "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" ], {% endif %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 8875a9907816..2387ea130290 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -310,8 +310,9 @@ def showcase_library( # NOTE: We re-install `google-api-core` and `google-auth` to override the respective # versions for each specified in constraints-3.7.txt. This is needed because async REST # is not supported with the minimum version of `google-api-core` and `google-auth`. 
- # TODO(https://github.com/googleapis/gapic-generator-python/issues/2208): Update the minimum supported version of api-core to `2.21.0` when released. - session.install('--no-cache-dir', '--force-reinstall', "google-api-core[grpc, async_rest]==2.21.0rc0") + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2211): Remove hardcoded dependencies + # from here and add a new constraints file for testing the minimum supported versions for async REST feature. + session.install('--no-cache-dir', '--force-reinstall', "google-api-core[grpc, async_rest]==2.21.0") # session.install('--no-cache-dir', '--force-reinstall', "google-api-core==2.20.0") session.install('--no-cache-dir', '--force-reinstall', "google-auth[aiohttp]==2.35.0") else: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 9c99985554c5..9b72bcc6b139 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -566,7 +566,7 @@ def __init__(self, self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() self._wrap_with_kind = True self._prep_wrapped_messages(client_info) - self._operations_client: Optional[operations_v1.OperationsClient] = None + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -1585,7 +1585,7 @@ def operations_client(self) -> AsyncOperationsRestClient: rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore host=self._host, # use the 
credentials which are saved - credentials=self._credentials, + credentials=self._credentials, # type: ignore http_options=http_options, path_prefix="v1" ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 6a98686dcb52..760f590ed18b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -48,7 +48,7 @@ ] extras = { "async_rest": [ - "google-api-core[grpc] >= 2.21.0rc0, < 3.0.0dev", + "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" ], } From 013dcaef8352ae5c2473754373e47c2d34e2ebc2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 9 Oct 2024 14:19:55 -0400 Subject: [PATCH 1192/1339] test: add routing parameter and empty call test for REST (#2165) --- .../gapic/%name_%version/%sub/test_macros.j2 | 30 +- .../unit/gapic/asset_v1/test_asset_service.py | 736 +++++++++++++++--- .../credentials_v1/test_iam_credentials.py | 128 ++- .../unit/gapic/eventarc_v1/test_eventarc.py | 576 ++++++++++++-- .../logging_v2/test_config_service_v2.py | 320 +++++--- .../logging_v2/test_logging_service_v2.py | 50 +- .../logging_v2/test_metrics_service_v2.py | 50 +- .../unit/gapic/redis_v1/test_cloud_redis.py | 627 +++++++++++++-- 8 files changed, 2109 insertions(+), 408 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index ccd963ffbda1..d76cd91a824e 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1466,14 +1466,20 @@ def test_{{ method_name }}_rest_no_http_options(): @pytest.mark.asyncio {% endif %}{# is_async #} {{ 
async_method_prefix }}def test_{{ method_name }}_{{ test_name }}_{{transport_name}}(): + {% if transport_name == 'rest_asyncio' %} + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + {% endif %} client = {{ get_client(service=service, is_async=is_async) }}( credentials={{ get_credentials(is_async=is_async) }}, + transport="{{ transport_name }}", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.{{ method.transport_safe_name|snake_case }}), '__call__') as call: + {% if 'rest' not in transport %} {% if is_async %} # Designate an appropriate return value for the call. {% if method.void %} @@ -1498,7 +1504,6 @@ def test_{{ method_name }}_rest_no_http_options(): {% endfor %} )) {% endif %}{# method.void #} - await client.{{ method_name }}(request={{ request_dict }}) {% else %}{# if not is_async #} {% if method.void %} call.return_value = None @@ -1509,10 +1514,15 @@ def test_{{ method_name }}_rest_no_http_options(): {% else %} call.return_value = {{ method.output.ident }}() {% endif %} + {% endif %}{# is_async #} + {% endif %}{# if 'rest' not in transport #} + {% if is_async %} + await client.{{ method_name }}(request={{ request_dict }}) + {% else %}{# is_async #} client.{{ method_name }}(request={{ request_dict }}) {% endif %}{# is_async #} - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, {% if routing_param %}kw{% else %}_{% endif %} = call.mock_calls[0] {% with method_settings = api.all_method_settings.get(method.meta.address.proto) %} @@ -2142,11 +2152,6 @@ def test_initialize_client_w_{{transport_name}}(): {% endmacro %}{# call_success_mixins_test #} {% macro empty_call_test(service, api, transport, is_async) %} -{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2159): - Currently this macro only supports gRPC. It should be updated to support REST - transport as well. -#} -{% if 'rest' not in transport %} {% for method in service.methods.values() %}{# method #} {% if not method.client_streaming %} # This test is a coverage failsafe to make sure that totally empty calls, @@ -2154,7 +2159,6 @@ def test_initialize_client_w_{{transport_name}}(): {{ method_call_test_generic("empty_call", method, service, api, transport, request_dict=None, is_async=is_async) }} {% endif %}{# not method.client_streaming #} {% endfor %}{# method in service.methods.values() #} -{% endif %}{# 'rest' not in transport #} {% endmacro %}{# empty_call_test #} {% macro get_uuid4_re() -%} @@ -2162,11 +2166,6 @@ def test_initialize_client_w_{{transport_name}}(): {%- endmacro %}{# uuid_re #} {% macro routing_parameter_test(service, api, transport, is_async) %} -{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2159): - Currently this macro only supports gRPC. It should be updated to support REST - transport as well. 
-#} -{% if 'rest' not in transport %} {% for method in service.methods.values() %}{# method #} {% if method.explicit_routing %} {# Any value that is part of the HTTP/1.1 URI should be sent as #} @@ -2176,7 +2175,6 @@ def test_initialize_client_w_{{transport_name}}(): {% endfor %}{# routing_param in method.routing_rule.routing_parameters #} {% endif %}{# method.explicit_routing #} {% endfor %}{# method in service.methods.values() #} -{% endif %} {% endmacro %}{# routing_parameter_test #} {# inteceptor_class_test generates tests for rest interceptors. #} @@ -2269,4 +2267,4 @@ def test_initialize_client_w_{{transport_name}}(): post.assert_called_once() {% endif %} {% endif %}{# end 'grpc' in transport #} -{% endmacro%} +{% endmacro%}{# inteceptor_class_test #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 1e304113db38..fcf9a01fbc1b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -12971,16 +12971,17 @@ def test_initialize_client_w_grpc(): def test_export_assets_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.export_assets), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.export_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ExportAssetsRequest() @@ -12993,16 +12994,17 @@ def test_export_assets_empty_call_grpc(): def test_list_assets_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_assets), '__call__') as call: call.return_value = asset_service.ListAssetsResponse() client.list_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ListAssetsRequest() @@ -13015,16 +13017,17 @@ def test_list_assets_empty_call_grpc(): def test_batch_get_assets_history_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.batch_get_assets_history), '__call__') as call: call.return_value = asset_service.BatchGetAssetsHistoryResponse() client.batch_get_assets_history(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.BatchGetAssetsHistoryRequest() @@ -13037,16 +13040,17 @@ def test_batch_get_assets_history_empty_call_grpc(): def test_create_feed_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.create_feed), '__call__') as call: call.return_value = asset_service.Feed() client.create_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.CreateFeedRequest() @@ -13059,16 +13063,17 @@ def test_create_feed_empty_call_grpc(): def test_get_feed_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_feed), '__call__') as call: call.return_value = asset_service.Feed() client.get_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.GetFeedRequest() @@ -13081,16 +13086,17 @@ def test_get_feed_empty_call_grpc(): def test_list_feeds_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_feeds), '__call__') as call: call.return_value = asset_service.ListFeedsResponse() client.list_feeds(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ListFeedsRequest() @@ -13103,16 +13109,17 @@ def test_list_feeds_empty_call_grpc(): def test_update_feed_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_feed), '__call__') as call: call.return_value = asset_service.Feed() client.update_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.UpdateFeedRequest() @@ -13125,16 +13132,17 @@ def test_update_feed_empty_call_grpc(): def test_delete_feed_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_feed), '__call__') as call: call.return_value = None client.delete_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.DeleteFeedRequest() @@ -13147,16 +13155,17 @@ def test_delete_feed_empty_call_grpc(): def test_search_all_resources_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.search_all_resources), '__call__') as call: call.return_value = asset_service.SearchAllResourcesResponse() client.search_all_resources(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.SearchAllResourcesRequest() @@ -13169,16 +13178,17 @@ def test_search_all_resources_empty_call_grpc(): def test_search_all_iam_policies_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.search_all_iam_policies), '__call__') as call: call.return_value = asset_service.SearchAllIamPoliciesResponse() client.search_all_iam_policies(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.SearchAllIamPoliciesRequest() @@ -13191,16 +13201,17 @@ def test_search_all_iam_policies_empty_call_grpc(): def test_analyze_iam_policy_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_iam_policy), '__call__') as call: call.return_value = asset_service.AnalyzeIamPolicyResponse() client.analyze_iam_policy(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeIamPolicyRequest() @@ -13213,16 +13224,17 @@ def test_analyze_iam_policy_empty_call_grpc(): def test_analyze_iam_policy_longrunning_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_iam_policy_longrunning), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.analyze_iam_policy_longrunning(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() @@ -13235,16 +13247,17 @@ def test_analyze_iam_policy_longrunning_empty_call_grpc(): def test_analyze_move_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_move), '__call__') as call: call.return_value = asset_service.AnalyzeMoveResponse() client.analyze_move(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeMoveRequest() @@ -13257,16 +13270,17 @@ def test_analyze_move_empty_call_grpc(): def test_query_assets_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.query_assets), '__call__') as call: call.return_value = asset_service.QueryAssetsResponse() client.query_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.QueryAssetsRequest() @@ -13279,16 +13293,17 @@ def test_query_assets_empty_call_grpc(): def test_create_saved_query_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_saved_query), '__call__') as call: call.return_value = asset_service.SavedQuery() client.create_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.CreateSavedQueryRequest() @@ -13301,16 +13316,17 @@ def test_create_saved_query_empty_call_grpc(): def test_get_saved_query_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_saved_query), '__call__') as call: call.return_value = asset_service.SavedQuery() client.get_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.GetSavedQueryRequest() @@ -13323,16 +13339,17 @@ def test_get_saved_query_empty_call_grpc(): def test_list_saved_queries_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_saved_queries), '__call__') as call: call.return_value = asset_service.ListSavedQueriesResponse() client.list_saved_queries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ListSavedQueriesRequest() @@ -13345,16 +13362,17 @@ def test_list_saved_queries_empty_call_grpc(): def test_update_saved_query_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_saved_query), '__call__') as call: call.return_value = asset_service.SavedQuery() client.update_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.UpdateSavedQueryRequest() @@ -13367,16 +13385,17 @@ def test_update_saved_query_empty_call_grpc(): def test_delete_saved_query_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.delete_saved_query), '__call__') as call: call.return_value = None client.delete_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.DeleteSavedQueryRequest() @@ -13389,16 +13408,17 @@ def test_delete_saved_query_empty_call_grpc(): def test_batch_get_effective_iam_policies_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.batch_get_effective_iam_policies), '__call__') as call: call.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() client.batch_get_effective_iam_policies(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() @@ -13411,16 +13431,17 @@ def test_batch_get_effective_iam_policies_empty_call_grpc(): def test_analyze_org_policies_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_org_policies), '__call__') as call: call.return_value = asset_service.AnalyzeOrgPoliciesResponse() client.analyze_org_policies(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeOrgPoliciesRequest() @@ -13433,16 +13454,17 @@ def test_analyze_org_policies_empty_call_grpc(): def test_analyze_org_policy_governed_containers_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_org_policy_governed_containers), '__call__') as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() client.analyze_org_policy_governed_containers(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() @@ -13455,16 +13477,17 @@ def test_analyze_org_policy_governed_containers_empty_call_grpc(): def test_analyze_org_policy_governed_assets_empty_call_grpc(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_org_policy_governed_assets), '__call__') as call: call.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() client.analyze_org_policy_governed_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() @@ -13493,9 +13516,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_export_assets_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.export_assets), '__call__') as call: @@ -13505,7 +13529,7 @@ async def test_export_assets_empty_call_grpc_asyncio(): ) await client.export_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ExportAssetsRequest() @@ -13519,9 +13543,10 @@ async def test_export_assets_empty_call_grpc_asyncio(): async def test_list_assets_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_assets), '__call__') as call: @@ -13531,7 +13556,7 @@ async def test_list_assets_empty_call_grpc_asyncio(): )) await client.list_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ListAssetsRequest() @@ -13545,9 +13570,10 @@ async def test_list_assets_empty_call_grpc_asyncio(): async def test_batch_get_assets_history_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.batch_get_assets_history), '__call__') as call: @@ -13556,7 +13582,7 @@ async def test_batch_get_assets_history_empty_call_grpc_asyncio(): )) await client.batch_get_assets_history(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.BatchGetAssetsHistoryRequest() @@ -13570,9 +13596,10 @@ async def test_batch_get_assets_history_empty_call_grpc_asyncio(): async def test_create_feed_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_feed), '__call__') as call: @@ -13586,7 +13613,7 @@ async def test_create_feed_empty_call_grpc_asyncio(): )) await client.create_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.CreateFeedRequest() @@ -13600,9 +13627,10 @@ async def test_create_feed_empty_call_grpc_asyncio(): async def test_get_feed_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_feed), '__call__') as call: @@ -13616,7 +13644,7 @@ async def test_get_feed_empty_call_grpc_asyncio(): )) await client.get_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.GetFeedRequest() @@ -13630,9 +13658,10 @@ async def test_get_feed_empty_call_grpc_asyncio(): async def test_list_feeds_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_feeds), '__call__') as call: @@ -13641,7 +13670,7 @@ async def test_list_feeds_empty_call_grpc_asyncio(): )) await client.list_feeds(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ListFeedsRequest() @@ -13655,9 +13684,10 @@ async def test_list_feeds_empty_call_grpc_asyncio(): async def test_update_feed_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.update_feed), '__call__') as call: @@ -13671,7 +13701,7 @@ async def test_update_feed_empty_call_grpc_asyncio(): )) await client.update_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.UpdateFeedRequest() @@ -13685,9 +13715,10 @@ async def test_update_feed_empty_call_grpc_asyncio(): async def test_delete_feed_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_feed), '__call__') as call: @@ -13695,7 +13726,7 @@ async def test_delete_feed_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_feed(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.DeleteFeedRequest() @@ -13709,9 +13740,10 @@ async def test_delete_feed_empty_call_grpc_asyncio(): async def test_search_all_resources_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.search_all_resources), '__call__') as call: @@ -13721,7 +13753,7 @@ async def test_search_all_resources_empty_call_grpc_asyncio(): )) await client.search_all_resources(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.SearchAllResourcesRequest() @@ -13735,9 +13767,10 @@ async def test_search_all_resources_empty_call_grpc_asyncio(): async def test_search_all_iam_policies_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.search_all_iam_policies), '__call__') as call: @@ -13747,7 +13780,7 @@ async def test_search_all_iam_policies_empty_call_grpc_asyncio(): )) await client.search_all_iam_policies(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.SearchAllIamPoliciesRequest() @@ -13761,9 +13794,10 @@ async def test_search_all_iam_policies_empty_call_grpc_asyncio(): async def test_analyze_iam_policy_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_iam_policy), '__call__') as call: @@ -13773,7 +13807,7 @@ async def test_analyze_iam_policy_empty_call_grpc_asyncio(): )) await client.analyze_iam_policy(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeIamPolicyRequest() @@ -13787,9 +13821,10 @@ async def test_analyze_iam_policy_empty_call_grpc_asyncio(): async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_iam_policy_longrunning), '__call__') as call: @@ -13799,7 +13834,7 @@ async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): ) await client.analyze_iam_policy_longrunning(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() @@ -13813,9 +13848,10 @@ async def test_analyze_iam_policy_longrunning_empty_call_grpc_asyncio(): async def test_analyze_move_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_move), '__call__') as call: @@ -13824,7 +13860,7 @@ async def test_analyze_move_empty_call_grpc_asyncio(): )) await client.analyze_move(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeMoveRequest() @@ -13838,9 +13874,10 @@ async def test_analyze_move_empty_call_grpc_asyncio(): async def test_query_assets_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.query_assets), '__call__') as call: @@ -13851,7 +13888,7 @@ async def test_query_assets_empty_call_grpc_asyncio(): )) await client.query_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.QueryAssetsRequest() @@ -13865,9 +13902,10 @@ async def test_query_assets_empty_call_grpc_asyncio(): async def test_create_saved_query_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_saved_query), '__call__') as call: @@ -13880,7 +13918,7 @@ async def test_create_saved_query_empty_call_grpc_asyncio(): )) await client.create_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.CreateSavedQueryRequest() @@ -13894,9 +13932,10 @@ async def test_create_saved_query_empty_call_grpc_asyncio(): async def test_get_saved_query_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_saved_query), '__call__') as call: @@ -13909,7 +13948,7 @@ async def test_get_saved_query_empty_call_grpc_asyncio(): )) await client.get_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.GetSavedQueryRequest() @@ -13923,9 +13962,10 @@ async def test_get_saved_query_empty_call_grpc_asyncio(): async def test_list_saved_queries_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_saved_queries), '__call__') as call: @@ -13935,7 +13975,7 @@ async def test_list_saved_queries_empty_call_grpc_asyncio(): )) await client.list_saved_queries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.ListSavedQueriesRequest() @@ -13949,9 +13989,10 @@ async def test_list_saved_queries_empty_call_grpc_asyncio(): async def test_update_saved_query_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_saved_query), '__call__') as call: @@ -13964,7 +14005,7 @@ async def test_update_saved_query_empty_call_grpc_asyncio(): )) await client.update_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.UpdateSavedQueryRequest() @@ -13978,9 +14019,10 @@ async def test_update_saved_query_empty_call_grpc_asyncio(): async def test_delete_saved_query_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_saved_query), '__call__') as call: @@ -13988,7 +14030,7 @@ async def test_delete_saved_query_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_saved_query(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.DeleteSavedQueryRequest() @@ -14002,9 +14044,10 @@ async def test_delete_saved_query_empty_call_grpc_asyncio(): async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.batch_get_effective_iam_policies), '__call__') as call: @@ -14013,7 +14056,7 @@ async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): )) await client.batch_get_effective_iam_policies(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() @@ -14027,9 +14070,10 @@ async def test_batch_get_effective_iam_policies_empty_call_grpc_asyncio(): async def test_analyze_org_policies_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_org_policies), '__call__') as call: @@ -14039,7 +14083,7 @@ async def test_analyze_org_policies_empty_call_grpc_asyncio(): )) await client.analyze_org_policies(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeOrgPoliciesRequest() @@ -14053,9 +14097,10 @@ async def test_analyze_org_policies_empty_call_grpc_asyncio(): async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_org_policy_governed_containers), '__call__') as call: @@ -14065,7 +14110,7 @@ async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): )) await client.analyze_org_policy_governed_containers(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() @@ -14079,9 +14124,10 @@ async def test_analyze_org_policy_governed_containers_empty_call_grpc_asyncio(): async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): client = AssetServiceAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.analyze_org_policy_governed_assets), '__call__') as call: @@ -14091,7 +14137,7 @@ async def test_analyze_org_policy_governed_assets_empty_call_grpc_asyncio(): )) await client.analyze_org_policy_governed_assets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() @@ -16571,6 +16617,512 @@ def test_initialize_client_w_rest(): assert client is not None +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_assets), + '__call__') as call: + client.export_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ExportAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_assets), + '__call__') as call: + client.list_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_assets_history_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.batch_get_assets_history), + '__call__') as call: + client.batch_get_assets_history(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetAssetsHistoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_feed), + '__call__') as call: + client.create_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_feed), + '__call__') as call: + client.get_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_feeds_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_feeds), + '__call__') as call: + client.list_feeds(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListFeedsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_feed), + '__call__') as call: + client.update_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_feed_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_feed), + '__call__') as call: + client.delete_feed(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteFeedRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_search_all_resources_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_resources), + '__call__') as call: + client.search_all_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_all_iam_policies_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_all_iam_policies), + '__call__') as call: + client.search_all_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.SearchAllIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy), + '__call__') as call: + client.analyze_iam_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_iam_policy_longrunning_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_iam_policy_longrunning), + '__call__') as call: + client.analyze_iam_policy_longrunning(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeIamPolicyLongrunningRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_move_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_move), + '__call__') as call: + client.analyze_move(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeMoveRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_query_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.query_assets), + '__call__') as call: + client.query_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.QueryAssetsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_saved_query_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_saved_query), + '__call__') as call: + client.create_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.CreateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_saved_query_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_saved_query), + '__call__') as call: + client.get_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.GetSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_saved_queries_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_saved_queries), + '__call__') as call: + client.list_saved_queries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.ListSavedQueriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_saved_query_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_saved_query), + '__call__') as call: + client.update_saved_query(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.UpdateSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_saved_query_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_saved_query), + '__call__') as call: + client.delete_saved_query(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.DeleteSavedQueryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_batch_get_effective_iam_policies_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.batch_get_effective_iam_policies), + '__call__') as call: + client.batch_get_effective_iam_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.BatchGetEffectiveIamPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policies_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policies), + '__call__') as call: + client.analyze_org_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policy_governed_containers_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.analyze_org_policy_governed_containers), + '__call__') as call: + client.analyze_org_policy_governed_containers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedContainersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_analyze_org_policy_governed_assets_empty_call_rest(): + client = AssetServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.analyze_org_policy_governed_assets), + '__call__') as call: + client.analyze_org_policy_governed_assets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest() + + assert args[0] == request_msg + + def test_asset_service_rest_lro_client(): client = AssetServiceClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index c943272ba254..94f791935d71 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -2925,16 +2925,17 @@ def test_initialize_client_w_grpc(): def test_generate_access_token_empty_call_grpc(): client = IAMCredentialsClient( 
credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.generate_access_token), '__call__') as call: call.return_value = common.GenerateAccessTokenResponse() client.generate_access_token(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.GenerateAccessTokenRequest() @@ -2947,16 +2948,17 @@ def test_generate_access_token_empty_call_grpc(): def test_generate_id_token_empty_call_grpc(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.generate_id_token), '__call__') as call: call.return_value = common.GenerateIdTokenResponse() client.generate_id_token(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.GenerateIdTokenRequest() @@ -2969,16 +2971,17 @@ def test_generate_id_token_empty_call_grpc(): def test_sign_blob_empty_call_grpc(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.sign_blob), '__call__') as call: call.return_value = common.SignBlobResponse() client.sign_blob(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.SignBlobRequest() @@ -2991,16 +2994,17 @@ def test_sign_blob_empty_call_grpc(): def test_sign_jwt_empty_call_grpc(): client = IAMCredentialsClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.sign_jwt), '__call__') as call: call.return_value = common.SignJwtResponse() client.sign_jwt(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.SignJwtRequest() @@ -3029,9 +3033,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_generate_access_token_empty_call_grpc_asyncio(): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.generate_access_token), '__call__') as call: @@ -3041,7 +3046,7 @@ async def test_generate_access_token_empty_call_grpc_asyncio(): )) await client.generate_access_token(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.GenerateAccessTokenRequest() @@ -3055,9 +3060,10 @@ async def test_generate_access_token_empty_call_grpc_asyncio(): async def test_generate_id_token_empty_call_grpc_asyncio(): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.generate_id_token), '__call__') as call: @@ -3067,7 +3073,7 @@ async def test_generate_id_token_empty_call_grpc_asyncio(): )) await client.generate_id_token(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.GenerateIdTokenRequest() @@ -3081,9 +3087,10 @@ async def test_generate_id_token_empty_call_grpc_asyncio(): async def test_sign_blob_empty_call_grpc_asyncio(): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.sign_blob), '__call__') as call: @@ -3094,7 +3101,7 @@ async def test_sign_blob_empty_call_grpc_asyncio(): )) await client.sign_blob(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.SignBlobRequest() @@ -3108,9 +3115,10 @@ async def test_sign_blob_empty_call_grpc_asyncio(): async def test_sign_jwt_empty_call_grpc_asyncio(): client = IAMCredentialsAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.sign_jwt), '__call__') as call: @@ -3121,7 +3129,7 @@ async def test_sign_jwt_empty_call_grpc_asyncio(): )) await client.sign_jwt(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = common.SignJwtRequest() @@ -3543,6 +3551,94 @@ def test_initialize_client_w_rest(): assert client is not None +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_access_token_empty_call_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_access_token), + '__call__') as call: + client.generate_access_token(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateAccessTokenRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_generate_id_token_empty_call_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.generate_id_token), + '__call__') as call: + client.generate_id_token(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.GenerateIdTokenRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_blob_empty_call_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.sign_blob), + '__call__') as call: + client.sign_blob(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignBlobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_sign_jwt_empty_call_rest(): + client = IAMCredentialsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.sign_jwt), + '__call__') as call: + client.sign_jwt(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = common.SignJwtRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = IAMCredentialsClient( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index d9b3d3d26b5b..fac54c7318cc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -10852,16 +10852,17 @@ def test_initialize_client_w_grpc(): def test_get_trigger_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.get_trigger), '__call__') as call: call.return_value = trigger.Trigger() client.get_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetTriggerRequest() @@ -10874,16 +10875,17 @@ def test_get_trigger_empty_call_grpc(): def test_list_triggers_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_triggers), '__call__') as call: call.return_value = eventarc.ListTriggersResponse() client.list_triggers(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListTriggersRequest() @@ -10896,16 +10898,17 @@ def test_list_triggers_empty_call_grpc(): def test_create_trigger_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_trigger), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.create_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.CreateTriggerRequest() @@ -10918,16 +10921,17 @@ def test_create_trigger_empty_call_grpc(): def test_update_trigger_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_trigger), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.update_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.UpdateTriggerRequest() @@ -10940,16 +10944,17 @@ def test_update_trigger_empty_call_grpc(): def test_delete_trigger_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_trigger), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.delete_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.DeleteTriggerRequest() @@ -10962,16 +10967,17 @@ def test_delete_trigger_empty_call_grpc(): def test_get_channel_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.get_channel), '__call__') as call: call.return_value = channel.Channel() client.get_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetChannelRequest() @@ -10984,16 +10990,17 @@ def test_get_channel_empty_call_grpc(): def test_list_channels_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_channels), '__call__') as call: call.return_value = eventarc.ListChannelsResponse() client.list_channels(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListChannelsRequest() @@ -11006,16 +11013,17 @@ def test_list_channels_empty_call_grpc(): def test_create_channel_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_channel_), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.create_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.CreateChannelRequest() @@ -11028,16 +11036,17 @@ def test_create_channel_empty_call_grpc(): def test_update_channel_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_channel), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.update_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.UpdateChannelRequest() @@ -11050,16 +11059,17 @@ def test_update_channel_empty_call_grpc(): def test_delete_channel_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_channel), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.delete_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.DeleteChannelRequest() @@ -11072,16 +11082,17 @@ def test_delete_channel_empty_call_grpc(): def test_get_provider_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.get_provider), '__call__') as call: call.return_value = discovery.Provider() client.get_provider(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetProviderRequest() @@ -11094,16 +11105,17 @@ def test_get_provider_empty_call_grpc(): def test_list_providers_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_providers), '__call__') as call: call.return_value = eventarc.ListProvidersResponse() client.list_providers(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListProvidersRequest() @@ -11116,16 +11128,17 @@ def test_list_providers_empty_call_grpc(): def test_get_channel_connection_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_channel_connection), '__call__') as call: call.return_value = channel_connection.ChannelConnection() client.get_channel_connection(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetChannelConnectionRequest() @@ -11138,16 +11151,17 @@ def test_get_channel_connection_empty_call_grpc(): def test_list_channel_connections_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_channel_connections), '__call__') as call: call.return_value = eventarc.ListChannelConnectionsResponse() client.list_channel_connections(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListChannelConnectionsRequest() @@ -11160,16 +11174,17 @@ def test_list_channel_connections_empty_call_grpc(): def test_create_channel_connection_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_channel_connection), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.create_channel_connection(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.CreateChannelConnectionRequest() @@ -11182,16 +11197,17 @@ def test_create_channel_connection_empty_call_grpc(): def test_delete_channel_connection_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_channel_connection), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.delete_channel_connection(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.DeleteChannelConnectionRequest() @@ -11204,16 +11220,17 @@ def test_delete_channel_connection_empty_call_grpc(): def test_get_google_channel_config_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_google_channel_config), '__call__') as call: call.return_value = google_channel_config.GoogleChannelConfig() client.get_google_channel_config(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetGoogleChannelConfigRequest() @@ -11226,16 +11243,17 @@ def test_get_google_channel_config_empty_call_grpc(): def test_update_google_channel_config_empty_call_grpc(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_google_channel_config), '__call__') as call: call.return_value = gce_google_channel_config.GoogleChannelConfig() client.update_google_channel_config(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.UpdateGoogleChannelConfigRequest() @@ -11264,9 +11282,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_get_trigger_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_trigger), '__call__') as call: @@ -11280,7 +11299,7 @@ async def test_get_trigger_empty_call_grpc_asyncio(): )) await client.get_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetTriggerRequest() @@ -11294,9 +11313,10 @@ async def test_get_trigger_empty_call_grpc_asyncio(): async def test_list_triggers_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_triggers), '__call__') as call: @@ -11307,7 +11327,7 @@ async def test_list_triggers_empty_call_grpc_asyncio(): )) await client.list_triggers(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListTriggersRequest() @@ -11321,9 +11341,10 @@ async def test_list_triggers_empty_call_grpc_asyncio(): async def test_create_trigger_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_trigger), '__call__') as call: @@ -11333,7 +11354,7 @@ async def test_create_trigger_empty_call_grpc_asyncio(): ) await client.create_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.CreateTriggerRequest() @@ -11347,9 +11368,10 @@ async def test_create_trigger_empty_call_grpc_asyncio(): async def test_update_trigger_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_trigger), '__call__') as call: @@ -11359,7 +11381,7 @@ async def test_update_trigger_empty_call_grpc_asyncio(): ) await client.update_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.UpdateTriggerRequest() @@ -11373,9 +11395,10 @@ async def test_update_trigger_empty_call_grpc_asyncio(): async def test_delete_trigger_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_trigger), '__call__') as call: @@ -11385,7 +11408,7 @@ async def test_delete_trigger_empty_call_grpc_asyncio(): ) await client.delete_trigger(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.DeleteTriggerRequest() @@ -11399,9 +11422,10 @@ async def test_delete_trigger_empty_call_grpc_asyncio(): async def test_get_channel_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_channel), '__call__') as call: @@ -11416,7 +11440,7 @@ async def test_get_channel_empty_call_grpc_asyncio(): )) await client.get_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetChannelRequest() @@ -11430,9 +11454,10 @@ async def test_get_channel_empty_call_grpc_asyncio(): async def test_list_channels_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_channels), '__call__') as call: @@ -11443,7 +11468,7 @@ async def test_list_channels_empty_call_grpc_asyncio(): )) await client.list_channels(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListChannelsRequest() @@ -11457,9 +11482,10 @@ async def test_list_channels_empty_call_grpc_asyncio(): async def test_create_channel_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_channel_), '__call__') as call: @@ -11469,7 +11495,7 @@ async def test_create_channel_empty_call_grpc_asyncio(): ) await client.create_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.CreateChannelRequest() @@ -11483,9 +11509,10 @@ async def test_create_channel_empty_call_grpc_asyncio(): async def test_update_channel_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_channel), '__call__') as call: @@ -11495,7 +11522,7 @@ async def test_update_channel_empty_call_grpc_asyncio(): ) await client.update_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.UpdateChannelRequest() @@ -11509,9 +11536,10 @@ async def test_update_channel_empty_call_grpc_asyncio(): async def test_delete_channel_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_channel), '__call__') as call: @@ -11521,7 +11549,7 @@ async def test_delete_channel_empty_call_grpc_asyncio(): ) await client.delete_channel(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.DeleteChannelRequest() @@ -11535,9 +11563,10 @@ async def test_delete_channel_empty_call_grpc_asyncio(): async def test_get_provider_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_provider), '__call__') as call: @@ -11548,7 +11577,7 @@ async def test_get_provider_empty_call_grpc_asyncio(): )) await client.get_provider(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetProviderRequest() @@ -11562,9 +11591,10 @@ async def test_get_provider_empty_call_grpc_asyncio(): async def test_list_providers_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_providers), '__call__') as call: @@ -11575,7 +11605,7 @@ async def test_list_providers_empty_call_grpc_asyncio(): )) await client.list_providers(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListProvidersRequest() @@ -11589,9 +11619,10 @@ async def test_list_providers_empty_call_grpc_asyncio(): async def test_get_channel_connection_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_channel_connection), '__call__') as call: @@ -11604,7 +11635,7 @@ async def test_get_channel_connection_empty_call_grpc_asyncio(): )) await client.get_channel_connection(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetChannelConnectionRequest() @@ -11618,9 +11649,10 @@ async def test_get_channel_connection_empty_call_grpc_asyncio(): async def test_list_channel_connections_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_channel_connections), '__call__') as call: @@ -11631,7 +11663,7 @@ async def test_list_channel_connections_empty_call_grpc_asyncio(): )) await client.list_channel_connections(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.ListChannelConnectionsRequest() @@ -11645,9 +11677,10 @@ async def test_list_channel_connections_empty_call_grpc_asyncio(): async def test_create_channel_connection_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.create_channel_connection), '__call__') as call: @@ -11657,7 +11690,7 @@ async def test_create_channel_connection_empty_call_grpc_asyncio(): ) await client.create_channel_connection(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.CreateChannelConnectionRequest() @@ -11671,9 +11704,10 @@ async def test_create_channel_connection_empty_call_grpc_asyncio(): async def test_delete_channel_connection_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_channel_connection), '__call__') as call: @@ -11683,7 +11717,7 @@ async def test_delete_channel_connection_empty_call_grpc_asyncio(): ) await client.delete_channel_connection(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.DeleteChannelConnectionRequest() @@ -11697,9 +11731,10 @@ async def test_delete_channel_connection_empty_call_grpc_asyncio(): async def test_get_google_channel_config_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.get_google_channel_config), '__call__') as call: @@ -11710,7 +11745,7 @@ async def test_get_google_channel_config_empty_call_grpc_asyncio(): )) await client.get_google_channel_config(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.GetGoogleChannelConfigRequest() @@ -11724,9 +11759,10 @@ async def test_get_google_channel_config_empty_call_grpc_asyncio(): async def test_update_google_channel_config_empty_call_grpc_asyncio(): client = EventarcAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_google_channel_config), '__call__') as call: @@ -11737,7 +11773,7 @@ async def test_update_google_channel_config_empty_call_grpc_asyncio(): )) await client.update_google_channel_config(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = eventarc.UpdateGoogleChannelConfigRequest() @@ -14383,6 +14419,402 @@ def test_initialize_client_w_rest(): assert client is not None +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_trigger), + '__call__') as call: + client.get_trigger(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_triggers_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_triggers), + '__call__') as call: + client.list_triggers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListTriggersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_trigger), + '__call__') as call: + client.create_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_trigger), + '__call__') as call: + client.update_trigger(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_trigger_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_trigger), + '__call__') as call: + client.delete_trigger(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteTriggerRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_channel), + '__call__') as call: + client.get_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channels_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channels), + '__call__') as call: + client.list_channels(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_), + '__call__') as call: + client.create_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_channel), + '__call__') as call: + client.update_channel(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_channel_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel), + '__call__') as call: + client.delete_channel(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_provider_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_provider), + '__call__') as call: + client.get_provider(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetProviderRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_providers_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_providers), + '__call__') as call: + client.list_providers(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListProvidersRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_channel_connection_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_channel_connection), + '__call__') as call: + client.get_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_channel_connections_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_channel_connections), + '__call__') as call: + client.list_channel_connections(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.ListChannelConnectionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_channel_connection_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_channel_connection), + '__call__') as call: + client.create_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.CreateChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_channel_connection_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_channel_connection), + '__call__') as call: + client.delete_channel_connection(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.DeleteChannelConnectionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_google_channel_config_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_google_channel_config), + '__call__') as call: + client.get_google_channel_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.GetGoogleChannelConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_google_channel_config_empty_call_rest(): + client = EventarcClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_google_channel_config), + '__call__') as call: + client.update_google_channel_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = eventarc.UpdateGoogleChannelConfigRequest() + + assert args[0] == request_msg + + def test_eventarc_rest_lro_client(): client = EventarcClient( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 80b4f75e6ebc..9afec2b18972 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -10925,16 +10925,17 @@ def test_initialize_client_w_grpc(): def test_list_buckets_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_buckets), '__call__') as call: call.return_value = logging_config.ListBucketsResponse() client.list_buckets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListBucketsRequest() @@ -10947,16 +10948,17 @@ def test_list_buckets_empty_call_grpc(): def test_get_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.get_bucket), '__call__') as call: call.return_value = logging_config.LogBucket() client.get_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetBucketRequest() @@ -10969,16 +10971,17 @@ def test_get_bucket_empty_call_grpc(): def test_create_bucket_async_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_bucket_async), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.create_bucket_async(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateBucketRequest() @@ -10991,16 +10994,17 @@ def test_create_bucket_async_empty_call_grpc(): def test_update_bucket_async_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_bucket_async), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.update_bucket_async(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateBucketRequest() @@ -11013,16 +11017,17 @@ def test_update_bucket_async_empty_call_grpc(): def test_create_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_bucket), '__call__') as call: call.return_value = logging_config.LogBucket() client.create_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateBucketRequest() @@ -11035,16 +11040,17 @@ def test_create_bucket_empty_call_grpc(): def test_update_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_bucket), '__call__') as call: call.return_value = logging_config.LogBucket() client.update_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateBucketRequest() @@ -11057,16 +11063,17 @@ def test_update_bucket_empty_call_grpc(): def test_delete_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.delete_bucket), '__call__') as call: call.return_value = None client.delete_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteBucketRequest() @@ -11079,16 +11086,17 @@ def test_delete_bucket_empty_call_grpc(): def test_undelete_bucket_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.undelete_bucket), '__call__') as call: call.return_value = None client.undelete_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UndeleteBucketRequest() @@ -11101,16 +11109,17 @@ def test_undelete_bucket_empty_call_grpc(): def test_list_views_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_views), '__call__') as call: call.return_value = logging_config.ListViewsResponse() client.list_views(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListViewsRequest() @@ -11123,16 +11132,17 @@ def test_list_views_empty_call_grpc(): def test_get_view_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_view), '__call__') as call: call.return_value = logging_config.LogView() client.get_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetViewRequest() @@ -11145,16 +11155,17 @@ def test_get_view_empty_call_grpc(): def test_create_view_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_view), '__call__') as call: call.return_value = logging_config.LogView() client.create_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateViewRequest() @@ -11167,16 +11178,17 @@ def test_create_view_empty_call_grpc(): def test_update_view_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.update_view), '__call__') as call: call.return_value = logging_config.LogView() client.update_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateViewRequest() @@ -11189,16 +11201,17 @@ def test_update_view_empty_call_grpc(): def test_delete_view_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_view), '__call__') as call: call.return_value = None client.delete_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteViewRequest() @@ -11211,16 +11224,17 @@ def test_delete_view_empty_call_grpc(): def test_list_sinks_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_sinks), '__call__') as call: call.return_value = logging_config.ListSinksResponse() client.list_sinks(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListSinksRequest() @@ -11233,16 +11247,17 @@ def test_list_sinks_empty_call_grpc(): def test_get_sink_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_sink), '__call__') as call: call.return_value = logging_config.LogSink() client.get_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetSinkRequest() @@ -11255,16 +11270,17 @@ def test_get_sink_empty_call_grpc(): def test_create_sink_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_sink), '__call__') as call: call.return_value = logging_config.LogSink() client.create_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateSinkRequest() @@ -11277,16 +11293,17 @@ def test_create_sink_empty_call_grpc(): def test_update_sink_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.update_sink), '__call__') as call: call.return_value = logging_config.LogSink() client.update_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateSinkRequest() @@ -11299,16 +11316,17 @@ def test_update_sink_empty_call_grpc(): def test_delete_sink_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_sink), '__call__') as call: call.return_value = None client.delete_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteSinkRequest() @@ -11321,16 +11339,17 @@ def test_delete_sink_empty_call_grpc(): def test_create_link_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_link), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.create_link(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateLinkRequest() @@ -11343,16 +11362,17 @@ def test_create_link_empty_call_grpc(): def test_delete_link_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_link), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.delete_link(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteLinkRequest() @@ -11365,16 +11385,17 @@ def test_delete_link_empty_call_grpc(): def test_list_links_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_links), '__call__') as call: call.return_value = logging_config.ListLinksResponse() client.list_links(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListLinksRequest() @@ -11387,16 +11408,17 @@ def test_list_links_empty_call_grpc(): def test_get_link_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.get_link), '__call__') as call: call.return_value = logging_config.Link() client.get_link(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetLinkRequest() @@ -11409,16 +11431,17 @@ def test_get_link_empty_call_grpc(): def test_list_exclusions_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_exclusions), '__call__') as call: call.return_value = logging_config.ListExclusionsResponse() client.list_exclusions(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListExclusionsRequest() @@ -11431,16 +11454,17 @@ def test_list_exclusions_empty_call_grpc(): def test_get_exclusion_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client.get_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetExclusionRequest() @@ -11453,16 +11477,17 @@ def test_get_exclusion_empty_call_grpc(): def test_create_exclusion_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client.create_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateExclusionRequest() @@ -11475,16 +11500,17 @@ def test_create_exclusion_empty_call_grpc(): def test_update_exclusion_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client.update_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateExclusionRequest() @@ -11497,16 +11523,17 @@ def test_update_exclusion_empty_call_grpc(): def test_delete_exclusion_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.delete_exclusion), '__call__') as call: call.return_value = None client.delete_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteExclusionRequest() @@ -11519,16 +11546,17 @@ def test_delete_exclusion_empty_call_grpc(): def test_get_cmek_settings_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_cmek_settings), '__call__') as call: call.return_value = logging_config.CmekSettings() client.get_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetCmekSettingsRequest() @@ -11541,16 +11569,17 @@ def test_get_cmek_settings_empty_call_grpc(): def test_update_cmek_settings_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_cmek_settings), '__call__') as call: call.return_value = logging_config.CmekSettings() client.update_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateCmekSettingsRequest() @@ -11563,16 +11592,17 @@ def test_update_cmek_settings_empty_call_grpc(): def test_get_settings_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_settings), '__call__') as call: call.return_value = logging_config.Settings() client.get_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetSettingsRequest() @@ -11585,16 +11615,17 @@ def test_get_settings_empty_call_grpc(): def test_update_settings_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_settings), '__call__') as call: call.return_value = logging_config.Settings() client.update_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateSettingsRequest() @@ -11607,16 +11638,17 @@ def test_update_settings_empty_call_grpc(): def test_copy_log_entries_empty_call_grpc(): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.copy_log_entries), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.copy_log_entries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CopyLogEntriesRequest() @@ -11645,9 +11677,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_list_buckets_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_buckets), '__call__') as call: @@ -11657,7 +11690,7 @@ async def test_list_buckets_empty_call_grpc_asyncio(): )) await client.list_buckets(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListBucketsRequest() @@ -11671,9 +11704,10 @@ async def test_list_buckets_empty_call_grpc_asyncio(): async def test_get_bucket_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_bucket), '__call__') as call: @@ -11689,7 +11723,7 @@ async def test_get_bucket_empty_call_grpc_asyncio(): )) await client.get_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetBucketRequest() @@ -11703,9 +11737,10 @@ async def test_get_bucket_empty_call_grpc_asyncio(): async def test_create_bucket_async_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_bucket_async), '__call__') as call: @@ -11715,7 +11750,7 @@ async def test_create_bucket_async_empty_call_grpc_asyncio(): ) await client.create_bucket_async(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateBucketRequest() @@ -11729,9 +11764,10 @@ async def test_create_bucket_async_empty_call_grpc_asyncio(): async def test_update_bucket_async_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_bucket_async), '__call__') as call: @@ -11741,7 +11777,7 @@ async def test_update_bucket_async_empty_call_grpc_asyncio(): ) await client.update_bucket_async(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateBucketRequest() @@ -11755,9 +11791,10 @@ async def test_update_bucket_async_empty_call_grpc_asyncio(): async def test_create_bucket_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_bucket), '__call__') as call: @@ -11773,7 +11810,7 @@ async def test_create_bucket_empty_call_grpc_asyncio(): )) await client.create_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateBucketRequest() @@ -11787,9 +11824,10 @@ async def test_create_bucket_empty_call_grpc_asyncio(): async def test_update_bucket_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_bucket), '__call__') as call: @@ -11805,7 +11843,7 @@ async def test_update_bucket_empty_call_grpc_asyncio(): )) await client.update_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateBucketRequest() @@ -11819,9 +11857,10 @@ async def test_update_bucket_empty_call_grpc_asyncio(): async def test_delete_bucket_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_bucket), '__call__') as call: @@ -11829,7 +11868,7 @@ async def test_delete_bucket_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteBucketRequest() @@ -11843,9 +11882,10 @@ async def test_delete_bucket_empty_call_grpc_asyncio(): async def test_undelete_bucket_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.undelete_bucket), '__call__') as call: @@ -11853,7 +11893,7 @@ async def test_undelete_bucket_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.undelete_bucket(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UndeleteBucketRequest() @@ -11867,9 +11907,10 @@ async def test_undelete_bucket_empty_call_grpc_asyncio(): async def test_list_views_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_views), '__call__') as call: @@ -11879,7 +11920,7 @@ async def test_list_views_empty_call_grpc_asyncio(): )) await client.list_views(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListViewsRequest() @@ -11893,9 +11934,10 @@ async def test_list_views_empty_call_grpc_asyncio(): async def test_get_view_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_view), '__call__') as call: @@ -11907,7 +11949,7 @@ async def test_get_view_empty_call_grpc_asyncio(): )) await client.get_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetViewRequest() @@ -11921,9 +11963,10 @@ async def test_get_view_empty_call_grpc_asyncio(): async def test_create_view_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_view), '__call__') as call: @@ -11935,7 +11978,7 @@ async def test_create_view_empty_call_grpc_asyncio(): )) await client.create_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateViewRequest() @@ -11949,9 +11992,10 @@ async def test_create_view_empty_call_grpc_asyncio(): async def test_update_view_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_view), '__call__') as call: @@ -11963,7 +12007,7 @@ async def test_update_view_empty_call_grpc_asyncio(): )) await client.update_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateViewRequest() @@ -11977,9 +12021,10 @@ async def test_update_view_empty_call_grpc_asyncio(): async def test_delete_view_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_view), '__call__') as call: @@ -11987,7 +12032,7 @@ async def test_delete_view_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_view(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteViewRequest() @@ -12001,9 +12046,10 @@ async def test_delete_view_empty_call_grpc_asyncio(): async def test_list_sinks_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_sinks), '__call__') as call: @@ -12013,7 +12059,7 @@ async def test_list_sinks_empty_call_grpc_asyncio(): )) await client.list_sinks(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListSinksRequest() @@ -12027,9 +12073,10 @@ async def test_list_sinks_empty_call_grpc_asyncio(): async def test_get_sink_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_sink), '__call__') as call: @@ -12046,7 +12093,7 @@ async def test_get_sink_empty_call_grpc_asyncio(): )) await client.get_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetSinkRequest() @@ -12060,9 +12107,10 @@ async def test_get_sink_empty_call_grpc_asyncio(): async def test_create_sink_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_sink), '__call__') as call: @@ -12079,7 +12127,7 @@ async def test_create_sink_empty_call_grpc_asyncio(): )) await client.create_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateSinkRequest() @@ -12093,9 +12141,10 @@ async def test_create_sink_empty_call_grpc_asyncio(): async def test_update_sink_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_sink), '__call__') as call: @@ -12112,7 +12161,7 @@ async def test_update_sink_empty_call_grpc_asyncio(): )) await client.update_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateSinkRequest() @@ -12126,9 +12175,10 @@ async def test_update_sink_empty_call_grpc_asyncio(): async def test_delete_sink_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_sink), '__call__') as call: @@ -12136,7 +12186,7 @@ async def test_delete_sink_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_sink(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteSinkRequest() @@ -12150,9 +12200,10 @@ async def test_delete_sink_empty_call_grpc_asyncio(): async def test_create_link_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_link), '__call__') as call: @@ -12162,7 +12213,7 @@ async def test_create_link_empty_call_grpc_asyncio(): ) await client.create_link(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateLinkRequest() @@ -12176,9 +12227,10 @@ async def test_create_link_empty_call_grpc_asyncio(): async def test_delete_link_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_link), '__call__') as call: @@ -12188,7 +12240,7 @@ async def test_delete_link_empty_call_grpc_asyncio(): ) await client.delete_link(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteLinkRequest() @@ -12202,9 +12254,10 @@ async def test_delete_link_empty_call_grpc_asyncio(): async def test_list_links_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. 
+ # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_links), '__call__') as call: @@ -12214,7 +12267,7 @@ async def test_list_links_empty_call_grpc_asyncio(): )) await client.list_links(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListLinksRequest() @@ -12228,9 +12281,10 @@ async def test_list_links_empty_call_grpc_asyncio(): async def test_get_link_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_link), '__call__') as call: @@ -12242,7 +12296,7 @@ async def test_get_link_empty_call_grpc_asyncio(): )) await client.get_link(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetLinkRequest() @@ -12256,9 +12310,10 @@ async def test_get_link_empty_call_grpc_asyncio(): async def test_list_exclusions_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_exclusions), '__call__') as call: @@ -12268,7 +12323,7 @@ async def test_list_exclusions_empty_call_grpc_asyncio(): )) await client.list_exclusions(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.ListExclusionsRequest() @@ -12282,9 +12337,10 @@ async def test_list_exclusions_empty_call_grpc_asyncio(): async def test_get_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_exclusion), '__call__') as call: @@ -12297,7 +12353,7 @@ async def test_get_exclusion_empty_call_grpc_asyncio(): )) await client.get_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetExclusionRequest() @@ -12311,9 +12367,10 @@ async def test_get_exclusion_empty_call_grpc_asyncio(): async def test_create_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_exclusion), '__call__') as call: @@ -12326,7 +12383,7 @@ async def test_create_exclusion_empty_call_grpc_asyncio(): )) await client.create_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CreateExclusionRequest() @@ -12340,9 +12397,10 @@ async def test_create_exclusion_empty_call_grpc_asyncio(): async def test_update_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_exclusion), '__call__') as call: @@ -12355,7 +12413,7 @@ async def test_update_exclusion_empty_call_grpc_asyncio(): )) await client.update_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateExclusionRequest() @@ -12369,9 +12427,10 @@ async def test_update_exclusion_empty_call_grpc_asyncio(): async def test_delete_exclusion_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_exclusion), '__call__') as call: @@ -12379,7 +12438,7 @@ async def test_delete_exclusion_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_exclusion(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.DeleteExclusionRequest() @@ -12393,9 +12452,10 @@ async def test_delete_exclusion_empty_call_grpc_asyncio(): async def test_get_cmek_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_cmek_settings), '__call__') as call: @@ -12408,7 +12468,7 @@ async def test_get_cmek_settings_empty_call_grpc_asyncio(): )) await client.get_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetCmekSettingsRequest() @@ -12422,9 +12482,10 @@ async def test_get_cmek_settings_empty_call_grpc_asyncio(): async def test_update_cmek_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_cmek_settings), '__call__') as call: @@ -12437,7 +12498,7 @@ async def test_update_cmek_settings_empty_call_grpc_asyncio(): )) await client.update_cmek_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateCmekSettingsRequest() @@ -12451,9 +12512,10 @@ async def test_update_cmek_settings_empty_call_grpc_asyncio(): async def test_get_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_settings), '__call__') as call: @@ -12467,7 +12529,7 @@ async def test_get_settings_empty_call_grpc_asyncio(): )) await client.get_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.GetSettingsRequest() @@ -12481,9 +12543,10 @@ async def test_get_settings_empty_call_grpc_asyncio(): async def test_update_settings_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_settings), '__call__') as call: @@ -12497,7 +12560,7 @@ async def test_update_settings_empty_call_grpc_asyncio(): )) await client.update_settings(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.UpdateSettingsRequest() @@ -12511,9 +12574,10 @@ async def test_update_settings_empty_call_grpc_asyncio(): async def test_copy_log_entries_empty_call_grpc_asyncio(): client = ConfigServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.copy_log_entries), '__call__') as call: @@ -12523,7 +12587,7 @@ async def test_copy_log_entries_empty_call_grpc_asyncio(): ) await client.copy_log_entries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_config.CopyLogEntriesRequest() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index bb2190376003..e68ade7f1e0e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -2913,16 +2913,17 @@ def test_initialize_client_w_grpc(): def test_delete_log_empty_call_grpc(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_log), '__call__') as call: call.return_value = None client.delete_log(request=None) - # Establish that the underlying gRPC stub method was called. 
+ # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.DeleteLogRequest() @@ -2935,16 +2936,17 @@ def test_delete_log_empty_call_grpc(): def test_write_log_entries_empty_call_grpc(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.write_log_entries), '__call__') as call: call.return_value = logging.WriteLogEntriesResponse() client.write_log_entries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.WriteLogEntriesRequest() @@ -2957,16 +2959,17 @@ def test_write_log_entries_empty_call_grpc(): def test_list_log_entries_empty_call_grpc(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_log_entries), '__call__') as call: call.return_value = logging.ListLogEntriesResponse() client.list_log_entries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.ListLogEntriesRequest() @@ -2979,16 +2982,17 @@ def test_list_log_entries_empty_call_grpc(): def test_list_monitored_resource_descriptors_empty_call_grpc(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), '__call__') as call: call.return_value = logging.ListMonitoredResourceDescriptorsResponse() client.list_monitored_resource_descriptors(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.ListMonitoredResourceDescriptorsRequest() @@ -3001,16 +3005,17 @@ def test_list_monitored_resource_descriptors_empty_call_grpc(): def test_list_logs_empty_call_grpc(): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_logs), '__call__') as call: call.return_value = logging.ListLogsResponse() client.list_logs(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.ListLogsRequest() @@ -3039,9 +3044,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_delete_log_empty_call_grpc_asyncio(): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_log), '__call__') as call: @@ -3049,7 +3055,7 @@ async def test_delete_log_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.DeleteLogRequest() @@ -3063,9 +3069,10 @@ async def test_delete_log_empty_call_grpc_asyncio(): async def test_write_log_entries_empty_call_grpc_asyncio(): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.write_log_entries), '__call__') as call: @@ -3074,7 +3081,7 @@ async def test_write_log_entries_empty_call_grpc_asyncio(): )) await client.write_log_entries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.WriteLogEntriesRequest() @@ -3088,9 +3095,10 @@ async def test_write_log_entries_empty_call_grpc_asyncio(): async def test_list_log_entries_empty_call_grpc_asyncio(): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_log_entries), '__call__') as call: @@ -3100,7 +3108,7 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): )) await client.list_log_entries(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.ListLogEntriesRequest() @@ -3114,9 +3122,10 @@ async def test_list_log_entries_empty_call_grpc_asyncio(): async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_monitored_resource_descriptors), '__call__') as call: @@ -3126,7 +3135,7 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): )) await client.list_monitored_resource_descriptors(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.ListMonitoredResourceDescriptorsRequest() @@ -3140,9 +3149,10 @@ async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): async def test_list_logs_empty_call_grpc_asyncio(): client = LoggingServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_logs), '__call__') as call: @@ -3153,7 +3163,7 @@ async def test_list_logs_empty_call_grpc_asyncio(): )) await client.list_logs(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging.ListLogsRequest() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a80079b399bb..6bcf51b2f5ce 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -2700,16 +2700,17 @@ def test_initialize_client_w_grpc(): def test_list_log_metrics_empty_call_grpc(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_log_metrics), '__call__') as call: call.return_value = logging_metrics.ListLogMetricsResponse() client.list_log_metrics(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.ListLogMetricsRequest() @@ -2722,16 +2723,17 @@ def test_list_log_metrics_empty_call_grpc(): def test_get_log_metric_empty_call_grpc(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client.get_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.GetLogMetricRequest() @@ -2744,16 +2746,17 @@ def test_get_log_metric_empty_call_grpc(): def test_create_log_metric_empty_call_grpc(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client.create_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.CreateLogMetricRequest() @@ -2766,16 +2769,17 @@ def test_create_log_metric_empty_call_grpc(): def test_update_log_metric_empty_call_grpc(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client.update_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.UpdateLogMetricRequest() @@ -2788,16 +2792,17 @@ def test_update_log_metric_empty_call_grpc(): def test_delete_log_metric_empty_call_grpc(): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.delete_log_metric), '__call__') as call: call.return_value = None client.delete_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.DeleteLogMetricRequest() @@ -2826,9 +2831,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_list_log_metrics_empty_call_grpc_asyncio(): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_log_metrics), '__call__') as call: @@ -2838,7 +2844,7 @@ async def test_list_log_metrics_empty_call_grpc_asyncio(): )) await client.list_log_metrics(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.ListLogMetricsRequest() @@ -2852,9 +2858,10 @@ async def test_list_log_metrics_empty_call_grpc_asyncio(): async def test_get_log_metric_empty_call_grpc_asyncio(): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_log_metric), '__call__') as call: @@ -2870,7 +2877,7 @@ async def test_get_log_metric_empty_call_grpc_asyncio(): )) await client.get_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.GetLogMetricRequest() @@ -2884,9 +2891,10 @@ async def test_get_log_metric_empty_call_grpc_asyncio(): async def test_create_log_metric_empty_call_grpc_asyncio(): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_log_metric), '__call__') as call: @@ -2902,7 +2910,7 @@ async def test_create_log_metric_empty_call_grpc_asyncio(): )) await client.create_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.CreateLogMetricRequest() @@ -2916,9 +2924,10 @@ async def test_create_log_metric_empty_call_grpc_asyncio(): async def test_update_log_metric_empty_call_grpc_asyncio(): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_log_metric), '__call__') as call: @@ -2934,7 +2943,7 @@ async def test_update_log_metric_empty_call_grpc_asyncio(): )) await client.update_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.UpdateLogMetricRequest() @@ -2948,9 +2957,10 @@ async def test_update_log_metric_empty_call_grpc_asyncio(): async def test_delete_log_metric_empty_call_grpc_asyncio(): client = MetricsServiceV2AsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_log_metric), '__call__') as call: @@ -2958,7 +2968,7 @@ async def test_delete_log_metric_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_log_metric(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = logging_metrics.DeleteLogMetricRequest() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 9e500de2c003..42b1aaca0cb7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -6659,16 +6659,17 @@ def test_initialize_client_w_grpc(): def test_list_instances_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.list_instances), '__call__') as call: call.return_value = cloud_redis.ListInstancesResponse() client.list_instances(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.ListInstancesRequest() @@ -6681,16 +6682,17 @@ def test_list_instances_empty_call_grpc(): def test_get_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_instance), '__call__') as call: call.return_value = cloud_redis.Instance() client.get_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.GetInstanceRequest() @@ -6703,16 +6705,17 @@ def test_get_instance_empty_call_grpc(): def test_get_instance_auth_string_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_instance_auth_string), '__call__') as call: call.return_value = cloud_redis.InstanceAuthString() client.get_instance_auth_string(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.GetInstanceAuthStringRequest() @@ -6725,16 +6728,17 @@ def test_get_instance_auth_string_empty_call_grpc(): def test_create_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.create_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.CreateInstanceRequest() @@ -6747,16 +6751,17 @@ def test_create_instance_empty_call_grpc(): def test_update_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.update_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.UpdateInstanceRequest() @@ -6769,16 +6774,17 @@ def test_update_instance_empty_call_grpc(): def test_upgrade_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.upgrade_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.upgrade_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.UpgradeInstanceRequest() @@ -6791,16 +6797,17 @@ def test_upgrade_instance_empty_call_grpc(): def test_import_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.import_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.import_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.ImportInstanceRequest() @@ -6813,16 +6820,17 @@ def test_import_instance_empty_call_grpc(): def test_export_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.export_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.export_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.ExportInstanceRequest() @@ -6835,16 +6843,17 @@ def test_export_instance_empty_call_grpc(): def test_failover_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.failover_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.failover_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.FailoverInstanceRequest() @@ -6857,16 +6866,17 @@ def test_failover_instance_empty_call_grpc(): def test_delete_instance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_instance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.delete_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.DeleteInstanceRequest() @@ -6879,16 +6889,17 @@ def test_delete_instance_empty_call_grpc(): def test_reschedule_maintenance_empty_call_grpc(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. 
with mock.patch.object( type(client.transport.reschedule_maintenance), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client.reschedule_maintenance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.RescheduleMaintenanceRequest() @@ -6917,9 +6928,10 @@ def test_initialize_client_w_grpc_asyncio(): async def test_list_instances_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.list_instances), '__call__') as call: @@ -6930,7 +6942,7 @@ async def test_list_instances_empty_call_grpc_asyncio(): )) await client.list_instances(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.ListInstancesRequest() @@ -6944,9 +6956,10 @@ async def test_list_instances_empty_call_grpc_asyncio(): async def test_get_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_instance), '__call__') as call: @@ -6982,7 +6995,7 @@ async def test_get_instance_empty_call_grpc_asyncio(): )) await client.get_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.GetInstanceRequest() @@ -6996,9 +7009,10 @@ async def test_get_instance_empty_call_grpc_asyncio(): async def test_get_instance_auth_string_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.get_instance_auth_string), '__call__') as call: @@ -7008,7 +7022,7 @@ async def test_get_instance_auth_string_empty_call_grpc_asyncio(): )) await client.get_instance_auth_string(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.GetInstanceAuthStringRequest() @@ -7022,9 +7036,10 @@ async def test_get_instance_auth_string_empty_call_grpc_asyncio(): async def test_create_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.create_instance), '__call__') as call: @@ -7034,7 +7049,7 @@ async def test_create_instance_empty_call_grpc_asyncio(): ) await client.create_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.CreateInstanceRequest() @@ -7048,9 +7063,10 @@ async def test_create_instance_empty_call_grpc_asyncio(): async def test_update_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.update_instance), '__call__') as call: @@ -7060,7 +7076,7 @@ async def test_update_instance_empty_call_grpc_asyncio(): ) await client.update_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.UpdateInstanceRequest() @@ -7074,9 +7090,10 @@ async def test_update_instance_empty_call_grpc_asyncio(): async def test_upgrade_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.upgrade_instance), '__call__') as call: @@ -7086,7 +7103,7 @@ async def test_upgrade_instance_empty_call_grpc_asyncio(): ) await client.upgrade_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.UpgradeInstanceRequest() @@ -7100,9 +7117,10 @@ async def test_upgrade_instance_empty_call_grpc_asyncio(): async def test_import_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.import_instance), '__call__') as call: @@ -7112,7 +7130,7 @@ async def test_import_instance_empty_call_grpc_asyncio(): ) await client.import_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.ImportInstanceRequest() @@ -7126,9 +7144,10 @@ async def test_import_instance_empty_call_grpc_asyncio(): async def test_export_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.export_instance), '__call__') as call: @@ -7138,7 +7157,7 @@ async def test_export_instance_empty_call_grpc_asyncio(): ) await client.export_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.ExportInstanceRequest() @@ -7152,9 +7171,10 @@ async def test_export_instance_empty_call_grpc_asyncio(): async def test_failover_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.failover_instance), '__call__') as call: @@ -7164,7 +7184,7 @@ async def test_failover_instance_empty_call_grpc_asyncio(): ) await client.failover_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.FailoverInstanceRequest() @@ -7178,9 +7198,10 @@ async def test_failover_instance_empty_call_grpc_asyncio(): async def test_delete_instance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.delete_instance), '__call__') as call: @@ -7190,7 +7211,7 @@ async def test_delete_instance_empty_call_grpc_asyncio(): ) await client.delete_instance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.DeleteInstanceRequest() @@ -7204,9 +7225,10 @@ async def test_delete_instance_empty_call_grpc_asyncio(): async def test_reschedule_maintenance_empty_call_grpc_asyncio(): client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), + transport="grpc_asyncio", ) - # Mock the actual call within the gRPC stub, and fake the request. + # Mock the actual call, and fake the request. with mock.patch.object( type(client.transport.reschedule_maintenance), '__call__') as call: @@ -7216,7 +7238,7 @@ async def test_reschedule_maintenance_empty_call_grpc_asyncio(): ) await client.reschedule_maintenance(request=None) - # Establish that the underlying gRPC stub method was called. + # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] request_msg = cloud_redis.RescheduleMaintenanceRequest() @@ -8775,6 +8797,248 @@ def test_initialize_client_w_rest(): assert client is not None +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_auth_string_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + client.get_instance_auth_string(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceAuthStringRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_upgrade_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + client.upgrade_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpgradeInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + client.import_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ImportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + client.export_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ExportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_failover_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + client.failover_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_reschedule_maintenance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + client.reschedule_maintenance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg + + def test_cloud_redis_rest_lro_client(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10442,6 +10706,281 @@ def test_initialize_client_w_rest_asyncio(): assert client is not None +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instances_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + await client.list_instances(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + await client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_auth_string_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance_auth_string), + '__call__') as call: + await client.get_instance_auth_string(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceAuthStringRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + await client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + await client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_upgrade_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.upgrade_instance), + '__call__') as call: + await client.upgrade_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpgradeInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.import_instance), + '__call__') as call: + await client.import_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ImportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_export_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_instance), + '__call__') as call: + await client.export_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ExportInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_failover_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.failover_instance), + '__call__') as call: + await client.failover_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.FailoverInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + await client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_reschedule_maintenance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.reschedule_maintenance), + '__call__') as call: + await client.reschedule_maintenance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.RescheduleMaintenanceRequest() + + assert args[0] == request_msg + + def test_cloud_redis_rest_asyncio_lro_client(): if not HAS_ASYNC_REST_EXTRA: pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") From ecc9865c9cfdbd6e9a6a3f5f4e3f33403cb059e9 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Oct 2024 20:25:34 +0200 Subject: [PATCH 1193/1339] chore(deps): update all dependencies (#2202) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 103 ++++++++++++---------- 1 file changed, 56 insertions(+), 47 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7bfcade326ce..738674515602 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -133,53 +133,62 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.66.1 \ - --hash=sha256:0e6c9b42ded5d02b6b1fea3a25f036a2236eeb75d0579bfd43c0018c88bf0a3e \ - --hash=sha256:161d5c535c2bdf61b95080e7f0f017a1dfcb812bf54093e71e5562b16225b4ce \ - --hash=sha256:17663598aadbedc3cacd7bbde432f541c8e07d2496564e22b214b22c7523dac8 \ - --hash=sha256:1c17ebcec157cfb8dd445890a03e20caf6209a5bd4ac5b040ae9dbc59eef091d \ - --hash=sha256:292a846b92cdcd40ecca46e694997dd6b9be6c4c01a94a0dfb3fcb75d20da858 \ - --hash=sha256:2ca2559692d8e7e245d456877a85ee41525f3ed425aa97eb7a70fc9a79df91a0 \ - --hash=sha256:307b1d538140f19ccbd3aed7a93d8f71103c5d525f3c96f8616111614b14bf2a \ - --hash=sha256:30a1c2cf9390c894c90bbc70147f2372130ad189cffef161f0432d0157973f45 \ - --hash=sha256:31a049daa428f928f21090403e5d18ea02670e3d5d172581670be006100db9ef \ - 
--hash=sha256:35334f9c9745add3e357e3372756fd32d925bd52c41da97f4dfdafbde0bf0ee2 \ - --hash=sha256:3750c5a00bd644c75f4507f77a804d0189d97a107eb1481945a0cf3af3e7a5ac \ - --hash=sha256:3885f037eb11f1cacc41f207b705f38a44b69478086f40608959bf5ad85826dd \ - --hash=sha256:4573608e23f7e091acfbe3e84ac2045680b69751d8d67685ffa193a4429fedb1 \ - --hash=sha256:4825a3aa5648010842e1c9d35a082187746aa0cdbf1b7a2a930595a94fb10fce \ - --hash=sha256:4877ba180591acdf127afe21ec1c7ff8a5ecf0fe2600f0d3c50e8c4a1cbc6492 \ - --hash=sha256:48b0d92d45ce3be2084b92fb5bae2f64c208fea8ceed7fccf6a7b524d3c4942e \ - --hash=sha256:4d813316d1a752be6f5c4360c49f55b06d4fe212d7df03253dfdae90c8a402bb \ - --hash=sha256:5dd67ed9da78e5121efc5c510f0122a972216808d6de70953a740560c572eb44 \ - --hash=sha256:6f914386e52cbdeb5d2a7ce3bf1fdfacbe9d818dd81b6099a05b741aaf3848bb \ - --hash=sha256:7101db1bd4cd9b880294dec41a93fcdce465bdbb602cd8dc5bd2d6362b618759 \ - --hash=sha256:7e06aa1f764ec8265b19d8f00140b8c4b6ca179a6dc67aa9413867c47e1fb04e \ - --hash=sha256:84ca1be089fb4446490dd1135828bd42a7c7f8421e74fa581611f7afdf7ab761 \ - --hash=sha256:8a1e224ce6f740dbb6b24c58f885422deebd7eb724aff0671a847f8951857c26 \ - --hash=sha256:97ae7edd3f3f91480e48ede5d3e7d431ad6005bfdbd65c1b56913799ec79e791 \ - --hash=sha256:9c9bebc6627873ec27a70fc800f6083a13c70b23a5564788754b9ee52c5aef6c \ - --hash=sha256:a013c5fbb12bfb5f927444b477a26f1080755a931d5d362e6a9a720ca7dbae60 \ - --hash=sha256:a66fe4dc35d2330c185cfbb42959f57ad36f257e0cc4557d11d9f0a3f14311df \ - --hash=sha256:a92c4f58c01c77205df6ff999faa008540475c39b835277fb8883b11cada127a \ - --hash=sha256:aa8ba945c96e73de29d25331b26f3e416e0c0f621e984a3ebdb2d0d0b596a3b3 \ - --hash=sha256:b0aa03d240b5539648d996cc60438f128c7f46050989e35b25f5c18286c86734 \ - --hash=sha256:b1b24c23d51a1e8790b25514157d43f0a4dce1ac12b3f0b8e9f66a5e2c4c132f \ - --hash=sha256:b7ffb8ea674d68de4cac6f57d2498fef477cef582f1fa849e9f844863af50083 \ - --hash=sha256:b9feb4e5ec8dc2d15709f4d5fc367794d69277f5d680baf1910fc9915c633524 \ - 
--hash=sha256:bff2096bdba686019fb32d2dde45b95981f0d1490e054400f70fc9a8af34b49d \ - --hash=sha256:c30aeceeaff11cd5ddbc348f37c58bcb96da8d5aa93fed78ab329de5f37a0d7a \ - --hash=sha256:c9f80f9fad93a8cf71c7f161778ba47fd730d13a343a46258065c4deb4b550c0 \ - --hash=sha256:cfd349de4158d797db2bd82d2020554a121674e98fbe6b15328456b3bf2495bb \ - --hash=sha256:d0cd7050397b3609ea51727b1811e663ffda8bda39c6a5bb69525ef12414b503 \ - --hash=sha256:d639c939ad7c440c7b2819a28d559179a4508783f7e5b991166f8d7a34b52815 \ - --hash=sha256:e3ba04659e4fce609de2658fe4dbf7d6ed21987a94460f5f92df7579fd5d0e22 \ - --hash=sha256:ecfe735e7a59e5a98208447293ff8580e9db1e890e232b8b292dc8bd15afc0d2 \ - --hash=sha256:ef82d361ed5849d34cf09105d00b94b6728d289d6b9235513cb2fcc79f7c432c \ - --hash=sha256:f03a5884c56256e08fd9e262e11b5cfacf1af96e2ce78dc095d2c41ccae2c80d \ - --hash=sha256:f1fe60d0772831d96d263b53d83fb9a3d050a94b0e94b6d004a5ad111faa5b5b \ - --hash=sha256:f517fd7259fe823ef3bd21e508b653d5492e706e9f0ef82c16ce3347a8a5620c \ - --hash=sha256:fdb14bad0835914f325349ed34a51940bc2ad965142eb3090081593c6e347be9 +grpcio==1.66.2 \ + --hash=sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd \ + --hash=sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604 \ + --hash=sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73 \ + --hash=sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3 \ + --hash=sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50 \ + --hash=sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6 \ + --hash=sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34 \ + --hash=sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249 \ + --hash=sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75 \ + --hash=sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8 \ + 
--hash=sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453 \ + --hash=sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8 \ + --hash=sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d \ + --hash=sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c \ + --hash=sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c \ + --hash=sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c \ + --hash=sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39 \ + --hash=sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01 \ + --hash=sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231 \ + --hash=sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae \ + --hash=sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a \ + --hash=sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d \ + --hash=sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987 \ + --hash=sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a \ + --hash=sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7 \ + --hash=sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7 \ + --hash=sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3 \ + --hash=sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b \ + --hash=sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf \ + --hash=sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8 \ + --hash=sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf \ + --hash=sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7 \ + --hash=sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839 \ + --hash=sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e \ + 
--hash=sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b \ + --hash=sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3 \ + --hash=sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee \ + --hash=sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54 \ + --hash=sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e \ + --hash=sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc \ + --hash=sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd \ + --hash=sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d \ + --hash=sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed \ + --hash=sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7 \ + --hash=sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4 \ + --hash=sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a \ + --hash=sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec \ + --hash=sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8 \ + --hash=sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd \ + --hash=sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c \ + --hash=sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46 \ + --hash=sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e \ + --hash=sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf \ + --hash=sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa \ + --hash=sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679 # via # googleapis-common-protos # grpc-google-iam-v1 From ae2f63265d64354d78713cdc5da94fdbff3721bf Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 9 Oct 2024 14:43:49 -0400 Subject: [PATCH 1194/1339] fix: streaming for sync REST API calls (#2204) --- 
.../%sub/services/%service/_shared_macros.j2 | 12 ++++++++-- .../services/%service/transports/rest.py.j2 | 2 +- .../%service/transports/rest_asyncio.py.j2 | 2 +- .../cloud_redis/transports/rest_asyncio.py | 24 +++++++++---------- 4 files changed, 24 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index c35c8975001e..cc795cc91aa3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -152,7 +152,7 @@ def _get_http_options(): return http_options {% endmacro %} -{% macro response_method(body_spec, is_async=False) %} +{% macro response_method(body_spec, is_async=False, is_streaming_method=False) %} {% set async_prefix = "async " if is_async else "" %} {% set await_prefix = "await " if is_async else "" %} @staticmethod @@ -177,6 +177,14 @@ def _get_http_options(): {% if body_spec %} data=body, {% endif %} + {% if not is_async and is_streaming_method %} + {# NOTE: The underlying `requests` library used for making a sync request + # requires us to set `stream=True` to avoid loading the entire response + # into memory at once. For an async request, given its nature where it + # reads data chunk by chunk, this is not required. 
+ #} + stream=True, + {% endif %} ) return response {% endmacro %} @@ -400,7 +408,7 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m return hash("{{ async_method_name_prefix }}{{ service.name }}RestTransport.{{ name }}") {% set body_spec = api.mixin_http_options["{}".format(name)][0].body %} - {{ response_method(body_spec) | indent(4) }} + {{ response_method(body_spec, is_async=is_async, is_streaming_method=None) | indent(4) }} {{ async_prefix }}def __call__(self, request: {{ sig.request_type }}, *, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 796aa9932570..6bdbbbcbc406 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -196,7 +196,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% if method.http_options and not method.client_streaming %} {% set body_spec = method.http_options[0].body %} - {{ shared_macros.response_method(body_spec)|indent(8) }} + {{ shared_macros.response_method(body_spec, is_async=False, is_streaming_method=method.server_streaming)|indent(8) }} {% endif %}{# method.http_options and not method.client_streaming #} def __call__(self, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index be65f9db6812..6ae4e677865c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -155,7 +155,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. #} {% if method.http_options and not method.client_streaming %} {% set body_spec = method.http_options[0].body %} - {{ shared_macros.response_method(body_spec, is_async=True)|indent(8) }} + {{ shared_macros.response_method(body_spec, is_async=True, is_streaming_method=None)|indent(8) }} {% endif %}{# method.http_options and not method.client_streaming and not method.paged_result_field #} async def __call__(self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 9b72bcc6b139..9c461e3c1e62 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -1670,7 +1670,7 @@ def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetLocation") @staticmethod - def _get_response( + async def _get_response( host, metadata, query_params, @@ -1683,7 +1683,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, @@ -1747,7 +1747,7 @@ def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListLocations") @staticmethod - def _get_response( + async def _get_response( host, metadata, 
query_params, @@ -1760,7 +1760,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, @@ -1824,7 +1824,7 @@ def __hash__(self): return hash("AsyncCloudRedisRestTransport.CancelOperation") @staticmethod - def _get_response( + async def _get_response( host, metadata, query_params, @@ -1837,7 +1837,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, @@ -1894,7 +1894,7 @@ def __hash__(self): return hash("AsyncCloudRedisRestTransport.DeleteOperation") @staticmethod - def _get_response( + async def _get_response( host, metadata, query_params, @@ -1907,7 +1907,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, @@ -1964,7 +1964,7 @@ def __hash__(self): return hash("AsyncCloudRedisRestTransport.GetOperation") @staticmethod - def _get_response( + async def _get_response( host, metadata, query_params, @@ -1977,7 +1977,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, @@ -2041,7 +2041,7 @@ def __hash__(self): return hash("AsyncCloudRedisRestTransport.ListOperations") @staticmethod - def _get_response( + async def _get_response( host, 
metadata, query_params, @@ -2054,7 +2054,7 @@ def _get_response( method = transcoded_request['method'] headers = dict(metadata) headers['Content-Type'] = 'application/json' - response = getattr(session, method)( + response = await getattr(session, method)( "{host}{uri}".format(host=host, uri=uri), timeout=timeout, headers=headers, From 4da2bf1191fc3f6c056e6c9e3582263a69c70bc0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 14:48:33 -0400 Subject: [PATCH 1195/1339] chore(main): release 1.19.0 (#2108) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 17 +++++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3e62651ae50c..1da6f68f062c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,23 @@ # Changelog +## [1.19.0](https://github.com/googleapis/gapic-generator-python/compare/v1.18.5...v1.19.0) (2024-10-09) + + +### Features + +* Add async rest transport support in gapics ([#2164](https://github.com/googleapis/gapic-generator-python/issues/2164)) ([2949465](https://github.com/googleapis/gapic-generator-python/commit/29494651fb39719af920ee1c114c82bd903e544b)) +* Add support for reading ClientLibrarySettings from service configuration YAML ([#2098](https://github.com/googleapis/gapic-generator-python/issues/2098)) ([11e3967](https://github.com/googleapis/gapic-generator-python/commit/11e3967b6a3b1e86f5ec0f5387bd340e3a8ae9d0)) +* Implement async rest transport constructor ([#2123](https://github.com/googleapis/gapic-generator-python/issues/2123)) ([2809753](https://github.com/googleapis/gapic-generator-python/commit/28097536e1a47063a5d3211e9c1c498a1f06c724)) +* Leverage async anonymous credentials in 
tests ([#2105](https://github.com/googleapis/gapic-generator-python/issues/2105)) ([4afac87](https://github.com/googleapis/gapic-generator-python/commit/4afac87efc8fcdd7090d003b6247be64071b611d)) + + +### Bug Fixes + +* Add support for field with name 'self' ([#2205](https://github.com/googleapis/gapic-generator-python/issues/2205)) ([ed88fe2](https://github.com/googleapis/gapic-generator-python/commit/ed88fe298647cdad310a5341f931a0de42f1b81e)) +* Resolve issue where explicit routing metadata was not sent in async clients ([#2133](https://github.com/googleapis/gapic-generator-python/issues/2133)) ([c222b12](https://github.com/googleapis/gapic-generator-python/commit/c222b125d741426259d82e726c0c07397d099a8a)) +* Streaming for sync REST API calls ([#2204](https://github.com/googleapis/gapic-generator-python/issues/2204)) ([ce3b84c](https://github.com/googleapis/gapic-generator-python/commit/ce3b84c67a31785f45eb46f154ef08af6edc9a36)) + ## [1.18.5](https://github.com/googleapis/gapic-generator-python/compare/v1.18.4...v1.18.5) (2024-08-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5c794454d8a1..5c997bf55293 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.18.5" +version = "1.19.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 842e324bb663d59499577a6da0163e922ae22ece Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 10 Oct 2024 13:24:19 -0400 Subject: [PATCH 1196/1339] fix: add default library settings for incorrect lib version (#2212) --- packages/gapic-generator/gapic/schema/api.py | 13 +++++---- .../tests/unit/schema/test_api.py | 29 +++++++++++++++++++ 2 files changed, 36 insertions(+), 6 
deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index ce2fdc8022e5..f7a7669f8c66 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -706,13 +706,14 @@ def all_library_settings( for library_setting in self.service_yaml_config.publishing.library_settings } - # Add default settings for the current proto package - if not result: - result = { - self.naming.proto_package: client_pb2.ClientLibrarySettings( - version=self.naming.proto_package + # NOTE: Add default settings for the current proto package + # for the following cases: + # - if library settings are not specified in the service config. + # - if library_settings.version != self.naming.proto_package (proto package name) + if self.naming.proto_package not in result: + result[self.naming.proto_package] = client_pb2.ClientLibrarySettings( + version=self.naming.proto_package ) - } return result diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index c8bf9706d39c..afe9b1434fda 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2741,6 +2741,35 @@ def test_read_empty_python_settings_from_service_yaml(): == client_pb2.PythonSettings.ExperimentalFeatures() assert api_schema.all_library_settings["google.example.v1beta1"].python_settings.experimental_features.rest_async_io_enabled \ == False + assert api_schema.all_library_settings[api_schema.naming.proto_package].python_settings \ + == client_pb2.PythonSettings() + + +def test_incorrect_library_settings_version(): + # NOTE: This test case ensures that the generator is able to read + # from the default library settings if the version specified against the + # library settings in the service yaml of an API differs from the version + # of the API. 
+ service_yaml_config = { + "apis": [ + {"name": "google.example.v1beta1.ServiceOne.Example1"}, + ], + "publishing": { + "library_settings": [ + { + "version": "google.example.v1", + "python_settings": { + "experimental_features": {"rest_async_io_enabled": True}, + }, + } + ] + }, + } + cli_options = Options(service_yaml_config=service_yaml_config) + fd = get_file_descriptor_proto_for_tests(fields=[]) + api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) + assert api_schema.all_library_settings[api_schema.naming.proto_package].python_settings \ + == client_pb2.PythonSettings() def test_python_settings_duplicate_version_raises_error(): From a4d42b412166969a4c2f8a0ee568308a3f8f5f7a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 10 Oct 2024 15:16:10 -0400 Subject: [PATCH 1197/1339] fix: use disambiguated name for rpcs to avoid collisions (#2217) --- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 12 +++++------ .../%name_%version/%sub/test_%service.py.j2 | 2 +- .../gapic/%name_%version/%sub/test_macros.j2 | 20 +++++++++---------- .../test_reserved_method_names.proto | 15 ++++++++++++++ 5 files changed, 33 insertions(+), 18 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 064e17924f8d..5555339c4d1f 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -345,7 +345,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.operation_service %}{# Extended Operations LRO #} def {{ method.name|snake_case }}_unary(self, {% else %} - def {{ method.name|snake_case }}(self, + def {{ 
method.safe_name|snake_case }}(self, {% endif %}{# Extended Operations LRO #} {% if not method.client_streaming %} request: Optional[Union[{{ method.input.ident }}, dict]] = None, diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 9387a124e2d5..c84936dc87c7 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -516,7 +516,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endif %} -{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} +{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -579,7 +579,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): ) {% endif %} {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method.safe_name|snake_case }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1053,7 +1053,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %}{% if method.http_options %} +{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = 
method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when client streaming are supported. #} {% if not method.client_streaming %} @pytest.mark.parametrize("request_type", [ @@ -1250,7 +1250,7 @@ def test_{{ method.name|snake_case }}_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value {% if method.client_streaming %} - response = client.{{ method.name|snake_case }}(iter(requests)) + response = client.{{ method.safe_name|snake_case }}(iter(requests)) {% elif method.server_streaming %} with mock.patch.object(response_value, 'iter_content') as iter_content: iter_content.return_value = iter(json_return_value) @@ -1546,7 +1546,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ response_value.request = Request() req.return_value = response_value {% if method.client_streaming %} - client.{{ method.name|snake_case }}(iter(requests)) + client.{{ method.safe_name|snake_case }}(iter(requests)) {% else %} client.{{ method_name }}(request) {% endif %} @@ -1814,7 +1814,7 @@ def test_{{ method_name }}_rest_no_http_options(): {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.name|snake_case %} + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 
0a3b1d21f910..17603ca6d6e2 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -946,7 +946,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = (method.name + ("_unary" if method.operation_service else "")) | snake_case %} + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index d76cd91a824e..38c43b0bad90 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -238,7 +238,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% if method.client_streaming %} request = [{}] - await client.{{ method.name|snake_case }}(request) + await client.{{ method.safe_name|snake_case }}(request) {% else %} request = {} await client.{{ method_name }}(request) @@ -255,7 +255,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% endif %} {% if method.client_streaming %} - await client.{{ method.name|snake_case }}(request) + await client.{{ method.safe_name|snake_case }}(request) {% else %} await client.{{ method_name }}(request) {% endif %} @@ -321,9 +321,9 @@ async def test_{{ 
method_name }}_async(transport: str = 'grpc_asyncio', request_ )) {% endif %} {% if method.client_streaming and method.server_streaming %} - response = await client.{{ method.name|snake_case }}(iter(requests)) + response = await client.{{ method.safe_name|snake_case }}(iter(requests)) {% elif method.client_streaming and not method.server_streaming %} - response = await (await client.{{ method.name|snake_case }}(iter(requests))) + response = await (await client.{{ method.safe_name|snake_case }}(iter(requests))) {% else %} response = await client.{{ method_name }}(request) {% endif %} @@ -1001,7 +1001,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endmacro %} {% macro rest_required_tests(method, service, numeric_enums=False, full_extended_lro=False) %} -{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} +{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. 
#} {% if not method.client_streaming %} def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): @@ -1460,7 +1460,7 @@ def test_{{ method_name }}_rest_no_http_options(): #} {% macro method_call_test_generic(test_name, method, service, api, transport, request_dict, is_async=False, routing_param=None) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% with method_name = (method.name + ("_unary" if method.operation_service else "")) | snake_case %} +{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} {% set async_method_prefix = "async " if is_async else "" %} {% if is_async %} @pytest.mark.asyncio @@ -1713,7 +1713,7 @@ def test_unsupported_parameter_rest_asyncio(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.name|snake_case %} +{% set method_name = method.safe_name|snake_case %} {{async_decorator}} {{async_prefix}}def test_{{ method_name }}_{{transport_name}}_error(): {% if transport_name == 'rest_asyncio' %} @@ -1763,7 +1763,7 @@ def test_initialize_client_w_{{transport_name}}(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.name|snake_case %} +{% set method_name = method.safe_name|snake_case %} {% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %} {{ async_decorator }} {{ async_prefix }}def test_{{ method_name }}_{{transport_name}}_bad_request(request_type={{ method.input.ident }}): @@ -1862,7 +1862,7 @@ def test_initialize_client_w_{{transport_name}}(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% 
set method_name = method.name|snake_case %} +{% set method_name = method.safe_name|snake_case %} {# NOTE: set method_output to method.extended_lro.operation_type for the following method types: # (method.extended_lro and not full_extended_lro) #} @@ -2183,7 +2183,7 @@ def test_initialize_client_w_{{transport_name}}(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.name|snake_case %} +{% set method_name = method.safe_name|snake_case %} {% set async_method_prefix = "Async" if is_async else "" %} {{async_decorator}} @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto b/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto index d8f23494fe59..ba89ef0f25e3 100644 --- a/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto +++ b/packages/gapic-generator/tests/fragments/test_reserved_method_names.proto @@ -30,6 +30,13 @@ service MyService { }; }; + rpc Import(CreateImportRequest) returns (CreateImportResponse) { + option (google.api.http) = { + body: "*" + post: "/import/v1" + }; + }; + rpc GrpcChannel(GrpcChannelRequest) returns (GrpcChannelResponse) { option (google.api.http) = { body: "*" @@ -59,6 +66,14 @@ message CreateChannelResponse { string info = 1; } +message CreateImportRequest { + string info = 1; +} + +message CreateImportResponse { + string info = 1; +} + message GrpcChannelRequest { string grpc_channel = 1; string info = 2; From f84b46ef71da739d7105f0dc13c02bdd29251c17 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 10 Oct 2024 15:41:57 -0400 Subject: [PATCH 1198/1339] fix: resolve issue with wait operation mixin (#2218) --- .../%service/transports/_mixins.py.j2 | 2 +- .../%service/transports/_mixins.py.j2 | 2 +- .../services/cloud_redis/async_client.py 
| 56 +++++ .../redis_v1/services/cloud_redis/client.py | 56 +++++ .../services/cloud_redis/transports/base.py | 14 ++ .../services/cloud_redis/transports/grpc.py | 18 ++ .../cloud_redis/transports/grpc_asyncio.py | 23 ++ .../services/cloud_redis/transports/rest.py | 104 ++++++++ .../cloud_redis/transports/rest_asyncio.py | 113 +++++++++ .../cloud_redis/transports/rest_base.py | 30 +++ .../unit/gapic/redis_v1/test_cloud_redis.py | 234 ++++++++++++++++++ .../tests/integration/redis_v1.yaml | 3 + 12 files changed, 653 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 index 00b0d53b7112..50a49d6d000a 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/_mixins.py.j2 @@ -53,7 +53,7 @@ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if "delete_operation" not in self._stubs: + if "wait_operation" not in self._stubs: self._stubs["wait_operation"] = self.grpc_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 index 1867d0ac8c50..84b085ee1a35 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -71,7 +71,7 @@ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if "delete_operation" not in self._stubs: + if "wait_operation" not in self._stubs: self._stubs["wait_operation"] = self.grpc_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index ca71e086e398..24aa303c8997 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1912,6 +1912,62 @@ async def cancel_operation( # Send the request. 
await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + async def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.wait_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + async def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index cb40c31c34ec..046c051a3837 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -2290,6 +2290,62 @@ def cancel_operation( # Send the request. rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.wait_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + def get_location( self, request: Optional[locations_pb2.GetLocationRequest] = None, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index feb12006b170..57832b4d914e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -207,6 +207,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.wait_operation: gapic_v1.method.wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -358,6 +363,15 @@ def delete_operation( ]: raise NotImplementedError() + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + 
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def get_location(self, ) -> Callable[ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 87b5e7e0c06a..0df383915736 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -644,6 +644,24 @@ def cancel_operation( ) return self._stubs["cancel_operation"] + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + @property def get_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 385d9f24490a..78b7c4d028cb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -699,6 +699,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.wait_operation: self._wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -749,6 +754,24 @@ def cancel_operation( ) return self._stubs["cancel_operation"] + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + @property def get_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 182ce777148e..750a09fa6bbb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -474,6 +474,27 @@ def post_list_operations( """ return response + def pre_wait_operation( + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + @dataclasses.dataclass class CloudRedisRestStub: @@ -618,6 +639,13 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: 'uri': '/v1/{name=projects/*/locations/*}/operations', }, ], + 'google.longrunning.Operations.WaitOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ], } rest_transport = operations_v1.OperationsRestTransport( @@ -1968,6 +1996,82 @@ def __call__(self, resp = self._interceptor.post_list_operations(resp) return resp + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.WaitOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.WaitOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + request, metadata = self._interceptor.pre_wait_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + + # Send the request + response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_wait_operation(resp) + return resp + @property def kind(self) -> str: return "rest" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 9c461e3c1e62..e82769893051 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -484,6 +484,27 @@ async def post_list_operations( """ return response + async def 
pre_wait_operation( + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + @dataclasses.dataclass class AsyncCloudRedisRestStub: @@ -656,6 +677,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.wait_operation: self._wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): @@ -1580,6 +1606,13 @@ def operations_client(self) -> AsyncOperationsRestClient: 'uri': '/v1/{name=projects/*/locations/*}/operations', }, ], + 'google.longrunning.Operations.WaitOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ], } rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore @@ -2109,6 +2142,86 @@ async def __call__(self, resp = await self._interceptor.post_list_operations(resp) return resp + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.WaitOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): 
+ + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: operations_pb2.WaitOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, str]]=(), + ) -> operations_pb2.Operation: + + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + request, metadata = await self._interceptor.pre_wait_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + + # Send the request + response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_wait_operation(resp) + return resp + @property def kind(self) -> str: return "rest_asyncio" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index dc561b714fd1..e63a3d850341 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -703,6 +703,36 @@ def _get_query_params_json(transcoded_request): query_params = json.loads(json.dumps(transcoded_request['query_params'])) return query_params + class _BaseWaitOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = 
json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + __all__=( '_BaseCloudRedisRestTransport', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 42b1aaca0cb7..920b97dfca36 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -8789,6 +8789,57 @@ def test_list_operations_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.WaitOperationRequest, + dict, +]) +def test_wait_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + + response = client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest(): client = CloudRedisClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10696,6 +10747,60 @@ async def test_list_operations_rest_asyncio(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_wait_operation_rest_asyncio_bad_request(request_type=operations_pb2.WaitOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + await client.wait_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.WaitOperationRequest, + dict, +]) +async def test_wait_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + + response = await client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + def test_initialize_client_w_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") @@ -11055,6 +11160,7 @@ def test_cloud_redis_base_transport(): 'get_location', 'list_locations', 'get_operation', + 'wait_operation', 'cancel_operation', 'delete_operation', 'list_operations', @@ -11863,6 +11969,134 @@ async def test_cancel_operation_from_dict_async(): ) call.assert_called() +def test_wait_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_wait_operation(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_wait_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_wait_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_wait_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + def test_get_operation(transport: str = "grpc"): client = CloudRedisClient( diff --git a/packages/gapic-generator/tests/integration/redis_v1.yaml b/packages/gapic-generator/tests/integration/redis_v1.yaml index 47440ccdad52..efb6675bbfcb 100644 --- a/packages/gapic-generator/tests/integration/redis_v1.yaml +++ b/packages/gapic-generator/tests/integration/redis_v1.yaml @@ -49,6 +49,9 @@ http: get: '/v1/{name=projects/*/locations/*/operations/*}' - selector: google.longrunning.Operations.ListOperations get: '/v1/{name=projects/*/locations/*}/operations' + - selector: google.longrunning.Operations.WaitOperation + post: '/v2/{name=projects/*/locations/*/operations/*}:wait' + body: '*' authentication: rules: From 0faf062aa86f147f6dceb4b1a98aa6f2a1699012 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 10 Oct 2024 16:03:20 -0400 Subject: [PATCH 1199/1339] build: add asyncio to requirements.txt (#2219) --- packages/gapic-generator/requirements.in | 1 + packages/gapic-generator/requirements.txt | 900 ++++++++++++++---- .../rules_python_gapic/py_gapic.bzl | 1 + 3 files changed, 698 insertions(+), 204 deletions(-) diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in index db955dc7ec22..d6a1c8d6f1f6 100644 --- a/packages/gapic-generator/requirements.in +++ b/packages/gapic-generator/requirements.in @@ -11,3 +11,4 @@ proto-plus pytest-asyncio libcst inflection +aiohttp diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 738674515602..5003841aceea 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -2,8 +2,117 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile 
--generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # +aiohappyeyeballs==2.4.3 \ + --hash=sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586 \ + --hash=sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572 + # via aiohttp +aiohttp==3.10.9 \ + --hash=sha256:02d1d6610588bcd743fae827bd6f2e47e0d09b346f230824b4c6fb85c6065f9c \ + --hash=sha256:03690541e4cc866eef79626cfa1ef4dd729c5c1408600c8cb9e12e1137eed6ab \ + --hash=sha256:0bc059ecbce835630e635879f5f480a742e130d9821fbe3d2f76610a6698ee25 \ + --hash=sha256:0c21c82df33b264216abffff9f8370f303dab65d8eee3767efbbd2734363f677 \ + --hash=sha256:1298b854fd31d0567cbb916091be9d3278168064fca88e70b8468875ef9ff7e7 \ + --hash=sha256:1321658f12b6caffafdc35cfba6c882cb014af86bef4e78c125e7e794dfb927b \ + --hash=sha256:143b0026a9dab07a05ad2dd9e46aa859bffdd6348ddc5967b42161168c24f857 \ + --hash=sha256:16e6a51d8bc96b77f04a6764b4ad03eeef43baa32014fce71e882bd71302c7e4 \ + --hash=sha256:172ad884bb61ad31ed7beed8be776eb17e7fb423f1c1be836d5cb357a096bf12 \ + --hash=sha256:17c272cfe7b07a5bb0c6ad3f234e0c336fb53f3bf17840f66bd77b5815ab3d16 \ + --hash=sha256:1a0ee6c0d590c917f1b9629371fce5f3d3f22c317aa96fbdcce3260754d7ea21 \ + --hash=sha256:2746d8994ebca1bdc55a1e998feff4e94222da709623bb18f6e5cfec8ec01baf \ + --hash=sha256:2914caa46054f3b5ff910468d686742ff8cff54b8a67319d75f5d5945fd0a13d \ + --hash=sha256:2bbf94d4a0447705b7775417ca8bb8086cc5482023a6e17cdc8f96d0b1b5aba6 \ + --hash=sha256:2bd9f3eac515c16c4360a6a00c38119333901b8590fe93c3257a9b536026594d \ + --hash=sha256:2c33fa6e10bb7ed262e3ff03cc69d52869514f16558db0626a7c5c61dde3c29f \ + --hash=sha256:2d37f4718002863b82c6f391c8efd4d3a817da37030a29e2682a94d2716209de \ + --hash=sha256:3668d0c2a4d23fb136a753eba42caa2c0abbd3d9c5c87ee150a716a16c6deec1 \ + --hash=sha256:36d4fba838be5f083f5490ddd281813b44d69685db910907636bc5dca6322316 \ + --hash=sha256:40ff5b7660f903dc587ed36ef08a88d46840182d9d4b5694e7607877ced698a1 \ + 
--hash=sha256:42775de0ca04f90c10c5c46291535ec08e9bcc4756f1b48f02a0657febe89b10 \ + --hash=sha256:482c85cf3d429844396d939b22bc2a03849cb9ad33344689ad1c85697bcba33a \ + --hash=sha256:4e6cb75f8ddd9c2132d00bc03c9716add57f4beff1263463724f6398b813e7eb \ + --hash=sha256:4edc3fd701e2b9a0d605a7b23d3de4ad23137d23fc0dbab726aa71d92f11aaaf \ + --hash=sha256:4fd16b30567c5b8e167923be6e027eeae0f20cf2b8a26b98a25115f28ad48ee0 \ + --hash=sha256:5002a02c17fcfd796d20bac719981d2fca9c006aac0797eb8f430a58e9d12431 \ + --hash=sha256:51d0a4901b27272ae54e42067bc4b9a90e619a690b4dc43ea5950eb3070afc32 \ + --hash=sha256:558b3d223fd631ad134d89adea876e7fdb4c93c849ef195049c063ada82b7d08 \ + --hash=sha256:5c070430fda1a550a1c3a4c2d7281d3b8cfc0c6715f616e40e3332201a253067 \ + --hash=sha256:5f392ef50e22c31fa49b5a46af7f983fa3f118f3eccb8522063bee8bfa6755f8 \ + --hash=sha256:60555211a006d26e1a389222e3fab8cd379f28e0fbf7472ee55b16c6c529e3a6 \ + --hash=sha256:608cecd8d58d285bfd52dbca5b6251ca8d6ea567022c8a0eaae03c2589cd9af9 \ + --hash=sha256:60ad5b8a7452c0f5645c73d4dad7490afd6119d453d302cd5b72b678a85d6044 \ + --hash=sha256:63649309da83277f06a15bbdc2a54fbe75efb92caa2c25bb57ca37762789c746 \ + --hash=sha256:6ebdc3b3714afe1b134b3bbeb5f745eed3ecbcff92ab25d80e4ef299e83a5465 \ + --hash=sha256:6f3c6648aa123bcd73d6f26607d59967b607b0da8ffcc27d418a4b59f4c98c7c \ + --hash=sha256:7003f33f5f7da1eb02f0446b0f8d2ccf57d253ca6c2e7a5732d25889da82b517 \ + --hash=sha256:776e9f3c9b377fcf097c4a04b241b15691e6662d850168642ff976780609303c \ + --hash=sha256:85711eec2d875cd88c7eb40e734c4ca6d9ae477d6f26bd2b5bb4f7f60e41b156 \ + --hash=sha256:87d1e4185c5d7187684d41ebb50c9aeaaaa06ca1875f4c57593071b0409d2444 \ + --hash=sha256:8a3f063b41cc06e8d0b3fcbbfc9c05b7420f41287e0cd4f75ce0a1f3d80729e6 \ + --hash=sha256:8b3fb28a9ac8f2558760d8e637dbf27aef1e8b7f1d221e8669a1074d1a266bb2 \ + --hash=sha256:8bd9125dd0cc8ebd84bff2be64b10fdba7dc6fd7be431b5eaf67723557de3a31 \ + --hash=sha256:8be1a65487bdfc285bd5e9baf3208c2132ca92a9b4020e9f27df1b16fab998a9 \ + 
--hash=sha256:8cc0d13b4e3b1362d424ce3f4e8c79e1f7247a00d792823ffd640878abf28e56 \ + --hash=sha256:8d9d10d10ec27c0d46ddaecc3c5598c4db9ce4e6398ca872cdde0525765caa2f \ + --hash=sha256:8debb45545ad95b58cc16c3c1cc19ad82cffcb106db12b437885dbee265f0ab5 \ + --hash=sha256:91aa966858593f64c8a65cdefa3d6dc8fe3c2768b159da84c1ddbbb2c01ab4ef \ + --hash=sha256:9331dd34145ff105177855017920dde140b447049cd62bb589de320fd6ddd582 \ + --hash=sha256:99f9678bf0e2b1b695e8028fedac24ab6770937932eda695815d5a6618c37e04 \ + --hash=sha256:9fdf5c839bf95fc67be5794c780419edb0dbef776edcfc6c2e5e2ffd5ee755fa \ + --hash=sha256:a14e4b672c257a6b94fe934ee62666bacbc8e45b7876f9dd9502d0f0fe69db16 \ + --hash=sha256:a19caae0d670771ea7854ca30df76f676eb47e0fd9b2ee4392d44708f272122d \ + --hash=sha256:a35ed3d03910785f7d9d6f5381f0c24002b2b888b298e6f941b2fc94c5055fcd \ + --hash=sha256:a61df62966ce6507aafab24e124e0c3a1cfbe23c59732987fc0fd0d71daa0b88 \ + --hash=sha256:a6e00c8a92e7663ed2be6fcc08a2997ff06ce73c8080cd0df10cc0321a3168d7 \ + --hash=sha256:ac3196952c673822ebed8871cf8802e17254fff2a2ed4835d9c045d9b88c5ec7 \ + --hash=sha256:ac74e794e3aee92ae8f571bfeaa103a141e409863a100ab63a253b1c53b707eb \ + --hash=sha256:ad3675c126f2a95bde637d162f8231cff6bc0bc9fbe31bd78075f9ff7921e322 \ + --hash=sha256:aeebd3061f6f1747c011e1d0b0b5f04f9f54ad1a2ca183e687e7277bef2e0da2 \ + --hash=sha256:ba1a599255ad6a41022e261e31bc2f6f9355a419575b391f9655c4d9e5df5ff5 \ + --hash=sha256:bbdb8def5268f3f9cd753a265756f49228a20ed14a480d151df727808b4531dd \ + --hash=sha256:c2555e4949c8d8782f18ef20e9d39730d2656e218a6f1a21a4c4c0b56546a02e \ + --hash=sha256:c2695c61cf53a5d4345a43d689f37fc0f6d3a2dc520660aec27ec0f06288d1f9 \ + --hash=sha256:c2b627d3c8982691b06d89d31093cee158c30629fdfebe705a91814d49b554f8 \ + --hash=sha256:c46131c6112b534b178d4e002abe450a0a29840b61413ac25243f1291613806a \ + --hash=sha256:c54dc329cd44f7f7883a9f4baaefe686e8b9662e2c6c184ea15cceee587d8d69 \ + --hash=sha256:c7d7cafc11d70fdd8801abfc2ff276744ae4cb39d8060b6b542c7e44e5f2cfc2 \ + 
--hash=sha256:cb0b2d5d51f96b6cc19e6ab46a7b684be23240426ae951dcdac9639ab111b45e \ + --hash=sha256:d15a29424e96fad56dc2f3abed10a89c50c099f97d2416520c7a543e8fddf066 \ + --hash=sha256:d1f5c9169e26db6a61276008582d945405b8316aae2bb198220466e68114a0f5 \ + --hash=sha256:d271f770b52e32236d945911b2082f9318e90ff835d45224fa9e28374303f729 \ + --hash=sha256:d646fdd74c25bbdd4a055414f0fe32896c400f38ffbdfc78c68e62812a9e0257 \ + --hash=sha256:d6e395c3d1f773cf0651cd3559e25182eb0c03a2777b53b4575d8adc1149c6e9 \ + --hash=sha256:d7c071235a47d407b0e93aa6262b49422dbe48d7d8566e1158fecc91043dd948 \ + --hash=sha256:d97273a52d7f89a75b11ec386f786d3da7723d7efae3034b4dda79f6f093edc1 \ + --hash=sha256:dcf354661f54e6a49193d0b5653a1b011ba856e0b7a76bda2c33e4c6892f34ea \ + --hash=sha256:e3e7fabedb3fe06933f47f1538df7b3a8d78e13d7167195f51ca47ee12690373 \ + --hash=sha256:e525b69ee8a92c146ae5b4da9ecd15e518df4d40003b01b454ad694a27f498b5 \ + --hash=sha256:e709d6ac598c5416f879bb1bae3fd751366120ac3fa235a01de763537385d036 \ + --hash=sha256:e83dfefb4f7d285c2d6a07a22268344a97d61579b3e0dce482a5be0251d672ab \ + --hash=sha256:e86260b76786c28acf0b5fe31c8dca4c2add95098c709b11e8c35b424ebd4f5b \ + --hash=sha256:e883b61b75ca6efc2541fcd52a5c8ccfe288b24d97e20ac08fdf343b8ac672ea \ + --hash=sha256:f0a44bb40b6aaa4fb9a5c1ee07880570ecda2065433a96ccff409c9c20c1624a \ + --hash=sha256:f82ace0ec57c94aaf5b0e118d4366cff5889097412c75aa14b4fd5fc0c44ee3e \ + --hash=sha256:f9ca09414003c0e96a735daa1f071f7d7ed06962ef4fa29ceb6c80d06696d900 \ + --hash=sha256:fa430b871220dc62572cef9c69b41e0d70fcb9d486a4a207a5de4c1f25d82593 \ + --hash=sha256:fc262c3df78c8ff6020c782d9ce02e4bcffe4900ad71c0ecdad59943cba54442 \ + --hash=sha256:fcd546782d03181b0b1d20b43d612429a90a68779659ba8045114b867971ab71 \ + --hash=sha256:fd4ceeae2fb8cabdd1b71c82bfdd39662473d3433ec95b962200e9e752fb70d0 \ + --hash=sha256:fec5fac7aea6c060f317f07494961236434928e6f4374e170ef50b3001e14581 + # via -r requirements.in +aiosignal==1.3.1 \ + 
--hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \ + --hash=sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17 + # via aiohttp +async-timeout==4.0.3 \ + --hash=sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f \ + --hash=sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028 + # via aiohttp +attrs==24.2.0 \ + --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ + --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 + # via aiohttp cachetools==5.5.0 \ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a @@ -12,97 +121,112 @@ certifi==2024.8.30 \ --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via requests -charset-normalizer==3.3.2 \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 
\ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - 
--hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - 
--hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - 
--hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 +charset-normalizer==3.4.0 \ + --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ + --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ + --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ + --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ + --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ + --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ + --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ + --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ + --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ + --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ + --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ + --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ + --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ + --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ + --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ + --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ + --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ + 
--hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ + --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ + --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ + --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ + --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ + --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ + --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ + --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ + --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ + --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ + --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ + --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ + --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ + --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ + --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ + --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ + --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ + --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ + --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ + --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ + --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ + --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ + --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ + --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ + 
--hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ + --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ + --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ + --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ + --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ + --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ + --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ + --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ + --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ + --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ + --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ + --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ + --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ + --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ + --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ + --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ + --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ + --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ + --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ + --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ + --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ + --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ + --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ + --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ + 
--hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ + --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ + --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ + --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ + --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ + --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ + --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ + --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ + --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ + --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ + --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ + --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ + --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ + --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ + --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ + --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ + --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ + --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ + --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ + --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ + --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ + --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ + --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ + --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ + 
--hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ + --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ + --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ + --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ + --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ + --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ + --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ + --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ + --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ + --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ + --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ + --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ + --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ + --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ + --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ + --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 # via requests click==8.1.7 \ --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ @@ -112,16 +236,95 @@ exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest -google-api-core==2.20.0 \ - --hash=sha256:ef0591ef03c30bb83f79b3d0575c3f31219001fc9c5cf37024d08310aeffed8a \ - --hash=sha256:f74dff1889ba291a4b76c5079df0711810e2d9da81abfdc99957bc961c1eb28f +frozenlist==1.4.1 \ + --hash=sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7 \ + 
--hash=sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98 \ + --hash=sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad \ + --hash=sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5 \ + --hash=sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae \ + --hash=sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e \ + --hash=sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a \ + --hash=sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701 \ + --hash=sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d \ + --hash=sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6 \ + --hash=sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6 \ + --hash=sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106 \ + --hash=sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75 \ + --hash=sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868 \ + --hash=sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a \ + --hash=sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0 \ + --hash=sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1 \ + --hash=sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826 \ + --hash=sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec \ + --hash=sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6 \ + --hash=sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950 \ + --hash=sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19 \ + --hash=sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0 \ + --hash=sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8 \ + --hash=sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a \ + 
--hash=sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09 \ + --hash=sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86 \ + --hash=sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c \ + --hash=sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5 \ + --hash=sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b \ + --hash=sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b \ + --hash=sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d \ + --hash=sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0 \ + --hash=sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea \ + --hash=sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776 \ + --hash=sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a \ + --hash=sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897 \ + --hash=sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7 \ + --hash=sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09 \ + --hash=sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9 \ + --hash=sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe \ + --hash=sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd \ + --hash=sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742 \ + --hash=sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09 \ + --hash=sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0 \ + --hash=sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932 \ + --hash=sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1 \ + --hash=sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a \ + --hash=sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49 \ + 
--hash=sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d \ + --hash=sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7 \ + --hash=sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480 \ + --hash=sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89 \ + --hash=sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e \ + --hash=sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b \ + --hash=sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82 \ + --hash=sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb \ + --hash=sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068 \ + --hash=sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8 \ + --hash=sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b \ + --hash=sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb \ + --hash=sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2 \ + --hash=sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11 \ + --hash=sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b \ + --hash=sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc \ + --hash=sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0 \ + --hash=sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497 \ + --hash=sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17 \ + --hash=sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0 \ + --hash=sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2 \ + --hash=sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439 \ + --hash=sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5 \ + --hash=sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac \ + 
--hash=sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825 \ + --hash=sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887 \ + --hash=sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced \ + --hash=sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74 + # via + # aiohttp + # aiosignal +google-api-core==2.21.0 \ + --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ + --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d # via -r requirements.in google-auth==2.35.0 \ --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via - # -r requirements.in - # google-api-core + # via google-api-core googleapis-common-protos[grpc]==1.65.0 \ --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 @@ -195,7 +398,9 @@ grpcio==1.66.2 \ idna==3.10 \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests + # via + # requests + # yarl inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 @@ -208,101 +413,200 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via -r requirements.in -libcst==1.4.0 \ - --hash=sha256:061d6855ef30efe38b8a292b7e5d57c8e820e71fc9ec9846678b60a934b53bbb \ - --hash=sha256:17d71001cb25e94cfe8c3d997095741a8c4aa7a6d234c0f972bc42818c88dfaf \ - --hash=sha256:279b54568ea1f25add50ea4ba3d76d4f5835500c82f24d54daae4c5095b986aa \ - 
--hash=sha256:2d47de16d105e7dd5f4e01a428d9f4dc1e71efd74f79766daf54528ce37f23c3 \ - --hash=sha256:3399e6c95df89921511b44d8c5bf6a75bcbc2d51f1f6429763609ba005c10f6b \ - --hash=sha256:3401dae41fe24565387a65baee3887e31a44e3e58066b0250bc3f3ccf85b1b5a \ - --hash=sha256:3c6a8faab9da48c5b371557d0999b4ca51f4f2cbd37ee8c2c4df0ac01c781465 \ - --hash=sha256:449e0b16604f054fa7f27c3ffe86ea7ef6c409836fe68fe4e752a1894175db00 \ - --hash=sha256:48601e3e590e2d6a7ab8c019cf3937c70511a78d778ab3333764531253acdb33 \ - --hash=sha256:5da9d7dc83801aba3b8d911f82dc1a375db0d508318bad79d9fb245374afe068 \ - --hash=sha256:62e2682ee1567b6a89c91853865372bf34f178bfd237853d84df2b87b446e654 \ - --hash=sha256:7c54aa66c86d8ece9c93156a2cf5ca512b0dce40142fe9e072c86af2bf892411 \ - --hash=sha256:7ece51d935bc9bf60b528473d2e5cc67cbb88e2f8146297e40ee2c7d80be6f13 \ - --hash=sha256:81653dea1cdfa4c6520a7c5ffb95fa4d220cbd242e446c7a06d42d8636bfcbba \ - --hash=sha256:8e54c777b8d27339b70f304d16fc8bc8674ef1bd34ed05ea874bf4921eb5a313 \ - --hash=sha256:9d0cc3c5a2a51fa7e1d579a828c0a2e46b2170024fd8b1a0691c8a52f3abb2d9 \ - --hash=sha256:addc6d585141a7677591868886f6bda0577529401a59d210aa8112114340e129 \ - --hash=sha256:b8ecdba8934632b4dadacb666cd3816627a6ead831b806336972ccc4ba7ca0e9 \ - --hash=sha256:bb0abf627ee14903d05d0ad9b2c6865f1b21eb4081e2c7bea1033f85db2b8bae \ - --hash=sha256:cb4e42ea107a37bff7f9fdbee9532d39f9ea77b89caa5c5112b37057b12e0838 \ - --hash=sha256:d024f44059a853b4b852cfc04fec33e346659d851371e46fc8e7c19de24d3da9 \ - --hash=sha256:d1989fa12d3cd79118ebd29ebe2a6976d23d509b1a4226bc3d66fcb7cb50bd5d \ - --hash=sha256:e6227562fc5c9c1efd15dfe90b0971ae254461b8b6b23c1b617139b6003de1c1 \ - --hash=sha256:f42797309bb725f0f000510d5463175ccd7155395f09b5e7723971b0007a976d \ - --hash=sha256:f6abce0e66bba2babfadc20530fd3688f672d565674336595b4623cd800b91ef +libcst==1.5.0 \ + --hash=sha256:02be4aab728261bb76d16e77c9a457884cebb60d09c8edee844de43b0e08aff7 \ + 
--hash=sha256:208ea92d80b2eeed8cbc879d5f39f241582a5d56b916b1b65ed2be2f878a2425 \ + --hash=sha256:23d0e07fd3ed11480f8993a1e99d58a45f914a711b14f858b8db08ae861a8a34 \ + --hash=sha256:2d5978fd60c66794bb60d037b2e6427ea52d032636e84afce32b0f04e1cf500a \ + --hash=sha256:40748361f4ea66ab6cdd82f8501c82c29808317ac7a3bd132074efd5fd9bfae2 \ + --hash=sha256:48e581af6127c5af4c9f483e5986d94f0c6b2366967ee134f0a8eba0aa4c8c12 \ + --hash=sha256:4d6acb0bdee1e55b44c6215c59755ec4693ac01e74bb1fde04c37358b378835d \ + --hash=sha256:4f71aed85932c2ea92058fd9bbd99a6478bd69eada041c3726b4f4c9af1f564e \ + --hash=sha256:52b6aadfe54e3ae52c3b815eaaa17ba4da9ff010d5e8adf6a70697872886dd10 \ + --hash=sha256:585b3aa705b3767d717d2100935d8ef557275ecdd3fac81c3e28db0959efb0ea \ + --hash=sha256:5f10124bf99a0b075eae136ef0ce06204e5f6b8da4596a9c4853a0663e80ddf3 \ + --hash=sha256:6453b5a8755a6eee3ad67ee246f13a8eac9827d2cfc8e4a269e8bf0393db74bc \ + --hash=sha256:6fb324ed20f3a725d152df5dba8d80f7e126d9c93cced581bf118a5fc18c1065 \ + --hash=sha256:7dba93cca0a5c6d771ed444c44d21ce8ea9b277af7036cea3743677aba9fbbb8 \ + --hash=sha256:80b5c4d87721a7bab265c202575809b810815ab81d5e2e7a5d4417a087975840 \ + --hash=sha256:83bc5fbe34d33597af1d5ea113dcb9b5dd5afe5a5f4316bac4293464d5e3971a \ + --hash=sha256:8478abf21ae3861a073e898d80b822bd56e578886331b33129ba77fec05b8c24 \ + --hash=sha256:88520b6dea59eaea0cae80f77c0a632604a82c5b2d23dedb4b5b34035cbf1615 \ + --hash=sha256:8935dd3393e30c2f97344866a4cb14efe560200e232166a8db1de7865c2ef8b2 \ + --hash=sha256:96adc45e96476350df6b8a5ddbb1e1d6a83a7eb3f13087e52eb7cd2f9b65bcc7 \ + --hash=sha256:99e7c52150a135d66716b03e00c7b1859a44336dc2a2bf8f9acc164494308531 \ + --hash=sha256:9cccfc0a78e110c0d0a9d2c6fdeb29feb5274c9157508a8baef7edf352420f6d \ + --hash=sha256:a8fcd78be4d9ce3c36d0c5d0bdd384e0c7d5f72970a9e4ebd56070141972b4ad \ + --hash=sha256:b48bf71d52c1e891a0948465a94d9817b5fc1ec1a09603566af90585f3b11948 \ + --hash=sha256:b5b5bcd3a9ba92840f27ad34eaa038acbee195ec337da39536c0a2efbbf28efd \ + 
--hash=sha256:b60b09abcc2848ab52d479c3a9b71b606d91a941e3779616efd083bb87dbe8ad \ + --hash=sha256:d2788b2b5838b78fe15df8e9fa6b6903195ea49b2d2ba43e8f423f6c90e4b69f \ + --hash=sha256:d4592872aaf5b7fa5c2727a7d73c0985261f1b3fe7eff51f4fd5b8174f30b4e2 \ + --hash=sha256:d6502aeb11412afc759036160c686be1107eb5a4466db56b207c786b9b4da7c4 \ + --hash=sha256:d92c5ae2e2dc9356ad7e3d05077d9b7e5065423e45788fd86729c88729e45c6e \ + --hash=sha256:fc80ea16c7d44e38f193e4d4ef7ff1e0ba72d8e60e8b61ac6f4c87f070a118bd # via -r requirements.in -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - 
--hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - 
--hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.1 \ + --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ + --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ + --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ + --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ + 
--hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ + --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ + --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ + --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ + --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ + --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ + --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ + --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ + --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ + --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ + --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ + --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ + --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ + --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ + --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ + --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ + --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ + --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ + --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ + --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ + --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ + --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ + --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ + --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ + 
--hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ + --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ + --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ + --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ + --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ + --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ + --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ + --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ + --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ + --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ + --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ + --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ + --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ + --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ + --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ + --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ + --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ + --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ + --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ + --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ + --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ + --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ + --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ + --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ + 
--hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ + --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ + --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ + --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ + --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ + --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ + --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ + --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ + --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f # via # -r requirements.in # jinja2 -mypy-extensions==1.0.0 \ - --hash=sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d \ - --hash=sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782 - # via typing-inspect +multidict==6.1.0 \ + --hash=sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f \ + --hash=sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056 \ + --hash=sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761 \ + --hash=sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3 \ + --hash=sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b \ + --hash=sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6 \ + --hash=sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748 \ + --hash=sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966 \ + --hash=sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f \ + --hash=sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1 \ + --hash=sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6 \ + 
--hash=sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada \ + --hash=sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305 \ + --hash=sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2 \ + --hash=sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d \ + --hash=sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a \ + --hash=sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef \ + --hash=sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c \ + --hash=sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb \ + --hash=sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60 \ + --hash=sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6 \ + --hash=sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4 \ + --hash=sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478 \ + --hash=sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81 \ + --hash=sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7 \ + --hash=sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56 \ + --hash=sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3 \ + --hash=sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6 \ + --hash=sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30 \ + --hash=sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb \ + --hash=sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506 \ + --hash=sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0 \ + --hash=sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925 \ + --hash=sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c \ + --hash=sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6 \ + 
--hash=sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e \ + --hash=sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95 \ + --hash=sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2 \ + --hash=sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133 \ + --hash=sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2 \ + --hash=sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa \ + --hash=sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3 \ + --hash=sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3 \ + --hash=sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436 \ + --hash=sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657 \ + --hash=sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581 \ + --hash=sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492 \ + --hash=sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43 \ + --hash=sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2 \ + --hash=sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2 \ + --hash=sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926 \ + --hash=sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057 \ + --hash=sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc \ + --hash=sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80 \ + --hash=sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255 \ + --hash=sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1 \ + --hash=sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972 \ + --hash=sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53 \ + --hash=sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1 \ + 
--hash=sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423 \ + --hash=sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a \ + --hash=sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160 \ + --hash=sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c \ + --hash=sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd \ + --hash=sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa \ + --hash=sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5 \ + --hash=sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b \ + --hash=sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa \ + --hash=sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef \ + --hash=sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44 \ + --hash=sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4 \ + --hash=sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156 \ + --hash=sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753 \ + --hash=sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28 \ + --hash=sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d \ + --hash=sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a \ + --hash=sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304 \ + --hash=sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008 \ + --hash=sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429 \ + --hash=sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72 \ + --hash=sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399 \ + --hash=sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3 \ + --hash=sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392 \ + 
--hash=sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167 \ + --hash=sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c \ + --hash=sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774 \ + --hash=sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351 \ + --hash=sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76 \ + --hash=sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875 \ + --hash=sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd \ + --hash=sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28 \ + --hash=sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db + # via + # aiohttp + # yarl packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 @@ -311,6 +615,106 @@ pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 # via pytest +propcache==0.2.0 \ + --hash=sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9 \ + --hash=sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763 \ + --hash=sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325 \ + --hash=sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb \ + --hash=sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b \ + --hash=sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09 \ + --hash=sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957 \ + --hash=sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68 \ + --hash=sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f \ + 
--hash=sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798 \ + --hash=sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418 \ + --hash=sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6 \ + --hash=sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162 \ + --hash=sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f \ + --hash=sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036 \ + --hash=sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8 \ + --hash=sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2 \ + --hash=sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110 \ + --hash=sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23 \ + --hash=sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8 \ + --hash=sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638 \ + --hash=sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a \ + --hash=sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44 \ + --hash=sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2 \ + --hash=sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2 \ + --hash=sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850 \ + --hash=sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136 \ + --hash=sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b \ + --hash=sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887 \ + --hash=sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89 \ + --hash=sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87 \ + --hash=sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348 \ + --hash=sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4 \ + 
--hash=sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861 \ + --hash=sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e \ + --hash=sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c \ + --hash=sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b \ + --hash=sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb \ + --hash=sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1 \ + --hash=sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de \ + --hash=sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354 \ + --hash=sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563 \ + --hash=sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5 \ + --hash=sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf \ + --hash=sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9 \ + --hash=sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12 \ + --hash=sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4 \ + --hash=sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5 \ + --hash=sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71 \ + --hash=sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9 \ + --hash=sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed \ + --hash=sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336 \ + --hash=sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90 \ + --hash=sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063 \ + --hash=sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad \ + --hash=sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6 \ + --hash=sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8 \ + 
--hash=sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e \ + --hash=sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2 \ + --hash=sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7 \ + --hash=sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d \ + --hash=sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d \ + --hash=sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df \ + --hash=sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b \ + --hash=sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178 \ + --hash=sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2 \ + --hash=sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630 \ + --hash=sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48 \ + --hash=sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61 \ + --hash=sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89 \ + --hash=sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb \ + --hash=sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3 \ + --hash=sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6 \ + --hash=sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562 \ + --hash=sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b \ + --hash=sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58 \ + --hash=sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db \ + --hash=sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99 \ + --hash=sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37 \ + --hash=sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83 \ + --hash=sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a \ + 
--hash=sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d \ + --hash=sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04 \ + --hash=sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70 \ + --hash=sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544 \ + --hash=sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394 \ + --hash=sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea \ + --hash=sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7 \ + --hash=sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1 \ + --hash=sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793 \ + --hash=sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577 \ + --hash=sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7 \ + --hash=sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57 \ + --hash=sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d \ + --hash=sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032 \ + --hash=sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d \ + --hash=sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016 \ + --hash=sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504 + # via yarl proto-plus==1.24.0 \ --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 @@ -345,9 +749,9 @@ pyasn1-modules==0.4.1 \ --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth -pypandoc==1.13 \ - --hash=sha256:31652073c7960c2b03570bd1e94f602ca9bc3e70099df5ead4cea98ff5151c1e \ - 
--hash=sha256:4c7d71bf2f1ed122aac287113b5c4d537a33bbc3c1df5aed11a7d4a7ac074681 +pypandoc==1.14 \ + --hash=sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22 \ + --hash=sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197 # via -r requirements.in pytest==8.3.3 \ --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ @@ -357,7 +761,7 @@ pytest-asyncio==0.24.0 \ --hash=sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b \ --hash=sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276 # via -r requirements.in -PyYAML==6.0.2 \ +pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ --hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \ --hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \ @@ -422,21 +826,109 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via pytest typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # libcst - # typing-inspect -typing-inspect==0.9.0 \ - --hash=sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f \ - --hash=sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78 - # via libcst + # via multidict urllib3==2.2.3 \ 
--hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests +yarl==1.14.0 \ + --hash=sha256:047b258e00b99091b6f90355521f026238c63bd76dcf996d93527bb13320eefd \ + --hash=sha256:06ff23462398333c78b6f4f8d3d70410d657a471c2c5bbe6086133be43fc8f1a \ + --hash=sha256:07f9eaf57719d6721ab15805d85f4b01a5b509a0868d7320134371bcb652152d \ + --hash=sha256:0aa92e3e30a04f9462a25077db689c4ac5ea9ab6cc68a2e563881b987d42f16d \ + --hash=sha256:0cf21f46a15d445417de8fc89f2568852cf57fe8ca1ab3d19ddb24d45c0383ae \ + --hash=sha256:0fd7b941dd1b00b5f0acb97455fea2c4b7aac2dd31ea43fb9d155e9bc7b78664 \ + --hash=sha256:147e36331f6f63e08a14640acf12369e041e0751bb70d9362df68c2d9dcf0c87 \ + --hash=sha256:16a682a127930f3fc4e42583becca6049e1d7214bcad23520c590edd741d2114 \ + --hash=sha256:176110bff341b6730f64a1eb3a7070e12b373cf1c910a9337e7c3240497db76f \ + --hash=sha256:19268b4fec1d7760134f2de46ef2608c2920134fb1fa61e451f679e41356dc55 \ + --hash=sha256:1b16f6c75cffc2dc0616ea295abb0e1967601bd1fb1e0af6a1de1c6c887f3439 \ + --hash=sha256:1bfc25aa6a7c99cf86564210f79a0b7d4484159c67e01232b116e445b3036547 \ + --hash=sha256:1ca3894e9e9f72da93544f64988d9c052254a338a9f855165f37f51edb6591de \ + --hash=sha256:1dda53508df0de87b6e6b0a52d6718ff6c62a5aca8f5552748404963df639269 \ + --hash=sha256:217a782020b875538eebf3948fac3a7f9bbbd0fd9bf8538f7c2ad7489e80f4e8 \ + --hash=sha256:2192f718db4a8509f63dd6d950f143279211fa7e6a2c612edc17d85bf043d36e \ + --hash=sha256:29a84a46ec3ebae7a1c024c055612b11e9363a8a23238b3e905552d77a2bc51b \ + --hash=sha256:3007a5b75cb50140708420fe688c393e71139324df599434633019314ceb8b59 \ + --hash=sha256:30600ba5db60f7c0820ef38a2568bb7379e1418ecc947a0f76fd8b2ff4257a97 \ + --hash=sha256:337912bcdcf193ade64b9aae5a4017a0a1950caf8ca140362e361543c6773f21 \ + --hash=sha256:37001e5d4621cef710c8dc1429ca04e189e572f128ab12312eab4e04cf007132 \ + 
--hash=sha256:3d569f877ed9a708e4c71a2d13d2940cb0791da309f70bd970ac1a5c088a0a92 \ + --hash=sha256:4009def9be3a7e5175db20aa2d7307ecd00bbf50f7f0f989300710eee1d0b0b9 \ + --hash=sha256:46a9772a1efa93f9cd170ad33101c1817c77e0e9914d4fe33e2da299d7cf0f9b \ + --hash=sha256:47eede5d11d669ab3759b63afb70d28d5328c14744b8edba3323e27dc52d298d \ + --hash=sha256:498b3c55087b9d762636bca9b45f60d37e51d24341786dc01b81253f9552a607 \ + --hash=sha256:4e0d45ebf975634468682c8bec021618b3ad52c37619e5c938f8f831fa1ac5c0 \ + --hash=sha256:4f24f08b6c9b9818fd80612c97857d28f9779f0d1211653ece9844fc7b414df2 \ + --hash=sha256:55c144d363ad4626ca744556c049c94e2b95096041ac87098bb363dcc8635e8d \ + --hash=sha256:582cedde49603f139be572252a318b30dc41039bc0b8165f070f279e5d12187f \ + --hash=sha256:587c3cc59bc148a9b1c07a019346eda2549bc9f468acd2f9824d185749acf0a6 \ + --hash=sha256:5cd5dad8366e0168e0fd23d10705a603790484a6dbb9eb272b33673b8f2cce72 \ + --hash=sha256:5d02d700705d67e09e1f57681f758f0b9d4412eeb70b2eb8d96ca6200b486db3 \ + --hash=sha256:625f207b1799e95e7c823f42f473c1e9dbfb6192bd56bba8695656d92be4535f \ + --hash=sha256:659603d26d40dd4463200df9bfbc339fbfaed3fe32e5c432fe1dc2b5d4aa94b4 \ + --hash=sha256:689a99a42ee4583fcb0d3a67a0204664aa1539684aed72bdafcbd505197a91c4 \ + --hash=sha256:68ac1a09392ed6e3fd14be880d39b951d7b981fd135416db7d18a6208c536561 \ + --hash=sha256:6a615cad11ec3428020fb3c5a88d85ce1b5c69fd66e9fcb91a7daa5e855325dd \ + --hash=sha256:73bedd2be05f48af19f0f2e9e1353921ce0c83f4a1c9e8556ecdcf1f1eae4892 \ + --hash=sha256:742aef0a99844faaac200564ea6f5e08facb285d37ea18bd1a5acf2771f3255a \ + --hash=sha256:75ff4c819757f9bdb35de049a509814d6ce851fe26f06eb95a392a5640052482 \ + --hash=sha256:781e2495e408a81e4eaeedeb41ba32b63b1980dddf8b60dbbeff6036bcd35049 \ + --hash=sha256:7a9f917966d27f7ce30039fe8d900f913c5304134096554fd9bea0774bcda6d1 \ + --hash=sha256:7e2637d75e92763d1322cb5041573279ec43a80c0f7fbbd2d64f5aee98447b17 \ + --hash=sha256:8089d4634d8fa2b1806ce44fefa4979b1ab2c12c0bc7ef3dfa45c8a374811348 \ + 
--hash=sha256:816d24f584edefcc5ca63428f0b38fee00b39fe64e3c5e558f895a18983efe96 \ + --hash=sha256:8385ab36bf812e9d37cf7613999a87715f27ef67a53f0687d28c44b819df7cb0 \ + --hash=sha256:85cb3e40eaa98489f1e2e8b29f5ad02ee1ee40d6ce6b88d50cf0f205de1d9d2c \ + --hash=sha256:8648180b34faaea4aa5b5ca7e871d9eb1277033fa439693855cf0ea9195f85f1 \ + --hash=sha256:8892fa575ac9b1b25fae7b221bc4792a273877b9b56a99ee2d8d03eeb3dbb1d2 \ + --hash=sha256:88c7d9d58aab0724b979ab5617330acb1c7030b79379c8138c1c8c94e121d1b3 \ + --hash=sha256:8a2f8fb7f944bcdfecd4e8d855f84c703804a594da5123dd206f75036e536d4d \ + --hash=sha256:8f4e475f29a9122f908d0f1f706e1f2fc3656536ffd21014ff8a6f2e1b14d1d8 \ + --hash=sha256:8f50eb3837012a937a2b649ec872b66ba9541ad9d6f103ddcafb8231cfcafd22 \ + --hash=sha256:91d875f75fabf76b3018c5f196bf3d308ed2b49ddcb46c1576d6b075754a1393 \ + --hash=sha256:94b2bb9bcfd5be9d27004ea4398fb640373dd0c1a9e219084f42c08f77a720ab \ + --hash=sha256:9557c9322aaa33174d285b0c1961fb32499d65ad1866155b7845edc876c3c835 \ + --hash=sha256:95e16e9eaa2d7f5d87421b8fe694dd71606aa61d74b824c8d17fc85cc51983d1 \ + --hash=sha256:96952f642ac69075e44c7d0284528938fdff39422a1d90d3e45ce40b72e5e2d9 \ + --hash=sha256:985623575e5c4ea763056ffe0e2d63836f771a8c294b3de06d09480538316b13 \ + --hash=sha256:99ff3744f5fe48288be6bc402533b38e89749623a43208e1d57091fc96b783b9 \ + --hash=sha256:9abe80ae2c9d37c17599557b712e6515f4100a80efb2cda15f5f070306477cd2 \ + --hash=sha256:a152751af7ef7b5d5fa6d215756e508dd05eb07d0cf2ba51f3e740076aa74373 \ + --hash=sha256:a2e4725a08cb2b4794db09e350c86dee18202bb8286527210e13a1514dc9a59a \ + --hash=sha256:a56fbe3d7f3bce1d060ea18d2413a2ca9ca814eea7cedc4d247b5f338d54844e \ + --hash=sha256:ab3abc0b78a5dfaa4795a6afbe7b282b6aa88d81cf8c1bb5e394993d7cae3457 \ + --hash=sha256:b03384eed107dbeb5f625a99dc3a7de8be04fc8480c9ad42fccbc73434170b20 \ + --hash=sha256:b0547ab1e9345dc468cac8368d88ea4c5bd473ebc1d8d755347d7401982b5dd8 \ + --hash=sha256:b4c1ecba93e7826dc71ddba75fb7740cdb52e7bd0be9f03136b83f54e6a1f511 \ + 
--hash=sha256:b693c63e7e64b524f54aa4888403c680342d1ad0d97be1707c531584d6aeeb4f \ + --hash=sha256:b6d0147574ce2e7b812c989e50fa72bbc5338045411a836bd066ce5fc8ac0bce \ + --hash=sha256:b9cfef3f14f75bf6aba73a76caf61f9d00865912a04a4393c468a7ce0981b519 \ + --hash=sha256:b9f805e37ed16cc212fdc538a608422d7517e7faf539bedea4fe69425bc55d76 \ + --hash=sha256:bab03192091681d54e8225c53f270b0517637915d9297028409a2a5114ff4634 \ + --hash=sha256:bc24f968b82455f336b79bf37dbb243b7d76cd40897489888d663d4e028f5069 \ + --hash=sha256:c14b504a74e58e2deb0378b3eca10f3d076635c100f45b113c18c770b4a47a50 \ + --hash=sha256:c2089a9afef887664115f7fa6d3c0edd6454adaca5488dba836ca91f60401075 \ + --hash=sha256:c8ed4034f0765f8861620c1f2f2364d2e58520ea288497084dae880424fc0d9f \ + --hash=sha256:cd2660c01367eb3ef081b8fa0a5da7fe767f9427aa82023a961a5f28f0d4af6c \ + --hash=sha256:d8361c7d04e6a264481f0b802e395f647cd3f8bbe27acfa7c12049efea675bd1 \ + --hash=sha256:d9baec588f015d0ee564057aa7574313c53a530662ffad930b7886becc85abdf \ + --hash=sha256:dbd9ff43a04f8ffe8a959a944c2dca10d22f5f99fc6a459f49c3ebfb409309d9 \ + --hash=sha256:e3f8bfc1db82589ef965ed234b87de30d140db8b6dc50ada9e33951ccd8ec07a \ + --hash=sha256:e6a2c5c5bb2556dfbfffffc2bcfb9c235fd2b566d5006dfb2a37afc7e3278a07 \ + --hash=sha256:e749af6c912a7bb441d105c50c1a3da720474e8acb91c89350080dd600228f0e \ + --hash=sha256:e85d86527baebb41a214cc3b45c17177177d900a2ad5783dbe6f291642d4906f \ + --hash=sha256:ee2c68e4f2dd1b1c15b849ba1c96fac105fca6ffdb7c1e8be51da6fabbdeafb9 \ + --hash=sha256:f3ab950f8814f3b7b5e3eebc117986f817ec933676f68f0a6c5b2137dd7c9c69 \ + --hash=sha256:f4f4547944d4f5cfcdc03f3f097d6f05bbbc915eaaf80a2ee120d0e756de377d \ + --hash=sha256:f72a0d746d38cb299b79ce3d4d60ba0892c84bbc905d0d49c13df5bace1b65f8 \ + --hash=sha256:fc2c80bc87fba076e6cbb926216c27fba274dae7100a7b9a0983b53132dd99f2 \ + --hash=sha256:fe4d2536c827f508348d7b40c08767e8c7071614250927233bf0c92170451c0a + # via aiohttp diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl 
b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl index cf5e225c65bf..5b11375d2eb0 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic.bzl @@ -94,6 +94,7 @@ def py_gapic_library( requirement("google-api-core"), requirement("googleapis-common-protos"), requirement("pytest-asyncio"), + requirement("aiohttp") ] py_library( From ee5a3187ddf74c0d819ecb2cd6ebb3c169dd0227 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 10 Oct 2024 16:05:04 -0400 Subject: [PATCH 1200/1339] chore: update templated files (#2220) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/requirements.txt | 42 +- packages/gapic-generator/.kokoro/release.sh | 2 +- .../.kokoro/release/common.cfg | 2 +- .../gapic-generator/.kokoro/requirements.txt | 610 +++++++++--------- packages/gapic-generator/owlbot.py | 7 - 6 files changed, 316 insertions(+), 351 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 6d064ddb9b06..06ea02039ef9 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 -# created: 2024-07-31T14:52:44.926548819Z + digest: sha256:68ce7dace6a9481b4c94d73308572f20766031dc27e535f2afb8a84cd60feb44 +# created: 2024-10-10 diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index 7129c7715594..66eacc82f041 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -4,39 +4,39 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via nox -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + 
--hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox diff --git a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh index b9f654279fe1..9672d6135b06 100755 --- 
a/packages/gapic-generator/.kokoro/release.sh +++ b/packages/gapic-generator/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") cd github/gapic-generator-python python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/gapic-generator/.kokoro/release/common.cfg b/packages/gapic-generator/.kokoro/release/common.cfg index 1f5dced3c103..d23792661793 100644 --- a/packages/gapic-generator/.kokoro/release/common.cfg +++ b/packages/gapic-generator/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt index 9622baf0ba38..006d8ef931bf 100644 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ b/packages/gapic-generator/.kokoro/requirements.txt @@ -4,79 +4,94 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 
+attrs==24.2.0 \ + --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ + --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 # via gcp-releasetool backports-tarfile==1.2.0 \ --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 # via jaraco-context -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +certifi==2024.8.30 \ + --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - 
--hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - 
--hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 +cffi==1.17.1 \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + 
--hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + 
--hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + 
--hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -97,72 +112,67 @@ colorlog==6.8.2 \ # via # gcp-docuploader # nox -cryptography==42.0.8 \ - --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ - --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ - --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ - --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ - --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ - --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ - 
--hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ - --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ - --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ - --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ - --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ - --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ - --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ - --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ - --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ - --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ - --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ - --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ - --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ - --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ - --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ - --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ - --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ - --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ - --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ - --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ - --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ - --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ - --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ - --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ - 
--hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ - --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e +cryptography==43.0.1 \ + --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ + --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ + --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ + --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ + --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ + --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ + --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ + --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ + --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ + --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ + --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ + --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ + --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ + --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ + --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ + --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ + --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ + --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ + --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ + --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ + --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ + 
--hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ + --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ + --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ + --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ + --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ + --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv docutils==0.21.2 \ --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.1 \ - --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ - --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 +gcp-releasetool==2.1.1 \ + 
--hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ + --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e # via -r requirements.in -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd +google-api-core==2.21.0 \ + --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ + --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d # via # google-cloud-core # google-cloud-storage -google-auth==2.31.0 \ - --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ - --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 +google-auth==2.35.0 \ + --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ + --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a # via # gcp-releasetool # google-api-core @@ -172,97 +182,56 @@ google-cloud-core==2.4.1 \ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.17.0 \ - --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ - --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 +google-cloud-storage==2.18.2 \ + --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ + --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - 
--hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - 
--hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - 
--hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + 
--hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 \ - --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ - --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 
+google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 # via google-cloud-storage -googleapis-common-protos==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 +googleapis-common-protos==1.65.0 \ + --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ + --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via requests -importlib-metadata==8.0.0 \ - --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ - --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 +importlib-metadata==8.5.0 \ + --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ + --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 # via # -r requirements.in # keyring @@ -271,13 +240,13 @@ jaraco-classes==3.4.0 \ --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 # via keyring -jaraco-context==5.3.0 \ - --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ - --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 +jaraco-context==6.0.1 \ + --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ + 
--hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 # via keyring -jaraco-functools==4.0.1 \ - --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ - --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 +jaraco-functools==4.1.0 \ + --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ + --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -289,9 +258,9 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b +keyring==25.4.1 \ + --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ + --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b # via # gcp-releasetool # twine @@ -299,75 +268,76 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - 
--hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - 
--hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - 
--hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.1 \ + --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ + --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ + --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ + --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ + --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ + --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ + --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ + --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ + --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ + --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ + --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ + --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ + --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ + --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ + --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ + --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ + --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ + 
--hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ + --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ + --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ + --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ + --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ + --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ + --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ + --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ + --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ + --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ + --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ + --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ + --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ + --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ + --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ + --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ + --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ + --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ + --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ + --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ + --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ + --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ + --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ + --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ + 
--hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ + --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ + --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ + --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ + --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ + --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ + --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ + --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ + --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ + --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ + --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ + --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ + --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ + --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ + --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ + --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ + --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ + --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ + --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ + --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - 
--hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 +more-itertools==10.5.0 \ + --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ + --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 # via # jaraco-classes # jaraco-functools @@ -389,9 +359,9 @@ nh3==0.2.18 \ --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ @@ -403,41 +373,41 @@ pkginfo==1.10.0 \ --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv proto-plus==1.24.0 \ --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 # via google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - 
--hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 +protobuf==5.28.2 \ + --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ + --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ + --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ + --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ + --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ + --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ + --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ + --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ + --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ + --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ + --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - 
--hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth pycparser==2.22 \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ @@ -449,9 +419,9 @@ pygments==2.18.0 \ # via # readme-renderer # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 +pyjwt==2.9.0 \ + --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ + --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c # via gcp-releasetool pyperclip==1.9.0 \ --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 @@ -481,9 +451,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 +rich==13.9.2 \ + --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ + --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 # via twine rsa==4.9 \ 
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -499,9 +469,9 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox twine==5.1.1 \ --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ @@ -510,28 +480,30 @@ twine==5.1.1 \ typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 + # via + # -r requirements.in + # rich +urllib3==2.2.3 \ + --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via # requests # twine -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 +wheel==0.44.0 \ + --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ + 
--hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c +zipp==3.20.2 \ + --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ + --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 +setuptools==75.1.0 \ + --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ + --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 # via -r requirements.in diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py index dcb31e46eb4a..3dcd7f1516b9 100644 --- a/packages/gapic-generator/owlbot.py +++ b/packages/gapic-generator/owlbot.py @@ -21,13 +21,6 @@ excludes=["samples/**/*", "test-samples*", "publish-docs.sh", "*/prerelease-deps.cfg"], ) -# remove docfx build -assert 1 == s.replace( - ".kokoro/docs/docs-presubmit.cfg", - 'value: "docs docfx"', - 'value: "docs"', -) - # needed for docs build s.move(templated_files / ".trampolinerc") From afc55912cef747dc7d3b47201193e823261db90d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 20:10:07 +0000 Subject: [PATCH 1201/1339] chore(main): release 1.19.1 (#2216) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 
deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1da6f68f062c..4af1c354921d 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.19.1](https://github.com/googleapis/gapic-generator-python/compare/v1.19.0...v1.19.1) (2024-10-10) + + +### Bug Fixes + +* Add default library settings for incorrect lib version ([#2212](https://github.com/googleapis/gapic-generator-python/issues/2212)) ([de46272](https://github.com/googleapis/gapic-generator-python/commit/de46272ae65e9117be7f362355cefd28d0780917)) +* Resolve issue with wait operation mixin ([#2218](https://github.com/googleapis/gapic-generator-python/issues/2218)) ([095d060](https://github.com/googleapis/gapic-generator-python/commit/095d0600803dace8d665fee9ccbc460720b5fe17)) +* Use disambiguated name for rpcs to avoid collisions ([#2217](https://github.com/googleapis/gapic-generator-python/issues/2217)) ([296cd3e](https://github.com/googleapis/gapic-generator-python/commit/296cd3e814ba58954c16ca6256db0359bcab0f09)) + ## [1.19.0](https://github.com/googleapis/gapic-generator-python/compare/v1.18.5...v1.19.0) (2024-10-09) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5c997bf55293..9e3a638059b4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.19.0" +version = "1.19.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 0ee5ab4bef669bb6e6d0301f0702a3ca49768c66 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 16 Oct 2024 11:58:48 -0400 Subject: [PATCH 1202/1339] build: add presubmits for unit tests in golden files 
(#2224) --- .../.github/sync-repo-settings.yaml | 1 + .../.github/workflows/tests.yaml | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index e04213881e68..f90c27a52e09 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -50,6 +50,7 @@ branchProtectionRules: - 'integration' - 'goldens-lint' - 'goldens-prerelease' + - 'goldens-unit' - 'style-check' - 'snippetgen' - 'unit (3.7)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 5eaf75d2acb9..dbaed7f9a53a 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -328,6 +328,27 @@ jobs: nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.12 blacken lint nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.12 blacken lint nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.12 blacken lint + goldens-unit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.12 + uses: actions/setup-python@v5 + with: + python-version: "3.12" + cache: 'pip' + - name: Install nox. 
+ run: | + python -m pip install nox + - name: Run the `unit` nox session + # Exclude testing for asset which requires dependency google-cloud-org-policy + # in order to run unit tests + # See https://github.com/googleapis/gapic-generator-python/issues/1806 + run: | + nox -f tests/integration/goldens/credentials/noxfile.py -s unit-3.12 + nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-3.12 + nox -f tests/integration/goldens/logging/noxfile.py -s unit-3.12 + nox -f tests/integration/goldens/redis/noxfile.py -s unit-3.12 goldens-prerelease: runs-on: ubuntu-latest steps: From 6f4474ebd7d2a82213abcd8820a19b14e66df9f4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 16 Oct 2024 12:19:51 -0400 Subject: [PATCH 1203/1339] tests: Add required checks for showcase `_w_rest_async` (#2225) --- packages/gapic-generator/.github/sync-repo-settings.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index f90c27a52e09..4b6af0cbfbf3 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -16,6 +16,8 @@ branchProtectionRules: - 'showcase (3.12, showcase)' - 'showcase (3.7, showcase_alternative_templates)' - 'showcase (3.12, showcase_alternative_templates)' + - 'showcase (3.7, _w_rest_async)' + - 'showcase (3.12, _w_rest_async)' # TODO(dovs): reenable these when the mtls tests have been debugged and fixed # See #1218 for details # - 'showcase-mtls (showcase_mtls)' @@ -46,6 +48,12 @@ branchProtectionRules: - 'showcase-unit (3.10, _mixins)' - 'showcase-unit (3.11, _mixins)' - 'showcase-unit (3.12, _mixins)' + - 'showcase-unit (3.7, _w_rest_async)' + - 'showcase-unit (3.8, _w_rest_async)' + - 'showcase-unit (3.9, _w_rest_async)' + - 'showcase-unit (3.10, _w_rest_async)' + - 'showcase-unit (3.11, _w_rest_async)' + - 'showcase-unit (3.12, _w_rest_async)' - 
'showcase-unit-add-iam-methods' - 'integration' - 'goldens-lint' From 3978deb6d2050f867f985a3b91fa49d448ac120f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 16 Oct 2024 13:40:03 -0400 Subject: [PATCH 1204/1339] fix: allow `google-cloud-kms` 3.x (#2226) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 0136654f16a8..aefbe76b8506 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -12,7 +12,7 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "3.0.0dev"}, - ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "3.0.0dev"}, + ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "4.0.0dev"}, ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"}, From 5e6cf56ae09db7033081bba1f1cd3007da69502b Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 16 Oct 2024 14:37:29 -0400 Subject: [PATCH 1205/1339] chore: update rest version in 
client info (#2228) --- .../%name_%version/%sub/services/%service/transports/rest.py.j2 | 2 +- .../%sub/services/%service/transports/rest_asyncio.py.j2 | 2 +- .../cloud/asset_v1/services/asset_service/transports/rest.py | 2 +- .../credentials_v1/services/iam_credentials/transports/rest.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/rest.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest.py | 2 +- .../redis_v1/services/cloud_redis/transports/rest_asyncio.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 6bdbbbcbc406..1001df616c3c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -44,7 +44,7 @@ except AttributeError: # pragma: NO COVER DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 6ae4e677865c..beb2d7ee1105 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -66,7 +66,7 @@ except AttributeError: # pragma: NO COVER DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=google.auth.__version__ + rest_version=f"google-auth@{google.auth.__version__}", ) {{ shared_macros.create_interceptor_class(api, service, method, is_async=True) }} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index f7e7aaffcbe4..e4ca2ea51423 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -49,7 +49,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 819c62f3c5b5..a2e5c7b97022 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -46,7 +46,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 1031edb41b1e..214d5655fa88 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -57,7 +57,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 750a09fa6bbb..2e18f09cd96c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -49,7 +49,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index e82769893051..4eff5c369818 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ 
-59,7 +59,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=google.auth.__version__ + rest_version=f"google-auth@{google.auth.__version__}", ) From 6712aaf5b25e7e1084d029259ef14420ff340b68 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 22 Oct 2024 15:43:37 -0400 Subject: [PATCH 1206/1339] fix: Added underscores in services/types in index.rst.j2 (#2232) --- packages/gapic-generator/gapic/templates/docs/index.rst.j2 | 4 ++-- .../tests/integration/goldens/asset/docs/index.rst | 4 ++-- .../tests/integration/goldens/credentials/docs/index.rst | 4 ++-- .../tests/integration/goldens/eventarc/docs/index.rst | 4 ++-- .../tests/integration/goldens/logging/docs/index.rst | 4 ++-- .../tests/integration/goldens/redis/docs/index.rst | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 index c8dea9adbca6..890509be79d4 100644 --- a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - {{ api.naming.versioned_module_name }}/services - {{ api.naming.versioned_module_name }}/types + {{ api.naming.versioned_module_name }}/services_ + {{ api.naming.versioned_module_name }}/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst index fee6608ede43..df4eb53564ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. 
toctree:: :maxdepth: 2 - asset_v1/services - asset_v1/types + asset_v1/services_ + asset_v1/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst index 3e271990d6f9..2113270ae152 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - credentials_v1/services - credentials_v1/types + credentials_v1/services_ + credentials_v1/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst index cd50176117ee..0a20636626b1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - eventarc_v1/services - eventarc_v1/types + eventarc_v1/services_ + eventarc_v1/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst index 6a4859643f45..51acc96292ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. 
toctree:: :maxdepth: 2 - logging_v2/services - logging_v2/types + logging_v2/services_ + logging_v2/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst index f7ccd42cd0a6..0b346d85a90f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst @@ -3,5 +3,5 @@ API Reference .. toctree:: :maxdepth: 2 - redis_v1/services - redis_v1/types + redis_v1/services_ + redis_v1/types_ From 866025a88a21d7499dbc617023287d58e7a5ea0e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 23 Oct 2024 11:49:37 -0400 Subject: [PATCH 1207/1339] feat: Add support for Python 3.13 (#2215) --- .../.github/sync-repo-settings.yaml | 17 +++-- .../.github/workflows/tests.yaml | 62 +++++++++---------- .../gapic/ads-templates/noxfile.py.j2 | 1 + .../gapic/ads-templates/setup.py.j2 | 1 + .../gapic/samplegen/samplegen.py | 2 +- .../gapic/templates/noxfile.py.j2 | 12 ++-- .../gapic/templates/setup.py.j2 | 2 + .../templates/testing/constraints-3.13.txt.j2 | 4 ++ packages/gapic-generator/noxfile.py | 1 + packages/gapic-generator/setup.py | 1 + .../integration/goldens/asset/noxfile.py | 12 ++-- .../tests/integration/goldens/asset/setup.py | 2 + .../asset/testing/constraints-3.13.txt | 9 +++ .../goldens/credentials/noxfile.py | 12 ++-- .../integration/goldens/credentials/setup.py | 2 + .../credentials/testing/constraints-3.13.txt | 6 ++ .../integration/goldens/eventarc/noxfile.py | 12 ++-- .../integration/goldens/eventarc/setup.py | 2 + .../eventarc/testing/constraints-3.13.txt | 7 +++ .../integration/goldens/logging/noxfile.py | 12 ++-- .../integration/goldens/logging/setup.py | 2 + .../logging/testing/constraints-3.13.txt | 6 ++ .../integration/goldens/redis/noxfile.py | 12 ++-- .../tests/integration/goldens/redis/setup.py | 2 + .../redis/testing/constraints-3.13.txt | 6 ++ 25 
files changed, 140 insertions(+), 67 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 4b6af0cbfbf3..d5581bc38c46 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -12,12 +12,13 @@ branchProtectionRules: - 'mypy (3.10)' - 'mypy (3.11)' - 'mypy (3.12)' + - 'mypy (3.13)' - 'showcase (3.7, showcase)' - - 'showcase (3.12, showcase)' + - 'showcase (3.13, showcase)' - 'showcase (3.7, showcase_alternative_templates)' - - 'showcase (3.12, showcase_alternative_templates)' - - 'showcase (3.7, _w_rest_async)' - - 'showcase (3.12, _w_rest_async)' + - 'showcase (3.13, showcase_alternative_templates)' + - 'showcase (3.7, showcase_w_rest_async)' + - 'showcase (3.13, showcase_w_rest_async)' # TODO(dovs): reenable these when the mtls tests have been debugged and fixed # See #1218 for details # - 'showcase-mtls (showcase_mtls)' @@ -30,30 +31,35 @@ branchProtectionRules: - 'showcase-unit (3.10)' - 'showcase-unit (3.11)' - 'showcase-unit (3.12)' + - 'showcase-unit (3.13)' - 'showcase-unit (3.7, _alternative_templates)' - 'showcase-unit (3.8, _alternative_templates)' - 'showcase-unit (3.9, _alternative_templates)' - 'showcase-unit (3.10, _alternative_templates)' - 'showcase-unit (3.11, 
_alternative_templates)' - 'showcase-unit (3.12, _alternative_templates)' + - 'showcase-unit (3.13, _alternative_templates)' - 'showcase-unit (3.7, _alternative_templates_mixins)' - 'showcase-unit (3.8, _alternative_templates_mixins)' - 'showcase-unit (3.9, _alternative_templates_mixins)' - 'showcase-unit (3.10, _alternative_templates_mixins)' - 'showcase-unit (3.11, _alternative_templates_mixins)' - 'showcase-unit (3.12, _alternative_templates_mixins)' + - 'showcase-unit (3.13, _alternative_templates_mixins)' - 'showcase-unit (3.7, _mixins)' - 'showcase-unit (3.8, _mixins)' - 'showcase-unit (3.9, _mixins)' - 'showcase-unit (3.10, _mixins)' - 'showcase-unit (3.11, _mixins)' - 'showcase-unit (3.12, _mixins)' + - 'showcase-unit (3.13, _mixins)' - 'showcase-unit (3.7, _w_rest_async)' - 'showcase-unit (3.8, _w_rest_async)' - 'showcase-unit (3.9, _w_rest_async)' - 'showcase-unit (3.10, _w_rest_async)' - 'showcase-unit (3.11, _w_rest_async)' - 'showcase-unit (3.12, _w_rest_async)' + - 'showcase-unit (3.13, _w_rest_async)' - 'showcase-unit-add-iam-methods' - 'integration' - 'goldens-lint' @@ -67,18 +73,21 @@ branchProtectionRules: - 'unit (3.10)' - 'unit (3.11)' - 'unit (3.12)' + - 'unit (3.13)' - 'fragment (3.7)' - 'fragment (3.8)' - 'fragment (3.9)' - 'fragment (3.10)' - 'fragment (3.11)' - 'fragment (3.12)' + - 'fragment (3.13)' - 'fragment (3.7, _alternative_templates)' - 'fragment (3.8, _alternative_templates)' - 'fragment (3.9, _alternative_templates)' - 'fragment (3.10, _alternative_templates)' - 'fragment (3.11, _alternative_templates)' - 'fragment (3.12, _alternative_templates)' + - 'fragment (3.13, _alternative_templates)' - 'OwlBot Post Processor' requiredApprovingReviewCount: 1 requiresCodeOwnerReviews: true diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index dbaed7f9a53a..3328d87b285c 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ 
b/packages/gapic-generator/.github/workflows/tests.yaml @@ -38,7 +38,7 @@ jobs: matrix: # Run mypy on all of the supported python versions listed in setup.py # https://github.com/python/mypy/blob/master/setup.py - python: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -56,7 +56,7 @@ jobs: # Run showcase tests on the lowest and highest supported runtimes matrix: # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA. - python: ["3.7", "3.12"] + python: ["3.7", "3.13"] target: [showcase, showcase_alternative_templates, showcase_w_rest_async] runs-on: ubuntu-latest steps: @@ -104,10 +104,10 @@ jobs: run: | sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python "3.12" + - name: Set up Python "3.13" uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ @@ -140,7 +140,7 @@ jobs: showcase-unit: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `_w_rest_async` variant when async rest is GA. variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async] runs-on: ubuntu-latest @@ -171,10 +171,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.12" + - name: Set up Python "3.13" uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Install system dependencies. 
run: | @@ -199,10 +199,10 @@ jobs: variant: ['', _alternative_templates] steps: - uses: actions/checkout@v4 - - name: Set up Python "3.12" + - name: Set up Python "3.13" uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Install system dependencies. run: | @@ -224,10 +224,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.12" + - name: Set up Python "3.13" uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Install system dependencies. run: | @@ -240,7 +240,7 @@ jobs: unit: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -261,7 +261,7 @@ jobs: fragment: strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] variant: ['', _alternative_templates] runs-on: ubuntu-latest steps: @@ -313,29 +313,29 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.13 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Install nox. run: | python -m pip install nox - name: Run blacken and lint on the generated output. 
run: | - nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.12 blacken lint - nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.12 blacken lint - nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.12 blacken lint - nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.12 blacken lint - nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.12 blacken lint + nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.13 blacken lint + nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.13 blacken lint + nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.13 blacken lint + nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.13 blacken lint + nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.13 blacken lint goldens-unit: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.13 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Install nox. run: | @@ -345,18 +345,18 @@ jobs: # in order to run unit tests # See https://github.com/googleapis/gapic-generator-python/issues/1806 run: | - nox -f tests/integration/goldens/credentials/noxfile.py -s unit-3.12 - nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-3.12 - nox -f tests/integration/goldens/logging/noxfile.py -s unit-3.12 - nox -f tests/integration/goldens/redis/noxfile.py -s unit-3.12 + nox -f tests/integration/goldens/credentials/noxfile.py -s unit-3.13 + nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-3.13 + nox -f tests/integration/goldens/logging/noxfile.py -s unit-3.13 + nox -f tests/integration/goldens/redis/noxfile.py -s unit-3.13 goldens-prerelease: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python 3.12 + - name: Set up Python 3.13 uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" cache: 'pip' - name: Install nox. 
run: | @@ -374,12 +374,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set up Python "3.11" + - name: Set up Python "3.13" uses: actions/setup-python@v5 with: - # Do not upgrade this check to python 3.12 until - # https://github.com/hhatto/autopep8/issues/712 is fixed - python-version: "3.11" + python-version: "3.13" cache: 'pip' - name: Install autopep8 run: | diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index f10310fdfbba..9a2836cb8f27 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -14,6 +14,7 @@ ALL_PYTHON = [ "3.10", "3.11", "3.12", + "3.13", ] @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 8bc504f57a88..be5e116888ba 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -72,6 +72,7 @@ setuptools.setup( "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index ebc70936a292..b583e5cbb251 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -1088,7 +1088,7 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): if not method.void: snippet_metadata.client_method.result_type = method.client_output_async.ident.sphinx if async_ else method.client_output.ident.sphinx if method.server_streaming: - snippet_metadata.client_method.result_type = 
f"Iterable[{snippet_metadata.client_method.result_type }]" + snippet_metadata.client_method.result_type = f"Iterable[{snippet_metadata.client_method.result_type}]" # Client Method Parameters parameters = snippet_metadata.client_method.parameters diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 5a6042e981b8..18505d5434c7 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -18,7 +18,8 @@ ALL_PYTHON = [ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -28,7 +29,7 @@ PACKAGE_NAME = '{{ api.naming.warehouse_package_name }}' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -50,7 +51,7 @@ nox.sessions = [ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -84,7 +85,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -119,7 +120,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", 
"google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index a466758ae624..7ebab02cbfe5 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -37,6 +37,7 @@ dependencies = [ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} @@ -91,6 +92,7 @@ setuptools.setup( "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 new file mode 100644 index 000000000000..615c99518a63 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 @@ -0,0 +1,4 @@ +# -*- coding: utf-8 -*- +{% block constraints %} +{% include "testing/_default_constraints.j2" %} +{% endblock %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 2387ea130290..69af653873f9 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -40,6 +40,7 @@ "3.10", "3.11", "3.12", + "3.13", ) NEWEST_PYTHON = ALL_PYTHON[-1] diff --git 
a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 9e3a638059b4..b539eafd01cc 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -76,6 +76,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 3e4f7c4fe142..2ca01db6293c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", 
"3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 406380650fee..db3e65c6ea31 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -44,6 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", "google-cloud-os-config >= 1.0.0, <2.0.0dev", @@ -86,6 +87,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt new file mode 100755 index 000000000000..70744e58974a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +google-cloud-access-context-manager +google-cloud-os-config +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 2194befe9c8b..3b9e7366bb9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", 
"proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index f9ceb4c0f2b6..36e57a170563 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -44,6 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { @@ -83,6 +84,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 61f7c5c43dbe..685faed09bd5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 78e888f6a316..b54372e5e2d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -44,6 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] @@ -84,6 +85,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt new file mode 100755 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 6402e148ffc6..8d723fa93820 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 2d9c19f37c00..69fe4db623ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -44,6 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { @@ -83,6 +84,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 513250d8fb0c..755a3329f3c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -29,7 +29,8 @@ "3.9", "3.10", "3.11", - "3.12" + "3.12", + "3.13", ] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -39,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.12" +DEFAULT_PYTHON_VERSION = "3.13" nox.sessions = [ "unit", @@ -61,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -94,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -129,7 +130,8 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - "grpcio", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py 
b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 760f590ed18b..220725b65cc0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -44,6 +44,7 @@ # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { @@ -87,6 +88,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf From 588b9155371b6b3028afc85c1d0a9dbc2c1fe425 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 11:54:55 -0400 Subject: [PATCH 1208/1339] chore(main): release 1.20.0 (#2227) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 4af1c354921d..81ca0b8d0140 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.20.0](https://github.com/googleapis/gapic-generator-python/compare/v1.19.1...v1.20.0) (2024-10-23) + + +### Features + +* Add support for Python 3.13 ([#2215](https://github.com/googleapis/gapic-generator-python/issues/2215)) ([4e1f9c6](https://github.com/googleapis/gapic-generator-python/commit/4e1f9c623065e5917dbd1d2178228776b7ea536d)) + + +### Bug Fixes + +* Added underscores in services/types in index.rst.j2 ([#2232](https://github.com/googleapis/gapic-generator-python/issues/2232)) ([f2053ee](https://github.com/googleapis/gapic-generator-python/commit/f2053ee04127f1f0d23fd04438ee4607ee1ce76c)) +* Allow `google-cloud-kms` 3.x ([#2226](https://github.com/googleapis/gapic-generator-python/issues/2226)) ([5e07501](https://github.com/googleapis/gapic-generator-python/commit/5e075016a2119782e611cd51335fa0af7e4c18c2)) + ## [1.19.1](https://github.com/googleapis/gapic-generator-python/compare/v1.19.0...v1.19.1) (2024-10-10) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index b539eafd01cc..36927be00664 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API 
Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.19.1" +version = "1.20.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 265858d2ec282aa25388283fe07b463ef0f3735f Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 25 Oct 2024 16:54:36 -0400 Subject: [PATCH 1209/1339] fix: allow google-cloud-documentai 3.x (#2237) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index aefbe76b8506..de43f16f5b95 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -11,7 +11,7 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, - ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "3.0.0dev"}, + ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "4.0.0dev"}, ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "4.0.0dev"}, ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, ("google", "iam", "v1"): 
{"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, From acf866f8da0711a0947c4777ebd91ceecae6a0a4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 09:42:40 -0400 Subject: [PATCH 1210/1339] chore(main): release 1.20.1 (#2238) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 81ca0b8d0140..bc86df8872f5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.20.1](https://github.com/googleapis/gapic-generator-python/compare/v1.20.0...v1.20.1) (2024-10-25) + + +### Bug Fixes + +* Allow google-cloud-documentai 3.x ([#2237](https://github.com/googleapis/gapic-generator-python/issues/2237)) ([946adf1](https://github.com/googleapis/gapic-generator-python/commit/946adf16d8a1cf83019eaa9b6a9e8b1baf95159d)) + ## [1.20.0](https://github.com/googleapis/gapic-generator-python/compare/v1.19.1...v1.20.0) (2024-10-23) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 36927be00664..7a1128bac4c9 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.20.0" +version = "1.20.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 7ffa74ee6886201c6f55fa4583ecd2a99767b607 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 29 Oct 2024 10:23:48 -0400 Subject: 
[PATCH 1211/1339] build: release script update (#2235) Co-authored-by: Owl Bot Co-authored-by: ohmayr --- packages/gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/docs/common.cfg | 6 +++--- packages/gapic-generator/.kokoro/release.sh | 2 +- packages/gapic-generator/.kokoro/release/common.cfg | 8 +------- 4 files changed, 7 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 06ea02039ef9..13fc69ce9fc9 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:68ce7dace6a9481b4c94d73308572f20766031dc27e535f2afb8a84cd60feb44 -# created: 2024-10-10 + digest: sha256:5efdf8d38e5a22c1ec9e5541cbdfde56399bdffcb6f531183f84ac66052a8024 +# created: 2024-10-25 diff --git a/packages/gapic-generator/.kokoro/docs/common.cfg b/packages/gapic-generator/.kokoro/docs/common.cfg index dcab742e0888..d3e42b1bb714 100644 --- a/packages/gapic-generator/.kokoro/docs/common.cfg +++ b/packages/gapic-generator/.kokoro/docs/common.cfg @@ -30,9 +30,9 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - # Push non-cloud library docs to `docs-staging-v2-staging` instead of the + # Push non-cloud library docs to `docs-staging-v2-dev` instead of the # Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2-staging" + value: "docs-staging-v2-dev" } # It will upload the docker image after successful builds. 
@@ -64,4 +64,4 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} diff --git a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh index 9672d6135b06..1d09c988b454 100755 --- a/packages/gapic-generator/.kokoro/release.sh +++ b/packages/gapic-generator/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") cd github/gapic-generator-python python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/gapic-generator/.kokoro/release/common.cfg b/packages/gapic-generator/.kokoro/release/common.cfg index d23792661793..68cddd5f08da 100644 --- a/packages/gapic-generator/.kokoro/release/common.cfg +++ b/packages/gapic-generator/.kokoro/release/common.cfg @@ -28,17 +28,11 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-2" + keyname: "google-cloud-pypi-token-keystore-3" } } } -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - # Store the packages we uploaded to PyPI. That way, we have a record of exactly # what we published, which we can use to generate SBOMs and attestations. 
action { From 0fdc38ef61c380e9c94d61b6dc5759d5ac9da0d9 Mon Sep 17 00:00:00 2001 From: Timur Sadykov Date: Wed, 30 Oct 2024 12:05:44 -0700 Subject: [PATCH 1212/1339] fix: disable universe-domain validation (#2236) Co-authored-by: Anthonios Partheniou Co-authored-by: ohmayr --- .../%sub/services/%service/client.py.j2 | 33 +--------- .../%name_%version/%sub/test_%service.py.j2 | 65 ------------------- .../asset_v1/services/asset_service/client.py | 33 +--------- .../unit/gapic/asset_v1/test_asset_service.py | 61 ----------------- .../services/iam_credentials/client.py | 33 +--------- .../credentials_v1/test_iam_credentials.py | 61 ----------------- .../eventarc_v1/services/eventarc/client.py | 33 +--------- .../unit/gapic/eventarc_v1/test_eventarc.py | 61 ----------------- .../services/config_service_v2/client.py | 33 +--------- .../services/logging_service_v2/client.py | 33 +--------- .../services/metrics_service_v2/client.py | 33 +--------- .../logging_v2/test_config_service_v2.py | 60 ----------------- .../logging_v2/test_logging_service_v2.py | 60 ----------------- .../logging_v2/test_metrics_service_v2.py | 60 ----------------- .../redis_v1/services/cloud_redis/client.py | 33 +--------- .../unit/gapic/redis_v1/test_cloud_redis.py | 61 ----------------- .../tests/system/test_universe_domain.py | 14 ++-- 17 files changed, 32 insertions(+), 735 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 74803f51f30b..4bb43f83673c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -400,33 +400,6 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): if len(universe_domain.strip()) == 0: raise 
ValueError("Universe Domain cannot be an empty string.") return universe_domain - - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = {{ service.client_name }}._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -437,9 +410,9 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - {{ service.client_name }}._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 17603ca6d6e2..b9301094d703 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -268,71 +268,6 @@ def test__get_universe_domain(): {{ service.client_name }}._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - {% if 'grpc' in opts.transport %} - ({{ service.client_name }}, transports.{{ service.grpc_transport_name }}, "grpc"), - {% endif %} - {% if 'rest' in opts.transport %} - ({{ service.client_name }}, transports.{{ service.rest_transport_name }}, "rest"), - {% endif %} -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 8b3fc48e5b68..430968420fd1 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -460,33 +460,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = AssetServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -496,9 +469,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - AssetServiceClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index fcf9a01fbc1b..08a92bf22270 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -192,67 +192,6 @@ def test__get_universe_domain(): AssetServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (AssetServiceClient, transports.AssetServiceGrpcTransport, "grpc"), - (AssetServiceClient, transports.AssetServiceRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index b9597ea22664..ecdde65b21c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -397,33 +397,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and 
credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = IAMCredentialsClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -433,9 +406,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - IAMCredentialsClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 94f791935d71..c26abcd41291 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -182,67 +182,6 @@ def test__get_universe_domain(): IAMCredentialsClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (IAMCredentialsClient, transports.IAMCredentialsGrpcTransport, "grpc"), - (IAMCredentialsClient, transports.IAMCredentialsRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 64d7613c6e75..70a85278eeb2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -505,33 +505,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = EventarcClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -541,9 +514,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - EventarcClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index fac54c7318cc..ee6b68ab79b3 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -202,67 +202,6 @@ def test__get_universe_domain(): EventarcClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (EventarcClient, transports.EventarcGrpcTransport, "grpc"), - (EventarcClient, transports.EventarcRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 67b84604b9d5..b42577271de7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -456,33 +456,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = ConfigServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -492,9 +465,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - ConfigServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 7cfbc3b0babd..1f1ebda6b8bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -387,33 +387,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. 
@@ -423,9 +396,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - LoggingServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 2680b5970767..72ad42b1f217 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -388,33 +388,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. 
- """ - - default_universe = MetricsServiceV2Client._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -424,9 +397,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - MetricsServiceV2Client._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 9afec2b18972..ed05e4954041 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -182,66 +182,6 @@ def test__get_universe_domain(): ConfigServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index e68ade7f1e0e..88d4fffd09fd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -183,66 +183,6 @@ def test__get_universe_domain(): LoggingServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. 
Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 6bcf51b2f5ce..71e91aa79234 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -181,66 +181,6 @@ def test__get_universe_domain(): MetricsServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 046c051a3837..80d28b6d9a57 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -424,33 +424,6 @@ def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_ raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes(client_universe: str, - credentials: ga_credentials.Credentials) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = CloudRedisClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError("The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). 
" - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default.") - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -460,9 +433,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = (self._is_universe_domain_valid or - CloudRedisClient._compare_universes(self.universe_domain, self.transport._credentials)) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 920b97dfca36..3b867230b761 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -200,67 +200,6 @@ def test__get_universe_domain(): CloudRedisClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), - (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), -]) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. 
- assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - transport=transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [int(part) for part in google.auth.__version__.split(".")[0:2]] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class( - transport=transport_class(credentials=credentials) - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test the case when there is a universe mismatch from the client. 
- # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [int(part) for part in api_core_version.__version__.split(".")[0:2]] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class(client_options={"universe_domain": "bar.com"}, transport=transport_class(credentials=ga_credentials.AnonymousCredentials(),)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert str(excinfo.value) == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py index 55cce87ca3ba..df8c1973054e 100644 --- a/packages/gapic-generator/tests/system/test_universe_domain.py +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -84,9 +84,11 @@ def test_universe_domain_validation_fail(parametrized_echo, channel_creator, tra assert parametrized_echo.api_endpoint == "localhost:7469" else: assert parametrized_echo.api_endpoint == transport_endpoint - with pytest.raises(ValueError) as err: - parametrized_echo.echo({ - 'content': 'Universe validation failed!' - }) - assert str( - err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ + # NOTE (b/349488459): universe validation is disabled until further notice. + # with pytest.raises(ValueError) as err: + # parametrized_echo.echo({ + # 'content': 'Universe validation failed!' + # }) + # assert str( + # err.value) == f"The configured universe domain ({client_universe}) does not match the universe domain found in the credentials ({credential_universe}). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." From 7c22dcef0574abba0df547d6aebccfc419e9a562 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 30 Oct 2024 15:28:20 -0400 Subject: [PATCH 1213/1339] chore(main): release 1.20.2 (#2240) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index bc86df8872f5..10340c68405a 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.20.2](https://github.com/googleapis/gapic-generator-python/compare/v1.20.1...v1.20.2) (2024-10-30) + + +### Bug Fixes + +* Disable universe-domain validation ([#2236](https://github.com/googleapis/gapic-generator-python/issues/2236)) ([ecaa41e](https://github.com/googleapis/gapic-generator-python/commit/ecaa41e7984a8aa2244138cce99cb91a87872c54)) + ## [1.20.1](https://github.com/googleapis/gapic-generator-python/compare/v1.20.0...v1.20.1) (2024-10-25) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 7a1128bac4c9..0a7a1ee290f2 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = 
"https://github.com/googleapis/gapic-generator-python" -version = "1.20.1" +version = "1.20.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From b2ed385b72c33152b539a985727a600a008b9972 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Nov 2024 14:47:22 -0500 Subject: [PATCH 1214/1339] chore: update dependencies (#2248) --- packages/gapic-generator/requirements.txt | 822 +++++++++++----------- 1 file changed, 407 insertions(+), 415 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 5003841aceea..f0c8f1deb65f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,106 +8,91 @@ aiohappyeyeballs==2.4.3 \ --hash=sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586 \ --hash=sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572 # via aiohttp -aiohttp==3.10.9 \ - --hash=sha256:02d1d6610588bcd743fae827bd6f2e47e0d09b346f230824b4c6fb85c6065f9c \ - --hash=sha256:03690541e4cc866eef79626cfa1ef4dd729c5c1408600c8cb9e12e1137eed6ab \ - --hash=sha256:0bc059ecbce835630e635879f5f480a742e130d9821fbe3d2f76610a6698ee25 \ - --hash=sha256:0c21c82df33b264216abffff9f8370f303dab65d8eee3767efbbd2734363f677 \ - --hash=sha256:1298b854fd31d0567cbb916091be9d3278168064fca88e70b8468875ef9ff7e7 \ - --hash=sha256:1321658f12b6caffafdc35cfba6c882cb014af86bef4e78c125e7e794dfb927b \ - --hash=sha256:143b0026a9dab07a05ad2dd9e46aa859bffdd6348ddc5967b42161168c24f857 \ - --hash=sha256:16e6a51d8bc96b77f04a6764b4ad03eeef43baa32014fce71e882bd71302c7e4 \ - --hash=sha256:172ad884bb61ad31ed7beed8be776eb17e7fb423f1c1be836d5cb357a096bf12 \ - --hash=sha256:17c272cfe7b07a5bb0c6ad3f234e0c336fb53f3bf17840f66bd77b5815ab3d16 \ - --hash=sha256:1a0ee6c0d590c917f1b9629371fce5f3d3f22c317aa96fbdcce3260754d7ea21 \ - 
--hash=sha256:2746d8994ebca1bdc55a1e998feff4e94222da709623bb18f6e5cfec8ec01baf \ - --hash=sha256:2914caa46054f3b5ff910468d686742ff8cff54b8a67319d75f5d5945fd0a13d \ - --hash=sha256:2bbf94d4a0447705b7775417ca8bb8086cc5482023a6e17cdc8f96d0b1b5aba6 \ - --hash=sha256:2bd9f3eac515c16c4360a6a00c38119333901b8590fe93c3257a9b536026594d \ - --hash=sha256:2c33fa6e10bb7ed262e3ff03cc69d52869514f16558db0626a7c5c61dde3c29f \ - --hash=sha256:2d37f4718002863b82c6f391c8efd4d3a817da37030a29e2682a94d2716209de \ - --hash=sha256:3668d0c2a4d23fb136a753eba42caa2c0abbd3d9c5c87ee150a716a16c6deec1 \ - --hash=sha256:36d4fba838be5f083f5490ddd281813b44d69685db910907636bc5dca6322316 \ - --hash=sha256:40ff5b7660f903dc587ed36ef08a88d46840182d9d4b5694e7607877ced698a1 \ - --hash=sha256:42775de0ca04f90c10c5c46291535ec08e9bcc4756f1b48f02a0657febe89b10 \ - --hash=sha256:482c85cf3d429844396d939b22bc2a03849cb9ad33344689ad1c85697bcba33a \ - --hash=sha256:4e6cb75f8ddd9c2132d00bc03c9716add57f4beff1263463724f6398b813e7eb \ - --hash=sha256:4edc3fd701e2b9a0d605a7b23d3de4ad23137d23fc0dbab726aa71d92f11aaaf \ - --hash=sha256:4fd16b30567c5b8e167923be6e027eeae0f20cf2b8a26b98a25115f28ad48ee0 \ - --hash=sha256:5002a02c17fcfd796d20bac719981d2fca9c006aac0797eb8f430a58e9d12431 \ - --hash=sha256:51d0a4901b27272ae54e42067bc4b9a90e619a690b4dc43ea5950eb3070afc32 \ - --hash=sha256:558b3d223fd631ad134d89adea876e7fdb4c93c849ef195049c063ada82b7d08 \ - --hash=sha256:5c070430fda1a550a1c3a4c2d7281d3b8cfc0c6715f616e40e3332201a253067 \ - --hash=sha256:5f392ef50e22c31fa49b5a46af7f983fa3f118f3eccb8522063bee8bfa6755f8 \ - --hash=sha256:60555211a006d26e1a389222e3fab8cd379f28e0fbf7472ee55b16c6c529e3a6 \ - --hash=sha256:608cecd8d58d285bfd52dbca5b6251ca8d6ea567022c8a0eaae03c2589cd9af9 \ - --hash=sha256:60ad5b8a7452c0f5645c73d4dad7490afd6119d453d302cd5b72b678a85d6044 \ - --hash=sha256:63649309da83277f06a15bbdc2a54fbe75efb92caa2c25bb57ca37762789c746 \ - --hash=sha256:6ebdc3b3714afe1b134b3bbeb5f745eed3ecbcff92ab25d80e4ef299e83a5465 \ - 
--hash=sha256:6f3c6648aa123bcd73d6f26607d59967b607b0da8ffcc27d418a4b59f4c98c7c \ - --hash=sha256:7003f33f5f7da1eb02f0446b0f8d2ccf57d253ca6c2e7a5732d25889da82b517 \ - --hash=sha256:776e9f3c9b377fcf097c4a04b241b15691e6662d850168642ff976780609303c \ - --hash=sha256:85711eec2d875cd88c7eb40e734c4ca6d9ae477d6f26bd2b5bb4f7f60e41b156 \ - --hash=sha256:87d1e4185c5d7187684d41ebb50c9aeaaaa06ca1875f4c57593071b0409d2444 \ - --hash=sha256:8a3f063b41cc06e8d0b3fcbbfc9c05b7420f41287e0cd4f75ce0a1f3d80729e6 \ - --hash=sha256:8b3fb28a9ac8f2558760d8e637dbf27aef1e8b7f1d221e8669a1074d1a266bb2 \ - --hash=sha256:8bd9125dd0cc8ebd84bff2be64b10fdba7dc6fd7be431b5eaf67723557de3a31 \ - --hash=sha256:8be1a65487bdfc285bd5e9baf3208c2132ca92a9b4020e9f27df1b16fab998a9 \ - --hash=sha256:8cc0d13b4e3b1362d424ce3f4e8c79e1f7247a00d792823ffd640878abf28e56 \ - --hash=sha256:8d9d10d10ec27c0d46ddaecc3c5598c4db9ce4e6398ca872cdde0525765caa2f \ - --hash=sha256:8debb45545ad95b58cc16c3c1cc19ad82cffcb106db12b437885dbee265f0ab5 \ - --hash=sha256:91aa966858593f64c8a65cdefa3d6dc8fe3c2768b159da84c1ddbbb2c01ab4ef \ - --hash=sha256:9331dd34145ff105177855017920dde140b447049cd62bb589de320fd6ddd582 \ - --hash=sha256:99f9678bf0e2b1b695e8028fedac24ab6770937932eda695815d5a6618c37e04 \ - --hash=sha256:9fdf5c839bf95fc67be5794c780419edb0dbef776edcfc6c2e5e2ffd5ee755fa \ - --hash=sha256:a14e4b672c257a6b94fe934ee62666bacbc8e45b7876f9dd9502d0f0fe69db16 \ - --hash=sha256:a19caae0d670771ea7854ca30df76f676eb47e0fd9b2ee4392d44708f272122d \ - --hash=sha256:a35ed3d03910785f7d9d6f5381f0c24002b2b888b298e6f941b2fc94c5055fcd \ - --hash=sha256:a61df62966ce6507aafab24e124e0c3a1cfbe23c59732987fc0fd0d71daa0b88 \ - --hash=sha256:a6e00c8a92e7663ed2be6fcc08a2997ff06ce73c8080cd0df10cc0321a3168d7 \ - --hash=sha256:ac3196952c673822ebed8871cf8802e17254fff2a2ed4835d9c045d9b88c5ec7 \ - --hash=sha256:ac74e794e3aee92ae8f571bfeaa103a141e409863a100ab63a253b1c53b707eb \ - --hash=sha256:ad3675c126f2a95bde637d162f8231cff6bc0bc9fbe31bd78075f9ff7921e322 \ - 
--hash=sha256:aeebd3061f6f1747c011e1d0b0b5f04f9f54ad1a2ca183e687e7277bef2e0da2 \ - --hash=sha256:ba1a599255ad6a41022e261e31bc2f6f9355a419575b391f9655c4d9e5df5ff5 \ - --hash=sha256:bbdb8def5268f3f9cd753a265756f49228a20ed14a480d151df727808b4531dd \ - --hash=sha256:c2555e4949c8d8782f18ef20e9d39730d2656e218a6f1a21a4c4c0b56546a02e \ - --hash=sha256:c2695c61cf53a5d4345a43d689f37fc0f6d3a2dc520660aec27ec0f06288d1f9 \ - --hash=sha256:c2b627d3c8982691b06d89d31093cee158c30629fdfebe705a91814d49b554f8 \ - --hash=sha256:c46131c6112b534b178d4e002abe450a0a29840b61413ac25243f1291613806a \ - --hash=sha256:c54dc329cd44f7f7883a9f4baaefe686e8b9662e2c6c184ea15cceee587d8d69 \ - --hash=sha256:c7d7cafc11d70fdd8801abfc2ff276744ae4cb39d8060b6b542c7e44e5f2cfc2 \ - --hash=sha256:cb0b2d5d51f96b6cc19e6ab46a7b684be23240426ae951dcdac9639ab111b45e \ - --hash=sha256:d15a29424e96fad56dc2f3abed10a89c50c099f97d2416520c7a543e8fddf066 \ - --hash=sha256:d1f5c9169e26db6a61276008582d945405b8316aae2bb198220466e68114a0f5 \ - --hash=sha256:d271f770b52e32236d945911b2082f9318e90ff835d45224fa9e28374303f729 \ - --hash=sha256:d646fdd74c25bbdd4a055414f0fe32896c400f38ffbdfc78c68e62812a9e0257 \ - --hash=sha256:d6e395c3d1f773cf0651cd3559e25182eb0c03a2777b53b4575d8adc1149c6e9 \ - --hash=sha256:d7c071235a47d407b0e93aa6262b49422dbe48d7d8566e1158fecc91043dd948 \ - --hash=sha256:d97273a52d7f89a75b11ec386f786d3da7723d7efae3034b4dda79f6f093edc1 \ - --hash=sha256:dcf354661f54e6a49193d0b5653a1b011ba856e0b7a76bda2c33e4c6892f34ea \ - --hash=sha256:e3e7fabedb3fe06933f47f1538df7b3a8d78e13d7167195f51ca47ee12690373 \ - --hash=sha256:e525b69ee8a92c146ae5b4da9ecd15e518df4d40003b01b454ad694a27f498b5 \ - --hash=sha256:e709d6ac598c5416f879bb1bae3fd751366120ac3fa235a01de763537385d036 \ - --hash=sha256:e83dfefb4f7d285c2d6a07a22268344a97d61579b3e0dce482a5be0251d672ab \ - --hash=sha256:e86260b76786c28acf0b5fe31c8dca4c2add95098c709b11e8c35b424ebd4f5b \ - --hash=sha256:e883b61b75ca6efc2541fcd52a5c8ccfe288b24d97e20ac08fdf343b8ac672ea \ - 
--hash=sha256:f0a44bb40b6aaa4fb9a5c1ee07880570ecda2065433a96ccff409c9c20c1624a \ - --hash=sha256:f82ace0ec57c94aaf5b0e118d4366cff5889097412c75aa14b4fd5fc0c44ee3e \ - --hash=sha256:f9ca09414003c0e96a735daa1f071f7d7ed06962ef4fa29ceb6c80d06696d900 \ - --hash=sha256:fa430b871220dc62572cef9c69b41e0d70fcb9d486a4a207a5de4c1f25d82593 \ - --hash=sha256:fc262c3df78c8ff6020c782d9ce02e4bcffe4900ad71c0ecdad59943cba54442 \ - --hash=sha256:fcd546782d03181b0b1d20b43d612429a90a68779659ba8045114b867971ab71 \ - --hash=sha256:fd4ceeae2fb8cabdd1b71c82bfdd39662473d3433ec95b962200e9e752fb70d0 \ - --hash=sha256:fec5fac7aea6c060f317f07494961236434928e6f4374e170ef50b3001e14581 +aiohttp==3.11.0 \ + --hash=sha256:024409c1b1d6076d0ed933dcebd7e4fc6f3320a227bfa0c1b6b93a8b5a146f04 \ + --hash=sha256:04b24497b3baf15035730de5f207ade88a67d4483a5f16ced7ece348933a5b47 \ + --hash=sha256:08474e71772a516ba2e2167b4707af8361d2c452b3d8a5364c984f4867869499 \ + --hash=sha256:0e7a0762cc29cd3acd01a4d2b547b3af7956ad230ebb80b529a8e4f3e4740fe8 \ + --hash=sha256:104deb7873681273c5daa13c41924693df394043a118dae90387d35bc5531788 \ + --hash=sha256:104ea21994b1403e4c1b398866f1187c1694fa291314ad7216ec1d8ec6b49f38 \ + --hash=sha256:113bf06b029143e94a47c4f36e11a8b7e396e9d1f1fc8cea58e6b7e370cfed38 \ + --hash=sha256:12071dd2cc95ba81e0f2737bebcb98b2a8656015e87772e84e8fb9e635b5da6e \ + --hash=sha256:170fb2324826bb9f08055a8291f42192ae5ee2f25b2966c8f0f4537c61d73a7b \ + --hash=sha256:21b4545e8d96870da9652930c5198366605ff8f982757030e2148cf341e5746b \ + --hash=sha256:229ae13959a5f499d90ffbb4b9eac2255d8599315027d6f7c22fa9803a94d5b1 \ + --hash=sha256:2ec5efbc872b00ddd85e3904059d274f284cff314e13f48776050ca2c58f451d \ + --hash=sha256:31b91ff3a1fcb206a1fa76e0de1f08c9ffb1dc0deb7296fa2618adfe380fc676 \ + --hash=sha256:329f5059e0bf6983dceebac8e6ed20e75eaff6163b3414f4a4cb59e0d7037672 \ + --hash=sha256:37f8cf3c43f292d9bb3e6760476c2b55b9663a581fad682a586a410c43a7683e \ + 
--hash=sha256:3e1ed8d152cccceffb1ee7a2ac227c16372e453fb11b3aeaa56783049b85d3f6 \ + --hash=sha256:3ed360d6672a9423aad39902a4e9fe305464d20ed7931dbdba30a4625782d875 \ + --hash=sha256:40dc9446cff326672fcbf93efdb8ef7e949824de1097624efe4f61ac7f0d2c43 \ + --hash=sha256:4d218d3eca40196384ad3b481309c56fd60e664128885d1734da0a8aa530d433 \ + --hash=sha256:4e4e155968040e32c124a89852a1a5426d0e920a35f4331e1b3949037bfe93a3 \ + --hash=sha256:4f698aa61879df64425191d41213dfd99efdc1627e6398e6d7aa5c312fac9702 \ + --hash=sha256:508cfcc99534b1282595357592d8367b44392b21f6eb5d4dc021f8d0d809e94d \ + --hash=sha256:577c7429f8869fa30186fc2c9eee64d75a30b51b61f26aac9725866ae5985cfd \ + --hash=sha256:57e17c6d71f2dc857a8a1d09be1be7802e35d90fb4ba4b06cf1aab6414a57894 \ + --hash=sha256:5ecc2fb1a0a9d48cf773add34196cddf7e488e48e9596e090849751bf43098f4 \ + --hash=sha256:600b1d9f86a130131915e2f2127664311b33902c486b21a747d626f5144b4471 \ + --hash=sha256:62502b8ffee8c6a4b5c6bf99d1de277d42bf51b2fb713975d9b63b560150b7ac \ + --hash=sha256:62a2f5268b672087c45b33479ba1bb1d5a48c6d76c133cfce3a4f77410c200d1 \ + --hash=sha256:6362f50a6f0e5482c4330d2151cb682779230683da0e155c15ec9fc58cb50b6a \ + --hash=sha256:6533dd06df3d17d1756829b68b365b1583929b54082db8f65083a4184bf68322 \ + --hash=sha256:6c5a6958f4366496004cf503d847093d464814543f157ef3b738bbf604232415 \ + --hash=sha256:72cd984f7f14e8c01b3e38f18f39ea85dba84e52ea05e37116ba5e2a72eef396 \ + --hash=sha256:76d6ee8bb132f8ee0fcb0e205b4708ddb6fba524eb515ee168113063d825131b \ + --hash=sha256:7867d0808614f04e78e0a8d5a2c1f8ac6bc626a0c0e2f62be48be6b749e2f8b2 \ + --hash=sha256:7d664e5f937c08adb7908ea9f391fbf2928a9b09cb412ac0aba602bde9e499e4 \ + --hash=sha256:85ae6f182be72c3531915e90625cc65afce4df8a0fc4988bd52d8a5d5faaeb68 \ + --hash=sha256:89a96a0696dc67d548f69cb518c581a7a33cc1f26ab42229dea1709217c9d926 \ + --hash=sha256:8b323b5d3aef7dd811424c269322eec58a977c0c8152e650159e47210d900504 \ + --hash=sha256:8c47a0ba6c2b3d3e5715f8338d657badd21f778c6be16701922c65521c5ecfc9 \ + 
--hash=sha256:8fef105113d56e817cb9bcc609667ee461321413a7b972b03f5b4939f40f307c \ + --hash=sha256:900ff74d78eb580ae4aa5883242893b123a0c442a46570902500f08d6a7e6696 \ + --hash=sha256:9095580806d9ed07c0c29b23364a0b1fb78258ef9f4bddf7e55bac0e475d4edf \ + --hash=sha256:91d3991fad8b65e5dbc13cd95669ea689fe0a96ff63e4e64ac24ed724e4f8103 \ + --hash=sha256:9231d610754724273a6ac05a1f177979490bfa6f84d49646df3928af2e88cfd5 \ + --hash=sha256:97056d3422594e0787733ac4c45bef58722d452f4dc6615fee42f59fe51707dd \ + --hash=sha256:a896059b6937d1a22d8ee8377cdcd097bd26cd8c653b8f972051488b9baadee9 \ + --hash=sha256:aabc4e92cb153636d6be54e84dad1b252ddb9aebe077942b6dcffe5e468d476a \ + --hash=sha256:ad14cdc0fba4df31c0f6e06c21928c5b924725cbf60d0ccc5f6e7132636250e9 \ + --hash=sha256:ae36ae52b0c22fb69fb8b744eff82a20db512a29eafc6e3a4ab43b17215b219d \ + --hash=sha256:b3e4fb7f5354d39490d8209aefdf5830b208d01c7293a2164e404312c3d8bc55 \ + --hash=sha256:b40c304ab01e89ad0aeeecf91bbaa6ae3b00e27b796c9e8d50b71a4a7e885cc8 \ + --hash=sha256:b7349205bb163318dcc102329d30be59a647a3d24c82c3d91ed35b7e7301ea7e \ + --hash=sha256:b8b95a63a8e8b5f0464bd8b1b0d59d2bec98a59b6aacc71e9be23df6989b3dfb \ + --hash=sha256:bb2e82e515e268b965424ecabebd91834a41b36260b6ef5db015ee12ddb28ef3 \ + --hash=sha256:c0315978b2a4569e03fb59100f6a7e7d23f718a4521491f5c13d946d37549f3d \ + --hash=sha256:c1828e10c3a49e2b234b87600ecb68a92b8a8dcf8b99bca9447f16c4baaa1630 \ + --hash=sha256:c1c49bc393d854d4421ebc174a0a41f9261f50d3694d8ca277146cbbcfd24ee7 \ + --hash=sha256:c415b9601ff50709d6050c8a9281733a9b042b9e589265ac40305b875cf9c463 \ + --hash=sha256:c54c635d1f52490cde7ef3a423645167a8284e452a35405d5c7dc1242a8e75c9 \ + --hash=sha256:c5e6a1f8b0268ffa1c84d7c3558724956002ba8361176e76406233e704bbcffb \ + --hash=sha256:c98a596ac20e8980cc6f34c0c92a113e98eb08f3997c150064d26d2aeb043e5a \ + --hash=sha256:cd0834e4260eab78671b81d34f110fbaac449563e48d419cec0030d9a8e58693 \ + --hash=sha256:cdad66685fcf2ad14ce522cf849d4a025f4fd206d6cfc3f403d9873e4c243b03 \ + 
--hash=sha256:d1ea006426edf7e1299c52a58b0443158012f7a56fed3515164b60bfcb1503a9 \ + --hash=sha256:d33b4490026968bdc7f0729b9d87a3a6b1e09043557d2fc1c605c6072deb2f11 \ + --hash=sha256:d5cae4cd271e20b7ab757e966cc919186b9f02535418ab36c471a5377ef4deaa \ + --hash=sha256:dd505a1121ad5b666191840b7bd1d8cb917df2647deeca6f3474331b72452362 \ + --hash=sha256:e1668ef2f3a7ec9881f4b6a917e5f97c87a343fa6b0d5fc826b7b0297ddd0887 \ + --hash=sha256:e7bcfcede95531589295f56e924702cef7f9685c9e4e5407592e04ded6a65bf3 \ + --hash=sha256:ebf610c37df4f09c71c9bbf8309b4b459107e6fe889ac0d7e16f6e4ebd975f86 \ + --hash=sha256:f3bf5c132eb48002bcc3825702d241d35b4e9585009e65e9dcf9c4635d0b7424 \ + --hash=sha256:f40380c96dd407dfa84eb2d264e68aa47717b53bdbe210a59cc3c35a4635f195 \ + --hash=sha256:f57a0de48dda792629e7952d34a0c7b81ea336bb9b721391c7c58145b237fe55 \ + --hash=sha256:f6b925c7775ab857bdc1e52e1f5abcae7d18751c09b751aeb641a5276d9b990e \ + --hash=sha256:f8f0d79b923070f25674e4ea8f3d61c9d89d24d9598d50ff32c5b9b23c79a25b \ + --hash=sha256:feca9fafa4385aea6759c171cd25ea82f7375312fca04178dae35331be45e538 # via -r requirements.in aiosignal==1.3.1 \ --hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \ --hash=sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17 # via aiohttp -async-timeout==4.0.3 \ - --hash=sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f \ - --hash=sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028 +async-timeout==5.0.1 \ + --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ + --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 # via aiohttp attrs==24.2.0 \ --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ @@ -236,98 +221,113 @@ exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest 
-frozenlist==1.4.1 \ - --hash=sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7 \ - --hash=sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98 \ - --hash=sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad \ - --hash=sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5 \ - --hash=sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae \ - --hash=sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e \ - --hash=sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a \ - --hash=sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701 \ - --hash=sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d \ - --hash=sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6 \ - --hash=sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6 \ - --hash=sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106 \ - --hash=sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75 \ - --hash=sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868 \ - --hash=sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a \ - --hash=sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0 \ - --hash=sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1 \ - --hash=sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826 \ - --hash=sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec \ - --hash=sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6 \ - --hash=sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950 \ - --hash=sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19 \ - --hash=sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0 \ - 
--hash=sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8 \ - --hash=sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a \ - --hash=sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09 \ - --hash=sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86 \ - --hash=sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c \ - --hash=sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5 \ - --hash=sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b \ - --hash=sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b \ - --hash=sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d \ - --hash=sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0 \ - --hash=sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea \ - --hash=sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776 \ - --hash=sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a \ - --hash=sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897 \ - --hash=sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7 \ - --hash=sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09 \ - --hash=sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9 \ - --hash=sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe \ - --hash=sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd \ - --hash=sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742 \ - --hash=sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09 \ - --hash=sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0 \ - --hash=sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932 \ - --hash=sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1 \ - 
--hash=sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a \ - --hash=sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49 \ - --hash=sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d \ - --hash=sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7 \ - --hash=sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480 \ - --hash=sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89 \ - --hash=sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e \ - --hash=sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b \ - --hash=sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82 \ - --hash=sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb \ - --hash=sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068 \ - --hash=sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8 \ - --hash=sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b \ - --hash=sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb \ - --hash=sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2 \ - --hash=sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11 \ - --hash=sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b \ - --hash=sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc \ - --hash=sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0 \ - --hash=sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497 \ - --hash=sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17 \ - --hash=sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0 \ - --hash=sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2 \ - --hash=sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439 \ - 
--hash=sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5 \ - --hash=sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac \ - --hash=sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825 \ - --hash=sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887 \ - --hash=sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced \ - --hash=sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74 +frozenlist==1.5.0 \ + --hash=sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e \ + --hash=sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf \ + --hash=sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6 \ + --hash=sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a \ + --hash=sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d \ + --hash=sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f \ + --hash=sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28 \ + --hash=sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b \ + --hash=sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9 \ + --hash=sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2 \ + --hash=sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec \ + --hash=sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2 \ + --hash=sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c \ + --hash=sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336 \ + --hash=sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4 \ + --hash=sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d \ + --hash=sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b \ + 
--hash=sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c \ + --hash=sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10 \ + --hash=sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08 \ + --hash=sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942 \ + --hash=sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8 \ + --hash=sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f \ + --hash=sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10 \ + --hash=sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5 \ + --hash=sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6 \ + --hash=sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21 \ + --hash=sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c \ + --hash=sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d \ + --hash=sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923 \ + --hash=sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608 \ + --hash=sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de \ + --hash=sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17 \ + --hash=sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0 \ + --hash=sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f \ + --hash=sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641 \ + --hash=sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c \ + --hash=sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a \ + --hash=sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0 \ + --hash=sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9 \ + --hash=sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab \ + 
--hash=sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f \ + --hash=sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3 \ + --hash=sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a \ + --hash=sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784 \ + --hash=sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604 \ + --hash=sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d \ + --hash=sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5 \ + --hash=sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03 \ + --hash=sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e \ + --hash=sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953 \ + --hash=sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee \ + --hash=sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d \ + --hash=sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817 \ + --hash=sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3 \ + --hash=sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039 \ + --hash=sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f \ + --hash=sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9 \ + --hash=sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf \ + --hash=sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76 \ + --hash=sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba \ + --hash=sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171 \ + --hash=sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb \ + --hash=sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439 \ + --hash=sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631 \ + 
--hash=sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972 \ + --hash=sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d \ + --hash=sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869 \ + --hash=sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9 \ + --hash=sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411 \ + --hash=sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723 \ + --hash=sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2 \ + --hash=sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b \ + --hash=sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99 \ + --hash=sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e \ + --hash=sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840 \ + --hash=sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3 \ + --hash=sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb \ + --hash=sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3 \ + --hash=sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0 \ + --hash=sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca \ + --hash=sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45 \ + --hash=sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e \ + --hash=sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f \ + --hash=sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5 \ + --hash=sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307 \ + --hash=sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e \ + --hash=sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2 \ + --hash=sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778 \ + 
--hash=sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a \ + --hash=sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30 \ + --hash=sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a # via # aiohttp # aiosignal -google-api-core==2.21.0 \ - --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ - --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d +google-api-core==2.23.0 \ + --hash=sha256:2ceb087315e6af43f256704b871d99326b1f12a9d6ce99beaedec99ba26a0ace \ + --hash=sha256:c20100d4c4c41070cf365f1d8ddf5365915291b5eb11b83829fbd1c999b5122f # via -r requirements.in -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a +google-auth==2.36.0 \ + --hash=sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb \ + --hash=sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1 # via google-api-core -googleapis-common-protos[grpc]==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 +googleapis-common-protos[grpc]==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed # via # -r requirements.in # google-api-core @@ -336,62 +336,62 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.66.2 \ - --hash=sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd \ - --hash=sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604 \ - 
--hash=sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73 \ - --hash=sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3 \ - --hash=sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50 \ - --hash=sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6 \ - --hash=sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34 \ - --hash=sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249 \ - --hash=sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75 \ - --hash=sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8 \ - --hash=sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453 \ - --hash=sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8 \ - --hash=sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d \ - --hash=sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c \ - --hash=sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c \ - --hash=sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c \ - --hash=sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39 \ - --hash=sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01 \ - --hash=sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231 \ - --hash=sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae \ - --hash=sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a \ - --hash=sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d \ - --hash=sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987 \ - --hash=sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a \ - --hash=sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7 \ - --hash=sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7 \ - 
--hash=sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3 \ - --hash=sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b \ - --hash=sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf \ - --hash=sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8 \ - --hash=sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf \ - --hash=sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7 \ - --hash=sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839 \ - --hash=sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e \ - --hash=sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b \ - --hash=sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3 \ - --hash=sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee \ - --hash=sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54 \ - --hash=sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e \ - --hash=sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc \ - --hash=sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd \ - --hash=sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d \ - --hash=sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed \ - --hash=sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7 \ - --hash=sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4 \ - --hash=sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a \ - --hash=sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec \ - --hash=sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8 \ - --hash=sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd \ - --hash=sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c \ - 
--hash=sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46 \ - --hash=sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e \ - --hash=sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf \ - --hash=sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa \ - --hash=sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679 +grpcio==1.67.1 \ + --hash=sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04 \ + --hash=sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292 \ + --hash=sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955 \ + --hash=sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426 \ + --hash=sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65 \ + --hash=sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970 \ + --hash=sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e \ + --hash=sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab \ + --hash=sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953 \ + --hash=sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8 \ + --hash=sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085 \ + --hash=sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732 \ + --hash=sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f \ + --hash=sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af \ + --hash=sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78 \ + --hash=sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc \ + --hash=sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98 \ + --hash=sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e \ + 
--hash=sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f \ + --hash=sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e \ + --hash=sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3 \ + --hash=sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed \ + --hash=sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38 \ + --hash=sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb \ + --hash=sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5 \ + --hash=sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771 \ + --hash=sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc \ + --hash=sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb \ + --hash=sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8 \ + --hash=sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75 \ + --hash=sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f \ + --hash=sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f \ + --hash=sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb \ + --hash=sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8 \ + --hash=sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8 \ + --hash=sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311 \ + --hash=sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335 \ + --hash=sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62 \ + --hash=sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af \ + --hash=sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b \ + --hash=sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce \ + --hash=sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1 \ + 
--hash=sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f \ + --hash=sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0 \ + --hash=sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e \ + --hash=sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121 \ + --hash=sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744 \ + --hash=sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa \ + --hash=sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e \ + --hash=sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d \ + --hash=sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0 \ + --hash=sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d \ + --hash=sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46 \ + --hash=sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96 \ + --hash=sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba # via # googleapis-common-protos # grpc-google-iam-v1 @@ -446,68 +446,68 @@ libcst==1.5.0 \ --hash=sha256:d92c5ae2e2dc9356ad7e3d05077d9b7e5065423e45788fd86729c88729e45c6e \ --hash=sha256:fc80ea16c7d44e38f193e4d4ef7ff1e0ba72d8e60e8b61ac6f4c87f070a118bd # via -r requirements.in -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - 
--hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - 
--hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - 
--hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f +markupsafe==3.0.2 \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + 
--hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + 
--hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 # via # -r requirements.in # jinja2 @@ -607,9 +607,9 @@ multidict==6.1.0 \ # via # aiohttp # yarl -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + 
--hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via pytest pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ @@ -714,25 +714,27 @@ propcache==0.2.0 \ --hash=sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d \ --hash=sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016 \ --hash=sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504 - # via yarl -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via + # aiohttp + # yarl +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 # via # -r requirements.in # google-api-core -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d +protobuf==5.28.3 \ + 
--hash=sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24 \ + --hash=sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535 \ + --hash=sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b \ + --hash=sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548 \ + --hash=sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584 \ + --hash=sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b \ + --hash=sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36 \ + --hash=sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135 \ + --hash=sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868 \ + --hash=sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687 \ + --hash=sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed # via # -r requirements.in # google-api-core @@ -826,9 +828,9 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.1.0 \ + --hash=sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8 \ + --hash=sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391 # via pytest typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ @@ -838,97 +840,87 @@ urllib3==2.2.3 \ --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests -yarl==1.14.0 \ - --hash=sha256:047b258e00b99091b6f90355521f026238c63bd76dcf996d93527bb13320eefd \ - 
--hash=sha256:06ff23462398333c78b6f4f8d3d70410d657a471c2c5bbe6086133be43fc8f1a \ - --hash=sha256:07f9eaf57719d6721ab15805d85f4b01a5b509a0868d7320134371bcb652152d \ - --hash=sha256:0aa92e3e30a04f9462a25077db689c4ac5ea9ab6cc68a2e563881b987d42f16d \ - --hash=sha256:0cf21f46a15d445417de8fc89f2568852cf57fe8ca1ab3d19ddb24d45c0383ae \ - --hash=sha256:0fd7b941dd1b00b5f0acb97455fea2c4b7aac2dd31ea43fb9d155e9bc7b78664 \ - --hash=sha256:147e36331f6f63e08a14640acf12369e041e0751bb70d9362df68c2d9dcf0c87 \ - --hash=sha256:16a682a127930f3fc4e42583becca6049e1d7214bcad23520c590edd741d2114 \ - --hash=sha256:176110bff341b6730f64a1eb3a7070e12b373cf1c910a9337e7c3240497db76f \ - --hash=sha256:19268b4fec1d7760134f2de46ef2608c2920134fb1fa61e451f679e41356dc55 \ - --hash=sha256:1b16f6c75cffc2dc0616ea295abb0e1967601bd1fb1e0af6a1de1c6c887f3439 \ - --hash=sha256:1bfc25aa6a7c99cf86564210f79a0b7d4484159c67e01232b116e445b3036547 \ - --hash=sha256:1ca3894e9e9f72da93544f64988d9c052254a338a9f855165f37f51edb6591de \ - --hash=sha256:1dda53508df0de87b6e6b0a52d6718ff6c62a5aca8f5552748404963df639269 \ - --hash=sha256:217a782020b875538eebf3948fac3a7f9bbbd0fd9bf8538f7c2ad7489e80f4e8 \ - --hash=sha256:2192f718db4a8509f63dd6d950f143279211fa7e6a2c612edc17d85bf043d36e \ - --hash=sha256:29a84a46ec3ebae7a1c024c055612b11e9363a8a23238b3e905552d77a2bc51b \ - --hash=sha256:3007a5b75cb50140708420fe688c393e71139324df599434633019314ceb8b59 \ - --hash=sha256:30600ba5db60f7c0820ef38a2568bb7379e1418ecc947a0f76fd8b2ff4257a97 \ - --hash=sha256:337912bcdcf193ade64b9aae5a4017a0a1950caf8ca140362e361543c6773f21 \ - --hash=sha256:37001e5d4621cef710c8dc1429ca04e189e572f128ab12312eab4e04cf007132 \ - --hash=sha256:3d569f877ed9a708e4c71a2d13d2940cb0791da309f70bd970ac1a5c088a0a92 \ - --hash=sha256:4009def9be3a7e5175db20aa2d7307ecd00bbf50f7f0f989300710eee1d0b0b9 \ - --hash=sha256:46a9772a1efa93f9cd170ad33101c1817c77e0e9914d4fe33e2da299d7cf0f9b \ - --hash=sha256:47eede5d11d669ab3759b63afb70d28d5328c14744b8edba3323e27dc52d298d \ - 
--hash=sha256:498b3c55087b9d762636bca9b45f60d37e51d24341786dc01b81253f9552a607 \ - --hash=sha256:4e0d45ebf975634468682c8bec021618b3ad52c37619e5c938f8f831fa1ac5c0 \ - --hash=sha256:4f24f08b6c9b9818fd80612c97857d28f9779f0d1211653ece9844fc7b414df2 \ - --hash=sha256:55c144d363ad4626ca744556c049c94e2b95096041ac87098bb363dcc8635e8d \ - --hash=sha256:582cedde49603f139be572252a318b30dc41039bc0b8165f070f279e5d12187f \ - --hash=sha256:587c3cc59bc148a9b1c07a019346eda2549bc9f468acd2f9824d185749acf0a6 \ - --hash=sha256:5cd5dad8366e0168e0fd23d10705a603790484a6dbb9eb272b33673b8f2cce72 \ - --hash=sha256:5d02d700705d67e09e1f57681f758f0b9d4412eeb70b2eb8d96ca6200b486db3 \ - --hash=sha256:625f207b1799e95e7c823f42f473c1e9dbfb6192bd56bba8695656d92be4535f \ - --hash=sha256:659603d26d40dd4463200df9bfbc339fbfaed3fe32e5c432fe1dc2b5d4aa94b4 \ - --hash=sha256:689a99a42ee4583fcb0d3a67a0204664aa1539684aed72bdafcbd505197a91c4 \ - --hash=sha256:68ac1a09392ed6e3fd14be880d39b951d7b981fd135416db7d18a6208c536561 \ - --hash=sha256:6a615cad11ec3428020fb3c5a88d85ce1b5c69fd66e9fcb91a7daa5e855325dd \ - --hash=sha256:73bedd2be05f48af19f0f2e9e1353921ce0c83f4a1c9e8556ecdcf1f1eae4892 \ - --hash=sha256:742aef0a99844faaac200564ea6f5e08facb285d37ea18bd1a5acf2771f3255a \ - --hash=sha256:75ff4c819757f9bdb35de049a509814d6ce851fe26f06eb95a392a5640052482 \ - --hash=sha256:781e2495e408a81e4eaeedeb41ba32b63b1980dddf8b60dbbeff6036bcd35049 \ - --hash=sha256:7a9f917966d27f7ce30039fe8d900f913c5304134096554fd9bea0774bcda6d1 \ - --hash=sha256:7e2637d75e92763d1322cb5041573279ec43a80c0f7fbbd2d64f5aee98447b17 \ - --hash=sha256:8089d4634d8fa2b1806ce44fefa4979b1ab2c12c0bc7ef3dfa45c8a374811348 \ - --hash=sha256:816d24f584edefcc5ca63428f0b38fee00b39fe64e3c5e558f895a18983efe96 \ - --hash=sha256:8385ab36bf812e9d37cf7613999a87715f27ef67a53f0687d28c44b819df7cb0 \ - --hash=sha256:85cb3e40eaa98489f1e2e8b29f5ad02ee1ee40d6ce6b88d50cf0f205de1d9d2c \ - --hash=sha256:8648180b34faaea4aa5b5ca7e871d9eb1277033fa439693855cf0ea9195f85f1 \ - 
--hash=sha256:8892fa575ac9b1b25fae7b221bc4792a273877b9b56a99ee2d8d03eeb3dbb1d2 \ - --hash=sha256:88c7d9d58aab0724b979ab5617330acb1c7030b79379c8138c1c8c94e121d1b3 \ - --hash=sha256:8a2f8fb7f944bcdfecd4e8d855f84c703804a594da5123dd206f75036e536d4d \ - --hash=sha256:8f4e475f29a9122f908d0f1f706e1f2fc3656536ffd21014ff8a6f2e1b14d1d8 \ - --hash=sha256:8f50eb3837012a937a2b649ec872b66ba9541ad9d6f103ddcafb8231cfcafd22 \ - --hash=sha256:91d875f75fabf76b3018c5f196bf3d308ed2b49ddcb46c1576d6b075754a1393 \ - --hash=sha256:94b2bb9bcfd5be9d27004ea4398fb640373dd0c1a9e219084f42c08f77a720ab \ - --hash=sha256:9557c9322aaa33174d285b0c1961fb32499d65ad1866155b7845edc876c3c835 \ - --hash=sha256:95e16e9eaa2d7f5d87421b8fe694dd71606aa61d74b824c8d17fc85cc51983d1 \ - --hash=sha256:96952f642ac69075e44c7d0284528938fdff39422a1d90d3e45ce40b72e5e2d9 \ - --hash=sha256:985623575e5c4ea763056ffe0e2d63836f771a8c294b3de06d09480538316b13 \ - --hash=sha256:99ff3744f5fe48288be6bc402533b38e89749623a43208e1d57091fc96b783b9 \ - --hash=sha256:9abe80ae2c9d37c17599557b712e6515f4100a80efb2cda15f5f070306477cd2 \ - --hash=sha256:a152751af7ef7b5d5fa6d215756e508dd05eb07d0cf2ba51f3e740076aa74373 \ - --hash=sha256:a2e4725a08cb2b4794db09e350c86dee18202bb8286527210e13a1514dc9a59a \ - --hash=sha256:a56fbe3d7f3bce1d060ea18d2413a2ca9ca814eea7cedc4d247b5f338d54844e \ - --hash=sha256:ab3abc0b78a5dfaa4795a6afbe7b282b6aa88d81cf8c1bb5e394993d7cae3457 \ - --hash=sha256:b03384eed107dbeb5f625a99dc3a7de8be04fc8480c9ad42fccbc73434170b20 \ - --hash=sha256:b0547ab1e9345dc468cac8368d88ea4c5bd473ebc1d8d755347d7401982b5dd8 \ - --hash=sha256:b4c1ecba93e7826dc71ddba75fb7740cdb52e7bd0be9f03136b83f54e6a1f511 \ - --hash=sha256:b693c63e7e64b524f54aa4888403c680342d1ad0d97be1707c531584d6aeeb4f \ - --hash=sha256:b6d0147574ce2e7b812c989e50fa72bbc5338045411a836bd066ce5fc8ac0bce \ - --hash=sha256:b9cfef3f14f75bf6aba73a76caf61f9d00865912a04a4393c468a7ce0981b519 \ - --hash=sha256:b9f805e37ed16cc212fdc538a608422d7517e7faf539bedea4fe69425bc55d76 \ - 
--hash=sha256:bab03192091681d54e8225c53f270b0517637915d9297028409a2a5114ff4634 \ - --hash=sha256:bc24f968b82455f336b79bf37dbb243b7d76cd40897489888d663d4e028f5069 \ - --hash=sha256:c14b504a74e58e2deb0378b3eca10f3d076635c100f45b113c18c770b4a47a50 \ - --hash=sha256:c2089a9afef887664115f7fa6d3c0edd6454adaca5488dba836ca91f60401075 \ - --hash=sha256:c8ed4034f0765f8861620c1f2f2364d2e58520ea288497084dae880424fc0d9f \ - --hash=sha256:cd2660c01367eb3ef081b8fa0a5da7fe767f9427aa82023a961a5f28f0d4af6c \ - --hash=sha256:d8361c7d04e6a264481f0b802e395f647cd3f8bbe27acfa7c12049efea675bd1 \ - --hash=sha256:d9baec588f015d0ee564057aa7574313c53a530662ffad930b7886becc85abdf \ - --hash=sha256:dbd9ff43a04f8ffe8a959a944c2dca10d22f5f99fc6a459f49c3ebfb409309d9 \ - --hash=sha256:e3f8bfc1db82589ef965ed234b87de30d140db8b6dc50ada9e33951ccd8ec07a \ - --hash=sha256:e6a2c5c5bb2556dfbfffffc2bcfb9c235fd2b566d5006dfb2a37afc7e3278a07 \ - --hash=sha256:e749af6c912a7bb441d105c50c1a3da720474e8acb91c89350080dd600228f0e \ - --hash=sha256:e85d86527baebb41a214cc3b45c17177177d900a2ad5783dbe6f291642d4906f \ - --hash=sha256:ee2c68e4f2dd1b1c15b849ba1c96fac105fca6ffdb7c1e8be51da6fabbdeafb9 \ - --hash=sha256:f3ab950f8814f3b7b5e3eebc117986f817ec933676f68f0a6c5b2137dd7c9c69 \ - --hash=sha256:f4f4547944d4f5cfcdc03f3f097d6f05bbbc915eaaf80a2ee120d0e756de377d \ - --hash=sha256:f72a0d746d38cb299b79ce3d4d60ba0892c84bbc905d0d49c13df5bace1b65f8 \ - --hash=sha256:fc2c80bc87fba076e6cbb926216c27fba274dae7100a7b9a0983b53132dd99f2 \ - --hash=sha256:fe4d2536c827f508348d7b40c08767e8c7071614250927233bf0c92170451c0a +yarl==1.17.1 \ + --hash=sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac \ + --hash=sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47 \ + --hash=sha256:0b1794853124e2f663f0ea54efb0340b457f08d40a1cef78edfa086576179c91 \ + --hash=sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5 \ + 
--hash=sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df \ + --hash=sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3 \ + --hash=sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463 \ + --hash=sha256:16bca6678a83657dd48df84b51bd56a6c6bd401853aef6d09dc2506a78484c7b \ + --hash=sha256:1a3b91c44efa29e6c8ef8a9a2b583347998e2ba52c5d8280dbd5919c02dfc3b5 \ + --hash=sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74 \ + --hash=sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3 \ + --hash=sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3 \ + --hash=sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4 \ + --hash=sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0 \ + --hash=sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299 \ + --hash=sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2 \ + --hash=sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac \ + --hash=sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61 \ + --hash=sha256:380e6c38ef692b8fd5a0f6d1fa8774d81ebc08cfbd624b1bca62a4d4af2f9931 \ + --hash=sha256:3b74ff4767d3ef47ffe0cd1d89379dc4d828d4873e5528976ced3b44fe5b0a21 \ + --hash=sha256:3e844be8d536afa129366d9af76ed7cb8dfefec99f5f1c9e4f8ae542279a6dc3 \ + --hash=sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7 \ + --hash=sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96 \ + --hash=sha256:482c122b72e3c5ec98f11457aeb436ae4aecca75de19b3d1de7cf88bc40db82f \ + --hash=sha256:561c87fea99545ef7d692403c110b2f99dced6dff93056d6e04384ad3bc46243 \ + --hash=sha256:578d00c9b7fccfa1745a44f4eddfdc99d723d157dad26764538fbdda37209857 \ + --hash=sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f \ + --hash=sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca \ + 
--hash=sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488 \ + --hash=sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da \ + --hash=sha256:62a91aefff3d11bf60e5956d340eb507a983a7ec802b19072bb989ce120cd948 \ + --hash=sha256:64cc6e97f14cf8a275d79c5002281f3040c12e2e4220623b5759ea7f9868d6a5 \ + --hash=sha256:6f4c9156c4d1eb490fe374fb294deeb7bc7eaccda50e23775b2354b6a6739934 \ + --hash=sha256:7294e38f9aa2e9f05f765b28ffdc5d81378508ce6dadbe93f6d464a8c9594473 \ + --hash=sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7 \ + --hash=sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685 \ + --hash=sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e \ + --hash=sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147 \ + --hash=sha256:7fac95714b09da9278a0b52e492466f773cfe37651cf467a83a1b659be24bf71 \ + --hash=sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67 \ + --hash=sha256:846dd2e1243407133d3195d2d7e4ceefcaa5f5bf7278f0a9bda00967e6326b04 \ + --hash=sha256:84c063af19ef5130084db70ada40ce63a84f6c1ef4d3dbc34e5e8c4febb20822 \ + --hash=sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11 \ + --hash=sha256:8994b29c462de9a8fce2d591028b986dbbe1b32f3ad600b2d3e1c482c93abad6 \ + --hash=sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0 \ + --hash=sha256:8ee427208c675f1b6e344a1f89376a9613fc30b52646a04ac0c1f6587c7e46ec \ + --hash=sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda \ + --hash=sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556 \ + --hash=sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4 \ + --hash=sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c \ + --hash=sha256:a7ac5b4984c468ce4f4a553df281450df0a34aefae02e58d77a0847be8d1e11f \ + --hash=sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8 \ + 
--hash=sha256:ae3476e934b9d714aa8000d2e4c01eb2590eee10b9d8cd03e7983ad65dfbfcba \ + --hash=sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258 \ + --hash=sha256:b40d1bf6e6f74f7c0a567a9e5e778bbd4699d1d3d2c0fe46f4b717eef9e96b95 \ + --hash=sha256:b5c4804e4039f487e942c13381e6c27b4b4e66066d94ef1fae3f6ba8b953f383 \ + --hash=sha256:b5d6a6c9602fd4598fa07e0389e19fe199ae96449008d8304bf5d47cb745462e \ + --hash=sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938 \ + --hash=sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374 \ + --hash=sha256:c180ac742a083e109c1a18151f4dd8675f32679985a1c750d2ff806796165b55 \ + --hash=sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139 \ + --hash=sha256:c7e177c619342e407415d4f35dec63d2d134d951e24b5166afcdfd1362828e17 \ + --hash=sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217 \ + --hash=sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d \ + --hash=sha256:cc7c92c1baa629cb03ecb0c3d12564f172218fb1739f54bf5f3881844daadc6d \ + --hash=sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe \ + --hash=sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199 \ + --hash=sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d \ + --hash=sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8 \ + --hash=sha256:d6324274b4e0e2fa1b3eccb25997b1c9ed134ff61d296448ab8269f5ac068c4c \ + --hash=sha256:d8a8b74d843c2638f3864a17d97a4acda58e40d3e44b6303b8cc3d3c44ae2d29 \ + --hash=sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172 \ + --hash=sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860 \ + --hash=sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7 \ + --hash=sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170 \ + --hash=sha256:eb6dce402734575e1a8cc0bb1509afca508a400a57ce13d306ea2c663bad1138 \ + 
--hash=sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06 \ + --hash=sha256:f5efe0661b9fcd6246f27957f6ae1c0eb29bc60552820f01e970b4996e016004 \ + --hash=sha256:f9cbfbc5faca235fbdf531b93aa0f9f005ec7d267d9d738761a4d42b744ea159 \ + --hash=sha256:fbea1751729afe607d84acfd01efd95e3b31db148a181a441984ce9b3d3469da \ + --hash=sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988 \ + --hash=sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75 # via aiohttp From 93bbaa2bb7bab32fac8282d1f76bbda2823ce342 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Nov 2024 10:49:24 -0500 Subject: [PATCH 1215/1339] chore(python): update dependencies in .kokoro/docker/docs (#2247) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- .../.github/release-trigger.yml | 1 + .../.kokoro/docker/docs/requirements.txt | 20 +++++++++---------- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 13fc69ce9fc9..6301519a9a05 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5efdf8d38e5a22c1ec9e5541cbdfde56399bdffcb6f531183f84ac66052a8024 -# created: 2024-10-25 + digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 +# created: 2024-11-12T12:09:45.821174897Z diff --git a/packages/gapic-generator/.github/release-trigger.yml b/packages/gapic-generator/.github/release-trigger.yml index d4ca94189e16..21ed4182c81b 100644 --- a/packages/gapic-generator/.github/release-trigger.yml +++ b/packages/gapic-generator/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: gapic-generator-python diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index 66eacc82f041..8bb0764594b1 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in @@ -8,9 +8,9 @@ argcomplete==3.5.1 \ --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 # via nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ @@ -24,9 +24,9 @@ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ 
--hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via nox platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ @@ -36,7 +36,7 @@ tomli==2.0.2 \ --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 +virtualenv==20.27.1 \ + --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ + --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 # via nox From d8667cac1fa9fc48ce72d7f0892f09c326bc9479 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 19 Nov 2024 16:30:10 +0100 Subject: [PATCH 1216/1339] chore(deps): update all dependencies (#2252) Co-authored-by: Owl Bot --- packages/gapic-generator/Dockerfile | 2 +- packages/gapic-generator/WORKSPACE | 6 +- packages/gapic-generator/requirements.txt | 432 +++++++++++----------- 3 files changed, 220 insertions(+), 220 deletions(-) diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile index beda35897ba6..71a201f1b97f 100644 --- a/packages/gapic-generator/Dockerfile +++ b/packages/gapic-generator/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-slim +FROM python:3.13-slim # Install system packages. 
RUN apt-get update \ diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index caf3401818d4..9cfdc92627fc 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "b2340aa47faf7ef10a0328190319d3f3bee1b24f426d4ce8f4253b6f27ce16db", - strip_prefix = "protobuf-28.2", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.2.tar.gz"], + sha256 = "7c3ebd7aaedd86fa5dc479a0fda803f602caaf78d8aff7ce83b89e1b8ae7442a", + strip_prefix = "protobuf-28.3", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.3.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index f0c8f1deb65f..1ab0eb2b736c 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,83 +8,83 @@ aiohappyeyeballs==2.4.3 \ --hash=sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586 \ --hash=sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572 # via aiohttp -aiohttp==3.11.0 \ - --hash=sha256:024409c1b1d6076d0ed933dcebd7e4fc6f3320a227bfa0c1b6b93a8b5a146f04 \ - --hash=sha256:04b24497b3baf15035730de5f207ade88a67d4483a5f16ced7ece348933a5b47 \ - --hash=sha256:08474e71772a516ba2e2167b4707af8361d2c452b3d8a5364c984f4867869499 \ - --hash=sha256:0e7a0762cc29cd3acd01a4d2b547b3af7956ad230ebb80b529a8e4f3e4740fe8 \ - --hash=sha256:104deb7873681273c5daa13c41924693df394043a118dae90387d35bc5531788 \ - --hash=sha256:104ea21994b1403e4c1b398866f1187c1694fa291314ad7216ec1d8ec6b49f38 \ - --hash=sha256:113bf06b029143e94a47c4f36e11a8b7e396e9d1f1fc8cea58e6b7e370cfed38 \ - --hash=sha256:12071dd2cc95ba81e0f2737bebcb98b2a8656015e87772e84e8fb9e635b5da6e \ - 
--hash=sha256:170fb2324826bb9f08055a8291f42192ae5ee2f25b2966c8f0f4537c61d73a7b \ - --hash=sha256:21b4545e8d96870da9652930c5198366605ff8f982757030e2148cf341e5746b \ - --hash=sha256:229ae13959a5f499d90ffbb4b9eac2255d8599315027d6f7c22fa9803a94d5b1 \ - --hash=sha256:2ec5efbc872b00ddd85e3904059d274f284cff314e13f48776050ca2c58f451d \ - --hash=sha256:31b91ff3a1fcb206a1fa76e0de1f08c9ffb1dc0deb7296fa2618adfe380fc676 \ - --hash=sha256:329f5059e0bf6983dceebac8e6ed20e75eaff6163b3414f4a4cb59e0d7037672 \ - --hash=sha256:37f8cf3c43f292d9bb3e6760476c2b55b9663a581fad682a586a410c43a7683e \ - --hash=sha256:3e1ed8d152cccceffb1ee7a2ac227c16372e453fb11b3aeaa56783049b85d3f6 \ - --hash=sha256:3ed360d6672a9423aad39902a4e9fe305464d20ed7931dbdba30a4625782d875 \ - --hash=sha256:40dc9446cff326672fcbf93efdb8ef7e949824de1097624efe4f61ac7f0d2c43 \ - --hash=sha256:4d218d3eca40196384ad3b481309c56fd60e664128885d1734da0a8aa530d433 \ - --hash=sha256:4e4e155968040e32c124a89852a1a5426d0e920a35f4331e1b3949037bfe93a3 \ - --hash=sha256:4f698aa61879df64425191d41213dfd99efdc1627e6398e6d7aa5c312fac9702 \ - --hash=sha256:508cfcc99534b1282595357592d8367b44392b21f6eb5d4dc021f8d0d809e94d \ - --hash=sha256:577c7429f8869fa30186fc2c9eee64d75a30b51b61f26aac9725866ae5985cfd \ - --hash=sha256:57e17c6d71f2dc857a8a1d09be1be7802e35d90fb4ba4b06cf1aab6414a57894 \ - --hash=sha256:5ecc2fb1a0a9d48cf773add34196cddf7e488e48e9596e090849751bf43098f4 \ - --hash=sha256:600b1d9f86a130131915e2f2127664311b33902c486b21a747d626f5144b4471 \ - --hash=sha256:62502b8ffee8c6a4b5c6bf99d1de277d42bf51b2fb713975d9b63b560150b7ac \ - --hash=sha256:62a2f5268b672087c45b33479ba1bb1d5a48c6d76c133cfce3a4f77410c200d1 \ - --hash=sha256:6362f50a6f0e5482c4330d2151cb682779230683da0e155c15ec9fc58cb50b6a \ - --hash=sha256:6533dd06df3d17d1756829b68b365b1583929b54082db8f65083a4184bf68322 \ - --hash=sha256:6c5a6958f4366496004cf503d847093d464814543f157ef3b738bbf604232415 \ - --hash=sha256:72cd984f7f14e8c01b3e38f18f39ea85dba84e52ea05e37116ba5e2a72eef396 \ - 
--hash=sha256:76d6ee8bb132f8ee0fcb0e205b4708ddb6fba524eb515ee168113063d825131b \ - --hash=sha256:7867d0808614f04e78e0a8d5a2c1f8ac6bc626a0c0e2f62be48be6b749e2f8b2 \ - --hash=sha256:7d664e5f937c08adb7908ea9f391fbf2928a9b09cb412ac0aba602bde9e499e4 \ - --hash=sha256:85ae6f182be72c3531915e90625cc65afce4df8a0fc4988bd52d8a5d5faaeb68 \ - --hash=sha256:89a96a0696dc67d548f69cb518c581a7a33cc1f26ab42229dea1709217c9d926 \ - --hash=sha256:8b323b5d3aef7dd811424c269322eec58a977c0c8152e650159e47210d900504 \ - --hash=sha256:8c47a0ba6c2b3d3e5715f8338d657badd21f778c6be16701922c65521c5ecfc9 \ - --hash=sha256:8fef105113d56e817cb9bcc609667ee461321413a7b972b03f5b4939f40f307c \ - --hash=sha256:900ff74d78eb580ae4aa5883242893b123a0c442a46570902500f08d6a7e6696 \ - --hash=sha256:9095580806d9ed07c0c29b23364a0b1fb78258ef9f4bddf7e55bac0e475d4edf \ - --hash=sha256:91d3991fad8b65e5dbc13cd95669ea689fe0a96ff63e4e64ac24ed724e4f8103 \ - --hash=sha256:9231d610754724273a6ac05a1f177979490bfa6f84d49646df3928af2e88cfd5 \ - --hash=sha256:97056d3422594e0787733ac4c45bef58722d452f4dc6615fee42f59fe51707dd \ - --hash=sha256:a896059b6937d1a22d8ee8377cdcd097bd26cd8c653b8f972051488b9baadee9 \ - --hash=sha256:aabc4e92cb153636d6be54e84dad1b252ddb9aebe077942b6dcffe5e468d476a \ - --hash=sha256:ad14cdc0fba4df31c0f6e06c21928c5b924725cbf60d0ccc5f6e7132636250e9 \ - --hash=sha256:ae36ae52b0c22fb69fb8b744eff82a20db512a29eafc6e3a4ab43b17215b219d \ - --hash=sha256:b3e4fb7f5354d39490d8209aefdf5830b208d01c7293a2164e404312c3d8bc55 \ - --hash=sha256:b40c304ab01e89ad0aeeecf91bbaa6ae3b00e27b796c9e8d50b71a4a7e885cc8 \ - --hash=sha256:b7349205bb163318dcc102329d30be59a647a3d24c82c3d91ed35b7e7301ea7e \ - --hash=sha256:b8b95a63a8e8b5f0464bd8b1b0d59d2bec98a59b6aacc71e9be23df6989b3dfb \ - --hash=sha256:bb2e82e515e268b965424ecabebd91834a41b36260b6ef5db015ee12ddb28ef3 \ - --hash=sha256:c0315978b2a4569e03fb59100f6a7e7d23f718a4521491f5c13d946d37549f3d \ - --hash=sha256:c1828e10c3a49e2b234b87600ecb68a92b8a8dcf8b99bca9447f16c4baaa1630 \ - 
--hash=sha256:c1c49bc393d854d4421ebc174a0a41f9261f50d3694d8ca277146cbbcfd24ee7 \ - --hash=sha256:c415b9601ff50709d6050c8a9281733a9b042b9e589265ac40305b875cf9c463 \ - --hash=sha256:c54c635d1f52490cde7ef3a423645167a8284e452a35405d5c7dc1242a8e75c9 \ - --hash=sha256:c5e6a1f8b0268ffa1c84d7c3558724956002ba8361176e76406233e704bbcffb \ - --hash=sha256:c98a596ac20e8980cc6f34c0c92a113e98eb08f3997c150064d26d2aeb043e5a \ - --hash=sha256:cd0834e4260eab78671b81d34f110fbaac449563e48d419cec0030d9a8e58693 \ - --hash=sha256:cdad66685fcf2ad14ce522cf849d4a025f4fd206d6cfc3f403d9873e4c243b03 \ - --hash=sha256:d1ea006426edf7e1299c52a58b0443158012f7a56fed3515164b60bfcb1503a9 \ - --hash=sha256:d33b4490026968bdc7f0729b9d87a3a6b1e09043557d2fc1c605c6072deb2f11 \ - --hash=sha256:d5cae4cd271e20b7ab757e966cc919186b9f02535418ab36c471a5377ef4deaa \ - --hash=sha256:dd505a1121ad5b666191840b7bd1d8cb917df2647deeca6f3474331b72452362 \ - --hash=sha256:e1668ef2f3a7ec9881f4b6a917e5f97c87a343fa6b0d5fc826b7b0297ddd0887 \ - --hash=sha256:e7bcfcede95531589295f56e924702cef7f9685c9e4e5407592e04ded6a65bf3 \ - --hash=sha256:ebf610c37df4f09c71c9bbf8309b4b459107e6fe889ac0d7e16f6e4ebd975f86 \ - --hash=sha256:f3bf5c132eb48002bcc3825702d241d35b4e9585009e65e9dcf9c4635d0b7424 \ - --hash=sha256:f40380c96dd407dfa84eb2d264e68aa47717b53bdbe210a59cc3c35a4635f195 \ - --hash=sha256:f57a0de48dda792629e7952d34a0c7b81ea336bb9b721391c7c58145b237fe55 \ - --hash=sha256:f6b925c7775ab857bdc1e52e1f5abcae7d18751c09b751aeb641a5276d9b990e \ - --hash=sha256:f8f0d79b923070f25674e4ea8f3d61c9d89d24d9598d50ff32c5b9b23c79a25b \ - --hash=sha256:feca9fafa4385aea6759c171cd25ea82f7375312fca04178dae35331be45e538 +aiohttp==3.11.2 \ + --hash=sha256:08ebe7a1d6c1e5ca766d68407280d69658f5f98821c2ba6c41c63cabfed159af \ + --hash=sha256:0a90a0dc4b054b5af299a900bf950fe8f9e3e54322bc405005f30aa5cacc5c98 \ + --hash=sha256:0cba0b8d25aa2d450762f3dd6df85498f5e7c3ad0ddeb516ef2b03510f0eea32 \ + 
--hash=sha256:0ebdf5087e2ce903d8220cc45dcece90c2199ae4395fd83ca616fcc81010db2c \ + --hash=sha256:10a5f91c319d9d4afba812f72984816b5fcd20742232ff7ecc1610ffbf3fc64d \ + --hash=sha256:122768e3ae9ce74f981b46edefea9c6e5a40aea38aba3ac50168e6370459bf20 \ + --hash=sha256:14eb6c628432720e41b4fab1ada879d56cfe7034159849e083eb536b4c2afa99 \ + --hash=sha256:177b000efaf8d2f7012c649e8aee5b0bf488677b1162be5e7511aa4f9d567607 \ + --hash=sha256:1c2496182e577042e0e07a328d91c949da9e77a2047c7291071e734cd7a6e780 \ + --hash=sha256:1e33a7eddcd07545ccf5c3ab230f60314a17dc33e285475e8405e26e21f02660 \ + --hash=sha256:2793d3297f3e49015140e6d3ea26142c967e07998e2fb00b6ee8d041138fbc4e \ + --hash=sha256:2914061f5ca573f990ec14191e6998752fa8fe50d518e3405410353c3f44aa5d \ + --hash=sha256:2adb967454e10e69478ba4a8d8afbba48a7c7a8619216b7c807f8481cc66ddfb \ + --hash=sha256:2b02a68b9445c70d7f5c8b578c5f5e5866b1d67ca23eb9e8bc8658ae9e3e2c74 \ + --hash=sha256:3129151378f858cdc4a0a4df355c9a0d060ab49e2eea7e62e9f085bac100551b \ + --hash=sha256:32334f35824811dd20a12cc90825d000e6b50faaeaa71408d42269151a66140d \ + --hash=sha256:33af11eca7bb0f5c6ffaf5e7d9d2336c2448f9c6279b93abdd6f3c35f9ee321f \ + --hash=sha256:34f37c59b12bc3afc52bab6fcd9cd3be82ff01c4598a84cbea934ccb3a9c54a0 \ + --hash=sha256:3666c750b73ce463a413692e3a57c60f7089e2d9116a2aa5a0f0eaf2ae325148 \ + --hash=sha256:374baefcb1b6275f350da605951f5f02487a9bc84a574a7d5b696439fabd49a3 \ + --hash=sha256:382f853516664d2ebfc75dc01da4a10fdef5edcb335fe7b45cf471ce758ecb18 \ + --hash=sha256:3b1f4844909321ef2c1cee50ddeccbd6018cd8c8d1ddddda3f553e94a5859497 \ + --hash=sha256:3f617a48b70f4843d54f52440ea1e58da6bdab07b391a3a6aed8d3b311a4cc04 \ + --hash=sha256:435f7a08d8aa42371a94e7c141205a9cb092ba551084b5e0c57492e6673601a3 \ + --hash=sha256:44b69c69c194ffacbc50165911cf023a4b1b06422d1e1199d3aea82eac17004e \ + --hash=sha256:486273d3b5af75a80c31c311988931bdd2a4b96a74d5c7f422bad948f99988ef \ + --hash=sha256:4a23475d8d5c56e447b7752a1e2ac267c1f723f765e406c81feddcd16cdc97bc \ + 
--hash=sha256:4c979fc92aba66730b66099cd5becb42d869a26c0011119bc1c2478408a8bf7a \ + --hash=sha256:4d7fad8c456d180a6d2f44c41cfab4b80e2e81451815825097db48b8293f59d5 \ + --hash=sha256:50e0aee4adc9abcd2109c618a8d1b2c93b85ac277b24a003ab147d91e068b06d \ + --hash=sha256:556564d89e2f4a6e8fe000894c03e4e84cf0b6cfa5674e425db122633ee244d1 \ + --hash=sha256:5587da333b7d280a312715b843d43e734652aa382cba824a84a67c81f75b338b \ + --hash=sha256:57993f406ce3f114b2a6756d7809be3ffd0cc40f33e8f8b9a4aa1b027fd4e3eb \ + --hash=sha256:5d6e069b882c1fdcbe5577dc4be372eda705180197140577a4cddb648c29d22e \ + --hash=sha256:5d878a0186023ac391861958035174d0486f3259cabf8fd94e591985468da3ea \ + --hash=sha256:5d90b5a3b0f32a5fecf5dd83d828713986c019585f5cddf40d288ff77f366615 \ + --hash=sha256:5e9a766c346b2ed7e88937919d84ed64b4ef489dad1d8939f806ee52901dc142 \ + --hash=sha256:64e8f5178958a9954043bc8cd10a5ae97352c3f2fc99aa01f2aebb0026010910 \ + --hash=sha256:66e58a2e8c7609a3545c4b38fb8b01a6b8346c4862e529534f7674c5265a97b8 \ + --hash=sha256:68d1f46f9387db3785508f5225d3acbc5825ca13d9c29f2b5cce203d5863eb79 \ + --hash=sha256:6ad9a7d2a3a0f235184426425f80bd3b26c66b24fd5fddecde66be30c01ebe6e \ + --hash=sha256:6e8e19a80ba194db5c06915a9df23c0c06e0e9ca9a4db9386a6056cca555a027 \ + --hash=sha256:73a664478ae1ea011b5a710fb100b115ca8b2146864fa0ce4143ff944df714b8 \ + --hash=sha256:766d0ebf8703d28f854f945982aa09224d5a27a29594c70d921c43c3930fe7ac \ + --hash=sha256:783741f534c14957fbe657d62a34b947ec06db23d45a2fd4a8aeb73d9c84d7e6 \ + --hash=sha256:79efd1ee3827b2f16797e14b1e45021206c3271249b4d0025014466d416d7413 \ + --hash=sha256:83a70e22e0f6222effe7f29fdeba6c6023f9595e59a0479edacfbd7de4b77bb7 \ + --hash=sha256:85de9904bc360fd29a98885d2bfcbd4e02ab33c53353cb70607f2bea2cb92468 \ + --hash=sha256:8d954ba0eae7f33884d27dc00629ca4389d249eb8d26ca07c30911257cae8c96 \ + --hash=sha256:9075313f8e41b481e4cb10af405054564b0247dc335db5398ed05f8ec38787e2 \ + --hash=sha256:97fba98fc5d9ccd3d33909e898d00f2494d6a9eec7cbda3d030632e2c8bb4d00 \ + 
--hash=sha256:994cb893936dd2e1803655ae8667a45066bfd53360b148e22b4e3325cc5ea7a3 \ + --hash=sha256:9aa4e68f1e4f303971ec42976fb170204fb5092de199034b57199a1747e78a2d \ + --hash=sha256:9b6d15adc9768ff167614ca853f7eeb6ee5f1d55d5660e3af85ce6744fed2b82 \ + --hash=sha256:9bbb2dbc2701ab7e9307ca3a8fa4999c5b28246968e0a0202a5afabf48a42e22 \ + --hash=sha256:9c8d1db4f65bbc9d75b7b271d68fb996f1c8c81a525263862477d93611856c2d \ + --hash=sha256:a7b0a1618060e3f5aa73d3526ca2108a16a1b6bf86612cd0bb2ddcbef9879d06 \ + --hash=sha256:afa55e863224e664a782effa62245df73fdfc55aee539bed6efacf35f6d4e4b7 \ + --hash=sha256:b339d91ac9060bd6ecdc595a82dc151045e5d74f566e0864ef3f2ba0887fec42 \ + --hash=sha256:b470de64d17156c37e91effc109d3b032b39867000e2c126732fe01d034441f9 \ + --hash=sha256:b4ec8afd362356b8798c8caa806e91deb3f0602d8ffae8e91d2d3ced2a90c35e \ + --hash=sha256:c28c1677ea33ccb8b14330560094cc44d3ff4fad617a544fd18beb90403fe0f1 \ + --hash=sha256:c681f34e2814bc6e1eef49752b338061b94a42c92734d0be9513447d3f83718c \ + --hash=sha256:cccb2937bece1310c5c0163d0406aba170a2e5fb1f0444d7b0e7fdc9bd6bb713 \ + --hash=sha256:cdc6f8dce09281ae534eaf08a54f0d38612398375f28dad733a8885f3bf9b978 \ + --hash=sha256:d23854e5867650d40cba54d49956aad8081452aa80b2cf0d8c310633f4f48510 \ + --hash=sha256:d2d942421cf3a1d1eceae8fa192f1fbfb74eb9d3e207d35ad2696bd2ce2c987c \ + --hash=sha256:d2f991c18132f3e505c108147925372ffe4549173b7c258cf227df1c5977a635 \ + --hash=sha256:d3a2bcf6c81639a165da93469e1e0aff67c956721f3fa9c0560f07dd1e505116 \ + --hash=sha256:d84930b4145991214602372edd7305fc76b700220db79ac0dd57d3afd0f0a1ca \ + --hash=sha256:de3b4d5fb5d69749104b880a157f38baeea7765c93d9cd3837cedd5b84729e10 \ + --hash=sha256:e57a10aacedcf24666f4c90d03e599f71d172d1c5e00dcf48205c445806745b0 \ + --hash=sha256:f1d06c8fd8b453c3e553c956bd3b8395100401060430572174bb7876dd95ad49 \ + --hash=sha256:f833a80d9de9307d736b6af58c235b17ef7f90ebea7b9c49cd274dec7a66a2f1 \ + --hash=sha256:fb0544a0e8294a5a5e20d3cacdaaa9a911d7c0a9150f5264aef36e7d8fdfa07e \ + 
--hash=sha256:ff5d22eece44528023254b595c670dfcf9733ac6af74c4b6cb4f6a784dc3870c # via -r requirements.in aiosignal==1.3.1 \ --hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \ @@ -336,62 +336,62 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.67.1 \ - --hash=sha256:01f616a964e540638af5130469451cf580ba8c7329f45ca998ab66e0c7dcdb04 \ - --hash=sha256:0489063974d1452436139501bf6b180f63d4977223ee87488fe36858c5725292 \ - --hash=sha256:0e6f255980afef598a9e64a24efce87b625e3e3c80a45162d111a461a9f92955 \ - --hash=sha256:0f3e49c738396e93b7ba9016e153eb09e0778e776df6090c1b8c91877cc1c426 \ - --hash=sha256:178f5db771c4f9a9facb2ab37a434c46cb9be1a75e820f187ee3d1e7805c4f65 \ - --hash=sha256:1a65b503d008f066e994f34f456e0647e5ceb34cfcec5ad180b1b44020ad4970 \ - --hash=sha256:1d7616d2ded471231c701489190379e0c311ee0a6c756f3c03e6a62b95a7146e \ - --hash=sha256:24e8a26dbfc5274d7474c27759b54486b8de23c709d76695237515bc8b5baeab \ - --hash=sha256:267d1745894200e4c604958da5f856da6293f063327cb049a51fe67348e4f953 \ - --hash=sha256:299b3d8c4f790c6bcca485f9963b4846dd92cf6f1b65d3697145d005c80f9fe8 \ - --hash=sha256:3b6c16489326d79ead41689c4b84bc40d522c9a7617219f4ad94bc7f448c5085 \ - --hash=sha256:3dc2ed4cabea4dc14d5e708c2b426205956077cc5de419b4d4079315017e9732 \ - --hash=sha256:43112046864317498a33bdc4797ae6a268c36345a910de9b9c17159d8346602f \ - --hash=sha256:4422581cdc628f77302270ff839a44f4c24fdc57887dc2a45b7e53d8fc2376af \ - --hash=sha256:4e7b904484a634a0fff132958dabdb10d63e0927398273917da3ee103e8d1f78 \ - --hash=sha256:5721e66a594a6c4204458004852719b38f3d5522082be9061d6510b455c90afc \ - --hash=sha256:5db70d32d6703b89912af16d6d45d78406374a8b8ef0d28140351dd0ec610e98 \ - --hash=sha256:5ed601c4c6008429e3d247ddb367fe8c7259c355757448d7c1ef7bd4a6739e8e \ - 
--hash=sha256:60336bff760fbb47d7e86165408126f1dded184448e9a4c892189eb7c9d3f90f \ - --hash=sha256:608d87d1bdabf9e2868b12338cd38a79969eaf920c89d698ead08f48de9c0f9e \ - --hash=sha256:60e6a4dcf5af7bbc36fd9f81c9f372e8ae580870a9e4b6eafe948cd334b81cf3 \ - --hash=sha256:638354e698fd0c6c76b04540a850bf1db27b4d2515a19fcd5cf645c48d3eb1ed \ - --hash=sha256:699e964923b70f3101393710793289e42845791ea07565654ada0969522d0a38 \ - --hash=sha256:7818c0454027ae3384235a65210bbf5464bd715450e30a3d40385453a85a70cb \ - --hash=sha256:786a5b18544622bfb1e25cc08402bd44ea83edfb04b93798d85dca4d1a0b5be5 \ - --hash=sha256:804c6457c3cd3ec04fe6006c739579b8d35c86ae3298ffca8de57b493524b771 \ - --hash=sha256:80b866f73224b0634f4312a4674c1be21b2b4afa73cb20953cbbb73a6b36c3cc \ - --hash=sha256:85f69fdc1d28ce7cff8de3f9c67db2b0ca9ba4449644488c1e0303c146135ddb \ - --hash=sha256:85f862069b86a305497e74d0dc43c02de3d1d184fc2c180993aa8aa86fbd19b8 \ - --hash=sha256:8a00efecde9d6fcc3ab00c13f816313c040a28450e5e25739c24f432fc6d3c75 \ - --hash=sha256:8a23cbcc5bb11ea7dc6163078be36c065db68d915c24f5faa4f872c573bb400f \ - --hash=sha256:8b0341d66a57f8a3119b77ab32207072be60c9bf79760fa609c5609f2deb1f3f \ - --hash=sha256:917e8d8994eed1d86b907ba2a61b9f0aef27a2155bca6cbb322430fc7135b7bb \ - --hash=sha256:95b5f2b857856ed78d72da93cd7d09b6db8ef30102e5e7fe0961fe4d9f7d48e8 \ - --hash=sha256:9e838cad2176ebd5d4a8bb03955138d6589ce9e2ce5d51c3ada34396dbd2dba8 \ - --hash=sha256:9fd042de4a82e3e7aca44008ee2fb5da01b3e5adb316348c21980f7f58adc311 \ - --hash=sha256:a25bdea92b13ff4d7790962190bf6bf5c4639876e01c0f3dda70fc2769616335 \ - --hash=sha256:a6703916c43b1d468d0756c8077b12017a9fcb6a1ef13faf49e67d20d7ebda62 \ - --hash=sha256:a93deda571a1bf94ec1f6fcda2872dad3ae538700d94dc283c672a3b508ba3af \ - --hash=sha256:aa0162e56fd10a5547fac8774c4899fc3e18c1aa4a4759d0ce2cd00d3696ea6b \ - --hash=sha256:b49359977c6ec9f5d0573ea4e0071ad278ef905aa74e420acc73fd28ce39e9ce \ - --hash=sha256:beee96c8c0b1a75d556fe57b92b58b4347c77a65781ee2ac749d550f2a365dc1 \ - 
--hash=sha256:c7a01337407dd89005527623a4a72c5c8e2894d22bead0895306b23c6695698f \ - --hash=sha256:c9b929f13677b10f63124c1a410994a401cdd85214ad83ab67cc077fc7e480f0 \ - --hash=sha256:cdc491ae35a13535fd9196acb5afe1af37c8237df2e54427be3eecda3653127e \ - --hash=sha256:e279330bef1744040db8fc432becc8a727b84f456ab62b744d3fdb83f327e121 \ - --hash=sha256:e29ca27bec8e163dca0c98084040edec3bc49afd10f18b412f483cc68c712744 \ - --hash=sha256:e7d1797a8a3845437d327145959a2c0c47c05947c9eef5ff1a4c80e499dcc6fa \ - --hash=sha256:ea33986b70f83844cd00814cee4451055cd8cab36f00ac64a31f5bb09b31919e \ - --hash=sha256:ec74ef02010186185de82cc594058a3ccd8d86821842bbac9873fd4a2cf8be8d \ - --hash=sha256:f26b0b547eb8d00e195274cdfc63ce64c8fc2d3e2d00b12bf468ece41a0423a0 \ - --hash=sha256:f5a27dddefe0e2357d3e617b9079b4bfdc91341a91565111a21ed6ebbc51b22d \ - --hash=sha256:f5b76ff64aaac53fede0cc93abf57894ab2a7362986ba22243d06218b93efe46 \ - --hash=sha256:f9fff78ba10d4250bfc07a01bd6254a6d87dc67f9627adece85c0b2ed754fa96 \ - --hash=sha256:fa0c739ad8b1996bd24823950e3cb5152ae91fca1c09cc791190bf1627ffefba +grpcio==1.68.0 \ + --hash=sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354 \ + --hash=sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21 \ + --hash=sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116 \ + --hash=sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a \ + --hash=sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829 \ + --hash=sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1 \ + --hash=sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363 \ + --hash=sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a \ + --hash=sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9 \ + --hash=sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b \ + 
--hash=sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03 \ + --hash=sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415 \ + --hash=sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7 \ + --hash=sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121 \ + --hash=sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f \ + --hash=sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd \ + --hash=sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d \ + --hash=sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4 \ + --hash=sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10 \ + --hash=sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5 \ + --hash=sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332 \ + --hash=sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544 \ + --hash=sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75 \ + --hash=sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665 \ + --hash=sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110 \ + --hash=sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd \ + --hash=sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a \ + --hash=sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618 \ + --hash=sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d \ + --hash=sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30 \ + --hash=sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1 \ + --hash=sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1 \ + --hash=sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d \ + --hash=sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796 \ + 
--hash=sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3 \ + --hash=sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b \ + --hash=sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659 \ + --hash=sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a \ + --hash=sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05 \ + --hash=sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a \ + --hash=sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c \ + --hash=sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161 \ + --hash=sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb \ + --hash=sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78 \ + --hash=sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27 \ + --hash=sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe \ + --hash=sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b \ + --hash=sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc \ + --hash=sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155 \ + --hash=sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490 \ + --hash=sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d \ + --hash=sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2 \ + --hash=sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3 \ + --hash=sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e \ + --hash=sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -840,87 +840,87 @@ urllib3==2.2.3 \ --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via 
requests -yarl==1.17.1 \ - --hash=sha256:06157fb3c58f2736a5e47c8fcbe1afc8b5de6fb28b14d25574af9e62150fcaac \ - --hash=sha256:067a63fcfda82da6b198fa73079b1ca40b7c9b7994995b6ee38acda728b64d47 \ - --hash=sha256:0b1794853124e2f663f0ea54efb0340b457f08d40a1cef78edfa086576179c91 \ - --hash=sha256:0bdff5e0995522706c53078f531fb586f56de9c4c81c243865dd5c66c132c3b5 \ - --hash=sha256:117ed8b3732528a1e41af3aa6d4e08483c2f0f2e3d3d7dca7cf538b3516d93df \ - --hash=sha256:14bc88baa44e1f84164a392827b5defb4fa8e56b93fecac3d15315e7c8e5d8b3 \ - --hash=sha256:1654ec814b18be1af2c857aa9000de7a601400bd4c9ca24629b18486c2e35463 \ - --hash=sha256:16bca6678a83657dd48df84b51bd56a6c6bd401853aef6d09dc2506a78484c7b \ - --hash=sha256:1a3b91c44efa29e6c8ef8a9a2b583347998e2ba52c5d8280dbd5919c02dfc3b5 \ - --hash=sha256:1a52a1ffdd824fb1835272e125385c32fd8b17fbdefeedcb4d543cc23b332d74 \ - --hash=sha256:1ce36ded585f45b1e9bb36d0ae94765c6608b43bd2e7f5f88079f7a85c61a4d3 \ - --hash=sha256:299f11b44d8d3a588234adbe01112126010bd96d9139c3ba7b3badd9829261c3 \ - --hash=sha256:2b24ec55fad43e476905eceaf14f41f6478780b870eda5d08b4d6de9a60b65b4 \ - --hash=sha256:2d374d70fdc36f5863b84e54775452f68639bc862918602d028f89310a034ab0 \ - --hash=sha256:2d9f0606baaec5dd54cb99667fcf85183a7477f3766fbddbe3f385e7fc253299 \ - --hash=sha256:2e7ba4c9377e48fb7b20dedbd473cbcbc13e72e1826917c185157a137dac9df2 \ - --hash=sha256:2f0a6423295a0d282d00e8701fe763eeefba8037e984ad5de44aa349002562ac \ - --hash=sha256:327828786da2006085a4d1feb2594de6f6d26f8af48b81eb1ae950c788d97f61 \ - --hash=sha256:380e6c38ef692b8fd5a0f6d1fa8774d81ebc08cfbd624b1bca62a4d4af2f9931 \ - --hash=sha256:3b74ff4767d3ef47ffe0cd1d89379dc4d828d4873e5528976ced3b44fe5b0a21 \ - --hash=sha256:3e844be8d536afa129366d9af76ed7cb8dfefec99f5f1c9e4f8ae542279a6dc3 \ - --hash=sha256:459e81c2fb920b5f5df744262d1498ec2c8081acdcfe18181da44c50f51312f7 \ - --hash=sha256:46ddf6e0b975cd680eb83318aa1d321cb2bf8d288d50f1754526230fcf59ba96 \ - 
--hash=sha256:482c122b72e3c5ec98f11457aeb436ae4aecca75de19b3d1de7cf88bc40db82f \ - --hash=sha256:561c87fea99545ef7d692403c110b2f99dced6dff93056d6e04384ad3bc46243 \ - --hash=sha256:578d00c9b7fccfa1745a44f4eddfdc99d723d157dad26764538fbdda37209857 \ - --hash=sha256:58c8e9620eb82a189c6c40cb6b59b4e35b2ee68b1f2afa6597732a2b467d7e8f \ - --hash=sha256:5b29beab10211a746f9846baa39275e80034e065460d99eb51e45c9a9495bcca \ - --hash=sha256:5d1d42556b063d579cae59e37a38c61f4402b47d70c29f0ef15cee1acaa64488 \ - --hash=sha256:5f236cb5999ccd23a0ab1bd219cfe0ee3e1c1b65aaf6dd3320e972f7ec3a39da \ - --hash=sha256:62a91aefff3d11bf60e5956d340eb507a983a7ec802b19072bb989ce120cd948 \ - --hash=sha256:64cc6e97f14cf8a275d79c5002281f3040c12e2e4220623b5759ea7f9868d6a5 \ - --hash=sha256:6f4c9156c4d1eb490fe374fb294deeb7bc7eaccda50e23775b2354b6a6739934 \ - --hash=sha256:7294e38f9aa2e9f05f765b28ffdc5d81378508ce6dadbe93f6d464a8c9594473 \ - --hash=sha256:7615058aabad54416ddac99ade09a5510cf77039a3b903e94e8922f25ed203d7 \ - --hash=sha256:7e48cdb8226644e2fbd0bdb0a0f87906a3db07087f4de77a1b1b1ccfd9e93685 \ - --hash=sha256:7f63d176a81555984e91f2c84c2a574a61cab7111cc907e176f0f01538e9ff6e \ - --hash=sha256:7f6595c852ca544aaeeb32d357e62c9c780eac69dcd34e40cae7b55bc4fb1147 \ - --hash=sha256:7fac95714b09da9278a0b52e492466f773cfe37651cf467a83a1b659be24bf71 \ - --hash=sha256:81713b70bea5c1386dc2f32a8f0dab4148a2928c7495c808c541ee0aae614d67 \ - --hash=sha256:846dd2e1243407133d3195d2d7e4ceefcaa5f5bf7278f0a9bda00967e6326b04 \ - --hash=sha256:84c063af19ef5130084db70ada40ce63a84f6c1ef4d3dbc34e5e8c4febb20822 \ - --hash=sha256:881764d610e3269964fc4bb3c19bb6fce55422828e152b885609ec176b41cf11 \ - --hash=sha256:8994b29c462de9a8fce2d591028b986dbbe1b32f3ad600b2d3e1c482c93abad6 \ - --hash=sha256:8c79e9d7e3d8a32d4824250a9c6401194fb4c2ad9a0cec8f6a96e09a582c2cc0 \ - --hash=sha256:8ee427208c675f1b6e344a1f89376a9613fc30b52646a04ac0c1f6587c7e46ec \ - --hash=sha256:949681f68e0e3c25377462be4b658500e85ca24323d9619fdc41f68d46a1ffda \ - 
--hash=sha256:9e275792097c9f7e80741c36de3b61917aebecc08a67ae62899b074566ff8556 \ - --hash=sha256:9fb815155aac6bfa8d86184079652c9715c812d506b22cfa369196ef4e99d1b4 \ - --hash=sha256:a2a64e62c7a0edd07c1c917b0586655f3362d2c2d37d474db1a509efb96fea1c \ - --hash=sha256:a7ac5b4984c468ce4f4a553df281450df0a34aefae02e58d77a0847be8d1e11f \ - --hash=sha256:aa46dce75078fceaf7cecac5817422febb4355fbdda440db55206e3bd288cfb8 \ - --hash=sha256:ae3476e934b9d714aa8000d2e4c01eb2590eee10b9d8cd03e7983ad65dfbfcba \ - --hash=sha256:b0341e6d9a0c0e3cdc65857ef518bb05b410dbd70d749a0d33ac0f39e81a4258 \ - --hash=sha256:b40d1bf6e6f74f7c0a567a9e5e778bbd4699d1d3d2c0fe46f4b717eef9e96b95 \ - --hash=sha256:b5c4804e4039f487e942c13381e6c27b4b4e66066d94ef1fae3f6ba8b953f383 \ - --hash=sha256:b5d6a6c9602fd4598fa07e0389e19fe199ae96449008d8304bf5d47cb745462e \ - --hash=sha256:b5f1ac7359e17efe0b6e5fec21de34145caef22b260e978336f325d5c84e6938 \ - --hash=sha256:c0167540094838ee9093ef6cc2c69d0074bbf84a432b4995835e8e5a0d984374 \ - --hash=sha256:c180ac742a083e109c1a18151f4dd8675f32679985a1c750d2ff806796165b55 \ - --hash=sha256:c73df5b6e8fabe2ddb74876fb82d9dd44cbace0ca12e8861ce9155ad3c886139 \ - --hash=sha256:c7e177c619342e407415d4f35dec63d2d134d951e24b5166afcdfd1362828e17 \ - --hash=sha256:cbad927ea8ed814622305d842c93412cb47bd39a496ed0f96bfd42b922b4a217 \ - --hash=sha256:cc353841428d56b683a123a813e6a686e07026d6b1c5757970a877195f880c2d \ - --hash=sha256:cc7c92c1baa629cb03ecb0c3d12564f172218fb1739f54bf5f3881844daadc6d \ - --hash=sha256:cc7d768260f4ba4ea01741c1b5fe3d3a6c70eb91c87f4c8761bbcce5181beafe \ - --hash=sha256:d0eea830b591dbc68e030c86a9569826145df485b2b4554874b07fea1275a199 \ - --hash=sha256:d216e5d9b8749563c7f2c6f7a0831057ec844c68b4c11cb10fc62d4fd373c26d \ - --hash=sha256:d401f07261dc5aa36c2e4efc308548f6ae943bfff20fcadb0a07517a26b196d8 \ - --hash=sha256:d6324274b4e0e2fa1b3eccb25997b1c9ed134ff61d296448ab8269f5ac068c4c \ - --hash=sha256:d8a8b74d843c2638f3864a17d97a4acda58e40d3e44b6303b8cc3d3c44ae2d29 \ - 
--hash=sha256:d9b6b28a57feb51605d6ae5e61a9044a31742db557a3b851a74c13bc61de5172 \ - --hash=sha256:de599af166970d6a61accde358ec9ded821234cbbc8c6413acfec06056b8e860 \ - --hash=sha256:e594b22688d5747b06e957f1ef822060cb5cb35b493066e33ceac0cf882188b7 \ - --hash=sha256:e5b078134f48552c4d9527db2f7da0b5359abd49393cdf9794017baec7506170 \ - --hash=sha256:eb6dce402734575e1a8cc0bb1509afca508a400a57ce13d306ea2c663bad1138 \ - --hash=sha256:f1790a4b1e8e8e028c391175433b9c8122c39b46e1663228158e61e6f915bf06 \ - --hash=sha256:f5efe0661b9fcd6246f27957f6ae1c0eb29bc60552820f01e970b4996e016004 \ - --hash=sha256:f9cbfbc5faca235fbdf531b93aa0f9f005ec7d267d9d738761a4d42b744ea159 \ - --hash=sha256:fbea1751729afe607d84acfd01efd95e3b31db148a181a441984ce9b3d3469da \ - --hash=sha256:fca4b4307ebe9c3ec77a084da3a9d1999d164693d16492ca2b64594340999988 \ - --hash=sha256:ff5c6771c7e3511a06555afa317879b7db8d640137ba55d6ab0d0c50425cab75 +yarl==1.17.2 \ + --hash=sha256:0c8e589379ef0407b10bed16cc26e7392ef8f86961a706ade0a22309a45414d7 \ + --hash=sha256:0d41c684f286ce41fa05ab6af70f32d6da1b6f0457459a56cf9e393c1c0b2217 \ + --hash=sha256:1056cadd5e850a1c026f28e0704ab0a94daaa8f887ece8dfed30f88befb87bb0 \ + --hash=sha256:11d86c6145ac5c706c53d484784cf504d7d10fa407cb73b9d20f09ff986059ef \ + --hash=sha256:170ed4971bf9058582b01a8338605f4d8c849bd88834061e60e83b52d0c76870 \ + --hash=sha256:17791acaa0c0f89323c57da7b9a79f2174e26d5debbc8c02d84ebd80c2b7bff8 \ + --hash=sha256:17931dfbb84ae18b287279c1f92b76a3abcd9a49cd69b92e946035cff06bcd20 \ + --hash=sha256:18662443c6c3707e2fc7fad184b4dc32dd428710bbe72e1bce7fe1988d4aa654 \ + --hash=sha256:187df91395c11e9f9dc69b38d12406df85aa5865f1766a47907b1cc9855b6303 \ + --hash=sha256:1fee66b32e79264f428dc8da18396ad59cc48eef3c9c13844adec890cd339db5 \ + --hash=sha256:2270d590997445a0dc29afa92e5534bfea76ba3aea026289e811bf9ed4b65a7f \ + --hash=sha256:2654caaf5584449d49c94a6b382b3cb4a246c090e72453493ea168b931206a4d \ + 
--hash=sha256:26bfb6226e0c157af5da16d2d62258f1ac578d2899130a50433ffee4a5dfa673 \ + --hash=sha256:2941756754a10e799e5b87e2319bbec481ed0957421fba0e7b9fb1c11e40509f \ + --hash=sha256:3294f787a437cb5d81846de3a6697f0c35ecff37a932d73b1fe62490bef69211 \ + --hash=sha256:358dc7ddf25e79e1cc8ee16d970c23faee84d532b873519c5036dbb858965795 \ + --hash=sha256:38bc4ed5cae853409cb193c87c86cd0bc8d3a70fd2268a9807217b9176093ac6 \ + --hash=sha256:3a0baff7827a632204060f48dca9e63fbd6a5a0b8790c1a2adfb25dc2c9c0d50 \ + --hash=sha256:3a3ede8c248f36b60227eb777eac1dbc2f1022dc4d741b177c4379ca8e75571a \ + --hash=sha256:3a58a2f2ca7aaf22b265388d40232f453f67a6def7355a840b98c2d547bd037f \ + --hash=sha256:4434b739a8a101a837caeaa0137e0e38cb4ea561f39cb8960f3b1e7f4967a3fc \ + --hash=sha256:460024cacfc3246cc4d9f47a7fc860e4fcea7d1dc651e1256510d8c3c9c7cde0 \ + --hash=sha256:46c465ad06971abcf46dd532f77560181387b4eea59084434bdff97524444032 \ + --hash=sha256:48e424347a45568413deec6f6ee2d720de2cc0385019bedf44cd93e8638aa0ed \ + --hash=sha256:4a8c83f6fcdc327783bdc737e8e45b2e909b7bd108c4da1892d3bc59c04a6d84 \ + --hash=sha256:4c840cc11163d3c01a9d8aad227683c48cd3e5be5a785921bcc2a8b4b758c4f3 \ + --hash=sha256:4d486ddcaca8c68455aa01cf53d28d413fb41a35afc9f6594a730c9779545876 \ + --hash=sha256:4e76381be3d8ff96a4e6c77815653063e87555981329cf8f85e5be5abf449021 \ + --hash=sha256:50d866f7b1a3f16f98603e095f24c0eeba25eb508c85a2c5939c8b3870ba2df8 \ + --hash=sha256:52492b87d5877ec405542f43cd3da80bdcb2d0c2fbc73236526e5f2c28e6db28 \ + --hash=sha256:56afb44a12b0864d17b597210d63a5b88915d680f6484d8d202ed68ade38673d \ + --hash=sha256:585ce7cd97be8f538345de47b279b879e091c8b86d9dbc6d98a96a7ad78876a3 \ + --hash=sha256:5870d620b23b956f72bafed6a0ba9a62edb5f2ef78a8849b7615bd9433384171 \ + --hash=sha256:5c6ea72fe619fee5e6b5d4040a451d45d8175f560b11b3d3e044cd24b2720526 \ + --hash=sha256:688058e89f512fb7541cb85c2f149c292d3fa22f981d5a5453b40c5da49eb9e8 \ + --hash=sha256:6a3f47930fbbed0f6377639503848134c4aa25426b08778d641491131351c2c8 \ + 
--hash=sha256:6b981316fcd940f085f646b822c2ff2b8b813cbd61281acad229ea3cbaabeb6b \ + --hash=sha256:734144cd2bd633a1516948e477ff6c835041c0536cef1d5b9a823ae29899665b \ + --hash=sha256:736bb076f7299c5c55dfef3eb9e96071a795cb08052822c2bb349b06f4cb2e0a \ + --hash=sha256:752485cbbb50c1e20908450ff4f94217acba9358ebdce0d8106510859d6eb19a \ + --hash=sha256:753eaaa0c7195244c84b5cc159dc8204b7fd99f716f11198f999f2332a86b178 \ + --hash=sha256:75ac158560dec3ed72f6d604c81090ec44529cfb8169b05ae6fcb3e986b325d9 \ + --hash=sha256:76499469dcc24759399accd85ec27f237d52dec300daaca46a5352fcbebb1071 \ + --hash=sha256:782ca9c58f5c491c7afa55518542b2b005caedaf4685ec814fadfcee51f02493 \ + --hash=sha256:792155279dc093839e43f85ff7b9b6493a8eaa0af1f94f1f9c6e8f4de8c63500 \ + --hash=sha256:7a1606ba68e311576bcb1672b2a1543417e7e0aa4c85e9e718ba6466952476c0 \ + --hash=sha256:8281db240a1616af2f9c5f71d355057e73a1409c4648c8949901396dc0a3c151 \ + --hash=sha256:871e1b47eec7b6df76b23c642a81db5dd6536cbef26b7e80e7c56c2fd371382e \ + --hash=sha256:8b9c4643e7d843a0dca9cd9d610a0876e90a1b2cbc4c5ba7930a0d90baf6903f \ + --hash=sha256:8c6d5fed96f0646bfdf698b0a1cebf32b8aae6892d1bec0c5d2d6e2df44e1e2d \ + --hash=sha256:8e1bf59e035534ba4077f5361d8d5d9194149f9ed4f823d1ee29ef3e8964ace3 \ + --hash=sha256:8fd51299e21da709eabcd5b2dd60e39090804431292daacbee8d3dabe39a6bc0 \ + --hash=sha256:91c012dceadc695ccf69301bfdccd1fc4472ad714fe2dd3c5ab4d2046afddf29 \ + --hash=sha256:93771146ef048b34201bfa382c2bf74c524980870bb278e6df515efaf93699ff \ + --hash=sha256:93d1c8cc5bf5df401015c5e2a3ce75a5254a9839e5039c881365d2a9dcfc6dc2 \ + --hash=sha256:9611b83810a74a46be88847e0ea616794c406dbcb4e25405e52bff8f4bee2d0a \ + --hash=sha256:9bc27dd5cfdbe3dc7f381b05e6260ca6da41931a6e582267d5ca540270afeeb2 \ + --hash=sha256:ac8eda86cc75859093e9ce390d423aba968f50cf0e481e6c7d7d63f90bae5c9c \ + --hash=sha256:bc3003710e335e3f842ae3fd78efa55f11a863a89a72e9a07da214db3bf7e1f8 \ + --hash=sha256:bc61b005f6521fcc00ca0d1243559a5850b9dd1e1fe07b891410ee8fe192d0c0 \ + 
--hash=sha256:be4c7b1c49d9917c6e95258d3d07f43cfba2c69a6929816e77daf322aaba6628 \ + --hash=sha256:c019abc2eca67dfa4d8fb72ba924871d764ec3c92b86d5b53b405ad3d6aa56b0 \ + --hash=sha256:c42774d1d1508ec48c3ed29e7b110e33f5e74a20957ea16197dbcce8be6b52ba \ + --hash=sha256:c556fbc6820b6e2cda1ca675c5fa5589cf188f8da6b33e9fc05b002e603e44fa \ + --hash=sha256:c6e659b9a24d145e271c2faf3fa6dd1fcb3e5d3f4e17273d9e0350b6ab0fe6e2 \ + --hash=sha256:c74f0b0472ac40b04e6d28532f55cac8090e34c3e81f118d12843e6df14d0909 \ + --hash=sha256:cd7e35818d2328b679a13268d9ea505c85cd773572ebb7a0da7ccbca77b6a52e \ + --hash=sha256:d17832ba39374134c10e82d137e372b5f7478c4cceeb19d02ae3e3d1daed8721 \ + --hash=sha256:d1fa68a3c921365c5745b4bd3af6221ae1f0ea1bf04b69e94eda60e57958907f \ + --hash=sha256:d63123bfd0dce5f91101e77c8a5427c3872501acece8c90df457b486bc1acd47 \ + --hash=sha256:da9d3061e61e5ae3f753654813bc1cd1c70e02fb72cf871bd6daf78443e9e2b1 \ + --hash=sha256:db5ac3871ed76340210fe028f535392f097fb31b875354bcb69162bba2632ef4 \ + --hash=sha256:dd7abf4f717e33b7487121faf23560b3a50924f80e4bef62b22dab441ded8f3b \ + --hash=sha256:dd90238d3a77a0e07d4d6ffdebc0c21a9787c5953a508a2231b5f191455f31e9 \ + --hash=sha256:ef6eee1a61638d29cd7c85f7fd3ac7b22b4c0fabc8fd00a712b727a3e73b0685 \ + --hash=sha256:f11fd61d72d93ac23718d393d2a64469af40be2116b24da0a4ca6922df26807e \ + --hash=sha256:f1e7fedb09c059efee2533119666ca7e1a2610072076926fa028c2ba5dfeb78c \ + --hash=sha256:f25b7e93f5414b9a983e1a6c1820142c13e1782cc9ed354c25e933aebe97fcf2 \ + --hash=sha256:f2f44a4247461965fed18b2573f3a9eb5e2c3cad225201ee858726cde610daca \ + --hash=sha256:f5ffc6b7ace5b22d9e73b2a4c7305740a339fbd55301d52735f73e21d9eb3130 \ + --hash=sha256:ff6af03cac0d1a4c3c19e5dcc4c05252411bf44ccaa2485e20d0a7c77892ab6e \ + --hash=sha256:ff8d95e06546c3a8c188f68040e9d0360feb67ba8498baf018918f669f7bc39b # via aiohttp From 800a9071e357f4768d238b4659f4dd7205fb41e4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 19 Nov 2024 11:58:33 -0500 Subject: [PATCH 1217/1339] 
chore: add template for metadata (#2253) --- .../services/%service/_async_mixins.py.j2 | 50 ++++++++----------- .../%sub/services/%service/_client_macros.j2 | 5 +- .../%sub/services/%service/_mixins.py.j2 | 50 ++++++++----------- .../%sub/services/%service/_shared_macros.j2 | 22 +++++--- .../%sub/services/%service/async_client.py.j2 | 20 +++----- .../%sub/services/%service/client.py.j2 | 15 +++--- .../%sub/services/%service/pagers.py.j2 | 11 ++-- .../services/%service/transports/rest.py.j2 | 5 +- .../%service/transports/rest_asyncio.py.j2 | 5 +- 9 files changed, 80 insertions(+), 103 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 index 33ecb4d8e08c..60b8f0a7e961 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_async_mixins.py.j2 @@ -9,7 +9,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -20,8 +20,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. 
@@ -62,7 +61,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -73,8 +72,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -114,7 +112,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Deletes a long-running operation. @@ -130,8 +128,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -166,7 +163,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Starts asynchronous cancellation on a long-running operation. 
@@ -181,8 +178,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -217,7 +213,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. @@ -234,8 +230,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -280,7 +275,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -293,8 +288,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -398,7 +392,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -412,8 +406,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -517,7 +510,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -532,8 +525,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. 
@@ -578,7 +570,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.Location: r"""Gets information about a location. @@ -589,8 +581,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.Location: Location object. @@ -630,7 +621,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -641,8 +632,7 @@ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 7eeda037b79f..97fa01773cd4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -30,7 +30,7 @@ {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, {% if method.extended_lro and not full_extended_lro %}{# This is a hack to preserve backwards compatibility with the "unary" surfaces #} ) -> {{ method.extended_lro.operation_type.ident }}: {% elif not method.server_streaming %} @@ -68,8 +68,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} {% if not method.void %} Returns: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index 8a57c5944cb7..70c686f60a00 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -7,7 +7,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -18,8 +18,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -60,7 +59,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -71,8 +70,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -112,7 +110,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Deletes a long-running operation. @@ -128,8 +126,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -164,7 +161,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -179,8 +176,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: None """ @@ -215,7 +211,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. @@ -232,8 +228,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -278,7 +273,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -291,8 +286,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
@@ -396,7 +390,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -410,8 +404,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -515,7 +508,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -530,8 +523,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. 
@@ -576,7 +568,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.Location: r"""Gets information about a location. @@ -587,8 +579,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.Location: Location object. @@ -628,7 +619,7 @@ *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -639,8 +630,7 @@ retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index cc795cc91aa3..8a11623e785d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -339,7 +339,7 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: """ {% for method in service.methods.values()|sort(attribute="name") if not method.client_streaming and method.http_options %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2147): Remove the condition below once async rest transport supports the guarded methods. #} - {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, metadata: Sequence[Tuple[str, str]]) -> Tuple[{{method.input.ident}}, Sequence[Tuple[str, str]]]: + {{ async_prefix }}def pre_{{ method.name|snake_case }}(self, request: {{method.input.ident}}, {{ client_method_metadata_argument() }}) -> Tuple[{{method.input.ident}}, {{ client_method_metadata_type() }}]: """Pre-rpc interceptor for {{ method.name|snake_case }} Override in a subclass to manipulate the request or metadata @@ -366,8 +366,8 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: {% for name, signature in api.mixin_api_signatures.items() %} {{ async_prefix }}def pre_{{ name|snake_case }}( - self, request: {{signature.request_type}}, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[{{signature.request_type}}, Sequence[Tuple[str, str]]]: + self, request: {{signature.request_type}}, {{ client_method_metadata_argument() }} + ) -> Tuple[{{signature.request_type}}, {{ client_method_metadata_type() }}]: """Pre-rpc interceptor for {{ name|snake_case }} Override in a subclass to manipulate 
the request or metadata @@ -414,7 +414,7 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m request: {{ sig.request_type }}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + {{ client_method_metadata_argument()|indent(8) }}={{ client_method_metadata_default_value() }}, ) -> {{ sig.response_type }}: r"""Call the {{- ' ' -}} @@ -427,8 +427,7 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m retry (google.api_core.retry{{ async_suffix }}.{{ async_method_name_prefix }}Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ client_method_metadata_argument_doc()|indent(4) }} {% if sig.response_type != 'None' %} Returns: @@ -451,4 +450,13 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m return resp {% endif %} -{% endmacro %} \ No newline at end of file +{% endmacro %} + +{% macro client_method_metadata_argument() %}metadata: {{ client_method_metadata_type() }}{% endmacro %} + +{% macro client_method_metadata_type() %}Sequence[Tuple[str, str]]{% endmacro %} + +{% macro client_method_metadata_default_value() %}(){% endmacro %} + +{% macro client_method_metadata_argument_doc() %}metadata ({{ client_method_metadata_type() }}): Strings which should be + sent along with the request as metadata.{% endmacro %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 1abddd198eb9..9622866d87e1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -259,7 +259,7 @@ class {{ service.async_client_name }}: {% endif %} retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, {% if not method.server_streaming %} ) -> {{ method.client_output_async.ident }}: {% else %} @@ -295,8 +295,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} {% if not method.void %} Returns: @@ -423,7 +422,7 @@ class {{ service.async_client_name }}: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -436,8 +435,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
@@ -536,7 +534,7 @@ class {{ service.async_client_name }}: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -550,8 +548,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -651,7 +648,7 @@ class {{ service.async_client_name }}: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control policy for a function. @@ -666,8 +663,7 @@ class {{ service.async_client_name }}: retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~iam_policy_pb2.PolicyTestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 4bb43f83673c..4d78dc0c754a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -631,7 +631,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -644,8 +644,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -751,7 +750,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -765,8 +764,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -872,7 +870,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + {{ shared_macros.client_method_metadata_argument()|indent(8) }} = {{ shared_macros.client_method_metadata_default_value() }}, ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -887,8 +885,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 index 796cefe48cc5..0d7b1dad8ba8 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/pagers.py.j2 @@ -1,3 +1,4 @@ +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} {% extends '_base.py.j2' %} {% block content %} @@ -54,7 +55,7 @@ class {{ method.name }}Pager: *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }}): """Instantiate the pager. Args: @@ -67,8 +68,7 @@ class {{ method.name }}Pager: retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} """ self._method = method self._request = {{ method.input.ident }}(request) @@ -131,7 +131,7 @@ class {{ method.name }}AsyncPager: *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + {{ shared_macros.client_method_metadata_argument()|indent(4) }} = {{ shared_macros.client_method_metadata_default_value() }}): """Instantiates the pager. Args: @@ -144,8 +144,7 @@ class {{ method.name }}AsyncPager: retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + {{ shared_macros.client_method_metadata_argument_doc()|indent(4) }} """ self._method = method self._request = {{ method.input.ident }}(request) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 1001df616c3c..231a436c0877 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -203,7 +203,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + {{ shared_macros.client_method_metadata_argument()|indent(8) }}={{ shared_macros.client_method_metadata_default_value() }}, ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming.ResponseIterator{% endif %}{% endif %}: {% if method.http_options and not method.client_streaming %} r"""Call the {{- ' ' -}} @@ -218,8 +218,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(8) }} {% if not method.void %} Returns: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index beb2d7ee1105..c7d91a0b3168 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -162,7 +162,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): request: {{method.input.ident}}, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + {{ shared_macros.client_method_metadata_argument()|indent(12) }}={{ shared_macros.client_method_metadata_default_value() }}, {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Update return type for client streaming method. #} ){% if not method.void %} -> {% if not method.server_streaming %}{{method.output.ident}}{% else %}rest_streaming_async.AsyncResponseIterator{% endif %}{% endif %}: {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2169): Implement client streaming method. #} @@ -179,8 +179,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ {{ shared_macros.client_method_metadata_argument_doc()|indent(8) }} {% if not method.void %} Returns: From 842664231f02dad6dc717c48a0b20dd31d5e85d6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 22 Nov 2024 16:16:53 -0500 Subject: [PATCH 1218/1339] fix: fix typing issue with gRPC metadata when key ends in -bin (#2251) --- .../gapic/samplegen/samplegen.py | 2 +- .../%sub/services/%service/_shared_macros.j2 | 8 +- .../services/asset_service/async_client.py | 192 +++++++----- .../asset_v1/services/asset_service/client.py | 192 +++++++----- .../asset_v1/services/asset_service/pagers.py | 112 ++++--- .../services/asset_service/transports/rest.py | 242 +++++++++------ ...nippet_metadata_google.cloud.asset.v1.json | 92 +++--- .../services/iam_credentials/async_client.py | 32 +- .../services/iam_credentials/client.py | 32 +- .../iam_credentials/transports/rest.py | 40 ++- ...et_metadata_google.iam.credentials.v1.json | 16 +- .../services/eventarc/async_client.py | 216 ++++++++----- .../eventarc_v1/services/eventarc/client.py | 216 ++++++++----- .../eventarc_v1/services/eventarc/pagers.py | 64 ++-- .../services/eventarc/transports/rest.py | 288 +++++++++++------- ...pet_metadata_google.cloud.eventarc.v1.json | 72 ++--- .../config_service_v2/async_client.py | 280 ++++++++++------- .../services/config_service_v2/client.py | 280 ++++++++++------- .../services/config_service_v2/pagers.py | 80 +++-- .../logging_service_v2/async_client.py | 72 +++-- .../services/logging_service_v2/client.py | 72 +++-- .../services/logging_service_v2/pagers.py | 48 +-- .../metrics_service_v2/async_client.py | 64 ++-- .../services/metrics_service_v2/client.py | 64 ++-- .../services/metrics_service_v2/pagers.py | 16 +- .../snippet_metadata_google.logging.v2.json | 172 +++++------ .../services/cloud_redis/async_client.py | 144 +++++---- .../redis_v1/services/cloud_redis/client.py | 144 +++++---- .../redis_v1/services/cloud_redis/pagers.py | 16 +- 
.../services/cloud_redis/transports/rest.py | 194 +++++++----- .../cloud_redis/transports/rest_asyncio.py | 194 +++++++----- ...nippet_metadata_google.cloud.redis.v1.json | 44 +-- .../tests/system/test_request_metadata.py | 54 ++++ .../tests/unit/samplegen/test_integration.py | 20 +- .../tests/unit/samplegen/test_samplegen.py | 10 +- 35 files changed, 2312 insertions(+), 1472 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_request_metadata.py diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index b583e5cbb251..b6b5635f344c 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -1107,7 +1107,7 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore name="timeout", type="float")) parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - name="metadata", type="Sequence[Tuple[str, str]")) + name="metadata", type="Sequence[Tuple[str, Union[str, bytes]]]")) return snippet_metadata diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 8a11623e785d..9dd9bac88bbf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -454,9 +454,11 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m {% macro client_method_metadata_argument() %}metadata: {{ client_method_metadata_type() }}{% endmacro %} -{% macro client_method_metadata_type() %}Sequence[Tuple[str, str]]{% endmacro %} +{% macro 
client_method_metadata_type() %}Sequence[Tuple[str, Union[str, bytes]]]{% endmacro %} {% macro client_method_metadata_default_value() %}(){% endmacro %} -{% macro client_method_metadata_argument_doc() %}metadata ({{ client_method_metadata_type() }}): Strings which should be - sent along with the request as metadata.{% endmacro %} +{% macro client_method_metadata_argument_doc() %}metadata ({{ client_method_metadata_type() }}): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`.{% endmacro %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 6aef1e816227..9effc7f1f163 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -252,7 +252,7 @@ async def export_assets(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. For Cloud Storage location @@ -308,8 +308,10 @@ async def sample_export_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -368,7 +370,7 @@ async def list_assets(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListAssetsAsyncPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -418,8 +420,10 @@ async def sample_list_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager: @@ -490,7 +494,7 @@ async def batch_get_assets_history(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. 
For IAM_POLICY content, this API outputs history when @@ -532,8 +536,10 @@ async def sample_batch_get_assets_history(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: @@ -577,7 +583,7 @@ async def create_feed(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset @@ -634,8 +640,10 @@ async def sample_create_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.Feed: @@ -699,7 +707,7 @@ async def get_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -744,8 +752,10 @@ async def sample_get_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -809,7 +819,7 @@ async def list_feeds(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -857,8 +867,10 @@ async def sample_list_feeds(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.ListFeedsResponse: @@ -914,7 +926,7 @@ async def update_feed(self, feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -963,8 +975,10 @@ async def sample_update_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1028,7 +1042,7 @@ async def delete_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an asset feed. @@ -1070,8 +1084,10 @@ async def sample_delete_feed(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1122,7 +1138,7 @@ async def search_all_resources(self, asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllResourcesAsyncPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be @@ -1273,8 +1289,10 @@ async def sample_search_all_resources(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager: @@ -1351,7 +1369,7 @@ async def search_all_iam_policies(self, query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllIamPoliciesAsyncPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. The caller must be granted the @@ -1466,8 +1484,10 @@ async def sample_search_all_iam_policies(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager: @@ -1540,7 +1560,7 @@ async def analyze_iam_policy(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -1581,8 +1601,10 @@ async def sample_analyze_iam_policy(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: @@ -1627,7 +1649,7 @@ async def analyze_iam_policy_longrunning(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis @@ -1686,8 +1708,10 @@ async def sample_analyze_iam_policy_longrunning(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1742,7 +1766,7 @@ async def analyze_move(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is @@ -1786,8 +1810,10 @@ async def sample_analyze_move(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.AnalyzeMoveResponse: @@ -1832,7 +1858,7 @@ async def query_assets(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard @@ -1884,8 +1910,10 @@ async def sample_query_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.QueryAssetsResponse: @@ -1931,7 +1959,7 @@ async def create_saved_query(self, saved_query_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2003,8 +2031,10 @@ async def sample_create_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2066,7 +2096,7 @@ async def get_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Gets details about a saved query. @@ -2113,8 +2143,10 @@ async def sample_get_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2172,7 +2204,7 @@ async def list_saved_queries(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSavedQueriesAsyncPager: r"""Lists all saved queries in a parent project/folder/organization. 
@@ -2222,8 +2254,10 @@ async def sample_list_saved_queries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager: @@ -2296,7 +2330,7 @@ async def update_saved_query(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Updates a saved query. @@ -2351,8 +2385,10 @@ async def sample_update_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2412,7 +2448,7 @@ async def delete_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a saved query. @@ -2456,8 +2492,10 @@ async def sample_delete_saved_query(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2505,7 +2543,7 @@ async def batch_get_effective_iam_policies(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. @@ -2543,8 +2581,10 @@ async def sample_batch_get_effective_iam_policies(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: @@ -2592,7 +2632,7 @@ async def analyze_org_policies(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPoliciesAsyncPager: r"""Analyzes organization policies under a scope. @@ -2665,8 +2705,10 @@ async def sample_analyze_org_policies(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager: @@ -2744,7 +2786,7 @@ async def analyze_org_policy_governed_containers(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -2817,8 +2859,10 @@ async def sample_analyze_org_policy_governed_containers(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager: @@ -2896,7 +2940,7 @@ async def analyze_org_policy_governed_assets(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom @@ -2998,8 +3042,10 @@ async def sample_analyze_org_policy_governed_assets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager: @@ -3075,7 +3121,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3086,8 +3132,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 430968420fd1..ffbc99c45deb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -623,7 +623,7 @@ def export_assets(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Exports assets with time and resource types to a given Cloud Storage location/BigQuery table. 
For Cloud Storage location @@ -679,8 +679,10 @@ def sample_export_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -739,7 +741,7 @@ def list_assets(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListAssetsPager: r"""Lists assets with time and resource types and returns paged results in response. @@ -789,8 +791,10 @@ def sample_list_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager: @@ -860,7 +864,7 @@ def batch_get_assets_history(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Batch gets the update history of assets that overlap a time window. For IAM_POLICY content, this API outputs history when @@ -902,8 +906,10 @@ def sample_batch_get_assets_history(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse: @@ -947,7 +953,7 @@ def create_feed(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Creates a feed in a parent project/folder/organization to listen to its asset @@ -1004,8 +1010,10 @@ def sample_create_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1068,7 +1076,7 @@ def get_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Gets details about an asset feed. @@ -1113,8 +1121,10 @@ def sample_get_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1177,7 +1187,7 @@ def list_feeds(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.ListFeedsResponse: r"""Lists all asset feeds in a parent project/folder/organization. @@ -1225,8 +1235,10 @@ def sample_list_feeds(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.ListFeedsResponse: @@ -1281,7 +1293,7 @@ def update_feed(self, feed: Optional[asset_service.Feed] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.Feed: r"""Updates an asset feed configuration. @@ -1330,8 +1342,10 @@ def sample_update_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.Feed: @@ -1394,7 +1408,7 @@ def delete_feed(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an asset feed. @@ -1436,8 +1450,10 @@ def sample_delete_feed(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1487,7 +1503,7 @@ def search_all_resources(self, asset_types: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllResourcesPager: r"""Searches all Google Cloud resources within the specified scope, such as a project, folder, or organization. The caller must be @@ -1638,8 +1654,10 @@ def sample_search_all_resources(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager: @@ -1715,7 +1733,7 @@ def search_all_iam_policies(self, query: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.SearchAllIamPoliciesPager: r"""Searches all IAM policies within the specified scope, such as a project, folder, or organization. 
The caller must be granted the @@ -1830,8 +1848,10 @@ def sample_search_all_iam_policies(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager: @@ -1903,7 +1923,7 @@ def analyze_iam_policy(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Analyzes IAM policies to answer which identities have what accesses on which resources. @@ -1944,8 +1964,10 @@ def sample_analyze_iam_policy(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.types.AnalyzeIamPolicyResponse: @@ -1990,7 +2012,7 @@ def analyze_iam_policy_longrunning(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Analyzes IAM policies asynchronously to answer which identities have what accesses on which resources, and writes the analysis @@ -2049,8 +2071,10 @@ def sample_analyze_iam_policy_longrunning(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2105,7 +2129,7 @@ def analyze_move(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.AnalyzeMoveResponse: r"""Analyze moving a resource to a specified destination without kicking off the actual move. The analysis is @@ -2149,8 +2173,10 @@ def sample_analyze_move(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.AnalyzeMoveResponse: @@ -2195,7 +2221,7 @@ def query_assets(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.QueryAssetsResponse: r"""Issue a job that queries assets using a SQL statement compatible with `BigQuery Standard @@ -2247,8 +2273,10 @@ def sample_query_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.QueryAssetsResponse: @@ -2294,7 +2322,7 @@ def create_saved_query(self, saved_query_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Creates a saved query in a parent project/folder/organization. @@ -2366,8 +2394,10 @@ def sample_create_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2428,7 +2458,7 @@ def get_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Gets details about a saved query. @@ -2475,8 +2505,10 @@ def sample_get_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2533,7 +2565,7 @@ def list_saved_queries(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSavedQueriesPager: r"""Lists all saved queries in a parent project/folder/organization. @@ -2583,8 +2615,10 @@ def sample_list_saved_queries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager: @@ -2656,7 +2690,7 @@ def update_saved_query(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.SavedQuery: r"""Updates a saved query. @@ -2711,8 +2745,10 @@ def sample_update_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.SavedQuery: @@ -2771,7 +2807,7 @@ def delete_saved_query(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a saved query. @@ -2815,8 +2851,10 @@ def sample_delete_saved_query(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2863,7 +2901,7 @@ def batch_get_effective_iam_policies(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Gets effective IAM policies for a batch of resources. @@ -2901,8 +2939,10 @@ def sample_batch_get_effective_iam_policies(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse: @@ -2950,7 +2990,7 @@ def analyze_org_policies(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPoliciesPager: r"""Analyzes organization policies under a scope. 
@@ -3023,8 +3063,10 @@ def sample_analyze_org_policies(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager: @@ -3101,7 +3143,7 @@ def analyze_org_policy_governed_containers(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedContainersPager: r"""Analyzes organization policies governed containers (projects, folders or organization) under a scope. @@ -3174,8 +3216,10 @@ def sample_analyze_org_policy_governed_containers(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager: @@ -3252,7 +3296,7 @@ def analyze_org_policy_governed_assets(self, filter: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.AnalyzeOrgPolicyGovernedAssetsPager: r"""Analyzes organization policies governed assets (Google Cloud resources or policies) under a scope. This RPC supports custom @@ -3354,8 +3398,10 @@ def sample_analyze_org_policy_governed_assets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager: @@ -3443,7 +3489,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3454,8 +3500,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 1498b1ab970c..5620c82f3151 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -52,7 +52,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -65,8 +65,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.ListAssetsRequest(request) @@ -118,7 +120,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -131,8 +133,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.ListAssetsRequest(request) @@ -187,7 +191,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -200,8 +204,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.SearchAllResourcesRequest(request) @@ -253,7 +259,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -266,8 +272,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.SearchAllResourcesRequest(request) @@ -322,7 +330,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -335,8 +343,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.SearchAllIamPoliciesRequest(request) @@ -388,7 +398,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -401,8 +411,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.SearchAllIamPoliciesRequest(request) @@ -457,7 +469,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -470,8 +482,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.ListSavedQueriesRequest(request) @@ -523,7 +537,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -536,8 +550,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.ListSavedQueriesRequest(request) @@ -592,7 +608,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -605,8 +621,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.AnalyzeOrgPoliciesRequest(request) @@ -658,7 +676,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -671,8 +689,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPoliciesRequest(request) @@ -727,7 +747,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -740,8 +760,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) @@ -793,7 +815,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -806,8 +828,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedContainersRequest(request) @@ -862,7 +886,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -875,8 +899,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) @@ -928,7 +954,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -941,8 +967,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index e4ca2ea51423..df63e0de3271 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -249,7 +249,7 @@ def post_update_saved_query(self, response): """ - def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, metadata: Sequence[Tuple[str, 
Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy Override in a subclass to manipulate the request or metadata @@ -266,7 +266,7 @@ def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyRespon """ return response - def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy_longrunning Override in a subclass to manipulate the request or metadata @@ -283,7 +283,7 @@ def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation """ return response - def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_move Override in a subclass to manipulate the request or metadata @@ -300,7 +300,7 @@ def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asse """ return response - def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, 
Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policies Override in a subclass to manipulate the request or metadata @@ -317,7 +317,7 @@ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesRe """ return response - def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_assets Override in a subclass to manipulate the request or metadata @@ -334,7 +334,7 @@ def post_analyze_org_policy_governed_assets(self, response: asset_service.Analyz """ return response - def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, str]]]: + def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_containers Override in a subclass to manipulate the request or metadata @@ -351,7 +351,7 @@ def post_analyze_org_policy_governed_containers(self, response: asset_service.An """ return response - def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, 
str]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, str]]]: + def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_assets_history Override in a subclass to manipulate the request or metadata @@ -368,7 +368,7 @@ def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHi """ return response - def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, str]]]: + def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_effective_iam_policies Override in a subclass to manipulate the request or metadata @@ -385,7 +385,7 @@ def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGet """ return response - def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, str]]]: + def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_feed Override in a subclass to manipulate the request or metadata @@ -402,7 +402,7 @@ def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: 
Sequence[Tuple[str, str]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, str]]]: + def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_saved_query Override in a subclass to manipulate the request or metadata @@ -419,7 +419,7 @@ def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_s """ return response - def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, str]]]: + def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_feed Override in a subclass to manipulate the request or metadata @@ -427,7 +427,7 @@ def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Se """ return request, metadata - def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, str]]]: + def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_saved_query Override in a subclass to manipulate the request or metadata @@ -435,7 +435,7 @@ def pre_delete_saved_query(self, request: asset_service.DeleteSavedQueryRequest, """ return request, metadata - def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, 
str]]]: + def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ExportAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_assets Override in a subclass to manipulate the request or metadata @@ -452,7 +452,7 @@ def post_export_assets(self, response: operations_pb2.Operation) -> operations_p """ return response - def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, str]]]: + def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_feed Override in a subclass to manipulate the request or metadata @@ -469,7 +469,7 @@ def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """ return response - def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, str]]]: + def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_saved_query Override in a subclass to manipulate the request or metadata @@ -486,7 +486,7 @@ def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_serv """ return response - def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, str]]]: + def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, 
Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_assets Override in a subclass to manipulate the request or metadata @@ -503,7 +503,7 @@ def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_ """ return response - def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, str]]]: + def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_feeds Override in a subclass to manipulate the request or metadata @@ -520,7 +520,7 @@ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_se """ return response - def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, str]]]: + def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_saved_queries Override in a subclass to manipulate the request or metadata @@ -537,7 +537,7 @@ def post_list_saved_queries(self, response: asset_service.ListSavedQueriesRespon """ return response - def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, str]]]: + def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for query_assets Override in a subclass to manipulate the request or metadata @@ 
-554,7 +554,7 @@ def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asse """ return response - def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, str]]]: + def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_iam_policies Override in a subclass to manipulate the request or metadata @@ -571,7 +571,7 @@ def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPolic """ return response - def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, str]]]: + def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_resources Override in a subclass to manipulate the request or metadata @@ -588,7 +588,7 @@ def post_search_all_resources(self, response: asset_service.SearchAllResourcesRe """ return response - def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, str]]]: + def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_feed Override in a subclass to manipulate the request or metadata @@ -605,7 +605,7 @@ def post_update_feed(self, response: 
asset_service.Feed) -> asset_service.Feed: """ return response - def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, str]]]: + def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_saved_query Override in a subclass to manipulate the request or metadata @@ -623,8 +623,8 @@ def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_s return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -794,7 +794,7 @@ def __call__(self, request: asset_service.AnalyzeIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeIamPolicyResponse: r"""Call the analyze iam policy method over HTTP. @@ -805,8 +805,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeIamPolicyResponse: @@ -869,7 +871,7 @@ def __call__(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the analyze iam policy longrunning method over HTTP. @@ -881,8 +883,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -945,7 +949,7 @@ def __call__(self, request: asset_service.AnalyzeMoveRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeMoveResponse: r"""Call the analyze move method over HTTP. @@ -956,8 +960,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeMoveResponse: @@ -1019,7 +1025,7 @@ def __call__(self, request: asset_service.AnalyzeOrgPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeOrgPoliciesResponse: r"""Call the analyze org policies method over HTTP. @@ -1030,8 +1036,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeOrgPoliciesResponse: @@ -1093,7 +1101,7 @@ def __call__(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: r"""Call the analyze org policy governed assets method over HTTP. @@ -1105,8 +1113,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: @@ -1168,7 +1178,7 @@ def __call__(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: r"""Call the analyze org policy governed containers method over HTTP. @@ -1180,8 +1190,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.AnalyzeOrgPolicyGovernedContainersResponse: @@ -1243,7 +1255,7 @@ def __call__(self, request: asset_service.BatchGetAssetsHistoryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.BatchGetAssetsHistoryResponse: r"""Call the batch get assets history method over HTTP. @@ -1253,8 +1265,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.BatchGetAssetsHistoryResponse: @@ -1314,7 +1328,7 @@ def __call__(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.BatchGetEffectiveIamPoliciesResponse: r"""Call the batch get effective iam policies method over HTTP. @@ -1326,8 +1340,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.BatchGetEffectiveIamPoliciesResponse: @@ -1390,7 +1406,7 @@ def __call__(self, request: asset_service.CreateFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.Feed: r"""Call the create feed method over HTTP. @@ -1400,8 +1416,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.Feed: @@ -1472,7 +1490,7 @@ def __call__(self, request: asset_service.CreateSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SavedQuery: r"""Call the create saved query method over HTTP. @@ -1482,8 +1500,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.SavedQuery: @@ -1547,7 +1567,7 @@ def __call__(self, request: asset_service.DeleteFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ): r"""Call the delete feed method over HTTP. @@ -1557,8 +1577,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() @@ -1606,7 +1628,7 @@ def __call__(self, request: asset_service.DeleteSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ): r"""Call the delete saved query method over HTTP. @@ -1616,8 +1638,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() @@ -1666,7 +1690,7 @@ def __call__(self, request: asset_service.ExportAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the export assets method over HTTP. @@ -1676,8 +1700,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1740,7 +1766,7 @@ def __call__(self, request: asset_service.GetFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.Feed: r"""Call the get feed method over HTTP. @@ -1750,8 +1776,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.Feed: @@ -1819,7 +1847,7 @@ def __call__(self, request: asset_service.GetSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SavedQuery: r"""Call the get saved query method over HTTP. @@ -1829,8 +1857,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.SavedQuery: @@ -1892,7 +1922,7 @@ def __call__(self, request: asset_service.ListAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.ListAssetsResponse: r"""Call the list assets method over HTTP. @@ -1902,8 +1932,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.ListAssetsResponse: @@ -1963,7 +1995,7 @@ def __call__(self, request: asset_service.ListFeedsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.ListFeedsResponse: r"""Call the list feeds method over HTTP. @@ -1973,8 +2005,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.ListFeedsResponse: @@ -2034,7 +2068,7 @@ def __call__(self, request: asset_service.ListSavedQueriesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.ListSavedQueriesResponse: r"""Call the list saved queries method over HTTP. @@ -2044,8 +2078,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.ListSavedQueriesResponse: @@ -2106,7 +2142,7 @@ def __call__(self, request: asset_service.QueryAssetsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.QueryAssetsResponse: r"""Call the query assets method over HTTP. @@ -2116,8 +2152,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.QueryAssetsResponse: @@ -2179,7 +2217,7 @@ def __call__(self, request: asset_service.SearchAllIamPoliciesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SearchAllIamPoliciesResponse: r"""Call the search all iam policies method over HTTP. @@ -2189,8 +2227,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.SearchAllIamPoliciesResponse: @@ -2250,7 +2290,7 @@ def __call__(self, request: asset_service.SearchAllResourcesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SearchAllResourcesResponse: r"""Call the search all resources method over HTTP. @@ -2260,8 +2300,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.SearchAllResourcesResponse: @@ -2322,7 +2364,7 @@ def __call__(self, request: asset_service.UpdateFeedRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.Feed: r"""Call the update feed method over HTTP. @@ -2332,8 +2374,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.Feed: @@ -2404,7 +2448,7 @@ def __call__(self, request: asset_service.UpdateSavedQueryRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> asset_service.SavedQuery: r"""Call the update saved query method over HTTP. @@ -2414,8 +2458,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.asset_service.SavedQuery: @@ -2667,7 +2713,7 @@ def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2678,8 +2724,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 5a90dfa88b31..211efb19c2ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -196,7 +196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", @@ -272,7 +272,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeIamPolicyResponse", @@ -349,7 +349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.AnalyzeMoveResponse", @@ -514,7 +514,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesAsyncPager", @@ -602,7 +602,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPoliciesPager", @@ -691,7 +691,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsAsyncPager", @@ -779,7 +779,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedAssetsPager", @@ -868,7 +868,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersAsyncPager", @@ -956,7 +956,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.AnalyzeOrgPolicyGovernedContainersPager", @@ -1033,7 +1033,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", @@ -1109,7 +1109,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetAssetsHistoryResponse", @@ -1186,7 +1186,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", @@ -1262,7 +1262,7 @@ }, { "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.BatchGetEffectiveIamPoliciesResponse", @@ -1343,7 +1343,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -1423,7 +1423,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -1512,7 +1512,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -1600,7 +1600,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -1681,7 +1681,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_feed" @@ -1758,7 +1758,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_feed" @@ -1836,7 +1836,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_saved_query" @@ -1913,7 +1913,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_saved_query" @@ -1987,7 +1987,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -2063,7 +2063,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
"resultType": "google.api_core.operation.Operation", @@ -2144,7 +2144,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -2224,7 +2224,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -2305,7 +2305,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -2385,7 +2385,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -2466,7 +2466,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsAsyncPager", @@ -2546,7 +2546,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListAssetsPager", @@ -2627,7 +2627,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", @@ -2707,7 +2707,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.ListFeedsResponse", @@ -2788,7 +2788,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesAsyncPager", @@ -2868,7 +2868,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.ListSavedQueriesPager", @@ -2945,7 +2945,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", @@ -3021,7 +3021,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.QueryAssetsResponse", @@ -3106,7 +3106,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesAsyncPager", @@ -3190,7 +3190,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllIamPoliciesPager", @@ -3279,7 +3279,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesAsyncPager", @@ -3367,7 +3367,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.services.asset_service.pagers.SearchAllResourcesPager", @@ -3448,7 +3448,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -3528,7 +3528,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.Feed", @@ -3613,7 +3613,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", @@ -3697,7 +3697,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.asset_v1.types.SavedQuery", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 485c5bdee9e8..ca93c43e38a1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -247,7 +247,7 @@ async def generate_access_token(self, lifetime: Optional[duration_pb2.Duration] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service account. @@ -336,8 +336,10 @@ async def sample_generate_access_token(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.iam.credentials_v1.types.GenerateAccessTokenResponse: @@ -402,7 +404,7 @@ async def generate_id_token(self, include_email: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service account. @@ -485,8 +487,10 @@ async def sample_generate_id_token(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.iam.credentials_v1.types.GenerateIdTokenResponse: @@ -550,7 +554,7 @@ async def sign_blob(self, payload: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed private key. @@ -622,8 +626,10 @@ async def sample_sign_blob(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.iam.credentials_v1.types.SignBlobResponse: @@ -685,7 +691,7 @@ async def sign_jwt(self, payload: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed private key. @@ -760,8 +766,10 @@ async def sample_sign_jwt(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.iam.credentials_v1.types.SignJwtResponse: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index ecdde65b21c8..c8adeac9aad8 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -564,7 +564,7 @@ def generate_access_token(self, lifetime: Optional[duration_pb2.Duration] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.GenerateAccessTokenResponse: r"""Generates an OAuth 2.0 access token for a service account. @@ -653,8 +653,10 @@ def sample_generate_access_token(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.iam.credentials_v1.types.GenerateAccessTokenResponse: @@ -718,7 +720,7 @@ def generate_id_token(self, include_email: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.GenerateIdTokenResponse: r"""Generates an OpenID Connect ID token for a service account. @@ -801,8 +803,10 @@ def sample_generate_id_token(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.iam.credentials_v1.types.GenerateIdTokenResponse: @@ -865,7 +869,7 @@ def sign_blob(self, payload: Optional[bytes] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.SignBlobResponse: r"""Signs a blob using a service account's system-managed private key. @@ -937,8 +941,10 @@ def sample_sign_blob(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.iam.credentials_v1.types.SignBlobResponse: @@ -999,7 +1005,7 @@ def sign_jwt(self, payload: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.SignJwtResponse: r"""Signs a JWT using a service account's system-managed private key. @@ -1074,8 +1080,10 @@ def sample_sign_jwt(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.iam.credentials_v1.types.SignJwtResponse: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index a2e5c7b97022..a6ab18b2e235 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -102,7 +102,7 @@ def post_sign_jwt(self, response): """ - def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, str]]]: + def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_access_token Override in a subclass to manipulate the request or metadata @@ -119,7 +119,7 @@ def post_generate_access_token(self, response: common.GenerateAccessTokenRespons """ return response - def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, str]]]: + def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_id_token Override in a subclass to manipulate the request or metadata @@ -136,7 +136,7 @@ def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> co """ return response - def pre_sign_blob(self, 
request: common.SignBlobRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, str]]]: + def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_blob Override in a subclass to manipulate the request or metadata @@ -153,7 +153,7 @@ def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobRe """ return response - def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, str]]]: + def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_jwt Override in a subclass to manipulate the request or metadata @@ -299,7 +299,7 @@ def __call__(self, request: common.GenerateAccessTokenRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.GenerateAccessTokenResponse: r"""Call the generate access token method over HTTP. @@ -309,8 +309,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.common.GenerateAccessTokenResponse: @@ -373,7 +375,7 @@ def __call__(self, request: common.GenerateIdTokenRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.GenerateIdTokenResponse: r"""Call the generate id token method over HTTP. @@ -383,8 +385,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.common.GenerateIdTokenResponse: @@ -447,7 +451,7 @@ def __call__(self, request: common.SignBlobRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.SignBlobResponse: r"""Call the sign blob method over HTTP. @@ -457,8 +461,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.common.SignBlobResponse: @@ -521,7 +527,7 @@ def __call__(self, request: common.SignJwtRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> common.SignJwtResponse: r"""Call the sign jwt method over HTTP. @@ -531,8 +537,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.common.SignJwtResponse: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index 317a7f4a2dab..636249950f11 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -59,7 +59,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateAccessTokenResponse", @@ -151,7 +151,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateAccessTokenResponse", 
@@ -244,7 +244,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateIdTokenResponse", @@ -336,7 +336,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.GenerateIdTokenResponse", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignBlobResponse", @@ -513,7 +513,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignBlobResponse", @@ -602,7 +602,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignJwtResponse", @@ -690,7 +690,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.iam.credentials_v1.types.SignJwtResponse", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 7e51502dda4c..38daa4f1c614 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -271,7 +271,7 @@ async def get_trigger(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, 
str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -315,8 +315,10 @@ async def sample_get_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Trigger: @@ -374,7 +376,7 @@ async def list_triggers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTriggersAsyncPager: r"""List triggers. @@ -419,8 +421,10 @@ async def sample_list_triggers(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager: @@ -493,7 +497,7 @@ async def create_trigger(self, trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a new trigger in a particular project and location. @@ -564,8 +568,10 @@ async def sample_create_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -640,7 +646,7 @@ async def update_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Update a single trigger. @@ -703,8 +709,10 @@ async def sample_update_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -778,7 +786,7 @@ async def delete_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single trigger. @@ -835,8 +843,10 @@ async def sample_delete_trigger(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -907,7 +917,7 @@ async def get_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel.Channel: r"""Get a single Channel. @@ -951,8 +961,10 @@ async def sample_get_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Channel: @@ -1016,7 +1028,7 @@ async def list_channels(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelsAsyncPager: r"""List channels. @@ -1061,8 +1073,10 @@ async def sample_list_channels(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager: @@ -1135,7 +1149,7 @@ async def create_channel(self, channel_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a new channel in a particular project and location. @@ -1203,8 +1217,10 @@ async def sample_create_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1281,7 +1297,7 @@ async def update_channel(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Update a single channel. @@ -1336,8 +1352,10 @@ async def sample_update_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1411,7 +1429,7 @@ async def delete_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single channel. @@ -1460,8 +1478,10 @@ async def sample_delete_channel(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1533,7 +1553,7 @@ async def get_provider(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> discovery.Provider: r"""Get a single Provider. @@ -1577,8 +1597,10 @@ async def sample_get_provider(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Provider: @@ -1636,7 +1658,7 @@ async def list_providers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListProvidersAsyncPager: r"""List providers. @@ -1681,8 +1703,10 @@ async def sample_list_providers(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager: @@ -1753,7 +1777,7 @@ async def get_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. @@ -1797,8 +1821,10 @@ async def sample_get_channel_connection(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.ChannelConnection: @@ -1861,7 +1887,7 @@ async def list_channel_connections(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelConnectionsAsyncPager: r"""List channel connections. 
@@ -1906,8 +1932,10 @@ async def sample_list_channel_connections(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager: @@ -1981,7 +2009,7 @@ async def create_channel_connection(self, channel_connection_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Create a new ChannelConnection in a particular project and location. @@ -2050,8 +2078,10 @@ async def sample_create_channel_connection(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -2126,7 +2156,7 @@ async def delete_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Delete a single ChannelConnection. @@ -2174,8 +2204,10 @@ async def sample_delete_channel_connection(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2246,7 +2278,7 @@ async def get_google_channel_config(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig @@ -2290,8 +2322,10 @@ async def sample_get_google_channel_config(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2356,7 +2390,7 @@ async def update_google_channel_config(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig @@ -2410,8 +2444,10 @@ async def sample_update_google_channel_config(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2477,7 +2513,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -2488,8 +2524,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2527,7 +2565,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2538,8 +2576,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2577,7 +2617,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2593,8 +2633,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2627,7 +2669,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2642,8 +2684,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2676,7 +2720,7 @@ async def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2689,8 +2733,10 @@ async def set_iam_policy( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2792,7 +2838,7 @@ async def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -2806,8 +2852,10 @@ async def get_iam_policy( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2909,7 +2957,7 @@ async def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -2924,8 +2972,10 @@ async def test_iam_permissions( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -2964,7 +3014,7 @@ async def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -2975,8 +3025,10 @@ async def get_location( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -3014,7 +3066,7 @@ async def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -3025,8 +3077,10 @@ async def list_locations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 70a85278eeb2..a6f0082d3500 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -669,7 +669,7 @@ def get_trigger(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> trigger.Trigger: r"""Get a single trigger. @@ -713,8 +713,10 @@ def sample_get_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.eventarc_v1.types.Trigger: @@ -771,7 +773,7 @@ def list_triggers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTriggersPager: r"""List triggers. @@ -816,8 +818,10 @@ def sample_list_triggers(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager: @@ -889,7 +893,7 @@ def create_trigger(self, trigger_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Create a new trigger in a particular project and location. @@ -960,8 +964,10 @@ def sample_create_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation.Operation: @@ -1035,7 +1041,7 @@ def update_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Update a single trigger. @@ -1098,8 +1104,10 @@ def sample_update_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1172,7 +1180,7 @@ def delete_trigger(self, allow_missing: Optional[bool] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a single trigger. @@ -1229,8 +1237,10 @@ def sample_delete_trigger(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation.Operation: @@ -1300,7 +1310,7 @@ def get_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel.Channel: r"""Get a single Channel. @@ -1344,8 +1354,10 @@ def sample_get_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Channel: @@ -1408,7 +1420,7 @@ def list_channels(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelsPager: r"""List channels. @@ -1453,8 +1465,10 @@ def sample_list_channels(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager: @@ -1526,7 +1540,7 @@ def create_channel(self, channel_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Create a new channel in a particular project and location. @@ -1594,8 +1608,10 @@ def sample_create_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1671,7 +1687,7 @@ def update_channel(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Update a single channel. @@ -1726,8 +1742,10 @@ def sample_update_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1800,7 +1818,7 @@ def delete_channel(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a single channel. @@ -1849,8 +1867,10 @@ def sample_delete_channel(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1921,7 +1941,7 @@ def get_provider(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> discovery.Provider: r"""Get a single Provider. @@ -1965,8 +1985,10 @@ def sample_get_provider(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.Provider: @@ -2023,7 +2045,7 @@ def list_providers(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListProvidersPager: r"""List providers. @@ -2068,8 +2090,10 @@ def sample_list_providers(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager: @@ -2139,7 +2163,7 @@ def get_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> channel_connection.ChannelConnection: r"""Get a single ChannelConnection. @@ -2183,8 +2207,10 @@ def sample_get_channel_connection(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.ChannelConnection: @@ -2246,7 +2272,7 @@ def list_channel_connections(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListChannelConnectionsPager: r"""List channel connections. @@ -2291,8 +2317,10 @@ def sample_list_channel_connections(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager: @@ -2365,7 +2393,7 @@ def create_channel_connection(self, channel_connection_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Create a new ChannelConnection in a particular project and location. @@ -2434,8 +2462,10 @@ def sample_create_channel_connection(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2509,7 +2539,7 @@ def delete_channel_connection(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Delete a single ChannelConnection. @@ -2557,8 +2587,10 @@ def sample_delete_channel_connection(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2628,7 +2660,7 @@ def get_google_channel_config(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> google_channel_config.GoogleChannelConfig: r"""Get a GoogleChannelConfig @@ -2672,8 +2704,10 @@ def sample_get_google_channel_config(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2737,7 +2771,7 @@ def update_google_channel_config(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gce_google_channel_config.GoogleChannelConfig: r"""Update a single GoogleChannelConfig @@ -2791,8 +2825,10 @@ def sample_update_google_channel_config(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.eventarc_v1.types.GoogleChannelConfig: @@ -2870,7 +2906,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -2881,8 +2917,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2920,7 +2958,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2931,8 +2969,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2970,7 +3010,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2986,8 +3026,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3020,7 +3062,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -3035,8 +3077,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -3069,7 +3113,7 @@ def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -3082,8 +3126,10 @@ def set_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -3185,7 +3231,7 @@ def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -3199,8 +3245,10 @@ def get_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -3302,7 +3350,7 @@ def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -3317,8 +3365,10 @@ def test_iam_permissions( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -3357,7 +3407,7 @@ def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -3368,8 +3418,10 @@ def get_location( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -3407,7 +3459,7 @@ def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -3418,8 +3470,10 @@ def list_locations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 9ef97e16e5dc..3acf89e71bd6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -55,7 +55,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -68,8 +68,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = eventarc.ListTriggersRequest(request) @@ -121,7 +123,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -134,8 +136,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListTriggersRequest(request) @@ -190,7 +194,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -203,8 +207,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = eventarc.ListChannelsRequest(request) @@ -256,7 +262,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -269,8 +275,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListChannelsRequest(request) @@ -325,7 +333,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -338,8 +346,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = eventarc.ListProvidersRequest(request) @@ -391,7 +401,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -404,8 +414,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListProvidersRequest(request) @@ -460,7 +472,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -473,8 +485,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = eventarc.ListChannelConnectionsRequest(request) @@ -526,7 +540,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -539,8 +553,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = eventarc.ListChannelConnectionsRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 214d5655fa88..8a66da7096fe 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -225,7 +225,7 @@ def post_update_trigger(self, response): """ - def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelRequest, Sequence[Tuple[str, str]]]: + def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelRequest, 
Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel Override in a subclass to manipulate the request or metadata @@ -242,7 +242,7 @@ def post_create_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, str]]]: + def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel_connection Override in a subclass to manipulate the request or metadata @@ -259,7 +259,7 @@ def post_create_channel_connection(self, response: operations_pb2.Operation) -> """ return response - def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, str]]]: + def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_trigger Override in a subclass to manipulate the request or metadata @@ -276,7 +276,7 @@ def post_create_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, str]]]: + def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel Override in a subclass to 
manipulate the request or metadata @@ -293,7 +293,7 @@ def post_delete_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, str]]]: + def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel_connection Override in a subclass to manipulate the request or metadata @@ -310,7 +310,7 @@ def post_delete_channel_connection(self, response: operations_pb2.Operation) -> """ return response - def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, str]]]: + def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_trigger Override in a subclass to manipulate the request or metadata @@ -327,7 +327,7 @@ def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_ """ return response - def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, str]]]: + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel Override in a subclass to manipulate the request or metadata @@ -344,7 +344,7 @@ def post_get_channel(self, response: channel.Channel) -> channel.Channel: """ 
return response - def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, str]]]: + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel_connection Override in a subclass to manipulate the request or metadata @@ -361,7 +361,7 @@ def post_get_channel_connection(self, response: channel_connection.ChannelConnec """ return response - def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_google_channel_config Override in a subclass to manipulate the request or metadata @@ -378,7 +378,7 @@ def post_get_google_channel_config(self, response: google_channel_config.GoogleC """ return response - def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, str]]]: + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_provider Override in a subclass to manipulate the request or metadata @@ -395,7 +395,7 @@ def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: """ return response - def pre_get_trigger(self, request: eventarc.GetTriggerRequest, 
metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, str]]]: + def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_trigger Override in a subclass to manipulate the request or metadata @@ -412,7 +412,7 @@ def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """ return response - def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, str]]]: + def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channel_connections Override in a subclass to manipulate the request or metadata @@ -429,7 +429,7 @@ def post_list_channel_connections(self, response: eventarc.ListChannelConnection """ return response - def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, str]]]: + def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channels Override in a subclass to manipulate the request or metadata @@ -446,7 +446,7 @@ def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventar """ return response - def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, str]]]: + def pre_list_providers(self, request: 
eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_providers Override in a subclass to manipulate the request or metadata @@ -463,7 +463,7 @@ def post_list_providers(self, response: eventarc.ListProvidersResponse) -> event """ return response - def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, str]]]: + def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_triggers Override in a subclass to manipulate the request or metadata @@ -480,7 +480,7 @@ def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventar """ return response - def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, str]]]: + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_channel Override in a subclass to manipulate the request or metadata @@ -497,7 +497,7 @@ def post_update_channel(self, response: operations_pb2.Operation) -> operations_ """ return response - def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, str]]]: + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> 
Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_google_channel_config Override in a subclass to manipulate the request or metadata @@ -514,7 +514,7 @@ def post_update_google_channel_config(self, response: gce_google_channel_config. """ return response - def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, str]]]: + def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_trigger Override in a subclass to manipulate the request or metadata @@ -532,8 +532,8 @@ def post_update_trigger(self, response: operations_pb2.Operation) -> operations_ return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -553,8 +553,8 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -574,8 +574,8 @@ def post_list_locations( 
return response def pre_get_iam_policy( - self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + self, request: iam_policy_pb2.GetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -595,8 +595,8 @@ def post_get_iam_policy( return response def pre_set_iam_policy( - self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + self, request: iam_policy_pb2.SetIamPolicyRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -616,8 +616,8 @@ def post_set_iam_policy( return response def pre_test_iam_permissions( - self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + self, request: iam_policy_pb2.TestIamPermissionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -637,8 +637,8 @@ def post_test_iam_permissions( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, 
bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -658,8 +658,8 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -679,8 +679,8 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -700,8 +700,8 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -893,7 +893,7 @@ def __call__(self, request: eventarc.CreateChannelRequest, *, retry: 
OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create channel method over HTTP. @@ -904,8 +904,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -969,7 +971,7 @@ def __call__(self, request: eventarc.CreateChannelConnectionRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create channel connection method over HTTP. @@ -980,8 +982,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1045,7 +1049,7 @@ def __call__(self, request: eventarc.CreateTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create trigger method over HTTP. @@ -1056,8 +1060,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1120,7 +1126,7 @@ def __call__(self, request: eventarc.DeleteChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete channel method over HTTP. @@ -1131,8 +1137,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1193,7 +1201,7 @@ def __call__(self, request: eventarc.DeleteChannelConnectionRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete channel connection method over HTTP. @@ -1204,8 +1212,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1266,7 +1276,7 @@ def __call__(self, request: eventarc.DeleteTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete trigger method over HTTP. @@ -1277,8 +1287,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1339,7 +1351,7 @@ def __call__(self, request: eventarc.GetChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> channel.Channel: r"""Call the get channel method over HTTP. @@ -1350,8 +1362,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.channel.Channel: @@ -1419,7 +1433,7 @@ def __call__(self, request: eventarc.GetChannelConnectionRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> channel_connection.ChannelConnection: r"""Call the get channel connection method over HTTP. @@ -1430,8 +1444,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.channel_connection.ChannelConnection: @@ -1498,7 +1514,7 @@ def __call__(self, request: eventarc.GetGoogleChannelConfigRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> google_channel_config.GoogleChannelConfig: r"""Call the get google channel config method over HTTP. @@ -1509,8 +1525,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.google_channel_config.GoogleChannelConfig: @@ -1578,7 +1596,7 @@ def __call__(self, request: eventarc.GetProviderRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> discovery.Provider: r"""Call the get provider method over HTTP. @@ -1589,8 +1607,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.discovery.Provider: @@ -1652,7 +1672,7 @@ def __call__(self, request: eventarc.GetTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> trigger.Trigger: r"""Call the get trigger method over HTTP. @@ -1663,8 +1683,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.trigger.Trigger: @@ -1726,7 +1748,7 @@ def __call__(self, request: eventarc.ListChannelConnectionsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListChannelConnectionsResponse: r"""Call the list channel connections method over HTTP. @@ -1737,8 +1759,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.eventarc.ListChannelConnectionsResponse: @@ -1800,7 +1824,7 @@ def __call__(self, request: eventarc.ListChannelsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListChannelsResponse: r"""Call the list channels method over HTTP. @@ -1811,8 +1835,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.eventarc.ListChannelsResponse: @@ -1872,7 +1898,7 @@ def __call__(self, request: eventarc.ListProvidersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListProvidersResponse: r"""Call the list providers method over HTTP. @@ -1883,8 +1909,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.eventarc.ListProvidersResponse: @@ -1944,7 +1972,7 @@ def __call__(self, request: eventarc.ListTriggersRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> eventarc.ListTriggersResponse: r"""Call the list triggers method over HTTP. @@ -1955,8 +1983,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.eventarc.ListTriggersResponse: @@ -2017,7 +2047,7 @@ def __call__(self, request: eventarc.UpdateChannelRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update channel method over HTTP. @@ -2028,8 +2058,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -2093,7 +2125,7 @@ def __call__(self, request: eventarc.UpdateGoogleChannelConfigRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> gce_google_channel_config.GoogleChannelConfig: r"""Call the update google channel config method over HTTP. @@ -2105,8 +2137,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.gce_google_channel_config.GoogleChannelConfig: @@ -2177,7 +2211,7 @@ def __call__(self, request: eventarc.UpdateTriggerRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update trigger method over HTTP. @@ -2188,8 +2222,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -2400,7 +2436,7 @@ def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -2411,8 +2447,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.Location: Response from GetLocation method. @@ -2473,7 +2511,7 @@ def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. @@ -2484,8 +2522,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. @@ -2546,7 +2586,7 @@ def __call__(self, request: iam_policy_pb2.GetIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. @@ -2557,8 +2597,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from GetIamPolicy method. @@ -2620,7 +2662,7 @@ def __call__(self, request: iam_policy_pb2.SetIamPolicyRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> policy_pb2.Policy: r"""Call the set iam policy method over HTTP. @@ -2631,8 +2673,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: policy_pb2.Policy: Response from SetIamPolicy method. @@ -2696,7 +2740,7 @@ def __call__(self, request: iam_policy_pb2.TestIamPermissionsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -2707,8 +2751,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. @@ -2772,7 +2818,7 @@ def __call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the cancel operation method over HTTP. @@ -2783,8 +2829,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = _BaseEventarcRestTransport._BaseCancelOperation._get_http_options() @@ -2840,7 +2888,7 @@ def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the delete operation method over HTTP. @@ -2851,8 +2899,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseEventarcRestTransport._BaseDeleteOperation._get_http_options() @@ -2906,7 +2956,7 @@ def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2917,8 +2967,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: operations_pb2.Operation: Response from GetOperation method. @@ -2979,7 +3031,7 @@ def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -2990,8 +3042,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index 596aaecb1b9c..247771789f02 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -55,7 +55,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -143,7 +143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -232,7 +232,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -320,7 +320,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -409,7 +409,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -497,7 +497,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -578,7 +578,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -658,7 
+658,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -739,7 +739,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -819,7 +819,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -904,7 +904,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -988,7 +988,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1069,7 +1069,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", @@ -1149,7 +1149,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.ChannelConnection", @@ -1230,7 +1230,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Channel", @@ -1310,7 +1310,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Channel", @@ -1391,7 +1391,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -1471,7 
+1471,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -1552,7 +1552,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Provider", @@ -1632,7 +1632,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Provider", @@ -1713,7 +1713,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Trigger", @@ -1793,7 +1793,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.Trigger", @@ -1874,7 +1874,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsAsyncPager", @@ -1954,7 +1954,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelConnectionsPager", @@ -2035,7 +2035,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsAsyncPager", @@ -2115,7 +2115,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListChannelsPager", @@ -2196,7 +2196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersAsyncPager", @@ -2276,7 +2276,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListProvidersPager", @@ -2357,7 +2357,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersAsyncPager", @@ -2437,7 +2437,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.services.eventarc.pagers.ListTriggersPager", @@ -2522,7 +2522,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -2606,7 +2606,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -2691,7 +2691,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -2775,7 +2775,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.eventarc_v1.types.GoogleChannelConfig", @@ -2864,7 +2864,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -2952,7 +2952,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } 
], "resultType": "google.api_core.operation.Operation", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index dbe7f7077425..d053be16137b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -248,7 +248,7 @@ async def list_buckets(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsAsyncPager: r"""Lists log buckets. @@ -303,8 +303,10 @@ async def sample_list_buckets(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: @@ -375,7 +377,7 @@ async def get_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. 
@@ -411,8 +413,10 @@ async def sample_get_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -457,7 +461,7 @@ async def create_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a log bucket asynchronously that can be used to store log entries. @@ -501,8 +505,10 @@ async def sample_create_bucket_async(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -558,7 +564,7 @@ async def update_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates a log bucket asynchronously. @@ -604,8 +610,10 @@ async def sample_update_bucket_async(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -661,7 +669,7 @@ async def create_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -700,8 +708,10 @@ async def sample_create_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -746,7 +756,7 @@ async def update_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. @@ -788,8 +798,10 @@ async def sample_update_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -834,7 +846,7 @@ async def delete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -872,8 +884,10 @@ async def sample_delete_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -909,7 +923,7 @@ async def undelete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 @@ -944,8 +958,10 @@ async def sample_undelete_bucket(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -982,7 +998,7 @@ async def list_views(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. @@ -1029,8 +1045,10 @@ async def sample_list_views(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: @@ -1101,7 +1119,7 @@ async def get_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. @@ -1137,8 +1155,10 @@ async def sample_get_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1183,7 +1203,7 @@ async def create_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1221,8 +1241,10 @@ async def sample_create_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1267,7 +1289,7 @@ async def update_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. This method replaces the following fields in the existing view with values from the new @@ -1307,8 +1329,10 @@ async def sample_update_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1353,7 +1377,7 @@ async def delete_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a view on a log bucket. 
If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it @@ -1389,8 +1413,10 @@ async def sample_delete_view(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1427,7 +1453,7 @@ async def list_sinks(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. @@ -1478,8 +1504,10 @@ async def sample_list_sinks(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: @@ -1550,7 +1578,7 @@ async def get_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1603,8 +1631,10 @@ async def sample_get_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1670,7 +1700,7 @@ async def create_sink(self, sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -1739,8 +1769,10 @@ async def sample_create_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1809,7 +1841,7 @@ async def update_sink(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -1902,8 +1934,10 @@ async def sample_update_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -1972,7 +2006,7 @@ async def delete_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2024,8 +2058,10 @@ async def sample_delete_sink(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2076,7 +2112,7 @@ async def create_link(self, link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs @@ -2147,8 +2183,10 @@ async def sample_create_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2221,7 +2259,7 @@ async def delete_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. 
@@ -2273,8 +2311,10 @@ async def sample_delete_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -2350,7 +2390,7 @@ async def list_links(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLinksAsyncPager: r"""Lists links. @@ -2399,8 +2439,10 @@ async def sample_list_links(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: @@ -2472,7 +2514,7 @@ async def get_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Link: r"""Gets a link. 
@@ -2519,8 +2561,10 @@ async def sample_get_link(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Link: @@ -2578,7 +2622,7 @@ async def list_exclusions(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2630,8 +2674,10 @@ async def sample_list_exclusions(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: @@ -2702,7 +2748,7 @@ async def get_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -2755,8 +2801,10 @@ async def sample_get_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2820,7 +2868,7 @@ async def create_exclusion(self, exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. Only log entries belonging to that resource can @@ -2890,8 +2938,10 @@ async def sample_create_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -2958,7 +3008,7 @@ async def update_exclusion(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3039,8 +3089,10 @@ async def sample_update_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3107,7 +3159,7 @@ async def delete_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -3158,8 +3210,10 @@ async def sample_delete_exclusion(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -3207,7 +3261,7 @@ async def get_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -3257,8 +3311,10 @@ async def sample_get_cmek_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3313,7 +3369,7 @@ async def update_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. 
@@ -3368,8 +3424,10 @@ async def sample_update_cmek_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3425,7 +3483,7 @@ async def get_settings(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -3500,8 +3558,10 @@ async def sample_get_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.Settings: @@ -3561,7 +3621,7 @@ async def update_settings(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -3643,8 +3703,10 @@ async def sample_update_settings(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3704,7 +3766,7 @@ async def copy_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -3746,8 +3808,10 @@ async def sample_copy_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -3797,7 +3861,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -3808,8 +3872,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -3847,7 +3913,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -3858,8 +3924,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -3897,7 +3965,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -3912,8 +3980,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: None """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index b42577271de7..f6d5e698b062 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -617,7 +617,7 @@ def list_buckets(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListBucketsPager: r"""Lists log buckets. @@ -672,8 +672,10 @@ def sample_list_buckets(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager: @@ -743,7 +745,7 @@ def get_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Gets a log bucket. @@ -779,8 +781,10 @@ def sample_get_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -825,7 +829,7 @@ def create_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates a log bucket asynchronously that can be used to store log entries. @@ -869,8 +873,10 @@ def sample_create_bucket_async(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -926,7 +932,7 @@ def update_bucket_async(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Updates a log bucket asynchronously. @@ -972,8 +978,10 @@ def sample_update_bucket_async(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1029,7 +1037,7 @@ def create_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Creates a log bucket that can be used to store log entries. After a bucket has been created, the bucket's @@ -1068,8 +1076,10 @@ def sample_create_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -1114,7 +1124,7 @@ def update_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogBucket: r"""Updates a log bucket. @@ -1156,8 +1166,10 @@ def sample_update_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogBucket: @@ -1202,7 +1214,7 @@ def delete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a log bucket. @@ -1240,8 +1252,10 @@ def sample_delete_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1277,7 +1291,7 @@ def undelete_bucket(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Undeletes a log bucket. A bucket that has been deleted can be undeleted within the grace period of 7 @@ -1312,8 +1326,10 @@ def sample_undelete_bucket(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Use the request object if provided (there's no risk of modifying the input as @@ -1350,7 +1366,7 @@ def list_views(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. @@ -1397,8 +1413,10 @@ def sample_list_views(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: @@ -1468,7 +1486,7 @@ def get_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Gets a view on a log bucket.. 
@@ -1504,8 +1522,10 @@ def sample_get_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1550,7 +1570,7 @@ def create_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Creates a view over log entries in a log bucket. A bucket may contain a maximum of 30 views. @@ -1588,8 +1608,10 @@ def sample_create_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1634,7 +1656,7 @@ def update_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogView: r"""Updates a view on a log bucket. 
This method replaces the following fields in the existing view with values from the new @@ -1674,8 +1696,10 @@ def sample_update_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogView: @@ -1720,7 +1744,7 @@ def delete_view(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is returned, this indicates that system is not in a state where it @@ -1756,8 +1780,10 @@ def sample_delete_view(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Use the request object if provided (there's no risk of modifying the input as @@ -1794,7 +1820,7 @@ def list_sinks(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSinksPager: r"""Lists sinks. @@ -1845,8 +1871,10 @@ def sample_list_sinks(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: @@ -1916,7 +1944,7 @@ def get_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Gets a sink. @@ -1969,8 +1997,10 @@ def sample_get_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogSink: @@ -2035,7 +2065,7 @@ def create_sink(self, sink: Optional[logging_config.LogSink] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins @@ -2104,8 +2134,10 @@ def sample_create_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2173,7 +2205,7 @@ def update_sink(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogSink: r"""Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, @@ -2266,8 +2298,10 @@ def sample_update_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogSink: @@ -2335,7 +2369,7 @@ def delete_sink(self, sink_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a sink. If the sink has a unique ``writer_identity``, then that service account is also deleted. @@ -2387,8 +2421,10 @@ def sample_delete_sink(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -2438,7 +2474,7 @@ def create_link(self, link_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Asynchronously creates a linked dataset in BigQuery which makes it possible to use BigQuery to read the logs @@ -2509,8 +2545,10 @@ def sample_create_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2582,7 +2620,7 @@ def delete_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes a link. This will also delete the corresponding BigQuery linked dataset. @@ -2634,8 +2672,10 @@ def sample_delete_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2710,7 +2750,7 @@ def list_links(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLinksPager: r"""Lists links. @@ -2759,8 +2799,10 @@ def sample_list_links(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: @@ -2831,7 +2873,7 @@ def get_link(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Link: r"""Gets a link. @@ -2878,8 +2920,10 @@ def sample_get_link(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Link: @@ -2936,7 +2980,7 @@ def list_exclusions(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. @@ -2988,8 +3032,10 @@ def sample_list_exclusions(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: @@ -3059,7 +3105,7 @@ def get_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Gets the description of an exclusion in the \_Default sink. @@ -3112,8 +3158,10 @@ def sample_get_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3176,7 +3224,7 @@ def create_exclusion(self, exclusion: Optional[logging_config.LogExclusion] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Creates a new exclusion in the \_Default sink in a specified parent resource. 
Only log entries belonging to that resource can @@ -3246,8 +3294,10 @@ def sample_create_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3313,7 +3363,7 @@ def update_exclusion(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.LogExclusion: r"""Changes one or more properties of an existing exclusion in the \_Default sink. @@ -3394,8 +3444,10 @@ def sample_update_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogExclusion: @@ -3461,7 +3513,7 @@ def delete_exclusion(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an exclusion in the \_Default sink. @@ -3512,8 +3564,10 @@ def sample_delete_exclusion(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -3560,7 +3614,7 @@ def get_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Gets the Logging CMEK settings for the given resource. @@ -3610,8 +3664,10 @@ def sample_get_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3666,7 +3722,7 @@ def update_cmek_settings(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.CmekSettings: r"""Updates the Log Router CMEK settings for the given resource. @@ -3721,8 +3777,10 @@ def sample_update_cmek_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.CmekSettings: @@ -3778,7 +3836,7 @@ def get_settings(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Gets the Log Router settings for the given resource. @@ -3853,8 +3911,10 @@ def sample_get_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -3913,7 +3973,7 @@ def update_settings(self, update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_config.Settings: r"""Updates the Log Router settings for the given resource. @@ -3995,8 +4055,10 @@ def sample_update_settings(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.Settings: @@ -4055,7 +4117,7 @@ def copy_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Copies a set of log entries from a log bucket to a Cloud Storage bucket. @@ -4097,8 +4159,10 @@ def sample_copy_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -4161,7 +4225,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -4172,8 +4236,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -4211,7 +4277,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -4222,8 +4288,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -4261,7 +4329,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -4276,8 +4344,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: None """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index 459a76f21307..ff7d5d233510 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -51,7 +51,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -64,8 +64,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListBucketsRequest(request) @@ -117,7 +119,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -130,8 +132,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListBucketsRequest(request) @@ -186,7 +190,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -199,8 +203,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListViewsRequest(request) @@ -252,7 +258,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -265,8 +271,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListViewsRequest(request) @@ -321,7 +329,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -334,8 +342,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListSinksRequest(request) @@ -387,7 +397,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -400,8 +410,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListSinksRequest(request) @@ -456,7 +468,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -469,8 +481,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListLinksRequest(request) @@ -522,7 +536,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -535,8 +549,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListLinksRequest(request) @@ -591,7 +607,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -604,8 +620,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) @@ -657,7 +675,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -670,8 +688,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_config.ListExclusionsRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index d8c1d2284de7..a07c53afaa74 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -233,7 +233,7 @@ async def delete_log(self, log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -288,8 +288,10 @@ async def sample_delete_log(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -341,7 +343,7 @@ async def write_log_entries(self, entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -472,8 +474,10 @@ async def sample_write_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: @@ -530,7 +534,7 @@ async def list_log_entries(self, order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesAsyncPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -617,8 +621,10 @@ async def sample_list_log_entries(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: @@ -684,7 +690,7 @@ async def list_monitored_resource_descriptors(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -722,8 +728,10 @@ async def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: @@ -775,7 +783,7 @@ async def list_logs(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsAsyncPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. 
Only logs that have entries are @@ -825,8 +833,10 @@ async def sample_list_logs(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: @@ -897,7 +907,7 @@ def tail_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading @@ -946,8 +956,10 @@ def request_generator(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -978,7 +990,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -989,8 +1001,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1028,7 +1042,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1039,8 +1053,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1078,7 +1094,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1093,8 +1109,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: None """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 1f1ebda6b8bb..933daac99fa4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -548,7 +548,7 @@ def delete_log(self, log_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes all the log entries in a log for the \_Default Log Bucket. The log reappears if it receives new entries. Log @@ -603,8 +603,10 @@ def sample_delete_log(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have @@ -655,7 +657,7 @@ def write_log_entries(self, entries: Optional[MutableSequence[log_entry.LogEntry]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging.WriteLogEntriesResponse: r"""Writes log entries to Logging. This API method is the only way to send log entries to Logging. This method is @@ -786,8 +788,10 @@ def sample_write_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.WriteLogEntriesResponse: @@ -842,7 +846,7 @@ def list_log_entries(self, order_by: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogEntriesPager: r"""Lists log entries. Use this method to retrieve log entries that originated from a project/folder/organization/billing account. @@ -929,8 +933,10 @@ def sample_list_log_entries(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: @@ -995,7 +1001,7 @@ def list_monitored_resource_descriptors(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListMonitoredResourceDescriptorsPager: r"""Lists the descriptors for monitored resource types used by Logging. @@ -1033,8 +1039,10 @@ def sample_list_monitored_resource_descriptors(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: @@ -1086,7 +1094,7 @@ def list_logs(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogsPager: r"""Lists the logs in projects, organizations, folders, or billing accounts. 
Only logs that have entries are @@ -1136,8 +1144,10 @@ def sample_list_logs(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: @@ -1207,7 +1217,7 @@ def tail_log_entries(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[logging.TailLogEntriesResponse]: r"""Streaming read of log entries as they are ingested. Until the stream is terminated, it will continue reading @@ -1256,8 +1266,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: @@ -1301,7 +1313,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1312,8 +1324,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1351,7 +1365,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1362,8 +1376,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1401,7 +1417,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1416,8 +1432,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index 13e9963f7934..c4f0739ea163 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -53,7 +53,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. 
Args: @@ -66,8 +66,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogEntriesRequest(request) @@ -119,7 +121,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -132,8 +134,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogEntriesRequest(request) @@ -188,7 +192,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. 
Args: @@ -201,8 +205,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -254,7 +260,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -267,8 +273,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListMonitoredResourceDescriptorsRequest(request) @@ -323,7 +331,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. 
Args: @@ -336,8 +344,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging.ListLogsRequest(request) @@ -389,7 +399,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -402,8 +412,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging.ListLogsRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 70a383b96779..d6f06c1f1834 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -234,7 +234,7 @@ async def list_log_metrics(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. @@ -282,8 +282,10 @@ async def sample_list_log_metrics(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: @@ -355,7 +357,7 @@ async def get_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. @@ -401,8 +403,10 @@ async def sample_get_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -471,7 +475,7 @@ async def create_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -533,8 +537,10 @@ async def sample_create_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -605,7 +611,7 @@ async def update_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -666,8 +672,10 @@ async def sample_update_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -737,7 +745,7 @@ async def delete_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -780,8 +788,10 @@ async def sample_delete_log_metric(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -830,7 +840,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -841,8 +851,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -880,7 +892,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -891,8 +903,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -930,7 +944,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -945,8 +959,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: None """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 72ad42b1f217..33a386b28b32 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -549,7 +549,7 @@ def list_log_metrics(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. @@ -597,8 +597,10 @@ def sample_list_log_metrics(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: @@ -669,7 +671,7 @@ def get_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Gets a logs-based metric. 
@@ -715,8 +717,10 @@ def sample_get_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -784,7 +788,7 @@ def create_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates a logs-based metric. @@ -846,8 +850,10 @@ def sample_create_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.logging_v2.types.LogMetric: @@ -917,7 +923,7 @@ def update_log_metric(self, metric: Optional[logging_metrics.LogMetric] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> logging_metrics.LogMetric: r"""Creates or updates a logs-based metric. @@ -978,8 +984,10 @@ def sample_update_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.logging_v2.types.LogMetric: @@ -1048,7 +1056,7 @@ def delete_log_metric(self, metric_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a logs-based metric. @@ -1091,8 +1099,10 @@ def sample_delete_log_metric(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have @@ -1153,7 +1163,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1164,8 +1174,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1203,7 +1215,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1214,8 +1226,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1253,7 +1267,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1268,8 +1282,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 134bd0dedb17..fc24bb44c900 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -51,7 +51,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. 
Args: @@ -64,8 +64,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) @@ -117,7 +119,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -130,8 +132,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = logging_metrics.ListLogMetricsRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index b62675ba6439..50c444f70b85 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -43,7 +43,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -119,7 +119,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -196,7 +196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -272,7 +272,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -349,7 +349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -425,7 +425,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -510,7 +510,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.logging_v2.types.LogExclusion", @@ -594,7 +594,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -683,7 +683,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -771,7 +771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -856,7 +856,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -940,7 +940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -1017,7 +1017,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -1093,7 +1093,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -1170,7 +1170,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_bucket" @@ -1243,7 +1243,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_bucket" @@ -1321,7 +1321,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_exclusion" @@ -1398,7 +1398,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_exclusion" @@ -1476,7 +1476,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1556,7 +1556,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1637,7 +1637,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_sink" @@ -1714,7 +1714,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_sink" @@ -1788,7 +1788,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_view" @@ -1861,7 +1861,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_view" @@ -1935,7 +1935,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -2011,7 +2011,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -2088,7 +2088,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -2164,7 +2164,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -2245,7 
+2245,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -2325,7 +2325,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -2406,7 +2406,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Link", @@ -2486,7 +2486,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Link", @@ -2567,7 +2567,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -2647,7 +2647,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -2728,7 +2728,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -2808,7 +2808,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -2885,7 +2885,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -2961,7 +2961,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -3042,7 +3042,7 @@ }, { "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", @@ -3122,7 +3122,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", @@ -3203,7 +3203,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", @@ -3283,7 +3283,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", @@ -3364,7 +3364,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", @@ -3444,7 +3444,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", @@ -3525,7 +3525,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", @@ -3605,7 +3605,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", @@ -3686,7 +3686,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
"resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", @@ -3766,7 +3766,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", @@ -3843,7 +3843,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3916,7 +3916,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "undelete_bucket" @@ -3990,7 +3990,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -4066,7 +4066,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -4143,7 +4143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -4219,7 +4219,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogBucket", @@ -4296,7 +4296,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -4372,7 +4372,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.CmekSettings", @@ -4461,7 +4461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -4549,7 +4549,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogExclusion", @@ -4634,7 +4634,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -4718,7 +4718,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.Settings", @@ -4807,7 +4807,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4895,7 +4895,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogSink", @@ -4972,7 +4972,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -5048,7 +5048,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogView", @@ -5129,7 +5129,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -5206,7 +5206,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log" @@ -5292,7 +5292,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", @@ -5380,7 +5380,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", @@ -5461,7 +5461,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", @@ -5541,7 +5541,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", @@ -5618,7 +5618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", @@ -5694,7 +5694,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", @@ -5771,7 +5771,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -5847,7 +5847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", @@ -5940,7 +5940,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -6032,7 +6032,7 @@ }, { "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", @@ -6117,7 +6117,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6201,7 +6201,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6282,7 +6282,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -6359,7 +6359,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_log_metric" @@ -6437,7 +6437,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6517,7 +6517,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ -6598,7 +6598,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", @@ -6678,7 +6678,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", @@ -6763,7 +6763,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", @@ 
-6847,7 +6847,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.logging_v2.types.LogMetric", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 24aa303c8997..55e33e52bd3e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -261,7 +261,7 @@ async def list_instances(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListInstancesAsyncPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -317,8 +317,10 @@ async def sample_list_instances(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager: @@ -390,7 +392,7 @@ async def get_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. @@ -435,8 +437,10 @@ async def sample_get_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.Instance: @@ -492,7 +496,7 @@ async def get_instance_auth_string(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. @@ -540,8 +544,10 @@ async def sample_get_instance_auth_string(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.InstanceAuthString: @@ -599,7 +605,7 @@ async def create_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -688,8 +694,10 @@ async def sample_create_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -763,7 +771,7 @@ async def update_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Updates the metadata and configuration of a specific Redis instance. @@ -836,8 +844,10 @@ async def sample_update_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -909,7 +919,7 @@ async def upgrade_instance(self, redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Upgrades Redis instance to the newer Redis version specified in the request. @@ -967,8 +977,10 @@ async def sample_upgrade_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1040,7 +1052,7 @@ async def import_instance(self, input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. 
@@ -1108,8 +1120,10 @@ async def sample_import_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1181,7 +1195,7 @@ async def export_instance(self, output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. @@ -1246,8 +1260,10 @@ async def sample_export_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation_async.AsyncOperation: @@ -1319,7 +1335,7 @@ async def failover_instance(self, data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud @@ -1378,8 +1394,10 @@ async def sample_failover_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1450,7 +1468,7 @@ async def delete_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1500,8 +1518,10 @@ async def sample_delete_instance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1579,7 +1599,7 @@ async def reschedule_maintenance(self, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -1645,8 +1665,10 @@ async def sample_reschedule_maintenance(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation_async.AsyncOperation: @@ -1719,7 +1741,7 @@ async def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -1730,8 +1752,10 @@ async def list_operations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -1769,7 +1793,7 @@ async def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -1780,8 +1804,10 @@ async def get_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1819,7 +1845,7 @@ async def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -1835,8 +1861,10 @@ async def delete_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1869,7 +1897,7 @@ async def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -1884,8 +1912,10 @@ async def cancel_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -1918,7 +1948,7 @@ async def wait_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. @@ -1935,8 +1965,10 @@ async def wait_operation( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -1974,7 +2006,7 @@ async def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -1985,8 +2017,10 @@ async def get_location( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -2024,7 +2058,7 @@ async def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -2035,8 +2069,10 @@ async def list_locations( retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 80d28b6d9a57..895f5c98d014 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -610,7 +610,7 @@ def list_instances(self, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListInstancesPager: r"""Lists all Redis instances owned by a project in either the specified location (region) or all locations. @@ -666,8 +666,10 @@ def sample_list_instances(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager: @@ -738,7 +740,7 @@ def get_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.Instance: r"""Gets the details of a specific Redis instance. @@ -783,8 +785,10 @@ def sample_get_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.Instance: @@ -839,7 +843,7 @@ def get_instance_auth_string(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> cloud_redis.InstanceAuthString: r"""Gets the AUTH string for a Redis instance. If AUTH is not enabled for the instance the response will be empty. @@ -887,8 +891,10 @@ def sample_get_instance_auth_string(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.redis_v1.types.InstanceAuthString: @@ -945,7 +951,7 @@ def create_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Creates a Redis instance based on the specified tier and memory size. @@ -1034,8 +1040,10 @@ def sample_create_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1108,7 +1116,7 @@ def update_instance(self, instance: Optional[cloud_redis.Instance] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Updates the metadata and configuration of a specific Redis instance. @@ -1181,8 +1189,10 @@ def sample_update_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1253,7 +1263,7 @@ def upgrade_instance(self, redis_version: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Upgrades Redis instance to the newer Redis version specified in the request. @@ -1311,8 +1321,10 @@ def sample_upgrade_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1383,7 +1395,7 @@ def import_instance(self, input_config: Optional[cloud_redis.InputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. 
@@ -1451,8 +1463,10 @@ def sample_import_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1523,7 +1537,7 @@ def export_instance(self, output_config: Optional[cloud_redis.OutputConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Export Redis instance data into a Redis RDB format file in Cloud Storage. @@ -1588,8 +1602,10 @@ def sample_export_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.api_core.operation.Operation: @@ -1660,7 +1676,7 @@ def failover_instance(self, data_protection_mode: Optional[cloud_redis.FailoverInstanceRequest.DataProtectionMode] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Initiates a failover of the primary node to current replica node for a specific STANDARD tier Cloud @@ -1719,8 +1735,10 @@ def sample_failover_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1790,7 +1808,7 @@ def delete_instance(self, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Deletes a specific Redis instance. Instance stops serving and data is deleted. @@ -1840,8 +1858,10 @@ def sample_delete_instance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -1918,7 +1938,7 @@ def reschedule_maintenance(self, schedule_time: Optional[timestamp_pb2.Timestamp] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation.Operation: r"""Reschedule maintenance for a given instance in a given project and location. @@ -1984,8 +2004,10 @@ def sample_reschedule_maintenance(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.api_core.operation.Operation: @@ -2070,7 +2092,7 @@ def list_operations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.ListOperationsResponse: r"""Lists operations that match the specified filter in the request. @@ -2081,8 +2103,10 @@ def list_operations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.ListOperationsResponse: Response message for ``ListOperations`` method. @@ -2120,7 +2144,7 @@ def get_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Gets the latest state of a long-running operation. @@ -2131,8 +2155,10 @@ def get_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2170,7 +2196,7 @@ def delete_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a long-running operation. @@ -2186,8 +2212,10 @@ def delete_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2220,7 +2248,7 @@ def cancel_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Starts asynchronous cancellation on a long-running operation. @@ -2235,8 +2263,10 @@ def cancel_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: None """ @@ -2269,7 +2299,7 @@ def wait_operation( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. @@ -2286,8 +2316,10 @@ def wait_operation( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: An ``Operation`` object. @@ -2325,7 +2357,7 @@ def get_location( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.Location: r"""Gets information about a location. @@ -2336,8 +2368,10 @@ def get_location( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.Location: Location object. @@ -2375,7 +2409,7 @@ def list_locations( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> locations_pb2.ListLocationsResponse: r"""Lists information about the supported locations for this service. @@ -2386,8 +2420,10 @@ def list_locations( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.location_pb2.ListLocationsResponse: Response message for ``ListLocations`` method. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index 55a7a66f9503..e3d2c05b83d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -51,7 +51,7 @@ def __init__(self, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiate the pager. Args: @@ -64,8 +64,10 @@ def __init__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = cloud_redis.ListInstancesRequest(request) @@ -117,7 +119,7 @@ def __init__(self, *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = ()): + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): """Instantiates the pager. Args: @@ -130,8 +132,10 @@ def __init__(self, retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = cloud_redis.ListInstancesRequest(request) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 2e18f09cd96c..5dafa5b3ae8c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -161,7 +161,7 @@ def post_upgrade_instance(self, response): """ - def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, 
Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -178,7 +178,7 @@ def post_create_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -195,7 +195,7 @@ def post_delete_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -212,7 +212,7 @@ def post_export_instance(self, response: operations_pb2.Operation) -> operations """ return response - def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance Override in a subclass to 
manipulate the request or metadata @@ -229,7 +229,7 @@ def post_failover_instance(self, response: operations_pb2.Operation) -> operatio """ return response - def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -246,7 +246,7 @@ def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Insta """ return response - def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: + def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -263,7 +263,7 @@ def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString """ return response - def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -280,7 +280,7 @@ def post_import_instance(self, response: 
operations_pb2.Operation) -> operations """ return response - def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, str]]]: + def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a subclass to manipulate the request or metadata @@ -297,7 +297,7 @@ def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cl """ return response - def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -314,7 +314,7 @@ def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> ope """ return response - def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -331,7 +331,7 @@ def post_update_instance(self, response: operations_pb2.Operation) -> operations """ return response - def 
pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, str]]]: + def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or metadata @@ -349,8 +349,8 @@ def post_upgrade_instance(self, response: operations_pb2.Operation) -> operation return response def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -370,8 +370,8 @@ def post_get_location( return response def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -391,8 +391,8 @@ def post_list_locations( return response def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, 
bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -412,8 +412,8 @@ def post_cancel_operation( return response def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -433,8 +433,8 @@ def post_delete_operation( return response def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -454,8 +454,8 @@ def post_get_operation( return response def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -475,8 +475,8 @@ def post_list_operations( return response def pre_wait_operation( - self, request: 
operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for wait_operation Override in a subclass to manipulate the request or metadata @@ -692,7 +692,7 @@ def __call__(self, request: cloud_redis.CreateInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. @@ -703,8 +703,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -767,7 +769,7 @@ def __call__(self, request: cloud_redis.DeleteInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. @@ -778,8 +780,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -841,7 +845,7 @@ def __call__(self, request: cloud_redis.ExportInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. @@ -852,8 +856,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -917,7 +923,7 @@ def __call__(self, request: cloud_redis.FailoverInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. @@ -928,8 +934,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -992,7 +1000,7 @@ def __call__(self, request: cloud_redis.GetInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. @@ -1003,8 +1011,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.Instance: @@ -1064,7 +1074,7 @@ def __call__(self, request: cloud_redis.GetInstanceAuthStringRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. @@ -1075,8 +1085,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.InstanceAuthString: @@ -1137,7 +1149,7 @@ def __call__(self, request: cloud_redis.ImportInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. @@ -1148,8 +1160,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1212,7 +1226,7 @@ def __call__(self, request: cloud_redis.ListInstancesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. @@ -1223,8 +1237,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.ListInstancesResponse: @@ -1287,7 +1303,7 @@ def __call__(self, request: cloud_redis.RescheduleMaintenanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the reschedule maintenance method over HTTP. @@ -1298,8 +1314,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1363,7 +1381,7 @@ def __call__(self, request: cloud_redis.UpdateInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. @@ -1374,8 +1392,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1439,7 +1459,7 @@ def __call__(self, request: cloud_redis.UpgradeInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. @@ -1450,8 +1470,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1606,7 +1628,7 @@ def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -1617,8 +1639,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.Location: Response from GetLocation method. @@ -1679,7 +1703,7 @@ def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. @@ -1690,8 +1714,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. @@ -1752,7 +1778,7 @@ def __call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1763,8 +1789,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() @@ -1818,7 +1846,7 @@ def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the delete operation method over HTTP. @@ -1829,8 +1857,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() @@ -1884,7 +1914,7 @@ def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -1895,8 +1925,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -1957,7 +1989,7 @@ def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -1968,8 +2000,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. @@ -2031,7 +2065,7 @@ def __call__(self, request: operations_pb2.WaitOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. @@ -2042,8 +2076,10 @@ def __call__(self, retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from WaitOperation method. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 4eff5c369818..6c73f6aea625 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -171,7 +171,7 @@ async def post_upgrade_instance(self, response): """ - async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_instance Override in a subclass to manipulate the request or metadata @@ -188,7 +188,7 @@ async def post_create_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> 
Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance Override in a subclass to manipulate the request or metadata @@ -205,7 +205,7 @@ async def post_delete_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance Override in a subclass to manipulate the request or metadata @@ -222,7 +222,7 @@ async def post_export_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance Override in a subclass to manipulate the request or metadata @@ -239,7 +239,7 @@ async def post_failover_instance(self, response: operations_pb2.Operation) -> op """ return response - async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc 
interceptor for get_instance Override in a subclass to manipulate the request or metadata @@ -256,7 +256,7 @@ async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis """ return response - async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, str]]]: + async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string Override in a subclass to manipulate the request or metadata @@ -273,7 +273,7 @@ async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuth """ return response - async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance Override in a subclass to manipulate the request or metadata @@ -290,7 +290,7 @@ async def post_import_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, str]]]: + async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances Override in a 
subclass to manipulate the request or metadata @@ -307,7 +307,7 @@ async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) """ return response - async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, str]]]: + async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance Override in a subclass to manipulate the request or metadata @@ -324,7 +324,7 @@ async def post_reschedule_maintenance(self, response: operations_pb2.Operation) """ return response - async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance Override in a subclass to manipulate the request or metadata @@ -341,7 +341,7 @@ async def post_update_instance(self, response: operations_pb2.Operation) -> oper """ return response - async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, str]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, str]]]: + async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance Override in a subclass to manipulate the request or 
metadata @@ -359,8 +359,8 @@ async def post_upgrade_instance(self, response: operations_pb2.Operation) -> ope return response async def pre_get_location( - self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_location Override in a subclass to manipulate the request or metadata @@ -380,8 +380,8 @@ async def post_get_location( return response async def pre_list_locations( - self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_locations Override in a subclass to manipulate the request or metadata @@ -401,8 +401,8 @@ async def post_list_locations( return response async def pre_cancel_operation( - self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for cancel_operation Override in a subclass to manipulate the request or metadata @@ -422,8 +422,8 @@ async def post_cancel_operation( return response async def pre_delete_operation( - self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.DeleteOperationRequest, 
Sequence[Tuple[str, str]]]: + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_operation Override in a subclass to manipulate the request or metadata @@ -443,8 +443,8 @@ async def post_delete_operation( return response async def pre_get_operation( - self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_operation Override in a subclass to manipulate the request or metadata @@ -464,8 +464,8 @@ async def post_get_operation( return response async def pre_list_operations( - self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_operations Override in a subclass to manipulate the request or metadata @@ -485,8 +485,8 @@ async def post_list_operations( return response async def pre_wait_operation( - self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, str]]]: + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for wait_operation Override in a subclass to 
manipulate the request or metadata @@ -720,7 +720,7 @@ async def __call__(self, request: cloud_redis.CreateInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the create instance method over HTTP. @@ -731,8 +731,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -801,7 +803,7 @@ async def __call__(self, request: cloud_redis.DeleteInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the delete instance method over HTTP. @@ -812,8 +814,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -881,7 +885,7 @@ async def __call__(self, request: cloud_redis.ExportInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the export instance method over HTTP. @@ -892,8 +896,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -963,7 +969,7 @@ async def __call__(self, request: cloud_redis.FailoverInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the failover instance method over HTTP. @@ -974,8 +980,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1044,7 +1052,7 @@ async def __call__(self, request: cloud_redis.GetInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.Instance: r"""Call the get instance method over HTTP. @@ -1055,8 +1063,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.cloud_redis.Instance: @@ -1120,7 +1130,7 @@ async def __call__(self, request: cloud_redis.GetInstanceAuthStringRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.InstanceAuthString: r"""Call the get instance auth string method over HTTP. @@ -1131,8 +1141,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.cloud_redis.InstanceAuthString: @@ -1197,7 +1209,7 @@ async def __call__(self, request: cloud_redis.ImportInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the import instance method over HTTP. @@ -1208,8 +1220,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1278,7 +1292,7 @@ async def __call__(self, request: cloud_redis.ListInstancesRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> cloud_redis.ListInstancesResponse: r"""Call the list instances method over HTTP. @@ -1289,8 +1303,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.cloud_redis.ListInstancesResponse: @@ -1357,7 +1373,7 @@ async def __call__(self, request: cloud_redis.RescheduleMaintenanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the reschedule maintenance method over HTTP. @@ -1368,8 +1384,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1439,7 +1457,7 @@ async def __call__(self, request: cloud_redis.UpdateInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the update instance method over HTTP. @@ -1450,8 +1468,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.operations_pb2.Operation: @@ -1521,7 +1541,7 @@ async def __call__(self, request: cloud_redis.UpgradeInstanceRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the upgrade instance method over HTTP. @@ -1532,8 +1552,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.operations_pb2.Operation: @@ -1728,7 +1750,7 @@ async def __call__(self, request: locations_pb2.GetLocationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.Location: r"""Call the get location method over HTTP. @@ -1739,8 +1761,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: locations_pb2.Location: Response from GetLocation method. @@ -1805,7 +1829,7 @@ async def __call__(self, request: locations_pb2.ListLocationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> locations_pb2.ListLocationsResponse: r"""Call the list locations method over HTTP. @@ -1816,8 +1840,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: locations_pb2.ListLocationsResponse: Response from ListLocations method. @@ -1882,7 +1908,7 @@ async def __call__(self, request: operations_pb2.CancelOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the cancel operation method over HTTP. @@ -1893,8 +1919,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() @@ -1952,7 +1980,7 @@ async def __call__(self, request: operations_pb2.DeleteOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> None: r"""Call the delete operation method over HTTP. @@ -1963,8 +1991,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() @@ -2022,7 +2052,7 @@ async def __call__(self, request: operations_pb2.GetOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the get operation method over HTTP. @@ -2033,8 +2063,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from GetOperation method. @@ -2099,7 +2131,7 @@ async def __call__(self, request: operations_pb2.ListOperationsRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.ListOperationsResponse: r"""Call the list operations method over HTTP. @@ -2110,8 +2142,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.ListOperationsResponse: Response from ListOperations method. @@ -2177,7 +2211,7 @@ async def __call__(self, request: operations_pb2.WaitOperationRequest, *, retry: OptionalRetry=gapic_v1.method.DEFAULT, timeout: Optional[float]=None, - metadata: Sequence[Tuple[str, str]]=(), + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), ) -> operations_pb2.Operation: r"""Call the wait operation method over HTTP. @@ -2188,8 +2222,10 @@ async def __call__(self, retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: operations_pb2.Operation: Response from WaitOperation method. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 174d9dff49b1..78f872bc4aef 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -55,7 +55,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -143,7 +143,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -224,7 +224,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -304,7 +304,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -389,7 +389,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.api_core.operation_async.AsyncOperation", @@ -473,7 +473,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -558,7 +558,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -642,7 +642,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -723,7 +723,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.InstanceAuthString", @@ -803,7 +803,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.InstanceAuthString", @@ -884,7 +884,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.Instance", @@ -964,7 +964,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.types.Instance", @@ -1049,7 +1049,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1133,7 +1133,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1214,7 +1214,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager", @@ -1294,7 +1294,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager", @@ -1383,7 +1383,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1471,7 +1471,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1556,7 +1556,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1640,7 +1640,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", @@ -1725,7 +1725,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation_async.AsyncOperation", @@ -1809,7 +1809,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.api_core.operation.Operation", diff --git a/packages/gapic-generator/tests/system/test_request_metadata.py b/packages/gapic-generator/tests/system/test_request_metadata.py new file mode 100644 index 000000000000..efc230f823f8 --- /dev/null +++ b/packages/gapic-generator/tests/system/test_request_metadata.py @@ -0,0 +1,54 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google import showcase + + +def test_metadata_string(echo): + echo.echo( + showcase.EchoRequest( + content="The hail in Wales falls mainly on the snails.", + request_id="some_value", + other_request_id="", + ), + metadata=[('some-key', 'some_value')] + ) + + +def test_metadata_binary(echo): + echo.echo( + showcase.EchoRequest( + content="The hail in Wales falls mainly on the snails.", + request_id="some_value", + other_request_id="", + ), + metadata=[('some-key-bin', b'some_value')] + ) + + if isinstance(echo.transport, type(echo).get_transport_class("grpc")): + # See https://github.com/googleapis/gapic-generator-python/issues/2250 + # and https://github.com/grpc/grpc/pull/38127. + # When the metadata key ends in `-bin`, the value should be of type + # `bytes`` rather than `str``. Otherwise, gRPC raises a TypeError. 
+ with pytest.raises(TypeError, match="(?i)expected bytes"): + echo.echo( + showcase.EchoRequest( + content="The hail in Wales falls mainly on the snails.", + request_id="some_value", + other_request_id="", + ), + metadata=[('some-key-bin', 'some_value')] + ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 39e8ec095632..b6c7e1dad74a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -160,7 +160,10 @@ def test_generate_sample_basic(): {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'resultType': 'molluscs_v1.classification', 'client': { @@ -284,7 +287,10 @@ def test_generate_sample_basic_async(): {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'resultType': 'molluscs_v1.classification', 'client': { @@ -399,7 +405,10 @@ def test_generate_sample_basic_unflattenable(): {'type': 'molluscs_v1.classify_request', 'name': 'request'}, {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'resultType': 'molluscs_v1.classification', 'client': { @@ -505,7 +514,10 @@ def test_generate_sample_void_method(): {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, {'type': 
'google.api_core.retry.Retry', 'name': 'retry'}, {'type': 'float', 'name': 'timeout'}, - {'type': 'Sequence[Tuple[str, str]', 'name': 'metadata'} + { + 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', + 'name': 'metadata' + } ], 'client': { 'shortName': 'MolluscServiceClient', diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 7665c831aef8..d9714c280c60 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -2301,7 +2301,10 @@ def test__set_sample_metadata_server_streaming(): }, {"type": "google.api_core.retry.Retry", "name": "retry"}, {"type": "float", "name": "timeout"}, - {"type": "Sequence[Tuple[str, str]", "name": "metadata"}, + { + "type": "Sequence[Tuple[str, Union[str, bytes]]]", + "name": "metadata" + }, ], "resultType": "Iterable[animalia.mollusca_v1.types.Mollusc]", "client": { @@ -2385,7 +2388,10 @@ def test__set_sample_metadata_client_streaming(): }, {"type": "google.api_core.retry.Retry", "name": "retry"}, {"type": "float", "name": "timeout"}, - {"type": "Sequence[Tuple[str, str]", "name": "metadata"}, + { + "type": "Sequence[Tuple[str, Union[str, bytes]]]", + "name": "metadata" + }, ], "resultType": "animalia.mollusca_v1.types.Mollusc", "client": { From b5cc758ba684b316d329ae8a30e4847aa80108b5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 25 Nov 2024 11:41:56 +0100 Subject: [PATCH 1219/1339] chore(deps): update all dependencies (#2260) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 4 +- packages/gapic-generator/requirements.txt | 394 +++++++++++----------- 2 files changed, 204 insertions(+), 194 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 9cfdc92627fc..7f946bfeb020 100644 --- a/packages/gapic-generator/WORKSPACE +++ 
b/packages/gapic-generator/WORKSPACE @@ -60,9 +60,9 @@ gapic_generator_python() gapic_generator_register_toolchains() -_grpc_version = "1.64.2" +_grpc_version = "1.67.1" -_grpc_sha256 = "8579095a31e280d0c5fcc81ea0a2a0efb2900dbfbac0eb018a961a5be22e076e" +_grpc_sha256 = "f83aedc91b84d4c396d30b0b2a30f7113c651fe5bc180c8ac08a5f0ff7dcffd2" http_archive( name = "com_github_grpc_grpc", diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 1ab0eb2b736c..d1e73d2b47e5 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,83 +8,83 @@ aiohappyeyeballs==2.4.3 \ --hash=sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586 \ --hash=sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572 # via aiohttp -aiohttp==3.11.2 \ - --hash=sha256:08ebe7a1d6c1e5ca766d68407280d69658f5f98821c2ba6c41c63cabfed159af \ - --hash=sha256:0a90a0dc4b054b5af299a900bf950fe8f9e3e54322bc405005f30aa5cacc5c98 \ - --hash=sha256:0cba0b8d25aa2d450762f3dd6df85498f5e7c3ad0ddeb516ef2b03510f0eea32 \ - --hash=sha256:0ebdf5087e2ce903d8220cc45dcece90c2199ae4395fd83ca616fcc81010db2c \ - --hash=sha256:10a5f91c319d9d4afba812f72984816b5fcd20742232ff7ecc1610ffbf3fc64d \ - --hash=sha256:122768e3ae9ce74f981b46edefea9c6e5a40aea38aba3ac50168e6370459bf20 \ - --hash=sha256:14eb6c628432720e41b4fab1ada879d56cfe7034159849e083eb536b4c2afa99 \ - --hash=sha256:177b000efaf8d2f7012c649e8aee5b0bf488677b1162be5e7511aa4f9d567607 \ - --hash=sha256:1c2496182e577042e0e07a328d91c949da9e77a2047c7291071e734cd7a6e780 \ - --hash=sha256:1e33a7eddcd07545ccf5c3ab230f60314a17dc33e285475e8405e26e21f02660 \ - --hash=sha256:2793d3297f3e49015140e6d3ea26142c967e07998e2fb00b6ee8d041138fbc4e \ - --hash=sha256:2914061f5ca573f990ec14191e6998752fa8fe50d518e3405410353c3f44aa5d \ - --hash=sha256:2adb967454e10e69478ba4a8d8afbba48a7c7a8619216b7c807f8481cc66ddfb \ - 
--hash=sha256:2b02a68b9445c70d7f5c8b578c5f5e5866b1d67ca23eb9e8bc8658ae9e3e2c74 \ - --hash=sha256:3129151378f858cdc4a0a4df355c9a0d060ab49e2eea7e62e9f085bac100551b \ - --hash=sha256:32334f35824811dd20a12cc90825d000e6b50faaeaa71408d42269151a66140d \ - --hash=sha256:33af11eca7bb0f5c6ffaf5e7d9d2336c2448f9c6279b93abdd6f3c35f9ee321f \ - --hash=sha256:34f37c59b12bc3afc52bab6fcd9cd3be82ff01c4598a84cbea934ccb3a9c54a0 \ - --hash=sha256:3666c750b73ce463a413692e3a57c60f7089e2d9116a2aa5a0f0eaf2ae325148 \ - --hash=sha256:374baefcb1b6275f350da605951f5f02487a9bc84a574a7d5b696439fabd49a3 \ - --hash=sha256:382f853516664d2ebfc75dc01da4a10fdef5edcb335fe7b45cf471ce758ecb18 \ - --hash=sha256:3b1f4844909321ef2c1cee50ddeccbd6018cd8c8d1ddddda3f553e94a5859497 \ - --hash=sha256:3f617a48b70f4843d54f52440ea1e58da6bdab07b391a3a6aed8d3b311a4cc04 \ - --hash=sha256:435f7a08d8aa42371a94e7c141205a9cb092ba551084b5e0c57492e6673601a3 \ - --hash=sha256:44b69c69c194ffacbc50165911cf023a4b1b06422d1e1199d3aea82eac17004e \ - --hash=sha256:486273d3b5af75a80c31c311988931bdd2a4b96a74d5c7f422bad948f99988ef \ - --hash=sha256:4a23475d8d5c56e447b7752a1e2ac267c1f723f765e406c81feddcd16cdc97bc \ - --hash=sha256:4c979fc92aba66730b66099cd5becb42d869a26c0011119bc1c2478408a8bf7a \ - --hash=sha256:4d7fad8c456d180a6d2f44c41cfab4b80e2e81451815825097db48b8293f59d5 \ - --hash=sha256:50e0aee4adc9abcd2109c618a8d1b2c93b85ac277b24a003ab147d91e068b06d \ - --hash=sha256:556564d89e2f4a6e8fe000894c03e4e84cf0b6cfa5674e425db122633ee244d1 \ - --hash=sha256:5587da333b7d280a312715b843d43e734652aa382cba824a84a67c81f75b338b \ - --hash=sha256:57993f406ce3f114b2a6756d7809be3ffd0cc40f33e8f8b9a4aa1b027fd4e3eb \ - --hash=sha256:5d6e069b882c1fdcbe5577dc4be372eda705180197140577a4cddb648c29d22e \ - --hash=sha256:5d878a0186023ac391861958035174d0486f3259cabf8fd94e591985468da3ea \ - --hash=sha256:5d90b5a3b0f32a5fecf5dd83d828713986c019585f5cddf40d288ff77f366615 \ - --hash=sha256:5e9a766c346b2ed7e88937919d84ed64b4ef489dad1d8939f806ee52901dc142 \ - 
--hash=sha256:64e8f5178958a9954043bc8cd10a5ae97352c3f2fc99aa01f2aebb0026010910 \ - --hash=sha256:66e58a2e8c7609a3545c4b38fb8b01a6b8346c4862e529534f7674c5265a97b8 \ - --hash=sha256:68d1f46f9387db3785508f5225d3acbc5825ca13d9c29f2b5cce203d5863eb79 \ - --hash=sha256:6ad9a7d2a3a0f235184426425f80bd3b26c66b24fd5fddecde66be30c01ebe6e \ - --hash=sha256:6e8e19a80ba194db5c06915a9df23c0c06e0e9ca9a4db9386a6056cca555a027 \ - --hash=sha256:73a664478ae1ea011b5a710fb100b115ca8b2146864fa0ce4143ff944df714b8 \ - --hash=sha256:766d0ebf8703d28f854f945982aa09224d5a27a29594c70d921c43c3930fe7ac \ - --hash=sha256:783741f534c14957fbe657d62a34b947ec06db23d45a2fd4a8aeb73d9c84d7e6 \ - --hash=sha256:79efd1ee3827b2f16797e14b1e45021206c3271249b4d0025014466d416d7413 \ - --hash=sha256:83a70e22e0f6222effe7f29fdeba6c6023f9595e59a0479edacfbd7de4b77bb7 \ - --hash=sha256:85de9904bc360fd29a98885d2bfcbd4e02ab33c53353cb70607f2bea2cb92468 \ - --hash=sha256:8d954ba0eae7f33884d27dc00629ca4389d249eb8d26ca07c30911257cae8c96 \ - --hash=sha256:9075313f8e41b481e4cb10af405054564b0247dc335db5398ed05f8ec38787e2 \ - --hash=sha256:97fba98fc5d9ccd3d33909e898d00f2494d6a9eec7cbda3d030632e2c8bb4d00 \ - --hash=sha256:994cb893936dd2e1803655ae8667a45066bfd53360b148e22b4e3325cc5ea7a3 \ - --hash=sha256:9aa4e68f1e4f303971ec42976fb170204fb5092de199034b57199a1747e78a2d \ - --hash=sha256:9b6d15adc9768ff167614ca853f7eeb6ee5f1d55d5660e3af85ce6744fed2b82 \ - --hash=sha256:9bbb2dbc2701ab7e9307ca3a8fa4999c5b28246968e0a0202a5afabf48a42e22 \ - --hash=sha256:9c8d1db4f65bbc9d75b7b271d68fb996f1c8c81a525263862477d93611856c2d \ - --hash=sha256:a7b0a1618060e3f5aa73d3526ca2108a16a1b6bf86612cd0bb2ddcbef9879d06 \ - --hash=sha256:afa55e863224e664a782effa62245df73fdfc55aee539bed6efacf35f6d4e4b7 \ - --hash=sha256:b339d91ac9060bd6ecdc595a82dc151045e5d74f566e0864ef3f2ba0887fec42 \ - --hash=sha256:b470de64d17156c37e91effc109d3b032b39867000e2c126732fe01d034441f9 \ - --hash=sha256:b4ec8afd362356b8798c8caa806e91deb3f0602d8ffae8e91d2d3ced2a90c35e \ - 
--hash=sha256:c28c1677ea33ccb8b14330560094cc44d3ff4fad617a544fd18beb90403fe0f1 \ - --hash=sha256:c681f34e2814bc6e1eef49752b338061b94a42c92734d0be9513447d3f83718c \ - --hash=sha256:cccb2937bece1310c5c0163d0406aba170a2e5fb1f0444d7b0e7fdc9bd6bb713 \ - --hash=sha256:cdc6f8dce09281ae534eaf08a54f0d38612398375f28dad733a8885f3bf9b978 \ - --hash=sha256:d23854e5867650d40cba54d49956aad8081452aa80b2cf0d8c310633f4f48510 \ - --hash=sha256:d2d942421cf3a1d1eceae8fa192f1fbfb74eb9d3e207d35ad2696bd2ce2c987c \ - --hash=sha256:d2f991c18132f3e505c108147925372ffe4549173b7c258cf227df1c5977a635 \ - --hash=sha256:d3a2bcf6c81639a165da93469e1e0aff67c956721f3fa9c0560f07dd1e505116 \ - --hash=sha256:d84930b4145991214602372edd7305fc76b700220db79ac0dd57d3afd0f0a1ca \ - --hash=sha256:de3b4d5fb5d69749104b880a157f38baeea7765c93d9cd3837cedd5b84729e10 \ - --hash=sha256:e57a10aacedcf24666f4c90d03e599f71d172d1c5e00dcf48205c445806745b0 \ - --hash=sha256:f1d06c8fd8b453c3e553c956bd3b8395100401060430572174bb7876dd95ad49 \ - --hash=sha256:f833a80d9de9307d736b6af58c235b17ef7f90ebea7b9c49cd274dec7a66a2f1 \ - --hash=sha256:fb0544a0e8294a5a5e20d3cacdaaa9a911d7c0a9150f5264aef36e7d8fdfa07e \ - --hash=sha256:ff5d22eece44528023254b595c670dfcf9733ac6af74c4b6cb4f6a784dc3870c +aiohttp==3.11.7 \ + --hash=sha256:018f1b04883a12e77e7fc161934c0f298865d3a484aea536a6a2ca8d909f0ba0 \ + --hash=sha256:01a8aca4af3da85cea5c90141d23f4b0eee3cbecfd33b029a45a80f28c66c668 \ + --hash=sha256:04b0cc74d5a882c9dacaeeccc1444f0233212b6f5be8bc90833feef1e1ce14b9 \ + --hash=sha256:0de6466b9d742b4ee56fe1b2440706e225eb48c77c63152b1584864a236e7a50 \ + --hash=sha256:12724f3a211fa243570e601f65a8831372caf1a149d2f1859f68479f07efec3d \ + --hash=sha256:12e4d45847a174f77b2b9919719203769f220058f642b08504cf8b1cf185dacf \ + --hash=sha256:17829f37c0d31d89aa6b8b010475a10233774771f9b6dc2cc352ea4f8ce95d9a \ + --hash=sha256:1a17f6a230f81eb53282503823f59d61dff14fb2a93847bf0399dc8e87817307 \ + 
--hash=sha256:1cf03d27885f8c5ebf3993a220cc84fc66375e1e6e812731f51aab2b2748f4a6 \ + --hash=sha256:1fbf41a6bbc319a7816ae0f0177c265b62f2a59ad301a0e49b395746eb2a9884 \ + --hash=sha256:2257bdd5cf54a4039a4337162cd8048f05a724380a2283df34620f55d4e29341 \ + --hash=sha256:24054fce8c6d6f33a3e35d1c603ef1b91bbcba73e3f04a22b4f2f27dac59b347 \ + --hash=sha256:241a6ca732d2766836d62c58c49ca7a93d08251daef0c1e3c850df1d1ca0cbc4 \ + --hash=sha256:28c7af3e50e5903d21d7b935aceed901cc2475463bc16ddd5587653548661fdb \ + --hash=sha256:351849aca2c6f814575c1a485c01c17a4240413f960df1bf9f5deb0003c61a53 \ + --hash=sha256:3ce18f703b7298e7f7633efd6a90138d99a3f9a656cb52c1201e76cb5d79cf08 \ + --hash=sha256:3d1c9c15d3999107cbb9b2d76ca6172e6710a12fda22434ee8bd3f432b7b17e8 \ + --hash=sha256:3dd3e7e7c9ef3e7214f014f1ae260892286647b3cf7c7f1b644a568fd410f8ca \ + --hash=sha256:43bfd25113c1e98aec6c70e26d5f4331efbf4aa9037ba9ad88f090853bf64d7f \ + --hash=sha256:43dd89a6194f6ab02a3fe36b09e42e2df19c211fc2050ce37374d96f39604997 \ + --hash=sha256:481f10a1a45c5f4c4a578bbd74cff22eb64460a6549819242a87a80788461fba \ + --hash=sha256:4ba8d043fed7ffa117024d7ba66fdea011c0e7602327c6d73cacaea38abe4491 \ + --hash=sha256:4bb7493c3e3a36d3012b8564bd0e2783259ddd7ef3a81a74f0dbfa000fce48b7 \ + --hash=sha256:4c1a6309005acc4b2bcc577ba3b9169fea52638709ffacbd071f3503264620da \ + --hash=sha256:4dda726f89bfa5c465ba45b76515135a3ece0088dfa2da49b8bb278f3bdeea12 \ + --hash=sha256:53c921b58fdc6485d6b2603e0132bb01cd59b8f0620ffc0907f525e0ba071687 \ + --hash=sha256:5578cf40440eafcb054cf859964bc120ab52ebe0e0562d2b898126d868749629 \ + --hash=sha256:59ee1925b5a5efdf6c4e7be51deee93984d0ac14a6897bd521b498b9916f1544 \ + --hash=sha256:670847ee6aeb3a569cd7cdfbe0c3bec1d44828bbfbe78c5d305f7f804870ef9e \ + --hash=sha256:78c657ece7a73b976905ab9ec8be9ef2df12ed8984c24598a1791c58ce3b4ce4 \ + --hash=sha256:7a9318da4b4ada9a67c1dd84d1c0834123081e746bee311a16bb449f363d965e \ + --hash=sha256:7b2f8107a3c329789f3c00b2daad0e35f548d0a55cda6291579136622099a46e \ + 
--hash=sha256:7ea4490360b605804bea8173d2d086b6c379d6bb22ac434de605a9cbce006e7d \ + --hash=sha256:8360c7cc620abb320e1b8d603c39095101391a82b1d0be05fb2225471c9c5c52 \ + --hash=sha256:875f7100ce0e74af51d4139495eec4025affa1a605280f23990b6434b81df1bd \ + --hash=sha256:8bedb1f6cb919af3b6353921c71281b1491f948ca64408871465d889b4ee1b66 \ + --hash=sha256:8d20cfe63a1c135d26bde8c1d0ea46fd1200884afbc523466d2f1cf517d1fe33 \ + --hash=sha256:9202f184cc0582b1db15056f2225ab4c1e3dac4d9ade50dd0613ac3c46352ac2 \ + --hash=sha256:9acfc7f652b31853eed3b92095b0acf06fd5597eeea42e939bd23a17137679d5 \ + --hash=sha256:9d18a8b44ec8502a7fde91446cd9c9b95ce7c49f1eacc1fb2358b8907d4369fd \ + --hash=sha256:9e67531370a3b07e49b280c1f8c2df67985c790ad2834d1b288a2f13cd341c5f \ + --hash=sha256:9ee6a4cdcbf54b8083dc9723cdf5f41f722c00db40ccf9ec2616e27869151129 \ + --hash=sha256:a7d9a606355655617fee25dd7e54d3af50804d002f1fd3118dd6312d26692d70 \ + --hash=sha256:aa3705a8d14de39898da0fbad920b2a37b7547c3afd2a18b9b81f0223b7d0f68 \ + --hash=sha256:b7215bf2b53bc6cb35808149980c2ae80a4ae4e273890ac85459c014d5aa60ac \ + --hash=sha256:badda65ac99555791eed75e234afb94686ed2317670c68bff8a4498acdaee935 \ + --hash=sha256:bf0e6cce113596377cadda4e3ac5fb89f095bd492226e46d91b4baef1dd16f60 \ + --hash=sha256:c171fc35d3174bbf4787381716564042a4cbc008824d8195eede3d9b938e29a8 \ + --hash=sha256:c1f6490dd1862af5aae6cfcf2a274bffa9a5b32a8f5acb519a7ecf5a99a88866 \ + --hash=sha256:c25b74a811dba37c7ea6a14d99eb9402d89c8d739d50748a75f3cf994cf19c43 \ + --hash=sha256:c6095aaf852c34f42e1bd0cf0dc32d1e4b48a90bfb5054abdbb9d64b36acadcb \ + --hash=sha256:c63f898f683d1379b9be5afc3dd139e20b30b0b1e0bf69a3fc3681f364cf1629 \ + --hash=sha256:cd8d62cab363dfe713067027a5adb4907515861f1e4ce63e7be810b83668b847 \ + --hash=sha256:ce91a24aac80de6be8512fb1c4838a9881aa713f44f4e91dd7bb3b34061b497d \ + --hash=sha256:cea52d11e02123f125f9055dfe0ccf1c3857225fb879e4a944fae12989e2aef2 \ + --hash=sha256:cf4efa2d01f697a7dbd0509891a286a4af0d86902fc594e20e3b1712c28c0106 \ + 
--hash=sha256:d2fa6fc7cc865d26ff42480ac9b52b8c9b7da30a10a6442a9cdf429de840e949 \ + --hash=sha256:d329300fb23e14ed1f8c6d688dfd867d1dcc3b1d7cd49b7f8c5b44e797ce0932 \ + --hash=sha256:d6177077a31b1aecfc3c9070bd2f11419dbb4a70f30f4c65b124714f525c2e48 \ + --hash=sha256:db37248535d1ae40735d15bdf26ad43be19e3d93ab3f3dad8507eb0f85bb8124 \ + --hash=sha256:db70a47987e34494b451a334605bee57a126fe8d290511349e86810b4be53b01 \ + --hash=sha256:dcefcf2915a2dbdbce37e2fc1622129a1918abfe3d06721ce9f6cdac9b6d2eaa \ + --hash=sha256:dda3ed0a7869d2fa16aa41f9961ade73aa2c2e3b2fcb0a352524e7b744881889 \ + --hash=sha256:e0bf378db07df0a713a1e32381a1b277e62ad106d0dbe17b5479e76ec706d720 \ + --hash=sha256:e13a05db87d3b241c186d0936808d0e4e12decc267c617d54e9c643807e968b6 \ + --hash=sha256:e143b0ef9cb1a2b4f74f56d4fbe50caa7c2bb93390aff52f9398d21d89bc73ea \ + --hash=sha256:e22d1721c978a6494adc824e0916f9d187fa57baeda34b55140315fa2f740184 \ + --hash=sha256:e5522ee72f95661e79db691310290c4618b86dff2d9b90baedf343fd7a08bf79 \ + --hash=sha256:e993676c71288618eb07e20622572b1250d8713e7e00ab3aabae28cb70f3640d \ + --hash=sha256:ee9afa1b0d2293c46954f47f33e150798ad68b78925e3710044e0d67a9487791 \ + --hash=sha256:f1ac5462582d6561c1c1708853a9faf612ff4e5ea5e679e99be36143d6eabd8e \ + --hash=sha256:f5022504adab881e2d801a88b748ea63f2a9d130e0b2c430824682a96f6534be \ + --hash=sha256:f5b973cce96793725ef63eb449adfb74f99c043c718acb76e0d2a447ae369962 \ + --hash=sha256:f7c58a240260822dc07f6ae32a0293dd5bccd618bb2d0f36d51c5dbd526f89c0 \ + --hash=sha256:fc6da202068e0a268e298d7cd09b6e9f3997736cd9b060e2750963754552a0a9 \ + --hash=sha256:fdadc3f6a32d6eca45f9a900a254757fd7855dfb2d8f8dcf0e88f0fae3ff8eb1 # via -r requirements.in aiosignal==1.3.1 \ --hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \ @@ -413,38 +413,48 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via -r 
requirements.in -libcst==1.5.0 \ - --hash=sha256:02be4aab728261bb76d16e77c9a457884cebb60d09c8edee844de43b0e08aff7 \ - --hash=sha256:208ea92d80b2eeed8cbc879d5f39f241582a5d56b916b1b65ed2be2f878a2425 \ - --hash=sha256:23d0e07fd3ed11480f8993a1e99d58a45f914a711b14f858b8db08ae861a8a34 \ - --hash=sha256:2d5978fd60c66794bb60d037b2e6427ea52d032636e84afce32b0f04e1cf500a \ - --hash=sha256:40748361f4ea66ab6cdd82f8501c82c29808317ac7a3bd132074efd5fd9bfae2 \ - --hash=sha256:48e581af6127c5af4c9f483e5986d94f0c6b2366967ee134f0a8eba0aa4c8c12 \ - --hash=sha256:4d6acb0bdee1e55b44c6215c59755ec4693ac01e74bb1fde04c37358b378835d \ - --hash=sha256:4f71aed85932c2ea92058fd9bbd99a6478bd69eada041c3726b4f4c9af1f564e \ - --hash=sha256:52b6aadfe54e3ae52c3b815eaaa17ba4da9ff010d5e8adf6a70697872886dd10 \ - --hash=sha256:585b3aa705b3767d717d2100935d8ef557275ecdd3fac81c3e28db0959efb0ea \ - --hash=sha256:5f10124bf99a0b075eae136ef0ce06204e5f6b8da4596a9c4853a0663e80ddf3 \ - --hash=sha256:6453b5a8755a6eee3ad67ee246f13a8eac9827d2cfc8e4a269e8bf0393db74bc \ - --hash=sha256:6fb324ed20f3a725d152df5dba8d80f7e126d9c93cced581bf118a5fc18c1065 \ - --hash=sha256:7dba93cca0a5c6d771ed444c44d21ce8ea9b277af7036cea3743677aba9fbbb8 \ - --hash=sha256:80b5c4d87721a7bab265c202575809b810815ab81d5e2e7a5d4417a087975840 \ - --hash=sha256:83bc5fbe34d33597af1d5ea113dcb9b5dd5afe5a5f4316bac4293464d5e3971a \ - --hash=sha256:8478abf21ae3861a073e898d80b822bd56e578886331b33129ba77fec05b8c24 \ - --hash=sha256:88520b6dea59eaea0cae80f77c0a632604a82c5b2d23dedb4b5b34035cbf1615 \ - --hash=sha256:8935dd3393e30c2f97344866a4cb14efe560200e232166a8db1de7865c2ef8b2 \ - --hash=sha256:96adc45e96476350df6b8a5ddbb1e1d6a83a7eb3f13087e52eb7cd2f9b65bcc7 \ - --hash=sha256:99e7c52150a135d66716b03e00c7b1859a44336dc2a2bf8f9acc164494308531 \ - --hash=sha256:9cccfc0a78e110c0d0a9d2c6fdeb29feb5274c9157508a8baef7edf352420f6d \ - --hash=sha256:a8fcd78be4d9ce3c36d0c5d0bdd384e0c7d5f72970a9e4ebd56070141972b4ad \ - 
--hash=sha256:b48bf71d52c1e891a0948465a94d9817b5fc1ec1a09603566af90585f3b11948 \ - --hash=sha256:b5b5bcd3a9ba92840f27ad34eaa038acbee195ec337da39536c0a2efbbf28efd \ - --hash=sha256:b60b09abcc2848ab52d479c3a9b71b606d91a941e3779616efd083bb87dbe8ad \ - --hash=sha256:d2788b2b5838b78fe15df8e9fa6b6903195ea49b2d2ba43e8f423f6c90e4b69f \ - --hash=sha256:d4592872aaf5b7fa5c2727a7d73c0985261f1b3fe7eff51f4fd5b8174f30b4e2 \ - --hash=sha256:d6502aeb11412afc759036160c686be1107eb5a4466db56b207c786b9b4da7c4 \ - --hash=sha256:d92c5ae2e2dc9356ad7e3d05077d9b7e5065423e45788fd86729c88729e45c6e \ - --hash=sha256:fc80ea16c7d44e38f193e4d4ef7ff1e0ba72d8e60e8b61ac6f4c87f070a118bd +libcst==1.5.1 \ + --hash=sha256:00f3d2f32ee081bad3394546b0b9ac5e31686d3b5cfe4892d716d2ba65f9ec08 \ + --hash=sha256:01e01c04f0641188160d3b99c6526436e93a3fbf9783dba970f9885a77ec9b38 \ + --hash=sha256:02b38fa4d9f13e79fe69e9b5407b9e173557bcfb5960f7866cf4145af9c7ae09 \ + --hash=sha256:06a9b4c9b76da4a7399e6f1f3a325196fb5febd3ea59fac1f68e2116f3517cd8 \ + --hash=sha256:15697ea9f1edbb9a263364d966c72abda07195d1c1a6838eb79af057f1040770 \ + --hash=sha256:1947790a4fd7d96bcc200a6ecaa528045fcb26a34a24030d5859c7983662289e \ + --hash=sha256:19e39cfef4316599ca20d1c821490aeb783b52e8a8543a824972a525322a85d0 \ + --hash=sha256:1cc7393aaac733e963f0ee00466d059db74a38e15fc7e6a46dddd128c5be8d08 \ + --hash=sha256:1ff21005c33b634957a98db438e882522febf1cacc62fa716f29e163a3f5871a \ + --hash=sha256:26c804fa8091747128579013df0b5f8e6b0c7904d9c4ee83841f136f53e18684 \ + --hash=sha256:2e397f5b6c0fc271acea44579f154b0f3ab36011050f6db75ab00cef47441946 \ + --hash=sha256:3334afe9e7270e175de01198f816b0dc78dda94d9d72152b61851c323e4e741e \ + --hash=sha256:40b75bf2d70fc0bc26b1fa73e61bdc46fef59f5c71aedf16128e7c33db8d5e40 \ + --hash=sha256:40fbbaa8b839bfbfa5b300623ca2b6b0768b58bbc31b341afbc99110c9bee232 \ + --hash=sha256:56c944acaa781b8e586df3019374f5cf117054d7fc98f85be1ba84fe810005dc \ + 
--hash=sha256:5987daff8389b0df60b5c20499ff4fb73fc03cb3ae1f6a746eefd204ed08df85 \ + --hash=sha256:666813950b8637af0c0e96b1ca46f5d5f183d2fe50bbac2186f5b283a99f3529 \ + --hash=sha256:697eabe9f5ffc40f76d6d02e693274e0a382826d0cf8183bd44e7407dfb0ab90 \ + --hash=sha256:6d9ec764aa781ef35ab96b693569ac3dced16df9feb40ee6c274d13e86a1472e \ + --hash=sha256:71cb294db84df9e410208009c732628e920111683c2f2b2e0c5b71b98464f365 \ + --hash=sha256:72132756f985a19ef64d702a821099d4afc3544974662772b44cbc55b7279727 \ + --hash=sha256:76a8ac7a84f9b6f678a668bff85b360e0a93fa8d7f25a74a206a28110734bb2a \ + --hash=sha256:89c808bdb5fa9ca02df41dd234cbb0e9de0d2e0c029c7063d5435a9f6781cc10 \ + --hash=sha256:940ec4c8db4c2d620a7268d6c83e64ff646e4afd74ae5183d0f0ef3b80e05be0 \ + --hash=sha256:99bbffd8596d192bc0e844a4cf3c4fc696979d4e20ab1c0774a01768a59b47ed \ + --hash=sha256:aa524bd012aaae1f485fd44490ef5abf708b14d2addc0f06b28de3e4585c4b9e \ + --hash=sha256:ab83633e61ee91df575a3838b1e73c371f19d4916bf1816554933235553d41ea \ + --hash=sha256:b58a49895d95ec1fd34fad041a142d98edf9b51fcaf632337c13befeb4d51c7c \ + --hash=sha256:b5a0d3c632aa2b21c5fa145e4e8dbf86f45c9b37a64c0b7221a5a45caf58915a \ + --hash=sha256:b7b58b36022ae77a5a00002854043ae95c03e92f6062ad08473eff326f32efa0 \ + --hash=sha256:bbaf5755be50fa9b35a3d553d1e62293fbb2ee5ce2c16c7e7ffeb2746af1ab88 \ + --hash=sha256:c615af2117320e9a218083c83ec61227d3547e38a0de80329376971765f27a9e \ + --hash=sha256:c7021e3904d8d088c369afc3fe17c279883e583415ef07edacadba76cfbecd27 \ + --hash=sha256:cedd4c8336e01c51913113fbf5566b8f61a86d90f3d5cc5b1cb5049575622c5f \ + --hash=sha256:db7711a762b0327b581be5a963908fecd74412bdda34db34553faa521563c22d \ + --hash=sha256:dc06b7c60d086ef1832aebfd31b64c3c8a645adf0c5638d6243e5838f6a9356e \ + --hash=sha256:ec6ee607cfe4cc4cc93e56e0188fdb9e50399d61a1262d58229752946f288f5e \ + --hash=sha256:eeb13d7c598fe9a798a1d22eae56ab3d3d599b38b83436039bd6ae229fc854d7 \ + --hash=sha256:f053a5deb6a214972dbe9fa26ecd8255edb903de084a3d7715bf9e9da8821c50 \ + 
--hash=sha256:f3ffb8135c09e41e8cf710b152c33e9b7f1d0d0b9f242bae0c502eb082fdb1fb \ + --hash=sha256:fbccb016b1ac6d892344300dcccc8a16887b71bb7f875ba56c0ed6c1a7ade8be # via -r requirements.in markupsafe==3.0.2 \ --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ @@ -840,87 +850,87 @@ urllib3==2.2.3 \ --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests -yarl==1.17.2 \ - --hash=sha256:0c8e589379ef0407b10bed16cc26e7392ef8f86961a706ade0a22309a45414d7 \ - --hash=sha256:0d41c684f286ce41fa05ab6af70f32d6da1b6f0457459a56cf9e393c1c0b2217 \ - --hash=sha256:1056cadd5e850a1c026f28e0704ab0a94daaa8f887ece8dfed30f88befb87bb0 \ - --hash=sha256:11d86c6145ac5c706c53d484784cf504d7d10fa407cb73b9d20f09ff986059ef \ - --hash=sha256:170ed4971bf9058582b01a8338605f4d8c849bd88834061e60e83b52d0c76870 \ - --hash=sha256:17791acaa0c0f89323c57da7b9a79f2174e26d5debbc8c02d84ebd80c2b7bff8 \ - --hash=sha256:17931dfbb84ae18b287279c1f92b76a3abcd9a49cd69b92e946035cff06bcd20 \ - --hash=sha256:18662443c6c3707e2fc7fad184b4dc32dd428710bbe72e1bce7fe1988d4aa654 \ - --hash=sha256:187df91395c11e9f9dc69b38d12406df85aa5865f1766a47907b1cc9855b6303 \ - --hash=sha256:1fee66b32e79264f428dc8da18396ad59cc48eef3c9c13844adec890cd339db5 \ - --hash=sha256:2270d590997445a0dc29afa92e5534bfea76ba3aea026289e811bf9ed4b65a7f \ - --hash=sha256:2654caaf5584449d49c94a6b382b3cb4a246c090e72453493ea168b931206a4d \ - --hash=sha256:26bfb6226e0c157af5da16d2d62258f1ac578d2899130a50433ffee4a5dfa673 \ - --hash=sha256:2941756754a10e799e5b87e2319bbec481ed0957421fba0e7b9fb1c11e40509f \ - --hash=sha256:3294f787a437cb5d81846de3a6697f0c35ecff37a932d73b1fe62490bef69211 \ - --hash=sha256:358dc7ddf25e79e1cc8ee16d970c23faee84d532b873519c5036dbb858965795 \ - --hash=sha256:38bc4ed5cae853409cb193c87c86cd0bc8d3a70fd2268a9807217b9176093ac6 \ - 
--hash=sha256:3a0baff7827a632204060f48dca9e63fbd6a5a0b8790c1a2adfb25dc2c9c0d50 \ - --hash=sha256:3a3ede8c248f36b60227eb777eac1dbc2f1022dc4d741b177c4379ca8e75571a \ - --hash=sha256:3a58a2f2ca7aaf22b265388d40232f453f67a6def7355a840b98c2d547bd037f \ - --hash=sha256:4434b739a8a101a837caeaa0137e0e38cb4ea561f39cb8960f3b1e7f4967a3fc \ - --hash=sha256:460024cacfc3246cc4d9f47a7fc860e4fcea7d1dc651e1256510d8c3c9c7cde0 \ - --hash=sha256:46c465ad06971abcf46dd532f77560181387b4eea59084434bdff97524444032 \ - --hash=sha256:48e424347a45568413deec6f6ee2d720de2cc0385019bedf44cd93e8638aa0ed \ - --hash=sha256:4a8c83f6fcdc327783bdc737e8e45b2e909b7bd108c4da1892d3bc59c04a6d84 \ - --hash=sha256:4c840cc11163d3c01a9d8aad227683c48cd3e5be5a785921bcc2a8b4b758c4f3 \ - --hash=sha256:4d486ddcaca8c68455aa01cf53d28d413fb41a35afc9f6594a730c9779545876 \ - --hash=sha256:4e76381be3d8ff96a4e6c77815653063e87555981329cf8f85e5be5abf449021 \ - --hash=sha256:50d866f7b1a3f16f98603e095f24c0eeba25eb508c85a2c5939c8b3870ba2df8 \ - --hash=sha256:52492b87d5877ec405542f43cd3da80bdcb2d0c2fbc73236526e5f2c28e6db28 \ - --hash=sha256:56afb44a12b0864d17b597210d63a5b88915d680f6484d8d202ed68ade38673d \ - --hash=sha256:585ce7cd97be8f538345de47b279b879e091c8b86d9dbc6d98a96a7ad78876a3 \ - --hash=sha256:5870d620b23b956f72bafed6a0ba9a62edb5f2ef78a8849b7615bd9433384171 \ - --hash=sha256:5c6ea72fe619fee5e6b5d4040a451d45d8175f560b11b3d3e044cd24b2720526 \ - --hash=sha256:688058e89f512fb7541cb85c2f149c292d3fa22f981d5a5453b40c5da49eb9e8 \ - --hash=sha256:6a3f47930fbbed0f6377639503848134c4aa25426b08778d641491131351c2c8 \ - --hash=sha256:6b981316fcd940f085f646b822c2ff2b8b813cbd61281acad229ea3cbaabeb6b \ - --hash=sha256:734144cd2bd633a1516948e477ff6c835041c0536cef1d5b9a823ae29899665b \ - --hash=sha256:736bb076f7299c5c55dfef3eb9e96071a795cb08052822c2bb349b06f4cb2e0a \ - --hash=sha256:752485cbbb50c1e20908450ff4f94217acba9358ebdce0d8106510859d6eb19a \ - --hash=sha256:753eaaa0c7195244c84b5cc159dc8204b7fd99f716f11198f999f2332a86b178 \ - 
--hash=sha256:75ac158560dec3ed72f6d604c81090ec44529cfb8169b05ae6fcb3e986b325d9 \ - --hash=sha256:76499469dcc24759399accd85ec27f237d52dec300daaca46a5352fcbebb1071 \ - --hash=sha256:782ca9c58f5c491c7afa55518542b2b005caedaf4685ec814fadfcee51f02493 \ - --hash=sha256:792155279dc093839e43f85ff7b9b6493a8eaa0af1f94f1f9c6e8f4de8c63500 \ - --hash=sha256:7a1606ba68e311576bcb1672b2a1543417e7e0aa4c85e9e718ba6466952476c0 \ - --hash=sha256:8281db240a1616af2f9c5f71d355057e73a1409c4648c8949901396dc0a3c151 \ - --hash=sha256:871e1b47eec7b6df76b23c642a81db5dd6536cbef26b7e80e7c56c2fd371382e \ - --hash=sha256:8b9c4643e7d843a0dca9cd9d610a0876e90a1b2cbc4c5ba7930a0d90baf6903f \ - --hash=sha256:8c6d5fed96f0646bfdf698b0a1cebf32b8aae6892d1bec0c5d2d6e2df44e1e2d \ - --hash=sha256:8e1bf59e035534ba4077f5361d8d5d9194149f9ed4f823d1ee29ef3e8964ace3 \ - --hash=sha256:8fd51299e21da709eabcd5b2dd60e39090804431292daacbee8d3dabe39a6bc0 \ - --hash=sha256:91c012dceadc695ccf69301bfdccd1fc4472ad714fe2dd3c5ab4d2046afddf29 \ - --hash=sha256:93771146ef048b34201bfa382c2bf74c524980870bb278e6df515efaf93699ff \ - --hash=sha256:93d1c8cc5bf5df401015c5e2a3ce75a5254a9839e5039c881365d2a9dcfc6dc2 \ - --hash=sha256:9611b83810a74a46be88847e0ea616794c406dbcb4e25405e52bff8f4bee2d0a \ - --hash=sha256:9bc27dd5cfdbe3dc7f381b05e6260ca6da41931a6e582267d5ca540270afeeb2 \ - --hash=sha256:ac8eda86cc75859093e9ce390d423aba968f50cf0e481e6c7d7d63f90bae5c9c \ - --hash=sha256:bc3003710e335e3f842ae3fd78efa55f11a863a89a72e9a07da214db3bf7e1f8 \ - --hash=sha256:bc61b005f6521fcc00ca0d1243559a5850b9dd1e1fe07b891410ee8fe192d0c0 \ - --hash=sha256:be4c7b1c49d9917c6e95258d3d07f43cfba2c69a6929816e77daf322aaba6628 \ - --hash=sha256:c019abc2eca67dfa4d8fb72ba924871d764ec3c92b86d5b53b405ad3d6aa56b0 \ - --hash=sha256:c42774d1d1508ec48c3ed29e7b110e33f5e74a20957ea16197dbcce8be6b52ba \ - --hash=sha256:c556fbc6820b6e2cda1ca675c5fa5589cf188f8da6b33e9fc05b002e603e44fa \ - --hash=sha256:c6e659b9a24d145e271c2faf3fa6dd1fcb3e5d3f4e17273d9e0350b6ab0fe6e2 \ - 
--hash=sha256:c74f0b0472ac40b04e6d28532f55cac8090e34c3e81f118d12843e6df14d0909 \ - --hash=sha256:cd7e35818d2328b679a13268d9ea505c85cd773572ebb7a0da7ccbca77b6a52e \ - --hash=sha256:d17832ba39374134c10e82d137e372b5f7478c4cceeb19d02ae3e3d1daed8721 \ - --hash=sha256:d1fa68a3c921365c5745b4bd3af6221ae1f0ea1bf04b69e94eda60e57958907f \ - --hash=sha256:d63123bfd0dce5f91101e77c8a5427c3872501acece8c90df457b486bc1acd47 \ - --hash=sha256:da9d3061e61e5ae3f753654813bc1cd1c70e02fb72cf871bd6daf78443e9e2b1 \ - --hash=sha256:db5ac3871ed76340210fe028f535392f097fb31b875354bcb69162bba2632ef4 \ - --hash=sha256:dd7abf4f717e33b7487121faf23560b3a50924f80e4bef62b22dab441ded8f3b \ - --hash=sha256:dd90238d3a77a0e07d4d6ffdebc0c21a9787c5953a508a2231b5f191455f31e9 \ - --hash=sha256:ef6eee1a61638d29cd7c85f7fd3ac7b22b4c0fabc8fd00a712b727a3e73b0685 \ - --hash=sha256:f11fd61d72d93ac23718d393d2a64469af40be2116b24da0a4ca6922df26807e \ - --hash=sha256:f1e7fedb09c059efee2533119666ca7e1a2610072076926fa028c2ba5dfeb78c \ - --hash=sha256:f25b7e93f5414b9a983e1a6c1820142c13e1782cc9ed354c25e933aebe97fcf2 \ - --hash=sha256:f2f44a4247461965fed18b2573f3a9eb5e2c3cad225201ee858726cde610daca \ - --hash=sha256:f5ffc6b7ace5b22d9e73b2a4c7305740a339fbd55301d52735f73e21d9eb3130 \ - --hash=sha256:ff6af03cac0d1a4c3c19e5dcc4c05252411bf44ccaa2485e20d0a7c77892ab6e \ - --hash=sha256:ff8d95e06546c3a8c188f68040e9d0360feb67ba8498baf018918f669f7bc39b +yarl==1.18.0 \ + --hash=sha256:01be8688fc211dc237e628fcc209dda412d35de7642453059a0553747018d075 \ + --hash=sha256:039c299a0864d1f43c3e31570045635034ea7021db41bf4842693a72aca8df3a \ + --hash=sha256:074fee89caab89a97e18ef5f29060ef61ba3cae6cd77673acc54bfdd3214b7b7 \ + --hash=sha256:13aaf2bdbc8c86ddce48626b15f4987f22e80d898818d735b20bd58f17292ee8 \ + --hash=sha256:14408cc4d34e202caba7b5ac9cc84700e3421a9e2d1b157d744d101b061a4a88 \ + --hash=sha256:1db1537e9cb846eb0ff206eac667f627794be8b71368c1ab3207ec7b6f8c5afc \ + 
--hash=sha256:1ece25e2251c28bab737bdf0519c88189b3dd9492dc086a1d77336d940c28ced \ + --hash=sha256:1ff116f0285b5c8b3b9a2680aeca29a858b3b9e0402fc79fd850b32c2bcb9f8b \ + --hash=sha256:205de377bd23365cd85562c9c6c33844050a93661640fda38e0567d2826b50df \ + --hash=sha256:20d95535e7d833889982bfe7cc321b7f63bf8879788fee982c76ae2b24cfb715 \ + --hash=sha256:20de4a8b04de70c49698dc2390b7fd2d18d424d3b876371f9b775e2b462d4b41 \ + --hash=sha256:2d90f2e4d16a5b0915ee065218b435d2ef619dd228973b1b47d262a6f7cd8fa5 \ + --hash=sha256:2e6b4466714a73f5251d84b471475850954f1fa6acce4d3f404da1d55d644c34 \ + --hash=sha256:309f8d27d6f93ceeeb80aa6980e883aa57895270f7f41842b92247e65d7aeddf \ + --hash=sha256:32141e13a1d5a48525e519c9197d3f4d9744d818d5c7d6547524cc9eccc8971e \ + --hash=sha256:34176bfb082add67cb2a20abd85854165540891147f88b687a5ed0dc225750a0 \ + --hash=sha256:38b39b7b3e692b6c92b986b00137a3891eddb66311b229d1940dcbd4f025083c \ + --hash=sha256:3a3709450a574d61be6ac53d582496014342ea34876af8dc17cc16da32826c9a \ + --hash=sha256:3adaaf9c6b1b4fc258584f4443f24d775a2086aee82d1387e48a8b4f3d6aecf6 \ + --hash=sha256:3f576ed278860df2721a5d57da3381040176ef1d07def9688a385c8330db61a1 \ + --hash=sha256:42ba84e2ac26a3f252715f8ec17e6fdc0cbf95b9617c5367579fafcd7fba50eb \ + --hash=sha256:454902dc1830d935c90b5b53c863ba2a98dcde0fbaa31ca2ed1ad33b2a7171c6 \ + --hash=sha256:466d31fd043ef9af822ee3f1df8fdff4e8c199a7f4012c2642006af240eade17 \ + --hash=sha256:49a98ecadc5a241c9ba06de08127ee4796e1009555efd791bac514207862b43d \ + --hash=sha256:4d26f1fa9fa2167bb238f6f4b20218eb4e88dd3ef21bb8f97439fa6b5313e30d \ + --hash=sha256:52c136f348605974c9b1c878addd6b7a60e3bf2245833e370862009b86fa4689 \ + --hash=sha256:536a7a8a53b75b2e98ff96edb2dfb91a26b81c4fed82782035767db5a465be46 \ + --hash=sha256:576d258b21c1db4c6449b1c572c75d03f16a482eb380be8003682bdbe7db2f28 \ + --hash=sha256:609ffd44fed2ed88d9b4ef62ee860cf86446cf066333ad4ce4123505b819e581 \ + --hash=sha256:67b336c15e564d76869c9a21316f90edf546809a5796a083b8f57c845056bc01 \ + 
--hash=sha256:685cc37f3f307c6a8e879986c6d85328f4c637f002e219f50e2ef66f7e062c1d \ + --hash=sha256:6a49ad0102c0f0ba839628d0bf45973c86ce7b590cdedf7540d5b1833ddc6f00 \ + --hash=sha256:6fb64dd45453225f57d82c4764818d7a205ee31ce193e9f0086e493916bd4f72 \ + --hash=sha256:701bb4a8f4de191c8c0cc9a1e6d5142f4df880e9d1210e333b829ca9425570ed \ + --hash=sha256:73553bbeea7d6ec88c08ad8027f4e992798f0abc459361bf06641c71972794dc \ + --hash=sha256:7520e799b1f84e095cce919bd6c23c9d49472deeef25fe1ef960b04cca51c3fc \ + --hash=sha256:7609b8462351c4836b3edce4201acb6dd46187b207c589b30a87ffd1813b48dc \ + --hash=sha256:7db9584235895a1dffca17e1c634b13870852094f6389b68dcc6338086aa7b08 \ + --hash=sha256:7fa7d37f2ada0f42e0723632993ed422f2a679af0e200874d9d861720a54f53e \ + --hash=sha256:80741ec5b471fbdfb997821b2842c59660a1c930ceb42f8a84ba8ca0f25a66aa \ + --hash=sha256:8254dbfce84ee5d1e81051ee7a0f1536c108ba294c0fdb5933476398df0654f3 \ + --hash=sha256:8b8d3e4e014fb4274f1c5bf61511d2199e263909fb0b8bda2a7428b0894e8dc6 \ + --hash=sha256:8e1c18890091aa3cc8a77967943476b729dc2016f4cfe11e45d89b12519d4a93 \ + --hash=sha256:9106025c7f261f9f5144f9aa7681d43867eed06349a7cfb297a1bc804de2f0d1 \ + --hash=sha256:91b8fb9427e33f83ca2ba9501221ffaac1ecf0407f758c4d2f283c523da185ee \ + --hash=sha256:96404e8d5e1bbe36bdaa84ef89dc36f0e75939e060ca5cd45451aba01db02902 \ + --hash=sha256:9b4c90c5363c6b0a54188122b61edb919c2cd1119684999d08cd5e538813a28e \ + --hash=sha256:a0509475d714df8f6d498935b3f307cd122c4ca76f7d426c7e1bb791bcd87eda \ + --hash=sha256:a173401d7821a2a81c7b47d4e7d5c4021375a1441af0c58611c1957445055056 \ + --hash=sha256:a45d94075ac0647621eaaf693c8751813a3eccac455d423f473ffed38c8ac5c9 \ + --hash=sha256:a5f72421246c21af6a92fbc8c13b6d4c5427dfd949049b937c3b731f2f9076bd \ + --hash=sha256:a64619a9c47c25582190af38e9eb382279ad42e1f06034f14d794670796016c0 \ + --hash=sha256:a7ee6884a8848792d58b854946b685521f41d8871afa65e0d4a774954e9c9e89 \ + --hash=sha256:ae38bd86eae3ba3d2ce5636cc9e23c80c9db2e9cb557e40b98153ed102b5a736 \ + 
--hash=sha256:b026cf2c32daf48d90c0c4e406815c3f8f4cfe0c6dfccb094a9add1ff6a0e41a \ + --hash=sha256:b0a2074a37285570d54b55820687de3d2f2b9ecf1b714e482e48c9e7c0402038 \ + --hash=sha256:b1a3297b9cad594e1ff0c040d2881d7d3a74124a3c73e00c3c71526a1234a9f7 \ + --hash=sha256:b212452b80cae26cb767aa045b051740e464c5129b7bd739c58fbb7deb339e7b \ + --hash=sha256:b234a4a9248a9f000b7a5dfe84b8cb6210ee5120ae70eb72a4dcbdb4c528f72f \ + --hash=sha256:b4095c5019bb889aa866bf12ed4c85c0daea5aafcb7c20d1519f02a1e738f07f \ + --hash=sha256:b8e8c516dc4e1a51d86ac975b0350735007e554c962281c432eaa5822aa9765c \ + --hash=sha256:bd80ed29761490c622edde5dd70537ca8c992c2952eb62ed46984f8eff66d6e8 \ + --hash=sha256:c083f6dd6951b86e484ebfc9c3524b49bcaa9c420cb4b2a78ef9f7a512bfcc85 \ + --hash=sha256:c0f4808644baf0a434a3442df5e0bedf8d05208f0719cedcd499e168b23bfdc4 \ + --hash=sha256:c4cb992d8090d5ae5f7afa6754d7211c578be0c45f54d3d94f7781c495d56716 \ + --hash=sha256:c60e547c0a375c4bfcdd60eef82e7e0e8698bf84c239d715f5c1278a73050393 \ + --hash=sha256:c73a6bbc97ba1b5a0c3c992ae93d721c395bdbb120492759b94cc1ac71bc6350 \ + --hash=sha256:c893f8c1a6d48b25961e00922724732d00b39de8bb0b451307482dc87bddcd74 \ + --hash=sha256:cd6ab7d6776c186f544f893b45ee0c883542b35e8a493db74665d2e594d3ca75 \ + --hash=sha256:d89ae7de94631b60d468412c18290d358a9d805182373d804ec839978b120422 \ + --hash=sha256:d9d4f5e471e8dc49b593a80766c2328257e405f943c56a3dc985c125732bc4cf \ + --hash=sha256:da206d1ec78438a563c5429ab808a2b23ad7bc025c8adbf08540dde202be37d5 \ + --hash=sha256:dbf53db46f7cf176ee01d8d98c39381440776fcda13779d269a8ba664f69bec0 \ + --hash=sha256:dd21c0128e301851de51bc607b0a6da50e82dc34e9601f4b508d08cc89ee7929 \ + --hash=sha256:e2580c1d7e66e6d29d6e11855e3b1c6381971e0edd9a5066e6c14d79bc8967af \ + --hash=sha256:e3818eabaefb90adeb5e0f62f047310079d426387991106d4fbf3519eec7d90a \ + --hash=sha256:ed69af4fe2a0949b1ea1d012bf065c77b4c7822bad4737f17807af2adb15a73c \ + --hash=sha256:f172b8b2c72a13a06ea49225a9c47079549036ad1b34afa12d5491b881f5b993 \ + 
--hash=sha256:f275ede6199d0f1ed4ea5d55a7b7573ccd40d97aee7808559e1298fe6efc8dbd \ + --hash=sha256:f7edeb1dcc7f50a2c8e08b9dc13a413903b7817e72273f00878cb70e766bdb3b \ + --hash=sha256:fa2c9cb607e0f660d48c54a63de7a9b36fef62f6b8bd50ff592ce1137e73ac7d \ + --hash=sha256:fe94d1de77c4cd8caff1bd5480e22342dbd54c93929f5943495d9c1e8abe9f42 # via aiohttp From 96f18102ff33991d2e0f0b8f847ac402307a9819 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 2 Dec 2024 21:20:20 +0100 Subject: [PATCH 1220/1339] chore(deps): update all dependencies (#2267) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 15 +- packages/gapic-generator/requirements.txt | 574 +++++++++++----------- 2 files changed, 306 insertions(+), 283 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 7f946bfeb020..99163725cf8e 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,14 +73,23 @@ http_archive( # instantiated in grpc_deps(). 
http_archive( name = "com_google_protobuf", - sha256 = "7c3ebd7aaedd86fa5dc479a0fda803f602caaf78d8aff7ce83b89e1b8ae7442a", - strip_prefix = "protobuf-28.3", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v28.3.tar.gz"], + sha256 = "10a0d58f39a1a909e95e00e8ba0b5b1dc64d02997f741151953a2b3659f6e78c", + strip_prefix = "protobuf-29.0", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.0.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") grpc_deps() +# Pin the version of rules_cc to the version that is present in +# https://github.com/protocolbuffers/protobuf/blob/29.x/protobuf_deps.bzl#L92-L98 +http_archive( + name = "rules_cc", + urls = ["https://github.com/bazelbuild/rules_cc/releases/download/0.0.16/rules_cc-0.0.16.tar.gz"], + sha256 = "bbf1ae2f83305b7053b11e4467d317a7ba3517a12cef608543c1b1c5bf48a4df", + strip_prefix = "rules_cc-0.0.16", +) + load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps", "PROTOBUF_MAVEN_ARTIFACTS") # This is actually already done within grpc_deps but calling this for Bazel convention. 
protobuf_deps() diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d1e73d2b47e5..7397b03d6d58 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,87 +4,87 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -aiohappyeyeballs==2.4.3 \ - --hash=sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586 \ - --hash=sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572 +aiohappyeyeballs==2.4.4 \ + --hash=sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745 \ + --hash=sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8 # via aiohttp -aiohttp==3.11.7 \ - --hash=sha256:018f1b04883a12e77e7fc161934c0f298865d3a484aea536a6a2ca8d909f0ba0 \ - --hash=sha256:01a8aca4af3da85cea5c90141d23f4b0eee3cbecfd33b029a45a80f28c66c668 \ - --hash=sha256:04b0cc74d5a882c9dacaeeccc1444f0233212b6f5be8bc90833feef1e1ce14b9 \ - --hash=sha256:0de6466b9d742b4ee56fe1b2440706e225eb48c77c63152b1584864a236e7a50 \ - --hash=sha256:12724f3a211fa243570e601f65a8831372caf1a149d2f1859f68479f07efec3d \ - --hash=sha256:12e4d45847a174f77b2b9919719203769f220058f642b08504cf8b1cf185dacf \ - --hash=sha256:17829f37c0d31d89aa6b8b010475a10233774771f9b6dc2cc352ea4f8ce95d9a \ - --hash=sha256:1a17f6a230f81eb53282503823f59d61dff14fb2a93847bf0399dc8e87817307 \ - --hash=sha256:1cf03d27885f8c5ebf3993a220cc84fc66375e1e6e812731f51aab2b2748f4a6 \ - --hash=sha256:1fbf41a6bbc319a7816ae0f0177c265b62f2a59ad301a0e49b395746eb2a9884 \ - --hash=sha256:2257bdd5cf54a4039a4337162cd8048f05a724380a2283df34620f55d4e29341 \ - --hash=sha256:24054fce8c6d6f33a3e35d1c603ef1b91bbcba73e3f04a22b4f2f27dac59b347 \ - --hash=sha256:241a6ca732d2766836d62c58c49ca7a93d08251daef0c1e3c850df1d1ca0cbc4 \ - --hash=sha256:28c7af3e50e5903d21d7b935aceed901cc2475463bc16ddd5587653548661fdb \ - --hash=sha256:351849aca2c6f814575c1a485c01c17a4240413f960df1bf9f5deb0003c61a53 \ - 
--hash=sha256:3ce18f703b7298e7f7633efd6a90138d99a3f9a656cb52c1201e76cb5d79cf08 \ - --hash=sha256:3d1c9c15d3999107cbb9b2d76ca6172e6710a12fda22434ee8bd3f432b7b17e8 \ - --hash=sha256:3dd3e7e7c9ef3e7214f014f1ae260892286647b3cf7c7f1b644a568fd410f8ca \ - --hash=sha256:43bfd25113c1e98aec6c70e26d5f4331efbf4aa9037ba9ad88f090853bf64d7f \ - --hash=sha256:43dd89a6194f6ab02a3fe36b09e42e2df19c211fc2050ce37374d96f39604997 \ - --hash=sha256:481f10a1a45c5f4c4a578bbd74cff22eb64460a6549819242a87a80788461fba \ - --hash=sha256:4ba8d043fed7ffa117024d7ba66fdea011c0e7602327c6d73cacaea38abe4491 \ - --hash=sha256:4bb7493c3e3a36d3012b8564bd0e2783259ddd7ef3a81a74f0dbfa000fce48b7 \ - --hash=sha256:4c1a6309005acc4b2bcc577ba3b9169fea52638709ffacbd071f3503264620da \ - --hash=sha256:4dda726f89bfa5c465ba45b76515135a3ece0088dfa2da49b8bb278f3bdeea12 \ - --hash=sha256:53c921b58fdc6485d6b2603e0132bb01cd59b8f0620ffc0907f525e0ba071687 \ - --hash=sha256:5578cf40440eafcb054cf859964bc120ab52ebe0e0562d2b898126d868749629 \ - --hash=sha256:59ee1925b5a5efdf6c4e7be51deee93984d0ac14a6897bd521b498b9916f1544 \ - --hash=sha256:670847ee6aeb3a569cd7cdfbe0c3bec1d44828bbfbe78c5d305f7f804870ef9e \ - --hash=sha256:78c657ece7a73b976905ab9ec8be9ef2df12ed8984c24598a1791c58ce3b4ce4 \ - --hash=sha256:7a9318da4b4ada9a67c1dd84d1c0834123081e746bee311a16bb449f363d965e \ - --hash=sha256:7b2f8107a3c329789f3c00b2daad0e35f548d0a55cda6291579136622099a46e \ - --hash=sha256:7ea4490360b605804bea8173d2d086b6c379d6bb22ac434de605a9cbce006e7d \ - --hash=sha256:8360c7cc620abb320e1b8d603c39095101391a82b1d0be05fb2225471c9c5c52 \ - --hash=sha256:875f7100ce0e74af51d4139495eec4025affa1a605280f23990b6434b81df1bd \ - --hash=sha256:8bedb1f6cb919af3b6353921c71281b1491f948ca64408871465d889b4ee1b66 \ - --hash=sha256:8d20cfe63a1c135d26bde8c1d0ea46fd1200884afbc523466d2f1cf517d1fe33 \ - --hash=sha256:9202f184cc0582b1db15056f2225ab4c1e3dac4d9ade50dd0613ac3c46352ac2 \ - --hash=sha256:9acfc7f652b31853eed3b92095b0acf06fd5597eeea42e939bd23a17137679d5 \ - 
--hash=sha256:9d18a8b44ec8502a7fde91446cd9c9b95ce7c49f1eacc1fb2358b8907d4369fd \ - --hash=sha256:9e67531370a3b07e49b280c1f8c2df67985c790ad2834d1b288a2f13cd341c5f \ - --hash=sha256:9ee6a4cdcbf54b8083dc9723cdf5f41f722c00db40ccf9ec2616e27869151129 \ - --hash=sha256:a7d9a606355655617fee25dd7e54d3af50804d002f1fd3118dd6312d26692d70 \ - --hash=sha256:aa3705a8d14de39898da0fbad920b2a37b7547c3afd2a18b9b81f0223b7d0f68 \ - --hash=sha256:b7215bf2b53bc6cb35808149980c2ae80a4ae4e273890ac85459c014d5aa60ac \ - --hash=sha256:badda65ac99555791eed75e234afb94686ed2317670c68bff8a4498acdaee935 \ - --hash=sha256:bf0e6cce113596377cadda4e3ac5fb89f095bd492226e46d91b4baef1dd16f60 \ - --hash=sha256:c171fc35d3174bbf4787381716564042a4cbc008824d8195eede3d9b938e29a8 \ - --hash=sha256:c1f6490dd1862af5aae6cfcf2a274bffa9a5b32a8f5acb519a7ecf5a99a88866 \ - --hash=sha256:c25b74a811dba37c7ea6a14d99eb9402d89c8d739d50748a75f3cf994cf19c43 \ - --hash=sha256:c6095aaf852c34f42e1bd0cf0dc32d1e4b48a90bfb5054abdbb9d64b36acadcb \ - --hash=sha256:c63f898f683d1379b9be5afc3dd139e20b30b0b1e0bf69a3fc3681f364cf1629 \ - --hash=sha256:cd8d62cab363dfe713067027a5adb4907515861f1e4ce63e7be810b83668b847 \ - --hash=sha256:ce91a24aac80de6be8512fb1c4838a9881aa713f44f4e91dd7bb3b34061b497d \ - --hash=sha256:cea52d11e02123f125f9055dfe0ccf1c3857225fb879e4a944fae12989e2aef2 \ - --hash=sha256:cf4efa2d01f697a7dbd0509891a286a4af0d86902fc594e20e3b1712c28c0106 \ - --hash=sha256:d2fa6fc7cc865d26ff42480ac9b52b8c9b7da30a10a6442a9cdf429de840e949 \ - --hash=sha256:d329300fb23e14ed1f8c6d688dfd867d1dcc3b1d7cd49b7f8c5b44e797ce0932 \ - --hash=sha256:d6177077a31b1aecfc3c9070bd2f11419dbb4a70f30f4c65b124714f525c2e48 \ - --hash=sha256:db37248535d1ae40735d15bdf26ad43be19e3d93ab3f3dad8507eb0f85bb8124 \ - --hash=sha256:db70a47987e34494b451a334605bee57a126fe8d290511349e86810b4be53b01 \ - --hash=sha256:dcefcf2915a2dbdbce37e2fc1622129a1918abfe3d06721ce9f6cdac9b6d2eaa \ - --hash=sha256:dda3ed0a7869d2fa16aa41f9961ade73aa2c2e3b2fcb0a352524e7b744881889 \ - 
--hash=sha256:e0bf378db07df0a713a1e32381a1b277e62ad106d0dbe17b5479e76ec706d720 \ - --hash=sha256:e13a05db87d3b241c186d0936808d0e4e12decc267c617d54e9c643807e968b6 \ - --hash=sha256:e143b0ef9cb1a2b4f74f56d4fbe50caa7c2bb93390aff52f9398d21d89bc73ea \ - --hash=sha256:e22d1721c978a6494adc824e0916f9d187fa57baeda34b55140315fa2f740184 \ - --hash=sha256:e5522ee72f95661e79db691310290c4618b86dff2d9b90baedf343fd7a08bf79 \ - --hash=sha256:e993676c71288618eb07e20622572b1250d8713e7e00ab3aabae28cb70f3640d \ - --hash=sha256:ee9afa1b0d2293c46954f47f33e150798ad68b78925e3710044e0d67a9487791 \ - --hash=sha256:f1ac5462582d6561c1c1708853a9faf612ff4e5ea5e679e99be36143d6eabd8e \ - --hash=sha256:f5022504adab881e2d801a88b748ea63f2a9d130e0b2c430824682a96f6534be \ - --hash=sha256:f5b973cce96793725ef63eb449adfb74f99c043c718acb76e0d2a447ae369962 \ - --hash=sha256:f7c58a240260822dc07f6ae32a0293dd5bccd618bb2d0f36d51c5dbd526f89c0 \ - --hash=sha256:fc6da202068e0a268e298d7cd09b6e9f3997736cd9b060e2750963754552a0a9 \ - --hash=sha256:fdadc3f6a32d6eca45f9a900a254757fd7855dfb2d8f8dcf0e88f0fae3ff8eb1 +aiohttp==3.11.9 \ + --hash=sha256:0411777249f25d11bd2964a230b3ffafcbed6cd65d0f2b132bc2b8f5b8c347c7 \ + --hash=sha256:0a97d657f6cf8782a830bb476c13f7d777cfcab8428ac49dde15c22babceb361 \ + --hash=sha256:0b5a5009b0159a8f707879dc102b139466d8ec6db05103ec1520394fdd8ea02c \ + --hash=sha256:0bcb7f6976dc0b6b56efde13294862adf68dd48854111b422a336fa729a82ea6 \ + --hash=sha256:14624d96f0d69cf451deed3173079a68c322279be6030208b045ab77e1e8d550 \ + --hash=sha256:15c4e489942d987d5dac0ba39e5772dcbed4cc9ae3710d1025d5ba95e4a5349c \ + --hash=sha256:176f8bb8931da0613bb0ed16326d01330066bb1e172dd97e1e02b1c27383277b \ + --hash=sha256:17af09d963fa1acd7e4c280e9354aeafd9e3d47eaa4a6bfbd2171ad7da49f0c5 \ + --hash=sha256:1a8b13b9950d8b2f8f58b6e5842c4b842b5887e2c32e3f4644d6642f1659a530 \ + --hash=sha256:202f40fb686e5f93908eee0c75d1e6fbe50a43e9bd4909bf3bf4a56b560ca180 \ + 
--hash=sha256:21cbe97839b009826a61b143d3ca4964c8590d7aed33d6118125e5b71691ca46 \ + --hash=sha256:27935716f8d62c1c73010428db310fd10136002cfc6d52b0ba7bdfa752d26066 \ + --hash=sha256:282e0a7ddd36ebc411f156aeaa0491e8fe7f030e2a95da532cf0c84b0b70bc66 \ + --hash=sha256:28f29bce89c3b401a53d6fd4bee401ee943083bf2bdc12ef297c1d63155070b0 \ + --hash=sha256:2ac9fd83096df36728da8e2f4488ac3b5602238f602706606f3702f07a13a409 \ + --hash=sha256:30f9f89ae625d412043f12ca3771b2ccec227cc93b93bb1f994db6e1af40a7d3 \ + --hash=sha256:317251b9c9a2f1a9ff9cd093775b34c6861d1d7df9439ce3d32a88c275c995cd \ + --hash=sha256:31de2f10f63f96cc19e04bd2df9549559beadd0b2ee2da24a17e7ed877ca8c60 \ + --hash=sha256:36df00e0541f264ce42d62280281541a47474dfda500bc5b7f24f70a7f87be7a \ + --hash=sha256:39625703540feb50b6b7f938b3856d1f4886d2e585d88274e62b1bd273fae09b \ + --hash=sha256:3f5461c77649358610fb9694e790956b4238ac5d9e697a17f63619c096469afe \ + --hash=sha256:4313f3bc901255b22f01663eeeae167468264fdae0d32c25fc631d5d6e15b502 \ + --hash=sha256:442356e8924fe1a121f8c87866b0ecdc785757fd28924b17c20493961b3d6697 \ + --hash=sha256:44cb1a1326a0264480a789e6100dc3e07122eb8cd1ad6b784a3d47d13ed1d89c \ + --hash=sha256:44d323aa80a867cb6db6bebb4bbec677c6478e38128847f2c6b0f70eae984d72 \ + --hash=sha256:499368eb904566fbdf1a3836a1532000ef1308f34a1bcbf36e6351904cced771 \ + --hash=sha256:4b01d9cfcb616eeb6d40f02e66bebfe7b06d9f2ef81641fdd50b8dd981166e0b \ + --hash=sha256:5720ebbc7a1b46c33a42d489d25d36c64c419f52159485e55589fbec648ea49a \ + --hash=sha256:5cc5e0d069c56645446c45a4b5010d4b33ac6c5ebfd369a791b5f097e46a3c08 \ + --hash=sha256:618b18c3a2360ac940a5503da14fa4f880c5b9bc315ec20a830357bcc62e6bae \ + --hash=sha256:6435a66957cdba1a0b16f368bde03ce9c79c57306b39510da6ae5312a1a5b2c1 \ + --hash=sha256:647ec5bee7e4ec9f1034ab48173b5fa970d9a991e565549b965e93331f1328fe \ + --hash=sha256:6e1e9e447856e9b7b3d38e1316ae9a8c92e7536ef48373de758ea055edfd5db5 \ + --hash=sha256:6ef1550bb5f55f71b97a6a395286db07f7f2c01c8890e613556df9a51da91e8d \ + 
--hash=sha256:6ffa45cc55b18d4ac1396d1ddb029f139b1d3480f1594130e62bceadf2e1a838 \ + --hash=sha256:77f31cebd8c27a36af6c7346055ac564946e562080ee1a838da724585c67474f \ + --hash=sha256:7a3b5b2c012d70c63d9d13c57ed1603709a4d9d7d473e4a9dfece0e4ea3d5f51 \ + --hash=sha256:7a7ddf981a0b953ade1c2379052d47ccda2f58ab678fca0671c7c7ca2f67aac2 \ + --hash=sha256:84de955314aa5e8d469b00b14d6d714b008087a0222b0f743e7ffac34ef56aff \ + --hash=sha256:8dcfd14c712aa9dd18049280bfb2f95700ff6a8bde645e09f17c3ed3f05a0130 \ + --hash=sha256:928f92f80e2e8d6567b87d3316c1fd9860ccfe36e87a9a7f5237d4cda8baa1ba \ + --hash=sha256:9384b07cfd3045b37b05ed002d1c255db02fb96506ad65f0f9b776b762a7572e \ + --hash=sha256:96726839a42429318017e67a42cca75d4f0d5248a809b3cc2e125445edd7d50d \ + --hash=sha256:96bbec47beb131bbf4bae05d8ef99ad9e5738f12717cfbbf16648b78b0232e87 \ + --hash=sha256:9bcf97b971289be69638d8b1b616f7e557e1342debc7fc86cf89d3f08960e411 \ + --hash=sha256:a0cf4d814689e58f57ecd5d8c523e6538417ca2e72ff52c007c64065cef50fb2 \ + --hash=sha256:a7c6147c6306f537cff59409609508a1d2eff81199f0302dd456bb9e7ea50c39 \ + --hash=sha256:a9266644064779840feec0e34f10a89b3ff1d2d6b751fe90017abcad1864fa7c \ + --hash=sha256:afbe85b50ade42ddff5669947afde9e8a610e64d2c80be046d67ec4368e555fa \ + --hash=sha256:afcda759a69c6a8be3aae764ec6733155aa4a5ad9aad4f398b52ba4037942fe3 \ + --hash=sha256:b2fab23003c4bb2249729a7290a76c1dda38c438300fdf97d4e42bf78b19c810 \ + --hash=sha256:bd3f711f4c99da0091ced41dccdc1bcf8be0281dc314d6d9c6b6cf5df66f37a9 \ + --hash=sha256:be0c7c98e38a1e3ad7a6ff64af8b6d6db34bf5a41b1478e24c3c74d9e7f8ed42 \ + --hash=sha256:c1f2d7fd583fc79c240094b3e7237d88493814d4b300d013a42726c35a734bc9 \ + --hash=sha256:c5bba6b83fde4ca233cfda04cbd4685ab88696b0c8eaf76f7148969eab5e248a \ + --hash=sha256:c6beeac698671baa558e82fa160be9761cf0eb25861943f4689ecf9000f8ebd0 \ + --hash=sha256:c7333e7239415076d1418dbfb7fa4df48f3a5b00f8fdf854fca549080455bc14 \ + --hash=sha256:c8a02f74ae419e3955af60f570d83187423e42e672a6433c5e292f1d23619269 \ + 
--hash=sha256:c9c23e62f3545c2216100603614f9e019e41b9403c47dd85b8e7e5015bf1bde0 \ + --hash=sha256:cca505829cdab58c2495ff418c96092d225a1bbd486f79017f6de915580d3c44 \ + --hash=sha256:d3108f0ad5c6b6d78eec5273219a5bbd884b4aacec17883ceefaac988850ce6e \ + --hash=sha256:d4b8a1b6c7a68c73191f2ebd3bf66f7ce02f9c374e309bdb68ba886bbbf1b938 \ + --hash=sha256:d6e274661c74195708fc4380a4ef64298926c5a50bb10fbae3d01627d7a075b7 \ + --hash=sha256:db2914de2559809fdbcf3e48f41b17a493b58cb7988d3e211f6b63126c55fe82 \ + --hash=sha256:e738aabff3586091221044b7a584865ddc4d6120346d12e28e788307cd731043 \ + --hash=sha256:e7f6173302f8a329ca5d1ee592af9e628d3ade87816e9958dcf7cdae2841def7 \ + --hash=sha256:e9d036a9a41fc78e8a3f10a86c2fc1098fca8fab8715ba9eb999ce4788d35df0 \ + --hash=sha256:ea142255d4901b03f89cb6a94411ecec117786a76fc9ab043af8f51dd50b5313 \ + --hash=sha256:ebd3e6b0c7d4954cca59d241970011f8d3327633d555051c430bd09ff49dc494 \ + --hash=sha256:ec656680fc53a13f849c71afd0c84a55c536206d524cbc831cde80abbe80489e \ + --hash=sha256:ec8df0ff5a911c6d21957a9182402aad7bf060eaeffd77c9ea1c16aecab5adbf \ + --hash=sha256:ed95d66745f53e129e935ad726167d3a6cb18c5d33df3165974d54742c373868 \ + --hash=sha256:ef2c9499b7bd1e24e473dc1a85de55d72fd084eea3d8bdeec7ee0720decb54fa \ + --hash=sha256:f5252ba8b43906f206048fa569debf2cd0da0316e8d5b4d25abe53307f573941 \ + --hash=sha256:f737fef6e117856400afee4f17774cdea392b28ecf058833f5eca368a18cf1bf \ + --hash=sha256:fc726c3fa8f606d07bd2b500e5dc4c0fd664c59be7788a16b9e34352c50b6b6b # via -r requirements.in aiosignal==1.3.1 \ --hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \ @@ -625,105 +625,89 @@ pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 # via pytest -propcache==0.2.0 \ - --hash=sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9 \ - 
--hash=sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763 \ - --hash=sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325 \ - --hash=sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb \ - --hash=sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b \ - --hash=sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09 \ - --hash=sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957 \ - --hash=sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68 \ - --hash=sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f \ - --hash=sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798 \ - --hash=sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418 \ - --hash=sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6 \ - --hash=sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162 \ - --hash=sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f \ - --hash=sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036 \ - --hash=sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8 \ - --hash=sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2 \ - --hash=sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110 \ - --hash=sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23 \ - --hash=sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8 \ - --hash=sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638 \ - --hash=sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a \ - --hash=sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44 \ - --hash=sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2 \ - --hash=sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2 \ - 
--hash=sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850 \ - --hash=sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136 \ - --hash=sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b \ - --hash=sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887 \ - --hash=sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89 \ - --hash=sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87 \ - --hash=sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348 \ - --hash=sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4 \ - --hash=sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861 \ - --hash=sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e \ - --hash=sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c \ - --hash=sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b \ - --hash=sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb \ - --hash=sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1 \ - --hash=sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de \ - --hash=sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354 \ - --hash=sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563 \ - --hash=sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5 \ - --hash=sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf \ - --hash=sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9 \ - --hash=sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12 \ - --hash=sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4 \ - --hash=sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5 \ - --hash=sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71 \ - 
--hash=sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9 \ - --hash=sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed \ - --hash=sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336 \ - --hash=sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90 \ - --hash=sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063 \ - --hash=sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad \ - --hash=sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6 \ - --hash=sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8 \ - --hash=sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e \ - --hash=sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2 \ - --hash=sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7 \ - --hash=sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d \ - --hash=sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d \ - --hash=sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df \ - --hash=sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b \ - --hash=sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178 \ - --hash=sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2 \ - --hash=sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630 \ - --hash=sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48 \ - --hash=sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61 \ - --hash=sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89 \ - --hash=sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb \ - --hash=sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3 \ - --hash=sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6 \ - 
--hash=sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562 \ - --hash=sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b \ - --hash=sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58 \ - --hash=sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db \ - --hash=sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99 \ - --hash=sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37 \ - --hash=sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83 \ - --hash=sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a \ - --hash=sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d \ - --hash=sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04 \ - --hash=sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70 \ - --hash=sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544 \ - --hash=sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394 \ - --hash=sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea \ - --hash=sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7 \ - --hash=sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1 \ - --hash=sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793 \ - --hash=sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577 \ - --hash=sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7 \ - --hash=sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57 \ - --hash=sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d \ - --hash=sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032 \ - --hash=sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d \ - --hash=sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016 \ - 
--hash=sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504 +propcache==0.2.1 \ + --hash=sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4 \ + --hash=sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4 \ + --hash=sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a \ + --hash=sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f \ + --hash=sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9 \ + --hash=sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d \ + --hash=sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e \ + --hash=sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6 \ + --hash=sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf \ + --hash=sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034 \ + --hash=sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d \ + --hash=sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16 \ + --hash=sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30 \ + --hash=sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba \ + --hash=sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95 \ + --hash=sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d \ + --hash=sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae \ + --hash=sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348 \ + --hash=sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2 \ + --hash=sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64 \ + --hash=sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce \ + --hash=sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54 \ + 
--hash=sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629 \ + --hash=sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54 \ + --hash=sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1 \ + --hash=sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b \ + --hash=sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf \ + --hash=sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b \ + --hash=sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587 \ + --hash=sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097 \ + --hash=sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea \ + --hash=sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24 \ + --hash=sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7 \ + --hash=sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541 \ + --hash=sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6 \ + --hash=sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634 \ + --hash=sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3 \ + --hash=sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d \ + --hash=sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034 \ + --hash=sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465 \ + --hash=sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2 \ + --hash=sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf \ + --hash=sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1 \ + --hash=sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04 \ + --hash=sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5 \ + --hash=sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583 \ + 
--hash=sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb \ + --hash=sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b \ + --hash=sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c \ + --hash=sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958 \ + --hash=sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc \ + --hash=sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4 \ + --hash=sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82 \ + --hash=sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e \ + --hash=sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce \ + --hash=sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9 \ + --hash=sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518 \ + --hash=sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536 \ + --hash=sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505 \ + --hash=sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052 \ + --hash=sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff \ + --hash=sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1 \ + --hash=sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f \ + --hash=sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681 \ + --hash=sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347 \ + --hash=sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af \ + --hash=sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246 \ + --hash=sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787 \ + --hash=sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0 \ + --hash=sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f \ + 
--hash=sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439 \ + --hash=sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3 \ + --hash=sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6 \ + --hash=sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca \ + --hash=sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec \ + --hash=sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d \ + --hash=sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3 \ + --hash=sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16 \ + --hash=sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717 \ + --hash=sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6 \ + --hash=sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd \ + --hash=sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212 # via # aiohttp # yarl @@ -733,18 +717,18 @@ proto-plus==1.25.0 \ # via # -r requirements.in # google-api-core -protobuf==5.28.3 \ - --hash=sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24 \ - --hash=sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535 \ - --hash=sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b \ - --hash=sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548 \ - --hash=sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584 \ - --hash=sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b \ - --hash=sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36 \ - --hash=sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135 \ - --hash=sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868 \ - --hash=sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687 \ - 
--hash=sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed +protobuf==5.29.0 \ + --hash=sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a \ + --hash=sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20 \ + --hash=sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac \ + --hash=sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069 \ + --hash=sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001 \ + --hash=sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368 \ + --hash=sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1 \ + --hash=sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f \ + --hash=sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43 \ + --hash=sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86 \ + --hash=sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2 # via # -r requirements.in # google-api-core @@ -765,9 +749,9 @@ pypandoc==1.14 \ --hash=sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22 \ --hash=sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197 # via -r requirements.in -pytest==8.3.3 \ - --hash=sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181 \ - --hash=sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2 +pytest==8.3.4 \ + --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ + --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 # via pytest-asyncio pytest-asyncio==0.24.0 \ --hash=sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b \ @@ -838,9 +822,39 @@ rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -tomli==2.1.0 \ - 
--hash=sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8 \ - --hash=sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391 +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + 
--hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via pytest typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ @@ -850,87 +864,87 @@ urllib3==2.2.3 \ --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via requests -yarl==1.18.0 \ - --hash=sha256:01be8688fc211dc237e628fcc209dda412d35de7642453059a0553747018d075 \ - --hash=sha256:039c299a0864d1f43c3e31570045635034ea7021db41bf4842693a72aca8df3a \ - --hash=sha256:074fee89caab89a97e18ef5f29060ef61ba3cae6cd77673acc54bfdd3214b7b7 \ - --hash=sha256:13aaf2bdbc8c86ddce48626b15f4987f22e80d898818d735b20bd58f17292ee8 \ - --hash=sha256:14408cc4d34e202caba7b5ac9cc84700e3421a9e2d1b157d744d101b061a4a88 \ - --hash=sha256:1db1537e9cb846eb0ff206eac667f627794be8b71368c1ab3207ec7b6f8c5afc \ - --hash=sha256:1ece25e2251c28bab737bdf0519c88189b3dd9492dc086a1d77336d940c28ced \ - --hash=sha256:1ff116f0285b5c8b3b9a2680aeca29a858b3b9e0402fc79fd850b32c2bcb9f8b \ - 
--hash=sha256:205de377bd23365cd85562c9c6c33844050a93661640fda38e0567d2826b50df \ - --hash=sha256:20d95535e7d833889982bfe7cc321b7f63bf8879788fee982c76ae2b24cfb715 \ - --hash=sha256:20de4a8b04de70c49698dc2390b7fd2d18d424d3b876371f9b775e2b462d4b41 \ - --hash=sha256:2d90f2e4d16a5b0915ee065218b435d2ef619dd228973b1b47d262a6f7cd8fa5 \ - --hash=sha256:2e6b4466714a73f5251d84b471475850954f1fa6acce4d3f404da1d55d644c34 \ - --hash=sha256:309f8d27d6f93ceeeb80aa6980e883aa57895270f7f41842b92247e65d7aeddf \ - --hash=sha256:32141e13a1d5a48525e519c9197d3f4d9744d818d5c7d6547524cc9eccc8971e \ - --hash=sha256:34176bfb082add67cb2a20abd85854165540891147f88b687a5ed0dc225750a0 \ - --hash=sha256:38b39b7b3e692b6c92b986b00137a3891eddb66311b229d1940dcbd4f025083c \ - --hash=sha256:3a3709450a574d61be6ac53d582496014342ea34876af8dc17cc16da32826c9a \ - --hash=sha256:3adaaf9c6b1b4fc258584f4443f24d775a2086aee82d1387e48a8b4f3d6aecf6 \ - --hash=sha256:3f576ed278860df2721a5d57da3381040176ef1d07def9688a385c8330db61a1 \ - --hash=sha256:42ba84e2ac26a3f252715f8ec17e6fdc0cbf95b9617c5367579fafcd7fba50eb \ - --hash=sha256:454902dc1830d935c90b5b53c863ba2a98dcde0fbaa31ca2ed1ad33b2a7171c6 \ - --hash=sha256:466d31fd043ef9af822ee3f1df8fdff4e8c199a7f4012c2642006af240eade17 \ - --hash=sha256:49a98ecadc5a241c9ba06de08127ee4796e1009555efd791bac514207862b43d \ - --hash=sha256:4d26f1fa9fa2167bb238f6f4b20218eb4e88dd3ef21bb8f97439fa6b5313e30d \ - --hash=sha256:52c136f348605974c9b1c878addd6b7a60e3bf2245833e370862009b86fa4689 \ - --hash=sha256:536a7a8a53b75b2e98ff96edb2dfb91a26b81c4fed82782035767db5a465be46 \ - --hash=sha256:576d258b21c1db4c6449b1c572c75d03f16a482eb380be8003682bdbe7db2f28 \ - --hash=sha256:609ffd44fed2ed88d9b4ef62ee860cf86446cf066333ad4ce4123505b819e581 \ - --hash=sha256:67b336c15e564d76869c9a21316f90edf546809a5796a083b8f57c845056bc01 \ - --hash=sha256:685cc37f3f307c6a8e879986c6d85328f4c637f002e219f50e2ef66f7e062c1d \ - --hash=sha256:6a49ad0102c0f0ba839628d0bf45973c86ce7b590cdedf7540d5b1833ddc6f00 \ - 
--hash=sha256:6fb64dd45453225f57d82c4764818d7a205ee31ce193e9f0086e493916bd4f72 \ - --hash=sha256:701bb4a8f4de191c8c0cc9a1e6d5142f4df880e9d1210e333b829ca9425570ed \ - --hash=sha256:73553bbeea7d6ec88c08ad8027f4e992798f0abc459361bf06641c71972794dc \ - --hash=sha256:7520e799b1f84e095cce919bd6c23c9d49472deeef25fe1ef960b04cca51c3fc \ - --hash=sha256:7609b8462351c4836b3edce4201acb6dd46187b207c589b30a87ffd1813b48dc \ - --hash=sha256:7db9584235895a1dffca17e1c634b13870852094f6389b68dcc6338086aa7b08 \ - --hash=sha256:7fa7d37f2ada0f42e0723632993ed422f2a679af0e200874d9d861720a54f53e \ - --hash=sha256:80741ec5b471fbdfb997821b2842c59660a1c930ceb42f8a84ba8ca0f25a66aa \ - --hash=sha256:8254dbfce84ee5d1e81051ee7a0f1536c108ba294c0fdb5933476398df0654f3 \ - --hash=sha256:8b8d3e4e014fb4274f1c5bf61511d2199e263909fb0b8bda2a7428b0894e8dc6 \ - --hash=sha256:8e1c18890091aa3cc8a77967943476b729dc2016f4cfe11e45d89b12519d4a93 \ - --hash=sha256:9106025c7f261f9f5144f9aa7681d43867eed06349a7cfb297a1bc804de2f0d1 \ - --hash=sha256:91b8fb9427e33f83ca2ba9501221ffaac1ecf0407f758c4d2f283c523da185ee \ - --hash=sha256:96404e8d5e1bbe36bdaa84ef89dc36f0e75939e060ca5cd45451aba01db02902 \ - --hash=sha256:9b4c90c5363c6b0a54188122b61edb919c2cd1119684999d08cd5e538813a28e \ - --hash=sha256:a0509475d714df8f6d498935b3f307cd122c4ca76f7d426c7e1bb791bcd87eda \ - --hash=sha256:a173401d7821a2a81c7b47d4e7d5c4021375a1441af0c58611c1957445055056 \ - --hash=sha256:a45d94075ac0647621eaaf693c8751813a3eccac455d423f473ffed38c8ac5c9 \ - --hash=sha256:a5f72421246c21af6a92fbc8c13b6d4c5427dfd949049b937c3b731f2f9076bd \ - --hash=sha256:a64619a9c47c25582190af38e9eb382279ad42e1f06034f14d794670796016c0 \ - --hash=sha256:a7ee6884a8848792d58b854946b685521f41d8871afa65e0d4a774954e9c9e89 \ - --hash=sha256:ae38bd86eae3ba3d2ce5636cc9e23c80c9db2e9cb557e40b98153ed102b5a736 \ - --hash=sha256:b026cf2c32daf48d90c0c4e406815c3f8f4cfe0c6dfccb094a9add1ff6a0e41a \ - --hash=sha256:b0a2074a37285570d54b55820687de3d2f2b9ecf1b714e482e48c9e7c0402038 \ - 
--hash=sha256:b1a3297b9cad594e1ff0c040d2881d7d3a74124a3c73e00c3c71526a1234a9f7 \ - --hash=sha256:b212452b80cae26cb767aa045b051740e464c5129b7bd739c58fbb7deb339e7b \ - --hash=sha256:b234a4a9248a9f000b7a5dfe84b8cb6210ee5120ae70eb72a4dcbdb4c528f72f \ - --hash=sha256:b4095c5019bb889aa866bf12ed4c85c0daea5aafcb7c20d1519f02a1e738f07f \ - --hash=sha256:b8e8c516dc4e1a51d86ac975b0350735007e554c962281c432eaa5822aa9765c \ - --hash=sha256:bd80ed29761490c622edde5dd70537ca8c992c2952eb62ed46984f8eff66d6e8 \ - --hash=sha256:c083f6dd6951b86e484ebfc9c3524b49bcaa9c420cb4b2a78ef9f7a512bfcc85 \ - --hash=sha256:c0f4808644baf0a434a3442df5e0bedf8d05208f0719cedcd499e168b23bfdc4 \ - --hash=sha256:c4cb992d8090d5ae5f7afa6754d7211c578be0c45f54d3d94f7781c495d56716 \ - --hash=sha256:c60e547c0a375c4bfcdd60eef82e7e0e8698bf84c239d715f5c1278a73050393 \ - --hash=sha256:c73a6bbc97ba1b5a0c3c992ae93d721c395bdbb120492759b94cc1ac71bc6350 \ - --hash=sha256:c893f8c1a6d48b25961e00922724732d00b39de8bb0b451307482dc87bddcd74 \ - --hash=sha256:cd6ab7d6776c186f544f893b45ee0c883542b35e8a493db74665d2e594d3ca75 \ - --hash=sha256:d89ae7de94631b60d468412c18290d358a9d805182373d804ec839978b120422 \ - --hash=sha256:d9d4f5e471e8dc49b593a80766c2328257e405f943c56a3dc985c125732bc4cf \ - --hash=sha256:da206d1ec78438a563c5429ab808a2b23ad7bc025c8adbf08540dde202be37d5 \ - --hash=sha256:dbf53db46f7cf176ee01d8d98c39381440776fcda13779d269a8ba664f69bec0 \ - --hash=sha256:dd21c0128e301851de51bc607b0a6da50e82dc34e9601f4b508d08cc89ee7929 \ - --hash=sha256:e2580c1d7e66e6d29d6e11855e3b1c6381971e0edd9a5066e6c14d79bc8967af \ - --hash=sha256:e3818eabaefb90adeb5e0f62f047310079d426387991106d4fbf3519eec7d90a \ - --hash=sha256:ed69af4fe2a0949b1ea1d012bf065c77b4c7822bad4737f17807af2adb15a73c \ - --hash=sha256:f172b8b2c72a13a06ea49225a9c47079549036ad1b34afa12d5491b881f5b993 \ - --hash=sha256:f275ede6199d0f1ed4ea5d55a7b7573ccd40d97aee7808559e1298fe6efc8dbd \ - --hash=sha256:f7edeb1dcc7f50a2c8e08b9dc13a413903b7817e72273f00878cb70e766bdb3b \ - 
--hash=sha256:fa2c9cb607e0f660d48c54a63de7a9b36fef62f6b8bd50ff592ce1137e73ac7d \ - --hash=sha256:fe94d1de77c4cd8caff1bd5480e22342dbd54c93929f5943495d9c1e8abe9f42 +yarl==1.18.3 \ + --hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \ + --hash=sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193 \ + --hash=sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318 \ + --hash=sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee \ + --hash=sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e \ + --hash=sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1 \ + --hash=sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a \ + --hash=sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186 \ + --hash=sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1 \ + --hash=sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50 \ + --hash=sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640 \ + --hash=sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb \ + --hash=sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8 \ + --hash=sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc \ + --hash=sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5 \ + --hash=sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58 \ + --hash=sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2 \ + --hash=sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393 \ + --hash=sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24 \ + --hash=sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b \ + --hash=sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910 \ + 
--hash=sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c \ + --hash=sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272 \ + --hash=sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed \ + --hash=sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1 \ + --hash=sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04 \ + --hash=sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d \ + --hash=sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5 \ + --hash=sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d \ + --hash=sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889 \ + --hash=sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae \ + --hash=sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b \ + --hash=sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c \ + --hash=sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576 \ + --hash=sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34 \ + --hash=sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477 \ + --hash=sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990 \ + --hash=sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2 \ + --hash=sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512 \ + --hash=sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069 \ + --hash=sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a \ + --hash=sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6 \ + --hash=sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0 \ + --hash=sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8 \ + --hash=sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb \ + 
--hash=sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa \ + --hash=sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8 \ + --hash=sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e \ + --hash=sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e \ + --hash=sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985 \ + --hash=sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8 \ + --hash=sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1 \ + --hash=sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5 \ + --hash=sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690 \ + --hash=sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10 \ + --hash=sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789 \ + --hash=sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b \ + --hash=sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca \ + --hash=sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e \ + --hash=sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5 \ + --hash=sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59 \ + --hash=sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9 \ + --hash=sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8 \ + --hash=sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db \ + --hash=sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde \ + --hash=sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7 \ + --hash=sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb \ + --hash=sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3 \ + --hash=sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6 \ + 
--hash=sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285 \ + --hash=sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb \ + --hash=sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8 \ + --hash=sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482 \ + --hash=sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd \ + --hash=sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75 \ + --hash=sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760 \ + --hash=sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782 \ + --hash=sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53 \ + --hash=sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2 \ + --hash=sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1 \ + --hash=sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719 \ + --hash=sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62 # via aiohttp From 9b9249a902a25bbcd625445e250469baee21f4d8 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 3 Dec 2024 16:32:29 +0500 Subject: [PATCH 1221/1339] feat: add debug log when creating client (#2265) Co-authored-by: Anthonios Partheniou --- .../gapic-generator/gapic/schema/metadata.py | 5 ++++ .../%sub/services/%service/async_client.py.j2 | 19 ++++++++++++++ .../%sub/services/%service/client.py.j2 | 25 +++++++++++++++++++ .../services/asset_service/async_client.py | 19 ++++++++++++++ .../asset_v1/services/asset_service/client.py | 25 +++++++++++++++++++ .../services/iam_credentials/async_client.py | 19 ++++++++++++++ .../services/iam_credentials/client.py | 25 +++++++++++++++++++ .../services/eventarc/async_client.py | 19 ++++++++++++++ .../eventarc_v1/services/eventarc/client.py | 25 +++++++++++++++++++ .../config_service_v2/async_client.py | 19 ++++++++++++++ 
.../services/config_service_v2/client.py | 25 +++++++++++++++++++ .../logging_service_v2/async_client.py | 19 ++++++++++++++ .../services/logging_service_v2/client.py | 25 +++++++++++++++++++ .../metrics_service_v2/async_client.py | 19 ++++++++++++++ .../services/metrics_service_v2/client.py | 25 +++++++++++++++++++ .../services/cloud_redis/async_client.py | 19 ++++++++++++++ .../redis_v1/services/cloud_redis/client.py | 25 +++++++++++++++++++ .../tests/unit/schema/test_metadata.py | 6 +++++ 18 files changed, 363 insertions(+) diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index dc9389e8f566..480cde40cd8a 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -173,6 +173,11 @@ def proto_package(self) -> str: """Return the proto package for this type.""" return '.'.join(self.package) + @property + def proto_package_versioned(self) -> str: + """Return the versioned proto package for this type.""" + return ".".join(self.convert_to_versioned_package()) + def convert_to_versioned_package(self) -> Tuple[str, ...]: # We need to change the import statement to use an # underscore between the module and the version. 
For example, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 9622866d87e1..aaf8b02acf0a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -4,6 +4,7 @@ {% import "%namespace/%name_%version/%sub/services/%service/_client_macros.j2" as macros %} {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}AsyncIterable, Awaitable, {% endif %}{% if service.any_client_streaming %}AsyncIterator, {% endif %}Sequence, Tuple, Type, Union @@ -52,6 +53,13 @@ from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .client import {{ service.client_name }} +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) {# TODO(yon-mg): handle rest transport async client interaction #} class {{ service.async_client_name }}: @@ -243,6 +251,17 @@ class {{ service.async_client_name }}: ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}`.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "universeDomain": 
getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + {% for method in service.methods.values() %} {% with method_name = method.safe_name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 4d78dc0c754a..0bcc2406ccd3 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -10,6 +10,7 @@ from collections import OrderedDict {% if service.any_extended_operations_methods %} import functools {% endif %} +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, {% if service.any_server_streaming %}Iterable, {% endif %}{% if service.any_client_streaming %}Iterator, {% endif %}Sequence, Tuple, Type, Union, cast @@ -39,6 +40,14 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + {% filter sort_lines %} {% for method in service.methods.values() %} {% for ref_type in method.flat_ref_types %} @@ -510,6 +519,10 @@ class {{ service.client_name }}(metaclass={{ 
service.client_name }}Meta): # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -596,6 +609,18 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): always_use_jwt_access=True, api_audience=self._client_options.api_audience, ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}`.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 9effc7f1f163..c911b2dd52ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -46,6 +47,13 @@ from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport from .client import AssetServiceClient +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class AssetServiceAsyncClient: """Asset service definition.""" @@ -247,6 +255,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def export_assets(self, request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index ffbc99c45deb..779298b9c673 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -14,6 +14,7 @@ # limitations under the License. 
# from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.asset_v1.services.asset_service import pagers @@ -566,6 +575,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -618,6 +631,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.asset_v1.AssetServiceClient`.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def export_assets(self, request: Optional[Union[asset_service.ExportAssetsRequest, dict]] = None, *, diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index ca93c43e38a1..4dc6b9db322b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -39,6 +40,13 @@ from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport from .client import IAMCredentialsClient +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class IAMCredentialsAsyncClient: """A service account is a special type of Google account that @@ -238,6 +246,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.iam.credentials_v1.IAMCredentialsAsyncClient`.", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def 
generate_access_token(self, request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index c8adeac9aad8..2d21452f9528 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.iam.credentials_v1.types import common from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -503,6 +512,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -555,6 +568,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.iam.credentials_v1.IAMCredentialsClient`.", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def generate_access_token(self, request: Optional[Union[common.GenerateAccessTokenRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 38daa4f1c614..d3c86f00e4f2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -55,6 +56,13 @@ from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport from .client import EventarcClient +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class EventarcAsyncClient: """Eventarc allows users to subscribe to various events that are @@ -265,6 +273,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.eventarc_v1.EventarcAsyncClient`.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def get_trigger(self, request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index a6f0082d3500..445c03ea58b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -14,6 +14,7 @@ # limitations under the License. 
# from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.eventarc_v1.services.eventarc import pagers @@ -611,6 +620,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -663,6 +676,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.eventarc_v1.EventarcClient`.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def get_trigger(self, request: Optional[Union[eventarc.GetTriggerRequest, dict]] = None, *, diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index d053be16137b..e0f9de656338 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -44,6 +45,13 @@ from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class ConfigServiceV2AsyncClient: """Service for configuring sinks used to route log entries.""" @@ -242,6 +250,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def list_buckets(self, request: 
Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index f6d5e698b062..1d7cabde8fac 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers @@ -559,6 +568,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -611,6 +624,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.ConfigServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def list_buckets(self, request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index a07c53afaa74..a74f91a91209 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union @@ -41,6 +42,13 @@ from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class LoggingServiceV2AsyncClient: """Service for ingesting and querying logs.""" @@ -227,6 +235,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def delete_log(self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 933daac99fa4..82234b385637 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -14,6 +14,7 @@ # limitations under the License. 
# from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers from google.cloud.logging_v2.types import log_entry @@ -490,6 +499,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -542,6 +555,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def delete_log(self, request: Optional[Union[logging.DeleteLogRequest, dict]] = None, *, diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index d6f06c1f1834..82c28b28969c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -42,6 +43,13 @@ from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class MetricsServiceV2AsyncClient: """Service for configuring logs-based metrics.""" @@ -228,6 +236,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def list_log_metrics(self, request: 
Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 33a386b28b32..9c60ff4a0406 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -14,6 +14,7 @@ # limitations under the License. # from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore from google.cloud.logging_v2.services.metrics_service_v2 import pagers @@ -491,6 +500,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -543,6 +556,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.MetricsServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def list_log_metrics(self, request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 55e33e52bd3e..cfc8a14b939f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union @@ -45,6 +46,13 @@ from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .client import CloudRedisClient +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) class CloudRedisAsyncClient: """Configures and manages Cloud Memorystore for Redis instances @@ -255,6 +263,17 @@ def __init__(self, *, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + async def list_instances(self, request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, *, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 895f5c98d014..f69d514e0082 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -14,6 +14,7 @@ # limitations under the License. 
# from collections import OrderedDict +import logging as std_logging import os import re from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast @@ -36,6 +37,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -530,6 +539,10 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError("client_options.api_key and credentials are mutually exclusive") @@ -604,6 +617,18 @@ def __init__(self, *, api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.redis_v1.CloudRedisClient`.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + }, + ) + def list_instances(self, request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, *, diff --git 
a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index df8967da71b4..3189b3a9fd46 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -97,6 +97,12 @@ def test_proto_package_version_parsing(): addr = metadata.Address(package=("bah", "v20p1", "baj", "v3")) assert addr.convert_to_versioned_package() == ("bah", "v20p1", "baj_v3") + addr = metadata.Address(package=("bab", "v1")) + assert addr.proto_package_versioned == "bab_v1" + + addr = metadata.Address(package=("bah", "v20p1", "baj", "v3")) + assert addr.proto_package_versioned == "bah.v20p1.baj_v3" + def test_address_child_no_parent(): addr = metadata.Address(package=('foo', 'bar'), module='baz') From 3d792f44a54cac575236685563b66fa1cd3bacf7 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 6 Dec 2024 14:24:22 -0500 Subject: [PATCH 1222/1339] feat: add debug log when sending requests via REST (#2270) Co-authored-by: ohmayr Co-authored-by: Victor Chudnovsky --- .../%sub/services/%service/_shared_macros.j2 | 42 +- .../%sub/services/%service/client.py.j2 | 4 +- .../services/%service/transports/rest.py.j2 | 29 +- .../%service/transports/rest_asyncio.py.j2 | 29 +- .../asset_v1/services/asset_service/client.py | 4 +- .../services/asset_service/transports/rest.py | 817 +++++++++++++++- .../services/iam_credentials/client.py | 4 +- .../iam_credentials/transports/rest.py | 150 ++- .../eventarc_v1/services/eventarc/client.py | 4 +- .../services/eventarc/transports/rest.py | 916 +++++++++++++++++- .../services/config_service_v2/client.py | 4 +- .../services/logging_service_v2/client.py | 4 +- .../services/metrics_service_v2/client.py | 4 +- .../redis_v1/services/cloud_redis/client.py | 4 +- .../services/cloud_redis/transports/rest.py | 603 +++++++++++- .../cloud_redis/transports/rest_asyncio.py | 604 ++++++++++++ 16 files changed, 3196 insertions(+), 
26 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 9dd9bac88bbf..1623d1b1155d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -195,13 +195,15 @@ def _get_http_options(): Args: body_spec (str): The http options body i.e. method.http_options[0].body method_name (str): The method name. - service_name (str): The service name. + service: The service. is_async (bool): Used to determine the code path i.e. whether for sync or async call. #} -{% macro rest_call_method_common(body_spec, method_name, service_name, is_async=False) %} +{% macro rest_call_method_common(body_spec, method_name, service, is_async=False) %} +{% set service_name = service.name %} {% set await_prefix = "await " if is_async else "" %} {% set async_class_prefix = "Async" if is_async else "" %} http_options = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_http_options() + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2274): Add debug log before intercepting a request #} request, metadata = {{ await_prefix }}self._interceptor.pre_{{ method_name|snake_case }}(request, metadata) transcoded_request = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_transcoded_request(http_options, request) @@ -212,6 +214,24 @@ def _get_http_options(): # Jsonify the query params query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = 
transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method_name }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ method_name }}", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = {{ await_prefix }}{{ async_class_prefix }}{{ service_name }}RestTransport._{{method_name}}._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request{% if body_spec %}, body{% endif %}) @@ -434,7 +454,7 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m {{ sig.response_type }}: Response from {{ name }} method. {% endif %} """ - {{ rest_call_method_common(body_spec, name, service.name, is_async)|indent(4) }} + {{ rest_call_method_common(body_spec, name, service, is_async)|indent(4) }} {% if sig.response_type == "None" %} return {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(None) @@ -447,6 +467,22 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m resp = {{ sig.response_type }}() resp = json_format.Parse(content, resp) resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}.{{ name }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ name }}", + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and 
may need to be cleaned up #} + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0bcc2406ccd3..8d0955910ce5 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -617,8 +617,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): extra = { "serviceName": "{{ service.meta.address.proto }}", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 231a436c0877..008e590f089e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -3,9 +3,10 @@ {% block content %} +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth 
import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -40,6 +41,13 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -227,7 +235,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %} """ - {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name)|indent(8) }} + {{ shared_macros.rest_call_method_common(body_spec, method.name, service)|indent(8) }} {% if not method.void %} # Return the response @@ -246,7 +254,24 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) {% endif %}{# method.lro #} + {#- TODO(https://github.com/googleapis/gapic-generator-python/issues/2274): Add debug log before intercepting a request #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(resp){% else %}json_format.MessageToJson(resp){% endif %}, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method.transport_safe_name|snake_case }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ method.name }}", + {# 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp {% endif %}{# method.void #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index c7d91a0b3168..f07090bca22d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -58,6 +58,17 @@ from .rest_base import _Base{{ service.name }}RestTransport from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +import logging + +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -188,7 +199,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %} """ - {{ shared_macros.rest_call_method_common(body_spec, method.name, service.name, is_async=True)|indent(8) }} + {{ shared_macros.rest_call_method_common(body_spec, method.name, service, is_async=True)|indent(8) }} {% if not method.void %} # Return the response @@ -206,6 +217,22 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): json_format.Parse(content, pb_resp, ignore_unknown_fields=True) {% endif %}{# if method.server_streaming #} resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) + if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(response){% endif %}, + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for {{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}.{{ method.transport_safe_name|snake_case }}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": "{{ method.name }}", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp {% endif %}{# method.void #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 779298b9c673..1b536ff11baf 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -638,8 +638,8 @@ def __init__(self, *, extra = { "serviceName": "google.cloud.asset.v1.AssetService", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index df63e0de3271..3e07dd39a83c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -45,6 +46,13 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -818,12 +826,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_analyze_iam_policy(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER 
+ request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicy", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -837,7 +864,23 @@ def __call__(self, pb_resp = asset_service.AnalyzeIamPolicyResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.AnalyzeIamPolicyResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeIamPolicyLongrunning(_BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning, AssetServiceRestStub): @@ -897,6 +940,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_http_options() + request, metadata = self._interceptor.pre_analyze_iam_policy_longrunning(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_transcoded_request(http_options, request) @@ -905,6 +949,24 @@ def 
__call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicyLongrunning", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeIamPolicyLongrunning._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -916,7 +978,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_iam_policy_longrunning", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeIamPolicyLongrunning", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeMove(_BaseAssetServiceRestTransport._BaseAnalyzeMove, AssetServiceRestStub): @@ -973,12 +1051,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_http_options() + 
request, metadata = self._interceptor.pre_analyze_move(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeMove", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeMove._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -992,7 +1089,23 @@ def __call__(self, pb_resp = asset_service.AnalyzeMoveResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_move(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.AnalyzeMoveResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_move", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeMove", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeOrgPolicies(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies, AssetServiceRestStub): @@ -1049,12 +1162,31 @@ def __call__(self, """ 
http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_http_options() + request, metadata = self._interceptor.pre_analyze_org_policies(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicies", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeOrgPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1068,7 +1200,23 @@ def __call__(self, pb_resp = asset_service.AnalyzeOrgPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policies(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.AnalyzeOrgPoliciesResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + 
}, + ) return resp class _AnalyzeOrgPolicyGovernedAssets(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets, AssetServiceRestStub): @@ -1126,12 +1274,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_http_options() + request, metadata = self._interceptor.pre_analyze_org_policy_governed_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedAssets", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1145,7 +1312,23 @@ def __call__(self, pb_resp = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(resp), + "headers": dict(response.headers), + 
"status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _AnalyzeOrgPolicyGovernedContainers(_BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers, AssetServiceRestStub): @@ -1203,12 +1386,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_http_options() + request, metadata = self._interceptor.pre_analyze_org_policy_governed_containers(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedContainers", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._AnalyzeOrgPolicyGovernedContainers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1222,7 +1424,23 @@ def __call__(self, pb_resp = 
asset_service.AnalyzeOrgPolicyGovernedContainersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.analyze_org_policy_governed_containers", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "AnalyzeOrgPolicyGovernedContainers", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _BatchGetAssetsHistory(_BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory, AssetServiceRestStub): @@ -1276,12 +1494,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_http_options() + request, metadata = self._interceptor.pre_batch_get_assets_history(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetAssetsHistory", + "metadata": 
str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._BatchGetAssetsHistory._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1295,7 +1532,23 @@ def __call__(self, pb_resp = asset_service.BatchGetAssetsHistoryResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_assets_history(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.BatchGetAssetsHistoryResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_assets_history", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetAssetsHistory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _BatchGetEffectiveIamPolicies(_BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies, AssetServiceRestStub): @@ -1353,12 +1606,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_http_options() + request, metadata = self._interceptor.pre_batch_get_effective_iam_policies(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, 
+ "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetEffectiveIamPolicies", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._BatchGetEffectiveIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1372,7 +1644,23 @@ def __call__(self, pb_resp = asset_service.BatchGetEffectiveIamPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_batch_get_effective_iam_policies(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.batch_get_effective_iam_policies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "BatchGetEffectiveIamPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateFeed(_BaseAssetServiceRestTransport._BaseCreateFeed, AssetServiceRestStub): @@ -1435,6 +1723,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseCreateFeed._get_http_options() + request, metadata = self._interceptor.pre_create_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseCreateFeed._get_transcoded_request(http_options, request) @@ -1443,6 +1732,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateFeed", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._CreateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1456,7 +1763,23 @@ def __call__(self, pb_resp = asset_service.Feed.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_feed(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.Feed.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.create_feed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateFeed", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateSavedQuery(_BaseAssetServiceRestTransport._BaseCreateSavedQuery, AssetServiceRestStub): @@ -1513,6 +1836,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_create_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_transcoded_request(http_options, request) @@ -1521,6 +1845,24 @@ def __call__(self, # Jsonify the query params query_params = 
_BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateSavedQuery", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._CreateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1534,7 +1876,23 @@ def __call__(self, pb_resp = asset_service.SavedQuery.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_saved_query(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.SavedQuery.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.create_saved_query", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "CreateSavedQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteFeed(_BaseAssetServiceRestTransport._BaseDeleteFeed, AssetServiceRestStub): @@ -1584,12 +1942,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_http_options() + request, metadata = self._interceptor.pre_delete_feed(request, metadata) transcoded_request = 
_BaseAssetServiceRestTransport._BaseDeleteFeed._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "DeleteFeed", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._DeleteFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1645,12 +2022,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_delete_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": 
"DeleteSavedQuery", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._DeleteSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1714,6 +2110,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseExportAssets._get_http_options() + request, metadata = self._interceptor.pre_export_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseExportAssets._get_transcoded_request(http_options, request) @@ -1722,6 +2119,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ExportAssets", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._ExportAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1733,7 +2148,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_assets(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + 
_LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.export_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ExportAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetFeed(_BaseAssetServiceRestTransport._BaseGetFeed, AssetServiceRestStub): @@ -1795,12 +2226,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseGetFeed._get_http_options() + request, metadata = self._interceptor.pre_get_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseGetFeed._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetFeed", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._GetFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1814,7 +2264,23 @@ def __call__(self, pb_resp = asset_service.Feed.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_feed(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.Feed.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } 
+ _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.get_feed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetFeed", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetSavedQuery(_BaseAssetServiceRestTransport._BaseGetSavedQuery, AssetServiceRestStub): @@ -1870,12 +2336,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_get_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetSavedQuery", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._GetSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1889,7 +2374,23 @@ def __call__(self, pb_resp = asset_service.SavedQuery.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_saved_query(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.SavedQuery.to_json(resp), + 
"headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.get_saved_query", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetSavedQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListAssets(_BaseAssetServiceRestTransport._BaseListAssets, AssetServiceRestStub): @@ -1943,12 +2444,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseListAssets._get_http_options() + request, metadata = self._interceptor.pre_list_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseListAssets._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListAssets", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._ListAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1962,7 +2482,23 @@ def __call__(self, pb_resp = asset_service.ListAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_assets(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { 
+ "payload": asset_service.ListAssetsResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.list_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListFeeds(_BaseAssetServiceRestTransport._BaseListFeeds, AssetServiceRestStub): @@ -2016,12 +2552,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseListFeeds._get_http_options() + request, metadata = self._interceptor.pre_list_feeds(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseListFeeds._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListFeeds", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._ListFeeds._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2035,7 +2590,23 @@ def __call__(self, pb_resp = asset_service.ListFeedsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_feeds(resp) + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.ListFeedsResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.list_feeds", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListFeeds", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListSavedQueries(_BaseAssetServiceRestTransport._BaseListSavedQueries, AssetServiceRestStub): @@ -2089,12 +2660,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_http_options() + request, metadata = self._interceptor.pre_list_saved_queries(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListSavedQueries", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._ListSavedQueries._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2108,7 +2698,23 @@ def __call__(self, pb_resp = asset_service.ListSavedQueriesResponse.pb(resp) json_format.Parse(response.content, 
pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_saved_queries(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.ListSavedQueriesResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.list_saved_queries", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "ListSavedQueries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _QueryAssets(_BaseAssetServiceRestTransport._BaseQueryAssets, AssetServiceRestStub): @@ -2163,6 +2769,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseQueryAssets._get_http_options() + request, metadata = self._interceptor.pre_query_assets(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseQueryAssets._get_transcoded_request(http_options, request) @@ -2171,6 +2778,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "QueryAssets", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._QueryAssets._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, 
body) @@ -2184,7 +2809,23 @@ def __call__(self, pb_resp = asset_service.QueryAssetsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_query_assets(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.QueryAssetsResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.query_assets", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "QueryAssets", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _SearchAllIamPolicies(_BaseAssetServiceRestTransport._BaseSearchAllIamPolicies, AssetServiceRestStub): @@ -2238,12 +2879,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_http_options() + request, metadata = self._interceptor.pre_search_all_iam_policies(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllIamPolicies", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the 
request response = AssetServiceRestTransport._SearchAllIamPolicies._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2257,7 +2917,23 @@ def __call__(self, pb_resp = asset_service.SearchAllIamPoliciesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_iam_policies(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.SearchAllIamPoliciesResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_iam_policies", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllIamPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _SearchAllResources(_BaseAssetServiceRestTransport._BaseSearchAllResources, AssetServiceRestStub): @@ -2311,12 +2987,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_http_options() + request, metadata = self._interceptor.pre_search_all_resources(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", + 
extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllResources", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._SearchAllResources._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2330,7 +3025,23 @@ def __call__(self, pb_resp = asset_service.SearchAllResourcesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_search_all_resources(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.SearchAllResourcesResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.search_all_resources", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "SearchAllResources", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateFeed(_BaseAssetServiceRestTransport._BaseUpdateFeed, AssetServiceRestStub): @@ -2393,6 +3104,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_http_options() + request, metadata = self._interceptor.pre_update_feed(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_transcoded_request(http_options, request) @@ -2401,6 +3113,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + 
"requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateFeed", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._UpdateFeed._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2414,7 +3144,23 @@ def __call__(self, pb_resp = asset_service.Feed.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_feed(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.Feed.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.update_feed", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateFeed", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateSavedQuery(_BaseAssetServiceRestTransport._BaseUpdateSavedQuery, AssetServiceRestStub): @@ -2471,6 +3217,7 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_http_options() + request, metadata = self._interceptor.pre_update_saved_query(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_transcoded_request(http_options, request) @@ -2479,6 +3226,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, 
uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateSavedQuery", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._UpdateSavedQuery._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2492,7 +3257,23 @@ def __call__(self, pb_resp = asset_service.SavedQuery.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_saved_query(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": asset_service.SavedQuery.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceClient.update_saved_query", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "UpdateSavedQuery", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2734,12 +3515,31 @@ def __call__(self, """ http_options = _BaseAssetServiceRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseAssetServiceRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = 
"{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = AssetServiceRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2752,6 +3552,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.asset_v1.AssetServiceAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": "GetOperation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 2d21452f9528..fe0fb0d93844 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -575,8 +575,8 @@ def __init__(self, *, extra = { 
"serviceName": "google.iam.credentials.v1.IAMCredentials", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index a6ab18b2e235..09f7f29ea7eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -42,6 +43,13 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -320,6 +328,7 @@ def __call__(self, """ http_options = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_http_options() + request, metadata = self._interceptor.pre_generate_access_token(request, metadata) transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_transcoded_request(http_options, request) @@ -328,6 +337,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateAccessToken", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "GenerateAccessToken", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = 
IAMCredentialsRestTransport._GenerateAccessToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -341,7 +368,23 @@ def __call__(self, pb_resp = common.GenerateAccessTokenResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_access_token(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": common.GenerateAccessTokenResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_access_token", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "GenerateAccessToken", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GenerateIdToken(_BaseIAMCredentialsRestTransport._BaseGenerateIdToken, IAMCredentialsRestStub): @@ -396,6 +439,7 @@ def __call__(self, """ http_options = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_http_options() + request, metadata = self._interceptor.pre_generate_id_token(request, metadata) transcoded_request = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_transcoded_request(http_options, request) @@ -404,6 +448,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for 
google.iam.credentials_v1.IAMCredentialsClient.GenerateIdToken", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "GenerateIdToken", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = IAMCredentialsRestTransport._GenerateIdToken._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -417,7 +479,23 @@ def __call__(self, pb_resp = common.GenerateIdTokenResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_generate_id_token(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": common.GenerateIdTokenResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam.credentials_v1.IAMCredentialsClient.generate_id_token", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "GenerateIdToken", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _SignBlob(_BaseIAMCredentialsRestTransport._BaseSignBlob, IAMCredentialsRestStub): @@ -472,6 +550,7 @@ def __call__(self, """ http_options = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_http_options() + request, metadata = self._interceptor.pre_sign_blob(request, metadata) transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_transcoded_request(http_options, request) @@ -480,6 +559,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = 
{ + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignBlob", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignBlob", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = IAMCredentialsRestTransport._SignBlob._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -493,7 +590,23 @@ def __call__(self, pb_resp = common.SignBlobResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sign_blob(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": common.SignBlobResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_blob", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignBlob", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _SignJwt(_BaseIAMCredentialsRestTransport._BaseSignJwt, IAMCredentialsRestStub): @@ -548,6 +661,7 @@ def __call__(self, """ http_options = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_http_options() + request, metadata = self._interceptor.pre_sign_jwt(request, metadata) transcoded_request = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_transcoded_request(http_options, request) @@ -556,6 +670,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, 
uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignJwt", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignJwt", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = IAMCredentialsRestTransport._SignJwt._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -569,7 +701,23 @@ def __call__(self, pb_resp = common.SignJwtResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_sign_jwt(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": common.SignJwtResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam.credentials_v1.IAMCredentialsClient.sign_jwt", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": "SignJwt", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 445c03ea58b6..8aac71a26aee 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -683,8 +683,8 @@ def __init__(self, *, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", 
"universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 8a66da7096fe..8385af0daabe 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -53,6 +54,13 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -918,6 +926,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCreateChannel._get_http_options() + request, metadata = self._interceptor.pre_create_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCreateChannel._get_transcoded_request(http_options, request) @@ -926,6 +935,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannel", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._CreateChannel._get_response(self._host, metadata, query_params, 
self._session, timeout, transcoded_request, body) @@ -937,7 +964,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateChannelConnection(_BaseEventarcRestTransport._BaseCreateChannelConnection, EventarcRestStub): @@ -996,6 +1039,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_http_options() + request, metadata = self._interceptor.pre_create_channel_connection(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_transcoded_request(http_options, request) @@ -1004,6 +1048,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannelConnection", + "metadata": 
str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._CreateChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1015,7 +1077,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_channel_connection(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateTrigger(_BaseEventarcRestTransport._BaseCreateTrigger, EventarcRestStub): @@ -1074,6 +1152,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCreateTrigger._get_http_options() + request, metadata = self._interceptor.pre_create_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCreateTrigger._get_transcoded_request(http_options, request) @@ -1082,6 +1161,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for 
google.cloud.eventarc_v1.EventarcClient.CreateTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateTrigger", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._CreateTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1093,7 +1190,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_trigger(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.create_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CreateTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteChannel(_BaseEventarcRestTransport._BaseDeleteChannel, EventarcRestStub): @@ -1151,12 +1264,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseDeleteChannel._get_http_options() + request, metadata = self._interceptor.pre_delete_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannel._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": 
request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannel", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._DeleteChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1168,7 +1300,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteChannelConnection(_BaseEventarcRestTransport._BaseDeleteChannelConnection, EventarcRestStub): @@ -1226,12 +1374,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_http_options() + request, metadata = self._interceptor.pre_delete_channel_connection(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = 
transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannelConnection", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._DeleteChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1243,7 +1410,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_channel_connection(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteTrigger(_BaseEventarcRestTransport._BaseDeleteTrigger, EventarcRestStub): @@ -1301,12 +1484,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseDeleteTrigger._get_http_options() + request, metadata = self._interceptor.pre_delete_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteTrigger._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # 
pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteTrigger", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._DeleteTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1318,7 +1520,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_trigger(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.delete_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetChannel(_BaseEventarcRestTransport._BaseGetChannel, EventarcRestStub): @@ -1381,12 +1599,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetChannel._get_http_options() + request, metadata = self._interceptor.pre_get_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetChannel._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannel", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1400,7 +1637,23 @@ def __call__(self, pb_resp = channel.Channel.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": channel.Channel.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetChannelConnection(_BaseEventarcRestTransport._BaseGetChannelConnection, EventarcRestStub): @@ -1462,12 +1715,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetChannelConnection._get_http_options() + request, metadata = self._interceptor.pre_get_channel_connection(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetChannelConnection._get_transcoded_request(http_options, request) # Jsonify the query params query_params = 
_BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannelConnection", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetChannelConnection._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1481,7 +1753,23 @@ def __call__(self, pb_resp = channel_connection.ChannelConnection.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_channel_connection(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": channel_connection.ChannelConnection.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_channel_connection", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetChannelConnection", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetGoogleChannelConfig(_BaseEventarcRestTransport._BaseGetGoogleChannelConfig, EventarcRestStub): @@ -1544,12 +1832,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_http_options() + request, metadata = self._interceptor.pre_get_google_channel_config(request, 
metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleChannelConfig", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1563,7 +1870,23 @@ def __call__(self, pb_resp = google_channel_config.GoogleChannelConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_google_channel_config(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": google_channel_config.GoogleChannelConfig.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_google_channel_config", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetGoogleChannelConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetProvider(_BaseEventarcRestTransport._BaseGetProvider, EventarcRestStub): @@ -1620,12 +1943,31 @@ def __call__(self, 
""" http_options = _BaseEventarcRestTransport._BaseGetProvider._get_http_options() + request, metadata = self._interceptor.pre_get_provider(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetProvider._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetProvider", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetProvider._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1639,7 +1981,23 @@ def __call__(self, pb_resp = discovery.Provider.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_provider(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": discovery.Provider.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_provider", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetProvider", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetTrigger(_BaseEventarcRestTransport._BaseGetTrigger, EventarcRestStub): @@ -1696,12 +2054,31 @@ def 
__call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetTrigger._get_http_options() + request, metadata = self._interceptor.pre_get_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetTrigger._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetTrigger", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetTrigger._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1715,7 +2092,23 @@ def __call__(self, pb_resp = trigger.Trigger.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_trigger(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": trigger.Trigger.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.get_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListChannelConnections(_BaseEventarcRestTransport._BaseListChannelConnections, EventarcRestStub): @@ 
-1772,12 +2165,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListChannelConnections._get_http_options() + request, metadata = self._interceptor.pre_list_channel_connections(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListChannelConnections._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListChannelConnections", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._ListChannelConnections._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1791,7 +2203,23 @@ def __call__(self, pb_resp = eventarc.ListChannelConnectionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channel_connections(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": eventarc.ListChannelConnectionsResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.list_channel_connections", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListChannelConnections", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListChannels(_BaseEventarcRestTransport._BaseListChannels, EventarcRestStub): @@ -1846,12 +2274,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListChannels._get_http_options() + request, metadata = self._interceptor.pre_list_channels(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListChannels._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListChannels", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._ListChannels._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1865,7 +2312,23 @@ def __call__(self, pb_resp = eventarc.ListChannelsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_channels(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": eventarc.ListChannelsResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.list_channels", + extra = { + "serviceName": 
"google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListChannels", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListProviders(_BaseEventarcRestTransport._BaseListProviders, EventarcRestStub): @@ -1920,12 +2383,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListProviders._get_http_options() + request, metadata = self._interceptor.pre_list_providers(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListProviders._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListProviders", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._ListProviders._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1939,7 +2421,23 @@ def __call__(self, pb_resp = eventarc.ListProvidersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_providers(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": eventarc.ListProvidersResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.cloud.eventarc_v1.EventarcClient.list_providers", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListProviders", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTriggers(_BaseEventarcRestTransport._BaseListTriggers, EventarcRestStub): @@ -1994,12 +2492,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListTriggers._get_http_options() + request, metadata = self._interceptor.pre_list_triggers(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListTriggers._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListTriggers", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._ListTriggers._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2013,7 +2530,23 @@ def __call__(self, pb_resp = eventarc.ListTriggersResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_triggers(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": eventarc.ListTriggersResponse.to_json(resp), + "headers": dict(response.headers), + "status": 
response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.list_triggers", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListTriggers", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateChannel(_BaseEventarcRestTransport._BaseUpdateChannel, EventarcRestStub): @@ -2072,6 +2605,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseUpdateChannel._get_http_options() + request, metadata = self._interceptor.pre_update_channel(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseUpdateChannel._get_transcoded_request(http_options, request) @@ -2080,6 +2614,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateChannel", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._UpdateChannel._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2091,7 +2643,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_channel(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + 
"payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.update_channel", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateChannel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateGoogleChannelConfig(_BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig, EventarcRestStub): @@ -2156,6 +2724,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_http_options() + request, metadata = self._interceptor.pre_update_google_channel_config(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_transcoded_request(http_options, request) @@ -2164,6 +2733,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateGoogleChannelConfig", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._UpdateGoogleChannelConfig._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2177,7 +2764,23 @@ def __call__(self, pb_resp = gce_google_channel_config.GoogleChannelConfig.pb(resp) 
json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_google_channel_config(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": gce_google_channel_config.GoogleChannelConfig.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.update_google_channel_config", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateGoogleChannelConfig", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateTrigger(_BaseEventarcRestTransport._BaseUpdateTrigger, EventarcRestStub): @@ -2236,6 +2839,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseUpdateTrigger._get_http_options() + request, metadata = self._interceptor.pre_update_trigger(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseUpdateTrigger._get_transcoded_request(http_options, request) @@ -2244,6 +2848,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateTrigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateTrigger", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._UpdateTrigger._get_response(self._host, 
metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2255,7 +2877,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_trigger(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcClient.update_trigger", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "UpdateTrigger", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2457,12 +3095,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetLocation._get_http_options() + request, metadata = self._interceptor.pre_get_location(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetLocation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetLocation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetLocation._get_response(self._host, metadata, query_params, 
self._session, timeout, transcoded_request) @@ -2475,6 +3132,21 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetLocation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2532,12 +3204,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListLocations._get_http_options() + request, metadata = self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListLocations", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2550,6 +3241,21 @@ def 
__call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListLocations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2607,12 +3313,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetIamPolicy._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetIamPolicy", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2625,6 +3350,21 @@ def __call__(self, resp = policy_pb2.Policy() resp = 
json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2683,6 +3423,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseSetIamPolicy._get_http_options() + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseSetIamPolicy._get_transcoded_request(http_options, request) @@ -2691,6 +3432,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.SetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "SetIamPolicy", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._SetIamPolicy._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2703,6 +3462,21 @@ def __call__(self, resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = 
self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.SetIamPolicy", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2761,6 +3535,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseTestIamPermissions._get_http_options() + request, metadata = self._interceptor.pre_test_iam_permissions(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseTestIamPermissions._get_transcoded_request(http_options, request) @@ -2769,6 +3544,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.TestIamPermissions", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "TestIamPermissions", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._TestIamPermissions._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2781,6 +3574,21 @@ def __call__(self, resp = iam_policy_pb2.TestIamPermissionsResponse() resp = 
json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.TestIamPermissions", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2836,6 +3644,7 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseCancelOperation._get_http_options() + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) @@ -2844,6 +3653,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CancelOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "CancelOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2906,12 +3733,31 @@ def __call__(self, """ http_options = 
_BaseEventarcRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = self._interceptor.pre_delete_operation(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "DeleteOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2977,12 +3823,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for 
google.cloud.eventarc_v1.EventarcClient.GetOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2995,6 +3860,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "GetOperation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -3052,12 +3932,31 @@ def __call__(self, """ http_options = _BaseEventarcRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseEventarcRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListOperations", + extra = { + 
"serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListOperations", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = EventarcRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -3070,6 +3969,21 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.eventarc_v1.EventarcAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": "ListOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 1d7cabde8fac..f7db3e5720a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -631,8 +631,8 @@ def __init__(self, *, extra = { "serviceName": "google.logging.v2.ConfigServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + 
"credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 82234b385637..e36553ef4f93 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -562,8 +562,8 @@ def __init__(self, *, extra = { "serviceName": "google.logging.v2.LoggingServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9c60ff4a0406..1c4d07e10ee1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -563,8 +563,8 @@ def __init__(self, *, extra = { 
"serviceName": "google.logging.v2.MetricsServiceV2", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index f69d514e0082..42b747703264 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -624,8 +624,8 @@ def __init__(self, *, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 5dafa5b3ae8c..c3ceb7bbd3ae 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -45,6 +46,13 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -717,6 +725,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + request, metadata = self._interceptor.pre_create_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) @@ -725,6 +734,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + 
f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -736,7 +763,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.create_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, CloudRedisRestStub): @@ -794,12 +837,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + request, metadata = self._interceptor.pre_delete_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + 
"requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -811,7 +873,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, CloudRedisRestStub): @@ -870,6 +948,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + request, metadata = self._interceptor.pre_export_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) @@ -878,6 +957,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + 
method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -889,7 +986,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_export_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.export_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, CloudRedisRestStub): @@ -948,6 +1061,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + request, metadata = self._interceptor.pre_failover_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) @@ -956,6 +1070,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -967,7 +1099,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_failover_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.failover_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub): @@ -1022,12 +1170,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + request, metadata = self._interceptor.pre_get_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) # Jsonify the query params query_params = 
_BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1041,7 +1208,23 @@ def __call__(self, pb_resp = cloud_redis.Instance.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": cloud_redis.Instance.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, CloudRedisRestStub): @@ -1096,12 +1279,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() + request, metadata = self._interceptor.pre_get_instance_auth_string(request, metadata) transcoded_request = 
_BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1115,7 +1317,23 @@ def __call__(self, pb_resp = cloud_redis.InstanceAuthString.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_instance_auth_string(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": cloud_redis.InstanceAuthString.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance_auth_string", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, CloudRedisRestStub): @@ -1174,6 +1392,7 @@ def __call__(self, """ http_options = 
_BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + request, metadata = self._interceptor.pre_import_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) @@ -1182,6 +1401,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1193,7 +1430,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_import_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.import_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class 
_ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub): @@ -1250,12 +1503,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + request, metadata = self._interceptor.pre_list_instances(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1269,7 +1541,23 @@ def __call__(self, pb_resp = cloud_redis.ListInstancesResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_instances(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": cloud_redis.ListInstancesResponse.to_json(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, CloudRedisRestStub): @@ -1328,6 +1616,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() + request, metadata = self._interceptor.pre_reschedule_maintenance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) @@ -1336,6 +1625,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1347,7 +1654,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_reschedule_maintenance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + 
"Received response for google.cloud.redis_v1.CloudRedisClient.reschedule_maintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub): @@ -1406,6 +1729,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + request, metadata = self._interceptor.pre_update_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) @@ -1414,6 +1738,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1425,7 +1767,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + 
"payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, CloudRedisRestStub): @@ -1484,6 +1842,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + request, metadata = self._interceptor.pre_upgrade_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) @@ -1492,6 +1851,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1503,7 +1880,23 @@ def __call__(self, # Return the response resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = 
self._interceptor.post_upgrade_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(resp), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.upgrade_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1649,12 +2042,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + request, metadata = self._interceptor.pre_get_location(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1667,6 +2079,21 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1724,12 +2151,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + request, metadata = self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1742,6 +2188,21 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + 
http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1796,12 +2257,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CancelOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1864,12 +2344,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = self._interceptor.pre_delete_operation(request, metadata) transcoded_request = 
_BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1935,12 +2434,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + request, metadata = self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + 
"metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1953,6 +2471,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2010,12 +2543,31 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + request, metadata = self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + 
}, + ) + # Send the request response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2028,6 +2580,21 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2086,6 +2653,7 @@ def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + request, metadata = self._interceptor.pre_wait_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) @@ -2094,6 +2662,24 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # 
Send the request response = CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2106,6 +2692,21 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_wait_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 6c73f6aea625..da0081ea71be 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -51,6 +51,17 @@ from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +import logging + +try: # pragma: NO COVER + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True +except ImportError: + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -745,6 +756,7 @@ async def __call__(self, """ http_options = 
_BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + request, metadata = await self._interceptor.pre_create_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) @@ -753,6 +765,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -771,6 +801,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_create_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class 
_DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, AsyncCloudRedisRestStub): @@ -828,12 +874,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + request, metadata = await self._interceptor.pre_delete_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -852,6 +917,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_delete_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": 
"DeleteInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _ExportInstance(_BaseCloudRedisRestTransport._BaseExportInstance, AsyncCloudRedisRestStub): @@ -910,6 +991,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseExportInstance._get_http_options() + request, metadata = await self._interceptor.pre_export_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseExportInstance._get_transcoded_request(http_options, request) @@ -918,6 +1000,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._ExportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -936,6 +1036,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_export_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + 
"Received response for google.cloud.redis_v1.CloudRedisAsyncClient.export_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ExportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _FailoverInstance(_BaseCloudRedisRestTransport._BaseFailoverInstance, AsyncCloudRedisRestStub): @@ -994,6 +1110,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_http_options() + request, metadata = await self._interceptor.pre_failover_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_transcoded_request(http_options, request) @@ -1002,6 +1119,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._FailoverInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1020,6 +1155,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_failover_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + 
http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.failover_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "FailoverInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub): @@ -1074,12 +1225,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + request, metadata = await self._interceptor.pre_get_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1098,6 +1268,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance(resp) + if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": cloud_redis.Instance.to_json(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _GetInstanceAuthString(_BaseCloudRedisRestTransport._BaseGetInstanceAuthString, AsyncCloudRedisRestStub): @@ -1152,12 +1338,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_http_options() + request, metadata = await self._interceptor.pre_get_instance_auth_string(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._GetInstanceAuthString._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1176,6 +1381,22 @@ async def 
__call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance_auth_string(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": cloud_redis.InstanceAuthString.to_json(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance_auth_string", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstanceAuthString", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _ImportInstance(_BaseCloudRedisRestTransport._BaseImportInstance, AsyncCloudRedisRestStub): @@ -1234,6 +1455,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseImportInstance._get_http_options() + request, metadata = await self._interceptor.pre_import_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseImportInstance._get_transcoded_request(http_options, request) @@ -1242,6 +1464,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the 
request response = await AsyncCloudRedisRestTransport._ImportInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1260,6 +1500,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_import_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.import_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ImportInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub): @@ -1316,12 +1572,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + request, metadata = await self._interceptor.pre_list_instances(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + 
"rpcName": "ListInstances", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1340,6 +1615,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_list_instances(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": cloud_redis.ListInstancesResponse.to_json(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _RescheduleMaintenance(_BaseCloudRedisRestTransport._BaseRescheduleMaintenance, AsyncCloudRedisRestStub): @@ -1398,6 +1689,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_http_options() + request, metadata = await self._interceptor.pre_reschedule_maintenance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_transcoded_request(http_options, request) @@ -1406,6 +1698,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + 
"requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._RescheduleMaintenance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1424,6 +1734,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_reschedule_maintenance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.reschedule_maintenance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "RescheduleMaintenance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub): @@ -1482,6 +1808,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + request, metadata = await self._interceptor.pre_update_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) @@ -1490,6 +1817,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # 
pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1508,6 +1853,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_update_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp class _UpgradeInstance(_BaseCloudRedisRestTransport._BaseUpgradeInstance, AsyncCloudRedisRestStub): @@ -1566,6 +1927,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_http_options() + request, metadata = await self._interceptor.pre_upgrade_instance(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_transcoded_request(http_options, request) @@ -1574,6 +1936,24 @@ async def __call__(self, # Jsonify the query params query_params = 
_BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._UpgradeInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -1592,6 +1972,22 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_upgrade_instance(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": str(dict(response.headers)), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.upgrade_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpgradeInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp @property @@ -1771,12 +2167,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + request, metadata = await self._interceptor.pre_get_location(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) # Jsonify the 
query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1793,6 +2208,21 @@ async def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1850,12 +2280,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + request, metadata = await self._interceptor.pre_list_locations(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = 
_BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1872,6 +2321,21 @@ async def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1926,12 +2390,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + request, metadata = await self._interceptor.pre_cancel_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = 
_BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CancelOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -1998,12 +2481,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + request, metadata = await self._interceptor.pre_delete_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = 
await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2073,12 +2575,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + request, metadata = await self._interceptor.pre_get_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2095,6 +2616,21 @@ async def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + 
"metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2152,12 +2688,31 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + request, metadata = await self._interceptor.pre_list_operations(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) @@ -2174,6 +2729,21 @@ async def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2232,6 +2802,7 @@ async def __call__(self, """ http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + request, metadata = await self._interceptor.pre_wait_operation(request, metadata) transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) @@ -2240,6 +2811,24 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + http_request = { + "payload": type(request).to_json(request), + "requestMethod": method, + "requestUrl": request_url, + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "metadata": str(dict(metadata)), + "httpRequest": http_request, + }, + ) + # Send the request response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) @@ -2256,6 +2845,21 @@ async def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_wait_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + http_response = { + "payload": json_format.MessageToJson(response), + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": 
"WaitOperation", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property From 233dc9c5070742bed5e5e6b56e64cd1a78fecd80 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Mon, 9 Dec 2024 09:50:25 -0800 Subject: [PATCH 1223/1339] chore(logging): standardize logging in _shared_macros (#2277) --- .../%sub/services/%service/_shared_macros.j2 | 10 +- .../services/asset_service/transports/rest.py | 170 ++++++++------- .../iam_credentials/transports/rest.py | 28 +-- .../services/eventarc/transports/rest.py | 203 ++++++++++-------- .../services/cloud_redis/transports/rest.py | 136 +++++++----- .../cloud_redis/transports/rest_asyncio.py | 136 +++++++----- 6 files changed, 388 insertions(+), 295 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 1623d1b1155d..287f4fac7acf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -218,17 +218,19 @@ def _get_http_options(): request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method_name }}", + f"Sending request for {{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}.{{ method_name }}", extra = { "serviceName": "{{ service.meta.address.proto }}", "rpcName": "{{ method_name }}", - "metadata": 
str(dict(metadata)), "httpRequest": http_request, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} + "metadata": http_request["headers"], }, ) @@ -478,9 +480,9 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m extra = { "serviceName": "{{ service.meta.address.proto }}", "rpcName": "{{ name }}", + "httpResponse": http_response, {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up #} "metadata": http_response["headers"], - "httpResponse": http_response, }, ) return resp diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 3e07dd39a83c..31f191b501c0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -837,17 +837,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicy", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicy", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -953,17 +954,18 
@@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeIamPolicyLongrunning", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeIamPolicyLongrunning", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1062,17 +1064,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeMove", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeMove", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1173,17 +1176,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", + f"Sending request for 
google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicies", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicies", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1285,17 +1289,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedAssets", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedAssets", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1397,17 +1402,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.AnalyzeOrgPolicyGovernedContainers", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "AnalyzeOrgPolicyGovernedContainers", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1505,17 +1511,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = 
transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetAssetsHistory", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetAssetsHistory", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1617,17 +1624,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.BatchGetEffectiveIamPolicies", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "BatchGetEffectiveIamPolicies", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1736,17 +1744,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateFeed", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": 
"CreateFeed", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1849,17 +1858,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.CreateSavedQuery", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "CreateSavedQuery", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1953,17 +1963,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteFeed", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteFeed", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2033,17 +2044,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for 
google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.DeleteSavedQuery", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "DeleteSavedQuery", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2123,17 +2135,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ExportAssets", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ExportAssets", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2237,17 +2250,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetFeed", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetFeed", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2347,17 +2361,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": 
type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetSavedQuery", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetSavedQuery", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2455,17 +2470,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListAssets", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListAssets", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2563,17 +2579,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListFeeds", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListFeeds", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2671,17 +2688,18 @@ 
def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.ListSavedQueries", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "ListSavedQueries", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2782,17 +2800,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.QueryAssets", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "QueryAssets", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2890,17 +2909,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllIamPolicies", extra = { 
"serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllIamPolicies", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2998,17 +3018,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.SearchAllResources", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "SearchAllResources", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3117,17 +3138,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateFeed", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateFeed", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3230,17 +3252,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": 
dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.UpdateSavedQuery", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "UpdateSavedQuery", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3526,17 +3549,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", + f"Sending request for google.cloud.asset_v1.AssetServiceClient.GetOperation", extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3563,8 +3587,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.asset.v1.AssetService", "rpcName": "GetOperation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 09f7f29ea7eb..0f8fe0f0f21d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -341,17 
+341,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateAccessToken", + f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateAccessToken", extra = { "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateAccessToken", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -452,17 +453,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateIdToken", + f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.GenerateIdToken", extra = { "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "GenerateIdToken", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -563,17 +565,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignBlob", + f"Sending request for 
google.iam.credentials_v1.IAMCredentialsClient.SignBlob", extra = { "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignBlob", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -674,17 +677,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignJwt", + f"Sending request for google.iam.credentials_v1.IAMCredentialsClient.SignJwt", extra = { "serviceName": "google.iam.credentials.v1.IAMCredentials", "rpcName": "SignJwt", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 8385af0daabe..6f79a1481100 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -939,17 +939,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannel", + f"Sending 
request for google.cloud.eventarc_v1.EventarcClient.CreateChannel", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateChannel", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1052,17 +1053,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateChannelConnection", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateChannelConnection", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1165,17 +1167,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CreateTrigger", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CreateTrigger", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1275,17 +1278,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": 
type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannel", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteChannel", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1385,17 +1389,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteChannelConnection", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteChannelConnection", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1495,17 +1500,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteTrigger", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteTrigger", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1610,17 +1616,18 @@ def 
__call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannel", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetChannel", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1726,17 +1733,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetChannelConnection", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetChannelConnection", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1843,17 +1851,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetGoogleChannelConfig", extra = { 
"serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetGoogleChannelConfig", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1954,17 +1963,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetProvider", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetProvider", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2065,17 +2075,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetTrigger", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetTrigger", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2176,17 +2187,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } 
_LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannelConnections", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListChannelConnections", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2285,17 +2297,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListChannels", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListChannels", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2394,17 +2407,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListProviders", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListProviders", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2503,17 +2517,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = 
transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListTriggers", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListTriggers", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2618,17 +2633,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateChannel", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateChannel", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2737,17 +2753,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateGoogleChannelConfig", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateGoogleChannelConfig", - "metadata": 
str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2852,17 +2869,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateTrigger", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.UpdateTrigger", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "UpdateTrigger", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3106,17 +3124,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetLocation", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetLocation", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetLocation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3143,8 +3162,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetLocation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -3215,17 +3234,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": 
type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListLocations", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListLocations", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3252,8 +3272,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListLocations", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -3324,17 +3344,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetIamPolicy", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetIamPolicy", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetIamPolicy", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3361,8 +3382,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetIamPolicy", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -3436,17 +3457,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), 
+ "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.SetIamPolicy", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.SetIamPolicy", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "SetIamPolicy", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3473,8 +3495,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "SetIamPolicy", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -3548,17 +3570,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.TestIamPermissions", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.TestIamPermissions", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "TestIamPermissions", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3585,8 +3608,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "TestIamPermissions", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -3657,17 +3680,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + 
"payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.CancelOperation", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.CancelOperation", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "CancelOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3744,17 +3768,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteOperation", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.DeleteOperation", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "DeleteOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3834,17 +3859,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetOperation", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.GetOperation", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3871,8 +3897,8 @@ def 
__call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "GetOperation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -3943,17 +3969,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListOperations", + f"Sending request for google.cloud.eventarc_v1.EventarcClient.ListOperations", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListOperations", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -3980,8 +4007,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "rpcName": "ListOperations", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index c3ceb7bbd3ae..2a2124b30afc 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -738,17 +738,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + 
"payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -848,17 +849,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -961,17 +963,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1074,17 +1077,18 @@ def __call__(self, 
request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1181,17 +1185,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1290,17 +1295,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", extra = { "serviceName": 
"google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1405,17 +1411,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1514,17 +1521,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1629,17 +1637,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( 
- f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1742,17 +1751,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1855,17 +1865,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2053,17 +2064,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] 
http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2090,8 +2102,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2162,17 +2174,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2199,8 +2212,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2268,17 +2281,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": 
type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2355,17 +2369,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2445,17 +2460,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2482,8 
+2498,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2554,17 +2570,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2591,8 +2608,8 @@ def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2666,17 +2683,18 @@ def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2703,8 +2721,8 @@ def __call__(self, 
extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index da0081ea71be..bea441bdfa04 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -769,17 +769,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CreateInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -885,17 +886,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + f"Sending request for 
google.cloud.redis_v1.CloudRedisClient.DeleteInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1004,17 +1006,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ExportInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ExportInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1123,17 +1126,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.FailoverInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "FailoverInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1236,17 +1240,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": 
type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1349,17 +1354,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstanceAuthString", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetInstanceAuthString", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1468,17 +1474,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ImportInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ImportInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1583,17 +1590,18 @@ async def 
__call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListInstances", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1702,17 +1710,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.RescheduleMaintenance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "RescheduleMaintenance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1821,17 +1830,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", extra = { 
"serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpdateInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -1940,17 +1950,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpgradeInstance", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "UpgradeInstance", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2178,17 +2189,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2219,8 +2231,8 @@ async def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetLocation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2291,17 +2303,18 @@ async def __call__(self, request_url = 
"{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2332,8 +2345,8 @@ async def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListLocations", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2401,17 +2414,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "CancelOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2492,17 +2506,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + 
"headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "DeleteOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2586,17 +2601,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2627,8 +2643,8 @@ async def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "GetOperation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2699,17 +2715,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", 
"rpcName": "ListOperations", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2740,8 +2757,8 @@ async def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "ListOperations", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp @@ -2815,17 +2832,18 @@ async def __call__(self, request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] http_request = { - "payload": type(request).to_json(request), + "payload": type(request).to_json(request), "requestMethod": method, "requestUrl": request_url, + "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", - "metadata": str(dict(metadata)), "httpRequest": http_request, + "metadata": http_request["headers"], }, ) @@ -2856,8 +2874,8 @@ async def __call__(self, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "rpcName": "WaitOperation", - "metadata": http_response["headers"], "httpResponse": http_response, + "metadata": http_response["headers"], }, ) return resp From 1044768cba34eac53c5ca8acc746fead062ac1de Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 10 Dec 2024 03:37:52 +0500 Subject: [PATCH 1224/1339] chore: skip logging for streaming rest RPCs (#2280) --- .../%sub/services/%service/transports/rest.py.j2 | 3 +++ .../%sub/services/%service/transports/rest_asyncio.py.j2 | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 008e590f089e..0cdc6383413c 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -256,6 +256,8 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %}{# method.lro #} {#- TODO(https://github.com/googleapis/gapic-generator-python/issues/2274): Add debug log before intercepting a request #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. #} + {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER http_response = { "payload": {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(resp){% else %}json_format.MessageToJson(resp){% endif %}, @@ -272,6 +274,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): "httpResponse": http_response, }, ) + {% endif %}{# if not method.server_streaming #} return resp {% endif %}{# method.void #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index f07090bca22d..11353109dfe1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -217,6 +217,8 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): 
json_format.Parse(content, pb_resp, ignore_unknown_fields=True) {% endif %}{# if method.server_streaming #} resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. #} + {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER http_response = { "payload": {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(response){% endif %}, @@ -232,7 +234,8 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): "httpResponse": http_response, }, ) - + + {% endif %}{# if not method.server_streaming #} return resp {% endif %}{# method.void #} From 70753b457582376124910fd2c18ee5c50abf63ae Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 10 Dec 2024 23:21:49 +0500 Subject: [PATCH 1225/1339] chore: add test coverage for debug logging (#2281) Co-authored-by: Victor Chudnovsky --- .../.github/workflows/tests.yaml | 9 + .../%name_%version/%sub/_test_mixins.py.j2 | 2 + .../%name_%version/%sub/test_%service.py.j2 | 4 + .../%sub/services/%service/_shared_macros.j2 | 17 +- .../%sub/services/%service/async_client.py.j2 | 15 +- .../%sub/services/%service/client.py.j2 | 11 +- .../services/%service/transports/rest.py.j2 | 16 +- .../%service/transports/rest_asyncio.py.j2 | 16 +- .../gapic/%name_%version/%sub/test_macros.j2 | 7 + .../services/asset_service/async_client.py | 15 +- .../asset_v1/services/asset_service/client.py | 11 +- .../services/asset_service/transports/rest.py | 282 +++++++++++++--- .../unit/gapic/asset_v1/test_asset_service.py | 110 ++++++ .../services/iam_credentials/async_client.py | 15 +- .../services/iam_credentials/client.py | 11 +- .../iam_credentials/transports/rest.py | 54 ++- .../credentials_v1/test_iam_credentials.py | 20 ++ .../services/eventarc/async_client.py | 15 +- 
.../eventarc_v1/services/eventarc/client.py | 11 +- .../services/eventarc/transports/rest.py | 318 +++++++++++++++--- .../unit/gapic/eventarc_v1/test_eventarc.py | 108 ++++++ .../config_service_v2/async_client.py | 15 +- .../services/config_service_v2/client.py | 11 +- .../logging_service_v2/async_client.py | 15 +- .../services/logging_service_v2/client.py | 11 +- .../metrics_service_v2/async_client.py | 15 +- .../services/metrics_service_v2/client.py | 11 +- .../services/cloud_redis/async_client.py | 15 +- .../redis_v1/services/cloud_redis/client.py | 11 +- .../services/cloud_redis/transports/rest.py | 210 ++++++++++-- .../cloud_redis/transports/rest_asyncio.py | 210 ++++++++++-- .../unit/gapic/redis_v1/test_cloud_redis.py | 116 +++++++ 32 files changed, 1425 insertions(+), 282 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 3328d87b285c..ffb5c1704475 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -58,6 +58,8 @@ jobs: # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA. python: ["3.7", "3.13"] target: [showcase, showcase_alternative_templates, showcase_w_rest_async] + logging_scope: ["", "google"] + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -90,6 +92,9 @@ jobs: unzip protoc-${PROTOC_VERSION}.zip sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - name: Run showcase tests. + env: + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2286): Construct nox sessions with logging enabled. 
+ GOOGLE_SDK_PYTHON_LOGGING_SCOPE: ${{ matrix.logging_scope }} run: nox -s ${{ matrix.target }}-${{ matrix.python }} showcase-mtls: if: ${{ false }} # TODO(dovs): reenable when #1218 is fixed @@ -143,6 +148,7 @@ jobs: python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `_w_rest_async` variant when async rest is GA. variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async] + logging_scope: ["", "google"] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -166,6 +172,9 @@ jobs: - name: Install nox. run: python -m pip install nox - name: Run unit tests. + env: + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2286): Construct nox sessions with logging enabled. + GOOGLE_SDK_PYTHON_LOGGING_SCOPE: ${{ matrix.logging_scope }} run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} showcase-unit-add-iam-methods: runs-on: ubuntu-latest diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 index 73b4a5c0a658..d7f8bb7e6837 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/_test_mixins.py.j2 @@ -17,6 +17,7 @@ def test_{{ name|snake_case }}_rest_bad_request(transport: str = 'rest', request response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.{{ name|snake_case }}(request) @pytest.mark.parametrize("request_type", [ @@ -50,6 +51,7 @@ def test_{{ name|snake_case }}_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.{{ name|snake_case }}(request) diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c84936dc87c7..3c839f4c1177 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1096,6 +1096,7 @@ def test_{{ method_name }}_rest(request_type, transport: str = 'rest'): req.return_value = Response() req.return_value.status_code = 500 req.return_value.request = PreparedRequest() + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.void %} json_return_value = '' {% elif method.server_streaming %} @@ -1249,6 +1250,7 @@ def test_{{ method.name|snake_case }}_rest(request_type): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} response = client.{{ method.safe_name|snake_case }}(iter(requests)) {% elif method.server_streaming %} @@ -1495,6 +1497,7 @@ def test_{{ method_name }}_rest_interceptors(null_interceptor): } req.return_value = Response() + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} req.return_value.status_code = 200 req.return_value.request = PreparedRequest() {% if not method.void %} @@ -1545,6 +1548,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} client.{{ method.safe_name|snake_case 
}}(iter(requests)) {% else %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index 287f4fac7acf..f588391bd56f 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -197,7 +197,7 @@ def _get_http_options(): method_name (str): The method name. service: The service. is_async (bool): Used to determine the code path i.e. whether for sync or async call. #} -{% macro rest_call_method_common(body_spec, method_name, service, is_async=False) %} +{% macro rest_call_method_common(body_spec, method_name, service, is_async=False, is_proto_plus_type=False) %} {% set service_name = service.name %} {% set await_prefix = "await " if is_async else "" %} {% set async_class_prefix = "Async" if is_async else "" %} @@ -217,8 +217,14 @@ def _get_http_options(): if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = {% if is_proto_plus_type %}type(request).to_json(request){% else %}json_format.MessageToJson(request){% endif %} + + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2282): Remove try/except and correctly parse request payload. 
#} + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -470,8 +476,13 @@ class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_m resp = json_format.Parse(content, resp) resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. #} + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index aaf8b02acf0a..2513ce10c0ee 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -53,10 +53,10 @@ from .transports.base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import {{ service.grpc_asyncio_transport_name }} from .client import {{ service.client_name }} -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -257,9 +257,12 @@ class {{ service.async_client_name }}: extra 
= { "serviceName": "{{ service.meta.address.proto }}", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "{{ service.meta.address.proto }}", + "credentialsType": None, + } ) {% for method in service.methods.values() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 8d0955910ce5..952ef03abcac 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -40,10 +40,10 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -619,7 +619,10 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": 
f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "{{ service.meta.address.proto }}", + "credentialsType": None, + } ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 0cdc6383413c..3ba65b993cbb 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -41,10 +41,10 @@ try: except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ -235,7 +235,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %} """ - {{ shared_macros.rest_call_method_common(body_spec, method.name, service)|indent(8) }} + {{ shared_macros.rest_call_method_common(body_spec, method.name, service, False, method.output.ident.is_proto_plus_type)|indent(8) }} {% if not method.void %} # Return the response @@ -259,8 +259,14 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. 
#} {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(resp){% endif %} + + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. #} + response_payload = None http_response = { - "payload": {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(resp){% else %}json_format.MessageToJson(resp){% endif %}, + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 11353109dfe1..4a7dcf870e8b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -61,10 +61,10 @@ from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO import logging -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ -199,7 +199,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %} """ - {{ shared_macros.rest_call_method_common(body_spec, method.name, service, is_async=True)|indent(8) }} + {{ shared_macros.rest_call_method_common(body_spec, method.name, service, True, 
method.output.ident.is_proto_plus_type)|indent(8) }} {% if not method.void %} # Return the response @@ -220,8 +220,14 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. #} {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(resp){% endif %} + + except: + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. #} + response_payload = None http_response = { - "payload": {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(response){% endif %}, + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 38c43b0bad90..bd8547f63de5 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1171,6 +1171,7 @@ def test_{{ method_name }}_rest_required_fields(request_type={{ method.input.ide response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} response = client.{{ method_name }}(iter(requests)) @@ -1263,6 +1264,7 @@ def test_{{ method_name }}_rest_flattened(): {% endif %} response_value._content = 
json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.server_streaming %} with mock.patch.object(response_value, 'iter_content') as iter_content: @@ -1796,6 +1798,7 @@ def test_initialize_client_w_{{transport_name}}(): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {{ await_prefix }}client.{{ method_name }}(request) {% endif %}{# if 'grpc' in transport #} @@ -1846,6 +1849,7 @@ def test_initialize_client_w_{{transport_name}}(): response_value.request = Request() {% endif %} req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {{ await_prefix }}client.{{ method_name }}(request) {% endif %}{# if 'grpc' in transport #} {% endmacro %} @@ -2029,6 +2033,7 @@ def test_initialize_client_w_{{transport_name}}(): {% endif %}{# is_async #} {% endif %}{# method.server_streaming #} req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = {{ await_prefix }}client.{{ method_name }}(request) {% if "next_page_token" in method_output.fields.values()|map(attribute='name', default="") and not method.paged_result_field %} {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2199): The following assert statement is added to force @@ -2139,6 +2144,7 @@ def test_initialize_client_w_{{transport_name}}(): {% endif %} req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = {{ await_prefix }}client.{{ method_name }}(request) @@ -2232,6 +2238,7 @@ def test_initialize_client_w_{{transport_name}}(): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if not method.void %} 
return_value = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json({{ method.output.ident }}()){% else %}json_format.MessageToJson({{ method.output.ident }}()){% endif %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index c911b2dd52ab..334e5067fff0 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -47,10 +47,10 @@ from .transports.grpc_asyncio import AssetServiceGrpcAsyncIOTransport from .client import AssetServiceClient -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -261,9 +261,12 @@ def __init__(self, *, extra = { "serviceName": "google.cloud.asset.v1.AssetService", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.asset.v1.AssetService", + "credentialsType": None, + } ) async def 
export_assets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 1b536ff11baf..cf38212cf840 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -640,7 +640,10 @@ def __init__(self, *, "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.asset.v1.AssetService", + "credentialsType": None, + } ) def export_assets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 31f191b501c0..053297f1629b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -46,10 +46,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ -836,8 +836,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -868,8 +872,12 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.AnalyzeIamPolicyResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -953,8 +961,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, 
"requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -983,8 +995,12 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1063,8 +1079,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1095,8 +1115,12 @@ def __call__(self, resp = self._interceptor.post_analyze_move(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeMoveResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.AnalyzeMoveResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1175,8 +1199,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": 
request_url, "headers": dict(metadata), @@ -1207,8 +1235,12 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policies(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.AnalyzeOrgPoliciesResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1288,8 +1320,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1320,8 +1356,12 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1401,8 +1441,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + 
"payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1433,8 +1477,12 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1510,8 +1558,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1542,8 +1594,12 @@ def __call__(self, resp = self._interceptor.post_batch_get_assets_history(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.BatchGetAssetsHistoryResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1623,8 +1679,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + 
request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1655,8 +1715,12 @@ def __call__(self, resp = self._interceptor.post_batch_get_effective_iam_policies(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1743,8 +1807,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1775,8 +1843,12 @@ def __call__(self, resp = self._interceptor.post_create_feed(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.Feed.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.Feed.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1857,8 +1929,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + 
except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1889,8 +1965,12 @@ def __call__(self, resp = self._interceptor.post_create_saved_query(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SavedQuery.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.SavedQuery.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1962,8 +2042,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2043,8 +2127,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2134,8 +2222,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = 
json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2164,8 +2256,12 @@ def __call__(self, resp = self._interceptor.post_export_assets(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2249,8 +2345,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2281,8 +2381,12 @@ def __call__(self, resp = self._interceptor.post_get_feed(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.Feed.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.Feed.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2360,8 +2464,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - 
"payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2392,8 +2500,12 @@ def __call__(self, resp = self._interceptor.post_get_saved_query(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SavedQuery.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.SavedQuery.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2469,8 +2581,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2501,8 +2617,12 @@ def __call__(self, resp = self._interceptor.post_list_assets(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.ListAssetsResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.ListAssetsResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2578,8 +2698,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": 
type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2610,8 +2734,12 @@ def __call__(self, resp = self._interceptor.post_list_feeds(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.ListFeedsResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.ListFeedsResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2687,8 +2815,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2719,8 +2851,12 @@ def __call__(self, resp = self._interceptor.post_list_saved_queries(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.ListSavedQueriesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.ListSavedQueriesResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2799,8 +2935,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": 
type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2831,8 +2971,12 @@ def __call__(self, resp = self._interceptor.post_query_assets(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.QueryAssetsResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.QueryAssetsResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2908,8 +3052,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2940,8 +3088,12 @@ def __call__(self, resp = self._interceptor.post_search_all_iam_policies(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.SearchAllIamPoliciesResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3017,8 +3169,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - 
"payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3049,8 +3205,12 @@ def __call__(self, resp = self._interceptor.post_search_all_resources(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SearchAllResourcesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.SearchAllResourcesResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3137,8 +3297,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3169,8 +3333,12 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.Feed.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.Feed.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3251,8 +3419,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": 
type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3283,8 +3455,12 @@ def __call__(self, resp = self._interceptor.post_update_saved_query(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = asset_service.SavedQuery.to_json(response) + except: + response_payload = None http_response = { - "payload": asset_service.SavedQuery.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3548,8 +3724,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3577,8 +3757,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 08a92bf22270..61e40a62eed5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -8966,6 +8966,7 @@ def test_export_assets_rest_required_fields(request_type=asset_service.ExportAss response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) @@ -9079,6 +9080,7 @@ def test_list_assets_rest_required_fields(request_type=asset_service.ListAssetsR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) @@ -9123,6 +9125,7 @@ def test_list_assets_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_assets(**mock_args) @@ -9307,6 +9310,7 @@ def test_batch_get_assets_history_rest_required_fields(request_type=asset_servic response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) @@ -9423,6 +9427,7 @@ def test_create_feed_rest_required_fields(request_type=asset_service.CreateFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) @@ -9467,6 +9472,7 @@ def test_create_feed_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} 
client.create_feed(**mock_args) @@ -9587,6 +9593,7 @@ def test_get_feed_rest_required_fields(request_type=asset_service.GetFeedRequest response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) @@ -9631,6 +9638,7 @@ def test_get_feed_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_feed(**mock_args) @@ -9751,6 +9759,7 @@ def test_list_feeds_rest_required_fields(request_type=asset_service.ListFeedsReq response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) @@ -9795,6 +9804,7 @@ def test_list_feeds_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_feeds(**mock_args) @@ -9911,6 +9921,7 @@ def test_update_feed_rest_required_fields(request_type=asset_service.UpdateFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) @@ -9955,6 +9966,7 @@ def test_update_feed_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_feed(**mock_args) @@ -10072,6 +10084,7 @@ def 
test_delete_feed_rest_required_fields(request_type=asset_service.DeleteFeedR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) @@ -10114,6 +10127,7 @@ def test_delete_feed_rest_flattened(): json_return_value = '' response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_feed(**mock_args) @@ -10236,6 +10250,7 @@ def test_search_all_resources_rest_required_fields(request_type=asset_service.Se response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) @@ -10282,6 +10297,7 @@ def test_search_all_resources_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.search_all_resources(**mock_args) @@ -10468,6 +10484,7 @@ def test_search_all_iam_policies_rest_required_fields(request_type=asset_service response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) @@ -10513,6 +10530,7 @@ def test_search_all_iam_policies_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.search_all_iam_policies(**mock_args) @@ -10693,6 +10711,7 @@ def 
test_analyze_iam_policy_rest_required_fields(request_type=asset_service.Anal response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) @@ -10801,6 +10820,7 @@ def test_analyze_iam_policy_longrunning_rest_required_fields(request_type=asset_ response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) @@ -10921,6 +10941,7 @@ def test_analyze_move_rest_required_fields(request_type=asset_service.AnalyzeMov response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_move(request) @@ -11037,6 +11058,7 @@ def test_query_assets_rest_required_fields(request_type=asset_service.QueryAsset response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) @@ -11158,6 +11180,7 @@ def test_create_saved_query_rest_required_fields(request_type=asset_service.Crea response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_saved_query(request) @@ -11208,6 +11231,7 @@ def test_create_saved_query_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_saved_query(**mock_args) @@ -11330,6 +11354,7 @@ def 
test_get_saved_query_rest_required_fields(request_type=asset_service.GetSave response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) @@ -11374,6 +11399,7 @@ def test_get_saved_query_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_saved_query(**mock_args) @@ -11496,6 +11522,7 @@ def test_list_saved_queries_rest_required_fields(request_type=asset_service.List response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) @@ -11540,6 +11567,7 @@ def test_list_saved_queries_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_saved_queries(**mock_args) @@ -11720,6 +11748,7 @@ def test_update_saved_query_rest_required_fields(request_type=asset_service.Upda response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) @@ -11765,6 +11794,7 @@ def test_update_saved_query_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_saved_query(**mock_args) @@ -11883,6 +11913,7 @@ def 
test_delete_saved_query_rest_required_fields(request_type=asset_service.Dele response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) @@ -11925,6 +11956,7 @@ def test_delete_saved_query_rest_flattened(): json_return_value = '' response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_saved_query(**mock_args) @@ -12054,6 +12086,7 @@ def test_batch_get_effective_iam_policies_rest_required_fields(request_type=asse response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_effective_iam_policies(request) @@ -12178,6 +12211,7 @@ def test_analyze_org_policies_rest_required_fields(request_type=asset_service.An response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policies(request) @@ -12228,6 +12262,7 @@ def test_analyze_org_policies_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policies(**mock_args) @@ -12421,6 +12456,7 @@ def test_analyze_org_policy_governed_containers_rest_required_fields(request_typ response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_containers(request) @@ -12471,6 +12507,7 @@ def 
test_analyze_org_policy_governed_containers_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policy_governed_containers(**mock_args) @@ -12664,6 +12701,7 @@ def test_analyze_org_policy_governed_assets_rest_required_fields(request_type=as response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_assets(request) @@ -12714,6 +12752,7 @@ def test_analyze_org_policy_governed_assets_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policy_governed_assets(**mock_args) @@ -14109,6 +14148,7 @@ def test_export_assets_rest_bad_request(request_type=asset_service.ExportAssetsR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_assets(request) @@ -14137,6 +14177,7 @@ def test_export_assets_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_assets(request) # Establish that the response is the type that we expect. 
@@ -14168,6 +14209,7 @@ def test_export_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -14203,6 +14245,7 @@ def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_assets(request) @@ -14236,6 +14279,7 @@ def test_list_assets_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_assets(request) # Establish that the response is the type that we expect. 
@@ -14267,6 +14311,7 @@ def test_list_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.ListAssetsResponse.to_json(asset_service.ListAssetsResponse()) req.return_value.content = return_value @@ -14302,6 +14347,7 @@ def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.Ba response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.batch_get_assets_history(request) @@ -14334,6 +14380,7 @@ def test_batch_get_assets_history_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_assets_history(request) # Establish that the response is the type that we expect. 
@@ -14364,6 +14411,7 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.BatchGetAssetsHistoryResponse.to_json(asset_service.BatchGetAssetsHistoryResponse()) req.return_value.content = return_value @@ -14399,6 +14447,7 @@ def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_feed(request) @@ -14436,6 +14485,7 @@ def test_create_feed_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_feed(request) # Establish that the response is the type that we expect. 
@@ -14471,6 +14521,7 @@ def test_create_feed_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.Feed.to_json(asset_service.Feed()) req.return_value.content = return_value @@ -14506,6 +14557,7 @@ def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_feed(request) @@ -14543,6 +14595,7 @@ def test_get_feed_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_feed(request) # Establish that the response is the type that we expect. 
@@ -14578,6 +14631,7 @@ def test_get_feed_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.Feed.to_json(asset_service.Feed()) req.return_value.content = return_value @@ -14613,6 +14667,7 @@ def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_feeds(request) @@ -14645,6 +14700,7 @@ def test_list_feeds_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_feeds(request) # Establish that the response is the type that we expect. 
@@ -14675,6 +14731,7 @@ def test_list_feeds_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.ListFeedsResponse.to_json(asset_service.ListFeedsResponse()) req.return_value.content = return_value @@ -14710,6 +14767,7 @@ def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_feed(request) @@ -14747,6 +14805,7 @@ def test_update_feed_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_feed(request) # Establish that the response is the type that we expect. 
@@ -14782,6 +14841,7 @@ def test_update_feed_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.Feed.to_json(asset_service.Feed()) req.return_value.content = return_value @@ -14817,6 +14877,7 @@ def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_feed(request) @@ -14845,6 +14906,7 @@ def test_delete_feed_rest_call_success(request_type): json_return_value = '' response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_feed(request) # Establish that the response is the type that we expect. 
@@ -14873,6 +14935,7 @@ def test_delete_feed_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = asset_service.DeleteFeedRequest() metadata =[ @@ -14904,6 +14967,7 @@ def test_search_all_resources_rest_bad_request(request_type=asset_service.Search response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.search_all_resources(request) @@ -14937,6 +15001,7 @@ def test_search_all_resources_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_resources(request) # Establish that the response is the type that we expect. 
@@ -14968,6 +15033,7 @@ def test_search_all_resources_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.SearchAllResourcesResponse.to_json(asset_service.SearchAllResourcesResponse()) req.return_value.content = return_value @@ -15003,6 +15069,7 @@ def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.Sea response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.search_all_iam_policies(request) @@ -15036,6 +15103,7 @@ def test_search_all_iam_policies_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.search_all_iam_policies(request) # Establish that the response is the type that we expect. 
@@ -15067,6 +15135,7 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.SearchAllIamPoliciesResponse.to_json(asset_service.SearchAllIamPoliciesResponse()) req.return_value.content = return_value @@ -15102,6 +15171,7 @@ def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeI response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_iam_policy(request) @@ -15135,6 +15205,7 @@ def test_analyze_iam_policy_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy(request) # Establish that the response is the type that we expect. 
@@ -15166,6 +15237,7 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.AnalyzeIamPolicyResponse.to_json(asset_service.AnalyzeIamPolicyResponse()) req.return_value.content = return_value @@ -15201,6 +15273,7 @@ def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_serv response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_iam_policy_longrunning(request) @@ -15229,6 +15302,7 @@ def test_analyze_iam_policy_longrunning_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_iam_policy_longrunning(request) # Establish that the response is the type that we expect. 
@@ -15260,6 +15334,7 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -15295,6 +15370,7 @@ def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveReq response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_move(request) @@ -15327,6 +15403,7 @@ def test_analyze_move_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_move(request) # Establish that the response is the type that we expect. 
@@ -15357,6 +15434,7 @@ def test_analyze_move_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.AnalyzeMoveResponse.to_json(asset_service.AnalyzeMoveResponse()) req.return_value.content = return_value @@ -15392,6 +15470,7 @@ def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsReq response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.query_assets(request) @@ -15426,6 +15505,7 @@ def test_query_assets_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.query_assets(request) # Establish that the response is the type that we expect. 
@@ -15458,6 +15538,7 @@ def test_query_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.QueryAssetsResponse.to_json(asset_service.QueryAssetsResponse()) req.return_value.content = return_value @@ -15493,6 +15574,7 @@ def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSa response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_saved_query(request) @@ -15593,6 +15675,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_saved_query(request) # Establish that the response is the type that we expect. 
@@ -15627,6 +15710,7 @@ def test_create_saved_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) req.return_value.content = return_value @@ -15662,6 +15746,7 @@ def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQue response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_saved_query(request) @@ -15698,6 +15783,7 @@ def test_get_saved_query_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_saved_query(request) # Establish that the response is the type that we expect. 
@@ -15732,6 +15818,7 @@ def test_get_saved_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) req.return_value.content = return_value @@ -15767,6 +15854,7 @@ def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSave response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_saved_queries(request) @@ -15800,6 +15888,7 @@ def test_list_saved_queries_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_saved_queries(request) # Establish that the response is the type that we expect. 
@@ -15831,6 +15920,7 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.ListSavedQueriesResponse.to_json(asset_service.ListSavedQueriesResponse()) req.return_value.content = return_value @@ -15866,6 +15956,7 @@ def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSa response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_saved_query(request) @@ -15966,6 +16057,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_saved_query(request) # Establish that the response is the type that we expect. 
@@ -16000,6 +16092,7 @@ def test_update_saved_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.SavedQuery.to_json(asset_service.SavedQuery()) req.return_value.content = return_value @@ -16035,6 +16128,7 @@ def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSa response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_saved_query(request) @@ -16063,6 +16157,7 @@ def test_delete_saved_query_rest_call_success(request_type): json_return_value = '' response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_saved_query(request) # Establish that the response is the type that we expect. 
@@ -16091,6 +16186,7 @@ def test_delete_saved_query_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = asset_service.DeleteSavedQueryRequest() metadata =[ @@ -16122,6 +16218,7 @@ def test_batch_get_effective_iam_policies_rest_bad_request(request_type=asset_se response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.batch_get_effective_iam_policies(request) @@ -16154,6 +16251,7 @@ def test_batch_get_effective_iam_policies_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.batch_get_effective_iam_policies(request) # Establish that the response is the type that we expect. 
@@ -16184,6 +16282,7 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(asset_service.BatchGetEffectiveIamPoliciesResponse()) req.return_value.content = return_value @@ -16219,6 +16318,7 @@ def test_analyze_org_policies_rest_bad_request(request_type=asset_service.Analyz response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policies(request) @@ -16252,6 +16352,7 @@ def test_analyze_org_policies_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policies(request) # Establish that the response is the type that we expect. 
@@ -16283,6 +16384,7 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.AnalyzeOrgPoliciesResponse.to_json(asset_service.AnalyzeOrgPoliciesResponse()) req.return_value.content = return_value @@ -16318,6 +16420,7 @@ def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=as response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policy_governed_containers(request) @@ -16351,6 +16454,7 @@ def test_analyze_org_policy_governed_containers_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_containers(request) # Establish that the response is the type that we expect. 
@@ -16382,6 +16486,7 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedContainersResponse()) req.return_value.content = return_value @@ -16417,6 +16522,7 @@ def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.analyze_org_policy_governed_assets(request) @@ -16450,6 +16556,7 @@ def test_analyze_org_policy_governed_assets_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.analyze_org_policy_governed_assets(request) # Establish that the response is the type that we expect. 
@@ -16481,6 +16588,7 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(asset_service.AnalyzeOrgPolicyGovernedAssetsResponse()) req.return_value.content = return_value @@ -16515,6 +16623,7 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -16542,6 +16651,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 4dc6b9db322b..3ade51402e91 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -40,10 +40,10 @@ from .transports.grpc_asyncio import IAMCredentialsGrpcAsyncIOTransport from .client import IAMCredentialsClient -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False 
_LOGGER = std_logging.getLogger(__name__) @@ -252,9 +252,12 @@ def __init__(self, *, extra = { "serviceName": "google.iam.credentials.v1.IAMCredentials", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "credentialsType": None, + } ) async def generate_access_token(self, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index fe0fb0d93844..46f628864123 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -577,7 +577,10 @@ def __init__(self, *, "universeDomain": 
getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "credentialsType": None, + } ) def generate_access_token(self, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 0f8fe0f0f21d..9cf97b8751e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -43,10 +43,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ -340,8 +340,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": 
dict(metadata), @@ -372,8 +376,12 @@ def __call__(self, resp = self._interceptor.post_generate_access_token(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = common.GenerateAccessTokenResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": common.GenerateAccessTokenResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -452,8 +460,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -484,8 +496,12 @@ def __call__(self, resp = self._interceptor.post_generate_id_token(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = common.GenerateIdTokenResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": common.GenerateIdTokenResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -564,8 +580,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), 
@@ -596,8 +616,12 @@ def __call__(self, resp = self._interceptor.post_sign_blob(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = common.SignBlobResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": common.SignBlobResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -676,8 +700,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -708,8 +736,12 @@ def __call__(self, resp = self._interceptor.post_sign_jwt(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = common.SignJwtResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": common.SignJwtResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index c26abcd41291..715989b7cd15 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -2161,6 +2161,7 @@ def 
test_generate_access_token_rest_required_fields(request_type=common.Generate response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_access_token(request) @@ -2208,6 +2209,7 @@ def test_generate_access_token_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.generate_access_token(**mock_args) @@ -2336,6 +2338,7 @@ def test_generate_id_token_rest_required_fields(request_type=common.GenerateIdTo response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_id_token(request) @@ -2383,6 +2386,7 @@ def test_generate_id_token_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.generate_id_token(**mock_args) @@ -2511,6 +2515,7 @@ def test_sign_blob_rest_required_fields(request_type=common.SignBlobRequest): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_blob(request) @@ -2557,6 +2562,7 @@ def test_sign_blob_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.sign_blob(**mock_args) @@ -2684,6 +2690,7 @@ def 
test_sign_jwt_rest_required_fields(request_type=common.SignJwtRequest): response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_jwt(request) @@ -2730,6 +2737,7 @@ def test_sign_jwt_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.sign_jwt(**mock_args) @@ -3101,6 +3109,7 @@ def test_generate_access_token_rest_bad_request(request_type=common.GenerateAcce response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.generate_access_token(request) @@ -3134,6 +3143,7 @@ def test_generate_access_token_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_access_token(request) # Establish that the response is the type that we expect. 
@@ -3165,6 +3175,7 @@ def test_generate_access_token_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = common.GenerateAccessTokenResponse.to_json(common.GenerateAccessTokenResponse()) req.return_value.content = return_value @@ -3200,6 +3211,7 @@ def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.generate_id_token(request) @@ -3233,6 +3245,7 @@ def test_generate_id_token_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.generate_id_token(request) # Establish that the response is the type that we expect. 
@@ -3264,6 +3277,7 @@ def test_generate_id_token_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = common.GenerateIdTokenResponse.to_json(common.GenerateIdTokenResponse()) req.return_value.content = return_value @@ -3299,6 +3313,7 @@ def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.sign_blob(request) @@ -3333,6 +3348,7 @@ def test_sign_blob_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_blob(request) # Establish that the response is the type that we expect. 
@@ -3365,6 +3381,7 @@ def test_sign_blob_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = common.SignBlobResponse.to_json(common.SignBlobResponse()) req.return_value.content = return_value @@ -3400,6 +3417,7 @@ def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.sign_jwt(request) @@ -3434,6 +3452,7 @@ def test_sign_jwt_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.sign_jwt(request) # Establish that the response is the type that we expect. 
@@ -3466,6 +3485,7 @@ def test_sign_jwt_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = common.SignJwtResponse.to_json(common.SignJwtResponse()) req.return_value.content = return_value diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index d3c86f00e4f2..501680371f8f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -56,10 +56,10 @@ from .transports.grpc_asyncio import EventarcGrpcAsyncIOTransport from .client import EventarcClient -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -279,9 +279,12 @@ def __init__(self, *, extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if 
hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "credentialsType": None, + } ) async def get_trigger(self, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 8aac71a26aee..37e3a7fe1d3f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -685,7 +685,10 @@ def __init__(self, *, "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "credentialsType": None, + } ) def get_trigger(self, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 6f79a1481100..583ee430103e 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -54,10 +54,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ -938,8 +938,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -968,8 +972,12 @@ def __call__(self, resp = self._interceptor.post_create_channel(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1052,8 +1060,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None 
http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1082,8 +1094,12 @@ def __call__(self, resp = self._interceptor.post_create_channel_connection(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1166,8 +1182,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1196,8 +1216,12 @@ def __call__(self, resp = self._interceptor.post_create_trigger(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1277,8 +1301,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + 
"payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1307,8 +1335,12 @@ def __call__(self, resp = self._interceptor.post_delete_channel(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1388,8 +1420,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1418,8 +1454,12 @@ def __call__(self, resp = self._interceptor.post_delete_channel_connection(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1499,8 +1539,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": 
request_url, "headers": dict(metadata), @@ -1529,8 +1573,12 @@ def __call__(self, resp = self._interceptor.post_delete_trigger(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1615,8 +1663,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1647,8 +1699,12 @@ def __call__(self, resp = self._interceptor.post_get_channel(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = channel.Channel.to_json(response) + except: + response_payload = None http_response = { - "payload": channel.Channel.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1732,8 +1788,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1764,8 +1824,12 @@ def __call__(self, resp = 
self._interceptor.post_get_channel_connection(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = channel_connection.ChannelConnection.to_json(response) + except: + response_payload = None http_response = { - "payload": channel_connection.ChannelConnection.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1850,8 +1914,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1882,8 +1950,12 @@ def __call__(self, resp = self._interceptor.post_get_google_channel_config(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = google_channel_config.GoogleChannelConfig.to_json(response) + except: + response_payload = None http_response = { - "payload": google_channel_config.GoogleChannelConfig.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1962,8 +2034,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1994,8 +2070,12 @@ 
def __call__(self, resp = self._interceptor.post_get_provider(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = discovery.Provider.to_json(response) + except: + response_payload = None http_response = { - "payload": discovery.Provider.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2074,8 +2154,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2106,8 +2190,12 @@ def __call__(self, resp = self._interceptor.post_get_trigger(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = trigger.Trigger.to_json(response) + except: + response_payload = None http_response = { - "payload": trigger.Trigger.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2186,8 +2274,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2218,8 +2310,12 @@ def __call__(self, resp = self._interceptor.post_list_channel_connections(resp) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListChannelConnectionsResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": eventarc.ListChannelConnectionsResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2296,8 +2392,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2328,8 +2428,12 @@ def __call__(self, resp = self._interceptor.post_list_channels(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListChannelsResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": eventarc.ListChannelsResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2406,8 +2510,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2438,8 +2546,12 @@ def __call__(self, resp = self._interceptor.post_list_providers(resp) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListProvidersResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": eventarc.ListProvidersResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2516,8 +2628,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2548,8 +2664,12 @@ def __call__(self, resp = self._interceptor.post_list_triggers(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = eventarc.ListTriggersResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": eventarc.ListTriggersResponse.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2632,8 +2752,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2662,8 +2786,12 @@ def __call__(self, resp = self._interceptor.post_update_channel(resp) if CLIENT_LOGGING_SUPPORTED 
and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2752,8 +2880,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2784,8 +2916,12 @@ def __call__(self, resp = self._interceptor.post_update_google_channel_config(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) + except: + response_payload = None http_response = { - "payload": gce_google_channel_config.GoogleChannelConfig.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2868,8 +3004,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2898,8 +3038,12 @@ def __call__(self, resp = self._interceptor.post_update_trigger(resp) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3123,8 +3267,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3152,8 +3300,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3233,8 +3385,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3262,8 +3418,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) if CLIENT_LOGGING_SUPPORTED 
and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3343,8 +3503,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3372,8 +3536,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3456,8 +3624,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3485,8 +3657,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3569,8 +3745,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3598,8 +3778,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3679,8 +3863,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3767,8 +3955,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: 
NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3858,8 +4050,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3887,8 +4083,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -3968,8 +4168,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -3997,8 +4201,12 @@ def __call__(self, resp = json_format.Parse(content, 
resp) resp = self._interceptor.post_list_operations(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index ee6b68ab79b3..42bc23df835b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -7449,6 +7449,7 @@ def test_get_trigger_rest_required_fields(request_type=eventarc.GetTriggerReques response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_trigger(request) @@ -7493,6 +7494,7 @@ def test_get_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_trigger(**mock_args) @@ -7615,6 +7617,7 @@ def test_list_triggers_rest_required_fields(request_type=eventarc.ListTriggersRe response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_triggers(request) @@ -7659,6 +7662,7 @@ def test_list_triggers_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_triggers(**mock_args) @@ -7859,6 +7863,7 @@ def test_create_trigger_rest_required_fields(request_type=eventarc.CreateTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_trigger(request) @@ -7911,6 +7916,7 @@ def test_create_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_trigger(**mock_args) @@ -8040,6 +8046,7 @@ def test_update_trigger_rest_required_fields(request_type=eventarc.UpdateTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_trigger(request) @@ -8088,6 +8095,7 @@ def test_update_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_trigger(**mock_args) @@ -8220,6 +8228,7 @@ def test_delete_trigger_rest_required_fields(request_type=eventarc.DeleteTrigger response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_trigger(request) @@ -8267,6 +8276,7 @@ def test_delete_trigger_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value 
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_trigger(**mock_args) @@ -8388,6 +8398,7 @@ def test_get_channel_rest_required_fields(request_type=eventarc.GetChannelReques response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel(request) @@ -8432,6 +8443,7 @@ def test_get_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_channel(**mock_args) @@ -8554,6 +8566,7 @@ def test_list_channels_rest_required_fields(request_type=eventarc.ListChannelsRe response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channels(request) @@ -8598,6 +8611,7 @@ def test_list_channels_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_channels(**mock_args) @@ -8798,6 +8812,7 @@ def test_create_channel_rest_required_fields(request_type=eventarc.CreateChannel response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel(request) @@ -8850,6 +8865,7 @@ def test_create_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} 
client.create_channel(**mock_args) @@ -8979,6 +8995,7 @@ def test_update_channel_rest_required_fields(request_type=eventarc.UpdateChannel response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_channel(request) @@ -9026,6 +9043,7 @@ def test_update_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_channel(**mock_args) @@ -9157,6 +9175,7 @@ def test_delete_channel_rest_required_fields(request_type=eventarc.DeleteChannel response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel(request) @@ -9203,6 +9222,7 @@ def test_delete_channel_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_channel(**mock_args) @@ -9323,6 +9343,7 @@ def test_get_provider_rest_required_fields(request_type=eventarc.GetProviderRequ response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_provider(request) @@ -9367,6 +9388,7 @@ def test_get_provider_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_provider(**mock_args) @@ -9489,6 +9511,7 @@ def 
test_list_providers_rest_required_fields(request_type=eventarc.ListProviders response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_providers(request) @@ -9533,6 +9556,7 @@ def test_list_providers_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_providers(**mock_args) @@ -9715,6 +9739,7 @@ def test_get_channel_connection_rest_required_fields(request_type=eventarc.GetCh response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel_connection(request) @@ -9759,6 +9784,7 @@ def test_get_channel_connection_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_channel_connection(**mock_args) @@ -9881,6 +9907,7 @@ def test_list_channel_connections_rest_required_fields(request_type=eventarc.Lis response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channel_connections(request) @@ -9925,6 +9952,7 @@ def test_list_channel_connections_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_channel_connections(**mock_args) @@ -10118,6 +10146,7 @@ def 
test_create_channel_connection_rest_required_fields(request_type=eventarc.Cr response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel_connection(request) @@ -10166,6 +10195,7 @@ def test_create_channel_connection_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_channel_connection(**mock_args) @@ -10289,6 +10319,7 @@ def test_delete_channel_connection_rest_required_fields(request_type=eventarc.De response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel_connection(request) @@ -10331,6 +10362,7 @@ def test_delete_channel_connection_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_channel_connection(**mock_args) @@ -10451,6 +10483,7 @@ def test_get_google_channel_config_rest_required_fields(request_type=eventarc.Ge response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_google_channel_config(request) @@ -10495,6 +10528,7 @@ def test_get_google_channel_config_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} 
client.get_google_channel_config(**mock_args) @@ -10613,6 +10647,7 @@ def test_update_google_channel_config_rest_required_fields(request_type=eventarc response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_google_channel_config(request) @@ -10658,6 +10693,7 @@ def test_update_google_channel_config_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_google_channel_config(**mock_args) @@ -11745,6 +11781,7 @@ def test_get_trigger_rest_bad_request(request_type=eventarc.GetTriggerRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_trigger(request) @@ -11782,6 +11819,7 @@ def test_get_trigger_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_trigger(request) # Establish that the response is the type that we expect. 
@@ -11817,6 +11855,7 @@ def test_get_trigger_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = trigger.Trigger.to_json(trigger.Trigger()) req.return_value.content = return_value @@ -11852,6 +11891,7 @@ def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersReques response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_triggers(request) @@ -11886,6 +11926,7 @@ def test_list_triggers_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_triggers(request) # Establish that the response is the type that we expect. 
@@ -11918,6 +11959,7 @@ def test_list_triggers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = eventarc.ListTriggersResponse.to_json(eventarc.ListTriggersResponse()) req.return_value.content = return_value @@ -11953,6 +11995,7 @@ def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_trigger(request) @@ -12045,6 +12088,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_trigger(request) # Establish that the response is the type that we expect. 
@@ -12076,6 +12120,7 @@ def test_create_trigger_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -12111,6 +12156,7 @@ def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_trigger(request) @@ -12203,6 +12249,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_trigger(request) # Establish that the response is the type that we expect. 
@@ -12234,6 +12281,7 @@ def test_update_trigger_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -12269,6 +12317,7 @@ def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_trigger(request) @@ -12297,6 +12346,7 @@ def test_delete_trigger_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_trigger(request) # Establish that the response is the type that we expect. 
@@ -12328,6 +12378,7 @@ def test_delete_trigger_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -12363,6 +12414,7 @@ def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_channel(request) @@ -12402,6 +12454,7 @@ def test_get_channel_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel(request) # Establish that the response is the type that we expect. 
@@ -12438,6 +12491,7 @@ def test_get_channel_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = channel.Channel.to_json(channel.Channel()) req.return_value.content = return_value @@ -12473,6 +12527,7 @@ def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsReques response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_channels(request) @@ -12507,6 +12562,7 @@ def test_list_channels_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channels(request) # Establish that the response is the type that we expect. 
@@ -12539,6 +12595,7 @@ def test_list_channels_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = eventarc.ListChannelsResponse.to_json(eventarc.ListChannelsResponse()) req.return_value.content = return_value @@ -12574,6 +12631,7 @@ def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_channel(request) @@ -12666,6 +12724,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel(request) # Establish that the response is the type that we expect. 
@@ -12697,6 +12756,7 @@ def test_create_channel_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -12732,6 +12792,7 @@ def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_channel(request) @@ -12824,6 +12885,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_channel(request) # Establish that the response is the type that we expect. 
@@ -12855,6 +12917,7 @@ def test_update_channel_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -12890,6 +12953,7 @@ def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_channel(request) @@ -12918,6 +12982,7 @@ def test_delete_channel_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel(request) # Establish that the response is the type that we expect. 
@@ -12949,6 +13014,7 @@ def test_delete_channel_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -12984,6 +13050,7 @@ def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest) response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_provider(request) @@ -13018,6 +13085,7 @@ def test_get_provider_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_provider(request) # Establish that the response is the type that we expect. 
@@ -13050,6 +13118,7 @@ def test_get_provider_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = discovery.Provider.to_json(discovery.Provider()) req.return_value.content = return_value @@ -13085,6 +13154,7 @@ def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_providers(request) @@ -13119,6 +13189,7 @@ def test_list_providers_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_providers(request) # Establish that the response is the type that we expect. 
@@ -13151,6 +13222,7 @@ def test_list_providers_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = eventarc.ListProvidersResponse.to_json(eventarc.ListProvidersResponse()) req.return_value.content = return_value @@ -13186,6 +13258,7 @@ def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChanne response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_channel_connection(request) @@ -13222,6 +13295,7 @@ def test_get_channel_connection_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_channel_connection(request) # Establish that the response is the type that we expect. 
@@ -13256,6 +13330,7 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = channel_connection.ChannelConnection.to_json(channel_connection.ChannelConnection()) req.return_value.content = return_value @@ -13291,6 +13366,7 @@ def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListCha response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_channel_connections(request) @@ -13325,6 +13401,7 @@ def test_list_channel_connections_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_channel_connections(request) # Establish that the response is the type that we expect. 
@@ -13357,6 +13434,7 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = eventarc.ListChannelConnectionsResponse.to_json(eventarc.ListChannelConnectionsResponse()) req.return_value.content = return_value @@ -13392,6 +13470,7 @@ def test_create_channel_connection_rest_bad_request(request_type=eventarc.Create response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_channel_connection(request) @@ -13484,6 +13563,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_channel_connection(request) # Establish that the response is the type that we expect. 
@@ -13515,6 +13595,7 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -13550,6 +13631,7 @@ def test_delete_channel_connection_rest_bad_request(request_type=eventarc.Delete response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_channel_connection(request) @@ -13578,6 +13660,7 @@ def test_delete_channel_connection_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_channel_connection(request) # Establish that the response is the type that we expect. 
@@ -13609,6 +13692,7 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -13644,6 +13728,7 @@ def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoo response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_google_channel_config(request) @@ -13678,6 +13763,7 @@ def test_get_google_channel_config_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_google_channel_config(request) # Establish that the response is the type that we expect. 
@@ -13710,6 +13796,7 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = google_channel_config.GoogleChannelConfig.to_json(google_channel_config.GoogleChannelConfig()) req.return_value.content = return_value @@ -13745,6 +13832,7 @@ def test_update_google_channel_config_rest_bad_request(request_type=eventarc.Upd response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_google_channel_config(request) @@ -13843,6 +13931,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_google_channel_config(request) # Establish that the response is the type that we expect. 
@@ -13875,6 +13964,7 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gce_google_channel_config.GoogleChannelConfig.to_json(gce_google_channel_config.GoogleChannelConfig()) req.return_value.content = return_value @@ -13909,6 +13999,7 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_location(request) @@ -13936,6 +14027,7 @@ def test_get_location_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_location(request) @@ -13960,6 +14052,7 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_locations(request) @@ -13987,6 +14080,7 @@ def test_list_locations_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_locations(request) @@ -14011,6 +14105,7 @@ def test_get_iam_policy_rest_bad_request(request_type=iam_policy_pb2.GetIamPolic response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_iam_policy(request) @@ -14038,6 +14133,7 @@ def test_get_iam_policy_rest(request_type): response_value.content = 
json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_iam_policy(request) @@ -14062,6 +14158,7 @@ def test_set_iam_policy_rest_bad_request(request_type=iam_policy_pb2.SetIamPolic response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.set_iam_policy(request) @@ -14089,6 +14186,7 @@ def test_set_iam_policy_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.set_iam_policy(request) @@ -14113,6 +14211,7 @@ def test_test_iam_permissions_rest_bad_request(request_type=iam_policy_pb2.TestI response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.test_iam_permissions(request) @@ -14140,6 +14239,7 @@ def test_test_iam_permissions_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.test_iam_permissions(request) @@ -14164,6 +14264,7 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -14191,6 +14292,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ 
-14215,6 +14317,7 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -14242,6 +14345,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -14266,6 +14370,7 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -14293,6 +14398,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -14317,6 +14423,7 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -14344,6 +14451,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_operations(request) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index e0f9de656338..615219a62117 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -45,10 +45,10 @@ from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport from .client import ConfigServiceV2Client -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -256,9 +256,12 @@ def __init__(self, *, extra = { "serviceName": "google.logging.v2.ConfigServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + } ) async def list_buckets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 
f7db3e5720a5..28683c5430ce 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -633,7 +633,10 @@ def __init__(self, *, "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + } ) def list_buckets(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index a74f91a91209..eb900db2378d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -42,10 +42,10 @@ from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport from .client import LoggingServiceV2Client -try: # pragma: NO COVER +try: from google.api_core 
import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -241,9 +241,12 @@ def __init__(self, *, extra = { "serviceName": "google.logging.v2.LoggingServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + } ) async def delete_log(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index e36553ef4f93..312dced17cb1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # 
pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -564,7 +564,10 @@ def __init__(self, *, "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + } ) def delete_log(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 82c28b28969c..8f1ec70d9c3b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -43,10 +43,10 @@ from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport from .client import MetricsServiceV2Client -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -242,9 +242,12 @@ def __init__(self, *, extra = { "serviceName": "google.logging.v2.MetricsServiceV2", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": 
getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + } ) async def list_log_metrics(self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 1c4d07e10ee1..1c3112bf6485 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -565,7 +565,10 @@ def __init__(self, *, "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + } ) def list_log_metrics(self, diff 
--git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index cfc8a14b939f..2ba2cb14de65 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -46,10 +46,10 @@ from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport from .client import CloudRedisClient -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -269,9 +269,12 @@ def __init__(self, *, extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), - "credentialType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", - "credentialInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "credentialsType": None, + } ) async def list_instances(self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py 
b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 42b747703264..efdfcd898c81 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -37,10 +37,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = std_logging.getLogger(__name__) @@ -626,7 +626,10 @@ def __init__(self, *, "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), - }, + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "credentialsType": None, + } ) def list_instances(self, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 2a2124b30afc..0d6c45081de9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -46,10 +46,10 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore 
-try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ -737,8 +737,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -767,8 +771,12 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -848,8 +856,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -878,8 +890,12 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = 
json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -962,8 +978,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -992,8 +1012,12 @@ def __call__(self, resp = self._interceptor.post_export_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1076,8 +1100,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1106,8 +1134,12 @@ def __call__(self, resp = self._interceptor.post_failover_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None 
http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1184,8 +1216,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1216,8 +1252,12 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.Instance.to_json(response) + except: + response_payload = None http_response = { - "payload": cloud_redis.Instance.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1294,8 +1334,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1326,8 +1370,12 @@ def __call__(self, resp = self._interceptor.post_get_instance_auth_string(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.InstanceAuthString.to_json(response) + except: + response_payload = None http_response = { - "payload": 
cloud_redis.InstanceAuthString.to_json(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1410,8 +1458,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1440,8 +1492,12 @@ def __call__(self, resp = self._interceptor.post_import_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1520,8 +1576,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1552,8 +1612,12 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.ListInstancesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": cloud_redis.ListInstancesResponse.to_json(resp), 
+ "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1636,8 +1700,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1666,8 +1734,12 @@ def __call__(self, resp = self._interceptor.post_reschedule_maintenance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -1750,8 +1822,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1780,8 +1856,12 @@ def __call__(self, resp = self._interceptor.post_update_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), 
"status": response.status_code, } @@ -1864,8 +1944,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1894,8 +1978,12 @@ def __call__(self, resp = self._interceptor.post_upgrade_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(resp), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2063,8 +2151,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2092,8 +2184,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } 
@@ -2173,8 +2269,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2202,8 +2302,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2280,8 +2384,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2368,8 +2476,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, 
"requestUrl": request_url, "headers": dict(metadata), @@ -2459,8 +2571,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2488,8 +2604,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2569,8 +2689,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2598,8 +2722,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, 
"headers": dict(response.headers), "status": response.status_code, } @@ -2682,8 +2810,12 @@ def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2711,8 +2843,12 @@ def __call__(self, resp = json_format.Parse(content, resp) resp = self._interceptor.post_wait_operation(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index bea441bdfa04..f98123c4d3c4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -54,10 +54,10 @@ import logging -try: # pragma: NO COVER +try: from google.api_core import client_logging # type: ignore - CLIENT_LOGGING_SUPPORTED = True -except ImportError: + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER CLIENT_LOGGING_SUPPORTED = False _LOGGER = logging.getLogger(__name__) @@ 
-768,8 +768,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -803,8 +807,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_create_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -885,8 +893,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -920,8 +932,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_delete_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": 
json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1005,8 +1021,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1040,8 +1060,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_export_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1125,8 +1149,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1160,8 +1188,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_failover_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: 
NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1239,8 +1271,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1274,8 +1310,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.Instance.to_json(response) + except: + response_payload = None http_response = { - "payload": cloud_redis.Instance.to_json(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1353,8 +1393,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1388,8 +1432,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) 
resp = await self._interceptor.post_get_instance_auth_string(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.InstanceAuthString.to_json(response) + except: + response_payload = None http_response = { - "payload": cloud_redis.InstanceAuthString.to_json(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1473,8 +1521,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1508,8 +1560,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_import_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1589,8 +1645,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": 
method, "requestUrl": request_url, "headers": dict(metadata), @@ -1624,8 +1684,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_list_instances(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.ListInstancesResponse.to_json(response) + except: + response_payload = None http_response = { - "payload": cloud_redis.ListInstancesResponse.to_json(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1709,8 +1773,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1744,8 +1812,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_reschedule_maintenance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1829,8 +1901,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = 
json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1864,8 +1940,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_update_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -1949,8 +2029,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -1984,8 +2068,12 @@ async def __call__(self, json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_upgrade_instance(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": str(dict(response.headers)), "status": "OK", # need to obtain this properly } @@ -2188,8 +2276,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url 
= "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2221,8 +2313,12 @@ async def __call__(self, resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_location(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2302,8 +2398,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2335,8 +2435,12 @@ async def __call__(self, resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_locations(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2413,8 +2517,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2505,8 +2613,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2600,8 +2712,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2633,8 +2749,12 @@ async def __call__(self, resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_operation(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ 
-2714,8 +2834,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2747,8 +2871,12 @@ async def __call__(self, resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_operations(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": dict(response.headers), "status": response.status_code, } @@ -2831,8 +2959,12 @@ async def __call__(self, if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None http_request = { - "payload": type(request).to_json(request), + "payload": request_payload, "requestMethod": method, "requestUrl": request_url, "headers": dict(metadata), @@ -2864,8 +2996,12 @@ async def __call__(self, resp = json_format.Parse(content, resp) resp = await self._interceptor.post_wait_operation(resp) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None http_response = { - "payload": json_format.MessageToJson(response), + "payload": response_payload, "headers": 
dict(response.headers), "status": response.status_code, } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 3b867230b761..d46839b7a855 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -4689,6 +4689,7 @@ def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstan response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) @@ -4733,6 +4734,7 @@ def test_list_instances_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_instances(**mock_args) @@ -4915,6 +4917,7 @@ def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceR response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) @@ -4959,6 +4962,7 @@ def test_get_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_instance(**mock_args) @@ -5079,6 +5083,7 @@ def test_get_instance_auth_string_rest_required_fields(request_type=cloud_redis. 
response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance_auth_string(request) @@ -5123,6 +5128,7 @@ def test_get_instance_auth_string_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_instance_auth_string(**mock_args) @@ -5254,6 +5260,7 @@ def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_instance(request) @@ -5302,6 +5309,7 @@ def test_create_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_instance(**mock_args) @@ -5423,6 +5431,7 @@ def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) @@ -5466,6 +5475,7 @@ def test_update_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_instance(**mock_args) @@ -5593,6 +5603,7 @@ def test_upgrade_instance_rest_required_fields(request_type=cloud_redis.UpgradeI response_value._content = 
json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.upgrade_instance(request) @@ -5636,6 +5647,7 @@ def test_upgrade_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.upgrade_instance(**mock_args) @@ -5759,6 +5771,7 @@ def test_import_instance_rest_required_fields(request_type=cloud_redis.ImportIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_instance(request) @@ -5802,6 +5815,7 @@ def test_import_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_instance(**mock_args) @@ -5925,6 +5939,7 @@ def test_export_instance_rest_required_fields(request_type=cloud_redis.ExportIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_instance(request) @@ -5968,6 +5983,7 @@ def test_export_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_instance(**mock_args) @@ -6091,6 +6107,7 @@ def test_failover_instance_rest_required_fields(request_type=cloud_redis.Failove response_value._content = json_return_value.encode('UTF-8') req.return_value = 
response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.failover_instance(request) @@ -6134,6 +6151,7 @@ def test_failover_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.failover_instance(**mock_args) @@ -6256,6 +6274,7 @@ def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteIns response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) @@ -6298,6 +6317,7 @@ def test_delete_instance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_instance(**mock_args) @@ -6420,6 +6440,7 @@ def test_reschedule_maintenance_rest_required_fields(request_type=cloud_redis.Re response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reschedule_maintenance(request) @@ -6464,6 +6485,7 @@ def test_reschedule_maintenance_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reschedule_maintenance(**mock_args) @@ -7210,6 +7232,7 @@ def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_instances(request) @@ -7244,6 +7267,7 @@ def test_list_instances_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_instances(request) # Establish that the response is the type that we expect. @@ -7276,6 +7300,7 @@ def test_list_instances_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) req.return_value.content = return_value @@ -7311,6 +7336,7 @@ def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_instance(request) @@ -7370,6 +7396,7 @@ def test_get_instance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance(request) # Establish that the response is the type that we expect. 
@@ -7427,6 +7454,7 @@ def test_get_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) req.return_value.content = return_value @@ -7462,6 +7490,7 @@ def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetI response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_instance_auth_string(request) @@ -7495,6 +7524,7 @@ def test_get_instance_auth_string_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_instance_auth_string(request) # Establish that the response is the type that we expect. 
@@ -7526,6 +7556,7 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) req.return_value.content = return_value @@ -7561,6 +7592,7 @@ def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanc response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_instance(request) @@ -7653,6 +7685,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_instance(request) # Establish that the response is the type that we expect. 
@@ -7684,6 +7717,7 @@ def test_create_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -7719,6 +7753,7 @@ def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanc response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_instance(request) @@ -7811,6 +7846,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_instance(request) # Establish that the response is the type that we expect. 
@@ -7842,6 +7878,7 @@ def test_update_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -7877,6 +7914,7 @@ def test_upgrade_instance_rest_bad_request(request_type=cloud_redis.UpgradeInsta response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.upgrade_instance(request) @@ -7905,6 +7943,7 @@ def test_upgrade_instance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.upgrade_instance(request) # Establish that the response is the type that we expect. 
@@ -7936,6 +7975,7 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -7971,6 +8011,7 @@ def test_import_instance_rest_bad_request(request_type=cloud_redis.ImportInstanc response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.import_instance(request) @@ -7999,6 +8040,7 @@ def test_import_instance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.import_instance(request) # Establish that the response is the type that we expect. 
@@ -8030,6 +8072,7 @@ def test_import_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -8065,6 +8108,7 @@ def test_export_instance_rest_bad_request(request_type=cloud_redis.ExportInstanc response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.export_instance(request) @@ -8093,6 +8137,7 @@ def test_export_instance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.export_instance(request) # Establish that the response is the type that we expect. 
@@ -8124,6 +8169,7 @@ def test_export_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -8159,6 +8205,7 @@ def test_failover_instance_rest_bad_request(request_type=cloud_redis.FailoverIns response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.failover_instance(request) @@ -8187,6 +8234,7 @@ def test_failover_instance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.failover_instance(request) # Establish that the response is the type that we expect. 
@@ -8218,6 +8266,7 @@ def test_failover_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -8253,6 +8302,7 @@ def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanc response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_instance(request) @@ -8281,6 +8331,7 @@ def test_delete_instance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_instance(request) # Establish that the response is the type that we expect. 
@@ -8312,6 +8363,7 @@ def test_delete_instance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -8347,6 +8399,7 @@ def test_reschedule_maintenance_rest_bad_request(request_type=cloud_redis.Resche response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.reschedule_maintenance(request) @@ -8375,6 +8428,7 @@ def test_reschedule_maintenance_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.reschedule_maintenance(request) # Establish that the response is the type that we expect. 
@@ -8406,6 +8460,7 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value @@ -8440,6 +8495,7 @@ def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationReq response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_location(request) @@ -8467,6 +8523,7 @@ def test_get_location_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_location(request) @@ -8491,6 +8548,7 @@ def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocation response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_locations(request) @@ -8518,6 +8576,7 @@ def test_list_locations_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_locations(request) @@ -8542,6 +8601,7 @@ def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOpe response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.cancel_operation(request) @@ -8569,6 +8629,7 @@ def test_cancel_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.cancel_operation(request) @@ -8593,6 +8654,7 @@ def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOpe response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_operation(request) @@ -8620,6 +8682,7 @@ def test_delete_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_operation(request) @@ -8644,6 +8707,7 @@ def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperation response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_operation(request) @@ -8671,6 +8735,7 @@ def test_get_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_operation(request) @@ -8695,6 +8760,7 @@ def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperat response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_operations(request) @@ -8722,6 +8788,7 @@ def test_list_operations_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_operations(request) @@ -8746,6 +8813,7 @@ def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperati 
response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.wait_operation(request) @@ -8773,6 +8841,7 @@ def test_wait_operation_rest(request_type): response_value.content = json_return_value.encode('UTF-8') req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.wait_operation(request) @@ -9074,6 +9143,7 @@ async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis. response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_instances(request) @@ -9111,6 +9181,7 @@ async def test_list_instances_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.list_instances(request) # Establish that the response is the type that we expect. 
@@ -9146,6 +9217,7 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9182,6 +9254,7 @@ async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.Ge response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_instance(request) @@ -9244,6 +9317,7 @@ async def test_get_instance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_instance(request) # Establish that the response is the type that we expect. 
@@ -9304,6 +9378,7 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9340,6 +9415,7 @@ async def test_get_instance_auth_string_rest_asyncio_bad_request(request_type=cl response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_instance_auth_string(request) @@ -9376,6 +9452,7 @@ async def test_get_instance_auth_string_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_instance_auth_string(request) # Establish that the response is the type that we expect. 
@@ -9410,6 +9487,7 @@ async def test_get_instance_auth_string_rest_asyncio_interceptors(null_intercept req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = cloud_redis.InstanceAuthString.to_json(cloud_redis.InstanceAuthString()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9446,6 +9524,7 @@ async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.create_instance(request) @@ -9541,6 +9620,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.create_instance(request) # Establish that the response is the type that we expect. 
@@ -9575,6 +9655,7 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9611,6 +9692,7 @@ async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.update_instance(request) @@ -9706,6 +9788,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.update_instance(request) # Establish that the response is the type that we expect. 
@@ -9740,6 +9823,7 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9776,6 +9860,7 @@ async def test_upgrade_instance_rest_asyncio_bad_request(request_type=cloud_redi response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.upgrade_instance(request) @@ -9807,6 +9892,7 @@ async def test_upgrade_instance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.upgrade_instance(request) # Establish that the response is the type that we expect. 
@@ -9841,6 +9927,7 @@ async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9877,6 +9964,7 @@ async def test_import_instance_rest_asyncio_bad_request(request_type=cloud_redis response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.import_instance(request) @@ -9908,6 +9996,7 @@ async def test_import_instance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.import_instance(request) # Establish that the response is the type that we expect. 
@@ -9942,6 +10031,7 @@ async def test_import_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -9978,6 +10068,7 @@ async def test_export_instance_rest_asyncio_bad_request(request_type=cloud_redis response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.export_instance(request) @@ -10009,6 +10100,7 @@ async def test_export_instance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.export_instance(request) # Establish that the response is the type that we expect. 
@@ -10043,6 +10135,7 @@ async def test_export_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -10079,6 +10172,7 @@ async def test_failover_instance_rest_asyncio_bad_request(request_type=cloud_red response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.failover_instance(request) @@ -10110,6 +10204,7 @@ async def test_failover_instance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.failover_instance(request) # Establish that the response is the type that we expect. 
@@ -10144,6 +10239,7 @@ async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -10180,6 +10276,7 @@ async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.delete_instance(request) @@ -10211,6 +10308,7 @@ async def test_delete_instance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.delete_instance(request) # Establish that the response is the type that we expect. 
@@ -10245,6 +10343,7 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -10281,6 +10380,7 @@ async def test_reschedule_maintenance_rest_asyncio_bad_request(request_type=clou response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.reschedule_maintenance(request) @@ -10312,6 +10412,7 @@ async def test_reschedule_maintenance_rest_asyncio_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.reschedule_maintenance(request) # Establish that the response is the type that we expect. @@ -10346,6 +10447,7 @@ async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.read = mock.AsyncMock(return_value=return_value) @@ -10381,6 +10483,7 @@ async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2. 
response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_location(request) @pytest.mark.asyncio @@ -10410,6 +10513,7 @@ async def test_get_location_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_location(request) @@ -10435,6 +10539,7 @@ async def test_list_locations_rest_asyncio_bad_request(request_type=locations_pb response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_locations(request) @pytest.mark.asyncio @@ -10464,6 +10569,7 @@ async def test_list_locations_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.list_locations(request) @@ -10489,6 +10595,7 @@ async def test_cancel_operation_rest_asyncio_bad_request(request_type=operations response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.cancel_operation(request) @pytest.mark.asyncio @@ -10518,6 +10625,7 @@ async def test_cancel_operation_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.cancel_operation(request) @@ -10543,6 +10651,7 @@ async def 
test_delete_operation_rest_asyncio_bad_request(request_type=operations response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.delete_operation(request) @pytest.mark.asyncio @@ -10572,6 +10681,7 @@ async def test_delete_operation_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.delete_operation(request) @@ -10597,6 +10707,7 @@ async def test_get_operation_rest_asyncio_bad_request(request_type=operations_pb response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.get_operation(request) @pytest.mark.asyncio @@ -10626,6 +10737,7 @@ async def test_get_operation_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.get_operation(request) @@ -10651,6 +10763,7 @@ async def test_list_operations_rest_asyncio_bad_request(request_type=operations_ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.list_operations(request) @pytest.mark.asyncio @@ -10680,6 +10793,7 @@ async def test_list_operations_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.list_operations(request) @@ -10705,6 +10819,7 @@ 
async def test_wait_operation_rest_asyncio_bad_request(request_type=operations_p response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} await client.wait_operation(request) @pytest.mark.asyncio @@ -10734,6 +10849,7 @@ async def test_wait_operation_rest_asyncio(request_type): response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = await client.wait_operation(request) From 0af7cf47c54ef8811b5de3f3d23f25afd8cbb1a9 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Tue, 10 Dec 2024 12:37:09 -0800 Subject: [PATCH 1226/1339] fix(log): preserve dict of rest async response headers (#2288) --- .../%service/transports/rest_asyncio.py.j2 | 2 +- .../cloud_redis/transports/rest_asyncio.py | 22 +++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 4a7dcf870e8b..99658a0b1495 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -228,7 +228,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index f98123c4d3c4..9ce8542f78c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -813,7 +813,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -938,7 +938,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1066,7 +1066,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1194,7 +1194,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1316,7 +1316,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1438,7 +1438,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": 
str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1566,7 +1566,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1690,7 +1690,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1818,7 +1818,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -1946,7 +1946,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( @@ -2074,7 +2074,7 @@ async def __call__(self, response_payload = None http_response = { "payload": response_payload, - "headers": str(dict(response.headers)), + "headers": dict(response.headers), "status": "OK", # need to obtain this properly } _LOGGER.debug( From d964d0959053eb45ee49ba5aa9e2d4f1322e4cad Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 12 Dec 2024 01:30:20 +0500 Subject: [PATCH 1227/1339] feat: add client logging support for sync gRPC (#2284) --- .../services/%service/transports/grpc.py.j2 | 93 +++++++++++- .../services/asset_service/transports/grpc.py | 123 +++++++++++---- .../iam_credentials/transports/grpc.py | 83 ++++++++++- .../services/eventarc/transports/grpc.py | 113 +++++++++++--- .../config_service_v2/transports/grpc.py | 141 +++++++++++++----- .../logging_service_v2/transports/grpc.py 
| 87 ++++++++++- .../metrics_service_v2/transports/grpc.py | 85 ++++++++++- .../services/cloud_redis/transports/grpc.py | 99 ++++++++++-- 8 files changed, 700 insertions(+), 124 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 69b92f162e98..ede5a4831b85 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -2,6 +2,8 @@ {% block content %} +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -13,8 +15,11 @@ from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore {% filter sort_lines %} {% set import_ns = namespace(has_operations_mixin=false) %} @@ -42,6 +47,77 @@ from google.longrunning import operations_pb2 # type: ignore {% endfilter %} from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if 
logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. + #} + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": client_call_details.method, + "request": grpc_request, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #} + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. 
+ #} + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": client_call_details.method, + "response": grpc_response, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #} + "metadata": grpc_response["metadata"], + }, + ) + return response + class {{ service.name }}GrpcTransport({{ service.name }}Transport): """gRPC backend transport for {{ service.name }}. @@ -195,7 +271,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @@ -262,7 +341,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -292,7 +371,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: - self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self._logged_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, @@ -320,7 +399,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -346,7 +425,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -374,7 +453,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, @@ -383,7 +462,7 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): {% endif %} def close(self): - self.grpc_channel.close() + self._logged_channel.close() {% include '%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2' %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 63790c16d115..dca9171f124e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +24,80 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if 
logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class AssetServiceGrpcTransport(AssetServiceTransport): """gRPC backend transport for AssetService. @@ -181,7 +249,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -245,7 +316,7 @@ def operations_client(self) -> operations_v1.OperationsClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -282,7 +353,7 @@ def export_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self.grpc_channel.unary_unary( + self._stubs['export_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ExportAssets', request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -309,7 +380,7 @@ def list_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self.grpc_channel.unary_unary( + self._stubs['list_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListAssets', request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, @@ -341,7 +412,7 @@ def batch_get_assets_history(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, @@ -369,7 +440,7 @@ def create_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self.grpc_channel.unary_unary( + self._stubs['create_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateFeed', request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -395,7 +466,7 @@ def get_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self.grpc_channel.unary_unary( + self._stubs['get_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetFeed', request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -422,7 +493,7 @@ def list_feeds(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self.grpc_channel.unary_unary( + self._stubs['list_feeds'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListFeeds', request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, @@ -448,7 +519,7 @@ def update_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self.grpc_channel.unary_unary( + self._stubs['update_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateFeed', request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -474,7 +545,7 @@ def delete_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self.grpc_channel.unary_unary( + self._stubs['delete_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteFeed', request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -503,7 +574,7 @@ def search_all_resources(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self.grpc_channel.unary_unary( + self._stubs['search_all_resources'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllResources', request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, @@ -532,7 +603,7 @@ def search_all_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, @@ -559,7 +630,7 @@ def analyze_iam_policy(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, @@ -596,7 +667,7 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -628,7 +699,7 @@ def analyze_move(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + self._stubs['analyze_move'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeMove', request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, @@ -669,7 +740,7 @@ def query_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self.grpc_channel.unary_unary( + self._stubs['query_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/QueryAssets', request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, @@ -696,7 +767,7 @@ def create_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['create_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateSavedQuery', request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -722,7 +793,7 @@ def get_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['get_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetSavedQuery', request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -749,7 +820,7 @@ def list_saved_queries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListSavedQueries', request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, @@ -775,7 +846,7 @@ def update_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['update_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -801,7 +872,7 @@ def delete_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -828,7 +899,7 @@ def batch_get_effective_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, @@ -854,7 +925,7 @@ def analyze_org_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, @@ -882,7 +953,7 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, @@ -927,7 +998,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, @@ -935,7 +1006,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ return self._stubs['analyze_org_policy_governed_assets'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def get_operation( diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index b0dd5bb66940..62413fb4bf4b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
    """Unary-unary client interceptor that emits DEBUG logs for each RPC.

    Before invoking the call it logs the serialized request payload and
    request metadata; after the call it logs the response payload, trailing
    metadata, and status. Logging only happens when
    ``google.api_core.client_logging`` is importable
    (``CLIENT_LOGGING_SUPPORTED``) and the module logger is enabled for
    DEBUG, so the interceptor is a no-op in normal operation.
    """

    def intercept_unary_unary(self, continuation, client_call_details, request):
        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
        if logging_enabled:  # pragma: NO COVER
            request_metadata = client_call_details.metadata
            if isinstance(request, proto.Message):
                request_payload = type(request).to_json(request)
            elif isinstance(request, google.protobuf.message.Message):
                request_payload = MessageToJson(request)
            else:
                # Fallback for non-protobuf requests. BUGFIX: the original
                # referenced ``result`` here, which is not defined until after
                # the call completes, raising NameError on this path.
                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
            grpc_request = {
                "payload": request_payload,
                "requestMethod": "grpc",
                "metadata": dict(request_metadata),
            }
            _LOGGER.debug(
                f"Sending request for {client_call_details.method}",
                extra={
                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
                    "rpcName": client_call_details.method,
                    "request": grpc_request,
                    "metadata": grpc_request["metadata"],
                },
            )

        response = continuation(client_call_details, request)
        if logging_enabled:  # pragma: NO COVER
            response_metadata = response.trailing_metadata()
            # Convert the gRPC trailing metadata (sequence of key/value
            # 2-tuples) into a plain dict, or None when there is none.
            metadata = dict(response_metadata) if response_metadata else None
            result = response.result()
            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = MessageToJson(result)
            else:
                # Fallback for non-protobuf results.
                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
            grpc_response = {
                "payload": response_payload,
                "metadata": metadata,
                "status": "OK",
            }
            _LOGGER.debug(
                f"Received response for {client_call_details.method}.",
                extra={
                    "serviceName": "google.iam.credentials.v1.IAMCredentials",
                    "rpcName": client_call_details.method,
                    "response": grpc_response,
                    "metadata": grpc_response["metadata"],
                },
            )
        return response
if 'generate_access_token' not in self._stubs: - self._stubs['generate_access_token'] = self.grpc_channel.unary_unary( + self._stubs['generate_access_token'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', request_serializer=common.GenerateAccessTokenRequest.serialize, response_deserializer=common.GenerateAccessTokenResponse.deserialize, @@ -287,7 +358,7 @@ def generate_id_token(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'generate_id_token' not in self._stubs: - self._stubs['generate_id_token'] = self.grpc_channel.unary_unary( + self._stubs['generate_id_token'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', request_serializer=common.GenerateIdTokenRequest.serialize, response_deserializer=common.GenerateIdTokenResponse.deserialize, @@ -314,7 +385,7 @@ def sign_blob(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'sign_blob' not in self._stubs: - self._stubs['sign_blob'] = self.grpc_channel.unary_unary( + self._stubs['sign_blob'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/SignBlob', request_serializer=common.SignBlobRequest.serialize, response_deserializer=common.SignBlobResponse.deserialize, @@ -341,7 +412,7 @@ def sign_jwt(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'sign_jwt' not in self._stubs: - self._stubs['sign_jwt'] = self.grpc_channel.unary_unary( + self._stubs['sign_jwt'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/SignJwt', request_serializer=common.SignJwtRequest.serialize, response_deserializer=common.SignJwtResponse.deserialize, @@ -349,7 +420,7 @@ def sign_jwt(self) -> Callable[ return self._stubs['sign_jwt'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def kind(self) -> str: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 54dc68143560..c7d20f208fa1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
    """Unary-unary client interceptor that emits DEBUG logs for each RPC.

    Before invoking the call it logs the serialized request payload and
    request metadata; after the call it logs the response payload, trailing
    metadata, and status. Logging only happens when
    ``google.api_core.client_logging`` is importable
    (``CLIENT_LOGGING_SUPPORTED``) and the module logger is enabled for
    DEBUG, so the interceptor is a no-op in normal operation.
    """

    def intercept_unary_unary(self, continuation, client_call_details, request):
        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
        if logging_enabled:  # pragma: NO COVER
            request_metadata = client_call_details.metadata
            if isinstance(request, proto.Message):
                request_payload = type(request).to_json(request)
            elif isinstance(request, google.protobuf.message.Message):
                request_payload = MessageToJson(request)
            else:
                # Fallback for non-protobuf requests. BUGFIX: the original
                # referenced ``result`` here, which is not defined until after
                # the call completes, raising NameError on this path.
                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
            grpc_request = {
                "payload": request_payload,
                "requestMethod": "grpc",
                "metadata": dict(request_metadata),
            }
            _LOGGER.debug(
                f"Sending request for {client_call_details.method}",
                extra={
                    "serviceName": "google.cloud.eventarc.v1.Eventarc",
                    "rpcName": client_call_details.method,
                    "request": grpc_request,
                    "metadata": grpc_request["metadata"],
                },
            )

        response = continuation(client_call_details, request)
        if logging_enabled:  # pragma: NO COVER
            response_metadata = response.trailing_metadata()
            # Convert the gRPC trailing metadata (sequence of key/value
            # 2-tuples) into a plain dict, or None when there is none.
            metadata = dict(response_metadata) if response_metadata else None
            result = response.result()
            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = MessageToJson(result)
            else:
                # Fallback for non-protobuf results.
                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
            grpc_response = {
                "payload": response_payload,
                "metadata": metadata,
                "status": "OK",
            }
            _LOGGER.debug(
                f"Received response for {client_call_details.method}.",
                extra={
                    "serviceName": "google.cloud.eventarc.v1.Eventarc",
                    "rpcName": client_call_details.method,
                    "response": grpc_response,
                    "metadata": grpc_response["metadata"],
                },
            )
        return response
if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self.grpc_channel.unary_unary( + self._stubs['get_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetTrigger', request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, @@ -306,7 +377,7 @@ def list_triggers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self.grpc_channel.unary_unary( + self._stubs['list_triggers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListTriggers', request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, @@ -333,7 +404,7 @@ def create_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self.grpc_channel.unary_unary( + self._stubs['create_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -359,7 +430,7 @@ def update_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self.grpc_channel.unary_unary( + self._stubs['update_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -385,7 +456,7 @@ def delete_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self.grpc_channel.unary_unary( + self._stubs['delete_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -411,7 +482,7 @@ def get_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self.grpc_channel.unary_unary( + self._stubs['get_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannel', request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, @@ -437,7 +508,7 @@ def list_channels(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self.grpc_channel.unary_unary( + self._stubs['list_channels'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannels', request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, @@ -464,7 +535,7 @@ def create_channel_(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannel', request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -490,7 +561,7 @@ def update_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self.grpc_channel.unary_unary( + self._stubs['update_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -516,7 +587,7 @@ def delete_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -542,7 +613,7 @@ def get_provider(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self.grpc_channel.unary_unary( + self._stubs['get_provider'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetProvider', request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, @@ -568,7 +639,7 @@ def list_providers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self.grpc_channel.unary_unary( + self._stubs['list_providers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListProviders', request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, @@ -594,7 +665,7 @@ def get_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, @@ -620,7 +691,7 @@ def list_channel_connections(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, @@ -647,7 +718,7 @@ def create_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -673,7 +744,7 @@ def delete_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -699,7 +770,7 @@ def get_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, @@ -725,7 +796,7 @@ def update_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, @@ -733,7 +804,7 @@ def update_google_channel_config(self) -> Callable[ return self._stubs['update_google_channel_config'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 08c0c4707bb5..b8de6fe5d114 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
    """Unary-unary client interceptor that emits DEBUG logs for each RPC.

    Before invoking the call it logs the serialized request payload and
    request metadata; after the call it logs the response payload, trailing
    metadata, and status. Logging only happens when
    ``google.api_core.client_logging`` is importable
    (``CLIENT_LOGGING_SUPPORTED``) and the module logger is enabled for
    DEBUG, so the interceptor is a no-op in normal operation.
    """

    def intercept_unary_unary(self, continuation, client_call_details, request):
        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG)
        if logging_enabled:  # pragma: NO COVER
            request_metadata = client_call_details.metadata
            if isinstance(request, proto.Message):
                request_payload = type(request).to_json(request)
            elif isinstance(request, google.protobuf.message.Message):
                request_payload = MessageToJson(request)
            else:
                # Fallback for non-protobuf requests. BUGFIX: the original
                # referenced ``result`` here, which is not defined until after
                # the call completes, raising NameError on this path.
                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
            grpc_request = {
                "payload": request_payload,
                "requestMethod": "grpc",
                "metadata": dict(request_metadata),
            }
            _LOGGER.debug(
                f"Sending request for {client_call_details.method}",
                extra={
                    "serviceName": "google.logging.v2.ConfigServiceV2",
                    "rpcName": client_call_details.method,
                    "request": grpc_request,
                    "metadata": grpc_request["metadata"],
                },
            )

        response = continuation(client_call_details, request)
        if logging_enabled:  # pragma: NO COVER
            response_metadata = response.trailing_metadata()
            # Convert the gRPC trailing metadata (sequence of key/value
            # 2-tuples) into a plain dict, or None when there is none.
            metadata = dict(response_metadata) if response_metadata else None
            result = response.result()
            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = MessageToJson(result)
            else:
                # Fallback for non-protobuf results.
                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
            grpc_response = {
                "payload": response_payload,
                "metadata": metadata,
                "status": "OK",
            }
            _LOGGER.debug(
                f"Received response for {client_call_details.method}.",
                extra={
                    "serviceName": "google.logging.v2.ConfigServiceV2",
                    "rpcName": client_call_details.method,
                    "response": grpc_response,
                    "metadata": grpc_response["metadata"],
                },
            )
        return response
if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self.grpc_channel.unary_unary( + self._stubs['list_buckets'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListBuckets', request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -296,7 +367,7 @@ def get_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self.grpc_channel.unary_unary( + self._stubs['get_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetBucket', request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -325,7 +396,7 @@ def create_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -357,7 +428,7 @@ def update_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -385,7 +456,7 @@ def create_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucket', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -417,7 +488,7 @@ def update_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucket', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -448,7 +519,7 @@ def delete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['delete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteBucket', request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -476,7 +547,7 @@ def undelete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UndeleteBucket', request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -502,7 +573,7 @@ def list_views(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_views' not in self._stubs: - self._stubs['list_views'] = self.grpc_channel.unary_unary( + self._stubs['list_views'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListViews', request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -528,7 +599,7 @@ def get_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_view' not in self._stubs: - self._stubs['get_view'] = self.grpc_channel.unary_unary( + self._stubs['get_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetView', request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -555,7 +626,7 @@ def create_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_view' not in self._stubs: - self._stubs['create_view'] = self.grpc_channel.unary_unary( + self._stubs['create_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateView', request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -585,7 +656,7 @@ def update_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_view' not in self._stubs: - self._stubs['update_view'] = self.grpc_channel.unary_unary( + self._stubs['update_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateView', request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -614,7 +685,7 @@ def delete_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self.grpc_channel.unary_unary( + self._stubs['delete_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteView', request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -640,7 +711,7 @@ def list_sinks(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + self._stubs['list_sinks'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListSinks', request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -666,7 +737,7 @@ def get_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self.grpc_channel.unary_unary( + self._stubs['get_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSink', request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -696,7 +767,7 @@ def create_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self.grpc_channel.unary_unary( + self._stubs['create_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateSink', request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -727,7 +798,7 @@ def update_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self.grpc_channel.unary_unary( + self._stubs['update_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSink', request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -754,7 +825,7 @@ def delete_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + self._stubs['delete_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteSink', request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -783,7 +854,7 @@ def create_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_link' not in self._stubs: - self._stubs['create_link'] = self.grpc_channel.unary_unary( + self._stubs['create_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateLink', request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -810,7 +881,7 @@ def delete_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self.grpc_channel.unary_unary( + self._stubs['delete_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteLink', request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -836,7 +907,7 @@ def list_links(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_links' not in self._stubs: - self._stubs['list_links'] = self.grpc_channel.unary_unary( + self._stubs['list_links'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListLinks', request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, @@ -862,7 +933,7 @@ def get_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_link' not in self._stubs: - self._stubs['get_link'] = self.grpc_channel.unary_unary( + self._stubs['get_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetLink', request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, @@ -889,7 +960,7 @@ def list_exclusions(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + self._stubs['list_exclusions'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListExclusions', request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -915,7 +986,7 @@ def get_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['get_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetExclusion', request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -943,7 +1014,7 @@ def create_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['create_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateExclusion', request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -970,7 +1041,7 @@ def update_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['update_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateExclusion', request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -996,7 +1067,7 @@ def delete_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteExclusion', request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1031,7 +1102,7 @@ def get_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetCmekSettings', request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1071,7 +1142,7 @@ def update_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1107,7 +1178,7 @@ def get_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSettings', request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1150,7 +1221,7 @@ def update_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSettings', request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1177,7 +1248,7 @@ def copy_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CopyLogEntries', request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1185,7 +1256,7 @@ def copy_log_entries(self) -> Callable[ return self._stubs['copy_log_entries'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index aff5aad6e120..de25f5f6fdfa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,14 +23,80 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if 
logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): """gRPC backend transport for LoggingServiceV2. @@ -179,7 +247,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -256,7 +327,7 @@ def delete_log(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self.grpc_channel.unary_unary( + self._stubs['delete_log'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/DeleteLog', request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -288,7 +359,7 @@ def write_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['write_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/WriteLogEntries', request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -317,7 +388,7 @@ def list_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['list_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogEntries', request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -345,7 +416,7 @@ def list_monitored_resource_descriptors(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -373,7 +444,7 @@ def list_logs(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self.grpc_channel.unary_unary( + self._stubs['list_logs'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogs', request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -401,7 +472,7 @@ def tail_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( '/google.logging.v2.LoggingServiceV2/TailLogEntries', request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -409,7 +480,7 @@ def tail_log_entries(self) -> Callable[ return self._stubs['tail_log_entries'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 04c8ab051225..9f6330cbb3b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,14 +23,80 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.logging_v2.types import logging_metrics from google.longrunning import operations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, 
request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): """gRPC backend transport for MetricsServiceV2. @@ -179,7 +247,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -252,7 +323,7 @@ def list_log_metrics(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/ListLogMetrics', request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -278,7 +349,7 @@ def get_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['get_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/GetLogMetric', request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -304,7 +375,7 @@ def create_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['create_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/CreateLogMetric', request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -330,7 +401,7 @@ def update_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['update_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -356,7 +427,7 @@ def delete_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -364,7 +435,7 @@ def delete_log_metric(self) -> Callable[ return self._stubs['delete_log_metric'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def cancel_operation( diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 0df383915736..cf5ec9a7fe97 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +24,80 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis from google.longrunning import operations_pb2 # type: ignore from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + 
if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class CloudRedisGrpcTransport(CloudRedisTransport): """gRPC backend transport for CloudRedis. @@ -201,7 +269,10 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -265,7 +336,7 @@ def operations_client(self) -> operations_v1.OperationsClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -299,7 +370,7 @@ def list_instances(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self.grpc_channel.unary_unary( + self._stubs['list_instances'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ListInstances', request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, @@ -325,7 +396,7 @@ def get_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self.grpc_channel.unary_unary( + self._stubs['get_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstance', request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, @@ -354,7 +425,7 @@ def get_instance_auth_string(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, @@ -393,7 +464,7 @@ def create_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self.grpc_channel.unary_unary( + self._stubs['create_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/CreateInstance', request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -424,7 +495,7 @@ def update_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self.grpc_channel.unary_unary( + self._stubs['update_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpdateInstance', request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -451,7 +522,7 @@ def upgrade_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary( + self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -485,7 +556,7 @@ def import_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self.grpc_channel.unary_unary( + self._stubs['import_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ImportInstance', request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -516,7 +587,7 @@ def export_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self.grpc_channel.unary_unary( + self._stubs['export_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ExportInstance', request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -544,7 +615,7 @@ def failover_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self.grpc_channel.unary_unary( + self._stubs['failover_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/FailoverInstance', request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -571,7 +642,7 @@ def delete_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + self._stubs['delete_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/DeleteInstance', request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -598,7 +669,7 @@ def reschedule_maintenance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -606,7 +677,7 @@ def reschedule_maintenance(self) -> Callable[ return self._stubs['reschedule_maintenance'] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def delete_operation( From fffa457d4b0626534a9226d1c3fe5528a3e9e722 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 12 Dec 2024 03:48:39 +0500 Subject: [PATCH 1228/1339] feat: add client debug logging support for async gRPC (#2291) --- .../%service/transports/_mixins.py.j2 | 20 +-- .../services/%service/transports/grpc.py.j2 | 10 +- .../%service/transports/grpc_asyncio.py.j2 | 100 +++++++++++- .../services/asset_service/transports/grpc.py | 12 +- .../asset_service/transports/grpc_asyncio.py | 130 +++++++++++---- .../iam_credentials/transports/grpc.py | 10 +- .../transports/grpc_asyncio.py | 88 +++++++++- .../services/eventarc/transports/grpc.py | 28 ++-- .../eventarc/transports/grpc_asyncio.py | 136 ++++++++++++---- .../config_service_v2/transports/grpc.py | 16 +- .../transports/grpc_asyncio.py | 152 +++++++++++++----- .../logging_service_v2/transports/grpc.py | 16 +- 
.../transports/grpc_asyncio.py | 98 +++++++++-- .../metrics_service_v2/transports/grpc.py | 16 +- .../transports/grpc_asyncio.py | 96 +++++++++-- .../services/cloud_redis/transports/grpc.py | 24 +-- .../cloud_redis/transports/grpc_asyncio.py | 118 +++++++++++--- 17 files changed, 867 insertions(+), 203 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 index 84b085ee1a35..3458cc78e811 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/_mixins.py.j2 @@ -32,7 +32,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -52,7 +52,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -72,7 +72,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "wait_operation" not in self._stubs: - self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + self._stubs["wait_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, response_deserializer=None, @@ -92,7 +92,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -112,7 +112,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -136,7 +136,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -156,7 +156,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, @@ -188,7 +188,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -216,7 +216,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -246,7 +246,7 @@ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index ede5a4831b85..cb3ee8cfa666 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -2,6 +2,7 @@ {% block content %} +import json import logging as std_logging import pickle import warnings @@ -69,7 +70,12 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -90,7 +96,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = 
response.result() if isinstance(result, proto.Message): {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 0bb3126e5f11..650f4a2c6533 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -4,6 +4,9 @@ {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -16,8 +19,11 @@ from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore {% filter sort_lines %} @@ -47,6 +53,81 @@ from google.longrunning import operations_pb2 # type: ignore from .base import {{ service.name }}Transport, DEFAULT_CLIENT_INFO from .grpc import {{ service.name }}GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def 
intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. + #} + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": str(client_call_details.method), + "request": grpc_request, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport.' #} + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. 
+ #} + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": str(client_call_details.method), + "response": grpc_response, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport.' #} + "metadata": grpc_response["metadata"], + }, + ) + return response + class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): """gRPC AsyncIO backend transport for {{ service.name }}. @@ -242,8 +323,11 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -267,7 +351,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. 
@@ -297,7 +381,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if '{{ method.transport_safe_name|snake_case }}' not in self._stubs: - self._stubs['{{ method.transport_safe_name|snake_case }}'] = self.grpc_channel.{{ method.grpc_stub_type }}( + self._stubs['{{ method.transport_safe_name|snake_case }}'] = self._logged_channel.{{ method.grpc_stub_type }}( '/{{ '.'.join(method.meta.address.package) }}.{{ service.name }}/{{ method.name }}', request_serializer={{ method.input.ident }}.{% if method.input.ident.python_import.module.endswith('_pb2') %}SerializeToString{% else %}serialize{% endif %}, response_deserializer={{ method.output.ident }}.{% if method.output.ident.python_import.module.endswith('_pb2') %}FromString{% else %}deserialize{% endif %}, @@ -325,7 +409,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -351,7 +435,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -380,7 +464,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, @@ -393,7 +477,7 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): {{ shared_macros.wrap_async_method_macro()|indent(4) }} def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -405,4 +489,4 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): __all__ = ( '{{ service.name }}GrpcAsyncIOTransport', ) -{% endblock %} +{% endblock %} \ No newline at end of file diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index dca9171f124e..519ecdec2bd3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -13,6 +13,7 @@ # See 
the License for the specific language governing permissions and # limitations under the License. # +import json import logging as std_logging import pickle import warnings @@ -54,7 +55,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -74,7 +80,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -1019,7 +1025,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 36cdea1121d0..54666f93ecc5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. # import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.asset_v1.types import asset_service @@ -34,6 +40,73 @@ from .base import AssetServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AssetServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class 
_LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.asset.v1.AssetService", + "rpcName": 
str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class AssetServiceGrpcAsyncIOTransport(AssetServiceTransport): """gRPC AsyncIO backend transport for AssetService. @@ -227,8 +300,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -251,7 +327,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -288,7 +364,7 @@ def export_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_assets' not in self._stubs: - self._stubs['export_assets'] = self.grpc_channel.unary_unary( + self._stubs['export_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ExportAssets', request_serializer=asset_service.ExportAssetsRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -315,7 +391,7 @@ def list_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_assets' not in self._stubs: - self._stubs['list_assets'] = self.grpc_channel.unary_unary( + self._stubs['list_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListAssets', request_serializer=asset_service.ListAssetsRequest.serialize, response_deserializer=asset_service.ListAssetsResponse.deserialize, @@ -347,7 +423,7 @@ def batch_get_assets_history(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_assets_history' not in self._stubs: - self._stubs['batch_get_assets_history'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_assets_history'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory', request_serializer=asset_service.BatchGetAssetsHistoryRequest.serialize, response_deserializer=asset_service.BatchGetAssetsHistoryResponse.deserialize, @@ -375,7 +451,7 @@ def create_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_feed' not in self._stubs: - self._stubs['create_feed'] = self.grpc_channel.unary_unary( + self._stubs['create_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateFeed', request_serializer=asset_service.CreateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -401,7 +477,7 @@ def get_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_feed' not in self._stubs: - self._stubs['get_feed'] = self.grpc_channel.unary_unary( + self._stubs['get_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetFeed', request_serializer=asset_service.GetFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -428,7 +504,7 @@ def list_feeds(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_feeds' not in self._stubs: - self._stubs['list_feeds'] = self.grpc_channel.unary_unary( + self._stubs['list_feeds'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListFeeds', request_serializer=asset_service.ListFeedsRequest.serialize, response_deserializer=asset_service.ListFeedsResponse.deserialize, @@ -454,7 +530,7 @@ def update_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_feed' not in self._stubs: - self._stubs['update_feed'] = self.grpc_channel.unary_unary( + self._stubs['update_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateFeed', request_serializer=asset_service.UpdateFeedRequest.serialize, response_deserializer=asset_service.Feed.deserialize, @@ -480,7 +556,7 @@ def delete_feed(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_feed' not in self._stubs: - self._stubs['delete_feed'] = self.grpc_channel.unary_unary( + self._stubs['delete_feed'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteFeed', request_serializer=asset_service.DeleteFeedRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -509,7 +585,7 @@ def search_all_resources(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'search_all_resources' not in self._stubs: - self._stubs['search_all_resources'] = self.grpc_channel.unary_unary( + self._stubs['search_all_resources'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllResources', request_serializer=asset_service.SearchAllResourcesRequest.serialize, response_deserializer=asset_service.SearchAllResourcesResponse.deserialize, @@ -538,7 +614,7 @@ def search_all_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'search_all_iam_policies' not in self._stubs: - self._stubs['search_all_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['search_all_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/SearchAllIamPolicies', request_serializer=asset_service.SearchAllIamPoliciesRequest.serialize, response_deserializer=asset_service.SearchAllIamPoliciesResponse.deserialize, @@ -565,7 +641,7 @@ def analyze_iam_policy(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_iam_policy' not in self._stubs: - self._stubs['analyze_iam_policy'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicy', request_serializer=asset_service.AnalyzeIamPolicyRequest.serialize, response_deserializer=asset_service.AnalyzeIamPolicyResponse.deserialize, @@ -602,7 +678,7 @@ def analyze_iam_policy_longrunning(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_iam_policy_longrunning' not in self._stubs: - self._stubs['analyze_iam_policy_longrunning'] = self.grpc_channel.unary_unary( + self._stubs['analyze_iam_policy_longrunning'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeIamPolicyLongrunning', request_serializer=asset_service.AnalyzeIamPolicyLongrunningRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -634,7 +710,7 @@ def analyze_move(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_move' not in self._stubs: - self._stubs['analyze_move'] = self.grpc_channel.unary_unary( + self._stubs['analyze_move'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeMove', request_serializer=asset_service.AnalyzeMoveRequest.serialize, response_deserializer=asset_service.AnalyzeMoveResponse.deserialize, @@ -675,7 +751,7 @@ def query_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'query_assets' not in self._stubs: - self._stubs['query_assets'] = self.grpc_channel.unary_unary( + self._stubs['query_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/QueryAssets', request_serializer=asset_service.QueryAssetsRequest.serialize, response_deserializer=asset_service.QueryAssetsResponse.deserialize, @@ -702,7 +778,7 @@ def create_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_saved_query' not in self._stubs: - self._stubs['create_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['create_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/CreateSavedQuery', request_serializer=asset_service.CreateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -728,7 +804,7 @@ def get_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_saved_query' not in self._stubs: - self._stubs['get_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['get_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/GetSavedQuery', request_serializer=asset_service.GetSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -755,7 +831,7 @@ def list_saved_queries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_saved_queries' not in self._stubs: - self._stubs['list_saved_queries'] = self.grpc_channel.unary_unary( + self._stubs['list_saved_queries'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/ListSavedQueries', request_serializer=asset_service.ListSavedQueriesRequest.serialize, response_deserializer=asset_service.ListSavedQueriesResponse.deserialize, @@ -781,7 +857,7 @@ def update_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_saved_query' not in self._stubs: - self._stubs['update_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['update_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/UpdateSavedQuery', request_serializer=asset_service.UpdateSavedQueryRequest.serialize, response_deserializer=asset_service.SavedQuery.deserialize, @@ -807,7 +883,7 @@ def delete_saved_query(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_saved_query' not in self._stubs: - self._stubs['delete_saved_query'] = self.grpc_channel.unary_unary( + self._stubs['delete_saved_query'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/DeleteSavedQuery', request_serializer=asset_service.DeleteSavedQueryRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -834,7 +910,7 @@ def batch_get_effective_iam_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'batch_get_effective_iam_policies' not in self._stubs: - self._stubs['batch_get_effective_iam_policies'] = self.grpc_channel.unary_unary( + self._stubs['batch_get_effective_iam_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/BatchGetEffectiveIamPolicies', request_serializer=asset_service.BatchGetEffectiveIamPoliciesRequest.serialize, response_deserializer=asset_service.BatchGetEffectiveIamPoliciesResponse.deserialize, @@ -860,7 +936,7 @@ def analyze_org_policies(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_org_policies' not in self._stubs: - self._stubs['analyze_org_policies'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policies'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicies', request_serializer=asset_service.AnalyzeOrgPoliciesRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPoliciesResponse.deserialize, @@ -888,7 +964,7 @@ def analyze_org_policy_governed_containers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'analyze_org_policy_governed_containers' not in self._stubs: - self._stubs['analyze_org_policy_governed_containers'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_containers'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedContainers', request_serializer=asset_service.AnalyzeOrgPolicyGovernedContainersRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedContainersResponse.deserialize, @@ -933,7 +1009,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'analyze_org_policy_governed_assets' not in self._stubs: - self._stubs['analyze_org_policy_governed_assets'] = self.grpc_channel.unary_unary( + self._stubs['analyze_org_policy_governed_assets'] = self._logged_channel.unary_unary( '/google.cloud.asset.v1.AssetService/AnalyzeOrgPolicyGovernedAssets', request_serializer=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.serialize, response_deserializer=asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.deserialize, @@ -1140,7 +1216,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -1157,7 +1233,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 62413fb4bf4b..0e55e2978514 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json import logging as std_logging import pickle import warnings @@ -51,7 +52,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -71,7 +77,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 8589ca45f040..8ac65146dc67 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,14 +26,84 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.iam.credentials_v1.types import common from .base import IAMCredentialsTransport, DEFAULT_CLIENT_INFO from .grpc import IAMCredentialsGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": 
"google.iam.credentials.v1.IAMCredentials", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.iam.credentials.v1.IAMCredentials", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class IAMCredentialsGrpcAsyncIOTransport(IAMCredentialsTransport): """gRPC AsyncIO backend transport for IAMCredentials. @@ -232,8 +305,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -266,7 +342,7 @@ def generate_access_token(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'generate_access_token' not in self._stubs: - self._stubs['generate_access_token'] = self.grpc_channel.unary_unary( + self._stubs['generate_access_token'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/GenerateAccessToken', request_serializer=common.GenerateAccessTokenRequest.serialize, response_deserializer=common.GenerateAccessTokenResponse.deserialize, @@ -293,7 +369,7 @@ def generate_id_token(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'generate_id_token' not in self._stubs: - self._stubs['generate_id_token'] = self.grpc_channel.unary_unary( + self._stubs['generate_id_token'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/GenerateIdToken', request_serializer=common.GenerateIdTokenRequest.serialize, response_deserializer=common.GenerateIdTokenResponse.deserialize, @@ -320,7 +396,7 @@ def sign_blob(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'sign_blob' not in self._stubs: - self._stubs['sign_blob'] = self.grpc_channel.unary_unary( + self._stubs['sign_blob'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/SignBlob', request_serializer=common.SignBlobRequest.serialize, response_deserializer=common.SignBlobResponse.deserialize, @@ -347,7 +423,7 @@ def sign_jwt(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'sign_jwt' not in self._stubs: - self._stubs['sign_jwt'] = self.grpc_channel.unary_unary( + self._stubs['sign_jwt'] = self._logged_channel.unary_unary( '/google.iam.credentials.v1.IAMCredentials/SignJwt', request_serializer=common.SignJwtRequest.serialize, response_deserializer=common.SignJwtResponse.deserialize, @@ -425,7 +501,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index c7d20f208fa1..8c602c7fd895 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json import logging as std_logging import pickle import warnings @@ -62,7 +63,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -82,7 +88,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -817,7 +823,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -835,7 +841,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -853,7 +859,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -871,7 +877,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -889,7 +895,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -907,7 +913,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, @@ -932,7 +938,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -958,7 +964,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -986,7 +992,7 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 5914da06cbaf..38009d7f9076 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.eventarc_v1.types import channel @@ -42,6 +48,73 @@ from .base import EventarcTransport, DEFAULT_CLIENT_INFO from .grpc import EventarcGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + 
"rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.eventarc.v1.Eventarc", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class EventarcGrpcAsyncIOTransport(EventarcTransport): """gRPC AsyncIO backend transport for Eventarc. @@ -237,8 +310,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -261,7 +337,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -286,7 +362,7 @@ def get_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_trigger' not in self._stubs: - self._stubs['get_trigger'] = self.grpc_channel.unary_unary( + self._stubs['get_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetTrigger', request_serializer=eventarc.GetTriggerRequest.serialize, response_deserializer=trigger.Trigger.deserialize, @@ -312,7 +388,7 @@ def list_triggers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_triggers' not in self._stubs: - self._stubs['list_triggers'] = self.grpc_channel.unary_unary( + self._stubs['list_triggers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListTriggers', request_serializer=eventarc.ListTriggersRequest.serialize, response_deserializer=eventarc.ListTriggersResponse.deserialize, @@ -339,7 +415,7 @@ def create_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_trigger' not in self._stubs: - self._stubs['create_trigger'] = self.grpc_channel.unary_unary( + self._stubs['create_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateTrigger', request_serializer=eventarc.CreateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -365,7 +441,7 @@ def update_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_trigger' not in self._stubs: - self._stubs['update_trigger'] = self.grpc_channel.unary_unary( + self._stubs['update_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateTrigger', request_serializer=eventarc.UpdateTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -391,7 +467,7 @@ def delete_trigger(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_trigger' not in self._stubs: - self._stubs['delete_trigger'] = self.grpc_channel.unary_unary( + self._stubs['delete_trigger'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteTrigger', request_serializer=eventarc.DeleteTriggerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -417,7 +493,7 @@ def get_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_channel' not in self._stubs: - self._stubs['get_channel'] = self.grpc_channel.unary_unary( + self._stubs['get_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannel', request_serializer=eventarc.GetChannelRequest.serialize, response_deserializer=channel.Channel.deserialize, @@ -443,7 +519,7 @@ def list_channels(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_channels' not in self._stubs: - self._stubs['list_channels'] = self.grpc_channel.unary_unary( + self._stubs['list_channels'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannels', request_serializer=eventarc.ListChannelsRequest.serialize, response_deserializer=eventarc.ListChannelsResponse.deserialize, @@ -470,7 +546,7 @@ def create_channel_(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_channel_' not in self._stubs: - self._stubs['create_channel_'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannel', request_serializer=eventarc.CreateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -496,7 +572,7 @@ def update_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_channel' not in self._stubs: - self._stubs['update_channel'] = self.grpc_channel.unary_unary( + self._stubs['update_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateChannel', request_serializer=eventarc.UpdateChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -522,7 +598,7 @@ def delete_channel(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel' not in self._stubs: - self._stubs['delete_channel'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannel', request_serializer=eventarc.DeleteChannelRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -548,7 +624,7 @@ def get_provider(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_provider' not in self._stubs: - self._stubs['get_provider'] = self.grpc_channel.unary_unary( + self._stubs['get_provider'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetProvider', request_serializer=eventarc.GetProviderRequest.serialize, response_deserializer=discovery.Provider.deserialize, @@ -574,7 +650,7 @@ def list_providers(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_providers' not in self._stubs: - self._stubs['list_providers'] = self.grpc_channel.unary_unary( + self._stubs['list_providers'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListProviders', request_serializer=eventarc.ListProvidersRequest.serialize, response_deserializer=eventarc.ListProvidersResponse.deserialize, @@ -600,7 +676,7 @@ def get_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_channel_connection' not in self._stubs: - self._stubs['get_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['get_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetChannelConnection', request_serializer=eventarc.GetChannelConnectionRequest.serialize, response_deserializer=channel_connection.ChannelConnection.deserialize, @@ -626,7 +702,7 @@ def list_channel_connections(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_channel_connections' not in self._stubs: - self._stubs['list_channel_connections'] = self.grpc_channel.unary_unary( + self._stubs['list_channel_connections'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/ListChannelConnections', request_serializer=eventarc.ListChannelConnectionsRequest.serialize, response_deserializer=eventarc.ListChannelConnectionsResponse.deserialize, @@ -653,7 +729,7 @@ def create_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_channel_connection' not in self._stubs: - self._stubs['create_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['create_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/CreateChannelConnection', request_serializer=eventarc.CreateChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -679,7 +755,7 @@ def delete_channel_connection(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_channel_connection' not in self._stubs: - self._stubs['delete_channel_connection'] = self.grpc_channel.unary_unary( + self._stubs['delete_channel_connection'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/DeleteChannelConnection', request_serializer=eventarc.DeleteChannelConnectionRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -705,7 +781,7 @@ def get_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_google_channel_config' not in self._stubs: - self._stubs['get_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['get_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/GetGoogleChannelConfig', request_serializer=eventarc.GetGoogleChannelConfigRequest.serialize, response_deserializer=google_channel_config.GoogleChannelConfig.deserialize, @@ -731,7 +807,7 @@ def update_google_channel_config(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_google_channel_config' not in self._stubs: - self._stubs['update_google_channel_config'] = self.grpc_channel.unary_unary( + self._stubs['update_google_channel_config'] = self._logged_channel.unary_unary( '/google.cloud.eventarc.v1.Eventarc/UpdateGoogleChannelConfig', request_serializer=eventarc.UpdateGoogleChannelConfigRequest.serialize, response_deserializer=gce_google_channel_config.GoogleChannelConfig.deserialize, @@ -884,7 +960,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -901,7 +977,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -919,7 +995,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -937,7 +1013,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -955,7 +1031,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -973,7 +1049,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -991,7 +1067,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, @@ -1016,7 +1092,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -1042,7 +1118,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -1070,7 +1146,7 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index b8de6fe5d114..02789efdee5a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json import logging as std_logging import pickle import warnings @@ -54,7 +55,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -74,7 +80,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -1269,7 +1275,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1287,7 +1293,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1305,7 +1311,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 9479b4476c08..78495902e822 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_config @@ -34,6 +40,73 @@ from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import ConfigServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": 
"google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): """gRPC AsyncIO backend transport for ConfigServiceV2. @@ -227,8 +300,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -251,7 +327,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -276,7 +352,7 @@ def list_buckets(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_buckets' not in self._stubs: - self._stubs['list_buckets'] = self.grpc_channel.unary_unary( + self._stubs['list_buckets'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListBuckets', request_serializer=logging_config.ListBucketsRequest.serialize, response_deserializer=logging_config.ListBucketsResponse.deserialize, @@ -302,7 +378,7 @@ def get_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_bucket' not in self._stubs: - self._stubs['get_bucket'] = self.grpc_channel.unary_unary( + self._stubs['get_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetBucket', request_serializer=logging_config.GetBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -331,7 +407,7 @@ def create_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket_async' not in self._stubs: - self._stubs['create_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -363,7 +439,7 @@ def update_bucket_async(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_bucket_async' not in self._stubs: - self._stubs['update_bucket_async'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -391,7 +467,7 @@ def create_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_bucket' not in self._stubs: - self._stubs['create_bucket'] = self.grpc_channel.unary_unary( + self._stubs['create_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateBucket', request_serializer=logging_config.CreateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -423,7 +499,7 @@ def update_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_bucket' not in self._stubs: - self._stubs['update_bucket'] = self.grpc_channel.unary_unary( + self._stubs['update_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateBucket', request_serializer=logging_config.UpdateBucketRequest.serialize, response_deserializer=logging_config.LogBucket.deserialize, @@ -454,7 +530,7 @@ def delete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_bucket' not in self._stubs: - self._stubs['delete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['delete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteBucket', request_serializer=logging_config.DeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -482,7 +558,7 @@ def undelete_bucket(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'undelete_bucket' not in self._stubs: - self._stubs['undelete_bucket'] = self.grpc_channel.unary_unary( + self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UndeleteBucket', request_serializer=logging_config.UndeleteBucketRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -508,7 +584,7 @@ def list_views(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_views' not in self._stubs: - self._stubs['list_views'] = self.grpc_channel.unary_unary( + self._stubs['list_views'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListViews', request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, @@ -534,7 +610,7 @@ def get_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_view' not in self._stubs: - self._stubs['get_view'] = self.grpc_channel.unary_unary( + self._stubs['get_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetView', request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -561,7 +637,7 @@ def create_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_view' not in self._stubs: - self._stubs['create_view'] = self.grpc_channel.unary_unary( + self._stubs['create_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateView', request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -591,7 +667,7 @@ def update_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_view' not in self._stubs: - self._stubs['update_view'] = self.grpc_channel.unary_unary( + self._stubs['update_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateView', request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, @@ -620,7 +696,7 @@ def delete_view(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_view' not in self._stubs: - self._stubs['delete_view'] = self.grpc_channel.unary_unary( + self._stubs['delete_view'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteView', request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -646,7 +722,7 @@ def list_sinks(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_sinks' not in self._stubs: - self._stubs['list_sinks'] = self.grpc_channel.unary_unary( + self._stubs['list_sinks'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListSinks', request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, @@ -672,7 +748,7 @@ def get_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'get_sink' not in self._stubs: - self._stubs['get_sink'] = self.grpc_channel.unary_unary( + self._stubs['get_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSink', request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -702,7 +778,7 @@ def create_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_sink' not in self._stubs: - self._stubs['create_sink'] = self.grpc_channel.unary_unary( + self._stubs['create_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateSink', request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -733,7 +809,7 @@ def update_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_sink' not in self._stubs: - self._stubs['update_sink'] = self.grpc_channel.unary_unary( + self._stubs['update_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSink', request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, @@ -760,7 +836,7 @@ def delete_sink(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_sink' not in self._stubs: - self._stubs['delete_sink'] = self.grpc_channel.unary_unary( + self._stubs['delete_sink'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteSink', request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -789,7 +865,7 @@ def create_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_link' not in self._stubs: - self._stubs['create_link'] = self.grpc_channel.unary_unary( + self._stubs['create_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateLink', request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -816,7 +892,7 @@ def delete_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_link' not in self._stubs: - self._stubs['delete_link'] = self.grpc_channel.unary_unary( + self._stubs['delete_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteLink', request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -842,7 +918,7 @@ def list_links(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_links' not in self._stubs: - self._stubs['list_links'] = self.grpc_channel.unary_unary( + self._stubs['list_links'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListLinks', request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, @@ -868,7 +944,7 @@ def get_link(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_link' not in self._stubs: - self._stubs['get_link'] = self.grpc_channel.unary_unary( + self._stubs['get_link'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetLink', request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, @@ -895,7 +971,7 @@ def list_exclusions(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_exclusions' not in self._stubs: - self._stubs['list_exclusions'] = self.grpc_channel.unary_unary( + self._stubs['list_exclusions'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/ListExclusions', request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, @@ -921,7 +997,7 @@ def get_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_exclusion' not in self._stubs: - self._stubs['get_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['get_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetExclusion', request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -949,7 +1025,7 @@ def create_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_exclusion' not in self._stubs: - self._stubs['create_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['create_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CreateExclusion', request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -976,7 +1052,7 @@ def update_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_exclusion' not in self._stubs: - self._stubs['update_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['update_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateExclusion', request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, @@ -1002,7 +1078,7 @@ def delete_exclusion(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_exclusion' not in self._stubs: - self._stubs['delete_exclusion'] = self.grpc_channel.unary_unary( + self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/DeleteExclusion', request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -1037,7 +1113,7 @@ def get_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_cmek_settings' not in self._stubs: - self._stubs['get_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetCmekSettings', request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1077,7 +1153,7 @@ def update_cmek_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_cmek_settings' not in self._stubs: - self._stubs['update_cmek_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, @@ -1113,7 +1189,7 @@ def get_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_settings' not in self._stubs: - self._stubs['get_settings'] = self.grpc_channel.unary_unary( + self._stubs['get_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/GetSettings', request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1156,7 +1232,7 @@ def update_settings(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_settings' not in self._stubs: - self._stubs['update_settings'] = self.grpc_channel.unary_unary( + self._stubs['update_settings'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/UpdateSettings', request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, @@ -1183,7 +1259,7 @@ def copy_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'copy_log_entries' not in self._stubs: - self._stubs['copy_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.ConfigServiceV2/CopyLogEntries', request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -1453,7 +1529,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -1470,7 +1546,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -1488,7 +1564,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -1506,7 +1582,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index de25f5f6fdfa..718d4ac6713c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json import logging as std_logging import pickle import warnings @@ -53,7 +54,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -73,7 +79,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -493,7 +499,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -511,7 +517,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -529,7 +535,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 83a400473ff4..6933d0f2ba34 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging @@ -33,6 +39,73 @@ from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import LoggingServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": 
"google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): """gRPC AsyncIO backend transport for LoggingServiceV2. @@ -225,8 +298,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -262,7 +338,7 @@ def delete_log(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_log' not in self._stubs: - self._stubs['delete_log'] = self.grpc_channel.unary_unary( + self._stubs['delete_log'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/DeleteLog', request_serializer=logging.DeleteLogRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -294,7 +370,7 @@ def write_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'write_log_entries' not in self._stubs: - self._stubs['write_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['write_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/WriteLogEntries', request_serializer=logging.WriteLogEntriesRequest.serialize, response_deserializer=logging.WriteLogEntriesResponse.deserialize, @@ -323,7 +399,7 @@ def list_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_log_entries' not in self._stubs: - self._stubs['list_log_entries'] = self.grpc_channel.unary_unary( + self._stubs['list_log_entries'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogEntries', request_serializer=logging.ListLogEntriesRequest.serialize, response_deserializer=logging.ListLogEntriesResponse.deserialize, @@ -351,7 +427,7 @@ def list_monitored_resource_descriptors(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_monitored_resource_descriptors' not in self._stubs: - self._stubs['list_monitored_resource_descriptors'] = self.grpc_channel.unary_unary( + self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, @@ -379,7 +455,7 @@ def list_logs(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_logs' not in self._stubs: - self._stubs['list_logs'] = self.grpc_channel.unary_unary( + self._stubs['list_logs'] = self._logged_channel.unary_unary( '/google.logging.v2.LoggingServiceV2/ListLogs', request_serializer=logging.ListLogsRequest.serialize, response_deserializer=logging.ListLogsResponse.deserialize, @@ -407,7 +483,7 @@ def tail_log_entries(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'tail_log_entries' not in self._stubs: - self._stubs['tail_log_entries'] = self.grpc_channel.stream_stream( + self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( '/google.logging.v2.LoggingServiceV2/TailLogEntries', request_serializer=logging.TailLogEntriesRequest.serialize, response_deserializer=logging.TailLogEntriesResponse.deserialize, @@ -536,7 +612,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -553,7 +629,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -571,7 +647,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -589,7 +665,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 9f6330cbb3b0..f393fd814ed9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing 
permissions and # limitations under the License. # +import json import logging as std_logging import pickle import warnings @@ -53,7 +54,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -73,7 +79,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -448,7 +454,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -466,7 +472,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -484,7 +490,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 2707c88cf5e8..b382f48506bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.logging_v2.types import logging_metrics @@ -33,6 +39,73 @@ from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO from .grpc import MetricsServiceV2GrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": 
"google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): """gRPC AsyncIO backend transport for MetricsServiceV2. @@ -225,8 +298,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -258,7 +334,7 @@ def list_log_metrics(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'list_log_metrics' not in self._stubs: - self._stubs['list_log_metrics'] = self.grpc_channel.unary_unary( + self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/ListLogMetrics', request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, @@ -284,7 +360,7 @@ def get_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_log_metric' not in self._stubs: - self._stubs['get_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['get_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/GetLogMetric', request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -310,7 +386,7 @@ def create_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'create_log_metric' not in self._stubs: - self._stubs['create_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['create_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/CreateLogMetric', request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -336,7 +412,7 @@ def update_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'update_log_metric' not in self._stubs: - self._stubs['update_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['update_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, @@ -362,7 +438,7 @@ def delete_log_metric(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'delete_log_metric' not in self._stubs: - self._stubs['delete_log_metric'] = self.grpc_channel.unary_unary( + self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -464,7 +540,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -481,7 +557,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -499,7 +575,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -517,7 +593,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index cf5ec9a7fe97..76c07f74bb53 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json import logging as std_logging import pickle import warnings @@ -54,7 +55,12 @@ def intercept_unary_unary(self, continuation, client_call_details, request): elif isinstance(request, google.protobuf.message.Message): request_payload = MessageToJson(request) else: - request_payload = f"{type(result).__name__}: {pickle.dumps(request)}" + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } grpc_request = { "payload": request_payload, "requestMethod": "grpc", @@ -74,7 +80,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples - metadata = dict([(k, v) for k, v in response_metadata]) if response_metadata else None + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None result = response.result() if isinstance(result, proto.Message): response_payload = type(result).to_json(result) @@ -690,7 +696,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -708,7 +714,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -726,7 +732,7 @@ def wait_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "wait_operation" not in self._stubs: - self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + self._stubs["wait_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, response_deserializer=None, @@ -744,7 +750,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -762,7 +768,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -780,7 +786,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -798,7 +804,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 78b7c4d028cb..aa699fc707f3 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -24,8 +27,11 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.location import locations_pb2 # type: ignore @@ -34,6 +40,73 @@ from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO from .grpc import CloudRedisGrpcTransport +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": 
"google.cloud.redis.v1.CloudRedis", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): """gRPC AsyncIO backend transport for CloudRedis. @@ -247,8 +320,11 @@ def __init__(self, *, ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -271,7 +347,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( - self.grpc_channel + self._logged_channel ) # Return the client from cache. @@ -305,7 +381,7 @@ def list_instances(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'list_instances' not in self._stubs: - self._stubs['list_instances'] = self.grpc_channel.unary_unary( + self._stubs['list_instances'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ListInstances', request_serializer=cloud_redis.ListInstancesRequest.serialize, response_deserializer=cloud_redis.ListInstancesResponse.deserialize, @@ -331,7 +407,7 @@ def get_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance' not in self._stubs: - self._stubs['get_instance'] = self.grpc_channel.unary_unary( + self._stubs['get_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstance', request_serializer=cloud_redis.GetInstanceRequest.serialize, response_deserializer=cloud_redis.Instance.deserialize, @@ -360,7 +436,7 @@ def get_instance_auth_string(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'get_instance_auth_string' not in self._stubs: - self._stubs['get_instance_auth_string'] = self.grpc_channel.unary_unary( + self._stubs['get_instance_auth_string'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/GetInstanceAuthString', request_serializer=cloud_redis.GetInstanceAuthStringRequest.serialize, response_deserializer=cloud_redis.InstanceAuthString.deserialize, @@ -399,7 +475,7 @@ def create_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'create_instance' not in self._stubs: - self._stubs['create_instance'] = self.grpc_channel.unary_unary( + self._stubs['create_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/CreateInstance', request_serializer=cloud_redis.CreateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -430,7 +506,7 @@ def update_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'update_instance' not in self._stubs: - self._stubs['update_instance'] = self.grpc_channel.unary_unary( + self._stubs['update_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpdateInstance', request_serializer=cloud_redis.UpdateInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -457,7 +533,7 @@ def upgrade_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'upgrade_instance' not in self._stubs: - self._stubs['upgrade_instance'] = self.grpc_channel.unary_unary( + self._stubs['upgrade_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/UpgradeInstance', request_serializer=cloud_redis.UpgradeInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -491,7 +567,7 @@ def import_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'import_instance' not in self._stubs: - self._stubs['import_instance'] = self.grpc_channel.unary_unary( + self._stubs['import_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ImportInstance', request_serializer=cloud_redis.ImportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -522,7 +598,7 @@ def export_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'export_instance' not in self._stubs: - self._stubs['export_instance'] = self.grpc_channel.unary_unary( + self._stubs['export_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/ExportInstance', request_serializer=cloud_redis.ExportInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -550,7 +626,7 @@ def failover_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'failover_instance' not in self._stubs: - self._stubs['failover_instance'] = self.grpc_channel.unary_unary( + self._stubs['failover_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/FailoverInstance', request_serializer=cloud_redis.FailoverInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -577,7 +653,7 @@ def delete_instance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if 'delete_instance' not in self._stubs: - self._stubs['delete_instance'] = self.grpc_channel.unary_unary( + self._stubs['delete_instance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/DeleteInstance', request_serializer=cloud_redis.DeleteInstanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -604,7 +680,7 @@ def reschedule_maintenance(self) -> Callable[ # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if 'reschedule_maintenance' not in self._stubs: - self._stubs['reschedule_maintenance'] = self.grpc_channel.unary_unary( + self._stubs['reschedule_maintenance'] = self._logged_channel.unary_unary( '/google.cloud.redis.v1.CloudRedis/RescheduleMaintenance', request_serializer=cloud_redis.RescheduleMaintenanceRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, @@ -712,7 +788,7 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: @@ -729,7 +805,7 @@ def delete_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_operation" not in self._stubs: - self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + self._stubs["delete_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/DeleteOperation", request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, response_deserializer=None, @@ -747,7 +823,7 @@ def cancel_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/CancelOperation", request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, response_deserializer=None, @@ -765,7 +841,7 @@ def wait_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "wait_operation" not in self._stubs: - self._stubs["wait_operation"] = self.grpc_channel.unary_unary( + self._stubs["wait_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/WaitOperation", request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, response_deserializer=None, @@ -783,7 +859,7 @@ def get_operation( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( + self._stubs["get_operation"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/GetOperation", request_serializer=operations_pb2.GetOperationRequest.SerializeToString, response_deserializer=operations_pb2.Operation.FromString, @@ -801,7 +877,7 @@ def list_operations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_operations" not in self._stubs: - self._stubs["list_operations"] = self.grpc_channel.unary_unary( + self._stubs["list_operations"] = self._logged_channel.unary_unary( "/google.longrunning.Operations/ListOperations", request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, response_deserializer=operations_pb2.ListOperationsResponse.FromString, @@ -819,7 +895,7 @@ def list_locations( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_locations" not in self._stubs: - self._stubs["list_locations"] = self.grpc_channel.unary_unary( + self._stubs["list_locations"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/ListLocations", request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, response_deserializer=locations_pb2.ListLocationsResponse.FromString, @@ -837,7 +913,7 @@ def get_location( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_location" not in self._stubs: - self._stubs["get_location"] = self.grpc_channel.unary_unary( + self._stubs["get_location"] = self._logged_channel.unary_unary( "/google.cloud.location.Locations/GetLocation", request_serializer=locations_pb2.GetLocationRequest.SerializeToString, response_deserializer=locations_pb2.Location.FromString, From 102a1fd11fb9514a887ba56f5ec38f60bd14b4ed Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 17:54:08 -0500 Subject: [PATCH 1229/1339] chore(main): release 1.21.0 (#2259) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 16 ++++++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 10340c68405a..8f25cb72b2f5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,22 @@ # Changelog +## [1.21.0](https://github.com/googleapis/gapic-generator-python/compare/v1.20.2...v1.21.0) (2024-12-11) + + +### Features + +* Add client debug logging support for async gRPC ([#2291](https://github.com/googleapis/gapic-generator-python/issues/2291)) ([f45935a](https://github.com/googleapis/gapic-generator-python/commit/f45935a4d760a36bf989ed79bfd02aa7ec203468)) +* Add client logging support 
for sync gRPC ([#2284](https://github.com/googleapis/gapic-generator-python/issues/2284)) ([dddf797](https://github.com/googleapis/gapic-generator-python/commit/dddf797a1ec7bf0496b4b4c75f8d37faa753c824)) +* Add debug log when creating client ([#2265](https://github.com/googleapis/gapic-generator-python/issues/2265)) ([8be95a2](https://github.com/googleapis/gapic-generator-python/commit/8be95a2f4749a2882117154aa655c0a9d71cdc50)) +* Add debug log when sending requests via REST ([#2270](https://github.com/googleapis/gapic-generator-python/issues/2270)) ([4cb1fa2](https://github.com/googleapis/gapic-generator-python/commit/4cb1fa2452ad5ba59b34c9d25cb3ca0c635059ac)) + + +### Bug Fixes + +* Fix typing issue with gRPC metadata when key ends in -bin ([#2251](https://github.com/googleapis/gapic-generator-python/issues/2251)) ([8b3b80f](https://github.com/googleapis/gapic-generator-python/commit/8b3b80f4b55c295e5d13084284ff0e2a72b2e993)) +* **log:** Preserve dict of rest async response headers ([#2288](https://github.com/googleapis/gapic-generator-python/issues/2288)) ([b10cc21](https://github.com/googleapis/gapic-generator-python/commit/b10cc21daf7d17567119f5c9b33d98fe18072eb4)) + ## [1.20.2](https://github.com/googleapis/gapic-generator-python/compare/v1.20.1...v1.20.2) (2024-10-30) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 0a7a1ee290f2..b2eb1cafd1a1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.20.2" +version = "1.21.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 96e5fe30ddde1d1ef2c38d2cba412c0a651b0438 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" 
<78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 13:25:04 -0500 Subject: [PATCH 1230/1339] chore(python): update dependencies in .kokoro/docker/docs (#2298) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/requirements.txt | 52 +++++++++++++++---- 2 files changed, 43 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 6301519a9a05..26306af66f81 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 -# created: 2024-11-12T12:09:45.821174897Z + digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 +# created: 2024-12-17T00:59:58.625514486Z diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index 8bb0764594b1..f99a5c4aac7f 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in # -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 +argcomplete==3.5.2 \ + --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ + 
--hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb # via nox colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ @@ -23,7 +23,7 @@ filelock==3.16.1 \ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in + # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,11 +32,41 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + 
--hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - 
--hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via nox From b3fe7a270ea815dc2a179c0bb22ec48d3ffcc017 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 18 Dec 2024 19:56:33 +0100 Subject: [PATCH 1231/1339] chore(deps): update all dependencies (#2278) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/WORKSPACE | 6 +- packages/gapic-generator/requirements.txt | 326 +++++++++++----------- 2 files changed, 166 insertions(+), 166 deletions(-) diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 99163725cf8e..86d90144c61e 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "10a0d58f39a1a909e95e00e8ba0b5b1dc64d02997f741151953a2b3659f6e78c", - strip_prefix = "protobuf-29.0", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.0.tar.gz"], + sha256 = "63150aba23f7a90fd7d87bdf514e459dd5fe7023fdde01b56ac53335df64d4bd", + strip_prefix = "protobuf-29.2", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.2.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 7397b03d6d58..eda186e2713e 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,103 +8,103 @@ aiohappyeyeballs==2.4.4 \ --hash=sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745 \ --hash=sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8 # via aiohttp -aiohttp==3.11.9 \ - 
--hash=sha256:0411777249f25d11bd2964a230b3ffafcbed6cd65d0f2b132bc2b8f5b8c347c7 \ - --hash=sha256:0a97d657f6cf8782a830bb476c13f7d777cfcab8428ac49dde15c22babceb361 \ - --hash=sha256:0b5a5009b0159a8f707879dc102b139466d8ec6db05103ec1520394fdd8ea02c \ - --hash=sha256:0bcb7f6976dc0b6b56efde13294862adf68dd48854111b422a336fa729a82ea6 \ - --hash=sha256:14624d96f0d69cf451deed3173079a68c322279be6030208b045ab77e1e8d550 \ - --hash=sha256:15c4e489942d987d5dac0ba39e5772dcbed4cc9ae3710d1025d5ba95e4a5349c \ - --hash=sha256:176f8bb8931da0613bb0ed16326d01330066bb1e172dd97e1e02b1c27383277b \ - --hash=sha256:17af09d963fa1acd7e4c280e9354aeafd9e3d47eaa4a6bfbd2171ad7da49f0c5 \ - --hash=sha256:1a8b13b9950d8b2f8f58b6e5842c4b842b5887e2c32e3f4644d6642f1659a530 \ - --hash=sha256:202f40fb686e5f93908eee0c75d1e6fbe50a43e9bd4909bf3bf4a56b560ca180 \ - --hash=sha256:21cbe97839b009826a61b143d3ca4964c8590d7aed33d6118125e5b71691ca46 \ - --hash=sha256:27935716f8d62c1c73010428db310fd10136002cfc6d52b0ba7bdfa752d26066 \ - --hash=sha256:282e0a7ddd36ebc411f156aeaa0491e8fe7f030e2a95da532cf0c84b0b70bc66 \ - --hash=sha256:28f29bce89c3b401a53d6fd4bee401ee943083bf2bdc12ef297c1d63155070b0 \ - --hash=sha256:2ac9fd83096df36728da8e2f4488ac3b5602238f602706606f3702f07a13a409 \ - --hash=sha256:30f9f89ae625d412043f12ca3771b2ccec227cc93b93bb1f994db6e1af40a7d3 \ - --hash=sha256:317251b9c9a2f1a9ff9cd093775b34c6861d1d7df9439ce3d32a88c275c995cd \ - --hash=sha256:31de2f10f63f96cc19e04bd2df9549559beadd0b2ee2da24a17e7ed877ca8c60 \ - --hash=sha256:36df00e0541f264ce42d62280281541a47474dfda500bc5b7f24f70a7f87be7a \ - --hash=sha256:39625703540feb50b6b7f938b3856d1f4886d2e585d88274e62b1bd273fae09b \ - --hash=sha256:3f5461c77649358610fb9694e790956b4238ac5d9e697a17f63619c096469afe \ - --hash=sha256:4313f3bc901255b22f01663eeeae167468264fdae0d32c25fc631d5d6e15b502 \ - --hash=sha256:442356e8924fe1a121f8c87866b0ecdc785757fd28924b17c20493961b3d6697 \ - --hash=sha256:44cb1a1326a0264480a789e6100dc3e07122eb8cd1ad6b784a3d47d13ed1d89c \ - 
--hash=sha256:44d323aa80a867cb6db6bebb4bbec677c6478e38128847f2c6b0f70eae984d72 \ - --hash=sha256:499368eb904566fbdf1a3836a1532000ef1308f34a1bcbf36e6351904cced771 \ - --hash=sha256:4b01d9cfcb616eeb6d40f02e66bebfe7b06d9f2ef81641fdd50b8dd981166e0b \ - --hash=sha256:5720ebbc7a1b46c33a42d489d25d36c64c419f52159485e55589fbec648ea49a \ - --hash=sha256:5cc5e0d069c56645446c45a4b5010d4b33ac6c5ebfd369a791b5f097e46a3c08 \ - --hash=sha256:618b18c3a2360ac940a5503da14fa4f880c5b9bc315ec20a830357bcc62e6bae \ - --hash=sha256:6435a66957cdba1a0b16f368bde03ce9c79c57306b39510da6ae5312a1a5b2c1 \ - --hash=sha256:647ec5bee7e4ec9f1034ab48173b5fa970d9a991e565549b965e93331f1328fe \ - --hash=sha256:6e1e9e447856e9b7b3d38e1316ae9a8c92e7536ef48373de758ea055edfd5db5 \ - --hash=sha256:6ef1550bb5f55f71b97a6a395286db07f7f2c01c8890e613556df9a51da91e8d \ - --hash=sha256:6ffa45cc55b18d4ac1396d1ddb029f139b1d3480f1594130e62bceadf2e1a838 \ - --hash=sha256:77f31cebd8c27a36af6c7346055ac564946e562080ee1a838da724585c67474f \ - --hash=sha256:7a3b5b2c012d70c63d9d13c57ed1603709a4d9d7d473e4a9dfece0e4ea3d5f51 \ - --hash=sha256:7a7ddf981a0b953ade1c2379052d47ccda2f58ab678fca0671c7c7ca2f67aac2 \ - --hash=sha256:84de955314aa5e8d469b00b14d6d714b008087a0222b0f743e7ffac34ef56aff \ - --hash=sha256:8dcfd14c712aa9dd18049280bfb2f95700ff6a8bde645e09f17c3ed3f05a0130 \ - --hash=sha256:928f92f80e2e8d6567b87d3316c1fd9860ccfe36e87a9a7f5237d4cda8baa1ba \ - --hash=sha256:9384b07cfd3045b37b05ed002d1c255db02fb96506ad65f0f9b776b762a7572e \ - --hash=sha256:96726839a42429318017e67a42cca75d4f0d5248a809b3cc2e125445edd7d50d \ - --hash=sha256:96bbec47beb131bbf4bae05d8ef99ad9e5738f12717cfbbf16648b78b0232e87 \ - --hash=sha256:9bcf97b971289be69638d8b1b616f7e557e1342debc7fc86cf89d3f08960e411 \ - --hash=sha256:a0cf4d814689e58f57ecd5d8c523e6538417ca2e72ff52c007c64065cef50fb2 \ - --hash=sha256:a7c6147c6306f537cff59409609508a1d2eff81199f0302dd456bb9e7ea50c39 \ - --hash=sha256:a9266644064779840feec0e34f10a89b3ff1d2d6b751fe90017abcad1864fa7c \ - 
--hash=sha256:afbe85b50ade42ddff5669947afde9e8a610e64d2c80be046d67ec4368e555fa \ - --hash=sha256:afcda759a69c6a8be3aae764ec6733155aa4a5ad9aad4f398b52ba4037942fe3 \ - --hash=sha256:b2fab23003c4bb2249729a7290a76c1dda38c438300fdf97d4e42bf78b19c810 \ - --hash=sha256:bd3f711f4c99da0091ced41dccdc1bcf8be0281dc314d6d9c6b6cf5df66f37a9 \ - --hash=sha256:be0c7c98e38a1e3ad7a6ff64af8b6d6db34bf5a41b1478e24c3c74d9e7f8ed42 \ - --hash=sha256:c1f2d7fd583fc79c240094b3e7237d88493814d4b300d013a42726c35a734bc9 \ - --hash=sha256:c5bba6b83fde4ca233cfda04cbd4685ab88696b0c8eaf76f7148969eab5e248a \ - --hash=sha256:c6beeac698671baa558e82fa160be9761cf0eb25861943f4689ecf9000f8ebd0 \ - --hash=sha256:c7333e7239415076d1418dbfb7fa4df48f3a5b00f8fdf854fca549080455bc14 \ - --hash=sha256:c8a02f74ae419e3955af60f570d83187423e42e672a6433c5e292f1d23619269 \ - --hash=sha256:c9c23e62f3545c2216100603614f9e019e41b9403c47dd85b8e7e5015bf1bde0 \ - --hash=sha256:cca505829cdab58c2495ff418c96092d225a1bbd486f79017f6de915580d3c44 \ - --hash=sha256:d3108f0ad5c6b6d78eec5273219a5bbd884b4aacec17883ceefaac988850ce6e \ - --hash=sha256:d4b8a1b6c7a68c73191f2ebd3bf66f7ce02f9c374e309bdb68ba886bbbf1b938 \ - --hash=sha256:d6e274661c74195708fc4380a4ef64298926c5a50bb10fbae3d01627d7a075b7 \ - --hash=sha256:db2914de2559809fdbcf3e48f41b17a493b58cb7988d3e211f6b63126c55fe82 \ - --hash=sha256:e738aabff3586091221044b7a584865ddc4d6120346d12e28e788307cd731043 \ - --hash=sha256:e7f6173302f8a329ca5d1ee592af9e628d3ade87816e9958dcf7cdae2841def7 \ - --hash=sha256:e9d036a9a41fc78e8a3f10a86c2fc1098fca8fab8715ba9eb999ce4788d35df0 \ - --hash=sha256:ea142255d4901b03f89cb6a94411ecec117786a76fc9ab043af8f51dd50b5313 \ - --hash=sha256:ebd3e6b0c7d4954cca59d241970011f8d3327633d555051c430bd09ff49dc494 \ - --hash=sha256:ec656680fc53a13f849c71afd0c84a55c536206d524cbc831cde80abbe80489e \ - --hash=sha256:ec8df0ff5a911c6d21957a9182402aad7bf060eaeffd77c9ea1c16aecab5adbf \ - --hash=sha256:ed95d66745f53e129e935ad726167d3a6cb18c5d33df3165974d54742c373868 \ - 
--hash=sha256:ef2c9499b7bd1e24e473dc1a85de55d72fd084eea3d8bdeec7ee0720decb54fa \ - --hash=sha256:f5252ba8b43906f206048fa569debf2cd0da0316e8d5b4d25abe53307f573941 \ - --hash=sha256:f737fef6e117856400afee4f17774cdea392b28ecf058833f5eca368a18cf1bf \ - --hash=sha256:fc726c3fa8f606d07bd2b500e5dc4c0fd664c59be7788a16b9e34352c50b6b6b +aiohttp==3.11.10 \ + --hash=sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0 \ + --hash=sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769 \ + --hash=sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5 \ + --hash=sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59 \ + --hash=sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf \ + --hash=sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985 \ + --hash=sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50 \ + --hash=sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299 \ + --hash=sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d \ + --hash=sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab \ + --hash=sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542 \ + --hash=sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b \ + --hash=sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b \ + --hash=sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838 \ + --hash=sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683 \ + --hash=sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df \ + --hash=sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d \ + --hash=sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91 \ + --hash=sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9 \ + 
--hash=sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be \ + --hash=sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c \ + --hash=sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219 \ + --hash=sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4 \ + --hash=sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf \ + --hash=sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f \ + --hash=sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199 \ + --hash=sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1 \ + --hash=sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60 \ + --hash=sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77 \ + --hash=sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf \ + --hash=sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079 \ + --hash=sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4 \ + --hash=sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46 \ + --hash=sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8 \ + --hash=sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c \ + --hash=sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d \ + --hash=sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33 \ + --hash=sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34 \ + --hash=sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82 \ + --hash=sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b \ + --hash=sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c \ + --hash=sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836 \ + --hash=sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69 \ + 
--hash=sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39 \ + --hash=sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f \ + --hash=sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32 \ + --hash=sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc \ + --hash=sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52 \ + --hash=sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816 \ + --hash=sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1 \ + --hash=sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec \ + --hash=sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487 \ + --hash=sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0 \ + --hash=sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767 \ + --hash=sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5 \ + --hash=sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6 \ + --hash=sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9 \ + --hash=sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f \ + --hash=sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138 \ + --hash=sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e \ + --hash=sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf \ + --hash=sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109 \ + --hash=sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408 \ + --hash=sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6 \ + --hash=sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d \ + --hash=sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99 \ + --hash=sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4 \ + 
--hash=sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74 \ + --hash=sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc \ + --hash=sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d \ + --hash=sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5 \ + --hash=sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a \ + --hash=sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01 \ + --hash=sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f \ + --hash=sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e \ + --hash=sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3 # via -r requirements.in -aiosignal==1.3.1 \ - --hash=sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc \ - --hash=sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17 +aiosignal==1.3.2 \ + --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ + --hash=sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54 # via aiohttp async-timeout==5.0.1 \ --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 # via aiohttp -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 +attrs==24.3.0 \ + --hash=sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff \ + --hash=sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308 # via aiohttp cachetools==5.5.0 \ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2024.8.30 \ - 
--hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db # via requests charset-normalizer==3.4.0 \ --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ @@ -317,13 +317,13 @@ frozenlist==1.5.0 \ # via # aiohttp # aiosignal -google-api-core==2.23.0 \ - --hash=sha256:2ceb087315e6af43f256704b871d99326b1f12a9d6ce99beaedec99ba26a0ace \ - --hash=sha256:c20100d4c4c41070cf365f1d8ddf5365915291b5eb11b83829fbd1c999b5122f +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf # via -r requirements.in -google-auth==2.36.0 \ - --hash=sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb \ - --hash=sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1 +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 # via google-api-core googleapis-common-protos[grpc]==1.66.0 \ --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ @@ -336,62 +336,62 @@ grpc-google-iam-v1==0.13.1 \ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e # via -r requirements.in -grpcio==1.68.0 \ - --hash=sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354 \ - --hash=sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21 \ - --hash=sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116 \ - 
--hash=sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a \ - --hash=sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829 \ - --hash=sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1 \ - --hash=sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363 \ - --hash=sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a \ - --hash=sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9 \ - --hash=sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b \ - --hash=sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03 \ - --hash=sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415 \ - --hash=sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7 \ - --hash=sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121 \ - --hash=sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f \ - --hash=sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd \ - --hash=sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d \ - --hash=sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4 \ - --hash=sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10 \ - --hash=sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5 \ - --hash=sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332 \ - --hash=sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544 \ - --hash=sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75 \ - --hash=sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665 \ - --hash=sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110 \ - --hash=sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd \ - --hash=sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a \ - 
--hash=sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618 \ - --hash=sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d \ - --hash=sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30 \ - --hash=sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1 \ - --hash=sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1 \ - --hash=sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d \ - --hash=sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796 \ - --hash=sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3 \ - --hash=sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b \ - --hash=sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659 \ - --hash=sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a \ - --hash=sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05 \ - --hash=sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a \ - --hash=sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c \ - --hash=sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161 \ - --hash=sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb \ - --hash=sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78 \ - --hash=sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27 \ - --hash=sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe \ - --hash=sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b \ - --hash=sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc \ - --hash=sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155 \ - --hash=sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490 \ - --hash=sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d \ - 
--hash=sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2 \ - --hash=sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3 \ - --hash=sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e \ - --hash=sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3 +grpcio==1.68.1 \ + --hash=sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43 \ + --hash=sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161 \ + --hash=sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e \ + --hash=sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76 \ + --hash=sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf \ + --hash=sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613 \ + --hash=sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600 \ + --hash=sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c \ + --hash=sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5 \ + --hash=sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1 \ + --hash=sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515 \ + --hash=sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9 \ + --hash=sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c \ + --hash=sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1 \ + --hash=sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0 \ + --hash=sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054 \ + --hash=sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73 \ + --hash=sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684 \ + --hash=sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d \ + 
--hash=sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c \ + --hash=sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1 \ + --hash=sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e \ + --hash=sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172 \ + --hash=sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5 \ + --hash=sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2 \ + --hash=sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e \ + --hash=sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded \ + --hash=sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a \ + --hash=sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666 \ + --hash=sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea \ + --hash=sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63 \ + --hash=sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330 \ + --hash=sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60 \ + --hash=sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079 \ + --hash=sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f \ + --hash=sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd \ + --hash=sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c \ + --hash=sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1 \ + --hash=sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385 \ + --hash=sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e \ + --hash=sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9 \ + --hash=sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54 \ + --hash=sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad \ + 
--hash=sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a \ + --hash=sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe \ + --hash=sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1 \ + --hash=sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d \ + --hash=sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78 \ + --hash=sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0 \ + --hash=sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e \ + --hash=sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475 \ + --hash=sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c \ + --hash=sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746 \ + --hash=sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad \ + --hash=sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -717,18 +717,18 @@ proto-plus==1.25.0 \ # via # -r requirements.in # google-api-core -protobuf==5.29.0 \ - --hash=sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a \ - --hash=sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20 \ - --hash=sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac \ - --hash=sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069 \ - --hash=sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001 \ - --hash=sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368 \ - --hash=sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1 \ - --hash=sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f \ - --hash=sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43 \ - --hash=sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86 \ - 
--hash=sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2 +protobuf==5.29.2 \ + --hash=sha256:13d6d617a2a9e0e82a88113d7191a1baa1e42c2cc6f5f1398d3b054c8e7e714a \ + --hash=sha256:2d2e674c58a06311c8e99e74be43e7f3a8d1e2b2fdf845eaa347fbd866f23355 \ + --hash=sha256:36000f97ea1e76e8398a3f02936aac2a5d2b111aae9920ec1b769fc4a222c4d9 \ + --hash=sha256:494229ecd8c9009dd71eda5fd57528395d1eacdf307dbece6c12ad0dd09e912e \ + --hash=sha256:842de6d9241134a973aab719ab42b008a18a90f9f07f06ba480df268f86432f9 \ + --hash=sha256:a0c53d78383c851bfa97eb42e3703aefdc96d2036a41482ffd55dc5f529466eb \ + --hash=sha256:b2cc8e8bb7c9326996f0e160137b0861f1a82162502658df2951209d0cb0309e \ + --hash=sha256:b6b0d416bbbb9d4fbf9d0561dbfc4e324fd522f61f7af0fe0f282ab67b22477e \ + --hash=sha256:c12ba8249f5624300cf51c3d0bfe5be71a60c63e4dcf51ffe9a68771d958c851 \ + --hash=sha256:e621a98c0201a7c8afe89d9646859859be97cb22b8bf1d8eacfd90d5bda2eb19 \ + --hash=sha256:fde4554c0e578a5a0bcc9a276339594848d1e89f9ea47b4427c80e5d72f90181 # via # -r requirements.in # google-api-core @@ -753,9 +753,9 @@ pytest==8.3.4 \ --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 # via pytest-asyncio -pytest-asyncio==0.24.0 \ - --hash=sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b \ - --hash=sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276 +pytest-asyncio==0.25.0 \ + --hash=sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609 \ + --hash=sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3 # via -r requirements.in pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ From 52f35753a2ca2349985569b2e8de1a340d616e10 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 18 Dec 2024 13:57:28 -0500 Subject: [PATCH 1232/1339] feat: Add REST Interceptors to support reading metadata 
(#2299) --- .../%sub/services/%service/_shared_macros.j2 | 27 +- .../services/%service/transports/rest.py.j2 | 2 + .../%service/transports/rest_asyncio.py.j2 | 2 + .../gapic/%name_%version/%sub/test_macros.j2 | 4 + .../services/asset_service/transports/rest.py | 525 ++++++++++++++++-- .../unit/gapic/asset_v1/test_asset_service.py | 84 +++ .../iam_credentials/transports/rest.py | 100 +++- .../credentials_v1/test_iam_credentials.py | 16 + .../services/eventarc/transports/rest.py | 450 +++++++++++++-- .../unit/gapic/eventarc_v1/test_eventarc.py | 72 +++ .../services/cloud_redis/transports/rest.py | 275 ++++++++- .../cloud_redis/transports/rest_asyncio.py | 275 ++++++++- .../unit/gapic/redis_v1/test_cloud_redis.py | 88 +++ 13 files changed, 1788 insertions(+), 132 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index f588391bd56f..e96594c01875 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -383,13 +383,36 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: {% endif %} """Post-rpc interceptor for {{ method.name|snake_case }} - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_{{ method.name|snake_case }}_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the {{ service.name }} server but before - it is returned to user code. + it is returned to user code. This `post_{{ method.name|snake_case }}` interceptor runs + before the `post_{{ method.name|snake_case }}_with_metadata` interceptor. 
""" return response + {% if not method.server_streaming %} + {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(self, response: {{method.output.ident}}, {{ client_method_metadata_argument() }}) -> Tuple[{{method.output.ident}}, {{ client_method_metadata_type() }}]: + {% else %} + {{ async_prefix }}def post_{{ method.name|snake_case }}_with_metadata(self, response: rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_argument() }}) -> Tuple[rest_streaming{{ async_suffix }}.{{ async_method_name_prefix }}ResponseIterator, {{ client_method_metadata_type() }}]: {% endif %} + """Post-rpc interceptor for {{ method.name|snake_case }} + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the {{ service.name }} server but before it is returned to user code. + + We recommend only using this `post_{{ method.name|snake_case }}_with_metadata` + interceptor in new development instead of the `post_{{ method.name|snake_case }}` interceptor. + When both interceptors are used, this `post_{{ method.name|snake_case }}_with_metadata` interceptor runs after the + `post_{{ method.name|snake_case }}` interceptor. The (possibly modified) response returned by + `post_{{ method.name|snake_case }}` will be passed to + `post_{{ method.name|snake_case }}_with_metadata`. 
+ """ + return response, metadata + + {% endif %}{# not method.void #} {% endfor %} {% for name, signature in api.mixin_api_signatures.items() %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 3ba65b993cbb..63adba1920dd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -256,6 +256,8 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %}{# method.lro #} {#- TODO(https://github.com/googleapis/gapic-generator-python/issues/2274): Add debug log before intercepting a request #} resp = self._interceptor.post_{{ method.name|snake_case }}(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. 
#} {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 99658a0b1495..79d54e4f2a6e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -217,6 +217,8 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): json_format.Parse(content, pb_resp, ignore_unknown_fields=True) {% endif %}{# if method.server_streaming #} resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. 
#} {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index bd8547f63de5..33b76774c235 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -2218,11 +2218,13 @@ def test_initialize_client_w_{{transport_name}}(): {% endif %} {% if not method.void %} mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}") as post, \ + mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "post_{{method.name|snake_case}}_with_metadata") as post_with_metadata, \ {% endif %} mock.patch.object(transports.{{async_method_prefix}}{{ service.name }}RestInterceptor, "pre_{{ method.name|snake_case }}") as pre: pre.assert_not_called() {% if not method.void %} post.assert_not_called() + post_with_metadata.assert_not_called() {% endif %} {% if method.input.ident.is_proto_plus_type %} pb_message = {{ method.input.ident }}.pb({{ method.input.ident }}()) @@ -2265,6 +2267,7 @@ def test_initialize_client_w_{{transport_name}}(): pre.return_value = request, metadata {% if not method.void %} post.return_value = {{ method.output.ident }}() + post_with_metadata.return_value = {{ method.output.ident }}(), metadata {% endif %} {{await_prefix}}client.{{ method_name }}(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) @@ -2272,6 +2275,7 @@ def test_initialize_client_w_{{transport_name}}(): pre.assert_called_once() {% if not method.void %} post.assert_called_once() + post_with_metadata.assert_called_once() {% endif %} {% endif %}{# end 'grpc' in transport #} {% endmacro%}{# 
inteceptor_class_test #} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 053297f1629b..f222a8a46d87 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -268,12 +268,31 @@ def pre_analyze_iam_policy(self, request: asset_service.AnalyzeIamPolicyRequest, def post_analyze_iam_policy(self, response: asset_service.AnalyzeIamPolicyResponse) -> asset_service.AnalyzeIamPolicyResponse: """Post-rpc interceptor for analyze_iam_policy - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_iam_policy` interceptor runs + before the `post_analyze_iam_policy_with_metadata` interceptor. """ return response + def post_analyze_iam_policy_with_metadata(self, response: asset_service.AnalyzeIamPolicyResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_iam_policy_with_metadata` + interceptor in new development instead of the `post_analyze_iam_policy` interceptor. 
+ When both interceptors are used, this `post_analyze_iam_policy_with_metadata` interceptor runs after the + `post_analyze_iam_policy` interceptor. The (possibly modified) response returned by + `post_analyze_iam_policy` will be passed to + `post_analyze_iam_policy_with_metadata`. + """ + return response, metadata + def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPolicyLongrunningRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeIamPolicyLongrunningRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_iam_policy_longrunning @@ -285,12 +304,31 @@ def pre_analyze_iam_policy_longrunning(self, request: asset_service.AnalyzeIamPo def post_analyze_iam_policy_longrunning(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for analyze_iam_policy_longrunning - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_iam_policy_longrunning_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_iam_policy_longrunning` interceptor runs + before the `post_analyze_iam_policy_longrunning_with_metadata` interceptor. """ return response + def post_analyze_iam_policy_longrunning_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_iam_policy_longrunning + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_analyze_iam_policy_longrunning_with_metadata` + interceptor in new development instead of the `post_analyze_iam_policy_longrunning` interceptor. + When both interceptors are used, this `post_analyze_iam_policy_longrunning_with_metadata` interceptor runs after the + `post_analyze_iam_policy_longrunning` interceptor. The (possibly modified) response returned by + `post_analyze_iam_policy_longrunning` will be passed to + `post_analyze_iam_policy_longrunning_with_metadata`. + """ + return response, metadata + def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_move @@ -302,12 +340,31 @@ def pre_analyze_move(self, request: asset_service.AnalyzeMoveRequest, metadata: def post_analyze_move(self, response: asset_service.AnalyzeMoveResponse) -> asset_service.AnalyzeMoveResponse: """Post-rpc interceptor for analyze_move - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_move_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_move` interceptor runs + before the `post_analyze_move_with_metadata` interceptor. """ return response + def post_analyze_move_with_metadata(self, response: asset_service.AnalyzeMoveResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeMoveResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_move + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_analyze_move_with_metadata` + interceptor in new development instead of the `post_analyze_move` interceptor. + When both interceptors are used, this `post_analyze_move_with_metadata` interceptor runs after the + `post_analyze_move` interceptor. The (possibly modified) response returned by + `post_analyze_move` will be passed to + `post_analyze_move_with_metadata`. + """ + return response, metadata + def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policies @@ -319,12 +376,31 @@ def pre_analyze_org_policies(self, request: asset_service.AnalyzeOrgPoliciesRequ def post_analyze_org_policies(self, response: asset_service.AnalyzeOrgPoliciesResponse) -> asset_service.AnalyzeOrgPoliciesResponse: """Post-rpc interceptor for analyze_org_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policies` interceptor runs + before the `post_analyze_org_policies_with_metadata` interceptor. """ return response + def post_analyze_org_policies_with_metadata(self, response: asset_service.AnalyzeOrgPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_org_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_analyze_org_policies_with_metadata` + interceptor in new development instead of the `post_analyze_org_policies` interceptor. + When both interceptors are used, this `post_analyze_org_policies_with_metadata` interceptor runs after the + `post_analyze_org_policies` interceptor. The (possibly modified) response returned by + `post_analyze_org_policies` will be passed to + `post_analyze_org_policies_with_metadata`. + """ + return response, metadata + def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_assets @@ -336,12 +412,31 @@ def pre_analyze_org_policy_governed_assets(self, request: asset_service.AnalyzeO def post_analyze_org_policy_governed_assets(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse) -> asset_service.AnalyzeOrgPolicyGovernedAssetsResponse: """Post-rpc interceptor for analyze_org_policy_governed_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policy_governed_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policy_governed_assets` interceptor runs + before the `post_analyze_org_policy_governed_assets_with_metadata` interceptor. 
""" return response + def post_analyze_org_policy_governed_assets_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_org_policy_governed_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policy_governed_assets_with_metadata` + interceptor in new development instead of the `post_analyze_org_policy_governed_assets` interceptor. + When both interceptors are used, this `post_analyze_org_policy_governed_assets_with_metadata` interceptor runs after the + `post_analyze_org_policy_governed_assets` interceptor. The (possibly modified) response returned by + `post_analyze_org_policy_governed_assets` will be passed to + `post_analyze_org_policy_governed_assets_with_metadata`. + """ + return response, metadata + def pre_analyze_org_policy_governed_containers(self, request: asset_service.AnalyzeOrgPolicyGovernedContainersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for analyze_org_policy_governed_containers @@ -353,12 +448,31 @@ def pre_analyze_org_policy_governed_containers(self, request: asset_service.Anal def post_analyze_org_policy_governed_containers(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse) -> asset_service.AnalyzeOrgPolicyGovernedContainersResponse: """Post-rpc interceptor for analyze_org_policy_governed_containers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_analyze_org_policy_governed_containers_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_analyze_org_policy_governed_containers` interceptor runs + before the `post_analyze_org_policy_governed_containers_with_metadata` interceptor. """ return response + def post_analyze_org_policy_governed_containers_with_metadata(self, response: asset_service.AnalyzeOrgPolicyGovernedContainersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.AnalyzeOrgPolicyGovernedContainersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for analyze_org_policy_governed_containers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_analyze_org_policy_governed_containers_with_metadata` + interceptor in new development instead of the `post_analyze_org_policy_governed_containers` interceptor. + When both interceptors are used, this `post_analyze_org_policy_governed_containers_with_metadata` interceptor runs after the + `post_analyze_org_policy_governed_containers` interceptor. The (possibly modified) response returned by + `post_analyze_org_policy_governed_containers` will be passed to + `post_analyze_org_policy_governed_containers_with_metadata`. 
+ """ + return response, metadata + def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHistoryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_assets_history @@ -370,12 +484,31 @@ def pre_batch_get_assets_history(self, request: asset_service.BatchGetAssetsHist def post_batch_get_assets_history(self, response: asset_service.BatchGetAssetsHistoryResponse) -> asset_service.BatchGetAssetsHistoryResponse: """Post-rpc interceptor for batch_get_assets_history - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_assets_history_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_assets_history` interceptor runs + before the `post_batch_get_assets_history_with_metadata` interceptor. """ return response + def post_batch_get_assets_history_with_metadata(self, response: asset_service.BatchGetAssetsHistoryResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetAssetsHistoryResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_get_assets_history + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_batch_get_assets_history_with_metadata` + interceptor in new development instead of the `post_batch_get_assets_history` interceptor. + When both interceptors are used, this `post_batch_get_assets_history_with_metadata` interceptor runs after the + `post_batch_get_assets_history` interceptor. 
The (possibly modified) response returned by + `post_batch_get_assets_history` will be passed to + `post_batch_get_assets_history_with_metadata`. + """ + return response, metadata + def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEffectiveIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for batch_get_effective_iam_policies @@ -387,12 +520,31 @@ def pre_batch_get_effective_iam_policies(self, request: asset_service.BatchGetEf def post_batch_get_effective_iam_policies(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse) -> asset_service.BatchGetEffectiveIamPoliciesResponse: """Post-rpc interceptor for batch_get_effective_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_batch_get_effective_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_batch_get_effective_iam_policies` interceptor runs + before the `post_batch_get_effective_iam_policies_with_metadata` interceptor. """ return response + def post_batch_get_effective_iam_policies_with_metadata(self, response: asset_service.BatchGetEffectiveIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.BatchGetEffectiveIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for batch_get_effective_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_batch_get_effective_iam_policies_with_metadata` + interceptor in new development instead of the `post_batch_get_effective_iam_policies` interceptor. + When both interceptors are used, this `post_batch_get_effective_iam_policies_with_metadata` interceptor runs after the + `post_batch_get_effective_iam_policies` interceptor. The (possibly modified) response returned by + `post_batch_get_effective_iam_policies` will be passed to + `post_batch_get_effective_iam_policies_with_metadata`. + """ + return response, metadata + def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_feed @@ -404,12 +556,31 @@ def pre_create_feed(self, request: asset_service.CreateFeedRequest, metadata: Se def post_create_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for create_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_feed` interceptor runs + before the `post_create_feed_with_metadata` interceptor. """ return response + def post_create_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_create_feed_with_metadata` + interceptor in new development instead of the `post_create_feed` interceptor. + When both interceptors are used, this `post_create_feed_with_metadata` interceptor runs after the + `post_create_feed` interceptor. The (possibly modified) response returned by + `post_create_feed` will be passed to + `post_create_feed_with_metadata`. + """ + return response, metadata + def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.CreateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_saved_query @@ -421,12 +592,31 @@ def pre_create_saved_query(self, request: asset_service.CreateSavedQueryRequest, def post_create_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: """Post-rpc interceptor for create_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_create_saved_query` interceptor runs + before the `post_create_saved_query_with_metadata` interceptor. """ return response + def post_create_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. 
+ + We recommend only using this `post_create_saved_query_with_metadata` + interceptor in new development instead of the `post_create_saved_query` interceptor. + When both interceptors are used, this `post_create_saved_query_with_metadata` interceptor runs after the + `post_create_saved_query` interceptor. The (possibly modified) response returned by + `post_create_saved_query` will be passed to + `post_create_saved_query_with_metadata`. + """ + return response, metadata + def pre_delete_feed(self, request: asset_service.DeleteFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.DeleteFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_feed @@ -454,12 +644,31 @@ def pre_export_assets(self, request: asset_service.ExportAssetsRequest, metadata def post_export_assets(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for export_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_export_assets` interceptor runs + before the `post_export_assets_with_metadata` interceptor. """ return response + def post_export_assets_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_export_assets_with_metadata` + interceptor in new development instead of the `post_export_assets` interceptor. 
+ When both interceptors are used, this `post_export_assets_with_metadata` interceptor runs after the + `post_export_assets` interceptor. The (possibly modified) response returned by + `post_export_assets` will be passed to + `post_export_assets_with_metadata`. + """ + return response, metadata + def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_feed @@ -471,12 +680,31 @@ def pre_get_feed(self, request: asset_service.GetFeedRequest, metadata: Sequence def post_get_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for get_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_feed` interceptor runs + before the `post_get_feed_with_metadata` interceptor. """ return response + def post_get_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_get_feed_with_metadata` + interceptor in new development instead of the `post_get_feed` interceptor. + When both interceptors are used, this `post_get_feed_with_metadata` interceptor runs after the + `post_get_feed` interceptor. The (possibly modified) response returned by + `post_get_feed` will be passed to + `post_get_feed_with_metadata`. 
+ """ + return response, metadata + def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.GetSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_saved_query @@ -488,12 +716,31 @@ def pre_get_saved_query(self, request: asset_service.GetSavedQueryRequest, metad def post_get_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: """Post-rpc interceptor for get_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_get_saved_query` interceptor runs + before the `post_get_saved_query_with_metadata` interceptor. """ return response + def post_get_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_get_saved_query_with_metadata` + interceptor in new development instead of the `post_get_saved_query` interceptor. + When both interceptors are used, this `post_get_saved_query_with_metadata` interceptor runs after the + `post_get_saved_query` interceptor. The (possibly modified) response returned by + `post_get_saved_query` will be passed to + `post_get_saved_query_with_metadata`. 
+ """ + return response, metadata + def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_assets @@ -505,12 +752,31 @@ def pre_list_assets(self, request: asset_service.ListAssetsRequest, metadata: Se def post_list_assets(self, response: asset_service.ListAssetsResponse) -> asset_service.ListAssetsResponse: """Post-rpc interceptor for list_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_assets` interceptor runs + before the `post_list_assets_with_metadata` interceptor. """ return response + def post_list_assets_with_metadata(self, response: asset_service.ListAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_assets_with_metadata` + interceptor in new development instead of the `post_list_assets` interceptor. + When both interceptors are used, this `post_list_assets_with_metadata` interceptor runs after the + `post_list_assets` interceptor. The (possibly modified) response returned by + `post_list_assets` will be passed to + `post_list_assets_with_metadata`. 
+ """ + return response, metadata + def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_feeds @@ -522,12 +788,31 @@ def pre_list_feeds(self, request: asset_service.ListFeedsRequest, metadata: Sequ def post_list_feeds(self, response: asset_service.ListFeedsResponse) -> asset_service.ListFeedsResponse: """Post-rpc interceptor for list_feeds - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_feeds_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_feeds` interceptor runs + before the `post_list_feeds_with_metadata` interceptor. """ return response + def post_list_feeds_with_metadata(self, response: asset_service.ListFeedsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListFeedsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_feeds + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_feeds_with_metadata` + interceptor in new development instead of the `post_list_feeds` interceptor. + When both interceptors are used, this `post_list_feeds_with_metadata` interceptor runs after the + `post_list_feeds` interceptor. The (possibly modified) response returned by + `post_list_feeds` will be passed to + `post_list_feeds_with_metadata`. 
+ """ + return response, metadata + def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_saved_queries @@ -539,12 +824,31 @@ def pre_list_saved_queries(self, request: asset_service.ListSavedQueriesRequest, def post_list_saved_queries(self, response: asset_service.ListSavedQueriesResponse) -> asset_service.ListSavedQueriesResponse: """Post-rpc interceptor for list_saved_queries - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_saved_queries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_list_saved_queries` interceptor runs + before the `post_list_saved_queries_with_metadata` interceptor. """ return response + def post_list_saved_queries_with_metadata(self, response: asset_service.ListSavedQueriesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.ListSavedQueriesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_saved_queries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_list_saved_queries_with_metadata` + interceptor in new development instead of the `post_list_saved_queries` interceptor. + When both interceptors are used, this `post_list_saved_queries_with_metadata` interceptor runs after the + `post_list_saved_queries` interceptor. The (possibly modified) response returned by + `post_list_saved_queries` will be passed to + `post_list_saved_queries_with_metadata`. 
+ """ + return response, metadata + def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for query_assets @@ -556,12 +860,31 @@ def pre_query_assets(self, request: asset_service.QueryAssetsRequest, metadata: def post_query_assets(self, response: asset_service.QueryAssetsResponse) -> asset_service.QueryAssetsResponse: """Post-rpc interceptor for query_assets - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_query_assets_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_query_assets` interceptor runs + before the `post_query_assets_with_metadata` interceptor. """ return response + def post_query_assets_with_metadata(self, response: asset_service.QueryAssetsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.QueryAssetsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for query_assets + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_query_assets_with_metadata` + interceptor in new development instead of the `post_query_assets` interceptor. + When both interceptors are used, this `post_query_assets_with_metadata` interceptor runs after the + `post_query_assets` interceptor. The (possibly modified) response returned by + `post_query_assets` will be passed to + `post_query_assets_with_metadata`. 
+ """ + return response, metadata + def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPoliciesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_iam_policies @@ -573,12 +896,31 @@ def pre_search_all_iam_policies(self, request: asset_service.SearchAllIamPolicie def post_search_all_iam_policies(self, response: asset_service.SearchAllIamPoliciesResponse) -> asset_service.SearchAllIamPoliciesResponse: """Post-rpc interceptor for search_all_iam_policies - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_iam_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_iam_policies` interceptor runs + before the `post_search_all_iam_policies_with_metadata` interceptor. """ return response + def post_search_all_iam_policies_with_metadata(self, response: asset_service.SearchAllIamPoliciesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllIamPoliciesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_all_iam_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_iam_policies_with_metadata` + interceptor in new development instead of the `post_search_all_iam_policies` interceptor. + When both interceptors are used, this `post_search_all_iam_policies_with_metadata` interceptor runs after the + `post_search_all_iam_policies` interceptor. 
The (possibly modified) response returned by + `post_search_all_iam_policies` will be passed to + `post_search_all_iam_policies_with_metadata`. + """ + return response, metadata + def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for search_all_resources @@ -590,12 +932,31 @@ def pre_search_all_resources(self, request: asset_service.SearchAllResourcesRequ def post_search_all_resources(self, response: asset_service.SearchAllResourcesResponse) -> asset_service.SearchAllResourcesResponse: """Post-rpc interceptor for search_all_resources - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_search_all_resources_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_search_all_resources` interceptor runs + before the `post_search_all_resources_with_metadata` interceptor. """ return response + def post_search_all_resources_with_metadata(self, response: asset_service.SearchAllResourcesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SearchAllResourcesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for search_all_resources + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_search_all_resources_with_metadata` + interceptor in new development instead of the `post_search_all_resources` interceptor. + When both interceptors are used, this `post_search_all_resources_with_metadata` interceptor runs after the + `post_search_all_resources` interceptor. 
The (possibly modified) response returned by + `post_search_all_resources` will be passed to + `post_search_all_resources_with_metadata`. + """ + return response, metadata + def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateFeedRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_feed @@ -607,12 +968,31 @@ def pre_update_feed(self, request: asset_service.UpdateFeedRequest, metadata: Se def post_update_feed(self, response: asset_service.Feed) -> asset_service.Feed: """Post-rpc interceptor for update_feed - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_feed_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_feed` interceptor runs + before the `post_update_feed_with_metadata` interceptor. """ return response + def post_update_feed_with_metadata(self, response: asset_service.Feed, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.Feed, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_feed + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_feed_with_metadata` + interceptor in new development instead of the `post_update_feed` interceptor. + When both interceptors are used, this `post_update_feed_with_metadata` interceptor runs after the + `post_update_feed` interceptor. The (possibly modified) response returned by + `post_update_feed` will be passed to + `post_update_feed_with_metadata`. 
+ """ + return response, metadata + def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.UpdateSavedQueryRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_saved_query @@ -624,12 +1004,31 @@ def pre_update_saved_query(self, request: asset_service.UpdateSavedQueryRequest, def post_update_saved_query(self, response: asset_service.SavedQuery) -> asset_service.SavedQuery: """Post-rpc interceptor for update_saved_query - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_saved_query_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the AssetService server but before - it is returned to user code. + it is returned to user code. This `post_update_saved_query` interceptor runs + before the `post_update_saved_query_with_metadata` interceptor. """ return response + def post_update_saved_query_with_metadata(self, response: asset_service.SavedQuery, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[asset_service.SavedQuery, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_saved_query + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AssetService server but before it is returned to user code. + + We recommend only using this `post_update_saved_query_with_metadata` + interceptor in new development instead of the `post_update_saved_query` interceptor. + When both interceptors are used, this `post_update_saved_query_with_metadata` interceptor runs after the + `post_update_saved_query` interceptor. The (possibly modified) response returned by + `post_update_saved_query` will be passed to + `post_update_saved_query_with_metadata`. 
+ """ + return response, metadata + def pre_get_operation( self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: @@ -871,6 +1270,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) @@ -994,6 +1395,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1114,6 +1517,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_move(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_move_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.AnalyzeMoveResponse.to_json(response) @@ -1234,6 +1639,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_org_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = 
self._interceptor.post_analyze_org_policies_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) @@ -1355,6 +1762,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) @@ -1476,6 +1885,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) @@ -1593,6 +2004,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_assets_history(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) @@ -1714,6 +2127,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_batch_get_effective_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_batch_get_effective_iam_policies_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) @@ -1842,6 +2257,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) @@ -1964,6 +2381,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_saved_query_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) @@ -2255,6 +2674,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_assets_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -2380,6 +2801,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) 
resp = self._interceptor.post_get_feed(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) @@ -2499,6 +2922,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_saved_query_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) @@ -2616,6 +3041,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.ListAssetsResponse.to_json(response) @@ -2733,6 +3160,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_feeds(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.ListFeedsResponse.to_json(response) @@ -2850,6 +3279,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_saved_queries(resp) + response_metadata = [(k, 
str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_saved_queries_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.ListSavedQueriesResponse.to_json(response) @@ -2970,6 +3401,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_query_assets(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_query_assets_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.QueryAssetsResponse.to_json(response) @@ -3087,6 +3520,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_iam_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) @@ -3204,6 +3639,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_search_all_resources(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.SearchAllResourcesResponse.to_json(response) @@ -3332,6 +3769,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_feed(resp) + 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.Feed.to_json(response) @@ -3454,6 +3893,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_saved_query(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_saved_query_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = asset_service.SavedQuery.to_json(response) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 61e40a62eed5..b27b49cd3011 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -14196,9 +14196,11 @@ def test_export_assets_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_export_assets_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_export_assets") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ExportAssetsRequest.pb(asset_service.ExportAssetsRequest()) 
transcode.return_value = { "method": "post", @@ -14220,11 +14222,13 @@ def test_export_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_assets_rest_bad_request(request_type=asset_service.ListAssetsRequest): @@ -14298,9 +14302,11 @@ def test_list_assets_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_assets_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_assets") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListAssetsRequest.pb(asset_service.ListAssetsRequest()) transcode.return_value = { "method": "post", @@ -14322,11 +14328,13 @@ def test_list_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListAssetsResponse() + post_with_metadata.return_value = asset_service.ListAssetsResponse(), metadata client.list_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_batch_get_assets_history_rest_bad_request(request_type=asset_service.BatchGetAssetsHistoryRequest): @@ -14398,9 +14406,11 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ 
mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_assets_history_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_assets_history") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.BatchGetAssetsHistoryRequest.pb(asset_service.BatchGetAssetsHistoryRequest()) transcode.return_value = { "method": "post", @@ -14422,11 +14432,13 @@ def test_batch_get_assets_history_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetAssetsHistoryResponse() + post_with_metadata.return_value = asset_service.BatchGetAssetsHistoryResponse(), metadata client.batch_get_assets_history(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_feed_rest_bad_request(request_type=asset_service.CreateFeedRequest): @@ -14508,9 +14520,11 @@ def test_create_feed_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_feed_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_feed") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.CreateFeedRequest.pb(asset_service.CreateFeedRequest()) transcode.return_value = { "method": "post", @@ -14532,11 +14546,13 @@ def 
test_create_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.create_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_feed_rest_bad_request(request_type=asset_service.GetFeedRequest): @@ -14618,9 +14634,11 @@ def test_get_feed_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_feed_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_feed") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.GetFeedRequest.pb(asset_service.GetFeedRequest()) transcode.return_value = { "method": "post", @@ -14642,11 +14660,13 @@ def test_get_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.get_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_feeds_rest_bad_request(request_type=asset_service.ListFeedsRequest): @@ -14718,9 +14738,11 @@ def test_list_feeds_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds") as post, \ + 
mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_feeds_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_feeds") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListFeedsRequest.pb(asset_service.ListFeedsRequest()) transcode.return_value = { "method": "post", @@ -14742,11 +14764,13 @@ def test_list_feeds_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListFeedsResponse() + post_with_metadata.return_value = asset_service.ListFeedsResponse(), metadata client.list_feeds(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_feed_rest_bad_request(request_type=asset_service.UpdateFeedRequest): @@ -14828,9 +14852,11 @@ def test_update_feed_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_feed_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_feed") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.UpdateFeedRequest.pb(asset_service.UpdateFeedRequest()) transcode.return_value = { "method": "post", @@ -14852,11 +14878,13 @@ def test_update_feed_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.Feed() + post_with_metadata.return_value = asset_service.Feed(), metadata client.update_feed(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_feed_rest_bad_request(request_type=asset_service.DeleteFeedRequest): @@ -15020,9 +15048,11 @@ def test_search_all_resources_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_resources_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_resources") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.SearchAllResourcesRequest.pb(asset_service.SearchAllResourcesRequest()) transcode.return_value = { "method": "post", @@ -15044,11 +15074,13 @@ def test_search_all_resources_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllResourcesResponse() + post_with_metadata.return_value = asset_service.SearchAllResourcesResponse(), metadata client.search_all_resources(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_search_all_iam_policies_rest_bad_request(request_type=asset_service.SearchAllIamPoliciesRequest): @@ -15122,9 +15154,11 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_search_all_iam_policies_with_metadata") as post_with_metadata, \ 
mock.patch.object(transports.AssetServiceRestInterceptor, "pre_search_all_iam_policies") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.SearchAllIamPoliciesRequest.pb(asset_service.SearchAllIamPoliciesRequest()) transcode.return_value = { "method": "post", @@ -15146,11 +15180,13 @@ def test_search_all_iam_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SearchAllIamPoliciesResponse() + post_with_metadata.return_value = asset_service.SearchAllIamPoliciesResponse(), metadata client.search_all_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_iam_policy_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyRequest): @@ -15224,9 +15260,11 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeIamPolicyRequest.pb(asset_service.AnalyzeIamPolicyRequest()) transcode.return_value = { "method": "post", @@ -15248,11 +15286,13 @@ def test_analyze_iam_policy_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeIamPolicyResponse() + post_with_metadata.return_value = asset_service.AnalyzeIamPolicyResponse(), metadata client.analyze_iam_policy(request, 
metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_iam_policy_longrunning_rest_bad_request(request_type=asset_service.AnalyzeIamPolicyLongrunningRequest): @@ -15321,9 +15361,11 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_iam_policy_longrunning_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_iam_policy_longrunning") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeIamPolicyLongrunningRequest.pb(asset_service.AnalyzeIamPolicyLongrunningRequest()) transcode.return_value = { "method": "post", @@ -15345,11 +15387,13 @@ def test_analyze_iam_policy_longrunning_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.analyze_iam_policy_longrunning(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_move_rest_bad_request(request_type=asset_service.AnalyzeMoveRequest): @@ -15421,9 +15465,11 @@ def test_analyze_move_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move") as post, \ + 
mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_move_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_move") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeMoveRequest.pb(asset_service.AnalyzeMoveRequest()) transcode.return_value = { "method": "post", @@ -15445,11 +15491,13 @@ def test_analyze_move_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeMoveResponse() + post_with_metadata.return_value = asset_service.AnalyzeMoveResponse(), metadata client.analyze_move(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_query_assets_rest_bad_request(request_type=asset_service.QueryAssetsRequest): @@ -15525,9 +15573,11 @@ def test_query_assets_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_query_assets_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_query_assets") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.QueryAssetsRequest.pb(asset_service.QueryAssetsRequest()) transcode.return_value = { "method": "post", @@ -15549,11 +15599,13 @@ def test_query_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.QueryAssetsResponse() + post_with_metadata.return_value = asset_service.QueryAssetsResponse(), metadata client.query_assets(request, metadata=[("key", 
"val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_saved_query_rest_bad_request(request_type=asset_service.CreateSavedQueryRequest): @@ -15697,9 +15749,11 @@ def test_create_saved_query_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_create_saved_query_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_create_saved_query") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.CreateSavedQueryRequest.pb(asset_service.CreateSavedQueryRequest()) transcode.return_value = { "method": "post", @@ -15721,11 +15775,13 @@ def test_create_saved_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SavedQuery() + post_with_metadata.return_value = asset_service.SavedQuery(), metadata client.create_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_saved_query_rest_bad_request(request_type=asset_service.GetSavedQueryRequest): @@ -15805,9 +15861,11 @@ def test_get_saved_query_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_get_saved_query_with_metadata") as post_with_metadata, \ 
mock.patch.object(transports.AssetServiceRestInterceptor, "pre_get_saved_query") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.GetSavedQueryRequest.pb(asset_service.GetSavedQueryRequest()) transcode.return_value = { "method": "post", @@ -15829,11 +15887,13 @@ def test_get_saved_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SavedQuery() + post_with_metadata.return_value = asset_service.SavedQuery(), metadata client.get_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_saved_queries_rest_bad_request(request_type=asset_service.ListSavedQueriesRequest): @@ -15907,9 +15967,11 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_list_saved_queries_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_list_saved_queries") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.ListSavedQueriesRequest.pb(asset_service.ListSavedQueriesRequest()) transcode.return_value = { "method": "post", @@ -15931,11 +15993,13 @@ def test_list_saved_queries_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.ListSavedQueriesResponse() + post_with_metadata.return_value = asset_service.ListSavedQueriesResponse(), metadata client.list_saved_queries(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_saved_query_rest_bad_request(request_type=asset_service.UpdateSavedQueryRequest): @@ -16079,9 +16143,11 @@ def test_update_saved_query_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_update_saved_query_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_update_saved_query") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.UpdateSavedQueryRequest.pb(asset_service.UpdateSavedQueryRequest()) transcode.return_value = { "method": "post", @@ -16103,11 +16169,13 @@ def test_update_saved_query_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.SavedQuery() + post_with_metadata.return_value = asset_service.SavedQuery(), metadata client.update_saved_query(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_saved_query_rest_bad_request(request_type=asset_service.DeleteSavedQueryRequest): @@ -16269,9 +16337,11 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_batch_get_effective_iam_policies_with_metadata") as post_with_metadata, \ 
mock.patch.object(transports.AssetServiceRestInterceptor, "pre_batch_get_effective_iam_policies") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.BatchGetEffectiveIamPoliciesRequest.pb(asset_service.BatchGetEffectiveIamPoliciesRequest()) transcode.return_value = { "method": "post", @@ -16293,11 +16363,13 @@ def test_batch_get_effective_iam_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse() + post_with_metadata.return_value = asset_service.BatchGetEffectiveIamPoliciesResponse(), metadata client.batch_get_effective_iam_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_org_policies_rest_bad_request(request_type=asset_service.AnalyzeOrgPoliciesRequest): @@ -16371,9 +16443,11 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policies_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policies") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeOrgPoliciesRequest.pb(asset_service.AnalyzeOrgPoliciesRequest()) transcode.return_value = { "method": "post", @@ -16395,11 +16469,13 @@ def test_analyze_org_policies_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPoliciesResponse() + post_with_metadata.return_value = 
asset_service.AnalyzeOrgPoliciesResponse(), metadata client.analyze_org_policies(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_org_policy_governed_containers_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedContainersRequest): @@ -16473,9 +16549,11 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_containers_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_containers") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeOrgPolicyGovernedContainersRequest.pb(asset_service.AnalyzeOrgPolicyGovernedContainersRequest()) transcode.return_value = { "method": "post", @@ -16497,11 +16575,13 @@ def test_analyze_org_policy_governed_containers_rest_interceptors(null_intercept ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse() + post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedContainersResponse(), metadata client.analyze_org_policy_governed_containers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_analyze_org_policy_governed_assets_rest_bad_request(request_type=asset_service.AnalyzeOrgPolicyGovernedAssetsRequest): @@ -16575,9 +16655,11 @@ def 
test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets") as post, \ + mock.patch.object(transports.AssetServiceRestInterceptor, "post_analyze_org_policy_governed_assets_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AssetServiceRestInterceptor, "pre_analyze_org_policy_governed_assets") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = asset_service.AnalyzeOrgPolicyGovernedAssetsRequest.pb(asset_service.AnalyzeOrgPolicyGovernedAssetsRequest()) transcode.return_value = { "method": "post", @@ -16599,11 +16681,13 @@ def test_analyze_org_policy_governed_assets_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse() + post_with_metadata.return_value = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse(), metadata client.analyze_org_policy_governed_assets(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 9cf97b8751e3..9dc7959ac59a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -121,12 +121,31 @@ def pre_generate_access_token(self, request: common.GenerateAccessTokenRequest, def post_generate_access_token(self, response: common.GenerateAccessTokenResponse) -> common.GenerateAccessTokenResponse: """Post-rpc interceptor for generate_access_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_access_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_generate_access_token` interceptor runs + before the `post_generate_access_token_with_metadata` interceptor. """ return response + def post_generate_access_token_with_metadata(self, response: common.GenerateAccessTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateAccessTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for generate_access_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_generate_access_token_with_metadata` + interceptor in new development instead of the `post_generate_access_token` interceptor. + When both interceptors are used, this `post_generate_access_token_with_metadata` interceptor runs after the + `post_generate_access_token` interceptor. The (possibly modified) response returned by + `post_generate_access_token` will be passed to + `post_generate_access_token_with_metadata`. 
+ """ + return response, metadata + def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for generate_id_token @@ -138,12 +157,31 @@ def pre_generate_id_token(self, request: common.GenerateIdTokenRequest, metadata def post_generate_id_token(self, response: common.GenerateIdTokenResponse) -> common.GenerateIdTokenResponse: """Post-rpc interceptor for generate_id_token - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_generate_id_token_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_generate_id_token` interceptor runs + before the `post_generate_id_token_with_metadata` interceptor. """ return response + def post_generate_id_token_with_metadata(self, response: common.GenerateIdTokenResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.GenerateIdTokenResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for generate_id_token + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_generate_id_token_with_metadata` + interceptor in new development instead of the `post_generate_id_token` interceptor. + When both interceptors are used, this `post_generate_id_token_with_metadata` interceptor runs after the + `post_generate_id_token` interceptor. The (possibly modified) response returned by + `post_generate_id_token` will be passed to + `post_generate_id_token_with_metadata`. 
+ """ + return response, metadata + def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_blob @@ -155,12 +193,31 @@ def pre_sign_blob(self, request: common.SignBlobRequest, metadata: Sequence[Tupl def post_sign_blob(self, response: common.SignBlobResponse) -> common.SignBlobResponse: """Post-rpc interceptor for sign_blob - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sign_blob_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_sign_blob` interceptor runs + before the `post_sign_blob_with_metadata` interceptor. """ return response + def post_sign_blob_with_metadata(self, response: common.SignBlobResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignBlobResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for sign_blob + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_sign_blob_with_metadata` + interceptor in new development instead of the `post_sign_blob` interceptor. + When both interceptors are used, this `post_sign_blob_with_metadata` interceptor runs after the + `post_sign_blob` interceptor. The (possibly modified) response returned by + `post_sign_blob` will be passed to + `post_sign_blob_with_metadata`. 
+ """ + return response, metadata + def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for sign_jwt @@ -172,12 +229,31 @@ def pre_sign_jwt(self, request: common.SignJwtRequest, metadata: Sequence[Tuple[ def post_sign_jwt(self, response: common.SignJwtResponse) -> common.SignJwtResponse: """Post-rpc interceptor for sign_jwt - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_sign_jwt_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the IAMCredentials server but before - it is returned to user code. + it is returned to user code. This `post_sign_jwt` interceptor runs + before the `post_sign_jwt_with_metadata` interceptor. """ return response + def post_sign_jwt_with_metadata(self, response: common.SignJwtResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[common.SignJwtResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for sign_jwt + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the IAMCredentials server but before it is returned to user code. + + We recommend only using this `post_sign_jwt_with_metadata` + interceptor in new development instead of the `post_sign_jwt` interceptor. + When both interceptors are used, this `post_sign_jwt_with_metadata` interceptor runs after the + `post_sign_jwt` interceptor. The (possibly modified) response returned by + `post_sign_jwt` will be passed to + `post_sign_jwt_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class IAMCredentialsRestStub: @@ -375,6 +451,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_access_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_access_token_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = common.GenerateAccessTokenResponse.to_json(response) @@ -495,6 +573,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_generate_id_token(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_generate_id_token_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = common.GenerateIdTokenResponse.to_json(response) @@ -615,6 +695,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_sign_blob(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sign_blob_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = common.SignBlobResponse.to_json(response) @@ -735,6 +817,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_sign_jwt(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_sign_jwt_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = 
common.SignJwtResponse.to_json(response) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 715989b7cd15..d31e01c82ed9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -3162,9 +3162,11 @@ def test_generate_access_token_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_access_token_with_metadata") as post_with_metadata, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_access_token") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.GenerateAccessTokenRequest.pb(common.GenerateAccessTokenRequest()) transcode.return_value = { "method": "post", @@ -3186,11 +3188,13 @@ def test_generate_access_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.GenerateAccessTokenResponse() + post_with_metadata.return_value = common.GenerateAccessTokenResponse(), metadata client.generate_access_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_generate_id_token_rest_bad_request(request_type=common.GenerateIdTokenRequest): @@ -3264,9 +3268,11 @@ def test_generate_id_token_rest_interceptors(null_interceptor): 
with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_generate_id_token_with_metadata") as post_with_metadata, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_generate_id_token") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.GenerateIdTokenRequest.pb(common.GenerateIdTokenRequest()) transcode.return_value = { "method": "post", @@ -3288,11 +3294,13 @@ def test_generate_id_token_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.GenerateIdTokenResponse() + post_with_metadata.return_value = common.GenerateIdTokenResponse(), metadata client.generate_id_token(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_sign_blob_rest_bad_request(request_type=common.SignBlobRequest): @@ -3368,9 +3376,11 @@ def test_sign_blob_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_blob_with_metadata") as post_with_metadata, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_blob") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.SignBlobRequest.pb(common.SignBlobRequest()) transcode.return_value = { "method": "post", @@ -3392,11 +3402,13 @@ def test_sign_blob_rest_interceptors(null_interceptor): ] pre.return_value = 
request, metadata post.return_value = common.SignBlobResponse() + post_with_metadata.return_value = common.SignBlobResponse(), metadata client.sign_blob(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_sign_jwt_rest_bad_request(request_type=common.SignJwtRequest): @@ -3472,9 +3484,11 @@ def test_sign_jwt_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt") as post, \ + mock.patch.object(transports.IAMCredentialsRestInterceptor, "post_sign_jwt_with_metadata") as post_with_metadata, \ mock.patch.object(transports.IAMCredentialsRestInterceptor, "pre_sign_jwt") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = common.SignJwtRequest.pb(common.SignJwtRequest()) transcode.return_value = { "method": "post", @@ -3496,11 +3510,13 @@ def test_sign_jwt_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.SignJwtResponse() + post_with_metadata.return_value = common.SignJwtResponse(), metadata client.sign_jwt(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_initialize_client_w_rest(): client = IAMCredentialsClient( diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 583ee430103e..32211ddf9e74 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -244,12 +244,31 @@ def pre_create_channel(self, request: eventarc.CreateChannelRequest, metadata: S def post_create_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_channel` interceptor runs + before the `post_create_channel_with_metadata` interceptor. """ return response + def post_create_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_channel_with_metadata` + interceptor in new development instead of the `post_create_channel` interceptor. + When both interceptors are used, this `post_create_channel_with_metadata` interceptor runs after the + `post_create_channel` interceptor. The (possibly modified) response returned by + `post_create_channel` will be passed to + `post_create_channel_with_metadata`. 
+ """ + return response, metadata + def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_channel_connection @@ -261,12 +280,31 @@ def pre_create_channel_connection(self, request: eventarc.CreateChannelConnectio def post_create_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_channel_connection` interceptor runs + before the `post_create_channel_connection_with_metadata` interceptor. """ return response + def post_create_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_channel_connection_with_metadata` + interceptor in new development instead of the `post_create_channel_connection` interceptor. + When both interceptors are used, this `post_create_channel_connection_with_metadata` interceptor runs after the + `post_create_channel_connection` interceptor. The (possibly modified) response returned by + `post_create_channel_connection` will be passed to + `post_create_channel_connection_with_metadata`. 
+ """ + return response, metadata + def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.CreateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_trigger @@ -278,12 +316,31 @@ def pre_create_trigger(self, request: eventarc.CreateTriggerRequest, metadata: S def post_create_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_create_trigger` interceptor runs + before the `post_create_trigger_with_metadata` interceptor. """ return response + def post_create_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_create_trigger_with_metadata` + interceptor in new development instead of the `post_create_trigger` interceptor. + When both interceptors are used, this `post_create_trigger_with_metadata` interceptor runs after the + `post_create_trigger` interceptor. The (possibly modified) response returned by + `post_create_trigger` will be passed to + `post_create_trigger_with_metadata`. 
+ """ + return response, metadata + def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel @@ -295,12 +352,31 @@ def pre_delete_channel(self, request: eventarc.DeleteChannelRequest, metadata: S def post_delete_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_channel` interceptor runs + before the `post_delete_channel_with_metadata` interceptor. """ return response + def post_delete_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_channel_with_metadata` + interceptor in new development instead of the `post_delete_channel` interceptor. + When both interceptors are used, this `post_delete_channel_with_metadata` interceptor runs after the + `post_delete_channel` interceptor. The (possibly modified) response returned by + `post_delete_channel` will be passed to + `post_delete_channel_with_metadata`. 
+ """ + return response, metadata + def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_channel_connection @@ -312,12 +388,31 @@ def pre_delete_channel_connection(self, request: eventarc.DeleteChannelConnectio def post_delete_channel_connection(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_channel_connection` interceptor runs + before the `post_delete_channel_connection_with_metadata` interceptor. """ return response + def post_delete_channel_connection_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_channel_connection_with_metadata` + interceptor in new development instead of the `post_delete_channel_connection` interceptor. + When both interceptors are used, this `post_delete_channel_connection_with_metadata` interceptor runs after the + `post_delete_channel_connection` interceptor. The (possibly modified) response returned by + `post_delete_channel_connection` will be passed to + `post_delete_channel_connection_with_metadata`. 
+ """ + return response, metadata + def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.DeleteTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_trigger @@ -329,12 +424,31 @@ def pre_delete_trigger(self, request: eventarc.DeleteTriggerRequest, metadata: S def post_delete_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_delete_trigger` interceptor runs + before the `post_delete_trigger_with_metadata` interceptor. """ return response + def post_delete_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_delete_trigger_with_metadata` + interceptor in new development instead of the `post_delete_trigger` interceptor. + When both interceptors are used, this `post_delete_trigger_with_metadata` interceptor runs after the + `post_delete_trigger` interceptor. The (possibly modified) response returned by + `post_delete_trigger` will be passed to + `post_delete_trigger_with_metadata`. 
+ """ + return response, metadata + def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel @@ -346,12 +460,31 @@ def pre_get_channel(self, request: eventarc.GetChannelRequest, metadata: Sequenc def post_get_channel(self, response: channel.Channel) -> channel.Channel: """Post-rpc interceptor for get_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_channel` interceptor runs + before the `post_get_channel_with_metadata` interceptor. """ return response + def post_get_channel_with_metadata(self, response: channel.Channel, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel.Channel, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_with_metadata` + interceptor in new development instead of the `post_get_channel` interceptor. + When both interceptors are used, this `post_get_channel_with_metadata` interceptor runs after the + `post_get_channel` interceptor. The (possibly modified) response returned by + `post_get_channel` will be passed to + `post_get_channel_with_metadata`. 
+ """ + return response, metadata + def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetChannelConnectionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_channel_connection @@ -363,12 +496,31 @@ def pre_get_channel_connection(self, request: eventarc.GetChannelConnectionReque def post_get_channel_connection(self, response: channel_connection.ChannelConnection) -> channel_connection.ChannelConnection: """Post-rpc interceptor for get_channel_connection - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_channel_connection_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_channel_connection` interceptor runs + before the `post_get_channel_connection_with_metadata` interceptor. """ return response + def post_get_channel_connection_with_metadata(self, response: channel_connection.ChannelConnection, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[channel_connection.ChannelConnection, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_channel_connection + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_channel_connection_with_metadata` + interceptor in new development instead of the `post_get_channel_connection` interceptor. + When both interceptors are used, this `post_get_channel_connection_with_metadata` interceptor runs after the + `post_get_channel_connection` interceptor. The (possibly modified) response returned by + `post_get_channel_connection` will be passed to + `post_get_channel_connection_with_metadata`. 
+ """ + return response, metadata + def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_google_channel_config @@ -380,12 +532,31 @@ def pre_get_google_channel_config(self, request: eventarc.GetGoogleChannelConfig def post_get_google_channel_config(self, response: google_channel_config.GoogleChannelConfig) -> google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for get_google_channel_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_google_channel_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_google_channel_config` interceptor runs + before the `post_get_google_channel_config_with_metadata` interceptor. """ return response + def post_get_google_channel_config_with_metadata(self, response: google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_google_channel_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_google_channel_config_with_metadata` + interceptor in new development instead of the `post_get_google_channel_config` interceptor. + When both interceptors are used, this `post_get_google_channel_config_with_metadata` interceptor runs after the + `post_get_google_channel_config` interceptor. 
The (possibly modified) response returned by + `post_get_google_channel_config` will be passed to + `post_get_google_channel_config_with_metadata`. + """ + return response, metadata + def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetProviderRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_provider @@ -397,12 +568,31 @@ def pre_get_provider(self, request: eventarc.GetProviderRequest, metadata: Seque def post_get_provider(self, response: discovery.Provider) -> discovery.Provider: """Post-rpc interceptor for get_provider - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_provider_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_provider` interceptor runs + before the `post_get_provider_with_metadata` interceptor. """ return response + def post_get_provider_with_metadata(self, response: discovery.Provider, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[discovery.Provider, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_provider + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_provider_with_metadata` + interceptor in new development instead of the `post_get_provider` interceptor. + When both interceptors are used, this `post_get_provider_with_metadata` interceptor runs after the + `post_get_provider` interceptor. The (possibly modified) response returned by + `post_get_provider` will be passed to + `post_get_provider_with_metadata`. 
+ """ + return response, metadata + def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.GetTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_trigger @@ -414,12 +604,31 @@ def pre_get_trigger(self, request: eventarc.GetTriggerRequest, metadata: Sequenc def post_get_trigger(self, response: trigger.Trigger) -> trigger.Trigger: """Post-rpc interceptor for get_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_get_trigger` interceptor runs + before the `post_get_trigger_with_metadata` interceptor. """ return response + def post_get_trigger_with_metadata(self, response: trigger.Trigger, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[trigger.Trigger, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_get_trigger_with_metadata` + interceptor in new development instead of the `post_get_trigger` interceptor. + When both interceptors are used, this `post_get_trigger_with_metadata` interceptor runs after the + `post_get_trigger` interceptor. The (possibly modified) response returned by + `post_get_trigger` will be passed to + `post_get_trigger_with_metadata`. 
+ """ + return response, metadata + def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channel_connections @@ -431,12 +640,31 @@ def pre_list_channel_connections(self, request: eventarc.ListChannelConnectionsR def post_list_channel_connections(self, response: eventarc.ListChannelConnectionsResponse) -> eventarc.ListChannelConnectionsResponse: """Post-rpc interceptor for list_channel_connections - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channel_connections_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_channel_connections` interceptor runs + before the `post_list_channel_connections_with_metadata` interceptor. """ return response + def post_list_channel_connections_with_metadata(self, response: eventarc.ListChannelConnectionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelConnectionsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_channel_connections + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_channel_connections_with_metadata` + interceptor in new development instead of the `post_list_channel_connections` interceptor. + When both interceptors are used, this `post_list_channel_connections_with_metadata` interceptor runs after the + `post_list_channel_connections` interceptor. 
The (possibly modified) response returned by + `post_list_channel_connections` will be passed to + `post_list_channel_connections_with_metadata`. + """ + return response, metadata + def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_channels @@ -448,12 +676,31 @@ def pre_list_channels(self, request: eventarc.ListChannelsRequest, metadata: Seq def post_list_channels(self, response: eventarc.ListChannelsResponse) -> eventarc.ListChannelsResponse: """Post-rpc interceptor for list_channels - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_channels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_channels` interceptor runs + before the `post_list_channels_with_metadata` interceptor. """ return response + def post_list_channels_with_metadata(self, response: eventarc.ListChannelsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListChannelsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_channels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_channels_with_metadata` + interceptor in new development instead of the `post_list_channels` interceptor. + When both interceptors are used, this `post_list_channels_with_metadata` interceptor runs after the + `post_list_channels` interceptor. The (possibly modified) response returned by + `post_list_channels` will be passed to + `post_list_channels_with_metadata`. 
+ """ + return response, metadata + def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_providers @@ -465,12 +712,31 @@ def pre_list_providers(self, request: eventarc.ListProvidersRequest, metadata: S def post_list_providers(self, response: eventarc.ListProvidersResponse) -> eventarc.ListProvidersResponse: """Post-rpc interceptor for list_providers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_providers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_providers` interceptor runs + before the `post_list_providers_with_metadata` interceptor. """ return response + def post_list_providers_with_metadata(self, response: eventarc.ListProvidersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListProvidersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_providers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_providers_with_metadata` + interceptor in new development instead of the `post_list_providers` interceptor. + When both interceptors are used, this `post_list_providers_with_metadata` interceptor runs after the + `post_list_providers` interceptor. The (possibly modified) response returned by + `post_list_providers` will be passed to + `post_list_providers_with_metadata`. 
+ """ + return response, metadata + def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_triggers @@ -482,12 +748,31 @@ def pre_list_triggers(self, request: eventarc.ListTriggersRequest, metadata: Seq def post_list_triggers(self, response: eventarc.ListTriggersResponse) -> eventarc.ListTriggersResponse: """Post-rpc interceptor for list_triggers - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_triggers_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_list_triggers` interceptor runs + before the `post_list_triggers_with_metadata` interceptor. """ return response + def post_list_triggers_with_metadata(self, response: eventarc.ListTriggersResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.ListTriggersResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_triggers + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_list_triggers_with_metadata` + interceptor in new development instead of the `post_list_triggers` interceptor. + When both interceptors are used, this `post_list_triggers_with_metadata` interceptor runs after the + `post_list_triggers` interceptor. The (possibly modified) response returned by + `post_list_triggers` will be passed to + `post_list_triggers_with_metadata`. 
+ """ + return response, metadata + def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateChannelRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_channel @@ -499,12 +784,31 @@ def pre_update_channel(self, request: eventarc.UpdateChannelRequest, metadata: S def post_update_channel(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_channel - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_channel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_channel` interceptor runs + before the `post_update_channel_with_metadata` interceptor. """ return response + def post_update_channel_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_channel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_channel_with_metadata` + interceptor in new development instead of the `post_update_channel` interceptor. + When both interceptors are used, this `post_update_channel_with_metadata` interceptor runs after the + `post_update_channel` interceptor. The (possibly modified) response returned by + `post_update_channel` will be passed to + `post_update_channel_with_metadata`. 
+ """ + return response, metadata + def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannelConfigRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateGoogleChannelConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_google_channel_config @@ -516,12 +820,31 @@ def pre_update_google_channel_config(self, request: eventarc.UpdateGoogleChannel def post_update_google_channel_config(self, response: gce_google_channel_config.GoogleChannelConfig) -> gce_google_channel_config.GoogleChannelConfig: """Post-rpc interceptor for update_google_channel_config - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_google_channel_config_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_google_channel_config` interceptor runs + before the `post_update_google_channel_config_with_metadata` interceptor. """ return response + def post_update_google_channel_config_with_metadata(self, response: gce_google_channel_config.GoogleChannelConfig, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[gce_google_channel_config.GoogleChannelConfig, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_google_channel_config + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_google_channel_config_with_metadata` + interceptor in new development instead of the `post_update_google_channel_config` interceptor. + When both interceptors are used, this `post_update_google_channel_config_with_metadata` interceptor runs after the + `post_update_google_channel_config` interceptor. 
The (possibly modified) response returned by + `post_update_google_channel_config` will be passed to + `post_update_google_channel_config_with_metadata`. + """ + return response, metadata + def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[eventarc.UpdateTriggerRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_trigger @@ -533,12 +856,31 @@ def pre_update_trigger(self, request: eventarc.UpdateTriggerRequest, metadata: S def post_update_trigger(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_trigger - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_trigger_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Eventarc server but before - it is returned to user code. + it is returned to user code. This `post_update_trigger` interceptor runs + before the `post_update_trigger_with_metadata` interceptor. """ return response + def post_update_trigger_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_trigger + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Eventarc server but before it is returned to user code. + + We recommend only using this `post_update_trigger_with_metadata` + interceptor in new development instead of the `post_update_trigger` interceptor. + When both interceptors are used, this `post_update_trigger_with_metadata` interceptor runs after the + `post_update_trigger` interceptor. The (possibly modified) response returned by + `post_update_trigger` will be passed to + `post_update_trigger_with_metadata`. 
+ """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: @@ -971,6 +1313,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1093,6 +1437,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_channel_connection_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1215,6 +1561,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_trigger_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1334,6 +1682,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_with_metadata(resp, response_metadata) if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1453,6 +1803,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_channel_connection_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1572,6 +1924,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_trigger_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1698,6 +2052,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = channel.Channel.to_json(response) @@ -1823,6 +2179,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_channel_connection(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_channel_connection_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER 
try: response_payload = channel_connection.ChannelConnection.to_json(response) @@ -1949,6 +2307,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_google_channel_config_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = google_channel_config.GoogleChannelConfig.to_json(response) @@ -2069,6 +2429,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_provider(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_provider_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = discovery.Provider.to_json(response) @@ -2189,6 +2551,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_trigger_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = trigger.Trigger.to_json(response) @@ -2309,6 +2673,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_channel_connections(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_channel_connections_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = 
eventarc.ListChannelConnectionsResponse.to_json(response) @@ -2427,6 +2793,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_channels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_channels_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = eventarc.ListChannelsResponse.to_json(response) @@ -2545,6 +2913,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_providers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_providers_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = eventarc.ListProvidersResponse.to_json(response) @@ -2663,6 +3033,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_triggers(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_triggers_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = eventarc.ListTriggersResponse.to_json(response) @@ -2785,6 +3157,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_channel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_channel_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -2915,6 
+3289,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_google_channel_config(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_google_channel_config_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) @@ -3037,6 +3413,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_trigger(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_trigger_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 42bc23df835b..6473ef989ec4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -11842,9 +11842,11 @@ def test_get_trigger_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_trigger_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_get_trigger") as pre: 
pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetTriggerRequest.pb(eventarc.GetTriggerRequest()) transcode.return_value = { "method": "post", @@ -11866,11 +11868,13 @@ def test_get_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = trigger.Trigger() + post_with_metadata.return_value = trigger.Trigger(), metadata client.get_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_triggers_rest_bad_request(request_type=eventarc.ListTriggersRequest): @@ -11946,9 +11950,11 @@ def test_list_triggers_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_triggers_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_list_triggers") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListTriggersRequest.pb(eventarc.ListTriggersRequest()) transcode.return_value = { "method": "post", @@ -11970,11 +11976,13 @@ def test_list_triggers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListTriggersResponse() + post_with_metadata.return_value = eventarc.ListTriggersResponse(), metadata client.list_triggers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_trigger_rest_bad_request(request_type=eventarc.CreateTriggerRequest): @@ -12107,9 +12115,11 @@ def 
test_create_trigger_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_trigger_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_create_trigger") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateTriggerRequest.pb(eventarc.CreateTriggerRequest()) transcode.return_value = { "method": "post", @@ -12131,11 +12141,13 @@ def test_create_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_trigger_rest_bad_request(request_type=eventarc.UpdateTriggerRequest): @@ -12268,9 +12280,11 @@ def test_update_trigger_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_trigger_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_update_trigger") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.UpdateTriggerRequest.pb(eventarc.UpdateTriggerRequest()) transcode.return_value = { "method": "post", @@ -12292,11 +12306,13 @@ def 
test_update_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_trigger_rest_bad_request(request_type=eventarc.DeleteTriggerRequest): @@ -12365,9 +12381,11 @@ def test_delete_trigger_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_trigger_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_trigger") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteTriggerRequest.pb(eventarc.DeleteTriggerRequest()) transcode.return_value = { "method": "post", @@ -12389,11 +12407,13 @@ def test_delete_trigger_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_trigger(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_channel_rest_bad_request(request_type=eventarc.GetChannelRequest): @@ -12478,9 +12498,11 @@ def test_get_channel_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, 
"post_get_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetChannelRequest.pb(eventarc.GetChannelRequest()) transcode.return_value = { "method": "post", @@ -12502,11 +12524,13 @@ def test_get_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = channel.Channel() + post_with_metadata.return_value = channel.Channel(), metadata client.get_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_channels_rest_bad_request(request_type=eventarc.ListChannelsRequest): @@ -12582,9 +12606,11 @@ def test_list_channels_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channels_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channels") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListChannelsRequest.pb(eventarc.ListChannelsRequest()) transcode.return_value = { "method": "post", @@ -12606,11 +12632,13 @@ def test_list_channels_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListChannelsResponse() + post_with_metadata.return_value = eventarc.ListChannelsResponse(), metadata client.list_channels(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) 
pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_channel_rest_bad_request(request_type=eventarc.CreateChannelRequest): @@ -12743,9 +12771,11 @@ def test_create_channel_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateChannelRequest.pb(eventarc.CreateChannelRequest()) transcode.return_value = { "method": "post", @@ -12767,11 +12797,13 @@ def test_create_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_channel_rest_bad_request(request_type=eventarc.UpdateChannelRequest): @@ -12904,9 +12936,11 @@ def test_update_channel_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_channel_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_update_channel") as pre: pre.assert_not_called() post.assert_not_called() + 
post_with_metadata.assert_not_called() pb_message = eventarc.UpdateChannelRequest.pb(eventarc.UpdateChannelRequest()) transcode.return_value = { "method": "post", @@ -12928,11 +12962,13 @@ def test_update_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_channel_rest_bad_request(request_type=eventarc.DeleteChannelRequest): @@ -13001,9 +13037,11 @@ def test_delete_channel_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteChannelRequest.pb(eventarc.DeleteChannelRequest()) transcode.return_value = { "method": "post", @@ -13025,11 +13063,13 @@ def test_delete_channel_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_channel(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_provider_rest_bad_request(request_type=eventarc.GetProviderRequest): @@ -13105,9 +13145,11 @@ def test_get_provider_rest_interceptors(null_interceptor): with 
mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_provider_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_get_provider") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetProviderRequest.pb(eventarc.GetProviderRequest()) transcode.return_value = { "method": "post", @@ -13129,11 +13171,13 @@ def test_get_provider_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = discovery.Provider() + post_with_metadata.return_value = discovery.Provider(), metadata client.get_provider(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_providers_rest_bad_request(request_type=eventarc.ListProvidersRequest): @@ -13209,9 +13253,11 @@ def test_list_providers_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_providers_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_list_providers") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListProvidersRequest.pb(eventarc.ListProvidersRequest()) transcode.return_value = { "method": "post", @@ -13233,11 +13279,13 @@ def test_list_providers_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = 
eventarc.ListProvidersResponse() + post_with_metadata.return_value = eventarc.ListProvidersResponse(), metadata client.list_providers(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_channel_connection_rest_bad_request(request_type=eventarc.GetChannelConnectionRequest): @@ -13317,9 +13365,11 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_channel_connection_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_get_channel_connection") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetChannelConnectionRequest.pb(eventarc.GetChannelConnectionRequest()) transcode.return_value = { "method": "post", @@ -13341,11 +13391,13 @@ def test_get_channel_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = channel_connection.ChannelConnection() + post_with_metadata.return_value = channel_connection.ChannelConnection(), metadata client.get_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_channel_connections_rest_bad_request(request_type=eventarc.ListChannelConnectionsRequest): @@ -13421,9 +13473,11 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ 
mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_list_channel_connections_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_list_channel_connections") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.ListChannelConnectionsRequest.pb(eventarc.ListChannelConnectionsRequest()) transcode.return_value = { "method": "post", @@ -13445,11 +13499,13 @@ def test_list_channel_connections_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = eventarc.ListChannelConnectionsResponse() + post_with_metadata.return_value = eventarc.ListChannelConnectionsResponse(), metadata client.list_channel_connections(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_channel_connection_rest_bad_request(request_type=eventarc.CreateChannelConnectionRequest): @@ -13582,9 +13638,11 @@ def test_create_channel_connection_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_create_channel_connection_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_create_channel_connection") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.CreateChannelConnectionRequest.pb(eventarc.CreateChannelConnectionRequest()) transcode.return_value = { "method": "post", @@ -13606,11 +13664,13 @@ def 
test_create_channel_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_channel_connection_rest_bad_request(request_type=eventarc.DeleteChannelConnectionRequest): @@ -13679,9 +13739,11 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_delete_channel_connection_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_delete_channel_connection") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.DeleteChannelConnectionRequest.pb(eventarc.DeleteChannelConnectionRequest()) transcode.return_value = { "method": "post", @@ -13703,11 +13765,13 @@ def test_delete_channel_connection_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_channel_connection(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_google_channel_config_rest_bad_request(request_type=eventarc.GetGoogleChannelConfigRequest): @@ -13783,9 +13847,11 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): with 
mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_get_google_channel_config_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_get_google_channel_config") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = eventarc.GetGoogleChannelConfigRequest.pb(eventarc.GetGoogleChannelConfigRequest()) transcode.return_value = { "method": "post", @@ -13807,11 +13873,13 @@ def test_get_google_channel_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = google_channel_config.GoogleChannelConfig(), metadata client.get_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_google_channel_config_rest_bad_request(request_type=eventarc.UpdateGoogleChannelConfigRequest): @@ -13951,9 +14019,11 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config") as post, \ + mock.patch.object(transports.EventarcRestInterceptor, "post_update_google_channel_config_with_metadata") as post_with_metadata, \ mock.patch.object(transports.EventarcRestInterceptor, "pre_update_google_channel_config") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
eventarc.UpdateGoogleChannelConfigRequest.pb(eventarc.UpdateGoogleChannelConfigRequest()) transcode.return_value = { "method": "post", @@ -13975,11 +14045,13 @@ def test_update_google_channel_config_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gce_google_channel_config.GoogleChannelConfig() + post_with_metadata.return_value = gce_google_channel_config.GoogleChannelConfig(), metadata client.update_google_channel_config(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 0d6c45081de9..702c01fb2b4f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -180,12 +180,31 @@ def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metada def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. 
""" return response + def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance @@ -197,12 +216,31 @@ def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metada def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. 
""" return response + def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance @@ -214,12 +252,31 @@ def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metada def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_export_instance` interceptor runs + before the `post_export_instance_with_metadata` interceptor. 
""" return response + def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_export_instance_with_metadata` + interceptor in new development instead of the `post_export_instance` interceptor. + When both interceptors are used, this `post_export_instance_with_metadata` interceptor runs after the + `post_export_instance` interceptor. The (possibly modified) response returned by + `post_export_instance` will be passed to + `post_export_instance_with_metadata`. + """ + return response, metadata + def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance @@ -231,12 +288,31 @@ def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, me def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. 
""" return response + def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance @@ -248,12 +324,31 @@ def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Se def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. 
""" return response + def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string @@ -265,12 +360,31 @@ def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStrin def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_auth_string_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_get_instance_auth_string` interceptor runs + before the `post_get_instance_auth_string_with_metadata` interceptor. 
""" return response + def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_auth_string + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_auth_string_with_metadata` + interceptor in new development instead of the `post_get_instance_auth_string` interceptor. + When both interceptors are used, this `post_get_instance_auth_string_with_metadata` interceptor runs after the + `post_get_instance_auth_string` interceptor. The (possibly modified) response returned by + `post_get_instance_auth_string` will be passed to + `post_get_instance_auth_string_with_metadata`. + """ + return response, metadata + def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance @@ -282,12 +396,31 @@ def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metada def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_import_instance` interceptor runs + before the `post_import_instance_with_metadata` interceptor. 
""" return response + def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_import_instance_with_metadata` + interceptor in new development instead of the `post_import_instance` interceptor. + When both interceptors are used, this `post_import_instance_with_metadata` interceptor runs after the + `post_import_instance` interceptor. The (possibly modified) response returned by + `post_import_instance` will be passed to + `post_import_instance_with_metadata`. + """ + return response, metadata + def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances @@ -299,12 +432,31 @@ def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. 
""" return response + def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance @@ -316,12 +468,31 @@ def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceR def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. 
""" return response + def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance @@ -333,12 +504,31 @@ def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metada def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. 
""" return response + def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance @@ -350,12 +540,31 @@ def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, meta def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upgrade_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_upgrade_instance` interceptor runs + before the `post_upgrade_instance_with_metadata` interceptor. 
""" return response + def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_upgrade_instance_with_metadata` + interceptor in new development instead of the `post_upgrade_instance` interceptor. + When both interceptors are used, this `post_upgrade_instance_with_metadata` interceptor runs after the + `post_upgrade_instance` interceptor. The (possibly modified) response returned by + `post_upgrade_instance` will be passed to + `post_upgrade_instance_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: @@ -770,6 +979,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -889,6 +1100,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER 
try: response_payload = json_format.MessageToJson(resp) @@ -1011,6 +1224,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_export_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1133,6 +1348,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1251,6 +1468,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = cloud_redis.Instance.to_json(response) @@ -1369,6 +1588,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_get_instance_auth_string(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = cloud_redis.InstanceAuthString.to_json(response) @@ -1491,6 
+1712,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_import_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_import_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1611,6 +1834,8 @@ def __call__(self, json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = cloud_redis.ListInstancesResponse.to_json(response) @@ -1733,6 +1958,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_reschedule_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1855,6 +2082,8 @@ def __call__(self, json_format.Parse(response.content, resp, ignore_unknown_fields=True) resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1977,6 +2206,8 @@ def __call__(self, json_format.Parse(response.content, resp, 
ignore_unknown_fields=True) resp = self._interceptor.post_upgrade_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 9ce8542f78c5..24f3370c48b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -193,12 +193,31 @@ async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for create_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. 
""" return response + async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_instance @@ -210,12 +229,31 @@ async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for delete_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. 
""" return response + async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ExportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for export_instance @@ -227,12 +265,31 @@ async def pre_export_instance(self, request: cloud_redis.ExportInstanceRequest, async def post_export_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for export_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_export_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_export_instance` interceptor runs + before the `post_export_instance_with_metadata` interceptor. 
""" return response + async def post_export_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_export_instance_with_metadata` + interceptor in new development instead of the `post_export_instance` interceptor. + When both interceptors are used, this `post_export_instance_with_metadata` interceptor runs after the + `post_export_instance` interceptor. The (possibly modified) response returned by + `post_export_instance` will be passed to + `post_export_instance_with_metadata`. + """ + return response, metadata + async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.FailoverInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for failover_instance @@ -244,12 +301,31 @@ async def pre_failover_instance(self, request: cloud_redis.FailoverInstanceReque async def post_failover_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for failover_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_failover_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_failover_instance` interceptor runs + before the `post_failover_instance_with_metadata` interceptor. 
""" return response + async def post_failover_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for failover_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_failover_instance_with_metadata` + interceptor in new development instead of the `post_failover_instance` interceptor. + When both interceptors are used, this `post_failover_instance_with_metadata` interceptor runs after the + `post_failover_instance` interceptor. The (possibly modified) response returned by + `post_failover_instance` will be passed to + `post_failover_instance_with_metadata`. + """ + return response, metadata + async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance @@ -261,12 +337,31 @@ async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metada async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: """Post-rpc interceptor for get_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. 
""" return response + async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAuthStringRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceAuthStringRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_instance_auth_string @@ -278,12 +373,31 @@ async def pre_get_instance_auth_string(self, request: cloud_redis.GetInstanceAut async def post_get_instance_auth_string(self, response: cloud_redis.InstanceAuthString) -> cloud_redis.InstanceAuthString: """Post-rpc interceptor for get_instance_auth_string - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_instance_auth_string_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_get_instance_auth_string` interceptor runs + before the `post_get_instance_auth_string_with_metadata` interceptor. 
""" return response + async def post_get_instance_auth_string_with_metadata(self, response: cloud_redis.InstanceAuthString, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.InstanceAuthString, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance_auth_string + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_auth_string_with_metadata` + interceptor in new development instead of the `post_get_instance_auth_string` interceptor. + When both interceptors are used, this `post_get_instance_auth_string_with_metadata` interceptor runs after the + `post_get_instance_auth_string` interceptor. The (possibly modified) response returned by + `post_get_instance_auth_string` will be passed to + `post_get_instance_auth_string_with_metadata`. + """ + return response, metadata + async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ImportInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for import_instance @@ -295,12 +409,31 @@ async def pre_import_instance(self, request: cloud_redis.ImportInstanceRequest, async def post_import_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for import_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_import_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_import_instance` interceptor runs + before the `post_import_instance_with_metadata` interceptor. 
""" return response + async def post_import_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for import_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_import_instance_with_metadata` + interceptor in new development instead of the `post_import_instance` interceptor. + When both interceptors are used, this `post_import_instance_with_metadata` interceptor runs after the + `post_import_instance` interceptor. The (possibly modified) response returned by + `post_import_instance` will be passed to + `post_import_instance_with_metadata`. + """ + return response, metadata + async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_instances @@ -312,12 +445,31 @@ async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, me async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: """Post-rpc interceptor for list_instances - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. 
""" return response + async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMaintenanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.RescheduleMaintenanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for reschedule_maintenance @@ -329,12 +481,31 @@ async def pre_reschedule_maintenance(self, request: cloud_redis.RescheduleMainte async def post_reschedule_maintenance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for reschedule_maintenance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_reschedule_maintenance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_reschedule_maintenance` interceptor runs + before the `post_reschedule_maintenance_with_metadata` interceptor. 
""" return response + async def post_reschedule_maintenance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for reschedule_maintenance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_reschedule_maintenance_with_metadata` + interceptor in new development instead of the `post_reschedule_maintenance` interceptor. + When both interceptors are used, this `post_reschedule_maintenance_with_metadata` interceptor runs after the + `post_reschedule_maintenance` interceptor. The (possibly modified) response returned by + `post_reschedule_maintenance` will be passed to + `post_reschedule_maintenance_with_metadata`. + """ + return response, metadata + async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_instance @@ -346,12 +517,31 @@ async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for update_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. 
""" return response + async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpgradeInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for upgrade_instance @@ -363,12 +553,31 @@ async def pre_upgrade_instance(self, request: cloud_redis.UpgradeInstanceRequest async def post_upgrade_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: """Post-rpc interceptor for upgrade_instance - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_upgrade_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the CloudRedis server but before - it is returned to user code. + it is returned to user code. This `post_upgrade_instance` interceptor runs + before the `post_upgrade_instance_with_metadata` interceptor. 
""" return response + async def post_upgrade_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for upgrade_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_upgrade_instance_with_metadata` + interceptor in new development instead of the `post_upgrade_instance` interceptor. + When both interceptors are used, this `post_upgrade_instance_with_metadata` interceptor runs after the + `post_upgrade_instance` interceptor. The (possibly modified) response returned by + `post_upgrade_instance` will be passed to + `post_upgrade_instance_with_metadata`. + """ + return response, metadata + async def pre_get_location( self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: @@ -806,6 +1015,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -931,6 +1142,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, 
response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1059,6 +1272,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_export_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_export_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1187,6 +1402,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_failover_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1309,6 +1526,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = cloud_redis.Instance.to_json(response) @@ -1431,6 +1650,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_get_instance_auth_string(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = await self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = cloud_redis.InstanceAuthString.to_json(response) @@ -1559,6 +1780,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_import_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_import_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1683,6 +1906,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = cloud_redis.ListInstancesResponse.to_json(response) @@ -1811,6 +2036,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_reschedule_maintenance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -1939,6 +2166,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await 
self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) @@ -2067,6 +2296,8 @@ async def __call__(self, content = await response.read() json_format.Parse(content, pb_resp, ignore_unknown_fields=True) resp = await self._interceptor.post_upgrade_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER try: response_payload = json_format.MessageToJson(resp) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index d46839b7a855..fef9b18058b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -7287,9 +7287,11 @@ def test_list_instances_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = 
cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -7311,11 +7313,13 @@ def test_list_instances_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_redis.ListInstancesResponse() + post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceRequest): @@ -7441,9 +7445,11 @@ def test_get_instance_rest_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -7465,11 +7471,13 @@ def test_get_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_redis.Instance() + post_with_metadata.return_value = cloud_redis.Instance(), metadata client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_instance_auth_string_rest_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): @@ -7543,9 +7551,11 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): with 
mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_auth_string_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) transcode.return_value = { "method": "post", @@ -7567,11 +7577,13 @@ def test_get_instance_auth_string_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_redis.InstanceAuthString() + post_with_metadata.return_value = cloud_redis.InstanceAuthString(), metadata client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest): @@ -7704,9 +7716,11 @@ def test_create_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) transcode.return_value = { "method": "post", @@ -7728,11 
+7742,13 @@ def test_create_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanceRequest): @@ -7865,9 +7881,11 @@ def test_update_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) transcode.return_value = { "method": "post", @@ -7889,11 +7907,13 @@ def test_update_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_upgrade_instance_rest_bad_request(request_type=cloud_redis.UpgradeInstanceRequest): @@ -7962,9 +7982,11 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ 
mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_upgrade_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_upgrade_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) transcode.return_value = { "method": "post", @@ -7986,11 +8008,13 @@ def test_upgrade_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_import_instance_rest_bad_request(request_type=cloud_redis.ImportInstanceRequest): @@ -8059,9 +8083,11 @@ def test_import_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_import_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_import_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) transcode.return_value = { "method": "post", @@ -8083,11 +8109,13 @@ def test_import_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = 
operations_pb2.Operation(), metadata client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_export_instance_rest_bad_request(request_type=cloud_redis.ExportInstanceRequest): @@ -8156,9 +8184,11 @@ def test_export_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_export_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_export_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) transcode.return_value = { "method": "post", @@ -8180,11 +8210,13 @@ def test_export_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_failover_instance_rest_bad_request(request_type=cloud_redis.FailoverInstanceRequest): @@ -8253,9 +8285,11 @@ def test_failover_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_failover_instance_with_metadata") as post_with_metadata, 
\ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_failover_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) transcode.return_value = { "method": "post", @@ -8277,11 +8311,13 @@ def test_failover_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanceRequest): @@ -8350,9 +8386,11 @@ def test_delete_instance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) transcode.return_value = { "method": "post", @@ -8374,11 +8412,13 @@ def test_delete_instance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() 
def test_reschedule_maintenance_rest_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest): @@ -8447,9 +8487,11 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_reschedule_maintenance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.CloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) transcode.return_value = { "method": "post", @@ -8471,11 +8513,13 @@ def test_reschedule_maintenance_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): @@ -9204,9 +9248,11 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_list_instances") as pre: pre.assert_not_called() 
post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) transcode.return_value = { "method": "post", @@ -9228,11 +9274,13 @@ async def test_list_instances_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_redis.ListInstancesResponse() + post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata await client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceRequest): @@ -9365,9 +9413,11 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) transcode.return_value = { "method": "post", @@ -9389,11 +9439,13 @@ async def test_get_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = cloud_redis.Instance() + post_with_metadata.return_value = cloud_redis.Instance(), metadata await client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def 
test_get_instance_auth_string_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceAuthStringRequest): @@ -9474,9 +9526,11 @@ async def test_get_instance_auth_string_rest_asyncio_interceptors(null_intercept with mock.patch.object(type(client.transport._session), "request") as req, \ mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_auth_string_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance_auth_string") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.GetInstanceAuthStringRequest.pb(cloud_redis.GetInstanceAuthStringRequest()) transcode.return_value = { "method": "post", @@ -9498,11 +9552,13 @@ async def test_get_instance_auth_string_rest_asyncio_interceptors(null_intercept ] pre.return_value = request, metadata post.return_value = cloud_redis.InstanceAuthString() + post_with_metadata.return_value = cloud_redis.InstanceAuthString(), metadata await client.get_instance_auth_string(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis.CreateInstanceRequest): @@ -9642,9 +9698,11 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ 
mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_create_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) transcode.return_value = { "method": "post", @@ -9666,11 +9724,13 @@ async def test_create_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpdateInstanceRequest): @@ -9810,9 +9870,11 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_update_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) transcode.return_value = { "method": "post", @@ -9834,11 +9896,13 @@ async def test_update_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.update_instance(request, metadata=[("key", "val"), ("cephalopod", 
"squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_upgrade_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpgradeInstanceRequest): @@ -9914,9 +9978,11 @@ async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_upgrade_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_upgrade_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.UpgradeInstanceRequest.pb(cloud_redis.UpgradeInstanceRequest()) transcode.return_value = { "method": "post", @@ -9938,11 +10004,13 @@ async def test_upgrade_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.upgrade_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_import_instance_rest_asyncio_bad_request(request_type=cloud_redis.ImportInstanceRequest): @@ -10018,9 +10086,11 @@ async def test_import_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_import_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, 
"post_import_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_import_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.ImportInstanceRequest.pb(cloud_redis.ImportInstanceRequest()) transcode.return_value = { "method": "post", @@ -10042,11 +10112,13 @@ async def test_import_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.import_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_export_instance_rest_asyncio_bad_request(request_type=cloud_redis.ExportInstanceRequest): @@ -10122,9 +10194,11 @@ async def test_export_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_export_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_export_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.ExportInstanceRequest.pb(cloud_redis.ExportInstanceRequest()) transcode.return_value = { "method": "post", @@ -10146,11 +10220,13 @@ async def test_export_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await 
client.export_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_failover_instance_rest_asyncio_bad_request(request_type=cloud_redis.FailoverInstanceRequest): @@ -10226,9 +10302,11 @@ async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_failover_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_failover_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.FailoverInstanceRequest.pb(cloud_redis.FailoverInstanceRequest()) transcode.return_value = { "method": "post", @@ -10250,11 +10328,13 @@ async def test_failover_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.failover_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis.DeleteInstanceRequest): @@ -10330,9 +10410,11 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance") as post, \ + 
mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) transcode.return_value = { "method": "post", @@ -10354,11 +10436,13 @@ async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_reschedule_maintenance_rest_asyncio_bad_request(request_type=cloud_redis.RescheduleMaintenanceRequest): @@ -10434,9 +10518,11 @@ async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor mock.patch.object(path_template, "transcode") as transcode, \ mock.patch.object(operation.Operation, "_set_result_from_operation"), \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_reschedule_maintenance_with_metadata") as post_with_metadata, \ mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_reschedule_maintenance") as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = cloud_redis.RescheduleMaintenanceRequest.pb(cloud_redis.RescheduleMaintenanceRequest()) transcode.return_value = { "method": "post", @@ -10458,11 +10544,13 @@ async def test_reschedule_maintenance_rest_asyncio_interceptors(null_interceptor ] pre.return_value = request, metadata post.return_value = 
operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata await client.reschedule_maintenance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() @pytest.mark.asyncio async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2.GetLocationRequest): From def8ec5ea08552ad1196cbe647c6d00504cd2c29 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Fri, 20 Dec 2024 13:11:29 -0500 Subject: [PATCH 1233/1339] build: Use Ubuntu 22.04 for tests that require the use of Python 3.7 (#2304) --- .../.github/workflows/tests.yaml | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index ffb5c1704475..a565a3366020 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -60,7 +60,9 @@ jobs: target: [showcase, showcase_alternative_templates, showcase_w_rest_async] logging_scope: ["", "google"] - runs-on: ubuntu-latest + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. + # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" @@ -149,7 +151,10 @@ jobs: # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `_w_rest_async` variant when async rest is GA. variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async] logging_scope: ["", "google"] - runs-on: ubuntu-latest + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. 
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Set up Python "${{ matrix.python }}" @@ -250,7 +255,10 @@ jobs: strategy: matrix: python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - runs-on: ubuntu-latest + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. + # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} @@ -272,7 +280,10 @@ jobs: matrix: python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] variant: ['', _alternative_templates] - runs-on: ubuntu-latest + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. + # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python }} From 91b29b68ab38cddcf0d126887eb389ed467318ca Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Thu, 9 Jan 2025 12:57:47 -0800 Subject: [PATCH 1234/1339] feat: add cred info to auth related errors (#2115) --- .../%sub/services/%service/_mixins.py.j2 | 112 ++++++++++------ .../%sub/services/%service/client.py.j2 | 68 +++++++--- .../%name_%version/%sub/test_%service.py.j2 | 48 ++++++- .../asset_v1/services/asset_service/client.py | 40 +++++- .../unit/gapic/asset_v1/test_asset_service.py | 46 +++++++ .../services/iam_credentials/client.py | 26 ++++ .../credentials_v1/test_iam_credentials.py | 46 +++++++ .../eventarc_v1/services/eventarc/client.py | 124 +++++++++++++----- .../unit/gapic/eventarc_v1/test_eventarc.py | 46 +++++++ .../services/config_service_v2/client.py | 54 ++++++-- .../services/logging_service_v2/client.py | 54 ++++++-- .../services/metrics_service_v2/client.py | 54 
++++++-- .../logging_v2/test_config_service_v2.py | 47 +++++++ .../logging_v2/test_logging_service_v2.py | 47 +++++++ .../logging_v2/test_metrics_service_v2.py | 47 +++++++ .../redis_v1/services/cloud_redis/client.py | 96 ++++++++++---- .../unit/gapic/redis_v1/test_cloud_redis.py | 46 +++++++ 17 files changed, 850 insertions(+), 151 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 index 70c686f60a00..4bfc76e9548a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_mixins.py.j2 @@ -43,12 +43,16 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} @@ -95,12 +99,16 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% if "DeleteOperation" in api.mixin_api_methods %} @@ -253,12 +261,16 @@ # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% endif %} {# LRO #} @@ -375,12 +387,16 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% if "GetIamPolicy" in api.mixin_api_methods %} @@ -493,12 +509,16 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% if "TestIamPermissions" in api.mixin_api_methods %} @@ -549,12 +569,16 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% endif %} @@ -604,12 +628,16 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% if "ListLocations" in api.mixin_api_methods %} @@ -655,11 +683,15 @@ # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} {% endif %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 952ef03abcac..66ea9d12adf4 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -10,6 +10,8 @@ from collections import OrderedDict {% if service.any_extended_operations_methods %} import functools {% endif %} +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -423,6 +425,30 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
@@ -765,12 +791,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -885,12 +915,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -943,12 +977,16 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e {% endif %} DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index b9301094d703..d0b8cf51bec5 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -22,8 +22,8 @@ from grpc.experimental import aio {% if "rest" in opts.transport %} from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format -import json {% endif %} +import json import math import pytest from google.api_core import api_core_version @@ -102,6 +102,15 @@ from google.iam.v1 import policy_pb2 # type: ignore {% endfilter %} {{ shared_macros.add_google_api_core_version_header_import(service.version) }} + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -268,6 +277,43 @@ def test__get_universe_domain(): {{ service.client_name }}._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = {{ service.client_name }}(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = {{ service.client_name }}(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ {% if 'grpc' in opts.transport %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index cf38212cf840..8013724696cb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -482,6 +484,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -3556,12 +3582,16 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index b27b49cd3011..9a4489e98d53 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -71,6 +71,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -192,6 +201,43 @@ def test__get_universe_domain(): AssetServiceClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AssetServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (AssetServiceClient, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 46f628864123..3736372a4351 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -419,6 +421,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index d31e01c82ed9..693c6d60a733 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -61,6 +61,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -182,6 +191,43 @@ def test__get_universe_domain(): IAMCredentialsClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = IAMCredentialsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = IAMCredentialsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (IAMCredentialsClient, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 37e3a7fe1d3f..2de8d7404f83 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -527,6 +529,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2973,12 +2999,16 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -3025,12 +3055,16 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -3246,12 +3280,16 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -3365,12 +3403,16 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -3422,12 +3464,16 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -3474,12 +3520,16 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -3526,12 +3576,16 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 6473ef989ec4..325b396d0cc9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -81,6 +81,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -202,6 +211,43 @@ def test__get_universe_domain(): EventarcClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = EventarcClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = EventarcClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (EventarcClient, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 28683c5430ce..e85efa74867d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -478,6 +480,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -4292,12 +4318,16 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -4344,12 +4374,16 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 312dced17cb1..2ca0aa9b3c31 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -409,6 +411,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1380,12 +1406,16 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1432,12 +1462,16 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 1c3112bf6485..afca747efbce 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -14,6 +14,8 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -410,6 +412,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -1230,12 +1256,16 @@ def list_operations( # Validate the universe domain. 
self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -1282,12 +1312,16 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def cancel_operation( self, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index ed05e4954041..b73f557e1242 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -23,6 +23,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from google.api_core import api_core_version @@ -61,6 +62,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, 
len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -182,6 +192,43 @@ def test__get_universe_domain(): ConfigServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ConfigServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ConfigServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (ConfigServiceV2Client, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 88d4fffd09fd..bd8dd4158030 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -23,6 +23,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from google.api_core import api_core_version @@ -62,6 +63,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -183,6 +193,43 @@ def test__get_universe_domain(): LoggingServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = 
LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (LoggingServiceV2Client, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 71e91aa79234..8f9bc8e768c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -23,6 +23,7 @@ import grpc from grpc.experimental import aio +import json import math import pytest from google.api_core import api_core_version @@ -60,6 +61,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -181,6 +191,43 @@ def test__get_universe_domain(): MetricsServiceV2Client._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = MetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = MetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (MetricsServiceV2Client, "grpc"), diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index efdfcd898c81..56c333e6cc3a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -14,6 +14,8 @@ # limitations under the License. 
# from collections import OrderedDict +from http import HTTPStatus +import json import logging as std_logging import os import re @@ -446,6 +448,30 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -2159,12 +2185,16 @@ def list_operations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_operation( self, @@ -2211,12 +2241,16 @@ def get_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. 
+ response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def delete_operation( self, @@ -2372,12 +2406,16 @@ def wait_operation( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_location( self, @@ -2424,12 +2462,16 @@ def get_location( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def list_locations( self, @@ -2476,12 +2518,16 @@ def list_locations( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index fef9b18058b4..275fc26b2c85 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -79,6 +79,15 @@ import google.auth + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -200,6 +209,43 @@ def test__get_universe_domain(): CloudRedisClient._get_universe_domain("", None) assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudRedisClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudRedisClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize("client_class,transport_name", [ (CloudRedisClient, "grpc"), From 332a50fd4da858ad38032993509f9a23bd475873 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 22 Jan 2025 11:04:47 -0500 Subject: [PATCH 1235/1339] tests: remove generated routing parameter test for bidi/client side streaming (#2313) --- .../tests/unit/gapic/%name_%version/%sub/test_macros.j2 | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 33b76774c235..1a31294dee85 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -2173,7 +2173,8 @@ def test_initialize_client_w_{{transport_name}}(): {% macro routing_parameter_test(service, api, transport, is_async) %} {% for method in service.methods.values() %}{# method #} -{% if method.explicit_routing %} +{# See existing proposal b/330610501 to add support for explicit routing in BIDI/client side streaming #} +{% if method.explicit_routing and not method.client_streaming %} {# Any value that is part of the HTTP/1.1 URI should be sent as #} {# a field header. Set these to a non-empty value. #} {% for routing_param in method.routing_rule.routing_parameters %} From d66d418028112f13388a35a1584a2af52f2012c1 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Sat, 25 Jan 2025 03:01:47 +0500 Subject: [PATCH 1236/1339] chore: add documentation for logging (#2308) Co-authored-by: Victor Chudnovsky --- .../gapic/templates/README.rst.j2 | 93 ++++++++++++++++++ .../integration/goldens/asset/README.rst | 94 +++++++++++++++++++ .../goldens/credentials/README.rst | 94 +++++++++++++++++++ .../integration/goldens/eventarc/README.rst | 94 +++++++++++++++++++ .../integration/goldens/logging/README.rst | 94 +++++++++++++++++++ .../integration/goldens/redis/README.rst | 94 +++++++++++++++++++ 6 files changed, 563 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/README.rst.j2 b/packages/gapic-generator/gapic/templates/README.rst.j2 index c443a634060f..3e6b3746d479 100644 --- a/packages/gapic-generator/gapic/templates/README.rst.j2 +++ b/packages/gapic-generator/gapic/templates/README.rst.j2 @@ -48,3 +48,96 @@ Windows \Scripts\activate \Scripts\pip.exe install \path\to\library + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. 
+Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. 
code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. 
The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/README.rst b/packages/gapic-generator/tests/integration/goldens/asset/README.rst index 110d4086abb7..a10b3ef1e958 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/README.rst +++ b/packages/gapic-generator/tests/integration/goldens/asset/README.rst @@ -47,3 +47,97 @@ Windows python3 -m venv \Scripts\activate \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. 
It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. 
If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/README.rst b/packages/gapic-generator/tests/integration/goldens/credentials/README.rst index b4de94145075..44a6987f426b 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/README.rst +++ b/packages/gapic-generator/tests/integration/goldens/credentials/README.rst @@ -47,3 +47,97 @@ Windows python3 -m venv \Scripts\activate \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. 
**Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. 
code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst index 4c79a1d0f7db..bbf4c3f7256a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/README.rst @@ -47,3 +47,97 @@ Windows python3 -m venv \Scripts\activate \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. 
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. 
For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/README.rst b/packages/gapic-generator/tests/integration/goldens/logging/README.rst index 56aa7d0a8ad9..c3f6248ae3a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/README.rst +++ b/packages/gapic-generator/tests/integration/goldens/logging/README.rst @@ -47,3 +47,97 @@ Windows python3 -m venv \Scripts\activate \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. 
+ + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. 
code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/README.rst b/packages/gapic-generator/tests/integration/goldens/redis/README.rst index 45c06d80c64a..2ad783a17a53 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/README.rst +++ b/packages/gapic-generator/tests/integration/goldens/redis/README.rst @@ -47,3 +47,97 @@ Windows python3 -m venv \Scripts\activate \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. 
+- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. 
For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) From c03b1b6f42ff3531ac057b4e50cfe6bd2d03a7d8 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Tue, 28 Jan 2025 10:04:11 -0500 Subject: [PATCH 1237/1339] feat: Add support for reading selective GAPIC generation methods from service YAML (#2272) Co-authored-by: Anthonios Partheniou Co-authored-by: Victor Chudnovsky --- packages/gapic-generator/DEVELOPMENT.md | 1 + packages/gapic-generator/gapic/schema/api.py | 156 +- .../gapic-generator/gapic/schema/wrappers.py | 230 + .../tests/integration/BUILD.bazel | 26 + .../goldens/redis_selective/.coveragerc | 13 + .../goldens/redis_selective/.flake8 | 33 + .../goldens/redis_selective/BUILD.bazel | 12 + .../goldens/redis_selective/MANIFEST.in | 2 + .../goldens/redis_selective/README.rst | 143 + .../redis_selective/docs/_static/custom.css | 3 + .../goldens/redis_selective/docs/conf.py | 376 + .../goldens/redis_selective/docs/index.rst | 7 + .../docs/redis_v1/cloud_redis.rst | 10 + .../docs/redis_v1/services_.rst | 6 + .../redis_selective/docs/redis_v1/types_.rst | 6 + .../google/cloud/redis/__init__.py | 55 + .../google/cloud/redis/gapic_version.py | 16 + .../google/cloud/redis/py.typed | 2 + .../google/cloud/redis_v1/__init__.py | 56 + 
.../google/cloud/redis_v1/gapic_metadata.json | 103 + .../google/cloud/redis_v1/gapic_version.py | 16 + .../google/cloud/redis_v1/py.typed | 2 + .../cloud/redis_v1/services/__init__.py | 15 + .../redis_v1/services/cloud_redis/__init__.py | 22 + .../services/cloud_redis/async_client.py | 1339 +++ .../redis_v1/services/cloud_redis/client.py | 1743 ++++ .../redis_v1/services/cloud_redis/pagers.py | 166 + .../cloud_redis/transports/README.rst | 9 + .../cloud_redis/transports/__init__.py | 49 + .../services/cloud_redis/transports/base.py | 314 + .../services/cloud_redis/transports/grpc.py | 645 ++ .../cloud_redis/transports/grpc_asyncio.py | 720 ++ .../services/cloud_redis/transports/rest.py | 2055 +++++ .../cloud_redis/transports/rest_asyncio.py | 2151 +++++ .../cloud_redis/transports/rest_base.py | 473 + .../google/cloud/redis_v1/types/__init__.py | 48 + .../cloud/redis_v1/types/cloud_redis.py | 1022 +++ .../goldens/redis_selective/mypy.ini | 3 + .../goldens/redis_selective/noxfile.py | 280 + ...rated_cloud_redis_create_instance_async.py | 63 + ...erated_cloud_redis_create_instance_sync.py | 63 + ...rated_cloud_redis_delete_instance_async.py | 56 + ...erated_cloud_redis_delete_instance_sync.py | 56 + ...enerated_cloud_redis_get_instance_async.py | 52 + ...generated_cloud_redis_get_instance_sync.py | 52 + ...erated_cloud_redis_list_instances_async.py | 53 + ...nerated_cloud_redis_list_instances_sync.py | 53 + ...rated_cloud_redis_update_instance_async.py | 61 + ...erated_cloud_redis_update_instance_sync.py | 61 + ...nippet_metadata_google.cloud.redis.v1.json | 844 ++ .../scripts/fixup_redis_v1_keywords.py | 180 + .../goldens/redis_selective/setup.py | 102 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.13.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../goldens/redis_selective/tests/__init__.py 
| 16 + .../redis_selective/tests/unit/__init__.py | 16 + .../tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/redis_v1/__init__.py | 16 + .../unit/gapic/redis_v1/test_cloud_redis.py | 8061 +++++++++++++++++ .../tests/integration/redis_selective_v1.yaml | 90 + .../tests/unit/schema/test_api.py | 821 +- 66 files changed, 23095 insertions(+), 11 deletions(-) create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/.coveragerc create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 create mode 100644 packages/gapic-generator/tests/integration/goldens/redis_selective/BUILD.bazel create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/cloud_redis.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/services_.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/types_.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/py.typed create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_metadata.json create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/py.typed create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/mypy.ini create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py create mode 
100755 packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py create mode 100644 packages/gapic-generator/tests/integration/redis_selective_v1.yaml diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index 3e77e54a118e..fe8ac7699fa7 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -60,5 +60,6 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. bazel run //tests/integration:eventarc_update bazel run //tests/integration:logging_update bazel run //tests/integration:redis_update + bazel run //tests/integration:redis_selective_update ``` diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index f7a7669f8c66..e95243e08641 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -24,7 +24,7 @@ import os import sys from types import MappingProxyType -from typing import Callable, Container, Dict, FrozenSet, Mapping, Optional, Sequence, Set, Tuple +from typing import Callable, Container, Dict, FrozenSet, Iterable, Mapping, Optional, Sequence, Set, Tuple import yaml from google.api_core import exceptions @@ -237,6 +237,95 @@ def disambiguate(self, string: str) -> str: return self.disambiguate(f'_{string}') return string + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + method_allowlist: Set[str], + resource_messages: Dict[str, 'wrappers.MessageType'], + ) -> None: + """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. 
+ + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + method_allowlist (Set[str]): An allowlist of fully-qualified method names. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. Only resources with a message representation + should be included in the dictionary. + Returns: + None + """ + # The method.operation_service for an extended LRO is not fully qualified, so we + # truncate the service names accordingly so they can be found in + # method.add_to_address_allowlist + services_in_proto = { + service.name: service for service in self.services.values() + } + for service in self.services.values(): + service.add_to_address_allowlist(address_allowlist=address_allowlist, + method_allowlist=method_allowlist, + resource_messages=resource_messages, + services_in_proto=services_in_proto) + + def prune_messages_for_selective_generation(self, *, + address_allowlist: Set['metadata.Address']) -> Optional['Proto']: + """Returns a truncated version of this Proto. + + Only the services, messages, and enums contained in the allowlist + of visited addresses are included in the returned object. If there + are no services, messages, or enums left, and no file level resources, + return None. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to filter against. Objects with addresses not the allowlist will be + removed from the returned Proto. + Returns: + Optional[Proto]: A truncated version of this proto. 
If there are no services, messages, + or enums left after the truncation process and there are no file level resources, + returns None. + """ + # Once the address allowlist has been created, it suffices to only + # prune items at 2 different levels to truncate the Proto object: + # + # 1. At the Proto level, we remove unnecessary services, messages, + # and enums. + # 2. For allowlisted services, at the Service level, we remove + # non-allowlisted methods. + services = { + k: v.prune_messages_for_selective_generation( + address_allowlist=address_allowlist) + for k, v in self.services.items() + if v.meta.address in address_allowlist + } + + all_messages = { + k: v + for k, v in self.all_messages.items() + if v.ident in address_allowlist + } + + all_enums = { + k: v + for k, v in self.all_enums.items() + if v.ident in address_allowlist + } + + if not services and not all_messages and not all_enums: + return None + + return dataclasses.replace( + self, + services=services, + all_messages=all_messages, + all_enums=all_enums + ) + @dataclasses.dataclass(frozen=True) class API: @@ -365,10 +454,52 @@ def disambiguate_keyword_sanitize_fname( ignore_unknown_fields=True ) - # Done; return the API. - return cls(naming=naming, - all_protos=protos, - service_yaml_config=service_yaml_config) + # Third pass for various selective GAPIC settings; these require + # settings in the service.yaml and so we build the API object + # before doing another pass. + api = cls(naming=naming, + all_protos=protos, + service_yaml_config=service_yaml_config) + + if package in api.all_library_settings: + selective_gapic_methods = set( + api.all_library_settings[package].python_settings.common.selective_gapic_generation.methods + ) + if selective_gapic_methods: + + all_resource_messages = collections.ChainMap( + *(proto.resource_messages for proto in protos.values()) + ) + + # Prepare a list of addresses to include in selective generation, + # then prune each Proto object. 
We look at metadata.Addresses, not objects, because + # objects that refer to the same thing in the proto are different Python objects + # in memory. + address_allowlist: Set['metadata.Address'] = set([]) + for proto in api.protos.values(): + proto.add_to_address_allowlist(address_allowlist=address_allowlist, + method_allowlist=selective_gapic_methods, + resource_messages=all_resource_messages) + + # The list of explicitly allow-listed protos to generate, plus all + # the proto dependencies regardless of the allow-list. + new_all_protos = {} + + # We only prune services/messages/enums from protos that are not dependencies. + for name, proto in api.all_protos.items(): + if name not in api.protos: + new_all_protos[name] = proto + else: + proto_to_generate = proto.prune_messages_for_selective_generation( + address_allowlist=address_allowlist) + if proto_to_generate: + new_all_protos[name] = proto_to_generate + + api = cls(naming=naming, + all_protos=new_all_protos, + service_yaml_config=service_yaml_config) + + return api @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: @@ -743,6 +874,21 @@ def enforce_valid_library_settings( continue versions_seen.add(library_settings.version) + # Check to see if selective gapic generation methods are valid. + selective_gapic_errors = {} + for method_name in library_settings.python_settings.common.selective_gapic_generation.methods: + if method_name not in self.all_methods: + selective_gapic_errors[method_name] = "Method does not exist." + elif not method_name.startswith(library_settings.version): + selective_gapic_errors[method_name] = "Mismatched version for method." 
+ + if selective_gapic_errors: + all_errors[library_settings.version] = [ + { + "selective_gapic_generation": selective_gapic_errors, + } + ] + if all_errors: raise ClientLibrarySettingsError(yaml.dump(all_errors)) diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 473b56e7eb0a..5618fd677359 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -389,6 +389,46 @@ def with_context( meta=self.meta.with_context(collisions=collisions), ) + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + resource_messages: Dict[str, 'MessageType'] + ) -> None: + """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. Only resources with a message representation + should be included in the dictionary. 
+ Returns: + None + """ + if self.message: + self.message.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + + if self.enum: + self.enum.add_to_address_allowlist( + address_allowlist=address_allowlist, + ) + + if self.resource_reference and self.resource_reference in resource_messages: + # The message types in resource_message are different objects, but should be + # defined the same as the MessageTypes we're traversing here. + resource_messages[self.resource_reference].add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + @dataclasses.dataclass(frozen=True) class FieldHeader: @@ -760,6 +800,47 @@ def with_context(self, *, meta=self.meta.with_context(collisions=collisions), ) + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + resource_messages: Dict[str, 'MessageType'] + ) -> None: + """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. Only resources with a message representation + should be included in the dictionary. 
+ Returns: + None + """ + if self.ident not in address_allowlist: + address_allowlist.add(self.ident) + + for field in self.fields.values(): + field.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages + ) + + for enum in self.nested_enums.values(): + enum.add_to_address_allowlist( + address_allowlist=address_allowlist, + ) + + for message in self.nested_messages.values(): + message.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + @dataclasses.dataclass(frozen=True) class EnumValueType: @@ -813,6 +894,15 @@ def with_context(self, *, collisions: Set[str]) -> 'EnumType': meta=self.meta.with_context(collisions=collisions), ) if collisions else self + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address']) -> None: + """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. + """ + address_allowlist.add(self.ident) + @property def options_dict(self) -> Dict: """Return the EnumOptions (if present) as a dict. @@ -917,6 +1007,24 @@ def with_context(self, *, ), ) + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + resource_messages: Dict[str, 'MessageType']) -> None: + """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. 
+ """ + + self.request_type.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + self.operation_type.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + @dataclasses.dataclass(frozen=True) class OperationInfo: @@ -946,6 +1054,24 @@ def with_context(self, *, ), ) + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + resource_messages: Dict[str, 'MessageType']) -> None: + """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. + """ + self.response_type.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + + self.metadata_type.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages + ) + @dataclasses.dataclass(frozen=True) class RetryInfo: @@ -1679,6 +1805,52 @@ def with_context(self, *, meta=self.meta.with_context(collisions=collisions), ) + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + resource_messages: Dict[str, 'MessageType'], + services_in_proto: Dict[str, 'Service'], + ) -> None: + """Adds to the allowlist of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. 
+ """ + + address_allowlist.add(self.ident) + + if self.lro: + self.lro.add_to_address_allowlist(address_allowlist=address_allowlist, + resource_messages=resource_messages) + + if self.extended_lro: + # We need to add the service/method pointed to by self.operation_service to + # the allowlist, as it might not have been specified by + # the methods under selective_gapic_generation. + # We assume that the operation service lives in the same proto file as this one. + operation_service = services_in_proto[ + self.operation_service] # type: ignore + address_allowlist.add(operation_service.meta.address) + operation_service.operation_polling_method.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + services_in_proto=services_in_proto, + ) + + self.extended_lro.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + + self.input.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + + self.output.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + ) + @dataclasses.dataclass(frozen=True) class CommonResource: @@ -1994,3 +2166,61 @@ def with_context(self, *, }, meta=self.meta.with_context(collisions=collisions), ) + + def add_to_address_allowlist(self, *, + address_allowlist: Set['metadata.Address'], + method_allowlist: Set[str], + resource_messages: Dict[str, 'MessageType'], + services_in_proto: Dict[str, 'Service'], + ) -> None: + """Adds to the allowlist of Addresses of wrapper objects to be included in selective GAPIC generation. + + This method is used to create an allowlist of addresses to be used to filter out unneeded + services, methods, messages, and enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. 
Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + method_allowlist (Set[str]): An allowlist of fully-qualified method names. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. Only resources with a message representation + should be included in the dictionary. + services_in_proto (Dict[str, Service]): + Returns: + None + """ + + for method in self.methods.values(): + if method.ident.proto in method_allowlist: + # Include this service if there are any types/methods in selective gapic for this service. + address_allowlist.add(self.meta.address) + method.add_to_address_allowlist( + address_allowlist=address_allowlist, + resource_messages=resource_messages, + services_in_proto=services_in_proto, + ) + + def prune_messages_for_selective_generation(self, *, + address_allowlist: Set['metadata.Address']) -> 'Service': + """Returns a truncated version of this Service. + + Only the methods, messages, and enums contained in the address allowlist + are included in the returned object. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to filter against. Objects with addresses not the allowlist will be + removed from the returned Proto. + Returns: + Service: A truncated version of this proto. 
+ """ + return dataclasses.replace( + self, + methods={ + k: v + for k, v in self.methods.items() if v.ident in address_allowlist + } + ) diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 4d700c938fb0..3b970ba5a554 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -28,6 +28,7 @@ INTEGRATION_TEST_LIBRARIES = [ "eventarc", # create_channel is a reserved term in transport layer. "logging", # Java package remapping in gapic.yaml. "redis", # Has a gapic.yaml. + "redis_selective", # Selective generation. ] [integration_test( @@ -144,6 +145,7 @@ py_test( ], ) +# Redis py_gapic_library( name = "redis_py_gapic", srcs = ["@com_google_googleapis//google/cloud/redis/v1:redis_proto"], @@ -167,6 +169,30 @@ py_test( ], ) +# Redis - selective GAPIC phase 1 +py_gapic_library( + name = "redis_selective_py_gapic", + srcs = ["@com_google_googleapis//google/cloud/redis/v1:redis_proto"], + grpc_service_config = "redis_grpc_service_config.json", + opt_args = [ + "autogen-snippets", + ], + service_yaml = "redis_selective_v1.yaml", + transport = "grpc+rest", +) + +py_test( + name = "redis_selective_py_gapic_test", + srcs = [ + "redis_selective_py_gapic_pytest.py", + "redis_selective_py_gapic_test.py", + ], + legacy_create_init = False, + deps = [ + ":redis_selective_py_gapic", + ], +) + test_suite( name = "googleapis_test_suite", tests = [ diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/.coveragerc b/packages/gapic-generator/tests/integration/goldens/redis_selective/.coveragerc new file mode 100755 index 000000000000..5aa9171cc8a2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/redis/__init__.py + google/cloud/redis/gapic_version.py +exclude_lines = + # 
Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 b/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 new file mode 100755 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/redis_selective/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in new file mode 100755 index 000000000000..5a95b2698cbb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/redis *.py +recursive-include google/cloud/redis_v1 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/README.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/README.rst new file mode 100755 index 000000000000..2ad783a17a53 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/README.rst @@ -0,0 +1,143 @@ +Python Client for Google Cloud Redis API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Redis API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. 
_Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. 
It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. 
If you need the events to be propagated to the root logger, you must explicitly set
+   :code:`logging.getLogger("google").propagate = True` in your code.
+#. You can mix the different logging configurations above for different Google modules. For example, you may want to use a code-based logging configuration for
+   one library, but decide you need to also set up environment-based logging configuration for another library.
+
+   #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual
+      if the code-based configuration gets applied first.
+
+#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get
+   executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured.
+   (This is the reason for 2.i. above.)
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css
new file mode 100755
index 000000000000..06423be0b592
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css
@@ -0,0 +1,3 @@
+dl.field-list > dt {
+    min-width: 100px
+}
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py
new file mode 100755
index 000000000000..0764f832c6f8
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py
@@ -0,0 +1,376 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-redis documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ["_templates"]
+
+# Allow markdown includes (so releases.md can include CHANGELOG.md)
+# http://www.sphinx-doc.org/en/master/markdown.html
+source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+source_suffix = [".rst", ".md"]
+
+# The encoding of source files.
+# source_encoding = 'utf-8-sig'
+
+# The root toctree document.
+root_doc = "index"
+
+# General information about the project.
+project = u"google-cloud-redis"
+copyright = u"2023, Google, LLC"
+author = u"Google APIs"  # TODO: autogenerate this bit
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The full version, including alpha/beta/rc tags.
+release = __version__
+# The short X.Y version.
+version = ".".join(release.split(".")[0:2])
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = 'en'
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+# today = ''
+# Else, today_fmt is used as the format for a strftime call.
+# today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ["_build"]
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+# default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+# add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::). 
+# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. 
+# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-redis-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-redis.tex", + u"google-cloud-redis Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. 
+# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-redis", + u"Google Cloud Redis Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-redis", + u"google-cloud-redis Documentation", + author, + "google-cloud-redis", + "GAPIC library for Google Cloud Redis API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst new file mode 100755 index 000000000000..0b346d85a90f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + redis_v1/services_ + redis_v1/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/cloud_redis.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/cloud_redis.rst new file mode 100755 index 000000000000..0e3d7cfa809e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/cloud_redis.rst @@ -0,0 +1,10 @@ +CloudRedis +---------------------------- + +.. 
automodule:: google.cloud.redis_v1.services.cloud_redis + :members: + :inherited-members: + +.. automodule:: google.cloud.redis_v1.services.cloud_redis.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/services_.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/services_.rst new file mode 100755 index 000000000000..dba59a371880 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/services_.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Redis v1 API +====================================== +.. toctree:: + :maxdepth: 2 + + cloud_redis diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/types_.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/types_.rst new file mode 100755 index 000000000000..7eb7c77e4ced --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/redis_v1/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Redis v1 API +=================================== + +.. automodule:: google.cloud.redis_v1.types + :members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py new file mode 100755 index 000000000000..527bad46ac9f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.redis import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.redis_v1.services.cloud_redis.client import CloudRedisClient +from google.cloud.redis_v1.services.cloud_redis.async_client import CloudRedisAsyncClient + +from google.cloud.redis_v1.types.cloud_redis import CreateInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import DeleteInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import GetInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import Instance +from google.cloud.redis_v1.types.cloud_redis import ListInstancesRequest +from google.cloud.redis_v1.types.cloud_redis import ListInstancesResponse +from google.cloud.redis_v1.types.cloud_redis import MaintenancePolicy +from google.cloud.redis_v1.types.cloud_redis import MaintenanceSchedule +from google.cloud.redis_v1.types.cloud_redis import NodeInfo +from google.cloud.redis_v1.types.cloud_redis import OperationMetadata +from google.cloud.redis_v1.types.cloud_redis import PersistenceConfig +from google.cloud.redis_v1.types.cloud_redis import TlsCertificate +from google.cloud.redis_v1.types.cloud_redis import UpdateInstanceRequest +from google.cloud.redis_v1.types.cloud_redis import WeeklyMaintenanceWindow + +__all__ = ('CloudRedisClient', + 'CloudRedisAsyncClient', + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'GetInstanceRequest', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'MaintenancePolicy', + 'MaintenanceSchedule', + 'NodeInfo', + 'OperationMetadata', + 
'PersistenceConfig', + 'TlsCertificate', + 'UpdateInstanceRequest', + 'WeeklyMaintenanceWindow', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py new file mode 100755 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/py.typed b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/py.typed new file mode 100755 index 000000000000..960151ecda8b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-redis package uses inline types. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py new file mode 100755 index 000000000000..797596a67970 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.redis_v1 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.cloud_redis import CloudRedisClient +from .services.cloud_redis import CloudRedisAsyncClient + +from .types.cloud_redis import CreateInstanceRequest +from .types.cloud_redis import DeleteInstanceRequest +from .types.cloud_redis import GetInstanceRequest +from .types.cloud_redis import Instance +from .types.cloud_redis import ListInstancesRequest +from .types.cloud_redis import ListInstancesResponse +from .types.cloud_redis import MaintenancePolicy +from .types.cloud_redis import MaintenanceSchedule +from .types.cloud_redis import NodeInfo +from .types.cloud_redis import OperationMetadata +from .types.cloud_redis import PersistenceConfig +from .types.cloud_redis import TlsCertificate +from .types.cloud_redis import UpdateInstanceRequest +from .types.cloud_redis import WeeklyMaintenanceWindow + +__all__ = ( + 'CloudRedisAsyncClient', +'CloudRedisClient', +'CreateInstanceRequest', +'DeleteInstanceRequest', +'GetInstanceRequest', +'Instance', +'ListInstancesRequest', +'ListInstancesResponse', +'MaintenancePolicy', +'MaintenanceSchedule', +'NodeInfo', +'OperationMetadata', +'PersistenceConfig', +'TlsCertificate', +'UpdateInstanceRequest', +'WeeklyMaintenanceWindow', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_metadata.json new file mode 100755 index 000000000000..4f1451dba11e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.redis_v1", + "protoPackage": "google.cloud.redis.v1", + "schema": "1.0", + "services": { 
+ "CloudRedis": { + "clients": { + "grpc": { + "libraryClient": "CloudRedisClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "grpc-async": { + "libraryClient": "CloudRedisAsyncClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + }, + "rest": { + "libraryClient": "CloudRedisClient", + "rpcs": { + "CreateInstance": { + "methods": [ + "create_instance" + ] + }, + "DeleteInstance": { + "methods": [ + "delete_instance" + ] + }, + "GetInstance": { + "methods": [ + "get_instance" + ] + }, + "ListInstances": { + "methods": [ + "list_instances" + ] + }, + "UpdateInstance": { + "methods": [ + "update_instance" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py new file mode 100755 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/py.typed b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/py.typed new file mode 100755 index 000000000000..960151ecda8b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-redis package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py new file mode 100755 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py new file mode 100755 index 000000000000..17e93eea6bfe --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import CloudRedisClient +from .async_client import CloudRedisAsyncClient + +__all__ = ( + 'CloudRedisClient', + 'CloudRedisAsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py new file mode 100755 index 000000000000..233a17a6edf9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -0,0 +1,1339 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.redis_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from .client import CloudRedisClient + +try: + from google.api_core import client_logging # type: 
ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class CloudRedisAsyncClient: + """Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + """ + + _client: CloudRedisClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = CloudRedisClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = CloudRedisClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = CloudRedisClient._DEFAULT_UNIVERSE + + instance_path = staticmethod(CloudRedisClient.instance_path) + parse_instance_path = staticmethod(CloudRedisClient.parse_instance_path) + common_billing_account_path = staticmethod(CloudRedisClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(CloudRedisClient.parse_common_billing_account_path) + common_folder_path = staticmethod(CloudRedisClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudRedisClient.parse_common_folder_path) + common_organization_path = staticmethod(CloudRedisClient.common_organization_path) + parse_common_organization_path = staticmethod(CloudRedisClient.parse_common_organization_path) + common_project_path = staticmethod(CloudRedisClient.common_project_path) + parse_common_project_path = staticmethod(CloudRedisClient.parse_common_project_path) + common_location_path = staticmethod(CloudRedisClient.common_location_path) + parse_common_location_path = staticmethod(CloudRedisClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisAsyncClient: The constructed client. + """ + return CloudRedisClient.from_service_account_info.__func__(CloudRedisAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisAsyncClient: The constructed client. + """ + return CloudRedisClient.from_service_account_file.__func__(CloudRedisAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + return CloudRedisClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> CloudRedisTransport: + """Returns the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = CloudRedisClient.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud redis async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,CloudRedisTransport,Callable[..., CloudRedisTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the CloudRedisTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). 
We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = CloudRedisClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "credentialsType": None, + } + ) + + async def list_instances(self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesAsyncPager: + r"""Lists all Redis instances owned by a project in either the + specified location (region) or all locations. + + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_list_instances(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]]): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.ListInstancesRequest): + request = cloud_redis.ListInstancesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInstancesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_instance(self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: + r"""Gets the details of a specific Redis instance. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_get_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]]): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.redis_v1.types.Instance: + A Memorystore for Redis instance. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.GetInstanceRequest): + request = cloud_redis.GetInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_instance(self, + request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None, + *, + parent: Optional[str] = None, + instance_id: Optional[str] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a Redis instance based on the specified tier and memory + size. + + By default, the instance is accessible from the project's + `default network `__. 
+ + The creation is executed asynchronously and callers may check + the returned operation to track its progress. Once the operation + is completed the Redis instance will be fully functional. + Completed longrunning.Operation will contain the new instance + object in the response field. + + The returned operation is automatically deleted after a few + hours, so there is no need to call DeleteOperation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_create_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]]): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + parent (:class:`str`): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (:class:`str`): + Required. The logical name of the Redis instance in the + customer project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.redis_v1.types.Instance`): + Required. A Redis [Instance] resource + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, instance_id, instance]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.CreateInstanceRequest): + request = cloud_redis.CreateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def update_instance(self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_update_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]]): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Mask of fields to update. At least one path + must be supplied in this field. The elements of the + repeated paths field may only include these fields from + [Instance][google.cloud.redis.v1.Instance]: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (:class:`google.cloud.redis_v1.types.Instance`): + Required. Update description. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([update_mask, instance]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.UpdateInstanceRequest): + request = cloud_redis.UpdateInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if update_mask is not None: + request.update_mask = update_mask + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_instance(self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a specific Redis instance. 
Instance stops + serving and data is deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + async def sample_delete_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]]): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + name (:class:`str`): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.DeleteInstanceRequest): + request = cloud_redis.DeleteInstanceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.wait_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self) -> "CloudRedisAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudRedisAsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py new file mode 100755 index 000000000000..65444427a950 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -0,0 +1,1743 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.redis_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import CloudRedisGrpcTransport +from 
.transports.grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from .transports.rest import CloudRedisRestTransport +try: + from .transports.rest_asyncio import AsyncCloudRedisRestTransport + HAS_ASYNC_REST_DEPENDENCIES = True +except ImportError as e: # pragma: NO COVER + HAS_ASYNC_REST_DEPENDENCIES = False + ASYNC_REST_EXCEPTION = e + + +class CloudRedisClientMeta(type): + """Metaclass for the CloudRedis client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] + _transport_registry["grpc"] = CloudRedisGrpcTransport + _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport + _transport_registry["rest"] = CloudRedisRestTransport + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[CloudRedisTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + raise ASYNC_REST_EXCEPTION + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
+ return next(iter(cls._transport_registry.values())) + + +class CloudRedisClient(metaclass=CloudRedisClientMeta): + """Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+ DEFAULT_ENDPOINT = "redis.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "redis.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudRedisClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> CloudRedisTransport: + """Returns the transport used by the client instance. + + Returns: + CloudRedisTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def instance_path(project: str,location: str,instance: str,) -> str: + """Returns a fully-qualified instance string.""" + return "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + + @staticmethod + def parse_instance_path(path: str) -> Dict[str,str]: + """Parses a instance path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = CloudRedisClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = CloudRedisClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = CloudRedisClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, CloudRedisTransport, Callable[..., CloudRedisTransport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the cloud redis client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+            transport (Optional[Union[str,CloudRedisTransport,Callable[..., CloudRedisTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the CloudRedisTransport constructor.
+                If set to None, a transport is chosen automatically.
+                NOTE: "rest" transport functionality is currently in a
+                beta state (preview). We welcome your feedback via an
+                issue in this library's source repository.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests.
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = CloudRedisClient._read_environment_variables() + self._client_cert_source = CloudRedisClient._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = CloudRedisClient._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, CloudRedisTransport) + if transport_provided: + # transport is a CloudRedisTransport instance. 
+ if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(CloudRedisTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + CloudRedisClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + transport_init: Union[Type[CloudRedisTransport], Callable[..., CloudRedisTransport]] = ( + CloudRedisClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., CloudRedisTransport], transport) + ) + + if "rest_asyncio" in str(transport_init): + unsupported_params = { + "google.api_core.client_options.ClientOptions.credentials_file": self._client_options.credentials_file, + "google.api_core.client_options.ClientOptions.scopes": self._client_options.scopes, + "google.api_core.client_options.ClientOptions.quota_project_id": self._client_options.quota_project_id, + "google.api_core.client_options.ClientOptions.client_cert_source": self._client_options.client_cert_source, + "google.api_core.client_options.ClientOptions.api_audience": self._client_options.api_audience, + + } + provided_unsupported_params = [name for name, value in unsupported_params.items() if value is not None] + if provided_unsupported_params: + raise core_exceptions.AsyncRestUnsupportedParameterError( # type: ignore + f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" + ) + self._transport = transport_init( + credentials=credentials, + host=self._api_endpoint, + client_info=client_info, + ) + return + + import google.auth._default # type: 
ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.redis_v1.CloudRedisClient`.", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "credentialsType": None, + } + ) + + def list_instances(self, + request: Optional[Union[cloud_redis.ListInstancesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListInstancesPager: + r"""Lists all Redis instances owned by a project in either the + specified location (region) or all locations. 
+ + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_list_instances(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.ListInstancesRequest, dict]): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + parent (str): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.ListInstancesRequest): + request = cloud_redis.ListInstancesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_instances] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInstancesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_instance(self, + request: Optional[Union[cloud_redis.GetInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> cloud_redis.Instance: + r"""Gets the details of a specific Redis instance. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_get_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.GetInstanceRequest, dict]): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.redis_v1.types.Instance: + A Memorystore for Redis instance. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.GetInstanceRequest): + request = cloud_redis.GetInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def create_instance(self,
+            request: Optional[Union[cloud_redis.CreateInstanceRequest, dict]] = None,
+            *,
+            parent: Optional[str] = None,
+            instance_id: Optional[str] = None,
+            instance: Optional[cloud_redis.Instance] = None,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> operation.Operation:
+        r"""Creates a Redis instance based on the specified tier and memory
+        size.
+
+        By default, the instance is accessible from the project's
+        `default network <https://cloud.google.com/vpc/docs/vpc>`__.
+
+        The creation is executed asynchronously and callers may check
+        the returned operation to track its progress. Once the operation
+        is completed the Redis instance will be fully functional.
+        Completed longrunning.Operation will contain the new instance
+        object in the response field.
+
+        The returned operation is automatically deleted after a few
+        hours, so there is no need to call DeleteOperation.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_create_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.CreateInstanceRequest, dict]): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + parent (str): + Required. The resource name of the instance location + using the form: + ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_id (str): + Required. The logical name of the Redis instance in the + customer project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + + This corresponds to the ``instance_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.redis_v1.types.Instance): + Required. 
A Redis [Instance] resource + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, instance_id, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.CreateInstanceRequest): + request = cloud_redis.CreateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if instance_id is not None: + request.instance_id = instance_id + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_instance(self, + request: Optional[Union[cloud_redis.UpdateInstanceRequest, dict]] = None, + *, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + instance: Optional[cloud_redis.Instance] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_update_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.UpdateInstanceRequest, dict]): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. At least one path + must be supplied in this field. The elements of the + repeated paths field may only include these fields from + [Instance][google.cloud.redis.v1.Instance]: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance (google.cloud.redis_v1.types.Instance): + Required. Update description. Only fields specified in + update_mask are updated. + + This corresponds to the ``instance`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.redis_v1.types.Instance` A + Memorystore for Redis instance. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([update_mask, instance]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.UpdateInstanceRequest): + request = cloud_redis.UpdateInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if update_mask is not None: + request.update_mask = update_mask + if instance is not None: + request.instance = instance + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("instance.name", request.instance.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + cloud_redis.Instance, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_instance(self, + request: Optional[Union[cloud_redis.DeleteInstanceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import redis_v1 + + def sample_delete_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.redis_v1.types.DeleteInstanceRequest, dict]): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, cloud_redis.DeleteInstanceRequest): + request = cloud_redis.DeleteInstanceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=cloud_redis.OperationMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "CloudRedisClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + def wait_operation( + self, + request: Optional[operations_pb2.WaitOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Waits until the specified long-running operation is done or reaches at most + a specified timeout, returning the latest state. + + If the operation is already done, the latest state is immediately returned. + If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC + timeout is used. If the server does not support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.WaitOperationRequest`): + The request object. Request message for + `WaitOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.WaitOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.wait_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "CloudRedisClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py new file mode 100755 index 000000000000..e3d2c05b83d6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.redis_v1.types import cloud_redis + + +class ListInstancesPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.redis_v1.types.ListInstancesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.redis_v1.types.ListInstancesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., cloud_redis.ListInstancesResponse], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.redis_v1.types.ListInstancesRequest): + The initial request object. 
+ response (google.cloud.redis_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = cloud_redis.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[cloud_redis.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[cloud_redis.Instance]: + for page in self.pages: + yield from page.instances + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInstancesAsyncPager: + """A pager for iterating through ``list_instances`` requests. + + This class thinly wraps an initial + :class:`google.cloud.redis_v1.types.ListInstancesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``instances`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInstances`` requests and continue to iterate + through the ``instances`` field on the + corresponding responses. + + All the usual :class:`google.cloud.redis_v1.types.ListInstancesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[cloud_redis.ListInstancesResponse]], + request: cloud_redis.ListInstancesRequest, + response: cloud_redis.ListInstancesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.redis_v1.types.ListInstancesRequest): + The initial request object. + response (google.cloud.redis_v1.types.ListInstancesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = cloud_redis.ListInstancesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[cloud_redis.ListInstancesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[cloud_redis.Instance]: + async def async_generator(): + async for page in self.pages: + for response in page.instances: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/README.rst new file mode 100755 index 000000000000..fce41822b52c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`CloudRedisTransport` is the ABC for all transports. +- public child `CloudRedisGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `CloudRedisGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseCloudRedisRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `CloudRedisRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py new file mode 100755 index 000000000000..563cd5dd7682 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type, Tuple + +from .base import CloudRedisTransport +from .grpc import CloudRedisGrpcTransport +from .grpc_asyncio import CloudRedisGrpcAsyncIOTransport +from .rest import CloudRedisRestTransport +from .rest import CloudRedisRestInterceptor +ASYNC_REST_CLASSES: Tuple[str, ...] 
+try: + from .rest_asyncio import AsyncCloudRedisRestTransport + from .rest_asyncio import AsyncCloudRedisRestInterceptor + ASYNC_REST_CLASSES = ('AsyncCloudRedisRestTransport', 'AsyncCloudRedisRestInterceptor') + HAS_REST_ASYNC = True +except ImportError: # pragma: NO COVER + ASYNC_REST_CLASSES = () + HAS_REST_ASYNC = False + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[CloudRedisTransport]] +_transport_registry['grpc'] = CloudRedisGrpcTransport +_transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport +_transport_registry['rest'] = CloudRedisRestTransport +if HAS_REST_ASYNC: # pragma: NO COVER + _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport + +__all__ = ( + 'CloudRedisTransport', + 'CloudRedisGrpcTransport', + 'CloudRedisGrpcAsyncIOTransport', + 'CloudRedisRestTransport', + 'CloudRedisRestInterceptor', +) + ASYNC_REST_CLASSES diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py new file mode 100755 index 000000000000..2c311ca2c4ca --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -0,0 +1,314 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.redis_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class CloudRedisTransport(abc.ABC): + """Abstract transport class for CloudRedis.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'redis.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_instances: gapic_v1.method.wrap_method( + self.list_instances, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance: gapic_v1.method.wrap_method( + self.get_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.create_instance: gapic_v1.method.wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: gapic_v1.method.wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: gapic_v1.method.wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, 
+ client_info=client_info, + ), + self.wait_operation: gapic_v1.method.wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + Union[ + cloud_redis.ListInstancesResponse, + Awaitable[cloud_redis.ListInstancesResponse] + ]]: + raise NotImplementedError() + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + Union[ + cloud_redis.Instance, + Awaitable[cloud_redis.Instance] + ]]: + raise NotImplementedError() + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: 
+ raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def wait_operation( + self, + ) -> Callable[ + [operations_pb2.WaitOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_location(self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations(self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[locations_pb2.ListLocationsResponse, Awaitable[locations_pb2.ListLocationsResponse]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'CloudRedisTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py new file mode 100755 index 000000000000..07a5b114b047 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -0,0 +1,645 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + 
request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.redis.v1.CloudRedis",
+                    "rpcName": client_call_details.method,
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC response metadata to a dict of string values for logging
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.redis.v1.CloudRedis",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class CloudRedisGrpcTransport(CloudRedisTransport):
+    """gRPC backend transport for CloudRedis.
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. 
It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + r"""Return a callable for the list instances method over gRPC. + + Lists all Redis instances owned by a project in either the + specified location (region) or all locations. + + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + Returns: + Callable[[~.ListInstancesRequest], + ~.ListInstancesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ListInstances', + request_serializer=cloud_redis.ListInstancesRequest.serialize, + response_deserializer=cloud_redis.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + r"""Return a callable for the get instance method over gRPC. 
+ + Gets the details of a specific Redis instance. + + Returns: + Callable[[~.GetInstanceRequest], + ~.Instance]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance' not in self._stubs: + self._stubs['get_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstance', + request_serializer=cloud_redis.GetInstanceRequest.serialize, + response_deserializer=cloud_redis.Instance.deserialize, + ) + return self._stubs['get_instance'] + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Redis instance based on the specified tier and memory + size. + + By default, the instance is accessible from the project's + `default network `__. + + The creation is executed asynchronously and callers may check + the returned operation to track its progress. Once the operation + is completed the Redis instance will be fully functional. + Completed longrunning.Operation will contain the new instance + object in the response field. + + The returned operation is automatically deleted after a few + hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.CreateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_instance' not in self._stubs: + self._stubs['create_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/CreateInstance', + request_serializer=cloud_redis.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance'] + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the update instance method over gRPC. + + Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + Returns: + Callable[[~.UpdateInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_instance' not in self._stubs: + self._stubs['update_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + request_serializer=cloud_redis.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance'] + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + Returns: + Callable[[~.DeleteInstanceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance' not in self._stubs: + self._stubs['delete_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + request_serializer=cloud_redis.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_instance'] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'CloudRedisGrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py new file mode 100755 index 000000000000..dc833c8e5000 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -0,0 +1,720 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.location import locations_pb2 # type: ignore +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO +from .grpc import CloudRedisGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) 
else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.cloud.redis.v1.CloudRedis",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC response metadata to a dict of string values for logging
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.cloud.redis.v1.CloudRedis",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport):
+    """gRPC AsyncIO backend transport for CloudRedis.
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. 
+ """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + Awaitable[cloud_redis.ListInstancesResponse]]: + r"""Return a callable for the list instances method over gRPC. + + Lists all Redis instances owned by a project in either the + specified location (region) or all locations. + + The location should have the following format: + + - ``projects/{project_id}/locations/{location_id}`` + + If ``location_id`` is specified as ``-`` (wildcard), then all + regions available to the project are queried, and the results + are aggregated. + + Returns: + Callable[[~.ListInstancesRequest], + Awaitable[~.ListInstancesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_instances' not in self._stubs: + self._stubs['list_instances'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/ListInstances', + request_serializer=cloud_redis.ListInstancesRequest.serialize, + response_deserializer=cloud_redis.ListInstancesResponse.deserialize, + ) + return self._stubs['list_instances'] + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + Awaitable[cloud_redis.Instance]]: + r"""Return a callable for the get instance method over gRPC. + + Gets the details of a specific Redis instance. + + Returns: + Callable[[~.GetInstanceRequest], + Awaitable[~.Instance]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_instance' not in self._stubs: + self._stubs['get_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/GetInstance', + request_serializer=cloud_redis.GetInstanceRequest.serialize, + response_deserializer=cloud_redis.Instance.deserialize, + ) + return self._stubs['get_instance'] + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create instance method over gRPC. + + Creates a Redis instance based on the specified tier and memory + size. + + By default, the instance is accessible from the project's + `default network `__. + + The creation is executed asynchronously and callers may check + the returned operation to track its progress. Once the operation + is completed the Redis instance will be fully functional. + Completed longrunning.Operation will contain the new instance + object in the response field. 
+ + The returned operation is automatically deleted after a few + hours, so there is no need to call DeleteOperation. + + Returns: + Callable[[~.CreateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_instance' not in self._stubs: + self._stubs['create_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/CreateInstance', + request_serializer=cloud_redis.CreateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_instance'] + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update instance method over gRPC. + + Updates the metadata and configuration of a specific + Redis instance. + Completed longrunning.Operation will contain the new + instance object in the response field. The returned + operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. + + Returns: + Callable[[~.UpdateInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_instance' not in self._stubs: + self._stubs['update_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/UpdateInstance', + request_serializer=cloud_redis.UpdateInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_instance'] + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete instance method over gRPC. + + Deletes a specific Redis instance. Instance stops + serving and data is deleted. + + Returns: + Callable[[~.DeleteInstanceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_instance' not in self._stubs: + self._stubs['delete_instance'] = self._logged_channel.unary_unary( + '/google.cloud.redis.v1.CloudRedis/DeleteInstance', + request_serializer=cloud_redis.DeleteInstanceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['delete_instance'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: self._wrap_method( + self.list_instances, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance: self._wrap_method( + self.get_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.create_instance: self._wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: self._wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: 
self._wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + self.wait_operation: self._wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def wait_operation( + self, + ) -> Callable[[operations_pb2.WaitOperationRequest], None]: + r"""Return a callable for the wait_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "wait_operation" not in self._stubs: + self._stubs["wait_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/WaitOperation", + request_serializer=operations_pb2.WaitOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["wait_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[[locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ( + 'CloudRedisGrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py new file mode 100755 index 000000000000..7b3fbccb9ffb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -0,0 +1,2055 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseCloudRedisRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + 
rest_version=f"requests@{requests_version}", +) + + +class CloudRedisRestInterceptor: + """Interceptor for CloudRedis. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudRedisRestTransport. + + .. code-block:: python + class MyCustomCloudRedisInterceptor(CloudRedisRestInterceptor): + def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudRedisRestTransport(interceptor=MyCustomCloudRedisInterceptor()) + client = CloudRedisClient(transport=transport) + + + """ + def pre_create_instance(self, request: 
cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. + """ + return response + + def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. + """ + return response + + def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + """Post-rpc interceptor for get_instance + + DEPRECATED. Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. + """ + return response + + def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. + """ + return response + + def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. 
+ """ + return response, metadata + + def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. + """ + return response + + def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. 
+ """ + return response, metadata + + def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + def pre_wait_operation( + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudRedisRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudRedisRestInterceptor + + +class CloudRedisRestTransport(_BaseCloudRedisRestTransport): + """REST backend synchronous transport for CloudRedis. 
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[ + ], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + interceptor: Optional[CloudRedisRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or CloudRedisRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + 'google.longrunning.Operations.WaitOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + 
scopes=self._scopes, + http_options=http_options, + path_prefix="v1") + + self._operations_client = operations_v1.AbstractOperationsClient(transport=rest_transport) + + # Return the client from cache. + return self._operations_client + + class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.CreateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: cloud_redis.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.cloud_redis.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + + request, metadata = self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.create_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.DeleteInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cloud_redis.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over 
HTTP. + + Args: + request (~.cloud_redis.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + + request, metadata = self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._DeleteInstance._get_response(self._host, 
metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.delete_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: cloud_redis.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + 
timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.Instance: + A Memorystore for Redis instance. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + + request, metadata = self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
CloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis.Instance() + pb_resp = cloud_redis.Instance.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.Instance.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.get_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListInstances") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def 
__call__(self, + request: cloud_redis.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_redis.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.ListInstancesResponse: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + + request, metadata = self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloud_redis.ListInstancesResponse() + pb_resp = cloud_redis.ListInstancesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.ListInstancesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.list_instances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.UpdateInstance") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: cloud_redis.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_redis.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + + request, metadata = self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": 
"UpdateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisClient.update_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. 
+ + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the 
cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # 
subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__(self, + request: operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. 
+ + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): + def __hash__(self): + return hash("CloudRedisRestTransport.WaitOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__(self, + request: operations_pb2.WaitOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> 
operations_pb2.Operation: + + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + + request, metadata = self._interceptor.pre_wait_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
CloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_wait_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__=( + 'CloudRedisRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py new file mode 100755 index 000000000000..8028c4273108 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -0,0 +1,2151 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import google.auth +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore + from google.api_core import rest_streaming_async # type: ignore + from google.api_core.operations_v1 import AsyncOperationsRestClient # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError("`rest_asyncio` transport requires the library to be installed with the `async_rest` extra. Install the library with the `async_rest` extra using `pip install google-cloud-redis[async_rest]`") from e + +from google.auth.aio import credentials as ga_credentials_async # type: ignore + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore +from google.api_core import retry_async as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming_async # type: ignore + + +from google.protobuf import json_format +from google.api_core import operations_v1 +from google.cloud.location import locations_pb2 # type: ignore + +import json # type: ignore +import dataclasses +from typing import Any, Dict, List, Callable, Tuple, Optional, Sequence, Union + + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + + +from .rest_base import _BaseCloudRedisRestTransport + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +import logging + +try: + from 
google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"google-auth@{google.auth.__version__}", +) + + +class AsyncCloudRedisRestInterceptor: + """Asynchronous Interceptor for CloudRedis. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AsyncCloudRedisRestTransport. + + .. 
code-block:: python + class MyCustomCloudRedisInterceptor(CloudRedisRestInterceptor): + async def pre_create_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_create_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_delete_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_delete_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_get_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_get_instance(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_list_instances(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_list_instances(self, response): + logging.log(f"Received response: {response}") + return response + + async def pre_update_instance(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + async def post_update_instance(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AsyncCloudRedisRestTransport(interceptor=MyCustomCloudRedisInterceptor()) + client = async CloudRedisClient(transport=transport) + + + """ + async def pre_create_instance(self, request: cloud_redis.CreateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.CreateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_create_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for create_instance + + DEPRECATED. Please use the `post_create_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_create_instance` interceptor runs + before the `post_create_instance_with_metadata` interceptor. + """ + return response + + async def post_create_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_create_instance_with_metadata` + interceptor in new development instead of the `post_create_instance` interceptor. + When both interceptors are used, this `post_create_instance_with_metadata` interceptor runs after the + `post_create_instance` interceptor. The (possibly modified) response returned by + `post_create_instance` will be passed to + `post_create_instance_with_metadata`. + """ + return response, metadata + + async def pre_delete_instance(self, request: cloud_redis.DeleteInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.DeleteInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_delete_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_instance + + DEPRECATED. Please use the `post_delete_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_delete_instance` interceptor runs + before the `post_delete_instance_with_metadata` interceptor. + """ + return response + + async def post_delete_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_delete_instance_with_metadata` + interceptor in new development instead of the `post_delete_instance` interceptor. + When both interceptors are used, this `post_delete_instance_with_metadata` interceptor runs after the + `post_delete_instance` interceptor. The (possibly modified) response returned by + `post_delete_instance` will be passed to + `post_delete_instance_with_metadata`. + """ + return response, metadata + + async def pre_get_instance(self, request: cloud_redis.GetInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.GetInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_instance(self, response: cloud_redis.Instance) -> cloud_redis.Instance: + """Post-rpc interceptor for get_instance + + DEPRECATED. 
Please use the `post_get_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_get_instance` interceptor runs + before the `post_get_instance_with_metadata` interceptor. + """ + return response + + async def post_get_instance_with_metadata(self, response: cloud_redis.Instance, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.Instance, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_get_instance_with_metadata` + interceptor in new development instead of the `post_get_instance` interceptor. + When both interceptors are used, this `post_get_instance_with_metadata` interceptor runs after the + `post_get_instance` interceptor. The (possibly modified) response returned by + `post_get_instance` will be passed to + `post_get_instance_with_metadata`. + """ + return response, metadata + + async def pre_list_instances(self, request: cloud_redis.ListInstancesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_instances + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_instances(self, response: cloud_redis.ListInstancesResponse) -> cloud_redis.ListInstancesResponse: + """Post-rpc interceptor for list_instances + + DEPRECATED. Please use the `post_list_instances_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. This `post_list_instances` interceptor runs + before the `post_list_instances_with_metadata` interceptor. + """ + return response + + async def post_list_instances_with_metadata(self, response: cloud_redis.ListInstancesResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.ListInstancesResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_instances + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_list_instances_with_metadata` + interceptor in new development instead of the `post_list_instances` interceptor. + When both interceptors are used, this `post_list_instances_with_metadata` interceptor runs after the + `post_list_instances` interceptor. The (possibly modified) response returned by + `post_list_instances` will be passed to + `post_list_instances_with_metadata`. + """ + return response, metadata + + async def pre_update_instance(self, request: cloud_redis.UpdateInstanceRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[cloud_redis.UpdateInstanceRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_instance + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_update_instance(self, response: operations_pb2.Operation) -> operations_pb2.Operation: + """Post-rpc interceptor for update_instance + + DEPRECATED. Please use the `post_update_instance_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
This `post_update_instance` interceptor runs + before the `post_update_instance_with_metadata` interceptor. + """ + return response + + async def post_update_instance_with_metadata(self, response: operations_pb2.Operation, metadata: Sequence[Tuple[str, Union[str, bytes]]]) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_instance + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CloudRedis server but before it is returned to user code. + + We recommend only using this `post_update_instance_with_metadata` + interceptor in new development instead of the `post_update_instance` interceptor. + When both interceptors are used, this `post_update_instance_with_metadata` interceptor runs after the + `post_update_instance` interceptor. The (possibly modified) response returned by + `post_update_instance` will be passed to + `post_update_instance_with_metadata`. + """ + return response, metadata + + async def pre_get_location( + self, request: locations_pb2.GetLocationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + async def pre_list_locations( + self, request: locations_pb2.ListLocationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_cancel_operation( + self, request: operations_pb2.CancelOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_cancel_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_delete_operation( + self, request: operations_pb2.DeleteOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. 
+ """ + return request, metadata + + async def post_delete_operation( + self, response: None + ) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_get_operation( + self, request: operations_pb2.GetOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + async def pre_list_operations( + self, request: operations_pb2.ListOperationsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. 
+ """ + return response + + async def pre_wait_operation( + self, request: operations_pb2.WaitOperationRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[operations_pb2.WaitOperationRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for wait_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudRedis server. + """ + return request, metadata + + async def post_wait_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for wait_operation + + Override in a subclass to manipulate the response + after it is returned by the CloudRedis server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AsyncCloudRedisRestStub: + _session: AsyncAuthorizedSession + _host: str + _interceptor: AsyncCloudRedisRestInterceptor + +class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): + """Asynchronous REST backend transport for CloudRedis. 
+ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1 + + The ``redis.googleapis.com`` service implements the Google Cloud + Memorystore for Redis API and defines the following resource model + for managing Redis instances: + + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be referring to a GCP ``region``; for + example: + + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + def __init__(self, + *, + host: str = 'redis.googleapis.com', + credentials: Optional[ga_credentials_async.Credentials] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + url_scheme: str = 'https', + interceptor: Optional[AsyncCloudRedisRestInterceptor] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This async REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[google.auth.aio.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + url_scheme (str): the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=False, + url_scheme=url_scheme, + api_audience=None + ) + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + self._interceptor = interceptor or AsyncCloudRedisRestInterceptor() + self._wrap_with_kind = True + self._prep_wrapped_messages(client_info) + self._operations_client: Optional[operations_v1.AsyncOperationsRestClient] = None + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_instances: self._wrap_method( + self.list_instances, + default_timeout=600.0, + client_info=client_info, + ), + self.get_instance: self._wrap_method( + self.get_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.create_instance: self._wrap_method( + self.create_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.update_instance: self._wrap_method( + self.update_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_instance: self._wrap_method( + self.delete_instance, + default_timeout=600.0, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + 
self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + self.wait_operation: self._wrap_method( + self.wait_operation, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + class _CreateInstance(_BaseCloudRedisRestTransport._BaseCreateInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.CreateInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.CreateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the create instance method over HTTP. + + Args: + request (~.cloud_redis.CreateInstanceRequest): + The request object. Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseCloudRedisRestTransport._BaseCreateInstance._get_http_options() + + request, metadata = await self._interceptor.pre_create_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCreateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseCreateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CreateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._CreateInstance._get_response(self._host, metadata, query_params, self._session, 
timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_create_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CreateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _DeleteInstance(_BaseCloudRedisRestTransport._BaseDeleteInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.DeleteInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + 
response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.DeleteInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the delete instance method over HTTP. + + Args: + request (~.cloud_redis.DeleteInstanceRequest): + The request object. Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_http_options() + + request, metadata = await self._interceptor.pre_delete_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._DeleteInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_delete_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _GetInstance(_BaseCloudRedisRestTransport._BaseGetInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.GetInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.Instance: + r"""Call the get instance method over HTTP. + + Args: + request (~.cloud_redis.GetInstanceRequest): + The request object. Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.Instance: + A Memorystore for Redis instance. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseGetInstance._get_http_options() + + request, metadata = await self._interceptor.pre_get_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetInstance._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.Instance() + pb_resp = cloud_redis.Instance.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_get_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.Instance.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _ListInstances(_BaseCloudRedisRestTransport._BaseListInstances, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListInstances") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: cloud_redis.ListInstancesRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> cloud_redis.ListInstancesResponse: + r"""Call the list instances method over HTTP. + + Args: + request (~.cloud_redis.ListInstancesRequest): + The request object. Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.cloud_redis.ListInstancesResponse: + Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseListInstances._get_http_options() + + request, metadata = await self._interceptor.pre_list_instances(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListInstances._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListInstances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListInstances._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = cloud_redis.ListInstancesResponse() + pb_resp = cloud_redis.ListInstancesResponse.pb(resp) + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_list_instances(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = cloud_redis.ListInstancesResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListInstances", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + class _UpdateInstance(_BaseCloudRedisRestTransport._BaseUpdateInstance, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.UpdateInstance") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, 
uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: cloud_redis.UpdateInstanceRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + r"""Call the update instance method over HTTP. + + Args: + request (~.cloud_redis.UpdateInstanceRequest): + The request object. Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_http_options() + + request, metadata = await self._interceptor.pre_update_instance(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.UpdateInstance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._UpdateInstance._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + resp = operations_pb2.Operation() + pb_resp = resp + content = await response.read() + json_format.Parse(content, pb_resp, ignore_unknown_fields=True) + resp = await self._interceptor.post_update_instance(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": "OK", # need to obtain this properly + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "UpdateInstance", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + + return resp + + @property + def operations_client(self) -> AsyncOperationsRestClient: + """Create the async client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. 
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + 'google.longrunning.Operations.CancelOperation': [ + { + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ], + 'google.longrunning.Operations.DeleteOperation': [ + { + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.GetOperation': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ], + 'google.longrunning.Operations.ListOperations': [ + { + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ], + 'google.longrunning.Operations.WaitOperation': [ + { + 'method': 'post', + 'uri': '/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ], + } + + rest_transport = operations_v1.AsyncOperationsRestTransport( # type: ignore + host=self._host, + # use the credentials which are saved + credentials=self._credentials, # type: ignore + http_options=http_options, + path_prefix="v1" + ) + + self._operations_client = AsyncOperationsRestClient(transport=rest_transport) + + # Return the client from cache. 
+ return self._operations_client + + @property + def create_instance(self) -> Callable[ + [cloud_redis.CreateInstanceRequest], + operations_pb2.Operation]: + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_instance(self) -> Callable[ + [cloud_redis.DeleteInstanceRequest], + operations_pb2.Operation]: + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_instance(self) -> Callable[ + [cloud_redis.GetInstanceRequest], + cloud_redis.Instance]: + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_instances(self) -> Callable[ + [cloud_redis.ListInstancesRequest], + cloud_redis.ListInstancesResponse]: + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_instance(self) -> Callable[ + [cloud_redis.UpdateInstanceRequest], + operations_pb2.Operation]: + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetLocation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: locations_pb2.GetLocationRequest, *, + retry: 
OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetLocation._get_http_options() + + request, metadata = await self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetLocation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
await AsyncCloudRedisRestTransport._GetLocation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetLocation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListLocations") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await 
getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: locations_pb2.ListLocationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseListLocations._get_http_options() + + request, metadata = await self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListLocations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListLocations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListLocations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.CancelOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + 
request: operations_pb2.CancelOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCloudRedisRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = await self._interceptor.pre_cancel_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseCancelOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.CancelOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + 
response = await AsyncCloudRedisRestTransport._CancelOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return await self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.DeleteOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.DeleteOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> None: + + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = await self._interceptor.pre_delete_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.DeleteOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._DeleteOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return await self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.GetOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: operations_pb2.GetOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseGetOperation._get_http_options() + + request, metadata = await self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseGetOperation._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._GetOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.GetOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.ListOperations") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + async def __call__(self, + request: 
operations_pb2.ListOperationsRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.ListOperationsResponse: + + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseCloudRedisRestTransport._BaseListOperations._get_http_options() + + request, metadata = await self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseListOperations._get_transcoded_request(http_options, request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + 
"httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._ListOperations._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.ListOperations", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def wait_operation(self): + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + + class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): + def __hash__(self): + return hash("AsyncCloudRedisRestTransport.WaitOperation") + + @staticmethod + async def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None): + + uri = transcoded_request['uri'] + 
method = transcoded_request['method'] + headers = dict(metadata) + headers['Content-Type'] = 'application/json' + response = await getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + async def __call__(self, + request: operations_pb2.WaitOperationRequest, *, + retry: OptionalRetry=gapic_v1.method.DEFAULT, + timeout: Optional[float]=None, + metadata: Sequence[Tuple[str, Union[str, bytes]]]=(), + ) -> operations_pb2.Operation: + + r"""Call the wait operation method over HTTP. + + Args: + request (operations_pb2.WaitOperationRequest): + The request object for WaitOperation method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from WaitOperation method. 
+ """ + + http_options = _BaseCloudRedisRestTransport._BaseWaitOperation._get_http_options() + + request, metadata = await self._interceptor.pre_wait_operation(request, metadata) + transcoded_request = _BaseCloudRedisRestTransport._BaseWaitOperation._get_transcoded_request(http_options, request) + + body = _BaseCloudRedisRestTransport._BaseWaitOperation._get_request_body_json(transcoded_request) + + # Jsonify the query params + query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.redis_v1.CloudRedisClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = await AsyncCloudRedisRestTransport._WaitOperation._get_response(self._host, metadata, query_params, self._session, timeout, transcoded_request, body) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode('utf-8')) + request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) + method = transcoded_request['method'] + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + content = await response.read() + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = await self._interceptor.post_wait_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.redis_v1.CloudRedisAsyncClient.WaitOperation", + extra = { + "serviceName": "google.cloud.redis.v1.CloudRedis", + "rpcName": "WaitOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest_asyncio" + + async def close(self): + await self._session.close() diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py new file mode 100755 index 000000000000..df8a6cd7c841 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -0,0 +1,473 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.cloud.location import locations_pb2 # type: ignore +from .base import CloudRedisTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore + + +class _BaseCloudRedisRestTransport(CloudRedisTransport): + """Base REST backend transport for CloudRedis. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__(self, *, + host: str = 'redis.googleapis.com', + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = 'https', + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'redis.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience + ) + + class _BaseCreateInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "instanceId" : "", } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.CreateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options,
pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseCreateInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseDeleteInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.DeleteInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseDeleteInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetInstance: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for 
k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/instances/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.GetInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseGetInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseListInstances: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{parent=projects/*/locations/*}/instances', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.ListInstancesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseListInstances._get_unset_required_fields(query_params)) + + return query_params + + class _BaseUpdateInstance: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask" : {}, } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return {k: v for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() if k not in message_dict} + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'patch', + 'uri': '/v1/{instance.name=projects/*/locations/*/instances/*}', + 'body': 'instance', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = cloud_redis.UpdateInstanceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request['body'], + use_integers_for_enums=False + ) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json_format.MessageToJson( + transcoded_request['query_params'], + use_integers_for_enums=False, + )) + query_params.update(_BaseCloudRedisRestTransport._BaseUpdateInstance._get_unset_required_fields(query_params)) + + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*}/locations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}:cancel', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'delete', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + 
transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*/operations/*}', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'get', + 'uri': '/v1/{name=projects/*/locations/*}/operations', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + class _BaseWaitOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [{ + 'method': 'post', + 'uri': 
'/v2/{name=projects/*/locations/*/operations/*}:wait', + 'body': '*', + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode( + http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request['body']) + return body + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request['query_params'])) + return query_params + + +__all__=( + '_BaseCloudRedisRestTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py new file mode 100755 index 000000000000..a051e1e14471 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .cloud_redis import ( + CreateInstanceRequest, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListInstancesRequest, + ListInstancesResponse, + MaintenancePolicy, + MaintenanceSchedule, + NodeInfo, + OperationMetadata, + PersistenceConfig, + TlsCertificate, + UpdateInstanceRequest, + WeeklyMaintenanceWindow, +) + +__all__ = ( + 'CreateInstanceRequest', + 'DeleteInstanceRequest', + 'GetInstanceRequest', + 'Instance', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'MaintenancePolicy', + 'MaintenanceSchedule', + 'NodeInfo', + 'OperationMetadata', + 'PersistenceConfig', + 'TlsCertificate', + 'UpdateInstanceRequest', + 'WeeklyMaintenanceWindow', +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py new file mode 100755 index 000000000000..a831851aa15d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py @@ -0,0 +1,1022 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.redis.v1', + manifest={ + 'NodeInfo', + 'Instance', + 'PersistenceConfig', + 'MaintenancePolicy', + 'WeeklyMaintenanceWindow', + 'MaintenanceSchedule', + 'ListInstancesRequest', + 'ListInstancesResponse', + 'GetInstanceRequest', + 'CreateInstanceRequest', + 'UpdateInstanceRequest', + 'DeleteInstanceRequest', + 'OperationMetadata', + 'TlsCertificate', + }, +) + + +class NodeInfo(proto.Message): + r"""Node specific properties. + + Attributes: + id (str): + Output only. Node identifying string. e.g. + 'node-0', 'node-1' + zone (str): + Output only. Location of the node. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + zone: str = proto.Field( + proto.STRING, + number=2, + ) + + +class Instance(proto.Message): + r"""A Memorystore for Redis instance. + + Attributes: + name (str): + Required. Unique name of the resource in this scope + including project and location using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note: Redis instances are managed and addressed at regional + level so location_id here refers to a GCP region; however, + users may choose which specific zone (or collection of zones + for cross-zone instances) an instance should be provisioned + in. Refer to + [location_id][google.cloud.redis.v1.Instance.location_id] + and + [alternative_location_id][google.cloud.redis.v1.Instance.alternative_location_id] + fields for more details. + display_name (str): + An arbitrary and optional user-provided name + for the instance. 
+ labels (MutableMapping[str, str]): + Resource labels to represent user provided + metadata + location_id (str): + Optional. The zone where the instance will be + provisioned. If not provided, the service will + choose a zone from the specified region for the + instance. For standard tier, additional nodes + will be added across multiple zones for + protection against zonal failures. If specified, + at least one node will be provisioned in this + zone. + alternative_location_id (str): + Optional. If specified, at least one node will be + provisioned in this zone in addition to the zone specified + in location_id. Only applicable to standard tier. If + provided, it must be a different zone from the one provided + in [location_id]. Additional nodes beyond the first 2 will + be placed in zones selected by the service. + redis_version (str): + Optional. The version of Redis software. If not provided, + latest supported version will be used. Currently, the + supported values are: + + - ``REDIS_3_2`` for Redis 3.2 compatibility + - ``REDIS_4_0`` for Redis 4.0 compatibility (default) + - ``REDIS_5_0`` for Redis 5.0 compatibility + - ``REDIS_6_X`` for Redis 6.x compatibility + reserved_ip_range (str): + Optional. For DIRECT_PEERING mode, the CIDR range of + internal addresses that are reserved for this instance. + Range must be unique and non-overlapping with existing + subnets in an authorized network. For PRIVATE_SERVICE_ACCESS + mode, the name of one allocated IP address ranges associated + with this private service access connection. If not + provided, the service will choose an unused /29 block, for + example, 10.0.0.0/29 or 192.168.0.0/29. For + READ_REPLICAS_ENABLED the default block size is /28. + secondary_ip_range (str): + Optional. Additional IP range for node placement. Required + when enabling read replicas on an existing instance. For + DIRECT_PEERING mode value must be a CIDR range of size /28, + or "auto". 
For PRIVATE_SERVICE_ACCESS mode value must be the + name of an allocated address range associated with the + private service access connection, or "auto". + host (str): + Output only. Hostname or IP address of the + exposed Redis endpoint used by clients to + connect to the service. + port (int): + Output only. The port number of the exposed + Redis endpoint. + current_location_id (str): + Output only. The current zone where the Redis primary node + is located. In basic tier, this will always be the same as + [location_id]. In standard tier, this can be the zone of any + node in the instance. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the instance was + created. + state (google.cloud.redis_v1.types.Instance.State): + Output only. The current state of this + instance. + status_message (str): + Output only. Additional information about the + current status of this instance, if available. + redis_configs (MutableMapping[str, str]): + Optional. Redis configuration parameters, according to + http://redis.io/topics/config. Currently, the only supported + parameters are: + + Redis version 3.2 and newer: + + - maxmemory-policy + - notify-keyspace-events + + Redis version 4.0 and newer: + + - activedefrag + - lfu-decay-time + - lfu-log-factor + - maxmemory-gb + + Redis version 5.0 and newer: + + - stream-node-max-bytes + - stream-node-max-entries + tier (google.cloud.redis_v1.types.Instance.Tier): + Required. The service tier of the instance. + memory_size_gb (int): + Required. Redis memory size in GiB. + authorized_network (str): + Optional. The full name of the Google Compute Engine + `network <https://cloud.google.com/vpc/docs/vpc>`__ to which + the instance is connected. If left unspecified, the + ``default`` network will be used. + persistence_iam_identity (str): + Output only. Cloud IAM identity used by import / export + operations to transfer data to/from Cloud Storage. Format is + "serviceAccount:<service_account_email>".
The value may + change over time for a given instance so should be checked + before each import/export operation. + connect_mode (google.cloud.redis_v1.types.Instance.ConnectMode): + Optional. The network connect mode of the Redis instance. If + not provided, the connect mode defaults to DIRECT_PEERING. + auth_enabled (bool): + Optional. Indicates whether OSS Redis AUTH is + enabled for the instance. If set to "true" AUTH + is enabled on the instance. Default value is + "false" meaning AUTH is disabled. + server_ca_certs (MutableSequence[google.cloud.redis_v1.types.TlsCertificate]): + Output only. List of server CA certificates + for the instance. + transit_encryption_mode (google.cloud.redis_v1.types.Instance.TransitEncryptionMode): + Optional. The TLS mode of the Redis instance. + If not provided, TLS is disabled for the + instance. + maintenance_policy (google.cloud.redis_v1.types.MaintenancePolicy): + Optional. The maintenance policy for the + instance. If not provided, maintenance events + can be performed at any time. + maintenance_schedule (google.cloud.redis_v1.types.MaintenanceSchedule): + Output only. Date and time of upcoming + maintenance events which have been scheduled. + replica_count (int): + Optional. The number of replica nodes. The valid range for + the Standard Tier with read replicas enabled is [1-5] and + defaults to 2. If read replicas are not enabled for a + Standard Tier instance, the only valid value is 1 and the + default is 1. The valid value for basic tier is 0 and the + default is also 0. + nodes (MutableSequence[google.cloud.redis_v1.types.NodeInfo]): + Output only. Info per node. + read_endpoint (str): + Output only. Hostname or IP address of the + exposed readonly Redis endpoint. Standard tier + only. Targets all healthy replica nodes in + instance. Replication is asynchronous and + replica nodes will exhibit some lag behind the + primary. Write requests must target 'host'. + read_endpoint_port (int): + Output only. 
The port number of the exposed + readonly redis endpoint. Standard tier only. + Write requests should target 'port'. + read_replicas_mode (google.cloud.redis_v1.types.Instance.ReadReplicasMode): + Optional. Read replicas mode for the instance. Defaults to + READ_REPLICAS_DISABLED. + customer_managed_key (str): + Optional. The KMS key reference that the + customer provides when trying to create the + instance. + persistence_config (google.cloud.redis_v1.types.PersistenceConfig): + Optional. Persistence configuration + parameters + suspension_reasons (MutableSequence[google.cloud.redis_v1.types.Instance.SuspensionReason]): + Optional. reasons that causes instance in + "SUSPENDED" state. + maintenance_version (str): + Optional. The self service update maintenance version. The + version is date based such as "20210712_00_00". + available_maintenance_versions (MutableSequence[str]): + Optional. The available maintenance versions + that an instance could update to. + """ + class State(proto.Enum): + r"""Represents the different states of a Redis instance. + + Values: + STATE_UNSPECIFIED (0): + Not set. + CREATING (1): + Redis instance is being created. + READY (2): + Redis instance has been created and is fully + usable. + UPDATING (3): + Redis instance configuration is being + updated. Certain kinds of updates may cause the + instance to become unusable while the update is + in progress. + DELETING (4): + Redis instance is being deleted. + REPAIRING (5): + Redis instance is being repaired and may be + unusable. + MAINTENANCE (6): + Maintenance is being performed on this Redis + instance. + IMPORTING (8): + Redis instance is importing data + (availability may be affected). + FAILING_OVER (9): + Redis instance is failing over (availability + may be affected). 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + UPDATING = 3 + DELETING = 4 + REPAIRING = 5 + MAINTENANCE = 6 + IMPORTING = 8 + FAILING_OVER = 9 + + class Tier(proto.Enum): + r"""Available service tiers to choose from + + Values: + TIER_UNSPECIFIED (0): + Not set. + BASIC (1): + BASIC tier: standalone instance + STANDARD_HA (3): + STANDARD_HA tier: highly available primary/replica instances + """ + TIER_UNSPECIFIED = 0 + BASIC = 1 + STANDARD_HA = 3 + + class ConnectMode(proto.Enum): + r"""Available connection modes. + + Values: + CONNECT_MODE_UNSPECIFIED (0): + Not set. + DIRECT_PEERING (1): + Connect via direct peering to the Memorystore + for Redis hosted service. + PRIVATE_SERVICE_ACCESS (2): + Connect your Memorystore for Redis instance + using Private Service Access. Private services + access provides an IP address range for multiple + Google Cloud services, including Memorystore. + """ + CONNECT_MODE_UNSPECIFIED = 0 + DIRECT_PEERING = 1 + PRIVATE_SERVICE_ACCESS = 2 + + class TransitEncryptionMode(proto.Enum): + r"""Available TLS modes. + + Values: + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED (0): + Not set. + SERVER_AUTHENTICATION (1): + Client to Server traffic encryption enabled + with server authentication. + DISABLED (2): + TLS is disabled for the instance. + """ + TRANSIT_ENCRYPTION_MODE_UNSPECIFIED = 0 + SERVER_AUTHENTICATION = 1 + DISABLED = 2 + + class ReadReplicasMode(proto.Enum): + r"""Read replicas mode. + + Values: + READ_REPLICAS_MODE_UNSPECIFIED (0): + If not set, Memorystore Redis backend will default to + READ_REPLICAS_DISABLED. + READ_REPLICAS_DISABLED (1): + If disabled, read endpoint will not be + provided and the instance cannot scale up or + down the number of replicas. + READ_REPLICAS_ENABLED (2): + If enabled, read endpoint will be provided + and the instance can scale up and down the + number of replicas. Not valid for basic tier. 
+ """ + READ_REPLICAS_MODE_UNSPECIFIED = 0 + READ_REPLICAS_DISABLED = 1 + READ_REPLICAS_ENABLED = 2 + + class SuspensionReason(proto.Enum): + r"""Possible reasons for the instance to be in a "SUSPENDED" + state. + + Values: + SUSPENSION_REASON_UNSPECIFIED (0): + Not set. + CUSTOMER_MANAGED_KEY_ISSUE (1): + Something wrong with the CMEK key provided by + customer. + """ + SUSPENSION_REASON_UNSPECIFIED = 0 + CUSTOMER_MANAGED_KEY_ISSUE = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + location_id: str = proto.Field( + proto.STRING, + number=4, + ) + alternative_location_id: str = proto.Field( + proto.STRING, + number=5, + ) + redis_version: str = proto.Field( + proto.STRING, + number=7, + ) + reserved_ip_range: str = proto.Field( + proto.STRING, + number=9, + ) + secondary_ip_range: str = proto.Field( + proto.STRING, + number=30, + ) + host: str = proto.Field( + proto.STRING, + number=10, + ) + port: int = proto.Field( + proto.INT32, + number=11, + ) + current_location_id: str = proto.Field( + proto.STRING, + number=12, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + state: State = proto.Field( + proto.ENUM, + number=14, + enum=State, + ) + status_message: str = proto.Field( + proto.STRING, + number=15, + ) + redis_configs: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + tier: Tier = proto.Field( + proto.ENUM, + number=17, + enum=Tier, + ) + memory_size_gb: int = proto.Field( + proto.INT32, + number=18, + ) + authorized_network: str = proto.Field( + proto.STRING, + number=20, + ) + persistence_iam_identity: str = proto.Field( + proto.STRING, + number=21, + ) + connect_mode: ConnectMode = proto.Field( + proto.ENUM, + number=22, + enum=ConnectMode, + ) + 
auth_enabled: bool = proto.Field( + proto.BOOL, + number=23, + ) + server_ca_certs: MutableSequence['TlsCertificate'] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message='TlsCertificate', + ) + transit_encryption_mode: TransitEncryptionMode = proto.Field( + proto.ENUM, + number=26, + enum=TransitEncryptionMode, + ) + maintenance_policy: 'MaintenancePolicy' = proto.Field( + proto.MESSAGE, + number=27, + message='MaintenancePolicy', + ) + maintenance_schedule: 'MaintenanceSchedule' = proto.Field( + proto.MESSAGE, + number=28, + message='MaintenanceSchedule', + ) + replica_count: int = proto.Field( + proto.INT32, + number=31, + ) + nodes: MutableSequence['NodeInfo'] = proto.RepeatedField( + proto.MESSAGE, + number=32, + message='NodeInfo', + ) + read_endpoint: str = proto.Field( + proto.STRING, + number=33, + ) + read_endpoint_port: int = proto.Field( + proto.INT32, + number=34, + ) + read_replicas_mode: ReadReplicasMode = proto.Field( + proto.ENUM, + number=35, + enum=ReadReplicasMode, + ) + customer_managed_key: str = proto.Field( + proto.STRING, + number=36, + ) + persistence_config: 'PersistenceConfig' = proto.Field( + proto.MESSAGE, + number=37, + message='PersistenceConfig', + ) + suspension_reasons: MutableSequence[SuspensionReason] = proto.RepeatedField( + proto.ENUM, + number=38, + enum=SuspensionReason, + ) + maintenance_version: str = proto.Field( + proto.STRING, + number=39, + ) + available_maintenance_versions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=40, + ) + + +class PersistenceConfig(proto.Message): + r"""Configuration of the persistence functionality. + + Attributes: + persistence_mode (google.cloud.redis_v1.types.PersistenceConfig.PersistenceMode): + Optional. Controls whether Persistence + features are enabled. If not provided, the + existing value will be used. + rdb_snapshot_period (google.cloud.redis_v1.types.PersistenceConfig.SnapshotPeriod): + Optional. Period between RDB snapshots. 
Snapshots will be + attempted every period starting from the provided snapshot + start time. For example, a start time of 01/01/2033 06:45 + and SIX_HOURS snapshot period will do nothing until + 01/01/2033, and then trigger snapshots every day at 06:45, + 12:45, 18:45, and 00:45 the next day, and so on. If not + provided, TWENTY_FOUR_HOURS will be used as default. + rdb_next_snapshot_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The next time that a snapshot + attempt is scheduled to occur. + rdb_snapshot_start_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Date and time that the first + snapshot was/will be attempted, and to which + future snapshots will be aligned. If not + provided, the current time will be used. + """ + class PersistenceMode(proto.Enum): + r"""Available Persistence modes. + + Values: + PERSISTENCE_MODE_UNSPECIFIED (0): + Not set. + DISABLED (1): + Persistence is disabled for the instance, + and any existing snapshots are deleted. + RDB (2): + RDB based Persistence is enabled. + """ + PERSISTENCE_MODE_UNSPECIFIED = 0 + DISABLED = 1 + RDB = 2 + + class SnapshotPeriod(proto.Enum): + r"""Available snapshot periods for scheduling. + + Values: + SNAPSHOT_PERIOD_UNSPECIFIED (0): + Not set. + ONE_HOUR (3): + Snapshot every 1 hour. + SIX_HOURS (4): + Snapshot every 6 hours. + TWELVE_HOURS (5): + Snapshot every 12 hours. + TWENTY_FOUR_HOURS (6): + Snapshot every 24 hours. 
+ """ + SNAPSHOT_PERIOD_UNSPECIFIED = 0 + ONE_HOUR = 3 + SIX_HOURS = 4 + TWELVE_HOURS = 5 + TWENTY_FOUR_HOURS = 6 + + persistence_mode: PersistenceMode = proto.Field( + proto.ENUM, + number=1, + enum=PersistenceMode, + ) + rdb_snapshot_period: SnapshotPeriod = proto.Field( + proto.ENUM, + number=2, + enum=SnapshotPeriod, + ) + rdb_next_snapshot_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + rdb_snapshot_start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class MaintenancePolicy(proto.Message): + r"""Maintenance policy for an instance. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the policy was + last updated. + description (str): + Optional. Description of what this policy is for. + Create/Update methods return INVALID_ARGUMENT if the length + is greater than 512. + weekly_maintenance_window (MutableSequence[google.cloud.redis_v1.types.WeeklyMaintenanceWindow]): + Optional. Maintenance window that is applied to resources + covered by this policy. Minimum 1. For the current version, + the maximum number of weekly_window is expected to be one. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + weekly_maintenance_window: MutableSequence['WeeklyMaintenanceWindow'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='WeeklyMaintenanceWindow', + ) + + +class WeeklyMaintenanceWindow(proto.Message): + r"""Time window in which disruptive maintenance updates occur. 
+ Non-disruptive updates can occur inside or outside this window. + + Attributes: + day (google.type.dayofweek_pb2.DayOfWeek): + Required. The day of week that maintenance + updates occur. + start_time (google.type.timeofday_pb2.TimeOfDay): + Required. Start time of the window in UTC + time. + duration (google.protobuf.duration_pb2.Duration): + Output only. Duration of the maintenance + window. The current window is fixed at 1 hour. + """ + + day: dayofweek_pb2.DayOfWeek = proto.Field( + proto.ENUM, + number=1, + enum=dayofweek_pb2.DayOfWeek, + ) + start_time: timeofday_pb2.TimeOfDay = proto.Field( + proto.MESSAGE, + number=2, + message=timeofday_pb2.TimeOfDay, + ) + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class MaintenanceSchedule(proto.Message): + r"""Upcoming maintenance schedule. If no maintenance is + scheduled, fields are not populated. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The start time of any upcoming + scheduled maintenance for this instance. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The end time of any upcoming + scheduled maintenance for this instance. + can_reschedule (bool): + If the scheduled maintenance can be + rescheduled, default is true. + schedule_deadline_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The deadline that the + maintenance schedule start time can not go + beyond, including reschedule. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + can_reschedule: bool = proto.Field( + proto.BOOL, + number=3, + ) + schedule_deadline_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + + +class ListInstancesRequest(proto.Message): + r"""Request for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + page_size (int): + The maximum number of items to return. + + If not specified, a default value of 1000 will be used by + the service. Regardless of the page_size value, the response + may include a partial list and a caller should only rely on + response's + [``next_page_token``][google.cloud.redis.v1.ListInstancesResponse.next_page_token] + to determine if there are more instances left to be queried. + page_token (str): + The ``next_page_token`` value returned from a previous + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances] + request, if any. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInstancesResponse(proto.Message): + r"""Response for + [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. + + Attributes: + instances (MutableSequence[google.cloud.redis_v1.types.Instance]): + A list of Redis instances in the project in the specified + location, or across all locations. 
+ + If the ``location_id`` in the parent field of the request is + "-", all regions available to the project are queried, and + the results aggregated. If in such an aggregated query a + location is unavailable, a placeholder Redis entry is + included in the response with the ``name`` field set to a + value of the form + ``projects/{project_id}/locations/{location_id}/instances/``- + and the ``status`` field set to ERROR and ``status_message`` + field set to "location not available for ListInstances". + next_page_token (str): + Token to retrieve the next page of results, + or empty if there are no more results in the + list. + unreachable (MutableSequence[str]): + Locations that could not be reached. + """ + + @property + def raw_page(self): + return self + + instances: MutableSequence['Instance'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Instance', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class GetInstanceRequest(proto.Message): + r"""Request for + [GetInstance][google.cloud.redis.v1.CloudRedis.GetInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateInstanceRequest(proto.Message): + r"""Request for + [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. + + Attributes: + parent (str): + Required. The resource name of the instance location using + the form: ``projects/{project_id}/locations/{location_id}`` + where ``location_id`` refers to a GCP region. + instance_id (str): + Required. The logical name of the Redis instance in the + customer project with the following restrictions: + + - Must contain only lowercase letters, numbers, and + hyphens. 
+ - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + instance (google.cloud.redis_v1.types.Instance): + Required. A Redis [Instance] resource + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + instance_id: str = proto.Field( + proto.STRING, + number=2, + ) + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=3, + message='Instance', + ) + + +class UpdateInstanceRequest(proto.Message): + r"""Request for + [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Mask of fields to update. At least one path must + be supplied in this field. The elements of the repeated + paths field may only include these fields from + [Instance][google.cloud.redis.v1.Instance]: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` + instance (google.cloud.redis_v1.types.Instance): + Required. Update description. Only fields specified in + update_mask are updated. + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + instance: 'Instance' = proto.Field( + proto.MESSAGE, + number=2, + message='Instance', + ) + + +class DeleteInstanceRequest(proto.Message): + r"""Request for + [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. + + Attributes: + name (str): + Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OperationMetadata(proto.Message): + r"""Represents the v1 metadata of the long-running operation. + + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Creation timestamp. 
+ end_time (google.protobuf.timestamp_pb2.Timestamp): + End timestamp. + target (str): + Operation target. + verb (str): + Operation verb. + status_detail (str): + Operation status details. + cancel_requested (bool): + Specifies if cancellation was requested for + the operation. + api_version (str): + API version. + """ + + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target: str = proto.Field( + proto.STRING, + number=3, + ) + verb: str = proto.Field( + proto.STRING, + number=4, + ) + status_detail: str = proto.Field( + proto.STRING, + number=5, + ) + cancel_requested: bool = proto.Field( + proto.BOOL, + number=6, + ) + api_version: str = proto.Field( + proto.STRING, + number=7, + ) + + +class TlsCertificate(proto.Message): + r"""TlsCertificate Resource + + Attributes: + serial_number (str): + Serial number, as extracted from the + certificate. + cert (str): + PEM representation. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the certificate was created in + `RFC 3339 `__ format, + for example ``2020-05-18T00:00:00.094Z``. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the certificate expires in `RFC + 3339 `__ format, for + example ``2020-05-18T00:00:00.094Z``. + sha1_fingerprint (str): + Sha1 Fingerprint of the certificate. 
+ """ + + serial_number: str = proto.Field( + proto.STRING, + number=1, + ) + cert: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + sha1_fingerprint: str = proto.Field( + proto.STRING, + number=5, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/mypy.ini b/packages/gapic-generator/tests/integration/goldens/redis_selective/mypy.ini new file mode 100755 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py new file mode 100755 index 000000000000..755a3329f3c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-redis' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/redis_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/redis_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py new file mode 100755 index 000000000000..3385bd5f99e4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_CreateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_create_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_CreateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py new file mode 100755 index 000000000000..4ea53ed516c6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_CreateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_create_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.CreateInstanceRequest( + parent="parent_value", + instance_id="instance_id_value", + instance=instance, + ) + + # Make the request + operation = client.create_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_CreateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py new file mode 100755 index 000000000000..462ec0e4ee0d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_DeleteInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_delete_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_DeleteInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py new file mode 100755 index 000000000000..d933dd8a4521 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_DeleteInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_delete_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.DeleteInstanceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_DeleteInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py new file mode 100755 index 000000000000..e13bffbd78cc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_GetInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_get_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_instance(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_GetInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py new file mode 100755 index 000000000000..4a6053e1d95d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_GetInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_get_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.GetInstanceRequest( + name="name_value", + ) + + # Make the request + response = client.get_instance(request=request) + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_GetInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py new file mode 100755 index 000000000000..19396f14baeb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_ListInstances_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_list_instances(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + request = redis_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END redis_v1_generated_CloudRedis_ListInstances_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py new file mode 100755 index 000000000000..ec07ceff62c3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListInstances +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_ListInstances_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_list_instances(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + request = redis_v1.ListInstancesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_instances(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END redis_v1_generated_CloudRedis_ListInstances_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py new file mode 100755 index 000000000000..9a6d69923d3f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_UpdateInstance_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +async def sample_update_instance(): + # Create a client + client = redis_v1.CloudRedisAsyncClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_UpdateInstance_async] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py new file mode 100755 index 000000000000..2429f106358d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateInstance +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-redis + + +# [START redis_v1_generated_CloudRedis_UpdateInstance_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import redis_v1 + + +def sample_update_instance(): + # Create a client + client = redis_v1.CloudRedisClient() + + # Initialize request argument(s) + instance = redis_v1.Instance() + instance.name = "name_value" + instance.tier = "STANDARD_HA" + instance.memory_size_gb = 1499 + + request = redis_v1.UpdateInstanceRequest( + instance=instance, + ) + + # Make the request + operation = client.update_instance(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END redis_v1_generated_CloudRedis_UpdateInstance_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json new file mode 100755 index 000000000000..467e4c62cea1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -0,0 +1,844 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.redis.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-redis", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.create_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.CreateInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "CreateInstance" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "redis_v1_generated_cloud_redis_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_create_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.create_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.CreateInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "CreateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "instance", + "type": 
"google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" + }, + "description": "Sample for CreateInstance", + "file": "redis_v1_generated_cloud_redis_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_CreateInstance_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 52, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 53, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_create_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.delete_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.DeleteInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" + }, + 
"description": "Sample for DeleteInstance", + "file": "redis_v1_generated_cloud_redis_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_delete_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.delete_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.DeleteInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "DeleteInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" + }, + "description": "Sample for DeleteInstance", + "file": "redis_v1_generated_cloud_redis_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_DeleteInstance_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_delete_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.get_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.redis_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "redis_v1_generated_cloud_redis_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_GetInstance_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_get_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.get_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.GetInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "GetInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.GetInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.redis_v1.types.Instance", + "shortName": "get_instance" + }, + "description": "Sample for GetInstance", + "file": "redis_v1_generated_cloud_redis_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_GetInstance_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_get_instance_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.list_instances", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ListInstances", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "ListInstances" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "redis_v1_generated_cloud_redis_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_ListInstances_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_list_instances_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.list_instances", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.ListInstances", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "ListInstances" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.ListInstancesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"resultType": "google.cloud.redis_v1.services.cloud_redis.pagers.ListInstancesPager", + "shortName": "list_instances" + }, + "description": "Sample for ListInstances", + "file": "redis_v1_generated_cloud_redis_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_ListInstances_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_list_instances_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient", + "shortName": "CloudRedisAsyncClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisAsyncClient.update_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.UpdateInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.UpdateInstanceRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": 
"redis_v1_generated_cloud_redis_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_async", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_update_instance_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.redis_v1.CloudRedisClient", + "shortName": "CloudRedisClient" + }, + "fullName": "google.cloud.redis_v1.CloudRedisClient.update_instance", + "method": { + "fullName": "google.cloud.redis.v1.CloudRedis.UpdateInstance", + "service": { + "fullName": "google.cloud.redis.v1.CloudRedis", + "shortName": "CloudRedis" + }, + "shortName": "UpdateInstance" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.redis_v1.types.UpdateInstanceRequest" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "instance", + "type": "google.cloud.redis_v1.types.Instance" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" + }, + "description": "Sample for UpdateInstance", + "file": "redis_v1_generated_cloud_redis_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "redis_v1_generated_CloudRedis_UpdateInstance_sync", + "segments": [ + { + "end": 60, + "start": 27, + "type": "FULL" + }, + { + "end": 60, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 57, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 61, + "start": 58, + "type": "RESPONSE_HANDLING" + } + ], + "title": "redis_v1_generated_cloud_redis_update_instance_sync.py" + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py new file mode 100755 index 000000000000..7022cedcf37c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py @@ -0,0 +1,180 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class redisCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_instance': ('parent', 'instance_id', 'instance', ), + 'delete_instance': ('name', ), + 'get_instance': ('name', ), + 'list_instances': ('parent', 'page_size', 'page_token', ), + 'update_instance': ('update_mask', 'instance', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=redisCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the redis client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py new file mode 100755 index 000000000000..220725b65cc0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -0,0 +1,102 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-redis' + + +description = "Google Cloud Redis API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/redis/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { + "async_rest": [ + "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", + "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + ], +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = 
readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt new file mode 100755 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py new file mode 100755 index 000000000000..05cb264cdb78 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -0,0 +1,8061 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +try: + import aiohttp # type: ignore + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + HAS_ASYNC_REST_EXTRA = True +except ImportError: # pragma: NO COVER + HAS_ASYNC_REST_EXTRA = False +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import future +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import operation +from google.api_core import operation_async # type: ignore +from google.api_core import operations_v1 +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.cloud.redis_v1.services.cloud_redis import CloudRedisAsyncClient +from google.cloud.redis_v1.services.cloud_redis import CloudRedisClient +from 
google.cloud.redis_v1.services.cloud_redis import pagers +from google.cloud.redis_v1.services.cloud_redis import transports +from google.cloud.redis_v1.types import cloud_redis +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. 
+# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert CloudRedisClient._get_default_mtls_endpoint(None) is None + assert CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + +def test__read_environment_variables(): + assert CloudRedisClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CloudRedisClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CloudRedisClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + CloudRedisClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert 
CloudRedisClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert CloudRedisClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert CloudRedisClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + CloudRedisClient._read_environment_variables() + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert CloudRedisClient._read_environment_variables() == (False, "auto", "foo.com") + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert CloudRedisClient._get_client_cert_source(None, False) is None + assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, False) is None + assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source + + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source): + assert CloudRedisClient._get_client_cert_source(None, True) is mock_default_cert_source + assert CloudRedisClient._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source + +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +def test__get_api_endpoint(): + api_override = 
"foo.com" + mock_client_cert_source = mock.Mock() + default_universe = CloudRedisClient._DEFAULT_UNIVERSE + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + assert CloudRedisClient._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override + assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint + assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == CloudRedisClient.DEFAULT_MTLS_ENDPOINT + assert CloudRedisClient._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint + assert CloudRedisClient._get_api_endpoint(None, None, default_universe, "never") == default_endpoint + + with pytest.raises(MutualTLSChannelError) as excinfo: + CloudRedisClient._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto") + assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com." 
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert CloudRedisClient._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert CloudRedisClient._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert CloudRedisClient._get_universe_domain(None, None) == CloudRedisClient._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + CloudRedisClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = CloudRedisClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = CloudRedisClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, 
"grpc_asyncio"), + (CloudRedisClient, "rest"), +]) +def test_cloud_redis_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://redis.googleapis.com' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.CloudRedisGrpcTransport, "grpc"), + (transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudRedisRestTransport, "rest"), +]) +def test_cloud_redis_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (CloudRedisClient, "grpc"), + (CloudRedisAsyncClient, "grpc_asyncio"), + (CloudRedisClient, "rest"), +]) +def test_cloud_redis_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = 
client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else + 'https://redis.googleapis.com' + ) + + +def test_cloud_redis_client_get_transport_class(): + transport = CloudRedisClient.get_transport_class() + available_transports = [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisRestTransport, + ] + assert transport in available_transports + + transport = CloudRedisClient.get_transport_class("grpc") + assert transport == transports.CloudRedisGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), +]) +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +def test_cloud_redis_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(CloudRedisClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"), + 
(CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", "false"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "true"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", "false"), +]) +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_redis_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is 
provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, CloudRedisAsyncClient +]) +@mock.patch.object(CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisAsyncClient)) +def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + CloudRedisClient, CloudRedisAsyncClient +]) +@mock.patch.object(CloudRedisClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisClient)) +@mock.patch.object(CloudRedisAsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(CloudRedisAsyncClient)) +def test_cloud_redis_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = CloudRedisClient._DEFAULT_UNIVERSE + default_endpoint = CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = 
CloudRedisClient._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest"), +]) +def test_cloud_redis_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), + (CloudRedisClient, transports.CloudRedisRestTransport, "rest", None), +]) +def test_cloud_redis_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_cloud_redis_client_client_options_from_dict(): + with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = CloudRedisClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", grpc_helpers), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_cloud_redis_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, 
so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_instances_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_instances(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.ListInstancesRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_instances_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instances_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_instances in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_instances] = mock_rpc + + request = {} + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_instances_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.ListInstancesRequest): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis.ListInstancesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_instances_async_from_dict(): + await test_list_instances_async(request_type=dict) + +def test_list_instances_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ListInstancesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = cloud_redis.ListInstancesResponse() + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_instances_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.ListInstancesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + await client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_instances_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.ListInstancesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_instances( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_instances_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_redis.ListInstancesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_instances_flattened_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.ListInstancesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_instances( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_instances_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_instances( + cloud_redis.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_pager(transport_name: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_instances(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_redis.Instance) + for i in results) +def test_list_instances_pages(transport_name: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + pages = list(client.list_instances(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_instances_async_pager(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_instances(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, cloud_redis.Instance) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_instances_async_pages(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_instances(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +def test_get_instance(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + response = client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +def test_get_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty 
requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.GetInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.get_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.GetInstanceRequest( + name='name_value', + ) + +def test_get_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_instance] = mock_rpc + + request = {} + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + )) + response = await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis.GetInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.asyncio +async def test_get_instance_async_from_dict(): + await test_get_instance_async(request_type=dict) + +def test_get_instance_field_headers(): + client = CloudRedisClient( + 
credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = cloud_redis.Instance() + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.GetInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + await client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.Instance() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_instance( + cloud_redis.GetInstanceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = cloud_redis.Instance() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_instance( + cloud_redis.GetInstanceRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client.create_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.CreateInstanceRequest( + parent='parent_value', + instance_id='instance_id_value', + ) + +def test_create_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc + request = {} + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_instance] = mock_rpc + + request = {} + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.CreateInstanceRequest): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis.CreateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_instance_async_from_dict(): + await test_create_instance_async(request_type=dict) + +def test_create_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.CreateInstanceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.CreateInstanceRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_instance( + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_id + mock_val = 'instance_id_value' + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val + + +def test_create_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_instance( + cloud_redis.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_instance( + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].instance_id + mock_val = 'instance_id_value' + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_instance( + cloud_redis.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +def test_update_instance(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.UpdateInstanceRequest( + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.UpdateInstanceRequest( + ) + +def test_update_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_instance] = mock_rpc + + request = {} + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.UpdateInstanceRequest): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis.UpdateInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_instance_async_from_dict(): + await test_update_instance_async(request_type=dict) + +def test_update_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.UpdateInstanceRequest() + + request.instance.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.UpdateInstanceRequest() + + request.instance.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'instance.name=name_value', + ) in kw['metadata'] + + +def test_update_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_instance( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val + + +def test_update_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_instance( + cloud_redis.UpdateInstanceRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_update_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_instance( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + arg = args[0].instance + mock_val = cloud_redis.Instance(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_update_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_instance( + cloud_redis.UpdateInstanceRequest(), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +def test_delete_instance(request_type, transport: str = 'grpc'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = cloud_redis.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_instance_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = cloud_redis.DeleteInstanceRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_instance(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == cloud_redis.DeleteInstanceRequest( + name='name_value', + ) + +def test_delete_instance_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc + request = {} + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_instance in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_instance] = mock_rpc + + request = {} + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_instance_async(transport: str = 'grpc_asyncio', request_type=cloud_redis.DeleteInstanceRequest): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = cloud_redis.DeleteInstanceRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_instance_async_from_dict(): + await test_delete_instance_async(request_type=dict) + +def test_delete_instance_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.DeleteInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_instance_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = cloud_redis.DeleteInstanceRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.delete_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_instance_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_instance_flattened_error(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_redis.DeleteInstanceRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_instance_flattened_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_instance( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_instance_flattened_error_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_instance( + cloud_redis.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_list_instances_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_instances in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_instances] = mock_rpc + + request = {} + client.list_instances(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_instances(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_instances_rest_required_fields(request_type=cloud_redis.ListInstancesRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = 'parent_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).list_instances._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("page_size", "page_token", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloud_redis.ListInstancesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_instances(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_list_instances_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.list_instances._get_unset_required_fields({}) + assert set(unset_fields) == (set(("pageSize", "pageToken", )) & set(("parent", ))) + + +def test_list_instances_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.ListInstancesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_instances(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_list_instances_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_instances( + cloud_redis.ListInstancesRequest(), + parent='parent_value', + ) + + +def test_list_instances_rest_pager(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ #with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + next_page_token='abc', + ), + cloud_redis.ListInstancesResponse( + instances=[], + next_page_token='def', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + ], + next_page_token='ghi', + ), + cloud_redis.ListInstancesResponse( + instances=[ + cloud_redis.Instance(), + cloud_redis.Instance(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloud_redis.ListInstancesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode('UTF-8') + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + pager = client.list_instances(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloud_redis.Instance) + for i in results) + + pages = list(client.list_instances(request=sample_request).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + + +def test_get_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
client._transport.get_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_instance] = mock_rpc + + request = {} + client.get_instance(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_instance_rest_required_fields(request_type=cloud_redis.GetInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).get_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "get", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_get_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.get_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_get_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance() + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_get_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.get_instance(
+ cloud_redis.GetInstanceRequest(),
+ name='name_value',
+ )
+
+
+def test_create_instance_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = CloudRedisClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.create_instance in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock, keyed by the transport's
+ # bound method so the client-level call resolves to it.
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.create_instance] = mock_rpc
+
+ # NOTE(review): a plain dict is accepted as the request here —
+ # presumably coerced to CreateInstanceRequest by the client; confirm.
+ request = {}
+ client.create_instance(request)
+
+ # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_instance_rest_required_fields(request_type=cloud_redis.CreateInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["instance_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + assert "instanceId" not in jsonified_request + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == request_init["instance_id"] + + jsonified_request["parent"] = 'parent_value' + jsonified_request["instanceId"] = 'instance_id_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).create_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("instance_id", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == 'parent_value' + assert "instanceId" in jsonified_request + assert jsonified_request["instanceId"] == 'instance_id_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "post", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_instance(request) + + expected_params = [ + ( + "instanceId", + "", + ), + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_create_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.create_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("instanceId", )) & set(("parent", "instanceId", "instance", ))) + + +def test_create_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'parent': 'projects/sample1/locations/sample2'} + + # get truthy value for each flattened field + mock_args = dict( + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{parent=projects/*/locations/*}/instances" % client.transport._host, args[1]) + + +def test_create_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_instance( + cloud_redis.CreateInstanceRequest(), + parent='parent_value', + instance_id='instance_id_value', + instance=cloud_redis.Instance(name='name_value'), + ) + + +def test_update_instance_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_instance in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_instance] = mock_rpc + + request = {} + client.update_instance(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_instance_rest_required_fields(request_type=cloud_redis.UpdateInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).update_instance._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask", )) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "patch", + 'query_params': pb_request, + } + transcode_result['body'] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_update_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.update_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask", )) & set(("updateMask", "instance", ))) + + +def test_update_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + + # get truthy value for each flattened field + mock_args = dict( + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + instance=cloud_redis.Instance(name='name_value'), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{instance.name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_update_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.update_instance(
+ cloud_redis.UpdateInstanceRequest(),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ instance=cloud_redis.Instance(name='name_value'),
+ )
+
+
+def test_delete_instance_rest_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = CloudRedisClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_instance in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock, keyed by the transport's
+ # bound method so the client-level call resolves to it.
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+ client._transport._wrapped_methods[client._transport.delete_instance] = mock_rpc
+
+ # NOTE(review): a plain dict is accepted as the request here —
+ # presumably coerced to DeleteInstanceRequest by the client; confirm.
+ request = {}
+ client.delete_instance(request)
+
+ # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_instance(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_instance_rest_required_fields(request_type=cloud_redis.DeleteInstanceRequest): + transport_class = transports.CloudRedisRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads(json_format.MessageToJson( + pb_request, + use_integers_for_enums=False + )) + + # verify fields with default values are dropped + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = 'name_value' + + unset_fields = transport_class(credentials=ga_credentials.AnonymousCredentials()).delete_instance._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == 'name_value' + + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='rest', + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, 'request') as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, 'transcode') as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + 'uri': 'v1/sample_method', + 'method': "delete", + 'query_params': pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_instance(request) + + expected_params = [ + ] + actual_params = req.call_args.kwargs['params'] + assert expected_params == actual_params + + +def test_delete_instance_rest_unset_required_fields(): + transport = transports.CloudRedisRestTransport(credentials=ga_credentials.AnonymousCredentials) + + unset_fields = transport.delete_instance._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name", ))) + + +def test_delete_instance_rest_flattened(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name='operations/spam') + + # get arguments that satisfy an http rule for this method + sample_request = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + + # get truthy value for each flattened field + mock_args = dict( + name='name_value', + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_instance(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate("%s/v1/{name=projects/*/locations/*/instances/*}" % client.transport._host, args[1]) + + +def test_delete_instance_rest_flattened_error(transport: str = 'rest'): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_instance( + cloud_redis.DeleteInstanceRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudRedisClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudRedisClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudRedisGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudRedisGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = CloudRedisClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + call.return_value = cloud_redis.ListInstancesResponse() + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + call.return_value = cloud_redis.Instance() + client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_grpc(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CloudRedisAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instances_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + await client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + 
read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + )) + await client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_empty_call_grpc_asyncio(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client.delete_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CloudRedisClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_instances_rest_bad_request(request_type=cloud_redis.ListInstancesRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_instances(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +def test_list_instances_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_instances_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + req.return_value.content = return_value + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata + + client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_instance_rest_bad_request(request_type=cloud_redis.GetInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +def test_get_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + 
credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value.content = return_value + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + post_with_metadata.return_value = cloud_redis.Instance(), metadata + + client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_instance_rest_bad_request(request_type=cloud_redis.CreateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +def test_create_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 
'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_create_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = cloud_redis.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_instance_rest_bad_request(request_type=cloud_redis.UpdateInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +def test_update_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 
'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_update_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = cloud_redis.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_instance_rest_bad_request(request_type=cloud_redis.DeleteInstanceRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_instance(request) + + +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +def test_delete_instance_rest_call_success(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_instance_rest_interceptors(null_interceptor): + transport = transports.CloudRedisRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.CloudRedisRestInterceptor(), + ) + client = CloudRedisClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.CloudRedisRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = cloud_redis.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +def test_get_location_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request(request_type=locations_pb2.ListLocationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +def test_list_locations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request(request_type=operations_pb2.CancelOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +def test_cancel_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_rest_bad_request(request_type=operations_pb2.DeleteOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +def test_delete_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_get_operation_rest_bad_request(request_type=operations_pb2.GetOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +def test_get_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request(request_type=operations_pb2.ListOperationsRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +def test_list_operations_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_wait_operation_rest_bad_request(request_type=operations_pb2.WaitOperationRequest): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = '' + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.wait_operation(request) + + +@pytest.mark.parametrize("request_type", [ + operations_pb2.WaitOperationRequest, + dict, +]) +def test_wait_operation_rest(request_type): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode('UTF-8') + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_initialize_client_w_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_instances_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + client.get_instance(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_instance_empty_call_rest(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +def test_cloud_redis_rest_lro_client(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, +operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_transport_kind_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = CloudRedisAsyncClient.get_transport_class("rest_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "rest_asyncio" + + +@pytest.mark.asyncio +async def test_list_instances_rest_asyncio_bad_request(request_type=cloud_redis.ListInstancesRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.list_instances(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.ListInstancesRequest, + dict, +]) +async def test_list_instances_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.ListInstancesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.ListInstancesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.list_instances(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInstancesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_list_instances_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_list_instances_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_list_instances") as pre: + pre.assert_not_called() + post.assert_not_called() + 
post_with_metadata.assert_not_called() + pb_message = cloud_redis.ListInstancesRequest.pb(cloud_redis.ListInstancesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.ListInstancesResponse.to_json(cloud_redis.ListInstancesResponse()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.ListInstancesRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.ListInstancesResponse() + post_with_metadata.return_value = cloud_redis.ListInstancesResponse(), metadata + + await client.list_instances(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_get_instance_rest_asyncio_bad_request(request_type=cloud_redis.GetInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.GetInstanceRequest, + dict, +]) +async def test_get_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloud_redis.Instance( + name='name_value', + display_name='display_name_value', + location_id='location_id_value', + alternative_location_id='alternative_location_id_value', + redis_version='redis_version_value', + reserved_ip_range='reserved_ip_range_value', + secondary_ip_range='secondary_ip_range_value', + host='host_value', + port=453, + current_location_id='current_location_id_value', + state=cloud_redis.Instance.State.CREATING, + status_message='status_message_value', + tier=cloud_redis.Instance.Tier.BASIC, + memory_size_gb=1499, + authorized_network='authorized_network_value', + persistence_iam_identity='persistence_iam_identity_value', + connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING, + auth_enabled=True, + transit_encryption_mode=cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION, + replica_count=1384, + read_endpoint='read_endpoint_value', + read_endpoint_port=1920, + read_replicas_mode=cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED, + customer_managed_key='customer_managed_key_value', + suspension_reasons=[cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE], + maintenance_version='maintenance_version_value', + available_maintenance_versions=['available_maintenance_versions_value'], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = cloud_redis.Instance.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.get_instance(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloud_redis.Instance) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.location_id == 'location_id_value' + assert response.alternative_location_id == 'alternative_location_id_value' + assert response.redis_version == 'redis_version_value' + assert response.reserved_ip_range == 'reserved_ip_range_value' + assert response.secondary_ip_range == 'secondary_ip_range_value' + assert response.host == 'host_value' + assert response.port == 453 + assert response.current_location_id == 'current_location_id_value' + assert response.state == cloud_redis.Instance.State.CREATING + assert response.status_message == 'status_message_value' + assert response.tier == cloud_redis.Instance.Tier.BASIC + assert response.memory_size_gb == 1499 + assert response.authorized_network == 'authorized_network_value' + assert response.persistence_iam_identity == 'persistence_iam_identity_value' + assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING + assert response.auth_enabled is True + assert response.transit_encryption_mode == cloud_redis.Instance.TransitEncryptionMode.SERVER_AUTHENTICATION + assert response.replica_count == 1384 + assert response.read_endpoint == 'read_endpoint_value' + assert response.read_endpoint_port == 1920 + assert response.read_replicas_mode == cloud_redis.Instance.ReadReplicasMode.READ_REPLICAS_DISABLED + assert response.customer_managed_key == 'customer_managed_key_value' + assert response.suspension_reasons == [cloud_redis.Instance.SuspensionReason.CUSTOMER_MANAGED_KEY_ISSUE] + assert response.maintenance_version == 'maintenance_version_value' + assert response.available_maintenance_versions == ['available_maintenance_versions_value'] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_get_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the 
library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_get_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_get_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.GetInstanceRequest.pb(cloud_redis.GetInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = cloud_redis.Instance.to_json(cloud_redis.Instance()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.GetInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloud_redis.Instance() + post_with_metadata.return_value = cloud_redis.Instance(), metadata + + await client.get_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_create_instance_rest_asyncio_bad_request(request_type=cloud_redis.CreateInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the 
library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.create_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.CreateInstanceRequest, + dict, +]) +async def test_create_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'parent': 'projects/sample1/locations/sample2'} + request_init["instance"] = {'name': 'name_value', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 
1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.CreateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.create_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_create_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_create_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_create_instance") as pre: + 
pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.CreateInstanceRequest.pb(cloud_redis.CreateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.CreateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + await client.create_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_update_instance_rest_asyncio_bad_request(request_type=cloud_redis.UpdateInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.update_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.UpdateInstanceRequest, + dict, +]) +async def test_update_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'instance': {'name': 'projects/sample1/locations/sample2/instances/sample3'}} + request_init["instance"] = {'name': 'projects/sample1/locations/sample2/instances/sample3', 'display_name': 'display_name_value', 'labels': {}, 'location_id': 'location_id_value', 'alternative_location_id': 'alternative_location_id_value', 'redis_version': 'redis_version_value', 'reserved_ip_range': 'reserved_ip_range_value', 'secondary_ip_range': 'secondary_ip_range_value', 'host': 'host_value', 'port': 453, 'current_location_id': 'current_location_id_value', 'create_time': {'seconds': 751, 'nanos': 543}, 'state': 1, 'status_message': 'status_message_value', 'redis_configs': {}, 'tier': 1, 'memory_size_gb': 1499, 'authorized_network': 'authorized_network_value', 'persistence_iam_identity': 'persistence_iam_identity_value', 'connect_mode': 1, 'auth_enabled': True, 'server_ca_certs': [{'serial_number': 'serial_number_value', 'cert': 'cert_value', 'create_time': {}, 'expire_time': {}, 'sha1_fingerprint': 'sha1_fingerprint_value'}], 
'transit_encryption_mode': 1, 'maintenance_policy': {'create_time': {}, 'update_time': {}, 'description': 'description_value', 'weekly_maintenance_window': [{'day': 1, 'start_time': {'hours': 561, 'minutes': 773, 'seconds': 751, 'nanos': 543}, 'duration': {'seconds': 751, 'nanos': 543}}]}, 'maintenance_schedule': {'start_time': {}, 'end_time': {}, 'can_reschedule': True, 'schedule_deadline_time': {}}, 'replica_count': 1384, 'nodes': [{'id': 'id_value', 'zone': 'zone_value'}], 'read_endpoint': 'read_endpoint_value', 'read_endpoint_port': 1920, 'read_replicas_mode': 1, 'customer_managed_key': 'customer_managed_key_value', 'persistence_config': {'persistence_mode': 1, 'rdb_snapshot_period': 3, 'rdb_next_snapshot_time': {}, 'rdb_snapshot_start_time': {}}, 'suspension_reasons': [1], 'maintenance_version': 'maintenance_version_value', 'available_maintenance_versions': ['available_maintenance_versions_value1', 'available_maintenance_versions_value2']} + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = cloud_redis.UpdateInstanceRequest.meta.fields["instance"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["instance"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + {"field": field, "subfield": subfield, "is_repeated": is_repeated} + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["instance"][field])): + del request_init["instance"][field][i][subfield] + else: + del 
request_init["instance"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.update_instance(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_update_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_update_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_update_instance") as pre: + 
pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.UpdateInstanceRequest.pb(cloud_redis.UpdateInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.UpdateInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + await client.update_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_delete_instance_rest_asyncio_bad_request(request_type=cloud_redis.DeleteInstanceRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.delete_instance(request) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + cloud_redis.DeleteInstanceRequest, + dict, +]) +async def test_delete_instance_rest_asyncio_call_success(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + + # send a request that will satisfy transcoding + request_init = {'name': 'projects/sample1/locations/sample2/instances/sample3'} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name='operations/spam') + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = await client.delete_instance(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("null_interceptor", [True, False]) +async def test_delete_instance_rest_asyncio_interceptors(null_interceptor): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + transport = transports.AsyncCloudRedisRestTransport( + credentials=async_anonymous_credentials(), + interceptor=None if null_interceptor else transports.AsyncCloudRedisRestInterceptor(), + ) + client = CloudRedisAsyncClient(transport=transport) + + with mock.patch.object(type(client.transport._session), "request") as req, \ + mock.patch.object(path_template, "transcode") as transcode, \ + mock.patch.object(operation.Operation, "_set_result_from_operation"), \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance") as post, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "post_delete_instance_with_metadata") as post_with_metadata, \ + mock.patch.object(transports.AsyncCloudRedisRestInterceptor, "pre_delete_instance") as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = cloud_redis.DeleteInstanceRequest.pb(cloud_redis.DeleteInstanceRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.read = mock.AsyncMock(return_value=return_value) + + request = cloud_redis.DeleteInstanceRequest() + metadata =[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), 
metadata + + await client.delete_instance(request, metadata=[("key", "val"), ("cephalopod", "squid"),]) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + +@pytest.mark.asyncio +async def test_get_location_rest_asyncio_bad_request(request_type=locations_pb2.GetLocationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_location(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + locations_pb2.GetLocationRequest, + dict, +]) +async def test_get_location_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +@pytest.mark.asyncio +async def test_list_locations_rest_asyncio_bad_request(request_type=locations_pb2.ListLocationsRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.list_locations(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + locations_pb2.ListLocationsRequest, + dict, +]) +async def test_list_locations_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.list_locations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.ListLocationsResponse) + +@pytest.mark.asyncio +async def test_cancel_operation_rest_asyncio_bad_request(request_type=operations_pb2.CancelOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.cancel_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.CancelOperationRequest, + dict, +]) +async def test_cancel_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + +@pytest.mark.asyncio +async def test_delete_operation_rest_asyncio_bad_request(request_type=operations_pb2.DeleteOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.delete_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.DeleteOperationRequest, + dict, +]) +async def test_delete_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = '{}' + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.delete_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + +@pytest.mark.asyncio +async def test_get_operation_rest_asyncio_bad_request(request_type=operations_pb2.GetOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.get_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.GetOperationRequest, + dict, +]) +async def test_get_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +@pytest.mark.asyncio +async def test_list_operations_rest_asyncio_bad_request(request_type=operations_pb2.ListOperationsRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2'}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.list_operations(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.ListOperationsRequest, + dict, +]) +async def test_list_operations_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.list_operations(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + +@pytest.mark.asyncio +async def test_wait_operation_rest_asyncio_bad_request(request_type=operations_pb2.WaitOperationRequest): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + request = request_type() + request = json_format.ParseDict({'name': 'projects/sample1/locations/sample2/operations/sample3'}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req, pytest.raises(core_exceptions.BadRequest): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.read = mock.AsyncMock(return_value=b'{}') + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + await client.wait_operation(request) + +@pytest.mark.asyncio +@pytest.mark.parametrize("request_type", [ + operations_pb2.WaitOperationRequest, + dict, +]) +async def test_wait_operation_rest_asyncio(request_type): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + request_init = {'name': 'projects/sample1/locations/sample2/operations/sample3'} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(AsyncAuthorizedSession, 'request') as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value.encode('UTF-8')) + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = await client.wait_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_initialize_client_w_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_instances_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_instances), + '__call__') as call: + await client.list_instances(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.ListInstancesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_instance), + '__call__') as call: + await client.get_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.GetInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_instance), + '__call__') as call: + await client.create_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.CreateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_instance), + '__call__') as call: + await client.update_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.UpdateInstanceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_instance_empty_call_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_instance), + '__call__') as call: + await client.delete_instance(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = cloud_redis.DeleteInstanceRequest() + + assert args[0] == request_msg + + +def test_cloud_redis_rest_asyncio_lro_client(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. 
+ assert isinstance( + transport.operations_client, +operations_v1.AsyncOperationsRestClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + +def test_unsupported_parameter_rest_asyncio(): + if not HAS_ASYNC_REST_EXTRA: + pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") + options = client_options.ClientOptions(quota_project_id="octopus") + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + transport="rest_asyncio", + client_options=options + ) + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudRedisGrpcTransport, + ) + +def test_cloud_redis_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_cloud_redis_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.CloudRedisTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_instances', + 'get_instance', + 'create_instance', + 'update_instance', + 'delete_instance', + 'get_location', + 'list_locations', + 'get_operation', + 'wait_operation', + 'cancel_operation', + 'delete_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_redis_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_cloud_redis_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.redis_v1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.CloudRedisTransport() + adc.assert_called_once() + + +def test_cloud_redis_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + CloudRedisClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + ], +) +def test_cloud_redis_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudRedisGrpcTransport, + transports.CloudRedisGrpcAsyncIOTransport, + transports.CloudRedisRestTransport, + ], +) +def test_cloud_redis_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.CloudRedisGrpcTransport, grpc_helpers), + (transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "redis.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="redis.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + +def test_cloud_redis_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel") as mock_configure_mtls_channel: + transports.CloudRedisRestTransport ( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_redis_host_no_port(transport_name): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:443' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", + "rest", +]) +def test_cloud_redis_host_with_port(transport_name): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='redis.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'redis.googleapis.com:8000' + if transport_name in ['grpc', 'grpc_asyncio'] + else 'https://redis.googleapis.com:8000' + ) + +@pytest.mark.parametrize("transport_name", [ + "rest", +]) +def 
test_cloud_redis_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudRedisClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudRedisClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_instances._session + session2 = client2.transport.list_instances._session + assert session1 != session2 + session1 = client1.transport.get_instance._session + session2 = client2.transport.get_instance._session + assert session1 != session2 + session1 = client1.transport.create_instance._session + session2 = client2.transport.create_instance._session + assert session1 != session2 + session1 = client1.transport.update_instance._session + session2 = client2.transport.update_instance._session + assert session1 != session2 + session1 = client1.transport.delete_instance._session + session2 = client2.transport.delete_instance._session + assert session1 != session2 +def test_cloud_redis_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.CloudRedisGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_cloud_redis_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.CloudRedisGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio 
transport constructor. +@pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) +def test_cloud_redis_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_cloud_redis_grpc_lro_client(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cloud_redis_grpc_lro_async_client(): + client = CloudRedisAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_instance_path(): + project = "squid" + location = "clam" + instance = "whelk" + expected = "projects/{project}/locations/{location}/instances/{instance}".format(project=project, location=location, instance=instance, ) + actual = CloudRedisClient.instance_path(project, location, instance) + assert expected == actual + + +def test_parse_instance_path(): + expected = { + "project": "octopus", + "location": "oyster", + "instance": "nudibranch", + } + path = CloudRedisClient.instance_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_instance_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "cuttlefish" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = CloudRedisClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "mussel", + } + path = CloudRedisClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "winkle" + expected = "folders/{folder}".format(folder=folder, ) + actual = CloudRedisClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nautilus", + } + path = CloudRedisClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "scallop" + expected = "organizations/{organization}".format(organization=organization, ) + actual = CloudRedisClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "abalone", + } + path = CloudRedisClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "squid" + expected = "projects/{project}".format(project=project, ) + actual = CloudRedisClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "clam", + } + path = CloudRedisClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudRedisClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "whelk" + location = "octopus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = CloudRedisClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + } + path = CloudRedisClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CloudRedisClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.CloudRedisTransport, '_prep_wrapped_messages') as prep: + transport_class = CloudRedisClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_delete_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_delete_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = None
+
+        response = client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_cancel_operation_from_dict_async():
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            None
+        )
+        response = await client.cancel_operation(
+            request={
+                "name": "locations",
+            }
+        )
+        call.assert_called()
+
+def test_wait_operation(transport: str = "grpc"):
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = operations_pb2.WaitOperationRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.wait_operation), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation()
+        response = client.wait_operation(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, operations_pb2.Operation)
+@pytest.mark.asyncio
+async def test_wait_operation_async(transport: str = "grpc_asyncio"):
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+ request = operations_pb2.WaitOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_wait_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_wait_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.WaitOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.wait_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_wait_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_wait_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.wait_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.wait_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + +def test_list_locations_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_locations_from_dict(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = CloudRedisAsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + +def test_get_location_field_headers(): + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=locations/abc",) in kw["metadata"]
+
+def test_get_location_from_dict():
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_transport_close_grpc():
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_grpc_asyncio():
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="grpc_asyncio"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close:
+        async with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+def test_transport_close_rest():
+    client = CloudRedisClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest"
+    )
+    with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close:
+        with client:
+            close.assert_not_called()
+        close.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_transport_close_rest_asyncio():
+    if not HAS_ASYNC_REST_EXTRA:
+        pytest.skip("the library must be installed with the `async_rest` extra to test this feature.")
+    client = CloudRedisAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport="rest_asyncio"
+    )
+
with mock.patch.object(type(getattr(client.transport, "_session")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'rest', + 'grpc', + ] + for transport in transports: + client = CloudRedisClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (CloudRedisClient, transports.CloudRedisGrpcTransport), + (CloudRedisAsyncClient, transports.CloudRedisGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/gapic-generator/tests/integration/redis_selective_v1.yaml b/packages/gapic-generator/tests/integration/redis_selective_v1.yaml new file mode 100644 index 000000000000..eba6fffba93b --- /dev/null +++ b/packages/gapic-generator/tests/integration/redis_selective_v1.yaml @@ -0,0 +1,90 @@ +type: google.api.Service +config_version: 3 +name: redis.googleapis.com +title: Google Cloud 
Memorystore for Redis API + +apis: +- name: google.cloud.location.Locations +- name: google.cloud.redis.v1.CloudRedis +- name: google.longrunning.Operations + +types: +- name: google.cloud.redis.v1.LocationMetadata +- name: google.cloud.redis.v1.OperationMetadata +- name: google.cloud.redis.v1.ZoneMetadata + +documentation: + summary: Creates and manages Redis instances on the Google Cloud Platform. + rules: + - selector: google.cloud.location.Locations.GetLocation + description: Gets information about a location. + + - selector: google.cloud.location.Locations.ListLocations + description: Lists information about the supported locations for this service. + +backend: + rules: + - selector: google.cloud.location.Locations.GetLocation + deadline: 60.0 + - selector: google.cloud.location.Locations.ListLocations + deadline: 60.0 + - selector: 'google.cloud.redis.v1.CloudRedis.*' + deadline: 60.0 + - selector: google.cloud.redis.v1.CloudRedis.ListInstances + deadline: 20.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.cloud.location.Locations.GetLocation + get: '/v1/{name=projects/*/locations/*}' + - selector: google.cloud.location.Locations.ListLocations + get: '/v1/{name=projects/*}/locations' + - selector: google.longrunning.Operations.CancelOperation + post: '/v1/{name=projects/*/locations/*/operations/*}:cancel' + - selector: google.longrunning.Operations.DeleteOperation + delete: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.GetOperation + get: '/v1/{name=projects/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v1/{name=projects/*/locations/*}/operations' + - selector: google.longrunning.Operations.WaitOperation + post: '/v2/{name=projects/*/locations/*/operations/*}:wait' + body: '*' + +authentication: + rules: + - selector: google.cloud.location.Locations.GetLocation + oauth: + canonical_scopes: |- + 
https://www.googleapis.com/auth/cloud-platform + - selector: google.cloud.location.Locations.ListLocations + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.cloud.redis.v1.CloudRedis.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + - selector: 'google.longrunning.Operations.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this section +# when async rest is GA. +publishing: + library_settings: + - version: 'google.cloud.redis.v1' + python_settings: + experimental_features: + rest_async_io_enabled: true + common: + selective_gapic_generation: + methods: + - google.cloud.redis.v1.CloudRedis.CreateInstance + - google.cloud.redis.v1.CloudRedis.DeleteInstance + - google.cloud.redis.v1.CloudRedis.GetInstance + - google.cloud.redis.v1.CloudRedis.ListInstances + - google.cloud.redis.v1.CloudRedis.UpdateInstance \ No newline at end of file diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index afe9b1434fda..4ae1fda4f464 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -14,7 +14,7 @@ import collections import re -from typing import Sequence +from typing import Any, Dict, Sequence from unittest import mock import yaml @@ -2745,11 +2745,820 @@ def test_read_empty_python_settings_from_service_yaml(): == client_pb2.PythonSettings() -def test_incorrect_library_settings_version(): - # NOTE: This test case ensures that the generator is able to read - # from the default library settings if the version specified against the - # library settings in the service yaml of an API differs from the version - # of the API. 
+def test_python_settings_selective_gapic_nonexistent_method_raises_error(): + """ + Test that `ClientLibrarySettingsError` is raised when there are nonexistent methods in + `client_pb2.ClientLibrarySettings.PythonSettings.CommonSettings.SelectiveGapicGeneration`. + """ + client_library_settings = [ + client_pb2.ClientLibrarySettings( + version="google.example.v1beta1", + python_settings=client_pb2.PythonSettings( + common=client_pb2.CommonLanguageSettings( + selective_gapic_generation=client_pb2.SelectiveGapicGeneration( + methods=[ + "google.example.v1beta1.ServiceOne.DoesNotExist"] + ) + ) + ) + ) + ] + fd = get_file_descriptor_proto_for_tests(fields=[]) + api_schema = api.API.build(fd, "google.example.v1beta1") + with pytest.raises( + api.ClientLibrarySettingsError, match="(?i)google.example.v1beta1.ServiceOne.DoesNotExist: Method does not exist" + ): + api_schema.enforce_valid_library_settings(client_library_settings) + + +def test_python_settings_selective_gapic_version_mismatch_method_raises_error(): + """ + Test that `ClientLibrarySettingsError` is raised when a method listed for selective generation + exists only in a different version of the library. + """ + client_library_settings = [ + client_pb2.ClientLibrarySettings( + version="google.example.v2beta2", + python_settings=client_pb2.PythonSettings( + common=client_pb2.CommonLanguageSettings( + selective_gapic_generation=client_pb2.SelectiveGapicGeneration( + methods=["google.example.v1beta1.ServiceOne.Example1"] + ) + ) + ) + ) + ] + fd = get_file_descriptor_proto_for_tests(fields=[]) + api_schema = api.API.build(fd, "google.example.v1beta1") + with pytest.raises( + api.ClientLibrarySettingsError, match="(?i)google.example.v1beta1.ServiceOne.Example1: Mismatched version for method." 
+ ): + api_schema.enforce_valid_library_settings(client_library_settings) + + +def get_service_yaml_for_selective_gapic_tests( + apis: Sequence[str] = ["google.example.v1.FooService"], + methods=["google.example.v1.FooService.GetFoo"], +) -> Dict[str, Any]: + return { + "apis": [ + {"name": api} for api in apis + ], + "publishing": { + "library_settings": [ + { + "version": "google.example.v1", + "python_settings": { + "experimental_features": {"rest_async_io_enabled": True}, + "common": { + "selective_gapic_generation": { + "methods": methods + } + } + }, + } + ] + }, + } + + +def test_selective_gapic_api_build(): + # Put together a couple of minimal protos. + fd = ( + make_file_pb2( + name='dep.proto', + package='google.dep', + messages=(make_message_pb2(name='ImportedMessage', fields=()),), + ), + make_file_pb2( + name='common.proto', + package='google.example.v1.common', + messages=( + make_message_pb2(name='Bar'), + make_message_pb2(name='Baz'), + ), + ), + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='GetFooRequest', fields=( + make_field_pb2(name='imported_message', number=1, + type_name='.google.dep.ImportedMessage'), + make_field_pb2(name='primitive', number=2, type=1), + make_field_pb2(name='bar', number=1, + type_name='.google.example.v1.common.Bar') + )), + make_message_pb2(name='GetFooResponse', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + make_message_pb2(name='DeleteFooRequest', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + make_field_pb2(name='baz', number=2, + type_name='.google.example.v1.common.Baz'), + )), + make_message_pb2(name='DeleteFooResponse', fields=( + make_field_pb2(name='success', number=1, type=8), + )), + ), + services=(descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + 
name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='DeleteFoo', + input_type='google.example.v1.DeleteFooRequest', + output_type='google.example.v1.DeleteFooResponse', + ), + ), + ),), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + methods=["google.example.v1.FooService.GetFoo"] + ) + opts = Options(service_yaml_config=service_yaml_config) + + # Create an API with those protos. + api_schema = api.API.build(fd, package='google.example.v1', opts=opts) + + # Establish that the API has the data expected. + assert isinstance(api_schema, api.API) + + # foo.proto, common.proto, dep.proto + assert len(api_schema.all_protos) == 3 + assert len(api_schema.protos) == 2 # foo.proto, common.proto + + assert 'google.dep.ImportedMessage' not in api_schema.messages + assert 'google.example.v1.Foo' in api_schema.messages + assert 'google.example.v1.GetFooRequest' in api_schema.messages + assert 'google.example.v1.GetFooResponse' in api_schema.messages + assert 'google.example.v1.DeleteFooRequest' not in api_schema.messages + assert 'google.example.v1.DeleteFooResponse' not in api_schema.messages + assert 'google.example.v1.FooService' in api_schema.services + assert len(api_schema.enums) == 0 + assert api_schema.protos['foo.proto'].python_modules == ( + imp.Import(package=('google', 'dep'), module='dep_pb2'), + imp.Import(package=('google', 'example_v1', + 'common', 'types'), module='common'), + ) + + assert api_schema.requires_package(('google', 'example', 'v1')) + + assert not api_schema.requires_package(('elgoog', 'example', 'v1')) + + # Establish that the subpackages still work even when they are transitively + # partially pruned. 
+ assert 'common' in api_schema.subpackages + sub = api_schema.subpackages['common'] + assert len(sub.protos) == 1 + assert 'google.example.v1.common.Bar' in sub.messages + assert 'google.example.v1.common.Baz' not in sub.messages + + # Establish that methods have been truncated + assert 'google.example.v1.FooService.GetFoo' in api_schema.all_methods + assert 'google.example.v1.FooService.DeleteFoo' not in api_schema.all_methods + + foo_service = api_schema.protos['foo.proto'].services['google.example.v1.FooService'] + assert 'DeleteFoo' not in foo_service.methods + assert 'GetFoo' in foo_service.methods + + +def test_selective_gapic_api_build_with_lro(): + # Set up a prior proto that mimics google/protobuf/empty.proto + lro_proto = api.Proto.build(make_file_pb2( + name='operations.proto', package='google.longrunning', + messages=(make_message_pb2(name='Operation'),), + ), file_to_generate=False, naming=make_naming()) + + # Set up methods with LRO annotations. + create_foo_method_pb2 = descriptor_pb2.MethodDescriptorProto( + name='AsyncCreateFoo', + input_type='google.example.v1.AsyncCreateFooRequest', + output_type='google.longrunning.Operation', + ) + create_foo_method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( + operations_pb2.OperationInfo( + response_type='google.example.v1.AsyncCreateFooResponse', + metadata_type='google.example.v1.AsyncCreateFooMetadata', + ), + ) + + create_bar_method_pb2 = descriptor_pb2.MethodDescriptorProto( + name='AsyncCreateBar', + input_type='google.example.v1.AsyncCreateBarRequest', + output_type='google.longrunning.Operation', + ) + create_bar_method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( + operations_pb2.OperationInfo( + response_type='google.example.v1.AsyncCreateBarResponse', + metadata_type='google.example.v1.AsyncCreateBarMetadata', + ), + ) + + # Set up the service with an RPC. 
+ fd = ( + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='Bar', fields=()), + make_message_pb2(name='AsyncCreateFooRequest', fields=()), + make_message_pb2(name='AsyncCreateFooResponse', fields=()), + make_message_pb2(name='AsyncCreateFooMetadata', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + make_message_pb2(name='AsyncCreateBarRequest', fields=()), + make_message_pb2(name='AsyncCreateBarResponse', fields=()), + make_message_pb2(name='AsyncCreateBarMetadata', fields=( + make_field_pb2(name='bar', number=1, + type_name='.google.example.v1.Bar'), + )), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + create_foo_method_pb2, + create_bar_method_pb2, + ), + ), + ) + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=['google.example.v1.FooService'], + methods=['google.example.v1.FooService.AsyncCreateFoo'] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fd, + 'google.example.v1', + opts=opts, + prior_protos={ + 'google/longrunning/operations.proto': lro_proto, + }) + + assert 'google.example.v1.Foo' in api_schema.messages + assert 'google.example.v1.AsyncCreateFooRequest' in api_schema.messages + assert 'google.example.v1.AsyncCreateFooResponse' in api_schema.messages + assert 'google.example.v1.AsyncCreateFooMetadata' in api_schema.messages + + assert 'google.example.v1.Bar' not in api_schema.messages + assert 'google.example.v1.AsyncCreateBarRequest' not in api_schema.messages + assert 'google.example.v1.AsyncCreateBarResponse' not in api_schema.messages + assert 'google.example.v1.AsyncCreateBarMetadata' not in api_schema.messages + + +def test_selective_gapic_api_build_remove_unnecessary_services(): + # Put together a couple of minimal protos. 
+ fd = ( + make_file_pb2( + name='foobar.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='Bar', fields=()), + make_message_pb2(name='GetFooRequest', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + make_message_pb2(name='GetFooResponse', fields=()), + make_message_pb2(name='GetBarRequest', fields=( + make_field_pb2(name='bar', number=1, + type_name='.google.example.v1.Bar'), + )), + make_message_pb2(name='GetBarResponse', fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name='BarService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetBar', + input_type='google.example.v1.GetBarRequest', + output_type='google.example.v1.GetBarResponse', + ), + ), + ), + ), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=['google.example.v1.FooService', 'google.example.v1.BarService'], + methods=['google.example.v1.FooService.GetFoo'] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + + assert 'google.example.v1.Foo' in api_schema.messages + assert 'google.example.v1.GetFooRequest' in api_schema.messages + assert 'google.example.v1.GetFooResponse' in api_schema.messages + + assert 'google.example.v1.Bar' not in api_schema.messages + assert 'google.example.v1.GetBarRequest' not in api_schema.messages + assert 'google.example.v1.GetBarResponse' not in api_schema.messages + + assert 'google.example.v1.FooService' in api_schema.services + assert 'google.example.v1.BarService' not in api_schema.services + + +def test_selective_gapic_api_build_remove_unnecessary_proto_files(): + fd 
= ( + make_file_pb2( + name='foo_common.proto', + package='google.example.v1.foo_common', + messages=( + make_message_pb2(name='Foo'), + ), + ), + make_file_pb2( + name='bar_common.proto', + package='google.example.v1.bar_common', + messages=( + make_message_pb2(name='Bar'), + ), + ), + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=()), + make_message_pb2(name='GetFooRequest', fields=( + make_field_pb2( + name='foo', number=1, type_name='.google.example.v1.foo_common.Foo'), + )), + make_message_pb2(name='GetFooResponse', fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + ), + ), + ), + ), + make_file_pb2( + name='bar.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Bar', fields=()), + make_message_pb2(name='GetBarRequest', fields=( + make_field_pb2( + name='bar', number=1, type_name='.google.example.v1.bar_common.Bar'), + )), + make_message_pb2(name='GetBarResponse', fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='BarService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetBar', + input_type='google.example.v1.GetBarRequest', + output_type='google.example.v1.GetBarResponse', + ), + ), + ), + ), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=['google.example.v1.FooService', 'google.example.v1.BarService'], + methods=['google.example.v1.FooService.GetFoo'] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + + assert 'google.example.v1.foo_common.Foo' in api_schema.messages + assert 'google.example.v1.GetFooRequest' in api_schema.messages + assert 'google.example.v1.GetFooResponse' in 
api_schema.messages + + assert 'google.example.v1.bar_common.Bar' not in api_schema.messages + assert 'google.example.v1.GetBarRequest' not in api_schema.messages + assert 'google.example.v1.GetBarResponse' not in api_schema.messages + + assert 'google.example.v1.FooService' in api_schema.services + assert 'google.example.v1.BarService' not in api_schema.services + + assert 'foo.proto' in api_schema.protos + assert 'foo_common.proto' in api_schema.protos + assert 'bar.proto' not in api_schema.protos + assert 'bar_common.proto' not in api_schema.protos + + # Check that the sub-packages that have been completely pruned are excluded from generation, + # but the ones that have only been partially pruned will still be appropriately included. + assert 'foo_common' in api_schema.subpackages + sub = api_schema.subpackages['foo_common'] + assert len(sub.protos) == 1 + assert 'google.example.v1.foo_common.Foo' in sub.messages + assert 'bar_common' not in api_schema.subpackages + + +def test_selective_gapic_api_build_with_enums(): + fd = ( + make_file_pb2( + name='foobar.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Foo', fields=( + make_field_pb2(name='status', number=1, type=14, + type_name='.google.example.v1.FooStatus'), + )), + make_message_pb2(name='Bar', fields=( + make_field_pb2(name='status', number=1, type=14, + type_name='.google.example.v1.BarStatus'), + )), + make_message_pb2(name='GetFooRequest', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + make_message_pb2(name='GetFooResponse', fields=()), + make_message_pb2(name='GetBarRequest', fields=( + make_field_pb2(name='bar', number=1, + type_name='.google.example.v1.Bar'), + )), + make_message_pb2(name='GetBarResponse', fields=()), + ), + enums=( + make_enum_pb2( + 'FooStatus', + 'YES', + 'NO' + ), + make_enum_pb2( + 'BarStatus', + 'YES', + 'NO' + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='FooService', + 
method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='GetBar', + input_type='google.example.v1.GetBarRequest', + output_type='google.example.v1.GetBarResponse', + ), + ), + ), + ), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=['google.example.v1.FooService'], + methods=['google.example.v1.FooService.GetFoo'] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + + assert 'google.example.v1.FooStatus' in api_schema.enums + assert 'google.example.v1.BarStatus' not in api_schema.enums + assert 'google.example.v1.FooStatus' in api_schema.top_level_enums + + +def test_selective_gapic_api_build_with_nested_fields(): + # Test that, when including or excluding messages for selective GAPIC generation, + # any nested messages they may contain are included or excluded appropriately. 
+ fd = ( + make_file_pb2( + name='foobar.proto', + package='google.example.v1', + messages=( + make_message_pb2( + name='Foo', + nested_type=( + make_message_pb2( + name='Bar', + fields=( + make_field_pb2( + name='baz', number=1, type_name='.google.example.v1.Baz'), + ) + ), + ), + enum_type=( + make_enum_pb2( + 'FooStatus', + 'YES', + 'NO' + ), + ) + ), + make_message_pb2( + name='Spam', + nested_type=( + make_message_pb2( + name='Ham', + fields=( + make_field_pb2( + name='eggs', number=1, type_name='.google.example.v1.Eggs'), + ) + ), + ), + enum_type=( + make_enum_pb2( + 'SpamStatus', + 'YES', + 'NO' + ), + ) + ), + make_message_pb2(name='Baz'), + make_message_pb2(name='Eggs'), + make_message_pb2(name='GetFooRequest', fields=( + make_field_pb2(name='foo', number=1, + type_name='.google.example.v1.Foo'), + )), + make_message_pb2(name='GetFooResponse', fields=()), + make_message_pb2(name='GetBarRequest', fields=( + make_field_pb2(name='spam', number=1, + type_name='.google.example.v1.Spam'), + )), + make_message_pb2(name='GetBarResponse', fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='GetBar', + input_type='google.example.v1.GetBarRequest', + output_type='google.example.v1.GetBarResponse', + ), + ), + ), + ), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=['google.example.v1.FooService'], + methods=['google.example.v1.FooService.GetFoo'] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + + assert 'google.example.v1.Baz' in api_schema.messages + assert 'google.example.v1.Foo.FooStatus' in api_schema.enums + assert 'google.example.v1.Foo.Bar' in api_schema.messages + + # Check that we can 
exclude nested types as well + assert 'google.example.v1.Spam' not in api_schema.messages + assert 'google.example.v1.Spam.SpamStatus' not in api_schema.enums + assert 'google.example.v1.Spam.Ham' not in api_schema.messages + + +@pytest.mark.parametrize("reference_attr", ["type", "child_type"]) +def test_selective_gapic_api_build_with_resources(reference_attr): + test_input_names = [ + ('foo.bar/Foo', 'Foo', 'FooDep', 'GetFooRequest', 'GetFooResponse'), + ('foo.bar/Bar', 'Bar', 'BarDep', 'GetBarRequest', 'GetBarResponse'), + ] + + messages = [] + + for ( + resource_type, + message_name, + message_dep_name, + request_message_name, + response_message_name, + ) in test_input_names: + resource_message_dep = make_message_pb2(name=message_dep_name) + + # Make sure that we traverse down the fields in the referenced message type. + resource_message = make_message_pb2( + name=message_name, + fields=( + make_field_pb2( + name="dep", number=1, type_name=f".google.example.v1.{message_dep_name}"), + ), + ) + request_message = make_message_pb2( + name=request_message_name, + fields=( + make_field_pb2(name="thing", number=1, type=9), + ), + ) + response_message = make_message_pb2(name=response_message_name) + + # Set up the resource + resource_message_opts = resource_message.options.Extensions[resource_pb2.resource] + resource_message_opts.type = resource_type + resource_message_opts.pattern.append( + "octopus/{octopus}/squid/{squid}") + + # Set up the reference + request_message_thing_field_opts = \ + request_message.field[0].options.Extensions[resource_pb2.resource_reference] + setattr(request_message_thing_field_opts, + reference_attr, resource_type) + + # Add to messages + messages.append(resource_message_dep) + messages.append(resource_message) + messages.append(request_message) + messages.append(response_message) + + fds = ( + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=messages, + services=(descriptor_pb2.ServiceDescriptorProto( + 
name='FooService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='GetFoo', + input_type='google.example.v1.GetFooRequest', + output_type='google.example.v1.GetFooResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='GetBar', + input_type='google.example.v1.GetBarRequest', + output_type='google.example.v1.GetBarResponse', + ), + ), + ),), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + methods=['google.example.v1.FooService.GetFoo'] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fds, package='google.example.v1', opts=opts) + + assert 'google.example.v1.Foo' in api_schema.messages + assert 'google.example.v1.FooDep' in api_schema.messages + assert 'google.example.v1.GetFooRequest' in api_schema.messages + assert 'google.example.v1.GetFooResponse' in api_schema.messages + + assert 'google.example.v1.Bar' not in api_schema.messages + assert 'google.example.v1.BarDep' not in api_schema.messages + assert 'google.example.v1.GetBarRequest' not in api_schema.messages + assert 'google.example.v1.GetBarResponse' not in api_schema.messages + + # Ensure we're also pruning resource messages for the files + resource_messages = api_schema.protos['foo.proto'].resource_messages + assert 'foo.bar/Foo' in resource_messages + assert 'foo.bar/Bar' not in resource_messages + + +def test_selective_gapic_api_build_extended_lro(): + def make_initiate_options(service_name): + options = descriptor_pb2.MethodOptions() + options.Extensions[ex_ops_pb2.operation_service] = service_name + return options + + polling_method_options = descriptor_pb2.MethodOptions() + polling_method_options.Extensions[ex_ops_pb2.operation_polling_method] = True + + T = descriptor_pb2.FieldDescriptorProto.Type + operation_fields = tuple( + make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) + for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + ) + for f in 
operation_fields: + options = descriptor_pb2.FieldOptions() + options.Extensions[ex_ops_pb2.operation_field] = f.number + f.options.MergeFrom(options) + + fds = ( + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='Operation', fields=operation_fields), + make_message_pb2(name='CreateFooRequest'), + make_message_pb2(name='GetFooOperationRequest'), + make_message_pb2(name='CreateBarRequest'), + make_message_pb2(name='GetBarOperationRequest'), + make_message_pb2(name='PoorlyOrganizedMethodRequest'), + make_message_pb2(name='PoorlyOrganizedMethodReponse') + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='FooOpsService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='Get', + input_type='google.example.v1.GetFooOperationRequest', + output_type='google.example.v1.Operation', + options=polling_method_options, + ), + descriptor_pb2.MethodDescriptorProto( + name='PoorlyOrganizedMethod', + input_type='google.example.v1.PoorlyOrganizedMethodRequest', + output_type='google.example.v1.PoorlyOrganizedMethodReponse', + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name='BarOpsService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='Get', + input_type='google.example.v1.GetBarOperationRequest', + output_type='google.example.v1.Operation', + options=polling_method_options, + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name='BasicService', + method=( + descriptor_pb2.MethodDescriptorProto( + name='CreateFoo', + input_type='google.example.v1.CreateFooRequest', + output_type='google.example.v1.Operation', + options=make_initiate_options('FooOpsService'), + ), + descriptor_pb2.MethodDescriptorProto( + name='CreateBar', + input_type='google.example.v1.CreateBarRequest', + output_type='google.example.v1.Operation', + options=make_initiate_options('BarOpsService'), + ), + ), + ), + ), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=[ + 
'google.example.v1.FooOpsService', + 'google.example.v1.BarOpsService', + 'google.example.v1.BasicService' + ], + methods=[ + 'google.example.v1.BasicService.CreateFoo', + ] + ) + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fds, 'google.example.v1', opts=opts) + + assert 'google.example.v1.BasicService' in api_schema.services + assert 'google.example.v1.FooOpsService' in api_schema.services + assert 'google.example.v1.FooOpsService.Get' in api_schema.all_methods + assert 'google.example.v1.Operation' in api_schema.messages + assert 'google.example.v1.CreateFooRequest' in api_schema.messages + assert 'google.example.v1.GetFooOperationRequest' in api_schema.messages + + assert 'google.example.v1.BarOpsService' not in api_schema.services + assert 'google.example.v1.GetBarOperationRequest' not in api_schema.messages + assert 'google.example.v1.CreateBarRequest' not in api_schema.messages + assert 'google.example.v1.FooOpsService.PoorlyOrganizedMethod' not in api_schema.all_methods + + +def test_read_empty_python_settings_from_service_yaml(): service_yaml_config = { "apis": [ {"name": "google.example.v1beta1.ServiceOne.Example1"}, From 3ba958abb9f64d314747c88d80e5d51071d19ddc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 15:36:12 -0800 Subject: [PATCH 1238/1339] chore(python): fix docs publish build (#2311) Co-authored-by: Owl Bot Co-authored-by: Victor Chudnovsky --- .../gapic-generator/.github/.OwlBot.lock.yaml | 6 +- .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 243 +++++++++++++++++- 3 files changed, 238 insertions(+), 12 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 26306af66f81..4c0027ff1c61 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ 
-1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8e3e7e18255c22d1489258d0374c901c01f9c4fd77a12088670cd73d580aa737 -# created: 2024-12-17T00:59:58.625514486Z + digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a +# created: 2025-01-16T15:24:11.364245182Z diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.in b/packages/gapic-generator/.kokoro/docker/docs/requirements.in index 816817c672a1..586bd07037ae 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.in +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index f99a5c4aac7f..a9360a25b707 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + 
--hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + 
--hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + 
--hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + 
--hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + 
--hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + 
--hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + 
--hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via 
google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + 
--hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox From 916259329ba2ef37727e061a3429fe95cfce95a7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 30 Jan 2025 20:54:13 +0100 Subject: [PATCH 1239/1339] chore(deps): update all dependencies (#2305) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 172 +++++++++++----------- 1 file changed, 86 insertions(+), 86 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index eda186e2713e..da487d725d94 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,83 +8,83 @@ aiohappyeyeballs==2.4.4 \ --hash=sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745 \ --hash=sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8 # via aiohttp -aiohttp==3.11.10 \ - --hash=sha256:012f176945af138abc10c4a48743327a92b4ca9adc7a0e078077cdb5dbab7be0 \ - 
--hash=sha256:02c13415b5732fb6ee7ff64583a5e6ed1c57aa68f17d2bda79c04888dfdc2769 \ - --hash=sha256:03b6002e20938fc6ee0918c81d9e776bebccc84690e2b03ed132331cca065ee5 \ - --hash=sha256:04814571cb72d65a6899db6099e377ed00710bf2e3eafd2985166f2918beaf59 \ - --hash=sha256:0580f2e12de2138f34debcd5d88894786453a76e98febaf3e8fe5db62d01c9bf \ - --hash=sha256:06a8e2ee1cbac16fe61e51e0b0c269400e781b13bcfc33f5425912391a542985 \ - --hash=sha256:076bc454a7e6fd646bc82ea7f98296be0b1219b5e3ef8a488afbdd8e81fbac50 \ - --hash=sha256:0c9527819b29cd2b9f52033e7fb9ff08073df49b4799c89cb5754624ecd98299 \ - --hash=sha256:0dc49f42422163efb7e6f1df2636fe3db72713f6cd94688e339dbe33fe06d61d \ - --hash=sha256:14cdb5a9570be5a04eec2ace174a48ae85833c2aadc86de68f55541f66ce42ab \ - --hash=sha256:15fccaf62a4889527539ecb86834084ecf6e9ea70588efde86e8bc775e0e7542 \ - --hash=sha256:24213ba85a419103e641e55c27dc7ff03536c4873470c2478cce3311ba1eee7b \ - --hash=sha256:31d5093d3acd02b31c649d3a69bb072d539d4c7659b87caa4f6d2bcf57c2fa2b \ - --hash=sha256:3691ed7726fef54e928fe26344d930c0c8575bc968c3e239c2e1a04bd8cf7838 \ - --hash=sha256:386fbe79863eb564e9f3615b959e28b222259da0c48fd1be5929ac838bc65683 \ - --hash=sha256:3bbbfff4c679c64e6e23cb213f57cc2c9165c9a65d63717108a644eb5a7398df \ - --hash=sha256:3de34936eb1a647aa919655ff8d38b618e9f6b7f250cc19a57a4bf7fd2062b6d \ - --hash=sha256:40d1c7a7f750b5648642586ba7206999650208dbe5afbcc5284bcec6579c9b91 \ - --hash=sha256:44224d815853962f48fe124748227773acd9686eba6dc102578defd6fc99e8d9 \ - --hash=sha256:47ad15a65fb41c570cd0ad9a9ff8012489e68176e7207ec7b82a0940dddfd8be \ - --hash=sha256:482cafb7dc886bebeb6c9ba7925e03591a62ab34298ee70d3dd47ba966370d2c \ - --hash=sha256:49c7dbbc1a559ae14fc48387a115b7d4bbc84b4a2c3b9299c31696953c2a5219 \ - --hash=sha256:4b2c7ac59c5698a7a8207ba72d9e9c15b0fc484a560be0788b31312c2c5504e4 \ - --hash=sha256:4cca22a61b7fe45da8fc73c3443150c3608750bbe27641fc7558ec5117b27fdf \ - --hash=sha256:4cfce37f31f20800a6a6620ce2cdd6737b82e42e06e6e9bd1b36f546feb3c44f \ - 
--hash=sha256:502a1464ccbc800b4b1995b302efaf426e8763fadf185e933c2931df7db9a199 \ - --hash=sha256:53bf2097e05c2accc166c142a2090e4c6fd86581bde3fd9b2d3f9e93dda66ac1 \ - --hash=sha256:593c114a2221444f30749cc5e5f4012488f56bd14de2af44fe23e1e9894a9c60 \ - --hash=sha256:5d6958671b296febe7f5f859bea581a21c1d05430d1bbdcf2b393599b1cdce77 \ - --hash=sha256:5ef359ebc6949e3a34c65ce20230fae70920714367c63afd80ea0c2702902ccf \ - --hash=sha256:613e5169f8ae77b1933e42e418a95931fb4867b2991fc311430b15901ed67079 \ - --hash=sha256:61b9bae80ed1f338c42f57c16918853dc51775fb5cb61da70d590de14d8b5fb4 \ - --hash=sha256:6362cc6c23c08d18ddbf0e8c4d5159b5df74fea1a5278ff4f2c79aed3f4e9f46 \ - --hash=sha256:65a96e3e03300b41f261bbfd40dfdbf1c301e87eab7cd61c054b1f2e7c89b9e8 \ - --hash=sha256:65e55ca7debae8faaffee0ebb4b47a51b4075f01e9b641c31e554fd376595c6c \ - --hash=sha256:68386d78743e6570f054fe7949d6cb37ef2b672b4d3405ce91fafa996f7d9b4d \ - --hash=sha256:68ff6f48b51bd78ea92b31079817aff539f6c8fc80b6b8d6ca347d7c02384e33 \ - --hash=sha256:6ab29b8a0beb6f8eaf1e5049252cfe74adbaafd39ba91e10f18caeb0e99ffb34 \ - --hash=sha256:77ae58586930ee6b2b6f696c82cf8e78c8016ec4795c53e36718365f6959dc82 \ - --hash=sha256:77c4aa15a89847b9891abf97f3d4048f3c2d667e00f8a623c89ad2dccee6771b \ - --hash=sha256:78153314f26d5abef3239b4a9af20c229c6f3ecb97d4c1c01b22c4f87669820c \ - --hash=sha256:7852bbcb4d0d2f0c4d583f40c3bc750ee033265d80598d0f9cb6f372baa6b836 \ - --hash=sha256:7e97d622cb083e86f18317282084bc9fbf261801b0192c34fe4b1febd9f7ae69 \ - --hash=sha256:7f3dc0e330575f5b134918976a645e79adf333c0a1439dcf6899a80776c9ab39 \ - --hash=sha256:80886dac673ceaef499de2f393fc80bb4481a129e6cb29e624a12e3296cc088f \ - --hash=sha256:811f23b3351ca532af598405db1093f018edf81368e689d1b508c57dcc6b6a32 \ - --hash=sha256:86a5dfcc39309470bd7b68c591d84056d195428d5d2e0b5ccadfbaf25b026ebc \ - --hash=sha256:8b3cf2dc0f0690a33f2d2b2cb15db87a65f1c609f53c37e226f84edb08d10f52 \ - --hash=sha256:8cc5203b817b748adccb07f36390feb730b1bc5f56683445bfe924fc270b8816 \ - 
--hash=sha256:909af95a72cedbefe5596f0bdf3055740f96c1a4baa0dd11fd74ca4de0b4e3f1 \ - --hash=sha256:974d3a2cce5fcfa32f06b13ccc8f20c6ad9c51802bb7f829eae8a1845c4019ec \ - --hash=sha256:98283b94cc0e11c73acaf1c9698dea80c830ca476492c0fe2622bd931f34b487 \ - --hash=sha256:98f5635f7b74bcd4f6f72fcd85bea2154b323a9f05226a80bc7398d0c90763b0 \ - --hash=sha256:99b7920e7165be5a9e9a3a7f1b680f06f68ff0d0328ff4079e5163990d046767 \ - --hash=sha256:9bca390cb247dbfaec3c664326e034ef23882c3f3bfa5fbf0b56cad0320aaca5 \ - --hash=sha256:9e2e576caec5c6a6b93f41626c9c02fc87cd91538b81a3670b2e04452a63def6 \ - --hash=sha256:9ef405356ba989fb57f84cac66f7b0260772836191ccefbb987f414bcd2979d9 \ - --hash=sha256:a55d2ad345684e7c3dd2c20d2f9572e9e1d5446d57200ff630e6ede7612e307f \ - --hash=sha256:ab7485222db0959a87fbe8125e233b5a6f01f4400785b36e8a7878170d8c3138 \ - --hash=sha256:b1fc6b45010a8d0ff9e88f9f2418c6fd408c99c211257334aff41597ebece42e \ - --hash=sha256:b78f053a7ecfc35f0451d961dacdc671f4bcbc2f58241a7c820e9d82559844cf \ - --hash=sha256:b99acd4730ad1b196bfb03ee0803e4adac371ae8efa7e1cbc820200fc5ded109 \ - --hash=sha256:be2b516f56ea883a3e14dda17059716593526e10fb6303189aaf5503937db408 \ - --hash=sha256:beb39a6d60a709ae3fb3516a1581777e7e8b76933bb88c8f4420d875bb0267c6 \ - --hash=sha256:bf3d1a519a324af764a46da4115bdbd566b3c73fb793ffb97f9111dbc684fc4d \ - --hash=sha256:c49a76c1038c2dd116fa443eba26bbb8e6c37e924e2513574856de3b6516be99 \ - --hash=sha256:c5532f0441fc09c119e1dca18fbc0687e64fbeb45aa4d6a87211ceaee50a74c4 \ - --hash=sha256:c6b9e6d7e41656d78e37ce754813fa44b455c3d0d0dced2a047def7dc5570b74 \ - --hash=sha256:c87bf31b7fdab94ae3adbe4a48e711bfc5f89d21cf4c197e75561def39e223bc \ - --hash=sha256:cbad88a61fa743c5d283ad501b01c153820734118b65aee2bd7dbb735475ce0d \ - --hash=sha256:cf14627232dfa8730453752e9cdc210966490992234d77ff90bc8dc0dce361d5 \ - --hash=sha256:db1d0b28fcb7f1d35600150c3e4b490775251dea70f894bf15c678fdd84eda6a \ - --hash=sha256:ddf5f7d877615f6a1e75971bfa5ac88609af3b74796ff3e06879e8422729fd01 \ - 
--hash=sha256:e44a9a3c053b90c6f09b1bb4edd880959f5328cf63052503f892c41ea786d99f \ - --hash=sha256:efb15a17a12497685304b2d976cb4939e55137df7b09fa53f1b6a023f01fcb4e \ - --hash=sha256:fbbaea811a2bba171197b08eea288b9402faa2bab2ba0858eecdd0a4105753a3 +aiohttp==3.11.11 \ + --hash=sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f \ + --hash=sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33 \ + --hash=sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1 \ + --hash=sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665 \ + --hash=sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9 \ + --hash=sha256:10b4ff0ad793d98605958089fabfa350e8e62bd5d40aa65cdc69d6785859f94e \ + --hash=sha256:1642eceeaa5ab6c9b6dfeaaa626ae314d808188ab23ae196a34c9d97efb68350 \ + --hash=sha256:1dac54e8ce2ed83b1f6b1a54005c87dfed139cf3f777fdc8afc76e7841101226 \ + --hash=sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d \ + --hash=sha256:1f21bb8d0235fc10c09ce1d11ffbd40fc50d3f08a89e4cf3a0c503dc2562247a \ + --hash=sha256:2170816e34e10f2fd120f603e951630f8a112e1be3b60963a1f159f5699059a6 \ + --hash=sha256:21fef42317cf02e05d3b09c028712e1d73a9606f02467fd803f7c1f39cc59add \ + --hash=sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e \ + --hash=sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8 \ + --hash=sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03 \ + --hash=sha256:3e23419d832d969f659c208557de4a123e30a10d26e1e14b73431d3c13444c2e \ + --hash=sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2 \ + --hash=sha256:44167fc6a763d534a6908bdb2592269b4bf30a03239bcb1654781adf5e49caf1 \ + --hash=sha256:479b8c6ebd12aedfe64563b85920525d05d394b85f166b7873c8bde6da612f9c \ + --hash=sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538 \ + 
--hash=sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5 \ + --hash=sha256:4eed954b161e6b9b65f6be446ed448ed3921763cc432053ceb606f89d793927e \ + --hash=sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9 \ + --hash=sha256:568c1236b2fde93b7720f95a890741854c1200fba4a3471ff48b2934d2d93fd3 \ + --hash=sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438 \ + --hash=sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12 \ + --hash=sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3 \ + --hash=sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853 \ + --hash=sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287 \ + --hash=sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2 \ + --hash=sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9 \ + --hash=sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c \ + --hash=sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55 \ + --hash=sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c \ + --hash=sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e \ + --hash=sha256:8be8508d110d93061197fd2d6a74f7401f73b6d12f8822bbcd6d74f2b55d71b1 \ + --hash=sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c \ + --hash=sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194 \ + --hash=sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773 \ + --hash=sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e \ + --hash=sha256:943a8b052e54dfd6439fd7989f67fc6a7f2138d0a2cf0a7de5f18aa4fe7eb3b1 \ + --hash=sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d \ + --hash=sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600 \ + --hash=sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34 \ + 
--hash=sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3 \ + --hash=sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8 \ + --hash=sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8 \ + --hash=sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2 \ + --hash=sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff \ + --hash=sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62 \ + --hash=sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac \ + --hash=sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef \ + --hash=sha256:b540bd67cfb54e6f0865ceccd9979687210d7ed1a1cc8c01f8e67e2f1e883d28 \ + --hash=sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab \ + --hash=sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104 \ + --hash=sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76 \ + --hash=sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e \ + --hash=sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d \ + --hash=sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a \ + --hash=sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5 \ + --hash=sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745 \ + --hash=sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4 \ + --hash=sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99 \ + --hash=sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43 \ + --hash=sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da \ + --hash=sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231 \ + --hash=sha256:d6c9af134da4bc9b3bd3e6a70072509f295d10ee60c697826225b60b9959acdd \ + --hash=sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d \ + 
--hash=sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87 \ + --hash=sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886 \ + --hash=sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2 \ + --hash=sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b \ + --hash=sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d \ + --hash=sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f \ + --hash=sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204 \ + --hash=sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e # via -r requirements.in aiosignal==1.3.2 \ --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ @@ -213,9 +213,9 @@ charset-normalizer==3.4.0 \ --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 # via requests -click==8.1.7 \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a # via -r requirements.in exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ @@ -409,9 +409,9 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d +Jinja2==3.1.5 \ + --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ + 
--hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb # via -r requirements.in libcst==1.5.1 \ --hash=sha256:00f3d2f32ee081bad3394546b0b9ac5e31686d3b5cfe4892d716d2ba65f9ec08 \ @@ -860,9 +860,9 @@ typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via multidict -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d # via requests yarl==1.18.3 \ --hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \ From 78acc0c16cf7bacc748ac57c593c104c17710813 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 14:47:39 -0800 Subject: [PATCH 1240/1339] chore(main): release 1.22.0 (#2302) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 9 +++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 8f25cb72b2f5..ac7ce83c0b8c 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,15 @@ # Changelog +## [1.22.0](https://github.com/googleapis/gapic-generator-python/compare/v1.21.0...v1.22.0) (2025-01-30) + + +### Features + +* Add cred info to auth related errors ([#2115](https://github.com/googleapis/gapic-generator-python/issues/2115)) ([a694ceb](https://github.com/googleapis/gapic-generator-python/commit/a694cebf386aace80aab36ef9cc2d1102078d3d0)) 
+* Add REST Interceptors to support reading metadata ([#2299](https://github.com/googleapis/gapic-generator-python/issues/2299)) ([e050f4e](https://github.com/googleapis/gapic-generator-python/commit/e050f4eb3eddfe150f028a2a2bd901899afb965a)) +* Add support for reading selective GAPIC generation methods from service YAML ([#2272](https://github.com/googleapis/gapic-generator-python/issues/2272)) ([3a1a91c](https://github.com/googleapis/gapic-generator-python/commit/3a1a91cadf4438d7b5bf0edaf93ffa3f966eb7e2)) + ## [1.21.0](https://github.com/googleapis/gapic-generator-python/compare/v1.20.2...v1.21.0) (2024-12-11) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index b2eb1cafd1a1..2121fb0131a7 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.21.0" +version = "1.22.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 9cb386fdef1c602e9d5618b34790b7018f8cc254 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 6 Feb 2025 15:19:03 -0500 Subject: [PATCH 1241/1339] build: update all dependencies (#2321) --- packages/gapic-generator/requirements.in | 3 +- packages/gapic-generator/requirements.txt | 635 +++++++++++----------- 2 files changed, 313 insertions(+), 325 deletions(-) diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in index d6a1c8d6f1f6..76406bc236a1 100644 --- a/packages/gapic-generator/requirements.in +++ b/packages/gapic-generator/requirements.in @@ -1,6 +1,7 @@ click google-api-core -googleapis-common-protos +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2322): Remove the constraint once a formal version of 1.67.0 is published 
+googleapis-common-protos>=1.67.0rc1 jinja2 MarkupSafe protobuf diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index da487d725d94..3b1a2fd4d7fd 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,83 +8,88 @@ aiohappyeyeballs==2.4.4 \ --hash=sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745 \ --hash=sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8 # via aiohttp -aiohttp==3.11.11 \ - --hash=sha256:0882c2820fd0132240edbb4a51eb8ceb6eef8181db9ad5291ab3332e0d71df5f \ - --hash=sha256:0a6d3fbf2232e3a08c41eca81ae4f1dff3d8f1a30bae415ebe0af2d2458b8a33 \ - --hash=sha256:0b7fb429ab1aafa1f48578eb315ca45bd46e9c37de11fe45c7f5f4138091e2f1 \ - --hash=sha256:0eb98d90b6690827dcc84c246811feeb4e1eea683c0eac6caed7549be9c84665 \ - --hash=sha256:0fd82b8e9c383af11d2b26f27a478640b6b83d669440c0a71481f7c865a51da9 \ - --hash=sha256:10b4ff0ad793d98605958089fabfa350e8e62bd5d40aa65cdc69d6785859f94e \ - --hash=sha256:1642eceeaa5ab6c9b6dfeaaa626ae314d808188ab23ae196a34c9d97efb68350 \ - --hash=sha256:1dac54e8ce2ed83b1f6b1a54005c87dfed139cf3f777fdc8afc76e7841101226 \ - --hash=sha256:1e69966ea6ef0c14ee53ef7a3d68b564cc408121ea56c0caa2dc918c1b2f553d \ - --hash=sha256:1f21bb8d0235fc10c09ce1d11ffbd40fc50d3f08a89e4cf3a0c503dc2562247a \ - --hash=sha256:2170816e34e10f2fd120f603e951630f8a112e1be3b60963a1f159f5699059a6 \ - --hash=sha256:21fef42317cf02e05d3b09c028712e1d73a9606f02467fd803f7c1f39cc59add \ - --hash=sha256:249cc6912405917344192b9f9ea5cd5b139d49e0d2f5c7f70bdfaf6b4dbf3a2e \ - --hash=sha256:3499c7ffbfd9c6a3d8d6a2b01c26639da7e43d47c7b4f788016226b1e711caa8 \ - --hash=sha256:3af41686ccec6a0f2bdc66686dc0f403c41ac2089f80e2214a0f82d001052c03 \ - --hash=sha256:3e23419d832d969f659c208557de4a123e30a10d26e1e14b73431d3c13444c2e \ - --hash=sha256:3ea1b59dc06396b0b424740a10a0a63974c725b1c64736ff788a3689d36c02d2 \ - 
--hash=sha256:44167fc6a763d534a6908bdb2592269b4bf30a03239bcb1654781adf5e49caf1 \ - --hash=sha256:479b8c6ebd12aedfe64563b85920525d05d394b85f166b7873c8bde6da612f9c \ - --hash=sha256:4af57160800b7a815f3fe0eba9b46bf28aafc195555f1824555fa2cfab6c1538 \ - --hash=sha256:4b4fa1cb5f270fb3eab079536b764ad740bb749ce69a94d4ec30ceee1b5940d5 \ - --hash=sha256:4eed954b161e6b9b65f6be446ed448ed3921763cc432053ceb606f89d793927e \ - --hash=sha256:541d823548ab69d13d23730a06f97460f4238ad2e5ed966aaf850d7c369782d9 \ - --hash=sha256:568c1236b2fde93b7720f95a890741854c1200fba4a3471ff48b2934d2d93fd3 \ - --hash=sha256:5854be2f3e5a729800bac57a8d76af464e160f19676ab6aea74bde18ad19d438 \ - --hash=sha256:620598717fce1b3bd14dd09947ea53e1ad510317c85dda2c9c65b622edc96b12 \ - --hash=sha256:6526e5fb4e14f4bbf30411216780c9967c20c5a55f2f51d3abd6de68320cc2f3 \ - --hash=sha256:6fba278063559acc730abf49845d0e9a9e1ba74f85f0ee6efd5803f08b285853 \ - --hash=sha256:70d1f9dde0e5dd9e292a6d4d00058737052b01f3532f69c0c65818dac26dc287 \ - --hash=sha256:731468f555656767cda219ab42e033355fe48c85fbe3ba83a349631541715ba2 \ - --hash=sha256:81b8fe282183e4a3c7a1b72f5ade1094ed1c6345a8f153506d114af5bf8accd9 \ - --hash=sha256:84a585799c58b795573c7fa9b84c455adf3e1d72f19a2bf498b54a95ae0d194c \ - --hash=sha256:85992ee30a31835fc482468637b3e5bd085fa8fe9392ba0bdcbdc1ef5e9e3c55 \ - --hash=sha256:8811f3f098a78ffa16e0ea36dffd577eb031aea797cbdba81be039a4169e242c \ - --hash=sha256:88a12ad8ccf325a8a5ed80e6d7c3bdc247d66175afedbe104ee2aaca72960d8e \ - --hash=sha256:8be8508d110d93061197fd2d6a74f7401f73b6d12f8822bbcd6d74f2b55d71b1 \ - --hash=sha256:8e2bf8029dbf0810c7bfbc3e594b51c4cc9101fbffb583a3923aea184724203c \ - --hash=sha256:929f3ed33743a49ab127c58c3e0a827de0664bfcda566108989a14068f820194 \ - --hash=sha256:92cde43018a2e17d48bb09c79e4d4cb0e236de5063ce897a5e40ac7cb4878773 \ - --hash=sha256:92fc484e34b733704ad77210c7957679c5c3877bd1e6b6d74b185e9320cc716e \ - --hash=sha256:943a8b052e54dfd6439fd7989f67fc6a7f2138d0a2cf0a7de5f18aa4fe7eb3b1 \ - 
--hash=sha256:9d73ee3725b7a737ad86c2eac5c57a4a97793d9f442599bea5ec67ac9f4bdc3d \ - --hash=sha256:9f5b3c1ed63c8fa937a920b6c1bec78b74ee09593b3f5b979ab2ae5ef60d7600 \ - --hash=sha256:9fd46ce0845cfe28f108888b3ab17abff84ff695e01e73657eec3f96d72eef34 \ - --hash=sha256:a344d5dc18074e3872777b62f5f7d584ae4344cd6006c17ba12103759d407af3 \ - --hash=sha256:a60804bff28662cbcf340a4d61598891f12eea3a66af48ecfdc975ceec21e3c8 \ - --hash=sha256:a8f5f7515f3552d899c61202d99dcb17d6e3b0de777900405611cd747cecd1b8 \ - --hash=sha256:a9b7371665d4f00deb8f32208c7c5e652059b0fda41cf6dbcac6114a041f1cc2 \ - --hash=sha256:aa54f8ef31d23c506910c21163f22b124facb573bff73930735cf9fe38bf7dff \ - --hash=sha256:aba807f9569455cba566882c8938f1a549f205ee43c27b126e5450dc9f83cc62 \ - --hash=sha256:ae545f31489548c87b0cced5755cfe5a5308d00407000e72c4fa30b19c3220ac \ - --hash=sha256:af01e42ad87ae24932138f154105e88da13ce7d202a6de93fafdafb2883a00ef \ - --hash=sha256:b540bd67cfb54e6f0865ceccd9979687210d7ed1a1cc8c01f8e67e2f1e883d28 \ - --hash=sha256:b6212a60e5c482ef90f2d788835387070a88d52cf6241d3916733c9176d39eab \ - --hash=sha256:b63de12e44935d5aca7ed7ed98a255a11e5cb47f83a9fded7a5e41c40277d104 \ - --hash=sha256:ba74ec819177af1ef7f59063c6d35a214a8fde6f987f7661f4f0eecc468a8f76 \ - --hash=sha256:bb49c7f1e6ebf3821a42d81d494f538107610c3a705987f53068546b0e90303e \ - --hash=sha256:bd176afcf8f5d2aed50c3647d4925d0db0579d96f75a31e77cbaf67d8a87742d \ - --hash=sha256:bd7227b87a355ce1f4bf83bfae4399b1f5bb42e0259cb9405824bd03d2f4336a \ - --hash=sha256:bf8d9bfee991d8acc72d060d53860f356e07a50f0e0d09a8dfedea1c554dd0d5 \ - --hash=sha256:bfde76a8f430cf5c5584553adf9926534352251d379dcb266ad2b93c54a29745 \ - --hash=sha256:c341c7d868750e31961d6d8e60ff040fb9d3d3a46d77fd85e1ab8e76c3e9a5c4 \ - --hash=sha256:c7a06301c2fb096bdb0bd25fe2011531c1453b9f2c163c8031600ec73af1cc99 \ - --hash=sha256:cb23d8bb86282b342481cad4370ea0853a39e4a32a0042bb52ca6bdde132df43 \ - --hash=sha256:d119fafe7b634dbfa25a8c597718e69a930e4847f0b88e172744be24515140da \ - 
--hash=sha256:d40f9da8cabbf295d3a9dae1295c69975b86d941bc20f0a087f0477fa0a66231 \ - --hash=sha256:d6c9af134da4bc9b3bd3e6a70072509f295d10ee60c697826225b60b9959acdd \ - --hash=sha256:dd7659baae9ccf94ae5fe8bfaa2c7bc2e94d24611528395ce88d009107e00c6d \ - --hash=sha256:de8d38f1c2810fa2a4f1d995a2e9c70bb8737b18da04ac2afbf3971f65781d87 \ - --hash=sha256:e595c591a48bbc295ebf47cb91aebf9bd32f3ff76749ecf282ea7f9f6bb73886 \ - --hash=sha256:ec2aa89305006fba9ffb98970db6c8221541be7bee4c1d027421d6f6df7d1ce2 \ - --hash=sha256:ec82bf1fda6cecce7f7b915f9196601a1bd1a3079796b76d16ae4cce6d0ef89b \ - --hash=sha256:ed9ee95614a71e87f1a70bc81603f6c6760128b140bc4030abe6abaa988f1c3d \ - --hash=sha256:f047569d655f81cb70ea5be942ee5d4421b6219c3f05d131f64088c73bb0917f \ - --hash=sha256:ffa336210cf9cd8ed117011085817d00abe4c08f99968deef0013ea283547204 \ - --hash=sha256:ffb3dc385f6bb1568aa974fe65da84723210e5d9707e360e9ecb51f59406cd2e +aiohttp==3.11.12 \ + --hash=sha256:0450ada317a65383b7cce9576096150fdb97396dcfe559109b403c7242faffef \ + --hash=sha256:0b5263dcede17b6b0c41ef0c3ccce847d82a7da98709e75cf7efde3e9e3b5cae \ + --hash=sha256:0d5176f310a7fe6f65608213cc74f4228e4f4ce9fd10bcb2bb6da8fc66991462 \ + --hash=sha256:0ed49efcd0dc1611378beadbd97beb5d9ca8fe48579fc04a6ed0844072261b6a \ + --hash=sha256:145a73850926018ec1681e734cedcf2716d6a8697d90da11284043b745c286d5 \ + --hash=sha256:1987770fb4887560363b0e1a9b75aa303e447433c41284d3af2840a2f226d6e0 \ + --hash=sha256:246067ba0cf5560cf42e775069c5d80a8989d14a7ded21af529a4e10e3e0f0e6 \ + --hash=sha256:2c311e2f63e42c1bf86361d11e2c4a59f25d9e7aabdbdf53dc38b885c5435cdb \ + --hash=sha256:2cee3b117a8d13ab98b38d5b6bdcd040cfb4181068d05ce0c474ec9db5f3c5bb \ + --hash=sha256:2de1378f72def7dfb5dbd73d86c19eda0ea7b0a6873910cc37d57e80f10d64e1 \ + --hash=sha256:30f546358dfa0953db92ba620101fefc81574f87b2346556b90b5f3ef16e55ce \ + --hash=sha256:34245498eeb9ae54c687a07ad7f160053911b5745e186afe2d0c0f2898a1ab8a \ + 
--hash=sha256:392432a2dde22b86f70dd4a0e9671a349446c93965f261dbaecfaf28813e5c42 \ + --hash=sha256:3c0600bcc1adfaaac321422d615939ef300df81e165f6522ad096b73439c0f58 \ + --hash=sha256:4016e383f91f2814e48ed61e6bda7d24c4d7f2402c75dd28f7e1027ae44ea204 \ + --hash=sha256:40cd36749a1035c34ba8d8aaf221b91ca3d111532e5ccb5fa8c3703ab1b967ed \ + --hash=sha256:413ad794dccb19453e2b97c2375f2ca3cdf34dc50d18cc2693bd5aed7d16f4b9 \ + --hash=sha256:4a93d28ed4b4b39e6f46fd240896c29b686b75e39cc6992692e3922ff6982b4c \ + --hash=sha256:4ee84c2a22a809c4f868153b178fe59e71423e1f3d6a8cd416134bb231fbf6d3 \ + --hash=sha256:50c5c7b8aa5443304c55c262c5693b108c35a3b61ef961f1e782dd52a2f559c7 \ + --hash=sha256:525410e0790aab036492eeea913858989c4cb070ff373ec3bc322d700bdf47c1 \ + --hash=sha256:526c900397f3bbc2db9cb360ce9c35134c908961cdd0ac25b1ae6ffcaa2507ff \ + --hash=sha256:54775858c7f2f214476773ce785a19ee81d1294a6bedc5cc17225355aab74802 \ + --hash=sha256:584096938a001378484aa4ee54e05dc79c7b9dd933e271c744a97b3b6f644957 \ + --hash=sha256:6130459189e61baac5a88c10019b21e1f0c6d00ebc770e9ce269475650ff7f73 \ + --hash=sha256:67453e603cea8e85ed566b2700efa1f6916aefbc0c9fcb2e86aaffc08ec38e78 \ + --hash=sha256:68d54234c8d76d8ef74744f9f9fc6324f1508129e23da8883771cdbb5818cbef \ + --hash=sha256:6dfe7f984f28a8ae94ff3a7953cd9678550dbd2a1f9bda5dd9c5ae627744c78e \ + --hash=sha256:74bd573dde27e58c760d9ca8615c41a57e719bff315c9adb6f2a4281a28e8798 \ + --hash=sha256:7603ca26d75b1b86160ce1bbe2787a0b706e592af5b2504e12caa88a217767b0 \ + --hash=sha256:76719dd521c20a58a6c256d058547b3a9595d1d885b830013366e27011ffe804 \ + --hash=sha256:7c3623053b85b4296cd3925eeb725e386644fd5bc67250b3bb08b0f144803e7b \ + --hash=sha256:7e44eba534381dd2687be50cbd5f2daded21575242ecfdaf86bbeecbc38dae8e \ + --hash=sha256:7fe3d65279bfbee8de0fb4f8c17fc4e893eed2dba21b2f680e930cc2b09075c5 \ + --hash=sha256:8340def6737118f5429a5df4e88f440746b791f8f1c4ce4ad8a595f42c980bd5 \ + --hash=sha256:84ede78acde96ca57f6cf8ccb8a13fbaf569f6011b9a52f870c662d4dc8cd854 \ + 
--hash=sha256:850ff6155371fd802a280f8d369d4e15d69434651b844bde566ce97ee2277420 \ + --hash=sha256:87a2e00bf17da098d90d4145375f1d985a81605267e7f9377ff94e55c5d769eb \ + --hash=sha256:88d385b8e7f3a870146bf5ea31786ef7463e99eb59e31db56e2315535d811f55 \ + --hash=sha256:8a2fb742ef378284a50766e985804bd6adb5adb5aa781100b09befdbfa757b65 \ + --hash=sha256:8dc0fba9a74b471c45ca1a3cb6e6913ebfae416678d90529d188886278e7f3f6 \ + --hash=sha256:8fa1510b96c08aaad49303ab11f8803787c99222288f310a62f493faf883ede1 \ + --hash=sha256:8fd12d0f989c6099e7b0f30dc6e0d1e05499f3337461f0b2b0dadea6c64b89df \ + --hash=sha256:9060addfa4ff753b09392efe41e6af06ea5dd257829199747b9f15bfad819460 \ + --hash=sha256:930ffa1925393381e1e0a9b82137fa7b34c92a019b521cf9f41263976666a0d6 \ + --hash=sha256:936d8a4f0f7081327014742cd51d320296b56aa6d324461a13724ab05f4b2933 \ + --hash=sha256:97fe431f2ed646a3b56142fc81d238abcbaff08548d6912acb0b19a0cadc146b \ + --hash=sha256:9bd8695be2c80b665ae3f05cb584093a1e59c35ecb7d794d1edd96e8cc9201d7 \ + --hash=sha256:9dec0000d2d8621d8015c293e24589d46fa218637d820894cb7356c77eca3259 \ + --hash=sha256:a478aa11b328983c4444dacb947d4513cb371cd323f3845e53caeda6be5589d5 \ + --hash=sha256:a481a574af914b6e84624412666cbfbe531a05667ca197804ecc19c97b8ab1b0 \ + --hash=sha256:a4ac6a0f0f6402854adca4e3259a623f5c82ec3f0c049374133bcb243132baf9 \ + --hash=sha256:a5e69046f83c0d3cb8f0d5bd9b8838271b1bc898e01562a04398e160953e8eb9 \ + --hash=sha256:a7442662afebbf7b4c6d28cb7aab9e9ce3a5df055fc4116cc7228192ad6cb484 \ + --hash=sha256:aa8a8caca81c0a3e765f19c6953416c58e2f4cc1b84829af01dd1c771bb2f91f \ + --hash=sha256:ab3247d58b393bda5b1c8f31c9edece7162fc13265334217785518dd770792b8 \ + --hash=sha256:b10a47e5390c4b30a0d58ee12581003be52eedd506862ab7f97da7a66805befb \ + --hash=sha256:b34508f1cd928ce915ed09682d11307ba4b37d0708d1f28e5774c07a7674cac9 \ + --hash=sha256:b8d3bb96c147b39c02d3db086899679f31958c5d81c494ef0fc9ef5bb1359b3d \ + --hash=sha256:b9d45dbb3aaec05cf01525ee1a7ac72de46a8c425cb75c003acd29f76b1ffe94 \ + 
--hash=sha256:bf4480a5438f80e0f1539e15a7eb8b5f97a26fe087e9828e2c0ec2be119a9f72 \ + --hash=sha256:c160a04283c8c6f55b5bf6d4cad59bb9c5b9c9cd08903841b25f1f7109ef1259 \ + --hash=sha256:c96a43822f1f9f69cc5c3706af33239489a6294be486a0447fb71380070d4d5f \ + --hash=sha256:c9fd9dcf9c91affe71654ef77426f5cf8489305e1c66ed4816f5a21874b094b9 \ + --hash=sha256:cddb31f8474695cd61fc9455c644fc1606c164b93bff2490390d90464b4655df \ + --hash=sha256:ce1bb21fc7d753b5f8a5d5a4bae99566386b15e716ebdb410154c16c91494d7f \ + --hash=sha256:d1c031a7572f62f66f1257db37ddab4cb98bfaf9b9434a3b4840bf3560f5e788 \ + --hash=sha256:d589264dbba3b16e8951b6f145d1e6b883094075283dafcab4cdd564a9e353a0 \ + --hash=sha256:dc065a4285307607df3f3686363e7f8bdd0d8ab35f12226362a847731516e42c \ + --hash=sha256:e10c440d142fa8b32cfdb194caf60ceeceb3e49807072e0dc3a8887ea80e8c16 \ + --hash=sha256:e3552fe98e90fdf5918c04769f338a87fa4f00f3b28830ea9b78b1bdc6140e0d \ + --hash=sha256:e392804a38353900c3fd8b7cacbea5132888f7129f8e241915e90b85f00e3250 \ + --hash=sha256:e4cecdb52aaa9994fbed6b81d4568427b6002f0a91c322697a4bfcc2b2363f5a \ + --hash=sha256:e5148ca8955affdfeb864aca158ecae11030e952b25b3ae15d4e2b5ba299bad2 \ + --hash=sha256:e6b2732ef3bafc759f653a98881b5b9cdef0716d98f013d376ee8dfd7285abf1 \ + --hash=sha256:ea756b5a7bac046d202a9a3889b9a92219f885481d78cd318db85b15cc0b7bcf \ + --hash=sha256:edb69b9589324bdc40961cdf0657815df674f1743a8d5ad9ab56a99e4833cfdd \ + --hash=sha256:f0203433121484b32646a5f5ea93ae86f3d9559d7243f07e8c0eab5ff8e3f70e \ + --hash=sha256:f6a19bcab7fbd8f8649d6595624856635159a6527861b9cdc3447af288a00c00 \ + --hash=sha256:f752e80606b132140883bb262a457c475d219d7163d996dc9072434ffb0784c4 \ + --hash=sha256:f7914ab70d2ee8ab91c13e5402122edbc77821c66d2758abb53aabe87f013287 # via -r requirements.in aiosignal==1.3.2 \ --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ @@ -94,124 +99,111 @@ async-timeout==5.0.1 \ --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ 
--hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 # via aiohttp -attrs==24.3.0 \ - --hash=sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff \ - --hash=sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308 +attrs==25.1.0 \ + --hash=sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e \ + --hash=sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a # via aiohttp -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a +cachetools==5.5.1 \ + --hash=sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95 \ + --hash=sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db +certifi==2025.1.31 \ + --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \ + --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe # via requests -charset-normalizer==3.4.0 \ - --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \ - --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \ - --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \ - --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \ - --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \ - --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \ - --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \ - --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \ - 
--hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \ - --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \ - --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \ - --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \ - --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \ - --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \ - --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \ - --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \ - --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \ - --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \ - --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \ - --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \ - --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \ - --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \ - --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \ - --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \ - --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \ - --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \ - --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \ - --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \ - --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \ - --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \ - --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \ - --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \ - 
--hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \ - --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \ - --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \ - --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \ - --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \ - --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \ - --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \ - --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \ - --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \ - --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \ - --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \ - --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \ - --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \ - --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \ - --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \ - --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \ - --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \ - --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \ - --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \ - --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \ - --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \ - --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \ - --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \ - --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \ - 
--hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \ - --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \ - --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \ - --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \ - --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \ - --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \ - --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \ - --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \ - --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \ - --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \ - --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \ - --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \ - --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \ - --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \ - --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \ - --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \ - --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \ - --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \ - --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \ - --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \ - --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \ - --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \ - --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \ - --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \ - 
--hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \ - --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \ - --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \ - --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \ - --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \ - --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \ - --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \ - --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \ - --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \ - --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \ - --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \ - --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \ - --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \ - --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \ - --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \ - --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \ - --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \ - --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \ - --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \ - --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \ - --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \ - --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \ - --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \ - --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \ - 
--hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482 +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + 
--hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + 
--hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + 
--hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 # via requests click==8.1.8 \ --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ @@ -317,81 +309,81 @@ frozenlist==1.5.0 \ # via # aiohttp # 
aiosignal -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf +google-api-core==2.24.1 \ + --hash=sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1 \ + --hash=sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a # via -r requirements.in -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 +google-auth==2.38.0 \ + --hash=sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4 \ + --hash=sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a # via google-api-core -googleapis-common-protos[grpc]==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed +googleapis-common-protos[grpc]==1.67.0rc1 \ + --hash=sha256:3230f01d80b0e7dc857f981747df92ed69ca04d6de7c93f55ed5151ba65641b8 \ + --hash=sha256:c17971fd2b38287b03ea5973d978ec7ae72917568f125ee2b88821f20063bb0b # via # -r requirements.in # google-api-core # grpc-google-iam-v1 -grpc-google-iam-v1==0.13.1 \ - --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \ - --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e +grpc-google-iam-v1==0.14.0 \ + --hash=sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99 \ + --hash=sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4 # via -r requirements.in -grpcio==1.68.1 \ - --hash=sha256:025f790c056815b3bf53da850dd70ebb849fd755a4b1ac822cb65cd631e37d43 \ - --hash=sha256:04cfd68bf4f38f5bb959ee2361a7546916bd9a50f78617a346b3aeb2b42e2161 \ - --hash=sha256:0feb02205a27caca128627bd1df4ee7212db051019a9afa76f4bb6a1a80ca95e \ - 
--hash=sha256:1098f03dedc3b9810810568060dea4ac0822b4062f537b0f53aa015269be0a76 \ - --hash=sha256:12941d533f3cd45d46f202e3667be8ebf6bcb3573629c7ec12c3e211d99cfccf \ - --hash=sha256:255b1635b0ed81e9f91da4fcc8d43b7ea5520090b9a9ad9340d147066d1d3613 \ - --hash=sha256:298ee7f80e26f9483f0b6f94cc0a046caf54400a11b644713bb5b3d8eb387600 \ - --hash=sha256:2c4cec6177bf325eb6faa6bd834d2ff6aa8bb3b29012cceb4937b86f8b74323c \ - --hash=sha256:2cc1fd04af8399971bcd4f43bd98c22d01029ea2e56e69c34daf2bf8470e47f5 \ - --hash=sha256:334ab917792904245a028f10e803fcd5b6f36a7b2173a820c0b5b076555825e1 \ - --hash=sha256:3522c77d7e6606d6665ec8d50e867f13f946a4e00c7df46768f1c85089eae515 \ - --hash=sha256:37ea3be171f3cf3e7b7e412a98b77685eba9d4fd67421f4a34686a63a65d99f9 \ - --hash=sha256:390eee4225a661c5cd133c09f5da1ee3c84498dc265fd292a6912b65c421c78c \ - --hash=sha256:3aed6544e4d523cd6b3119b0916cef3d15ef2da51e088211e4d1eb91a6c7f4f1 \ - --hash=sha256:3ceb56c4285754e33bb3c2fa777d055e96e6932351a3082ce3559be47f8024f0 \ - --hash=sha256:44a8502dd5de653ae6a73e2de50a401d84184f0331d0ac3daeb044e66d5c5054 \ - --hash=sha256:4b177f5547f1b995826ef529d2eef89cca2f830dd8b2c99ffd5fde4da734ba73 \ - --hash=sha256:4efac5481c696d5cb124ff1c119a78bddbfdd13fc499e3bc0ca81e95fc573684 \ - --hash=sha256:52fbf85aa71263380d330f4fce9f013c0798242e31ede05fcee7fbe40ccfc20d \ - --hash=sha256:55857c71641064f01ff0541a1776bfe04a59db5558e82897d35a7793e525774c \ - --hash=sha256:66a24f3d45c33550703f0abb8b656515b0ab777970fa275693a2f6dc8e35f1c1 \ - --hash=sha256:6ab2d912ca39c51f46baf2a0d92aa265aa96b2443266fc50d234fa88bf877d8e \ - --hash=sha256:77d65165fc35cff6e954e7fd4229e05ec76102d4406d4576528d3a3635fc6172 \ - --hash=sha256:7dfc914cc31c906297b30463dde0b9be48e36939575eaf2a0a22a8096e69afe5 \ - --hash=sha256:7f20ebec257af55694d8f993e162ddf0d36bd82d4e57f74b31c67b3c6d63d8b2 \ - --hash=sha256:80af6f1e69c5e68a2be529990684abdd31ed6622e988bf18850075c81bb1ad6e \ - --hash=sha256:83bbf5807dc3ee94ce1de2dfe8a356e1d74101e4b9d7aa8c720cc4818a34aded \ - 
--hash=sha256:8720c25cd9ac25dd04ee02b69256d0ce35bf8a0f29e20577427355272230965a \ - --hash=sha256:8829924fffb25386995a31998ccbbeaa7367223e647e0122043dfc485a87c666 \ - --hash=sha256:8a3869a6661ec8f81d93f4597da50336718bde9eb13267a699ac7e0a1d6d0bea \ - --hash=sha256:8cb620037a2fd9eeee97b4531880e439ebfcd6d7d78f2e7dcc3726428ab5ef63 \ - --hash=sha256:919d7f18f63bcad3a0f81146188e90274fde800a94e35d42ffe9eadf6a9a6330 \ - --hash=sha256:95c87ce2a97434dffe7327a4071839ab8e8bffd0054cc74cbe971fba98aedd60 \ - --hash=sha256:963cc8d7d79b12c56008aabd8b457f400952dbea8997dd185f155e2f228db079 \ - --hash=sha256:96f473cdacfdd506008a5d7579c9f6a7ff245a9ade92c3c0265eb76cc591914f \ - --hash=sha256:9d1fae6bbf0816415b81db1e82fb3bf56f7857273c84dcbe68cbe046e58e1ccd \ - --hash=sha256:a0c8ddabef9c8f41617f213e527254c41e8b96ea9d387c632af878d05db9229c \ - --hash=sha256:a1b988b40f2fd9de5c820f3a701a43339d8dcf2cb2f1ca137e2c02671cc83ac1 \ - --hash=sha256:a47faedc9ea2e7a3b6569795c040aae5895a19dde0c728a48d3c5d7995fda385 \ - --hash=sha256:a8040f85dcb9830d8bbb033ae66d272614cec6faceee88d37a88a9bd1a7a704e \ - --hash=sha256:b33bd114fa5a83f03ec6b7b262ef9f5cac549d4126f1dc702078767b10c46ed9 \ - --hash=sha256:c08079b4934b0bf0a8847f42c197b1d12cba6495a3d43febd7e99ecd1cdc8d54 \ - --hash=sha256:c28848761a6520c5c6071d2904a18d339a796ebe6b800adc8b3f474c5ce3c3ad \ - --hash=sha256:cb400138e73969eb5e0535d1d06cae6a6f7a15f2cc74add320e2130b8179211a \ - --hash=sha256:cbb5780e2e740b6b4f2d208e90453591036ff80c02cc605fea1af8e6fc6b1bbe \ - --hash=sha256:ccf2ebd2de2d6661e2520dae293298a3803a98ebfc099275f113ce1f6c2a80f1 \ - --hash=sha256:d35740e3f45f60f3c37b1e6f2f4702c23867b9ce21c6410254c9c682237da68d \ - --hash=sha256:d99abcd61760ebb34bdff37e5a3ba333c5cc09feda8c1ad42547bea0416ada78 \ - --hash=sha256:ddda1aa22495d8acd9dfbafff2866438d12faec4d024ebc2e656784d96328ad0 \ - --hash=sha256:dffd29a2961f3263a16d73945b57cd44a8fd0b235740cb14056f0612329b345e \ - --hash=sha256:e4842e4872ae4ae0f5497bf60a0498fa778c192cc7a9e87877abd2814aca9475 \ - 
--hash=sha256:e8dbe3e00771bfe3d04feed8210fc6617006d06d9a2679b74605b9fed3e8362c \ - --hash=sha256:ee2e743e51cb964b4975de572aa8fb95b633f496f9fcb5e257893df3be854746 \ - --hash=sha256:eeb38ff04ab6e5756a2aef6ad8d94e89bb4a51ef96e20f45c44ba190fa0bcaad \ - --hash=sha256:f8261fa2a5f679abeb2a0a93ad056d765cdca1c47745eda3f2d87f874ff4b8c9 +grpcio==1.70.0 \ + --hash=sha256:0495c86a55a04a874c7627fd33e5beaee771917d92c0e6d9d797628ac40e7655 \ + --hash=sha256:07269ff4940f6fb6710951116a04cd70284da86d0a4368fd5a3b552744511f5a \ + --hash=sha256:0a5c78d5198a1f0aa60006cd6eb1c912b4a1520b6a3968e677dbcba215fabb40 \ + --hash=sha256:0ba0a173f4feacf90ee618fbc1a27956bfd21260cd31ced9bc707ef551ff7dc7 \ + --hash=sha256:0cd430b9215a15c10b0e7d78f51e8a39d6cf2ea819fd635a7214fae600b1da27 \ + --hash=sha256:0de706c0a5bb9d841e353f6343a9defc9fc35ec61d6eb6111802f3aa9fef29e1 \ + --hash=sha256:17325b0be0c068f35770f944124e8839ea3185d6d54862800fc28cc2ffad205a \ + --hash=sha256:2394e3381071045a706ee2eeb6e08962dd87e8999b90ac15c55f56fa5a8c9597 \ + --hash=sha256:27cc75e22c5dba1fbaf5a66c778e36ca9b8ce850bf58a9db887754593080d839 \ + --hash=sha256:2b0d02e4b25a5c1f9b6c7745d4fa06efc9fd6a611af0fb38d3ba956786b95199 \ + --hash=sha256:374d014f29f9dfdb40510b041792e0e2828a1389281eb590df066e1cc2b404e5 \ + --hash=sha256:3b0f01f6ed9994d7a0b27eeddea43ceac1b7e6f3f9d86aeec0f0064b8cf50fdb \ + --hash=sha256:4119fed8abb7ff6c32e3d2255301e59c316c22d31ab812b3fbcbaf3d0d87cc68 \ + --hash=sha256:412faabcc787bbc826f51be261ae5fa996b21263de5368a55dc2cf824dc5090e \ + --hash=sha256:4f1937f47c77392ccd555728f564a49128b6a197a05a5cd527b796d36f3387d0 \ + --hash=sha256:5413549fdf0b14046c545e19cfc4eb1e37e9e1ebba0ca390a8d4e9963cab44d2 \ + --hash=sha256:558c386ecb0148f4f99b1a65160f9d4b790ed3163e8610d11db47838d452512d \ + --hash=sha256:58ad9ba575b39edef71f4798fdb5c7b6d02ad36d47949cd381d4392a5c9cbcd3 \ + --hash=sha256:5ea67c72101d687d44d9c56068328da39c9ccba634cabb336075fae2eab0d04b \ + 
--hash=sha256:7385b1cb064734005204bc8994eed7dcb801ed6c2eda283f613ad8c6c75cf873 \ + --hash=sha256:7c73c42102e4a5ec76608d9b60227d917cea46dff4d11d372f64cbeb56d259d0 \ + --hash=sha256:8058667a755f97407fca257c844018b80004ae8035565ebc2812cc550110718d \ + --hash=sha256:879a61bf52ff8ccacbedf534665bb5478ec8e86ad483e76fe4f729aaef867cab \ + --hash=sha256:880bfb43b1bb8905701b926274eafce5c70a105bc6b99e25f62e98ad59cb278e \ + --hash=sha256:8d1584a68d5922330025881e63a6c1b54cc8117291d382e4fa69339b6d914c56 \ + --hash=sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851 \ + --hash=sha256:9e654c4b17d07eab259d392e12b149c3a134ec52b11ecdc6a515b39aceeec898 \ + --hash=sha256:a31d7e3b529c94e930a117b2175b2efd179d96eb3c7a21ccb0289a8ab05b645c \ + --hash=sha256:aa47688a65643afd8b166928a1da6247d3f46a2784d301e48ca1cc394d2ffb40 \ + --hash=sha256:aa573896aeb7d7ce10b1fa425ba263e8dddd83d71530d1322fd3a16f31257b4a \ + --hash=sha256:aba19419aef9b254e15011b230a180e26e0f6864c90406fdbc255f01d83bc83c \ + --hash=sha256:ac073fe1c4cd856ebcf49e9ed6240f4f84d7a4e6ee95baa5d66ea05d3dd0df7f \ + --hash=sha256:b3c76701428d2df01964bc6479422f20e62fcbc0a37d82ebd58050b86926ef8c \ + --hash=sha256:b745d2c41b27650095e81dea7091668c040457483c9bdb5d0d9de8f8eb25e59f \ + --hash=sha256:bb491125103c800ec209d84c9b51f1c60ea456038e4734688004f377cfacc113 \ + --hash=sha256:c1af8e15b0f0fe0eac75195992a63df17579553b0c4af9f8362cc7cc99ccddf4 \ + --hash=sha256:c78b339869f4dbf89881e0b6fbf376313e4f845a42840a7bdf42ee6caed4b11f \ + --hash=sha256:cb5277db254ab7586769e490b7b22f4ddab3876c490da0a1a9d7c695ccf0bf77 \ + --hash=sha256:cbce24409beaee911c574a3d75d12ffb8c3e3dd1b813321b1d7a96bbcac46bf4 \ + --hash=sha256:cd24d2d9d380fbbee7a5ac86afe9787813f285e684b0271599f95a51bce33528 \ + --hash=sha256:ce7df14b2dcd1102a2ec32f621cc9fab6695effef516efbc6b063ad749867295 \ + --hash=sha256:d24035d49e026353eb042bf7b058fb831db3e06d52bee75c5f2f3ab453e71aca \ + --hash=sha256:d405b005018fd516c9ac529f4b4122342f60ec1cee181788249372524e6db429 \ + 
--hash=sha256:d63764963412e22f0491d0d32833d71087288f4e24cbcddbae82476bfa1d81fd \ + --hash=sha256:dbe41ad140df911e796d4463168e33ef80a24f5d21ef4d1e310553fcd2c4a386 \ + --hash=sha256:dfa089a734f24ee5f6880c83d043e4f46bf812fcea5181dcb3a572db1e79e01c \ + --hash=sha256:e27585831aa6b57b9250abaf147003e126cd3a6c6ca0c531a01996f31709bed1 \ + --hash=sha256:e7831a0fc1beeeb7759f737f5acd9fdcda520e955049512d68fda03d91186eea \ + --hash=sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf \ + --hash=sha256:ef4c14508299b1406c32bdbb9fb7b47612ab979b04cf2b27686ea31882387cff \ + --hash=sha256:f19375f0300b96c0117aca118d400e76fede6db6e91f3c34b7b035822e06c35f \ + --hash=sha256:f2af68a6f5c8f78d56c145161544ad0febbd7479524a59c16b3e25053f39c87f \ + --hash=sha256:f32090238b720eb585248654db8e3afc87b48d26ac423c8dde8334a232ff53c9 \ + --hash=sha256:fe9dbd916df3b60e865258a8c72ac98f3ac9e2a9542dcb72b7a34d236242a5ce \ + --hash=sha256:ff4a8112a79464919bb21c18e956c54add43ec9a4850e3949da54f61c241a4a6 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -409,52 +401,47 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -Jinja2==3.1.5 \ +jinja2==3.1.5 \ --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb # via -r requirements.in -libcst==1.5.1 \ - --hash=sha256:00f3d2f32ee081bad3394546b0b9ac5e31686d3b5cfe4892d716d2ba65f9ec08 \ - --hash=sha256:01e01c04f0641188160d3b99c6526436e93a3fbf9783dba970f9885a77ec9b38 \ - --hash=sha256:02b38fa4d9f13e79fe69e9b5407b9e173557bcfb5960f7866cf4145af9c7ae09 \ - --hash=sha256:06a9b4c9b76da4a7399e6f1f3a325196fb5febd3ea59fac1f68e2116f3517cd8 \ - --hash=sha256:15697ea9f1edbb9a263364d966c72abda07195d1c1a6838eb79af057f1040770 \ - --hash=sha256:1947790a4fd7d96bcc200a6ecaa528045fcb26a34a24030d5859c7983662289e \ - 
--hash=sha256:19e39cfef4316599ca20d1c821490aeb783b52e8a8543a824972a525322a85d0 \ - --hash=sha256:1cc7393aaac733e963f0ee00466d059db74a38e15fc7e6a46dddd128c5be8d08 \ - --hash=sha256:1ff21005c33b634957a98db438e882522febf1cacc62fa716f29e163a3f5871a \ - --hash=sha256:26c804fa8091747128579013df0b5f8e6b0c7904d9c4ee83841f136f53e18684 \ - --hash=sha256:2e397f5b6c0fc271acea44579f154b0f3ab36011050f6db75ab00cef47441946 \ - --hash=sha256:3334afe9e7270e175de01198f816b0dc78dda94d9d72152b61851c323e4e741e \ - --hash=sha256:40b75bf2d70fc0bc26b1fa73e61bdc46fef59f5c71aedf16128e7c33db8d5e40 \ - --hash=sha256:40fbbaa8b839bfbfa5b300623ca2b6b0768b58bbc31b341afbc99110c9bee232 \ - --hash=sha256:56c944acaa781b8e586df3019374f5cf117054d7fc98f85be1ba84fe810005dc \ - --hash=sha256:5987daff8389b0df60b5c20499ff4fb73fc03cb3ae1f6a746eefd204ed08df85 \ - --hash=sha256:666813950b8637af0c0e96b1ca46f5d5f183d2fe50bbac2186f5b283a99f3529 \ - --hash=sha256:697eabe9f5ffc40f76d6d02e693274e0a382826d0cf8183bd44e7407dfb0ab90 \ - --hash=sha256:6d9ec764aa781ef35ab96b693569ac3dced16df9feb40ee6c274d13e86a1472e \ - --hash=sha256:71cb294db84df9e410208009c732628e920111683c2f2b2e0c5b71b98464f365 \ - --hash=sha256:72132756f985a19ef64d702a821099d4afc3544974662772b44cbc55b7279727 \ - --hash=sha256:76a8ac7a84f9b6f678a668bff85b360e0a93fa8d7f25a74a206a28110734bb2a \ - --hash=sha256:89c808bdb5fa9ca02df41dd234cbb0e9de0d2e0c029c7063d5435a9f6781cc10 \ - --hash=sha256:940ec4c8db4c2d620a7268d6c83e64ff646e4afd74ae5183d0f0ef3b80e05be0 \ - --hash=sha256:99bbffd8596d192bc0e844a4cf3c4fc696979d4e20ab1c0774a01768a59b47ed \ - --hash=sha256:aa524bd012aaae1f485fd44490ef5abf708b14d2addc0f06b28de3e4585c4b9e \ - --hash=sha256:ab83633e61ee91df575a3838b1e73c371f19d4916bf1816554933235553d41ea \ - --hash=sha256:b58a49895d95ec1fd34fad041a142d98edf9b51fcaf632337c13befeb4d51c7c \ - --hash=sha256:b5a0d3c632aa2b21c5fa145e4e8dbf86f45c9b37a64c0b7221a5a45caf58915a \ - --hash=sha256:b7b58b36022ae77a5a00002854043ae95c03e92f6062ad08473eff326f32efa0 \ - 
--hash=sha256:bbaf5755be50fa9b35a3d553d1e62293fbb2ee5ce2c16c7e7ffeb2746af1ab88 \ - --hash=sha256:c615af2117320e9a218083c83ec61227d3547e38a0de80329376971765f27a9e \ - --hash=sha256:c7021e3904d8d088c369afc3fe17c279883e583415ef07edacadba76cfbecd27 \ - --hash=sha256:cedd4c8336e01c51913113fbf5566b8f61a86d90f3d5cc5b1cb5049575622c5f \ - --hash=sha256:db7711a762b0327b581be5a963908fecd74412bdda34db34553faa521563c22d \ - --hash=sha256:dc06b7c60d086ef1832aebfd31b64c3c8a645adf0c5638d6243e5838f6a9356e \ - --hash=sha256:ec6ee607cfe4cc4cc93e56e0188fdb9e50399d61a1262d58229752946f288f5e \ - --hash=sha256:eeb13d7c598fe9a798a1d22eae56ab3d3d599b38b83436039bd6ae229fc854d7 \ - --hash=sha256:f053a5deb6a214972dbe9fa26ecd8255edb903de084a3d7715bf9e9da8821c50 \ - --hash=sha256:f3ffb8135c09e41e8cf710b152c33e9b7f1d0d0b9f242bae0c502eb082fdb1fb \ - --hash=sha256:fbccb016b1ac6d892344300dcccc8a16887b71bb7f875ba56c0ed6c1a7ade8be +libcst==1.6.0 \ + --hash=sha256:05c32de72553cb93ff606c7d2421ce1eab1f0740c8c4b715444e2ae42f42b1b6 \ + --hash=sha256:0c0fb2f7b74605832cc38d79e9d104f92a8aaeec7bf8f2759b20c5ba3786a321 \ + --hash=sha256:1b8370d0f7092a17b7fcda0e1539d0162cf35a0c19af94842b09c9dddc382acd \ + --hash=sha256:1bd00399d20bf93590b6f02647f8be08e2b730e050e6b7360f669254e69c98f5 \ + --hash=sha256:1bd11863889b630fe41543b4eb5e2dd445447a7f89e6b58229e83c9e52a74942 \ + --hash=sha256:2f02d0da6dfbad44e6ec4d1e5791e17afe95d9fe89bce4374bf109fd9c103a50 \ + --hash=sha256:2f3c85602e5a6d3aec0a8fc74230363f943004d7c2b2a6a1c09b320b61692241 \ + --hash=sha256:31e45f88d4a9a8e5b690ed14a564fcbace14b10f5e7b6797d6d97f4226b395da \ + --hash=sha256:38f3f25d4f5d8713cdb6a7bd41d75299de3c2416b9890a34d9b05417b8e64c1d \ + --hash=sha256:3fb953fc0155532f366ff40f6a23f191250134d6928e02074ae4eb3531fa6c30 \ + --hash=sha256:48406225378ee9208edb1e5a10451bea810262473af1a2f2473737fd16d34e3a \ + --hash=sha256:4cd011fcd79b76be216440ec296057780223674bc2566662c4bc50d3c5ecd58e \ + 
--hash=sha256:5786240358b122ad901bb0b7e6b7467085b2317333233d7c7d7cac46388fbd77 \ + --hash=sha256:5ac6d68364031f0b554d8920a69b33f25ec6ef351fa31b4e8f3676abb729ce36 \ + --hash=sha256:63a8893dfc344b9b08bfaf4e433b16a7e2e9361f8362fa73eaecc4d379c328ba \ + --hash=sha256:69b705f5b1faa66f115ede52a970d7613d3a8fb988834f853f7fb46870a041d2 \ + --hash=sha256:6a12a4766ce5874ccb31a1cc095cff47e2fb35755954965fe77458d9e5b361a8 \ + --hash=sha256:8bf59a21e9968dc4e7c301fac660bf54bc7d4dcadc0b1abf31b1cac34e800555 \ + --hash=sha256:8e4fcd791cab0fe8287b6edd0d78512b6475b87d906562a5d2d0999cb6d23b8d \ + --hash=sha256:91242ccbae6e7a070b33ebe03d3677c54bf678653538fbaa89597a59e4a13b2d \ + --hash=sha256:96506807dc01c9efcea8ab57d9ea18fdc87b85514cc8ee2f8568fab6df861f02 \ + --hash=sha256:984512829a80f963bfc1803342219a4264a8d4206df0a30eae9bce921357a938 \ + --hash=sha256:a9e71a046b4a91950125967f5ee67389f25a2511103e5595508f0591a5f50bc0 \ + --hash=sha256:b3d274115d134a550fe8a0b38780a28a659d4a35ac6068c7c92fffe6661b519c \ + --hash=sha256:bdc95df61838d708adb37e18af1615491f6cac59557fd11077664dd956fe4528 \ + --hash=sha256:bfcd78a5e775f155054ed50d047a260cd23f0f6a89ef2a57e10bdb9c697680b8 \ + --hash=sha256:c4486921bebd33d67bbbd605aff8bfaefd2d13dc73c20c1fde2fb245880b7fd6 \ + --hash=sha256:c527472093b5b64ffa65d33c472da38952827abbca18c786d559d6d6122bc891 \ + --hash=sha256:cd2b28688dabf0f7a166b47ab1c7d5c0b6ef8c9a05ad932618471a33fe591a4a \ + --hash=sha256:d25132f24edc24895082589645dbb8972c0eff6c9716ff71932fa72643d7c74f \ + --hash=sha256:d45513f6cd3dbb2a80cf21a53bc6e6e560414edea17c474c784100e10aebe921 \ + --hash=sha256:d65550ac686bff9395398afacbc88fe812363703a4161108e8a6db066d30b96e \ + --hash=sha256:dac722aade8796a1e78662c3ed424f0ab9f1dc0e8fdf3088610354cdd709e53f \ + --hash=sha256:df3f452e074893dfad7746a041caeb3cde75bd9fbca4ea7b223012e112d1da8c \ + --hash=sha256:e80ecdbe3fa43b3793cae8fa0b07a985bd9a693edbe6e9d076f5422ecadbf0db \ + --hash=sha256:f8c70a124d7a7d326abdc9a6261013c57d36f21c6c6370de5dd3e6a040c4ee5e # via -r 
requirements.in markupsafe==3.0.2 \ --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ @@ -711,24 +698,24 @@ propcache==0.2.1 \ # via # aiohttp # yarl -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 +proto-plus==1.26.0 \ + --hash=sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22 \ + --hash=sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7 # via # -r requirements.in # google-api-core -protobuf==5.29.2 \ - --hash=sha256:13d6d617a2a9e0e82a88113d7191a1baa1e42c2cc6f5f1398d3b054c8e7e714a \ - --hash=sha256:2d2e674c58a06311c8e99e74be43e7f3a8d1e2b2fdf845eaa347fbd866f23355 \ - --hash=sha256:36000f97ea1e76e8398a3f02936aac2a5d2b111aae9920ec1b769fc4a222c4d9 \ - --hash=sha256:494229ecd8c9009dd71eda5fd57528395d1eacdf307dbece6c12ad0dd09e912e \ - --hash=sha256:842de6d9241134a973aab719ab42b008a18a90f9f07f06ba480df268f86432f9 \ - --hash=sha256:a0c53d78383c851bfa97eb42e3703aefdc96d2036a41482ffd55dc5f529466eb \ - --hash=sha256:b2cc8e8bb7c9326996f0e160137b0861f1a82162502658df2951209d0cb0309e \ - --hash=sha256:b6b0d416bbbb9d4fbf9d0561dbfc4e324fd522f61f7af0fe0f282ab67b22477e \ - --hash=sha256:c12ba8249f5624300cf51c3d0bfe5be71a60c63e4dcf51ffe9a68771d958c851 \ - --hash=sha256:e621a98c0201a7c8afe89d9646859859be97cb22b8bf1d8eacfd90d5bda2eb19 \ - --hash=sha256:fde4554c0e578a5a0bcc9a276339594848d1e89f9ea47b4427c80e5d72f90181 +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + 
--hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 # via # -r requirements.in # google-api-core @@ -745,17 +732,17 @@ pyasn1-modules==0.4.1 \ --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth -pypandoc==1.14 \ - --hash=sha256:1315c7ad7fac7236dacf69a05b521ed2c3f1d0177f70e9b92bfffce6c023df22 \ - --hash=sha256:6b4c45f5f1b9fb5bb562079164806bdbbc3e837b5402bcf3f1139edc5730a197 +pypandoc==1.15 \ + --hash=sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16 \ + --hash=sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13 # via -r requirements.in pytest==8.3.4 \ --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 # via pytest-asyncio -pytest-asyncio==0.25.0 \ - --hash=sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609 \ - --hash=sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3 +pytest-asyncio==0.25.3 \ + --hash=sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3 \ + --hash=sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a # via -r requirements.in pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ From e9d0d86398f92df12d2760f45f84534ed801d43d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 Feb 2025 12:40:09 -0500 Subject: 
[PATCH 1242/1339] fix(deps): require grpc-google-iam-v1>=0.14.0 (#2327) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- packages/gapic-generator/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/asset/testing/constraints-3.7.txt | 2 +- .../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 +- .../integration/goldens/eventarc/testing/constraints-3.7.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index de43f16f5b95..88ce71734072 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -14,7 +14,7 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "4.0.0dev"}, ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "4.0.0dev"}, ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, - ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.12.4", "upper_bound": "1.0.0dev"}, + ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.14.0", "upper_bound": "1.0.0dev"}, ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"}, ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.6", "upper_bound": "1.0.0dev"} } diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 2121fb0131a7..700245f7597f 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -38,7 
+38,7 @@ "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pypandoc >= 1.4", "PyYAML >= 5.1.1", - "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", + "grpc-google-iam-v1 >= 0.14.0, < 1.0.0dev", "libcst >= 0.4.9, < 2.0.0dev", "inflection >= 0.5.1, < 1.0.0dev", ] diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index db3e65c6ea31..46140c197e4c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -48,7 +48,7 @@ "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", "google-cloud-os-config >= 1.0.0, <2.0.0dev", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index aa4420733c50..231036469dff 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -10,4 +10,4 @@ proto-plus==1.22.3 protobuf==3.20.2 google-cloud-access-context-manager==0.1.2 google-cloud-os-config==1.0.0 -grpc-google-iam-v1==0.12.4 +grpc-google-iam-v1==0.14.0 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index b54372e5e2d8..09182698cf3d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -46,7 +46,7 @@ "proto-plus >= 1.22.3, <2.0.0dev", "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index a81fb6bcd05c..fb7e93a1b473 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-api-core==1.34.1 google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 -grpc-google-iam-v1==0.12.4 +grpc-google-iam-v1==0.14.0 From ea16313549ffb2db7fca99ca190f166a5c5b762f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 12 Feb 2025 18:01:52 +0000 Subject: [PATCH 1243/1339] chore(main): release 1.22.1 (#2328) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index ac7ce83c0b8c..3a7f529247ed 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.22.1](https://github.com/googleapis/gapic-generator-python/compare/v1.22.0...v1.22.1) (2025-02-12) + + +### Bug Fixes + +* **deps:** Require grpc-google-iam-v1>=0.14.0 ([#2327](https://github.com/googleapis/gapic-generator-python/issues/2327)) ([d4236c8](https://github.com/googleapis/gapic-generator-python/commit/d4236c8a823d78350015c97c0d10e505771da564)) + ## [1.22.0](https://github.com/googleapis/gapic-generator-python/compare/v1.21.0...v1.22.0) (2025-01-30) diff --git 
a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 700245f7597f..692c945c03b7 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.22.0" +version = "1.22.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From a3c15dfb832170edb3beb84d2a2485327ebcb766 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 Feb 2025 15:56:45 -0500 Subject: [PATCH 1244/1339] build: update all dependencies (#2330) --- packages/gapic-generator/requirements.in | 3 +-- packages/gapic-generator/requirements.txt | 12 ++++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/requirements.in b/packages/gapic-generator/requirements.in index 76406bc236a1..d6a1c8d6f1f6 100644 --- a/packages/gapic-generator/requirements.in +++ b/packages/gapic-generator/requirements.in @@ -1,7 +1,6 @@ click google-api-core -# TODO(https://github.com/googleapis/gapic-generator-python/issues/2322): Remove the constraint once a formal version of 1.67.0 is published -googleapis-common-protos>=1.67.0rc1 +googleapis-common-protos jinja2 MarkupSafe protobuf diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 3b1a2fd4d7fd..8c346db80386 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -aiohappyeyeballs==2.4.4 \ - --hash=sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745 \ - --hash=sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8 +aiohappyeyeballs==2.4.6 \ + 
--hash=sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1 \ + --hash=sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0 # via aiohttp aiohttp==3.11.12 \ --hash=sha256:0450ada317a65383b7cce9576096150fdb97396dcfe559109b403c7242faffef \ @@ -317,9 +317,9 @@ google-auth==2.38.0 \ --hash=sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4 \ --hash=sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a # via google-api-core -googleapis-common-protos[grpc]==1.67.0rc1 \ - --hash=sha256:3230f01d80b0e7dc857f981747df92ed69ca04d6de7c93f55ed5151ba65641b8 \ - --hash=sha256:c17971fd2b38287b03ea5973d978ec7ae72917568f125ee2b88821f20063bb0b +googleapis-common-protos[grpc]==1.67.0 \ + --hash=sha256:21398025365f138be356d5923e9168737d94d46a72aefee4a6110a1f23463c86 \ + --hash=sha256:579de760800d13616f51cf8be00c876f00a9f146d3e6510e19d1f4111758b741 # via # -r requirements.in # google-api-core From cdeed040f729648f77b13b3be10570d5db5d32e0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 13 Feb 2025 19:17:25 -0500 Subject: [PATCH 1245/1339] feat: add ability to remove unversioned modules (#2329) --- .../gapic/generator/generator.py | 10 + .../gapic/templates/setup.py.j2 | 3 +- .../tests/integration/eventarc_v1.yaml | 10 +- .../google/cloud/eventarc/__init__.py | 101 -------- .../google/cloud/eventarc/gapic_version.py | 16 -- .../eventarc/google/cloud/eventarc/py.typed | 2 - .../integration/goldens/eventarc/setup.py | 2 +- .../tests/unit/generator/test_generator.py | 221 +++++++++++------- 8 files changed, 163 insertions(+), 202 deletions(-) delete mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed diff --git 
a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 27ad4b7ff22c..5758e767bdc7 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -244,6 +244,16 @@ def _render_template( if not opts.metadata and template_name.endswith("gapic_metadata.json.j2"): return answer + # Disables generation of an unversioned Python package for this client + # library. This means that the module names will need to be versioned in + # import statements. For example `import google.cloud.library_v2` instead + # of `import google.cloud.library`. + if template_name.startswith("%namespace/%name/") and \ + api_schema.all_library_settings[ + api_schema.naming.proto_package + ].python_settings.experimental_features.unversioned_package_disabled: + return answer + # Quick check: Rendering per service and per proto would be a # combinatorial explosion and is almost certainly not what anyone # ever wants. Error colorfully on it. diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 7ebab02cbfe5..8c1f0b224c47 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -1,5 +1,6 @@ {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove the following variable (and the condition later in this file) for async rest transport once support for it is GA. 
#} {% set rest_async_io_enabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled %} +{% set unversioned_package_disabled = api.all_library_settings[api.naming.proto_package].python_settings.experimental_features.unversioned_package_disabled %} {% extends '_base.py.j2' %} {% from '_pypi_packages.j2' import pypi_packages %} {% block content %} @@ -15,8 +16,8 @@ package_root = os.path.abspath(os.path.dirname(__file__)) name = '{{ api.naming.warehouse_package_name }}' {% set warehouse_description = api.naming.warehouse_package_name.replace('-',' ')|title %} -{% set package_path = api.naming.module_namespace|join('/') + "/" + api.naming.module_name %} +{% set package_path = api.naming.module_namespace|join('/') + "/" + (api.naming.versioned_module_name if unversioned_package_disabled else api.naming.module_name) %} description = "{{ warehouse_description }} API client library" version = None diff --git a/packages/gapic-generator/tests/integration/eventarc_v1.yaml b/packages/gapic-generator/tests/integration/eventarc_v1.yaml index 26d8875e5ba4..0a354f0aa27f 100644 --- a/packages/gapic-generator/tests/integration/eventarc_v1.yaml +++ b/packages/gapic-generator/tests/integration/eventarc_v1.yaml @@ -115,4 +115,12 @@ authentication: - selector: 'google.longrunning.Operations.*' oauth: canonical_scopes: |- - https://www.googleapis.com/auth/cloud-platform \ No newline at end of file + https://www.googleapis.com/auth/cloud-platform + + +publishing: + library_settings: + - version: 'google.cloud.eventarc.v1' + python_settings: + experimental_features: + unversioned_package_disabled: true diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py deleted file mode 100755 index 0f2890f68071..000000000000 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/__init__.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.eventarc import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.eventarc_v1.services.eventarc.client import EventarcClient -from google.cloud.eventarc_v1.services.eventarc.async_client import EventarcAsyncClient - -from google.cloud.eventarc_v1.types.channel import Channel -from google.cloud.eventarc_v1.types.channel_connection import ChannelConnection -from google.cloud.eventarc_v1.types.discovery import EventType -from google.cloud.eventarc_v1.types.discovery import FilteringAttribute -from google.cloud.eventarc_v1.types.discovery import Provider -from google.cloud.eventarc_v1.types.eventarc import CreateChannelConnectionRequest -from google.cloud.eventarc_v1.types.eventarc import CreateChannelRequest -from google.cloud.eventarc_v1.types.eventarc import CreateTriggerRequest -from google.cloud.eventarc_v1.types.eventarc import DeleteChannelConnectionRequest -from google.cloud.eventarc_v1.types.eventarc import DeleteChannelRequest -from google.cloud.eventarc_v1.types.eventarc import DeleteTriggerRequest -from google.cloud.eventarc_v1.types.eventarc import GetChannelConnectionRequest -from google.cloud.eventarc_v1.types.eventarc import GetChannelRequest -from 
google.cloud.eventarc_v1.types.eventarc import GetGoogleChannelConfigRequest -from google.cloud.eventarc_v1.types.eventarc import GetProviderRequest -from google.cloud.eventarc_v1.types.eventarc import GetTriggerRequest -from google.cloud.eventarc_v1.types.eventarc import ListChannelConnectionsRequest -from google.cloud.eventarc_v1.types.eventarc import ListChannelConnectionsResponse -from google.cloud.eventarc_v1.types.eventarc import ListChannelsRequest -from google.cloud.eventarc_v1.types.eventarc import ListChannelsResponse -from google.cloud.eventarc_v1.types.eventarc import ListProvidersRequest -from google.cloud.eventarc_v1.types.eventarc import ListProvidersResponse -from google.cloud.eventarc_v1.types.eventarc import ListTriggersRequest -from google.cloud.eventarc_v1.types.eventarc import ListTriggersResponse -from google.cloud.eventarc_v1.types.eventarc import OperationMetadata -from google.cloud.eventarc_v1.types.eventarc import UpdateChannelRequest -from google.cloud.eventarc_v1.types.eventarc import UpdateGoogleChannelConfigRequest -from google.cloud.eventarc_v1.types.eventarc import UpdateTriggerRequest -from google.cloud.eventarc_v1.types.google_channel_config import GoogleChannelConfig -from google.cloud.eventarc_v1.types.trigger import CloudRun -from google.cloud.eventarc_v1.types.trigger import Destination -from google.cloud.eventarc_v1.types.trigger import EventFilter -from google.cloud.eventarc_v1.types.trigger import GKE -from google.cloud.eventarc_v1.types.trigger import Pubsub -from google.cloud.eventarc_v1.types.trigger import StateCondition -from google.cloud.eventarc_v1.types.trigger import Transport -from google.cloud.eventarc_v1.types.trigger import Trigger - -__all__ = ('EventarcClient', - 'EventarcAsyncClient', - 'Channel', - 'ChannelConnection', - 'EventType', - 'FilteringAttribute', - 'Provider', - 'CreateChannelConnectionRequest', - 'CreateChannelRequest', - 'CreateTriggerRequest', - 'DeleteChannelConnectionRequest', - 
'DeleteChannelRequest', - 'DeleteTriggerRequest', - 'GetChannelConnectionRequest', - 'GetChannelRequest', - 'GetGoogleChannelConfigRequest', - 'GetProviderRequest', - 'GetTriggerRequest', - 'ListChannelConnectionsRequest', - 'ListChannelConnectionsResponse', - 'ListChannelsRequest', - 'ListChannelsResponse', - 'ListProvidersRequest', - 'ListProvidersResponse', - 'ListTriggersRequest', - 'ListTriggersResponse', - 'OperationMetadata', - 'UpdateChannelRequest', - 'UpdateGoogleChannelConfigRequest', - 'UpdateTriggerRequest', - 'GoogleChannelConfig', - 'CloudRun', - 'Destination', - 'EventFilter', - 'GKE', - 'Pubsub', - 'StateCondition', - 'Transport', - 'Trigger', -) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py deleted file mode 100755 index 558c8aab67c5..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed deleted file mode 100755 index ccbcd1359343..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-eventarc package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 09182698cf3d..e89fbba5f645 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -28,7 +28,7 @@ version = None -with open(os.path.join(package_root, 'google/cloud/eventarc/gapic_version.py')) as fp: +with open(os.path.join(package_root, 'google/cloud/eventarc_v1/gapic_version.py')) as fp: version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) assert (len(version_candidates) == 1) version = version_candidates[0] diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index ec589f9df9b6..04415c9e50b7 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -21,6 +21,7 @@ import pytest from google.api import service_pb2 +from google.api import client_pb2 from google.protobuf import descriptor_pb2 from google.protobuf.compiler.plugin_pb2 import CodeGeneratorResponse @@ -47,22 +48,26 @@ def mock_generate_sample(*args, **kwargs): def test_custom_template_directory(): # Create a generator. 
opts = Options.build("python-gapic-templates=/templates/") - g = generator.Generator(opts) + generator_obj = generator.Generator(opts) # Assert that the Jinja loader will pull from the correct location. - assert g._env.loader.searchpath == ["/templates"] + assert generator_obj._env.loader.searchpath == ["/templates"] def test_get_response(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = ["foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2"] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template("I am a template result.") - cgr = g.get_response(api_schema=make_api(), + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ + "foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2" + ] + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( + "I am a template result." 
+ ) + cgr = generator_obj.get_response(api_schema=make_api(), opts=Options.build("")) - lt.assert_called_once() - gt.assert_has_calls( + list_templates.assert_called_once() + get_template.assert_has_calls( [ mock.call("molluscs/squid/sample.py.j2"), mock.call("foo/bar/baz.py.j2"), @@ -74,15 +79,19 @@ def test_get_response(): def test_get_response_ignores_empty_files(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = ["foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2"] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template("# Meaningless comment") - cgr = g.get_response(api_schema=make_api(), + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ + "foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2" + ] + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( + "# Meaningless comment" + ) + cgr = generator_obj.get_response(api_schema=make_api(), opts=Options.build("")) - lt.assert_called_once() - gt.assert_has_calls( + list_templates.assert_called_once() + get_template.assert_has_calls( [ mock.call("molluscs/squid/sample.py.j2"), mock.call("foo/bar/baz.py.j2"), @@ -92,19 +101,21 @@ def test_get_response_ignores_empty_files(): def test_get_response_ignores_private_files(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = [ + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ "foo/bar/baz.py.j2", "foo/bar/_base.py.j2", "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template("I am a template result.") - cgr = 
g.get_response(api_schema=make_api(), + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( + "I am a template result." + ) + cgr = generator_obj.get_response(api_schema=make_api(), opts=Options.build("")) - lt.assert_called_once() - gt.assert_has_calls( + list_templates.assert_called_once() + get_template.assert_has_calls( [ mock.call("molluscs/squid/sample.py.j2"), mock.call("foo/bar/baz.py.j2"), @@ -116,13 +127,13 @@ def test_get_response_ignores_private_files(): def test_get_response_fails_invalid_file_paths(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = [ + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ "foo/bar/%service/%proto/baz.py.j2", ] with pytest.raises(ValueError) as ex: - g.get_response(api_schema=make_api(), + generator_obj.get_response(api_schema=make_api(), opts=Options.build("")) ex_str = str(ex.value) @@ -130,13 +141,13 @@ def test_get_response_fails_invalid_file_paths(): def test_get_response_ignore_gapic_metadata(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = ["gapic/gapic_metadata.json.j2"] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template( + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = ["gapic/gapic_metadata.json.j2"] + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( "This is not something we want to see") - res = g.get_response( + res = generator_obj.get_response( api_schema=make_api(), opts=Options.build(""), ) @@ -145,10 +156,54 @@ def test_get_response_ignore_gapic_metadata(): assert 
res.file == CodeGeneratorResponse().file +@pytest.mark.parametrize("unversioned_package_disabled", (True, False)) +def test_get_response_ignore_unversioned_package(unversioned_package_disabled): + generator_obj = make_generator() + naming = make_naming( + namespace=("Foo",), + name="Bar", + proto_package="foo.bar.v2", + version="v2", + ) + python_settings = client_pb2.PythonSettings( + experimental_features=client_pb2.PythonSettings.ExperimentalFeatures( + unversioned_package_disabled=unversioned_package_disabled + ) + ) + service_config = service_pb2.Service( + publishing=client_pb2.Publishing( + library_settings=[ + client_pb2.ClientLibrarySettings( + version="foo.bar.v2", python_settings=python_settings + ) + ] + ) + ) + api = make_api( + naming=naming, + service_yaml_config=service_config, + ) + + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = ["%namespace/%name/test"] + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template("unversioned_contents") + res = generator_obj.get_response( + api_schema=api, + opts=Options.build(""), + ) + if unversioned_package_disabled: + assert res.file == [] + else: + assert {file_entry.content.strip() for file_entry in res.file} == { + "unversioned_contents", + } + + def test_get_response_ignores_unwanted_transports_and_clients(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = [ + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ "foo/%service/transports/river.py.j2", "foo/%service/transports/car.py.j2", "foo/%service/transports/grpc.py.j2", @@ -160,8 +215,10 @@ def test_get_response_ignores_unwanted_transports_and_clients(): "mollusks/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, "get_template") as 
gt: - gt.return_value = jinja2.Template("Service: {{ service.name }}") + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( + "Service: {{ service.name }}" + ) api_schema = make_api( make_proto( descriptor_pb2.FileDescriptorProto( @@ -173,7 +230,7 @@ def test_get_response_ignores_unwanted_transports_and_clients(): ) ) - cgr = g.get_response( + cgr = generator_obj.get_response( api_schema=api_schema, opts=Options.build("transport=river+car") ) @@ -187,7 +244,7 @@ def test_get_response_ignores_unwanted_transports_and_clients(): "foo/some_service/client.py", } - cgr = g.get_response( + cgr = generator_obj.get_response( api_schema=api_schema, opts=Options.build("transport=grpc") ) @@ -202,15 +259,17 @@ def test_get_response_ignores_unwanted_transports_and_clients(): def test_get_response_enumerates_services(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = [ + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ "foo/%service/baz.py.j2", "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template("Service: {{ service.name }}") - cgr = g.get_response( + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( + "Service: {{ service.name }}" + ) + cgr = generator_obj.get_response( api_schema=make_api( make_proto( descriptor_pb2.FileDescriptorProto( @@ -234,15 +293,17 @@ def test_get_response_enumerates_services(): def test_get_response_enumerates_proto(): - g = make_generator() - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = [ + generator_obj = make_generator() + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as 
list_templates: + list_templates.return_value = [ "foo/%proto.py.j2", "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template("Proto: {{ proto.module_name }}") - cgr = g.get_response( + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( + "Proto: {{ proto.module_name }}" + ) + cgr = generator_obj.get_response( api_schema=make_api( make_proto( descriptor_pb2.FileDescriptorProto(name="a.proto")), @@ -259,7 +320,7 @@ def test_get_response_divides_subpackages(): # NOTE: autogen-snippets is intentionally disabled for this test # The API schema below is incomplete and will result in errors when the # snippetgen logic tries to parse it. - g = make_generator("autogen-snippets=false") + generator_obj = make_generator("autogen-snippets=false") api_schema = api.API.build( [ descriptor_pb2.FileDescriptorProto( @@ -281,19 +342,19 @@ def test_get_response_divides_subpackages(): ], package="foo.v1", ) - with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as lt: - lt.return_value = [ + with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: + list_templates.return_value = [ "foo/%sub/types/%proto.py.j2", "foo/%sub/services/%service.py.j2", "molluscs/squid/sample.py.j2", ] - with mock.patch.object(jinja2.Environment, "get_template") as gt: - gt.return_value = jinja2.Template( + with mock.patch.object(jinja2.Environment, "get_template") as get_template: + get_template.return_value = jinja2.Template( """ {{- '' }}Subpackage: {{ '.'.join(api.subpackage_view) }} """.strip() ) - cgr = g.get_response(api_schema=api_schema, + cgr = generator_obj.get_response(api_schema=api_schema, opts=Options.build("autogen-snippets=false")) assert len(cgr.file) == 6 assert {i.name for i in cgr.file} == { @@ -307,10 +368,10 @@ def test_get_response_divides_subpackages(): def test_get_filename(): - g = 
make_generator() + generator_obj = make_generator() template_name = "%namespace/%name_%version/foo.py.j2" assert ( - g._get_filename( + generator_obj._get_filename( template_name, api_schema=make_api( naming=make_naming(namespace=(), name="Spam", version="v2"), @@ -321,10 +382,10 @@ def test_get_filename(): def test_get_filename_with_namespace(): - g = make_generator() + generator_obj = make_generator() template_name = "%namespace/%name_%version/foo.py.j2" assert ( - g._get_filename( + generator_obj._get_filename( template_name, api_schema=make_api( naming=make_naming( @@ -337,10 +398,10 @@ def test_get_filename_with_namespace(): def test_get_filename_with_service(): - g = make_generator() + generator_obj = make_generator() template_name = "%name/%service/foo.py.j2" assert ( - g._get_filename( + generator_obj._get_filename( template_name, api_schema=make_api( naming=make_naming(namespace=(), name="Spam", version="v2"), @@ -367,9 +428,9 @@ def test_get_filename_with_proto(): naming=make_naming(namespace=(), name="Spam", version="v2"), ) - g = make_generator() + generator_obj = make_generator() assert ( - g._get_filename( + generator_obj._get_filename( "%name/types/%proto.py.j2", api_schema=api, context={"proto": api.protos["bacon.proto"]}, @@ -389,9 +450,9 @@ def test_get_filename_with_proto_and_sub(): make_proto(file_pb2, naming=naming), naming=naming, subpackage_view=("baz",), ) - g = make_generator() + generator_obj = make_generator() assert ( - g._get_filename( + generator_obj._get_filename( "%name/types/%sub/%proto.py.j2", api_schema=api, context={"proto": api.protos["bacon.proto"]}, @@ -451,9 +512,9 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): ), ) - g = generator.Generator(Options.build("samples=samples.yaml",)) + generator_obj = generator.Generator(Options.build("samples=samples.yaml",)) # Need to have the sample template visible to the generator. 
- g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) + generator_obj._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = DummyApiSchema( services={"Mollusc": DummyService( @@ -469,7 +530,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): - actual_response = g.get_response( + actual_response = generator_obj.get_response( api_schema, opts=Options.build("autogen-snippets=False")) expected_snippet_index_json = { @@ -548,14 +609,14 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): ) def test_generate_autogen_samples(mock_generate_sample, mock_generate_specs): opts = Options.build("autogen-snippets") - g = generator.Generator(opts) + generator_obj = generator.Generator(opts) # Need to have the sample template visible to the generator. - g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) + generator_obj._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = make_api(naming=naming.NewNaming( name="Mollusc", version="v6")) - actual_response = g.get_response(api_schema, opts=opts) + generator_obj.get_response(api_schema, opts=opts) # Just check that generate_sample_specs was called # Correctness of the spec is tested in samplegen unit tests @@ -604,9 +665,9 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): """ ), ) - g = generator.Generator(Options.build("samples=samples.yaml")) + generator_obj = generator.Generator(Options.build("samples=samples.yaml")) # Need to have the sample template visible to the generator. 
- g._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) + generator_obj._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = DummyApiSchema( services={"Mollusc": DummyService( @@ -621,7 +682,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca.v1"), ) with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): - actual_response = g.get_response(api_schema, + actual_response = generator_obj.get_response(api_schema, opts=Options.build("autogen-snippets=False")) expected_snippet_metadata_json = { From 230965120c4f74887901a1ec5bafb5ca8051fbfa Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Thu, 13 Feb 2025 19:22:04 -0500 Subject: [PATCH 1246/1339] feat: Added support for internal methods in selective GAPIC generation (#2325) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/DEVELOPMENT.md | 1 + .../gapic/generator/generator.py | 1 + .../gapic/samplegen/samplegen.py | 6 + packages/gapic-generator/gapic/schema/api.py | 77 +- .../gapic-generator/gapic/schema/wrappers.py | 112 +- .../%sub/services/%service/_shared_macros.j2 | 4 +- .../services/%service/transports/base.py.j2 | 2 +- .../services/%service/transports/rest.py.j2 | 2 +- .../%service/transports/rest_asyncio.py.j2 | 2 +- .../gapic-generator/gapic/utils/__init__.py | 2 + packages/gapic-generator/gapic/utils/code.py | 17 + .../tests/integration/BUILD.bazel | 34 +- .../goldens/logging_internal/.coveragerc | 13 + .../goldens/logging_internal/.flake8 | 33 + .../goldens/logging_internal/BUILD.bazel | 12 + .../goldens/logging_internal/MANIFEST.in | 2 + .../goldens/logging_internal/README.rst | 143 + .../logging_internal/docs/_static/custom.css | 3 + .../goldens/logging_internal/docs/conf.py | 376 + .../goldens/logging_internal/docs/index.rst | 7 + .../docs/logging_v2/config_service_v2.rst | 10 
+ .../docs/logging_v2/logging_service_v2.rst | 10 + .../docs/logging_v2/metrics_service_v2.rst | 10 + .../docs/logging_v2/services_.rst | 8 + .../docs/logging_v2/types_.rst | 6 + .../google/cloud/logging/__init__.py | 187 + .../google/cloud/logging/gapic_version.py | 16 + .../google/cloud/logging/py.typed | 2 + .../google/cloud/logging_v2/__init__.py | 188 + .../cloud/logging_v2/gapic_metadata.json | 481 + .../google/cloud/logging_v2/gapic_version.py | 16 + .../google/cloud/logging_v2/py.typed | 2 + .../cloud/logging_v2/services/__init__.py | 15 + .../services/config_service_v2/__init__.py | 22 + .../config_service_v2/async_client.py | 4046 +++++ .../services/config_service_v2/client.py | 4450 +++++ .../services/config_service_v2/pagers.py | 722 + .../config_service_v2/transports/README.rst | 9 + .../config_service_v2/transports/__init__.py | 33 + .../config_service_v2/transports/base.py | 718 + .../config_service_v2/transports/grpc.py | 1328 ++ .../transports/grpc_asyncio.py | 1595 ++ .../services/logging_service_v2/__init__.py | 22 + .../logging_service_v2/async_client.py | 1175 ++ .../services/logging_service_v2/client.py | 1538 ++ .../services/logging_service_v2/pagers.py | 446 + .../logging_service_v2/transports/README.rst | 9 + .../logging_service_v2/transports/__init__.py | 33 + .../logging_service_v2/transports/base.py | 338 + .../logging_service_v2/transports/grpc.py | 552 + .../transports/grpc_asyncio.py | 678 + .../services/metrics_service_v2/__init__.py | 22 + .../metrics_service_v2/async_client.py | 1025 ++ .../services/metrics_service_v2/client.py | 1388 ++ .../services/metrics_service_v2/pagers.py | 166 + .../metrics_service_v2/transports/README.rst | 9 + .../metrics_service_v2/transports/__init__.py | 33 + .../metrics_service_v2/transports/base.py | 302 + .../metrics_service_v2/transports/grpc.py | 507 + .../transports/grpc_asyncio.py | 606 + .../google/cloud/logging_v2/types/__init__.py | 178 + .../cloud/logging_v2/types/log_entry.py | 435 + 
.../google/cloud/logging_v2/types/logging.py | 600 + .../cloud/logging_v2/types/logging_config.py | 2417 +++ .../cloud/logging_v2/types/logging_metrics.py | 411 + .../goldens/logging_internal/mypy.ini | 3 + .../goldens/logging_internal/noxfile.py | 280 + ...ice_v2__copy_log_entries_async_internal.py | 57 + ...vice_v2__copy_log_entries_sync_internal.py | 57 + ...ice_v2__create_exclusion_async_internal.py | 57 + ...vice_v2__create_exclusion_sync_internal.py | 57 + ..._service_v2__create_link_async_internal.py | 57 + ...g_service_v2__create_link_sync_internal.py | 57 + ..._service_v2__create_sink_async_internal.py | 57 + ...g_service_v2__create_sink_sync_internal.py | 57 + ..._service_v2__create_view_async_internal.py | 53 + ...g_service_v2__create_view_sync_internal.py | 53 + ...ice_v2__delete_exclusion_async_internal.py | 50 + ...vice_v2__delete_exclusion_sync_internal.py | 50 + ..._service_v2__delete_link_async_internal.py | 56 + ...g_service_v2__delete_link_sync_internal.py | 56 + ..._service_v2__delete_sink_async_internal.py | 50 + ...g_service_v2__delete_sink_sync_internal.py | 50 + ..._service_v2__delete_view_async_internal.py | 50 + ...g_service_v2__delete_view_sync_internal.py | 50 + ...ce_v2__get_cmek_settings_async_internal.py | 52 + ...ice_v2__get_cmek_settings_sync_internal.py | 52 + ...ervice_v2__get_exclusion_async_internal.py | 52 + ...service_v2__get_exclusion_sync_internal.py | 52 + ...fig_service_v2__get_link_async_internal.py | 52 + ...nfig_service_v2__get_link_sync_internal.py | 52 + ...service_v2__get_settings_async_internal.py | 52 + ..._service_v2__get_settings_sync_internal.py | 52 + ...fig_service_v2__get_sink_async_internal.py | 52 + ...nfig_service_v2__get_sink_sync_internal.py | 52 + ...fig_service_v2__get_view_async_internal.py | 52 + ...nfig_service_v2__get_view_sync_internal.py | 52 + ...vice_v2__list_exclusions_async_internal.py | 53 + ...rvice_v2__list_exclusions_sync_internal.py | 53 + ...g_service_v2__list_links_async_internal.py | 
53 + ...ig_service_v2__list_links_sync_internal.py | 53 + ...g_service_v2__list_sinks_async_internal.py | 53 + ...ig_service_v2__list_sinks_sync_internal.py | 53 + ...g_service_v2__list_views_async_internal.py | 53 + ...ig_service_v2__list_views_sync_internal.py | 53 + ...v2__update_cmek_settings_async_internal.py | 52 + ..._v2__update_cmek_settings_sync_internal.py | 52 + ...ice_v2__update_exclusion_async_internal.py | 57 + ...vice_v2__update_exclusion_sync_internal.py | 57 + ...vice_v2__update_settings_async_internal.py | 52 + ...rvice_v2__update_settings_sync_internal.py | 52 + ..._service_v2__update_sink_async_internal.py | 57 + ...g_service_v2__update_sink_sync_internal.py | 57 + ..._service_v2__update_view_async_internal.py | 52 + ...g_service_v2__update_view_sync_internal.py | 52 + ...d_config_service_v2_create_bucket_async.py | 53 + ...ig_service_v2_create_bucket_async_async.py | 57 + ...fig_service_v2_create_bucket_async_sync.py | 57 + ...ed_config_service_v2_create_bucket_sync.py | 53 + ...d_config_service_v2_delete_bucket_async.py | 50 + ...ed_config_service_v2_delete_bucket_sync.py | 50 + ...ated_config_service_v2_get_bucket_async.py | 52 + ...rated_config_service_v2_get_bucket_sync.py | 52 + ...ed_config_service_v2_list_buckets_async.py | 53 + ...ted_config_service_v2_list_buckets_sync.py | 53 + ...config_service_v2_undelete_bucket_async.py | 50 + ..._config_service_v2_undelete_bucket_sync.py | 50 + ...d_config_service_v2_update_bucket_async.py | 52 + ...ig_service_v2_update_bucket_async_async.py | 56 + ...fig_service_v2_update_bucket_async_sync.py | 56 + ...ed_config_service_v2_update_bucket_sync.py | 52 + ...ted_logging_service_v2_delete_log_async.py | 50 + ...ated_logging_service_v2_delete_log_sync.py | 50 + ...gging_service_v2_list_log_entries_async.py | 53 + ...ogging_service_v2_list_log_entries_sync.py | 53 + ...ated_logging_service_v2_list_logs_async.py | 53 + ...rated_logging_service_v2_list_logs_sync.py | 53 + 
...st_monitored_resource_descriptors_async.py | 52 + ...ist_monitored_resource_descriptors_sync.py | 52 + ...gging_service_v2_tail_log_entries_async.py | 63 + ...ogging_service_v2_tail_log_entries_sync.py | 63 + ...ging_service_v2_write_log_entries_async.py | 55 + ...gging_service_v2_write_log_entries_sync.py | 55 + ...ce_v2__create_log_metric_async_internal.py | 57 + ...ice_v2__create_log_metric_sync_internal.py | 57 + ...ce_v2__delete_log_metric_async_internal.py | 50 + ...ice_v2__delete_log_metric_sync_internal.py | 50 + ...rvice_v2__get_log_metric_async_internal.py | 52 + ...ervice_v2__get_log_metric_sync_internal.py | 52 + ...ice_v2__list_log_metrics_async_internal.py | 53 + ...vice_v2__list_log_metrics_sync_internal.py | 53 + ...ce_v2__update_log_metric_async_internal.py | 57 + ...ice_v2__update_log_metric_sync_internal.py | 57 + .../snippet_metadata_google.logging.v2.json | 6896 ++++++++ .../scripts/fixup_logging_v2_keywords.py | 218 + .../goldens/logging_internal/setup.py | 98 + .../testing/constraints-3.10.txt | 6 + .../testing/constraints-3.11.txt | 6 + .../testing/constraints-3.12.txt | 6 + .../testing/constraints-3.13.txt | 6 + .../testing/constraints-3.7.txt | 10 + .../testing/constraints-3.8.txt | 6 + .../testing/constraints-3.9.txt | 6 + .../logging_internal/tests/__init__.py | 16 + .../logging_internal/tests/unit/__init__.py | 16 + .../tests/unit/gapic/__init__.py | 16 + .../tests/unit/gapic/logging_v2/__init__.py | 16 + .../logging_v2/test_config_service_v2.py | 13713 ++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 4107 +++++ .../logging_v2/test_metrics_service_v2.py | 3911 +++++ .../integration/logging_internal_v2.yaml | 245 + .../tests/unit/common_types.py | 1 + .../tests/unit/samplegen/test_samplegen.py | 114 + .../tests/unit/schema/test_api.py | 73 +- .../tests/unit/utils/test_code.py | 8 + 175 files changed, 61955 insertions(+), 33 deletions(-) create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/.coveragerc create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 create mode 100644 packages/gapic-generator/tests/integration/goldens/logging_internal/BUILD.bazel create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/config_service_v2.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/logging_service_v2.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/metrics_service_v2.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/services_.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/types_.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/py.typed create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/py.typed create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py 
create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/mypy.ini create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py create mode 
100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py create mode 100755 
packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py create mode 100644 packages/gapic-generator/tests/integration/logging_internal_v2.yaml diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index fe8ac7699fa7..f6480bd689ac 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -59,6 +59,7 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. bazel run //tests/integration:credentials_update bazel run //tests/integration:eventarc_update bazel run //tests/integration:logging_update + bazel run //tests/integration:logging_internal_update bazel run //tests/integration:redis_update bazel run //tests/integration:redis_selective_update ``` diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 5758e767bdc7..3d490dcb3901 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -59,6 +59,7 @@ def __init__(self, opts: Options) -> None: # Add filters which templates require. 
self._env.filters["rst"] = utils.rst + self._env.filters["make_private"] = utils.make_private self._env.filters["snake_case"] = utils.to_snake_case self._env.filters["camel_case"] = utils.to_camel_case self._env.filters["sort_lines"] = utils.sort_lines diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index b6b5635f344c..d06968d5d627 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -1036,6 +1036,12 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A # Region Tag Format: # [{START|END} ${apishortname}_${apiVersion}_generated_${serviceName}_${rpcName}_{sync|async|rest}] region_tag = f"{api_short_name}_{api_version}_generated_{service_name}_{rpc_name}_{sync_or_async}" + + # We assume that the only methods that start with an underscore are internal methods. + is_internal = rpc_name.startswith("_") + if is_internal: + region_tag += "_internal" + spec = { "rpc": rpc_name, "transport": transport, diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index e95243e08641..70b402b50af5 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -326,6 +326,26 @@ def prune_messages_for_selective_generation(self, *, all_enums=all_enums ) + def with_internal_methods(self, *, public_methods: Set[str]) -> 'Proto': + """Returns a version of this Proto with some Methods marked as internal. + + The methods not in the public_methods set will be marked as internal and + services containing these methods will also be marked as internal by extension. + (See :meth:`Service.is_internal` for more details). + + Args: + public_methods (Set[str]): An allowlist of fully-qualified method names. + Methods not in this allowlist will be marked as internal. 
+ Returns: + Proto: A version of this Proto with Method objects corresponding to methods + not in `public_methods` marked as internal. + """ + services = { + k: v.with_internal_methods(public_methods=public_methods) + for k, v in self.services.items() + } + return dataclasses.replace(self, services=services) + @dataclasses.dataclass(frozen=True) class API: @@ -462,34 +482,43 @@ def disambiguate_keyword_sanitize_fname( service_yaml_config=service_yaml_config) if package in api.all_library_settings: - selective_gapic_methods = set( - api.all_library_settings[package].python_settings.common.selective_gapic_generation.methods - ) - if selective_gapic_methods: - - all_resource_messages = collections.ChainMap( - *(proto.resource_messages for proto in protos.values()) - ) - - # Prepare a list of addresses to include in selective generation, - # then prune each Proto object. We look at metadata.Addresses, not objects, because - # objects that refer to the same thing in the proto are different Python objects - # in memory. - address_allowlist: Set['metadata.Address'] = set([]) - for proto in api.protos.values(): - proto.add_to_address_allowlist(address_allowlist=address_allowlist, - method_allowlist=selective_gapic_methods, - resource_messages=all_resource_messages) + selective_gapic_settings = api.all_library_settings[package].python_settings.\ + common.selective_gapic_generation + selective_gapic_methods = set(selective_gapic_settings.methods) + if selective_gapic_methods: # The list of explicitly allow-listed protos to generate, plus all # the proto dependencies regardless of the allow-list. - new_all_protos = {} + # + # Both selective GAPIC generation settings (omitting + internal) only alter + # protos that are not dependencies, so we iterate over api.all_protos and copy + # all dependencies as is here. 
+ new_all_protos = { + k: v for k, v in api.all_protos.items() + if k not in api.protos + } + + if selective_gapic_settings.generate_omitted_as_internal: + for name, proto in api.protos.items(): + new_all_protos[name] = proto.with_internal_methods( + public_methods=selective_gapic_methods) + else: + all_resource_messages = collections.ChainMap( + *(proto.resource_messages for proto in protos.values()) + ) - # We only prune services/messages/enums from protos that are not dependencies. - for name, proto in api.all_protos.items(): - if name not in api.protos: - new_all_protos[name] = proto - else: + # Prepare a list of addresses to include in selective generation, + # then prune each Proto object. We look at metadata.Addresses, not objects, because + # objects that refer to the same thing in the proto are different Python objects + # in memory. + address_allowlist: Set['metadata.Address'] = set([]) + for proto in api.protos.values(): + proto.add_to_address_allowlist(address_allowlist=address_allowlist, + method_allowlist=selective_gapic_methods, + resource_messages=all_resource_messages) + + # We only prune services/messages/enums from protos that are not dependencies. 
+ for name, proto in api.protos.items(): proto_to_generate = proto.prune_messages_for_selective_generation( address_allowlist=address_allowlist) if proto_to_generate: diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 5618fd677359..ff2d6738c3c2 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -53,6 +53,7 @@ from gapic import utils from gapic.schema import metadata from gapic.utils import uri_sample +from gapic.utils import make_private @dataclasses.dataclass(frozen=True) @@ -900,6 +901,14 @@ def add_to_address_allowlist(self, *, This method is used to create an allowlist of addresses to be used to filter out unneeded services, methods, messages, and enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + Returns: + None """ address_allowlist.add(self.ident) @@ -1014,6 +1023,18 @@ def add_to_address_allowlist(self, *, This method is used to create an allowlist of addresses to be used to filter out unneeded services, methods, messages, and enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. 
Only resources with a message representation + should be included in the dictionary. + Returns: + None """ self.request_type.add_to_address_allowlist( @@ -1061,6 +1082,18 @@ def add_to_address_allowlist(self, *, This method is used to create an allowlist of addresses to be used to filter out unneeded services, methods, messages, and enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. Only resources with a message representation + should be included in the dictionary. + Returns: + None """ self.response_type.add_to_address_allowlist( address_allowlist=address_allowlist, @@ -1362,6 +1395,7 @@ class Method: method_pb: descriptor_pb2.MethodDescriptorProto input: MessageType output: MessageType + is_internal: bool = False lro: Optional[OperationInfo] = dataclasses.field(default=None) extended_lro: Optional[ExtendedOperationInfo] = dataclasses.field( default=None) @@ -1403,6 +1437,10 @@ def transport_safe_name(self) -> str: def is_operation_polling_method(self): return self.output.is_extended_operation and self.options.Extensions[ex_ops_pb2.operation_polling_method] + @utils.cached_property + def name(self): + return make_private(self.method_pb.name) if self.is_internal else self.method_pb.name + @utils.cached_property def client_output(self): return self._client_output(enable_asyncio=False) @@ -1814,6 +1852,22 @@ def add_to_address_allowlist(self, *, This method is used to create an allowlist of addresses to be used to filter out unneeded services, methods, messages, and 
enums at a later step. + + Args: + address_allowlist (Set[metadata.Address]): A set of allowlisted metadata.Address + objects to add to. Only the addresses of the allowlisted methods, the services + containing these methods, and messages/enums those methods use will be part of the + final address_allowlist. The set may be modified during this call. + method_allowlist (Set[str]): An allowlist of fully-qualified method names. + resource_messages (Dict[str, wrappers.MessageType]): A dictionary mapping the unified + resource type name of a resource message to the corresponding MessageType object + representing that resource message. Only resources with a message representation + should be included in the dictionary. + services_in_proto (Dict[str, wrappers.Service]): A dictionary mapping the names of Service + objects in the proto containing this method to the Service objects. This is necessary + for traversing the operation service in the case of extended LROs. + Returns: + None """ address_allowlist.add(self.ident) @@ -1851,6 +1905,27 @@ def add_to_address_allowlist(self, *, resource_messages=resource_messages, ) + def with_internal_methods(self, *, public_methods: Set[str]) -> 'Method': + """Returns a version of this ``Method`` marked as internal + + If this method is not in the public_methods set, it will be marked as internal, and + its containing ``Service`` will be as well by extension (see :meth:`Service.is_internal`). + + Args: + public_methods (Set[str]): An allowlist of fully-qualified method names. + Methods not in this allowlist will be marked as internal. + Returns: + Method: A version of this `Method`, marked as internal if it is not in + the `public_methods` allowlist.
+ """ + if self.ident.proto in public_methods: + return self + + return dataclasses.replace( + self, + is_internal=True, + ) + @dataclasses.dataclass(frozen=True) class CommonResource: @@ -1928,7 +2003,7 @@ def __getattr__(self, name): @property def client_name(self) -> str: """Returns the name of the generated client class""" - return self.name + "Client" + return ("Base" if self.is_internal else "") + self.name + "Client" @property def client_package_version(self) -> str: @@ -1937,7 +2012,7 @@ def client_package_version(self) -> str: @property def async_client_name(self) -> str: """Returns the name of the generated AsyncIO client class""" - return self.name + "AsyncClient" + return ("Base" if self.is_internal else "") + self.name + "AsyncClient" @property def transport_name(self): @@ -2143,6 +2218,10 @@ def operation_polling_method(self) -> Optional[Method]: None ) + @utils.cached_property + def is_internal(self) -> bool: + return any(m.is_internal for m in self.methods.values()) + def with_context(self, *, collisions: Set[str], visited_messages: Optional[Set["MessageType"]] = None, @@ -2224,3 +2303,32 @@ def prune_messages_for_selective_generation(self, *, for k, v in self.methods.items() if v.ident in address_allowlist } ) + + def with_internal_methods(self, *, + public_methods: Set[str]) -> 'Service': + """Returns a version of this ``Service`` with some Methods marked as internal. + + The methods not in the public_methods set will be marked as internal and + this ``Service`` will as well by extension (see :meth:`Service.is_internal`). + + Args: + public_methods (Set[str]): An allowlist of fully-qualified method names. + Methods not in this allowlist will be marked as internal. + Returns: + Service: A version of this `Service` with `Method` objects corresponding to methods + not in `public_methods` marked as internal. + """ + + # Internal methods need to be keyed with underscore prefixed method names + # (e.g. 
google.Service.Method -> google.Service._Method) in order for + # samplegen to work properly. + return dataclasses.replace( + self, + methods={ + meth.name: meth + for meth in ( + meth.with_internal_methods(public_methods=public_methods) + for meth in self.methods.values() + ) + } + ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index e96594c01875..cca0717282d1 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -452,9 +452,9 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: @property def {{ name|snake_case }}(self): - return self._{{ name }}(self._session, self._host, self._interceptor) # type: ignore + return self.{{ name|make_private }}(self._session, self._host, self._interceptor) # type: ignore -class _{{ name }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): +class {{ name|make_private }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): def __hash__(self): return hash("{{ async_method_name_prefix }}{{ service.name }}RestTransport.{{ name }}") diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index e75d03a761da..0daf45041a13 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ 
-366,7 +366,7 @@ class {{ service.name }}Transport(abc.ABC): {% for operations_service in api.get_extended_operations_services(service)|sort(attribute="name") %} @property - def _{{ operations_service.client_name|snake_case }}(self) -> {{ operations_service.name|snake_case }}.{{ operations_service.client_name }}: + def {{ operations_service.client_name|snake_case|make_private }}(self) -> {{ operations_service.name|snake_case }}.{{ operations_service.client_name }}: ex_op_service = self._extended_operations_services.get("{{ operations_service.name|snake_case }}") if not ex_op_service: ex_op_service = {{ operations_service.name|snake_case }}.{{ operations_service.client_name }}( diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 63adba1920dd..d419a2d3c3cd 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -198,7 +198,7 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {% endif %}{# service.has_lro #} {% for method in service.methods.values()|sort(attribute="name") %} - class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, {{service.name}}RestStub): + class {{ method.name|make_private }}(_Base{{ service.name }}RestTransport._Base{{method.name}}, {{service.name}}RestStub): def __hash__(self): return hash("{{service.name}}RestTransport.{{method.name}}") {% if method.http_options and not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 79d54e4f2a6e..8664b7077dc2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -159,7 +159,7 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {{ shared_macros.wrap_async_method_macro()|indent(4) }} {% for method in service.methods.values()|sort(attribute="name") %} - class _{{method.name}}(_Base{{ service.name }}RestTransport._Base{{method.name}}, Async{{service.name}}RestStub): + class {{ method.name|make_private }}(_Base{{ service.name }}RestTransport._Base{{method.name}}, Async{{service.name}}RestStub): def __hash__(self): return hash("Async{{service.name}}RestTransport.{{method.name}}") diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 047cc4f300f1..6f2d02d4ac49 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -20,6 +20,7 @@ from gapic.utils.code import empty from gapic.utils.code import nth from gapic.utils.code import partition +from gapic.utils.code import make_private from gapic.utils.doc import doc from gapic.utils.filename import to_valid_filename from gapic.utils.filename import to_valid_module_name @@ -38,6 +39,7 @@ 'empty', 'is_msg_field_pb', 'is_str_field_pb', + 'make_private', 'nth', 'Options', 'partition', diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 836b5676b09b..1a80dee92b50 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -63,3 +63,20 @@ def nth(iterable: Iterable[T], n: int, default: Optional[T] = None) -> Optional[ 
fewer than n elements. """ return next(itertools.islice(iterable, n, None), default) + + +def make_private(object_name: str) -> str: + """Labels the object name like a private object (i.e. prefixes it with underscore) + + If the object name already starts with underscore, returns the original name instead. + This is to avoid adding name mangling to an object. + + This is provided to templates as the ``make_private`` filter. + + Args: + object_name (str): The name of the object to be made private. + + Returns: + str: The final name of the private object. + """ + return object_name if object_name.startswith('_') else f'_{object_name}' diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 3b970ba5a554..be1f5cd15e99 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -29,6 +29,7 @@ INTEGRATION_TEST_LIBRARIES = [ "logging", # Java package remapping in gapic.yaml. "redis", # Has a gapic.yaml. "redis_selective", # Selective generation. + "logging_internal", # Selective generation with internal client classes. ] [integration_test( @@ -169,7 +170,7 @@ py_test( ], ) -# Redis - selective GAPIC phase 1 +# Redis - selective GAPIC generation py_gapic_library( name = "redis_selective_py_gapic", srcs = ["@com_google_googleapis//google/cloud/redis/v1:redis_proto"], @@ -193,6 +194,37 @@ py_test( ], ) +# Logging - selective GAPIC internal classes generation +py_gapic_library( + name = "logging_internal_py_gapic", + srcs = ["@com_google_googleapis//google/logging/v2:logging_proto"], + grpc_service_config = "logging_grpc_service_config.json", + opt_args = [ + "python-gapic-namespace=google.cloud", + "python-gapic-name=logging", + "autogen-snippets", + ], + # REST is not generated because of the following issues: + # - REST unit test `test_update_settings_rest_flattened` in logging_v2 fails.
See #1728 + # - REST is not generated in the public `BUILD.bazel` + # https://github.com/googleapis/googleapis/blob/e85662e798a0a9495a035839f66d0c037c481e2c/google/logging/v2/BUILD.bazel#L201 + service_yaml = "logging_internal_v2.yaml", + transport = "grpc", +) + +py_test( + name = "logging_internal_py_gapic_test", + srcs = [ + "logging_internal_py_gapic_pytest.py", + "logging_internal_py_gapic_test.py", + ], + legacy_create_init = False, + deps = [ + ":logging_internal_py_gapic", + ], +) + + test_suite( name = "googleapis_test_suite", tests = [ diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/.coveragerc b/packages/gapic-generator/tests/integration/goldens/logging_internal/.coveragerc new file mode 100755 index 000000000000..5ae4d709b8ba --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/.coveragerc @@ -0,0 +1,13 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/logging/__init__.py + google/cloud/logging/gapic_version.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 b/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 new file mode 100755 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. + **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/BUILD.bazel b/packages/gapic-generator/tests/integration/goldens/logging_internal/BUILD.bazel new file mode 100644 index 000000000000..2822013159c1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/BUILD.bazel @@ -0,0 +1,12 @@ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "goldens_files", + srcs = glob( + ["**/*"], + exclude = [ + "BUILD.bazel", + ".*.sw*", + ], + ), +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in new file mode 100755 index 000000000000..f8c276f2cce8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/logging *.py +recursive-include google/cloud/logging_v2 *.py diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/README.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/README.rst new file mode 100755 index 000000000000..c3f6248ae3a6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/README.rst @@ -0,0 +1,143 @@ +Python Client for Google Cloud Logging API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. 
Enable the Google Cloud Logging API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library + + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. 
+ + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + + +Examples +^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + + +Examples +^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. 
code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css new file mode 100755 index 000000000000..06423be0b592 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css @@ -0,0 +1,3 @@ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py new file mode 100755 index 000000000000..e1d58773a736 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-logging documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. 
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-logging" +copyright = u"2023, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-logging-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-logging.tex", + u"google-cloud-logging Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-logging", + u"Google Cloud Logging Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-logging", + u"google-cloud-logging Documentation", + author, + "google-cloud-logging", + "GAPIC library for Google Cloud Logging API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst new 
file mode 100755 index 000000000000..51acc96292ab --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + logging_v2/services_ + logging_v2/types_ diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/config_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/config_service_v2.rst new file mode 100755 index 000000000000..f7c0a7701de1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/config_service_v2.rst @@ -0,0 +1,10 @@ +ConfigServiceV2 +--------------------------------- + +.. automodule:: google.cloud.logging_v2.services.config_service_v2 + :members: + :inherited-members: + +.. automodule:: google.cloud.logging_v2.services.config_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/logging_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/logging_service_v2.rst new file mode 100755 index 000000000000..f41c0c89b78c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/logging_service_v2.rst @@ -0,0 +1,10 @@ +LoggingServiceV2 +---------------------------------- + +.. automodule:: google.cloud.logging_v2.services.logging_service_v2 + :members: + :inherited-members: + +.. 
automodule:: google.cloud.logging_v2.services.logging_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/metrics_service_v2.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/metrics_service_v2.rst new file mode 100755 index 000000000000..fd4d9bc7d9ba --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/metrics_service_v2.rst @@ -0,0 +1,10 @@ +MetricsServiceV2 +---------------------------------- + +.. automodule:: google.cloud.logging_v2.services.metrics_service_v2 + :members: + :inherited-members: + +.. automodule:: google.cloud.logging_v2.services.metrics_service_v2.pagers + :members: + :inherited-members: diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/services_.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/services_.rst new file mode 100755 index 000000000000..d7a0471b13c3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/services_.rst @@ -0,0 +1,8 @@ +Services for Google Cloud Logging v2 API +======================================== +.. toctree:: + :maxdepth: 2 + + config_service_v2 + logging_service_v2 + metrics_service_v2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/types_.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/types_.rst new file mode 100755 index 000000000000..a3ac7064b329 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/logging_v2/types_.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Logging v2 API +===================================== + +.. 
automodule:: google.cloud.logging_v2.types + :members: + :show-inheritance: diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py new file mode 100755 index 000000000000..c747bb3607b6 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.logging import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.logging_v2.services.config_service_v2.client import BaseConfigServiceV2Client +from google.cloud.logging_v2.services.config_service_v2.async_client import BaseConfigServiceV2AsyncClient +from google.cloud.logging_v2.services.logging_service_v2.client import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2.async_client import LoggingServiceV2AsyncClient +from google.cloud.logging_v2.services.metrics_service_v2.client import BaseMetricsServiceV2Client +from google.cloud.logging_v2.services.metrics_service_v2.async_client import BaseMetricsServiceV2AsyncClient + +from google.cloud.logging_v2.types.log_entry import LogEntry +from google.cloud.logging_v2.types.log_entry import LogEntryOperation +from google.cloud.logging_v2.types.log_entry import LogEntrySourceLocation +from google.cloud.logging_v2.types.log_entry import LogSplit +from google.cloud.logging_v2.types.logging import DeleteLogRequest +from google.cloud.logging_v2.types.logging import ListLogEntriesRequest +from google.cloud.logging_v2.types.logging import ListLogEntriesResponse +from google.cloud.logging_v2.types.logging import ListLogsRequest +from google.cloud.logging_v2.types.logging import ListLogsResponse +from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsRequest +from google.cloud.logging_v2.types.logging import ListMonitoredResourceDescriptorsResponse +from google.cloud.logging_v2.types.logging import TailLogEntriesRequest +from google.cloud.logging_v2.types.logging import TailLogEntriesResponse +from google.cloud.logging_v2.types.logging import WriteLogEntriesPartialErrors +from google.cloud.logging_v2.types.logging import WriteLogEntriesRequest +from google.cloud.logging_v2.types.logging import WriteLogEntriesResponse +from google.cloud.logging_v2.types.logging_config import BigQueryDataset +from 
google.cloud.logging_v2.types.logging_config import BigQueryOptions +from google.cloud.logging_v2.types.logging_config import BucketMetadata +from google.cloud.logging_v2.types.logging_config import CmekSettings +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesMetadata +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesRequest +from google.cloud.logging_v2.types.logging_config import CopyLogEntriesResponse +from google.cloud.logging_v2.types.logging_config import CreateBucketRequest +from google.cloud.logging_v2.types.logging_config import CreateExclusionRequest +from google.cloud.logging_v2.types.logging_config import CreateLinkRequest +from google.cloud.logging_v2.types.logging_config import CreateSinkRequest +from google.cloud.logging_v2.types.logging_config import CreateViewRequest +from google.cloud.logging_v2.types.logging_config import DeleteBucketRequest +from google.cloud.logging_v2.types.logging_config import DeleteExclusionRequest +from google.cloud.logging_v2.types.logging_config import DeleteLinkRequest +from google.cloud.logging_v2.types.logging_config import DeleteSinkRequest +from google.cloud.logging_v2.types.logging_config import DeleteViewRequest +from google.cloud.logging_v2.types.logging_config import GetBucketRequest +from google.cloud.logging_v2.types.logging_config import GetCmekSettingsRequest +from google.cloud.logging_v2.types.logging_config import GetExclusionRequest +from google.cloud.logging_v2.types.logging_config import GetLinkRequest +from google.cloud.logging_v2.types.logging_config import GetSettingsRequest +from google.cloud.logging_v2.types.logging_config import GetSinkRequest +from google.cloud.logging_v2.types.logging_config import GetViewRequest +from google.cloud.logging_v2.types.logging_config import IndexConfig +from google.cloud.logging_v2.types.logging_config import Link +from google.cloud.logging_v2.types.logging_config import LinkMetadata +from 
google.cloud.logging_v2.types.logging_config import ListBucketsRequest +from google.cloud.logging_v2.types.logging_config import ListBucketsResponse +from google.cloud.logging_v2.types.logging_config import ListExclusionsRequest +from google.cloud.logging_v2.types.logging_config import ListExclusionsResponse +from google.cloud.logging_v2.types.logging_config import ListLinksRequest +from google.cloud.logging_v2.types.logging_config import ListLinksResponse +from google.cloud.logging_v2.types.logging_config import ListSinksRequest +from google.cloud.logging_v2.types.logging_config import ListSinksResponse +from google.cloud.logging_v2.types.logging_config import ListViewsRequest +from google.cloud.logging_v2.types.logging_config import ListViewsResponse +from google.cloud.logging_v2.types.logging_config import LocationMetadata +from google.cloud.logging_v2.types.logging_config import LogBucket +from google.cloud.logging_v2.types.logging_config import LogExclusion +from google.cloud.logging_v2.types.logging_config import LogSink +from google.cloud.logging_v2.types.logging_config import LogView +from google.cloud.logging_v2.types.logging_config import Settings +from google.cloud.logging_v2.types.logging_config import UndeleteBucketRequest +from google.cloud.logging_v2.types.logging_config import UpdateBucketRequest +from google.cloud.logging_v2.types.logging_config import UpdateCmekSettingsRequest +from google.cloud.logging_v2.types.logging_config import UpdateExclusionRequest +from google.cloud.logging_v2.types.logging_config import UpdateSettingsRequest +from google.cloud.logging_v2.types.logging_config import UpdateSinkRequest +from google.cloud.logging_v2.types.logging_config import UpdateViewRequest +from google.cloud.logging_v2.types.logging_config import IndexType +from google.cloud.logging_v2.types.logging_config import LifecycleState +from google.cloud.logging_v2.types.logging_config import OperationState +from google.cloud.logging_v2.types.logging_metrics 
import CreateLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import DeleteLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import GetLogMetricRequest +from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsRequest +from google.cloud.logging_v2.types.logging_metrics import ListLogMetricsResponse +from google.cloud.logging_v2.types.logging_metrics import LogMetric +from google.cloud.logging_v2.types.logging_metrics import UpdateLogMetricRequest + +__all__ = ('BaseConfigServiceV2Client', + 'BaseConfigServiceV2AsyncClient', + 'LoggingServiceV2Client', + 'LoggingServiceV2AsyncClient', + 'BaseMetricsServiceV2Client', + 'BaseMetricsServiceV2AsyncClient', + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'LogSplit', + 'DeleteLogRequest', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'BigQueryDataset', + 'BigQueryOptions', + 'BucketMetadata', + 'CmekSettings', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesRequest', + 'CopyLogEntriesResponse', + 'CreateBucketRequest', + 'CreateExclusionRequest', + 'CreateLinkRequest', + 'CreateSinkRequest', + 'CreateViewRequest', + 'DeleteBucketRequest', + 'DeleteExclusionRequest', + 'DeleteLinkRequest', + 'DeleteSinkRequest', + 'DeleteViewRequest', + 'GetBucketRequest', + 'GetCmekSettingsRequest', + 'GetExclusionRequest', + 'GetLinkRequest', + 'GetSettingsRequest', + 'GetSinkRequest', + 'GetViewRequest', + 'IndexConfig', + 'Link', + 'LinkMetadata', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'ListLinksRequest', + 'ListLinksResponse', + 'ListSinksRequest', + 'ListSinksResponse', + 'ListViewsRequest', + 'ListViewsResponse', 
+ 'LocationMetadata', + 'LogBucket', + 'LogExclusion', + 'LogSink', + 'LogView', + 'Settings', + 'UndeleteBucketRequest', + 'UpdateBucketRequest', + 'UpdateCmekSettingsRequest', + 'UpdateExclusionRequest', + 'UpdateSettingsRequest', + 'UpdateSinkRequest', + 'UpdateViewRequest', + 'IndexType', + 'LifecycleState', + 'OperationState', + 'CreateLogMetricRequest', + 'DeleteLogMetricRequest', + 'GetLogMetricRequest', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'LogMetric', + 'UpdateLogMetricRequest', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py new file mode 100755 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/py.typed b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/py.typed new file mode 100755 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py new file mode 100755 index 000000000000..f70b54f4818e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.logging_v2 import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.config_service_v2 import BaseConfigServiceV2Client +from .services.config_service_v2 import BaseConfigServiceV2AsyncClient +from .services.logging_service_v2 import LoggingServiceV2Client +from .services.logging_service_v2 import LoggingServiceV2AsyncClient +from .services.metrics_service_v2 import BaseMetricsServiceV2Client +from .services.metrics_service_v2 import BaseMetricsServiceV2AsyncClient + +from .types.log_entry import LogEntry +from .types.log_entry import LogEntryOperation +from .types.log_entry import LogEntrySourceLocation +from .types.log_entry import LogSplit +from .types.logging import DeleteLogRequest +from .types.logging import ListLogEntriesRequest +from .types.logging import ListLogEntriesResponse +from .types.logging import ListLogsRequest +from .types.logging import ListLogsResponse +from .types.logging import ListMonitoredResourceDescriptorsRequest +from .types.logging import ListMonitoredResourceDescriptorsResponse +from .types.logging import TailLogEntriesRequest +from .types.logging import TailLogEntriesResponse +from .types.logging import WriteLogEntriesPartialErrors +from .types.logging import WriteLogEntriesRequest +from .types.logging import WriteLogEntriesResponse +from .types.logging_config import BigQueryDataset +from .types.logging_config import BigQueryOptions +from .types.logging_config import BucketMetadata +from .types.logging_config import CmekSettings +from .types.logging_config import CopyLogEntriesMetadata +from .types.logging_config import CopyLogEntriesRequest +from .types.logging_config import CopyLogEntriesResponse +from .types.logging_config import CreateBucketRequest +from .types.logging_config import CreateExclusionRequest +from .types.logging_config import CreateLinkRequest +from .types.logging_config import CreateSinkRequest +from .types.logging_config import CreateViewRequest 
+from .types.logging_config import DeleteBucketRequest +from .types.logging_config import DeleteExclusionRequest +from .types.logging_config import DeleteLinkRequest +from .types.logging_config import DeleteSinkRequest +from .types.logging_config import DeleteViewRequest +from .types.logging_config import GetBucketRequest +from .types.logging_config import GetCmekSettingsRequest +from .types.logging_config import GetExclusionRequest +from .types.logging_config import GetLinkRequest +from .types.logging_config import GetSettingsRequest +from .types.logging_config import GetSinkRequest +from .types.logging_config import GetViewRequest +from .types.logging_config import IndexConfig +from .types.logging_config import Link +from .types.logging_config import LinkMetadata +from .types.logging_config import ListBucketsRequest +from .types.logging_config import ListBucketsResponse +from .types.logging_config import ListExclusionsRequest +from .types.logging_config import ListExclusionsResponse +from .types.logging_config import ListLinksRequest +from .types.logging_config import ListLinksResponse +from .types.logging_config import ListSinksRequest +from .types.logging_config import ListSinksResponse +from .types.logging_config import ListViewsRequest +from .types.logging_config import ListViewsResponse +from .types.logging_config import LocationMetadata +from .types.logging_config import LogBucket +from .types.logging_config import LogExclusion +from .types.logging_config import LogSink +from .types.logging_config import LogView +from .types.logging_config import Settings +from .types.logging_config import UndeleteBucketRequest +from .types.logging_config import UpdateBucketRequest +from .types.logging_config import UpdateCmekSettingsRequest +from .types.logging_config import UpdateExclusionRequest +from .types.logging_config import UpdateSettingsRequest +from .types.logging_config import UpdateSinkRequest +from .types.logging_config import UpdateViewRequest +from 
.types.logging_config import IndexType +from .types.logging_config import LifecycleState +from .types.logging_config import OperationState +from .types.logging_metrics import CreateLogMetricRequest +from .types.logging_metrics import DeleteLogMetricRequest +from .types.logging_metrics import GetLogMetricRequest +from .types.logging_metrics import ListLogMetricsRequest +from .types.logging_metrics import ListLogMetricsResponse +from .types.logging_metrics import LogMetric +from .types.logging_metrics import UpdateLogMetricRequest + +__all__ = ( + 'BaseConfigServiceV2AsyncClient', + 'BaseMetricsServiceV2AsyncClient', + 'LoggingServiceV2AsyncClient', +'BaseConfigServiceV2Client', +'BaseMetricsServiceV2Client', +'BigQueryDataset', +'BigQueryOptions', +'BucketMetadata', +'CmekSettings', +'CopyLogEntriesMetadata', +'CopyLogEntriesRequest', +'CopyLogEntriesResponse', +'CreateBucketRequest', +'CreateExclusionRequest', +'CreateLinkRequest', +'CreateLogMetricRequest', +'CreateSinkRequest', +'CreateViewRequest', +'DeleteBucketRequest', +'DeleteExclusionRequest', +'DeleteLinkRequest', +'DeleteLogMetricRequest', +'DeleteLogRequest', +'DeleteSinkRequest', +'DeleteViewRequest', +'GetBucketRequest', +'GetCmekSettingsRequest', +'GetExclusionRequest', +'GetLinkRequest', +'GetLogMetricRequest', +'GetSettingsRequest', +'GetSinkRequest', +'GetViewRequest', +'IndexConfig', +'IndexType', +'LifecycleState', +'Link', +'LinkMetadata', +'ListBucketsRequest', +'ListBucketsResponse', +'ListExclusionsRequest', +'ListExclusionsResponse', +'ListLinksRequest', +'ListLinksResponse', +'ListLogEntriesRequest', +'ListLogEntriesResponse', +'ListLogMetricsRequest', +'ListLogMetricsResponse', +'ListLogsRequest', +'ListLogsResponse', +'ListMonitoredResourceDescriptorsRequest', +'ListMonitoredResourceDescriptorsResponse', +'ListSinksRequest', +'ListSinksResponse', +'ListViewsRequest', +'ListViewsResponse', +'LocationMetadata', +'LogBucket', +'LogEntry', +'LogEntryOperation', +'LogEntrySourceLocation', 
+'LogExclusion', +'LogMetric', +'LogSink', +'LogSplit', +'LogView', +'LoggingServiceV2Client', +'OperationState', +'Settings', +'TailLogEntriesRequest', +'TailLogEntriesResponse', +'UndeleteBucketRequest', +'UpdateBucketRequest', +'UpdateCmekSettingsRequest', +'UpdateExclusionRequest', +'UpdateLogMetricRequest', +'UpdateSettingsRequest', +'UpdateSinkRequest', +'UpdateViewRequest', +'WriteLogEntriesPartialErrors', +'WriteLogEntriesRequest', +'WriteLogEntriesResponse', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json new file mode 100755 index 000000000000..812fb89795a2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json @@ -0,0 +1,481 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.logging_v2", + "protoPackage": "google.logging.v2", + "schema": "1.0", + "services": { + "ConfigServiceV2": { + "clients": { + "grpc": { + "libraryClient": "BaseConfigServiceV2Client", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, + "_CopyLogEntries": { + "methods": [ + "_copy_log_entries" + ] + }, + "_CreateExclusion": { + "methods": [ + "_create_exclusion" + ] + }, + "_CreateLink": { + "methods": [ + "_create_link" + ] + }, + 
"_CreateSink": { + "methods": [ + "_create_sink" + ] + }, + "_CreateView": { + "methods": [ + "_create_view" + ] + }, + "_DeleteExclusion": { + "methods": [ + "_delete_exclusion" + ] + }, + "_DeleteLink": { + "methods": [ + "_delete_link" + ] + }, + "_DeleteSink": { + "methods": [ + "_delete_sink" + ] + }, + "_DeleteView": { + "methods": [ + "_delete_view" + ] + }, + "_GetCmekSettings": { + "methods": [ + "_get_cmek_settings" + ] + }, + "_GetExclusion": { + "methods": [ + "_get_exclusion" + ] + }, + "_GetLink": { + "methods": [ + "_get_link" + ] + }, + "_GetSettings": { + "methods": [ + "_get_settings" + ] + }, + "_GetSink": { + "methods": [ + "_get_sink" + ] + }, + "_GetView": { + "methods": [ + "_get_view" + ] + }, + "_ListExclusions": { + "methods": [ + "_list_exclusions" + ] + }, + "_ListLinks": { + "methods": [ + "_list_links" + ] + }, + "_ListSinks": { + "methods": [ + "_list_sinks" + ] + }, + "_ListViews": { + "methods": [ + "_list_views" + ] + }, + "_UpdateCmekSettings": { + "methods": [ + "_update_cmek_settings" + ] + }, + "_UpdateExclusion": { + "methods": [ + "_update_exclusion" + ] + }, + "_UpdateSettings": { + "methods": [ + "_update_settings" + ] + }, + "_UpdateSink": { + "methods": [ + "_update_sink" + ] + }, + "_UpdateView": { + "methods": [ + "_update_view" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BaseConfigServiceV2AsyncClient", + "rpcs": { + "CreateBucket": { + "methods": [ + "create_bucket" + ] + }, + "CreateBucketAsync": { + "methods": [ + "create_bucket_async" + ] + }, + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, + "_CopyLogEntries": { + "methods": [ + "_copy_log_entries" + ] + }, + "_CreateExclusion": 
{ + "methods": [ + "_create_exclusion" + ] + }, + "_CreateLink": { + "methods": [ + "_create_link" + ] + }, + "_CreateSink": { + "methods": [ + "_create_sink" + ] + }, + "_CreateView": { + "methods": [ + "_create_view" + ] + }, + "_DeleteExclusion": { + "methods": [ + "_delete_exclusion" + ] + }, + "_DeleteLink": { + "methods": [ + "_delete_link" + ] + }, + "_DeleteSink": { + "methods": [ + "_delete_sink" + ] + }, + "_DeleteView": { + "methods": [ + "_delete_view" + ] + }, + "_GetCmekSettings": { + "methods": [ + "_get_cmek_settings" + ] + }, + "_GetExclusion": { + "methods": [ + "_get_exclusion" + ] + }, + "_GetLink": { + "methods": [ + "_get_link" + ] + }, + "_GetSettings": { + "methods": [ + "_get_settings" + ] + }, + "_GetSink": { + "methods": [ + "_get_sink" + ] + }, + "_GetView": { + "methods": [ + "_get_view" + ] + }, + "_ListExclusions": { + "methods": [ + "_list_exclusions" + ] + }, + "_ListLinks": { + "methods": [ + "_list_links" + ] + }, + "_ListSinks": { + "methods": [ + "_list_sinks" + ] + }, + "_ListViews": { + "methods": [ + "_list_views" + ] + }, + "_UpdateCmekSettings": { + "methods": [ + "_update_cmek_settings" + ] + }, + "_UpdateExclusion": { + "methods": [ + "_update_exclusion" + ] + }, + "_UpdateSettings": { + "methods": [ + "_update_settings" + ] + }, + "_UpdateSink": { + "methods": [ + "_update_sink" + ] + }, + "_UpdateView": { + "methods": [ + "_update_view" + ] + } + } + } + } + }, + "LoggingServiceV2": { + "clients": { + "grpc": { + "libraryClient": "LoggingServiceV2Client", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + }, + "grpc-async": { + "libraryClient": 
"LoggingServiceV2AsyncClient", + "rpcs": { + "DeleteLog": { + "methods": [ + "delete_log" + ] + }, + "ListLogEntries": { + "methods": [ + "list_log_entries" + ] + }, + "ListLogs": { + "methods": [ + "list_logs" + ] + }, + "ListMonitoredResourceDescriptors": { + "methods": [ + "list_monitored_resource_descriptors" + ] + }, + "TailLogEntries": { + "methods": [ + "tail_log_entries" + ] + }, + "WriteLogEntries": { + "methods": [ + "write_log_entries" + ] + } + } + } + } + }, + "MetricsServiceV2": { + "clients": { + "grpc": { + "libraryClient": "BaseMetricsServiceV2Client", + "rpcs": { + "_CreateLogMetric": { + "methods": [ + "_create_log_metric" + ] + }, + "_DeleteLogMetric": { + "methods": [ + "_delete_log_metric" + ] + }, + "_GetLogMetric": { + "methods": [ + "_get_log_metric" + ] + }, + "_ListLogMetrics": { + "methods": [ + "_list_log_metrics" + ] + }, + "_UpdateLogMetric": { + "methods": [ + "_update_log_metric" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BaseMetricsServiceV2AsyncClient", + "rpcs": { + "_CreateLogMetric": { + "methods": [ + "_create_log_metric" + ] + }, + "_DeleteLogMetric": { + "methods": [ + "_delete_log_metric" + ] + }, + "_GetLogMetric": { + "methods": [ + "_get_log_metric" + ] + }, + "_ListLogMetrics": { + "methods": [ + "_list_log_metrics" + ] + }, + "_UpdateLogMetric": { + "methods": [ + "_update_log_metric" + ] + } + } + } + } + } + } +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py new file mode 100755 index 000000000000..558c8aab67c5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/py.typed b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/py.typed new file mode 100755 index 000000000000..6c7420d0d9cb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-logging package uses inline types. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py new file mode 100755 index 000000000000..8f6cf068242c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py new file mode 100755 index 000000000000..068c5d2db9ca --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import BaseConfigServiceV2Client +from .async_client import BaseConfigServiceV2AsyncClient + +__all__ = ( + 'BaseConfigServiceV2Client', + 'BaseConfigServiceV2AsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py new file mode 100755 index 000000000000..ea6f8531a563 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -0,0 +1,4046 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.logging_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport +from .client import BaseConfigServiceV2Client + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class BaseConfigServiceV2AsyncClient: + """Service for configuring sinks used to route log entries.""" + + _client: BaseConfigServiceV2Client + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = BaseConfigServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BaseConfigServiceV2Client._DEFAULT_UNIVERSE + + cmek_settings_path = staticmethod(BaseConfigServiceV2Client.cmek_settings_path) + parse_cmek_settings_path = staticmethod(BaseConfigServiceV2Client.parse_cmek_settings_path) + link_path = staticmethod(BaseConfigServiceV2Client.link_path) + parse_link_path = staticmethod(BaseConfigServiceV2Client.parse_link_path) + log_bucket_path = staticmethod(BaseConfigServiceV2Client.log_bucket_path) + parse_log_bucket_path = staticmethod(BaseConfigServiceV2Client.parse_log_bucket_path) + log_exclusion_path = staticmethod(BaseConfigServiceV2Client.log_exclusion_path) + parse_log_exclusion_path = staticmethod(BaseConfigServiceV2Client.parse_log_exclusion_path) + log_sink_path = staticmethod(BaseConfigServiceV2Client.log_sink_path) + parse_log_sink_path = staticmethod(BaseConfigServiceV2Client.parse_log_sink_path) + log_view_path = staticmethod(BaseConfigServiceV2Client.log_view_path) + parse_log_view_path = staticmethod(BaseConfigServiceV2Client.parse_log_view_path) + settings_path = staticmethod(BaseConfigServiceV2Client.settings_path) + parse_settings_path = staticmethod(BaseConfigServiceV2Client.parse_settings_path) + common_billing_account_path = staticmethod(BaseConfigServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(BaseConfigServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(BaseConfigServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(BaseConfigServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(BaseConfigServiceV2Client.common_organization_path) + parse_common_organization_path = 
staticmethod(BaseConfigServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(BaseConfigServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(BaseConfigServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(BaseConfigServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(BaseConfigServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BaseConfigServiceV2AsyncClient: The constructed client. + """ + return BaseConfigServiceV2Client.from_service_account_info.__func__(BaseConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BaseConfigServiceV2AsyncClient: The constructed client. + """ + return BaseConfigServiceV2Client.from_service_account_file.__func__(BaseConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BaseConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> ConfigServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + ConfigServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = BaseConfigServiceV2Client.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the base config service v2 async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ConfigServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BaseConfigServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.BaseConfigServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.ConfigServiceV2", + "credentialsType": None, + } + ) + + async def list_buckets(self, + request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None, + *, + parent: 
Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListBucketsAsyncPager: + r"""Lists log buckets. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_buckets(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]]): + The request object. The parameters to ``ListBuckets``. + parent (:class:`str`): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager: + The response from ListBuckets. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListBucketsRequest): + request = logging_config.ListBucketsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_buckets] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListBucketsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_bucket(self, + request: Optional[Union[logging_config.GetBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: + r"""Gets a log bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_get_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bucket(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetBucketRequest, dict]]): + The request object. The parameters to ``GetBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository in which log + entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetBucketRequest): + request = logging_config.GetBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.get_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_bucket_async(self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_bucket_async(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + async def update_bucket_async(self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_bucket_async(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.LogBucket` + Describes a repository in which log entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket_async] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.LogBucket, + metadata_type=logging_config.BucketMetadata, + ) + + # Done; return the response. + return response + + async def create_bucket(self, + request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: + r"""Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_create_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = await client.create_bucket(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]]): + The request object. The parameters to ``CreateBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository in which log + entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateBucketRequest): + request = logging_config.CreateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.create_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_bucket(self, + request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogBucket: + r"""Updates a log bucket. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_update_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.update_bucket(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]]): + The request object. The parameters to ``UpdateBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogBucket: + Describes a repository in which log + entries are stored. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateBucketRequest): + request = logging_config.UpdateBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.update_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_bucket(self, + request: Optional[Union[logging_config.DeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_delete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.delete_bucket(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]]): + The request object. The parameters to ``DeleteBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def undelete_bucket(self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_undelete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.undelete_bucket(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]]): + The request object. The parameters to ``UndeleteBucket``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.undelete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def _list_views(self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListViewsAsyncPager: + r"""Lists views on a log bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__list_views(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_views(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListViewsRequest, dict]]): + The request object. The parameters to ``ListViews``. + parent (:class:`str`): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsAsyncPager: + The response from ListViews. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, logging_config.ListViewsRequest):
+            request = logging_config.ListViewsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[self._client._transport._list_views]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers._ListViewsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def _get_view(self,
+            request: Optional[Union[logging_config.GetViewRequest, dict]] = None,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+            ) -> logging_config.LogView:
+        r"""Gets a view on a log bucket.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = await client._get_view(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetViewRequest, dict]]): + The request object. The parameters to ``GetView``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over log entries in + a bucket. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_view] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _create_view(self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__create_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = await client._create_view(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateViewRequest, dict]]): + The request object. The parameters to ``CreateView``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over log entries in + a bucket. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._create_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _update_view(self, + request: Optional[Union[logging_config.UpdateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: + r"""Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__update_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = await client._update_view(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateViewRequest, dict]]): + The request object. The parameters to ``UpdateView``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over log entries in + a bucket. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateViewRequest): + request = logging_config.UpdateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport._update_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _delete_view(self, + request: Optional[Union[logging_config.DeleteViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__delete_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + await client._delete_view(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]]): + The request object. The parameters to ``DeleteView``. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._delete_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def _list_sinks(self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListSinksAsyncPager: + r"""Lists sinks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__list_sinks(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_sinks(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListSinksRequest, dict]]): + The request object. The parameters to ``ListSinks``. + parent (:class:`str`): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksAsyncPager: + Result returned from ListSinks. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._list_sinks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers._ListSinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _get_sink(self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = await client._get_sink(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetSinkRequest, dict]]): + The request object. The parameters to ``GetSink``. + sink_name (:class:`str`): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def _create_sink(self, + request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: + r"""Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__create_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = await client._create_sink(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]]): + The request object. The parameters to ``CreateSink``. + parent (:class:`str`): + Required. 
The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`google.cloud.logging_v2.types.LogSink`): + Required. The new sink, whose ``name`` parameter is a + sink identifier that is not already in use. + + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, sink]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateSinkRequest): + request = logging_config.CreateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if sink is not None: + request.sink = sink + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._create_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _update_sink(self, + request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + sink: Optional[logging_config.LogSink] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: + r"""Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. 
+ + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__update_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = await client._update_sink(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]]): + The request object. The parameters to ``UpdateSink``. + sink_name (:class:`str`): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + sink (:class:`google.cloud.logging_v2.types.LogSink`): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. 
+ + This corresponds to the ``sink`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask that specifies the fields in + ``sink`` that need an update. A sink field will be + overwritten if, and only if, it is in the update mask. + ``name`` and output only fields cannot be updated. + + An empty ``updateMask`` is temporarily treated as using + the following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed + and specifying an empty ``updateMask`` will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + For example: ``updateMask=filter`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name, sink, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSinkRequest): + request = logging_config.UpdateSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + if sink is not None: + request.sink = sink + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._update_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _delete_sink(self, + request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__delete_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + await client._delete_sink(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]]): + The request object. The parameters to ``DeleteSink``. + sink_name (:class:`str`): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteSinkRequest): + request = logging_config.DeleteSinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._delete_sink] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("sink_name", request.sink_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def _create_link(self, + request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + link: Optional[logging_config.Link] = None, + link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__create_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client._create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]]): + The request object. The parameters to CreateLink. + parent (:class:`str`): + Required. The full resource name of the bucket to create + a link for. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link (:class:`google.cloud.logging_v2.types.Link`): + Required. The new link. + This corresponds to the ``link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + link_id (:class:`str`): + Required. The ID to use for the link. The link_id can + have up to 100 characters. A valid link_id must only + have alphanumeric characters and underscores within it. 
+ + This corresponds to the ``link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.Link` Describes a + link connected to an analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, link, link_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateLinkRequest): + request = logging_config.CreateLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if link is not None: + request.link = link + if link_id is not None: + request.link_id = link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[self._client._transport._create_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.Link, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + async def _delete_link(self, + request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__delete_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client._delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]]): + The request object. The parameters to DeleteLink. + name (:class:`str`): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteLinkRequest): + request = logging_config.DeleteLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._delete_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=logging_config.LinkMetadata, + ) + + # Done; return the response. + return response + + async def _list_links(self, + request: Optional[Union[logging_config.ListLinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListLinksAsyncPager: + r"""Lists links. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__list_links(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLinksRequest, dict]]): + The request object. The parameters to ListLinks. + parent (:class:`str`): + Required. 
The parent resource whose links are to be
+                listed:
+
+                "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/"
+                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+                "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksAsyncPager:
+                The response from ListLinks.
+
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, logging_config.ListLinksRequest):
+            request = logging_config.ListLinksRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._list_links] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers._ListLinksAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _get_link(self, + request: Optional[Union[logging_config.GetLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Link: + r"""Gets a link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client._get_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetLinkRequest, dict]]): + The request object. The parameters to GetLink. + name (:class:`str`): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.Link: + Describes a link connected to an + analytics enabled bucket. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetLinkRequest): + request = logging_config.GetLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _list_exclusions(self, + request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListExclusionsAsyncPager: + r"""Lists all the exclusions on the \_Default sink in a parent + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__list_exclusions(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_exclusions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]]): + The request object. The parameters to ``ListExclusions``. + parent (:class:`str`): + Required. The parent resource whose exclusions are to be + listed. + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsAsyncPager: + Result returned from ListExclusions. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListExclusionsRequest): + request = logging_config.ListExclusionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._list_exclusions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers._ListExclusionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _get_exclusion(self, + request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: + r"""Gets the description of an exclusion in the \_Default sink. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = await client._get_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]]): + The request object. The parameters to ``GetExclusion``. + name (:class:`str`): + Required. The resource name of an existing exclusion: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetExclusionRequest): + request = logging_config.GetExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def _create_exclusion(self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__create_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = await client._create_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]]): + The request object. The parameters to ``CreateExclusion``. + parent (:class:`str`): + Required. 
The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _update_exclusion(self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__update_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = await client._update_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]]): + The request object. The parameters to ``UpdateExclusion``. + name (:class:`str`): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (:class:`google.cloud.logging_v2.types.LogExclusion`): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. A non-empty list of fields to change in the + existing exclusion. New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _delete_exclusion(self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__delete_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + await client._delete_exclusion(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]]): + The request object. The parameters to ``DeleteExclusion``. + name (:class:`str`): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._delete_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def _get_cmek_settings(self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._get_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]]): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. 
+ + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _update_cmek_settings(self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 
+ + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__update_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._update_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]]): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. 
Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._update_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _get_settings(self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + name (:class:`str`): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def _update_settings(self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__update_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._update_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]]): + The request object. The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + settings (:class:`google.cloud.logging_v2.types.Settings`): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._update_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def _copy_log_entries(self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__copy_log_entries(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client._copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._copy_log_entries] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "BaseConfigServiceV2AsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BaseConfigServiceV2AsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py new file mode 100755 index 000000000000..4c7eedaf662f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -0,0 +1,4450 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.logging_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.logging_v2.services.config_service_v2 import pagers +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import ConfigServiceV2GrpcTransport +from .transports.grpc_asyncio import 
class BaseConfigServiceV2ClientMeta(type):
    """Metaclass for the ConfigServiceV2 client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[ConfigServiceV2Transport]]
    _transport_registry["grpc"] = ConfigServiceV2GrpcTransport
    _transport_registry["grpc_asyncio"] = ConfigServiceV2GrpcAsyncIOTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[ConfigServiceV2Transport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class BaseConfigServiceV2Client(metaclass=BaseConfigServiceV2ClientMeta):
    """Service for configuring sinks used to route log entries."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # Fix: the named groups had been stripped from this pattern
        # ("(?P[^.]+)" is a regex syntax error); the names are restored to
        # match the four-way m.groups() unpacking below.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        # Already mTLS, or not a Google domain: nothing to convert.
        if mtls or not googledomain:
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
    DEFAULT_ENDPOINT = "logging.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}"
    _DEFAULT_UNIVERSE = "googleapis.com"

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            BaseConfigServiceV2Client: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            BaseConfigServiceV2Client: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    # Alias kept for backward compatibility with older generated surfaces.
    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> ConfigServiceV2Transport:
        """Returns the transport used by the client instance.

        Returns:
            ConfigServiceV2Transport: The transport used by the client
                instance.
        """
        return self._transport
+ + Returns: + ConfigServiceV2Transport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def cmek_settings_path(project: str,) -> str: + """Returns a fully-qualified cmek_settings string.""" + return "projects/{project}/cmekSettings".format(project=project, ) + + @staticmethod + def parse_cmek_settings_path(path: str) -> Dict[str,str]: + """Parses a cmek_settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/cmekSettings$", path) + return m.groupdict() if m else {} + + @staticmethod + def link_path(project: str,location: str,bucket: str,link: str,) -> str: + """Returns a fully-qualified link string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + + @staticmethod + def parse_link_path(path: str) -> Dict[str,str]: + """Parses a link path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/links/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_bucket_path(project: str,location: str,bucket: str,) -> str: + """Returns a fully-qualified log_bucket string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + + @staticmethod + def parse_log_bucket_path(path: str) -> Dict[str,str]: + """Parses a log_bucket path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_exclusion_path(project: str,exclusion: str,) -> str: + """Returns a fully-qualified log_exclusion string.""" + return "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + + @staticmethod + def parse_log_exclusion_path(path: str) -> Dict[str,str]: + """Parses a log_exclusion path into its component segments.""" + m = 
re.match(r"^projects/(?P.+?)/exclusions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_sink_path(project: str,sink: str,) -> str: + """Returns a fully-qualified log_sink string.""" + return "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + + @staticmethod + def parse_log_sink_path(path: str) -> Dict[str,str]: + """Parses a log_sink path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/sinks/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def log_view_path(project: str,location: str,bucket: str,view: str,) -> str: + """Returns a fully-qualified log_view string.""" + return "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + + @staticmethod + def parse_log_view_path(path: str) -> Dict[str,str]: + """Parses a log_view path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/buckets/(?P.+?)/views/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def settings_path(project: str,) -> str: + """Returns a fully-qualified settings string.""" + return "projects/{project}/settings".format(project=project, ) + + @staticmethod + def parse_settings_path(path: str) -> Dict[str,str]: + """Parses a settings path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/settings$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
@classmethod
def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
    """Deprecated. Return the API endpoint and client cert source for mutual TLS.

    The client certificate source is only consulted when the
    ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` environment variable is "true";
    in that case ``client_options.client_cert_source`` wins over the
    default device certificate (if one exists). The endpoint honours
    ``client_options.api_endpoint`` first, then the
    ``GOOGLE_API_USE_MTLS_ENDPOINT`` policy ("always" forces the mTLS
    endpoint, "never" forces the regular one, "auto" picks mTLS only
    when a cert source was found). More details at
    https://google.aip.dev/auth/4114.

    Args:
        client_options (google.api_core.client_options.ClientOptions): Custom
            options for the client; only ``api_endpoint`` and
            ``client_cert_source`` are consulted here.

    Returns:
        Tuple[str, Callable[[], Tuple[bytes, bytes]]]: the API endpoint and
        the client cert source to use.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: If any errors happen.
    """
    warnings.warn(
        "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
        DeprecationWarning,
    )
    options = client_options if client_options is not None else client_options_lib.ClientOptions()

    cert_env = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if cert_env not in ("true", "false"):
        raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
    if mtls_env not in ("auto", "never", "always"):
        raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

    # Resolve the client certificate source: explicit option first, then
    # the platform default — and only when the env flag opts in.
    cert_source = None
    if cert_env == "true":
        if options.client_cert_source:
            cert_source = options.client_cert_source
        elif mtls.has_default_client_cert_source():
            cert_source = mtls.default_client_cert_source()

    # Resolve the endpoint: explicit override beats the mTLS policy.
    if options.api_endpoint is not None:
        endpoint = options.api_endpoint
    elif mtls_env == "always" or (mtls_env == "auto" and cert_source):
        endpoint = cls.DEFAULT_MTLS_ENDPOINT
    else:
        endpoint = cls.DEFAULT_ENDPOINT

    return endpoint, cert_source
@staticmethod
def _read_environment_variables():
    """Return the environment variables used by the client.

    Returns:
        Tuple[bool, str, str]: whether to use a client certificate
        (``GOOGLE_API_USE_CLIENT_CERTIFICATE`` == "true"), the mTLS
        endpoint policy (``GOOGLE_API_USE_MTLS_ENDPOINT``), and the
        configured universe domain (``GOOGLE_CLOUD_UNIVERSE_DOMAIN``,
        ``None`` when unset).

    Raises:
        ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
            any of ["true", "false"].
        google.auth.exceptions.MutualTLSChannelError: If
            GOOGLE_API_USE_MTLS_ENDPOINT is not any of
            ["auto", "never", "always"].
    """
    cert_env = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
    mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
    universe_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
    if cert_env not in ("true", "false"):
        raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
    if mtls_env not in ("auto", "never", "always"):
        raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")
    return cert_env == "true", mtls_env, universe_env

@staticmethod
def _get_client_cert_source(provided_cert_source, use_cert_flag):
    """Return the client cert source to be used by the client.

    Args:
        provided_cert_source (bytes): The client certificate source provided.
        use_cert_flag (bool): A flag indicating whether to use the client certificate.

    Returns:
        bytes or None: The client cert source to be used by the client.
    """
    # Certificates are opt-in; without the flag we never probe the device.
    if not use_cert_flag:
        return None
    if provided_cert_source:
        return provided_cert_source
    if mtls.has_default_client_cert_source():
        return mtls.default_client_cert_source()
    return None
@staticmethod
def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
    """Return the API endpoint used by the client.

    Args:
        api_override (str): The API endpoint override. If specified, this is
            always the return value of this function and the other arguments
            are not used.
        client_cert_source (bytes): The client certificate source used by the client.
        universe_domain (str): The universe domain used by the client.
        use_mtls_endpoint (str): How to use the mTLS endpoint; one of
            "always", "auto", or "never".

    Returns:
        str: The API endpoint to be used by the client.
    """
    if api_override is not None:
        return api_override
    if use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
        # mTLS is only defined for the default (googleapis.com) universe.
        default_universe = BaseConfigServiceV2Client._DEFAULT_UNIVERSE
        if universe_domain != default_universe:
            raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {default_universe}.")
        return BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
    return BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

@staticmethod
def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str:
    """Return the universe domain used by the client.

    Args:
        client_universe_domain (Optional[str]): The universe domain configured
            via the client options.
        universe_domain_env (Optional[str]): The universe domain configured via
            the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

    Returns:
        str: The universe domain to be used by the client.

    Raises:
        ValueError: If the universe domain is an empty string.
    """
    # Precedence: client option > environment variable > library default.
    if client_universe_domain is not None:
        domain = client_universe_domain
    elif universe_domain_env is not None:
        domain = universe_domain_env
    else:
        domain = BaseConfigServiceV2Client._DEFAULT_UNIVERSE
    if not domain.strip():
        raise ValueError("Universe Domain cannot be an empty string.")
    return domain
def _validate_universe_domain(self):
    """Validate that the client's and credentials' universe domains agree.

    Returns:
        bool: True iff the configured universe domain is valid.

    Raises:
        ValueError: If the configured universe domain is not valid.
    """
    # NOTE (b/349488459): universe validation is disabled until further notice.
    return True

def _add_cred_info_for_auth_errors(
    self,
    error: core_exceptions.GoogleAPICallError
) -> None:
    """Append a credential-info string to error details for 401/403/404 errors.

    Args:
        error (google.api_core.exceptions.GoogleAPICallError): The error to
            add the cred info to.
    """
    if error.code not in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND):
        return

    cred = self._transport._credentials

    # get_cred_info is only available in google-auth>=2.35.0; skip silently
    # on older versions.
    get_info = getattr(cred, "get_cred_info", None)
    if get_info is None:
        return

    # ignore the type check since pypy test fails when get_cred_info
    # is not available
    cred_info = get_info()  # type: ignore
    if cred_info and hasattr(error._details, "append"):
        error._details.append(json.dumps(cred_info))

@property
def api_endpoint(self):
    """Return the API endpoint used by the client instance.

    Returns:
        str: The API endpoint used by the client instance.
    """
    return self._api_endpoint

@property
def universe_domain(self) -> str:
    """Return the universe domain used by the client instance.

    Returns:
        str: The universe domain used by the client instance.
    """
    return self._universe_domain
def __init__(self, *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Optional[Union[str, ConfigServiceV2Transport, Callable[..., ConfigServiceV2Transport]]] = None,
        client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        ) -> None:
    """Instantiates the base config service v2 client.

    Args:
        credentials (Optional[google.auth.credentials.Credentials]): The
            authorization credentials to attach to requests. These
            credentials identify the application to the service; if none
            are specified, the client will attempt to ascertain the
            credentials from the environment.
        transport (Optional[Union[str,ConfigServiceV2Transport,Callable[..., ConfigServiceV2Transport]]]):
            The transport to use, or a Callable that constructs and returns a new transport.
            If a Callable is given, it will be called with the same set of initialization
            arguments as used in the ConfigServiceV2Transport constructor.
            If set to None, a transport is chosen automatically.
        client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
            Custom options for the client.

            1. The ``api_endpoint`` property can be used to override the
            default endpoint provided by the client when ``transport`` is
            not explicitly provided. Only if this property is not set and
            ``transport`` was not explicitly provided, the endpoint is
            determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
            variable, which have one of the following values:
            "always" (always use the default mTLS endpoint), "never" (always
            use the default regular endpoint) and "auto" (auto-switch to the
            default mTLS endpoint if client certificate is present; this is
            the default value).

            2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
            is "true", then the ``client_cert_source`` property can be used
            to provide a client certificate for mTLS transport. If
            not provided, the default SSL client certificate will be used if
            present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
            set, no client certificate will be used.

            3. The ``universe_domain`` property can be used to override the
            default "googleapis.com" universe. Note that the ``api_endpoint``
            property still takes precedence; and ``universe_domain`` is
            currently not supported for mTLS.

        client_info (google.api_core.gapic_v1.client_info.ClientInfo):
            The client info used to send a user-agent string along with
            API requests. If ``None``, then default info will be used.
            Generally, you only need to set this if you're developing
            your own client library.

    Raises:
        google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
            creation failed for any reason.
    """
    # Normalize client_options: accept a dict, None, or a ClientOptions object.
    self._client_options = client_options
    if isinstance(self._client_options, dict):
        self._client_options = client_options_lib.from_dict(self._client_options)
    if self._client_options is None:
        self._client_options = client_options_lib.ClientOptions()
    self._client_options = cast(client_options_lib.ClientOptions, self._client_options)

    universe_domain_opt = getattr(self._client_options, 'universe_domain', None)

    # Resolve env-driven settings first; they feed the cert-source,
    # universe-domain, and endpoint decisions below.
    self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BaseConfigServiceV2Client._read_environment_variables()
    self._client_cert_source = BaseConfigServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
    self._universe_domain = BaseConfigServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env)
    self._api_endpoint = None  # updated below, depending on `transport`

    # Initialize the universe domain validation.
    self._is_universe_domain_valid = False

    if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
        # Setup logging.
        client_logging.initialize_logging()

    api_key_value = getattr(self._client_options, "api_key", None)
    if api_key_value and credentials:
        raise ValueError("client_options.api_key and credentials are mutually exclusive")

    # Save or instantiate the transport.
    # Ordinarily, we provide the transport, but allowing a custom transport
    # instance provides an extensibility point for unusual situations.
    transport_provided = isinstance(transport, ConfigServiceV2Transport)
    if transport_provided:
        # transport is a ConfigServiceV2Transport instance; it already owns
        # its credentials/scopes, so conflicting options are rejected.
        if credentials or self._client_options.credentials_file or api_key_value:
            raise ValueError("When providing a transport instance, "
                             "provide its credentials directly.")
        if self._client_options.scopes:
            raise ValueError(
                "When providing a transport instance, provide its scopes "
                "directly."
            )
        self._transport = cast(ConfigServiceV2Transport, transport)
        self._api_endpoint = self._transport.host

    # A provided transport's host wins; otherwise derive the endpoint from
    # options, cert source, universe domain and the mTLS policy.
    self._api_endpoint = (self._api_endpoint or
        BaseConfigServiceV2Client._get_api_endpoint(
            self._client_options.api_endpoint,
            self._client_cert_source,
            self._universe_domain,
            self._use_mtls_endpoint))

    if not transport_provided:
        import google.auth._default  # type: ignore

        # API keys are exchanged for credentials only when google-auth
        # supports it (the attribute is feature-detected).
        if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
            credentials = google.auth._default.get_api_key_credentials(api_key_value)

        transport_init: Union[Type[ConfigServiceV2Transport], Callable[..., ConfigServiceV2Transport]] = (
            BaseConfigServiceV2Client.get_transport_class(transport)
            if isinstance(transport, str) or transport is None
            else cast(Callable[..., ConfigServiceV2Transport], transport)
        )
        # initialize with the provided callable or the passed in class
        self._transport = transport_init(
            credentials=credentials,
            credentials_file=self._client_options.credentials_file,
            host=self._api_endpoint,
            scopes=self._client_options.scopes,
            client_cert_source_for_mtls=self._client_cert_source,
            quota_project_id=self._client_options.quota_project_id,
            client_info=client_info,
            always_use_jwt_access=True,
            api_audience=self._client_options.api_audience,
        )

    # Emit a one-time debug record about the created client (sync transports
    # only; async variants log from their own class).
    if "async" not in str(self._transport):
        if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
            _LOGGER.debug(
                "Created client `google.logging_v2.BaseConfigServiceV2Client`.",
                extra = {
                    "serviceName": "google.logging.v2.ConfigServiceV2",
                    "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
                    "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
                    "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
                } if hasattr(self._transport, "_credentials") else {
                    "serviceName": "google.logging.v2.ConfigServiceV2",
                    "credentialsType": None,
                }
            )
def list_buckets(self,
        request: Optional[Union[logging_config.ListBucketsRequest, dict]] = None,
        *,
        parent: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
        ) -> pagers.ListBucketsPager:
    r"""Lists log buckets.

    Args:
        request (Union[google.cloud.logging_v2.types.ListBucketsRequest, dict]):
            The request object. The parameters to ``ListBuckets``.
        parent (str):
            Required. The parent resource whose buckets are to be listed:

                "projects/[PROJECT_ID]/locations/[LOCATION_ID]"
                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]"
                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]"
                "folders/[FOLDER_ID]/locations/[LOCATION_ID]"

            Note: The locations portion of the resource must be
            specified, but supplying the character ``-`` in place of
            [LOCATION_ID] will return all buckets.

            This corresponds to the ``parent`` field on the ``request``
            instance; if ``request`` is provided, this should not be set.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which
            should be sent along with the request as metadata. Normally, each
            value must be of type `str`, but for metadata keys ending with the
            suffix `-bin`, the corresponding values must be of type `bytes`.

    Returns:
        google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager:
            The response from ListBuckets. Iterating over this object will
            yield results and resolve additional pages automatically.
    """
    # Create or coerce a protobuf request object.
    # - Quick check: If we got a request object, we should *not* have
    #   gotten any keyword arguments that map to the request.
    has_flattened_params = any([parent])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')

    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, logging_config.ListBucketsRequest):
        request = logging_config.ListBucketsRequest(request)
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.list_buckets]

    # Certain fields should be provided within the metadata header;
    # add these here (routing header so the backend can locate the resource).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # This method is paged; wrap the response in a pager, which provides
    # an `__iter__` convenience method.
    response = pagers.ListBucketsPager(
        method=rpc,
        request=request,
        response=response,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
def get_bucket(self,
        request: Optional[Union[logging_config.GetBucketRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
        ) -> logging_config.LogBucket:
    r"""Gets a log bucket.

    Args:
        request (Union[google.cloud.logging_v2.types.GetBucketRequest, dict]):
            The request object. The parameters to ``GetBucket``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which
            should be sent along with the request as metadata. Normally, each
            value must be of type `str`, but for metadata keys ending with the
            suffix `-bin`, the corresponding values must be of type `bytes`.

    Returns:
        google.cloud.logging_v2.types.LogBucket:
            Describes a repository in which log entries are stored.
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, logging_config.GetBucketRequest):
        request = logging_config.GetBucketRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.get_bucket]

    # Certain fields should be provided within the metadata header;
    # add these here (routing header so the backend can locate the resource).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
def create_bucket_async(self,
        request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
        ) -> operation.Operation:
    r"""Creates a log bucket asynchronously that can be used to store
    log entries.

    After a bucket has been created, the bucket's location cannot be
    changed.

    Args:
        request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]):
            The request object. The parameters to ``CreateBucket``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which
            should be sent along with the request as metadata. Normally, each
            value must be of type `str`, but for metadata keys ending with the
            suffix `-bin`, the corresponding values must be of type `bytes`.

    Returns:
        google.api_core.operation.Operation:
            An object representing a long-running operation.

            The result type for the operation will be
            :class:`google.cloud.logging_v2.types.LogBucket`
            Describes a repository in which log entries are stored.
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, logging_config.CreateBucketRequest):
        request = logging_config.CreateBucketRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.create_bucket_async]

    # Certain fields should be provided within the metadata header;
    # add these here (routing header so the backend can locate the resource).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap the response in an operation future so callers can block on
    # `.result()` or poll; the LRO resolves to a LogBucket.
    response = operation.from_gapic(
        response,
        self._transport.operations_client,
        logging_config.LogBucket,
        metadata_type=logging_config.BucketMetadata,
    )

    # Done; return the response.
    return response
def update_bucket_async(self,
        request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
        ) -> operation.Operation:
    r"""Updates a log bucket asynchronously.

    If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``,
    then ``FAILED_PRECONDITION`` will be returned.

    After a bucket has been created, the bucket's location cannot be
    changed.

    Args:
        request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]):
            The request object. The parameters to ``UpdateBucket``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which
            should be sent along with the request as metadata. Normally, each
            value must be of type `str`, but for metadata keys ending with the
            suffix `-bin`, the corresponding values must be of type `bytes`.

    Returns:
        google.api_core.operation.Operation:
            An object representing a long-running operation.

            The result type for the operation will be
            :class:`google.cloud.logging_v2.types.LogBucket`
            Describes a repository in which log entries are stored.
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, logging_config.UpdateBucketRequest):
        request = logging_config.UpdateBucketRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.update_bucket_async]

    # Certain fields should be provided within the metadata header;
    # add these here (routing header so the backend can locate the resource).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Wrap the response in an operation future so callers can block on
    # `.result()` or poll; the LRO resolves to a LogBucket.
    response = operation.from_gapic(
        response,
        self._transport.operations_client,
        logging_config.LogBucket,
        metadata_type=logging_config.BucketMetadata,
    )

    # Done; return the response.
    return response
def create_bucket(self,
        request: Optional[Union[logging_config.CreateBucketRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
        ) -> logging_config.LogBucket:
    r"""Creates a log bucket that can be used to store log entries.

    After a bucket has been created, the bucket's location cannot be
    changed.

    Args:
        request (Union[google.cloud.logging_v2.types.CreateBucketRequest, dict]):
            The request object. The parameters to ``CreateBucket``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which
            should be sent along with the request as metadata. Normally, each
            value must be of type `str`, but for metadata keys ending with the
            suffix `-bin`, the corresponding values must be of type `bytes`.

    Returns:
        google.cloud.logging_v2.types.LogBucket:
            Describes a repository in which log entries are stored.
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, logging_config.CreateBucketRequest):
        request = logging_config.CreateBucketRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.create_bucket]

    # Certain fields should be provided within the metadata header;
    # add these here (routing header so the backend can locate the resource).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("parent", request.parent),
        )),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
def update_bucket(self,
        request: Optional[Union[logging_config.UpdateBucketRequest, dict]] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
        ) -> logging_config.LogBucket:
    r"""Updates a log bucket.

    If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``,
    then ``FAILED_PRECONDITION`` will be returned.

    After a bucket has been created, the bucket's location cannot be
    changed.

    Args:
        request (Union[google.cloud.logging_v2.types.UpdateBucketRequest, dict]):
            The request object. The parameters to ``UpdateBucket``.
        retry (google.api_core.retry.Retry): Designation of what errors, if any,
            should be retried.
        timeout (float): The timeout for this request.
        metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which
            should be sent along with the request as metadata. Normally, each
            value must be of type `str`, but for metadata keys ending with the
            suffix `-bin`, the corresponding values must be of type `bytes`.

    Returns:
        google.cloud.logging_v2.types.LogBucket:
            Describes a repository in which log entries are stored.
    """
    # Create or coerce a protobuf request object.
    # - Use the request object if provided (there's no risk of modifying the input as
    #   there are no flattened fields), or create one.
    if not isinstance(request, logging_config.UpdateBucketRequest):
        request = logging_config.UpdateBucketRequest(request)

    # Wrap the RPC method; this adds retry and timeout information,
    # and friendly error handling.
    rpc = self._transport._wrapped_methods[self._transport.update_bucket]

    # Certain fields should be provided within the metadata header;
    # add these here (routing header so the backend can locate the resource).
    metadata = tuple(metadata) + (
        gapic_v1.routing_header.to_grpc_metadata((
            ("name", request.name),
        )),
    )

    # Validate the universe domain.
    self._validate_universe_domain()

    # Send the request.
    response = rpc(
        request,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )

    # Done; return the response.
    return response
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteBucketRequest, dict]): + The request object. The parameters to ``DeleteBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteBucketRequest): + request = logging_config.DeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def undelete_bucket(self, + request: Optional[Union[logging_config.UndeleteBucketRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_undelete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.UndeleteBucketRequest, dict]): + The request object. The parameters to ``UndeleteBucket``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.UndeleteBucketRequest): + request = logging_config.UndeleteBucketRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.undelete_bucket] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def _list_views(self, + request: Optional[Union[logging_config.ListViewsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListViewsPager: + r"""Lists views on a log bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__list_views(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_views(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListViewsRequest, dict]): + The request object. 
The parameters to ``ListViews``. + parent (str): + Required. The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsPager: + The response from ListViews. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListViewsRequest): + request = logging_config.ListViewsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._list_views] + + # Certain fields should be provided within the metadata header; + # add these here. 
+        r"""Gets a view on a log bucket.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over log entries in + a bucket. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetViewRequest): + request = logging_config.GetViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._get_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _create_view(self, + request: Optional[Union[logging_config.CreateViewRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogView: + r"""Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__create_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client._create_view(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateViewRequest, dict]): + The request object. The parameters to ``CreateView``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogView: + Describes a view over log entries in + a bucket. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateViewRequest): + request = logging_config.CreateViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._create_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. 
+        indicates that the system is not in a state where it can update the
+        returned, this indicates that the system is not in a state where it
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__delete_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client._delete_view(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteViewRequest, dict]): + The request object. The parameters to ``DeleteView``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteViewRequest): + request = logging_config.DeleteViewRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._delete_view] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def _list_sinks(self, + request: Optional[Union[logging_config.ListSinksRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListSinksPager: + r"""Lists sinks. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__list_sinks(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListSinksRequest, dict]): + The request object. The parameters to ``ListSinks``. + parent (str): + Required. The parent resource whose sinks are to be + listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksPager: + Result returned from ListSinks. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.ListSinksRequest): + request = logging_config.ListSinksRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._list_sinks] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers._ListSinksPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _get_sink(self, + request: Optional[Union[logging_config.GetSinkRequest, dict]] = None, + *, + sink_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogSink: + r"""Gets a sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__get_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client._get_sink(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetSinkRequest, dict]): + The request object. The parameters to ``GetSink``. + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + + This corresponds to the ``sink_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogSink: + Describes a sink used to export log + entries to one of the following + destinations in any project: a Cloud + Storage bucket, a BigQuery dataset, a + Pub/Sub topic or a Cloud Logging log + bucket. A logs filter controls which log + entries are exported. The sink must be + created within a project, organization, + billing account, or folder. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([sink_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSinkRequest): + request = logging_config.GetSinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if sink_name is not None: + request.sink_name = sink_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._get_sink] + + # Certain fields should be provided within the metadata header; + # add these here. 
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("sink_name", request.sink_name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    # NOTE(review): these underscore-prefixed methods are generated code; the
    # leading underscore suggests they are internal to the library surface —
    # confirm how the public client wraps them before calling directly.
    def _create_sink(self,
            request: Optional[Union[logging_config.CreateSinkRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            sink: Optional[logging_config.LogSink] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> logging_config.LogSink:
        r"""Creates a sink that exports specified log entries to a
        destination. The export of newly-ingested log entries begins
        immediately, unless the sink's ``writer_identity`` is not
        permitted to write to the destination. A sink can export log
        entries only from the resource owning the sink.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__create_sink():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                sink = logging_v2.LogSink()
                sink.name = "name_value"
                sink.destination = "destination_value"

                request = logging_v2.CreateSinkRequest(
                    parent="parent_value",
                    sink=sink,
                )

                # Make the request
                response = client._create_sink(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.CreateSinkRequest, dict]):
                The request object. The parameters to ``CreateSink``.
            parent (str):
                Required. The resource in which to create the sink:

                ::

                    "projects/[PROJECT_ID]"
                    "organizations/[ORGANIZATION_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]"
                    "folders/[FOLDER_ID]"

                For examples:

                ``"projects/my-project"`` ``"organizations/123456789"``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            sink (google.cloud.logging_v2.types.LogSink):
                Required. The new sink, whose ``name`` parameter is a
                sink identifier that is not already in use.

                This corresponds to the ``sink`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.logging_v2.types.LogSink:
                Describes a sink used to export log
                entries to one of the following
                destinations in any project: a Cloud
                Storage bucket, a BigQuery dataset, a
                Pub/Sub topic or a Cloud Logging log
                bucket. A logs filter controls which log
                entries are exported. The sink must be
                created within a project, organization,
                billing account, or folder.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, sink])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.CreateSinkRequest):
            request = logging_config.CreateSinkRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
            if sink is not None:
                request.sink = sink

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._create_sink]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _update_sink(self,
            request: Optional[Union[logging_config.UpdateSinkRequest, dict]] = None,
            *,
            sink_name: Optional[str] = None,
            sink: Optional[logging_config.LogSink] = None,
            update_mask: Optional[field_mask_pb2.FieldMask] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> logging_config.LogSink:
        r"""Updates a sink. This method replaces the following fields in the
        existing sink with values from the new sink: ``destination``,
        and ``filter``.

        The updated sink might also have a new ``writer_identity``; see
        the ``unique_writer_identity`` field.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__update_sink():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                sink = logging_v2.LogSink()
                sink.name = "name_value"
                sink.destination = "destination_value"

                request = logging_v2.UpdateSinkRequest(
                    sink_name="sink_name_value",
                    sink=sink,
                )

                # Make the request
                response = client._update_sink(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.UpdateSinkRequest, dict]):
                The request object. The parameters to ``UpdateSink``.
            sink_name (str):
                Required. The full resource name of the sink to update,
                including the parent resource and the sink identifier:

                ::

                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"

                For example:

                ``"projects/my-project/sinks/my-sink"``

                This corresponds to the ``sink_name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            sink (google.cloud.logging_v2.types.LogSink):
                Required. The updated sink, whose name is the same
                identifier that appears as part of ``sink_name``.

                This corresponds to the ``sink`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            update_mask (google.protobuf.field_mask_pb2.FieldMask):
                Optional. Field mask that specifies the fields in
                ``sink`` that need an update. A sink field will be
                overwritten if, and only if, it is in the update mask.
                ``name`` and output only fields cannot be updated.

                An empty ``updateMask`` is temporarily treated as using
                the following mask for backwards compatibility purposes:

                ``destination,filter,includeChildren``

                At some point in the future, behavior will be removed
                and specifying an empty ``updateMask`` will be an error.

                For a detailed ``FieldMask`` definition, see
                https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask

                For example: ``updateMask=filter``

                This corresponds to the ``update_mask`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.logging_v2.types.LogSink:
                Describes a sink used to export log
                entries to one of the following
                destinations in any project: a Cloud
                Storage bucket, a BigQuery dataset, a
                Pub/Sub topic or a Cloud Logging log
                bucket. A logs filter controls which log
                entries are exported. The sink must be
                created within a project, organization,
                billing account, or folder.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([sink_name, sink, update_mask])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.UpdateSinkRequest):
            request = logging_config.UpdateSinkRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if sink_name is not None:
                request.sink_name = sink_name
            if sink is not None:
                request.sink = sink
            if update_mask is not None:
                request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._update_sink]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("sink_name", request.sink_name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _delete_sink(self,
            request: Optional[Union[logging_config.DeleteSinkRequest, dict]] = None,
            *,
            sink_name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> None:
        r"""Deletes a sink. If the sink has a unique ``writer_identity``,
        then that service account is also deleted.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__delete_sink():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.DeleteSinkRequest(
                    sink_name="sink_name_value",
                )

                # Make the request
                client._delete_sink(request=request)

        Args:
            request (Union[google.cloud.logging_v2.types.DeleteSinkRequest, dict]):
                The request object. The parameters to ``DeleteSink``.
            sink_name (str):
                Required. The full resource name of the sink to delete,
                including the parent resource and the sink identifier:

                ::

                    "projects/[PROJECT_ID]/sinks/[SINK_ID]"
                    "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]"
                    "folders/[FOLDER_ID]/sinks/[SINK_ID]"

                For example:

                ``"projects/my-project/sinks/my-sink"``

                This corresponds to the ``sink_name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([sink_name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.DeleteSinkRequest):
            request = logging_config.DeleteSinkRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if sink_name is not None:
                request.sink_name = sink_name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._delete_sink]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("sink_name", request.sink_name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    # NOTE(review): returns a long-running-operation future; callers block on
    # completion via `operation.result()` as shown in the generated sample.
    def _create_link(self,
            request: Optional[Union[logging_config.CreateLinkRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            link: Optional[logging_config.Link] = None,
            link_id: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> operation.Operation:
        r"""Asynchronously creates a linked dataset in BigQuery
        which makes it possible to use BigQuery to read the logs
        stored in the log bucket. A log bucket may currently
        only contain one link.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__create_link():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.CreateLinkRequest(
                    parent="parent_value",
                    link_id="link_id_value",
                )

                # Make the request
                operation = client._create_link(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.CreateLinkRequest, dict]):
                The request object. The parameters to CreateLink.
            parent (str):
                Required. The full resource name of the bucket to create
                a link for.

                ::

                    "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
                    "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"
                    "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            link (google.cloud.logging_v2.types.Link):
                Required. The new link.
                This corresponds to the ``link`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            link_id (str):
                Required. The ID to use for the link. The link_id can
                have up to 100 characters. A valid link_id must only
                have alphanumeric characters and underscores within it.

                This corresponds to the ``link_id`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.logging_v2.types.Link` Describes a
                link connected to an analytics enabled bucket.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, link, link_id])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.CreateLinkRequest):
            request = logging_config.CreateLinkRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
            if link is not None:
                request.link = link
            if link_id is not None:
                request.link_id = link_id

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._create_link]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            logging_config.Link,
            metadata_type=logging_config.LinkMetadata,
        )

        # Done; return the response.
        return response

    def _delete_link(self,
            request: Optional[Union[logging_config.DeleteLinkRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> operation.Operation:
        r"""Deletes a link. This will also delete the
        corresponding BigQuery linked dataset.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__delete_link():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.DeleteLinkRequest(
                    name="name_value",
                )

                # Make the request
                operation = client._delete_link(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.DeleteLinkRequest, dict]):
                The request object. The parameters to DeleteLink.
            name (str):
                Required. The full resource name of the link to delete.

                "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
                "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.api_core.operation.Operation:
                An object representing a long-running operation.

                The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
                empty messages in your APIs. A typical example is to
                use it as the request or the response type of an API
                method. For instance:

                    service Foo {
                        rpc Bar(google.protobuf.Empty) returns
                        (google.protobuf.Empty);

                    }

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.DeleteLinkRequest):
            request = logging_config.DeleteLinkRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._delete_link]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Wrap the response in an operation future.
        response = operation.from_gapic(
            response,
            self._transport.operations_client,
            empty_pb2.Empty,
            metadata_type=logging_config.LinkMetadata,
        )

        # Done; return the response.
        return response

    # NOTE(review): paged RPC — the returned pager lazily re-invokes the
    # wrapped RPC to resolve additional pages during iteration.
    def _list_links(self,
            request: Optional[Union[logging_config.ListLinksRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> pagers._ListLinksPager:
        r"""Lists links.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__list_links():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.ListLinksRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client._list_links(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.ListLinksRequest, dict]):
                The request object. The parameters to ListLinks.
            parent (str):
                Required. The parent resource whose links are to be
                listed:

                "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/"
                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"
                "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/"

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksPager:
                The response from ListLinks.

                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.ListLinksRequest):
            request = logging_config.ListLinksRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._list_links]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers._ListLinksPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _get_link(self,
            request: Optional[Union[logging_config.GetLinkRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> logging_config.Link:
        r"""Gets a link.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__get_link():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.GetLinkRequest(
                    name="name_value",
                )

                # Make the request
                response = client._get_link(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.GetLinkRequest, dict]):
                The request object. The parameters to GetLink.
            name (str):
                Required. The resource name of the link:

                "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
                "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
                "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"
                "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]"

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.logging_v2.types.Link:
                Describes a link connected to an
                analytics enabled bucket.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.GetLinkRequest):
            request = logging_config.GetLinkRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._get_link]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _list_exclusions(self,
            request: Optional[Union[logging_config.ListExclusionsRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> pagers._ListExclusionsPager:
        r"""Lists all the exclusions on the \_Default sink in a parent
        resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__list_exclusions():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.ListExclusionsRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client._list_exclusions(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.ListExclusionsRequest, dict]):
                The request object. The parameters to ``ListExclusions``.
            parent (str):
                Required. The parent resource whose exclusions are to be
                listed.

                ::

                    "projects/[PROJECT_ID]"
                    "organizations/[ORGANIZATION_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]"
                    "folders/[FOLDER_ID]"

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsPager:
                Result returned from ListExclusions.

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.ListExclusionsRequest):
            request = logging_config.ListExclusionsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._list_exclusions]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers._ListExclusionsPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _get_exclusion(self,
            request: Optional[Union[logging_config.GetExclusionRequest, dict]] = None,
            *,
            name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> logging_config.LogExclusion:
        r"""Gets the description of an exclusion in the \_Default sink.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            # client as shown in:
            # https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import logging_v2

            def sample__get_exclusion():
                # Create a client
                client = logging_v2.BaseConfigServiceV2Client()

                # Initialize request argument(s)
                request = logging_v2.GetExclusionRequest(
                    name="name_value",
                )

                # Make the request
                response = client._get_exclusion(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.logging_v2.types.GetExclusionRequest, dict]):
                The request object. The parameters to ``GetExclusion``.
            name (str):
                Required. The resource name of an existing exclusion:

                ::

                    "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]"
                    "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]"
                    "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]"
                    "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]"

                For example:

                ``"projects/my-project/exclusions/my-exclusion"``

                This corresponds to the ``name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.logging_v2.types.LogExclusion:
                Specifies a set of log entries that are filtered out by a sink. If
                   your Google Cloud resource receives a large volume of
                   log entries, you can use exclusions to reduce your
                   chargeable logs. Note that exclusions on
                   organization-level and folder-level sinks don't apply
                   to child resources. Note also that you cannot modify
                   the \_Required sink or exclude logs from it.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_config.GetExclusionRequest):
            request = logging_config.GetExclusionRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if name is not None:
                request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._get_exclusion]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("name", request.name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
+ return response + + def _create_exclusion(self, + request: Optional[Union[logging_config.CreateExclusionRequest, dict]] = None, + *, + parent: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: + r"""Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__create_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client._create_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CreateExclusionRequest, dict]): + The request object. The parameters to ``CreateExclusion``. + parent (str): + Required. 
The parent resource in which to create the + exclusion: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-logging-project"`` + ``"organizations/123456789"`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. The new exclusion, whose ``name`` parameter is + an exclusion name that is not already used in the parent + resource. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent, exclusion]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CreateExclusionRequest): + request = logging_config.CreateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if exclusion is not None: + request.exclusion = exclusion + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._create_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _update_exclusion(self, + request: Optional[Union[logging_config.UpdateExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + exclusion: Optional[logging_config.LogExclusion] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.LogExclusion: + r"""Changes one or more properties of an existing exclusion in the + \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__update_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client._update_exclusion(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateExclusionRequest, dict]): + The request object. The parameters to ``UpdateExclusion``. + name (str): + Required. The resource name of the exclusion to update: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + exclusion (google.cloud.logging_v2.types.LogExclusion): + Required. New values for the existing exclusion. Only + the fields specified in ``update_mask`` are relevant. + + This corresponds to the ``exclusion`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. A non-empty list of fields to change in the + existing exclusion. 
New values for the fields are taken + from the corresponding fields in the + [LogExclusion][google.logging.v2.LogExclusion] included + in this request. Fields not mentioned in ``update_mask`` + are not changed and are ignored in the request. + + For example, to change the filter and description of an + exclusion, specify an ``update_mask`` of + ``"filter,description"``. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogExclusion: + Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of + log entries, you can use exclusions to reduce your + chargeable logs. Note that exclusions on + organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify + the \_Required sink or exclude logs from it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, exclusion, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging_config.UpdateExclusionRequest): + request = logging_config.UpdateExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if exclusion is not None: + request.exclusion = exclusion + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._update_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _delete_exclusion(self, + request: Optional[Union[logging_config.DeleteExclusionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an exclusion in the \_Default sink. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__delete_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client._delete_exclusion(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteExclusionRequest, dict]): + The request object. The parameters to ``DeleteExclusion``. + name (str): + Required. The resource name of an existing exclusion to + delete: + + :: + + "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" + "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" + "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" + + For example: + + ``"projects/my-project/exclusions/my-exclusion"`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.DeleteExclusionRequest): + request = logging_config.DeleteExclusionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._delete_exclusion] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def _get_cmek_settings(self, + request: Optional[Union[logging_config.GetCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: + r"""Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__get_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._get_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetCmekSettingsRequest, dict]): + The request object. The parameters to + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetCmekSettingsRequest): + request = logging_config.GetCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._get_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _update_cmek_settings(self, + request: Optional[Union[logging_config.UpdateCmekSettingsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.CmekSettings: + r"""Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__update_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._update_cmek_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateCmekSettingsRequest, dict]): + The request object. The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.CmekSettings: + Describes the customer-managed encryption key (CMEK) settings associated with + a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once + configured, it applies to all projects and folders in + the Google Cloud organization. + + See [Enabling CMEK for Log + Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + for more information. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateCmekSettingsRequest): + request = logging_config.UpdateCmekSettingsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._update_cmek_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _get_settings(self, + request: Optional[Union[logging_config.GetSettingsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: + r"""Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__get_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._get_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetSettingsRequest, dict]): + The request object. The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing + accounts. Currently it can only be configured for + organizations. Once configured for an organization, it + applies to all projects and folders in the Google Cloud + organization. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.GetSettingsRequest): + request = logging_config.GetSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._get_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def _update_settings(self, + request: Optional[Union[logging_config.UpdateSettingsRequest, dict]] = None, + *, + settings: Optional[logging_config.Settings] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_config.Settings: + r"""Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__update_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._update_settings(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.UpdateSettingsRequest, dict]): + The request object. The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + + This corresponds to the ``settings`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. + Output only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.Settings: + Describes the settings associated + with a project, folder, organization, + billing account, or flexible resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([settings, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.UpdateSettingsRequest): + request = logging_config.UpdateSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if settings is not None: + request.settings = settings + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._update_settings] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def _copy_log_entries(self, + request: Optional[Union[logging_config.CopyLogEntriesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__copy_log_entries(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client._copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.CopyLogEntriesRequest, dict]): + The request object. The parameters to CopyLogEntries. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.logging_v2.types.CopyLogEntriesResponse` + Response type for CopyLogEntries long running + operations. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_config.CopyLogEntriesRequest): + request = logging_config.CopyLogEntriesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._copy_log_entries] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + logging_config.CopyLogEntriesResponse, + metadata_type=logging_config.CopyLogEntriesMetadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BaseConfigServiceV2Client": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BaseConfigServiceV2Client", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py new file mode 100755 index 000000000000..1fd90927372d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -0,0 +1,722 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import retry_async as retries_async
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore

from google.cloud.logging_v2.types import logging_config


class _PagerBase:
    """Shared plumbing for all list-method pagers.

    A pager thinly wraps the initial list response: it holds the
    originating method, a copy of the initial request, and the most
    recent response. Attribute lookups that the pager itself does not
    define are delegated to the most recent response, so only the latest
    page's attributes are visible at any time.
    """

    # Subclasses configure these two class attributes.
    _request_type = None   # the request message class, e.g. ListBucketsRequest
    _items_field = ''      # name of the repeated items field on the response

    def __init__(self, method, request, response, *,
                 retry=gapic_v1.method.DEFAULT,
                 timeout=gapic_v1.method.DEFAULT,
                 metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request: The initial request object (message or dict).
            response: The initial response object.
            retry: Designation of what errors, if any, should be retried.
            timeout (float): The timeout for each page request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Extra
                request metadata; values for keys ending in ``-bin``
                must be of type ``bytes``.
        """
        self._method = method
        # Copy-construct so page_token mutation does not alter the caller's request.
        self._request = self._request_type(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class _SyncPager(_PagerBase):
    """Synchronous pager: ``pages`` / ``__iter__`` fetch further pages."""

    @property
    def pages(self):
        # Yield the current response, then keep fetching for as long as
        # the server returns a next_page_token.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(
                self._request, retry=self._retry,
                timeout=self._timeout, metadata=self._metadata)
            yield self._response

    def __iter__(self):
        for page in self.pages:
            yield from getattr(page, self._items_field)


class _AsyncPager(_PagerBase):
    """Asynchronous pager: ``pages`` / ``__aiter__`` fetch further pages."""

    @property
    async def pages(self):
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(
                self._request, retry=self._retry,
                timeout=self._timeout, metadata=self._metadata)
            yield self._response

    def __aiter__(self):
        async def _gen():
            async for page in self.pages:
                for item in getattr(page, self._items_field):
                    yield item
        return _gen()


class ListBucketsPager(_SyncPager):
    """Iterates the ``buckets`` field across ``ListBuckets`` pages."""
    _request_type = logging_config.ListBucketsRequest
    _items_field = 'buckets'


class ListBucketsAsyncPager(_AsyncPager):
    """Async-iterates the ``buckets`` field across ``ListBuckets`` pages."""
    _request_type = logging_config.ListBucketsRequest
    _items_field = 'buckets'


class _ListViewsPager(_SyncPager):
    """Iterates the ``views`` field across ``_ListViews`` pages."""
    _request_type = logging_config.ListViewsRequest
    _items_field = 'views'


class _ListViewsAsyncPager(_AsyncPager):
    """Async-iterates the ``views`` field across ``_ListViews`` pages."""
    _request_type = logging_config.ListViewsRequest
    _items_field = 'views'


class _ListSinksPager(_SyncPager):
    """Iterates the ``sinks`` field across ``_ListSinks`` pages."""
    _request_type = logging_config.ListSinksRequest
    _items_field = 'sinks'


class _ListSinksAsyncPager(_AsyncPager):
    """Async-iterates the ``sinks`` field across ``_ListSinks`` pages."""
    _request_type = logging_config.ListSinksRequest
    _items_field = 'sinks'


class _ListLinksPager(_SyncPager):
    """Iterates the ``links`` field across ``_ListLinks`` pages."""
    _request_type = logging_config.ListLinksRequest
    _items_field = 'links'


class _ListLinksAsyncPager(_AsyncPager):
    """Async-iterates the ``links`` field across ``_ListLinks`` pages."""
    _request_type = logging_config.ListLinksRequest
    _items_field = 'links'


# NOTE(review): the original _ListExclusionsPager is cut off at the end of
# this patch chunk; it is completed here from the byte-identical sibling
# pattern — confirm against the full golden file.
class _ListExclusionsPager(_SyncPager):
    """Iterates the ``exclusions`` field across ``_ListExclusions`` pages."""
    _request_type = logging_config.ListExclusionsRequest
    _items_field = 'exclusions'
+ """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[logging_config.LogExclusion]: + for page in self.pages: + yield from page.exclusions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class _ListExclusionsAsyncPager: + """A pager for iterating through ``_list_exclusions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``exclusions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``_ListExclusions`` requests and continue to iterate + through the ``exclusions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListExclusionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging_config.ListExclusionsResponse]], + request: logging_config.ListExclusionsRequest, + response: logging_config.ListExclusionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListExclusionsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListExclusionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = logging_config.ListExclusionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging_config.ListExclusionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[logging_config.LogExclusion]: + async def async_generator(): + async for page in self.pages: + for response in page.exclusions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/README.rst new file mode 
100755 index 000000000000..4ea84879601d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ConfigServiceV2Transport` is the ABC for all transports. +- public child `ConfigServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ConfigServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseConfigServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ConfigServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py new file mode 100755 index 000000000000..1239a292b25a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ConfigServiceV2Transport +from .grpc import ConfigServiceV2GrpcTransport +from .grpc_asyncio import ConfigServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[ConfigServiceV2Transport]] +_transport_registry['grpc'] = ConfigServiceV2GrpcTransport +_transport_registry['grpc_asyncio'] = ConfigServiceV2GrpcAsyncIOTransport + +__all__ = ( + 'ConfigServiceV2Transport', + 'ConfigServiceV2GrpcTransport', + 'ConfigServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py new file mode 100755 index 000000000000..1604e0a7663a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -0,0 +1,718 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.logging_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class ConfigServiceV2Transport(abc.ABC): + """Abstract transport class for ConfigServiceV2.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + ) + + DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_buckets: gapic_v1.method.wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: gapic_v1.method.wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket_async: gapic_v1.method.wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: gapic_v1.method.wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket: gapic_v1.method.wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: gapic_v1.method.wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: gapic_v1.method.wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: gapic_v1.method.wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self._list_views: gapic_v1.method.wrap_method( + self._list_views, + default_timeout=None, + client_info=client_info, + ), + self._get_view: gapic_v1.method.wrap_method( + self._get_view, + default_timeout=None, + client_info=client_info, + ), + self._create_view: gapic_v1.method.wrap_method( + self._create_view, + default_timeout=None, + client_info=client_info, 
+ ), + self._update_view: gapic_v1.method.wrap_method( + self._update_view, + default_timeout=None, + client_info=client_info, + ), + self._delete_view: gapic_v1.method.wrap_method( + self._delete_view, + default_timeout=None, + client_info=client_info, + ), + self._list_sinks: gapic_v1.method.wrap_method( + self._list_sinks, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_sink: gapic_v1.method.wrap_method( + self._get_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_sink: gapic_v1.method.wrap_method( + self._create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self._update_sink: gapic_v1.method.wrap_method( + self._update_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._delete_sink: gapic_v1.method.wrap_method( + self._delete_sink, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_link: gapic_v1.method.wrap_method( + self._create_link, + 
default_timeout=None, + client_info=client_info, + ), + self._delete_link: gapic_v1.method.wrap_method( + self._delete_link, + default_timeout=None, + client_info=client_info, + ), + self._list_links: gapic_v1.method.wrap_method( + self._list_links, + default_timeout=None, + client_info=client_info, + ), + self._get_link: gapic_v1.method.wrap_method( + self._get_link, + default_timeout=None, + client_info=client_info, + ), + self._list_exclusions: gapic_v1.method.wrap_method( + self._list_exclusions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_exclusion: gapic_v1.method.wrap_method( + self._get_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_exclusion: gapic_v1.method.wrap_method( + self._create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self._update_exclusion: gapic_v1.method.wrap_method( + self._update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self._delete_exclusion: gapic_v1.method.wrap_method( + self._delete_exclusion, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_cmek_settings: gapic_v1.method.wrap_method( + self._get_cmek_settings, + default_timeout=None, + 
client_info=client_info, + ), + self._update_cmek_settings: gapic_v1.method.wrap_method( + self._update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self._get_settings: gapic_v1.method.wrap_method( + self._get_settings, + default_timeout=None, + client_info=client_info, + ), + self._update_settings: gapic_v1.method.wrap_method( + self._update_settings, + default_timeout=None, + client_info=client_info, + ), + self._copy_log_entries: gapic_v1.method.wrap_method( + self._copy_log_entries, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + Union[ + logging_config.ListBucketsResponse, + Awaitable[logging_config.ListBucketsResponse] + ]]: + raise NotImplementedError() + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Union[ + logging_config.LogBucket, + Awaitable[logging_config.LogBucket] + ]]: + raise NotImplementedError() + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def _list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + Union[ + logging_config.ListViewsResponse, + Awaitable[logging_config.ListViewsResponse] + ]]: + raise 
NotImplementedError() + + @property + def _get_view(self) -> Callable[ + [logging_config.GetViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def _create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def _update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + Union[ + logging_config.LogView, + Awaitable[logging_config.LogView] + ]]: + raise NotImplementedError() + + @property + def _delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def _list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + Union[ + logging_config.ListSinksResponse, + Awaitable[logging_config.ListSinksResponse] + ]]: + raise NotImplementedError() + + @property + def _get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def _create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def _update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + Union[ + logging_config.LogSink, + Awaitable[logging_config.LogSink] + ]]: + raise NotImplementedError() + + @property + def _delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def _create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def 
_delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def _list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + Union[ + logging_config.ListLinksResponse, + Awaitable[logging_config.ListLinksResponse] + ]]: + raise NotImplementedError() + + @property + def _get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + Union[ + logging_config.Link, + Awaitable[logging_config.Link] + ]]: + raise NotImplementedError() + + @property + def _list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + Union[ + logging_config.ListExclusionsResponse, + Awaitable[logging_config.ListExclusionsResponse] + ]]: + raise NotImplementedError() + + @property + def _get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def _create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def _update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + Union[ + logging_config.LogExclusion, + Awaitable[logging_config.LogExclusion] + ]]: + raise NotImplementedError() + + @property + def _delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def _get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Union[ + logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ]]: + raise NotImplementedError() + + @property + def _update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Union[ + 
logging_config.CmekSettings, + Awaitable[logging_config.CmekSettings] + ]]: + raise NotImplementedError() + + @property + def _get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ]]: + raise NotImplementedError() + + @property + def _update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + Union[ + logging_config.Settings, + Awaitable[logging_config.Settings] + ]]: + raise NotImplementedError() + + @property + def _copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Union[ + operations_pb2.Operation, + Awaitable[operations_pb2.Operation] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'ConfigServiceV2Transport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py new file mode 100755 index 000000000000..de56227328fe --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -0,0 +1,1328 @@ +# -*- coding: utf-8 -*- +# Copyright 
2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + 
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra = {
+                    "serviceName": "google.logging.v2.ConfigServiceV2",
+                    "rpcName": client_call_details.method,
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+
+        response = continuation(client_call_details, request)
+        if logging_enabled: # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata into a dict of str -> str for logging.
+            metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra = {
+                    "serviceName": "google.logging.v2.ConfigServiceV2",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class ConfigServiceV2GrpcTransport(ConfigServiceV2Transport):
+    """gRPC backend transport for ConfigServiceV2.
+
+    Service for configuring sinks used to route log entries.
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + logging_config.ListBucketsResponse]: + r"""Return a callable for the list buckets method over gRPC. + + Lists log buckets. + + Returns: + Callable[[~.ListBucketsRequest], + ~.ListBucketsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_buckets' not in self._stubs: + self._stubs['list_buckets'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListBuckets', + request_serializer=logging_config.ListBucketsRequest.serialize, + response_deserializer=logging_config.ListBucketsResponse.deserialize, + ) + return self._stubs['list_buckets'] + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + logging_config.LogBucket]: + r"""Return a callable for the get bucket method over gRPC. + + Gets a log bucket. + + Returns: + Callable[[~.GetBucketRequest], + ~.LogBucket]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_bucket' not in self._stubs: + self._stubs['get_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetBucket', + request_serializer=logging_config.GetBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['get_bucket'] + + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + operations_pb2.Operation]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_bucket_async' not in self._stubs: + self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_bucket_async'] + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + operations_pb2.Operation]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket_async' not in self._stubs: + self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_bucket_async'] + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + logging_config.LogBucket]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a log bucket that can be used to store log + entries. After a bucket has been created, the bucket's + location cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + ~.LogBucket]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket' not in self._stubs: + self._stubs['create_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucket', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['create_bucket'] + + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + logging_config.LogBucket]: + r"""Return a callable for the update bucket method over gRPC. + + Updates a log bucket. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + ~.LogBucket]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket' not in self._stubs: + self._stubs['update_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucket', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['update_bucket'] + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. 
After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_bucket' not in self._stubs: + self._stubs['delete_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteBucket', + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_bucket'] + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + empty_pb2.Empty]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + Returns: + Callable[[~.UndeleteBucketRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_bucket' not in self._stubs: + self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['undelete_bucket'] + + @property + def _list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + logging_config.ListViewsResponse]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a log bucket. 
+
+        Returns:
+            Callable[[~.ListViewsRequest],
+                    ~.ListViewsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if '_list_views' not in self._stubs:
+            self._stubs['_list_views'] = self._logged_channel.unary_unary(
+                    '/google.logging.v2.ConfigServiceV2/_ListViews',
+                    request_serializer=logging_config.ListViewsRequest.serialize,
+                    response_deserializer=logging_config.ListViewsResponse.deserialize,
+            )
+        return self._stubs['_list_views']
+
+    @property
+    def _get_view(self) -> Callable[
+            [logging_config.GetViewRequest],
+            logging_config.LogView]:
+        r"""Return a callable for the get view method over gRPC.
+
+        Gets a view on a log bucket.
+
+        Returns:
+            Callable[[~.GetViewRequest],
+                    ~.LogView]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if '_get_view' not in self._stubs:
+            self._stubs['_get_view'] = self._logged_channel.unary_unary(
+                    '/google.logging.v2.ConfigServiceV2/_GetView',
+                    request_serializer=logging_config.GetViewRequest.serialize,
+                    response_deserializer=logging_config.LogView.deserialize,
+            )
+        return self._stubs['_get_view']
+
+    @property
+    def _create_view(self) -> Callable[
+            [logging_config.CreateViewRequest],
+            logging_config.LogView]:
+        r"""Return a callable for the create view method over gRPC.
+
+        Creates a view over log entries in a log bucket. A
+        bucket may contain a maximum of 30 views.
+
+        Returns:
+            Callable[[~.CreateViewRequest],
+                    ~.LogView]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_view' not in self._stubs: + self._stubs['_create_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateView', + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['_create_view'] + + @property + def _update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + logging_config.LogView]: + r"""Return a callable for the update view method over gRPC. + + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + Returns: + Callable[[~.UpdateViewRequest], + ~.LogView]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_view' not in self._stubs: + self._stubs['_update_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateView', + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['_update_view'] + + @property + def _delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view on a log bucket. 
If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + Returns: + Callable[[~.DeleteViewRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_view' not in self._stubs: + self._stubs['_delete_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteView', + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_view'] + + @property + def _list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + logging_config.ListSinksResponse]: + r"""Return a callable for the list sinks method over gRPC. + + Lists sinks. + + Returns: + Callable[[~.ListSinksRequest], + ~.ListSinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_list_sinks' not in self._stubs: + self._stubs['_list_sinks'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListSinks', + request_serializer=logging_config.ListSinksRequest.serialize, + response_deserializer=logging_config.ListSinksResponse.deserialize, + ) + return self._stubs['_list_sinks'] + + @property + def _get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the get sink method over gRPC. + + Gets a sink. 
+ + Returns: + Callable[[~.GetSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_sink' not in self._stubs: + self._stubs['_get_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetSink', + request_serializer=logging_config.GetSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['_get_sink'] + + @property + def _create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the create sink method over gRPC. + + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. + + Returns: + Callable[[~.CreateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_sink' not in self._stubs: + self._stubs['_create_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateSink', + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['_create_sink'] + + @property + def _update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + logging_config.LogSink]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. 
This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + ~.LogSink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_sink' not in self._stubs: + self._stubs['_update_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateSink', + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['_update_sink'] + + @property + def _delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_delete_sink' not in self._stubs: + self._stubs['_delete_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteSink', + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_sink'] + + @property + def _create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + operations_pb2.Operation]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_link' not in self._stubs: + self._stubs['_create_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateLink', + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['_create_link'] + + @property + def _delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + operations_pb2.Operation]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_delete_link' not in self._stubs: + self._stubs['_delete_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteLink', + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['_delete_link'] + + @property + def _list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + logging_config.ListLinksResponse]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + ~.ListLinksResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_list_links' not in self._stubs: + self._stubs['_list_links'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListLinks', + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs['_list_links'] + + @property + def _get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + logging_config.Link]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + ~.Link]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_get_link' not in self._stubs: + self._stubs['_get_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetLink', + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs['_get_link'] + + @property + def _list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + logging_config.ListExclusionsResponse]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions on the \_Default sink in a parent + resource. + + Returns: + Callable[[~.ListExclusionsRequest], + ~.ListExclusionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_list_exclusions' not in self._stubs: + self._stubs['_list_exclusions'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListExclusions', + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs['_list_exclusions'] + + @property + def _get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion in the \_Default sink. + + Returns: + Callable[[~.GetExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_get_exclusion' not in self._stubs: + self._stubs['_get_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetExclusion', + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['_get_exclusion'] + + @property + def _create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + Returns: + Callable[[~.CreateExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_exclusion' not in self._stubs: + self._stubs['_create_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateExclusion', + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['_create_exclusion'] + + @property + def _update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + logging_config.LogExclusion]: + r"""Return a callable for the update exclusion method over gRPC. + + Changes one or more properties of an existing exclusion in the + \_Default sink. + + Returns: + Callable[[~.UpdateExclusionRequest], + ~.LogExclusion]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_exclusion' not in self._stubs: + self._stubs['_update_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateExclusion', + request_serializer=logging_config.UpdateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['_update_exclusion'] + + @property + def _delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete exclusion method over gRPC. + + Deletes an exclusion in the \_Default sink. + + Returns: + Callable[[~.DeleteExclusionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_exclusion' not in self._stubs: + self._stubs['_delete_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteExclusion', + request_serializer=logging_config.DeleteExclusionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_exclusion'] + + @property + def _get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + logging_config.CmekSettings]: + r"""Return a callable for the get cmek settings method over gRPC. + + Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. 
+ + Returns: + Callable[[~.GetCmekSettingsRequest], + ~.CmekSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_cmek_settings' not in self._stubs: + self._stubs['_get_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetCmekSettings', + request_serializer=logging_config.GetCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['_get_cmek_settings'] + + @property + def _update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + logging_config.CmekSettings]: + r"""Return a callable for the update cmek settings method over gRPC. + + Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateCmekSettingsRequest], + ~.CmekSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_update_cmek_settings' not in self._stubs: + self._stubs['_update_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateCmekSettings', + request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['_update_cmek_settings'] + + @property + def _get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + logging_config.Settings]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_settings' not in self._stubs: + self._stubs['_get_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetSettings', + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['_get_settings'] + + @property + def _update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + logging_config.Settings]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. 
Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + ~.Settings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_settings' not in self._stubs: + self._stubs['_update_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateSettings', + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['_update_settings'] + + @property + def _copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + operations_pb2.Operation]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_copy_log_entries' not in self._stubs: + self._stubs['_copy_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CopyLogEntries', + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['_copy_log_entries'] + + def close(self): + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'ConfigServiceV2GrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py new file mode 100755 index 000000000000..884a984ca825 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,1595 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.api_core import operations_v1 +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging_config +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import ConfigServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import ConfigServiceV2GrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # 
pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.ConfigServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class ConfigServiceV2GrpcAsyncIOTransport(ConfigServiceV2Transport): + """gRPC AsyncIO backend transport for ConfigServiceV2. 
+ + Service for configuring sinks used to route log entries. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def list_buckets(self) -> Callable[ + [logging_config.ListBucketsRequest], + Awaitable[logging_config.ListBucketsResponse]]: + r"""Return a callable for the list buckets method over gRPC. + + Lists log buckets. + + Returns: + Callable[[~.ListBucketsRequest], + Awaitable[~.ListBucketsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_buckets' not in self._stubs: + self._stubs['list_buckets'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListBuckets', + request_serializer=logging_config.ListBucketsRequest.serialize, + response_deserializer=logging_config.ListBucketsResponse.deserialize, + ) + return self._stubs['list_buckets'] + + @property + def get_bucket(self) -> Callable[ + [logging_config.GetBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the get bucket method over gRPC. + + Gets a log bucket. + + Returns: + Callable[[~.GetBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_bucket' not in self._stubs: + self._stubs['get_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetBucket', + request_serializer=logging_config.GetBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['get_bucket'] + + @property + def create_bucket_async(self) -> Callable[ + [logging_config.CreateBucketRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create bucket async method over gRPC. + + Creates a log bucket asynchronously that can be used + to store log entries. + After a bucket has been created, the bucket's location + cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket_async' not in self._stubs: + self._stubs['create_bucket_async'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucketAsync', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['create_bucket_async'] + + @property + def update_bucket_async(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the update bucket async method over gRPC. + + Updates a log bucket asynchronously. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_bucket_async' not in self._stubs: + self._stubs['update_bucket_async'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucketAsync', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['update_bucket_async'] + + @property + def create_bucket(self) -> Callable[ + [logging_config.CreateBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the create bucket method over gRPC. + + Creates a log bucket that can be used to store log + entries. 
After a bucket has been created, the bucket's + location cannot be changed. + + Returns: + Callable[[~.CreateBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_bucket' not in self._stubs: + self._stubs['create_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateBucket', + request_serializer=logging_config.CreateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['create_bucket'] + + @property + def update_bucket(self) -> Callable[ + [logging_config.UpdateBucketRequest], + Awaitable[logging_config.LogBucket]]: + r"""Return a callable for the update bucket method over gRPC. + + Updates a log bucket. + + If the bucket has a ``lifecycle_state`` of ``DELETE_REQUESTED``, + then ``FAILED_PRECONDITION`` will be returned. + + After a bucket has been created, the bucket's location cannot be + changed. + + Returns: + Callable[[~.UpdateBucketRequest], + Awaitable[~.LogBucket]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_bucket' not in self._stubs: + self._stubs['update_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateBucket', + request_serializer=logging_config.UpdateBucketRequest.serialize, + response_deserializer=logging_config.LogBucket.deserialize, + ) + return self._stubs['update_bucket'] + + @property + def delete_bucket(self) -> Callable[ + [logging_config.DeleteBucketRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete bucket method over gRPC. + + Deletes a log bucket. + + Changes the bucket's ``lifecycle_state`` to the + ``DELETE_REQUESTED`` state. After 7 days, the bucket will be + purged and all log entries in the bucket will be permanently + deleted. + + Returns: + Callable[[~.DeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_bucket' not in self._stubs: + self._stubs['delete_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteBucket', + request_serializer=logging_config.DeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_bucket'] + + @property + def undelete_bucket(self) -> Callable[ + [logging_config.UndeleteBucketRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the undelete bucket method over gRPC. + + Undeletes a log bucket. A bucket that has been + deleted can be undeleted within the grace period of 7 + days. + + Returns: + Callable[[~.UndeleteBucketRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'undelete_bucket' not in self._stubs: + self._stubs['undelete_bucket'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UndeleteBucket', + request_serializer=logging_config.UndeleteBucketRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['undelete_bucket'] + + @property + def _list_views(self) -> Callable[ + [logging_config.ListViewsRequest], + Awaitable[logging_config.ListViewsResponse]]: + r"""Return a callable for the list views method over gRPC. + + Lists views on a log bucket. + + Returns: + Callable[[~.ListViewsRequest], + Awaitable[~.ListViewsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_list_views' not in self._stubs: + self._stubs['_list_views'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListViews', + request_serializer=logging_config.ListViewsRequest.serialize, + response_deserializer=logging_config.ListViewsResponse.deserialize, + ) + return self._stubs['_list_views'] + + @property + def _get_view(self) -> Callable[ + [logging_config.GetViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the get view method over gRPC. + + Gets a view on a log bucket.. + + Returns: + Callable[[~.GetViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_get_view' not in self._stubs: + self._stubs['_get_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetView', + request_serializer=logging_config.GetViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['_get_view'] + + @property + def _create_view(self) -> Callable[ + [logging_config.CreateViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the create view method over gRPC. + + Creates a view over log entries in a log bucket. A + bucket may contain a maximum of 30 views. + + Returns: + Callable[[~.CreateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_view' not in self._stubs: + self._stubs['_create_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateView', + request_serializer=logging_config.CreateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['_create_view'] + + @property + def _update_view(self) -> Callable[ + [logging_config.UpdateViewRequest], + Awaitable[logging_config.LogView]]: + r"""Return a callable for the update view method over gRPC. + + Updates a view on a log bucket. This method replaces the + following fields in the existing view with values from the new + view: ``filter``. If an ``UNAVAILABLE`` error is returned, this + indicates that system is not in a state where it can update the + view. If this occurs, please try again in a few minutes. + + Returns: + Callable[[~.UpdateViewRequest], + Awaitable[~.LogView]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_view' not in self._stubs: + self._stubs['_update_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateView', + request_serializer=logging_config.UpdateViewRequest.serialize, + response_deserializer=logging_config.LogView.deserialize, + ) + return self._stubs['_update_view'] + + @property + def _delete_view(self) -> Callable[ + [logging_config.DeleteViewRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete view method over gRPC. + + Deletes a view on a log bucket. If an ``UNAVAILABLE`` error is + returned, this indicates that system is not in a state where it + can delete the view. If this occurs, please try again in a few + minutes. + + Returns: + Callable[[~.DeleteViewRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_view' not in self._stubs: + self._stubs['_delete_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteView', + request_serializer=logging_config.DeleteViewRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_view'] + + @property + def _list_sinks(self) -> Callable[ + [logging_config.ListSinksRequest], + Awaitable[logging_config.ListSinksResponse]]: + r"""Return a callable for the list sinks method over gRPC. + + Lists sinks. + + Returns: + Callable[[~.ListSinksRequest], + Awaitable[~.ListSinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_list_sinks' not in self._stubs: + self._stubs['_list_sinks'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListSinks', + request_serializer=logging_config.ListSinksRequest.serialize, + response_deserializer=logging_config.ListSinksResponse.deserialize, + ) + return self._stubs['_list_sinks'] + + @property + def _get_sink(self) -> Callable[ + [logging_config.GetSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the get sink method over gRPC. + + Gets a sink. + + Returns: + Callable[[~.GetSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_sink' not in self._stubs: + self._stubs['_get_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetSink', + request_serializer=logging_config.GetSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['_get_sink'] + + @property + def _create_sink(self) -> Callable[ + [logging_config.CreateSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the create sink method over gRPC. + + Creates a sink that exports specified log entries to a + destination. The export of newly-ingested log entries begins + immediately, unless the sink's ``writer_identity`` is not + permitted to write to the destination. A sink can export log + entries only from the resource owning the sink. 
+ + Returns: + Callable[[~.CreateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_sink' not in self._stubs: + self._stubs['_create_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateSink', + request_serializer=logging_config.CreateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['_create_sink'] + + @property + def _update_sink(self) -> Callable[ + [logging_config.UpdateSinkRequest], + Awaitable[logging_config.LogSink]]: + r"""Return a callable for the update sink method over gRPC. + + Updates a sink. This method replaces the following fields in the + existing sink with values from the new sink: ``destination``, + and ``filter``. + + The updated sink might also have a new ``writer_identity``; see + the ``unique_writer_identity`` field. + + Returns: + Callable[[~.UpdateSinkRequest], + Awaitable[~.LogSink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_sink' not in self._stubs: + self._stubs['_update_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateSink', + request_serializer=logging_config.UpdateSinkRequest.serialize, + response_deserializer=logging_config.LogSink.deserialize, + ) + return self._stubs['_update_sink'] + + @property + def _delete_sink(self) -> Callable[ + [logging_config.DeleteSinkRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete sink method over gRPC. + + Deletes a sink. 
If the sink has a unique ``writer_identity``, + then that service account is also deleted. + + Returns: + Callable[[~.DeleteSinkRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_sink' not in self._stubs: + self._stubs['_delete_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteSink', + request_serializer=logging_config.DeleteSinkRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_sink'] + + @property + def _create_link(self) -> Callable[ + [logging_config.CreateLinkRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the create link method over gRPC. + + Asynchronously creates a linked dataset in BigQuery + which makes it possible to use BigQuery to read the logs + stored in the log bucket. A log bucket may currently + only contain one link. + + Returns: + Callable[[~.CreateLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_create_link' not in self._stubs: + self._stubs['_create_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateLink', + request_serializer=logging_config.CreateLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['_create_link'] + + @property + def _delete_link(self) -> Callable[ + [logging_config.DeleteLinkRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the delete link method over gRPC. + + Deletes a link. This will also delete the + corresponding BigQuery linked dataset. + + Returns: + Callable[[~.DeleteLinkRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_link' not in self._stubs: + self._stubs['_delete_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteLink', + request_serializer=logging_config.DeleteLinkRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['_delete_link'] + + @property + def _list_links(self) -> Callable[ + [logging_config.ListLinksRequest], + Awaitable[logging_config.ListLinksResponse]]: + r"""Return a callable for the list links method over gRPC. + + Lists links. + + Returns: + Callable[[~.ListLinksRequest], + Awaitable[~.ListLinksResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_list_links' not in self._stubs: + self._stubs['_list_links'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListLinks', + request_serializer=logging_config.ListLinksRequest.serialize, + response_deserializer=logging_config.ListLinksResponse.deserialize, + ) + return self._stubs['_list_links'] + + @property + def _get_link(self) -> Callable[ + [logging_config.GetLinkRequest], + Awaitable[logging_config.Link]]: + r"""Return a callable for the get link method over gRPC. + + Gets a link. + + Returns: + Callable[[~.GetLinkRequest], + Awaitable[~.Link]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_link' not in self._stubs: + self._stubs['_get_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetLink', + request_serializer=logging_config.GetLinkRequest.serialize, + response_deserializer=logging_config.Link.deserialize, + ) + return self._stubs['_get_link'] + + @property + def _list_exclusions(self) -> Callable[ + [logging_config.ListExclusionsRequest], + Awaitable[logging_config.ListExclusionsResponse]]: + r"""Return a callable for the list exclusions method over gRPC. + + Lists all the exclusions on the \_Default sink in a parent + resource. + + Returns: + Callable[[~.ListExclusionsRequest], + Awaitable[~.ListExclusionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_list_exclusions' not in self._stubs: + self._stubs['_list_exclusions'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_ListExclusions', + request_serializer=logging_config.ListExclusionsRequest.serialize, + response_deserializer=logging_config.ListExclusionsResponse.deserialize, + ) + return self._stubs['_list_exclusions'] + + @property + def _get_exclusion(self) -> Callable[ + [logging_config.GetExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the get exclusion method over gRPC. + + Gets the description of an exclusion in the \_Default sink. + + Returns: + Callable[[~.GetExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_exclusion' not in self._stubs: + self._stubs['_get_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetExclusion', + request_serializer=logging_config.GetExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['_get_exclusion'] + + @property + def _create_exclusion(self) -> Callable[ + [logging_config.CreateExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the create exclusion method over gRPC. + + Creates a new exclusion in the \_Default sink in a specified + parent resource. Only log entries belonging to that resource can + be excluded. You can have up to 10 exclusions in a resource. + + Returns: + Callable[[~.CreateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_exclusion' not in self._stubs: + self._stubs['_create_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CreateExclusion', + request_serializer=logging_config.CreateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['_create_exclusion'] + + @property + def _update_exclusion(self) -> Callable[ + [logging_config.UpdateExclusionRequest], + Awaitable[logging_config.LogExclusion]]: + r"""Return a callable for the update exclusion method over gRPC. + + Changes one or more properties of an existing exclusion in the + \_Default sink. + + Returns: + Callable[[~.UpdateExclusionRequest], + Awaitable[~.LogExclusion]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_exclusion' not in self._stubs: + self._stubs['_update_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateExclusion', + request_serializer=logging_config.UpdateExclusionRequest.serialize, + response_deserializer=logging_config.LogExclusion.deserialize, + ) + return self._stubs['_update_exclusion'] + + @property + def _delete_exclusion(self) -> Callable[ + [logging_config.DeleteExclusionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete exclusion method over gRPC. + + Deletes an exclusion in the \_Default sink. + + Returns: + Callable[[~.DeleteExclusionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_exclusion' not in self._stubs: + self._stubs['_delete_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_DeleteExclusion', + request_serializer=logging_config.DeleteExclusionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_exclusion'] + + @property + def _get_cmek_settings(self) -> Callable[ + [logging_config.GetCmekSettingsRequest], + Awaitable[logging_config.CmekSettings]]: + r"""Return a callable for the get cmek settings method over gRPC. + + Gets the Logging CMEK settings for the given resource. + + Note: CMEK for the Log Router can be configured for Google Cloud + projects, folders, organizations and billing accounts. Once + configured for an organization, it applies to all projects and + folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.GetCmekSettingsRequest], + Awaitable[~.CmekSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_cmek_settings' not in self._stubs: + self._stubs['_get_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetCmekSettings', + request_serializer=logging_config.GetCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['_get_cmek_settings'] + + @property + def _update_cmek_settings(self) -> Callable[ + [logging_config.UpdateCmekSettingsRequest], + Awaitable[logging_config.CmekSettings]]: + r"""Return a callable for the update cmek settings method over gRPC. 
+ + Updates the Log Router CMEK settings for the given resource. + + Note: CMEK for the Log Router can currently only be configured + for Google Cloud organizations. Once configured, it applies to + all projects and folders in the Google Cloud organization. + + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateCmekSettingsRequest], + Awaitable[~.CmekSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_cmek_settings' not in self._stubs: + self._stubs['_update_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateCmekSettings', + request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, + response_deserializer=logging_config.CmekSettings.deserialize, + ) + return self._stubs['_update_cmek_settings'] + + @property + def _get_settings(self) -> Callable[ + [logging_config.GetSettingsRequest], + Awaitable[logging_config.Settings]]: + r"""Return a callable for the get settings method over gRPC. + + Gets the Log Router settings for the given resource. + + Note: Settings for the Log Router can be get for Google Cloud + projects, folders, organizations and billing accounts. Currently + it can only be configured for organizations. Once configured for + an organization, it applies to all projects and folders in the + Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. 
+ + Returns: + Callable[[~.GetSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_settings' not in self._stubs: + self._stubs['_get_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_GetSettings', + request_serializer=logging_config.GetSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['_get_settings'] + + @property + def _update_settings(self) -> Callable[ + [logging_config.UpdateSettingsRequest], + Awaitable[logging_config.Settings]]: + r"""Return a callable for the update settings method over gRPC. + + Updates the Log Router settings for the given resource. + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, it + applies to all projects and folders in the Google Cloud + organization. + + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings] + will fail if 1) ``kms_key_name`` is invalid, or 2) the + associated service account does not have the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned for + the key, or 3) access to the key is disabled. 4) ``location_id`` + is not supported by Logging. 5) ``location_id`` violate + OrgPolicy. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Returns: + Callable[[~.UpdateSettingsRequest], + Awaitable[~.Settings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_update_settings' not in self._stubs: + self._stubs['_update_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_UpdateSettings', + request_serializer=logging_config.UpdateSettingsRequest.serialize, + response_deserializer=logging_config.Settings.deserialize, + ) + return self._stubs['_update_settings'] + + @property + def _copy_log_entries(self) -> Callable[ + [logging_config.CopyLogEntriesRequest], + Awaitable[operations_pb2.Operation]]: + r"""Return a callable for the copy log entries method over gRPC. + + Copies a set of log entries from a log bucket to a + Cloud Storage bucket. + + Returns: + Callable[[~.CopyLogEntriesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_copy_log_entries' not in self._stubs: + self._stubs['_copy_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/_CopyLogEntries', + request_serializer=logging_config.CopyLogEntriesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs['_copy_log_entries'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_buckets: self._wrap_method( + self.list_buckets, + default_timeout=None, + client_info=client_info, + ), + self.get_bucket: self._wrap_method( + self.get_bucket, + default_timeout=None, + client_info=client_info, + ), + self.create_bucket_async: self._wrap_method( + self.create_bucket_async, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket_async: self._wrap_method( + self.update_bucket_async, + default_timeout=None, + client_info=client_info, + ), 
+ self.create_bucket: self._wrap_method( + self.create_bucket, + default_timeout=None, + client_info=client_info, + ), + self.update_bucket: self._wrap_method( + self.update_bucket, + default_timeout=None, + client_info=client_info, + ), + self.delete_bucket: self._wrap_method( + self.delete_bucket, + default_timeout=None, + client_info=client_info, + ), + self.undelete_bucket: self._wrap_method( + self.undelete_bucket, + default_timeout=None, + client_info=client_info, + ), + self._list_views: self._wrap_method( + self._list_views, + default_timeout=None, + client_info=client_info, + ), + self._get_view: self._wrap_method( + self._get_view, + default_timeout=None, + client_info=client_info, + ), + self._create_view: self._wrap_method( + self._create_view, + default_timeout=None, + client_info=client_info, + ), + self._update_view: self._wrap_method( + self._update_view, + default_timeout=None, + client_info=client_info, + ), + self._delete_view: self._wrap_method( + self._delete_view, + default_timeout=None, + client_info=client_info, + ), + self._list_sinks: self._wrap_method( + self._list_sinks, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_sink: self._wrap_method( + self._get_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_sink: self._wrap_method( + self._create_sink, + default_timeout=120.0, + client_info=client_info, + ), + self._update_sink: self._wrap_method( + self._update_sink, + 
default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._delete_sink: self._wrap_method( + self._delete_sink, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_link: self._wrap_method( + self._create_link, + default_timeout=None, + client_info=client_info, + ), + self._delete_link: self._wrap_method( + self._delete_link, + default_timeout=None, + client_info=client_info, + ), + self._list_links: self._wrap_method( + self._list_links, + default_timeout=None, + client_info=client_info, + ), + self._get_link: self._wrap_method( + self._get_link, + default_timeout=None, + client_info=client_info, + ), + self._list_exclusions: self._wrap_method( + self._list_exclusions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_exclusion: self._wrap_method( + self._get_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_exclusion: self._wrap_method( + 
self._create_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self._update_exclusion: self._wrap_method( + self._update_exclusion, + default_timeout=120.0, + client_info=client_info, + ), + self._delete_exclusion: self._wrap_method( + self._delete_exclusion, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_cmek_settings: self._wrap_method( + self._get_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self._update_cmek_settings: self._wrap_method( + self._update_cmek_settings, + default_timeout=None, + client_info=client_info, + ), + self._get_settings: self._wrap_method( + self._get_settings, + default_timeout=None, + client_info=client_info, + ), + self._update_settings: self._wrap_method( + self._update_settings, + default_timeout=None, + client_info=client_info, + ), + self._copy_log_entries: self._wrap_method( + self._copy_log_entries, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> 
Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'ConfigServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py new file mode 100755 index 000000000000..3533e0cda4ae --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import LoggingServiceV2Client +from .async_client import LoggingServiceV2AsyncClient + +__all__ = ( + 'LoggingServiceV2Client', + 'LoggingServiceV2AsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py new file mode 100755 index 000000000000..eb900db2378d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -0,0 +1,1175 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union + +from google.cloud.logging_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport +from .client import LoggingServiceV2Client + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class LoggingServiceV2AsyncClient: + """Service for ingesting and querying logs.""" + + _client: LoggingServiceV2Client + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = LoggingServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = LoggingServiceV2Client._DEFAULT_UNIVERSE + + log_path = staticmethod(LoggingServiceV2Client.log_path) + parse_log_path = staticmethod(LoggingServiceV2Client.parse_log_path) + common_billing_account_path = staticmethod(LoggingServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(LoggingServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(LoggingServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(LoggingServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(LoggingServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(LoggingServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(LoggingServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(LoggingServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(LoggingServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(LoggingServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. 
+ """ + return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2AsyncClient: The constructed client. + """ + return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> LoggingServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + LoggingServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = LoggingServiceV2Client.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the logging service v2 async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the LoggingServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = LoggingServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + } + ) + + async def delete_log(self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + await client.delete_log(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]]): + The request object. The parameters to DeleteLog. + log_name (:class:`str`): + Required. The resource name of the log to delete: + + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("log_name", request.log_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def write_log_entries(self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. 
A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = await client.write_log_entries(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]]): + The request object. The parameters to WriteLogEntries. + log_name (:class:`str`): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. 
+ + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (:class:`google.api.monitored_resource_pb2.MonitoredResource`): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (:class:`MutableMapping[str, str]`): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (:class:`MutableSequence[google.cloud.logging_v2.types.LogEntry]`): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. 
+ + Log entries with timestamps that are more than the `logs + retention + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. + + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + + if labels: + request.labels.update(labels) + if entries: + request.entries.extend(entries) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.write_log_entries] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_log_entries(self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesAsyncPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]]): + The request object. The parameters to ``ListLogEntries``. + resource_names (:class:`MutableSequence[str]`): + Required. Names of one or more parent resources from + which to retrieve log entries: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + May alternatively be one or more views: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + + Projects listed in the ``project_ids`` field are added + to this list. A maximum of 100 resources may be + specified in a single request. + + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (:class:`str`): + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. 
Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (:class:`str`): + Optional. How the results should be sorted. Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. + + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager: + Result returned from ListLogEntries. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + if resource_names: + request.resource_names.extend(resource_names) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_entries] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogEntriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_monitored_resource_descriptors(self, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsAsyncPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]]): + The request object. The parameters to + ListMonitoredResourceDescriptors + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_monitored_resource_descriptors] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_logs(self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsAsyncPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLogsRequest, dict]]): + The request object. The parameters to ListLogs. + parent (:class:`str`): + Required. The resource name to list logs for: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager: + Result returned from ListLogs. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.list_logs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListLogsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def tail_log_entries(self, + requests: Optional[AsyncIterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[logging.TailLogEntriesResponse]]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.tail_log_entries(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.logging_v2.types.TailLogEntriesRequest`]): + The request object AsyncIterator. The parameters to ``TailLogEntries``. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. 
+ """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.tail_log_entries] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "LoggingServiceV2AsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LoggingServiceV2AsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py new file mode 100755 index 000000000000..2ca0aa9b3c31 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -0,0 +1,1538 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union, cast +import warnings + +from google.cloud.logging_v2 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore +from .transports.base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc import LoggingServiceV2GrpcTransport +from .transports.grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +class 
LoggingServiceV2ClientMeta(type):
+    """Metaclass for the LoggingServiceV2 client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[LoggingServiceV2Transport]]
+    _transport_registry["grpc"] = LoggingServiceV2GrpcTransport
+    _transport_registry["grpc_asyncio"] = LoggingServiceV2GrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: Optional[str] = None,
+        ) -> Type[LoggingServiceV2Transport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class LoggingServiceV2Client(metaclass=LoggingServiceV2ClientMeta):
+    """Service for ingesting and querying logs."""
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + LoggingServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> LoggingServiceV2Transport: + """Returns the transport used by the client instance. 
+
+        Returns:
+            LoggingServiceV2Transport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def log_path(project: str,log: str,) -> str:
+        """Returns a fully-qualified log string."""
+        return "projects/{project}/logs/{log}".format(project=project, log=log, )
+
+    @staticmethod
+    def parse_log_path(path: str) -> Dict[str,str]:
+        """Parses a log path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/logs/(?P<log>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` if provided, use the provided one.
+        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. 
+ Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = LoggingServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the logging service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
    def __init__(self, *,
            credentials: Optional[ga_credentials.Credentials] = None,
            transport: Optional[Union[str, LoggingServiceV2Transport, Callable[..., LoggingServiceV2Transport]]] = None,
            client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiates the logging service v2 client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. If none are
                given, the client attempts to ascertain credentials from the
                environment.
            transport (Optional[Union[str,LoggingServiceV2Transport,Callable[..., LoggingServiceV2Transport]]]):
                The transport to use, or a Callable that constructs and returns
                a new transport (called with the same initialization arguments
                as the LoggingServiceV2Transport constructor). If ``None``, a
                transport is chosen automatically.
            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
                Custom options. ``api_endpoint`` overrides the default endpoint
                (only honored when ``transport`` is not explicitly provided;
                otherwise the GOOGLE_API_USE_MTLS_ENDPOINT env var —
                "always"/"never"/"auto" — selects the endpoint).
                ``client_cert_source`` supplies an mTLS client certificate when
                GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
                ``universe_domain`` overrides the default "googleapis.com"
                universe (``api_endpoint`` still takes precedence; not
                supported for mTLS).
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with API
                requests. Only needed when developing your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a dict, None, or ClientOptions.
        self._client_options = client_options
        if isinstance(self._client_options, dict):
            self._client_options = client_options_lib.from_dict(self._client_options)
        if self._client_options is None:
            self._client_options = client_options_lib.ClientOptions()
        self._client_options = cast(client_options_lib.ClientOptions, self._client_options)

        universe_domain_opt = getattr(self._client_options, 'universe_domain', None)

        # Resolve mTLS / universe-domain configuration from the environment,
        # then derive the cert source and effective universe domain from it.
        self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = LoggingServiceV2Client._read_environment_variables()
        self._client_cert_source = LoggingServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert)
        self._universe_domain = LoggingServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env)
        self._api_endpoint = None # updated below, depending on `transport`

        # Initialize the universe domain validation.
        self._is_universe_domain_valid = False

        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
            # Setup logging.
            client_logging.initialize_logging()

        # API keys and explicit credentials are mutually exclusive.
        api_key_value = getattr(self._client_options, "api_key", None)
        if api_key_value and credentials:
            raise ValueError("client_options.api_key and credentials are mutually exclusive")

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        transport_provided = isinstance(transport, LoggingServiceV2Transport)
        if transport_provided:
            # transport is a LoggingServiceV2Transport instance.
            # A pre-built transport already carries credentials/scopes, so
            # passing them here as well would be ambiguous.
            if credentials or self._client_options.credentials_file or api_key_value:
                raise ValueError("When providing a transport instance, "
                                 "provide its credentials directly.")
            if self._client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = cast(LoggingServiceV2Transport, transport)
            self._api_endpoint = self._transport.host

        # Endpoint precedence: transport host (above), then client options
        # override, then the mTLS/universe-derived default.
        self._api_endpoint = (self._api_endpoint or
            LoggingServiceV2Client._get_api_endpoint(
                self._client_options.api_endpoint,
                self._client_cert_source,
                self._universe_domain,
                self._use_mtls_endpoint))

        if not transport_provided:
            import google.auth._default  # type: ignore

            # Convert an API key into credentials when google-auth supports it.
            if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"):
                credentials = google.auth._default.get_api_key_credentials(api_key_value)

            transport_init: Union[Type[LoggingServiceV2Transport], Callable[..., LoggingServiceV2Transport]] = (
                LoggingServiceV2Client.get_transport_class(transport)
                if isinstance(transport, str) or transport is None
                else cast(Callable[..., LoggingServiceV2Transport], transport)
            )
            # initialize with the provided callable or the passed in class
            self._transport = transport_init(
                credentials=credentials,
                credentials_file=self._client_options.credentials_file,
                host=self._api_endpoint,
                scopes=self._client_options.scopes,
                client_cert_source_for_mtls=self._client_cert_source,
                quota_project_id=self._client_options.quota_project_id,
                client_info=client_info,
                always_use_jwt_access=True,
                api_audience=self._client_options.api_audience,
            )

        # Emit a one-time DEBUG record describing the created client
        # (sync transports only).
        if "async" not in str(self._transport):
            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG):  # pragma: NO COVER
                _LOGGER.debug(
                    "Created client `google.logging_v2.LoggingServiceV2Client`.",
                    extra = {
                        "serviceName": "google.logging.v2.LoggingServiceV2",
                        "universeDomain": getattr(self._transport._credentials, "universe_domain", ""),
                        "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}",
                        "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(),
                    } if hasattr(self._transport, "_credentials") else {
                        "serviceName": "google.logging.v2.LoggingServiceV2",
                        "credentialsType": None,
                    }
                )
"credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.LoggingServiceV2", + "credentialsType": None, + } + ) + + def delete_log(self, + request: Optional[Union[logging.DeleteLogRequest, dict]] = None, + *, + log_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) + + Args: + request (Union[google.cloud.logging_v2.types.DeleteLogRequest, dict]): + The request object. The parameters to DeleteLog. + log_name (str): + Required. 
The resource name of the log to delete: + + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.DeleteLogRequest): + request = logging.DeleteLogRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete_log] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("log_name", request.log_name), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def write_log_entries(self, + request: Optional[Union[logging.WriteLogEntriesRequest, dict]] = None, + *, + log_name: Optional[str] = None, + resource: Optional[monitored_resource_pb2.MonitoredResource] = None, + labels: Optional[MutableMapping[str, str]] = None, + entries: Optional[MutableSequence[log_entry.LogEntry]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging.WriteLogEntriesResponse: + r"""Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.WriteLogEntriesRequest, dict]): + The request object. The parameters to WriteLogEntries. + log_name (str): + Optional. A default log resource name that is assigned + to all log entries in ``entries`` that do not specify a + value for ``log_name``: + + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + + ``[LOG_ID]`` must be URL-encoded. For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed + on each project, organization, billing account, or + folder that is receiving new log entries, whether the + resource is specified in ``logName`` or in an individual + log entry. + + This corresponds to the ``log_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. 
Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + labels (MutableMapping[str, str]): + Optional. Default labels that are added to the + ``labels`` field of all log entries in ``entries``. If a + log entry already has a label with the same key as a + label in this parameter, then the log entry's label is + not changed. See [LogEntry][google.logging.v2.LogEntry]. + + This corresponds to the ``labels`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): + Required. The log entries to send to Logging. The order + of log entries in this list does not matter. Values + supplied in this method's ``log_name``, ``resource``, + and ``labels`` fields are copied into those log entries + in this list that do not include values for their + corresponding fields. For more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing + in log entries, then this method supplies the current + time or a unique identifier, respectively. The supplied + values are chosen so that, among the log entries that + did not supply their own values, the entries earlier in + the list will sort before the entries later in the list. + See the ``entries.list`` method. + + Log entries with timestamps that are more than the `logs + retention + period `__ in + the past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those + log entries can still be `exported with + LogSinks `__. 
+ + To improve throughput and to avoid exceeding the `quota + limit `__ for + calls to ``entries.write``, you should try to include + several log entries in this list, rather than calling + this method for each individual log entry. + + This corresponds to the ``entries`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.WriteLogEntriesResponse: + Result returned from WriteLogEntries. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([log_name, resource, labels, entries]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.WriteLogEntriesRequest): + request = logging.WriteLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if log_name is not None: + request.log_name = log_name + if resource is not None: + request.resource = resource + if labels is not None: + request.labels = labels + if entries is not None: + request.entries = entries + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.write_log_entries] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_log_entries(self, + request: Optional[Union[logging.ListLogEntriesRequest, dict]] = None, + *, + resource_names: Optional[MutableSequence[str]] = None, + filter: Optional[str] = None, + order_by: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogEntriesPager: + r"""Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListLogEntriesRequest, dict]): + The request object. The parameters to ``ListLogEntries``. 
+ resource_names (MutableSequence[str]): + Required. Names of one or more parent resources from + which to retrieve log entries: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + May alternatively be one or more views: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + + Projects listed in the ``project_ids`` field are added + to this list. A maximum of 100 resources may be + specified in a single request. + + This corresponds to the ``resource_names`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + filter (str): + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a + parent resource that is not listed in ``resource_names`` + will cause the filter to return no results. The maximum + length of a filter is 20,000 characters. + + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + order_by (str): + Optional. How the results should be sorted. Presently, + the only permitted values are ``"timestamp asc"`` + (default) and ``"timestamp desc"``. The first option + returns entries in order of increasing values of + ``LogEntry.timestamp`` (oldest first), and the second + option returns entries in order of decreasing timestamps + (newest first). Entries with equal timestamps are + returned in order of their ``insert_id`` values. 
+ + This corresponds to the ``order_by`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager: + Result returned from ListLogEntries. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([resource_names, filter, order_by]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogEntriesRequest): + request = logging.ListLogEntriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if resource_names is not None: + request.resource_names = resource_names + if filter is not None: + request.filter = filter + if order_by is not None: + request.order_by = order_by + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_log_entries] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListLogEntriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_monitored_resource_descriptors(self, + request: Optional[Union[logging.ListMonitoredResourceDescriptorsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListMonitoredResourceDescriptorsPager: + r"""Lists the descriptors for monitored resource types + used by Logging. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest, dict]): + The request object. The parameters to + ListMonitoredResourceDescriptors + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager: + Result returned from + ListMonitoredResourceDescriptors. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListMonitoredResourceDescriptorsRequest): + request = logging.ListMonitoredResourceDescriptorsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_monitored_resource_descriptors] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListMonitoredResourceDescriptorsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_logs(self, + request: Optional[Union[logging.ListLogsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListLogsPager: + r"""Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListLogsRequest, dict]): + The request object. The parameters to ListLogs. + parent (str): + Required. The resource name to list logs for: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager: + Result returned from ListLogs. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging.ListLogsRequest): + request = logging.ListLogsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_logs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListLogsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def tail_log_entries(self, + requests: Optional[Iterator[logging.TailLogEntriesRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[logging.TailLogEntriesResponse]: + r"""Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]): + The request object iterator. The parameters to ``TailLogEntries``. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]: + Result returned from TailLogEntries. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.tail_log_entries] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "LoggingServiceV2Client": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. 
Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "LoggingServiceV2Client", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py new file mode 100755 index 000000000000..c4f0739ea163 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging + + +class ListLogEntriesPager: + """A pager for iterating through ``list_log_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., logging.ListLogEntriesResponse], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.logging_v2.types.ListLogEntriesRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogEntriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[log_entry.LogEntry]: + for page in self.pages: + yield from page.entries + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogEntriesAsyncPager: + """A pager for iterating through ``list_log_entries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``entries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogEntries`` requests and continue to iterate + through the ``entries`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.logging_v2.types.ListLogEntriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListLogEntriesResponse]], + request: logging.ListLogEntriesRequest, + response: logging.ListLogEntriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogEntriesRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogEntriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = logging.ListLogEntriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging.ListLogEntriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[log_entry.LogEntry]: + async def async_generator(): + async for page in self.pages: + for response in page.entries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., logging.ListMonitoredResourceDescriptorsResponse], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[monitored_resource_pb2.MonitoredResourceDescriptor]: + for page in self.pages: + yield from page.resource_descriptors + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListMonitoredResourceDescriptorsAsyncPager: + """A pager for iterating through ``list_monitored_resource_descriptors`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``resource_descriptors`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListMonitoredResourceDescriptors`` requests and continue to iterate + through the ``resource_descriptors`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListMonitoredResourceDescriptorsResponse]], + request: logging.ListMonitoredResourceDescriptorsRequest, + response: logging.ListMonitoredResourceDescriptorsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = logging.ListMonitoredResourceDescriptorsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging.ListMonitoredResourceDescriptorsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[monitored_resource_pb2.MonitoredResourceDescriptor]: + async def async_generator(): + async for page in self.pages: + for response in page.resource_descriptors: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogsPager: + """A pager for iterating through ``list_logs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``log_names`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., logging.ListLogsResponse], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.log_names + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListLogsAsyncPager: + """A pager for iterating through ``list_logs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.logging_v2.types.ListLogsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``log_names`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListLogs`` requests and continue to iterate + through the ``log_names`` field on the + corresponding responses. + + All the usual :class:`google.cloud.logging_v2.types.ListLogsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[logging.ListLogsResponse]], + request: logging.ListLogsRequest, + response: logging.ListLogsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.logging_v2.types.ListLogsRequest): + The initial request object. + response (google.cloud.logging_v2.types.ListLogsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = logging.ListLogsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[logging.ListLogsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.log_names: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst new file mode 100755 index 000000000000..897a4c7bfaec --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`LoggingServiceV2Transport` is the ABC for all transports. +- public child `LoggingServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `LoggingServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseLoggingServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `LoggingServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py new file mode 100755 index 000000000000..4e814dcca94b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import LoggingServiceV2Transport +from .grpc import LoggingServiceV2GrpcTransport +from .grpc_asyncio import LoggingServiceV2GrpcAsyncIOTransport + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[LoggingServiceV2Transport]] +_transport_registry['grpc'] = LoggingServiceV2GrpcTransport +_transport_registry['grpc_asyncio'] = LoggingServiceV2GrpcAsyncIOTransport + +__all__ = ( + 'LoggingServiceV2Transport', + 'LoggingServiceV2GrpcTransport', + 'LoggingServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py new file mode 100755 index 000000000000..ff6d8c8dd20a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.logging_v2 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class LoggingServiceV2Transport(abc.ABC): + """Abstract transport class for LoggingServiceV2.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + ) + + DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. 
+ if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.delete_log: gapic_v1.method.wrap_method( + self.delete_log, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: gapic_v1.method.wrap_method( + self.write_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: gapic_v1.method.wrap_method( + self.list_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: gapic_v1.method.wrap_method( + self.list_monitored_resource_descriptors, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + 
core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: gapic_v1.method.wrap_method( + self.list_logs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.tail_log_entries: gapic_v1.method.wrap_method( + self.tail_log_entries, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + Union[ + logging.WriteLogEntriesResponse, + Awaitable[logging.WriteLogEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + Union[ + logging.ListLogEntriesResponse, + Awaitable[logging.ListLogEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Union[ + logging.ListMonitoredResourceDescriptorsResponse, + Awaitable[logging.ListMonitoredResourceDescriptorsResponse] + ]]: + raise NotImplementedError() + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + Union[ + logging.ListLogsResponse, + Awaitable[logging.ListLogsResponse] + ]]: + raise NotImplementedError() + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + Union[ + logging.TailLogEntriesResponse, + Awaitable[logging.TailLogEntriesResponse] + ]]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + 
+__all__ = ( + 'LoggingServiceV2Transport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py new file mode 100755 index 000000000000..718d4ac6713c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -0,0 +1,552 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + 
"serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class LoggingServiceV2GrpcTransport(LoggingServiceV2Transport): + """gRPC backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + Returns: + Callable[[~.DeleteLogRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_log' not in self._stubs: + self._stubs['delete_log'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log'] + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + logging.WriteLogEntriesResponse]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. 
A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Returns: + Callable[[~.WriteLogEntriesRequest], + ~.WriteLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write_log_entries' not in self._stubs: + self._stubs['write_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=logging.WriteLogEntriesRequest.serialize, + response_deserializer=logging.WriteLogEntriesResponse.deserialize, + ) + return self._stubs['write_log_entries'] + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + logging.ListLogEntriesResponse]: + r"""Return a callable for the list log entries method over gRPC. + + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. + For ways to export log entries, see `Exporting + Logs `__. + + Returns: + Callable[[~.ListLogEntriesRequest], + ~.ListLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_log_entries' not in self._stubs: + self._stubs['list_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=logging.ListLogEntriesRequest.serialize, + response_deserializer=logging.ListLogEntriesResponse.deserialize, + ) + return self._stubs['list_log_entries'] + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + logging.ListMonitoredResourceDescriptorsResponse]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists the descriptors for monitored resource types + used by Logging. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + ~.ListMonitoredResourceDescriptorsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + logging.ListLogsResponse]: + r"""Return a callable for the list logs method over gRPC. + + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Returns: + Callable[[~.ListLogsRequest], + ~.ListLogsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_logs' not in self._stubs: + self._stubs['list_logs'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs['list_logs'] + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + logging.TailLogEntriesResponse]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + ~.TailLogEntriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'tail_log_entries' not in self._stubs: + self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( + '/google.logging.v2.LoggingServiceV2/TailLogEntries', + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs['tail_log_entries'] + + def close(self): + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'LoggingServiceV2GrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py new file mode 100755 index 000000000000..6933d0f2ba34 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,678 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import LoggingServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import LoggingServiceV2GrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in 
request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.LoggingServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class LoggingServiceV2GrpcAsyncIOTransport(LoggingServiceV2Transport): + """gRPC AsyncIO backend transport for LoggingServiceV2. + + Service for ingesting and querying logs. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def delete_log(self) -> Callable[ + [logging.DeleteLogRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete log method over gRPC. + + Deletes all the log entries in a log for the \_Default Log + Bucket. The log reappears if it receives new entries. Log + entries written shortly before the delete operation might not be + deleted. Entries received after the delete operation with a + timestamp before the operation will be deleted. + + Returns: + Callable[[~.DeleteLogRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_log' not in self._stubs: + self._stubs['delete_log'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/DeleteLog', + request_serializer=logging.DeleteLogRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_log'] + + @property + def write_log_entries(self) -> Callable[ + [logging.WriteLogEntriesRequest], + Awaitable[logging.WriteLogEntriesResponse]]: + r"""Return a callable for the write log entries method over gRPC. + + Writes log entries to Logging. This API method is the + only way to send log entries to Logging. This method is + used, directly or indirectly, by the Logging agent + (fluentd) and all logging libraries configured to use + Logging. A single request may contain log entries for a + maximum of 1000 different resources (projects, + organizations, billing accounts or folders) + + Returns: + Callable[[~.WriteLogEntriesRequest], + Awaitable[~.WriteLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'write_log_entries' not in self._stubs: + self._stubs['write_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/WriteLogEntries', + request_serializer=logging.WriteLogEntriesRequest.serialize, + response_deserializer=logging.WriteLogEntriesResponse.deserialize, + ) + return self._stubs['write_log_entries'] + + @property + def list_log_entries(self) -> Callable[ + [logging.ListLogEntriesRequest], + Awaitable[logging.ListLogEntriesResponse]]: + r"""Return a callable for the list log entries method over gRPC. + + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. 
+ For ways to export log entries, see `Exporting + Logs `__. + + Returns: + Callable[[~.ListLogEntriesRequest], + Awaitable[~.ListLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_log_entries' not in self._stubs: + self._stubs['list_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogEntries', + request_serializer=logging.ListLogEntriesRequest.serialize, + response_deserializer=logging.ListLogEntriesResponse.deserialize, + ) + return self._stubs['list_log_entries'] + + @property + def list_monitored_resource_descriptors(self) -> Callable[ + [logging.ListMonitoredResourceDescriptorsRequest], + Awaitable[logging.ListMonitoredResourceDescriptorsResponse]]: + r"""Return a callable for the list monitored resource + descriptors method over gRPC. + + Lists the descriptors for monitored resource types + used by Logging. + + Returns: + Callable[[~.ListMonitoredResourceDescriptorsRequest], + Awaitable[~.ListMonitoredResourceDescriptorsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_monitored_resource_descriptors' not in self._stubs: + self._stubs['list_monitored_resource_descriptors'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors', + request_serializer=logging.ListMonitoredResourceDescriptorsRequest.serialize, + response_deserializer=logging.ListMonitoredResourceDescriptorsResponse.deserialize, + ) + return self._stubs['list_monitored_resource_descriptors'] + + @property + def list_logs(self) -> Callable[ + [logging.ListLogsRequest], + Awaitable[logging.ListLogsResponse]]: + r"""Return a callable for the list logs method over gRPC. + + Lists the logs in projects, organizations, folders, + or billing accounts. Only logs that have entries are + listed. + + Returns: + Callable[[~.ListLogsRequest], + Awaitable[~.ListLogsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_logs' not in self._stubs: + self._stubs['list_logs'] = self._logged_channel.unary_unary( + '/google.logging.v2.LoggingServiceV2/ListLogs', + request_serializer=logging.ListLogsRequest.serialize, + response_deserializer=logging.ListLogsResponse.deserialize, + ) + return self._stubs['list_logs'] + + @property + def tail_log_entries(self) -> Callable[ + [logging.TailLogEntriesRequest], + Awaitable[logging.TailLogEntriesResponse]]: + r"""Return a callable for the tail log entries method over gRPC. + + Streaming read of log entries as they are ingested. + Until the stream is terminated, it will continue reading + logs. + + Returns: + Callable[[~.TailLogEntriesRequest], + Awaitable[~.TailLogEntriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'tail_log_entries' not in self._stubs: + self._stubs['tail_log_entries'] = self._logged_channel.stream_stream( + '/google.logging.v2.LoggingServiceV2/TailLogEntries', + request_serializer=logging.TailLogEntriesRequest.serialize, + response_deserializer=logging.TailLogEntriesResponse.deserialize, + ) + return self._stubs['tail_log_entries'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.delete_log: self._wrap_method( + self.delete_log, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.write_log_entries: self._wrap_method( + self.write_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_log_entries: self._wrap_method( + self.list_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_monitored_resource_descriptors: self._wrap_method( + self.list_monitored_resource_descriptors, + 
default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_logs: self._wrap_method( + self.list_logs, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.tail_log_entries: self._wrap_method( + self.tail_log_entries, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=3600.0, + ), + default_timeout=3600.0, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'LoggingServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py new file mode 100755 index 000000000000..78a857aefbac --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import BaseMetricsServiceV2Client +from .async_client import BaseMetricsServiceV2AsyncClient + +__all__ = ( + 'BaseMetricsServiceV2Client', + 'BaseMetricsServiceV2AsyncClient', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py new file mode 100755 index 000000000000..826e222ea3bd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -0,0 +1,1025 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import Dict, Callable, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.logging_v2 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.cloud.logging_v2.services.metrics_service_v2 import pagers +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport +from .client import BaseMetricsServiceV2Client + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +class BaseMetricsServiceV2AsyncClient: + """Service for configuring logs-based metrics.""" + + _client: BaseMetricsServiceV2Client + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = BaseMetricsServiceV2Client.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + + log_metric_path = staticmethod(BaseMetricsServiceV2Client.log_metric_path) + parse_log_metric_path = staticmethod(BaseMetricsServiceV2Client.parse_log_metric_path) + common_billing_account_path = staticmethod(BaseMetricsServiceV2Client.common_billing_account_path) + parse_common_billing_account_path = staticmethod(BaseMetricsServiceV2Client.parse_common_billing_account_path) + common_folder_path = staticmethod(BaseMetricsServiceV2Client.common_folder_path) + parse_common_folder_path = staticmethod(BaseMetricsServiceV2Client.parse_common_folder_path) + common_organization_path = staticmethod(BaseMetricsServiceV2Client.common_organization_path) + parse_common_organization_path = staticmethod(BaseMetricsServiceV2Client.parse_common_organization_path) + common_project_path = staticmethod(BaseMetricsServiceV2Client.common_project_path) + parse_common_project_path = staticmethod(BaseMetricsServiceV2Client.parse_common_project_path) + common_location_path = staticmethod(BaseMetricsServiceV2Client.common_location_path) + parse_common_location_path = staticmethod(BaseMetricsServiceV2Client.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BaseMetricsServiceV2AsyncClient: The constructed client. 
+ """ + return BaseMetricsServiceV2Client.from_service_account_info.__func__(BaseMetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BaseMetricsServiceV2AsyncClient: The constructed client. + """ + return BaseMetricsServiceV2Client.from_service_account_file.__func__(BaseMetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BaseMetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> MetricsServiceV2Transport: + """Returns the transport used by the client instance. + + Returns: + MetricsServiceV2Transport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = BaseMetricsServiceV2Client.get_transport_class + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the base metrics service v2 async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = BaseMetricsServiceV2Client( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.logging_v2.BaseMetricsServiceV2AsyncClient`.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr(self._client._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._client._transport, "_credentials") else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + } + ) + + async def _list_log_metrics(self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListLogMetricsAsyncPager: + r"""Lists logs-based metrics. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__list_log_metrics(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_log_metrics(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]]): + The request object. The parameters to ListLogMetrics. + parent (:class:`str`): + Required. The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsAsyncPager: + Result returned from ListLogMetrics. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._list_log_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers._ListLogMetricsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _get_log_metric(self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: + r"""Gets a logs-based metric. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__get_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = await client._get_log_metric(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]]): + The request object. The parameters to GetLogMetric. + metric_name (:class:`str`): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. 
The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._get_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def _create_log_metric(self, + request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None, + *, + parent: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates a logs-based metric. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__create_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = await client._create_log_metric(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]]): + The request object. The parameters to CreateLogMetric. + parent (:class:`str`): + Required. The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (:class:`google.cloud.logging_v2.types.LogMetric`): + Required. 
The new logs-based metric, + which must not have an identifier that + already exists. + + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, metric]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.CreateLogMetricRequest): + request = logging_metrics.CreateLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._create_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def _update_log_metric(self, + request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + metric: Optional[logging_metrics.LogMetric] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: + r"""Creates or updates a logs-based metric. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__update_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = await client._update_log_metric(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]]): + The request object. The parameters to UpdateLogMetric. + metric_name (:class:`str`): + Required. The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and + it's ``name`` field must be the same as ``[METRIC_ID]`` + If the metric does not exist in ``[PROJECT_ID]``, then a + new metric is created. + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + metric (:class:`google.cloud.logging_v2.types.LogMetric`): + Required. The updated metric. + This corresponds to the ``metric`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name, metric]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.UpdateLogMetricRequest): + request = logging_metrics.UpdateLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + if metric is not None: + request.metric = metric + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._update_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def _delete_log_metric(self, + request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a logs-based metric. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + async def sample__delete_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + await client._delete_log_metric(request=request) + + Args: + request (Optional[Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]]): + The request object. The parameters to DeleteLogMetric. + metric_name (:class:`str`): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.DeleteLogMetricRequest): + request = logging_metrics.DeleteLogMetricRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if metric_name is not None: + request.metric_name = metric_name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport._delete_log_metric] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("metric_name", request.metric_name), + )), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. 
+ + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. 
+ + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. 
If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + async def __aenter__(self) -> "BaseMetricsServiceV2AsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BaseMetricsServiceV2AsyncClient", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py new file mode 100755 index 000000000000..b3a02dc538f1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -0,0 +1,1388 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class BaseMetricsServiceV2ClientMeta(type):
    """Metaclass for the MetricsServiceV2 client.

    Holds the registry of available transport classes and exposes a
    class-level helper for selecting one, keeping transport lookup off
    the client instances themselves.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[MetricsServiceV2Transport]]
    _transport_registry["grpc"] = MetricsServiceV2GrpcTransport
    _transport_registry["grpc_asyncio"] = MetricsServiceV2GrpcAsyncIOTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[MetricsServiceV2Transport]:
        """Return the transport class registered under ``label``.

        Args:
            label: The name of the desired transport. When omitted, the
                first transport registered (the default) is returned.

        Returns:
            The transport class to use.
        """
        # An explicit label is a direct registry lookup.
        if label:
            return cls._transport_registry[label]
        # No label: fall back to the first registered transport.
        return next(iter(cls._transport_registry.values()))
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "logging.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BaseMetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BaseMetricsServiceV2Client: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> MetricsServiceV2Transport: + """Returns the transport used by the client instance. 
+ + Returns: + MetricsServiceV2Transport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def log_metric_path(project: str,metric: str,) -> str: + """Returns a fully-qualified log_metric string.""" + return "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + + @staticmethod + def parse_log_metric_path(path: str) -> Dict[str,str]: + """Parses a log_metric path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/metrics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn("get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + _default_universe = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError(f"mTLS is not supported in any universe other than {_default_universe}.") + api_endpoint = BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain) + return api_endpoint + + @staticmethod + def _get_universe_domain(client_universe_domain: Optional[str], universe_domain_env: Optional[str]) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. 
+ """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, + error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN, HTTPStatus.NOT_FOUND]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, MetricsServiceV2Transport, Callable[..., MetricsServiceV2Transport]]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the base metrics service v2 client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,MetricsServiceV2Transport,Callable[..., MetricsServiceV2Transport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the MetricsServiceV2Transport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast(client_options_lib.ClientOptions, self._client_options) + + universe_domain_opt = getattr(self._client_options, 'universe_domain', None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = BaseMetricsServiceV2Client._read_environment_variables() + self._client_cert_source = BaseMetricsServiceV2Client._get_client_cert_source(self._client_options.client_cert_source, self._use_client_cert) + self._universe_domain = BaseMetricsServiceV2Client._get_universe_domain(universe_domain_opt, self._universe_domain_env) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, MetricsServiceV2Transport) + if transport_provided: + # transport is a MetricsServiceV2Transport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(MetricsServiceV2Transport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = (self._api_endpoint or + BaseMetricsServiceV2Client._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint)) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + transport_init: Union[Type[MetricsServiceV2Transport], Callable[..., MetricsServiceV2Transport]] = ( + BaseMetricsServiceV2Client.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., MetricsServiceV2Transport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + _LOGGER.debug( + "Created client 
`google.logging_v2.BaseMetricsServiceV2Client`.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "universeDomain": getattr(self._transport._credentials, "universe_domain", ""), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr(self.transport._credentials, "get_cred_info", lambda: None)(), + } if hasattr(self._transport, "_credentials") else { + "serviceName": "google.logging.v2.MetricsServiceV2", + "credentialsType": None, + } + ) + + def _list_log_metrics(self, + request: Optional[Union[logging_metrics.ListLogMetricsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers._ListLogMetricsPager: + r"""Lists logs-based metrics. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__list_log_metrics(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.ListLogMetricsRequest, dict]): + The request object. The parameters to ListLogMetrics. + parent (str): + Required. 
The name of the project containing the + metrics: + + :: + + "projects/[PROJECT_ID]" + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsPager: + Result returned from ListLogMetrics. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.ListLogMetricsRequest): + request = logging_metrics.ListLogMetricsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport._list_log_metrics] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers._ListLogMetricsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def _get_log_metric(self, + request: Optional[Union[logging_metrics.GetLogMetricRequest, dict]] = None, + *, + metric_name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> logging_metrics.LogMetric: + r"""Gets a logs-based metric. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import logging_v2 + + def sample__get_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client._get_log_metric(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.logging_v2.types.GetLogMetricRequest, dict]): + The request object. The parameters to GetLogMetric. + metric_name (str): + Required. 
The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + This corresponds to the ``metric_name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.logging_v2.types.LogMetric: + Describes a logs-based metric. The + value of the metric is the number of log + entries that match a logs filter in a + given time interval. + + Logs-based metrics can also be used to + extract values from logs and create a + distribution of the values. The + distribution records the statistics of + the extracted values along with an + optional histogram of the values as + specified by the bucket options. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([metric_name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, logging_metrics.GetLogMetricRequest): + request = logging_metrics.GetLogMetricRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
            if metric_name is not None:
                request.metric_name = metric_name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._get_log_metric]

        # Certain fields should be provided within the metadata header;
        # add these here.  The routing header lets the backend route the
        # request based on the resource name.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("metric_name", request.metric_name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _create_log_metric(self,
            request: Optional[Union[logging_metrics.CreateLogMetricRequest, dict]] = None,
            *,
            parent: Optional[str] = None,
            metric: Optional[logging_metrics.LogMetric] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> logging_metrics.LogMetric:
        r"""Creates a logs-based metric.

        Args:
            request (Union[google.cloud.logging_v2.types.CreateLogMetricRequest, dict]):
                The request object. The parameters to CreateLogMetric.
            parent (str):
                Required. The resource name of the project in which to
                create the metric: ``"projects/[PROJECT_ID]"``. The new
                metric must be provided in the request. This corresponds
                to the ``parent`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            metric (google.cloud.logging_v2.types.LogMetric):
                Required. The new logs-based metric, which must not have
                an identifier that already exists. This corresponds to
                the ``metric`` field on the ``request`` instance; if
                ``request`` is provided, this should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.

        Returns:
            google.cloud.logging_v2.types.LogMetric:
                Describes a logs-based metric. The value of the metric is
                the number of log entries that match a logs filter in a
                given time interval.

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened field arguments (``parent``, ``metric``).
        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, metric])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        # NOTE(review): the generated comment above contradicts the flattened
        # ``parent``/``metric`` handling below — presumably stale template text;
        # confirm against the generator template.
        if not isinstance(request, logging_metrics.CreateLogMetricRequest):
            request = logging_metrics.CreateLogMetricRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if parent is not None:
                request.parent = parent
            if metric is not None:
                request.metric = metric

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._create_log_metric]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("parent", request.parent),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _update_log_metric(self,
            request: Optional[Union[logging_metrics.UpdateLogMetricRequest, dict]] = None,
            *,
            metric_name: Optional[str] = None,
            metric: Optional[logging_metrics.LogMetric] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> logging_metrics.LogMetric:
        r"""Creates or updates a logs-based metric.

        Args:
            request (Union[google.cloud.logging_v2.types.UpdateLogMetricRequest, dict]):
                The request object. The parameters to UpdateLogMetric.
            metric_name (str):
                Required. The resource name of the metric to update:
                ``"projects/[PROJECT_ID]/metrics/[METRIC_ID]"``. The updated
                metric must be provided in the request and its ``name`` field
                must be the same as ``[METRIC_ID]``. If the metric does not
                exist in ``[PROJECT_ID]``, then a new metric is created.
                This corresponds to the ``metric_name`` field on the
                ``request`` instance; if ``request`` is provided, this
                should not be set.
            metric (google.cloud.logging_v2.types.LogMetric):
                Required. The updated metric. This corresponds to the
                ``metric`` field on the ``request`` instance; if ``request``
                is provided, this should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.

        Returns:
            google.cloud.logging_v2.types.LogMetric:
                Describes a logs-based metric. The value of the metric is
                the number of log entries that match a logs filter in a
                given time interval.

        Raises:
            ValueError: If ``request`` is provided together with any of the
                flattened field arguments (``metric_name``, ``metric``).
        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([metric_name, metric])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_metrics.UpdateLogMetricRequest):
            request = logging_metrics.UpdateLogMetricRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if metric_name is not None:
                request.metric_name = metric_name
            if metric is not None:
                request.metric = metric

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._update_log_metric]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("metric_name", request.metric_name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    def _delete_log_metric(self,
            request: Optional[Union[logging_metrics.DeleteLogMetricRequest, dict]] = None,
            *,
            metric_name: Optional[str] = None,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> None:
        r"""Deletes a logs-based metric.

        Args:
            request (Union[google.cloud.logging_v2.types.DeleteLogMetricRequest, dict]):
                The request object. The parameters to DeleteLogMetric.
            metric_name (str):
                Required. The resource name of the metric to delete:
                ``"projects/[PROJECT_ID]/metrics/[METRIC_ID]"``. This
                corresponds to the ``metric_name`` field on the ``request``
                instance; if ``request`` is provided, this should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.

        Raises:
            ValueError: If ``request`` is provided together with the
                flattened ``metric_name`` argument.
        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        has_flattened_params = any([metric_name])
        if request is not None and has_flattened_params:
            raise ValueError('If the `request` argument is set, then none of '
                             'the individual field arguments should be set.')

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, logging_metrics.DeleteLogMetricRequest):
            request = logging_metrics.DeleteLogMetricRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if metric_name is not None:
                request.metric_name = metric_name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport._delete_log_metric]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ("metric_name", request.metric_name),
            )),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.  Delete returns Empty, so there is no response
        # to hand back to the caller.
        rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    def __enter__(self) -> "BaseMetricsServiceV2Client":
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()

    def list_operations(
        self,
        request: Optional[operations_pb2.ListOperationsRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> operations_pb2.ListOperationsResponse:
        r"""Lists operations that match the specified filter in the request.

        Args:
            request (:class:`~.operations_pb2.ListOperationsRequest`):
                The request object. Request message for
                `ListOperations` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.
        Returns:
            ~.operations_pb2.ListOperationsResponse:
                Response message for ``ListOperations`` method.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.ListOperationsRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_operations]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        try:
            # Send the request.
            response = rpc(
                request, retry=retry, timeout=timeout, metadata=metadata,)

            # Done; return the response.
            return response
        except core_exceptions.GoogleAPICallError as e:
            # Enrich auth errors with credential info before re-raising.
            self._add_cred_info_for_auth_errors(e)
            raise e

    def get_operation(
        self,
        request: Optional[operations_pb2.GetOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> operations_pb2.Operation:
        r"""Gets the latest state of a long-running operation.

        Args:
            request (:class:`~.operations_pb2.GetOperationRequest`):
                The request object. Request message for
                `GetOperation` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.
        Returns:
            ~.operations_pb2.Operation:
                An ``Operation`` object.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.GetOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.get_operation]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        try:
            # Send the request.
            response = rpc(
                request, retry=retry, timeout=timeout, metadata=metadata,)

            # Done; return the response.
            return response
        except core_exceptions.GoogleAPICallError as e:
            # Enrich auth errors with credential info before re-raising.
            self._add_cred_info_for_auth_errors(e)
            raise e

    def cancel_operation(
        self,
        request: Optional[operations_pb2.CancelOperationRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> None:
        r"""Starts asynchronous cancellation on a long-running operation.

        The server makes a best effort to cancel the operation, but success
        is not guaranteed. If the server doesn't support this method, it returns
        `google.rpc.Code.UNIMPLEMENTED`.

        Args:
            request (:class:`~.operations_pb2.CancelOperationRequest`):
                The request object. Request message for
                `CancelOperation` method.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.
        Returns:
            None
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.CancelOperationRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.cancel_operation]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("name", request.name),)),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
+ rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "BaseMetricsServiceV2Client", +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py new file mode 100755 index 000000000000..b7d888ff8393 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import retry_async as retries_async
from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator, Union
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None]
except AttributeError:  # pragma: NO COVER
    # Older api-core without _MethodDefault; fall back to a looser alias.
    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
    OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None]  # type: ignore

from google.cloud.logging_v2.types import logging_metrics


class _ListLogMetricsPager:
    """A pager for iterating through ``_list_log_metrics`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and
    provides an ``__iter__`` method to iterate through its ``metrics`` field.

    If there are more pages, the ``__iter__`` method will make additional
    ``_ListLogMetrics`` requests and continue to iterate through the
    ``metrics`` field on the corresponding responses.

    All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., logging_metrics.ListLogMetricsResponse],
            request: logging_metrics.ListLogMetricsRequest,
            response: logging_metrics.ListLogMetricsResponse,
            *,
            retry: OptionalRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.logging_v2.types.ListLogMetricsRequest):
                The initial request object.
            response (google.cloud.logging_v2.types.ListLogMetricsResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.
        """
        self._method = method
        # Re-wrap so page_token mutation below never touches the caller's request.
        self._request = logging_metrics.ListLogMetricsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterator[logging_metrics.ListLogMetricsResponse]:
        # Yield the current page, then keep fetching while a next_page_token
        # is present.
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
            yield self._response

    def __iter__(self) -> Iterator[logging_metrics.LogMetric]:
        for page in self.pages:
            yield from page.metrics

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)


class _ListLogMetricsAsyncPager:
    """A pager for iterating through ``_list_log_metrics`` requests.

    This class thinly wraps an initial
    :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and
    provides an ``__aiter__`` method to iterate through its ``metrics`` field.

    If there are more pages, the ``__aiter__`` method will make additional
    ``_ListLogMetrics`` requests and continue to iterate through the
    ``metrics`` field on the corresponding responses.

    All the usual :class:`google.cloud.logging_v2.types.ListLogMetricsResponse`
    attributes are available on the pager. If multiple requests are made, only
    the most recent response is retained, and thus used for attribute lookup.
    """
    def __init__(self,
            method: Callable[..., Awaitable[logging_metrics.ListLogMetricsResponse]],
            request: logging_metrics.ListLogMetricsRequest,
            response: logging_metrics.ListLogMetricsResponse,
            *,
            retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT,
            timeout: Union[float, object] = gapic_v1.method.DEFAULT,
            metadata: Sequence[Tuple[str, Union[str, bytes]]] = ()):
        """Instantiates the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.logging_v2.types.ListLogMetricsRequest):
                The initial request object.
            response (google.cloud.logging_v2.types.ListLogMetricsResponse):
                The initial response object.
            retry (google.api_core.retry.AsyncRetry): Designation of what
                errors, if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs
                which should be sent along with the request as metadata.
                Normally, each value must be of type `str`, but for metadata
                keys ending with the suffix `-bin`, the corresponding values
                must be of type `bytes`.
        """
        self._method = method
        # Re-wrap so page_token mutation below never touches the caller's request.
        self._request = logging_metrics.ListLogMetricsRequest(request)
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterator[logging_metrics.ListLogMetricsResponse]:
        yield self._response
        while self._response.next_page_token:
            self._request.page_token = self._response.next_page_token
            self._response = await self._method(self._request, retry=self._retry, timeout=self._timeout, metadata=self._metadata)
            yield self._response
    def __aiter__(self) -> AsyncIterator[logging_metrics.LogMetric]:
        async def async_generator():
            async for page in self.pages:
                for response in page.metrics:
                    yield response

        return async_generator()

    def __repr__(self) -> str:
        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst
new file mode 100755
index 000000000000..00dffa25f329
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/README.rst
@@ -0,0 +1,9 @@

transport inheritance structure
_______________________________

`MetricsServiceV2Transport` is the ABC for all transports.
- public child `MetricsServiceV2GrpcTransport` for sync gRPC transport (defined in `grpc.py`).
- public child `MetricsServiceV2GrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`).
- private child `_BaseMetricsServiceV2RestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`).
- public child `MetricsServiceV2RestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`).
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
new file mode 100755
index 000000000000..994b47887efd
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py
@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import MetricsServiceV2Transport
from .grpc import MetricsServiceV2GrpcTransport
from .grpc_asyncio import MetricsServiceV2GrpcAsyncIOTransport


# Compile a registry of transports, keyed by the transport name used in
# client_options / the client's ``transport`` argument.
_transport_registry = OrderedDict()  # type: Dict[str, Type[MetricsServiceV2Transport]]
_transport_registry['grpc'] = MetricsServiceV2GrpcTransport
_transport_registry['grpc_asyncio'] = MetricsServiceV2GrpcAsyncIOTransport

__all__ = (
    'MetricsServiceV2Transport',
    'MetricsServiceV2GrpcTransport',
    'MetricsServiceV2GrpcAsyncIOTransport',
)
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
new file mode 100755
index 000000000000..40cb303a6233
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py
@@ -0,0 +1,302 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

from google.cloud.logging_v2 import gapic_version as package_version

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.logging_v2.types import logging_metrics
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore

# Default client info, sent as part of the user-agent on every request.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__)


class MetricsServiceV2Transport(abc.ABC):
    """Abstract transport class for MetricsServiceV2."""

    # OAuth scopes requested by default when none are supplied.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/cloud-platform.read-only',
        'https://www.googleapis.com/auth/logging.admin',
        'https://www.googleapis.com/auth/logging.read',
        'https://www.googleapis.com/auth/logging.write',
    )

    DEFAULT_HOST: str = 'logging.googleapis.com'
    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            api_audience: Optional[str] = None,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'logging.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are provided.
        """

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes
        # Subclasses may have set this before calling super().__init__();
        # only default it if absent.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(api_audience if api_audience else host)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

    @property
    def host(self):
        # Host, including the port appended in __init__.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods: each RPC gets its default retry
        # policy and timeout baked in once, at transport construction time.
        self._wrapped_methods = {
            self._list_log_metrics: gapic_v1.method.wrap_method(
                self._list_log_metrics,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self._get_log_metric: gapic_v1.method.wrap_method(
                self._get_log_metric,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            # Create is not retried by default (it is not idempotent).
            self._create_log_metric: gapic_v1.method.wrap_method(
                self._create_log_metric,
                default_timeout=60.0,
                client_info=client_info,
            ),
            self._update_log_metric: gapic_v1.method.wrap_method(
                self._update_log_metric,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self._delete_log_metric: gapic_v1.method.wrap_method(
                self._delete_log_metric,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.InternalServerError,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.cancel_operation: gapic_v1.method.wrap_method(
                self.cancel_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_operation: gapic_v1.method.wrap_method(
                self.get_operation,
                default_timeout=None,
                client_info=client_info,
            ),
            self.list_operations: gapic_v1.method.wrap_method(
                self.list_operations,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    # The properties below are the abstract RPC surface; concrete transports
    # (gRPC, gRPC-asyncio) return the actual callables.
    @property
    def _list_log_metrics(self) -> Callable[
            [logging_metrics.ListLogMetricsRequest],
            Union[
                logging_metrics.ListLogMetricsResponse,
                Awaitable[logging_metrics.ListLogMetricsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def _get_log_metric(self) -> Callable[
            [logging_metrics.GetLogMetricRequest],
            Union[
                logging_metrics.LogMetric,
                Awaitable[logging_metrics.LogMetric]
            ]]:
        raise NotImplementedError()

    @property
    def _create_log_metric(self) -> Callable[
            [logging_metrics.CreateLogMetricRequest],
            Union[
                logging_metrics.LogMetric,
                Awaitable[logging_metrics.LogMetric]
            ]]:
        raise NotImplementedError()

    @property
    def _update_log_metric(self) -> Callable[
            [logging_metrics.UpdateLogMetricRequest],
            Union[
                logging_metrics.LogMetric,
                Awaitable[logging_metrics.LogMetric]
            ]]:
        raise NotImplementedError()

    @property
    def _delete_log_metric(self) -> Callable[
            [logging_metrics.DeleteLogMetricRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def list_operations(
        self,
    ) -> Callable[
        [operations_pb2.ListOperationsRequest],
        Union[operations_pb2.ListOperationsResponse, Awaitable[operations_pb2.ListOperationsResponse]],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def cancel_operation(
        self,
    ) -> Callable[
        [operations_pb2.CancelOperationRequest],
        None,
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport name, e.g. "grpc"; supplied by subclasses.
        raise NotImplementedError()


__all__ = (
    'MetricsServiceV2Transport',
)
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
new file mode 100755
index 000000000000..174066a7ba87
--- /dev/null
+++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py
@@ -0,0 +1,507 @@
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + 
"serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport): + """gRPC backend transport for MetricsServiceV2. + + Service for configuring logs-based metrics. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. 
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel(self._grpc_channel, self._interceptor) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def _list_log_metrics(self) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + logging_metrics.ListLogMetricsResponse]: + r"""Return a callable for the list log metrics method over gRPC. + + Lists logs-based metrics. + + Returns: + Callable[[~.ListLogMetricsRequest], + ~.ListLogMetricsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_list_log_metrics' not in self._stubs: + self._stubs['_list_log_metrics'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_ListLogMetrics', + request_serializer=logging_metrics.ListLogMetricsRequest.serialize, + response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, + ) + return self._stubs['_list_log_metrics'] + + @property + def _get_log_metric(self) -> Callable[ + [logging_metrics.GetLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the get log metric method over gRPC. + + Gets a logs-based metric. + + Returns: + Callable[[~.GetLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_get_log_metric' not in self._stubs: + self._stubs['_get_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_GetLogMetric', + request_serializer=logging_metrics.GetLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['_get_log_metric'] + + @property + def _create_log_metric(self) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the create log metric method over gRPC. + + Creates a logs-based metric. + + Returns: + Callable[[~.CreateLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_create_log_metric' not in self._stubs: + self._stubs['_create_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_CreateLogMetric', + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['_create_log_metric'] + + @property + def _update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + logging_metrics.LogMetric]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + ~.LogMetric]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_update_log_metric' not in self._stubs: + self._stubs['_update_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_UpdateLogMetric', + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['_update_log_metric'] + + @property + def _delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_delete_log_metric' not in self._stubs: + self._stubs['_delete_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_DeleteLogMetric', + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_log_metric'] + + def close(self): + self._logged_channel.close() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'MetricsServiceV2GrpcTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py new file mode 100755 index 000000000000..1ad75cbf05df --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -0,0 +1,606 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.logging_v2.types import logging_metrics +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO +from .grpc import MetricsServiceV2GrpcTransport + +try: + from google.api_core import client_logging # type: ignore + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in 
request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra = { + "serviceName": "google.logging.v2.MetricsServiceV2", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class MetricsServiceV2GrpcAsyncIOTransport(MetricsServiceV2Transport): + """gRPC AsyncIO backend transport for MetricsServiceV2. + + Service for configuring logs-based metrics. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'logging.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'logging.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def _list_log_metrics(self) -> Callable[ + [logging_metrics.ListLogMetricsRequest], + Awaitable[logging_metrics.ListLogMetricsResponse]]: + r"""Return a callable for the list log metrics method over gRPC. + + Lists logs-based metrics. + + Returns: + Callable[[~.ListLogMetricsRequest], + Awaitable[~.ListLogMetricsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_list_log_metrics' not in self._stubs: + self._stubs['_list_log_metrics'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_ListLogMetrics', + request_serializer=logging_metrics.ListLogMetricsRequest.serialize, + response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, + ) + return self._stubs['_list_log_metrics'] + + @property + def _get_log_metric(self) -> Callable[ + [logging_metrics.GetLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the get log metric method over gRPC. + + Gets a logs-based metric. + + Returns: + Callable[[~.GetLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_get_log_metric' not in self._stubs: + self._stubs['_get_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_GetLogMetric', + request_serializer=logging_metrics.GetLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['_get_log_metric'] + + @property + def _create_log_metric(self) -> Callable[ + [logging_metrics.CreateLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the create log metric method over gRPC. + + Creates a logs-based metric. + + Returns: + Callable[[~.CreateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_create_log_metric' not in self._stubs: + self._stubs['_create_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_CreateLogMetric', + request_serializer=logging_metrics.CreateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['_create_log_metric'] + + @property + def _update_log_metric(self) -> Callable[ + [logging_metrics.UpdateLogMetricRequest], + Awaitable[logging_metrics.LogMetric]]: + r"""Return a callable for the update log metric method over gRPC. + + Creates or updates a logs-based metric. + + Returns: + Callable[[~.UpdateLogMetricRequest], + Awaitable[~.LogMetric]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if '_update_log_metric' not in self._stubs: + self._stubs['_update_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_UpdateLogMetric', + request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, + response_deserializer=logging_metrics.LogMetric.deserialize, + ) + return self._stubs['_update_log_metric'] + + @property + def _delete_log_metric(self) -> Callable[ + [logging_metrics.DeleteLogMetricRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete log metric method over gRPC. + + Deletes a logs-based metric. + + Returns: + Callable[[~.DeleteLogMetricRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if '_delete_log_metric' not in self._stubs: + self._stubs['_delete_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/_DeleteLogMetric', + request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['_delete_log_metric'] + + def _prep_wrapped_messages(self, client_info): + """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self._list_log_metrics: self._wrap_method( + self._list_log_metrics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._get_log_metric: self._wrap_method( + self._get_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._create_log_metric: self._wrap_method( + self._create_log_metric, + default_timeout=60.0, + client_info=client_info, + ), + self._update_log_metric: self._wrap_method( + self._update_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self._delete_log_metric: self._wrap_method( + self._delete_log_metric, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[[operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse]: + r"""Return a callable for the list_operations method over gRPC. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ( + 'MetricsServiceV2GrpcAsyncIOTransport', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py new file mode 100755 index 000000000000..ce693111d295 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .log_entry import ( + LogEntry, + LogEntryOperation, + LogEntrySourceLocation, + LogSplit, +) +from .logging import ( + DeleteLogRequest, + ListLogEntriesRequest, + ListLogEntriesResponse, + ListLogsRequest, + ListLogsResponse, + ListMonitoredResourceDescriptorsRequest, + ListMonitoredResourceDescriptorsResponse, + TailLogEntriesRequest, + TailLogEntriesResponse, + WriteLogEntriesPartialErrors, + WriteLogEntriesRequest, + WriteLogEntriesResponse, +) +from .logging_config import ( + BigQueryDataset, + BigQueryOptions, + BucketMetadata, + CmekSettings, + CopyLogEntriesMetadata, + CopyLogEntriesRequest, + CopyLogEntriesResponse, + CreateBucketRequest, + CreateExclusionRequest, + CreateLinkRequest, + CreateSinkRequest, + CreateViewRequest, + DeleteBucketRequest, + DeleteExclusionRequest, + DeleteLinkRequest, + DeleteSinkRequest, + DeleteViewRequest, + GetBucketRequest, + GetCmekSettingsRequest, + GetExclusionRequest, + GetLinkRequest, + GetSettingsRequest, + GetSinkRequest, + GetViewRequest, + IndexConfig, + Link, + LinkMetadata, + ListBucketsRequest, + ListBucketsResponse, + ListExclusionsRequest, + ListExclusionsResponse, + ListLinksRequest, + ListLinksResponse, + ListSinksRequest, + ListSinksResponse, + ListViewsRequest, + ListViewsResponse, + LocationMetadata, + LogBucket, + LogExclusion, + LogSink, + LogView, + Settings, + UndeleteBucketRequest, + UpdateBucketRequest, + UpdateCmekSettingsRequest, + UpdateExclusionRequest, + UpdateSettingsRequest, + UpdateSinkRequest, + UpdateViewRequest, + IndexType, + LifecycleState, + OperationState, +) +from 
.logging_metrics import ( + CreateLogMetricRequest, + DeleteLogMetricRequest, + GetLogMetricRequest, + ListLogMetricsRequest, + ListLogMetricsResponse, + LogMetric, + UpdateLogMetricRequest, +) + +__all__ = ( + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'LogSplit', + 'DeleteLogRequest', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'BigQueryDataset', + 'BigQueryOptions', + 'BucketMetadata', + 'CmekSettings', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesRequest', + 'CopyLogEntriesResponse', + 'CreateBucketRequest', + 'CreateExclusionRequest', + 'CreateLinkRequest', + 'CreateSinkRequest', + 'CreateViewRequest', + 'DeleteBucketRequest', + 'DeleteExclusionRequest', + 'DeleteLinkRequest', + 'DeleteSinkRequest', + 'DeleteViewRequest', + 'GetBucketRequest', + 'GetCmekSettingsRequest', + 'GetExclusionRequest', + 'GetLinkRequest', + 'GetSettingsRequest', + 'GetSinkRequest', + 'GetViewRequest', + 'IndexConfig', + 'Link', + 'LinkMetadata', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'ListLinksRequest', + 'ListLinksResponse', + 'ListSinksRequest', + 'ListSinksResponse', + 'ListViewsRequest', + 'ListViewsResponse', + 'LocationMetadata', + 'LogBucket', + 'LogExclusion', + 'LogSink', + 'LogView', + 'Settings', + 'UndeleteBucketRequest', + 'UpdateBucketRequest', + 'UpdateCmekSettingsRequest', + 'UpdateExclusionRequest', + 'UpdateSettingsRequest', + 'UpdateSinkRequest', + 'UpdateViewRequest', + 'IndexType', + 'LifecycleState', + 'OperationState', + 'CreateLogMetricRequest', + 'DeleteLogMetricRequest', + 'GetLogMetricRequest', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'LogMetric', + 
'UpdateLogMetricRequest', +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py new file mode 100755 index 000000000000..e9dac2e5ebcb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py @@ -0,0 +1,435 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LogEntry', + 'LogEntryOperation', + 'LogEntrySourceLocation', + 'LogSplit', + }, +) + + +class LogEntry(proto.Message): + r"""An individual entry in a log. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + log_name (str): + Required. The resource name of the log to which this log + entry belongs: + + :: + + "projects/[PROJECT_ID]/logs/[LOG_ID]" + "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" + "folders/[FOLDER_ID]/logs/[LOG_ID]" + + A project number may be used in place of PROJECT_ID. The + project number is translated to its corresponding PROJECT_ID + internally and the ``log_name`` field will contain + PROJECT_ID in queries and exports. + + ``[LOG_ID]`` must be URL-encoded within ``log_name``. + Example: + ``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + + ``[LOG_ID]`` must be less than 512 characters long and can + only include the following characters: upper and lower case + alphanumeric characters, forward-slash, underscore, hyphen, + and period. + + For backward compatibility, if ``log_name`` begins with a + forward-slash, such as ``/projects/...``, then the log entry + is ingested as usual, but the forward-slash is removed. + Listing the log entry will not show the leading slash and + filtering for a log name with a leading slash will never + return any results. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Required. The monitored resource that + produced this log entry. + Example: a log entry that reports a database + error would be associated with the monitored + resource designating the particular database + that reported the error. + proto_payload (google.protobuf.any_pb2.Any): + The log entry payload, represented as a + protocol buffer. Some Google Cloud Platform + services use this field for their log entry + payloads. 
+ + The following protocol buffer types are + supported; user-defined types are not supported: + + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog". + + This field is a member of `oneof`_ ``payload``. + text_payload (str): + The log entry payload, represented as a + Unicode string (UTF-8). + + This field is a member of `oneof`_ ``payload``. + json_payload (google.protobuf.struct_pb2.Struct): + The log entry payload, represented as a + structure that is expressed as a JSON object. + + This field is a member of `oneof`_ ``payload``. + timestamp (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time the event described by the log entry + occurred. This time is used to compute the log entry's age + and to enforce the logs retention period. If this field is + omitted in a new log entry, then Logging assigns it the + current time. Timestamps have nanosecond accuracy, but + trailing zeros in the fractional seconds might be omitted + when the timestamp is displayed. + + Incoming log entries must have timestamps that don't exceed + the `logs retention + period `__ + in the past, and that don't exceed 24 hours in the future. + Log entries outside those time boundaries aren't ingested by + Logging. + receive_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the log entry was + received by Logging. + severity (google.logging.type.log_severity_pb2.LogSeverity): + Optional. The severity of the log entry. The default value + is ``LogSeverity.DEFAULT``. + insert_id (str): + Optional. A unique identifier for the log entry. If you + provide a value, then Logging considers other log entries in + the same project, with the same ``timestamp``, and with the + same ``insert_id`` to be duplicates which are removed in a + single query result. However, there are no guarantees of + de-duplication in the export of logs. 
+ + If the ``insert_id`` is omitted when writing a log entry, + the Logging API assigns its own unique identifier in this + field. + + In queries, the ``insert_id`` is also used to order log + entries that have the same ``log_name`` and ``timestamp`` + values. + http_request (google.logging.type.http_request_pb2.HttpRequest): + Optional. Information about the HTTP request + associated with this log entry, if applicable. + labels (MutableMapping[str, str]): + Optional. A map of key, value pairs that provides additional + information about the log entry. The labels can be + user-defined or system-defined. + + User-defined labels are arbitrary key, value pairs that you + can use to classify logs. + + System-defined labels are defined by GCP services for + platform logs. They have two components - a service + namespace component and the attribute name. For example: + ``compute.googleapis.com/resource_name``. + + Cloud Logging truncates label keys that exceed 512 B and + label values that exceed 64 KB upon their associated log + entry being written. The truncation is indicated by an + ellipsis at the end of the character string. + operation (google.cloud.logging_v2.types.LogEntryOperation): + Optional. Information about an operation + associated with the log entry, if applicable. + trace (str): + Optional. The REST resource name of the trace being written + to `Cloud Trace `__ in + association with this log entry. For example, if your trace + data is stored in the Cloud project "my-trace-project" and + if the service that is creating the log entry receives a + trace header that includes the trace ID "12345", then the + service should use + "projects/my-tracing-project/traces/12345". + + The ``trace`` field provides the link between logs and + traces. By using this field, you can navigate from a log + entry to a trace. + span_id (str): + Optional. The ID of the `Cloud + Trace `__ span associated + with the current operation in which the log is being + written. 
For example, if a span has the REST resource name + of + "projects/some-project/traces/some-trace/spans/some-span-id", + then the ``span_id`` field is "some-span-id". + + A + `Span `__ + represents a single operation within a trace. Whereas a + trace may involve multiple different microservices running + on multiple different machines, a span generally corresponds + to a single logical operation being performed in a single + instance of a microservice on one specific machine. Spans + are the nodes within the tree that is a trace. + + Applications that are `instrumented for + tracing `__ will + generally assign a new, unique span ID on each incoming + request. It is also common to create and record additional + spans corresponding to internal processing elements as well + as issuing requests to dependencies. + + The span ID is expected to be a 16-character, hexadecimal + encoding of an 8-byte array and should not be zero. It + should be unique within the trace and should, ideally, be + generated in a manner that is uniformly random. + + Example values: + + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` + trace_sampled (bool): + Optional. The sampling decision of the trace associated with + the log entry. + + True means that the trace resource name in the ``trace`` + field was sampled for storage in a trace backend. False + means that the trace was not sampled for storage when this + log entry was written, or the sampling decision was unknown + at the time. A non-sampled ``trace`` value is still useful + as a request correlation identifier. The default is False. + source_location (google.cloud.logging_v2.types.LogEntrySourceLocation): + Optional. Source code location information + associated with the log entry, if any. + split (google.cloud.logging_v2.types.LogSplit): + Optional. Information indicating this + LogEntry is part of a sequence of multiple log + entries split from a single LogEntry. 
+ """ + + log_name: str = proto.Field( + proto.STRING, + number=12, + ) + resource: monitored_resource_pb2.MonitoredResource = proto.Field( + proto.MESSAGE, + number=8, + message=monitored_resource_pb2.MonitoredResource, + ) + proto_payload: any_pb2.Any = proto.Field( + proto.MESSAGE, + number=2, + oneof='payload', + message=any_pb2.Any, + ) + text_payload: str = proto.Field( + proto.STRING, + number=3, + oneof='payload', + ) + json_payload: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=6, + oneof='payload', + message=struct_pb2.Struct, + ) + timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + receive_timestamp: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=24, + message=timestamp_pb2.Timestamp, + ) + severity: log_severity_pb2.LogSeverity = proto.Field( + proto.ENUM, + number=10, + enum=log_severity_pb2.LogSeverity, + ) + insert_id: str = proto.Field( + proto.STRING, + number=4, + ) + http_request: http_request_pb2.HttpRequest = proto.Field( + proto.MESSAGE, + number=7, + message=http_request_pb2.HttpRequest, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=11, + ) + operation: 'LogEntryOperation' = proto.Field( + proto.MESSAGE, + number=15, + message='LogEntryOperation', + ) + trace: str = proto.Field( + proto.STRING, + number=22, + ) + span_id: str = proto.Field( + proto.STRING, + number=27, + ) + trace_sampled: bool = proto.Field( + proto.BOOL, + number=30, + ) + source_location: 'LogEntrySourceLocation' = proto.Field( + proto.MESSAGE, + number=23, + message='LogEntrySourceLocation', + ) + split: 'LogSplit' = proto.Field( + proto.MESSAGE, + number=35, + message='LogSplit', + ) + + +class LogEntryOperation(proto.Message): + r"""Additional information about a potentially long-running + operation with which a log entry is associated. + + Attributes: + id (str): + Optional. An arbitrary operation identifier. 
+ Log entries with the same identifier are assumed + to be part of the same operation. + producer (str): + Optional. An arbitrary producer identifier. The combination + of ``id`` and ``producer`` must be globally unique. Examples + for ``producer``: ``"MyDivision.MyBigCompany.com"``, + ``"github.com/MyProject/MyApplication"``. + first (bool): + Optional. Set this to True if this is the + first log entry in the operation. + last (bool): + Optional. Set this to True if this is the + last log entry in the operation. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + producer: str = proto.Field( + proto.STRING, + number=2, + ) + first: bool = proto.Field( + proto.BOOL, + number=3, + ) + last: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class LogEntrySourceLocation(proto.Message): + r"""Additional information about the source code location that + produced the log entry. + + Attributes: + file (str): + Optional. Source file name. Depending on the + runtime environment, this might be a simple name + or a fully-qualified name. + line (int): + Optional. Line within the source file. + 1-based; 0 indicates no line number available. + function (str): + Optional. Human-readable name of the function or method + being invoked, with optional context such as the class or + package name. This information may be used in contexts such + as the logs viewer, where a file and line number are less + meaningful. The format can vary by language. For example: + ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` + (Go), ``function`` (Python). + """ + + file: str = proto.Field( + proto.STRING, + number=1, + ) + line: int = proto.Field( + proto.INT64, + number=2, + ) + function: str = proto.Field( + proto.STRING, + number=3, + ) + + +class LogSplit(proto.Message): + r"""Additional information used to correlate multiple log + entries. Used when a single LogEntry would exceed the Google + Cloud Logging size limit and is split across multiple log + entries. 
+ + Attributes: + uid (str): + A globally unique identifier for all log entries in a + sequence of split log entries. All log entries with the same + \|LogSplit.uid\| are assumed to be part of the same sequence + of split log entries. + index (int): + The index of this LogEntry in the sequence of split log + entries. Log entries are given \|index\| values 0, 1, ..., + n-1 for a sequence of n log entries. + total_splits (int): + The total number of log entries that the + original LogEntry was split into. + """ + + uid: str = proto.Field( + proto.STRING, + number=1, + ) + index: int = proto.Field( + proto.INT32, + number=2, + ) + total_splits: int = proto.Field( + proto.INT32, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py new file mode 100755 index 000000000000..b294739dddea --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py @@ -0,0 +1,600 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import monitored_resource_pb2 # type: ignore +from google.cloud.logging_v2.types import log_entry +from google.protobuf import duration_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'DeleteLogRequest', + 'WriteLogEntriesRequest', + 'WriteLogEntriesResponse', + 'WriteLogEntriesPartialErrors', + 'ListLogEntriesRequest', + 'ListLogEntriesResponse', + 'ListMonitoredResourceDescriptorsRequest', + 'ListMonitoredResourceDescriptorsResponse', + 'ListLogsRequest', + 'ListLogsResponse', + 'TailLogEntriesRequest', + 'TailLogEntriesResponse', + }, +) + + +class DeleteLogRequest(proto.Message): + r"""The parameters to DeleteLog. + + Attributes: + log_name (str): + Required. The resource name of the log to delete: + + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + + ``[LOG_ID]`` must be URL-encoded. For example, + ``"projects/my-project-id/logs/syslog"``, + ``"organizations/123/logs/cloudaudit.googleapis.com%2Factivity"``. + + For more information about log names, see + [LogEntry][google.logging.v2.LogEntry]. + """ + + log_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class WriteLogEntriesRequest(proto.Message): + r"""The parameters to WriteLogEntries. + + Attributes: + log_name (str): + Optional. A default log resource name that is assigned to + all log entries in ``entries`` that do not specify a value + for ``log_name``: + + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + + ``[LOG_ID]`` must be URL-encoded. 
For example: + + :: + + "projects/my-project-id/logs/syslog" + "organizations/123/logs/cloudaudit.googleapis.com%2Factivity" + + The permission ``logging.logEntries.create`` is needed on + each project, organization, billing account, or folder that + is receiving new log entries, whether the resource is + specified in ``logName`` or in an individual log entry. + resource (google.api.monitored_resource_pb2.MonitoredResource): + Optional. A default monitored resource object that is + assigned to all log entries in ``entries`` that do not + specify a value for ``resource``. Example: + + :: + + { "type": "gce_instance", + "labels": { + "zone": "us-central1-a", "instance_id": "00000000000000000000" }} + + See [LogEntry][google.logging.v2.LogEntry]. + labels (MutableMapping[str, str]): + Optional. Default labels that are added to the ``labels`` + field of all log entries in ``entries``. If a log entry + already has a label with the same key as a label in this + parameter, then the log entry's label is not changed. See + [LogEntry][google.logging.v2.LogEntry]. + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): + Required. The log entries to send to Logging. The order of + log entries in this list does not matter. Values supplied in + this method's ``log_name``, ``resource``, and ``labels`` + fields are copied into those log entries in this list that + do not include values for their corresponding fields. For + more information, see the + [LogEntry][google.logging.v2.LogEntry] type. + + If the ``timestamp`` or ``insert_id`` fields are missing in + log entries, then this method supplies the current time or a + unique identifier, respectively. The supplied values are + chosen so that, among the log entries that did not supply + their own values, the entries earlier in the list will sort + before the entries later in the list. See the + ``entries.list`` method. 
+ + Log entries with timestamps that are more than the `logs + retention + period `__ in the + past or more than 24 hours in the future will not be + available when calling ``entries.list``. However, those log + entries can still be `exported with + LogSinks `__. + + To improve throughput and to avoid exceeding the `quota + limit `__ for calls + to ``entries.write``, you should try to include several log + entries in this list, rather than calling this method for + each individual log entry. + partial_success (bool): + Optional. Whether a batch's valid entries should be written + even if some other entry failed due to a permanent error + such as INVALID_ARGUMENT or PERMISSION_DENIED. If any entry + failed, then the response status is the response status of + one of the failed entries. The response will include error + details in ``WriteLogEntriesPartialErrors.log_entry_errors`` + keyed by the entries' zero-based index in the ``entries``. + Failed requests for which no entries are written will not + include per-entry errors. + dry_run (bool): + Optional. If true, the request should expect + normal response, but the entries won't be + persisted nor exported. Useful for checking + whether the logging API endpoints are working + properly before sending valuable data. + """ + + log_name: str = proto.Field( + proto.STRING, + number=1, + ) + resource: monitored_resource_pb2.MonitoredResource = proto.Field( + proto.MESSAGE, + number=2, + message=monitored_resource_pb2.MonitoredResource, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=log_entry.LogEntry, + ) + partial_success: bool = proto.Field( + proto.BOOL, + number=5, + ) + dry_run: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class WriteLogEntriesResponse(proto.Message): + r"""Result returned from WriteLogEntries. 
+ """ + + +class WriteLogEntriesPartialErrors(proto.Message): + r"""Error details for WriteLogEntries with partial success. + + Attributes: + log_entry_errors (MutableMapping[int, google.rpc.status_pb2.Status]): + When ``WriteLogEntriesRequest.partial_success`` is true, + records the error status for entries that were not written + due to a permanent error, keyed by the entry's zero-based + index in ``WriteLogEntriesRequest.entries``. + + Failed requests for which no entries are written will not + include per-entry errors. + """ + + log_entry_errors: MutableMapping[int, status_pb2.Status] = proto.MapField( + proto.INT32, + proto.MESSAGE, + number=1, + message=status_pb2.Status, + ) + + +class ListLogEntriesRequest(proto.Message): + r"""The parameters to ``ListLogEntries``. + + Attributes: + resource_names (MutableSequence[str]): + Required. Names of one or more parent resources from which + to retrieve log entries: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + May alternatively be one or more views: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + + Projects listed in the ``project_ids`` field are added to + this list. A maximum of 100 resources may be specified in a + single request. + filter (str): + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. 
+ order_by (str): + Optional. How the results should be sorted. Presently, the + only permitted values are ``"timestamp asc"`` (default) and + ``"timestamp desc"``. The first option returns entries in + order of increasing values of ``LogEntry.timestamp`` (oldest + first), and the second option returns entries in order of + decreasing timestamps (newest first). Entries with equal + timestamps are returned in order of their ``insert_id`` + values. + page_size (int): + Optional. The maximum number of results to return from this + request. Default is 50. If the value is negative or exceeds + 1000, the request is rejected. The presence of + ``next_page_token`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``page_token`` must be the value of ``next_page_token`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + order_by: str = proto.Field( + proto.STRING, + number=3, + ) + page_size: int = proto.Field( + proto.INT32, + number=4, + ) + page_token: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListLogEntriesResponse(proto.Message): + r"""Result returned from ``ListLogEntries``. + + Attributes: + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): + A list of log entries. If ``entries`` is empty, + ``nextPageToken`` may still be returned, indicating that + more entries may exist. See ``nextPageToken`` for more + information. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. 
To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + + If a value for ``next_page_token`` appears and the + ``entries`` field is empty, it means that the search found + no log entries so far but it did not have time to search all + the possible log entries. Retry the method with this value + for ``page_token`` to continue the search. Alternatively, + consider speeding up the search by changing your filter to + specify a single log name or resource type, or to narrow the + time range of the search. + """ + + @property + def raw_page(self): + return self + + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListMonitoredResourceDescriptorsRequest(proto.Message): + r"""The parameters to ListMonitoredResourceDescriptors + + Attributes: + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + page_size: int = proto.Field( + proto.INT32, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListMonitoredResourceDescriptorsResponse(proto.Message): + r"""Result returned from ListMonitoredResourceDescriptors. + + Attributes: + resource_descriptors (MutableSequence[google.api.monitored_resource_pb2.MonitoredResourceDescriptor]): + A list of resource descriptors. 
+ next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + resource_descriptors: MutableSequence[monitored_resource_pb2.MonitoredResourceDescriptor] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=monitored_resource_pb2.MonitoredResourceDescriptor, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListLogsRequest(proto.Message): + r"""The parameters to ListLogs. + + Attributes: + parent (str): + Required. The resource name to list logs for: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + resource_names (MutableSequence[str]): + Optional. List of resource names to list logs for: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + + To support legacy queries, it could also be: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + The resource name in the ``parent`` field is added to this + list. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. 
+ ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListLogsResponse(proto.Message): + r"""Result returned from ListLogs. + + Attributes: + log_names (MutableSequence[str]): + A list of log names. For example, + ``"projects/my-project/logs/syslog"`` or + ``"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"``. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call this method again using the value + of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + log_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class TailLogEntriesRequest(proto.Message): + r"""The parameters to ``TailLogEntries``. + + Attributes: + resource_names (MutableSequence[str]): + Required. 
Name of a parent resource from which to retrieve + log entries: + + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` + + May alternatively be one or more views: + + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + filter (str): + Optional. Only log entries that match the filter are + returned. An empty filter matches all log entries in the + resources listed in ``resource_names``. Referencing a parent + resource that is not listed in ``resource_names`` will cause + the filter to return no results. The maximum length of a + filter is 20,000 characters. + buffer_window (google.protobuf.duration_pb2.Duration): + Optional. The amount of time to buffer log + entries at the server before being returned to + prevent out of order results due to late + arriving log entries. Valid values are between + 0-60000 milliseconds. Defaults to 2000 + milliseconds. + """ + + resource_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + buffer_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + + +class TailLogEntriesResponse(proto.Message): + r"""Result returned from ``TailLogEntries``. + + Attributes: + entries (MutableSequence[google.cloud.logging_v2.types.LogEntry]): + A list of log entries. Each response in the stream will + order entries with increasing values of + ``LogEntry.timestamp``. Ordering is not guaranteed between + separate responses. 
+ suppression_info (MutableSequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): + If entries that otherwise would have been + included in the session were not sent back to + the client, counts of relevant entries omitted + from the session with the reason that they were + not included. There will be at most one of each + reason per response. The counts represent the + number of suppressed entries since the last + streamed response. + """ + + class SuppressionInfo(proto.Message): + r"""Information about entries that were omitted from the session. + + Attributes: + reason (google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo.Reason): + The reason that entries were omitted from the + session. + suppressed_count (int): + A lower bound on the count of entries omitted due to + ``reason``. + """ + class Reason(proto.Enum): + r"""An indicator of why entries were omitted. + + Values: + REASON_UNSPECIFIED (0): + Unexpected default. + RATE_LIMIT (1): + Indicates suppression occurred due to relevant entries being + received in excess of rate limits. For quotas and limits, + see `Logging API quotas and + limits `__. + NOT_CONSUMED (2): + Indicates suppression occurred due to the + client not consuming responses quickly enough. 
+ """ + REASON_UNSPECIFIED = 0 + RATE_LIMIT = 1 + NOT_CONSUMED = 2 + + reason: 'TailLogEntriesResponse.SuppressionInfo.Reason' = proto.Field( + proto.ENUM, + number=1, + enum='TailLogEntriesResponse.SuppressionInfo.Reason', + ) + suppressed_count: int = proto.Field( + proto.INT32, + number=2, + ) + + entries: MutableSequence[log_entry.LogEntry] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=log_entry.LogEntry, + ) + suppression_info: MutableSequence[SuppressionInfo] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=SuppressionInfo, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py new file mode 100755 index 000000000000..3115109aa03d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py @@ -0,0 +1,2417 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'OperationState', + 'LifecycleState', + 'IndexType', + 'IndexConfig', + 'LogBucket', + 'LogView', + 'LogSink', + 'BigQueryDataset', + 'Link', + 'BigQueryOptions', + 'ListBucketsRequest', + 'ListBucketsResponse', + 'CreateBucketRequest', + 'UpdateBucketRequest', + 'GetBucketRequest', + 'DeleteBucketRequest', + 'UndeleteBucketRequest', + 'ListViewsRequest', + 'ListViewsResponse', + 'CreateViewRequest', + 'UpdateViewRequest', + 'GetViewRequest', + 'DeleteViewRequest', + 'ListSinksRequest', + 'ListSinksResponse', + 'GetSinkRequest', + 'CreateSinkRequest', + 'UpdateSinkRequest', + 'DeleteSinkRequest', + 'CreateLinkRequest', + 'DeleteLinkRequest', + 'ListLinksRequest', + 'ListLinksResponse', + 'GetLinkRequest', + 'LogExclusion', + 'ListExclusionsRequest', + 'ListExclusionsResponse', + 'GetExclusionRequest', + 'CreateExclusionRequest', + 'UpdateExclusionRequest', + 'DeleteExclusionRequest', + 'GetCmekSettingsRequest', + 'UpdateCmekSettingsRequest', + 'CmekSettings', + 'GetSettingsRequest', + 'UpdateSettingsRequest', + 'Settings', + 'CopyLogEntriesRequest', + 'CopyLogEntriesMetadata', + 'CopyLogEntriesResponse', + 'BucketMetadata', + 'LinkMetadata', + 'LocationMetadata', + }, +) + + +class OperationState(proto.Enum): + r"""List of different operation states. + High level state of the operation. This is used to report the + job's current state to the user. Once a long running operation + is created, the current state of the operation can be queried + even before the operation is finished and the final result is + available. + + Values: + OPERATION_STATE_UNSPECIFIED (0): + Should not be used. + OPERATION_STATE_SCHEDULED (1): + The operation is scheduled. 
+ OPERATION_STATE_WAITING_FOR_PERMISSIONS (2): + Waiting for necessary permissions. + OPERATION_STATE_RUNNING (3): + The operation is running. + OPERATION_STATE_SUCCEEDED (4): + The operation was completed successfully. + OPERATION_STATE_FAILED (5): + The operation failed. + OPERATION_STATE_CANCELLED (6): + The operation was cancelled by the user. + """ + OPERATION_STATE_UNSPECIFIED = 0 + OPERATION_STATE_SCHEDULED = 1 + OPERATION_STATE_WAITING_FOR_PERMISSIONS = 2 + OPERATION_STATE_RUNNING = 3 + OPERATION_STATE_SUCCEEDED = 4 + OPERATION_STATE_FAILED = 5 + OPERATION_STATE_CANCELLED = 6 + + +class LifecycleState(proto.Enum): + r"""LogBucket lifecycle states. + + Values: + LIFECYCLE_STATE_UNSPECIFIED (0): + Unspecified state. This is only used/useful + for distinguishing unset values. + ACTIVE (1): + The normal and active state. + DELETE_REQUESTED (2): + The resource has been marked for deletion by + the user. For some resources (e.g. buckets), + this can be reversed by an un-delete operation. + UPDATING (3): + The resource has been marked for an update by + the user. It will remain in this state until the + update is complete. + CREATING (4): + The resource has been marked for creation by + the user. It will remain in this state until the + creation is complete. + FAILED (5): + The resource is in an INTERNAL error state. + """ + LIFECYCLE_STATE_UNSPECIFIED = 0 + ACTIVE = 1 + DELETE_REQUESTED = 2 + UPDATING = 3 + CREATING = 4 + FAILED = 5 + + +class IndexType(proto.Enum): + r"""IndexType is used for custom indexing. It describes the type + of an indexed field. + + Values: + INDEX_TYPE_UNSPECIFIED (0): + The index's type is unspecified. + INDEX_TYPE_STRING (1): + The index is a string-type index. + INDEX_TYPE_INTEGER (2): + The index is a integer-type index. + """ + INDEX_TYPE_UNSPECIFIED = 0 + INDEX_TYPE_STRING = 1 + INDEX_TYPE_INTEGER = 2 + + +class IndexConfig(proto.Message): + r"""Configuration for an indexed field. + + Attributes: + field_path (str): + Required. 
The LogEntry field path to index. + + Note that some paths are automatically indexed, and other + paths are not eligible for indexing. See `indexing + documentation `__ + for details. + + For example: ``jsonPayload.request.status`` + type_ (google.cloud.logging_v2.types.IndexType): + Required. The type of data in this index. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the index was + last modified. + This is used to return the timestamp, and will + be ignored if supplied during update. + """ + + field_path: str = proto.Field( + proto.STRING, + number=1, + ) + type_: 'IndexType' = proto.Field( + proto.ENUM, + number=2, + enum='IndexType', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + + +class LogBucket(proto.Message): + r"""Describes a repository in which log entries are stored. + + Attributes: + name (str): + Output only. The resource name of the bucket. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + For a list of supported locations, see `Supported + Regions `__ + + For the location of ``global`` it is unspecified where log + entries are actually stored. + + After a bucket has been created, the location cannot be + changed. + description (str): + Describes this bucket. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + bucket. This is not set for any of the default + buckets. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + bucket. + retention_days (int): + Logs will be retained by default for this + amount of time, after which they will + automatically be deleted. The minimum retention + period is 1 day. If this value is set to zero at + bucket creation time, the default time of 30 + days will be used. + locked (bool): + Whether the bucket is locked. 
+ + The retention period on a locked bucket cannot + be changed. Locked buckets may only be deleted + if they are empty. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The bucket lifecycle state. + analytics_enabled (bool): + Whether log analytics is enabled for this + bucket. + Once enabled, log analytics features cannot be + disabled. + restricted_fields (MutableSequence[str]): + Log entry field paths that are denied access in this bucket. + + The following fields and their children are eligible: + ``textPayload``, ``jsonPayload``, ``protoPayload``, + ``httpRequest``, ``labels``, ``sourceLocation``. + + Restricting a repeated field will restrict all values. + Adding a parent will block all child fields. (e.g. + ``foo.bar`` will block ``foo.bar.baz``) + index_configs (MutableSequence[google.cloud.logging_v2.types.IndexConfig]): + A list of indexed fields and related + configuration data. + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + The CMEK settings of the log bucket. If + present, new log entries written to this log + bucket are encrypted using the CMEK key provided + in this configuration. If a log bucket has CMEK + settings, the CMEK settings cannot be disabled + later by updating the log bucket. Changing the + KMS key is allowed. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + retention_days: int = proto.Field( + proto.INT32, + number=11, + ) + locked: bool = proto.Field( + proto.BOOL, + number=9, + ) + lifecycle_state: 'LifecycleState' = proto.Field( + proto.ENUM, + number=12, + enum='LifecycleState', + ) + analytics_enabled: bool = proto.Field( + proto.BOOL, + number=14, + ) + restricted_fields: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + index_configs: MutableSequence['IndexConfig'] = proto.RepeatedField( + proto.MESSAGE, + number=17, + message='IndexConfig', + ) + cmek_settings: 'CmekSettings' = proto.Field( + proto.MESSAGE, + number=19, + message='CmekSettings', + ) + + +class LogView(proto.Message): + r"""Describes a view over log entries in a bucket. + + Attributes: + name (str): + The resource name of the view. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket/views/my-view`` + description (str): + Describes this view. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + view. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + view. + filter (str): + Filter that restricts which log entries in a bucket are + visible in this view. + + Filters are restricted to be a logical AND of ==/!= of any + of the following: + + - originating project/folder/organization/billing account. 
+ - resource type + - log id + + For example: + + SOURCE("projects/myproject") AND resource.type = + "gce_instance" AND LOG_ID("stdout") + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + filter: str = proto.Field( + proto.STRING, + number=7, + ) + + +class LogSink(proto.Message): + r"""Describes a sink used to export log entries to one of the + following destinations in any project: a Cloud Storage bucket, a + BigQuery dataset, a Pub/Sub topic or a Cloud Logging log bucket. + A logs filter controls which log entries are exported. The sink + must be created within a project, organization, billing account, + or folder. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The client-assigned sink identifier, unique within + the project. + + For example: ``"my-syslog-errors-to-pubsub"``. Sink + identifiers are limited to 100 characters and can include + only the following characters: upper and lower-case + alphanumeric characters, underscores, hyphens, and periods. + First character has to be alphanumeric. + destination (str): + Required. The export destination: + + :: + + "storage.googleapis.com/[GCS_BUCKET]" + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET]" + "pubsub.googleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" + + The sink's ``writer_identity``, set when the sink is + created, must have permission to write to the destination or + else the log entries are not exported. For more information, + see `Exporting Logs with + Sinks `__. + filter (str): + Optional. An `advanced logs + filter `__. 
+ The only exported log entries are those that are in the + resource owning the sink and that match the filter. + + For example: + + ``logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR`` + description (str): + Optional. A description of this sink. + + The maximum length of the description is 8000 + characters. + disabled (bool): + Optional. If set to true, then this sink is + disabled and it does not export any log entries. + exclusions (MutableSequence[google.cloud.logging_v2.types.LogExclusion]): + Optional. Log entries that match any of these exclusion + filters will not be exported. + + If a log entry is matched by both ``filter`` and one of + ``exclusion_filters`` it will not be exported. + output_version_format (google.cloud.logging_v2.types.LogSink.VersionFormat): + Deprecated. This field is unused. + writer_identity (str): + Output only. An IAM identity—a service account or + group—under which Cloud Logging writes the exported log + entries to the sink's destination. This field is either set + by specifying ``custom_writer_identity`` or set + automatically by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. + + Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a + Resource `__. + Consult the destination service's documentation to determine + the appropriate IAM roles to assign to the identity. + + Sinks that have a destination that is a log bucket in the + same project as the sink cannot have a writer_identity and + no additional permissions are required. + include_children (bool): + Optional. This field applies only to sinks owned by + organizations and folders. If the field is false, the + default, only the logs owned by the sink's parent resource + are available for export. 
If the field is true, then log + entries from all the projects, folders, and billing accounts + contained in the sink's parent resource are also available + for export. Whether a particular log entry from the children + is exported depends on the sink's filter expression. + + For example, if this field is true, then the filter + ``resource.type=gce_instance`` would export all Compute + Engine VM instance log entries from all projects in the + sink's parent. + + To only export entries from certain child projects, filter + on the project part of the log name: + + logName:("projects/test-project1/" OR + "projects/test-project2/") AND resource.type=gce_instance + bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): + Optional. Options that affect sinks exporting + data to BigQuery. + + This field is a member of `oneof`_ ``options``. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + sink. + This field may not be present for older sinks. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + sink. + This field may not be present for older sinks. + """ + class VersionFormat(proto.Enum): + r"""Deprecated. This is unused. + + Values: + VERSION_FORMAT_UNSPECIFIED (0): + An unspecified format version that will + default to V2. + V2 (1): + ``LogEntry`` version 2 format. + V1 (2): + ``LogEntry`` version 1 format. 
+ """ + VERSION_FORMAT_UNSPECIFIED = 0 + V2 = 1 + V1 = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + destination: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + description: str = proto.Field( + proto.STRING, + number=18, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=19, + ) + exclusions: MutableSequence['LogExclusion'] = proto.RepeatedField( + proto.MESSAGE, + number=16, + message='LogExclusion', + ) + output_version_format: VersionFormat = proto.Field( + proto.ENUM, + number=6, + enum=VersionFormat, + ) + writer_identity: str = proto.Field( + proto.STRING, + number=8, + ) + include_children: bool = proto.Field( + proto.BOOL, + number=9, + ) + bigquery_options: 'BigQueryOptions' = proto.Field( + proto.MESSAGE, + number=12, + oneof='options', + message='BigQueryOptions', + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + + +class BigQueryDataset(proto.Message): + r"""Describes a BigQuery dataset that was created by a link. + + Attributes: + dataset_id (str): + Output only. The full resource name of the BigQuery dataset. + The DATASET_ID will match the ID of the link, so the link + must match the naming restrictions of BigQuery datasets + (alphanumeric characters and underscores only). + + The dataset will have a resource path of + "bigquery.googleapis.com/projects/[PROJECT_ID]/datasets/[DATASET_ID]". + """ + + dataset_id: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Link(proto.Message): + r"""Describes a link connected to an analytics enabled bucket. + + Attributes: + name (str): + The resource name of the link. The name can have up to 100 + characters. 
A valid link id (at the end of the link name) + must only have alphanumeric characters and underscores + within it. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + + For example: + + \`projects/my-project/locations/global/buckets/my-bucket/links/my_link + description (str): + Describes this link. + + The maximum length of the description is 8000 + characters. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + link. + lifecycle_state (google.cloud.logging_v2.types.LifecycleState): + Output only. The resource lifecycle state. + bigquery_dataset (google.cloud.logging_v2.types.BigQueryDataset): + The information of a BigQuery Dataset. When a + link is created, a BigQuery dataset is created + along with it, in the same project as the + LogBucket it's linked to. This dataset will also + have BigQuery Views corresponding to the + LogViews in the bucket. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + lifecycle_state: 'LifecycleState' = proto.Field( + proto.ENUM, + number=4, + enum='LifecycleState', + ) + bigquery_dataset: 'BigQueryDataset' = proto.Field( + proto.MESSAGE, + number=5, + message='BigQueryDataset', + ) + + +class BigQueryOptions(proto.Message): + r"""Options that change functionality of a sink exporting data to + BigQuery. + + Attributes: + use_partitioned_tables (bool): + Optional. Whether to use `BigQuery's partition + tables `__. 
+ By default, Cloud Logging creates dated tables based on the + log entries' timestamps, e.g. syslog_20170523. With + partitioned tables the date suffix is no longer present and + `special query + syntax `__ + has to be used instead. In both cases, tables are sharded + based on UTC timezone. + uses_timestamp_column_partitioning (bool): + Output only. True if new timestamp column based partitioning + is in use, false if legacy ingestion-time partitioning is in + use. + + All new sinks will have this field set true and will use + timestamp column based partitioning. If + use_partitioned_tables is false, this value has no meaning + and will be false. Legacy sinks using partitioned tables + will have this field set to false. + """ + + use_partitioned_tables: bool = proto.Field( + proto.BOOL, + number=1, + ) + uses_timestamp_column_partitioning: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class ListBucketsRequest(proto.Message): + r"""The parameters to ``ListBuckets``. + + Attributes: + parent (str): + Required. The parent resource whose buckets are to be + listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]" + + Note: The locations portion of the resource must be + specified, but supplying the character ``-`` in place of + [LOCATION_ID] will return all buckets. + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. 
The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListBucketsResponse(proto.Message): + r"""The response from ListBuckets. + + Attributes: + buckets (MutableSequence[google.cloud.logging_v2.types.LogBucket]): + A list of buckets. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + buckets: MutableSequence['LogBucket'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogBucket', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateBucketRequest(proto.Message): + r"""The parameters to ``CreateBucket``. + + Attributes: + parent (str): + Required. The resource in which to create the log bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]" + + For example: + + ``"projects/my-project/locations/global"`` + bucket_id (str): + Required. A client-assigned identifier such as + ``"my-bucket"``. Identifiers are limited to 100 characters + and can include only letters, digits, underscores, hyphens, + and periods. + bucket (google.cloud.logging_v2.types.LogBucket): + Required. The new bucket. The region + specified in the new bucket must be compliant + with any Location Restriction Org Policy. The + name field in the bucket is ignored. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + bucket_id: str = proto.Field( + proto.STRING, + number=2, + ) + bucket: 'LogBucket' = proto.Field( + proto.MESSAGE, + number=3, + message='LogBucket', + ) + + +class UpdateBucketRequest(proto.Message): + r"""The parameters to ``UpdateBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to update. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` + bucket (google.cloud.logging_v2.types.LogBucket): + Required. The updated bucket. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask that specifies the fields in ``bucket`` + that need an update. A bucket field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + For a detailed ``FieldMask`` definition, see: + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + For example: ``updateMask=retention_days`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + bucket: 'LogBucket' = proto.Field( + proto.MESSAGE, + number=2, + message='LogBucket', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class GetBucketRequest(proto.Message): + r"""The parameters to ``GetBucket``. + + Attributes: + name (str): + Required. 
The resource name of the bucket: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteBucketRequest(proto.Message): + r"""The parameters to ``DeleteBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to delete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UndeleteBucketRequest(proto.Message): + r"""The parameters to ``UndeleteBucket``. + + Attributes: + name (str): + Required. The full resource name of the bucket to undelete. + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListViewsRequest(proto.Message): + r"""The parameters to ``ListViews``. + + Attributes: + parent (str): + Required. 
The bucket whose views are to be listed: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. + + Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListViewsResponse(proto.Message): + r"""The response from ListViews. + + Attributes: + views (MutableSequence[google.cloud.logging_v2.types.LogView]): + A list of views. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + views: MutableSequence['LogView'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogView', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateViewRequest(proto.Message): + r"""The parameters to ``CreateView``. + + Attributes: + parent (str): + Required. The bucket in which to create the view + + :: + + `"projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]"` + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket"`` + view_id (str): + Required. A client-assigned identifier such as + ``"my-view"``. 
Identifiers are limited to 100 characters and + can include only letters, digits, underscores, hyphens, and + periods. + view (google.cloud.logging_v2.types.LogView): + Required. The new view. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + view_id: str = proto.Field( + proto.STRING, + number=2, + ) + view: 'LogView' = proto.Field( + proto.MESSAGE, + number=3, + message='LogView', + ) + + +class UpdateViewRequest(proto.Message): + r"""The parameters to ``UpdateView``. + + Attributes: + name (str): + Required. The full resource name of the view to update + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` + view (google.cloud.logging_v2.types.LogView): + Required. The updated view. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask that specifies the fields in ``view`` + that need an update. A field will be overwritten if, and + only if, it is in the update mask. ``name`` and output only + fields cannot be updated. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + For example: ``updateMask=filter`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: 'LogView' = proto.Field( + proto.MESSAGE, + number=2, + message='LogView', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class GetViewRequest(proto.Message): + r"""The parameters to ``GetView``. + + Attributes: + name (str): + Required. 
The resource name of the policy: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + For example: + + ``"projects/my-project/locations/global/buckets/my-bucket/views/my-view"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteViewRequest(proto.Message): + r"""The parameters to ``DeleteView``. + + Attributes: + name (str): + Required. The full resource name of the view to delete: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]" + + For example: + + :: + + `"projects/my-project/locations/global/buckets/my-bucket/views/my-view"` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSinksRequest(proto.Message): + r"""The parameters to ``ListSinks``. + + Attributes: + parent (str): + Required. The parent resource whose sinks are to be listed: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListSinksResponse(proto.Message): + r"""Result returned from ``ListSinks``. + + Attributes: + sinks (MutableSequence[google.cloud.logging_v2.types.LogSink]): + A list of sinks. 
+ next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call the same method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + sinks: MutableSequence['LogSink'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogSink', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetSinkRequest(proto.Message): + r"""The parameters to ``GetSink``. + + Attributes: + sink_name (str): + Required. The resource name of the sink: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + """ + + sink_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateSinkRequest(proto.Message): + r"""The parameters to ``CreateSink``. + + Attributes: + parent (str): + Required. The resource in which to create the sink: + + :: + + "projects/[PROJECT_ID]" + "organizations/[ORGANIZATION_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]" + "folders/[FOLDER_ID]" + + For examples: + + ``"projects/my-project"`` ``"organizations/123456789"`` + sink (google.cloud.logging_v2.types.LogSink): + Required. The new sink, whose ``name`` parameter is a sink + identifier that is not already in use. + unique_writer_identity (bool): + Optional. Determines the kind of IAM identity returned as + ``writer_identity`` in the new sink. If this value is + omitted or set to false, and if the sink's parent is a + project, then the value returned as ``writer_identity`` is + the same group or service account used by Cloud Logging + before the addition of writer identities to this API. The + sink's destination must be in the same project as the sink + itself. 
+ + If this field is set to true, or if the sink is owned by a + non-project resource such as an organization, then the value + of ``writer_identity`` will be a unique service account used + only for exports from the new sink. For more information, + see ``writer_identity`` in + [LogSink][google.logging.v2.LogSink]. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + sink: 'LogSink' = proto.Field( + proto.MESSAGE, + number=2, + message='LogSink', + ) + unique_writer_identity: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class UpdateSinkRequest(proto.Message): + r"""The parameters to ``UpdateSink``. + + Attributes: + sink_name (str): + Required. The full resource name of the sink to update, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + sink (google.cloud.logging_v2.types.LogSink): + Required. The updated sink, whose name is the same + identifier that appears as part of ``sink_name``. + unique_writer_identity (bool): + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + for a description of this field. When updating a sink, the + effect of this field on the value of ``writer_identity`` in + the updated sink depends on both the old and new values of + this field: + + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. 
Field mask that specifies the fields in ``sink`` + that need an update. A sink field will be overwritten if, + and only if, it is in the update mask. ``name`` and output + only fields cannot be updated. + + An empty ``updateMask`` is temporarily treated as using the + following mask for backwards compatibility purposes: + + ``destination,filter,includeChildren`` + + At some point in the future, behavior will be removed and + specifying an empty ``updateMask`` will be an error. + + For a detailed ``FieldMask`` definition, see + https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.FieldMask + + For example: ``updateMask=filter`` + """ + + sink_name: str = proto.Field( + proto.STRING, + number=1, + ) + sink: 'LogSink' = proto.Field( + proto.MESSAGE, + number=2, + message='LogSink', + ) + unique_writer_identity: bool = proto.Field( + proto.BOOL, + number=3, + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=4, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteSinkRequest(proto.Message): + r"""The parameters to ``DeleteSink``. + + Attributes: + sink_name (str): + Required. The full resource name of the sink to delete, + including the parent resource and the sink identifier: + + :: + + "projects/[PROJECT_ID]/sinks/[SINK_ID]" + "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" + "folders/[FOLDER_ID]/sinks/[SINK_ID]" + + For example: + + ``"projects/my-project/sinks/my-sink"`` + """ + + sink_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateLinkRequest(proto.Message): + r"""The parameters to CreateLink. + + Attributes: + parent (str): + Required. The full resource name of the bucket to create a + link for. 
+ + :: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]". + link (google.cloud.logging_v2.types.Link): + Required. The new link. + link_id (str): + Required. The ID to use for the link. The link_id can have + up to 100 characters. A valid link_id must only have + alphanumeric characters and underscores within it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + link: 'Link' = proto.Field( + proto.MESSAGE, + number=2, + message='Link', + ) + link_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteLinkRequest(proto.Message): + r"""The parameters to DeleteLink. + + Attributes: + name (str): + Required. The full resource name of the link to delete. + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListLinksRequest(proto.Message): + r"""The parameters to ListLinks. + + Attributes: + parent (str): + Required. The parent resource whose links are to be listed: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/ + page_token (str): + Optional. 
If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. + page_size (int): + Optional. The maximum number of results to + return from this request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLinksResponse(proto.Message): + r"""The response from ListLinks. + + Attributes: + links (MutableSequence[google.cloud.logging_v2.types.Link]): + A list of links. + next_page_token (str): + If there might be more results than those appearing in this + response, then ``nextPageToken`` is included. To get the + next set of results, call the same method again using the + value of ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + links: MutableSequence['Link'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Link', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLinkRequest(proto.Message): + r"""The parameters to GetLink. + + Attributes: + name (str): + Required. The resource name of the link: + + "projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID]" + "folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/links/[LINK_ID] + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class LogExclusion(proto.Message): + r"""Specifies a set of log entries that are filtered out by a sink. If + your Google Cloud resource receives a large volume of log entries, + you can use exclusions to reduce your chargeable logs. 
Note that + exclusions on organization-level and folder-level sinks don't apply + to child resources. Note also that you cannot modify the \_Required + sink or exclude logs from it. + + Attributes: + name (str): + Required. A client-assigned identifier, such as + ``"load-balancer-exclusion"``. Identifiers are limited to + 100 characters and can include only letters, digits, + underscores, hyphens, and periods. First character has to be + alphanumeric. + description (str): + Optional. A description of this exclusion. + filter (str): + Required. An `advanced logs + filter `__ + that matches the log entries to be excluded. By using the + `sample + function `__, + you can exclude less than 100% of the matching log entries. + + For example, the following query matches 99% of low-severity + log entries from Google Cloud Storage buckets: + + ``resource.type=gcs_bucket severity`__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve CMEK settings. + + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + For example: + + ``"organizations/12345/cmekSettings"`` + + Note: CMEK for the Log Router can be configured for Google + Cloud projects, folders, organizations and billing accounts. + Once configured for an organization, it applies to all + projects and folders in the Google Cloud organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateCmekSettingsRequest(proto.Message): + r"""The parameters to + [UpdateCmekSettings][google.logging.v2.ConfigServiceV2.UpdateCmekSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the CMEK settings to update. 
+ + :: + + "projects/[PROJECT_ID]/cmekSettings" + "organizations/[ORGANIZATION_ID]/cmekSettings" + "billingAccounts/[BILLING_ACCOUNT_ID]/cmekSettings" + "folders/[FOLDER_ID]/cmekSettings" + + For example: + + ``"organizations/12345/cmekSettings"`` + + Note: CMEK for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. + cmek_settings (google.cloud.logging_v2.types.CmekSettings): + Required. The CMEK settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``cmek_settings`` should be updated. A field will be + overwritten if and only if it is in the update mask. Output + only fields cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + cmek_settings: 'CmekSettings' = proto.Field( + proto.MESSAGE, + number=2, + message='CmekSettings', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class CmekSettings(proto.Message): + r"""Describes the customer-managed encryption key (CMEK) settings + associated with a project, folder, organization, billing account, or + flexible resource. + + Note: CMEK for the Log Router can currently only be configured for + Google Cloud organizations. Once configured, it applies to all + projects and folders in the Google Cloud organization. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Output only. The resource name of the CMEK + settings. + kms_key_name (str): + The resource name for the configured Cloud KMS key. 
+ + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required cloudkms.cryptoKeyEncrypterDecrypter roles + assigned for the key. + + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name or + disabled by setting the key name to an empty string. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Log Router, set this field to an + empty string. + + See `Enabling CMEK for Log + Router `__ + for more information. + kms_key_version_name (str): + The CryptoKeyVersion resource name for the configured Cloud + KMS key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]/cryptoKeyVersions/[VERSION]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key/cryptoKeyVersions/1"`` + + This is a read-only field used to convey the specific + configured CryptoKeyVersion of ``kms_key`` that has been + configured. It will be populated in cases where the CMEK + settings are bound to a single key version. + + If this field is populated, the ``kms_key`` is tied to a + specific CryptoKeyVersion. + service_account_id (str): + Output only. The service account that will be used by the + Log Router to access your Cloud KMS key. 
+ + Before enabling CMEK for Log Router, you must first assign + the cloudkms.cryptoKeyEncrypterDecrypter role to the service + account that the Log Router will use to access your Cloud + KMS key. Use + [GetCmekSettings][google.logging.v2.ConfigServiceV2.GetCmekSettings] + to obtain the service account ID. + + See `Enabling CMEK for Log + Router `__ + for more information. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_key_version_name: str = proto.Field( + proto.STRING, + number=4, + ) + service_account_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetSettingsRequest(proto.Message): + r"""The parameters to + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource for which to retrieve settings. + + :: + + "projects/[PROJECT_ID]/settings" + "organizations/[ORGANIZATION_ID]/settings" + "billingAccounts/[BILLING_ACCOUNT_ID]/settings" + "folders/[FOLDER_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can be get for Google + Cloud projects, folders, organizations and billing accounts. + Currently it can only be configured for organizations. Once + configured for an organization, it applies to all projects + and folders in the Google Cloud organization. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSettingsRequest(proto.Message): + r"""The parameters to + [UpdateSettings][google.logging.v2.ConfigServiceV2.UpdateSettings]. + + See `Enabling CMEK for Log + Router `__ + for more information. + + Attributes: + name (str): + Required. The resource name for the settings to update. 
+ + :: + + "organizations/[ORGANIZATION_ID]/settings" + + For example: + + ``"organizations/12345/settings"`` + + Note: Settings for the Log Router can currently only be + configured for Google Cloud organizations. Once configured, + it applies to all projects and folders in the Google Cloud + organization. + settings (google.cloud.logging_v2.types.Settings): + Required. The settings to update. + + See `Enabling CMEK for Log + Router `__ + for more information. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask identifying which fields from + ``settings`` should be updated. A field will be overwritten + if and only if it is in the update mask. Output only fields + cannot be updated. + + See [FieldMask][google.protobuf.FieldMask] for more + information. + + For example: ``"updateMask=kmsKeyName"`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + settings: 'Settings' = proto.Field( + proto.MESSAGE, + number=2, + message='Settings', + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class Settings(proto.Message): + r"""Describes the settings associated with a project, folder, + organization, billing account, or flexible resource. + + Attributes: + name (str): + Output only. The resource name of the + settings. + kms_key_name (str): + Optional. The resource name for the configured Cloud KMS + key. + + KMS key name format: + + :: + + "projects/[PROJECT_ID]/locations/[LOCATION]/keyRings/[KEYRING]/cryptoKeys/[KEY]" + + For example: + + ``"projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key"`` + + To enable CMEK for the Log Router, set this field to a valid + ``kms_key_name`` for which the associated service account + has the required + ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` role assigned + for the key. 
+ + The Cloud KMS key used by the Log Router can be updated by + changing the ``kms_key_name`` to a new valid key name. + Encryption operations that are in progress will be completed + with the key that was in use when they started. Decryption + operations will be completed using the key that was used at + the time of encryption unless access to that key has been + revoked. + + To disable CMEK for the Log Router, set this field to an + empty string. + + See `Enabling CMEK for Log + Router `__ + for more information. + kms_service_account_id (str): + Output only. The service account that will be used by the + Log Router to access your Cloud KMS key. + + Before enabling CMEK for Log Router, you must first assign + the role ``roles/cloudkms.cryptoKeyEncrypterDecrypter`` to + the service account that the Log Router will use to access + your Cloud KMS key. Use + [GetSettings][google.logging.v2.ConfigServiceV2.GetSettings] + to obtain the service account ID. + + See `Enabling CMEK for Log + Router `__ + for more information. + storage_location (str): + Optional. The Cloud region that will be used for \_Default + and \_Required log buckets for newly created projects and + folders. For example ``europe-west1``. This setting does not + affect the location of custom log buckets. + disable_default_sink (bool): + Optional. If set to true, the \_Default sink in newly + created projects and folders will created in a disabled + state. This can be used to automatically disable log + ingestion if there is already an aggregated sink configured + in the hierarchy. The \_Default sink can be re-enabled + manually if needed. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=2, + ) + kms_service_account_id: str = proto.Field( + proto.STRING, + number=3, + ) + storage_location: str = proto.Field( + proto.STRING, + number=4, + ) + disable_default_sink: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class CopyLogEntriesRequest(proto.Message): + r"""The parameters to CopyLogEntries. + + Attributes: + name (str): + Required. Log bucket from which to copy log entries. + + For example: + + ``"projects/my-project/locations/global/buckets/my-source-bucket"`` + filter (str): + Optional. A filter specifying which log + entries to copy. The filter must be no more than + 20k characters. An empty filter matches all log + entries. + destination (str): + Required. Destination to which to copy log + entries. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + destination: str = proto.Field( + proto.STRING, + number=4, + ) + + +class CopyLogEntriesMetadata(proto.Message): + r"""Metadata for CopyLogEntries long running operations. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + cancellation_requested (bool): + Identifies whether the user has requested + cancellation of the operation. + request (google.cloud.logging_v2.types.CopyLogEntriesRequest): + CopyLogEntries RPC request. + progress (int): + Estimated progress of the operation (0 - + 100%). + writer_identity (str): + The IAM identity of a service account that must be granted + access to the destination. + + If the service account is not granted permission to the + destination within an hour, the operation will be cancelled. 
+ + For example: ``"serviceAccount:foo@bar.com"`` + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + cancellation_requested: bool = proto.Field( + proto.BOOL, + number=4, + ) + request: 'CopyLogEntriesRequest' = proto.Field( + proto.MESSAGE, + number=5, + message='CopyLogEntriesRequest', + ) + progress: int = proto.Field( + proto.INT32, + number=6, + ) + writer_identity: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CopyLogEntriesResponse(proto.Message): + r"""Response type for CopyLogEntries long running operations. + + Attributes: + log_entries_copied_count (int): + Number of log entries copied. + """ + + log_entries_copied_count: int = proto.Field( + proto.INT64, + number=1, + ) + + +class BucketMetadata(proto.Message): + r"""Metadata for LongRunningUpdateBucket Operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The create time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_bucket_request (google.cloud.logging_v2.types.CreateBucketRequest): + LongRunningCreateBucket RPC request. + + This field is a member of `oneof`_ ``request``. + update_bucket_request (google.cloud.logging_v2.types.UpdateBucketRequest): + LongRunningUpdateBucket RPC request. 
+ + This field is a member of `oneof`_ ``request``. + """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + create_bucket_request: 'CreateBucketRequest' = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='CreateBucketRequest', + ) + update_bucket_request: 'UpdateBucketRequest' = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='UpdateBucketRequest', + ) + + +class LinkMetadata(proto.Message): + r"""Metadata for long running Link operations. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + The start time of an operation. + end_time (google.protobuf.timestamp_pb2.Timestamp): + The end time of an operation. + state (google.cloud.logging_v2.types.OperationState): + State of an operation. + create_link_request (google.cloud.logging_v2.types.CreateLinkRequest): + CreateLink RPC request. + + This field is a member of `oneof`_ ``request``. + delete_link_request (google.cloud.logging_v2.types.DeleteLinkRequest): + DeleteLink RPC request. + + This field is a member of `oneof`_ ``request``. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state: 'OperationState' = proto.Field( + proto.ENUM, + number=3, + enum='OperationState', + ) + create_link_request: 'CreateLinkRequest' = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='CreateLinkRequest', + ) + delete_link_request: 'DeleteLinkRequest' = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='DeleteLinkRequest', + ) + + +class LocationMetadata(proto.Message): + r"""Cloud Logging specific location metadata. + + Attributes: + log_analytics_enabled (bool): + Indicates whether or not Log Analytics + features are supported in the given location. + """ + + log_analytics_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py new file mode 100755 index 000000000000..d31f7e39732a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py @@ -0,0 +1,411 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.api import distribution_pb2 # type: ignore +from google.api import metric_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.logging.v2', + manifest={ + 'LogMetric', + 'ListLogMetricsRequest', + 'ListLogMetricsResponse', + 'GetLogMetricRequest', + 'CreateLogMetricRequest', + 'UpdateLogMetricRequest', + 'DeleteLogMetricRequest', + }, +) + + +class LogMetric(proto.Message): + r"""Describes a logs-based metric. The value of the metric is the + number of log entries that match a logs filter in a given time + interval. + + Logs-based metrics can also be used to extract values from logs + and create a distribution of the values. The distribution + records the statistics of the extracted values along with an + optional histogram of the values as specified by the bucket + options. + + Attributes: + name (str): + Required. The client-assigned metric identifier. Examples: + ``"error_count"``, ``"nginx/requests"``. + + Metric identifiers are limited to 100 characters and can + include only the following characters: ``A-Z``, ``a-z``, + ``0-9``, and the special characters ``_-.,+!*',()%/``. The + forward-slash character (``/``) denotes a hierarchy of name + pieces, and it cannot be the first character of the name. + + This field is the ``[METRIC_ID]`` part of a metric resource + name in the format + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". Example: If the + resource name of a metric is + ``"projects/my-project/metrics/nginx%2Frequests"``, this + field's value is ``"nginx/requests"``. + description (str): + Optional. A description of this metric, which + is used in documentation. The maximum length of + the description is 8000 characters. 
+ filter (str): + Required. An `advanced logs + filter `__ + which is used to match log entries. Example: + + :: + + "resource.type=gae_app AND severity>=ERROR" + + The maximum length of the filter is 20000 characters. + bucket_name (str): + Optional. The resource name of the Log Bucket that owns the + Log Metric. Only Log Buckets in projects are supported. The + bucket has to be in the same project as the metric. + + For example: + + ``projects/my-project/locations/global/buckets/my-bucket`` + + If empty, then the Log Metric is considered a non-Bucket Log + Metric. + disabled (bool): + Optional. If set to True, then this metric is + disabled and it does not generate any points. + metric_descriptor (google.api.metric_pb2.MetricDescriptor): + Optional. The metric descriptor associated with the + logs-based metric. If unspecified, it uses a default metric + descriptor with a DELTA metric kind, INT64 value type, with + no labels and a unit of "1". Such a metric counts the number + of log entries matching the ``filter`` expression. + + The ``name``, ``type``, and ``description`` fields in the + ``metric_descriptor`` are output only, and is constructed + using the ``name`` and ``description`` field in the + LogMetric. + + To create a logs-based metric that records a distribution of + log values, a DELTA metric kind with a DISTRIBUTION value + type must be used along with a ``value_extractor`` + expression in the LogMetric. + + Each label in the metric descriptor must have a matching + label name as the key and an extractor expression as the + value in the ``label_extractors`` map. + + The ``metric_kind`` and ``value_type`` fields in the + ``metric_descriptor`` cannot be updated once initially + configured. New labels can be added in the + ``metric_descriptor``, but existing labels cannot be + modified except for their description. + value_extractor (str): + Optional. 
A ``value_extractor`` is required when using a + distribution logs-based metric to extract the values to + record from a log entry. Two functions are supported for + value extraction: ``EXTRACT(field)`` or + ``REGEXP_EXTRACT(field, regex)``. The arguments are: + + 1. field: The name of the log entry field from which the + value is to be extracted. + 2. regex: A regular expression using the Google RE2 syntax + (https://github.com/google/re2/wiki/Syntax) with a single + capture group to extract data from the specified log + entry field. The value of the field is converted to a + string before applying the regex. It is an error to + specify a regex that does not include exactly one capture + group. + + The result of the extraction must be convertible to a double + type, as the distribution always records double values. If + either the extraction or the conversion to double fails, + then those values are not recorded in the distribution. + + Example: + ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` + label_extractors (MutableMapping[str, str]): + Optional. A map from a label key string to an extractor + expression which is used to extract data from a log entry + field and assign as the label value. Each label key + specified in the LabelDescriptor must have an associated + extractor expression in this map. The syntax of the + extractor expression is the same as for the + ``value_extractor`` field. + + The extracted value is converted to the type defined in the + label descriptor. If either the extraction or the type + conversion fails, the label will have a default value. The + default value for a string label is an empty string, for an + integer label its 0, and for a boolean label its ``false``. + + Note that there are upper bounds on the maximum number of + labels and the number of active time series that are allowed + in a project. + bucket_options (google.api.distribution_pb2.BucketOptions): + Optional. 
The ``bucket_options`` are required when the + logs-based metric is using a DISTRIBUTION value type and it + describes the bucket boundaries used to create a histogram + of the extracted values. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The creation timestamp of the + metric. + This field may not be present for older metrics. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update timestamp of the + metric. + This field may not be present for older metrics. + version (google.cloud.logging_v2.types.LogMetric.ApiVersion): + Deprecated. The API version that created or + updated this metric. The v2 format is used by + default and cannot be changed. + """ + class ApiVersion(proto.Enum): + r"""Logging API version. + + Values: + V2 (0): + Logging API v2. + V1 (1): + Logging API v1. + """ + V2 = 0 + V1 = 1 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + description: str = proto.Field( + proto.STRING, + number=2, + ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) + bucket_name: str = proto.Field( + proto.STRING, + number=13, + ) + disabled: bool = proto.Field( + proto.BOOL, + number=12, + ) + metric_descriptor: metric_pb2.MetricDescriptor = proto.Field( + proto.MESSAGE, + number=5, + message=metric_pb2.MetricDescriptor, + ) + value_extractor: str = proto.Field( + proto.STRING, + number=6, + ) + label_extractors: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + bucket_options: distribution_pb2.Distribution.BucketOptions = proto.Field( + proto.MESSAGE, + number=8, + message=distribution_pb2.Distribution.BucketOptions, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + version: ApiVersion = proto.Field( + proto.ENUM, + number=4, + enum=ApiVersion, 
+ ) + + +class ListLogMetricsRequest(proto.Message): + r"""The parameters to ListLogMetrics. + + Attributes: + parent (str): + Required. The name of the project containing the metrics: + + :: + + "projects/[PROJECT_ID]". + page_token (str): + Optional. If present, then retrieve the next batch of + results from the preceding call to this method. + ``pageToken`` must be the value of ``nextPageToken`` from + the previous response. The values of other method parameters + should be identical to those in the previous call. + page_size (int): + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. The presence of + ``nextPageToken`` in the response indicates that more + results might be available. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_token: str = proto.Field( + proto.STRING, + number=2, + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + + +class ListLogMetricsResponse(proto.Message): + r"""Result returned from ListLogMetrics. + + Attributes: + metrics (MutableSequence[google.cloud.logging_v2.types.LogMetric]): + A list of logs-based metrics. + next_page_token (str): + If there might be more results than appear in this response, + then ``nextPageToken`` is included. To get the next set of + results, call this method again using the value of + ``nextPageToken`` as ``pageToken``. + """ + + @property + def raw_page(self): + return self + + metrics: MutableSequence['LogMetric'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='LogMetric', + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetLogMetricRequest(proto.Message): + r"""The parameters to GetLogMetric. + + Attributes: + metric_name (str): + Required. The resource name of the desired metric: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". 
+ """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateLogMetricRequest(proto.Message): + r"""The parameters to CreateLogMetric. + + Attributes: + parent (str): + Required. The resource name of the project in which to + create the metric: + + :: + + "projects/[PROJECT_ID]" + + The new metric must be provided in the request. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The new logs-based metric, which + must not have an identifier that already exists. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + metric: 'LogMetric' = proto.Field( + proto.MESSAGE, + number=2, + message='LogMetric', + ) + + +class UpdateLogMetricRequest(proto.Message): + r"""The parameters to UpdateLogMetric. + + Attributes: + metric_name (str): + Required. The resource name of the metric to update: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]" + + The updated metric must be provided in the request and it's + ``name`` field must be the same as ``[METRIC_ID]`` If the + metric does not exist in ``[PROJECT_ID]``, then a new metric + is created. + metric (google.cloud.logging_v2.types.LogMetric): + Required. The updated metric. + """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + metric: 'LogMetric' = proto.Field( + proto.MESSAGE, + number=2, + message='LogMetric', + ) + + +class DeleteLogMetricRequest(proto.Message): + r"""The parameters to DeleteLogMetric. + + Attributes: + metric_name (str): + Required. The resource name of the metric to delete: + + :: + + "projects/[PROJECT_ID]/metrics/[METRIC_ID]". 
+ """ + + metric_name: str = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/mypy.ini b/packages/gapic-generator/tests/integration/goldens/logging_internal/mypy.ini new file mode 100755 index 000000000000..574c5aed394b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py new file mode 100755 index 000000000000..8d723fa93820 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import re +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = 'google-cloud-logging' + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.13" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "prerelease_deps", +] + +@nox.session(python=ALL_PYTHON) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def unit(session, protobuf_implementation): + """Run the unit test suite.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. 
+ if protobuf_implementation == "cpp": + session.install("protobuf<4") + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/logging_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + +@nox.session(python=ALL_PYTHON[-1]) +@nox.parametrize( + "protobuf_implementation", + [ "python", "upb", "cpp" ], +) +def prerelease_deps(session, protobuf_implementation): + """Run the unit test suite against pre-release versions of dependencies.""" + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install test environment dependencies + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + + # Install the package without dependencies + session.install('-e', '.', '--no-deps') + + # We test the minimum dependency versions using the minimum Python + # version so the lowest python runtime that we test has a corresponding constraints + # file, located at `testing/constraints--.txt`, which contains all of the + # dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 + "grpcio!=1.67.0rc1", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + + session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run( + "python", "-c", "import proto; print(proto.__version__)" + ) + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/logging_v2/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)), + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '-p', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==7.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
+ """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py new file mode 100755 index 000000000000..c72d71144d36 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CopyLogEntries_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__copy_log_entries(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client._copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CopyLogEntries_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py new file mode 100755 index 000000000000..28a94850ec52 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CopyLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CopyLogEntries_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__copy_log_entries(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CopyLogEntriesRequest( + name="name_value", + destination="destination_value", + ) + + # Make the request + operation = client._copy_log_entries(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CopyLogEntries_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py new file mode 100755 index 000000000000..72cac228d628 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for _CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateExclusion_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__create_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = await client._create_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py new file mode 100755 index 000000000000..4b6efb42f072 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py 
@@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CreateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateExclusion_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__create_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.CreateExclusionRequest( + parent="parent_value", + exclusion=exclusion, + ) + + # Make the request + response = client._create_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py new file mode 100755 index 000000000000..50d9dd7d79d3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for _CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateLink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__create_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client._create_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py new file mode 100755 index 000000000000..3da3496391aa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CreateLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateLink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__create_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateLinkRequest( + parent="parent_value", + link_id="link_id_value", + ) + + # Make the request + operation = client._create_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py new file mode 100755 index 000000000000..d90e17666384 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateSink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__create_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = await client._create_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py new file mode 100755 index 000000000000..460e6fb0ab16 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CreateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateSink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__create_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.CreateSinkRequest( + parent="parent_value", + sink=sink, + ) + + # Make the request + response = client._create_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py new file mode 100755 index 000000000000..e746a27d2d40 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateView_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__create_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = await client._create_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py new file mode 100755 index 000000000000..2c140e7852b7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CreateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__CreateView_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__create_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateViewRequest( + parent="parent_value", + view_id="view_id_value", + ) + + # Make the request + response = client._create_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__CreateView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py new file mode 100755 index 000000000000..742320fa71de --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteExclusion_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__delete_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + await client._delete_exclusion(request=request) + + +# [END logging_v2_generated_ConfigServiceV2__DeleteExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py new file mode 100755 index 000000000000..2e111fa747ad --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteExclusion_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__delete_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteExclusionRequest( + name="name_value", + ) + + # Make the request + client._delete_exclusion(request=request) + + +# [END logging_v2_generated_ConfigServiceV2__DeleteExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py new file mode 100755 index 000000000000..93b085612bb7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteLink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__delete_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client._delete_link(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__DeleteLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py new file mode 100755 index 000000000000..6b6e2d114c31 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteLink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__delete_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLinkRequest( + name="name_value", + ) + + # Make the request + operation = client._delete_link(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__DeleteLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py new file mode 100755 index 000000000000..fc97001cbb90 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteSink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__delete_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + await client._delete_sink(request=request) + + +# [END logging_v2_generated_ConfigServiceV2__DeleteSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py new file mode 100755 index 000000000000..bfb465f43064 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteSink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__delete_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + client._delete_sink(request=request) + + +# [END logging_v2_generated_ConfigServiceV2__DeleteSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py new file mode 100755 index 000000000000..aaa5bf7ccca1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteView_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__delete_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + await client._delete_view(request=request) + + +# [END logging_v2_generated_ConfigServiceV2__DeleteView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py new file mode 100755 index 000000000000..388721cadf17 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__DeleteView_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__delete_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteViewRequest( + name="name_value", + ) + + # Make the request + client._delete_view(request=request) + + +# [END logging_v2_generated_ConfigServiceV2__DeleteView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py new file mode 100755 index 000000000000..201149cee02e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetCmekSettings_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._get_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetCmekSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py new file mode 100755 index 000000000000..b43d8adea143 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetCmekSettings_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._get_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetCmekSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py new file mode 100755 index 000000000000..295a59e9536b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetExclusion_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = await client._get_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py new file mode 100755 index 000000000000..011112649658 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetExclusion_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetExclusionRequest( + name="name_value", + ) + + # Make the request + response = client._get_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py new file mode 100755 index 000000000000..43b6825bd042 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetLink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = await client._get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py new file mode 100755 index 000000000000..7f70bec5427c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetLink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_link(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLinkRequest( + name="name_value", + ) + + # Make the request + response = client._get_link(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py new file mode 100755 index 000000000000..bbe5934f442b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetSettings_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py new file mode 100755 index 000000000000..25eb8923b3d2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetSettings_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._get_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py new file mode 100755 index 000000000000..bb5d38c0288a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetSink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = await client._get_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py new file mode 100755 index 000000000000..47cccaaa8bd2 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetSink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetSinkRequest( + sink_name="sink_name_value", + ) + + # Make the request + response = client._get_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py new file mode 100755 index 000000000000..6d8981da9ad0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetView_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = await client._get_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py new file mode 100755 index 000000000000..8def9a82b4b5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__GetView_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetViewRequest( + name="name_value", + ) + + # Make the request + response = client._get_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__GetView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py new file mode 100755 index 000000000000..1a46095422ce --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListExclusions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListExclusions_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__list_exclusions(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_exclusions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListExclusions_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py new file mode 100755 index 000000000000..2497e6cd35d4 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListExclusions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListExclusions_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__list_exclusions(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListExclusionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_exclusions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListExclusions_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py new file mode 100755 index 000000000000..38c3f77d1e77 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListLinks_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__list_links(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_links(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListLinks_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py new file mode 100755 index 000000000000..45a26e31fbb3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListLinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListLinks_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__list_links(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_links(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListLinks_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py new file mode 100755 index 000000000000..54bdbc6c5c0e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListSinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListSinks_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__list_sinks(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_sinks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListSinks_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py new file mode 100755 index 000000000000..6f9841f1f96c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListSinks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListSinks_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__list_sinks(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListSinksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_sinks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListSinks_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py new file mode 100755 index 000000000000..9c5478064b9c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListViews +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListViews_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__list_views(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_views(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListViews_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py new file mode 100755 index 000000000000..c088c8d6082a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListViews +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__ListViews_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__list_views(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListViewsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_views(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2__ListViews_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py new file mode 100755 index 000000000000..f767ebca6a76 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__update_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._update_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py new file mode 100755 index 000000000000..3fec55b7f6b8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateCmekSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__update_cmek_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateCmekSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._update_cmek_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py new file mode 100755 index 000000000000..b55f944edbb3 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateExclusion_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__update_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = await client._update_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py new file mode 100755 index 000000000000..b28402d20d6e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py @@ -0,0 +1,57 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateExclusion +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateExclusion_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__update_exclusion(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + exclusion = logging_v2.LogExclusion() + exclusion.name = "name_value" + exclusion.filter = "filter_value" + + request = logging_v2.UpdateExclusionRequest( + name="name_value", + exclusion=exclusion, + ) + + # Make the request + response = client._update_exclusion(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py new file mode 100755 index 000000000000..f2f262e24a6a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateSettings_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__update_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = await client._update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py new file mode 100755 index 000000000000..d0ba8a3f788e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 
(the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateSettings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateSettings_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__update_settings(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateSettingsRequest( + name="name_value", + ) + + # Make the request + response = client._update_settings(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py new file mode 100755 index 000000000000..716a0b07e96c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateSink_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__update_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = await client._update_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py new file mode 100755 index 000000000000..3dc375fa0243 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateSink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateSink_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__update_sink(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + sink = logging_v2.LogSink() + sink.name = "name_value" + sink.destination = "destination_value" + + request = logging_v2.UpdateSinkRequest( + sink_name="sink_name_value", + sink=sink, + ) + + # Make the request + response = client._update_sink(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py new file mode 100755 index 000000000000..983bfd16b87f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateView_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__update_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = await client._update_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py new file mode 100755 index 000000000000..204613cc4c3d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateView +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2__UpdateView_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__update_view(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateViewRequest( + name="name_value", + ) + + # Make the request + response = client._update_view(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2__UpdateView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py new file mode 100755 index 000000000000..711a4a39422f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = await client.create_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py new file mode 100755 index 000000000000..7f926256f75d --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_create_bucket_async(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py new file mode 100755 index 000000000000..c9608c7c5660 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_bucket_async(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + operation = client.create_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py new file mode 100755 index 000000000000..c13c727af381 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_CreateBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_create_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.CreateBucketRequest( + parent="parent_value", + bucket_id="bucket_id_value", + ) + + # Make the request + response = client.create_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_CreateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py new file mode 100755 index 000000000000..ee3df2fd9791 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_delete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.delete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py new file mode 100755 index 000000000000..d747c6530bd5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_delete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.delete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_DeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py new file mode 100755 index 000000000000..72093053348f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_get_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.get_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py new file mode 100755 index 000000000000..9ec27d9deb40 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_GetBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_get_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetBucketRequest( + name="name_value", + ) + + # Make the request + response = client.get_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_GetBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py new file mode 100755 index 000000000000..ae5936d9f66c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuckets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListBuckets_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_buckets(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListBuckets_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py new file mode 100755 index 000000000000..edfe2fd7d227 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListBuckets +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_ListBuckets_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_buckets(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListBucketsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_buckets(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_ConfigServiceV2_ListBuckets_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py new file mode 100755 index 000000000000..5ff8813cd716 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_undelete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + await client.undelete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py new file mode 100755 index 000000000000..c83ab9dde0fc --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UndeleteBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_undelete_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UndeleteBucketRequest( + name="name_value", + ) + + # Make the request + client.undelete_bucket(request=request) + + +# [END logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py new file mode 100755 index 000000000000..b55fd51a5d05 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = await client.update_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py new file mode 100755 index 000000000000..c9fcb689628c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_update_bucket_async(): + # Create a client + client = logging_v2.BaseConfigServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py new file mode 100755 index 000000000000..deb199aaf250 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateBucketAsync +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_bucket_async(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + operation = client.update_bucket_async(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py new file mode 100755 index 000000000000..e173f6497dad --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateBucket +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_update_bucket(): + # Create a client + client = logging_v2.BaseConfigServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.UpdateBucketRequest( + name="name_value", + ) + + # Make the request + response = client.update_bucket(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_ConfigServiceV2_UpdateBucket_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py new file mode 100755 index 000000000000..4f6f42664dd5 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + await client.delete_log(request=request) + + +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py new file mode 100755 index 000000000000..056be6957768 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteLog +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_DeleteLog_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_delete_log(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogRequest( + log_name="log_name_value", + ) + + # Make the request + client.delete_log(request=request) + + +# [END logging_v2_generated_LoggingServiceV2_DeleteLog_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py new file mode 100755 index 000000000000..2d3e52ebe184 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py new file mode 100755 index 000000000000..d1a86427f185 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # Make the request + page_result = client.list_log_entries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py new file mode 100755 index 000000000000..278a2c47012e --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListLogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListLogs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListLogs_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py new file mode 100755 index 000000000000..bc2071cf4784 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListLogs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListLogs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_logs(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_logs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListLogs_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py new file mode 100755 index 000000000000..b9224990329b --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListMonitoredResourceDescriptors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py new file mode 100755 index 000000000000..78f5bc605553 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -0,0 +1,52 @@ +# -*- 
coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMonitoredResourceDescriptors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_list_monitored_resource_descriptors(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListMonitoredResourceDescriptorsRequest( + ) + + # Make the request + page_result = client.list_monitored_resource_descriptors(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py new file mode 100755 index 000000000000..3047a4f49db7 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.tail_log_entries(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py new file mode 100755 index 000000000000..2080458cfaa0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TailLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_tail_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.TailLogEntriesRequest( + resource_names=['resource_names_value1', 'resource_names_value2'], + ) + + # This method expects an iterator which contains + # 'logging_v2.TailLogEntriesRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.tail_log_entries(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END logging_v2_generated_LoggingServiceV2_TailLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py new file mode 100755 index 000000000000..bdc98630dac8 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2AsyncClient() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = await client.write_log_entries(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_async] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py new file mode 100755 index 000000000000..884ef7a296cd --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for WriteLogEntries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample_write_log_entries(): + # Create a client + client = logging_v2.LoggingServiceV2Client() + + # Initialize request argument(s) + entries = logging_v2.LogEntry() + entries.log_name = "log_name_value" + + request = logging_v2.WriteLogEntriesRequest( + entries=entries, + ) + + # Make the request + response = client.write_log_entries(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py new file mode 100755 index 000000000000..e1e54ba56b14 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__CreateLogMetric_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__create_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = await client._create_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2__CreateLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py new file mode 100755 index 000000000000..2bab5195b917 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py @@ -0,0 +1,57 @@ +# 
-*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _CreateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__CreateLogMetric_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__create_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.CreateLogMetricRequest( + parent="parent_value", + metric=metric, + ) + + # Make the request + response = client._create_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2__CreateLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py new file mode 100755 index 000000000000..9373ac0d4b5a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__DeleteLogMetric_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__delete_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + await client._delete_log_metric(request=request) + + +# [END logging_v2_generated_MetricsServiceV2__DeleteLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py new file mode 100755 index 000000000000..431e26a197a9 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _DeleteLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__DeleteLogMetric_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__delete_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.DeleteLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + client._delete_log_metric(request=request) + + +# [END logging_v2_generated_MetricsServiceV2__DeleteLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py new file mode 100755 index 000000000000..458a5fc6887a --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__GetLogMetric_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__get_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = await client._get_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2__GetLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py new file mode 100755 index 000000000000..f1c293e2e080 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _GetLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__GetLogMetric_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__get_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.GetLogMetricRequest( + metric_name="metric_name_value", + ) + + # Make the request + response = client._get_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2__GetLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py new file mode 100755 index 000000000000..d084304f4a90 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _ListLogMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__ListLogMetrics_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__list_log_metrics(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_log_metrics(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END logging_v2_generated_MetricsServiceV2__ListLogMetrics_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py new file mode 100755 index 000000000000..305a5b39b355 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _ListLogMetrics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__ListLogMetrics_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__list_log_metrics(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + request = logging_v2.ListLogMetricsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client._list_log_metrics(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END logging_v2_generated_MetricsServiceV2__ListLogMetrics_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py new file mode 100755 index 000000000000..4546c68e4229 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for _UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__UpdateLogMetric_async_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +async def sample__update_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2AsyncClient() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = await client._update_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2__UpdateLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py new file mode 100755 index 000000000000..07fa1d67f1af --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py @@ -0,0 
+1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for _UpdateLogMetric +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-logging + + +# [START logging_v2_generated_MetricsServiceV2__UpdateLogMetric_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import logging_v2 + + +def sample__update_log_metric(): + # Create a client + client = logging_v2.BaseMetricsServiceV2Client() + + # Initialize request argument(s) + metric = logging_v2.LogMetric() + metric.name = "name_value" + metric.filter = "filter_value" + + request = logging_v2.UpdateLogMetricRequest( + metric_name="metric_name_value", + metric=metric, + ) + + # Make the request + response = client._update_log_metric(request=request) + + # Handle the response + print(response) + +# [END logging_v2_generated_MetricsServiceV2__UpdateLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json new file mode 100755 index 000000000000..4c8ad2490e4f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -0,0 +1,6896 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.logging.v2", + "version": "v2" + } + ], + "language": "PYTHON", + "name": "google-cloud-logging", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.create_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucketAsync" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_bucket_async" + }, + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.create_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "create_bucket_async" + }, + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.create_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" + }, + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", + 
"segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.create_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "CreateBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" + }, + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.delete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", 
+ "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "DeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_bucket" + }, + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.get_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + 
"resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" + }, + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.get_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "GetBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" + }, + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, 
+ "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.list_buckets", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" + }, + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.list_buckets", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "ListBuckets" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListBucketsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" + }, + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + 
"fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.undelete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "undelete_bucket" + }, + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.undelete_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UndeleteBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "undelete_bucket" + }, + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.update_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_bucket_async" + }, + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.update_bucket_async", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucketAsync" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_bucket_async" + }, + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.update_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" + }, + "description": "Sample for UpdateBucket", + "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.update_bucket", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "UpdateBucket" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" + }, + "description": "Sample for UpdateBucket", + "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2_update_bucket_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._copy_log_entries", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CopyLogEntries", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": 
"ConfigServiceV2" + }, + "shortName": "_CopyLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "_copy_log_entries" + }, + "description": "Sample for _CopyLogEntries", + "file": "logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CopyLogEntries_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._copy_log_entries", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CopyLogEntries", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CopyLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "_copy_log_entries" + }, + "description": "Sample for _CopyLogEntries", + "file": "logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CopyLogEntries_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_create_exclusion" + }, + 
"description": "Sample for _CreateExclusion", + "file": "logging_v2_generated_config_service_v2__create_exclusion_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateExclusion_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_exclusion_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_create_exclusion" + }, + "description": "Sample for _CreateExclusion", + "file": "logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "logging_v2_generated_ConfigServiceV2__CreateExclusion_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "_create_link" + }, + "description": "Sample for _CreateLink", + "file": "logging_v2_generated_config_service_v2__create_link_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateLink_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + 
"end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_link_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "_create_link" + }, + "description": "Sample for _CreateLink", + "file": "logging_v2_generated_config_service_v2__create_link_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateLink_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + 
"start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_link_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_create_sink" + }, + "description": "Sample for _CreateSink", + "file": "logging_v2_generated_config_service_v2__create_sink_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateSink_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_sink_async_internal.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_create_sink" + }, + "description": "Sample for _CreateSink", + "file": "logging_v2_generated_config_service_v2__create_sink_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateSink_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_sink_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": 
"google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_create_view" + }, + "description": "Sample for _CreateView", + "file": "logging_v2_generated_config_service_v2__create_view_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateView_async_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_view_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._CreateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_CreateView" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.CreateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_create_view" + }, + "description": "Sample for _CreateView", + "file": "logging_v2_generated_config_service_v2__create_view_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__CreateView_sync_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__create_view_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": 
"_delete_exclusion" + }, + "description": "Sample for _DeleteExclusion", + "file": "logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteExclusion_async_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_exclusion" + }, + "description": "Sample for _DeleteExclusion", + "file": "logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteExclusion_sync_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + 
}, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "_delete_link" + }, + "description": "Sample for _DeleteLink", + "file": "logging_v2_generated_config_service_v2__delete_link_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteLink_async_internal", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_link_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "_delete_link" + }, + "description": "Sample for _DeleteLink", + "file": "logging_v2_generated_config_service_v2__delete_link_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteLink_sync_internal", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_link_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": 
"google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_sink" + }, + "description": "Sample for _DeleteSink", + "file": "logging_v2_generated_config_service_v2__delete_sink_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteSink_async_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_sink_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + }, + { + "name": 
"sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_sink" + }, + "description": "Sample for _DeleteSink", + "file": "logging_v2_generated_config_service_v2__delete_sink_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteSink_sync_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_sink_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_view" + }, + "description": "Sample for _DeleteView", + "file": "logging_v2_generated_config_service_v2__delete_view_async_internal.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteView_async_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_view_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._DeleteView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_DeleteView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_view" + }, + "description": "Sample for _DeleteView", + "file": "logging_v2_generated_config_service_v2__delete_view_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteView_sync_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__delete_view_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_cmek_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetCmekSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetCmekSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "_get_cmek_settings" + }, + "description": "Sample for _GetCmekSettings", + "file": "logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetCmekSettings_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" 
+ }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_cmek_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetCmekSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetCmekSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "_get_cmek_settings" + }, + "description": "Sample for _GetCmekSettings", + "file": "logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetCmekSettings_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": 
"_GetExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_get_exclusion" + }, + "description": "Sample for _GetExclusion", + "file": "logging_v2_generated_config_service_v2__get_exclusion_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetExclusion_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_exclusion_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_get_exclusion" + }, + "description": "Sample for _GetExclusion", + "file": "logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetExclusion_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "_get_link" + }, + "description": "Sample for _GetLink", + "file": "logging_v2_generated_config_service_v2__get_link_async_internal.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetLink_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_link_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_link", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetLink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "_get_link" + }, + "description": "Sample for _GetLink", + "file": "logging_v2_generated_config_service_v2__get_link_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetLink_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_link_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "_get_settings" + }, + "description": "Sample for _GetSettings", + "file": "logging_v2_generated_config_service_v2__get_settings_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetSettings_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_settings_async_internal.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "_get_settings" + }, + "description": "Sample for _GetSettings", + "file": "logging_v2_generated_config_service_v2__get_settings_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetSettings_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_settings_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_sink", + "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2._GetSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_get_sink" + }, + "description": "Sample for _GetSink", + "file": "logging_v2_generated_config_service_v2__get_sink_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetSink_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_sink_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_get_sink" + }, + "description": "Sample for _GetSink", + "file": "logging_v2_generated_config_service_v2__get_sink_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetSink_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_sink_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_get_view" + }, + "description": "Sample for _GetView", + "file": 
"logging_v2_generated_config_service_v2__get_view_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetView_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_view_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._GetView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_GetView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_get_view" + }, + "description": "Sample for _GetView", + "file": "logging_v2_generated_config_service_v2__get_view_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__GetView_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + 
{ + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__get_view_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_exclusions", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListExclusions", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListExclusions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsAsyncPager", + "shortName": "_list_exclusions" + }, + "description": "Sample for _ListExclusions", + "file": "logging_v2_generated_config_service_v2__list_exclusions_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListExclusions_async_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"logging_v2_generated_config_service_v2__list_exclusions_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_exclusions", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListExclusions", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListExclusions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsPager", + "shortName": "_list_exclusions" + }, + "description": "Sample for _ListExclusions", + "file": "logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListExclusions_sync_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": 
"BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_links", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListLinks", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksAsyncPager", + "shortName": "_list_links" + }, + "description": "Sample for _ListLinks", + "file": "logging_v2_generated_config_service_v2__list_links_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListLinks_async_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_links_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_links", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListLinks", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": 
"ConfigServiceV2" + }, + "shortName": "_ListLinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksPager", + "shortName": "_list_links" + }, + "description": "Sample for _ListLinks", + "file": "logging_v2_generated_config_service_v2__list_links_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListLinks_sync_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_links_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_sinks", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListSinks", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListSinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" 
+ }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksAsyncPager", + "shortName": "_list_sinks" + }, + "description": "Sample for _ListSinks", + "file": "logging_v2_generated_config_service_v2__list_sinks_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListSinks_async_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_sinks_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_sinks", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListSinks", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListSinks" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksPager", + "shortName": "_list_sinks" + }, + "description": "Sample 
for _ListSinks", + "file": "logging_v2_generated_config_service_v2__list_sinks_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListSinks_sync_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_sinks_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_views", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListViews", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListViews" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsAsyncPager", + "shortName": "_list_views" + }, + "description": "Sample for _ListViews", + "file": "logging_v2_generated_config_service_v2__list_views_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListViews_async_internal", + "segments": [ + { + "end": 
52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_views_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_views", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._ListViews", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_ListViews" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListViewsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsPager", + "shortName": "_list_views" + }, + "description": "Sample for _ListViews", + "file": "logging_v2_generated_config_service_v2__list_views_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__ListViews_sync_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" 
+ }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__list_views_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_cmek_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateCmekSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateCmekSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "_update_cmek_settings" + }, + "description": "Sample for _UpdateCmekSettings", + "file": "logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_cmek_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateCmekSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateCmekSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateCmekSettingsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "_update_cmek_settings" + }, + "description": "Sample for _UpdateCmekSettings", + "file": "logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_exclusion", + "method": { + "fullName": 
"google.logging.v2.ConfigServiceV2._UpdateExclusion", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_update_exclusion" + }, + "description": "Sample for _UpdateExclusion", + "file": "logging_v2_generated_config_service_v2__update_exclusion_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateExclusion_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_exclusion_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_exclusion", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateExclusion", + "service": { + "fullName": 
"google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateExclusion" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateExclusionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_update_exclusion" + }, + "description": "Sample for _UpdateExclusion", + "file": "logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateExclusion_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + 
"shortName": "_UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": "settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "_update_settings" + }, + "description": "Sample for _UpdateSettings", + "file": "logging_v2_generated_config_service_v2__update_settings_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSettings_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_settings_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_settings", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateSettings", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateSettings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSettingsRequest" + }, + { + "name": 
"settings", + "type": "google.cloud.logging_v2.types.Settings" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "_update_settings" + }, + "description": "Sample for _UpdateSettings", + "file": "logging_v2_generated_config_service_v2__update_settings_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSettings_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_settings_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": 
"google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_update_sink" + }, + "description": "Sample for _UpdateSink", + "file": "logging_v2_generated_config_service_v2__update_sink_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSink_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_sink_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_sink", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateSink", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateSink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateSinkRequest" + }, + { + "name": "sink_name", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" 
+ }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_update_sink" + }, + "description": "Sample for _UpdateSink", + "file": "logging_v2_generated_config_service_v2__update_sink_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSink_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_sink_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", + "shortName": "BaseConfigServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_update_view" + }, + "description": "Sample for _UpdateView", + "file": "logging_v2_generated_config_service_v2__update_view_async_internal.py", + "language": "PYTHON", + 
"origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateView_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_view_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", + "shortName": "BaseConfigServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_view", + "method": { + "fullName": "google.logging.v2.ConfigServiceV2._UpdateView", + "service": { + "fullName": "google.logging.v2.ConfigServiceV2", + "shortName": "ConfigServiceV2" + }, + "shortName": "_UpdateView" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateViewRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_update_view" + }, + "description": "Sample for _UpdateView", + "file": "logging_v2_generated_config_service_v2__update_view_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateView_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" 
+ }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_config_service_v2__update_view_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.delete_log", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_log" + }, + "description": "Sample for DeleteLog", + "file": "logging_v2_generated_logging_service_v2_delete_log_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": 
"google.cloud.logging_v2.LoggingServiceV2Client.delete_log", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.DeleteLog", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "DeleteLog" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_log" + }, + "description": "Sample for DeleteLog", + "file": "logging_v2_generated_logging_service_v2_delete_log_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_DeleteLog_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_delete_log_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_log_entries", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": 
"resource_names", + "type": "MutableSequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesAsyncPager", + "shortName": "list_log_entries" + }, + "description": "Sample for ListLogEntries", + "file": "logging_v2_generated_logging_service_v2_list_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_log_entries", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogEntries", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogEntriesRequest" + }, + { + "name": "resource_names", + "type": "MutableSequence[str]" + }, + { + "name": "filter", + "type": "str" + }, + { + "name": "order_by", + "type": "str" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogEntriesPager", + "shortName": "list_log_entries" + }, + "description": "Sample for ListLogEntries", + "file": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogEntries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_list_log_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_logs", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsAsyncPager", + "shortName": "list_logs" + }, + "description": "Sample for ListLogs", + "file": "logging_v2_generated_logging_service_v2_list_logs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_logs", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListLogs", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "ListLogs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListLogsPager", + "shortName": "list_logs" + }, + "description": "Sample for ListLogs", + "file": "logging_v2_generated_logging_service_v2_list_logs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListLogs_sync", 
+ "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_list_logs_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.list_monitored_resource_descriptors", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsAsyncPager", + "shortName": "list_monitored_resource_descriptors" + }, + "description": "Sample for ListMonitoredResourceDescriptors", + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.list_monitored_resource_descriptors", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "ListMonitoredResourceDescriptors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListMonitoredResourceDescriptorsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.logging_service_v2.pagers.ListMonitoredResourceDescriptorsPager", + "shortName": "list_monitored_resource_descriptors" + }, + "description": "Sample for ListMonitoredResourceDescriptors", + "file": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_ListMonitoredResourceDescriptors_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.tail_log_entries", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" + }, + "description": "Sample for TailLogEntries", + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_async", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.tail_log_entries", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.TailLogEntries", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "TailLogEntries" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.logging_v2.types.TailLogEntriesRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.logging_v2.types.TailLogEntriesResponse]", + "shortName": "tail_log_entries" + }, + "description": "Sample for TailLogEntries", + "file": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_TailLogEntries_sync", + "segments": [ + { + "end": 62, + "start": 27, + "type": "FULL" + }, + { + "end": 62, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 55, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 56, + "type": "REQUEST_EXECUTION" + }, + { + "end": 63, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_tail_log_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient", + "shortName": "LoggingServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2AsyncClient.write_log_entries", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", + "service": { + "fullName": 
"google.logging.v2.LoggingServiceV2", + "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "MutableMapping[str, str]" + }, + { + "name": "entries", + "type": "MutableSequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" + }, + "description": "Sample for WriteLogEntries", + "file": "logging_v2_generated_logging_service_v2_write_log_entries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client", + "shortName": "LoggingServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.LoggingServiceV2Client.write_log_entries", + "method": { + "fullName": "google.logging.v2.LoggingServiceV2.WriteLogEntries", + "service": { + "fullName": "google.logging.v2.LoggingServiceV2", 
+ "shortName": "LoggingServiceV2" + }, + "shortName": "WriteLogEntries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.WriteLogEntriesRequest" + }, + { + "name": "log_name", + "type": "str" + }, + { + "name": "resource", + "type": "google.api.monitored_resource_pb2.MonitoredResource" + }, + { + "name": "labels", + "type": "MutableMapping[str, str]" + }, + { + "name": "entries", + "type": "MutableSequence[google.cloud.logging_v2.types.LogEntry]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.WriteLogEntriesResponse", + "shortName": "write_log_entries" + }, + "description": "Sample for WriteLogEntries", + "file": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_LoggingServiceV2_WriteLogEntries_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_logging_service_v2_write_log_entries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient", + "shortName": "BaseMetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._create_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._CreateLogMetric", + "service": { + "fullName": 
"google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_CreateLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "_create_log_metric" + }, + "description": "Sample for _CreateLogMetric", + "file": "logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__CreateLogMetric_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client", + "shortName": "BaseMetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._create_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._CreateLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_CreateLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.logging_v2.types.CreateLogMetricRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "_create_log_metric" + }, + "description": "Sample for _CreateLogMetric", + "file": "logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__CreateLogMetric_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient", + "shortName": "BaseMetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._delete_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._DeleteLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_DeleteLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_log_metric" + }, + "description": "Sample for _DeleteLogMetric", + "file": "logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__DeleteLogMetric_async_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client", + "shortName": "BaseMetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._delete_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._DeleteLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_DeleteLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.DeleteLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "_delete_log_metric" + }, + "description": "Sample for _DeleteLogMetric", + "file": "logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py", 
+ "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__DeleteLogMetric_sync_internal", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient", + "shortName": "BaseMetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._get_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._GetLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_GetLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "_get_log_metric" + }, + "description": "Sample for _GetLogMetric", + "file": "logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__GetLogMetric_async_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, 
+ "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client", + "shortName": "BaseMetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._get_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._GetLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_GetLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.GetLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "_get_log_metric" + }, + "description": "Sample for _GetLogMetric", + "file": "logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__GetLogMetric_sync_internal", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient", + "shortName": "BaseMetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._list_log_metrics", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._ListLogMetrics", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_ListLogMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsAsyncPager", + "shortName": "_list_log_metrics" + }, + "description": "Sample for _ListLogMetrics", + "file": "logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__ListLogMetrics_async_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.logging_v2.BaseMetricsServiceV2Client", + "shortName": "BaseMetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._list_log_metrics", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._ListLogMetrics", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_ListLogMetrics" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.ListLogMetricsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsPager", + "shortName": "_list_log_metrics" + }, + "description": "Sample for _ListLogMetrics", + "file": "logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__ListLogMetrics_sync_internal", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient", + "shortName": "BaseMetricsServiceV2AsyncClient" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._update_log_metric", + "method": { + 
"fullName": "google.logging.v2.MetricsServiceV2._UpdateLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + "shortName": "_UpdateLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "_update_log_metric" + }, + "description": "Sample for _UpdateLogMetric", + "file": "logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__UpdateLogMetric_async_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client", + "shortName": "BaseMetricsServiceV2Client" + }, + "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._update_log_metric", + "method": { + "fullName": "google.logging.v2.MetricsServiceV2._UpdateLogMetric", + "service": { + "fullName": "google.logging.v2.MetricsServiceV2", + "shortName": "MetricsServiceV2" + }, + 
"shortName": "_UpdateLogMetric" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.logging_v2.types.UpdateLogMetricRequest" + }, + { + "name": "metric_name", + "type": "str" + }, + { + "name": "metric", + "type": "google.cloud.logging_v2.types.LogMetric" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.logging_v2.types.LogMetric", + "shortName": "_update_log_metric" + }, + "description": "Sample for _UpdateLogMetric", + "file": "logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "logging_v2_generated_MetricsServiceV2__UpdateLogMetric_sync_internal", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py" + } + ] +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py new file mode 100755 index 000000000000..3e183618dfe0 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py @@ -0,0 +1,218 @@ +#! 
/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2024 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Splits *iterator* into two lists, preserving the relative order of
    the elements.

    Returns:
        A ``(true_list, false_list)`` pair: first the elements for which
        *predicate* is truthy, then those for which it is falsy.
    """
    # results[0] collects items with a falsy predicate, results[1] truthy
    # ones; int(bool) maps directly onto those indices.
    results: Tuple[List[Any], List[Any]] = ([], [])

    for i in iterator:
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class loggingCallTransformer(cst.CSTTransformer):
    """libcst transformer that folds flattened positional/keyword arguments
    of logging client method calls into a single ``request={...}`` dict.
    """
    # Call-level control parameters that are never part of the request dict.
    # NOTE: annotated as variadic Tuple[str, ...]; plain Tuple[str] would
    # (incorrectly) describe a 1-tuple.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    # Maps each client method name to the ordered tuple of its flattened
    # (pre-``request``) parameter names.
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        '_copy_log_entries': ('name', 'destination', 'filter', ),
        '_create_exclusion': ('parent', 'exclusion', ),
        '_create_link': ('parent', 'link', 'link_id', ),
        '_create_log_metric': ('parent', 'metric', ),
        '_create_sink': ('parent', 'sink', 'unique_writer_identity', ),
        '_create_view': ('parent', 'view_id', 'view', ),
        '_delete_exclusion': ('name', ),
        '_delete_link': ('name', ),
        '_delete_log_metric': ('metric_name', ),
        '_delete_sink': ('sink_name', ),
        '_delete_view': ('name', ),
        '_get_cmek_settings': ('name', ),
        '_get_exclusion': ('name', ),
        '_get_link': ('name', ),
        '_get_log_metric': ('metric_name', ),
        '_get_settings': ('name', ),
        '_get_sink': ('sink_name', ),
        '_get_view': ('name', ),
        '_list_exclusions': ('parent', 'page_token', 'page_size', ),
        '_list_links': ('parent', 'page_token', 'page_size', ),
        '_list_log_metrics': ('parent', 'page_token', 'page_size', ),
        '_list_sinks': ('parent', 'page_token', 'page_size', ),
        '_list_views': ('parent', 'page_token', 'page_size', ),
        '_update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ),
        '_update_exclusion': ('name', 'exclusion', 'update_mask', ),
        '_update_log_metric': ('metric_name', 'metric', ),
        '_update_settings': ('name', 'settings', 'update_mask', ),
        '_update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ),
        '_update_view': ('name', 'view', 'update_mask', ),
        'create_bucket': ('parent', 'bucket_id', 'bucket', ),
        'create_bucket_async': ('parent', 'bucket_id', 'bucket', ),
        'delete_bucket': ('name', ),
        'delete_log': ('log_name', ),
        'get_bucket': ('name', ),
        'list_buckets': ('parent', 'page_token', 'page_size', ),
        'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ),
        'list_logs': ('parent', 'resource_names', 'page_size', 'page_token', ),
        'list_monitored_resource_descriptors': ('page_size', 'page_token', ),
        'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ),
        'undelete_bucket': ('name', ),
        'update_bucket': ('name', 'bucket', 'update_mask', ),
        'update_bucket_async': ('name', 'bucket', 'update_mask', ),
        'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a recognized method call to use a ``request`` dict.

        Calls whose method name is not in METHOD_TO_PARAMS, or that
        already pass ``request=``, are returned unchanged.
        """
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate out retry/timeout/metadata, which stay as call-level
        # keyword arguments rather than request fields.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the flattened parameter list must be the
        # control parameters, passed positionally; re-emit them by keyword.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer: cst.CSTTransformer = loggingCallTransformer(),
) -> None:
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory
    """
    # Lazily walk the tree, keeping only *.py files.
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the logging client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Validate preconditions for fix_files before doing any work.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py
new file mode 100755
index 000000000000..69fe4db623ff
--- /dev/null
+++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import re + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-logging' + + +description = "Google Cloud Logging API client library" + +version = None + +with open(os.path.join(package_root, 'google/cloud/logging/gapic_version.py')) as fp: + version_candidates = re.findall(r"(?<=\")\d+.\d+.\d+(?=\")", fp.read()) + assert (len(version_candidates) == 1) + version = version_candidates[0] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +extras = { +} +url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-logging" + +package_root = 
os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt new file mode 100755 index 000000000000..fc812592b0ee --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.1 +google-auth==2.14.1 +proto-plus==1.22.3 +protobuf==3.20.2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt new file mode 100755 index 000000000000..ed7f9aed2559 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py new file mode 100755 index 000000000000..7b3de3117f38 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py new file mode 100755 index 000000000000..1c67fff5278c --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -0,0 +1,13713 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
import json
import math
import pytest
from google.api_core import api_core_version
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers

# Async auth credentials are only available in newer google-auth releases.
try:
    from google.auth.aio import credentials as ga_credentials_async
    HAS_GOOGLE_AUTH_AIO = True
except ImportError:  # pragma: NO COVER
    HAS_GOOGLE_AUTH_AIO = False

from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation
from google.api_core import operation_async  # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.logging_v2.services.config_service_v2 import BaseConfigServiceV2AsyncClient
from google.cloud.logging_v2.services.config_service_v2 import BaseConfigServiceV2Client
from google.cloud.logging_v2.services.config_service_v2 import pagers
from google.cloud.logging_v2.services.config_service_v2 import transports
from google.cloud.logging_v2.types import logging_config
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account
from google.protobuf import empty_pb2  # type: ignore
from google.protobuf import field_mask_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
import google.auth



# Fixture payload returned by a mocked credentials.get_cred_info() in the
# _add_cred_info_for_auth_errors tests below.
CRED_INFO_JSON = {
    "credential_source": "/path/to/file",
    "credential_type": "service account credentials",
    "principal": "service-account@example.com",
}
CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)


async def mock_async_gen(data, chunk_size=1):
    """Yield *data* as utf-8 encoded chunks of *chunk_size* characters."""
    for i in range(0, len(data)):  # pragma: NO COVER
        chunk = data[i : i + chunk_size]
        yield chunk.encode("utf-8")

def client_cert_source_callback():
    # Stand-in mTLS client cert source: returns (cert, key) byte pairs.
    return b"cert bytes", b"key bytes"

# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
def async_anonymous_credentials():
    if HAS_GOOGLE_AUTH_AIO:
        return ga_credentials_async.AnonymousCredentials()
    return ga_credentials.AnonymousCredentials()

# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT

# If default endpoint template is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint template so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint_template(client):
    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE


# Verifies mTLS endpoint derivation: googleapis.com hosts gain a ".mtls."
# segment; non-Google hosts are returned unchanged.
def test__get_default_mtls_endpoint():
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert BaseConfigServiceV2Client._get_default_mtls_endpoint(None) is None
    assert BaseConfigServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert BaseConfigServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert BaseConfigServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert BaseConfigServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert BaseConfigServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi

# Verifies parsing of the GOOGLE_API_USE_CLIENT_CERTIFICATE,
# GOOGLE_API_USE_MTLS_ENDPOINT and GOOGLE_CLOUD_UNIVERSE_DOMAIN env vars,
# including the errors raised for unsupported values.
def test__read_environment_variables():
    assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        assert BaseConfigServiceV2Client._read_environment_variables() == (True, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            BaseConfigServiceV2Client._read_environment_variables()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert BaseConfigServiceV2Client._read_environment_variables() == (False, "never", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert BaseConfigServiceV2Client._read_environment_variables() == (False, "always", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
        assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            BaseConfigServiceV2Client._read_environment_variables()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", "foo.com")

# Verifies cert-source selection: an explicit source wins when client certs
# are enabled; otherwise the mTLS default source is used if available.
def test__get_client_cert_source():
    mock_provided_cert_source = mock.Mock()
    mock_default_cert_source = mock.Mock()

    assert BaseConfigServiceV2Client._get_client_cert_source(None, False) is None
    assert BaseConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None
    assert BaseConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source

    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
            assert BaseConfigServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source
            # "true" is a truthy string here, not the boolean True.
            assert BaseConfigServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source

@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client))
@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient))
def test__get_api_endpoint():
    # Verifies endpoint resolution precedence: explicit override, then the
    # mTLS policy ("always"/"auto" with a cert source), then the universe
    # default. mTLS outside googleapis.com must raise.
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = BaseConfigServiceV2Client._DEFAULT_UNIVERSE
    default_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    assert BaseConfigServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
    assert BaseConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert BaseConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
    assert BaseConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert BaseConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BaseConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert BaseConfigServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
    assert BaseConfigServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint

    with pytest.raises(MutualTLSChannelError) as excinfo:
        BaseConfigServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."


# Verifies universe-domain precedence: client setting beats env var beats
# the built-in default; an empty string is rejected.
def test__get_universe_domain():
    client_universe_domain = "foo.com"
    universe_domain_env = "bar.com"

    assert BaseConfigServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain
    assert BaseConfigServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env
    assert BaseConfigServiceV2Client._get_universe_domain(None, None) == BaseConfigServiceV2Client._DEFAULT_UNIVERSE

    with pytest.raises(ValueError) as excinfo:
        BaseConfigServiceV2Client._get_universe_domain("", None)
    assert str(excinfo.value) == "Universe Domain cannot be an empty string."

# Credential info is appended to error details only for auth-related HTTP
# codes (401/403/404) and only when the credentials expose get_cred_info.
@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [
    (401, CRED_INFO_JSON, True),
    (403, CRED_INFO_JSON, True),
    (404, CRED_INFO_JSON, True),
    (500, CRED_INFO_JSON, False),
    (401, None, False),
    (403, None, False),
    (404, None, False),
    (500, None, False)
])
def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info):
    cred = mock.Mock(["get_cred_info"])
    cred.get_cred_info = mock.Mock(return_value=cred_info_json)
    client = BaseConfigServiceV2Client(credentials=cred)
    client._transport._credentials = cred

    error = core_exceptions.GoogleAPICallError("message", details=["foo"])
    error.code = error_code

    client._add_cred_info_for_auth_errors(error)
    if show_cred_info:
        assert error.details == ["foo", CRED_INFO_STRING]
    else:
        assert error.details == ["foo"]

# Credentials without a get_cred_info method must leave details untouched
# for every status code.
@pytest.mark.parametrize("error_code", [401,403,404,500])
def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code):
    cred = mock.Mock([])
    assert not hasattr(cred, "get_cred_info")
    client = BaseConfigServiceV2Client(credentials=cred)
    client._transport._credentials = cred

    error = core_exceptions.GoogleAPICallError("message", details=[])
    error.code = error_code

    client._add_cred_info_for_auth_errors(error)
    assert error.details == []

# Clients built from in-memory service account info must carry the mocked
# credentials and point at the default host.
@pytest.mark.parametrize("client_class,transport_name", [
    (BaseConfigServiceV2Client, "grpc"),
    (BaseConfigServiceV2AsyncClient, "grpc_asyncio"),
])
def test_base_config_service_v2_client_from_service_account_info(client_class, transport_name):
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'logging.googleapis.com:443'
        )


# The transport must forward always_use_jwt_access=True to the credentials
# and leave them untouched when the flag is False.
@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.ConfigServiceV2GrpcTransport, "grpc"),
    (transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_base_config_service_v2_client_service_account_always_use_jwt(transport_class, transport_name):
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


# Both the _file and _json constructors must route through the mocked
# from_service_account_file factory and use the default host.
@pytest.mark.parametrize("client_class,transport_name", [
    (BaseConfigServiceV2Client, "grpc"),
    (BaseConfigServiceV2AsyncClient, "grpc_asyncio"),
])
def test_base_config_service_v2_client_from_service_account_file(client_class, transport_name):
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'logging.googleapis.com:443'
        )


def test_base_config_service_v2_client_get_transport_class():
    # The default transport and the explicit "grpc" lookup both resolve to
    # the gRPC transport class.
    transport = BaseConfigServiceV2Client.get_transport_class()
    available_transports = [
        transports.ConfigServiceV2GrpcTransport,
    ]
    assert transport in available_transports

    transport = BaseConfigServiceV2Client.get_transport_class("grpc")
    assert transport == transports.ConfigServiceV2GrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"),
    (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client))
@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient))
def test_base_config_service_v2_client_client_options(client_class, transport_class, transport_name):
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(BaseConfigServiceV2Client, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(BaseConfigServiceV2Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
+        with pytest.raises(ValueError) as excinfo:
+            client = client_class(transport=transport_name)
+    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
+    with mock.patch.object(transport_class, '__init__') as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com"
+        )
+
+@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
+    (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", "true"),
+    (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"),
+    (BaseConfigServiceV2Client, 
transports.ConfigServiceV2GrpcTransport, "grpc", "false"), + (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client)) +@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_base_config_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. 
Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient +]) +@mock.patch.object(BaseConfigServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseConfigServiceV2Client)) +@mock.patch.object(BaseConfigServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseConfigServiceV2AsyncClient)) +def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient +]) +@mock.patch.object(BaseConfigServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2Client)) +@mock.patch.object(BaseConfigServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseConfigServiceV2AsyncClient)) +def test_base_config_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BaseConfigServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = 
BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = BaseConfigServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc"), + (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_base_config_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), + (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_base_config_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_base_config_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = BaseConfigServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport, "grpc", grpc_helpers), + (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_base_config_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.ListBucketsRequest, + dict, +]) +def test_list_buckets(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.ListBucketsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBucketsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_buckets_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListBucketsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_buckets(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListBucketsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_buckets_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_buckets in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_buckets] = mock_rpc + request = {} + client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_buckets_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_buckets in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_buckets] = mock_rpc + + request = {} + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_buckets(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_buckets_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListBucketsRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_buckets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.ListBucketsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListBucketsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_buckets_async_from_dict():
+    await test_list_buckets_async(request_type=dict)
+
+def test_list_buckets_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListBucketsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_buckets),
+            '__call__') as call:
+        call.return_value = logging_config.ListBucketsResponse()
+        client.list_buckets(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_buckets_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListBucketsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) + await client.list_buckets(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_buckets_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_buckets( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_buckets_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_buckets( + logging_config.ListBucketsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_buckets_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListBucketsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_buckets( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_buckets_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_buckets( + logging_config.ListBucketsRequest(), + parent='parent_value', + ) + + +def test_list_buckets_pager(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token='abc', + ), + logging_config.ListBucketsResponse( + buckets=[], + next_page_token='def', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token='ghi', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_buckets(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogBucket) + for i in results) +def test_list_buckets_pages(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token='abc', + ), + logging_config.ListBucketsResponse( + buckets=[], + next_page_token='def', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token='ghi', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + ), + RuntimeError, + ) + pages = list(client.list_buckets(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_buckets_async_pager(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token='abc', + ), + logging_config.ListBucketsResponse( + buckets=[], + next_page_token='def', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token='ghi', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_buckets(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogBucket) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_buckets_async_pages(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + next_page_token='abc', + ), + logging_config.ListBucketsResponse( + buckets=[], + next_page_token='def', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + ], + next_page_token='ghi', + ), + logging_config.ListBucketsResponse( + buckets=[ + logging_config.LogBucket(), + logging_config.LogBucket(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_buckets(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetBucketRequest, + dict, +]) +def test_get_bucket(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + ) + response = client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] + + +def test_get_bucket_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.get_bucket(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetBucketRequest( + name='name_value', + ) + +def test_get_bucket_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_bucket in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.get_bucket] = mock_rpc + request = {} + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.get_bucket in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.get_bucket] = mock_rpc + + request = {} + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_bucket(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_get_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetBucketRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket( + name='name_value', + description='description_value', + retention_days=1512, + locked=True, + lifecycle_state=logging_config.LifecycleState.ACTIVE, + analytics_enabled=True, + restricted_fields=['restricted_fields_value'], + )) + response = await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogBucket) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.retention_days == 1512 + assert response.locked is True + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + assert response.analytics_enabled is True + assert response.restricted_fields == ['restricted_fields_value'] + + +@pytest.mark.asyncio +async def test_get_bucket_async_from_dict(): + await test_get_bucket_async(request_type=dict) + +def test_get_bucket_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value = logging_config.LogBucket() + client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_bucket_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_bucket), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket()) + await client.get_bucket(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateBucketRequest, + dict, +]) +def test_create_bucket_async(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateBucketRequest( + parent='parent_value', + bucket_id='bucket_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.create_bucket_async(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateBucketRequest( + parent='parent_value', + bucket_id='bucket_id_value', + ) + +def test_create_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_bucket_async in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.create_bucket_async] = mock_rpc + request = {} + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.create_bucket_async in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.create_bucket_async] = mock_rpc + + request = {} + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_create_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_bucket_async_async_from_dict(): + await test_create_bucket_async_async(request_type=dict) + +def test_create_bucket_async_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_bucket_async_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateBucketRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_bucket_async), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.create_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateBucketRequest, + dict, +]) +def test_update_bucket_async(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_bucket_async_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateBucketRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.update_bucket_async(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateBucketRequest( + name='name_value', + ) + +def test_update_bucket_async_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_bucket_async in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.update_bucket_async] = mock_rpc + request = {} + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_bucket_async_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.update_bucket_async in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.update_bucket_async] = mock_rpc + + request = {} + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_bucket_async(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_update_bucket_async_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateBucketRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_bucket_async_async_from_dict(): + await test_update_bucket_async_async(request_type=dict) + +def test_update_bucket_async_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_bucket_async_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateBucketRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_bucket_async), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client.update_bucket_async(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateBucketRequest, + dict, +]) +def test_create_bucket(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+            analytics_enabled=True,
+            restricted_fields=['restricted_fields_value'],
+        )
+        response = client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.CreateBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+    assert response.analytics_enabled is True
+    assert response.restricted_fields == ['restricted_fields_value']
+
+
+# Verifies a non-empty CreateBucketRequest is forwarded to the stub unchanged.
+def test_create_bucket_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = logging_config.CreateBucketRequest(
+        parent='parent_value',
+        bucket_id='bucket_id_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.create_bucket(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.CreateBucketRequest(
+            parent='parent_value',
+            bucket_id='bucket_id_value',
+        )
+
+# Verifies the sync client reuses the wrapped RPC cached at construction
+# time instead of re-wrapping on every call.
+def test_create_bucket_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.create_bucket in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.create_bucket] = mock_rpc
+        request = {}
+        client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.create_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Async counterpart of the cached-wrapped-RPC check above.
+@pytest.mark.asyncio
+async def test_create_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2AsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.create_bucket in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.create_bucket] = mock_rpc
+
+        request = {}
+        await client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.create_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Exercises the async create_bucket path against a mocked gRPC stub.
+@pytest.mark.asyncio
+async def test_create_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateBucketRequest):
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): the generator emits no space after '=' below; kept verbatim.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+            analytics_enabled=True,
+            restricted_fields=['restricted_fields_value'],
+        ))
+        response = await client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.CreateBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+    assert response.analytics_enabled is True
+    assert response.restricted_fields == ['restricted_fields_value']
+
+
+# Delegates to the async test using a plain dict request.
+@pytest.mark.asyncio
+async def test_create_bucket_async_from_dict():
+    await test_create_bucket_async(request_type=dict)
+
+# Verifies routing metadata ('x-goog-request-params') carries the parent.
+def test_create_bucket_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateBucketRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        call.return_value = logging_config.LogBucket()
+        client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+# Async counterpart of the field-header routing check.
+@pytest.mark.asyncio
+async def test_create_bucket_field_headers_async():
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateBucketRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_bucket),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket())
+        await client.create_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent_value',
+    ) in kw['metadata']
+
+
+# Exercises the sync update_bucket path for both proto and dict requests.
+@pytest.mark.parametrize("request_type", [
+    logging_config.UpdateBucketRequest,
+    dict,
+])
+def test_update_bucket(request_type, transport: str = 'grpc'):
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+            analytics_enabled=True,
+            restricted_fields=['restricted_fields_value'],
+        )
+        response = client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UpdateBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+    assert response.analytics_enabled is True
+    assert response.restricted_fields == ['restricted_fields_value']
+
+
+# Verifies a non-empty UpdateBucketRequest reaches the stub unchanged.
+def test_update_bucket_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = logging_config.UpdateBucketRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.update_bucket(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UpdateBucketRequest(
+            name='name_value',
+        )
+
+# Verifies the sync client reuses the wrapped RPC cached at construction time.
+def test_update_bucket_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.update_bucket in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.update_bucket] = mock_rpc
+        request = {}
+        client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.update_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Async counterpart of the cached-wrapped-RPC check above.
+@pytest.mark.asyncio
+async def test_update_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2AsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.update_bucket in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.update_bucket] = mock_rpc
+
+        request = {}
+        await client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.update_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Exercises the async update_bucket path against a mocked gRPC stub.
+@pytest.mark.asyncio
+async def test_update_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateBucketRequest):
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): the generator emits no space after '=' below; kept verbatim.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
+            name='name_value',
+            description='description_value',
+            retention_days=1512,
+            locked=True,
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+            analytics_enabled=True,
+            restricted_fields=['restricted_fields_value'],
+        ))
+        response = await client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UpdateBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogBucket)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.retention_days == 1512
+    assert response.locked is True
+    assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+    assert response.analytics_enabled is True
+    assert response.restricted_fields == ['restricted_fields_value']
+
+
+# Delegates to the async test using a plain dict request.
+@pytest.mark.asyncio
+async def test_update_bucket_async_from_dict():
+    await test_update_bucket_async(request_type=dict)
+
+# Verifies routing metadata ('x-goog-request-params') carries the bucket name.
+def test_update_bucket_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateBucketRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        call.return_value = logging_config.LogBucket()
+        client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+# Async counterpart of the field-header routing check.
+@pytest.mark.asyncio
+async def test_update_bucket_field_headers_async():
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateBucketRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_bucket),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket())
+        await client.update_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+# Exercises the sync delete_bucket path (returns None) for proto and dict requests.
+@pytest.mark.parametrize("request_type", [
+    logging_config.DeleteBucketRequest,
+    dict,
+])
+def test_delete_bucket(request_type, transport: str = 'grpc'):
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.DeleteBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+# Verifies a non-empty DeleteBucketRequest reaches the stub unchanged.
+def test_delete_bucket_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = logging_config.DeleteBucketRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_bucket),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.delete_bucket(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.DeleteBucketRequest(
+            name='name_value',
+        )
+
+# Verifies the sync client reuses the wrapped RPC cached at construction time.
+def test_delete_bucket_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_bucket in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.delete_bucket] = mock_rpc
+        request = {}
+        client.delete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.delete_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Async counterpart of the cached-wrapped-RPC check above.
+@pytest.mark.asyncio
+async def test_delete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2AsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.delete_bucket in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.delete_bucket] = mock_rpc
+
+        request = {}
+        await client.delete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.delete_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Exercises the async delete_bucket path against a mocked gRPC stub.
+@pytest.mark.asyncio
+async def test_delete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteBucketRequest):
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.delete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.DeleteBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+# Delegates to the async test using a plain dict request.
+@pytest.mark.asyncio
+async def test_delete_bucket_async_from_dict():
+    await test_delete_bucket_async(request_type=dict)
+
+# Verifies routing metadata ('x-goog-request-params') carries the bucket name.
+def test_delete_bucket_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.DeleteBucketRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_bucket),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+# Async counterpart of the field-header routing check.
+@pytest.mark.asyncio
+async def test_delete_bucket_field_headers_async():
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.DeleteBucketRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_bucket),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+# Exercises the sync undelete_bucket path (returns None) for proto and dict requests.
+@pytest.mark.parametrize("request_type", [
+    logging_config.UndeleteBucketRequest,
+    dict,
+])
+def test_undelete_bucket(request_type, transport: str = 'grpc'):
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.undelete_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.undelete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UndeleteBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+# Verifies a non-empty UndeleteBucketRequest reaches the stub unchanged.
+def test_undelete_bucket_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = logging_config.UndeleteBucketRequest(
+        name='name_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.undelete_bucket),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client.undelete_bucket(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.UndeleteBucketRequest(
+            name='name_value',
+        )
+
+# Verifies the sync client reuses the wrapped RPC cached at construction time.
+def test_undelete_bucket_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.undelete_bucket in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport.undelete_bucket] = mock_rpc
+        request = {}
+        client.undelete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client.undelete_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Async counterpart of the cached-wrapped-RPC check above.
+@pytest.mark.asyncio
+async def test_undelete_bucket_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2AsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport.undelete_bucket in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport.undelete_bucket] = mock_rpc
+
+        request = {}
+        await client.undelete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client.undelete_bucket(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Exercises the async undelete_bucket path against a mocked gRPC stub.
+@pytest.mark.asyncio
+async def test_undelete_bucket_async(transport: str = 'grpc_asyncio', request_type=logging_config.UndeleteBucketRequest):
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.undelete_bucket),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        response = await client.undelete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UndeleteBucketRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+# Delegates to the async test using a plain dict request.
+@pytest.mark.asyncio
+async def test_undelete_bucket_async_from_dict():
+    await test_undelete_bucket_async(request_type=dict)
+
+# Verifies routing metadata ('x-goog-request-params') carries the bucket name.
+def test_undelete_bucket_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UndeleteBucketRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.undelete_bucket),
+            '__call__') as call:
+        call.return_value = None
+        client.undelete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+# Async counterpart of the field-header routing check.
+@pytest.mark.asyncio
+async def test_undelete_bucket_field_headers_async():
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UndeleteBucketRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.undelete_bucket),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.undelete_bucket(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name_value',
+    ) in kw['metadata']
+
+
+# Exercises the sync (internal) _list_views path; the response is wrapped in a pager.
+@pytest.mark.parametrize("request_type", [
+    logging_config.ListViewsRequest,
+    dict,
+])
+def test__list_views(request_type, transport: str = 'grpc'):
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = logging_config.ListViewsResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client._list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.ListViewsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers._ListViewsPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+# Verifies a non-empty ListViewsRequest reaches the stub unchanged.
+def test__list_views_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = logging_config.ListViewsRequest(
+        parent='parent_value',
+        page_token='page_token_value',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_views),
+            '__call__') as call:
+        call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._list_views(request=request)
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == logging_config.ListViewsRequest(
+            parent='parent_value',
+            page_token='page_token_value',
+        )
+
+# Verifies the sync client reuses the wrapped RPC cached at construction time.
+def test__list_views_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2Client(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="grpc",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport._list_views in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string.
+        client._transport._wrapped_methods[client._transport._list_views] = mock_rpc
+        request = {}
+        client._list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        client._list_views(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Async counterpart of the cached-wrapped-RPC check above.
+@pytest.mark.asyncio
+async def test__list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"):
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+        client = BaseConfigServiceV2AsyncClient(
+            credentials=async_anonymous_credentials(),
+            transport=transport,
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._client._transport._list_views in client._client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.AsyncMock()
+        mock_rpc.return_value = mock.Mock()
+        client._client._transport._wrapped_methods[client._client._transport._list_views] = mock_rpc
+
+        request = {}
+        await client._list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert mock_rpc.call_count == 1
+
+        await client._list_views(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+# Exercises the async _list_views path; the response is wrapped in an async pager.
+@pytest.mark.asyncio
+async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListViewsRequest):
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_views),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # NOTE(review): the generator emits no space after '=' below; kept verbatim.
+        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client._list_views(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.ListViewsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers._ListViewsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+# Delegates to the async test using a plain dict request.
+@pytest.mark.asyncio
+async def test__list_views_async_from_dict():
+    await test__list_views_async(request_type=dict)
+
+# Verifies routing metadata ('x-goog-request-params') carries the parent.
+def test__list_views_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListViewsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + call.return_value = logging_config.ListViewsResponse() + client._list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__list_views_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListViewsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + await client._list_views(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__list_views_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.ListViewsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._list_views( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test__list_views_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._list_views( + logging_config.ListViewsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test__list_views_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListViewsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._list_views( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__list_views_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._list_views( + logging_config.ListViewsRequest(), + parent='parent_value', + ) + + +def test__list_views_pager(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token='abc', + ), + logging_config.ListViewsResponse( + views=[], + next_page_token='def', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token='ghi', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client._list_views(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogView) + for i in results) +def test__list_views_pages(transport_name: str = "grpc"): + client = 
BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token='abc', + ), + logging_config.ListViewsResponse( + views=[], + next_page_token='def', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token='ghi', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + pages = list(client._list_views(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test__list_views_async_pager(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token='abc', + ), + logging_config.ListViewsResponse( + views=[], + next_page_token='def', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token='ghi', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + async_pager = await client._list_views(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogView) + for i in responses) + + +@pytest.mark.asyncio +async def test__list_views_async_pages(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + logging_config.LogView(), + ], + next_page_token='abc', + ), + logging_config.ListViewsResponse( + views=[], + next_page_token='def', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + ], + next_page_token='ghi', + ), + logging_config.ListViewsResponse( + views=[ + logging_config.LogView(), + logging_config.LogView(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client._list_views(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetViewRequest, + dict, +]) +def test__get_view(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client._get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +def test__get_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetViewRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_view), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._get_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetViewRequest( + name='name_value', + ) + +def test__get_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_view] = mock_rpc + request = {} + client._get_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_view] = mock_rpc + + request = {} + await client._get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetViewRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client._get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test__get_view_async_from_dict(): + await test__get_view_async(request_type=dict) + +def test__get_view_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client._get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_view_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetViewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client._get_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateViewRequest, + dict, +]) +def test__create_view(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client._create_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +def test__create_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateViewRequest( + parent='parent_value', + view_id='view_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_view), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._create_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateViewRequest( + parent='parent_value', + view_id='view_id_value', + ) + +def test__create_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._create_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._create_view] = mock_rpc + request = {} + client._create_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._create_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._create_view] = mock_rpc + + request = {} + await client._create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._create_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateViewRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + response = await client._create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +@pytest.mark.asyncio +async def test__create_view_async_from_dict(): + await test__create_view_async(request_type=dict) + +def test__create_view_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateViewRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_view), + '__call__') as call: + call.return_value = logging_config.LogView() + client._create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__create_view_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateViewRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client._create_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateViewRequest, + dict, +]) +def test__update_view(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + ) + response = client._update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogView) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + + +def test__update_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateViewRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_view), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._update_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateViewRequest( + name='name_value', + ) + +def test__update_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._update_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._update_view] = mock_rpc + request = {} + client._update_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._update_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._update_view] = mock_rpc + + request = {} + await client._update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._update_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateViewRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._update_view),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView(
+            name='name_value',
+            description='description_value',
+            filter='filter_value',
+        ))
+        response = await client._update_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UpdateViewRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogView)
+    assert response.name == 'name_value'
+    assert response.description == 'description_value'
+    assert response.filter == 'filter_value'
+
+
+@pytest.mark.asyncio
+async def test__update_view_async_from_dict():
+    await test__update_view_async(request_type=dict)
+
+def test__update_view_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateViewRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._update_view),
+            '__call__') as call:
+        call.return_value = logging_config.LogView()
+        client._update_view(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__update_view_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateViewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) + await client._update_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteViewRequest, + dict, +]) +def test__delete_view(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client._delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test__delete_view_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteViewRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_view), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._delete_view(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteViewRequest( + name='name_value', + ) + +def test__delete_view_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._delete_view in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._delete_view] = mock_rpc + request = {} + client._delete_view(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._delete_view in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._delete_view] = mock_rpc + + request = {} + await client._delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._delete_view(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_view_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteViewRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client._delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteViewRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test__delete_view_async_from_dict(): + await test__delete_view_async(request_type=dict) + +def test__delete_view_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_view), + '__call__') as call: + call.return_value = None + client._delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__delete_view_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteViewRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._delete_view), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_view(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.ListSinksRequest, + dict, +]) +def test__list_sinks(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + ) + response = client._list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.ListSinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers._ListSinksPager) + assert response.next_page_token == 'next_page_token_value' + + +def test__list_sinks_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListSinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._list_sinks(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListSinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test__list_sinks_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._list_sinks in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._list_sinks] = mock_rpc + request = {} + client._list_sinks(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._list_sinks in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._list_sinks] = mock_rpc + + request = {} + await client._list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._list_sinks(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListSinksRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._list_sinks),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client._list_sinks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.ListSinksRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers._ListSinksAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test__list_sinks_async_from_dict():
+    await test__list_sinks_async(request_type=dict)
+
+def test__list_sinks_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListSinksRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_sinks),
+            '__call__') as call:
+        call.return_value = logging_config.ListSinksResponse()
+        client._list_sinks(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__list_sinks_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListSinksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + await client._list_sinks(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__list_sinks_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._list_sinks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test__list_sinks_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._list_sinks( + logging_config.ListSinksRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test__list_sinks_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListSinksResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._list_sinks( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__list_sinks_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._list_sinks( + logging_config.ListSinksRequest(), + parent='parent_value', + ) + + +def test__list_sinks_pager(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client._list_sinks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogSink) + for i in results) +def test__list_sinks_pages(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = list(client._list_sinks(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test__list_sinks_async_pager(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + async_pager = await client._list_sinks(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogSink) + for i in responses) + + +@pytest.mark.asyncio +async def test__list_sinks_async_pages(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + logging_config.LogSink(), + ], + next_page_token='abc', + ), + logging_config.ListSinksResponse( + sinks=[], + next_page_token='def', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + ], + next_page_token='ghi', + ), + logging_config.ListSinksResponse( + sinks=[ + logging_config.LogSink(), + logging_config.LogSink(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client._list_sinks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetSinkRequest, + dict, +]) +def test__get_sink(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + ) + response = client._get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test__get_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSinkRequest( + sink_name='sink_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._get_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSinkRequest( + sink_name='sink_name_value', + ) + +def test__get_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_sink] = mock_rpc + request = {} + client._get_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_sink] = mock_rpc + + request = {} + await client._get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._get_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+        ))
+        response = await client._get_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.GetSinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+@pytest.mark.asyncio
+async def test__get_sink_async_from_dict():
+    await test__get_sink_async(request_type=dict)
+
+def test__get_sink_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetSinkRequest()
+
+    request.sink_name = 'sink_name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client._get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_sink_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSinkRequest() + + request.sink_name = 'sink_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client._get_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name_value', + ) in kw['metadata'] + + +def test__get_sink_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client._get_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + + +def test__get_sink_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._get_sink( + logging_config.GetSinkRequest(), + sink_name='sink_name_value', + ) + +@pytest.mark.asyncio +async def test__get_sink_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._get_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__get_sink_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._get_sink( + logging_config.GetSinkRequest(), + sink_name='sink_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateSinkRequest, + dict, +]) +def test__create_sink(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + ) + response = client._create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test__create_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateSinkRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._create_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateSinkRequest( + parent='parent_value', + ) + +def test__create_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._create_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._create_sink] = mock_rpc + request = {} + client._create_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._create_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._create_sink] = mock_rpc + + request = {} + await client._create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._create_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateSinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._create_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+        ))
+        response = await client._create_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.CreateSinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+@pytest.mark.asyncio
+async def test__create_sink_async_from_dict():
+    await test__create_sink_async(request_type=dict)
+
+def test__create_sink_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.CreateSinkRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client._create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__create_sink_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateSinkRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client._create_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__create_sink_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client._create_sink( + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val + + +def test__create_sink_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._create_sink( + logging_config.CreateSinkRequest(), + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + +@pytest.mark.asyncio +async def test__create_sink_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._create_sink( + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test__create_sink_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._create_sink( + logging_config.CreateSinkRequest(), + parent='parent_value', + sink=logging_config.LogSink(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateSinkRequest, + dict, +]) +def test__update_sink(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + ) + response = client._update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogSink) + assert response.name == 'name_value' + assert response.destination == 'destination_value' + assert response.filter == 'filter_value' + assert response.description == 'description_value' + assert response.disabled is True + assert response.output_version_format == logging_config.LogSink.VersionFormat.V2 + assert response.writer_identity == 'writer_identity_value' + assert response.include_children is True + + +def test__update_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSinkRequest( + sink_name='sink_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._update_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSinkRequest( + sink_name='sink_name_value', + ) + +def test__update_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._update_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._update_sink] = mock_rpc + request = {} + client._update_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._update_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._update_sink] = mock_rpc + + request = {} + await client._update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._update_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._update_sink),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink(
+            name='name_value',
+            destination='destination_value',
+            filter='filter_value',
+            description='description_value',
+            disabled=True,
+            output_version_format=logging_config.LogSink.VersionFormat.V2,
+            writer_identity='writer_identity_value',
+            include_children=True,
+        ))
+        response = await client._update_sink(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UpdateSinkRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.LogSink)
+    assert response.name == 'name_value'
+    assert response.destination == 'destination_value'
+    assert response.filter == 'filter_value'
+    assert response.description == 'description_value'
+    assert response.disabled is True
+    assert response.output_version_format == logging_config.LogSink.VersionFormat.V2
+    assert response.writer_identity == 'writer_identity_value'
+    assert response.include_children is True
+
+
+@pytest.mark.asyncio
+async def test__update_sink_async_from_dict():
+    await test__update_sink_async(request_type=dict)
+
+def test__update_sink_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateSinkRequest()
+
+    request.sink_name = 'sink_name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + call.return_value = logging_config.LogSink() + client._update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__update_sink_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSinkRequest() + + request.sink_name = 'sink_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + await client._update_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name_value', + ) in kw['metadata'] + + +def test__update_sink_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client._update_sink( + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test__update_sink_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._update_sink( + logging_config.UpdateSinkRequest(), + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test__update_sink_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogSink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client._update_sink( + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test__update_sink_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._update_sink( + logging_config.UpdateSinkRequest(), + sink_name='sink_name_value', + sink=logging_config.LogSink(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteSinkRequest, + dict, +]) +def test__delete_sink(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client._delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test__delete_sink_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteSinkRequest( + sink_name='sink_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._delete_sink(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteSinkRequest( + sink_name='sink_name_value', + ) + +def test__delete_sink_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._delete_sink in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._delete_sink] = mock_rpc + request = {} + client._delete_sink(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._delete_sink in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._delete_sink] = mock_rpc + + request = {} + await client._delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._delete_sink(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_sink_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteSinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client._delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteSinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test__delete_sink_async_from_dict(): + await test__delete_sink_async(request_type=dict) + +def test__delete_sink_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteSinkRequest() + + request.sink_name = 'sink_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + call.return_value = None + client._delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__delete_sink_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = logging_config.DeleteSinkRequest() + + request.sink_name = 'sink_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_sink(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'sink_name=sink_name_value', + ) in kw['metadata'] + + +def test__delete_sink_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._delete_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + + +def test__delete_sink_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client._delete_sink( + logging_config.DeleteSinkRequest(), + sink_name='sink_name_value', + ) + +@pytest.mark.asyncio +async def test__delete_sink_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._delete_sink( + sink_name='sink_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].sink_name + mock_val = 'sink_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__delete_sink_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._delete_sink( + logging_config.DeleteSinkRequest(), + sink_name='sink_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateLinkRequest, + dict, +]) +def test__create_link(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client._create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test__create_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateLinkRequest( + parent='parent_value', + link_id='link_id_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._create_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateLinkRequest( + parent='parent_value', + link_id='link_id_value', + ) + +def test__create_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._create_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._create_link] = mock_rpc + request = {} + client._create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client._create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._create_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._create_link] = mock_rpc + + request = {} + await client._create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client._create_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateLinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client._create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test__create_link_async_from_dict(): + await test__create_link_async(request_type=dict) + +def test__create_link_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateLinkRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client._create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__create_link_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateLinkRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client._create_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__create_link_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._create_link( + parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name='name_value') + assert arg == mock_val + arg = args[0].link_id + mock_val = 'link_id_value' + assert arg == mock_val + + +def test__create_link_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._create_link( + logging_config.CreateLinkRequest(), + parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', + ) + +@pytest.mark.asyncio +async def test__create_link_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client._create_link( + parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].link + mock_val = logging_config.Link(name='name_value') + assert arg == mock_val + arg = args[0].link_id + mock_val = 'link_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__create_link_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._create_link( + logging_config.CreateLinkRequest(), + parent='parent_value', + link=logging_config.Link(name='name_value'), + link_id='link_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteLinkRequest, + dict, +]) +def test__delete_link(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client._delete_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test__delete_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteLinkRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._delete_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteLinkRequest( + name='name_value', + ) + +def test__delete_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._delete_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._delete_link] = mock_rpc + request = {} + client._delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client._delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._delete_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._delete_link] = mock_rpc + + request = {} + await client._delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client._delete_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteLinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client._delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test__delete_link_async_from_dict(): + await test__delete_link_async(request_type=dict) + +def test__delete_link_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteLinkRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client._delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__delete_link_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteLinkRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) + await client._delete_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__delete_link_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name='operations/op') + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._delete_link( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test__delete_link_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._delete_link( + logging_config.DeleteLinkRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test__delete_link_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/op') + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._delete_link( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__delete_link_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._delete_link( + logging_config.DeleteLinkRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.ListLinksRequest, + dict, +]) +def test__list_links(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListLinksResponse( + next_page_token='next_page_token_value', + ) + response = client._list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.ListLinksRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers._ListLinksPager) + assert response.next_page_token == 'next_page_token_value' + + +def test__list_links_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.ListLinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._list_links(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListLinksRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test__list_links_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._list_links in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._list_links] = mock_rpc + request = {} + client._list_links(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._list_links in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._list_links] = mock_rpc + + request = {} + await client._list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._list_links(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListLinksRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._list_links),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (fixed: missing space after `=` in the generated assignment)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client._list_links(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.ListLinksRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers._ListLinksAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test__list_links_async_from_dict():
+    """Verify the async RPC also accepts a plain dict as the request."""
+    await test__list_links_async(request_type=dict)
+
+def test__list_links_field_headers():
+    """Verify the routing header for ``parent`` is sent with the request."""
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListLinksRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_links),
+            '__call__') as call:
+        call.return_value = logging_config.ListLinksResponse()
+        client._list_links(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__list_links_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListLinksRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) + await client._list_links(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__list_links_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListLinksResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._list_links( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test__list_links_flattened_error():
+    """Passing a request object together with flattened fields must raise."""
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client._list_links(
+            logging_config.ListLinksRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test__list_links_flattened_async():
+    """Flattened ``parent`` kwarg must be copied into the request (async)."""
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_links),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (fixed: removed a dead plain-response assignment that was
+        # immediately overwritten by this FakeUnaryUnaryCall assignment)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client._list_links(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test__list_links_flattened_error_async():
+    """Passing a request object together with flattened fields must raise (async)."""
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client._list_links( + logging_config.ListLinksRequest(), + parent='parent_value', + ) + + +def test__list_links_pager(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client._list_links(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.Link) + for i in results) +def test__list_links_pages(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = list(client._list_links(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test__list_links_async_pager(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + async_pager = await client._list_links(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.Link) + for i in responses) + + +@pytest.mark.asyncio +async def test__list_links_async_pages(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + logging_config.Link(), + ], + next_page_token='abc', + ), + logging_config.ListLinksResponse( + links=[], + next_page_token='def', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + ], + next_page_token='ghi', + ), + logging_config.ListLinksResponse( + links=[ + logging_config.Link(), + logging_config.Link(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client._list_links(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetLinkRequest, + dict, +]) +def test__get_link(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Link( + name='name_value', + description='description_value', + lifecycle_state=logging_config.LifecycleState.ACTIVE, + ) + response = client._get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Link) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE + + +def test__get_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetLinkRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_link), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._get_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetLinkRequest( + name='name_value', + ) + +def test__get_link_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_link in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_link] = mock_rpc + request = {} + client._get_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_link in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_link] = mock_rpc + + request = {} + await client._get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetLinkRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._get_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (fixed: missing space after `=` in the generated assignment)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link(
+            name='name_value',
+            description='description_value',
+            lifecycle_state=logging_config.LifecycleState.ACTIVE,
+        ))
+        response = await client._get_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.GetLinkRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, logging_config.Link)
+        assert response.name == 'name_value'
+        assert response.description == 'description_value'
+        assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE
+
+
+@pytest.mark.asyncio
+async def test__get_link_async_from_dict():
+    """Verify the async RPC also accepts a plain dict as the request."""
+    await test__get_link_async(request_type=dict)
+
+def test__get_link_field_headers():
+    """Verify the routing header for ``name`` is sent with the request."""
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetLinkRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._get_link),
+            '__call__') as call:
+        call.return_value = logging_config.Link()
+        client._get_link(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_link_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetLinkRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_link), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) + await client._get_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__get_link_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Link() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._get_link( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+
+def test__get_link_flattened_error():
+    """Passing a request object together with flattened fields must raise."""
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client._get_link(
+            logging_config.GetLinkRequest(),
+            name='name_value',
+        )
+
+@pytest.mark.asyncio
+async def test__get_link_flattened_async():
+    """Flattened ``name`` kwarg must be copied into the request (async)."""
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._get_link),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (fixed: removed a dead plain-response assignment that was
+        # immediately overwritten by this FakeUnaryUnaryCall assignment)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client._get_link(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = 'name_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test__get_link_flattened_error_async():
+    """Passing a request object together with flattened fields must raise (async)."""
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client._get_link( + logging_config.GetLinkRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.ListExclusionsRequest, + dict, +]) +def test__list_exclusions(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + ) + response = client._list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.ListExclusionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers._ListExclusionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test__list_exclusions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = logging_config.ListExclusionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._list_exclusions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.ListExclusionsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test__list_exclusions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._list_exclusions in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._list_exclusions] = mock_rpc + request = {} + client._list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_exclusions_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._list_exclusions in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._list_exclusions] = mock_rpc + + request = {} + await client._list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._list_exclusions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_type=logging_config.ListExclusionsRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._list_exclusions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (fixed: missing space after `=` in the generated assignment)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client._list_exclusions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.ListExclusionsRequest()
+        assert args[0] == request
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pagers._ListExclusionsAsyncPager)
+        assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test__list_exclusions_async_from_dict():
+    """Verify the async RPC also accepts a plain dict as the request."""
+    await test__list_exclusions_async(request_type=dict)
+
+def test__list_exclusions_field_headers():
+    """Verify the routing header for ``parent`` is sent with the request."""
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.ListExclusionsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_exclusions),
+            '__call__') as call:
+        call.return_value = logging_config.ListExclusionsResponse()
+        client._list_exclusions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+        # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__list_exclusions_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.ListExclusionsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) + await client._list_exclusions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__list_exclusions_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.ListExclusionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._list_exclusions( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test__list_exclusions_flattened_error():
+    """Passing a request object together with flattened fields must raise."""
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client._list_exclusions(
+            logging_config.ListExclusionsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test__list_exclusions_flattened_async():
+    """Flattened ``parent`` kwarg must be copied into the request (async)."""
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._list_exclusions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (fixed: removed a dead plain-response assignment that was
+        # immediately overwritten by this FakeUnaryUnaryCall assignment)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client._list_exclusions(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test__list_exclusions_flattened_error_async():
+    """Passing a request object together with flattened fields must raise (async)."""
+    client = BaseConfigServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client._list_exclusions( + logging_config.ListExclusionsRequest(), + parent='parent_value', + ) + + +def test__list_exclusions_pager(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client._list_exclusions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in results) +def test__list_exclusions_pages(transport_name: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = list(client._list_exclusions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test__list_exclusions_async_pager(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + async_pager = await client._list_exclusions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_config.LogExclusion) + for i in responses) + + +@pytest.mark.asyncio +async def test__list_exclusions_async_pages(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + next_page_token='abc', + ), + logging_config.ListExclusionsResponse( + exclusions=[], + next_page_token='def', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + ], + next_page_token='ghi', + ), + logging_config.ListExclusionsResponse( + exclusions=[ + logging_config.LogExclusion(), + logging_config.LogExclusion(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client._list_exclusions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_config.GetExclusionRequest, + dict, +]) +def test__get_exclusion(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client._get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test__get_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetExclusionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._get_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetExclusionRequest( + name='name_value', + ) + +def test__get_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_exclusion] = mock_rpc + request = {} + client._get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_exclusion] = mock_rpc + + request = {} + await client._get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetExclusionRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client._get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test__get_exclusion_async_from_dict(): + await test__get_exclusion_async(request_type=dict) + +def test__get_exclusion_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client._get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_exclusion_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetExclusionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client._get_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__get_exclusion_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._get_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test__get_exclusion_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test__get_exclusion_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._get_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__get_exclusion_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._get_exclusion( + logging_config.GetExclusionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CreateExclusionRequest, + dict, +]) +def test__create_exclusion(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client._create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CreateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test__create_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CreateExclusionRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._create_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CreateExclusionRequest( + parent='parent_value', + ) + +def test__create_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._create_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._create_exclusion] = mock_rpc + request = {} + client._create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._create_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._create_exclusion] = mock_rpc + + request = {} + await client._create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._create_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.CreateExclusionRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client._create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CreateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test__create_exclusion_async_from_dict(): + await test__create_exclusion_async(request_type=dict) + +def test__create_exclusion_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client._create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__create_exclusion_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.CreateExclusionRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client._create_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__create_exclusion_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._create_exclusion( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + + +def test__create_exclusion_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + +@pytest.mark.asyncio +async def test__create_exclusion_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._create_exclusion( + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test__create_exclusion_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._create_exclusion( + logging_config.CreateExclusionRequest(), + parent='parent_value', + exclusion=logging_config.LogExclusion(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateExclusionRequest, + dict, +]) +def test__update_exclusion(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + ) + response = client._update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +def test__update_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateExclusionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._update_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateExclusionRequest( + name='name_value', + ) + +def test__update_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._update_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._update_exclusion] = mock_rpc + request = {} + client._update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._update_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._update_exclusion] = mock_rpc + + request = {} + await client._update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._update_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateExclusionRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + response = await client._update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.LogExclusion) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.disabled is True + + +@pytest.mark.asyncio +async def test__update_exclusion_async_from_dict(): + await test__update_exclusion_async(request_type=dict) + +def test__update_exclusion_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + call.return_value = logging_config.LogExclusion() + client._update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__update_exclusion_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateExclusionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + await client._update_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__update_exclusion_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test__update_exclusion_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test__update_exclusion_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.LogExclusion() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._update_exclusion( + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test__update_exclusion_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._update_exclusion( + logging_config.UpdateExclusionRequest(), + name='name_value', + exclusion=logging_config.LogExclusion(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.DeleteExclusionRequest, + dict, +]) +def test__delete_exclusion(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client._delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test__delete_exclusion_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.DeleteExclusionRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._delete_exclusion(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.DeleteExclusionRequest( + name='name_value', + ) + +def test__delete_exclusion_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._delete_exclusion in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport._delete_exclusion] = mock_rpc + request = {} + client._delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client._delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_exclusion_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._delete_exclusion in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._delete_exclusion] = mock_rpc + + request = {} + await client._delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client._delete_exclusion(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_exclusion_async(transport: str = 'grpc_asyncio', request_type=logging_config.DeleteExclusionRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client._delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.DeleteExclusionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test__delete_exclusion_async_from_dict(): + await test__delete_exclusion_async(request_type=dict) + +def test__delete_exclusion_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + call.return_value = None + client._delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__delete_exclusion_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.DeleteExclusionRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_exclusion(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__delete_exclusion_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client._delete_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test__delete_exclusion_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test__delete_exclusion_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._delete_exclusion( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__delete_exclusion_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._delete_exclusion( + logging_config.DeleteExclusionRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetCmekSettingsRequest, + dict, +]) +def test__get_cmek_settings(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + ) + response = client._get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetCmekSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' + + +def test__get_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetCmekSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_cmek_settings), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._get_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetCmekSettingsRequest( + name='name_value', + ) + +def test__get_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_cmek_settings] = mock_rpc + request = {} + client._get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_cmek_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_cmek_settings] = mock_rpc + + request = {} + await client._get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetCmekSettingsRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._get_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            kms_key_version_name='kms_key_version_name_value',
+            service_account_id='service_account_id_value',
+        ))
+        response = await client._get_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.GetCmekSettingsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.CmekSettings)
+    assert response.name == 'name_value'
+    assert response.kms_key_name == 'kms_key_name_value'
+    assert response.kms_key_version_name == 'kms_key_version_name_value'
+    assert response.service_account_id == 'service_account_id_value'
+
+
+@pytest.mark.asyncio
+async def test__get_cmek_settings_async_from_dict():
+    await test__get_cmek_settings_async(request_type=dict)
+
+def test__get_cmek_settings_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.GetCmekSettingsRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._get_cmek_settings),
+            '__call__') as call:
+        call.return_value = logging_config.CmekSettings()
+        client._get_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_cmek_settings_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetCmekSettingsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client._get_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateCmekSettingsRequest, + dict, +]) +def test__update_cmek_settings(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + ) + response = client._update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateCmekSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.CmekSettings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_key_version_name == 'kms_key_version_name_value' + assert response.service_account_id == 'service_account_id_value' + + +def test__update_cmek_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateCmekSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_cmek_settings), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._update_cmek_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateCmekSettingsRequest( + name='name_value', + ) + +def test__update_cmek_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._update_cmek_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._update_cmek_settings] = mock_rpc + request = {} + client._update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_cmek_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._update_cmek_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._update_cmek_settings] = mock_rpc + + request = {} + await client._update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._update_cmek_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateCmekSettingsRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport._update_cmek_settings),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings(
+            name='name_value',
+            kms_key_name='kms_key_name_value',
+            kms_key_version_name='kms_key_version_name_value',
+            service_account_id='service_account_id_value',
+        ))
+        response = await client._update_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging_config.UpdateCmekSettingsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, logging_config.CmekSettings)
+    assert response.name == 'name_value'
+    assert response.kms_key_name == 'kms_key_name_value'
+    assert response.kms_key_version_name == 'kms_key_version_name_value'
+    assert response.service_account_id == 'service_account_id_value'
+
+
+@pytest.mark.asyncio
+async def test__update_cmek_settings_async_from_dict():
+    await test__update_cmek_settings_async(request_type=dict)
+
+def test__update_cmek_settings_field_headers():
+    client = BaseConfigServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging_config.UpdateCmekSettingsRequest()
+
+    request.name = 'name_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport._update_cmek_settings),
+            '__call__') as call:
+        call.return_value = logging_config.CmekSettings()
+        client._update_cmek_settings(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__update_cmek_settings_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateCmekSettingsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_cmek_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) + await client._update_cmek_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + logging_config.GetSettingsRequest, + dict, +]) +def test__get_settings(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + ) + response = client._get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.GetSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True + + +def test__get_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.GetSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._get_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.GetSettingsRequest( + name='name_value', + ) + +def test__get_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_settings] = mock_rpc + request = {} + client._get_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_settings] = mock_rpc + + request = {} + await client._get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.GetSettingsRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client._get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.GetSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test__get_settings_async_from_dict(): + await test__get_settings_async(request_type=dict) + +def test__get_settings_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client._get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_settings_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.GetSettingsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + await client._get_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__get_settings_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._get_settings( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test__get_settings_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._get_settings( + logging_config.GetSettingsRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test__get_settings_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._get_settings( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__get_settings_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._get_settings( + logging_config.GetSettingsRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.UpdateSettingsRequest, + dict, +]) +def test__update_settings(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + ) + response = client._update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True + + +def test__update_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.UpdateSettingsRequest( + name='name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._update_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.UpdateSettingsRequest( + name='name_value', + ) + +def test__update_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._update_settings in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._update_settings] = mock_rpc + request = {} + client._update_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_settings_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._update_settings in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._update_settings] = mock_rpc + + request = {} + await client._update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._update_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_settings_async(transport: str = 'grpc_asyncio', request_type=logging_config.UpdateSettingsRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + response = await client._update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.UpdateSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_config.Settings) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.kms_service_account_id == 'kms_service_account_id_value' + assert response.storage_location == 'storage_location_value' + assert response.disable_default_sink is True + + +@pytest.mark.asyncio +async def test__update_settings_async_from_dict(): + await test__update_settings_async(request_type=dict) + +def test__update_settings_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client._update_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__update_settings_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_config.UpdateSettingsRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + await client._update_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test__update_settings_flattened(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client._update_settings( + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + + +def test__update_settings_flattened_error(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + +@pytest.mark.asyncio +async def test__update_settings_flattened_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_config.Settings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._update_settings( + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].settings + mock_val = logging_config.Settings(name='name_value') + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=['paths_value']) + assert arg == mock_val + +@pytest.mark.asyncio +async def test__update_settings_flattened_error_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._update_settings( + logging_config.UpdateSettingsRequest(), + settings=logging_config.Settings(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_config.CopyLogEntriesRequest, + dict, +]) +def test__copy_log_entries(request_type, transport: str = 'grpc'): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name='operations/spam') + response = client._copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) + + +def test__copy_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_config.CopyLogEntriesRequest( + name='name_value', + filter='filter_value', + destination='destination_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._copy_log_entries), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._copy_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_config.CopyLogEntriesRequest( + name='name_value', + filter='filter_value', + destination='destination_value', + ) + +def test__copy_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._copy_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport._copy_log_entries] = mock_rpc + request = {} + client._copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client._copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__copy_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._copy_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._copy_log_entries] = mock_rpc + + request = {} + await client._copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client._copy_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__copy_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging_config.CopyLogEntriesRequest): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + response = await client._copy_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_config.CopyLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test__copy_log_entries_async_from_dict(): + await test__copy_log_entries_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BaseConfigServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BaseConfigServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BaseConfigServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BaseConfigServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BaseConfigServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = BaseConfigServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_buckets_empty_call_grpc(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_buckets), + '__call__') as call: + call.return_value = logging_config.ListBucketsResponse() + client.list_buckets(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
def test_get_bucket_empty_call_grpc():
    """An empty get_bucket call must send a default GetBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.get_bucket), '__call__') as stub:
        stub.return_value = logging_config.LogBucket()
        client.get_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetBucketRequest()


def test_create_bucket_async_empty_call_grpc():
    """An empty create_bucket_async call must send a default CreateBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.create_bucket_async), '__call__') as stub:
        stub.return_value = operations_pb2.Operation(name='operations/op')
        client.create_bucket_async(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateBucketRequest()


def test_update_bucket_async_empty_call_grpc():
    """An empty update_bucket_async call must send a default UpdateBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.update_bucket_async), '__call__') as stub:
        stub.return_value = operations_pb2.Operation(name='operations/op')
        client.update_bucket_async(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateBucketRequest()


def test_create_bucket_empty_call_grpc():
    """An empty create_bucket call must send a default CreateBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.create_bucket), '__call__') as stub:
        stub.return_value = logging_config.LogBucket()
        client.create_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateBucketRequest()


def test_update_bucket_empty_call_grpc():
    """An empty update_bucket call must send a default UpdateBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.update_bucket), '__call__') as stub:
        stub.return_value = logging_config.LogBucket()
        client.update_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateBucketRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test_delete_bucket_empty_call_grpc():
    """An empty delete_bucket call must send a default DeleteBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.delete_bucket), '__call__') as stub:
        stub.return_value = None
        client.delete_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.DeleteBucketRequest()


def test_undelete_bucket_empty_call_grpc():
    """An empty undelete_bucket call must send a default UndeleteBucketRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.undelete_bucket), '__call__') as stub:
        stub.return_value = None
        client.undelete_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UndeleteBucketRequest()


def test__list_views_empty_call_grpc():
    """An empty _list_views call must send a default ListViewsRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._list_views), '__call__') as stub:
        stub.return_value = logging_config.ListViewsResponse()
        client._list_views(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.ListViewsRequest()


def test__get_view_empty_call_grpc():
    """An empty _get_view call must send a default GetViewRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._get_view), '__call__') as stub:
        stub.return_value = logging_config.LogView()
        client._get_view(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetViewRequest()


def test__create_view_empty_call_grpc():
    """An empty _create_view call must send a default CreateViewRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._create_view), '__call__') as stub:
        stub.return_value = logging_config.LogView()
        client._create_view(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateViewRequest()


def test__update_view_empty_call_grpc():
    """An empty _update_view call must send a default UpdateViewRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._update_view), '__call__') as stub:
        stub.return_value = logging_config.LogView()
        client._update_view(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateViewRequest()


def test__delete_view_empty_call_grpc():
    """An empty _delete_view call must send a default DeleteViewRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._delete_view), '__call__') as stub:
        stub.return_value = None
        client._delete_view(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.DeleteViewRequest()


def test__list_sinks_empty_call_grpc():
    """An empty _list_sinks call must send a default ListSinksRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._list_sinks), '__call__') as stub:
        stub.return_value = logging_config.ListSinksResponse()
        client._list_sinks(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.ListSinksRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test__get_sink_empty_call_grpc():
    """An empty _get_sink call must send a default GetSinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._get_sink), '__call__') as stub:
        stub.return_value = logging_config.LogSink()
        client._get_sink(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetSinkRequest()


def test__create_sink_empty_call_grpc():
    """An empty _create_sink call must send a default CreateSinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._create_sink), '__call__') as stub:
        stub.return_value = logging_config.LogSink()
        client._create_sink(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateSinkRequest()


def test__update_sink_empty_call_grpc():
    """An empty _update_sink call must send a default UpdateSinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._update_sink), '__call__') as stub:
        stub.return_value = logging_config.LogSink()
        client._update_sink(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateSinkRequest()


def test__delete_sink_empty_call_grpc():
    """An empty _delete_sink call must send a default DeleteSinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._delete_sink), '__call__') as stub:
        stub.return_value = None
        client._delete_sink(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.DeleteSinkRequest()


def test__create_link_empty_call_grpc():
    """An empty _create_link call must send a default CreateLinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._create_link), '__call__') as stub:
        stub.return_value = operations_pb2.Operation(name='operations/op')
        client._create_link(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateLinkRequest()


def test__delete_link_empty_call_grpc():
    """An empty _delete_link call must send a default DeleteLinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._delete_link), '__call__') as stub:
        stub.return_value = operations_pb2.Operation(name='operations/op')
        client._delete_link(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.DeleteLinkRequest()


def test__list_links_empty_call_grpc():
    """An empty _list_links call must send a default ListLinksRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._list_links), '__call__') as stub:
        stub.return_value = logging_config.ListLinksResponse()
        client._list_links(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.ListLinksRequest()


def test__get_link_empty_call_grpc():
    """An empty _get_link call must send a default GetLinkRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._get_link), '__call__') as stub:
        stub.return_value = logging_config.Link()
        client._get_link(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetLinkRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
def test__list_exclusions_empty_call_grpc():
    """An empty _list_exclusions call must send a default ListExclusionsRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._list_exclusions), '__call__') as stub:
        stub.return_value = logging_config.ListExclusionsResponse()
        client._list_exclusions(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.ListExclusionsRequest()


def test__get_exclusion_empty_call_grpc():
    """An empty _get_exclusion call must send a default GetExclusionRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._get_exclusion), '__call__') as stub:
        stub.return_value = logging_config.LogExclusion()
        client._get_exclusion(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetExclusionRequest()


def test__create_exclusion_empty_call_grpc():
    """An empty _create_exclusion call must send a default CreateExclusionRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._create_exclusion), '__call__') as stub:
        stub.return_value = logging_config.LogExclusion()
        client._create_exclusion(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateExclusionRequest()


def test__update_exclusion_empty_call_grpc():
    """An empty _update_exclusion call must send a default UpdateExclusionRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._update_exclusion), '__call__') as stub:
        stub.return_value = logging_config.LogExclusion()
        client._update_exclusion(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateExclusionRequest()


def test__delete_exclusion_empty_call_grpc():
    """An empty _delete_exclusion call must send a default DeleteExclusionRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._delete_exclusion), '__call__') as stub:
        stub.return_value = None
        client._delete_exclusion(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.DeleteExclusionRequest()


def test__get_cmek_settings_empty_call_grpc():
    """An empty _get_cmek_settings call must send a default GetCmekSettingsRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._get_cmek_settings), '__call__') as stub:
        stub.return_value = logging_config.CmekSettings()
        client._get_cmek_settings(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetCmekSettingsRequest()


def test__update_cmek_settings_empty_call_grpc():
    """An empty _update_cmek_settings call must send a default UpdateCmekSettingsRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._update_cmek_settings), '__call__') as stub:
        stub.return_value = logging_config.CmekSettings()
        client._update_cmek_settings(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateCmekSettingsRequest()


def test__get_settings_empty_call_grpc():
    """An empty _get_settings call must send a default GetSettingsRequest."""
    client = BaseConfigServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport._get_settings), '__call__') as stub:
        stub.return_value = logging_config.Settings()
        client._get_settings(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetSettingsRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
request == None and no flattened fields passed, work. +def test__update_settings_empty_call_grpc(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + call.return_value = logging_config.Settings() + client._update_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test__copy_log_entries_empty_call_grpc(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._copy_log_entries), + '__call__') as call: + call.return_value = operations_pb2.Operation(name='operations/op') + client._copy_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BaseConfigServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio
async def test_list_buckets_empty_call_grpc_asyncio():
    """An empty async list_buckets call must send a default ListBucketsRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock that yields a canned
    # awaitable response, then fire an empty call.
    with mock.patch.object(type(client.transport.list_buckets), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListBucketsResponse(
            next_page_token='next_page_token_value',
        ))
        await client.list_buckets(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.ListBucketsRequest()


@pytest.mark.asyncio
async def test_get_bucket_empty_call_grpc_asyncio():
    """An empty async get_bucket call must send a default GetBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock that yields a canned
    # awaitable response, then fire an empty call.
    with mock.patch.object(type(client.transport.get_bucket), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
            name='name_value',
            description='description_value',
            retention_days=1512,
            locked=True,
            lifecycle_state=logging_config.LifecycleState.ACTIVE,
            analytics_enabled=True,
            restricted_fields=['restricted_fields_value'],
        ))
        await client.get_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.GetBucketRequest()


@pytest.mark.asyncio
async def test_create_bucket_async_empty_call_grpc_asyncio():
    """An empty async create_bucket_async call must send a default CreateBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock that yields a canned
    # awaitable operation, then fire an empty call.
    with mock.patch.object(type(client.transport.create_bucket_async), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        await client.create_bucket_async(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateBucketRequest()


@pytest.mark.asyncio
async def test_update_bucket_async_empty_call_grpc_asyncio():
    """An empty async update_bucket_async call must send a default UpdateBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock that yields a canned
    # awaitable operation, then fire an empty call.
    with mock.patch.object(type(client.transport.update_bucket_async), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        await client.update_bucket_async(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateBucketRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_create_bucket_empty_call_grpc_asyncio():
    """An empty async create_bucket call must send a default CreateBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock that yields a canned
    # awaitable response, then fire an empty call.
    with mock.patch.object(type(client.transport.create_bucket), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
            name='name_value',
            description='description_value',
            retention_days=1512,
            locked=True,
            lifecycle_state=logging_config.LifecycleState.ACTIVE,
            analytics_enabled=True,
            restricted_fields=['restricted_fields_value'],
        ))
        await client.create_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.CreateBucketRequest()


@pytest.mark.asyncio
async def test_update_bucket_empty_call_grpc_asyncio():
    """An empty async update_bucket call must send a default UpdateBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock that yields a canned
    # awaitable response, then fire an empty call.
    with mock.patch.object(type(client.transport.update_bucket), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogBucket(
            name='name_value',
            description='description_value',
            retention_days=1512,
            locked=True,
            lifecycle_state=logging_config.LifecycleState.ACTIVE,
            analytics_enabled=True,
            restricted_fields=['restricted_fields_value'],
        ))
        await client.update_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UpdateBucketRequest()


@pytest.mark.asyncio
async def test_delete_bucket_empty_call_grpc_asyncio():
    """An empty async delete_bucket call must send a default DeleteBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.delete_bucket), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.DeleteBucketRequest()


@pytest.mark.asyncio
async def test_undelete_bucket_empty_call_grpc_asyncio():
    """An empty async undelete_bucket call must send a default UndeleteBucketRequest."""
    client = BaseConfigServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Swap the transport-level callable for a mock and fire an empty call.
    with mock.patch.object(type(client.transport.undelete_bucket), '__call__') as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.undelete_bucket(request=None)

    # The stub must have been invoked with a default-constructed request.
    stub.assert_called()
    _, call_args, _ = stub.mock_calls[0]
    assert call_args[0] == logging_config.UndeleteBucketRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio +async def test__list_views_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._list_views), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( + next_page_token='next_page_token_value', + )) + await client._list_views(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListViewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__get_view_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client._get_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test__create_view_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._create_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client._create_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__update_view_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( + name='name_value', + description='description_value', + filter='filter_value', + )) + await client._update_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test__delete_view_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._delete_view), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_view(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteViewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__list_sinks_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._list_sinks), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( + next_page_token='next_page_token_value', + )) + await client._list_sinks(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListSinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test__get_sink_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client._get_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__create_sink_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._create_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client._create_sink(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__update_sink_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( + name='name_value', + destination='destination_value', + filter='filter_value', + description='description_value', + disabled=True, + output_version_format=logging_config.LogSink.VersionFormat.V2, + writer_identity='writer_identity_value', + include_children=True, + )) + await client._update_sink(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__delete_sink_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._delete_sink), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_sink(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteSinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__create_link_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._create_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client._create_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__delete_link_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._delete_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client._delete_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__list_links_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._list_links), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( + next_page_token='next_page_token_value', + )) + await client._list_links(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListLinksRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__get_link_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_link), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( + name='name_value', + description='description_value', + lifecycle_state=logging_config.LifecycleState.ACTIVE, + )) + await client._get_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__list_exclusions_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._list_exclusions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( + next_page_token='next_page_token_value', + )) + await client._list_exclusions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.ListExclusionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__get_exclusion_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client._get_exclusion(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__create_exclusion_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._create_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client._create_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CreateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__update_exclusion_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( + name='name_value', + description='description_value', + filter='filter_value', + disabled=True, + )) + await client._update_exclusion(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__delete_exclusion_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._delete_exclusion), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_exclusion(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.DeleteExclusionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__get_cmek_settings_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + await client._get_cmek_settings(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetCmekSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__update_cmek_settings_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_cmek_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_key_version_name='kms_key_version_name_value', + service_account_id='service_account_id_value', + )) + await client._update_cmek_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateCmekSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__get_settings_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_settings), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + await client._get_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.GetSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__update_settings_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_settings), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( + name='name_value', + kms_key_name='kms_key_name_value', + kms_service_account_id='kms_service_account_id_value', + storage_location='storage_location_value', + disable_default_sink=True, + )) + await client._update_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.UpdateSettingsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__copy_log_entries_empty_call_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport._copy_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name='operations/spam') + ) + await client._copy_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_config.CopyLogEntriesRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.ConfigServiceV2GrpcTransport, + ) + +def test_config_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ConfigServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_config_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.ConfigServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'list_buckets', + 'get_bucket', + 'create_bucket_async', + 'update_bucket_async', + 'create_bucket', + 'update_bucket', + 'delete_bucket', + 'undelete_bucket', + '_list_views', + '_get_view', + '_create_view', + '_update_view', + '_delete_view', + '_list_sinks', + '_get_sink', + '_create_sink', + '_update_sink', + '_delete_sink', + '_create_link', + '_delete_link', + '_list_links', + '_get_link', + '_list_exclusions', + '_get_exclusion', + '_create_exclusion', + '_update_exclusion', + '_delete_exclusion', + '_get_cmek_settings', + '_update_cmek_settings', + '_get_settings', + '_update_settings', + '_copy_log_entries', + 'get_operation', + 'cancel_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_config_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 
'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id="octopus", + ) + + +def test_config_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.config_service_v2.transports.ConfigServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ConfigServiceV2Transport() + adc.assert_called_once() + + +def test_config_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BaseConfigServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigServiceV2GrpcTransport, + transports.ConfigServiceV2GrpcAsyncIOTransport, + ], +) +def test_config_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.ConfigServiceV2GrpcTransport, grpc_helpers), + (transports.ConfigServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_config_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_config_service_v2_host_no_port(transport_name): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_config_service_v2_host_with_port(transport_name): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' + ) + +def test_config_service_v2_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.ConfigServiceV2GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_config_service_v2_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.ConfigServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) +def test_config_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_config_service_v2_grpc_lro_client(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_config_service_v2_grpc_lro_async_client(): + client = BaseConfigServiceV2AsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc_asyncio', + ) + transport = client.transport + + # Ensure that we have a api-core operations client. 
+ assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_cmek_settings_path(): + project = "squid" + expected = "projects/{project}/cmekSettings".format(project=project, ) + actual = BaseConfigServiceV2Client.cmek_settings_path(project) + assert expected == actual + + +def test_parse_cmek_settings_path(): + expected = { + "project": "clam", + } + path = BaseConfigServiceV2Client.cmek_settings_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_cmek_settings_path(path) + assert expected == actual + +def test_link_path(): + project = "whelk" + location = "octopus" + bucket = "oyster" + link = "nudibranch" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/links/{link}".format(project=project, location=location, bucket=bucket, link=link, ) + actual = BaseConfigServiceV2Client.link_path(project, location, bucket, link) + assert expected == actual + + +def test_parse_link_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "bucket": "winkle", + "link": "nautilus", + } + path = BaseConfigServiceV2Client.link_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BaseConfigServiceV2Client.parse_link_path(path) + assert expected == actual + +def test_log_bucket_path(): + project = "scallop" + location = "abalone" + bucket = "squid" + expected = "projects/{project}/locations/{location}/buckets/{bucket}".format(project=project, location=location, bucket=bucket, ) + actual = BaseConfigServiceV2Client.log_bucket_path(project, location, bucket) + assert expected == actual + + +def test_parse_log_bucket_path(): + expected = { + "project": "clam", + "location": "whelk", + "bucket": "octopus", + } + path = BaseConfigServiceV2Client.log_bucket_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_log_bucket_path(path) + assert expected == actual + +def test_log_exclusion_path(): + project = "oyster" + exclusion = "nudibranch" + expected = "projects/{project}/exclusions/{exclusion}".format(project=project, exclusion=exclusion, ) + actual = BaseConfigServiceV2Client.log_exclusion_path(project, exclusion) + assert expected == actual + + +def test_parse_log_exclusion_path(): + expected = { + "project": "cuttlefish", + "exclusion": "mussel", + } + path = BaseConfigServiceV2Client.log_exclusion_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_log_exclusion_path(path) + assert expected == actual + +def test_log_sink_path(): + project = "winkle" + sink = "nautilus" + expected = "projects/{project}/sinks/{sink}".format(project=project, sink=sink, ) + actual = BaseConfigServiceV2Client.log_sink_path(project, sink) + assert expected == actual + + +def test_parse_log_sink_path(): + expected = { + "project": "scallop", + "sink": "abalone", + } + path = BaseConfigServiceV2Client.log_sink_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BaseConfigServiceV2Client.parse_log_sink_path(path) + assert expected == actual + +def test_log_view_path(): + project = "squid" + location = "clam" + bucket = "whelk" + view = "octopus" + expected = "projects/{project}/locations/{location}/buckets/{bucket}/views/{view}".format(project=project, location=location, bucket=bucket, view=view, ) + actual = BaseConfigServiceV2Client.log_view_path(project, location, bucket, view) + assert expected == actual + + +def test_parse_log_view_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "bucket": "cuttlefish", + "view": "mussel", + } + path = BaseConfigServiceV2Client.log_view_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_log_view_path(path) + assert expected == actual + +def test_settings_path(): + project = "winkle" + expected = "projects/{project}/settings".format(project=project, ) + actual = BaseConfigServiceV2Client.settings_path(project) + assert expected == actual + + +def test_parse_settings_path(): + expected = { + "project": "nautilus", + } + path = BaseConfigServiceV2Client.settings_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_settings_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = BaseConfigServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = BaseConfigServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BaseConfigServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format(folder=folder, ) + actual = BaseConfigServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = BaseConfigServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format(organization=organization, ) + actual = BaseConfigServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = BaseConfigServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format(project=project, ) + actual = BaseConfigServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = BaseConfigServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BaseConfigServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = BaseConfigServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = BaseConfigServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = BaseConfigServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.ConfigServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = BaseConfigServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_cancel_operation(transport: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = BaseConfigServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = BaseConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (BaseConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), + (BaseConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py new file mode 100755 index 000000000000..bd8dd4158030 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -0,0 +1,4107 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +try: + from google.auth.aio import credentials as ga_credentials_async + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api import monitored_resource_pb2 # type: ignore +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2AsyncClient +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.services.logging_service_v2 import pagers +from google.cloud.logging_v2.services.logging_service_v2 import transports +from google.cloud.logging_v2.types import log_entry +from google.cloud.logging_v2.types import logging +from 
google.logging.type import http_request_pb2 # type: ignore +from google.logging.type import log_severity_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
def modify_default_endpoint_template(client):
    """Return a sentinel endpoint template when the client's default is localhost.

    Mirrors ``modify_default_endpoint``: substituting a distinct template lets
    the endpoint-resolution tests tell the derived endpoint apart from the
    stock default during mTLS testing.
    """
    template = client._DEFAULT_ENDPOINT_TEMPLATE
    if "localhost" in template:
        return "test.{UNIVERSE_DOMAIN}"
    return template


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint rewrites only ``*.googleapis.com`` hosts."""
    cases = [
        # (input endpoint, expected mTLS endpoint)
        (None, None),
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        # Non-Google hosts pass through untouched.
        ("api.example.com", "api.example.com"),
    ]
    for endpoint, expected in cases:
        result = LoggingServiceV2Client._get_default_mtls_endpoint(endpoint)
        if expected is None:
            assert result is None
        else:
            assert result == expected

def test__read_environment_variables():
    """The client reads cert, mTLS-mode, and universe settings from the env."""
    read = LoggingServiceV2Client._read_environment_variables

    # Defaults with a clean environment.
    assert read() == (False, "auto", None)

    # GOOGLE_API_USE_CLIENT_CERTIFICATE toggles the first element.
    for value, expected_cert in (("true", True), ("false", False)):
        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": value}):
            assert read() == (expected_cert, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            read()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # GOOGLE_API_USE_MTLS_ENDPOINT passes through for each supported mode.
    for value in ("never", "always", "auto"):
        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": value}):
            assert read() == (False, value, None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            read()
    assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # GOOGLE_CLOUD_UNIVERSE_DOMAIN surfaces as the third element.
    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert read() == (False, "auto", "foo.com")

def test__get_client_cert_source():
    """A provided cert source wins; the ADC default is used only when enabled."""
    provided_source = mock.Mock()
    default_source = mock.Mock()

    # Client certificates disabled: nothing is returned, even if provided.
    assert LoggingServiceV2Client._get_client_cert_source(None, False) is None
    assert LoggingServiceV2Client._get_client_cert_source(provided_source, False) is None

    # Enabled: the explicitly provided source is honored.
    assert LoggingServiceV2Client._get_client_cert_source(provided_source, True) == provided_source

    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=default_source):
            # No explicit source: fall back to the ADC default cert source.
            assert LoggingServiceV2Client._get_client_cert_source(None, True) is default_source
            # An explicit source still takes precedence over the ADC default
            # (any truthy flag enables the lookup).
            assert LoggingServiceV2Client._get_client_cert_source(provided_source, "true") is provided_source

@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client))
@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient))
def test__get_api_endpoint():
    """Endpoint resolution honors overrides, mTLS mode, and universe domain."""
    api_override = "foo.com"
    cert_source = mock.Mock()
    default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE
    default_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)
    resolve = LoggingServiceV2Client._get_api_endpoint

    # An explicit endpoint override always wins.
    assert resolve(api_override, cert_source, default_universe, "always") == api_override
    # "auto" with a client cert source upgrades to the mTLS endpoint.
    assert resolve(None, cert_source, default_universe, "auto") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
    # "auto" without a cert source stays on the plain default endpoint.
    assert resolve(None, None, default_universe, "auto") == default_endpoint
    # "always" forces the mTLS endpoint regardless of cert source.
    assert resolve(None, None, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert resolve(None, cert_source, default_universe, "always") == LoggingServiceV2Client.DEFAULT_MTLS_ENDPOINT
    # "never" uses the plain endpoint for whichever universe is in effect.
    assert resolve(None, None, mock_universe, "never") == mock_endpoint
    assert resolve(None, None, default_universe, "never") == default_endpoint

    # mTLS outside the default (googleapis.com) universe is rejected.
    with pytest.raises(MutualTLSChannelError) as excinfo:
        resolve(None, cert_source, mock_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert LoggingServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert LoggingServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert LoggingServiceV2Client._get_universe_domain(None, None) == LoggingServiceV2Client._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = LoggingServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + +@pytest.mark.parametrize("client_class,transport_name", [ + 
(LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), +]) +def test_logging_service_v2_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.LoggingServiceV2GrpcTransport, "grpc"), + (transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (LoggingServiceV2Client, "grpc"), + (LoggingServiceV2AsyncClient, "grpc_asyncio"), +]) +def test_logging_service_v2_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + 
assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) + + +def test_logging_service_v2_client_get_transport_class(): + transport = LoggingServiceV2Client.get_transport_class() + available_transports = [ + transports.LoggingServiceV2GrpcTransport, + ] + assert transport in available_transports + + transport = LoggingServiceV2Client.get_transport_class("grpc") + assert transport == transports.LoggingServiceV2GrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) +def test_logging_service_v2_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(LoggingServiceV2Client, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + 
api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "true"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"), + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", "false"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_logging_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, LoggingServiceV2AsyncClient +]) +@mock.patch.object(LoggingServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(LoggingServiceV2AsyncClient)) +def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + LoggingServiceV2Client, LoggingServiceV2AsyncClient +]) +@mock.patch.object(LoggingServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2Client)) +@mock.patch.object(LoggingServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(LoggingServiceV2AsyncClient)) +def test_logging_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = LoggingServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = 
LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = LoggingServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc"), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_logging_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_logging_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_logging_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = LoggingServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport, "grpc", grpc_helpers), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_logging_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + logging.DeleteLogRequest, + dict, +]) +def test_delete_log(request_type, transport: str = 'grpc'): + client = 
LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging.DeleteLogRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_log_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.DeleteLogRequest( + log_name='log_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.delete_log(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.DeleteLogRequest( + log_name='log_name_value', + ) + +def test_delete_log_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_log in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.delete_log] = mock_rpc + request = {} + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_log_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.delete_log in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.delete_log] = mock_rpc + + request = {} + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_log(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_delete_log_async(transport: str = 'grpc_asyncio', request_type=logging.DeleteLogRequest): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging.DeleteLogRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_log_async_from_dict(): + await test_delete_log_async(request_type=dict) + +def test_delete_log_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.DeleteLogRequest() + + request.log_name = 'log_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = None + client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'log_name=log_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_log_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.DeleteLogRequest() + + request.log_name = 'log_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_log(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'log_name=log_name_value', + ) in kw['metadata'] + + +def test_delete_log_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_log( + log_name='log_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val + + +def test_delete_log_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_log( + logging.DeleteLogRequest(), + log_name='log_name_value', + ) + +@pytest.mark.asyncio +async def test_delete_log_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_log( + log_name='log_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_log_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_log( + logging.DeleteLogRequest(), + log_name='log_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging.WriteLogEntriesRequest, + dict, +]) +def test_write_log_entries(request_type, transport: str = 'grpc'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse( + ) + response = client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging.WriteLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging.WriteLogEntriesResponse) + + +def test_write_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.WriteLogEntriesRequest( + log_name='log_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.write_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.WriteLogEntriesRequest( + log_name='log_name_value', + ) + +def test_write_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.write_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.write_log_entries] = mock_rpc + request = {} + client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_write_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.write_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.write_log_entries] = mock_rpc + + request = {} + await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.write_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_write_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.WriteLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse( + )) + response = await client.write_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging.WriteLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging.WriteLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_write_log_entries_async_from_dict(): + await test_write_log_entries_async(request_type=dict) + + +def test_write_log_entries_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.write_log_entries( + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + assert arg == mock_val + arg = args[0].labels + mock_val = {'key_value': 'value_value'} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name='log_name_value')] + assert arg == mock_val + + +def test_write_log_entries_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.write_log_entries( + logging.WriteLogEntriesRequest(), + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + +@pytest.mark.asyncio +async def test_write_log_entries_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.WriteLogEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.WriteLogEntriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.write_log_entries( + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].log_name + mock_val = 'log_name_value' + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type='type_value') + assert arg == mock_val + arg = args[0].labels + mock_val = {'key_value': 'value_value'} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name='log_name_value')] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_write_log_entries_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.write_log_entries( + logging.WriteLogEntriesRequest(), + log_name='log_name_value', + resource=monitored_resource_pb2.MonitoredResource(type='type_value'), + labels={'key_value': 'value_value'}, + entries=[log_entry.LogEntry(log_name='log_name_value')], + ) + + +@pytest.mark.parametrize("request_type", [ + logging.ListLogEntriesRequest, + dict, +]) +def test_list_log_entries(request_type, transport: str = 'grpc'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging.ListLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogEntriesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_log_entries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogEntriesRequest( + filter='filter_value', + order_by='order_by_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_log_entries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogEntriesRequest( + filter='filter_value', + order_by='order_by_value', + page_token='page_token_value', + ) + +def test_list_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_log_entries] = mock_rpc + request = {} + client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_log_entries] = mock_rpc + + request = {} + await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging.ListLogEntriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogEntriesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_log_entries_async_from_dict(): + await test_list_log_entries_async(request_type=dict) + + +def test_list_log_entries_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_log_entries( + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].resource_names + mock_val = ['resource_names_value'] + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + arg = args[0].order_by + mock_val = 'order_by_value' + assert arg == mock_val + + +def test_list_log_entries_flattened_error(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_log_entries( + logging.ListLogEntriesRequest(), + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + +@pytest.mark.asyncio +async def test_list_log_entries_flattened_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogEntriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogEntriesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_log_entries( + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].resource_names + mock_val = ['resource_names_value'] + assert arg == mock_val + arg = args[0].filter + mock_val = 'filter_value' + assert arg == mock_val + arg = args[0].order_by + mock_val = 'order_by_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_log_entries_flattened_error_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_log_entries( + logging.ListLogEntriesRequest(), + resource_names=['resource_names_value'], + filter='filter_value', + order_by='order_by_value', + ) + + +def test_list_log_entries_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_log_entries(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in results) +def test_list_log_entries_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + pages = list(client.list_log_entries(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_log_entries_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_log_entries(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, log_entry.LogEntry) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_log_entries_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + next_page_token='abc', + ), + logging.ListLogEntriesResponse( + entries=[], + next_page_token='def', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + ], + next_page_token='ghi', + ), + logging.ListLogEntriesResponse( + entries=[ + log_entry.LogEntry(), + log_entry.LogEntry(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_log_entries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging.ListMonitoredResourceDescriptorsRequest, + dict, +]) +def test_list_monitored_resource_descriptors(request_type, transport: str = 'grpc'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging.ListMonitoredResourceDescriptorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListMonitoredResourceDescriptorsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_monitored_resource_descriptors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListMonitoredResourceDescriptorsRequest( + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_monitored_resource_descriptors(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListMonitoredResourceDescriptorsRequest( + page_token='page_token_value', + ) + +def test_list_monitored_resource_descriptors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_monitored_resource_descriptors in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_monitored_resource_descriptors] = mock_rpc + request = {} + client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_monitored_resource_descriptors in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_monitored_resource_descriptors] = mock_rpc + + request = {} + await client.list_monitored_resource_descriptors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_monitored_resource_descriptors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async(transport: str = 'grpc_asyncio', request_type=logging.ListMonitoredResourceDescriptorsRequest): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_monitored_resource_descriptors),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_monitored_resource_descriptors(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging.ListMonitoredResourceDescriptorsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListMonitoredResourceDescriptorsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_monitored_resource_descriptors_async_from_dict():
+    await test_list_monitored_resource_descriptors_async(request_type=dict)
+
+
+def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"):
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_monitored_resource_descriptors),
+            '__call__') as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.list_monitored_resource_descriptors(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in results) +def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = list(client.list_monitored_resource_descriptors(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_monitored_resource_descriptors(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, monitored_resource_pb2.MonitoredResourceDescriptor) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='abc', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[], + next_page_token='def', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + next_page_token='ghi', + ), + logging.ListMonitoredResourceDescriptorsResponse( + resource_descriptors=[ + monitored_resource_pb2.MonitoredResourceDescriptor(), + monitored_resource_pb2.MonitoredResourceDescriptor(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_monitored_resource_descriptors(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging.ListLogsRequest, + dict, +]) +def test_list_logs(request_type, transport: str = 'grpc'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + ) + response = client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging.ListLogsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListLogsPager) + assert response.log_names == ['log_names_value'] + assert response.next_page_token == 'next_page_token_value' + + +def test_list_logs_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging.ListLogsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client.list_logs(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging.ListLogsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test_list_logs_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_logs in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.list_logs] = mock_rpc + request = {} + client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_logs_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.list_logs in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.list_logs] = mock_rpc + + request = {} + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_logs(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_list_logs_async(transport: str = 'grpc_asyncio', request_type=logging.ListLogsRequest): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse(
+            log_names=['log_names_value'],
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        request = logging.ListLogsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListLogsAsyncPager)
+    assert response.log_names == ['log_names_value']
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_logs_async_from_dict():
+    await test_list_logs_async(request_type=dict)
+
+def test_list_logs_field_headers():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = logging.ListLogsRequest()
+
+    request.parent = 'parent_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        call.return_value = logging.ListLogsResponse()
+        client.list_logs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_logs_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging.ListLogsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse()) + await client.list_logs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_logs_flattened(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging.ListLogsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_logs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+
+def test_list_logs_flattened_error():
+    client = LoggingServiceV2Client(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_logs(
+            logging.ListLogsRequest(),
+            parent='parent_value',
+        )
+
+@pytest.mark.asyncio
+async def test_list_logs_flattened_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_logs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_logs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = 'parent_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_logs_flattened_error_async():
+    client = LoggingServiceV2AsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_logs( + logging.ListLogsRequest(), + parent='parent_value', + ) + + +def test_list_logs_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_logs(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_list_logs_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_logs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_logs_async_pager(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_logs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_logs_async_pages(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging.ListLogsResponse( + log_names=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + logging.ListLogsResponse( + log_names=[], + next_page_token='def', + ), + logging.ListLogsResponse( + log_names=[ + str(), + ], + next_page_token='ghi', + ), + logging.ListLogsResponse( + log_names=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_logs(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging.TailLogEntriesRequest, + dict, +]) +def test_tail_log_entries(request_type, transport: str = 'grpc'): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.tail_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([logging.TailLogEntriesResponse()]) + response = client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ for message in response: + assert isinstance(message, logging.TailLogEntriesResponse) + + +def test_tail_log_entries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.tail_log_entries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport.tail_log_entries] = mock_rpc + request = [{}] + client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_tail_log_entries_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport.tail_log_entries in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport.tail_log_entries] = mock_rpc + + request = [{}] + await client.tail_log_entries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.tail_log_entries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test_tail_log_entries_async(transport: str = 'grpc_asyncio', request_type=logging.TailLogEntriesRequest): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.tail_log_entries), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[logging.TailLogEntriesResponse()]) + response = await client.tail_log_entries(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, logging.TailLogEntriesResponse) + + +@pytest.mark.asyncio +async def test_tail_log_entries_async_from_dict(): + await test_tail_log_entries_async(request_type=dict) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = LoggingServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.LoggingServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = LoggingServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_log_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_log), + '__call__') as call: + call.return_value = None + client.delete_log(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.DeleteLogRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_write_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.write_log_entries), + '__call__') as call: + call.return_value = logging.WriteLogEntriesResponse() + client.write_log_entries(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.WriteLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_log_entries_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_log_entries), + '__call__') as call: + call.return_value = logging.ListLogEntriesResponse() + client.list_log_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_monitored_resource_descriptors_empty_call_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + call.return_value = logging.ListMonitoredResourceDescriptorsResponse() + client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
def test_list_logs_empty_call_grpc():
    """ListLogs over gRPC must accept request=None and send a default request."""
    client = LoggingServiceV2Client(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc",
    )

    # Patch the transport-level callable and invoke the RPC with no request.
    with mock.patch.object(
            type(client.transport.list_logs),
            '__call__') as stub:
        stub.return_value = logging.ListLogsResponse()
        client.list_logs(request=None)

        # The stub must have been invoked with a default-constructed request.
        stub.assert_called()
        _, stub_args, _ = stub.mock_calls[0]
        assert stub_args[0] == logging.ListLogsRequest()


def test_transport_kind_grpc_asyncio():
    """The async transport class must report kind == 'grpc_asyncio'."""
    transport_cls = LoggingServiceV2AsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"


def test_initialize_client_w_grpc_asyncio():
    """An async client can be constructed with the grpc_asyncio transport."""
    client = LoggingServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )
    assert client is not None


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_delete_log_empty_call_grpc_asyncio():
    """DeleteLog over grpc_asyncio must accept request=None."""
    client = LoggingServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Patch the transport-level callable and invoke the RPC with no request.
    with mock.patch.object(
            type(client.transport.delete_log),
            '__call__') as stub:
        # Designate an appropriate return value for the call.
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_log(request=None)

        # The stub must have been invoked with a default-constructed request.
        stub.assert_called()
        _, stub_args, _ = stub.mock_calls[0]
        assert stub_args[0] == logging.DeleteLogRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_write_log_entries_empty_call_grpc_asyncio():
    """WriteLogEntries over grpc_asyncio must accept request=None."""
    client = LoggingServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Patch the transport-level callable and invoke the RPC with no request.
    with mock.patch.object(
            type(client.transport.write_log_entries),
            '__call__') as stub:
        # Designate an appropriate return value for the call.
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging.WriteLogEntriesResponse())
        await client.write_log_entries(request=None)

        # The stub must have been invoked with a default-constructed request.
        stub.assert_called()
        _, stub_args, _ = stub.mock_calls[0]
        assert stub_args[0] == logging.WriteLogEntriesRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_list_log_entries_empty_call_grpc_asyncio():
    """ListLogEntries over grpc_asyncio must accept request=None."""
    client = LoggingServiceV2AsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Patch the transport-level callable and invoke the RPC with no request.
    with mock.patch.object(
            type(client.transport.list_log_entries),
            '__call__') as stub:
        # Designate an appropriate return value for the call.
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            logging.ListLogEntriesResponse(
                next_page_token='next_page_token_value',
            ))
        await client.list_log_entries(request=None)

        # The stub must have been invoked with a default-constructed request.
        stub.assert_called()
        _, stub_args, _ = stub.mock_calls[0]
        assert stub_args[0] == logging.ListLogEntriesRequest()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio +async def test_list_monitored_resource_descriptors_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_monitored_resource_descriptors), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListMonitoredResourceDescriptorsResponse( + next_page_token='next_page_token_value', + )) + await client.list_monitored_resource_descriptors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListMonitoredResourceDescriptorsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_logs_empty_call_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_logs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging.ListLogsResponse( + log_names=['log_names_value'], + next_page_token='next_page_token_value', + )) + await client.list_logs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging.ListLogsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.LoggingServiceV2GrpcTransport, + ) + +def test_logging_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_logging_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.LoggingServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'delete_log', + 'write_log_entries', + 'list_log_entries', + 'list_monitored_resource_descriptors', + 'list_logs', + 'tail_log_entries', + 'get_operation', + 'cancel_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_logging_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +def test_logging_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.logging_service_v2.transports.LoggingServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.LoggingServiceV2Transport() + adc.assert_called_once() + + +def test_logging_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + LoggingServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.LoggingServiceV2GrpcTransport, + transports.LoggingServiceV2GrpcAsyncIOTransport, + ], +) +def test_logging_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + 
type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.LoggingServiceV2GrpcTransport, grpc_helpers), + (transports.LoggingServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_logging_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_logging_service_v2_host_no_port(transport_name): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_logging_service_v2_host_with_port(transport_name): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' + ) + +def test_logging_service_v2_grpc_transport_channel(): + channel = 
grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.LoggingServiceV2GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_logging_service_v2_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.LoggingServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + 
certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) +def test_logging_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_path(): + project = "squid" + log = "clam" + expected = "projects/{project}/logs/{log}".format(project=project, log=log, ) + actual = 
LoggingServiceV2Client.log_path(project, log) + assert expected == actual + + +def test_parse_log_path(): + expected = { + "project": "whelk", + "log": "octopus", + } + path = LoggingServiceV2Client.log_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_log_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = LoggingServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = LoggingServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = LoggingServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = LoggingServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = LoggingServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = LoggingServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LoggingServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = LoggingServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = LoggingServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = LoggingServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = LoggingServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = LoggingServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = LoggingServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.LoggingServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = LoggingServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_cancel_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = LoggingServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py new file mode 100755 index 000000000000..ba8812f5792f --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -0,0 +1,3911 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# try/except added for compatibility with python < 3.8
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    import mock

import grpc
from grpc.experimental import aio
import json
import math
import pytest
from google.api_core import api_core_version
from proto.marshal.rules.dates import DurationRule, TimestampRule
from proto.marshal.rules import wrappers

try:
    from google.auth.aio import credentials as ga_credentials_async
    HAS_GOOGLE_AUTH_AIO = True
except ImportError:  # pragma: NO COVER
    HAS_GOOGLE_AUTH_AIO = False

from google.api import distribution_pb2  # type: ignore
from google.api import label_pb2  # type: ignore
from google.api import launch_stage_pb2  # type: ignore
from google.api import metric_pb2  # type: ignore
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import path_template
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.logging_v2.services.metrics_service_v2 import BaseMetricsServiceV2AsyncClient
from google.cloud.logging_v2.services.metrics_service_v2 import BaseMetricsServiceV2Client
from google.cloud.logging_v2.services.metrics_service_v2 import pagers
from google.cloud.logging_v2.services.metrics_service_v2 import transports
from google.cloud.logging_v2.types import logging_metrics
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account
from google.protobuf import duration_pb2  # type: ignore
from google.protobuf import timestamp_pb2  # type: ignore
import google.auth



# Fake credential metadata used by the _add_cred_info_for_auth_errors tests below.
CRED_INFO_JSON = {
    "credential_source": "/path/to/file",
    "credential_type": "service account credentials",
    "principal": "service-account@example.com",
}
CRED_INFO_STRING = json.dumps(CRED_INFO_JSON)


async def mock_async_gen(data, chunk_size=1):
    """Yield *data* as utf-8 encoded chunks of *chunk_size* characters."""
    for i in range(0, len(data)):  # pragma: NO COVER
        chunk = data[i : i + chunk_size]
        yield chunk.encode("utf-8")

def client_cert_source_callback():
    """Return a dummy (cert, key) pair for mTLS tests."""
    return b"cert bytes", b"key bytes"

# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded.
# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107.
def async_anonymous_credentials():
    if HAS_GOOGLE_AUTH_AIO:
        return ga_credentials_async.AnonymousCredentials()
    return ga_credentials.AnonymousCredentials()

# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT

# If default endpoint template is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint template so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint_template(client):
    """Return a non-default endpoint template when the client's is localhost."""
    return "test.{UNIVERSE_DOMAIN}" if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) else client._DEFAULT_ENDPOINT_TEMPLATE


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint rewrites googleapis.com hosts to their mTLS form."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(None) is None
    assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
    assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint
    assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint
    assert BaseMetricsServiceV2Client._get_default_mtls_endpoint(non_googleapi) == non_googleapi

def test__read_environment_variables():
    """_read_environment_variables reflects the three GOOGLE_* env vars and rejects bad values."""
    assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        assert BaseMetricsServiceV2Client._read_environment_variables() == (True, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
        assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            BaseMetricsServiceV2Client._read_environment_variables()
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "never", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "always", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}):
        assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None)

    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            BaseMetricsServiceV2Client._read_environment_variables()
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}):
        assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", "foo.com")

def test__get_client_cert_source():
    """_get_client_cert_source prefers the provided source, falling back to ADC's."""
    mock_provided_cert_source = mock.Mock()
    mock_default_cert_source = mock.Mock()

    assert BaseMetricsServiceV2Client._get_client_cert_source(None, False) is None
    assert BaseMetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, False) is None
    assert BaseMetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, True) == mock_provided_cert_source

    with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
        with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_default_cert_source):
            assert BaseMetricsServiceV2Client._get_client_cert_source(None, True) is mock_default_cert_source
            assert BaseMetricsServiceV2Client._get_client_cert_source(mock_provided_cert_source, "true") is mock_provided_cert_source

@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client))
@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient))
def test__get_api_endpoint():
    """_get_api_endpoint resolves override, mTLS mode, and universe into a host."""
    api_override = "foo.com"
    mock_client_cert_source = mock.Mock()
    default_universe = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE
    default_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe)
    mock_universe = "bar.com"
    mock_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe)

    assert BaseMetricsServiceV2Client._get_api_endpoint(api_override, mock_client_cert_source, default_universe, "always") == api_override
    assert BaseMetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "auto") == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "auto") == default_endpoint
    assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "always") == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert BaseMetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, default_universe, "always") == BaseMetricsServiceV2Client.DEFAULT_MTLS_ENDPOINT
    assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, mock_universe, "never") == mock_endpoint
    assert BaseMetricsServiceV2Client._get_api_endpoint(None, None, default_universe, "never") == default_endpoint

    with pytest.raises(MutualTLSChannelError) as excinfo:
        BaseMetricsServiceV2Client._get_api_endpoint(None, mock_client_cert_source, mock_universe, "auto")
    assert str(excinfo.value) == "mTLS is not supported in any universe other than googleapis.com."
+ + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert BaseMetricsServiceV2Client._get_universe_domain(client_universe_domain, universe_domain_env) == client_universe_domain + assert BaseMetricsServiceV2Client._get_universe_domain(None, universe_domain_env) == universe_domain_env + assert BaseMetricsServiceV2Client._get_universe_domain(None, None) == BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + + with pytest.raises(ValueError) as excinfo: + BaseMetricsServiceV2Client._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + +@pytest.mark.parametrize("error_code,cred_info_json,show_cred_info", [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False) +]) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BaseMetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + +@pytest.mark.parametrize("error_code", [401,403,404,500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = BaseMetricsServiceV2Client(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + 
@pytest.mark.parametrize("client_class,transport_name", [
    (BaseMetricsServiceV2Client, "grpc"),
    (BaseMetricsServiceV2AsyncClient, "grpc_asyncio"),
])
def test_base_metrics_service_v2_client_from_service_account_info(client_class, transport_name):
    """from_service_account_info builds a client bound to the factory's credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info, transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'logging.googleapis.com:443'
        )


@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.MetricsServiceV2GrpcTransport, "grpc"),
    (transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_base_metrics_service_v2_client_service_account_always_use_jwt(transport_class, transport_name):
    """The transport toggles self-signed JWT only when always_use_jwt_access=True."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)

    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()


@pytest.mark.parametrize("client_class,transport_name", [
    (BaseMetricsServiceV2Client, "grpc"),
    (BaseMetricsServiceV2AsyncClient, "grpc_asyncio"),
])
def test_base_metrics_service_v2_client_from_service_account_file(client_class, transport_name):
    """Both file-based factory methods bind the factory's credentials to the client."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)

        assert client.transport._host == (
            'logging.googleapis.com:443'
        )


def test_base_metrics_service_v2_client_get_transport_class():
    """get_transport_class returns the gRPC transport by default and by name."""
    transport = BaseMetricsServiceV2Client.get_transport_class()
    available_transports = [
        transports.MetricsServiceV2GrpcTransport,
    ]
    assert transport in available_transports

    transport = BaseMetricsServiceV2Client.get_transport_class("grpc")
    assert transport == transports.MetricsServiceV2GrpcTransport


@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"),
    (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client))
@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient))
def test_base_metrics_service_v2_client_client_options(client_class, transport_class, transport_name):
    """Client options (endpoint, mTLS env, quota project, audience) reach the transport."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(BaseMetricsServiceV2Client, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(BaseMetricsServiceV2Client, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError) as excinfo:
            client = client_class(transport=transport_name)
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError) as excinfo:
            client = client_class(transport=transport_name)
        assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience=None,
        )
    # Check the case api_endpoint is provided
    options = client_options.ClientOptions(api_audience="https://language.googleapis.com")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options, transport=transport_name)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
            api_audience="https://language.googleapis.com"
        )

@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "true"),
    (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", "false"),
    (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client))
@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_base_metrics_service_v2_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """Endpoint auto-switches to mTLS when a client cert exists and the env var allows it."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options, transport=transport_name)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
                api_audience=None,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    if use_client_cert_env == "false":
                        expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                        api_audience=None,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE),
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                    api_audience=None,
                )


@pytest.mark.parametrize("client_class", [
    BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient
])
@mock.patch.object(BaseMetricsServiceV2Client, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseMetricsServiceV2Client))
@mock.patch.object(BaseMetricsServiceV2AsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(BaseMetricsServiceV2AsyncClient))
def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_class):
    mock_client_cert_source = mock.Mock()

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
        mock_api_endpoint = "foo"
        options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint)
        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options)
        assert api_endpoint == mock_api_endpoint
        assert cert_source == mock_client_cert_source

    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + +@pytest.mark.parametrize("client_class", [ + BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient +]) +@mock.patch.object(BaseMetricsServiceV2Client, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2Client)) +@mock.patch.object(BaseMetricsServiceV2AsyncClient, "_DEFAULT_ENDPOINT_TEMPLATE", modify_default_endpoint_template(BaseMetricsServiceV2AsyncClient)) +def test_base_metrics_service_v2_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BaseMetricsServiceV2Client._DEFAULT_UNIVERSE + default_endpoint = 
BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=default_universe) + mock_universe = "bar.com" + mock_endpoint = BaseMetricsServiceV2Client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=mock_universe) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch("google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"): + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=api_override) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + else: + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == (mock_endpoint if universe_exists else default_endpoint) + assert client.universe_domain == (mock_universe if universe_exists else default_universe) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(client_options=options, credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc"), + (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_base_metrics_service_v2_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), + (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_base_metrics_service_v2_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_base_metrics_service_v2_client_client_options_from_dict(): + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2GrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = BaseMetricsServiceV2Client( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport, "grpc", grpc_helpers), + (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_base_metrics_service_v2_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=None, + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.ListLogMetricsRequest, + dict, +]) +def test__list_log_metrics(request_type, transport: str = 'grpc'): + 
client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + ) + response = client._list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers._ListLogMetricsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test__list_log_metrics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.ListLogMetricsRequest( + parent='parent_value', + page_token='page_token_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._list_log_metrics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.ListLogMetricsRequest( + parent='parent_value', + page_token='page_token_value', + ) + +def test__list_log_metrics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._list_log_metrics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._list_log_metrics] = mock_rpc + request = {} + client._list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_log_metrics_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._list_log_metrics in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._list_log_metrics] = mock_rpc + + request = {} + await client._list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._list_log_metrics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.ListLogMetricsRequest): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + )) + response = await client._list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_metrics.ListLogMetricsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers._ListLogMetricsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test__list_log_metrics_async_from_dict(): + await test__list_log_metrics_async(request_type=dict) + +def test__list_log_metrics_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.ListLogMetricsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client._list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__list_log_metrics_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.ListLogMetricsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + await client._list_log_metrics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__list_log_metrics_flattened(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._list_log_metrics( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test__list_log_metrics_flattened_error(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._list_log_metrics( + logging_metrics.ListLogMetricsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test__list_log_metrics_flattened_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.ListLogMetricsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._list_log_metrics( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__list_log_metrics_flattened_error_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._list_log_metrics( + logging_metrics.ListLogMetricsRequest(), + parent='parent_value', + ) + + +def test__list_log_metrics_pager(transport_name: str = "grpc"): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client._list_log_metrics(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, logging_metrics.LogMetric) + for i in results) +def test__list_log_metrics_pages(transport_name: str = "grpc"): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + pages = list(client._list_log_metrics(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test__list_log_metrics_async_pager(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + async_pager = await client._list_log_metrics(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, logging_metrics.LogMetric) + for i in responses) + + +@pytest.mark.asyncio +async def test__list_log_metrics_async_pages(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + next_page_token='abc', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[], + next_page_token='def', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + ], + next_page_token='ghi', + ), + logging_metrics.ListLogMetricsResponse( + metrics=[ + logging_metrics.LogMetric(), + logging_metrics.LogMetric(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client._list_log_metrics(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + logging_metrics.GetLogMetricRequest, + dict, +]) +def test__get_log_metric(request_type, transport: str = 'grpc'): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + response = client._get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test__get_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.GetLogMetricRequest( + metric_name='metric_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._get_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.GetLogMetricRequest( + metric_name='metric_name_value', + ) + +def test__get_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._get_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._get_log_metric] = mock_rpc + request = {} + client._get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._get_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._get_log_metric] = mock_rpc + + request = {} + await client._get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._get_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.GetLogMetricRequest): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client._get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_metrics.GetLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test__get_log_metric_async_from_dict(): + await test__get_log_metric_async(request_type=dict) + +def test__get_log_metric_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + + request.metric_name = 'metric_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client._get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__get_log_metric_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.GetLogMetricRequest() + + request.metric_name = 'metric_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client._get_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name_value', + ) in kw['metadata'] + + +def test__get_log_metric_flattened(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._get_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + + +def test__get_log_metric_flattened_error(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._get_log_metric( + logging_metrics.GetLogMetricRequest(), + metric_name='metric_name_value', + ) + +@pytest.mark.asyncio +async def test__get_log_metric_flattened_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._get_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__get_log_metric_flattened_error_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._get_log_metric( + logging_metrics.GetLogMetricRequest(), + metric_name='metric_name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.CreateLogMetricRequest, + dict, +]) +def test__create_log_metric(request_type, transport: str = 'grpc'): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + response = client._create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test__create_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.CreateLogMetricRequest( + parent='parent_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._create_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.CreateLogMetricRequest( + parent='parent_value', + ) + +def test__create_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._create_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._create_log_metric] = mock_rpc + request = {} + client._create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._create_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._create_log_metric] = mock_rpc + + request = {} + await client._create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._create_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.CreateLogMetricRequest): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client._create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_metrics.CreateLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test__create_log_metric_async_from_dict(): + await test__create_log_metric_async(request_type=dict) + +def test__create_log_metric_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.CreateLogMetricRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client._create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__create_log_metric_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.CreateLogMetricRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client._create_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test__create_log_metric_flattened(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._create_log_metric( + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val + + +def test__create_log_metric_flattened_error(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + +@pytest.mark.asyncio +async def test__create_log_metric_flattened_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._create_log_metric( + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test__create_log_metric_flattened_error_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._create_log_metric( + logging_metrics.CreateLogMetricRequest(), + parent='parent_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.UpdateLogMetricRequest, + dict, +]) +def test__update_log_metric(request_type, transport: str = 'grpc'): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + ) + response = client._update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +def test__update_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.UpdateLogMetricRequest( + metric_name='metric_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._update_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.UpdateLogMetricRequest( + metric_name='metric_name_value', + ) + +def test__update_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._update_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._transport._wrapped_methods[client._transport._update_log_metric] = mock_rpc + request = {} + client._update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client._update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._update_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._update_log_metric] = mock_rpc + + request = {} + await client._update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client._update_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.UpdateLogMetricRequest): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + response = await client._update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_metrics.UpdateLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, logging_metrics.LogMetric) + assert response.name == 'name_value' + assert response.description == 'description_value' + assert response.filter == 'filter_value' + assert response.bucket_name == 'bucket_name_value' + assert response.disabled is True + assert response.value_extractor == 'value_extractor_value' + assert response.version == logging_metrics.LogMetric.ApiVersion.V1 + + +@pytest.mark.asyncio +async def test__update_log_metric_async_from_dict(): + await test__update_log_metric_async(request_type=dict) + +def test__update_log_metric_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + + request.metric_name = 'metric_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client._update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__update_log_metric_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.UpdateLogMetricRequest() + + request.metric_name = 'metric_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + await client._update_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name_value', + ) in kw['metadata'] + + +def test__update_log_metric_flattened(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = logging_metrics.LogMetric() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client._update_log_metric( + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val + + +def test__update_log_metric_flattened_error(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + +@pytest.mark.asyncio +async def test__update_log_metric_flattened_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = logging_metrics.LogMetric() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._update_log_metric( + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test__update_log_metric_flattened_error_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client._update_log_metric( + logging_metrics.UpdateLogMetricRequest(), + metric_name='metric_name_value', + metric=logging_metrics.LogMetric(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + logging_metrics.DeleteLogMetricRequest, + dict, +]) +def test__delete_log_metric(request_type, transport: str = 'grpc'): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client._delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test__delete_log_metric_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = logging_metrics.DeleteLogMetricRequest( + metric_name='metric_name_value', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. + client._delete_log_metric(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == logging_metrics.DeleteLogMetricRequest( + metric_name='metric_name_value', + ) + +def test__delete_log_metric_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport._delete_log_metric in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
+ client._transport._wrapped_methods[client._transport._delete_log_metric] = mock_rpc + request = {} + client._delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client._delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_log_metric_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._client._transport._delete_log_metric in client._client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[client._client._transport._delete_log_metric] = mock_rpc + + request = {} + await client._delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client._delete_log_metric(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + +@pytest.mark.asyncio +async def test__delete_log_metric_async(transport: str = 'grpc_asyncio', request_type=logging_metrics.DeleteLogMetricRequest): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client._delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = logging_metrics.DeleteLogMetricRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test__delete_log_metric_async_from_dict(): + await test__delete_log_metric_async(request_type=dict) + +def test__delete_log_metric_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + + request.metric_name = 'metric_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + call.return_value = None + client._delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test__delete_log_metric_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = logging_metrics.DeleteLogMetricRequest() + + request.metric_name = 'metric_name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_log_metric(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'metric_name=metric_name_value', + ) in kw['metadata'] + + +def test__delete_log_metric_flattened(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client._delete_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + + +def test__delete_log_metric_flattened_error(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client._delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), + metric_name='metric_name_value', + ) + +@pytest.mark.asyncio +async def test__delete_log_metric_flattened_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client._delete_log_metric( + metric_name='metric_name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].metric_name + mock_val = 'metric_name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test__delete_log_metric_flattened_error_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client._delete_log_metric( + logging_metrics.DeleteLogMetricRequest(), + metric_name='metric_name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BaseMetricsServiceV2Client( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BaseMetricsServiceV2Client( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BaseMetricsServiceV2Client( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BaseMetricsServiceV2Client( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BaseMetricsServiceV2Client(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_kind_grpc(): + transport = BaseMetricsServiceV2Client.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test__list_log_metrics_empty_call_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + call.return_value = logging_metrics.ListLogMetricsResponse() + client._list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test__get_log_metric_empty_call_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client._get_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test__create_log_metric_empty_call_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client._create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test__update_log_metric_empty_call_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + call.return_value = logging_metrics.LogMetric() + client._update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test__delete_log_metric_empty_call_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + call.return_value = None + client._delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BaseMetricsServiceV2AsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test__list_log_metrics_empty_call_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._list_log_metrics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( + next_page_token='next_page_token_value', + )) + await client._list_log_metrics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.ListLogMetricsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__get_log_metric_empty_call_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._get_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client._get_log_metric(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.GetLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__create_log_metric_empty_call_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._create_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client._create_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.CreateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__update_log_metric_empty_call_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._update_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( + name='name_value', + description='description_value', + filter='filter_value', + bucket_name='bucket_name_value', + disabled=True, + value_extractor='value_extractor_value', + version=logging_metrics.LogMetric.ApiVersion.V1, + )) + await client._update_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.UpdateLogMetricRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test__delete_log_metric_empty_call_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport._delete_log_metric), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client._delete_log_metric(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = logging_metrics.DeleteLogMetricRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.MetricsServiceV2GrpcTransport, + ) + +def test_metrics_service_v2_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_metrics_service_v2_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport.__init__') as Transport: + Transport.return_value = None + transport = transports.MetricsServiceV2Transport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + '_list_log_metrics', + '_get_log_metric', + '_create_log_metric', + '_update_log_metric', + '_delete_log_metric', + 'get_operation', + 'cancel_operation', + 'list_operations', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_metrics_service_v2_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id="octopus", + ) + + +def test_metrics_service_v2_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.logging_v2.services.metrics_service_v2.transports.MetricsServiceV2Transport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.MetricsServiceV2Transport() + adc.assert_called_once() + + +def test_metrics_service_v2_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BaseMetricsServiceV2Client() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/logging.read', 'https://www.googleapis.com/auth/logging.write',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.MetricsServiceV2GrpcTransport, + transports.MetricsServiceV2GrpcAsyncIOTransport, + ], +) +def test_metrics_service_v2_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + 
type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.MetricsServiceV2GrpcTransport, grpc_helpers), + (transports.MetricsServiceV2GrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_metrics_service_v2_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "logging.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloud-platform.read-only', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', +), + scopes=["1", "2"], + default_host="logging.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_metrics_service_v2_host_no_port(transport_name): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_metrics_service_v2_host_with_port(transport_name): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='logging.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'logging.googleapis.com:8000' + ) + +def test_metrics_service_v2_grpc_transport_channel(): + channel = 
grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MetricsServiceV2GrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_metrics_service_v2_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.MetricsServiceV2GrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + 
certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) +def test_metrics_service_v2_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_log_metric_path(): + project = "squid" + metric = "clam" + expected = "projects/{project}/metrics/{metric}".format(project=project, metric=metric, ) + actual = 
BaseMetricsServiceV2Client.log_metric_path(project, metric) + assert expected == actual + + +def test_parse_log_metric_path(): + expected = { + "project": "whelk", + "metric": "octopus", + } + path = BaseMetricsServiceV2Client.log_metric_path(**expected) + + # Check that the path construction is reversible. + actual = BaseMetricsServiceV2Client.parse_log_metric_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = BaseMetricsServiceV2Client.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = BaseMetricsServiceV2Client.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BaseMetricsServiceV2Client.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = BaseMetricsServiceV2Client.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = BaseMetricsServiceV2Client.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BaseMetricsServiceV2Client.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = BaseMetricsServiceV2Client.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = BaseMetricsServiceV2Client.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = BaseMetricsServiceV2Client.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = BaseMetricsServiceV2Client.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = BaseMetricsServiceV2Client.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BaseMetricsServiceV2Client.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = BaseMetricsServiceV2Client.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = BaseMetricsServiceV2Client.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BaseMetricsServiceV2Client.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.MetricsServiceV2Transport, '_prep_wrapped_messages') as prep: + transport_class = BaseMetricsServiceV2Client.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_cancel_operation(transport: str = "grpc"): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + +def test_cancel_operation_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_cancel_operation_from_dict(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + None + ) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + +def test_get_operation_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_get_operation_from_dict(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + +def test_list_operations_field_headers(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=locations",) in kw["metadata"] + +def test_list_operations_from_dict(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = BaseMetricsServiceV2AsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio" + ) + with mock.patch.object(type(getattr(client.transport, "_grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = BaseMetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (BaseMetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), + (BaseMetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/gapic-generator/tests/integration/logging_internal_v2.yaml b/packages/gapic-generator/tests/integration/logging_internal_v2.yaml new file mode 100644 index 000000000000..849140507416 --- /dev/null +++ b/packages/gapic-generator/tests/integration/logging_internal_v2.yaml @@ -0,0 +1,245 @@ +type: google.api.Service +config_version: 3 +name: logging.googleapis.com +title: Cloud Logging API + +apis: +- name: google.logging.v2.ConfigServiceV2 +- name: google.logging.v2.LoggingServiceV2 +- name: google.logging.v2.MetricsServiceV2 +- name: google.longrunning.Operations + +types: +- name: google.logging.v2.BucketMetadata +- name: google.logging.v2.CopyLogEntriesMetadata +- name: google.logging.v2.CopyLogEntriesResponse +- name: google.logging.v2.LinkMetadata +- name: 
google.logging.v2.LocationMetadata + +documentation: + summary: Writes log entries and manages your Cloud Logging configuration. + overview: |- + # Introduction + The Cloud Logging service. +backend: + rules: + - selector: 'google.logging.v2.ConfigServiceV2.*' + deadline: 60.0 + - selector: google.logging.v2.ConfigServiceV2.CreateBucket + deadline: 600.0 + - selector: google.logging.v2.ConfigServiceV2.UpdateBucket + deadline: 600.0 + - selector: 'google.logging.v2.LoggingServiceV2.*' + deadline: 60.0 + - selector: google.logging.v2.LoggingServiceV2.ListLogEntries + deadline: 10.0 + - selector: google.logging.v2.LoggingServiceV2.TailLogEntries + deadline: 3600.0 + - selector: 'google.logging.v2.MetricsServiceV2.*' + deadline: 60.0 + - selector: 'google.longrunning.Operations.*' + deadline: 60.0 + +http: + rules: + - selector: google.longrunning.Operations.CancelOperation + post: '/v2/{name=*/*/locations/*/operations/*}:cancel' + body: '*' + additional_bindings: + - post: '/v2/{name=projects/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=organizations/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=folders/*/locations/*/operations/*}:cancel' + body: '*' + - post: '/v2/{name=billingAccounts/*/locations/*/operations/*}:cancel' + body: '*' + - selector: google.longrunning.Operations.GetOperation + get: '/v2/{name=*/*/locations/*/operations/*}' + additional_bindings: + - get: '/v2/{name=projects/*/locations/*/operations/*}' + - get: '/v2/{name=organizations/*/locations/*/operations/*}' + - get: '/v2/{name=folders/*/locations/*/operations/*}' + - get: '/v2/{name=billingAccounts/*/locations/*/operations/*}' + - selector: google.longrunning.Operations.ListOperations + get: '/v2/{name=*/*/locations/*}/operations' + additional_bindings: + - get: '/v2/{name=projects/*/locations/*}/operations' + - get: '/v2/{name=organizations/*/locations/*}/operations' + - get: '/v2/{name=folders/*/locations/*}/operations' + - get: 
'/v2/{name=billingAccounts/*/locations/*}/operations' + +authentication: + rules: + - selector: 'google.logging.v2.ConfigServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.logging.v2.ConfigServiceV2.GetBucket + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetCmekSettings + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetExclusion + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetLink + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetSettings + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetSink + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, 
+ https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.GetView + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListBuckets + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListExclusions + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListLinks + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListSinks + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.ConfigServiceV2.ListViews + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: 'google.logging.v2.LoggingServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + 
https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.LoggingServiceV2.DeleteLog + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.logging.v2.LoggingServiceV2.WriteLogEntries + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.write + - selector: 'google.logging.v2.MetricsServiceV2.*' + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.write + - selector: google.logging.v2.MetricsServiceV2.GetLogMetric + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.logging.v2.MetricsServiceV2.ListLogMetrics + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.longrunning.Operations.CancelOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/logging.admin + - selector: google.longrunning.Operations.GetOperation + oauth: + canonical_scopes: |- + https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read + - selector: google.longrunning.Operations.ListOperations + oauth: + canonical_scopes: |- + 
https://www.googleapis.com/auth/cloud-platform, + https://www.googleapis.com/auth/cloud-platform.read-only, + https://www.googleapis.com/auth/logging.admin, + https://www.googleapis.com/auth/logging.read +publishing: + documentation_uri: https://cloud.google.com/logging/docs/ + library_settings: + - version: 'google.logging.v2' + python_settings: + common: + selective_gapic_generation: + generate_omitted_as_internal: true + methods: + # LoggingService: No internal methods + # ConfigService: Some internal methods + # MetricsService: All internal methods + - google.logging.v2.LoggingServiceV2.DeleteLog + - google.logging.v2.LoggingServiceV2.WriteLogEntries + - google.logging.v2.LoggingServiceV2.ListLogEntries + - google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors + - google.logging.v2.LoggingServiceV2.ListLogs + - google.logging.v2.LoggingServiceV2.TailLogEntries + - google.logging.v2.ConfigServiceV2.ListBuckets + - google.logging.v2.ConfigServiceV2.GetBucket + - google.logging.v2.ConfigServiceV2.CreateBucketAsync + - google.logging.v2.ConfigServiceV2.UpdateBucketAsync + - google.logging.v2.ConfigServiceV2.CreateBucket + - google.logging.v2.ConfigServiceV2.UpdateBucket + - google.logging.v2.ConfigServiceV2.DeleteBucket + - google.logging.v2.ConfigServiceV2.UndeleteBucket diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index a5d4a88d6922..ae4534da856b 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -39,6 +39,7 @@ class DummyMethod: flattened_fields: Dict[str, Any] = dataclasses.field(default_factory=dict) client_output: bool = False client_output_async: bool = False + is_internal: bool = False DummyIdent = namedtuple("DummyIdent", ["name", "sphinx"]) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index 
d9714c280c60..a1d1e4d8208a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -2240,6 +2240,120 @@ def test_generate_sample_spec_basic(opts_transport, expected): assert specs == expected +def test_generate_sample_spec_internal_method(): + service_options = descriptor_pb2.ServiceOptions() + service_options.Extensions[client_pb2.default_host] = "example.googleapis.com" + + file_descriptors = [ + descriptor_pb2.FileDescriptorProto( + name="cephalopod.proto", + package="animalia.mollusca.v1", + message_type=[ + descriptor_pb2.DescriptorProto( + name="MolluscRequest", + ), + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ], + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Squid", + options=service_options, + method=[ + descriptor_pb2.MethodDescriptorProto( + name="Ramshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + name="NotRamshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ) + ], + ), + descriptor_pb2.ServiceDescriptorProto( + name="Octopus", + options=service_options, + method=[ + descriptor_pb2.MethodDescriptorProto( + name="Bighead", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + ], + ) + ] + + # The internal method should be animalia.mollusca.v1.Squid.NotRamshorn + service_yaml = { + "apis": [ + {"name": "animalia.mollusca.v1"} + ], + "publishing": { + "library_settings": [ + { + "version": "animalia.mollusca.v1", + "python_settings": { + "experimental_features": {"rest_async_io_enabled": True}, + "common": { + "selective_gapic_generation": { + "methods": [ + "animalia.mollusca.v1.Squid.Ramshorn", + "animalia.mollusca.v1.Octopus.Bighead" + ], + "generate_omitted_as_internal": True + } + } + }, + } + ] + }, + } + + 
api_opts = Options(service_yaml_config=service_yaml) + + api_schema = api.API.build( + file_descriptors, "animalia.mollusca.v1", opts=api_opts) + + samplegen_opts = Options.build("transport=rest") + + specs = sorted( + samplegen.generate_sample_specs(api_schema, opts=samplegen_opts), + key=lambda x: x["rpc"], + ) + specs.sort(key=lambda x: x["rpc"]) + + expected_specs = [ + { + "rpc": "Bighead", + "transport": "rest", + "service": "animalia.mollusca.v1.Octopus", + "region_tag": "example_v1_generated_Octopus_Bighead_sync", + "description": "Snippet for bighead" + }, + { + "rpc": "Ramshorn", + "transport": "rest", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn" + }, + { + "rpc": "_NotRamshorn", + "transport": "rest", + "service": "animalia.mollusca.v1.Squid", + "region_tag": "example_v1_generated_Squid__NotRamshorn_sync_internal", + "description": "Snippet for _not_ramshorn" + }, + ] + + assert specs == expected_specs + + def test__set_sample_metadata_server_streaming(): sample = { "rpc": "Ramshorn", diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 4ae1fda4f464..d6e5c09681d9 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2799,6 +2799,7 @@ def test_python_settings_selective_gapic_version_mismatch_method_raises_error(): def get_service_yaml_for_selective_gapic_tests( apis: Sequence[str] = ["google.example.v1.FooService"], methods=["google.example.v1.FooService.GetFoo"], + generate_omitted_as_internal=False, ) -> Dict[str, Any]: return { "apis": [ @@ -2812,7 +2813,8 @@ def get_service_yaml_for_selective_gapic_tests( "experimental_features": {"rest_async_io_enabled": True}, "common": { "selective_gapic_generation": { - "methods": methods + "methods": methods, + "generate_omitted_as_internal": 
generate_omitted_as_internal, } } }, @@ -3558,6 +3560,75 @@ def make_initiate_options(service_name): assert 'google.example.v1.FooOpsService.PoorlyOrganizedMethod' not in api_schema.all_methods +def test_selective_gapic_api_build_generate_omitted_as_internal(): + # Put together a couple of minimal protos. + fds = ( + make_file_pb2( + name='foo.proto', + package='google.example.v1', + messages=( + make_message_pb2(name='GenericRequest', fields=()), + make_message_pb2(name='GenericResponse', fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name='ServiceOne', + method=( + descriptor_pb2.MethodDescriptorProto( + name='InternalMethod', + input_type='google.example.v1.GenericRequest', + output_type='google.example.v1.GenericResponse', + ), + descriptor_pb2.MethodDescriptorProto( + name='NonInternalMethod', + input_type='google.example.v1.GenericRequest', + output_type='google.example.v1.GenericResponse', + ), + ), + ), + descriptor_pb2.ServiceDescriptorProto( + name='ServiceTwo', + method=( + descriptor_pb2.MethodDescriptorProto( + name='NonInternalMethod', + input_type='google.example.v1.GenericRequest', + output_type='google.example.v1.GenericResponse', + ), + ), + ), + ), + ), + ) + + service_yaml_config = get_service_yaml_for_selective_gapic_tests( + apis=['google.example.v1.ServiceOne', 'google.example.v1.ServiceTwo'], + methods=[ + 'google.example.v1.ServiceOne.NonInternalMethod', + 'google.example.v1.ServiceTwo.NonInternalMethod' + ], + generate_omitted_as_internal=True + ) + + opts = Options(service_yaml_config=service_yaml_config) + + api_schema = api.API.build(fds, 'google.example.v1', opts=opts) + + assert 'google.example.v1.ServiceOne' in api_schema.services + assert 'google.example.v1.ServiceTwo' in api_schema.services + assert 'google.example.v1.ServiceOne.InternalMethod' not in api_schema.all_methods + assert 'google.example.v1.ServiceOne._InternalMethod' in api_schema.all_methods + + assert 
api_schema.services['google.example.v1.ServiceOne'].is_internal + assert not api_schema.services['google.example.v1.ServiceTwo'].is_internal + assert api_schema.all_methods['google.example.v1.ServiceOne._InternalMethod'].is_internal + assert not api_schema.all_methods['google.example.v1.ServiceOne.NonInternalMethod'].is_internal + assert not api_schema.all_methods['google.example.v1.ServiceTwo.NonInternalMethod'].is_internal + + assert api_schema.services['google.example.v1.ServiceOne'].client_name == 'BaseServiceOneClient' + assert api_schema.services['google.example.v1.ServiceOne'].async_client_name == 'BaseServiceOneAsyncClient' + assert api_schema.all_methods['google.example.v1.ServiceOne._InternalMethod'].name == '_InternalMethod' + + def test_read_empty_python_settings_from_service_yaml(): service_yaml_config = { "apis": [ diff --git a/packages/gapic-generator/tests/unit/utils/test_code.py b/packages/gapic-generator/tests/unit/utils/test_code.py index 5f18679d6f2b..021c2f2732c1 100644 --- a/packages/gapic-generator/tests/unit/utils/test_code.py +++ b/packages/gapic-generator/tests/unit/utils/test_code.py @@ -43,3 +43,11 @@ def test_nth(): assert code.nth((i * i for i in range(20)), 4) == 16 # default assert code.nth((i * i for i in range(20)), 30, 2112) == 2112 + + +def test_make_private(): + # Regular case + assert code.make_private('private_func') == '_private_func' + + # Starts with underscore + assert code.make_private('_no_dunder_plz') == '_no_dunder_plz' From 33ba80c6247b1eb7443a2f89813316c1daf8882a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 14 Feb 2025 00:43:16 +0000 Subject: [PATCH 1247/1339] chore(main): release 1.23.0 (#2332) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git 
a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 3a7f529247ed..b720c3e20810 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.23.0](https://github.com/googleapis/gapic-generator-python/compare/v1.22.1...v1.23.0) (2025-02-14) + + +### Features + +* Add ability to remove unversioned modules ([#2329](https://github.com/googleapis/gapic-generator-python/issues/2329)) ([ccd619f](https://github.com/googleapis/gapic-generator-python/commit/ccd619f0461f6b1aa22cdd6fbcc37039bcdc541a)) +* Added support for internal methods in selective GAPIC generation ([#2325](https://github.com/googleapis/gapic-generator-python/issues/2325)) ([eb8a69e](https://github.com/googleapis/gapic-generator-python/commit/eb8a69e3141f865903716b9c5013e2c7650c60da)) + ## [1.22.1](https://github.com/googleapis/gapic-generator-python/compare/v1.22.0...v1.22.1) (2025-02-12) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 692c945c03b7..58e929348b7d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.22.1" +version = "1.23.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From d954060a3e35e5659591219393f16f52cbfec6f8 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 14 Feb 2025 16:57:21 +0100 Subject: [PATCH 1248/1339] chore(deps): update all dependencies (#2324) Co-authored-by: Anthonios Partheniou --- .../.kokoro/docker/docs/requirements.txt | 60 +++++++++---------- packages/gapic-generator/WORKSPACE | 6 +- 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt 
b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index a9360a25b707..312898ba3978 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -8,13 +8,13 @@ argcomplete==3.5.3 \ --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a +cachetools==5.5.1 \ + --hash=sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95 \ + --hash=sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db +certifi==2025.1.31 \ + --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \ + --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe # via requests charset-normalizer==3.4.1 \ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ @@ -124,23 +124,23 @@ distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 +filelock==3.17.0 \ + --hash=sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338 \ + --hash=sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e # via virtualenv gcp-docuploader==0.6.5 \ 
--hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf +google-api-core==2.24.1 \ + --hash=sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1 \ + --hash=sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a # via # google-cloud-core # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 +google-auth==2.38.0 \ + --hash=sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4 \ + --hash=sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a # via # google-api-core # google-cloud-core @@ -149,9 +149,9 @@ google-cloud-core==2.4.1 \ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 +google-cloud-storage==3.0.0 \ + --hash=sha256:2accb3e828e584888beff1165e5f3ac61aa9088965eb0165794a82d8c7f95297 \ + --hash=sha256:f85fd059650d2dbb0ac158a9a6b304b66143b35ed2419afec2905ca522eb2c6a # via gcp-docuploader google-crc32c==1.6.0 \ --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ @@ -188,17 +188,17 @@ google-resumable-media==2.7.2 \ --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 
# via google-cloud-storage -googleapis-common-protos==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed +googleapis-common-protos==1.67.0 \ + --hash=sha256:21398025365f138be356d5923e9168737d94d46a72aefee4a6110a1f23463c86 \ + --hash=sha256:579de760800d13616f51cf8be00c876f00a9f146d3e6510e19d1f4111758b741 # via google-api-core idna==3.10 \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 +nox==2025.2.9 \ + --hash=sha256:7d1e92d1918c6980d70aee9cf1c1d19d16faa71c4afe338fffd39e8a460e2067 \ + --hash=sha256:d50cd4ca568bd7621c2e6cbbc4845b3b7f7697f25d5fb0190ce8f4600be79768 # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ @@ -208,9 +208,9 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 +proto-plus==1.26.0 \ + --hash=sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22 \ + --hash=sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7 # via google-api-core protobuf==5.29.3 \ --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ @@ -291,7 +291,7 @@ urllib3==2.3.0 \ --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ 
--hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 +virtualenv==20.29.2 \ + --hash=sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728 \ + --hash=sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a # via nox diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index 86d90144c61e..a21f84581d8e 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -73,9 +73,9 @@ http_archive( # instantiated in grpc_deps(). http_archive( name = "com_google_protobuf", - sha256 = "63150aba23f7a90fd7d87bdf514e459dd5fe7023fdde01b56ac53335df64d4bd", - strip_prefix = "protobuf-29.2", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.2.tar.gz"], + sha256 = "008a11cc56f9b96679b4c285fd05f46d317d685be3ab524b2a310be0fbad987e", + strip_prefix = "protobuf-29.3", + urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.3.tar.gz"], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") From 52eecdab5b2fbea37f1fc25969c9e0297fe892af Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 14 Feb 2025 11:59:17 -0500 Subject: [PATCH 1249/1339] docs: fix minor typos (#2318) --- .../snippet_config_language.proto | 14 +++++++------- .../snippet_config_language_pb2.pyi | 16 ++++++++-------- .../gapic-generator/gapic/samplegen/samplegen.py | 6 +++--- .../gapic/samplegen_utils/snippet_metadata.proto | 12 ++++++------ packages/gapic-generator/gapic/schema/naming.py | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 2 +- .../tests/unit/samplegen/test_snippet_index.py | 4 ++-- .../tests/unit/schema/test_api.py | 4 ++-- .../tests/unit/schema/wrappers/test_message.py | 10 +++++----- .../tests/unit/schema/wrappers/test_method.py | 4 
++-- .../tests/unit/schema/wrappers/test_service.py | 2 +- 11 files changed, 38 insertions(+), 38 deletions(-) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto index d1aecb9b983a..93bdeb141121 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language.proto @@ -167,7 +167,7 @@ message SnippetSignature { // Languages that support both async and sync methods should generate both a // sync and an async snippets. Note that different samples should be in - // differnt files. + // different files. // Languages that support only one of async and sync methods should // generate the supported snippet. BOTH = 3; @@ -494,7 +494,7 @@ message Snippet { // Describes how to handle LRO responses. message LroResponseHandling { // The name of the variable to capture the LRO response in. Required. - // This will capture the response to the LRO operaion call and not to + // This will capture the response to the LRO operation call and not to // polling operations. string response_name = 1; @@ -556,7 +556,7 @@ message Statement { // An iteration statement. A given Statement set will be executed // repeatedly according to the iteration definition. // Each iteration type will make a per-step Expression.NameValue set - // available that may be used withing the given Statement set. + // available that may be used within the given Statement set. Iteration iteration = 5; } @@ -617,7 +617,7 @@ message Statement { // An iteration statement. A given Statement set will be executed // repeatedly according to the iteration definition. // Each iteration type will make a per-step Expression.NameValue set - // available that may be used withing the given Statement set. + // available that may be used within the given Statement set. 
message Iteration { oneof iteration_type { // A numeric sequence iteration. @@ -642,7 +642,7 @@ message Statement { // Represents a numeric sequence iteration. // A numeric sequence is defined over which to iterate making the current // element of the sequence available in a variable. - // It's ultimately the responsability of the user to define a finite + // It's ultimately the responsibility of the user to define a finite // sequence, although tooling may be provided to help. message NumericSequenceIteration { // Where to start the sequence at, ie. the first element of the iteration. @@ -740,7 +740,7 @@ message Statement { // How to split the byte sequence in chunks to iterate over. oneof chunk { - // The size of the chuncks to split the byte sequence in. The last chunk + // The size of the chunks to split the byte sequence in. The last chunk // will be at most chunk_size. Must be positive. // Should resolve to an integer type. Expression chunk_size = 2; @@ -1010,7 +1010,7 @@ message Expression { // and a Declaration of // - Declaration.type => SampleMessage and // - Declaration.name => sample_value - // then posible NameValues that refer to the variable declared are: + // then possible NameValues that refer to the variable declared are: // - NameValue.name => sample_value and NameValue.path => unset to reference // the value of the sample_value variable, i.e. 
using the variable // sample_value diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi index 73f90c543f29..fb61af2905cf 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi +++ b/packages/gapic-generator/gapic/configurable_snippetgen/snippet_config_language_pb2.pyi @@ -243,7 +243,7 @@ class SnippetSignature(google.protobuf.message.Message): BOTH: SnippetSignature._SyncPreference.ValueType # 3 """Languages that support both async and sync methods should generate both a sync and an async snippets. Note that different samples should be in - differnt files. + different files. Languages that support only one of async and sync methods should generate the supported snippet. """ @@ -271,7 +271,7 @@ class SnippetSignature(google.protobuf.message.Message): BOTH: SnippetSignature.SyncPreference.ValueType # 3 """Languages that support both async and sync methods should generate both a sync and an async snippets. Note that different samples should be in - differnt files. + different files. Languages that support only one of async and sync methods should generate the supported snippet. """ @@ -919,7 +919,7 @@ class Snippet(google.protobuf.message.Message): POLLING_CALL_FIELD_NUMBER: builtins.int response_name: builtins.str """The name of the variable to capture the LRO response in. Required. - This will capture the response to the LRO operaion call and not to + This will capture the response to the LRO operation call and not to polling operations. """ polling_type: global___Snippet.LroResponseHandling.PollingType.ValueType @@ -1154,7 +1154,7 @@ class Statement(google.protobuf.message.Message): """An iteration statement. A given Statement set will be executed repeatedly according to the iteration definition. 
Each iteration type will make a per-step Expression.NameValue set - available that may be used withing the given Statement set. + available that may be used within the given Statement set. """ DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -1164,7 +1164,7 @@ class Statement(google.protobuf.message.Message): """Represents a numeric sequence iteration. A numeric sequence is defined over which to iterate making the current element of the sequence available in a variable. - It's ultimately the responsability of the user to define a finite + It's ultimately the responsibility of the user to define a finite sequence, although tooling may be provided to help. """ @@ -1336,7 +1336,7 @@ class Statement(google.protobuf.message.Message): """ @property def chunk_size(self) -> global___Expression: - """The size of the chuncks to split the byte sequence in. The last chunk + """The size of the chunks to split the byte sequence in. The last chunk will be at most chunk_size. Must be positive. Should resolve to an integer type. """ @@ -1432,7 +1432,7 @@ class Statement(google.protobuf.message.Message): """An iteration statement. A given Statement set will be executed repeatedly according to the iteration definition. Each iteration type will make a per-step Expression.NameValue set - available that may be used withing the given Statement set. + available that may be used within the given Statement set. """ def __init__( self, @@ -1814,7 +1814,7 @@ class Expression(google.protobuf.message.Message): and a Declaration of - Declaration.type => SampleMessage and - Declaration.name => sample_value - then posible NameValues that refer to the variable declared are: + then possible NameValues that refer to the variable declared are: - NameValue.name => sample_value and NameValue.path => unset to reference the value of the sample_value variable, i.e. 
using the variable sample_value diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index d06968d5d627..9eb2f890b09a 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -302,7 +302,7 @@ def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): is_async = transport == api.TRANSPORT_GRPC_ASYNC sample["client_name"] = service.async_client_name if is_async else service.client_name - # the MessageType of the request object passed to the rpc e.g, `ListRequest` + # the MessageType of the request object passed to the rpc e.g., `ListRequest` sample["request_type"] = rpc.input # We check if the request object is part of the service proto package. @@ -744,7 +744,7 @@ def _validate_format(self, body: List[str]): num_prints = fmt_str.count("%s") if num_prints != len(body) - 1: raise types.MismatchedFormatSpecifier( - "Expected {} expresssions in format string '{}' but found {}".format( + "Expected {} expressions in format string '{}' but found {}".format( num_prints, fmt_str, len(body) - 1 ) ) @@ -821,7 +821,7 @@ def _validate_loop(self, loop): Checks for correctly defined loop constructs, either 'collection' loops with a collection and iteration variable, or 'map' loops with a map and at least one of 'key' or 'value'. - Loops also have a 'body', which contains statments that may + Loops also have a 'body', which contains statements that may use the variables from the header. The body statements are validated recursively. 
diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto index 398dc9b27ccb..b419aed7f25d 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_metadata.proto @@ -61,13 +61,13 @@ message Snippet { // may be written in F# or VB .NET. // Note that this does not contain information about the snippet supported // platforms or language versions, etc. This is just a quick way to identify - // the generally supported langauge. + // the generally supported language. Language language = 5; // The client library method this snippet is for. ClientMethod client_method = 6; - // Wether this is the canonical snippet for the corresponding service method. + // Whether this is the canonical snippet for the corresponding service method. // This is to be interpreted in conjunction with origin as follows: // For a given service method: // - A handwritten canonical takes precedence over @@ -128,7 +128,7 @@ message Snippet { FULL = 1; // A shorter version of the full sample, may not include imports and some - // langauge specific initialization code. This is to be used in contexts + // language specific initialization code. This is to be used in contexts // in which the full aspects of the sample are made clear outside the // code. SHORT = 2; @@ -166,7 +166,7 @@ message ClientMethod { // This may not be unique within the service client because of overloads. string full_name = 2; - // Indicates wether this method is synchronous or asynchronous. + // Indicates whether this method is synchronous or asynchronous. // Some languages may support only one of the variants, in which case, this // field will always have the same value (for that language). 
bool async = 3; @@ -205,7 +205,7 @@ message ServiceClient { // The fully qualified name of the service client, which is the short_name // qualified by the namespace/package/type name this client is declared in. - // This will be unique within the client libray. + // This will be unique within the client library. string full_name = 2; } @@ -228,7 +228,7 @@ message ClientLibrary { // The programming language the library is written in. // Note that this does not contain information about the library supported // platforms or language versions, etc. This is just a quick way to identify - // the generally supported langauge. + // the generally supported language. Language language = 3; // The APIs this client library is for. diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 1d055490ff4a..92545b6526e8 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -34,7 +34,7 @@ class Naming(abc.ABC): This class contains the naming nomenclature used for this API within templates. - An concrete child of this object is made available to every template + A concrete child of this object is made available to every template (as ``api.naming``). """ name: str = '' diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index ff2d6738c3c2..d148452435ee 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -218,7 +218,7 @@ def inner_mock(self, stack, visited_fields) -> str: answer = f'{self.type.ident}({sub.name}={{}})' if self.map: - # Maps are a special case beacuse they're represented internally as + # Maps are a special case because they're represented internally as # a list of a generated type with two fields: 'key' and 'value'. 
answer = '{{{}: {}}}'.format( self.type.fields["key"].mock_value, diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index ade49db562a2..0c9ae262d790 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -80,7 +80,7 @@ def test_snippet_init(sample_str): # This is the same as the sample_str above, minus the # [START ...] # and # [END ...] lines - expected_full_snipppet = """from molluscs.v1 import molluscclient + expected_full_snippet = """from molluscs.v1 import molluscclient def sample_classify(video, location): @@ -109,7 +109,7 @@ def sample_classify(video, location): """ - assert snippet.full_snippet == expected_full_snipppet + assert snippet.full_snippet == expected_full_snippet def test_add_snippet_no_matching_service(sample_str): diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index d6e5c09681d9..13b0d32ffe6e 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -3482,7 +3482,7 @@ def make_initiate_options(service_name): make_message_pb2(name='CreateBarRequest'), make_message_pb2(name='GetBarOperationRequest'), make_message_pb2(name='PoorlyOrganizedMethodRequest'), - make_message_pb2(name='PoorlyOrganizedMethodReponse') + make_message_pb2(name='PoorlyOrganizedMethodResponse') ), services=( descriptor_pb2.ServiceDescriptorProto( @@ -3497,7 +3497,7 @@ def make_initiate_options(service_name): descriptor_pb2.MethodDescriptorProto( name='PoorlyOrganizedMethod', input_type='google.example.v1.PoorlyOrganizedMethodRequest', - output_type='google.example.v1.PoorlyOrganizedMethodReponse', + output_type='google.example.v1.PoorlyOrganizedMethodResponse', ), ), ), diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index 00f91b65f7a8..ffa70cbfeace 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -151,7 +151,7 @@ def test_get_field_recursive(): outer_field = make_field('inner', message=inner) outer = make_message('Outer', fields=(outer_field,)) - # Assert that a recusive retrieval works. + # Assert that a recursive retrieval works. assert outer.get_field('inner', 'zero') == inner_fields[0] assert outer.get_field('inner', 'one') == inner_fields[1] assert outer.get_field('inner.one') == inner_fields[1] @@ -166,7 +166,7 @@ def test_get_field_nested_not_found_error(): outer_field = make_field('inner', message=inner) outer = make_message('Outer', fields=(outer_field,)) - # Assert that a recusive retrieval fails. + # Assert that a recursive retrieval fails. with pytest.raises(KeyError): assert outer.get_field('inner', 'zero', 'beyond') @@ -180,7 +180,7 @@ def test_get_field_nonterminal_repeated_error(): outer_field = make_field('inner', message=inner, repeated=True) outer = make_message('Outer', fields=(outer_field,)) - # Assert that a recusive retrieval fails. + # Assert that a recursive retrieval fails. 
with pytest.raises(KeyError): assert outer.get_field('inner', 'zero') == inner_fields[0] with pytest.raises(KeyError): @@ -309,7 +309,7 @@ def test_oneof_fields(): length_f = make_field(name="length_f", oneof="length", type=5) color = make_field(name="color", type=5) request = make_message( - name="CreateMolluscReuqest", + name="CreateMolluscRequest", fields=( mass_kg, mass_lbs, @@ -345,7 +345,7 @@ def test_required_fields(): ) request = make_message( - name="CreateMolluscReuqest", + name="CreateMolluscRequest", fields=( mass_kg, length_m, diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index f6cf2e881876..d0b31d3bb76d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -801,7 +801,7 @@ def test_flattened_oneof_fields(): ), ) request = make_message( - name="CreateMolluscReuqest", + name="CreateMolluscRequest", fields=( length_m, length_f, @@ -825,7 +825,7 @@ def test_flattened_oneof_fields(): actual = method.flattened_oneof_fields() assert expected == actual - # Check this method too becasue the setup is a lot of work. + # Check this method too because the setup is a lot of work. 
expected = { "color": "color", "length_m": "length_m", diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 98a5ce1f3e96..57fe95f84e7a 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -665,7 +665,7 @@ def test_extended_operations_lro_detection(): ) assert user_service.any_extended_operations_methods - # Note: we can't have the operation_serivce property point to the actual operation service + # Note: we can't have the operation_service property point to the actual operation service # because Service objects can't perform the lookup. # Instead we kick that can to the API object and make it do the lookup and verification. assert lro.operation_service == "CustomOperations" From 63d2dcd142bb944428150cc796027577610f1058 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 14 Feb 2025 11:59:33 -0500 Subject: [PATCH 1250/1339] build: remove unused files (#2331) --- packages/gapic-generator/.kokoro/release.sh | 29 - .../.kokoro/release/common.cfg | 43 -- .../.kokoro/release/release.cfg | 1 - .../gapic-generator/.kokoro/requirements.in | 11 - .../gapic-generator/.kokoro/requirements.txt | 509 ------------------ 5 files changed, 593 deletions(-) delete mode 100755 packages/gapic-generator/.kokoro/release.sh delete mode 100644 packages/gapic-generator/.kokoro/release/common.cfg delete mode 100644 packages/gapic-generator/.kokoro/release/release.cfg delete mode 100644 packages/gapic-generator/.kokoro/requirements.in delete mode 100644 packages/gapic-generator/.kokoro/requirements.txt diff --git a/packages/gapic-generator/.kokoro/release.sh b/packages/gapic-generator/.kokoro/release.sh deleted file mode 100755 index 1d09c988b454..000000000000 --- a/packages/gapic-generator/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google 
LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/gapic-generator-python/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -cd github/gapic-generator-python -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/gapic-generator/.kokoro/release/common.cfg b/packages/gapic-generator/.kokoro/release/common.cfg deleted file mode 100644 index 68cddd5f08da..000000000000 --- a/packages/gapic-generator/.kokoro/release/common.cfg +++ /dev/null @@ -1,43 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "gapic-generator-python/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/gapic-generator-python/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-3" - } - } -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. -action { - define_artifacts { - regex: "github/gapic-generator-python/**/*.tar.gz" - strip_prefix: "github/gapic-generator-python" - } -} diff --git a/packages/gapic-generator/.kokoro/release/release.cfg b/packages/gapic-generator/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/gapic-generator/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/requirements.in b/packages/gapic-generator/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce0d0a..000000000000 --- a/packages/gapic-generator/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/packages/gapic-generator/.kokoro/requirements.txt b/packages/gapic-generator/.kokoro/requirements.txt deleted file mode 100644 index 006d8ef931bf..000000000000 --- a/packages/gapic-generator/.kokoro/requirements.txt +++ /dev/null @@ -1,509 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.1 \ - 
--hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 - # via nox -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - 
--hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - 
--hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - 
--hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - 
--hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -docutils==0.21.2 \ - 
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.1.1 \ - --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ - --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e - # via -r requirements.in -google-api-core==2.21.0 \ - --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ - --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.18.2 \ - --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ - --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - 
--hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - 
--hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -importlib-metadata==8.5.0 \ - --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ - --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 - # via keyring -jaraco-functools==4.1.0 \ - --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ - --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - 
--hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.4.1 \ - --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ - --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - 
--hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - 
--hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.5.0 \ - --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ - 
--hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - 
--hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - 
--hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.9.0 \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.9.2 \ - 
--hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ - --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # -r requirements.in - # rich -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 - # via - # requests - # twine -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - 
--hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via -r requirements.in -zipp==3.20.2 \ - --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 \ - --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 - # via -r requirements.in From b85067b9f530d06bbcd06baafb260ed9d789bde1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 14 Feb 2025 13:02:19 -0500 Subject: [PATCH 1251/1339] build: migrate from autopep8 to black/flake8 (#2334) Co-authored-by: Owl Bot --- packages/gapic-generator/.flake8 | 32 + packages/gapic-generator/.githooks/pre-commit | 13 - .../.github/sync-repo-settings.yaml | 2 +- .../.github/workflows/tests.yaml | 10 +- .../.kokoro/docker/docs/requirements.txt | 60 +- packages/gapic-generator/DEVELOPMENT.md | 12 +- packages/gapic-generator/docs/conf.py | 76 +- packages/gapic-generator/gapic/cli/dump.py | 21 +- .../gapic-generator/gapic/cli/generate.py | 33 +- .../gapic/cli/generate_with_pandoc.py | 14 +- .../configured_snippet.py | 10 +- .../configurable_snippetgen/libcst_utils.py | 9 +- .../gapic/generator/__init__.py | 4 +- .../gapic/generator/formatter.py | 8 +- .../gapic/generator/generator.py | 172 +- .../gapic/samplegen/__init__.py | 4 +- .../gapic/samplegen/manifest.py | 25 +- .../gapic/samplegen/samplegen.py | 227 +- .../gapic/samplegen_utils/__init__.py | 6 +- .../gapic/samplegen_utils/snippet_index.py | 62 +- .../gapic/samplegen_utils/types.py | 7 +- .../gapic/samplegen_utils/utils.py | 21 +- .../gapic/samplegen_utils/yaml.py | 18 +- .../gapic-generator/gapic/schema/__init__.py | 6 +- packages/gapic-generator/gapic/schema/api.py | 630 ++-- 
packages/gapic-generator/gapic/schema/imp.py | 10 +- .../gapic-generator/gapic/schema/metadata.py | 89 +- .../gapic-generator/gapic/schema/mixins.py | 82 +- .../gapic-generator/gapic/schema/naming.py | 91 +- .../gapic-generator/gapic/schema/wrappers.py | 775 +++-- .../gapic-generator/gapic/utils/__init__.py | 36 +- packages/gapic-generator/gapic/utils/cache.py | 6 +- packages/gapic-generator/gapic/utils/case.py | 14 +- packages/gapic-generator/gapic/utils/code.py | 16 +- .../gapic-generator/gapic/utils/filename.py | 4 +- packages/gapic-generator/gapic/utils/lines.py | 94 +- .../gapic-generator/gapic/utils/options.py | 114 +- packages/gapic-generator/gapic/utils/rst.py | 36 +- .../gapic-generator/gapic/utils/uri_conv.py | 3 +- .../gapic-generator/gapic/utils/uri_sample.py | 17 +- packages/gapic-generator/mypy.ini | 2 +- packages/gapic-generator/noxfile.py | 164 +- packages/gapic-generator/owlbot.py | 2 + .../gapic-generator/test_utils/test_utils.py | 256 +- .../gapic-generator/tests/system/conftest.py | 19 +- .../tests/system/test_api_version_header.py | 18 +- .../system/test_client_context_manager.py | 20 +- .../system/test_grpc_interceptor_streams.py | 26 +- .../gapic-generator/tests/system/test_lro.py | 28 +- .../tests/system/test_mixins.py | 8 +- .../tests/system/test_pagination.py | 64 +- .../tests/system/test_request_metadata.py | 6 +- .../tests/system/test_resource_crud.py | 50 +- .../tests/system/test_retry.py | 46 +- .../tests/system/test_streams.py | 94 +- .../tests/system/test_unary.py | 11 +- .../tests/system/test_universe_domain.py | 130 +- .../tests/unit/common_types.py | 93 +- .../test_configured_snippet.py | 9 +- .../test_libcst_utils.py | 3 +- .../configurable_snippetgen/test_resources.py | 3 +- .../tests/unit/generator/test_formatter.py | 77 +- .../tests/unit/generator/test_generator.py | 352 +- .../tests/unit/generator/test_options.py | 104 +- .../tests/unit/samplegen/test_integration.py | 551 +-- .../tests/unit/samplegen/test_manifest.py | 83 +- 
.../tests/unit/samplegen/test_samplegen.py | 130 +- .../unit/samplegen/test_snippet_index.py | 215 +- .../tests/unit/samplegen/test_template.py | 938 ++--- .../tests/unit/schema/test_api.py | 3009 +++++++++-------- .../tests/unit/schema/test_imp.py | 30 +- .../tests/unit/schema/test_metadata.py | 184 +- .../tests/unit/schema/test_naming.py | 206 +- .../tests/unit/schema/wrappers/test_enums.py | 34 +- .../tests/unit/schema/wrappers/test_field.py | 331 +- .../unit/schema/wrappers/test_message.py | 259 +- .../tests/unit/schema/wrappers/test_method.py | 758 ++--- .../tests/unit/schema/wrappers/test_python.py | 20 +- .../unit/schema/wrappers/test_routing.py | 46 +- .../unit/schema/wrappers/test_service.py | 336 +- .../tests/unit/utils/test_case.py | 14 +- .../tests/unit/utils/test_checks.py | 12 +- .../tests/unit/utils/test_code.py | 14 +- .../tests/unit/utils/test_filename.py | 16 +- .../tests/unit/utils/test_lines.py | 95 +- .../tests/unit/utils/test_rst.py | 34 +- .../tests/unit/utils/test_uri_sample.py | 6 +- 87 files changed, 6453 insertions(+), 5322 deletions(-) diff --git a/packages/gapic-generator/.flake8 b/packages/gapic-generator/.flake8 index 673525b82a04..bf92ccd8e35f 100644 --- a/packages/gapic-generator/.flake8 +++ b/packages/gapic-generator/.flake8 @@ -1,12 +1,44 @@ [flake8] ignore = + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E123, E124 # Closing bracket mismatches opening bracket's line. # This works poorly with type annotations in method declarations. E123, E124 # Line over-indented for visual indent. # This works poorly with type annotations in method declarations. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E126, E128, E131 E126, E128, E131 # Line break after binary operator. # This catches line breaks after "and" / "or" as a means of breaking up # long if statements, which PEP 8 explicitly encourages. 
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): W504 W504 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E203 + E203 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E501 + E501 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E712 + E712 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E711 + E711 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E722 + E722 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): E741 + E741 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): F401 + F401 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): F541 + F541 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): F841 + F841 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): F811 + F811 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): W503 + W503 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): W291 + W291 +exclude = + # Exclude golden files + tests/integration + # Exclude generated protobuf code + *_pb2.py diff --git a/packages/gapic-generator/.githooks/pre-commit b/packages/gapic-generator/.githooks/pre-commit index b7ffeb44dbb6..0b87352f7489 100755 --- a/packages/gapic-generator/.githooks/pre-commit +++ b/packages/gapic-generator/.githooks/pre-commit @@ -93,19 +93,6 @@ if [ -x /usr/lib/git-core/google_hook ]; then /usr/lib/git-core/google_hook pre-commit "$@" fi -# Check Python format. -if [ $NUM_PYTHON_FILES_CHANGED -gt 0 ] -then - echo_status "Running Python linter..." - find gapic tests -name "*.py" -not -path 'tests/integration/goldens/*' | xargs autopep8 --diff --exit-code - FORMAT_STATUS=$? - if [ $FORMAT_STATUS != 0 ] - then - echo_error "Linting failed." "Please try again after running autopep8 on the gapic/ and tests/ directories." 
- exit 1 - fi -fi - # Check unit tests. if [ $NUM_PYTHON_FILES_CHANGED -gt 0 ] || [ $NUM_UNIT_GOLDEN_FILES_CHANGED -gt 0 ] then diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index d5581bc38c46..5c94998733d2 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -65,7 +65,7 @@ branchProtectionRules: - 'goldens-lint' - 'goldens-prerelease' - 'goldens-unit' - - 'style-check' + - 'lint' - 'snippetgen' - 'unit (3.7)' - 'unit (3.8)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index a565a3366020..ce5307d59a75 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -390,7 +390,7 @@ jobs: nox -f tests/integration/goldens/eventarc/noxfile.py -s prerelease_deps nox -f tests/integration/goldens/logging/noxfile.py -s prerelease_deps nox -f tests/integration/goldens/redis/noxfile.py -s prerelease_deps - style-check: + lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -399,9 +399,9 @@ jobs: with: python-version: "3.13" cache: 'pip' - - name: Install autopep8 + - name: Install nox. 
run: | - python -m pip install autopep8 - - name: Check diff + python -m pip install nox + - name: Run lint run: | - find gapic tests -name "*.py" -not -path 'tests/**/goldens/*' | xargs autopep8 --diff --exit-code + nox -s lint diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt index 312898ba3978..a9360a25b707 100644 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt @@ -8,13 +8,13 @@ argcomplete==3.5.3 \ --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox -cachetools==5.5.1 \ - --hash=sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95 \ - --hash=sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2025.1.31 \ - --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \ - --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db # via requests charset-normalizer==3.4.1 \ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ @@ -124,23 +124,23 @@ distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.17.0 \ - --hash=sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338 \ - 
--hash=sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -google-api-core==2.24.1 \ - --hash=sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1 \ - --hash=sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf # via # google-cloud-core # google-cloud-storage -google-auth==2.38.0 \ - --hash=sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4 \ - --hash=sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 # via # google-api-core # google-cloud-core @@ -149,9 +149,9 @@ google-cloud-core==2.4.1 \ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==3.0.0 \ - --hash=sha256:2accb3e828e584888beff1165e5f3ac61aa9088965eb0165794a82d8c7f95297 \ - --hash=sha256:f85fd059650d2dbb0ac158a9a6b304b66143b35ed2419afec2905ca522eb2c6a +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 # via gcp-docuploader 
google-crc32c==1.6.0 \ --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ @@ -188,17 +188,17 @@ google-resumable-media==2.7.2 \ --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 # via google-cloud-storage -googleapis-common-protos==1.67.0 \ - --hash=sha256:21398025365f138be356d5923e9168737d94d46a72aefee4a6110a1f23463c86 \ - --hash=sha256:579de760800d13616f51cf8be00c876f00a9f146d3e6510e19d1f4111758b741 +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed # via google-api-core idna==3.10 \ --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # via requests -nox==2025.2.9 \ - --hash=sha256:7d1e92d1918c6980d70aee9cf1c1d19d16faa71c4afe338fffd39e8a460e2067 \ - --hash=sha256:d50cd4ca568bd7621c2e6cbbc4845b3b7f7697f25d5fb0190ce8f4600be79768 +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ @@ -208,9 +208,9 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -proto-plus==1.26.0 \ - --hash=sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22 \ - --hash=sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7 +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + 
--hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 # via google-api-core protobuf==5.29.3 \ --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ @@ -291,7 +291,7 @@ urllib3==2.3.0 \ --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d # via requests -virtualenv==20.29.2 \ - --hash=sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728 \ - --hash=sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/packages/gapic-generator/DEVELOPMENT.md b/packages/gapic-generator/DEVELOPMENT.md index f6480bd689ac..3df5a264d6b0 100644 --- a/packages/gapic-generator/DEVELOPMENT.md +++ b/packages/gapic-generator/DEVELOPMENT.md @@ -23,17 +23,7 @@ Execute unit tests by running one of the sessions prefixed with `unit-`. ## Formatting -- Lint sources by running `autopep8`. The specific command is the following. - - ``` - find gapic tests -name "*.py" -not -path 'tests/**/goldens/*' | xargs autopep8 --diff --exit-code - ``` - -- Format sources in place: - - ``` - find gapic tests -name "*.py" -not -path 'tests/**/goldens/*' | xargs autopep8 --in-place - ``` +- Lint sources by running `nox -s blacken`. Use `nox -s lint` to run lint check. 
## Integration Tests diff --git a/packages/gapic-generator/docs/conf.py b/packages/gapic-generator/docs/conf.py index fbe1c27365fc..5d7d48e9d1e4 100644 --- a/packages/gapic-generator/docs/conf.py +++ b/packages/gapic-generator/docs/conf.py @@ -14,24 +14,27 @@ # import os import sys -sys.path.insert(0, os.path.abspath('..')) + +sys.path.insert(0, os.path.abspath("..")) # -- Project information ----------------------------------------------------- -project = 'API Client Generator for Python' -copyright = '2018, Google LLC' -author = 'Luke Sneeringer' +project = "API Client Generator for Python" +copyright = "2018, Google LLC" +author = "Luke Sneeringer" # The short X.Y version -version = os.environ.get('CIRCLE_TAG', 'latest') +version = os.environ.get("CIRCLE_TAG", "latest") # The full version, including alpha/beta/rc tags -release = os.environ.get('CIRCLE_TAG', 'latest') +release = os.environ.get("CIRCLE_TAG", "latest") # Replace |version| in the docs with the actual version string. rst_epilog = """ .. |version| replace:: {version} -""".format(version=version) +""".format( + version=version +) # -- General configuration --------------------------------------------------- @@ -43,24 +46,24 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', - 'sphinx.ext.napoleon', + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", + "sphinx.ext.napoleon", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The root toctree document. 
-root_doc = 'index' +root_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -72,10 +75,10 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # -- Options for HTML output ------------------------------------------------- @@ -83,7 +86,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -110,7 +113,7 @@ # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = 'gapic-generator-python' +htmlhelp_basename = "gapic-generator-python" # -- Options for LaTeX output ------------------------------------------------ @@ -119,15 +122,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -137,8 +137,13 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ - (root_doc, 'APIClientGeneratorforPython.tex', 'API Client Generator for Python Documentation', - 'Luke Sneeringer', 'manual'), + ( + root_doc, + "APIClientGeneratorforPython.tex", + "API Client Generator for Python Documentation", + "Luke Sneeringer", + "manual", + ), ] @@ -147,8 +152,13 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (root_doc, 'apiclientgeneratorforpython', 'API Client Generator for Python Documentation', - [author], 1) + ( + root_doc, + "apiclientgeneratorforpython", + "API Client Generator for Python Documentation", + [author], + 1, + ) ] @@ -158,9 +168,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (root_doc, 'APIClientGeneratorforPython', 'API Client Generator for Python Documentation', - author, 'APIClientGeneratorforPython', 'One line description of project.', - 'Miscellaneous'), + ( + root_doc, + "APIClientGeneratorforPython", + "API Client Generator for Python Documentation", + author, + "APIClientGeneratorforPython", + "One line description of project.", + "Miscellaneous", + ), ] @@ -169,4 +185,4 @@ # -- Options for intersphinx extension --------------------------------------- # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {'https://docs.python.org/3/': None} +intersphinx_mapping = {"https://docs.python.org/3/": None} diff --git a/packages/gapic-generator/gapic/cli/dump.py b/packages/gapic-generator/gapic/cli/dump.py index 7862750a764f..1e3aa25fd0b3 100644 --- a/packages/gapic-generator/gapic/cli/dump.py +++ b/packages/gapic-generator/gapic/cli/dump.py @@ -20,10 +20,14 @@ @click.command() -@click.option('--request', type=click.File('rb'), default=sys.stdin.buffer, - help='Location of the `CodeGeneratorRequest` to be dumped. 
' - 'This defaults to stdin (which is what protoc uses) ' - 'but this option can be set for testing/debugging.') +@click.option( + "--request", + type=click.File("rb"), + default=sys.stdin.buffer, + help="Location of the `CodeGeneratorRequest` to be dumped. " + "This defaults to stdin (which is what protoc uses) " + "but this option can be set for testing/debugging.", +) def dump(request: typing.BinaryIO) -> None: """Dump the CodeGeneratorRequest, unmodified, to the given output.""" # Ideally, this would output a CodeGeneratorResponse with the content @@ -32,13 +36,14 @@ def dump(request: typing.BinaryIO) -> None: # valid utf-8. # Dump the CodeGeneratorRequest to disk. - with io.open('request.desc', 'wb+') as output: + with io.open("request.desc", "wb+") as output: output.write(request.read()) # Log what happened. click.secho( - 'Request dumped to `request.desc`. ' - 'This script will now exit 1 to satisfy protoc.', - file=sys.stderr, fg='green', + "Request dumped to `request.desc`. " + "This script will now exit 1 to satisfy protoc.", + file=sys.stderr, + fg="green", ) sys.exit(1) diff --git a/packages/gapic-generator/gapic/cli/generate.py b/packages/gapic-generator/gapic/cli/generate.py index 414170a21380..e8eee1f0341c 100644 --- a/packages/gapic-generator/gapic/cli/generate.py +++ b/packages/gapic-generator/gapic/cli/generate.py @@ -26,16 +26,21 @@ @click.command() -@click.option('--request', type=click.File('rb'), default=sys.stdin.buffer, - help='Location of the `CodeGeneratorRequest` to be processed. ' - 'This defaults to stdin (which is what protoc uses) ' - 'but this option can be set for testing/debugging.') -@click.option('--output', type=click.File('wb'), default=sys.stdout.buffer, - help='Where to output the `CodeGeneratorResponse`. 
' - 'Defaults to stdout.') -def generate( - request: typing.BinaryIO, - output: typing.BinaryIO) -> None: +@click.option( + "--request", + type=click.File("rb"), + default=sys.stdin.buffer, + help="Location of the `CodeGeneratorRequest` to be processed. " + "This defaults to stdin (which is what protoc uses) " + "but this option can be set for testing/debugging.", +) +@click.option( + "--output", + type=click.File("wb"), + default=sys.stdout.buffer, + help="Where to output the `CodeGeneratorResponse`. " "Defaults to stdout.", +) +def generate(request: typing.BinaryIO, output: typing.BinaryIO) -> None: """Generate a full API client description.""" # Load the protobuf CodeGeneratorRequest. req = plugin_pb2.CodeGeneratorRequest.FromString(request.read()) @@ -47,11 +52,9 @@ def generate( # This generator uses a slightly different mechanism for determining # which files to generate; it tracks at package level rather than file # level. - package = os.path.commonprefix([ - p.package - for p in req.proto_file - if p.name in req.file_to_generate - ]).rstrip('.') + package = os.path.commonprefix( + [p.package for p in req.proto_file if p.name in req.file_to_generate] + ).rstrip(".") # Build the API model object. 
# This object is a frozen representation of the whole API, and is sent diff --git a/packages/gapic-generator/gapic/cli/generate_with_pandoc.py b/packages/gapic-generator/gapic/cli/generate_with_pandoc.py index 4a31f76292dd..f8b444b6b8cd 100644 --- a/packages/gapic-generator/gapic/cli/generate_with_pandoc.py +++ b/packages/gapic-generator/gapic/cli/generate_with_pandoc.py @@ -1,14 +1,16 @@ import os import sys -if __name__ == '__main__': - os.environ['PYPANDOC_PANDOC'] = os.path.join( - os.path.abspath(__file__).rsplit("gapic", 1)[0], "pandoc") - os.environ['LC_ALL'] = 'C.UTF-8' - os.environ['PYTHONNOUSERSITE'] = 'True' +if __name__ == "__main__": + os.environ["PYPANDOC_PANDOC"] = os.path.join( + os.path.abspath(__file__).rsplit("gapic", 1)[0], "pandoc" + ) + os.environ["LC_ALL"] = "C.UTF-8" + os.environ["PYTHONNOUSERSITE"] = "True" entry_point_script = os.path.join( - os.path.dirname(os.path.abspath(__file__)), "generate.py") + os.path.dirname(os.path.abspath(__file__)), "generate.py" + ) args = [sys.executable, entry_point_script] + sys.argv[1:] os.execv(args[0], args) diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py index 6f13043b8fe6..bf9fc1049317 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/configured_snippet.py @@ -163,9 +163,9 @@ def _extend_sample_function_def_body( # The result of applying a transformer could be of a different type # in general, but we will only update the sample function def here. - self._sample_function_def = self._sample_function_def.visit( + self._sample_function_def = self._sample_function_def.visit( # type: ignore transformer - ) # type: ignore + ) def _add_sample_function_parameters(self) -> None: """Adds sample function parameters. 
@@ -230,13 +230,11 @@ def _build_sample_function(self) -> None: # TODO: https://github.com/googleapis/gapic-generator-python/issues/1536, add return type. # TODO: https://github.com/googleapis/gapic-generator-python/issues/1538, add docstring. self._add_sample_function_parameters() - self._extend_sample_function_def_body( - self._get_service_client_initialization()) + self._extend_sample_function_def_body(self._get_service_client_initialization()) self._extend_sample_function_def_body(self._get_call()) def _add_sample_function(self) -> None: - self._module = self._module.with_changes( - body=[self._sample_function_def]) + self._module = self._module.with_changes(body=[self._sample_function_def]) def generate(self) -> None: """Generates the snippet. diff --git a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py index 6124b262a70f..3d71d8a7f25a 100644 --- a/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py +++ b/packages/gapic-generator/gapic/configurable_snippetgen/libcst_utils.py @@ -46,8 +46,7 @@ def convert_expression( string_value = config_expression.string_value return libcst.SimpleString(value=f'"{string_value}"') else: - raise ValueError( - f"Conversion from Expression value {value_name} unsupported.") + raise ValueError(f"Conversion from Expression value {value_name} unsupported.") def convert_parameter( @@ -65,12 +64,10 @@ def convert_py_dict(key_value_pairs: Sequence[Tuple[str, str]]) -> libcst.Dict: elements = [] for key, value in key_value_pairs: if not (isinstance(key, str) and isinstance(value, str)): - raise ValueError( - f"convert_py_dict supports only string keys and values.") + raise ValueError(f"convert_py_dict supports only string keys and values.") elements.append( libcst.DictElement( - libcst.SimpleString( - f'"{key}"'), libcst.SimpleString(f'"{value}"') + libcst.SimpleString(f'"{key}"'), libcst.SimpleString(f'"{value}"') ) ) 
return libcst.Dict(elements=elements) diff --git a/packages/gapic-generator/gapic/generator/__init__.py b/packages/gapic-generator/gapic/generator/__init__.py index 3172c0b549d4..579201a62800 100644 --- a/packages/gapic-generator/gapic/generator/__init__.py +++ b/packages/gapic-generator/gapic/generator/__init__.py @@ -21,6 +21,4 @@ from .generator import Generator -__all__ = ( - 'Generator', -) +__all__ = ("Generator",) diff --git a/packages/gapic-generator/gapic/generator/formatter.py b/packages/gapic-generator/gapic/generator/formatter.py index e860e19a6869..c76b20ac3669 100644 --- a/packages/gapic-generator/gapic/generator/formatter.py +++ b/packages/gapic-generator/gapic/generator/formatter.py @@ -29,13 +29,13 @@ def fix_whitespace(code: str) -> str: str: Formatted code. """ # Remove trailing whitespace from any line. - code = re.sub(r'[ ]+\n', '\n', code) + code = re.sub(r"[ ]+\n", "\n", code) # Ensure at most two blank lines before top level definitions. - code = re.sub(r'\s+\n\s*\n\s*\n(class|def|@|#|_)', r'\n\n\n\1', code) + code = re.sub(r"\s+\n\s*\n\s*\n(class|def|@|#|_)", r"\n\n\n\1", code) # Ensure at most one line before nested definitions. - code = re.sub(r'\s+\n\s*\n(( )+)(\w|_|@|#)', r'\n\n\1\3', code) + code = re.sub(r"\s+\n\s*\n(( )+)(\w|_|@|#)", r"\n\n\1\3", code) # All files shall end in one and exactly one line break. 
- return f'{code.rstrip()}\n' + return f"{code.rstrip()}\n" diff --git a/packages/gapic-generator/gapic/generator/generator.py b/packages/gapic-generator/gapic/generator/generator.py index 3d490dcb3901..f42e40655edf 100644 --- a/packages/gapic-generator/gapic/generator/generator.py +++ b/packages/gapic-generator/gapic/generator/generator.py @@ -22,7 +22,11 @@ from typing import Any, DefaultDict, Dict, Mapping, Optional, Tuple from hashlib import sha256 from collections import OrderedDict, defaultdict -from gapic.samplegen_utils.utils import coerce_response_name, is_valid_sample_cfg, render_format_string +from gapic.samplegen_utils.utils import ( + coerce_response_name, + is_valid_sample_cfg, + render_format_string, +) from gapic.samplegen_utils.types import DuplicateSample from gapic.samplegen_utils import snippet_index, snippet_metadata_pb2 from gapic.samplegen import manifest, samplegen @@ -48,7 +52,7 @@ class Generator: """ def __init__(self, opts: Options) -> None: - # Create the jinja environment with which to render templates. + # Create the jinja environment with which to render templates. self._env = jinja2.Environment( loader=jinja2.FileSystemLoader(searchpath=opts.templates), undefined=jinja2.StrictUndefined, @@ -73,9 +77,7 @@ def __init__(self, opts: Options) -> None: self._sample_configs = opts.sample_configs - def get_response( - self, api_schema: api.API, opts: Options - ) -> CodeGeneratorResponse: + def get_response(self, api_schema: api.API, opts: Options) -> CodeGeneratorResponse: """Return a :class:`~.CodeGeneratorResponse` for this library. 
This is a complete response to be written to (usually) stdout, and @@ -91,8 +93,7 @@ def get_response( """ output_files: Dict[str, CodeGeneratorResponse.File] = OrderedDict() sample_templates, client_templates = utils.partition( - lambda fname: os.path.basename( - fname) == samplegen.DEFAULT_TEMPLATE_NAME, + lambda fname: os.path.basename(fname) == samplegen.DEFAULT_TEMPLATE_NAME, self._env.loader.list_templates(), # type: ignore ) @@ -101,8 +102,9 @@ def get_response( snippet_idx = snippet_index.SnippetIndex(api_schema) if sample_templates: sample_output, snippet_idx = self._generate_samples_and_manifest( - api_schema, snippet_idx, self._env.get_template( - sample_templates[0]), + api_schema, + snippet_idx, + self._env.get_template(sample_templates[0]), opts=opts, ) output_files.update(sample_output) @@ -121,17 +123,28 @@ def get_response( # Append to the output files dictionary. output_files.update( self._render_template( - template_name, api_schema=api_schema, opts=opts, snippet_index=snippet_idx) + template_name, + api_schema=api_schema, + opts=opts, + snippet_index=snippet_idx, + ) ) # Return the CodeGeneratorResponse output. res = CodeGeneratorResponse( - file=[i for i in output_files.values()]) # type: ignore + file=[i for i in output_files.values()] + ) # type: ignore res.supported_features |= CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL # type: ignore return res def _generate_samples_and_manifest( - self, api_schema: api.API, index: snippet_index.SnippetIndex, sample_template: jinja2.Template, *, opts: Options) -> Tuple[Dict, snippet_index.SnippetIndex]: + self, + api_schema: api.API, + index: snippet_index.SnippetIndex, + sample_template: jinja2.Template, + *, + opts: Options, + ) -> Tuple[Dict, snippet_index.SnippetIndex]: """Generate samples and samplegen manifest for the API. 
Arguments: @@ -145,18 +158,15 @@ def _generate_samples_and_manifest( # The two-layer data structure lets us do two things: # * detect duplicate samples, which is an error # * detect distinct samples with the same ID, which are disambiguated - id_to_hash_to_spec: DefaultDict[str, - Dict[str, Any]] = defaultdict(dict) + id_to_hash_to_spec: DefaultDict[str, Dict[str, Any]] = defaultdict(dict) # Autogenerated sample specs autogen_specs: typing.List[typing.Dict[str, Any]] = [] if opts.autogen_snippets: - autogen_specs = list( - samplegen.generate_sample_specs(api_schema, opts=opts)) + autogen_specs = list(samplegen.generate_sample_specs(api_schema, opts=opts)) # Also process any handwritten sample specs - handwritten_specs = samplegen.parse_handwritten_specs( - self._sample_configs) + handwritten_specs = samplegen.parse_handwritten_specs(self._sample_configs) sample_specs = autogen_specs + list(handwritten_specs) @@ -174,8 +184,7 @@ def _generate_samples_and_manifest( hash_to_spec = id_to_hash_to_spec[sample_id] if spec_hash in hash_to_spec: - raise DuplicateSample( - f"Duplicate samplegen spec found: {spec}") + raise DuplicateSample(f"Duplicate samplegen spec found: {spec}") hash_to_spec[spec_hash] = spec @@ -189,7 +198,10 @@ def _generate_samples_and_manifest( spec["id"] += f"_{spec_hash}" sample, snippet_metadata = samplegen.generate_sample( - spec, api_schema, sample_template,) + spec, + api_schema, + sample_template, + ) fpath = utils.to_snake_case(spec["id"]) + ".py" fpath_to_spec_and_rendered[os.path.join(out_dir, fpath)] = ( @@ -200,8 +212,7 @@ def _generate_samples_and_manifest( snippet_metadata.file = fpath snippet_metadata.title = fpath - index.add_snippet( - snippet_index.Snippet(sample, snippet_metadata)) + index.add_snippet(snippet_index.Snippet(sample, snippet_metadata)) output_files = { fname: CodeGeneratorResponse.File( @@ -214,15 +225,24 @@ def _generate_samples_and_manifest( # NOTE(busunkim): Not all fields are yet populated in the snippet metadata. 
# Expected filename: snippet_metadata.{proto_package}.json # For example: snippet_metadata_google.cloud.aiplatform.v1.json - snippet_metadata_path = str(pathlib.Path( - out_dir) / f"snippet_metadata_{api_schema.naming.proto_package}.json").lower() + snippet_metadata_path = str( + pathlib.Path(out_dir) + / f"snippet_metadata_{api_schema.naming.proto_package}.json" + ).lower() output_files[snippet_metadata_path] = CodeGeneratorResponse.File( - content=formatter.fix_whitespace(index.get_metadata_json()), name=snippet_metadata_path) + content=formatter.fix_whitespace(index.get_metadata_json()), + name=snippet_metadata_path, + ) return output_files, index def _render_template( - self, template_name: str, *, api_schema: api.API, opts: Options, snippet_index: snippet_index.SnippetIndex, + self, + template_name: str, + *, + api_schema: api.API, + opts: Options, + snippet_index: snippet_index.SnippetIndex, ) -> Dict[str, CodeGeneratorResponse.File]: """Render the requested templates. @@ -249,10 +269,12 @@ def _render_template( # library. This means that the module names will need to be versioned in # import statements. For example `import google.cloud.library_v2` instead # of `import google.cloud.library`. 
- if template_name.startswith("%namespace/%name/") and \ - api_schema.all_library_settings[ - api_schema.naming.proto_package - ].python_settings.experimental_features.unversioned_package_disabled: + if ( + template_name.startswith("%namespace/%name/") + and api_schema.all_library_settings[ + api_schema.naming.proto_package + ].python_settings.experimental_features.unversioned_package_disabled + ): return answer # Quick check: Rendering per service and per proto would be a @@ -271,7 +293,10 @@ def _render_template( for subpackage in api_schema.subpackages.values(): answer.update( self._render_template( - template_name, api_schema=subpackage, opts=opts, snippet_index=snippet_index + template_name, + api_schema=subpackage, + opts=opts, + snippet_index=snippet_index, ) ) skip_subpackages = True @@ -281,14 +306,18 @@ def _render_template( if "%proto" in template_name: for proto in api_schema.protos.values(): if ( - skip_subpackages - and proto.meta.address.subpackage != api_schema.subpackage_view + skip_subpackages + and proto.meta.address.subpackage != api_schema.subpackage_view ): continue answer.update( self._get_file( - template_name, api_schema=api_schema, proto=proto, opts=opts, snippet_index=snippet_index + template_name, + api_schema=api_schema, + proto=proto, + opts=opts, + snippet_index=snippet_index, ) ) @@ -299,20 +328,31 @@ def _render_template( if "%service" in template_name: for service in api_schema.services.values(): if ( - (skip_subpackages - and service.meta.address.subpackage != api_schema.subpackage_view) - or - ('transport' in template_name - and not self._is_desired_transport(template_name, opts)) - or - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. 
- ('async_client' in template_name and 'grpc' not in opts.transport and - not api_schema.all_library_settings[api_schema.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled) - or - ('rest_asyncio' in template_name and - not api_schema.all_library_settings[api_schema.naming.proto_package].python_settings.experimental_features.rest_async_io_enabled) - or - ('rest_base' in template_name and 'rest' not in opts.transport) + ( + skip_subpackages + and service.meta.address.subpackage + != api_schema.subpackage_view + ) + or ( + "transport" in template_name + and not self._is_desired_transport(template_name, opts) + ) + or + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. + ( + "async_client" in template_name + and "grpc" not in opts.transport + and not api_schema.all_library_settings[ + api_schema.naming.proto_package + ].python_settings.experimental_features.rest_async_io_enabled + ) + or ( + "rest_asyncio" in template_name + and not api_schema.all_library_settings[ + api_schema.naming.proto_package + ].python_settings.experimental_features.rest_async_io_enabled + ) + or ("rest_base" in template_name and "rest" not in opts.transport) ): continue @@ -329,13 +369,19 @@ def _render_template( # This file is not iterating over anything else; return back # the one applicable file. 
- answer.update(self._get_file( - template_name, api_schema=api_schema, opts=opts, snippet_index=snippet_index)) + answer.update( + self._get_file( + template_name, + api_schema=api_schema, + opts=opts, + snippet_index=snippet_index, + ) + ) return answer def _is_desired_transport(self, template_name: str, opts: Options) -> bool: """Returns true if template name contains a desired transport""" - desired_transports = ['__init__', 'base', 'README'] + opts.transport + desired_transports = ["__init__", "base", "README"] + opts.transport return any(transport in template_name for transport in desired_transports) def _get_file( @@ -349,7 +395,10 @@ def _get_file( """Render a template to a protobuf plugin File object.""" # Determine the target filename. fn = self._get_filename( - template_name, api_schema=api_schema, context=context,) + template_name, + api_schema=api_schema, + context=context, + ) # Render the file contents. cgr_file = CodeGeneratorResponse.File( @@ -372,7 +421,11 @@ def _get_file( return {fn: cgr_file} def _get_filename( - self, template_name: str, *, api_schema: api.API, context: Optional[dict] = None, + self, + template_name: str, + *, + api_schema: api.API, + context: Optional[dict] = None, ) -> str: """Return the appropriate output filename for this template. @@ -408,19 +461,22 @@ def _get_filename( ) filename = filename.replace("%version", api_schema.naming.version) filename = filename.replace("%name", api_schema.naming.module_name) - filename = filename.replace( - "%sub", "/".join(api_schema.subpackage_view)) + filename = filename.replace("%sub", "/".join(api_schema.subpackage_view)) # Replace the %service variable if applicable. if context and "service" in context: filename = filename.replace( - "%service", context["service"].module_name,) + "%service", + context["service"].module_name, + ) # Replace the %proto variable if appliable. # In the cases of protos, we also honor subpackages. 
if context and "proto" in context: filename = filename.replace( - "%proto", context["proto"].module_name,) + "%proto", + context["proto"].module_name, + ) # Paths may have empty path segments if components are empty # (e.g. no %version); handle this. diff --git a/packages/gapic-generator/gapic/samplegen/__init__.py b/packages/gapic-generator/gapic/samplegen/__init__.py index 43c58c727f60..c323f9673ca1 100644 --- a/packages/gapic-generator/gapic/samplegen/__init__.py +++ b/packages/gapic-generator/gapic/samplegen/__init__.py @@ -16,6 +16,6 @@ from gapic.samplegen import manifest __all__ = ( - 'manifest', - 'samplegen', + "manifest", + "samplegen", ) diff --git a/packages/gapic-generator/gapic/samplegen/manifest.py b/packages/gapic-generator/gapic/samplegen/manifest.py index 47727e08ef35..42a9a78db8bd 100644 --- a/packages/gapic-generator/gapic/samplegen/manifest.py +++ b/packages/gapic-generator/gapic/samplegen/manifest.py @@ -16,7 +16,7 @@ import time from typing import Optional, Tuple -from gapic.samplegen_utils import (types, yaml) +from gapic.samplegen_utils import types, yaml from gapic.utils import case BASE_PATH_KEY = "base_path" @@ -41,11 +41,11 @@ def generate( - fpaths_and_samples, - api_schema, - *, - environment: yaml.Map = PYTHON3_ENVIRONMENT, - manifest_time: Optional[int] = None + fpaths_and_samples, + api_schema, + *, + environment: yaml.Map = PYTHON3_ENVIRONMENT, + manifest_time: Optional[int] = None, ) -> Tuple[str, yaml.Doc]: """Generate a samplegen manifest for use by sampletest @@ -73,7 +73,8 @@ def transform_path(fpath): fpath = os.path.normpath(fpath) if not fpath.startswith(base_path): raise types.InvalidSampleFpath( - f"Sample fpath does not start with '{base_path}': {fpath}") + f"Sample fpath does not start with '{base_path}': {fpath}" + ) return "'{base_path}/%s'" % os.path.relpath(fpath, base_path) @@ -90,12 +91,12 @@ def transform_path(fpath): # "region_tag" conditional expression. 
yaml.Alias(environment.anchor_name or ""), yaml.KeyVal("sample", sample["id"]), - yaml.KeyVal( - "path", transform_path(fpath) + yaml.KeyVal("path", transform_path(fpath)), + ( + yaml.KeyVal("region_tag", sample["region_tag"]) # type: ignore + if "region_tag" in sample + else yaml.Null ), - (yaml.KeyVal("region_tag", sample["region_tag"]) # type: ignore - if "region_tag" in sample else - yaml.Null), ] for fpath, sample in fpaths_and_samples ], diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py b/packages/gapic-generator/gapic/samplegen/samplegen.py index 9eb2f890b09a..cdc6ee3d142e 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -30,7 +30,18 @@ from gapic.schema import wrappers from collections import defaultdict, namedtuple, ChainMap as chainmap -from typing import Any, ChainMap, Dict, FrozenSet, Generator, List, Mapping, Optional, Sequence, Tuple +from typing import ( + Any, + ChainMap, + Dict, + FrozenSet, + Generator, + List, + Mapping, + Optional, + Sequence, + Tuple, +) # There is no library stub file for this module, so ignore it. from google.api import resource_pb2 # type: ignore @@ -163,13 +174,14 @@ def build( # # It's a precondition that the base field is # a valid field of the request message type. 
- resource_reference = request_type.fields[base].options.Extensions[resource_pb2.resource_reference] + resource_reference = request_type.fields[base].options.Extensions[ + resource_pb2.resource_reference + ] resource_typestr = resource_reference.type or resource_reference.child_type resource_message = None for service in api_schema.services.values(): - resource_message = service.resource_messages_dict.get( - resource_typestr) + resource_message = service.resource_messages_dict.get(resource_typestr) if resource_message is not None: break @@ -178,7 +190,8 @@ def build( f"No message exists for resource: {resource_typestr}", ) resource_message_descriptor = resource_message.options.Extensions[ - resource_pb2.resource] + resource_pb2.resource + ] # The field is only ever empty for singleton attributes. attr_names: List[str] = [a.field for a in attrs] # type: ignore @@ -204,7 +217,12 @@ def build( # so it can be used in sample code as an f-string. pattern = cls.RESOURCE_RE.sub(r"{\g<1>}", pattern) - return cls(base=base, body=attrs, single=None, pattern=pattern,) + return cls( + base=base, + body=attrs, + single=None, + pattern=pattern, + ) @dataclasses.dataclass @@ -214,8 +232,7 @@ class RequestEntry: Deliberatly NOT frozen: is_resource_request is mutable on purpose.""" is_resource_request: bool = False - attrs: List[AttributeRequestSetup] = dataclasses.field( - default_factory=list) + attrs: List[AttributeRequestSetup] = dataclasses.field(default_factory=list) @dataclasses.dataclass(frozen=True) @@ -276,9 +293,7 @@ def __init__(self, method: wrappers.Method, api_schema: api.API): # and whether it's an enum or a message or a primitive type. # The method call response isn't a field, so construct an artificial # field that wraps the response. 
- { - "$resp": MockField(response_type, False) # type: ignore - } + {"$resp": MockField(response_type, False)} # type: ignore ) @staticmethod @@ -300,7 +315,9 @@ def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): transport = sample.setdefault("transport", api.TRANSPORT_GRPC) is_async = transport == api.TRANSPORT_GRPC_ASYNC - sample["client_name"] = service.async_client_name if is_async else service.client_name + sample["client_name"] = ( + service.async_client_name if is_async else service.client_name + ) # the MessageType of the request object passed to the rpc e.g., `ListRequest` sample["request_type"] = rpc.input @@ -316,8 +333,7 @@ def preprocess_sample(sample, api_schema: api.API, rpc: wrappers.Method): # If no request was specified in the config # Add reasonable default values as placeholders if "request" not in sample: - sample["request"] = generate_request_object( - api_schema, service, rpc.input) + sample["request"] = generate_request_object(api_schema, service, rpc.input) # If no response was specified in the config # Add reasonable defaults depending on the type of the sample @@ -488,8 +504,7 @@ def validate_and_transform_request( construction. """ - base_param_to_attrs: Dict[str, - RequestEntry] = defaultdict(RequestEntry) + base_param_to_attrs: Dict[str, RequestEntry] = defaultdict(RequestEntry) for r in request: r_dup = dict(r) val = r_dup.get("value") @@ -516,8 +531,7 @@ def validate_and_transform_request( if input_parameter: self._handle_lvalue( input_parameter, - wrappers.Field( - field_pb=descriptor_pb2.FieldDescriptorProto()), + wrappers.Field(field_pb=descriptor_pb2.FieldDescriptorProto()), ) # The percentage sign is used for setting up resource based requests @@ -538,7 +552,7 @@ def validate_and_transform_request( # It's a resource based request. 
base_param, resource_attr = ( field[:percent_idx], - field[percent_idx + 1:], + field[percent_idx + 1 :], ) request_entry = base_param_to_attrs.get(base_param) if request_entry and not request_entry.is_resource_request: @@ -596,8 +610,7 @@ def validate_response(self, response): for statement in response: if len(statement) != 1: - raise types.InvalidStatement( - "Invalid statement: {}".format(statement)) + raise types.InvalidStatement("Invalid statement: {}".format(statement)) keyword, body = next(iter(statement.items())) validater = self.STATEMENT_DISPATCH_TABLE.get(keyword) @@ -675,8 +688,7 @@ def validate_recursively(expression, scope, depth=0): # See https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/descriptor.proto#L496 # for a better understanding of how map attributes are handled in protobuf if not message or not message.options.map_field: - raise types.BadAttributeLookup( - f"Badly formed mapped field: {base}") + raise types.BadAttributeLookup(f"Badly formed mapped field: {base}") value_field = message.fields.get("value") if not value_field: @@ -703,7 +715,7 @@ def validate_recursively(expression, scope, depth=0): f"Non-terminal attribute is not a message: {base}" ) - return validate_recursively(expression[first_dot + 1:], scope, depth + 1) + return validate_recursively(expression[first_dot + 1 :], scope, depth + 1) return validate_recursively(exp, self.var_defs_) @@ -715,30 +727,28 @@ def _handle_lvalue(self, lval: str, type_: wrappers.Field): """ if lval in RESERVED_WORDS: raise types.ReservedVariableName( - "Tried to define a variable with reserved name: {}".format( - lval) + "Tried to define a variable with reserved name: {}".format(lval) ) # Even though it's valid python to reassign variables to any rvalue, # the samplegen spec prohibits this. 
if lval in self.var_defs_: - raise types.RedefinedVariable( - "Tried to redefine variable: {}".format(lval)) + raise types.RedefinedVariable("Tried to redefine variable: {}".format(lval)) self.var_defs_[lval] = type_ def _validate_format(self, body: List[str]): """Validates a format string and corresponding arguments. - The number of format tokens in the string must equal the - number of arguments, and each argument must be a defined variable. + The number of format tokens in the string must equal the + number of arguments, and each argument must be a defined variable. - Raises: - MismatchedFormatSpecifier: If the number of format string segments ("%s") in - a "print" or "comment" block does not equal the - size number of strings in the block minus 1. - UndefinedVariableReference: If the base lvalue in an expression chain - is not a previously defined lvalue. + Raises: + MismatchedFormatSpecifier: If the number of format string segments ("%s") in + a "print" or "comment" block does not equal the + size number of strings in the block minus 1. + UndefinedVariableReference: If the base lvalue in an expression chain + is not a previously defined lvalue. """ fmt_str = body[0] num_prints = fmt_str.count("%s") @@ -753,7 +763,7 @@ def _validate_format(self, body: List[str]): self.validate_expression(expression) def _validate_define(self, body: str): - """"Validates 'define' statements. + """ "Validates 'define' statements. Adds the defined lvalue to the lexical scope. Other statements can reference it. 
@@ -883,8 +893,7 @@ def _validate_loop(self, loop): segments -= {self.KEY_KWORD, self.VAL_KWORD} if segments: raise types.BadLoop( - "Unexpected keywords in loop statement: {}".format( - segments) + "Unexpected keywords in loop statement: {}".format(segments) ) map_field = self.validate_expression(loop[self.MAP_KWORD]) @@ -921,7 +930,9 @@ def _validate_loop(self, loop): } -def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str, Any], None, None]: +def parse_handwritten_specs( + sample_configs: Sequence[str], +) -> Generator[Dict[str, Any], None, None]: """Parse a handwritten sample spec""" for config_fpath in sample_configs: @@ -932,12 +943,21 @@ def parse_handwritten_specs(sample_configs: Sequence[str]) -> Generator[Dict[str valid = is_valid_sample_cfg(cfg) if not valid: raise types.InvalidConfig( - "Sample config in '{}' is invalid\n\n{}".format(config_fpath, cfg), valid) + "Sample config in '{}' is invalid\n\n{}".format( + config_fpath, cfg + ), + valid, + ) for spec in cfg.get("samples", []): yield spec -def generate_request_object(api_schema: api.API, service: wrappers.Service, message: wrappers.MessageType, field_name_prefix: str = ""): +def generate_request_object( + api_schema: api.API, + service: wrappers.Service, + message: wrappers.MessageType, + field_name_prefix: str = "", +): """Generate dummy input for a given message. Args: @@ -956,23 +976,24 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess # There is no standard syntax to mark a oneof as "required" in protos. # Assume every oneof is required and pick the first option # in each oneof. 
- selected_oneofs: List[wrappers.Field] = [oneof_fields[0] - for oneof_fields in message.oneof_fields().values()] + selected_oneofs: List[wrappers.Field] = [ + oneof_fields[0] for oneof_fields in message.oneof_fields().values() + ] # Don't add required fields if they're also marked as oneof - required_fields = [ - field for field in message.required_fields if not field.oneof] + required_fields = [field for field in message.required_fields if not field.oneof] request_fields = selected_oneofs + required_fields for field in request_fields: # TransformedRequest expects nested fields to be referenced like # `destination.input_config.name` - field_name = ".".join([field_name_prefix, field.name]).lstrip('.') + field_name = ".".join([field_name_prefix, field.name]).lstrip(".") # TODO(busunkim): Properly handle map fields if field.is_primitive: request.append( - {"field": field_name, "value": field.mock_value_original_type}) + {"field": field_name, "value": field.mock_value_original_type} + ) elif field.enum: # Choose the last enum value in the list since index 0 is often "unspecified" enum_value = field.enum.values[-1].name @@ -981,8 +1002,7 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess else: field_value = enum_value - request.append( - {"field": field_name, "value": field_value}) + request.append({"field": field_name, "value": field_value}) else: # This is a message type, recurse # TODO(busunkim): Some real world APIs have @@ -990,7 +1010,9 @@ def generate_request_object(api_schema: api.API, service: wrappers.Service, mess # Reference `Field.mock_value` to ensure # this always terminates. 
request += generate_request_object( - api_schema, service, field.type, + api_schema, + service, + field.type, field_name_prefix=field_name, ) @@ -1009,7 +1031,9 @@ def _supports_grpc(service) -> bool: return api.TRANSPORT_GRPC in service.clients.keys() -def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, Any], None, None]: +def generate_sample_specs( + api_schema: api.API, *, opts +) -> Generator[Dict[str, Any], None, None]: """Given an API, generate basic sample specs for each method. If a service supports gRPC transport, we do not generate @@ -1025,7 +1049,9 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A gapic_metadata = api_schema.gapic_metadata(opts) for service_name, service in gapic_metadata.services.items(): - api_short_name = api_schema.services[f"{api_schema.naming.proto_package}.{service_name}"].shortname + api_short_name = api_schema.services[ + f"{api_schema.naming.proto_package}.{service_name}" + ].shortname api_version = api_schema.naming.version supports_grpc = _supports_grpc(service) for transport, client in service.clients.items(): @@ -1048,7 +1074,7 @@ def generate_sample_specs(api_schema: api.API, *, opts) -> Generator[Dict[str, A # `request` and `response` are populated in `preprocess_sample` "service": f"{api_schema.naming.proto_package}.{service_name}", "region_tag": region_tag, - "description": f"Snippet for {utils.to_snake_case(rpc_name)}" + "description": f"Snippet for {utils.to_snake_case(rpc_name)}", } yield spec @@ -1074,46 +1100,75 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): snippet_metadata.origin = snippet_metadata_pb2.Snippet.Origin.API_DEFINITION # type: ignore # Service Client - snippet_metadata.client_method.client.short_name = service.async_client_name if async_ else service.client_name + snippet_metadata.client_method.client.short_name = ( + service.async_client_name if async_ else service.client_name + ) 
snippet_metadata.client_method.client.full_name = f"{'.'.join(sample['module_namespace'])}.{sample['module_name']}.{snippet_metadata.client_method.client.short_name}" # Service snippet_metadata.client_method.method.service.short_name = service.name - snippet_metadata.client_method.method.service.full_name = f"{api_schema.naming.proto_package}.{service.name}" + snippet_metadata.client_method.method.service.full_name = ( + f"{api_schema.naming.proto_package}.{service.name}" + ) # RPC snippet_metadata.client_method.method.short_name = method.name - snippet_metadata.client_method.method.full_name = f"{api_schema.naming.proto_package}.{service.name}.{method.name}" + snippet_metadata.client_method.method.full_name = ( + f"{api_schema.naming.proto_package}.{service.name}.{method.name}" + ) # Client Method setattr(snippet_metadata.client_method, "async", async_) - snippet_metadata.client_method.short_name = utils.to_snake_case( - method.name) + snippet_metadata.client_method.short_name = utils.to_snake_case(method.name) snippet_metadata.client_method.full_name = f"{snippet_metadata.client_method.client.full_name}.{snippet_metadata.client_method.short_name}" if not method.void: - snippet_metadata.client_method.result_type = method.client_output_async.ident.sphinx if async_ else method.client_output.ident.sphinx + snippet_metadata.client_method.result_type = ( + method.client_output_async.ident.sphinx + if async_ + else method.client_output.ident.sphinx + ) if method.server_streaming: - snippet_metadata.client_method.result_type = f"Iterable[{snippet_metadata.client_method.result_type}]" + snippet_metadata.client_method.result_type = ( + f"Iterable[{snippet_metadata.client_method.result_type}]" + ) # Client Method Parameters parameters = snippet_metadata.client_method.parameters if not method.client_streaming: - parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - type=method.input.ident.sphinx, name="request")) + parameters.append( + 
snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + type=method.input.ident.sphinx, name="request" + ) + ) for field in method.flattened_fields.values(): - parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - type=field.ident.sphinx, name=field.name)) + parameters.append( + snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + type=field.ident.sphinx, name=field.name + ) + ) else: - parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - type=f"Iterator[{method.input.ident.sphinx}]", name="requests")) + parameters.append( + snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + type=f"Iterator[{method.input.ident.sphinx}]", name="requests" + ) + ) - parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - name="retry", type="google.api_core.retry.Retry")) - parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - name="timeout", type="float")) - parameters.append(snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore - name="metadata", type="Sequence[Tuple[str, Union[str, bytes]]]")) + parameters.append( + snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + name="retry", type="google.api_core.retry.Retry" + ) + ) + parameters.append( + snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + name="timeout", type="float" + ) + ) + parameters.append( + snippet_metadata_pb2.ClientMethod.Parameter( # type: ignore + name="metadata", type="Sequence[Tuple[str, Union[str, bytes]]]" + ) + ) return snippet_metadata @@ -1134,7 +1189,9 @@ def _get_sample_imports(sample: Dict, rpc: wrappers.Method) -> List[str]: return sorted([module_import, request_import]) -def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tuple[str, Any]: +def generate_sample( + sample, api_schema, sample_template: jinja2.Template +) -> Tuple[str, Any]: """Generate a standalone, runnable sample. 
Writing the rendered output is left for the caller. @@ -1156,8 +1213,7 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tup rpc = service.methods.get(rpc_name) if not rpc: raise types.RpcMethodNotFound( - "Could not find rpc in service {}: {}".format( - service_name, rpc_name) + "Could not find rpc in service {}: {}".format(service_name, rpc_name) ) calling_form = types.CallingForm.method_default(rpc) @@ -1177,11 +1233,14 @@ def generate_sample(sample, api_schema, sample_template: jinja2.Template) -> Tup # The sample must be preprocessed before calling _get_sample_imports. imports = _get_sample_imports(sample, rpc) - return sample_template.render( - sample=sample, - imports=imports, - calling_form=calling_form, - calling_form_enum=types.CallingForm, - trim_blocks=True, - lstrip_blocks=True, - ), snippet_metadata + return ( + sample_template.render( + sample=sample, + imports=imports, + calling_form=calling_form, + calling_form_enum=types.CallingForm, + trim_blocks=True, + lstrip_blocks=True, + ), + snippet_metadata, + ) diff --git a/packages/gapic-generator/gapic/samplegen_utils/__init__.py b/packages/gapic-generator/gapic/samplegen_utils/__init__.py index 423d58541e74..c3ef757620ef 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/__init__.py +++ b/packages/gapic-generator/gapic/samplegen_utils/__init__.py @@ -17,7 +17,7 @@ import gapic.samplegen_utils.yaml __all__ = ( - 'types', - 'utils', - 'yaml', + "types", + "utils", + "yaml", ) diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index 29e3a5923e11..07003d701ef1 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -46,18 +46,23 @@ def _parse_snippet_segments(self): self.sample_lines = self.sample_str.splitlines(keepends=True) self._full_snippet = snippet_metadata_pb2.Snippet.Segment( 
- type=snippet_metadata_pb2.Snippet.Segment.SegmentType.FULL) + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.FULL + ) self._short_snippet = snippet_metadata_pb2.Snippet.Segment( - type=snippet_metadata_pb2.Snippet.Segment.SegmentType.SHORT) + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.SHORT + ) self._client_init = snippet_metadata_pb2.Snippet.Segment( - type=snippet_metadata_pb2.Snippet.Segment.SegmentType.CLIENT_INITIALIZATION) + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.CLIENT_INITIALIZATION + ) self._request_init = snippet_metadata_pb2.Snippet.Segment( - type=snippet_metadata_pb2.Snippet.Segment.SegmentType.REQUEST_INITIALIZATION) + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.REQUEST_INITIALIZATION + ) self._request_exec = snippet_metadata_pb2.Snippet.Segment( - type=snippet_metadata_pb2.Snippet.Segment.SegmentType.REQUEST_EXECUTION) + type=snippet_metadata_pb2.Snippet.Segment.SegmentType.REQUEST_EXECUTION + ) self._response_handling = snippet_metadata_pb2.Snippet.Segment( type=snippet_metadata_pb2.Snippet.Segment.SegmentType.RESPONSE_HANDLING, - end=len(self.sample_lines) + end=len(self.sample_lines), ) # Index starts at 1 since these represent line numbers @@ -80,8 +85,16 @@ def _parse_snippet_segments(self): self._request_exec.end = i - 1 self._response_handling.start = i - self.metadata.segments.extend([self._full_snippet, self._short_snippet, self._client_init, - self._request_init, self._request_exec, self._response_handling]) + self.metadata.segments.extend( + [ + self._full_snippet, + self._short_snippet, + self._client_init, + self._request_init, + self._request_exec, + self._response_handling, + ] + ) @property def full_snippet(self) -> str: @@ -101,17 +114,20 @@ class SnippetIndex: def __init__(self, api_schema: api.API): self.metadata_index = snippet_metadata_pb2.Index() # type: ignore - self.metadata_index.client_library.name = api_schema.naming.warehouse_package_name + 
self.metadata_index.client_library.name = ( + api_schema.naming.warehouse_package_name + ) self.metadata_index.client_library.language = snippet_metadata_pb2.Language.PYTHON # type: ignore # This is just a placeholder. release-please is responsible for # updating the metadata file to the correct library version. self.metadata_index.client_library.version = "0.1.0" - self.metadata_index.client_library.apis.append(snippet_metadata_pb2.Api( # type: ignore - id=api_schema.naming.proto_package, - version=api_schema.naming.version - )) + self.metadata_index.client_library.apis.append( + snippet_metadata_pb2.Api( # type: ignore + id=api_schema.naming.proto_package, version=api_schema.naming.version + ) + ) # Construct a dictionary to insert samples into based on the API schema # NOTE: In the future we expect the generator to support configured samples, @@ -141,12 +157,16 @@ def add_snippet(self, snippet: Snippet) -> None: service = self._index.get(service_name) if service is None: raise types.UnknownService( - "API does not have a service named '{}'.".format(service_name)) + "API does not have a service named '{}'.".format(service_name) + ) method = service.get(rpc_name) if method is None: raise types.RpcMethodNotFound( - "API does not have method '{}' in service '{}'".format(rpc_name, service_name)) + "API does not have method '{}' in service '{}'".format( + rpc_name, service_name + ) + ) if getattr(snippet.metadata.client_method, "async"): method["async"] = snippet @@ -155,7 +175,9 @@ def add_snippet(self, snippet: Snippet) -> None: self.metadata_index.snippets.append(snippet.metadata) - def get_snippet(self, service_name: str, rpc_name: str, sync: bool = True) -> Optional[Snippet]: + def get_snippet( + self, service_name: str, rpc_name: str, sync: bool = True + ) -> Optional[Snippet]: """Fetch a single snippet from the index. 
Args: @@ -174,11 +196,15 @@ def get_snippet(self, service_name: str, rpc_name: str, sync: bool = True) -> Op service = self._index.get(service_name) if service is None: raise types.UnknownService( - "API does not have a service named '{}'.".format(service_name)) + "API does not have a service named '{}'.".format(service_name) + ) method = service.get(rpc_name) if method is None: raise types.RpcMethodNotFound( - "API does not have method '{}' in service '{}'".format(rpc_name, service_name)) + "API does not have method '{}' in service '{}'".format( + rpc_name, service_name + ) + ) return method["sync" if sync else "async"] diff --git a/packages/gapic-generator/gapic/samplegen_utils/types.py b/packages/gapic-generator/gapic/samplegen_utils/types.py index 48a086f953ab..b1b9012bef6e 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/types.py +++ b/packages/gapic-generator/gapic/samplegen_utils/types.py @@ -117,8 +117,11 @@ def method_default(cls, m): if m.paged_result_field: return cls.RequestPagedAll if m.client_streaming: - return (cls.RequestStreamingBidi if m.server_streaming else - cls.RequestStreamingClient) + return ( + cls.RequestStreamingBidi + if m.server_streaming + else cls.RequestStreamingClient + ) if m.server_streaming: return cls.RequestStreamingServer diff --git a/packages/gapic-generator/gapic/samplegen_utils/utils.py b/packages/gapic-generator/gapic/samplegen_utils/utils.py index 27ccb50efafc..dd2fd9eda405 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/utils.py +++ b/packages/gapic-generator/gapic/samplegen_utils/utils.py @@ -18,7 +18,7 @@ import os import yaml -from typing import (Generator, Tuple, List, Union) +from typing import Generator, Tuple, List, Union from gapic.samplegen_utils import types @@ -37,7 +37,7 @@ def render_format_string(s: str, expressions: List[str] = []) -> str: expressions (Optional[List[str]]): A list of expressions. 
""" - s = s.replace('\"', '\\\"') + s = s.replace('"', '\\"') for exp in expressions: # some expressions will contain references to "$resp" @@ -62,9 +62,9 @@ def coerce_response_name(s: str) -> str: def is_valid_sample_cfg( - doc, - min_version: Tuple[int, int, int] = MIN_SCHEMA_VERSION, - config_type: str = VALID_CONFIG_TYPE, + doc, + min_version: Tuple[int, int, int] = MIN_SCHEMA_VERSION, + config_type: str = VALID_CONFIG_TYPE, ) -> bool: """Predicate that takes a parsed yaml doc checks if it is a valid sample config. @@ -78,6 +78,7 @@ def is_valid_sample_cfg( bool: True if doc is a valid sample config document. """ + def parse_version(version_str: str) -> Tuple[int, ...]: return tuple(int(tok) for tok in version_str.split(".")) @@ -114,14 +115,14 @@ def generate_all_sample_fpaths(path: str) -> Generator[str, None, None]: # Directly named files, however, should generate an error, because silently # ignoring them is less helpful than failing loudly. if os.path.isfile(path): - if not path.endswith('.yaml'): + if not path.endswith(".yaml"): raise types.InvalidConfig(f"Not a yaml file: {path}") with open(path) as f: - if not any(is_valid_sample_cfg(doc) - for doc in yaml.safe_load_all(f.read())): - raise types.InvalidConfig( - f"No valid sample config in file: {path}") + if not any( + is_valid_sample_cfg(doc) for doc in yaml.safe_load_all(f.read()) + ): + raise types.InvalidConfig(f"No valid sample config in file: {path}") yield path # Note: if we ever need to recursively check directories for sample configs, diff --git a/packages/gapic-generator/gapic/samplegen_utils/yaml.py b/packages/gapic-generator/gapic/samplegen_utils/yaml.py index b1251e31a949..bb558cb2698c 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/yaml.py +++ b/packages/gapic-generator/gapic/samplegen_utils/yaml.py @@ -28,6 +28,7 @@ class Element(ABC): """Abstract element that can be rendered.""" + INDENT_SPACES: int = 2 @abstractmethod @@ -44,6 +45,7 @@ def render(self, spaces: int = 0) -> 
str: @dataclasses.dataclass(frozen=True) class KeyVal(Element): """A single key/value entry.""" + key: str val: str @@ -55,6 +57,7 @@ def render(self, spaces: int = 0) -> str: @dataclasses.dataclass(frozen=True) class Collection(Element): """An ordered list of subobjects.""" + name: str elements: List[List[Element]] @@ -72,9 +75,7 @@ def render(self, spaces: int = 0) -> str: indent( "-" + "\n".join( - r - for r in (e.render(spaces + self.INDENT_SPACES) for e in l) - if r + r for r in (e.render(spaces + self.INDENT_SPACES) for e in l) if r )[1:], " " * (spaces), ) @@ -85,6 +86,7 @@ def render(self, spaces: int = 0) -> str: @dataclasses.dataclass(frozen=True) class Alias(Element): """An anchor to a map.""" + target: str def render(self, spaces: int = 0) -> str: @@ -95,6 +97,7 @@ def render(self, spaces: int = 0) -> str: @dataclasses.dataclass(frozen=True) class Map(Element): """A named collection with a list of attributes.""" + name: str anchor_name: Optional[str] elements: List[Element] @@ -112,17 +115,18 @@ def get(self, key, default=None): # See https://github.com/pytest-dev/pytest-cov/issues/310 for details. 
return next( iter( - [e.val # type: ignore - for e in self.elements - if e.key == key] # type: ignore + [ + e.val for e in self.elements if e.key == key # type: ignore + ] # type: ignore ), - default + default, ) @dataclasses.dataclass(frozen=True) class Doc(Element): """A yaml document""" + elements: List[Element] def render(self): diff --git a/packages/gapic-generator/gapic/schema/__init__.py b/packages/gapic-generator/gapic/schema/__init__.py index 56a1998909e0..8232561661cd 100644 --- a/packages/gapic-generator/gapic/schema/__init__.py +++ b/packages/gapic-generator/gapic/schema/__init__.py @@ -26,7 +26,7 @@ __all__ = ( - 'API', - 'metadata', - 'wrappers', + "API", + "metadata", + "wrappers", ) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 70b402b50af5..b9e95c304e5c 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -24,7 +24,18 @@ import os import sys from types import MappingProxyType -from typing import Callable, Container, Dict, FrozenSet, Iterable, Mapping, Optional, Sequence, Set, Tuple +from typing import ( + Callable, + Container, + Dict, + FrozenSet, + Iterable, + Mapping, + Optional, + Sequence, + Set, + Tuple, +) import yaml from google.api_core import exceptions @@ -65,6 +76,7 @@ class MethodSettingsError(ValueError): Raised when `google.api.client_pb2.MethodSettings` contains an invalid value. """ + pass @@ -73,6 +85,7 @@ class ClientLibrarySettingsError(ValueError): Raised when `google.api.client_pb2.ClientLibrarySettings` contains an invalid value. 
""" + pass @@ -94,15 +107,15 @@ def __getattr__(self, name: str): @classmethod def build( - cls, - file_descriptor: descriptor_pb2.FileDescriptorProto, - file_to_generate: bool, - naming: api_naming.Naming, - opts: Options = Options(), - prior_protos: Optional[Mapping[str, 'Proto']] = None, - load_services: bool = True, - all_resources: Optional[Mapping[str, wrappers.MessageType]] = None, - ) -> 'Proto': + cls, + file_descriptor: descriptor_pb2.FileDescriptorProto, + file_to_generate: bool, + naming: api_naming.Naming, + opts: Options = Options(), + prior_protos: Optional[Mapping[str, "Proto"]] = None, + load_services: bool = True, + all_resources: Optional[Mapping[str, wrappers.MessageType]] = None, + ) -> "Proto": """Build and return a Proto instance. Args: @@ -131,17 +144,15 @@ def build( @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: """Return top-level enums on the proto.""" - return collections.OrderedDict([ - (k, v) for k, v in self.all_enums.items() - if not v.meta.address.parent - ]) + return collections.OrderedDict( + [(k, v) for k, v in self.all_enums.items() if not v.meta.address.parent] + ) @cached_property def messages(self) -> Mapping[str, wrappers.MessageType]: """Return top-level messages on the proto.""" return collections.OrderedDict( - (k, v) for k, v in self.all_messages.items() - if not v.meta.address.parent + (k, v) for k, v in self.all_messages.items() if not v.meta.address.parent ) @cached_property @@ -149,7 +160,9 @@ def resource_messages(self) -> Mapping[str, wrappers.MessageType]: """Return the file level resources of the proto.""" file_resource_messages = ( (res.type, wrappers.CommonResource.build(res).message_type) - for res in self.file_pb2.options.Extensions[resource_pb2.resource_definition] + for res in self.file_pb2.options.Extensions[ + resource_pb2.resource_definition + ] ) resource_messages = ( (msg.options.Extensions[resource_pb2.resource].type, msg) @@ -158,7 +171,8 @@ def resource_messages(self) -> 
Mapping[str, wrappers.MessageType]: ) return collections.OrderedDict( itertools.chain( - file_resource_messages, resource_messages, + file_resource_messages, + resource_messages, ) ) @@ -170,7 +184,7 @@ def module_name(self) -> str: str: The module name for this service (which is the service name in snake case). """ - return to_snake_case(self.name.split('/')[-1][:-len('.proto')]) + return to_snake_case(self.name.split("/")[-1][: -len(".proto")]) @cached_property def names(self) -> FrozenSet[str]: @@ -220,7 +234,8 @@ def python_modules(self) -> Sequence[Tuple[str, str]]: for m in self.all_messages.values() # Quick check: We do make sure that we are not trying to have # a module import itself. - for t in m.field_types if t.ident.python_import != self_reference + for t in m.field_types + if t.ident.python_import != self_reference } # Done; return the sorted sequence. @@ -234,14 +249,16 @@ def disambiguate(self, string: str) -> str: it will cause a naming collision with messages or fields in this proto. """ if string in self.names: - return self.disambiguate(f'_{string}') + return self.disambiguate(f"_{string}") return string - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - method_allowlist: Set[str], - resource_messages: Dict[str, 'wrappers.MessageType'], - ) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + method_allowlist: Set[str], + resource_messages: Dict[str, "wrappers.MessageType"], + ) -> None: """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. 
This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -267,13 +284,16 @@ def add_to_address_allowlist(self, *, service.name: service for service in self.services.values() } for service in self.services.values(): - service.add_to_address_allowlist(address_allowlist=address_allowlist, - method_allowlist=method_allowlist, - resource_messages=resource_messages, - services_in_proto=services_in_proto) + service.add_to_address_allowlist( + address_allowlist=address_allowlist, + method_allowlist=method_allowlist, + resource_messages=resource_messages, + services_in_proto=services_in_proto, + ) - def prune_messages_for_selective_generation(self, *, - address_allowlist: Set['metadata.Address']) -> Optional['Proto']: + def prune_messages_for_selective_generation( + self, *, address_allowlist: Set["metadata.Address"] + ) -> Optional["Proto"]: """Returns a truncated version of this Proto. Only the services, messages, and enums contained in the allowlist @@ -299,34 +319,28 @@ def prune_messages_for_selective_generation(self, *, # non-allowlisted methods. 
services = { k: v.prune_messages_for_selective_generation( - address_allowlist=address_allowlist) + address_allowlist=address_allowlist + ) for k, v in self.services.items() if v.meta.address in address_allowlist } all_messages = { - k: v - for k, v in self.all_messages.items() - if v.ident in address_allowlist + k: v for k, v in self.all_messages.items() if v.ident in address_allowlist } all_enums = { - k: v - for k, v in self.all_enums.items() - if v.ident in address_allowlist + k: v for k, v in self.all_enums.items() if v.ident in address_allowlist } if not services and not all_messages and not all_enums: return None return dataclasses.replace( - self, - services=services, - all_messages=all_messages, - all_enums=all_enums + self, services=services, all_messages=all_messages, all_enums=all_enums ) - def with_internal_methods(self, *, public_methods: Set[str]) -> 'Proto': + def with_internal_methods(self, *, public_methods: Set[str]) -> "Proto": """Returns a version of this Proto with some Methods marked as internal. The methods not in the public_methods set will be marked as internal and @@ -359,6 +373,7 @@ class API: An instance of this object is made available to every template (as ``api``). """ + naming: api_naming.Naming all_protos: Mapping[str, Proto] service_yaml_config: service_pb2.Service @@ -368,10 +383,10 @@ class API: def build( cls, file_descriptors: Sequence[descriptor_pb2.FileDescriptorProto], - package: str = '', + package: str = "", opts: Options = Options(), - prior_protos: Optional[Mapping[str, 'Proto']] = None, - ) -> 'API': + prior_protos: Optional[Mapping[str, "Proto"]] = None, + ) -> "API": """Build the internal API schema based on the request. Args: @@ -388,18 +403,25 @@ def build( Primarily used for testing. """ # Save information about the overall naming for this API. 
- naming = api_naming.Naming.build(*filter( - lambda fd: fd.package.startswith(package), - file_descriptors, - ), opts=opts) + naming = api_naming.Naming.build( + *filter( + lambda fd: fd.package.startswith(package), + file_descriptors, + ), + opts=opts, + ) # "metadata", "retry", "timeout", and "request" are reserved words in client methods. invalid_module_names = set(keyword.kwlist) | { - "metadata", "retry", "timeout", "request"} + "metadata", + "retry", + "timeout", + "request", + } def disambiguate_keyword_sanitize_fname( - full_path: str, - visited_names: Container[str]) -> str: + full_path: str, visited_names: Container[str] + ) -> str: path, fname = os.path.split(full_path) name, ext = os.path.splitext(fname) @@ -469,21 +491,20 @@ def disambiguate_keyword_sanitize_fname( # Parse the google.api.Service proto from the service_yaml data. service_yaml_config = service_pb2.Service() ParseDict( - opts.service_yaml_config, - service_yaml_config, - ignore_unknown_fields=True + opts.service_yaml_config, service_yaml_config, ignore_unknown_fields=True ) # Third pass for various selective GAPIC settings; these require # settings in the service.yaml and so we build the API object # before doing another pass. - api = cls(naming=naming, - all_protos=protos, - service_yaml_config=service_yaml_config) + api = cls( + naming=naming, all_protos=protos, service_yaml_config=service_yaml_config + ) if package in api.all_library_settings: - selective_gapic_settings = api.all_library_settings[package].python_settings.\ - common.selective_gapic_generation + selective_gapic_settings = api.all_library_settings[ + package + ].python_settings.common.selective_gapic_generation selective_gapic_methods = set(selective_gapic_settings.methods) if selective_gapic_methods: @@ -494,14 +515,14 @@ def disambiguate_keyword_sanitize_fname( # protos that are not dependencies, so we iterate over api.all_protos and copy # all dependencies as is here. 
new_all_protos = { - k: v for k, v in api.all_protos.items() - if k not in api.protos + k: v for k, v in api.all_protos.items() if k not in api.protos } if selective_gapic_settings.generate_omitted_as_internal: for name, proto in api.protos.items(): new_all_protos[name] = proto.with_internal_methods( - public_methods=selective_gapic_methods) + public_methods=selective_gapic_methods + ) else: all_resource_messages = collections.ChainMap( *(proto.resource_messages for proto in protos.values()) @@ -511,56 +532,57 @@ def disambiguate_keyword_sanitize_fname( # then prune each Proto object. We look at metadata.Addresses, not objects, because # objects that refer to the same thing in the proto are different Python objects # in memory. - address_allowlist: Set['metadata.Address'] = set([]) + address_allowlist: Set["metadata.Address"] = set([]) for proto in api.protos.values(): - proto.add_to_address_allowlist(address_allowlist=address_allowlist, - method_allowlist=selective_gapic_methods, - resource_messages=all_resource_messages) + proto.add_to_address_allowlist( + address_allowlist=address_allowlist, + method_allowlist=selective_gapic_methods, + resource_messages=all_resource_messages, + ) # We only prune services/messages/enums from protos that are not dependencies. 
for name, proto in api.protos.items(): - proto_to_generate = proto.prune_messages_for_selective_generation( - address_allowlist=address_allowlist) + proto_to_generate = ( + proto.prune_messages_for_selective_generation( + address_allowlist=address_allowlist + ) + ) if proto_to_generate: new_all_protos[name] = proto_to_generate - api = cls(naming=naming, - all_protos=new_all_protos, - service_yaml_config=service_yaml_config) + api = cls( + naming=naming, + all_protos=new_all_protos, + service_yaml_config=service_yaml_config, + ) return api @cached_property def enums(self) -> Mapping[str, wrappers.EnumType]: """Return a map of all enums available in the API.""" - return collections.ChainMap({}, - *[p.all_enums for p in self.protos.values()], - ) + return collections.ChainMap( + {}, + *[p.all_enums for p in self.protos.values()], + ) @cached_property def messages(self) -> Mapping[str, wrappers.MessageType]: """Return a map of all messages available in the API.""" - return collections.ChainMap({}, - *[p.all_messages for p in self.protos.values()], - ) + return collections.ChainMap( + {}, + *[p.all_messages for p in self.protos.values()], + ) @cached_property def top_level_messages(self) -> Mapping[str, wrappers.MessageType]: """Return a map of all messages that are NOT nested.""" - return { - k: v - for p in self.protos.values() - for k, v in p.messages.items() - } + return {k: v for p in self.protos.values() for k, v in p.messages.items()} @cached_property def top_level_enums(self) -> Mapping[str, wrappers.EnumType]: """Return a map of all messages that are NOT nested.""" - return { - k: v - for p in self.protos.values() - for k, v in p.enums.items() - } + return {k: v for p in self.protos.values() for k, v in p.enums.items()} @cached_property def protos(self) -> Mapping[str, Proto]: @@ -570,28 +592,32 @@ def protos(self) -> Mapping[str, Proto]: of this API but not being directly generated. 
""" view = self.subpackage_view - return collections.OrderedDict([ - (k, v) for k, v in self.all_protos.items() - if v.file_to_generate and - v.meta.address.subpackage[:len(view)] == view - ]) + return collections.OrderedDict( + [ + (k, v) + for k, v in self.all_protos.items() + if v.file_to_generate and v.meta.address.subpackage[: len(view)] == view + ] + ) @cached_property def services(self) -> Mapping[str, wrappers.Service]: """Return a map of all services available in the API.""" - return collections.ChainMap({}, - *[p.services for p in self.protos.values()], - ) + return collections.ChainMap( + {}, + *[p.services for p in self.protos.values()], + ) @cached_property def http_options(self) -> Mapping[str, Sequence[wrappers.HttpRule]]: """Return a map of API-wide http rules.""" - def make_http_options(rule: http_pb2.HttpRule - ) -> Sequence[wrappers.HttpRule]: + def make_http_options(rule: http_pb2.HttpRule) -> Sequence[wrappers.HttpRule]: http_options = [rule] + list(rule.additional_bindings) - opt_gen = (wrappers.HttpRule.try_parse_http_rule(http_rule) - for http_rule in http_options) + opt_gen = ( + wrappers.HttpRule.try_parse_http_rule(http_rule) + for http_rule in http_options + ) return [rule for rule in opt_gen if rule] result: Mapping[str, Sequence[http_pb2.HttpRule]] = { @@ -602,7 +628,7 @@ def make_http_options(rule: http_pb2.HttpRule return result @cached_property - def subpackages(self) -> Mapping[str, 'API']: + def subpackages(self) -> Mapping[str, "API"]: """Return a map of all subpackages, if any. Each value in the mapping is another API object, but the ``protos`` @@ -616,14 +642,18 @@ def subpackages(self) -> Mapping[str, 'API']: # subpackages can be accessed by requesting subpackages of the # derivative API objects returned here. 
level = len(self.subpackage_view) - for subpkg_name in sorted({p.meta.address.subpackage[0] - for p in self.protos.values() - if len(p.meta.address.subpackage) > level and - p.meta.address.subpackage[:level] == self.subpackage_view}): - answer[subpkg_name] = dataclasses.replace(self, - subpackage_view=self.subpackage_view + - (subpkg_name,), - ) + for subpkg_name in sorted( + { + p.meta.address.subpackage[0] + for p in self.protos.values() + if len(p.meta.address.subpackage) > level + and p.meta.address.subpackage[:level] == self.subpackage_view + } + ): + answer[subpkg_name] = dataclasses.replace( + self, + subpackage_view=self.subpackage_view + (subpkg_name,), + ) return answer def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: @@ -633,8 +663,7 @@ def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: language="python", proto_package=self.naming.proto_package, library_package=".".join( - self.naming.module_namespace + - (self.naming.versioned_module_name,) + self.naming.module_namespace + (self.naming.versioned_module_name,) ), ) @@ -647,8 +676,7 @@ def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: transports = [] if "grpc" in options.transport: transports.append((TRANSPORT_GRPC, service.client_name)) - transports.append( - (TRANSPORT_GRPC_ASYNC, service.async_client_name)) + transports.append((TRANSPORT_GRPC_ASYNC, service.async_client_name)) if "rest" in options.transport: transports.append((TRANSPORT_REST, service.client_name)) @@ -667,15 +695,16 @@ def gapic_metadata_json(self, options: Options) -> str: return MessageToJson(self.gapic_metadata(options), sort_keys=True) def requires_package(self, pkg: Tuple[str, ...]) -> bool: - pkg_has_iam_mixin = self.has_iam_mixin and \ - pkg == ('google', 'iam', 'v1') + pkg_has_iam_mixin = self.has_iam_mixin and pkg == ("google", "iam", "v1") return pkg_has_iam_mixin or any( message.ident.package == pkg for proto in self.all_protos.values() 
for message in proto.all_messages.values() ) - def get_custom_operation_service(self, method: "wrappers.Method") -> "wrappers.Service": + def get_custom_operation_service( + self, method: "wrappers.Method" + ) -> "wrappers.Service": """Return the extended operation service that should be polled for progress from a given initial method. @@ -683,20 +712,21 @@ def get_custom_operation_service(self, method: "wrappers.Method") -> "wrappers.S and has an `operation_polling_service` annotation. """ if not method.output.is_extended_operation: - raise ValueError( - f"Method is not an extended operation LRO: {method.name}") + raise ValueError(f"Method is not an extended operation LRO: {method.name}") - op_serv_name = self.naming.proto_package + "." + \ - method.options.Extensions[ex_ops_pb2.operation_service] + op_serv_name = ( + self.naming.proto_package + + "." + + method.options.Extensions[ex_ops_pb2.operation_service] + ) op_serv = self.services.get(op_serv_name) if not op_serv: - raise ValueError( - f"No such service: {op_serv_name}" - ) + raise ValueError(f"No such service: {op_serv_name}") if not op_serv.operation_polling_method: raise ValueError( - f"Service is not an extended operation operation service: {op_serv.name}") + f"Service is not an extended operation operation service: {op_serv.name}" + ) return op_serv @@ -714,14 +744,11 @@ def mixin_api_signatures(self): def mixin_api_methods(self) -> Dict[str, MethodDescriptorProto]: methods: Dict[str, MethodDescriptorProto] = {} if self.has_location_mixin: - methods = {**methods, ** - self._get_methods_from_service(locations_pb2)} + methods = {**methods, **self._get_methods_from_service(locations_pb2)} if not self._has_iam_overrides and self.has_iam_mixin: - methods = {**methods, ** - self._get_methods_from_service(iam_policy_pb2)} + methods = {**methods, **self._get_methods_from_service(iam_policy_pb2)} if self.has_operations_mixin: - methods = {**methods, ** - self._get_methods_from_service(operations_pb2)} + methods 
= {**methods, **self._get_methods_from_service(operations_pb2)} return methods @cached_property @@ -733,8 +760,10 @@ def mixin_http_options(self): m = api_methods[s] http = m.options.Extensions[annotations_pb2.http] http_options = [http] + list(http.additional_bindings) - opt_gen = (wrappers.MixinHttpRule.try_parse_http_rule(http_rule) - for http_rule in http_options) + opt_gen = ( + wrappers.MixinHttpRule.try_parse_http_rule(http_rule) + for http_rule in http_options + ) res[s] = [rule for rule in opt_gen if rule] return res @@ -794,9 +823,7 @@ def enforce_valid_method_settings( method_descriptor = self.all_methods.get(method_settings.selector) # Check if this selector can be mapped to a method in the API. if not method_descriptor: - all_errors[method_settings.selector] = [ - "Method was not found." - ] + all_errors[method_settings.selector] = ["Method was not found."] continue if method_settings.auto_populated_fields: @@ -815,9 +842,7 @@ def enforce_valid_method_settings( selector_errors = [] for field_str in method_settings.auto_populated_fields: if field_str not in top_level_request_message.fields: - selector_errors.append( - f"Field `{field_str}` was not found" - ) + selector_errors.append(f"Field `{field_str}` was not found") else: field = top_level_request_message.fields[field_str] if field.type != wrappers.PrimitiveType.build(str): @@ -831,7 +856,7 @@ def enforce_valid_method_settings( if not field.uuid4: selector_errors.append( f"Field `{field_str}` is not annotated with " - "`google.api.field_info.format = \"UUID4\"." + '`google.api.field_info.format = "UUID4".' 
) if selector_errors: all_errors[method_settings.selector] = selector_errors @@ -873,7 +898,7 @@ def all_library_settings( if self.naming.proto_package not in result: result[self.naming.proto_package] = client_pb2.ClientLibrarySettings( version=self.naming.proto_package - ) + ) return result @@ -905,11 +930,17 @@ def enforce_valid_library_settings( # Check to see if selective gapic generation methods are valid. selective_gapic_errors = {} - for method_name in library_settings.python_settings.common.selective_gapic_generation.methods: + for ( + method_name + ) in ( + library_settings.python_settings.common.selective_gapic_generation.methods + ): if method_name not in self.all_methods: selective_gapic_errors[method_name] = "Method does not exist." elif not method_name.startswith(library_settings.version): - selective_gapic_errors[method_name] = "Mismatched version for method." + selective_gapic_errors[method_name] = ( + "Mismatched version for method." + ) if selective_gapic_errors: all_errors[library_settings.version] = [ @@ -932,7 +963,7 @@ def all_method_settings(self) -> Mapping[str, Sequence[client_pb2.MethodSettings settings read from the service YAML. Raises: - gapic.schema.api.MethodSettingsError: if the method settings do not + gapic.schema.api.MethodSettingsError: if the method settings do not meet the requirements of https://google.aip.dev/client-libraries/4235. 
""" self.enforce_valid_method_settings( @@ -950,23 +981,54 @@ def all_method_settings(self) -> Mapping[str, Sequence[client_pb2.MethodSettings @cached_property def has_location_mixin(self) -> bool: - return len(list(filter(lambda api: api.name == "google.cloud.location.Locations", self.service_yaml_config.apis))) > 0 + return ( + len( + list( + filter( + lambda api: api.name == "google.cloud.location.Locations", + self.service_yaml_config.apis, + ) + ) + ) + > 0 + ) @cached_property def has_iam_mixin(self) -> bool: - return len(list(filter(lambda api: api.name == "google.iam.v1.IAMPolicy", self.service_yaml_config.apis))) > 0 + return ( + len( + list( + filter( + lambda api: api.name == "google.iam.v1.IAMPolicy", + self.service_yaml_config.apis, + ) + ) + ) + > 0 + ) @cached_property def has_operations_mixin(self) -> bool: - return len(list(filter(lambda api: api.name == "google.longrunning.Operations", self.service_yaml_config.apis))) > 0 + return ( + len( + list( + filter( + lambda api: api.name == "google.longrunning.Operations", + self.service_yaml_config.apis, + ) + ) + ) + > 0 + ) @cached_property def _has_iam_overrides(self) -> bool: if not self.has_iam_mixin: return False - iam_mixin_methods: Dict[str, MethodDescriptorProto] = self._get_methods_from_service( - iam_policy_pb2) - for (_, s) in self.services.items(): + iam_mixin_methods: Dict[str, MethodDescriptorProto] = ( + self._get_methods_from_service(iam_policy_pb2) + ) + for _, s in self.services.items(): for m_name in iam_mixin_methods: if m_name in s.methods: return True @@ -980,7 +1042,8 @@ def _get_methods_from_service(self, service_pb) -> Dict[str, MethodDescriptorPro service: ServiceDescriptor = services[service_name] for method in service.methods: fqn = "{}.{}.{}".format( - service_pb.DESCRIPTOR.package, service.name, method.name) + service_pb.DESCRIPTOR.package, service.name, method.name + ) methods[fqn] = method for rule in self.service_yaml_config.http.rules: if rule.selector in methods: @@ 
-1017,6 +1080,7 @@ class _ProtoBuilder: the :attr:`proto` property, and then throw the builder away. Additionally, there should be no reason to use this class outside of this module. """ + EMPTY = descriptor_pb2.SourceCodeInfo.Location() def __init__( @@ -1043,8 +1107,7 @@ def __init__( # the "path", which is a sequence of integers described in more # detail below; this code simply shifts from a list to a dict, # with tuples of paths as the dictionary keys. - self.docs: Dict[Tuple[int, ...], - descriptor_pb2.SourceCodeInfo.Location] = {} + self.docs: Dict[Tuple[int, ...], descriptor_pb2.SourceCodeInfo.Location] = {} for location in file_descriptor.source_code_info.location: self.docs[tuple(location.path)] = location @@ -1056,8 +1119,8 @@ def __init__( # for each item as it is loaded. self.address = metadata.Address( api_naming=naming, - module=file_descriptor.name.split('/')[-1][:-len('.proto')], - package=tuple(file_descriptor.package.split('.')), + module=file_descriptor.name.split("/")[-1][: -len(".proto")], + package=tuple(file_descriptor.package.split(".")), ) # Now iterate over the FileDescriptorProto and pull out each of @@ -1068,12 +1131,20 @@ def __init__( # message (e.g. the hard-code `4` for `message_type` immediately # below is because `repeated DescriptorProto message_type = 4;` in # descriptor.proto itself). 
- self._load_children(file_descriptor.enum_type, self._load_enum, - address=self.address, path=(5,), - resources=all_resources or {}) - self._load_children(file_descriptor.message_type, self._load_message, - address=self.address, path=(4,), - resources=all_resources or {}) + self._load_children( + file_descriptor.enum_type, + self._load_enum, + address=self.address, + path=(5,), + resources=all_resources or {}, + ) + self._load_children( + file_descriptor.message_type, + self._load_message, + address=self.address, + path=(4,), + resources=all_resources or {}, + ) # Edge case: Protocol buffers is not particularly picky about # ordering, and it is possible that a message will have had a field @@ -1084,7 +1155,7 @@ def __init__( # and the field would have its original textual reference to the # message (`type_name`) but not its resolved message wrapper. orphan_field_gen = ( - (field.type_name.lstrip('.'), field) + (field.type_name.lstrip("."), field) for message in self.proto_messages.values() for field in message.fields.values() if field.type_name and not (field.message or field.enum) @@ -1093,9 +1164,9 @@ def __init__( maybe_msg_type = self.proto_messages.get(key) maybe_enum_type = self.proto_enums.get(key) if maybe_msg_type: - object.__setattr__(field, 'message', maybe_msg_type) + object.__setattr__(field, "message", maybe_msg_type) elif maybe_enum_type: - object.__setattr__(field, 'enum', maybe_enum_type) + object.__setattr__(field, "enum", maybe_enum_type) else: raise TypeError( f"Unknown type referenced in " @@ -1107,9 +1178,13 @@ def __init__( # they are being used as an import just to get types declared in the # same files. 
if file_to_generate and load_services: - self._load_children(file_descriptor.service, self._load_service, - address=self.address, path=(6,), - resources=all_resources or {}) + self._load_children( + file_descriptor.service, + self._load_service, + address=self.address, + path=(6,), + resources=all_resources or {}, + ) # TODO(lukesneeringer): oneofs are on path 7. @property @@ -1143,19 +1218,25 @@ def proto(self) -> Proto: for k, v in naive.all_enums.items() ), all_messages=collections.OrderedDict( - (k, v.with_context( - collisions=naive.names, - visited_messages=visited_messages, - )) + ( + k, + v.with_context( + collisions=naive.names, + visited_messages=visited_messages, + ), + ) for k, v in naive.all_messages.items() ), services=collections.OrderedDict( # Note: services bind to themselves because services get their # own output files. - (k, v.with_context( - collisions=v.names, - visited_messages=visited_messages, - )) + ( + k, + v.with_context( + collisions=v.names, + visited_messages=visited_messages, + ), + ) for k, v in naive.services.items() ), meta=naive.meta.with_context(collisions=naive.names), @@ -1183,10 +1264,15 @@ def api_messages(self) -> Mapping[str, wrappers.MessageType]: *[p.all_messages for p in self.prior_protos.values()], # type: ignore ) - def _load_children(self, - children: Sequence, loader: Callable, *, - address: metadata.Address, path: Tuple[int, ...], - resources: Mapping[str, wrappers.MessageType]) -> Mapping: + def _load_children( + self, + children: Sequence, + loader: Callable, + *, + address: metadata.Address, + path: Tuple[int, ...], + resources: Mapping[str, wrappers.MessageType], + ) -> Mapping: """Return wrapped versions of arbitrary children from a Descriptor. Args: @@ -1212,15 +1298,18 @@ def _load_children(self, # applicable loader function on each. 
answer = {} for child, i in zip(children, range(0, sys.maxsize)): - wrapped = loader(child, address=address, path=path + (i,), - resources=resources) + wrapped = loader( + child, address=address, path=path + (i,), resources=resources + ) answer[wrapped.name] = wrapped return answer - def _get_oneofs(self, - oneof_pbs: Sequence[descriptor_pb2.OneofDescriptorProto], - address: metadata.Address, path: Tuple[int, ...], - ) -> Dict[str, wrappers.Oneof]: + def _get_oneofs( + self, + oneof_pbs: Sequence[descriptor_pb2.OneofDescriptorProto], + address: metadata.Address, + path: Tuple[int, ...], + ) -> Dict[str, wrappers.Oneof]: """Return a dictionary of wrapped oneofs for the given message. Args: @@ -1244,11 +1333,13 @@ def _get_oneofs(self, # Done; return the answer. return answer - def _get_fields(self, - field_pbs: Sequence[descriptor_pb2.FieldDescriptorProto], - address: metadata.Address, path: Tuple[int, ...], - oneofs: Optional[Dict[str, wrappers.Oneof]] = None - ) -> Dict[str, wrappers.Field]: + def _get_fields( + self, + field_pbs: Sequence[descriptor_pb2.FieldDescriptorProto], + address: metadata.Address, + path: Tuple[int, ...], + oneofs: Optional[Dict[str, wrappers.Oneof]] = None, + ) -> Dict[str, wrappers.Field]: """Return a dictionary of wrapped fields for the given message. Args: @@ -1275,16 +1366,15 @@ def _get_fields(self, # `_load_message` method. 
answer: Dict[str, wrappers.Field] = collections.OrderedDict() for i, field_pb in enumerate(field_pbs): - is_oneof = oneofs and field_pb.HasField('oneof_index') - oneof_name = nth( - (oneofs or {}).keys(), - field_pb.oneof_index - ) if is_oneof else None + is_oneof = oneofs and field_pb.HasField("oneof_index") + oneof_name = ( + nth((oneofs or {}).keys(), field_pb.oneof_index) if is_oneof else None + ) field = wrappers.Field( field_pb=field_pb, - enum=self.api_enums.get(field_pb.type_name.lstrip('.')), - message=self.api_messages.get(field_pb.type_name.lstrip('.')), + enum=self.api_enums.get(field_pb.type_name.lstrip(".")), + message=self.api_messages.get(field_pb.type_name.lstrip(".")), meta=metadata.Metadata( address=address.child(field_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), @@ -1299,7 +1389,7 @@ def _get_fields(self, def _get_retry_and_timeout( self, service_address: metadata.Address, - meth_pb: descriptor_pb2.MethodDescriptorProto + meth_pb: descriptor_pb2.MethodDescriptorProto, ) -> Tuple[Optional[wrappers.RetryInfo], Optional[float]]: """Returns the retry and timeout configuration of a method if it exists. @@ -1331,36 +1421,42 @@ def _get_retry_and_timeout( # with a particular format, which we match against. # This defines the expected selector for *this* method. selector = { - 'service': '{package}.{service_name}'.format( - package='.'.join(service_address.package), + "service": "{package}.{service_name}".format( + package=".".join(service_address.package), service_name=service_address.name, ), - 'method': meth_pb.name, + "method": meth_pb.name, } # Find the method config that applies to us, if any. - mc = next((c for c in self.opts.retry.get('methodConfig', []) - if selector in c.get('name')), None) + mc = next( + ( + c + for c in self.opts.retry.get("methodConfig", []) + if selector in c.get("name") + ), + None, + ) if mc: # Set the timeout according to this method config. 
- if mc.get('timeout'): - timeout = self._to_float(mc['timeout']) + if mc.get("timeout"): + timeout = self._to_float(mc["timeout"]) # Set the retry according to this method config. - if 'retryPolicy' in mc: - r = mc['retryPolicy'] + if "retryPolicy" in mc: + r = mc["retryPolicy"] retry = wrappers.RetryInfo( - max_attempts=r.get('maxAttempts', 0), + max_attempts=r.get("maxAttempts", 0), initial_backoff=self._to_float( - r.get('initialBackoff', '0s'), + r.get("initialBackoff", "0s"), ), - max_backoff=self._to_float(r.get('maxBackoff', '0s')), - backoff_multiplier=r.get('backoffMultiplier', 0.0), + max_backoff=self._to_float(r.get("maxBackoff", "0s")), + backoff_multiplier=r.get("backoffMultiplier", 0.0), retryable_exceptions=frozenset( exceptions.exception_class_for_grpc_status( getattr(grpc.StatusCode, code), ) - for code in r.get('retryableStatusCodes', []) + for code in r.get("retryableStatusCodes", []) ), ) @@ -1369,7 +1465,7 @@ def _get_retry_and_timeout( def _maybe_get_lro( self, service_address: metadata.Address, - meth_pb: descriptor_pb2.MethodDescriptorProto + meth_pb: descriptor_pb2.MethodDescriptorProto, ) -> Optional[wrappers.OperationInfo]: """Determines whether a method is a Long Running Operation (aka LRO) and, if it is, return an OperationInfo that includes the response @@ -1389,7 +1485,7 @@ def _maybe_get_lro( # If the output type is google.longrunning.Operation, we use # a specialized object in its place. - if meth_pb.output_type.endswith('google.longrunning.Operation'): + if meth_pb.output_type.endswith("google.longrunning.Operation"): if not meth_pb.options.HasExtension(operations_pb2.operation_info): # This is not a long running operation even though it returns # an Operation. @@ -1397,9 +1493,9 @@ def _maybe_get_lro( op = meth_pb.options.Extensions[operations_pb2.operation_info] if not op.response_type or not op.metadata_type: raise TypeError( - f'rpc {meth_pb.name} returns a google.longrunning.' 
- 'Operation, but is missing a response type or ' - 'metadata type.', + f"rpc {meth_pb.name} returns a google.longrunning." + "Operation, but is missing a response type or " + "metadata type.", ) response_key = service_address.resolve(op.response_type) metadata_key = service_address.resolve(op.metadata_type) @@ -1453,9 +1549,7 @@ def _maybe_get_extended_lro( operation_type = service_address.resolve( operation_polling_method_pb.output_type.lstrip(".") ) - method_output_type = service_address.resolve( - meth_pb.output_type.lstrip(".") - ) + method_output_type = service_address.resolve(meth_pb.output_type.lstrip(".")) if operation_type != method_output_type: raise ValueError( f"Inconsistent return types between extended lro method '{meth_pb.name}'" @@ -1465,19 +1559,19 @@ def _maybe_get_extended_lro( operation_message = self.api_messages[operation_type] if not operation_message.is_extended_operation: - raise ValueError( - f"Message is not an extended operation: {operation_type}" - ) + raise ValueError(f"Message is not an extended operation: {operation_type}") return wrappers.ExtendedOperationInfo( request_type=operation_request_message, operation_type=operation_message, ) - def _get_methods(self, - methods: Sequence[descriptor_pb2.MethodDescriptorProto], - service_address: metadata.Address, path: Tuple[int, ...], - ) -> Mapping[str, wrappers.Method]: + def _get_methods( + self, + methods: Sequence[descriptor_pb2.MethodDescriptorProto], + service_address: metadata.Address, + path: Tuple[int, ...], + ) -> Mapping[str, wrappers.Method]: """Return a dictionary of wrapped methods for the given service. Args: @@ -1495,14 +1589,11 @@ def _get_methods(self, # Iterate over the methods and collect them into a dictionary. 
answer: Dict[str, wrappers.Method] = collections.OrderedDict() for i, meth_pb in enumerate(methods): - retry, timeout = self._get_retry_and_timeout( - service_address, - meth_pb - ) + retry, timeout = self._get_retry_and_timeout(service_address, meth_pb) # Create the method wrapper object. answer[meth_pb.name] = wrappers.Method( - input=self.api_messages[meth_pb.input_type.lstrip('.')], + input=self.api_messages[meth_pb.input_type.lstrip(".")], lro=self._maybe_get_lro(service_address, meth_pb), extended_lro=self._maybe_get_extended_lro( service_address, @@ -1513,7 +1604,7 @@ def _get_methods(self, address=service_address.child(meth_pb.name, path + (i,)), documentation=self.docs.get(path + (i,), self.EMPTY), ), - output=self.api_messages[meth_pb.output_type.lstrip('.')], + output=self.api_messages[meth_pb.output_type.lstrip(".")], retry=retry, timeout=timeout, ) @@ -1521,12 +1612,13 @@ def _get_methods(self, # Done; return the answer. return answer - def _load_message(self, - message_pb: descriptor_pb2.DescriptorProto, - address: metadata.Address, - path: Tuple[int], - resources: Mapping[str, wrappers.MessageType], - ) -> wrappers.MessageType: + def _load_message( + self, + message_pb: descriptor_pb2.DescriptorProto, + address: metadata.Address, + path: Tuple[int], + resources: Mapping[str, wrappers.MessageType], + ) -> wrappers.MessageType: """Load message descriptions from DescriptorProtos.""" address = address.child(message_pb.name, path) @@ -1565,12 +1657,14 @@ def _load_message(self, path=path + (2,), oneofs=oneofs, ) - fields.update(self._get_fields( - message_pb.extension, - address=address, - path=path + (6,), - oneofs=oneofs, - )) + fields.update( + self._get_fields( + message_pb.extension, + address=address, + path=path + (6,), + oneofs=oneofs, + ) + ) # Create a message correspoding to this descriptor. 
self.proto_messages[address.proto] = wrappers.MessageType( @@ -1586,25 +1680,28 @@ def _load_message(self, ) return self.proto_messages[address.proto] - def _load_enum(self, - enum: descriptor_pb2.EnumDescriptorProto, - address: metadata.Address, - path: Tuple[int], - resources: Mapping[str, wrappers.MessageType], - ) -> wrappers.EnumType: + def _load_enum( + self, + enum: descriptor_pb2.EnumDescriptorProto, + address: metadata.Address, + path: Tuple[int], + resources: Mapping[str, wrappers.MessageType], + ) -> wrappers.EnumType: """Load enum descriptions from EnumDescriptorProtos.""" address = address.child(enum.name, path) # Put together wrapped objects for the enum values. values = [] for enum_value, i in zip(enum.value, range(0, sys.maxsize)): - values.append(wrappers.EnumValueType( - enum_value_pb=enum_value, - meta=metadata.Metadata( - address=address, - documentation=self.docs.get(path + (2, i), self.EMPTY), - ), - )) + values.append( + wrappers.EnumValueType( + enum_value_pb=enum_value, + meta=metadata.Metadata( + address=address, + documentation=self.docs.get(path + (2, i), self.EMPTY), + ), + ) + ) # Load the enum itself. self.proto_enums[address.proto] = wrappers.EnumType( @@ -1617,12 +1714,13 @@ def _load_enum(self, ) return self.proto_enums[address.proto] - def _load_service(self, - service: descriptor_pb2.ServiceDescriptorProto, - address: metadata.Address, - path: Tuple[int], - resources: Mapping[str, wrappers.MessageType], - ) -> wrappers.Service: + def _load_service( + self, + service: descriptor_pb2.ServiceDescriptorProto, + address: metadata.Address, + path: Tuple[int], + resources: Mapping[str, wrappers.MessageType], + ) -> wrappers.Service: """Load comments for a service and its methods.""" address = address.child(service.name, path) @@ -1647,4 +1745,4 @@ def _load_service(self, def _to_float(self, s: str) -> float: """Convert a protobuf duration string (e.g. 
`"30s"`) to float.""" - return int(s[:-1]) / 1e9 if s.endswith('n') else float(s[:-1]) + return int(s[:-1]) / 1e9 if s.endswith("n") else float(s[:-1]) diff --git a/packages/gapic-generator/gapic/schema/imp.py b/packages/gapic-generator/gapic/schema/imp.py index ba53a91af18b..4069d2dd6886 100644 --- a/packages/gapic-generator/gapic/schema/imp.py +++ b/packages/gapic-generator/gapic/schema/imp.py @@ -20,17 +20,17 @@ class Import: package: Tuple[str, ...] module: str - alias: str = '' + alias: str = "" def __eq__(self, other) -> bool: return self.package == other.package and self.module == other.module def __str__(self) -> str: - answer = f'import {self.module}' + answer = f"import {self.module}" if self.package: answer = f"from {'.'.join(self.package)} {answer}" if self.alias: - answer += f' as {self.alias}' - if self.module.endswith('_pb2') or 'api_core' in self.package: - answer += ' # type: ignore' + answer += f" as {self.alias}" + if self.module.endswith("_pb2") or "api_core" in self.package: + answer += " # type: ignore" return answer diff --git a/packages/gapic-generator/gapic/schema/metadata.py b/packages/gapic-generator/gapic/schema/metadata.py index 480cde40cd8a..7df8d0291f3d 100644 --- a/packages/gapic-generator/gapic/schema/metadata.py +++ b/packages/gapic-generator/gapic/schema/metadata.py @@ -42,8 +42,8 @@ @dataclasses.dataclass(frozen=True) class BaseAddress: - name: str = '' - module: str = '' + name: str = "" + module: str = "" module_path: Tuple[int, ...] = dataclasses.field(default_factory=tuple) package: Tuple[str, ...] = dataclasses.field(default_factory=tuple) parent: Tuple[str, ...] = dataclasses.field(default_factory=tuple) @@ -92,14 +92,14 @@ def __str__(self) -> str: # Add _pb2 suffix except when it is a proto-plus type if not self.is_proto_plus_type: - module_name = f'{self.module}_pb2' + module_name = f"{self.module}_pb2" # Return the dot-separated Python identifier. 
- return '.'.join((module_name,) + self.parent + (self.name,)) + return ".".join((module_name,) + self.parent + (self.name,)) # This type does not have a module (most common for PythonType). # Return the Python identifier. - return '.'.join(self.parent + (self.name,)) + return ".".join(self.parent + (self.name,)) @property def is_proto_plus_type(self) -> bool: @@ -150,9 +150,9 @@ def module_alias(self) -> str: """ # This is a minor optimization to prevent constructing a temporary set. if self.module in self.collisions or self.module in RESERVED_NAMES: - return '_'.join( + return "_".join( ( - ''.join( + "".join( partial_name[0] for i in self.package for partial_name in i.split("_") @@ -161,17 +161,17 @@ def module_alias(self) -> str: self.module, ) ) - return '' + return "" @property def proto(self) -> str: """Return the proto selector for this type.""" - return '.'.join(self.package + self.parent + (self.name,)) + return ".".join(self.package + self.parent + (self.name,)) @property def proto_package(self) -> str: """Return the proto package for this type.""" - return '.'.join(self.package) + return ".".join(self.package) @property def proto_package_versioned(self) -> str: @@ -213,16 +213,17 @@ def python_import(self) -> imp.Import: # rewrite the package to our structure. if self.proto_package.startswith(self.api_naming.proto_package): return imp.Import( - package=self.api_naming.module_namespace + ( - self.api_naming.versioned_module_name, - ) + self.subpackage + ('types',), + package=self.api_naming.module_namespace + + (self.api_naming.versioned_module_name,) + + self.subpackage + + ("types",), module=self.module, alias=self.module_alias, ) if self.is_proto_plus_type: return imp.Import( - package=self.convert_to_versioned_package() + ('types',), + package=self.convert_to_versioned_package() + ("types",), module=self.module, alias=self.module_alias, ) @@ -230,7 +231,7 @@ def python_import(self) -> imp.Import: # Return the standard import. 
return imp.Import( package=self.package, - module=f'{self.module}_pb2', + module=f"{self.module}_pb2", ) @property @@ -239,16 +240,21 @@ def sphinx(self) -> str: if not self.api_naming: if self.package: - return '.'.join(self.package + (self.module, self.name)) + return ".".join(self.package + (self.module, self.name)) else: return str(self) # Check if this is a generated type # Use the original module name rather than the module_alias if self.proto_package.startswith(self.api_naming.proto_package): - return '.'.join(self.api_naming.module_namespace + ( - self.api_naming.versioned_module_name, - ) + self.subpackage + ('types',) + self.parent + (self.name, )) + return ".".join( + self.api_naming.module_namespace + + (self.api_naming.versioned_module_name,) + + self.subpackage + + ("types",) + + self.parent + + (self.name,) + ) elif self.is_proto_plus_type: return ".".join( self.convert_to_versioned_package() @@ -258,16 +264,14 @@ def sphinx(self) -> str: ) # Anything left is a standard _pb2 type - return f'{self.proto_package}.{self.module}_pb2.{self.name}' + return f"{self.proto_package}.{self.module}_pb2.{self.name}" @property def subpackage(self) -> Tuple[str, ...]: """Return the subpackage below the versioned module name, if any.""" - return tuple( - self.package[len(self.api_naming.proto_package.split('.')):] - ) + return tuple(self.package[len(self.api_naming.proto_package.split(".")) :]) - def child(self, child_name: str, path: Tuple[int, ...]) -> 'Address': + def child(self, child_name: str, path: Tuple[int, ...]) -> "Address": """Return a new child of the current Address. Args: @@ -284,7 +288,7 @@ def child(self, child_name: str, path: Tuple[int, ...]) -> 'Address': parent=self.parent + (self.name,) if self.name else self.parent, ) - def rel(self, address: 'Address') -> str: + def rel(self, address: "Address") -> str: """Return an identifier for this type, relative to the given address. 
Similar to :meth:`__str__`, but accepts an address (expected to be the @@ -308,8 +312,7 @@ def rel(self, address: 'Address') -> str: # scope in Python, without reference to the parent class being # created, so there is no way for one nested class to reference # another at class instantiation time. - if (self.parent and address.parent and - self.parent[0] == address.parent[0]): + if self.parent and address.parent and self.parent[0] == address.parent[0]: return f"'{'.'.join(self.parent)}.{self.name}'" # Edge case: Similar to above, if this is a message that is @@ -317,7 +320,7 @@ def rel(self, address: 'Address') -> str: # the message to be referenced relative to this message's # namespace. if self.parent and self.parent[0] == address.name: - return '.'.join(self.parent[1:] + (self.name,)) + return ".".join(self.parent[1:] + (self.name,)) # It is possible that a field references a message that has # not yet been declared. If so, send its name enclosed in quotes @@ -352,11 +355,11 @@ def resolve(self, selector: str) -> str: Returns: str: An absolute selector. """ - if '.' not in selector: + if "." not in selector: return f'{".".join(self.package)}.{selector}' return selector - def with_context(self, *, collisions: Set[str]) -> 'Address': + def with_context(self, *, collisions: Set[str]) -> "Address": """Return a derivative of this address with the provided context. This method is used to address naming collisions. The returned @@ -392,20 +395,24 @@ def doc(self): if self.documentation.trailing_comments: return self.documentation.trailing_comments.strip() if self.documentation.leading_detached_comments: - return '\n\n'.join(self.documentation.leading_detached_comments) - return '' + return "\n\n".join(self.documentation.leading_detached_comments) + return "" - def with_context(self, *, collisions: Set[str]) -> 'Metadata': + def with_context(self, *, collisions: Set[str]) -> "Metadata": """Return a derivative of this metadata with the provided context. 
This method is used to address naming collisions. The returned ``Address`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace( - self, - address=self.address.with_context(collisions=collisions), - ) if collisions and collisions != self.address.collisions else self + return ( + dataclasses.replace( + self, + address=self.address.with_context(collisions=collisions), + ) + if collisions and collisions != self.address.collisions + else self + ) @dataclasses.dataclass(frozen=True) @@ -416,15 +423,15 @@ class FieldIdentifier: def __str__(self) -> str: if self.mapping: - return f'MutableMapping[{self.mapping[0].ident}, {self.mapping[1].ident}]' + return f"MutableMapping[{self.mapping[0].ident}, {self.mapping[1].ident}]" if self.repeated: - return f'MutableSequence[{self.ident}]' + return f"MutableSequence[{self.ident}]" return str(self.ident) @property def sphinx(self) -> str: if self.mapping: - return f'MutableMapping[{self.mapping[0].ident.sphinx}, {self.mapping[1].ident.sphinx}]' + return f"MutableMapping[{self.mapping[0].ident.sphinx}, {self.mapping[1].ident.sphinx}]" if self.repeated: - return f'MutableSequence[{self.ident.sphinx}]' + return f"MutableSequence[{self.ident.sphinx}]" return self.ident.sphinx diff --git a/packages/gapic-generator/gapic/schema/mixins.py b/packages/gapic-generator/gapic/schema/mixins.py index 89fccdd573dc..d340ec1189ab 100644 --- a/packages/gapic-generator/gapic/schema/mixins.py +++ b/packages/gapic-generator/gapic/schema/mixins.py @@ -15,54 +15,54 @@ from gapic.schema import wrappers MIXINS_MAP = { - 'DeleteOperation': wrappers.MixinMethod( - 'DeleteOperation', - request_type='operations_pb2.DeleteOperationRequest', - response_type='None' + "DeleteOperation": wrappers.MixinMethod( + "DeleteOperation", + request_type="operations_pb2.DeleteOperationRequest", + response_type="None", ), - 'WaitOperation': wrappers.MixinMethod( - 'WaitOperation', - 
request_type='operations_pb2.WaitOperationRequest', - response_type='operations_pb2.Operation' + "WaitOperation": wrappers.MixinMethod( + "WaitOperation", + request_type="operations_pb2.WaitOperationRequest", + response_type="operations_pb2.Operation", ), - 'ListOperations': wrappers.MixinMethod( - 'ListOperations', - request_type='operations_pb2.ListOperationsRequest', - response_type='operations_pb2.ListOperationsResponse' + "ListOperations": wrappers.MixinMethod( + "ListOperations", + request_type="operations_pb2.ListOperationsRequest", + response_type="operations_pb2.ListOperationsResponse", ), - 'CancelOperation': wrappers.MixinMethod( - 'CancelOperation', - request_type='operations_pb2.CancelOperationRequest', - response_type='None' + "CancelOperation": wrappers.MixinMethod( + "CancelOperation", + request_type="operations_pb2.CancelOperationRequest", + response_type="None", ), - 'GetOperation': wrappers.MixinMethod( - 'GetOperation', - request_type='operations_pb2.GetOperationRequest', - response_type='operations_pb2.Operation' + "GetOperation": wrappers.MixinMethod( + "GetOperation", + request_type="operations_pb2.GetOperationRequest", + response_type="operations_pb2.Operation", ), - 'TestIamPermissions': wrappers.MixinMethod( - 'TestIamPermissions', - request_type='iam_policy_pb2.TestIamPermissionsRequest', - response_type='iam_policy_pb2.TestIamPermissionsResponse' + "TestIamPermissions": wrappers.MixinMethod( + "TestIamPermissions", + request_type="iam_policy_pb2.TestIamPermissionsRequest", + response_type="iam_policy_pb2.TestIamPermissionsResponse", ), - 'GetIamPolicy': wrappers.MixinMethod( - 'GetIamPolicy', - request_type='iam_policy_pb2.GetIamPolicyRequest', - response_type='policy_pb2.Policy' + "GetIamPolicy": wrappers.MixinMethod( + "GetIamPolicy", + request_type="iam_policy_pb2.GetIamPolicyRequest", + response_type="policy_pb2.Policy", ), - 'SetIamPolicy': wrappers.MixinMethod( - 'SetIamPolicy', - request_type='iam_policy_pb2.SetIamPolicyRequest', 
- response_type='policy_pb2.Policy' + "SetIamPolicy": wrappers.MixinMethod( + "SetIamPolicy", + request_type="iam_policy_pb2.SetIamPolicyRequest", + response_type="policy_pb2.Policy", ), - 'ListLocations': wrappers.MixinMethod( - 'ListLocations', - request_type='locations_pb2.ListLocationsRequest', - response_type='locations_pb2.ListLocationsResponse' + "ListLocations": wrappers.MixinMethod( + "ListLocations", + request_type="locations_pb2.ListLocationsRequest", + response_type="locations_pb2.ListLocationsResponse", + ), + "GetLocation": wrappers.MixinMethod( + "GetLocation", + request_type="locations_pb2.GetLocationRequest", + response_type="locations_pb2.Location", ), - 'GetLocation': wrappers.MixinMethod( - 'GetLocation', - request_type='locations_pb2.GetLocationRequest', - response_type='locations_pb2.Location' - ) } diff --git a/packages/gapic-generator/gapic/schema/naming.py b/packages/gapic-generator/gapic/schema/naming.py index 92545b6526e8..ebf650944fcb 100644 --- a/packages/gapic-generator/gapic/schema/naming.py +++ b/packages/gapic-generator/gapic/schema/naming.py @@ -37,23 +37,24 @@ class Naming(abc.ABC): A concrete child of this object is made available to every template (as ``api.naming``). """ - name: str = '' + + name: str = "" namespace: Tuple[str, ...] = dataclasses.field(default_factory=tuple) - version: str = '' - product_name: str = '' - proto_package: str = '' - _warehouse_package_name: str = '' + version: str = "" + product_name: str = "" + proto_package: str = "" + _warehouse_package_name: str = "" proto_plus_deps: Tuple[str, ...] = dataclasses.field(default_factory=tuple) def __post_init__(self): if not self.product_name: - self.__dict__['product_name'] = self.name + self.__dict__["product_name"] = self.name @staticmethod def build( *file_descriptors: descriptor_pb2.FileDescriptorProto, opts: Options = Options(), - ) -> 'Naming': + ) -> "Naming": """Return a full Naming instance based on these file descriptors. 
This is pieced together from the proto package names as well as the @@ -76,15 +77,15 @@ def build( """ # Determine the set of proto packages. proto_packages = {fd.package for fd in file_descriptors} - root_package = os.path.commonprefix(tuple(proto_packages)).rstrip('.') + root_package = os.path.commonprefix(tuple(proto_packages)).rstrip(".") # Quick check: If there is no common ground in the package, # we are obviously in trouble. if not root_package: raise ValueError( - 'The protos provided do not share a common root package. ' - 'Ensure that all explicitly-specified protos are for a ' - 'single API. ' + "The protos provided do not share a common root package. " + "Ensure that all explicitly-specified protos are for a " + "single API. " f'The packages we got are: {", ".join(proto_packages)}' ) @@ -93,7 +94,7 @@ def build( # It is not necessary for the regex to be as particular about package # name validity (e.g. avoiding .. or segments starting with numbers) # because protoc is guaranteed to give us valid package names. - pattern = r'^((?P[a-z0-9_.]+)\.)?(?P[a-z0-9_]+)' + pattern = r"^((?P[a-z0-9_.]+)\.)?(?P[a-z0-9_]+)" # Only require the version portion of the regex if the version is # present. @@ -101,30 +102,29 @@ def build( # This code may look counter-intuitive (why not use ? to make it # optional), but the engine's greediness routine will decide that # the version is the name, which is not what we want. 
- version = r'\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta)[0-9]*)?)' + version = r"\.(?Pv[0-9]+(p[0-9]+)?((alpha|beta)[0-9]*)?)" if re.search(version, root_package): pattern += version # Okay, do the match - match = cast(Match, - re.search(pattern=pattern, string=root_package)).groupdict() - match['namespace'] = match['namespace'] or '' + match = cast(Match, re.search(pattern=pattern, string=root_package)).groupdict() + match["namespace"] = match["namespace"] or "" klass = OldNaming if opts.old_naming else NewNaming package_info = klass( - name=match['name'].capitalize(), - namespace=tuple( - i.capitalize() for i in match['namespace'].split('.') if i - ), - product_name=match['name'].capitalize(), + name=match["name"].capitalize(), + namespace=tuple(i.capitalize() for i in match["namespace"].split(".") if i), + product_name=match["name"].capitalize(), proto_package=root_package, - version=match.get('version', ''), + version=match.get("version", ""), ) # Quick check: Ensure that the package directives all inferred # the same information. if not package_info.version and len(proto_packages) > 1: - raise ValueError('All protos must have the same proto package ' - 'up to and including the version.') + raise ValueError( + "All protos must have the same proto package " + "up to and including the version." + ) # If a naming information was provided on the CLI, override the naming # value. @@ -133,20 +133,27 @@ def build( # likely make sense to many users to use dot-separated namespaces and # snake case, so handle that and do the right thing. if opts.name: - package_info = dataclasses.replace(package_info, name=' '.join(( - i.capitalize() for i in opts.name.replace('_', ' ').split(' ') - ))) + package_info = dataclasses.replace( + package_info, + name=" ".join( + (i.capitalize() for i in opts.name.replace("_", " ").split(" ")) + ), + ) if opts.namespace: - package_info = dataclasses.replace(package_info, namespace=tuple( - # The join-and-split on "." 
here causes us to expand out - # dot notation that we may have been sent; e.g. a one-tuple - # with ('x.y',) will become a two-tuple: ('x', 'y') - i.capitalize() for i in '.'.join(opts.namespace).split('.') - )) + package_info = dataclasses.replace( + package_info, + namespace=tuple( + # The join-and-split on "." here causes us to expand out + # dot notation that we may have been sent; e.g. a one-tuple + # with ('x.y',) will become a two-tuple: ('x', 'y') + i.capitalize() + for i in ".".join(opts.namespace).split(".") + ), + ) if opts.warehouse_package_name: - package_info = dataclasses.replace(package_info, - _warehouse_package_name=opts.warehouse_package_name - ) + package_info = dataclasses.replace( + package_info, _warehouse_package_name=opts.warehouse_package_name + ) if opts.proto_plus_deps: package_info = dataclasses.replace( package_info, @@ -165,7 +172,7 @@ def __bool__(self): @property def long_name(self) -> str: """Return an appropriate title-cased long name.""" - return ' '.join(tuple(self.namespace) + (self.name,)) + return " ".join(tuple(self.namespace) + (self.name,)) @property def module_name(self) -> str: @@ -182,7 +189,7 @@ def namespace_packages(self) -> Tuple[str, ...]: """Return the appropriate Python namespace packages.""" answer: List[str] = [] for cursor in (i.lower() for i in self.namespace): - answer.append(f'{answer[-1]}.{cursor}' if answer else cursor) + answer.append(f"{answer[-1]}.{cursor}" if answer else cursor) return tuple(answer) @property @@ -202,8 +209,8 @@ def warehouse_package_name(self) -> str: return self._warehouse_package_name # Otherwise piece the name and namespace together to come # up with the proper package name. 
- answer = list(self.namespace) + self.name.split(' ') - return '-'.join(answer).lower() + answer = list(self.namespace) + self.name.split(" ") + return "-".join(answer).lower() class NewNaming(Naming): @@ -213,7 +220,7 @@ def versioned_module_name(self) -> str: If there is no version, this is the same as ``module_name``. """ - return self.module_name + (f'_{self.version}' if self.version else '') + return self.module_name + (f"_{self.version}" if self.version else "") class OldNaming(Naming): @@ -223,4 +230,4 @@ def versioned_module_name(self) -> str: If there is no version, this is the same as ``module_name``. """ - return self.module_name + (f'.{self.version}' if self.version else '') + return self.module_name + (f".{self.version}" if self.version else "") diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index d148452435ee..41bd2e202dd4 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -35,9 +35,24 @@ import keyword import re from itertools import chain -from typing import (Any, cast, Dict, FrozenSet, Iterator, Iterable, List, Mapping, - ClassVar, Optional, Sequence, Set, Tuple, Union, Pattern) -from google.api import annotations_pb2 # type: ignore +from typing import ( + Any, + cast, + Dict, + FrozenSet, + Iterator, + Iterable, + List, + Mapping, + ClassVar, + Optional, + Sequence, + Set, + Tuple, + Union, + Pattern, +) +from google.api import annotations_pb2 # type: ignore from google.api import client_pb2 from google.api import field_behavior_pb2 from google.api import field_info_pb2 @@ -59,9 +74,10 @@ @dataclasses.dataclass(frozen=True) class Field: """Description of a field.""" + field_pb: descriptor_pb2.FieldDescriptorProto - message: Optional['MessageType'] = None - enum: Optional['EnumType'] = None + message: Optional["MessageType"] = None + enum: Optional["EnumType"] = None meta: metadata.Metadata = dataclasses.field( 
default_factory=metadata.Metadata, ) @@ -80,7 +96,11 @@ def __hash__(self): def name(self) -> str: """Used to prevent collisions with python keywords""" name = self.field_pb.name - return name + "_" if name in utils.RESERVED_NAMES and self.meta.address.is_proto_plus_type else name + return ( + name + "_" + if name in utils.RESERVED_NAMES and self.meta.address.is_proto_plus_type + else name + ) @utils.cached_property def ident(self) -> metadata.FieldIdentifier: @@ -117,7 +137,9 @@ def operation_response_field(self) -> Optional[str]: return self.options.Extensions[ex_ops_pb2.operation_response_field] @utils.cached_property - def mock_value_original_type(self) -> Union[bool, str, bytes, int, float, Dict[str, Any], List[Any], None]: + def mock_value_original_type( + self, + ) -> Union[bool, str, bytes, int, float, Dict[str, Any], List[Any], None]: visited_messages = set() def recursive_mock_original_type(field): @@ -137,7 +159,7 @@ def recursive_mock_original_type(field): # Duration in this specific case. msg_dict = { "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b'\x08\x0c\x10\xdb\x07', + "value": b"\x08\x0c\x10\xdb\x07", } else: msg_dict = { @@ -150,7 +172,8 @@ def recursive_mock_original_type(field): if field.enum: # First Truthy value, fallback to the first value answer = next( - (v for v in field.type.values if v.number), field.type.values[0]).number + (v for v in field.type.values if v.number), field.type.values[0] + ).number if field.repeated: answer = [answer] return answer @@ -190,7 +213,7 @@ def inner_mock(self, stack, visited_fields) -> str: """Return a repr of a valid, usually truthy mock value.""" # For primitives, send a truthy value computed from the # field name. 
- answer = 'None' + answer = "None" if isinstance(self.type, PrimitiveType): answer = self.primitive_mock_as_str() @@ -200,39 +223,41 @@ def inner_mock(self, stack, visited_fields) -> str: # Note: The slightly-goofy [:2][-1] lets us gracefully fall # back to index 0 if there is only one element. mock_value = self.type.values[:2][-1] - answer = f'{self.type.ident}.{mock_value.name}' + answer = f"{self.type.ident}.{mock_value.name}" # If this is another message, set one value on the message. if ( - not self.map # Maps are handled separately - and isinstance(self.type, MessageType) - and len(self.type.fields) - # Nested message types need to terminate eventually - and self not in visited_fields + not self.map # Maps are handled separately + and isinstance(self.type, MessageType) + and len(self.type.fields) + # Nested message types need to terminate eventually + and self not in visited_fields ): sub = next(iter(self.type.fields.values())) stack.append(sub) visited_fields.add(self) # Don't do the recursive rendering here, just set up # where the nested value should go with the double {}. - answer = f'{self.type.ident}({sub.name}={{}})' + answer = f"{self.type.ident}({sub.name}={{}})" if self.map: # Maps are a special case because they're represented internally as # a list of a generated type with two fields: 'key' and 'value'. - answer = '{{{}: {}}}'.format( + answer = "{{{}: {}}}".format( self.type.fields["key"].mock_value, self.type.fields["value"].mock_value, ) elif self.repeated: # If this is a repeated field, then the mock answer should # be a list. - answer = f'[{answer}]' + answer = f"[{answer}]" # Done; return the mock value. return answer - def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, List[Any], None]: + def primitive_mock( + self, suffix: int = 0 + ) -> Union[bool, str, bytes, int, float, List[Any], None]: """Generate a valid mock for a primitive type. This function returns the original (Python) type. 
@@ -242,8 +267,10 @@ def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, answer: Union[bool, str, bytes, int, float, List[Any], None] = None if not isinstance(self.type, PrimitiveType): - raise TypeError(f"'primitive_mock' can only be used for " - f"PrimitiveType, but type is {self.type}") + raise TypeError( + f"'primitive_mock' can only be used for " + f"PrimitiveType, but type is {self.type}" + ) else: if self.type.python_type == bool: @@ -256,9 +283,13 @@ def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, # is not Any. answer = "type.googleapis.com/google.protobuf.Empty" else: - answer = f"{self.name}_value{suffix}" if suffix else f"{self.name}_value" + answer = ( + f"{self.name}_value{suffix}" if suffix else f"{self.name}_value" + ) elif self.type.python_type == bytes: - answer_str = f"{self.name}_blob{suffix}" if suffix else f"{self.name}_blob" + answer_str = ( + f"{self.name}_blob{suffix}" if suffix else f"{self.name}_blob" + ) answer = bytes(answer_str, encoding="utf-8") elif self.type.python_type == int: answer = sum([ord(i) for i in self.name]) + suffix @@ -266,8 +297,10 @@ def primitive_mock(self, suffix: int = 0) -> Union[bool, str, bytes, int, float, name_sum = sum([ord(i) for i in self.name]) + suffix answer = name_sum * pow(10, -1 * len(str(name_sum))) else: # Impossible; skip coverage checks. - raise TypeError('Unrecognized PrimitiveType. This should ' - 'never happen; please file an issue.') + raise TypeError( + "Unrecognized PrimitiveType. This should " + "never happen; please file an issue." 
+ ) return answer @@ -285,9 +318,12 @@ def primitive_mock_as_str(self) -> str: @property def proto_type(self) -> str: """Return the proto type constant to be used in templates.""" - return cast(str, descriptor_pb2.FieldDescriptorProto.Type.Name( - self.field_pb.type, - ))[len('TYPE_'):] + return cast( + str, + descriptor_pb2.FieldDescriptorProto.Type.Name( + self.field_pb.type, + ), + )[len("TYPE_") :] @property def repeated(self) -> bool: @@ -296,9 +332,9 @@ def repeated(self) -> bool: Returns: bool: Whether this field is repeated. """ - return self.label == \ - descriptor_pb2.FieldDescriptorProto.Label.Value( - 'LABEL_REPEATED') # type: ignore + return self.label == descriptor_pb2.FieldDescriptorProto.Label.Value( + "LABEL_REPEATED" + ) # type: ignore @property def required(self) -> bool: @@ -307,8 +343,10 @@ def required(self) -> bool: Returns: bool: Whether this field is required. """ - return (field_behavior_pb2.FieldBehavior.Value('REQUIRED') in - self.options.Extensions[field_behavior_pb2.field_behavior]) + return ( + field_behavior_pb2.FieldBehavior.Value("REQUIRED") + in self.options.Extensions[field_behavior_pb2.field_behavior] + ) @property def uuid4(self) -> bool: @@ -319,7 +357,9 @@ def uuid4(self) -> bool: Returns: bool: Whether this field is UUID4. """ - return self.options.Extensions[field_info_pb2.field_info].format == field_info_pb2.FieldInfo.Format.Value("UUID4") + return self.options.Extensions[ + field_info_pb2.field_info + ].format == field_info_pb2.FieldInfo.Format.Value("UUID4") @property def resource_reference(self) -> Optional[str]: @@ -328,12 +368,14 @@ def resource_reference(self) -> Optional[str]: This is only applicable for string fields. 
Example: "translate.googleapis.com/Glossary" """ - return (self.options.Extensions[resource_pb2.resource_reference].type + return ( + self.options.Extensions[resource_pb2.resource_reference].type or self.options.Extensions[resource_pb2.resource_reference].child_type - or None) + or None + ) @utils.cached_property - def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: + def type(self) -> Union["MessageType", "EnumType", "PrimitiveType"]: """Return the type of this field.""" # If this is a message or enum, return the appropriate thing. if self.type_name and self.message: @@ -363,15 +405,17 @@ def type(self) -> Union['MessageType', 'EnumType', 'PrimitiveType']: return PrimitiveType.build(bytes) # This should never happen. - raise TypeError(f'Unrecognized protobuf type: {self.field_pb.type}. ' - 'This code should not be reachable; please file a bug.') + raise TypeError( + f"Unrecognized protobuf type: {self.field_pb.type}. " + "This code should not be reachable; please file a bug." + ) def with_context( - self, - *, - collisions: Set[str], - visited_messages: Optional[Set["MessageType"]] = None, - ) -> 'Field': + self, + *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> "Field": """Return a derivative of this field with the provided context. This method is used to address naming collisions. 
The returned @@ -380,20 +424,27 @@ def with_context( """ return dataclasses.replace( self, - message=self.message.with_context( - collisions=collisions, - skip_fields=self.message in visited_messages if visited_messages else False, - visited_messages=visited_messages, - ) if self.message else None, - enum=self.enum.with_context(collisions=collisions) - if self.enum else None, + message=( + self.message.with_context( + collisions=collisions, + skip_fields=( + self.message in visited_messages if visited_messages else False + ), + visited_messages=visited_messages, + ) + if self.message + else None + ), + enum=self.enum.with_context(collisions=collisions) if self.enum else None, meta=self.meta.with_context(collisions=collisions), ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - resource_messages: Dict[str, 'MessageType'] - ) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + resource_messages: Dict[str, "MessageType"], + ) -> None: """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. 
This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -443,6 +494,7 @@ def disambiguated(self) -> str: @dataclasses.dataclass(frozen=True) class Oneof: """Description of a field.""" + oneof_pb: descriptor_pb2.OneofDescriptorProto def __getattr__(self, name): @@ -452,19 +504,20 @@ def __getattr__(self, name): @dataclasses.dataclass(frozen=True) class MessageType: """Description of a message (defined with the ``message`` keyword).""" + # Class attributes # https://google.aip.dev/122 - PATH_ARG_RE = re.compile(r'\{([a-zA-Z0-9_\-]+)(?:=\*\*)?\}') + PATH_ARG_RE = re.compile(r"\{([a-zA-Z0-9_\-]+)(?:=\*\*)?\}") # Instance attributes message_pb: descriptor_pb2.DescriptorProto fields: Mapping[str, Field] - nested_enums: Mapping[str, 'EnumType'] - nested_messages: Mapping[str, 'MessageType'] + nested_enums: Mapping[str, "EnumType"] + nested_messages: Mapping[str, "MessageType"] meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, ) - oneofs: Optional[Mapping[str, 'Oneof']] = None + oneofs: Optional[Mapping[str, "Oneof"]] = None def __getattr__(self, name): return getattr(self.message_pb, name) @@ -488,11 +541,7 @@ def extended_operation_request_fields(self) -> Sequence[Field]: If this message is the request for a method that uses extended operations, return the fields that correspond to operation request fields in the operation message. """ - return tuple( - f - for f in self.fields.values() - if f.operation_request_field - ) + return tuple(f for f in self.fields.values() if f.operation_request_field) @utils.cached_property def extended_operation_response_fields(self) -> Sequence[Field]: @@ -500,11 +549,7 @@ def extended_operation_response_fields(self) -> Sequence[Field]: If this message is the request for a method that uses extended operations, return the fields that correspond to operation response fields in the polling message. 
""" - return tuple( - f - for f in self.fields.values() - if f.operation_response_field - ) + return tuple(f for f in self.fields.values() if f.operation_response_field) @utils.cached_property def differently_named_extended_operation_fields(self) -> Optional[Dict[str, Field]]: @@ -573,28 +618,23 @@ def extended_operation_status_field(self) -> Optional[Field]: ) @utils.cached_property - def required_fields(self) -> Sequence['Field']: - required_fields = [ - field for field in self.fields.values() if field.required] + def required_fields(self) -> Sequence["Field"]: + required_fields = [field for field in self.fields.values() if field.required] return required_fields @utils.cached_property - def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: + def field_types(self) -> Sequence[Union["MessageType", "EnumType"]]: answer = tuple( - field.type - for field in self.fields.values() - if field.message or field.enum + field.type for field in self.fields.values() if field.message or field.enum ) return answer @utils.cached_property - def recursive_field_types(self) -> Sequence[ - Union['MessageType', 'EnumType'] - ]: + def recursive_field_types(self) -> Sequence[Union["MessageType", "EnumType"]]: """Return all composite fields used in this proto's messages.""" - types: Set[Union['MessageType', 'EnumType']] = set() + types: Set[Union["MessageType", "EnumType"]] = set() stack = [iter(self.fields.values())] while stack: @@ -611,15 +651,20 @@ def recursive_field_types(self) -> Sequence[ def recursive_resource_fields(self) -> FrozenSet[Field]: all_fields = chain( self.fields.values(), - (field - for t in self.recursive_field_types if isinstance(t, MessageType) - for field in t.fields.values()), + ( + field + for t in self.recursive_field_types + if isinstance(t, MessageType) + for field in t.fields.values() + ), ) return frozenset( f for f in all_fields - if (f.options.Extensions[resource_pb2.resource_reference].type or - 
f.options.Extensions[resource_pb2.resource_reference].child_type) + if ( + f.options.Extensions[resource_pb2.resource_reference].type + or f.options.Extensions[resource_pb2.resource_reference].child_type + ) ) @property @@ -636,15 +681,12 @@ def ident(self) -> metadata.Address: def resource_path(self) -> Optional[str]: """If this message describes a resource, return the path to the resource. If there are multiple paths, returns the first one.""" - return next( - iter(self.options.Extensions[resource_pb2.resource].pattern), - None - ) + return next(iter(self.options.Extensions[resource_pb2.resource].pattern), None) @property def resource_type(self) -> Optional[str]: resource = self.options.Extensions[resource_pb2.resource] - return resource.type[resource.type.find('/') + 1:] if resource else None + return resource.type[resource.type.find("/") + 1 :] if resource else None @property def resource_type_full_path(self) -> Optional[str]: @@ -653,7 +695,7 @@ def resource_type_full_path(self) -> Optional[str]: @property def resource_path_args(self) -> Sequence[str]: - return self.PATH_ARG_RE.findall(self.resource_path or '') + return self.PATH_ARG_RE.findall(self.resource_path or "") @property def resource_path_formatted(self) -> str: @@ -663,7 +705,7 @@ def resource_path_formatted(self) -> str: to 'projects/{project}/metricDescriptors/{metric_descriptor} so it can be used in an f-string. """ - return self.PATH_ARG_RE.sub(r"{\g<1>}", self.resource_path or '') + return self.PATH_ARG_RE.sub(r"{\g<1>}", self.resource_path or "") @utils.cached_property def path_regex_str(self) -> str: @@ -675,8 +717,8 @@ def path_regex_str(self) -> str: # becomes the regex # ^kingdoms/(?P.+?)/phyla/(?P.+?)$ parsing_regex_str = ( - "^" + - self.PATH_ARG_RE.sub( + "^" + + self.PATH_ARG_RE.sub( # We can't just use (?P[^/]+) because segments may be # separated by delimiters other than '/'. 
# Multiple delimiter characters within one schema are allowed, @@ -684,9 +726,9 @@ def path_regex_str(self) -> str: # as/{a}-{b}/cs/{c}%{d}_{e} # This is discouraged but permitted by AIP4231 lambda m: "(?P<{name}>.+?)".format(name=m.groups()[0]), - self.resource_path or '' - ) + - "$" + self.resource_path or "", + ) + + "$" ) # Special case for wildcard resource names if parsing_regex_str == "^*$": @@ -694,8 +736,9 @@ def path_regex_str(self) -> str: return parsing_regex_str - def get_field(self, *field_path: str, - collisions: Optional[Set[str]] = None) -> Field: + def get_field( + self, *field_path: str, collisions: Optional[Set[str]] = None + ) -> Field: """Return a field arbitrarily deep in this message's structure. This method recursively traverses the message tree to return the @@ -715,8 +758,8 @@ def get_field(self, *field_path: str, in the path. """ # This covers the case when field_path is a string path. - if len(field_path) == 1 and '.' in field_path[0]: - field_path = tuple(field_path[0].split('.')) + if len(field_path) == 1 and "." in field_path[0]: + field_path = tuple(field_path[0].split(".")) # If collisions are not explicitly specified, retrieve them # from this message's address. @@ -729,8 +772,9 @@ def get_field(self, *field_path: str, # Get the first field in the path. first_field = field_path[0] - cursor = self.fields[first_field + - ('_' if first_field in utils.RESERVED_NAMES else '')] + cursor = self.fields[ + first_field + ("_" if first_field in utils.RESERVED_NAMES else "") + ] # Base case: If this is the last field in the path, return it outright. if len(field_path) == 1: @@ -743,29 +787,31 @@ def get_field(self, *field_path: str, # Repeated fields are only permitted in the terminal position. 
if cursor.repeated: raise KeyError( - f'The {cursor.name} field is repeated; unable to use ' - '`get_field` to retrieve its children.\n' - 'This exception usually indicates that a ' - 'google.api.method_signature annotation uses a repeated field ' - 'in the fields list in a position other than the end.', + f"The {cursor.name} field is repeated; unable to use " + "`get_field` to retrieve its children.\n" + "This exception usually indicates that a " + "google.api.method_signature annotation uses a repeated field " + "in the fields list in a position other than the end.", ) # Quick check: If this cursor has no message, there is a problem. if not cursor.message: raise KeyError( f'Field {".".join(field_path)} could not be resolved from ' - f'{cursor.name}.', + f"{cursor.name}.", ) # Recursion case: Pass the remainder of the path to the sub-field's # message. return cursor.message.get_field(*field_path[1:], collisions=collisions) - def with_context(self, *, - collisions: Set[str], - skip_fields: bool = False, - visited_messages: Optional[Set["MessageType"]] = None, - ) -> 'MessageType': + def with_context( + self, + *, + collisions: Set[str], + skip_fields: bool = False, + visited_messages: Optional[Set["MessageType"]] = None, + ) -> "MessageType": """Return a derivative of this message with the provided context. This method is used to address naming collisions. 
The returned @@ -780,12 +826,16 @@ def with_context(self, *, visited_messages = visited_messages | {self} return dataclasses.replace( self, - fields={ - k: v.with_context( - collisions=collisions, - visited_messages=visited_messages - ) for k, v in self.fields.items() - } if not skip_fields else self.fields, + fields=( + { + k: v.with_context( + collisions=collisions, visited_messages=visited_messages + ) + for k, v in self.fields.items() + } + if not skip_fields + else self.fields + ), nested_enums={ k: v.with_context(collisions=collisions) for k, v in self.nested_enums.items() @@ -801,10 +851,12 @@ def with_context(self, *, meta=self.meta.with_context(collisions=collisions), ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - resource_messages: Dict[str, 'MessageType'] - ) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + resource_messages: Dict[str, "MessageType"], + ) -> None: """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. 
This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -828,7 +880,7 @@ def add_to_address_allowlist(self, *, for field in self.fields.values(): field.add_to_address_allowlist( address_allowlist=address_allowlist, - resource_messages=resource_messages + resource_messages=resource_messages, ) for enum in self.nested_enums.values(): @@ -846,6 +898,7 @@ def add_to_address_allowlist(self, *, @dataclasses.dataclass(frozen=True) class EnumValueType: """Description of an enum value.""" + enum_value_pb: descriptor_pb2.EnumValueDescriptorProto meta: metadata.Metadata = dataclasses.field( default_factory=metadata.Metadata, @@ -858,6 +911,7 @@ def __getattr__(self, name): @dataclasses.dataclass(frozen=True) class EnumType: """Description of an enum (defined with the ``enum`` keyword.)""" + enum_pb: descriptor_pb2.EnumDescriptorProto values: List[EnumValueType] meta: metadata.Metadata = dataclasses.field( @@ -883,20 +937,25 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address - def with_context(self, *, collisions: Set[str]) -> 'EnumType': + def with_context(self, *, collisions: Set[str]) -> "EnumType": """Return a derivative of this enum with the provided context. This method is used to address naming collisions. The returned ``EnumType`` object aliases module names to avoid naming collisions in the file being written. """ - return dataclasses.replace( - self, - meta=self.meta.with_context(collisions=collisions), - ) if collisions else self + return ( + dataclasses.replace( + self, + meta=self.meta.with_context(collisions=collisions), + ) + if collisions + else self + ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address']) -> None: + def add_to_address_allowlist( + self, *, address_allowlist: Set["metadata.Address"] + ) -> None: """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. 
This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -919,10 +978,7 @@ def options_dict(self) -> Dict: This is a hack to support a pythonic structure representation for the generator templates. """ - return MessageToDict( - self.enum_pb.options, - preserving_proto_field_name=True - ) + return MessageToDict(self.enum_pb.options, preserving_proto_field_name=True) @dataclasses.dataclass(frozen=True) @@ -933,6 +989,7 @@ class PythonType: :meth:`Field.type` can return an object and the caller can be confident that a ``name`` property will be present. """ + meta: metadata.Metadata def __eq__(self, other): @@ -951,13 +1008,14 @@ def name(self) -> str: return self.ident.name @property - def field_types(self) -> Sequence[Union['MessageType', 'EnumType']]: + def field_types(self) -> Sequence[Union["MessageType", "EnumType"]]: return tuple() @dataclasses.dataclass(frozen=True) class PrimitiveType(PythonType): """A representation of a Python primitive type.""" + python_type: Optional[type] @classmethod @@ -974,14 +1032,19 @@ def build(cls, primitive_type: Optional[type]): """ # Primitives have no import, and no module to reference, so the # address just uses the name of the class (e.g. "int", "str"). - return cls(meta=metadata.Metadata(address=metadata.Address( - name='None' if primitive_type is None else primitive_type.__name__, - )), python_type=primitive_type) + return cls( + meta=metadata.Metadata( + address=metadata.Address( + name="None" if primitive_type is None else primitive_type.__name__, + ) + ), + python_type=primitive_type, + ) def __eq__(self, other): # If we are sent the actual Python type (not the PrimitiveType object), # claim to be equal to that. 
- if not hasattr(other, 'meta'): + if not hasattr(other, "meta"): return self.python_type is other return super().__eq__(other) @@ -991,34 +1054,44 @@ class ExtendedOperationInfo: """A handle to the request type of the extended operation polling method and the underlying operation type. """ + request_type: MessageType operation_type: MessageType - def with_context(self, *, - collisions: Set[str], - visited_messages: Optional[Set["MessageType"]] = None, - ) -> 'ExtendedOperationInfo': + def with_context( + self, + *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> "ExtendedOperationInfo": """Return a derivative of this OperationInfo with the provided context. - This method is used to address naming collisions. The returned - ``OperationInfo`` object aliases module names to avoid naming collisions - in the file being written. - """ - return self if not collisions else dataclasses.replace( - self, - request_type=self.request_type.with_context( - collisions=collisions, - visited_messages=visited_messages, - ), - operation_type=self.operation_type.with_context( - collisions=collisions, - visited_messages=visited_messages, - ), + This method is used to address naming collisions. The returned + ``OperationInfo`` object aliases module names to avoid naming collisions + in the file being written. 
+ """ + return ( + self + if not collisions + else dataclasses.replace( + self, + request_type=self.request_type.with_context( + collisions=collisions, + visited_messages=visited_messages, + ), + operation_type=self.operation_type.with_context( + collisions=collisions, + visited_messages=visited_messages, + ), + ) ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - resource_messages: Dict[str, 'MessageType']) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + resource_messages: Dict[str, "MessageType"], + ) -> None: """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -1050,19 +1123,22 @@ def add_to_address_allowlist(self, *, @dataclasses.dataclass(frozen=True) class OperationInfo: """Representation of long-running operation info.""" + response_type: MessageType metadata_type: MessageType - def with_context(self, *, - collisions: Set[str], - visited_messages: Optional[Set["MessageType"]] = None, - ) -> 'OperationInfo': + def with_context( + self, + *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> "OperationInfo": """Return a derivative of this OperationInfo with the provided context. - This method is used to address naming collisions. The returned - ``OperationInfo`` object aliases module names to avoid naming collisions - in the file being written. - """ + This method is used to address naming collisions. The returned + ``OperationInfo`` object aliases module names to avoid naming collisions + in the file being written. 
+ """ return dataclasses.replace( self, response_type=self.response_type.with_context( @@ -1075,9 +1151,12 @@ def with_context(self, *, ), ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - resource_messages: Dict[str, 'MessageType']) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + resource_messages: Dict[str, "MessageType"], + ) -> None: """Adds to the set of Addresses of wrapper objects to be included in selective GAPIC generation. This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -1101,14 +1180,14 @@ def add_to_address_allowlist(self, *, ) self.metadata_type.add_to_address_allowlist( - address_allowlist=address_allowlist, - resource_messages=resource_messages + address_allowlist=address_allowlist, resource_messages=resource_messages ) @dataclasses.dataclass(frozen=True) class RetryInfo: """Representation of the method's retry behavior.""" + max_attempts: int initial_backoff: float max_backoff: float @@ -1123,16 +1202,16 @@ class RoutingParameter: def _split_into_segments(self, path_template): segments = path_template.split("/") - named_segment_ids = [i for i, x in enumerate( - segments) if "{" in x or "}" in x] + named_segment_ids = [i for i, x in enumerate(segments) if "{" in x or "}" in x] # bar/{foo}/baz, bar/{foo=one/two/three}/baz. assert len(named_segment_ids) <= 2 if len(named_segment_ids) == 2: # Need to merge a named segment. i, j = named_segment_ids segments = ( - segments[:i] + - [self._merge_segments(segments[i: j + 1])] + segments[j + 1:] + segments[:i] + + [self._merge_segments(segments[i : j + 1])] + + segments[j + 1 :] ) return segments @@ -1176,8 +1255,11 @@ def _how_many_named_segments(self, path_template): def _convert_to_regex(self, path_template): if self._how_many_named_segments(path_template) > 1: # This also takes care of complex patterns (i.e. 
{foo}~{bar}) - raise ValueError("There must be exactly one named segment. {} has {}.".format( - path_template, self._how_many_named_segments(path_template))) + raise ValueError( + "There must be exactly one named segment. {} has {}.".format( + path_template, self._how_many_named_segments(path_template) + ) + ) segments = self._split_into_segments(path_template) segment_regexes = [self._convert_segment_to_regex(x) for x in segments] final_regex = self._merge_segments(segment_regexes) @@ -1220,8 +1302,7 @@ def key(self) -> Union[str, None]: @property def sample_request(self) -> str: """return json dict for sample request matching the uri template.""" - sample = uri_sample.sample_from_path_template( - self.field, self.path_template) + sample = uri_sample.sample_from_path_template(self.field, self.path_template) return json.dumps(sample) @@ -1230,15 +1311,19 @@ class RoutingRule: routing_parameters: List[RoutingParameter] @classmethod - def try_parse_routing_rule(cls, routing_rule: routing_pb2.RoutingRule) -> Optional['RoutingRule']: - params = getattr(routing_rule, 'routing_parameters') + def try_parse_routing_rule( + cls, routing_rule: routing_pb2.RoutingRule + ) -> Optional["RoutingRule"]: + params = getattr(routing_rule, "routing_parameters") if not params: return None params = [RoutingParameter(x.field, x.path_template) for x in params] return cls(params) @classmethod - def resolve(cls, routing_rule: routing_pb2.RoutingRule, request: Union[dict, str]) -> dict: + def resolve( + cls, routing_rule: routing_pb2.RoutingRule, request: Union[dict, str] + ) -> dict: """Resolves the routing header which should be sent along with the request. The routing header is determined based on the given routing rule and request. 
See the following link for more information on explicit routing headers: @@ -1289,9 +1374,7 @@ def _get_field(request, field_path: str): routing_parameter_key = routing_param.key if routing_param.path_template: routing_param_regex = routing_param.to_regex() - regex_match = routing_param_regex.match( - request_field_value - ) + regex_match = routing_param_regex.match(request_field_value) if regex_match: header_params[routing_parameter_key] = regex_match.group( routing_parameter_key @@ -1304,6 +1387,7 @@ def _get_field(request, field_path: str): @dataclasses.dataclass(frozen=True) class HttpRule: """Representation of the method's http bindings.""" + method: str uri: str body: Optional[str] @@ -1312,8 +1396,11 @@ def path_fields(self, method: "Method") -> List[Tuple[Field, str, str]]: """return list of (name, template) tuples extracted from uri.""" input = method.input return [ - (input.get_field(*match.group("name").split(".")), - match.group("name"), match.group("template")) + ( + input.get_field(*match.group("name").split(".")), + match.group("name"), + match.group("template"), + ) for match in path_template._VARIABLE_RE.finditer(self.uri) if match.group("name") ] @@ -1321,7 +1408,9 @@ def path_fields(self, method: "Method") -> List[Tuple[Field, str, str]]: def sample_request(self, method: "Method") -> Dict[str, Any]: """return json dict for sample request matching the uri template.""" - def sample_from_path_fields(paths: List[Tuple[Field, str, str]]) -> Dict[str, Any]: + def sample_from_path_fields( + paths: List[Tuple[Field, str, str]], + ) -> Dict[str, Any]: """Construct a dict for a sample request object from a list of fields and template patterns. 
@@ -1334,19 +1423,20 @@ def sample_from_path_fields(paths: List[Tuple[Field, str, str]]) -> Dict[str, An sample_names_ = uri_sample.sample_names() for field, path, template in paths: - sample_value = re.sub( - r"(\*\*|\*)", - lambda n: next(sample_names_), - template or '*' - ) if field.type == PrimitiveType.build(str) else field.mock_value_original_type + sample_value = ( + re.sub(r"(\*\*|\*)", lambda n: next(sample_names_), template or "*") + if field.type == PrimitiveType.build(str) + else field.mock_value_original_type + ) uri_sample.add_field(request, path, sample_value) return request + sample = sample_from_path_fields(self.path_fields(method)) return sample @classmethod - def try_parse_http_rule(cls, http_rule) -> Optional['HttpRule']: + def try_parse_http_rule(cls, http_rule) -> Optional["HttpRule"]: method = http_rule.WhichOneof("pattern") if method is None or method == "custom": return None @@ -1392,13 +1482,13 @@ def sample_request(self): @dataclasses.dataclass(frozen=True) class Method: """Description of a method (defined with the ``rpc`` keyword).""" + method_pb: descriptor_pb2.MethodDescriptorProto input: MessageType output: MessageType is_internal: bool = False lro: Optional[OperationInfo] = dataclasses.field(default=None) - extended_lro: Optional[ExtendedOperationInfo] = dataclasses.field( - default=None) + extended_lro: Optional[ExtendedOperationInfo] = dataclasses.field(default=None) retry: Optional[RetryInfo] = dataclasses.field(default=None) timeout: Optional[float] = None meta: metadata.Metadata = dataclasses.field( @@ -1431,15 +1521,26 @@ def transport_safe_name(self) -> str: }, keyword.kwlist, ) - return f"{self.name}_" if self.name.lower() in TRANSPORT_UNSAFE_NAMES else self.name + return ( + f"{self.name}_" + if self.name.lower() in TRANSPORT_UNSAFE_NAMES + else self.name + ) @property def is_operation_polling_method(self): - return self.output.is_extended_operation and self.options.Extensions[ex_ops_pb2.operation_polling_method] + return ( 
+ self.output.is_extended_operation + and self.options.Extensions[ex_ops_pb2.operation_polling_method] + ) @utils.cached_property def name(self): - return make_private(self.method_pb.name) if self.is_internal else self.method_pb.name + return ( + make_private(self.method_pb.name) + if self.is_internal + else self.method_pb.name + ) @utils.cached_property def client_output(self): @@ -1475,22 +1576,24 @@ def _client_output(self, enable_asyncio: bool): # If this method is an LRO, return a PythonType instance representing # that. if self.lro: - return PythonType(meta=metadata.Metadata( - address=metadata.Address( - name='AsyncOperation' if enable_asyncio else 'Operation', - module='operation_async' if enable_asyncio else 'operation', - package=('google', 'api_core'), - collisions=self.lro.response_type.ident.collisions, - ), - documentation=utils.doc( - 'An object representing a long-running operation. \n\n' - 'The result type for the operation will be ' - ':class:`{ident}` {doc}'.format( - doc=self.lro.response_type.meta.doc, - ident=self.lro.response_type.ident.sphinx, + return PythonType( + meta=metadata.Metadata( + address=metadata.Address( + name="AsyncOperation" if enable_asyncio else "Operation", + module="operation_async" if enable_asyncio else "operation", + package=("google", "api_core"), + collisions=self.lro.response_type.ident.collisions, ), - ), - )) + documentation=utils.doc( + "An object representing a long-running operation. \n\n" + "The result type for the operation will be " + ":class:`{ident}` {doc}".format( + doc=self.lro.response_type.meta.doc, + ident=self.lro.response_type.ident.sphinx, + ), + ), + ) + ) if self.extended_lro: return PythonType( @@ -1509,22 +1612,31 @@ def _client_output(self, enable_asyncio: bool): # If this method is paginated, return that method's pager class. 
if self.paged_result_field: - return PythonType(meta=metadata.Metadata( - address=metadata.Address( - name=f'{self.name}AsyncPager' if enable_asyncio else f'{self.name}Pager', - package=self.ident.api_naming.module_namespace + (self.ident.api_naming.versioned_module_name,) + self.ident.subpackage + ( - 'services', - utils.to_snake_case(self.ident.parent[-1]), + return PythonType( + meta=metadata.Metadata( + address=metadata.Address( + name=( + f"{self.name}AsyncPager" + if enable_asyncio + else f"{self.name}Pager" + ), + package=self.ident.api_naming.module_namespace + + (self.ident.api_naming.versioned_module_name,) + + self.ident.subpackage + + ( + "services", + utils.to_snake_case(self.ident.parent[-1]), + ), + module="pagers", + collisions=self.input.ident.collisions, ), - module='pagers', - collisions=self.input.ident.collisions, - ), - documentation=utils.doc( - f'{self.output.meta.doc}\n\n' - 'Iterating over this object will yield results and ' - 'resolve additional pages automatically.', - ), - )) + documentation=utils.doc( + f"{self.output.meta.doc}\n\n" + "Iterating over this object will yield results and " + "resolve additional pages automatically.", + ), + ) + ) # Return the usual output. return self.output @@ -1536,7 +1648,7 @@ def operation_service(self) -> Optional[str]: @property def is_deprecated(self) -> bool: """Returns true if the method is deprecated, false otherwise.""" - return descriptor_pb2.MethodOptions.HasField(self.options, 'deprecated') + return descriptor_pb2.MethodOptions.HasField(self.options, "deprecated") # TODO(yon-mg): remove or rewrite: don't think it performs as intended # e.g. doesn't work with basic case of gRPC transcoding @@ -1549,7 +1661,7 @@ def field_headers(self) -> Sequence[FieldHeader]: # Copied from Node generator. 
# https://github.com/googleapis/gapic-generator-typescript/blob/3ab47f04678d72171ddf25b439d50f6dfb44584c/typescript/src/schema/proto.ts#L587 - pattern = re.compile(r'{(.*?)[=}]') + pattern = re.compile(r"{(.*?)[=}]") potential_verbs = [ http.get, @@ -1560,8 +1672,7 @@ def field_headers(self) -> Sequence[FieldHeader]: http.custom.path, ] field_headers = ( - tuple(FieldHeader(field_header) - for field_header in pattern.findall(verb)) + tuple(FieldHeader(field_header) for field_header in pattern.findall(verb)) for verb in potential_verbs if verb ) @@ -1584,17 +1695,18 @@ def http_options(self) -> List[HttpRule]: """Return a list of the http bindings for this method.""" http = self.options.Extensions[annotations_pb2.http] http_options = [http] + list(http.additional_bindings) - opt_gen = (HttpRule.try_parse_http_rule(http_rule) - for http_rule in http_options) + opt_gen = ( + HttpRule.try_parse_http_rule(http_rule) for http_rule in http_options + ) return [rule for rule in opt_gen if rule] @property def http_opt(self) -> Optional[Dict[str, str]]: """Return the (main) http option for this method. - e.g. {'verb': 'post' - 'url': '/some/path' - 'body': '*'} + e.g. 
{'verb': 'post' + 'url': '/some/path' + 'body': '*'} """ http: List[Tuple[descriptor_pb2.FieldDescriptorProto, str]] @@ -1605,8 +1717,8 @@ def http_opt(self) -> Optional[Dict[str, str]]: http_method = http[0] answer: Dict[str, str] = { - 'verb': http_method[0].name, - 'url': http_method[1], + "verb": http_method[0].name, + "url": http_method[1], } if len(http) > 1: body_spec = http[1] @@ -1623,8 +1735,8 @@ def path_params(self) -> Sequence[str]: if self.http_opt is None: return [] - pattern = r'\{(\w+)(?:=.+?)?\}' - return re.findall(pattern, self.http_opt['url']) + pattern = r"\{(\w+)(?:=.+?)?\}" + return re.findall(pattern, self.http_opt["url"]) @property def query_params(self) -> Set[str]: @@ -1635,7 +1747,7 @@ def query_params(self) -> Set[str]: return set() params = set(self.path_params) - body = self.http_opt.get('body') + body = self.http_opt.get("body") if body: if body == "*": # The entire request is the REST body. @@ -1664,13 +1776,13 @@ def _fields_mapping(self, signatures) -> Mapping[str, Field]: cross_pkg_request = self.input.ident.package != self.ident.package def filter_fields(sig: str) -> Iterable[Tuple[str, Field]]: - for f in sig.split(','): + for f in sig.split(","): if not f: # Special case for an empty signature continue name = f.strip() - field = self.input.get_field(*name.split('.')) - name += '_' if field.field_pb.name in utils.RESERVED_NAMES else '' + field = self.input.get_field(*name.split(".")) + name += "_" if field.field_pb.name in utils.RESERVED_NAMES else "" if cross_pkg_request and not field.is_primitive: # This is not a proto-plus wrapped message type, # and setting a non-primitive field directly is verboten. 
@@ -1694,17 +1806,17 @@ def flattened_field_to_key(self): def legacy_flattened_fields(self) -> Mapping[str, Field]: """Return the legacy flattening interface: top level fields only, required fields first""" - required, optional = utils.partition(lambda f: f.required, - self.input.fields.values()) - return collections.OrderedDict((f.name, f) - for f in chain(required, optional)) + required, optional = utils.partition( + lambda f: f.required, self.input.fields.values() + ) + return collections.OrderedDict((f.name, f) for f in chain(required, optional)) @property def grpc_stub_type(self) -> str: """Return the type of gRPC stub to use.""" - return '{client}_{server}'.format( - client='stream' if self.client_streaming else 'unary', - server='stream' if self.server_streaming else 'unary', + return "{client}_{server}".format( + client="stream" if self.client_streaming else "unary", + server="stream" if self.server_streaming else "unary", ) # TODO(yon-mg): figure out why idempotent is reliant on http annotation @@ -1730,17 +1842,20 @@ def paged_result_field(self) -> Optional[Field]: # then the method is not paginated. 
# The request must have page_token and next_page_token as they keep track of pages - for source, source_type, name in ((self.input, str, 'page_token'), - (self.output, str, 'next_page_token')): + for source, source_type, name in ( + (self.input, str, "page_token"), + (self.output, str, "next_page_token"), + ): field = source.fields.get(name, None) if not field or field.type != source_type: return None # The request must have max_results or page_size - page_fields = (self.input.fields.get('max_results', None), - self.input.fields.get('page_size', None)) - page_field_size = next( - (field for field in page_fields if field), None) + page_fields = ( + self.input.fields.get("max_results", None), + self.input.fields.get("page_size", None), + ) + page_field_size = next((field for field in page_fields if field), None) if not page_field_size or page_field_size.type != int: return None @@ -1765,12 +1880,9 @@ def _ref_types(self, recursive: bool) -> Sequence[Union[MessageType, EnumType]]: # Begin with the input (request) and output (response) messages. 
answer: List[Union[MessageType, EnumType]] = [self.input] types: Iterable[Union[MessageType, EnumType]] = ( - self.input.recursive_field_types if recursive - else ( - f.type - for f in self.flattened_fields.values() - if f.message or f.enum - ) + self.input.recursive_field_types + if recursive + else (f.type for f in self.flattened_fields.values() if f.message or f.enum) ) answer.extend(types) @@ -1802,12 +1914,14 @@ def _ref_types(self, recursive: bool) -> Sequence[Union[MessageType, EnumType]]: @property def void(self) -> bool: """Return True if this method has no return value, False otherwise.""" - return self.output.ident.proto == 'google.protobuf.Empty' + return self.output.ident.proto == "google.protobuf.Empty" - def with_context(self, *, - collisions: Set[str], - visited_messages: Optional[Set["MessageType"]] = None, - ) -> 'Method': + def with_context( + self, + *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> "Method": """Return a derivative of this method with the provided context. This method is used to address naming collisions. 
The returned @@ -1816,16 +1930,22 @@ def with_context(self, *, """ maybe_lro = None if self.lro: - maybe_lro = self.lro.with_context( - collisions=collisions, - visited_messages=visited_messages, - ) if collisions else self.lro + maybe_lro = ( + self.lro.with_context( + collisions=collisions, + visited_messages=visited_messages, + ) + if collisions + else self.lro + ) maybe_extended_lro = ( self.extended_lro.with_context( collisions=collisions, visited_messages=visited_messages, - ) if self.extended_lro else None + ) + if self.extended_lro + else None ) return dataclasses.replace( @@ -1843,11 +1963,13 @@ def with_context(self, *, meta=self.meta.with_context(collisions=collisions), ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - resource_messages: Dict[str, 'MessageType'], - services_in_proto: Dict[str, 'Service'], - ) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + resource_messages: Dict[str, "MessageType"], + services_in_proto: Dict[str, "Service"], + ) -> None: """Adds to the allowlist of Addresses of wrapper objects to be included in selective GAPIC generation. This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -1873,16 +1995,16 @@ def add_to_address_allowlist(self, *, address_allowlist.add(self.ident) if self.lro: - self.lro.add_to_address_allowlist(address_allowlist=address_allowlist, - resource_messages=resource_messages) + self.lro.add_to_address_allowlist( + address_allowlist=address_allowlist, resource_messages=resource_messages + ) - if self.extended_lro: + if self.extended_lro and self.operation_service: # We need to add the service/method pointed to by self.operation_service to # the allowlist, as it might not have been specified by # the methods under selective_gapic_generation. # We assume that the operation service lives in the same proto file as this one. 
- operation_service = services_in_proto[ - self.operation_service] # type: ignore + operation_service = services_in_proto[self.operation_service] address_allowlist.add(operation_service.meta.address) operation_service.operation_polling_method.add_to_address_allowlist( address_allowlist=address_allowlist, @@ -1905,7 +2027,7 @@ def add_to_address_allowlist(self, *, resource_messages=resource_messages, ) - def with_internal_methods(self, *, public_methods: Set[str]) -> 'Method': + def with_internal_methods(self, *, public_methods: Set[str]) -> "Method": """Returns a version of this ``Method`` marked as internal The methods not in the public_methods set will be marked as internal and @@ -1934,10 +2056,7 @@ class CommonResource: @classmethod def build(cls, resource: resource_pb2.ResourceDescriptor): - return cls( - type_name=resource.type, - pattern=next(iter(resource.pattern)) - ) + return cls(type_name=resource.type, pattern=next(iter(resource.pattern))) @utils.cached_property def message_type(self): @@ -1957,6 +2076,7 @@ def message_type(self): @dataclasses.dataclass(frozen=True) class Service: """Description of a service (defined with the ``service`` keyword).""" + service_pb: descriptor_pb2.ServiceDescriptorProto methods: Mapping[str, Method] # N.B.: visible_resources is intended to be a read-only view @@ -2053,7 +2173,7 @@ def host(self) -> str: """ if self.options.Extensions[client_pb2.default_host]: return self.options.Extensions[client_pb2.default_host] - return '' + return "" @property def version(self) -> str: @@ -2064,7 +2184,7 @@ def version(self) -> str: """ if self.options.Extensions[client_pb2.api_version]: return self.options.Extensions[client_pb2.api_version] - return '' + return "" @property def shortname(self) -> str: @@ -2089,7 +2209,7 @@ def oauth_scopes(self) -> Sequence[str]: # Return the OAuth scopes, split on comma. 
return tuple( i.strip() - for i in self.options.Extensions[client_pb2.oauth_scopes].split(',') + for i in self.options.Extensions[client_pb2.oauth_scopes].split(",") if i ) @@ -2111,9 +2231,7 @@ def names(self) -> FrozenSet[str]: """ # Put together a set of the service and method names. answer = {self.name, self.client_name, self.async_client_name} - answer.update( - utils.to_snake_case(i.name) for i in self.methods.values() - ) + answer.update(utils.to_snake_case(i.name) for i in self.methods.values()) # Identify any import module names where the same module name is used # from distinct packages. @@ -2135,6 +2253,7 @@ def names(self) -> FrozenSet[str]: def resource_messages(self) -> FrozenSet[MessageType]: """Returns all the resource message types used in all request and response fields in the service.""" + def gen_resources(message): if message.resource_path: yield message @@ -2145,8 +2264,7 @@ def gen_resources(message): def gen_indirect_resources_used(message): for field in message.recursive_resource_fields: - resource = field.options.Extensions[ - resource_pb2.resource_reference] + resource = field.options.Extensions[resource_pb2.resource_reference] resource_type = resource.type or resource.child_type # The resource may not be visible if the resource type is one of # the common_resources (see the class var in class definition) @@ -2181,7 +2299,8 @@ def resource_messages_dict(self) -> Dict[str, MessageType]: `{"locations.googleapis.com/Location": MessageType(...)}` """ service_resource_messages = { - r.resource_type_full_path: r for r in self.resource_messages} + r.resource_type_full_path: r for r in self.resource_messages + } # Add common resources service_resource_messages.update( @@ -2210,22 +2329,19 @@ def any_extended_operations_methods(self) -> bool: @utils.cached_property def operation_polling_method(self) -> Optional[Method]: return next( - ( - m - for m in self.methods.values() - if m.is_operation_polling_method - ), - None + (m for m in 
self.methods.values() if m.is_operation_polling_method), None ) @utils.cached_property def is_internal(self) -> bool: return any(m.is_internal for m in self.methods.values()) - def with_context(self, *, - collisions: Set[str], - visited_messages: Optional[Set["MessageType"]] = None, - ) -> 'Service': + def with_context( + self, + *, + collisions: Set[str], + visited_messages: Optional[Set["MessageType"]] = None, + ) -> "Service": """Return a derivative of this service with the provided context. This method is used to address naming collisions. The returned @@ -2246,12 +2362,14 @@ def with_context(self, *, meta=self.meta.with_context(collisions=collisions), ) - def add_to_address_allowlist(self, *, - address_allowlist: Set['metadata.Address'], - method_allowlist: Set[str], - resource_messages: Dict[str, 'MessageType'], - services_in_proto: Dict[str, 'Service'], - ) -> None: + def add_to_address_allowlist( + self, + *, + address_allowlist: Set["metadata.Address"], + method_allowlist: Set[str], + resource_messages: Dict[str, "MessageType"], + services_in_proto: Dict[str, "Service"], + ) -> None: """Adds to the allowlist of Addresses of wrapper objects to be included in selective GAPIC generation. This method is used to create an allowlist of addresses to be used to filter out unneeded @@ -2267,7 +2385,7 @@ def add_to_address_allowlist(self, *, resource type name of a resource message to the corresponding MessageType object representing that resource message. Only resources with a message representation should be included in the dictionary. 
- services_in_proto (Dict[str, Service]): + services_in_proto (Dict[str, Service]): Returns: None """ @@ -2282,8 +2400,9 @@ def add_to_address_allowlist(self, *, services_in_proto=services_in_proto, ) - def prune_messages_for_selective_generation(self, *, - address_allowlist: Set['metadata.Address']) -> 'Service': + def prune_messages_for_selective_generation( + self, *, address_allowlist: Set["metadata.Address"] + ) -> "Service": """Returns a truncated version of this Service. Only the methods, messages, and enums contained in the address allowlist @@ -2299,13 +2418,11 @@ def prune_messages_for_selective_generation(self, *, return dataclasses.replace( self, methods={ - k: v - for k, v in self.methods.items() if v.ident in address_allowlist - } + k: v for k, v in self.methods.items() if v.ident in address_allowlist + }, ) - def with_internal_methods(self, *, - public_methods: Set[str]) -> 'Service': + def with_internal_methods(self, *, public_methods: Set[str]) -> "Service": """Returns a version of this ``Service`` with some Methods marked as internal. 
The methods not in the public_methods set will be marked as internal and @@ -2330,5 +2447,5 @@ def with_internal_methods(self, *, meth.with_internal_methods(public_methods=public_methods) for meth in self.methods.values() ) - } + }, ) diff --git a/packages/gapic-generator/gapic/utils/__init__.py b/packages/gapic-generator/gapic/utils/__init__.py index 6f2d02d4ac49..8b4880173039 100644 --- a/packages/gapic-generator/gapic/utils/__init__.py +++ b/packages/gapic-generator/gapic/utils/__init__.py @@ -33,22 +33,22 @@ __all__ = ( - 'cached_property', - 'convert_uri_fieldnames', - 'doc', - 'empty', - 'is_msg_field_pb', - 'is_str_field_pb', - 'make_private', - 'nth', - 'Options', - 'partition', - 'RESERVED_NAMES', - 'rst', - 'sort_lines', - 'to_snake_case', - 'to_camel_case', - 'to_valid_filename', - 'to_valid_module_name', - 'wrap', + "cached_property", + "convert_uri_fieldnames", + "doc", + "empty", + "is_msg_field_pb", + "is_str_field_pb", + "make_private", + "nth", + "Options", + "partition", + "RESERVED_NAMES", + "rst", + "sort_lines", + "to_snake_case", + "to_camel_case", + "to_valid_filename", + "to_valid_module_name", + "wrap", ) diff --git a/packages/gapic-generator/gapic/utils/cache.py b/packages/gapic-generator/gapic/utils/cache.py index b2292b16d23e..f9c4d703f572 100644 --- a/packages/gapic-generator/gapic/utils/cache.py +++ b/packages/gapic-generator/gapic/utils/cache.py @@ -27,11 +27,12 @@ def cached_property(fx): Returns: Callable[]: The wrapped function. """ + @functools.wraps(fx) def inner(self): # Quick check: If there is no cache at all, create an empty cache. - if not hasattr(self, '_cached_values'): - object.__setattr__(self, '_cached_values', {}) + if not hasattr(self, "_cached_values"): + object.__setattr__(self, "_cached_values", {}) # If and only if the function's result is not in the cache, # run the function. @@ -40,4 +41,5 @@ def inner(self): # Return the value from cache. 
return self._cached_values[fx.__name__] + return property(inner) diff --git a/packages/gapic-generator/gapic/utils/case.py b/packages/gapic-generator/gapic/utils/case.py index 635d2945c5bc..62343e98d108 100644 --- a/packages/gapic-generator/gapic/utils/case.py +++ b/packages/gapic-generator/gapic/utils/case.py @@ -27,12 +27,12 @@ def to_snake_case(s: str) -> str: str: The string in snake case (and all lower-cased). """ # Replace all capital letters that are preceded by a lower-case letter. - s = re.sub(r'(?<=[a-z])([A-Z])', r'_\1', str(s)) + s = re.sub(r"(?<=[a-z])([A-Z])", r"_\1", str(s)) # Find all capital letters that are followed by a lower-case letter, # and are preceded by any character other than underscore. # (Note: This also excludes beginning-of-string.) - s = re.sub(r'(?<=[^_])([A-Z])(?=[a-z])', r'_\1', s) + s = re.sub(r"(?<=[^_])([A-Z])(?=[a-z])", r"_\1", s) # Numbers are a weird case; the goal is to spot when they _start_ # some kind of name or acronym (e.g. 2FA, 3M). @@ -40,15 +40,15 @@ def to_snake_case(s: str) -> str: # Find cases of a number preceded by a lower-case letter _and_ # followed by at least two capital letters or a single capital and # end of string. - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]{2})', r'_\1', s) - s = re.sub(r'(?<=[a-z])(\d)(?=[A-Z]$)', r'_\1', s) + s = re.sub(r"(?<=[a-z])(\d)(?=[A-Z]{2})", r"_\1", s) + s = re.sub(r"(?<=[a-z])(\d)(?=[A-Z]$)", r"_\1", s) # Done; return the camel-cased string. return s.lower() def to_camel_case(s: str) -> str: - '''Convert any string to camel case. + """Convert any string to camel case. This is provided to templates as the ``camel_case`` filter. @@ -57,7 +57,7 @@ def to_camel_case(s: str) -> str: Returns: str: The string in lower camel case. 
- ''' + """ - items = re.split(r'[_-]', to_snake_case(s)) + items = re.split(r"[_-]", to_snake_case(s)) return items[0].lower() + "".join(x.capitalize() for x in items[1:]) diff --git a/packages/gapic-generator/gapic/utils/code.py b/packages/gapic-generator/gapic/utils/code.py index 1a80dee92b50..047e5f1de1d7 100644 --- a/packages/gapic-generator/gapic/utils/code.py +++ b/packages/gapic-generator/gapic/utils/code.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import (Callable, Iterable, List, Optional, Tuple, TypeVar) +from typing import Callable, Iterable, List, Optional, Tuple, TypeVar import itertools @@ -22,15 +22,17 @@ def empty(content: str) -> bool: Args: content (str): A string containing Python code (or a lack thereof). """ - return not any([i.lstrip() and not i.lstrip().startswith('#') - for i in content.split('\n')]) + return not any( + [i.lstrip() and not i.lstrip().startswith("#") for i in content.split("\n")] + ) -T = TypeVar('T') +T = TypeVar("T") -def partition(predicate: Callable[[T], bool], - iterator: Iterable[T]) -> Tuple[List[T], List[T]]: +def partition( + predicate: Callable[[T], bool], iterator: Iterable[T] +) -> Tuple[List[T], List[T]]: """Partitions an iterable into two lists based on a predicate Args: @@ -79,4 +81,4 @@ def make_private(object_name: str) -> str: Returns: str: The final name of the privated object. """ - return object_name if object_name.startswith('_') else f'_{object_name}' + return object_name if object_name.startswith("_") else f"_{object_name}" diff --git a/packages/gapic-generator/gapic/utils/filename.py b/packages/gapic-generator/gapic/utils/filename.py index f0c61c14e7e0..0af1e42f648f 100644 --- a/packages/gapic-generator/gapic/utils/filename.py +++ b/packages/gapic-generator/gapic/utils/filename.py @@ -28,7 +28,7 @@ def to_valid_filename(filename: str) -> str: Returns: str: A valid filename. 
""" - return re.sub(r'[^a-z0-9.$_-]+', '-', filename.lower()) + return re.sub(r"[^a-z0-9.$_-]+", "-", filename.lower()) def to_valid_module_name(module_name: str) -> str: @@ -41,4 +41,4 @@ def to_valid_module_name(module_name: str) -> str: str: A valid module name. Extensions (e.g. *.py), if present, are untouched. """ - return to_valid_filename(module_name).replace('-', '_') + return to_valid_filename(module_name).replace("-", "_") diff --git a/packages/gapic-generator/gapic/utils/lines.py b/packages/gapic-generator/gapic/utils/lines.py index 1a5b5ce8e372..8cad3e0fbf57 100644 --- a/packages/gapic-generator/gapic/utils/lines.py +++ b/packages/gapic-generator/gapic/utils/lines.py @@ -28,19 +28,19 @@ def sort_lines(text: str, dedupe: bool = True) -> str: Useful for dealing with import statements in templates. """ # Preserve leading or trailing newlines. - leading = '\n' if text.startswith('\n') else '' - trailing = '\n' if text.endswith('\n') else '' + leading = "\n" if text.startswith("\n") else "" + trailing = "\n" if text.endswith("\n") else "" # Split the text into individual lines, throwing away any empty lines. - lines: Iterable[str] = (i for i in text.strip().split('\n') if i.strip()) + lines: Iterable[str] = (i for i in text.strip().split("\n") if i.strip()) # De-duplicate the lines if requested. if dedupe: lines = set(lines) # Return the final string. - answer = '\n'.join(sorted(lines)) - return f'{leading}{answer}{trailing}' + answer = "\n".join(sorted(lines)) + return f"{leading}{answer}{trailing}" def get_subsequent_line_indentation_level(list_item: str) -> int: @@ -67,7 +67,7 @@ def get_subsequent_line_indentation_level(list_item: str) -> int: 22. The quick brown fox jumps over the lazy dog. 
The quick brown fox jumps over the lazy dog """ - if len(list_item) >= 2 and list_item[0:2] in ['- ', '+ ']: + if len(list_item) >= 2 and list_item[0:2] in ["- ", "+ "]: indentation_level = 2 elif len(list_item) >= 4 and re.match(NUMBERED_LIST_REGEX, list_item): indentation_level = 4 @@ -83,10 +83,16 @@ def is_list_item(list_item: str) -> bool: """ if len(list_item) < 3: return False - return list_item.startswith('- ') or list_item.startswith('+ ') or bool(re.match(NUMBERED_LIST_REGEX, list_item)) + return ( + list_item.startswith("- ") + or list_item.startswith("+ ") + or bool(re.match(NUMBERED_LIST_REGEX, list_item)) + ) -def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0) -> str: +def wrap( + text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 +) -> str: """Wrap the given string to the given width. This uses :meth:`textwrap.fill` under the hood, but provides useful @@ -111,7 +117,7 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 """ # Quick check: If there is empty text, abort. if not text: - return '' + return "" # If the offset is None, default it to the indent value. if offset is None: @@ -120,10 +126,10 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 # Protocol buffers preserves single initial spaces after line breaks # when parsing comments (such as the space before the "w" in "when" here). # Re-wrapping causes these to be two spaces; correct for this. - text = text.replace('\n ', '\n') + text = text.replace("\n ", "\n") # Break off the first line of the string to address non-zero offsets. - first = text.split('\n')[0] + '\n' + first = text.split("\n")[0] + "\n" # Ensure that there are 2 new lines after a colon, otherwise # the sphinx docs build will fail. 
@@ -133,68 +139,74 @@ def wrap(text: str, width: int, *, offset: Optional[int] = None, indent: int = 0 if len(first) > width - offset: # Ensure `break_on_hyphens` is set to `False` when using # `textwrap.wrap` to avoid breaking hyperlinks with hyphens. - initial = textwrap.wrap(first, - break_long_words=False, - width=width - offset, - break_on_hyphens=False, - ) + initial = textwrap.wrap( + first, + break_long_words=False, + width=width - offset, + break_on_hyphens=False, + ) # Strip the first \n from the text so it is not misidentified as an # intentionally short line below, except when the text contains a list, # as the new line is required for lists. Look for a list item marker in # the remaining text which indicates that a list is present. - if '\n' in text: - remaining_text = "".join(text.split('\n')[1:]) + if "\n" in text: + remaining_text = "".join(text.split("\n")[1:]) if not is_list_item(remaining_text.strip()): - text = text.replace('\n', ' ', 1) + text = text.replace("\n", " ", 1) # Save the new `first` line. - first = f'{initial[0]}\n' + first = f"{initial[0]}\n" # Ensure that there are 2 new lines after a colon, otherwise # the sphinx docs build will fail. - text = re.sub(r':\n([^\n])', r':\n\n\1', text) + text = re.sub(r":\n([^\n])", r":\n\n\1", text) - text = text[len(first):] + text = text[len(first) :] if not text: return first.strip() # Strip leading and ending whitespace. # Preserve new line at the beginning. - new_line = '\n' if text[0] == '\n' else '' + new_line = "\n" if text[0] == "\n" else "" text = new_line + text.strip() # Tokenize the rest of the text to try to preserve line breaks # that semantically matter. 
tokens = [] - token = '' - for line in text.split('\n'): + token = "" + for line in text.split("\n"): # Ensure that lines that start with a list item marker are always on a new line # Ensure that blank lines are preserved if (is_list_item(line.strip()) or not len(line)) and token: tokens.append(token) - token = '' - token += line + '\n' + token = "" + token += line + "\n" # Preserve line breaks for lines that are short or end with colon. - if len(line) < width * 0.75 or line.endswith(':'): + if len(line) < width * 0.75 or line.endswith(":"): tokens.append(token) - token = '' + token = "" if token: tokens.append(token) # Wrap the remainder of the string at the desired width. - return '{first}{text}'.format( + return "{first}{text}".format( first=first, # Ensure `break_on_hyphens` is set to `False` when using # `textwrap.fill` to avoid breaking hyperlinks with hyphens. - text='\n'.join([textwrap.fill( - break_long_words=False, - initial_indent=' ' * indent, - # ensure that subsequent lines for lists are indented 2 spaces - subsequent_indent=' ' * indent + - ' ' * get_subsequent_line_indentation_level(token.strip()), - text=token, - width=width, - break_on_hyphens=False, - ) for token in tokens]), - ).rstrip('\n') + text="\n".join( + [ + textwrap.fill( + break_long_words=False, + initial_indent=" " * indent, + # ensure that subsequent lines for lists are indented 2 spaces + subsequent_indent=" " * indent + + " " * get_subsequent_line_indentation_level(token.strip()), + text=token, + width=width, + break_on_hyphens=False, + ) + for token in tokens + ] + ), + ).rstrip("\n") diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index daa77ff6b59f..ef6497e39a38 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -33,47 +33,49 @@ class Options: on unrecognized arguments (essentially, we throw them away, but we do warn if it looks like it was meant for 
us). """ - name: str = '' + + name: str = "" namespace: Tuple[str, ...] = dataclasses.field(default=()) - warehouse_package_name: str = '' + warehouse_package_name: str = "" retry: Optional[Dict[str, Any]] = None sample_configs: Tuple[str, ...] = dataclasses.field(default=()) autogen_snippets: bool = True - templates: Tuple[str, ...] = dataclasses.field(default=('DEFAULT',)) + templates: Tuple[str, ...] = dataclasses.field(default=("DEFAULT",)) lazy_import: bool = False old_naming: bool = False add_iam_methods: bool = False metadata: bool = False # TODO(yon-mg): should there be an enum for transport type? transport: List[str] = dataclasses.field(default_factory=lambda: []) - service_yaml_config: Dict[str, Any] = dataclasses.field( - default_factory=dict) + service_yaml_config: Dict[str, Any] = dataclasses.field(default_factory=dict) rest_numeric_enums: bool = False - proto_plus_deps: Tuple[str, ...] = dataclasses.field(default=('',)) + proto_plus_deps: Tuple[str, ...] = dataclasses.field(default=("",)) # Class constants - PYTHON_GAPIC_PREFIX: str = 'python-gapic-' - OPT_FLAGS: FrozenSet[str] = frozenset(( - 'add-iam-methods', # microgenerator implementation for `reroute_to_grpc_interface` - 'lazy-import', # requires >= 3.7 - 'metadata', # generate GAPIC metadata JSON file - 'old-naming', # TODO(dovs): Come up with a better comment - 'retry-config', # takes a path - 'service-yaml', # takes a path - 'samples', # output dir - 'autogen-snippets', # produce auto-generated snippets - # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) 
- 'transport', - 'warehouse-package-name', # change the package name on PyPI - # when transport includes "rest", request that response enums be JSON-encoded as numbers - 'rest-numeric-enums', - # proto plus dependencies delineated by '+' - # For example, 'google.cloud.api.v1+google.cloud.anotherapi.v2' - 'proto-plus-deps', - )) + PYTHON_GAPIC_PREFIX: str = "python-gapic-" + OPT_FLAGS: FrozenSet[str] = frozenset( + ( + "add-iam-methods", # microgenerator implementation for `reroute_to_grpc_interface` + "lazy-import", # requires >= 3.7 + "metadata", # generate GAPIC metadata JSON file + "old-naming", # TODO(dovs): Come up with a better comment + "retry-config", # takes a path + "service-yaml", # takes a path + "samples", # output dir + "autogen-snippets", # produce auto-generated snippets + # transport type(s) delineated by '+' (i.e. grpc, rest, custom.[something], etc?) + "transport", + "warehouse-package-name", # change the package name on PyPI + # when transport includes "rest", request that response enums be JSON-encoded as numbers + "rest-numeric-enums", + # proto plus dependencies delineated by '+' + # For example, 'google.cloud.api.v1+google.cloud.anotherapi.v2' + "proto-plus-deps", + ) + ) @classmethod - def build(cls, opt_string: str) -> 'Options': + def build(cls, opt_string: str) -> "Options": """Build an Options instance based on a protoc opt string. Args: @@ -93,12 +95,12 @@ def build(cls, opt_string: str) -> 'Options': """ # Parse out every option beginning with `python-gapic` opts: DefaultDict[str, List[str]] = defaultdict(list) - for opt in opt_string.split(','): + for opt in opt_string.split(","): opt = opt.strip() # Parse out the key and value. - value = 'true' - if '=' in opt: - opt, value = opt.split('=') + value = "true" + if "=" in opt: + opt, value = opt.split("=") # Save known, expected keys. 
if opt in cls.OPT_FLAGS: @@ -113,14 +115,14 @@ def build(cls, opt_string: str) -> 'Options': # # Just assume everything is a list at this point, and the # final instantiation step can de-list-ify where appropriate. - opts[opt[len(cls.PYTHON_GAPIC_PREFIX):]].append(value) + opts[opt[len(cls.PYTHON_GAPIC_PREFIX) :]].append(value) # If templates are specified, one of the specified directories # may be our default; perform that replacement. - default_token = 'DEFAULT' - templates = opts.pop('templates', [default_token]) - pwd = path.join(path.dirname(__file__), '..') - default_path = path.realpath(path.join(pwd, 'templates')) + default_token = "DEFAULT" + templates = opts.pop("templates", [default_token]) + pwd = path.join(path.dirname(__file__), "..") + default_path = path.realpath(path.join(pwd, "templates")) def tweak_path(p): if p == default_token: @@ -134,46 +136,50 @@ def tweak_path(p): templates = [tweak_path(p) for p in templates] retry_cfg = None - retry_paths = opts.pop('retry-config', None) + retry_paths = opts.pop("retry-config", None) if retry_paths: # Just use the last config specified. with open(retry_paths[-1]) as f: retry_cfg = json.load(f) service_yaml_config = {} - service_yaml_paths = opts.pop('service-yaml', None) + service_yaml_paths = opts.pop("service-yaml", None) if service_yaml_paths: # Just use the last file specified. with open(service_yaml_paths[-1]) as f: service_yaml_config = yaml.load(f, Loader=yaml.Loader) # The yaml service files typically have this field, # but it is not a field in the gogle.api.Service proto. - service_yaml_config.pop('type', None) + service_yaml_config.pop("type", None) # Build the options instance. 
- sample_paths = opts.pop('samples', []) + sample_paths = opts.pop("samples", []) # autogen-snippets is True by default, so make sure users can disable # by passing `autogen-snippets=false` - autogen_snippets = opts.pop( - "autogen-snippets", ["True"])[0] in ("True", "true", "T", "t", "TRUE") + autogen_snippets = opts.pop("autogen-snippets", ["True"])[0] in ( + "True", + "true", + "T", + "t", + "TRUE", + ) # NOTE: Snippets are not currently correct for the alternative (Ads) templates # so always disable snippetgen in that case # https://github.com/googleapis/gapic-generator-python/issues/1052 - old_naming = bool(opts.pop('old-naming', False)) + old_naming = bool(opts.pop("old-naming", False)) if old_naming: autogen_snippets = False - proto_plus_deps = tuple(opts.pop('proto-plus-deps', '')) + proto_plus_deps = tuple(opts.pop("proto-plus-deps", "")) if len(proto_plus_deps): - proto_plus_deps = tuple(proto_plus_deps[0].split('+')) + proto_plus_deps = tuple(proto_plus_deps[0].split("+")) answer = Options( - name=opts.pop('name', ['']).pop(), - namespace=tuple(opts.pop('namespace', [])), - warehouse_package_name=opts.pop( - 'warehouse-package-name', ['']).pop(), + name=opts.pop("name", [""]).pop(), + namespace=tuple(opts.pop("namespace", [])), + warehouse_package_name=opts.pop("warehouse-package-name", [""]).pop(), retry=retry_cfg, sample_configs=tuple( cfg_path @@ -182,14 +188,14 @@ def tweak_path(p): ), autogen_snippets=autogen_snippets, templates=tuple(path.expanduser(i) for i in templates), - lazy_import=bool(opts.pop('lazy-import', False)), + lazy_import=bool(opts.pop("lazy-import", False)), old_naming=old_naming, - add_iam_methods=bool(opts.pop('add-iam-methods', False)), - metadata=bool(opts.pop('metadata', False)), + add_iam_methods=bool(opts.pop("add-iam-methods", False)), + metadata=bool(opts.pop("metadata", False)), # transport should include desired transports delimited by '+', e.g. 
transport='grpc+rest' - transport=opts.pop('transport', ['grpc'])[0].split('+'), + transport=opts.pop("transport", ["grpc"])[0].split("+"), service_yaml_config=service_yaml_config, - rest_numeric_enums=bool(opts.pop('rest-numeric-enums', False)), + rest_numeric_enums=bool(opts.pop("rest-numeric-enums", False)), proto_plus_deps=proto_plus_deps, ) @@ -199,7 +205,7 @@ def tweak_path(p): # If there are any options remaining, then we failed to recognize # them -- complain. for key in opts.keys(): - warnings.warn(f'Unrecognized option: `python-gapic-{key}`.') + warnings.warn(f"Unrecognized option: `python-gapic-{key}`.") # Done; return the built options. return answer diff --git a/packages/gapic-generator/gapic/utils/rst.py b/packages/gapic-generator/gapic/utils/rst.py index dc4912547ce9..a77df3033276 100644 --- a/packages/gapic-generator/gapic/utils/rst.py +++ b/packages/gapic-generator/gapic/utils/rst.py @@ -20,8 +20,13 @@ from gapic.utils.lines import wrap -def rst(text: str, width: int = 72, indent: int = 0, nl: Optional[bool] = None, - source_format: str = 'commonmark'): +def rst( + text: str, + width: int = 72, + indent: int = 0, + nl: Optional[bool] = None, + source_format: str = "commonmark", +): """Convert the given text to ReStructured Text. Args: @@ -42,32 +47,39 @@ def rst(text: str, width: int = 72, indent: int = 0, nl: Optional[bool] = None, # do not convert it. # (This makes code generation significantly faster; calling out to pandoc # is by far the most expensive thing we do.) - if not re.search(r'[|*`_[\]]', text): - answer = wrap(text, + if not re.search(r"[|*`_[\]]", text): + answer = wrap( + text, indent=indent, offset=indent + 3, width=width - indent, - ) + ) else: # Convert from CommonMark to ReStructured Text. 
- answer = pypandoc.convert_text(text, 'rst', - format=source_format, - extra_args=['--columns=%d' % (width - indent)], - ).strip().replace('\n', f"\n{' ' * indent}") + answer = ( + pypandoc.convert_text( + text, + "rst", + format=source_format, + extra_args=["--columns=%d" % (width - indent)], + ) + .strip() + .replace("\n", f"\n{' ' * indent}") + ) # Add a newline to the end of the document if any line breaks are # already present. # # This causes the closing """ to be on the subsequent line only when # appropriate. - if nl or ('\n' in answer and nl is None): - answer += '\n' + ' ' * indent + if nl or ("\n" in answer and nl is None): + answer += "\n" + " " * indent # If the text ends in a double-quote, append a period. # This ensures that we do not get a parse error when this output is # followed by triple-quotes. if answer.endswith('"'): - answer += '.' + answer += "." # Done; return the answer. return answer diff --git a/packages/gapic-generator/gapic/utils/uri_conv.py b/packages/gapic-generator/gapic/utils/uri_conv.py index 6b8ba7277111..2d0a32d242ff 100644 --- a/packages/gapic-generator/gapic/utils/uri_conv.py +++ b/packages/gapic-generator/gapic/utils/uri_conv.py @@ -32,7 +32,8 @@ def _fix_name_segment(name_seg: str) -> str: def _fix_field_path(field_path: str) -> str: return ".".join( - (_fix_name_segment(name_seg) for name_seg in field_path.split("."))) + (_fix_name_segment(name_seg) for name_seg in field_path.split(".")) + ) last = 0 pieces = [] diff --git a/packages/gapic-generator/gapic/utils/uri_sample.py b/packages/gapic-generator/gapic/utils/uri_sample.py index 6d2b6f8e9a53..0367d6a21fd1 100644 --- a/packages/gapic-generator/gapic/utils/uri_sample.py +++ b/packages/gapic-generator/gapic/utils/uri_sample.py @@ -48,7 +48,7 @@ def add_field(obj, path, value): not a dict.: e.g. 
path='a.b', obj = {'a':'abc'} """ - segments = path.split('.') + segments = path.split(".") leaf = segments.pop() subfield = obj for segment in segments: @@ -71,8 +71,7 @@ def sample_from_path_fields(paths: List[Tuple[str, str]]) -> Dict[Any, Any]: for path, template in paths: sample_value = re.sub( - r"(\*\*|\*)", - lambda n: next(sample_names_), template if template else '*' + r"(\*\*|\*)", lambda n: next(sample_names_), template if template else "*" ) add_field(request, path, sample_value) return request @@ -86,11 +85,11 @@ def sample_from_path_template(field: str, path_template: str) -> Dict[Any, Any]: Returns: A new nested dict that has field as key and the instantiated template as value. """ - if '{' in path_template: - i = path_template.index('{') - j = path_template.index('}') - seg = path_template[i:j + 1] + if "{" in path_template: + i = path_template.index("{") + j = path_template.index("}") + seg = path_template[i : j + 1] # Skip "}" - seg = seg[seg.index('=') + 1:-1] - path_template = path_template[:i] + seg + path_template[j + 1:] + seg = seg[seg.index("=") + 1 : -1] + path_template = path_template[:i] + seg + path_template[j + 1 :] return sample_from_path_fields([(field, path_template)]) diff --git a/packages/gapic-generator/mypy.ini b/packages/gapic-generator/mypy.ini index 78cfb8988b2e..f8584b421d9b 100644 --- a/packages/gapic-generator/mypy.ini +++ b/packages/gapic-generator/mypy.ini @@ -1,2 +1,2 @@ [mypy] -python_version = 3.7 +python_version = 3.8 diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 69af653873f9..3198eb964149 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -31,7 +31,10 @@ showcase_version = os.environ.get("SHOWCASE_VERSION", "0.35.0") ADS_TEMPLATES = path.join(path.dirname(__file__), "gapic", "ads-templates") - +BLACK_VERSION = "black==25.1.0" +BLACK_PATHS = ["docs", "gapic", "tests", "test_utils", "noxfile.py", "setup.py"] +# exclude golden 
files and generated protobuf code +BLACK_EXCLUDES = "|".join([".*golden.*", ".*pb2.py"]) ALL_PYTHON = ( "3.7", @@ -50,7 +53,13 @@ def unit(session): """Run the unit test suite.""" session.install( - "coverage", "pytest-cov", "pytest", "pytest-xdist", "pyfakefs", "grpcio-status", "proto-plus", + "coverage", + "pytest-cov", + "pytest", + "pytest-xdist", + "pyfakefs", + "grpcio-status", + "proto-plus", ) session.install("-e", ".") session.run( @@ -78,6 +87,7 @@ def unit(session): if os.path.splitext(f)[1] == ".proto" and f.startswith("test_") ) + # Note: this class lives outside 'fragment' # so that, if necessary, it can be pickled for a ProcessPoolExecutor # A callable class is necessary so that the session can be closed over @@ -108,7 +118,12 @@ def __call__(self, frag): ] outputs.append( - self.session.run(*session_args, str(frag), external=True, silent=True,) + self.session.run( + *session_args, + str(frag), + external=True, + silent=True, + ) ) # Install the generated fragment library. @@ -122,7 +137,7 @@ def __call__(self, frag): # https://github.com/googleapis/gapic-generator-python/issues/1748 # The ads templates do not have constraints files. 
constraints_path = str( - f"{tmp_dir}/testing/constraints-{self.session.python}.txt" + f"{tmp_dir}/testing/constraints-{self.session.python}.txt" ) self.session.install(tmp_dir, "-e", ".", "-qqq", "-r", constraints_path) @@ -157,7 +172,9 @@ def fragment(session, use_ads_templates=False): ) session.install("-e", ".") - frag_files = [Path(f) for f in session.posargs] if session.posargs else FRAGMENT_FILES + frag_files = ( + [Path(f) for f in session.posargs] if session.posargs else FRAGMENT_FILES + ) if os.environ.get("PARALLEL_FRAGMENT_TESTS", "false").lower() == "true": with ThreadPoolExecutor() as p: @@ -175,9 +192,10 @@ def fragment(session, use_ads_templates=False): def fragment_alternative_templates(session): fragment(session, use_ads_templates=True) -# `_add_python_settings` consumes a path to a temporary directory (str; i.e. tmp_dir) and -# python settings (Dict; i.e. python settings) and modifies the service yaml within -# tmp_dir to include python settings. The primary purpose of this function is to modify + +# `_add_python_settings` consumes a path to a temporary directory (str; i.e. tmp_dir) and +# python settings (Dict; i.e. python settings) and modifies the service yaml within +# tmp_dir to include python settings. The primary purpose of this function is to modify # the service yaml and include `rest_async_io_enabled=True` to test the async rest # optional feature. def _add_python_settings(tmp_dir, python_settings): @@ -193,13 +211,16 @@ def _add_python_settings(tmp_dir, python_settings): yaml.safe_dump(data, file, default_flow_style=False, sort_keys=False) """ + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): `rest_async_io_enabled` must be removed once async rest is GA. 
@contextmanager def showcase_library( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), include_service_yaml=True, retry_config=True, - rest_async_io_enabled=False + rest_async_io_enabled=False, ): """Install the generated library into the session for showcase tests.""" @@ -248,12 +269,10 @@ def showcase_library( python_settings = [ { - 'version': 'google.showcase.v1beta1', - 'python_settings': { - 'experimental_features': { - 'rest_async_io_enabled': True - } - } + "version": "google.showcase.v1beta1", + "python_settings": { + "experimental_features": {"rest_async_io_enabled": True} + }, } ] update_service_yaml = _add_python_settings(tmp_dir, python_settings) @@ -275,9 +294,23 @@ def showcase_library( template_opt = f"python-gapic-templates={templates}" opts = "--python_gapic_opt=" if include_service_yaml and retry_config: - opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest", f"service-yaml={tmp_dir}/showcase_v1beta1.yaml", f"retry-config={tmp_dir}/showcase_grpc_service_config.json")) + opts += ",".join( + other_opts + + ( + f"{template_opt}", + "transport=grpc+rest", + f"service-yaml={tmp_dir}/showcase_v1beta1.yaml", + f"retry-config={tmp_dir}/showcase_grpc_service_config.json", + ) + ) else: - opts += ",".join(other_opts + (f"{template_opt}", "transport=grpc+rest",)) + opts += ",".join( + other_opts + + ( + f"{template_opt}", + "transport=grpc+rest", + ) + ) cmd_tup = ( "python", "-m", @@ -291,7 +324,8 @@ def showcase_library( f"google/showcase/v1beta1/messaging.proto", ) session.run( - *cmd_tup, external=True, + *cmd_tup, + external=True, ) # Install the generated showcase library. 
@@ -302,7 +336,7 @@ def showcase_library( # This is needed to recreate the issue reported in # https://github.com/googleapis/google-cloud-python/issues/12254 constraints_path = str( - f"{tmp_dir}/testing/constraints-{session.python}.txt" + f"{tmp_dir}/testing/constraints-{session.python}.txt" ) # Install the library with a constraints file. if session.python == "3.7": @@ -313,11 +347,24 @@ def showcase_library( # is not supported with the minimum version of `google-api-core` and `google-auth`. # TODO(https://github.com/googleapis/gapic-generator-python/issues/2211): Remove hardcoded dependencies # from here and add a new constraints file for testing the minimum supported versions for async REST feature. - session.install('--no-cache-dir', '--force-reinstall', "google-api-core[grpc, async_rest]==2.21.0") + session.install( + "--no-cache-dir", + "--force-reinstall", + "google-api-core[grpc, async_rest]==2.21.0", + ) # session.install('--no-cache-dir', '--force-reinstall', "google-api-core==2.20.0") - session.install('--no-cache-dir', '--force-reinstall', "google-auth[aiohttp]==2.35.0") + session.install( + "--no-cache-dir", + "--force-reinstall", + "google-auth[aiohttp]==2.35.0", + ) else: - session.install("-e", tmp_dir + ("[async_rest]" if rest_async_io_enabled else ""), "-r", constraints_path) + session.install( + "-e", + tmp_dir + ("[async_rest]" if rest_async_io_enabled else ""), + "-r", + constraints_path, + ) else: # The ads templates do not have constraints files. 
# See https://github.com/googleapis/gapic-generator-python/issues/1788 @@ -354,6 +401,7 @@ def showcase( env=env, ) + @nox.session(python=ALL_PYTHON) def showcase_w_rest_async( session, @@ -363,7 +411,9 @@ def showcase_w_rest_async( ): """Run the Showcase test suite.""" - with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True): + with showcase_library( + session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True + ): session.install("pytest", "pytest-asyncio") test_directory = Path("tests", "system") ignore_file = env.get("IGNORE_FILE") @@ -478,7 +528,9 @@ def run_showcase_unit_tests(session, fail_under=100, rest_async_io_enabled=False @nox.session(python=ALL_PYTHON) def showcase_unit( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), ): """Run the generated unit tests against the Showcase library.""" with showcase_library(session, templates=templates, other_opts=other_opts) as lib: @@ -491,10 +543,14 @@ def showcase_unit( # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2121. 
@nox.session(python=ALL_PYTHON) def showcase_unit_w_rest_async( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), ): """Run the generated unit tests with async rest transport against the Showcase library.""" - with showcase_library(session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True) as lib: + with showcase_library( + session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True + ) as lib: session.chdir(lib) run_showcase_unit_tests(session, rest_async_io_enabled=True) @@ -525,8 +581,10 @@ def showcase_unit_mixins(session): @nox.session(python=ALL_PYTHON) def showcase_unit_alternative_templates_mixins(session): with showcase_library( - session, templates=ADS_TEMPLATES, other_opts=("old-naming",), - include_service_yaml=True + session, + templates=ADS_TEMPLATES, + other_opts=("old-naming",), + include_service_yaml=True, ) as lib: session.chdir(lib) run_showcase_unit_tests(session) @@ -534,11 +592,19 @@ def showcase_unit_alternative_templates_mixins(session): @nox.session(python=NEWEST_PYTHON) def showcase_mypy( - session, templates="DEFAULT", other_opts: typing.Iterable[str] = (), + session, + templates="DEFAULT", + other_opts: typing.Iterable[str] = (), ): """Perform typecheck analysis on the generated Showcase library.""" - session.install("mypy", "types-setuptools", "types-protobuf", "types-requests", "types-dataclasses") + session.install( + "mypy", + "types-setuptools", + "types-protobuf", + "types-requests", + "types-dataclasses", + ) with showcase_library(session, templates=templates, other_opts=other_opts) as lib: session.chdir(lib) @@ -591,7 +657,7 @@ def docs(session): "sphinxcontrib-qthelp==1.0.3", "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", - "sphinx_rtd_theme" + "sphinx_rtd_theme", ) session.install(".") @@ -622,3 +688,37 @@ def mypy(session): ) session.install(".") session.run("mypy", "-p", "gapic") 
+ + +@nox.session(python=NEWEST_PYTHON) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + "--extend-exclude", + BLACK_EXCLUDES, + ) + session.run( + "flake8", + "gapic", + "tests", + ) + + +@nox.session(python="3.10") +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + "--extend-exclude", + BLACK_EXCLUDES, + ) diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py index 3dcd7f1516b9..814c6732573d 100644 --- a/packages/gapic-generator/owlbot.py +++ b/packages/gapic-generator/owlbot.py @@ -26,3 +26,5 @@ s.move(templated_files / "LICENSE") s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS", "auto-approve.yml"]) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index f23a7cec41d0..c9fdc41bed4e 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -40,7 +40,7 @@ def make_service( service_pb = desc.ServiceDescriptorProto(name=name) if host: service_pb.options.Extensions[client_pb2.default_host] = host - service_pb.options.Extensions[client_pb2.oauth_scopes] = ','.join(scopes) + service_pb.options.Extensions[client_pb2.oauth_scopes] = ",".join(scopes) if version: service_pb.options.Extensions[client_pb2.api_version] = version @@ -57,25 +57,27 @@ def make_service( def make_service_with_method_options( *, http_rule: http_pb2.HttpRule = None, - method_signature: str = '', + method_signature: str = "", in_fields: typing.Tuple[desc.FieldDescriptorProto] = (), - visible_resources: typing.Optional[typing.Mapping[str, wrappers.CommonResource]] = 
None, + visible_resources: typing.Optional[ + typing.Mapping[str, wrappers.CommonResource] + ] = None, ) -> wrappers.Service: # Declare a method with options enabled for long-running operations and # field headers. method = get_method( - 'DoBigThing', - 'foo.bar.ThingRequest', - 'google.longrunning.operations_pb2.Operation', - lro_response_type='foo.baz.ThingResponse', - lro_metadata_type='foo.qux.ThingMetadata', + "DoBigThing", + "foo.bar.ThingRequest", + "google.longrunning.operations_pb2.Operation", + lro_response_type="foo.baz.ThingResponse", + lro_metadata_type="foo.qux.ThingMetadata", in_fields=in_fields, http_rule=http_rule, method_signature=method_signature, ) # Define a service descriptor. - service_pb = desc.ServiceDescriptorProto(name='ThingDoer') + service_pb = desc.ServiceDescriptorProto(name="ThingDoer") # Return a service object to test. return wrappers.Service( @@ -85,15 +87,17 @@ def make_service_with_method_options( ) -def get_method(name: str, - in_type: str, - out_type: str, - lro_response_type: str = '', - lro_metadata_type: str = '', *, - in_fields: typing.Tuple[desc.FieldDescriptorProto] = (), - http_rule: http_pb2.HttpRule = None, - method_signature: str = '', - ) -> wrappers.Method: +def get_method( + name: str, + in_type: str, + out_type: str, + lro_response_type: str = "", + lro_metadata_type: str = "", + *, + in_fields: typing.Tuple[desc.FieldDescriptorProto] = (), + http_rule: http_pb2.HttpRule = None, + method_signature: str = "", +) -> wrappers.Method: input_ = get_message(in_type, fields=in_fields) output = get_message(out_type) lro = None @@ -125,9 +129,11 @@ def get_method(name: str, ) -def get_message(dot_path: str, *, - fields: typing.Tuple[desc.FieldDescriptorProto] = (), - ) -> wrappers.MessageType: +def get_message( + dot_path: str, + *, + fields: typing.Tuple[desc.FieldDescriptorProto] = (), +) -> wrappers.MessageType: # Pass explicit None through (for lro_metadata). 
if dot_path is None: return None @@ -138,47 +144,52 @@ def get_message(dot_path: str, *, # So, if trying to test the DescriptorProto message here, the path # would be google.protobuf.descriptor.DescriptorProto (whereas the proto # path is just google.protobuf.DescriptorProto). - pieces = dot_path.split('.') + pieces = dot_path.split(".") pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] return wrappers.MessageType( - fields={i.name: wrappers.Field( - field_pb=i, - enum=get_enum(i.type_name) if i.type_name else None, - ) for i in fields}, + fields={ + i.name: wrappers.Field( + field_pb=i, + enum=get_enum(i.type_name) if i.type_name else None, + ) + for i in fields + }, nested_messages={}, nested_enums={}, message_pb=desc.DescriptorProto(name=name, field=fields), - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(pkg), - module=module, - )), + meta=metadata.Metadata( + address=metadata.Address( + name=name, + package=tuple(pkg), + module=module, + ) + ), ) def make_method( - name: str, - input_message: wrappers.MessageType = None, - output_message: wrappers.MessageType = None, - package: typing.Union[typing.Tuple[str], str] = 'foo.bar.v1', - module: str = 'baz', - http_rule: http_pb2.HttpRule = None, - signatures: typing.Sequence[str] = (), - is_deprecated: bool = False, - routing_rule: routing_pb2.RoutingRule = None, - **kwargs + name: str, + input_message: wrappers.MessageType = None, + output_message: wrappers.MessageType = None, + package: typing.Union[typing.Tuple[str], str] = "foo.bar.v1", + module: str = "baz", + http_rule: http_pb2.HttpRule = None, + signatures: typing.Sequence[str] = (), + is_deprecated: bool = False, + routing_rule: routing_pb2.RoutingRule = None, + **kwargs, ) -> wrappers.Method: # Use default input and output messages if they are not provided. 
- input_message = input_message or make_message('MethodInput') - output_message = output_message or make_message('MethodOutput') + input_message = input_message or make_message("MethodInput") + output_message = output_message or make_message("MethodOutput") # Create the method pb2. method_pb = desc.MethodDescriptorProto( name=name, input_type=str(input_message.meta.address), output_type=str(output_message.meta.address), - **kwargs + **kwargs, ) if routing_rule: @@ -196,7 +207,7 @@ def make_method( method_pb.options.Extensions[ext_key].append(sig) if isinstance(package, str): - package = tuple(package.split('.')) + package = tuple(package.split(".")) if is_deprecated: method_pb.options.deprecated = True @@ -206,45 +217,44 @@ def make_method( method_pb=method_pb, input=input_message, output=output_message, - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=package, - module=module, - parent=(f'{name}Service',), - )), + meta=metadata.Metadata( + address=metadata.Address( + name=name, + package=package, + module=module, + parent=(f"{name}Service",), + ) + ), ) def make_field( - name: str = 'my_field', + name: str = "my_field", number: int = 1, repeated: bool = False, message: wrappers.MessageType = None, enum: wrappers.EnumType = None, meta: metadata.Metadata = None, oneof: str = None, - **kwargs + **kwargs, ) -> wrappers.Field: T = desc.FieldDescriptorProto.Type if message: - kwargs.setdefault('type_name', str(message.meta.address)) - kwargs['type'] = 'TYPE_MESSAGE' + kwargs.setdefault("type_name", str(message.meta.address)) + kwargs["type"] = "TYPE_MESSAGE" elif enum: - kwargs.setdefault('type_name', str(enum.meta.address)) - kwargs['type'] = 'TYPE_ENUM' + kwargs.setdefault("type_name", str(enum.meta.address)) + kwargs["type"] = "TYPE_ENUM" else: - kwargs.setdefault('type', T.Value('TYPE_BOOL')) + kwargs.setdefault("type", T.Value("TYPE_BOOL")) - if isinstance(kwargs['type'], str): - kwargs['type'] = T.Value(kwargs['type']) + if 
isinstance(kwargs["type"], str): + kwargs["type"] = T.Value(kwargs["type"]) - label = kwargs.pop('label', 3 if repeated else 1) + label = kwargs.pop("label", 3 if repeated else 1) field_pb = desc.FieldDescriptorProto( - name=name, - label=label, - number=number, - **kwargs + name=name, label=label, number=number, **kwargs ) return wrappers.Field( @@ -258,8 +268,8 @@ def make_field( def make_message( name: str, - package: str = 'foo.bar.v1', - module: str = 'baz', + package: str = "foo.bar.v1", + module: str = "baz", fields: typing.Sequence[wrappers.Field] = (), meta: metadata.Metadata = None, options: desc.MethodOptions = None, @@ -274,39 +284,43 @@ def make_message( fields=collections.OrderedDict((i.name, i) for i in fields), nested_messages={}, nested_enums={}, - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), + meta=meta + or metadata.Metadata( + address=metadata.Address( + name=name, + package=tuple(package.split(".")), + module=module, + ) + ), ) def get_enum(dot_path: str) -> wrappers.EnumType: - pieces = dot_path.split('.') + pieces = dot_path.split(".") pkg, module, name = pieces[:-2], pieces[-2], pieces[-1] return wrappers.EnumType( enum_pb=desc.EnumDescriptorProto(name=name), - meta=metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(pkg), - module=module, - )), + meta=metadata.Metadata( + address=metadata.Address( + name=name, + package=tuple(pkg), + module=module, + ) + ), values=[], ) def make_enum( name: str, - package: str = 'foo.bar.v1', - module: str = 'baz', + package: str = "foo.bar.v1", + module: str = "baz", values: typing.Sequence[typing.Tuple[str, int]] = (), meta: metadata.Metadata = None, options: desc.EnumOptions = None, ) -> wrappers.EnumType: enum_value_pbs = [ - desc.EnumValueDescriptorProto(name=i[0], number=i[1]) - for i in values + desc.EnumValueDescriptorProto(name=i[0], number=i[1]) for i in values ] enum_pb = 
desc.EnumDescriptorProto( name=name, @@ -315,52 +329,52 @@ def make_enum( ) return wrappers.EnumType( enum_pb=enum_pb, - values=[wrappers.EnumValueType(enum_value_pb=evpb) - for evpb in enum_value_pbs], - meta=meta or metadata.Metadata(address=metadata.Address( - name=name, - package=tuple(package.split('.')), - module=module, - )), + values=[wrappers.EnumValueType(enum_value_pb=evpb) for evpb in enum_value_pbs], + meta=meta + or metadata.Metadata( + address=metadata.Address( + name=name, + package=tuple(package.split(".")), + module=module, + ) + ), ) def make_naming(**kwargs) -> naming.Naming: - kwargs.setdefault('name', 'Hatstand') - kwargs.setdefault('namespace', ('Google', 'Cloud')) - kwargs.setdefault('version', 'v1') - kwargs.setdefault('product_name', 'Hatstand') + kwargs.setdefault("name", "Hatstand") + kwargs.setdefault("namespace", ("Google", "Cloud")) + kwargs.setdefault("version", "v1") + kwargs.setdefault("product_name", "Hatstand") return naming.NewNaming(**kwargs) def make_enum_pb2( - name: str, - *values: typing.Sequence[str], - **kwargs + name: str, *values: typing.Sequence[str], **kwargs ) -> desc.EnumDescriptorProto: enum_value_pbs = [ - desc.EnumValueDescriptorProto(name=n, number=i) - for i, n in enumerate(values) + desc.EnumValueDescriptorProto(name=n, number=i) for i, n in enumerate(values) ] enum_pb = desc.EnumDescriptorProto(name=name, value=enum_value_pbs, **kwargs) return enum_pb def make_message_pb2( - name: str, - fields: tuple = (), - oneof_decl: tuple = (), - **kwargs + name: str, fields: tuple = (), oneof_decl: tuple = (), **kwargs ) -> desc.DescriptorProto: - return desc.DescriptorProto(name=name, field=fields, oneof_decl=oneof_decl, **kwargs) + return desc.DescriptorProto( + name=name, field=fields, oneof_decl=oneof_decl, **kwargs + ) -def make_field_pb2(name: str, number: int, - type: int = 11, # 11 == message - type_name: str = None, - oneof_index: int = None, - **kwargs, - ) -> desc.FieldDescriptorProto: +def make_field_pb2( + 
name: str, + number: int, + type: int = 11, # 11 == message + type_name: str = None, + oneof_index: int = None, + **kwargs, +) -> desc.FieldDescriptorProto: return desc.FieldDescriptorProto( name=name, number=number, @@ -370,18 +384,22 @@ def make_field_pb2(name: str, number: int, **kwargs, ) + def make_oneof_pb2(name: str) -> desc.OneofDescriptorProto: return desc.OneofDescriptorProto( name=name, ) -def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, - messages: typing.Sequence[desc.DescriptorProto] = (), - enums: typing.Sequence[desc.EnumDescriptorProto] = (), - services: typing.Sequence[desc.ServiceDescriptorProto] = (), - locations: typing.Sequence[desc.SourceCodeInfo.Location] = (), - ) -> desc.FileDescriptorProto: +def make_file_pb2( + name: str = "my_proto.proto", + package: str = "example.v1", + *, + messages: typing.Sequence[desc.DescriptorProto] = (), + enums: typing.Sequence[desc.EnumDescriptorProto] = (), + services: typing.Sequence[desc.ServiceDescriptorProto] = (), + locations: typing.Sequence[desc.SourceCodeInfo.Location] = (), +) -> desc.FileDescriptorProto: return desc.FileDescriptorProto( name=name, package=package, @@ -393,10 +411,10 @@ def make_file_pb2(name: str = 'my_proto.proto', package: str = 'example.v1', *, def make_doc_meta( - *, - leading: str = '', - trailing: str = '', - detached: typing.List[str] = [], + *, + leading: str = "", + trailing: str = "", + detached: typing.List[str] = [], ) -> desc.SourceCodeInfo.Location: return metadata.Metadata( documentation=desc.SourceCodeInfo.Location( diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index 395467661c3c..f45c63ee12aa 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -38,13 +38,20 @@ import asyncio from google.showcase import EchoAsyncClient from google.showcase import IdentityAsyncClient + try: - from 
google.showcase_v1beta1.services.echo.transports import AsyncEchoRestTransport + from google.showcase_v1beta1.services.echo.transports import ( + AsyncEchoRestTransport, + ) + HAS_ASYNC_REST_ECHO_TRANSPORT = True except: HAS_ASYNC_REST_ECHO_TRANSPORT = False try: - from google.showcase_v1beta1.services.identity.transports import AsyncIdentityRestTransport + from google.showcase_v1beta1.services.identity.transports import ( + AsyncIdentityRestTransport, + ) + HAS_ASYNC_REST_IDENTITY_TRANSPORT = True except: HAS_ASYNC_REST_IDENTITY_TRANSPORT = False @@ -77,7 +84,9 @@ def async_echo(use_mtls, request, event_loop): EchoAsyncClient, use_mtls, transport_name=transport, - channel_creator=aio.insecure_channel if request.param == "grpc_asyncio" else None, + channel_creator=( + aio.insecure_channel if request.param == "grpc_asyncio" else None + ), credentials=async_anonymous_credentials(), ) @@ -90,7 +99,9 @@ def async_identity(use_mtls, request, event_loop): IdentityAsyncClient, use_mtls, transport_name=transport, - channel_creator=aio.insecure_channel if request.param == "grpc_asyncio" else None, + channel_creator=( + aio.insecure_channel if request.param == "grpc_asyncio" else None + ), credentials=async_anonymous_credentials(), ) diff --git a/packages/gapic-generator/tests/system/test_api_version_header.py b/packages/gapic-generator/tests/system/test_api_version_header.py index 5fcb4be517af..be17b6a28608 100644 --- a/packages/gapic-generator/tests/system/test_api_version_header.py +++ b/packages/gapic-generator/tests/system/test_api_version_header.py @@ -27,16 +27,20 @@ def test_api_version_in_grpc_trailing_metadata(echo): # This feature requires version 0.35.0 of `gapic-showcase` or newer which has the # ability to echo request headers - content = 'The hail in Wales falls mainly on the snails.' - responses = echo.expand({ - 'content': content, - }) + content = "The hail in Wales falls mainly on the snails." 
+ responses = echo.expand( + { + "content": content, + } + ) if isinstance(echo.transport, type(echo).get_transport_class("grpc")): response_metadata = [ - (metadata.key, metadata.value) - for metadata in responses.trailing_metadata() + (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert ("x-goog-api-version", "v1_20240408") in response_metadata else: assert "X-Showcase-Request-X-Goog-Api-Version" in responses._response.headers - assert responses._response.headers["X-Showcase-Request-X-Goog-Api-Version"] == "v1_20240408" + assert ( + responses._response.headers["X-Showcase-Request-X-Goog-Api-Version"] + == "v1_20240408" + ) diff --git a/packages/gapic-generator/tests/system/test_client_context_manager.py b/packages/gapic-generator/tests/system/test_client_context_manager.py index 541de4c5b924..59e98183b8d8 100644 --- a/packages/gapic-generator/tests/system/test_client_context_manager.py +++ b/packages/gapic-generator/tests/system/test_client_context_manager.py @@ -20,10 +20,8 @@ def test_client(echo): with echo as c: - resp = c.echo({ - 'content': 'hello' - }) - assert resp.content == 'hello' + resp = c.echo({"content": "hello"}) + assert resp.content == "hello" def test_client_destroyed(echo): @@ -33,9 +31,7 @@ def test_client_destroyed(echo): echo.__exit__(None, None, None) with pytest.raises(ValueError): - echo.echo({ - 'content': 'hello' - }) + echo.echo({"content": "hello"}) if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @@ -43,15 +39,11 @@ def test_client_destroyed(echo): @pytest.mark.asyncio async def test_client_async(async_echo): async with async_echo: - response = await async_echo.echo({ - 'content': 'hello' - }) - assert response.content == 'hello' + response = await async_echo.echo({"content": "hello"}) + assert response.content == "hello" @pytest.mark.asyncio async def test_client_destroyed_async(async_echo): await async_echo.__aexit__(None, None, None) with pytest.raises((grpc._cython.cygrpc.UsageError, 
exceptions.TransportError)): - await async_echo.echo({ - 'content': 'hello' - }) + await async_echo.echo({"content": "hello"}) diff --git a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py index cbfa72d2ce58..d0eb86e4fc2c 100644 --- a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py @@ -18,22 +18,23 @@ # intercetped_metadata will be added by the interceptor automatically, and # showcase server will echo it (since it has key 'showcase-trailer') as trailing # metadata. -intercepted_metadata = (('showcase-trailer', 'intercepted'),) +intercepted_metadata = (("showcase-trailer", "intercepted"),) def test_unary_stream(intercepted_echo): - content = 'The hail in Wales falls mainly on the snails.' - responses = intercepted_echo.expand({ - 'content': content, - }) - - for ground_truth, response in zip(content.split(' '), responses): + content = "The hail in Wales falls mainly on the snails." + responses = intercepted_echo.expand( + { + "content": content, + } + ) + + for ground_truth, response in zip(content.split(" "), responses): assert response.content == ground_truth - assert ground_truth == 'snails.' + assert ground_truth == "snails." 
response_metadata = [ - (metadata.key, metadata.value) - for metadata in responses.trailing_metadata() + (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert intercepted_metadata[0] in response_metadata @@ -45,10 +46,9 @@ def test_stream_stream(intercepted_echo): responses = intercepted_echo.chat(iter(requests)) contents = [response.content for response in responses] - assert contents == ['hello', 'world!'] + assert contents == ["hello", "world!"] response_metadata = [ - (metadata.key, metadata.value) - for metadata in responses.trailing_metadata() + (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert intercepted_metadata[0] in response_metadata diff --git a/packages/gapic-generator/tests/system/test_lro.py b/packages/gapic-generator/tests/system/test_lro.py index 99bbba007ca3..ba7241c2deef 100644 --- a/packages/gapic-generator/tests/system/test_lro.py +++ b/packages/gapic-generator/tests/system/test_lro.py @@ -20,15 +20,17 @@ def test_lro(echo): - future = echo.wait({ - 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), - 'success': { - 'content': 'The hail in Wales falls mainly on the snails...eventually.' - }} + future = echo.wait( + { + "end_time": datetime.now(tz=timezone.utc) + timedelta(seconds=1), + "success": { + "content": "The hail in Wales falls mainly on the snails...eventually." + }, + } ) response = future.result() assert isinstance(response, showcase.WaitResponse) - assert response.content.endswith('the snails...eventually.') + assert response.content.endswith("the snails...eventually.") if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @@ -36,12 +38,14 @@ def test_lro(echo): @pytest.mark.asyncio async def test_lro_async(async_echo): - future = await async_echo.wait({ - 'end_time': datetime.now(tz=timezone.utc) + timedelta(seconds=1), - 'success': { - 'content': 'The hail in Wales falls mainly on the snails...eventually.' 
- }} + future = await async_echo.wait( + { + "end_time": datetime.now(tz=timezone.utc) + timedelta(seconds=1), + "success": { + "content": "The hail in Wales falls mainly on the snails...eventually." + }, + } ) response = await future.result() assert isinstance(response, showcase.WaitResponse) - assert response.content.endswith('the snails...eventually.') + assert response.content.endswith("the snails...eventually.") diff --git a/packages/gapic-generator/tests/system/test_mixins.py b/packages/gapic-generator/tests/system/test_mixins.py index 87a926e72dd4..699e4534c5aa 100644 --- a/packages/gapic-generator/tests/system/test_mixins.py +++ b/packages/gapic-generator/tests/system/test_mixins.py @@ -47,9 +47,7 @@ def test_set_iam_policy(echo): def test_get_iam_policy(echo): # First we need to set a policy, before we can get it - echo.set_iam_policy( - {"resource": "users/user", "policy": {"version": 20240920}} - ) + echo.set_iam_policy({"resource": "users/user", "policy": {"version": 20240920}}) policy = echo.get_iam_policy( { "resource": "users/user", @@ -60,9 +58,7 @@ def test_get_iam_policy(echo): def test_test_iam_permissions(echo): # First we need to set a policy, before we can call test_iam_permissions - echo.set_iam_policy( - {"resource": "users/user", "policy": {"version": 20240920}} - ) + echo.set_iam_policy({"resource": "users/user", "policy": {"version": 20240920}}) response = echo.test_iam_permissions( {"resource": "users/user", "permissions": ["test_some_permission"]} ) diff --git a/packages/gapic-generator/tests/system/test_pagination.py b/packages/gapic-generator/tests/system/test_pagination.py index fbf1a243deab..4a341222f7ca 100644 --- a/packages/gapic-generator/tests/system/test_pagination.py +++ b/packages/gapic-generator/tests/system/test_pagination.py @@ -18,22 +18,30 @@ def test_pagination(echo): - text = 'The hail in Wales falls mainly on the snails.' 
- results = [i for i in echo.paged_expand({ - 'content': text, - 'page_size': 3, - })] + text = "The hail in Wales falls mainly on the snails." + results = [ + i + for i in echo.paged_expand( + { + "content": text, + "page_size": 3, + } + ) + ] assert len(results) == 9 - assert results == [showcase.EchoResponse(content=i) - for i in text.split(' ')] + assert results == [showcase.EchoResponse(content=i) for i in text.split(" ")] def test_pagination_pages(echo): text = "The hail in Wales falls mainly on the snails." - page_results = list(echo.paged_expand({ - 'content': text, - 'page_size': 3, - }).pages) + page_results = list( + echo.paged_expand( + { + "content": text, + "page_size": 3, + } + ).pages + ) assert len(page_results) == 3 assert not page_results[-1].next_page_token @@ -44,33 +52,38 @@ def test_pagination_pages(echo): assert page_results[0].raw_page is page_results[0] results = [r for p in page_results for r in p.responses] - assert results == [showcase.EchoResponse(content=i) - for i in text.split(' ')] + assert results == [showcase.EchoResponse(content=i) for i in text.split(" ")] if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": + @pytest.mark.asyncio async def test_pagination_async(async_echo): - text = 'The hail in Wales falls mainly on the snails.' + text = "The hail in Wales falls mainly on the snails." results = [] - async for i in await async_echo.paged_expand({ - 'content': text, - 'page_size': 3, - }): + async for i in await async_echo.paged_expand( + { + "content": text, + "page_size": 3, + } + ): results.append(i) assert len(results) == 9 - assert results == [showcase.EchoResponse(content=i) - for i in text.split(' ')] + assert results == [showcase.EchoResponse(content=i) for i in text.split(" ")] @pytest.mark.asyncio async def test_pagination_pages_async(async_echo): text = "The hail in Wales falls mainly on the snails." 
page_results = [] - async for page in (await async_echo.paged_expand({ - 'content': text, - 'page_size': 3, - })).pages: + async for page in ( + await async_echo.paged_expand( + { + "content": text, + "page_size": 3, + } + ) + ).pages: page_results.append(page) assert len(page_results) == 3 @@ -82,5 +95,4 @@ async def test_pagination_pages_async(async_echo): assert page_results[0].raw_page is page_results[0] results = [r for p in page_results for r in p.responses] - assert results == [showcase.EchoResponse(content=i) - for i in text.split(' ')] + assert results == [showcase.EchoResponse(content=i) for i in text.split(" ")] diff --git a/packages/gapic-generator/tests/system/test_request_metadata.py b/packages/gapic-generator/tests/system/test_request_metadata.py index efc230f823f8..76dc739cadba 100644 --- a/packages/gapic-generator/tests/system/test_request_metadata.py +++ b/packages/gapic-generator/tests/system/test_request_metadata.py @@ -24,7 +24,7 @@ def test_metadata_string(echo): request_id="some_value", other_request_id="", ), - metadata=[('some-key', 'some_value')] + metadata=[("some-key", "some_value")], ) @@ -35,7 +35,7 @@ def test_metadata_binary(echo): request_id="some_value", other_request_id="", ), - metadata=[('some-key-bin', b'some_value')] + metadata=[("some-key-bin", b"some_value")], ) if isinstance(echo.transport, type(echo).get_transport_class("grpc")): @@ -50,5 +50,5 @@ def test_metadata_binary(echo): request_id="some_value", other_request_id="", ), - metadata=[('some-key-bin', 'some_value')] + metadata=[("some-key-bin", "some_value")], ) diff --git a/packages/gapic-generator/tests/system/test_resource_crud.py b/packages/gapic-generator/tests/system/test_resource_crud.py index b3e704d60d4c..e92d6191a3ec 100644 --- a/packages/gapic-generator/tests/system/test_resource_crud.py +++ b/packages/gapic-generator/tests/system/test_resource_crud.py @@ -20,15 +20,17 @@ def test_crud_with_request(identity): count = len(identity.list_users().users) user = 
identity.create_user( request={ - "user": {"display_name": "Guido van Rossum", "email": "guido@guido.fake", } + "user": { + "display_name": "Guido van Rossum", + "email": "guido@guido.fake", + } } ) try: assert user.display_name == "Guido van Rossum" assert user.email == "guido@guido.fake" assert len(identity.list_users().users) == count + 1 - assert identity.get_user( - {"name": user.name}).display_name == "Guido van Rossum" + assert identity.get_user({"name": user.name}).display_name == "Guido van Rossum" finally: identity.delete_user({"name": user.name}) @@ -36,7 +38,9 @@ def test_crud_with_request(identity): def test_crud_flattened(identity): count = len(identity.list_users().users) user = identity.create_user( - display_name="Monty Python", email="monty@python.org", ) + display_name="Monty Python", + email="monty@python.org", + ) try: assert user.display_name == "Monty Python" assert user.email == "monty@python.org" @@ -83,33 +87,39 @@ def test_path_parsing(messaging): async def test_crud_with_request_async(async_identity): pager = await async_identity.list_users() count = len(pager.users) - user = await async_identity.create_user(request={'user': { - 'display_name': 'Guido van Rossum', - 'email': 'guido@guido.fake', - }}) + user = await async_identity.create_user( + request={ + "user": { + "display_name": "Guido van Rossum", + "email": "guido@guido.fake", + } + } + ) try: - assert user.display_name == 'Guido van Rossum' - assert user.email == 'guido@guido.fake' - pager = (await async_identity.list_users()) + assert user.display_name == "Guido van Rossum" + assert user.email == "guido@guido.fake" + pager = await async_identity.list_users() assert len(pager.users) == count + 1 - assert (await async_identity.get_user({ - 'name': user.name - })).display_name == 'Guido van Rossum' + assert ( + await async_identity.get_user({"name": user.name}) + ).display_name == "Guido van Rossum" finally: - await async_identity.delete_user({'name': user.name}) + await 
async_identity.delete_user({"name": user.name}) @pytest.mark.asyncio async def test_crud_flattened_async(async_identity): count = len((await async_identity.list_users()).users) user = await async_identity.create_user( - display_name='Monty Python', - email='monty@python.org', + display_name="Monty Python", + email="monty@python.org", ) try: - assert user.display_name == 'Monty Python' - assert user.email == 'monty@python.org' + assert user.display_name == "Monty Python" + assert user.email == "monty@python.org" assert len((await async_identity.list_users()).users) == count + 1 - assert (await async_identity.get_user(name=user.name)).display_name == 'Monty Python' + assert ( + await async_identity.get_user(name=user.name) + ).display_name == "Monty Python" finally: await async_identity.delete_user(name=user.name) diff --git a/packages/gapic-generator/tests/system/test_retry.py b/packages/gapic-generator/tests/system/test_retry.py index 6ec707cd509a..9af80d077367 100644 --- a/packages/gapic-generator/tests/system/test_retry.py +++ b/packages/gapic-generator/tests/system/test_retry.py @@ -22,23 +22,27 @@ def test_retry_bubble(echo): # Note: DeadlineExceeded is from gRPC, GatewayTimeout from http with pytest.raises((exceptions.DeadlineExceeded, exceptions.GatewayTimeout)): - echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), - 'message': 'This took longer than you said it should.', - }, - }) + echo.echo( + { + "error": { + "code": code_pb2.Code.Value("DEADLINE_EXCEEDED"), + "message": "This took longer than you said it should.", + }, + } + ) if isinstance(echo.transport, type(echo).get_transport_class("grpc")): # Under gRPC, we raise exceptions.DeadlineExceeded, which is a # sub-class of exceptions.GatewayTimeout. 
with pytest.raises(exceptions.DeadlineExceeded): - echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('DEADLINE_EXCEEDED'), - 'message': 'This took longer than you said it should.', - }, - }) + echo.echo( + { + "error": { + "code": code_pb2.Code.Value("DEADLINE_EXCEEDED"), + "message": "This took longer than you said it should.", + }, + } + ) if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": @@ -46,12 +50,14 @@ def test_retry_bubble(echo): @pytest.mark.asyncio async def test_retry_bubble_async(async_echo): with pytest.raises(exceptions.RetryError): - await async_echo.echo({ - 'error': { - 'code': code_pb2.Code.Value('UNAVAILABLE'), - 'message': 'This service is not available.', - }, - }) + await async_echo.echo( + { + "error": { + "code": code_pb2.Code.Value("UNAVAILABLE"), + "message": "This service is not available.", + }, + } + ) # Note: This test verifies that: # Using gapic_v1.method.wrap_method in *AsyncClient raises a RPCError (Incorrect behaviour). @@ -60,6 +66,4 @@ async def test_retry_bubble_async(async_echo): @pytest.mark.asyncio async def test_method_async_wrapper_for_async_client(async_echo): with pytest.raises(exceptions.NotFound): - await async_echo.get_operation({ - 'name': "operations/echo" - }) + await async_echo.get_operation({"name": "operations/echo"}) diff --git a/packages/gapic-generator/tests/system/test_streams.py b/packages/gapic-generator/tests/system/test_streams.py index b4adc6ee5152..38a2e47745fe 100644 --- a/packages/gapic-generator/tests/system/test_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -23,20 +23,22 @@ def test_unary_stream(echo): - content = 'The hail in Wales falls mainly on the snails.' - responses = echo.expand({ - 'content': content, - }, metadata=_METADATA) + content = "The hail in Wales falls mainly on the snails." + responses = echo.expand( + { + "content": content, + }, + metadata=_METADATA, + ) # Consume the response and ensure it matches what we expect. 
# with pytest.raises(exceptions.NotFound) as exc: - for ground_truth, response in zip(content.split(' '), responses): + for ground_truth, response in zip(content.split(" "), responses): assert response.content == ground_truth - assert ground_truth == 'snails.' + assert ground_truth == "snails." if isinstance(echo.transport, type(echo).get_transport_class("grpc")): response_metadata = [ - (metadata.key, metadata.value) - for metadata in responses.trailing_metadata() + (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert _METADATA[0] in response_metadata else: @@ -54,7 +56,7 @@ def test_stream_unary(echo): requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) response = echo.collect(iter(requests)) - assert response.content == 'hello world!' + assert response.content == "hello world!" def test_stream_unary_passing_dict(echo): @@ -62,9 +64,9 @@ def test_stream_unary_passing_dict(echo): # (TODO: dovs) Temporarily disabling rest return - requests = [{'content': 'hello'}, {'content': 'world!'}] + requests = [{"content": "hello"}, {"content": "world!"}] response = echo.collect(iter(requests)) - assert response.content == 'hello world!' + assert response.content == "hello world!" 
def test_stream_stream(echo): @@ -80,11 +82,10 @@ def test_stream_stream(echo): contents = [] for response in responses: contents.append(response.content) - assert contents == ['hello', 'world!'] + assert contents == ["hello", "world!"] response_metadata = [ - (metadata.key, metadata.value) - for metadata in responses.trailing_metadata() + (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert _METADATA[0] in response_metadata @@ -94,17 +95,16 @@ def test_stream_stream_passing_dict(echo): # (TODO: dovs) Temporarily disabling rest return - requests = [{'content': 'hello'}, {'content': 'world!'}] + requests = [{"content": "hello"}, {"content": "world!"}] responses = echo.chat(iter(requests), metadata=_METADATA) contents = [] for response in responses: contents.append(response.content) - assert contents == ['hello', 'world!'] + assert contents == ["hello", "world!"] response_metadata = [ - (metadata.key, metadata.value) - for metadata in responses.trailing_metadata() + (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert _METADATA[0] in response_metadata @@ -114,21 +114,25 @@ def test_stream_stream_passing_dict(echo): @pytest.mark.asyncio async def test_async_unary_stream_reader(async_echo): - content = 'The hail in Wales falls mainly on the snails.' - stream = await async_echo.expand({ - 'content': content, - }, metadata=_METADATA) + content = "The hail in Wales falls mainly on the snails." + stream = await async_echo.expand( + { + "content": content, + }, + metadata=_METADATA, + ) # Note: gRPC exposes `read`, REST exposes `__anext__` to read # a chunk of response from the stream. - response_attr = '__anext__' if "rest" in str( - async_echo.transport).lower() else 'read' + response_attr = ( + "__anext__" if "rest" in str(async_echo.transport).lower() else "read" + ) # Consume the response and ensure it matches what we expect. 
- for ground_truth in content.split(' '): + for ground_truth in content.split(" "): response = await getattr(stream, response_attr)() assert response.content == ground_truth - assert ground_truth == 'snails.' + assert ground_truth == "snails." # Note: trailing metadata is part of a gRPC response. if "grpc" in str(async_echo.transport).lower(): @@ -137,17 +141,20 @@ async def test_async_unary_stream_reader(async_echo): @pytest.mark.asyncio async def test_async_unary_stream_async_generator(async_echo): - content = 'The hail in Wales falls mainly on the snails.' - stream = await async_echo.expand({ - 'content': content, - }, metadata=_METADATA) + content = "The hail in Wales falls mainly on the snails." + stream = await async_echo.expand( + { + "content": content, + }, + metadata=_METADATA, + ) # Consume the response and ensure it matches what we expect. - tokens = iter(content.split(' ')) + tokens = iter(content.split(" ")) async for response in stream: ground_truth = next(tokens) assert response.content == ground_truth - assert ground_truth == 'snails.' + assert ground_truth == "snails." # Note: trailing metadata is part of a gRPC response. if "grpc" in str(async_echo.transport).lower(): @@ -169,7 +176,7 @@ async def test_async_stream_unary_iterable(async_echo): call = await async_echo.collect(requests) response = await call - assert response.content == 'hello world!' + assert response.content == "hello world!" @pytest.mark.asyncio async def test_async_stream_unary_async_generator(async_echo): @@ -186,7 +193,7 @@ async def async_generator(): call = await async_echo.collect(async_generator()) response = await call - assert response.content == 'hello world!' + assert response.content == "hello world!" @pytest.mark.asyncio async def test_async_stream_unary_writer(async_echo): @@ -202,7 +209,7 @@ async def test_async_stream_unary_writer(async_echo): await call.done_writing() response = await call - assert response.content == 'hello world!' 
+ assert response.content == "hello world!" @pytest.mark.asyncio async def test_async_stream_unary_passing_dict(async_echo): @@ -213,10 +220,10 @@ async def test_async_stream_unary_passing_dict(async_echo): call = await async_echo.collect() return - requests = [{'content': 'hello'}, {'content': 'world!'}] + requests = [{"content": "hello"}, {"content": "world!"}] call = await async_echo.collect(iter(requests)) response = await call - assert response.content == 'hello world!' + assert response.content == "hello world!" @pytest.mark.asyncio async def test_async_stream_stream_reader_writier(async_echo): @@ -232,11 +239,8 @@ async def test_async_stream_stream_reader_writier(async_echo): await call.write(showcase.EchoRequest(content="world!")) await call.done_writing() - contents = [ - (await call.read()).content, - (await call.read()).content - ] - assert contents == ['hello', 'world!'] + contents = [(await call.read()).content, (await call.read()).content] + assert contents == ["hello", "world!"] trailing_metadata = await call.trailing_metadata() assert _METADATA[0] in trailing_metadata.items() @@ -259,7 +263,7 @@ async def async_generator(): contents = [] async for response in call: contents.append(response.content) - assert contents == ['hello', 'world!'] + assert contents == ["hello", "world!"] trailing_metadata = await call.trailing_metadata() assert _METADATA[0] in trailing_metadata.items() @@ -273,13 +277,13 @@ async def test_async_stream_stream_passing_dict(async_echo): call = await async_echo.chat(metadata=_METADATA) return - requests = [{'content': 'hello'}, {'content': 'world!'}] + requests = [{"content": "hello"}, {"content": "world!"}] call = await async_echo.chat(iter(requests), metadata=_METADATA) contents = [] async for response in call: contents.append(response.content) - assert contents == ['hello', 'world!'] + assert contents == ["hello", "world!"] trailing_metadata = await call.trailing_metadata() assert _METADATA[0] in trailing_metadata.items() 
diff --git a/packages/gapic-generator/tests/system/test_unary.py b/packages/gapic-generator/tests/system/test_unary.py index 674919eb648d..0d269b7195d8 100644 --- a/packages/gapic-generator/tests/system/test_unary.py +++ b/packages/gapic-generator/tests/system/test_unary.py @@ -148,14 +148,19 @@ async def test_async_unary_with_dict(async_echo): @pytest.mark.asyncio async def test_async_unary_error(async_echo): message = "Bad things! Bad things!" - expected_err_message = message if "grpc_asyncio" in str( - async_echo.transport) else f"POST http://localhost:7469/v1beta1/echo:echo: {message}" + expected_err_message = ( + message + if "grpc_asyncio" in str(async_echo.transport) + else f"POST http://localhost:7469/v1beta1/echo:echo: {message}" + ) # Note: InvalidArgument is from gRPC, BadRequest from http (no MTLS) with pytest.raises((exceptions.InvalidArgument, exceptions.BadRequest)) as exc: await async_echo.echo( { "error": { - "code": code_pb2.Code.Value("INVALID_ARGUMENT",), + "code": code_pb2.Code.Value( + "INVALID_ARGUMENT", + ), "message": message, }, } diff --git a/packages/gapic-generator/tests/system/test_universe_domain.py b/packages/gapic-generator/tests/system/test_universe_domain.py index df8c1973054e..cd1c37e5b443 100644 --- a/packages/gapic-generator/tests/system/test_universe_domain.py +++ b/packages/gapic-generator/tests/system/test_universe_domain.py @@ -5,37 +5,94 @@ # Define the parametrized data vary_transport = [ - (grpc.insecure_channel, "grpc", "localhost:7469", - "googleapis.com", "googleapis.com"), - (grpc.insecure_channel, "rest", "localhost:7469", - "googleapis.com", "googleapis.com"), + ( + grpc.insecure_channel, + "grpc", + "localhost:7469", + "googleapis.com", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "rest", + "localhost:7469", + "googleapis.com", + "googleapis.com", + ), ] vary_channel_transport_endpoints_universes = [ - (grpc.insecure_channel, "grpc", "showcase.googleapis.com", - "showcase.googleapis.com", 
"googleapis.com"), - (grpc.insecure_channel, "grpc", "showcase.googleapis.com", - "localhost:7469", "googleapis.com"), - (grpc.insecure_channel, "grpc", "localhost:7469", - "showcase.googleapis.com", "googleapis.com"), - (grpc.insecure_channel, "grpc", "localhost:7469", - "localhost:7469", "googleapis.com"), - (grpc.insecure_channel, "rest", "showcase.googleapis.com", - "showcase.googleapis.com", "googleapis.com"), - (grpc.insecure_channel, "rest", "showcase.googleapis.com", - "localhost:7469", "googleapis.com"), - (grpc.insecure_channel, "rest", "localhost:7469", - "showcase.googleapis.com", "googleapis.com"), - (grpc.insecure_channel, "rest", "localhost:7469", - "localhost:7469", "googleapis.com"), + ( + grpc.insecure_channel, + "grpc", + "showcase.googleapis.com", + "showcase.googleapis.com", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "grpc", + "showcase.googleapis.com", + "localhost:7469", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "grpc", + "localhost:7469", + "showcase.googleapis.com", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "grpc", + "localhost:7469", + "localhost:7469", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "rest", + "showcase.googleapis.com", + "showcase.googleapis.com", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "rest", + "showcase.googleapis.com", + "localhost:7469", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "rest", + "localhost:7469", + "showcase.googleapis.com", + "googleapis.com", + ), + ( + grpc.insecure_channel, + "rest", + "localhost:7469", + "localhost:7469", + "googleapis.com", + ), ] @pytest.mark.parametrize( "channel_creator, transport_name, transport_endpoint, credential_universe, client_universe", - vary_transport + vary_transport, ) -def test_universe_domain_validation_pass(parametrized_echo, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): +def test_universe_domain_validation_pass( + parametrized_echo, + 
channel_creator, + transport_name, + transport_endpoint, + credential_universe, + client_universe, +): # Test that only the configured client universe and credentials universe are used for validation assert parametrized_echo.universe_domain == client_universe # TODO: This is needed to cater for older versions of google-auth @@ -45,14 +102,15 @@ def test_universe_domain_validation_pass(parametrized_echo, channel_creator, tra int(part) for part in google.auth.__version__.split(".")[0:2] ] if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - assert parametrized_echo.transport._credentials.universe_domain == credential_universe + assert ( + parametrized_echo.transport._credentials.universe_domain + == credential_universe + ) if transport_name == "rest": assert parametrized_echo.api_endpoint == "http://" + transport_endpoint else: assert parametrized_echo.api_endpoint == transport_endpoint - response = parametrized_echo.echo({ - 'content': 'Universe validation succeeded!' - }) + response = parametrized_echo.echo({"content": "Universe validation succeeded!"}) assert response.content == "Universe validation succeeded!" 
@@ -64,9 +122,16 @@ def test_universe_domain_validation_pass(parametrized_echo, channel_creator, tra @pytest.mark.parametrize( "channel_creator, transport_name, transport_endpoint, credential_universe, client_universe", - vary_channel_transport_endpoints_universes + vary_channel_transport_endpoints_universes, ) -def test_universe_domain_validation_fail(parametrized_echo, channel_creator, transport_name, transport_endpoint, credential_universe, client_universe): +def test_universe_domain_validation_fail( + parametrized_echo, + channel_creator, + transport_name, + transport_endpoint, + credential_universe, + client_universe, +): """Test that only the client and credentials universes are used for validation, and not the endpoint.""" assert parametrized_echo.universe_domain == client_universe # TODO: This is needed to cater for older versions of google-auth @@ -75,8 +140,13 @@ def test_universe_domain_validation_fail(parametrized_echo, channel_creator, tra google_auth_major, google_auth_minor, _ = [ part for part in google.auth.__version__.split(".") ] - if int(google_auth_major) > 2 or (int(google_auth_major) == 2 and int(google_auth_minor) >= 23): - assert parametrized_echo.transport._credentials.universe_domain == credential_universe + if int(google_auth_major) > 2 or ( + int(google_auth_major) == 2 and int(google_auth_minor) >= 23 + ): + assert ( + parametrized_echo.transport._credentials.universe_domain + == credential_universe + ) if transport_name == "rest": assert parametrized_echo.api_endpoint == "http://" + transport_endpoint elif channel_creator == grpc.insecure_channel: diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index ae4534da856b..db9eb1d10d34 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -16,7 +16,7 @@ import itertools from collections import namedtuple -from typing import (Any, Dict, Iterable, 
Optional) +from typing import Any, Dict, Iterable, Optional from google.protobuf import descriptor_pb2 @@ -52,19 +52,23 @@ class DummyMethod: # DummyMessageBase.__new__.__defaults__ = (False,) * len(DummyMessageBase._fields) -DummyFieldBase = namedtuple("DummyField", - ["message", - "enum", - "name", - "repeated", - "required", - "resource_reference", - "oneof", - "field_pb", - "meta", - "is_primitive", - "ident", - "type"]) +DummyFieldBase = namedtuple( + "DummyField", + [ + "message", + "enum", + "name", + "repeated", + "required", + "resource_reference", + "oneof", + "field_pb", + "meta", + "is_primitive", + "ident", + "type", + ], +) DummyFieldBase.__new__.__defaults__ = (False,) * len(DummyFieldBase._fields) @@ -75,7 +79,16 @@ def mock_value_original_type(self): class DummyMessage: - def __init__(self, *, fields={}, type="", options=False, ident=False, resource_path=False, meta=None): + def __init__( + self, + *, + fields={}, + type="", + options=False, + ident=False, + resource_path=False, + meta=None + ): self.fields = fields self.type = type self.options = options @@ -87,7 +100,9 @@ def get_field(self, field_name: str): return self.fields[field_name] def oneof_fields(self): - return dict((field.oneof, field) for field in self.fields.values() if field.oneof) + return dict( + (field.oneof, field) for field in self.fields.values() if field.oneof + ) @property def required_fields(self): @@ -95,26 +110,37 @@ def required_fields(self): @property def resource_path_args(self): - return wrappers.MessageType.PATH_ARG_RE.findall(self.resource_path or '') + return wrappers.MessageType.PATH_ARG_RE.findall(self.resource_path or "") -DummyService = namedtuple("DummyService", [ - "name", "methods", "client_name", "async_client_name", "resource_messages_dict"]) +DummyService = namedtuple( + "DummyService", + ["name", "methods", "client_name", "async_client_name", "resource_messages_dict"], +) DummyService.__new__.__defaults__ = (False,) * len(DummyService._fields) 
-DummyApiSchema = namedtuple("DummyApiSchema", - ["services", "naming", "messages"]) +DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming", "messages"]) DummyApiSchema.__new__.__defaults__ = (False,) * len(DummyApiSchema._fields) DummyNaming = namedtuple( - "DummyNaming", ["warehouse_package_name", "name", "version", "versioned_module_name", "module_namespace", "proto_package"]) + "DummyNaming", + [ + "warehouse_package_name", + "name", + "version", + "versioned_module_name", + "module_namespace", + "proto_package", + ], +) DummyNaming.__new__.__defaults__ = (False,) * len(DummyNaming._fields) -def message_factory(exp: str, - repeated_iter=itertools.repeat(False), - enum: Optional[wrappers.EnumType] = None, - ) -> DummyMessage: +def message_factory( + exp: str, + repeated_iter=itertools.repeat(False), + enum: Optional[wrappers.EnumType] = None, +) -> DummyMessage: # This mimics the structure of MessageType in the wrappers module: # A MessageType has a map from field names to Fields, # and a Field has an (optional) MessageType. @@ -122,17 +148,18 @@ def message_factory(exp: str, # used to describe the field and type hierarchy, # e.g. 
"mollusc.cephalopod.coleoid" toks = exp.split(".") - messages = [DummyMessage(fields={}, type=tok.upper() + "_TYPE") - for tok in toks] + messages = [DummyMessage(fields={}, type=tok.upper() + "_TYPE") for tok in toks] if enum: messages[-1] = enum for base, field, attr_name, repeated_field in zip( messages, messages[1:], toks[1:], repeated_iter ): - base.fields[attr_name] = (DummyField(message=field, repeated=repeated_field) - if isinstance(field, DummyMessage) - else DummyField(enum=field)) + base.fields[attr_name] = ( + DummyField(message=field, repeated=repeated_field) + if isinstance(field, DummyMessage) + else DummyField(enum=field) + ) return messages[0] @@ -142,12 +169,12 @@ def enum_factory(name: str, variants: Iterable[str]) -> wrappers.EnumType: value=tuple( descriptor_pb2.EnumValueDescriptorProto(name=v, number=i) for i, v in enumerate(variants) - ) + ), ) enum = wrappers.EnumType( enum_pb=enum_pb, - values=[wrappers.EnumValueType(enum_value_pb=v) for v in enum_pb.value] + values=[wrappers.EnumValueType(enum_value_pb=v) for v in enum_pb.value], ) return enum diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py index 6ba39640462b..b0a9557f02fc 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_configured_snippet.py @@ -27,8 +27,7 @@ CURRENT_DIRECTORY = Path(__file__).parent.absolute() -SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / \ - "resources" / "speech" / "request.desc" +SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / "resources" / "speech" / "request.desc" CONFIG_JSON_PATH = ( CURRENT_DIRECTORY / "resources" / "speech" / "speech_createCustomClass.json" ) @@ -237,8 +236,7 @@ def test_AppendToSampleFunctionBody(): statement = libcst.parse_statement("'world'") transformer = 
configured_snippet._AppendToSampleFunctionBody(statement) updated_function_def = function_def.visit(transformer) - expected_function_def = libcst.parse_statement( - "def f():\n 'hello'\n 'world'") + expected_function_def = libcst.parse_statement("def f():\n 'hello'\n 'world'") assert updated_function_def.deep_equals(expected_function_def) @@ -249,8 +247,7 @@ def test_AppendToSampleFunctionBody(): statement = libcst.parse_statement("'world'") transformer = configured_snippet._AppendToSampleFunctionBody(statement) updated_function_def = function_def.visit(transformer) - expected_function_def = libcst.parse_statement( - "def f():\n 'hello'\n 'world'") + expected_function_def = libcst.parse_statement("def f():\n 'hello'\n 'world'") assert updated_function_def.deep_equals(expected_function_def) diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py index 6d2e62036b6a..28a2779ab227 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py +++ b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_libcst_utils.py @@ -65,8 +65,7 @@ def test_convert_expression_should_raise_error_if_unsupported(): def test_convert_parameter(): config_parameter = snippet_config_language_pb2.Statement.Declaration( name="some_variable", - value=snippet_config_language_pb2.Expression( - string_value="hello world"), + value=snippet_config_language_pb2.Expression(string_value="hello world"), ) node = libcst_utils.convert_parameter(config_parameter) expected_node = libcst.Param( diff --git a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py index d48304b8d7d3..b52b0fb2c62e 100644 --- a/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py +++ 
b/packages/gapic-generator/tests/unit/configurable_snippetgen/test_resources.py @@ -23,8 +23,7 @@ CURRENT_DIRECTORY = Path(__file__).parent.absolute() -SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / \ - "resources" / "speech" / "request.desc" +SPEECH_V1_REQUEST_PATH = CURRENT_DIRECTORY / "resources" / "speech" / "request.desc" def test_request(): diff --git a/packages/gapic-generator/tests/unit/generator/test_formatter.py b/packages/gapic-generator/tests/unit/generator/test_formatter.py index 1f7b73dfe7e0..9d613c2b31bc 100644 --- a/packages/gapic-generator/tests/unit/generator/test_formatter.py +++ b/packages/gapic-generator/tests/unit/generator/test_formatter.py @@ -18,7 +18,10 @@ def test_fix_whitespace_top_level(): - assert formatter.fix_whitespace(textwrap.dedent("""\ + assert ( + formatter.fix_whitespace( + textwrap.dedent( + """\ import something @@ -32,7 +35,11 @@ class TooFarDown: class TooClose: # remains too close pass - """)) == textwrap.dedent("""\ + """ + ) + ) + == textwrap.dedent( + """\ import something @@ -45,11 +52,16 @@ class TooFarDown: class TooClose: # remains too close pass - """) + """ + ) + ) def test_fix_whitespace_nested(): - assert formatter.fix_whitespace(textwrap.dedent("""\ + assert ( + formatter.fix_whitespace( + textwrap.dedent( + """\ class JustAClass: def foo(self): pass @@ -57,18 +69,27 @@ def foo(self): def too_far_down(self): pass - """)) == textwrap.dedent("""\ + """ + ) + ) + == textwrap.dedent( + """\ class JustAClass: def foo(self): pass def too_far_down(self): pass - """) + """ + ) + ) def test_fix_whitespace_decorators(): - assert formatter.fix_whitespace(textwrap.dedent("""\ + assert ( + formatter.fix_whitespace( + textwrap.dedent( + """\ class JustAClass: def foo(self): pass @@ -77,7 +98,11 @@ def foo(self): @property def too_far_down(self): return 42 - """)) == textwrap.dedent("""\ + """ + ) + ) + == textwrap.dedent( + """\ class JustAClass: def foo(self): pass @@ -85,11 +110,16 @@ def foo(self): @property def 
too_far_down(self): return 42 - """) + """ + ) + ) def test_fix_whitespace_intermediate_whitespace(): - assert formatter.fix_whitespace(textwrap.dedent("""\ + assert ( + formatter.fix_whitespace( + textwrap.dedent( + """\ class JustAClass: def foo(self): pass @@ -99,7 +129,11 @@ def foo(self): @property def too_far_down(self): return 42 - """)) == textwrap.dedent("""\ + """ + ) + ) + == textwrap.dedent( + """\ class JustAClass: def foo(self): pass @@ -107,25 +141,36 @@ def foo(self): @property def too_far_down(self): return 42 - """) + """ + ) + ) def test_fix_whitespace_comment(): - assert formatter.fix_whitespace(textwrap.dedent("""\ + assert ( + formatter.fix_whitespace( + textwrap.dedent( + """\ def do_something(): do_first_thing() # Something something something. do_second_thing() - """)) == textwrap.dedent("""\ + """ + ) + ) + == textwrap.dedent( + """\ def do_something(): do_first_thing() # Something something something. do_second_thing() - """) + """ + ) + ) def test_file_newline_ending(): - assert formatter.fix_whitespace('') == '\n' + assert formatter.fix_whitespace("") == "\n" diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 04415c9e50b7..85bcf582177a 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -27,8 +27,18 @@ from gapic.generator import generator from gapic.samplegen_utils import snippet_metadata_pb2, types, yaml -from ..common_types import (DummyApiSchema, DummyField, DummyIdent, DummyNaming, DummyMessage, DummyMessageTypePB, - DummyService, DummyMethod, message_factory, enum_factory) +from ..common_types import ( + DummyApiSchema, + DummyField, + DummyIdent, + DummyNaming, + DummyMessage, + DummyMessageTypePB, + DummyService, + DummyMethod, + message_factory, + enum_factory, +) from gapic.schema import api from gapic.schema import naming @@ -38,9 
+48,10 @@ def mock_generate_sample(*args, **kwargs): dummy_snippet_metadata = snippet_metadata_pb2.Snippet() - dummy_snippet_metadata.client_method.method.service.short_name = args[0]["service"].split( - ".")[-1] - dummy_snippet_metadata.client_method.method.short_name = args[0]['rpc'] + dummy_snippet_metadata.client_method.method.service.short_name = args[0][ + "service" + ].split(".")[-1] + dummy_snippet_metadata.client_method.method.short_name = args[0]["rpc"] return "", dummy_snippet_metadata @@ -58,14 +69,14 @@ def test_get_response(): generator_obj = make_generator() with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: list_templates.return_value = [ - "foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2" + "foo/bar/baz.py.j2", + "molluscs/squid/sample.py.j2", ] with mock.patch.object(jinja2.Environment, "get_template") as get_template: - get_template.return_value = jinja2.Template( - "I am a template result." + get_template.return_value = jinja2.Template("I am a template result.") + cgr = generator_obj.get_response( + api_schema=make_api(), opts=Options.build("") ) - cgr = generator_obj.get_response(api_schema=make_api(), - opts=Options.build("")) list_templates.assert_called_once() get_template.assert_has_calls( [ @@ -82,14 +93,14 @@ def test_get_response_ignores_empty_files(): generator_obj = make_generator() with mock.patch.object(jinja2.FileSystemLoader, "list_templates") as list_templates: list_templates.return_value = [ - "foo/bar/baz.py.j2", "molluscs/squid/sample.py.j2" + "foo/bar/baz.py.j2", + "molluscs/squid/sample.py.j2", ] with mock.patch.object(jinja2.Environment, "get_template") as get_template: - get_template.return_value = jinja2.Template( - "# Meaningless comment" + get_template.return_value = jinja2.Template("# Meaningless comment") + cgr = generator_obj.get_response( + api_schema=make_api(), opts=Options.build("") ) - cgr = generator_obj.get_response(api_schema=make_api(), - opts=Options.build("")) 
list_templates.assert_called_once() get_template.assert_has_calls( [ @@ -109,11 +120,10 @@ def test_get_response_ignores_private_files(): "molluscs/squid/sample.py.j2", ] with mock.patch.object(jinja2.Environment, "get_template") as get_template: - get_template.return_value = jinja2.Template( - "I am a template result." + get_template.return_value = jinja2.Template("I am a template result.") + cgr = generator_obj.get_response( + api_schema=make_api(), opts=Options.build("") ) - cgr = generator_obj.get_response(api_schema=make_api(), - opts=Options.build("")) list_templates.assert_called_once() get_template.assert_has_calls( [ @@ -133,8 +143,7 @@ def test_get_response_fails_invalid_file_paths(): "foo/bar/%service/%proto/baz.py.j2", ] with pytest.raises(ValueError) as ex: - generator_obj.get_response(api_schema=make_api(), - opts=Options.build("")) + generator_obj.get_response(api_schema=make_api(), opts=Options.build("")) ex_str = str(ex.value) assert "%proto" in ex_str and "%service" in ex_str @@ -146,7 +155,8 @@ def test_get_response_ignore_gapic_metadata(): list_templates.return_value = ["gapic/gapic_metadata.json.j2"] with mock.patch.object(jinja2.Environment, "get_template") as get_template: get_template.return_value = jinja2.Template( - "This is not something we want to see") + "This is not something we want to see" + ) res = generator_obj.get_response( api_schema=make_api(), opts=Options.build(""), @@ -216,23 +226,19 @@ def test_get_response_ignores_unwanted_transports_and_clients(): ] with mock.patch.object(jinja2.Environment, "get_template") as get_template: - get_template.return_value = jinja2.Template( - "Service: {{ service.name }}" - ) + get_template.return_value = jinja2.Template("Service: {{ service.name }}") api_schema = make_api( make_proto( descriptor_pb2.FileDescriptorProto( service=[ - descriptor_pb2.ServiceDescriptorProto( - name="SomeService"), + descriptor_pb2.ServiceDescriptorProto(name="SomeService"), ] ), ) ) cgr = 
generator_obj.get_response( - api_schema=api_schema, - opts=Options.build("transport=river+car") + api_schema=api_schema, opts=Options.build("transport=river+car") ) assert len(cgr.file) == 5 assert {i.name for i in cgr.file} == { @@ -245,8 +251,7 @@ def test_get_response_ignores_unwanted_transports_and_clients(): } cgr = generator_obj.get_response( - api_schema=api_schema, - opts=Options.build("transport=grpc") + api_schema=api_schema, opts=Options.build("transport=grpc") ) assert len(cgr.file) == 5 assert {i.name for i in cgr.file} == { @@ -266,16 +271,13 @@ def test_get_response_enumerates_services(): "molluscs/squid/sample.py.j2", ] with mock.patch.object(jinja2.Environment, "get_template") as get_template: - get_template.return_value = jinja2.Template( - "Service: {{ service.name }}" - ) + get_template.return_value = jinja2.Template("Service: {{ service.name }}") cgr = generator_obj.get_response( api_schema=make_api( make_proto( descriptor_pb2.FileDescriptorProto( service=[ - descriptor_pb2.ServiceDescriptorProto( - name="Spam"), + descriptor_pb2.ServiceDescriptorProto(name="Spam"), descriptor_pb2.ServiceDescriptorProto( name="EggsService" ), @@ -305,10 +307,8 @@ def test_get_response_enumerates_proto(): ) cgr = generator_obj.get_response( api_schema=make_api( - make_proto( - descriptor_pb2.FileDescriptorProto(name="a.proto")), - make_proto( - descriptor_pb2.FileDescriptorProto(name="b.proto")), + make_proto(descriptor_pb2.FileDescriptorProto(name="a.proto")), + make_proto(descriptor_pb2.FileDescriptorProto(name="b.proto")), ), opts=Options.build(""), ) @@ -336,8 +336,7 @@ def test_get_response_divides_subpackages(): descriptor_pb2.FileDescriptorProto( name="a/eggs/yolk.proto", package="foo.v1.eggs", - service=[descriptor_pb2.ServiceDescriptorProto( - name="Scramble")], + service=[descriptor_pb2.ServiceDescriptorProto(name="Scramble")], ), ], package="foo.v1", @@ -354,8 +353,9 @@ def test_get_response_divides_subpackages(): {{- '' }}Subpackage: {{ 
'.'.join(api.subpackage_view) }} """.strip() ) - cgr = generator_obj.get_response(api_schema=api_schema, - opts=Options.build("autogen-snippets=false")) + cgr = generator_obj.get_response( + api_schema=api_schema, opts=Options.build("autogen-snippets=false") + ) assert len(cgr.file) == 6 assert {i.name for i in cgr.file} == { "foo/types/top.py", @@ -389,7 +389,9 @@ def test_get_filename_with_namespace(): template_name, api_schema=make_api( naming=make_naming( - name="Spam", namespace=("Ham", "Bacon"), version="v2", + name="Spam", + namespace=("Ham", "Bacon"), + version="v2", ), ), ) @@ -409,8 +411,7 @@ def test_get_filename_with_service(): context={ "service": wrappers.Service( methods=[], - service_pb=descriptor_pb2.ServiceDescriptorProto( - name="Eggs"), + service_pb=descriptor_pb2.ServiceDescriptorProto(name="Eggs"), visible_resources={}, ), }, @@ -421,7 +422,8 @@ def test_get_filename_with_service(): def test_get_filename_with_proto(): file_pb2 = descriptor_pb2.FileDescriptorProto( - name="bacon.proto", package="foo.bar.v1", + name="bacon.proto", + package="foo.bar.v1", ) api = make_api( make_proto(file_pb2), @@ -441,13 +443,19 @@ def test_get_filename_with_proto(): def test_get_filename_with_proto_and_sub(): file_pb2 = descriptor_pb2.FileDescriptorProto( - name="bacon.proto", package="foo.bar.v2.baz", + name="bacon.proto", + package="foo.bar.v2.baz", ) naming = make_naming( - namespace=("Foo",), name="Bar", proto_package="foo.bar.v2", version="v2", + namespace=("Foo",), + name="Bar", + proto_package="foo.bar.v2", + version="v2", ) api = make_api( - make_proto(file_pb2, naming=naming), naming=naming, subpackage_view=("baz",), + make_proto(file_pb2, naming=naming), + naming=naming, + subpackage_view=("baz",), ) generator_obj = make_generator() @@ -480,7 +488,9 @@ def test_parse_sample_paths(fs): Options.build("samples=sampledir/,") -@mock.patch("time.gmtime",) +@mock.patch( + "time.gmtime", +) def test_samplegen_config_to_output_files(mock_gmtime, fs): # These 
time values are nothing special, # they just need to be deterministic. @@ -512,47 +522,57 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): ), ) - generator_obj = generator.Generator(Options.build("samples=samples.yaml",)) + generator_obj = generator.Generator( + Options.build( + "samples=samples.yaml", + ) + ) # Need to have the sample template visible to the generator. generator_obj._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - name="Mollusc", - methods={ - # For this test the generator only cares about the dictionary keys - "GetSquidStreaming": DummyMethod(), - "GetClam": DummyMethod(), - }, - )}, - naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca.v1"), + services={ + "Mollusc": DummyService( + name="Mollusc", + methods={ + # For this test the generator only cares about the dictionary keys + "GetSquidStreaming": DummyMethod(), + "GetClam": DummyMethod(), + }, + ) + }, + naming=DummyNaming( + name="mollusc", + version="v1", + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace="mollusc.cephalopod", + proto_package="google.mollusca.v1", + ), ) - with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): + with mock.patch( + "gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample + ): actual_response = generator_obj.get_response( - api_schema, opts=Options.build("autogen-snippets=False")) + api_schema, opts=Options.build("autogen-snippets=False") + ) expected_snippet_index_json = { "clientLibrary": { - "apis": [{ - "id": "google.mollusca.v1", - "version": "v1" - }], + "apis": [{"id": "google.mollusca.v1", "version": "v1"}], "language": "PYTHON", "name": "mollusc-cephalopod-teuthida-", - "version": 
"0.1.0" - }, + "version": "0.1.0", + }, "snippets": [ { "clientMethod": { "method": { - "service": { - "shortName": "Mollusc" - }, - "shortName": "GetSquidStreaming" - } - }, + "service": {"shortName": "Mollusc"}, + "shortName": "GetSquidStreaming", + } + }, "file": "squid_sample.py", "segments": [ {"type": "FULL"}, @@ -560,19 +580,17 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): {"type": "CLIENT_INITIALIZATION"}, {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, - {"type": "RESPONSE_HANDLING"} + {"type": "RESPONSE_HANDLING"}, ], - "title": "squid_sample.py" - }, + "title": "squid_sample.py", + }, { "clientMethod": { "method": { - "service": { - "shortName": "Mollusc" - }, - "shortName": "GetClam" - } - }, + "service": {"shortName": "Mollusc"}, + "shortName": "GetClam", + } + }, "file": "clam_sample.py", "segments": [ {"type": "FULL"}, @@ -580,32 +598,40 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): {"type": "CLIENT_INITIALIZATION"}, {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, - {"type": "RESPONSE_HANDLING"} + {"type": "RESPONSE_HANDLING"}, ], - "title": "clam_sample.py" - } - ] - } + "title": "clam_sample.py", + }, + ], + } - assert actual_response.supported_features == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + assert ( + actual_response.supported_features + == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + ) assert len(actual_response.file) == 3 assert actual_response.file[0] == CodeGeneratorResponse.File( - name="samples/generated_samples/squid_sample.py", content="\n",) + name="samples/generated_samples/squid_sample.py", + content="\n", + ) assert actual_response.file[1] == CodeGeneratorResponse.File( - name="samples/generated_samples/clam_sample.py", content="\n",) + name="samples/generated_samples/clam_sample.py", + content="\n", + ) - assert actual_response.file[2].name == "samples/generated_samples/snippet_metadata_google.mollusca.v1.json" + assert ( + 
actual_response.file[2].name + == "samples/generated_samples/snippet_metadata_google.mollusca.v1.json" + ) - assert json.loads( - actual_response.file[2].content) == expected_snippet_index_json + assert json.loads(actual_response.file[2].content) == expected_snippet_index_json +@mock.patch("gapic.samplegen.samplegen.generate_sample_specs", return_value=[]) @mock.patch( - "gapic.samplegen.samplegen.generate_sample_specs", return_value=[] -) -@mock.patch( - "gapic.samplegen.samplegen.generate_sample", return_value=("", snippet_metadata_pb2.Snippet()), + "gapic.samplegen.samplegen.generate_sample", + return_value=("", snippet_metadata_pb2.Snippet()), ) def test_generate_autogen_samples(mock_generate_sample, mock_generate_specs): opts = Options.build("autogen-snippets") @@ -613,20 +639,18 @@ def test_generate_autogen_samples(mock_generate_sample, mock_generate_specs): # Need to have the sample template visible to the generator. generator_obj._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api(naming=naming.NewNaming( - name="Mollusc", version="v6")) + api_schema = make_api(naming=naming.NewNaming(name="Mollusc", version="v6")) generator_obj.get_response(api_schema, opts=opts) # Just check that generate_sample_specs was called # Correctness of the spec is tested in samplegen unit tests - mock_generate_specs.assert_called_once_with( - api_schema, - opts=opts - ) + mock_generate_specs.assert_called_once_with(api_schema, opts=opts) -@mock.patch("time.gmtime",) +@mock.patch( + "time.gmtime", +) def test_samplegen_id_disambiguation(mock_gmtime, fs): # These time values are nothing special, # they just need to be deterministic. 
@@ -670,41 +694,45 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): generator_obj._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - name="Mollusc", - methods={ - # The generator only cares about the dictionary keys - "GetSquidStreaming": DummyMethod(), - "GetClam": DummyMethod(), - }, - )}, - naming=DummyNaming(name="mollusc", version="v1", warehouse_package_name="mollusc-cephalopod-teuthida-", - versioned_module_name="teuthida_v1", module_namespace="mollusc.cephalopod", proto_package="google.mollusca.v1"), + services={ + "Mollusc": DummyService( + name="Mollusc", + methods={ + # The generator only cares about the dictionary keys + "GetSquidStreaming": DummyMethod(), + "GetClam": DummyMethod(), + }, + ) + }, + naming=DummyNaming( + name="mollusc", + version="v1", + warehouse_package_name="mollusc-cephalopod-teuthida-", + versioned_module_name="teuthida_v1", + module_namespace="mollusc.cephalopod", + proto_package="google.mollusca.v1", + ), ) - with mock.patch("gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample): - actual_response = generator_obj.get_response(api_schema, - opts=Options.build("autogen-snippets=False")) + with mock.patch( + "gapic.samplegen.samplegen.generate_sample", side_effect=mock_generate_sample + ): + actual_response = generator_obj.get_response( + api_schema, opts=Options.build("autogen-snippets=False") + ) expected_snippet_metadata_json = { "clientLibrary": { - "apis": [ - { - "id": "google.mollusca.v1", - "version": "v1" - } - ], + "apis": [{"id": "google.mollusca.v1", "version": "v1"}], "language": "PYTHON", "name": "mollusc-cephalopod-teuthida-", - "version": "0.1.0" + "version": "0.1.0", }, "snippets": [ { "clientMethod": { "method": { - "service": { - "shortName": "Mollusc" - }, - "shortName": "GetSquidStreaming" + "service": {"shortName": "Mollusc"}, + "shortName": "GetSquidStreaming", } }, "file": "squid_sample_1cfd0b3d.py", @@ 
-714,17 +742,15 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): {"type": "CLIENT_INITIALIZATION"}, {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, - {"type": "RESPONSE_HANDLING"} + {"type": "RESPONSE_HANDLING"}, ], - "title": "squid_sample_1cfd0b3d.py" - }, + "title": "squid_sample_1cfd0b3d.py", + }, { "clientMethod": { "method": { - "service": { - "shortName": "Mollusc" - }, - "shortName": "GetSquidStreaming" + "service": {"shortName": "Mollusc"}, + "shortName": "GetSquidStreaming", } }, "file": "squid_sample_cf4d4fa4.py", @@ -734,17 +760,15 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): {"type": "CLIENT_INITIALIZATION"}, {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, - {"type": "RESPONSE_HANDLING"} + {"type": "RESPONSE_HANDLING"}, ], - "title": "squid_sample_cf4d4fa4.py" - }, + "title": "squid_sample_cf4d4fa4.py", + }, { "clientMethod": { "method": { - "service": { - "shortName": "Mollusc" - }, - "shortName": "GetSquidStreaming" + "service": {"shortName": "Mollusc"}, + "shortName": "GetSquidStreaming", } }, "file": "7384949e.py", @@ -754,28 +778,36 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): {"type": "CLIENT_INITIALIZATION"}, {"type": "REQUEST_INITIALIZATION"}, {"type": "REQUEST_EXECUTION"}, - {"type": "RESPONSE_HANDLING"} + {"type": "RESPONSE_HANDLING"}, ], - "title": "7384949e.py" - } - ] + "title": "7384949e.py", + }, + ], } - assert actual_response.supported_features == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + assert ( + actual_response.supported_features + == CodeGeneratorResponse.Feature.FEATURE_PROTO3_OPTIONAL + ) assert len(actual_response.file) == 4 assert actual_response.file[0] == CodeGeneratorResponse.File( - name="samples/generated_samples/squid_sample_1cfd0b3d.py", content="\n", + name="samples/generated_samples/squid_sample_1cfd0b3d.py", + content="\n", ) assert actual_response.file[1] == CodeGeneratorResponse.File( - 
name="samples/generated_samples/squid_sample_cf4d4fa4.py", content="\n", + name="samples/generated_samples/squid_sample_cf4d4fa4.py", + content="\n", ) assert actual_response.file[2] == CodeGeneratorResponse.File( - name="samples/generated_samples/7384949e.py", content="\n", + name="samples/generated_samples/7384949e.py", + content="\n", ) print(actual_response.file[3].content) - assert actual_response.file[3].name == "samples/generated_samples/snippet_metadata_google.mollusca.v1.json" - assert json.loads( - actual_response.file[3].content) == expected_snippet_metadata_json + assert ( + actual_response.file[3].name + == "samples/generated_samples/snippet_metadata_google.mollusca.v1.json" + ) + assert json.loads(actual_response.file[3].content) == expected_snippet_metadata_json def test_generator_duplicate_samples(fs): @@ -800,12 +832,10 @@ def test_generator_duplicate_samples(fs): generator = make_generator("samples=samples.yaml") generator._env.loader = jinja2.DictLoader({"sample.py.j2": ""}) - api_schema = make_api(naming=naming.NewNaming( - name="Mollusc", version="v6")) + api_schema = make_api(naming=naming.NewNaming(name="Mollusc", version="v6")) with pytest.raises(types.DuplicateSample): - generator.get_response(api_schema=api_schema, - opts=Options.build("")) + generator.get_response(api_schema=api_schema, opts=Options.build("")) def make_generator(opts_str: str = "") -> generator.Generator: diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 3a12bf474e81..9c1effb76967 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -23,41 +23,41 @@ def test_options_empty(): - opts = Options.build('') + opts = Options.build("") assert len(opts.templates) == 1 - assert opts.templates[0].endswith('gapic/templates') + assert opts.templates[0].endswith("gapic/templates") assert not 
opts.lazy_import assert not opts.old_naming def test_options_replace_templates(): - opts = Options.build('python-gapic-templates=/foo/') + opts = Options.build("python-gapic-templates=/foo/") assert len(opts.templates) == 1 - assert opts.templates[0] == '/foo' + assert opts.templates[0] == "/foo" def test_options_relative_templates(): - opts = Options.build('python-gapic-templates=../../squid/clam') + opts = Options.build("python-gapic-templates=../../squid/clam") - expected = (os.path.abspath('../squid/clam'),) + expected = (os.path.abspath("../squid/clam"),) assert opts.templates == expected def test_options_unrecognized(): - with mock.patch.object(warnings, 'warn') as warn: - Options.build('python-gapic-abc=xyz') - warn.assert_called_once_with('Unrecognized option: `python-gapic-abc`.') + with mock.patch.object(warnings, "warn") as warn: + Options.build("python-gapic-abc=xyz") + warn.assert_called_once_with("Unrecognized option: `python-gapic-abc`.") def test_flags_unrecognized(): - with mock.patch.object(warnings, 'warn') as warn: - Options.build('python-gapic-abc') - warn.assert_called_once_with('Unrecognized option: `python-gapic-abc`.') + with mock.patch.object(warnings, "warn") as warn: + Options.build("python-gapic-abc") + warn.assert_called_once_with("Unrecognized option: `python-gapic-abc`.") def test_options_unrecognized_likely_typo(): - with mock.patch.object(warnings, 'warn') as warn: - Options.build('go-gapic-abc=xyz') + with mock.patch.object(warnings, "warn") as warn: + Options.build("go-gapic-abc=xyz") assert len(warn.mock_calls) == 0 @@ -65,12 +65,13 @@ def test_options_trim_whitespace(): # When writing shell scripts, users may construct options strings with # whitespace that needs to be trimmed after tokenizing. 
opts = Options.build( - ''' + """ python-gapic-templates=/squid/clam/whelk , python-gapic-name=mollusca , - ''') - assert opts.templates[0] == '/squid/clam/whelk' - assert opts.name == 'mollusca' + """ + ) + assert opts.templates[0] == "/squid/clam/whelk" + assert opts.name == "mollusca" def test_options_no_valid_sample_config(fs): @@ -85,7 +86,9 @@ def test_options_service_config(fs): # Default of None is okay, verify build can read a config. service_config_fpath = "service_config.json" - fs.create_file(service_config_fpath, contents="""{ + fs.create_file( + service_config_fpath, + contents="""{ "methodConfig": [ { "name": [ @@ -107,7 +110,8 @@ def test_options_service_config(fs): "timeout": "5s" } ] - }""") + }""", + ) opt_string = f"retry-config={service_config_fpath}" opts = Options.build(opt_string) @@ -127,13 +131,9 @@ def test_options_service_config(fs): "maxBackoff": "3s", "initialBackoff": "0.2s", "backoffMultiplier": 2, - "retryableStatusCodes": - [ - "UNAVAILABLE", - "UNKNOWN" - ] + "retryableStatusCodes": ["UNAVAILABLE", "UNKNOWN"], }, - "timeout": "5s" + "timeout": "5s", } ] } @@ -145,28 +145,27 @@ def test_options_service_yaml_config(fs): assert opts.service_yaml_config == {} service_yaml_fpath = "testapi_v1.yaml" - fs.create_file(service_yaml_fpath, - contents=("type: google.api.Service\n" - "config_version: 3\n" - "name: testapi.googleapis.com\n")) + fs.create_file( + service_yaml_fpath, + contents=( + "type: google.api.Service\n" + "config_version: 3\n" + "name: testapi.googleapis.com\n" + ), + ) opt_string = f"service-yaml={service_yaml_fpath}" opts = Options.build(opt_string) - expected_config = { - "config_version": 3, - "name": "testapi.googleapis.com" - } + expected_config = {"config_version": 3, "name": "testapi.googleapis.com"} assert opts.service_yaml_config == expected_config service_yaml_fpath = "testapi_v2.yaml" - fs.create_file(service_yaml_fpath, - contents=("config_version: 3\n" - "name: testapi.googleapis.com\n")) + fs.create_file( + 
service_yaml_fpath, + contents=("config_version: 3\n" "name: testapi.googleapis.com\n"), + ) opt_string = f"service-yaml={service_yaml_fpath}" opts = Options.build(opt_string) - expected_config = { - "config_version": 3, - "name": "testapi.googleapis.com" - } + expected_config = {"config_version": 3, "name": "testapi.googleapis.com"} assert opts.service_yaml_config == expected_config @@ -189,14 +188,15 @@ def test_options_bool_flags(): # New options should follow the dash-case/snake_case convention. opt_str_to_attr_name = { name: re.sub(r"-", "_", name) - for name in - ["lazy-import", - "old-naming", - "add-iam-methods", - "metadata", - "warehouse-package-name", - "rest-numeric-enums", - ]} + for name in [ + "lazy-import", + "old-naming", + "add-iam-methods", + "metadata", + "warehouse-package-name", + "rest-numeric-enums", + ] + } for opt, attr in opt_str_to_attr_name.items(): options = Options.build("") @@ -227,10 +227,10 @@ def test_options_autogen_snippets_false_for_old_naming(): def test_options_proto_plus_deps(): opts = Options.build("proto-plus-deps=") - assert opts.proto_plus_deps == ('',) + assert opts.proto_plus_deps == ("",) opts = Options.build("proto-plus-deps=google.apps.script.type.calendar") - assert opts.proto_plus_deps == ('google.apps.script.type.calendar',) + assert opts.proto_plus_deps == ("google.apps.script.type.calendar",) opts = Options.build( "proto-plus-deps=\ @@ -249,5 +249,5 @@ def test_options_proto_plus_deps(): "google.apps.script.type.gmail", "google.apps.script.type.sheets", "google.apps.script.type.slides", - "google.apps.script.type" + "google.apps.script.type", ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index b6c7e1dad74a..860f3efaa470 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -22,14 +22,22 @@ import gapic.utils as 
utils from gapic.samplegen import samplegen -from gapic.samplegen_utils import (types, utils as gapic_utils) +from gapic.samplegen_utils import types, utils as gapic_utils from gapic.samplegen_utils import snippet_metadata_pb2 -from gapic.schema import (naming, wrappers) - -from ..common_types import (DummyField, DummyMessage, - - DummyMessageTypePB, DummyMethod, DummyService, DummyIdent, - DummyApiSchema, DummyNaming, enum_factory, message_factory) +from gapic.schema import naming, wrappers + +from ..common_types import ( + DummyField, + DummyMessage, + DummyMessageTypePB, + DummyMethod, + DummyService, + DummyIdent, + DummyApiSchema, + DummyNaming, + enum_factory, + message_factory, +) from collections import namedtuple from textwrap import dedent @@ -37,17 +45,18 @@ env = jinja2.Environment( loader=jinja2.FileSystemLoader( - searchpath=path.realpath(path.join(path.dirname(__file__), - "..", "..", "..", - "gapic", "templates") - )), + searchpath=path.realpath( + path.join(path.dirname(__file__), "..", "..", "..", "gapic", "templates") + ) + ), undefined=jinja2.StrictUndefined, extensions=["jinja2.ext.do"], - trim_blocks=True, lstrip_blocks=True + trim_blocks=True, + lstrip_blocks=True, ) -env.filters['snake_case'] = utils.to_snake_case -env.filters['coerce_response_name'] = gapic_utils.coerce_response_name -env.filters['render_format_string'] = gapic_utils.render_format_string +env.filters["snake_case"] = utils.to_snake_case +env.filters["coerce_response_name"] = gapic_utils.coerce_response_name +env.filters["render_format_string"] = gapic_utils.render_format_string def golden_snippet(filename: str) -> str: @@ -76,19 +85,18 @@ def test_generate_sample_basic(): "location_annotation": DummyField( type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), - ) + ), }, ), - ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget") + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget"), ) input_type = DummyMessage( type="REQUEST TYPE", - 
fields={ - "classify_target": classify_target_field - }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest", - sphinx="molluscs_v1.classify_request") + fields={"classify_target": classify_target_field}, + ident=DummyIdent( + name="molluscs.v1.ClassifyRequest", sphinx="molluscs_v1.classify_request" + ), ) output_type = DummyMessage( @@ -98,22 +106,19 @@ def test_generate_sample_basic(): type=DummyMessageTypePB(name="Classification"), ) }, - ident=DummyIdent(sphinx="molluscs_v1.classification") + ident=DummyIdent(sphinx="molluscs_v1.classification"), ) - api_naming = naming.NewNaming( - name="MolluscClient", namespace=("molluscs", "v1")) + api_naming = naming.NewNaming(name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( - service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + service_pb=namedtuple("service_pb", ["name"])("MolluscService"), methods={ "Classify": DummyMethod( name="Classify", input=input_type, output=message_factory("$resp.taxonomy"), client_output=output_type, - flattened_fields={ - "classify_target": classify_target_field - } + flattened_fields={"classify_target": classify_target_field}, ) }, visible_resources={}, @@ -124,59 +129,64 @@ def test_generate_sample_basic(): naming=api_naming, ) - sample = {"service": "animalia.mollusca.v1.Mollusc", - "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", - "rpc": "Classify", - "id": "mollusc_classify_sync", - "description": "Determine the full taxonomy of input mollusc", - "request": [ - {"field": "classify_target.video", - "value": "path/to/mollusc/video.mkv", - "input_parameter": "video", - "value_is_file": True}, - {"field": "classify_target.location_annotation", - "value": "New Zealand", - "input_parameter": "location"} - ], - "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} + sample = { + "service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", + "rpc": "Classify", + 
"id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + { + "field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True, + }, + { + "field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location", + }, + ], + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}], + } sample_str, metadata = samplegen.generate_sample( - sample, - schema, - env.get_template('examples/sample.py.j2') + sample, schema, env.get_template("examples/sample.py.j2") ) assert sample_str == golden_snippet("sample_basic.py") assert json_format.MessageToDict(metadata) == { - 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', - 'description': 'Sample for Classify', - 'language': 'PYTHON', - 'clientMethod': { - 'shortName': 'classify', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient.classify', - 'parameters': [ - {'type': 'molluscs_v1.classify_request', 'name': 'request'}, - {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, - {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, - {'type': 'float', 'name': 'timeout'}, - { - 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', - 'name': 'metadata' - } + "regionTag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", + "description": "Sample for Classify", + "language": "PYTHON", + "clientMethod": { + "shortName": "classify", + "fullName": "molluscs.v1.molluscclient.MolluscServiceClient.classify", + "parameters": [ + {"type": "molluscs_v1.classify_request", "name": "request"}, + {"type": "molluscs_v1.ClassifyTarget", "name": "classify_target"}, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, ], - 'resultType': 'molluscs_v1.classification', - 'client': { - 'shortName': 'MolluscServiceClient', - 
'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient' + "resultType": "molluscs_v1.classification", + "client": { + "shortName": "MolluscServiceClient", + "fullName": "molluscs.v1.molluscclient.MolluscServiceClient", }, - 'method': { - 'shortName': 'Classify', - 'fullName': '.MolluscService.Classify', - 'service': {'shortName': 'MolluscService', 'fullName': '.MolluscService'}} + "method": { + "shortName": "Classify", + "fullName": ".MolluscService.Classify", + "service": { + "shortName": "MolluscService", + "fullName": ".MolluscService", + }, }, - 'canonical': True, - 'origin': 'API_DEFINITION' + }, + "canonical": True, + "origin": "API_DEFINITION", } @@ -200,19 +210,18 @@ def test_generate_sample_basic_async(): "location_annotation": DummyField( type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), - ) + ), }, ), - ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget") + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget"), ) input_type = DummyMessage( type="REQUEST TYPE", - fields={ - "classify_target": classify_target_field - }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest", - sphinx="molluscs_v1.classify_request") + fields={"classify_target": classify_target_field}, + ident=DummyIdent( + name="molluscs.v1.ClassifyRequest", sphinx="molluscs_v1.classify_request" + ), ) output_type = DummyMessage( @@ -222,13 +231,12 @@ def test_generate_sample_basic_async(): type=DummyMessageTypePB(name="Classification"), ) }, - ident=DummyIdent(sphinx="molluscs_v1.classification") + ident=DummyIdent(sphinx="molluscs_v1.classification"), ) - api_naming = naming.NewNaming( - name="MolluscClient", namespace=("molluscs", "v1")) + api_naming = naming.NewNaming(name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( - service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + service_pb=namedtuple("service_pb", ["name"])("MolluscService"), methods={ "Classify": DummyMethod( name="Classify", @@ -236,9 +244,7 
@@ def test_generate_sample_basic_async(): output=message_factory("$resp.taxonomy"), client_output_async=output_type, client_output=output_type, - flattened_fields={ - "classify_target": classify_target_field - } + flattened_fields={"classify_target": classify_target_field}, ) }, visible_resources={}, @@ -249,65 +255,66 @@ def test_generate_sample_basic_async(): naming=api_naming, ) - sample = {"service": "animalia.mollusca.v1.Mollusc", - "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_async", - "rpc": "Classify", - "transport": "grpc-async", - "id": "mollusc_classify_sync", - "description": "Determine the full taxonomy of input mollusc", - "request": [ - {"field": "classify_target.video", - "value": "path/to/mollusc/video.mkv", - "input_parameter": "video", - "value_is_file": True}, - {"field": "classify_target.location_annotation", - "value": "New Zealand", - "input_parameter": "location"} - ], - "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} + sample = { + "service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_async", + "rpc": "Classify", + "transport": "grpc-async", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + { + "field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True, + }, + { + "field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location", + }, + ], + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}], + } sample_str, metadata = samplegen.generate_sample( - sample, - schema, - env.get_template('examples/sample.py.j2') + sample, schema, env.get_template("examples/sample.py.j2") ) assert sample_str == golden_snippet("sample_basic_async.py") assert json_format.MessageToDict(metadata) == { - 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_async', - 'description': 
'Sample for Classify', - 'language': 'PYTHON', - 'clientMethod': { - 'shortName': 'classify', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceAsyncClient.classify', - 'async': True, - 'parameters': [ - {'type': 'molluscs_v1.classify_request', 'name': 'request'}, - {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, - {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, - {'type': 'float', 'name': 'timeout'}, - { - 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', - 'name': 'metadata' - } + "regionTag": "molluscs_generated_molluscs_v1_Mollusc_Classify_async", + "description": "Sample for Classify", + "language": "PYTHON", + "clientMethod": { + "shortName": "classify", + "fullName": "molluscs.v1.molluscclient.MolluscServiceAsyncClient.classify", + "async": True, + "parameters": [ + {"type": "molluscs_v1.classify_request", "name": "request"}, + {"type": "molluscs_v1.ClassifyTarget", "name": "classify_target"}, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, ], - 'resultType': 'molluscs_v1.classification', - 'client': { - 'shortName': 'MolluscServiceAsyncClient', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceAsyncClient' + "resultType": "molluscs_v1.classification", + "client": { + "shortName": "MolluscServiceAsyncClient", + "fullName": "molluscs.v1.molluscclient.MolluscServiceAsyncClient", + }, + "method": { + "shortName": "Classify", + "fullName": ".MolluscService.Classify", + "service": { + "shortName": "MolluscService", + "fullName": ".MolluscService", + }, }, - 'method': { - 'shortName': 'Classify', - 'fullName': '.MolluscService.Classify', - 'service': { - 'shortName': 'MolluscService', - 'fullName': '.MolluscService' - } - } }, - 'canonical': True, - 'origin': 'API_DEFINITION' + "canonical": True, + "origin": "API_DEFINITION", } @@ -331,13 +338,14 @@ def test_generate_sample_basic_unflattenable(): 
"location_annotation": DummyField( type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), - ) + ), }, - ) + ), ) }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest", - sphinx="molluscs_v1.classify_request") + ident=DummyIdent( + name="molluscs.v1.ClassifyRequest", sphinx="molluscs_v1.classify_request" + ), ) output_type = DummyMessage( @@ -347,13 +355,12 @@ def test_generate_sample_basic_unflattenable(): type=DummyMessageTypePB(name="Classification"), ) }, - ident=DummyIdent(sphinx="molluscs_v1.classification") + ident=DummyIdent(sphinx="molluscs_v1.classification"), ) - api_naming = naming.NewNaming( - name="MolluscClient", namespace=("molluscs", "v1")) + api_naming = naming.NewNaming(name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( - service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + service_pb=namedtuple("service_pb", ["name"])("MolluscService"), methods={ "Classify": DummyMethod( name="Classify", @@ -370,59 +377,63 @@ def test_generate_sample_basic_unflattenable(): naming=api_naming, ) - sample = {"service": "animalia.mollusca.v1.Mollusc", - "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", - "rpc": "Classify", - "id": "mollusc_classify_sync", - "description": "Determine the full taxonomy of input mollusc", - "request": [ - {"field": "classify_target.video", - "value": "path/to/mollusc/video.mkv", - "input_parameter": "video", - "value_is_file": True}, - {"field": "classify_target.location_annotation", - "value": "New Zealand", - "input_parameter": "location"} - ], - "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}]} + sample = { + "service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", + "rpc": "Classify", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + { + "field": "classify_target.video", + "value": 
"path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True, + }, + { + "field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location", + }, + ], + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}], + } sample_str, metadata = samplegen.generate_sample( - sample, - schema, - env.get_template('examples/sample.py.j2') + sample, schema, env.get_template("examples/sample.py.j2") ) assert sample_str == golden_snippet("sample_basic_unflattenable.py") assert json_format.MessageToDict(metadata) == { - 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', - 'description': 'Sample for Classify', - 'language': 'PYTHON', - 'clientMethod': { - 'shortName': 'classify', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient.classify', - 'parameters': [ - {'type': 'molluscs_v1.classify_request', 'name': 'request'}, - {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, - {'type': 'float', 'name': 'timeout'}, - { - 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', - 'name': 'metadata' - } + "regionTag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", + "description": "Sample for Classify", + "language": "PYTHON", + "clientMethod": { + "shortName": "classify", + "fullName": "molluscs.v1.molluscclient.MolluscServiceClient.classify", + "parameters": [ + {"type": "molluscs_v1.classify_request", "name": "request"}, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, ], - 'resultType': 'molluscs_v1.classification', - 'client': { - 'shortName': 'MolluscServiceClient', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient' + "resultType": "molluscs_v1.classification", + "client": { + "shortName": "MolluscServiceClient", + "fullName": "molluscs.v1.molluscclient.MolluscServiceClient", + }, + "method": { + "shortName": "Classify", + "fullName": 
".MolluscService.Classify", + "service": { + "shortName": "MolluscService", + "fullName": ".MolluscService", + }, }, - 'method': { - 'shortName': 'Classify', - 'fullName': '.MolluscService.Classify', - 'service': {'shortName': 'MolluscService', 'fullName': '.MolluscService'} - } }, - 'canonical': True, - 'origin': 'API_DEFINITION' + "canonical": True, + "origin": "API_DEFINITION", } @@ -440,25 +451,23 @@ def test_generate_sample_void_method(): "location_annotation": DummyField( type=DummyMessageTypePB(name="Location"), message=DummyMessage(type="LOCATION TYPE"), - ) + ), }, ), - ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget") + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget"), ) input_type = DummyMessage( type="REQUEST TYPE", - fields={ - "classify_target": classify_target_field - }, - ident=DummyIdent(name="molluscs.v1.ClassifyRequest", - sphinx="molluscs_v1.classify_request") + fields={"classify_target": classify_target_field}, + ident=DummyIdent( + name="molluscs.v1.ClassifyRequest", sphinx="molluscs_v1.classify_request" + ), ) - api_naming = naming.NewNaming( - name="MolluscClient", namespace=("molluscs", "v1")) + api_naming = naming.NewNaming(name="MolluscClient", namespace=("molluscs", "v1")) service = wrappers.Service( - service_pb=namedtuple('service_pb', ['name'])('MolluscService'), + service_pb=namedtuple("service_pb", ["name"])("MolluscService"), methods={ "Classify": DummyMethod( name="Classify", @@ -468,7 +477,7 @@ def test_generate_sample_void_method(): output=message_factory("$resp.taxonomy"), flattened_fields={ "classify_target": classify_target_field, - } + }, ) }, visible_resources={}, @@ -479,58 +488,62 @@ def test_generate_sample_void_method(): naming=api_naming, ) - sample = {"service": "animalia.mollusca.v1.Mollusc", - "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", - "rpc": "Classify", - "id": "mollusc_classify_sync", - "description": "Determine the full taxonomy of input mollusc", - "request": [ - {"field": 
"classify_target.video", - "value": "path/to/mollusc/video.mkv", - "input_parameter": "video", - "value_is_file": True}, - {"field": "classify_target.location_annotation", - "value": "New Zealand", - "input_parameter": "location"} - ]} + sample = { + "service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", + "rpc": "Classify", + "id": "mollusc_classify_sync", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + { + "field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True, + }, + { + "field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location", + }, + ], + } sample_str, metadata = samplegen.generate_sample( - sample, - schema, - env.get_template('examples/sample.py.j2') + sample, schema, env.get_template("examples/sample.py.j2") ) assert sample_str == golden_snippet("sample_basic_void_method.py") assert json_format.MessageToDict(metadata) == { - 'regionTag': 'molluscs_generated_molluscs_v1_Mollusc_Classify_sync', - 'description': 'Sample for Classify', - 'language': 'PYTHON', - 'clientMethod': { - 'shortName': 'classify', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient.classify', - 'parameters': [ - {'type': 'molluscs_v1.classify_request', 'name': 'request'}, - {'type': 'molluscs_v1.ClassifyTarget', 'name': 'classify_target'}, - {'type': 'google.api_core.retry.Retry', 'name': 'retry'}, - {'type': 'float', 'name': 'timeout'}, - { - 'type': 'Sequence[Tuple[str, Union[str, bytes]]]', - 'name': 'metadata' - } + "regionTag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync", + "description": "Sample for Classify", + "language": "PYTHON", + "clientMethod": { + "shortName": "classify", + "fullName": "molluscs.v1.molluscclient.MolluscServiceClient.classify", + "parameters": [ + {"type": "molluscs_v1.classify_request", "name": "request"}, + {"type": 
"molluscs_v1.ClassifyTarget", "name": "classify_target"}, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, ], - 'client': { - 'shortName': 'MolluscServiceClient', - 'fullName': 'molluscs.v1.molluscclient.MolluscServiceClient' + "client": { + "shortName": "MolluscServiceClient", + "fullName": "molluscs.v1.molluscclient.MolluscServiceClient", + }, + "method": { + "shortName": "Classify", + "fullName": ".MolluscService.Classify", + "service": { + "shortName": "MolluscService", + "fullName": ".MolluscService", + }, }, - 'method': { - 'shortName': 'Classify', - 'fullName': '.MolluscService.Classify', - 'service': {'shortName': 'MolluscService', 'fullName': '.MolluscService'} - } }, - 'canonical': True, - 'origin': 'API_DEFINITION' + "canonical": True, + "origin": "API_DEFINITION", } @@ -542,36 +555,38 @@ def test_generate_sample_service_not_found(): samplegen.generate_sample( sample, schema, - env.get_template('examples/sample.py.j2'), + env.get_template("examples/sample.py.j2"), ) def test_generate_sample_rpc_not_found(): schema = DummyApiSchema( - {"Mollusc": DummyService(methods={}, client_name="ClassifyClient")}, DummyNaming("pkg_name")) + {"Mollusc": DummyService(methods={}, client_name="ClassifyClient")}, + DummyNaming("pkg_name"), + ) sample = {"service": "Mollusc", "rpc": "Classify"} with pytest.raises(types.RpcMethodNotFound): - list(samplegen.generate_sample( - sample, - schema, - env.get_template('examples/sample.py.j2')), + list( + samplegen.generate_sample( + sample, schema, env.get_template("examples/sample.py.j2") + ), ) def test_generate_sample_config_fpaths(fs): - expected_path = 'cfgs/sample_config.yaml' + expected_path = "cfgs/sample_config.yaml" fs.create_file( expected_path, contents=dedent( - ''' + """ --- type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 samples: - service: 
google.cloud.language.v1.LanguageService - ''' - ) + """ + ), ) actual_paths = list(gapic_utils.generate_all_sample_fpaths(expected_path)) @@ -579,7 +594,7 @@ def test_generate_sample_config_fpaths(fs): def test_generate_sample_config_fpaths_not_yaml(fs): - expected_path = 'cfgs/sample_config.not_yaml' + expected_path = "cfgs/sample_config.not_yaml" fs.create_file(expected_path) with pytest.raises(types.InvalidConfig): @@ -587,19 +602,19 @@ def test_generate_sample_config_fpaths_not_yaml(fs): def test_generate_sample_config_fpaths_bad_contents( - fs, - # Note the typo: SampleConfigPronto - contents=dedent( - ''' + fs, + # Note the typo: SampleConfigPronto + contents=dedent( + """ --- type: com.google.api.codegen.SampleConfigPronto schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService - ''' - ) + """ + ), ): - expected_path = 'cfgs/sample_config.yaml' + expected_path = "cfgs/sample_config.yaml" fs.create_file(expected_path, contents=contents) with pytest.raises(types.InvalidConfig): @@ -610,14 +625,14 @@ def test_generate_sample_config_fpaths_bad_contents_old(fs): test_generate_sample_config_fpaths_bad_contents( fs, contents=dedent( - ''' + """ --- type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.1.0 samples: - service: google.cloud.language.v1.LanguageService - ''' - ) + """ + ), ) @@ -625,22 +640,22 @@ def test_generate_sample_config_fpaths_bad_contents_no_samples(fs): test_generate_sample_config_fpaths_bad_contents( fs, contents=dedent( - ''' + """ --- type: com.google.api.codegen.samplegen.v1p2.SampleConfigProto schema_version: 1.2.0 - ''' - ) + """ + ), ) def test_generate_sample_config_partial_config(fs): - expected_path = 'sample.yaml' + expected_path = "sample.yaml" fs.create_file( expected_path, # Note the typo: SampleConfigPronto contents=dedent( - ''' + """ --- # Note: not a valid config because of the type. 
type: com.google.api.codegen.samplegen.v1p2.SampleConfigPronto @@ -653,8 +668,8 @@ def test_generate_sample_config_partial_config(fs): schema_version: 1.2.0 samples: - service: google.cloud.language.v1.LanguageService - ''' - ) + """ + ), ) expected_paths = [expected_path] @@ -665,4 +680,4 @@ def test_generate_sample_config_partial_config(fs): def test_generate_sample_config_fpaths_no_such_file(fs): with pytest.raises(types.InvalidConfig): - list(gapic_utils.generate_all_sample_fpaths('cfgs/sample_config.yaml')) + list(gapic_utils.generate_all_sample_fpaths("cfgs/sample_config.yaml")) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py index 8357c711fece..2ffad3df1efa 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_manifest.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_manifest.py @@ -25,8 +25,10 @@ def test_generate_manifest(): fpath_to_dummy_sample = { "samples/squid_fpath.py": {"id": "squid_sample"}, - "samples/clam_fpath.py": {"id": "clam_sample", - "region_tag": "giant_clam_sample"}, + "samples/clam_fpath.py": { + "id": "clam_sample", + "region_tag": "giant_clam_sample", + }, } fname, info = manifest.generate( @@ -35,48 +37,44 @@ def test_generate_manifest(): # Empirically derived number such that the # corresponding time_struct tests the zero # padding in the returned filename. 
- manifest_time=4486525628 + manifest_time=4486525628, ) assert fname == "mollusc.v1.python.21120304.090708.manifest.yaml" - doc = gapic_yaml.Doc([ - gapic_yaml.KeyVal("type", "manifest/samples"), - gapic_yaml.KeyVal("schema_version", "3"), - gapic_yaml.Map(name="python", - anchor_name="python", - elements=[ - gapic_yaml.KeyVal( - "environment", "python"), - gapic_yaml.KeyVal( - "bin", "python3"), - gapic_yaml.KeyVal( - "base_path", "samples"), - gapic_yaml.KeyVal( - "invocation", "'{bin} {path} @args'"), - ]), - gapic_yaml.Collection(name="samples", - elements=[ - [ - gapic_yaml.Alias( - "python"), - gapic_yaml.KeyVal( - "sample", "squid_sample"), - gapic_yaml.KeyVal( - "path", "'{base_path}/squid_fpath.py'"), - gapic_yaml.Null, - ], - [ - gapic_yaml.Alias("python"), - gapic_yaml.KeyVal( - "sample", "clam_sample"), - gapic_yaml.KeyVal( - "path", "'{base_path}/clam_fpath.py'"), - gapic_yaml.KeyVal( - "region_tag", "giant_clam_sample") - ], - ]) - ]) + doc = gapic_yaml.Doc( + [ + gapic_yaml.KeyVal("type", "manifest/samples"), + gapic_yaml.KeyVal("schema_version", "3"), + gapic_yaml.Map( + name="python", + anchor_name="python", + elements=[ + gapic_yaml.KeyVal("environment", "python"), + gapic_yaml.KeyVal("bin", "python3"), + gapic_yaml.KeyVal("base_path", "samples"), + gapic_yaml.KeyVal("invocation", "'{bin} {path} @args'"), + ], + ), + gapic_yaml.Collection( + name="samples", + elements=[ + [ + gapic_yaml.Alias("python"), + gapic_yaml.KeyVal("sample", "squid_sample"), + gapic_yaml.KeyVal("path", "'{base_path}/squid_fpath.py'"), + gapic_yaml.Null, + ], + [ + gapic_yaml.Alias("python"), + gapic_yaml.KeyVal("sample", "clam_sample"), + gapic_yaml.KeyVal("path", "'{base_path}/clam_fpath.py'"), + gapic_yaml.KeyVal("region_tag", "giant_clam_sample"), + ], + ], + ), + ] + ) assert info == doc @@ -98,7 +96,8 @@ def test_generate_manifest(): sample: clam_sample path: '{base_path}/clam_fpath.py' region_tag: giant_clam_sample - """) + """ + ) rendered_yaml = doc.render() 
assert rendered_yaml == expected_rendering @@ -141,5 +140,5 @@ def test_generate_manifest_relative_path_quick_check(): with pytest.raises(types.InvalidSampleFpath): manifest.generate( {"molluscs/squid.py": {"id": "squid_sample"}}.items(), - DummyApiSchema(naming=DummyNaming(name="Mollusc", version="v1")) + DummyApiSchema(naming=DummyNaming(name="Mollusc", version="v1")), ) diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index a1d1e4d8208a..a884c262410a 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -150,8 +150,7 @@ def test_define_add_var(dummy_api_schema): v = samplegen.Validator( DummyMethod(output=message_factory("mollusc.name")), api_schema=dummy_api_schema ) - v.validate_response([{"define": "squid=$resp"}, { - "define": "name=squid.name"}]) + v.validate_response([{"define": "squid=$resp"}, {"define": "name=squid.name"}]) def test_define_bad_form(dummy_api_schema): @@ -169,8 +168,7 @@ def test_define_redefinition(dummy_api_schema): {"define": "molluscs=$resp.molluscs"}, ] v = samplegen.Validator( - DummyMethod(output=message_factory( - "$resp.molluscs", repeated_iter=[True])), + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), api_schema=dummy_api_schema, ) with pytest.raises(types.RedefinedVariable): @@ -347,8 +345,7 @@ def test_preprocess_sample_nested_message_field(): def test_preprocess_sample_void_method(): sample = {"service": "Mollusc", "rpc": "Classify"} api_schema = DummyApiSchema( - services={"Mollusc": DummyService( - methods={}, client_name="MolluscClient")}, + services={"Mollusc": DummyService(methods={}, client_name="MolluscClient")}, naming=DummyNaming( warehouse_package_name="mollusc-cephalopod-teuthida-", versioned_module_name="teuthida_v1", @@ -486,8 +483,7 @@ def test_print_undefined_var(dummy_api_schema): def 
test_comment(dummy_api_schema): comment = {"comment": ["This is a mollusc"]} - samplegen.Validator( - DummyMethod(), dummy_api_schema).validate_response([comment]) + samplegen.Validator(DummyMethod(), dummy_api_schema).validate_response([comment]) def test_comment_fmt_str(dummy_api_schema): @@ -527,8 +523,7 @@ def test_loop_collection(dummy_api_schema): "body": [{"print": ["Mollusc of class: %s", "m.class"]}], } } - OutputType = message_factory( - "$resp.molluscs.class", repeated_iter=[True, False]) + OutputType = message_factory("$resp.molluscs.class", repeated_iter=[True, False]) v = samplegen.Validator(DummyMethod(output=OutputType), dummy_api_schema) v.validate_response([loop]) @@ -545,8 +540,7 @@ def test_loop_collection_redefinition(dummy_api_schema): }, ] v = samplegen.Validator( - DummyMethod(output=message_factory( - "$resp.molluscs", repeated_iter=[True])), + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema, ) with pytest.raises(types.RedefinedVariable): @@ -601,8 +595,7 @@ def test_loop_collection_reserved_loop_var(dummy_api_schema): } } v = samplegen.Validator( - DummyMethod(output=message_factory( - "$resp.molluscs", repeated_iter=[True])), + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema, ) with pytest.raises(types.ReservedVariableName): @@ -625,8 +618,7 @@ def test_loop_map(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, type="MOLLUSC_TYPE") + message=DummyMessage(fields={}, type="MOLLUSC_TYPE") ), }, type="MOLLUSCS_TYPE", @@ -653,8 +645,7 @@ def test_collection_loop_lexical_scope_variable(dummy_api_schema): {"define": "cephalopod=m"}, ] v = samplegen.Validator( - DummyMethod(output=message_factory( - "$resp.molluscs", repeated_iter=[True])), + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema, ) with pytest.raises(types.UndefinedVariableReference): @@ -673,8 
+664,7 @@ def test_collection_loop_lexical_scope_inline(dummy_api_schema): {"define": "cephalopod=squid"}, ] v = samplegen.Validator( - DummyMethod(output=message_factory( - "$resp.molluscs", repeated_iter=[True])), + DummyMethod(output=message_factory("$resp.molluscs", repeated_iter=[True])), dummy_api_schema, ) with pytest.raises(types.UndefinedVariableReference): @@ -702,8 +692,7 @@ def test_map_loop_lexical_scope_key(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, type="MOLLUSC_TYPE") + message=DummyMessage(fields={}, type="MOLLUSC_TYPE") ), }, type="MOLLUSCS_TYPE", @@ -741,8 +730,7 @@ def test_map_loop_lexical_scope_value(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, type="MOLLUSC_TYPE") + message=DummyMessage(fields={}, type="MOLLUSC_TYPE") ), }, type="MOLLUSCS_TYPE", @@ -780,8 +768,7 @@ def test_map_loop_lexical_scope_inline(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, type="MOLLUSC_TYPE") + message=DummyMessage(fields={}, type="MOLLUSC_TYPE") ), }, type="MOLLUSCS_TYPE", @@ -814,8 +801,7 @@ def test_loop_map_reserved_key(dummy_api_schema): fields={ "key": DummyField(), "value": DummyField( - message=DummyMessage( - fields={}, type="MOLLUSC_TYPE") + message=DummyMessage(fields={}, type="MOLLUSC_TYPE") ), }, type="MOLLUSCS_TYPE", @@ -1102,8 +1088,7 @@ def test_validate_write_file(dummy_api_schema): def test_validate_write_file_fname_fmt(dummy_api_schema): statements = [ - {"write_file": {"filename": [ - "specimen-%s"], "contents": "$resp.photo"}} + {"write_file": {"filename": ["specimen-%s"], "contents": "$resp.photo"}} ] v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.MismatchedFormatSpecifier): @@ -1138,8 +1123,7 @@ def test_validate_write_file_missing_fname(dummy_api_schema): def test_validate_write_file_missing_contents(dummy_api_schema): - 
statements = [ - {"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] + statements = [{"write_file": {"filename": ["specimen-%s", "$resp.species"]}}] OutputType = DummyMessage( fields={ "species": DummyField(message=DummyMessage(fields={})), @@ -1230,8 +1214,7 @@ def test_validate_request_basic(dummy_api_schema): samplegen.AttributeRequestSetup( field="mantle_mass", value='"10 kg"' ), - samplegen.AttributeRequestSetup( - field="num_tentacles", value=10), + samplegen.AttributeRequestSetup(field="num_tentacles", value=10), ], single=None, ) @@ -1246,15 +1229,13 @@ def test_validate_request_no_field_parameter(dummy_api_schema): v = samplegen.Validator(DummyMethod(), dummy_api_schema) with pytest.raises(types.InvalidRequestSetup): v.validate_and_transform_request( - types.CallingForm.Request, [ - {"squid": "humboldt", "value": "teuthida"}] + types.CallingForm.Request, [{"squid": "humboldt", "value": "teuthida"}] ) def test_validate_request_no_such_attribute(dummy_api_schema): v = samplegen.Validator( - DummyMethod(input=message_factory( - "mollusc.squid.mantle")), dummy_api_schema + DummyMethod(input=message_factory("mollusc.squid.mantle")), dummy_api_schema ) with pytest.raises(types.BadAttributeLookup): v.validate_and_transform_request( @@ -1288,8 +1269,7 @@ def test_validate_request_missing_keyword(dummy_api_schema, kword="field"): DummyMethod(input=message_factory("mollusc.squid")), dummy_api_schema ) with pytest.raises(types.InvalidRequestSetup): - v.validate_and_transform_request( - types.CallingForm.Request, [{kword: "squid"}]) + v.validate_and_transform_request(types.CallingForm.Request, [{kword: "squid"}]) def test_validate_request_missing_value(dummy_api_schema): @@ -1314,8 +1294,7 @@ def test_validate_request_unknown_field_type(dummy_api_schema): ) with pytest.raises(TypeError): v.validate_and_transform_request( - types.CallingForm.Request, [ - {"field": "squid", "value": "humboldt"}] + types.CallingForm.Request, [{"field": "squid", "value": 
"humboldt"}] ) @@ -1362,8 +1341,7 @@ def test_validate_request_multiple_arguments(dummy_api_schema): actual = v.validate_and_transform_request( types.CallingForm.Request, [ - {"field": "squid.mantle_length", - "value": "100 cm", "value_is_file": True}, + {"field": "squid.mantle_length", "value": "100 cm", "value_is_file": True}, { "field": "clam.shell_mass", "value": "100 kg", @@ -1481,8 +1459,7 @@ def test_validate_request_calling_form(): == types.CallingForm.RequestStreamingServer ) - assert types.CallingForm.method_default( - DummyMethod()) == types.CallingForm.Request + assert types.CallingForm.method_default(DummyMethod()) == types.CallingForm.Request assert ( types.CallingForm.method_default( @@ -1520,8 +1497,7 @@ def test_lro_response_type(dummy_api_schema): OutputType = TypeVar("OutputType") LroType = TypeVar("LroType") method = DummyMethod( - output=OutputType, lro=namedtuple( - "operation", ["response_type"])(LroType) + output=OutputType, lro=namedtuple("operation", ["response_type"])(LroType) ) v = samplegen.Validator(method, dummy_api_schema) @@ -1567,8 +1543,7 @@ def test_validate_expression_non_indexed_non_terminal_repeated(dummy_api_schema) v = samplegen.Validator(method, dummy_api_schema) with pytest.raises(types.BadAttributeLookup): - v.validate_response( - [{"define": "octopus=$resp.coleoidea.octopodiformes"}]) + v.validate_response([{"define": "octopus=$resp.coleoidea.octopodiformes"}]) def test_validate_expression_collection(dummy_api_schema): @@ -1853,8 +1828,7 @@ def test_validate_request_enum_top_level(dummy_api_schema): v = samplegen.Validator(DummyMethod(input=request_type), dummy_api_schema) actual = v.validate_and_transform_request( - types.CallingForm.Request, [ - {"field": "subclass", "value": "COLEOIDEA"}] + types.CallingForm.Request, [{"field": "subclass", "value": "COLEOIDEA"}] ) expected = samplegen.FullRequest( request_list=[ @@ -1872,8 +1846,7 @@ def test_validate_request_enum_invalid_value(dummy_api_schema): enum = 
enum_factory("subclass", ["AMMONOIDEA", "COLEOIDEA", "NAUTILOIDEA"]) request_type = message_factory("mollusc.cephalopod.subclass", enum=enum) v = samplegen.Validator( - DummyMethod(output=message_factory( - "mollusc_result"), input=request_type), + DummyMethod(output=message_factory("mollusc_result"), input=request_type), dummy_api_schema, ) with pytest.raises(types.InvalidEnumVariant): @@ -1901,8 +1874,7 @@ def test_validate_request_enum_not_last_attr(dummy_api_schema): # request_type = message_factory("mollusc.subclass", enum=enum) v = samplegen.Validator( - DummyMethod(output=message_factory( - "mollusc_result"), input=request_type), + DummyMethod(output=message_factory("mollusc_result"), input=request_type), dummy_api_schema, ) with pytest.raises(types.NonTerminalPrimitiveOrEnum): @@ -1957,8 +1929,7 @@ def test_validate_request_resource_name(): v = samplegen.Validator(method=method, api_schema=api_schema) - actual = v.validate_and_transform_request( - types.CallingForm.Request, request) + actual = v.validate_and_transform_request(types.CallingForm.Request, request) expected = samplegen.FullRequest( request_list=[ @@ -1990,20 +1961,17 @@ def test_validate_request_primitive_field(dummy_api_schema): request = [{"field": "species", "value": "Architeuthis dux"}] v = samplegen.Validator( - DummyMethod(output=message_factory( - "mollusc_result"), input=request_type), + DummyMethod(output=message_factory("mollusc_result"), input=request_type), dummy_api_schema, ) - actual = v.validate_and_transform_request( - types.CallingForm.Request, request) + actual = v.validate_and_transform_request(types.CallingForm.Request, request) expected = samplegen.FullRequest( request_list=[ samplegen.TransformedRequest( base="species", body=None, - single=samplegen.AttributeRequestSetup( - value='"Architeuthis dux"'), + single=samplegen.AttributeRequestSetup(value='"Architeuthis dux"'), ) ] ) @@ -2134,8 +2102,7 @@ def test_validate_request_non_terminal_primitive_field(dummy_api_schema): 
request = [{"field": "species.nomenclature", "value": "Architeuthis dux"}] v = samplegen.Validator( - DummyMethod(output=message_factory( - "mollusc_result"), input=request_type), + DummyMethod(output=message_factory("mollusc_result"), input=request_type), dummy_api_schema, ) @@ -2270,7 +2237,7 @@ def test_generate_sample_spec_internal_method(): name="NotRamshorn", input_type="animalia.mollusca.v1.MolluscRequest", output_type="animalia.mollusca.v1.Mollusc", - ) + ), ], ), descriptor_pb2.ServiceDescriptorProto( @@ -2290,9 +2257,7 @@ def test_generate_sample_spec_internal_method(): # The internal method should be animalia.mollusca.v1.Squid.NotRamshorn service_yaml = { - "apis": [ - {"name": "animalia.mollusca.v1"} - ], + "apis": [{"name": "animalia.mollusca.v1"}], "publishing": { "library_settings": [ { @@ -2303,11 +2268,11 @@ def test_generate_sample_spec_internal_method(): "selective_gapic_generation": { "methods": [ "animalia.mollusca.v1.Squid.Ramshorn", - "animalia.mollusca.v1.Octopus.Bighead" + "animalia.mollusca.v1.Octopus.Bighead", ], - "generate_omitted_as_internal": True + "generate_omitted_as_internal": True, } - } + }, }, } ] @@ -2316,8 +2281,7 @@ def test_generate_sample_spec_internal_method(): api_opts = Options(service_yaml_config=service_yaml) - api_schema = api.API.build( - file_descriptors, "animalia.mollusca.v1", opts=api_opts) + api_schema = api.API.build(file_descriptors, "animalia.mollusca.v1", opts=api_opts) samplegen_opts = Options.build("transport=rest") @@ -2333,21 +2297,21 @@ def test_generate_sample_spec_internal_method(): "transport": "rest", "service": "animalia.mollusca.v1.Octopus", "region_tag": "example_v1_generated_Octopus_Bighead_sync", - "description": "Snippet for bighead" + "description": "Snippet for bighead", }, { "rpc": "Ramshorn", "transport": "rest", "service": "animalia.mollusca.v1.Squid", "region_tag": "example_v1_generated_Squid_Ramshorn_sync", - "description": "Snippet for ramshorn" + "description": "Snippet for 
ramshorn", }, { "rpc": "_NotRamshorn", "transport": "rest", "service": "animalia.mollusca.v1.Squid", "region_tag": "example_v1_generated_Squid__NotRamshorn_sync_internal", - "description": "Snippet for _not_ramshorn" + "description": "Snippet for _not_ramshorn", }, ] @@ -2415,10 +2379,7 @@ def test__set_sample_metadata_server_streaming(): }, {"type": "google.api_core.retry.Retry", "name": "retry"}, {"type": "float", "name": "timeout"}, - { - "type": "Sequence[Tuple[str, Union[str, bytes]]]", - "name": "metadata" - }, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, ], "resultType": "Iterable[animalia.mollusca_v1.types.Mollusc]", "client": { @@ -2502,10 +2463,7 @@ def test__set_sample_metadata_client_streaming(): }, {"type": "google.api_core.retry.Retry", "name": "retry"}, {"type": "float", "name": "timeout"}, - { - "type": "Sequence[Tuple[str, Union[str, bytes]]]", - "name": "metadata" - }, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, ], "resultType": "animalia.mollusca_v1.types.Mollusc", "client": { diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index 0c9ae262d790..c480adf9200d 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -75,7 +75,7 @@ def test_snippet_init(sample_str): {"end": 22, "start": 9, "type": "REQUEST_INITIALIZATION"}, {"end": 25, "start": 23, "type": "REQUEST_EXECUTION"}, {"end": 29, "start": 26, "type": "RESPONSE_HANDLING"}, - ] + ], } # This is the same as the sample_str above, minus the # [START ...] 
@@ -113,55 +113,59 @@ def sample_classify(video, location): def test_add_snippet_no_matching_service(sample_str): - snippet_metadata = snippet_metadata_pb2.Snippet( - ) + snippet_metadata = snippet_metadata_pb2.Snippet() snippet_metadata.client_method.method.service.short_name = "Clam" snippet = snippet_index.Snippet(sample_str, snippet_metadata) # No 'Clam' service in API Schema - index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService(name="Squid", methods={})}, - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + services={"Squid": DummyService(name="Squid", methods={})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + ) + ) with pytest.raises(types.UnknownService): index.add_snippet(snippet) def test_add_snippet_no_matching_rpc(sample_str): - snippet_metadata = snippet_metadata_pb2.Snippet( - ) + snippet_metadata = snippet_metadata_pb2.Snippet() snippet_metadata.client_method.method.service.short_name = "Squid" snippet_metadata.client_method.short_name = "classify" snippet = snippet_index.Snippet(sample_str, snippet_metadata) # No 'classify' method in 'Squid' service - index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService(name="Squid", methods={"list": None})}, - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + services={"Squid": DummyService(name="Squid", methods={"list": None})}, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + ) + ) with pytest.raises(types.RpcMethodNotFound): index.add_snippet(snippet) def test_get_snippet_no_matching_service(): 
- index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})} - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + services={ + "Squid": DummyService(name="Squid", methods={"classify": DummyMethod()}) + }, + ) + ) # No 'Clam' service in API Schema with pytest.raises(types.UnknownService): @@ -169,15 +173,18 @@ def test_get_snippet_no_matching_service(): def test_get_snippet_no_matching_rpc(): - index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})}, - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + services={ + "Squid": DummyService(name="Squid", methods={"classify": DummyMethod()}) + }, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + ) + ) # No 'list' RPC in 'Squid' service with pytest.raises(types.RpcMethodNotFound): @@ -190,15 +197,18 @@ def test_add_and_get_snippet_sync(sample_str): snippet_metadata.client_method.method.short_name = "classify" snippet = snippet_index.Snippet(sample_str, snippet_metadata) - index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})}, - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + services={ + "Squid": DummyService(name="Squid", 
methods={"classify": DummyMethod()}) + }, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + ) + ) index.add_snippet(snippet) @@ -212,15 +222,18 @@ def test_add_and_get_snippet_async(sample_str): setattr(snippet_metadata.client_method, "async", True) snippet = snippet_index.Snippet(sample_str, snippet_metadata) - index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})}, - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + services={ + "Squid": DummyService(name="Squid", methods={"classify": DummyMethod()}) + }, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + ) + ) index.add_snippet(snippet) @@ -233,73 +246,45 @@ def test_get_metadata_json(sample_str): snippet_metadata.client_method.method.short_name = "classify" snippet = snippet_index.Snippet(sample_str, snippet_metadata) - index = snippet_index.SnippetIndex(api_schema=DummyApiSchema( - services={"Squid": DummyService( - name="Squid", methods={"classify": DummyMethod()})}, - naming=DummyNaming( - proto_package="google.mollusca", - warehouse_package_name="google-mollusca", - version="v1" - ), - )) + index = snippet_index.SnippetIndex( + api_schema=DummyApiSchema( + services={ + "Squid": DummyService(name="Squid", methods={"classify": DummyMethod()}) + }, + naming=DummyNaming( + proto_package="google.mollusca", + warehouse_package_name="google-mollusca", + version="v1", + ), + ) + ) index.add_snippet(snippet) print(index.get_metadata_json()) assert json.loads(index.get_metadata_json()) == { "clientLibrary": { - "apis": [ - { - "id": "google.mollusca", - "version": "v1" - } - ], + "apis": [{"id": "google.mollusca", "version": "v1"}], 
"language": "PYTHON", "name": "google-mollusca", - "version": "0.1.0" - }, + "version": "0.1.0", + }, "snippets": [ { "clientMethod": { "method": { - "service": { - "shortName": "Squid" - }, - "shortName": "classify" - } - }, + "service": {"shortName": "Squid"}, + "shortName": "classify", + } + }, "segments": [ - { - "end": 28, - "start": 2, - "type": "FULL" - }, - { - "end": 28, - "start": 2, - "type": "SHORT" - }, - { - "end": 8, - "start": 6, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 22, - "start": 9, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 25, - "start": 23, - "type": "REQUEST_EXECUTION" - }, - { - "end": 29, - "start": 26, - "type": "RESPONSE_HANDLING" - } - ] - } - ] - } + {"end": 28, "start": 2, "type": "FULL"}, + {"end": 28, "start": 2, "type": "SHORT"}, + {"end": 8, "start": 6, "type": "CLIENT_INITIALIZATION"}, + {"end": 22, "start": 9, "type": "REQUEST_INITIALIZATION"}, + {"end": 25, "start": 23, "type": "REQUEST_EXECUTION"}, + {"end": 29, "start": 26, "type": "RESPONSE_HANDLING"}, + ], + } + ], + } diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index a5405b18b0f8..b63cffbe3616 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -33,23 +33,32 @@ def check_template(template_fragment, expected_output, **kwargs): expected_output = dedent(expected_output) env = jinja2.Environment( loader=jinja2.ChoiceLoader( - [jinja2.FileSystemLoader( - searchpath=path.realpath(path.join(path.dirname(__file__), - "..", "..", "..", - "gapic", "templates", "examples"))), - jinja2.DictLoader( - {"template_fragment": dedent(template_fragment)}), - ]), - + [ + jinja2.FileSystemLoader( + searchpath=path.realpath( + path.join( + path.dirname(__file__), + "..", + "..", + "..", + "gapic", + "templates", + "examples", + ) + ) + ), + jinja2.DictLoader({"template_fragment": 
dedent(template_fragment)}), + ] + ), undefined=jinja2.StrictUndefined, extensions=["jinja2.ext.do"], trim_blocks=True, - lstrip_blocks=True + lstrip_blocks=True, ) - env.filters['snake_case'] = utils.to_snake_case - env.filters['coerce_response_name'] = sample_utils.coerce_response_name - env.filters['render_format_string'] = sample_utils.render_format_string + env.filters["snake_case"] = utils.to_snake_case + env.filters["coerce_response_name"] = sample_utils.coerce_response_name + env.filters["render_format_string"] = sample_utils.render_format_string template = env.get_template("template_fragment") text = template.render(**kwargs) @@ -60,62 +69,64 @@ def check_template(template_fragment, expected_output, **kwargs): def test_render_attr_value(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_attr("mollusc", request) }} - ''', - ''' + """, + """ mollusc.order = Molluscs.Cephalopoda.Coleoidea - ''', + """, request=samplegen.AttributeRequestSetup( - field="order", - value="Molluscs.Cephalopoda.Coleoidea" - ) + field="order", value="Molluscs.Cephalopoda.Coleoidea" + ), ) def test_render_attr_input_parameter(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_attr("squid", request) }} - ''', - ''' + """, + """ # species = 'Humboldt' squid.species = species - ''', - request=samplegen.AttributeRequestSetup(field="species", - value="'Humboldt'", - input_parameter="species")) + """, + request=samplegen.AttributeRequestSetup( + field="species", value="'Humboldt'", input_parameter="species" + ), + ) def test_render_attr_file(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_attr("classify_mollusc_request", request) }} - ''', - ''' + """, + """ # mollusc_video_path = 'path/to/mollusc/video.mkv' with open(mollusc_video_path, "rb") as f: classify_mollusc_request.mollusc_video = f.read() - ''', - 
request=samplegen.AttributeRequestSetup(field="mollusc_video", - value="'path/to/mollusc/video.mkv'", - input_parameter="mollusc_video_path", - value_is_file=True) + """, + request=samplegen.AttributeRequestSetup( + field="mollusc_video", + value="'path/to/mollusc/video.mkv'", + input_parameter="mollusc_video_path", + value_is_file=True, + ), ) def test_render_request_basic(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_setup(request, module_name, request_type) }} - ''', - ''' + """, + """ # Initialize request argument(s) cephalopod = mollusca.Cephalopod() @@ -137,46 +148,49 @@ def test_render_request_basic(): with open(movie_path, "rb") as f: gastropod.movie = f.read() - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="cephalopod", - body=[ - samplegen.AttributeRequestSetup( - field="mantle_mass", - value="'10 kg'", - input_parameter="cephalopod_mass" - ), - samplegen.AttributeRequestSetup( - field="photo", - value="'path/to/cephalopod/photo.jpg'", - input_parameter="photo_path", - value_is_file=True - ), - samplegen.AttributeRequestSetup( - field="order", - value="Molluscs.Cephalopoda.Coleoidea"), - ], - single=None), - samplegen.TransformedRequest(base="gastropod", - body=[ - samplegen.AttributeRequestSetup( - field="mantle_mass", - value="'1 kg'", - input_parameter="gastropod_mass" - ), - samplegen.AttributeRequestSetup( - field="order", - value="Molluscs.Gastropoda.Pulmonata" - ), - samplegen.AttributeRequestSetup( - field="movie", - value="'path/to/gastropod/movie.mkv'", - input_parameter="movie_path", - value_is_file=True - ) - ], - single=None), + samplegen.TransformedRequest( + base="cephalopod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="'10 kg'", + input_parameter="cephalopod_mass", + ), + samplegen.AttributeRequestSetup( + field="photo", + value="'path/to/cephalopod/photo.jpg'", + input_parameter="photo_path", + 
value_is_file=True, + ), + samplegen.AttributeRequestSetup( + field="order", value="Molluscs.Cephalopoda.Coleoidea" + ), + ], + single=None, + ), + samplegen.TransformedRequest( + base="gastropod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="'1 kg'", + input_parameter="gastropod_mass", + ), + samplegen.AttributeRequestSetup( + field="order", value="Molluscs.Gastropoda.Pulmonata" + ), + samplegen.AttributeRequestSetup( + field="movie", + value="'path/to/gastropod/movie.mkv'", + input_parameter="movie_path", + value_is_file=True, + ), + ], + single=None, + ), ], flattenable=True, ), @@ -185,24 +199,24 @@ def test_render_request_basic(): fields={ "cephalopod": common_types.DummyField( name="cephalopod", - type=common_types.DummyMessageTypePB(name="Cephalopod") + type=common_types.DummyMessageTypePB(name="Cephalopod"), ), "gastropod": common_types.DummyField( name="gastropod", - type=common_types.DummyMessageTypePB(name="Gastropod") - ) + type=common_types.DummyMessageTypePB(name="Gastropod"), + ), } - ) + ), ) def test_render_request_unflattened(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_setup(request, module_name, request_type, calling_form, calling_form_enum) }} - ''', - ''' + """, + """ # Initialize request argument(s) cephalopod = mollusca.Cephalopod() @@ -229,51 +243,56 @@ def test_render_request_unflattened(): gastropod=gastropod, bivalve="humboldt", ) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="cephalopod", - body=[ - samplegen.AttributeRequestSetup( - field="mantle_mass", - value="'10 kg'", - input_parameter="cephalopod_mass" - ), - samplegen.AttributeRequestSetup( - field="photo", - value="'path/to/cephalopod/photo.jpg'", - input_parameter="photo_path", - value_is_file=True - ), - samplegen.AttributeRequestSetup( - field="order", - value="Molluscs.Cephalopoda.Coleoidea"), - ], - single=None), - 
samplegen.TransformedRequest(base="gastropod", - body=[ - samplegen.AttributeRequestSetup( - field="mantle_mass", - value="'1 kg'", - input_parameter="gastropod_mass" - ), - samplegen.AttributeRequestSetup( - field="order", - value="Molluscs.Gastropoda.Pulmonata" - ), - samplegen.AttributeRequestSetup( - field="movie", - value="'path/to/gastropod/movie.mkv'", - input_parameter="movie_path", - value_is_file=True - ) - ], - single=None), - samplegen.TransformedRequest(base="bivalve", - body=None, - single=samplegen.AttributeRequestSetup( - value='"humboldt"', - ),), + samplegen.TransformedRequest( + base="cephalopod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="'10 kg'", + input_parameter="cephalopod_mass", + ), + samplegen.AttributeRequestSetup( + field="photo", + value="'path/to/cephalopod/photo.jpg'", + input_parameter="photo_path", + value_is_file=True, + ), + samplegen.AttributeRequestSetup( + field="order", value="Molluscs.Cephalopoda.Coleoidea" + ), + ], + single=None, + ), + samplegen.TransformedRequest( + base="gastropod", + body=[ + samplegen.AttributeRequestSetup( + field="mantle_mass", + value="'1 kg'", + input_parameter="gastropod_mass", + ), + samplegen.AttributeRequestSetup( + field="order", value="Molluscs.Gastropoda.Pulmonata" + ), + samplegen.AttributeRequestSetup( + field="movie", + value="'path/to/gastropod/movie.mkv'", + input_parameter="movie_path", + value_is_file=True, + ), + ], + single=None, + ), + samplegen.TransformedRequest( + base="bivalve", + body=None, + single=samplegen.AttributeRequestSetup( + value='"humboldt"', + ), + ), ] ), module_name="mollusca", @@ -281,14 +300,14 @@ def test_render_request_unflattened(): fields={ "cephalopod": common_types.DummyField( name="cephalopod", - type=common_types.DummyMessageTypePB(name="Cephalopod") + type=common_types.DummyMessageTypePB(name="Cephalopod"), ), "gastropod": common_types.DummyField( name="gastropod", - 
type=common_types.DummyMessageTypePB(name="Gastropod") - ) + type=common_types.DummyMessageTypePB(name="Gastropod"), + ), }, - ident=common_types.DummyIdent(name="CreateMolluscRequest") + ident=common_types.DummyIdent(name="CreateMolluscRequest"), ), calling_form_enum=CallingForm, calling_form=CallingForm.Request, @@ -297,17 +316,17 @@ def test_render_request_unflattened(): def test_render_request_resource_name(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_setup(request) }} - ''', - ''' + """, + """ # Initialize request argument(s) kingdom = "animalia" phylum = mollusca taxon = f"kingdom/{kingdom}/phylum/{phylum}" - ''', + """, request=samplegen.FullRequest( request_list=[ samplegen.TransformedRequest( @@ -322,269 +341,285 @@ def test_render_request_resource_name(): field="phylum", value="mollusca", input_parameter="mollusca", - ) + ), ], - pattern="kingdom/{kingdom}/phylum/{phylum}" + pattern="kingdom/{kingdom}/phylum/{phylum}", ), ], - flattenable=True + flattenable=True, ), ) def test_render_print(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_print(["Mollusc"]) }} - ''', - ''' + """, + """ print("Mollusc") - ''' + """, ) def test_render_print_args(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_print(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} - ''', - ''' + """, + """ print(f"$resp {response.squids} {response.clams}") - ''' + """, ) def test_render_comment(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_comment(["Mollusc"]) }} - ''', - ''' + """, + """ # Mollusc - ''' + """, ) def test_render_comment_args(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_comment(["$resp %s %s", "$resp.squids", "$resp.clams"]) }} - ''', - ''' + """, + """ # $resp response.squids response.clams - ''' + """, ) def test_define(): check_template( - 
''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_define("squid=humboldt") }} - ''', - ''' + """, + """ squid = humboldt - ''' + """, ) def test_define_resp(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_define("squid=$resp.squid") }} - ''', - ''' + """, + """ squid = response.squid - ''' + """, ) def test_dispatch_print(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.dispatch_statement({"print" : ["Squid"] }) }} - ''', - ''' + """, + """ print("Squid") - ''' + """, ) def test_dispatch_define(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.dispatch_statement({"define": "squid=humboldt"})}} - ''', - ''' + """, + """ squid = humboldt - ''' + """, ) def test_dispatch_comment(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.dispatch_statement({"comment" : ["Squid"] }) }} - ''', - ''' + """, + """ # Squid - ''' + """, ) def test_write_file(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_write_file({"filename": ["specimen-%s", "$resp.species"], "contents": "$resp.photo"}) }} - ''', - ''' + """, + """ with open(f"specimen-{response.species}", "wb") as f: f.write(response.photo) - ''' + """, ) def test_dispatch_write_file(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.dispatch_statement({"write_file": {"filename": ["specimen-%s", "$resp.species"], "contents": "$resp.photo"}})}} - ''', - ''' + """, + """ with open(f"specimen-{response.species}", "wb") as f: f.write(response.photo) - ''' + """, ) def test_collection_loop(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_collection_loop(collection) }} - ''', - ''' + """, + """ for m in response.molluscs: print(f"Mollusc: {m}") - ''', - collection={"collection": "$resp.molluscs", - "variable": "m", - "body": [{"print": ["Mollusc: 
%s", "m"]}]} + """, + collection={ + "collection": "$resp.molluscs", + "variable": "m", + "body": [{"print": ["Mollusc: %s", "m"]}], + }, ) def test_dispatch_collection_loop(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} - {{ frags.dispatch_statement(statement) }}''', - ''' + {{ frags.dispatch_statement(statement) }}""", + """ for m in molluscs: print(f"Mollusc: {m}") - ''', - statement={"loop": {"collection": "molluscs", - "variable": "m", - "body": [{"print": ["Mollusc: %s", "m"]}]}} + """, + statement={ + "loop": { + "collection": "molluscs", + "variable": "m", + "body": [{"print": ["Mollusc: %s", "m"]}], + } + }, ) def test_map_loop(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_map_loop(map_loop) - }}''', - ''' + }}""", + """ for cls, example in response.molluscs.items(): print(f"A {example} is a {cls}") - ''', - map_loop={"map": "$resp.molluscs", - "key": "cls", - "value": "example", - "body": [{"print": ["A %s is a %s", "example", "cls"]}]} + """, + map_loop={ + "map": "$resp.molluscs", + "key": "cls", + "value": "example", + "body": [{"print": ["A %s is a %s", "example", "cls"]}], + }, ) def test_map_loop_no_key(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_map_loop(map_loop) }} - ''', - ''' + """, + """ for example in response.molluscs.values(): print(f"A {example} is a mollusc") - ''', - map_loop={"map": "$resp.molluscs", - "value": "example", - "body": [{"print": ["A %s is a mollusc", "example"]}]} + """, + map_loop={ + "map": "$resp.molluscs", + "value": "example", + "body": [{"print": ["A %s is a mollusc", "example"]}], + }, ) def test_map_loop_no_value(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_map_loop(map_loop) }} - ''', - ''' + """, + """ for cls in response.molluscs.keys(): print(f"A {cls} is a mollusc") - ''', - map_loop={"map": "$resp.molluscs", - "key": "cls", - "body": 
[{"print": ["A %s is a mollusc", "cls"]}]} + """, + map_loop={ + "map": "$resp.molluscs", + "key": "cls", + "body": [{"print": ["A %s is a mollusc", "cls"]}], + }, ) def test_dispatch_map_loop(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.dispatch_statement(statement) }} - ''', - ''' + """, + """ for cls, example in molluscs.items(): print(f"A {example} is a {cls}") - ''', - statement={"loop": {"map": "molluscs", - "key": "cls", - "value": "example", - "body": [{"print": ["A %s is a %s", "example", "cls"]}]}} + """, + statement={ + "loop": { + "map": "molluscs", + "key": "cls", + "value": "example", + "body": [{"print": ["A %s is a %s", "example", "cls"]}], + } + }, ) @@ -633,7 +668,7 @@ def test_render_nested_loop_collection(): """, - statement=statement + statement=statement, ) @@ -658,13 +693,13 @@ def test_render_nested_loop_map(): "map": "families", "key": "family", "value": "ex", - "body": [{"print": ["Example: %s", "ex"]}] + "body": [{"print": ["Example: %s", "ex"]}], } } - ] + ], } } - ] + ], } } @@ -686,107 +721,113 @@ def test_render_nested_loop_map(): """, - statement=statement + statement=statement, ) def test_print_input_params(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.print_input_params(request) }} - ''', - ''' + """, + """ mass, length, color - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="squid", - body=[ - samplegen.AttributeRequestSetup( - field="mass", - value="10 kg", - input_parameter="mass" - ), - samplegen.AttributeRequestSetup( - field="length", - value="20 m", - input_parameter="length" - ) - ], - single=None), - samplegen.TransformedRequest(base="diameter", - single=samplegen.AttributeRequestSetup( - value="10 cm" - ), - body=None), - samplegen.TransformedRequest(base="color", - single=samplegen.AttributeRequestSetup( - value="red", - input_parameter="color" - ), - body=None), + samplegen.TransformedRequest( + 
base="squid", + body=[ + samplegen.AttributeRequestSetup( + field="mass", value="10 kg", input_parameter="mass" + ), + samplegen.AttributeRequestSetup( + field="length", value="20 m", input_parameter="length" + ), + ], + single=None, + ), + samplegen.TransformedRequest( + base="diameter", + single=samplegen.AttributeRequestSetup(value="10 cm"), + body=None, + ), + samplegen.TransformedRequest( + base="color", + single=samplegen.AttributeRequestSetup( + value="red", input_parameter="color" + ), + body=None, + ), ] - ) + ), ) -CALLING_FORM_TEMPLATE_TEST_STR = ''' +CALLING_FORM_TEMPLATE_TEST_STR = """ {% import "feature_fragments.j2" as frags %} {{ frags.render_calling_form("TEST_INVOCATION_TXT", calling_form, calling_form_enum, transport, [{"print": ["Test print statement"]}]) }} - ''' + """ def test_render_calling_form_request(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request response = TEST_INVOCATION_TXT # Handle the response print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.Request, - transport="grpc") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.Request, + transport="grpc", + ) def test_render_calling_form_paged_all(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request page_result = TEST_INVOCATION_TXT # Handle the response for response in page_result: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestPagedAll, - transport="grpc") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPagedAll, + transport="grpc", + ) def test_render_calling_form_paged_all_async(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request page_result = TEST_INVOCATION_TXT # Handle the response async for 
response in page_result: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestPagedAll, - transport="grpc-async") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPagedAll, + transport="grpc-async", + ) def test_render_calling_form_paged(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request page_result = TEST_INVOCATION_TXT @@ -794,15 +835,17 @@ def test_render_calling_form_paged(): for page in page_result.pages(): for response in page: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestPaged, - transport="grpc") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPaged, + transport="grpc", + ) def test_render_calling_form_paged_async(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request page_result = TEST_INVOCATION_TXT @@ -810,75 +853,85 @@ def test_render_calling_form_paged_async(): async for page in page_result.pages(): for response in page: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestPaged, - transport="grpc-async") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestPaged, + transport="grpc-async", + ) def test_render_calling_form_streaming_server(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request stream = TEST_INVOCATION_TXT # Handle the response for response in stream: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingServer, - transport="grpc") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingServer, + transport="grpc", + ) def test_render_calling_form_streaming_server_async(): - 
check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request stream = TEST_INVOCATION_TXT # Handle the response async for response in stream: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingServer, - transport="grpc-async") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingServer, + transport="grpc-async", + ) def test_render_calling_form_streaming_bidi(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request stream = TEST_INVOCATION_TXT # Handle the response for response in stream: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingBidi, - transport="grpc") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc", + ) def test_render_calling_form_streaming_bidi_async(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request stream = TEST_INVOCATION_TXT # Handle the response async for response in stream: print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.RequestStreamingBidi, - transport="grpc-async") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc-async", + ) def test_render_calling_form_longrunning(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request operation = TEST_INVOCATION_TXT @@ -888,15 +941,17 @@ def test_render_calling_form_longrunning(): # Handle the response print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.LongRunningRequestPromise, - transport="grpc") + """, + calling_form_enum=CallingForm, + 
calling_form=CallingForm.LongRunningRequestPromise, + transport="grpc", + ) def test_render_calling_form_longrunning_async(): - check_template(CALLING_FORM_TEMPLATE_TEST_STR, - ''' + check_template( + CALLING_FORM_TEMPLATE_TEST_STR, + """ # Make the request operation = TEST_INVOCATION_TXT @@ -906,146 +961,119 @@ def test_render_calling_form_longrunning_async(): # Handle the response print("Test print statement") - ''', - calling_form_enum=CallingForm, - calling_form=CallingForm.LongRunningRequestPromise, - transport="grpc-async") + """, + calling_form_enum=CallingForm, + calling_form=CallingForm.LongRunningRequestPromise, + transport="grpc-async", + ) def test_render_method_call_basic(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ client.categorize_mollusc(request=request) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="video", - body=True, - single=None), - samplegen.TransformedRequest(base="audio", - body=True, - single=None), - samplegen.TransformedRequest(base="guess", - body=True, - single=None) + samplegen.TransformedRequest(base="video", body=True, single=None), + samplegen.TransformedRequest(base="audio", body=True, single=None), + samplegen.TransformedRequest(base="guess", body=True, single=None), ], ), calling_form_enum=CallingForm, calling_form=CallingForm.Request, - transport="grpc" + transport="grpc", ) def test_render_method_call_basic_async(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ await client.categorize_mollusc(request=request) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="video", - 
body=True, - single=None), - samplegen.TransformedRequest(base="audio", - body=True, - single=None), - samplegen.TransformedRequest(base="guess", - body=True, - single=None) + samplegen.TransformedRequest(base="video", body=True, single=None), + samplegen.TransformedRequest(base="audio", body=True, single=None), + samplegen.TransformedRequest(base="guess", body=True, single=None), ], ), calling_form_enum=CallingForm, calling_form=CallingForm.Request, - transport="grpc-async" + transport="grpc-async", ) def test_render_method_call_basic_async(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ await client.categorize_mollusc(request=request) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="video", - body=True, - single=None), - samplegen.TransformedRequest(base="audio", - body=True, - single=None), - samplegen.TransformedRequest(base="guess", - body=True, - single=None) + samplegen.TransformedRequest(base="video", body=True, single=None), + samplegen.TransformedRequest(base="audio", body=True, single=None), + samplegen.TransformedRequest(base="guess", body=True, single=None), ], ), calling_form_enum=CallingForm, calling_form=CallingForm.Request, - transport="grpc-async" + transport="grpc-async", ) def test_render_method_call_basic_flattenable(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ client.categorize_mollusc(video=video, audio=audio, guess=guess) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="video", - body=True, - single=None), - samplegen.TransformedRequest(base="audio", - body=True, - single=None), - 
samplegen.TransformedRequest(base="guess", - body=True, - single=None) + samplegen.TransformedRequest(base="video", body=True, single=None), + samplegen.TransformedRequest(base="audio", body=True, single=None), + samplegen.TransformedRequest(base="guess", body=True, single=None), ], flattenable=True, ), calling_form_enum=CallingForm, calling_form=CallingForm.Request, - transport="grpc" + transport="grpc", ) def test_render_method_call_bidi(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ client.categorize_mollusc(requests=request_generator()) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest( - base="video", - body=True, - single=None - ) + samplegen.TransformedRequest(base="video", body=True, single=None) ] ), calling_form_enum=CallingForm, @@ -1056,21 +1084,17 @@ def test_render_method_call_bidi(): def test_render_method_call_bidi_async(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ await client.categorize_mollusc(requests=request_generator()) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest( - base="video", - body=True, - single=None - ) + samplegen.TransformedRequest(base="video", body=True, single=None) ] ), calling_form_enum=CallingForm, @@ -1081,21 +1105,17 @@ def test_render_method_call_bidi_async(): def test_render_method_call_client(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ client.categorize_mollusc(requests=request_generator()) - ''', + 
""", request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest( - base="video", - body=True, - single=None - ) + samplegen.TransformedRequest(base="video", body=True, single=None) ] ), calling_form_enum=CallingForm, @@ -1106,21 +1126,17 @@ def test_render_method_call_client(): def test_render_method_call_client_async(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, calling_form, calling_form_enum, transport) }} - ''', - ''' + """, + """ await client.categorize_mollusc(requests=request_generator()) - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest( - base="video", - body=True, - single=None - ) + samplegen.TransformedRequest(base="video", body=True, single=None) ] ), calling_form_enum=CallingForm, @@ -1131,52 +1147,48 @@ def test_render_method_call_client_async(): def test_render_request_params(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} {{ frags.render_request_params(request) }} - ''', - ''' + """, + """ mollusc, length_meters=16, order='TEUTHIDA' - ''', + """, request=[ samplegen.TransformedRequest( base="length_meters", body=None, - single=samplegen.AttributeRequestSetup(value="16") + single=samplegen.AttributeRequestSetup(value="16"), ), samplegen.TransformedRequest( base="mollusc", body=[ samplegen.AttributeRequestSetup( - field="video", - value="path/to/video.mkv" + field="video", value="path/to/video.mkv" ), samplegen.AttributeRequestSetup( - field="audio", - value="path/to/audio.ogg" - ) + field="audio", value="path/to/audio.ogg" + ), ], - single=None + single=None, ), samplegen.TransformedRequest( base="order", body=None, - single=samplegen.AttributeRequestSetup( - value="'TEUTHIDA'" - ) - ) - ] + single=samplegen.AttributeRequestSetup(value="'TEUTHIDA'"), + ), + ], ) def test_main_block(): check_template( - ''' + """ {% import "feature_fragments.j2" as frags %} 
{{ frags.render_main_block("ListMolluscs", request) }} - ''', - ''' + """, + """ def main(): import argparse @@ -1194,30 +1206,34 @@ def main(): if __name__ == "__main__": main() - ''', + """, request=samplegen.FullRequest( request_list=[ - samplegen.TransformedRequest(base="input_params", - body=[ - samplegen.AttributeRequestSetup( - field="list_molluscs.order", - value="'coleoidea'", - input_parameter="order" - ), - samplegen.AttributeRequestSetup( - field="list_molluscs.mass", - value="'60kg'", - input_parameter="mass") - ], - single=None), - samplegen.TransformedRequest(base="enum_param", - body=[ - samplegen.AttributeRequestSetup( - field="list_molluscs.zone", - value="MESOPELAGIC" - ) - ], - single=None) + samplegen.TransformedRequest( + base="input_params", + body=[ + samplegen.AttributeRequestSetup( + field="list_molluscs.order", + value="'coleoidea'", + input_parameter="order", + ), + samplegen.AttributeRequestSetup( + field="list_molluscs.mass", + value="'60kg'", + input_parameter="mass", + ), + ], + single=None, + ), + samplegen.TransformedRequest( + base="enum_param", + body=[ + samplegen.AttributeRequestSetup( + field="list_molluscs.zone", value="MESOPELAGIC" + ) + ], + single=None, + ), ] - ) + ), ) diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 13b0d32ffe6e..758e4020cc26 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -56,129 +56,141 @@ def test_api_build(): # Put together a couple of minimal protos. 
fd = ( make_file_pb2( - name='dep.proto', - package='google.dep', - messages=(make_message_pb2(name='ImportedMessage', fields=()),), + name="dep.proto", + package="google.dep", + messages=(make_message_pb2(name="ImportedMessage", fields=()),), ), make_file_pb2( - name='common.proto', - package='google.example.v1.common', - messages=(make_message_pb2(name='Bar'),), + name="common.proto", + package="google.example.v1.common", + messages=(make_message_pb2(name="Bar"),), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='GetFooRequest', fields=( - make_field_pb2(name='imported_message', number=1, - type_name='.google.dep.ImportedMessage'), - make_field_pb2(name='primitive', number=2, type=1), - )), - make_message_pb2(name='GetFooResponse', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), - ), - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + make_message_pb2(name="Foo", fields=()), + make_message_pb2( + name="GetFooRequest", + fields=( + make_field_pb2( + name="imported_message", + number=1, + type_name=".google.dep.ImportedMessage", + ), + make_field_pb2(name="primitive", number=2, type=1), ), ), - ),), - + make_message_pb2( + name="GetFooResponse", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", + ), + ), + ), + ), ), ) # Create an API with those protos. 
- api_schema = api.API.build(fd, package='google.example.v1') + api_schema = api.API.build(fd, package="google.example.v1") # Establish that the API has the data expected. assert isinstance(api_schema, api.API) assert len(api_schema.all_protos) == 3 assert len(api_schema.protos) == 2 - assert 'google.dep.ImportedMessage' not in api_schema.messages - assert 'google.example.v1.common.Bar' in api_schema.messages - assert 'google.example.v1.Foo' in api_schema.messages - assert 'google.example.v1.GetFooRequest' in api_schema.messages - assert 'google.example.v1.GetFooResponse' in api_schema.messages - assert 'google.example.v1.FooService' in api_schema.services + assert "google.dep.ImportedMessage" not in api_schema.messages + assert "google.example.v1.common.Bar" in api_schema.messages + assert "google.example.v1.Foo" in api_schema.messages + assert "google.example.v1.GetFooRequest" in api_schema.messages + assert "google.example.v1.GetFooResponse" in api_schema.messages + assert "google.example.v1.FooService" in api_schema.services assert len(api_schema.enums) == 0 - assert api_schema.protos['foo.proto'].python_modules == ( - imp.Import(package=('google', 'dep'), module='dep_pb2'), + assert api_schema.protos["foo.proto"].python_modules == ( + imp.Import(package=("google", "dep"), module="dep_pb2"), ) - assert api_schema.requires_package(('google', 'example', 'v1')) + assert api_schema.requires_package(("google", "example", "v1")) - assert not api_schema.requires_package(('elgoog', 'example', 'v1')) + assert not api_schema.requires_package(("elgoog", "example", "v1")) # Establish that the subpackages work. 
- assert 'common' in api_schema.subpackages - sub = api_schema.subpackages['common'] + assert "common" in api_schema.subpackages + sub = api_schema.subpackages["common"] assert len(sub.protos) == 1 - assert 'google.example.v1.common.Bar' in sub.messages - assert 'google.example.v1.Foo' not in sub.messages + assert "google.example.v1.common.Bar" in sub.messages + assert "google.example.v1.Foo" not in sub.messages def test_top_level_messages(): message_pbs = ( - make_message_pb2(name='Mollusc', nested_type=( - make_message_pb2(name='Squid'), - )), + make_message_pb2(name="Mollusc", nested_type=(make_message_pb2(name="Squid"),)), ) fds = ( make_file_pb2( messages=message_pbs, - package='google.example.v3', + package="google.example.v3", ), ) - api_schema = api.API.build(fds, package='google.example.v3') + api_schema = api.API.build(fds, package="google.example.v3") actual = [m.name for m in api_schema.top_level_messages.values()] - expected = ['Mollusc'] + expected = ["Mollusc"] assert expected == actual def test_top_level_enum(): # Test that a nested enum works properly. 
message_pbs = ( - make_message_pb2(name='Coleoidea', enum_type=( - make_enum_pb2( - 'Superorder', - 'Decapodiformes', - 'Octopodiformes', - 'Palaeoteuthomorpha', + make_message_pb2( + name="Coleoidea", + enum_type=( + make_enum_pb2( + "Superorder", + "Decapodiformes", + "Octopodiformes", + "Palaeoteuthomorpha", + ), ), - )), + ), ) enum_pbs = ( make_enum_pb2( - 'Order', - 'Gastropoda', - 'Bivalvia', - 'Cephalopoda', + "Order", + "Gastropoda", + "Bivalvia", + "Cephalopoda", ), ) fds = ( make_file_pb2( messages=message_pbs, enums=enum_pbs, - package='google.example.v3', + package="google.example.v3", ), ) - api_schema = api.API.build(fds, package='google.example.v3') + api_schema = api.API.build(fds, package="google.example.v3") actual = [e.name for e in api_schema.top_level_enums.values()] - expected = ['Order'] + expected = ["Order"] assert expected == actual def test_proto_build(): fdp = descriptor_pb2.FileDescriptorProto( - name='my_proto_file.proto', - package='google.example.v1', + name="my_proto_file.proto", + package="google.example.v1", ) proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) assert isinstance(proto, api.Proto) @@ -188,35 +200,44 @@ def test_proto_names(): # Put together a couple of minimal protos. 
fd = ( make_file_pb2( - name='dep.proto', - package='google.dep', - messages=(make_message_pb2(name='ImportedMessage', fields=()),), + name="dep.proto", + package="google.dep", + messages=(make_message_pb2(name="ImportedMessage", fields=()),), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='Bar', fields=( - make_field_pb2(name='imported_message', number=1, - type_name='.google.dep.ImportedMessage'), - make_field_pb2(name='primitive', number=2, type=1), - )), - make_message_pb2(name='Baz', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), + make_message_pb2(name="Foo", fields=()), + make_message_pb2( + name="Bar", + fields=( + make_field_pb2( + name="imported_message", + number=1, + type_name=".google.dep.ImportedMessage", + ), + make_field_pb2(name="primitive", number=2, type=1), + ), + ), + make_message_pb2( + name="Baz", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), ), ), ) # Create an API with those protos. 
- api_schema = api.API.build(fd, package='google.example.v1') - proto = api_schema.protos['foo.proto'] - assert proto.names == {'Foo', 'Bar', 'Baz', 'foo', 'imported_message', - 'primitive'} - assert proto.disambiguate('enum') == 'enum' - assert proto.disambiguate('foo') == '_foo' + api_schema = api.API.build(fd, package="google.example.v1") + proto = api_schema.protos["foo.proto"] + assert proto.names == {"Foo", "Bar", "Baz", "foo", "imported_message", "primitive"} + assert proto.disambiguate("enum") == "enum" + assert proto.disambiguate("foo") == "_foo" def test_proto_with_invalid_characters(): @@ -225,21 +246,21 @@ def test_proto_with_invalid_characters(): # See https://peps.python.org/pep-0008/#package-and-module-names test_cases = [ - {'name': 'k8s.min.proto', 'expected': 'k8s_min.proto'}, - {'name': 'k8s.min.test.proto', 'expected': 'k8s_min_test.proto'} + {"name": "k8s.min.proto", "expected": "k8s_min.proto"}, + {"name": "k8s.min.test.proto", "expected": "k8s_min_test.proto"}, ] for test_case in test_cases: fd = ( make_file_pb2( - name=test_case['name'], - package='google.keywords.v1', - messages=(make_message_pb2(name='ImportRequest', fields=()),), + name=test_case["name"], + package="google.keywords.v1", + messages=(make_message_pb2(name="ImportRequest", fields=()),), ), ) - api_schema = api.API.build(fd, package='google.keywords.v1') + api_schema = api.API.build(fd, package="google.keywords.v1") assert set(api_schema.protos.keys()) == { - test_case['expected'], + test_case["expected"], } @@ -249,58 +270,58 @@ def test_proto_keyword_fname(): # Check that the file names are unspecialized when building the API object. 
fd = ( make_file_pb2( - name='import.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='ImportRequest', fields=()),), + name="import.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="ImportRequest", fields=()),), ), make_file_pb2( - name='import_.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='ImportUnderRequest', fields=()),), + name="import_.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="ImportUnderRequest", fields=()),), ), make_file_pb2( - name='class_.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='ClassUnderRequest', fields=()),), + name="class_.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="ClassUnderRequest", fields=()),), ), make_file_pb2( - name='class.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='ClassRequest', fields=()),), + name="class.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="ClassRequest", fields=()),), ), make_file_pb2( - name='metadata.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='MetadataRequest', fields=()),), + name="metadata.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="MetadataRequest", fields=()),), ), make_file_pb2( - name='retry.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='RetryRequest', fields=()),), + name="retry.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="RetryRequest", fields=()),), ), make_file_pb2( - name='timeout.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='TimeoutRequest', fields=()),), + name="timeout.proto", + package="google.keywords.v1", + messages=(make_message_pb2(name="TimeoutRequest", fields=()),), ), make_file_pb2( - name='request.proto', - package='google.keywords.v1', - messages=(make_message_pb2(name='RequestRequest', fields=()),), + name="request.proto", + 
package="google.keywords.v1", + messages=(make_message_pb2(name="RequestRequest", fields=()),), ), ) # We can't create new collisions, so check that renames cascade. - api_schema = api.API.build(fd, package='google.keywords.v1') + api_schema = api.API.build(fd, package="google.keywords.v1") assert set(api_schema.protos.keys()) == { - 'import_.proto', - 'import__.proto', - 'class_.proto', - 'class__.proto', - 'metadata_.proto', - 'retry_.proto', - 'timeout_.proto', - 'request_.proto', + "import_.proto", + "import__.proto", + "class_.proto", + "class__.proto", + "metadata_.proto", + "retry_.proto", + "timeout_.proto", + "request_.proto", } @@ -308,36 +329,38 @@ def test_proto_oneof(): # Put together a couple of minimal protos. fd = ( make_file_pb2( - name='dep.proto', - package='google.dep', - messages=(make_message_pb2(name='ImportedMessage', fields=()),), + name="dep.proto", + package="google.dep", + messages=(make_message_pb2(name="ImportedMessage", fields=()),), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), + make_message_pb2(name="Foo", fields=()), make_message_pb2( - name='Bar', + name="Bar", fields=( - make_field_pb2(name='imported_message', number=1, - type_name='.google.dep.ImportedMessage', - oneof_index=0), make_field_pb2( - name='primitive', number=2, type=1, oneof_index=0), + name="imported_message", + number=1, + type_name=".google.dep.ImportedMessage", + oneof_index=0, + ), + make_field_pb2( + name="primitive", number=2, type=1, oneof_index=0 + ), ), - oneof_decl=( - make_oneof_pb2(name="value_type"), - ) - ) - ) - ) + oneof_decl=(make_oneof_pb2(name="value_type"),), + ), + ), + ), ) # Create an API with those protos. 
- api_schema = api.API.build(fd, package='google.example.v1') - proto = api_schema.protos['foo.proto'] - assert proto.names == {'imported_message', 'Bar', 'primitive', 'Foo'} + api_schema = api.API.build(fd, package="google.example.v1") + proto = api_schema.protos["foo.proto"] + assert proto.names == {"imported_message", "Bar", "primitive", "Foo"} oneofs = proto.messages["google.example.v1.Bar"].oneofs assert len(oneofs) == 1 assert "value_type" in oneofs.keys() @@ -347,85 +370,117 @@ def test_proto_names_import_collision(): # Put together a couple of minimal protos. fd = ( make_file_pb2( - name='a/b/c/spam.proto', - package='a.b.c', - messages=(make_message_pb2(name='ImportedMessage', fields=()),), + name="a/b/c/spam.proto", + package="a.b.c", + messages=(make_message_pb2(name="ImportedMessage", fields=()),), ), make_file_pb2( - name='x/y/z/spam.proto', - package='x.y.z', - messages=(make_message_pb2(name='OtherMessage', fields=()),), + name="x/y/z/spam.proto", + package="x.y.z", + messages=(make_message_pb2(name="OtherMessage", fields=()),), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='Bar', fields=( - make_field_pb2(name='imported_message', number=1, - type_name='.a.b.c.ImportedMessage'), - make_field_pb2(name='other_message', number=2, - type_name='.x.y.z.OtherMessage'), - make_field_pb2(name='primitive', number=3, type=1), - )), - make_message_pb2(name='Baz', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), + make_message_pb2(name="Foo", fields=()), + make_message_pb2( + name="Bar", + fields=( + make_field_pb2( + name="imported_message", + number=1, + type_name=".a.b.c.ImportedMessage", + ), + make_field_pb2( + name="other_message", + number=2, + type_name=".x.y.z.OtherMessage", + ), + make_field_pb2(name="primitive", number=3, type=1), + ), + ), + make_message_pb2( 
+ name="Baz", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), ), ), ) # Create an API with those protos. - api_schema = api.API.build(fd, package='google.example.v1') - proto = api_schema.protos['foo.proto'] - assert proto.names == {'Foo', 'Bar', 'Baz', 'foo', 'imported_message', - 'other_message', 'primitive', 'spam'} + api_schema = api.API.build(fd, package="google.example.v1") + proto = api_schema.protos["foo.proto"] + assert proto.names == { + "Foo", + "Bar", + "Baz", + "foo", + "imported_message", + "other_message", + "primitive", + "spam", + } def test_proto_names_import_collision_flattening(): - lro_proto = api.Proto.build(make_file_pb2( - name='operations.proto', package='google.longrunning', - messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False, naming=make_naming()) + lro_proto = api.Proto.build( + make_file_pb2( + name="operations.proto", + package="google.longrunning", + messages=(make_message_pb2(name="Operation"),), + ), + file_to_generate=False, + naming=make_naming(), + ) fd = ( make_file_pb2( - name='mollusc.proto', - package='google.animalia.mollusca', + name="mollusc.proto", + package="google.animalia.mollusca", messages=( - make_message_pb2(name='Mollusc',), - make_message_pb2(name='MolluscResponse',), - make_message_pb2(name='MolluscMetadata',), + make_message_pb2( + name="Mollusc", + ), + make_message_pb2( + name="MolluscResponse", + ), + make_message_pb2( + name="MolluscMetadata", + ), ), ), make_file_pb2( - name='squid.proto', - package='google.animalia.mollusca', + name="squid.proto", + package="google.animalia.mollusca", messages=( make_message_pb2( - name='IdentifySquidRequest', + name="IdentifySquidRequest", fields=( make_field_pb2( - name='mollusc', + name="mollusc", number=1, - type_name='.google.animalia.mollusca.Mollusc' + type_name=".google.animalia.mollusca.Mollusc", ), ), ), make_message_pb2( - name='IdentifySquidResponse', + 
name="IdentifySquidResponse", fields=(), ), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='SquidIdentificationService', + name="SquidIdentificationService", method=( descriptor_pb2.MethodDescriptorProto( - name='IdentifyMollusc', - input_type='google.animalia.mollusca.IdentifySquidRequest', - output_type='google.longrunning.Operation', + name="IdentifyMollusc", + input_type="google.animalia.mollusca.IdentifySquidRequest", + output_type="google.longrunning.Operation", ), ), ), @@ -435,19 +490,19 @@ def test_proto_names_import_collision_flattening(): method_options = fd[1].service[0].method[0].options # Notice that a signature field collides with the name of an imported module - method_options.Extensions[client_pb2.method_signature].append('mollusc') + method_options.Extensions[client_pb2.method_signature].append("mollusc") method_options.Extensions[operations_pb2.operation_info].MergeFrom( operations_pb2.OperationInfo( - response_type='google.animalia.mollusca.MolluscResponse', - metadata_type='google.animalia.mollusca.MolluscMetadata', + response_type="google.animalia.mollusca.MolluscResponse", + metadata_type="google.animalia.mollusca.MolluscMetadata", ) ) api_schema = api.API.build( fd, - package='google.animalia.mollusca', + package="google.animalia.mollusca", prior_protos={ - 'google/longrunning/operations.proto': lro_proto, - } + "google/longrunning/operations.proto": lro_proto, + }, ) actual_imports = { @@ -459,31 +514,35 @@ def test_proto_names_import_collision_flattening(): expected_imports = { imp.Import( - package=('google', 'animalia', 'mollusca', 'types'), - module='mollusc', - alias='gam_mollusc', + package=("google", "animalia", "mollusca", "types"), + module="mollusc", + alias="gam_mollusc", + ), + imp.Import( + package=("google", "animalia", "mollusca", "types"), + module="squid", + ), + imp.Import( + package=("google", "api_core"), + module="operation", ), imp.Import( - package=('google', 'animalia', 'mollusca', 'types'), - 
module='squid', + package=("google", "api_core"), + module="operation_async", ), - imp.Import(package=('google', 'api_core'), module='operation',), - imp.Import(package=('google', 'api_core'), module='operation_async',), } assert expected_imports == actual_imports - method = ( - api_schema - .services['google.animalia.mollusca.SquidIdentificationService'] - .methods['IdentifyMollusc'] - ) + method = api_schema.services[ + "google.animalia.mollusca.SquidIdentificationService" + ].methods["IdentifyMollusc"] actual_response_import = method.lro.response_type.ident.python_import expected_response_import = imp.Import( - package=('google', 'animalia', 'mollusca', 'types'), - module='mollusc', - alias='gam_mollusc', + package=("google", "animalia", "mollusca", "types"), + module="mollusc", + alias="gam_mollusc", ) assert actual_response_import == expected_response_import @@ -503,11 +562,12 @@ def test_proto_builder_constructor(): ) # Test the load function. - with mock.patch.object(api._ProtoBuilder, '_load_children') as lc: - pb = api._ProtoBuilder(fdp, - file_to_generate=True, - naming=make_naming(), - ) + with mock.patch.object(api._ProtoBuilder, "_load_children") as lc: + pb = api._ProtoBuilder( + fdp, + file_to_generate=True, + naming=make_naming(), + ) # There should be three total calls to load the different types # of children. 
@@ -532,7 +592,7 @@ def test_proto_builder_constructor(): def test_not_target_file(): """Establish that services are not ignored for untargeted protos.""" message_pb = make_message_pb2( - name='Foo', fields=(make_field_pb2(name='bar', type=3, number=1),) + name="Foo", fields=(make_field_pb2(name="bar", type=3, number=1),) ) service_pb = descriptor_pb2.ServiceDescriptorProto() fdp = make_file_pb2(messages=(message_pb,), services=(service_pb,)) @@ -549,16 +609,16 @@ def test_messages(): L = descriptor_pb2.SourceCodeInfo.Location message_pb = make_message_pb2( - name='Foo', fields=(make_field_pb2(name='bar', type=3, number=1),) + name="Foo", fields=(make_field_pb2(name="bar", type=3, number=1),) ) locations = ( - L(path=(4, 0), leading_comments='This is the Foo message.'), - L(path=(4, 0, 2, 0), leading_comments='This is the bar field.'), + L(path=(4, 0), leading_comments="This is the Foo message."), + L(path=(4, 0, 2, 0), leading_comments="This is the bar field."), ) fdp = make_file_pb2( messages=(message_pb,), locations=locations, - package='google.example.v2', + package="google.example.v2", ) # Make the proto object. @@ -566,27 +626,30 @@ def test_messages(): # Get the message. assert len(proto.messages) == 1 - message = proto.messages['google.example.v2.Foo'] + message = proto.messages["google.example.v2.Foo"] assert isinstance(message, wrappers.MessageType) - assert message.meta.doc == 'This is the Foo message.' + assert message.meta.doc == "This is the Foo message." assert len(message.fields) == 1 - assert message.fields['bar'].meta.doc == 'This is the bar field.' + assert message.fields["bar"].meta.doc == "This is the bar field." def test_messages_reverse_declaration_order(): # Test that if a message is used as a field higher in the same file, # that things still work. 
message_pbs = ( - make_message_pb2(name='Foo', fields=( - make_field_pb2(name='bar', number=1, - type_name='.google.example.v3.Bar'), - ), + make_message_pb2( + name="Foo", + fields=( + make_field_pb2( + name="bar", number=1, type_name=".google.example.v3.Bar" + ), + ), ), - make_message_pb2(name='Bar'), + make_message_pb2(name="Bar"), ) fdp = make_file_pb2( messages=message_pbs, - package='google.example.v3', + package="google.example.v3", ) # Make the proto object. @@ -594,23 +657,26 @@ def test_messages_reverse_declaration_order(): # Get the message. assert len(proto.messages) == 2 - Foo = proto.messages['google.example.v3.Foo'] - assert Foo.fields['bar'].message == proto.messages['google.example.v3.Bar'] + Foo = proto.messages["google.example.v3.Foo"] + assert Foo.fields["bar"].message == proto.messages["google.example.v3.Bar"] def test_messages_recursive(): # Test that if a message is used inside itself, that things will still # work. message_pbs = ( - make_message_pb2(name='Foo', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v3.Foo'), - ), + make_message_pb2( + name="Foo", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v3.Foo" + ), + ), ), ) fdp = make_file_pb2( messages=message_pbs, - package='google.example.v3', + package="google.example.v3", ) # Make the proto object. @@ -618,33 +684,31 @@ def test_messages_recursive(): # Get the message. assert len(proto.messages) == 1 - Foo = proto.messages['google.example.v3.Foo'] - assert Foo.fields['foo'].message == proto.messages['google.example.v3.Foo'] + Foo = proto.messages["google.example.v3.Foo"] + assert Foo.fields["foo"].message == proto.messages["google.example.v3.Foo"] def test_messages_nested(): # Test that a nested message works properly. 
message_pbs = ( - make_message_pb2(name='Foo', nested_type=( - make_message_pb2(name='Bar'), - )), + make_message_pb2(name="Foo", nested_type=(make_message_pb2(name="Bar"),)), ) fdp = make_file_pb2( messages=message_pbs, - package='google.example.v3', + package="google.example.v3", ) # Make the proto object. proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) # Set short variables for the names. - foo = 'google.example.v3.Foo' - bar = 'google.example.v3.Foo.Bar' + foo = "google.example.v3.Foo" + bar = "google.example.v3.Foo.Bar" # Get the message. assert len(proto.all_messages) == 2 - assert proto.all_messages[foo].name == 'Foo' - assert proto.all_messages[bar].name == 'Bar' + assert proto.all_messages[foo].name == "Foo" + assert proto.all_messages[bar].name == "Bar" # Assert that the `messages` property only shows top-level messages. assert len(proto.messages) == 1 @@ -658,66 +722,59 @@ def test_out_of_order_enums(): # This happens when they're a nested type within a message. 
messages = ( make_message_pb2( - name='Squid', + name="Squid", fields=( make_field_pb2( - name='base_color', - type_name='google.mollusca.Chromatophore.Color', + name="base_color", + type_name="google.mollusca.Chromatophore.Color", number=1, ), ), ), make_message_pb2( - name='Chromatophore', - enum_type=( - descriptor_pb2.EnumDescriptorProto(name='Color', value=()), - ), - ) + name="Chromatophore", + enum_type=(descriptor_pb2.EnumDescriptorProto(name="Color", value=()),), + ), ) fd = ( make_file_pb2( - name='squid.proto', - package='google.mollusca', + name="squid.proto", + package="google.mollusca", messages=messages, services=( descriptor_pb2.ServiceDescriptorProto( - name='SquidService', + name="SquidService", ), ), ), ) - api_schema = api.API.build(fd, package='google.mollusca') - field_type = ( - api_schema - .messages['google.mollusca.Squid'] - .fields['base_color'] - .type - ) - enum_type = api_schema.enums['google.mollusca.Chromatophore.Color'] + api_schema = api.API.build(fd, package="google.mollusca") + field_type = api_schema.messages["google.mollusca.Squid"].fields["base_color"].type + enum_type = api_schema.enums["google.mollusca.Chromatophore.Color"] assert field_type == enum_type def test_undefined_type(): fd = ( make_file_pb2( - name='mollusc.proto', - package='google.mollusca', + name="mollusc.proto", + package="google.mollusca", messages=( make_message_pb2( - name='Mollusc', + name="Mollusc", fields=( make_field_pb2( - name='class', - type_name='google.mollusca.Class', + name="class", + type_name="google.mollusca.Class", number=1, ), - ) + ), ), ), ), ) with pytest.raises(TypeError): - api.API.build(fd, package='google.mollusca') + api.API.build(fd, package="google.mollusca") def test_python_modules_nested(): @@ -869,62 +926,69 @@ def test_services(): # Make a silly helper method to not repeat some of the structure. 
def _n(method_name: str): return { - 'service': 'google.example.v2.FooService', - 'method': method_name, + "service": "google.example.v2.FooService", + "method": method_name, } # Set up retry information. - opts = Options(retry={'methodConfig': [ - {'name': [_n('TimeoutableGetFoo')], 'timeout': '30s'}, - {'name': [_n('RetryableGetFoo')], 'retryPolicy': { - 'maxAttempts': 3, - 'initialBackoff': '%dn' % 1e6, - 'maxBackoff': '60s', - 'backoffMultiplier': 1.5, - 'retryableStatusCodes': ['UNAVAILABLE', 'ABORTED'], - }}, - ]}) + opts = Options( + retry={ + "methodConfig": [ + {"name": [_n("TimeoutableGetFoo")], "timeout": "30s"}, + { + "name": [_n("RetryableGetFoo")], + "retryPolicy": { + "maxAttempts": 3, + "initialBackoff": "%dn" % 1e6, + "maxBackoff": "60s", + "backoffMultiplier": 1.5, + "retryableStatusCodes": ["UNAVAILABLE", "ABORTED"], + }, + }, + ] + } + ) # Set up messages for our RPC. request_message_pb = make_message_pb2( - name='GetFooRequest', fields=(make_field_pb2(name='name', type=9, number=1),) + name="GetFooRequest", fields=(make_field_pb2(name="name", type=9, number=1),) ) - response_message_pb = make_message_pb2(name='GetFooResponse', fields=()) + response_message_pb = make_message_pb2(name="GetFooResponse", fields=()) # Set up the service with an RPC. 
service_pb = descriptor_pb2.ServiceDescriptorProto( - name='FooService', + name="FooService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v2.GetFooRequest', - output_type='google.example.v2.GetFooResponse', + name="GetFoo", + input_type="google.example.v2.GetFooRequest", + output_type="google.example.v2.GetFooResponse", ), descriptor_pb2.MethodDescriptorProto( - name='TimeoutableGetFoo', - input_type='google.example.v2.GetFooRequest', - output_type='google.example.v2.GetFooResponse', + name="TimeoutableGetFoo", + input_type="google.example.v2.GetFooRequest", + output_type="google.example.v2.GetFooResponse", ), descriptor_pb2.MethodDescriptorProto( - name='RetryableGetFoo', - input_type='google.example.v2.GetFooRequest', - output_type='google.example.v2.GetFooResponse', + name="RetryableGetFoo", + input_type="google.example.v2.GetFooRequest", + output_type="google.example.v2.GetFooResponse", ), ), ) # Fake-document our fake stuff. locations = ( - L(path=(6, 0), leading_comments='This is the FooService service.'), - L(path=(6, 0, 2, 0), leading_comments='This is the GetFoo method.'), - L(path=(4, 0), leading_comments='This is the GetFooRequest message.'), - L(path=(4, 1), leading_comments='This is the GetFooResponse message.'), + L(path=(6, 0), leading_comments="This is the FooService service."), + L(path=(6, 0, 2, 0), leading_comments="This is the GetFoo method."), + L(path=(4, 0), leading_comments="This is the GetFooRequest message."), + L(path=(4, 1), leading_comments="This is the GetFooResponse message."), ) # Finally, set up the file that encompasses these. fdp = make_file_pb2( - name='test.proto', - package='google.example.v2', + name="test.proto", + package="google.example.v2", messages=(request_message_pb, response_message_pb), services=(service_pb,), locations=locations, @@ -933,43 +997,44 @@ def _n(method_name: str): # Make the proto object. 
proto = api.API.build( [fdp], - 'google.example.v2', + "google.example.v2", opts=opts, - ).protos['test.proto'] + ).protos["test.proto"] # Establish that our data looks correct. assert len(proto.services) == 1 assert len(proto.messages) == 2 - service = proto.services['google.example.v2.FooService'] - assert service.meta.doc == 'This is the FooService service.' + service = proto.services["google.example.v2.FooService"] + assert service.meta.doc == "This is the FooService service." assert len(service.methods) == 3 - method = service.methods['GetFoo'] - assert method.meta.doc == 'This is the GetFoo method.' + method = service.methods["GetFoo"] + assert method.meta.doc == "This is the GetFoo method." assert isinstance(method.input, wrappers.MessageType) assert isinstance(method.output, wrappers.MessageType) - assert method.input.name == 'GetFooRequest' - assert method.input.meta.doc == 'This is the GetFooRequest message.' - assert method.output.name == 'GetFooResponse' - assert method.output.meta.doc == 'This is the GetFooResponse message.' + assert method.input.name == "GetFooRequest" + assert method.input.meta.doc == "This is the GetFooRequest message." + assert method.output.name == "GetFooResponse" + assert method.output.meta.doc == "This is the GetFooResponse message." assert not method.timeout assert not method.retry # Establish that the retry information on a timeout-able method also # looks correct. - timeout_method = service.methods['TimeoutableGetFoo'] + timeout_method = service.methods["TimeoutableGetFoo"] assert timeout_method.timeout == pytest.approx(30.0) assert not timeout_method.retry # Establish that the retry information on the retryable method also # looks correct. 
- retry_method = service.methods['RetryableGetFoo'] + retry_method = service.methods["RetryableGetFoo"] assert retry_method.timeout is None assert retry_method.retry.max_attempts == 3 assert retry_method.retry.initial_backoff == pytest.approx(0.001) assert retry_method.retry.backoff_multiplier == pytest.approx(1.5) assert retry_method.retry.max_backoff == pytest.approx(60.0) assert retry_method.retry.retryable_exceptions == { - exceptions.ServiceUnavailable, exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Aborted, } @@ -977,98 +1042,120 @@ def test_prior_protos(): L = descriptor_pb2.SourceCodeInfo.Location # Set up a prior proto that mimics google/protobuf/empty.proto - empty_proto = api.Proto.build(make_file_pb2( - name='empty.proto', package='google.protobuf', - messages=(make_message_pb2(name='Empty'),), - ), file_to_generate=False, naming=make_naming()) + empty_proto = api.Proto.build( + make_file_pb2( + name="empty.proto", + package="google.protobuf", + messages=(make_message_pb2(name="Empty"),), + ), + file_to_generate=False, + naming=make_naming(), + ) # Set up the service with an RPC. service_pb = descriptor_pb2.ServiceDescriptorProto( - name='PingService', - method=(descriptor_pb2.MethodDescriptorProto( - name='Ping', - input_type='google.protobuf.Empty', - output_type='google.protobuf.Empty', - ),), + name="PingService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="Ping", + input_type="google.protobuf.Empty", + output_type="google.protobuf.Empty", + ), + ), ) # Fake-document our fake stuff. locations = ( - L(path=(6, 0), leading_comments='This is the PingService service.'), - L(path=(6, 0, 2, 0), leading_comments='This is the Ping method.'), + L(path=(6, 0), leading_comments="This is the PingService service."), + L(path=(6, 0, 2, 0), leading_comments="This is the Ping method."), ) # Finally, set up the file that encompasses these. 
fdp = make_file_pb2( - package='google.example.v1', + package="google.example.v1", services=(service_pb,), locations=locations, ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ - 'google/protobuf/empty.proto': empty_proto, - }, naming=make_naming()) + proto = api.Proto.build( + fdp, + file_to_generate=True, + prior_protos={ + "google/protobuf/empty.proto": empty_proto, + }, + naming=make_naming(), + ) # Establish that our data looks correct. assert len(proto.services) == 1 assert len(empty_proto.messages) == 1 assert len(proto.messages) == 0 - service = proto.services['google.example.v1.PingService'] - assert service.meta.doc == 'This is the PingService service.' + service = proto.services["google.example.v1.PingService"] + assert service.meta.doc == "This is the PingService service." assert len(service.methods) == 1 - method = service.methods['Ping'] + method = service.methods["Ping"] assert isinstance(method.input, wrappers.MessageType) assert isinstance(method.output, wrappers.MessageType) - assert method.input.name == 'Empty' - assert method.output.name == 'Empty' - assert method.meta.doc == 'This is the Ping method.' + assert method.input.name == "Empty" + assert method.output.name == "Empty" + assert method.meta.doc == "This is the Ping method." def test_lro(): # Set up a prior proto that mimics google/protobuf/empty.proto - lro_proto = api.Proto.build(make_file_pb2( - name='operations.proto', package='google.longrunning', - messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False, naming=make_naming()) + lro_proto = api.Proto.build( + make_file_pb2( + name="operations.proto", + package="google.longrunning", + messages=(make_message_pb2(name="Operation"),), + ), + file_to_generate=False, + naming=make_naming(), + ) # Set up a method with LRO annotations. 
method_pb2 = descriptor_pb2.MethodDescriptorProto( - name='AsyncDoThing', - input_type='google.example.v3.AsyncDoThingRequest', - output_type='google.longrunning.Operation', + name="AsyncDoThing", + input_type="google.example.v3.AsyncDoThingRequest", + output_type="google.longrunning.Operation", ) method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( operations_pb2.OperationInfo( - response_type='google.example.v3.AsyncDoThingResponse', - metadata_type='google.example.v3.AsyncDoThingMetadata', + response_type="google.example.v3.AsyncDoThingResponse", + metadata_type="google.example.v3.AsyncDoThingMetadata", ), ) # Set up the service with an RPC. service_pb = descriptor_pb2.ServiceDescriptorProto( - name='LongRunningService', + name="LongRunningService", method=(method_pb2,), ) # Set up the messages, including the annotated ones. messages = ( - make_message_pb2(name='AsyncDoThingRequest', fields=()), - make_message_pb2(name='AsyncDoThingResponse', fields=()), - make_message_pb2(name='AsyncDoThingMetadata', fields=()), + make_message_pb2(name="AsyncDoThingRequest", fields=()), + make_message_pb2(name="AsyncDoThingResponse", fields=()), + make_message_pb2(name="AsyncDoThingMetadata", fields=()), ) # Finally, set up the file that encompasses these. fdp = make_file_pb2( - package='google.example.v3', + package="google.example.v3", messages=messages, services=(service_pb,), ) # Make the proto object. - proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ - 'google/longrunning/operations.proto': lro_proto, - }, naming=make_naming()) + proto = api.Proto.build( + fdp, + file_to_generate=True, + prior_protos={ + "google/longrunning/operations.proto": lro_proto, + }, + naming=make_naming(), + ) # Establish that our data looks correct. assert len(proto.services) == 1 @@ -1081,88 +1168,104 @@ def test_lro_operation_no_annotation(): # but has no operation_info option, is treated as not lro. 
# Set up a prior proto that mimics google/protobuf/empty.proto - lro_proto = api.Proto.build(make_file_pb2( - name='operations.proto', package='google.longrunning', - messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False, naming=make_naming()) + lro_proto = api.Proto.build( + make_file_pb2( + name="operations.proto", + package="google.longrunning", + messages=(make_message_pb2(name="Operation"),), + ), + file_to_generate=False, + naming=make_naming(), + ) # Set up a method that returns an Operation, but has no annotation. method_pb2 = descriptor_pb2.MethodDescriptorProto( - name='GetOperation', - input_type='google.example.v3.GetOperationRequest', - output_type='google.longrunning.Operation', + name="GetOperation", + input_type="google.example.v3.GetOperationRequest", + output_type="google.longrunning.Operation", ) # Set up the service with an RPC. service_pb = descriptor_pb2.ServiceDescriptorProto( - name='OperationService', + name="OperationService", method=(method_pb2,), ) # Set up the messages, including the annotated ones. - messages = ( - make_message_pb2(name='GetOperationRequest', fields=()), - ) + messages = (make_message_pb2(name="GetOperationRequest", fields=()),) # Finally, set up the file that encompasses these. fdp = make_file_pb2( - package='google.example.v3', + package="google.example.v3", messages=messages, services=(service_pb,), ) # Make the proto object. 
- proto = api.Proto.build(fdp, file_to_generate=True, prior_protos={ - 'google/longrunning/operations.proto': lro_proto, - }, naming=make_naming()) + proto = api.Proto.build( + fdp, + file_to_generate=True, + prior_protos={ + "google/longrunning/operations.proto": lro_proto, + }, + naming=make_naming(), + ) - service = proto.services['google.example.v3.OperationService'] - method = service.methods['GetOperation'] + service = proto.services["google.example.v3.OperationService"] + method = service.methods["GetOperation"] assert method.lro is None def test_lro_bad_annotation(): # Set up a prior proto that mimics google/protobuf/empty.proto - lro_proto = api.Proto.build(make_file_pb2( - name='operations.proto', package='google.longrunning', - messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False, naming=make_naming()) + lro_proto = api.Proto.build( + make_file_pb2( + name="operations.proto", + package="google.longrunning", + messages=(make_message_pb2(name="Operation"),), + ), + file_to_generate=False, + naming=make_naming(), + ) # Set up a method with an LRO and incomplete annotation. method_pb2 = descriptor_pb2.MethodDescriptorProto( - name='AsyncDoThing', - input_type='google.example.v3.AsyncDoThingRequest', - output_type='google.longrunning.Operation', + name="AsyncDoThing", + input_type="google.example.v3.AsyncDoThingRequest", + output_type="google.longrunning.Operation", ) method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( operations_pb2.OperationInfo( - response_type='google.example.v3.AsyncDoThingResponse', + response_type="google.example.v3.AsyncDoThingResponse", ), ) # Set up the service with an RPC. service_pb = descriptor_pb2.ServiceDescriptorProto( - name='LongRunningService', + name="LongRunningService", method=(method_pb2,), ) # Set up the messages, including the annotated ones. 
- messages = ( - make_message_pb2(name='AsyncDoThingRequest', fields=()), - ) + messages = (make_message_pb2(name="AsyncDoThingRequest", fields=()),) # Finally, set up the file that encompasses these. fdp = make_file_pb2( - package='google.example.v3', + package="google.example.v3", messages=messages, services=(service_pb,), ) # Make the proto object. with pytest.raises(TypeError): - api.Proto.build(fdp, file_to_generate=True, prior_protos={ - 'google/longrunning/operations.proto': lro_proto, - }, naming=make_naming()) + api.Proto.build( + fdp, + file_to_generate=True, + prior_protos={ + "google/longrunning/operations.proto": lro_proto, + }, + naming=make_naming(), + ) def test_cross_file_lro(): @@ -1175,48 +1278,51 @@ def test_cross_file_lro(): # is handled correctly. # Set up a prior proto that mimics google/protobuf/empty.proto - lro_proto = api.Proto.build(make_file_pb2( - name='operations.proto', package='google.longrunning', - messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False, naming=make_naming()) + lro_proto = api.Proto.build( + make_file_pb2( + name="operations.proto", + package="google.longrunning", + messages=(make_message_pb2(name="Operation"),), + ), + file_to_generate=False, + naming=make_naming(), + ) # Set up a method with LRO annotations. method_pb2 = descriptor_pb2.MethodDescriptorProto( - name='AsyncDoThing', - input_type='google.example.v3.AsyncDoThingRequest', - output_type='google.longrunning.Operation', + name="AsyncDoThing", + input_type="google.example.v3.AsyncDoThingRequest", + output_type="google.longrunning.Operation", ) method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( operations_pb2.OperationInfo( - response_type='google.example.v3.AsyncDoThingResponse', - metadata_type='google.example.v3.AsyncDoThingMetadata', + response_type="google.example.v3.AsyncDoThingResponse", + metadata_type="google.example.v3.AsyncDoThingMetadata", ), ) # Set up the service with an RPC. 
service_file = make_file_pb2( - name='service_file.proto', - package='google.example.v3', - messages=( - make_message_pb2(name='AsyncDoThingRequest', fields=()), - ), + name="service_file.proto", + package="google.example.v3", + messages=(make_message_pb2(name="AsyncDoThingRequest", fields=()),), services=( descriptor_pb2.ServiceDescriptorProto( - name='LongRunningService', + name="LongRunningService", method=(method_pb2,), ), - ) + ), ) # Set up the messages, including the annotated ones. # This file is distinct and is not explicitly imported # into the file that defines the service. messages_file = make_file_pb2( - name='messages_file.proto', - package='google.example.v3', + name="messages_file.proto", + package="google.example.v3", messages=( - make_message_pb2(name='AsyncDoThingResponse', fields=()), - make_message_pb2(name='AsyncDoThingMetadata', fields=()), + make_message_pb2(name="AsyncDoThingResponse", fields=()), + make_message_pb2(name="AsyncDoThingMetadata", fields=()), ), ) @@ -1225,20 +1331,21 @@ def test_cross_file_lro(): service_file, messages_file, ), - package='google.example.v3', - prior_protos={'google/longrunning/operations.proto': lro_proto, }, + package="google.example.v3", + prior_protos={ + "google/longrunning/operations.proto": lro_proto, + }, ) method = ( - api_schema. - all_protos['service_file.proto']. - services['google.example.v3.LongRunningService']. 
- methods['AsyncDoThing'] + api_schema.all_protos["service_file.proto"] + .services["google.example.v3.LongRunningService"] + .methods["AsyncDoThing"] ) assert method.lro - assert method.lro.response_type.name == 'AsyncDoThingResponse' - assert method.lro.metadata_type.name == 'AsyncDoThingMetadata' + assert method.lro.response_type.name == "AsyncDoThingResponse" + assert method.lro.metadata_type.name == "AsyncDoThingMetadata" def test_extended_lro(): @@ -1251,7 +1358,9 @@ def test_extended_lro(): T = descriptor_pb2.FieldDescriptorProto.Type operation_fields = tuple( make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ) for f in operation_fields: options = descriptor_pb2.FieldOptions() @@ -1266,7 +1375,7 @@ def test_extended_lro(): make_message_pb2(name="Operation", fields=operation_fields), make_message_pb2(name="InitialRequest"), make_message_pb2(name="GetOperationRequest"), - ), + ), services=( descriptor_pb2.ServiceDescriptorProto( name="OpsService", @@ -1276,9 +1385,9 @@ def test_extended_lro(): input_type="exlro.GetOperationRequest", output_type="exlro.Operation", options=polling_method_options, - ), ), ), + ), descriptor_pb2.ServiceDescriptorProto( name="BasicService", method=( @@ -1287,19 +1396,24 @@ def test_extended_lro(): input_type="exlro.InitialRequest", output_type="exlro.Operation", options=initiate_options, - ), ), ), ), ), + ), file_to_generate=True, naming=make_naming(), ) initiate = api_schema.services["exlro.BasicService"].methods["Initiate"] assert initiate.extended_lro - assert initiate.extended_lro.request_type == api_schema.messages["exlro.GetOperationRequest"] - assert initiate.extended_lro.operation_type == api_schema.messages["exlro.Operation"] + assert ( + initiate.extended_lro.request_type + == api_schema.messages["exlro.GetOperationRequest"] + ) 
+ assert ( + initiate.extended_lro.operation_type == api_schema.messages["exlro.Operation"] + ) def test_extended_lro_no_such_service(): @@ -1312,7 +1426,9 @@ def test_extended_lro_no_such_service(): T = descriptor_pb2.FieldDescriptorProto.Type operation_fields = tuple( make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ) for f in operation_fields: options = descriptor_pb2.FieldOptions() @@ -1329,12 +1445,8 @@ def test_extended_lro_no_such_service(): name="Operation", fields=operation_fields, ), - make_message_pb2( - name="InitialRequest" - ), - make_message_pb2( - name="GetOperationRequest" - ), + make_message_pb2(name="InitialRequest"), + make_message_pb2(name="GetOperationRequest"), ), services=( descriptor_pb2.ServiceDescriptorProto( @@ -1345,9 +1457,9 @@ def test_extended_lro_no_such_service(): input_type="exlro.GetOperationRequest", output_type="exlro.Operation", options=polling_method_options, - ), ), ), + ), descriptor_pb2.ServiceDescriptorProto( name="BasicService", method=( @@ -1356,11 +1468,11 @@ def test_extended_lro_no_such_service(): input_type="exlro.InitialRequest", output_type="exlro.Operation", options=initiate_options, - ), ), ), ), ), + ), file_to_generate=True, naming=make_naming(), ) @@ -1373,7 +1485,9 @@ def test_extended_lro_no_polling_method(): T = descriptor_pb2.FieldDescriptorProto.Type operation_fields = tuple( make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ) for f in operation_fields: options = descriptor_pb2.FieldOptions() @@ -1405,9 +1519,9 @@ def test_extended_lro_no_polling_method(): name="Get", input_type="exlro.GetOperationRequest", 
output_type="exlro.Operation", - ), ), ), + ), descriptor_pb2.ServiceDescriptorProto( name="BasicService", method=( @@ -1416,11 +1530,11 @@ def test_extended_lro_no_polling_method(): input_type="exlro.InitialRequest", output_type="exlro.Operation", options=initiate_options, - ), ), ), ), ), + ), file_to_generate=True, naming=make_naming(), ) @@ -1436,7 +1550,9 @@ def test_extended_lro_different_output_types(): T = descriptor_pb2.FieldDescriptorProto.Type operation_fields = tuple( make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ) for f in operation_fields: options = descriptor_pb2.FieldOptions() @@ -1472,9 +1588,9 @@ def test_extended_lro_different_output_types(): input_type="exlro.GetOperationRequest", output_type="exlro.GetOperationResponse", options=polling_method_options, - ), ), ), + ), descriptor_pb2.ServiceDescriptorProto( name="BasicService", method=( @@ -1483,11 +1599,11 @@ def test_extended_lro_different_output_types(): input_type="exlro.InitialRequest", output_type="exlro.Operation", options=initiate_options, - ), ), ), ), ), + ), file_to_generate=True, naming=make_naming(), ) @@ -1509,7 +1625,7 @@ def test_extended_lro_not_an_operation(): make_message_pb2(name="Operation"), make_message_pb2(name="InitialRequest"), make_message_pb2(name="GetOperationRequest"), - ), + ), services=( descriptor_pb2.ServiceDescriptorProto( name="OpsService", @@ -1519,9 +1635,9 @@ def test_extended_lro_not_an_operation(): input_type="exlro.GetOperationRequest", output_type="exlro.Operation", options=polling_method_options, - ), ), ), + ), descriptor_pb2.ServiceDescriptorProto( name="BasicService", method=( @@ -1530,11 +1646,11 @@ def test_extended_lro_not_an_operation(): input_type="exlro.InitialRequest", output_type="exlro.Operation", options=initiate_options, - ), ), ), ), ), + ), 
file_to_generate=True, naming=make_naming(), ) @@ -1542,29 +1658,36 @@ def test_extended_lro_not_an_operation(): def test_enums(): L = descriptor_pb2.SourceCodeInfo.Location - enum_pb = descriptor_pb2.EnumDescriptorProto(name='Silly', value=( - descriptor_pb2.EnumValueDescriptorProto(name='ZERO', number=0), - descriptor_pb2.EnumValueDescriptorProto(name='ONE', number=1), - descriptor_pb2.EnumValueDescriptorProto(name='THREE', number=3), - )) - fdp = make_file_pb2(package='google.enum.v1', enums=(enum_pb,), locations=( - L(path=(5, 0), leading_comments='This is the Silly enum.'), - L(path=(5, 0, 2, 0), leading_comments='This is the zero value.'), - L(path=(5, 0, 2, 1), leading_comments='This is the one value.'), - )) + enum_pb = descriptor_pb2.EnumDescriptorProto( + name="Silly", + value=( + descriptor_pb2.EnumValueDescriptorProto(name="ZERO", number=0), + descriptor_pb2.EnumValueDescriptorProto(name="ONE", number=1), + descriptor_pb2.EnumValueDescriptorProto(name="THREE", number=3), + ), + ) + fdp = make_file_pb2( + package="google.enum.v1", + enums=(enum_pb,), + locations=( + L(path=(5, 0), leading_comments="This is the Silly enum."), + L(path=(5, 0, 2, 0), leading_comments="This is the zero value."), + L(path=(5, 0, 2, 1), leading_comments="This is the one value."), + ), + ) proto = api.Proto.build(fdp, file_to_generate=True, naming=make_naming()) assert len(proto.enums) == 1 - enum = proto.enums['google.enum.v1.Silly'] - assert enum.meta.doc == 'This is the Silly enum.' + enum = proto.enums["google.enum.v1.Silly"] + assert enum.meta.doc == "This is the Silly enum." assert isinstance(enum, wrappers.EnumType) assert len(enum.values) == 3 assert all([isinstance(i, wrappers.EnumValueType) for i in enum.values]) - assert enum.values[0].name == 'ZERO' - assert enum.values[0].meta.doc == 'This is the zero value.' - assert enum.values[1].name == 'ONE' - assert enum.values[1].meta.doc == 'This is the one value.' 
- assert enum.values[2].name == 'THREE' - assert enum.values[2].meta.doc == '' + assert enum.values[0].name == "ZERO" + assert enum.values[0].meta.doc == "This is the zero value." + assert enum.values[1].name == "ONE" + assert enum.values[1].meta.doc == "This is the one value." + assert enum.values[2].name == "THREE" + assert enum.values[2].meta.doc == "" def test_file_level_resources(): @@ -1574,9 +1697,7 @@ def test_file_level_resources(): messages=( make_message_pb2( name="CreateSpeciesRequest", - fields=( - make_field_pb2(name='species', number=1, type=9), - ), + fields=(make_field_pb2(name="species", number=1, type=9),), ), make_message_pb2( name="CreateSpeciesResponse", @@ -1597,10 +1718,11 @@ def test_file_level_resources(): ) res_pb2 = fdp.options.Extensions[resource_pb2.resource_definition] definitions = [ - ("nomenclature.linnaen.com/Species", - "families/{family}/genera/{genus}/species/{species}"), - ("nomenclature.linnaen.com/Phylum", - "kingdoms/{kingdom}/phyla/{phylum}"), + ( + "nomenclature.linnaen.com/Species", + "families/{family}/genera/{genus}/species/{species}", + ), + ("nomenclature.linnaen.com/Phylum", "kingdoms/{kingdom}/phyla/{phylum}"), ] for type_, pattern in definitions: resource_definition = res_pb2.add() @@ -1608,23 +1730,31 @@ def test_file_level_resources(): resource_definition.pattern.append(pattern) species_field = fdp.message_type[0].field[0] - resource_reference = species_field.options.Extensions[resource_pb2.resource_reference] + resource_reference = species_field.options.Extensions[ + resource_pb2.resource_reference + ] resource_reference.type = "nomenclature.linnaen.com/Species" - api_schema = api.API.build([fdp], package='nomenclature.linneaen.v1') - actual = api_schema.protos['nomenclature.proto'].resource_messages - expected = collections.OrderedDict(( - ("nomenclature.linnaen.com/Species", - wrappers.CommonResource( - type_name="nomenclature.linnaen.com/Species", - 
pattern="families/{family}/genera/{genus}/species/{species}" - ).message_type), - ("nomenclature.linnaen.com/Phylum", - wrappers.CommonResource( - type_name="nomenclature.linnaen.com/Phylum", - pattern="kingdoms/{kingdom}/phyla/{phylum}" - ).message_type), - )) + api_schema = api.API.build([fdp], package="nomenclature.linneaen.v1") + actual = api_schema.protos["nomenclature.proto"].resource_messages + expected = collections.OrderedDict( + ( + ( + "nomenclature.linnaen.com/Species", + wrappers.CommonResource( + type_name="nomenclature.linnaen.com/Species", + pattern="families/{family}/genera/{genus}/species/{species}", + ).message_type, + ), + ( + "nomenclature.linnaen.com/Phylum", + wrappers.CommonResource( + type_name="nomenclature.linnaen.com/Phylum", + pattern="kingdoms/{kingdom}/phyla/{phylum}", + ).message_type, + ), + ) + ) assert actual == expected @@ -1653,9 +1783,7 @@ def test_resources_referenced_but_not_typed(reference_attr="type"): ), make_message_pb2( name="CreateSpeciesRequest", - fields=( - make_field_pb2(name='species', number=1, type=9), - ), + fields=(make_field_pb2(name="species", number=1, type=9),), ), make_message_pb2( name="CreateSpeciesResponse", @@ -1676,13 +1804,18 @@ def test_resources_referenced_but_not_typed(reference_attr="type"): ) # Set up the resource - species_resource_opts = fdp.message_type[0].options.Extensions[resource_pb2.resource] + species_resource_opts = fdp.message_type[0].options.Extensions[ + resource_pb2.resource + ] species_resource_opts.type = "nomenclature.linnaen.com/Species" species_resource_opts.pattern.append( - "families/{family}/genera/{genus}/species/{species}") + "families/{family}/genera/{genus}/species/{species}" + ) # Set up the reference - name_resource_opts = fdp.message_type[1].field[0].options.Extensions[resource_pb2.resource_reference] + name_resource_opts = ( + fdp.message_type[1].field[0].options.Extensions[resource_pb2.resource_reference] + ) if reference_attr == "type": name_resource_opts.type = 
species_resource_opts.type else: @@ -1690,7 +1823,9 @@ def test_resources_referenced_but_not_typed(reference_attr="type"): api_schema = api.API.build([fdp], package="nomenclature.linneaen.v1") expected = {api_schema.messages["nomenclature.linneaen.v1.Species"]} - actual = api_schema.services["nomenclature.linneaen.v1.SpeciesService"].resource_messages + actual = api_schema.services[ + "nomenclature.linneaen.v1.SpeciesService" + ].resource_messages assert actual == expected @@ -1749,7 +1884,7 @@ def test_map_field_name_disambiguation(): # https://github.com/googleapis/gapic-generator-python/issues/618. # The module _is_ disambiguated for singleton # fields but NOT for map fields. - type_name=".animalia.mollusca.v2.Mollusc" + type_name=".animalia.mollusca.v2.Mollusc", ), ), options=descriptor_pb2.MessageOptions(map_entry=True), @@ -1761,11 +1896,11 @@ def test_map_field_name_disambiguation(): my_api = api.API.build( file_descriptors=[squid_file_pb, method_types_file_pb], ) - create = my_api.messages['animalia.mollusca.v2.CreateMolluscRequest'] - mollusc = create.fields['mollusc'] - molluscs_map = create.fields['molluscs_map'] + create = my_api.messages["animalia.mollusca.v2.CreateMolluscRequest"] + mollusc = create.fields["mollusc"] + molluscs_map = create.fields["molluscs_map"] mollusc_ident = str(mollusc.type.ident) - mollusc_map_ident = str(molluscs_map.message.fields['value'].type.ident) + mollusc_map_ident = str(molluscs_map.message.fields["value"].type.ident) # The same module used in the same place should have the same import alias. # Because there's a "mollusc" name used, the import should be disambiguated. 
@@ -1820,7 +1955,7 @@ def test_gapic_metadata(): input_type="animalia.mollusca.v1.MolluscRequest", output_type="animalia.mollusca.v1.Mollusc", ), - ] + ], ), ], ) @@ -1840,15 +1975,23 @@ def test_gapic_metadata(): "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusClient", rpcs={ - "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), - "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), }, ), "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusAsyncClient", rpcs={ - "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), - "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), }, ), } @@ -1858,22 +2001,34 @@ def test_gapic_metadata(): "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", rpcs={ - "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), - "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), - "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["ramshorn"] + ), }, ), "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidAsyncClient", rpcs={ - "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), - "Humboldt": 
gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), - "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["ramshorn"] + ), }, ), } ), - } + }, ) actual = api_schema.gapic_metadata(opts) assert expected == actual @@ -1894,8 +2049,12 @@ def test_gapic_metadata(): "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusClient", rpcs={ - "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), - "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), }, ) } @@ -1905,15 +2064,20 @@ def test_gapic_metadata(): "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", rpcs={ - "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), - "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), - "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["ramshorn"] + ), }, ), - } ), - } + }, ) actual = api_schema.gapic_metadata(opts) assert expected == actual @@ -1934,24 +2098,36 @@ def test_gapic_metadata(): "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusClient", rpcs={ - "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), - "GiantPacific": 
gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), }, ), "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusAsyncClient", rpcs={ - "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), - "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), }, ), "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="OctopusClient", rpcs={ - "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["blue_spot"]), - "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant_pacific"]), + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), }, - ) + ), } ), "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( @@ -1959,31 +2135,48 @@ def test_gapic_metadata(): "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", rpcs={ - "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), - "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), - "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["ramshorn"] + ), }, ), "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( 
library_client="SquidAsyncClient", rpcs={ - "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), - "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), - "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["ramshorn"] + ), }, ), "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", rpcs={ - "Giant": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["giant"]), - "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["humboldt"]), - "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList(methods=["ramshorn"]), + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["ramshorn"] + ), }, ), - } ), - } + }, ) actual = api_schema.gapic_metadata(opts) @@ -1996,35 +2189,53 @@ def test_gapic_metadata(): def test_http_options(fs): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()),), - ),) - - opts = Options(service_yaml_config={ - 'http': { - 'rules': [ - { - 'selector': 'Cancel', - 'post': '/v3/{name=projects/*/locations/*/operations/*}:cancel', - 'body': '*' - }, - { - 'selector': 'Get', - 'get': '/v3/{name=projects/*/locations/*/operations/*}', - 'additional_bindings': [{'get': '/v3/{name=/locations/*/operations/*}'}], - }, ] + name="example.proto", + package="google.example.v1", + messages=(make_message_pb2(name="ExampleRequest", fields=()),), + ), + ) + + opts = Options( + service_yaml_config={ + "http": { + "rules": [ + { + "selector": "Cancel", + 
"post": "/v3/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "selector": "Get", + "get": "/v3/{name=projects/*/locations/*/operations/*}", + "additional_bindings": [ + {"get": "/v3/{name=/locations/*/operations/*}"} + ], + }, + ] + } } - }) + ) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) http_options = api_schema.http_options assert http_options == { - 'Cancel': [wrappers.HttpRule(method='post', uri='/v3/{name=projects/*/locations/*/operations/*}:cancel', body='*')], - 'Get': [ + "Cancel": [ + wrappers.HttpRule( + method="post", + uri="/v3/{name=projects/*/locations/*/operations/*}:cancel", + body="*", + ) + ], + "Get": [ wrappers.HttpRule( - method='get', uri='/v3/{name=projects/*/locations/*/operations/*}', body=None), - wrappers.HttpRule(method='get', uri='/v3/{name=/locations/*/operations/*}', body=None)] + method="get", + uri="/v3/{name=projects/*/locations/*/operations/*}", + body=None, + ), + wrappers.HttpRule( + method="get", uri="/v3/{name=/locations/*/operations/*}", body=None + ), + ], } @@ -2035,7 +2246,9 @@ def generate_basic_extended_operations_setup(): name="Operation", fields=( make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ), ) @@ -2071,9 +2284,7 @@ def generate_basic_extended_operations_setup(): request = make_message_pb2( name="GetOperation", - fields=[ - make_field_pb2(name="name", type=T.Value("TYPE_STRING"), number=1) - ], + fields=[make_field_pb2(name="name", type=T.Value("TYPE_STRING"), number=1)], ) initial_opts = descriptor_pb2.MethodOptions() @@ -2117,10 +2328,14 @@ def generate_basic_extended_operations_setup(): def test_extended_operations_lro_operation_service(): file_protos = generate_basic_extended_operations_setup() 
api_schema = api.API.build(file_protos) - regular_service = api_schema.services["google.extended_operations.v1.stuff.RegularService"] + regular_service = api_schema.services[ + "google.extended_operations.v1.stuff.RegularService" + ] initial_method = regular_service.methods["CreateTask"] - operation_service = api_schema.services['google.extended_operations.v1.stuff.CustomOperations'] + operation_service = api_schema.services[ + "google.extended_operations.v1.stuff.CustomOperations" + ] expected = operation_service actual = api_schema.get_custom_operation_service(initial_method) @@ -2136,7 +2351,9 @@ def test_extended_operations_lro_operation_service(): def test_extended_operations_lro_operation_service_no_annotation(): file_protos = generate_basic_extended_operations_setup() api_schema = api.API.build(file_protos) - initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + initial_method = api_schema.services[ + "google.extended_operations.v1.stuff.RegularService" + ].methods["CreateTask"] # It's easier to manipulate data structures after building the API. 
del initial_method.options.Extensions[ex_ops_pb2.operation_service] @@ -2149,7 +2366,9 @@ def test_extended_operations_lro_operation_service_no_such_service(): file_protos = generate_basic_extended_operations_setup() api_schema = api.API.build(file_protos) - initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + initial_method = api_schema.services[ + "google.extended_operations.v1.stuff.RegularService" + ].methods["CreateTask"] initial_method.options.Extensions[ex_ops_pb2.operation_service] = "UnrealService" with pytest.raises(ValueError): @@ -2160,10 +2379,13 @@ def test_extended_operations_lro_operation_service_not_an_lro(): file_protos = generate_basic_extended_operations_setup() api_schema = api.API.build(file_protos) - initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + initial_method = api_schema.services[ + "google.extended_operations.v1.stuff.RegularService" + ].methods["CreateTask"] # Hack to pretend that the initial_method is not an LRO super(type(initial_method), initial_method).__setattr__( - "output", initial_method.input) + "output", initial_method.input + ) with pytest.raises(ValueError): api_schema.get_custom_operation_service(initial_method) @@ -2173,10 +2395,16 @@ def test_extended_operations_lro_operation_service_no_polling_method(): file_protos = generate_basic_extended_operations_setup() api_schema = api.API.build(file_protos) - initial_method = api_schema.services["google.extended_operations.v1.stuff.RegularService"].methods["CreateTask"] + initial_method = api_schema.services[ + "google.extended_operations.v1.stuff.RegularService" + ].methods["CreateTask"] - operation_service = api_schema.services["google.extended_operations.v1.stuff.CustomOperations"] - del operation_service.methods["Get"].options.Extensions[ex_ops_pb2.operation_polling_method] + operation_service = api_schema.services[ + 
"google.extended_operations.v1.stuff.CustomOperations" + ] + del operation_service.methods["Get"].options.Extensions[ + ex_ops_pb2.operation_polling_method + ] with pytest.raises(ValueError): api_schema.get_custom_operation_service(initial_method) @@ -2195,240 +2423,246 @@ def methods_from_service(service_pb, name: str): def test_mixin_api_methods_locations(): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()),), - ),) - opts = Options(service_yaml_config={ - 'apis': [ - { - 'name': 'google.cloud.location.Locations' - } - ], - 'http': { - 'rules': [ - { - 'selector': 'google.cloud.location.Locations.ListLocations', - 'get': '/v1/{name=examples/*}/*', - 'body': '*' - }, - { - 'selector': 'google.cloud.location.Locations.GetLocation', - 'get': '/v1/{name=examples/*}/*', - 'body': '*' - }, - { - 'selector': 'google.example.v1.Example', - }] + name="example.proto", + package="google.example.v1", + messages=(make_message_pb2(name="ExampleRequest", fields=()),), + ), + ) + opts = Options( + service_yaml_config={ + "apis": [{"name": "google.cloud.location.Locations"}], + "http": { + "rules": [ + { + "selector": "google.cloud.location.Locations.ListLocations", + "get": "/v1/{name=examples/*}/*", + "body": "*", + }, + { + "selector": "google.cloud.location.Locations.GetLocation", + "get": "/v1/{name=examples/*}/*", + "body": "*", + }, + { + "selector": "google.example.v1.Example", + }, + ] + }, } - }) - ms = methods_from_service(locations_pb2, 'Locations') + ) + ms = methods_from_service(locations_pb2, "Locations") assert len(ms) == 2 - m1 = ms['ListLocations'] + m1 = ms["ListLocations"] m1.options.ClearExtension(annotations_pb2.http) - m1.options.Extensions[annotations_pb2.http].selector = 'google.cloud.location.Locations.ListLocations' - m1.options.Extensions[annotations_pb2.http].get = '/v1/{name=examples/*}/*' - m1.options.Extensions[annotations_pb2.http].body = '*' - m2 = 
ms['GetLocation'] + m1.options.Extensions[annotations_pb2.http].selector = ( + "google.cloud.location.Locations.ListLocations" + ) + m1.options.Extensions[annotations_pb2.http].get = "/v1/{name=examples/*}/*" + m1.options.Extensions[annotations_pb2.http].body = "*" + m2 = ms["GetLocation"] m2.options.ClearExtension(annotations_pb2.http) - m2.options.Extensions[annotations_pb2.http].selector = 'google.cloud.location.Locations.GetLocation' - m2.options.Extensions[annotations_pb2.http].get = '/v1/{name=examples/*}/*' - m2.options.Extensions[annotations_pb2.http].body = '*' - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) - assert api_schema.mixin_api_methods == { - 'ListLocations': m1, 'GetLocation': m2} + m2.options.Extensions[annotations_pb2.http].selector = ( + "google.cloud.location.Locations.GetLocation" + ) + m2.options.Extensions[annotations_pb2.http].get = "/v1/{name=examples/*}/*" + m2.options.Extensions[annotations_pb2.http].body = "*" + api_schema = api.API.build(fd, "google.example.v1", opts=opts) + assert api_schema.mixin_api_methods == {"ListLocations": m1, "GetLocation": m2} def test_mixin_api_methods_iam(): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()), - make_message_pb2(name='ExampleResponse', fields=())), - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='FooMethod', - # Input and output types don't matter. 
- input_type='google.example.v1.ExampleRequest', - output_type='google.example.v1.ExampleResponse', + name="example.proto", + package="google.example.v1", + messages=( + make_message_pb2(name="ExampleRequest", fields=()), + make_message_pb2(name="ExampleResponse", fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="FooMethod", + # Input and output types don't matter. + input_type="google.example.v1.ExampleRequest", + output_type="google.example.v1.ExampleResponse", + ), ), ), - ),), - ),) + ), + ), + ) r1 = { - 'selector': 'google.iam.v1.IAMPolicy.SetIamPolicy', - 'post': '/v1/{resource=examples/*}/*', - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.SetIamPolicy", + "post": "/v1/{resource=examples/*}/*", + "body": "*", } r2 = { - 'selector': 'google.iam.v1.IAMPolicy.GetIamPolicy', - 'get': '/v1/{resource=examples/*}/*', - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.GetIamPolicy", + "get": "/v1/{resource=examples/*}/*", + "body": "*", } r3 = { - 'selector': 'google.iam.v1.IAMPolicy.TestIamPermissions', - 'post': '/v1/{resource=examples/*}/*', - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.TestIamPermissions", + "post": "/v1/{resource=examples/*}/*", + "body": "*", } - opts = Options(service_yaml_config={ - 'apis': [ - { - 'name': 'google.iam.v1.IAMPolicy' - } - ], - 'http': { - 'rules': [r1, r2, r3] + opts = Options( + service_yaml_config={ + "apis": [{"name": "google.iam.v1.IAMPolicy"}], + "http": {"rules": [r1, r2, r3]}, } - }) - ms = methods_from_service(iam_policy_pb2, 'IAMPolicy') + ) + ms = methods_from_service(iam_policy_pb2, "IAMPolicy") assert len(ms) == 3 - m1 = ms['SetIamPolicy'] + m1 = ms["SetIamPolicy"] m1.options.ClearExtension(annotations_pb2.http) - m1.options.Extensions[annotations_pb2.http].selector = r1['selector'] - m1.options.Extensions[annotations_pb2.http].post = r1['post'] - m1.options.Extensions[annotations_pb2.http].body = 
r1['body'] - m2 = ms['GetIamPolicy'] + m1.options.Extensions[annotations_pb2.http].selector = r1["selector"] + m1.options.Extensions[annotations_pb2.http].post = r1["post"] + m1.options.Extensions[annotations_pb2.http].body = r1["body"] + m2 = ms["GetIamPolicy"] m2.options.ClearExtension(annotations_pb2.http) - m2.options.Extensions[annotations_pb2.http].selector = r2['selector'] - m2.options.Extensions[annotations_pb2.http].get = r2['get'] - m2.options.Extensions[annotations_pb2.http].body = r2['body'] - m3 = ms['TestIamPermissions'] + m2.options.Extensions[annotations_pb2.http].selector = r2["selector"] + m2.options.Extensions[annotations_pb2.http].get = r2["get"] + m2.options.Extensions[annotations_pb2.http].body = r2["body"] + m3 = ms["TestIamPermissions"] m3.options.ClearExtension(annotations_pb2.http) - m3.options.Extensions[annotations_pb2.http].selector = r3['selector'] - m3.options.Extensions[annotations_pb2.http].post = r3['post'] - m3.options.Extensions[annotations_pb2.http].body = r3['body'] - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + m3.options.Extensions[annotations_pb2.http].selector = r3["selector"] + m3.options.Extensions[annotations_pb2.http].post = r3["post"] + m3.options.Extensions[annotations_pb2.http].body = r3["body"] + api_schema = api.API.build(fd, "google.example.v1", opts=opts) assert api_schema.mixin_api_methods == { - 'SetIamPolicy': m1, 'GetIamPolicy': m2, 'TestIamPermissions': m3} + "SetIamPolicy": m1, + "GetIamPolicy": m2, + "TestIamPermissions": m3, + } assert not api_schema.has_operations_mixin def test_mixin_api_methods_iam_overrides(): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()), - make_message_pb2(name='ExampleResponse', fields=()), - ), - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='TestIamPermissions', - # Input and output 
types don't matter. - input_type='google.example.v1.ExampleRequest', - output_type='google.example.v1.ExampleResponse', + name="example.proto", + package="google.example.v1", + messages=( + make_message_pb2(name="ExampleRequest", fields=()), + make_message_pb2(name="ExampleResponse", fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="TestIamPermissions", + # Input and output types don't matter. + input_type="google.example.v1.ExampleRequest", + output_type="google.example.v1.ExampleResponse", + ), ), ), - ),), + ), ), ) r1 = { - 'selector': 'google.iam.v1.IAMPolicy.SetIamPolicy', - 'post': '/v1/{resource=examples/*}/*', - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.SetIamPolicy", + "post": "/v1/{resource=examples/*}/*", + "body": "*", } r2 = { - 'selector': 'google.iam.v1.IAMPolicy.GetIamPolicy', - 'get': '/v1/{resource=examples/*}/*', - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.GetIamPolicy", + "get": "/v1/{resource=examples/*}/*", + "body": "*", } r3 = { - 'selector': 'google.iam.v1.IAMPolicy.TestIamPermissions', - 'post': '/v1/{resource=examples/*}/*', - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.TestIamPermissions", + "post": "/v1/{resource=examples/*}/*", + "body": "*", } - opts = Options(service_yaml_config={ - 'apis': [ - { - 'name': 'google.iam.v1.IAMPolicy' - } - ], - 'http': { - 'rules': [r1, r2, r3] + opts = Options( + service_yaml_config={ + "apis": [{"name": "google.iam.v1.IAMPolicy"}], + "http": {"rules": [r1, r2, r3]}, } - }) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + ) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) assert api_schema.mixin_api_methods == {} -def create_service_config_with_all_mixins(http_opt_uri='/v1/{name=examples/*}/*'): +def create_service_config_with_all_mixins(http_opt_uri="/v1/{name=examples/*}/*"): service_yaml_config = { - 'apis': [ + "apis": [ { - 'name': 
'google.cloud.location.Locations', + "name": "google.cloud.location.Locations", }, { - 'name': 'google.longrunning.Operations', + "name": "google.longrunning.Operations", }, { - 'name': 'google.iam.v1.IAMPolicy', + "name": "google.iam.v1.IAMPolicy", }, ], - 'http': { - 'rules': [ + "http": { + "rules": [ # Locations { - 'selector': 'google.cloud.location.Locations.ListLocations', - 'get': http_opt_uri, - 'body': '*' + "selector": "google.cloud.location.Locations.ListLocations", + "get": http_opt_uri, + "body": "*", }, { - 'selector': 'google.cloud.location.Locations.GetLocation', - 'get': http_opt_uri, - 'body': '*' + "selector": "google.cloud.location.Locations.GetLocation", + "get": http_opt_uri, + "body": "*", }, # LRO { - 'selector': 'google.longrunning.Operations.CancelOperation', - 'post': http_opt_uri, - 'body': '*', + "selector": "google.longrunning.Operations.CancelOperation", + "post": http_opt_uri, + "body": "*", }, { - 'selector': 'google.longrunning.Operations.DeleteOperation', - 'get': http_opt_uri, - 'body': '*' + "selector": "google.longrunning.Operations.DeleteOperation", + "get": http_opt_uri, + "body": "*", }, { - 'selector': 'google.longrunning.Operations.WaitOperation', - 'post': http_opt_uri, - 'body': '*' + "selector": "google.longrunning.Operations.WaitOperation", + "post": http_opt_uri, + "body": "*", }, { - 'selector': 'google.longrunning.Operations.GetOperation', - 'post': http_opt_uri, - 'body': '*' + "selector": "google.longrunning.Operations.GetOperation", + "post": http_opt_uri, + "body": "*", }, { - 'selector': 'google.longrunning.Operations.ListOperations', - 'post': http_opt_uri, - 'body': '*' + "selector": "google.longrunning.Operations.ListOperations", + "post": http_opt_uri, + "body": "*", }, # IAM { - 'selector': 'google.iam.v1.IAMPolicy.SetIamPolicy', - 'post': http_opt_uri, - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.SetIamPolicy", + "post": http_opt_uri, + "body": "*", }, { - 'selector': 
'google.iam.v1.IAMPolicy.GetIamPolicy', - 'get': http_opt_uri, - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.GetIamPolicy", + "get": http_opt_uri, + "body": "*", }, { - 'selector': 'google.iam.v1.IAMPolicy.TestIamPermissions', - 'post': http_opt_uri, - 'body': '*' + "selector": "google.iam.v1.IAMPolicy.TestIamPermissions", + "post": http_opt_uri, + "body": "*", }, { - 'selector': 'google.example.v1.Example', - } + "selector": "google.example.v1.Example", + }, ] - } + }, } return service_yaml_config @@ -2436,12 +2670,13 @@ def create_service_config_with_all_mixins(http_opt_uri='/v1/{name=examples/*}/*' def test_mixin_api_signatures(): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()),), - ),) + name="example.proto", + package="google.example.v1", + messages=(make_message_pb2(name="ExampleRequest", fields=()),), + ), + ) opts = Options(service_yaml_config=create_service_config_with_all_mixins()) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) res = api_schema.mixin_api_signatures assert res == mixins.MIXINS_MAP @@ -2449,143 +2684,158 @@ def test_mixin_api_signatures(): def test_mixin_http_options(): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()),), - ),) - opts = Options(service_yaml_config={ - 'apis': [ - { - 'name': 'google.cloud.location.Locations', - }, - { - 'name': 'google.longrunning.Operations', - }, - { - 'name': 'google.iam.v1.IAMPolicy', - }, - ], - 'http': { - 'rules': [ - # LRO - { - 'selector': 'google.longrunning.Operations.CancelOperation', - 'post': '/v1/{name=examples/*}/*', - 'body': '*', - }, - { - 'selector': 'google.longrunning.Operations.DeleteOperation', - 'get': '/v1/{name=examples/*}/*', - 'body': '*' - }, + name="example.proto", + package="google.example.v1", + 
messages=(make_message_pb2(name="ExampleRequest", fields=()),), + ), + ) + opts = Options( + service_yaml_config={ + "apis": [ { - 'selector': 'google.longrunning.Operations.WaitOperation', - 'post': '/v1/{name=examples/*}/*', - 'body': '*' + "name": "google.cloud.location.Locations", }, { - 'selector': 'google.longrunning.Operations.GetOperation', - 'post': '/v1/{name=examples/*}/*', - 'body': '*' + "name": "google.longrunning.Operations", }, { - 'selector': 'google.longrunning.Operations.ListOperations', - 'post': '/v1/{name=examples/*}/*', - 'body': '*' + "name": "google.iam.v1.IAMPolicy", }, - ] + ], + "http": { + "rules": [ + # LRO + { + "selector": "google.longrunning.Operations.CancelOperation", + "post": "/v1/{name=examples/*}/*", + "body": "*", + }, + { + "selector": "google.longrunning.Operations.DeleteOperation", + "get": "/v1/{name=examples/*}/*", + "body": "*", + }, + { + "selector": "google.longrunning.Operations.WaitOperation", + "post": "/v1/{name=examples/*}/*", + "body": "*", + }, + { + "selector": "google.longrunning.Operations.GetOperation", + "post": "/v1/{name=examples/*}/*", + "body": "*", + }, + { + "selector": "google.longrunning.Operations.ListOperations", + "post": "/v1/{name=examples/*}/*", + "body": "*", + }, + ] + }, } - }) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + ) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) res = api_schema.mixin_http_options assert res == { - 'ListOperations': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], - 'GetOperation': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], - 'DeleteOperation': [wrappers.MixinHttpRule('get', '/v1/{name=examples/*}/*', '*')], - 'CancelOperation': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], - 'WaitOperation': [wrappers.MixinHttpRule('post', '/v1/{name=examples/*}/*', '*')], + "ListOperations": [ + wrappers.MixinHttpRule("post", "/v1/{name=examples/*}/*", "*") + ], + "GetOperation": [ 
+ wrappers.MixinHttpRule("post", "/v1/{name=examples/*}/*", "*") + ], + "DeleteOperation": [ + wrappers.MixinHttpRule("get", "/v1/{name=examples/*}/*", "*") + ], + "CancelOperation": [ + wrappers.MixinHttpRule("post", "/v1/{name=examples/*}/*", "*") + ], + "WaitOperation": [ + wrappers.MixinHttpRule("post", "/v1/{name=examples/*}/*", "*") + ], } def test_mixin_api_methods_lro(): fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()), - make_message_pb2(name='ExampleResponse', fields=()), - ), - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='FooMethod', - # Input and output types don't matter. - input_type='google.example.v1.ExampleRequest', - output_type='google.example.v1.ExampleResponse', + name="example.proto", + package="google.example.v1", + messages=( + make_message_pb2(name="ExampleRequest", fields=()), + make_message_pb2(name="ExampleResponse", fields=()), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="FooMethod", + # Input and output types don't matter. 
+ input_type="google.example.v1.ExampleRequest", + output_type="google.example.v1.ExampleResponse", + ), ), ), - ),), + ), ), ) r1 = { - 'selector': 'google.longrunning.Operations.CancelOperation', - 'post': '/v1/{name=examples/*}/*', - 'body': '*' + "selector": "google.longrunning.Operations.CancelOperation", + "post": "/v1/{name=examples/*}/*", + "body": "*", } r2 = { - 'selector': 'google.longrunning.Operations.DeleteOperation', - 'get': '/v1/{name=examples/*}/*', - 'body': '*' + "selector": "google.longrunning.Operations.DeleteOperation", + "get": "/v1/{name=examples/*}/*", + "body": "*", } r3 = { - 'selector': 'google.longrunning.Operations.WaitOperation', - 'post': '/v1/{name=examples/*}/*', - 'body': '*' + "selector": "google.longrunning.Operations.WaitOperation", + "post": "/v1/{name=examples/*}/*", + "body": "*", } r4 = { - 'selector': 'google.longrunning.Operations.GetOperation', - 'post': '/v1/{name=examples/*}/*', - 'body': '*' + "selector": "google.longrunning.Operations.GetOperation", + "post": "/v1/{name=examples/*}/*", + "body": "*", } - opts = Options(service_yaml_config={ - 'apis': [ - { - 'name': 'google.longrunning.Operations' - } - ], - 'http': { - 'rules': [r1, r2, r3, r4] + opts = Options( + service_yaml_config={ + "apis": [{"name": "google.longrunning.Operations"}], + "http": {"rules": [r1, r2, r3, r4]}, } - }) + ) - ms = methods_from_service(operations_pb2, 'Operations') + ms = methods_from_service(operations_pb2, "Operations") assert len(ms) == 5 - m1 = ms['CancelOperation'] + m1 = ms["CancelOperation"] m1.options.ClearExtension(annotations_pb2.http) - m1.options.Extensions[annotations_pb2.http].selector = r1['selector'] - m1.options.Extensions[annotations_pb2.http].post = r1['post'] - m1.options.Extensions[annotations_pb2.http].body = r1['body'] - m2 = ms['DeleteOperation'] + m1.options.Extensions[annotations_pb2.http].selector = r1["selector"] + m1.options.Extensions[annotations_pb2.http].post = r1["post"] + 
m1.options.Extensions[annotations_pb2.http].body = r1["body"] + m2 = ms["DeleteOperation"] m2.options.ClearExtension(annotations_pb2.http) - m2.options.Extensions[annotations_pb2.http].selector = r2['selector'] - m2.options.Extensions[annotations_pb2.http].get = r2['get'] - m2.options.Extensions[annotations_pb2.http].body = r2['body'] - m3 = ms['WaitOperation'] + m2.options.Extensions[annotations_pb2.http].selector = r2["selector"] + m2.options.Extensions[annotations_pb2.http].get = r2["get"] + m2.options.Extensions[annotations_pb2.http].body = r2["body"] + m3 = ms["WaitOperation"] m3.options.ClearExtension(annotations_pb2.http) - m3.options.Extensions[annotations_pb2.http].selector = r3['selector'] - m3.options.Extensions[annotations_pb2.http].post = r3['post'] - m3.options.Extensions[annotations_pb2.http].body = r3['body'] - m4 = ms['GetOperation'] + m3.options.Extensions[annotations_pb2.http].selector = r3["selector"] + m3.options.Extensions[annotations_pb2.http].post = r3["post"] + m3.options.Extensions[annotations_pb2.http].body = r3["body"] + m4 = ms["GetOperation"] m4.options.ClearExtension(annotations_pb2.http) - m4.options.Extensions[annotations_pb2.http].selector = r4['selector'] - m4.options.Extensions[annotations_pb2.http].post = r4['post'] - m4.options.Extensions[annotations_pb2.http].body = r4['body'] + m4.options.Extensions[annotations_pb2.http].selector = r4["selector"] + m4.options.Extensions[annotations_pb2.http].post = r4["post"] + m4.options.Extensions[annotations_pb2.http].body = r4["body"] - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) assert api_schema.mixin_api_methods == { - 'CancelOperation': m1, 'DeleteOperation': m2, 'WaitOperation': m3, - 'GetOperation': m4} + "CancelOperation": m1, + "DeleteOperation": m2, + "WaitOperation": m3, + "GetOperation": m4, + } def test_has_iam_mixin(): @@ -2593,18 +2843,17 @@ def test_has_iam_mixin(): # service YAML contains 
`google.iam.v1.IAMPolicy`. fd = ( make_file_pb2( - name='example.proto', - package='google.example.v1', - messages=(make_message_pb2(name='ExampleRequest', fields=()),), - ),) - opts = Options(service_yaml_config={ - 'apis': [ - { - 'name': 'google.iam.v1.IAMPolicy' - } - ], - }) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + name="example.proto", + package="google.example.v1", + messages=(make_message_pb2(name="ExampleRequest", fields=()),), + ), + ) + opts = Options( + service_yaml_config={ + "apis": [{"name": "google.iam.v1.IAMPolicy"}], + } + ) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) assert api_schema.has_iam_mixin @@ -2626,9 +2875,9 @@ def get_file_descriptor_proto_for_tests( """ field_options = descriptor_pb2.FieldOptions() - field_options.Extensions[ - field_info_pb2.field_info - ].format = field_info_pb2.FieldInfo.Format.Value("UUID4") + field_options.Extensions[field_info_pb2.field_info].format = ( + field_info_pb2.FieldInfo.Format.Value("UUID4") + ) fd = ( make_file_pb2( @@ -2638,17 +2887,17 @@ def get_file_descriptor_proto_for_tests( make_message_pb2(name="ExampleRequest", fields=fields), make_message_pb2(name="ExampleResponse", fields=()), make_message_pb2( - name='NestedMessage', + name="NestedMessage", fields=( make_field_pb2( name="squid", options=field_options, type="TYPE_STRING", - number=1 + number=1, ), ), options=descriptor_pb2.MessageOptions(map_entry=True), - ) + ), ), services=( descriptor_pb2.ServiceDescriptorProto( @@ -2735,14 +2984,26 @@ def test_read_empty_python_settings_from_service_yaml(): cli_options = Options(service_yaml_config=service_yaml_config) fd = get_file_descriptor_proto_for_tests(fields=[]) api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) - assert api_schema.all_library_settings["google.example.v1beta1"].python_settings \ + assert ( + api_schema.all_library_settings["google.example.v1beta1"].python_settings == client_pb2.PythonSettings() - assert 
api_schema.all_library_settings["google.example.v1beta1"].python_settings.experimental_features \ + ) + assert ( + api_schema.all_library_settings[ + "google.example.v1beta1" + ].python_settings.experimental_features == client_pb2.PythonSettings.ExperimentalFeatures() - assert api_schema.all_library_settings["google.example.v1beta1"].python_settings.experimental_features.rest_async_io_enabled \ + ) + assert ( + api_schema.all_library_settings[ + "google.example.v1beta1" + ].python_settings.experimental_features.rest_async_io_enabled == False - assert api_schema.all_library_settings[api_schema.naming.proto_package].python_settings \ + ) + assert ( + api_schema.all_library_settings[api_schema.naming.proto_package].python_settings == client_pb2.PythonSettings() + ) def test_python_settings_selective_gapic_nonexistent_method_raises_error(): @@ -2756,17 +3017,17 @@ def test_python_settings_selective_gapic_nonexistent_method_raises_error(): python_settings=client_pb2.PythonSettings( common=client_pb2.CommonLanguageSettings( selective_gapic_generation=client_pb2.SelectiveGapicGeneration( - methods=[ - "google.example.v1beta1.ServiceOne.DoesNotExist"] + methods=["google.example.v1beta1.ServiceOne.DoesNotExist"] ) ) - ) + ), ) ] fd = get_file_descriptor_proto_for_tests(fields=[]) api_schema = api.API.build(fd, "google.example.v1beta1") with pytest.raises( - api.ClientLibrarySettingsError, match="(?i)google.example.v1beta1.ServiceOne.DoesNotExist: Method does not exist" + api.ClientLibrarySettingsError, + match="(?i)google.example.v1beta1.ServiceOne.DoesNotExist: Method does not exist", ): api_schema.enforce_valid_library_settings(client_library_settings) @@ -2785,26 +3046,25 @@ def test_python_settings_selective_gapic_version_mismatch_method_raises_error(): methods=["google.example.v1beta1.ServiceOne.Example1"] ) ) - ) + ), ) ] fd = get_file_descriptor_proto_for_tests(fields=[]) api_schema = api.API.build(fd, "google.example.v1beta1") with pytest.raises( - 
api.ClientLibrarySettingsError, match="(?i)google.example.v1beta1.ServiceOne.Example1: Mismatched version for method." + api.ClientLibrarySettingsError, + match="(?i)google.example.v1beta1.ServiceOne.Example1: Mismatched version for method.", ): api_schema.enforce_valid_library_settings(client_library_settings) def get_service_yaml_for_selective_gapic_tests( - apis: Sequence[str] = ["google.example.v1.FooService"], - methods=["google.example.v1.FooService.GetFoo"], - generate_omitted_as_internal=False, + apis: Sequence[str] = ["google.example.v1.FooService"], + methods=["google.example.v1.FooService.GetFoo"], + generate_omitted_as_internal=False, ) -> Dict[str, Any]: return { - "apis": [ - {"name": api} for api in apis - ], + "apis": [{"name": api} for api in apis], "publishing": { "library_settings": [ { @@ -2816,7 +3076,7 @@ def get_service_yaml_for_selective_gapic_tests( "methods": methods, "generate_omitted_as_internal": generate_omitted_as_internal, } - } + }, }, } ] @@ -2828,59 +3088,82 @@ def test_selective_gapic_api_build(): # Put together a couple of minimal protos. 
fd = ( make_file_pb2( - name='dep.proto', - package='google.dep', - messages=(make_message_pb2(name='ImportedMessage', fields=()),), + name="dep.proto", + package="google.dep", + messages=(make_message_pb2(name="ImportedMessage", fields=()),), ), make_file_pb2( - name='common.proto', - package='google.example.v1.common', + name="common.proto", + package="google.example.v1.common", messages=( - make_message_pb2(name='Bar'), - make_message_pb2(name='Baz'), + make_message_pb2(name="Bar"), + make_message_pb2(name="Baz"), ), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='GetFooRequest', fields=( - make_field_pb2(name='imported_message', number=1, - type_name='.google.dep.ImportedMessage'), - make_field_pb2(name='primitive', number=2, type=1), - make_field_pb2(name='bar', number=1, - type_name='.google.example.v1.common.Bar') - )), - make_message_pb2(name='GetFooResponse', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), - make_message_pb2(name='DeleteFooRequest', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - make_field_pb2(name='baz', number=2, - type_name='.google.example.v1.common.Baz'), - )), - make_message_pb2(name='DeleteFooResponse', fields=( - make_field_pb2(name='success', number=1, type=8), - )), - ), - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + make_message_pb2(name="Foo", fields=()), + make_message_pb2( + name="GetFooRequest", + fields=( + make_field_pb2( + name="imported_message", + number=1, + type_name=".google.dep.ImportedMessage", + ), + make_field_pb2(name="primitive", number=2, type=1), + make_field_pb2( + name="bar", + 
number=1, + type_name=".google.example.v1.common.Bar", + ), ), - descriptor_pb2.MethodDescriptorProto( - name='DeleteFoo', - input_type='google.example.v1.DeleteFooRequest', - output_type='google.example.v1.DeleteFooResponse', + ), + make_message_pb2( + name="GetFooResponse", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), + make_message_pb2( + name="DeleteFooRequest", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + make_field_pb2( + name="baz", + number=2, + type_name=".google.example.v1.common.Baz", + ), + ), + ), + make_message_pb2( + name="DeleteFooResponse", + fields=(make_field_pb2(name="success", number=1, type=8),), + ), + ), + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", + ), + descriptor_pb2.MethodDescriptorProto( + name="DeleteFoo", + input_type="google.example.v1.DeleteFooRequest", + output_type="google.example.v1.DeleteFooResponse", + ), ), ), - ),), + ), ), ) @@ -2890,7 +3173,7 @@ def test_selective_gapic_api_build(): opts = Options(service_yaml_config=service_yaml_config) # Create an API with those protos. - api_schema = api.API.build(fd, package='google.example.v1', opts=opts) + api_schema = api.API.build(fd, package="google.example.v1", opts=opts) # Establish that the API has the data expected. 
assert isinstance(api_schema, api.API) @@ -2899,168 +3182,194 @@ def test_selective_gapic_api_build(): assert len(api_schema.all_protos) == 3 assert len(api_schema.protos) == 2 # foo.proto, common.proto - assert 'google.dep.ImportedMessage' not in api_schema.messages - assert 'google.example.v1.Foo' in api_schema.messages - assert 'google.example.v1.GetFooRequest' in api_schema.messages - assert 'google.example.v1.GetFooResponse' in api_schema.messages - assert 'google.example.v1.DeleteFooRequest' not in api_schema.messages - assert 'google.example.v1.DeleteFooResponse' not in api_schema.messages - assert 'google.example.v1.FooService' in api_schema.services + assert "google.dep.ImportedMessage" not in api_schema.messages + assert "google.example.v1.Foo" in api_schema.messages + assert "google.example.v1.GetFooRequest" in api_schema.messages + assert "google.example.v1.GetFooResponse" in api_schema.messages + assert "google.example.v1.DeleteFooRequest" not in api_schema.messages + assert "google.example.v1.DeleteFooResponse" not in api_schema.messages + assert "google.example.v1.FooService" in api_schema.services assert len(api_schema.enums) == 0 - assert api_schema.protos['foo.proto'].python_modules == ( - imp.Import(package=('google', 'dep'), module='dep_pb2'), - imp.Import(package=('google', 'example_v1', - 'common', 'types'), module='common'), + assert api_schema.protos["foo.proto"].python_modules == ( + imp.Import(package=("google", "dep"), module="dep_pb2"), + imp.Import( + package=("google", "example_v1", "common", "types"), module="common" + ), ) - assert api_schema.requires_package(('google', 'example', 'v1')) + assert api_schema.requires_package(("google", "example", "v1")) - assert not api_schema.requires_package(('elgoog', 'example', 'v1')) + assert not api_schema.requires_package(("elgoog", "example", "v1")) # Establish that the subpackages still work even when they are transitively # partially pruned. 
- assert 'common' in api_schema.subpackages - sub = api_schema.subpackages['common'] + assert "common" in api_schema.subpackages + sub = api_schema.subpackages["common"] assert len(sub.protos) == 1 - assert 'google.example.v1.common.Bar' in sub.messages - assert 'google.example.v1.common.Baz' not in sub.messages + assert "google.example.v1.common.Bar" in sub.messages + assert "google.example.v1.common.Baz" not in sub.messages # Establish that methods have been truncated - assert 'google.example.v1.FooService.GetFoo' in api_schema.all_methods - assert 'google.example.v1.FooService.DeleteFoo' not in api_schema.all_methods + assert "google.example.v1.FooService.GetFoo" in api_schema.all_methods + assert "google.example.v1.FooService.DeleteFoo" not in api_schema.all_methods - foo_service = api_schema.protos['foo.proto'].services['google.example.v1.FooService'] - assert 'DeleteFoo' not in foo_service.methods - assert 'GetFoo' in foo_service.methods + foo_service = api_schema.protos["foo.proto"].services[ + "google.example.v1.FooService" + ] + assert "DeleteFoo" not in foo_service.methods + assert "GetFoo" in foo_service.methods def test_selective_gapic_api_build_with_lro(): # Set up a prior proto that mimics google/protobuf/empty.proto - lro_proto = api.Proto.build(make_file_pb2( - name='operations.proto', package='google.longrunning', - messages=(make_message_pb2(name='Operation'),), - ), file_to_generate=False, naming=make_naming()) + lro_proto = api.Proto.build( + make_file_pb2( + name="operations.proto", + package="google.longrunning", + messages=(make_message_pb2(name="Operation"),), + ), + file_to_generate=False, + naming=make_naming(), + ) # Set up methods with LRO annotations. 
create_foo_method_pb2 = descriptor_pb2.MethodDescriptorProto( - name='AsyncCreateFoo', - input_type='google.example.v1.AsyncCreateFooRequest', - output_type='google.longrunning.Operation', + name="AsyncCreateFoo", + input_type="google.example.v1.AsyncCreateFooRequest", + output_type="google.longrunning.Operation", ) create_foo_method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( operations_pb2.OperationInfo( - response_type='google.example.v1.AsyncCreateFooResponse', - metadata_type='google.example.v1.AsyncCreateFooMetadata', + response_type="google.example.v1.AsyncCreateFooResponse", + metadata_type="google.example.v1.AsyncCreateFooMetadata", ), ) create_bar_method_pb2 = descriptor_pb2.MethodDescriptorProto( - name='AsyncCreateBar', - input_type='google.example.v1.AsyncCreateBarRequest', - output_type='google.longrunning.Operation', + name="AsyncCreateBar", + input_type="google.example.v1.AsyncCreateBarRequest", + output_type="google.longrunning.Operation", ) create_bar_method_pb2.options.Extensions[operations_pb2.operation_info].MergeFrom( operations_pb2.OperationInfo( - response_type='google.example.v1.AsyncCreateBarResponse', - metadata_type='google.example.v1.AsyncCreateBarMetadata', + response_type="google.example.v1.AsyncCreateBarResponse", + metadata_type="google.example.v1.AsyncCreateBarMetadata", ), ) # Set up the service with an RPC. 
fd = ( make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='Bar', fields=()), - make_message_pb2(name='AsyncCreateFooRequest', fields=()), - make_message_pb2(name='AsyncCreateFooResponse', fields=()), - make_message_pb2(name='AsyncCreateFooMetadata', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), - make_message_pb2(name='AsyncCreateBarRequest', fields=()), - make_message_pb2(name='AsyncCreateBarResponse', fields=()), - make_message_pb2(name='AsyncCreateBarMetadata', fields=( - make_field_pb2(name='bar', number=1, - type_name='.google.example.v1.Bar'), - )), + make_message_pb2(name="Foo", fields=()), + make_message_pb2(name="Bar", fields=()), + make_message_pb2(name="AsyncCreateFooRequest", fields=()), + make_message_pb2(name="AsyncCreateFooResponse", fields=()), + make_message_pb2( + name="AsyncCreateFooMetadata", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), + make_message_pb2(name="AsyncCreateBarRequest", fields=()), + make_message_pb2(name="AsyncCreateBarResponse", fields=()), + make_message_pb2( + name="AsyncCreateBarMetadata", + fields=( + make_field_pb2( + name="bar", number=1, type_name=".google.example.v1.Bar" + ), + ), + ), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='FooService', + name="FooService", method=( create_foo_method_pb2, create_bar_method_pb2, ), ), - ) + ), ), ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - apis=['google.example.v1.FooService'], - methods=['google.example.v1.FooService.AsyncCreateFoo'] + apis=["google.example.v1.FooService"], + methods=["google.example.v1.FooService.AsyncCreateFoo"], ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fd, - 'google.example.v1', - opts=opts, - prior_protos={ - 
'google/longrunning/operations.proto': lro_proto, - }) + api_schema = api.API.build( + fd, + "google.example.v1", + opts=opts, + prior_protos={ + "google/longrunning/operations.proto": lro_proto, + }, + ) - assert 'google.example.v1.Foo' in api_schema.messages - assert 'google.example.v1.AsyncCreateFooRequest' in api_schema.messages - assert 'google.example.v1.AsyncCreateFooResponse' in api_schema.messages - assert 'google.example.v1.AsyncCreateFooMetadata' in api_schema.messages + assert "google.example.v1.Foo" in api_schema.messages + assert "google.example.v1.AsyncCreateFooRequest" in api_schema.messages + assert "google.example.v1.AsyncCreateFooResponse" in api_schema.messages + assert "google.example.v1.AsyncCreateFooMetadata" in api_schema.messages - assert 'google.example.v1.Bar' not in api_schema.messages - assert 'google.example.v1.AsyncCreateBarRequest' not in api_schema.messages - assert 'google.example.v1.AsyncCreateBarResponse' not in api_schema.messages - assert 'google.example.v1.AsyncCreateBarMetadata' not in api_schema.messages + assert "google.example.v1.Bar" not in api_schema.messages + assert "google.example.v1.AsyncCreateBarRequest" not in api_schema.messages + assert "google.example.v1.AsyncCreateBarResponse" not in api_schema.messages + assert "google.example.v1.AsyncCreateBarMetadata" not in api_schema.messages def test_selective_gapic_api_build_remove_unnecessary_services(): # Put together a couple of minimal protos. 
fd = ( make_file_pb2( - name='foobar.proto', - package='google.example.v1', + name="foobar.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='Bar', fields=()), - make_message_pb2(name='GetFooRequest', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), - make_message_pb2(name='GetFooResponse', fields=()), - make_message_pb2(name='GetBarRequest', fields=( - make_field_pb2(name='bar', number=1, - type_name='.google.example.v1.Bar'), - )), - make_message_pb2(name='GetBarResponse', fields=()), + make_message_pb2(name="Foo", fields=()), + make_message_pb2(name="Bar", fields=()), + make_message_pb2( + name="GetFooRequest", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), + make_message_pb2(name="GetFooResponse", fields=()), + make_message_pb2( + name="GetBarRequest", + fields=( + make_field_pb2( + name="bar", number=1, type_name=".google.example.v1.Bar" + ), + ), + ), + make_message_pb2(name="GetBarResponse", fields=()), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='FooService', + name="FooService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", ), ), ), descriptor_pb2.ServiceDescriptorProto( - name='BarService', + name="BarService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetBar', - input_type='google.example.v1.GetBarRequest', - output_type='google.example.v1.GetBarResponse', + name="GetBar", + input_type="google.example.v1.GetBarRequest", + output_type="google.example.v1.GetBarResponse", ), ), ), @@ -3069,84 +3378,92 @@ def test_selective_gapic_api_build_remove_unnecessary_services(): ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - 
apis=['google.example.v1.FooService', 'google.example.v1.BarService'], - methods=['google.example.v1.FooService.GetFoo'] + apis=["google.example.v1.FooService", "google.example.v1.BarService"], + methods=["google.example.v1.FooService.GetFoo"], ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) - assert 'google.example.v1.Foo' in api_schema.messages - assert 'google.example.v1.GetFooRequest' in api_schema.messages - assert 'google.example.v1.GetFooResponse' in api_schema.messages + assert "google.example.v1.Foo" in api_schema.messages + assert "google.example.v1.GetFooRequest" in api_schema.messages + assert "google.example.v1.GetFooResponse" in api_schema.messages - assert 'google.example.v1.Bar' not in api_schema.messages - assert 'google.example.v1.GetBarRequest' not in api_schema.messages - assert 'google.example.v1.GetBarResponse' not in api_schema.messages + assert "google.example.v1.Bar" not in api_schema.messages + assert "google.example.v1.GetBarRequest" not in api_schema.messages + assert "google.example.v1.GetBarResponse" not in api_schema.messages - assert 'google.example.v1.FooService' in api_schema.services - assert 'google.example.v1.BarService' not in api_schema.services + assert "google.example.v1.FooService" in api_schema.services + assert "google.example.v1.BarService" not in api_schema.services def test_selective_gapic_api_build_remove_unnecessary_proto_files(): fd = ( make_file_pb2( - name='foo_common.proto', - package='google.example.v1.foo_common', - messages=( - make_message_pb2(name='Foo'), - ), + name="foo_common.proto", + package="google.example.v1.foo_common", + messages=(make_message_pb2(name="Foo"),), ), make_file_pb2( - name='bar_common.proto', - package='google.example.v1.bar_common', - messages=( - make_message_pb2(name='Bar'), - ), + name="bar_common.proto", + 
package="google.example.v1.bar_common", + messages=(make_message_pb2(name="Bar"),), ), make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=()), - make_message_pb2(name='GetFooRequest', fields=( - make_field_pb2( - name='foo', number=1, type_name='.google.example.v1.foo_common.Foo'), - )), - make_message_pb2(name='GetFooResponse', fields=()), + make_message_pb2(name="Foo", fields=()), + make_message_pb2( + name="GetFooRequest", + fields=( + make_field_pb2( + name="foo", + number=1, + type_name=".google.example.v1.foo_common.Foo", + ), + ), + ), + make_message_pb2(name="GetFooResponse", fields=()), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='FooService', + name="FooService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", ), ), ), ), ), make_file_pb2( - name='bar.proto', - package='google.example.v1', + name="bar.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Bar', fields=()), - make_message_pb2(name='GetBarRequest', fields=( - make_field_pb2( - name='bar', number=1, type_name='.google.example.v1.bar_common.Bar'), - )), - make_message_pb2(name='GetBarResponse', fields=()), + make_message_pb2(name="Bar", fields=()), + make_message_pb2( + name="GetBarRequest", + fields=( + make_field_pb2( + name="bar", + number=1, + type_name=".google.example.v1.bar_common.Bar", + ), + ), + ), + make_message_pb2(name="GetBarResponse", fields=()), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='BarService', + name="BarService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetBar', - input_type='google.example.v1.GetBarRequest', - output_type='google.example.v1.GetBarResponse', + 
name="GetBar", + input_type="google.example.v1.GetBarRequest", + output_type="google.example.v1.GetBarResponse", ), ), ), @@ -3155,88 +3472,102 @@ def test_selective_gapic_api_build_remove_unnecessary_proto_files(): ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - apis=['google.example.v1.FooService', 'google.example.v1.BarService'], - methods=['google.example.v1.FooService.GetFoo'] + apis=["google.example.v1.FooService", "google.example.v1.BarService"], + methods=["google.example.v1.FooService.GetFoo"], ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) - assert 'google.example.v1.foo_common.Foo' in api_schema.messages - assert 'google.example.v1.GetFooRequest' in api_schema.messages - assert 'google.example.v1.GetFooResponse' in api_schema.messages + assert "google.example.v1.foo_common.Foo" in api_schema.messages + assert "google.example.v1.GetFooRequest" in api_schema.messages + assert "google.example.v1.GetFooResponse" in api_schema.messages - assert 'google.example.v1.bar_common.Bar' not in api_schema.messages - assert 'google.example.v1.GetBarRequest' not in api_schema.messages - assert 'google.example.v1.GetBarResponse' not in api_schema.messages + assert "google.example.v1.bar_common.Bar" not in api_schema.messages + assert "google.example.v1.GetBarRequest" not in api_schema.messages + assert "google.example.v1.GetBarResponse" not in api_schema.messages - assert 'google.example.v1.FooService' in api_schema.services - assert 'google.example.v1.BarService' not in api_schema.services + assert "google.example.v1.FooService" in api_schema.services + assert "google.example.v1.BarService" not in api_schema.services - assert 'foo.proto' in api_schema.protos - assert 'foo_common.proto' in api_schema.protos - assert 'bar.proto' not in api_schema.protos - assert 'bar_common.proto' not in api_schema.protos + 
assert "foo.proto" in api_schema.protos + assert "foo_common.proto" in api_schema.protos + assert "bar.proto" not in api_schema.protos + assert "bar_common.proto" not in api_schema.protos # Check that the sub-packages that have been completely pruned are excluded from generation, # but the ones that have only been partially pruned will still be appropriately included. - assert 'foo_common' in api_schema.subpackages - sub = api_schema.subpackages['foo_common'] + assert "foo_common" in api_schema.subpackages + sub = api_schema.subpackages["foo_common"] assert len(sub.protos) == 1 - assert 'google.example.v1.foo_common.Foo' in sub.messages - assert 'bar_common' not in api_schema.subpackages + assert "google.example.v1.foo_common.Foo" in sub.messages + assert "bar_common" not in api_schema.subpackages def test_selective_gapic_api_build_with_enums(): fd = ( make_file_pb2( - name='foobar.proto', - package='google.example.v1', + name="foobar.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Foo', fields=( - make_field_pb2(name='status', number=1, type=14, - type_name='.google.example.v1.FooStatus'), - )), - make_message_pb2(name='Bar', fields=( - make_field_pb2(name='status', number=1, type=14, - type_name='.google.example.v1.BarStatus'), - )), - make_message_pb2(name='GetFooRequest', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), - make_message_pb2(name='GetFooResponse', fields=()), - make_message_pb2(name='GetBarRequest', fields=( - make_field_pb2(name='bar', number=1, - type_name='.google.example.v1.Bar'), - )), - make_message_pb2(name='GetBarResponse', fields=()), - ), - enums=( - make_enum_pb2( - 'FooStatus', - 'YES', - 'NO' + make_message_pb2( + name="Foo", + fields=( + make_field_pb2( + name="status", + number=1, + type=14, + type_name=".google.example.v1.FooStatus", + ), + ), ), - make_enum_pb2( - 'BarStatus', - 'YES', - 'NO' + make_message_pb2( + name="Bar", + fields=( + make_field_pb2( + 
name="status", + number=1, + type=14, + type_name=".google.example.v1.BarStatus", + ), + ), ), + make_message_pb2( + name="GetFooRequest", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" + ), + ), + ), + make_message_pb2(name="GetFooResponse", fields=()), + make_message_pb2( + name="GetBarRequest", + fields=( + make_field_pb2( + name="bar", number=1, type_name=".google.example.v1.Bar" + ), + ), + ), + make_message_pb2(name="GetBarResponse", fields=()), + ), + enums=( + make_enum_pb2("FooStatus", "YES", "NO"), + make_enum_pb2("BarStatus", "YES", "NO"), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='FooService', + name="FooService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", ), descriptor_pb2.MethodDescriptorProto( - name='GetBar', - input_type='google.example.v1.GetBarRequest', - output_type='google.example.v1.GetBarResponse', + name="GetBar", + input_type="google.example.v1.GetBarRequest", + output_type="google.example.v1.GetBarResponse", ), ), ), @@ -3245,16 +3576,16 @@ def test_selective_gapic_api_build_with_enums(): ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - apis=['google.example.v1.FooService'], - methods=['google.example.v1.FooService.GetFoo'] + apis=["google.example.v1.FooService"], + methods=["google.example.v1.FooService.GetFoo"], ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) - assert 'google.example.v1.FooStatus' in api_schema.enums - assert 'google.example.v1.BarStatus' not in api_schema.enums - assert 'google.example.v1.FooStatus' in api_schema.top_level_enums + assert "google.example.v1.FooStatus" in 
api_schema.enums + assert "google.example.v1.BarStatus" not in api_schema.enums + assert "google.example.v1.FooStatus" in api_schema.top_level_enums def test_selective_gapic_api_build_with_nested_fields(): @@ -3262,73 +3593,75 @@ def test_selective_gapic_api_build_with_nested_fields(): # any nested messages they may contain are included or excluded appropriately. fd = ( make_file_pb2( - name='foobar.proto', - package='google.example.v1', + name="foobar.proto", + package="google.example.v1", messages=( make_message_pb2( - name='Foo', + name="Foo", nested_type=( make_message_pb2( - name='Bar', + name="Bar", fields=( make_field_pb2( - name='baz', number=1, type_name='.google.example.v1.Baz'), - ) + name="baz", + number=1, + type_name=".google.example.v1.Baz", + ), + ), ), ), - enum_type=( - make_enum_pb2( - 'FooStatus', - 'YES', - 'NO' - ), - ) + enum_type=(make_enum_pb2("FooStatus", "YES", "NO"),), ), make_message_pb2( - name='Spam', + name="Spam", nested_type=( make_message_pb2( - name='Ham', + name="Ham", fields=( make_field_pb2( - name='eggs', number=1, type_name='.google.example.v1.Eggs'), - ) + name="eggs", + number=1, + type_name=".google.example.v1.Eggs", + ), + ), + ), + ), + enum_type=(make_enum_pb2("SpamStatus", "YES", "NO"),), + ), + make_message_pb2(name="Baz"), + make_message_pb2(name="Eggs"), + make_message_pb2( + name="GetFooRequest", + fields=( + make_field_pb2( + name="foo", number=1, type_name=".google.example.v1.Foo" ), ), - enum_type=( - make_enum_pb2( - 'SpamStatus', - 'YES', - 'NO' + ), + make_message_pb2(name="GetFooResponse", fields=()), + make_message_pb2( + name="GetBarRequest", + fields=( + make_field_pb2( + name="spam", number=1, type_name=".google.example.v1.Spam" ), - ) + ), ), - make_message_pb2(name='Baz'), - make_message_pb2(name='Eggs'), - make_message_pb2(name='GetFooRequest', fields=( - make_field_pb2(name='foo', number=1, - type_name='.google.example.v1.Foo'), - )), - make_message_pb2(name='GetFooResponse', fields=()), - 
make_message_pb2(name='GetBarRequest', fields=( - make_field_pb2(name='spam', number=1, - type_name='.google.example.v1.Spam'), - )), - make_message_pb2(name='GetBarResponse', fields=()), + make_message_pb2(name="GetBarResponse", fields=()), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='FooService', + name="FooService", method=( descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", ), descriptor_pb2.MethodDescriptorProto( - name='GetBar', - input_type='google.example.v1.GetBarRequest', - output_type='google.example.v1.GetBarResponse', + name="GetBar", + input_type="google.example.v1.GetBarRequest", + output_type="google.example.v1.GetBarResponse", ), ), ), @@ -3337,28 +3670,28 @@ def test_selective_gapic_api_build_with_nested_fields(): ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - apis=['google.example.v1.FooService'], - methods=['google.example.v1.FooService.GetFoo'] + apis=["google.example.v1.FooService"], + methods=["google.example.v1.FooService.GetFoo"], ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fd, 'google.example.v1', opts=opts) + api_schema = api.API.build(fd, "google.example.v1", opts=opts) - assert 'google.example.v1.Baz' in api_schema.messages - assert 'google.example.v1.Foo.FooStatus' in api_schema.enums - assert 'google.example.v1.Foo.Bar' in api_schema.messages + assert "google.example.v1.Baz" in api_schema.messages + assert "google.example.v1.Foo.FooStatus" in api_schema.enums + assert "google.example.v1.Foo.Bar" in api_schema.messages # Check that we can exclude nested types as well - assert 'google.example.v1.Spam' not in api_schema.messages - assert 'google.example.v1.Spam.SpamStatus' not in api_schema.enums - assert 'google.example.v1.Spam.Ham' not in 
api_schema.messages + assert "google.example.v1.Spam" not in api_schema.messages + assert "google.example.v1.Spam.SpamStatus" not in api_schema.enums + assert "google.example.v1.Spam.Ham" not in api_schema.messages @pytest.mark.parametrize("reference_attr", ["type", "child_type"]) def test_selective_gapic_api_build_with_resources(reference_attr): test_input_names = [ - ('foo.bar/Foo', 'Foo', 'FooDep', 'GetFooRequest', 'GetFooResponse'), - ('foo.bar/Bar', 'Bar', 'BarDep', 'GetBarRequest', 'GetBarResponse'), + ("foo.bar/Foo", "Foo", "FooDep", "GetFooRequest", "GetFooResponse"), + ("foo.bar/Bar", "Bar", "BarDep", "GetBarRequest", "GetBarResponse"), ] messages = [] @@ -3377,28 +3710,30 @@ def test_selective_gapic_api_build_with_resources(reference_attr): name=message_name, fields=( make_field_pb2( - name="dep", number=1, type_name=f".google.example.v1.{message_dep_name}"), + name="dep", + number=1, + type_name=f".google.example.v1.{message_dep_name}", + ), ), ) request_message = make_message_pb2( name=request_message_name, - fields=( - make_field_pb2(name="thing", number=1, type=9), - ), + fields=(make_field_pb2(name="thing", number=1, type=9),), ) response_message = make_message_pb2(name=response_message_name) # Set up the resource - resource_message_opts = resource_message.options.Extensions[resource_pb2.resource] + resource_message_opts = resource_message.options.Extensions[ + resource_pb2.resource + ] resource_message_opts.type = resource_type - resource_message_opts.pattern.append( - "octopus/{octopus}/squid/{squid}") + resource_message_opts.pattern.append("octopus/{octopus}/squid/{squid}") # Set up the reference - request_message_thing_field_opts = \ - request_message.field[0].options.Extensions[resource_pb2.resource_reference] - setattr(request_message_thing_field_opts, - reference_attr, resource_type) + request_message_thing_field_opts = request_message.field[0].options.Extensions[ + resource_pb2.resource_reference + ] + 
setattr(request_message_thing_field_opts, reference_attr, resource_type) # Add to messages messages.append(resource_message_dep) @@ -3408,48 +3743,50 @@ def test_selective_gapic_api_build_with_resources(reference_attr): fds = ( make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=messages, - services=(descriptor_pb2.ServiceDescriptorProto( - name='FooService', - method=( - descriptor_pb2.MethodDescriptorProto( - name='GetFoo', - input_type='google.example.v1.GetFooRequest', - output_type='google.example.v1.GetFooResponse', - ), - descriptor_pb2.MethodDescriptorProto( - name='GetBar', - input_type='google.example.v1.GetBarRequest', - output_type='google.example.v1.GetBarResponse', + services=( + descriptor_pb2.ServiceDescriptorProto( + name="FooService", + method=( + descriptor_pb2.MethodDescriptorProto( + name="GetFoo", + input_type="google.example.v1.GetFooRequest", + output_type="google.example.v1.GetFooResponse", + ), + descriptor_pb2.MethodDescriptorProto( + name="GetBar", + input_type="google.example.v1.GetBarRequest", + output_type="google.example.v1.GetBarResponse", + ), ), ), - ),), + ), ), ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - methods=['google.example.v1.FooService.GetFoo'] + methods=["google.example.v1.FooService.GetFoo"] ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fds, package='google.example.v1', opts=opts) + api_schema = api.API.build(fds, package="google.example.v1", opts=opts) - assert 'google.example.v1.Foo' in api_schema.messages - assert 'google.example.v1.FooDep' in api_schema.messages - assert 'google.example.v1.GetFooRequest' in api_schema.messages - assert 'google.example.v1.GetFooResponse' in api_schema.messages + assert "google.example.v1.Foo" in api_schema.messages + assert "google.example.v1.FooDep" in api_schema.messages + assert "google.example.v1.GetFooRequest" in api_schema.messages + 
assert "google.example.v1.GetFooResponse" in api_schema.messages - assert 'google.example.v1.Bar' not in api_schema.messages - assert 'google.example.v1.BarDep' not in api_schema.messages - assert 'google.example.v1.GetBarRequest' not in api_schema.messages - assert 'google.example.v1.GetBarResponse' not in api_schema.messages + assert "google.example.v1.Bar" not in api_schema.messages + assert "google.example.v1.BarDep" not in api_schema.messages + assert "google.example.v1.GetBarRequest" not in api_schema.messages + assert "google.example.v1.GetBarResponse" not in api_schema.messages # Ensure we're also pruning resource messages for the files - resource_messages = api_schema.protos['foo.proto'].resource_messages - assert 'foo.bar/Foo' in resource_messages - assert 'foo.bar/Bar' not in resource_messages + resource_messages = api_schema.protos["foo.proto"].resource_messages + assert "foo.bar/Foo" in resource_messages + assert "foo.bar/Bar" not in resource_messages def test_selective_gapic_api_build_extended_lro(): @@ -3464,7 +3801,9 @@ def make_initiate_options(service_name): T = descriptor_pb2.FieldDescriptorProto.Type operation_fields = tuple( make_field_pb2(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ) for f in operation_fields: options = descriptor_pb2.FieldOptions() @@ -3473,59 +3812,59 @@ def make_initiate_options(service_name): fds = ( make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='Operation', fields=operation_fields), - make_message_pb2(name='CreateFooRequest'), - make_message_pb2(name='GetFooOperationRequest'), - make_message_pb2(name='CreateBarRequest'), - make_message_pb2(name='GetBarOperationRequest'), - make_message_pb2(name='PoorlyOrganizedMethodRequest'), - 
make_message_pb2(name='PoorlyOrganizedMethodResponse') + make_message_pb2(name="Operation", fields=operation_fields), + make_message_pb2(name="CreateFooRequest"), + make_message_pb2(name="GetFooOperationRequest"), + make_message_pb2(name="CreateBarRequest"), + make_message_pb2(name="GetBarOperationRequest"), + make_message_pb2(name="PoorlyOrganizedMethodRequest"), + make_message_pb2(name="PoorlyOrganizedMethodResponse"), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='FooOpsService', + name="FooOpsService", method=( descriptor_pb2.MethodDescriptorProto( - name='Get', - input_type='google.example.v1.GetFooOperationRequest', - output_type='google.example.v1.Operation', + name="Get", + input_type="google.example.v1.GetFooOperationRequest", + output_type="google.example.v1.Operation", options=polling_method_options, ), descriptor_pb2.MethodDescriptorProto( - name='PoorlyOrganizedMethod', - input_type='google.example.v1.PoorlyOrganizedMethodRequest', - output_type='google.example.v1.PoorlyOrganizedMethodResponse', + name="PoorlyOrganizedMethod", + input_type="google.example.v1.PoorlyOrganizedMethodRequest", + output_type="google.example.v1.PoorlyOrganizedMethodResponse", ), ), ), descriptor_pb2.ServiceDescriptorProto( - name='BarOpsService', + name="BarOpsService", method=( descriptor_pb2.MethodDescriptorProto( - name='Get', - input_type='google.example.v1.GetBarOperationRequest', - output_type='google.example.v1.Operation', + name="Get", + input_type="google.example.v1.GetBarOperationRequest", + output_type="google.example.v1.Operation", options=polling_method_options, ), ), ), descriptor_pb2.ServiceDescriptorProto( - name='BasicService', + name="BasicService", method=( descriptor_pb2.MethodDescriptorProto( - name='CreateFoo', - input_type='google.example.v1.CreateFooRequest', - output_type='google.example.v1.Operation', - options=make_initiate_options('FooOpsService'), + name="CreateFoo", + input_type="google.example.v1.CreateFooRequest", + 
output_type="google.example.v1.Operation", + options=make_initiate_options("FooOpsService"), ), descriptor_pb2.MethodDescriptorProto( - name='CreateBar', - input_type='google.example.v1.CreateBarRequest', - output_type='google.example.v1.Operation', - options=make_initiate_options('BarOpsService'), + name="CreateBar", + input_type="google.example.v1.CreateBarRequest", + output_type="google.example.v1.Operation", + options=make_initiate_options("BarOpsService"), ), ), ), @@ -3535,64 +3874,67 @@ def make_initiate_options(service_name): service_yaml_config = get_service_yaml_for_selective_gapic_tests( apis=[ - 'google.example.v1.FooOpsService', - 'google.example.v1.BarOpsService', - 'google.example.v1.BasicService' + "google.example.v1.FooOpsService", + "google.example.v1.BarOpsService", + "google.example.v1.BasicService", ], methods=[ - 'google.example.v1.BasicService.CreateFoo', - ] + "google.example.v1.BasicService.CreateFoo", + ], ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fds, 'google.example.v1', opts=opts) + api_schema = api.API.build(fds, "google.example.v1", opts=opts) - assert 'google.example.v1.BasicService' in api_schema.services - assert 'google.example.v1.FooOpsService' in api_schema.services - assert 'google.example.v1.FooOpsService.Get' in api_schema.all_methods - assert 'google.example.v1.Operation' in api_schema.messages - assert 'google.example.v1.CreateFooRequest' in api_schema.messages - assert 'google.example.v1.GetFooOperationRequest' in api_schema.messages + assert "google.example.v1.BasicService" in api_schema.services + assert "google.example.v1.FooOpsService" in api_schema.services + assert "google.example.v1.FooOpsService.Get" in api_schema.all_methods + assert "google.example.v1.Operation" in api_schema.messages + assert "google.example.v1.CreateFooRequest" in api_schema.messages + assert "google.example.v1.GetFooOperationRequest" in api_schema.messages - assert 'google.example.v1.BarOpsService' 
not in api_schema.services - assert 'google.example.v1.GetBarOperationRequest' not in api_schema.messages - assert 'google.example.v1.CreateBarRequest' not in api_schema.messages - assert 'google.example.v1.FooOpsService.PoorlyOrganizedMethod' not in api_schema.all_methods + assert "google.example.v1.BarOpsService" not in api_schema.services + assert "google.example.v1.GetBarOperationRequest" not in api_schema.messages + assert "google.example.v1.CreateBarRequest" not in api_schema.messages + assert ( + "google.example.v1.FooOpsService.PoorlyOrganizedMethod" + not in api_schema.all_methods + ) def test_selective_gapic_api_build_generate_omitted_as_internal(): # Put together a couple of minimal protos. fds = ( make_file_pb2( - name='foo.proto', - package='google.example.v1', + name="foo.proto", + package="google.example.v1", messages=( - make_message_pb2(name='GenericRequest', fields=()), - make_message_pb2(name='GenericResponse', fields=()), + make_message_pb2(name="GenericRequest", fields=()), + make_message_pb2(name="GenericResponse", fields=()), ), services=( descriptor_pb2.ServiceDescriptorProto( - name='ServiceOne', + name="ServiceOne", method=( descriptor_pb2.MethodDescriptorProto( - name='InternalMethod', - input_type='google.example.v1.GenericRequest', - output_type='google.example.v1.GenericResponse', + name="InternalMethod", + input_type="google.example.v1.GenericRequest", + output_type="google.example.v1.GenericResponse", ), descriptor_pb2.MethodDescriptorProto( - name='NonInternalMethod', - input_type='google.example.v1.GenericRequest', - output_type='google.example.v1.GenericResponse', + name="NonInternalMethod", + input_type="google.example.v1.GenericRequest", + output_type="google.example.v1.GenericResponse", ), ), ), descriptor_pb2.ServiceDescriptorProto( - name='ServiceTwo', + name="ServiceTwo", method=( descriptor_pb2.MethodDescriptorProto( - name='NonInternalMethod', - input_type='google.example.v1.GenericRequest', - 
output_type='google.example.v1.GenericResponse', + name="NonInternalMethod", + input_type="google.example.v1.GenericRequest", + output_type="google.example.v1.GenericResponse", ), ), ), @@ -3601,32 +3943,47 @@ def test_selective_gapic_api_build_generate_omitted_as_internal(): ) service_yaml_config = get_service_yaml_for_selective_gapic_tests( - apis=['google.example.v1.ServiceOne', 'google.example.v1.ServiceTwo'], + apis=["google.example.v1.ServiceOne", "google.example.v1.ServiceTwo"], methods=[ - 'google.example.v1.ServiceOne.NonInternalMethod', - 'google.example.v1.ServiceTwo.NonInternalMethod' + "google.example.v1.ServiceOne.NonInternalMethod", + "google.example.v1.ServiceTwo.NonInternalMethod", ], - generate_omitted_as_internal=True + generate_omitted_as_internal=True, ) opts = Options(service_yaml_config=service_yaml_config) - api_schema = api.API.build(fds, 'google.example.v1', opts=opts) - - assert 'google.example.v1.ServiceOne' in api_schema.services - assert 'google.example.v1.ServiceTwo' in api_schema.services - assert 'google.example.v1.ServiceOne.InternalMethod' not in api_schema.all_methods - assert 'google.example.v1.ServiceOne._InternalMethod' in api_schema.all_methods + api_schema = api.API.build(fds, "google.example.v1", opts=opts) + + assert "google.example.v1.ServiceOne" in api_schema.services + assert "google.example.v1.ServiceTwo" in api_schema.services + assert "google.example.v1.ServiceOne.InternalMethod" not in api_schema.all_methods + assert "google.example.v1.ServiceOne._InternalMethod" in api_schema.all_methods + + assert api_schema.services["google.example.v1.ServiceOne"].is_internal + assert not api_schema.services["google.example.v1.ServiceTwo"].is_internal + assert api_schema.all_methods[ + "google.example.v1.ServiceOne._InternalMethod" + ].is_internal + assert not api_schema.all_methods[ + "google.example.v1.ServiceOne.NonInternalMethod" + ].is_internal + assert not api_schema.all_methods[ + 
"google.example.v1.ServiceTwo.NonInternalMethod" + ].is_internal - assert api_schema.services['google.example.v1.ServiceOne'].is_internal - assert not api_schema.services['google.example.v1.ServiceTwo'].is_internal - assert api_schema.all_methods['google.example.v1.ServiceOne._InternalMethod'].is_internal - assert not api_schema.all_methods['google.example.v1.ServiceOne.NonInternalMethod'].is_internal - assert not api_schema.all_methods['google.example.v1.ServiceTwo.NonInternalMethod'].is_internal - - assert api_schema.services['google.example.v1.ServiceOne'].client_name == 'BaseServiceOneClient' - assert api_schema.services['google.example.v1.ServiceOne'].async_client_name == 'BaseServiceOneAsyncClient' - assert api_schema.all_methods['google.example.v1.ServiceOne._InternalMethod'].name == '_InternalMethod' + assert ( + api_schema.services["google.example.v1.ServiceOne"].client_name + == "BaseServiceOneClient" + ) + assert ( + api_schema.services["google.example.v1.ServiceOne"].async_client_name + == "BaseServiceOneAsyncClient" + ) + assert ( + api_schema.all_methods["google.example.v1.ServiceOne._InternalMethod"].name + == "_InternalMethod" + ) def test_read_empty_python_settings_from_service_yaml(): @@ -3648,8 +4005,10 @@ def test_read_empty_python_settings_from_service_yaml(): cli_options = Options(service_yaml_config=service_yaml_config) fd = get_file_descriptor_proto_for_tests(fields=[]) api_schema = api.API.build(fd, "google.example.v1beta1", opts=cli_options) - assert api_schema.all_library_settings[api_schema.naming.proto_package].python_settings \ + assert ( + api_schema.all_library_settings[api_schema.naming.proto_package].python_settings == client_pb2.PythonSettings() + ) def test_python_settings_duplicate_version_raises_error(): @@ -3667,9 +4026,7 @@ def test_python_settings_duplicate_version_raises_error(): version="google.example.v1beta1", ), ] - with pytest.raises( - api.ClientLibrarySettingsError, match="(?i)duplicate version" - ): + with 
pytest.raises(api.ClientLibrarySettingsError, match="(?i)duplicate version"): api_schema.enforce_valid_library_settings(clientlibrarysettings) @@ -3697,9 +4054,9 @@ def test_read_method_settings_from_service_yaml(): } cli_options = Options(service_yaml_config=service_yaml_config) field_options = descriptor_pb2.FieldOptions() - field_options.Extensions[ - field_info_pb2.field_info - ].format = field_info_pb2.FieldInfo.Format.Value("UUID4") + field_options.Extensions[field_info_pb2.field_info].format = ( + field_info_pb2.FieldInfo.Format.Value("UUID4") + ) squid = make_field_pb2( name="squid", type="TYPE_STRING", options=field_options, number=1 @@ -3734,9 +4091,7 @@ def test_method_settings_duplicate_selector_raises_error(): selector="google.example.v1beta1.ServiceOne.Example1", ), ] - with pytest.raises( - api.MethodSettingsError, match="(?i)duplicate selector" - ): + with pytest.raises(api.MethodSettingsError, match="(?i)duplicate selector"): api_schema.enforce_valid_method_settings(methodsettings) @@ -3764,14 +4119,8 @@ def test_method_settings_invalid_selector_raises_error(): error_yaml = yaml.safe_load(ex.value.args[0]) - assert re.match( - ".*not found.*", - error_yaml[method_example1][0].lower() - ) - assert re.match( - ".*not found.*", - error_yaml[method_example2][0].lower() - ) + assert re.match(".*not found.*", error_yaml[method_example1][0].lower()) + assert re.match(".*not found.*", error_yaml[method_example2][0].lower()) def test_method_settings_unsupported_auto_populated_field_type_raises_error(): @@ -3818,15 +4167,13 @@ def test_method_settings_auto_populated_nested_field_raises_error(): """ octopus = make_field( - name='octopus', - type_name='google.example.v1beta1.NestedMessage', + name="octopus", + type_name="google.example.v1beta1.NestedMessage", label=3, - type='TYPE_MESSAGE', + type="TYPE_MESSAGE", ) - fd = get_file_descriptor_proto_for_tests( - fields=[octopus.field_pb] - ) + fd = get_file_descriptor_proto_for_tests(fields=[octopus.field_pb]) 
api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -3843,9 +4190,7 @@ def test_method_settings_auto_populated_field_client_streaming_rpc_raises_error( Test that `MethodSettingsError` is raised when the selector in `client_pb2.MethodSettings.selector` maps to a method which uses client streaming. """ - fd = get_file_descriptor_proto_for_tests( - client_streaming=True - ) + fd = get_file_descriptor_proto_for_tests(client_streaming=True) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -3853,9 +4198,7 @@ def test_method_settings_auto_populated_field_client_streaming_rpc_raises_error( auto_populated_fields=["squid"], ), ] - with pytest.raises( - api.MethodSettingsError, match="(?i)not a unary method" - ): + with pytest.raises(api.MethodSettingsError, match="(?i)not a unary method"): api_schema.enforce_valid_method_settings(methodsettings) @@ -3864,9 +4207,7 @@ def test_method_settings_auto_populated_field_server_streaming_rpc_raises_error( Test that `MethodSettingsError` is raised when the selector in `client_pb2.MethodSettings.selector` maps to a method which uses server streaming. 
""" - fd = get_file_descriptor_proto_for_tests( - server_streaming=True - ) + fd = get_file_descriptor_proto_for_tests(server_streaming=True) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -3874,9 +4215,7 @@ def test_method_settings_auto_populated_field_server_streaming_rpc_raises_error( auto_populated_fields=["squid"], ), ] - with pytest.raises( - api.MethodSettingsError, match="(?i)not a unary method" - ): + with pytest.raises(api.MethodSettingsError, match="(?i)not a unary method"): api_schema.enforce_valid_method_settings(methodsettings) @@ -3900,9 +4239,7 @@ def test_method_settings_unsupported_auto_populated_field_behavior_raises_error( auto_populated_fields=["squid"], ), ] - with pytest.raises( - api.MethodSettingsError, match="(?i)required field" - ): + with pytest.raises(api.MethodSettingsError, match="(?i)required field"): api_schema.enforce_valid_method_settings(methodsettings) @@ -3934,9 +4271,9 @@ def test_method_settings_unsupported_auto_populated_field_field_info_format_rais the format of the field is `IPV4`. 
""" field_options = descriptor_pb2.FieldOptions() - field_options.Extensions[ - field_info_pb2.field_info - ].format = field_info_pb2.FieldInfo.Format.Value("IPV4") + field_options.Extensions[field_info_pb2.field_info].format = ( + field_info_pb2.FieldInfo.Format.Value("IPV4") + ) squid = make_field_pb2( name="squid", type="TYPE_STRING", options=field_options, number=1 ) @@ -3965,9 +4302,9 @@ def test_method_settings_invalid_multiple_issues(): # - Not annotated with google.api.field_info.format = UUID4 # - Not of type string # - Required field - field_options.Extensions[ - field_info_pb2.field_info - ].format = field_info_pb2.FieldInfo.Format.Value("IPV4") + field_options.Extensions[field_info_pb2.field_info].format = ( + field_info_pb2.FieldInfo.Format.Value("IPV4") + ) squid = make_field_pb2( name="squid", type="TYPE_INT32", options=field_options, number=1 ) @@ -3979,9 +4316,7 @@ def test_method_settings_invalid_multiple_issues(): # Field Octopus Errors # - Not annotated with google.api.field_info.format = UUID4 octopus = make_field_pb2(name="octopus", type="TYPE_STRING", number=1) - fd = get_file_descriptor_proto_for_tests( - fields=[squid, octopus] - ) + fd = get_file_descriptor_proto_for_tests(fields=[squid, octopus]) api_schema = api.API.build(fd, "google.example.v1beta1") methodsettings = [ client_pb2.MethodSettings( @@ -4001,19 +4336,7 @@ def test_method_settings_invalid_multiple_issues(): error_yaml = yaml.safe_load(ex.value.args[0]) - assert re.match( - ".*squid.*not.*string.*", - error_yaml[method_example1][0].lower() - ) - assert re.match( - ".*squid.*not.*uuid4.*", - error_yaml[method_example1][1].lower() - ) - assert re.match( - ".*octopus.*not.*uuid4.*", - error_yaml[method_example1][2].lower() - ) - assert re.match( - ".*method.*not found.*", - error_yaml[method_example2][0].lower() - ) + assert re.match(".*squid.*not.*string.*", error_yaml[method_example1][0].lower()) + assert re.match(".*squid.*not.*uuid4.*", 
error_yaml[method_example1][1].lower()) + assert re.match(".*octopus.*not.*uuid4.*", error_yaml[method_example1][2].lower()) + assert re.match(".*method.*not found.*", error_yaml[method_example2][0].lower()) diff --git a/packages/gapic-generator/tests/unit/schema/test_imp.py b/packages/gapic-generator/tests/unit/schema/test_imp.py index cb42b81d930f..81821e4f7c32 100644 --- a/packages/gapic-generator/tests/unit/schema/test_imp.py +++ b/packages/gapic-generator/tests/unit/schema/test_imp.py @@ -16,36 +16,36 @@ def test_str(): - i = imp.Import(package=('foo', 'bar'), module='baz') - assert str(i) == 'from foo.bar import baz' + i = imp.Import(package=("foo", "bar"), module="baz") + assert str(i) == "from foo.bar import baz" def test_str_no_package(): - i = imp.Import(package=(), module='baz') - assert str(i) == 'import baz' + i = imp.Import(package=(), module="baz") + assert str(i) == "import baz" def test_str_alias(): - i = imp.Import(package=('foo', 'bar'), module='baz', alias='bacon') - assert str(i) == 'from foo.bar import baz as bacon' + i = imp.Import(package=("foo", "bar"), module="baz", alias="bacon") + assert str(i) == "from foo.bar import baz as bacon" def test_str_untyped_pb2(): - i = imp.Import(package=('foo', 'bar'), module='baz_pb2', alias='bacon') - assert str(i) == 'from foo.bar import baz_pb2 as bacon # type: ignore' + i = imp.Import(package=("foo", "bar"), module="baz_pb2", alias="bacon") + assert str(i) == "from foo.bar import baz_pb2 as bacon # type: ignore" def test_str_untyped_api_core(): - i = imp.Import(package=('foo', 'api_core'), module='baz', alias='bacon') - assert str(i) == 'from foo.api_core import baz as bacon # type: ignore' + i = imp.Import(package=("foo", "api_core"), module="baz", alias="bacon") + assert str(i) == "from foo.api_core import baz as bacon # type: ignore" def test_str_eq(): - i1 = imp.Import(package=('foo', 'bar'), module='baz') - i2 = imp.Import(package=('foo', 'bar'), module='baz') - i3 = imp.Import(package=('foo', 
'bar'), module='baz', alias='bacon') - j1 = imp.Import(package=('foo', 'bar'), module='not_baz') - k1 = imp.Import(package=('spam', 'eggs'), module='baz') + i1 = imp.Import(package=("foo", "bar"), module="baz") + i2 = imp.Import(package=("foo", "bar"), module="baz") + i3 = imp.Import(package=("foo", "bar"), module="baz", alias="bacon") + j1 = imp.Import(package=("foo", "bar"), module="not_baz") + k1 = imp.Import(package=("spam", "eggs"), module="baz") assert i1 == i2 assert i1 == i3 assert i2 == i3 diff --git a/packages/gapic-generator/tests/unit/schema/test_metadata.py b/packages/gapic-generator/tests/unit/schema/test_metadata.py index 3189b3a9fd46..0d1d1c803203 100644 --- a/packages/gapic-generator/tests/unit/schema/test_metadata.py +++ b/packages/gapic-generator/tests/unit/schema/test_metadata.py @@ -24,46 +24,51 @@ def test_address_str(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') - assert str(addr) == 'baz.Bacon' + addr = metadata.Address(package=("foo", "bar"), module="baz", name="Bacon") + assert str(addr) == "baz.Bacon" def test_address_str_with_context(): addr = metadata.Address( - package=('foo', 'bar'), - module='baz', - name='Bacon', - ).with_context(collisions=frozenset({'baz'})) - assert str(addr) == 'fb_baz.Bacon' + package=("foo", "bar"), + module="baz", + name="Bacon", + ).with_context(collisions=frozenset({"baz"})) + assert str(addr) == "fb_baz.Bacon" def test_address_str_parent(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon', - parent=('spam', 'eggs')) - assert str(addr) == 'baz.spam.eggs.Bacon' + addr = metadata.Address( + package=("foo", "bar"), module="baz", name="Bacon", parent=("spam", "eggs") + ) + assert str(addr) == "baz.spam.eggs.Bacon" def test_address_str_different_proto_package(): - addr = metadata.Address(package=('google', 'iam', 'v1'), module='options', name='GetPolicyOptions', - api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1')) - assert str(addr) == 
'options_pb2.GetPolicyOptions' + addr = metadata.Address( + package=("google", "iam", "v1"), + module="options", + name="GetPolicyOptions", + api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), + ) + assert str(addr) == "options_pb2.GetPolicyOptions" def test_address_str_different_proto_package_with_collision(): addr = metadata.Address( - package=('google', 'rpc'), - module='status', - name='Status', - api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1') - ).with_context(collisions=frozenset({'status'})) + package=("google", "rpc"), + module="status", + name="Status", + api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), + ).with_context(collisions=frozenset({"status"})) # the module alias should be ignored for _pb2 types - assert str(addr) == 'status_pb2.Status' + assert str(addr) == "status_pb2.Status" def test_address_proto(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') - assert addr.proto == 'foo.bar.Bacon' - assert addr.proto_package == 'foo.bar' + addr = metadata.Address(package=("foo", "bar"), module="baz", name="Bacon") + assert addr.proto == "foo.bar.Bacon" + assert addr.proto_package == "foo.bar" def test_proto_package_version_parsing(): @@ -105,121 +110,140 @@ def test_proto_package_version_parsing(): def test_address_child_no_parent(): - addr = metadata.Address(package=('foo', 'bar'), module='baz') - child = addr.child('Bacon', path=(4, 0)) - assert child.name == 'Bacon' + addr = metadata.Address(package=("foo", "bar"), module="baz") + child = addr.child("Bacon", path=(4, 0)) + assert child.name == "Bacon" assert child.parent == () assert child.module_path == (4, 0) def test_address_child_with_parent(): - addr = metadata.Address(package=('foo', 'bar'), module='baz') - child = addr.child('Bacon', path=(4, 0)) - grandchild = child.child('Ham', path=(2, 0)) - assert grandchild.parent == ('Bacon',) - assert grandchild.name == 'Ham' + addr = metadata.Address(package=("foo", "bar"), module="baz") + 
child = addr.child("Bacon", path=(4, 0)) + grandchild = child.child("Ham", path=(2, 0)) + assert grandchild.parent == ("Bacon",) + assert grandchild.name == "Ham" assert grandchild.module_path == (4, 0, 2, 0) def test_address_rel(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') - assert addr.rel( - metadata.Address(package=('foo', 'bar'), module='baz'), - ) == "'Bacon'" + addr = metadata.Address(package=("foo", "bar"), module="baz", name="Bacon") + assert ( + addr.rel( + metadata.Address(package=("foo", "bar"), module="baz"), + ) + == "'Bacon'" + ) def test_address_rel_other(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Bacon') - assert addr.rel( - metadata.Address(package=('foo', 'not_bar'), module='baz'), - ) == 'baz.Bacon' - assert addr.rel( - metadata.Address(package=('foo', 'bar'), module='not_baz'), - ) == 'baz.Bacon' + addr = metadata.Address(package=("foo", "bar"), module="baz", name="Bacon") + assert ( + addr.rel( + metadata.Address(package=("foo", "not_bar"), module="baz"), + ) + == "baz.Bacon" + ) + assert ( + addr.rel( + metadata.Address(package=("foo", "bar"), module="not_baz"), + ) + == "baz.Bacon" + ) def test_address_rel_later(): addr = metadata.Address( - module='baz', module_path=(4, 1), - name='Bacon', package=('foo', 'bar'), + module="baz", + module_path=(4, 1), + name="Bacon", + package=("foo", "bar"), ) other = metadata.Address( - module='baz', module_path=(4, 0), - name='Ham', package=('foo', 'bar'), + module="baz", + module_path=(4, 0), + name="Ham", + package=("foo", "bar"), ) assert addr.rel(other) == "'Bacon'" def test_address_rel_nested_sibling(): addr = metadata.Address( - module='baz', name='Bacon', - package=('foo', 'bar'), parent=('Spam',) + module="baz", name="Bacon", package=("foo", "bar"), parent=("Spam",) ) other = metadata.Address( - module='baz', name='Ham', - package=('foo', 'bar'), parent=('Spam',) + module="baz", name="Ham", package=("foo", "bar"), parent=("Spam",) ) 
assert addr.rel(other) == "'Spam.Bacon'" def test_address_rel_nested_sibling_later(): addr = metadata.Address( - module='baz', name='Bacon', module_path=(4, 0, 3, 1), - package=('foo', 'bar'), parent=('Spam',) + module="baz", + name="Bacon", + module_path=(4, 0, 3, 1), + package=("foo", "bar"), + parent=("Spam",), ) other = metadata.Address( - module='baz', name='Ham', module_path=(4, 0, 3, 0), - package=('foo', 'bar'), parent=('Spam',) + module="baz", + name="Ham", + module_path=(4, 0, 3, 0), + package=("foo", "bar"), + parent=("Spam",), ) assert addr.rel(other) == "'Spam.Bacon'" def test_address_rel_nested_parent(): - parent = metadata.Address(module='baz', name='Ham', package=('foo', 'bar')) + parent = metadata.Address(module="baz", name="Ham", package=("foo", "bar")) child = metadata.Address( - module='baz', name='Bacon', - package=('foo', 'bar'), parent=('Ham',) + module="baz", name="Bacon", package=("foo", "bar"), parent=("Ham",) ) - assert child.rel(parent) == 'Bacon' + assert child.rel(parent) == "Bacon" def test_address_resolve(): - addr = metadata.Address(package=('foo', 'bar'), module='baz', name='Qux') - assert addr.resolve('Bacon') == 'foo.bar.Bacon' - assert addr.resolve('foo.bar.Bacon') == 'foo.bar.Bacon' - assert addr.resolve('google.example.Bacon') == 'google.example.Bacon' + addr = metadata.Address(package=("foo", "bar"), module="baz", name="Qux") + assert addr.resolve("Bacon") == "foo.bar.Bacon" + assert addr.resolve("foo.bar.Bacon") == "foo.bar.Bacon" + assert addr.resolve("google.example.Bacon") == "google.example.Bacon" def test_address_subpackage(): addr = metadata.Address( - package=('foo', 'bar', 'baz', 'v1', 'spam', 'eggs'), - api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1'), + package=("foo", "bar", "baz", "v1", "spam", "eggs"), + api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), ) - assert addr.subpackage == ('spam', 'eggs') + assert addr.subpackage == ("spam", "eggs") def test_address_subpackage_no_version(): addr 
= metadata.Address( - package=('foo', 'bar', 'baz', 'spam', 'eggs'), - api_naming=naming.NewNaming(proto_package='foo.bar.baz'), + package=("foo", "bar", "baz", "spam", "eggs"), + api_naming=naming.NewNaming(proto_package="foo.bar.baz"), ) - assert addr.subpackage == ('spam', 'eggs') + assert addr.subpackage == ("spam", "eggs") def test_address_subpackage_empty(): addr = metadata.Address( - package=('foo', 'bar', 'baz', 'v1'), - api_naming=naming.NewNaming(proto_package='foo.bar.baz.v1'), + package=("foo", "bar", "baz", "v1"), + api_naming=naming.NewNaming(proto_package="foo.bar.baz.v1"), ) assert addr.subpackage == () def test_metadata_with_context(): meta = metadata.Metadata() - collisions = meta.with_context( - collisions={'foo', 'bar'}, - ).address.collisions - RESERVED_NAMES - assert collisions == {'foo', 'bar'} + collisions = ( + meta.with_context( + collisions={"foo", "bar"}, + ).address.collisions + - RESERVED_NAMES + ) + assert collisions == {"foo", "bar"} def test_address_name_builtin_keyword(): @@ -250,19 +274,19 @@ def test_address_name_builtin_keyword(): def test_doc_nothing(): meta = metadata.Metadata() - assert meta.doc == '' + assert meta.doc == "" def test_doc_leading_trumps_all(): - meta = make_doc_meta(leading='foo', trailing='bar', detached=['baz']) - assert meta.doc == 'foo' + meta = make_doc_meta(leading="foo", trailing="bar", detached=["baz"]) + assert meta.doc == "foo" def test_doc_trailing_trumps_detached(): - meta = make_doc_meta(trailing='spam', detached=['eggs']) - assert meta.doc == 'spam' + meta = make_doc_meta(trailing="spam", detached=["eggs"]) + assert meta.doc == "spam" def test_doc_detached_joined(): - meta = make_doc_meta(detached=['foo', 'bar']) - assert meta.doc == 'foo\n\nbar' + meta = make_doc_meta(detached=["foo", "bar"]) + assert meta.doc == "foo\n\nbar" diff --git a/packages/gapic-generator/tests/unit/schema/test_naming.py b/packages/gapic-generator/tests/unit/schema/test_naming.py index 5a839e4c8c80..08612f3bf43d 100644 
--- a/packages/gapic-generator/tests/unit/schema/test_naming.py +++ b/packages/gapic-generator/tests/unit/schema/test_naming.py @@ -23,113 +23,113 @@ def test_long_name(): - n = make_naming(name='Genie', namespace=['Agrabah', 'Lamp']) - assert n.long_name == 'Agrabah Lamp Genie' + n = make_naming(name="Genie", namespace=["Agrabah", "Lamp"]) + assert n.long_name == "Agrabah Lamp Genie" def test_module_name(): n = make_naming( - name='Genie', - namespace=['Agrabah', 'Lamp'], - version='v2', + name="Genie", + namespace=["Agrabah", "Lamp"], + version="v2", ) - assert n.module_name == 'genie' + assert n.module_name == "genie" def test_versioned_module_name_no_version(): n = make_naming( - name='Genie', - namespace=['Agrabah', 'Lamp'], - version='', + name="Genie", + namespace=["Agrabah", "Lamp"], + version="", ) - assert n.versioned_module_name == 'genie' + assert n.versioned_module_name == "genie" def test_versioned_module_name(): n = make_naming( - name='Genie', - namespace=['Agrabah', 'Lamp'], - version='v2', + name="Genie", + namespace=["Agrabah", "Lamp"], + version="v2", ) - assert n.versioned_module_name == 'genie_v2' + assert n.versioned_module_name == "genie_v2" def test_namespace_packages(): - n = make_naming(name='BigQuery', namespace=('Google', 'Cloud')) - assert n.namespace_packages == ('google', 'google.cloud') + n = make_naming(name="BigQuery", namespace=("Google", "Cloud")) + assert n.namespace_packages == ("google", "google.cloud") def test_warehouse_package_name_no_namespace(): - n = make_naming(name='BigQuery', namespace=[]) - assert n.warehouse_package_name == 'bigquery' + n = make_naming(name="BigQuery", namespace=[]) + assert n.warehouse_package_name == "bigquery" def test_warehouse_package_name_with_namespace(): n = make_naming( - name='BigQuery', - namespace=('Google', 'Cloud'), + name="BigQuery", + namespace=("Google", "Cloud"), ) - assert n.warehouse_package_name == 'google-cloud-bigquery' + assert n.warehouse_package_name == 
"google-cloud-bigquery" def test_warehouse_package_name_multiple_words(): - n = make_naming(name='Big Query', namespace=[]) - assert n.warehouse_package_name == 'big-query' + n = make_naming(name="Big Query", namespace=[]) + assert n.warehouse_package_name == "big-query" def test_build_no_annotations(): protos = ( descriptor_pb2.FileDescriptorProto( - name='baz_service.proto', - package='foo.bar.baz.v1', + name="baz_service.proto", + package="foo.bar.baz.v1", ), descriptor_pb2.FileDescriptorProto( - name='baz_common.proto', - package='foo.bar.baz.v1', + name="baz_common.proto", + package="foo.bar.baz.v1", ), ) n = naming.Naming.build(*protos) - assert n.name == 'Baz' - assert n.namespace == ('Foo', 'Bar') - assert n.version == 'v1' - assert n.product_name == 'Baz' + assert n.name == "Baz" + assert n.namespace == ("Foo", "Bar") + assert n.version == "v1" + assert n.product_name == "Baz" def test_build_no_annotations_no_version(): protos = ( descriptor_pb2.FileDescriptorProto( - name='baz_service.proto', - package='foo.bar', + name="baz_service.proto", + package="foo.bar", ), descriptor_pb2.FileDescriptorProto( - name='baz_common.proto', - package='foo.bar', + name="baz_common.proto", + package="foo.bar", ), ) n = naming.Naming.build(*protos) - assert n.name == 'Bar' - assert n.namespace == ('Foo',) - assert n.version == '' + assert n.name == "Bar" + assert n.namespace == ("Foo",) + assert n.version == "" def test_build_no_namespace(): protos = ( descriptor_pb2.FileDescriptorProto( - name='foo_service.proto', - package='foo', + name="foo_service.proto", + package="foo", ), ) n = naming.Naming.build(*protos) - assert n.name == 'Foo' + assert n.name == "Foo" assert n.namespace == () - assert n.version == '' - assert n.product_name == 'Foo' + assert n.version == "" + assert n.product_name == "Foo" def test_inconsistent_package_error(): - proto1 = descriptor_pb2.FileDescriptorProto(package='google.spanner.v1') - proto2 = 
descriptor_pb2.FileDescriptorProto(package='spanner.v1') - proto3 = descriptor_pb2.FileDescriptorProto(package='google.spanner.v2') + proto1 = descriptor_pb2.FileDescriptorProto(package="google.spanner.v1") + proto2 = descriptor_pb2.FileDescriptorProto(package="spanner.v1") + proto3 = descriptor_pb2.FileDescriptorProto(package="google.spanner.v2") # These should all error against one another. with pytest.raises(ValueError): @@ -139,118 +139,110 @@ def test_inconsistent_package_error(): def test_subpackages(): - proto1 = descriptor_pb2.FileDescriptorProto(package='google.ads.v0.foo') - proto2 = descriptor_pb2.FileDescriptorProto(package='google.ads.v0.bar') + proto1 = descriptor_pb2.FileDescriptorProto(package="google.ads.v0.foo") + proto2 = descriptor_pb2.FileDescriptorProto(package="google.ads.v0.bar") n = naming.Naming.build(proto1, proto2) - assert n.name == 'Ads' - assert n.namespace == ('Google',) - assert n.version == 'v0' + assert n.name == "Ads" + assert n.namespace == ("Google",) + assert n.version == "v0" def test_cli_override_name(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.cloud.videointelligence.v1') - n = naming.Naming.build(proto1, - opts=Options(name='Video Intelligence'), - ) - assert n.namespace == ('Google', 'Cloud') - assert n.name == 'Video Intelligence' - assert n.version == 'v1' + proto1 = FileDesc(package="google.cloud.videointelligence.v1") + n = naming.Naming.build( + proto1, + opts=Options(name="Video Intelligence"), + ) + assert n.namespace == ("Google", "Cloud") + assert n.name == "Video Intelligence" + assert n.version == "v1" def test_cli_override_name_underscores(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.cloud.videointelligence.v1') - n = naming.Naming.build(proto1, - opts=Options(name='video_intelligence'), - ) - assert n.namespace == ('Google', 'Cloud') - assert n.name == 'Video Intelligence' - assert n.version == 'v1' + proto1 = 
FileDesc(package="google.cloud.videointelligence.v1") + n = naming.Naming.build( + proto1, + opts=Options(name="video_intelligence"), + ) + assert n.namespace == ("Google", "Cloud") + assert n.name == "Video Intelligence" + assert n.version == "v1" def test_cli_override_namespace(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.spanner.v1') + proto1 = FileDesc(package="google.spanner.v1") n = naming.Naming.build( proto1, - opts=Options(namespace=('google', 'cloud')), + opts=Options(namespace=("google", "cloud")), ) - assert n.namespace == ('Google', 'Cloud') - assert n.name == 'Spanner' - assert n.version == 'v1' + assert n.namespace == ("Google", "Cloud") + assert n.name == "Spanner" + assert n.version == "v1" def test_cli_override_namespace_dotted(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.spanner.v1') - n = naming.Naming.build(proto1, - opts=Options(namespace=('google.cloud',)), - ) - assert n.namespace == ('Google', 'Cloud') - assert n.name == 'Spanner' - assert n.version == 'v1' + proto1 = FileDesc(package="google.spanner.v1") + n = naming.Naming.build( + proto1, + opts=Options(namespace=("google.cloud",)), + ) + assert n.namespace == ("Google", "Cloud") + assert n.name == "Spanner" + assert n.version == "v1" def test_cli_override_name_and_namespace(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.translation.v2') + proto1 = FileDesc(package="google.translation.v2") n = naming.Naming.build( proto1, - opts=Options( - namespace=('google', 'cloud'), name='translate' - ), + opts=Options(namespace=("google", "cloud"), name="translate"), ) - assert n.namespace == ('Google', 'Cloud') - assert n.name == 'Translate' - assert n.version == 'v2' + assert n.namespace == ("Google", "Cloud") + assert n.name == "Translate" + assert n.version == "v2" def test_cli_override_name_and_namespace_versionless(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = 
FileDesc(package='google.translation') + proto1 = FileDesc(package="google.translation") n = naming.Naming.build( proto1, - opts=Options(namespace=('google', 'cloud'), name='translate'), + opts=Options(namespace=("google", "cloud"), name="translate"), ) - assert n.namespace == ('Google', 'Cloud') - assert n.name == 'Translate' + assert n.namespace == ("Google", "Cloud") + assert n.name == "Translate" assert not n.version def test_cli_override_warehouse_package_name(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.translation') + proto1 = FileDesc(package="google.translation") n = naming.Naming.build( proto1, - opts=Options(warehouse_package_name='google-cloud-foo'), + opts=Options(warehouse_package_name="google-cloud-foo"), ) assert n.warehouse_package_name == "google-cloud-foo" def test_cli_override_proto_plus_deps(): FileDesc = descriptor_pb2.FileDescriptorProto - proto1 = FileDesc(package='google.translation') + proto1 = FileDesc(package="google.translation") n = naming.Naming.build( proto1, - opts=Options( - proto_plus_deps=('google.dep1', 'google.dep2')), + opts=Options(proto_plus_deps=("google.dep1", "google.dep2")), ) - assert n.proto_plus_deps == ('google.dep1', 'google.dep2') + assert n.proto_plus_deps == ("google.dep1", "google.dep2") def test_build_factory(): - proto = descriptor_pb2.FileDescriptorProto( - package='google.mollusc.v1alpha1' - ) - old = naming.Naming.build( - proto, - opts=Options(old_naming=True) - ) - assert old.versioned_module_name == 'mollusc.v1alpha1' + proto = descriptor_pb2.FileDescriptorProto(package="google.mollusc.v1alpha1") + old = naming.Naming.build(proto, opts=Options(old_naming=True)) + assert old.versioned_module_name == "mollusc.v1alpha1" - new = naming.Naming.build( - proto, - opts=Options() - ) - assert new.versioned_module_name == 'mollusc_v1alpha1' + new = naming.Naming.build(proto, opts=Options()) + assert new.versioned_module_name == "mollusc_v1alpha1" diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py index 2eeb9c043ff2..b5a2aff92475 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_enums.py @@ -23,32 +23,40 @@ def test_enum_properties(): - enum_type = make_enum(name='Color') - assert enum_type.name == 'Color' + enum_type = make_enum(name="Color") + assert enum_type.name == "Color" def test_enum_value_properties(): - enum_type = make_enum(name='Irrelevant', values=( - ('RED', 1), ('GREEN', 2), ('BLUE', 3), - )) + enum_type = make_enum( + name="Irrelevant", + values=( + ("RED", 1), + ("GREEN", 2), + ("BLUE", 3), + ), + ) assert len(enum_type.values) == 3 - for ev, expected in zip(enum_type.values, ('RED', 'GREEN', 'BLUE')): + for ev, expected in zip(enum_type.values, ("RED", "GREEN", "BLUE")): assert ev.name == expected def test_enum_ident(): - enum = make_enum('Baz', package='foo.v1', module='bar') - assert str(enum.ident) == 'bar.Baz' - assert enum.ident.sphinx == 'foo.v1.bar.Baz' + enum = make_enum("Baz", package="foo.v1", module="bar") + assert str(enum.ident) == "bar.Baz" + assert enum.ident.sphinx == "foo.v1.bar.Baz" def test_enum_options_dict(): - cephalopod = make_enum("Cephalopod", package="animalia.v1", - module="mollusca", options={"allow_alias": True}) + cephalopod = make_enum( + "Cephalopod", + package="animalia.v1", + module="mollusca", + options={"allow_alias": True}, + ) assert isinstance(cephalopod.enum_pb.options, descriptor_pb2.EnumOptions) assert cephalopod.options_dict == {"allow_alias": True} - bivalve = make_enum("Bivalve", package="animalia.v1", - module="mollusca") + bivalve = make_enum("Bivalve", package="animalia.v1", module="mollusca") assert isinstance(bivalve.enum_pb.options, descriptor_pb2.EnumOptions) assert bivalve.options_dict == {} diff --git 
a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py index efaf595d946b..dd8367c2ef01 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_field.py @@ -34,20 +34,20 @@ def test_field_properties(): - field = make_field(name='my_field', number=1, type='TYPE_BOOL') - assert field.name == 'my_field' + field = make_field(name="my_field", number=1, type="TYPE_BOOL") + assert field.name == "my_field" assert field.number == 1 assert field.type.python_type == bool def test_field_is_primitive(): - primitive_field = make_field(type='TYPE_INT32') + primitive_field = make_field(type="TYPE_INT32") assert primitive_field.is_primitive def test_field_proto_type(): - primitive_field = make_field(type='TYPE_INT32') - assert primitive_field.proto_type == 'INT32' + primitive_field = make_field(type="TYPE_INT32") + assert primitive_field.proto_type == "INT32" def test_field_not_primitive(): @@ -58,51 +58,51 @@ def test_field_not_primitive(): message_pb=descriptor_pb2.DescriptorProto(), ) non_primitive_field = make_field( - type='TYPE_MESSAGE', - type_name='bogus.Message', + type="TYPE_MESSAGE", + type_name="bogus.Message", message=message, ) assert not non_primitive_field.is_primitive def test_ident(): - field = make_field(type='TYPE_BOOL') - assert str(field.ident) == 'bool' + field = make_field(type="TYPE_BOOL") + assert str(field.ident) == "bool" def test_ident_repeated(): - REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') - field = make_field(type='TYPE_BOOL', label=REP) - assert str(field.ident) == 'MutableSequence[bool]' + REP = descriptor_pb2.FieldDescriptorProto.Label.Value("LABEL_REPEATED") + field = make_field(type="TYPE_BOOL", label=REP) + assert str(field.ident) == "MutableSequence[bool]" def test_repeated(): - REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + 
REP = descriptor_pb2.FieldDescriptorProto.Label.Value("LABEL_REPEATED") field = make_field(label=REP) assert field.repeated def test_not_repeated(): - OPT = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_OPTIONAL') + OPT = descriptor_pb2.FieldDescriptorProto.Label.Value("LABEL_OPTIONAL") field = make_field(label=OPT) assert not field.repeated def test_map(): entry_msg = make_message( - name='SquidEntry', + name="SquidEntry", fields=( - make_field(name='key', type='TYPE_STRING'), - make_field(name='value', type='TYPE_STRING'), + make_field(name="key", type="TYPE_STRING"), + make_field(name="value", type="TYPE_STRING"), ), options=descriptor_pb2.MessageOptions(map_entry=True), ) field = make_field( - name='squids', - type_name='mollusc.SquidEntry', + name="squids", + type_name="mollusc.SquidEntry", message=entry_msg, label=3, - type='TYPE_MESSAGE', + type="TYPE_MESSAGE", ) assert field.map @@ -110,19 +110,19 @@ def test_map(): def test_ident_map(): entry_msg = make_message( - name='SquidEntry', + name="SquidEntry", fields=( - make_field(name='key', type='TYPE_STRING'), - make_field(name='value', type='TYPE_STRING'), + make_field(name="key", type="TYPE_STRING"), + make_field(name="value", type="TYPE_STRING"), ), options=descriptor_pb2.MessageOptions(map_entry=True), ) field = make_field( - name='squids', - type_name='mollusc.SquidEntry', + name="squids", + type_name="mollusc.SquidEntry", message=entry_msg, label=3, - type='TYPE_MESSAGE', + type="TYPE_MESSAGE", ) assert str(field.ident) == "MutableMapping[str, str]" @@ -131,7 +131,7 @@ def test_ident_map(): def test_required(): field = make_field() field.options.Extensions[field_behavior_pb2.field_behavior].append( - field_behavior_pb2.FieldBehavior.Value('REQUIRED') + field_behavior_pb2.FieldBehavior.Value("REQUIRED") ) assert field.required @@ -143,8 +143,8 @@ def test_not_required(): def test_uuid4(): field = make_field() - field.options.Extensions[field_info_pb2.field_info].format = 
field_info_pb2.FieldInfo.Format.Value( - "UUID4" + field.options.Extensions[field_info_pb2.field_info].format = ( + field_info_pb2.FieldInfo.Format.Value("UUID4") ) assert field.uuid4 @@ -155,48 +155,50 @@ def test_not_uuid4(): def test_ident_sphinx(): - field = make_field(type='TYPE_BOOL') - assert field.ident.sphinx == 'bool' + field = make_field(type="TYPE_BOOL") + assert field.ident.sphinx == "bool" def test_ident_sphinx_repeated(): - REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') - field = make_field(type='TYPE_BOOL', label=REP) - assert field.ident.sphinx == 'MutableSequence[bool]' + REP = descriptor_pb2.FieldDescriptorProto.Label.Value("LABEL_REPEATED") + field = make_field(type="TYPE_BOOL", label=REP) + assert field.ident.sphinx == "MutableSequence[bool]" def test_ident_sphinx_map(): entry_msg = make_message( - name='SquidEntry', + name="SquidEntry", fields=( - make_field(name='key', type='TYPE_STRING'), - make_field(name='value', type='TYPE_STRING'), + make_field(name="key", type="TYPE_STRING"), + make_field(name="value", type="TYPE_STRING"), ), options=descriptor_pb2.MessageOptions(map_entry=True), ) field = make_field( - name='squids', - type_name='mollusc.SquidEntry', + name="squids", + type_name="mollusc.SquidEntry", message=entry_msg, label=3, - type='TYPE_MESSAGE', + type="TYPE_MESSAGE", ) - assert field.ident.sphinx == 'MutableMapping[str, str]' + assert field.ident.sphinx == "MutableMapping[str, str]" def test_resource_reference(): - field = make_field(type='TYPE_STRING') - field.options.Extensions[resource_pb2.resource_reference].type = "translate.googleapis.com/Glossary" + field = make_field(type="TYPE_STRING") + field.options.Extensions[resource_pb2.resource_reference].type = ( + "translate.googleapis.com/Glossary" + ) assert field.resource_reference == "translate.googleapis.com/Glossary" def test_type_primitives(): - assert make_field(type='TYPE_FLOAT').type.python_type == float - assert 
make_field(type='TYPE_INT64').type.python_type == int - assert make_field(type='TYPE_BOOL').type.python_type == bool - assert make_field(type='TYPE_STRING').type.python_type == str - assert make_field(type='TYPE_BYTES').type.python_type == bytes + assert make_field(type="TYPE_FLOAT").type.python_type == float + assert make_field(type="TYPE_INT64").type.python_type == int + assert make_field(type="TYPE_BOOL").type.python_type == bool + assert make_field(type="TYPE_STRING").type.python_type == str + assert make_field(type="TYPE_BYTES").type.python_type == bytes def test_type_message(): @@ -207,8 +209,8 @@ def test_type_message(): message_pb=descriptor_pb2.DescriptorProto(), ) field = make_field( - type='TYPE_MESSAGE', - type_name='bogus.Message', + type="TYPE_MESSAGE", + type_name="bogus.Message", message=message, ) assert field.type == message @@ -220,8 +222,8 @@ def test_type_enum(): enum_pb=descriptor_pb2.EnumDescriptorProto(), ) field = make_field( - type='TYPE_ENUM', - type_name='bogus.Enumerable', + type="TYPE_ENUM", + type_name="bogus.Enumerable", enum=enum, ) assert field.type == enum @@ -229,92 +231,91 @@ def test_type_enum(): def test_type_invalid(): with pytest.raises(TypeError): - make_field(type='TYPE_GROUP').type + make_field(type="TYPE_GROUP").type def test_mock_value_int(): - field = make_field(name='foo_bar', type='TYPE_INT32') - assert field.mock_value == '728' + field = make_field(name="foo_bar", type="TYPE_INT32") + assert field.mock_value == "728" def test_mock_value_original_type_int(): - field = make_field(name='foo_bar', type='TYPE_INT32') + field = make_field(name="foo_bar", type="TYPE_INT32") assert field.mock_value_original_type == 728 def test_oneof(): - REP = descriptor_pb2.FieldDescriptorProto.Label.Value('LABEL_REPEATED') + REP = descriptor_pb2.FieldDescriptorProto.Label.Value("LABEL_REPEATED") field = make_field(oneof="oneof_name") assert field.oneof == "oneof_name" def test_mock_value_float(): - field = make_field(name='foo_bar', 
type='TYPE_DOUBLE') - assert field.mock_value == '0.728' + field = make_field(name="foo_bar", type="TYPE_DOUBLE") + assert field.mock_value == "0.728" def test_mock_value_original_type_float(): - field = make_field(name='foo_bar', type='TYPE_DOUBLE') + field = make_field(name="foo_bar", type="TYPE_DOUBLE") assert field.mock_value_original_type == 0.728 def test_mock_value_bool(): - field = make_field(name='foo_bar', type='TYPE_BOOL') - assert field.mock_value == 'True' + field = make_field(name="foo_bar", type="TYPE_BOOL") + assert field.mock_value == "True" def test_mock_value_original_type_bool(): - field = make_field(name='foo_bar', type='TYPE_BOOL') + field = make_field(name="foo_bar", type="TYPE_BOOL") assert field.mock_value_original_type == True def test_mock_value_str(): - field = make_field(name='foo_bar', type='TYPE_STRING') + field = make_field(name="foo_bar", type="TYPE_STRING") assert field.mock_value == "'foo_bar_value'" def test_mock_value_original_type_str(): - field = make_field(name='foo_bar', type='TYPE_STRING') + field = make_field(name="foo_bar", type="TYPE_STRING") assert field.mock_value_original_type == "foo_bar_value" def test_mock_value_bytes(): - field = make_field(name='foo_bar', type='TYPE_BYTES') + field = make_field(name="foo_bar", type="TYPE_BYTES") assert field.mock_value == "b'foo_bar_blob'" def test_mock_value_original_type_bytes(): - field = make_field(name='foo_bar', type='TYPE_BYTES') + field = make_field(name="foo_bar", type="TYPE_BYTES") assert field.mock_value_original_type == b"foo_bar_blob" def test_mock_value_repeated(): - field = make_field(name='foo_bar', type='TYPE_STRING', label=3) + field = make_field(name="foo_bar", type="TYPE_STRING", label=3) assert field.mock_value == "['foo_bar_value']" def test_mock_value_original_type_repeated(): - field = make_field(name='foo_bar', type='TYPE_STRING', label=3) - assert field.mock_value_original_type == [ - "foo_bar_value1", "foo_bar_value2"] + field = 
make_field(name="foo_bar", type="TYPE_STRING", label=3) + assert field.mock_value_original_type == ["foo_bar_value1", "foo_bar_value2"] def test_mock_value_map(): entry_msg = make_message( - name='SquidEntry', + name="SquidEntry", fields=( - make_field(name='key', type='TYPE_STRING'), - make_field(name='value', type='TYPE_STRING'), + make_field(name="key", type="TYPE_STRING"), + make_field(name="value", type="TYPE_STRING"), ), options=descriptor_pb2.MessageOptions(map_entry=True), ) field = make_field( - name='squids', - type_name='mollusc.SquidEntry', + name="squids", + type_name="mollusc.SquidEntry", message=entry_msg, label=3, - type='TYPE_MESSAGE', + type="TYPE_MESSAGE", ) assert field.mock_value == "{'key_value': 'value_value'}" @@ -322,107 +323,124 @@ def test_mock_value_map(): def test_mock_value_enum(): values = [ - descriptor_pb2.EnumValueDescriptorProto(name='UNSPECIFIED', number=0), - descriptor_pb2.EnumValueDescriptorProto(name='SPECIFIED', number=1), + descriptor_pb2.EnumValueDescriptorProto(name="UNSPECIFIED", number=0), + descriptor_pb2.EnumValueDescriptorProto(name="SPECIFIED", number=1), ] enum = wrappers.EnumType( values=[wrappers.EnumValueType(enum_value_pb=i) for i in values], enum_pb=descriptor_pb2.EnumDescriptorProto(value=values), - meta=metadata.Metadata(address=metadata.Address( - module='bogus', - name='Enumerable', - )), + meta=metadata.Metadata( + address=metadata.Address( + module="bogus", + name="Enumerable", + ) + ), ) field = make_field( - type='TYPE_ENUM', - type_name='bogus.Enumerable', + type="TYPE_ENUM", + type_name="bogus.Enumerable", enum=enum, ) - assert field.mock_value == 'bogus.Enumerable.SPECIFIED' + assert field.mock_value == "bogus.Enumerable.SPECIFIED" def test_mock_value_message(): - subfields = collections.OrderedDict(( - ('foo', make_field(name='foo', type='TYPE_INT32')), - ('bar', make_field(name='bar', type='TYPE_STRING')) - )) + subfields = collections.OrderedDict( + ( + ("foo", make_field(name="foo", 
type="TYPE_INT32")), + ("bar", make_field(name="bar", type="TYPE_STRING")), + ) + ) message = wrappers.MessageType( fields=subfields, - message_pb=descriptor_pb2.DescriptorProto(name='Message', field=[ - i.field_pb for i in subfields.values() - ]), - meta=metadata.Metadata(address=metadata.Address( - module='bogus', - name='Message', - )), + message_pb=descriptor_pb2.DescriptorProto( + name="Message", field=[i.field_pb for i in subfields.values()] + ), + meta=metadata.Metadata( + address=metadata.Address( + module="bogus", + name="Message", + ) + ), nested_enums={}, nested_messages={}, ) field = make_field( - type='TYPE_MESSAGE', - type_name='bogus.Message', + type="TYPE_MESSAGE", + type_name="bogus.Message", message=message, ) - assert field.mock_value == 'bogus.Message(foo=324)' + assert field.mock_value == "bogus.Message(foo=324)" def test_mock_value_original_type_message(): - any_message_subfields = collections.OrderedDict(( - ('type_url', make_field(name='type_url', number=1, type='TYPE_STRING')), - ('value', make_field(name='value', number=2, type='TYPE_BYTES')), - )) + any_message_subfields = collections.OrderedDict( + ( + ("type_url", make_field(name="type_url", number=1, type="TYPE_STRING")), + ("value", make_field(name="value", number=2, type="TYPE_BYTES")), + ) + ) any_message = wrappers.MessageType( fields=any_message_subfields, - message_pb=descriptor_pb2.DescriptorProto(name='Any', field=[ - i.field_pb for i in any_message_subfields.values() - ]), - meta=metadata.Metadata(address=metadata.Address( - module='bogus', - name='Any', - package=('google', 'protobuf') - )), + message_pb=descriptor_pb2.DescriptorProto( + name="Any", field=[i.field_pb for i in any_message_subfields.values()] + ), + meta=metadata.Metadata( + address=metadata.Address( + module="bogus", name="Any", package=("google", "protobuf") + ) + ), nested_enums={}, nested_messages={}, ) any_field = make_field( - name='surprise', - type='TYPE_MESSAGE', - type_name='google.protobuf.Any', - 
message=any_message + name="surprise", + type="TYPE_MESSAGE", + type_name="google.protobuf.Any", + message=any_message, ) - subfields = collections.OrderedDict(( - ('foo', make_field(name='foo', type='TYPE_INT32')), - ('bar', make_field(name='bar', type='TYPE_STRING')), - ('surprise', any_field), - )) + subfields = collections.OrderedDict( + ( + ("foo", make_field(name="foo", type="TYPE_INT32")), + ("bar", make_field(name="bar", type="TYPE_STRING")), + ("surprise", any_field), + ) + ) message = wrappers.MessageType( fields=subfields, - message_pb=descriptor_pb2.DescriptorProto(name='Message', field=[ - i.field_pb for i in subfields.values() - ]), - meta=metadata.Metadata(address=metadata.Address( - module='bogus', - name='Message', - )), + message_pb=descriptor_pb2.DescriptorProto( + name="Message", field=[i.field_pb for i in subfields.values()] + ), + meta=metadata.Metadata( + address=metadata.Address( + module="bogus", + name="Message", + ) + ), nested_enums={}, nested_messages={}, ) field = make_field( - type='TYPE_MESSAGE', - type_name='bogus.Message', + type="TYPE_MESSAGE", + type_name="bogus.Message", message=message, ) mock = field.mock_value_original_type - assert mock == {"foo": 324, "bar": "bar_value", "surprise": { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07"}} + assert mock == { + "foo": 324, + "bar": "bar_value", + "surprise": { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + }, + } # Messages by definition aren't primitive with pytest.raises(TypeError): @@ -430,10 +448,10 @@ def test_mock_value_original_type_message(): # Special case for map entries entry_msg = make_message( - name='MessageEntry', + name="MessageEntry", fields=( - make_field(name='key', type='TYPE_STRING'), - make_field(name='value', type='TYPE_STRING'), + make_field(name="key", type="TYPE_STRING"), + make_field(name="value", type="TYPE_STRING"), ), 
options=descriptor_pb2.MessageOptions(map_entry=True), ) @@ -442,30 +460,36 @@ def test_mock_value_original_type_message(): type_name="stuff.MessageEntry", message=entry_msg, label=3, - type='TYPE_MESSAGE', + type="TYPE_MESSAGE", ) assert entry_field.mock_value_original_type == {} - assert any_message.fields['type_url'].primitive_mock( - ) == "type.googleapis.com/google.protobuf.Empty" + assert ( + any_message.fields["type_url"].primitive_mock() + == "type.googleapis.com/google.protobuf.Empty" + ) def test_merged_mock_value_message(): - subfields = collections.OrderedDict(( - ('foo', make_field(name='foo', type='TYPE_INT32')), - ('bar', make_field(name='bar', type='TYPE_STRING')) - )) + subfields = collections.OrderedDict( + ( + ("foo", make_field(name="foo", type="TYPE_INT32")), + ("bar", make_field(name="bar", type="TYPE_STRING")), + ) + ) message = wrappers.MessageType( fields=subfields, - message_pb=descriptor_pb2.DescriptorProto(name="Message", field=[ - i.field_pb for i in subfields.values() - ]), - meta=metadata.Metadata(address=metadata.Address( - module="bogus", - name="Message", - )), + message_pb=descriptor_pb2.DescriptorProto( + name="Message", field=[i.field_pb for i in subfields.values()] + ), + meta=metadata.Metadata( + address=metadata.Address( + module="bogus", + name="Message", + ) + ), nested_enums={}, nested_messages={}, ) @@ -541,7 +565,10 @@ def test_mock_value_original_type_enum_repeated(): @pytest.mark.parametrize( "mock_method,expected", [ - ("mock_value", "ac_turtle.Turtle(turtle=ac_turtle.Turtle(turtle=turtle.Turtle(turtle=None)))"), + ( + "mock_value", + "ac_turtle.Turtle(turtle=ac_turtle.Turtle(turtle=turtle.Turtle(turtle=None)))", + ), ("mock_value_original_type", {"turtle": {}}), ], ) @@ -592,7 +619,7 @@ def test_field_name_kword_disambiguation(): def test_field_resource_reference(): - field = make_field(name='parent', type='TYPE_STRING') + field = make_field(name="parent", type="TYPE_STRING") def test_extended_operation_properties(): 
diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py index ffa70cbfeace..a13f62c02b9b 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_message.py @@ -35,79 +35,80 @@ def test_message_properties(): - message = make_message('MyMessage') - assert message.name == 'MyMessage' + message = make_message("MyMessage") + assert message.name == "MyMessage" def test_message_docstring(): L = descriptor_pb2.SourceCodeInfo.Location - meta = metadata.Metadata(documentation=L( - leading_comments='Lorem ipsum', - trailing_comments='dolor set amet', - )) - message = make_message('Name', meta=meta) - assert message.meta.doc == 'Lorem ipsum' + meta = metadata.Metadata( + documentation=L( + leading_comments="Lorem ipsum", + trailing_comments="dolor set amet", + ) + ) + message = make_message("Name", meta=meta) + assert message.meta.doc == "Lorem ipsum" def test_message_ident(): - message = make_message('Baz', package='foo.v1', module='bar') - assert str(message.ident) == 'bar.Baz' - assert message.ident.sphinx == 'foo.v1.bar.Baz' + message = make_message("Baz", package="foo.v1", module="bar") + assert str(message.ident) == "bar.Baz" + assert message.ident.sphinx == "foo.v1.bar.Baz" def test_message_ident_collisions(): - message = make_message('Baz', package='foo.v1', module='bar').with_context( - collisions=frozenset({'bar'}), + message = make_message("Baz", package="foo.v1", module="bar").with_context( + collisions=frozenset({"bar"}), ) - assert str(message.ident) == 'fv_bar.Baz' - assert message.ident.sphinx == 'foo.v1.bar.Baz' + assert str(message.ident) == "fv_bar.Baz" + assert message.ident.sphinx == "foo.v1.bar.Baz" def test_message_pb2_sphinx_ident(): meta = metadata.Metadata( address=metadata.Address( - name='Timestamp', - package=('google', 'protobuf'), - module='timestamp', - 
api_naming=naming.NewNaming( - proto_package="foo.bar" - ) + name="Timestamp", + package=("google", "protobuf"), + module="timestamp", + api_naming=naming.NewNaming(proto_package="foo.bar"), ) ) - message = make_message("Timestamp", package='google.protobuf', - module='timestamp', meta=meta) - assert message.ident.sphinx == 'google.protobuf.timestamp_pb2.Timestamp' + message = make_message( + "Timestamp", package="google.protobuf", module="timestamp", meta=meta + ) + assert message.ident.sphinx == "google.protobuf.timestamp_pb2.Timestamp" def test_get_field(): - fields = (make_field('field_one'), make_field('field_two')) - message = make_message('Message', fields=fields) - field_one = message.get_field('field_one') + fields = (make_field("field_one"), make_field("field_two")) + message = make_message("Message", fields=fields) + field_one = message.get_field("field_one") assert isinstance(field_one, wrappers.Field) - assert field_one.name == 'field_one' + assert field_one.name == "field_one" def test_field_types(): # Create the inner message. inner_msg = make_message( - 'InnerMessage', + "InnerMessage", fields=( make_field( - 'hidden_message', - message=make_message('HiddenMessage'), + "hidden_message", + message=make_message("HiddenMessage"), ), - ) + ), ) - inner_enum = make_enum('InnerEnum') + inner_enum = make_enum("InnerEnum") # Create the outer message, which contains an Inner as a field. fields = ( - make_field('inner_message', message=inner_msg), - make_field('inner_enum', enum=inner_enum), - make_field('not_interesting'), + make_field("inner_message", message=inner_msg), + make_field("inner_enum", enum=inner_enum), + make_field("not_interesting"), ) - outer = make_message('Outer', fields=fields) + outer = make_message("Outer", fields=fields) # Assert that composite field types are recognized but primitives are not. 
assert len(outer.field_types) == 2 @@ -116,25 +117,19 @@ def test_field_types(): def test_field_types_recursive(): - enumeration = make_enum('Enumeration') + enumeration = make_enum("Enumeration") innest_msg = make_message( - 'InnestMessage', - fields=( - make_field('enumeration', enum=enumeration), - ) + "InnestMessage", fields=(make_field("enumeration", enum=enumeration),) ) inner_msg = make_message( - 'InnerMessage', - fields=( - make_field('innest_message', message=innest_msg), - ) + "InnerMessage", fields=(make_field("innest_message", message=innest_msg),) ) topmost_msg = make_message( - 'TopmostMessage', + "TopmostMessage", fields=( - make_field('inner_message', message=inner_msg), - make_field('uninteresting') - ) + make_field("inner_message", message=inner_msg), + make_field("uninteresting"), + ), ) actual = {t.name for t in topmost_msg.recursive_field_types} @@ -144,58 +139,56 @@ def test_field_types_recursive(): def test_get_field_recursive(): # Create the inner message. - inner_fields = (make_field('zero'), make_field('one')) - inner = make_message('Inner', fields=inner_fields, package='foo.v1') + inner_fields = (make_field("zero"), make_field("one")) + inner = make_message("Inner", fields=inner_fields, package="foo.v1") # Create the outer message, which contains an Inner as a field. - outer_field = make_field('inner', message=inner) - outer = make_message('Outer', fields=(outer_field,)) + outer_field = make_field("inner", message=inner) + outer = make_message("Outer", fields=(outer_field,)) # Assert that a recursive retrieval works. - assert outer.get_field('inner', 'zero') == inner_fields[0] - assert outer.get_field('inner', 'one') == inner_fields[1] - assert outer.get_field('inner.one') == inner_fields[1] + assert outer.get_field("inner", "zero") == inner_fields[0] + assert outer.get_field("inner", "one") == inner_fields[1] + assert outer.get_field("inner.one") == inner_fields[1] def test_get_field_nested_not_found_error(): # Create the inner message. 
- inner_field = make_field('zero') - inner = make_message('Inner', fields=(inner_field,), package='foo.v1') + inner_field = make_field("zero") + inner = make_message("Inner", fields=(inner_field,), package="foo.v1") # Create the outer message, which contains an Inner as a field. - outer_field = make_field('inner', message=inner) - outer = make_message('Outer', fields=(outer_field,)) + outer_field = make_field("inner", message=inner) + outer = make_message("Outer", fields=(outer_field,)) # Assert that a recursive retrieval fails. with pytest.raises(KeyError): - assert outer.get_field('inner', 'zero', 'beyond') + assert outer.get_field("inner", "zero", "beyond") def test_get_field_nonterminal_repeated_error(): # Create the inner message. - inner_fields = (make_field('zero'), make_field('one')) - inner = make_message('Inner', fields=inner_fields, package='foo.v1') + inner_fields = (make_field("zero"), make_field("one")) + inner = make_message("Inner", fields=inner_fields, package="foo.v1") # Create the outer message, which contains an Inner as a field. - outer_field = make_field('inner', message=inner, repeated=True) - outer = make_message('Outer', fields=(outer_field,)) + outer_field = make_field("inner", message=inner, repeated=True) + outer = make_message("Outer", fields=(outer_field,)) # Assert that a recursive retrieval fails. 
with pytest.raises(KeyError): - assert outer.get_field('inner', 'zero') == inner_fields[0] + assert outer.get_field("inner", "zero") == inner_fields[0] with pytest.raises(KeyError): - assert outer.get_field('inner', 'one') == inner_fields[1] + assert outer.get_field("inner", "one") == inner_fields[1] def test_resource_path(): options = descriptor_pb2.MessageOptions() resource = options.Extensions[resource_pb2.resource] - resource.pattern.append( - "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}") - resource.pattern.append( - "kingdoms/{kingdom}/divisions/{division}/classes/{klass}") + resource.pattern.append("kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}") + resource.pattern.append("kingdoms/{kingdom}/divisions/{division}/classes/{klass}") resource.type = "taxonomy.biology.com/Class" - message = make_message('Squid', options=options) + message = make_message("Squid", options=options) assert message.resource_path == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" assert message.resource_path_args == ["kingdom", "phylum", "klass"] @@ -205,23 +198,31 @@ def test_resource_path(): def test_resource_path_with_wildcard(): options = descriptor_pb2.MessageOptions() resource = options.Extensions[resource_pb2.resource] - resource.pattern.append( - "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass=**}") - resource.pattern.append( - "kingdoms/{kingdom}/divisions/{division}/classes/{klass}") + resource.pattern.append("kingdoms/{kingdom}/phyla/{phylum}/classes/{klass=**}") + resource.pattern.append("kingdoms/{kingdom}/divisions/{division}/classes/{klass}") resource.type = "taxonomy.biology.com/Class" - message = make_message('Squid', options=options) + message = make_message("Squid", options=options) - assert message.resource_path == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass=**}" + assert ( + message.resource_path == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass=**}" + ) assert message.resource_path_args == ["kingdom", "phylum", "klass"] assert 
message.resource_type == "Class" - assert re.match(message.path_regex_str, - "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass") - assert re.match(message.path_regex_str, - "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment") - assert re.match(message.path_regex_str, - "kingdoms/my-kingdom/phyla/my-phylum/classes/") is None - assert message.resource_path_formatted == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" + assert re.match( + message.path_regex_str, "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass" + ) + assert re.match( + message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment", + ) + assert ( + re.match(message.path_regex_str, "kingdoms/my-kingdom/phyla/my-phylum/classes/") + is None + ) + assert ( + message.resource_path_formatted + == "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" + ) def test_resource_path_pure_wildcard(): @@ -229,7 +230,7 @@ def test_resource_path_pure_wildcard(): resource = options.Extensions[resource_pb2.resource] resource.pattern.append("*") resource.type = "taxonomy.biology.com/Class" - message = make_message('Squid', options=options) + message = make_message("Squid", options=options) # Pure wildcard resource names do not really help construct resources # but they are a part of the spec so we need to support them, which means at @@ -239,29 +240,31 @@ def test_resource_path_pure_wildcard(): assert message.resource_type == "Class" # Pure wildcard resource names match everything... 
- assert re.match(message.path_regex_str, - "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass") - assert re.match(message.path_regex_str, - "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment") - assert re.match(message.path_regex_str, - "kingdoms/my-kingdom/phyla/my-phylum/classes/") + assert re.match( + message.path_regex_str, "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass" + ) + assert re.match( + message.path_regex_str, + "kingdoms/my-kingdom/phyla/my-phylum/classes/my-klass/additional-segment", + ) + assert re.match( + message.path_regex_str, "kingdoms/my-kingdom/phyla/my-phylum/classes/" + ) def test_parse_resource_path(): options = descriptor_pb2.MessageOptions() resource = options.Extensions[resource_pb2.resource] - resource.pattern.append( - "kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}" - ) + resource.pattern.append("kingdoms/{kingdom}/phyla/{phylum}/classes/{klass}") resource.type = "taxonomy.biology.com/Klass" - message = make_message('Klass', options=options) + message = make_message("Klass", options=options) # Plausible resource ID path path = "kingdoms/animalia/phyla/mollusca/classes/cephalopoda" expected = { - 'kingdom': 'animalia', - 'phylum': 'mollusca', - 'klass': 'cephalopoda', + "kingdom": "animalia", + "phylum": "mollusca", + "klass": "cephalopoda", } actual = re.match(message.path_regex_str, path).groupdict() @@ -269,18 +272,16 @@ def test_parse_resource_path(): options2 = descriptor_pb2.MessageOptions() resource2 = options2.Extensions[resource_pb2.resource] - resource2.pattern.append( - "kingdoms-{kingdom}_{phylum}#classes%{klass}" - ) + resource2.pattern.append("kingdoms-{kingdom}_{phylum}#classes%{klass}") resource2.type = "taxonomy.biology.com/Klass" - message2 = make_message('Klass', options=options2) + message2 = make_message("Klass", options=options2) # Plausible resource ID path from a non-standard schema path2 = "kingdoms-Animalia/_Mollusca~#classes%Cephalopoda" expected2 = { - 'kingdom': 
'Animalia/', - 'phylum': 'Mollusca~', - 'klass': 'Cephalopoda', + "kingdom": "Animalia/", + "phylum": "Mollusca~", + "klass": "Cephalopoda", } actual2 = re.match(message2.path_regex_str, path2).groupdict() @@ -290,14 +291,14 @@ def test_parse_resource_path(): def test_field_map(): # Create an Entry message. entry_msg = make_message( - name='FooEntry', + name="FooEntry", fields=( - make_field(name='key', type=9), - make_field(name='value', type=9), + make_field(name="key", type=9), + make_field(name="value", type=9), ), options=descriptor_pb2.MessageOptions(map_entry=True), ) - entry_field = make_field('foos', message=entry_msg, repeated=True) + entry_field = make_field("foos", message=entry_msg, repeated=True) assert entry_msg.map assert entry_field.map @@ -327,22 +328,16 @@ def test_oneof_fields(): def test_required_fields(): - REQUIRED = field_behavior_pb2.FieldBehavior.Value('REQUIRED') + REQUIRED = field_behavior_pb2.FieldBehavior.Value("REQUIRED") mass_kg = make_field(name="mass_kg", type=5) - mass_kg.options.Extensions[field_behavior_pb2.field_behavior].append( - REQUIRED - ) + mass_kg.options.Extensions[field_behavior_pb2.field_behavior].append(REQUIRED) length_m = make_field(name="length_m", type=5) - length_m.options.Extensions[field_behavior_pb2.field_behavior].append( - REQUIRED - ) + length_m.options.Extensions[field_behavior_pb2.field_behavior].append(REQUIRED) color = make_field(name="color", type=5) - color.options.Extensions[field_behavior_pb2.field_behavior].append( - REQUIRED - ) + color.options.Extensions[field_behavior_pb2.field_behavior].append(REQUIRED) request = make_message( name="CreateMolluscRequest", @@ -365,8 +360,10 @@ def test_is_extended_operation(): name="Operation", fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) - ) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) + ), ) for f 
in operation.fields.values(): options = descriptor_pb2.FieldOptions() @@ -384,7 +381,7 @@ def test_is_extended_operation(): make_field(name=name, type=T.Value("TYPE_STRING"), number=i) # Missing error_message for i, name in enumerate(("name", "status", "error_code"), start=1) - ) + ), ) for f in missing.fields.values(): options = descriptor_pb2.FieldOptions() @@ -399,8 +396,10 @@ def test_is_extended_operation(): name="MyMessage", fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) - ) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) + ), ) for f in my_message.fields.values(): options = descriptor_pb2.FieldOptions() @@ -415,8 +414,10 @@ def test_is_extended_operation(): name="Operation", fields=tuple( make_field(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) - ) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) + ), ) for f in duplicate.fields.values(): options = descriptor_pb2.FieldOptions() @@ -464,7 +465,7 @@ def test_extended_operation_request_response_fields(): fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) for i, name in enumerate(("name", "rank", "affinity", "serial")) - ] + ], ) expected = (poll_request.fields["name"], poll_request.fields["affinity"]) for field in expected: diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index d0b31d3bb76d..9338d73f673d 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -36,501 +36,488 @@ def test_method_types(): - input_msg = make_message(name='Input', module='baz') - output_msg = make_message(name='Output', 
module='baz') - method = make_method('DoSomething', input_msg, output_msg, - package='foo.bar', module='bacon') - assert method.name == 'DoSomething' - assert method.input.name == 'Input' - assert method.output.name == 'Output' + input_msg = make_message(name="Input", module="baz") + output_msg = make_message(name="Output", module="baz") + method = make_method( + "DoSomething", input_msg, output_msg, package="foo.bar", module="bacon" + ) + assert method.name == "DoSomething" + assert method.input.name == "Input" + assert method.output.name == "Output" def test_method_void(): - empty = make_message(name='Empty', package='google.protobuf') - method = make_method('Meh', output_message=empty) + empty = make_message(name="Empty", package="google.protobuf") + method = make_method("Meh", output_message=empty) assert method.void def test_method_not_void(): - not_empty = make_message(name='OutputMessage', package='foo.bar.v1') - method = make_method('Meh', output_message=not_empty) + not_empty = make_message(name="OutputMessage", package="foo.bar.v1") + method = make_method("Meh", output_message=not_empty) assert not method.void def test_method_deprecated(): - method = make_method('DeprecatedMethod', is_deprecated=True) + method = make_method("DeprecatedMethod", is_deprecated=True) assert method.is_deprecated def test_method_client_output(): - output = make_message(name='Input', module='baz') - method = make_method('DoStuff', output_message=output) + output = make_message(name="Input", module="baz") + method = make_method("DoStuff", output_message=output) assert method.client_output is method.output def test_method_client_output_empty(): - empty = make_message(name='Empty', package='google.protobuf') - method = make_method('Meh', output_message=empty) + empty = make_message(name="Empty", package="google.protobuf") + method = make_method("Meh", output_message=empty) assert method.client_output == wrappers.PrimitiveType.build(None) def test_method_client_output_paged(): - 
paged = make_field(name='foos', message=make_message('Foo'), repeated=True) - parent = make_field(name='parent', type=9) # str - page_size = make_field(name='page_size', type=5) # int - page_token = make_field(name='page_token', type=9) # str - - input_msg = make_message(name='ListFoosRequest', fields=( - parent, - page_size, - page_token, - )) - output_msg = make_message(name='ListFoosResponse', fields=( - paged, - make_field(name='next_page_token', type=9), # str - )) + paged = make_field(name="foos", message=make_message("Foo"), repeated=True) + parent = make_field(name="parent", type=9) # str + page_size = make_field(name="page_size", type=5) # int + page_token = make_field(name="page_token", type=9) # str + + input_msg = make_message( + name="ListFoosRequest", + fields=( + parent, + page_size, + page_token, + ), + ) + output_msg = make_message( + name="ListFoosResponse", + fields=( + paged, + make_field(name="next_page_token", type=9), # str + ), + ) method = make_method( - 'ListFoos', + "ListFoos", input_message=input_msg, output_message=output_msg, ) assert method.paged_result_field == paged - assert method.client_output.ident.name == 'ListFoosPager' - - max_results = make_field(name='max_results', type=5) # int - input_msg = make_message(name='ListFoosRequest', fields=( - parent, - max_results, - page_token, - )) + assert method.client_output.ident.name == "ListFoosPager" + + max_results = make_field(name="max_results", type=5) # int + input_msg = make_message( + name="ListFoosRequest", + fields=( + parent, + max_results, + page_token, + ), + ) method = make_method( - 'ListFoos', + "ListFoos", input_message=input_msg, output_message=output_msg, ) assert method.paged_result_field == paged - assert method.client_output.ident.name == 'ListFoosPager' + assert method.client_output.ident.name == "ListFoosPager" def test_method_client_output_async_empty(): - empty = make_message(name='Empty', package='google.protobuf') - method = make_method('Meh', 
output_message=empty) + empty = make_message(name="Empty", package="google.protobuf") + method = make_method("Meh", output_message=empty) assert method.client_output_async == wrappers.PrimitiveType.build(None) def test_method_paged_result_field_not_first(): - paged = make_field(name='foos', message=make_message('Foo'), repeated=True) - input_msg = make_message(name='ListFoosRequest', fields=( - make_field(name='parent', type=9), # str - make_field(name='page_size', type=5), # int - make_field(name='page_token', type=9), # str - )) - output_msg = make_message(name='ListFoosResponse', fields=( - make_field(name='next_page_token', type=9), # str - paged, - )) - method = make_method('ListFoos', - input_message=input_msg, - output_message=output_msg, - ) + paged = make_field(name="foos", message=make_message("Foo"), repeated=True) + input_msg = make_message( + name="ListFoosRequest", + fields=( + make_field(name="parent", type=9), # str + make_field(name="page_size", type=5), # int + make_field(name="page_token", type=9), # str + ), + ) + output_msg = make_message( + name="ListFoosResponse", + fields=( + make_field(name="next_page_token", type=9), # str + paged, + ), + ) + method = make_method( + "ListFoos", + input_message=input_msg, + output_message=output_msg, + ) assert method.paged_result_field == paged def test_method_paged_result_field_no_page_field(): - input_msg = make_message(name='ListFoosRequest', fields=( - make_field(name='parent', type=9), # str - make_field(name='page_size', type=5), # int - make_field(name='page_token', type=9), # str - )) - output_msg = make_message(name='ListFoosResponse', fields=( - make_field(name='foos', message=make_message('Foo'), repeated=False), - make_field(name='next_page_token', type=9), # str - )) - method = make_method('ListFoos', - input_message=input_msg, - output_message=output_msg, - ) + input_msg = make_message( + name="ListFoosRequest", + fields=( + make_field(name="parent", type=9), # str + 
make_field(name="page_size", type=5), # int + make_field(name="page_token", type=9), # str + ), + ) + output_msg = make_message( + name="ListFoosResponse", + fields=( + make_field(name="foos", message=make_message("Foo"), repeated=False), + make_field(name="next_page_token", type=9), # str + ), + ) + method = make_method( + "ListFoos", + input_message=input_msg, + output_message=output_msg, + ) assert method.paged_result_field is None method = make_method( - name='Foo', + name="Foo", input_message=make_message( - name='FooRequest', - fields=(make_field(name='page_token', type=9),) # str + name="FooRequest", fields=(make_field(name="page_token", type=9),) # str ), output_message=make_message( - name='FooResponse', - fields=(make_field(name='next_page_token', type=9),) # str - ) + name="FooResponse", + fields=(make_field(name="next_page_token", type=9),), # str + ), ) assert method.paged_result_field is None def test_method_paged_result_ref_types(): input_msg = make_message( - name='ListSquidsRequest', + name="ListSquidsRequest", fields=( - make_field(name='parent', type=9), # str - make_field(name='page_size', type=5), # int - make_field(name='page_token', type=9), # str + make_field(name="parent", type=9), # str + make_field(name="page_size", type=5), # int + make_field(name="page_token", type=9), # str ), - module='squid', + module="squid", ) - mollusc_msg = make_message('Mollusc', module='mollusc') + mollusc_msg = make_message("Mollusc", module="mollusc") output_msg = make_message( - name='ListMolluscsResponse', + name="ListMolluscsResponse", fields=( - make_field(name='molluscs', message=mollusc_msg, repeated=True), - make_field(name='next_page_token', type=9) # str + make_field(name="molluscs", message=mollusc_msg, repeated=True), + make_field(name="next_page_token", type=9), # str ), - module='mollusc' + module="mollusc", ) method = make_method( - 'ListSquids', - input_message=input_msg, - output_message=output_msg, - module='squid' + "ListSquids", 
input_message=input_msg, output_message=output_msg, module="squid" ) ref_type_names = {t.name for t in method.ref_types} assert ref_type_names == { - 'ListSquidsRequest', - 'ListSquidsPager', - 'ListSquidsAsyncPager', - 'Mollusc', + "ListSquidsRequest", + "ListSquidsPager", + "ListSquidsAsyncPager", + "Mollusc", } def test_flattened_ref_types(): method = make_method( - 'IdentifyMollusc', + "IdentifyMollusc", input_message=make_message( - 'IdentifyMolluscRequest', + "IdentifyMolluscRequest", fields=( make_field( - 'cephalopod', + "cephalopod", message=make_message( - 'Cephalopod', + "Cephalopod", fields=( - make_field('mass_kg', type='TYPE_INT32'), + make_field("mass_kg", type="TYPE_INT32"), make_field( - 'squid', + "squid", number=2, - message=make_message('Squid'), + message=make_message("Squid"), ), make_field( - 'clam', + "clam", number=3, - message=make_message('Clam'), + message=make_message("Clam"), ), ), ), ), make_field( - 'stratum', + "stratum", enum=make_enum( - 'Stratum', - ) + "Stratum", + ), ), ), ), - signatures=('cephalopod.squid,stratum',), - output_message=make_message('Mollusc'), + signatures=("cephalopod.squid,stratum",), + output_message=make_message("Mollusc"), ) expected_flat_ref_type_names = { - 'IdentifyMolluscRequest', - 'Squid', - 'Stratum', - 'Mollusc', + "IdentifyMolluscRequest", + "Squid", + "Stratum", + "Mollusc", } actual_flat_ref_type_names = {t.name for t in method.flat_ref_types} assert expected_flat_ref_type_names == actual_flat_ref_type_names def test_method_paged_result_primitive(): - paged = make_field(name='squids', type=9, repeated=True) # str + paged = make_field(name="squids", type=9, repeated=True) # str input_msg = make_message( - name='ListSquidsRequest', + name="ListSquidsRequest", + fields=( + make_field(name="parent", type=9), # str + make_field(name="page_size", type=5), # int + make_field(name="page_token", type=9), # str + ), + ) + output_msg = make_message( + name="ListFoosResponse", fields=( - 
make_field(name='parent', type=9), # str - make_field(name='page_size', type=5), # int - make_field(name='page_token', type=9), # str + paged, + make_field(name="next_page_token", type=9), # str ), ) - output_msg = make_message(name='ListFoosResponse', fields=( - paged, - make_field(name='next_page_token', type=9), # str - )) method = make_method( - 'ListSquids', + "ListSquids", input_message=input_msg, output_message=output_msg, ) assert method.paged_result_field == paged - assert method.client_output.ident.name == 'ListSquidsPager' + assert method.client_output.ident.name == "ListSquidsPager" def test_method_field_headers_none(): - method = make_method('DoSomething') + method = make_method("DoSomething") assert isinstance(method.field_headers, collections.abc.Sequence) def test_method_field_headers_present(): verbs = [ - 'get', - 'put', - 'post', - 'delete', - 'patch', + "get", + "put", + "post", + "delete", + "patch", ] for v in verbs: - rule = http_pb2.HttpRule(**{v: '/v1/{parent=projects/*}/topics'}) - method = make_method('DoSomething', http_rule=rule) - assert method.field_headers == (wrappers.FieldHeader('parent'),) - assert method.field_headers[0].raw == 'parent' - assert method.field_headers[0].disambiguated == 'parent' + rule = http_pb2.HttpRule(**{v: "/v1/{parent=projects/*}/topics"}) + method = make_method("DoSomething", http_rule=rule) + assert method.field_headers == (wrappers.FieldHeader("parent"),) + assert method.field_headers[0].raw == "parent" + assert method.field_headers[0].disambiguated == "parent" # test that reserved keyword in field header is disambiguated - rule = http_pb2.HttpRule(**{v: '/v1/{object=objects/*}/topics'}) - method = make_method('DoSomething', http_rule=rule) - assert method.field_headers == (wrappers.FieldHeader('object'),) - assert method.field_headers[0].raw == 'object' - assert method.field_headers[0].disambiguated == 'object_' + rule = http_pb2.HttpRule(**{v: "/v1/{object=objects/*}/topics"}) + method = 
make_method("DoSomething", http_rule=rule) + assert method.field_headers == (wrappers.FieldHeader("object"),) + assert method.field_headers[0].raw == "object" + assert method.field_headers[0].disambiguated == "object_" # test w/o equal sign - rule = http_pb2.HttpRule(**{v: '/v1/{parent}/topics'}) - method = make_method('DoSomething', http_rule=rule) - assert method.field_headers == (wrappers.FieldHeader('parent'),) - assert method.field_headers[0].raw == 'parent' - assert method.field_headers[0].disambiguated == 'parent' + rule = http_pb2.HttpRule(**{v: "/v1/{parent}/topics"}) + method = make_method("DoSomething", http_rule=rule) + assert method.field_headers == (wrappers.FieldHeader("parent"),) + assert method.field_headers[0].raw == "parent" + assert method.field_headers[0].disambiguated == "parent" def test_method_routing_rule(): routing_rule = routing_pb2.RoutingRule() param = routing_rule.routing_parameters.add() - param.field = 'table_name' - param.path_template = 'projects/*/{table_location=instances/*}/tables/*' - method = make_method('DoSomething', routing_rule=routing_rule) + param.field = "table_name" + param.path_template = "projects/*/{table_location=instances/*}/tables/*" + method = make_method("DoSomething", routing_rule=routing_rule) assert method.explicit_routing - assert method.routing_rule.routing_parameters == [wrappers.RoutingParameter( - x.field, x.path_template) for x in routing_rule.routing_parameters] + assert method.routing_rule.routing_parameters == [ + wrappers.RoutingParameter(x.field, x.path_template) + for x in routing_rule.routing_parameters + ] assert method.routing_rule.routing_parameters[0].sample_request is not None def test_method_routing_rule_empty_routing_parameters(): routing_rule = routing_pb2.RoutingRule() - method = make_method('DoSomething', routing_rule=routing_rule) + method = make_method("DoSomething", routing_rule=routing_rule) assert method.routing_rule is None def test_method_routing_rule_not_set(): - method = 
make_method('DoSomething') + method = make_method("DoSomething") assert method.routing_rule is None def test_method_http_opt(): - http_rule = http_pb2.HttpRule( - post='/v1/{parent=projects/*}/topics', - body='*' - ) - method = make_method('DoSomething', http_rule=http_rule) + http_rule = http_pb2.HttpRule(post="/v1/{parent=projects/*}/topics", body="*") + method = make_method("DoSomething", http_rule=http_rule) assert method.http_opt == { - 'verb': 'post', - 'url': '/v1/{parent=projects/*}/topics', - 'body': '*' + "verb": "post", + "url": "/v1/{parent=projects/*}/topics", + "body": "*", } + + # TODO(yon-mg) to test: grpc transcoding, # correct handling of path/query params # correct handling of body & additional binding def test_method_http_opt_no_body(): - http_rule = http_pb2.HttpRule(post='/v1/{parent=projects/*}/topics') - method = make_method('DoSomething', http_rule=http_rule) - assert method.http_opt == { - 'verb': 'post', - 'url': '/v1/{parent=projects/*}/topics' - } + http_rule = http_pb2.HttpRule(post="/v1/{parent=projects/*}/topics") + method = make_method("DoSomething", http_rule=http_rule) + assert method.http_opt == {"verb": "post", "url": "/v1/{parent=projects/*}/topics"} def test_method_http_opt_no_http_rule(): - method = make_method('DoSomething') + method = make_method("DoSomething") assert method.http_opt == None def test_method_path_params(): # tests only the basic case of grpc transcoding - http_rule = http_pb2.HttpRule(post='/v1/{project}/topics') - method = make_method('DoSomething', http_rule=http_rule) - assert method.path_params == ['project'] + http_rule = http_pb2.HttpRule(post="/v1/{project}/topics") + method = make_method("DoSomething", http_rule=http_rule) + assert method.path_params == ["project"] - http_rule2 = http_pb2.HttpRule(post='/v1beta1/{name=rooms/*/blurbs/*}') + http_rule2 = http_pb2.HttpRule(post="/v1beta1/{name=rooms/*/blurbs/*}") method2 = make_method("DoSomething", http_rule=http_rule2) assert method2.path_params == 
["name"] def test_method_path_params_no_http_rule(): - method = make_method('DoSomething') + method = make_method("DoSomething") assert method.path_params == [] def test_body_fields(): - http_rule = http_pb2.HttpRule( - post='/v1/{arms_shape=arms/*}/squids', - body='mantle' - ) - - mantle_stuff = make_field(name='mantle_stuff', type=9) - message = make_message('Mantle', fields=(mantle_stuff,)) - mantle = make_field('mantle', type=11, type_name='Mantle', message=message) - arms_shape = make_field('arms_shape', type=9) - input_message = make_message('Squid', fields=(mantle, arms_shape)) - method = make_method( - 'PutSquid', input_message=input_message, http_rule=http_rule) - assert set(method.body_fields) == {'mantle'} - mock_value = method.body_fields['mantle'].mock_value + http_rule = http_pb2.HttpRule(post="/v1/{arms_shape=arms/*}/squids", body="mantle") + + mantle_stuff = make_field(name="mantle_stuff", type=9) + message = make_message("Mantle", fields=(mantle_stuff,)) + mantle = make_field("mantle", type=11, type_name="Mantle", message=message) + arms_shape = make_field("arms_shape", type=9) + input_message = make_message("Squid", fields=(mantle, arms_shape)) + method = make_method("PutSquid", input_message=input_message, http_rule=http_rule) + assert set(method.body_fields) == {"mantle"} + mock_value = method.body_fields["mantle"].mock_value assert mock_value == "baz.Mantle(mantle_stuff='mantle_stuff_value')" def test_body_fields_no_body(): http_rule = http_pb2.HttpRule( - post='/v1/{arms_shape=arms/*}/squids', + post="/v1/{arms_shape=arms/*}/squids", ) - method = make_method( - 'PutSquid', http_rule=http_rule) + method = make_method("PutSquid", http_rule=http_rule) assert not method.body_fields def test_method_http_options(): - verbs = [ - 'get', - 'put', - 'post', - 'delete', - 'patch' - ] + verbs = ["get", "put", "post", "delete", "patch"] for v in verbs: - http_rule = http_pb2.HttpRule(**{v: '/v1/{parent=projects/*}/topics'}) - method = 
make_method('DoSomething', http_rule=http_rule) - assert [dataclasses.asdict(http) for http in method.http_options] == [{ - 'method': v, - 'uri': '/v1/{parent=projects/*}/topics', - 'body': None - }] + http_rule = http_pb2.HttpRule(**{v: "/v1/{parent=projects/*}/topics"}) + method = make_method("DoSomething", http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [ + {"method": v, "uri": "/v1/{parent=projects/*}/topics", "body": None} + ] def test_method_http_options_empty_http_rule(): http_rule = http_pb2.HttpRule() - method = make_method('DoSomething', http_rule=http_rule) + method = make_method("DoSomething", http_rule=http_rule) assert method.http_options == [] - http_rule = http_pb2.HttpRule(get='') - method = make_method('DoSomething', http_rule=http_rule) + http_rule = http_pb2.HttpRule(get="") + method = make_method("DoSomething", http_rule=http_rule) assert method.http_options == [] def test_method_http_options_no_http_rule(): - method = make_method('DoSomething') + method = make_method("DoSomething") assert method.path_params == [] def test_method_http_options_body_star(): - http_rule = http_pb2.HttpRule( - post='/v1/{parent=projects/*}/topics', - body='*' - ) - method = make_method('DoSomething', http_rule=http_rule) - assert [dataclasses.asdict(http) for http in method.http_options] == [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/topics', - 'body': '*' - }] + http_rule = http_pb2.HttpRule(post="/v1/{parent=projects/*}/topics", body="*") + method = make_method("DoSomething", http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [ + {"method": "post", "uri": "/v1/{parent=projects/*}/topics", "body": "*"} + ] def test_method_http_options_body_field(): http_rule = http_pb2.HttpRule( - post='/v1/{parent=projects/*}/topics', - body='body_field' + post="/v1/{parent=projects/*}/topics", body="body_field" ) - method = make_method('DoSomething', http_rule=http_rule) - assert 
[dataclasses.asdict(http) for http in method.http_options] == [{ - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/topics', - 'body': 'body_field' - }] + method = make_method("DoSomething", http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/topics", + "body": "body_field", + } + ] def test_method_http_options_additional_bindings(): http_rule = http_pb2.HttpRule( - post='/v1/{parent=projects/*}/topics', - body='*', + post="/v1/{parent=projects/*}/topics", + body="*", additional_bindings=[ http_pb2.HttpRule( - post='/v1/{parent=projects/*/regions/*}/topics', - body='*', + post="/v1/{parent=projects/*/regions/*}/topics", + body="*", ), http_pb2.HttpRule( - post='/v1/projects/p1/topics', - body='body_field', + post="/v1/projects/p1/topics", + body="body_field", ), - ] + ], ) - method = make_method('DoSomething', http_rule=http_rule) + method = make_method("DoSomething", http_rule=http_rule) assert [dataclasses.asdict(http) for http in method.http_options] == [ + {"method": "post", "uri": "/v1/{parent=projects/*}/topics", "body": "*"}, { - 'method': 'post', - 'uri': '/v1/{parent=projects/*}/topics', - 'body': '*' - }, - { - 'method': 'post', - 'uri': '/v1/{parent=projects/*/regions/*}/topics', - 'body': '*' - }, - { - 'method': 'post', - 'uri': '/v1/projects/p1/topics', - 'body': 'body_field' - }] + "method": "post", + "uri": "/v1/{parent=projects/*/regions/*}/topics", + "body": "*", + }, + {"method": "post", "uri": "/v1/projects/p1/topics", "body": "body_field"}, + ] def test_method_http_options_reserved_name_in_url(): - http_rule = http_pb2.HttpRule( - post='/v1/license/{license=lic/*}', - body='*' - ) - method = make_method('DoSomething', http_rule=http_rule) - assert [dataclasses.asdict(http) for http in method.http_options] == [{ - 'method': 'post', - 'uri': '/v1/license/{license_=lic/*}', - 'body': '*' - }] + http_rule = 
http_pb2.HttpRule(post="/v1/license/{license=lic/*}", body="*") + method = make_method("DoSomething", http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [ + {"method": "post", "uri": "/v1/license/{license_=lic/*}", "body": "*"} + ] def test_method_http_options_reserved_name_in_body(): - http_rule = http_pb2.HttpRule( - post='/v1/license/{license=lic/*}', - body='breakpoint' - ) - method = make_method('DoSomething', http_rule=http_rule) - assert [dataclasses.asdict(http) for http in method.http_options] == [{ - 'method': 'post', - 'uri': '/v1/license/{license_=lic/*}', - 'body': 'breakpoint_' - }] + http_rule = http_pb2.HttpRule(post="/v1/license/{license=lic/*}", body="breakpoint") + method = make_method("DoSomething", http_rule=http_rule) + assert [dataclasses.asdict(http) for http in method.http_options] == [ + {"method": "post", "uri": "/v1/license/{license_=lic/*}", "body": "breakpoint_"} + ] def test_method_http_options_generate_sample(): http_rule = http_pb2.HttpRule( - get='/v1/{resource.id=projects/*/regions/*/id/**}/stuff', + get="/v1/{resource.id=projects/*/regions/*/id/**}/stuff", ) method = make_method( - 'DoSomething', + "DoSomething", make_message( name="Input", fields=[ @@ -550,16 +537,15 @@ def test_method_http_options_generate_sample(): http_rule=http_rule, ) sample = method.http_options[0].sample_request(method) - assert sample == {'resource': { - 'id': 'projects/sample1/regions/sample2/id/sample3'}} + assert sample == {"resource": {"id": "projects/sample1/regions/sample2/id/sample3"}} def test_method_http_options_generate_sample_implicit_template(): http_rule = http_pb2.HttpRule( - get='/v1/{resource.id}/stuff', + get="/v1/{resource.id}/stuff", ) method = make_method( - 'DoSomething', + "DoSomething", make_message( name="Input", fields=[ @@ -579,120 +565,108 @@ def test_method_http_options_generate_sample_implicit_template(): ) sample = method.http_options[0].sample_request(method) - assert sample == 
{'resource': { - 'id': 'sample1'}} + assert sample == {"resource": {"id": "sample1"}} def test_method_query_params(): # tests only the basic case of grpc transcoding - http_rule = http_pb2.HttpRule( - post='/v1/{project}/topics', - body='address' - ) + http_rule = http_pb2.HttpRule(post="/v1/{project}/topics", body="address") input_message = make_message( - 'MethodInput', - fields=( - make_field('region'), - make_field('project'), - make_field('address') - ) + "MethodInput", + fields=(make_field("region"), make_field("project"), make_field("address")), + ) + method = make_method( + "DoSomething", http_rule=http_rule, input_message=input_message ) - method = make_method('DoSomething', http_rule=http_rule, - input_message=input_message) - assert method.query_params == {'region'} + assert method.query_params == {"region"} def test_method_query_params_no_body(): # tests only the basic case of grpc transcoding - http_rule = http_pb2.HttpRule(post='/v1/{project}/topics') + http_rule = http_pb2.HttpRule(post="/v1/{project}/topics") input_message = make_message( - 'MethodInput', + "MethodInput", fields=( - make_field('region'), - make_field('project'), - ) + make_field("region"), + make_field("project"), + ), ) - method = make_method('DoSomething', http_rule=http_rule, - input_message=input_message) - assert method.query_params == {'region'} + method = make_method( + "DoSomething", http_rule=http_rule, input_message=input_message + ) + assert method.query_params == {"region"} def test_method_query_params_star_body(): # tests only the basic case of grpc transcoding - http_rule = http_pb2.HttpRule( - post='/v1/{project}/topics', - body='*' - ) + http_rule = http_pb2.HttpRule(post="/v1/{project}/topics", body="*") input_message = make_message( - 'MethodInput', - fields=( - make_field('region'), - make_field('project'), - make_field('address') - ) + "MethodInput", + fields=(make_field("region"), make_field("project"), make_field("address")), + ) + method = make_method( + 
"DoSomething", http_rule=http_rule, input_message=input_message ) - method = make_method('DoSomething', http_rule=http_rule, - input_message=input_message) assert method.query_params == set() def test_method_query_params_no_http_rule(): - method = make_method('DoSomething') + method = make_method("DoSomething") assert method.query_params == set() def test_method_idempotent_yes(): - http_rule = http_pb2.HttpRule(get='/v1/{parent=projects/*}/topics') - method = make_method('DoSomething', http_rule=http_rule) + http_rule = http_pb2.HttpRule(get="/v1/{parent=projects/*}/topics") + method = make_method("DoSomething", http_rule=http_rule) assert method.idempotent is True def test_method_idempotent_no(): - http_rule = http_pb2.HttpRule(post='/v1/{parent=projects/*}/topics') - method = make_method('DoSomething', http_rule=http_rule) + http_rule = http_pb2.HttpRule(post="/v1/{parent=projects/*}/topics") + method = make_method("DoSomething", http_rule=http_rule) assert method.idempotent is False def test_method_idempotent_no_http_rule(): - method = make_method('DoSomething') + method = make_method("DoSomething") assert method.idempotent is False def test_method_unary_unary(): - method = make_method('F', client_streaming=False, server_streaming=False) - assert method.grpc_stub_type == 'unary_unary' + method = make_method("F", client_streaming=False, server_streaming=False) + assert method.grpc_stub_type == "unary_unary" def test_method_unary_stream(): - method = make_method('F', client_streaming=False, server_streaming=True) - assert method.grpc_stub_type == 'unary_stream' + method = make_method("F", client_streaming=False, server_streaming=True) + assert method.grpc_stub_type == "unary_stream" def test_method_stream_unary(): - method = make_method('F', client_streaming=True, server_streaming=False) - assert method.grpc_stub_type == 'stream_unary' + method = make_method("F", client_streaming=True, server_streaming=False) + assert method.grpc_stub_type == "stream_unary" def 
test_method_stream_stream(): - method = make_method('F', client_streaming=True, server_streaming=True) - assert method.grpc_stub_type == 'stream_stream' + method = make_method("F", client_streaming=True, server_streaming=True) + assert method.grpc_stub_type == "stream_stream" def test_method_flattened_fields(): - a = make_field('a', type=5) # int - b = make_field('b', type=5) - input_msg = make_message('Z', fields=(a, b)) - method = make_method('F', input_message=input_msg, signatures=('a,b',)) + a = make_field("a", type=5) # int + b = make_field("b", type=5) + input_msg = make_message("Z", fields=(a, b)) + method = make_method("F", input_message=input_msg, signatures=("a,b",)) assert len(method.flattened_fields) == 2 - assert 'a' in method.flattened_fields - assert 'b' in method.flattened_fields + assert "a" in method.flattened_fields + assert "b" in method.flattened_fields def test_method_flattened_fields_empty_sig(): - a = make_field('a', type=5) # int - b = make_field('b', type=5) - input_msg = make_message('Z', fields=(a, b)) - method = make_method('F', input_message=input_msg, signatures=('',)) + a = make_field("a", type=5) # int + b = make_field("b", type=5) + input_msg = make_message("Z", fields=(a, b)) + method = make_method("F", input_message=input_msg, signatures=("",)) assert len(method.flattened_fields) == 0 @@ -706,53 +680,56 @@ def test_method_flattened_fields_different_package_non_primitive(): # directly to its fields, which complicates request construction. # The easiest solution in this case is to just prohibit these fields # in the method flattening. 
- message = make_message('Mantle', - package="mollusc.cephalopod.v1", module="squid") - mantle = make_field('mantle', type=11, type_name='Mantle', - message=message, meta=message.meta) - arms_count = make_field('arms_count', type=5, meta=message.meta) + message = make_message("Mantle", package="mollusc.cephalopod.v1", module="squid") + mantle = make_field( + "mantle", type=11, type_name="Mantle", message=message, meta=message.meta + ) + arms_count = make_field("arms_count", type=5, meta=message.meta) input_message = make_message( - 'Squid', fields=(mantle, arms_count), + "Squid", + fields=(mantle, arms_count), package=".".join(message.meta.address.package), - module=message.meta.address.module + module=message.meta.address.module, + ) + method = make_method( + "PutSquid", + input_message=input_message, + package="remote.package.v1", + module="module", + signatures=("mantle,arms_count",), ) - method = make_method('PutSquid', input_message=input_message, - package="remote.package.v1", module="module", signatures=("mantle,arms_count",)) - assert set(method.flattened_fields) == {'arms_count'} + assert set(method.flattened_fields) == {"arms_count"} def test_method_include_flattened_message_fields(): - a = make_field('a', type=5) - b = make_field('b', type=11, type_name='Eggs', - message=make_message('Eggs')) - input_msg = make_message('Z', fields=(a, b)) - method = make_method('F', input_message=input_msg, signatures=('a,b',)) + a = make_field("a", type=5) + b = make_field("b", type=11, type_name="Eggs", message=make_message("Eggs")) + input_msg = make_message("Z", fields=(a, b)) + method = make_method("F", input_message=input_msg, signatures=("a,b",)) assert len(method.flattened_fields) == 2 def test_method_legacy_flattened_fields(): required_options = descriptor_pb2.FieldOptions() required_options.Extensions[field_behavior_pb2.field_behavior].append( - field_behavior_pb2.FieldBehavior.Value("REQUIRED")) + field_behavior_pb2.FieldBehavior.Value("REQUIRED") + ) # 
Cephalopods are required. squid = make_field(name="squid", options=required_options) octopus = make_field( name="octopus", message=make_message( - name="Octopus", - fields=[make_field(name="mass", options=required_options)] + name="Octopus", fields=[make_field(name="mass", options=required_options)] ), - options=required_options) + options=required_options, + ) # Bivalves are optional. clam = make_field(name="clam") oyster = make_field( name="oyster", - message=make_message( - name="Oyster", - fields=[make_field(name="has_pearl")] - ) + message=make_message(name="Oyster", fields=[make_field(name="has_pearl")]), ) # Interleave required and optional fields to make sure @@ -763,20 +740,14 @@ def test_method_legacy_flattened_fields(): name="CreateMolluscs", input_message=request, # Signatures should be ignored. - signatures=[ - "squid,octopus.mass", - "squid,octopus,oyster.has_pearl" - ] + signatures=["squid,octopus.mass", "squid,octopus,oyster.has_pearl"], ) # Use an ordered dict because ordering is important: # required fields should come first. 
- expected = collections.OrderedDict([ - ("squid", squid), - ("octopus", octopus), - ("clam", clam), - ("oyster", oyster) - ]) + expected = collections.OrderedDict( + [("squid", squid), ("octopus", octopus), ("clam", clam), ("oyster", oyster)] + ) assert method.legacy_flattened_fields == expected @@ -818,7 +789,7 @@ def test_flattened_oneof_fields(): "mantle.mass_kg,", "mantle.mass_lbs,", "color", - ] + ], ) expected = {"mass": [mass_kg, mass_lbs], "length": [length_m, length_f]} @@ -844,7 +815,9 @@ def test_is_operation_polling_method(): name="Operation", fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ], ) for f in operation.fields.values(): @@ -855,9 +828,7 @@ def test_is_operation_polling_method(): request = make_message( name="GetOperation", - fields=[ - make_field(name="name", type=T.Value("TYPE_STRING"), number=1) - ], + fields=[make_field(name="name", type=T.Value("TYPE_STRING"), number=1)], ) # Correct positive @@ -888,7 +859,7 @@ def test_is_operation_polling_method(): name="Get", input_message=request, output_message=response, - options=options, # Reuse options from the actual polling method + options=options, # Reuse options from the actual polling method ) assert not invalid_method.is_operation_polling_method @@ -950,17 +921,18 @@ def test_differently_named_extended_operation_fields( number=i, ) for i, name in enumerate(all_field_names, start=1) - ] + ], ) for f in operation.fields.values(): options = descriptor_pb2.FieldOptions() options.Extensions[ex_ops_pb2.operation_field] = f.number f.options.MergeFrom(options) - expected = { - k: operation.fields[v] - for k, v in canonical_name_to_field_name.items() - } if canonical_name_to_field_name is not None else None + expected = ( + {k: operation.fields[v] for k, v in canonical_name_to_field_name.items()} + 
if canonical_name_to_field_name is not None + else None + ) actual = operation.differently_named_extended_operation_fields assert expected == actual @@ -969,13 +941,17 @@ def test_differently_named_extended_operation_fields( def test_transport_safe_name(): unsafe_methods = { name: make_method(name=name) - for name in ["CreateChannel", "GrpcChannel", "OperationsClient", "import", "Import", "Raise"] + for name in [ + "CreateChannel", + "GrpcChannel", + "OperationsClient", + "import", + "Import", + "Raise", + ] } - safe_methods = { - name: make_method(name=name) - for name in ["Call", "Put", "Hold"] - } + safe_methods = {name: make_method(name=name) for name in ["Call", "Put", "Hold"]} for name, method in safe_methods.items(): assert method.transport_safe_name == name @@ -986,14 +962,10 @@ def test_transport_safe_name(): def test_safe_name(): unsafe_methods = { - name: make_method(name=name) - for name in ["import", "Import", "Raise"] + name: make_method(name=name) for name in ["import", "Import", "Raise"] } - safe_methods = { - name: make_method(name=name) - for name in ["Call", "Put", "Hold"] - } + safe_methods = {name: make_method(name=name) for name in ["Call", "Put", "Hold"]} for name, method in safe_methods.items(): assert method.safe_name == name @@ -1003,17 +975,13 @@ def test_safe_name(): def test_mixin_rule(): - m = wrappers.MixinHttpRule( - 'get', '/v1beta1/{name=projects/*}/locations', None) - e = { - 'name': 'projects/sample1' - } + m = wrappers.MixinHttpRule("get", "/v1beta1/{name=projects/*}/locations", None) + e = {"name": "projects/sample1"} assert e == m.sample_request - m = wrappers.MixinHttpRule( - 'get', '/v1beta1/{name=projects/*}/locations', 'city') + m = wrappers.MixinHttpRule("get", "/v1beta1/{name=projects/*}/locations", "city") e = { - 'name': 'projects/sample1', - 'city': {}, + "name": "projects/sample1", + "city": {}, } assert e == m.sample_request diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py 
b/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py index 7e5082aedca7..f85e40e7c9c5 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_python.py @@ -19,15 +19,21 @@ def test_python_eq(): - meta = metadata.Metadata(address=metadata.Address( - name='Foo', module='bar', package=('google', 'api'), - )) + meta = metadata.Metadata( + address=metadata.Address( + name="Foo", + module="bar", + package=("google", "api"), + ) + ) assert wrappers.PythonType(meta=meta) == wrappers.PythonType( meta=copy.copy(meta), ) - assert wrappers.PythonType(meta=metadata.Metadata( - address=metadata.Address(name='Baz', module='bar', package=()), - )) != wrappers.PythonType(meta=meta) + assert wrappers.PythonType( + meta=metadata.Metadata( + address=metadata.Address(name="Baz", module="bar", package=()), + ) + ) != wrappers.PythonType(meta=meta) def test_primitive_eq(): @@ -42,4 +48,4 @@ def test_primitive_eq(): def test_primitive_name(): - assert wrappers.PrimitiveType.build(int).name == 'int' + assert wrappers.PrimitiveType.build(int).name == "int" diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py index f4e6215bca41..1535fae62af8 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_routing.py @@ -27,8 +27,7 @@ class RoutingTestRequest(proto.Message): @pytest.mark.parametrize( "req, expected", [ - (RoutingTestRequest(app_profile_id="foo.123"), - {"app_profile_id": "foo.123"}), + (RoutingTestRequest(app_profile_id="foo.123"), {"app_profile_id": "foo.123"}), ( RoutingTestRequest(app_profile_id="projects/100"), {"app_profile_id": "projects/100"}, @@ -37,19 +36,16 @@ class RoutingTestRequest(proto.Message): ], ) def test_routing_rule_resolve_simple_extraction(req, expected): - rule = 
wrappers.RoutingRule( - [wrappers.RoutingParameter("app_profile_id", "")]) - assert wrappers.RoutingRule.resolve( - rule, - RoutingTestRequest.to_dict(req) - ) == expected + rule = wrappers.RoutingRule([wrappers.RoutingParameter("app_profile_id", "")]) + assert ( + wrappers.RoutingRule.resolve(rule, RoutingTestRequest.to_dict(req)) == expected + ) @pytest.mark.parametrize( "req, expected", [ - (RoutingTestRequest(app_profile_id="foo.123"), - {"routing_id": "foo.123"}), + (RoutingTestRequest(app_profile_id="foo.123"), {"routing_id": "foo.123"}), ( RoutingTestRequest(app_profile_id="projects/100"), {"routing_id": "projects/100"}, @@ -61,10 +57,9 @@ def test_routing_rule_resolve_rename_extraction(req, expected): rule = wrappers.RoutingRule( [wrappers.RoutingParameter("app_profile_id", "{routing_id=**}")] ) - assert wrappers.RoutingRule.resolve( - rule, - RoutingTestRequest.to_dict(req) - ) == expected + assert ( + wrappers.RoutingRule.resolve(rule, RoutingTestRequest.to_dict(req)) == expected + ) @pytest.mark.parametrize( @@ -75,8 +70,7 @@ def test_routing_rule_resolve_rename_extraction(req, expected): {"table_name": "projects/100/instances/200"}, ), ( - RoutingTestRequest( - table_name="projects/100/instances/200/whatever"), + RoutingTestRequest(table_name="projects/100/instances/200/whatever"), {"table_name": "projects/100/instances/200/whatever"}, ), (RoutingTestRequest(table_name="foo"), {}), @@ -93,10 +87,9 @@ def test_routing_rule_resolve_field_match(req, expected): ), ] ) - assert wrappers.RoutingRule.resolve( - rule, - RoutingTestRequest.to_dict(req) - ) == expected + assert ( + wrappers.RoutingRule.resolve(rule, RoutingTestRequest.to_dict(req)) == expected + ) @pytest.mark.parametrize( @@ -108,8 +101,7 @@ def test_routing_rule_resolve_field_match(req, expected): "table_name", "{project_id=projects/*}/instances/*/**" ) ], - RoutingTestRequest( - table_name="projects/100/instances/200/tables/300"), + 
RoutingTestRequest(table_name="projects/100/instances/200/tables/300"), {"project_id": "projects/100"}, ), ( @@ -124,17 +116,14 @@ def test_routing_rule_resolve_field_match(req, expected): "doesnotexist", "projects/*/{instance_id=instances/*}/**" ), ], - RoutingTestRequest( - table_name="projects/100/instances/200/tables/300"), + RoutingTestRequest(table_name="projects/100/instances/200/tables/300"), {"project_id": "projects/100", "instance_id": "instances/200"}, ), ], ) def test_routing_rule_resolve(routing_parameters, req, expected): rule = wrappers.RoutingRule(routing_parameters) - got = wrappers.RoutingRule.resolve( - rule, RoutingTestRequest.to_dict(req) - ) + got = wrappers.RoutingRule.resolve(rule, RoutingTestRequest.to_dict(req)) assert got == expected rule = wrappers.RoutingRule(routing_parameters) @@ -148,8 +137,7 @@ def test_routing_rule_resolve(routing_parameters, req, expected): "field, path_template, expected", [ ("table_name", "{project_id=projects/*}/instances/*/**", "project_id"), - ("table_name", - "projects/*/{instance_id=instances/*}/**", "instance_id"), + ("table_name", "projects/*/{instance_id=instances/*}/**", "instance_id"), ("table_name", "projects/*/{instance_id}/**", "instance_id"), ], ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py index 57fe95f84e7a..eb54577b2e51 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_service.py @@ -45,7 +45,8 @@ def make_resource_opts(*args): "/".join(f"{arg}/{{{arg}}}" for arg in args) ) opts.Extensions[resource_pb2.resource].type = "/".join( - f"{arg}/{{{arg}}}" for arg in args) + f"{arg}/{{{arg}}}" for arg in args + ) return opts @@ -55,29 +56,29 @@ def make_resource_opts(*args): def test_service_properties(): - service = make_service(name='ThingDoer') - assert service.name == 'ThingDoer' - assert 
service.client_name == 'ThingDoerClient' - assert service.client_package_version == '' - assert service.async_client_name == 'ThingDoerAsyncClient' - assert service.transport_name == 'ThingDoerTransport' - assert service.grpc_transport_name == 'ThingDoerGrpcTransport' - assert service.grpc_asyncio_transport_name == 'ThingDoerGrpcAsyncIOTransport' - assert service.rest_transport_name == 'ThingDoerRestTransport' + service = make_service(name="ThingDoer") + assert service.name == "ThingDoer" + assert service.client_name == "ThingDoerClient" + assert service.client_package_version == "" + assert service.async_client_name == "ThingDoerAsyncClient" + assert service.transport_name == "ThingDoerTransport" + assert service.grpc_transport_name == "ThingDoerGrpcTransport" + assert service.grpc_asyncio_transport_name == "ThingDoerGrpcAsyncIOTransport" + assert service.rest_transport_name == "ThingDoerRestTransport" def test_service_host(): - service = make_service(host='thingdoer.googleapis.com') - assert service.host == 'thingdoer.googleapis.com' + service = make_service(host="thingdoer.googleapis.com") + assert service.host == "thingdoer.googleapis.com" def test_service_api_version_not_specified(): - service = make_service(host='thingdoer.googleapis.com') + service = make_service(host="thingdoer.googleapis.com") assert not service.version def test_service_api_version_exists(): - service = make_service(host='thingdoer.googleapis.com', version="goose") + service = make_service(host="thingdoer.googleapis.com", version="goose") assert service.version == "goose" @@ -87,30 +88,49 @@ def test_service_no_host(): def test_service_scopes(): - service = make_service(scopes=('https://foo/user/', 'https://foo/admin/')) - assert 'https://foo/user/' in service.oauth_scopes - assert 'https://foo/admin/' in service.oauth_scopes + service = make_service(scopes=("https://foo/user/", "https://foo/admin/")) + assert "https://foo/user/" in service.oauth_scopes + assert "https://foo/admin/" in 
service.oauth_scopes def test_service_names(): - service = make_service(name='ThingDoer', methods=( - get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), - get_method('Jump', 'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), - get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), - )) - expected_names = {'ThingDoer', 'ThingDoerClient', 'ThingDoerAsyncClient', - 'do_thing', 'jump', 'yawn'} + service = make_service( + name="ThingDoer", + methods=( + get_method("DoThing", "foo.bar.ThingRequest", "foo.baz.ThingResponse"), + get_method("Jump", "foo.bacon.JumpRequest", "foo.bacon.JumpResponse"), + get_method("Yawn", "a.b.v1.c.YawnRequest", "x.y.v1.z.YawnResponse"), + ), + ) + expected_names = { + "ThingDoer", + "ThingDoerClient", + "ThingDoerAsyncClient", + "do_thing", + "jump", + "yawn", + } assert service.names == expected_names def test_service_name_colliding_modules(): - service = make_service(name='ThingDoer', methods=( - get_method('DoThing', 'foo.bar.ThingRequest', 'foo.bar.ThingResponse'), - get_method('Jump', 'bacon.bar.JumpRequest', 'bacon.bar.JumpResponse'), - get_method('Yawn', 'a.b.v1.c.YawnRequest', 'a.b.v1.c.YawnResponse'), - )) - expected_names = {'ThingDoer', 'ThingDoerClient', 'ThingDoerAsyncClient', - 'do_thing', 'jump', 'yawn', 'bar'} + service = make_service( + name="ThingDoer", + methods=( + get_method("DoThing", "foo.bar.ThingRequest", "foo.bar.ThingResponse"), + get_method("Jump", "bacon.bar.JumpRequest", "bacon.bar.JumpResponse"), + get_method("Yawn", "a.b.v1.c.YawnRequest", "a.b.v1.c.YawnResponse"), + ), + ) + expected_names = { + "ThingDoer", + "ThingDoerClient", + "ThingDoerAsyncClient", + "do_thing", + "jump", + "yawn", + "bar", + } assert service.names == expected_names @@ -120,35 +140,35 @@ def test_service_no_scopes(): def test_service_python_modules(): - service = make_service(methods=( - get_method('DoThing', 'foo.bar.ThingRequest', 'foo.baz.ThingResponse'), - get_method('Jump', 
'foo.bacon.JumpRequest', 'foo.bacon.JumpResponse'), - get_method('Yawn', 'a.b.v1.c.YawnRequest', 'x.y.v1.z.YawnResponse'), - )) + service = make_service( + methods=( + get_method("DoThing", "foo.bar.ThingRequest", "foo.baz.ThingResponse"), + get_method("Jump", "foo.bacon.JumpRequest", "foo.bacon.JumpResponse"), + get_method("Yawn", "a.b.v1.c.YawnRequest", "x.y.v1.z.YawnResponse"), + ) + ) imports = { - i.ident.python_import - for m in service.methods.values() - for i in m.ref_types + i.ident.python_import for m in service.methods.values() for i in m.ref_types } assert imports == { - imp.Import(package=('a', 'b', 'v1'), module='c'), - imp.Import(package=('foo',), module='bacon'), - imp.Import(package=('foo',), module='bar'), - imp.Import(package=('foo',), module='baz'), - imp.Import(package=('x', 'y', 'v1'), module='z'), + imp.Import(package=("a", "b", "v1"), module="c"), + imp.Import(package=("foo",), module="bacon"), + imp.Import(package=("foo",), module="bar"), + imp.Import(package=("foo",), module="baz"), + imp.Import(package=("x", "y", "v1"), module="z"), } def test_service_python_modules_lro(): service = make_service_with_method_options() - method = service.methods['DoBigThing'] + method = service.methods["DoBigThing"] imports = {i.ident.python_import for i in method.ref_types} assert imports == { - imp.Import(package=('foo',), module='bar'), - imp.Import(package=('foo',), module='baz'), - imp.Import(package=('foo',), module='qux'), - imp.Import(package=('google', 'api_core'), module='operation'), - imp.Import(package=('google', 'api_core'), module='operation_async'), + imp.Import(package=("foo",), module="bar"), + imp.Import(package=("foo",), module="baz"), + imp.Import(package=("foo",), module="qux"), + imp.Import(package=("google", "api_core"), module="operation"), + imp.Import(package=("google", "api_core"), module="operation_async"), } @@ -156,26 +176,26 @@ def test_service_python_modules_signature(): service = make_service_with_method_options( 
in_fields=( # type=5 is int, so nothing is added. - descriptor_pb2.FieldDescriptorProto(name='secs', type=5), + descriptor_pb2.FieldDescriptorProto(name="secs", type=5), descriptor_pb2.FieldDescriptorProto( - name='d', + name="d", type=14, # enum - type_name='a.b.c.v2.D', + type_name="a.b.c.v2.D", ), ), - method_signature='secs,d', + method_signature="secs,d", ) # Ensure that the service will have the expected imports. - method = service.methods['DoBigThing'] + method = service.methods["DoBigThing"] imports = {i.ident.python_import for i in method.ref_types} assert imports == { - imp.Import(package=('a', 'b', 'c'), module='v2'), - imp.Import(package=('foo',), module='bar'), - imp.Import(package=('foo',), module='baz'), - imp.Import(package=('foo',), module='qux'), - imp.Import(package=('google', 'api_core'), module='operation'), - imp.Import(package=('google', 'api_core'), module='operation_async'), + imp.Import(package=("a", "b", "c"), module="v2"), + imp.Import(package=("foo",), module="bar"), + imp.Import(package=("foo",), module="baz"), + imp.Import(package=("foo",), module="qux"), + imp.Import(package=("google", "api_core"), module="operation"), + imp.Import(package=("google", "api_core"), module="operation_async"), } @@ -190,8 +210,8 @@ def test_service_has_lro(): def test_module_name(): - service = make_service(name='MyService') - assert service.module_name == 'my_service' + service = make_service(name="MyService") + assert service.module_name == "my_service" def test_resource_messages(): @@ -199,9 +219,7 @@ def test_resource_messages(): squid_resource = make_message("Squid", options=make_resource_opts("squid")) squid_request = make_message( "CreateSquid", - fields=( - make_field('squid', message=squid_resource), - ), + fields=(make_field("squid", message=squid_resource),), ) # Nested resource @@ -212,17 +230,15 @@ def test_resource_messages(): clam_resource = make_message( "Clam", options=make_resource_opts("clam"), - fields=( - make_field('squamosa', 
message=squamosa_message), - ), + fields=(make_field("squamosa", message=squamosa_message),), ) clam_request = make_message( - 'CreateClam', + "CreateClam", fields=( - make_field('clam', message=clam_resource), + make_field("clam", message=clam_resource), # Red herring, not resources :) - make_field('zone', 2, enum=make_enum('Zone')), - make_field('pearls', 3, True, message=make_message('Pearl')), + make_field("zone", 2, enum=make_enum("Zone")), + make_field("pearls", 3, True, message=make_message("Pearl")), ), ) @@ -232,13 +248,11 @@ def test_resource_messages(): # Not a resource octopus_request = make_message( "CreateOctopus", - fields=( - make_field('Octopus', message=make_message('Octopus')), - ), + fields=(make_field("Octopus", message=make_message("Octopus")),), ) service = make_service( - 'Molluscs', + "Molluscs", methods=( make_method( f"{message.name}", @@ -250,7 +264,7 @@ def test_resource_messages(): whelk_resource, octopus_request, ) - ) + ), ) expected = { @@ -268,9 +282,7 @@ def test_resource_messages_dict(): squid_resource = make_message("Squid", options=make_resource_opts("squid")) squid_request = make_message( "CreateSquid", - fields=( - make_field('squid', message=squid_resource), - ), + fields=(make_field("squid", message=squid_resource),), ) # Nested resource @@ -281,17 +293,15 @@ def test_resource_messages_dict(): clam_resource = make_message( "Clam", options=make_resource_opts("clam"), - fields=( - make_field('squamosa', message=squamosa_message), - ), + fields=(make_field("squamosa", message=squamosa_message),), ) clam_request = make_message( - 'CreateClam', + "CreateClam", fields=( - make_field('clam', message=clam_resource), + make_field("clam", message=clam_resource), # Red herring, not resources :) - make_field('zone', 2, enum=make_enum('Zone')), - make_field('pearls', 3, True, message=make_message('Pearl')), + make_field("zone", 2, enum=make_enum("Zone")), + make_field("pearls", 3, True, message=make_message("Pearl")), ), ) @@ -301,13 
+311,11 @@ def test_resource_messages_dict(): # Not a resource octopus_request = make_message( "CreateOctopus", - fields=( - make_field('Octopus', message=make_message('Octopus')), - ), + fields=(make_field("Octopus", message=make_message("Octopus")),), ) service = make_service( - 'Molluscs', + "Molluscs", methods=( make_method( f"{message.name}", @@ -319,7 +327,7 @@ def test_resource_messages_dict(): whelk_resource, octopus_request, ) - ) + ), ) expected = { @@ -329,16 +337,21 @@ def test_resource_messages_dict(): "whelk/{whelk}": whelk_resource, "clam/{clam}/squamosa/{squamosa}": squamosa_message, # Common resources - "cloudresourcemanager.googleapis.com/Project": - service.common_resources["cloudresourcemanager.googleapis.com/Project"].message_type, - "cloudresourcemanager.googleapis.com/Organization": - service.common_resources["cloudresourcemanager.googleapis.com/Organization"].message_type, - "cloudresourcemanager.googleapis.com/Folder": - service.common_resources["cloudresourcemanager.googleapis.com/Folder"].message_type, - "cloudbilling.googleapis.com/BillingAccount": - service.common_resources["cloudbilling.googleapis.com/BillingAccount"].message_type, - "locations.googleapis.com/Location": - service.common_resources["locations.googleapis.com/Location"].message_type + "cloudresourcemanager.googleapis.com/Project": service.common_resources[ + "cloudresourcemanager.googleapis.com/Project" + ].message_type, + "cloudresourcemanager.googleapis.com/Organization": service.common_resources[ + "cloudresourcemanager.googleapis.com/Organization" + ].message_type, + "cloudresourcemanager.googleapis.com/Folder": service.common_resources[ + "cloudresourcemanager.googleapis.com/Folder" + ].message_type, + "cloudbilling.googleapis.com/BillingAccount": service.common_resources[ + "cloudbilling.googleapis.com/BillingAccount" + ].message_type, + "locations.googleapis.com/Location": service.common_resources[ + "locations.googleapis.com/Location" + ].message_type, } actual = 
service.resource_messages_dict assert expected == actual @@ -352,7 +365,11 @@ def test_service_unknown_resource_reference(): squid_request = make_message( "CreateSquid", fields=( - make_field("parent", type="TYPE_STRING", options=opts,), + make_field( + "parent", + type="TYPE_STRING", + options=opts, + ), ), ) squid_service = make_service( @@ -371,7 +388,7 @@ def test_service_unknown_resource_reference(): def test_service_any_streaming(): for client, server in itertools.product((True, False), (True, False)): service = make_service( - f'ClientStream{client}:ServerStream{server}', + f"ClientStream{client}:ServerStream{server}", methods=( ( make_method( @@ -386,7 +403,7 @@ def test_service_any_streaming(): server_streaming=server, ), ) - ) + ), ) assert service.any_client_streaming == client @@ -395,74 +412,77 @@ def test_service_any_streaming(): def test_service_any_deprecated(): service = make_service( - name='Service', + name="Service", methods=( - ( - make_method( - f"GetMollusc", - input_message=make_message( - "GetMolluscRequest", - ), - output_message=make_message( - "GetMolluscResponse", - ), + ( + make_method( + f"GetMollusc", + input_message=make_message( + "GetMolluscRequest", ), - ) - )) + output_message=make_message( + "GetMolluscResponse", + ), + ), + ) + ), + ) assert service.any_deprecated == False deprecated_service = make_service( - name='ServiceWithDeprecatedMethod', + name="ServiceWithDeprecatedMethod", methods=( - ( - make_method( - f"GetMollusc", - input_message=make_message( - "GetMolluscRequest", - ), - output_message=make_message( - "GetMolluscResponse", - ), - is_deprecated=True, + ( + make_method( + f"GetMollusc", + input_message=make_message( + "GetMolluscRequest", ), - ) - )) + output_message=make_message( + "GetMolluscResponse", + ), + is_deprecated=True, + ), + ) + ), + ) assert deprecated_service.any_deprecated == True def test_has_pagers(): - paged = make_field(name='foos', message=make_message('Foo'), repeated=True) + paged = 
make_field(name="foos", message=make_message("Foo"), repeated=True) input_msg = make_message( - name='ListFoosRequest', + name="ListFoosRequest", fields=( - make_field(name='parent', type=9), # str - make_field(name='page_size', type=5), # int - make_field(name='page_token', type=9), # str + make_field(name="parent", type=9), # str + make_field(name="page_size", type=5), # int + make_field(name="page_token", type=9), # str ), ) output_msg = make_message( - name='ListFoosResponse', + name="ListFoosResponse", fields=( paged, - make_field(name='next_page_token', type=9), # str + make_field(name="next_page_token", type=9), # str ), ) method = make_method( - 'ListFoos', + "ListFoos", input_message=input_msg, output_message=output_msg, ) - service = make_service(name="Fooer", methods=(method,),) + service = make_service( + name="Fooer", + methods=(method,), + ) assert service.has_pagers other_service = make_service( name="Unfooer", - methods=( - get_method("Unfoo", "foo.bar.UnfooReq", "foo.bar.UnFooResp"), - ), + methods=(get_method("Unfoo", "foo.bar.UnfooReq", "foo.bar.UnFooResp"),), ) assert not other_service.has_pagers @@ -501,10 +521,16 @@ def test_common_resource_patterns(): ) species_msg = species.message_type - assert species_msg.resource_path == "families/{family}/genera/{genus}/species/{species}" + assert ( + species_msg.resource_path + == "families/{family}/genera/{genus}/species/{species}" + ) assert species_msg.resource_type == "Species" assert species_msg.resource_path_args == ["family", "genus", "species"] - assert species_msg.path_regex_str == '^families/(?P.+?)/genera/(?P.+?)/species/(?P.+?)$' + assert ( + species_msg.path_regex_str + == "^families/(?P.+?)/genera/(?P.+?)/species/(?P.+?)$" + ) def test_resource_response(): @@ -516,9 +542,7 @@ def test_resource_response(): clam_resource = make_message("Clam", options=make_resource_opts("clam")) clam_response = make_message( "CreateClamResponse", - fields=( - make_field('clam', message=clam_resource), - ), + 
fields=(make_field("clam", message=clam_resource),), ) clam_request = make_message("CreateClamRequest") @@ -545,7 +569,9 @@ def test_operation_polling_method(): name="Operation", fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ], ) for f in operation.fields.values(): @@ -556,9 +582,7 @@ def test_operation_polling_method(): request = make_message( name="GetOperation", - fields=[ - make_field(name="name", type=T.Value("TYPE_STRING"), number=1) - ], + fields=[make_field(name="name", type=T.Value("TYPE_STRING"), number=1)], ) options = descriptor_pb2.MethodOptions() @@ -607,7 +631,9 @@ def test_extended_operations_lro_detection(): name="Operation", fields=[ make_field(name=name, type=T.Value("TYPE_STRING"), number=i) - for i, name in enumerate(("name", "status", "error_code", "error_message"), start=1) + for i, name in enumerate( + ("name", "status", "error_code", "error_message"), start=1 + ) ], ) for f in operation.fields.values(): @@ -618,9 +644,7 @@ def test_extended_operations_lro_detection(): request = make_message( name="GetOperation", - fields=[ - make_field(name="name", type=T.Value("TYPE_STRING"), number=1) - ], + fields=[make_field(name="name", type=T.Value("TYPE_STRING"), number=1)], ) options = descriptor_pb2.MethodOptions() diff --git a/packages/gapic-generator/tests/unit/utils/test_case.py b/packages/gapic-generator/tests/unit/utils/test_case.py index 83406ca43e1a..b31300cbb74e 100644 --- a/packages/gapic-generator/tests/unit/utils/test_case.py +++ b/packages/gapic-generator/tests/unit/utils/test_case.py @@ -16,28 +16,28 @@ def test_pascal_to_snake(): - assert case.to_snake_case('PascalCaseThing') == 'pascal_case_thing' + assert case.to_snake_case("PascalCaseThing") == "pascal_case_thing" def test_camel_to_snake(): - assert 
case.to_snake_case('camelCaseThing') == 'camel_case_thing' + assert case.to_snake_case("camelCaseThing") == "camel_case_thing" def test_constant_to_snake(): - assert case.to_snake_case('CONSTANT_CASE_THING') == 'constant_case_thing' + assert case.to_snake_case("CONSTANT_CASE_THING") == "constant_case_thing" def test_pascal_to_camel(): - assert case.to_camel_case('PascalCaseThing') == 'pascalCaseThing' + assert case.to_camel_case("PascalCaseThing") == "pascalCaseThing" def test_snake_to_camel(): - assert case.to_camel_case('snake_case_thing') == 'snakeCaseThing' + assert case.to_camel_case("snake_case_thing") == "snakeCaseThing" def test_constant_to_camel(): - assert case.to_camel_case('CONSTANT_CASE_THING') == 'constantCaseThing' + assert case.to_camel_case("CONSTANT_CASE_THING") == "constantCaseThing" def test_kebab_to_camel(): - assert case.to_camel_case('kebab-case-thing') == 'kebabCaseThing' + assert case.to_camel_case("kebab-case-thing") == "kebabCaseThing" diff --git a/packages/gapic-generator/tests/unit/utils/test_checks.py b/packages/gapic-generator/tests/unit/utils/test_checks.py index 32d5b33b493b..ce908013a920 100644 --- a/packages/gapic-generator/tests/unit/utils/test_checks.py +++ b/packages/gapic-generator/tests/unit/utils/test_checks.py @@ -17,18 +17,18 @@ def test_is_str_field_pb(): - msg_field = make_field('msg_field', message=make_message('test_msg')) - str_field = make_field('str_field', type=9) - int_field = make_field('int_field', type=5) + msg_field = make_field("msg_field", message=make_message("test_msg")) + str_field = make_field("str_field", type=9) + int_field = make_field("int_field", type=5) assert not checks.is_str_field_pb(msg_field.field_pb) assert checks.is_str_field_pb(str_field.field_pb) assert not checks.is_str_field_pb(int_field.field_pb) def test_is_msg_field_pb(): - msg_field = make_field('msg_field', message=make_message('test_msg')) - str_field = make_field('str_field', type=9) - int_field = make_field('int_field', type=5) + 
msg_field = make_field("msg_field", message=make_message("test_msg")) + str_field = make_field("str_field", type=9) + int_field = make_field("int_field", type=5) assert checks.is_msg_field_pb(msg_field.field_pb) assert not checks.is_msg_field_pb(str_field.field_pb) assert not checks.is_msg_field_pb(int_field.field_pb) diff --git a/packages/gapic-generator/tests/unit/utils/test_code.py b/packages/gapic-generator/tests/unit/utils/test_code.py index 021c2f2732c1..b288b424fb2e 100644 --- a/packages/gapic-generator/tests/unit/utils/test_code.py +++ b/packages/gapic-generator/tests/unit/utils/test_code.py @@ -17,23 +17,23 @@ def test_empty_empty(): - assert code.empty('') + assert code.empty("") def test_empty_comments(): - assert code.empty('# The rain in Wales...\n# falls mainly...') + assert code.empty("# The rain in Wales...\n# falls mainly...") def test_empty_whitespace(): - assert code.empty(' ') + assert code.empty(" ") def test_empty_whitespace_comments(): - assert code.empty(' # The rain in Wales...') + assert code.empty(" # The rain in Wales...") def test_empty_code(): - assert not code.empty('import this') + assert not code.empty("import this") def test_nth(): @@ -47,7 +47,7 @@ def test_nth(): def test_make_private(): # Regular case - assert code.make_private('private_func') == '_private_func' + assert code.make_private("private_func") == "_private_func" # Starts with underscore - assert code.make_private('_no_dunder_plz') == '_no_dunder_plz' + assert code.make_private("_no_dunder_plz") == "_no_dunder_plz" diff --git a/packages/gapic-generator/tests/unit/utils/test_filename.py b/packages/gapic-generator/tests/unit/utils/test_filename.py index ad18869b0b57..7ac49ee69fde 100644 --- a/packages/gapic-generator/tests/unit/utils/test_filename.py +++ b/packages/gapic-generator/tests/unit/utils/test_filename.py @@ -16,14 +16,14 @@ def test_to_valid_filename(): - assert filename.to_valid_filename('foo bar.py') == 'foo-bar.py' - assert filename.to_valid_filename('FOO') 
== 'foo' - assert filename.to_valid_filename('nom%&nom@nom.py') == 'nom-nom-nom.py' - assert filename.to_valid_filename('num_bear.py') == 'num_bear.py' + assert filename.to_valid_filename("foo bar.py") == "foo-bar.py" + assert filename.to_valid_filename("FOO") == "foo" + assert filename.to_valid_filename("nom%&nom@nom.py") == "nom-nom-nom.py" + assert filename.to_valid_filename("num_bear.py") == "num_bear.py" def test_to_valid_module_name(): - assert filename.to_valid_module_name('foo bar.py') == 'foo_bar.py' - assert filename.to_valid_module_name('FOO') == 'foo' - assert filename.to_valid_module_name('nom%&nom.py') == 'nom_nom.py' - assert filename.to_valid_module_name('num_bear.py') == 'num_bear.py' + assert filename.to_valid_module_name("foo bar.py") == "foo_bar.py" + assert filename.to_valid_module_name("FOO") == "foo" + assert filename.to_valid_module_name("nom%&nom.py") == "nom_nom.py" + assert filename.to_valid_module_name("num_bear.py") == "num_bear.py" diff --git a/packages/gapic-generator/tests/unit/utils/test_lines.py b/packages/gapic-generator/tests/unit/utils/test_lines.py index 9642b0f01c5e..d7cc8158a6de 100644 --- a/packages/gapic-generator/tests/unit/utils/test_lines.py +++ b/packages/gapic-generator/tests/unit/utils/test_lines.py @@ -16,81 +16,110 @@ def test_sort_lines(): - assert lines.sort_lines( - 'import foo\nimport bar', - ) == 'import bar\nimport foo' + assert ( + lines.sort_lines( + "import foo\nimport bar", + ) + == "import bar\nimport foo" + ) def test_sort_lines_keeps_leading_newline(): - assert lines.sort_lines( - '\nimport foo\nimport bar', - ) == '\nimport bar\nimport foo' + assert ( + lines.sort_lines( + "\nimport foo\nimport bar", + ) + == "\nimport bar\nimport foo" + ) def test_sort_lines_keeps_trailing_newline(): - assert lines.sort_lines( - 'import foo\nimport bar\n', - ) == 'import bar\nimport foo\n' + assert ( + lines.sort_lines( + "import foo\nimport bar\n", + ) + == "import bar\nimport foo\n" + ) def 
test_sort_lines_eliminates_blank_lines(): - assert lines.sort_lines( - 'import foo\n\n\nimport bar', - ) == 'import bar\nimport foo' + assert ( + lines.sort_lines( + "import foo\n\n\nimport bar", + ) + == "import bar\nimport foo" + ) def test_sort_lines_dedupe(): - assert lines.sort_lines( - 'import foo\nimport bar\nimport foo', - ) == 'import bar\nimport foo' + assert ( + lines.sort_lines( + "import foo\nimport bar\nimport foo", + ) + == "import bar\nimport foo" + ) def test_sort_lines_no_dedupe(): - assert lines.sort_lines( - 'import foo\nimport bar\nimport foo', - dedupe=False, - ) == 'import bar\nimport foo\nimport foo' + assert ( + lines.sort_lines( + "import foo\nimport bar\nimport foo", + dedupe=False, + ) + == "import bar\nimport foo\nimport foo" + ) def test_wrap_noop(): - assert lines.wrap('foo bar baz', width=80) == 'foo bar baz' + assert lines.wrap("foo bar baz", width=80) == "foo bar baz" def test_wrap_empty_text(): - assert lines.wrap('', width=80) == '' + assert lines.wrap("", width=80) == "" def test_wrap_simple(): - assert lines.wrap('foo bar baz', width=5) == 'foo\nbar\nbaz' + assert lines.wrap("foo bar baz", width=5) == "foo\nbar\nbaz" def test_wrap_strips(): - assert lines.wrap('foo bar baz ', width=80) == 'foo bar baz' + assert lines.wrap("foo bar baz ", width=80) == "foo bar baz" def test_wrap_subsequent_offset(): - assert lines.wrap('foo bar baz', - width=5, offset=0, indent=2, - ) == 'foo\n bar\n baz' + assert ( + lines.wrap( + "foo bar baz", + width=5, + offset=0, + indent=2, + ) + == "foo\n bar\n baz" + ) def test_wrap_initial_offset(): - assert lines.wrap( - 'The hail in Wales falls mainly on the snails.', - width=20, offset=12, indent=0, - ) == 'The hail\nin Wales falls\nmainly on the\nsnails.' + assert ( + lines.wrap( + "The hail in Wales falls mainly on the snails.", + width=20, + offset=12, + indent=0, + ) + == "The hail\nin Wales falls\nmainly on the\nsnails." 
+ ) def test_wrap_indent_short(): - assert lines.wrap('foo bar', width=30, indent=10) == 'foo bar' + assert lines.wrap("foo bar", width=30, indent=10) == "foo bar" def test_wrap_short_line_preserved(): - assert lines.wrap('foo\nbar\nbaz', width=80) == 'foo\nbar\nbaz' + assert lines.wrap("foo\nbar\nbaz", width=80) == "foo\nbar\nbaz" def test_wrap_does_not_break_hyphenated_word(): - assert lines.wrap('do-not-break', width=5) == 'do-not-break' + assert lines.wrap("do-not-break", width=5) == "do-not-break" def test_wrap_with_short_lines(): diff --git a/packages/gapic-generator/tests/unit/utils/test_rst.py b/packages/gapic-generator/tests/unit/utils/test_rst.py index f99894429999..08c42a35b90c 100644 --- a/packages/gapic-generator/tests/unit/utils/test_rst.py +++ b/packages/gapic-generator/tests/unit/utils/test_rst.py @@ -20,43 +20,43 @@ def test_rst_unformatted(): - with mock.patch.object(pypandoc, 'convert_text') as convert_text: - assert utils.rst('The hail in Wales') == 'The hail in Wales' + with mock.patch.object(pypandoc, "convert_text") as convert_text: + assert utils.rst("The hail in Wales") == "The hail in Wales" assert convert_text.call_count == 0 def test_rst_formatted(): - with mock.patch.object(pypandoc, 'convert_text') as convert_text: - convert_text.side_effect = lambda *a, **kw: a[0].replace('`', '``') - assert utils.rst('The hail in `Wales`') == 'The hail in ``Wales``' + with mock.patch.object(pypandoc, "convert_text") as convert_text: + convert_text.side_effect = lambda *a, **kw: a[0].replace("`", "``") + assert utils.rst("The hail in `Wales`") == "The hail in ``Wales``" assert convert_text.call_count == 1 - assert convert_text.mock_calls[0][1][1] == 'rst' - assert convert_text.mock_calls[0][2]['format'] == 'commonmark' + assert convert_text.mock_calls[0][1][1] == "rst" + assert convert_text.mock_calls[0][2]["format"] == "commonmark" def test_rst_add_newline(): - with mock.patch.object(pypandoc, 'convert_text') as convert_text: - s = 'The hail in 
Wales\nfalls mainly on the snails.' - assert utils.rst(s) == s + '\n' + with mock.patch.object(pypandoc, "convert_text") as convert_text: + s = "The hail in Wales\nfalls mainly on the snails." + assert utils.rst(s) == s + "\n" assert convert_text.call_count == 0 def test_rst_force_add_newline(): - with mock.patch.object(pypandoc, 'convert_text') as convert_text: - s = 'The hail in Wales' - assert utils.rst(s, nl=True) == s + '\n' + with mock.patch.object(pypandoc, "convert_text") as convert_text: + s = "The hail in Wales" + assert utils.rst(s, nl=True) == s + "\n" assert convert_text.call_count == 0 def test_rst_disable_add_newline(): - with mock.patch.object(pypandoc, 'convert_text') as convert_text: - s = 'The hail in Wales\nfalls mainly on the snails.' + with mock.patch.object(pypandoc, "convert_text") as convert_text: + s = "The hail in Wales\nfalls mainly on the snails." assert utils.rst(s, nl=False) == s assert convert_text.call_count == 0 def test_rst_pad_close_quote(): - with mock.patch.object(pypandoc, 'convert_text') as convert_text: + with mock.patch.object(pypandoc, "convert_text") as convert_text: s = 'A value, as in "foo"' - assert utils.rst(s) == s + '.' + assert utils.rst(s) == s + "." 
assert convert_text.call_count == 0 diff --git a/packages/gapic-generator/tests/unit/utils/test_uri_sample.py b/packages/gapic-generator/tests/unit/utils/test_uri_sample.py index d9ce42364095..dce1d630415d 100644 --- a/packages/gapic-generator/tests/unit/utils/test_uri_sample.py +++ b/packages/gapic-generator/tests/unit/utils/test_uri_sample.py @@ -19,13 +19,11 @@ def test_sample_from_path_template_inner(): field = "table_name" path_template = "{project_id=projects/*}/instances/*/tables/*" res = uri_sample.sample_from_path_template(field, path_template) - assert res == { - "table_name": "projects/sample1/instances/sample2/tables/sample3"} + assert res == {"table_name": "projects/sample1/instances/sample2/tables/sample3"} def test_sample_from_path_template_no_inner(): field = "table_name" path_template = "projects/*/instances/*/tables/*" res = uri_sample.sample_from_path_template(field, path_template) - assert res == { - "table_name": "projects/sample1/instances/sample2/tables/sample3"} + assert res == {"table_name": "projects/sample1/instances/sample2/tables/sample3"} From 22030ab5c7c7f50b3ac7cf50de11545f2e7b2160 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2025 10:55:47 -0800 Subject: [PATCH 1252/1339] chore(main): release 1.23.1 (#2335) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index b720c3e20810..beb6db24d9de 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.23.1](https://github.com/googleapis/gapic-generator-python/compare/v1.23.0...v1.23.1) (2025-02-14) + + +### 
Documentation + +* Fix minor typos ([#2318](https://github.com/googleapis/gapic-generator-python/issues/2318)) ([7be3a55](https://github.com/googleapis/gapic-generator-python/commit/7be3a559d59b778c5733e21b4c0169b17bd05c4a)) + ## [1.23.0](https://github.com/googleapis/gapic-generator-python/compare/v1.22.1...v1.23.0) (2025-02-14) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 58e929348b7d..a128ad0632eb 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.0" +version = "1.23.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 5dfd771836f00892b25b02e40eb85b78a2865c24 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 28 Feb 2025 12:17:17 -0500 Subject: [PATCH 1253/1339] fix: resolve rare issue where ValueError is not raised when both request and flattened param are set (#2258) --- .../%sub/services/%service/client.py.j2 | 3 +- .../%sub/services/%service/_client_macros.j2 | 3 +- .../%sub/services/%service/async_client.py.j2 | 3 +- .../fragments/test_nested_messages.proto | 41 ++++++++++++++ .../services/asset_service/async_client.py | 48 +++++++++++------ .../asset_v1/services/asset_service/client.py | 48 +++++++++++------ .../services/iam_credentials/async_client.py | 12 +++-- .../services/iam_credentials/client.py | 12 +++-- .../services/eventarc/async_client.py | 54 ++++++++++++------- .../eventarc_v1/services/eventarc/client.py | 54 ++++++++++++------- .../config_service_v2/async_client.py | 54 ++++++++++++------- .../services/config_service_v2/client.py | 54 ++++++++++++------- .../logging_service_v2/async_client.py | 12 +++-- .../services/logging_service_v2/client.py | 12 +++-- 
.../metrics_service_v2/async_client.py | 15 ++++-- .../services/metrics_service_v2/client.py | 15 ++++-- .../config_service_v2/async_client.py | 54 ++++++++++++------- .../services/config_service_v2/client.py | 54 ++++++++++++------- .../logging_service_v2/async_client.py | 12 +++-- .../services/logging_service_v2/client.py | 12 +++-- .../metrics_service_v2/async_client.py | 15 ++++-- .../services/metrics_service_v2/client.py | 15 ++++-- .../services/cloud_redis/async_client.py | 33 ++++++++---- .../redis_v1/services/cloud_redis/client.py | 33 ++++++++---- .../services/cloud_redis/async_client.py | 15 ++++-- .../redis_v1/services/cloud_redis/client.py | 15 ++++-- 26 files changed, 479 insertions(+), 219 deletions(-) create mode 100644 packages/gapic-generator/tests/fragments/test_nested_messages.proto diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 5555339c4d1f..72f96abdb005 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -410,7 +410,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.flattened_fields %} # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) + flattened_params = [{{ method.flattened_fields.values()|join(", ", attribute="name") }}] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 index 97fa01773cd4..9029ac8eca23 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_client_macros.j2 @@ -90,7 +90,8 @@ {% if method.flattened_fields %} # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) + flattened_params = [{{ method.flattened_fields.values()|join(", ", attribute="name") }}] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 2513ce10c0ee..1f32c9acee29 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -339,7 +339,8 @@ class {{ service.async_client_name }}: {% if method.flattened_fields %} # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([{{ method.flattened_fields.values()|join(", ", attribute="name") }}]) + flattened_params = [{{ method.flattened_fields.values()|join(", ", attribute="name") }}] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/fragments/test_nested_messages.proto b/packages/gapic-generator/tests/fragments/test_nested_messages.proto new file mode 100644 index 000000000000..6c98cb15d22b --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_nested_messages.proto @@ -0,0 +1,41 @@ +// Copyright (C) 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+syntax = "proto3"; + +package google.fragment; + +import "google/protobuf/struct.proto"; +import "google/api/client.proto"; + +service MyServiceWithNestedMessages { + option (google.api.default_host) = "my.example.com"; + + rpc MyMethod(MethodRequest) returns (MethodResponse) { + option (google.api.method_signature) = "some_message"; + } +} + +message AnotherMessage {}; + +message SomeMessage{ + AnotherMessage another_message = 1; +} + +message MethodRequest { + SomeMessage some_message = 1; +} + +message MethodResponse { + google.protobuf.Value result = 1; +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 334e5067fff0..bfd8ed8e00e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -459,7 +459,8 @@ async def sample_list_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -682,7 +683,8 @@ async def sample_create_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -794,7 +796,8 @@ async def sample_get_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -901,7 +904,8 @@ async def sample_list_feeds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1017,7 +1021,8 @@ async def sample_update_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([feed]) + flattened_params = [feed] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1114,7 +1119,8 @@ async def sample_delete_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1328,7 +1334,8 @@ async def sample_search_all_resources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query, asset_types]) + flattened_params = [scope, query, asset_types] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1523,7 +1530,8 @@ async def sample_search_all_iam_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, query]) + flattened_params = [scope, query] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2067,7 +2075,8 @@ async def sample_create_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, saved_query, saved_query_id]) + flattened_params = [parent, saved_query, saved_query_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2179,7 +2188,8 @@ async def sample_get_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2293,7 +2303,8 @@ async def sample_list_saved_queries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2421,7 +2432,8 @@ async def sample_update_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([saved_query, update_mask]) + flattened_params = [saved_query, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2522,7 +2534,8 @@ async def sample_delete_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2744,7 +2757,8 @@ async def sample_analyze_org_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2898,7 +2912,8 @@ async def sample_analyze_org_policy_governed_containers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3081,7 +3096,8 @@ async def sample_analyze_org_policy_governed_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 8013724696cb..95f0c102079f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -862,7 +862,8 @@ def sample_list_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1084,7 +1085,8 @@ def sample_create_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1195,7 +1197,8 @@ def sample_get_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1301,7 +1304,8 @@ def sample_list_feeds(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1416,7 +1420,8 @@ def sample_update_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([feed]) + flattened_params = [feed] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1512,7 +1517,8 @@ def sample_delete_feed(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1725,7 +1731,8 @@ def sample_search_all_resources(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, query, asset_types]) + flattened_params = [scope, query, asset_types] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1919,7 +1926,8 @@ def sample_search_all_iam_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, query]) + flattened_params = [scope, query] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2462,7 +2470,8 @@ def sample_create_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, saved_query, saved_query_id]) + flattened_params = [parent, saved_query, saved_query_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2573,7 +2582,8 @@ def sample_get_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2686,7 +2696,8 @@ def sample_list_saved_queries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2813,7 +2824,8 @@ def sample_update_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([saved_query, update_mask]) + flattened_params = [saved_query, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2913,7 +2925,8 @@ def sample_delete_saved_query(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3134,7 +3147,8 @@ def sample_analyze_org_policies(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3287,7 +3301,8 @@ def sample_analyze_org_policy_governed_containers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3469,7 +3484,8 @@ def sample_analyze_org_policy_governed_assets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([scope, constraint, filter]) + flattened_params = [scope, constraint, filter] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 3ade51402e91..29ea194a7ad0 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -370,7 +370,8 @@ async def sample_generate_access_token(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, delegates, scope, lifetime]) + flattened_params = [name, delegates, scope, lifetime] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -521,7 +522,8 @@ async def sample_generate_id_token(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, delegates, audience, include_email]) + flattened_params = [name, delegates, audience, include_email] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -660,7 +662,8 @@ async def sample_sign_blob(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, delegates, payload]) + flattened_params = [name, delegates, payload] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -800,7 +803,8 @@ async def sample_sign_jwt(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, delegates, payload]) + flattened_params = [name, delegates, payload] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 3736372a4351..b6656c626716 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -719,7 +719,8 @@ def sample_generate_access_token(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, delegates, scope, lifetime]) + flattened_params = [name, delegates, scope, lifetime] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -869,7 +870,8 @@ def sample_generate_id_token(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, delegates, audience, include_email]) + flattened_params = [name, delegates, audience, include_email] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1007,7 +1009,8 @@ def sample_sign_blob(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, delegates, payload]) + flattened_params = [name, delegates, payload] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1146,7 +1149,8 @@ def sample_sign_jwt(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, delegates, payload]) + flattened_params = [name, delegates, payload] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 501680371f8f..4200dfc1bfc8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -351,7 +351,8 @@ async def sample_get_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -459,7 +460,8 @@ async def sample_list_triggers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -607,7 +609,8 @@ async def sample_create_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, trigger, trigger_id]) + flattened_params = [parent, trigger, trigger_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -748,7 +751,8 @@ async def sample_update_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([trigger, update_mask, allow_missing]) + flattened_params = [trigger, update_mask, allow_missing] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -882,7 +886,8 @@ async def sample_delete_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, allow_missing]) + flattened_params = [name, allow_missing] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1003,7 +1008,8 @@ async def sample_get_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1111,7 +1117,8 @@ async def sample_list_channels(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1259,7 +1266,8 @@ async def sample_create_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, channel, channel_id]) + flattened_params = [parent, channel, channel_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1394,7 +1402,8 @@ async def sample_update_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([channel, update_mask]) + flattened_params = [channel, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1520,7 +1529,8 @@ async def sample_delete_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1633,7 +1643,8 @@ async def sample_get_provider(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1741,7 +1752,8 @@ async def sample_list_providers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1862,7 +1874,8 @@ async def sample_get_channel_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1971,7 +1984,8 @@ async def sample_list_channel_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2119,7 +2133,8 @@ async def sample_create_channel_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, channel_connection, channel_connection_id]) + flattened_params = [parent, channel_connection, channel_connection_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2245,7 +2260,8 @@ async def sample_delete_channel_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2364,7 +2380,8 @@ async def sample_get_google_channel_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2486,7 +2503,8 @@ async def sample_update_google_channel_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([google_channel_config, update_mask]) + flattened_params = [google_channel_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 2de8d7404f83..551bf1eb3b55 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -781,7 +781,8 @@ def sample_get_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -888,7 +889,8 @@ def sample_list_triggers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1035,7 +1037,8 @@ def sample_create_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, trigger, trigger_id]) + flattened_params = [parent, trigger, trigger_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1175,7 +1178,8 @@ def sample_update_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([trigger, update_mask, allow_missing]) + flattened_params = [trigger, update_mask, allow_missing] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1308,7 +1312,8 @@ def sample_delete_trigger(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, allow_missing]) + flattened_params = [name, allow_missing] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1428,7 +1433,8 @@ def sample_get_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1535,7 +1541,8 @@ def sample_list_channels(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1682,7 +1689,8 @@ def sample_create_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, channel, channel_id]) + flattened_params = [parent, channel, channel_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1816,7 +1824,8 @@ def sample_update_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([channel, update_mask]) + flattened_params = [channel, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1941,7 +1950,8 @@ def sample_delete_channel(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2053,7 +2063,8 @@ def sample_get_provider(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2160,7 +2171,8 @@ def sample_list_providers(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2280,7 +2292,8 @@ def sample_get_channel_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2388,7 +2401,8 @@ def sample_list_channel_connections(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2535,7 +2549,8 @@ def sample_create_channel_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, channel_connection, channel_connection_id]) + flattened_params = [parent, channel_connection, channel_connection_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2660,7 +2675,8 @@ def sample_delete_channel_connection(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2778,7 +2794,8 @@ def sample_get_google_channel_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2899,7 +2916,8 @@ def sample_update_google_channel_config(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([google_channel_config, update_mask]) + flattened_params = [google_channel_config, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 615219a62117..193cae126a93 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -342,7 +342,8 @@ async def sample_list_buckets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1084,7 +1085,8 @@ async def sample_list_views(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1542,7 +1544,8 @@ async def sample_list_sinks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1674,7 +1677,8 @@ async def sample_get_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1812,7 +1816,8 @@ async def sample_create_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, sink]) + flattened_params = [parent, sink] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1977,7 +1982,8 @@ async def sample_update_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2088,7 +2094,8 @@ async def sample_delete_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2222,7 +2229,8 @@ async def sample_create_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, link, link_id]) + flattened_params = [parent, link, link_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2357,7 +2365,8 @@ async def sample_delete_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2478,7 +2487,8 @@ async def sample_list_links(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2597,7 +2607,8 @@ async def sample_get_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2712,7 +2723,8 @@ async def sample_list_exclusions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2842,7 +2854,8 @@ async def sample_get_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2979,7 +2992,8 @@ async def sample_create_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, exclusion]) + flattened_params = [parent, exclusion] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3130,7 +3144,8 @@ async def sample_update_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + flattened_params = [name, exclusion, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3240,7 +3255,8 @@ async def sample_delete_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3595,7 +3611,8 @@ async def sample_get_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3740,7 +3757,8 @@ async def sample_update_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([settings, update_mask]) + flattened_params = [settings, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index e85efa74867d..61409f5b4fd5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -743,7 +743,8 @@ def sample_list_buckets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1484,7 +1485,8 @@ def sample_list_views(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1941,7 +1943,8 @@ def sample_list_sinks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2072,7 +2075,8 @@ def sample_get_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2209,7 +2213,8 @@ def sample_create_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) + flattened_params = [parent, sink] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2373,7 +2378,8 @@ def sample_update_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2483,7 +2489,8 @@ def sample_delete_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2616,7 +2623,8 @@ def sample_create_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, link, link_id]) + flattened_params = [parent, link, link_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2750,7 +2758,8 @@ def sample_delete_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2870,7 +2879,8 @@ def sample_list_links(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2988,7 +2998,8 @@ def sample_get_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3102,7 +3113,8 @@ def sample_list_exclusions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3231,7 +3243,8 @@ def sample_get_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3367,7 +3380,8 @@ def sample_create_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + flattened_params = [parent, exclusion] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3517,7 +3531,8 @@ def sample_update_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + flattened_params = [name, exclusion, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3626,7 +3641,8 @@ def sample_delete_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3980,7 +3996,8 @@ def sample_get_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -4124,7 +4141,8 @@ def sample_update_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([settings, update_mask]) + flattened_params = [settings, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index eb900db2378d..ee9e78158367 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -318,7 +318,8 @@ async def sample_delete_log(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + flattened_params = [log_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -508,7 +509,8 @@ async def sample_write_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([log_name, resource, labels, entries]) + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -659,7 +661,8 @@ async def sample_list_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + flattened_params = [resource_names, filter, order_by] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -872,7 +875,8 @@ async def sample_list_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 2ca0aa9b3c31..a52186153a84 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -665,7 +665,8 @@ def sample_delete_log(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + flattened_params = [log_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -854,7 +855,8 @@ def sample_write_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([log_name, resource, labels, entries]) + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1003,7 +1005,8 @@ def sample_list_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + flattened_params = [resource_names, filter, order_by] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1215,7 +1218,8 @@ def sample_list_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 8f1ec70d9c3b..cb20bb0327d7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -321,7 +321,8 @@ async def sample_list_log_metrics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -449,7 +450,8 @@ async def sample_get_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -583,7 +585,8 @@ async def sample_create_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + flattened_params = [parent, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -718,7 +721,8 @@ async def sample_update_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + flattened_params = [metric_name, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -818,7 +822,8 @@ async def sample_delete_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index afca747efbce..ab976fd84450 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -668,7 +668,8 @@ def sample_list_log_metrics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -795,7 +796,8 @@ def sample_get_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -928,7 +930,8 @@ def sample_create_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + flattened_params = [parent, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1062,7 +1065,8 @@ def sample_update_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + flattened_params = [metric_name, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1161,7 +1165,8 @@ def sample_delete_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index ea6f8531a563..e4da77044e6a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -342,7 +342,8 @@ async def sample_list_buckets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1084,7 +1085,8 @@ async def sample__list_views(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1542,7 +1544,8 @@ async def sample__list_sinks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1674,7 +1677,8 @@ async def sample__get_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1812,7 +1816,8 @@ async def sample__create_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, sink]) + flattened_params = [parent, sink] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1977,7 +1982,8 @@ async def sample__update_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2088,7 +2094,8 @@ async def sample__delete_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2222,7 +2229,8 @@ async def sample__create_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, link, link_id]) + flattened_params = [parent, link, link_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2357,7 +2365,8 @@ async def sample__delete_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2478,7 +2487,8 @@ async def sample__list_links(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2597,7 +2607,8 @@ async def sample__get_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2712,7 +2723,8 @@ async def sample__list_exclusions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2842,7 +2854,8 @@ async def sample__get_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -2979,7 +2992,8 @@ async def sample__create_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, exclusion]) + flattened_params = [parent, exclusion] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3130,7 +3144,8 @@ async def sample__update_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + flattened_params = [name, exclusion, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3240,7 +3255,8 @@ async def sample__delete_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3595,7 +3611,8 @@ async def sample__get_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -3740,7 +3757,8 @@ async def sample__update_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([settings, update_mask]) + flattened_params = [settings, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index 4c7eedaf662f..6119170ecc98 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -743,7 +743,8 @@ def sample_list_buckets(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1484,7 +1485,8 @@ def sample__list_views(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1941,7 +1943,8 @@ def sample__list_sinks(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2072,7 +2075,8 @@ def sample__get_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2209,7 +2213,8 @@ def sample__create_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, sink]) + flattened_params = [parent, sink] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2373,7 +2378,8 @@ def sample__update_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([sink_name, sink, update_mask]) + flattened_params = [sink_name, sink, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2483,7 +2489,8 @@ def sample__delete_sink(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([sink_name]) + flattened_params = [sink_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2616,7 +2623,8 @@ def sample__create_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, link, link_id]) + flattened_params = [parent, link, link_id] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2750,7 +2758,8 @@ def sample__delete_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2870,7 +2879,8 @@ def sample__list_links(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2988,7 +2998,8 @@ def sample__get_link(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3102,7 +3113,8 @@ def sample__list_exclusions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3231,7 +3243,8 @@ def sample__get_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3367,7 +3380,8 @@ def sample__create_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, exclusion]) + flattened_params = [parent, exclusion] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3517,7 +3531,8 @@ def sample__update_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, exclusion, update_mask]) + flattened_params = [name, exclusion, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3626,7 +3641,8 @@ def sample__delete_exclusion(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -3980,7 +3996,8 @@ def sample__get_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -4124,7 +4141,8 @@ def sample__update_settings(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([settings, update_mask]) + flattened_params = [settings, update_mask] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index eb900db2378d..ee9e78158367 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -318,7 +318,8 @@ async def sample_delete_log(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + flattened_params = [log_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -508,7 +509,8 @@ async def sample_write_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([log_name, resource, labels, entries]) + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -659,7 +661,8 @@ async def sample_list_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + flattened_params = [resource_names, filter, order_by] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -872,7 +875,8 @@ async def sample_list_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index 2ca0aa9b3c31..a52186153a84 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -665,7 +665,8 @@ def sample_delete_log(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([log_name]) + flattened_params = [log_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -854,7 +855,8 @@ def sample_write_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([log_name, resource, labels, entries]) + flattened_params = [log_name, resource, labels, entries] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1003,7 +1005,8 @@ def sample_list_log_entries(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([resource_names, filter, order_by]) + flattened_params = [resource_names, filter, order_by] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1215,7 +1218,8 @@ def sample_list_logs(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 826e222ea3bd..951c77479f9d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -321,7 +321,8 @@ async def sample__list_log_metrics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -449,7 +450,8 @@ async def sample__get_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -583,7 +585,8 @@ async def sample__create_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + flattened_params = [parent, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -718,7 +721,8 @@ async def sample__update_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + flattened_params = [metric_name, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -818,7 +822,8 @@ async def sample__delete_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index b3a02dc538f1..64bb863edd77 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -668,7 +668,8 @@ def sample__list_log_metrics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -795,7 +796,8 @@ def sample__get_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -928,7 +930,8 @@ def sample__create_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, metric]) + flattened_params = [parent, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1062,7 +1065,8 @@ def sample__update_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([metric_name, metric]) + flattened_params = [metric_name, metric] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1161,7 +1165,8 @@ def sample__delete_log_metric(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([metric_name]) + flattened_params = [metric_name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 2ba2cb14de65..339e572e8ed5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -356,7 +356,8 @@ async def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -471,7 +472,8 @@ async def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -578,7 +580,8 @@ async def sample_get_instance_auth_string(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -733,7 +736,8 @@ async def sample_create_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, instance]) + flattened_params = [parent, instance_id, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -883,7 +887,8 @@ async def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([update_mask, instance]) + flattened_params = [update_mask, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1016,7 +1021,8 @@ async def sample_upgrade_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, redis_version]) + flattened_params = [name, redis_version] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1159,7 +1165,8 @@ async def sample_import_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config]) + flattened_params = [name, input_config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1299,7 +1306,8 @@ async def sample_export_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1433,7 +1441,8 @@ async def sample_failover_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, data_protection_mode]) + flattened_params = [name, data_protection_mode] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1564,7 +1573,8 @@ async def sample_delete_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -1704,7 +1714,8 @@ async def sample_reschedule_maintenance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, reschedule_type, schedule_time]) + flattened_params = [name, reschedule_type, schedule_time] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 56c333e6cc3a..aa1e0675cde5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -737,7 +737,8 @@ def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -851,7 +852,8 @@ def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -957,7 +959,8 @@ def sample_get_instance_auth_string(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1111,7 +1114,8 @@ def sample_create_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, instance]) + flattened_params = [parent, instance_id, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1260,7 +1264,8 @@ def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([update_mask, instance]) + flattened_params = [update_mask, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1392,7 +1397,8 @@ def sample_upgrade_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, redis_version]) + flattened_params = [name, redis_version] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1534,7 +1540,8 @@ def sample_import_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, input_config]) + flattened_params = [name, input_config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1673,7 +1680,8 @@ def sample_export_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, output_config]) + flattened_params = [name, output_config] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1806,7 +1814,8 @@ def sample_failover_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, data_protection_mode]) + flattened_params = [name, data_protection_mode] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1936,7 +1945,8 @@ def sample_delete_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -2075,7 +2085,8 @@ def sample_reschedule_maintenance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, reschedule_type, schedule_time]) + flattened_params = [name, reschedule_type, schedule_time] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py index 233a17a6edf9..dfb52bc1b522 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -356,7 +356,8 @@ async def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -471,7 +472,8 @@ async def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -626,7 +628,8 @@ async def sample_create_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, instance]) + flattened_params = [parent, instance_id, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -776,7 +779,8 @@ async def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, instance]) + flattened_params = [update_mask, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") @@ -907,7 +911,8 @@ async def sample_delete_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError("If the `request` argument is set, then none of " "the individual field arguments should be set.") diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index 65444427a950..8119e081115e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -737,7 +737,8 @@ def sample_list_instances(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -851,7 +852,8 @@ def sample_get_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1005,7 +1007,8 @@ def sample_create_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, instance_id, instance]) + flattened_params = [parent, instance_id, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1154,7 +1157,8 @@ def sample_update_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([update_mask, instance]) + flattened_params = [update_mask, instance] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') @@ -1284,7 +1288,8 @@ def sample_delete_instance(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = len([param for param in flattened_params if param is not None]) > 0 if request is not None and has_flattened_params: raise ValueError('If the `request` argument is set, then none of ' 'the individual field arguments should be set.') From 25f7b565b7caa5df8f2e457b0f40a8d4a3873e98 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 28 Feb 2025 12:46:32 -0500 Subject: [PATCH 1254/1339] chore(main): release 1.23.2 (#2345) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index beb6db24d9de..5e5d3e5eb727 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.23.2](https://github.com/googleapis/gapic-generator-python/compare/v1.23.1...v1.23.2) (2025-02-28) + + +### Bug Fixes + +* Resolve rare issue where ValueError is not raised when both request and flattened param are set ([#2258](https://github.com/googleapis/gapic-generator-python/issues/2258)) ([4ecfbeb](https://github.com/googleapis/gapic-generator-python/commit/4ecfbeb7028dc1856692f5cda95a8767e4cb69e4)) + ## [1.23.1](https://github.com/googleapis/gapic-generator-python/compare/v1.23.0...v1.23.1) (2025-02-14) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index a128ad0632eb..4392e20f8d14 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.1" +version = "1.23.2" 
release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 033cd95e666fb09f3dd9de87c7c797768446573c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 2 Mar 2025 12:17:45 -0500 Subject: [PATCH 1255/1339] chore(python): conditionally load credentials in .kokoro/build.sh (#2341) Co-authored-by: Owl Bot --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 ++-- packages/gapic-generator/.kokoro/build.sh | 20 +++++++++++++------ 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 4c0027ff1c61..3f7634f25f8e 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a -# created: 2025-01-16T15:24:11.364245182Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/packages/gapic-generator/.kokoro/build.sh b/packages/gapic-generator/.kokoro/build.sh index 1cebf75e7312..d41b45aa1dd0 100755 --- a/packages/gapic-generator/.kokoro/build.sh +++ b/packages/gapic-generator/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/gapic-generator-python" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. 
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi From 9177ce6e50b3b1ef8f79b6d1d1a4ae89e9eda4f1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 3 Mar 2025 10:54:12 -0500 Subject: [PATCH 1256/1339] fix: resolve issue where pre-release versions of dependencies are installed (#2347) --- .../gapic/ads-templates/setup.py.j2 | 6 +++--- .../gapic/templates/_pypi_packages.j2 | 20 +++++++++---------- .../gapic/templates/setup.py.j2 | 14 ++++++------- .../templates/testing/constraints-3.7.txt.j2 | 2 +- packages/gapic-generator/setup.py | 10 +++++----- .../tests/integration/goldens/asset/setup.py | 16 +++++++-------- .../goldens/asset/testing/constraints-3.7.txt | 2 +- .../integration/goldens/credentials/setup.py | 10 +++++----- .../credentials/testing/constraints-3.7.txt | 2 +- .../integration/goldens/eventarc/setup.py | 12 +++++------ .../eventarc/testing/constraints-3.7.txt | 2 +- .../integration/goldens/logging/setup.py | 10 +++++----- .../logging/testing/constraints-3.7.txt | 2 +- .../goldens/logging_internal/setup.py | 10 +++++----- .../testing/constraints-3.7.txt | 2 +- .../tests/integration/goldens/redis/setup.py | 14 ++++++------- 
.../goldens/redis/testing/constraints-3.7.txt | 2 +- .../goldens/redis_selective/setup.py | 14 ++++++------- .../testing/constraints-3.7.txt | 2 +- 19 files changed, 76 insertions(+), 76 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index be5e116888ba..b041173f8500 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -29,11 +29,11 @@ else: release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 2.10.0, < 3.0.0dev", - "google-auth >= 2.14.1, <3.0.0dev", + "google-api-core[grpc] >= 2.10.0, < 3.0.0", + "google-auth >= 2.14.1, <3.0.0", "googleapis-common-protos >= 1.53.0", "grpcio >= 1.10.0", - "proto-plus >= 1.22.3, <2.0.0dev", + "proto-plus >= 1.22.3, <2.0.0", {% if api.requires_package(('google', 'iam', 'v1')) %} "grpc-google-iam-v1", {% endif %} diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 88ce71734072..f90af07b3682 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -7,15 +7,15 @@ allowed version. 
Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has support for `barcode` in `google.cloud.documentai.types` --> {% set pypi_packages = { - ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, - ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0dev"}, - ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0dev"}, - ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0dev"}, - ("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "4.0.0dev"}, - ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "4.0.0dev"}, - ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0dev"}, - ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.14.0", "upper_bound": "1.0.0dev"}, - ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0dev"}, - ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.6", "upper_bound": "1.0.0dev"} + ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.1.2", "upper_bound": "1.0.0"}, + ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0"}, + ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0"}, + ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0"}, + 
("google", "cloud", "documentai", "v1"): {"package_name": "google-cloud-documentai", "lower_bound": "2.0.0", "upper_bound": "4.0.0"}, + ("google", "cloud", "kms", "v1"): {"package_name": "google-cloud-kms", "lower_bound": "2.3.0", "upper_bound": "4.0.0"}, + ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0"}, + ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.14.0", "upper_bound": "1.0.0"}, + ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0"}, + ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.6", "upper_bound": "1.0.0"} } %} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 8c1f0b224c47..3874d8f1f8d3 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -33,14 +33,14 @@ else: release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} - 
"protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} {# Quick check to make sure the package is different from this setup.py #} {% if api.naming.warehouse_package_name != package_info.package_name %} @@ -53,8 +53,8 @@ dependencies = [ extras = { {% if rest_async_io_enabled %} "async_rest": [ - "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", - "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + "google-api-core[grpc] >= 2.21.0, < 3.0.0", + "google-auth[aiohttp] >= 2.35.0, <3.0.0" ], {% endif %} } diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 3dbaa6f3825b..8dc46d8b47f7 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -3,7 +3,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 4392e20f8d14..55a37d3cce74 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -28,19 +28,19 @@ # Ensure that the lower bounds of these dependencies match what we have in the # templated setup.py.j2: https://github.com/googleapis/gapic-generator-python/blob/main/gapic/templates/setup.py.j2 "click >= 6.7", - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "googleapis-common-protos >= 1.55.0", "grpcio >= 1.24.3", # 2.11.0 is required which adds the `default` argument to `jinja-filters.map()` # https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.map # https://jinja.palletsprojects.com/en/2.11.x/changelog/#version-2-11-0 "jinja2 >= 2.11", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pypandoc >= 1.4", "PyYAML >= 5.1.1", - "grpc-google-iam-v1 >= 0.14.0, < 1.0.0dev", - "libcst >= 0.4.9, < 2.0.0dev", - "inflection >= 0.5.1, < 1.0.0dev", + "grpc-google-iam-v1 >= 0.14.0, < 1.0.0", + "libcst >= 0.4.9, < 2.0.0", + "inflection >= 0.5.1, < 1.0.0", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 46140c197e4c..1cbbc630a477 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -39,16 +39,16 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-cloud-access-context-manager >= 0.1.2, <1.0.0dev", - "google-cloud-os-config >= 1.0.0, <2.0.0dev", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-cloud-access-context-manager >= 0.1.2, <1.0.0", + "google-cloud-os-config >= 1.0.0, <2.0.0", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index 231036469dff..aafb24324a45 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -2,7 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 36e57a170563..084e35153ec5 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -39,13 +39,13 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index fc812592b0ee..a77f12bc13e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -2,7 
+2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index e89fbba5f645..8bc8e368685e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -39,14 +39,14 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.14.0, <1.0.0dev", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index fb7e93a1b473..56affbd9bd75 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -2,7 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 69fe4db623ff..ea57cc1c4002 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -39,13 +39,13 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt 
b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index fc812592b0ee..a77f12bc13e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -2,7 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index 69fe4db623ff..ea57cc1c4002 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -39,13 +39,13 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + 
"protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt index fc812592b0ee..a77f12bc13e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt @@ -2,7 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 220725b65cc0..75afe5324765 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -39,18 +39,18 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 
2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { "async_rest": [ - "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", - "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + "google-api-core[grpc] >= 2.21.0, < 3.0.0", + "google-auth[aiohttp] >= 2.35.0, <3.0.0" ], } url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index fc812592b0ee..a77f12bc13e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -2,7 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index 220725b65cc0..75afe5324765 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -39,18 +39,18 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.34.1, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "google-api-core[grpc] >= 1.34.1, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "proto-plus >= 1.22.3, <2.0.0dev", - "proto-plus >= 1.25.0, <2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "proto-plus >= 1.22.3, <2.0.0", + "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { "async_rest": [ - "google-api-core[grpc] >= 2.21.0, < 3.0.0dev", - "google-auth[aiohttp] >= 2.35.0, <3.0.0dev" + "google-api-core[grpc] >= 2.21.0, < 3.0.0", + "google-auth[aiohttp] >= 2.35.0, <3.0.0" ], } url = "https://github.com/googleapis/google-cloud-python/tree/main/packages/google-cloud-redis" diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt 
b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt index fc812592b0ee..a77f12bc13e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt @@ -2,7 +2,7 @@ # are correct in setup.py # List all library dependencies and extras in this file. # Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 From d79055888af67043c6ee7c68abcabdd008f826b3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 16:16:01 +0000 Subject: [PATCH 1257/1339] chore(main): release 1.23.3 (#2348) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 5e5d3e5eb727..eb7f05b0ca49 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.23.3](https://github.com/googleapis/gapic-generator-python/compare/v1.23.2...v1.23.3) (2025-03-03) + + +### Bug Fixes + +* Resolve issue where pre-release versions of dependencies are installed ([#2347](https://github.com/googleapis/gapic-generator-python/issues/2347)) ([aea00b2](https://github.com/googleapis/gapic-generator-python/commit/aea00b2137ac926bdc527eb9c4666eb8b1ca70e1)) + ## [1.23.2](https://github.com/googleapis/gapic-generator-python/compare/v1.23.1...v1.23.2) (2025-02-28) diff --git a/packages/gapic-generator/setup.py 
b/packages/gapic-generator/setup.py index 55a37d3cce74..5a57c6b6fea6 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.2" +version = "1.23.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From be7f65e624abfbc5e32cb9b1bbe0e10f5b8cc180 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 4 Mar 2025 12:51:26 -0500 Subject: [PATCH 1258/1339] fix: allow proto files to be included in the output of py_gapic_assembly_pkg (#2349) --- .../gapic-generator/rules_python_gapic/py_gapic_pkg.bzl | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl index e85981653734..e23e4c719ce1 100644 --- a/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl +++ b/packages/gapic-generator/rules_python_gapic/py_gapic_pkg.bzl @@ -19,6 +19,13 @@ def _py_gapic_src_pkg_impl(ctx): dir_srcs = [] py_srcs = [] for dep in ctx.attr.deps: + # Check if this is a `proto_library` dependency which has `ProtoInfo` + # See https://bazel.build/versions/6.5.0/rules/lib/ProtoInfo + if ProtoInfo in dep: + # Add `*.proto` files to the output which exist in `direct_sources` + # https://bazel.build/versions/6.5.0/rules/lib/ProtoInfo#direct_sources + for source in dep[ProtoInfo].direct_sources: + py_srcs.append(source) for f in dep.files.to_list(): if f.is_directory: dir_srcs.append(f) From cd3195265a2a0e4d819a531d20cd5297f00154a4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 4 Mar 2025 18:13:26 +0000 Subject: [PATCH 1259/1339] chore(main): release 1.23.4 (#2350) Co-authored-by: 
release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index eb7f05b0ca49..d4e327b3e3bf 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.23.4](https://github.com/googleapis/gapic-generator-python/compare/v1.23.3...v1.23.4) (2025-03-04) + + +### Bug Fixes + +* Allow proto files to be included in the output of py_gapic_assembly_pkg ([#2349](https://github.com/googleapis/gapic-generator-python/issues/2349)) ([b301bef](https://github.com/googleapis/gapic-generator-python/commit/b301befcda29ae1f9c204e0836656c03c2d74a2a)) + ## [1.23.3](https://github.com/googleapis/gapic-generator-python/compare/v1.23.2...v1.23.3) (2025-03-03) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5a57c6b6fea6..1ecb054012d4 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.3" +version = "1.23.4" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 422d926f3bf1a232d9014ed082902c79054c16d7 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 5 Mar 2025 16:41:06 -0500 Subject: [PATCH 1260/1339] fix: allow Protobuf 6.x (#2352) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 +- packages/gapic-generator/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- 
.../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/logging/setup.py | 2 +- .../tests/integration/goldens/logging_internal/setup.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- .../tests/integration/goldens/redis_selective/setup.py | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 3874d8f1f8d3..db64df84e994 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -40,7 +40,7 @@ dependencies = [ "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} {# Quick check to make sure the package is different from this setup.py #} {% if api.naming.warehouse_package_name != package_info.package_name %} diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1ecb054012d4..eece841d64d3 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -35,7 +35,7 @@ # https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.map # https://jinja.palletsprojects.com/en/2.11.x/changelog/#version-2-11-0 "jinja2 >= 2.11", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "pypandoc >= 1.4", "PyYAML >= 5.1.1", "grpc-google-iam-v1 >= 0.14.0, < 1.0.0", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py 
b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 1cbbc630a477..616d19e58bf5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "google-cloud-access-context-manager >= 0.1.2, <1.0.0", "google-cloud-os-config >= 1.0.0, <2.0.0", "grpc-google-iam-v1 >= 0.14.0, <1.0.0", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 084e35153ec5..8e03c21975ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 8bc8e368685e..4a4608bb2201 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + 
"protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.14.0, <1.0.0", ] extras = { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index ea57cc1c4002..1b7e5c1be295 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index ea57cc1c4002..1b7e5c1be295 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 75afe5324765..0de70b17e015 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; 
python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { "async_rest": [ diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index 75afe5324765..0de70b17e015 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -45,7 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = { "async_rest": [ From d59d6b43c67d9eb992817decc6ee9b66045345f0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 10:15:20 -0500 Subject: [PATCH 1261/1339] build(deps): bump jinja2 from 3.1.5 to 3.1.6 (#2354) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 8c346db80386..60298a4e5784 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -401,9 +401,9 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -jinja2==3.1.5 \ - --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ - 
--hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb +jinja2==3.1.6 \ + --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ + --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 # via -r requirements.in libcst==1.6.0 \ --hash=sha256:05c32de72553cb93ff606c7d2421ce1eab1f0740c8c4b715444e2ae42f42b1b6 \ From b4cd7282a6a71bcf0be315e2ac995e4be5914fab Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 6 Mar 2025 16:06:21 +0000 Subject: [PATCH 1262/1339] chore(main): release 1.23.5 (#2353) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d4e327b3e3bf..bdf0a4821c74 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.23.5](https://github.com/googleapis/gapic-generator-python/compare/v1.23.4...v1.23.5) (2025-03-06) + + +### Bug Fixes + +* Allow Protobuf 6.x ([#2352](https://github.com/googleapis/gapic-generator-python/issues/2352)) ([bb2d557](https://github.com/googleapis/gapic-generator-python/commit/bb2d557bf266599669c15efd81d8e900d48e7c3e)) + ## [1.23.4](https://github.com/googleapis/gapic-generator-python/compare/v1.23.3...v1.23.4) (2025-03-04) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index eece841d64d3..35fc65762a61 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.4" +version = "1.23.5" release_status = "Development 
Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 86378856d43549c3bbba025cfad1825da9ab3948 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 10:30:30 -0400 Subject: [PATCH 1263/1339] chore: remove unused files (#2356) Co-authored-by: ohmayr --- .../gapic-generator/.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 89 ------ .../.kokoro/docker/docs/requirements.in | 2 - .../.kokoro/docker/docs/requirements.txt | 297 ------------------ .../gapic-generator/.kokoro/docs/common.cfg | 67 ---- .../.kokoro/docs/docs-presubmit.cfg | 28 -- .../gapic-generator/.kokoro/docs/docs.cfg | 1 - .../gapic-generator/.kokoro/publish-docs.sh | 45 --- 8 files changed, 2 insertions(+), 531 deletions(-) delete mode 100644 packages/gapic-generator/.kokoro/docker/docs/Dockerfile delete mode 100644 packages/gapic-generator/.kokoro/docker/docs/requirements.in delete mode 100644 packages/gapic-generator/.kokoro/docker/docs/requirements.txt delete mode 100644 packages/gapic-generator/.kokoro/docs/common.cfg delete mode 100644 packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg delete mode 100644 packages/gapic-generator/.kokoro/docs/docs.cfg delete mode 100755 packages/gapic-generator/.kokoro/publish-docs.sh diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml index 3f7634f25f8e..c631e1f7d7e9 100644 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ b/packages/gapic-generator/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 +# created: 2025-03-05 diff --git a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile b/packages/gapic-generator/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296bd8..000000000000 --- a/packages/gapic-generator/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.in b/packages/gapic-generator/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd07037ae..000000000000 --- a/packages/gapic-generator/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt b/packages/gapic-generator/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25b707..000000000000 --- 
a/packages/gapic-generator/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - 
--hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - 
--hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - 
--hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - 
--hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - 
--hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - 
--hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - 
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - 
--hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - 
--hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/packages/gapic-generator/.kokoro/docs/common.cfg b/packages/gapic-generator/.kokoro/docs/common.cfg deleted file mode 100644 index d3e42b1bb714..000000000000 --- a/packages/gapic-generator/.kokoro/docs/common.cfg +++ /dev/null @@ -1,67 +0,0 @@ -# 
Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "gapic-generator-python/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/gapic-generator-python/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push non-cloud library docs to `docs-staging-v2-dev` instead of the - # Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2-dev" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. 
-env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} diff --git a/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg b/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 1c32c81df282..000000000000 --- a/packages/gapic-generator/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/gapic-generator-python/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs" -} diff --git a/packages/gapic-generator/.kokoro/docs/docs.cfg b/packages/gapic-generator/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/gapic-generator/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/publish-docs.sh b/packages/gapic-generator/.kokoro/publish-docs.sh deleted file mode 100755 index 322d8522ef0d..000000000000 --- a/packages/gapic-generator/.kokoro/publish-docs.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# Install nox -python3 -m pip install --user --upgrade --quiet nox -python3 -m nox --version - -# build docs -nox -s docs - -python3 -m pip install --user gcp-docuploader - -# create metadata -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" From b0f5836d4682e46b11075f5a82e917cad4eab361 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 12:04:50 -0400 Subject: [PATCH 1264/1339] chore: fix docs link in README (#2346) Co-authored-by: ohmayr --- packages/gapic-generator/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/README.rst b/packages/gapic-generator/README.rst index ef24297730a6..5b38c1d7fbbf 100644 --- a/packages/gapic-generator/README.rst +++ b/packages/gapic-generator/README.rst @@ -97,7 +97,7 @@ 
Documentation See the `documentation`_. -.. _documentation: https://googleapis.dev/python/gapic-generator-python/latest +.. _documentation: https://googleapis.dev/python/gapic-generator/latest/ .. |release level| image:: https://img.shields.io/badge/support-stable-gold.svg :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability From 90f394656d69f4704517306023c655cd95bb4ff5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 12:26:59 -0400 Subject: [PATCH 1265/1339] build: clean up generated noxfile (#2343) Co-authored-by: ohmayr --- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 9 ++++----- .../tests/integration/goldens/asset/noxfile.py | 9 ++++----- .../tests/integration/goldens/credentials/noxfile.py | 9 ++++----- .../tests/integration/goldens/eventarc/noxfile.py | 9 ++++----- .../tests/integration/goldens/logging/noxfile.py | 9 ++++----- .../integration/goldens/logging_internal/noxfile.py | 9 ++++----- .../tests/integration/goldens/redis/noxfile.py | 9 ++++----- .../tests/integration/goldens/redis_selective/noxfile.py | 9 ++++----- 8 files changed, 32 insertions(+), 40 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 18505d5434c7..fca28de83427 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -29,7 +29,7 @@ PACKAGE_NAME = '{{ api.naming.warehouse_package_name }}' BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -51,7 +51,7 @@ nox.sessions = [ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and 
float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -85,7 +85,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -120,8 +120,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 2ca01db6293c..9fc1f1b8fe1f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): 
def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 3b9e7366bb9f..119817e5f947 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if 
protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 685faed09bd5..ba82ee1ba998 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): 
"googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 8d723fa93820..f50952a8cf86 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index 8d723fa93820..f50952a8cf86 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 755a3329f3c1..2e308e51ccff 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index 755a3329f3c1..2e308e51ccff 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -40,7 +40,7 @@ BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", 
"tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] nox.sessions = [ "unit", @@ -62,7 +62,7 @@ def unit(session, protobuf_implementation): """Run the unit test suite.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') @@ -95,7 +95,7 @@ def unit(session, protobuf_implementation): def prerelease_deps(session, protobuf_implementation): """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and float(session.python) > 3.10: session.skip("cpp implementation is not supported in python 3.11+") # Install test environment dependencies @@ -130,8 +130,7 @@ def prerelease_deps(session, protobuf_implementation): "googleapis-common-protos", "google-api-core", "google-auth", - # Exclude grpcio!=1.67.0rc1 which does not support python 3.13 - "grpcio!=1.67.0rc1", + "grpcio", "grpcio-status", "protobuf", "proto-plus", From be6efe053fee858b97c349f4f6a9a3e1593ddb98 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 17 Mar 2025 13:39:31 -0400 Subject: [PATCH 1266/1339] docs: update copyright year (#2363) --- packages/gapic-generator/gapic/ads-templates/_license.j2 | 2 +- packages/gapic-generator/gapic/templates/_license.j2 | 2 +- .../tests/integration/goldens/asset/docs/conf.py | 2 +- .../integration/goldens/asset/google/cloud/asset/__init__.py | 2 +- .../goldens/asset/google/cloud/asset/gapic_version.py | 2 +- .../integration/goldens/asset/google/cloud/asset_v1/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/gapic_version.py | 2 +- 
.../goldens/asset/google/cloud/asset_v1/services/__init__.py | 2 +- .../google/cloud/asset_v1/services/asset_service/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/async_client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/client.py | 2 +- .../google/cloud/asset_v1/services/asset_service/pagers.py | 2 +- .../asset_v1/services/asset_service/transports/__init__.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/base.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/grpc.py | 2 +- .../asset_v1/services/asset_service/transports/grpc_asyncio.py | 2 +- .../cloud/asset_v1/services/asset_service/transports/rest.py | 2 +- .../asset_v1/services/asset_service/transports/rest_base.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/__init__.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/asset_service.py | 2 +- .../goldens/asset/google/cloud/asset_v1/types/assets.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/noxfile.py | 2 +- ...asset_v1_generated_asset_service_analyze_iam_policy_async.py | 2 +- ...erated_asset_service_analyze_iam_policy_longrunning_async.py | 2 +- ...nerated_asset_service_analyze_iam_policy_longrunning_sync.py | 2 +- ...dasset_v1_generated_asset_service_analyze_iam_policy_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_analyze_move_async.py | 2 +- .../cloudasset_v1_generated_asset_service_analyze_move_sync.py | 2 +- ...set_v1_generated_asset_service_analyze_org_policies_async.py | 2 +- ...sset_v1_generated_asset_service_analyze_org_policies_sync.py | 2 +- ...ed_asset_service_analyze_org_policy_governed_assets_async.py | 2 +- ...ted_asset_service_analyze_org_policy_governed_assets_sync.py | 2 +- ...sset_service_analyze_org_policy_governed_containers_async.py | 2 +- ...asset_service_analyze_org_policy_governed_containers_sync.py | 2 +- ...v1_generated_asset_service_batch_get_assets_history_async.py | 2 +- ..._v1_generated_asset_service_batch_get_assets_history_sync.py | 2 
+- ...ated_asset_service_batch_get_effective_iam_policies_async.py | 2 +- ...rated_asset_service_batch_get_effective_iam_policies_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_create_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_create_feed_sync.py | 2 +- ...asset_v1_generated_asset_service_create_saved_query_async.py | 2 +- ...dasset_v1_generated_asset_service_create_saved_query_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_delete_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_delete_feed_sync.py | 2 +- ...asset_v1_generated_asset_service_delete_saved_query_async.py | 2 +- ...dasset_v1_generated_asset_service_delete_saved_query_sync.py | 2 +- ...cloudasset_v1_generated_asset_service_export_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_export_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_get_feed_sync.py | 2 +- ...oudasset_v1_generated_asset_service_get_saved_query_async.py | 2 +- ...loudasset_v1_generated_asset_service_get_saved_query_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_list_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_assets_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_async.py | 2 +- .../cloudasset_v1_generated_asset_service_list_feeds_sync.py | 2 +- ...asset_v1_generated_asset_service_list_saved_queries_async.py | 2 +- ...dasset_v1_generated_asset_service_list_saved_queries_sync.py | 2 +- .../cloudasset_v1_generated_asset_service_query_assets_async.py | 2 +- .../cloudasset_v1_generated_asset_service_query_assets_sync.py | 2 +- ..._v1_generated_asset_service_search_all_iam_policies_async.py | 2 +- ...t_v1_generated_asset_service_search_all_iam_policies_sync.py | 2 +- ...set_v1_generated_asset_service_search_all_resources_async.py | 2 +- ...sset_v1_generated_asset_service_search_all_resources_sync.py | 2 +- 
.../cloudasset_v1_generated_asset_service_update_feed_async.py | 2 +- .../cloudasset_v1_generated_asset_service_update_feed_sync.py | 2 +- ...asset_v1_generated_asset_service_update_saved_query_async.py | 2 +- ...dasset_v1_generated_asset_service_update_saved_query_sync.py | 2 +- .../goldens/asset/scripts/fixup_asset_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 +- .../tests/integration/goldens/asset/tests/__init__.py | 2 +- .../tests/integration/goldens/asset/tests/unit/__init__.py | 2 +- .../integration/goldens/asset/tests/unit/gapic/__init__.py | 2 +- .../goldens/asset/tests/unit/gapic/asset_v1/__init__.py | 2 +- .../asset/tests/unit/gapic/asset_v1/test_asset_service.py | 2 +- .../tests/integration/goldens/credentials/docs/conf.py | 2 +- .../goldens/credentials/google/iam/credentials/__init__.py | 2 +- .../goldens/credentials/google/iam/credentials/gapic_version.py | 2 +- .../goldens/credentials/google/iam/credentials_v1/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/gapic_version.py | 2 +- .../credentials/google/iam/credentials_v1/services/__init__.py | 2 +- .../iam/credentials_v1/services/iam_credentials/__init__.py | 2 +- .../iam/credentials_v1/services/iam_credentials/async_client.py | 2 +- .../iam/credentials_v1/services/iam_credentials/client.py | 2 +- .../services/iam_credentials/transports/__init__.py | 2 +- .../credentials_v1/services/iam_credentials/transports/base.py | 2 +- .../credentials_v1/services/iam_credentials/transports/grpc.py | 2 +- .../services/iam_credentials/transports/grpc_asyncio.py | 2 +- .../credentials_v1/services/iam_credentials/transports/rest.py | 2 +- .../services/iam_credentials/transports/rest_base.py | 2 +- .../credentials/google/iam/credentials_v1/types/__init__.py | 2 +- .../credentials/google/iam/credentials_v1/types/common.py | 2 +- .../google/iam/credentials_v1/types/iamcredentials.py | 2 +- .../tests/integration/goldens/credentials/noxfile.py | 2 +- 
..._v1_generated_iam_credentials_generate_access_token_async.py | 2 +- ...s_v1_generated_iam_credentials_generate_access_token_sync.py | 2 +- ...ials_v1_generated_iam_credentials_generate_id_token_async.py | 2 +- ...tials_v1_generated_iam_credentials_generate_id_token_sync.py | 2 +- ...mcredentials_v1_generated_iam_credentials_sign_blob_async.py | 2 +- ...amcredentials_v1_generated_iam_credentials_sign_blob_sync.py | 2 +- ...amcredentials_v1_generated_iam_credentials_sign_jwt_async.py | 2 +- ...iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py | 2 +- .../credentials/scripts/fixup_credentials_v1_keywords.py | 2 +- .../tests/integration/goldens/credentials/setup.py | 2 +- .../tests/integration/goldens/credentials/tests/__init__.py | 2 +- .../integration/goldens/credentials/tests/unit/__init__.py | 2 +- .../goldens/credentials/tests/unit/gapic/__init__.py | 2 +- .../credentials/tests/unit/gapic/credentials_v1/__init__.py | 2 +- .../tests/unit/gapic/credentials_v1/test_iam_credentials.py | 2 +- .../tests/integration/goldens/eventarc/docs/conf.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/__init__.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py | 2 +- .../eventarc/google/cloud/eventarc_v1/services/__init__.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/__init__.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/async_client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/client.py | 2 +- .../google/cloud/eventarc_v1/services/eventarc/pagers.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/__init__.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/base.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/grpc.py | 2 +- .../eventarc_v1/services/eventarc/transports/grpc_asyncio.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/rest.py | 2 +- .../cloud/eventarc_v1/services/eventarc/transports/rest_base.py | 2 +- 
.../goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/channel.py | 2 +- .../google/cloud/eventarc_v1/types/channel_connection.py | 2 +- .../eventarc/google/cloud/eventarc_v1/types/discovery.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py | 2 +- .../google/cloud/eventarc_v1/types/google_channel_config.py | 2 +- .../goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py | 2 +- .../tests/integration/goldens/eventarc/noxfile.py | 2 +- .../eventarc_v1_generated_eventarc_create_channel_async.py | 2 +- ...arc_v1_generated_eventarc_create_channel_connection_async.py | 2 +- ...tarc_v1_generated_eventarc_create_channel_connection_sync.py | 2 +- .../eventarc_v1_generated_eventarc_create_channel_sync.py | 2 +- .../eventarc_v1_generated_eventarc_create_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_create_trigger_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_channel_async.py | 2 +- ...arc_v1_generated_eventarc_delete_channel_connection_async.py | 2 +- ...tarc_v1_generated_eventarc_delete_channel_connection_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_channel_sync.py | 2 +- .../eventarc_v1_generated_eventarc_delete_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_delete_trigger_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_channel_async.py | 2 +- ...entarc_v1_generated_eventarc_get_channel_connection_async.py | 2 +- ...ventarc_v1_generated_eventarc_get_channel_connection_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_channel_sync.py | 2 +- ...arc_v1_generated_eventarc_get_google_channel_config_async.py | 2 +- ...tarc_v1_generated_eventarc_get_google_channel_config_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_provider_async.py | 2 +- .../eventarc_v1_generated_eventarc_get_provider_sync.py | 2 +- .../eventarc_v1_generated_eventarc_get_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_get_trigger_sync.py | 2 
+- ...tarc_v1_generated_eventarc_list_channel_connections_async.py | 2 +- ...ntarc_v1_generated_eventarc_list_channel_connections_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_channels_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_channels_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_providers_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_providers_sync.py | 2 +- .../eventarc_v1_generated_eventarc_list_triggers_async.py | 2 +- .../eventarc_v1_generated_eventarc_list_triggers_sync.py | 2 +- .../eventarc_v1_generated_eventarc_update_channel_async.py | 2 +- .../eventarc_v1_generated_eventarc_update_channel_sync.py | 2 +- ..._v1_generated_eventarc_update_google_channel_config_async.py | 2 +- ...c_v1_generated_eventarc_update_google_channel_config_sync.py | 2 +- .../eventarc_v1_generated_eventarc_update_trigger_async.py | 2 +- .../eventarc_v1_generated_eventarc_update_trigger_sync.py | 2 +- .../goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 +- .../tests/integration/goldens/eventarc/tests/__init__.py | 2 +- .../tests/integration/goldens/eventarc/tests/unit/__init__.py | 2 +- .../integration/goldens/eventarc/tests/unit/gapic/__init__.py | 2 +- .../goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py | 2 +- .../eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py | 2 +- .../tests/integration/goldens/logging/docs/conf.py | 2 +- .../goldens/logging/google/cloud/logging/__init__.py | 2 +- .../goldens/logging/google/cloud/logging/gapic_version.py | 2 +- .../goldens/logging/google/cloud/logging_v2/__init__.py | 2 +- .../goldens/logging/google/cloud/logging_v2/gapic_version.py | 2 +- .../logging/google/cloud/logging_v2/services/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- 
.../cloud/logging_v2/services/config_service_v2/pagers.py | 2 +- .../services/config_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/config_service_v2/transports/base.py | 2 +- .../logging_v2/services/config_service_v2/transports/grpc.py | 2 +- .../services/config_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 2 +- .../services/logging_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/transports/base.py | 2 +- .../logging_v2/services/logging_service_v2/transports/grpc.py | 2 +- .../services/logging_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 2 +- .../services/metrics_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/base.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/grpc.py | 2 +- .../services/metrics_service_v2/transports/grpc_asyncio.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/__init__.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/log_entry.py | 2 +- .../goldens/logging/google/cloud/logging_v2/types/logging.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_config.py | 2 +- .../logging/google/cloud/logging_v2/types/logging_metrics.py | 2 +- .../tests/integration/goldens/logging/noxfile.py | 2 +- ...ing_v2_generated_config_service_v2_copy_log_entries_async.py | 2 +- ...ging_v2_generated_config_service_v2_copy_log_entries_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_create_bucket_async.py | 2 
+- ..._v2_generated_config_service_v2_create_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_create_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_create_bucket_sync.py | 2 +- ...ing_v2_generated_config_service_v2_create_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_create_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_link_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_create_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_create_view_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_bucket_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_bucket_sync.py | 2 +- ...ing_v2_generated_config_service_v2_delete_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_delete_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_link_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_link_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_delete_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_delete_view_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_sync.py | 2 +- ...ng_v2_generated_config_service_v2_get_cmek_settings_async.py | 2 +- ...ing_v2_generated_config_service_v2_get_cmek_settings_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_get_exclusion_async.py | 2 +- ...logging_v2_generated_config_service_v2_get_exclusion_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_link_async.py | 2 +- 
.../logging_v2_generated_config_service_v2_get_link_sync.py | 2 +- ...logging_v2_generated_config_service_v2_get_settings_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_settings_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_view_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_buckets_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_buckets_sync.py | 2 +- ...ging_v2_generated_config_service_v2_list_exclusions_async.py | 2 +- ...gging_v2_generated_config_service_v2_list_exclusions_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_links_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_links_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_sinks_sync.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_views_sync.py | 2 +- ...ging_v2_generated_config_service_v2_undelete_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_undelete_bucket_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_update_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_update_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_update_bucket_sync.py | 2 +- ...v2_generated_config_service_v2_update_cmek_settings_async.py | 2 +- ..._v2_generated_config_service_v2_update_cmek_settings_sync.py | 2 +- ...ing_v2_generated_config_service_v2_update_exclusion_async.py | 2 +- ...ging_v2_generated_config_service_v2_update_exclusion_sync.py | 2 +- ...ging_v2_generated_config_service_v2_update_settings_async.py | 2 +- 
...gging_v2_generated_config_service_v2_update_settings_sync.py | 2 +- .../logging_v2_generated_config_service_v2_update_sink_async.py | 2 +- .../logging_v2_generated_config_service_v2_update_sink_sync.py | 2 +- .../logging_v2_generated_config_service_v2_update_view_async.py | 2 +- .../logging_v2_generated_config_service_v2_update_view_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_async.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_list_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_list_log_entries_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_async.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_sync.py | 2 +- ...ging_service_v2_list_monitored_resource_descriptors_async.py | 2 +- ...gging_service_v2_list_monitored_resource_descriptors_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_tail_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_tail_log_entries_sync.py | 2 +- ...g_v2_generated_logging_service_v2_write_log_entries_async.py | 2 +- ...ng_v2_generated_logging_service_v2_write_log_entries_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_create_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_create_log_metric_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_delete_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_delete_log_metric_sync.py | 2 +- ...ging_v2_generated_metrics_service_v2_get_log_metric_async.py | 2 +- ...gging_v2_generated_metrics_service_v2_get_log_metric_sync.py | 2 +- ...ng_v2_generated_metrics_service_v2_list_log_metrics_async.py | 2 +- ...ing_v2_generated_metrics_service_v2_list_log_metrics_sync.py | 2 +- ...g_v2_generated_metrics_service_v2_update_log_metric_async.py | 2 +- ...ng_v2_generated_metrics_service_v2_update_log_metric_sync.py | 2 +- .../goldens/logging/scripts/fixup_logging_v2_keywords.py | 2 +- 
.../gapic-generator/tests/integration/goldens/logging/setup.py | 2 +- .../tests/integration/goldens/logging/tests/__init__.py | 2 +- .../tests/integration/goldens/logging/tests/unit/__init__.py | 2 +- .../integration/goldens/logging/tests/unit/gapic/__init__.py | 2 +- .../goldens/logging/tests/unit/gapic/logging_v2/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../tests/integration/goldens/logging_internal/docs/conf.py | 2 +- .../goldens/logging_internal/google/cloud/logging/__init__.py | 2 +- .../logging_internal/google/cloud/logging/gapic_version.py | 2 +- .../logging_internal/google/cloud/logging_v2/__init__.py | 2 +- .../logging_internal/google/cloud/logging_v2/gapic_version.py | 2 +- .../google/cloud/logging_v2/services/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/__init__.py | 2 +- .../cloud/logging_v2/services/config_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/client.py | 2 +- .../cloud/logging_v2/services/config_service_v2/pagers.py | 2 +- .../services/config_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/config_service_v2/transports/base.py | 2 +- .../logging_v2/services/config_service_v2/transports/grpc.py | 2 +- .../services/config_service_v2/transports/grpc_asyncio.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/client.py | 2 +- .../cloud/logging_v2/services/logging_service_v2/pagers.py | 2 +- .../services/logging_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/logging_service_v2/transports/base.py | 2 +- .../logging_v2/services/logging_service_v2/transports/grpc.py | 2 +- .../services/logging_service_v2/transports/grpc_asyncio.py | 2 +- 
.../cloud/logging_v2/services/metrics_service_v2/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/async_client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/client.py | 2 +- .../cloud/logging_v2/services/metrics_service_v2/pagers.py | 2 +- .../services/metrics_service_v2/transports/__init__.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/base.py | 2 +- .../logging_v2/services/metrics_service_v2/transports/grpc.py | 2 +- .../services/metrics_service_v2/transports/grpc_asyncio.py | 2 +- .../logging_internal/google/cloud/logging_v2/types/__init__.py | 2 +- .../logging_internal/google/cloud/logging_v2/types/log_entry.py | 2 +- .../logging_internal/google/cloud/logging_v2/types/logging.py | 2 +- .../google/cloud/logging_v2/types/logging_config.py | 2 +- .../google/cloud/logging_v2/types/logging_metrics.py | 2 +- .../tests/integration/goldens/logging_internal/noxfile.py | 2 +- ...erated_config_service_v2__copy_log_entries_async_internal.py | 2 +- ...nerated_config_service_v2__copy_log_entries_sync_internal.py | 2 +- ...erated_config_service_v2__create_exclusion_async_internal.py | 2 +- ...nerated_config_service_v2__create_exclusion_sync_internal.py | 2 +- ...2_generated_config_service_v2__create_link_async_internal.py | 2 +- ...v2_generated_config_service_v2__create_link_sync_internal.py | 2 +- ...2_generated_config_service_v2__create_sink_async_internal.py | 2 +- ...v2_generated_config_service_v2__create_sink_sync_internal.py | 2 +- ...2_generated_config_service_v2__create_view_async_internal.py | 2 +- ...v2_generated_config_service_v2__create_view_sync_internal.py | 2 +- ...erated_config_service_v2__delete_exclusion_async_internal.py | 2 +- ...nerated_config_service_v2__delete_exclusion_sync_internal.py | 2 +- ...2_generated_config_service_v2__delete_link_async_internal.py | 2 +- ...v2_generated_config_service_v2__delete_link_sync_internal.py | 2 +- ...2_generated_config_service_v2__delete_sink_async_internal.py | 2 +- 
...v2_generated_config_service_v2__delete_sink_sync_internal.py | 2 +- ...2_generated_config_service_v2__delete_view_async_internal.py | 2 +- ...v2_generated_config_service_v2__delete_view_sync_internal.py | 2 +- ...rated_config_service_v2__get_cmek_settings_async_internal.py | 2 +- ...erated_config_service_v2__get_cmek_settings_sync_internal.py | 2 +- ...generated_config_service_v2__get_exclusion_async_internal.py | 2 +- ..._generated_config_service_v2__get_exclusion_sync_internal.py | 2 +- ...g_v2_generated_config_service_v2__get_link_async_internal.py | 2 +- ...ng_v2_generated_config_service_v2__get_link_sync_internal.py | 2 +- ..._generated_config_service_v2__get_settings_async_internal.py | 2 +- ...2_generated_config_service_v2__get_settings_sync_internal.py | 2 +- ...g_v2_generated_config_service_v2__get_sink_async_internal.py | 2 +- ...ng_v2_generated_config_service_v2__get_sink_sync_internal.py | 2 +- ...g_v2_generated_config_service_v2__get_view_async_internal.py | 2 +- ...ng_v2_generated_config_service_v2__get_view_sync_internal.py | 2 +- ...nerated_config_service_v2__list_exclusions_async_internal.py | 2 +- ...enerated_config_service_v2__list_exclusions_sync_internal.py | 2 +- ...v2_generated_config_service_v2__list_links_async_internal.py | 2 +- ..._v2_generated_config_service_v2__list_links_sync_internal.py | 2 +- ...v2_generated_config_service_v2__list_sinks_async_internal.py | 2 +- ..._v2_generated_config_service_v2__list_sinks_sync_internal.py | 2 +- ...v2_generated_config_service_v2__list_views_async_internal.py | 2 +- ..._v2_generated_config_service_v2__list_views_sync_internal.py | 2 +- ...ed_config_service_v2__update_cmek_settings_async_internal.py | 2 +- ...ted_config_service_v2__update_cmek_settings_sync_internal.py | 2 +- ...erated_config_service_v2__update_exclusion_async_internal.py | 2 +- ...nerated_config_service_v2__update_exclusion_sync_internal.py | 2 +- ...nerated_config_service_v2__update_settings_async_internal.py | 2 +- 
...enerated_config_service_v2__update_settings_sync_internal.py | 2 +- ...2_generated_config_service_v2__update_sink_async_internal.py | 2 +- ...v2_generated_config_service_v2__update_sink_sync_internal.py | 2 +- ...2_generated_config_service_v2__update_view_async_internal.py | 2 +- ...v2_generated_config_service_v2__update_view_sync_internal.py | 2 +- ...ogging_v2_generated_config_service_v2_create_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_create_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_create_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_create_bucket_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_delete_bucket_async.py | 2 +- ...logging_v2_generated_config_service_v2_delete_bucket_sync.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_async.py | 2 +- .../logging_v2_generated_config_service_v2_get_bucket_sync.py | 2 +- ...logging_v2_generated_config_service_v2_list_buckets_async.py | 2 +- .../logging_v2_generated_config_service_v2_list_buckets_sync.py | 2 +- ...ging_v2_generated_config_service_v2_undelete_bucket_async.py | 2 +- ...gging_v2_generated_config_service_v2_undelete_bucket_sync.py | 2 +- ...ogging_v2_generated_config_service_v2_update_bucket_async.py | 2 +- ..._v2_generated_config_service_v2_update_bucket_async_async.py | 2 +- ...g_v2_generated_config_service_v2_update_bucket_async_sync.py | 2 +- ...logging_v2_generated_config_service_v2_update_bucket_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_async.py | 2 +- .../logging_v2_generated_logging_service_v2_delete_log_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_list_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_list_log_entries_sync.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_async.py | 2 +- .../logging_v2_generated_logging_service_v2_list_logs_sync.py | 2 +- ...ging_service_v2_list_monitored_resource_descriptors_async.py | 2 +- 
...gging_service_v2_list_monitored_resource_descriptors_sync.py | 2 +- ...ng_v2_generated_logging_service_v2_tail_log_entries_async.py | 2 +- ...ing_v2_generated_logging_service_v2_tail_log_entries_sync.py | 2 +- ...g_v2_generated_logging_service_v2_write_log_entries_async.py | 2 +- ...ng_v2_generated_logging_service_v2_write_log_entries_sync.py | 2 +- ...ated_metrics_service_v2__create_log_metric_async_internal.py | 2 +- ...rated_metrics_service_v2__create_log_metric_sync_internal.py | 2 +- ...ated_metrics_service_v2__delete_log_metric_async_internal.py | 2 +- ...rated_metrics_service_v2__delete_log_metric_sync_internal.py | 2 +- ...nerated_metrics_service_v2__get_log_metric_async_internal.py | 2 +- ...enerated_metrics_service_v2__get_log_metric_sync_internal.py | 2 +- ...rated_metrics_service_v2__list_log_metrics_async_internal.py | 2 +- ...erated_metrics_service_v2__list_log_metrics_sync_internal.py | 2 +- ...ated_metrics_service_v2__update_log_metric_async_internal.py | 2 +- ...rated_metrics_service_v2__update_log_metric_sync_internal.py | 2 +- .../logging_internal/scripts/fixup_logging_v2_keywords.py | 2 +- .../tests/integration/goldens/logging_internal/setup.py | 2 +- .../integration/goldens/logging_internal/tests/__init__.py | 2 +- .../integration/goldens/logging_internal/tests/unit/__init__.py | 2 +- .../goldens/logging_internal/tests/unit/gapic/__init__.py | 2 +- .../logging_internal/tests/unit/gapic/logging_v2/__init__.py | 2 +- .../tests/unit/gapic/logging_v2/test_config_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_logging_service_v2.py | 2 +- .../tests/unit/gapic/logging_v2/test_metrics_service_v2.py | 2 +- .../tests/integration/goldens/redis/docs/conf.py | 2 +- .../integration/goldens/redis/google/cloud/redis/__init__.py | 2 +- .../goldens/redis/google/cloud/redis/gapic_version.py | 2 +- .../integration/goldens/redis/google/cloud/redis_v1/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/gapic_version.py | 2 +- 
.../goldens/redis/google/cloud/redis_v1/services/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- .../redis/google/cloud/redis_v1/services/cloud_redis/pagers.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/base.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 2 +- .../redis_v1/services/cloud_redis/transports/grpc_asyncio.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest.py | 2 +- .../redis_v1/services/cloud_redis/transports/rest_asyncio.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest_base.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/__init__.py | 2 +- .../goldens/redis/google/cloud/redis_v1/types/cloud_redis.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/noxfile.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_export_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_failover_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_async.py | 2 +- ...s_v1_generated_cloud_redis_get_instance_auth_string_async.py | 2 +- ...is_v1_generated_cloud_redis_get_instance_auth_string_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_import_instance_sync.py | 2 +- 
.../redis_v1_generated_cloud_redis_list_instances_async.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_sync.py | 2 +- ...dis_v1_generated_cloud_redis_reschedule_maintenance_async.py | 2 +- ...edis_v1_generated_cloud_redis_reschedule_maintenance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_upgrade_instance_sync.py | 2 +- .../goldens/redis/scripts/fixup_redis_v1_keywords.py | 2 +- .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 +- .../tests/integration/goldens/redis/tests/__init__.py | 2 +- .../tests/integration/goldens/redis/tests/unit/__init__.py | 2 +- .../integration/goldens/redis/tests/unit/gapic/__init__.py | 2 +- .../goldens/redis/tests/unit/gapic/redis_v1/__init__.py | 2 +- .../goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- .../tests/integration/goldens/redis_selective/docs/conf.py | 2 +- .../goldens/redis_selective/google/cloud/redis/__init__.py | 2 +- .../goldens/redis_selective/google/cloud/redis/gapic_version.py | 2 +- .../goldens/redis_selective/google/cloud/redis_v1/__init__.py | 2 +- .../redis_selective/google/cloud/redis_v1/gapic_version.py | 2 +- .../redis_selective/google/cloud/redis_v1/services/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/__init__.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/async_client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/client.py | 2 +- .../google/cloud/redis_v1/services/cloud_redis/pagers.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/__init__.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/base.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/grpc.py | 2 +- .../redis_v1/services/cloud_redis/transports/grpc_asyncio.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest.py | 
2 +- .../redis_v1/services/cloud_redis/transports/rest_asyncio.py | 2 +- .../cloud/redis_v1/services/cloud_redis/transports/rest_base.py | 2 +- .../redis_selective/google/cloud/redis_v1/types/__init__.py | 2 +- .../redis_selective/google/cloud/redis_v1/types/cloud_redis.py | 2 +- .../tests/integration/goldens/redis_selective/noxfile.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_create_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_delete_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_get_instance_sync.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_async.py | 2 +- .../redis_v1_generated_cloud_redis_list_instances_sync.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_async.py | 2 +- .../redis_v1_generated_cloud_redis_update_instance_sync.py | 2 +- .../goldens/redis_selective/scripts/fixup_redis_v1_keywords.py | 2 +- .../tests/integration/goldens/redis_selective/setup.py | 2 +- .../tests/integration/goldens/redis_selective/tests/__init__.py | 2 +- .../integration/goldens/redis_selective/tests/unit/__init__.py | 2 +- .../goldens/redis_selective/tests/unit/gapic/__init__.py | 2 +- .../redis_selective/tests/unit/gapic/redis_v1/__init__.py | 2 +- .../tests/unit/gapic/redis_v1/test_cloud_redis.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_async.py | 2 +- .../mollusca_v1_generated_snippets_list_resources_sync.py | 2 +- ...ollusca_v1_generated_snippets_method_bidi_streaming_async.py | 2 +- ...mollusca_v1_generated_snippets_method_bidi_streaming_sync.py | 2 +- ...ollusca_v1_generated_snippets_method_lro_signatures_async.py | 2 +- ...mollusca_v1_generated_snippets_method_lro_signatures_sync.py | 2 +- ...mollusca_v1_generated_snippets_method_one_signature_async.py | 2 +- 
.../mollusca_v1_generated_snippets_method_one_signature_sync.py | 2 +- ...lusca_v1_generated_snippets_method_server_streaming_async.py | 2 +- ...llusca_v1_generated_snippets_method_server_streaming_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_async.py | 2 +- ..._v1_generated_snippets_one_of_method_required_field_async.py | 2 +- ...a_v1_generated_snippets_one_of_method_required_field_sync.py | 2 +- .../mollusca_v1_generated_snippets_one_of_method_sync.py | 2 +- .../tests/unit/samplegen/golden_snippets/sample_basic.py | 2 +- .../tests/unit/samplegen/golden_snippets/sample_basic_async.py | 2 +- .../samplegen/golden_snippets/sample_basic_unflattenable.py | 2 +- .../unit/samplegen/golden_snippets/sample_basic_void_method.py | 2 +- 540 files changed, 540 insertions(+), 540 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/_license.j2 b/packages/gapic-generator/gapic/ads-templates/_license.j2 index 1ee14b782014..3d87ee089ead 100644 --- a/packages/gapic-generator/gapic/ads-templates/_license.j2 +++ b/packages/gapic-generator/gapic/ads-templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/gapic/templates/_license.j2 b/packages/gapic-generator/gapic/templates/_license.j2 index 1ee14b782014..3d87ee089ead 100644 --- a/packages/gapic-generator/gapic/templates/_license.j2 +++ b/packages/gapic-generator/gapic/templates/_license.j2 @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index fa7647914fb5..6ef2567dd078 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py index de2660efe3aa..fd9404f4b60c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index f189f4430667..03ee9ec521b7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py index 1ad75a011889..b35796f58242 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index bfd8ed8e00e3..ef5bc7163f02 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 95f0c102079f..217fa16d0c21 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py index 5620c82f3151..5d14be97dd31 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py index 315eb22bd6cb..be001a49d9f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index a1690041f744..0c968b0c7d9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 519ecdec2bd3..8c3af5208e93 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 54666f93ecc5..96833f741bcc 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index f222a8a46d87..0c936ad2968b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index 7c4d227f9999..91d6091fe114 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py index ccb88282b582..d8a9b7f9108d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 312683d50eeb..29138c3ea3b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index 15cfb5b14265..a62b5696f22c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 9fc1f1b8fe1f..5b4265d72615 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py index 8369402d5e70..972a32690352 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py index 90a5878ac05a..677d79556efa 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py index 1f246a28937e..603d16e9c0fe 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_longrunning_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py index 73dd37f5cab0..2e623980824a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_iam_policy_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py index ab5d6f2a7560..1a09c5464486 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py index e886b920c49a..e1c4d693611b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_move_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py index 552d1ecc6eb2..b7c1b35df750 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py index b56d918846fb..315543e7d5a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py index adb76c21c6a3..8ed6cc039b28 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py index 63efeb54057e..a5e3393f985d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py index 66dc1dab9063..215869302790 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py index edd19ed69fbb..4f52cbdb913c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_analyze_org_policy_governed_containers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py index 9655b814eb65..185ec5b55028 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py index 954d23bf7dce..a4dbf5d89897 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_assets_history_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py index 7bf645687401..2a2112e96bb8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py index 8e173d62bda9..03874bb7410f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_batch_get_effective_iam_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py index 6cc4b2f9521a..0bcd5fb1e550 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py index f5237ca5e998..2f8e11296274 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py index 7c9bf95a48d1..a8b64d40ecac 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py index 746fcbda3656..53cb726f86a8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_create_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py index 67a09ebd009a..7f9122f68176 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py index f86f14507e30..37ae42c9d076 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py index daf5c52ac19c..6f0c87b6daa1 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py index f556d9389662..e97235ad6be5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_delete_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py index d6ce3ef29c0a..61934e88f076 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py index 3d1227c3717a..696b353c3757 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_export_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py index 8f28ebec4b74..884b7d0034f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py index 64a8156dd3e3..712a533b7155 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py index bd5835b75eba..a24420bb5fc7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py index 68c585047a7f..291b88589c9b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_get_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py index 36af8779f4ea..6afb977deb07 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py index 19e076231458..f7fc8801a1e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py index b0c43ba61881..64284321eb2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py index cb5da42cf621..b9733d98cdb2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_feeds_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py index 98079450807b..3d5f2095cb55 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py index 09b57384c4da..9c096d9e60ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_list_saved_queries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py index 7616115c304b..baaade80cdee 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py index 177f021b5382..a52f6818bd64 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_query_assets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py index 4417191b56ad..a62518e66584 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py index 23e8b1f9fecc..71536d46c51d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_iam_policies_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py index 6ff7c119df12..aa480039bd46 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py index c6ef7ed258ac..26ea1bffc470 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_search_all_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py index 2c15208140d6..23cc1a6f0d91 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py index f867fd26d768..36745fa8c988 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_feed_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py index b55c69cba951..269098a2574a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py index f187066a055a..fb8864825e7e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/cloudasset_v1_generated_asset_service_update_saved_query_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py index 113392c7d9b5..a31f2d16b055 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 616d19e58bf5..5d506dc2d915 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 9a4489e98d53..10cb01ca87b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index ddea8370c741..87daeb37dc7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py index 2041446f63fc..9c0382dd3319 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index dc7f393a7568..2be4471eb82d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py index 1e7e3fa347f0..988290706545 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index 29ea194a7ad0..a8b182fd1bec 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index b6656c626716..45a1f615b0f6 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py index cc75211251f1..0214f01a40ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 1c7ac928006f..5b9d0cf1b4e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 0e55e2978514..6bb7b4401922 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index 8ac65146dc67..c97b00af64b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index 9dc7959ac59a..cbb5d611b125 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py index 8d1bc465012b..a03907410f78 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py index c93152d4ac27..f257b889924c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py index a275bf02a07e..f267a4b7a58a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py index f17a81e5542c..17d048bd5d33 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/types/iamcredentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 119817e5f947..bc738155412a 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py index 21e2f06dad40..23cdbcb65d98 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py index b29ff5b52e70..b7890bd0106e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_access_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py index 3bbb2348993d..0215ccea7f69 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py index 4d7e1050a689..cf2199eae2ca 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_generate_id_token_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py index 7003b8f58fd3..9dd1113be682 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py index b35aaaa4d908..62752da45472 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_blob_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py index 2e6958a96efe..3022b82a96d2 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py index 86ecac2a308e..3c6faea9b194 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_v1_generated_iam_credentials_sign_jwt_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py index 40cebee0e91b..c2854aa54c44 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 8e03c21975ef..6ffe75b3ba84 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 693c6d60a733..62b329f3e526 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py index 4545505bf3da..15e37e84a4fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 1f990ec5d30b..a34386728483 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py index 6c6ea026d4bc..c604e58a85da 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 4200dfc1bfc8..7fa2037fe228 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 551bf1eb3b55..791d872e9574 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py index 3acf89e71bd6..0b116f59f3fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py index afed78b17c2c..c6c13c6a4c6f 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 4ad308359ab1..650e0a956960 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 8c602c7fd895..107b8db45d68 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 38009d7f9076..fdbfeab462dc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 32211ddf9e74..1c79b9ce6fd6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index c2bafca03b57..6ff0278064bd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py index 02e5a64aa84d..0521e2f2f27d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py index 13ab06e15858..0c3d920b1a46 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py index 59d79edf4331..5279252f195a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/channel_connection.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py index d8424067300d..430bae6e41ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/discovery.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index ec6bce60dfbf..b114b533e678 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py index c99e4bc09554..2e17f40ae1bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/google_channel_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py index 562f0b4d59b6..83174e8ccee0 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/trigger.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index ba82ee1ba998..66f44ba3ebc5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py index 609801b7ede9..48d01b7a94aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py index 27908ff206e8..be468f4ec6e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py index a4d3d5321f73..2ec59ef191cd 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_connection_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py index 36a0696246ae..15c6ce7f870e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py index a962abf7b2ec..92756c216367 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py index 1255f73e32f0..0df4f6f6d168 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_create_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py index 9dad97f48420..eb88c5b18eec 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py index 92f3c64550fd..7bc0ecfb2bc6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py index c9a0a1e35252..bb5c5a46a4a9 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_connection_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py index f2efb782d664..dc9b80adbcd5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py index c028d40e01ce..3f117943296b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py index df0bd0efe944..ced373f70528 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_delete_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py index a2bdc034a30d..c642f6e60bbe 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py index d1f83e2be2ff..3ef14c182dae 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py index e91dfb82ba94..3e32910246f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_connection_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py index 24f6e592ab2a..b5a95d779890 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py index 43968fed3093..08fac9576f1d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py index 87cb63ab4e1f..b7a368375fb6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_google_channel_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py index ee87d696f717..ca30f56b8538 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py index 9c19d1923ae4..182e4f03ba5c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_provider_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py index 36655bfbf3b6..3e7f4f00cf98 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py index 8354eb167ebc..22097c52adf1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_get_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py index 24a4ac3b6b03..0f4c7160a0f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py index a8a5ec2ab3fd..4b24c83ca86e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channel_connections_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py index cb70db05a623..1a7522bc78f1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py index 405dd6d39da9..cbb9fcdb5658 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_channels_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py index 9b1a282ece1f..af58a0d5d41e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py index bcaaa019d697..fbe34f043b8e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_providers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py index 9878997a7732..1f125cfc7a71 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py index 31558bea7a97..fa5d923b931e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_list_triggers_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py index 909bc26c904e..a2ce3afb32d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py index c478b155f896..aed47673859b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_channel_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py index ef74cb1afd0b..343afc4d6c15 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py index d20d7c495395..816b92da0099 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_google_channel_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py index c237b16bf935..0eba9dd1a60c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py index bed421304638..4954ec9087c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/eventarc_v1_generated_eventarc_update_trigger_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py index afc415fb519a..e7d3db475b60 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 4a4608bb2201..4cba69da0208 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 325b396d0cc9..66ac1829f142 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index e1d58773a736..9f5f212faa62 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py index 8a740d4fffe6..0e30784fd911 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 3ad08618f71f..e8b59e0bb511 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py index 7e81edbc6394..7c1b69fb603f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 193cae126a93..8a04c81bf371 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 61409f5b4fd5..638be89e420b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py index ff7d5d233510..1af6b54c9924 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 1239a292b25a..cc3da21c119f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 6dbea3b48393..479f4fdb171c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 02789efdee5a..0d7ee84124e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 78495902e822..46dd72d79d94 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 3533e0cda4ae..d9820f09067b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index ee9e78158367..326e225072a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a52186153a84..9104ad8b3be6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py index c4f0739ea163..ee183ce8057c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 4e814dcca94b..25058513ec9e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index ff6d8c8dd20a..e7cc266faeff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 718d4ac6713c..e64b36f15b3c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 6933d0f2ba34..1604ba62bc45 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 4068a59662bf..50d894b6003c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index cb20bb0327d7..65907d2cb66a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index ab976fd84450..5bbcc4bbc14a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index fc24bb44c900..3d44cf6e4c67 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 994b47887efd..a6eb39e80fa0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 3fdfd91333d0..596f3beeddde 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index f393fd814ed9..4a2d98cc240e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index b382f48506bf..dcadfbe957b7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py index ce693111d295..3023b14aa8ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index e9dac2e5ebcb..c82bebc0ec4c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index b294739dddea..e86337ea863f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 3115109aa03d..93233511d1b2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py index d31f7e39732a..d26267d8cf81 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index f50952a8cf86..c1b4a5788d1d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py index d7a99dde2eec..2d10cffeea49 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py index dfb1d6649a9b..cfa20a4443d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 33fbfdc82062..5ee585345cca 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 63cfbe6809a6..6813a6f3a349 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index 7aa41cc72357..e279c8af429b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index 45eb3008c868..9cee676e79ed 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py index 4e6a7a4b65c8..a40632d36959 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py index aaf80170866b..05428dd18de9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py index ad37d01172a8..f4521b6e1a17 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py index c76b96582041..2564dddc6249 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py index 3a1905617454..ae12b2a42007 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py index bae648ae4a9c..9fa8b6fd74f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py index 316cffd4e0a3..f885d552594a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py index 8f2ce706d64a..e2689d1d0cf1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index 22c65b7c831d..397a18535bc7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index df954b6733ed..dac2b06b26d5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py index cf42f9ea10dc..1efc4425461e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py index 8ffd1c8e0837..699507c81aa6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py index 8e9546500ea7..2637bae7aa67 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py index 542d327dad1e..26018c30339b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py index 2ccbdea9bb04..498364c0b719 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py index 5bc0e2a9cdb5..982958785341 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py index 2ed78b67996b..7ea47035ea2c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py index 1683711af60a..2a39e0058914 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 65bac322829d..2dc4d347b70f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index e540e3c2bde7..2c8728a187f9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py index 6a725158774a..96570c459f11 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py index 5c259791926a..dba97688293b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py index d9561edfed3b..34a231caa741 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py index a83707e78240..a8820f65ab4a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py index 6ebb21533096..ee2aefa35041 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py index 9ea61a18c813..e29e7cd54ccd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py index e3f3eb215a5c..f7c974545c69 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py index c5c2dbdbdb22..0511154a762e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py index 6117f303c91e..508004fe95d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py index 5e368652d475..6b62d1fc795e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py index 3c73ceb7b686..5a5d1924651e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py index 4c85e51bb0dd..a3dd884f135a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index fd1c75ee2644..0022cc9ad990 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index 1100c49463e3..bba54f0a35a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py index 8a84e02dc9fb..2ebd52d1e1da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py index 36c2ef9b238b..4610d4d89961 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py index 7357700c8b10..b58baa85d69f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py index 35758efbfee1..9b390a50f28b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py index 08e56ea67e0b..7309d5947176 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py index 3c343f13400c..8ce72154b09f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py index 08c9f5ec4bc0..165ac3c53ceb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py index f04009b8b7d8..9f17c32de4a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index 00ba706111f3..ad09de047af0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index c85952a369a5..a9645f2be7b5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index 88042ee699a4..5ad81f1298b9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index 8e615977c75a..7edf95a4c6d3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index 847e83ef1295..ec3a04b544ea 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index fdf74e7d8679..cb29502c3c40 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py index f8dd087b9b31..7d8189d4407b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py index 42a511d0d5e5..ea785cb38035 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py index 20301b70581a..f61b9f391ebf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py index a3ac5db82a51..c721650365bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py index 486ff4f0905e..0fc190a8dcd8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py index 57b588e71725..7992e5b91ad7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py index f820c78a88ff..e3ca79038521 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py index d10840436a40..e7f6ad63cccb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py index cc2d8a998cde..b9eddde3d6da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py index 5f5c2b231d27..ff51d39661f2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 4f6f42664dd5..51eac2939ad5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 056be6957768..c0ecb7beecd0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 2d3e52ebe184..c421061799d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d1a86427f185..06ca2f113fd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 278a2c47012e..d33584db700d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index bc2071cf4784..11ef98ea7222 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index b9224990329b..574a533dd739 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index 78f5bc605553..ca6b21d3b0e0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 3047a4f49db7..646ec1476b63 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 2080458cfaa0..aab2284789e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index bdc98630dac8..75c30e24dd70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 884ef7a296cd..78b2c8242f9d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py index 23c4ebf2461f..c753d03b1585 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py index a7954dddedae..307c0ac42604 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py index fda8be673408..02721dc20474 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py index d57818dff81a..47db39bd8485 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py index 232cd5ee4944..997d7dcec72c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py index 62458a437a6e..71c1f95dd1ea 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py index 3655223bae09..102121053298 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py index 780f276ec59d..9f8d13b6fcd1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py index 09a259a2705f..6b2ad146c62a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py index 159576050279..59a49b611fa2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py index 8f01527b8d4a..30efcd1cd080 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 1b7e5c1be295..23a3a12a2b64 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index b73f557e1242..5de14eaafe9a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index bd8dd4158030..195d45601748 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 8f9bc8e768c1..a7b6c70e076e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py index e1d58773a736..9f5f212faa62 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py index c747bb3607b6..8e644e881d12 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py index f70b54f4818e..3e0f9c2155ff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py index 068c5d2db9ca..189db0d9c351 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index e4da77044e6a..4dec03baf71d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index 6119170ecc98..abffbaec718d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py index 1fd90927372d..6f4fce6dc3e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py index 1239a292b25a..cc3da21c119f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 1604e0a7663a..82f9f7d683e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index de56227328fe..8b9ca93a0702 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 884a984ca825..d89249f948eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py index 3533e0cda4ae..d9820f09067b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index ee9e78158367..326e225072a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index a52186153a84..9104ad8b3be6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py index c4f0739ea163..ee183ce8057c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py index 4e814dcca94b..25058513ec9e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index ff6d8c8dd20a..e7cc266faeff 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 718d4ac6713c..e64b36f15b3c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 6933d0f2ba34..1604ba62bc45 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py index 78a857aefbac..27ad0c062212 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 951c77479f9d..aac48baba9bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index 64bb863edd77..3c4c03cd40c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index b7d888ff8393..7024e3a7b362 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py index 994b47887efd..a6eb39e80fa0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 40cb303a6233..a2d6ff535979 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 174066a7ba87..bfb8a1e8e392 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 1ad75cbf05df..3760cc25b928 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py index ce693111d295..3023b14aa8ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py index e9dac2e5ebcb..c82bebc0ec4c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py index b294739dddea..e86337ea863f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py index 3115109aa03d..93233511d1b2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py index d31f7e39732a..d26267d8cf81 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_metrics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index f50952a8cf86..c1b4a5788d1d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py index c72d71144d36..28219651a265 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py index 28a94850ec52..be1d43fd90a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py index 72cac228d628..d9e42b574535 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py index 4b6efb42f072..ecead9501d29 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py index 50d9dd7d79d3..3d418d09c3c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py index 3da3496391aa..2cc40f8b6d0c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py index d90e17666384..d5e266b6b2f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py index 460e6fb0ab16..fa6de4343106 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py index e746a27d2d40..57a38bc4f09b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py index 2c140e7852b7..b469117115ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py index 742320fa71de..c0bae20206f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py index 2e111fa747ad..e51bd9646f1d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py index 93b085612bb7..346f157820c4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py index 6b6e2d114c31..71f2c15bb928 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py index fc97001cbb90..6c8430dacf6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py index bfb465f43064..6439a659ec11 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py index aaa5bf7ccca1..a3b6d8e165b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py index 388721cadf17..0f4f6bed4a09 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py index 201149cee02e..b24d8f6eb4ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py index b43d8adea143..e4b45ba330f6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py index 295a59e9536b..88339ae5ca39 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py index 011112649658..5a6315464c21 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py index 43b6825bd042..b62bee5af31e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py index 7f70bec5427c..b6d674cbc90d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py index bbe5934f442b..ad89d377831c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py index 25eb8923b3d2..66304337e6f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py index bb5d38c0288a..c4d3487fb5c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py index 47cccaaa8bd2..5f575b014f5c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py index 6d8981da9ad0..f82665334c8b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py index 8def9a82b4b5..20814792b201 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py index 1a46095422ce..fb24733e17a9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py index 2497e6cd35d4..24510000dd0e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py index 38c3f77d1e77..2676cf38a306 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py index 45a26e31fbb3..e82b176c6c37 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py index 54bdbc6c5c0e..c26e55bd338f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py index 6f9841f1f96c..915bf6c832e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py index 9c5478064b9c..56f1054a8f1e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py index c088c8d6082a..92b918941152 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py index f767ebca6a76..7c63081b593c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py index 3fec55b7f6b8..6acb574f75c9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py index b55f944edbb3..61b8f4075c67 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py index b28402d20d6e..7fda4351a173 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py index f2f262e24a6a..0e95402d4803 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py index d0ba8a3f788e..2ab3cee694e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py index 716a0b07e96c..d111947839b2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py index 3dc375fa0243..ef13e3552050 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py index 983bfd16b87f..aa58cb9ce0d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py index 204613cc4c3d..d4cedbbdc1f3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py index 711a4a39422f..fb323dcf6c9e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py index 7f926256f75d..d5125a9cdfcc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py index c9608c7c5660..4b137facd3c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py index c13c727af381..60feee2dc575 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py index ee3df2fd9791..9e80f3f68f04 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py index d747c6530bd5..d7ed7983dc96 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py index 72093053348f..54af8efd76bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py index 9ec27d9deb40..9a4020018c62 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py index ae5936d9f66c..ee91e82647b4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py index edfe2fd7d227..306a29952f8b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_buckets_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py index 5ff8813cd716..b876e32171c5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py index c83ab9dde0fc..1bc23d198636 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_undelete_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py index b55fd51a5d05..e1f67c09803a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py index c9fcb689628c..dfad5f4a5267 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py index deb199aaf250..e6782e0affa5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_async_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py index e173f6497dad..512c1bc09762 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_bucket_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py index 4f6f42664dd5..51eac2939ad5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py index 056be6957768..c0ecb7beecd0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_delete_log_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py index 2d3e52ebe184..c421061799d6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py index d1a86427f185..06ca2f113fd9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py index 278a2c47012e..d33584db700d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py index bc2071cf4784..11ef98ea7222 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_logs_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py index b9224990329b..574a533dd739 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py index 78f5bc605553..ca6b21d3b0e0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_list_monitored_resource_descriptors_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py index 3047a4f49db7..646ec1476b63 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py index 2080458cfaa0..aab2284789e3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_tail_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py index bdc98630dac8..75c30e24dd70 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py index 884ef7a296cd..78b2c8242f9d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_logging_service_v2_write_log_entries_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py index e1e54ba56b14..2cf02a3533f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py index 2bab5195b917..2d86ccb0f75d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py index 9373ac0d4b5a..4d1acdd83c79 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py index 431e26a197a9..e7b1e64c690e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py index 458a5fc6887a..a46b0c2685af 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py index f1c293e2e080..0d0f38a06969 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py index d084304f4a90..d93f42fe9a69 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py index 305a5b39b355..4e111ba58f08 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py index 4546c68e4229..fa5699c778aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py index 07fa1d67f1af..aedb26d9a1d4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py index 3e183618dfe0..a4d2fe8003ca 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index 1b7e5c1be295..23a3a12a2b64 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index 1c67fff5278c..09812660a7c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py index bd8dd4158030..195d45601748 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index ba8812f5792f..1dcfe45d9f97 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index 0764f832c6f8..975c25960061 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py index ededc2d374b0..98170c766532 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index e89fe0cc8a10..af3d250ccdf8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py index 17e93eea6bfe..f302df64f13f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 339e572e8ed5..1291288de28e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index aa1e0675cde5..7313c321753b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py index e3d2c05b83d6..4e0e4cb32343 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 563cd5dd7682..1cbbf54c2525 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 57832b4d914e..1b11ec1f69df 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 76c07f74bb53..abb362ff0fa7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index aa699fc707f3..3731518549ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 702c01fb2b4f..7a4160a2ca74 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 24f3370c48b6..36a9c0101409 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index e63a3d850341..56df2832192a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py index 6afcaca198a8..4103832ed203 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index 50ca82ff0299..d6af078cafb0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 2e308e51ccff..15ea2482945d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 3385bd5f99e4..e263e176c702 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index 4ea53ed516c6..ea546b2795ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 462ec0e4ee0d..720fc67ee399 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index d933dd8a4521..610246d96510 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py index bc2a58ff39b6..321ab7af35f2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py index a3e059a90f56..b6abe0bd10d0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_export_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py index bc5eb5bf71e1..1aeb9312d798 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py index 221da9018e87..34c7d735f872 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_failover_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index e13bffbd78cc..241282db64fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py index e79a81d7b704..832355185758 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py index 440b4db35ba4..0f8c811919e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_auth_string_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index 4a6053e1d95d..5de5dd3bba9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py index 8384b9c1870e..2914c06d1b44 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py index 43e5fb5a1798..3a3f1fac1a51 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_import_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 19396f14baeb..67c9a3c86e46 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index ec07ceff62c3..2922fb554895 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py index 9ff86bb0c7a9..b5c91c28e631 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py index be55eed88654..ac2089381ae2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_reschedule_maintenance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 9a6d69923d3f..4d31d03f75aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index 2429f106358d..b5ae40fbab41 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py index d72856938573..7fe2c6032322 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py index 2682878bbb74..89cdfffef676 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/redis_v1_generated_cloud_redis_upgrade_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py index 5ea0edd0793a..c6070fe6a836 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 0de70b17e015..54fece2f918d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 275fc26b2c85..4ff5644cdc5c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py index 0764f832c6f8..975c25960061 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py index 527bad46ac9f..045bcae4c55c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py index 797596a67970..760fe80f6c14 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py index 558c8aab67c5..20a9cd975b02 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py index 17e93eea6bfe..f302df64f13f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py index dfb52bc1b522..92351953759c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index 8119e081115e..644df88a69ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py index e3d2c05b83d6..4e0e4cb32343 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 563cd5dd7682..1cbbf54c2525 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 2c311ca2c4ca..1802b33f7bb8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 07a5b114b047..55222f44434a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index dc833c8e5000..23337db892de 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 7b3fbccb9ffb..91410d213928 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 8028c4273108..3739e94e0567 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index df8a6cd7c841..85b3522cb282 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py index a051e1e14471..1e420395cc1d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py index a831851aa15d..8e73ea0855bf 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index 2e308e51ccff..15ea2482945d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py index 3385bd5f99e4..e263e176c702 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py index 4ea53ed516c6..ea546b2795ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_create_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py index 462ec0e4ee0d..720fc67ee399 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py index d933dd8a4521..610246d96510 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_delete_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py index e13bffbd78cc..241282db64fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py index 4a6053e1d95d..5de5dd3bba9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_get_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py index 19396f14baeb..67c9a3c86e46 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py index ec07ceff62c3..2922fb554895 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_list_instances_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py index 9a6d69923d3f..4d31d03f75aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py index 2429f106358d..b5ae40fbab41 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/redis_v1_generated_cloud_redis_update_instance_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py index 7022cedcf37c..f84bacfd1394 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index 0de70b17e015..54fece2f918d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py index 7b3de3117f38..191773d5572d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py index 05cb264cdb78..fd46390682ed 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py index 63c508e4b039..5d1f49a4daca 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py index d557c056c521..63ead5078e2c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_list_resources_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py index 619f1e00b638..d217b7b36e69 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py index 903281ab4c46..5fe826cddf2c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_bidi_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py index 678e3a60927f..14976e879c9a 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py index c878afbe7cb8..5e126783b114 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_lro_signatures_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py index 29a2ee485323..7401848a032e 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py index b840b45536a8..a0115000ae20 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_one_signature_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py index 76ad17bb6a83..2c7308dc72f2 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py index f148f2c53a9e..f08e0bbb6b76 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_method_server_streaming_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py index faedde72a35e..8077748eac63 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py index 352996bd1c77..da47d3740b3b 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py index 84b972a47089..13727405f18c 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_required_field_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py index 5474658577d8..60dbe379904f 100644 --- a/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py +++ b/packages/gapic-generator/tests/snippetgen/goldens/mollusca_v1_generated_snippets_one_of_method_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py index c02a3dd584b1..3c516f9f3138 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py index 7fff3fed8772..80848b821b0d 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py index c02a3dd584b1..3c516f9f3138 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_unflattenable.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py index 7f679080dc20..e5bd3cc9bcc6 100644 --- a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_void_method.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From a896aed79232340da40db5f8a8129f56ea3c035a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 17 Mar 2025 18:01:39 +0000 Subject: [PATCH 1267/1339] chore(main): release 1.23.6 (#2364) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index bdf0a4821c74..c79b6373422f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.23.6](https://github.com/googleapis/gapic-generator-python/compare/v1.23.5...v1.23.6) (2025-03-17) + + +### Documentation + +* Update copyright year ([#2363](https://github.com/googleapis/gapic-generator-python/issues/2363)) ([745ce7a](https://github.com/googleapis/gapic-generator-python/commit/745ce7af2978eff293e9abbad9a622fdfe7c782a)) + ## [1.23.5](https://github.com/googleapis/gapic-generator-python/compare/v1.23.4...v1.23.5) (2025-03-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 35fc65762a61..5d2bd2ff7565 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.5" +version = "1.23.6" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 094283cdd1f9edbe77b8d329c2b79840ce6ba93e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 19 Mar 2025 12:02:00 -0400 Subject: [PATCH 1268/1339] build: update constraints file to test latest major versions (#2362) --- 
.../gapic/templates/setup.py.j2 | 2 +- .../templates/testing/_default_constraints.j2 | 2 +- .../templates/testing/constraints-3.13.txt.j2 | 24 +++++++++++++++---- .../templates/testing/constraints-3.7.txt.j2 | 2 +- .../asset/testing/constraints-3.13.txt | 21 +++++++++------- .../credentials/testing/constraints-3.13.txt | 15 ++++++++---- .../eventarc/testing/constraints-3.13.txt | 17 ++++++++----- .../logging/testing/constraints-3.13.txt | 15 ++++++++---- .../testing/constraints-3.13.txt | 15 ++++++++---- .../redis/testing/constraints-3.13.txt | 15 ++++++++---- .../testing/constraints-3.13.txt | 15 ++++++++---- 11 files changed, 97 insertions(+), 46 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index db64df84e994..2f265dd994f1 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -42,7 +42,7 @@ dependencies = [ {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", {% for package_tuple, package_info in pypi_packages.items() %} - {# Quick check to make sure the package is different from this setup.py #} + {# Quick check to make sure `package_info.package_name` is not the package being generated so we don't circularly include this package in its own constraints file. 
#} {% if api.naming.warehouse_package_name != package_info.package_name %} {% if api.requires_package(package_tuple) %} "{{ package_info.package_name }} >= {{ package_info.lower_bound }}, <{{ package_info.upper_bound }}", diff --git a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 index 6a62cfc52d5e..74049d395c52 100644 --- a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 +++ b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 @@ -5,7 +5,7 @@ google-api-core proto-plus protobuf {% for package_tuple, package_info in pypi_packages.items() %} -{# Quick check to make sure the package is different from this setup.py #} +{# Quick check to make sure `package_info.package_name` is not the package being generated so we don't circularly include this package in its own constraints file. #} {% if api.naming.warehouse_package_name != package_info.package_name %} {% if api.requires_package(package_tuple) %} {{ package_info.package_name }} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 index 615c99518a63..ef07e240643b 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 @@ -1,4 +1,20 @@ -# -*- coding: utf-8 -*- -{% block constraints %} -{% include "testing/_default_constraints.j2" %} -{% endblock %} +{% from '_pypi_packages.j2' import pypi_packages %} +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +{% for package_tuple, package_info in pypi_packages.items() %} +{# Quick check to make sure `package_info.package_name` is not the package being generated so we don't circularly include this package in its own constraints file. #} +{% if api.naming.warehouse_package_name != package_info.package_name %} +{% if api.requires_package(package_tuple) %} +{{ package_info.package_name }}>={{ (package_info.upper_bound.split(".")[0] | int) - 1 }} +{% endif %} +{% endif %} +{% endfor %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 8dc46d8b47f7..74f8f1ad374a 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -10,7 +10,7 @@ google-auth==2.14.1 proto-plus==1.22.3 protobuf==3.20.2 {% for package_tuple, package_info in pypi_packages.items() %} -{# Quick check to make sure the package is different from this setup.py #} +{# Quick check to make sure `package_info.package_name` is not the package being generated so we don't circularly include this package in its own constraints file. 
#} {% if api.naming.warehouse_package_name != package_info.package_name %} {% if api.requires_package(package_tuple) %} {{ package_info.package_name }}=={{ package_info.lower_bound }} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt index 70744e58974a..763099d82412 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt @@ -1,9 +1,14 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -google-cloud-access-context-manager -google-cloud-os-config -grpc-google-iam-v1 +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +google-cloud-access-context-manager>=0 +google-cloud-os-config>=1 +grpc-google-iam-v1>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt index ed7f9aed2559..c20a77817caa 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt @@ -1,6 +1,11 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
+# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt index ad3f0fa58e2d..2010e549cceb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt @@ -1,7 +1,12 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 +# Require the latest major version be installed for each dependency. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt index ed7f9aed2559..c20a77817caa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt @@ -1,6 +1,11 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt index ed7f9aed2559..c20a77817caa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt @@ -1,6 +1,11 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. 
# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt index ed7f9aed2559..c20a77817caa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt @@ -1,6 +1,11 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt index ed7f9aed2559..c20a77817caa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt @@ -1,6 +1,11 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. 
+# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 From 68f3da8654413513694668c98eed98dd8ed8e4d4 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 19 Mar 2025 15:55:49 -0400 Subject: [PATCH 1269/1339] fix: Fixed internal method generation naming issues (#2365) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../%sub/services/%service/client.py.j2 | 2 +- .../%name_%version/%sub/test_%service.py.j2 | 12 +- .../gapic/samplegen/samplegen.py | 10 +- packages/gapic-generator/gapic/schema/api.py | 2 +- .../gapic-generator/gapic/schema/wrappers.py | 30 +- .../%sub/services/%service/async_client.py.j2 | 2 +- .../%sub/services/%service/client.py.j2 | 6 +- .../templates/examples/feature_fragments.j2 | 16 +- .../gapic/templates/examples/sample.py.j2 | 4 +- .../%name_%version/%sub/test_%service.py.j2 | 2 +- .../gapic/%name_%version/%sub/test_macros.j2 | 32 +- .../gapic-generator/test_utils/test_utils.py | 2 + .../cloud/logging_v2/gapic_metadata.json | 252 +-- .../config_service_v2/async_client.py | 120 +- .../services/config_service_v2/client.py | 120 +- .../services/config_service_v2/pagers.py | 48 +- .../config_service_v2/transports/base.py | 144 +- .../config_service_v2/transports/grpc.py | 240 +- .../transports/grpc_asyncio.py | 336 +-- .../metrics_service_v2/async_client.py | 26 +- .../services/metrics_service_v2/client.py | 26 +- .../services/metrics_service_v2/pagers.py | 12 +- .../metrics_service_v2/transports/base.py | 30 +- 
.../metrics_service_v2/transports/grpc.py | 50 +- .../transports/grpc_asyncio.py | 70 +- ...ice_v2_copy_log_entries_async_internal.py} | 8 +- ...vice_v2_copy_log_entries_sync_internal.py} | 8 +- ...ice_v2_create_exclusion_async_internal.py} | 8 +- ...vice_v2_create_exclusion_sync_internal.py} | 8 +- ..._service_v2_create_link_async_internal.py} | 8 +- ...g_service_v2_create_link_sync_internal.py} | 8 +- ..._service_v2_create_sink_async_internal.py} | 8 +- ...g_service_v2_create_sink_sync_internal.py} | 8 +- ..._service_v2_create_view_async_internal.py} | 8 +- ...g_service_v2_create_view_sync_internal.py} | 8 +- ...ice_v2_delete_exclusion_async_internal.py} | 8 +- ...vice_v2_delete_exclusion_sync_internal.py} | 8 +- ..._service_v2_delete_link_async_internal.py} | 8 +- ...g_service_v2_delete_link_sync_internal.py} | 8 +- ..._service_v2_delete_sink_async_internal.py} | 8 +- ...g_service_v2_delete_sink_sync_internal.py} | 8 +- ..._service_v2_delete_view_async_internal.py} | 8 +- ...g_service_v2_delete_view_sync_internal.py} | 8 +- ...ce_v2_get_cmek_settings_async_internal.py} | 8 +- ...ice_v2_get_cmek_settings_sync_internal.py} | 8 +- ...ervice_v2_get_exclusion_async_internal.py} | 8 +- ...service_v2_get_exclusion_sync_internal.py} | 8 +- ...fig_service_v2_get_link_async_internal.py} | 8 +- ...nfig_service_v2_get_link_sync_internal.py} | 8 +- ...service_v2_get_settings_async_internal.py} | 8 +- ..._service_v2_get_settings_sync_internal.py} | 8 +- ...fig_service_v2_get_sink_async_internal.py} | 8 +- ...nfig_service_v2_get_sink_sync_internal.py} | 8 +- ...fig_service_v2_get_view_async_internal.py} | 8 +- ...nfig_service_v2_get_view_sync_internal.py} | 8 +- ...vice_v2_list_exclusions_async_internal.py} | 8 +- ...rvice_v2_list_exclusions_sync_internal.py} | 8 +- ...g_service_v2_list_links_async_internal.py} | 8 +- ...ig_service_v2_list_links_sync_internal.py} | 8 +- ...g_service_v2_list_sinks_async_internal.py} | 8 +- ...ig_service_v2_list_sinks_sync_internal.py} | 8 +- 
...g_service_v2_list_views_async_internal.py} | 8 +- ...ig_service_v2_list_views_sync_internal.py} | 8 +- ...v2_update_cmek_settings_async_internal.py} | 8 +- ..._v2_update_cmek_settings_sync_internal.py} | 8 +- ...ice_v2_update_exclusion_async_internal.py} | 8 +- ...vice_v2_update_exclusion_sync_internal.py} | 8 +- ...vice_v2_update_settings_async_internal.py} | 8 +- ...rvice_v2_update_settings_sync_internal.py} | 8 +- ..._service_v2_update_sink_async_internal.py} | 8 +- ...g_service_v2_update_sink_sync_internal.py} | 8 +- ..._service_v2_update_view_async_internal.py} | 8 +- ...g_service_v2_update_view_sync_internal.py} | 8 +- ...ce_v2_create_log_metric_async_internal.py} | 8 +- ...ice_v2_create_log_metric_sync_internal.py} | 8 +- ...ce_v2_delete_log_metric_async_internal.py} | 8 +- ...ice_v2_delete_log_metric_sync_internal.py} | 8 +- ...rvice_v2_get_log_metric_async_internal.py} | 8 +- ...ervice_v2_get_log_metric_sync_internal.py} | 8 +- ...ice_v2_list_log_metrics_async_internal.py} | 8 +- ...vice_v2_list_log_metrics_sync_internal.py} | 8 +- ...ce_v2_update_log_metric_async_internal.py} | 8 +- ...ice_v2_update_log_metric_sync_internal.py} | 8 +- .../snippet_metadata_google.logging.v2.json | 1940 ++++++++--------- .../scripts/fixup_logging_v2_keywords.py | 58 +- .../logging_v2/test_config_service_v2.py | 688 +++--- .../logging_v2/test_metrics_service_v2.py | 152 +- .../tests/unit/common_types.py | 7 + .../golden_snippets/sample_basic_internal.py | 61 + .../tests/unit/samplegen/test_integration.py | 126 ++ .../tests/unit/samplegen/test_samplegen.py | 12 +- .../tests/unit/samplegen/test_template.py | 216 +- .../tests/unit/schema/test_api.py | 161 +- .../tests/unit/schema/wrappers/test_method.py | 20 +- 94 files changed, 3015 insertions(+), 2484 deletions(-) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py => 
logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py => logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_exclusion_async_internal.py => logging_v2_generated_config_service_v2_create_exclusion_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py => logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_link_async_internal.py => logging_v2_generated_config_service_v2_create_link_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_link_sync_internal.py => logging_v2_generated_config_service_v2_create_link_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_sink_async_internal.py => logging_v2_generated_config_service_v2_create_sink_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_sink_sync_internal.py => logging_v2_generated_config_service_v2_create_sink_sync_internal.py} (89%) rename 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_view_async_internal.py => logging_v2_generated_config_service_v2_create_view_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__create_view_sync_internal.py => logging_v2_generated_config_service_v2_create_view_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py => logging_v2_generated_config_service_v2_delete_exclusion_async_internal.py} (87%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py => logging_v2_generated_config_service_v2_delete_exclusion_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_link_async_internal.py => logging_v2_generated_config_service_v2_delete_link_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_link_sync_internal.py => logging_v2_generated_config_service_v2_delete_link_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_sink_async_internal.py => logging_v2_generated_config_service_v2_delete_sink_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_sink_sync_internal.py => 
logging_v2_generated_config_service_v2_delete_sink_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_view_async_internal.py => logging_v2_generated_config_service_v2_delete_view_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__delete_view_sync_internal.py => logging_v2_generated_config_service_v2_delete_view_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py => logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py => logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_exclusion_async_internal.py => logging_v2_generated_config_service_v2_get_exclusion_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py => logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_link_async_internal.py => logging_v2_generated_config_service_v2_get_link_async_internal.py} (89%) rename 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_link_sync_internal.py => logging_v2_generated_config_service_v2_get_link_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_settings_async_internal.py => logging_v2_generated_config_service_v2_get_settings_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_settings_sync_internal.py => logging_v2_generated_config_service_v2_get_settings_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_sink_async_internal.py => logging_v2_generated_config_service_v2_get_sink_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_sink_sync_internal.py => logging_v2_generated_config_service_v2_get_sink_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_view_async_internal.py => logging_v2_generated_config_service_v2_get_view_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__get_view_sync_internal.py => logging_v2_generated_config_service_v2_get_view_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_exclusions_async_internal.py => logging_v2_generated_config_service_v2_list_exclusions_async_internal.py} (88%) rename 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py => logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_links_async_internal.py => logging_v2_generated_config_service_v2_list_links_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_links_sync_internal.py => logging_v2_generated_config_service_v2_list_links_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_sinks_async_internal.py => logging_v2_generated_config_service_v2_list_sinks_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_sinks_sync_internal.py => logging_v2_generated_config_service_v2_list_sinks_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_views_async_internal.py => logging_v2_generated_config_service_v2_list_views_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__list_views_sync_internal.py => logging_v2_generated_config_service_v2_list_views_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py => 
logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py} (87%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py => logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py} (87%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_exclusion_async_internal.py => logging_v2_generated_config_service_v2_update_exclusion_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py => logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_settings_async_internal.py => logging_v2_generated_config_service_v2_update_settings_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_settings_sync_internal.py => logging_v2_generated_config_service_v2_update_settings_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_sink_async_internal.py => logging_v2_generated_config_service_v2_update_sink_async_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_sink_sync_internal.py => logging_v2_generated_config_service_v2_update_sink_sync_internal.py} (90%) rename 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_view_async_internal.py => logging_v2_generated_config_service_v2_update_view_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_config_service_v2__update_view_sync_internal.py => logging_v2_generated_config_service_v2_update_view_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py => logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py => logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py} (89%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py => logging_v2_generated_metrics_service_v2_delete_log_metric_async_internal.py} (87%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py => logging_v2_generated_metrics_service_v2_delete_log_metric_sync_internal.py} (87%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py => logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py => 
logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py => logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py => logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py => logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py} (88%) rename packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/{logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py => logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py} (89%) create mode 100644 packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 72f96abdb005..fef86ae05230 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -345,7 +345,7 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% if method.operation_service %}{# Extended Operations LRO #} def {{ method.name|snake_case }}_unary(self, {% else %} - def {{ method.safe_name|snake_case }}(self, + def {{ 
method.client_method_name|snake_case }}(self, {% endif %}{# Extended Operations LRO #} {% if not method.client_streaming %} request: Optional[Union[{{ method.input.ident }}, dict]] = None, diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 3c839f4c1177..cd6c05c44ee9 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -516,7 +516,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endif %} -{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} +{% for method in service.methods.values() if 'grpc' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %} @pytest.mark.parametrize("request_type", [ {{ method.input.ident }}, dict, @@ -579,7 +579,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): ) {% endif %} {% if method.client_streaming %} - response = client.{{ method.safe_name|snake_case }}(iter(requests)) + response = client.{{ method.client_method_name|snake_case }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -1053,7 +1053,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endfor %} {# method in methods for grpc #} -{% for method in service.methods.values() if 'rest' in opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %}{% if method.http_options %} +{% for method in service.methods.values() if 'rest' in 
opts.transport %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when client streaming are supported. #} {% if not method.client_streaming %} @pytest.mark.parametrize("request_type", [ @@ -1252,7 +1252,7 @@ def test_{{ method.name|snake_case }}_rest(request_type): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} - response = client.{{ method.safe_name|snake_case }}(iter(requests)) + response = client.{{ method.client_method_name|snake_case }}(iter(requests)) {% elif method.server_streaming %} with mock.patch.object(response_value, 'iter_content') as iter_content: iter_content.return_value = iter(json_return_value) @@ -1550,7 +1550,7 @@ def test_{{ method_name }}_rest_bad_request(transport: str = 'rest', request_typ req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} {% if method.client_streaming %} - client.{{ method.safe_name|snake_case }}(iter(requests)) + client.{{ method.client_method_name|snake_case }}(iter(requests)) {% else %} client.{{ method_name }}(request) {% endif %} @@ -1818,7 +1818,7 @@ def test_{{ method_name }}_rest_no_http_options(): {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/gapic/samplegen/samplegen.py 
b/packages/gapic-generator/gapic/samplegen/samplegen.py index cdc6ee3d142e..ada7dab13026 100644 --- a/packages/gapic-generator/gapic/samplegen/samplegen.py +++ b/packages/gapic-generator/gapic/samplegen/samplegen.py @@ -1063,8 +1063,9 @@ def generate_sample_specs( # [{START|END} ${apishortname}_${apiVersion}_generated_${serviceName}_${rpcName}_{sync|async|rest}] region_tag = f"{api_short_name}_{api_version}_generated_{service_name}_{rpc_name}_{sync_or_async}" - # We assume that the only methods that start with an underscore are internal methods. - is_internal = rpc_name.startswith("_") + is_internal = api_schema.all_methods[ + f"{api_schema.naming.proto_package}.{service_name}.{rpc_name}" + ].is_internal if is_internal: region_tag += "_internal" @@ -1119,7 +1120,9 @@ def _fill_sample_metadata(sample: dict, api_schema: api.API): # Client Method setattr(snippet_metadata.client_method, "async", async_) - snippet_metadata.client_method.short_name = utils.to_snake_case(method.name) + snippet_metadata.client_method.short_name = utils.to_snake_case( + method.client_method_name + ) snippet_metadata.client_method.full_name = f"{snippet_metadata.client_method.client.full_name}.{snippet_metadata.client_method.short_name}" if not method.void: @@ -1217,6 +1220,7 @@ def generate_sample( ) calling_form = types.CallingForm.method_default(rpc) + sample["is_internal"] = rpc.is_internal v = Validator(rpc, api_schema) # Tweak some small aspects of the sample to set defaults for optional diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index b9e95c304e5c..071bbd29339b 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -687,7 +687,7 @@ def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: transport.library_client = client_name for method in methods: method_desc = transport.rpcs.get_or_create(method.name) - 
method_desc.methods.append(to_snake_case(method.name)) + method_desc.methods.append(to_snake_case(method.client_method_name)) return gm diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 41bd2e202dd4..3982afea6c67 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1499,11 +1499,14 @@ def __getattr__(self, name): return getattr(self.method_pb, name) @property - def safe_name(self) -> str: - # Used to prevent collisions with python keywords at the client level + def client_method_name(self) -> str: + """Returns the name of the generated method name. - name = self.name - return name + "_" if name.lower() in keyword.kwlist else name + This is used to prevent collisions with Python keywords at the + client level, as well as prefix internal method names with underscore. + """ + name = self.name + "_" if self.name.lower() in keyword.kwlist else self.name + return make_private(name) if self.is_internal else name @property def transport_safe_name(self) -> str: @@ -1534,14 +1537,6 @@ def is_operation_polling_method(self): and self.options.Extensions[ex_ops_pb2.operation_polling_method] ) - @utils.cached_property - def name(self): - return ( - make_private(self.method_pb.name) - if self.is_internal - else self.method_pb.name - ) - @utils.cached_property def client_output(self): return self._client_output(enable_asyncio=False) @@ -2435,17 +2430,10 @@ def with_internal_methods(self, *, public_methods: Set[str]) -> "Service": Service: A version of this `Service` with `Method` objects corresponding to methods not in `public_methods` marked as internal. """ - - # Internal methods need to be keyed with underscore prefixed method names - # (e.g. google.Service.Method -> google.Service._Method) in order for - # samplegen to work properly. 
return dataclasses.replace( self, methods={ - meth.name: meth - for meth in ( - meth.with_internal_methods(public_methods=public_methods) - for meth in self.methods.values() - ) + k: v.with_internal_methods(public_methods=public_methods) + for k, v in self.methods.items() }, ) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 1f32c9acee29..d5425984cf7e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -266,7 +266,7 @@ class {{ service.async_client_name }}: ) {% for method in service.methods.values() %} - {% with method_name = method.safe_name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} + {% with method_name = method.client_method_name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %} {%+ if not method.server_streaming %}async {% endif %}def {{ method_name }}(self, {% endwith %} {% if not method.client_streaming %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 66ea9d12adf4..eefd22509094 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -654,11 +654,11 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): {% for method in service.methods.values() %} {% if method.operation_service %}{# Uses extended operations #} -{{ 
macros.client_method(method, method.safe_name|snake_case + "_unary", snippet_index, api, service) }} +{{ macros.client_method(method, method.client_method_name|snake_case + "_unary", snippet_index, api, service) }} -{{ macros.client_method(method, method.safe_name|snake_case, snippet_index, api, service, full_extended_lro=True) }} +{{ macros.client_method(method, method.client_method_name|snake_case, snippet_index, api, service, full_extended_lro=True) }} {% else %} -{{ macros.client_method(method, method.safe_name|snake_case, snippet_index, api, service) }} +{{ macros.client_method(method, method.client_method_name|snake_case, snippet_index, api, service) }} {% endif %} {% endfor %} diff --git a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 index aa1ae3208b3b..66815cbe8f11 100644 --- a/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 +++ b/packages/gapic-generator/gapic/templates/examples/feature_fragments.j2 @@ -235,10 +235,10 @@ await{{ " "}} {%- endif -%} {% if calling_form in [calling_form_enum.RequestStreamingBidi, calling_form_enum.RequestStreamingClient] %} -client.{{ sample.rpc|snake_case }}(requests=request_generator()) +client.{{ render_method_name(sample)|trim }}(requests=request_generator()) {% else %}{# TODO: deal with flattening #} {# TODO: set up client streaming once some questions are answered #} -client.{{ sample.rpc|snake_case }}({{ render_request_params_unary(sample.request)|trim }}) +client.{{ render_method_name(sample)|trim }}({{ render_request_params_unary(sample.request)|trim }}) {% endif %} {% endmacro %} @@ -305,11 +305,15 @@ response = {{ operation_text|trim }}.result() {% endif %} {% endmacro %} -{% macro render_method_name(method_name) %} -{{ method_name|snake_case }} +{% macro render_method_name(sample) %} +{% if sample.is_internal %} +_{{ sample.rpc|snake_case }} +{% else %} +{{ sample.rpc|snake_case }} +{% endif %} 
{% endmacro %} -{% macro render_main_block(method_name, full_request) %} +{% macro render_main_block(sample, full_request) %} def main(): import argparse @@ -330,7 +334,7 @@ def main(): {% endfor %} args = parser.parse_args() - sample_{{ render_method_name(method_name)|trim }}({{ arg_list|join(", ") }}) + sample_{{ render_method_name(sample)|trim }}({{ arg_list|join(", ") }}) if __name__ == "__main__": diff --git a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 index 1da8ecf6b2f9..68713f812b85 100644 --- a/packages/gapic-generator/gapic/templates/examples/sample.py.j2 +++ b/packages/gapic-generator/gapic/templates/examples/sample.py.j2 @@ -32,7 +32,7 @@ {# also need calling form #} -{% if sample.transport == "grpc-async" %}async {% endif %}def sample_{{ frags.render_method_name(sample.rpc)|trim }}({{ frags.print_input_params(sample.request)|trim }}): +{% if sample.transport == "grpc-async" %}async {% endif %}def sample_{{ sample.rpc|snake_case|trim }}({{ frags.print_input_params(sample.request)|trim }}): {{ frags.render_client_setup(sample.module_name, sample.client_name)|indent }} {{ frags.render_request_setup(sample.request, sample.request_module_name, sample.request_type, calling_form, calling_form_enum)|indent }} {% with method_call = frags.render_method_call(sample, calling_form, calling_form_enum, sample.transport) %} @@ -41,5 +41,5 @@ # [END {{ sample.id }}] {# TODO: Enable main block (or decide to remove main block from python sample) #} -{# {{ frags.render_main_block(sample.rpc, sample.request) }} #} +{# {{ frags.render_main_block(sample, sample.request) }} #} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index d0b8cf51bec5..a69026767cd1 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -927,7 +927,7 @@ def test_{{ service.client_name|snake_case }}_create_channel_credentials_file(cl {% endfor -%} {#- method in methods for rest #} {% for method in service.methods.values() if 'rest' in opts.transport and - not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} + not method.http_options %}{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %} def test_{{ method_name }}_rest_error(): client = {{ service.client_name }}( credentials=ga_credentials.AnonymousCredentials(), diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index 1a31294dee85..f15326d6703a 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1,5 +1,5 @@ {% macro grpc_required_tests(method, service, api, full_extended_lro=False) %} -{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} +{% with method_name = method.client_method_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.client_method_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %} @pytest.mark.parametrize("request_type", [ {{ 
method.input.ident }}, dict, @@ -59,7 +59,7 @@ def test_{{ method_name }}(request_type, transport: str = 'grpc'): ) {% endif %} {% if method.client_streaming %} - response = client.{{ method.safe_name|snake_case }}(iter(requests)) + response = client.{{ method.client_method_name|snake_case }}(iter(requests)) {% else %} response = client.{{ method_name }}(request) {% endif %} @@ -186,7 +186,7 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): client._transport._wrapped_methods[client._transport.{{method.transport_safe_name|snake_case}}] = mock_rpc {% if method.client_streaming %} request = [{}] - client.{{ method.safe_name|snake_case }}(request) + client.{{ method.client_method_name|snake_case }}(request) {% else %} request = {} client.{{ method_name }}(request) @@ -203,7 +203,7 @@ def test_{{ method_name }}_use_cached_wrapped_rpc(): {% endif %} {% if method.client_streaming %} - client.{{ method.safe_name|snake_case }}(request) + client.{{ method.client_method_name|snake_case }}(request) {% else %} client.{{ method_name }}(request) {% endif %} @@ -238,7 +238,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% if method.client_streaming %} request = [{}] - await client.{{ method.safe_name|snake_case }}(request) + await client.{{ method.client_method_name|snake_case }}(request) {% else %} request = {} await client.{{ method_name }}(request) @@ -255,7 +255,7 @@ async def test_{{ method_name }}_async_use_cached_wrapped_rpc(transport: str = " {% endif %} {% if method.client_streaming %} - await client.{{ method.safe_name|snake_case }}(request) + await client.{{ method.client_method_name|snake_case }}(request) {% else %} await client.{{ method_name }}(request) {% endif %} @@ -321,9 +321,9 @@ async def test_{{ method_name }}_async(transport: str = 'grpc_asyncio', request_ )) {% endif %} {% if method.client_streaming and method.server_streaming %} - response = await client.{{ method.safe_name|snake_case }}(iter(requests)) + 
response = await client.{{ method.client_method_name|snake_case }}(iter(requests)) {% elif method.client_streaming and not method.server_streaming %} - response = await (await client.{{ method.safe_name|snake_case }}(iter(requests))) + response = await (await client.{{ method.client_method_name|snake_case }}(iter(requests))) {% else %} response = await client.{{ method_name }}(request) {% endif %} @@ -1001,7 +1001,7 @@ def test_{{ method_name }}_raw_page_lro(): {% endmacro %} {% macro rest_required_tests(method, service, numeric_enums=False, full_extended_lro=False) %} -{% with method_name = method.safe_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.safe_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} +{% with method_name = method.client_method_name|snake_case + "_unary" if method.extended_lro and not full_extended_lro else method.client_method_name|snake_case, method_output = method.extended_lro.operation_type if method.extended_lro and not full_extended_lro else method.output %}{% if method.http_options %} {# TODO(kbandes): remove this if condition when lro and client streaming are supported. 
#} {% if not method.client_streaming %} def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): @@ -1027,7 +1027,7 @@ def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): {% if method.client_streaming %} request = [{}] - client.{{ method.safe_name|snake_case }}(request) + client.{{ method.client_method_name|snake_case }}(request) {% else %} request = {} client.{{ method_name }}(request) @@ -1043,7 +1043,7 @@ def test_{{ method_name }}_rest_use_cached_wrapped_rpc(): {% endif %} {% if method.client_streaming %} - client.{{ method.safe_name|snake_case }}(request) + client.{{ method.client_method_name|snake_case }}(request) {% else %} client.{{ method_name }}(request) {% endif %} @@ -1462,7 +1462,7 @@ def test_{{ method_name }}_rest_no_http_options(): #} {% macro method_call_test_generic(test_name, method, service, api, transport, request_dict, is_async=False, routing_param=None) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.safe_name|snake_case %} +{% with method_name = method.name|snake_case + "_unary" if method.operation_service else method.client_method_name|snake_case %} {% set async_method_prefix = "async " if is_async else "" %} {% if is_async %} @pytest.mark.asyncio @@ -1715,7 +1715,7 @@ def test_unsupported_parameter_rest_asyncio(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.safe_name|snake_case %} +{% set method_name = method.client_method_name|snake_case %} {{async_decorator}} {{async_prefix}}def test_{{ method_name }}_{{transport_name}}_error(): {% if transport_name == 'rest_asyncio' %} @@ -1765,7 +1765,7 @@ def test_initialize_client_w_{{transport_name}}(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% 
set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.safe_name|snake_case %} +{% set method_name = method.client_method_name|snake_case %} {% set mocked_session = "AsyncAuthorizedSession" if is_async else "Session" %} {{ async_decorator }} {{ async_prefix }}def test_{{ method_name }}_{{transport_name}}_bad_request(request_type={{ method.input.ident }}): @@ -1866,7 +1866,7 @@ def test_initialize_client_w_{{transport_name}}(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.safe_name|snake_case %} +{% set method_name = method.client_method_name|snake_case %} {# NOTE: set method_output to method.extended_lro.operation_type for the following method types: # (method.extended_lro and not full_extended_lro) #} @@ -2190,7 +2190,7 @@ def test_initialize_client_w_{{transport_name}}(): {% set async_prefix = get_async_prefix(is_async) %} {% set async_decorator = get_async_decorator(is_async) %} {% set transport_name = get_transport_name(transport, is_async) %} -{% set method_name = method.safe_name|snake_case %} +{% set method_name = method.client_method_name|snake_case %} {% set async_method_prefix = "Async" if is_async else "" %} {{async_decorator}} @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/gapic-generator/test_utils/test_utils.py b/packages/gapic-generator/test_utils/test_utils.py index c9fdc41bed4e..6006e00f176f 100644 --- a/packages/gapic-generator/test_utils/test_utils.py +++ b/packages/gapic-generator/test_utils/test_utils.py @@ -178,6 +178,7 @@ def make_method( signatures: typing.Sequence[str] = (), is_deprecated: bool = False, routing_rule: routing_pb2.RoutingRule = None, + is_internal: bool = False, **kwargs, ) -> wrappers.Method: # Use default input and output messages if they are not provided. 
@@ -225,6 +226,7 @@ def make_method( parent=(f"{name}Service",), ) ), + is_internal=is_internal, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json index 812fb89795a2..874731025667 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/gapic_metadata.json @@ -10,6 +10,11 @@ "grpc": { "libraryClient": "BaseConfigServiceV2Client", "rpcs": { + "CopyLogEntries": { + "methods": [ + "_copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" @@ -20,152 +25,147 @@ "create_bucket_async" ] }, - "DeleteBucket": { - "methods": [ - "delete_bucket" - ] - }, - "GetBucket": { - "methods": [ - "get_bucket" - ] - }, - "ListBuckets": { - "methods": [ - "list_buckets" - ] - }, - "UndeleteBucket": { - "methods": [ - "undelete_bucket" - ] - }, - "UpdateBucket": { - "methods": [ - "update_bucket" - ] - }, - "UpdateBucketAsync": { - "methods": [ - "update_bucket_async" - ] - }, - "_CopyLogEntries": { - "methods": [ - "_copy_log_entries" - ] - }, - "_CreateExclusion": { + "CreateExclusion": { "methods": [ "_create_exclusion" ] }, - "_CreateLink": { + "CreateLink": { "methods": [ "_create_link" ] }, - "_CreateSink": { + "CreateSink": { "methods": [ "_create_sink" ] }, - "_CreateView": { + "CreateView": { "methods": [ "_create_view" ] }, - "_DeleteExclusion": { + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { "methods": [ "_delete_exclusion" ] }, - "_DeleteLink": { + "DeleteLink": { "methods": [ "_delete_link" ] }, - "_DeleteSink": { + "DeleteSink": { "methods": [ "_delete_sink" ] }, - "_DeleteView": { + "DeleteView": { "methods": [ "_delete_view" ] }, - "_GetCmekSettings": { + 
"GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { "methods": [ "_get_cmek_settings" ] }, - "_GetExclusion": { + "GetExclusion": { "methods": [ "_get_exclusion" ] }, - "_GetLink": { + "GetLink": { "methods": [ "_get_link" ] }, - "_GetSettings": { + "GetSettings": { "methods": [ "_get_settings" ] }, - "_GetSink": { + "GetSink": { "methods": [ "_get_sink" ] }, - "_GetView": { + "GetView": { "methods": [ "_get_view" ] }, - "_ListExclusions": { + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { "methods": [ "_list_exclusions" ] }, - "_ListLinks": { + "ListLinks": { "methods": [ "_list_links" ] }, - "_ListSinks": { + "ListSinks": { "methods": [ "_list_sinks" ] }, - "_ListViews": { + "ListViews": { "methods": [ "_list_views" ] }, - "_UpdateCmekSettings": { + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, + "UpdateCmekSettings": { "methods": [ "_update_cmek_settings" ] }, - "_UpdateExclusion": { + "UpdateExclusion": { "methods": [ "_update_exclusion" ] }, - "_UpdateSettings": { + "UpdateSettings": { "methods": [ "_update_settings" ] }, - "_UpdateSink": { + "UpdateSink": { "methods": [ "_update_sink" ] }, - "_UpdateView": { + "UpdateView": { "methods": [ "_update_view" ] @@ -175,6 +175,11 @@ "grpc-async": { "libraryClient": "BaseConfigServiceV2AsyncClient", "rpcs": { + "CopyLogEntries": { + "methods": [ + "_copy_log_entries" + ] + }, "CreateBucket": { "methods": [ "create_bucket" @@ -185,152 +190,147 @@ "create_bucket_async" ] }, - "DeleteBucket": { - "methods": [ - "delete_bucket" - ] - }, - "GetBucket": { - "methods": [ - "get_bucket" - ] - }, - "ListBuckets": { - "methods": [ - "list_buckets" - ] - }, - "UndeleteBucket": { - "methods": [ - "undelete_bucket" - ] - }, - "UpdateBucket": { - "methods": [ - "update_bucket" - ] - }, - "UpdateBucketAsync": { - "methods": 
[ - "update_bucket_async" - ] - }, - "_CopyLogEntries": { - "methods": [ - "_copy_log_entries" - ] - }, - "_CreateExclusion": { + "CreateExclusion": { "methods": [ "_create_exclusion" ] }, - "_CreateLink": { + "CreateLink": { "methods": [ "_create_link" ] }, - "_CreateSink": { + "CreateSink": { "methods": [ "_create_sink" ] }, - "_CreateView": { + "CreateView": { "methods": [ "_create_view" ] }, - "_DeleteExclusion": { + "DeleteBucket": { + "methods": [ + "delete_bucket" + ] + }, + "DeleteExclusion": { "methods": [ "_delete_exclusion" ] }, - "_DeleteLink": { + "DeleteLink": { "methods": [ "_delete_link" ] }, - "_DeleteSink": { + "DeleteSink": { "methods": [ "_delete_sink" ] }, - "_DeleteView": { + "DeleteView": { "methods": [ "_delete_view" ] }, - "_GetCmekSettings": { + "GetBucket": { + "methods": [ + "get_bucket" + ] + }, + "GetCmekSettings": { "methods": [ "_get_cmek_settings" ] }, - "_GetExclusion": { + "GetExclusion": { "methods": [ "_get_exclusion" ] }, - "_GetLink": { + "GetLink": { "methods": [ "_get_link" ] }, - "_GetSettings": { + "GetSettings": { "methods": [ "_get_settings" ] }, - "_GetSink": { + "GetSink": { "methods": [ "_get_sink" ] }, - "_GetView": { + "GetView": { "methods": [ "_get_view" ] }, - "_ListExclusions": { + "ListBuckets": { + "methods": [ + "list_buckets" + ] + }, + "ListExclusions": { "methods": [ "_list_exclusions" ] }, - "_ListLinks": { + "ListLinks": { "methods": [ "_list_links" ] }, - "_ListSinks": { + "ListSinks": { "methods": [ "_list_sinks" ] }, - "_ListViews": { + "ListViews": { "methods": [ "_list_views" ] }, - "_UpdateCmekSettings": { + "UndeleteBucket": { + "methods": [ + "undelete_bucket" + ] + }, + "UpdateBucket": { + "methods": [ + "update_bucket" + ] + }, + "UpdateBucketAsync": { + "methods": [ + "update_bucket_async" + ] + }, + "UpdateCmekSettings": { "methods": [ "_update_cmek_settings" ] }, - "_UpdateExclusion": { + "UpdateExclusion": { "methods": [ "_update_exclusion" ] }, - "_UpdateSettings": { + "UpdateSettings": { 
"methods": [ "_update_settings" ] }, - "_UpdateSink": { + "UpdateSink": { "methods": [ "_update_sink" ] }, - "_UpdateView": { + "UpdateView": { "methods": [ "_update_view" ] @@ -418,27 +418,27 @@ "grpc": { "libraryClient": "BaseMetricsServiceV2Client", "rpcs": { - "_CreateLogMetric": { + "CreateLogMetric": { "methods": [ "_create_log_metric" ] }, - "_DeleteLogMetric": { + "DeleteLogMetric": { "methods": [ "_delete_log_metric" ] }, - "_GetLogMetric": { + "GetLogMetric": { "methods": [ "_get_log_metric" ] }, - "_ListLogMetrics": { + "ListLogMetrics": { "methods": [ "_list_log_metrics" ] }, - "_UpdateLogMetric": { + "UpdateLogMetric": { "methods": [ "_update_log_metric" ] @@ -448,27 +448,27 @@ "grpc-async": { "libraryClient": "BaseMetricsServiceV2AsyncClient", "rpcs": { - "_CreateLogMetric": { + "CreateLogMetric": { "methods": [ "_create_log_metric" ] }, - "_DeleteLogMetric": { + "DeleteLogMetric": { "methods": [ "_delete_log_metric" ] }, - "_GetLogMetric": { + "GetLogMetric": { "methods": [ "_get_log_metric" ] }, - "_ListLogMetrics": { + "ListLogMetrics": { "methods": [ "_list_log_metrics" ] }, - "_UpdateLogMetric": { + "UpdateLogMetric": { "methods": [ "_update_log_metric" ] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index 4dec03baf71d..e53cf09afde6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -1022,7 +1022,7 @@ async def _list_views(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), - ) -> pagers._ListViewsAsyncPager: + ) -> pagers.ListViewsAsyncPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1036,7 +1036,7 @@ async def _list_views(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__list_views(): + async def sample_list_views(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1074,7 +1074,7 @@ async def sample__list_views(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsAsyncPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager: The response from ListViews. Iterating over this object will yield @@ -1103,7 +1103,7 @@ async def sample__list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._list_views] + rpc = self._client._transport._wrapped_methods[self._client._transport.list_views] # Certain fields should be provided within the metadata header; # add these here. @@ -1126,7 +1126,7 @@ async def sample__list_views(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers._ListViewsAsyncPager( + response = pagers.ListViewsAsyncPager( method=rpc, request=request, response=response, @@ -1158,7 +1158,7 @@ async def _get_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_view(): + async def sample_get_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1198,7 +1198,7 @@ async def sample__get_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._get_view] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1243,7 +1243,7 @@ async def _create_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__create_view(): + async def sample_create_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1284,7 +1284,7 @@ async def sample__create_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._create_view] + rpc = self._client._transport._wrapped_methods[self._client._transport.create_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1332,7 +1332,7 @@ async def _update_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__update_view(): + async def sample_update_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1372,7 +1372,7 @@ async def sample__update_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._update_view] + rpc = self._client._transport._wrapped_methods[self._client._transport.update_view] # Certain fields should be provided within the metadata header; # add these here. 
@@ -1419,7 +1419,7 @@ async def _delete_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__delete_view(): + async def sample_delete_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1450,7 +1450,7 @@ async def sample__delete_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._delete_view] + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1478,7 +1478,7 @@ async def _list_sinks(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListSinksAsyncPager: + ) -> pagers.ListSinksAsyncPager: r"""Lists sinks. .. code-block:: python @@ -1492,7 +1492,7 @@ async def _list_sinks(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__list_sinks(): + async def sample_list_sinks(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1534,7 +1534,7 @@ async def sample__list_sinks(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksAsyncPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager: Result returned from ListSinks. Iterating over this object will yield results and @@ -1562,7 +1562,7 @@ async def sample__list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._list_sinks] + rpc = self._client._transport._wrapped_methods[self._client._transport.list_sinks] # Certain fields should be provided within the metadata header; # add these here. @@ -1585,7 +1585,7 @@ async def sample__list_sinks(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers._ListSinksAsyncPager( + response = pagers.ListSinksAsyncPager( method=rpc, request=request, response=response, @@ -1618,7 +1618,7 @@ async def _get_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_sink(): + async def sample_get_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1695,7 +1695,7 @@ async def sample__get_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._get_sink] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -1745,7 +1745,7 @@ async def _create_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__create_sink(): + async def sample_create_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -1836,7 +1836,7 @@ async def sample__create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._create_sink] + rpc = self._client._transport._wrapped_methods[self._client._transport.create_sink] # Certain fields should be provided within the metadata header; # add these here. 
@@ -1888,7 +1888,7 @@ async def _update_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__update_sink(): + async def sample_update_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2004,7 +2004,7 @@ async def sample__update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._update_sink] + rpc = self._client._transport._wrapped_methods[self._client._transport.update_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -2050,7 +2050,7 @@ async def _delete_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__delete_sink(): + async def sample_delete_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2112,7 +2112,7 @@ async def sample__delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._delete_sink] + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -2159,7 +2159,7 @@ async def _create_link(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__create_link(): + async def sample_create_link(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2251,7 +2251,7 @@ async def sample__create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._create_link] + rpc = self._client._transport._wrapped_methods[self._client._transport.create_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2305,7 +2305,7 @@ async def _delete_link(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__delete_link(): + async def sample_delete_link(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2383,7 +2383,7 @@ async def sample__delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._delete_link] + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2422,7 +2422,7 @@ async def _list_links(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListLinksAsyncPager: + ) -> pagers.ListLinksAsyncPager: r"""Lists links. .. code-block:: python @@ -2436,7 +2436,7 @@ async def _list_links(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__list_links(): + async def sample_list_links(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2476,7 +2476,7 @@ async def sample__list_links(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksAsyncPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager: The response from ListLinks. 
Iterating over this object will yield @@ -2505,7 +2505,7 @@ async def sample__list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._list_links] + rpc = self._client._transport._wrapped_methods[self._client._transport.list_links] # Certain fields should be provided within the metadata header; # add these here. @@ -2528,7 +2528,7 @@ async def sample__list_links(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers._ListLinksAsyncPager( + response = pagers.ListLinksAsyncPager( method=rpc, request=request, response=response, @@ -2561,7 +2561,7 @@ async def _get_link(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_link(): + async def sample_get_link(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2625,7 +2625,7 @@ async def sample__get_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._get_link] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2656,7 +2656,7 @@ async def _list_exclusions(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListExclusionsAsyncPager: + ) -> pagers.ListExclusionsAsyncPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. 
@@ -2671,7 +2671,7 @@ async def _list_exclusions(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__list_exclusions(): + async def sample_list_exclusions(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2713,7 +2713,7 @@ async def sample__list_exclusions(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsAsyncPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager: Result returned from ListExclusions. Iterating over this object will yield results and @@ -2741,7 +2741,7 @@ async def sample__list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._list_exclusions] + rpc = self._client._transport._wrapped_methods[self._client._transport.list_exclusions] # Certain fields should be provided within the metadata header; # add these here. @@ -2764,7 +2764,7 @@ async def sample__list_exclusions(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers._ListExclusionsAsyncPager( + response = pagers.ListExclusionsAsyncPager( method=rpc, request=request, response=response, @@ -2797,7 +2797,7 @@ async def _get_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_exclusion(): + async def sample_get_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -2872,7 +2872,7 @@ async def sample__get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._get_exclusion] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -2920,7 +2920,7 @@ async def _create_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__create_exclusion(): + async def sample_create_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3012,7 +3012,7 @@ async def sample__create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._create_exclusion] + rpc = self._client._transport._wrapped_methods[self._client._transport.create_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3060,7 +3060,7 @@ async def _update_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__update_exclusion(): + async def sample_update_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3166,7 +3166,7 @@ async def sample__update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._update_exclusion] + rpc = self._client._transport._wrapped_methods[self._client._transport.update_exclusion] # Certain fields should be provided within the metadata header; # add these here. 
@@ -3211,7 +3211,7 @@ async def _delete_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__delete_exclusion(): + async def sample_delete_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3273,7 +3273,7 @@ async def sample__delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._delete_exclusion] + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3323,7 +3323,7 @@ async def _get_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_cmek_settings(): + async def sample_get_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3378,7 +3378,7 @@ async def sample__get_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._get_cmek_settings] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3436,7 +3436,7 @@ async def _update_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__update_cmek_settings(): + async def sample_update_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3491,7 +3491,7 @@ async def sample__update_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._update_cmek_settings] + rpc = self._client._transport._wrapped_methods[self._client._transport.update_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3546,7 +3546,7 @@ async def _get_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_settings(): + async def sample_get_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3629,7 +3629,7 @@ async def sample__get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._get_settings] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3692,7 +3692,7 @@ async def _update_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__update_settings(): + async def sample_update_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3777,7 +3777,7 @@ async def sample__update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._update_settings] + rpc = self._client._transport._wrapped_methods[self._client._transport.update_settings] # Certain fields should be provided within the metadata header; # add these here. 
@@ -3822,7 +3822,7 @@ async def _copy_log_entries(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__copy_log_entries(): + async def sample_copy_log_entries(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -3871,7 +3871,7 @@ async def sample__copy_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._copy_log_entries] + rpc = self._client._transport._wrapped_methods[self._client._transport.copy_log_entries] # Validate the universe domain. self._client._validate_universe_domain() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index abffbaec718d..a9adf37b3efa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -1422,7 +1422,7 @@ def _list_views(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListViewsPager: + ) -> pagers.ListViewsPager: r"""Lists views on a log bucket. .. code-block:: python @@ -1436,7 +1436,7 @@ def _list_views(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__list_views(): + def sample_list_views(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -1474,7 +1474,7 @@ def sample__list_views(): be of type `bytes`. 
Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager: The response from ListViews. Iterating over this object will yield @@ -1502,7 +1502,7 @@ def sample__list_views(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._list_views] + rpc = self._transport._wrapped_methods[self._transport.list_views] # Certain fields should be provided within the metadata header; # add these here. @@ -1525,7 +1525,7 @@ def sample__list_views(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers._ListViewsPager( + response = pagers.ListViewsPager( method=rpc, request=request, response=response, @@ -1557,7 +1557,7 @@ def _get_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_view(): + def sample_get_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -1597,7 +1597,7 @@ def sample__get_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_view] + rpc = self._transport._wrapped_methods[self._transport.get_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1642,7 +1642,7 @@ def _create_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__create_view(): + def sample_create_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -1683,7 +1683,7 @@ def sample__create_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport._create_view] + rpc = self._transport._wrapped_methods[self._transport.create_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1731,7 +1731,7 @@ def _update_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__update_view(): + def sample_update_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -1771,7 +1771,7 @@ def sample__update_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._update_view] + rpc = self._transport._wrapped_methods[self._transport.update_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1818,7 +1818,7 @@ def _delete_view(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__delete_view(): + def sample_delete_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -1849,7 +1849,7 @@ def sample__delete_view(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._delete_view] + rpc = self._transport._wrapped_methods[self._transport.delete_view] # Certain fields should be provided within the metadata header; # add these here. @@ -1877,7 +1877,7 @@ def _list_sinks(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListSinksPager: + ) -> pagers.ListSinksPager: r"""Lists sinks. .. 
code-block:: python @@ -1891,7 +1891,7 @@ def _list_sinks(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__list_sinks(): + def sample_list_sinks(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -1933,7 +1933,7 @@ def sample__list_sinks(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager: Result returned from ListSinks. Iterating over this object will yield results and @@ -1960,7 +1960,7 @@ def sample__list_sinks(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._list_sinks] + rpc = self._transport._wrapped_methods[self._transport.list_sinks] # Certain fields should be provided within the metadata header; # add these here. @@ -1983,7 +1983,7 @@ def sample__list_sinks(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers._ListSinksPager( + response = pagers.ListSinksPager( method=rpc, request=request, response=response, @@ -2016,7 +2016,7 @@ def _get_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_sink(): + def sample_get_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2092,7 +2092,7 @@ def sample__get_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_sink] + rpc = self._transport._wrapped_methods[self._transport.get_sink] # Certain fields should be provided within the metadata header; # add these here. 
@@ -2142,7 +2142,7 @@ def _create_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__create_sink(): + def sample_create_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2232,7 +2232,7 @@ def sample__create_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._create_sink] + rpc = self._transport._wrapped_methods[self._transport.create_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -2284,7 +2284,7 @@ def _update_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__update_sink(): + def sample_update_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2399,7 +2399,7 @@ def sample__update_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._update_sink] + rpc = self._transport._wrapped_methods[self._transport.update_sink] # Certain fields should be provided within the metadata header; # add these here. @@ -2445,7 +2445,7 @@ def _delete_sink(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__delete_sink(): + def sample_delete_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2506,7 +2506,7 @@ def sample__delete_sink(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._delete_sink] + rpc = self._transport._wrapped_methods[self._transport.delete_sink] # Certain fields should be provided within the metadata header; # add these here. 
@@ -2553,7 +2553,7 @@ def _create_link(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__create_link(): + def sample_create_link(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2644,7 +2644,7 @@ def sample__create_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._create_link] + rpc = self._transport._wrapped_methods[self._transport.create_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2698,7 +2698,7 @@ def _delete_link(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__delete_link(): + def sample_delete_link(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2775,7 +2775,7 @@ def sample__delete_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._delete_link] + rpc = self._transport._wrapped_methods[self._transport.delete_link] # Certain fields should be provided within the metadata header; # add these here. @@ -2814,7 +2814,7 @@ def _list_links(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListLinksPager: + ) -> pagers.ListLinksPager: r"""Lists links. .. code-block:: python @@ -2828,7 +2828,7 @@ def _list_links(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__list_links(): + def sample_list_links(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -2868,7 +2868,7 @@ def sample__list_links(): be of type `bytes`. 
Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager: The response from ListLinks. Iterating over this object will yield @@ -2896,7 +2896,7 @@ def sample__list_links(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._list_links] + rpc = self._transport._wrapped_methods[self._transport.list_links] # Certain fields should be provided within the metadata header; # add these here. @@ -2919,7 +2919,7 @@ def sample__list_links(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers._ListLinksPager( + response = pagers.ListLinksPager( method=rpc, request=request, response=response, @@ -2952,7 +2952,7 @@ def _get_link(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_link(): + def sample_get_link(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3015,7 +3015,7 @@ def sample__get_link(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_link] + rpc = self._transport._wrapped_methods[self._transport.get_link] # Certain fields should be provided within the metadata header; # add these here. @@ -3046,7 +3046,7 @@ def _list_exclusions(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListExclusionsPager: + ) -> pagers.ListExclusionsPager: r"""Lists all the exclusions on the \_Default sink in a parent resource. 
@@ -3061,7 +3061,7 @@ def _list_exclusions(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__list_exclusions(): + def sample_list_exclusions(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3103,7 +3103,7 @@ def sample__list_exclusions(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsPager: + google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager: Result returned from ListExclusions. Iterating over this object will yield results and @@ -3130,7 +3130,7 @@ def sample__list_exclusions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._list_exclusions] + rpc = self._transport._wrapped_methods[self._transport.list_exclusions] # Certain fields should be provided within the metadata header; # add these here. @@ -3153,7 +3153,7 @@ def sample__list_exclusions(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers._ListExclusionsPager( + response = pagers.ListExclusionsPager( method=rpc, request=request, response=response, @@ -3186,7 +3186,7 @@ def _get_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_exclusion(): + def sample_get_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3260,7 +3260,7 @@ def sample__get_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_exclusion] + rpc = self._transport._wrapped_methods[self._transport.get_exclusion] # Certain fields should be provided within the metadata header; # add these here. 
@@ -3308,7 +3308,7 @@ def _create_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__create_exclusion(): + def sample_create_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3399,7 +3399,7 @@ def sample__create_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._create_exclusion] + rpc = self._transport._wrapped_methods[self._transport.create_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3447,7 +3447,7 @@ def _update_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__update_exclusion(): + def sample_update_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3552,7 +3552,7 @@ def sample__update_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._update_exclusion] + rpc = self._transport._wrapped_methods[self._transport.update_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3597,7 +3597,7 @@ def _delete_exclusion(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__delete_exclusion(): + def sample_delete_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3658,7 +3658,7 @@ def sample__delete_exclusion(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport._delete_exclusion] + rpc = self._transport._wrapped_methods[self._transport.delete_exclusion] # Certain fields should be provided within the metadata header; # add these here. @@ -3708,7 +3708,7 @@ def _get_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_cmek_settings(): + def sample_get_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3763,7 +3763,7 @@ def sample__get_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_cmek_settings] + rpc = self._transport._wrapped_methods[self._transport.get_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -3821,7 +3821,7 @@ def _update_cmek_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__update_cmek_settings(): + def sample_update_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -3876,7 +3876,7 @@ def sample__update_cmek_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._update_cmek_settings] + rpc = self._transport._wrapped_methods[self._transport.update_cmek_settings] # Certain fields should be provided within the metadata header; # add these here. 
@@ -3931,7 +3931,7 @@ def _get_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_settings(): + def sample_get_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -4013,7 +4013,7 @@ def sample__get_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_settings] + rpc = self._transport._wrapped_methods[self._transport.get_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -4076,7 +4076,7 @@ def _update_settings(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__update_settings(): + def sample_update_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -4160,7 +4160,7 @@ def sample__update_settings(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._update_settings] + rpc = self._transport._wrapped_methods[self._transport.update_settings] # Certain fields should be provided within the metadata header; # add these here. @@ -4205,7 +4205,7 @@ def _copy_log_entries(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__copy_log_entries(): + def sample_copy_log_entries(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -4254,7 +4254,7 @@ def sample__copy_log_entries(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._copy_log_entries] + rpc = self._transport._wrapped_methods[self._transport.copy_log_entries] # Validate the universe domain. 
self._validate_universe_domain() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py index 6f4fce6dc3e8..1af6b54c9924 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/pagers.py @@ -166,8 +166,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListViewsPager: - """A pager for iterating through ``_list_views`` requests. +class ListViewsPager: + """A pager for iterating through ``list_views`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and @@ -175,7 +175,7 @@ class _ListViewsPager: ``views`` field. If there are more pages, the ``__iter__`` method will make additional - ``_ListViews`` requests and continue to iterate + ``ListViews`` requests and continue to iterate through the ``views`` field on the corresponding responses. @@ -234,8 +234,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListViewsAsyncPager: - """A pager for iterating through ``_list_views`` requests. +class ListViewsAsyncPager: + """A pager for iterating through ``list_views`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListViewsResponse` object, and @@ -243,7 +243,7 @@ class _ListViewsAsyncPager: ``views`` field. If there are more pages, the ``__aiter__`` method will make additional - ``_ListViews`` requests and continue to iterate + ``ListViews`` requests and continue to iterate through the ``views`` field on the corresponding responses. 
@@ -305,8 +305,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListSinksPager: - """A pager for iterating through ``_list_sinks`` requests. +class ListSinksPager: + """A pager for iterating through ``list_sinks`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and @@ -314,7 +314,7 @@ class _ListSinksPager: ``sinks`` field. If there are more pages, the ``__iter__`` method will make additional - ``_ListSinks`` requests and continue to iterate + ``ListSinks`` requests and continue to iterate through the ``sinks`` field on the corresponding responses. @@ -373,8 +373,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListSinksAsyncPager: - """A pager for iterating through ``_list_sinks`` requests. +class ListSinksAsyncPager: + """A pager for iterating through ``list_sinks`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListSinksResponse` object, and @@ -382,7 +382,7 @@ class _ListSinksAsyncPager: ``sinks`` field. If there are more pages, the ``__aiter__`` method will make additional - ``_ListSinks`` requests and continue to iterate + ``ListSinks`` requests and continue to iterate through the ``sinks`` field on the corresponding responses. @@ -444,8 +444,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListLinksPager: - """A pager for iterating through ``_list_links`` requests. +class ListLinksPager: + """A pager for iterating through ``list_links`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and @@ -453,7 +453,7 @@ class _ListLinksPager: ``links`` field. 
If there are more pages, the ``__iter__`` method will make additional - ``_ListLinks`` requests and continue to iterate + ``ListLinks`` requests and continue to iterate through the ``links`` field on the corresponding responses. @@ -512,8 +512,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListLinksAsyncPager: - """A pager for iterating through ``_list_links`` requests. +class ListLinksAsyncPager: + """A pager for iterating through ``list_links`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListLinksResponse` object, and @@ -521,7 +521,7 @@ class _ListLinksAsyncPager: ``links`` field. If there are more pages, the ``__aiter__`` method will make additional - ``_ListLinks`` requests and continue to iterate + ``ListLinks`` requests and continue to iterate through the ``links`` field on the corresponding responses. @@ -583,8 +583,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListExclusionsPager: - """A pager for iterating through ``_list_exclusions`` requests. +class ListExclusionsPager: + """A pager for iterating through ``list_exclusions`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and @@ -592,7 +592,7 @@ class _ListExclusionsPager: ``exclusions`` field. If there are more pages, the ``__iter__`` method will make additional - ``_ListExclusions`` requests and continue to iterate + ``ListExclusions`` requests and continue to iterate through the ``exclusions`` field on the corresponding responses. @@ -651,8 +651,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListExclusionsAsyncPager: - """A pager for iterating through ``_list_exclusions`` requests. +class ListExclusionsAsyncPager: + """A pager for iterating through ``list_exclusions`` requests. 
This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListExclusionsResponse` object, and @@ -660,7 +660,7 @@ class _ListExclusionsAsyncPager: ``exclusions`` field. If there are more pages, the ``__aiter__`` method will make additional - ``_ListExclusions`` requests and continue to iterate + ``ListExclusions`` requests and continue to iterate through the ``exclusions`` field on the corresponding responses. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 82f9f7d683e6..479f4fdb171c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -165,33 +165,33 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self._list_views: gapic_v1.method.wrap_method( - self._list_views, + self.list_views: gapic_v1.method.wrap_method( + self.list_views, default_timeout=None, client_info=client_info, ), - self._get_view: gapic_v1.method.wrap_method( - self._get_view, + self.get_view: gapic_v1.method.wrap_method( + self.get_view, default_timeout=None, client_info=client_info, ), - self._create_view: gapic_v1.method.wrap_method( - self._create_view, + self.create_view: gapic_v1.method.wrap_method( + self.create_view, default_timeout=None, client_info=client_info, ), - self._update_view: gapic_v1.method.wrap_method( - self._update_view, + self.update_view: gapic_v1.method.wrap_method( + self.update_view, default_timeout=None, client_info=client_info, ), - self._delete_view: gapic_v1.method.wrap_method( - self._delete_view, + 
self.delete_view: gapic_v1.method.wrap_method( + self.delete_view, default_timeout=None, client_info=client_info, ), - self._list_sinks: gapic_v1.method.wrap_method( - self._list_sinks, + self.list_sinks: gapic_v1.method.wrap_method( + self.list_sinks, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -206,8 +206,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._get_sink: gapic_v1.method.wrap_method( - self._get_sink, + self.get_sink: gapic_v1.method.wrap_method( + self.get_sink, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -222,13 +222,13 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_sink: gapic_v1.method.wrap_method( - self._create_sink, + self.create_sink: gapic_v1.method.wrap_method( + self.create_sink, default_timeout=120.0, client_info=client_info, ), - self._update_sink: gapic_v1.method.wrap_method( - self._update_sink, + self.update_sink: gapic_v1.method.wrap_method( + self.update_sink, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -243,8 +243,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._delete_sink: gapic_v1.method.wrap_method( - self._delete_sink, + self.delete_sink: gapic_v1.method.wrap_method( + self.delete_sink, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -259,28 +259,28 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_link: gapic_v1.method.wrap_method( - self._create_link, + self.create_link: gapic_v1.method.wrap_method( + self.create_link, default_timeout=None, client_info=client_info, ), - self._delete_link: gapic_v1.method.wrap_method( - self._delete_link, + self.delete_link: gapic_v1.method.wrap_method( + self.delete_link, default_timeout=None, client_info=client_info, ), - self._list_links: gapic_v1.method.wrap_method( - self._list_links, + 
self.list_links: gapic_v1.method.wrap_method( + self.list_links, default_timeout=None, client_info=client_info, ), - self._get_link: gapic_v1.method.wrap_method( - self._get_link, + self.get_link: gapic_v1.method.wrap_method( + self.get_link, default_timeout=None, client_info=client_info, ), - self._list_exclusions: gapic_v1.method.wrap_method( - self._list_exclusions, + self.list_exclusions: gapic_v1.method.wrap_method( + self.list_exclusions, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -295,8 +295,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._get_exclusion: gapic_v1.method.wrap_method( - self._get_exclusion, + self.get_exclusion: gapic_v1.method.wrap_method( + self.get_exclusion, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -311,18 +311,18 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_exclusion: gapic_v1.method.wrap_method( - self._create_exclusion, + self.create_exclusion: gapic_v1.method.wrap_method( + self.create_exclusion, default_timeout=120.0, client_info=client_info, ), - self._update_exclusion: gapic_v1.method.wrap_method( - self._update_exclusion, + self.update_exclusion: gapic_v1.method.wrap_method( + self.update_exclusion, default_timeout=120.0, client_info=client_info, ), - self._delete_exclusion: gapic_v1.method.wrap_method( - self._delete_exclusion, + self.delete_exclusion: gapic_v1.method.wrap_method( + self.delete_exclusion, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -337,28 +337,28 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._get_cmek_settings: gapic_v1.method.wrap_method( - self._get_cmek_settings, + self.get_cmek_settings: gapic_v1.method.wrap_method( + self.get_cmek_settings, default_timeout=None, client_info=client_info, ), - self._update_cmek_settings: gapic_v1.method.wrap_method( - 
self._update_cmek_settings, + self.update_cmek_settings: gapic_v1.method.wrap_method( + self.update_cmek_settings, default_timeout=None, client_info=client_info, ), - self._get_settings: gapic_v1.method.wrap_method( - self._get_settings, + self.get_settings: gapic_v1.method.wrap_method( + self.get_settings, default_timeout=None, client_info=client_info, ), - self._update_settings: gapic_v1.method.wrap_method( - self._update_settings, + self.update_settings: gapic_v1.method.wrap_method( + self.update_settings, default_timeout=None, client_info=client_info, ), - self._copy_log_entries: gapic_v1.method.wrap_method( - self._copy_log_entries, + self.copy_log_entries: gapic_v1.method.wrap_method( + self.copy_log_entries, default_timeout=None, client_info=client_info, ), @@ -466,7 +466,7 @@ def undelete_bucket(self) -> Callable[ raise NotImplementedError() @property - def _list_views(self) -> Callable[ + def list_views(self) -> Callable[ [logging_config.ListViewsRequest], Union[ logging_config.ListViewsResponse, @@ -475,7 +475,7 @@ def _list_views(self) -> Callable[ raise NotImplementedError() @property - def _get_view(self) -> Callable[ + def get_view(self) -> Callable[ [logging_config.GetViewRequest], Union[ logging_config.LogView, @@ -484,7 +484,7 @@ def _get_view(self) -> Callable[ raise NotImplementedError() @property - def _create_view(self) -> Callable[ + def create_view(self) -> Callable[ [logging_config.CreateViewRequest], Union[ logging_config.LogView, @@ -493,7 +493,7 @@ def _create_view(self) -> Callable[ raise NotImplementedError() @property - def _update_view(self) -> Callable[ + def update_view(self) -> Callable[ [logging_config.UpdateViewRequest], Union[ logging_config.LogView, @@ -502,7 +502,7 @@ def _update_view(self) -> Callable[ raise NotImplementedError() @property - def _delete_view(self) -> Callable[ + def delete_view(self) -> Callable[ [logging_config.DeleteViewRequest], Union[ empty_pb2.Empty, @@ -511,7 +511,7 @@ def _delete_view(self) -> 
Callable[ raise NotImplementedError() @property - def _list_sinks(self) -> Callable[ + def list_sinks(self) -> Callable[ [logging_config.ListSinksRequest], Union[ logging_config.ListSinksResponse, @@ -520,7 +520,7 @@ def _list_sinks(self) -> Callable[ raise NotImplementedError() @property - def _get_sink(self) -> Callable[ + def get_sink(self) -> Callable[ [logging_config.GetSinkRequest], Union[ logging_config.LogSink, @@ -529,7 +529,7 @@ def _get_sink(self) -> Callable[ raise NotImplementedError() @property - def _create_sink(self) -> Callable[ + def create_sink(self) -> Callable[ [logging_config.CreateSinkRequest], Union[ logging_config.LogSink, @@ -538,7 +538,7 @@ def _create_sink(self) -> Callable[ raise NotImplementedError() @property - def _update_sink(self) -> Callable[ + def update_sink(self) -> Callable[ [logging_config.UpdateSinkRequest], Union[ logging_config.LogSink, @@ -547,7 +547,7 @@ def _update_sink(self) -> Callable[ raise NotImplementedError() @property - def _delete_sink(self) -> Callable[ + def delete_sink(self) -> Callable[ [logging_config.DeleteSinkRequest], Union[ empty_pb2.Empty, @@ -556,7 +556,7 @@ def _delete_sink(self) -> Callable[ raise NotImplementedError() @property - def _create_link(self) -> Callable[ + def create_link(self) -> Callable[ [logging_config.CreateLinkRequest], Union[ operations_pb2.Operation, @@ -565,7 +565,7 @@ def _create_link(self) -> Callable[ raise NotImplementedError() @property - def _delete_link(self) -> Callable[ + def delete_link(self) -> Callable[ [logging_config.DeleteLinkRequest], Union[ operations_pb2.Operation, @@ -574,7 +574,7 @@ def _delete_link(self) -> Callable[ raise NotImplementedError() @property - def _list_links(self) -> Callable[ + def list_links(self) -> Callable[ [logging_config.ListLinksRequest], Union[ logging_config.ListLinksResponse, @@ -583,7 +583,7 @@ def _list_links(self) -> Callable[ raise NotImplementedError() @property - def _get_link(self) -> Callable[ + def get_link(self) -> 
Callable[ [logging_config.GetLinkRequest], Union[ logging_config.Link, @@ -592,7 +592,7 @@ def _get_link(self) -> Callable[ raise NotImplementedError() @property - def _list_exclusions(self) -> Callable[ + def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], Union[ logging_config.ListExclusionsResponse, @@ -601,7 +601,7 @@ def _list_exclusions(self) -> Callable[ raise NotImplementedError() @property - def _get_exclusion(self) -> Callable[ + def get_exclusion(self) -> Callable[ [logging_config.GetExclusionRequest], Union[ logging_config.LogExclusion, @@ -610,7 +610,7 @@ def _get_exclusion(self) -> Callable[ raise NotImplementedError() @property - def _create_exclusion(self) -> Callable[ + def create_exclusion(self) -> Callable[ [logging_config.CreateExclusionRequest], Union[ logging_config.LogExclusion, @@ -619,7 +619,7 @@ def _create_exclusion(self) -> Callable[ raise NotImplementedError() @property - def _update_exclusion(self) -> Callable[ + def update_exclusion(self) -> Callable[ [logging_config.UpdateExclusionRequest], Union[ logging_config.LogExclusion, @@ -628,7 +628,7 @@ def _update_exclusion(self) -> Callable[ raise NotImplementedError() @property - def _delete_exclusion(self) -> Callable[ + def delete_exclusion(self) -> Callable[ [logging_config.DeleteExclusionRequest], Union[ empty_pb2.Empty, @@ -637,7 +637,7 @@ def _delete_exclusion(self) -> Callable[ raise NotImplementedError() @property - def _get_cmek_settings(self) -> Callable[ + def get_cmek_settings(self) -> Callable[ [logging_config.GetCmekSettingsRequest], Union[ logging_config.CmekSettings, @@ -646,7 +646,7 @@ def _get_cmek_settings(self) -> Callable[ raise NotImplementedError() @property - def _update_cmek_settings(self) -> Callable[ + def update_cmek_settings(self) -> Callable[ [logging_config.UpdateCmekSettingsRequest], Union[ logging_config.CmekSettings, @@ -655,7 +655,7 @@ def _update_cmek_settings(self) -> Callable[ raise NotImplementedError() @property - def 
_get_settings(self) -> Callable[ + def get_settings(self) -> Callable[ [logging_config.GetSettingsRequest], Union[ logging_config.Settings, @@ -664,7 +664,7 @@ def _get_settings(self) -> Callable[ raise NotImplementedError() @property - def _update_settings(self) -> Callable[ + def update_settings(self) -> Callable[ [logging_config.UpdateSettingsRequest], Union[ logging_config.Settings, @@ -673,7 +673,7 @@ def _update_settings(self) -> Callable[ raise NotImplementedError() @property - def _copy_log_entries(self) -> Callable[ + def copy_log_entries(self) -> Callable[ [logging_config.CopyLogEntriesRequest], Union[ operations_pb2.Operation, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8b9ca93a0702..0d7ee84124e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -561,7 +561,7 @@ def undelete_bucket(self) -> Callable[ return self._stubs['undelete_bucket'] @property - def _list_views(self) -> Callable[ + def list_views(self) -> Callable[ [logging_config.ListViewsRequest], logging_config.ListViewsResponse]: r"""Return a callable for the list views method over gRPC. @@ -578,16 +578,16 @@ def _list_views(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_list_views' not in self._stubs: - self._stubs['_list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListViews', + if 'list_views' not in self._stubs: + self._stubs['list_views'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListViews', request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['_list_views'] + return self._stubs['list_views'] @property - def _get_view(self) -> Callable[ + def get_view(self) -> Callable[ [logging_config.GetViewRequest], logging_config.LogView]: r"""Return a callable for the get view method over gRPC. @@ -604,16 +604,16 @@ def _get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_view' not in self._stubs: - self._stubs['_get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetView', + if 'get_view' not in self._stubs: + self._stubs['get_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetView', request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['_get_view'] + return self._stubs['get_view'] @property - def _create_view(self) -> Callable[ + def create_view(self) -> Callable[ [logging_config.CreateViewRequest], logging_config.LogView]: r"""Return a callable for the create view method over gRPC. @@ -631,16 +631,16 @@ def _create_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_create_view' not in self._stubs: - self._stubs['_create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateView', + if 'create_view' not in self._stubs: + self._stubs['create_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateView', request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['_create_view'] + return self._stubs['create_view'] @property - def _update_view(self) -> Callable[ + def update_view(self) -> Callable[ [logging_config.UpdateViewRequest], logging_config.LogView]: r"""Return a callable for the update view method over gRPC. @@ -661,16 +661,16 @@ def _update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_update_view' not in self._stubs: - self._stubs['_update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateView', + if 'update_view' not in self._stubs: + self._stubs['update_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateView', request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['_update_view'] + return self._stubs['update_view'] @property - def _delete_view(self) -> Callable[ + def delete_view(self) -> Callable[ [logging_config.DeleteViewRequest], empty_pb2.Empty]: r"""Return a callable for the delete view method over gRPC. @@ -690,16 +690,16 @@ def _delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_delete_view' not in self._stubs: - self._stubs['_delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteView', + if 'delete_view' not in self._stubs: + self._stubs['delete_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteView', request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_view'] + return self._stubs['delete_view'] @property - def _list_sinks(self) -> Callable[ + def list_sinks(self) -> Callable[ [logging_config.ListSinksRequest], logging_config.ListSinksResponse]: r"""Return a callable for the list sinks method over gRPC. @@ -716,16 +716,16 @@ def _list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_list_sinks' not in self._stubs: - self._stubs['_list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListSinks', + if 'list_sinks' not in self._stubs: + self._stubs['list_sinks'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['_list_sinks'] + return self._stubs['list_sinks'] @property - def _get_sink(self) -> Callable[ + def get_sink(self) -> Callable[ [logging_config.GetSinkRequest], logging_config.LogSink]: r"""Return a callable for the get sink method over gRPC. @@ -742,16 +742,16 @@ def _get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_sink' not in self._stubs: - self._stubs['_get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetSink', + if 'get_sink' not in self._stubs: + self._stubs['get_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['_get_sink'] + return self._stubs['get_sink'] @property - def _create_sink(self) -> Callable[ + def create_sink(self) -> Callable[ [logging_config.CreateSinkRequest], logging_config.LogSink]: r"""Return a callable for the create sink method over gRPC. @@ -772,16 +772,16 @@ def _create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_create_sink' not in self._stubs: - self._stubs['_create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateSink', + if 'create_sink' not in self._stubs: + self._stubs['create_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['_create_sink'] + return self._stubs['create_sink'] @property - def _update_sink(self) -> Callable[ + def update_sink(self) -> Callable[ [logging_config.UpdateSinkRequest], logging_config.LogSink]: r"""Return a callable for the update sink method over gRPC. @@ -803,16 +803,16 @@ def _update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_sink' not in self._stubs: - self._stubs['_update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateSink', + if 'update_sink' not in self._stubs: + self._stubs['update_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['_update_sink'] + return self._stubs['update_sink'] @property - def _delete_sink(self) -> Callable[ + def delete_sink(self) -> Callable[ [logging_config.DeleteSinkRequest], empty_pb2.Empty]: r"""Return a callable for the delete sink method over gRPC. @@ -830,16 +830,16 @@ def _delete_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_delete_sink' not in self._stubs: - self._stubs['_delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteSink', + if 'delete_sink' not in self._stubs: + self._stubs['delete_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_sink'] + return self._stubs['delete_sink'] @property - def _create_link(self) -> Callable[ + def create_link(self) -> Callable[ [logging_config.CreateLinkRequest], operations_pb2.Operation]: r"""Return a callable for the create link method over gRPC. @@ -859,16 +859,16 @@ def _create_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_create_link' not in self._stubs: - self._stubs['_create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateLink', + if 'create_link' not in self._stubs: + self._stubs['create_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateLink', request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['_create_link'] + return self._stubs['create_link'] @property - def _delete_link(self) -> Callable[ + def delete_link(self) -> Callable[ [logging_config.DeleteLinkRequest], operations_pb2.Operation]: r"""Return a callable for the delete link method over gRPC. @@ -886,16 +886,16 @@ def _delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_delete_link' not in self._stubs: - self._stubs['_delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteLink', + if 'delete_link' not in self._stubs: + self._stubs['delete_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteLink', request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['_delete_link'] + return self._stubs['delete_link'] @property - def _list_links(self) -> Callable[ + def list_links(self) -> Callable[ [logging_config.ListLinksRequest], logging_config.ListLinksResponse]: r"""Return a callable for the list links method over gRPC. @@ -912,16 +912,16 @@ def _list_links(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_list_links' not in self._stubs: - self._stubs['_list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListLinks', + if 'list_links' not in self._stubs: + self._stubs['list_links'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListLinks', request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['_list_links'] + return self._stubs['list_links'] @property - def _get_link(self) -> Callable[ + def get_link(self) -> Callable[ [logging_config.GetLinkRequest], logging_config.Link]: r"""Return a callable for the get link method over gRPC. @@ -938,16 +938,16 @@ def _get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_link' not in self._stubs: - self._stubs['_get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetLink', + if 'get_link' not in self._stubs: + self._stubs['get_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetLink', request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['_get_link'] + return self._stubs['get_link'] @property - def _list_exclusions(self) -> Callable[ + def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], logging_config.ListExclusionsResponse]: r"""Return a callable for the list exclusions method over gRPC. @@ -965,16 +965,16 @@ def _list_exclusions(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_list_exclusions' not in self._stubs: - self._stubs['_list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListExclusions', + if 'list_exclusions' not in self._stubs: + self._stubs['list_exclusions'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['_list_exclusions'] + return self._stubs['list_exclusions'] @property - def _get_exclusion(self) -> Callable[ + def get_exclusion(self) -> Callable[ [logging_config.GetExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the get exclusion method over gRPC. @@ -991,16 +991,16 @@ def _get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_exclusion' not in self._stubs: - self._stubs['_get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetExclusion', + if 'get_exclusion' not in self._stubs: + self._stubs['get_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['_get_exclusion'] + return self._stubs['get_exclusion'] @property - def _create_exclusion(self) -> Callable[ + def create_exclusion(self) -> Callable[ [logging_config.CreateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the create exclusion method over gRPC. @@ -1019,16 +1019,16 @@ def _create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_create_exclusion' not in self._stubs: - self._stubs['_create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateExclusion', + if 'create_exclusion' not in self._stubs: + self._stubs['create_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['_create_exclusion'] + return self._stubs['create_exclusion'] @property - def _update_exclusion(self) -> Callable[ + def update_exclusion(self) -> Callable[ [logging_config.UpdateExclusionRequest], logging_config.LogExclusion]: r"""Return a callable for the update exclusion method over gRPC. @@ -1046,16 +1046,16 @@ def _update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_update_exclusion' not in self._stubs: - self._stubs['_update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateExclusion', + if 'update_exclusion' not in self._stubs: + self._stubs['update_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['_update_exclusion'] + return self._stubs['update_exclusion'] @property - def _delete_exclusion(self) -> Callable[ + def delete_exclusion(self) -> Callable[ [logging_config.DeleteExclusionRequest], empty_pb2.Empty]: r"""Return a callable for the delete exclusion method over gRPC. @@ -1072,16 +1072,16 @@ def _delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_delete_exclusion' not in self._stubs: - self._stubs['_delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteExclusion', + if 'delete_exclusion' not in self._stubs: + self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_exclusion'] + return self._stubs['delete_exclusion'] @property - def _get_cmek_settings(self) -> Callable[ + def get_cmek_settings(self) -> Callable[ [logging_config.GetCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the get cmek settings method over gRPC. @@ -1107,16 +1107,16 @@ def _get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_cmek_settings' not in self._stubs: - self._stubs['_get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetCmekSettings', + if 'get_cmek_settings' not in self._stubs: + self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetCmekSettings', request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['_get_cmek_settings'] + return self._stubs['get_cmek_settings'] @property - def _update_cmek_settings(self) -> Callable[ + def update_cmek_settings(self) -> Callable[ [logging_config.UpdateCmekSettingsRequest], logging_config.CmekSettings]: r"""Return a callable for the update cmek settings method over gRPC. @@ -1147,16 +1147,16 @@ def _update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_cmek_settings' not in self._stubs: - self._stubs['_update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateCmekSettings', + if 'update_cmek_settings' not in self._stubs: + self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['_update_cmek_settings'] + return self._stubs['update_cmek_settings'] @property - def _get_settings(self) -> Callable[ + def get_settings(self) -> Callable[ [logging_config.GetSettingsRequest], logging_config.Settings]: r"""Return a callable for the get settings method over gRPC. @@ -1183,16 +1183,16 @@ def _get_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_settings' not in self._stubs: - self._stubs['_get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetSettings', + if 'get_settings' not in self._stubs: + self._stubs['get_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSettings', request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['_get_settings'] + return self._stubs['get_settings'] @property - def _update_settings(self) -> Callable[ + def update_settings(self) -> Callable[ [logging_config.UpdateSettingsRequest], logging_config.Settings]: r"""Return a callable for the update settings method over gRPC. @@ -1226,16 +1226,16 @@ def _update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_settings' not in self._stubs: - self._stubs['_update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateSettings', + if 'update_settings' not in self._stubs: + self._stubs['update_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSettings', request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['_update_settings'] + return self._stubs['update_settings'] @property - def _copy_log_entries(self) -> Callable[ + def copy_log_entries(self) -> Callable[ [logging_config.CopyLogEntriesRequest], operations_pb2.Operation]: r"""Return a callable for the copy log entries method over gRPC. @@ -1253,13 +1253,13 @@ def _copy_log_entries(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_copy_log_entries' not in self._stubs: - self._stubs['_copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CopyLogEntries', + if 'copy_log_entries' not in self._stubs: + self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CopyLogEntries', request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['_copy_log_entries'] + return self._stubs['copy_log_entries'] def close(self): self._logged_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index d89249f948eb..46dd72d79d94 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -566,7 +566,7 @@ def undelete_bucket(self) -> Callable[ return self._stubs['undelete_bucket'] @property - def _list_views(self) -> Callable[ + def list_views(self) -> Callable[ [logging_config.ListViewsRequest], Awaitable[logging_config.ListViewsResponse]]: r"""Return a callable for the list views method over gRPC. @@ -583,16 +583,16 @@ def _list_views(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_list_views' not in self._stubs: - self._stubs['_list_views'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListViews', + if 'list_views' not in self._stubs: + self._stubs['list_views'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListViews', request_serializer=logging_config.ListViewsRequest.serialize, response_deserializer=logging_config.ListViewsResponse.deserialize, ) - return self._stubs['_list_views'] + return self._stubs['list_views'] @property - def _get_view(self) -> Callable[ + def get_view(self) -> Callable[ [logging_config.GetViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the get view method over gRPC. @@ -609,16 +609,16 @@ def _get_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_view' not in self._stubs: - self._stubs['_get_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetView', + if 'get_view' not in self._stubs: + self._stubs['get_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetView', request_serializer=logging_config.GetViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['_get_view'] + return self._stubs['get_view'] @property - def _create_view(self) -> Callable[ + def create_view(self) -> Callable[ [logging_config.CreateViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the create view method over gRPC. @@ -636,16 +636,16 @@ def _create_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_create_view' not in self._stubs: - self._stubs['_create_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateView', + if 'create_view' not in self._stubs: + self._stubs['create_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateView', request_serializer=logging_config.CreateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['_create_view'] + return self._stubs['create_view'] @property - def _update_view(self) -> Callable[ + def update_view(self) -> Callable[ [logging_config.UpdateViewRequest], Awaitable[logging_config.LogView]]: r"""Return a callable for the update view method over gRPC. @@ -666,16 +666,16 @@ def _update_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_view' not in self._stubs: - self._stubs['_update_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateView', + if 'update_view' not in self._stubs: + self._stubs['update_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateView', request_serializer=logging_config.UpdateViewRequest.serialize, response_deserializer=logging_config.LogView.deserialize, ) - return self._stubs['_update_view'] + return self._stubs['update_view'] @property - def _delete_view(self) -> Callable[ + def delete_view(self) -> Callable[ [logging_config.DeleteViewRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete view method over gRPC. @@ -695,16 +695,16 @@ def _delete_view(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_delete_view' not in self._stubs: - self._stubs['_delete_view'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteView', + if 'delete_view' not in self._stubs: + self._stubs['delete_view'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteView', request_serializer=logging_config.DeleteViewRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_view'] + return self._stubs['delete_view'] @property - def _list_sinks(self) -> Callable[ + def list_sinks(self) -> Callable[ [logging_config.ListSinksRequest], Awaitable[logging_config.ListSinksResponse]]: r"""Return a callable for the list sinks method over gRPC. @@ -721,16 +721,16 @@ def _list_sinks(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_list_sinks' not in self._stubs: - self._stubs['_list_sinks'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListSinks', + if 'list_sinks' not in self._stubs: + self._stubs['list_sinks'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListSinks', request_serializer=logging_config.ListSinksRequest.serialize, response_deserializer=logging_config.ListSinksResponse.deserialize, ) - return self._stubs['_list_sinks'] + return self._stubs['list_sinks'] @property - def _get_sink(self) -> Callable[ + def get_sink(self) -> Callable[ [logging_config.GetSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the get sink method over gRPC. @@ -747,16 +747,16 @@ def _get_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_sink' not in self._stubs: - self._stubs['_get_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetSink', + if 'get_sink' not in self._stubs: + self._stubs['get_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSink', request_serializer=logging_config.GetSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['_get_sink'] + return self._stubs['get_sink'] @property - def _create_sink(self) -> Callable[ + def create_sink(self) -> Callable[ [logging_config.CreateSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the create sink method over gRPC. @@ -777,16 +777,16 @@ def _create_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_create_sink' not in self._stubs: - self._stubs['_create_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateSink', + if 'create_sink' not in self._stubs: + self._stubs['create_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateSink', request_serializer=logging_config.CreateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['_create_sink'] + return self._stubs['create_sink'] @property - def _update_sink(self) -> Callable[ + def update_sink(self) -> Callable[ [logging_config.UpdateSinkRequest], Awaitable[logging_config.LogSink]]: r"""Return a callable for the update sink method over gRPC. @@ -808,16 +808,16 @@ def _update_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_update_sink' not in self._stubs: - self._stubs['_update_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateSink', + if 'update_sink' not in self._stubs: + self._stubs['update_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSink', request_serializer=logging_config.UpdateSinkRequest.serialize, response_deserializer=logging_config.LogSink.deserialize, ) - return self._stubs['_update_sink'] + return self._stubs['update_sink'] @property - def _delete_sink(self) -> Callable[ + def delete_sink(self) -> Callable[ [logging_config.DeleteSinkRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete sink method over gRPC. @@ -835,16 +835,16 @@ def _delete_sink(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_delete_sink' not in self._stubs: - self._stubs['_delete_sink'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteSink', + if 'delete_sink' not in self._stubs: + self._stubs['delete_sink'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteSink', request_serializer=logging_config.DeleteSinkRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_sink'] + return self._stubs['delete_sink'] @property - def _create_link(self) -> Callable[ + def create_link(self) -> Callable[ [logging_config.CreateLinkRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the create link method over gRPC. @@ -864,16 +864,16 @@ def _create_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_create_link' not in self._stubs: - self._stubs['_create_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateLink', + if 'create_link' not in self._stubs: + self._stubs['create_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateLink', request_serializer=logging_config.CreateLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['_create_link'] + return self._stubs['create_link'] @property - def _delete_link(self) -> Callable[ + def delete_link(self) -> Callable[ [logging_config.DeleteLinkRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the delete link method over gRPC. @@ -891,16 +891,16 @@ def _delete_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_delete_link' not in self._stubs: - self._stubs['_delete_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteLink', + if 'delete_link' not in self._stubs: + self._stubs['delete_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteLink', request_serializer=logging_config.DeleteLinkRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['_delete_link'] + return self._stubs['delete_link'] @property - def _list_links(self) -> Callable[ + def list_links(self) -> Callable[ [logging_config.ListLinksRequest], Awaitable[logging_config.ListLinksResponse]]: r"""Return a callable for the list links method over gRPC. @@ -917,16 +917,16 @@ def _list_links(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_list_links' not in self._stubs: - self._stubs['_list_links'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListLinks', + if 'list_links' not in self._stubs: + self._stubs['list_links'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListLinks', request_serializer=logging_config.ListLinksRequest.serialize, response_deserializer=logging_config.ListLinksResponse.deserialize, ) - return self._stubs['_list_links'] + return self._stubs['list_links'] @property - def _get_link(self) -> Callable[ + def get_link(self) -> Callable[ [logging_config.GetLinkRequest], Awaitable[logging_config.Link]]: r"""Return a callable for the get link method over gRPC. @@ -943,16 +943,16 @@ def _get_link(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_link' not in self._stubs: - self._stubs['_get_link'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetLink', + if 'get_link' not in self._stubs: + self._stubs['get_link'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetLink', request_serializer=logging_config.GetLinkRequest.serialize, response_deserializer=logging_config.Link.deserialize, ) - return self._stubs['_get_link'] + return self._stubs['get_link'] @property - def _list_exclusions(self) -> Callable[ + def list_exclusions(self) -> Callable[ [logging_config.ListExclusionsRequest], Awaitable[logging_config.ListExclusionsResponse]]: r"""Return a callable for the list exclusions method over gRPC. @@ -970,16 +970,16 @@ def _list_exclusions(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_list_exclusions' not in self._stubs: - self._stubs['_list_exclusions'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_ListExclusions', + if 'list_exclusions' not in self._stubs: + self._stubs['list_exclusions'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/ListExclusions', request_serializer=logging_config.ListExclusionsRequest.serialize, response_deserializer=logging_config.ListExclusionsResponse.deserialize, ) - return self._stubs['_list_exclusions'] + return self._stubs['list_exclusions'] @property - def _get_exclusion(self) -> Callable[ + def get_exclusion(self) -> Callable[ [logging_config.GetExclusionRequest], Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the get exclusion method over gRPC. @@ -996,16 +996,16 @@ def _get_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_exclusion' not in self._stubs: - self._stubs['_get_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetExclusion', + if 'get_exclusion' not in self._stubs: + self._stubs['get_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetExclusion', request_serializer=logging_config.GetExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['_get_exclusion'] + return self._stubs['get_exclusion'] @property - def _create_exclusion(self) -> Callable[ + def create_exclusion(self) -> Callable[ [logging_config.CreateExclusionRequest], Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the create exclusion method over gRPC. @@ -1024,16 +1024,16 @@ def _create_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_create_exclusion' not in self._stubs: - self._stubs['_create_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CreateExclusion', + if 'create_exclusion' not in self._stubs: + self._stubs['create_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CreateExclusion', request_serializer=logging_config.CreateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['_create_exclusion'] + return self._stubs['create_exclusion'] @property - def _update_exclusion(self) -> Callable[ + def update_exclusion(self) -> Callable[ [logging_config.UpdateExclusionRequest], Awaitable[logging_config.LogExclusion]]: r"""Return a callable for the update exclusion method over gRPC. @@ -1051,16 +1051,16 @@ def _update_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_exclusion' not in self._stubs: - self._stubs['_update_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateExclusion', + if 'update_exclusion' not in self._stubs: + self._stubs['update_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateExclusion', request_serializer=logging_config.UpdateExclusionRequest.serialize, response_deserializer=logging_config.LogExclusion.deserialize, ) - return self._stubs['_update_exclusion'] + return self._stubs['update_exclusion'] @property - def _delete_exclusion(self) -> Callable[ + def delete_exclusion(self) -> Callable[ [logging_config.DeleteExclusionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete exclusion method over gRPC. @@ -1077,16 +1077,16 @@ def _delete_exclusion(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_delete_exclusion' not in self._stubs: - self._stubs['_delete_exclusion'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_DeleteExclusion', + if 'delete_exclusion' not in self._stubs: + self._stubs['delete_exclusion'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/DeleteExclusion', request_serializer=logging_config.DeleteExclusionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_exclusion'] + return self._stubs['delete_exclusion'] @property - def _get_cmek_settings(self) -> Callable[ + def get_cmek_settings(self) -> Callable[ [logging_config.GetCmekSettingsRequest], Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the get cmek settings method over gRPC. @@ -1112,16 +1112,16 @@ def _get_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_cmek_settings' not in self._stubs: - self._stubs['_get_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetCmekSettings', + if 'get_cmek_settings' not in self._stubs: + self._stubs['get_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetCmekSettings', request_serializer=logging_config.GetCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['_get_cmek_settings'] + return self._stubs['get_cmek_settings'] @property - def _update_cmek_settings(self) -> Callable[ + def update_cmek_settings(self) -> Callable[ [logging_config.UpdateCmekSettingsRequest], Awaitable[logging_config.CmekSettings]]: r"""Return a callable for the update cmek settings method over gRPC. @@ -1152,16 +1152,16 @@ def _update_cmek_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_update_cmek_settings' not in self._stubs: - self._stubs['_update_cmek_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateCmekSettings', + if 'update_cmek_settings' not in self._stubs: + self._stubs['update_cmek_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateCmekSettings', request_serializer=logging_config.UpdateCmekSettingsRequest.serialize, response_deserializer=logging_config.CmekSettings.deserialize, ) - return self._stubs['_update_cmek_settings'] + return self._stubs['update_cmek_settings'] @property - def _get_settings(self) -> Callable[ + def get_settings(self) -> Callable[ [logging_config.GetSettingsRequest], Awaitable[logging_config.Settings]]: r"""Return a callable for the get settings method over gRPC. @@ -1188,16 +1188,16 @@ def _get_settings(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_get_settings' not in self._stubs: - self._stubs['_get_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_GetSettings', + if 'get_settings' not in self._stubs: + self._stubs['get_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/GetSettings', request_serializer=logging_config.GetSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['_get_settings'] + return self._stubs['get_settings'] @property - def _update_settings(self) -> Callable[ + def update_settings(self) -> Callable[ [logging_config.UpdateSettingsRequest], Awaitable[logging_config.Settings]]: r"""Return a callable for the update settings method over gRPC. @@ -1231,16 +1231,16 @@ def _update_settings(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_update_settings' not in self._stubs: - self._stubs['_update_settings'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_UpdateSettings', + if 'update_settings' not in self._stubs: + self._stubs['update_settings'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/UpdateSettings', request_serializer=logging_config.UpdateSettingsRequest.serialize, response_deserializer=logging_config.Settings.deserialize, ) - return self._stubs['_update_settings'] + return self._stubs['update_settings'] @property - def _copy_log_entries(self) -> Callable[ + def copy_log_entries(self) -> Callable[ [logging_config.CopyLogEntriesRequest], Awaitable[operations_pb2.Operation]]: r"""Return a callable for the copy log entries method over gRPC. @@ -1258,13 +1258,13 @@ def _copy_log_entries(self) -> Callable[ # the request. 
# gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_copy_log_entries' not in self._stubs: - self._stubs['_copy_log_entries'] = self._logged_channel.unary_unary( - '/google.logging.v2.ConfigServiceV2/_CopyLogEntries', + if 'copy_log_entries' not in self._stubs: + self._stubs['copy_log_entries'] = self._logged_channel.unary_unary( + '/google.logging.v2.ConfigServiceV2/CopyLogEntries', request_serializer=logging_config.CopyLogEntriesRequest.serialize, response_deserializer=operations_pb2.Operation.FromString, ) - return self._stubs['_copy_log_entries'] + return self._stubs['copy_log_entries'] def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" @@ -1309,33 +1309,33 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self._list_views: self._wrap_method( - self._list_views, + self.list_views: self._wrap_method( + self.list_views, default_timeout=None, client_info=client_info, ), - self._get_view: self._wrap_method( - self._get_view, + self.get_view: self._wrap_method( + self.get_view, default_timeout=None, client_info=client_info, ), - self._create_view: self._wrap_method( - self._create_view, + self.create_view: self._wrap_method( + self.create_view, default_timeout=None, client_info=client_info, ), - self._update_view: self._wrap_method( - self._update_view, + self.update_view: self._wrap_method( + self.update_view, default_timeout=None, client_info=client_info, ), - self._delete_view: self._wrap_method( - self._delete_view, + self.delete_view: self._wrap_method( + self.delete_view, default_timeout=None, client_info=client_info, ), - self._list_sinks: self._wrap_method( - self._list_sinks, + self.list_sinks: self._wrap_method( + self.list_sinks, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1350,8 +1350,8 @@ def _prep_wrapped_messages(self, client_info): 
default_timeout=60.0, client_info=client_info, ), - self._get_sink: self._wrap_method( - self._get_sink, + self.get_sink: self._wrap_method( + self.get_sink, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1366,13 +1366,13 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_sink: self._wrap_method( - self._create_sink, + self.create_sink: self._wrap_method( + self.create_sink, default_timeout=120.0, client_info=client_info, ), - self._update_sink: self._wrap_method( - self._update_sink, + self.update_sink: self._wrap_method( + self.update_sink, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1387,8 +1387,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._delete_sink: self._wrap_method( - self._delete_sink, + self.delete_sink: self._wrap_method( + self.delete_sink, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1403,28 +1403,28 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_link: self._wrap_method( - self._create_link, + self.create_link: self._wrap_method( + self.create_link, default_timeout=None, client_info=client_info, ), - self._delete_link: self._wrap_method( - self._delete_link, + self.delete_link: self._wrap_method( + self.delete_link, default_timeout=None, client_info=client_info, ), - self._list_links: self._wrap_method( - self._list_links, + self.list_links: self._wrap_method( + self.list_links, default_timeout=None, client_info=client_info, ), - self._get_link: self._wrap_method( - self._get_link, + self.get_link: self._wrap_method( + self.get_link, default_timeout=None, client_info=client_info, ), - self._list_exclusions: self._wrap_method( - self._list_exclusions, + self.list_exclusions: self._wrap_method( + self.list_exclusions, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1439,8 +1439,8 @@ def 
_prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._get_exclusion: self._wrap_method( - self._get_exclusion, + self.get_exclusion: self._wrap_method( + self.get_exclusion, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1455,18 +1455,18 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_exclusion: self._wrap_method( - self._create_exclusion, + self.create_exclusion: self._wrap_method( + self.create_exclusion, default_timeout=120.0, client_info=client_info, ), - self._update_exclusion: self._wrap_method( - self._update_exclusion, + self.update_exclusion: self._wrap_method( + self.update_exclusion, default_timeout=120.0, client_info=client_info, ), - self._delete_exclusion: self._wrap_method( - self._delete_exclusion, + self.delete_exclusion: self._wrap_method( + self.delete_exclusion, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -1481,28 +1481,28 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._get_cmek_settings: self._wrap_method( - self._get_cmek_settings, + self.get_cmek_settings: self._wrap_method( + self.get_cmek_settings, default_timeout=None, client_info=client_info, ), - self._update_cmek_settings: self._wrap_method( - self._update_cmek_settings, + self.update_cmek_settings: self._wrap_method( + self.update_cmek_settings, default_timeout=None, client_info=client_info, ), - self._get_settings: self._wrap_method( - self._get_settings, + self.get_settings: self._wrap_method( + self.get_settings, default_timeout=None, client_info=client_info, ), - self._update_settings: self._wrap_method( - self._update_settings, + self.update_settings: self._wrap_method( + self.update_settings, default_timeout=None, client_info=client_info, ), - self._copy_log_entries: self._wrap_method( - self._copy_log_entries, + self.copy_log_entries: self._wrap_method( + 
self.copy_log_entries, default_timeout=None, client_info=client_info, ), diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index aac48baba9bf..fe606e940da1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -257,7 +257,7 @@ async def _list_log_metrics(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListLogMetricsAsyncPager: + ) -> pagers.ListLogMetricsAsyncPager: r"""Lists logs-based metrics. .. code-block:: python @@ -271,7 +271,7 @@ async def _list_log_metrics(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__list_log_metrics(): + async def sample_list_log_metrics(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -310,7 +310,7 @@ async def sample__list_log_metrics(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsAsyncPager: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager: Result returned from ListLogMetrics. Iterating over this object will yield @@ -339,7 +339,7 @@ async def sample__list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._list_log_metrics] + rpc = self._client._transport._wrapped_methods[self._client._transport.list_log_metrics] # Certain fields should be provided within the metadata header; # add these here. @@ -362,7 +362,7 @@ async def sample__list_log_metrics(): # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. - response = pagers._ListLogMetricsAsyncPager( + response = pagers.ListLogMetricsAsyncPager( method=rpc, request=request, response=response, @@ -395,7 +395,7 @@ async def _get_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__get_log_metric(): + async def sample_get_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -468,7 +468,7 @@ async def sample__get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._get_log_metric] + rpc = self._client._transport._wrapped_methods[self._client._transport.get_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -514,7 +514,7 @@ async def _create_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__create_log_metric(): + async def sample_create_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -605,7 +605,7 @@ async def sample__create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = self._client._transport._wrapped_methods[self._client._transport._create_log_metric] + rpc = self._client._transport._wrapped_methods[self._client._transport.create_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -651,7 +651,7 @@ async def _update_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__update_log_metric(): + async def sample_update_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -741,7 +741,7 @@ async def sample__update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._update_log_metric] + rpc = self._client._transport._wrapped_methods[self._client._transport.update_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -786,7 +786,7 @@ async def _delete_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - async def sample__delete_log_metric(): + async def sample_delete_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -840,7 +840,7 @@ async def sample__delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._client._transport._wrapped_methods[self._client._transport._delete_log_metric] + rpc = self._client._transport._wrapped_methods[self._client._transport.delete_log_metric] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index 3c4c03cd40c3..617f0838b1cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -604,7 +604,7 @@ def _list_log_metrics(self, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> pagers._ListLogMetricsPager: + ) -> pagers.ListLogMetricsPager: r"""Lists logs-based metrics. .. code-block:: python @@ -618,7 +618,7 @@ def _list_log_metrics(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__list_log_metrics(): + def sample_list_log_metrics(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -657,7 +657,7 @@ def sample__list_log_metrics(): be of type `bytes`. Returns: - google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsPager: + google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager: Result returned from ListLogMetrics. Iterating over this object will yield @@ -685,7 +685,7 @@ def sample__list_log_metrics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._list_log_metrics] + rpc = self._transport._wrapped_methods[self._transport.list_log_metrics] # Certain fields should be provided within the metadata header; # add these here. 
@@ -708,7 +708,7 @@ def sample__list_log_metrics(): # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. - response = pagers._ListLogMetricsPager( + response = pagers.ListLogMetricsPager( method=rpc, request=request, response=response, @@ -741,7 +741,7 @@ def _get_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__get_log_metric(): + def sample_get_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -813,7 +813,7 @@ def sample__get_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._get_log_metric] + rpc = self._transport._wrapped_methods[self._transport.get_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -859,7 +859,7 @@ def _create_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__create_log_metric(): + def sample_create_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -949,7 +949,7 @@ def sample__create_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._create_log_metric] + rpc = self._transport._wrapped_methods[self._transport.create_log_metric] # Certain fields should be provided within the metadata header; # add these here. 
@@ -995,7 +995,7 @@ def _update_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__update_log_metric(): + def sample_update_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -1084,7 +1084,7 @@ def sample__update_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._update_log_metric] + rpc = self._transport._wrapped_methods[self._transport.update_log_metric] # Certain fields should be provided within the metadata header; # add these here. @@ -1129,7 +1129,7 @@ def _delete_log_metric(self, # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import logging_v2 - def sample__delete_log_metric(): + def sample_delete_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -1182,7 +1182,7 @@ def sample__delete_log_metric(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport._delete_log_metric] + rpc = self._transport._wrapped_methods[self._transport.delete_log_metric] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py index 7024e3a7b362..3d44cf6e4c67 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/pagers.py @@ -27,8 +27,8 @@ from google.cloud.logging_v2.types import logging_metrics -class _ListLogMetricsPager: - """A pager for iterating through ``_list_log_metrics`` requests. +class ListLogMetricsPager: + """A pager for iterating through ``list_log_metrics`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and @@ -36,7 +36,7 @@ class _ListLogMetricsPager: ``metrics`` field. If there are more pages, the ``__iter__`` method will make additional - ``_ListLogMetrics`` requests and continue to iterate + ``ListLogMetrics`` requests and continue to iterate through the ``metrics`` field on the corresponding responses. @@ -95,8 +95,8 @@ def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) -class _ListLogMetricsAsyncPager: - """A pager for iterating through ``_list_log_metrics`` requests. +class ListLogMetricsAsyncPager: + """A pager for iterating through ``list_log_metrics`` requests. This class thinly wraps an initial :class:`google.cloud.logging_v2.types.ListLogMetricsResponse` object, and @@ -104,7 +104,7 @@ class _ListLogMetricsAsyncPager: ``metrics`` field. 
If there are more pages, the ``__aiter__`` method will make additional - ``_ListLogMetrics`` requests and continue to iterate + ``ListLogMetrics`` requests and continue to iterate through the ``metrics`` field on the corresponding responses. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index a2d6ff535979..596f3beeddde 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -125,8 +125,8 @@ def host(self): def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { - self._list_log_metrics: gapic_v1.method.wrap_method( - self._list_log_metrics, + self.list_log_metrics: gapic_v1.method.wrap_method( + self.list_log_metrics, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -141,8 +141,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._get_log_metric: gapic_v1.method.wrap_method( - self._get_log_metric, + self.get_log_metric: gapic_v1.method.wrap_method( + self.get_log_metric, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -157,13 +157,13 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_log_metric: gapic_v1.method.wrap_method( - self._create_log_metric, + self.create_log_metric: gapic_v1.method.wrap_method( + self.create_log_metric, default_timeout=60.0, client_info=client_info, ), - self._update_log_metric: gapic_v1.method.wrap_method( - self._update_log_metric, + self.update_log_metric: 
gapic_v1.method.wrap_method( + self.update_log_metric, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -178,8 +178,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._delete_log_metric: gapic_v1.method.wrap_method( - self._delete_log_metric, + self.delete_log_metric: gapic_v1.method.wrap_method( + self.delete_log_metric, default_retry=retries.Retry( initial=0.1, maximum=60.0, @@ -221,7 +221,7 @@ def close(self): raise NotImplementedError() @property - def _list_log_metrics(self) -> Callable[ + def list_log_metrics(self) -> Callable[ [logging_metrics.ListLogMetricsRequest], Union[ logging_metrics.ListLogMetricsResponse, @@ -230,7 +230,7 @@ def _list_log_metrics(self) -> Callable[ raise NotImplementedError() @property - def _get_log_metric(self) -> Callable[ + def get_log_metric(self) -> Callable[ [logging_metrics.GetLogMetricRequest], Union[ logging_metrics.LogMetric, @@ -239,7 +239,7 @@ def _get_log_metric(self) -> Callable[ raise NotImplementedError() @property - def _create_log_metric(self) -> Callable[ + def create_log_metric(self) -> Callable[ [logging_metrics.CreateLogMetricRequest], Union[ logging_metrics.LogMetric, @@ -248,7 +248,7 @@ def _create_log_metric(self) -> Callable[ raise NotImplementedError() @property - def _update_log_metric(self) -> Callable[ + def update_log_metric(self) -> Callable[ [logging_metrics.UpdateLogMetricRequest], Union[ logging_metrics.LogMetric, @@ -257,7 +257,7 @@ def _update_log_metric(self) -> Callable[ raise NotImplementedError() @property - def _delete_log_metric(self) -> Callable[ + def delete_log_metric(self) -> Callable[ [logging_metrics.DeleteLogMetricRequest], Union[ empty_pb2.Empty, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index bfb8a1e8e392..4a2d98cc240e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -311,7 +311,7 @@ def grpc_channel(self) -> grpc.Channel: return self._grpc_channel @property - def _list_log_metrics(self) -> Callable[ + def list_log_metrics(self) -> Callable[ [logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse]: r"""Return a callable for the list log metrics method over gRPC. @@ -328,16 +328,16 @@ def _list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_list_log_metrics' not in self._stubs: - self._stubs['_list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_ListLogMetrics', + if 'list_log_metrics' not in self._stubs: + self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['_list_log_metrics'] + return self._stubs['list_log_metrics'] @property - def _get_log_metric(self) -> Callable[ + def get_log_metric(self) -> Callable[ [logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the get log metric method over gRPC. @@ -354,16 +354,16 @@ def _get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_log_metric' not in self._stubs: - self._stubs['_get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_GetLogMetric', + if 'get_log_metric' not in self._stubs: + self._stubs['get_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['_get_log_metric'] + return self._stubs['get_log_metric'] @property - def _create_log_metric(self) -> Callable[ + def create_log_metric(self) -> Callable[ [logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the create log metric method over gRPC. @@ -380,16 +380,16 @@ def _create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_create_log_metric' not in self._stubs: - self._stubs['_create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_CreateLogMetric', + if 'create_log_metric' not in self._stubs: + self._stubs['create_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['_create_log_metric'] + return self._stubs['create_log_metric'] @property - def _update_log_metric(self) -> Callable[ + def update_log_metric(self) -> Callable[ [logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]: r"""Return a callable for the update log metric method over gRPC. @@ -406,16 +406,16 @@ def _update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_log_metric' not in self._stubs: - self._stubs['_update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_UpdateLogMetric', + if 'update_log_metric' not in self._stubs: + self._stubs['update_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['_update_log_metric'] + return self._stubs['update_log_metric'] @property - def _delete_log_metric(self) -> Callable[ + def delete_log_metric(self) -> Callable[ [logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]: r"""Return a callable for the delete log metric method over gRPC. @@ -432,13 +432,13 @@ def _delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_delete_log_metric' not in self._stubs: - self._stubs['_delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_DeleteLogMetric', + if 'delete_log_metric' not in self._stubs: + self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_log_metric'] + return self._stubs['delete_log_metric'] def close(self): self._logged_channel.close() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 3760cc25b928..dcadfbe957b7 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -316,7 +316,7 @@ def grpc_channel(self) -> aio.Channel: return self._grpc_channel @property - def _list_log_metrics(self) -> Callable[ + def list_log_metrics(self) -> Callable[ [logging_metrics.ListLogMetricsRequest], Awaitable[logging_metrics.ListLogMetricsResponse]]: r"""Return a callable for the list log metrics method over gRPC. @@ -333,16 +333,16 @@ def _list_log_metrics(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_list_log_metrics' not in self._stubs: - self._stubs['_list_log_metrics'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_ListLogMetrics', + if 'list_log_metrics' not in self._stubs: + self._stubs['list_log_metrics'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/ListLogMetrics', request_serializer=logging_metrics.ListLogMetricsRequest.serialize, response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize, ) - return self._stubs['_list_log_metrics'] + return self._stubs['list_log_metrics'] @property - def _get_log_metric(self) -> Callable[ + def get_log_metric(self) -> Callable[ [logging_metrics.GetLogMetricRequest], Awaitable[logging_metrics.LogMetric]]: r"""Return a callable for the get log metric method over gRPC. @@ -359,16 +359,16 @@ def _get_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_get_log_metric' not in self._stubs: - self._stubs['_get_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_GetLogMetric', + if 'get_log_metric' not in self._stubs: + self._stubs['get_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/GetLogMetric', request_serializer=logging_metrics.GetLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['_get_log_metric'] + return self._stubs['get_log_metric'] @property - def _create_log_metric(self) -> Callable[ + def create_log_metric(self) -> Callable[ [logging_metrics.CreateLogMetricRequest], Awaitable[logging_metrics.LogMetric]]: r"""Return a callable for the create log metric method over gRPC. @@ -385,16 +385,16 @@ def _create_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_create_log_metric' not in self._stubs: - self._stubs['_create_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_CreateLogMetric', + if 'create_log_metric' not in self._stubs: + self._stubs['create_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/CreateLogMetric', request_serializer=logging_metrics.CreateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['_create_log_metric'] + return self._stubs['create_log_metric'] @property - def _update_log_metric(self) -> Callable[ + def update_log_metric(self) -> Callable[ [logging_metrics.UpdateLogMetricRequest], Awaitable[logging_metrics.LogMetric]]: r"""Return a callable for the update log metric method over gRPC. @@ -411,16 +411,16 @@ def _update_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
- if '_update_log_metric' not in self._stubs: - self._stubs['_update_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_UpdateLogMetric', + if 'update_log_metric' not in self._stubs: + self._stubs['update_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/UpdateLogMetric', request_serializer=logging_metrics.UpdateLogMetricRequest.serialize, response_deserializer=logging_metrics.LogMetric.deserialize, ) - return self._stubs['_update_log_metric'] + return self._stubs['update_log_metric'] @property - def _delete_log_metric(self) -> Callable[ + def delete_log_metric(self) -> Callable[ [logging_metrics.DeleteLogMetricRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete log metric method over gRPC. @@ -437,19 +437,19 @@ def _delete_log_metric(self) -> Callable[ # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. - if '_delete_log_metric' not in self._stubs: - self._stubs['_delete_log_metric'] = self._logged_channel.unary_unary( - '/google.logging.v2.MetricsServiceV2/_DeleteLogMetric', + if 'delete_log_metric' not in self._stubs: + self._stubs['delete_log_metric'] = self._logged_channel.unary_unary( + '/google.logging.v2.MetricsServiceV2/DeleteLogMetric', request_serializer=logging_metrics.DeleteLogMetricRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, ) - return self._stubs['_delete_log_metric'] + return self._stubs['delete_log_metric'] def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self._list_log_metrics: self._wrap_method( - self._list_log_metrics, + self.list_log_metrics: self._wrap_method( + self.list_log_metrics, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -464,8 +464,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, 
client_info=client_info, ), - self._get_log_metric: self._wrap_method( - self._get_log_metric, + self.get_log_metric: self._wrap_method( + self.get_log_metric, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -480,13 +480,13 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._create_log_metric: self._wrap_method( - self._create_log_metric, + self.create_log_metric: self._wrap_method( + self.create_log_metric, default_timeout=60.0, client_info=client_info, ), - self._update_log_metric: self._wrap_method( - self._update_log_metric, + self.update_log_metric: self._wrap_method( + self.update_log_metric, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, @@ -501,8 +501,8 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self._delete_log_metric: self._wrap_method( - self._delete_log_metric, + self.delete_log_metric: self._wrap_method( + self.delete_log_metric, default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py index 28219651a265..9b38e9b48e4e 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CopyLogEntries +# Snippet for CopyLogEntries # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CopyLogEntries_async_internal] +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__copy_log_entries(): +async def sample_copy_log_entries(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__copy_log_entries(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CopyLogEntries_async_internal] +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py index be1d43fd90a6..18bd5e92ec18 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CopyLogEntries +# Snippet for CopyLogEntries # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CopyLogEntries_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__copy_log_entries(): +def sample_copy_log_entries(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -54,4 +54,4 @@ def sample__copy_log_entries(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CopyLogEntries_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py index d9e42b574535..5e91a182dce4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateExclusion +# Snippet for CreateExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateExclusion_async_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__create_exclusion(): +async def sample_create_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__create_exclusion(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateExclusion_async_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py index ecead9501d29..a01f0852d211 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _CreateExclusion +# Snippet for CreateExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateExclusion_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateExclusion_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__create_exclusion(): +def sample_create_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -54,4 +54,4 @@ def sample__create_exclusion(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateExclusion_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py index 3d418d09c3c1..58675aa3d09a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateLink +# Snippet for CreateLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateLink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateLink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__create_link(): +async def sample_create_link(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__create_link(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateLink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py index 2cc40f8b6d0c..eb17f0d59d52 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_link_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateLink +# Snippet for CreateLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateLink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateLink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__create_link(): +def sample_create_link(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -54,4 +54,4 @@ def sample__create_link(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateLink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py index d5e266b6b2f4..a60e9c5ad231 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateSink +# Snippet for CreateSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateSink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateSink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__create_sink(): +async def sample_create_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__create_sink(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateSink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py index fa6de4343106..191ba174f415 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_sink_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateSink +# Snippet for CreateSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateSink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateSink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__create_sink(): +def sample_create_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -54,4 +54,4 @@ def sample__create_sink(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateSink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py index 57a38bc4f09b..683be2c311d4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _CreateView +# Snippet for CreateView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateView_async_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateView_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__create_view(): +async def sample_create_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -50,4 +50,4 @@ async def sample__create_view(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateView_async_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py index b469117115ef..86a7a77e2f14 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__create_view_sync_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_create_view_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateView +# Snippet for CreateView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__CreateView_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_CreateView_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__create_view(): +def sample_create_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -50,4 +50,4 @@ def sample__create_view(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__CreateView_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_CreateView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async_internal.py similarity index 87% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async_internal.py index c0bae20206f4..b95197fdae03 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteExclusion +# Snippet for DeleteExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteExclusion_async_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__delete_exclusion(): +async def sample_delete_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -47,4 +47,4 @@ async def sample__delete_exclusion(): await client._delete_exclusion(request=request) -# [END logging_v2_generated_ConfigServiceV2__DeleteExclusion_async_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync_internal.py index e51bd9646f1d..efa4eb8d135d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_exclusion_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteExclusion +# Snippet for DeleteExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteExclusion_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__delete_exclusion(): +def sample_delete_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -47,4 +47,4 @@ def sample__delete_exclusion(): client._delete_exclusion(request=request) -# [END logging_v2_generated_ConfigServiceV2__DeleteExclusion_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py index 346f157820c4..1429cfaa7c2a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteLink +# Snippet for DeleteLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteLink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__delete_link(): +async def sample_delete_link(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -53,4 +53,4 @@ async def sample__delete_link(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__DeleteLink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py index 71f2c15bb928..6921f5b35d58 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_link_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _DeleteLink +# Snippet for DeleteLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteLink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteLink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__delete_link(): +def sample_delete_link(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -53,4 +53,4 @@ def sample__delete_link(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__DeleteLink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async_internal.py index 6c8430dacf6e..f24fed1906a4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteSink +# Snippet for DeleteSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteSink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__delete_sink(): +async def sample_delete_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -47,4 +47,4 @@ async def sample__delete_sink(): await client._delete_sink(request=request) -# [END logging_v2_generated_ConfigServiceV2__DeleteSink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync_internal.py index 6439a659ec11..517274b2c16f 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_sink_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteSink +# Snippet for DeleteSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteSink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteSink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__delete_sink(): +def sample_delete_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -47,4 +47,4 @@ def sample__delete_sink(): client._delete_sink(request=request) -# [END logging_v2_generated_ConfigServiceV2__DeleteSink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async_internal.py index a3b6d8e165b4..8fbd6fc91e69 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteView +# Snippet for DeleteView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteView_async_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteView_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__delete_view(): +async def sample_delete_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -47,4 +47,4 @@ async def sample__delete_view(): await client._delete_view(request=request) -# [END logging_v2_generated_ConfigServiceV2__DeleteView_async_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync_internal.py index 0f4f6bed4a09..a2a2e8a44f54 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__delete_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_delete_view_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteView +# Snippet for DeleteView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__DeleteView_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_DeleteView_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__delete_view(): +def sample_delete_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -47,4 +47,4 @@ def sample__delete_view(): client._delete_view(request=request) -# [END logging_v2_generated_ConfigServiceV2__DeleteView_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_DeleteView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py index b24d8f6eb4ac..ec8b0f5cdaca 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _GetCmekSettings +# Snippet for GetCmekSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetCmekSettings_async_internal] +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_cmek_settings(): +async def sample_get_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_cmek_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetCmekSettings_async_internal] +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py index e4b45ba330f6..a7a498879b34 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetCmekSettings +# Snippet for GetCmekSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetCmekSettings_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_cmek_settings(): +def sample_get_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_cmek_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetCmekSettings_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py index 88339ae5ca39..cd23fc5e3b10 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetExclusion +# Snippet for GetExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetExclusion_async_internal] +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_exclusion(): +async def sample_get_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_exclusion(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetExclusion_async_internal] +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py index 5a6315464c21..e12a5a4951eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetExclusion +# Snippet for GetExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetExclusion_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_GetExclusion_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_exclusion(): +def sample_get_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_exclusion(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetExclusion_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_GetExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py index b62bee5af31e..7b65e4a65094 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetLink +# Snippet for GetLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetLink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_GetLink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_link(): +async def sample_get_link(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_link(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetLink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_GetLink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py index b6d674cbc90d..0530e845acae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_link_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_link_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _GetLink +# Snippet for GetLink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetLink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_GetLink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_link(): +def sample_get_link(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_link(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetLink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_GetLink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py index ad89d377831c..09f1cc42a447 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetSettings +# Snippet for GetSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetSettings_async_internal] +# [START logging_v2_generated_ConfigServiceV2_GetSettings_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_settings(): +async def sample_get_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetSettings_async_internal] +# [END logging_v2_generated_ConfigServiceV2_GetSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py index 66304337e6f8..e0409e255d33 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_settings_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetSettings +# Snippet for GetSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetSettings_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_GetSettings_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_settings(): +def sample_get_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetSettings_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_GetSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py index c4d3487fb5c8..53643669d446 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetSink +# Snippet for GetSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetSink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_GetSink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_sink(): +async def sample_get_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_sink(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetSink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_GetSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py index 5f575b014f5c..72be15ec0d5b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_sink_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetSink +# Snippet for GetSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetSink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_GetSink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_sink(): +def sample_get_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_sink(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetSink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_GetSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py index f82665334c8b..f34ca9b4ca54 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetView +# Snippet for GetView # NOTE: This snippet has been automatically generated for illustrative purposes only. 
# It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetView_async_internal] +# [START logging_v2_generated_ConfigServiceV2_GetView_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_view(): +async def sample_get_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_view(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetView_async_internal] +# [END logging_v2_generated_ConfigServiceV2_GetView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py index 20814792b201..a2249d9d0cbd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__get_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_get_view_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _GetView +# Snippet for GetView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__GetView_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_GetView_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_view(): +def sample_get_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_view(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__GetView_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_GetView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py index fb24733e17a9..86146b6a70f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListExclusions +# Snippet for ListExclusions # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListExclusions_async_internal] +# [START logging_v2_generated_ConfigServiceV2_ListExclusions_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__list_exclusions(): +async def sample_list_exclusions(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -50,4 +50,4 @@ async def sample__list_exclusions(): async for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListExclusions_async_internal] +# [END logging_v2_generated_ConfigServiceV2_ListExclusions_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py index 
24510000dd0e..f8a73223ac01 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListExclusions +# Snippet for ListExclusions # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListExclusions_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_ListExclusions_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__list_exclusions(): +def sample_list_exclusions(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -50,4 +50,4 @@ def sample__list_exclusions(): for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListExclusions_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_ListExclusions_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py index 2676cf38a306..6f1bd5b9ed90 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListLinks +# Snippet for ListLinks # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListLinks_async_internal] +# [START logging_v2_generated_ConfigServiceV2_ListLinks_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__list_links(): +async def sample_list_links(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -50,4 +50,4 @@ async def sample__list_links(): async for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListLinks_async_internal] +# [END logging_v2_generated_ConfigServiceV2_ListLinks_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py index e82b176c6c37..ed645aefa7b2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_links_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_links_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListLinks +# Snippet for ListLinks # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListLinks_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_ListLinks_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__list_links(): +def sample_list_links(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -50,4 +50,4 @@ def sample__list_links(): for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListLinks_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_ListLinks_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py index c26e55bd338f..7907bdee177c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _ListSinks +# Snippet for ListSinks # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListSinks_async_internal] +# [START logging_v2_generated_ConfigServiceV2_ListSinks_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__list_sinks(): +async def sample_list_sinks(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -50,4 +50,4 @@ async def sample__list_sinks(): async for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListSinks_async_internal] +# [END logging_v2_generated_ConfigServiceV2_ListSinks_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py index 915bf6c832e6..a3a8c24551df 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_sinks_sync_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_sinks_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListSinks +# Snippet for ListSinks # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListSinks_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_ListSinks_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__list_sinks(): +def sample_list_sinks(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -50,4 +50,4 @@ def sample__list_sinks(): for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListSinks_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_ListSinks_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py index 56f1054a8f1e..e37a37fc4c2a 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListViews +# Snippet for ListViews # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListViews_async_internal] +# [START logging_v2_generated_ConfigServiceV2_ListViews_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__list_views(): +async def sample_list_views(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -50,4 +50,4 @@ async def sample__list_views(): async for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListViews_async_internal] +# [END logging_v2_generated_ConfigServiceV2_ListViews_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py index 92b918941152..af7165fcd9c3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__list_views_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_list_views_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListViews +# Snippet for ListViews # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__ListViews_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_ListViews_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__list_views(): +def sample_list_views(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -50,4 +50,4 @@ def sample__list_views(): for response in page_result: print(response) -# [END logging_v2_generated_ConfigServiceV2__ListViews_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_ListViews_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py similarity index 87% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py index 7c63081b593c..ee425c0b293d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateCmekSettings +# Snippet for UpdateCmekSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_async_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__update_cmek_settings(): +async def sample_update_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__update_cmek_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_async_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py similarity index 87% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py index 6acb574f75c9..d65874e81c2b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py @@ -15,7 +15,7 @@ # # 
Generated code. DO NOT EDIT! # -# Snippet for _UpdateCmekSettings +# Snippet for UpdateCmekSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__update_cmek_settings(): +def sample_update_cmek_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__update_cmek_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py index 61b8f4075c67..e5ecd49b246f 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateExclusion +# Snippet for UpdateExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateExclusion_async_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__update_exclusion(): +async def sample_update_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__update_exclusion(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateExclusion_async_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py index 7fda4351a173..35e8e57f64d1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateExclusion +# Snippet for UpdateExclusion # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateExclusion_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__update_exclusion(): +def sample_update_exclusion(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -54,4 +54,4 @@ def sample__update_exclusion(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateExclusion_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py index 0e95402d4803..85b2289de5a7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateSettings +# Snippet for UpdateSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateSettings_async_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__update_settings(): +async def sample_update_settings(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__update_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateSettings_async_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py index 2ab3cee694e7..94d7c529145f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_settings_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_settings_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _UpdateSettings +# Snippet for UpdateSettings # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateSettings_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateSettings_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__update_settings(): +def sample_update_settings(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__update_settings(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateSettings_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateSettings_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py index d111947839b2..30aba394e4f8 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateSink +# Snippet for UpdateSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateSink_async_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__update_sink(): +async def sample_update_sink(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__update_sink(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateSink_async_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py similarity index 90% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py index ef13e3552050..dd0bc17a9152 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_sink_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_sink_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateSink +# Snippet for UpdateSink # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateSink_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateSink_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__update_sink(): +def sample_update_sink(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -54,4 +54,4 @@ def sample__update_sink(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateSink_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateSink_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py rename to 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py index aa58cb9ce0d8..7b0143d20812 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateView +# Snippet for UpdateView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateView_async_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateView_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__update_view(): +async def sample_update_view(): # Create a client client = logging_v2.BaseConfigServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__update_view(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateView_async_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateView_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py index d4cedbbdc1f3..d9701082d680 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2__update_view_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_config_service_v2_update_view_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateView +# Snippet for UpdateView # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_ConfigServiceV2__UpdateView_sync_internal] +# [START logging_v2_generated_ConfigServiceV2_UpdateView_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__update_view(): +def sample_update_view(): # Create a client client = logging_v2.BaseConfigServiceV2Client() @@ -49,4 +49,4 @@ def sample__update_view(): # Handle the response print(response) -# [END logging_v2_generated_ConfigServiceV2__UpdateView_sync_internal] +# [END logging_v2_generated_ConfigServiceV2_UpdateView_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py index 2cf02a3533f0..31a1abf2191a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _CreateLogMetric +# Snippet for CreateLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__CreateLogMetric_async_internal] +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__create_log_metric(): +async def sample_create_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__create_log_metric(): # Handle the response print(response) -# [END logging_v2_generated_MetricsServiceV2__CreateLogMetric_async_internal] +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py index 2d86ccb0f75d..c5ebd5a87288 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _CreateLogMetric +# Snippet for CreateLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__CreateLogMetric_sync_internal] +# [START logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__create_log_metric(): +def sample_create_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -54,4 +54,4 @@ def sample__create_log_metric(): # Handle the response print(response) -# [END logging_v2_generated_MetricsServiceV2__CreateLogMetric_sync_internal] +# [END logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async_internal.py similarity index 87% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async_internal.py index 4d1acdd83c79..129ecdfbbc7a 
100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteLogMetric +# Snippet for DeleteLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__DeleteLogMetric_async_internal] +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__delete_log_metric(): +async def sample_delete_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -47,4 +47,4 @@ async def sample__delete_log_metric(): await client._delete_log_metric(request=request) -# [END logging_v2_generated_MetricsServiceV2__DeleteLogMetric_async_internal] +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync_internal.py similarity index 87% rename from 
packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync_internal.py index e7b1e64c690e..aa5a3f23010e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_delete_log_metric_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _DeleteLogMetric +# Snippet for DeleteLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__DeleteLogMetric_sync_internal] +# [START logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__delete_log_metric(): +def sample_delete_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -47,4 +47,4 @@ def sample__delete_log_metric(): client._delete_log_metric(request=request) -# [END logging_v2_generated_MetricsServiceV2__DeleteLogMetric_sync_internal] +# [END logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py index a46b0c2685af..442c5d4ccc9f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _GetLogMetric +# Snippet for GetLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__GetLogMetric_async_internal] +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__get_log_metric(): +async def sample_get_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -49,4 +49,4 @@ async def sample__get_log_metric(): # Handle the response print(response) -# [END logging_v2_generated_MetricsServiceV2__GetLogMetric_async_internal] +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py index 0d0f38a06969..571bdc3799b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! 
# -# Snippet for _GetLogMetric +# Snippet for GetLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__GetLogMetric_sync_internal] +# [START logging_v2_generated_MetricsServiceV2_GetLogMetric_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__get_log_metric(): +def sample_get_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -49,4 +49,4 @@ def sample__get_log_metric(): # Handle the response print(response) -# [END logging_v2_generated_MetricsServiceV2__GetLogMetric_sync_internal] +# [END logging_v2_generated_MetricsServiceV2_GetLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py index d93f42fe9a69..82973e7b77a5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListLogMetrics +# Snippet for ListLogMetrics # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__ListLogMetrics_async_internal] +# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__list_log_metrics(): +async def sample_list_log_metrics(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -50,4 +50,4 @@ async def sample__list_log_metrics(): async for response in page_result: print(response) -# [END logging_v2_generated_MetricsServiceV2__ListLogMetrics_async_internal] +# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py index 
4e111ba58f08..34392032135d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _ListLogMetrics +# Snippet for ListLogMetrics # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__ListLogMetrics_sync_internal] +# [START logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__list_log_metrics(): +def sample_list_log_metrics(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -50,4 +50,4 @@ def sample__list_log_metrics(): for response in page_result: print(response) -# [END logging_v2_generated_MetricsServiceV2__ListLogMetrics_sync_internal] +# [END logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py similarity index 88% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py 
rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py index fa5699c778aa..8d1e95542024 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateLogMetric +# Snippet for UpdateLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__UpdateLogMetric_async_internal] +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async_internal] # This snippet has been automatically generated and should be regarded as a # code template only. 
# It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -async def sample__update_log_metric(): +async def sample_update_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2AsyncClient() @@ -54,4 +54,4 @@ async def sample__update_log_metric(): # Handle the response print(response) -# [END logging_v2_generated_MetricsServiceV2__UpdateLogMetric_async_internal] +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py similarity index 89% rename from packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py rename to packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py index aedb26d9a1d4..a1a8e5f9f6ad 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for _UpdateLogMetric +# Snippet for UpdateLogMetric # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-logging -# [START logging_v2_generated_MetricsServiceV2__UpdateLogMetric_sync_internal] +# [START logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync_internal] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,7 +34,7 @@ from google.cloud import logging_v2 -def sample__update_log_metric(): +def sample_update_log_metric(): # Create a client client = logging_v2.BaseMetricsServiceV2Client() @@ -54,4 +54,4 @@ def sample__update_log_metric(): # Handle the response print(response) -# [END logging_v2_generated_MetricsServiceV2__UpdateLogMetric_sync_internal] +# [END logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync_internal] diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json index 4c8ad2490e4f..55fe86894de2 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -19,19 +19,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.create_bucket_async", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._copy_log_entries", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucketAsync" + 
"shortName": "CopyLogEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateBucketRequest" + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" }, { "name": "retry", @@ -47,13 +47,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_bucket_async" + "shortName": "_copy_log_entries" }, - "description": "Sample for CreateBucketAsync", - "file": "logging_v2_generated_config_service_v2_create_bucket_async_async.py", + "description": "Sample for CopyLogEntries", + "file": "logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_async_internal", "segments": [ { "end": 56, @@ -86,7 +86,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" + "title": "logging_v2_generated_config_service_v2_copy_log_entries_async_internal.py" }, { "canonical": true, @@ -95,19 +95,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.create_bucket_async", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._copy_log_entries", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", + "fullName": "google.logging.v2.ConfigServiceV2.CopyLogEntries", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucketAsync" + "shortName": "CopyLogEntries" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateBucketRequest" + "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" }, { "name": "retry", @@ -123,13 +123,13 @@ } ], "resultType": 
"google.api_core.operation.Operation", - "shortName": "create_bucket_async" + "shortName": "_copy_log_entries" }, - "description": "Sample for CreateBucketAsync", - "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", + "description": "Sample for CopyLogEntries", + "file": "logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CopyLogEntries_sync_internal", "segments": [ { "end": 56, @@ -162,7 +162,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" + "title": "logging_v2_generated_config_service_v2_copy_log_entries_sync_internal.py" }, { "canonical": true, @@ -172,14 +172,14 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.create_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.create_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucket" + "shortName": "CreateBucketAsync" }, "parameters": [ { @@ -199,22 +199,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "create_bucket" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_bucket_async" }, - "description": "Sample for CreateBucket", - "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", + "description": "Sample for CreateBucketAsync", + "file": 
"logging_v2_generated_config_service_v2_create_bucket_async_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_async", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -229,17 +229,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 49, + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async_async.py" }, { "canonical": true, @@ -248,14 +248,14 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.create_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.create_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "CreateBucket" + "shortName": "CreateBucketAsync" }, "parameters": [ { @@ -275,22 +275,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "create_bucket" + "resultType": "google.api_core.operation.Operation", + "shortName": "create_bucket_async" }, - "description": "Sample for CreateBucket", - "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", + "description": "Sample for CreateBucketAsync", + "file": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucketAsync_sync", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -305,17 +305,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 49, + "end": 53, "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async_sync.py" }, { "canonical": true, @@ -325,19 +325,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.delete_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.create_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "CreateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -352,21 +352,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_bucket" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"logging_v2_generated_ConfigServiceV2_DeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_async", "segments": [ { - "end": 49, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 52, "start": 27, "type": "SHORT" }, @@ -376,20 +377,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_async.py" }, { "canonical": true, @@ -398,19 +401,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.delete_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.create_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "DeleteBucket" + "shortName": "CreateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteBucketRequest" + "type": "google.cloud.logging_v2.types.CreateBucketRequest" }, { "name": "retry", @@ -425,21 +428,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_bucket" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "create_bucket" }, - "description": "Sample for DeleteBucket", - "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", + "description": "Sample for CreateBucket", + "file": "logging_v2_generated_config_service_v2_create_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateBucket_sync", "segments": [ { - "end": 49, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 52, "start": 27, "type": "SHORT" }, @@ -449,20 +453,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_create_bucket_sync.py" }, { "canonical": true, @@ -472,19 +478,27 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.get_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "CreateExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" }, { "name": "retry", @@ -499,22 +513,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_create_exclusion" }, - "description": "Sample for GetBucket", - "file": 
"logging_v2_generated_config_service_v2_get_bucket_async.py", + "description": "Sample for CreateExclusion", + "file": "logging_v2_generated_config_service_v2_create_exclusion_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_async_internal", "segments": [ { - "end": 51, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 56, "start": 27, "type": "SHORT" }, @@ -524,22 +538,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_create_exclusion_async_internal.py" }, { "canonical": true, @@ -548,19 +562,27 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.get_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "GetBucket" + "shortName": "CreateExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetBucketRequest" + "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "exclusion", + "type": "google.cloud.logging_v2.types.LogExclusion" }, { "name": "retry", @@ -575,22 +597,22 @@ "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "get_bucket" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_create_exclusion" }, - "description": "Sample for GetBucket", - "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", + "description": "Sample for CreateExclusion", + "file": "logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateExclusion_sync_internal", "segments": [ { - "end": 51, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 56, "start": 27, "type": "SHORT" }, @@ -600,22 +622,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_create_exclusion_sync_internal.py" }, { "canonical": true, @@ -625,24 +647,32 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.list_buckets", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "CreateLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + 
"type": "google.cloud.logging_v2.types.CreateLinkRequest" }, { "name": "parent", "type": "str" }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -656,22 +686,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", - "shortName": "list_buckets" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "_create_link" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_async_internal", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -681,22 +711,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" + "title": "logging_v2_generated_config_service_v2_create_link_async_internal.py" }, { "canonical": true, @@ -705,24 +735,32 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.list_buckets", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_link", "method": { - "fullName": 
"google.logging.v2.ConfigServiceV2.ListBuckets", + "fullName": "google.logging.v2.ConfigServiceV2.CreateLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "ListBuckets" + "shortName": "CreateLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListBucketsRequest" + "type": "google.cloud.logging_v2.types.CreateLinkRequest" }, { "name": "parent", "type": "str" }, + { + "name": "link", + "type": "google.cloud.logging_v2.types.Link" + }, + { + "name": "link_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -736,22 +774,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", - "shortName": "list_buckets" + "resultType": "google.api_core.operation.Operation", + "shortName": "_create_link" }, - "description": "Sample for ListBuckets", - "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", + "description": "Sample for CreateLink", + "file": "logging_v2_generated_config_service_v2_create_link_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateLink_sync_internal", "segments": [ { - "end": 52, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 56, "start": 27, "type": "SHORT" }, @@ -761,22 +799,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" + "title": "logging_v2_generated_config_service_v2_create_link_sync_internal.py" }, { "canonical": true, @@ 
-786,19 +824,27 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.undelete_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UndeleteBucket" + "shortName": "CreateSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" }, { "name": "retry", @@ -813,21 +859,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "undelete_bucket" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_create_sink" }, - "description": "Sample for UndeleteBucket", - "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_async_internal", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -837,20 +884,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2_undelete_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_create_sink_async_internal.py" }, { "canonical": true, @@ -859,19 +908,27 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.undelete_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", + "fullName": "google.logging.v2.ConfigServiceV2.CreateSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UndeleteBucket" + "shortName": "CreateSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" + "type": "google.cloud.logging_v2.types.CreateSinkRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "sink", + "type": "google.cloud.logging_v2.types.LogSink" }, { "name": "retry", @@ -886,21 +943,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "undelete_bucket" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_create_sink" }, - "description": "Sample for UndeleteBucket", - "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", + "description": "Sample for CreateSink", + "file": "logging_v2_generated_config_service_v2_create_sink_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateSink_sync_internal", "segments": [ { - "end": 49, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 56, "start": 27, "type": "SHORT" }, @@ -910,20 +968,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 50, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 
46, + "end": 53, + "start": 51, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_create_sink_sync_internal.py" }, { "canonical": true, @@ -933,19 +993,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.update_bucket_async", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UpdateBucketAsync" + "shortName": "CreateView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + "type": "google.cloud.logging_v2.types.CreateViewRequest" }, { "name": "retry", @@ -960,22 +1020,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "update_bucket_async" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_create_view" }, - "description": "Sample for UpdateBucketAsync", - "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_CreateView_async_internal", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ 
-985,22 +1045,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" + "title": "logging_v2_generated_config_service_v2_create_view_async_internal.py" }, { "canonical": true, @@ -1009,19 +1069,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.update_bucket_async", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", + "fullName": "google.logging.v2.ConfigServiceV2.CreateView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UpdateBucketAsync" + "shortName": "CreateView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + "type": "google.cloud.logging_v2.types.CreateViewRequest" }, { "name": "retry", @@ -1036,22 +1096,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "update_bucket_async" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_create_view" }, - "description": "Sample for UpdateBucketAsync", - "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", + "description": "Sample for CreateView", + "file": "logging_v2_generated_config_service_v2_create_view_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_CreateView_sync_internal", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1061,22 +1121,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" + "title": "logging_v2_generated_config_service_v2_create_view_sync_internal.py" }, { "canonical": true, @@ -1086,19 +1146,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.update_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.delete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UpdateBucket" + "shortName": "DeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" }, { "name": "retry", @@ -1113,22 +1173,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "update_bucket" + "shortName": "delete_bucket" }, - "description": "Sample for UpdateBucket", - "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_async", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1143,17 +1202,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_update_bucket_async.py" + "title": "logging_v2_generated_config_service_v2_delete_bucket_async.py" }, { "canonical": true, @@ -1162,19 +1219,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.update_bucket", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.delete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "UpdateBucket" + "shortName": "DeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.UpdateBucketRequest" + "type": "google.cloud.logging_v2.types.DeleteBucketRequest" }, { "name": "retry", @@ -1189,22 +1246,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogBucket", - "shortName": "update_bucket" + "shortName": "delete_bucket" }, - "description": "Sample for UpdateBucket", - "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", + "description": "Sample for DeleteBucket", + "file": "logging_v2_generated_config_service_v2_delete_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", + 
"regionTag": "logging_v2_generated_ConfigServiceV2_DeleteBucket_sync", "segments": [ { - "end": 51, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1219,17 +1275,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2_update_bucket_sync.py" + "title": "logging_v2_generated_config_service_v2_delete_bucket_sync.py" }, { "canonical": true, @@ -1239,19 +1293,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._copy_log_entries", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CopyLogEntries", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CopyLogEntries" + "shortName": "DeleteExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1266,22 +1324,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "_copy_log_entries" + "shortName": "_delete_exclusion" }, - "description": "Sample for _CopyLogEntries", - "file": "logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py", + "description": "Sample for DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"logging_v2_generated_ConfigServiceV2__CopyLogEntries_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_async_internal", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1291,22 +1348,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__copy_log_entries_async_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_exclusion_async_internal.py" }, { "canonical": true, @@ -1315,19 +1370,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._copy_log_entries", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CopyLogEntries", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CopyLogEntries" + "shortName": "DeleteExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CopyLogEntriesRequest" + "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1342,22 +1401,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "_copy_log_entries" + "shortName": "_delete_exclusion" }, - "description": "Sample for _CopyLogEntries", - "file": "logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py", + "description": "Sample for 
DeleteExclusion", + "file": "logging_v2_generated_config_service_v2_delete_exclusion_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CopyLogEntries_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteExclusion_sync_internal", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1367,22 +1425,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__copy_log_entries_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_exclusion_sync_internal.py" }, { "canonical": true, @@ -1392,28 +1448,24 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_exclusion", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateExclusion" + "shortName": "DeleteLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateExclusionRequest" + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "exclusion", - "type": "google.cloud.logging_v2.types.LogExclusion" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1427,22 +1479,22 @@ "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "_create_exclusion" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "_delete_link" }, - "description": "Sample for _CreateExclusion", - "file": "logging_v2_generated_config_service_v2__create_exclusion_async_internal.py", + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateExclusion_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_async_internal", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1452,22 +1504,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_exclusion_async_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_link_async_internal.py" }, { "canonical": true, @@ -1476,28 +1528,24 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_exclusion", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateExclusion" + "shortName": "DeleteLink" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.logging_v2.types.CreateExclusionRequest" + "type": "google.cloud.logging_v2.types.DeleteLinkRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, - { - "name": "exclusion", - "type": "google.cloud.logging_v2.types.LogExclusion" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1511,22 +1559,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "_create_exclusion" + "resultType": "google.api_core.operation.Operation", + "shortName": "_delete_link" }, - "description": "Sample for _CreateExclusion", - "file": "logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py", + "description": "Sample for DeleteLink", + "file": "logging_v2_generated_config_service_v2_delete_link_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateExclusion_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteLink_sync_internal", "segments": [ { - "end": 56, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1536,22 +1584,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_exclusion_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_link_sync_internal.py" }, { "canonical": true, @@ -1561,30 +1609,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_link", + "fullName": 
"google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateLink", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateLink" + "shortName": "DeleteSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateLinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "link", - "type": "google.cloud.logging_v2.types.Link" + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" }, { - "name": "link_id", + "name": "sink_name", "type": "str" }, { @@ -1600,22 +1640,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "_create_link" + "shortName": "_delete_sink" }, - "description": "Sample for _CreateLink", - "file": "logging_v2_generated_config_service_v2__create_link_async_internal.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateLink_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_async_internal", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1625,22 +1664,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_link_async_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_async_internal.py" }, { "canonical": true, @@ 
-1649,30 +1686,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_link", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateLink", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateLink" + "shortName": "DeleteSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateLinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "link", - "type": "google.cloud.logging_v2.types.Link" + "type": "google.cloud.logging_v2.types.DeleteSinkRequest" }, { - "name": "link_id", + "name": "sink_name", "type": "str" }, { @@ -1688,22 +1717,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "_create_link" + "shortName": "_delete_sink" }, - "description": "Sample for _CreateLink", - "file": "logging_v2_generated_config_service_v2__create_link_sync_internal.py", + "description": "Sample for DeleteSink", + "file": "logging_v2_generated_config_service_v2_delete_sink_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateLink_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteSink_sync_internal", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1713,22 +1741,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" 
} ], - "title": "logging_v2_generated_config_service_v2__create_link_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_sink_sync_internal.py" }, { "canonical": true, @@ -1738,27 +1764,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_sink", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateSink" + "shortName": "DeleteView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "type": "google.cloud.logging_v2.types.DeleteViewRequest" }, { "name": "retry", @@ -1773,22 +1791,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "_create_sink" + "shortName": "_delete_view" }, - "description": "Sample for _CreateSink", - "file": "logging_v2_generated_config_service_v2__create_sink_async_internal.py", + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateSink_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_async_internal", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1798,22 +1815,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, 
"type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_sink_async_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_view_async_internal.py" }, { "canonical": true, @@ -1822,27 +1837,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_sink", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateSink", + "fullName": "google.logging.v2.ConfigServiceV2.DeleteView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateSink" + "shortName": "DeleteView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateSinkRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "sink", - "type": "google.cloud.logging_v2.types.LogSink" + "type": "google.cloud.logging_v2.types.DeleteViewRequest" }, { "name": "retry", @@ -1857,22 +1864,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "_create_sink" + "shortName": "_delete_view" }, - "description": "Sample for _CreateSink", - "file": "logging_v2_generated_config_service_v2__create_sink_sync_internal.py", + "description": "Sample for DeleteView", + "file": "logging_v2_generated_config_service_v2_delete_view_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateSink_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_DeleteView_sync_internal", "segments": [ { - "end": 56, + "end": 49, "start": 27, "type": "FULL" }, { - 
"end": 56, + "end": 49, "start": 27, "type": "SHORT" }, @@ -1882,22 +1888,20 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 50, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 51, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_sink_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_delete_view_sync_internal.py" }, { "canonical": true, @@ -1907,19 +1911,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._create_view", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.get_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateView" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -1934,22 +1938,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "_create_view" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for _CreateView", - "file": "logging_v2_generated_config_service_v2__create_view_async_internal.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__CreateView_async_internal", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_GetBucket_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1959,22 +1963,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_view_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_async.py" }, { "canonical": true, @@ -1983,19 +1987,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._create_view", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.get_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._CreateView", + "fullName": "google.logging.v2.ConfigServiceV2.GetBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_CreateView" + "shortName": "GetBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.CreateViewRequest" + "type": "google.cloud.logging_v2.types.GetBucketRequest" }, { "name": "retry", @@ -2010,22 +2014,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "_create_view" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "get_bucket" }, - "description": "Sample for _CreateView", - "file": "logging_v2_generated_config_service_v2__create_view_sync_internal.py", + "description": "Sample for GetBucket", + "file": "logging_v2_generated_config_service_v2_get_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "logging_v2_generated_ConfigServiceV2__CreateView_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetBucket_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2035,22 +2039,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__create_view_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_bucket_sync.py" }, { "canonical": true, @@ -2060,23 +2064,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_exclusion", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteExclusion" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -2091,21 +2091,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "_delete_exclusion" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "_get_cmek_settings" }, - "description": "Sample for _DeleteExclusion", - "file": "logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py", + 
"description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteExclusion_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_async_internal", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2120,15 +2121,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_exclusion_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_async_internal.py" }, { "canonical": true, @@ -2137,23 +2140,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_exclusion", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteExclusion" + "shortName": "GetCmekSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" }, { "name": "retry", @@ -2168,21 +2167,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "_delete_exclusion" + "resultType": "google.cloud.logging_v2.types.CmekSettings", + "shortName": "_get_cmek_settings" }, - "description": "Sample for _DeleteExclusion", - 
"file": "logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py", + "description": "Sample for GetCmekSettings", + "file": "logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteExclusion_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetCmekSettings_sync_internal", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2197,15 +2197,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_exclusion_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_cmek_settings_sync_internal.py" }, { "canonical": true, @@ -2215,19 +2217,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_link", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteLink", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteLink" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" }, { "name": "name", @@ -2246,22 +2248,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "_delete_link" + "resultType": "google.cloud.logging_v2.types.LogExclusion", + 
"shortName": "_get_exclusion" }, - "description": "Sample for _DeleteLink", - "file": "logging_v2_generated_config_service_v2__delete_link_async_internal.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteLink_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_async_internal", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2276,17 +2278,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_link_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_async_internal.py" }, { "canonical": true, @@ -2295,19 +2297,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_link", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteLink", + "fullName": "google.logging.v2.ConfigServiceV2.GetExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteLink" + "shortName": "GetExclusion" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteLinkRequest" + "type": "google.cloud.logging_v2.types.GetExclusionRequest" }, { "name": "name", @@ -2326,22 +2328,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "_delete_link" + 
"resultType": "google.cloud.logging_v2.types.LogExclusion", + "shortName": "_get_exclusion" }, - "description": "Sample for _DeleteLink", - "file": "logging_v2_generated_config_service_v2__delete_link_sync_internal.py", + "description": "Sample for GetExclusion", + "file": "logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteLink_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetExclusion_sync_internal", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2356,17 +2358,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_link_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_exclusion_sync_internal.py" }, { "canonical": true, @@ -2376,22 +2378,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_sink", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteSink" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" }, { - "name": "sink_name", + "name": "name", "type": "str" }, { @@ -2407,21 +2409,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
- "shortName": "_delete_sink" + "resultType": "google.cloud.logging_v2.types.Link", + "shortName": "_get_link" }, - "description": "Sample for _DeleteSink", - "file": "logging_v2_generated_config_service_v2__delete_sink_async_internal.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteSink_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_async_internal", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2436,15 +2439,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_sink_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_link_async_internal.py" }, { "canonical": true, @@ -2453,22 +2458,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_sink", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_link", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteSink", + "fullName": "google.logging.v2.ConfigServiceV2.GetLink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteSink" + "shortName": "GetLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteSinkRequest" + "type": "google.cloud.logging_v2.types.GetLinkRequest" }, { - "name": "sink_name", + "name": "name", "type": "str" }, { @@ -2484,21 +2489,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "_delete_sink" + 
"resultType": "google.cloud.logging_v2.types.Link", + "shortName": "_get_link" }, - "description": "Sample for _DeleteSink", - "file": "logging_v2_generated_config_service_v2__delete_sink_sync_internal.py", + "description": "Sample for GetLink", + "file": "logging_v2_generated_config_service_v2_get_link_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteSink_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetLink_sync_internal", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2513,15 +2519,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_sink_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_link_sync_internal.py" }, { "canonical": true, @@ -2531,19 +2539,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._delete_view", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteView" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2558,21 +2570,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "_delete_view" + 
"resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "_get_settings" }, - "description": "Sample for _DeleteView", - "file": "logging_v2_generated_config_service_v2__delete_view_async_internal.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteView_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_async_internal", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2587,15 +2600,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_view_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_settings_async_internal.py" }, { "canonical": true, @@ -2604,19 +2619,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._delete_view", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._DeleteView", + "fullName": "google.logging.v2.ConfigServiceV2.GetSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_DeleteView" + "shortName": "GetSettings" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.DeleteViewRequest" + "type": "google.cloud.logging_v2.types.GetSettingsRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -2631,21 +2650,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "_delete_view" + 
"resultType": "google.cloud.logging_v2.types.Settings", + "shortName": "_get_settings" }, - "description": "Sample for _DeleteView", - "file": "logging_v2_generated_config_service_v2__delete_view_sync_internal.py", + "description": "Sample for GetSettings", + "file": "logging_v2_generated_config_service_v2_get_settings_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__DeleteView_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSettings_sync_internal", "segments": [ { - "end": 49, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 51, "start": 27, "type": "SHORT" }, @@ -2660,15 +2680,17 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__delete_view_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_settings_sync_internal.py" }, { "canonical": true, @@ -2678,19 +2700,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_cmek_settings", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetCmekSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -2705,14 +2731,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - 
"resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "_get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": "_get_sink" }, - "description": "Sample for _GetCmekSettings", - "file": "logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetCmekSettings_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_async_internal", "segments": [ { "end": 51, @@ -2745,7 +2771,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_cmek_settings_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_sink_async_internal.py" }, { "canonical": true, @@ -2754,19 +2780,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_cmek_settings", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.GetSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetCmekSettings" + "shortName": "GetSink" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetCmekSettingsRequest" + "type": "google.cloud.logging_v2.types.GetSinkRequest" + }, + { + "name": "sink_name", + "type": "str" }, { "name": "retry", @@ -2781,14 +2811,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.CmekSettings", - "shortName": "_get_cmek_settings" + "resultType": "google.cloud.logging_v2.types.LogSink", + "shortName": 
"_get_sink" }, - "description": "Sample for _GetCmekSettings", - "file": "logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py", + "description": "Sample for GetSink", + "file": "logging_v2_generated_config_service_v2_get_sink_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetCmekSettings_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetSink_sync_internal", "segments": [ { "end": 51, @@ -2821,7 +2851,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_cmek_settings_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_sink_sync_internal.py" }, { "canonical": true, @@ -2831,23 +2861,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_exclusion", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetExclusion" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -2862,14 +2888,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "_get_exclusion" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_get_view" }, - "description": "Sample for _GetExclusion", - "file": "logging_v2_generated_config_service_v2__get_exclusion_async_internal.py", + "description": "Sample for 
GetView", + "file": "logging_v2_generated_config_service_v2_get_view_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetExclusion_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_async_internal", "segments": [ { "end": 51, @@ -2902,7 +2928,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_exclusion_async_internal.py" + "title": "logging_v2_generated_config_service_v2_get_view_async_internal.py" }, { "canonical": true, @@ -2911,23 +2937,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_exclusion", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.GetView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetExclusion" + "shortName": "GetView" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetExclusionRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.logging_v2.types.GetViewRequest" }, { "name": "retry", @@ -2942,14 +2964,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogExclusion", - "shortName": "_get_exclusion" + "resultType": "google.cloud.logging_v2.types.LogView", + "shortName": "_get_view" }, - "description": "Sample for _GetExclusion", - "file": "logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py", + "description": "Sample for GetView", + "file": "logging_v2_generated_config_service_v2_get_view_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"logging_v2_generated_ConfigServiceV2__GetExclusion_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_GetView_sync_internal", "segments": [ { "end": 51, @@ -2982,7 +3004,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_exclusion_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_get_view_sync_internal.py" }, { "canonical": true, @@ -2992,22 +3014,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_link", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetLink", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetLink" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetLinkRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3023,22 +3045,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.Link", - "shortName": "_get_link" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsAsyncPager", + "shortName": "list_buckets" }, - "description": "Sample for _GetLink", - "file": "logging_v2_generated_config_service_v2__get_link_async_internal.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetLink_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_async", "segments": [ { - "end": 51, + "end": 
52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3058,12 +3080,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_link_async_internal.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_async.py" }, { "canonical": true, @@ -3072,22 +3094,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_link", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.list_buckets", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetLink", + "fullName": "google.logging.v2.ConfigServiceV2.ListBuckets", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetLink" + "shortName": "ListBuckets" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetLinkRequest" + "type": "google.cloud.logging_v2.types.ListBucketsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3103,22 +3125,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.Link", - "shortName": "_get_link" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListBucketsPager", + "shortName": "list_buckets" }, - "description": "Sample for _GetLink", - "file": "logging_v2_generated_config_service_v2__get_link_sync_internal.py", + "description": "Sample for ListBuckets", + "file": "logging_v2_generated_config_service_v2_list_buckets_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetLink_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListBuckets_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - 
"end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3138,12 +3160,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_link_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_list_buckets_sync.py" }, { "canonical": true, @@ -3153,22 +3175,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_settings", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetSettings", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetSettings" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSettingsRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3184,22 +3206,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.Settings", - "shortName": "_get_settings" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsAsyncPager", + "shortName": "_list_exclusions" }, - "description": "Sample for _GetSettings", - "file": "logging_v2_generated_config_service_v2__get_settings_async_internal.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetSettings_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListExclusions_async_internal", 
"segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3219,12 +3241,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_settings_async_internal.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_async_internal.py" }, { "canonical": true, @@ -3233,22 +3255,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_settings", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_exclusions", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetSettings", + "fullName": "google.logging.v2.ConfigServiceV2.ListExclusions", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetSettings" + "shortName": "ListExclusions" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSettingsRequest" + "type": "google.cloud.logging_v2.types.ListExclusionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -3264,22 +3286,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.Settings", - "shortName": "_get_settings" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListExclusionsPager", + "shortName": "_list_exclusions" }, - "description": "Sample for _GetSettings", - "file": "logging_v2_generated_config_service_v2__get_settings_sync_internal.py", + "description": "Sample for ListExclusions", + "file": "logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetSettings_sync_internal", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListExclusions_sync_internal", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3299,12 +3321,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_settings_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_list_exclusions_sync_internal.py" }, { "canonical": true, @@ -3314,22 +3336,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_sink", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetSink" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { - "name": "sink_name", + "name": "parent", "type": "str" }, { @@ -3345,22 +3367,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "_get_sink" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksAsyncPager", + "shortName": "_list_links" }, - "description": "Sample for _GetSink", - "file": "logging_v2_generated_config_service_v2__get_sink_async_internal.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetSink_async_internal", 
+ "regionTag": "logging_v2_generated_ConfigServiceV2_ListLinks_async_internal", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3380,12 +3402,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_sink_async_internal.py" + "title": "logging_v2_generated_config_service_v2_list_links_async_internal.py" }, { "canonical": true, @@ -3394,22 +3416,22 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_sink", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_links", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetSink", + "fullName": "google.logging.v2.ConfigServiceV2.ListLinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetSink" + "shortName": "ListLinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetSinkRequest" + "type": "google.cloud.logging_v2.types.ListLinksRequest" }, { - "name": "sink_name", + "name": "parent", "type": "str" }, { @@ -3425,22 +3447,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogSink", - "shortName": "_get_sink" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListLinksPager", + "shortName": "_list_links" }, - "description": "Sample for _GetSink", - "file": "logging_v2_generated_config_service_v2__get_sink_sync_internal.py", + "description": "Sample for ListLinks", + "file": "logging_v2_generated_config_service_v2_list_links_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetSink_sync_internal", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListLinks_sync_internal", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3460,12 +3482,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_sink_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_list_links_sync_internal.py" }, { "canonical": true, @@ -3475,19 +3497,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._get_view", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_sinks", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetView" + "shortName": "ListSinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -3502,22 +3528,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "_get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksAsyncPager", + "shortName": "_list_sinks" }, - "description": "Sample for _GetView", - "file": "logging_v2_generated_config_service_v2__get_view_async_internal.py", + "description": "Sample for ListSinks", + "file": "logging_v2_generated_config_service_v2_list_sinks_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetView_async_internal", + 
"regionTag": "logging_v2_generated_ConfigServiceV2_ListSinks_async_internal", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3537,12 +3563,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_view_async_internal.py" + "title": "logging_v2_generated_config_service_v2_list_sinks_async_internal.py" }, { "canonical": true, @@ -3551,19 +3577,23 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._get_view", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_sinks", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._GetView", + "fullName": "google.logging.v2.ConfigServiceV2.ListSinks", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_GetView" + "shortName": "ListSinks" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.GetViewRequest" + "type": "google.cloud.logging_v2.types.ListSinksRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -3578,22 +3608,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.types.LogView", - "shortName": "_get_view" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListSinksPager", + "shortName": "_list_sinks" }, - "description": "Sample for _GetView", - "file": "logging_v2_generated_config_service_v2__get_view_sync_internal.py", + "description": "Sample for ListSinks", + "file": "logging_v2_generated_config_service_v2_list_sinks_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__GetView_sync_internal", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListSinks_sync_internal", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -3613,12 +3643,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__get_view_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_list_sinks_sync_internal.py" }, { "canonical": true, @@ -3628,19 +3658,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_exclusions", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_views", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListExclusions" + "shortName": "ListViews" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListViewsRequest" }, { "name": "parent", @@ -3659,14 +3689,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsAsyncPager", - "shortName": "_list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsAsyncPager", + "shortName": "_list_views" }, - "description": "Sample for _ListExclusions", - "file": "logging_v2_generated_config_service_v2__list_exclusions_async_internal.py", + "description": "Sample for ListViews", + "file": "logging_v2_generated_config_service_v2_list_views_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"logging_v2_generated_ConfigServiceV2__ListExclusions_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_ListViews_async_internal", "segments": [ { "end": 52, @@ -3699,7 +3729,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_exclusions_async_internal.py" + "title": "logging_v2_generated_config_service_v2_list_views_async_internal.py" }, { "canonical": true, @@ -3708,19 +3738,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_exclusions", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_views", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListExclusions", + "fullName": "google.logging.v2.ConfigServiceV2.ListViews", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListExclusions" + "shortName": "ListViews" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListExclusionsRequest" + "type": "google.cloud.logging_v2.types.ListViewsRequest" }, { "name": "parent", @@ -3739,14 +3769,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListExclusionsPager", - "shortName": "_list_exclusions" + "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers.ListViewsPager", + "shortName": "_list_views" }, - "description": "Sample for _ListExclusions", - "file": "logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py", + "description": "Sample for ListViews", + "file": "logging_v2_generated_config_service_v2_list_views_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListExclusions_sync_internal", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_ListViews_sync_internal", "segments": [ { "end": 52, @@ -3779,7 +3809,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_exclusions_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_list_views_sync_internal.py" }, { "canonical": true, @@ -3789,23 +3819,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_links", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.undelete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListLinks", + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListLinks" + "shortName": "UndeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListLinksRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" }, { "name": "retry", @@ -3820,22 +3846,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksAsyncPager", - "shortName": "_list_links" + "shortName": "undelete_bucket" }, - "description": "Sample for _ListLinks", - "file": "logging_v2_generated_config_service_v2__list_links_async_internal.py", + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListLinks_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": 
"SHORT" }, @@ -3850,17 +3875,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_links_async_internal.py" + "title": "logging_v2_generated_config_service_v2_undelete_bucket_async.py" }, { "canonical": true, @@ -3869,23 +3892,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_links", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.undelete_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListLinks", + "fullName": "google.logging.v2.ConfigServiceV2.UndeleteBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListLinks" + "shortName": "UndeleteBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListLinksRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UndeleteBucketRequest" }, { "name": "retry", @@ -3900,22 +3919,21 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListLinksPager", - "shortName": "_list_links" + "shortName": "undelete_bucket" }, - "description": "Sample for _ListLinks", - "file": "logging_v2_generated_config_service_v2__list_links_sync_internal.py", + "description": "Sample for UndeleteBucket", + "file": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListLinks_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UndeleteBucket_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, 
"start": 27, "type": "SHORT" }, @@ -3930,17 +3948,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_links_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_undelete_bucket_sync.py" }, { "canonical": true, @@ -3950,23 +3966,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_sinks", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.update_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListSinks", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListSinks" + "shortName": "UpdateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListSinksRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -3981,22 +3993,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksAsyncPager", - "shortName": "_list_sinks" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_bucket_async" }, - "description": "Sample for _ListSinks", - "file": "logging_v2_generated_config_service_v2__list_sinks_async_internal.py", + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListSinks_async_internal", + "regionTag": 
"logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4011,17 +4023,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_sinks_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async_async.py" }, { "canonical": true, @@ -4030,23 +4042,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_sinks", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.update_bucket_async", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListSinks", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucketAsync", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListSinks" + "shortName": "UpdateBucketAsync" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListSinksRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -4061,22 +4069,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListSinksPager", - "shortName": "_list_sinks" + "resultType": "google.api_core.operation.Operation", + "shortName": "update_bucket_async" }, - "description": "Sample for _ListSinks", - "file": "logging_v2_generated_config_service_v2__list_sinks_sync_internal.py", + "description": "Sample for UpdateBucketAsync", + "file": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListSinks_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucketAsync_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4091,17 +4099,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_sinks_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async_sync.py" }, { "canonical": true, @@ -4111,23 +4119,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient", "shortName": "BaseConfigServiceV2AsyncClient" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._list_views", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient.update_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListViews", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListViews" + "shortName": "UpdateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListViewsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -4142,22 +4146,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsAsyncPager", - "shortName": "_list_views" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, - "description": "Sample for _ListViews", - "file": "logging_v2_generated_config_service_v2__list_views_async_internal.py", + 
"description": "Sample for UpdateBucket", + "file": "logging_v2_generated_config_service_v2_update_bucket_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListViews_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4177,12 +4181,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_views_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_async.py" }, { "canonical": true, @@ -4191,23 +4195,19 @@ "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client", "shortName": "BaseConfigServiceV2Client" }, - "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._list_views", + "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client.update_bucket", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._ListViews", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateBucket", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_ListViews" + "shortName": "UpdateBucket" }, "parameters": [ { "name": "request", - "type": "google.cloud.logging_v2.types.ListViewsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.logging_v2.types.UpdateBucketRequest" }, { "name": "retry", @@ -4222,22 +4222,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.config_service_v2.pagers._ListViewsPager", - "shortName": "_list_views" + "resultType": "google.cloud.logging_v2.types.LogBucket", + "shortName": "update_bucket" }, - "description": "Sample for _ListViews", - "file": "logging_v2_generated_config_service_v2__list_views_sync_internal.py", + 
"description": "Sample for UpdateBucket", + "file": "logging_v2_generated_config_service_v2_update_bucket_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__ListViews_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateBucket_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -4257,12 +4257,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__list_views_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_bucket_sync.py" }, { "canonical": true, @@ -4274,12 +4274,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateCmekSettings" + "shortName": "UpdateCmekSettings" }, "parameters": [ { @@ -4302,11 +4302,11 @@ "resultType": "google.cloud.logging_v2.types.CmekSettings", "shortName": "_update_cmek_settings" }, - "description": "Sample for _UpdateCmekSettings", - "file": "logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py", + "description": "Sample for UpdateCmekSettings", + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_async_internal", "segments": [ { "end": 51, @@ -4339,7 +4339,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2__update_cmek_settings_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_async_internal.py" }, { "canonical": true, @@ -4350,12 +4350,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_cmek_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateCmekSettings", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateCmekSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateCmekSettings" + "shortName": "UpdateCmekSettings" }, "parameters": [ { @@ -4378,11 +4378,11 @@ "resultType": "google.cloud.logging_v2.types.CmekSettings", "shortName": "_update_cmek_settings" }, - "description": "Sample for _UpdateCmekSettings", - "file": "logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py", + "description": "Sample for UpdateCmekSettings", + "file": "logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateCmekSettings_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateCmekSettings_sync_internal", "segments": [ { "end": 51, @@ -4415,7 +4415,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_cmek_settings_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_cmek_settings_sync_internal.py" }, { "canonical": true, @@ -4427,12 +4427,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateExclusion" + "shortName": "UpdateExclusion" 
}, "parameters": [ { @@ -4467,11 +4467,11 @@ "resultType": "google.cloud.logging_v2.types.LogExclusion", "shortName": "_update_exclusion" }, - "description": "Sample for _UpdateExclusion", - "file": "logging_v2_generated_config_service_v2__update_exclusion_async_internal.py", + "description": "Sample for UpdateExclusion", + "file": "logging_v2_generated_config_service_v2_update_exclusion_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateExclusion_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_async_internal", "segments": [ { "end": 56, @@ -4504,7 +4504,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_exclusion_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_exclusion_async_internal.py" }, { "canonical": true, @@ -4515,12 +4515,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_exclusion", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateExclusion", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateExclusion", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateExclusion" + "shortName": "UpdateExclusion" }, "parameters": [ { @@ -4555,11 +4555,11 @@ "resultType": "google.cloud.logging_v2.types.LogExclusion", "shortName": "_update_exclusion" }, - "description": "Sample for _UpdateExclusion", - "file": "logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py", + "description": "Sample for UpdateExclusion", + "file": "logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateExclusion_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateExclusion_sync_internal", "segments": [ { "end": 56, @@ 
-4592,7 +4592,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_exclusion_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_exclusion_sync_internal.py" }, { "canonical": true, @@ -4604,12 +4604,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateSettings", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateSettings" + "shortName": "UpdateSettings" }, "parameters": [ { @@ -4640,11 +4640,11 @@ "resultType": "google.cloud.logging_v2.types.Settings", "shortName": "_update_settings" }, - "description": "Sample for _UpdateSettings", - "file": "logging_v2_generated_config_service_v2__update_settings_async_internal.py", + "description": "Sample for UpdateSettings", + "file": "logging_v2_generated_config_service_v2_update_settings_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSettings_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_async_internal", "segments": [ { "end": 51, @@ -4677,7 +4677,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_settings_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_settings_async_internal.py" }, { "canonical": true, @@ -4688,12 +4688,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_settings", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateSettings", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSettings", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateSettings" + "shortName": "UpdateSettings" }, 
"parameters": [ { @@ -4724,11 +4724,11 @@ "resultType": "google.cloud.logging_v2.types.Settings", "shortName": "_update_settings" }, - "description": "Sample for _UpdateSettings", - "file": "logging_v2_generated_config_service_v2__update_settings_sync_internal.py", + "description": "Sample for UpdateSettings", + "file": "logging_v2_generated_config_service_v2_update_settings_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSettings_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSettings_sync_internal", "segments": [ { "end": 51, @@ -4761,7 +4761,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_settings_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_settings_sync_internal.py" }, { "canonical": true, @@ -4773,12 +4773,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateSink", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateSink" + "shortName": "UpdateSink" }, "parameters": [ { @@ -4813,11 +4813,11 @@ "resultType": "google.cloud.logging_v2.types.LogSink", "shortName": "_update_sink" }, - "description": "Sample for _UpdateSink", - "file": "logging_v2_generated_config_service_v2__update_sink_async_internal.py", + "description": "Sample for UpdateSink", + "file": "logging_v2_generated_config_service_v2_update_sink_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSink_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_async_internal", "segments": [ { "end": 56, @@ -4850,7 +4850,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"logging_v2_generated_config_service_v2__update_sink_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_sink_async_internal.py" }, { "canonical": true, @@ -4861,12 +4861,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_sink", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateSink", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateSink", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateSink" + "shortName": "UpdateSink" }, "parameters": [ { @@ -4901,11 +4901,11 @@ "resultType": "google.cloud.logging_v2.types.LogSink", "shortName": "_update_sink" }, - "description": "Sample for _UpdateSink", - "file": "logging_v2_generated_config_service_v2__update_sink_sync_internal.py", + "description": "Sample for UpdateSink", + "file": "logging_v2_generated_config_service_v2_update_sink_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateSink_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateSink_sync_internal", "segments": [ { "end": 56, @@ -4938,7 +4938,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_sink_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_sink_sync_internal.py" }, { "canonical": true, @@ -4950,12 +4950,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2AsyncClient._update_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateView", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateView" + "shortName": "UpdateView" }, "parameters": [ { @@ -4978,11 +4978,11 @@ "resultType": "google.cloud.logging_v2.types.LogView", "shortName": "_update_view" }, - "description": "Sample for 
_UpdateView", - "file": "logging_v2_generated_config_service_v2__update_view_async_internal.py", + "description": "Sample for UpdateView", + "file": "logging_v2_generated_config_service_v2_update_view_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateView_async_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_async_internal", "segments": [ { "end": 51, @@ -5015,7 +5015,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_view_async_internal.py" + "title": "logging_v2_generated_config_service_v2_update_view_async_internal.py" }, { "canonical": true, @@ -5026,12 +5026,12 @@ }, "fullName": "google.cloud.logging_v2.BaseConfigServiceV2Client._update_view", "method": { - "fullName": "google.logging.v2.ConfigServiceV2._UpdateView", + "fullName": "google.logging.v2.ConfigServiceV2.UpdateView", "service": { "fullName": "google.logging.v2.ConfigServiceV2", "shortName": "ConfigServiceV2" }, - "shortName": "_UpdateView" + "shortName": "UpdateView" }, "parameters": [ { @@ -5054,11 +5054,11 @@ "resultType": "google.cloud.logging_v2.types.LogView", "shortName": "_update_view" }, - "description": "Sample for _UpdateView", - "file": "logging_v2_generated_config_service_v2__update_view_sync_internal.py", + "description": "Sample for UpdateView", + "file": "logging_v2_generated_config_service_v2_update_view_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_ConfigServiceV2__UpdateView_sync_internal", + "regionTag": "logging_v2_generated_ConfigServiceV2_UpdateView_sync_internal", "segments": [ { "end": 51, @@ -5091,7 +5091,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_config_service_v2__update_view_sync_internal.py" + "title": "logging_v2_generated_config_service_v2_update_view_sync_internal.py" }, { "canonical": true, @@ -6087,12 +6087,12 @@ }, "fullName": 
"google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._create_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._CreateLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_CreateLogMetric" + "shortName": "CreateLogMetric" }, "parameters": [ { @@ -6123,11 +6123,11 @@ "resultType": "google.cloud.logging_v2.types.LogMetric", "shortName": "_create_log_metric" }, - "description": "Sample for _CreateLogMetric", - "file": "logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py", + "description": "Sample for CreateLogMetric", + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__CreateLogMetric_async_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_async_internal", "segments": [ { "end": 56, @@ -6160,7 +6160,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__create_log_metric_async_internal.py" + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_async_internal.py" }, { "canonical": true, @@ -6171,12 +6171,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._create_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._CreateLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.CreateLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_CreateLogMetric" + "shortName": "CreateLogMetric" }, "parameters": [ { @@ -6207,11 +6207,11 @@ "resultType": "google.cloud.logging_v2.types.LogMetric", "shortName": "_create_log_metric" }, - "description": "Sample for _CreateLogMetric", - "file": 
"logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py", + "description": "Sample for CreateLogMetric", + "file": "logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__CreateLogMetric_sync_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_CreateLogMetric_sync_internal", "segments": [ { "end": 56, @@ -6244,7 +6244,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__create_log_metric_sync_internal.py" + "title": "logging_v2_generated_metrics_service_v2_create_log_metric_sync_internal.py" }, { "canonical": true, @@ -6256,12 +6256,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._delete_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._DeleteLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_DeleteLogMetric" + "shortName": "DeleteLogMetric" }, "parameters": [ { @@ -6287,11 +6287,11 @@ ], "shortName": "_delete_log_metric" }, - "description": "Sample for _DeleteLogMetric", - "file": "logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py", + "description": "Sample for DeleteLogMetric", + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__DeleteLogMetric_async_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_async_internal", "segments": [ { "end": 49, @@ -6322,7 +6322,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__delete_log_metric_async_internal.py" + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_async_internal.py" }, 
{ "canonical": true, @@ -6333,12 +6333,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._delete_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._DeleteLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.DeleteLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_DeleteLogMetric" + "shortName": "DeleteLogMetric" }, "parameters": [ { @@ -6364,11 +6364,11 @@ ], "shortName": "_delete_log_metric" }, - "description": "Sample for _DeleteLogMetric", - "file": "logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py", + "description": "Sample for DeleteLogMetric", + "file": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__DeleteLogMetric_sync_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_DeleteLogMetric_sync_internal", "segments": [ { "end": 49, @@ -6399,7 +6399,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__delete_log_metric_sync_internal.py" + "title": "logging_v2_generated_metrics_service_v2_delete_log_metric_sync_internal.py" }, { "canonical": true, @@ -6411,12 +6411,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._get_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._GetLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_GetLogMetric" + "shortName": "GetLogMetric" }, "parameters": [ { @@ -6443,11 +6443,11 @@ "resultType": "google.cloud.logging_v2.types.LogMetric", "shortName": "_get_log_metric" }, - "description": "Sample for _GetLogMetric", - "file": "logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py", + 
"description": "Sample for GetLogMetric", + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__GetLogMetric_async_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_async_internal", "segments": [ { "end": 51, @@ -6480,7 +6480,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__get_log_metric_async_internal.py" + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_async_internal.py" }, { "canonical": true, @@ -6491,12 +6491,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._get_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._GetLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.GetLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_GetLogMetric" + "shortName": "GetLogMetric" }, "parameters": [ { @@ -6523,11 +6523,11 @@ "resultType": "google.cloud.logging_v2.types.LogMetric", "shortName": "_get_log_metric" }, - "description": "Sample for _GetLogMetric", - "file": "logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py", + "description": "Sample for GetLogMetric", + "file": "logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__GetLogMetric_sync_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_GetLogMetric_sync_internal", "segments": [ { "end": 51, @@ -6560,7 +6560,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__get_log_metric_sync_internal.py" + "title": "logging_v2_generated_metrics_service_v2_get_log_metric_sync_internal.py" }, { "canonical": true, @@ -6572,12 +6572,12 @@ }, "fullName": 
"google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._list_log_metrics", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._ListLogMetrics", + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_ListLogMetrics" + "shortName": "ListLogMetrics" }, "parameters": [ { @@ -6601,14 +6601,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsAsyncPager", + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsAsyncPager", "shortName": "_list_log_metrics" }, - "description": "Sample for _ListLogMetrics", - "file": "logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py", + "description": "Sample for ListLogMetrics", + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__ListLogMetrics_async_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_async_internal", "segments": [ { "end": 52, @@ -6641,7 +6641,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__list_log_metrics_async_internal.py" + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_async_internal.py" }, { "canonical": true, @@ -6652,12 +6652,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._list_log_metrics", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._ListLogMetrics", + "fullName": "google.logging.v2.MetricsServiceV2.ListLogMetrics", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_ListLogMetrics" + "shortName": "ListLogMetrics" }, "parameters": [ { @@ -6681,14 +6681,14 @@ "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers._ListLogMetricsPager", + "resultType": "google.cloud.logging_v2.services.metrics_service_v2.pagers.ListLogMetricsPager", "shortName": "_list_log_metrics" }, - "description": "Sample for _ListLogMetrics", - "file": "logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py", + "description": "Sample for ListLogMetrics", + "file": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__ListLogMetrics_sync_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_ListLogMetrics_sync_internal", "segments": [ { "end": 52, @@ -6721,7 +6721,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__list_log_metrics_sync_internal.py" + "title": "logging_v2_generated_metrics_service_v2_list_log_metrics_sync_internal.py" }, { "canonical": true, @@ -6733,12 +6733,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2AsyncClient._update_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._UpdateLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_UpdateLogMetric" + "shortName": "UpdateLogMetric" }, "parameters": [ { @@ -6769,11 +6769,11 @@ "resultType": "google.cloud.logging_v2.types.LogMetric", "shortName": "_update_log_metric" }, - "description": "Sample for _UpdateLogMetric", - "file": "logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py", + "description": "Sample for UpdateLogMetric", + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"logging_v2_generated_MetricsServiceV2__UpdateLogMetric_async_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_async_internal", "segments": [ { "end": 56, @@ -6806,7 +6806,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__update_log_metric_async_internal.py" + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_async_internal.py" }, { "canonical": true, @@ -6817,12 +6817,12 @@ }, "fullName": "google.cloud.logging_v2.BaseMetricsServiceV2Client._update_log_metric", "method": { - "fullName": "google.logging.v2.MetricsServiceV2._UpdateLogMetric", + "fullName": "google.logging.v2.MetricsServiceV2.UpdateLogMetric", "service": { "fullName": "google.logging.v2.MetricsServiceV2", "shortName": "MetricsServiceV2" }, - "shortName": "_UpdateLogMetric" + "shortName": "UpdateLogMetric" }, "parameters": [ { @@ -6853,11 +6853,11 @@ "resultType": "google.cloud.logging_v2.types.LogMetric", "shortName": "_update_log_metric" }, - "description": "Sample for _UpdateLogMetric", - "file": "logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py", + "description": "Sample for UpdateLogMetric", + "file": "logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "logging_v2_generated_MetricsServiceV2__UpdateLogMetric_sync_internal", + "regionTag": "logging_v2_generated_MetricsServiceV2_UpdateLogMetric_sync_internal", "segments": [ { "end": 56, @@ -6890,7 +6890,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "logging_v2_generated_metrics_service_v2__update_log_metric_sync_internal.py" + "title": "logging_v2_generated_metrics_service_v2_update_log_metric_sync_internal.py" } ] } diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py 
index a4d2fe8003ca..30efcd1cd080 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py @@ -39,48 +39,48 @@ def partition( class loggingCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - '_copy_log_entries': ('name', 'destination', 'filter', ), - '_create_exclusion': ('parent', 'exclusion', ), - '_create_link': ('parent', 'link', 'link_id', ), - '_create_log_metric': ('parent', 'metric', ), - '_create_sink': ('parent', 'sink', 'unique_writer_identity', ), - '_create_view': ('parent', 'view_id', 'view', ), - '_delete_exclusion': ('name', ), - '_delete_link': ('name', ), - '_delete_log_metric': ('metric_name', ), - '_delete_sink': ('sink_name', ), - '_delete_view': ('name', ), - '_get_cmek_settings': ('name', ), - '_get_exclusion': ('name', ), - '_get_link': ('name', ), - '_get_log_metric': ('metric_name', ), - '_get_settings': ('name', ), - '_get_sink': ('sink_name', ), - '_get_view': ('name', ), - '_list_exclusions': ('parent', 'page_token', 'page_size', ), - '_list_links': ('parent', 'page_token', 'page_size', ), - '_list_log_metrics': ('parent', 'page_token', 'page_size', ), - '_list_sinks': ('parent', 'page_token', 'page_size', ), - '_list_views': ('parent', 'page_token', 'page_size', ), - '_update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), - '_update_exclusion': ('name', 'exclusion', 'update_mask', ), - '_update_log_metric': ('metric_name', 'metric', ), - '_update_settings': ('name', 'settings', 'update_mask', ), - '_update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), - '_update_view': ('name', 'view', 'update_mask', ), + 'copy_log_entries': ('name', 'destination', 'filter', ), 'create_bucket': ('parent', 'bucket_id', 'bucket', ), 'create_bucket_async': 
('parent', 'bucket_id', 'bucket', ), + 'create_exclusion': ('parent', 'exclusion', ), + 'create_link': ('parent', 'link', 'link_id', ), + 'create_log_metric': ('parent', 'metric', ), + 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), + 'create_view': ('parent', 'view_id', 'view', ), 'delete_bucket': ('name', ), + 'delete_exclusion': ('name', ), + 'delete_link': ('name', ), 'delete_log': ('log_name', ), + 'delete_log_metric': ('metric_name', ), + 'delete_sink': ('sink_name', ), + 'delete_view': ('name', ), 'get_bucket': ('name', ), + 'get_cmek_settings': ('name', ), + 'get_exclusion': ('name', ), + 'get_link': ('name', ), + 'get_log_metric': ('metric_name', ), + 'get_settings': ('name', ), + 'get_sink': ('sink_name', ), + 'get_view': ('name', ), 'list_buckets': ('parent', 'page_token', 'page_size', ), + 'list_exclusions': ('parent', 'page_token', 'page_size', ), + 'list_links': ('parent', 'page_token', 'page_size', ), 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), + 'list_log_metrics': ('parent', 'page_token', 'page_size', ), 'list_logs': ('parent', 'resource_names', 'page_size', 'page_token', ), 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), + 'list_sinks': ('parent', 'page_token', 'page_size', ), + 'list_views': ('parent', 'page_token', 'page_size', ), 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), 'undelete_bucket': ('name', ), 'update_bucket': ('name', 'bucket', 'update_mask', ), 'update_bucket_async': ('name', 'bucket', 'update_mask', ), + 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), + 'update_exclusion': ('name', 'exclusion', 'update_mask', ), + 'update_log_metric': ('metric_name', 'metric', ), + 'update_settings': ('name', 'settings', 'update_mask', ), + 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), + 'update_view': ('name', 'view', 'update_mask', ), 'write_log_entries': ('entries', 'log_name', 
'resource', 'labels', 'partial_success', 'dry_run', ), } diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index 09812660a7c8..e206a2486bb0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -2940,7 +2940,7 @@ def test__list_views(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse( @@ -2955,7 +2955,7 @@ def test__list_views(request_type, transport: str = 'grpc'): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListViewsPager) + assert isinstance(response, pagers.ListViewsPager) assert response.next_page_token == 'next_page_token_value' @@ -2977,7 +2977,7 @@ def test__list_views_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._list_views(request=request) @@ -3002,12 +3002,12 @@ def test__list_views_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._list_views in client._transport._wrapped_methods + assert client._transport.list_views in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._list_views] = mock_rpc + client._transport._wrapped_methods[client._transport.list_views] = mock_rpc request = {} client._list_views(request) @@ -3035,12 +3035,12 @@ async def test__list_views_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._list_views in client._client._transport._wrapped_methods + assert client._client._transport.list_views in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._list_views] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.list_views] = mock_rpc request = {} await client._list_views(request) @@ -3067,7 +3067,7 @@ async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=l # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( @@ -3082,7 +3082,7 @@ async def test__list_views_async(transport: str = 'grpc_asyncio', request_type=l assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers._ListViewsAsyncPager) + assert isinstance(response, pagers.ListViewsAsyncPager) assert response.next_page_token == 'next_page_token_value' @@ -3103,7 +3103,7 @@ def test__list_views_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: call.return_value = logging_config.ListViewsResponse() client._list_views(request) @@ -3135,7 +3135,7 @@ async def test__list_views_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse()) await client._list_views(request) @@ -3160,7 +3160,7 @@ def test__list_views_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() @@ -3200,7 +3200,7 @@ async def test__list_views_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListViewsResponse() @@ -3243,7 +3243,7 @@ def test__list_views_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -3300,7 +3300,7 @@ def test__list_views_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -3342,7 +3342,7 @@ async def test__list_views_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3391,7 +3391,7 @@ async def test__list_views_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3447,7 +3447,7 @@ def test__get_view(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( @@ -3487,7 +3487,7 @@ def test__get_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._get_view(request=request) @@ -3511,12 +3511,12 @@ def test__get_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_view in client._transport._wrapped_methods + assert client._transport.get_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._get_view] = mock_rpc + client._transport._wrapped_methods[client._transport.get_view] = mock_rpc request = {} client._get_view(request) @@ -3544,12 +3544,12 @@ async def test__get_view_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_view in client._client._transport._wrapped_methods + assert client._client._transport.get_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_view] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_view] = mock_rpc request = {} await client._get_view(request) @@ -3576,7 +3576,7 @@ async def test__get_view_async(transport: str = 'grpc_asyncio', request_type=log # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( @@ -3616,7 +3616,7 @@ def test__get_view_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: call.return_value = logging_config.LogView() client._get_view(request) @@ -3648,7 +3648,7 @@ async def test__get_view_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) await client._get_view(request) @@ -3682,7 +3682,7 @@ def test__create_view(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( @@ -3723,7 +3723,7 @@ def test__create_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._create_view(request=request) @@ -3748,12 +3748,12 @@ def test__create_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._create_view in client._transport._wrapped_methods + assert client._transport.create_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._create_view] = mock_rpc + client._transport._wrapped_methods[client._transport.create_view] = mock_rpc request = {} client._create_view(request) @@ -3781,12 +3781,12 @@ async def test__create_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._create_view in client._client._transport._wrapped_methods + assert client._client._transport.create_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._create_view] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.create_view] = mock_rpc request = {} await client._create_view(request) @@ -3813,7 +3813,7 @@ async def test__create_view_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( @@ -3853,7 +3853,7 @@ def test__create_view_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: call.return_value = logging_config.LogView() client._create_view(request) @@ -3885,7 +3885,7 @@ async def test__create_view_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) await client._create_view(request) @@ -3919,7 +3919,7 @@ def test__update_view(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogView( @@ -3959,7 +3959,7 @@ def test__update_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._update_view(request=request) @@ -3983,12 +3983,12 @@ def test__update_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._update_view in client._transport._wrapped_methods + assert client._transport.update_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._update_view] = mock_rpc + client._transport._wrapped_methods[client._transport.update_view] = mock_rpc request = {} client._update_view(request) @@ -4016,12 +4016,12 @@ async def test__update_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._update_view in client._client._transport._wrapped_methods + assert client._client._transport.update_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._update_view] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.update_view] = mock_rpc request = {} await client._update_view(request) @@ -4048,7 +4048,7 @@ async def test__update_view_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( @@ -4088,7 +4088,7 @@ def test__update_view_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: call.return_value = logging_config.LogView() client._update_view(request) @@ -4120,7 +4120,7 @@ async def test__update_view_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView()) await client._update_view(request) @@ -4154,7 +4154,7 @@ def test__delete_view(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4187,7 +4187,7 @@ def test__delete_view_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._delete_view(request=request) @@ -4211,12 +4211,12 @@ def test__delete_view_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._delete_view in client._transport._wrapped_methods + assert client._transport.delete_view in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._delete_view] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_view] = mock_rpc request = {} client._delete_view(request) @@ -4244,12 +4244,12 @@ async def test__delete_view_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._delete_view in client._client._transport._wrapped_methods + assert client._client._transport.delete_view in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._delete_view] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.delete_view] = mock_rpc request = {} await client._delete_view(request) @@ -4276,7 +4276,7 @@ async def test__delete_view_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4309,7 +4309,7 @@ def test__delete_view_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: call.return_value = None client._delete_view(request) @@ -4341,7 +4341,7 @@ async def test__delete_view_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_view(request) @@ -4375,7 +4375,7 @@ def test__list_sinks(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse( @@ -4390,7 +4390,7 @@ def test__list_sinks(request_type, transport: str = 'grpc'): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListSinksPager) + assert isinstance(response, pagers.ListSinksPager) assert response.next_page_token == 'next_page_token_value' @@ -4412,7 +4412,7 @@ def test__list_sinks_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._list_sinks(request=request) @@ -4437,12 +4437,12 @@ def test__list_sinks_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._list_sinks in client._transport._wrapped_methods + assert client._transport.list_sinks in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._list_sinks] = mock_rpc + client._transport._wrapped_methods[client._transport.list_sinks] = mock_rpc request = {} client._list_sinks(request) @@ -4470,12 +4470,12 @@ async def test__list_sinks_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._list_sinks in client._client._transport._wrapped_methods + assert client._client._transport.list_sinks in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._list_sinks] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.list_sinks] = mock_rpc request = {} await client._list_sinks(request) @@ -4502,7 +4502,7 @@ async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=l # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( @@ -4517,7 +4517,7 @@ async def test__list_sinks_async(transport: str = 'grpc_asyncio', request_type=l assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListSinksAsyncPager) + assert isinstance(response, pagers.ListSinksAsyncPager) assert response.next_page_token == 'next_page_token_value' @@ -4538,7 +4538,7 @@ def test__list_sinks_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: call.return_value = logging_config.ListSinksResponse() client._list_sinks(request) @@ -4570,7 +4570,7 @@ async def test__list_sinks_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse()) await client._list_sinks(request) @@ -4595,7 +4595,7 @@ def test__list_sinks_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() @@ -4635,7 +4635,7 @@ async def test__list_sinks_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListSinksResponse() @@ -4678,7 +4678,7 @@ def test__list_sinks_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -4735,7 +4735,7 @@ def test__list_sinks_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -4777,7 +4777,7 @@ async def test__list_sinks_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -4826,7 +4826,7 @@ async def test__list_sinks_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -4882,7 +4882,7 @@ def test__get_sink(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( @@ -4932,7 +4932,7 @@ def test__get_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._get_sink(request=request) @@ -4956,12 +4956,12 @@ def test__get_sink_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_sink in client._transport._wrapped_methods + assert client._transport.get_sink in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._get_sink] = mock_rpc + client._transport._wrapped_methods[client._transport.get_sink] = mock_rpc request = {} client._get_sink(request) @@ -4989,12 +4989,12 @@ async def test__get_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_sink in client._client._transport._wrapped_methods + assert client._client._transport.get_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_sink] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_sink] = mock_rpc request = {} await client._get_sink(request) @@ -5021,7 +5021,7 @@ async def test__get_sink_async(transport: str = 'grpc_asyncio', request_type=log # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( @@ -5071,7 +5071,7 @@ def test__get_sink_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: call.return_value = logging_config.LogSink() client._get_sink(request) @@ -5103,7 +5103,7 @@ async def test__get_sink_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) await client._get_sink(request) @@ -5128,7 +5128,7 @@ def test__get_sink_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -5168,7 +5168,7 @@ async def test__get_sink_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -5219,7 +5219,7 @@ def test__create_sink(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( @@ -5269,7 +5269,7 @@ def test__create_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._create_sink(request=request) @@ -5293,12 +5293,12 @@ def test__create_sink_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._create_sink in client._transport._wrapped_methods + assert client._transport.create_sink in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._create_sink] = mock_rpc + client._transport._wrapped_methods[client._transport.create_sink] = mock_rpc request = {} client._create_sink(request) @@ -5326,12 +5326,12 @@ async def test__create_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._create_sink in client._client._transport._wrapped_methods + assert client._client._transport.create_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._create_sink] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.create_sink] = mock_rpc request = {} await client._create_sink(request) @@ -5358,7 +5358,7 @@ async def test__create_sink_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( @@ -5408,7 +5408,7 @@ def test__create_sink_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: call.return_value = logging_config.LogSink() client._create_sink(request) @@ -5440,7 +5440,7 @@ async def test__create_sink_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) await client._create_sink(request) @@ -5465,7 +5465,7 @@ def test__create_sink_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -5510,7 +5510,7 @@ async def test__create_sink_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -5566,7 +5566,7 @@ def test__update_sink(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink( @@ -5616,7 +5616,7 @@ def test__update_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._update_sink(request=request) @@ -5640,12 +5640,12 @@ def test__update_sink_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._update_sink in client._transport._wrapped_methods + assert client._transport.update_sink in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._update_sink] = mock_rpc + client._transport._wrapped_methods[client._transport.update_sink] = mock_rpc request = {} client._update_sink(request) @@ -5673,12 +5673,12 @@ async def test__update_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._update_sink in client._client._transport._wrapped_methods + assert client._client._transport.update_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._update_sink] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.update_sink] = mock_rpc request = {} await client._update_sink(request) @@ -5705,7 +5705,7 @@ async def test__update_sink_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( @@ -5755,7 +5755,7 @@ def test__update_sink_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: call.return_value = logging_config.LogSink() client._update_sink(request) @@ -5787,7 +5787,7 @@ async def test__update_sink_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink()) await client._update_sink(request) @@ -5812,7 +5812,7 @@ def test__update_sink_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -5862,7 +5862,7 @@ async def test__update_sink_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogSink() @@ -5923,7 +5923,7 @@ def test__delete_sink(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5956,7 +5956,7 @@ def test__delete_sink_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._delete_sink(request=request) @@ -5980,12 +5980,12 @@ def test__delete_sink_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._delete_sink in client._transport._wrapped_methods + assert client._transport.delete_sink in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._delete_sink] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_sink] = mock_rpc request = {} client._delete_sink(request) @@ -6013,12 +6013,12 @@ async def test__delete_sink_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._delete_sink in client._client._transport._wrapped_methods + assert client._client._transport.delete_sink in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._delete_sink] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.delete_sink] = mock_rpc request = {} await client._delete_sink(request) @@ -6045,7 +6045,7 @@ async def test__delete_sink_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -6078,7 +6078,7 @@ def test__delete_sink_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: call.return_value = None client._delete_sink(request) @@ -6110,7 +6110,7 @@ async def test__delete_sink_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_sink(request) @@ -6135,7 +6135,7 @@ def test__delete_sink_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -6175,7 +6175,7 @@ async def test__delete_sink_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -6226,7 +6226,7 @@ def test__create_link(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/spam') @@ -6260,7 +6260,7 @@ def test__create_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._create_link(request=request) @@ -6285,12 +6285,12 @@ def test__create_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._create_link in client._transport._wrapped_methods + assert client._transport.create_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._create_link] = mock_rpc + client._transport._wrapped_methods[client._transport.create_link] = mock_rpc request = {} client._create_link(request) @@ -6323,12 +6323,12 @@ async def test__create_link_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._create_link in client._client._transport._wrapped_methods + assert client._client._transport.create_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._create_link] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.create_link] = mock_rpc request = {} await client._create_link(request) @@ -6360,7 +6360,7 @@ async def test__create_link_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6395,7 +6395,7 @@ def test__create_link_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client._create_link(request) @@ -6427,7 +6427,7 @@ async def test__create_link_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) await client._create_link(request) @@ -6452,7 +6452,7 @@ def test__create_link_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/op') @@ -6502,7 +6502,7 @@ async def test__create_link_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/op') @@ -6565,7 +6565,7 @@ def test__delete_link(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name='operations/spam') @@ -6598,7 +6598,7 @@ def test__delete_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._delete_link(request=request) @@ -6622,12 +6622,12 @@ def test__delete_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._delete_link in client._transport._wrapped_methods + assert client._transport.delete_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._delete_link] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_link] = mock_rpc request = {} client._delete_link(request) @@ -6660,12 +6660,12 @@ async def test__delete_link_async_use_cached_wrapped_rpc(transport: str = "grpc_ wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._delete_link in client._client._transport._wrapped_methods + assert client._client._transport.delete_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._delete_link] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.delete_link] = mock_rpc request = {} await client._delete_link(request) @@ -6697,7 +6697,7 @@ async def test__delete_link_async(transport: str = 'grpc_asyncio', request_type= # Mock the actual call within the gRPC stub, and fake the 
request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6732,7 +6732,7 @@ def test__delete_link_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client._delete_link(request) @@ -6764,7 +6764,7 @@ async def test__delete_link_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op')) await client._delete_link(request) @@ -6789,7 +6789,7 @@ def test__delete_link_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/op') @@ -6829,7 +6829,7 @@ async def test__delete_link_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name='operations/op') @@ -6882,7 +6882,7 @@ def test__list_links(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse( @@ -6897,7 +6897,7 @@ def test__list_links(request_type, transport: str = 'grpc'): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListLinksPager) + assert isinstance(response, pagers.ListLinksPager) assert response.next_page_token == 'next_page_token_value' @@ -6919,7 +6919,7 @@ def test__list_links_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._list_links(request=request) @@ -6944,12 +6944,12 @@ def test__list_links_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._list_links in client._transport._wrapped_methods + assert client._transport.list_links in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._list_links] = mock_rpc + client._transport._wrapped_methods[client._transport.list_links] = mock_rpc request = {} client._list_links(request) @@ -6977,12 +6977,12 @@ async def test__list_links_async_use_cached_wrapped_rpc(transport: str = "grpc_a wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._list_links in client._client._transport._wrapped_methods + assert client._client._transport.list_links in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._list_links] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.list_links] = mock_rpc request = {} await client._list_links(request) @@ -7009,7 +7009,7 @@ async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=l # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( @@ -7024,7 +7024,7 @@ async def test__list_links_async(transport: str = 'grpc_asyncio', request_type=l assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListLinksAsyncPager) + assert isinstance(response, pagers.ListLinksAsyncPager) assert response.next_page_token == 'next_page_token_value' @@ -7045,7 +7045,7 @@ def test__list_links_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: call.return_value = logging_config.ListLinksResponse() client._list_links(request) @@ -7077,7 +7077,7 @@ async def test__list_links_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse()) await client._list_links(request) @@ -7102,7 +7102,7 @@ def test__list_links_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse() @@ -7142,7 +7142,7 @@ async def test__list_links_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListLinksResponse() @@ -7185,7 +7185,7 @@ def test__list_links_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -7242,7 +7242,7 @@ def test__list_links_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -7284,7 +7284,7 @@ async def test__list_links_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -7333,7 +7333,7 @@ async def test__list_links_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -7389,7 +7389,7 @@ def test__get_link(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link( @@ -7429,7 +7429,7 @@ def test__get_link_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._get_link(request=request) @@ -7453,12 +7453,12 @@ def test__get_link_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_link in client._transport._wrapped_methods + assert client._transport.get_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._get_link] = mock_rpc + client._transport._wrapped_methods[client._transport.get_link] = mock_rpc request = {} client._get_link(request) @@ -7486,12 +7486,12 @@ async def test__get_link_async_use_cached_wrapped_rpc(transport: str = "grpc_asy wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_link in client._client._transport._wrapped_methods + assert client._client._transport.get_link in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_link] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_link] = mock_rpc request = {} await client._get_link(request) @@ -7518,7 +7518,7 @@ async def test__get_link_async(transport: str = 'grpc_asyncio', request_type=log # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( @@ -7558,7 +7558,7 @@ def test__get_link_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: call.return_value = logging_config.Link() client._get_link(request) @@ -7590,7 +7590,7 @@ async def test__get_link_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link()) await client._get_link(request) @@ -7615,7 +7615,7 @@ def test__get_link_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link() @@ -7655,7 +7655,7 @@ async def test__get_link_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Link() @@ -7706,7 +7706,7 @@ def test__list_exclusions(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse( @@ -7721,7 +7721,7 @@ def test__list_exclusions(request_type, transport: str = 'grpc'): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListExclusionsPager) + assert isinstance(response, pagers.ListExclusionsPager) assert response.next_page_token == 'next_page_token_value' @@ -7743,7 +7743,7 @@ def test__list_exclusions_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._list_exclusions(request=request) @@ -7768,12 +7768,12 @@ def test__list_exclusions_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._list_exclusions in client._transport._wrapped_methods + assert client._transport.list_exclusions in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._list_exclusions] = mock_rpc + client._transport._wrapped_methods[client._transport.list_exclusions] = mock_rpc request = {} client._list_exclusions(request) @@ -7801,12 +7801,12 @@ async def test__list_exclusions_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._list_exclusions in client._client._transport._wrapped_methods + assert client._client._transport.list_exclusions in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._list_exclusions] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.list_exclusions] = mock_rpc request = {} await client._list_exclusions(request) @@ -7833,7 +7833,7 @@ async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( @@ -7848,7 +7848,7 @@ async def test__list_exclusions_async(transport: str = 'grpc_asyncio', request_t assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListExclusionsAsyncPager) + assert isinstance(response, pagers.ListExclusionsAsyncPager) assert response.next_page_token == 'next_page_token_value' @@ -7869,7 +7869,7 @@ def test__list_exclusions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: call.return_value = logging_config.ListExclusionsResponse() client._list_exclusions(request) @@ -7901,7 +7901,7 @@ async def test__list_exclusions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse()) await client._list_exclusions(request) @@ -7926,7 +7926,7 @@ def test__list_exclusions_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() @@ -7966,7 +7966,7 @@ async def test__list_exclusions_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.ListExclusionsResponse() @@ -8009,7 +8009,7 @@ def test__list_exclusions_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -8066,7 +8066,7 @@ def test__list_exclusions_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -8108,7 +8108,7 @@ async def test__list_exclusions_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -8157,7 +8157,7 @@ async def test__list_exclusions_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -8213,7 +8213,7 @@ def test__get_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = logging_config.LogExclusion( @@ -8255,7 +8255,7 @@ def test__get_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._get_exclusion(request=request) @@ -8279,12 +8279,12 @@ def test__get_exclusion_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_exclusion in client._transport._wrapped_methods + assert client._transport.get_exclusion in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._get_exclusion] = mock_rpc + client._transport._wrapped_methods[client._transport.get_exclusion] = mock_rpc request = {} client._get_exclusion(request) @@ -8312,12 +8312,12 @@ async def test__get_exclusion_async_use_cached_wrapped_rpc(transport: str = "grp wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_exclusion in client._client._transport._wrapped_methods + assert client._client._transport.get_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_exclusion] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_exclusion] = mock_rpc request = {} await client._get_exclusion(request) @@ -8344,7 +8344,7 @@ async def test__get_exclusion_async(transport: str = 'grpc_asyncio', request_typ # Mock the actual call within the gRPC stub, and 
fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( @@ -8386,7 +8386,7 @@ def test__get_exclusion_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client._get_exclusion(request) @@ -8418,7 +8418,7 @@ async def test__get_exclusion_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) await client._get_exclusion(request) @@ -8443,7 +8443,7 @@ def test__get_exclusion_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() @@ -8483,7 +8483,7 @@ async def test__get_exclusion_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() @@ -8534,7 +8534,7 @@ def test__create_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( @@ -8576,7 +8576,7 @@ def test__create_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._create_exclusion(request=request) @@ -8600,12 +8600,12 @@ def test__create_exclusion_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._create_exclusion in client._transport._wrapped_methods + assert client._transport.create_exclusion in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._create_exclusion] = mock_rpc + client._transport._wrapped_methods[client._transport.create_exclusion] = mock_rpc request = {} client._create_exclusion(request) @@ -8633,12 +8633,12 @@ async def test__create_exclusion_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._create_exclusion in client._client._transport._wrapped_methods + assert client._client._transport.create_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._create_exclusion] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.create_exclusion] = mock_rpc request = {} await client._create_exclusion(request) @@ -8665,7 +8665,7 @@ async def test__create_exclusion_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( @@ -8707,7 +8707,7 @@ def test__create_exclusion_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client._create_exclusion(request) @@ -8739,7 +8739,7 @@ async def test__create_exclusion_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) await client._create_exclusion(request) @@ -8764,7 +8764,7 @@ def test__create_exclusion_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() @@ -8809,7 +8809,7 @@ async def test__create_exclusion_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() @@ -8865,7 +8865,7 @@ def test__update_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion( @@ -8907,7 +8907,7 @@ def test__update_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._update_exclusion(request=request) @@ -8931,12 +8931,12 @@ def test__update_exclusion_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._update_exclusion in client._transport._wrapped_methods + assert client._transport.update_exclusion in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._update_exclusion] = mock_rpc + client._transport._wrapped_methods[client._transport.update_exclusion] = mock_rpc request = {} client._update_exclusion(request) @@ -8964,12 +8964,12 @@ async def test__update_exclusion_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._update_exclusion in client._client._transport._wrapped_methods + assert client._client._transport.update_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._update_exclusion] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.update_exclusion] = mock_rpc request = {} await client._update_exclusion(request) @@ -8996,7 +8996,7 @@ async def test__update_exclusion_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( @@ -9038,7 +9038,7 @@ def test__update_exclusion_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client._update_exclusion(request) @@ -9070,7 +9070,7 @@ async def test__update_exclusion_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion()) await client._update_exclusion(request) @@ -9095,7 +9095,7 @@ def test__update_exclusion_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() @@ -9145,7 +9145,7 @@ async def test__update_exclusion_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.LogExclusion() @@ -9206,7 +9206,7 @@ def test__delete_exclusion(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -9239,7 +9239,7 @@ def test__delete_exclusion_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._delete_exclusion(request=request) @@ -9263,12 +9263,12 @@ def test__delete_exclusion_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._delete_exclusion in client._transport._wrapped_methods + assert client._transport.delete_exclusion in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._delete_exclusion] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_exclusion] = mock_rpc request = {} client._delete_exclusion(request) @@ -9296,12 +9296,12 @@ async def test__delete_exclusion_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._delete_exclusion in client._client._transport._wrapped_methods + assert client._client._transport.delete_exclusion in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._delete_exclusion] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.delete_exclusion] = mock_rpc request = {} await client._delete_exclusion(request) @@ -9328,7 +9328,7 @@ async def test__delete_exclusion_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within 
the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -9361,7 +9361,7 @@ def test__delete_exclusion_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: call.return_value = None client._delete_exclusion(request) @@ -9393,7 +9393,7 @@ async def test__delete_exclusion_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_exclusion(request) @@ -9418,7 +9418,7 @@ def test__delete_exclusion_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -9458,7 +9458,7 @@ async def test__delete_exclusion_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -9509,7 +9509,7 @@ def test__get_cmek_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.CmekSettings( @@ -9551,7 +9551,7 @@ def test__get_cmek_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._get_cmek_settings(request=request) @@ -9575,12 +9575,12 @@ def test__get_cmek_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_cmek_settings in client._transport._wrapped_methods + assert client._transport.get_cmek_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._get_cmek_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.get_cmek_settings] = mock_rpc request = {} client._get_cmek_settings(request) @@ -9608,12 +9608,12 @@ async def test__get_cmek_settings_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_cmek_settings in client._client._transport._wrapped_methods + assert client._client._transport.get_cmek_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_cmek_settings] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_cmek_settings] = mock_rpc request = {} await client._get_cmek_settings(request) @@ -9640,7 +9640,7 @@ async def test__get_cmek_settings_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( @@ -9682,7 +9682,7 @@ def test__get_cmek_settings_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: call.return_value = logging_config.CmekSettings() client._get_cmek_settings(request) @@ -9714,7 +9714,7 @@ async def test__get_cmek_settings_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) await client._get_cmek_settings(request) @@ -9748,7 +9748,7 @@ def test__update_cmek_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.CmekSettings( @@ -9790,7 +9790,7 @@ def test__update_cmek_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._update_cmek_settings(request=request) @@ -9814,12 +9814,12 @@ def test__update_cmek_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._update_cmek_settings in client._transport._wrapped_methods + assert client._transport.update_cmek_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._update_cmek_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.update_cmek_settings] = mock_rpc request = {} client._update_cmek_settings(request) @@ -9847,12 +9847,12 @@ async def test__update_cmek_settings_async_use_cached_wrapped_rpc(transport: str wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._update_cmek_settings in client._client._transport._wrapped_methods + assert client._client._transport.update_cmek_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._update_cmek_settings] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.update_cmek_settings] = mock_rpc request = {} await client._update_cmek_settings(request) @@ -9879,7 +9879,7 @@ async def test__update_cmek_settings_async(transport: str = 'grpc_asyncio', requ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( @@ -9921,7 +9921,7 @@ def test__update_cmek_settings_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: call.return_value = logging_config.CmekSettings() client._update_cmek_settings(request) @@ -9953,7 +9953,7 @@ async def test__update_cmek_settings_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings()) await client._update_cmek_settings(request) @@ -9987,7 +9987,7 @@ def test__get_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings( @@ -10031,7 +10031,7 @@ def test__get_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._get_settings(request=request) @@ -10055,12 +10055,12 @@ def test__get_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_settings in client._transport._wrapped_methods + assert client._transport.get_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._get_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.get_settings] = mock_rpc request = {} client._get_settings(request) @@ -10088,12 +10088,12 @@ async def test__get_settings_async_use_cached_wrapped_rpc(transport: str = "grpc wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_settings in client._client._transport._wrapped_methods + assert client._client._transport.get_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_settings] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_settings] = mock_rpc request = {} await client._get_settings(request) @@ -10120,7 +10120,7 @@ async def test__get_settings_async(transport: str = 'grpc_asyncio', request_type # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( @@ -10164,7 +10164,7 @@ def test__get_settings_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: call.return_value = logging_config.Settings() client._get_settings(request) @@ -10196,7 +10196,7 @@ async def test__get_settings_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) await client._get_settings(request) @@ -10221,7 +10221,7 @@ def test__get_settings_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() @@ -10261,7 +10261,7 @@ async def test__get_settings_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() @@ -10312,7 +10312,7 @@ def test__update_settings(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings( @@ -10356,7 +10356,7 @@ def test__update_settings_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._update_settings(request=request) @@ -10380,12 +10380,12 @@ def test__update_settings_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._update_settings in client._transport._wrapped_methods + assert client._transport.update_settings in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._update_settings] = mock_rpc + client._transport._wrapped_methods[client._transport.update_settings] = mock_rpc request = {} client._update_settings(request) @@ -10413,12 +10413,12 @@ async def test__update_settings_async_use_cached_wrapped_rpc(transport: str = "g wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._update_settings in client._client._transport._wrapped_methods + assert client._client._transport.update_settings in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._update_settings] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.update_settings] = mock_rpc request = {} await client._update_settings(request) @@ -10445,7 +10445,7 @@ async def test__update_settings_async(transport: str = 'grpc_asyncio', request_t # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( @@ -10489,7 +10489,7 @@ def test__update_settings_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: call.return_value = logging_config.Settings() client._update_settings(request) @@ -10521,7 +10521,7 @@ async def test__update_settings_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings()) await client._update_settings(request) @@ -10546,7 +10546,7 @@ def test__update_settings_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() @@ -10591,7 +10591,7 @@ async def test__update_settings_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_config.Settings() @@ -10647,7 +10647,7 @@ def test__copy_log_entries(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._copy_log_entries), + type(client.transport.copy_log_entries), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = operations_pb2.Operation(name='operations/spam') @@ -10682,7 +10682,7 @@ def test__copy_log_entries_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._copy_log_entries), + type(client.transport.copy_log_entries), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._copy_log_entries(request=request) @@ -10708,12 +10708,12 @@ def test__copy_log_entries_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._copy_log_entries in client._transport._wrapped_methods + assert client._transport.copy_log_entries in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._copy_log_entries] = mock_rpc + client._transport._wrapped_methods[client._transport.copy_log_entries] = mock_rpc request = {} client._copy_log_entries(request) @@ -10746,12 +10746,12 @@ async def test__copy_log_entries_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._copy_log_entries in client._client._transport._wrapped_methods + assert client._client._transport.copy_log_entries in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._copy_log_entries] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.copy_log_entries] = mock_rpc request = {} await client._copy_log_entries(request) @@ -10783,7 +10783,7 @@ async def test__copy_log_entries_async(transport: str = 
'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._copy_log_entries), + type(client.transport.copy_log_entries), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -11101,7 +11101,7 @@ def test__list_views_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: call.return_value = logging_config.ListViewsResponse() client._list_views(request=None) @@ -11124,7 +11124,7 @@ def test__get_view_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: call.return_value = logging_config.LogView() client._get_view(request=None) @@ -11147,7 +11147,7 @@ def test__create_view_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: call.return_value = logging_config.LogView() client._create_view(request=None) @@ -11170,7 +11170,7 @@ def test__update_view_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: call.return_value = logging_config.LogView() client._update_view(request=None) @@ -11193,7 +11193,7 @@ def test__delete_view_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: call.return_value = None client._delete_view(request=None) @@ -11216,7 +11216,7 @@ def test__list_sinks_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: call.return_value = logging_config.ListSinksResponse() client._list_sinks(request=None) @@ -11239,7 +11239,7 @@ def test__get_sink_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: call.return_value = logging_config.LogSink() client._get_sink(request=None) @@ -11262,7 +11262,7 @@ def test__create_sink_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: call.return_value = logging_config.LogSink() client._create_sink(request=None) @@ -11285,7 +11285,7 @@ def test__update_sink_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: call.return_value = logging_config.LogSink() client._update_sink(request=None) @@ -11308,7 +11308,7 @@ def test__delete_sink_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: call.return_value = None client._delete_sink(request=None) @@ -11331,7 +11331,7 @@ def test__create_link_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client._create_link(request=None) @@ -11354,7 +11354,7 @@ def test__delete_link_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client._delete_link(request=None) @@ -11377,7 +11377,7 @@ def test__list_links_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: call.return_value = logging_config.ListLinksResponse() client._list_links(request=None) @@ -11400,7 +11400,7 @@ def test__get_link_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: call.return_value = logging_config.Link() client._get_link(request=None) @@ -11423,7 +11423,7 @@ def test__list_exclusions_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: call.return_value = logging_config.ListExclusionsResponse() client._list_exclusions(request=None) @@ -11446,7 +11446,7 @@ def test__get_exclusion_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client._get_exclusion(request=None) @@ -11469,7 +11469,7 @@ def test__create_exclusion_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client._create_exclusion(request=None) @@ -11492,7 +11492,7 @@ def test__update_exclusion_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: call.return_value = logging_config.LogExclusion() client._update_exclusion(request=None) @@ -11515,7 +11515,7 @@ def test__delete_exclusion_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: call.return_value = None client._delete_exclusion(request=None) @@ -11538,7 +11538,7 @@ def test__get_cmek_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: call.return_value = logging_config.CmekSettings() client._get_cmek_settings(request=None) @@ -11561,7 +11561,7 @@ def test__update_cmek_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: call.return_value = logging_config.CmekSettings() client._update_cmek_settings(request=None) @@ -11584,7 +11584,7 @@ def test__get_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: call.return_value = logging_config.Settings() client._get_settings(request=None) @@ -11607,7 +11607,7 @@ def test__update_settings_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: call.return_value = logging_config.Settings() client._update_settings(request=None) @@ -11630,7 +11630,7 @@ def test__copy_log_entries_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._copy_log_entries), + type(client.transport.copy_log_entries), '__call__') as call: call.return_value = operations_pb2.Operation(name='operations/op') client._copy_log_entries(request=None) @@ -11899,7 +11899,7 @@ async def test__list_views_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_views), + type(client.transport.list_views), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListViewsResponse( @@ -11926,7 +11926,7 @@ async def test__get_view_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_view), + type(client.transport.get_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( @@ -11955,7 +11955,7 @@ async def test__create_view_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_view), + type(client.transport.create_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( @@ -11984,7 +11984,7 @@ async def test__update_view_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_view), + type(client.transport.update_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogView( @@ -12013,7 +12013,7 @@ async def test__delete_view_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._delete_view), + type(client.transport.delete_view), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -12038,7 +12038,7 @@ async def test__list_sinks_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_sinks), + type(client.transport.list_sinks), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListSinksResponse( @@ -12065,7 +12065,7 @@ async def test__get_sink_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_sink), + type(client.transport.get_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( @@ -12099,7 +12099,7 @@ async def test__create_sink_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_sink), + type(client.transport.create_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( @@ -12133,7 +12133,7 @@ async def test__update_sink_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_sink), + type(client.transport.update_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogSink( @@ -12167,7 +12167,7 @@ async def test__delete_sink_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._delete_sink), + type(client.transport.delete_sink), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -12192,7 +12192,7 @@ async def test__create_link_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_link), + type(client.transport.create_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -12219,7 +12219,7 @@ async def test__delete_link_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_link), + type(client.transport.delete_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -12246,7 +12246,7 @@ async def test__list_links_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_links), + type(client.transport.list_links), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListLinksResponse( @@ -12273,7 +12273,7 @@ async def test__get_link_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_link), + type(client.transport.get_link), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Link( @@ -12302,7 +12302,7 @@ async def test__list_exclusions_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._list_exclusions), + type(client.transport.list_exclusions), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.ListExclusionsResponse( @@ -12329,7 +12329,7 @@ async def test__get_exclusion_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_exclusion), + type(client.transport.get_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( @@ -12359,7 +12359,7 @@ async def test__create_exclusion_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_exclusion), + type(client.transport.create_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( @@ -12389,7 +12389,7 @@ async def test__update_exclusion_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_exclusion), + type(client.transport.update_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.LogExclusion( @@ -12419,7 +12419,7 @@ async def test__delete_exclusion_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_exclusion), + type(client.transport.delete_exclusion), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -12444,7 +12444,7 @@ async def test__get_cmek_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._get_cmek_settings), + type(client.transport.get_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( @@ -12474,7 +12474,7 @@ async def test__update_cmek_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_cmek_settings), + type(client.transport.update_cmek_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.CmekSettings( @@ -12504,7 +12504,7 @@ async def test__get_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_settings), + type(client.transport.get_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( @@ -12535,7 +12535,7 @@ async def test__update_settings_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_settings), + type(client.transport.update_settings), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_config.Settings( @@ -12566,7 +12566,7 @@ async def test__copy_log_entries_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._copy_log_entries), + type(client.transport.copy_log_entries), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -12620,30 +12620,30 @@ def test_config_service_v2_base_transport(): 'update_bucket', 'delete_bucket', 'undelete_bucket', - '_list_views', - '_get_view', - '_create_view', - '_update_view', - '_delete_view', - '_list_sinks', - '_get_sink', - '_create_sink', - '_update_sink', - '_delete_sink', - '_create_link', - '_delete_link', - '_list_links', - '_get_link', - '_list_exclusions', - '_get_exclusion', - '_create_exclusion', - '_update_exclusion', - '_delete_exclusion', - '_get_cmek_settings', - '_update_cmek_settings', - '_get_settings', - '_update_settings', - '_copy_log_entries', + 'list_views', + 'get_view', + 'create_view', + 'update_view', + 'delete_view', + 'list_sinks', + 'get_sink', + 'create_sink', + 'update_sink', + 'delete_sink', + 'create_link', + 'delete_link', + 'list_links', + 'get_link', + 'list_exclusions', + 'get_exclusion', + 'create_exclusion', + 'update_exclusion', + 'delete_exclusion', + 'get_cmek_settings', + 'update_cmek_settings', + 'get_settings', + 'update_settings', + 'copy_log_entries', 'get_operation', 'cancel_operation', 'list_operations', diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 1dcfe45d9f97..35cb37f70ddc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -768,7 +768,7 @@ def test__list_log_metrics(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse( @@ -783,7 +783,7 @@ def test__list_log_metrics(request_type, transport: str = 'grpc'): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListLogMetricsPager) + assert isinstance(response, pagers.ListLogMetricsPager) assert response.next_page_token == 'next_page_token_value' @@ -805,7 +805,7 @@ def test__list_log_metrics_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._list_log_metrics(request=request) @@ -830,12 +830,12 @@ def test__list_log_metrics_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._list_log_metrics in client._transport._wrapped_methods + assert client._transport.list_log_metrics in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._list_log_metrics] = mock_rpc + client._transport._wrapped_methods[client._transport.list_log_metrics] = mock_rpc request = {} client._list_log_metrics(request) @@ -863,12 +863,12 @@ async def test__list_log_metrics_async_use_cached_wrapped_rpc(transport: str = " wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._list_log_metrics in client._client._transport._wrapped_methods + assert client._client._transport.list_log_metrics in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._list_log_metrics] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.list_log_metrics] = mock_rpc request = {} await client._list_log_metrics(request) @@ -895,7 +895,7 @@ async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_ # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( @@ -910,7 +910,7 @@ async def test__list_log_metrics_async(transport: str = 'grpc_asyncio', request_ assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers._ListLogMetricsAsyncPager) + assert isinstance(response, pagers.ListLogMetricsAsyncPager) assert response.next_page_token == 'next_page_token_value' @@ -931,7 +931,7 @@ def test__list_log_metrics_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: call.return_value = logging_metrics.ListLogMetricsResponse() client._list_log_metrics(request) @@ -963,7 +963,7 @@ async def test__list_log_metrics_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse()) await client._list_log_metrics(request) @@ -988,7 +988,7 @@ def test__list_log_metrics_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() @@ -1028,7 +1028,7 @@ async def test__list_log_metrics_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.ListLogMetricsResponse() @@ -1071,7 +1071,7 @@ def test__list_log_metrics_pager(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -1128,7 +1128,7 @@ def test__list_log_metrics_pages(transport_name: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Set the response to a series of pages. call.side_effect = ( @@ -1170,7 +1170,7 @@ async def test__list_log_metrics_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1219,7 +1219,7 @@ async def test__list_log_metrics_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__', new_callable=mock.AsyncMock) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1275,7 +1275,7 @@ def test__get_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( @@ -1323,7 +1323,7 @@ def test__get_log_metric_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._get_log_metric(request=request) @@ -1347,12 +1347,12 @@ def test__get_log_metric_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._get_log_metric in client._transport._wrapped_methods + assert client._transport.get_log_metric in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._get_log_metric] = mock_rpc + client._transport._wrapped_methods[client._transport.get_log_metric] = mock_rpc request = {} client._get_log_metric(request) @@ -1380,12 +1380,12 @@ async def test__get_log_metric_async_use_cached_wrapped_rpc(transport: str = "gr wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._get_log_metric in client._client._transport._wrapped_methods + assert client._client._transport.get_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._get_log_metric] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.get_log_metric] = mock_rpc request = {} await client._get_log_metric(request) @@ -1412,7 +1412,7 @@ async def test__get_log_metric_async(transport: str = 'grpc_asyncio', request_ty # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( @@ -1460,7 +1460,7 @@ def test__get_log_metric_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client._get_log_metric(request) @@ -1492,7 +1492,7 @@ async def test__get_log_metric_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) await client._get_log_metric(request) @@ -1517,7 +1517,7 @@ def test__get_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() @@ -1557,7 +1557,7 @@ async def test__get_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() @@ -1608,7 +1608,7 @@ def test__create_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( @@ -1656,7 +1656,7 @@ def test__create_log_metric_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._create_log_metric(request=request) @@ -1680,12 +1680,12 @@ def test__create_log_metric_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._create_log_metric in client._transport._wrapped_methods + assert client._transport.create_log_metric in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._create_log_metric] = mock_rpc + client._transport._wrapped_methods[client._transport.create_log_metric] = mock_rpc request = {} client._create_log_metric(request) @@ -1713,12 +1713,12 @@ async def test__create_log_metric_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._create_log_metric in client._client._transport._wrapped_methods + assert client._client._transport.create_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._create_log_metric] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.create_log_metric] = mock_rpc request = {} await client._create_log_metric(request) @@ -1745,7 +1745,7 @@ async def test__create_log_metric_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( @@ -1793,7 +1793,7 @@ def test__create_log_metric_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client._create_log_metric(request) @@ -1825,7 +1825,7 @@ async def test__create_log_metric_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) await client._create_log_metric(request) @@ -1850,7 +1850,7 @@ def test__create_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() @@ -1895,7 +1895,7 @@ async def test__create_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() @@ -1951,7 +1951,7 @@ def test__update_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric( @@ -1999,7 +1999,7 @@ def test__update_log_metric_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. client._update_log_metric(request=request) @@ -2023,12 +2023,12 @@ def test__update_log_metric_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._update_log_metric in client._transport._wrapped_methods + assert client._transport.update_log_metric in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
- client._transport._wrapped_methods[client._transport._update_log_metric] = mock_rpc + client._transport._wrapped_methods[client._transport.update_log_metric] = mock_rpc request = {} client._update_log_metric(request) @@ -2056,12 +2056,12 @@ async def test__update_log_metric_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._update_log_metric in client._client._transport._wrapped_methods + assert client._client._transport.update_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._update_log_metric] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.update_log_metric] = mock_rpc request = {} await client._update_log_metric(request) @@ -2088,7 +2088,7 @@ async def test__update_log_metric_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( @@ -2136,7 +2136,7 @@ def test__update_log_metric_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client._update_log_metric(request) @@ -2168,7 +2168,7 @@ async def test__update_log_metric_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric()) await client._update_log_metric(request) @@ -2193,7 +2193,7 @@ def test__update_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() @@ -2238,7 +2238,7 @@ async def test__update_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = logging_metrics.LogMetric() @@ -2294,7 +2294,7 @@ def test__delete_log_metric(request_type, transport: str = 'grpc'): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2327,7 +2327,7 @@ def test__delete_log_metric_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: call.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. 
client._delete_log_metric(request=request) @@ -2351,12 +2351,12 @@ def test__delete_log_metric_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport._delete_log_metric in client._transport._wrapped_methods + assert client._transport.delete_log_metric in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = "foo" # operation_request.operation in compute client(s) expect a string. - client._transport._wrapped_methods[client._transport._delete_log_metric] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_log_metric] = mock_rpc request = {} client._delete_log_metric(request) @@ -2384,12 +2384,12 @@ async def test__delete_log_metric_async_use_cached_wrapped_rpc(transport: str = wrapper_fn.reset_mock() # Ensure method has been cached - assert client._client._transport._delete_log_metric in client._client._transport._wrapped_methods + assert client._client._transport.delete_log_metric in client._client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() - client._client._transport._wrapped_methods[client._client._transport._delete_log_metric] = mock_rpc + client._client._transport._wrapped_methods[client._client._transport.delete_log_metric] = mock_rpc request = {} await client._delete_log_metric(request) @@ -2416,7 +2416,7 @@ async def test__delete_log_metric_async(transport: str = 'grpc_asyncio', request # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2449,7 +2449,7 @@ def test__delete_log_metric_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: call.return_value = None client._delete_log_metric(request) @@ -2481,7 +2481,7 @@ async def test__delete_log_metric_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client._delete_log_metric(request) @@ -2506,7 +2506,7 @@ def test__delete_log_metric_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2546,7 +2546,7 @@ async def test__delete_log_metric_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2692,7 +2692,7 @@ def test__list_log_metrics_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: call.return_value = logging_metrics.ListLogMetricsResponse() client._list_log_metrics(request=None) @@ -2715,7 +2715,7 @@ def test__get_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client._get_log_metric(request=None) @@ -2738,7 +2738,7 @@ def test__create_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client._create_log_metric(request=None) @@ -2761,7 +2761,7 @@ def test__update_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: call.return_value = logging_metrics.LogMetric() client._update_log_metric(request=None) @@ -2784,7 +2784,7 @@ def test__delete_log_metric_empty_call_grpc(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: call.return_value = None client._delete_log_metric(request=None) @@ -2823,7 +2823,7 @@ async def test__list_log_metrics_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._list_log_metrics), + type(client.transport.list_log_metrics), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.ListLogMetricsResponse( @@ -2850,7 +2850,7 @@ async def test__get_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._get_log_metric), + type(client.transport.get_log_metric), '__call__') as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( @@ -2883,7 +2883,7 @@ async def test__create_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._create_log_metric), + type(client.transport.create_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( @@ -2916,7 +2916,7 @@ async def test__update_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._update_log_metric), + type(client.transport.update_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(logging_metrics.LogMetric( @@ -2949,7 +2949,7 @@ async def test__delete_log_metric_empty_call_grpc_asyncio(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport._delete_log_metric), + type(client.transport.delete_log_metric), '__call__') as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2993,11 +2993,11 @@ def test_metrics_service_v2_base_transport(): # Every method on the transport should just blindly # raise NotImplementedError. 
methods = ( - '_list_log_metrics', - '_get_log_metric', - '_create_log_metric', - '_update_log_metric', - '_delete_log_metric', + 'list_log_metrics', + 'get_log_metric', + 'create_log_metric', + 'update_log_metric', + 'delete_log_metric', 'get_operation', 'cancel_operation', 'list_operations', diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index db9eb1d10d34..8a6c2a44bc91 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -23,6 +23,8 @@ from gapic.schema import metadata from gapic.schema import wrappers +from test_utils.test_utils import make_method + # Injected dummy test types @@ -41,6 +43,11 @@ class DummyMethod: client_output_async: bool = False is_internal: bool = False + @property + def client_method_name(self): + method_wrapper = make_method(name=self.name, is_internal=self.is_internal) + return method_wrapper.client_method_name + DummyIdent = namedtuple("DummyIdent", ["name", "sphinx"]) DummyIdent.__new__.__defaults__ = (False,) * len(DummyIdent._fields) diff --git a/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py new file mode 100644 index 000000000000..0c7246838947 --- /dev/null +++ b/packages/gapic-generator/tests/unit/samplegen/golden_snippets/sample_basic_internal.py @@ -0,0 +1,61 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Classify +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install molluscs-v1-molluscclient + + +# [START mollusc_classify_sync_internal] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from molluscs.v1 import molluscclient + + +def sample_classify(video, location): + # Create a client + client = molluscclient.BaseMolluscServiceClient() + + # Initialize request argument(s) + classify_target = molluscclient.ClassifyTarget() + + # video = "path/to/mollusc/video.mkv" + with open(video, "rb") as f: + classify_target.video = f.read() + + # location = "New Zealand" + classify_target.location_annotation = location + + request = molluscclient.molluscs.v1.ClassifyRequest( + classify_target=classify_target, + ) + + # Make the request + response = client._classify(request=request) + + # Handle the response + print(f"Mollusc is a \"{response.taxonomy}\"") + +# [END mollusc_classify_sync_internal] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_integration.py b/packages/gapic-generator/tests/unit/samplegen/test_integration.py index 860f3efaa470..c937f78bddd9 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_integration.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_integration.py @@ -547,6 +547,132 @@ def test_generate_sample_void_method(): } +def 
test_generate_sample_basic_internal(): + # Note: the sample integration tests are needfully large + # and difficult to eyeball parse. They are intended to be integration tests + # that catch errors in behavior that is emergent from combining smaller features + # or in features that are sufficiently small and trivial that it doesn't make sense + # to have standalone tests. + + classify_target_field = DummyField( + name="classify_target", + type=DummyMessageTypePB(name="ClassifyTarget"), + message=DummyMessage( + type="CLASSIFY TYPE", + fields={ + "video": DummyField( + type=DummyMessageTypePB(name="Video"), + message=DummyMessage(type="VIDEO TYPE"), + ), + "location_annotation": DummyField( + type=DummyMessageTypePB(name="Location"), + message=DummyMessage(type="LOCATION TYPE"), + ), + }, + ), + ident=DummyIdent(sphinx="molluscs_v1.ClassifyTarget"), + ) + + input_type = DummyMessage( + type="REQUEST TYPE", + fields={"classify_target": classify_target_field}, + ident=DummyIdent( + name="molluscs.v1.ClassifyRequest", sphinx="molluscs_v1.classify_request" + ), + ) + + output_type = DummyMessage( + type="RESPONSE TYPE", + fields={ + "classification": DummyField( + type=DummyMessageTypePB(name="Classification"), + ) + }, + ident=DummyIdent(sphinx="molluscs_v1.classification"), + ) + + api_naming = naming.NewNaming(name="MolluscClient", namespace=("molluscs", "v1")) + service = wrappers.Service( + service_pb=namedtuple("service_pb", ["name"])("MolluscService"), + methods={ + "Classify": DummyMethod( + name="Classify", + input=input_type, + output=message_factory("$resp.taxonomy"), + client_output=output_type, + flattened_fields={"classify_target": classify_target_field}, + is_internal=True, + ) + }, + visible_resources={}, + ) + + schema = DummyApiSchema( + services={"animalia.mollusca.v1.Mollusc": service}, + naming=api_naming, + ) + + sample = { + "service": "animalia.mollusca.v1.Mollusc", + "region_tag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync_internal", 
+ "rpc": "Classify", + "id": "mollusc_classify_sync_internal", + "description": "Determine the full taxonomy of input mollusc", + "request": [ + { + "field": "classify_target.video", + "value": "path/to/mollusc/video.mkv", + "input_parameter": "video", + "value_is_file": True, + }, + { + "field": "classify_target.location_annotation", + "value": "New Zealand", + "input_parameter": "location", + }, + ], + "response": [{"print": ['Mollusc is a "%s"', "$resp.taxonomy"]}], + } + + sample_str, metadata = samplegen.generate_sample( + sample, schema, env.get_template("examples/sample.py.j2") + ) + + assert sample_str == golden_snippet("sample_basic_internal.py") + + assert json_format.MessageToDict(metadata) == { + "regionTag": "molluscs_generated_molluscs_v1_Mollusc_Classify_sync_internal", + "description": "Sample for Classify", + "language": "PYTHON", + "clientMethod": { + "shortName": "_classify", + "fullName": "molluscs.v1.molluscclient.BaseMolluscServiceClient._classify", + "parameters": [ + {"type": "molluscs_v1.classify_request", "name": "request"}, + {"type": "molluscs_v1.ClassifyTarget", "name": "classify_target"}, + {"type": "google.api_core.retry.Retry", "name": "retry"}, + {"type": "float", "name": "timeout"}, + {"type": "Sequence[Tuple[str, Union[str, bytes]]]", "name": "metadata"}, + ], + "resultType": "molluscs_v1.classification", + "client": { + "shortName": "BaseMolluscServiceClient", + "fullName": "molluscs.v1.molluscclient.BaseMolluscServiceClient", + }, + "method": { + "shortName": "Classify", + "fullName": ".MolluscService.Classify", + "service": { + "shortName": "MolluscService", + "fullName": ".MolluscService", + }, + }, + }, + "canonical": True, + "origin": "API_DEFINITION", + } + + def test_generate_sample_service_not_found(): schema = DummyApiSchema({}, DummyNaming("pkg_name")) sample = {"service": "Mollusc"} diff --git a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py 
b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py index a884c262410a..c6a1f7675d1b 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_samplegen.py @@ -2300,18 +2300,18 @@ def test_generate_sample_spec_internal_method(): "description": "Snippet for bighead", }, { - "rpc": "Ramshorn", + "rpc": "NotRamshorn", "transport": "rest", "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_v1_generated_Squid_Ramshorn_sync", - "description": "Snippet for ramshorn", + "region_tag": "example_v1_generated_Squid_NotRamshorn_sync_internal", + "description": "Snippet for not_ramshorn", }, { - "rpc": "_NotRamshorn", + "rpc": "Ramshorn", "transport": "rest", "service": "animalia.mollusca.v1.Squid", - "region_tag": "example_v1_generated_Squid__NotRamshorn_sync_internal", - "description": "Snippet for _not_ramshorn", + "region_tag": "example_v1_generated_Squid_Ramshorn_sync", + "description": "Snippet for ramshorn", }, ] diff --git a/packages/gapic-generator/tests/unit/samplegen/test_template.py b/packages/gapic-generator/tests/unit/samplegen/test_template.py index b63cffbe3616..9d984e0e1bf9 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_template.py +++ b/packages/gapic-generator/tests/unit/samplegen/test_template.py @@ -972,7 +972,10 @@ def test_render_method_call_basic(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": False, + }, calling_form, calling_form_enum, transport) }} """, """ @@ -995,7 +998,10 @@ def test_render_method_call_basic_async(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": 
request, + "is_internal": False, + }, calling_form, calling_form_enum, transport) }} """, """ @@ -1014,15 +1020,18 @@ def test_render_method_call_basic_async(): ) -def test_render_method_call_basic_async(): +def test_render_method_call_basic_flattenable(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": False, + }, calling_form, calling_form_enum, transport) }} """, """ - await client.categorize_mollusc(request=request) + client.categorize_mollusc(video=video, audio=audio, guess=guess) """, request=samplegen.FullRequest( request_list=[ @@ -1030,22 +1039,174 @@ def test_render_method_call_basic_async(): samplegen.TransformedRequest(base="audio", body=True, single=None), samplegen.TransformedRequest(base="guess", body=True, single=None), ], + flattenable=True, ), calling_form_enum=CallingForm, calling_form=CallingForm.Request, + transport="grpc", + ) + + +def test_render_method_call_bidi(): + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": False, + }, + calling_form, calling_form_enum, transport) }} + """, + """ + client.categorize_mollusc(requests=request_generator()) + """, + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", body=True, single=None) + ] + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi, + transport="grpc", + ) + + +def test_render_method_call_bidi_async(): + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": False, + }, + calling_form, calling_form_enum, transport) }} + """, + """ + await client.categorize_mollusc(requests=request_generator()) + """, 
+ request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", body=True, single=None) + ] + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingBidi, transport="grpc-async", ) -def test_render_method_call_basic_flattenable(): +def test_render_method_call_client(): + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": False, + }, + calling_form, calling_form_enum, transport) }} + """, + """ + client.categorize_mollusc(requests=request_generator()) + """, + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", body=True, single=None) + ] + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingClient, + transport="grpc", + ) + + +def test_render_method_call_client_async(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": False, + }, + calling_form, calling_form_enum, transport) }} + """, + """ + await client.categorize_mollusc(requests=request_generator()) + """, + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", body=True, single=None) + ] + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.RequestStreamingClient, + transport="grpc-async", + ) + + +def test_render_method_call_basic_internal(): + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, calling_form, calling_form_enum, transport) }} """, """ - client.categorize_mollusc(video=video, audio=audio, guess=guess) + client._categorize_mollusc(request=request) + """, + request=samplegen.FullRequest( + 
request_list=[ + samplegen.TransformedRequest(base="video", body=True, single=None), + samplegen.TransformedRequest(base="audio", body=True, single=None), + samplegen.TransformedRequest(base="guess", body=True, single=None), + ], + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.Request, + transport="grpc", + ) + + +def test_render_method_call_basic_async_internal(): + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, + calling_form, calling_form_enum, transport) }} + """, + """ + await client._categorize_mollusc(request=request) + """, + request=samplegen.FullRequest( + request_list=[ + samplegen.TransformedRequest(base="video", body=True, single=None), + samplegen.TransformedRequest(base="audio", body=True, single=None), + samplegen.TransformedRequest(base="guess", body=True, single=None), + ], + ), + calling_form_enum=CallingForm, + calling_form=CallingForm.Request, + transport="grpc-async", + ) + + +def test_render_method_call_basic_flattenable_internal(): + check_template( + """ + {% import "feature_fragments.j2" as frags %} + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, + calling_form, calling_form_enum, transport) }} + """, + """ + client._categorize_mollusc(video=video, audio=audio, guess=guess) """, request=samplegen.FullRequest( request_list=[ @@ -1061,15 +1222,18 @@ def test_render_method_call_basic_flattenable(): ) -def test_render_method_call_bidi(): +def test_render_method_call_bidi_internal(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, calling_form, calling_form_enum, transport) }} """, """ - 
client.categorize_mollusc(requests=request_generator()) + client._categorize_mollusc(requests=request_generator()) """, request=samplegen.FullRequest( request_list=[ @@ -1082,15 +1246,18 @@ def test_render_method_call_bidi(): ) -def test_render_method_call_bidi_async(): +def test_render_method_call_bidi_async_internal(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, calling_form, calling_form_enum, transport) }} """, """ - await client.categorize_mollusc(requests=request_generator()) + await client._categorize_mollusc(requests=request_generator()) """, request=samplegen.FullRequest( request_list=[ @@ -1103,15 +1270,18 @@ def test_render_method_call_bidi_async(): ) -def test_render_method_call_client(): +def test_render_method_call_client_internal(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, calling_form, calling_form_enum, transport) }} """, """ - client.categorize_mollusc(requests=request_generator()) + client._categorize_mollusc(requests=request_generator()) """, request=samplegen.FullRequest( request_list=[ @@ -1124,15 +1294,18 @@ def test_render_method_call_client(): ) -def test_render_method_call_client_async(): +def test_render_method_call_client_async_internal(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_method_call({"rpc": "CategorizeMollusc", "request": request}, + {{ frags.render_method_call({"rpc": "CategorizeMollusc", + "request": request, + "is_internal": True, + }, calling_form, calling_form_enum, transport) }} """, """ - await client.categorize_mollusc(requests=request_generator()) + await 
client._categorize_mollusc(requests=request_generator()) """, request=samplegen.FullRequest( request_list=[ @@ -1186,7 +1359,8 @@ def test_main_block(): check_template( """ {% import "feature_fragments.j2" as frags %} - {{ frags.render_main_block("ListMolluscs", request) }} + {{ frags.render_main_block({"rpc": "ListMolluscs", "is_internal": False}, + request) }} """, """ def main(): diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 758e4020cc26..8b399c54d5a1 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -2186,6 +2186,155 @@ def test_gapic_metadata(): assert expected == actual +def test_gapic_metadata_internal_method(): + # Only have squid.Ramshorn as an internal method. + service_yaml = get_service_yaml_for_selective_gapic_tests( + apis=[ + "animalia.mollusca.v1.Squid", + "animalia.mollusca.v1.Octopus", + ], + version="animalia.mollusca.v1", + methods=[ + "animalia.mollusca.v1.Squid.Humboldt", + "animalia.mollusca.v1.Squid.Giant", + "animalia.mollusca.v1.Octopus.GiantPacific", + "animalia.mollusca.v1.Octopus.BlueSpot", + ], + generate_omitted_as_internal=True, + ) + + api_schema = api.API.build( + file_descriptors=[ + descriptor_pb2.FileDescriptorProto( + name="cephalopod.proto", + package="animalia.mollusca.v1", + message_type=[ + descriptor_pb2.DescriptorProto( + name="MolluscRequest", + ), + descriptor_pb2.DescriptorProto( + name="Mollusc", + ), + ], + service=[ + descriptor_pb2.ServiceDescriptorProto( + name="Squid", + method=[ + descriptor_pb2.MethodDescriptorProto( + name="Ramshorn", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + name="Humboldt", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + 
name="Giant", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + descriptor_pb2.ServiceDescriptorProto( + name="Octopus", + method=[ + descriptor_pb2.MethodDescriptorProto( + name="GiantPacific", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + descriptor_pb2.MethodDescriptorProto( + name="BlueSpot", + input_type="animalia.mollusca.v1.MolluscRequest", + output_type="animalia.mollusca.v1.Mollusc", + ), + ], + ), + ], + ) + ], + package="animalia.mollusca.v1", + opts=Options(service_yaml_config=service_yaml), + ) + + opts = Options.build("transport=grpc") + expected = gapic_metadata_pb2.GapicMetadata( + schema="1.0", + comment="This file maps proto services/RPCs to the corresponding library clients/methods", + language="python", + proto_package="animalia.mollusca.v1", + library_package="animalia.mollusca_v1", + services={ + "Octopus": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), + }, + ), + "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="OctopusAsyncClient", + rpcs={ + "BlueSpot": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["blue_spot"] + ), + "GiantPacific": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant_pacific"] + ), + }, + ), + } + ), + "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + clients={ + "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="BaseSquidClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + 
"Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["_ramshorn"] + ), + }, + ), + "grpc-async": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( + library_client="BaseSquidAsyncClient", + rpcs={ + "Giant": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["giant"] + ), + "Humboldt": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["humboldt"] + ), + "Ramshorn": gapic_metadata_pb2.GapicMetadata.MethodList( + methods=["_ramshorn"] + ), + }, + ), + } + ), + }, + ) + + actual = api_schema.gapic_metadata(opts) + assert expected == actual + expected = MessageToJson(expected, sort_keys=True) + actual = api_schema.gapic_metadata_json(opts) + assert expected == actual + + def test_http_options(fs): fd = ( make_file_pb2( @@ -3060,6 +3209,7 @@ def test_python_settings_selective_gapic_version_mismatch_method_raises_error(): def get_service_yaml_for_selective_gapic_tests( apis: Sequence[str] = ["google.example.v1.FooService"], + version: str = "google.example.v1", methods=["google.example.v1.FooService.GetFoo"], generate_omitted_as_internal=False, ) -> Dict[str, Any]: @@ -3068,7 +3218,7 @@ def get_service_yaml_for_selective_gapic_tests( "publishing": { "library_settings": [ { - "version": "google.example.v1", + "version": version, "python_settings": { "experimental_features": {"rest_async_io_enabled": True}, "common": { @@ -3957,13 +4107,12 @@ def test_selective_gapic_api_build_generate_omitted_as_internal(): assert "google.example.v1.ServiceOne" in api_schema.services assert "google.example.v1.ServiceTwo" in api_schema.services - assert "google.example.v1.ServiceOne.InternalMethod" not in api_schema.all_methods - assert "google.example.v1.ServiceOne._InternalMethod" in api_schema.all_methods + assert "google.example.v1.ServiceOne.InternalMethod" in api_schema.all_methods assert api_schema.services["google.example.v1.ServiceOne"].is_internal assert not api_schema.services["google.example.v1.ServiceTwo"].is_internal assert api_schema.all_methods[ - 
"google.example.v1.ServiceOne._InternalMethod" + "google.example.v1.ServiceOne.InternalMethod" ].is_internal assert not api_schema.all_methods[ "google.example.v1.ServiceOne.NonInternalMethod" @@ -3981,7 +4130,9 @@ def test_selective_gapic_api_build_generate_omitted_as_internal(): == "BaseServiceOneAsyncClient" ) assert ( - api_schema.all_methods["google.example.v1.ServiceOne._InternalMethod"].name + api_schema.all_methods[ + "google.example.v1.ServiceOne.InternalMethod" + ].client_method_name == "_InternalMethod" ) diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index 9338d73f673d..bd46506e92a7 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -960,18 +960,32 @@ def test_transport_safe_name(): assert method.transport_safe_name == f"{name}_" -def test_safe_name(): +def test_client_method_name(): unsafe_methods = { name: make_method(name=name) for name in ["import", "Import", "Raise"] } safe_methods = {name: make_method(name=name) for name in ["Call", "Put", "Hold"]} + internal_unsafe_methods = { + k: dataclasses.replace(v, is_internal=True) for k, v in unsafe_methods.items() + } + + internal_safe_methods = { + k: dataclasses.replace(v, is_internal=True) for k, v in safe_methods.items() + } + for name, method in safe_methods.items(): - assert method.safe_name == name + assert method.client_method_name == name for name, method in unsafe_methods.items(): - assert method.safe_name == f"{name}_" + assert method.client_method_name == f"{name}_" + + for name, method in internal_safe_methods.items(): + assert method.client_method_name == f"_{name}" + + for name, method in internal_unsafe_methods.items(): + assert method.client_method_name == f"_{name}_" def test_mixin_rule(): From deda31bfa737bb97bf44b8bb2b010bd28a1fd859 Mon Sep 17 00:00:00 2001 From: Andrew Z Allen 
Date: Sat, 22 Mar 2025 05:10:08 -0600 Subject: [PATCH 1270/1339] fix: missing DEFAULT_HOST should still result in compiling code (#2051) Co-authored-by: Victor Chudnovsky Co-authored-by: Anthonios Partheniou --- .../%name_%version/%sub/services/%service/transports/base.py.j2 | 2 +- .../cloud/asset_v1/services/asset_service/transports/base.py | 1 + .../credentials_v1/services/iam_credentials/transports/base.py | 1 + .../cloud/eventarc_v1/services/eventarc/transports/base.py | 1 + .../logging_v2/services/config_service_v2/transports/base.py | 1 + .../logging_v2/services/logging_service_v2/transports/base.py | 1 + .../logging_v2/services/metrics_service_v2/transports/base.py | 1 + .../logging_v2/services/config_service_v2/transports/base.py | 1 + .../logging_v2/services/logging_service_v2/transports/base.py | 1 + .../logging_v2/services/metrics_service_v2/transports/base.py | 1 + .../cloud/redis_v1/services/cloud_redis/transports/base.py | 1 + .../cloud/redis_v1/services/cloud_redis/transports/base.py | 1 + 12 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 0daf45041a13..3596da1bb289 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -61,7 +61,7 @@ class {{ service.name }}Transport(abc.ABC): {% endfor %} ) - DEFAULT_HOST: str = {% if service.host %}'{{ service.host }}'{% else %}{{ '' }}{% endif %} + DEFAULT_HOST: str = '{% if service.host %}{{ service.host }}{% endif %}' def __init__( self, *, diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py 
b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 0c968b0c7d9f..a788aa0a6af2 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -42,6 +42,7 @@ class AssetServiceTransport(abc.ABC): ) DEFAULT_HOST: str = 'cloudasset.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 5b9d0cf1b4e6..941b1e0d32e2 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -39,6 +39,7 @@ class IAMCredentialsTransport(abc.ABC): ) DEFAULT_HOST: str = 'iamcredentials.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 650e0a956960..ff8e9e74699e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -50,6 +50,7 @@ class EventarcTransport(abc.ABC): ) DEFAULT_HOST: str = 'eventarc.googleapis.com' + def 
__init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 479f4fdb171c..865c16bb657a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -45,6 +45,7 @@ class ConfigServiceV2Transport(abc.ABC): ) DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index e7cc266faeff..d8d2c3edc1cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -45,6 +45,7 @@ class LoggingServiceV2Transport(abc.ABC): ) DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 596f3beeddde..d5a7b7f1f33b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -45,6 +45,7 @@ class MetricsServiceV2Transport(abc.ABC): ) DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 479f4fdb171c..865c16bb657a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -45,6 +45,7 @@ class ConfigServiceV2Transport(abc.ABC): ) DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index e7cc266faeff..d8d2c3edc1cf 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -45,6 +45,7 @@ class LoggingServiceV2Transport(abc.ABC): ) DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 596f3beeddde..d5a7b7f1f33b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -45,6 +45,7 @@ class MetricsServiceV2Transport(abc.ABC): ) DEFAULT_HOST: str = 'logging.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 1b11ec1f69df..4e2923b1e747 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -42,6 +42,7 @@ class CloudRedisTransport(abc.ABC): ) DEFAULT_HOST: str = 'redis.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 1802b33f7bb8..8cdb19ba64cc 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -42,6 +42,7 @@ class CloudRedisTransport(abc.ABC): ) DEFAULT_HOST: str = 
'redis.googleapis.com' + def __init__( self, *, host: str = DEFAULT_HOST, From 8c6dda44013d5a1625e8c00ca1833b0f0add1097 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Mar 2025 15:53:31 -0400 Subject: [PATCH 1271/1339] build: remove showcase integration tests ads (#2370) --- .../.github/sync-repo-settings.yaml | 3 --- .../.github/workflows/tests.yaml | 4 ++-- packages/gapic-generator/noxfile.py | 22 ------------------- 3 files changed, 2 insertions(+), 27 deletions(-) diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml index 5c94998733d2..5a0de29a10bf 100644 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ b/packages/gapic-generator/.github/sync-repo-settings.yaml @@ -15,14 +15,11 @@ branchProtectionRules: - 'mypy (3.13)' - 'showcase (3.7, showcase)' - 'showcase (3.13, showcase)' - - 'showcase (3.7, showcase_alternative_templates)' - - 'showcase (3.13, showcase_alternative_templates)' - 'showcase (3.7, showcase_w_rest_async)' - 'showcase (3.13, showcase_w_rest_async)' # TODO(dovs): reenable these when the mtls tests have been debugged and fixed # See #1218 for details # - 'showcase-mtls (showcase_mtls)' - # - 'showcase-mtls (showcase_mtls_alternative_templates)' - 'showcase-mypy' - 'showcase-mypy (_alternative_templates)' - 'showcase-unit (3.7)' diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index ce5307d59a75..5b2efc098d5a 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -57,7 +57,7 @@ jobs: matrix: # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA. 
python: ["3.7", "3.13"] - target: [showcase, showcase_alternative_templates, showcase_w_rest_async] + target: [showcase, showcase_w_rest_async] logging_scope: ["", "google"] # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. @@ -102,7 +102,7 @@ jobs: if: ${{ false }} # TODO(dovs): reenable when #1218 is fixed strategy: matrix: - target: [showcase_mtls, showcase_mtls_alternative_templates] + target: [showcase_mtls] max-parallel: 1 runs-on: ubuntu-latest steps: diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 3198eb964149..b4f8b203de61 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -461,28 +461,6 @@ def showcase_mtls( ) -@nox.session(python=ALL_PYTHON) -def showcase_alternative_templates(session): - templates = path.join(path.dirname(__file__), "gapic", "ads-templates") - showcase( - session, - templates=templates, - other_opts=("old-naming",), - env={"GAPIC_PYTHON_ASYNC": "False", "IGNORE_FILE": "test_universe_domain.py"}, - ) - - -@nox.session(python=NEWEST_PYTHON) -def showcase_mtls_alternative_templates(session): - templates = path.join(path.dirname(__file__), "gapic", "ads-templates") - showcase_mtls( - session, - templates=templates, - other_opts=("old-naming",), - env={"GAPIC_PYTHON_ASYNC": "False", "IGNORE_FILE": "test_universe_domain.py"}, - ) - - def run_showcase_unit_tests(session, fail_under=100, rest_async_io_enabled=False): session.install( "coverage", From 45eb8b43319be5a0c0750082c5a26392754e0972 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 28 Mar 2025 16:30:06 -0400 Subject: [PATCH 1272/1339] tests: add showcase tests for reading grpc/rest response metadata (#2300) --- .../gapic-generator/tests/system/conftest.py | 176 ++++++++++++++++-- .../system/test_grpc_interceptor_streams.py | 12 +- .../tests/system/test_response_metadata.py | 107 +++++++++++ 3 files changed, 280 
insertions(+), 15 deletions(-) create mode 100644 packages/gapic-generator/tests/system/test_response_metadata.py diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index f45c63ee12aa..a7967d4f5d08 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ b/packages/gapic-generator/tests/system/conftest.py @@ -12,13 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -import collections + import grpc from unittest import mock import os import pytest +from typing import Sequence, Tuple + from google.api_core.client_options import ClientOptions # type: ignore +from google.showcase_v1beta1.services.echo.transports import EchoRestInterceptor try: from google.auth.aio import credentials as ga_credentials_async @@ -42,6 +45,7 @@ try: from google.showcase_v1beta1.services.echo.transports import ( AsyncEchoRestTransport, + AsyncEchoRestInterceptor, ) HAS_ASYNC_REST_ECHO_TRANSPORT = True @@ -248,7 +252,51 @@ def messaging(use_mtls, request): return construct_client(MessagingClient, use_mtls, transport_name=request.param) -class MetadataClientInterceptor( +class EchoMetadataClientRestInterceptor(EchoRestInterceptor): + request_metadata: Sequence[Tuple[str, str]] = [] + response_metadata: Sequence[Tuple[str, str]] = [] + + def pre_echo(self, request, metadata): + self.request_metadata = metadata + return request, metadata + + def post_echo_with_metadata(self, request, metadata): + self.response_metadata = metadata + return request, metadata + + def pre_expand(self, request, metadata): + self.request_metadata = metadata + return request, metadata + + def post_expand_with_metadata(self, request, metadata): + self.response_metadata = metadata + return request, metadata + + +if HAS_ASYNC_REST_ECHO_TRANSPORT: + + class EchoMetadataClientRestAsyncInterceptor(AsyncEchoRestInterceptor): + request_metadata: Sequence[Tuple[str, str]] = [] + 
response_metadata: Sequence[Tuple[str, str]] = [] + + async def pre_echo(self, request, metadata): + self.request_metadata = metadata + return request, metadata + + async def post_echo_with_metadata(self, request, metadata): + self.response_metadata = metadata + return request, metadata + + async def pre_expand(self, request, metadata): + self.request_metadata = metadata + return request, metadata + + async def post_expand_with_metadata(self, request, metadata): + self.response_metadata = metadata + return request, metadata + + +class EchoMetadataClientGrpcInterceptor( grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor, grpc.StreamUnaryClientInterceptor, @@ -257,42 +305,94 @@ class MetadataClientInterceptor( def __init__(self, key, value): self._key = key self._value = value + self.request_metadata = [] + self.response_metadata = [] - def _add_metadata(self, client_call_details): + def _add_request_metadata(self, client_call_details): if client_call_details.metadata is not None: client_call_details.metadata.append((self._key, self._value)) + self.request_metadata = client_call_details.metadata def intercept_unary_unary(self, continuation, client_call_details, request): - self._add_metadata(client_call_details) + self._add_request_metadata(client_call_details) response = continuation(client_call_details, request) + metadata = [(k, str(v)) for k, v in response.trailing_metadata()] + self.response_metadata = metadata return response def intercept_unary_stream(self, continuation, client_call_details, request): - self._add_metadata(client_call_details) + self._add_request_metadata(client_call_details) response_it = continuation(client_call_details, request) return response_it def intercept_stream_unary( self, continuation, client_call_details, request_iterator ): - self._add_metadata(client_call_details) + self._add_request_metadata(client_call_details) response = continuation(client_call_details, request_iterator) return response def 
intercept_stream_stream( self, continuation, client_call_details, request_iterator ): - self._add_metadata(client_call_details) + self._add_request_metadata(client_call_details) + response_it = continuation(client_call_details, request_iterator) + return response_it + + +class EchoMetadataClientGrpcAsyncInterceptor( + grpc.aio.UnaryUnaryClientInterceptor, + grpc.aio.UnaryStreamClientInterceptor, + grpc.aio.StreamUnaryClientInterceptor, + grpc.aio.StreamStreamClientInterceptor, +): + def __init__(self, key, value): + self._key = key + self._value = value + self.request_metadata = [] + self.response_metadata = [] + + async def _add_request_metadata(self, client_call_details): + if client_call_details.metadata is not None: + client_call_details.metadata.append((self._key, self._value)) + self.request_metadata = client_call_details.metadata + + async def intercept_unary_unary(self, continuation, client_call_details, request): + await self._add_request_metadata(client_call_details) + response = await continuation(client_call_details, request) + metadata = [(k, str(v)) for k, v in await response.trailing_metadata()] + self.response_metadata = metadata + return response + + async def intercept_unary_stream(self, continuation, client_call_details, request): + self._add_request_metadata(client_call_details) + response_it = continuation(client_call_details, request) + return response_it + + async def intercept_stream_unary( + self, continuation, client_call_details, request_iterator + ): + self._add_request_metadata(client_call_details) + response = continuation(client_call_details, request_iterator) + return response + + async def intercept_stream_stream( + self, continuation, client_call_details, request_iterator + ): + self._add_request_metadata(client_call_details) response_it = continuation(client_call_details, request_iterator) return response_it @pytest.fixture -def intercepted_echo(use_mtls): +def intercepted_echo_grpc(use_mtls): # The interceptor adds 
'showcase-trailer' client metadata. Showcase server - # echos any metadata with key 'showcase-trailer', so the same metadata + # echoes any metadata with key 'showcase-trailer', so the same metadata # should appear as trailing metadata in the response. - interceptor = MetadataClientInterceptor("showcase-trailer", "intercepted") + interceptor = EchoMetadataClientGrpcInterceptor( + "showcase-trailer", + "intercepted", + ) host = "localhost:7469" channel = ( grpc.secure_channel(host, ssl_credentials) @@ -304,4 +404,58 @@ def intercepted_echo(use_mtls): credentials=ga_credentials.AnonymousCredentials(), channel=intercept_channel, ) - return EchoClient(transport=transport) + return EchoClient(transport=transport), interceptor + + +@pytest.fixture +def intercepted_echo_grpc_async(): + # The interceptor adds 'showcase-trailer' client metadata. Showcase server + # echoes any metadata with key 'showcase-trailer', so the same metadata + # should appear as trailing metadata in the response. + interceptor = EchoMetadataClientGrpcAsyncInterceptor( + "showcase-trailer", + "intercepted", + ) + host = "localhost:7469" + channel = grpc.aio.insecure_channel(host, interceptors=[interceptor]) + # intercept_channel = grpc.aio.intercept_channel(channel, interceptor) + transport = EchoAsyncClient.get_transport_class("grpc_asyncio")( + credentials=ga_credentials.AnonymousCredentials(), + channel=channel, + ) + return EchoAsyncClient(transport=transport), interceptor + + +@pytest.fixture +def intercepted_echo_rest(): + transport_name = "rest" + transport_cls = EchoClient.get_transport_class(transport_name) + interceptor = EchoMetadataClientRestInterceptor() + + # The custom host explicitly bypasses https. 
+ transport = transport_cls( + credentials=ga_credentials.AnonymousCredentials(), + host="localhost:7469", + url_scheme="http", + interceptor=interceptor, + ) + return EchoClient(transport=transport), interceptor + + +@pytest.fixture +def intercepted_echo_rest_async(): + if not HAS_ASYNC_REST_ECHO_TRANSPORT: + pytest.skip("Skipping test with async rest.") + + transport_name = "rest_asyncio" + transport_cls = EchoAsyncClient.get_transport_class(transport_name) + interceptor = EchoMetadataClientRestAsyncInterceptor() + + # The custom host explicitly bypasses https. + transport = transport_cls( + credentials=async_anonymous_credentials(), + host="localhost:7469", + url_scheme="http", + interceptor=interceptor, + ) + return EchoAsyncClient(transport=transport), interceptor diff --git a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py index d0eb86e4fc2c..50c5bee31d30 100644 --- a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py @@ -21,9 +21,10 @@ intercepted_metadata = (("showcase-trailer", "intercepted"),) -def test_unary_stream(intercepted_echo): +def test_unary_stream(intercepted_echo_grpc): + client, interceptor = intercepted_echo_grpc content = "The hail in Wales falls mainly on the snails." 
- responses = intercepted_echo.expand( + responses = client.expand( { "content": content, } @@ -37,13 +38,15 @@ def test_unary_stream(intercepted_echo): (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert intercepted_metadata[0] in response_metadata + interceptor.response_metadata = response_metadata -def test_stream_stream(intercepted_echo): +def test_stream_stream(intercepted_echo_grpc): + client, interceptor = intercepted_echo_grpc requests = [] requests.append(showcase.EchoRequest(content="hello")) requests.append(showcase.EchoRequest(content="world!")) - responses = intercepted_echo.chat(iter(requests)) + responses = client.chat(iter(requests)) contents = [response.content for response in responses] assert contents == ["hello", "world!"] @@ -52,3 +55,4 @@ def test_stream_stream(intercepted_echo): (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] assert intercepted_metadata[0] in response_metadata + interceptor.response_metadata = response_metadata diff --git a/packages/gapic-generator/tests/system/test_response_metadata.py b/packages/gapic-generator/tests/system/test_response_metadata.py new file mode 100644 index 000000000000..f99abf3e11fa --- /dev/null +++ b/packages/gapic-generator/tests/system/test_response_metadata.py @@ -0,0 +1,107 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import pytest + +from google import showcase + + +@pytest.mark.parametrize( + "transport,response_metadata", + [ + ("grpc", ("something1", "something_value1")), + ("rest", ("X-Showcase-Request-Something1", "something_value1")), + ], +) +def test_metadata_response_unary( + intercepted_echo_rest, intercepted_echo_grpc, transport, response_metadata +): + request_content = "The hail in Wales falls mainly on the snails." + request_metadata = ("something1", "something_value1") + if transport == "grpc": + client, interceptor = intercepted_echo_grpc + else: + client, interceptor = intercepted_echo_rest + response = client.echo( + request=showcase.EchoRequest(content=request_content), + metadata=(request_metadata,), + ) + assert response.content == request_content + assert request_metadata in interceptor.request_metadata + assert response_metadata in interceptor.response_metadata + + +def test_metadata_response_rest_streams(intercepted_echo_rest): + request_content = "The hail in Wales falls mainly on the snails." + request_metadata = ("something2", "something_value2") + response_metadata = ("X-Showcase-Request-Something2", "something_value2") + client, interceptor = intercepted_echo_rest + client.expand( + { + "content": request_content, + }, + metadata=(request_metadata,), + ) + + assert request_metadata in interceptor.request_metadata + assert response_metadata in interceptor.response_metadata + + +if os.environ.get("GAPIC_PYTHON_ASYNC", "true") == "true": + + @pytest.mark.asyncio + async def test_metadata_response_rest_streams_async(intercepted_echo_rest_async): + request_content = "The hail in Wales falls mainly on the snails." 
+ request_metadata = ("something2", "something_value2") + response_metadata = ("X-Showcase-Request-Something2", "something_value2") + client, interceptor = intercepted_echo_rest_async + await client.expand( + { + "content": request_content, + }, + metadata=(request_metadata,), + ) + + assert request_metadata in interceptor.request_metadata + assert response_metadata in interceptor.response_metadata + + @pytest.mark.parametrize( + "transport,response_metadata", + [ + ("grpc_asyncio", ("something3", "something_value3")), + ("rest_asyncio", ("X-Showcase-Request-Something3", "something_value3")), + ], + ) + @pytest.mark.asyncio + async def test_metadata_response_unary_async( + intercepted_echo_grpc_async, + intercepted_echo_rest_async, + transport, + response_metadata, + ): + request_content = "The hail in Wales falls mainly on the snails." + request_metadata = ("something3", "something_value3") + + if transport == "grpc_asyncio": + client, interceptor = intercepted_echo_grpc_async + else: + client, interceptor = intercepted_echo_rest_async + response = await client.echo( + request=showcase.EchoRequest(content=request_content), + metadata=(request_metadata,), + ) + assert response.content == request_content + assert request_metadata in interceptor.request_metadata + assert response_metadata in interceptor.response_metadata From 85e587bf5a758fa0c6cfb42463987932eedb1f35 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 28 Mar 2025 19:41:39 -0400 Subject: [PATCH 1273/1339] refactor: reduce code duplication (#2373) --- .../%sub/services/%service/_shared_macros.j2 | 36 +++++++++++++++++++ .../services/%service/transports/grpc.py.j2 | 36 ++----------------- .../%service/transports/grpc_asyncio.py.j2 | 35 ++---------------- .../services/asset_service/transports/grpc.py | 3 +- .../iam_credentials/transports/grpc.py | 3 +- .../services/eventarc/transports/grpc.py | 3 +- .../config_service_v2/transports/grpc.py | 3 +- .../logging_service_v2/transports/grpc.py | 3 +- 
.../metrics_service_v2/transports/grpc.py | 3 +- .../config_service_v2/transports/grpc.py | 3 +- .../logging_service_v2/transports/grpc.py | 3 +- .../metrics_service_v2/transports/grpc.py | 3 +- .../services/cloud_redis/transports/grpc.py | 3 +- .../services/cloud_redis/transports/grpc.py | 3 +- 14 files changed, 53 insertions(+), 87 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index cca0717282d1..6db274e82f0d 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -264,6 +264,42 @@ def _get_http_options(): {% endmacro %} +{% macro unary_request_interceptor_common(service) %} + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic + or wait for next gen protobuf. 
+ #} + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra = { + "serviceName": "{{ service.meta.address.proto }}", + "rpcName": str(client_call_details.method), + "request": grpc_request, + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. #} + "metadata": grpc_request["metadata"], + }, + ) +{%- endmacro %} + + {% macro prep_wrapped_messages_async_method(api, service) %} def _prep_wrapped_messages(self, client_info): """ Precompute the wrapped methods, overriding the base class method to use async wrappers.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index cb3ee8cfa666..7158a4c430df 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -1,5 +1,7 @@ {% extends '_base.py.j2' %} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + {% block content %} import json @@ -59,39 +61,7 @@ _LOGGER = std_logging.getLogger(__name__) class 
_LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic - or wait for next gen protobuf. - #} - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "{{ service.meta.address.proto }}", - "rpcName": client_call_details.method, - "request": grpc_request, - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport. 
#} - "metadata": grpc_request["metadata"], - }, - ) - +{{ shared_macros.unary_request_interceptor_common(service) }} response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 650f4a2c6533..64aeb0abf68b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -1,5 +1,7 @@ {% extends '_base.py.j2' %} +{% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} + {% block content %} {% import "%namespace/%name_%version/%sub/services/%service/_shared_macros.j2" as shared_macros %} @@ -64,38 +66,7 @@ _LOGGER = std_logging.getLogger(__name__) class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): - logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER - request_metadata = client_call_details.metadata - if isinstance(request, proto.Message): - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2293): Investigate if we can improve this logic - or wait for next gen protobuf. 
- #} - request_payload = type(request).to_json(request) - elif isinstance(request, google.protobuf.message.Message): - request_payload = MessageToJson(request) - else: - request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" - - request_metadata = { - key: value.decode("utf-8") if isinstance(value, bytes) else value - for key, value in request_metadata - } - grpc_request = { - "payload": request_payload, - "requestMethod": "grpc", - "metadata": dict(request_metadata), - } - _LOGGER.debug( - f"Sending request for {client_call_details.method}", - extra = { - "serviceName": "{{ service.meta.address.proto }}", - "rpcName": str(client_call_details.method), - "request": grpc_request, - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2275): logging `metadata` seems repetitive and may need to be cleaned up. We're including it within "request" for consistency with REST transport.' #} - "metadata": grpc_request["metadata"], - }, - ) +{{ shared_macros.unary_request_interceptor_common(service) }} response = await continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = await response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 8c3af5208e93..2f1d30c41875 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -70,12 +70,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.cloud.asset.v1.AssetService", - "rpcName": client_call_details.method, + 
"rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 6bb7b4401922..4472c1835245 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -67,12 +67,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.iam.credentials.v1.IAMCredentials", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 107b8db45d68..5ec53389bb27 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -78,12 +78,11 @@ def intercept_unary_unary(self, continuation, 
client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.cloud.eventarc.v1.Eventarc", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 0d7ee84124e9..8e56dd2231fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -70,12 +70,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.logging.v2.ConfigServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index e64b36f15b3c..ec9737507ebd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -69,12 +69,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.logging.v2.LoggingServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 4a2d98cc240e..70a0c89da1c7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -69,12 +69,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.logging.v2.MetricsServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 0d7ee84124e9..8e56dd2231fa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -70,12 +70,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.logging.v2.ConfigServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index e64b36f15b3c..ec9737507ebd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -69,12 +69,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.logging.v2.LoggingServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = 
continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 4a2d98cc240e..70a0c89da1c7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -69,12 +69,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.logging.v2.MetricsServiceV2", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index abb362ff0fa7..6267d2c78aa5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -70,12 +70,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": 
"google.cloud.redis.v1.CloudRedis", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 55222f44434a..fcaefca376cc 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -70,12 +70,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra = { "serviceName": "google.cloud.redis.v1.CloudRedis", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() From f0005b145ae8207824dd9dc98d26428f43151af8 Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 9 Apr 2025 14:13:07 -0400 Subject: [PATCH 1274/1339] feat: adds augmented pagination to account for BQ family of APIs (#2372) Co-authored-by: Anthonios Partheniou --- .../gapic-generator/gapic/schema/wrappers.py | 38 +++++- ...t_pagination_max_results_and_wrapper.proto | 36 ++++++ .../tests/unit/schema/wrappers/test_method.py | 118 ++++++++++++++++++ 3 files changed, 186 insertions(+), 6 deletions(-) create mode 100644 
packages/gapic-generator/tests/fragments/test_pagination_max_results_and_wrapper.proto diff --git a/packages/gapic-generator/gapic/schema/wrappers.py b/packages/gapic-generator/gapic/schema/wrappers.py index 3982afea6c67..17a78327566c 100644 --- a/packages/gapic-generator/gapic/schema/wrappers.py +++ b/packages/gapic-generator/gapic/schema/wrappers.py @@ -1830,13 +1830,34 @@ def ident(self) -> metadata.Address: """Return the identifier data to be used in templates.""" return self.meta.address + def _validate_paged_field_size_type(self, page_field_size) -> bool: + """Validates allowed paged_field_size type(s). + + Confirms whether the paged_field_size.type is an allowed wrapper type: + The norm is for type to be int, but an additional check is included to + account for BigQuery legacy APIs which allowed UInt32Value and + Int32Value. + """ + + pb_type = page_field_size.type + + return pb_type == int or ( + isinstance(pb_type, MessageType) + and pb_type.message_pb.name in {"UInt32Value", "Int32Value"} + ) + @utils.cached_property def paged_result_field(self) -> Optional[Field]: - """Return the response pagination field if the method is paginated.""" - # If the request field lacks any of the expected pagination fields, - # then the method is not paginated. + """Return the response pagination field if the method is paginated. + + The request field must have a page_token field and a page_size field (or + for legacy APIs, a max_results field) and the response field + must have a next_token_field and a repeated field. + + For the purposes of supporting legacy APIs, additional wrapper types are + allowed. 
+ """ - # The request must have page_token and next_page_token as they keep track of pages for source, source_type, name in ( (self.input, str, "page_token"), (self.output, str, "next_page_token"), @@ -1845,13 +1866,18 @@ def paged_result_field(self) -> Optional[Field]: if not field or field.type != source_type: return None - # The request must have max_results or page_size + # The request must have page_size (or max_results if legacy API) page_fields = ( self.input.fields.get("max_results", None), self.input.fields.get("page_size", None), ) page_field_size = next((field for field in page_fields if field), None) - if not page_field_size or page_field_size.type != int: + + if not page_field_size: + return None + + # Confirm whether the paged_field_size is an allowed type. + if not self._validate_paged_field_size_type(page_field_size=page_field_size): return None # Return the first repeated field. diff --git a/packages/gapic-generator/tests/fragments/test_pagination_max_results_and_wrapper.proto b/packages/gapic-generator/tests/fragments/test_pagination_max_results_and_wrapper.proto new file mode 100644 index 000000000000..f305fc0e3072 --- /dev/null +++ b/packages/gapic-generator/tests/fragments/test_pagination_max_results_and_wrapper.proto @@ -0,0 +1,36 @@ +// Copyright (C) 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.fragment; + +import "google/api/client.proto"; +import "google/protobuf/wrappers.proto"; + +service MaxResultsDatasetService { + option (google.api.default_host) = "my.example.com"; + rpc ListMaxResultsDataset(ListMaxResultsDatasetRequest) returns (ListMaxResultsDatasetResponse) { + } +} + +message ListMaxResultsDatasetRequest { + google.protobuf.UInt32Value max_results = 2; + string page_token = 3; +} + +message ListMaxResultsDatasetResponse { + string next_page_token = 3; + repeated string datasets = 4; +} diff --git a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py index bd46506e92a7..088958e06bb4 100644 --- a/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py +++ b/packages/gapic-generator/tests/unit/schema/wrappers/test_method.py @@ -23,6 +23,7 @@ from google.api import routing_pb2 from google.cloud import extended_operations_pb2 as ex_ops_pb2 from google.protobuf import descriptor_pb2 +from google.protobuf import wrappers_pb2 from gapic.schema import metadata from gapic.schema import wrappers @@ -189,6 +190,63 @@ def test_method_paged_result_field_no_page_field(): assert method.paged_result_field is None +def test_method_paged_result_field_invalid_wrapper_type(): + """Validate paged_result_field() returns None if page_size/max_results wrappertypes + are not allowed types. 
+ """ + + # page_size is not allowed wrappertype + parent = make_field(name="parent", type="TYPE_STRING") + page_size = make_field(name="page_size", type="TYPE_DOUBLE") # not an allowed type + page_token = make_field(name="page_token", type="TYPE_STRING") + foos = make_field(name="foos", message=make_message("Foo"), repeated=True) + next_page_token = make_field(name="next_page_token", type="TYPE_STRING") + + input_msg = make_message( + name="ListFoosRequest", + fields=( + parent, + page_size, + page_token, + ), + ) + output_msg = make_message( + name="ListFoosResponse", + fields=( + foos, + next_page_token, + ), + ) + method = make_method( + "ListFoos", + input_message=input_msg, + output_message=output_msg, + ) + assert method.paged_result_field is None + + # max_results is not allowed wrappertype + max_results = make_field( + name="max_results", type="TYPE_STRING" + ) # not an allowed type + + input_msg = make_message( + name="ListFoosRequest", + fields=( + parent, + max_results, + page_token, + ), + ) + + method = make_method( + "ListFoos", + input_message=input_msg, + output_message=output_msg, + ) + + assert method.paged_result_field is None + + def test_method_paged_result_ref_types(): input_msg = make_message( name="ListSquidsRequest", @@ -999,3 +1057,63 @@ def test_mixin_rule(): "city": {}, } assert e == m.sample_request + + +@pytest.mark.parametrize( + "field_type, pb_type, expected", + [ + # valid paged_result_field candidates + (int, "TYPE_INT32", True), + (wrappers_pb2.UInt32Value, "TYPE_MESSAGE", True), + (wrappers_pb2.Int32Value, "TYPE_MESSAGE", True), + # invalid paged_result_field candidates + (float, "TYPE_DOUBLE", False), + (wrappers_pb2.UInt32Value, "TYPE_DOUBLE", False), + (wrappers_pb2.Int32Value, "TYPE_DOUBLE", False), + ], +) +def test__validate_paged_field_size_type(field_type, pb_type, expected): + """Test _validate_paged_field_size_type with wrapper types and type indicators.""" + + # Setup + if pb_type in {"TYPE_INT32", "TYPE_DOUBLE"}: + 
page_size = make_field(name="page_size", type=pb_type) + else: + # expecting TYPE_MESSAGE which in this context is associated with + # *Int32Value in legacy APIs such as BigQuery. + # See: https://github.com/googleapis/gapic-generator-python/blob/c8b7229ba2865d6a2f5966aa151be121de81f92d/gapic/schema/wrappers.py#L378C1-L411C10 + page_size = make_field( + name="max_results", + type=pb_type, + message=make_message(name=field_type.DESCRIPTOR.name), + ) + + parent = make_field(name="parent", type="TYPE_STRING") + page_token = make_field(name="page_token", type="TYPE_STRING") + next_page_token = make_field(name="next_page_token", type="TYPE_STRING") + + input_msg = make_message( + name="ListFoosRequest", + fields=( + parent, + page_size, + page_token, + ), + ) + + output_msg = make_message( + name="ListFoosResponse", + fields=( + make_field(name="foos", message=make_message("Foo"), repeated=True), + next_page_token, + ), + ) + + method = make_method( + "ListFoos", + input_message=input_msg, + output_message=output_msg, + ) + + actual = method._validate_paged_field_size_type(page_field_size=page_size) + assert actual == expected From fe88aa293f2a5603a3f98dab4ad1092cd21050ed Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 9 Apr 2025 15:31:20 -0400 Subject: [PATCH 1275/1339] chore: update generated .flake8 configuration file (#2376) --- .../gapic/templates/.flake8.j2 | 30 +++++++------------ .../tests/integration/goldens/asset/.flake8 | 17 ++++++----- .../integration/goldens/credentials/.flake8 | 17 ++++++----- .../integration/goldens/eventarc/.flake8 | 17 ++++++----- .../tests/integration/goldens/logging/.flake8 | 17 ++++++----- .../goldens/logging_internal/.flake8 | 17 ++++++----- .../tests/integration/goldens/redis/.flake8 | 17 ++++++----- .../goldens/redis_selective/.flake8 | 17 ++++++----- 8 files changed, 74 insertions(+), 75 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/.flake8.j2 
b/packages/gapic-generator/gapic/templates/.flake8.j2 index 29227d4cf419..e5b2c576d35e 100644 --- a/packages/gapic-generator/gapic/templates/.flake8.j2 +++ b/packages/gapic-generator/gapic/templates/.flake8.j2 @@ -1,28 +1,18 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. +{% extends "_base.py.j2" %} + +{% block content %} -# Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
@@ -31,3 +21,5 @@ exclude = .git, *.pyc, conf.py + +{% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/.flake8 b/packages/gapic-generator/tests/integration/goldens/asset/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/asset/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 b/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/credentials/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 b/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/.flake8 b/packages/gapic-generator/tests/integration/goldens/logging/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/logging/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 b/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/.flake8 b/packages/gapic-generator/tests/integration/goldens/redis/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/redis/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 b/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 index 29227d4cf419..90316de21489 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. 
From 0f1a253632feda56803e6621e91c621a103080c7 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 9 Apr 2025 17:40:25 -0400 Subject: [PATCH 1276/1339] chore: add LICENSE to generated output (#2377) --- .../gapic/templates/LICENSE.j2 | 202 ++++++++++++++++++ .../tests/integration/goldens/asset/LICENSE | 202 ++++++++++++++++++ .../integration/goldens/credentials/LICENSE | 202 ++++++++++++++++++ .../integration/goldens/eventarc/LICENSE | 202 ++++++++++++++++++ .../tests/integration/goldens/logging/LICENSE | 202 ++++++++++++++++++ .../goldens/logging_internal/LICENSE | 202 ++++++++++++++++++ .../tests/integration/goldens/redis/LICENSE | 202 ++++++++++++++++++ .../goldens/redis_selective/LICENSE | 202 ++++++++++++++++++ 8 files changed, 1616 insertions(+) create mode 100644 packages/gapic-generator/gapic/templates/LICENSE.j2 create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/LICENSE create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/LICENSE create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/LICENSE create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/LICENSE create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/LICENSE create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/LICENSE create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/LICENSE diff --git a/packages/gapic-generator/gapic/templates/LICENSE.j2 b/packages/gapic-generator/gapic/templates/LICENSE.j2 new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/LICENSE.j2 @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/LICENSE b/packages/gapic-generator/tests/integration/goldens/asset/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/LICENSE b/packages/gapic-generator/tests/integration/goldens/credentials/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/LICENSE b/packages/gapic-generator/tests/integration/goldens/eventarc/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/LICENSE b/packages/gapic-generator/tests/integration/goldens/logging/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/LICENSE b/packages/gapic-generator/tests/integration/goldens/logging_internal/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/LICENSE b/packages/gapic-generator/tests/integration/goldens/redis/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/LICENSE b/packages/gapic-generator/tests/integration/goldens/redis_selective/LICENSE new file mode 100755 index 000000000000..d64569567334 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
From 46fb9863c52858ae4346df1c62ad00df4de28729 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 10 Apr 2025 15:01:31 -0400 Subject: [PATCH 1277/1339] chore: update generated MANIFEST.in (#2378) --- .../gapic/templates/MANIFEST.in.j2 | 17 ++++++++++++-- .../integration/goldens/asset/MANIFEST.in | 22 +++++++++++++++++-- .../goldens/credentials/MANIFEST.in | 22 +++++++++++++++++-- .../integration/goldens/eventarc/MANIFEST.in | 22 +++++++++++++++++-- .../integration/goldens/logging/MANIFEST.in | 22 +++++++++++++++++-- .../goldens/logging_internal/MANIFEST.in | 22 +++++++++++++++++-- .../integration/goldens/redis/MANIFEST.in | 22 +++++++++++++++++-- .../goldens/redis_selective/MANIFEST.in | 22 +++++++++++++++++-- 8 files changed, 155 insertions(+), 16 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 b/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 index b7239d5404a9..7e3949d690ed 100644 --- a/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 +++ b/packages/gapic-generator/gapic/templates/MANIFEST.in.j2 @@ -1,2 +1,15 @@ -recursive-include {{ '/'.join(api.naming.module_namespace + (api.naming.module_name,)) }} *.py -recursive-include {{ '/'.join(api.naming.module_namespace + (api.naming.versioned_module_name,)) }} *.py +{% extends "_base.py.j2" %} + +{% block content %} + +include README.rst LICENSE +{% if api.naming.module_namespace %} +recursive-include {{ api.naming.module_namespace[0] }} *.py *.pyi *.json *.proto py.typed +{% else %} +recursive-include {{ api.naming.versioned_module_name }} *.py *.pyi *.json *.proto py.typed +{% endif %} +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ + +{% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in index 5c97e27612a3..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/cloud/asset *.py -recursive-include google/cloud/asset_v1 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in index a17a81a99b35..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in +++ b/packages/gapic-generator/tests/integration/goldens/credentials/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/iam/credentials *.py -recursive-include google/iam/credentials_v1 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in index a7ef8bafb9c6..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/cloud/eventarc *.py -recursive-include google/cloud/eventarc_v1 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in index f8c276f2cce8..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in +++ b/packages/gapic-generator/tests/integration/goldens/logging/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/cloud/logging *.py -recursive-include google/cloud/logging_v2 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in index f8c276f2cce8..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/cloud/logging *.py -recursive-include google/cloud/logging_v2 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in index 5a95b2698cbb..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in +++ b/packages/gapic-generator/tests/integration/goldens/redis/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/cloud/redis *.py -recursive-include google/cloud/redis_v1 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in b/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in index 5a95b2698cbb..dae249ec8976 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/MANIFEST.in @@ -1,2 +1,20 @@ -recursive-include google/cloud/redis *.py -recursive-include google/cloud/redis_v1 *.py +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +include README.rst LICENSE +recursive-include google *.py *.pyi *.json *.proto py.typed +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ From e2d6d7eb4905210c284e9e4d0a93c1654c3a4e18 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 11 Apr 2025 17:46:57 -0400 Subject: [PATCH 1278/1339] chore: update generated noxfile.py (#2379) --- .../gapic/templates/noxfile.py.j2 | 647 +++++++++++++----- .../integration/goldens/asset/noxfile.py | 634 ++++++++++++----- .../goldens/credentials/noxfile.py | 634 ++++++++++++----- .../integration/goldens/eventarc/noxfile.py | 634 ++++++++++++----- .../integration/goldens/logging/noxfile.py | 634 ++++++++++++----- .../goldens/logging_internal/noxfile.py | 634 ++++++++++++----- .../integration/goldens/redis/noxfile.py | 634 ++++++++++++----- .../goldens/redis_selective/noxfile.py | 634 ++++++++++++----- 8 files changed, 3792 insertions(+), 1293 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index fca28de83427..505ed2cd51d6 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -6,11 +6,16 @@ import os import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -22,149 +27,306 @@ ALL_PYTHON = [ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = '{{ api.naming.warehouse_package_name }}' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] 
-DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +PACKAGE_NAME = "{{ api.naming.warehouse_package_name }}" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + {% if api.naming.module_namespace %} + "{{ api.naming.module_namespace[0] }}", + {% else %} + "{{ api.naming.versioned_module_name }}", + {% endif %} + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are 
reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + {% if api.naming.module_namespace %} + "--cov={{ api.naming.module_namespace[0] }}", + {% else %} + "--cov={{ api.naming.versioned_module_name }}", + {% endif %} + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2201) Add a `unit_rest_async` nox session to run tests with [async_rest] extra installed. #} -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - # Install the package without dependencies - session.install('-e', '.', '--no-deps') + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - 
# file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - # Ignore leading whitespace and comment lines. - constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) - - # Print out prerelease package versions - - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov={{ api.naming.module_namespace|join("/") }}/{{ api.naming.versioned_module_name }}/', - '--cov=tests/', - '--cov-config=.coveragerc', - 
'--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -174,70 +336,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - {% if api.naming.module_namespace %} - '{{ api.naming.module_namespace[0] }}', - {% else %} - '{{ api.naming.versioned_module_name }}', - {% endif %} - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - 
'--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -248,28 +418,173 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) {% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 5b4265d72615..49691dd51e79 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-asset' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +PACKAGE_NAME = "google-cloud-asset" + 
+UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + 
str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/asset_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/asset_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index bc738155412a..442d98c88f8f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-iam-credentials' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +PACKAGE_NAME = 
"google-iam-credentials" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + 
str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/iam/credentials_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/iam/credentials_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 66f44ba3ebc5..f7d75738e40d 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-eventarc' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +PACKAGE_NAME = 
"google-cloud-eventarc" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + 
str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/eventarc_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/eventarc_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index c1b4a5788d1d..18f1f2bc16f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-logging' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +PACKAGE_NAME = "google-cloud-logging" 
+ +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + 
str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/logging_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/logging_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index c1b4a5788d1d..18f1f2bc16f4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-logging' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] 
+PACKAGE_NAME = "google-cloud-logging" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + 
"--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/logging_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/logging_v2/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 15ea2482945d..89ab6bcec931 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-redis' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +PACKAGE_NAME = "google-cloud-redis" + 
+UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + 
str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/redis_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/redis_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index 15ea2482945d..89ab6bcec931 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -17,11 +17,16 @@ import pathlib import re import shutil -import subprocess -import sys +from typing import Dict, List +import warnings -import nox # type: ignore +import nox + +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" + +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", @@ -33,148 +38,297 @@ "3.13", ] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] + CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = 'google-cloud-redis' - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] 
+PACKAGE_NAME = "google-cloud-redis" + +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -nox.sessions = [ +nox.options.sessions = [ "unit", + "system", "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", "lint", - "prerelease_deps", + "lint_setup_py", + "blacken", + "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + "mypy", + "types-requests", + "types-protobuf", + ) + session.install(".") + session.run( + "mypy", + "-p", + "google", + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "update", + "--package-name", + PACKAGE_NAME, + "--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install("google-cloud-testutils") + session.install(".") + + session.run( + "lower-bound-checker", + "check", + "--package-name", + PACKAGE_NAME, + 
"--constraints-file", + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=ALL_PYTHON) @nox.parametrize( "protobuf_implementation", - [ "python", "upb", "cpp" ], + ["python", "upb", "cpp"], ) def unit(session, protobuf_implementation): - """Run the unit test suite.""" + # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and float(session.python) > 3.10: + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.', "-c", f"testing/constraints-{session.python}.txt") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/redis_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, env={ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, }, ) -@nox.session(python=ALL_PYTHON[-1]) -@nox.parametrize( - "protobuf_implementation", - [ "python", "upb", "cpp" ], -) -def prerelease_deps(session, protobuf_implementation): - """Run the unit test suite against pre-release versions of dependencies.""" - - if protobuf_implementation == "cpp" and float(session.python) > 3.10: - session.skip("cpp implementation is not supported in python 3.11+") - - # Install test environment dependencies - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - # Install the package without dependencies - session.install('-e', '.', '--no-deps') - - # We test the minimum dependency versions using the minimum Python - # version so the lowest python runtime that we test has a corresponding constraints - # file, located at `testing/constraints--.txt`, which contains all of the - # dependencies and extras. - with open( - CURRENT_DIRECTORY - / "testing" - / f"constraints-{ALL_PYTHON[0]}.txt", - encoding="utf-8", - ) as constraints_file: - constraints_text = constraints_file.read() +def install_systemtest_dependencies(session, *constraints): + session.install("--pre", "grpcio") - # Ignore leading whitespace and comment lines. 
- constraints_deps = [ - match.group(1) - for match in re.finditer( - r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE - ) - ] + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - session.install(*constraints_deps) + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - prerel_deps = [ - "googleapis-common-protos", - "google-api-core", - "google-auth", - "grpcio", - "grpcio-status", - "protobuf", - "proto-plus", - ] + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - # Remaining dependencies - other_deps = [ - "requests", - ] - session.install(*other_deps) + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] - # Print out prerelease package versions + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) - session.run("python", "-c", "import google.api_core; print(google.api_core.__version__)") - session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("python", "-c", "import grpc; print(grpc.__version__)") - session.run( - "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" - ) - session.run( - "python", "-c", "import proto; print(proto.__version__)" - ) - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/redis_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)), - env={ - "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, - }, 
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if system_test_folder_exists: + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" @@ -184,66 +338,78 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") session.install( - 'mypy', - 'types-requests', - 'types-protobuf' + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", ) - session.install('.') - session.run( - 'mypy', - '-p', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this 
library.""" +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx==7.0.1", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", @@ -254,26 +420,172 @@ def docs(session): @nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """ - session.install("flake8", BLACK_VERSION) + Run all tests with pre-release versions of dependencies installed + rather than the standard non pre-release versions. + Pre-release versions can be installed using + `pip install --pre `. 
+ """ + + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # Note: If a dependency is added to the `prerel_deps` list, + # the `core_dependencies_from_source` list in the `core_deps_from_source` + # nox session should also be updated. + prerel_deps = [ + "googleapis-common-protos", + "google-api-core", + "google-auth", + "grpc-google-iam-v1", + "grpcio", + "grpcio-status", + "protobuf", + "proto-plus", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--ignore-installed", dep) + # TODO(https://github.com/grpc/grpc/issues/38965): Add `grpcio-status`` + # to the dictionary below once this bug is fixed. 
+ # TODO(https://github.com/googleapis/google-cloud-python/issues/13643): Add + # `googleapis-common-protos` and `grpc-google-iam-v1` to the dictionary below + # once this bug is fixed. + package_namespaces = { + "google-api-core": "google.api_core", + "google-auth": "google.auth", + "grpcio": "grpc", + "protobuf": "google.protobuf", + "proto-plus": "proto", + } + + version_namespace = package_namespaces.get(dep) + + print(f"Installed {dep}") + if version_namespace: + session.run( + "python", + "-c", + f"import {version_namespace}; print({version_namespace}.__version__)", + ) + session.run( - "black", - "--check", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) - session.run("flake8", "google", "tests", "samples") @nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb"], +) +def core_deps_from_source(session, protobuf_implementation): + """Run all tests with core dependencies installed from source + rather than pulling the dependencies from PyPI. + """ + + # Install all dependencies + session.install("-e", ".") + + # Install dependencies for the unit test environment + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + + # Install dependencies for the system test environment + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY / "testing" / f"constraints-{ALL_PYTHON[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Install dependencies specified in `testing/constraints-X.txt`. + session.install(*constraints_deps) + + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2358): `grpcio` and + # `grpcio-status` should be added to the list below so that they are installed from source, + # rather than PyPI. + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2357): `protobuf` should be + # added to the list below so that it is installed from source, rather than PyPI + # Note: If a dependency is added to the `core_dependencies_from_source` list, + # the `prerel_deps` list in the `prerelease_deps` nox session should also be updated. 
+ core_dependencies_from_source = [ + "googleapis-common-protos @ git+https://github.com/googleapis/google-cloud-python#egg=googleapis-common-protos&subdirectory=packages/googleapis-common-protos", + "google-api-core @ git+https://github.com/googleapis/python-api-core.git", + "google-auth @ git+https://github.com/googleapis/google-auth-library-python.git", + "grpc-google-iam-v1 @ git+https://github.com/googleapis/google-cloud-python#egg=grpc-google-iam-v1&subdirectory=packages/grpc-google-iam-v1", + "proto-plus @ git+https://github.com/googleapis/proto-plus-python.git", + ] + + for dep in core_dependencies_from_source: + session.install(dep, "--no-deps", "--ignore-installed") + print(f"Installed {dep}") + session.run( - "black", - *BLACK_PATHS, + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) From 3ed3d163102877b7d529df59d68395c31ef5ad0a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 07:10:15 -0400 Subject: [PATCH 1279/1339] chore(main): release 1.24.0 (#2367) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c79b6373422f..cad6cec65846 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.24.0](https://github.com/googleapis/gapic-generator-python/compare/v1.23.6...v1.24.0) (2025-04-11) + + +### Features + +* Adds augmented pagination to account for BQ family of APIs ([#2372](https://github.com/googleapis/gapic-generator-python/issues/2372)) ([30cd1a4](https://github.com/googleapis/gapic-generator-python/commit/30cd1a49353048fc190114cfb7a73e14ec2eff31)) + + 
+### Bug Fixes + +* Fixed internal method generation naming issues ([#2365](https://github.com/googleapis/gapic-generator-python/issues/2365)) ([868f201](https://github.com/googleapis/gapic-generator-python/commit/868f201957b271c9390bf41374897a2ce728a5e2)) +* Missing DEFAULT_HOST should still result in compiling code ([#2051](https://github.com/googleapis/gapic-generator-python/issues/2051)) ([dc6d4f7](https://github.com/googleapis/gapic-generator-python/commit/dc6d4f7ec0853557bb7c25a8276fbe262c5bda5d)) + ## [1.23.6](https://github.com/googleapis/gapic-generator-python/compare/v1.23.5...v1.23.6) (2025-03-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 5d2bd2ff7565..e17ab214f60c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.23.6" +version = "1.24.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From bfa6ab1d61722a0f12b913bed33880d2c81060de Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 15 Apr 2025 16:53:13 -0400 Subject: [PATCH 1280/1339] chore: update generated noxfile.py to cater for grafeas (#2380) --- .../gapic-generator/gapic/templates/noxfile.py.j2 | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 505ed2cd51d6..35a19afdda46 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -15,7 +15,11 @@ import nox BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +{% if 
api.naming.module_namespace %}
+LINT_PATHS = ["docs", "{{ api.naming.module_namespace[0] }}", "tests", "noxfile.py", "setup.py"]
+{% else %}
+LINT_PATHS = ["docs", "{{ api.naming.versioned_module_name }}", "tests", "noxfile.py", "setup.py"]
+{% endif %}

 ALL_PYTHON = [
     "3.7",
@@ -139,7 +143,11 @@ def lint(session):
         *LINT_PATHS,
     )

-    session.run("flake8", "google", "tests")
+{% if api.naming.module_namespace %}
+    session.run("flake8", "{{ api.naming.module_namespace[0] }}", "tests")
+{% else %}
+    session.run("flake8", "{{ api.naming.versioned_module_name }}", "tests")
+{% endif %}


 @nox.session(python=DEFAULT_PYTHON_VERSION)

From 28e8aeb34a67b582c0063c69d05a3b5b94a3485f Mon Sep 17 00:00:00 2001
From: Anthonios Partheniou
Date: Wed, 16 Apr 2025 18:34:18 -0400
Subject: [PATCH 1281/1339] chore: update generated docs (#2381)

Co-authored-by: Owl Bot
---
 packages/gapic-generator/docs/conf.py | 2 +-
 .../gapic/ads-templates/docs/conf.py.j2 | 2 +-
 .../templates/docs/_static/custom.css.j2 | 19 ++++++-
 .../templates/docs/_templates/layout.html.j2 | 51 +++++++++++++++++
 .../gapic/templates/docs/conf.py.j2 | 55 +++++++++++--------
 .../gapic/templates/docs/index.rst.j2 | 3 +
 .../templates/docs/multiprocessing.rst.j2 | 7 +++
 .../goldens/asset/docs/_static/custom.css | 17 ++++++
 .../goldens/asset/docs/_templates/layout.html | 50 +++++++++++++++++
 .../integration/goldens/asset/docs/conf.py | 55 +++++++++++--------
 .../integration/goldens/asset/docs/index.rst | 3 +
 .../goldens/asset/docs/multiprocessing.rst | 7 +++
 .../credentials/docs/_static/custom.css | 17 ++++++
 .../credentials/docs/_templates/layout.html | 50 +++++++++++++++++
 .../goldens/credentials/docs/conf.py | 55 +++++++++++--------
 .../goldens/credentials/docs/index.rst | 3 +
 .../credentials/docs/multiprocessing.rst | 7 +++
 .../goldens/eventarc/docs/_static/custom.css | 17 ++++++
 .../eventarc/docs/_templates/layout.html | 50 +++++++++++++++++
 .../integration/goldens/eventarc/docs/conf.py | 55 +++++++++++--------
.../goldens/eventarc/docs/index.rst | 3 + .../goldens/eventarc/docs/multiprocessing.rst | 7 +++ .../goldens/logging/docs/_static/custom.css | 17 ++++++ .../logging/docs/_templates/layout.html | 50 +++++++++++++++++ .../integration/goldens/logging/docs/conf.py | 55 +++++++++++-------- .../goldens/logging/docs/index.rst | 3 + .../goldens/logging/docs/multiprocessing.rst | 7 +++ .../logging_internal/docs/_static/custom.css | 17 ++++++ .../docs/_templates/layout.html | 50 +++++++++++++++++ .../goldens/logging_internal/docs/conf.py | 55 +++++++++++-------- .../goldens/logging_internal/docs/index.rst | 3 + .../logging_internal/docs/multiprocessing.rst | 7 +++ .../goldens/redis/docs/_static/custom.css | 17 ++++++ .../goldens/redis/docs/_templates/layout.html | 50 +++++++++++++++++ .../integration/goldens/redis/docs/conf.py | 55 +++++++++++-------- .../integration/goldens/redis/docs/index.rst | 3 + .../goldens/redis/docs/multiprocessing.rst | 7 +++ .../redis_selective/docs/_static/custom.css | 17 ++++++ .../docs/_templates/layout.html | 50 +++++++++++++++++ .../goldens/redis_selective/docs/conf.py | 55 +++++++++++-------- .../goldens/redis_selective/docs/index.rst | 3 + .../redis_selective/docs/multiprocessing.rst | 7 +++ 42 files changed, 876 insertions(+), 187 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/docs/_templates/layout.html.j2 create mode 100644 packages/gapic-generator/gapic/templates/docs/multiprocessing.rst.j2 create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/_templates/layout.html create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/docs/multiprocessing.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/_templates/layout.html create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/docs/multiprocessing.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/_templates/layout.html 
create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/docs/multiprocessing.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/_templates/layout.html create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/docs/multiprocessing.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_templates/layout.html create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/docs/multiprocessing.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/_templates/layout.html create mode 100755 packages/gapic-generator/tests/integration/goldens/redis/docs/multiprocessing.rst create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_templates/layout.html create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/docs/multiprocessing.rst diff --git a/packages/gapic-generator/docs/conf.py b/packages/gapic-generator/docs/conf.py index 5d7d48e9d1e4..cb605ff6a1b5 100644 --- a/packages/gapic-generator/docs/conf.py +++ b/packages/gapic-generator/docs/conf.py @@ -40,7 +40,7 @@ # If your documentation needs a minimal Sphinx version, state it here. # -# needs_sphinx = '1.0' +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 index 80e398ddf7ab..dcce591ca3f5 100644 --- a/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/docs/conf.py.j2 @@ -28,7 +28,7 @@ __version__ = "0.1.0" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. 
-needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/gapic-generator/gapic/templates/docs/_static/custom.css.j2 b/packages/gapic-generator/gapic/templates/docs/_static/custom.css.j2 index c4e78bac6354..b0a295464b23 100644 --- a/packages/gapic-generator/gapic/templates/docs/_static/custom.css.j2 +++ b/packages/gapic-generator/gapic/templates/docs/_static/custom.css.j2 @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px -} \ No newline at end of file +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/gapic/templates/docs/_templates/layout.html.j2 b/packages/gapic-generator/gapic/templates/docs/_templates/layout.html.j2 new file mode 100644 index 000000000000..0ff79a38f432 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/_templates/layout.html.j2 @@ -0,0 +1,51 @@ +{% raw %} +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} +{% endraw %} diff --git a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 index b1eb707e8c69..b509706ab6db 100644 --- a/packages/gapic-generator/gapic/templates/docs/conf.py.j2 +++ b/packages/gapic-generator/gapic/templates/docs/conf.py.j2 @@ -23,12 +23,16 @@ import shlex # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -38,26 +42,25 @@ extensions = [ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. 
@@ -68,8 +71,8 @@ root_doc = "index" # General information about the project. project = u"{{ api.naming.warehouse_package_name }}" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -85,7 +88,7 @@ version = ".".join(release.split(".")[0:2]) # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -95,7 +98,13 @@ language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -135,7 +144,7 @@ html_theme = "alabaster" # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - "description": "{{ api.naming.namespace|join(' ') }} Client Libraries for Python", + "description": "{{ api.naming.namespace|join(' ') }} Client Libraries for {{ api.naming.warehouse_package_name }}", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -298,7 +307,7 @@ man_pages = [ ( root_doc, "{{ api.naming.warehouse_package_name }}", - u"{{ api.naming.long_name }} Documentation", + "{{ api.naming.warehouse_package_name }} Documentation", [author], 1, ) @@ -317,10 +326,10 @@ texinfo_documents = [ ( root_doc, "{{ api.naming.warehouse_package_name }}", - u"{{ api.naming.warehouse_package_name }} Documentation", + "{{ api.naming.warehouse_package_name }} Documentation", author, "{{ api.naming.warehouse_package_name }}", - "GAPIC library for {{ api.naming.long_name }} API", + "{{ api.naming.warehouse_package_name }} Library", "APIs", ) ] @@ -340,14 +349,14 @@ texinfo_documents = [ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 index 890509be79d4..4e55a05ff92c 100644 --- a/packages/gapic-generator/gapic/templates/docs/index.rst.j2 +++ b/packages/gapic-generator/gapic/templates/docs/index.rst.j2 @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/gapic/templates/docs/multiprocessing.rst.j2 b/packages/gapic-generator/gapic/templates/docs/multiprocessing.rst.j2 new file mode 100644 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/gapic/templates/docs/multiprocessing.rst.j2 @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/asset/docs/_templates/layout.html new file mode 100755 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py index 6ef2567dd078..6c82dd6667c9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -79,8 +82,8 @@ # General information about the project. 
project = u"google-cloud-asset" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-asset", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-cloud-asset", - u"Google Cloud Asset Documentation", + "google-cloud-asset Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-cloud-asset", - u"google-cloud-asset Documentation", + "google-cloud-asset Documentation", author, "google-cloud-asset", - "GAPIC library for Google Cloud Asset API", + "google-cloud-asset Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst index df4eb53564ad..fab3d489ef3b 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/asset/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/asset/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/credentials/docs/_templates/layout.html new file mode 100755 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py index 87daeb37dc7f..0d3872aa87c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. 
@@ -79,8 +82,8 @@ # General information about the project. project = u"google-iam-credentials" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - "description": "Google Iam Client Libraries for Python", + "description": "Google Iam Client Libraries for google-iam-credentials", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-iam-credentials", - u"Google Iam Credentials Documentation", + "google-iam-credentials Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-iam-credentials", - u"google-iam-credentials Documentation", + "google-iam-credentials Documentation", author, "google-iam-credentials", - "GAPIC library for Google Iam Credentials API", + "google-iam-credentials Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst index 
2113270ae152..a2f172d3d006 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/credentials/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_templates/layout.html new file mode 100755 index 
000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py index 15e37e84a4fb..33c2c7b6418e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. 
@@ -79,8 +82,8 @@ # General information about the project. project = u"google-cloud-eventarc" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-eventarc", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-cloud-eventarc", - u"Google Cloud Eventarc Documentation", + "google-cloud-eventarc Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-cloud-eventarc", - u"google-cloud-eventarc Documentation", + "google-cloud-eventarc Documentation", author, "google-cloud-eventarc", - "GAPIC library for Google Cloud Eventarc API", + "google-cloud-eventarc Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst index 0a20636626b1..e1c01ae7c916 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/logging/docs/_templates/layout.html new file mode 100755 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py index 9f5f212faa62..0fe50dca9699 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. 
@@ -79,8 +82,8 @@ # General information about the project. project = u"google-cloud-logging" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-logging", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-cloud-logging", - u"Google Cloud Logging Documentation", + "google-cloud-logging Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-cloud-logging", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", author, "google-cloud-logging", - "GAPIC library for Google Cloud Logging API", + "google-cloud-logging Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst index 51acc96292ab..2be5cab9659a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/logging/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/logging/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_templates/layout.html new file mode 100755 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py index 9f5f212faa62..0fe50dca9699 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. 
@@ -79,8 +82,8 @@ # General information about the project. project = u"google-cloud-logging" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-logging", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-cloud-logging", - u"Google Cloud Logging Documentation", + "google-cloud-logging Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-cloud-logging", - u"google-cloud-logging Documentation", + "google-cloud-logging Documentation", author, "google-cloud-logging", - "GAPIC library for Google Cloud Logging API", + "google-cloud-logging Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst index 51acc96292ab..2be5cab9659a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. 
note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/redis/docs/_templates/layout.html new file mode 100755 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py index 975c25960061..ee9513cfe04e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. @@ -79,8 +82,8 @@ # General information about the project. 
project = u"google-cloud-redis" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-redis", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-cloud-redis", - u"Google Cloud Redis Documentation", + "google-cloud-redis Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-cloud-redis", - u"google-cloud-redis Documentation", + "google-cloud-redis Documentation", author, "google-cloud-redis", - "GAPIC library for Google Cloud Redis API", + "google-cloud-redis Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst index 0b346d85a90f..59c3a95dee9b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/redis/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/redis/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css index 06423be0b592..b0a295464b23 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_static/custom.css @@ -1,3 +1,20 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_templates/layout.html b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_templates/layout.html new file mode 100755 index 000000000000..95e9c77fcfe1 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/_templates/layout.html @@ -0,0 +1,50 @@ + +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py index 975c25960061..ee9513cfe04e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/conf.py @@ -34,12 +34,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + +__version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" +needs_sphinx = "4.5.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -49,26 +53,25 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] source_suffix = [".rst", ".md"] # The encoding of source files. 
@@ -79,8 +82,8 @@ # General information about the project. project = u"google-cloud-redis" -copyright = u"2023, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit +copyright = u"2025, Google, LLC" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -96,7 +99,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -106,7 +109,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -146,7 +155,7 @@ # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-redis", "github_user": "googleapis", "github_repo": "google-cloud-python", "github_banner": True, @@ -309,7 +318,7 @@ ( root_doc, "google-cloud-redis", - u"Google Cloud Redis Documentation", + "google-cloud-redis Documentation", [author], 1, ) @@ -328,10 +337,10 @@ ( root_doc, "google-cloud-redis", - u"google-cloud-redis Documentation", + "google-cloud-redis Documentation", author, "google-cloud-redis", - "GAPIC library for Google Cloud Redis API", + "google-cloud-redis Library", "APIs", ) ] @@ -351,14 +360,14 @@ # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst index 0b346d85a90f..59c3a95dee9b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/index.rst @@ -1,3 +1,6 @@ +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/multiprocessing.rst b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/multiprocessing.rst new file mode 100755 index 000000000000..536d17b2ea65 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. 
note:: + + Because this client uses :mod:`grpc` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or + :class:`multiprocessing.Process`. From 2439c99966f4ac2407285dcd66ac8847b81d0464 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 10:55:34 -0400 Subject: [PATCH 1282/1339] chore(main): release 1.24.1 (#2382) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index cad6cec65846..1ca988f0054e 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.24.1](https://github.com/googleapis/gapic-generator-python/compare/v1.24.0...v1.24.1) (2025-04-17) + + +### Bug Fixes + +* Update generated docs to match synthtool templates ([3f62f31](https://github.com/googleapis/gapic-generator-python/commit/3f62f31d5146147c4bc6393fbeef18ce1d9a1a42)) + ## [1.24.0](https://github.com/googleapis/gapic-generator-python/compare/v1.23.6...v1.24.0) (2025-04-11) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e17ab214f60c..803600ae8193 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.24.0" +version = "1.24.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we 
have in the From 34b692a4c433653a9a1b8cd79dbab4aa947f8570 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 6 May 2025 18:59:17 -0400 Subject: [PATCH 1283/1339] feat: Add protobuf runtime version to `x-goog-api-client` header (#2368) --- .../%sub/services/%service/async_client.py.j2 | 4 ++++ .../%name_%version/%sub/services/%service/client.py.j2 | 3 +++ .../%sub/services/%service/transports/base.py.j2 | 4 ++++ .../%sub/services/%service/transports/rest.py.j2 | 4 ++++ .../%sub/services/%service/transports/rest_asyncio.py.j2 | 5 ++++- packages/gapic-generator/noxfile.py | 8 ++++++++ .../cloud/asset_v1/services/asset_service/async_client.py | 4 ++++ .../cloud/asset_v1/services/asset_service/client.py | 3 +++ .../asset_v1/services/asset_service/transports/base.py | 4 ++++ .../asset_v1/services/asset_service/transports/rest.py | 4 ++++ .../services/iam_credentials/async_client.py | 4 ++++ .../iam/credentials_v1/services/iam_credentials/client.py | 3 +++ .../services/iam_credentials/transports/base.py | 4 ++++ .../services/iam_credentials/transports/rest.py | 4 ++++ .../cloud/eventarc_v1/services/eventarc/async_client.py | 4 ++++ .../google/cloud/eventarc_v1/services/eventarc/client.py | 3 +++ .../eventarc_v1/services/eventarc/transports/base.py | 4 ++++ .../eventarc_v1/services/eventarc/transports/rest.py | 4 ++++ .../logging_v2/services/config_service_v2/async_client.py | 4 ++++ .../cloud/logging_v2/services/config_service_v2/client.py | 3 +++ .../services/config_service_v2/transports/base.py | 4 ++++ .../services/logging_service_v2/async_client.py | 4 ++++ .../logging_v2/services/logging_service_v2/client.py | 3 +++ .../services/logging_service_v2/transports/base.py | 4 ++++ .../services/metrics_service_v2/async_client.py | 4 ++++ .../logging_v2/services/metrics_service_v2/client.py | 3 +++ .../services/metrics_service_v2/transports/base.py | 4 ++++ .../logging_v2/services/config_service_v2/async_client.py | 4 ++++ 
.../cloud/logging_v2/services/config_service_v2/client.py | 3 +++ .../services/config_service_v2/transports/base.py | 4 ++++ .../services/logging_service_v2/async_client.py | 4 ++++ .../logging_v2/services/logging_service_v2/client.py | 3 +++ .../services/logging_service_v2/transports/base.py | 4 ++++ .../services/metrics_service_v2/async_client.py | 4 ++++ .../logging_v2/services/metrics_service_v2/client.py | 3 +++ .../services/metrics_service_v2/transports/base.py | 4 ++++ .../cloud/redis_v1/services/cloud_redis/async_client.py | 4 ++++ .../google/cloud/redis_v1/services/cloud_redis/client.py | 3 +++ .../redis_v1/services/cloud_redis/transports/base.py | 4 ++++ .../redis_v1/services/cloud_redis/transports/rest.py | 4 ++++ .../services/cloud_redis/transports/rest_asyncio.py | 5 ++++- .../cloud/redis_v1/services/cloud_redis/async_client.py | 4 ++++ .../google/cloud/redis_v1/services/cloud_redis/client.py | 3 +++ .../redis_v1/services/cloud_redis/transports/base.py | 4 ++++ .../redis_v1/services/cloud_redis/transports/rest.py | 4 ++++ .../services/cloud_redis/transports/rest_asyncio.py | 5 ++++- 46 files changed, 176 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index d5425984cf7e..689d3e062009 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -24,6 +24,7 @@ from google.api_core import gapic_v1 from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf {{ shared_macros.add_google_api_core_version_header_import(service.version) 
}} try: @@ -726,6 +727,9 @@ class {{ service.async_client_name }}: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "{{ service.async_client_name }}", diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index eefd22509094..9f945bc55685 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -36,6 +36,7 @@ from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf {{ shared_macros.add_google_api_core_version_header_import(service.version) }} try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -991,6 +992,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "{{ service.client_name }}", diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 3596da1bb289..7176ad949d75 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -18,6 +18,7 @@ from google.api_core import operations_v1 {% endif %} from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf {% filter sort_lines %} {% set import_ns = namespace(has_operations_mixin=false) %} @@ -51,6 +52,9 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class {{ service.name }}Transport(abc.ABC): """Abstract transport class for {{ service.name }}.""" diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index d419a2d3c3cd..4e69136b461e 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -13,6 +13,7 @@ from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format {% if service.has_lro %} @@ -55,6 +56,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + 
DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + {{ shared_macros.create_interceptor_class(api, service, method, is_async=False) }} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 8664b7077dc2..3de616d0221a 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -34,7 +34,7 @@ from google.cloud.location import locations_pb2 # type: ignore from google.api_core import retry_async as retries from google.api_core import rest_helpers from google.api_core import rest_streaming_async # type: ignore - +import google.protobuf from google.protobuf import json_format {% if service.has_lro %} @@ -80,6 +80,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=f"google-auth@{google.auth.__version__}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + {{ shared_macros.create_interceptor_class(api, service, method, is_async=True) }} @dataclasses.dataclass diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index b4f8b203de61..117ca47c5e70 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -365,6 +365,14 @@ def showcase_library( "-r", constraints_path, ) + # Exclude `google-auth==2.40.0` which contains a regression + # https://github.com/googleapis/gapic-generator-python/issues/2385 + session.install( + "--no-cache-dir", + "--force-reinstall", + "--upgrade", + "google-auth[aiohttp]!=2.40.0", + ) else: # The ads templates do not have 
constraints files. # See https://github.com/googleapis/gapic-generator-python/issues/1788 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index ef5bc7163f02..262069a990e4 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -3213,6 +3214,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "AssetServiceAsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 217fa16d0c21..d8e1793012e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = 
Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -3619,6 +3620,8 @@ def get_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "AssetServiceClient", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index a788aa0a6af2..5e8d7fc1186f 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.asset_v1.types import asset_service from google.longrunning import operations_pb2 # type: ignore @@ -33,6 +34,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class AssetServiceTransport(abc.ABC): """Abstract transport class for AssetService.""" diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 0c936ad2968b..ca847c3aa8c2 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -60,6 +61,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class AssetServiceRestInterceptor: """Interceptor for AssetService. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index a8b182fd1bec..eac708473c7c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -857,6 +858,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "IAMCredentialsAsyncClient", diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 45a1f615b0f6..6eb63b8a4f3c 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1215,6 +1216,8 @@ def __exit__(self, type, value, traceback): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "IAMCredentialsClient", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 941b1e0d32e2..c25966502066 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -25,11 +25,15 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import 
service_account # type: ignore +import google.protobuf from google.iam.credentials_v1.types import common DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class IAMCredentialsTransport(abc.ABC): """Abstract transport class for IAMCredentials.""" diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index cbb5d611b125..ea5815b579ac 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format @@ -57,6 +58,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class IAMCredentialsRestInterceptor: """Interceptor for IAMCredentials. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 7fa2037fe228..b972f969634b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -3160,6 +3161,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "EventarcAsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 791d872e9574..961929b1783c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -3608,6 +3609,8 @@ def 
list_locations( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "EventarcClient", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index ff8e9e74699e..e37d17237f50 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.eventarc_v1.types import channel from google.cloud.eventarc_v1.types import channel_connection @@ -41,6 +42,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class EventarcTransport(abc.ABC): """Abstract transport class for Eventarc.""" diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index 1c79b9ce6fd6..fab70abdd90a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ 
b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -68,6 +69,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class EventarcRestInterceptor: """Interceptor for Eventarc. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 8a04c81bf371..d3d1edb157b6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -4058,6 +4059,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "ConfigServiceV2AsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 638be89e420b..3cac25632814 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -4462,6 +4463,8 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "ConfigServiceV2Client", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 865c16bb657a..28f11c65de53 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore @@ -33,6 +34,9 @@ 
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 326e225072a3..774601dec829 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1173,6 +1174,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "LoggingServiceV2AsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 9104ad8b3be6..4bc6cd56e115 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1536,6 +1537,8 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "LoggingServiceV2Client", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index d8d2c3edc1cf..287081712a7e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging from google.longrunning import operations_pb2 # type: ignore @@ -32,6 +33,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class 
LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 65907d2cb66a..ad659243d7e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1024,6 +1025,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "MetricsServiceV2AsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 5bbcc4bbc14a..6cf913c0dbba 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import 
service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1387,6 +1388,8 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "MetricsServiceV2Client", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index d5a7b7f1f33b..012973c62892 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_metrics from google.longrunning import operations_pb2 # type: ignore @@ -32,6 +33,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index e53cf09afde6..68db5574ea20 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -4058,6 +4059,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "BaseConfigServiceV2AsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index a9adf37b3efa..d25e9be1c544 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -4462,6 +4463,8 @@ def 
cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "BaseConfigServiceV2Client", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 865c16bb657a..28f11c65de53 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_config from google.longrunning import operations_pb2 # type: ignore @@ -33,6 +34,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class ConfigServiceV2Transport(abc.ABC): """Abstract transport class for ConfigServiceV2.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 326e225072a3..774601dec829 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1173,6 +1174,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "LoggingServiceV2AsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index 9104ad8b3be6..4bc6cd56e115 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1536,6 +1537,8 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO 
COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "LoggingServiceV2Client", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index d8d2c3edc1cf..287081712a7e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging from google.longrunning import operations_pb2 # type: ignore @@ -32,6 +33,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class LoggingServiceV2Transport(abc.ABC): """Abstract transport class for LoggingServiceV2.""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index fe606e940da1..5f0bf0c782ae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1024,6 +1025,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "BaseMetricsServiceV2AsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index 617f0838b1cf..377daf633c2a 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -1387,6 +1388,8 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "BaseMetricsServiceV2Client", diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index d5a7b7f1f33b..012973c62892 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.logging_v2.types import logging_metrics from google.longrunning import operations_pb2 # type: ignore @@ -32,6 +33,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class MetricsServiceV2Transport(abc.ABC): """Abstract transport class for MetricsServiceV2.""" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 1291288de28e..39f32df5f447 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from 
google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -2145,6 +2146,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "CloudRedisAsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 7313c321753b..3d9553229e7f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -2543,6 +2544,8 @@ def list_locations( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "CloudRedisClient", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 4e2923b1e747..7bbc2e5550e2 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis @@ -33,6 +34,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class CloudRedisTransport(abc.ABC): """Abstract transport class for CloudRedis.""" diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 7a4160a2ca74..3f3e74eaad17 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -60,6 +61,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class CloudRedisRestInterceptor: """Interceptor for 
CloudRedis. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 36a9c0101409..c136b41f72eb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -32,7 +32,7 @@ from google.api_core import retry_async as retries from google.api_core import rest_helpers from google.api_core import rest_streaming_async # type: ignore - +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -73,6 +73,9 @@ rest_version=f"google-auth@{google.auth.__version__}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class AsyncCloudRedisRestInterceptor: """Asynchronous Interceptor for CloudRedis. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py index 92351953759c..725e60942957 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -26,6 +26,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1338,6 +1339,9 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ( "CloudRedisAsyncClient", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index 644df88a69ba..a8ff60e651bb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -33,6 +33,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, 
gapic_v1.method._MethodDefault, None] @@ -1742,6 +1743,8 @@ def list_locations( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( "CloudRedisClient", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 8cdb19ba64cc..fe7ff474529f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -26,6 +26,7 @@ from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.cloud.location import locations_pb2 # type: ignore from google.cloud.redis_v1.types import cloud_redis @@ -33,6 +34,9 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class CloudRedisTransport(abc.ABC): """Abstract transport class for CloudRedis.""" diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 91410d213928..49531cd4cbc7 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -60,6 +61,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class CloudRedisRestInterceptor: """Interceptor for CloudRedis. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 3739e94e0567..62b7b33c8f0b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -32,7 +32,7 @@ from google.api_core import retry_async as retries from google.api_core import rest_helpers from google.api_core import rest_streaming_async # type: ignore - +import google.protobuf from google.protobuf import json_format from google.api_core import operations_v1 @@ -73,6 +73,9 @@ rest_version=f"google-auth@{google.auth.__version__}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class AsyncCloudRedisRestInterceptor: """Asynchronous Interceptor 
for CloudRedis. From 720a2b15a9601a1810f7fbd376afb7a00d436269 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 7 May 2025 01:08:38 +0200 Subject: [PATCH 1284/1339] chore(deps): update all dependencies (#2371) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/.bazelrc | 4 +- packages/gapic-generator/WORKSPACE | 29 +- packages/gapic-generator/requirements.txt | 1422 +++++++++++---------- 3 files changed, 760 insertions(+), 695 deletions(-) diff --git a/packages/gapic-generator/.bazelrc b/packages/gapic-generator/.bazelrc index 19ab83c1cdb4..736b9ae3cebe 100644 --- a/packages/gapic-generator/.bazelrc +++ b/packages/gapic-generator/.bazelrc @@ -1,2 +1,2 @@ -# New boringssl requires C++14 -build --repo_env=BAZEL_CXXOPTS="-std=c++14" +# New protobuf requires C++17 +build --repo_env=BAZEL_CXXOPTS="-std=c++17" diff --git a/packages/gapic-generator/WORKSPACE b/packages/gapic-generator/WORKSPACE index a21f84581d8e..4da1de3cc7d4 100644 --- a/packages/gapic-generator/WORKSPACE +++ b/packages/gapic-generator/WORKSPACE @@ -60,9 +60,9 @@ gapic_generator_python() gapic_generator_register_toolchains() -_grpc_version = "1.67.1" +_grpc_version = "1.71.0" -_grpc_sha256 = "f83aedc91b84d4c396d30b0b2a30f7113c651fe5bc180c8ac08a5f0ff7dcffd2" +_grpc_sha256 = "9313c3f8f4dd3341597f152d506a50caf571fe40f886e24ea9078891990df285" http_archive( name = "com_github_grpc_grpc", @@ -71,29 +71,38 @@ http_archive( urls = ["https://github.com/grpc/grpc/archive/v%s.zip" % _grpc_version], ) # instantiated in grpc_deps(). 
+ +_protobuf_version = "30.2" + +_protobuf_sha256 = "07a43d88fe5a38e434c7f94129cad56a4c43a51f99336074d0799c2f7d4e44c5" + http_archive( name = "com_google_protobuf", - sha256 = "008a11cc56f9b96679b4c285fd05f46d317d685be3ab524b2a310be0fbad987e", - strip_prefix = "protobuf-29.3", - urls = ["https://github.com/protocolbuffers/protobuf/archive/v29.3.tar.gz"], + sha256 = _protobuf_sha256, + strip_prefix = "protobuf-%s" % _protobuf_version, + urls = ["https://github.com/protocolbuffers/protobuf/archive/v%s.tar.gz" % _protobuf_version], ) load("@com_github_grpc_grpc//bazel:grpc_deps.bzl", "grpc_deps") grpc_deps() -# Pin the version of rules_cc to the version that is present in -# https://github.com/protocolbuffers/protobuf/blob/29.x/protobuf_deps.bzl#L92-L98 http_archive( name = "rules_cc", - urls = ["https://github.com/bazelbuild/rules_cc/releases/download/0.0.16/rules_cc-0.0.16.tar.gz"], - sha256 = "bbf1ae2f83305b7053b11e4467d317a7ba3517a12cef608543c1b1c5bf48a4df", - strip_prefix = "rules_cc-0.0.16", + urls = ["https://github.com/bazelbuild/rules_cc/releases/download/0.1.1/rules_cc-0.1.1.tar.gz"], + sha256 = "712d77868b3152dd618c4d64faaddefcc5965f90f5de6e6dd1d5ddcd0be82d42", + strip_prefix = "rules_cc-0.1.1", ) load("@com_google_protobuf//:protobuf_deps.bzl", "protobuf_deps", "PROTOBUF_MAVEN_ARTIFACTS") # This is actually already done within grpc_deps but calling this for Bazel convention. protobuf_deps() +# Add rules_java to resolve the following error +# `The repository '@compatibility_proxy' could not be resolved: Repository '@compatibility_proxy' is not defined` +load("@rules_java//java:rules_java_deps.bzl", "rules_java_dependencies") + +rules_java_dependencies() + # gRPC enforces a specific version of Go toolchain which conflicts with our build. 
# All the relevant parts of grpc_extra_deps() are imported in this WORKSPACE file # explicitly, that is why we do not call grpc_extra_deps() here and call diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 60298a4e5784..4352428cdbcb 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -4,92 +4,92 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -aiohappyeyeballs==2.4.6 \ - --hash=sha256:147ec992cf873d74f5062644332c539fcd42956dc69453fe5204195e560517e1 \ - --hash=sha256:9b05052f9042985d32ecbe4b59a77ae19c006a78f1344d7fdad69d28ded3d0b0 +aiohappyeyeballs==2.6.1 \ + --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ + --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 # via aiohttp -aiohttp==3.11.12 \ - --hash=sha256:0450ada317a65383b7cce9576096150fdb97396dcfe559109b403c7242faffef \ - --hash=sha256:0b5263dcede17b6b0c41ef0c3ccce847d82a7da98709e75cf7efde3e9e3b5cae \ - --hash=sha256:0d5176f310a7fe6f65608213cc74f4228e4f4ce9fd10bcb2bb6da8fc66991462 \ - --hash=sha256:0ed49efcd0dc1611378beadbd97beb5d9ca8fe48579fc04a6ed0844072261b6a \ - --hash=sha256:145a73850926018ec1681e734cedcf2716d6a8697d90da11284043b745c286d5 \ - --hash=sha256:1987770fb4887560363b0e1a9b75aa303e447433c41284d3af2840a2f226d6e0 \ - --hash=sha256:246067ba0cf5560cf42e775069c5d80a8989d14a7ded21af529a4e10e3e0f0e6 \ - --hash=sha256:2c311e2f63e42c1bf86361d11e2c4a59f25d9e7aabdbdf53dc38b885c5435cdb \ - --hash=sha256:2cee3b117a8d13ab98b38d5b6bdcd040cfb4181068d05ce0c474ec9db5f3c5bb \ - --hash=sha256:2de1378f72def7dfb5dbd73d86c19eda0ea7b0a6873910cc37d57e80f10d64e1 \ - --hash=sha256:30f546358dfa0953db92ba620101fefc81574f87b2346556b90b5f3ef16e55ce \ - --hash=sha256:34245498eeb9ae54c687a07ad7f160053911b5745e186afe2d0c0f2898a1ab8a \ - --hash=sha256:392432a2dde22b86f70dd4a0e9671a349446c93965f261dbaecfaf28813e5c42 \ - 
--hash=sha256:3c0600bcc1adfaaac321422d615939ef300df81e165f6522ad096b73439c0f58 \ - --hash=sha256:4016e383f91f2814e48ed61e6bda7d24c4d7f2402c75dd28f7e1027ae44ea204 \ - --hash=sha256:40cd36749a1035c34ba8d8aaf221b91ca3d111532e5ccb5fa8c3703ab1b967ed \ - --hash=sha256:413ad794dccb19453e2b97c2375f2ca3cdf34dc50d18cc2693bd5aed7d16f4b9 \ - --hash=sha256:4a93d28ed4b4b39e6f46fd240896c29b686b75e39cc6992692e3922ff6982b4c \ - --hash=sha256:4ee84c2a22a809c4f868153b178fe59e71423e1f3d6a8cd416134bb231fbf6d3 \ - --hash=sha256:50c5c7b8aa5443304c55c262c5693b108c35a3b61ef961f1e782dd52a2f559c7 \ - --hash=sha256:525410e0790aab036492eeea913858989c4cb070ff373ec3bc322d700bdf47c1 \ - --hash=sha256:526c900397f3bbc2db9cb360ce9c35134c908961cdd0ac25b1ae6ffcaa2507ff \ - --hash=sha256:54775858c7f2f214476773ce785a19ee81d1294a6bedc5cc17225355aab74802 \ - --hash=sha256:584096938a001378484aa4ee54e05dc79c7b9dd933e271c744a97b3b6f644957 \ - --hash=sha256:6130459189e61baac5a88c10019b21e1f0c6d00ebc770e9ce269475650ff7f73 \ - --hash=sha256:67453e603cea8e85ed566b2700efa1f6916aefbc0c9fcb2e86aaffc08ec38e78 \ - --hash=sha256:68d54234c8d76d8ef74744f9f9fc6324f1508129e23da8883771cdbb5818cbef \ - --hash=sha256:6dfe7f984f28a8ae94ff3a7953cd9678550dbd2a1f9bda5dd9c5ae627744c78e \ - --hash=sha256:74bd573dde27e58c760d9ca8615c41a57e719bff315c9adb6f2a4281a28e8798 \ - --hash=sha256:7603ca26d75b1b86160ce1bbe2787a0b706e592af5b2504e12caa88a217767b0 \ - --hash=sha256:76719dd521c20a58a6c256d058547b3a9595d1d885b830013366e27011ffe804 \ - --hash=sha256:7c3623053b85b4296cd3925eeb725e386644fd5bc67250b3bb08b0f144803e7b \ - --hash=sha256:7e44eba534381dd2687be50cbd5f2daded21575242ecfdaf86bbeecbc38dae8e \ - --hash=sha256:7fe3d65279bfbee8de0fb4f8c17fc4e893eed2dba21b2f680e930cc2b09075c5 \ - --hash=sha256:8340def6737118f5429a5df4e88f440746b791f8f1c4ce4ad8a595f42c980bd5 \ - --hash=sha256:84ede78acde96ca57f6cf8ccb8a13fbaf569f6011b9a52f870c662d4dc8cd854 \ - --hash=sha256:850ff6155371fd802a280f8d369d4e15d69434651b844bde566ce97ee2277420 \ - 
--hash=sha256:87a2e00bf17da098d90d4145375f1d985a81605267e7f9377ff94e55c5d769eb \ - --hash=sha256:88d385b8e7f3a870146bf5ea31786ef7463e99eb59e31db56e2315535d811f55 \ - --hash=sha256:8a2fb742ef378284a50766e985804bd6adb5adb5aa781100b09befdbfa757b65 \ - --hash=sha256:8dc0fba9a74b471c45ca1a3cb6e6913ebfae416678d90529d188886278e7f3f6 \ - --hash=sha256:8fa1510b96c08aaad49303ab11f8803787c99222288f310a62f493faf883ede1 \ - --hash=sha256:8fd12d0f989c6099e7b0f30dc6e0d1e05499f3337461f0b2b0dadea6c64b89df \ - --hash=sha256:9060addfa4ff753b09392efe41e6af06ea5dd257829199747b9f15bfad819460 \ - --hash=sha256:930ffa1925393381e1e0a9b82137fa7b34c92a019b521cf9f41263976666a0d6 \ - --hash=sha256:936d8a4f0f7081327014742cd51d320296b56aa6d324461a13724ab05f4b2933 \ - --hash=sha256:97fe431f2ed646a3b56142fc81d238abcbaff08548d6912acb0b19a0cadc146b \ - --hash=sha256:9bd8695be2c80b665ae3f05cb584093a1e59c35ecb7d794d1edd96e8cc9201d7 \ - --hash=sha256:9dec0000d2d8621d8015c293e24589d46fa218637d820894cb7356c77eca3259 \ - --hash=sha256:a478aa11b328983c4444dacb947d4513cb371cd323f3845e53caeda6be5589d5 \ - --hash=sha256:a481a574af914b6e84624412666cbfbe531a05667ca197804ecc19c97b8ab1b0 \ - --hash=sha256:a4ac6a0f0f6402854adca4e3259a623f5c82ec3f0c049374133bcb243132baf9 \ - --hash=sha256:a5e69046f83c0d3cb8f0d5bd9b8838271b1bc898e01562a04398e160953e8eb9 \ - --hash=sha256:a7442662afebbf7b4c6d28cb7aab9e9ce3a5df055fc4116cc7228192ad6cb484 \ - --hash=sha256:aa8a8caca81c0a3e765f19c6953416c58e2f4cc1b84829af01dd1c771bb2f91f \ - --hash=sha256:ab3247d58b393bda5b1c8f31c9edece7162fc13265334217785518dd770792b8 \ - --hash=sha256:b10a47e5390c4b30a0d58ee12581003be52eedd506862ab7f97da7a66805befb \ - --hash=sha256:b34508f1cd928ce915ed09682d11307ba4b37d0708d1f28e5774c07a7674cac9 \ - --hash=sha256:b8d3bb96c147b39c02d3db086899679f31958c5d81c494ef0fc9ef5bb1359b3d \ - --hash=sha256:b9d45dbb3aaec05cf01525ee1a7ac72de46a8c425cb75c003acd29f76b1ffe94 \ - --hash=sha256:bf4480a5438f80e0f1539e15a7eb8b5f97a26fe087e9828e2c0ec2be119a9f72 \ - 
--hash=sha256:c160a04283c8c6f55b5bf6d4cad59bb9c5b9c9cd08903841b25f1f7109ef1259 \ - --hash=sha256:c96a43822f1f9f69cc5c3706af33239489a6294be486a0447fb71380070d4d5f \ - --hash=sha256:c9fd9dcf9c91affe71654ef77426f5cf8489305e1c66ed4816f5a21874b094b9 \ - --hash=sha256:cddb31f8474695cd61fc9455c644fc1606c164b93bff2490390d90464b4655df \ - --hash=sha256:ce1bb21fc7d753b5f8a5d5a4bae99566386b15e716ebdb410154c16c91494d7f \ - --hash=sha256:d1c031a7572f62f66f1257db37ddab4cb98bfaf9b9434a3b4840bf3560f5e788 \ - --hash=sha256:d589264dbba3b16e8951b6f145d1e6b883094075283dafcab4cdd564a9e353a0 \ - --hash=sha256:dc065a4285307607df3f3686363e7f8bdd0d8ab35f12226362a847731516e42c \ - --hash=sha256:e10c440d142fa8b32cfdb194caf60ceeceb3e49807072e0dc3a8887ea80e8c16 \ - --hash=sha256:e3552fe98e90fdf5918c04769f338a87fa4f00f3b28830ea9b78b1bdc6140e0d \ - --hash=sha256:e392804a38353900c3fd8b7cacbea5132888f7129f8e241915e90b85f00e3250 \ - --hash=sha256:e4cecdb52aaa9994fbed6b81d4568427b6002f0a91c322697a4bfcc2b2363f5a \ - --hash=sha256:e5148ca8955affdfeb864aca158ecae11030e952b25b3ae15d4e2b5ba299bad2 \ - --hash=sha256:e6b2732ef3bafc759f653a98881b5b9cdef0716d98f013d376ee8dfd7285abf1 \ - --hash=sha256:ea756b5a7bac046d202a9a3889b9a92219f885481d78cd318db85b15cc0b7bcf \ - --hash=sha256:edb69b9589324bdc40961cdf0657815df674f1743a8d5ad9ab56a99e4833cfdd \ - --hash=sha256:f0203433121484b32646a5f5ea93ae86f3d9559d7243f07e8c0eab5ff8e3f70e \ - --hash=sha256:f6a19bcab7fbd8f8649d6595624856635159a6527861b9cdc3447af288a00c00 \ - --hash=sha256:f752e80606b132140883bb262a457c475d219d7163d996dc9072434ffb0784c4 \ - --hash=sha256:f7914ab70d2ee8ab91c13e5402122edbc77821c66d2758abb53aabe87f013287 +aiohttp==3.11.18 \ + --hash=sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717 \ + --hash=sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d \ + --hash=sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8 \ + 
--hash=sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda \ + --hash=sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421 \ + --hash=sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9 \ + --hash=sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d \ + --hash=sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8 \ + --hash=sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3 \ + --hash=sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3 \ + --hash=sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361 \ + --hash=sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137 \ + --hash=sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b \ + --hash=sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd \ + --hash=sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8 \ + --hash=sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f \ + --hash=sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d \ + --hash=sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec \ + --hash=sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb \ + --hash=sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0 \ + --hash=sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756 \ + --hash=sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6 \ + --hash=sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9 \ + --hash=sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009 \ + --hash=sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08 \ + --hash=sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533 \ + --hash=sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811 \ + 
--hash=sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721 \ + --hash=sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118 \ + --hash=sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55 \ + --hash=sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609 \ + --hash=sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1 \ + --hash=sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66 \ + --hash=sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2 \ + --hash=sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef \ + --hash=sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f \ + --hash=sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2 \ + --hash=sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804 \ + --hash=sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f \ + --hash=sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94 \ + --hash=sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30 \ + --hash=sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e \ + --hash=sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1 \ + --hash=sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4 \ + --hash=sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f \ + --hash=sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4 \ + --hash=sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f \ + --hash=sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6 \ + --hash=sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4 \ + --hash=sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f \ + --hash=sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7 \ + 
--hash=sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421 \ + --hash=sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e \ + --hash=sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935 \ + --hash=sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c \ + --hash=sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea \ + --hash=sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7 \ + --hash=sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868 \ + --hash=sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a \ + --hash=sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc \ + --hash=sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a \ + --hash=sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643 \ + --hash=sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01 \ + --hash=sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829 \ + --hash=sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93 \ + --hash=sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9 \ + --hash=sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78 \ + --hash=sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508 \ + --hash=sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd \ + --hash=sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1 \ + --hash=sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2 \ + --hash=sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6 \ + --hash=sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261 \ + --hash=sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863 \ + --hash=sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1 \ + 
--hash=sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb \ + --hash=sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415 \ + --hash=sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000 \ + --hash=sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1 \ + --hash=sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7 \ + --hash=sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798 # via -r requirements.in aiosignal==1.3.2 \ --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ @@ -99,111 +99,111 @@ async-timeout==5.0.1 \ --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 # via aiohttp -attrs==25.1.0 \ - --hash=sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e \ - --hash=sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a +attrs==25.3.0 \ + --hash=sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3 \ + --hash=sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b # via aiohttp -cachetools==5.5.1 \ - --hash=sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95 \ - --hash=sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb +cachetools==5.5.2 \ + --hash=sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4 \ + --hash=sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a # via google-auth -certifi==2025.1.31 \ - --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \ - --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe +certifi==2025.4.26 \ + --hash=sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6 \ + --hash=sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3 # via requests 
-charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - 
--hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - 
--hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - 
--hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 +charset-normalizer==3.4.2 \ + --hash=sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4 \ + --hash=sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45 \ + 
--hash=sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7 \ + --hash=sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0 \ + --hash=sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7 \ + --hash=sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d \ + --hash=sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d \ + --hash=sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0 \ + --hash=sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184 \ + --hash=sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db \ + --hash=sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b \ + --hash=sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64 \ + --hash=sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b \ + --hash=sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8 \ + --hash=sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff \ + --hash=sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344 \ + --hash=sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58 \ + --hash=sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e \ + --hash=sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471 \ + --hash=sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148 \ + --hash=sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a \ + --hash=sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836 \ + --hash=sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e \ + --hash=sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63 \ + --hash=sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c \ + --hash=sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1 \ + 
--hash=sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01 \ + --hash=sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366 \ + --hash=sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58 \ + --hash=sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5 \ + --hash=sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c \ + --hash=sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2 \ + --hash=sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a \ + --hash=sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597 \ + --hash=sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b \ + --hash=sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5 \ + --hash=sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb \ + --hash=sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f \ + --hash=sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0 \ + --hash=sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941 \ + --hash=sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0 \ + --hash=sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86 \ + --hash=sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7 \ + --hash=sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7 \ + --hash=sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455 \ + --hash=sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6 \ + --hash=sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4 \ + --hash=sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0 \ + --hash=sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3 \ + --hash=sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1 \ + 
--hash=sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6 \ + --hash=sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981 \ + --hash=sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c \ + --hash=sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980 \ + --hash=sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645 \ + --hash=sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7 \ + --hash=sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12 \ + --hash=sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa \ + --hash=sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd \ + --hash=sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef \ + --hash=sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f \ + --hash=sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2 \ + --hash=sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d \ + --hash=sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5 \ + --hash=sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02 \ + --hash=sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3 \ + --hash=sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd \ + --hash=sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e \ + --hash=sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214 \ + --hash=sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd \ + --hash=sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a \ + --hash=sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c \ + --hash=sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681 \ + --hash=sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba \ + 
--hash=sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f \ + --hash=sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a \ + --hash=sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28 \ + --hash=sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691 \ + --hash=sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82 \ + --hash=sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a \ + --hash=sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027 \ + --hash=sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7 \ + --hash=sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518 \ + --hash=sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf \ + --hash=sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b \ + --hash=sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9 \ + --hash=sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544 \ + --hash=sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da \ + --hash=sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509 \ + --hash=sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f \ + --hash=sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a \ + --hash=sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f # via requests click==8.1.8 \ --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ @@ -213,177 +213,185 @@ exceptiongroup==1.2.2 \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc # via pytest -frozenlist==1.5.0 \ - --hash=sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e \ - 
--hash=sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf \ - --hash=sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6 \ - --hash=sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a \ - --hash=sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d \ - --hash=sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f \ - --hash=sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28 \ - --hash=sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b \ - --hash=sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9 \ - --hash=sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2 \ - --hash=sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec \ - --hash=sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2 \ - --hash=sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c \ - --hash=sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336 \ - --hash=sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4 \ - --hash=sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d \ - --hash=sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b \ - --hash=sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c \ - --hash=sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10 \ - --hash=sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08 \ - --hash=sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942 \ - --hash=sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8 \ - --hash=sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f \ - --hash=sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10 \ - --hash=sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5 \ - 
--hash=sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6 \ - --hash=sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21 \ - --hash=sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c \ - --hash=sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d \ - --hash=sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923 \ - --hash=sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608 \ - --hash=sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de \ - --hash=sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17 \ - --hash=sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0 \ - --hash=sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f \ - --hash=sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641 \ - --hash=sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c \ - --hash=sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a \ - --hash=sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0 \ - --hash=sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9 \ - --hash=sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab \ - --hash=sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f \ - --hash=sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3 \ - --hash=sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a \ - --hash=sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784 \ - --hash=sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604 \ - --hash=sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d \ - --hash=sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5 \ - --hash=sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03 \ - 
--hash=sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e \ - --hash=sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953 \ - --hash=sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee \ - --hash=sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d \ - --hash=sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817 \ - --hash=sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3 \ - --hash=sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039 \ - --hash=sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f \ - --hash=sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9 \ - --hash=sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf \ - --hash=sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76 \ - --hash=sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba \ - --hash=sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171 \ - --hash=sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb \ - --hash=sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439 \ - --hash=sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631 \ - --hash=sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972 \ - --hash=sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d \ - --hash=sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869 \ - --hash=sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9 \ - --hash=sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411 \ - --hash=sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723 \ - --hash=sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2 \ - --hash=sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b \ - 
--hash=sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99 \ - --hash=sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e \ - --hash=sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840 \ - --hash=sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3 \ - --hash=sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb \ - --hash=sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3 \ - --hash=sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0 \ - --hash=sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca \ - --hash=sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45 \ - --hash=sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e \ - --hash=sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f \ - --hash=sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5 \ - --hash=sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307 \ - --hash=sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e \ - --hash=sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2 \ - --hash=sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778 \ - --hash=sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a \ - --hash=sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30 \ - --hash=sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a +frozenlist==1.6.0 \ + --hash=sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117 \ + --hash=sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2 \ + --hash=sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42 \ + --hash=sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812 \ + 
--hash=sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3 \ + --hash=sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a \ + --hash=sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572 \ + --hash=sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa \ + --hash=sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b \ + --hash=sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626 \ + --hash=sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e \ + --hash=sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d \ + --hash=sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c \ + --hash=sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530 \ + --hash=sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878 \ + --hash=sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e \ + --hash=sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869 \ + --hash=sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd \ + --hash=sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603 \ + --hash=sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606 \ + --hash=sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85 \ + --hash=sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64 \ + --hash=sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f \ + --hash=sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0 \ + --hash=sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c \ + --hash=sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4 \ + --hash=sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103 \ + --hash=sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02 \ + 
--hash=sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191 \ + --hash=sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0 \ + --hash=sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e \ + --hash=sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791 \ + --hash=sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983 \ + --hash=sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f \ + --hash=sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff \ + --hash=sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8 \ + --hash=sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860 \ + --hash=sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8 \ + --hash=sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25 \ + --hash=sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f \ + --hash=sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba \ + --hash=sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24 \ + --hash=sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e \ + --hash=sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd \ + --hash=sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911 \ + --hash=sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c \ + --hash=sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595 \ + --hash=sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c \ + --hash=sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc \ + --hash=sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2 \ + --hash=sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75 \ + --hash=sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4 \ + 
--hash=sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0 \ + --hash=sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe \ + --hash=sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249 \ + --hash=sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c \ + --hash=sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576 \ + --hash=sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b \ + --hash=sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770 \ + --hash=sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046 \ + --hash=sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584 \ + --hash=sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497 \ + --hash=sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f \ + --hash=sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f \ + --hash=sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b \ + --hash=sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f \ + --hash=sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d \ + --hash=sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a \ + --hash=sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e \ + --hash=sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68 \ + --hash=sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189 \ + --hash=sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9 \ + --hash=sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8 \ + --hash=sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1 \ + --hash=sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0 \ + --hash=sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3 \ + 
--hash=sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29 \ + --hash=sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0 \ + --hash=sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215 \ + --hash=sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590 \ + --hash=sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c \ + --hash=sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821 \ + --hash=sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1 \ + --hash=sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769 \ + --hash=sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506 \ + --hash=sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3 \ + --hash=sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348 \ + --hash=sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad \ + --hash=sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a \ + --hash=sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad \ + --hash=sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6 \ + --hash=sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45 \ + --hash=sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188 \ + --hash=sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e \ + --hash=sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70 \ + --hash=sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70 \ + --hash=sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1 \ + --hash=sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106 \ + --hash=sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd \ + --hash=sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc \ + 
--hash=sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352 \ + --hash=sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91 \ + --hash=sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1 \ + --hash=sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f # via # aiohttp # aiosignal -google-api-core==2.24.1 \ - --hash=sha256:bc78d608f5a5bf853b80bd70a795f703294de656c096c0968320830a4bc280f1 \ - --hash=sha256:f8b36f5456ab0dd99a1b693a40a31d1e7757beea380ad1b38faaf8941eae9d8a +google-api-core==2.24.2 \ + --hash=sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9 \ + --hash=sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696 # via -r requirements.in -google-auth==2.38.0 \ - --hash=sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4 \ - --hash=sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a +google-auth==2.40.0 \ + --hash=sha256:c277cf39f7c192d8540eb6331c08b5a0796e8041af8343ae73dd6b269732ca6c \ + --hash=sha256:dc3a5078acb1043c3e43685c22d628afe40af8559cf561de388e0c939280fcc8 # via google-api-core -googleapis-common-protos[grpc]==1.67.0 \ - --hash=sha256:21398025365f138be356d5923e9168737d94d46a72aefee4a6110a1f23463c86 \ - --hash=sha256:579de760800d13616f51cf8be00c876f00a9f146d3e6510e19d1f4111758b741 +googleapis-common-protos[grpc]==1.70.0 \ + --hash=sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257 \ + --hash=sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8 # via # -r requirements.in # google-api-core # grpc-google-iam-v1 -grpc-google-iam-v1==0.14.0 \ - --hash=sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99 \ - --hash=sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4 +grpc-google-iam-v1==0.14.2 \ + --hash=sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351 \ + 
--hash=sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20 # via -r requirements.in -grpcio==1.70.0 \ - --hash=sha256:0495c86a55a04a874c7627fd33e5beaee771917d92c0e6d9d797628ac40e7655 \ - --hash=sha256:07269ff4940f6fb6710951116a04cd70284da86d0a4368fd5a3b552744511f5a \ - --hash=sha256:0a5c78d5198a1f0aa60006cd6eb1c912b4a1520b6a3968e677dbcba215fabb40 \ - --hash=sha256:0ba0a173f4feacf90ee618fbc1a27956bfd21260cd31ced9bc707ef551ff7dc7 \ - --hash=sha256:0cd430b9215a15c10b0e7d78f51e8a39d6cf2ea819fd635a7214fae600b1da27 \ - --hash=sha256:0de706c0a5bb9d841e353f6343a9defc9fc35ec61d6eb6111802f3aa9fef29e1 \ - --hash=sha256:17325b0be0c068f35770f944124e8839ea3185d6d54862800fc28cc2ffad205a \ - --hash=sha256:2394e3381071045a706ee2eeb6e08962dd87e8999b90ac15c55f56fa5a8c9597 \ - --hash=sha256:27cc75e22c5dba1fbaf5a66c778e36ca9b8ce850bf58a9db887754593080d839 \ - --hash=sha256:2b0d02e4b25a5c1f9b6c7745d4fa06efc9fd6a611af0fb38d3ba956786b95199 \ - --hash=sha256:374d014f29f9dfdb40510b041792e0e2828a1389281eb590df066e1cc2b404e5 \ - --hash=sha256:3b0f01f6ed9994d7a0b27eeddea43ceac1b7e6f3f9d86aeec0f0064b8cf50fdb \ - --hash=sha256:4119fed8abb7ff6c32e3d2255301e59c316c22d31ab812b3fbcbaf3d0d87cc68 \ - --hash=sha256:412faabcc787bbc826f51be261ae5fa996b21263de5368a55dc2cf824dc5090e \ - --hash=sha256:4f1937f47c77392ccd555728f564a49128b6a197a05a5cd527b796d36f3387d0 \ - --hash=sha256:5413549fdf0b14046c545e19cfc4eb1e37e9e1ebba0ca390a8d4e9963cab44d2 \ - --hash=sha256:558c386ecb0148f4f99b1a65160f9d4b790ed3163e8610d11db47838d452512d \ - --hash=sha256:58ad9ba575b39edef71f4798fdb5c7b6d02ad36d47949cd381d4392a5c9cbcd3 \ - --hash=sha256:5ea67c72101d687d44d9c56068328da39c9ccba634cabb336075fae2eab0d04b \ - --hash=sha256:7385b1cb064734005204bc8994eed7dcb801ed6c2eda283f613ad8c6c75cf873 \ - --hash=sha256:7c73c42102e4a5ec76608d9b60227d917cea46dff4d11d372f64cbeb56d259d0 \ - --hash=sha256:8058667a755f97407fca257c844018b80004ae8035565ebc2812cc550110718d \ - 
--hash=sha256:879a61bf52ff8ccacbedf534665bb5478ec8e86ad483e76fe4f729aaef867cab \ - --hash=sha256:880bfb43b1bb8905701b926274eafce5c70a105bc6b99e25f62e98ad59cb278e \ - --hash=sha256:8d1584a68d5922330025881e63a6c1b54cc8117291d382e4fa69339b6d914c56 \ - --hash=sha256:95469d1977429f45fe7df441f586521361e235982a0b39e33841549143ae2851 \ - --hash=sha256:9e654c4b17d07eab259d392e12b149c3a134ec52b11ecdc6a515b39aceeec898 \ - --hash=sha256:a31d7e3b529c94e930a117b2175b2efd179d96eb3c7a21ccb0289a8ab05b645c \ - --hash=sha256:aa47688a65643afd8b166928a1da6247d3f46a2784d301e48ca1cc394d2ffb40 \ - --hash=sha256:aa573896aeb7d7ce10b1fa425ba263e8dddd83d71530d1322fd3a16f31257b4a \ - --hash=sha256:aba19419aef9b254e15011b230a180e26e0f6864c90406fdbc255f01d83bc83c \ - --hash=sha256:ac073fe1c4cd856ebcf49e9ed6240f4f84d7a4e6ee95baa5d66ea05d3dd0df7f \ - --hash=sha256:b3c76701428d2df01964bc6479422f20e62fcbc0a37d82ebd58050b86926ef8c \ - --hash=sha256:b745d2c41b27650095e81dea7091668c040457483c9bdb5d0d9de8f8eb25e59f \ - --hash=sha256:bb491125103c800ec209d84c9b51f1c60ea456038e4734688004f377cfacc113 \ - --hash=sha256:c1af8e15b0f0fe0eac75195992a63df17579553b0c4af9f8362cc7cc99ccddf4 \ - --hash=sha256:c78b339869f4dbf89881e0b6fbf376313e4f845a42840a7bdf42ee6caed4b11f \ - --hash=sha256:cb5277db254ab7586769e490b7b22f4ddab3876c490da0a1a9d7c695ccf0bf77 \ - --hash=sha256:cbce24409beaee911c574a3d75d12ffb8c3e3dd1b813321b1d7a96bbcac46bf4 \ - --hash=sha256:cd24d2d9d380fbbee7a5ac86afe9787813f285e684b0271599f95a51bce33528 \ - --hash=sha256:ce7df14b2dcd1102a2ec32f621cc9fab6695effef516efbc6b063ad749867295 \ - --hash=sha256:d24035d49e026353eb042bf7b058fb831db3e06d52bee75c5f2f3ab453e71aca \ - --hash=sha256:d405b005018fd516c9ac529f4b4122342f60ec1cee181788249372524e6db429 \ - --hash=sha256:d63764963412e22f0491d0d32833d71087288f4e24cbcddbae82476bfa1d81fd \ - --hash=sha256:dbe41ad140df911e796d4463168e33ef80a24f5d21ef4d1e310553fcd2c4a386 \ - --hash=sha256:dfa089a734f24ee5f6880c83d043e4f46bf812fcea5181dcb3a572db1e79e01c \ - 
--hash=sha256:e27585831aa6b57b9250abaf147003e126cd3a6c6ca0c531a01996f31709bed1 \ - --hash=sha256:e7831a0fc1beeeb7759f737f5acd9fdcda520e955049512d68fda03d91186eea \ - --hash=sha256:ed9718f17fbdb472e33b869c77a16d0b55e166b100ec57b016dc7de9c8d236bf \ - --hash=sha256:ef4c14508299b1406c32bdbb9fb7b47612ab979b04cf2b27686ea31882387cff \ - --hash=sha256:f19375f0300b96c0117aca118d400e76fede6db6e91f3c34b7b035822e06c35f \ - --hash=sha256:f2af68a6f5c8f78d56c145161544ad0febbd7479524a59c16b3e25053f39c87f \ - --hash=sha256:f32090238b720eb585248654db8e3afc87b48d26ac423c8dde8334a232ff53c9 \ - --hash=sha256:fe9dbd916df3b60e865258a8c72ac98f3ac9e2a9542dcb72b7a34d236242a5ce \ - --hash=sha256:ff4a8112a79464919bb21c18e956c54add43ec9a4850e3949da54f61c241a4a6 +grpcio==1.71.0 \ + --hash=sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea \ + --hash=sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7 \ + --hash=sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537 \ + --hash=sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b \ + --hash=sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41 \ + --hash=sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366 \ + --hash=sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b \ + --hash=sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c \ + --hash=sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033 \ + --hash=sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3 \ + --hash=sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79 \ + --hash=sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29 \ + --hash=sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7 \ + --hash=sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e \ + 
--hash=sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67 \ + --hash=sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a \ + --hash=sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8 \ + --hash=sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d \ + --hash=sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb \ + --hash=sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3 \ + --hash=sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4 \ + --hash=sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a \ + --hash=sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3 \ + --hash=sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3 \ + --hash=sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509 \ + --hash=sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97 \ + --hash=sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6 \ + --hash=sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b \ + --hash=sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e \ + --hash=sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637 \ + --hash=sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a \ + --hash=sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d \ + --hash=sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7 \ + --hash=sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd \ + --hash=sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69 \ + --hash=sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d \ + --hash=sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379 \ + --hash=sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7 \ + 
--hash=sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32 \ + --hash=sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c \ + --hash=sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef \ + --hash=sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444 \ + --hash=sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec \ + --hash=sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594 \ + --hash=sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804 \ + --hash=sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7 \ + --hash=sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73 \ + --hash=sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5 \ + --hash=sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db \ + --hash=sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db \ + --hash=sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455 # via # googleapis-common-protos # grpc-google-iam-v1 @@ -397,51 +405,51 @@ inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 # via -r requirements.in -iniconfig==2.0.0 \ - --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ - --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 +iniconfig==2.1.0 \ + --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ + --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 # via pytest jinja2==3.1.6 \ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 # via -r requirements.in -libcst==1.6.0 \ - 
--hash=sha256:05c32de72553cb93ff606c7d2421ce1eab1f0740c8c4b715444e2ae42f42b1b6 \ - --hash=sha256:0c0fb2f7b74605832cc38d79e9d104f92a8aaeec7bf8f2759b20c5ba3786a321 \ - --hash=sha256:1b8370d0f7092a17b7fcda0e1539d0162cf35a0c19af94842b09c9dddc382acd \ - --hash=sha256:1bd00399d20bf93590b6f02647f8be08e2b730e050e6b7360f669254e69c98f5 \ - --hash=sha256:1bd11863889b630fe41543b4eb5e2dd445447a7f89e6b58229e83c9e52a74942 \ - --hash=sha256:2f02d0da6dfbad44e6ec4d1e5791e17afe95d9fe89bce4374bf109fd9c103a50 \ - --hash=sha256:2f3c85602e5a6d3aec0a8fc74230363f943004d7c2b2a6a1c09b320b61692241 \ - --hash=sha256:31e45f88d4a9a8e5b690ed14a564fcbace14b10f5e7b6797d6d97f4226b395da \ - --hash=sha256:38f3f25d4f5d8713cdb6a7bd41d75299de3c2416b9890a34d9b05417b8e64c1d \ - --hash=sha256:3fb953fc0155532f366ff40f6a23f191250134d6928e02074ae4eb3531fa6c30 \ - --hash=sha256:48406225378ee9208edb1e5a10451bea810262473af1a2f2473737fd16d34e3a \ - --hash=sha256:4cd011fcd79b76be216440ec296057780223674bc2566662c4bc50d3c5ecd58e \ - --hash=sha256:5786240358b122ad901bb0b7e6b7467085b2317333233d7c7d7cac46388fbd77 \ - --hash=sha256:5ac6d68364031f0b554d8920a69b33f25ec6ef351fa31b4e8f3676abb729ce36 \ - --hash=sha256:63a8893dfc344b9b08bfaf4e433b16a7e2e9361f8362fa73eaecc4d379c328ba \ - --hash=sha256:69b705f5b1faa66f115ede52a970d7613d3a8fb988834f853f7fb46870a041d2 \ - --hash=sha256:6a12a4766ce5874ccb31a1cc095cff47e2fb35755954965fe77458d9e5b361a8 \ - --hash=sha256:8bf59a21e9968dc4e7c301fac660bf54bc7d4dcadc0b1abf31b1cac34e800555 \ - --hash=sha256:8e4fcd791cab0fe8287b6edd0d78512b6475b87d906562a5d2d0999cb6d23b8d \ - --hash=sha256:91242ccbae6e7a070b33ebe03d3677c54bf678653538fbaa89597a59e4a13b2d \ - --hash=sha256:96506807dc01c9efcea8ab57d9ea18fdc87b85514cc8ee2f8568fab6df861f02 \ - --hash=sha256:984512829a80f963bfc1803342219a4264a8d4206df0a30eae9bce921357a938 \ - --hash=sha256:a9e71a046b4a91950125967f5ee67389f25a2511103e5595508f0591a5f50bc0 \ - --hash=sha256:b3d274115d134a550fe8a0b38780a28a659d4a35ac6068c7c92fffe6661b519c \ - 
--hash=sha256:bdc95df61838d708adb37e18af1615491f6cac59557fd11077664dd956fe4528 \ - --hash=sha256:bfcd78a5e775f155054ed50d047a260cd23f0f6a89ef2a57e10bdb9c697680b8 \ - --hash=sha256:c4486921bebd33d67bbbd605aff8bfaefd2d13dc73c20c1fde2fb245880b7fd6 \ - --hash=sha256:c527472093b5b64ffa65d33c472da38952827abbca18c786d559d6d6122bc891 \ - --hash=sha256:cd2b28688dabf0f7a166b47ab1c7d5c0b6ef8c9a05ad932618471a33fe591a4a \ - --hash=sha256:d25132f24edc24895082589645dbb8972c0eff6c9716ff71932fa72643d7c74f \ - --hash=sha256:d45513f6cd3dbb2a80cf21a53bc6e6e560414edea17c474c784100e10aebe921 \ - --hash=sha256:d65550ac686bff9395398afacbc88fe812363703a4161108e8a6db066d30b96e \ - --hash=sha256:dac722aade8796a1e78662c3ed424f0ab9f1dc0e8fdf3088610354cdd709e53f \ - --hash=sha256:df3f452e074893dfad7746a041caeb3cde75bd9fbca4ea7b223012e112d1da8c \ - --hash=sha256:e80ecdbe3fa43b3793cae8fa0b07a985bd9a693edbe6e9d076f5422ecadbf0db \ - --hash=sha256:f8c70a124d7a7d326abdc9a6261013c57d36f21c6c6370de5dd3e6a040c4ee5e +libcst==1.7.0 \ + --hash=sha256:0456381c939169c4f11caecdb30f7aca6f234640731f8f965849c1631930536b \ + --hash=sha256:09a5530b40a15dbe6fac842ef2ad87ad561760779380ccf3ade6850854d81406 \ + --hash=sha256:14e5c1d427c33d50df75be6bc999a7b2d7c6b7840e2361a18a6f354db50cb18e \ + --hash=sha256:1560598f5c56681adbd32f4b08e9cffcd45a021921d1d784370a7d4d9a2fac11 \ + --hash=sha256:340054c57abcd42953248af18ed278be651a03b1c2a1616f7e1f1ef90b6018ce \ + --hash=sha256:3923a341a787c1f454909e726a6213dd59c3db26c6e56d0a1fc4f2f7e96b45d7 \ + --hash=sha256:3d2ec10015e86a4402c3d2084ede6c7c9268faea1ecb99592fe9e291c515aaa2 \ + --hash=sha256:4c568e14d29489f09faf4915af18235f805d5aa60fa194023b4fadf3209f0c94 \ + --hash=sha256:57a6bcfc8ca8a0bb9e89a2dbf63ee8f0c7e8353a130528dcb47c9e59c2dc8c94 \ + --hash=sha256:5d5ba9314569865effd5baff3a58ceb2cced52228e181824759c68486a7ec8f4 \ + --hash=sha256:5e22738ec2855803f8242e6bf78057389d10f8954db34bf7079c82abab1b8b95 \ + 
--hash=sha256:5e50e6960ecc3ed67f39fec63aa329e772d5d27f8e2334e30f19a94aa14489f1 \ + --hash=sha256:6137fe549bfbb017283c3cf85419eb0dfaa20a211ad6d525538a2494e248a84b \ + --hash=sha256:61bfc90c8a4594296f8b68702f494dfdfec6e745a4abc0cfa8069d7f22061424 \ + --hash=sha256:6523731bfbdbc045ff8649130fe14a46b31ad6925f67acdc0e0d80a0c61719fd \ + --hash=sha256:71a8f59f3472fe8c0f6e2fad457825ea2ccad8c4c713cca55a91ff2cbfa9bc03 \ + --hash=sha256:81036e820249937608db7e72d0799180122d40d76d0c0414c454f8aa2ffa9c51 \ + --hash=sha256:8f6e693281d6e9a62414205fb300ec228ddc902ca9cb965a09f11561dc10aa94 \ + --hash=sha256:932a4c4508bd4cf5248c99b7218bb86af97d87fefa2bdab7ea8a0c28c270724a \ + --hash=sha256:93417d36c2a1b70d651d0e970ff73339e8dcd64d341672b68823fa0039665022 \ + --hash=sha256:9370c23a3f609280c3f2296d61d34dd32afd7a1c9b19e4e29cc35cb2e2544363 \ + --hash=sha256:94acd51ea1206460c20dea764c59222e62c45ae8a486f22024f063d11a7bca88 \ + --hash=sha256:9add619a825d6f176774110d79dc3137f353a236c1e3bcd6e063ca6d93d6e0ae \ + --hash=sha256:9cd5ab15b12a37f0e9994d8847d5670da936a93d98672c442a956fab34ea0c15 \ + --hash=sha256:a252fa03ea00986f03100379f11e15d381103a09667900fb0fa2076cec19081a \ + --hash=sha256:a63f44ffa81292f183656234c7f2848653ff45c17d867db83c9335119e28aafa \ + --hash=sha256:b52692a28d0d958ebfabcf8bfce5fcf2c8582967310d35e6111a6e2d4db96659 \ + --hash=sha256:c3445dce908fd4971ce9bb5fef5742e26c984027676e3dcf24875fbed1ff7e4c \ + --hash=sha256:c8d6176a667d2db0132d133dad6bbf965f915f3071559342ca2cdbbec537ed12 \ + --hash=sha256:ca4e91aa854758040fa6fe7036fbe7f90a36a7d283fa1df8587b6f73084fc997 \ + --hash=sha256:cdae6e632d222d8db7cb98d7cecb45597c21b8e3841d0c98d4fca79c49dad04b \ + --hash=sha256:d12ffe199ff677a37abfb6b21aba1407eb02246dc7e6bcaf4f8e24a195ec4ad6 \ + --hash=sha256:d894c48f682b0061fdb2c983d5e64c30334db6ce0783560dbbb9df0163179c0c \ + --hash=sha256:e635eadb6043d5f967450af27125811c6ccc7eeb4d8c5fd4f1bece9d96418781 \ + --hash=sha256:e7d9a796c2f3d5b71dd06b7578e8d1fb1c031d2eb8d59e7b40e288752ae1b210 \ + 
--hash=sha256:fa519d4391326329f37860c2f2aaf80cb11a6122d14afa2f4f00dde6fcfa7ae4 # via -r requirements.in markupsafe==3.0.2 \ --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ @@ -508,214 +516,240 @@ markupsafe==3.0.2 \ # via # -r requirements.in # jinja2 -multidict==6.1.0 \ - --hash=sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f \ - --hash=sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056 \ - --hash=sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761 \ - --hash=sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3 \ - --hash=sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b \ - --hash=sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6 \ - --hash=sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748 \ - --hash=sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966 \ - --hash=sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f \ - --hash=sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1 \ - --hash=sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6 \ - --hash=sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada \ - --hash=sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305 \ - --hash=sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2 \ - --hash=sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d \ - --hash=sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a \ - --hash=sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef \ - --hash=sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c \ - --hash=sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb \ - --hash=sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60 \ - 
--hash=sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6 \ - --hash=sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4 \ - --hash=sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478 \ - --hash=sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81 \ - --hash=sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7 \ - --hash=sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56 \ - --hash=sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3 \ - --hash=sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6 \ - --hash=sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30 \ - --hash=sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb \ - --hash=sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506 \ - --hash=sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0 \ - --hash=sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925 \ - --hash=sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c \ - --hash=sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6 \ - --hash=sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e \ - --hash=sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95 \ - --hash=sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2 \ - --hash=sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133 \ - --hash=sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2 \ - --hash=sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa \ - --hash=sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3 \ - --hash=sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3 \ - --hash=sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436 \ - 
--hash=sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657 \ - --hash=sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581 \ - --hash=sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492 \ - --hash=sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43 \ - --hash=sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2 \ - --hash=sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2 \ - --hash=sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926 \ - --hash=sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057 \ - --hash=sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc \ - --hash=sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80 \ - --hash=sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255 \ - --hash=sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1 \ - --hash=sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972 \ - --hash=sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53 \ - --hash=sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1 \ - --hash=sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423 \ - --hash=sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a \ - --hash=sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160 \ - --hash=sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c \ - --hash=sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd \ - --hash=sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa \ - --hash=sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5 \ - --hash=sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b \ - --hash=sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa \ - 
--hash=sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef \ - --hash=sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44 \ - --hash=sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4 \ - --hash=sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156 \ - --hash=sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753 \ - --hash=sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28 \ - --hash=sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d \ - --hash=sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a \ - --hash=sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304 \ - --hash=sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008 \ - --hash=sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429 \ - --hash=sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72 \ - --hash=sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399 \ - --hash=sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3 \ - --hash=sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392 \ - --hash=sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167 \ - --hash=sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c \ - --hash=sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774 \ - --hash=sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351 \ - --hash=sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76 \ - --hash=sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875 \ - --hash=sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd \ - --hash=sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28 \ - --hash=sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db 
+multidict==6.4.3 \ + --hash=sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756 \ + --hash=sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8 \ + --hash=sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef \ + --hash=sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c \ + --hash=sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5 \ + --hash=sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8 \ + --hash=sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db \ + --hash=sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713 \ + --hash=sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44 \ + --hash=sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378 \ + --hash=sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5 \ + --hash=sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676 \ + --hash=sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08 \ + --hash=sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea \ + --hash=sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9 \ + --hash=sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9 \ + --hash=sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e \ + --hash=sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b \ + --hash=sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508 \ + --hash=sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1 \ + --hash=sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852 \ + --hash=sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac \ + --hash=sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde \ + 
--hash=sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8 \ + --hash=sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504 \ + --hash=sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5 \ + --hash=sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02 \ + --hash=sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4 \ + --hash=sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec \ + --hash=sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a \ + --hash=sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666 \ + --hash=sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc \ + --hash=sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf \ + --hash=sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790 \ + --hash=sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8 \ + --hash=sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589 \ + --hash=sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d \ + --hash=sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07 \ + --hash=sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56 \ + --hash=sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21 \ + --hash=sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7 \ + --hash=sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9 \ + --hash=sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343 \ + --hash=sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9 \ + --hash=sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4 \ + --hash=sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a \ + --hash=sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427 \ + 
--hash=sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459 \ + --hash=sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6 \ + --hash=sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208 \ + --hash=sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229 \ + --hash=sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0 \ + --hash=sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474 \ + --hash=sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817 \ + --hash=sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd \ + --hash=sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618 \ + --hash=sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5 \ + --hash=sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3 \ + --hash=sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124 \ + --hash=sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1 \ + --hash=sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb \ + --hash=sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7 \ + --hash=sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3 \ + --hash=sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375 \ + --hash=sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39 \ + --hash=sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752 \ + --hash=sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0 \ + --hash=sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188 \ + --hash=sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451 \ + --hash=sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078 \ + --hash=sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7 \ + 
--hash=sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7 \ + --hash=sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f \ + --hash=sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b \ + --hash=sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f \ + --hash=sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c \ + --hash=sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291 \ + --hash=sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897 \ + --hash=sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887 \ + --hash=sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1 \ + --hash=sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685 \ + --hash=sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf \ + --hash=sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6 \ + --hash=sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731 \ + --hash=sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507 \ + --hash=sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b \ + --hash=sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae \ + --hash=sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777 \ + --hash=sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7 \ + --hash=sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be \ + --hash=sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df \ + --hash=sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054 \ + --hash=sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2 \ + --hash=sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124 \ + --hash=sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c \ + 
--hash=sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840 \ + --hash=sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8 \ + --hash=sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd \ + --hash=sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8 \ + --hash=sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3 \ + --hash=sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e \ + --hash=sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2 \ + --hash=sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1 \ + --hash=sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad # via # aiohttp # yarl -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f +packaging==25.0 \ + --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ + --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f # via pytest pluggy==1.5.0 \ --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 # via pytest -propcache==0.2.1 \ - --hash=sha256:03ff9d3f665769b2a85e6157ac8b439644f2d7fd17615a82fa55739bc97863f4 \ - --hash=sha256:049324ee97bb67285b49632132db351b41e77833678432be52bdd0289c0e05e4 \ - --hash=sha256:081a430aa8d5e8876c6909b67bd2d937bfd531b0382d3fdedb82612c618bc41a \ - --hash=sha256:0f022d381747f0dfe27e99d928e31bc51a18b65bb9e481ae0af1380a6725dd1f \ - --hash=sha256:12d1083f001ace206fe34b6bdc2cb94be66d57a850866f0b908972f90996b3e9 \ - --hash=sha256:14d86fe14b7e04fa306e0c43cdbeebe6b2c2156a0c9ce56b815faacc193e320d \ - --hash=sha256:160291c60081f23ee43d44b08a7e5fb76681221a8e10b3139618c5a9a291b84e \ - 
--hash=sha256:1672137af7c46662a1c2be1e8dc78cb6d224319aaa40271c9257d886be4363a6 \ - --hash=sha256:19a0f89a7bb9d8048d9c4370c9c543c396e894c76be5525f5e1ad287f1750ddf \ - --hash=sha256:1ac2f5fe02fa75f56e1ad473f1175e11f475606ec9bd0be2e78e4734ad575034 \ - --hash=sha256:1cd9a1d071158de1cc1c71a26014dcdfa7dd3d5f4f88c298c7f90ad6f27bb46d \ - --hash=sha256:1ffc3cca89bb438fb9c95c13fc874012f7b9466b89328c3c8b1aa93cdcfadd16 \ - --hash=sha256:297878dc9d0a334358f9b608b56d02e72899f3b8499fc6044133f0d319e2ec30 \ - --hash=sha256:2d3af2e79991102678f53e0dbf4c35de99b6b8b58f29a27ca0325816364caaba \ - --hash=sha256:30b43e74f1359353341a7adb783c8f1b1c676367b011709f466f42fda2045e95 \ - --hash=sha256:3156628250f46a0895f1f36e1d4fbe062a1af8718ec3ebeb746f1d23f0c5dc4d \ - --hash=sha256:31f5af773530fd3c658b32b6bdc2d0838543de70eb9a2156c03e410f7b0d3aae \ - --hash=sha256:3935bfa5fede35fb202c4b569bb9c042f337ca4ff7bd540a0aa5e37131659348 \ - --hash=sha256:39d51fbe4285d5db5d92a929e3e21536ea3dd43732c5b177c7ef03f918dff9f2 \ - --hash=sha256:3f77ce728b19cb537714499928fe800c3dda29e8d9428778fc7c186da4c09a64 \ - --hash=sha256:4160d9283bd382fa6c0c2b5e017acc95bc183570cd70968b9202ad6d8fc48dce \ - --hash=sha256:4a571d97dbe66ef38e472703067021b1467025ec85707d57e78711c085984e54 \ - --hash=sha256:4e6281aedfca15301c41f74d7005e6e3f4ca143584ba696ac69df4f02f40d629 \ - --hash=sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54 \ - --hash=sha256:556fc6c10989f19a179e4321e5d678db8eb2924131e64652a51fe83e4c3db0e1 \ - --hash=sha256:574faa3b79e8ebac7cb1d7930f51184ba1ccf69adfdec53a12f319a06030a68b \ - --hash=sha256:58791550b27d5488b1bb52bc96328456095d96206a250d28d874fafe11b3dfaf \ - --hash=sha256:5b750a8e5a1262434fb1517ddf64b5de58327f1adc3524a5e44c2ca43305eb0b \ - --hash=sha256:5d97151bc92d2b2578ff7ce779cdb9174337390a535953cbb9452fb65164c587 \ - --hash=sha256:5eee736daafa7af6d0a2dc15cc75e05c64f37fc37bafef2e00d77c14171c2097 \ - --hash=sha256:6445804cf4ec763dc70de65a3b0d9954e868609e83850a47ca4f0cb64bd79fea \ - 
--hash=sha256:647894f5ae99c4cf6bb82a1bb3a796f6e06af3caa3d32e26d2350d0e3e3faf24 \ - --hash=sha256:66d4cfda1d8ed687daa4bc0274fcfd5267873db9a5bc0418c2da19273040eeb7 \ - --hash=sha256:6a9a8c34fb7bb609419a211e59da8887eeca40d300b5ea8e56af98f6fbbb1541 \ - --hash=sha256:6b3f39a85d671436ee3d12c017f8fdea38509e4f25b28eb25877293c98c243f6 \ - --hash=sha256:6b6fb63ae352e13748289f04f37868099e69dba4c2b3e271c46061e82c745634 \ - --hash=sha256:70693319e0b8fd35dd863e3e29513875eb15c51945bf32519ef52927ca883bc3 \ - --hash=sha256:781e65134efaf88feb447e8c97a51772aa75e48b794352f94cb7ea717dedda0d \ - --hash=sha256:819ce3b883b7576ca28da3861c7e1a88afd08cc8c96908e08a3f4dd64a228034 \ - --hash=sha256:857112b22acd417c40fa4595db2fe28ab900c8c5fe4670c7989b1c0230955465 \ - --hash=sha256:887d9b0a65404929641a9fabb6452b07fe4572b269d901d622d8a34a4e9043b2 \ - --hash=sha256:8b3489ff1ed1e8315674d0775dc7d2195fb13ca17b3808721b54dbe9fd020faf \ - --hash=sha256:92fc4500fcb33899b05ba73276dfb684a20d31caa567b7cb5252d48f896a91b1 \ - --hash=sha256:9403db39be1393618dd80c746cb22ccda168efce239c73af13c3763ef56ffc04 \ - --hash=sha256:98110aa363f1bb4c073e8dcfaefd3a5cea0f0834c2aab23dda657e4dab2f53b5 \ - --hash=sha256:999779addc413181912e984b942fbcc951be1f5b3663cd80b2687758f434c583 \ - --hash=sha256:9caac6b54914bdf41bcc91e7eb9147d331d29235a7c967c150ef5df6464fd1bb \ - --hash=sha256:a7a078f5d37bee6690959c813977da5291b24286e7b962e62a94cec31aa5188b \ - --hash=sha256:a7e65eb5c003a303b94aa2c3852ef130230ec79e349632d030e9571b87c4698c \ - --hash=sha256:a96dc1fa45bd8c407a0af03b2d5218392729e1822b0c32e62c5bf7eeb5fb3958 \ - --hash=sha256:aca405706e0b0a44cc6bfd41fbe89919a6a56999157f6de7e182a990c36e37bc \ - --hash=sha256:accb6150ce61c9c4b7738d45550806aa2b71c7668c6942f17b0ac182b6142fd4 \ - --hash=sha256:ad1af54a62ffe39cf34db1aa6ed1a1873bd548f6401db39d8e7cd060b9211f82 \ - --hash=sha256:ae1aa1cd222c6d205853b3013c69cd04515f9d6ab6de4b0603e2e1c33221303e \ - --hash=sha256:b2d0a12018b04f4cb820781ec0dffb5f7c7c1d2a5cd22bff7fb055a2cb19ebce \ - 
--hash=sha256:b480c6a4e1138e1aa137c0079b9b6305ec6dcc1098a8ca5196283e8a49df95a9 \ - --hash=sha256:b74c261802d3d2b85c9df2dfb2fa81b6f90deeef63c2db9f0e029a3cac50b518 \ - --hash=sha256:ba278acf14471d36316159c94a802933d10b6a1e117b8554fe0d0d9b75c9d536 \ - --hash=sha256:bb6178c241278d5fe853b3de743087be7f5f4c6f7d6d22a3b524d323eecec505 \ - --hash=sha256:bf72af5e0fb40e9babf594308911436c8efde3cb5e75b6f206c34ad18be5c052 \ - --hash=sha256:bfd3223c15bebe26518d58ccf9a39b93948d3dcb3e57a20480dfdd315356baff \ - --hash=sha256:c214999039d4f2a5b2073ac506bba279945233da8c786e490d411dfc30f855c1 \ - --hash=sha256:c2f992c07c0fca81655066705beae35fc95a2fa7366467366db627d9f2ee097f \ - --hash=sha256:cba4cfa1052819d16699e1d55d18c92b6e094d4517c41dd231a8b9f87b6fa681 \ - --hash=sha256:cea7daf9fc7ae6687cf1e2c049752f19f146fdc37c2cc376e7d0032cf4f25347 \ - --hash=sha256:cf6c4150f8c0e32d241436526f3c3f9cbd34429492abddbada2ffcff506c51af \ - --hash=sha256:d09c333d36c1409d56a9d29b3a1b800a42c76a57a5a8907eacdbce3f18768246 \ - --hash=sha256:d27b84d5880f6d8aa9ae3edb253c59d9f6642ffbb2c889b78b60361eed449787 \ - --hash=sha256:d2ccec9ac47cf4e04897619c0e0c1a48c54a71bdf045117d3a26f80d38ab1fb0 \ - --hash=sha256:d71264a80f3fcf512eb4f18f59423fe82d6e346ee97b90625f283df56aee103f \ - --hash=sha256:d93f3307ad32a27bda2e88ec81134b823c240aa3abb55821a8da553eed8d9439 \ - --hash=sha256:d9631c5e8b5b3a0fda99cb0d29c18133bca1e18aea9effe55adb3da1adef80d3 \ - --hash=sha256:ddfab44e4489bd79bda09d84c430677fc7f0a4939a73d2bba3073036f487a0a6 \ - --hash=sha256:e7048abd75fe40712005bcfc06bb44b9dfcd8e101dda2ecf2f5aa46115ad07ca \ - --hash=sha256:e73091191e4280403bde6c9a52a6999d69cdfde498f1fdf629105247599b57ec \ - --hash=sha256:e800776a79a5aabdb17dcc2346a7d66d0777e942e4cd251defeb084762ecd17d \ - --hash=sha256:edc9fc7051e3350643ad929df55c451899bb9ae6d24998a949d2e4c87fb596d3 \ - --hash=sha256:f089118d584e859c62b3da0892b88a83d611c2033ac410e929cb6754eec0ed16 \ - --hash=sha256:f174bbd484294ed9fdf09437f889f95807e5f229d5d93588d34e92106fbf6717 \ - 
--hash=sha256:f508b0491767bb1f2b87fdfacaba5f7eddc2f867740ec69ece6d1946d29029a6 \ - --hash=sha256:f7a31fc1e1bd362874863fdeed71aed92d348f5336fd84f2197ba40c59f061bd \ - --hash=sha256:f9479aa06a793c5aeba49ce5c5692ffb51fcd9a7016e017d555d5e2b0045d212 +propcache==0.3.1 \ + --hash=sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e \ + --hash=sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b \ + --hash=sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf \ + --hash=sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b \ + --hash=sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5 \ + --hash=sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c \ + --hash=sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c \ + --hash=sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a \ + --hash=sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf \ + --hash=sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8 \ + --hash=sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5 \ + --hash=sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42 \ + --hash=sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035 \ + --hash=sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0 \ + --hash=sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e \ + --hash=sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46 \ + --hash=sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d \ + --hash=sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24 \ + --hash=sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d \ + --hash=sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de \ + 
--hash=sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf \ + --hash=sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7 \ + --hash=sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371 \ + --hash=sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833 \ + --hash=sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259 \ + --hash=sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136 \ + --hash=sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25 \ + --hash=sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005 \ + --hash=sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef \ + --hash=sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7 \ + --hash=sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f \ + --hash=sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53 \ + --hash=sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0 \ + --hash=sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb \ + --hash=sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566 \ + --hash=sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a \ + --hash=sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908 \ + --hash=sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf \ + --hash=sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458 \ + --hash=sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64 \ + --hash=sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9 \ + --hash=sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71 \ + --hash=sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b \ + --hash=sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5 \ + 
--hash=sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037 \ + --hash=sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5 \ + --hash=sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894 \ + --hash=sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe \ + --hash=sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757 \ + --hash=sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3 \ + --hash=sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976 \ + --hash=sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6 \ + --hash=sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641 \ + --hash=sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7 \ + --hash=sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649 \ + --hash=sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120 \ + --hash=sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd \ + --hash=sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40 \ + --hash=sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e \ + --hash=sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229 \ + --hash=sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c \ + --hash=sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7 \ + --hash=sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111 \ + --hash=sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654 \ + --hash=sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f \ + --hash=sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294 \ + --hash=sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da \ + --hash=sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f \ + 
--hash=sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7 \ + --hash=sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0 \ + --hash=sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073 \ + --hash=sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7 \ + --hash=sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11 \ + --hash=sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f \ + --hash=sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27 \ + --hash=sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70 \ + --hash=sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7 \ + --hash=sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519 \ + --hash=sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5 \ + --hash=sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180 \ + --hash=sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f \ + --hash=sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee \ + --hash=sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18 \ + --hash=sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815 \ + --hash=sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e \ + --hash=sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a \ + --hash=sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7 \ + --hash=sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6 \ + --hash=sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c \ + --hash=sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc \ + --hash=sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8 \ + --hash=sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98 \ + 
--hash=sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256 \ + --hash=sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5 \ + --hash=sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744 \ + --hash=sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723 \ + --hash=sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277 \ + --hash=sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5 # via # aiohttp # yarl -proto-plus==1.26.0 \ - --hash=sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22 \ - --hash=sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7 +proto-plus==1.26.1 \ + --hash=sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66 \ + --hash=sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012 # via # -r requirements.in # google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 +protobuf==6.30.2 \ + --hash=sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b \ + 
--hash=sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048 \ + --hash=sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d \ + --hash=sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815 \ + --hash=sha256:524afedc03b31b15586ca7f64d877a98b184f007180ce25183d1a5cb230ee72b \ + --hash=sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9 \ + --hash=sha256:acec579c39c88bd8fbbacab1b8052c793efe83a0a5bd99db4a31423a25c0a0e2 \ + --hash=sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51 \ + --hash=sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103 # via # -r requirements.in # google-api-core @@ -728,21 +762,21 @@ pyasn1==0.6.1 \ # via # pyasn1-modules # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c +pyasn1-modules==0.4.2 \ + --hash=sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a \ + --hash=sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6 # via google-auth pypandoc==1.15 \ --hash=sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16 \ --hash=sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13 # via -r requirements.in -pytest==8.3.4 \ - --hash=sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6 \ - --hash=sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761 +pytest==8.3.5 \ + --hash=sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820 \ + --hash=sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845 # via pytest-asyncio -pytest-asyncio==0.25.3 \ - --hash=sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3 \ - --hash=sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a +pytest-asyncio==0.26.0 \ + 
--hash=sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0 \ + --hash=sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f # via -r requirements.in pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ @@ -805,9 +839,9 @@ requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via google-api-core -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 +rsa==4.9.1 \ + --hash=sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762 \ + --hash=sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75 # via google-auth tomli==2.2.1 \ --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ @@ -843,95 +877,117 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via pytest -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 +typing-extensions==4.13.2 \ + --hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \ + --hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef # via multidict -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d +urllib3==2.4.0 \ + --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \ + --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813 # via requests -yarl==1.18.3 \ - 
--hash=sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba \ - --hash=sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193 \ - --hash=sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318 \ - --hash=sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee \ - --hash=sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e \ - --hash=sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1 \ - --hash=sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a \ - --hash=sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186 \ - --hash=sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1 \ - --hash=sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50 \ - --hash=sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640 \ - --hash=sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb \ - --hash=sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8 \ - --hash=sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc \ - --hash=sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5 \ - --hash=sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58 \ - --hash=sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2 \ - --hash=sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393 \ - --hash=sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24 \ - --hash=sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b \ - --hash=sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910 \ - --hash=sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c \ - --hash=sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272 \ - --hash=sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed \ - 
--hash=sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1 \ - --hash=sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04 \ - --hash=sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d \ - --hash=sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5 \ - --hash=sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d \ - --hash=sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889 \ - --hash=sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae \ - --hash=sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b \ - --hash=sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c \ - --hash=sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576 \ - --hash=sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34 \ - --hash=sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477 \ - --hash=sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990 \ - --hash=sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2 \ - --hash=sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512 \ - --hash=sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069 \ - --hash=sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a \ - --hash=sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6 \ - --hash=sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0 \ - --hash=sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8 \ - --hash=sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb \ - --hash=sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa \ - --hash=sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8 \ - --hash=sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e \ - 
--hash=sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e \ - --hash=sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985 \ - --hash=sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8 \ - --hash=sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1 \ - --hash=sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5 \ - --hash=sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690 \ - --hash=sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10 \ - --hash=sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789 \ - --hash=sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b \ - --hash=sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca \ - --hash=sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e \ - --hash=sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5 \ - --hash=sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59 \ - --hash=sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9 \ - --hash=sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8 \ - --hash=sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db \ - --hash=sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde \ - --hash=sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7 \ - --hash=sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb \ - --hash=sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3 \ - --hash=sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6 \ - --hash=sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285 \ - --hash=sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb \ - --hash=sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8 \ - 
--hash=sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482 \ - --hash=sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd \ - --hash=sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75 \ - --hash=sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760 \ - --hash=sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782 \ - --hash=sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53 \ - --hash=sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2 \ - --hash=sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1 \ - --hash=sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719 \ - --hash=sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62 +yarl==1.20.0 \ + --hash=sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9 \ + --hash=sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa \ + --hash=sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61 \ + --hash=sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2 \ + --hash=sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2 \ + --hash=sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33 \ + --hash=sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902 \ + --hash=sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2 \ + --hash=sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914 \ + --hash=sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0 \ + --hash=sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0 \ + --hash=sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569 \ + --hash=sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f \ + 
--hash=sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7 \ + --hash=sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20 \ + --hash=sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00 \ + --hash=sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1 \ + --hash=sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc \ + --hash=sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f \ + --hash=sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a \ + --hash=sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd \ + --hash=sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c \ + --hash=sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f \ + --hash=sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5 \ + --hash=sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d \ + --hash=sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501 \ + --hash=sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3 \ + --hash=sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0 \ + --hash=sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26 \ + --hash=sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2 \ + --hash=sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c \ + --hash=sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c \ + --hash=sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae \ + --hash=sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de \ + --hash=sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a \ + --hash=sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe \ + --hash=sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8 \ + 
--hash=sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124 \ + --hash=sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb \ + --hash=sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc \ + --hash=sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2 \ + --hash=sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac \ + --hash=sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307 \ + --hash=sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58 \ + --hash=sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e \ + --hash=sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e \ + --hash=sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62 \ + --hash=sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b \ + --hash=sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe \ + --hash=sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda \ + --hash=sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5 \ + --hash=sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64 \ + --hash=sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229 \ + --hash=sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62 \ + --hash=sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6 \ + --hash=sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d \ + --hash=sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791 \ + --hash=sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672 \ + --hash=sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb \ + --hash=sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94 \ + --hash=sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a \ + 
--hash=sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10 \ + --hash=sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e \ + --hash=sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9 \ + --hash=sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5 \ + --hash=sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da \ + --hash=sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c \ + --hash=sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a \ + --hash=sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d \ + --hash=sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195 \ + --hash=sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594 \ + --hash=sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8 \ + --hash=sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634 \ + --hash=sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051 \ + --hash=sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8 \ + --hash=sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e \ + --hash=sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384 \ + --hash=sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076 \ + --hash=sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656 \ + --hash=sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018 \ + --hash=sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19 \ + --hash=sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a \ + --hash=sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4 \ + --hash=sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2 \ + --hash=sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64 \ + 
--hash=sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145 \ + --hash=sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7 \ + --hash=sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995 \ + --hash=sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6 \ + --hash=sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f \ + --hash=sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f \ + --hash=sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487 \ + --hash=sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9 \ + --hash=sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a \ + --hash=sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d \ + --hash=sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f \ + --hash=sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1 \ + --hash=sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22 \ + --hash=sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d \ + --hash=sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c \ + --hash=sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877 \ + --hash=sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5 \ + --hash=sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867 \ + --hash=sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3 # via aiohttp From 9ae7a3a1bbcdfba793efd867022db03cb0dd577c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 7 May 2025 00:19:42 +0000 Subject: [PATCH 1285/1339] chore(main): release 1.25.0 (#2387) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- 
packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 1ca988f0054e..86c1f6b164d5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.25.0](https://github.com/googleapis/gapic-generator-python/compare/v1.24.1...v1.25.0) (2025-05-06) + + +### Features + +* Add protobuf runtime version to `x-goog-api-client` header ([#2368](https://github.com/googleapis/gapic-generator-python/issues/2368)) ([9c05dbe](https://github.com/googleapis/gapic-generator-python/commit/9c05dbeb9285919f45924be09c445b707cb85b54)) + ## [1.24.1](https://github.com/googleapis/gapic-generator-python/compare/v1.24.0...v1.24.1) (2025-04-17) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 803600ae8193..d5ed1c93249c 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.24.1" +version = "1.25.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 9dfe21e721e92656a89dbb6f83e7883f24d201bb Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 9 May 2025 10:34:03 -0400 Subject: [PATCH 1286/1339] feat: add client debug logging support for server side streaming REST calls (#2340) Co-authored-by: Victor Chudnovsky --- .../%sub/services/%service/transports/rest.py.j2 | 12 +++++++++--- .../services/%service/transports/rest_asyncio.py.j2 | 12 +++++++++--- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 4e69136b461e..825756eb54dc 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -262,17 +262,24 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): resp = self._interceptor.post_{{ method.name|snake_case }}(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. #} - {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2389): #} + {# Depending how we want to log (a) receiving a streaming response vs (b) exposing the next streamed item to the user, we could possibly want to log something here #} + {# (a) should always happen in api-core #} + {# (b) could happen in api-core, or it could happen here when we iterate to the next streamed item that was previously received. #} + {% if not method.server_streaming %} try: response_payload = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(resp){% endif %} except: {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. 
#} response_payload = None + {% endif %}{# if not method.server_streaming #} http_response = { + {# Not logging response payload for server streaming here. See comment above. #} + {% if not method.server_streaming %} "payload": response_payload, + {% endif %}{# if not method.server_streaming #} "headers": dict(response.headers), "status": response.status_code, } @@ -286,7 +293,6 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): "httpResponse": http_response, }, ) - {% endif %}{# if not method.server_streaming #} return resp {% endif %}{# method.void #} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 3de616d0221a..1d6ec8737459 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -222,17 +222,24 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) - {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2279): Add logging support for rest streaming. 
#} - {% if not method.server_streaming %} if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2389): #} + {# Depending how we want to log (a) receiving a streaming response vs (b) exposing the next streamed item to the user, we could possibly want to log something here #} + {# (a) should always happen in api-core #} + {# (b) could happen in api-core, or it could happen here when we iterate to the next streamed item that was previously received. #} + {% if not method.server_streaming %} try: response_payload = {% if method.output.ident.is_proto_plus_type %}{{ method.output.ident }}.to_json(response){% else %}json_format.MessageToJson(resp){% endif %} except: {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2283): Remove try/except once unit tests are updated. #} response_payload = None + {% endif %}{# if not method.server_streaming #} http_response = { + {# Not logging response payload for server streaming here. See comment above. 
#} + {% if not method.server_streaming %} "payload": response_payload, + {% endif %}{# if not method.server_streaming #} "headers": dict(response.headers), "status": "OK", # need to obtain this properly } @@ -246,7 +253,6 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): }, ) - {% endif %}{# if not method.server_streaming #} return resp {% endif %}{# method.void #} From ab4cc4aefc798194ca445b64a98892c12639500b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 9 May 2025 11:18:23 -0400 Subject: [PATCH 1287/1339] build: update docs build (#2339) Co-authored-by: Owl Bot --- .../.github/workflows/tests.yaml | 15 ++++ .../gapic/templates/noxfile.py.j2 | 7 +- packages/gapic-generator/noxfile.py | 69 ++++++++++++++++--- packages/gapic-generator/owlbot.py | 2 +- .../integration/goldens/asset/noxfile.py | 7 +- .../goldens/credentials/noxfile.py | 7 +- .../integration/goldens/eventarc/noxfile.py | 7 +- .../integration/goldens/logging/noxfile.py | 7 +- .../goldens/logging_internal/noxfile.py | 7 +- .../integration/goldens/redis/noxfile.py | 7 +- .../goldens/redis_selective/noxfile.py | 7 +- 11 files changed, 99 insertions(+), 43 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 5b2efc098d5a..2e6b314b264e 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -33,6 +33,21 @@ jobs: run: python -m pip install nox - name: Build the documentation. run: nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 + # Use python 3.10 for docs to match the version for the sphinx plugin + # https://github.com/googleapis/synthtool/pull/1891 + with: + python-version: "3.10" + cache: 'pip' + - name: Install nox. + run: python -m pip install nox + - name: Build the documentation. 
+ run: nox -s docfx mypy: strategy: matrix: diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 35a19afdda46..6408f306dcb7 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -370,10 +370,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 117ca47c5e70..bb858fcda603 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -630,7 +630,9 @@ def snippetgen(session): @nox.session(python="3.10") def docs(session): - """Build the docs.""" + """Build the docs for this generator.""" + + session.install("-e", ".") session.install( # We need to pin to specific versions of the `sphinxcontrib-*` packages @@ -643,21 +645,68 @@ def docs(session): "sphinxcontrib-qthelp==1.0.3", "sphinxcontrib-serializinghtml==1.1.5", "sphinx==4.5.0", - "sphinx_rtd_theme", + "sphinx-rtd-theme", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", # builder + "-d", + os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the 
`sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "sphinx-rtd-theme", ) - session.install(".") - # Build the docs! - session.run("rm", "-rf", "docs/_build/") + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", + "-T", # show full traceback on exception + "-N", # no colors + "-D", # Override configuration values set in the conf.py file + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", - "html", + "html", # builder "-d", - "docs/_build/doctrees", - "docs/", - "docs/_build/html/", + os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py index 814c6732573d..43ea9f15fe29 100644 --- a/packages/gapic-generator/owlbot.py +++ b/packages/gapic-generator/owlbot.py @@ -18,7 +18,7 @@ templated_files = CommonTemplates().py_library() s.move( templated_files / ".kokoro", - excludes=["samples/**/*", "test-samples*", "publish-docs.sh", "*/prerelease-deps.cfg"], + excludes=["samples/**/*", "test-samples*", "*/prerelease-deps.cfg"], ) # needed for docs build diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 49691dd51e79..4efb1b1161eb 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 442d98c88f8f..3aea2c0c1b6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index f7d75738e40d..ba8fbf5fc254 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), 
os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 18f1f2bc16f4..2e9dd52f17e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index 18f1f2bc16f4..2e9dd52f17e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 89ab6bcec931..57680446610c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback 
on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index 89ab6bcec931..57680446610c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -364,10 +364,9 @@ def docs(session): "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), + "-b", "html", # builder + "-d", os.path.join("docs", "_build", "doctrees", ""), # cache directory + # paths to build: os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) From 09ba01e4cdf6f36f6964bcceeb35cbe845f6a45a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 12 May 2025 16:57:00 +0200 Subject: [PATCH 1288/1339] chore(deps): update all dependencies (#2391) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 4352428cdbcb..97ad1022f8ff 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -205,13 +205,13 @@ charset-normalizer==3.4.2 \ --hash=sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a \ --hash=sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - 
--hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a +click==8.2.0 \ + --hash=sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c \ + --hash=sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d # via -r requirements.in -exceptiongroup==1.2.2 \ - --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ - --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc +exceptiongroup==1.3.0 \ + --hash=sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10 \ + --hash=sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88 # via pytest frozenlist==1.6.0 \ --hash=sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117 \ @@ -325,9 +325,9 @@ google-api-core==2.24.2 \ --hash=sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9 \ --hash=sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696 # via -r requirements.in -google-auth==2.40.0 \ - --hash=sha256:c277cf39f7c192d8540eb6331c08b5a0796e8041af8343ae73dd6b269732ca6c \ - --hash=sha256:dc3a5078acb1043c3e43685c22d628afe40af8559cf561de388e0c939280fcc8 +google-auth==2.40.1 \ + --hash=sha256:58f0e8416a9814c1d86c9b7f6acf6816b51aba167b2c76821965271bac275540 \ + --hash=sha256:ed4cae4f5c46b41bae1d19c036e06f6c371926e97b19e816fc854eff811974ee # via google-api-core googleapis-common-protos[grpc]==1.70.0 \ --hash=sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257 \ From 1c2b69da0068649f1b674785d6def47d9c7febfb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 19 May 2025 12:29:52 +0200 Subject: [PATCH 1289/1339] chore(deps): update all dependencies (#2392) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 26 +++++++++++------------ 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 
97ad1022f8ff..d79b068ebf4a 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -628,9 +628,9 @@ packaging==25.0 \ --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f # via pytest -pluggy==1.5.0 \ - --hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \ - --hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669 +pluggy==1.6.0 \ + --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ + --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 # via pytest propcache==0.3.1 \ --hash=sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e \ @@ -740,16 +740,16 @@ proto-plus==1.26.1 \ # via # -r requirements.in # google-api-core -protobuf==6.30.2 \ - --hash=sha256:0eb523c550a66a09a0c20f86dd554afbf4d32b02af34ae53d93268c1f73bc65b \ - --hash=sha256:35c859ae076d8c56054c25b59e5e59638d86545ed6e2b6efac6be0b6ea3ba048 \ - --hash=sha256:4f6c687ae8efae6cf6093389a596548214467778146b7245e886f35e1485315d \ - --hash=sha256:50f32cc9fd9cb09c783ebc275611b4f19dfdfb68d1ee55d2f0c7fa040df96815 \ - --hash=sha256:524afedc03b31b15586ca7f64d877a98b184f007180ce25183d1a5cb230ee72b \ - --hash=sha256:7653c99774f73fe6b9301b87da52af0e69783a2e371e8b599b3e9cb4da4b12b9 \ - --hash=sha256:acec579c39c88bd8fbbacab1b8052c793efe83a0a5bd99db4a31423a25c0a0e2 \ - --hash=sha256:ae86b030e69a98e08c77beab574cbcb9fff6d031d57209f574a5aea1445f4b51 \ - --hash=sha256:b12ef7df7b9329886e66404bef5e9ce6a26b54069d7f7436a0853ccdeb91c103 +protobuf==6.31.0 \ + --hash=sha256:00a873c06efdfb854145d9ded730b09cf57d206075c38132674093370e2edabb \ + --hash=sha256:10bd62802dfa0588649740a59354090eaf54b8322f772fbdcca19bc78d27f0d6 \ + --hash=sha256:2c812f0f96ceb6b514448cefeb1df54ec06dde456783f5099c0e2f8a0f2caa89 \ + 
--hash=sha256:314fab1a6a316469dc2dd46f993cbbe95c861ea6807da910becfe7475bc26ffe \ + --hash=sha256:3e987c99fd634be8347246a02123250f394ba20573c953de133dc8b2c107dd71 \ + --hash=sha256:5353e38844168a327acd2b2aa440044411cd8d1b6774d5701008bd1dba067c79 \ + --hash=sha256:67ce50195e4e584275623b8e6bc6d3d3dfd93924bf6116b86b3b8975ab9e4571 \ + --hash=sha256:6ac2e82556e822c17a8d23aa1190bbc1d06efb9c261981da95c71c9da09e9e23 \ + --hash=sha256:96d8da25c83b11db5fe9e0376351ce25e7205e13224d939e097b6f82a72af824 # via # -r requirements.in # google-api-core From a46fcdb4de527c2fe7ef44a987724ad97f9e429a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 28 May 2025 11:10:32 -0400 Subject: [PATCH 1290/1339] chore: use yaml.safe_load (#2398) --- packages/gapic-generator/gapic/utils/options.py | 2 +- packages/gapic-generator/noxfile.py | 8 ++++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index ef6497e39a38..af2677630cae 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -147,7 +147,7 @@ def tweak_path(p): if service_yaml_paths: # Just use the last file specified. with open(service_yaml_paths[-1]) as f: - service_yaml_config = yaml.load(f, Loader=yaml.Loader) + service_yaml_config = yaml.safe_load(f) # The yaml service files typically have this field, # but it is not a field in the gogle.api.Service proto. 
service_yaml_config.pop("type", None) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index bb858fcda603..7b686cb7db20 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -392,7 +392,9 @@ def showcase( """Run the Showcase test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): - session.install("pytest", "pytest-asyncio") + # Exclude pytest-asyncio==1.0.0 while we investigate the recent failure described in + # https://github.com/googleapis/gapic-generator-python/issues/2399 + session.install("pytest", "pytest-asyncio!=1.0.0") test_directory = Path("tests", "system") ignore_file = env.get("IGNORE_FILE") pytest_command = [ @@ -422,7 +424,9 @@ def showcase_w_rest_async( with showcase_library( session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True ): - session.install("pytest", "pytest-asyncio") + # Exclude pytest-asyncio==1.0.0 while we investigate the recent failure described in + # https://github.com/googleapis/gapic-generator-python/issues/2399 + session.install("pytest", "pytest-asyncio!=1.0.0") test_directory = Path("tests", "system") ignore_file = env.get("IGNORE_FILE") pytest_command = [ From 4532b1f5f7f80b1a3c6934265c8fbd74128b9e94 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 28 May 2025 14:32:20 -0400 Subject: [PATCH 1291/1339] chore: remove unused scripts (#2400) Co-authored-by: Owl Bot --- packages/gapic-generator/.dockerignore | 4 - packages/gapic-generator/Dockerfile | 20 --- packages/gapic-generator/docker-entrypoint.sh | 50 ------- .../docs/getting-started/_verifying.rst | 4 +- .../docs/getting-started/docker-shortcut.rst | 28 ---- .../docs/getting-started/docker.rst | 139 ------------------ .../docs/getting-started/index.rst | 18 +-- .../docs/getting-started/local.rst | 6 +- packages/gapic-generator/docs/templates.rst | 28 ---- packages/gapic-generator/gapic.sh | 105 ------------- 10 files 
changed, 4 insertions(+), 398 deletions(-) delete mode 100644 packages/gapic-generator/Dockerfile delete mode 100755 packages/gapic-generator/docker-entrypoint.sh delete mode 100644 packages/gapic-generator/docs/getting-started/docker-shortcut.rst delete mode 100644 packages/gapic-generator/docs/getting-started/docker.rst delete mode 100755 packages/gapic-generator/gapic.sh diff --git a/packages/gapic-generator/.dockerignore b/packages/gapic-generator/.dockerignore index 67cb2311afa1..1fefa15a3bba 100644 --- a/packages/gapic-generator/.dockerignore +++ b/packages/gapic-generator/.dockerignore @@ -2,10 +2,6 @@ .git .gitignore -# Docker scaffolding -Dockerfile -.dockerignore - # Python scaffolding *.py[cod] *.egg diff --git a/packages/gapic-generator/Dockerfile b/packages/gapic-generator/Dockerfile deleted file mode 100644 index 71a201f1b97f..000000000000 --- a/packages/gapic-generator/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -FROM python:3.13-slim - -# Install system packages. -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - pandoc \ - && rm -rf /var/lib/apt/lists/* - -# Add protoc and our common protos. -COPY --from=gcr.io/gapic-images/api-common-protos:latest /usr/local/bin/protoc /usr/local/bin/protoc -COPY --from=gcr.io/gapic-images/api-common-protos:latest /protos/ /protos/ - -# Add our code to the Docker image. -ADD . /usr/src/gapic-generator-python/ - -# Install the tool within the image. -RUN pip install /usr/src/gapic-generator-python - -# Define the generator as an entry point. 
-ENTRYPOINT ["/usr/src/gapic-generator-python/docker-entrypoint.sh"] diff --git a/packages/gapic-generator/docker-entrypoint.sh b/packages/gapic-generator/docker-entrypoint.sh deleted file mode 100755 index 944ab9297ca6..000000000000 --- a/packages/gapic-generator/docker-entrypoint.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -PLUGIN_OPTIONS="" - -# Parse out options. -while [ -n "$1" ]; do - case "$1" in - -- ) - shift - break - ;; - * ) - # If this switch begins with "--python-gapic-" or "--gapic-", then it is - # meant for us. - if [[ $1 == --python-gapic-* ]]; then - PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" - shift 2 - elif [[ $1 == --gapic-* ]]; then - PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" - shift 2 - elif [[ $1 == --samples* ]]; then - # --samples is a special option that all generators should recognize. - PLUGIN_OPTIONS="$PLUGIN_OPTIONS,$1=$2" - shift 2 - else - # Ignore anything we do not recognize. 
- shift - fi - ;; - esac -done - -protoc --proto_path=/protos/ --proto_path=/in/ \ - --experimental_allow_proto3_optional \ - --python_gapic_out=/out/ \ - --python_gapic_opt=${PLUGIN_OPTIONS:1} \ - `find /in/ -name *.proto` diff --git a/packages/gapic-generator/docs/getting-started/_verifying.rst b/packages/gapic-generator/docs/getting-started/_verifying.rst index 3b25c4537ead..325090ed1aa3 100644 --- a/packages/gapic-generator/docs/getting-started/_verifying.rst +++ b/packages/gapic-generator/docs/getting-started/_verifying.rst @@ -1,8 +1,8 @@ Verifying the Library --------------------- -Once you have compiled a client library, whether using a Docker image, -local installation or bazel, it is time for the fun part: actually running it! +Once you have compiled a client library, whether using a local installation +or bazel, it is time for the fun part: actually running it! Create a virtual environment for the library: diff --git a/packages/gapic-generator/docs/getting-started/docker-shortcut.rst b/packages/gapic-generator/docs/getting-started/docker-shortcut.rst deleted file mode 100644 index 88f3ef80967c..000000000000 --- a/packages/gapic-generator/docs/getting-started/docker-shortcut.rst +++ /dev/null @@ -1,28 +0,0 @@ -:orphan: - -.. _docker-shortcut: - -Docker Shortcut Script ----------------------- - -Because code generation requires two mounts from the host machine into -the Docker image, and because the paths are somewhat pedantic, you may -find this shortcut script to be handy: - -.. literalinclude:: ../../gapic.sh - :language: shell - -Place it somewhere on your system, marked executable. - -Once available, it can be invoked using: - -.. code-block:: shell - - # This is assumed to be from the "proto root" directory. - $ gapic.sh --image gcr.io/gapic-images/gapic-generator-python \ - --in path/to/src/protos/ - --out dest/ - - -It will work not only with the Python code generator, but all of our code -generators that implement this Docker interface. 
diff --git a/packages/gapic-generator/docs/getting-started/docker.rst b/packages/gapic-generator/docs/getting-started/docker.rst deleted file mode 100644 index e7600974c4b0..000000000000 --- a/packages/gapic-generator/docs/getting-started/docker.rst +++ /dev/null @@ -1,139 +0,0 @@ -.. _getting-started/docker: - -Docker Image -============ - -If you are just getting started with code generation for protobuf-based APIs, -or if you do not have a robust Python environment already available, we -recommend using our `Docker`_ image to build client libraries. - -However, this tool offers first-class support for local execution using -protoc: :ref:`getting-started/local`. It is still reasonably easy, but -initial setup will take a bit longer. - -.. note:: - - If you are interested in contributing, using a local installation - is recommended. - -.. _Docker: https://docker.com/ - - -Installing ----------- - -Docker -~~~~~~ - -In order to use a Docker image, you must have `Docker`_ installed. -Docker is a container management service, and is available on Linux, Mac, -and Windows (although most of these instructions will be biased toward -Linux and Mac). - -Install Docker according to their `installation instructions`_. - -.. note:: - - This image requires Docker 17.05 or later. - -.. _installation instructions: https://docs.docker.com/install/ - -Pull the Docker Image -~~~~~~~~~~~~~~~~~~~~~ - -Once Docker is installed, simply pull the Docker image for this tool: - -.. parsed-literal:: - - $ docker pull gcr.io/gapic-images/gapic-generator-python:\ |version|\ - - -Usage ------ - -.. include:: _usage_intro.rst - -Example -~~~~~~~ - -.. include:: _example.rst - - -Compiling an API -~~~~~~~~~~~~~~~~ - -.. note:: - - If you are running code generation repeatedly, executing the - long ``docker run`` command may be cumbersome. While you should ensure - you understand this section, a :ref:`shortcut script` - is available to make iterative work easier. 
- -Compile the API into a client library by invoking the Docker image. - -It is worth noting that the image must interact with the host machine -(your local machine) for two things: reading in the protos you wish to compile, -and writing the output. This means that when you run the image, two mount -points are required in order for anything useful to happen. - -In particular, the input protos are expected to be mounted into ``/in/``, -and the desired output location is expected to be mounted into ``/out/``. -The output directory must also be writable. - -.. note:: - - The ``/in/`` and ``/out/`` directories inside the image are - hard-coded; they can not be altered where they appear in the command - below. - -Docker requires the output directory to pre-exist; create a directory where -you want the generated code to go: - -.. code-block:: sh - - $ mkdir dest/ - -Perform the actual code generation step with ``docker run``: - -.. code-block:: shell - - # This is assumed to be run from the `googleapis` project root. - $ docker run \ - --mount type=bind,source=$(pwd)/google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly \ - --mount type=bind,source=$(pwd)/dest/,destination=/out/ \ - --rm \ - --user $UID \ - gcr.io/gapic-images/gapic-generator-python - -.. warning:: - - ``protoc`` is *very* picky about paths, and the exact construction here - matters a lot. The source is ``google/cloud/vision/v1/``, and then - the destination is that full directory path after the ``/in/`` root; - therefore: ``/in/google/cloud/vision/v1/``. - - This matters because of how proto imports are resolved. The ``import`` - statement imports a *file*, relative to a base directory or set of - base directories, called the ``proto_path``. This is assumed - (and hard-coded) to ``/in/`` in the Docker image, and so any directory - structure present in the imports of the proto files must be preserved - beneath this for compilation to succeed. - -.. include:: _samplegen.rst - -.. 
code-block:: shell - - # Multiple sample paths or directories can be passed simultaneously by duplicating - # the 'samples' option. - # If no 'samples' option is passed, the generator does not generate a manifest. - $ docker run \ - --mount type=bind,source=$(pwd)/path/to/proto/dir,destination=/in/path/to/proto,readonly \ - --mount type=bind,source=$(pwd)/dest/,destination=/out/ \ - --rm \ - --user $UID \ - gcr.io/gapic-images/gapic-generator-python \ - --samples path/to/sample/config.yaml \ - --samples path/to/sample/dir/ - - -.. include:: _verifying.rst diff --git a/packages/gapic-generator/docs/getting-started/index.rst b/packages/gapic-generator/docs/getting-started/index.rst index 4d4893613911..6f288f6bb5c2 100644 --- a/packages/gapic-generator/docs/getting-started/index.rst +++ b/packages/gapic-generator/docs/getting-started/index.rst @@ -5,29 +5,13 @@ This code generator is implemented as a plugin to ``protoc``, the compiler for `protocol buffers`_, and will run in any environment that Python 3.7+ and protocol buffers do. -Because dependency management and such can be a significant undertaking, we -offer a Docker image and interface which requires you only to have Docker -installed and provide the protos for your API. Alternatively, the generator is -also invocable via bazel rules. +It is recommended to install the tool locally and run it through ``protoc``. -It is also possible to install the tool locally and run it through ``protoc``, -and this approach is fully supported. - -.. note:: - - The Docker approach is recommended for users new to this ecosystem, or - those which do not have a robust Python environment available. If you want - to experiment with generating client libraries but do not want to make - changes to the generator itself, try the Docker image first. - - The bazel approach is recommended for established pipelines. It is more - lightweight than the Docker image but may take some more effort to set up. .. 
_protocol buffers: https://developers.google.com/protocol-buffers/ .. toctree:: :maxdepth: 4 - docker local bazel diff --git a/packages/gapic-generator/docs/getting-started/local.rst b/packages/gapic-generator/docs/getting-started/local.rst index db474e3a2d4e..e86ba51797d5 100644 --- a/packages/gapic-generator/docs/getting-started/local.rst +++ b/packages/gapic-generator/docs/getting-started/local.rst @@ -3,11 +3,7 @@ Local Installation ================== -If you are just getting started with code generation for protobuf-based APIs, -or if you do not have a robust Python environment already available, it is -probably easier to get started using Docker: :ref:`getting-started/docker` - -However, this tool offers first-class support for local execution using +This tool offers first-class support for local execution using ``protoc``. It is still reasonably easy, but initial setup will take a bit longer. diff --git a/packages/gapic-generator/docs/templates.rst b/packages/gapic-generator/docs/templates.rst index 6cba34941333..09807ad9c7d4 100644 --- a/packages/gapic-generator/docs/templates.rst +++ b/packages/gapic-generator/docs/templates.rst @@ -130,33 +130,5 @@ provided by this library, use the special `DEFAULT` string: --python_gapic_opt="python-gapic-templates=/path/to/templates" --python_gapic_opt="python-gapic-templates=DEFAULT" -Building with Docker -~~~~~~~~~~~~~~~~~~~~ - -When building with Docker, you instead provide the ``--python-gapic-templates`` -argument after the ``docker run`` command: - -.. 
code-block:: shell - - $ docker run \ - --mount type=bind,source=google/cloud/vision/v1/,destination=/in/google/cloud/vision/v1/,readonly \ - --mount type=bind,source=dest/,destination=/out/ \ - --mount type=bind,source=/path/to/templates,destination=/templates/,readonly \ - --rm \ - --user $UID \ - gcr.io/gapic-images/gapic-generator-python \ - --python-gapic-templates /templates/ \ - --python-gapic-templates DEFAULT - -As before, to provide more than one location for templates, specify the -argument more than once. - -.. warning:: - - If you are using custom templates with Docker, be sure to also mount - the directory with the templates into the Docker image; otherwise - the generator will not be able to read that directory. When specifying - the ``--python-gapic-templates`` argument, it is the path *inside* - the Docker image that matters! .. _Jinja: http://jinja.pocoo.org/docs/2.10/ diff --git a/packages/gapic-generator/gapic.sh b/packages/gapic-generator/gapic.sh deleted file mode 100755 index 17f5c563a392..000000000000 --- a/packages/gapic-generator/gapic.sh +++ /dev/null @@ -1,105 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -CMD="$0" - -# Set variables used by this script. -# All of these are set in options below, and all but $PATH are required. -IMAGE= -IN= -OUT= -PLUGIN_OPTIONS= -PROTO_PATH=`pwd` - -# Print help and exit. 
-function show_help { - echo "Usage: $CMD --image IMAGE --in IN_DIR --out OUT_DIR [--path PATH_DIR]" - echo "" - echo "Required arguments:" - echo " --image The Docker image to use. The script will attempt to pull" - echo " it if it is not present." - echo " -i, --in A directory containing the protos describing the API" - echo " to be generated." - echo " -o, --out Destination directory for the completed client library." - echo "" - echo "Optional arguments:" - echo " -p, --path The base import path for the protos. Assumed to be the" - echo " current working directory if unspecified." - echo " -h, --help This help information." - exit 0 -} - -# Parse out options. -while true; do - case "$1" in - -h | --help ) show_help ;; - --image ) IMAGE="$2"; shift 2 ;; - -i | --in ) IN="$2"; shift 2 ;; - -o | --out ) OUT="$2"; shift 2 ;; - -p | --path ) PROTO_PATH=$2; shift 2 ;; - --* ) PLUGIN_OPTIONS="$PLUGIN_OPTIONS $1 $2"; shift 2 ;; - -- ) shift; break; ;; - * ) break ;; - esac -done - -# Ensure that all required options are set. -if [ -z "$IMAGE" ] || [ -z "$IN" ] || [ -z "$OUT" ]; then - >&2 echo "Required argument missing." - >&2 echo "The --image, --in, and --out arguments are all required." - >&2 echo "Run $CMD --help for more information." - exit 64 -fi - -# Ensure that the input directory exists (and is a directory). -if ! [ -d "${PROTO_PATH}/$IN" ]; then - >&2 echo "Directory does not exist: ${PROTO_PATH}/$IN" - exit 2 -fi - -# Ensure Docker is running and seems healthy. -# This is mostly a check to bubble useful errors quickly. -if ! docker ps > /dev/null; then - exit $? -fi - -# If the output directory does not exist, create it. -if ! mkdir -p $OUT ; then - exit $? -fi - -# If the output directory is not empty, warn (but continue). -if [ "$(ls -A $OUT )"]; then - >&2 echo "Warning: Output directory is not empty." 
-fi - -# Convert IN and OUT to absolute paths for Docker -CWD=`pwd` -cd ${PROTO_PATH}/$IN -ABS_IN=`pwd` -cd $CWD -cd $OUT -ABS_OUT=`pwd` -cd $CWD - -# Generate the client library. -docker run \ - --mount type=bind,source=${ABS_IN},destination=/in/${IN},readonly \ - --mount type=bind,source=${ABS_OUT},destination=/out \ - --rm \ - --user $UID \ - $IMAGE \ - $PLUGIN_OPTIONS -exit $? From c439722b263395949b295357fdcf005c9c92f4d0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Jun 2025 09:24:09 -0700 Subject: [PATCH 1292/1339] chore(main): release 1.26.0 (#2390) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 86c1f6b164d5..d82f589cc36d 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.26.0](https://github.com/googleapis/gapic-generator-python/compare/v1.25.0...v1.26.0) (2025-05-28) + + +### Features + +* Add client debug logging support for server side streaming REST calls ([#2340](https://github.com/googleapis/gapic-generator-python/issues/2340)) ([8a705c5](https://github.com/googleapis/gapic-generator-python/commit/8a705c513fe63b025eca738feb64a1e414f9f4b6)) + ## [1.25.0](https://github.com/googleapis/gapic-generator-python/compare/v1.24.1...v1.25.0) (2025-05-06) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index d5ed1c93249c..ce33fac8f471 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.25.0" 
+version = "1.26.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From edf13716b7a03b888e4684269e827a513f2ba2a8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 27 Aug 2025 20:31:46 -0400 Subject: [PATCH 1293/1339] fix: Temporarily disable `test__api_version_header_async` which is not marked as async (#2429) --- .../gapic-generator/gapic/templates/noxfile.py.j2 | 3 ++- .../gapic/%name_%version/%sub/test_%service.py.j2 | 6 +++++- packages/gapic-generator/noxfile.py | 11 ++++++----- .../tests/integration/goldens/asset/noxfile.py | 3 ++- .../tests/integration/goldens/credentials/noxfile.py | 3 ++- .../tests/integration/goldens/eventarc/noxfile.py | 3 ++- .../tests/integration/goldens/logging/noxfile.py | 3 ++- .../integration/goldens/logging_internal/noxfile.py | 3 ++- .../tests/integration/goldens/redis/noxfile.py | 3 ++- .../integration/goldens/redis_selective/noxfile.py | 3 ++- 10 files changed, 27 insertions(+), 14 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 6408f306dcb7..a9c09b337e96 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -81,7 +81,8 @@ nox.options.error_on_missing_interpreters = True def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index a69026767cd1..31ab20095b16 100644 --- 
a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -224,7 +224,11 @@ def test__get_api_endpoint(): {% if service.version %} {% for method in service.methods.values() %}{% with method_name = method.name|snake_case %} -{% for mode in ["", "async"] %} +{# TODO(https://github.com/googleapis/gapic-generator-python/issues/2428): Restore async test ` + # once it properly runs as an async test with `@pytest.mark.asyncio` + # Add "async" to the list below `for mode in ["", "async"]` +#} +{% for mode in [""] %} {% if mode == "async" %} async def test_{{ method_name }}_api_version_header_async(transport_name="grpc"): client = {{ service.async_client_name }}(credentials=async_anonymous_credentials(), transport=transport_name) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 7b686cb7db20..2d688e139b9e 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -392,9 +392,9 @@ def showcase( """Run the Showcase test suite.""" with showcase_library(session, templates=templates, other_opts=other_opts): - # Exclude pytest-asyncio==1.0.0 while we investigate the recent failure described in + # Use pytest-asyncio<1.0.0 while we investigate the recent failure described in # https://github.com/googleapis/gapic-generator-python/issues/2399 - session.install("pytest", "pytest-asyncio!=1.0.0") + session.install("pytest", "pytest-asyncio<1.0.0") test_directory = Path("tests", "system") ignore_file = env.get("IGNORE_FILE") pytest_command = [ @@ -424,9 +424,9 @@ def showcase_w_rest_async( with showcase_library( session, templates=templates, other_opts=other_opts, rest_async_io_enabled=True ): - # Exclude pytest-asyncio==1.0.0 while we investigate the recent failure described in + # Use pytest-asyncio<1.0.0 while we investigate the recent failure described in # 
https://github.com/googleapis/gapic-generator-python/issues/2399 - session.install("pytest", "pytest-asyncio!=1.0.0") + session.install("pytest", "pytest-asyncio<1.0.0") test_directory = Path("tests", "system") ignore_file = env.get("IGNORE_FILE") pytest_command = [ @@ -589,7 +589,8 @@ def showcase_mypy( """Perform typecheck analysis on the generated Showcase library.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-setuptools", "types-protobuf", "types-requests", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 4efb1b1161eb..db793cd23ecf 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -88,7 +88,8 @@ def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 3aea2c0c1b6e..9955f98f4fde 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -88,7 +88,8 @@ def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index ba8fbf5fc254..30d3a401d130 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -88,7 +88,8 @@ def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 2e9dd52f17e7..ea4d1e89e65f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -88,7 +88,8 @@ def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index 2e9dd52f17e7..ea4d1e89e65f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -88,7 +88,8 @@ def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 57680446610c..9fa5bc71996c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -88,7 +88,8 @@ 
def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index 57680446610c..9fa5bc71996c 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -88,7 +88,8 @@ def mypy(session): """Run the type checker.""" session.install( - "mypy", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2410): Use the latest version of mypy + "mypy<1.16.0", "types-requests", "types-protobuf", ) From 46b16465b25f179f47c6bb8c7fc2a5563a3f17be Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Wed, 27 Aug 2025 18:01:29 -0700 Subject: [PATCH 1294/1339] chore: depend on stable version of google.shopping.type (#2423) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index f90af07b3682..6ce1f8773d83 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -16,6 +16,6 @@ Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has sup ("google", "cloud", "osconfig", "v1"): {"package_name": "google-cloud-os-config", "lower_bound": "1.0.0", "upper_bound": "2.0.0"}, ("google", "iam", "v1"): {"package_name": "grpc-google-iam-v1", "lower_bound": "0.14.0", "upper_bound": "1.0.0"}, ("google", "iam", "v2"): {"package_name": "google-cloud-iam", "lower_bound": "2.12.2", "upper_bound": "3.0.0"}, - ("google", "shopping", 
"type"): {"package_name": "google-shopping-type", "lower_bound": "0.1.6", "upper_bound": "1.0.0"} + ("google", "shopping", "type"): {"package_name": "google-shopping-type", "lower_bound": "1.0.0", "upper_bound": "2.0.0"} } %} From 2270ab94ce40d80ad60561286b2f5219f0c09081 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 28 Aug 2025 01:07:30 +0000 Subject: [PATCH 1295/1339] chore(main): release 1.26.1 (#2430) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d82f589cc36d..c7e10c962ca8 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.26.1](https://github.com/googleapis/gapic-generator-python/compare/v1.26.0...v1.26.1) (2025-08-28) + + +### Bug Fixes + +* Temporarily disable `test__api_version_header_async` which is not marked as async ([9382ac3](https://github.com/googleapis/gapic-generator-python/commit/9382ac3546f335116665e10c4132369bc7637a67)) + ## [1.26.0](https://github.com/googleapis/gapic-generator-python/compare/v1.25.0...v1.26.0) (2025-05-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index ce33fac8f471..acf0b267d9e5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.26.0" +version = "1.26.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the 
From d6687cd691193dd99c27c741b3f37edf2032c0eb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 28 Aug 2025 03:38:37 +0200 Subject: [PATCH 1296/1339] chore(deps): update dependency protobuf to v6.31.1 [security] (#2422) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index d79b068ebf4a..1df175068eaf 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -740,16 +740,16 @@ proto-plus==1.26.1 \ # via # -r requirements.in # google-api-core -protobuf==6.31.0 \ - --hash=sha256:00a873c06efdfb854145d9ded730b09cf57d206075c38132674093370e2edabb \ - --hash=sha256:10bd62802dfa0588649740a59354090eaf54b8322f772fbdcca19bc78d27f0d6 \ - --hash=sha256:2c812f0f96ceb6b514448cefeb1df54ec06dde456783f5099c0e2f8a0f2caa89 \ - --hash=sha256:314fab1a6a316469dc2dd46f993cbbe95c861ea6807da910becfe7475bc26ffe \ - --hash=sha256:3e987c99fd634be8347246a02123250f394ba20573c953de133dc8b2c107dd71 \ - --hash=sha256:5353e38844168a327acd2b2aa440044411cd8d1b6774d5701008bd1dba067c79 \ - --hash=sha256:67ce50195e4e584275623b8e6bc6d3d3dfd93924bf6116b86b3b8975ab9e4571 \ - --hash=sha256:6ac2e82556e822c17a8d23aa1190bbc1d06efb9c261981da95c71c9da09e9e23 \ - --hash=sha256:96d8da25c83b11db5fe9e0376351ce25e7205e13224d939e097b6f82a72af824 +protobuf==6.31.1 \ + --hash=sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16 \ + --hash=sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447 \ + --hash=sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6 \ + --hash=sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402 \ + --hash=sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e \ + --hash=sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9 \ + 
--hash=sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9 \ + --hash=sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39 \ + --hash=sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a # via # -r requirements.in # google-api-core From 6da96b9f4e527979003aef8559100ffaa55a6cd1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Aug 2025 22:10:36 -0400 Subject: [PATCH 1297/1339] build(deps): bump aiohttp from 3.11.18 to 3.12.14 (#2416) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 187 ++++++++++++---------- 1 file changed, 98 insertions(+), 89 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 1df175068eaf..e49b7af4838b 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,92 +8,97 @@ aiohappyeyeballs==2.6.1 \ --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 # via aiohttp -aiohttp==3.11.18 \ - --hash=sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717 \ - --hash=sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d \ - --hash=sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8 \ - --hash=sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda \ - --hash=sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421 \ - --hash=sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9 \ - --hash=sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d \ - --hash=sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8 \ - 
--hash=sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3 \ - --hash=sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3 \ - --hash=sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361 \ - --hash=sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137 \ - --hash=sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b \ - --hash=sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd \ - --hash=sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8 \ - --hash=sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f \ - --hash=sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d \ - --hash=sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec \ - --hash=sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb \ - --hash=sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0 \ - --hash=sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756 \ - --hash=sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6 \ - --hash=sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9 \ - --hash=sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009 \ - --hash=sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08 \ - --hash=sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533 \ - --hash=sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811 \ - --hash=sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721 \ - --hash=sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118 \ - --hash=sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55 \ - --hash=sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609 \ - --hash=sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1 \ - 
--hash=sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66 \ - --hash=sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2 \ - --hash=sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef \ - --hash=sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f \ - --hash=sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2 \ - --hash=sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804 \ - --hash=sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f \ - --hash=sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94 \ - --hash=sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30 \ - --hash=sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e \ - --hash=sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1 \ - --hash=sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4 \ - --hash=sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f \ - --hash=sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4 \ - --hash=sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f \ - --hash=sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6 \ - --hash=sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4 \ - --hash=sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f \ - --hash=sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7 \ - --hash=sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421 \ - --hash=sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e \ - --hash=sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935 \ - --hash=sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c \ - --hash=sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea \ - 
--hash=sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7 \ - --hash=sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868 \ - --hash=sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a \ - --hash=sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc \ - --hash=sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a \ - --hash=sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643 \ - --hash=sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01 \ - --hash=sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829 \ - --hash=sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93 \ - --hash=sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9 \ - --hash=sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78 \ - --hash=sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508 \ - --hash=sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd \ - --hash=sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1 \ - --hash=sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2 \ - --hash=sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6 \ - --hash=sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261 \ - --hash=sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863 \ - --hash=sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1 \ - --hash=sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb \ - --hash=sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415 \ - --hash=sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000 \ - --hash=sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1 \ - --hash=sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7 \ - 
--hash=sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798 +aiohttp==3.12.14 \ + --hash=sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3 \ + --hash=sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a \ + --hash=sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179 \ + --hash=sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3 \ + --hash=sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d \ + --hash=sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086 \ + --hash=sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced \ + --hash=sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729 \ + --hash=sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0 \ + --hash=sha256:196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f \ + --hash=sha256:1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5 \ + --hash=sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245 \ + --hash=sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda \ + --hash=sha256:23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3 \ + --hash=sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0 \ + --hash=sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d \ + --hash=sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088 \ + --hash=sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767 \ + --hash=sha256:3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3 \ + --hash=sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c \ + --hash=sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9 \ + --hash=sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338 \ + 
--hash=sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e \ + --hash=sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa \ + --hash=sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641 \ + --hash=sha256:4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b \ + --hash=sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63 \ + --hash=sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7 \ + --hash=sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288 \ + --hash=sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da \ + --hash=sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b \ + --hash=sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656 \ + --hash=sha256:5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922 \ + --hash=sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4 \ + --hash=sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2 \ + --hash=sha256:717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0 \ + --hash=sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8 \ + --hash=sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419 \ + --hash=sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425 \ + --hash=sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635 \ + --hash=sha256:79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140 \ + --hash=sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393 \ + --hash=sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb \ + --hash=sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88 \ + --hash=sha256:8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf \ + --hash=sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28 \ + 
--hash=sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248 \ + --hash=sha256:938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84 \ + --hash=sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660 \ + --hash=sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933 \ + --hash=sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c \ + --hash=sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22 \ + --hash=sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab \ + --hash=sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b \ + --hash=sha256:a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0 \ + --hash=sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5 \ + --hash=sha256:a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc \ + --hash=sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff \ + --hash=sha256:a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee \ + --hash=sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7 \ + --hash=sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95 \ + --hash=sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61 \ + --hash=sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe \ + --hash=sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb \ + --hash=sha256:b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170 \ + --hash=sha256:b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae \ + --hash=sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3 \ + --hash=sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb \ + --hash=sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869 \ + --hash=sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd \ + 
--hash=sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db \ + --hash=sha256:d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b \ + --hash=sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab \ + --hash=sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8 \ + --hash=sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc \ + --hash=sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151 \ + --hash=sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663 \ + --hash=sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1 \ + --hash=sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c \ + --hash=sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026 \ + --hash=sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7 \ + --hash=sha256:f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2 \ + --hash=sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597 \ + --hash=sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3 \ + --hash=sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758 \ + --hash=sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd # via -r requirements.in -aiosignal==1.3.2 \ - --hash=sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5 \ - --hash=sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54 +aiosignal==1.4.0 \ + --hash=sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e \ + --hash=sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7 # via aiohttp async-timeout==5.0.1 \ --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ @@ -205,9 +210,9 @@ charset-normalizer==3.4.2 \ --hash=sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a \ 
--hash=sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f # via requests -click==8.2.0 \ - --hash=sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c \ - --hash=sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a # via -r requirements.in exceptiongroup==1.3.0 \ --hash=sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10 \ @@ -880,7 +885,11 @@ tomli==2.2.1 \ typing-extensions==4.13.2 \ --hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \ --hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef - # via multidict + # via + # aiosignal + # exceptiongroup + # multidict + # pytest-asyncio urllib3==2.4.0 \ --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \ --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813 From 73b37d3bc164a3ea3a96325f3e70c5380d604cd7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 28 Aug 2025 16:44:31 +0200 Subject: [PATCH 1298/1339] chore(deps): update dependency urllib3 to v2.5.0 [security] (#2413) --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e49b7af4838b..07ae0fa309f5 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -890,9 +890,9 @@ typing-extensions==4.13.2 \ # exceptiongroup # multidict # pytest-asyncio -urllib3==2.4.0 \ - --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \ - --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813 +urllib3==2.5.0 \ + 
--hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \ + --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc # via requests yarl==1.20.0 \ --hash=sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9 \ From 6d6d12602d148663cf183f0fe1a9a538dc07da04 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 28 Aug 2025 16:58:13 +0200 Subject: [PATCH 1299/1339] chore(deps): update dependency requests to v2.32.4 [security] (#2403) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 07ae0fa309f5..343672d3cc4b 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -840,9 +840,9 @@ pyyaml==6.0.2 \ # via # -r requirements.in # libcst -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 +requests==2.32.4 \ + --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ + --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 # via google-api-core rsa==4.9.1 \ --hash=sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762 \ From 69b49f6121b199882add293b02269bf0fea90afb Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 28 Aug 2025 18:13:19 +0200 Subject: [PATCH 1300/1339] chore(deps): update all dependencies (#2401) Co-authored-by: Anthonios Partheniou --- .../.github/workflows/tests.yaml | 32 +- packages/gapic-generator/requirements.txt | 1455 +++++++++-------- 2 files changed, 757 insertions(+), 730 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 2e6b314b264e..4f73366add06 100644 
--- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -21,7 +21,7 @@ jobs: docs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v5 # Use python 3.10 for docs to match the version for the sphinx plugin @@ -36,7 +36,7 @@ jobs: docfx: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python uses: actions/setup-python@v5 # Use python 3.10 for docs to match the version for the sphinx plugin @@ -56,7 +56,7 @@ jobs: python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v5 with: @@ -79,7 +79,7 @@ jobs: # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v5 with: @@ -121,7 +121,7 @@ jobs: max-parallel: 1 runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup temp directory run: | sudo mkdir -p /tmp/workspace/tests/cert/ @@ -171,7 +171,7 @@ jobs: # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "${{ matrix.python }}" uses: actions/setup-python@v5 with: @@ -199,7 +199,7 @@ jobs: showcase-unit-add-iam-methods: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "3.13" uses: actions/setup-python@v5 with: @@ -227,7 +227,7 @@ jobs: matrix: variant: ['', _alternative_templates] steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "3.13" uses: actions/setup-python@v5 
with: @@ -252,7 +252,7 @@ jobs: snippetgen: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "3.13" uses: actions/setup-python@v5 with: @@ -275,7 +275,7 @@ jobs: # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v5 with: @@ -300,7 +300,7 @@ jobs: # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v5 with: @@ -319,7 +319,7 @@ jobs: runs-on: ubuntu-latest container: gcr.io/gapic-images/googleapis steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Cache Bazel files id: cache-bazel uses: actions/cache@v4 @@ -347,7 +347,7 @@ jobs: goldens-lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python 3.13 uses: actions/setup-python@v5 with: @@ -366,7 +366,7 @@ jobs: goldens-unit: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python 3.13 uses: actions/setup-python@v5 with: @@ -387,7 +387,7 @@ jobs: goldens-prerelease: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python 3.13 uses: actions/setup-python@v5 with: @@ -408,7 +408,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Set up Python "3.13" uses: actions/setup-python@v5 with: diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 343672d3cc4b..e071bc5e9132 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,93 +8,93 @@ aiohappyeyeballs==2.6.1 \ 
--hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 # via aiohttp -aiohttp==3.12.14 \ - --hash=sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3 \ - --hash=sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a \ - --hash=sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179 \ - --hash=sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3 \ - --hash=sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d \ - --hash=sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086 \ - --hash=sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced \ - --hash=sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729 \ - --hash=sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0 \ - --hash=sha256:196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f \ - --hash=sha256:1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5 \ - --hash=sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245 \ - --hash=sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda \ - --hash=sha256:23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3 \ - --hash=sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0 \ - --hash=sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d \ - --hash=sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088 \ - --hash=sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767 \ - --hash=sha256:3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3 \ - --hash=sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c \ - --hash=sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9 \ - 
--hash=sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338 \ - --hash=sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e \ - --hash=sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa \ - --hash=sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641 \ - --hash=sha256:4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b \ - --hash=sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63 \ - --hash=sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7 \ - --hash=sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288 \ - --hash=sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da \ - --hash=sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b \ - --hash=sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656 \ - --hash=sha256:5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922 \ - --hash=sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4 \ - --hash=sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2 \ - --hash=sha256:717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0 \ - --hash=sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8 \ - --hash=sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419 \ - --hash=sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425 \ - --hash=sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635 \ - --hash=sha256:79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140 \ - --hash=sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393 \ - --hash=sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb \ - --hash=sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88 \ - --hash=sha256:8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf \ - 
--hash=sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28 \ - --hash=sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248 \ - --hash=sha256:938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84 \ - --hash=sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660 \ - --hash=sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933 \ - --hash=sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c \ - --hash=sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22 \ - --hash=sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab \ - --hash=sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b \ - --hash=sha256:a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0 \ - --hash=sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5 \ - --hash=sha256:a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc \ - --hash=sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff \ - --hash=sha256:a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee \ - --hash=sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7 \ - --hash=sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95 \ - --hash=sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61 \ - --hash=sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe \ - --hash=sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb \ - --hash=sha256:b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170 \ - --hash=sha256:b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae \ - --hash=sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3 \ - --hash=sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb \ - --hash=sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869 \ - 
--hash=sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd \ - --hash=sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db \ - --hash=sha256:d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b \ - --hash=sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab \ - --hash=sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8 \ - --hash=sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc \ - --hash=sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151 \ - --hash=sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663 \ - --hash=sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1 \ - --hash=sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c \ - --hash=sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026 \ - --hash=sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7 \ - --hash=sha256:f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2 \ - --hash=sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597 \ - --hash=sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3 \ - --hash=sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758 \ - --hash=sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd +aiohttp==3.12.15 \ + --hash=sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe \ + --hash=sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645 \ + --hash=sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af \ + --hash=sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263 \ + --hash=sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142 \ + --hash=sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6 \ + 
--hash=sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6 \ + --hash=sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09 \ + --hash=sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84 \ + --hash=sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1 \ + --hash=sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50 \ + --hash=sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a \ + --hash=sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79 \ + --hash=sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c \ + --hash=sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd \ + --hash=sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0 \ + --hash=sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75 \ + --hash=sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77 \ + --hash=sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c \ + --hash=sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab \ + --hash=sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4 \ + --hash=sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9 \ + --hash=sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421 \ + --hash=sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685 \ + --hash=sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b \ + --hash=sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf \ + --hash=sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693 \ + --hash=sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c \ + --hash=sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2 \ + --hash=sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519 \ + 
--hash=sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d \ + --hash=sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02 \ + --hash=sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea \ + --hash=sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05 \ + --hash=sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b \ + --hash=sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0 \ + --hash=sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd \ + --hash=sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98 \ + --hash=sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb \ + --hash=sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8 \ + --hash=sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f \ + --hash=sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89 \ + --hash=sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16 \ + --hash=sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64 \ + --hash=sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb \ + --hash=sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7 \ + --hash=sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728 \ + --hash=sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7 \ + --hash=sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830 \ + --hash=sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d \ + --hash=sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8 \ + --hash=sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d \ + --hash=sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406 \ + --hash=sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2 \ + 
--hash=sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9 \ + --hash=sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315 \ + --hash=sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d \ + --hash=sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd \ + --hash=sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d \ + --hash=sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51 \ + --hash=sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3 \ + --hash=sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34 \ + --hash=sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461 \ + --hash=sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b \ + --hash=sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc \ + --hash=sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530 \ + --hash=sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5 \ + --hash=sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d \ + --hash=sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7 \ + --hash=sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5 \ + --hash=sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54 \ + --hash=sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d \ + --hash=sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7 \ + --hash=sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117 \ + --hash=sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4 \ + --hash=sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1 \ + --hash=sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676 \ + --hash=sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b \ + 
--hash=sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d \ + --hash=sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0 \ + --hash=sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d \ + --hash=sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444 \ + --hash=sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0 \ + --hash=sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065 \ + --hash=sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545 \ + --hash=sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d # via -r requirements.in aiosignal==1.4.0 \ --hash=sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e \ @@ -108,107 +108,98 @@ attrs==25.3.0 \ --hash=sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3 \ --hash=sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b # via aiohttp +backports-asyncio-runner==1.2.0 \ + --hash=sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5 \ + --hash=sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162 + # via pytest-asyncio cachetools==5.5.2 \ --hash=sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4 \ --hash=sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a # via google-auth -certifi==2025.4.26 \ - --hash=sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6 \ - --hash=sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3 +certifi==2025.8.3 \ + --hash=sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407 \ + --hash=sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5 # via requests -charset-normalizer==3.4.2 \ - --hash=sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4 \ - --hash=sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45 \ 
- --hash=sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7 \ - --hash=sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0 \ - --hash=sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7 \ - --hash=sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d \ - --hash=sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d \ - --hash=sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0 \ - --hash=sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184 \ - --hash=sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db \ - --hash=sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b \ - --hash=sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64 \ - --hash=sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b \ - --hash=sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8 \ - --hash=sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff \ - --hash=sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344 \ - --hash=sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58 \ - --hash=sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e \ - --hash=sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471 \ - --hash=sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148 \ - --hash=sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a \ - --hash=sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836 \ - --hash=sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e \ - --hash=sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63 \ - --hash=sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c \ - --hash=sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1 \ - 
--hash=sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01 \ - --hash=sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366 \ - --hash=sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58 \ - --hash=sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5 \ - --hash=sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c \ - --hash=sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2 \ - --hash=sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a \ - --hash=sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597 \ - --hash=sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b \ - --hash=sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5 \ - --hash=sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb \ - --hash=sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f \ - --hash=sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0 \ - --hash=sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941 \ - --hash=sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0 \ - --hash=sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86 \ - --hash=sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7 \ - --hash=sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7 \ - --hash=sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455 \ - --hash=sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6 \ - --hash=sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4 \ - --hash=sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0 \ - --hash=sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3 \ - --hash=sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1 \ - 
--hash=sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6 \ - --hash=sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981 \ - --hash=sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c \ - --hash=sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980 \ - --hash=sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645 \ - --hash=sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7 \ - --hash=sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12 \ - --hash=sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa \ - --hash=sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd \ - --hash=sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef \ - --hash=sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f \ - --hash=sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2 \ - --hash=sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d \ - --hash=sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5 \ - --hash=sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02 \ - --hash=sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3 \ - --hash=sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd \ - --hash=sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e \ - --hash=sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214 \ - --hash=sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd \ - --hash=sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a \ - --hash=sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c \ - --hash=sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681 \ - --hash=sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba \ - 
--hash=sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f \ - --hash=sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a \ - --hash=sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28 \ - --hash=sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691 \ - --hash=sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82 \ - --hash=sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a \ - --hash=sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027 \ - --hash=sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7 \ - --hash=sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518 \ - --hash=sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf \ - --hash=sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b \ - --hash=sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9 \ - --hash=sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544 \ - --hash=sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da \ - --hash=sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509 \ - --hash=sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f \ - --hash=sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a \ - --hash=sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f +charset-normalizer==3.4.3 \ + --hash=sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91 \ + --hash=sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0 \ + --hash=sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154 \ + --hash=sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601 \ + --hash=sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884 \ + 
--hash=sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07 \ + --hash=sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c \ + --hash=sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64 \ + --hash=sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe \ + --hash=sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f \ + --hash=sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432 \ + --hash=sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc \ + --hash=sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa \ + --hash=sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9 \ + --hash=sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae \ + --hash=sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19 \ + --hash=sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d \ + --hash=sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e \ + --hash=sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4 \ + --hash=sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7 \ + --hash=sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312 \ + --hash=sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92 \ + --hash=sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31 \ + --hash=sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c \ + --hash=sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f \ + --hash=sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99 \ + --hash=sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b \ + --hash=sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15 \ + --hash=sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392 \ + 
--hash=sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f \ + --hash=sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8 \ + --hash=sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491 \ + --hash=sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0 \ + --hash=sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc \ + --hash=sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0 \ + --hash=sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f \ + --hash=sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a \ + --hash=sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40 \ + --hash=sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927 \ + --hash=sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849 \ + --hash=sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce \ + --hash=sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14 \ + --hash=sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05 \ + --hash=sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c \ + --hash=sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c \ + --hash=sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a \ + --hash=sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc \ + --hash=sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34 \ + --hash=sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9 \ + --hash=sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096 \ + --hash=sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14 \ + --hash=sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30 \ + --hash=sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b \ + 
--hash=sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b \ + --hash=sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942 \ + --hash=sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db \ + --hash=sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5 \ + --hash=sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b \ + --hash=sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce \ + --hash=sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669 \ + --hash=sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0 \ + --hash=sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018 \ + --hash=sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93 \ + --hash=sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe \ + --hash=sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049 \ + --hash=sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a \ + --hash=sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef \ + --hash=sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2 \ + --hash=sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca \ + --hash=sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16 \ + --hash=sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f \ + --hash=sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb \ + --hash=sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1 \ + --hash=sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557 \ + --hash=sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37 \ + --hash=sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7 \ + --hash=sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72 \ + 
--hash=sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c \ + --hash=sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9 # via requests click==8.1.8 \ --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ @@ -218,121 +209,121 @@ exceptiongroup==1.3.0 \ --hash=sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10 \ --hash=sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88 # via pytest -frozenlist==1.6.0 \ - --hash=sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117 \ - --hash=sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2 \ - --hash=sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42 \ - --hash=sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812 \ - --hash=sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3 \ - --hash=sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a \ - --hash=sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572 \ - --hash=sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa \ - --hash=sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b \ - --hash=sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626 \ - --hash=sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e \ - --hash=sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d \ - --hash=sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c \ - --hash=sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530 \ - --hash=sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878 \ - --hash=sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e \ - --hash=sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869 \ - 
--hash=sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd \ - --hash=sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603 \ - --hash=sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606 \ - --hash=sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85 \ - --hash=sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64 \ - --hash=sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f \ - --hash=sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0 \ - --hash=sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c \ - --hash=sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4 \ - --hash=sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103 \ - --hash=sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02 \ - --hash=sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191 \ - --hash=sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0 \ - --hash=sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e \ - --hash=sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791 \ - --hash=sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983 \ - --hash=sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f \ - --hash=sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff \ - --hash=sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8 \ - --hash=sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860 \ - --hash=sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8 \ - --hash=sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25 \ - --hash=sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f \ - --hash=sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba \ - 
--hash=sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24 \ - --hash=sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e \ - --hash=sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd \ - --hash=sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911 \ - --hash=sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c \ - --hash=sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595 \ - --hash=sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c \ - --hash=sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc \ - --hash=sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2 \ - --hash=sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75 \ - --hash=sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4 \ - --hash=sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0 \ - --hash=sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe \ - --hash=sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249 \ - --hash=sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c \ - --hash=sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576 \ - --hash=sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b \ - --hash=sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770 \ - --hash=sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046 \ - --hash=sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584 \ - --hash=sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497 \ - --hash=sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f \ - --hash=sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f \ - --hash=sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b \ - 
--hash=sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f \ - --hash=sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d \ - --hash=sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a \ - --hash=sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e \ - --hash=sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68 \ - --hash=sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189 \ - --hash=sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9 \ - --hash=sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8 \ - --hash=sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1 \ - --hash=sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0 \ - --hash=sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3 \ - --hash=sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29 \ - --hash=sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0 \ - --hash=sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215 \ - --hash=sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590 \ - --hash=sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c \ - --hash=sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821 \ - --hash=sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1 \ - --hash=sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769 \ - --hash=sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506 \ - --hash=sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3 \ - --hash=sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348 \ - --hash=sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad \ - --hash=sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a \ - 
--hash=sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad \ - --hash=sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6 \ - --hash=sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45 \ - --hash=sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188 \ - --hash=sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e \ - --hash=sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70 \ - --hash=sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70 \ - --hash=sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1 \ - --hash=sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106 \ - --hash=sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd \ - --hash=sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc \ - --hash=sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352 \ - --hash=sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91 \ - --hash=sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1 \ - --hash=sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f +frozenlist==1.7.0 \ + --hash=sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f \ + --hash=sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b \ + --hash=sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949 \ + --hash=sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615 \ + --hash=sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6 \ + --hash=sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718 \ + --hash=sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df \ + --hash=sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf \ + 
--hash=sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677 \ + --hash=sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5 \ + --hash=sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50 \ + --hash=sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb \ + --hash=sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56 \ + --hash=sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa \ + --hash=sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7 \ + --hash=sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43 \ + --hash=sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f \ + --hash=sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938 \ + --hash=sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c \ + --hash=sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd \ + --hash=sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c \ + --hash=sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e \ + --hash=sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d \ + --hash=sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81 \ + --hash=sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e \ + --hash=sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657 \ + --hash=sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478 \ + --hash=sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2 \ + --hash=sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca \ + --hash=sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e \ + --hash=sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e \ + --hash=sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3 \ + 
--hash=sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63 \ + --hash=sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898 \ + --hash=sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd \ + --hash=sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca \ + --hash=sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2 \ + --hash=sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104 \ + --hash=sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba \ + --hash=sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a \ + --hash=sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1 \ + --hash=sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae \ + --hash=sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577 \ + --hash=sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60 \ + --hash=sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee \ + --hash=sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464 \ + --hash=sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61 \ + --hash=sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86 \ + --hash=sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01 \ + --hash=sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87 \ + --hash=sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb \ + --hash=sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f \ + --hash=sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71 \ + --hash=sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8 \ + --hash=sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d \ + --hash=sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2 \ + 
--hash=sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00 \ + --hash=sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b \ + --hash=sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b \ + --hash=sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146 \ + --hash=sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59 \ + --hash=sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878 \ + --hash=sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08 \ + --hash=sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890 \ + --hash=sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e \ + --hash=sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750 \ + --hash=sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb \ + --hash=sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d \ + --hash=sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30 \ + --hash=sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3 \ + --hash=sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d \ + --hash=sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a \ + --hash=sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8 \ + --hash=sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c \ + --hash=sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1 \ + --hash=sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9 \ + --hash=sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e \ + --hash=sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e \ + --hash=sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384 \ + --hash=sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98 \ + 
--hash=sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb \ + --hash=sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4 \ + --hash=sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65 \ + --hash=sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08 \ + --hash=sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb \ + --hash=sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43 \ + --hash=sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a \ + --hash=sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7 \ + --hash=sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630 \ + --hash=sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d \ + --hash=sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31 \ + --hash=sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d \ + --hash=sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44 \ + --hash=sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319 \ + --hash=sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e \ + --hash=sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025 \ + --hash=sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35 \ + --hash=sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee \ + --hash=sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1 \ + --hash=sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd \ + --hash=sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74 \ + --hash=sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b \ + --hash=sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981 \ + --hash=sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5 # via # 
aiohttp # aiosignal -google-api-core==2.24.2 \ - --hash=sha256:810a63ac95f3c441b7c0e43d344e372887f62ce9071ba972eacf32672e072de9 \ - --hash=sha256:81718493daf06d96d6bc76a91c23874dbf2fac0adbbf542831b805ee6e974696 +google-api-core==2.25.1 \ + --hash=sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7 \ + --hash=sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8 # via -r requirements.in -google-auth==2.40.1 \ - --hash=sha256:58f0e8416a9814c1d86c9b7f6acf6816b51aba167b2c76821965271bac275540 \ - --hash=sha256:ed4cae4f5c46b41bae1d19c036e06f6c371926e97b19e816fc854eff811974ee +google-auth==2.40.3 \ + --hash=sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca \ + --hash=sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77 # via google-api-core googleapis-common-protos[grpc]==1.70.0 \ --hash=sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257 \ @@ -345,58 +336,58 @@ grpc-google-iam-v1==0.14.2 \ --hash=sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351 \ --hash=sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20 # via -r requirements.in -grpcio==1.71.0 \ - --hash=sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea \ - --hash=sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7 \ - --hash=sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537 \ - --hash=sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b \ - --hash=sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41 \ - --hash=sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366 \ - --hash=sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b \ - --hash=sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c \ - --hash=sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033 \ - 
--hash=sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3 \ - --hash=sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79 \ - --hash=sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29 \ - --hash=sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7 \ - --hash=sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e \ - --hash=sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67 \ - --hash=sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a \ - --hash=sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8 \ - --hash=sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d \ - --hash=sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb \ - --hash=sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3 \ - --hash=sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4 \ - --hash=sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a \ - --hash=sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3 \ - --hash=sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3 \ - --hash=sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509 \ - --hash=sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97 \ - --hash=sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6 \ - --hash=sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b \ - --hash=sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e \ - --hash=sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637 \ - --hash=sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a \ - --hash=sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d \ - --hash=sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7 \ - 
--hash=sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd \ - --hash=sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69 \ - --hash=sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d \ - --hash=sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379 \ - --hash=sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7 \ - --hash=sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32 \ - --hash=sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c \ - --hash=sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef \ - --hash=sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444 \ - --hash=sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec \ - --hash=sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594 \ - --hash=sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804 \ - --hash=sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7 \ - --hash=sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73 \ - --hash=sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5 \ - --hash=sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db \ - --hash=sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db \ - --hash=sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455 +grpcio==1.74.0 \ + --hash=sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f \ + --hash=sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc \ + --hash=sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7 \ + --hash=sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7 \ + --hash=sha256:1a2b06afe2e50ebfd46247ac3ba60cac523f54ec7792ae9ba6073c12daf26f0a \ + 
--hash=sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4 \ + --hash=sha256:22b834cef33429ca6cc28303c9c327ba9a3fafecbf62fae17e9a7b7163cc43ac \ + --hash=sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6 \ + --hash=sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89 \ + --hash=sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3 \ + --hash=sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49 \ + --hash=sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20 \ + --hash=sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f \ + --hash=sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc \ + --hash=sha256:4bc5fca10aaf74779081e16c2bcc3d5ec643ffd528d9e7b1c9039000ead73bae \ + --hash=sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82 \ + --hash=sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b \ + --hash=sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91 \ + --hash=sha256:5f251c355167b2360537cf17bea2cf0197995e551ab9da6a0a59b3da5e8704f9 \ + --hash=sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5 \ + --hash=sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362 \ + --hash=sha256:655726919b75ab3c34cdad39da5c530ac6fa32696fb23119e36b64adcfca174a \ + --hash=sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d \ + --hash=sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb \ + --hash=sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31 \ + --hash=sha256:6bab67d15ad617aff094c382c882e0177637da73cbc5532d52c07b4ee887a87b \ + --hash=sha256:7d95d71ff35291bab3f1c52f52f474c632db26ea12700c2ff0ea0532cb0b5854 \ + --hash=sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1 \ + --hash=sha256:834988b6c34515545b3edd13e902c1acdd9f2465d386ea5143fb558f153a7176 \ + 
--hash=sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8 \ + --hash=sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907 \ + --hash=sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11 \ + --hash=sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c \ + --hash=sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4 \ + --hash=sha256:8f7b5882fb50632ab1e48cb3122d6df55b9afabc265582808036b6e51b9fd6b7 \ + --hash=sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707 \ + --hash=sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5 \ + --hash=sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce \ + --hash=sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa \ + --hash=sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01 \ + --hash=sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9 \ + --hash=sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182 \ + --hash=sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b \ + --hash=sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486 \ + --hash=sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249 \ + --hash=sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3 \ + --hash=sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11 \ + --hash=sha256:ecde9ab49f58433abe02f9ed076c7b5be839cf0153883a6d23995937a82392fa \ + --hash=sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e \ + --hash=sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24 \ + --hash=sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e # via # googleapis-common-protos # grpc-google-iam-v1 @@ -418,43 +409,68 @@ jinja2==3.1.6 \ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ 
--hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 # via -r requirements.in -libcst==1.7.0 \ - --hash=sha256:0456381c939169c4f11caecdb30f7aca6f234640731f8f965849c1631930536b \ - --hash=sha256:09a5530b40a15dbe6fac842ef2ad87ad561760779380ccf3ade6850854d81406 \ - --hash=sha256:14e5c1d427c33d50df75be6bc999a7b2d7c6b7840e2361a18a6f354db50cb18e \ - --hash=sha256:1560598f5c56681adbd32f4b08e9cffcd45a021921d1d784370a7d4d9a2fac11 \ - --hash=sha256:340054c57abcd42953248af18ed278be651a03b1c2a1616f7e1f1ef90b6018ce \ - --hash=sha256:3923a341a787c1f454909e726a6213dd59c3db26c6e56d0a1fc4f2f7e96b45d7 \ - --hash=sha256:3d2ec10015e86a4402c3d2084ede6c7c9268faea1ecb99592fe9e291c515aaa2 \ - --hash=sha256:4c568e14d29489f09faf4915af18235f805d5aa60fa194023b4fadf3209f0c94 \ - --hash=sha256:57a6bcfc8ca8a0bb9e89a2dbf63ee8f0c7e8353a130528dcb47c9e59c2dc8c94 \ - --hash=sha256:5d5ba9314569865effd5baff3a58ceb2cced52228e181824759c68486a7ec8f4 \ - --hash=sha256:5e22738ec2855803f8242e6bf78057389d10f8954db34bf7079c82abab1b8b95 \ - --hash=sha256:5e50e6960ecc3ed67f39fec63aa329e772d5d27f8e2334e30f19a94aa14489f1 \ - --hash=sha256:6137fe549bfbb017283c3cf85419eb0dfaa20a211ad6d525538a2494e248a84b \ - --hash=sha256:61bfc90c8a4594296f8b68702f494dfdfec6e745a4abc0cfa8069d7f22061424 \ - --hash=sha256:6523731bfbdbc045ff8649130fe14a46b31ad6925f67acdc0e0d80a0c61719fd \ - --hash=sha256:71a8f59f3472fe8c0f6e2fad457825ea2ccad8c4c713cca55a91ff2cbfa9bc03 \ - --hash=sha256:81036e820249937608db7e72d0799180122d40d76d0c0414c454f8aa2ffa9c51 \ - --hash=sha256:8f6e693281d6e9a62414205fb300ec228ddc902ca9cb965a09f11561dc10aa94 \ - --hash=sha256:932a4c4508bd4cf5248c99b7218bb86af97d87fefa2bdab7ea8a0c28c270724a \ - --hash=sha256:93417d36c2a1b70d651d0e970ff73339e8dcd64d341672b68823fa0039665022 \ - --hash=sha256:9370c23a3f609280c3f2296d61d34dd32afd7a1c9b19e4e29cc35cb2e2544363 \ - --hash=sha256:94acd51ea1206460c20dea764c59222e62c45ae8a486f22024f063d11a7bca88 \ - 
--hash=sha256:9add619a825d6f176774110d79dc3137f353a236c1e3bcd6e063ca6d93d6e0ae \ - --hash=sha256:9cd5ab15b12a37f0e9994d8847d5670da936a93d98672c442a956fab34ea0c15 \ - --hash=sha256:a252fa03ea00986f03100379f11e15d381103a09667900fb0fa2076cec19081a \ - --hash=sha256:a63f44ffa81292f183656234c7f2848653ff45c17d867db83c9335119e28aafa \ - --hash=sha256:b52692a28d0d958ebfabcf8bfce5fcf2c8582967310d35e6111a6e2d4db96659 \ - --hash=sha256:c3445dce908fd4971ce9bb5fef5742e26c984027676e3dcf24875fbed1ff7e4c \ - --hash=sha256:c8d6176a667d2db0132d133dad6bbf965f915f3071559342ca2cdbbec537ed12 \ - --hash=sha256:ca4e91aa854758040fa6fe7036fbe7f90a36a7d283fa1df8587b6f73084fc997 \ - --hash=sha256:cdae6e632d222d8db7cb98d7cecb45597c21b8e3841d0c98d4fca79c49dad04b \ - --hash=sha256:d12ffe199ff677a37abfb6b21aba1407eb02246dc7e6bcaf4f8e24a195ec4ad6 \ - --hash=sha256:d894c48f682b0061fdb2c983d5e64c30334db6ce0783560dbbb9df0163179c0c \ - --hash=sha256:e635eadb6043d5f967450af27125811c6ccc7eeb4d8c5fd4f1bece9d96418781 \ - --hash=sha256:e7d9a796c2f3d5b71dd06b7578e8d1fb1c031d2eb8d59e7b40e288752ae1b210 \ - --hash=sha256:fa519d4391326329f37860c2f2aaf80cb11a6122d14afa2f4f00dde6fcfa7ae4 +libcst==1.8.2 \ + --hash=sha256:08a8c7d9922ca6eed24e2c13a3c552b3c186af8fc78e5d4820b58487d780ec19 \ + --hash=sha256:08e9dca4ab6f8551794ce7ec146f86def6a82da41750cbed2c07551345fa10d3 \ + --hash=sha256:0be639f5b2e1999a4b4a82a0f4633969f97336f052d0c131627983589af52f56 \ + --hash=sha256:0f23409add2aaebbb6d8e881babab43c2d979f051b8bd8aed5fe779ea180a4e8 \ + --hash=sha256:11ea148902e3e1688afa392087c728ac3a843e54a87d334d1464d2097d3debb7 \ + --hash=sha256:1ba85f9e6a7f37ef998168aa3fd28d263d7f83016bd306a4508a2394e5e793b4 \ + --hash=sha256:207575dec2dae722acf6ab39b4b361151c65f8f895fd37edf9d384f5541562e1 \ + --hash=sha256:22c9473a2cc53faabcc95a0ac6ca4e52d127017bf34ba9bc0f8e472e44f7b38e \ + --hash=sha256:2b5c57a3c1976c365678eb0730bcb140d40510990cb77df9a91bb5c41d587ba6 \ + 
--hash=sha256:2e264307ec49b2c72480422abafe80457f90b4e6e693b7ddf8a23d24b5c24001 \ + --hash=sha256:2e8c1dfa854e700fcf6cd79b2796aa37d55697a74646daf5ea47c7c764bac31c \ + --hash=sha256:36d5ab95f39f855521585b0e819dc2d4d1b2a4080bad04c2f3de1e387a5d2233 \ + --hash=sha256:3ece08ba778b6eeea74d9c705e9af2d1b4e915e9bc6de67ad173b962e575fcc0 \ + --hash=sha256:41613fe08e647213546c7c59a5a1fc5484666e7d4cab6e80260c612acbb20e8c \ + --hash=sha256:43ccaa6c54daa1749cec53710c70d47150965574d4c6d4c4f2e3f87b9bf9f591 \ + --hash=sha256:449f9ff8a5025dcd5c8d4ad28f6c291de5de89e4c044b0bda96b45bef8999b75 \ + --hash=sha256:460fcf3562f078781e1504983cb11909eb27a1d46eaa99e65c4b0fafdc298298 \ + --hash=sha256:4f14f5045766646ed9e8826b959c6d07194788babed1e0ba08c94ea4f39517e3 \ + --hash=sha256:51bbafdd847529e8a16d1965814ed17831af61452ee31943c414cb23451de926 \ + --hash=sha256:52a1067cf31d9e9e4be514b253bea6276f1531dd7de6ab0917df8ce5b468a820 \ + --hash=sha256:59e8f611c977206eba294c296c2d29a1c1b1b88206cb97cd0d4847c1a3d923e7 \ + --hash=sha256:5c513e64eff0f7bf2a908e2d987a98653eb33e1062ce2afd3a84af58159a24f9 \ + --hash=sha256:5efd1bf6ee5840d1b0b82ec8e0b9c64f182fa5a7c8aad680fbd918c4fa3826e0 \ + --hash=sha256:66e82cedba95a6176194a817be4232c720312f8be6d2c8f3847f3317d95a0c7f \ + --hash=sha256:6753e50904e05c27915933da41518ecd7a8ca4dd3602112ba44920c6e353a455 \ + --hash=sha256:67d9720d91f507c87b3e5f070627ad640a00bc6cfdf5635f8c6ee9f2964cf71c \ + --hash=sha256:688a03bac4dfb9afc5078ec01d53c21556381282bdf1a804dd0dbafb5056de2a \ + --hash=sha256:706d07106af91c343150be86caeae1ea3851b74aa0730fcbbf8cd089e817f818 \ + --hash=sha256:7fe762c4c390039b79b818cbc725d8663586b25351dc18a2704b0e357d69b924 \ + --hash=sha256:8310521f2ccb79b5c4345750d475b88afa37bad930ab5554735f85ad5e3add30 \ + --hash=sha256:8a81d816c2088d2055112af5ecd82fdfbe8ff277600e94255e2639b07de10234 \ + --hash=sha256:94b7c032b72566077614a02baab1929739fd0af0cc1d46deaba4408b870faef2 \ + --hash=sha256:96e2363e1f6e44bd7256bbbf3a53140743f821b5133046e6185491e0d9183447 \ + 
--hash=sha256:9c2bd4ac288a9cdb7ffc3229a9ce8027a66a3fd3f2ab9e13da60f5fbfe91f3b2 \ + --hash=sha256:a50618f4819a97ef897e055ac7aaf1cad5df84c206f33be35b0759d671574197 \ + --hash=sha256:a553d452004e44b841788f6faa7231a02157527ddecc89dbbe5b689b74822226 \ + --hash=sha256:a5d5519962ce7c72d81888fb0c09e58e308ba4c376e76bcd853b48151063d6a8 \ + --hash=sha256:a70e40ce7600e1b32e293bb9157e9de3b69170e2318ccb219102f1abb826c94a \ + --hash=sha256:ae22376633cfa3db21c4eed2870d1c36b5419289975a41a45f34a085b2d9e6ea \ + --hash=sha256:b0110140738be1287e3724080a101e7cec6ae708008b7650c9d8a1c1788ec03a \ + --hash=sha256:b5269b96367e65793a7714608f6d906418eb056d59eaac9bba980486aabddbed \ + --hash=sha256:b62aa11d6b74ed5545e58ac613d3f63095e5fd0254b3e0d1168fda991b9a6b41 \ + --hash=sha256:b68ea4a6018abfea1f68d50f74de7d399172684c264eb09809023e2c8696fc23 \ + --hash=sha256:b88e9104c456590ad0ef0e82851d4fc03e9aa9d621fa8fdd4cd0907152a825ae \ + --hash=sha256:bba7c2b5063e8ada5a5477f9fa0c01710645426b5a8628ec50d558542a0a292e \ + --hash=sha256:be821d874ce8b26cbadd7277fa251a9b37f6d2326f8b5682b6fc8966b50a3a59 \ + --hash=sha256:c1381ddbd1066d543e05d580c15beacf671e1469a0b2adb6dba58fec311f4eed \ + --hash=sha256:c34060ff2991707c710250463ae9f415ebb21653f2f5b013c61c9c376ff9b715 \ + --hash=sha256:d11992561de0ad29ec2800230fbdcbef9efaa02805d5c633a73ab3cf2ba51bf1 \ + --hash=sha256:d20e932ddd9a389da57b060c26e84a24118c96ff6fc5dcc7b784da24e823b694 \ + --hash=sha256:d2194ae959630aae4176a4b75bd320b3274c20bef2a5ca6b8d6fc96d3c608edf \ + --hash=sha256:d97c9fe13aacfbefded6861f5200dcb8e837da7391a9bdeb44ccb133705990af \ + --hash=sha256:da2d8b008aff72acd5a4a588491abdda1b446f17508e700f26df9be80d8442ae \ + --hash=sha256:dd4310ea8ddc49cc8872e083737cf806299b17f93159a1f354d59aa08993e876 \ + --hash=sha256:e5ba3ea570c8fb6fc44f71aa329edc7c668e2909311913123d0d7ab8c65fc357 \ + --hash=sha256:e9bb599c175dc34a4511f0e26d5b5374fbcc91ea338871701a519e95d52f3c28 \ + --hash=sha256:f5391d71bd7e9e6c73dcb3ee8d8c63b09efc14ce6e4dad31568d4838afc9aae0 \ + 
--hash=sha256:f54f5c4176d60e7cd6b0880e18fb3fa8501ae046069151721cab457c7c538a3d \ + --hash=sha256:f69582e24667715e3860d80d663f1caeb2398110077e23cc0a1e0066a851f5ab \ + --hash=sha256:f74b0bc7378ad5afcf25ac9d0367b4dbba50f6f6468faa41f5dfddcf8bf9c0f8 \ + --hash=sha256:fa3b807c2d2b34397c135d19ad6abb20c47a2ddb7bf65d90455f2040f7797e1e # via -r requirements.in markupsafe==3.0.2 \ --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ @@ -521,111 +537,117 @@ markupsafe==3.0.2 \ # via # -r requirements.in # jinja2 -multidict==6.4.3 \ - --hash=sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756 \ - --hash=sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8 \ - --hash=sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef \ - --hash=sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c \ - --hash=sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5 \ - --hash=sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8 \ - --hash=sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db \ - --hash=sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713 \ - --hash=sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44 \ - --hash=sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378 \ - --hash=sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5 \ - --hash=sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676 \ - --hash=sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08 \ - --hash=sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea \ - --hash=sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9 \ - --hash=sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9 \ - --hash=sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e \ - 
--hash=sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b \ - --hash=sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508 \ - --hash=sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1 \ - --hash=sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852 \ - --hash=sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac \ - --hash=sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde \ - --hash=sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8 \ - --hash=sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504 \ - --hash=sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5 \ - --hash=sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02 \ - --hash=sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4 \ - --hash=sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec \ - --hash=sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a \ - --hash=sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666 \ - --hash=sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc \ - --hash=sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf \ - --hash=sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790 \ - --hash=sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8 \ - --hash=sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589 \ - --hash=sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d \ - --hash=sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07 \ - --hash=sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56 \ - --hash=sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21 \ - --hash=sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7 \ - 
--hash=sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9 \ - --hash=sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343 \ - --hash=sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9 \ - --hash=sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4 \ - --hash=sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a \ - --hash=sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427 \ - --hash=sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459 \ - --hash=sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6 \ - --hash=sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208 \ - --hash=sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229 \ - --hash=sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0 \ - --hash=sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474 \ - --hash=sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817 \ - --hash=sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd \ - --hash=sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618 \ - --hash=sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5 \ - --hash=sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3 \ - --hash=sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124 \ - --hash=sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1 \ - --hash=sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb \ - --hash=sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7 \ - --hash=sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3 \ - --hash=sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375 \ - --hash=sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39 \ - 
--hash=sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752 \ - --hash=sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0 \ - --hash=sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188 \ - --hash=sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451 \ - --hash=sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078 \ - --hash=sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7 \ - --hash=sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7 \ - --hash=sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f \ - --hash=sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b \ - --hash=sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f \ - --hash=sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c \ - --hash=sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291 \ - --hash=sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897 \ - --hash=sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887 \ - --hash=sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1 \ - --hash=sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685 \ - --hash=sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf \ - --hash=sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6 \ - --hash=sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731 \ - --hash=sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507 \ - --hash=sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b \ - --hash=sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae \ - --hash=sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777 \ - --hash=sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7 \ - 
--hash=sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be \ - --hash=sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df \ - --hash=sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054 \ - --hash=sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2 \ - --hash=sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124 \ - --hash=sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c \ - --hash=sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840 \ - --hash=sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8 \ - --hash=sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd \ - --hash=sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8 \ - --hash=sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3 \ - --hash=sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e \ - --hash=sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2 \ - --hash=sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1 \ - --hash=sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad +multidict==6.6.4 \ + --hash=sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9 \ + --hash=sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729 \ + --hash=sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5 \ + --hash=sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e \ + --hash=sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138 \ + --hash=sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495 \ + --hash=sha256:0b2e886624be5773e69cf32bcb8534aecdeb38943520b240fed3d5596a430f2f \ + --hash=sha256:0c5cbac6b55ad69cb6aa17ee9343dfbba903118fd530348c330211dc7aa756d1 \ + 
--hash=sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e \ + --hash=sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6 \ + --hash=sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8 \ + --hash=sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded \ + --hash=sha256:10a68a9191f284fe9d501fef4efe93226e74df92ce7a24e301371293bd4918ae \ + --hash=sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69 \ + --hash=sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364 \ + --hash=sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f \ + --hash=sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f \ + --hash=sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e \ + --hash=sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3 \ + --hash=sha256:21f216669109e02ef3e2415ede07f4f8987f00de8cdfa0cc0b3440d42534f9f0 \ + --hash=sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657 \ + --hash=sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c \ + --hash=sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb \ + --hash=sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7 \ + --hash=sha256:350f6b0fe1ced61e778037fdc7613f4051c8baf64b1ee19371b42a3acdb016a0 \ + --hash=sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d \ + --hash=sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b \ + --hash=sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141 \ + --hash=sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf \ + --hash=sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f \ + --hash=sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf \ + --hash=sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f \ + 
--hash=sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24 \ + --hash=sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a \ + --hash=sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa \ + --hash=sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f \ + --hash=sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b \ + --hash=sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0 \ + --hash=sha256:4d09384e75788861e046330308e7af54dd306aaf20eb760eb1d0de26b2bea2cb \ + --hash=sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d \ + --hash=sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879 \ + --hash=sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c \ + --hash=sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a \ + --hash=sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d \ + --hash=sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812 \ + --hash=sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da \ + --hash=sha256:630f70c32b8066ddfd920350bc236225814ad94dfa493fe1910ee17fe4365cbb \ + --hash=sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e \ + --hash=sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287 \ + --hash=sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb \ + --hash=sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb \ + --hash=sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4 \ + --hash=sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad \ + --hash=sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f \ + --hash=sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395 \ + --hash=sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5 \ + 
--hash=sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0 \ + --hash=sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793 \ + --hash=sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e \ + --hash=sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db \ + --hash=sha256:8e42332cf8276bb7645d310cdecca93a16920256a5b01bebf747365f86a1675b \ + --hash=sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c \ + --hash=sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45 \ + --hash=sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987 \ + --hash=sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796 \ + --hash=sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92 \ + --hash=sha256:a59c63061f1a07b861c004e53869eb1211ffd1a4acbca330e3322efa6dd02978 \ + --hash=sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802 \ + --hash=sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438 \ + --hash=sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6 \ + --hash=sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a \ + --hash=sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace \ + --hash=sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f \ + --hash=sha256:af7618b591bae552b40dbb6f93f5518328a949dac626ee75927bba1ecdeea9f4 \ + --hash=sha256:b6819f83aef06f560cb15482d619d0e623ce9bf155115150a85ab11b8342a665 \ + --hash=sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f \ + --hash=sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402 \ + --hash=sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9 \ + --hash=sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb \ + --hash=sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7 \ + 
--hash=sha256:be5bf4b3224948032a845d12ab0f69f208293742df96dc14c4ff9b09e508fc17 \ + --hash=sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb \ + --hash=sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c \ + --hash=sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877 \ + --hash=sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683 \ + --hash=sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e \ + --hash=sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0 \ + --hash=sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3 \ + --hash=sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8 \ + --hash=sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd \ + --hash=sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e \ + --hash=sha256:d9890d68c45d1aeac5178ded1d1cccf3bc8d7accf1f976f79bf63099fb16e4bd \ + --hash=sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0 \ + --hash=sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7 \ + --hash=sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7 \ + --hash=sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52 \ + --hash=sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0 \ + --hash=sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50 \ + --hash=sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb \ + --hash=sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2 \ + --hash=sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6 \ + --hash=sha256:edfdcae97cdc5d1a89477c436b61f472c4d40971774ac4729c613b4b133163cb \ + --hash=sha256:ee25f82f53262f9ac93bd7e58e47ea1bdcc3393cef815847e397cba17e284210 \ + --hash=sha256:f3be27440f7644ab9a13a6fc86f09cdd90b347c3c5e30c6d6d860de822d7cb53 \ + 
--hash=sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e \ + --hash=sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605 \ + --hash=sha256:f8d4916a81697faec6cb724a273bd5457e4c6c43d82b29f9dc02c5542fd21fc9 \ + --hash=sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e \ + --hash=sha256:f9867e55590e0855bcec60d4f9a092b69476db64573c9fe17e92b0c50614c16a \ + --hash=sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773 # via # aiohttp # yarl @@ -637,105 +659,105 @@ pluggy==1.6.0 \ --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 # via pytest -propcache==0.3.1 \ - --hash=sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e \ - --hash=sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b \ - --hash=sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf \ - --hash=sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b \ - --hash=sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5 \ - --hash=sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c \ - --hash=sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c \ - --hash=sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a \ - --hash=sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf \ - --hash=sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8 \ - --hash=sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5 \ - --hash=sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42 \ - --hash=sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035 \ - --hash=sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0 \ - --hash=sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e \ - 
--hash=sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46 \ - --hash=sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d \ - --hash=sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24 \ - --hash=sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d \ - --hash=sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de \ - --hash=sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf \ - --hash=sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7 \ - --hash=sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371 \ - --hash=sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833 \ - --hash=sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259 \ - --hash=sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136 \ - --hash=sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25 \ - --hash=sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005 \ - --hash=sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef \ - --hash=sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7 \ - --hash=sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f \ - --hash=sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53 \ - --hash=sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0 \ - --hash=sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb \ - --hash=sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566 \ - --hash=sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a \ - --hash=sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908 \ - --hash=sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf \ - --hash=sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458 \ - 
--hash=sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64 \ - --hash=sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9 \ - --hash=sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71 \ - --hash=sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b \ - --hash=sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5 \ - --hash=sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037 \ - --hash=sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5 \ - --hash=sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894 \ - --hash=sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe \ - --hash=sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757 \ - --hash=sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3 \ - --hash=sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976 \ - --hash=sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6 \ - --hash=sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641 \ - --hash=sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7 \ - --hash=sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649 \ - --hash=sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120 \ - --hash=sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd \ - --hash=sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40 \ - --hash=sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e \ - --hash=sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229 \ - --hash=sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c \ - --hash=sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7 \ - --hash=sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111 \ - 
--hash=sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654 \ - --hash=sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f \ - --hash=sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294 \ - --hash=sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da \ - --hash=sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f \ - --hash=sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7 \ - --hash=sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0 \ - --hash=sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073 \ - --hash=sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7 \ - --hash=sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11 \ - --hash=sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f \ - --hash=sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27 \ - --hash=sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70 \ - --hash=sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7 \ - --hash=sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519 \ - --hash=sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5 \ - --hash=sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180 \ - --hash=sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f \ - --hash=sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee \ - --hash=sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18 \ - --hash=sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815 \ - --hash=sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e \ - --hash=sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a \ - --hash=sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7 \ - 
--hash=sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6 \ - --hash=sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c \ - --hash=sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc \ - --hash=sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8 \ - --hash=sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98 \ - --hash=sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256 \ - --hash=sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5 \ - --hash=sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744 \ - --hash=sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723 \ - --hash=sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277 \ - --hash=sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5 +propcache==0.3.2 \ + --hash=sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c \ + --hash=sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81 \ + --hash=sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f \ + --hash=sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6 \ + --hash=sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535 \ + --hash=sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be \ + --hash=sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba \ + --hash=sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3 \ + --hash=sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0 \ + --hash=sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4 \ + --hash=sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168 \ + --hash=sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b \ + 
--hash=sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea \ + --hash=sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770 \ + --hash=sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2 \ + --hash=sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892 \ + --hash=sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154 \ + --hash=sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf \ + --hash=sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb \ + --hash=sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1 \ + --hash=sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef \ + --hash=sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe \ + --hash=sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897 \ + --hash=sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3 \ + --hash=sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70 \ + --hash=sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330 \ + --hash=sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44 \ + --hash=sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0 \ + --hash=sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88 \ + --hash=sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1 \ + --hash=sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3 \ + --hash=sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43 \ + --hash=sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4 \ + --hash=sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1 \ + --hash=sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220 \ + --hash=sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7 \ + 
--hash=sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9 \ + --hash=sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50 \ + --hash=sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e \ + --hash=sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2 \ + --hash=sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66 \ + --hash=sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1 \ + --hash=sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb \ + --hash=sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe \ + --hash=sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c \ + --hash=sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7 \ + --hash=sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9 \ + --hash=sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e \ + --hash=sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701 \ + --hash=sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9 \ + --hash=sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8 \ + --hash=sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b \ + --hash=sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f \ + --hash=sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e \ + --hash=sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02 \ + --hash=sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e \ + --hash=sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1 \ + --hash=sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10 \ + --hash=sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387 \ + --hash=sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198 \ + 
--hash=sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f \ + --hash=sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b \ + --hash=sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e \ + --hash=sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614 \ + --hash=sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252 \ + --hash=sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9 \ + --hash=sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5 \ + --hash=sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c \ + --hash=sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1 \ + --hash=sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770 \ + --hash=sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339 \ + --hash=sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251 \ + --hash=sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db \ + --hash=sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf \ + --hash=sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95 \ + --hash=sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df \ + --hash=sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2 \ + --hash=sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945 \ + --hash=sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474 \ + --hash=sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b \ + --hash=sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615 \ + --hash=sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06 \ + --hash=sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33 \ + --hash=sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec \ + 
--hash=sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886 \ + --hash=sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb \ + --hash=sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1 \ + --hash=sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05 \ + --hash=sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d \ + --hash=sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39 \ + --hash=sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67 \ + --hash=sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e \ + --hash=sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28 \ + --hash=sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a \ + --hash=sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394 \ + --hash=sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725 \ + --hash=sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c \ + --hash=sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206 # via # aiohttp # yarl @@ -745,16 +767,16 @@ proto-plus==1.26.1 \ # via # -r requirements.in # google-api-core -protobuf==6.31.1 \ - --hash=sha256:0414e3aa5a5f3ff423828e1e6a6e907d6c65c1d5b7e6e975793d5590bdeecc16 \ - --hash=sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447 \ - --hash=sha256:4ee898bf66f7a8b0bd21bce523814e6fbd8c6add948045ce958b73af7e8878c6 \ - --hash=sha256:6f1227473dc43d44ed644425268eb7c2e488ae245d51c6866d19fe158e207402 \ - --hash=sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e \ - --hash=sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9 \ - --hash=sha256:8764cf4587791e7564051b35524b72844f845ad0bb011704c3736cce762d8fe9 \ - --hash=sha256:a40fc12b84c154884d7d4c4ebd675d5b3b5283e155f324049ae396b95ddebc39 \ - 
--hash=sha256:d8cac4c982f0b957a4dc73a80e2ea24fab08e679c0de9deb835f4a12d69aca9a +protobuf==6.32.0 \ + --hash=sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3 \ + --hash=sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1 \ + --hash=sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c \ + --hash=sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb \ + --hash=sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741 \ + --hash=sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2 \ + --hash=sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e \ + --hash=sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783 \ + --hash=sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0 # via # -r requirements.in # google-api-core @@ -771,17 +793,21 @@ pyasn1-modules==0.4.2 \ --hash=sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a \ --hash=sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6 # via google-auth +pygments==2.19.2 \ + --hash=sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887 \ + --hash=sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b + # via pytest pypandoc==1.15 \ --hash=sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16 \ --hash=sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13 # via -r requirements.in -pytest==8.3.5 \ - --hash=sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820 \ - --hash=sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845 +pytest==8.4.1 \ + --hash=sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7 \ + --hash=sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c # via pytest-asyncio -pytest-asyncio==0.26.0 \ - 
--hash=sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0 \ - --hash=sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f +pytest-asyncio==1.1.0 \ + --hash=sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf \ + --hash=sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea # via -r requirements.in pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ @@ -840,9 +866,9 @@ pyyaml==6.0.2 \ # via # -r requirements.in # libcst -requests==2.32.4 \ - --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ - --hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 +requests==2.32.5 \ + --hash=sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6 \ + --hash=sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf # via google-api-core rsa==4.9.1 \ --hash=sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762 \ @@ -882,121 +908,122 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via pytest -typing-extensions==4.13.2 \ - --hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \ - --hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef +typing-extensions==4.15.0 \ + --hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \ + --hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548 # via # aiosignal # exceptiongroup + # libcst # multidict # pytest-asyncio urllib3==2.5.0 \ --hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \ --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc # via requests -yarl==1.20.0 \ - --hash=sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9 \ - 
--hash=sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa \ - --hash=sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61 \ - --hash=sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2 \ - --hash=sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2 \ - --hash=sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33 \ - --hash=sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902 \ - --hash=sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2 \ - --hash=sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914 \ - --hash=sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0 \ - --hash=sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0 \ - --hash=sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569 \ - --hash=sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f \ - --hash=sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7 \ - --hash=sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20 \ - --hash=sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00 \ - --hash=sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1 \ - --hash=sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc \ - --hash=sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f \ - --hash=sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a \ - --hash=sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd \ - --hash=sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c \ - --hash=sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f \ - --hash=sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5 \ - --hash=sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d \ - 
--hash=sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501 \ - --hash=sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3 \ - --hash=sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0 \ - --hash=sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26 \ - --hash=sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2 \ - --hash=sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c \ - --hash=sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c \ - --hash=sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae \ - --hash=sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de \ - --hash=sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a \ - --hash=sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe \ - --hash=sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8 \ - --hash=sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124 \ - --hash=sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb \ - --hash=sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc \ - --hash=sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2 \ - --hash=sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac \ - --hash=sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307 \ - --hash=sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58 \ - --hash=sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e \ - --hash=sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e \ - --hash=sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62 \ - --hash=sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b \ - --hash=sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe \ - 
--hash=sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda \ - --hash=sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5 \ - --hash=sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64 \ - --hash=sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229 \ - --hash=sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62 \ - --hash=sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6 \ - --hash=sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d \ - --hash=sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791 \ - --hash=sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672 \ - --hash=sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb \ - --hash=sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94 \ - --hash=sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a \ - --hash=sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10 \ - --hash=sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e \ - --hash=sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9 \ - --hash=sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5 \ - --hash=sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da \ - --hash=sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c \ - --hash=sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a \ - --hash=sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d \ - --hash=sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195 \ - --hash=sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594 \ - --hash=sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8 \ - --hash=sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634 \ - 
--hash=sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051 \ - --hash=sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8 \ - --hash=sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e \ - --hash=sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384 \ - --hash=sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076 \ - --hash=sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656 \ - --hash=sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018 \ - --hash=sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19 \ - --hash=sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a \ - --hash=sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4 \ - --hash=sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2 \ - --hash=sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64 \ - --hash=sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145 \ - --hash=sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7 \ - --hash=sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995 \ - --hash=sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6 \ - --hash=sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f \ - --hash=sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f \ - --hash=sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487 \ - --hash=sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9 \ - --hash=sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a \ - --hash=sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d \ - --hash=sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f \ - --hash=sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1 \ - 
--hash=sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22 \ - --hash=sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d \ - --hash=sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c \ - --hash=sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877 \ - --hash=sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5 \ - --hash=sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867 \ - --hash=sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3 +yarl==1.20.1 \ + --hash=sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845 \ + --hash=sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53 \ + --hash=sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a \ + --hash=sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed \ + --hash=sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2 \ + --hash=sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02 \ + --hash=sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf \ + --hash=sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010 \ + --hash=sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3 \ + --hash=sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef \ + --hash=sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04 \ + --hash=sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23 \ + --hash=sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e \ + --hash=sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6 \ + --hash=sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e \ + --hash=sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a \ + 
--hash=sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a \ + --hash=sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8 \ + --hash=sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805 \ + --hash=sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2 \ + --hash=sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458 \ + --hash=sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc \ + --hash=sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d \ + --hash=sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b \ + --hash=sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73 \ + --hash=sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7 \ + --hash=sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309 \ + --hash=sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e \ + --hash=sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698 \ + --hash=sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c \ + --hash=sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691 \ + --hash=sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16 \ + --hash=sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f \ + --hash=sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f \ + --hash=sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004 \ + --hash=sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3 \ + --hash=sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240 \ + --hash=sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28 \ + --hash=sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513 \ + --hash=sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773 \ + 
--hash=sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba \ + --hash=sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4 \ + --hash=sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e \ + --hash=sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1 \ + --hash=sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31 \ + --hash=sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16 \ + --hash=sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819 \ + --hash=sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d \ + --hash=sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3 \ + --hash=sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8 \ + --hash=sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf \ + --hash=sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723 \ + --hash=sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13 \ + --hash=sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1 \ + --hash=sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b \ + --hash=sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f \ + --hash=sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d \ + --hash=sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30 \ + --hash=sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77 \ + --hash=sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a \ + --hash=sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389 \ + --hash=sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e \ + --hash=sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e \ + --hash=sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c \ + 
--hash=sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1 \ + --hash=sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833 \ + --hash=sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b \ + --hash=sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee \ + --hash=sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000 \ + --hash=sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38 \ + --hash=sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8 \ + --hash=sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd \ + --hash=sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16 \ + --hash=sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d \ + --hash=sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a \ + --hash=sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06 \ + --hash=sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb \ + --hash=sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4 \ + --hash=sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9 \ + --hash=sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8 \ + --hash=sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390 \ + --hash=sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8 \ + --hash=sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be \ + --hash=sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c \ + --hash=sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac \ + --hash=sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b \ + --hash=sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5 \ + --hash=sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4 \ + 
--hash=sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e \ + --hash=sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f \ + --hash=sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee \ + --hash=sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5 \ + --hash=sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70 \ + --hash=sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1 \ + --hash=sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24 \ + --hash=sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653 \ + --hash=sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3 \ + --hash=sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00 \ + --hash=sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983 \ + --hash=sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d \ + --hash=sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7 \ + --hash=sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce \ + --hash=sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e \ + --hash=sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5 # via aiohttp From 4bf299fe69fc9ac6481a9118ec1e39d8e6daca92 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 29 Aug 2025 19:44:02 -0400 Subject: [PATCH 1301/1339] docs: minor formatting improvements (#2432) --- packages/gapic-generator/BUILD.bazel | 28 +- packages/gapic-generator/repositories.bzl | 43 +- .../services/asset_service/async_client.py | 272 +++++---- .../asset_v1/services/asset_service/client.py | 272 +++++---- .../services/asset_service/transports/grpc.py | 20 +- .../asset_service/transports/grpc_asyncio.py | 20 +- .../cloud/asset_v1/types/asset_service.py | 560 +++++++++--------- .../google/cloud/asset_v1/types/assets.py | 255 ++++---- 
.../services/eventarc/async_client.py | 6 +- .../eventarc_v1/services/eventarc/client.py | 6 +- .../cloud/eventarc_v1/types/eventarc.py | 6 +- .../config_service_v2/async_client.py | 10 +- .../services/config_service_v2/client.py | 10 +- .../logging_service_v2/async_client.py | 40 +- .../services/logging_service_v2/client.py | 40 +- .../cloud/logging_v2/types/log_entry.py | 8 +- .../google/cloud/logging_v2/types/logging.py | 72 +-- .../cloud/logging_v2/types/logging_config.py | 22 +- .../config_service_v2/async_client.py | 10 +- .../services/config_service_v2/client.py | 10 +- .../logging_service_v2/async_client.py | 40 +- .../services/logging_service_v2/client.py | 40 +- .../cloud/logging_v2/types/log_entry.py | 8 +- .../google/cloud/logging_v2/types/logging.py | 72 +-- .../cloud/logging_v2/types/logging_config.py | 22 +- .../services/cloud_redis/async_client.py | 42 +- .../redis_v1/services/cloud_redis/client.py | 42 +- .../services/cloud_redis/transports/grpc.py | 20 +- .../cloud_redis/transports/grpc_asyncio.py | 20 +- .../services/cloud_redis/transports/rest.py | 18 +- .../cloud_redis/transports/rest_asyncio.py | 18 +- .../cloud/redis_v1/types/cloud_redis.py | 45 +- .../services/cloud_redis/async_client.py | 42 +- .../redis_v1/services/cloud_redis/client.py | 42 +- .../services/cloud_redis/transports/grpc.py | 20 +- .../cloud_redis/transports/grpc_asyncio.py | 20 +- .../services/cloud_redis/transports/rest.py | 18 +- .../cloud_redis/transports/rest_asyncio.py | 18 +- .../cloud/redis_v1/types/cloud_redis.py | 45 +- 39 files changed, 1172 insertions(+), 1130 deletions(-) diff --git a/packages/gapic-generator/BUILD.bazel b/packages/gapic-generator/BUILD.bazel index 7dab7fa25386..4062c585ca37 100644 --- a/packages/gapic-generator/BUILD.bazel +++ b/packages/gapic-generator/BUILD.bazel @@ -7,12 +7,28 @@ toolchain_type( visibility = ["//visibility:public"], ) +pandoc_toolchain( + exec_compatible_with = [ + "@platforms//os:linux", + "@platforms//cpu:arm64", + ], + 
platform = "linux_arm_64", +) + pandoc_toolchain( exec_compatible_with = [ "@platforms//os:linux", "@platforms//cpu:x86_64", ], - platform = "linux", + platform = "linux_x86_64", +) + +pandoc_toolchain( + exec_compatible_with = [ + "@platforms//os:osx", + "@platforms//cpu:arm64", + ], + platform = "macOS_arm_64", ) pandoc_toolchain( @@ -20,7 +36,15 @@ pandoc_toolchain( "@platforms//os:osx", "@platforms//cpu:x86_64", ], - platform = "macOS", + platform = "macOS_x86_64", +) + +pandoc_toolchain( + exec_compatible_with = [ + "@platforms//os:windows", + "@platforms//cpu:x86_64", + ], + platform = "windows_x86_64", ) pandoc_binary( diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index b79bc62f98ee..480ae74586ce 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -39,18 +39,42 @@ def gapic_generator_python(): _maybe( http_archive, - name = "pandoc_linux", + name = "pandoc_linux_arm_64", build_file_content = _PANDOC_BUILD_FILE, - strip_prefix = "pandoc-2.2.1", - url = "https://github.com/jgm/pandoc/releases/download/2.2.1/pandoc-2.2.1-linux.tar.gz", + strip_prefix = "pandoc-3.7.0.2", + url = "https://github.com/jgm/pandoc/releases/download/3.7.0.2/pandoc-3.7.0.2-linux-arm64.tar.gz", ) _maybe( http_archive, - name = "pandoc_macOS", + name = "pandoc_linux_x86_64", build_file_content = _PANDOC_BUILD_FILE, - strip_prefix = "pandoc-2.2.1", - url = "https://github.com/jgm/pandoc/releases/download/2.2.1/pandoc-2.2.1-macOS.zip", + strip_prefix = "pandoc-3.7.0.2", + url = "https://github.com/jgm/pandoc/releases/download/3.7.0.2/pandoc-3.7.0.2-linux-amd64.tar.gz", + ) + + _maybe( + http_archive, + name = "pandoc_macOS_arm_64", + build_file_content = _PANDOC_BUILD_FILE, + strip_prefix = "pandoc-3.7.0.2", + url = "https://github.com/jgm/pandoc/releases/download/3.7.0.2/pandoc-3.7.0.2-arm64-macOS.zip", + ) + + _maybe( + http_archive, + name = "pandoc_macOS_x86_64", + 
build_file_content = _PANDOC_BUILD_FILE, + strip_prefix = "pandoc-3.7.0.2", + url = "https://github.com/jgm/pandoc/releases/download/3.7.0.2/pandoc-3.7.0.2-x86_64-macOS.zip", + ) + + _maybe( + http_archive, + name = "pandoc_windows_x86_64", + build_file_content = _PANDOC_BUILD_FILE, + strip_prefix = "pandoc-3.7.0.2", + url = "https://github.com/jgm/pandoc/releases/download/3.7.0.2/pandoc-3.7.0.2-windows-x86_64.zip", ) _rules_gapic_version = "0.5.4" @@ -70,8 +94,11 @@ def gapic_generator_python(): def gapic_generator_register_toolchains(): native.register_toolchains( - "@gapic_generator_python//:pandoc_toolchain_linux", - "@gapic_generator_python//:pandoc_toolchain_macOS", + "@gapic_generator_python//:pandoc_toolchain_linux_arm_64", + "@gapic_generator_python//:pandoc_toolchain_linux_x86_64", + "@gapic_generator_python//:pandoc_toolchain_macOS_arm_64", + "@gapic_generator_python//:pandoc_toolchain_macOS_x86_64", + "@gapic_generator_python//:pandoc_toolchain_windows_x86_64", ) def _maybe(repo_rule, name, strip_repo_prefix = "", **kwargs): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 262069a990e4..38379ce9d719 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -1213,11 +1213,11 @@ async def sample_search_all_resources(): The allowed values are: - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - 
projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -1230,64 +1230,64 @@ async def sample_search_all_resources(): Examples: - - ``name:Important`` to find Google Cloud resources - whose name contains "Important" as a word. - - ``name=Important`` to find the Google Cloud resource - whose name is exactly "Important". - - ``displayName:Impor*`` to find Google Cloud resources - whose display name contains "Impor" as a prefix of - any word in the field. - - ``location:us-west*`` to find Google Cloud resources - whose location contains both "us" and "west" as - prefixes. - - ``labels:prod`` to find Google Cloud resources whose - labels contain "prod" as a key or value. - - ``labels.env:prod`` to find Google Cloud resources - that have a label "env" and its value is "prod". - - ``labels.env:*`` to find Google Cloud resources that - have a label "env". - - ``kmsKey:key`` to find Google Cloud resources - encrypted with a customer-managed encryption key - whose name contains "key" as a word. This field is - deprecated. Please use the ``kmsKeys`` field to - retrieve Cloud KMS key information. - - ``kmsKeys:key`` to find Google Cloud resources - encrypted with customer-managed encryption keys whose - name contains the word "key". - - ``relationships:instance-group-1`` to find Google - Cloud resources that have relationships with - "instance-group-1" in the related resource name. - - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find - Compute Engine instances that have relationships of - type "INSTANCE_TO_INSTANCEGROUP". 
- - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` - to find Compute Engine instances that have - relationships with "instance-group-1" in the Compute - Engine instance group resource name, for relationship - type "INSTANCE_TO_INSTANCEGROUP". - - ``state:ACTIVE`` to find Google Cloud resources whose - state contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find Google Cloud resources - whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Google Cloud - resources that were created before "2021-01-01 - 00:00:00 UTC". 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. - - ``updateTime>1609459200`` to find Google Cloud - resources that were updated after "2021-01-01 - 00:00:00 UTC". 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. - - ``Important`` to find Google Cloud resources that - contain "Important" as a word in any of the - searchable fields. - - ``Impor*`` to find Google Cloud resources that - contain "Impor" as a prefix of any word in any of the - searchable fields. - - ``Important location:(us-west1 OR global)`` to find - Google Cloud resources that contain "Important" as a - word in any of the searchable fields and are also - located in the "us-west1" region or the "global" - location. + - ``name:Important`` to find Google Cloud resources + whose name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of any + word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as + prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources + that have a label "env" and its value is "prod". 
+ - ``labels.env:*`` to find Google Cloud resources that + have a label "env". + - ``kmsKey:key`` to find Google Cloud resources + encrypted with a customer-managed encryption key whose + name contains "key" as a word. This field is + deprecated. Please use the ``kmsKeys`` field to + retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources + encrypted with customer-managed encryption keys whose + name contains the word "key". + - ``relationships:instance-group-1`` to find Google + Cloud resources that have relationships with + "instance-group-1" in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of + type "INSTANCE_TO_INSTANCEGROUP". + - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have + relationships with "instance-group-1" in the Compute + Engine instance group resource name, for relationship + type "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources + whose state doesn't contain "ACTIVE" as a word. + - ``createTime<1609459200`` to find Google Cloud + resources that were created before "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud + resources that were updated after "2021-01-01 00:00:00 + UTC". 1609459200 is the epoch timestamp of "2021-01-01 + 00:00:00 UTC" in seconds. + - ``Important`` to find Google Cloud resources that + contain "Important" as a word in any of the searchable + fields. + - ``Impor*`` to find Google Cloud resources that contain + "Impor" as a prefix of any word in any of the + searchable fields. 
+ - ``Important location:(us-west1 OR global)`` to find + Google Cloud resources that contain "Important" as a + word in any of the searchable fields and are also + located in the "us-west1" region or the "global" + location. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1300,12 +1300,12 @@ async def sample_search_all_resources(): Regular expressions are also supported. For example: - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type - ends with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". + - "compute.googleapis.com.\*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type ends + with "Instance". + - ".\ *Instance.*" snapshots resources whose asset type + contains "Instance". See `RE2 `__ for all supported regular expression syntax. If the @@ -1445,11 +1445,11 @@ async def sample_search_all_iam_policies(): The allowed values are: - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -1469,44 +1469,42 @@ async def sample_search_all_iam_policies(): Examples: - - ``policy:amy@gmail.com`` to find IAM policy bindings - that specify user "amy@gmail.com". - - ``policy:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. 
- - ``policy:comp*`` to find IAM policy bindings that - contain "comp" as a prefix of any word in the - binding. - - ``policy.role.permissions:storage.buckets.update`` to - find IAM policy bindings that specify a role - containing "storage.buckets.update" permission. Note - that if callers don't have ``iam.roles.get`` access - to a role's included permissions, policy bindings - that specify this role will be dropped from the - search results. - - ``policy.role.permissions:upd*`` to find IAM policy - bindings that specify a role containing "upd" as a - prefix of any word in the role permission. Note that - if callers don't have ``iam.roles.get`` access to a - role's included permissions, policy bindings that - specify this role will be dropped from the search - results. - - ``resource:organizations/123456`` to find IAM policy - bindings that are set on "organizations/123456". - - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` - to find IAM policy bindings that are set on the - project named "myproject". - - ``Important`` to find IAM policy bindings that - contain "Important" as a word in any of the - searchable fields (except for the included - permissions). - - ``resource:(instance1 OR instance2) policy:amy`` to - find IAM policy bindings that are set on resources - "instance1" or "instance2" and also specify user - "amy". - - ``roles:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``memberTypes:user`` to find IAM policy bindings that - contain the principal type "user". + - ``policy:amy@gmail.com`` to find IAM policy bindings + that specify user "amy@gmail.com". + - ``policy:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``policy:comp*`` to find IAM policy bindings that + contain "comp" as a prefix of any word in the binding. 
+ - ``policy.role.permissions:storage.buckets.update`` to + find IAM policy bindings that specify a role + containing "storage.buckets.update" permission. Note + that if callers don't have ``iam.roles.get`` access to + a role's included permissions, policy bindings that + specify this role will be dropped from the search + results. + - ``policy.role.permissions:upd*`` to find IAM policy + bindings that specify a role containing "upd" as a + prefix of any word in the role permission. Note that + if callers don't have ``iam.roles.get`` access to a + role's included permissions, policy bindings that + specify this role will be dropped from the search + results. + - ``resource:organizations/123456`` to find IAM policy + bindings that are set on "organizations/123456". + - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` + to find IAM policy bindings that are set on the + project named "myproject". + - ``Important`` to find IAM policy bindings that contain + "Important" as a word in any of the searchable fields + (except for the included permissions). + - ``resource:(instance1 OR instance2) policy:amy`` to + find IAM policy bindings that are set on resources + "instance1" or "instance2" and also specify user + "amy". + - ``roles:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``memberTypes:user`` to find IAM policy bindings that + contain the principal type "user". This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -2165,9 +2163,9 @@ async def sample_get_saved_query(): Required. 
The name of the saved query and it must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2402,9 +2400,9 @@ async def sample_update_saved_query(): The saved query's ``name`` field is used to identify the one to update, which has format as below: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id This corresponds to the ``saved_query`` field on the ``request`` instance; if ``request`` is provided, this @@ -2517,9 +2515,9 @@ async def sample_delete_saved_query(): Required. The name of the saved query to delete. It must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2708,8 +2706,8 @@ async def sample_analyze_org_policies(): Required. The organization to scope the request. Only organization policies within the scope will be analyzed. 
- - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -2866,8 +2864,8 @@ async def sample_analyze_org_policy_governed_containers(): The output containers will also be limited to the ones governed by those in-scope organization policies. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -2984,16 +2982,16 @@ async def analyze_org_policy_governed_assets(self, resources or policies) under a scope. This RPC supports custom constraints and the following 10 canned constraints: - - storage.uniformBucketLevelAccess - - iam.disableServiceAccountKeyCreation - - iam.allowedPolicyMemberDomains - - compute.vmExternalIpAccess - - appengine.enforceServiceAccountActAsCheck - - gcp.resourceLocations - - compute.trustedImageProjects - - compute.skipDefaultNetworkCreation - - compute.requireOsLogin - - compute.disableNestedVirtualization + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization This RPC only returns either resources of types supported by `searchable asset @@ -3038,8 +3036,8 @@ async def sample_analyze_org_policy_governed_assets(): The output assets will also be limited to the ones governed by those in-scope organization policies. 
- - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index d8e1793012e7..76df912ab0fb 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -1610,11 +1610,11 @@ def sample_search_all_resources(): The allowed values are: - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -1627,64 +1627,64 @@ def sample_search_all_resources(): Examples: - - ``name:Important`` to find Google Cloud resources - whose name contains "Important" as a word. - - ``name=Important`` to find the Google Cloud resource - whose name is exactly "Important". - - ``displayName:Impor*`` to find Google Cloud resources - whose display name contains "Impor" as a prefix of - any word in the field. - - ``location:us-west*`` to find Google Cloud resources - whose location contains both "us" and "west" as - prefixes. 
- - ``labels:prod`` to find Google Cloud resources whose - labels contain "prod" as a key or value. - - ``labels.env:prod`` to find Google Cloud resources - that have a label "env" and its value is "prod". - - ``labels.env:*`` to find Google Cloud resources that - have a label "env". - - ``kmsKey:key`` to find Google Cloud resources - encrypted with a customer-managed encryption key - whose name contains "key" as a word. This field is - deprecated. Please use the ``kmsKeys`` field to - retrieve Cloud KMS key information. - - ``kmsKeys:key`` to find Google Cloud resources - encrypted with customer-managed encryption keys whose - name contains the word "key". - - ``relationships:instance-group-1`` to find Google - Cloud resources that have relationships with - "instance-group-1" in the related resource name. - - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find - Compute Engine instances that have relationships of - type "INSTANCE_TO_INSTANCEGROUP". - - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` - to find Compute Engine instances that have - relationships with "instance-group-1" in the Compute - Engine instance group resource name, for relationship - type "INSTANCE_TO_INSTANCEGROUP". - - ``state:ACTIVE`` to find Google Cloud resources whose - state contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find Google Cloud resources - whose state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Google Cloud - resources that were created before "2021-01-01 - 00:00:00 UTC". 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. - - ``updateTime>1609459200`` to find Google Cloud - resources that were updated after "2021-01-01 - 00:00:00 UTC". 1609459200 is the epoch timestamp of - "2021-01-01 00:00:00 UTC" in seconds. - - ``Important`` to find Google Cloud resources that - contain "Important" as a word in any of the - searchable fields. 
- - ``Impor*`` to find Google Cloud resources that - contain "Impor" as a prefix of any word in any of the - searchable fields. - - ``Important location:(us-west1 OR global)`` to find - Google Cloud resources that contain "Important" as a - word in any of the searchable fields and are also - located in the "us-west1" region or the "global" - location. + - ``name:Important`` to find Google Cloud resources + whose name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource + whose name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of any + word in the field. + - ``location:us-west*`` to find Google Cloud resources + whose location contains both "us" and "west" as + prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources + that have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that + have a label "env". + - ``kmsKey:key`` to find Google Cloud resources + encrypted with a customer-managed encryption key whose + name contains "key" as a word. This field is + deprecated. Please use the ``kmsKeys`` field to + retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources + encrypted with customer-managed encryption keys whose + name contains the word "key". + - ``relationships:instance-group-1`` to find Google + Cloud resources that have relationships with + "instance-group-1" in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of + type "INSTANCE_TO_INSTANCEGROUP". 
+ - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have + relationships with "instance-group-1" in the Compute + Engine instance group resource name, for relationship + type "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. + - ``NOT state:ACTIVE`` to find Google Cloud resources + whose state doesn't contain "ACTIVE" as a word. + - ``createTime<1609459200`` to find Google Cloud + resources that were created before "2021-01-01 + 00:00:00 UTC". 1609459200 is the epoch timestamp of + "2021-01-01 00:00:00 UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud + resources that were updated after "2021-01-01 00:00:00 + UTC". 1609459200 is the epoch timestamp of "2021-01-01 + 00:00:00 UTC" in seconds. + - ``Important`` to find Google Cloud resources that + contain "Important" as a word in any of the searchable + fields. + - ``Impor*`` to find Google Cloud resources that contain + "Impor" as a prefix of any word in any of the + searchable fields. + - ``Important location:(us-west1 OR global)`` to find + Google Cloud resources that contain "Important" as a + word in any of the searchable fields and are also + located in the "us-west1" region or the "global" + location. This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -1697,12 +1697,12 @@ def sample_search_all_resources(): Regular expressions are also supported. For example: - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type - ends with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". + - "compute.googleapis.com.\*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type ends + with "Instance". 
+ - ".\ *Instance.*" snapshots resources whose asset type + contains "Instance". See `RE2 `__ for all supported regular expression syntax. If the @@ -1841,11 +1841,11 @@ def sample_search_all_iam_policies(): The allowed values are: - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -1865,44 +1865,42 @@ def sample_search_all_iam_policies(): Examples: - - ``policy:amy@gmail.com`` to find IAM policy bindings - that specify user "amy@gmail.com". - - ``policy:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``policy:comp*`` to find IAM policy bindings that - contain "comp" as a prefix of any word in the - binding. - - ``policy.role.permissions:storage.buckets.update`` to - find IAM policy bindings that specify a role - containing "storage.buckets.update" permission. Note - that if callers don't have ``iam.roles.get`` access - to a role's included permissions, policy bindings - that specify this role will be dropped from the - search results. - - ``policy.role.permissions:upd*`` to find IAM policy - bindings that specify a role containing "upd" as a - prefix of any word in the role permission. Note that - if callers don't have ``iam.roles.get`` access to a - role's included permissions, policy bindings that - specify this role will be dropped from the search - results. - - ``resource:organizations/123456`` to find IAM policy - bindings that are set on "organizations/123456". 
- - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` - to find IAM policy bindings that are set on the - project named "myproject". - - ``Important`` to find IAM policy bindings that - contain "Important" as a word in any of the - searchable fields (except for the included - permissions). - - ``resource:(instance1 OR instance2) policy:amy`` to - find IAM policy bindings that are set on resources - "instance1" or "instance2" and also specify user - "amy". - - ``roles:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``memberTypes:user`` to find IAM policy bindings that - contain the principal type "user". + - ``policy:amy@gmail.com`` to find IAM policy bindings + that specify user "amy@gmail.com". + - ``policy:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``policy:comp*`` to find IAM policy bindings that + contain "comp" as a prefix of any word in the binding. + - ``policy.role.permissions:storage.buckets.update`` to + find IAM policy bindings that specify a role + containing "storage.buckets.update" permission. Note + that if callers don't have ``iam.roles.get`` access to + a role's included permissions, policy bindings that + specify this role will be dropped from the search + results. + - ``policy.role.permissions:upd*`` to find IAM policy + bindings that specify a role containing "upd" as a + prefix of any word in the role permission. Note that + if callers don't have ``iam.roles.get`` access to a + role's included permissions, policy bindings that + specify this role will be dropped from the search + results. + - ``resource:organizations/123456`` to find IAM policy + bindings that are set on "organizations/123456". + - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` + to find IAM policy bindings that are set on the + project named "myproject". 
+ - ``Important`` to find IAM policy bindings that contain + "Important" as a word in any of the searchable fields + (except for the included permissions). + - ``resource:(instance1 OR instance2) policy:amy`` to + find IAM policy bindings that are set on resources + "instance1" or "instance2" and also specify user + "amy". + - ``roles:roles/compute.admin`` to find IAM policy + bindings that specify the Compute Admin role. + - ``memberTypes:user`` to find IAM policy bindings that + contain the principal type "user". This corresponds to the ``query`` field on the ``request`` instance; if ``request`` is provided, this @@ -2559,9 +2557,9 @@ def sample_get_saved_query(): Required. The name of the saved query and it must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2794,9 +2792,9 @@ def sample_update_saved_query(): The saved query's ``name`` field is used to identify the one to update, which has format as below: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id This corresponds to the ``saved_query`` field on the ``request`` instance; if ``request`` is provided, this @@ -2908,9 +2906,9 @@ def sample_delete_saved_query(): Required. The name of the saved query to delete. 
It must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3098,8 +3096,8 @@ def sample_analyze_org_policies(): Required. The organization to scope the request. Only organization policies within the scope will be analyzed. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -3255,8 +3253,8 @@ def sample_analyze_org_policy_governed_containers(): The output containers will also be limited to the ones governed by those in-scope organization policies. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this @@ -3372,16 +3370,16 @@ def analyze_org_policy_governed_assets(self, resources or policies) under a scope. 
This RPC supports custom constraints and the following 10 canned constraints: - - storage.uniformBucketLevelAccess - - iam.disableServiceAccountKeyCreation - - iam.allowedPolicyMemberDomains - - compute.vmExternalIpAccess - - appengine.enforceServiceAccountActAsCheck - - gcp.resourceLocations - - compute.trustedImageProjects - - compute.skipDefaultNetworkCreation - - compute.requireOsLogin - - compute.disableNestedVirtualization + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization This RPC only returns either resources of types supported by `searchable asset @@ -3426,8 +3424,8 @@ def sample_analyze_org_policy_governed_assets(): The output assets will also be limited to the ones governed by those in-scope organization policies. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") This corresponds to the ``scope`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 2f1d30c41875..755d23eec517 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -976,16 +976,16 @@ def analyze_org_policy_governed_assets(self) -> Callable[ resources or policies) under a scope. 
This RPC supports custom constraints and the following 10 canned constraints: - - storage.uniformBucketLevelAccess - - iam.disableServiceAccountKeyCreation - - iam.allowedPolicyMemberDomains - - compute.vmExternalIpAccess - - appengine.enforceServiceAccountActAsCheck - - gcp.resourceLocations - - compute.trustedImageProjects - - compute.skipDefaultNetworkCreation - - compute.requireOsLogin - - compute.disableNestedVirtualization + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization This RPC only returns either resources of types supported by `searchable asset diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 96833f741bcc..92ca7f4fd078 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -982,16 +982,16 @@ def analyze_org_policy_governed_assets(self) -> Callable[ resources or policies) under a scope. 
This RPC supports custom constraints and the following 10 canned constraints: - - storage.uniformBucketLevelAccess - - iam.disableServiceAccountKeyCreation - - iam.allowedPolicyMemberDomains - - compute.vmExternalIpAccess - - appengine.enforceServiceAccountActAsCheck - - gcp.resourceLocations - - compute.trustedImageProjects - - compute.skipDefaultNetworkCreation - - compute.requireOsLogin - - compute.disableNestedVirtualization + - storage.uniformBucketLevelAccess + - iam.disableServiceAccountKeyCreation + - iam.allowedPolicyMemberDomains + - compute.vmExternalIpAccess + - appengine.enforceServiceAccountActAsCheck + - gcp.resourceLocations + - compute.trustedImageProjects + - compute.skipDefaultNetworkCreation + - compute.requireOsLogin + - compute.disableNestedVirtualization This RPC only returns either resources of types supported by `searchable asset diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py index 29138c3ea3b6..3caaefee47e6 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/asset_service.py @@ -169,12 +169,12 @@ class ExportAssetsRequest(proto.Message): Regular expressions are also supported. For example: - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". + - "compute.googleapis.com.\*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type ends + with "Instance". + - ".\ *Instance.*" snapshots resources whose asset type + contains "Instance". 
See `RE2 `__ for all supported regular expression syntax. If the regular @@ -197,18 +197,18 @@ class ExportAssetsRequest(proto.Message): ``INSTANCE_TO_INSTANCEGROUP``. This field should only be specified if content_type=RELATIONSHIP. - - If specified: it snapshots specified relationships. It - returns an error if any of the [relationship_types] - doesn't belong to the supported relationship types of the - [asset_types] or if any of the [asset_types] doesn't - belong to the source types of the [relationship_types]. - - Otherwise: it snapshots the supported relationships for - all [asset_types] or returns an error if any of the - [asset_types] has no relationship support. An unspecified - asset types field means all supported asset_types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types and relationship types. + - If specified: it snapshots specified relationships. It + returns an error if any of the [relationship_types] + doesn't belong to the supported relationship types of the + [asset_types] or if any of the [asset_types] doesn't + belong to the source types of the [relationship_types]. + - Otherwise: it snapshots the supported relationships for + all [asset_types] or returns an error if any of the + [asset_types] has no relationship support. An unspecified + asset types field means all supported asset_types. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -306,12 +306,12 @@ class ListAssetsRequest(proto.Message): Regular expression is also supported. For example: - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". + - "compute.googleapis.com.\*" snapshots resources whose + asset type starts with "compute.googleapis.com". 
+ - ".*Instance" snapshots resources whose asset type ends + with "Instance". + - ".\ *Instance.*" snapshots resources whose asset type + contains "Instance". See `RE2 `__ for all supported regular expression syntax. If the regular @@ -341,18 +341,18 @@ class ListAssetsRequest(proto.Message): ``INSTANCE_TO_INSTANCEGROUP``. This field should only be specified if content_type=RELATIONSHIP. - - If specified: it snapshots specified relationships. It - returns an error if any of the [relationship_types] - doesn't belong to the supported relationship types of the - [asset_types] or if any of the [asset_types] doesn't - belong to the source types of the [relationship_types]. - - Otherwise: it snapshots the supported relationships for - all [asset_types] or returns an error if any of the - [asset_types] has no relationship support. An unspecified - asset types field means all supported asset_types. See - `Introduction to Cloud Asset - Inventory `__ - for all supported asset types and relationship types. + - If specified: it snapshots specified relationships. It + returns an error if any of the [relationship_types] + doesn't belong to the supported relationship types of the + [asset_types] or if any of the [asset_types] doesn't + belong to the source types of the [relationship_types]. + - Otherwise: it snapshots the supported relationships for + all [asset_types] or returns an error if any of the + [asset_types] has no relationship support. An unspecified + asset types field means all supported asset_types. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -457,18 +457,18 @@ class BatchGetAssetsHistoryRequest(proto.Message): example: ``INSTANCE_TO_INSTANCEGROUP``. This field should only be specified if content_type=RELATIONSHIP. - - If specified: it outputs specified relationships' history - on the [asset_names]. 
It returns an error if any of the - [relationship_types] doesn't belong to the supported - relationship types of the [asset_names] or if any of the - [asset_names]'s types doesn't belong to the source types - of the [relationship_types]. - - Otherwise: it outputs the supported relationships' - history on the [asset_names] or returns an error if any - of the [asset_names]'s types has no relationship support. - See `Introduction to Cloud Asset - Inventory `__ - for all supported asset types and relationship types. + - If specified: it outputs specified relationships' history + on the [asset_names]. It returns an error if any of the + [relationship_types] doesn't belong to the supported + relationship types of the [asset_names] or if any of the + [asset_names]'s types doesn't belong to the source types + of the [relationship_types]. + - Otherwise: it outputs the supported relationships' history + on the [asset_names] or returns an error if any of the + [asset_names]'s types has no relationship support. See + `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ parent: str = proto.Field( @@ -1021,20 +1021,19 @@ class Feed(proto.Message): ``INSTANCE_TO_INSTANCEGROUP``. This field should only be specified if content_type=RELATIONSHIP. - - If specified: it outputs specified relationship updates - on the [asset_names] or the [asset_types]. It returns an - error if any of the [relationship_types] doesn't belong - to the supported relationship types of the [asset_names] - or [asset_types], or any of the [asset_names] or the - [asset_types] doesn't belong to the source types of the - [relationship_types]. - - Otherwise: it outputs the supported relationships of the - types of [asset_names] and [asset_types] or returns an - error if any of the [asset_names] or the [asset_types] - has no replationship support. See `Introduction to Cloud - Asset - Inventory `__ - for all supported asset types and relationship types. 
+ - If specified: it outputs specified relationship updates on + the [asset_names] or the [asset_types]. It returns an + error if any of the [relationship_types] doesn't belong to + the supported relationship types of the [asset_names] or + [asset_types], or any of the [asset_names] or the + [asset_types] doesn't belong to the source types of the + [relationship_types]. + - Otherwise: it outputs the supported relationships of the + types of [asset_names] and [asset_types] or returns an + error if any of the [asset_names] or the [asset_types] has + no replationship support. See `Introduction to Cloud Asset + Inventory `__ + for all supported asset types and relationship types. """ name: str = proto.Field( @@ -1083,11 +1082,11 @@ class SearchAllResourcesRequest(proto.Message): The allowed values are: - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") query (str): Optional. The query statement. See `how to construct a query `__ @@ -1096,61 +1095,60 @@ class SearchAllResourcesRequest(proto.Message): Examples: - - ``name:Important`` to find Google Cloud resources whose - name contains "Important" as a word. - - ``name=Important`` to find the Google Cloud resource - whose name is exactly "Important". - - ``displayName:Impor*`` to find Google Cloud resources - whose display name contains "Impor" as a prefix of any - word in the field. - - ``location:us-west*`` to find Google Cloud resources - whose location contains both "us" and "west" as prefixes. - - ``labels:prod`` to find Google Cloud resources whose - labels contain "prod" as a key or value. 
- - ``labels.env:prod`` to find Google Cloud resources that - have a label "env" and its value is "prod". - - ``labels.env:*`` to find Google Cloud resources that have - a label "env". - - ``kmsKey:key`` to find Google Cloud resources encrypted - with a customer-managed encryption key whose name - contains "key" as a word. This field is deprecated. - Please use the ``kmsKeys`` field to retrieve Cloud KMS - key information. - - ``kmsKeys:key`` to find Google Cloud resources encrypted - with customer-managed encryption keys whose name contains - the word "key". - - ``relationships:instance-group-1`` to find Google Cloud - resources that have relationships with "instance-group-1" - in the related resource name. - - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find - Compute Engine instances that have relationships of type - "INSTANCE_TO_INSTANCEGROUP". - - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` - to find Compute Engine instances that have relationships - with "instance-group-1" in the Compute Engine instance - group resource name, for relationship type - "INSTANCE_TO_INSTANCEGROUP". - - ``state:ACTIVE`` to find Google Cloud resources whose - state contains "ACTIVE" as a word. - - ``NOT state:ACTIVE`` to find Google Cloud resources whose - state doesn't contain "ACTIVE" as a word. - - ``createTime<1609459200`` to find Google Cloud resources - that were created before "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 - UTC" in seconds. - - ``updateTime>1609459200`` to find Google Cloud resources - that were updated after "2021-01-01 00:00:00 UTC". - 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 - UTC" in seconds. - - ``Important`` to find Google Cloud resources that contain - "Important" as a word in any of the searchable fields. - - ``Impor*`` to find Google Cloud resources that contain - "Impor" as a prefix of any word in any of the searchable - fields. 
- - ``Important location:(us-west1 OR global)`` to find - Google Cloud resources that contain "Important" as a word - in any of the searchable fields and are also located in - the "us-west1" region or the "global" location. + - ``name:Important`` to find Google Cloud resources whose + name contains "Important" as a word. + - ``name=Important`` to find the Google Cloud resource whose + name is exactly "Important". + - ``displayName:Impor*`` to find Google Cloud resources + whose display name contains "Impor" as a prefix of any + word in the field. + - ``location:us-west*`` to find Google Cloud resources whose + location contains both "us" and "west" as prefixes. + - ``labels:prod`` to find Google Cloud resources whose + labels contain "prod" as a key or value. + - ``labels.env:prod`` to find Google Cloud resources that + have a label "env" and its value is "prod". + - ``labels.env:*`` to find Google Cloud resources that have + a label "env". + - ``kmsKey:key`` to find Google Cloud resources encrypted + with a customer-managed encryption key whose name contains + "key" as a word. This field is deprecated. Please use the + ``kmsKeys`` field to retrieve Cloud KMS key information. + - ``kmsKeys:key`` to find Google Cloud resources encrypted + with customer-managed encryption keys whose name contains + the word "key". + - ``relationships:instance-group-1`` to find Google Cloud + resources that have relationships with "instance-group-1" + in the related resource name. + - ``relationships:INSTANCE_TO_INSTANCEGROUP`` to find + Compute Engine instances that have relationships of type + "INSTANCE_TO_INSTANCEGROUP". + - ``relationships.INSTANCE_TO_INSTANCEGROUP:instance-group-1`` + to find Compute Engine instances that have relationships + with "instance-group-1" in the Compute Engine instance + group resource name, for relationship type + "INSTANCE_TO_INSTANCEGROUP". + - ``state:ACTIVE`` to find Google Cloud resources whose + state contains "ACTIVE" as a word. 
+ - ``NOT state:ACTIVE`` to find Google Cloud resources whose + state doesn't contain "ACTIVE" as a word. + - ``createTime<1609459200`` to find Google Cloud resources + that were created before "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 + UTC" in seconds. + - ``updateTime>1609459200`` to find Google Cloud resources + that were updated after "2021-01-01 00:00:00 UTC". + 1609459200 is the epoch timestamp of "2021-01-01 00:00:00 + UTC" in seconds. + - ``Important`` to find Google Cloud resources that contain + "Important" as a word in any of the searchable fields. + - ``Impor*`` to find Google Cloud resources that contain + "Impor" as a prefix of any word in any of the searchable + fields. + - ``Important location:(us-west1 OR global)`` to find Google + Cloud resources that contain "Important" as a word in any + of the searchable fields and are also located in the + "us-west1" region or the "global" location. asset_types (MutableSequence[str]): Optional. A list of asset types that this request searches for. If empty, it will search all the `searchable asset @@ -1158,12 +1156,12 @@ class SearchAllResourcesRequest(proto.Message): Regular expressions are also supported. For example: - - "compute.googleapis.com.*" snapshots resources whose - asset type starts with "compute.googleapis.com". - - ".*Instance" snapshots resources whose asset type ends - with "Instance". - - ".*Instance.*" snapshots resources whose asset type - contains "Instance". + - "compute.googleapis.com.\*" snapshots resources whose + asset type starts with "compute.googleapis.com". + - ".*Instance" snapshots resources whose asset type ends + with "Instance". + - ".\ *Instance.*" snapshots resources whose asset type + contains "Instance". See `RE2 `__ for all supported regular expression syntax. If the regular @@ -1190,17 +1188,17 @@ class SearchAllResourcesRequest(proto.Message): Example: "location DESC, name". 
Only singular primitive fields in the response are sortable: - - name - - assetType - - project - - displayName - - description - - location - - createTime - - updateTime - - state - - parentFullResourceName - - parentAssetType + - name + - assetType + - project + - displayName + - description + - location + - createTime + - updateTime + - state + - parentFullResourceName + - parentAssetType All the other fields such as repeated fields (e.g., ``networkTags``, ``kmsKeys``), map fields (e.g., ``labels``) @@ -1208,7 +1206,7 @@ class SearchAllResourcesRequest(proto.Message): supported. read_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. A comma-separated list of fields specifying which - fields to be returned in ResourceSearchResult. Only '*' or + fields to be returned in ResourceSearchResult. Only '\*' or combination of top level fields can be specified. Field names of both snake_case and camelCase are supported. Examples: ``"*"``, ``"name,location"``, @@ -1217,28 +1215,28 @@ class SearchAllResourcesRequest(proto.Message): The read_mask paths must be valid field paths listed but not limited to (both snake_case and camelCase are supported): - - name - - assetType - - project - - displayName - - description - - location - - tagKeys - - tagValues - - tagValueIds - - labels - - networkTags - - kmsKey (This field is deprecated. Please use the - ``kmsKeys`` field to retrieve Cloud KMS key information.) - - kmsKeys - - createTime - - updateTime - - state - - additionalAttributes - - versionedResources + - name + - assetType + - project + - displayName + - description + - location + - tagKeys + - tagValues + - tagValueIds + - labels + - networkTags + - kmsKey (This field is deprecated. Please use the + ``kmsKeys`` field to retrieve Cloud KMS key information.) + - kmsKeys + - createTime + - updateTime + - state + - additionalAttributes + - versionedResources If read_mask is not specified, all fields except - versionedResources will be returned. 
If only '*' is + versionedResources will be returned. If only '\*' is specified, all fields including versionedResources will be returned. Any invalid field path will trigger INVALID_ARGUMENT error. @@ -1318,11 +1316,11 @@ class SearchAllIamPoliciesRequest(proto.Message): The allowed values are: - - projects/{PROJECT_ID} (e.g., "projects/foo-bar") - - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") - - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - projects/{PROJECT_ID} (e.g., "projects/foo-bar") + - projects/{PROJECT_NUMBER} (e.g., "projects/12345678") + - folders/{FOLDER_NUMBER} (e.g., "folders/1234567") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") query (str): Optional. The query statement. See `how to construct a query `__ @@ -1337,39 +1335,39 @@ class SearchAllIamPoliciesRequest(proto.Message): Examples: - - ``policy:amy@gmail.com`` to find IAM policy bindings that - specify user "amy@gmail.com". - - ``policy:roles/compute.admin`` to find IAM policy - bindings that specify the Compute Admin role. - - ``policy:comp*`` to find IAM policy bindings that contain - "comp" as a prefix of any word in the binding. - - ``policy.role.permissions:storage.buckets.update`` to - find IAM policy bindings that specify a role containing - "storage.buckets.update" permission. Note that if callers - don't have ``iam.roles.get`` access to a role's included - permissions, policy bindings that specify this role will - be dropped from the search results. - - ``policy.role.permissions:upd*`` to find IAM policy - bindings that specify a role containing "upd" as a prefix - of any word in the role permission. Note that if callers - don't have ``iam.roles.get`` access to a role's included - permissions, policy bindings that specify this role will - be dropped from the search results. 
- - ``resource:organizations/123456`` to find IAM policy - bindings that are set on "organizations/123456". - - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` - to find IAM policy bindings that are set on the project - named "myproject". - - ``Important`` to find IAM policy bindings that contain - "Important" as a word in any of the searchable fields - (except for the included permissions). - - ``resource:(instance1 OR instance2) policy:amy`` to find - IAM policy bindings that are set on resources "instance1" - or "instance2" and also specify user "amy". - - ``roles:roles/compute.admin`` to find IAM policy bindings - that specify the Compute Admin role. - - ``memberTypes:user`` to find IAM policy bindings that - contain the principal type "user". + - ``policy:amy@gmail.com`` to find IAM policy bindings that + specify user "amy@gmail.com". + - ``policy:roles/compute.admin`` to find IAM policy bindings + that specify the Compute Admin role. + - ``policy:comp*`` to find IAM policy bindings that contain + "comp" as a prefix of any word in the binding. + - ``policy.role.permissions:storage.buckets.update`` to find + IAM policy bindings that specify a role containing + "storage.buckets.update" permission. Note that if callers + don't have ``iam.roles.get`` access to a role's included + permissions, policy bindings that specify this role will + be dropped from the search results. + - ``policy.role.permissions:upd*`` to find IAM policy + bindings that specify a role containing "upd" as a prefix + of any word in the role permission. Note that if callers + don't have ``iam.roles.get`` access to a role's included + permissions, policy bindings that specify this role will + be dropped from the search results. + - ``resource:organizations/123456`` to find IAM policy + bindings that are set on "organizations/123456". 
+ - ``resource=//cloudresourcemanager.googleapis.com/projects/myproject`` + to find IAM policy bindings that are set on the project + named "myproject". + - ``Important`` to find IAM policy bindings that contain + "Important" as a word in any of the searchable fields + (except for the included permissions). + - ``resource:(instance1 OR instance2) policy:amy`` to find + IAM policy bindings that are set on resources "instance1" + or "instance2" and also specify user "amy". + - ``roles:roles/compute.admin`` to find IAM policy bindings + that specify the Compute Admin role. + - ``memberTypes:user`` to find IAM policy bindings that + contain the principal type "user". page_size (int): Optional. The page size for search result pagination. Page size is capped at 500 even if a larger value is given. If @@ -1391,13 +1389,13 @@ class SearchAllIamPoliciesRequest(proto.Message): Regular expressions are also supported. For example: - - "compute.googleapis.com.*" snapshots IAM policies - attached to asset type starts with - "compute.googleapis.com". - - ".*Instance" snapshots IAM policies attached to asset - type ends with "Instance". - - ".*Instance.*" snapshots IAM policies attached to asset - type contains "Instance". + - "compute.googleapis.com.\*" snapshots IAM policies + attached to asset type starts with + "compute.googleapis.com". + - ".*Instance" snapshots IAM policies attached to asset type + ends with "Instance". + - ".\ *Instance.*" snapshots IAM policies attached to asset + type contains "Instance". See `RE2 `__ for all supported regular expression syntax. If the regular @@ -1411,11 +1409,11 @@ class SearchAllIamPoliciesRequest(proto.Message): Example: "assetType DESC, resource". Only singular primitive fields in the response are sortable: - - resource - - assetType - - project All the other fields such as repeated fields - (e.g., ``folders``) and non-primitive fields (e.g., - ``policy``) are not supported. 
+ - resource + - assetType + - project All the other fields such as repeated fields + (e.g., ``folders``) and non-primitive fields (e.g., + ``policy``) are not supported. """ scope: str = proto.Field( @@ -1675,12 +1673,12 @@ class Options(proto.Message): Only the following permissions are considered in this analysis: - - ``iam.serviceAccounts.actAs`` - - ``iam.serviceAccounts.signBlob`` - - ``iam.serviceAccounts.signJwt`` - - ``iam.serviceAccounts.getAccessToken`` - - ``iam.serviceAccounts.getOpenIdToken`` - - ``iam.serviceAccounts.implicitDelegation`` + - ``iam.serviceAccounts.actAs`` + - ``iam.serviceAccounts.signBlob`` + - ``iam.serviceAccounts.signJwt`` + - ``iam.serviceAccounts.getAccessToken`` + - ``iam.serviceAccounts.getOpenIdToken`` + - ``iam.serviceAccounts.implicitDelegation`` Default is false. """ @@ -1774,9 +1772,9 @@ class AnalyzeIamPolicyRequest(proto.Message): Optional. The name of a saved query, which must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id If both ``analysis_query`` and ``saved_analysis_query`` are provided, they will be merged together with the @@ -1956,12 +1954,12 @@ class BigQueryDestination(proto.Message): analysis results will be written. Tables will be created based on this table_prefix if not exist: - - _analysis table will contain export - operation's metadata. - - _analysis_result will contain all the - [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult]. - When [partition_key] is specified, both tables will be - partitioned based on the [partition_key]. + - _analysis table will contain export + operation's metadata. 
+ - _analysis_result will contain all the + [IamPolicyAnalysisResult][google.cloud.asset.v1.IamPolicyAnalysisResult]. + When [partition_key] is specified, both tables will be + partitioned based on the [partition_key]. partition_key (google.cloud.asset_v1.types.IamPolicyAnalysisOutputConfig.BigQueryDestination.PartitionKey): The partition key for BigQuery partitioned table. @@ -1970,14 +1968,14 @@ class BigQueryDestination(proto.Message): destination table or partition already exists. The following values are supported: - - WRITE_TRUNCATE: If the table or partition already exists, - BigQuery overwrites the entire table or all the - partitions data. - - WRITE_APPEND: If the table or partition already exists, - BigQuery appends the data to the table or the latest - partition. - - WRITE_EMPTY: If the table already exists and contains - data, an error is returned. + - WRITE_TRUNCATE: If the table or partition already exists, + BigQuery overwrites the entire table or all the partitions + data. + - WRITE_APPEND: If the table or partition already exists, + BigQuery appends the data to the table or the latest + partition. + - WRITE_EMPTY: If the table already exists and contains + data, an error is returned. The default value is WRITE_APPEND. Each action is atomic and only occurs if BigQuery is able to complete the job @@ -2048,9 +2046,9 @@ class AnalyzeIamPolicyLongrunningRequest(proto.Message): Optional. 
The name of a saved query, which must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id If both ``analysis_query`` and ``saved_analysis_query`` are provided, they will be merged together with the @@ -2098,9 +2096,9 @@ class SavedQuery(proto.Message): name (str): The resource name of the saved query. The format must be: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id description (str): The description of this saved query. This value should be fewer than 255 characters. @@ -2237,9 +2235,9 @@ class GetSavedQueryRequest(proto.Message): Required. 
The name of the saved query and it must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id """ name: str = proto.Field( @@ -2340,9 +2338,9 @@ class UpdateSavedQueryRequest(proto.Message): The saved query's ``name`` field is used to identify the one to update, which has format as below: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. The list of fields to update. """ @@ -2367,9 +2365,9 @@ class DeleteSavedQueryRequest(proto.Message): Required. The name of the saved query to delete. It must be in the format of: - - projects/project_number/savedQueries/saved_query_id - - folders/folder_number/savedQueries/saved_query_id - - organizations/organization_number/savedQueries/saved_query_id + - projects/project_number/savedQueries/saved_query_id + - folders/folder_number/savedQueries/saved_query_id + - organizations/organization_number/savedQueries/saved_query_id """ name: str = proto.Field( @@ -2568,14 +2566,14 @@ class BigQueryDestination(proto.Message): partition already exists. The following values are supported: - - WRITE_TRUNCATE: If the table or partition already exists, - BigQuery overwrites the entire table or all the - partitions data. - - WRITE_APPEND: If the table or partition already exists, - BigQuery appends the data to the table or the latest - partition. 
- - WRITE_EMPTY: If the table already exists and contains - data, a 'duplicate' error is returned in the job result. + - WRITE_TRUNCATE: If the table or partition already exists, + BigQuery overwrites the entire table or all the partitions + data. + - WRITE_APPEND: If the table or partition already exists, + BigQuery appends the data to the table or the latest + partition. + - WRITE_EMPTY: If the table already exists and contains + data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. """ @@ -2859,26 +2857,26 @@ class TableFieldSchema(proto.Message): Attributes: field (str): The field name. The name must contain only letters (a-z, - A-Z), numbers (0-9), or underscores (_), and must start with - a letter or underscore. The maximum length is 128 + A-Z), numbers (0-9), or underscores (\_), and must start + with a letter or underscore. The maximum length is 128 characters. type_ (str): The field data type. Possible values include - - STRING - - BYTES - - INTEGER - - FLOAT - - BOOLEAN - - TIMESTAMP - - DATE - - TIME - - DATETIME - - GEOGRAPHY, - - NUMERIC, - - BIGNUMERIC, - - RECORD (where RECORD indicates that the field contains a - nested schema). + - STRING + - BYTES + - INTEGER + - FLOAT + - BOOLEAN + - TIMESTAMP + - DATE + - TIME + - DATETIME + - GEOGRAPHY, + - NUMERIC, + - BIGNUMERIC, + - RECORD (where RECORD indicates that the field contains a + nested schema). mode (str): The field mode. Possible values include NULLABLE, REQUIRED and REPEATED. The default @@ -3220,7 +3218,7 @@ class Constraint(proto.Message): The unique name of the constraint. Format of the name should be - - ``constraints/{constraint_name}`` + - ``constraints/{constraint_name}`` For example, ``constraints/compute.disableSerialPortAccess``. @@ -3341,7 +3339,7 @@ class CustomConstraint(proto.Message): Name of the constraint. This is unique within the organization. 
Format of the name should be - - ``organizations/{organization_id}/customConstraints/{custom_constraint_id}`` + - ``organizations/{organization_id}/customConstraints/{custom_constraint_id}`` Example : "organizations/123/customConstraints/custom.createOnlyE2TypeVms". @@ -3349,7 +3347,7 @@ class CustomConstraint(proto.Message): The Resource Instance type on which this policy applies to. Format will be of the form : "/" Example: - - ``compute.googleapis.com/Instance``. + - ``compute.googleapis.com/Instance``. method_types (MutableSequence[google.cloud.asset_v1.types.AnalyzerOrgPolicyConstraint.CustomConstraint.MethodType]): All the operations being applied for this constraint. @@ -3461,8 +3459,8 @@ class AnalyzeOrgPoliciesRequest(proto.Message): Required. The organization to scope the request. Only organization policies within the scope will be analyzed. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") constraint (str): Required. The name of the constraint to analyze organization policies for. The response @@ -3596,8 +3594,8 @@ class AnalyzeOrgPolicyGovernedContainersRequest(proto.Message): output containers will also be limited to the ones governed by those in-scope organization policies. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") constraint (str): Required. The name of the constraint to analyze governed containers for. The analysis @@ -3746,8 +3744,8 @@ class AnalyzeOrgPolicyGovernedAssetsRequest(proto.Message): output assets will also be limited to the ones governed by those in-scope organization policies. - - organizations/{ORGANIZATION_NUMBER} (e.g., - "organizations/123456") + - organizations/{ORGANIZATION_NUMBER} (e.g., + "organizations/123456") constraint (str): Required. The name of the constraint to analyze governed assets for. 
The analysis only diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py index a62b5696f22c..d2097a07a4d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/types/assets.py @@ -523,15 +523,15 @@ class ResourceSearchResult(proto.Message): To search against the ``name``: - - Use a field query. Example: ``name:instance1`` - - Use a free text query. Example: ``instance1`` + - Use a field query. Example: ``name:instance1`` + - Use a free text query. Example: ``instance1`` asset_type (str): The type of this resource. Example: ``compute.googleapis.com/Disk``. To search against the ``asset_type``: - - Specify the ``asset_type`` field in your search request. + - Specify the ``asset_type`` field in your search request. project (str): The project that this resource belongs to, in the form of projects/{PROJECT_NUMBER}. This field is available when the @@ -539,10 +539,10 @@ class ResourceSearchResult(proto.Message): To search against ``project``: - - Use a field query. Example: ``project:12345`` - - Use a free text query. Example: ``12345`` - - Specify the ``scope`` field as this project in your - search request. + - Use a field query. Example: ``project:12345`` + - Use a free text query. Example: ``12345`` + - Specify the ``scope`` field as this project in your search + request. folders (MutableSequence[str]): The folder(s) that this resource belongs to, in the form of folders/{FOLDER_NUMBER}. This field is available when the @@ -550,10 +550,10 @@ class ResourceSearchResult(proto.Message): To search against ``folders``: - - Use a field query. Example: ``folders:(123 OR 456)`` - - Use a free text query. Example: ``123`` - - Specify the ``scope`` field as this folder in your search - request. 
+ - Use a field query. Example: ``folders:(123 OR 456)`` + - Use a free text query. Example: ``123`` + - Specify the ``scope`` field as this folder in your search + request. organization (str): The organization that this resource belongs to, in the form of organizations/{ORGANIZATION_NUMBER}. This field is @@ -561,18 +561,18 @@ class ResourceSearchResult(proto.Message): To search against ``organization``: - - Use a field query. Example: ``organization:123`` - - Use a free text query. Example: ``123`` - - Specify the ``scope`` field as this organization in your - search request. + - Use a field query. Example: ``organization:123`` + - Use a free text query. Example: ``123`` + - Specify the ``scope`` field as this organization in your + search request. display_name (str): The display name of this resource. This field is available only when the resource's Protobuf contains it. To search against the ``display_name``: - - Use a field query. Example: ``displayName:"My Instance"`` - - Use a free text query. Example: ``"My Instance"`` + - Use a field query. Example: ``displayName:"My Instance"`` + - Use a free text query. Example: ``"My Instance"`` description (str): One or more paragraphs of text description of this resource. Maximum length could be up to 1M bytes. This field is @@ -580,9 +580,9 @@ class ResourceSearchResult(proto.Message): To search against the ``description``: - - Use a field query. Example: - ``description:"important instance"`` - - Use a free text query. Example: ``"important instance"`` + - Use a field query. Example: + ``description:"important instance"`` + - Use a free text query. Example: ``"important instance"`` location (str): Location can be ``global``, regional like ``us-east1``, or zonal like ``us-west1-b``. This field is available only when @@ -590,8 +590,8 @@ class ResourceSearchResult(proto.Message): To search against the ``location``: - - Use a field query. Example: ``location:us-west*`` - - Use a free text query. 
Example: ``us-west*`` + - Use a field query. Example: ``location:us-west*`` + - Use a free text query. Example: ``us-west*`` labels (MutableMapping[str, str]): Labels associated with this resource. See `Labelling and grouping Google Cloud @@ -601,15 +601,15 @@ class ResourceSearchResult(proto.Message): To search against the ``labels``: - - Use a field query: + - Use a field query: - - query on any label's key or value. Example: - ``labels:prod`` - - query by a given label. Example: ``labels.env:prod`` - - query by a given label's existence. Example: - ``labels.env:*`` + - query on any label's key or value. Example: + ``labels:prod`` + - query by a given label. Example: ``labels.env:prod`` + - query by a given label's existence. Example: + ``labels.env:*`` - - Use a free text query. Example: ``prod`` + - Use a free text query. Example: ``prod`` network_tags (MutableSequence[str]): Network tags associated with this resource. Like labels, network tags are a type of annotations used to group Google @@ -620,8 +620,8 @@ class ResourceSearchResult(proto.Message): To search against the ``network_tags``: - - Use a field query. Example: ``networkTags:internal`` - - Use a free text query. Example: ``internal`` + - Use a field query. Example: ``networkTags:internal`` + - Use a free text query. Example: ``internal`` kms_key (str): The Cloud KMS `CryptoKey `__ @@ -639,8 +639,8 @@ class ResourceSearchResult(proto.Message): To search against the ``kms_key``: - - Use a field query. Example: ``kmsKey:key`` - - Use a free text query. Example: ``key`` + - Use a field query. Example: ``kmsKey:key`` + - Use a free text query. Example: ``key`` kms_keys (MutableSequence[str]): The Cloud KMS `CryptoKey `__ @@ -651,8 +651,8 @@ class ResourceSearchResult(proto.Message): To search against the ``kms_keys``: - - Use a field query. Example: ``kmsKeys:key`` - - Use a free text query. Example: ``key`` + - Use a field query. Example: ``kmsKeys:key`` + - Use a free text query. 
Example: ``key`` create_time (google.protobuf.timestamp_pb2.Timestamp): The create timestamp of this resource, at which the resource was created. The granularity is in seconds. Timestamp.nanos @@ -661,14 +661,14 @@ class ResourceSearchResult(proto.Message): To search against ``create_time``: - - Use a field query. + - Use a field query. - - value in seconds since unix epoch. Example: - ``createTime > 1609459200`` - - value in date string. Example: - ``createTime > 2021-01-01`` - - value in date-time string (must be quoted). Example: - ``createTime > "2021-01-01T00:00:00"`` + - value in seconds since unix epoch. Example: + ``createTime > 1609459200`` + - value in date string. Example: + ``createTime > 2021-01-01`` + - value in date-time string (must be quoted). Example: + ``createTime > "2021-01-01T00:00:00"`` update_time (google.protobuf.timestamp_pb2.Timestamp): The last update timestamp of this resource, at which the resource was last modified or deleted. The granularity is in @@ -677,14 +677,14 @@ class ResourceSearchResult(proto.Message): To search against ``update_time``: - - Use a field query. + - Use a field query. - - value in seconds since unix epoch. Example: - ``updateTime < 1609459200`` - - value in date string. Example: - ``updateTime < 2021-01-01`` - - value in date-time string (must be quoted). Example: - ``updateTime < "2021-01-01T00:00:00"`` + - value in seconds since unix epoch. Example: + ``updateTime < 1609459200`` + - value in date string. Example: + ``updateTime < 2021-01-01`` + - value in date-time string (must be quoted). Example: + ``updateTime < "2021-01-01T00:00:00"`` state (str): The state of this resource. Different resources types have different state definitions that are mapped from various @@ -704,8 +704,8 @@ class ResourceSearchResult(proto.Message): To search against the ``state``: - - Use a field query. Example: ``state:RUNNING`` - - Use a free text query. Example: ``RUNNING`` + - Use a field query. 
Example: ``state:RUNNING`` + - Use a free text query. Example: ``RUNNING`` additional_attributes (google.protobuf.struct_pb2.Struct): The additional searchable attributes of this resource. The attributes may vary from one resource type to another. @@ -726,17 +726,17 @@ class ResourceSearchResult(proto.Message): To search against the ``additional_attributes``: - - Use a free text query to match the attributes values. - Example: to search - ``additional_attributes = { dnsName: "foobar" }``, you - can issue a query ``foobar``. + - Use a free text query to match the attributes values. + Example: to search + ``additional_attributes = { dnsName: "foobar" }``, you can + issue a query ``foobar``. parent_full_resource_name (str): The full resource name of this resource's parent, if it has one. To search against the ``parent_full_resource_name``: - - Use a field query. Example: - ``parentFullResourceName:"project-name"`` - - Use a free text query. Example: ``project-name`` + - Use a field query. Example: + ``parentFullResourceName:"project-name"`` + - Use a free text query. Example: ``project-name`` versioned_resources (MutableSequence[google.cloud.asset_v1.types.VersionedResource]): Versioned resource representations of this resource. This is repeated because there could be multiple versions of @@ -759,7 +759,7 @@ class ResourceSearchResult(proto.Message): relationships (MutableMapping[str, google.cloud.asset_v1.types.RelatedResources]): A map of related resources of this resource, keyed by the relationship type. A relationship type is in the format of - {SourceType}*{ACTION}*\ {DestType}. Example: + {SourceType}\ *{ACTION}*\ {DestType}. Example: ``DISK_TO_INSTANCE``, ``DISK_TO_NETWORK``, ``INSTANCE_TO_INSTANCEGROUP``. See `supported relationship types `__. @@ -768,52 +768,52 @@ class ResourceSearchResult(proto.Message): {ORG_ID}/{TAG_KEY_SHORT_NAME}. To search against the ``tagKeys``: - - Use a field query. Example: + - Use a field query. 
Example: - - ``tagKeys:"123456789/env*"`` - - ``tagKeys="123456789/env"`` - - ``tagKeys:"env"`` + - ``tagKeys:"123456789/env*"`` + - ``tagKeys="123456789/env"`` + - ``tagKeys:"env"`` - - Use a free text query. Example: + - Use a free text query. Example: - - ``env`` + - ``env`` tag_values (MutableSequence[str]): TagValue namespaced names, in the format of {ORG_ID}/{TAG_KEY_SHORT_NAME}/{TAG_VALUE_SHORT_NAME}. To search against the ``tagValues``: - - Use a field query. Example: + - Use a field query. Example: - - ``tagValues:"env"`` - - ``tagValues:"env/prod"`` - - ``tagValues:"123456789/env/prod*"`` - - ``tagValues="123456789/env/prod"`` + - ``tagValues:"env"`` + - ``tagValues:"env/prod"`` + - ``tagValues:"123456789/env/prod*"`` + - ``tagValues="123456789/env/prod"`` - - Use a free text query. Example: + - Use a free text query. Example: - - ``prod`` + - ``prod`` tag_value_ids (MutableSequence[str]): TagValue IDs, in the format of tagValues/{TAG_VALUE_ID}. To search against the ``tagValueIds``: - - Use a field query. Example: + - Use a field query. Example: - - ``tagValueIds:"456"`` - - ``tagValueIds="tagValues/456"`` + - ``tagValueIds:"456"`` + - ``tagValueIds="tagValues/456"`` - - Use a free text query. Example: + - Use a free text query. Example: - - ``456`` + - ``456`` parent_asset_type (str): The type of this resource's immediate parent, if there is one. To search against the ``parent_asset_type``: - - Use a field query. Example: - ``parentAssetType:"cloudresourcemanager.googleapis.com/Project"`` - - Use a free text query. Example: - ``cloudresourcemanager.googleapis.com/Project`` + - Use a field query. Example: + ``parentAssetType:"cloudresourcemanager.googleapis.com/Project"`` + - Use a free text query. Example: + ``cloudresourcemanager.googleapis.com/Project`` """ name: str = proto.Field( @@ -1045,15 +1045,14 @@ class IamPolicySearchResult(proto.Message): To search against the ``resource``: - - use a field query. 
Example: - ``resource:organizations/123`` + - use a field query. Example: ``resource:organizations/123`` asset_type (str): The type of the resource associated with this IAM policy. Example: ``compute.googleapis.com/Disk``. To search against the ``asset_type``: - - specify the ``asset_types`` field in your search request. + - specify the ``asset_types`` field in your search request. project (str): The project that the associated Google Cloud resource belongs to, in the form of projects/{PROJECT_NUMBER}. If an @@ -1064,8 +1063,8 @@ class IamPolicySearchResult(proto.Message): To search against the ``project``: - - specify the ``scope`` field as this project in your - search request. + - specify the ``scope`` field as this project in your search + request. folders (MutableSequence[str]): The folder(s) that the IAM policy belongs to, in the form of folders/{FOLDER_NUMBER}. This field is available when the @@ -1073,10 +1072,10 @@ class IamPolicySearchResult(proto.Message): To search against ``folders``: - - use a field query. Example: ``folders:(123 OR 456)`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this folder in your search - request. + - use a field query. Example: ``folders:(123 OR 456)`` + - use a free text query. Example: ``123`` + - specify the ``scope`` field as this folder in your search + request. organization (str): The organization that the IAM policy belongs to, in the form of organizations/{ORGANIZATION_NUMBER}. This field is @@ -1084,10 +1083,10 @@ class IamPolicySearchResult(proto.Message): To search against ``organization``: - - use a field query. Example: ``organization:123`` - - use a free text query. Example: ``123`` - - specify the ``scope`` field as this organization in your - search request. + - use a field query. Example: ``organization:123`` + - use a free text query. Example: ``123`` + - specify the ``scope`` field as this organization in your + search request. 
policy (google.iam.v1.policy_pb2.Policy): The IAM policy directly set on the given resource. Note that the original IAM policy can contain multiple bindings. This @@ -1097,15 +1096,15 @@ class IamPolicySearchResult(proto.Message): To search against the ``policy`` bindings: - - use a field query: + - use a field query: - - query by the policy contained members. Example: - ``policy:amy@gmail.com`` - - query by the policy contained roles. Example: - ``policy:roles/compute.admin`` - - query by the policy contained roles' included - permissions. Example: - ``policy.role.permissions:compute.instances.create`` + - query by the policy contained members. Example: + ``policy:amy@gmail.com`` + - query by the policy contained roles. Example: + ``policy:roles/compute.admin`` + - query by the policy contained roles' included + permissions. Example: + ``policy.role.permissions:compute.instances.create`` explanation (google.cloud.asset_v1.types.IamPolicySearchResult.Explanation): Explanation about the IAM policy search result. It contains additional information to @@ -1191,12 +1190,12 @@ class IamPolicyAnalysisState(proto.Message): The Google standard error code that best describes the state. For example: - - OK means the analysis on this entity has been - successfully finished; - - PERMISSION_DENIED means an access denied error is - encountered; - - DEADLINE_EXCEEDED means the analysis on this entity - hasn't been started in time; + - OK means the analysis on this entity has been successfully + finished; + - PERMISSION_DENIED means an access denied error is + encountered; + - DEADLINE_EXCEEDED means the analysis on this entity hasn't + been started in time; cause (str): The human-readable description of the cause of failure. @@ -1346,13 +1345,13 @@ class Identity(proto.Message): binding `__, such as: - - user:foo@google.com - - group:group1@google.com - - serviceAccount:s1@prj1.iam.gserviceaccount.com - - projectOwner:some_project_id - - domain:google.com - - allUsers - - etc. 
+ - user:foo@google.com + - group:group1@google.com + - serviceAccount:s1@prj1.iam.gserviceaccount.com + - projectOwner:some_project_id + - domain:google.com + - allUsers + - etc. analysis_state (google.cloud.asset_v1.types.IamPolicyAnalysisState): The analysis state of this identity. """ @@ -1402,27 +1401,27 @@ class AccessControlList(proto.Message): For example, assume we have the following cases in one IAM policy binding: - - Permission P1 and P2 apply to resource R1 and R2; - - Permission P3 applies to resource R2 and R3; + - Permission P1 and P2 apply to resource R1 and R2; + - Permission P3 applies to resource R2 and R3; This will result in the following access control lists: - - AccessControlList 1: [R1, R2], [P1, P2] - - AccessControlList 2: [R2, R3], [P3] + - AccessControlList 1: [R1, R2], [P1, P2] + - AccessControlList 2: [R2, R3], [P3] Attributes: resources (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Resource]): The resources that match one of the following conditions: - - The resource_selector, if it is specified in request; - - Otherwise, resources reachable from the policy attached - resource. + - The resource_selector, if it is specified in request; + - Otherwise, resources reachable from the policy attached + resource. accesses (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Access]): The accesses that match one of the following conditions: - - The access_selector, if it is specified in request; - - Otherwise, access specifiers reachable from the policy - binding's role. + - The access_selector, if it is specified in request; + - Otherwise, access specifiers reachable from the policy + binding's role. resource_edges (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): Resource edges of the graph starting from the policy attached resource to any descendant resources. 
The @@ -1467,9 +1466,9 @@ class IdentityList(proto.Message): Only the identities that match one of the following conditions will be presented: - - The identity_selector, if it is specified in request; - - Otherwise, identities reachable from the policy binding's - members. + - The identity_selector, if it is specified in request; + - Otherwise, identities reachable from the policy binding's + members. group_edges (MutableSequence[google.cloud.asset_v1.types.IamPolicyAnalysisResult.Edge]): Group identity edges of the graph starting from the binding's group members to any node of the diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index b972f969634b..09b53be781f5 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -719,7 +719,7 @@ async def sample_update_trigger(): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update - all fields, provide a field mask of "*". + all fields, provide a field mask of "\*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1375,7 +1375,7 @@ async def sample_update_channel(): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update - all fields, provide a field mask of "*". + all fields, provide a field mask of "\*". 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2476,7 +2476,7 @@ async def sample_update_google_channel_config(): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update - all fields, provide a field mask of "*". + all fields, provide a field mask of "\*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 961929b1783c..9e42d07b6777 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -1146,7 +1146,7 @@ def sample_update_trigger(): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update - all fields, provide a field mask of "*". + all fields, provide a field mask of "\*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1797,7 +1797,7 @@ def sample_update_channel(): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update - all fields, provide a field mask of "*". + all fields, provide a field mask of "\*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2889,7 +2889,7 @@ def sample_update_google_channel_config(): The fields to be updated; only fields explicitly provided are updated. 
If no field mask is provided, all provided fields in the request are updated. To update - all fields, provide a field mask of "*". + all fields, provide a field mask of "\*". This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py index b114b533e678..4f1d3c88bf78 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/types/eventarc.py @@ -207,7 +207,7 @@ class UpdateTriggerRequest(proto.Message): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update all fields, - provide a field mask of "*". + provide a field mask of "\*". allow_missing (bool): If set to true, and the trigger is not found, a new trigger will be created. In this situation, ``update_mask`` is @@ -414,7 +414,7 @@ class UpdateChannelRequest(proto.Message): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update all fields, - provide a field mask of "*". + provide a field mask of "\*". validate_only (bool): Required. If set, validate the request and preview the review, but do not post it. @@ -694,7 +694,7 @@ class UpdateGoogleChannelConfigRequest(proto.Message): The fields to be updated; only fields explicitly provided are updated. If no field mask is provided, all provided fields in the request are updated. To update all fields, - provide a field mask of "*". + provide a field mask of "\*". 
""" google_channel_config: gce_google_channel_config.GoogleChannelConfig = proto.Field( diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index d3d1edb157b6..b5d7d26514b5 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -2849,7 +2849,7 @@ async def sample_get_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -2987,7 +2987,7 @@ async def sample_create_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3139,7 +3139,7 @@ async def sample_update_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3367,7 +3367,7 @@ async def sample_get_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. 
""" @@ -3480,7 +3480,7 @@ async def sample_update_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 3cac25632814..b6f74fe61442 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -3238,7 +3238,7 @@ def sample_get_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3375,7 +3375,7 @@ def sample_create_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3526,7 +3526,7 @@ def sample_update_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3752,7 +3752,7 @@ def sample_get_cmek_settings(): the Google Cloud organization. 
See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ @@ -3865,7 +3865,7 @@ def sample_update_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 774601dec829..f84e9f6ec5ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -293,10 +293,10 @@ async def sample_delete_log(): log_name (:class:`str`): Required. The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
For example, ``"projects/my-project-id/logs/syslog"``, @@ -414,10 +414,10 @@ async def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -600,17 +600,17 @@ async def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. 
A maximum of 100 resources may be @@ -848,10 +848,10 @@ async def sample_list_logs(): parent (:class:`str`): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 4bc6cd56e115..a2d4b1a336a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -640,10 +640,10 @@ def sample_delete_log(): log_name (str): Required. The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
For example, ``"projects/my-project-id/logs/syslog"``, @@ -760,10 +760,10 @@ def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -944,17 +944,17 @@ def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. 
A maximum of 100 resources may be @@ -1191,10 +1191,10 @@ def sample_list_logs(): parent (str): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py index c82bebc0ec4c..695393863f46 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/log_entry.py @@ -214,10 +214,10 @@ class LogEntry(proto.Message): Example values: - - ``000000000000004a`` - - ``7a2190356c3fc94b`` - - ``0000f00300090021`` - - ``d39223e101960076`` + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py index e86337ea863f..5b1dd80ceb61 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging.py @@ -51,10 +51,10 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. 
The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, @@ -79,10 +79,10 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -225,17 +225,17 @@ class ListLogEntriesRequest(proto.Message): Required. 
Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. A maximum of 100 resources may be specified in a @@ -394,24 +394,24 @@ class ListLogsRequest(proto.Message): parent (str): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` resource_names (MutableSequence[str]): Optional. 
List of resource names to list logs for: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` To support legacy queries, it could also be: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` The resource name in the ``parent`` field is added to this list. @@ -483,17 +483,17 @@ class TailLogEntriesRequest(proto.Message): Required. 
Name of a parent resource from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): Optional. Only log entries that match the filter are returned. An empty filter matches all log entries in the diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py index 93233511d1b2..292f690774f7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/types/logging_config.py @@ -350,9 +350,9 @@ class LogView(proto.Message): Filters are restricted to be a logical AND of ==/!= of any of the following: - - originating project/folder/organization/billing account. 
- - resource type - - log id + - originating project/folder/organization/billing account. + - resource type + - log id For example: @@ -1299,14 +1299,14 @@ class UpdateSinkRequest(proto.Message): the updated sink depends on both the old and new values of this field: - - If the old and new values of this field are both false or - both true, then there is no change to the sink's - ``writer_identity``. - - If the old value is false and the new value is true, then - ``writer_identity`` is changed to a unique service - account. - - It is an error if the old value is true and the new value - is set to false or defaulted to false. + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask that specifies the fields in ``sink`` that need an update. A sink field will be overwritten if, diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index 68db5574ea20..ef5a9151a1f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -2849,7 +2849,7 @@ async def sample_get_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. 
Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -2987,7 +2987,7 @@ async def sample_create_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3139,7 +3139,7 @@ async def sample_update_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3367,7 +3367,7 @@ async def sample_get_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ @@ -3480,7 +3480,7 @@ async def sample_update_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. 
""" diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index d25e9be1c544..696ac4be7cb4 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -3238,7 +3238,7 @@ def sample_get_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3375,7 +3375,7 @@ def sample_create_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3526,7 +3526,7 @@ def sample_update_exclusion(): chargeable logs. Note that exclusions on organization-level and folder-level sinks don't apply to child resources. Note also that you cannot modify - the \_Required sink or exclude logs from it. + the Required sink or exclude logs from it. """ # Create or coerce a protobuf request object. @@ -3752,7 +3752,7 @@ def sample_get_cmek_settings(): the Google Cloud organization. See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ @@ -3865,7 +3865,7 @@ def sample_update_cmek_settings(): the Google Cloud organization. 
See [Enabling CMEK for Log - Router](\ https://cloud.google.com/logging/docs/routing/managed-encryption) + Router](https://cloud.google.com/logging/docs/routing/managed-encryption) for more information. """ diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 774601dec829..f84e9f6ec5ab 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -293,10 +293,10 @@ async def sample_delete_log(): log_name (:class:`str`): Required. The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
For example, ``"projects/my-project-id/logs/syslog"``, @@ -414,10 +414,10 @@ async def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -600,17 +600,17 @@ async def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. 
A maximum of 100 resources may be @@ -848,10 +848,10 @@ async def sample_list_logs(): parent (:class:`str`): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index 4bc6cd56e115..a2d4b1a336a3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -640,10 +640,10 @@ def sample_delete_log(): log_name (str): Required. The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. 
For example, ``"projects/my-project-id/logs/syslog"``, @@ -760,10 +760,10 @@ def sample_write_log_entries(): to all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -944,17 +944,17 @@ def sample_list_log_entries(): Required. Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. 
A maximum of 100 resources may be @@ -1191,10 +1191,10 @@ def sample_list_logs(): parent (str): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py index c82bebc0ec4c..695393863f46 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/log_entry.py @@ -214,10 +214,10 @@ class LogEntry(proto.Message): Example values: - - ``000000000000004a`` - - ``7a2190356c3fc94b`` - - ``0000f00300090021`` - - ``d39223e101960076`` + - ``000000000000004a`` + - ``7a2190356c3fc94b`` + - ``0000f00300090021`` + - ``d39223e101960076`` trace_sampled (bool): Optional. The sampling decision of the trace associated with the log entry. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py index e86337ea863f..5b1dd80ceb61 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging.py @@ -51,10 +51,10 @@ class DeleteLogRequest(proto.Message): log_name (str): Required. 
The resource name of the log to delete: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example, ``"projects/my-project-id/logs/syslog"``, @@ -79,10 +79,10 @@ class WriteLogEntriesRequest(proto.Message): all log entries in ``entries`` that do not specify a value for ``log_name``: - - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` - - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` - - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` + - ``projects/[PROJECT_ID]/logs/[LOG_ID]`` + - ``organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`` + - ``folders/[FOLDER_ID]/logs/[LOG_ID]`` ``[LOG_ID]`` must be URL-encoded. For example: @@ -225,17 +225,17 @@ class ListLogEntriesRequest(proto.Message): Required. 
Names of one or more parent resources from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` Projects listed in the ``project_ids`` field are added to this list. A maximum of 100 resources may be specified in a @@ -394,24 +394,24 @@ class ListLogsRequest(proto.Message): parent (str): Required. The resource name to list logs for: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` resource_names (MutableSequence[str]): Optional. 
List of resource names to list logs for: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` To support legacy queries, it could also be: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` The resource name in the ``parent`` field is added to this list. @@ -483,17 +483,17 @@ class TailLogEntriesRequest(proto.Message): Required. 
Name of a parent resource from which to retrieve log entries: - - ``projects/[PROJECT_ID]`` - - ``organizations/[ORGANIZATION_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]`` - - ``folders/[FOLDER_ID]`` + - ``projects/[PROJECT_ID]`` + - ``organizations/[ORGANIZATION_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]`` + - ``folders/[FOLDER_ID]`` May alternatively be one or more views: - - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` - - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``projects/[PROJECT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``organizations/[ORGANIZATION_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``billingAccounts/[BILLING_ACCOUNT_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` + - ``folders/[FOLDER_ID]/locations/[LOCATION_ID]/buckets/[BUCKET_ID]/views/[VIEW_ID]`` filter (str): Optional. Only log entries that match the filter are returned. An empty filter matches all log entries in the diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py index 93233511d1b2..292f690774f7 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/types/logging_config.py @@ -350,9 +350,9 @@ class LogView(proto.Message): Filters are restricted to be a logical AND of ==/!= of any of the following: - - originating project/folder/organization/billing account. 
- - resource type - - log id + - originating project/folder/organization/billing account. + - resource type + - log id For example: @@ -1299,14 +1299,14 @@ class UpdateSinkRequest(proto.Message): the updated sink depends on both the old and new values of this field: - - If the old and new values of this field are both false or - both true, then there is no change to the sink's - ``writer_identity``. - - If the old value is false and the new value is true, then - ``writer_identity`` is changed to a unique service - account. - - It is an error if the old value is true and the new value - is set to false or defaulted to false. + - If the old and new values of this field are both false or + both true, then there is no change to the sink's + ``writer_identity``. + - If the old value is false and the new value is true, then + ``writer_identity`` is changed to a unique service + account. + - It is an error if the old value is true and the new value + is set to false or defaulted to false. update_mask (google.protobuf.field_mask_pb2.FieldMask): Optional. Field mask that specifies the fields in ``sink`` that need an update. 
A sink field will be overwritten if, diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 39f32df5f447..ea3e960a8e40 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -64,19 +64,19 @@ class CloudRedisAsyncClient: Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` """ _client: CloudRedisClient @@ -291,7 +291,7 @@ async def list_instances(self, The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all 
regions available to the project are queried, and the results @@ -702,12 +702,12 @@ async def sample_create_instance(): Required. The logical name of the Redis instance in the customer project with the following restrictions: - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -852,11 +852,11 @@ async def sample_update_instance(): repeated paths field may only include these fields from [Instance][google.cloud.redis.v1.Instance]: - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - ``replica_count`` + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index 3d9553229e7f..e02ecf33f133 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -115,19 +115,19 @@ class CloudRedisClient(metaclass=CloudRedisClientMeta): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, 
named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` """ @staticmethod @@ -672,7 +672,7 @@ def list_instances(self, The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all regions available to the project are queried, and the results @@ -1080,12 +1080,12 @@ def sample_create_instance(): Required. The logical name of the Redis instance in the customer project with the following restrictions: - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. 
+ - Must be unique within the customer project / location This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1229,11 +1229,11 @@ def sample_update_instance(): repeated paths field may only include these fields from [Instance][google.cloud.redis.v1.Instance]: - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - ``replica_count`` + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 6267d2c78aa5..9ca4b3c18cb7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -115,19 +115,19 @@ class CloudRedisGrpcTransport(CloudRedisTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances 
are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -358,7 +358,7 @@ def list_instances(self) -> Callable[ The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all regions available to the project are queried, and the results diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 3731518549ef..8c15a6dba158 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -119,19 +119,19 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + 
``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -364,7 +364,7 @@ def list_instances(self) -> Callable[ The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all regions available to the project are queried, and the results diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 3f3e74eaad17..620805e12072 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -735,19 +735,19 @@ class CloudRedisRestTransport(_BaseCloudRedisRestTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis 
instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index c136b41f72eb..832be441c1f7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -746,19 +746,19 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The 
service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py index d6af078cafb0..8e6a18eba64f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/types/cloud_redis.py @@ -126,10 +126,10 @@ class Instance(proto.Message): latest supported version will be used. Currently, the supported values are: - - ``REDIS_3_2`` for Redis 3.2 compatibility - - ``REDIS_4_0`` for Redis 4.0 compatibility (default) - - ``REDIS_5_0`` for Redis 5.0 compatibility - - ``REDIS_6_X`` for Redis 6.x compatibility + - ``REDIS_3_2`` for Redis 3.2 compatibility + - ``REDIS_4_0`` for Redis 4.0 compatibility (default) + - ``REDIS_5_0`` for Redis 5.0 compatibility + - ``REDIS_6_X`` for Redis 6.x compatibility reserved_ip_range (str): Optional. For DIRECT_PEERING mode, the CIDR range of internal addresses that are reserved for this instance. 
@@ -175,20 +175,20 @@ class Instance(proto.Message): Redis version 3.2 and newer: - - maxmemory-policy - - notify-keyspace-events + - maxmemory-policy + - notify-keyspace-events Redis version 4.0 and newer: - - activedefrag - - lfu-decay-time - - lfu-log-factor - - maxmemory-gb + - activedefrag + - lfu-decay-time + - lfu-log-factor + - maxmemory-gb Redis version 5.0 and newer: - - stream-node-max-bytes - - stream-node-max-entries + - stream-node-max-bytes + - stream-node-max-entries tier (google.cloud.redis_v1.types.Instance.Tier): Required. The service tier of the instance. memory_size_gb (int): @@ -945,12 +945,11 @@ class CreateInstanceRequest(proto.Message): Required. The logical name of the Redis instance in the customer project with the following restrictions: - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location + - Must contain only lowercase letters, numbers, and hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location instance (google.cloud.redis_v1.types.Instance): Required. A Redis [Instance] resource """ @@ -981,11 +980,11 @@ class UpdateInstanceRequest(proto.Message): paths field may only include these fields from [Instance][google.cloud.redis.v1.Instance]: - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - ``replica_count`` + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` instance (google.cloud.redis_v1.types.Instance): Required. Update description. Only fields specified in update_mask are updated. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py index 725e60942957..6e826449cef2 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -64,19 +64,19 @@ class CloudRedisAsyncClient: Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` """ _client: CloudRedisClient @@ -291,7 +291,7 @@ async def list_instances(self, The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all 
regions available to the project are queried, and the results @@ -594,12 +594,12 @@ async def sample_create_instance(): Required. The logical name of the Redis instance in the customer project with the following restrictions: - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -744,11 +744,11 @@ async def sample_update_instance(): repeated paths field may only include these fields from [Instance][google.cloud.redis.v1.Instance]: - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - ``replica_count`` + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index a8ff60e651bb..420273052888 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -115,19 +115,19 @@ class CloudRedisClient(metaclass=CloudRedisClientMeta): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works 
with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` """ @staticmethod @@ -672,7 +672,7 @@ def list_instances(self, The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all regions available to the project are queried, and the results @@ -973,12 +973,12 @@ def sample_create_instance(): Required. The logical name of the Redis instance in the customer project with the following restrictions: - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location + - Must contain only lowercase letters, numbers, and + hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. 
+ - Must be unique within the customer project / location This corresponds to the ``instance_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -1122,11 +1122,11 @@ def sample_update_instance(): repeated paths field may only include these fields from [Instance][google.cloud.redis.v1.Instance]: - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - ``replica_count`` + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index fcaefca376cc..38227ed3e965 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -115,19 +115,19 @@ class CloudRedisGrpcTransport(CloudRedisTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + 
``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -358,7 +358,7 @@ def list_instances(self) -> Callable[ The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all regions available to the project are queried, and the results diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 23337db892de..1df810cc89e8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -119,19 +119,19 @@ class CloudRedisGrpcAsyncIOTransport(CloudRedisTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - 
``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -364,7 +364,7 @@ def list_instances(self) -> Callable[ The location should have the following format: - - ``projects/{project_id}/locations/{location_id}`` + - ``projects/{project_id}/locations/{location_id}`` If ``location_id`` is specified as ``-`` (wildcard), then all regions available to the project are queried, and the results diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 49531cd4cbc7..1061ad6afb86 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -471,19 +471,19 @@ class CloudRedisRestTransport(_BaseCloudRedisRestTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each 
project has a collection of available locations, named: - ``/locations/*`` - - Each location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 62b7b33c8f0b..fdb993eadacf 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -482,19 +482,19 @@ class AsyncCloudRedisRestTransport(_BaseCloudRedisRestTransport): Memorystore for Redis API and defines the following resource model for managing Redis instances: - - The service works with a collection of cloud projects, named: - ``/projects/*`` - - Each project has a collection of available locations, named: - ``/locations/*`` - - Each 
location has a collection of Redis instances, named: - ``/instances/*`` - - As such, Redis instances are resources of the form: - ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + - The service works with a collection of cloud projects, named: + ``/projects/*`` + - Each project has a collection of available locations, named: + ``/locations/*`` + - Each location has a collection of Redis instances, named: + ``/instances/*`` + - As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be referring to a GCP ``region``; for example: - - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + - ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py index 8e73ea0855bf..016aa43a64c9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/types/cloud_redis.py @@ -113,10 +113,10 @@ class Instance(proto.Message): latest supported version will be used. Currently, the supported values are: - - ``REDIS_3_2`` for Redis 3.2 compatibility - - ``REDIS_4_0`` for Redis 4.0 compatibility (default) - - ``REDIS_5_0`` for Redis 5.0 compatibility - - ``REDIS_6_X`` for Redis 6.x compatibility + - ``REDIS_3_2`` for Redis 3.2 compatibility + - ``REDIS_4_0`` for Redis 4.0 compatibility (default) + - ``REDIS_5_0`` for Redis 5.0 compatibility + - ``REDIS_6_X`` for Redis 6.x compatibility reserved_ip_range (str): Optional. 
For DIRECT_PEERING mode, the CIDR range of internal addresses that are reserved for this instance. @@ -162,20 +162,20 @@ class Instance(proto.Message): Redis version 3.2 and newer: - - maxmemory-policy - - notify-keyspace-events + - maxmemory-policy + - notify-keyspace-events Redis version 4.0 and newer: - - activedefrag - - lfu-decay-time - - lfu-log-factor - - maxmemory-gb + - activedefrag + - lfu-decay-time + - lfu-log-factor + - maxmemory-gb Redis version 5.0 and newer: - - stream-node-max-bytes - - stream-node-max-entries + - stream-node-max-bytes + - stream-node-max-entries tier (google.cloud.redis_v1.types.Instance.Tier): Required. The service tier of the instance. memory_size_gb (int): @@ -846,12 +846,11 @@ class CreateInstanceRequest(proto.Message): Required. The logical name of the Redis instance in the customer project with the following restrictions: - - Must contain only lowercase letters, numbers, and - hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location + - Must contain only lowercase letters, numbers, and hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location instance (google.cloud.redis_v1.types.Instance): Required. A Redis [Instance] resource """ @@ -882,11 +881,11 @@ class UpdateInstanceRequest(proto.Message): paths field may only include these fields from [Instance][google.cloud.redis.v1.Instance]: - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - ``replica_count`` + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + - ``replica_count`` instance (google.cloud.redis_v1.types.Instance): Required. Update description. Only fields specified in update_mask are updated. 
From 842f4c82a1a91c578c8602a1aff8a5ca154c63f9 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 29 Aug 2025 20:13:20 -0400 Subject: [PATCH 1302/1339] chore: remove unused protobuf dependency in repositories.bzl (#2433) --- packages/gapic-generator/repositories.bzl | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/packages/gapic-generator/repositories.bzl b/packages/gapic-generator/repositories.bzl index 480ae74586ce..2a593a5a757e 100644 --- a/packages/gapic-generator/repositories.bzl +++ b/packages/gapic-generator/repositories.bzl @@ -9,17 +9,6 @@ filegroup( def gapic_generator_python(): - _protobuf_version = "3.21.12" - _protobuf_sha256 = "930c2c3b5ecc6c9c12615cf5ad93f1cd6e12d0aba862b572e076259970ac3a53" - _protobuf_version_in_link = "v{}".format(_protobuf_version) - _maybe( - http_archive, - name = "com_google_protobuf", - sha256 = _protobuf_sha256, - url = "https://github.com/protocolbuffers/protobuf/archive/refs/tags/{}.tar.gz".format(_protobuf_version_in_link), - strip_prefix = "protobuf-{}".format(_protobuf_version), - ) - _maybe( http_archive, name = "bazel_skylib", From af571ef0807b2cf42ea0ea627cfde15e5803668c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 2 Sep 2025 11:19:25 -0400 Subject: [PATCH 1303/1339] chore(main): release 1.26.2 (#2434) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index c7e10c962ca8..85172ea5bc0f 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.26.2](https://github.com/googleapis/gapic-generator-python/compare/v1.26.1...v1.26.2) (2025-08-30) + + +### Documentation + +* Minor 
formatting improvements ([#2432](https://github.com/googleapis/gapic-generator-python/issues/2432)) ([17d6db5](https://github.com/googleapis/gapic-generator-python/commit/17d6db5355b3d64c04dcdfe9b8433493b264c60c)) + ## [1.26.1](https://github.com/googleapis/gapic-generator-python/compare/v1.26.0...v1.26.1) (2025-08-28) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index acf0b267d9e5..683ad68595ac 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.26.1" +version = "1.26.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 5568d7a0dc1c693af36b716d15d7f91024861bd3 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 8 Sep 2025 18:29:43 +0200 Subject: [PATCH 1304/1339] chore(deps): update all dependencies (#2436) Co-authored-by: Anthonios Partheniou --- .../.github/workflows/tests.yaml | 30 +++++++++---------- packages/gapic-generator/requirements.txt | 18 +++++------ 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 4f73366add06..7ba9e493a987 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -23,7 +23,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 # Use python 3.10 for docs to match the version for the sphinx plugin # https://github.com/googleapis/synthtool/pull/1891 with: @@ -38,7 +38,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 # Use 
python 3.10 for docs to match the version for the sphinx plugin # https://github.com/googleapis/synthtool/pull/1891 with: @@ -58,7 +58,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -81,7 +81,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -127,7 +127,7 @@ jobs: sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - name: Set up Python "3.13" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -173,7 +173,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "${{ matrix.python }}" cache: 'pip' @@ -201,7 +201,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "3.13" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -229,7 +229,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "3.13" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -254,7 +254,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "3.13" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -277,7 +277,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -302,7 +302,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v5 + uses: 
actions/setup-python@v6 with: python-version: ${{ matrix.python }} cache: 'pip' @@ -349,7 +349,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python 3.13 - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -368,7 +368,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python 3.13 - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -389,7 +389,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python 3.13 - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' @@ -410,7 +410,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Set up Python "3.13" - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.13" cache: 'pip' diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index e071bc5e9132..af18eeb5e242 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -112,9 +112,9 @@ backports-asyncio-runner==1.2.0 \ --hash=sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5 \ --hash=sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162 # via pytest-asyncio -cachetools==5.5.2 \ - --hash=sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4 \ - --hash=sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a +cachetools==6.2.0 \ + --hash=sha256:1c76a8960c0041fcc21097e357f882197c79da0dbff766e7317890a65d7d8ba6 \ + --hash=sha256:38b328c0889450f05f5e120f56ab68c8abaf424e1275522b138ffc93253f7e32 # via google-auth certifi==2025.8.3 \ --hash=sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407 \ @@ -201,9 +201,9 @@ charset-normalizer==3.4.3 \ --hash=sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c \ 
--hash=sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9 # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a +click==8.2.1 \ + --hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \ + --hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b # via -r requirements.in exceptiongroup==1.3.0 \ --hash=sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10 \ @@ -801,9 +801,9 @@ pypandoc==1.15 \ --hash=sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16 \ --hash=sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13 # via -r requirements.in -pytest==8.4.1 \ - --hash=sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7 \ - --hash=sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c +pytest==8.4.2 \ + --hash=sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01 \ + --hash=sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79 # via pytest-asyncio pytest-asyncio==1.1.0 \ --hash=sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf \ From b2907915c69e78931161c22af035359d175a8510 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 18 Sep 2025 22:39:35 +0200 Subject: [PATCH 1305/1339] chore(deps): update all dependencies (#2437) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 154 +++++++++++----------- 1 file changed, 79 insertions(+), 75 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index af18eeb5e242..1cfecea4571d 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -409,68 +409,72 @@ jinja2==3.1.6 \ 
--hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 # via -r requirements.in -libcst==1.8.2 \ - --hash=sha256:08a8c7d9922ca6eed24e2c13a3c552b3c186af8fc78e5d4820b58487d780ec19 \ - --hash=sha256:08e9dca4ab6f8551794ce7ec146f86def6a82da41750cbed2c07551345fa10d3 \ - --hash=sha256:0be639f5b2e1999a4b4a82a0f4633969f97336f052d0c131627983589af52f56 \ - --hash=sha256:0f23409add2aaebbb6d8e881babab43c2d979f051b8bd8aed5fe779ea180a4e8 \ - --hash=sha256:11ea148902e3e1688afa392087c728ac3a843e54a87d334d1464d2097d3debb7 \ - --hash=sha256:1ba85f9e6a7f37ef998168aa3fd28d263d7f83016bd306a4508a2394e5e793b4 \ - --hash=sha256:207575dec2dae722acf6ab39b4b361151c65f8f895fd37edf9d384f5541562e1 \ - --hash=sha256:22c9473a2cc53faabcc95a0ac6ca4e52d127017bf34ba9bc0f8e472e44f7b38e \ - --hash=sha256:2b5c57a3c1976c365678eb0730bcb140d40510990cb77df9a91bb5c41d587ba6 \ - --hash=sha256:2e264307ec49b2c72480422abafe80457f90b4e6e693b7ddf8a23d24b5c24001 \ - --hash=sha256:2e8c1dfa854e700fcf6cd79b2796aa37d55697a74646daf5ea47c7c764bac31c \ - --hash=sha256:36d5ab95f39f855521585b0e819dc2d4d1b2a4080bad04c2f3de1e387a5d2233 \ - --hash=sha256:3ece08ba778b6eeea74d9c705e9af2d1b4e915e9bc6de67ad173b962e575fcc0 \ - --hash=sha256:41613fe08e647213546c7c59a5a1fc5484666e7d4cab6e80260c612acbb20e8c \ - --hash=sha256:43ccaa6c54daa1749cec53710c70d47150965574d4c6d4c4f2e3f87b9bf9f591 \ - --hash=sha256:449f9ff8a5025dcd5c8d4ad28f6c291de5de89e4c044b0bda96b45bef8999b75 \ - --hash=sha256:460fcf3562f078781e1504983cb11909eb27a1d46eaa99e65c4b0fafdc298298 \ - --hash=sha256:4f14f5045766646ed9e8826b959c6d07194788babed1e0ba08c94ea4f39517e3 \ - --hash=sha256:51bbafdd847529e8a16d1965814ed17831af61452ee31943c414cb23451de926 \ - --hash=sha256:52a1067cf31d9e9e4be514b253bea6276f1531dd7de6ab0917df8ce5b468a820 \ - --hash=sha256:59e8f611c977206eba294c296c2d29a1c1b1b88206cb97cd0d4847c1a3d923e7 \ - 
--hash=sha256:5c513e64eff0f7bf2a908e2d987a98653eb33e1062ce2afd3a84af58159a24f9 \ - --hash=sha256:5efd1bf6ee5840d1b0b82ec8e0b9c64f182fa5a7c8aad680fbd918c4fa3826e0 \ - --hash=sha256:66e82cedba95a6176194a817be4232c720312f8be6d2c8f3847f3317d95a0c7f \ - --hash=sha256:6753e50904e05c27915933da41518ecd7a8ca4dd3602112ba44920c6e353a455 \ - --hash=sha256:67d9720d91f507c87b3e5f070627ad640a00bc6cfdf5635f8c6ee9f2964cf71c \ - --hash=sha256:688a03bac4dfb9afc5078ec01d53c21556381282bdf1a804dd0dbafb5056de2a \ - --hash=sha256:706d07106af91c343150be86caeae1ea3851b74aa0730fcbbf8cd089e817f818 \ - --hash=sha256:7fe762c4c390039b79b818cbc725d8663586b25351dc18a2704b0e357d69b924 \ - --hash=sha256:8310521f2ccb79b5c4345750d475b88afa37bad930ab5554735f85ad5e3add30 \ - --hash=sha256:8a81d816c2088d2055112af5ecd82fdfbe8ff277600e94255e2639b07de10234 \ - --hash=sha256:94b7c032b72566077614a02baab1929739fd0af0cc1d46deaba4408b870faef2 \ - --hash=sha256:96e2363e1f6e44bd7256bbbf3a53140743f821b5133046e6185491e0d9183447 \ - --hash=sha256:9c2bd4ac288a9cdb7ffc3229a9ce8027a66a3fd3f2ab9e13da60f5fbfe91f3b2 \ - --hash=sha256:a50618f4819a97ef897e055ac7aaf1cad5df84c206f33be35b0759d671574197 \ - --hash=sha256:a553d452004e44b841788f6faa7231a02157527ddecc89dbbe5b689b74822226 \ - --hash=sha256:a5d5519962ce7c72d81888fb0c09e58e308ba4c376e76bcd853b48151063d6a8 \ - --hash=sha256:a70e40ce7600e1b32e293bb9157e9de3b69170e2318ccb219102f1abb826c94a \ - --hash=sha256:ae22376633cfa3db21c4eed2870d1c36b5419289975a41a45f34a085b2d9e6ea \ - --hash=sha256:b0110140738be1287e3724080a101e7cec6ae708008b7650c9d8a1c1788ec03a \ - --hash=sha256:b5269b96367e65793a7714608f6d906418eb056d59eaac9bba980486aabddbed \ - --hash=sha256:b62aa11d6b74ed5545e58ac613d3f63095e5fd0254b3e0d1168fda991b9a6b41 \ - --hash=sha256:b68ea4a6018abfea1f68d50f74de7d399172684c264eb09809023e2c8696fc23 \ - --hash=sha256:b88e9104c456590ad0ef0e82851d4fc03e9aa9d621fa8fdd4cd0907152a825ae \ - --hash=sha256:bba7c2b5063e8ada5a5477f9fa0c01710645426b5a8628ec50d558542a0a292e \ - 
--hash=sha256:be821d874ce8b26cbadd7277fa251a9b37f6d2326f8b5682b6fc8966b50a3a59 \ - --hash=sha256:c1381ddbd1066d543e05d580c15beacf671e1469a0b2adb6dba58fec311f4eed \ - --hash=sha256:c34060ff2991707c710250463ae9f415ebb21653f2f5b013c61c9c376ff9b715 \ - --hash=sha256:d11992561de0ad29ec2800230fbdcbef9efaa02805d5c633a73ab3cf2ba51bf1 \ - --hash=sha256:d20e932ddd9a389da57b060c26e84a24118c96ff6fc5dcc7b784da24e823b694 \ - --hash=sha256:d2194ae959630aae4176a4b75bd320b3274c20bef2a5ca6b8d6fc96d3c608edf \ - --hash=sha256:d97c9fe13aacfbefded6861f5200dcb8e837da7391a9bdeb44ccb133705990af \ - --hash=sha256:da2d8b008aff72acd5a4a588491abdda1b446f17508e700f26df9be80d8442ae \ - --hash=sha256:dd4310ea8ddc49cc8872e083737cf806299b17f93159a1f354d59aa08993e876 \ - --hash=sha256:e5ba3ea570c8fb6fc44f71aa329edc7c668e2909311913123d0d7ab8c65fc357 \ - --hash=sha256:e9bb599c175dc34a4511f0e26d5b5374fbcc91ea338871701a519e95d52f3c28 \ - --hash=sha256:f5391d71bd7e9e6c73dcb3ee8d8c63b09efc14ce6e4dad31568d4838afc9aae0 \ - --hash=sha256:f54f5c4176d60e7cd6b0880e18fb3fa8501ae046069151721cab457c7c538a3d \ - --hash=sha256:f69582e24667715e3860d80d663f1caeb2398110077e23cc0a1e0066a851f5ab \ - --hash=sha256:f74b0bc7378ad5afcf25ac9d0367b4dbba50f6f6468faa41f5dfddcf8bf9c0f8 \ - --hash=sha256:fa3b807c2d2b34397c135d19ad6abb20c47a2ddb7bf65d90455f2040f7797e1e +libcst==1.8.4 \ + --hash=sha256:0352c7d662c89243e730a28edf41577f87e28649c18ee365dd373c5fbdab2434 \ + --hash=sha256:056733760ba5ac1fd4cd518cddd5a43b3adbe2e0f6c7ce02532a114f7cd5d85b \ + --hash=sha256:062e424042c36a102abd11d8e9e27ac6be68e1a934b0ecfc9fb8fea017240d2f \ + --hash=sha256:074a3b17e270237fb36d3b94d7492fb137cb74217674484ba25e015e8d3d8bdc \ + --hash=sha256:114343271f70a79e6d08bc395f5dfa150227341fab646cc0a58e80550e7659b7 \ + --hash=sha256:14bda1e4ea0b04d3926d41f6dafbfd311a951b75a60fe0d79bb5a8249c1cef5b \ + --hash=sha256:1d468514a21cf3444dc3f3a4b1effc6c05255c98cc79e02af394652d260139f0 \ + 
--hash=sha256:259737faf90552a0589d95393dcaa3d3028be03ab3ea87478d46a1a4f922dd91 \ + --hash=sha256:27eeb16edb7dc0711d67e28bb8c0288e4147210aeb2434f08c16ac5db6b559e5 \ + --hash=sha256:2b1e570ba816da408b5ee40ac479b34e56d995bf32dcca6f0ddb3d69b08e77de \ + --hash=sha256:2c6d8f7087e9eaf005efde573f3f36d1d40366160155c195a6c4230d4c8a5839 \ + --hash=sha256:2d71e7e5982776f78cca9102286bb0895ef6f7083f76c0c9bc5ba4e9e40aee38 \ + --hash=sha256:2e156760fc741bbf2fa68f4e3b15f019e924ea852f02276d0a53b7375cf70445 \ + --hash=sha256:2e24d11a1be0b1791f7bace9d406f5a70b8691ef77be377b606950803de4657d \ + --hash=sha256:2fcff2130824f2cb5f4fd9c4c74fb639c5f02bc4228654461f6dc6b1006f20c0 \ + --hash=sha256:33664117fcb2913fdbd7de07a009193b660a16e7af18f7c1b4449e428f3b0f95 \ + --hash=sha256:3ad7f0a32ddcdff00a3eddfd35cfd8485d9f357a32e4c67558476570199f808f \ + --hash=sha256:3de575f0b5b466f2e9656b963f5848103cc518c6f3581902c6f430b07864584f \ + --hash=sha256:3eeba4edb40b2291c2460fe8d7e43f47e5fcc33f186675db5d364395adca3401 \ + --hash=sha256:43cbb6b41bc2c4785136f59a66692287d527aeb022789c4af44ad6e85b7b2baa \ + --hash=sha256:4a718e5f6b398a07ca5d533e6593c1590d69fe65c539323281959733d6d541dd \ + --hash=sha256:4b1cbadd988fee59b25ea154708cfed99cfaf45f9685707be422ad736371a9fe \ + --hash=sha256:52c9376ba11ede5430e40aa205101dfc41202465103c6540f24591f898afb3d6 \ + --hash=sha256:5db0b484670aac7ea442213afaa9addb1de0d9540a34ad44d376bec12242bc3a \ + --hash=sha256:5f167bf83dce662c9b499f1ea078ec2f2fee138e80f7d7dbd59c89ed28dc935f \ + --hash=sha256:64cc34d74c9543b30ec3d7481dd644cb1bb3888076b486592d7fa0f22632f1c6 \ + --hash=sha256:65364c214251ed5720f3f6d0c4ef1338aac91ad4bbc5d30253eac21832b0943a \ + --hash=sha256:6840e4011b583e9b7a71c00e7ab4281aea7456877b3ea6ecedb68a39a000bc64 \ + --hash=sha256:69b672c1afac5fe00d689f585ba57ac5facc4632f39b977d4b3e4711571c76e2 \ + --hash=sha256:6cc8b7e33f6c4677e220dd7025e1e980da4d3f497b9b8ee0320e36dd54597f68 \ + --hash=sha256:71e12101ef2a6e05b7610badb2bfa597379289f1408e305a8d19faacdb872f47 \ + 
--hash=sha256:7832ee448fbdf18884a1f9af5fba1be6d5e98deb560514d92339fd6318aef651 \ + --hash=sha256:783f52b7c8d82046f0d93812f62a25eb82c3834f198e6cbfd5bb03ca68b593c8 \ + --hash=sha256:7fe97d432d95b6bcb1694a6d0fa7e07dde8fa687a637958126410ee2ced94b81 \ + --hash=sha256:846aad04bac624a42d182add526d019e417e6a2b8a4c0bf690d32f9e1f3075ff \ + --hash=sha256:870a49df8575c11ea4f5319d54750f95d2d06370a263bd42d924a9cf23cf0cbe \ + --hash=sha256:873dd4e8b896f7cb0e78118badda55ec1f42e9301a4a948cc438955ff3ae2257 \ + --hash=sha256:8e7baaa6f01b6b6ea4b28d60204fddc679a3cd56d312beee200bd5f8f9711f0b \ + --hash=sha256:8e8d5158f976a5ee140ad0d3391e1a1b84b2ce5da62f16e48feab4bc21b91967 \ + --hash=sha256:929798ca38ea76a5056f725221d66c6923e749caa9fa7f4cc86e914a3698493d \ + --hash=sha256:93c76ab41d736b66d6fb3df32cd33184eed17666d7dc3ce047cf7ccdfe80b5b1 \ + --hash=sha256:9a5cd7beef667e5de3c5fb0ec387dc19aeda5cd4606ff541d0e8613bb3ef3b23 \ + --hash=sha256:9be5b1b7d416900ff9bcdb4945692e6252fdcbd95514e98439f81568568c9e02 \ + --hash=sha256:a179c712f38acb85e81d8949e80e05a422c92dcf5a00d8f4976f7e547a9f0916 \ + --hash=sha256:a334dd11cdea34275df91c2ae9cc5933ec7e0ad5698264966708d637d110b627 \ + --hash=sha256:a4270123c988e130cec94bfe1b54d34784a40b34b2d5ac0507720c1272bd3209 \ + --hash=sha256:a65e3c409ef16ae369600d085d23a3897d4fccf4fdcc09294a402c513ac35906 \ + --hash=sha256:a90c80e4d89222e11c7a734bc1b7f930bc2aba7750ad149bde1b136f839ea788 \ + --hash=sha256:b376ef7fa30bef611d4fb32af1da0e767b801b00322028a874ab3a441686b6a9 \ + --hash=sha256:b69e94625702825309fd9e50760e77a5a60bd1e7a8e039862c8dd3011a6e1530 \ + --hash=sha256:c9c775bc473225a0ad8422150fd9cf18ed2eebd7040996772937ac558f294d6c \ + --hash=sha256:cb188ebd4114144e14f6beb5499e43bebd0ca3ce7f2beb20921d49138c67b814 \ + --hash=sha256:d011d731c2e673fbd9c84794418230a913ae3c98fc86f27814612b6b6d53d26b \ + --hash=sha256:d130f3e2d40c5f48cbbc804710ddf5b4db9dd7c0118f3b35f109164a555860d2 \ + --hash=sha256:d3d4111f971632e9ddf8191aeef4576595e18ef3fa7b3016bfe15a08fa8554df \ + 
--hash=sha256:e4c5055e255d12745c7cc60fb5fb31c0f82855864c15dc9ad33a44f829b92600 \ + --hash=sha256:e6f309c0f42e323c527d8c9007f583fd1668e45884208184a70644d916f27829 \ + --hash=sha256:ea74c56cb11a1fdca9f8ab258965adce23e049ef525fdcc5c254a093e3de25cb \ + --hash=sha256:eff724c17df10e059915000eaf59f4e79998b66a7d35681e934a9a48667df931 \ + --hash=sha256:f0f105d32c49baf712df2be360d496de67a2375bcf4e9707e643b7efc2f9a55a \ + --hash=sha256:f5bd0bcdd2a8da9dad47d36d71757d8ba87baf887ae6982e2cb8621846610c49 \ + --hash=sha256:fa870f34018c7241ee9227723cac0787599a2a8a2bfd53eacfbbe1ea1a272ae6 \ + --hash=sha256:fbadca1bc31f696875c955080c407a40b2d1aa7f79ca174a65dcb0542a57db6c \ + --hash=sha256:fceb17616f1afe528c88243e3e7f78f84f0cc287463f04f3c1243e20a469e869 \ + --hash=sha256:fedfd33e5dda2200d582554e6476626d4706aa1fa2794bfb271879f8edff89b9 # via -r requirements.in markupsafe==3.0.2 \ --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ @@ -767,16 +771,16 @@ proto-plus==1.26.1 \ # via # -r requirements.in # google-api-core -protobuf==6.32.0 \ - --hash=sha256:15eba1b86f193a407607112ceb9ea0ba9569aed24f93333fe9a497cf2fda37d3 \ - --hash=sha256:501fe6372fd1c8ea2a30b4d9be8f87955a64d6be9c88a973996cef5ef6f0abf1 \ - --hash=sha256:75a2aab2bd1aeb1f5dc7c5f33bcb11d82ea8c055c9becbb41c26a8c43fd7092c \ - --hash=sha256:7db8ed09024f115ac877a1427557b838705359f047b2ff2f2b2364892d19dacb \ - --hash=sha256:84f9e3c1ff6fb0308dbacb0950d8aa90694b0d0ee68e75719cb044b7078fe741 \ - --hash=sha256:a81439049127067fc49ec1d36e25c6ee1d1a2b7be930675f919258d03c04e7d2 \ - --hash=sha256:a8bdbb2f009cfc22a36d031f22a625a38b615b5e19e558a7b756b3279723e68e \ - --hash=sha256:ba377e5b67b908c8f3072a57b63e2c6a4cbd18aea4ed98d2584350dbf46f2783 \ - --hash=sha256:d52691e5bee6c860fff9a1c86ad26a13afbeb4b168cd4445c922b7e2cf85aaf0 +protobuf==6.32.1 \ + --hash=sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346 \ + --hash=sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4 \ + 
--hash=sha256:68ff170bac18c8178f130d1ccb94700cf72852298e016a2443bdb9502279e5f1 \ + --hash=sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085 \ + --hash=sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1 \ + --hash=sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710 \ + --hash=sha256:d0975d0b2f3e6957111aa3935d08a0eb7e006b1505d825f862a1fffc8348e122 \ + --hash=sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281 \ + --hash=sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d # via # -r requirements.in # google-api-core @@ -805,9 +809,9 @@ pytest==8.4.2 \ --hash=sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01 \ --hash=sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79 # via pytest-asyncio -pytest-asyncio==1.1.0 \ - --hash=sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf \ - --hash=sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea +pytest-asyncio==1.2.0 \ + --hash=sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99 \ + --hash=sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57 # via -r requirements.in pyyaml==6.0.2 \ --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ From e6f2616325ae7320df959d57c0d9f073609c7d30 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 24 Sep 2025 11:00:13 -0400 Subject: [PATCH 1306/1339] test: fix async showcase test (#2441) --- .../gapic-generator/tests/system/conftest.py | 39 ++++++++++++++++--- .../system/test_grpc_interceptor_streams.py | 8 +++- .../tests/system/test_streams.py | 3 ++ 3 files changed, 42 insertions(+), 8 deletions(-) diff --git a/packages/gapic-generator/tests/system/conftest.py b/packages/gapic-generator/tests/system/conftest.py index a7967d4f5d08..180e48b8d59a 100644 --- a/packages/gapic-generator/tests/system/conftest.py +++ 
b/packages/gapic-generator/tests/system/conftest.py @@ -17,6 +17,7 @@ from unittest import mock import os import pytest +import pytest_asyncio from typing import Sequence, Tuple @@ -60,6 +61,8 @@ except: HAS_ASYNC_REST_IDENTITY_TRANSPORT = False + _GRPC_VERSION = grpc.__version__ + # TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. # See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. def async_anonymous_credentials(): @@ -79,7 +82,7 @@ def async_anonymous_credentials(): def event_loop(): return asyncio.get_event_loop() - @pytest.fixture(params=["grpc_asyncio", "rest_asyncio"]) + @pytest_asyncio.fixture(params=["grpc_asyncio", "rest_asyncio"]) def async_echo(use_mtls, request, event_loop): transport = request.param if transport == "rest_asyncio" and not HAS_ASYNC_REST_ECHO_TRANSPORT: @@ -94,7 +97,7 @@ def async_echo(use_mtls, request, event_loop): credentials=async_anonymous_credentials(), ) - @pytest.fixture(params=["grpc_asyncio", "rest_asyncio"]) + @pytest_asyncio.fixture(params=["grpc_asyncio", "rest_asyncio"]) def async_identity(use_mtls, request, event_loop): transport = request.param if transport == "rest_asyncio" and not HAS_ASYNC_REST_IDENTITY_TRANSPORT: @@ -310,9 +313,18 @@ def __init__(self, key, value): def _add_request_metadata(self, client_call_details): if client_call_details.metadata is not None: + # https://grpc.github.io/grpc/python/glossary.html#term-metadata. + # For sync, `ClientCallDetails.metadata` is a list. + # Whereas for async, `ClientCallDetails.metadata` is a mapping. 
+ # https://grpc.github.io/grpc/python/grpc_asyncio.html#grpc.aio.Metadata client_call_details.metadata.append((self._key, self._value)) self.request_metadata = client_call_details.metadata + def _read_response_metadata_stream(self): + # Access the metadata via the original stream object + if hasattr(self, "_original_stream"): + self.response_metadata = self._original_stream.trailing_metadata() + def intercept_unary_unary(self, continuation, client_call_details, request): self._add_request_metadata(client_call_details) response = continuation(client_call_details, request) @@ -323,6 +335,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): def intercept_unary_stream(self, continuation, client_call_details, request): self._add_request_metadata(client_call_details) response_it = continuation(client_call_details, request) + self._original_stream = response_it return response_it def intercept_stream_unary( @@ -337,6 +350,7 @@ def intercept_stream_stream( ): self._add_request_metadata(client_call_details) response_it = continuation(client_call_details, request_iterator) + self._original_stream = response_it return response_it @@ -354,8 +368,21 @@ def __init__(self, key, value): async def _add_request_metadata(self, client_call_details): if client_call_details.metadata is not None: - client_call_details.metadata.append((self._key, self._value)) - self.request_metadata = client_call_details.metadata + # As of gRPC 1.75.0 and newer, + # https://grpc.github.io/grpc/python/grpc_asyncio.html#grpc.aio.Metadata + # Note that for async, `ClientCallDetails.metadata` is a mapping. + # Whereas for sync, `ClientCallDetails.metadata` is a list. + # https://grpc.github.io/grpc/python/glossary.html#term-metadata. + # Prior to gRPC 1.75.0, `ClientCallDetails.metadata` is a list + # for both sync and async. 
+ grpc_major, grpc_minor = [ + int(part) for part in _GRPC_VERSION.split(".")[0:2] + ] + if grpc_major == 1 and grpc_minor < 75: + client_call_details.metadata.append((self._key, self._value)) + else: + client_call_details.metadata[self._key] = self._value + self.request_metadata = list(client_call_details.metadata) async def intercept_unary_unary(self, continuation, client_call_details, request): await self._add_request_metadata(client_call_details) @@ -407,8 +434,8 @@ def intercepted_echo_grpc(use_mtls): return EchoClient(transport=transport), interceptor -@pytest.fixture -def intercepted_echo_grpc_async(): +@pytest_asyncio.fixture +async def intercepted_echo_grpc_async(): # The interceptor adds 'showcase-trailer' client metadata. Showcase server # echoes any metadata with key 'showcase-trailer', so the same metadata # should appear as trailing metadata in the response. diff --git a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py index 50c5bee31d30..dca865a571e3 100644 --- a/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py +++ b/packages/gapic-generator/tests/system/test_grpc_interceptor_streams.py @@ -37,8 +37,10 @@ def test_unary_stream(intercepted_echo_grpc): response_metadata = [ (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] + assert intercepted_metadata[0] in interceptor.request_metadata assert intercepted_metadata[0] in response_metadata - interceptor.response_metadata = response_metadata + interceptor._read_response_metadata_stream() + assert intercepted_metadata[0] in interceptor.response_metadata def test_stream_stream(intercepted_echo_grpc): @@ -54,5 +56,7 @@ def test_stream_stream(intercepted_echo_grpc): response_metadata = [ (metadata.key, metadata.value) for metadata in responses.trailing_metadata() ] + assert intercepted_metadata[0] in interceptor.request_metadata assert intercepted_metadata[0] in 
response_metadata - interceptor.response_metadata = response_metadata + interceptor._read_response_metadata_stream() + assert intercepted_metadata[0] in interceptor.response_metadata diff --git a/packages/gapic-generator/tests/system/test_streams.py b/packages/gapic-generator/tests/system/test_streams.py index 38a2e47745fe..e7fbc1c94bbc 100644 --- a/packages/gapic-generator/tests/system/test_streams.py +++ b/packages/gapic-generator/tests/system/test_streams.py @@ -19,6 +19,9 @@ from google import showcase +# `_METADATA` will be sent as part of the request, and the +# showcase server will echo it (since it has key 'showcase-trailer') as trailing +# metadata. _METADATA = (("showcase-trailer", "hello world"),) From ca106d26cf5131f02f3a0eee52e3fc754eaf14f2 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 24 Sep 2025 19:12:22 +0200 Subject: [PATCH 1307/1339] chore(deps): update all dependencies (#2442) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 110 +++++++++++----------- 1 file changed, 55 insertions(+), 55 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 1cfecea4571d..aa753841aa5f 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -201,9 +201,9 @@ charset-normalizer==3.4.3 \ --hash=sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c \ --hash=sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9 # via requests -click==8.2.1 \ - --hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \ - --hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b +click==8.3.0 \ + --hash=sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc \ + --hash=sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4 # via -r requirements.in exceptiongroup==1.3.0 \ 
--hash=sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10 \ @@ -336,58 +336,58 @@ grpc-google-iam-v1==0.14.2 \ --hash=sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351 \ --hash=sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20 # via -r requirements.in -grpcio==1.74.0 \ - --hash=sha256:0f87bddd6e27fc776aacf7ebfec367b6d49cad0455123951e4488ea99d9b9b8f \ - --hash=sha256:136b53c91ac1d02c8c24201bfdeb56f8b3ac3278668cbb8e0ba49c88069e1bdc \ - --hash=sha256:1733969040989f7acc3d94c22f55b4a9501a30f6aaacdbccfaba0a3ffb255ab7 \ - --hash=sha256:176d60a5168d7948539def20b2a3adcce67d72454d9ae05969a2e73f3a0feee7 \ - --hash=sha256:1a2b06afe2e50ebfd46247ac3ba60cac523f54ec7792ae9ba6073c12daf26f0a \ - --hash=sha256:1bf949792cee20d2078323a9b02bacbbae002b9e3b9e2433f2741c15bdeba1c4 \ - --hash=sha256:22b834cef33429ca6cc28303c9c327ba9a3fafecbf62fae17e9a7b7163cc43ac \ - --hash=sha256:2918948864fec2a11721d91568effffbe0a02b23ecd57f281391d986847982f6 \ - --hash=sha256:2bc2d7d8d184e2362b53905cb1708c84cb16354771c04b490485fa07ce3a1d89 \ - --hash=sha256:2f609a39f62a6f6f05c7512746798282546358a37ea93c1fcbadf8b2fed162e3 \ - --hash=sha256:3601274bc0523f6dc07666c0e01682c94472402ac2fd1226fd96e079863bfa49 \ - --hash=sha256:3b03d8f2a07f0fea8c8f74deb59f8352b770e3900d143b3d1475effcb08eec20 \ - --hash=sha256:3d14e3c4d65e19d8430a4e28ceb71ace4728776fd6c3ce34016947474479683f \ - --hash=sha256:42f8fee287427b94be63d916c90399ed310ed10aadbf9e2e5538b3e497d269bc \ - --hash=sha256:4bc5fca10aaf74779081e16c2bcc3d5ec643ffd528d9e7b1c9039000ead73bae \ - --hash=sha256:4e4181bfc24413d1e3a37a0b7889bea68d973d4b45dd2bc68bb766c140718f82 \ - --hash=sha256:55b453812fa7c7ce2f5c88be3018fb4a490519b6ce80788d5913f3f9d7da8c7b \ - --hash=sha256:566b9395b90cc3d0d0c6404bc8572c7c18786ede549cdb540ae27b58afe0fb91 \ - --hash=sha256:5f251c355167b2360537cf17bea2cf0197995e551ab9da6a0a59b3da5e8704f9 \ - --hash=sha256:60d2d48b0580e70d2e1954d0d19fa3c2e60dd7cbed826aca104fff518310d1c5 \ - 
--hash=sha256:64229c1e9cea079420527fa8ac45d80fc1e8d3f94deaa35643c381fa8d98f362 \ - --hash=sha256:655726919b75ab3c34cdad39da5c530ac6fa32696fb23119e36b64adcfca174a \ - --hash=sha256:662456c4513e298db6d7bd9c3b8df6f75f8752f0ba01fb653e252ed4a59b5a5d \ - --hash=sha256:68c8ebcca945efff9d86d8d6d7bfb0841cf0071024417e2d7f45c5e46b5b08eb \ - --hash=sha256:69e1a8180868a2576f02356565f16635b99088da7df3d45aaa7e24e73a054e31 \ - --hash=sha256:6bab67d15ad617aff094c382c882e0177637da73cbc5532d52c07b4ee887a87b \ - --hash=sha256:7d95d71ff35291bab3f1c52f52f474c632db26ea12700c2ff0ea0532cb0b5854 \ - --hash=sha256:80d1f4fbb35b0742d3e3d3bb654b7381cd5f015f8497279a1e9c21ba623e01b1 \ - --hash=sha256:834988b6c34515545b3edd13e902c1acdd9f2465d386ea5143fb558f153a7176 \ - --hash=sha256:8533e6e9c5bd630ca98062e3a1326249e6ada07d05acf191a77bc33f8948f3d8 \ - --hash=sha256:85bd5cdf4ed7b2d6438871adf6afff9af7096486fcf51818a81b77ef4dd30907 \ - --hash=sha256:86ad489db097141a907c559988c29718719aa3e13370d40e20506f11b4de0d11 \ - --hash=sha256:885912559974df35d92219e2dc98f51a16a48395f37b92865ad45186f294096c \ - --hash=sha256:8efe72fde5500f47aca1ef59495cb59c885afe04ac89dd11d810f2de87d935d4 \ - --hash=sha256:8f7b5882fb50632ab1e48cb3122d6df55b9afabc265582808036b6e51b9fd6b7 \ - --hash=sha256:9e7c4389771855a92934b2846bd807fc25a3dfa820fd912fe6bd8136026b2707 \ - --hash=sha256:9e912d3c993a29df6c627459af58975b2e5c897d93287939b9d5065f000249b5 \ - --hash=sha256:a8f0302f9ac4e9923f98d8e243939a6fb627cd048f5cd38595c97e38020dffce \ - --hash=sha256:b6a73b2ba83e663b2480a90b82fdae6a7aa6427f62bf43b29912c0cfd1aa2bfa \ - --hash=sha256:c14e803037e572c177ba54a3e090d6eb12efd795d49327c5ee2b3bddb836bf01 \ - --hash=sha256:c3d7bd6e3929fd2ea7fbc3f562e4987229ead70c9ae5f01501a46701e08f1ad9 \ - --hash=sha256:c98e0b7434a7fa4e3e63f250456eaef52499fba5ae661c58cc5b5477d11e7182 \ - --hash=sha256:cce634b10aeab37010449124814b05a62fb5f18928ca878f1bf4750d1f0c815b \ - --hash=sha256:e154d230dc1bbbd78ad2fdc3039fa50ad7ffcf438e4eb2fa30bce223a70c7486 \ - 
--hash=sha256:e1ea6176d7dfd5b941ea01c2ec34de9531ba494d541fe2057c904e601879f249 \ - --hash=sha256:e759f9e8bc908aaae0412642afe5416c9f983a80499448fcc7fab8692ae044c3 \ - --hash=sha256:e8978003816c7b9eabe217f88c78bc26adc8f9304bf6a594b02e5a49b2ef9c11 \ - --hash=sha256:ecde9ab49f58433abe02f9ed076c7b5be839cf0153883a6d23995937a82392fa \ - --hash=sha256:f6ec94f0e50eb8fa1744a731088b966427575e40c2944a980049798b127a687e \ - --hash=sha256:fd3c71aeee838299c5887230b8a1822795325ddfea635edd82954c1eaa831e24 \ - --hash=sha256:fe0f540750a13fd8e5da4b3eaba91a785eea8dca5ccd2bc2ffe978caa403090e +grpcio==1.75.0 \ + --hash=sha256:050760fd29c8508844a720f06c5827bb00de8f5e02f58587eb21a4444ad706e5 \ + --hash=sha256:06d22e1d8645e37bc110f4c589cb22c283fd3de76523065f821d6e81de33f5d4 \ + --hash=sha256:0aa795198b28807d28570c0a5f07bb04d5facca7d3f27affa6ae247bbd7f312a \ + --hash=sha256:0b85f4ebe6b56d2a512201bb0e5f192c273850d349b0a74ac889ab5d38959d16 \ + --hash=sha256:0c40f368541945bb664857ecd7400acb901053a1abbcf9f7896361b2cfa66798 \ + --hash=sha256:0c91d5b16eff3cbbe76b7a1eaaf3d91e7a954501e9d4f915554f87c470475c3d \ + --hash=sha256:0fcb77f2d718c1e58cc04ef6d3b51e0fa3b26cf926446e86c7eba105727b6cd4 \ + --hash=sha256:153c5a7655022c3626ad70be3d4c2974cb0967f3670ee49ece8b45b7a139665f \ + --hash=sha256:1bb78d052948d8272c820bb928753f16a614bb2c42fbf56ad56636991b427518 \ + --hash=sha256:1ec2937fd92b5b4598cbe65f7e57d66039f82b9e2b7f7a5f9149374057dde77d \ + --hash=sha256:1ec9cbaec18d9597c718b1ed452e61748ac0b36ba350d558f9ded1a94cc15ec7 \ + --hash=sha256:222b0851e20c04900c63f60153503e918b08a5a0fad8198401c0b1be13c6815b \ + --hash=sha256:266fa6209b68a537b2728bb2552f970e7e78c77fe43c6e9cbbe1f476e9e5c35f \ + --hash=sha256:2e8e752ab5cc0a9c5b949808c000ca7586223be4f877b729f034b912364c3964 \ + --hash=sha256:352dbdf25495eef584c8de809db280582093bc3961d95a9d78f0dfb7274023a2 \ + --hash=sha256:36764a4ad9dc1eb891042fab51e8cdf7cc014ad82cee807c10796fb708455041 \ + 
--hash=sha256:38d665f44b980acdbb2f0e1abf67605ba1899f4d2443908df9ec8a6f26d2ed88 \ + --hash=sha256:3a6788b30aa8e6f207c417874effe3f79c2aa154e91e78e477c4825e8b431ce0 \ + --hash=sha256:437eeb16091d31498585d73b133b825dc80a8db43311e332c08facf820d36894 \ + --hash=sha256:494dcbade5606128cb9f530ce00331a90ecf5e7c5b243d373aebdb18e503c346 \ + --hash=sha256:50a6e43a9adc6938e2a16c9d9f8a2da9dd557ddd9284b73b07bd03d0e098d1e9 \ + --hash=sha256:53067c590ac3638ad0c04272f2a5e7e32a99fec8824c31b73bc3ef93160511fa \ + --hash=sha256:55a2d5ae79cd0f68783fb6ec95509be23746e3c239290b2ee69c69a38daa961a \ + --hash=sha256:55dfb9122973cc69520b23d39867726722cafb32e541435707dc10249a1bdbc6 \ + --hash=sha256:585147859ff4603798e92605db28f4a97c821c69908e7754c44771c27b239bbd \ + --hash=sha256:597340a41ad4b619aaa5c9b94f7e6ba4067885386342ab0af039eda945c255cd \ + --hash=sha256:678b649171f229fb16bda1a2473e820330aa3002500c4f9fd3a74b786578e90f \ + --hash=sha256:68c95b1c1e3bf96ceadf98226e9dfe2bc92155ce352fa0ee32a1603040e61856 \ + --hash=sha256:6b365f37a9c9543a9e91c6b4103d68d38d5bcb9965b11d5092b3c157bd6a5ee7 \ + --hash=sha256:725e67c010f63ef17fc052b261004942763c0b18dcd84841e6578ddacf1f9d10 \ + --hash=sha256:78dcc025a144319b66df6d088bd0eda69e1719eb6ac6127884a36188f336df19 \ + --hash=sha256:7a9337ac4ce61c388e02019d27fa837496c4b7837cbbcec71b05934337e51531 \ + --hash=sha256:7ee5ee42bfae8238b66a275f9ebcf6f295724375f2fa6f3b52188008b6380faf \ + --hash=sha256:7f89d6d0cd43170a80ebb4605cad54c7d462d21dc054f47688912e8bf08164af \ + --hash=sha256:851194eec47755101962da423f575ea223c9dd7f487828fe5693920e8745227e \ + --hash=sha256:9146e40378f551eed66c887332afc807fcce593c43c698e21266a4227d4e20d2 \ + --hash=sha256:91fbfc43f605c5ee015c9056d580a70dd35df78a7bad97e05426795ceacdb59f \ + --hash=sha256:9880c323595d851292785966cadb6c708100b34b163cab114e3933f5773cba2d \ + --hash=sha256:9dc4a02796394dd04de0b9673cb79a78901b90bb16bf99ed8cb528c61ed9372e \ + --hash=sha256:b989e8b09489478c2d19fecc744a298930f40d8b27c3638afbfe84d22f36ce4e \ + 
--hash=sha256:bb58e38a50baed9b21492c4b3f3263462e4e37270b7ea152fc10124b4bd1c318 \ + --hash=sha256:c2c39984e846bd5da45c5f7bcea8fafbe47c98e1ff2b6f40e57921b0c23a52d0 \ + --hash=sha256:c8cfc780b7a15e06253aae5f228e1e84c0d3c4daa90faf5bc26b751174da4bf9 \ + --hash=sha256:ca123db0813eef80625a4242a0c37563cb30a3edddebe5ee65373854cf187215 \ + --hash=sha256:cb6c5b075c2d092f81138646a755f0dad94e4622300ebef089f94e6308155d82 \ + --hash=sha256:dce15597ca11913b78e1203c042d5723e3ea7f59e7095a1abd0621be0e05b895 \ + --hash=sha256:eafbe3563f9cb378370a3fa87ef4870539cf158124721f3abee9f11cd8162460 \ + --hash=sha256:ee16e232e3d0974750ab5f4da0ab92b59d6473872690b5e40dcec9a22927f22e \ + --hash=sha256:fa35ccd9501ffdd82b861809cbfc4b5b13f4b4c5dc3434d2d9170b9ed38a9054 \ + --hash=sha256:fb64dd62face3d687a7b56cd881e2ea39417af80f75e8b36f0f81dfd93071651 \ + --hash=sha256:ffc33e67cab6141c54e75d85acd5dec616c5095a957ff997b4330a6395aa9b51 # via # googleapis-common-protos # grpc-google-iam-v1 From 75735ce196cc0dd18de8b4011eb4eadbd730ddf0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 24 Sep 2025 13:18:40 -0400 Subject: [PATCH 1308/1339] feat: add gapic-version generator option (#2440) Co-authored-by: Owl Bot --- .../gapic-generator/gapic/samplegen_utils/snippet_index.py | 4 +--- packages/gapic-generator/gapic/schema/api.py | 7 ++++++- .../gapic/templates/%namespace/%name/gapic_version.py.j2 | 2 +- .../%namespace/%name_%version/gapic_version.py.j2 | 2 +- packages/gapic-generator/gapic/utils/options.py | 3 +++ packages/gapic-generator/tests/integration/BUILD.bazel | 1 + .../goldens/asset/google/cloud/asset/gapic_version.py | 2 +- .../goldens/asset/google/cloud/asset_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.asset.v1.json | 2 +- .../snippet_metadata_google.iam.credentials.v1.json | 2 +- .../snippet_metadata_google.cloud.eventarc.v1.json | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- .../snippet_metadata_google.logging.v2.json | 2 +- 
.../snippet_metadata_google.cloud.redis.v1.json | 2 +- .../snippet_metadata_google.cloud.redis.v1.json | 2 +- packages/gapic-generator/tests/unit/common_types.py | 6 ++++-- .../gapic-generator/tests/unit/generator/test_generator.py | 4 ++-- .../tests/unit/samplegen/test_snippet_index.py | 2 +- 18 files changed, 29 insertions(+), 20 deletions(-) diff --git a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py index 07003d701ef1..945b43e61f6a 100644 --- a/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py +++ b/packages/gapic-generator/gapic/samplegen_utils/snippet_index.py @@ -119,9 +119,7 @@ def __init__(self, api_schema: api.API): ) self.metadata_index.client_library.language = snippet_metadata_pb2.Language.PYTHON # type: ignore - # This is just a placeholder. release-please is responsible for - # updating the metadata file to the correct library version. - self.metadata_index.client_library.version = "0.1.0" + self.metadata_index.client_library.version = api_schema.gapic_version self.metadata_index.client_library.apis.append( snippet_metadata_pb2.Api( # type: ignore diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 071bbd29339b..46fe9d3fb55a 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -378,6 +378,7 @@ class API: all_protos: Mapping[str, Proto] service_yaml_config: service_pb2.Service subpackage_view: Tuple[str, ...] 
= dataclasses.field(default_factory=tuple) + gapic_version: str = "0.0.0" @classmethod def build( @@ -493,12 +494,16 @@ def disambiguate_keyword_sanitize_fname( ParseDict( opts.service_yaml_config, service_yaml_config, ignore_unknown_fields=True ) + gapic_version = opts.gapic_version # Third pass for various selective GAPIC settings; these require # settings in the service.yaml and so we build the API object # before doing another pass. api = cls( - naming=naming, all_protos=protos, service_yaml_config=service_yaml_config + naming=naming, + all_protos=protos, + service_yaml_config=service_yaml_config, + gapic_version=gapic_version, ) if package in api.all_library_settings: diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 index b3243d8eff10..7764662cda25 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name/gapic_version.py.j2 @@ -1,5 +1,5 @@ {% extends '_base.py.j2' %} {% block content %} -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "{{ api.gapic_version }}" # {x-release-please-version} {% endblock %} diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 index b3243d8eff10..7764662cda25 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/gapic_version.py.j2 @@ -1,5 +1,5 @@ {% extends '_base.py.j2' %} {% block content %} -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "{{ api.gapic_version }}" # {x-release-please-version} {% endblock %} diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index 
af2677630cae..f46d9e920c6e 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -50,6 +50,7 @@ class Options: service_yaml_config: Dict[str, Any] = dataclasses.field(default_factory=dict) rest_numeric_enums: bool = False proto_plus_deps: Tuple[str, ...] = dataclasses.field(default=("",)) + gapic_version: str = "0.0.0" # Class constants PYTHON_GAPIC_PREFIX: str = "python-gapic-" @@ -71,6 +72,7 @@ class Options: # proto plus dependencies delineated by '+' # For example, 'google.cloud.api.v1+google.cloud.anotherapi.v2' "proto-plus-deps", + "gapic-version", # A version string following https://peps.python.org/pep-0440 ) ) @@ -197,6 +199,7 @@ def tweak_path(p): service_yaml_config=service_yaml_config, rest_numeric_enums=bool(opts.pop("rest-numeric-enums", False)), proto_plus_deps=proto_plus_deps, + gapic_version=opts.pop("gapic-version", ["0.0.0"]).pop(), ) # Note: if we ever need to recursively check directories for sample diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index be1f5cd15e99..3e8a344a97e8 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -55,6 +55,7 @@ py_gapic_library( grpc_service_config = "cloudasset_grpc_service_config.json", opt_args = [ "autogen-snippets", + "gapic-version=1.2.99", ], service_yaml = "cloudasset_v1.yaml", transport = "grpc+rest", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py index 20a9cd975b02..bbc191a8fe2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific 
language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.2.99" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py index 20a9cd975b02..bbc191a8fe2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.2.99" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json index 211efb19c2ad..d28891aab54c 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/asset/samples/generated_samples/snippet_metadata_google.cloud.asset.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-asset", - "version": "0.1.0" + "version": "1.2.99" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json index 636249950f11..ab281c03b792 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/credentials/samples/generated_samples/snippet_metadata_google.iam.credentials.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-iam-credentials", - "version": "0.1.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json index 247771789f02..d6c04697d507 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/samples/generated_samples/snippet_metadata_google.cloud.eventarc.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-eventarc", - "version": "0.1.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json index 50c444f70b85..10c88271fc58 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json 
b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json index 55fe86894de2..023969927e1b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/samples/generated_samples/snippet_metadata_google.logging.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-logging", - "version": "0.1.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 78f872bc4aef..5156ddc6970b 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis", - "version": "0.1.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 467e4c62cea1..1b46e699ceb0 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis", - "version": 
"0.1.0" + "version": "0.0.0" }, "snippets": [ { diff --git a/packages/gapic-generator/tests/unit/common_types.py b/packages/gapic-generator/tests/unit/common_types.py index 8a6c2a44bc91..d8f8375499b5 100644 --- a/packages/gapic-generator/tests/unit/common_types.py +++ b/packages/gapic-generator/tests/unit/common_types.py @@ -126,8 +126,10 @@ def resource_path_args(self): ) DummyService.__new__.__defaults__ = (False,) * len(DummyService._fields) -DummyApiSchema = namedtuple("DummyApiSchema", ["services", "naming", "messages"]) -DummyApiSchema.__new__.__defaults__ = (False,) * len(DummyApiSchema._fields) +DummyApiSchema = namedtuple( + "DummyApiSchema", ["services", "naming", "messages", "gapic_version"] +) +DummyApiSchema.__new__.__defaults__ = (False, False, False, "0.0.0") DummyNaming = namedtuple( "DummyNaming", diff --git a/packages/gapic-generator/tests/unit/generator/test_generator.py b/packages/gapic-generator/tests/unit/generator/test_generator.py index 85bcf582177a..1e4578b51692 100644 --- a/packages/gapic-generator/tests/unit/generator/test_generator.py +++ b/packages/gapic-generator/tests/unit/generator/test_generator.py @@ -563,7 +563,7 @@ def test_samplegen_config_to_output_files(mock_gmtime, fs): "apis": [{"id": "google.mollusca.v1", "version": "v1"}], "language": "PYTHON", "name": "mollusc-cephalopod-teuthida-", - "version": "0.1.0", + "version": "0.0.0", }, "snippets": [ { @@ -725,7 +725,7 @@ def test_samplegen_id_disambiguation(mock_gmtime, fs): "apis": [{"id": "google.mollusca.v1", "version": "v1"}], "language": "PYTHON", "name": "mollusc-cephalopod-teuthida-", - "version": "0.1.0", + "version": "0.0.0", }, "snippets": [ { diff --git a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py index c480adf9200d..6fedda0dce74 100644 --- a/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py +++ 
b/packages/gapic-generator/tests/unit/samplegen/test_snippet_index.py @@ -267,7 +267,7 @@ def test_get_metadata_json(sample_str): "apis": [{"id": "google.mollusca", "version": "v1"}], "language": "PYTHON", "name": "google-mollusca", - "version": "0.1.0", + "version": "0.0.0", }, "snippets": [ { From 174e2cbcfdf662ea1f9d16465298f7d1a9e35967 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 24 Sep 2025 17:47:35 +0000 Subject: [PATCH 1309/1339] chore(main): release 1.27.0 (#2445) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 85172ea5bc0f..9a006218dcf5 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.27.0](https://github.com/googleapis/gapic-generator-python/compare/v1.26.2...v1.27.0) (2025-09-24) + + +### Features + +* Add gapic-version generator option ([#2440](https://github.com/googleapis/gapic-generator-python/issues/2440)) ([75eb349](https://github.com/googleapis/gapic-generator-python/commit/75eb349efcb907dc443b6e8a2c11b2056a476009)) + ## [1.26.2](https://github.com/googleapis/gapic-generator-python/compare/v1.26.1...v1.26.2) (2025-08-30) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 683ad68595ac..50f9bfa3b9ef 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.26.2" +version = "1.27.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the 
lower bounds of these dependencies match what we have in the From 1ba620987135c799e22e807d750552d95525c0a2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 26 Sep 2025 16:18:17 -0400 Subject: [PATCH 1310/1339] tests: add testing for Python 3.14 Pre-release (#2344) --- .../.github/workflows/tests.yaml | 112 ++++++++++++------ packages/gapic-generator/gapic/schema/api.py | 4 +- .../gapic/templates/noxfile.py.j2 | 18 ++- .../templates/testing/constraints-3.14.txt.j2 | 20 ++++ packages/gapic-generator/noxfile.py | 3 +- .../integration/goldens/asset/noxfile.py | 18 ++- .../asset/testing/constraints-3.14.txt | 14 +++ .../goldens/credentials/noxfile.py | 18 ++- .../credentials/testing/constraints-3.14.txt | 11 ++ .../integration/goldens/eventarc/noxfile.py | 18 ++- .../eventarc/testing/constraints-3.14.txt | 12 ++ .../integration/goldens/logging/noxfile.py | 18 ++- .../logging/testing/constraints-3.14.txt | 11 ++ .../goldens/logging_internal/noxfile.py | 18 ++- .../testing/constraints-3.14.txt | 11 ++ .../integration/goldens/redis/noxfile.py | 18 ++- .../redis/testing/constraints-3.14.txt | 11 ++ .../goldens/redis_selective/noxfile.py | 18 ++- .../testing/constraints-3.14.txt | 11 ++ 19 files changed, 280 insertions(+), 84 deletions(-) create mode 100644 packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 create mode 100755 packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt create mode 100755 
packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt create mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index 7ba9e493a987..e5e3c22d195d 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -16,8 +16,40 @@ concurrency: env: SHOWCASE_VERSION: 0.35.0 PROTOC_VERSION: 3.20.2 + OLDEST_PYTHON: 3.7 + LATEST_STABLE_PYTHON: 3.13 + PRE_RELEASE_PYTHON: 3.14 + ALL_PYTHON: "['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14']" + +permissions: + contents: read jobs: + # `all_python_setup` and `python_config` are a workaround for a known issue where it's not possible to use environment variables outside of `steps`. + # Some jobs set up a test matrix which is outside of `steps` and environment variables can't be used directly. 
+ # This requires a workaround based on the discussion in: + # https://github.com/actions/runner/issues/2372 + # The limitation is captured here where certain job configurations don't have access to `env` + # https://docs.github.com/en/enterprise-cloud@latest/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#context-availability + all_python_setup: + runs-on: ubuntu-latest + outputs: + all_python: ${{ env.ALL_PYTHON }} + steps: + - name: Set up all python + id: all_python + run: | + echo 'all_python={{ "${{ env.ALL_PYTHON }}" }}' + python_config: + runs-on: ubuntu-latest + outputs: + oldest_python: ${{ env.OLDEST_PYTHON }} + latest_stable_python: ${{ env.LATEST_STABLE_PYTHON }} + steps: + - name: Print env variables for `python_config` + run: | + echo 'oldest_python={{ "${{ env.OLDEST_PYTHON }}" }}' + echo 'latest_stable_python={{ "${{ env.LATEST_STABLE_PYTHON }}" }}' docs: runs-on: ubuntu-latest steps: @@ -49,11 +81,15 @@ jobs: - name: Build the documentation. run: nox -s docfx mypy: + needs: all_python_setup strategy: matrix: # Run mypy on all of the supported python versions listed in setup.py # https://github.com/python/mypy/blob/master/setup.py - python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} + exclude: + # Remove once https://github.com/googleapis/gapic-generator-python/issues/2303 is fixed + - python: '3.7' runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 @@ -62,16 +98,18 @@ jobs: with: python-version: "${{ matrix.python }}" cache: 'pip' + allow-prereleases: true - name: Install nox. run: python -m pip install nox - name: Check type annotations. 
run: nox -s mypy-${{ matrix.python }} showcase: + needs: python_config strategy: # Run showcase tests on the lowest and highest supported runtimes matrix: # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA. - python: ["3.7", "3.13"] + python: ["${{ needs.python_config.outputs.oldest_python }}", "${{ needs.python_config.outputs.latest_stable_python }}"] target: [showcase, showcase_w_rest_async] logging_scope: ["", "google"] @@ -126,10 +164,10 @@ jobs: run: | sudo mkdir -p /tmp/workspace/tests/cert/ sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python "3.13" + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.LATEST_STABLE_PYTHON }}" cache: 'pip' - name: Copy mtls files run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ @@ -160,9 +198,10 @@ jobs: nox -s ${{ matrix.target }} # TODO(yon-mg): add compute unit tests showcase-unit: + needs: all_python_setup strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `_w_rest_async` variant when async rest is GA. variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async] logging_scope: ["", "google"] @@ -177,6 +216,7 @@ jobs: with: python-version: "${{ matrix.python }}" cache: 'pip' + allow-prereleases: true - name: Install system dependencies. run: | sudo apt-get update @@ -200,10 +240,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python "3.13" + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.LATEST_STABLE_PYTHON }}" cache: 'pip' - name: Install system dependencies. 
run: | @@ -228,10 +268,10 @@ jobs: variant: ['', _alternative_templates] steps: - uses: actions/checkout@v5 - - name: Set up Python "3.13" + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.LATEST_STABLE_PYTHON }}" cache: 'pip' - name: Install system dependencies. run: | @@ -250,13 +290,14 @@ jobs: - name: Typecheck the generated output. run: nox -s showcase_mypy${{ matrix.variant }} snippetgen: + needs: all_python_setup runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python "3.13" + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.LATEST_STABLE_PYTHON }}" cache: 'pip' - name: Install system dependencies. run: | @@ -267,10 +308,10 @@ jobs: - name: Check autogenerated snippets. run: nox -s snippetgen unit: + needs: all_python_setup strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] - + python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix runs-on: ubuntu-22.04 @@ -279,8 +320,9 @@ jobs: - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v6 with: - python-version: ${{ matrix.python }} + python-version: "${{ matrix.python }}" cache: 'pip' + allow-prereleases: true - name: Install pandoc run: | sudo apt-get update @@ -291,11 +333,11 @@ jobs: - name: Run unit tests. 
run: nox -s unit-${{ matrix.python }} fragment: + needs: all_python_setup strategy: matrix: - python: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} variant: ['', _alternative_templates] - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix runs-on: ubuntu-22.04 @@ -304,8 +346,9 @@ jobs: - name: Set up Python ${{ matrix.python }} uses: actions/setup-python@v6 with: - python-version: ${{ matrix.python }} + python-version: "${{ matrix.python }}" cache: 'pip' + allow-prereleases: true - name: Install pandoc run: | sudo apt-get update @@ -348,29 +391,29 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python 3.13 + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: ${{ env.LATEST_STABLE_PYTHON }} cache: 'pip' - name: Install nox. run: | python -m pip install nox - name: Run blacken and lint on the generated output. 
run: | - nox -f tests/integration/goldens/asset/noxfile.py -s mypy-3.13 blacken lint - nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-3.13 blacken lint - nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-3.13 blacken lint - nox -f tests/integration/goldens/logging/noxfile.py -s mypy-3.13 blacken lint - nox -f tests/integration/goldens/redis/noxfile.py -s mypy-3.13 blacken lint + nox -f tests/integration/goldens/asset/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint + nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint + nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint + nox -f tests/integration/goldens/logging/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint + nox -f tests/integration/goldens/redis/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint goldens-unit: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python 3.13 + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.LATEST_STABLE_PYTHON }}" cache: 'pip' - name: Install nox. 
run: | @@ -380,19 +423,20 @@ jobs: # in order to run unit tests # See https://github.com/googleapis/gapic-generator-python/issues/1806 run: | - nox -f tests/integration/goldens/credentials/noxfile.py -s unit-3.13 - nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-3.13 - nox -f tests/integration/goldens/logging/noxfile.py -s unit-3.13 - nox -f tests/integration/goldens/redis/noxfile.py -s unit-3.13 + nox -f tests/integration/goldens/credentials/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} + nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} + nox -f tests/integration/goldens/logging/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} + nox -f tests/integration/goldens/redis/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} goldens-prerelease: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python 3.13 + - name: Set up Python ${{ env.PRE_RELEASE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.PRE_RELEASE_PYTHON }}" cache: 'pip' + allow-prereleases: true - name: Install nox. run: | python -m pip install nox @@ -409,10 +453,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python "3.13" + - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "${{ env.LATEST_STABLE_PYTHON }}" cache: 'pip' - name: Install nox. run: | diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 46fe9d3fb55a..2c01b07363ab 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -935,11 +935,13 @@ def enforce_valid_library_settings( # Check to see if selective gapic generation methods are valid. 
selective_gapic_errors = {} + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2446): + # Workaround issue in Python 3.14 related to code coverage by adding `# pragma: no branch` for ( method_name ) in ( library_settings.python_settings.common.selective_gapic_generation.methods - ): + ): # pragma: no branch if method_name not in self.all_methods: selective_gapic_errors[method_name] = "Method does not exist." elif not method_name.startswith(library_settings.version): diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index a9c09b337e96..e4c356cbf6df 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -29,9 +29,11 @@ ALL_PYTHON = [ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -51,7 +53,7 @@ UNIT_TEST_DEPENDENCIES: List[str] = [] UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -224,7 +226,9 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -232,7 +236,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -425,7 +429,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -438,7 +442,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 new file mode 100644 index 000000000000..ef07e240643b --- /dev/null +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 @@ -0,0 +1,20 @@ +{% from '_pypi_packages.j2' import pypi_packages %} +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. 
+# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +{% for package_tuple, package_info in pypi_packages.items() %} +{# Quick check to make sure `package_info.package_name` is not the package being generated so we don't circularly include this package in its own constraints file. #} +{% if api.naming.warehouse_package_name != package_info.package_name %} +{% if api.requires_package(package_tuple) %} +{{ package_info.package_name }}>={{ (package_info.upper_bound.split(".")[0] | int) - 1 }} +{% endif %} +{% endif %} +{% endfor %} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 2d688e139b9e..99dab8a44171 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -44,9 +44,10 @@ "3.11", "3.12", "3.13", + "3.14", ) -NEWEST_PYTHON = ALL_PYTHON[-1] +NEWEST_PYTHON = ALL_PYTHON[-2] @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index db793cd23ecf..b61714a5795a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -223,7 +225,9 @@ def 
install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt new file mode 100755 index 000000000000..763099d82412 --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt @@ -0,0 +1,14 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +google-cloud-access-context-manager>=0 +google-cloud-os-config>=1 +grpc-google-iam-v1>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 9955f98f4fde..9dbefde7b929 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES 
= [ "mock", "pytest", @@ -223,7 +225,9 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt new file mode 100755 index 000000000000..c20a77817caa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 30d3a401d130..8a915a5d099b 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -223,7 +225,9 @@ def 
install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt new file mode 100755 index 000000000000..2010e549cceb --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt @@ -0,0 +1,12 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index ea4d1e89e65f..d397b029d259 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -223,7 +225,9 @@ def 
install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt new file mode 100755 index 000000000000..c20a77817caa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index ea4d1e89e65f..d397b029d259 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -223,7 +225,9 @@ def 
install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt new file mode 100755 index 000000000000..c20a77817caa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index 9fa5bc71996c..d798ed9a3726 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -223,7 +225,9 @@ def 
install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt new file mode 100755 index 000000000000..c20a77817caa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index 9fa5bc71996c..d798ed9a3726 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -36,9 +36,11 @@ "3.11", "3.12", "3.13", + "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] +PRE_RELEASE_PYTHON = ALL_PYTHON[-1] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -58,7 +60,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ALL_PYTHON SYSTEM_TEST_STANDARD_DEPENDENCIES = [ "mock", "pytest", @@ -223,7 +225,9 @@ def 
install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -231,7 +235,7 @@ def unit(session, protobuf_implementation): ) install_unittest_dependencies(session, "-c", constraints_path) - # TODO(https://github.com/googleapis/synthtool/issues/1976): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. # The 'cpp' implementation requires Protobuf<4. if protobuf_implementation == "cpp": @@ -419,7 +423,7 @@ def docfx(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python=PRE_RELEASE_PYTHON) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -432,7 +436,9 @@ def prerelease_deps(session, protobuf_implementation): `pip install --pre `. """ - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2388): + # Remove this check once support for Protobuf 3.x is dropped. 
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13", "3.14"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt new file mode 100755 index 000000000000..c20a77817caa --- /dev/null +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt @@ -0,0 +1,11 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 From 4817b713b8d289c93a27bc20859185590b9a573a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 09:44:21 -0400 Subject: [PATCH 1311/1339] chore(main): release 1.27.1 (#2447) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 9a006218dcf5..70fa69f8ed26 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.27.1](https://github.com/googleapis/gapic-generator-python/compare/v1.27.0...v1.27.1) (2025-09-26) + + +### Bug Fixes + +* Add testing for Python 3.14 Pre-release 
([7f7d50e](https://github.com/googleapis/gapic-generator-python/commit/7f7d50e3154d548e8066d251b85a589afbf183d7)) + ## [1.27.0](https://github.com/googleapis/gapic-generator-python/compare/v1.26.2...v1.27.0) (2025-09-24) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 50f9bfa3b9ef..bc48d16750df 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.27.0" +version = "1.27.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From c2ff28cfdee4ac94903f9cadc958cbee1951809e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 1 Oct 2025 05:44:28 -0400 Subject: [PATCH 1312/1339] fix: deprecate credentials_file (#2454) --- .../%sub/services/%service/transports/base.py.j2 | 5 +++-- .../%sub/services/%service/transports/grpc.py.j2 | 8 +++++--- .../%sub/services/%service/transports/grpc_asyncio.py.j2 | 8 +++++--- .../%sub/services/%service/transports/rest.py.j2 | 5 +++-- .../asset_v1/services/asset_service/transports/base.py | 5 +++-- .../asset_v1/services/asset_service/transports/grpc.py | 8 +++++--- .../services/asset_service/transports/grpc_asyncio.py | 8 +++++--- .../asset_v1/services/asset_service/transports/rest.py | 5 +++-- .../services/iam_credentials/transports/base.py | 5 +++-- .../services/iam_credentials/transports/grpc.py | 8 +++++--- .../services/iam_credentials/transports/grpc_asyncio.py | 8 +++++--- .../services/iam_credentials/transports/rest.py | 5 +++-- .../eventarc_v1/services/eventarc/transports/base.py | 5 +++-- .../eventarc_v1/services/eventarc/transports/grpc.py | 8 +++++--- .../services/eventarc/transports/grpc_asyncio.py | 8 +++++--- .../eventarc_v1/services/eventarc/transports/rest.py | 5 
+++-- .../services/config_service_v2/transports/base.py | 5 +++-- .../services/config_service_v2/transports/grpc.py | 8 +++++--- .../services/config_service_v2/transports/grpc_asyncio.py | 8 +++++--- .../services/logging_service_v2/transports/base.py | 5 +++-- .../services/logging_service_v2/transports/grpc.py | 8 +++++--- .../logging_service_v2/transports/grpc_asyncio.py | 8 +++++--- .../services/metrics_service_v2/transports/base.py | 5 +++-- .../services/metrics_service_v2/transports/grpc.py | 8 +++++--- .../metrics_service_v2/transports/grpc_asyncio.py | 8 +++++--- .../services/config_service_v2/transports/base.py | 5 +++-- .../services/config_service_v2/transports/grpc.py | 8 +++++--- .../services/config_service_v2/transports/grpc_asyncio.py | 8 +++++--- .../services/logging_service_v2/transports/base.py | 5 +++-- .../services/logging_service_v2/transports/grpc.py | 8 +++++--- .../logging_service_v2/transports/grpc_asyncio.py | 8 +++++--- .../services/metrics_service_v2/transports/base.py | 5 +++-- .../services/metrics_service_v2/transports/grpc.py | 8 +++++--- .../metrics_service_v2/transports/grpc_asyncio.py | 8 +++++--- .../redis_v1/services/cloud_redis/transports/base.py | 5 +++-- .../redis_v1/services/cloud_redis/transports/grpc.py | 8 +++++--- .../services/cloud_redis/transports/grpc_asyncio.py | 8 +++++--- .../redis_v1/services/cloud_redis/transports/rest.py | 5 +++-- .../redis_v1/services/cloud_redis/transports/base.py | 5 +++-- .../redis_v1/services/cloud_redis/transports/grpc.py | 8 +++++--- .../services/cloud_redis/transports/grpc_asyncio.py | 8 +++++--- .../redis_v1/services/cloud_redis/transports/rest.py | 5 +++-- 42 files changed, 174 insertions(+), 108 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 7176ad949d75..7cdaf82da5ea 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -89,9 +89,10 @@ class {{ service.name }}Transport(abc.ABC): credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 index 7158a4c430df..23e7e19839bf 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc.py.j2 @@ -135,9 +135,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -270,9 +271,10 @@ class {{ service.name }}GrpcTransport({{ service.name }}Transport): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index 64aeb0abf68b..d5f692442f24 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -132,8 +132,9 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -182,9 +183,10 @@ class {{ service.grpc_asyncio_transport_name }}({{ service.name }}Transport): are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 825756eb54dc..95efafb38928 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -114,9 +114,10 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index 5e8d7fc1186f..4aebab917078 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -69,9 +69,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index 755d23eec517..1edf7351948a 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -144,9 +144,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -276,9 +277,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. 
These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index 92ca7f4fd078..a2b4aeb11a73 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -140,8 +140,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -190,9 +191,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index ca847c3aa8c2..b7f32b5fc7d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -1103,9 +1103,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index c25966502066..c9c8fd9472c1 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -66,9 +66,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 4472c1835245..5baaeb8e5e89 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -150,9 +150,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -281,9 +282,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index c97b00af64b9..a793abd09cfe 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -146,8 +146,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -196,9 +197,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index ea5815b579ac..c0ad2e73ea99 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -316,9 +316,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index e37d17237f50..ca6cf24f129e 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -77,9 +77,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 5ec53389bb27..887700548d06 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -154,9 +154,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -286,9 +287,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. 
These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index fdbfeab462dc..6c940b3a829c 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -150,8 +150,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -200,9 +201,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index fab70abdd90a..1d4a69adf0fe 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -1125,9 +1125,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 28f11c65de53..625a38859405 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -72,9 +72,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8e56dd2231fa..ac3a4393a80f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -144,9 +144,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -276,9 +277,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. 
These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 46dd72d79d94..fc7af0c06bae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -140,8 +140,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -190,9 +191,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. 
+ This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 287081712a7e..f9d40b56218c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -72,9 +72,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index ec9737507ebd..8edc617027b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -143,9 +143,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -274,9 +275,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 1604ba62bc45..92aa1d5256bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -139,8 +139,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -189,9 +190,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 012973c62892..fef46ac38e4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -72,9 +72,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 70a0c89da1c7..d2c41d01e813 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -143,9 +143,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -274,9 +275,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index dcadfbe957b7..15b1ab3ad842 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -139,8 +139,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -189,9 +190,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 28f11c65de53..625a38859405 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -72,9 +72,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index 8e56dd2231fa..ac3a4393a80f 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -144,9 +144,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -276,9 +277,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 46dd72d79d94..fc7af0c06bae 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -140,8 +140,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -190,9 +191,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 287081712a7e..f9d40b56218c 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -72,9 +72,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index ec9737507ebd..8edc617027b0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -143,9 +143,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -274,9 +275,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 1604ba62bc45..92aa1d5256bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -139,8 +139,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -189,9 +190,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 012973c62892..fef46ac38e4d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -72,9 +72,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 70a0c89da1c7..d2c41d01e813 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -143,9 +143,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -274,9 +275,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index dcadfbe957b7..15b1ab3ad842 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -139,8 +139,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -189,9 +190,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 7bbc2e5550e2..0eb82cc696c8 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -69,9 +69,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 9ca4b3c18cb7..ac164835f5bc 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -164,9 +164,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -296,9 +297,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. 
These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 8c15a6dba158..46f96b5c5ceb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -160,8 +160,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -210,9 +211,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 620805e12072..e9a1c7a48403 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -785,9 +785,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index fe7ff474529f..8e1decc08191 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -69,9 +69,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index 38227ed3e965..fe65a8baf82e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -164,9 +164,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -296,9 +297,10 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. 
These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 1df810cc89e8..a3393990f222 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -160,8 +160,9 @@ def create_channel(cls, credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -210,9 +211,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. 
+ This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 1061ad6afb86..7e43d25df44e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -521,9 +521,10 @@ def __init__(self, *, are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client From 004de63371fae5c18bdd6bf4598841632653af7c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 1 Oct 2025 13:34:09 -0400 Subject: [PATCH 1313/1339] fix: resolve issue where rest-numeric-enums=False is not supported (#2456) --- packages/gapic-generator/gapic/utils/options.py | 12 +++++++++++- .../tests/unit/generator/test_options.py | 3 +++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/utils/options.py b/packages/gapic-generator/gapic/utils/options.py index f46d9e920c6e..30184f12c0a9 100644 --- a/packages/gapic-generator/gapic/utils/options.py +++ b/packages/gapic-generator/gapic/utils/options.py @@ -167,6 +167,16 @@ def tweak_path(p): "TRUE", ) + # `rest-numeric-enums` is False by default. Make sure users can also disable + # it by passing `rest-numeric-enums=False`. + rest_numeric_enums = opts.pop("rest-numeric-enums", ["False"])[0] in ( + "True", + "true", + "T", + "t", + "TRUE", + ) + # NOTE: Snippets are not currently correct for the alternative (Ads) templates # so always disable snippetgen in that case # https://github.com/googleapis/gapic-generator-python/issues/1052 @@ -197,7 +207,7 @@ def tweak_path(p): # transport should include desired transports delimited by '+', e.g. 
transport='grpc+rest' transport=opts.pop("transport", ["grpc"])[0].split("+"), service_yaml_config=service_yaml_config, - rest_numeric_enums=bool(opts.pop("rest-numeric-enums", False)), + rest_numeric_enums=rest_numeric_enums, proto_plus_deps=proto_plus_deps, gapic_version=opts.pop("gapic-version", ["0.0.0"]).pop(), ) diff --git a/packages/gapic-generator/tests/unit/generator/test_options.py b/packages/gapic-generator/tests/unit/generator/test_options.py index 9c1effb76967..49ae6e216d2b 100644 --- a/packages/gapic-generator/tests/unit/generator/test_options.py +++ b/packages/gapic-generator/tests/unit/generator/test_options.py @@ -212,6 +212,9 @@ def test_options_bool_flags(): options = Options.build("autogen-snippets=False") assert not options.autogen_snippets + options = Options.build("rest-numeric-enums=False") + assert not options.rest_numeric_enums + def test_options_autogen_snippets_false_for_old_naming(): # NOTE: Snippets are not currently correct for the alternative (Ads) templates From 10b759049ec9bf16489456d360c24a612fbd6049 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 1 Oct 2025 21:30:03 -0400 Subject: [PATCH 1314/1339] chore(main): release 1.27.2 (#2455) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 8 ++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 70fa69f8ed26..27ee7732cea7 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,14 @@ # Changelog +## [1.27.2](https://github.com/googleapis/gapic-generator-python/compare/v1.27.1...v1.27.2) (2025-10-01) + + +### Bug Fixes + +* Deprecate credentials_file ([#2454](https://github.com/googleapis/gapic-generator-python/issues/2454)) 
([48c5f38](https://github.com/googleapis/gapic-generator-python/commit/48c5f3895aa638a1def7df3f96c749e746450680)) +* Resolve issue where rest-numeric-enums=False is not supported ([#2456](https://github.com/googleapis/gapic-generator-python/issues/2456)) ([3d24a50](https://github.com/googleapis/gapic-generator-python/commit/3d24a50ac992631649ad06bb2dc6c90abe88988a)) + ## [1.27.1](https://github.com/googleapis/gapic-generator-python/compare/v1.27.0...v1.27.1) (2025-09-26) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index bc48d16750df..89bbd29d5a96 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.27.1" +version = "1.27.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 4a9db1ed8b818a0a5f9fef34eb1731cb850f9546 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 3 Oct 2025 10:19:02 -0400 Subject: [PATCH 1315/1339] chore: remove .github/sync-repo-settings.yaml (#2449) --- .../.github/sync-repo-settings.yaml | 98 ------------------- 1 file changed, 98 deletions(-) delete mode 100644 packages/gapic-generator/.github/sync-repo-settings.yaml diff --git a/packages/gapic-generator/.github/sync-repo-settings.yaml b/packages/gapic-generator/.github/sync-repo-settings.yaml deleted file mode 100644 index 5a0de29a10bf..000000000000 --- a/packages/gapic-generator/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,98 +0,0 @@ -rebaseMergeAllowed: true -squashMergeAllowed: true -mergeCommitAllowed: false -branchProtectionRules: -- pattern: main - isAdminEnforced: true - requiredStatusCheckContexts: - - 'cla/google' - - 'docs' - - 'mypy (3.8)' - - 'mypy (3.9)' - - 'mypy (3.10)' - - 'mypy (3.11)' - - 'mypy (3.12)' - 
- 'mypy (3.13)' - - 'showcase (3.7, showcase)' - - 'showcase (3.13, showcase)' - - 'showcase (3.7, showcase_w_rest_async)' - - 'showcase (3.13, showcase_w_rest_async)' - # TODO(dovs): reenable these when the mtls tests have been debugged and fixed - # See #1218 for details - # - 'showcase-mtls (showcase_mtls)' - - 'showcase-mypy' - - 'showcase-mypy (_alternative_templates)' - - 'showcase-unit (3.7)' - - 'showcase-unit (3.8)' - - 'showcase-unit (3.9)' - - 'showcase-unit (3.10)' - - 'showcase-unit (3.11)' - - 'showcase-unit (3.12)' - - 'showcase-unit (3.13)' - - 'showcase-unit (3.7, _alternative_templates)' - - 'showcase-unit (3.8, _alternative_templates)' - - 'showcase-unit (3.9, _alternative_templates)' - - 'showcase-unit (3.10, _alternative_templates)' - - 'showcase-unit (3.11, _alternative_templates)' - - 'showcase-unit (3.12, _alternative_templates)' - - 'showcase-unit (3.13, _alternative_templates)' - - 'showcase-unit (3.7, _alternative_templates_mixins)' - - 'showcase-unit (3.8, _alternative_templates_mixins)' - - 'showcase-unit (3.9, _alternative_templates_mixins)' - - 'showcase-unit (3.10, _alternative_templates_mixins)' - - 'showcase-unit (3.11, _alternative_templates_mixins)' - - 'showcase-unit (3.12, _alternative_templates_mixins)' - - 'showcase-unit (3.13, _alternative_templates_mixins)' - - 'showcase-unit (3.7, _mixins)' - - 'showcase-unit (3.8, _mixins)' - - 'showcase-unit (3.9, _mixins)' - - 'showcase-unit (3.10, _mixins)' - - 'showcase-unit (3.11, _mixins)' - - 'showcase-unit (3.12, _mixins)' - - 'showcase-unit (3.13, _mixins)' - - 'showcase-unit (3.7, _w_rest_async)' - - 'showcase-unit (3.8, _w_rest_async)' - - 'showcase-unit (3.9, _w_rest_async)' - - 'showcase-unit (3.10, _w_rest_async)' - - 'showcase-unit (3.11, _w_rest_async)' - - 'showcase-unit (3.12, _w_rest_async)' - - 'showcase-unit (3.13, _w_rest_async)' - - 'showcase-unit-add-iam-methods' - - 'integration' - - 'goldens-lint' - - 'goldens-prerelease' - - 'goldens-unit' - - 'lint' - - 
'snippetgen' - - 'unit (3.7)' - - 'unit (3.8)' - - 'unit (3.9)' - - 'unit (3.10)' - - 'unit (3.11)' - - 'unit (3.12)' - - 'unit (3.13)' - - 'fragment (3.7)' - - 'fragment (3.8)' - - 'fragment (3.9)' - - 'fragment (3.10)' - - 'fragment (3.11)' - - 'fragment (3.12)' - - 'fragment (3.13)' - - 'fragment (3.7, _alternative_templates)' - - 'fragment (3.8, _alternative_templates)' - - 'fragment (3.9, _alternative_templates)' - - 'fragment (3.10, _alternative_templates)' - - 'fragment (3.11, _alternative_templates)' - - 'fragment (3.12, _alternative_templates)' - - 'fragment (3.13, _alternative_templates)' - - 'OwlBot Post Processor' - requiredApprovingReviewCount: 1 - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: true -permissionRules: - - team: actools - permission: admin - - team: yoshi-python - permission: push - - team: actools-python - permission: push From 10073df1386be60f63124b96162f151fd238ac9d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 6 Oct 2025 15:06:51 -0400 Subject: [PATCH 1316/1339] fix: require grpcio >= 1.75.1 for Python 3.14 (#2452) --- packages/gapic-generator/gapic/templates/setup.py.j2 | 2 ++ .../gapic/templates/testing/_default_constraints.j2 | 2 ++ .../gapic/templates/testing/constraints-3.13.txt.j2 | 1 + .../gapic/templates/testing/constraints-3.14.txt.j2 | 1 + .../gapic/templates/testing/constraints-3.7.txt.j2 | 2 ++ .../gapic-generator/tests/integration/goldens/asset/setup.py | 2 ++ .../integration/goldens/asset/testing/constraints-3.10.txt | 2 ++ .../integration/goldens/asset/testing/constraints-3.11.txt | 2 ++ .../integration/goldens/asset/testing/constraints-3.12.txt | 2 ++ .../integration/goldens/asset/testing/constraints-3.13.txt | 1 + .../integration/goldens/asset/testing/constraints-3.14.txt | 1 + .../tests/integration/goldens/asset/testing/constraints-3.7.txt | 2 ++ .../tests/integration/goldens/asset/testing/constraints-3.8.txt | 2 ++ .../tests/integration/goldens/asset/testing/constraints-3.9.txt | 2 ++ 
.../tests/integration/goldens/credentials/setup.py | 2 ++ .../goldens/credentials/testing/constraints-3.10.txt | 2 ++ .../goldens/credentials/testing/constraints-3.11.txt | 2 ++ .../goldens/credentials/testing/constraints-3.12.txt | 2 ++ .../goldens/credentials/testing/constraints-3.13.txt | 1 + .../goldens/credentials/testing/constraints-3.14.txt | 1 + .../integration/goldens/credentials/testing/constraints-3.7.txt | 2 ++ .../integration/goldens/credentials/testing/constraints-3.8.txt | 2 ++ .../integration/goldens/credentials/testing/constraints-3.9.txt | 2 ++ .../gapic-generator/tests/integration/goldens/eventarc/setup.py | 2 ++ .../integration/goldens/eventarc/testing/constraints-3.10.txt | 2 ++ .../integration/goldens/eventarc/testing/constraints-3.11.txt | 2 ++ .../integration/goldens/eventarc/testing/constraints-3.12.txt | 2 ++ .../integration/goldens/eventarc/testing/constraints-3.13.txt | 1 + .../integration/goldens/eventarc/testing/constraints-3.14.txt | 1 + .../integration/goldens/eventarc/testing/constraints-3.7.txt | 2 ++ .../integration/goldens/eventarc/testing/constraints-3.8.txt | 2 ++ .../integration/goldens/eventarc/testing/constraints-3.9.txt | 2 ++ .../gapic-generator/tests/integration/goldens/logging/setup.py | 2 ++ .../integration/goldens/logging/testing/constraints-3.10.txt | 2 ++ .../integration/goldens/logging/testing/constraints-3.11.txt | 2 ++ .../integration/goldens/logging/testing/constraints-3.12.txt | 2 ++ .../integration/goldens/logging/testing/constraints-3.13.txt | 1 + .../integration/goldens/logging/testing/constraints-3.14.txt | 1 + .../integration/goldens/logging/testing/constraints-3.7.txt | 2 ++ .../integration/goldens/logging/testing/constraints-3.8.txt | 2 ++ .../integration/goldens/logging/testing/constraints-3.9.txt | 2 ++ .../tests/integration/goldens/logging_internal/setup.py | 2 ++ .../goldens/logging_internal/testing/constraints-3.10.txt | 2 ++ .../goldens/logging_internal/testing/constraints-3.11.txt | 2 ++ 
.../goldens/logging_internal/testing/constraints-3.12.txt | 2 ++ .../goldens/logging_internal/testing/constraints-3.13.txt | 1 + .../goldens/logging_internal/testing/constraints-3.14.txt | 1 + .../goldens/logging_internal/testing/constraints-3.7.txt | 2 ++ .../goldens/logging_internal/testing/constraints-3.8.txt | 2 ++ .../goldens/logging_internal/testing/constraints-3.9.txt | 2 ++ .../gapic-generator/tests/integration/goldens/redis/setup.py | 2 ++ .../integration/goldens/redis/testing/constraints-3.10.txt | 2 ++ .../integration/goldens/redis/testing/constraints-3.11.txt | 2 ++ .../integration/goldens/redis/testing/constraints-3.12.txt | 2 ++ .../integration/goldens/redis/testing/constraints-3.13.txt | 1 + .../integration/goldens/redis/testing/constraints-3.14.txt | 1 + .../tests/integration/goldens/redis/testing/constraints-3.7.txt | 2 ++ .../tests/integration/goldens/redis/testing/constraints-3.8.txt | 2 ++ .../tests/integration/goldens/redis/testing/constraints-3.9.txt | 2 ++ .../tests/integration/goldens/redis_selective/setup.py | 2 ++ .../goldens/redis_selective/testing/constraints-3.10.txt | 2 ++ .../goldens/redis_selective/testing/constraints-3.11.txt | 2 ++ .../goldens/redis_selective/testing/constraints-3.12.txt | 2 ++ .../goldens/redis_selective/testing/constraints-3.13.txt | 1 + .../goldens/redis_selective/testing/constraints-3.14.txt | 1 + .../goldens/redis_selective/testing/constraints-3.7.txt | 2 ++ .../goldens/redis_selective/testing/constraints-3.8.txt | 2 ++ .../goldens/redis_selective/testing/constraints-3.9.txt | 2 ++ 68 files changed, 120 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 2f265dd994f1..f796c56c23e3 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -37,6 +37,8 @@ dependencies = [ # Exclude incompatible versions of `google-auth` # See 
https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} diff --git a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 index 74049d395c52..19da09d53751 100644 --- a/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 +++ b/packages/gapic-generator/gapic/templates/testing/_default_constraints.j2 @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf {% for package_tuple, package_info in pypi_packages.items() %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 index ef07e240643b..c2e7b8a9934c 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.13.txt.j2 @@ -8,6 +8,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 {% for package_tuple, package_info in pypi_packages.items() %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 index ef07e240643b..c2e7b8a9934c 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.14.txt.j2 @@ -8,6 +8,7 @@ # Then this file should have google-cloud-foo>=1 
google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 {% for package_tuple, package_info in pypi_packages.items() %} diff --git a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 index 74f8f1ad374a..d6d5cd5dd312 100644 --- a/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 +++ b/packages/gapic-generator/gapic/templates/testing/constraints-3.7.txt.j2 @@ -7,6 +7,8 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 {% for package_tuple, package_info in pypi_packages.items() %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 5d506dc2d915..827da67203fc 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt index 70744e58974a..763d545abe78 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt +++ 
b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.10.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf google-cloud-access-context-manager diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt index 70744e58974a..763d545abe78 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.11.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf google-cloud-access-context-manager diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt index 70744e58974a..763d545abe78 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.12.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf google-cloud-access-context-manager diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt index 763099d82412..c3db09a0c746 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.13.txt @@ -7,6 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 google-cloud-access-context-manager>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt index 763099d82412..c3db09a0c746 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.14.txt @@ -7,6 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 google-cloud-access-context-manager>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt index aafb24324a45..45771b18c6a9 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.7.txt @@ -6,6 +6,8 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 
google-cloud-access-context-manager==0.1.2 diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt index 70744e58974a..763d545abe78 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.8.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf google-cloud-access-context-manager diff --git a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt index 70744e58974a..763d545abe78 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/asset/testing/constraints-3.9.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf google-cloud-access-context-manager diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index 6ffe75b3ba84..d2f399fbdab3 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.10.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.11.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.12.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.13.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.14.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt index a77f12bc13e4..5d29dea38698 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.7.txt @@ -6,5 +6,7 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 diff --git 
a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.8.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/credentials/testing/constraints-3.9.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 4cba69da0208..c7d3e0990799 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt index ad3f0fa58e2d..ef1c92ffffeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.10.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt index ad3f0fa58e2d..ef1c92ffffeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.11.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. 
# List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt index ad3f0fa58e2d..ef1c92ffffeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.12.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt index 2010e549cceb..2ae5a677e852 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.13.txt @@ -7,6 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 grpc-google-iam-v1>=0 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt index 2010e549cceb..2ae5a677e852 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.14.txt @@ -7,6 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 grpc-google-iam-v1>=0 diff --git 
a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt index 56affbd9bd75..5b1ee6c35add 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.7.txt @@ -6,6 +6,8 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 grpc-google-iam-v1==0.14.0 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt index ad3f0fa58e2d..ef1c92ffffeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.8.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt index ad3f0fa58e2d..ef1c92ffffeb 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/testing/constraints-3.9.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 23a3a12a2b64..654744319a91 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.10.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.11.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. 
# List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.12.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.13.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.14.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt 
b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt index a77f12bc13e4..5d29dea38698 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.7.txt @@ -6,5 +6,7 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.8.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging/testing/constraints-3.9.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index 23a3a12a2b64..654744319a91 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.10.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.11.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.12.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.13.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.14.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt index a77f12bc13e4..5d29dea38698 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.7.txt @@ -6,5 +6,7 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.8.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/testing/constraints-3.9.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 54fece2f918d..6f42e465170f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.10.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.11.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.12.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.13.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.14.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt index a77f12bc13e4..5d29dea38698 100755 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.7.txt @@ -6,5 +6,7 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.8.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis/testing/constraints-3.9.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index 54fece2f918d..6f42e465170f 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -43,6 +43,8 @@ # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.10.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.11.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.12.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.13.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt index c20a77817caa..1e93c60e50aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.14.txt @@ -7,5 +7,6 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt index a77f12bc13e4..5d29dea38698 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.7.txt @@ -6,5 +6,7 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.1 google-auth==2.14.1 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2453) +# Add the minimum supported version of grpcio to constraints files proto-plus==1.22.3 protobuf==3.20.2 diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.8.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt index ed7f9aed2559..93e6826f2af9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/testing/constraints-3.9.txt @@ -2,5 +2,7 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf From ed73859780d2bdd4270ff19034e4b3100dc96787 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 7 Oct 2025 17:41:15 -0400 Subject: [PATCH 1317/1339] feat: Add support for Python 3.14 (#2451) --- .../gapic-generator/.github/workflows/tests.yaml | 14 ++++++++++---- .../gapic/ads-templates/noxfile.py.j2 | 4 ++++ .../gapic/ads-templates/setup.py.j2 | 1 + .../gapic-generator/gapic/templates/noxfile.py.j2 | 10 +++++++--- .../gapic-generator/gapic/templates/setup.py.j2 | 1 + packages/gapic-generator/noxfile.py | 2 +- packages/gapic-generator/setup.py | 1 + .../tests/integration/goldens/asset/noxfile.py | 10 +++++++--- .../tests/integration/goldens/asset/setup.py | 1 + .../integration/goldens/credentials/noxfile.py | 10 +++++++--- .../tests/integration/goldens/credentials/setup.py | 1 + .../tests/integration/goldens/eventarc/noxfile.py | 10 +++++++--- .../tests/integration/goldens/eventarc/setup.py | 1 + .../tests/integration/goldens/logging/noxfile.py | 10 +++++++--- .../tests/integration/goldens/logging/setup.py | 1 + .../goldens/logging_internal/noxfile.py | 10 +++++++--- .../integration/goldens/logging_internal/setup.py | 1 + .../tests/integration/goldens/redis/noxfile.py | 10 +++++++--- .../tests/integration/goldens/redis/setup.py | 1 + .../integration/goldens/redis_selective/noxfile.py | 10 +++++++--- .../integration/goldens/redis_selective/setup.py | 1 + 21 files changed, 81 insertions(+), 29 deletions(-) diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml index e5e3c22d195d..9db56f8b0cde 100644 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ b/packages/gapic-generator/.github/workflows/tests.yaml @@ -17,8 +17,14 @@ env: SHOWCASE_VERSION: 0.35.0 PROTOC_VERSION: 3.20.2 OLDEST_PYTHON: 3.7 - LATEST_STABLE_PYTHON: 3.13 - PRE_RELEASE_PYTHON: 3.14 + LATEST_STABLE_PYTHON: 3.14 + # 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): + # Switch this to Python 3.15 alpha1 pre-release + # https://peps.python.org/pep-0790/ + PREVIEW_PYTHON_VERSION: 3.14 + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): + # Add Python 3.15 alpha1 pre-release + # https://peps.python.org/pep-0790/ ALL_PYTHON: "['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14']" permissions: @@ -431,10 +437,10 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v5 - - name: Set up Python ${{ env.PRE_RELEASE_PYTHON }} + - name: Set up Python ${{ env.PREVIEW_PYTHON_VERSION }} uses: actions/setup-python@v6 with: - python-version: "${{ env.PRE_RELEASE_PYTHON }}" + python-version: "${{ env.PREVIEW_PYTHON_VERSION }}" cache: 'pip' allow-prereleases: true - name: Install nox. diff --git a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 index 9a2836cb8f27..48646feb5e90 100644 --- a/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/noxfile.py.j2 @@ -7,6 +7,9 @@ import os import nox # type: ignore +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Add tests for Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ ALL_PYTHON = [ "3.7", "3.8", @@ -15,6 +18,7 @@ ALL_PYTHON = [ "3.11", "3.12", "3.13", + "3.14", ] @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index b041173f8500..97ed76176dbe 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -73,6 +73,7 @@ setuptools.setup( "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS 
Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index e4c356cbf6df..976fa2ccadf8 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -32,8 +32,12 @@ ALL_PYTHON = [ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -429,7 +433,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index f796c56c23e3..6573b942db08 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -96,6 +96,7 @@ setuptools.setup( "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 99dab8a44171..bfc4fff8b442 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -47,7 +47,7 @@ "3.14", ) -NEWEST_PYTHON = ALL_PYTHON[-2] +NEWEST_PYTHON = ALL_PYTHON[-1] @nox.session(python=ALL_PYTHON) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 89bbd29d5a96..b8ae013dd25e 100644 --- a/packages/gapic-generator/setup.py +++ 
b/packages/gapic-generator/setup.py @@ -77,6 +77,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Software Development :: Code Generators", "Topic :: Software Development :: Libraries :: Python Modules", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index b61714a5795a..842750346c25 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index 827da67203fc..db0a03c5906d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -90,6 +90,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py 
b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index 9dbefde7b929..e5f8864a33a2 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index d2f399fbdab3..eaba6dcd0ca9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -87,6 +87,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 8a915a5d099b..9af468e682d8 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# 
TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index c7d3e0990799..8f8887b4d57a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -88,6 +88,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index d397b029d259..0aa3b63010f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git 
a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 654744319a91..cb6015c69b10 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -87,6 +87,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index d397b029d259..0aa3b63010f0 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index 654744319a91..cb6015c69b10 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -87,6 +87,7 @@ "Programming Language :: Python :: 3.11", "Programming 
Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index d798ed9a3726..d52df038ca21 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 6f42e465170f..026674b2d3e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -91,6 +91,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index d798ed9a3726..d52df038ca21 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ 
b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -39,8 +39,12 @@ "3.14", ] -DEFAULT_PYTHON_VERSION = ALL_PYTHON[-2] -PRE_RELEASE_PYTHON = ALL_PYTHON[-1] +DEFAULT_PYTHON_VERSION = "3.14" + +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): +# Switch this to Python 3.15 alpha1 +# https://peps.python.org/pep-0790/ +PREVIEW_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -423,7 +427,7 @@ def docfx(session): ) -@nox.session(python=PRE_RELEASE_PYTHON) +@nox.session(python=PREVIEW_PYTHON_VERSION) @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index 6f42e465170f..026674b2d3e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -91,6 +91,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], From 6730b9f1940d876e33ad8b8ccb53752b6153a565 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 21:48:29 +0000 Subject: [PATCH 1318/1339] chore(main): release 1.28.0 (#2459) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 13 +++++++++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 27ee7732cea7..190f81983720 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ 
b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,19 @@ # Changelog +## [1.28.0](https://github.com/googleapis/gapic-generator-python/compare/v1.27.2...v1.28.0) (2025-10-07) + + +### Features + +* Add support for Python 3.14 ([#2451](https://github.com/googleapis/gapic-generator-python/issues/2451)) ([3a854db](https://github.com/googleapis/gapic-generator-python/commit/3a854dbd2448a384dd67d683332a4c438805f5a6)) + + +### Bug Fixes + +* Require grpcio >= 1.33.2 ([ccf132f](https://github.com/googleapis/gapic-generator-python/commit/ccf132f7a8510ce7ef3728fa569806ac03ccea6e)) +* Require grpcio >= 1.75.1 for Python 3.14 ([ccf132f](https://github.com/googleapis/gapic-generator-python/commit/ccf132f7a8510ce7ef3728fa569806ac03ccea6e)) + ## [1.27.2](https://github.com/googleapis/gapic-generator-python/compare/v1.27.1...v1.27.2) (2025-10-01) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index b8ae013dd25e..e718a1ece42d 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.27.2" +version = "1.28.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 695b27ab3051c2283c90c78b0584d4681f247fa2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 14 Oct 2025 17:00:36 -0400 Subject: [PATCH 1319/1339] fix: fix gapic-version generator option for selective gapic generation (#2460) --- packages/gapic-generator/gapic/schema/api.py | 1 + packages/gapic-generator/tests/integration/BUILD.bazel | 1 + .../goldens/redis_selective/google/cloud/redis/gapic_version.py | 2 +- .../redis_selective/google/cloud/redis_v1/gapic_version.py | 2 +- .../snippet_metadata_google.cloud.redis.v1.json | 2 +- 5 files changed, 5 insertions(+), 3 
deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 2c01b07363ab..6fe99b73d114 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -559,6 +559,7 @@ def disambiguate_keyword_sanitize_fname( naming=naming, all_protos=new_all_protos, service_yaml_config=service_yaml_config, + gapic_version=gapic_version, ) return api diff --git a/packages/gapic-generator/tests/integration/BUILD.bazel b/packages/gapic-generator/tests/integration/BUILD.bazel index 3e8a344a97e8..7d4db3417b5b 100644 --- a/packages/gapic-generator/tests/integration/BUILD.bazel +++ b/packages/gapic-generator/tests/integration/BUILD.bazel @@ -178,6 +178,7 @@ py_gapic_library( grpc_service_config = "redis_grpc_service_config.json", opt_args = [ "autogen-snippets", + "gapic-version=1.2.99", ], service_yaml = "redis_selective_v1.yaml", transport = "grpc+rest", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py index 20a9cd975b02..bbc191a8fe2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.2.99" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py index 20a9cd975b02..bbc191a8fe2d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "0.0.0" # {x-release-please-version} +__version__ = "1.2.99" # {x-release-please-version} diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json index 1b46e699ceb0..4202dce4b20a 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/samples/generated_samples/snippet_metadata_google.cloud.redis.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-redis", - "version": "0.0.0" + "version": "1.2.99" }, "snippets": [ { From 7c8c97e69e57571199ce02be2a350e7416d00547 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 14 Oct 2025 21:31:36 +0000 Subject: [PATCH 1320/1339] chore(main): release 1.28.1 (#2461) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ 
packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 190f81983720..25588379a959 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.28.1](https://github.com/googleapis/gapic-generator-python/compare/v1.28.0...v1.28.1) (2025-10-14) + + +### Bug Fixes + +* Fix gapic-version generator option for selective gapic generation ([#2460](https://github.com/googleapis/gapic-generator-python/issues/2460)) ([d7c6072](https://github.com/googleapis/gapic-generator-python/commit/d7c6072cc65e8f166d4b5a78bef250068dca43bf)) + ## [1.28.0](https://github.com/googleapis/gapic-generator-python/compare/v1.27.2...v1.28.0) (2025-10-07) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index e718a1ece42d..4a7daed639a5 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.28.0" +version = "1.28.1" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 1450630183bd9a35e0870de1a4e506539f386cc8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 20 Oct 2025 11:53:37 -0400 Subject: [PATCH 1321/1339] chore: add samples directory to lint paths (#2438) --- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 4 ++-- .../tests/integration/goldens/asset/noxfile.py | 2 +- .../tests/integration/goldens/credentials/noxfile.py | 2 +- .../tests/integration/goldens/eventarc/noxfile.py | 2 +- .../tests/integration/goldens/logging/noxfile.py | 2 +- .../tests/integration/goldens/logging_internal/noxfile.py | 2 +- 
.../tests/integration/goldens/redis/noxfile.py | 2 +- .../tests/integration/goldens/redis_selective/noxfile.py | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 976fa2ccadf8..05cea49d8c75 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -16,9 +16,9 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" {% if api.naming.module_namespace %} -LINT_PATHS = ["docs", "{{ api.naming.module_namespace[0] }}", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "{{ api.naming.module_namespace[0] }}", "samples", "tests", "noxfile.py", "setup.py"] {% else %} -LINT_PATHS = ["docs", "{{ api.naming.versioned_module_name }}", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "{{ api.naming.versioned_module_name }}", "samples", "tests", "noxfile.py", "setup.py"] {% endif %} ALL_PYTHON = [ diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index 842750346c25..c778978b5bbc 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index e5f8864a33a2..c167302ac26d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py 
@@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 9af468e682d8..44fb4c3427a2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index 0aa3b63010f0..dea66d6299b1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index 0aa3b63010f0..dea66d6299b1 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", 
"noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index d52df038ca21..d814779a9e38 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index d52df038ca21..d814779a9e38 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -26,7 +26,7 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] ALL_PYTHON = [ "3.7", From c0d87830f01c32482604f8ee626a48b845a7024c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 20 Oct 2025 12:07:26 -0400 Subject: [PATCH 1322/1339] fix: bump the minimum version of google-apps-card to 0.3.0 (#2464) --- packages/gapic-generator/gapic/templates/_pypi_packages.j2 | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 index 6ce1f8773d83..920d6f72bfc7 100644 --- a/packages/gapic-generator/gapic/templates/_pypi_packages.j2 +++ 
b/packages/gapic-generator/gapic/templates/_pypi_packages.j2 @@ -7,7 +7,7 @@ allowed version. Note: Set the minimum version for google-cloud-documentai to 2.0.0 which has support for `barcode` in `google.cloud.documentai.types` --> {% set pypi_packages = { - ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.1.2", "upper_bound": "1.0.0"}, + ("google", "apps", "card", "v1"): {"package_name": "google-apps-card", "lower_bound": "0.3.0", "upper_bound": "1.0.0"}, ("google", "apps", "script", "type"): {"package_name": "google-apps-script-type", "lower_bound": "0.2.0", "upper_bound": "1.0.0"}, ("google", "geo", "type"): {"package_name": "google-geo-type", "lower_bound": "0.1.0", "upper_bound": "1.0.0"}, ("google", "identity", "accesscontextmanager", "v1"): {"package_name": "google-cloud-access-context-manager", "lower_bound": "0.1.2", "upper_bound": "1.0.0"}, From 3c7c67a35081fe2503c6e686e25c4b2597f46753 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 20 Oct 2025 16:38:19 +0000 Subject: [PATCH 1323/1339] chore(main): release 1.28.2 (#2465) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 25588379a959..d26d5a98fbd9 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.28.2](https://github.com/googleapis/gapic-generator-python/compare/v1.28.1...v1.28.2) (2025-10-20) + + +### Bug Fixes + +* Bump the minimum version of google-apps-card to 0.3.0 ([#2464](https://github.com/googleapis/gapic-generator-python/issues/2464)) 
([bfb87e2](https://github.com/googleapis/gapic-generator-python/commit/bfb87e27a38a9c3b77902d1f61899ebc545082e8)) + ## [1.28.1](https://github.com/googleapis/gapic-generator-python/compare/v1.28.0...v1.28.1) (2025-10-14) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 4a7daed639a5..1feeafc9153b 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.28.1" +version = "1.28.2" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 3cbbcd3c35ba569fd5e3feea8826c27ec7670db1 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 21 Oct 2025 11:54:09 -0400 Subject: [PATCH 1324/1339] fix: only format samples if the directory exists (#2466) --- packages/gapic-generator/gapic/templates/noxfile.py.j2 | 8 ++++++-- .../tests/integration/goldens/asset/noxfile.py | 6 +++++- .../tests/integration/goldens/credentials/noxfile.py | 6 +++++- .../tests/integration/goldens/eventarc/noxfile.py | 6 +++++- .../tests/integration/goldens/logging/noxfile.py | 6 +++++- .../tests/integration/goldens/logging_internal/noxfile.py | 6 +++++- .../tests/integration/goldens/redis/noxfile.py | 6 +++++- .../tests/integration/goldens/redis_selective/noxfile.py | 6 +++++- 8 files changed, 41 insertions(+), 9 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/noxfile.py.j2 b/packages/gapic-generator/gapic/templates/noxfile.py.j2 index 05cea49d8c75..274ca368e0e3 100644 --- a/packages/gapic-generator/gapic/templates/noxfile.py.j2 +++ b/packages/gapic-generator/gapic/templates/noxfile.py.j2 @@ -16,11 +16,15 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" {% if api.naming.module_namespace %} -LINT_PATHS = 
["docs", "{{ api.naming.module_namespace[0] }}", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "{{ api.naming.module_namespace[0] }}", "tests", "noxfile.py", "setup.py"] {% else %} -LINT_PATHS = ["docs", "{{ api.naming.versioned_module_name }}", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "{{ api.naming.versioned_module_name }}", "tests", "noxfile.py", "setup.py"] {% endif %} +# Add samples to the list of directories to format if the directory exists. +if os.path.isdir("samples"): + LINT_PATHS.append("samples") + ALL_PYTHON = [ "3.7", "3.8", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py index c778978b5bbc..487acba7258e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py index c167302ac26d..af4ba3e2853e 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. +if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py index 44fb4c3427a2..c718640903e2 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py index dea66d6299b1..8bf90a69f4a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. +if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py index dea66d6299b1..8bf90a69f4a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py index d814779a9e38..4349f5147dce 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. +if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py index d814779a9e38..4349f5147dce 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/noxfile.py @@ -26,7 +26,11 @@ BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "samples", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +# Add samples to the list of directories to format if the directory exists. 
+if os.path.isdir("samples"): + LINT_PATHS.append("samples") ALL_PYTHON = [ "3.7", From 350391161c2a620d7d31ca729b695aed518c1f08 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 21 Oct 2025 16:23:46 +0000 Subject: [PATCH 1325/1339] chore(main): release 1.28.3 (#2467) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index d26d5a98fbd9..cf7e166df7c7 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.28.3](https://github.com/googleapis/gapic-generator-python/compare/v1.28.2...v1.28.3) (2025-10-21) + + +### Bug Fixes + +* Only format samples if the directory exists ([#2466](https://github.com/googleapis/gapic-generator-python/issues/2466)) ([2d85dee](https://github.com/googleapis/gapic-generator-python/commit/2d85dee33cc1cd2815dcf1e9304905013ec561b6)) + ## [1.28.2](https://github.com/googleapis/gapic-generator-python/compare/v1.28.1...v1.28.2) (2025-10-20) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 1feeafc9153b..852d9648bdad 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.28.2" +version = "1.28.3" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 32e15ebc8f3e125820fee6f554ed1ccd9a118c20 Mon Sep 17 00:00:00 2001 From: Victor Chudnovsky Date: Thu, 23 Oct 2025 16:33:56 -0700 Subject: 
[PATCH 1326/1339] feat: check Python and dependency versions in generated GAPICs (#2419) --- .../%name_%version/%sub/__init__.py.j2 | 86 +++++++++++++++++++ .../gapic/templates/setup.py.j2 | 1 + .../%name_%version/%sub/test_%service.py.j2 | 1 + packages/gapic-generator/noxfile.py | 8 ++ .../asset/google/cloud/asset_v1/__init__.py | 79 +++++++++++++++++ .../tests/integration/goldens/asset/setup.py | 1 + .../unit/gapic/asset_v1/test_asset_service.py | 1 + .../google/iam/credentials_v1/__init__.py | 79 +++++++++++++++++ .../integration/goldens/credentials/setup.py | 1 + .../credentials_v1/test_iam_credentials.py | 1 + .../google/cloud/eventarc_v1/__init__.py | 79 +++++++++++++++++ .../integration/goldens/eventarc/setup.py | 1 + .../unit/gapic/eventarc_v1/test_eventarc.py | 1 + .../google/cloud/logging_v2/__init__.py | 79 +++++++++++++++++ .../integration/goldens/logging/setup.py | 1 + .../logging_v2/test_config_service_v2.py | 1 + .../logging_v2/test_logging_service_v2.py | 1 + .../logging_v2/test_metrics_service_v2.py | 1 + .../google/cloud/logging_v2/__init__.py | 79 +++++++++++++++++ .../goldens/logging_internal/setup.py | 1 + .../logging_v2/test_config_service_v2.py | 1 + .../logging_v2/test_logging_service_v2.py | 1 + .../logging_v2/test_metrics_service_v2.py | 1 + .../redis/google/cloud/redis_v1/__init__.py | 79 +++++++++++++++++ .../tests/integration/goldens/redis/setup.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1 + .../google/cloud/redis_v1/__init__.py | 79 +++++++++++++++++ .../goldens/redis_selective/setup.py | 1 + .../unit/gapic/redis_v1/test_cloud_redis.py | 1 + .../tests/system/test_response_metadata.py | 1 + 30 files changed, 668 insertions(+) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 8ee16704313e..abe33f857575 100644 --- 
a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -7,6 +7,92 @@ from {{package_path}} import gapic_version as package_version __version__ = package_version.__version__ + +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + {# TODO(api_core): remove `type:ignore` below when minimum version of api_core makes the else clause unnecessary. #} + api_core.check_python_version("{{package_path}}") # type: ignore + api_core.check_dependency_versions("{{package_path}}") # type: ignore +else: # pragma: NO COVER +{# TODO(api_core): Remove this try-catch when we require api-core at a version that + supports the changes in https://github.com/googleapis/python-api-core/pull/832 + + In the meantime, please ensure the functionality here mirrors the + equivalent functionality in api_core, in those two functions above. +#} + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "{{package_path}}" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. 
Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + {# Import subpackages. -#} {% for subpackage, _ in api.subpackages|dictsort %} from . import {{ subpackage }} diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 6573b942db08..5f2f803315cd 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -39,6 +39,7 @@ dependencies = [ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 31ab20095b16..6391c95768fc 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -1472,6 +1472,7 @@ def test_{{ service.name|snake_case }}_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.{{ service.grpc_transport_name }}, transports.{{ service.grpc_asyncio_transport_name }}]) def test_{{ service.name|snake_case }}_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index bfc4fff8b442..5571594f7f65 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -61,6 +61,8 @@ def unit(session): "pyfakefs", "grpcio-status", "proto-plus", + "setuptools", # TODO: Remove when not needed in __init__.py.j2 + "packaging", # TODO: Remove when not needed in __init__.py.j2 ) session.install("-e", ".") session.run( @@ -482,6 +484,8 @@ def run_showcase_unit_tests(session, fail_under=100, rest_async_io_enabled=False "pytest-xdist", "asyncmock; python_version < '3.8'", "pytest-asyncio", + "setuptools", # TODO: Remove when not needed in __init__.py.j2 + "packaging", # TODO: Remove when not needed in __init__.py.j2 ) # Run the tests. # NOTE: async rest is not supported against the minimum supported version of google-api-core. 
@@ -596,6 +600,8 @@ def showcase_mypy( "types-protobuf", "types-requests", "types-dataclasses", + "setuptools", # TODO: Remove when not needed in __init__.py.j2 + "packaging", # TODO: Remove when not needed in __init__.py.j2 ) with showcase_library(session, templates=templates, other_opts=other_opts) as lib: @@ -726,6 +732,8 @@ def mypy(session): "types-PyYAML", "types-dataclasses", "click==8.1.3", + "setuptools", # TODO: Remove when not needed in __init__.py.j2 + "packaging", # TODO: Remove when not needed in __init__.py.j2 ) session.install(".") session.run("mypy", "-p", "gapic") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 03ee9ec521b7..4f4362db8ddd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.asset_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.asset_v1" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." 
+ + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.asset_service import AssetServiceClient from .services.asset_service import AssetServiceAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index db0a03c5906d..f7d2a2bdf837 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index 10cb01ca87b9..edd142a8084e 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ 
-17712,6 +17712,7 @@ def test_asset_service_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.AssetServiceGrpcTransport, transports.AssetServiceGrpcAsyncIOTransport]) def test_asset_service_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 2be4471eb82d..3dd77183a791 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.iam.credentials_v1") # type: ignore + api_core.check_dependency_versions("google.iam.credentials_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.iam.credentials_v1" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." 
+ + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.iam_credentials import IAMCredentialsClient from .services.iam_credentials import IAMCredentialsAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index eaba6dcd0ca9..f4ebe518e8e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 62b329f3e526..90ddb8c449e9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -3972,6 +3972,7 @@ def test_iam_credentials_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.IAMCredentialsGrpcTransport, transports.IAMCredentialsGrpcAsyncIOTransport]) def test_iam_credentials_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index a34386728483..714542db6118 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.eventarc_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.eventarc_v1" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." 
+ + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.eventarc import EventarcClient from .services.eventarc import EventarcAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index 8f8887b4d57a..aeb6fdeb1c6a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index 66ac1829f142..c815caafc5be 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ 
-15378,6 +15378,7 @@ def test_eventarc_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.EventarcGrpcTransport, transports.EventarcGrpcAsyncIOTransport]) def test_eventarc_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index e8b59e0bb511..65abc3b298da 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.logging_v2" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." 
+ + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.config_service_v2 import ConfigServiceV2Client from .services.config_service_v2 import ConfigServiceV2AsyncClient from .services.logging_service_v2 import LoggingServiceV2Client diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index cb6015c69b10..0c103cd02f06 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index 5de14eaafe9a..c284e58f8b7d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ 
b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -12897,6 +12897,7 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) def test_config_service_v2_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 195d45601748..3a3740300bfb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -3445,6 +3445,7 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index a7b6c70e076e..4633e315a160 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -3249,6 +3249,7 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py index 3e0f9c2155ff..ab630bbc65f9 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.logging_v2" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. 
Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.config_service_v2 import BaseConfigServiceV2Client from .services.config_service_v2 import BaseConfigServiceV2AsyncClient from .services.logging_service_v2 import LoggingServiceV2Client diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index cb6015c69b10..0c103cd02f06 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index e206a2486bb0..97d56ab135ef 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -12897,6 +12897,7 @@ def test_config_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.ConfigServiceV2GrpcTransport, transports.ConfigServiceV2GrpcAsyncIOTransport]) def test_config_service_v2_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 195d45601748..3a3740300bfb 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -3445,6 +3445,7 @@ def test_logging_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.LoggingServiceV2GrpcTransport, transports.LoggingServiceV2GrpcAsyncIOTransport]) def test_logging_service_v2_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 35cb37f70ddc..bcb2c8346fbd 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -3249,6 +3249,7 @@ def test_metrics_service_v2_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.MetricsServiceV2GrpcTransport, transports.MetricsServiceV2GrpcAsyncIOTransport]) def test_metrics_service_v2_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index af3d250ccdf8..8db2b26e6532 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.redis_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.redis_v1" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. 
Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.cloud_redis import CloudRedisClient from .services.cloud_redis import CloudRedisAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index 026674b2d3e7..cf35d014a47d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 4ff5644cdc5c..02549e2f2f17 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -11657,6 +11657,7 @@ def test_cloud_redis_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) def test_cloud_redis_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py index 760fe80f6c14..b781ff92cdb5 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py @@ -18,6 +18,85 @@ __version__ = package_version.__version__ +import google.api_core as api_core + +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.redis_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.redis_v1" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. 
Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + from packaging.version import parse as parse_version + + if sys.version_info < (3, 8): + import pkg_resources + + def _get_version(dependency_name): + try: + version_string = pkg_resources.get_distribution(dependency_name).version + return (parse_version(version_string), version_string) + except pkg_resources.DistributionNotFound: + return (None, "--") + else: + from importlib import metadata + + def _get_version(dependency_name): + try: + version_string = metadata.version("requests") + parsed_version = parse_version(version_string) + return (parsed_version.release, version_string) + except metadata.PackageNotFoundError: + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + + from .services.cloud_redis import CloudRedisClient from .services.cloud_redis import CloudRedisAsyncClient diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index 026674b2d3e7..cf35d014a47d 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -45,6 +45,7 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py index fd46390682ed..2aed5640c0aa 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -6823,6 +6823,7 @@ def test_cloud_redis_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. 
+@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize("transport_class", [transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport]) def test_cloud_redis_transport_channel_mtls_with_client_cert_source( transport_class diff --git a/packages/gapic-generator/tests/system/test_response_metadata.py b/packages/gapic-generator/tests/system/test_response_metadata.py index f99abf3e11fa..2cc868145539 100644 --- a/packages/gapic-generator/tests/system/test_response_metadata.py +++ b/packages/gapic-generator/tests/system/test_response_metadata.py @@ -84,6 +84,7 @@ async def test_metadata_response_rest_streams_async(intercepted_echo_rest_async) ("rest_asyncio", ("X-Showcase-Request-Something3", "something_value3")), ], ) + @pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.asyncio async def test_metadata_response_unary_async( intercepted_echo_grpc_async, From 5ed689ddf247a1aa10020b45f569bc4ec6f1bc50 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 23 Oct 2025 18:53:41 -0700 Subject: [PATCH 1327/1339] chore(main): release 1.29.0 (#2468) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/gapic-generator/CHANGELOG.md | 7 +++++++ packages/gapic-generator/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index cf7e166df7c7..a9d964d73809 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,6 +1,13 @@ # Changelog +## [1.29.0](https://github.com/googleapis/gapic-generator-python/compare/v1.28.3...v1.29.0) (2025-10-23) + + +### Features + +* Check Python and dependency versions in generated GAPICs ([#2419](https://github.com/googleapis/gapic-generator-python/issues/2419)) 
([a9484c6](https://github.com/googleapis/gapic-generator-python/commit/a9484c6a29687c0f3ffb63f271e350054547d495)) + ## [1.28.3](https://github.com/googleapis/gapic-generator-python/compare/v1.28.2...v1.28.3) (2025-10-21) diff --git a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 852d9648bdad..3f8b4e2e1aa1 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.28.3" +version = "1.29.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From fc32e4729b53eb7483d1116b1e0b1547c78f8a46 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 4 Nov 2025 09:49:04 -0500 Subject: [PATCH 1328/1339] tests: use older google-api-core when running tools with older runtimes (#2474) --- packages/gapic-generator/noxfile.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 5571594f7f65..557f762a77f8 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -175,6 +175,13 @@ def fragment(session, use_ads_templates=False): ) session.install("-e", ".") + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2473): + # Warnings emitted from google-api-core starting in 2.28 + # appear to cause issues when running protoc. 
+ # The specific failure is `Plugin output is unparseable` + if session.python in ("3.7", "3.8", "3.9", "3.10"): + session.install("google-api-core<2.28") + frag_files = ( [Path(f) for f in session.posargs] if session.posargs else FRAGMENT_FILES ) @@ -238,6 +245,13 @@ def showcase_library( # Install grpcio-tools for protoc session.install("grpcio-tools") + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2473): + # Warnings emitted from google-api-core starting in 2.28 + # appear to cause issues when running protoc. + # The specific failure is `Plugin output is unparseable` + if session.python in ("3.7", "3.8", "3.9", "3.10"): + session.install("google-api-core<2.28") + # Install a client library for Showcase. with tempfile.TemporaryDirectory() as tmp_dir: # Download the Showcase descriptor. From cc86c71543e737845173b7ea373c7b014b87ba32 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 6 Nov 2025 21:36:04 +0000 Subject: [PATCH 1329/1339] chore(deps): update all dependencies (#2469) Co-authored-by: Anthonios Partheniou --- packages/gapic-generator/requirements.txt | 2031 ++++++++++++--------- 1 file changed, 1140 insertions(+), 891 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index aa753841aa5f..0639da5720b7 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -8,93 +8,127 @@ aiohappyeyeballs==2.6.1 \ --hash=sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558 \ --hash=sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8 # via aiohttp -aiohttp==3.12.15 \ - --hash=sha256:010cc9bbd06db80fe234d9003f67e97a10fe003bfbedb40da7d71c1008eda0fe \ - --hash=sha256:049ec0360f939cd164ecbfd2873eaa432613d5e77d6b04535e3d1fbae5a9e645 \ - --hash=sha256:098e92835b8119b54c693f2f88a1dec690e20798ca5f5fe5f0520245253ee0af \ - --hash=sha256:0a146708808c9b7a988a4af3821379e379e0f0e5e466ca31a73dbdd0325b0263 \ - 
--hash=sha256:0a23918fedc05806966a2438489dcffccbdf83e921a1170773b6178d04ade142 \ - --hash=sha256:0c643f4d75adea39e92c0f01b3fb83d57abdec8c9279b3078b68a3a52b3933b6 \ - --hash=sha256:1004e67962efabbaf3f03b11b4c43b834081c9e3f9b32b16a7d97d4708a9abe6 \ - --hash=sha256:14954a2988feae3987f1eb49c706bff39947605f4b6fa4027c1d75743723eb09 \ - --hash=sha256:1a649001580bdb37c6fdb1bebbd7e3bc688e8ec2b5c6f52edbb664662b17dc84 \ - --hash=sha256:2776c7ec89c54a47029940177e75c8c07c29c66f73464784971d6a81904ce9d1 \ - --hash=sha256:2abbb216a1d3a2fe86dbd2edce20cdc5e9ad0be6378455b05ec7f77361b3ab50 \ - --hash=sha256:2c7d81a277fa78b2203ab626ced1487420e8c11a8e373707ab72d189fcdad20a \ - --hash=sha256:2ce13fcfb0bb2f259fb42106cdc63fa5515fb85b7e87177267d89a771a660b79 \ - --hash=sha256:2e5a495cb1be69dae4b08f35a6c4579c539e9b5706f606632102c0f855bcba7c \ - --hash=sha256:2ee8a8ac39ce45f3e55663891d4b1d15598c157b4d494a4613e704c8b43112cd \ - --hash=sha256:3b6f0af863cf17e6222b1735a756d664159e58855da99cfe965134a3ff63b0b0 \ - --hash=sha256:3bdd6e17e16e1dbd3db74d7f989e8af29c4d2e025f9828e6ef45fbdee158ec75 \ - --hash=sha256:3beb14f053222b391bf9cf92ae82e0171067cc9c8f52453a0f1ec7c37df12a77 \ - --hash=sha256:3c5092ce14361a73086b90c6efb3948ffa5be2f5b6fbcf52e8d8c8b8848bb97c \ - --hash=sha256:3ead1c00f8521a5c9070fcb88f02967b1d8a0544e6d85c253f6968b785e1a2ab \ - --hash=sha256:3eae49032c29d356b94eee45a3f39fdf4b0814b397638c2f718e96cfadf4c4e4 \ - --hash=sha256:3f9d7c55b41ed687b9d7165b17672340187f87a773c98236c987f08c858145a9 \ - --hash=sha256:40b3fee496a47c3b4a39a731954c06f0bd9bd3e8258c059a4beb76ac23f8e421 \ - --hash=sha256:421da6fd326460517873274875c6c5a18ff225b40da2616083c5a34a7570b685 \ - --hash=sha256:4420cf9d179ec8dfe4be10e7d0fe47d6d606485512ea2265b0d8c5113372771b \ - --hash=sha256:46749be6e89cd78d6068cdf7da51dbcfa4321147ab8e4116ee6678d9a056a0cf \ - --hash=sha256:47f6b962246f0a774fbd3b6b7be25d59b06fdb2f164cf2513097998fc6a29693 \ - --hash=sha256:4c39e87afe48aa3e814cac5f535bc6199180a53e38d3f51c5e2530f5aa4ec58c \ - 
--hash=sha256:4fc61385e9c98d72fcdf47e6dd81833f47b2f77c114c29cd64a361be57a763a2 \ - --hash=sha256:5015082477abeafad7203757ae44299a610e89ee82a1503e3d4184e6bafdd519 \ - --hash=sha256:5346b93e62ab51ee2a9d68e8f73c7cf96ffb73568a23e683f931e52450e4148d \ - --hash=sha256:536ad7234747a37e50e7b6794ea868833d5220b49c92806ae2d7e8a9d6b5de02 \ - --hash=sha256:56822ff5ddfd1b745534e658faba944012346184fbfe732e0d6134b744516eea \ - --hash=sha256:57d16590a351dfc914670bd72530fd78344b885a00b250e992faea565b7fdc05 \ - --hash=sha256:5fa5d9eb82ce98959fc1031c28198b431b4d9396894f385cb63f1e2f3f20ca6b \ - --hash=sha256:6404dfc8cdde35c69aaa489bb3542fb86ef215fc70277c892be8af540e5e21c0 \ - --hash=sha256:6443cca89553b7a5485331bc9bedb2342b08d073fa10b8c7d1c60579c4a7b9bd \ - --hash=sha256:691d203c2bdf4f4637792efbbcdcd157ae11e55eaeb5e9c360c1206fb03d4d98 \ - --hash=sha256:6990ef617f14450bc6b34941dba4f12d5613cbf4e33805932f853fbd1cf18bfb \ - --hash=sha256:6c5f40ec615e5264f44b4282ee27628cea221fcad52f27405b80abb346d9f3f8 \ - --hash=sha256:6d86a2fbdd14192e2f234a92d3b494dd4457e683ba07e5905a0b3ee25389ac9f \ - --hash=sha256:74bdd8c864b36c3673741023343565d95bfbd778ffe1eb4d412c135a28a8dc89 \ - --hash=sha256:74dad41b3458dbb0511e760fb355bb0b6689e0630de8a22b1b62a98777136e16 \ - --hash=sha256:760fb7db442f284996e39cf9915a94492e1896baac44f06ae551974907922b64 \ - --hash=sha256:79b26fe467219add81d5e47b4a4ba0f2394e8b7c7c3198ed36609f9ba161aecb \ - --hash=sha256:7c7dd29c7b5bda137464dc9bfc738d7ceea46ff70309859ffde8c022e9b08ba7 \ - --hash=sha256:7fbc8a7c410bb3ad5d595bb7118147dfbb6449d862cc1125cf8867cb337e8728 \ - --hash=sha256:802d3868f5776e28f7bf69d349c26fc0efadb81676d0afa88ed00d98a26340b7 \ - --hash=sha256:83603f881e11f0f710f8e2327817c82e79431ec976448839f3cd05d7afe8f830 \ - --hash=sha256:8466151554b593909d30a0a125d638b4e5f3836e5aecde85b66b80ded1cb5b0d \ - --hash=sha256:86ceded4e78a992f835209e236617bffae649371c4a50d5e5a3987f237db84b8 \ - --hash=sha256:894261472691d6fe76ebb7fcf2e5870a2ac284c7406ddc95823c8598a1390f0d \ - 
--hash=sha256:8e995e1abc4ed2a454c731385bf4082be06f875822adc4c6d9eaadf96e20d406 \ - --hash=sha256:8faa08fcc2e411f7ab91d1541d9d597d3a90e9004180edb2072238c085eac8c2 \ - --hash=sha256:9b2af240143dd2765e0fb661fd0361a1b469cab235039ea57663cda087250ea9 \ - --hash=sha256:9f922ffd05034d439dde1c77a20461cf4a1b0831e6caa26151fe7aa8aaebc315 \ - --hash=sha256:a041e7e2612041a6ddf1c6a33b883be6a421247c7afd47e885969ee4cc58bd8d \ - --hash=sha256:aaa2234bb60c4dbf82893e934d8ee8dea30446f0647e024074237a56a08c01bd \ - --hash=sha256:ac77f709a2cde2cc71257ab2d8c74dd157c67a0558a0d2799d5d571b4c63d44d \ - --hash=sha256:ad702e57dc385cae679c39d318def49aef754455f237499d5b99bea4ef582e51 \ - --hash=sha256:b2acbbfff69019d9014508c4ba0401822e8bae5a5fdc3b6814285b71231b60f3 \ - --hash=sha256:b390ef5f62bb508a9d67cb3bba9b8356e23b3996da7062f1a57ce1a79d2b3d34 \ - --hash=sha256:b52dcf013b57464b6d1e51b627adfd69a8053e84b7103a7cd49c030f9ca44461 \ - --hash=sha256:b5b7fe4972d48a4da367043b8e023fb70a04d1490aa7d68800e465d1b97e493b \ - --hash=sha256:b6fc902bff74d9b1879ad55f5404153e2b33a82e72a95c89cec5eb6cc9e92fbc \ - --hash=sha256:b7011a70b56facde58d6d26da4fec3280cc8e2a78c714c96b7a01a87930a9530 \ - --hash=sha256:b761bac1192ef24e16706d761aefcb581438b34b13a2f069a6d343ec8fb693a5 \ - --hash=sha256:b784d6ed757f27574dca1c336f968f4e81130b27595e458e69457e6878251f5d \ - --hash=sha256:b97752ff12cc12f46a9b20327104448042fce5c33a624f88c18f66f9368091c7 \ - --hash=sha256:bc4fbc61bb3548d3b482f9ac7ddd0f18c67e4225aaa4e8552b9f1ac7e6bda9e5 \ - --hash=sha256:bc9a0f6569ff990e0bbd75506c8d8fe7214c8f6579cca32f0546e54372a3bb54 \ - --hash=sha256:bd44d5936ab3193c617bfd6c9a7d8d1085a8dc8c3f44d5f1dcf554d17d04cf7d \ - --hash=sha256:ced339d7c9b5030abad5854aa5413a77565e5b6e6248ff927d3e174baf3badf7 \ - --hash=sha256:d3ce17ce0220383a0f9ea07175eeaa6aa13ae5a41f30bc61d84df17f0e9b1117 \ - --hash=sha256:d5f1b4ce5bc528a6ee38dbf5f39bbf11dd127048726323b72b8e85769319ffc4 \ - --hash=sha256:d849b0901b50f2185874b9a232f38e26b9b3d4810095a7572eacea939132d4e1 \ - 
--hash=sha256:db71ce547012a5420a39c1b744d485cfb823564d01d5d20805977f5ea1345676 \ - --hash=sha256:e153e8adacfe2af562861b72f8bc47f8a5c08e010ac94eebbe33dc21d677cd5b \ - --hash=sha256:edd533a07da85baa4b423ee8839e3e91681c7bfa19b04260a469ee94b778bf6d \ - --hash=sha256:f0adb4177fa748072546fb650d9bd7398caaf0e15b370ed3317280b13f4083b0 \ - --hash=sha256:f0fa751efb11a541f57db59c1dd821bec09031e01452b2b6217319b3a1f34f3d \ - --hash=sha256:f2800614cd560287be05e33a679638e586a2d7401f4ddf99e304d98878c29444 \ - --hash=sha256:f813c3e9032331024de2eb2e32a88d86afb69291fbc37a3a3ae81cc9917fb3d0 \ - --hash=sha256:fc49c4de44977aa8601a00edbf157e9a421f227aa7eb477d9e3df48343311065 \ - --hash=sha256:fd736ed420f4db2b8148b52b46b88ed038d0354255f9a73196b7bbce3ea97545 \ - --hash=sha256:fe086edf38b2222328cdf89af0dde2439ee173b8ad7cb659b4e4c6f385b2be3d +aiohttp==3.13.2 \ + --hash=sha256:04c3971421576ed24c191f610052bcb2f059e395bc2489dd99e397f9bc466329 \ + --hash=sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6 \ + --hash=sha256:070599407f4954021509193404c4ac53153525a19531051661440644728ba9a7 \ + --hash=sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254 \ + --hash=sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742 \ + --hash=sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad \ + --hash=sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906 \ + --hash=sha256:0e87dff73f46e969af38ab3f7cb75316a7c944e2e574ff7c933bc01b10def7f5 \ + --hash=sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811 \ + --hash=sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded \ + --hash=sha256:1f9b2c2d4b9d958b1f9ae0c984ec1dd6b6689e15c75045be8ccb4011426268ca \ + --hash=sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f \ + --hash=sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98 \ + 
--hash=sha256:20db2d67985d71ca033443a1ba2001c4b5693fe09b0e29f6d9358a99d4d62a8a \ + --hash=sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a \ + --hash=sha256:2372b15a5f62ed37789a6b383ff7344fc5b9f243999b0cd9b629d8bc5f5b4155 \ + --hash=sha256:23ad365e30108c422d0b4428cf271156dd56790f6dd50d770b8e360e6c5ab2e6 \ + --hash=sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e \ + --hash=sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b \ + --hash=sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e \ + --hash=sha256:29562998ec66f988d49fb83c9b01694fa927186b781463f376c5845c121e4e0b \ + --hash=sha256:2adebd4577724dcae085665f294cc57c8701ddd4d26140504db622b8d566d7aa \ + --hash=sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf \ + --hash=sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5 \ + --hash=sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc \ + --hash=sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514 \ + --hash=sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f \ + --hash=sha256:3a92cf4b9bea33e15ecbaa5c59921be0f23222608143d025c989924f7e3e0c07 \ + --hash=sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca \ + --hash=sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e \ + --hash=sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9 \ + --hash=sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0 \ + --hash=sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782 \ + --hash=sha256:4dd3db9d0f4ebca1d887d76f7cdbcd1116ac0d05a9221b9dad82c64a62578c4d \ + --hash=sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213 \ + --hash=sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293 \ + --hash=sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead \ + 
--hash=sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04 \ + --hash=sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61 \ + --hash=sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3 \ + --hash=sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4 \ + --hash=sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b \ + --hash=sha256:6315fb6977f1d0dd41a107c527fee2ed5ab0550b7d885bc15fee20ccb17891da \ + --hash=sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6 \ + --hash=sha256:6c00dbcf5f0d88796151e264a8eab23de2997c9303dd7c0bf622e23b24d3ce22 \ + --hash=sha256:6e7352512f763f760baaed2637055c49134fd1d35b37c2dedfac35bfe5cf8725 \ + --hash=sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be \ + --hash=sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661 \ + --hash=sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e \ + --hash=sha256:7c3a50345635a02db61792c85bb86daffac05330f6473d524f1a4e3ef9d0046d \ + --hash=sha256:7fbdf5ad6084f1940ce88933de34b62358d0f4a0b6ec097362dcd3e5a65a4989 \ + --hash=sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c \ + --hash=sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec \ + --hash=sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45 \ + --hash=sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a \ + --hash=sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8 \ + --hash=sha256:8b2f1414f6a1e0683f212ec80e813f4abef94c739fd090b66c9adf9d2a05feac \ + --hash=sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694 \ + --hash=sha256:939ced4a7add92296b0ad38892ce62b98c619288a081170695c6babe4f50e636 \ + --hash=sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169 \ + --hash=sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a \ + 
--hash=sha256:960c2fc686ba27b535f9fd2b52d87ecd7e4fd1cf877f6a5cba8afb5b4a8bd204 \ + --hash=sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a \ + --hash=sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab \ + --hash=sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae \ + --hash=sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948 \ + --hash=sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8 \ + --hash=sha256:9c705601e16c03466cb72011bd1af55d68fa65b045356d8f96c216e5f6db0fa5 \ + --hash=sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30 \ + --hash=sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16 \ + --hash=sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693 \ + --hash=sha256:9f377d0a924e5cc94dc620bc6366fc3e889586a7f18b748901cf016c916e2084 \ + --hash=sha256:a09a6d073fb5789456545bdee2474d14395792faa0527887f2f4ec1a486a59d3 \ + --hash=sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4 \ + --hash=sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592 \ + --hash=sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49 \ + --hash=sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a \ + --hash=sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e \ + --hash=sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4 \ + --hash=sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9 \ + --hash=sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b \ + --hash=sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f \ + --hash=sha256:b59d13c443f8e049d9e94099c7e412e34610f1f49be0f230ec656a10692a5802 \ + --hash=sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb \ + --hash=sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b \ + 
--hash=sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011 \ + --hash=sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3 \ + --hash=sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7 \ + --hash=sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c \ + --hash=sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734 \ + --hash=sha256:d7bc4b7f9c4921eba72677cd9fedd2308f4a4ca3e12fab58935295ad9ea98700 \ + --hash=sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d \ + --hash=sha256:dacd50501cd017f8cccb328da0c90823511d70d24a323196826d923aad865901 \ + --hash=sha256:e036a3a645fe92309ec34b918394bb377950cbb43039a97edae6c08db64b23e2 \ + --hash=sha256:e09a0a06348a2dd73e7213353c90d709502d9786219f69b731f6caa0efeb46f5 \ + --hash=sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23 \ + --hash=sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8 \ + --hash=sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613 \ + --hash=sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb \ + --hash=sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251 \ + --hash=sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6 \ + --hash=sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780 \ + --hash=sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd \ + --hash=sha256:e7f8659a48995edee7229522984bd1009c1213929c769c2daa80b40fe49a180c \ + --hash=sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf \ + --hash=sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa \ + --hash=sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40 \ + --hash=sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7 \ + --hash=sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673 \ + 
--hash=sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb \ + --hash=sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61 \ + --hash=sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940 \ + --hash=sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be \ + --hash=sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b \ + --hash=sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4 \ + --hash=sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476 \ + --hash=sha256:fed38a5edb7945f4d1bcabe2fcd05db4f6ec7e0e82560088b754f7e08d93772d \ + --hash=sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f \ + --hash=sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248 \ + --hash=sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a # via -r requirements.in aiosignal==1.4.0 \ --hash=sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e \ @@ -104,102 +138,136 @@ async-timeout==5.0.1 \ --hash=sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c \ --hash=sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3 # via aiohttp -attrs==25.3.0 \ - --hash=sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3 \ - --hash=sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b +attrs==25.4.0 \ + --hash=sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11 \ + --hash=sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373 # via aiohttp backports-asyncio-runner==1.2.0 \ --hash=sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5 \ --hash=sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162 # via pytest-asyncio -cachetools==6.2.0 \ - --hash=sha256:1c76a8960c0041fcc21097e357f882197c79da0dbff766e7317890a65d7d8ba6 \ - 
--hash=sha256:38b328c0889450f05f5e120f56ab68c8abaf424e1275522b138ffc93253f7e32 +cachetools==6.2.1 \ + --hash=sha256:09868944b6dde876dfd44e1d47e18484541eaf12f26f29b7af91b26cc892d701 \ + --hash=sha256:3f391e4bd8f8bf0931169baf7456cc822705f4e2a31f840d218f445b9a854201 # via google-auth -certifi==2025.8.3 \ - --hash=sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407 \ - --hash=sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5 +certifi==2025.10.5 \ + --hash=sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de \ + --hash=sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43 # via requests -charset-normalizer==3.4.3 \ - --hash=sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91 \ - --hash=sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0 \ - --hash=sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154 \ - --hash=sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601 \ - --hash=sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884 \ - --hash=sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07 \ - --hash=sha256:0f2be7e0cf7754b9a30eb01f4295cc3d4358a479843b31f328afd210e2c7598c \ - --hash=sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64 \ - --hash=sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe \ - --hash=sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f \ - --hash=sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432 \ - --hash=sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc \ - --hash=sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa \ - --hash=sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9 \ - --hash=sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae \ - 
--hash=sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19 \ - --hash=sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d \ - --hash=sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e \ - --hash=sha256:252098c8c7a873e17dd696ed98bbe91dbacd571da4b87df3736768efa7a792e4 \ - --hash=sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7 \ - --hash=sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312 \ - --hash=sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92 \ - --hash=sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31 \ - --hash=sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c \ - --hash=sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f \ - --hash=sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99 \ - --hash=sha256:3653fad4fe3ed447a596ae8638b437f827234f01a8cd801842e43f3d0a6b281b \ - --hash=sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15 \ - --hash=sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392 \ - --hash=sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f \ - --hash=sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8 \ - --hash=sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491 \ - --hash=sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0 \ - --hash=sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc \ - --hash=sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0 \ - --hash=sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f \ - --hash=sha256:5b413b0b1bfd94dbf4023ad6945889f374cd24e3f62de58d6bb102c4d9ae534a \ - --hash=sha256:5d8d01eac18c423815ed4f4a2ec3b439d654e55ee4ad610e153cf02faf67ea40 \ - --hash=sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927 \ - 
--hash=sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849 \ - --hash=sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce \ - --hash=sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14 \ - --hash=sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05 \ - --hash=sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c \ - --hash=sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c \ - --hash=sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a \ - --hash=sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc \ - --hash=sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34 \ - --hash=sha256:8999f965f922ae054125286faf9f11bc6932184b93011d138925a1773830bbe9 \ - --hash=sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096 \ - --hash=sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14 \ - --hash=sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30 \ - --hash=sha256:a2d08ac246bb48479170408d6c19f6385fa743e7157d716e144cad849b2dd94b \ - --hash=sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b \ - --hash=sha256:b5e3b2d152e74e100a9e9573837aba24aab611d39428ded46f4e4022ea7d1942 \ - --hash=sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db \ - --hash=sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5 \ - --hash=sha256:c60e092517a73c632ec38e290eba714e9627abe9d301c8c8a12ec32c314a2a4b \ - --hash=sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce \ - --hash=sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669 \ - --hash=sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0 \ - --hash=sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018 \ - --hash=sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93 \ - 
--hash=sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe \ - --hash=sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049 \ - --hash=sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a \ - --hash=sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef \ - --hash=sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2 \ - --hash=sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca \ - --hash=sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16 \ - --hash=sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f \ - --hash=sha256:d95bfb53c211b57198bb91c46dd5a2d8018b3af446583aab40074bf7988401cb \ - --hash=sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1 \ - --hash=sha256:ec557499516fc90fd374bf2e32349a2887a876fbf162c160e3c01b6849eaf557 \ - --hash=sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37 \ - --hash=sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7 \ - --hash=sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72 \ - --hash=sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c \ - --hash=sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9 +charset-normalizer==3.4.4 \ + --hash=sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad \ + --hash=sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93 \ + --hash=sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394 \ + --hash=sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89 \ + --hash=sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc \ + --hash=sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86 \ + --hash=sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63 \ + 
--hash=sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d \ + --hash=sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f \ + --hash=sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8 \ + --hash=sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0 \ + --hash=sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505 \ + --hash=sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161 \ + --hash=sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af \ + --hash=sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152 \ + --hash=sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318 \ + --hash=sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72 \ + --hash=sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4 \ + --hash=sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e \ + --hash=sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3 \ + --hash=sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576 \ + --hash=sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c \ + --hash=sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1 \ + --hash=sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8 \ + --hash=sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1 \ + --hash=sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2 \ + --hash=sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44 \ + --hash=sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26 \ + --hash=sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88 \ + --hash=sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016 \ + --hash=sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede \ + 
--hash=sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf \ + --hash=sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a \ + --hash=sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc \ + --hash=sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0 \ + --hash=sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84 \ + --hash=sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db \ + --hash=sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1 \ + --hash=sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7 \ + --hash=sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed \ + --hash=sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8 \ + --hash=sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133 \ + --hash=sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e \ + --hash=sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef \ + --hash=sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14 \ + --hash=sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2 \ + --hash=sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0 \ + --hash=sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d \ + --hash=sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828 \ + --hash=sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f \ + --hash=sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf \ + --hash=sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6 \ + --hash=sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328 \ + --hash=sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090 \ + --hash=sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa \ + 
--hash=sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381 \ + --hash=sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c \ + --hash=sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb \ + --hash=sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc \ + --hash=sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a \ + --hash=sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec \ + --hash=sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc \ + --hash=sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac \ + --hash=sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e \ + --hash=sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313 \ + --hash=sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569 \ + --hash=sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3 \ + --hash=sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d \ + --hash=sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525 \ + --hash=sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894 \ + --hash=sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3 \ + --hash=sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9 \ + --hash=sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a \ + --hash=sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9 \ + --hash=sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14 \ + --hash=sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25 \ + --hash=sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50 \ + --hash=sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf \ + --hash=sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1 \ + 
--hash=sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3 \ + --hash=sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac \ + --hash=sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e \ + --hash=sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815 \ + --hash=sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c \ + --hash=sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6 \ + --hash=sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6 \ + --hash=sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e \ + --hash=sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4 \ + --hash=sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84 \ + --hash=sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69 \ + --hash=sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15 \ + --hash=sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191 \ + --hash=sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0 \ + --hash=sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897 \ + --hash=sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd \ + --hash=sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2 \ + --hash=sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794 \ + --hash=sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d \ + --hash=sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074 \ + --hash=sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3 \ + --hash=sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224 \ + --hash=sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838 \ + --hash=sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a \ + 
--hash=sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d \ + --hash=sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d \ + --hash=sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f \ + --hash=sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8 \ + --hash=sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490 \ + --hash=sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966 \ + --hash=sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9 \ + --hash=sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3 \ + --hash=sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e \ + --hash=sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608 # via requests click==8.3.0 \ --hash=sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc \ @@ -209,191 +277,227 @@ exceptiongroup==1.3.0 \ --hash=sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10 \ --hash=sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88 # via pytest -frozenlist==1.7.0 \ - --hash=sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f \ - --hash=sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b \ - --hash=sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949 \ - --hash=sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615 \ - --hash=sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6 \ - --hash=sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718 \ - --hash=sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df \ - --hash=sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf \ - --hash=sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677 \ - 
--hash=sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5 \ - --hash=sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50 \ - --hash=sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb \ - --hash=sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56 \ - --hash=sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa \ - --hash=sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7 \ - --hash=sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43 \ - --hash=sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f \ - --hash=sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938 \ - --hash=sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c \ - --hash=sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd \ - --hash=sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c \ - --hash=sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e \ - --hash=sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d \ - --hash=sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81 \ - --hash=sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e \ - --hash=sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657 \ - --hash=sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478 \ - --hash=sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2 \ - --hash=sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca \ - --hash=sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e \ - --hash=sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e \ - --hash=sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3 \ - --hash=sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63 \ - 
--hash=sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898 \ - --hash=sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd \ - --hash=sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca \ - --hash=sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2 \ - --hash=sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104 \ - --hash=sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba \ - --hash=sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a \ - --hash=sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1 \ - --hash=sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae \ - --hash=sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577 \ - --hash=sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60 \ - --hash=sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee \ - --hash=sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464 \ - --hash=sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61 \ - --hash=sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86 \ - --hash=sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01 \ - --hash=sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87 \ - --hash=sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb \ - --hash=sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f \ - --hash=sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71 \ - --hash=sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8 \ - --hash=sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d \ - --hash=sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2 \ - --hash=sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00 \ - 
--hash=sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b \ - --hash=sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b \ - --hash=sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146 \ - --hash=sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59 \ - --hash=sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878 \ - --hash=sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08 \ - --hash=sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890 \ - --hash=sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e \ - --hash=sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750 \ - --hash=sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb \ - --hash=sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d \ - --hash=sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30 \ - --hash=sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3 \ - --hash=sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d \ - --hash=sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a \ - --hash=sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8 \ - --hash=sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c \ - --hash=sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1 \ - --hash=sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9 \ - --hash=sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e \ - --hash=sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e \ - --hash=sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384 \ - --hash=sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98 \ - --hash=sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb \ - 
--hash=sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4 \ - --hash=sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65 \ - --hash=sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08 \ - --hash=sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb \ - --hash=sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43 \ - --hash=sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a \ - --hash=sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7 \ - --hash=sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630 \ - --hash=sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d \ - --hash=sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31 \ - --hash=sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d \ - --hash=sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44 \ - --hash=sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319 \ - --hash=sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e \ - --hash=sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025 \ - --hash=sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35 \ - --hash=sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee \ - --hash=sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1 \ - --hash=sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd \ - --hash=sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74 \ - --hash=sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b \ - --hash=sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981 \ - --hash=sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5 +frozenlist==1.8.0 \ + 
--hash=sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686 \ + --hash=sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0 \ + --hash=sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121 \ + --hash=sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd \ + --hash=sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7 \ + --hash=sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c \ + --hash=sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84 \ + --hash=sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d \ + --hash=sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b \ + --hash=sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79 \ + --hash=sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967 \ + --hash=sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f \ + --hash=sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4 \ + --hash=sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7 \ + --hash=sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef \ + --hash=sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9 \ + --hash=sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3 \ + --hash=sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd \ + --hash=sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087 \ + --hash=sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068 \ + --hash=sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7 \ + --hash=sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed \ + --hash=sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b \ + --hash=sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f \ + 
--hash=sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25 \ + --hash=sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe \ + --hash=sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143 \ + --hash=sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e \ + --hash=sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930 \ + --hash=sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37 \ + --hash=sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128 \ + --hash=sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2 \ + --hash=sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675 \ + --hash=sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f \ + --hash=sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746 \ + --hash=sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df \ + --hash=sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8 \ + --hash=sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c \ + --hash=sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0 \ + --hash=sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad \ + --hash=sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82 \ + --hash=sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29 \ + --hash=sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c \ + --hash=sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30 \ + --hash=sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf \ + --hash=sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62 \ + --hash=sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5 \ + --hash=sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383 \ + 
--hash=sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c \ + --hash=sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52 \ + --hash=sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d \ + --hash=sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1 \ + --hash=sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a \ + --hash=sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714 \ + --hash=sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65 \ + --hash=sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95 \ + --hash=sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1 \ + --hash=sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506 \ + --hash=sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888 \ + --hash=sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6 \ + --hash=sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41 \ + --hash=sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459 \ + --hash=sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a \ + --hash=sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608 \ + --hash=sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa \ + --hash=sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8 \ + --hash=sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1 \ + --hash=sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186 \ + --hash=sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6 \ + --hash=sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed \ + --hash=sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e \ + --hash=sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52 \ + 
--hash=sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231 \ + --hash=sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450 \ + --hash=sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496 \ + --hash=sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a \ + --hash=sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3 \ + --hash=sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24 \ + --hash=sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178 \ + --hash=sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695 \ + --hash=sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7 \ + --hash=sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4 \ + --hash=sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e \ + --hash=sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e \ + --hash=sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61 \ + --hash=sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca \ + --hash=sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad \ + --hash=sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b \ + --hash=sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a \ + --hash=sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8 \ + --hash=sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51 \ + --hash=sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011 \ + --hash=sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8 \ + --hash=sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103 \ + --hash=sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b \ + --hash=sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda \ + 
--hash=sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806 \ + --hash=sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042 \ + --hash=sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e \ + --hash=sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b \ + --hash=sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef \ + --hash=sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d \ + --hash=sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567 \ + --hash=sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a \ + --hash=sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2 \ + --hash=sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0 \ + --hash=sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e \ + --hash=sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b \ + --hash=sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d \ + --hash=sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a \ + --hash=sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52 \ + --hash=sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47 \ + --hash=sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1 \ + --hash=sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94 \ + --hash=sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f \ + --hash=sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff \ + --hash=sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822 \ + --hash=sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a \ + --hash=sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11 \ + --hash=sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581 \ + 
--hash=sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51 \ + --hash=sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565 \ + --hash=sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40 \ + --hash=sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92 \ + --hash=sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2 \ + --hash=sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5 \ + --hash=sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4 \ + --hash=sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93 \ + --hash=sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027 \ + --hash=sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd # via # aiohttp # aiosignal -google-api-core==2.25.1 \ - --hash=sha256:8a2a56c1fef82987a524371f99f3bd0143702fecc670c72e600c1cda6bf8dbb7 \ - --hash=sha256:d2aaa0b13c78c61cb3f4282c464c046e45fbd75755683c9c525e6e8f7ed0a5e8 +google-api-core==2.28.1 \ + --hash=sha256:2b405df02d68e68ce0fbc138559e6036559e685159d148ae5861013dc201baf8 \ + --hash=sha256:4021b0f8ceb77a6fb4de6fde4502cecab45062e66ff4f2895169e0b35bc9466c # via -r requirements.in -google-auth==2.40.3 \ - --hash=sha256:1370d4593e86213563547f97a92752fc658456fe4514c809544f330fed45a7ca \ - --hash=sha256:500c3a29adedeb36ea9cf24b8d10858e152f2412e3ca37829b3fa18e33d63b77 +google-auth==2.43.0 \ + --hash=sha256:88228eee5fc21b62a1b5fe773ca15e67778cb07dc8363adcb4a8827b52d81483 \ + --hash=sha256:af628ba6fa493f75c7e9dbe9373d148ca9f4399b5ea29976519e0a3848eddd16 # via google-api-core -googleapis-common-protos[grpc]==1.70.0 \ - --hash=sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257 \ - --hash=sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8 +googleapis-common-protos[grpc]==1.72.0 \ + --hash=sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038 \ + 
--hash=sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5 # via # -r requirements.in # google-api-core # grpc-google-iam-v1 -grpc-google-iam-v1==0.14.2 \ - --hash=sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351 \ - --hash=sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20 +grpc-google-iam-v1==0.14.3 \ + --hash=sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6 \ + --hash=sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389 # via -r requirements.in -grpcio==1.75.0 \ - --hash=sha256:050760fd29c8508844a720f06c5827bb00de8f5e02f58587eb21a4444ad706e5 \ - --hash=sha256:06d22e1d8645e37bc110f4c589cb22c283fd3de76523065f821d6e81de33f5d4 \ - --hash=sha256:0aa795198b28807d28570c0a5f07bb04d5facca7d3f27affa6ae247bbd7f312a \ - --hash=sha256:0b85f4ebe6b56d2a512201bb0e5f192c273850d349b0a74ac889ab5d38959d16 \ - --hash=sha256:0c40f368541945bb664857ecd7400acb901053a1abbcf9f7896361b2cfa66798 \ - --hash=sha256:0c91d5b16eff3cbbe76b7a1eaaf3d91e7a954501e9d4f915554f87c470475c3d \ - --hash=sha256:0fcb77f2d718c1e58cc04ef6d3b51e0fa3b26cf926446e86c7eba105727b6cd4 \ - --hash=sha256:153c5a7655022c3626ad70be3d4c2974cb0967f3670ee49ece8b45b7a139665f \ - --hash=sha256:1bb78d052948d8272c820bb928753f16a614bb2c42fbf56ad56636991b427518 \ - --hash=sha256:1ec2937fd92b5b4598cbe65f7e57d66039f82b9e2b7f7a5f9149374057dde77d \ - --hash=sha256:1ec9cbaec18d9597c718b1ed452e61748ac0b36ba350d558f9ded1a94cc15ec7 \ - --hash=sha256:222b0851e20c04900c63f60153503e918b08a5a0fad8198401c0b1be13c6815b \ - --hash=sha256:266fa6209b68a537b2728bb2552f970e7e78c77fe43c6e9cbbe1f476e9e5c35f \ - --hash=sha256:2e8e752ab5cc0a9c5b949808c000ca7586223be4f877b729f034b912364c3964 \ - --hash=sha256:352dbdf25495eef584c8de809db280582093bc3961d95a9d78f0dfb7274023a2 \ - --hash=sha256:36764a4ad9dc1eb891042fab51e8cdf7cc014ad82cee807c10796fb708455041 \ - --hash=sha256:38d665f44b980acdbb2f0e1abf67605ba1899f4d2443908df9ec8a6f26d2ed88 \ - 
--hash=sha256:3a6788b30aa8e6f207c417874effe3f79c2aa154e91e78e477c4825e8b431ce0 \ - --hash=sha256:437eeb16091d31498585d73b133b825dc80a8db43311e332c08facf820d36894 \ - --hash=sha256:494dcbade5606128cb9f530ce00331a90ecf5e7c5b243d373aebdb18e503c346 \ - --hash=sha256:50a6e43a9adc6938e2a16c9d9f8a2da9dd557ddd9284b73b07bd03d0e098d1e9 \ - --hash=sha256:53067c590ac3638ad0c04272f2a5e7e32a99fec8824c31b73bc3ef93160511fa \ - --hash=sha256:55a2d5ae79cd0f68783fb6ec95509be23746e3c239290b2ee69c69a38daa961a \ - --hash=sha256:55dfb9122973cc69520b23d39867726722cafb32e541435707dc10249a1bdbc6 \ - --hash=sha256:585147859ff4603798e92605db28f4a97c821c69908e7754c44771c27b239bbd \ - --hash=sha256:597340a41ad4b619aaa5c9b94f7e6ba4067885386342ab0af039eda945c255cd \ - --hash=sha256:678b649171f229fb16bda1a2473e820330aa3002500c4f9fd3a74b786578e90f \ - --hash=sha256:68c95b1c1e3bf96ceadf98226e9dfe2bc92155ce352fa0ee32a1603040e61856 \ - --hash=sha256:6b365f37a9c9543a9e91c6b4103d68d38d5bcb9965b11d5092b3c157bd6a5ee7 \ - --hash=sha256:725e67c010f63ef17fc052b261004942763c0b18dcd84841e6578ddacf1f9d10 \ - --hash=sha256:78dcc025a144319b66df6d088bd0eda69e1719eb6ac6127884a36188f336df19 \ - --hash=sha256:7a9337ac4ce61c388e02019d27fa837496c4b7837cbbcec71b05934337e51531 \ - --hash=sha256:7ee5ee42bfae8238b66a275f9ebcf6f295724375f2fa6f3b52188008b6380faf \ - --hash=sha256:7f89d6d0cd43170a80ebb4605cad54c7d462d21dc054f47688912e8bf08164af \ - --hash=sha256:851194eec47755101962da423f575ea223c9dd7f487828fe5693920e8745227e \ - --hash=sha256:9146e40378f551eed66c887332afc807fcce593c43c698e21266a4227d4e20d2 \ - --hash=sha256:91fbfc43f605c5ee015c9056d580a70dd35df78a7bad97e05426795ceacdb59f \ - --hash=sha256:9880c323595d851292785966cadb6c708100b34b163cab114e3933f5773cba2d \ - --hash=sha256:9dc4a02796394dd04de0b9673cb79a78901b90bb16bf99ed8cb528c61ed9372e \ - --hash=sha256:b989e8b09489478c2d19fecc744a298930f40d8b27c3638afbfe84d22f36ce4e \ - --hash=sha256:bb58e38a50baed9b21492c4b3f3263462e4e37270b7ea152fc10124b4bd1c318 \ - 
--hash=sha256:c2c39984e846bd5da45c5f7bcea8fafbe47c98e1ff2b6f40e57921b0c23a52d0 \ - --hash=sha256:c8cfc780b7a15e06253aae5f228e1e84c0d3c4daa90faf5bc26b751174da4bf9 \ - --hash=sha256:ca123db0813eef80625a4242a0c37563cb30a3edddebe5ee65373854cf187215 \ - --hash=sha256:cb6c5b075c2d092f81138646a755f0dad94e4622300ebef089f94e6308155d82 \ - --hash=sha256:dce15597ca11913b78e1203c042d5723e3ea7f59e7095a1abd0621be0e05b895 \ - --hash=sha256:eafbe3563f9cb378370a3fa87ef4870539cf158124721f3abee9f11cd8162460 \ - --hash=sha256:ee16e232e3d0974750ab5f4da0ab92b59d6473872690b5e40dcec9a22927f22e \ - --hash=sha256:fa35ccd9501ffdd82b861809cbfc4b5b13f4b4c5dc3434d2d9170b9ed38a9054 \ - --hash=sha256:fb64dd62face3d687a7b56cd881e2ea39417af80f75e8b36f0f81dfd93071651 \ - --hash=sha256:ffc33e67cab6141c54e75d85acd5dec616c5095a957ff997b4330a6395aa9b51 +grpcio==1.76.0 \ + --hash=sha256:035d90bc79eaa4bed83f524331d55e35820725c9fbb00ffa1904d5550ed7ede3 \ + --hash=sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280 \ + --hash=sha256:063065249d9e7e0782d03d2bca50787f53bd0fb89a67de9a7b521c4a01f1989b \ + --hash=sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd \ + --hash=sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465 \ + --hash=sha256:0aaa82d0813fd4c8e589fac9b65d7dd88702555f702fb10417f96e2a2a6d4c0f \ + --hash=sha256:0b7604868b38c1bfd5cf72d768aedd7db41d78cb6a4a18585e33fb0f9f2363fd \ + --hash=sha256:0c37db8606c258e2ee0c56b78c62fc9dee0e901b5dbdcf816c2dd4ad652b8b0c \ + --hash=sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc \ + --hash=sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054 \ + --hash=sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba \ + --hash=sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03 \ + --hash=sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2 \ + 
--hash=sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a \ + --hash=sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749 \ + --hash=sha256:3bf0f392c0b806905ed174dcd8bdd5e418a40d5567a05615a030a5aeddea692d \ + --hash=sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb \ + --hash=sha256:40ad3afe81676fd9ec6d9d406eda00933f218038433980aa19d401490e46ecde \ + --hash=sha256:4215d3a102bd95e2e11b5395c78562967959824156af11fa93d18fdd18050990 \ + --hash=sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958 \ + --hash=sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468 \ + --hash=sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc \ + --hash=sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09 \ + --hash=sha256:49ce47231818806067aea3324d4bf13825b658ad662d3b25fada0bdad9b8a6af \ + --hash=sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980 \ + --hash=sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d \ + --hash=sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f \ + --hash=sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882 \ + --hash=sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae \ + --hash=sha256:65a20de41e85648e00305c1bb09a3598f840422e522277641145a32d42dcefcc \ + --hash=sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77 \ + --hash=sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e \ + --hash=sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73 \ + --hash=sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8 \ + --hash=sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3 \ + --hash=sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da \ + --hash=sha256:8cc3309d8e08fd79089e13ed4819d0af72aa935dd8f435a195fd152796752ff2 \ + 
--hash=sha256:8ebe63ee5f8fa4296b1b8cfc743f870d10e902ca18afc65c68cf46fd39bb0783 \ + --hash=sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397 \ + --hash=sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e \ + --hash=sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42 \ + --hash=sha256:971fd5a1d6e62e00d945423a567e42eb1fa678ba89072832185ca836a94daaa6 \ + --hash=sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6 \ + --hash=sha256:9d9adda641db7207e800a7f089068f6f645959f2df27e870ee81d44701dd9db3 \ + --hash=sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11 \ + --hash=sha256:a6ae758eb08088d36812dd5d9af7a9859c05b1e0f714470ea243694b49278e7b \ + --hash=sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c \ + --hash=sha256:acab0277c40eff7143c2323190ea57b9ee5fd353d8190ee9652369fae735668a \ + --hash=sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a \ + --hash=sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347 \ + --hash=sha256:d099566accf23d21037f18a2a63d323075bebace807742e4b0ac210971d4dd70 \ + --hash=sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4 \ + --hash=sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00 \ + --hash=sha256:e6d1db20594d9daba22f90da738b1a0441a7427552cc6e2e3d1297aeddc00378 \ + --hash=sha256:ebea5cc3aa8ea72e04df9913492f9a96d9348db876f9dda3ad729cfedf7ac416 \ + --hash=sha256:ebebf83299b0cb1721a8859ea98f3a77811e35dce7609c5c963b9ad90728f886 \ + --hash=sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48 \ + --hash=sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8 \ + --hash=sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8 \ + --hash=sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc \ + --hash=sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62 # via # 
googleapis-common-protos # grpc-google-iam-v1 -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 +idna==3.11 \ + --hash=sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea \ + --hash=sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902 # via # requests # yarl @@ -401,257 +505,321 @@ inflection==0.5.1 \ --hash=sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417 \ --hash=sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2 # via -r requirements.in -iniconfig==2.1.0 \ - --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ - --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 +iniconfig==2.3.0 \ + --hash=sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730 \ + --hash=sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12 # via pytest jinja2==3.1.6 \ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 # via -r requirements.in -libcst==1.8.4 \ - --hash=sha256:0352c7d662c89243e730a28edf41577f87e28649c18ee365dd373c5fbdab2434 \ - --hash=sha256:056733760ba5ac1fd4cd518cddd5a43b3adbe2e0f6c7ce02532a114f7cd5d85b \ - --hash=sha256:062e424042c36a102abd11d8e9e27ac6be68e1a934b0ecfc9fb8fea017240d2f \ - --hash=sha256:074a3b17e270237fb36d3b94d7492fb137cb74217674484ba25e015e8d3d8bdc \ - --hash=sha256:114343271f70a79e6d08bc395f5dfa150227341fab646cc0a58e80550e7659b7 \ - --hash=sha256:14bda1e4ea0b04d3926d41f6dafbfd311a951b75a60fe0d79bb5a8249c1cef5b \ - --hash=sha256:1d468514a21cf3444dc3f3a4b1effc6c05255c98cc79e02af394652d260139f0 \ - --hash=sha256:259737faf90552a0589d95393dcaa3d3028be03ab3ea87478d46a1a4f922dd91 \ - --hash=sha256:27eeb16edb7dc0711d67e28bb8c0288e4147210aeb2434f08c16ac5db6b559e5 
\ - --hash=sha256:2b1e570ba816da408b5ee40ac479b34e56d995bf32dcca6f0ddb3d69b08e77de \ - --hash=sha256:2c6d8f7087e9eaf005efde573f3f36d1d40366160155c195a6c4230d4c8a5839 \ - --hash=sha256:2d71e7e5982776f78cca9102286bb0895ef6f7083f76c0c9bc5ba4e9e40aee38 \ - --hash=sha256:2e156760fc741bbf2fa68f4e3b15f019e924ea852f02276d0a53b7375cf70445 \ - --hash=sha256:2e24d11a1be0b1791f7bace9d406f5a70b8691ef77be377b606950803de4657d \ - --hash=sha256:2fcff2130824f2cb5f4fd9c4c74fb639c5f02bc4228654461f6dc6b1006f20c0 \ - --hash=sha256:33664117fcb2913fdbd7de07a009193b660a16e7af18f7c1b4449e428f3b0f95 \ - --hash=sha256:3ad7f0a32ddcdff00a3eddfd35cfd8485d9f357a32e4c67558476570199f808f \ - --hash=sha256:3de575f0b5b466f2e9656b963f5848103cc518c6f3581902c6f430b07864584f \ - --hash=sha256:3eeba4edb40b2291c2460fe8d7e43f47e5fcc33f186675db5d364395adca3401 \ - --hash=sha256:43cbb6b41bc2c4785136f59a66692287d527aeb022789c4af44ad6e85b7b2baa \ - --hash=sha256:4a718e5f6b398a07ca5d533e6593c1590d69fe65c539323281959733d6d541dd \ - --hash=sha256:4b1cbadd988fee59b25ea154708cfed99cfaf45f9685707be422ad736371a9fe \ - --hash=sha256:52c9376ba11ede5430e40aa205101dfc41202465103c6540f24591f898afb3d6 \ - --hash=sha256:5db0b484670aac7ea442213afaa9addb1de0d9540a34ad44d376bec12242bc3a \ - --hash=sha256:5f167bf83dce662c9b499f1ea078ec2f2fee138e80f7d7dbd59c89ed28dc935f \ - --hash=sha256:64cc34d74c9543b30ec3d7481dd644cb1bb3888076b486592d7fa0f22632f1c6 \ - --hash=sha256:65364c214251ed5720f3f6d0c4ef1338aac91ad4bbc5d30253eac21832b0943a \ - --hash=sha256:6840e4011b583e9b7a71c00e7ab4281aea7456877b3ea6ecedb68a39a000bc64 \ - --hash=sha256:69b672c1afac5fe00d689f585ba57ac5facc4632f39b977d4b3e4711571c76e2 \ - --hash=sha256:6cc8b7e33f6c4677e220dd7025e1e980da4d3f497b9b8ee0320e36dd54597f68 \ - --hash=sha256:71e12101ef2a6e05b7610badb2bfa597379289f1408e305a8d19faacdb872f47 \ - --hash=sha256:7832ee448fbdf18884a1f9af5fba1be6d5e98deb560514d92339fd6318aef651 \ - --hash=sha256:783f52b7c8d82046f0d93812f62a25eb82c3834f198e6cbfd5bb03ca68b593c8 \ - 
--hash=sha256:7fe97d432d95b6bcb1694a6d0fa7e07dde8fa687a637958126410ee2ced94b81 \ - --hash=sha256:846aad04bac624a42d182add526d019e417e6a2b8a4c0bf690d32f9e1f3075ff \ - --hash=sha256:870a49df8575c11ea4f5319d54750f95d2d06370a263bd42d924a9cf23cf0cbe \ - --hash=sha256:873dd4e8b896f7cb0e78118badda55ec1f42e9301a4a948cc438955ff3ae2257 \ - --hash=sha256:8e7baaa6f01b6b6ea4b28d60204fddc679a3cd56d312beee200bd5f8f9711f0b \ - --hash=sha256:8e8d5158f976a5ee140ad0d3391e1a1b84b2ce5da62f16e48feab4bc21b91967 \ - --hash=sha256:929798ca38ea76a5056f725221d66c6923e749caa9fa7f4cc86e914a3698493d \ - --hash=sha256:93c76ab41d736b66d6fb3df32cd33184eed17666d7dc3ce047cf7ccdfe80b5b1 \ - --hash=sha256:9a5cd7beef667e5de3c5fb0ec387dc19aeda5cd4606ff541d0e8613bb3ef3b23 \ - --hash=sha256:9be5b1b7d416900ff9bcdb4945692e6252fdcbd95514e98439f81568568c9e02 \ - --hash=sha256:a179c712f38acb85e81d8949e80e05a422c92dcf5a00d8f4976f7e547a9f0916 \ - --hash=sha256:a334dd11cdea34275df91c2ae9cc5933ec7e0ad5698264966708d637d110b627 \ - --hash=sha256:a4270123c988e130cec94bfe1b54d34784a40b34b2d5ac0507720c1272bd3209 \ - --hash=sha256:a65e3c409ef16ae369600d085d23a3897d4fccf4fdcc09294a402c513ac35906 \ - --hash=sha256:a90c80e4d89222e11c7a734bc1b7f930bc2aba7750ad149bde1b136f839ea788 \ - --hash=sha256:b376ef7fa30bef611d4fb32af1da0e767b801b00322028a874ab3a441686b6a9 \ - --hash=sha256:b69e94625702825309fd9e50760e77a5a60bd1e7a8e039862c8dd3011a6e1530 \ - --hash=sha256:c9c775bc473225a0ad8422150fd9cf18ed2eebd7040996772937ac558f294d6c \ - --hash=sha256:cb188ebd4114144e14f6beb5499e43bebd0ca3ce7f2beb20921d49138c67b814 \ - --hash=sha256:d011d731c2e673fbd9c84794418230a913ae3c98fc86f27814612b6b6d53d26b \ - --hash=sha256:d130f3e2d40c5f48cbbc804710ddf5b4db9dd7c0118f3b35f109164a555860d2 \ - --hash=sha256:d3d4111f971632e9ddf8191aeef4576595e18ef3fa7b3016bfe15a08fa8554df \ - --hash=sha256:e4c5055e255d12745c7cc60fb5fb31c0f82855864c15dc9ad33a44f829b92600 \ - --hash=sha256:e6f309c0f42e323c527d8c9007f583fd1668e45884208184a70644d916f27829 \ - 
--hash=sha256:ea74c56cb11a1fdca9f8ab258965adce23e049ef525fdcc5c254a093e3de25cb \ - --hash=sha256:eff724c17df10e059915000eaf59f4e79998b66a7d35681e934a9a48667df931 \ - --hash=sha256:f0f105d32c49baf712df2be360d496de67a2375bcf4e9707e643b7efc2f9a55a \ - --hash=sha256:f5bd0bcdd2a8da9dad47d36d71757d8ba87baf887ae6982e2cb8621846610c49 \ - --hash=sha256:fa870f34018c7241ee9227723cac0787599a2a8a2bfd53eacfbbe1ea1a272ae6 \ - --hash=sha256:fbadca1bc31f696875c955080c407a40b2d1aa7f79ca174a65dcb0542a57db6c \ - --hash=sha256:fceb17616f1afe528c88243e3e7f78f84f0cc287463f04f3c1243e20a469e869 \ - --hash=sha256:fedfd33e5dda2200d582554e6476626d4706aa1fa2794bfb271879f8edff89b9 +libcst==1.8.6 \ + --hash=sha256:04030ea4d39d69a65873b1d4d877def1c3951a7ada1824242539e399b8763d30 \ + --hash=sha256:06fc56335a45d61b7c1b856bfab4587b84cfe31e9d6368f60bb3c9129d900f58 \ + --hash=sha256:089c58e75cb142ec33738a1a4ea7760a28b40c078ab2fd26b270dac7d2633a4d \ + --hash=sha256:08bd63a8ce674be431260649e70fca1d43f1554f1591eac657f403ff8ef82c7a \ + --hash=sha256:0c13d5bd3d8414a129e9dccaf0e5785108a4441e9b266e1e5e9d1f82d1b943c9 \ + --hash=sha256:0cbe17067055829607c5ba4afa46bfa4d0dd554c0b5a583546e690b7367a29b6 \ + --hash=sha256:16cfe0cfca5fd840e1fb2c30afb628b023d3085b30c3484a79b61eae9d6fe7ba \ + --hash=sha256:1a3a5e4ee870907aa85a4076c914ae69066715a2741b821d9bf16f9579de1105 \ + --hash=sha256:1dc3b897c8b0f7323412da3f4ad12b16b909150efc42238e19cbf19b561cc330 \ + --hash=sha256:203ec2a83f259baf686b9526268cd23d048d38be5589594ef143aee50a4faf7e \ + --hash=sha256:207481197afd328aa91d02670c15b48d0256e676ce1ad4bafb6dc2b593cc58f1 \ + --hash=sha256:25eaeae6567091443b5374b4c7d33a33636a2d58f5eda02135e96fc6c8807786 \ + --hash=sha256:25fc7a1303cad7639ad45ec38c06789b4540b7258e9a108924aaa2c132af4aca \ + --hash=sha256:2f04d3672bde1704f383a19e8f8331521abdbc1ed13abb349325a02ac56e5012 \ + --hash=sha256:351ab879c2fd20d9cb2844ed1ea3e617ed72854d3d1e2b0880ede9c3eea43ba8 \ + 
--hash=sha256:36473e47cb199b7e6531d653ee6ffed057de1d179301e6c67f651f3af0b499d6 \ + --hash=sha256:3649a813660fbffd7bc24d3f810b1f75ac98bd40d9d6f56d1f0ee38579021073 \ + --hash=sha256:375965f34cc6f09f5f809244d3ff9bd4f6cb6699f571121cebce53622e7e0b86 \ + --hash=sha256:3a926a4b42015ee24ddfc8ae940c97bd99483d286b315b3ce82f3bafd9f53474 \ + --hash=sha256:3f4fbb7f569e69fd9e89d9d9caa57ca42c577c28ed05062f96a8c207594e75b8 \ + --hash=sha256:42a4f68121e2e9c29f49c97f6154e8527cd31021809cc4a941c7270aa64f41aa \ + --hash=sha256:44f38139fa95e488db0f8976f9c7ca39a64d6bc09f2eceef260aa1f6da6a2e42 \ + --hash=sha256:455f49a93aea4070132c30ebb6c07c2dea0ba6c1fde5ffde59fc45dbb9cfbe4b \ + --hash=sha256:4d7bbdd35f3abdfb5ac5d1a674923572dab892b126a58da81ff2726102d6ec2e \ + --hash=sha256:4fc3fef8a2c983e7abf5d633e1884c5dd6fa0dcb8f6e32035abd3d3803a3a196 \ + --hash=sha256:536567441182a62fb706e7aa954aca034827b19746832205953b2c725d254a93 \ + --hash=sha256:5432e785322aba3170352f6e72b32bea58d28abd141ac37cc9b0bf6b7c778f58 \ + --hash=sha256:55ec021a296960c92e5a33b8d93e8ad4182b0eab657021f45262510a58223de1 \ + --hash=sha256:59a7e388c57d21d63722018978a8ddba7b176e3a99bd34b9b84a576ed53f2978 \ + --hash=sha256:5dcaaebc835dfe5755bc85f9b186fb7e2895dda78e805e577fef1011d51d5a5c \ + --hash=sha256:6366ab2107425bf934b0c83311177f2a371bfc757ee8c6ad4a602d7cbcc2f363 \ + --hash=sha256:6421a930b028c5ef4a943b32a5a78b7f1bf15138214525a2088f11acbb7d3d64 \ + --hash=sha256:6609291c41f7ad0bac570bfca5af8fea1f4a27987d30a1fa8b67fe5e67e6c78d \ + --hash=sha256:6a65f844d813ab4ef351443badffa0ae358f98821561d19e18b3190f59e71996 \ + --hash=sha256:6aa11df6c58812f731172b593fcb485d7ba09ccc3b52fea6c7f26a43377dc748 \ + --hash=sha256:6b23d14a7fc0addd9795795763af26b185deb7c456b1e7cc4d5228e69dab5ce8 \ + --hash=sha256:6cad63e3a26556b020b634d25a8703b605c0e0b491426b3e6b9e12ed20f09100 \ + --hash=sha256:6d8b67874f2188399a71a71731e1ba2d1a2c3173b7565d1cc7ffb32e8fbaba5b \ + --hash=sha256:72cca15800ffc00ba25788e4626189fe0bc5fe2a0c1cb4294bce2e4df21cc073 \ + 
--hash=sha256:7445479ebe7d1aff0ee094ab5a1c7718e1ad78d33e3241e1a1ec65dcdbc22ffb \ + --hash=sha256:7f04febcd70e1e67917be7de513c8d4749d2e09206798558d7fe632134426ea4 \ + --hash=sha256:8066f1b70f21a2961e96bedf48649f27dfd5ea68be5cd1bed3742b047f14acde \ + --hash=sha256:819c8081e2948635cab60c603e1bbdceccdfe19104a242530ad38a36222cb88f \ + --hash=sha256:85b7025795b796dea5284d290ff69de5089fc8e989b25d6f6f15b6800be7167f \ + --hash=sha256:87e74f7d7dfcba9efa91127081e22331d7c42515f0a0ac6e81d4cf2c3ed14661 \ + --hash=sha256:8a434c521fadaf9680788b50d5c21f4048fa85ed19d7d70bd40549fbaeeecab1 \ + --hash=sha256:98fa1ca321c81fb1f02e5c43f956ca543968cc1a30b264fd8e0a2e1b0b0bf106 \ + --hash=sha256:a20c5182af04332cc94d8520792befda06d73daf2865e6dddc5161c72ea92cb9 \ + --hash=sha256:b0d8c364c44ae343937f474b2e492c1040df96d94530377c2f9263fb77096e4f \ + --hash=sha256:b188e626ce61de5ad1f95161b8557beb39253de4ec74fc9b1f25593324a0279c \ + --hash=sha256:b6c1248cc62952a3a005792b10cdef2a4e130847be9c74f33a7d617486f7e532 \ + --hash=sha256:ba9ab2b012fbd53b36cafd8f4440a6b60e7e487cd8b87428e57336b7f38409a4 \ + --hash=sha256:bb9b4077bdf8857b2483879cbbf70f1073bc255b057ec5aac8a70d901bb838e9 \ + --hash=sha256:bdb14bc4d4d83a57062fed2c5da93ecb426ff65b0dc02ddf3481040f5f074a82 \ + --hash=sha256:bff00e1c766658adbd09a175267f8b2f7616e5ee70ce45db3d7c4ce6d9f6bec7 \ + --hash=sha256:c0a0cc80aebd8aa15609dd4d330611cbc05e9b4216bcaeabba7189f99ef07c28 \ + --hash=sha256:c188d06b583900e662cd791a3f962a8c96d3dfc9b36ea315be39e0a4c4792ebf \ + --hash=sha256:c41c76e034a1094afed7057023b1d8967f968782433f7299cd170eaa01ec033e \ + --hash=sha256:c9d7aeafb1b07d25a964b148c0dda9451efb47bbbf67756e16eeae65004b0eb5 \ + --hash=sha256:cb2679ef532f9fa5be5c5a283b6357cb6e9888a8dd889c4bb2b01845a29d8c0b \ + --hash=sha256:da95b38693b989eaa8d32e452e8261cfa77fe5babfef1d8d2ac25af8c4aa7e6d \ + --hash=sha256:e00e275d4ba95d4963431ea3e409aa407566a74ee2bf309a402f84fc744abe47 \ + --hash=sha256:f1472eeafd67cdb22544e59cf3bfc25d23dc94058a68cf41f6654ff4fcb92e09 \ + 
--hash=sha256:f729c37c9317126da9475bdd06a7208eb52fcbd180a6341648b45a56b4ba708b \ + --hash=sha256:fea5c7fa26556eedf277d4f72779c5ede45ac3018650721edd77fd37ccd4a2d4 # via -r requirements.in -markupsafe==3.0.2 \ - --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ - --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ - --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ - --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ - --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ - --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ - --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ - --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ - --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ - --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ - --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ - --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ - --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ - --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ - --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ - --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ - --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ - --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ - --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ - --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ - --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ - 
--hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ - --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ - --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ - --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ - --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ - --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ - --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ - --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ - --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ - --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ - --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ - --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ - --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ - --hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ - --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ - --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ - --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ - --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ - --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ - --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ - --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ - --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ - --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ - --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ - 
--hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ - --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ - --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ - --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ - --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ - --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ - --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ - --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ - --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ - --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ - --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ - --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ - --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ - --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ - --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ - --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 +MarkupSafe==3.0.3 \ + --hash=sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f \ + --hash=sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a \ + --hash=sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf \ + --hash=sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19 \ + --hash=sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf \ + --hash=sha256:0f4b68347f8c5eab4a13419215bdfd7f8c9b19f2b25520968adfad23eb0ce60c \ + --hash=sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175 \ + 
--hash=sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219 \ + --hash=sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb \ + --hash=sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6 \ + --hash=sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab \ + --hash=sha256:15d939a21d546304880945ca1ecb8a039db6b4dc49b2c5a400387cdae6a62e26 \ + --hash=sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1 \ + --hash=sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce \ + --hash=sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218 \ + --hash=sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634 \ + --hash=sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695 \ + --hash=sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad \ + --hash=sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73 \ + --hash=sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c \ + --hash=sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe \ + --hash=sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa \ + --hash=sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559 \ + --hash=sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa \ + --hash=sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37 \ + --hash=sha256:3537e01efc9d4dccdf77221fb1cb3b8e1a38d5428920e0657ce299b20324d758 \ + --hash=sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f \ + --hash=sha256:38664109c14ffc9e7437e86b4dceb442b0096dfe3541d7864d9cbe1da4cf36c8 \ + --hash=sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d \ + --hash=sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c \ + --hash=sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97 \ + 
--hash=sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a \ + --hash=sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19 \ + --hash=sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9 \ + --hash=sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9 \ + --hash=sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc \ + --hash=sha256:591ae9f2a647529ca990bc681daebdd52c8791ff06c2bfa05b65163e28102ef2 \ + --hash=sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4 \ + --hash=sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354 \ + --hash=sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50 \ + --hash=sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698 \ + --hash=sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9 \ + --hash=sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b \ + --hash=sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc \ + --hash=sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115 \ + --hash=sha256:7c3fb7d25180895632e5d3148dbdc29ea38ccb7fd210aa27acbd1201a1902c6e \ + --hash=sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485 \ + --hash=sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f \ + --hash=sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12 \ + --hash=sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025 \ + --hash=sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009 \ + --hash=sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d \ + --hash=sha256:949b8d66bc381ee8b007cd945914c721d9aba8e27f71959d750a46f7c282b20b \ + --hash=sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a \ + --hash=sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5 \ + 
--hash=sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f \ + --hash=sha256:a320721ab5a1aba0a233739394eb907f8c8da5c98c9181d1161e77a0c8e36f2d \ + --hash=sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1 \ + --hash=sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287 \ + --hash=sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6 \ + --hash=sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f \ + --hash=sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581 \ + --hash=sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed \ + --hash=sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b \ + --hash=sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c \ + --hash=sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026 \ + --hash=sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8 \ + --hash=sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676 \ + --hash=sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6 \ + --hash=sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e \ + --hash=sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d \ + --hash=sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d \ + --hash=sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01 \ + --hash=sha256:df2449253ef108a379b8b5d6b43f4b1a8e81a061d6537becd5582fba5f9196d7 \ + --hash=sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419 \ + --hash=sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795 \ + --hash=sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1 \ + --hash=sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5 \ + --hash=sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d \ + 
--hash=sha256:e8fc20152abba6b83724d7ff268c249fa196d8259ff481f3b1476383f8f24e42 \ + --hash=sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe \ + --hash=sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda \ + --hash=sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e \ + --hash=sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737 \ + --hash=sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523 \ + --hash=sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591 \ + --hash=sha256:f71a396b3bf33ecaa1626c255855702aca4d3d9fea5e051b41ac59a9c1c41edc \ + --hash=sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a \ + --hash=sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50 # via # -r requirements.in # jinja2 -multidict==6.6.4 \ - --hash=sha256:01368e3c94032ba6ca0b78e7ccb099643466cf24f8dc8eefcfdc0571d56e58f9 \ - --hash=sha256:01d0959807a451fe9fdd4da3e139cb5b77f7328baf2140feeaf233e1d777b729 \ - --hash=sha256:024ce601f92d780ca1617ad4be5ac15b501cc2414970ffa2bb2bbc2bd5a68fa5 \ - --hash=sha256:047d9425860a8c9544fed1b9584f0c8bcd31bcde9568b047c5e567a1025ecd6e \ - --hash=sha256:0a2088c126b6f72db6c9212ad827d0ba088c01d951cee25e758c450da732c138 \ - --hash=sha256:0af5f9dee472371e36d6ae38bde009bd8ce65ac7335f55dcc240379d7bed1495 \ - --hash=sha256:0b2e886624be5773e69cf32bcb8534aecdeb38943520b240fed3d5596a430f2f \ - --hash=sha256:0c5cbac6b55ad69cb6aa17ee9343dfbba903118fd530348c330211dc7aa756d1 \ - --hash=sha256:0e0558693063c75f3d952abf645c78f3c5dfdd825a41d8c4d8156fc0b0da6e7e \ - --hash=sha256:0f37bed7319b848097085d7d48116f545985db988e2256b2e6f00563a3416ee6 \ - --hash=sha256:0ffb87be160942d56d7b87b0fdf098e81ed565add09eaa1294268c7f3caac4c8 \ - --hash=sha256:105245cc6b76f51e408451a844a54e6823bbd5a490ebfe5bdfc79798511ceded \ - --hash=sha256:10a68a9191f284fe9d501fef4efe93226e74df92ce7a24e301371293bd4918ae \ - 
--hash=sha256:14616a30fe6d0a48d0a48d1a633ab3b8bec4cf293aac65f32ed116f620adfd69 \ - --hash=sha256:14754eb72feaa1e8ae528468f24250dd997b8e2188c3d2f593f9eba259e4b364 \ - --hash=sha256:163c7ea522ea9365a8a57832dea7618e6cbdc3cd75f8c627663587459a4e328f \ - --hash=sha256:17d2cbbfa6ff20821396b25890f155f40c986f9cfbce5667759696d83504954f \ - --hash=sha256:190766dac95aab54cae5b152a56520fd99298f32a1266d66d27fdd1b5ac00f4e \ - --hash=sha256:1a0ccbfe93ca114c5d65a2471d52d8829e56d467c97b0e341cf5ee45410033b3 \ - --hash=sha256:21f216669109e02ef3e2415ede07f4f8987f00de8cdfa0cc0b3440d42534f9f0 \ - --hash=sha256:22e38b2bc176c5eb9c0a0e379f9d188ae4cd8b28c0f53b52bce7ab0a9e534657 \ - --hash=sha256:27d8f8e125c07cb954e54d75d04905a9bba8a439c1d84aca94949d4d03d8601c \ - --hash=sha256:2a4c6875c37aae9794308ec43e3530e4aa0d36579ce38d89979bbf89582002bb \ - --hash=sha256:34d8f2a5ffdceab9dcd97c7a016deb2308531d5f0fced2bb0c9e1df45b3363d7 \ - --hash=sha256:350f6b0fe1ced61e778037fdc7613f4051c8baf64b1ee19371b42a3acdb016a0 \ - --hash=sha256:37b7187197da6af3ee0b044dbc9625afd0c885f2800815b228a0e70f9a7f473d \ - --hash=sha256:38a0956dd92d918ad5feff3db8fcb4a5eb7dba114da917e1a88475619781b57b \ - --hash=sha256:3ba5aaf600edaf2a868a391779f7a85d93bed147854925f34edd24cc70a3e141 \ - --hash=sha256:3bb0eae408fa1996d87247ca0d6a57b7fc1dcf83e8a5c47ab82c558c250d4adf \ - --hash=sha256:3f8e2384cb83ebd23fd07e9eada8ba64afc4c759cd94817433ab8c81ee4b403f \ - --hash=sha256:40cd05eaeb39e2bc8939451f033e57feaa2ac99e07dbca8afe2be450a4a3b6cf \ - --hash=sha256:43868297a5759a845fa3a483fb4392973a95fb1de891605a3728130c52b8f40f \ - --hash=sha256:452ff5da78d4720d7516a3a2abd804957532dd69296cb77319c193e3ffb87e24 \ - --hash=sha256:467fe64138cfac771f0e949b938c2e1ada2b5af22f39692aa9258715e9ea613a \ - --hash=sha256:49517449b58d043023720aa58e62b2f74ce9b28f740a0b5d33971149553d72aa \ - --hash=sha256:497a2954adc25c08daff36f795077f63ad33e13f19bfff7736e72c785391534f \ - --hash=sha256:4a1fb393a2c9d202cb766c76208bd7945bc194eba8ac920ce98c6e458f0b524b \ - 
--hash=sha256:4bb7627fd7a968f41905a4d6343b0d63244a0623f006e9ed989fa2b78f4438a0 \ - --hash=sha256:4d09384e75788861e046330308e7af54dd306aaf20eb760eb1d0de26b2bea2cb \ - --hash=sha256:4fefd4a815e362d4f011919d97d7b4a1e566f1dde83dc4ad8cfb5b41de1df68d \ - --hash=sha256:52e3c8d43cdfff587ceedce9deb25e6ae77daba560b626e97a56ddcad3756879 \ - --hash=sha256:55624b3f321d84c403cb7d8e6e982f41ae233d85f85db54ba6286f7295dc8a9c \ - --hash=sha256:56c6b3652f945c9bc3ac6c8178cd93132b8d82dd581fcbc3a00676c51302bc1a \ - --hash=sha256:580b643b7fd2c295d83cad90d78419081f53fd532d1f1eb67ceb7060f61cff0d \ - --hash=sha256:59e8d40ab1f5a8597abcef00d04845155a5693b5da00d2c93dbe88f2050f2812 \ - --hash=sha256:5df8afd26f162da59e218ac0eefaa01b01b2e6cd606cffa46608f699539246da \ - --hash=sha256:630f70c32b8066ddfd920350bc236225814ad94dfa493fe1910ee17fe4365cbb \ - --hash=sha256:66247d72ed62d5dd29752ffc1d3b88f135c6a8de8b5f63b7c14e973ef5bda19e \ - --hash=sha256:6865f6d3b7900ae020b495d599fcf3765653bc927951c1abb959017f81ae8287 \ - --hash=sha256:6bf2f10f70acc7a2446965ffbc726e5fc0b272c97a90b485857e5c70022213eb \ - --hash=sha256:6c84378acd4f37d1b507dfa0d459b449e2321b3ba5f2338f9b085cf7a7ba95eb \ - --hash=sha256:6d46a180acdf6e87cc41dc15d8f5c2986e1e8739dc25dbb7dac826731ef381a4 \ - --hash=sha256:756989334015e3335d087a27331659820d53ba432befdef6a718398b0a8493ad \ - --hash=sha256:75aa52fba2d96bf972e85451b99d8e19cc37ce26fd016f6d4aa60da9ab2b005f \ - --hash=sha256:7dd57515bebffd8ebd714d101d4c434063322e4fe24042e90ced41f18b6d3395 \ - --hash=sha256:7f683a551e92bdb7fac545b9c6f9fa2aebdeefa61d607510b3533286fcab67f5 \ - --hash=sha256:87a32d20759dc52a9e850fe1061b6e41ab28e2998d44168a8a341b99ded1dba0 \ - --hash=sha256:8c2fcb12136530ed19572bbba61b407f655e3953ba669b96a35036a11a485793 \ - --hash=sha256:8c91cdb30809a96d9ecf442ec9bc45e8cfaa0f7f8bdf534e082c2443a196727e \ - --hash=sha256:8c9854df0eaa610a23494c32a6f44a3a550fb398b6b51a56e8c6b9b3689578db \ - --hash=sha256:8e42332cf8276bb7645d310cdecca93a16920256a5b01bebf747365f86a1675b \ - 
--hash=sha256:8fe323540c255db0bffee79ad7f048c909f2ab0edb87a597e1c17da6a54e493c \ - --hash=sha256:967af5f238ebc2eb1da4e77af5492219fbd9b4b812347da39a7b5f5c72c0fa45 \ - --hash=sha256:9a950b7cf54099c1209f455ac5970b1ea81410f2af60ed9eb3c3f14f0bfcf987 \ - --hash=sha256:a1b20a9d56b2d81e2ff52ecc0670d583eaabaa55f402e8d16dd062373dbbe796 \ - --hash=sha256:a506a77ddee1efcca81ecbeae27ade3e09cdf21a8ae854d766c2bb4f14053f92 \ - --hash=sha256:a59c63061f1a07b861c004e53869eb1211ffd1a4acbca330e3322efa6dd02978 \ - --hash=sha256:a650629970fa21ac1fb06ba25dabfc5b8a2054fcbf6ae97c758aa956b8dba802 \ - --hash=sha256:a693fc5ed9bdd1c9e898013e0da4dcc640de7963a371c0bd458e50e046bf6438 \ - --hash=sha256:aaea28ba20a9026dfa77f4b80369e51cb767c61e33a2d4043399c67bd95fb7c6 \ - --hash=sha256:ad8850921d3a8d8ff6fbef790e773cecfc260bbfa0566998980d3fa8f520bc4a \ - --hash=sha256:ad887a8250eb47d3ab083d2f98db7f48098d13d42eb7a3b67d8a5c795f224ace \ - --hash=sha256:ae9408439537c5afdca05edd128a63f56a62680f4b3c234301055d7a2000220f \ - --hash=sha256:af7618b591bae552b40dbb6f93f5518328a949dac626ee75927bba1ecdeea9f4 \ - --hash=sha256:b6819f83aef06f560cb15482d619d0e623ce9bf155115150a85ab11b8342a665 \ - --hash=sha256:b8aa6f0bd8125ddd04a6593437bad6a7e70f300ff4180a531654aa2ab3f6d58f \ - --hash=sha256:b8eb3025f17b0a4c3cd08cda49acf312a19ad6e8a4edd9dbd591e6506d999402 \ - --hash=sha256:b95494daf857602eccf4c18ca33337dd2be705bccdb6dddbfc9d513e6addb9d9 \ - --hash=sha256:b9e5853bbd7264baca42ffc53391b490d65fe62849bf2c690fa3f6273dbcd0cb \ - --hash=sha256:bbc14f0365534d35a06970d6a83478b249752e922d662dc24d489af1aa0d1be7 \ - --hash=sha256:be5bf4b3224948032a845d12ab0f69f208293742df96dc14c4ff9b09e508fc17 \ - --hash=sha256:c5c97aa666cf70e667dfa5af945424ba1329af5dd988a437efeb3a09430389fb \ - --hash=sha256:c7a0e9b561e6460484318a7612e725df1145d46b0ef57c6b9866441bf6e27e0c \ - --hash=sha256:caebafea30ed049c57c673d0b36238b1748683be2593965614d7b0e99125c877 \ - --hash=sha256:cbbc54e58b34c3bae389ef00046be0961f30fef7cb0dd9c7756aee376a4f7683 \ - 
--hash=sha256:cc356250cffd6e78416cf5b40dc6a74f1edf3be8e834cf8862d9ed5265cf9b0e \ - --hash=sha256:ce9a40fbe52e57e7edf20113a4eaddfacac0561a0879734e636aa6d4bb5e3fb0 \ - --hash=sha256:d191de6cbab2aff5de6c5723101705fd044b3e4c7cfd587a1929b5028b9714b3 \ - --hash=sha256:d24f351e4d759f5054b641c81e8291e5d122af0fca5c72454ff77f7cbe492de8 \ - --hash=sha256:d2d4e4787672911b48350df02ed3fa3fffdc2f2e8ca06dd6afdf34189b76a9dd \ - --hash=sha256:d8c112f7a90d8ca5d20213aa41eac690bb50a76da153e3afb3886418e61cb22e \ - --hash=sha256:d9890d68c45d1aeac5178ded1d1cccf3bc8d7accf1f976f79bf63099fb16e4bd \ - --hash=sha256:dadf95aa862714ea468a49ad1e09fe00fcc9ec67d122f6596a8d40caf6cec7d0 \ - --hash=sha256:db6a3810eec08280a172a6cd541ff4a5f6a97b161d93ec94e6c4018917deb6b7 \ - --hash=sha256:db9801fe021f59a5b375ab778973127ca0ac52429a26e2fd86aa9508f4d26eb7 \ - --hash=sha256:e167bf899c3d724f9662ef00b4f7fef87a19c22b2fead198a6f68b263618df52 \ - --hash=sha256:e1b93790ed0bc26feb72e2f08299691ceb6da5e9e14a0d13cc74f1869af327a0 \ - --hash=sha256:e5b1413361cef15340ab9dc61523e653d25723e82d488ef7d60a12878227ed50 \ - --hash=sha256:ecab51ad2462197a4c000b6d5701fc8585b80eecb90583635d7e327b7b6923eb \ - --hash=sha256:ed3b94c5e362a8a84d69642dbeac615452e8af9b8eb825b7bc9f31a53a1051e2 \ - --hash=sha256:ed8358ae7d94ffb7c397cecb62cbac9578a83ecefc1eba27b9090ee910e2efb6 \ - --hash=sha256:edfdcae97cdc5d1a89477c436b61f472c4d40971774ac4729c613b4b133163cb \ - --hash=sha256:ee25f82f53262f9ac93bd7e58e47ea1bdcc3393cef815847e397cba17e284210 \ - --hash=sha256:f3be27440f7644ab9a13a6fc86f09cdd90b347c3c5e30c6d6d860de822d7cb53 \ - --hash=sha256:f46a6e8597f9bd71b31cc708195d42b634c8527fecbcf93febf1052cacc1f16e \ - --hash=sha256:f6eb37d511bfae9e13e82cb4d1af36b91150466f24d9b2b8a9785816deb16605 \ - --hash=sha256:f8d4916a81697faec6cb724a273bd5457e4c6c43d82b29f9dc02c5542fd21fc9 \ - --hash=sha256:f93b2b2279883d1d0a9e1bd01f312d6fc315c5e4c1f09e112e4736e2f650bc4e \ - --hash=sha256:f9867e55590e0855bcec60d4f9a092b69476db64573c9fe17e92b0c50614c16a \ - 
--hash=sha256:f996b87b420995a9174b2a7c1a8daf7db4750be6848b03eb5e639674f7963773 +multidict==6.7.0 \ + --hash=sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3 \ + --hash=sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec \ + --hash=sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd \ + --hash=sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b \ + --hash=sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb \ + --hash=sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32 \ + --hash=sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f \ + --hash=sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7 \ + --hash=sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36 \ + --hash=sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd \ + --hash=sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff \ + --hash=sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8 \ + --hash=sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d \ + --hash=sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721 \ + --hash=sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0 \ + --hash=sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3 \ + --hash=sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d \ + --hash=sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa \ + --hash=sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10 \ + --hash=sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202 \ + --hash=sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0 \ + --hash=sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718 \ + 
--hash=sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e \ + --hash=sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6 \ + --hash=sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1 \ + --hash=sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2 \ + --hash=sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754 \ + --hash=sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c \ + --hash=sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390 \ + --hash=sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128 \ + --hash=sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912 \ + --hash=sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c \ + --hash=sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3 \ + --hash=sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6 \ + --hash=sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2 \ + --hash=sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f \ + --hash=sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84 \ + --hash=sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842 \ + --hash=sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9 \ + --hash=sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6 \ + --hash=sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd \ + --hash=sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8 \ + --hash=sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599 \ + --hash=sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62 \ + --hash=sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec \ + --hash=sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34 \ + 
--hash=sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0 \ + --hash=sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e \ + --hash=sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6 \ + --hash=sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc \ + --hash=sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc \ + --hash=sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c \ + --hash=sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7 \ + --hash=sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4 \ + --hash=sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4 \ + --hash=sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38 \ + --hash=sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5 \ + --hash=sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111 \ + --hash=sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e \ + --hash=sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84 \ + --hash=sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c \ + --hash=sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1 \ + --hash=sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546 \ + --hash=sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a \ + --hash=sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c \ + --hash=sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036 \ + --hash=sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38 \ + --hash=sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99 \ + --hash=sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64 \ + --hash=sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e \ + 
--hash=sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f \ + --hash=sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159 \ + --hash=sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e \ + --hash=sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12 \ + --hash=sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1 \ + --hash=sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0 \ + --hash=sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184 \ + --hash=sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851 \ + --hash=sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb \ + --hash=sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32 \ + --hash=sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b \ + --hash=sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288 \ + --hash=sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81 \ + --hash=sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd \ + --hash=sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45 \ + --hash=sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a \ + --hash=sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca \ + --hash=sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5 \ + --hash=sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb \ + --hash=sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349 \ + --hash=sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b \ + --hash=sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f \ + --hash=sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32 \ + --hash=sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5 \ + 
--hash=sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34 \ + --hash=sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c \ + --hash=sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4 \ + --hash=sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17 \ + --hash=sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60 \ + --hash=sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394 \ + --hash=sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff \ + --hash=sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00 \ + --hash=sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85 \ + --hash=sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7 \ + --hash=sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304 \ + --hash=sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13 \ + --hash=sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e \ + --hash=sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e \ + --hash=sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792 \ + --hash=sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329 \ + --hash=sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb \ + --hash=sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b \ + --hash=sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000 \ + --hash=sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6 \ + --hash=sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62 \ + --hash=sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63 \ + --hash=sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5 \ + --hash=sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e \ + 
--hash=sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c \ + --hash=sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827 \ + --hash=sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8 \ + --hash=sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91 \ + --hash=sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96 \ + --hash=sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad \ + --hash=sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6 \ + --hash=sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40 \ + --hash=sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7 \ + --hash=sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4 \ + --hash=sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648 \ + --hash=sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064 \ + --hash=sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73 \ + --hash=sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b \ + --hash=sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762 \ + --hash=sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e \ + --hash=sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4 \ + --hash=sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e \ + --hash=sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546 \ + --hash=sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046 \ + --hash=sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6 \ + --hash=sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9 \ + --hash=sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d \ + --hash=sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf \ + 
--hash=sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687 \ + --hash=sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e \ + --hash=sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885 \ + --hash=sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7 # via # aiohttp # yarl @@ -663,105 +831,129 @@ pluggy==1.6.0 \ --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 # via pytest -propcache==0.3.2 \ - --hash=sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c \ - --hash=sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81 \ - --hash=sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f \ - --hash=sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6 \ - --hash=sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535 \ - --hash=sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be \ - --hash=sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba \ - --hash=sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3 \ - --hash=sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0 \ - --hash=sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4 \ - --hash=sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168 \ - --hash=sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b \ - --hash=sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea \ - --hash=sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770 \ - --hash=sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2 \ - --hash=sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892 \ - --hash=sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154 \ - 
--hash=sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf \ - --hash=sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb \ - --hash=sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1 \ - --hash=sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef \ - --hash=sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe \ - --hash=sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897 \ - --hash=sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3 \ - --hash=sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70 \ - --hash=sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330 \ - --hash=sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44 \ - --hash=sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0 \ - --hash=sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88 \ - --hash=sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1 \ - --hash=sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3 \ - --hash=sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43 \ - --hash=sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4 \ - --hash=sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1 \ - --hash=sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220 \ - --hash=sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7 \ - --hash=sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9 \ - --hash=sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50 \ - --hash=sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e \ - --hash=sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2 \ - --hash=sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66 \ - 
--hash=sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1 \ - --hash=sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb \ - --hash=sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe \ - --hash=sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c \ - --hash=sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7 \ - --hash=sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9 \ - --hash=sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e \ - --hash=sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701 \ - --hash=sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9 \ - --hash=sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8 \ - --hash=sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b \ - --hash=sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f \ - --hash=sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e \ - --hash=sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02 \ - --hash=sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e \ - --hash=sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1 \ - --hash=sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10 \ - --hash=sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387 \ - --hash=sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198 \ - --hash=sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f \ - --hash=sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b \ - --hash=sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e \ - --hash=sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614 \ - --hash=sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252 \ - 
--hash=sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9 \ - --hash=sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5 \ - --hash=sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c \ - --hash=sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1 \ - --hash=sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770 \ - --hash=sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339 \ - --hash=sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251 \ - --hash=sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db \ - --hash=sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf \ - --hash=sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95 \ - --hash=sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df \ - --hash=sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2 \ - --hash=sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945 \ - --hash=sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474 \ - --hash=sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b \ - --hash=sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615 \ - --hash=sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06 \ - --hash=sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33 \ - --hash=sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec \ - --hash=sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886 \ - --hash=sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb \ - --hash=sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1 \ - --hash=sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05 \ - --hash=sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d \ - 
--hash=sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39 \ - --hash=sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67 \ - --hash=sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e \ - --hash=sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28 \ - --hash=sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a \ - --hash=sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394 \ - --hash=sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725 \ - --hash=sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c \ - --hash=sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206 +propcache==0.4.1 \ + --hash=sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e \ + --hash=sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4 \ + --hash=sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be \ + --hash=sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3 \ + --hash=sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85 \ + --hash=sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b \ + --hash=sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367 \ + --hash=sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf \ + --hash=sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393 \ + --hash=sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888 \ + --hash=sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37 \ + --hash=sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8 \ + --hash=sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60 \ + --hash=sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1 \ + 
--hash=sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4 \ + --hash=sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717 \ + --hash=sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7 \ + --hash=sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc \ + --hash=sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe \ + --hash=sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb \ + --hash=sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75 \ + --hash=sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6 \ + --hash=sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e \ + --hash=sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff \ + --hash=sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566 \ + --hash=sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12 \ + --hash=sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367 \ + --hash=sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874 \ + --hash=sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf \ + --hash=sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566 \ + --hash=sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a \ + --hash=sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc \ + --hash=sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a \ + --hash=sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1 \ + --hash=sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6 \ + --hash=sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61 \ + --hash=sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726 \ + --hash=sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49 \ + 
--hash=sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44 \ + --hash=sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af \ + --hash=sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa \ + --hash=sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153 \ + --hash=sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc \ + --hash=sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5 \ + --hash=sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938 \ + --hash=sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf \ + --hash=sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925 \ + --hash=sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8 \ + --hash=sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c \ + --hash=sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85 \ + --hash=sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e \ + --hash=sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0 \ + --hash=sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1 \ + --hash=sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0 \ + --hash=sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992 \ + --hash=sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db \ + --hash=sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f \ + --hash=sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d \ + --hash=sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1 \ + --hash=sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e \ + --hash=sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900 \ + --hash=sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89 \ + 
--hash=sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a \ + --hash=sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b \ + --hash=sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f \ + --hash=sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f \ + --hash=sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1 \ + --hash=sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183 \ + --hash=sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66 \ + --hash=sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21 \ + --hash=sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db \ + --hash=sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded \ + --hash=sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb \ + --hash=sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19 \ + --hash=sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0 \ + --hash=sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165 \ + --hash=sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778 \ + --hash=sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455 \ + --hash=sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f \ + --hash=sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b \ + --hash=sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237 \ + --hash=sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81 \ + --hash=sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859 \ + --hash=sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c \ + --hash=sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835 \ + --hash=sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393 \ + 
--hash=sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5 \ + --hash=sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641 \ + --hash=sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144 \ + --hash=sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74 \ + --hash=sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db \ + --hash=sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac \ + --hash=sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403 \ + --hash=sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9 \ + --hash=sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f \ + --hash=sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311 \ + --hash=sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581 \ + --hash=sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36 \ + --hash=sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00 \ + --hash=sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a \ + --hash=sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f \ + --hash=sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2 \ + --hash=sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7 \ + --hash=sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239 \ + --hash=sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757 \ + --hash=sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72 \ + --hash=sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9 \ + --hash=sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4 \ + --hash=sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24 \ + --hash=sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207 \ + 
--hash=sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e \ + --hash=sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1 \ + --hash=sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d \ + --hash=sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37 \ + --hash=sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c \ + --hash=sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e \ + --hash=sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570 \ + --hash=sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af \ + --hash=sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f \ + --hash=sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88 \ + --hash=sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48 \ + --hash=sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781 # via # aiohttp # yarl @@ -771,16 +963,17 @@ proto-plus==1.26.1 \ # via # -r requirements.in # google-api-core -protobuf==6.32.1 \ - --hash=sha256:2601b779fc7d32a866c6b4404f9d42a3f67c5b9f3f15b4db3cccabe06b95c346 \ - --hash=sha256:2f5b80a49e1eb7b86d85fcd23fe92df154b9730a725c3b38c4e43b9d77018bf4 \ - --hash=sha256:68ff170bac18c8178f130d1ccb94700cf72852298e016a2443bdb9502279e5f1 \ - --hash=sha256:a8a32a84bc9f2aad712041b8b366190f71dde248926da517bde9e832e4412085 \ - --hash=sha256:b00a7d8c25fa471f16bc8153d0e53d6c9e827f0953f3c09aaa4331c718cae5e1 \ - --hash=sha256:b1864818300c297265c83a4982fd3169f97122c299f56a56e2445c3698d34710 \ - --hash=sha256:d0975d0b2f3e6957111aa3935d08a0eb7e006b1505d825f862a1fffc8348e122 \ - --hash=sha256:d8c7e6eb619ffdf105ee4ab76af5a68b60a9d0f66da3ea12d1640e6d8dab7281 \ - --hash=sha256:ee2469e4a021474ab9baafea6cd070e5bf27c7d29433504ddea1a4ee5850f68d +protobuf==6.33.0 \ + --hash=sha256:140303d5c8d2037730c548f8c7b93b20bb1dc301be280c378b82b8894589c954 \ + 
--hash=sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995 \ + --hash=sha256:35be49fd3f4fefa4e6e2aacc35e8b837d6703c37a2168a55ac21e9b1bc7559ef \ + --hash=sha256:905b07a65f1a4b72412314082c7dbfae91a9e8b68a0cc1577515f8df58ecf455 \ + --hash=sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee \ + --hash=sha256:c963e86c3655af3a917962c9619e1a6b9670540351d7af9439d06064e3317cc9 \ + --hash=sha256:cd33a8e38ea3e39df66e1bbc462b076d6e5ba3a4ebbde58219d777223a7873d3 \ + --hash=sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035 \ + --hash=sha256:e0697ece353e6239b90ee43a9231318302ad8353c70e6e45499fa52396debf90 \ + --hash=sha256:e0a1715e4f27355afd9570f3ea369735afc853a6c3951a6afe1f80d8569ad298 # via # -r requirements.in # google-api-core @@ -813,60 +1006,80 @@ pytest-asyncio==1.2.0 \ --hash=sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99 \ --hash=sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57 # via -r requirements.in -pyyaml==6.0.2 \ - --hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \ - --hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \ - --hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \ - --hash=sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e \ - --hash=sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133 \ - --hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \ - --hash=sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484 \ - --hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \ - --hash=sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 \ - --hash=sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68 \ - --hash=sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a \ - 
--hash=sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf \ - --hash=sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99 \ - --hash=sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8 \ - --hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \ - --hash=sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19 \ - --hash=sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc \ - --hash=sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a \ - --hash=sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1 \ - --hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \ - --hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \ - --hash=sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631 \ - --hash=sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d \ - --hash=sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652 \ - --hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5 \ - --hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \ - --hash=sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b \ - --hash=sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 \ - --hash=sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 \ - --hash=sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706 \ - --hash=sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563 \ - --hash=sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237 \ - --hash=sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b \ - --hash=sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083 \ - --hash=sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180 \ - 
--hash=sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425 \ - --hash=sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e \ - --hash=sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f \ - --hash=sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725 \ - --hash=sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183 \ - --hash=sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab \ - --hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \ - --hash=sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725 \ - --hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \ - --hash=sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5 \ - --hash=sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d \ - --hash=sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290 \ - --hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \ - --hash=sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed \ - --hash=sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4 \ - --hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \ - --hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \ - --hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4 +PyYAML==6.0.3 \ + --hash=sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c \ + --hash=sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a \ + --hash=sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3 \ + --hash=sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956 \ + --hash=sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6 \ + 
--hash=sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c \ + --hash=sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65 \ + --hash=sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a \ + --hash=sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0 \ + --hash=sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b \ + --hash=sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1 \ + --hash=sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6 \ + --hash=sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7 \ + --hash=sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e \ + --hash=sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007 \ + --hash=sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310 \ + --hash=sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4 \ + --hash=sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9 \ + --hash=sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295 \ + --hash=sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea \ + --hash=sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0 \ + --hash=sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e \ + --hash=sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac \ + --hash=sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9 \ + --hash=sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7 \ + --hash=sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35 \ + --hash=sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb \ + --hash=sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b \ + --hash=sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69 \ + 
--hash=sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5 \ + --hash=sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b \ + --hash=sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c \ + --hash=sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369 \ + --hash=sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd \ + --hash=sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824 \ + --hash=sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198 \ + --hash=sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065 \ + --hash=sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c \ + --hash=sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c \ + --hash=sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764 \ + --hash=sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196 \ + --hash=sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b \ + --hash=sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00 \ + --hash=sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac \ + --hash=sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8 \ + --hash=sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e \ + --hash=sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28 \ + --hash=sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3 \ + --hash=sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5 \ + --hash=sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4 \ + --hash=sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b \ + --hash=sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf \ + --hash=sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5 \ + 
--hash=sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702 \ + --hash=sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8 \ + --hash=sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788 \ + --hash=sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da \ + --hash=sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d \ + --hash=sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc \ + --hash=sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c \ + --hash=sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba \ + --hash=sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f \ + --hash=sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917 \ + --hash=sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5 \ + --hash=sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26 \ + --hash=sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f \ + --hash=sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b \ + --hash=sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be \ + --hash=sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c \ + --hash=sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3 \ + --hash=sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6 \ + --hash=sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926 \ + --hash=sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0 # via # -r requirements.in # libcst @@ -878,39 +1091,49 @@ rsa==4.9.1 \ --hash=sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762 \ --hash=sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75 # via google-auth -tomli==2.2.1 \ - 
--hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - 
--hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 +tomli==2.3.0 \ + --hash=sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456 \ + --hash=sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845 \ + --hash=sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999 \ + --hash=sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0 \ + --hash=sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878 \ + --hash=sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf \ + --hash=sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3 \ + --hash=sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be \ + --hash=sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52 \ + --hash=sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b \ + --hash=sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67 \ + --hash=sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549 \ + --hash=sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba \ + --hash=sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22 \ + --hash=sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c \ + 
--hash=sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f \ + --hash=sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6 \ + --hash=sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba \ + --hash=sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45 \ + --hash=sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f \ + --hash=sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77 \ + --hash=sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606 \ + --hash=sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441 \ + --hash=sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0 \ + --hash=sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f \ + --hash=sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530 \ + --hash=sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05 \ + --hash=sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8 \ + --hash=sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005 \ + --hash=sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879 \ + --hash=sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae \ + --hash=sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc \ + --hash=sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b \ + --hash=sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b \ + --hash=sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e \ + --hash=sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf \ + --hash=sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac \ + --hash=sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8 \ + --hash=sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b \ + 
--hash=sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf \ + --hash=sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463 \ + --hash=sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876 # via pytest typing-extensions==4.15.0 \ --hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \ @@ -925,109 +1148,135 @@ urllib3==2.5.0 \ --hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \ --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc # via requests -yarl==1.20.1 \ - --hash=sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845 \ - --hash=sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53 \ - --hash=sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a \ - --hash=sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed \ - --hash=sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2 \ - --hash=sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02 \ - --hash=sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf \ - --hash=sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010 \ - --hash=sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3 \ - --hash=sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef \ - --hash=sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04 \ - --hash=sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23 \ - --hash=sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e \ - --hash=sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6 \ - --hash=sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e \ - --hash=sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a \ - 
--hash=sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a \ - --hash=sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8 \ - --hash=sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805 \ - --hash=sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2 \ - --hash=sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458 \ - --hash=sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc \ - --hash=sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d \ - --hash=sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b \ - --hash=sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73 \ - --hash=sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7 \ - --hash=sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309 \ - --hash=sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e \ - --hash=sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698 \ - --hash=sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c \ - --hash=sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691 \ - --hash=sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16 \ - --hash=sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f \ - --hash=sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f \ - --hash=sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004 \ - --hash=sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3 \ - --hash=sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240 \ - --hash=sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28 \ - --hash=sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513 \ - --hash=sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773 \ - 
--hash=sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba \ - --hash=sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4 \ - --hash=sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e \ - --hash=sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1 \ - --hash=sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31 \ - --hash=sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16 \ - --hash=sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819 \ - --hash=sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d \ - --hash=sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3 \ - --hash=sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8 \ - --hash=sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf \ - --hash=sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723 \ - --hash=sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13 \ - --hash=sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1 \ - --hash=sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b \ - --hash=sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f \ - --hash=sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d \ - --hash=sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30 \ - --hash=sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77 \ - --hash=sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a \ - --hash=sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389 \ - --hash=sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e \ - --hash=sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e \ - --hash=sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c \ - 
--hash=sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1 \ - --hash=sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833 \ - --hash=sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b \ - --hash=sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee \ - --hash=sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000 \ - --hash=sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38 \ - --hash=sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8 \ - --hash=sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd \ - --hash=sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16 \ - --hash=sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d \ - --hash=sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a \ - --hash=sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06 \ - --hash=sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb \ - --hash=sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4 \ - --hash=sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9 \ - --hash=sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8 \ - --hash=sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390 \ - --hash=sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8 \ - --hash=sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be \ - --hash=sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c \ - --hash=sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac \ - --hash=sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b \ - --hash=sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5 \ - --hash=sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4 \ - 
--hash=sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e \ - --hash=sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f \ - --hash=sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee \ - --hash=sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5 \ - --hash=sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70 \ - --hash=sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1 \ - --hash=sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24 \ - --hash=sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653 \ - --hash=sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3 \ - --hash=sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00 \ - --hash=sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983 \ - --hash=sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d \ - --hash=sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7 \ - --hash=sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce \ - --hash=sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e \ - --hash=sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5 +yarl==1.22.0 \ + --hash=sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a \ + --hash=sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8 \ + --hash=sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b \ + --hash=sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da \ + --hash=sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf \ + --hash=sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890 \ + --hash=sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093 \ + 
--hash=sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6 \ + --hash=sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79 \ + --hash=sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683 \ + --hash=sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed \ + --hash=sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2 \ + --hash=sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff \ + --hash=sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02 \ + --hash=sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b \ + --hash=sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03 \ + --hash=sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511 \ + --hash=sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c \ + --hash=sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124 \ + --hash=sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c \ + --hash=sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da \ + --hash=sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2 \ + --hash=sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0 \ + --hash=sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba \ + --hash=sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d \ + --hash=sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53 \ + --hash=sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138 \ + --hash=sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4 \ + --hash=sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748 \ + --hash=sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7 \ + --hash=sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d \ + 
--hash=sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503 \ + --hash=sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d \ + --hash=sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2 \ + --hash=sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa \ + --hash=sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737 \ + --hash=sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f \ + --hash=sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1 \ + --hash=sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d \ + --hash=sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694 \ + --hash=sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3 \ + --hash=sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a \ + --hash=sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d \ + --hash=sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b \ + --hash=sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a \ + --hash=sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6 \ + --hash=sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b \ + --hash=sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea \ + --hash=sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5 \ + --hash=sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f \ + --hash=sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df \ + --hash=sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f \ + --hash=sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b \ + --hash=sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba \ + --hash=sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9 \ + 
--hash=sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0 \ + --hash=sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6 \ + --hash=sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b \ + --hash=sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967 \ + --hash=sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2 \ + --hash=sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708 \ + --hash=sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda \ + --hash=sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8 \ + --hash=sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10 \ + --hash=sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c \ + --hash=sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b \ + --hash=sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028 \ + --hash=sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e \ + --hash=sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147 \ + --hash=sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33 \ + --hash=sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca \ + --hash=sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590 \ + --hash=sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c \ + --hash=sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53 \ + --hash=sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74 \ + --hash=sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60 \ + --hash=sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f \ + --hash=sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1 \ + --hash=sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27 \ + 
--hash=sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520 \ + --hash=sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e \ + --hash=sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467 \ + --hash=sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca \ + --hash=sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859 \ + --hash=sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273 \ + --hash=sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e \ + --hash=sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601 \ + --hash=sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054 \ + --hash=sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376 \ + --hash=sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7 \ + --hash=sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b \ + --hash=sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb \ + --hash=sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65 \ + --hash=sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784 \ + --hash=sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71 \ + --hash=sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b \ + --hash=sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a \ + --hash=sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c \ + --hash=sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face \ + --hash=sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d \ + --hash=sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e \ + --hash=sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e \ + --hash=sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca \ + 
--hash=sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9 \ + --hash=sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb \ + --hash=sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95 \ + --hash=sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed \ + --hash=sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf \ + --hash=sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca \ + --hash=sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2 \ + --hash=sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62 \ + --hash=sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df \ + --hash=sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a \ + --hash=sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67 \ + --hash=sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f \ + --hash=sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529 \ + --hash=sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486 \ + --hash=sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a \ + --hash=sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e \ + --hash=sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b \ + --hash=sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74 \ + --hash=sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d \ + --hash=sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b \ + --hash=sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc \ + --hash=sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2 \ + --hash=sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e \ + --hash=sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8 \ + 
--hash=sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82 \ + --hash=sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd \ + --hash=sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249 # via aiohttp From 67eb1f0cf3ed2511a94966474568525c73f40aa7 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Nov 2025 14:51:17 -0500 Subject: [PATCH 1330/1339] tests: temporarily pin coverage to 7.11.0 (#2480) --- packages/gapic-generator/noxfile.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 557f762a77f8..0201b17dfe91 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -54,7 +54,10 @@ def unit(session): """Run the unit test suite.""" session.install( - "coverage", + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2478): + # Temporarily pin coverage to 7.11.0 + # See https://github.com/nedbat/coveragepy/issues/2077 + "coverage<=7.11.0", "pytest-cov", "pytest", "pytest-xdist", From cbf42e5035d9b4b56bd34433cd3478044e2a7b79 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 11 Nov 2025 10:50:22 -0500 Subject: [PATCH 1331/1339] chore(librarian): onboard to librarian (#2479) --- .../gapic-generator/.github/.OwlBot.lock.yaml | 17 ----------- packages/gapic-generator/.github/.OwlBot.yaml | 18 ----------- .../.github/release-please.yml | 2 -- .../.github/release-trigger.yml | 2 -- .../gapic-generator/.librarian/state.yaml | 10 +++++++ packages/gapic-generator/CHANGELOG.md | 4 +++ packages/gapic-generator/owlbot.py | 30 ------------------- 7 files changed, 14 insertions(+), 69 deletions(-) delete mode 100644 packages/gapic-generator/.github/.OwlBot.lock.yaml delete mode 100644 packages/gapic-generator/.github/.OwlBot.yaml delete mode 100644 packages/gapic-generator/.github/release-please.yml delete mode 100644 
packages/gapic-generator/.github/release-trigger.yml create mode 100644 packages/gapic-generator/.librarian/state.yaml delete mode 100644 packages/gapic-generator/owlbot.py diff --git a/packages/gapic-generator/.github/.OwlBot.lock.yaml b/packages/gapic-generator/.github/.OwlBot.lock.yaml deleted file mode 100644 index c631e1f7d7e9..000000000000 --- a/packages/gapic-generator/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 -# created: 2025-03-05 diff --git a/packages/gapic-generator/.github/.OwlBot.yaml b/packages/gapic-generator/.github/.OwlBot.yaml deleted file mode 100644 index 0afcbd1eff16..000000000000 --- a/packages/gapic-generator/.github/.OwlBot.yaml +++ /dev/null @@ -1,18 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - -begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/packages/gapic-generator/.github/release-please.yml b/packages/gapic-generator/.github/release-please.yml deleted file mode 100644 index 466597e5b196..000000000000 --- a/packages/gapic-generator/.github/release-please.yml +++ /dev/null @@ -1,2 +0,0 @@ -releaseType: python -handleGHRelease: true diff --git a/packages/gapic-generator/.github/release-trigger.yml b/packages/gapic-generator/.github/release-trigger.yml deleted file mode 100644 index 21ed4182c81b..000000000000 --- a/packages/gapic-generator/.github/release-trigger.yml +++ /dev/null @@ -1,2 +0,0 @@ -enabled: true -multiScmName: gapic-generator-python diff --git a/packages/gapic-generator/.librarian/state.yaml b/packages/gapic-generator/.librarian/state.yaml new file mode 100644 index 000000000000..9d2284a957ca --- /dev/null +++ b/packages/gapic-generator/.librarian/state.yaml @@ -0,0 +1,10 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:4e3486fee9eba44b75a18c67f0c60fa0c4af675132305e3ff3fae2e9b94f94bb +libraries: + - id: gapic-generator + version: 1.29.0 + apis: [] + source_roots: + - . 
+ preserve_regex: [] + remove_regex: [] + tag_format: v{version} diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index a9d964d73809..66122f589683 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -1,5 +1,9 @@ # Changelog +[PyPI History][1] + +[1]: https://pypi.org/project/gapic-generator/#history + ## [1.29.0](https://github.com/googleapis/gapic-generator-python/compare/v1.28.3...v1.29.0) (2025-10-23) diff --git a/packages/gapic-generator/owlbot.py b/packages/gapic-generator/owlbot.py deleted file mode 100644 index 43ea9f15fe29..000000000000 --- a/packages/gapic-generator/owlbot.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import synthtool as s -from synthtool.gcp import CommonTemplates - -templated_files = CommonTemplates().py_library() -s.move( - templated_files / ".kokoro", - excludes=["samples/**/*", "test-samples*", "*/prerelease-deps.cfg"], -) - -# needed for docs build -s.move(templated_files / ".trampolinerc") - -s.move(templated_files / "LICENSE") -s.move(templated_files / ".github", excludes=["workflows", "CODEOWNERS", "auto-approve.yml"]) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 46767d859d1b15bb6ea1a05efd8ca49655021b54 Mon Sep 17 00:00:00 2001 From: Noah Dietz Date: Tue, 11 Nov 2025 07:55:24 -0800 Subject: [PATCH 1332/1339] fix: add api_version to gapic_metadata.json (#2476) --- packages/gapic-generator/gapic/schema/api.py | 3 +++ .../gapic-generator/tests/unit/schema/test_api.py | 14 +++++++++++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/packages/gapic-generator/gapic/schema/api.py b/packages/gapic-generator/gapic/schema/api.py index 6fe99b73d114..b05f6b6dec31 100644 --- a/packages/gapic-generator/gapic/schema/api.py +++ b/packages/gapic-generator/gapic/schema/api.py @@ -676,6 +676,9 @@ def gapic_metadata(self, options: Options) -> gapic_metadata_pb2.GapicMetadata: for service in sorted(self.services.values(), key=lambda s: s.name): service_desc = gm.services.get_or_create(service.name) + if service.version: + service_desc.api_version = service.version + # At least one of "grpc" or "rest" is guaranteed to be present because # of the way that Options instances are created. # This assumes the options are generated by the class method factory. 
diff --git a/packages/gapic-generator/tests/unit/schema/test_api.py b/packages/gapic-generator/tests/unit/schema/test_api.py index 8b399c54d5a1..300e8cab8943 100644 --- a/packages/gapic-generator/tests/unit/schema/test_api.py +++ b/packages/gapic-generator/tests/unit/schema/test_api.py @@ -1908,6 +1908,10 @@ def test_map_field_name_disambiguation(): def test_gapic_metadata(): + apiVersion = "2025-11-03" + serviceOptions = descriptor_pb2.ServiceOptions() + serviceOptions.Extensions[client_pb2.api_version] = apiVersion + api_schema = api.API.build( file_descriptors=[ descriptor_pb2.FileDescriptorProto( @@ -1941,6 +1945,7 @@ def test_gapic_metadata(): output_type="animalia.mollusca.v1.Mollusc", ), ], + options=serviceOptions, ), descriptor_pb2.ServiceDescriptorProto( name="Octopus", @@ -1997,6 +2002,7 @@ def test_gapic_metadata(): } ), "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + api_version=apiVersion, clients={ "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", @@ -2026,7 +2032,7 @@ def test_gapic_metadata(): ), }, ), - } + }, ), }, ) @@ -2060,6 +2066,7 @@ def test_gapic_metadata(): } ), "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + api_version=apiVersion, clients={ "rest": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", @@ -2075,7 +2082,7 @@ def test_gapic_metadata(): ), }, ), - } + }, ), }, ) @@ -2131,6 +2138,7 @@ def test_gapic_metadata(): } ), "Squid": gapic_metadata_pb2.GapicMetadata.ServiceForTransport( + api_version=apiVersion, clients={ "grpc": gapic_metadata_pb2.GapicMetadata.ServiceAsClient( library_client="SquidClient", @@ -2174,7 +2182,7 @@ def test_gapic_metadata(): ), }, ), - } + }, ), }, ) From d1e68809a8e338696e6045ca603e88665abe6ed2 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 12 Nov 2025 14:56:21 -0500 Subject: [PATCH 1333/1339] chore(deps): update requirements.txt (#2482) --- packages/gapic-generator/requirements.txt | 25 
+++++++++++------------ 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/packages/gapic-generator/requirements.txt b/packages/gapic-generator/requirements.txt index 0639da5720b7..dfba7dd8d829 100644 --- a/packages/gapic-generator/requirements.txt +++ b/packages/gapic-generator/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in @@ -580,7 +580,7 @@ libcst==1.8.6 \ --hash=sha256:f729c37c9317126da9475bdd06a7208eb52fcbd180a6341648b45a56b4ba708b \ --hash=sha256:fea5c7fa26556eedf277d4f72779c5ede45ac3018650721edd77fd37ccd4a2d4 # via -r requirements.in -MarkupSafe==3.0.3 \ +markupsafe==3.0.3 \ --hash=sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f \ --hash=sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a \ --hash=sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf \ @@ -994,19 +994,18 @@ pygments==2.19.2 \ --hash=sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887 \ --hash=sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b # via pytest -pypandoc==1.15 \ - --hash=sha256:4ededcc76c8770f27aaca6dff47724578428eca84212a31479403a9731fc2b16 \ - --hash=sha256:ea25beebe712ae41d63f7410c08741a3cab0e420f6703f95bc9b3a749192ce13 +pypandoc==1.16 \ + --hash=sha256:868f390d48388743e7a5885915cbbaa005dea36a825ecdfd571f8c523416c822 # via -r requirements.in -pytest==8.4.2 \ - --hash=sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01 \ - --hash=sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79 +pytest==9.0.0 \ + --hash=sha256:8f44522eafe4137b0f35c9ce3072931a788a21ee40a2ed279e817d3cc16ed21e \ + --hash=sha256:e5ccdf10b0bac554970ee88fc1a4ad0ee5d221f8ef22321f9b7e4584e19d7f96 # via pytest-asyncio -pytest-asyncio==1.2.0 \ - 
--hash=sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99 \ - --hash=sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57 +pytest-asyncio==1.3.0 \ + --hash=sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5 \ + --hash=sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5 # via -r requirements.in -PyYAML==6.0.3 \ +pyyaml==6.0.3 \ --hash=sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c \ --hash=sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a \ --hash=sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3 \ @@ -1141,7 +1140,7 @@ typing-extensions==4.15.0 \ # via # aiosignal # exceptiongroup - # libcst + # grpcio # multidict # pytest-asyncio urllib3==2.5.0 \ From 6490322bb19c352c52d7e1fe69c702729e7598d8 Mon Sep 17 00:00:00 2001 From: agrawalradhika-cell Date: Wed, 12 Nov 2025 13:09:10 -0800 Subject: [PATCH 1334/1339] feat: auto-enable mTLS when supported certificates are detected (#2472) Signed-off-by: Radhika Agrawal Co-authored-by: Daniel Sanche --- .../%sub/services/%service/client.py.j2 | 17 +- .../%name_%version/%sub/test_%service.py.j2 | 5 +- .../%sub/services/%service/client.py.j2 | 38 ++- .../%name_%version/%sub/test_%service.py.j2 | 226 ++++++++++++++++-- .../asset_v1/services/asset_service/client.py | 38 ++- .../unit/gapic/asset_v1/test_asset_service.py | 226 ++++++++++++++++-- .../services/iam_credentials/client.py | 38 ++- .../credentials_v1/test_iam_credentials.py | 226 ++++++++++++++++-- .../eventarc_v1/services/eventarc/client.py | 38 ++- .../unit/gapic/eventarc_v1/test_eventarc.py | 226 ++++++++++++++++-- .../services/config_service_v2/client.py | 38 ++- .../services/logging_service_v2/client.py | 38 ++- .../services/metrics_service_v2/client.py | 38 ++- .../logging_v2/test_config_service_v2.py | 226 ++++++++++++++++-- .../logging_v2/test_logging_service_v2.py | 226 ++++++++++++++++-- 
.../logging_v2/test_metrics_service_v2.py | 226 ++++++++++++++++-- .../services/config_service_v2/client.py | 38 ++- .../services/logging_service_v2/client.py | 38 ++- .../services/metrics_service_v2/client.py | 38 ++- .../logging_v2/test_config_service_v2.py | 226 ++++++++++++++++-- .../logging_v2/test_logging_service_v2.py | 226 ++++++++++++++++-- .../logging_v2/test_metrics_service_v2.py | 226 ++++++++++++++++-- .../redis_v1/services/cloud_redis/client.py | 38 ++- .../unit/gapic/redis_v1/test_cloud_redis.py | 226 ++++++++++++++++-- .../redis_v1/services/cloud_redis/client.py | 38 ++- .../unit/gapic/redis_v1/test_cloud_redis.py | 226 ++++++++++++++++-- 26 files changed, 2885 insertions(+), 305 deletions(-) diff --git a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index fef86ae05230..2b85bd9e76b1 100644 --- a/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -279,9 +279,20 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): client_options = cast(client_options_lib.ClientOptions, client_options) # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + + # Check if google-auth version supports should_use_client_cert for automatic + # mTLS enablement + if hasattr(mtls,"should_use_client_cert"): + use_client_cert = mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + use_client_cert = use_client_cert_str == "true" client_cert_source_func = None is_mtls = False diff --git a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index cd6c05c44ee9..ac385e285d71 100644 --- a/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -278,8 +278,9 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 9f945bc55685..0d60f47bb6b2 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -183,6 +183,32 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -297,16 +323,14 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = {{ service.client_name }}._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -336,14 +360,12 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = {{ service.client_name }}._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index 6391c95768fc..c0e92cd9d6d1 100644 --- a/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/packages/gapic-generator/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -163,10 +163,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert {{ service.client_name }}._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - {{ service.client_name }}._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not 
hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + {{ service.client_name }}._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert {{ service.client_name }}._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert {{ service.client_name }}._read_environment_variables() == (False, "never", None) @@ -185,6 +197,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert {{ service.client_name }}._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert {{ service.client_name }}._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert {{ service.client_name }}._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert {{ service.client_name }}._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert {{ service.client_name }}._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert {{ service.client_name }}._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert {{ service.client_name }}._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert {{ service.client_name }}._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert {{ service.client_name }}._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert {{ service.client_name }}._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with pytest.raises(ValueError): + {{ service.client_name }}._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + assert {{ service.client_name }}._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert {{ service.client_name }}._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -503,12 +600,6 @@ def test_{{ service.client_name|snake_case }}_client_options(client_class, trans client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -667,6 +758,114 @@ def test_{{ service.client_name|snake_case }}_get_mtls_endpoint_and_cert_source( assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -702,13 +901,6 @@ def test_{{ service.client_name|snake_case }}_get_mtls_endpoint_and_cert_source( assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ {% if 'grpc' in opts.transport %} {{ service.client_name }}, {{ service.async_client_name }} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 76df912ab0fb..843790205a95 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -139,6 +139,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "cloudasset.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -356,16 +382,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = AssetServiceClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -395,14 +419,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = AssetServiceClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index edd142a8084e..bd17439996cd 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -131,10 +131,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert AssetServiceClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - AssetServiceClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not 
hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + AssetServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert AssetServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert AssetServiceClient._read_environment_variables() == (False, "never", None) @@ -153,6 +165,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert AssetServiceClient._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert AssetServiceClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert AssetServiceClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AssetServiceClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AssetServiceClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert AssetServiceClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert AssetServiceClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert AssetServiceClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}):
+ assert AssetServiceClient._use_client_cert_effective() is False
+
+ # Test case 9: Test when `should_use_client_cert` is unavailable and the
+ # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set.
+ # In this case, the method should return False, which is the default value.
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+ with mock.patch.dict(os.environ, clear=True):
+ assert AssetServiceClient._use_client_cert_effective() is False
+
+ # Test case 10: Test when `should_use_client_cert` is unavailable and the
+ # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+ # The method should raise a ValueError as the environment variable must be either
+ # "true" or "false".
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+ with pytest.raises(ValueError):
+ AssetServiceClient._use_client_cert_effective()
+
+ # Test case 11: Test when `should_use_client_cert` is available and the
+ # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+ # The method should return False as the environment variable is set to an invalid value.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+ assert AssetServiceClient._use_client_cert_effective() is False
+
+ # Test case 12: Test when `should_use_client_cert` is available and the
+ # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+ # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert AssetServiceClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -396,12 +493,6 @@ def test_asset_service_client_client_options(client_class, transport_class, tran client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -548,6 +639,114 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -583,13 +782,6 @@ def test_asset_service_client_get_mtls_endpoint_and_cert_source(client_class): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ AssetServiceClient, AssetServiceAsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index 6eb63b8a4f3c..4f2406bad68f 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -142,6 +142,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "iamcredentials.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -293,16 +319,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = IAMCredentialsClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -332,14 +356,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = IAMCredentialsClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 90ddb8c449e9..d9f2f0a4beb6 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -121,10 +121,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert IAMCredentialsClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - IAMCredentialsClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, 
{"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + IAMCredentialsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert IAMCredentialsClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert IAMCredentialsClient._read_environment_variables() == (False, "never", None) @@ -143,6 +155,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert IAMCredentialsClient._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert IAMCredentialsClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert IAMCredentialsClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert IAMCredentialsClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert IAMCredentialsClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert IAMCredentialsClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert IAMCredentialsClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert IAMCredentialsClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert IAMCredentialsClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert IAMCredentialsClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with pytest.raises(ValueError): + IAMCredentialsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+ assert IAMCredentialsClient._use_client_cert_effective() is False
+
+ # Test case 12: Test when `should_use_client_cert` is available and the
+ # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+ # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}):
+ assert IAMCredentialsClient._use_client_cert_effective() is False
+
 def test__get_client_cert_source():
 mock_provided_cert_source = mock.Mock()
 mock_default_cert_source = mock.Mock()
@@ -386,12 +483,6 @@ def test_iam_credentials_client_client_options(client_class, transport_class, tr
 client = client_class(transport=transport_name)
 assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
- # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
- with pytest.raises(ValueError) as excinfo:
- client = client_class(transport=transport_name)
- assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-
 # Check the case quota_project_id is provided
 options = client_options.ClientOptions(quota_project_id="octopus")
 with mock.patch.object(transport_class, '__init__') as patched:
@@ -538,6 +629,114 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class):
 assert api_endpoint == mock_api_endpoint
 assert cert_source is None
+ # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -573,13 +772,6 @@ def test_iam_credentials_client_get_mtls_endpoint_and_cert_source(client_class): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ IAMCredentialsClient, IAMCredentialsAsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index 9e42d07b6777..3accde24c683 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -151,6 +151,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "eventarc.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -401,16 +427,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = EventarcClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -440,14 +464,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = EventarcClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index c815caafc5be..88e728b9cac6 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -141,10 +141,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert EventarcClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - EventarcClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not 
hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + EventarcClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert EventarcClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert EventarcClient._read_environment_variables() == (False, "never", None) @@ -163,6 +175,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert EventarcClient._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert EventarcClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert EventarcClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert EventarcClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert EventarcClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert EventarcClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert EventarcClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert EventarcClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert EventarcClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert EventarcClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with pytest.raises(ValueError): + EventarcClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + assert EventarcClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert EventarcClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -406,12 +503,6 @@ def test_eventarc_client_client_options(client_class, transport_class, transport client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -558,6 +649,114 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -593,13 +792,6 @@ def test_eventarc_client_get_mtls_endpoint_and_cert_source(client_class): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ EventarcClient, EventarcAsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index b6f74fe61442..b2e701b09517 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -135,6 +135,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -352,16 +378,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = ConfigServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -391,14 +415,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = ConfigServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index a2d4b1a336a3..8c37951cb535 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -132,6 +132,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. 
+ + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -283,16 +309,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = LoggingServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. 
client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -322,14 +346,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = LoggingServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index 6cf913c0dbba..6ff3b7ac0250 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -133,6 +133,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def 
_use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -284,16 +310,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = MetricsServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the 
client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -323,14 +347,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = MetricsServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index c284e58f8b7d..9df459055fea 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -122,10 +122,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert 
ConfigServiceV2Client._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - ConfigServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + ConfigServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert ConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert ConfigServiceV2Client._read_environment_variables() == (False, "never", None) @@ -144,6 +156,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert ConfigServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert ConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert ConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert ConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert ConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert ConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert ConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert ConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with pytest.raises(ValueError): + ConfigServiceV2Client._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + assert ConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert ConfigServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -376,12 +473,6 @@ def test_config_service_v2_client_client_options(client_class, transport_class, client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -526,6 +617,114 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -561,13 +760,6 @@ def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ ConfigServiceV2Client, ConfigServiceV2AsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 3a3740300bfb..614126cfdb9d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -123,10 +123,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - LoggingServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert LoggingServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert LoggingServiceV2Client._read_environment_variables() == (False, "never", None) @@ -145,6 +157,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, clear=True):
+            assert LoggingServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 10: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should raise a ValueError as the environment variable must be either
+    # "true" or "false".
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            with pytest.raises(ValueError):
+                LoggingServiceV2Client._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            assert LoggingServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+    # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -377,12 +474,6 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -527,6 +618,114 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -562,13 +761,6 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ LoggingServiceV2Client, LoggingServiceV2AsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 4633e315a160..027c8ab3725b 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -121,10 +121,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - MetricsServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + MetricsServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert MetricsServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with 
mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert MetricsServiceV2Client._read_environment_variables() == (False, "never", None) @@ -143,6 +155,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert MetricsServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert MetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert MetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert MetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert MetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert MetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert MetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert MetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert MetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, clear=True):
+            assert MetricsServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 10: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should raise a ValueError as the environment variable must be either
+    # "true" or "false".
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            with pytest.raises(ValueError):
+                MetricsServiceV2Client._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            assert MetricsServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+    # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert MetricsServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -375,12 +472,6 @@ def test_metrics_service_v2_client_client_options(client_class, transport_class, client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -525,6 +616,114 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -560,13 +759,6 @@ def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ MetricsServiceV2Client, MetricsServiceV2AsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index 696ac4be7cb4..7889ba044b81 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -135,6 +135,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -352,16 +378,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = BaseConfigServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -391,14 +415,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = BaseConfigServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index a2d4b1a336a3..8c37951cb535 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -132,6 +132,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. 
+ + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -283,16 +309,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = LoggingServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. 
client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -322,14 +346,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = LoggingServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index 377daf633c2a..479ae7c0329e 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -133,6 +133,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "logging.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" 
+ @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -284,16 +310,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = BaseMetricsServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or 
`always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -323,14 +347,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. """ - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = BaseMetricsServiceV2Client._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index 97d56ab135ef..ca28dea89cc3 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -122,10 +122,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": 
"false"}): assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BaseConfigServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + BaseConfigServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert BaseConfigServiceV2Client._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert BaseConfigServiceV2Client._read_environment_variables() == (False, "never", None) @@ -144,6 +156,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert BaseConfigServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert BaseConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert BaseConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BaseConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BaseConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert BaseConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert BaseConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert BaseConfigServiceV2Client._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert BaseConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert BaseConfigServiceV2Client._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            with pytest.raises(ValueError):
+                BaseConfigServiceV2Client._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            assert BaseConfigServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+    # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}):
+            with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}):
+                assert BaseConfigServiceV2Client._use_client_cert_effective() is False
+
 def test__get_client_cert_source():
     mock_provided_cert_source = mock.Mock()
     mock_default_cert_source = mock.Mock()
@@ -376,12 +473,6 @@ def test_base_config_service_v2_client_client_options(client_class, transport_cl
         client = client_class(transport=transport_name)
     assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
 
-    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -526,6 +617,114 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -561,13 +760,6 @@ def test_base_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_ assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ BaseConfigServiceV2Client, BaseConfigServiceV2AsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 3a3740300bfb..614126cfdb9d 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -123,10 +123,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - LoggingServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + LoggingServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert LoggingServiceV2Client._read_environment_variables() == ( + 
False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert LoggingServiceV2Client._read_environment_variables() == (False, "never", None) @@ -145,6 +157,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert LoggingServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert LoggingServiceV2Client._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, clear=True):
+            assert LoggingServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 10: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should raise a ValueError as the environment variable must be either
+    # "true" or "false".
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            with pytest.raises(ValueError):
+                LoggingServiceV2Client._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            assert LoggingServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+    # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert LoggingServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -377,12 +474,6 @@ def test_logging_service_v2_client_client_options(client_class, transport_class, client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -527,6 +618,114 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -562,13 +761,6 @@ def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_clas assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ LoggingServiceV2Client, LoggingServiceV2AsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index bcb2c8346fbd..d843efe3c1ba 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -121,10 +121,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - BaseMetricsServiceV2Client._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + BaseMetricsServiceV2Client._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert BaseMetricsServiceV2Client._read_environment_variables() 
== ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "never", None) @@ -143,6 +155,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert BaseMetricsServiceV2Client._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}):
+            assert BaseMetricsServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 9: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set.
+    # In this case, the method should return False, which is the default value.
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, clear=True):
+            assert BaseMetricsServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 10: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should raise a ValueError as the environment variable must be either
+    # "true" or "false".
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            with pytest.raises(ValueError):
+                BaseMetricsServiceV2Client._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            assert BaseMetricsServiceV2Client._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+    # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert BaseMetricsServiceV2Client._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -375,12 +472,6 @@ def test_base_metrics_service_v2_client_client_options(client_class, transport_c client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -525,6 +616,114 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -560,13 +759,6 @@ def test_base_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ BaseMetricsServiceV2Client, BaseMetricsServiceV2AsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index e02ecf33f133..f3da10c2fccb 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -169,6 +169,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "redis.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -320,16 +346,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = CloudRedisClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -359,14 +383,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = CloudRedisClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index 02549e2f2f17..ae66b5a92c6e 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -139,10 +139,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert CloudRedisClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - CloudRedisClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not 
hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + CloudRedisClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert CloudRedisClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert CloudRedisClient._read_environment_variables() == (False, "never", None) @@ -161,6 +173,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert CloudRedisClient._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}):
+            assert CloudRedisClient._use_client_cert_effective() is False
+
+    # Test case 9: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set.
+    # In this case, the method should return False, which is the default value.
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, clear=True):
+            assert CloudRedisClient._use_client_cert_effective() is False
+
+    # Test case 10: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should raise a ValueError as the environment variable must be either
+    # "true" or "false".
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            with pytest.raises(ValueError):
+                CloudRedisClient._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}):
+            assert CloudRedisClient._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also,
+    # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset.
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert CloudRedisClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -404,12 +501,6 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -556,6 +647,114 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -591,13 +790,6 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ CloudRedisClient, CloudRedisAsyncClient ]) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index 420273052888..32c43e15f1a6 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -169,6 +169,32 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "redis.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -320,16 +346,14 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_optio DeprecationWarning) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = CloudRedisClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -359,14 +383,12 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower() + use_client_cert = CloudRedisClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py index 2aed5640c0aa..44403c662760 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -139,10 +139,22 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): assert CloudRedisClient._read_environment_variables() == (False, "auto", None) - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - CloudRedisClient._read_environment_variables() - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + 
if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + CloudRedisClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert CloudRedisClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert CloudRedisClient._read_environment_variables() == (False, "never", None) @@ -161,6 +173,91 @@ def test__read_environment_variables(): with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): assert CloudRedisClient._read_environment_variables() == (False, "auto", "foo.com") + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=True): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch("google.auth.transport.mtls.should_use_client_cert", return_value=False): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"}): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert CloudRedisClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"}): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + with pytest.raises(ValueError): + CloudRedisClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}): + assert CloudRedisClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert CloudRedisClient._use_client_cert_effective() is False + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -404,12 +501,6 @@ def test_cloud_redis_client_client_options(client_class, transport_class, transp client = client_class(transport=transport_name) assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, '__init__') as patched: @@ -556,6 +647,114 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -591,13 +790,6 @@ def test_cloud_redis_client_get_mtls_endpoint_and_cert_source(client_class): assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert str(excinfo.value) == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - @pytest.mark.parametrize("client_class", [ CloudRedisClient, CloudRedisAsyncClient ]) From d9a2b48629edd3102bf0c0942ca5f4c25953eb69 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 14 Nov 2025 10:23:17 -0500 Subject: [PATCH 1335/1339] fix: drop packaging and pkg_resources (#2477) Co-authored-by: Daniel Sanche --- .../%name_%version/%sub/__init__.py.j2 | 115 ++++++------ .../gapic/templates/setup.py.j2 | 1 - packages/gapic-generator/noxfile.py | 8 - .../asset/google/cloud/asset_v1/__init__.py | 169 ++++++++++-------- .../tests/integration/goldens/asset/setup.py | 1 - .../google/iam/credentials_v1/__init__.py | 81 +++++---- .../integration/goldens/credentials/setup.py | 1 - .../google/cloud/eventarc_v1/__init__.py | 139 +++++++------- .../integration/goldens/eventarc/setup.py | 1 - .../google/cloud/logging_v2/__init__.py | 169 ++++++++++-------- .../integration/goldens/logging/setup.py | 1 - .../google/cloud/logging_v2/__init__.py | 169 ++++++++++-------- .../goldens/logging_internal/setup.py | 1 - .../redis/google/cloud/redis_v1/__init__.py | 119 ++++++------ .../tests/integration/goldens/redis/setup.py | 1 - .../google/cloud/redis_v1/__init__.py | 93 ++++++---- .../goldens/redis_selective/setup.py | 1 - 17 files changed, 587 insertions(+), 483 deletions(-) diff --git a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index abe33f857575..9eb968dd011b 100644 --- a/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ 
b/packages/gapic-generator/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -5,10 +5,46 @@ {% set package_path = api.naming.module_namespace|join('.') + "." + api.naming.versioned_module_name %} from {{package_path}} import gapic_version as package_version +import google.api_core as api_core +import sys + __version__ = package_version.__version__ +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata -import google.api_core as api_core +{# Import subpackages. -#} +{% for subpackage, _ in api.subpackages|dictsort %} +from . import {{ subpackage }} +{% endfor %} + +{# Import services for this package. -#} +{% for service in api.services.values()|sort(attribute='name') + if service.meta.address.subpackage == api.subpackage_view %} +from .services.{{ service.name|snake_case }} import {{ service.client_name }} +{% if 'grpc' in opts.transport %} +from .services.{{ service.name|snake_case }} import {{ service.async_client_name }} +{% endif %} +{% endfor %} + +{# Import messages and enums from each proto. + It is safe to import all of the messages into the same namespace here, + because protocol buffers itself enforces selector uniqueness within + a proto package. 
+-#} +{% for proto in api.protos.values()|sort(attribute='name') + if proto.meta.address.subpackage == api.subpackage_view %} +{% for message in proto.messages.values()|sort(attribute='name') %} +from .types.{{ proto.module_name }} import {{ message.name }} +{% endfor %} +{% for enum in proto.enums.values()|sort(attribute='name') %} +from .types.{{ proto.module_name }} import {{ enum.name }} +{% endfor %} +{% endfor %} if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER {# TODO(api_core): remove `type:ignore` below when minimum version of api_core makes the else clause unnecessary. #} @@ -45,27 +81,35 @@ else: # pragma: NO COVER f"then update {_package_label}.", FutureWarning) - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources - - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: - return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. 
+ # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. + break + return tuple(parts) + + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple + return (None, "--") _dependency_package = "google.protobuf" _next_supported_version = "4.25.8" @@ -93,35 +137,6 @@ else: # pragma: NO COVER "using a supported version of Python; see " + "https://devguide.python.org/versions/") -{# Import subpackages. -#} -{% for subpackage, _ in api.subpackages|dictsort %} -from . import {{ subpackage }} -{% endfor %} - -{# Import services for this package. -#} -{% for service in api.services.values()|sort(attribute='name') - if service.meta.address.subpackage == api.subpackage_view %} -from .services.{{ service.name|snake_case }} import {{ service.client_name }} -{% if 'grpc' in opts.transport %} -from .services.{{ service.name|snake_case }} import {{ service.async_client_name }} -{% endif %} -{% endfor %} - -{# Import messages and enums from each proto. - It is safe to import all of the messages into the same namespace here, - because protocol buffers itself enforces selector uniqueness within - a proto package. --#} -{% for proto in api.protos.values()|sort(attribute='name') - if proto.meta.address.subpackage == api.subpackage_view %} -{% for message in proto.messages.values()|sort(attribute='name') %} -from .types.{{ proto.module_name }} import {{ message.name }} -{% endfor %} -{% for enum in proto.enums.values()|sort(attribute='name') %} -from .types.{{ proto.module_name }} import {{ enum.name }} -{% endfor %} -{% endfor %} - {# Define __all__. This requires the full set of imported names, so we iterate over them again. 
diff --git a/packages/gapic-generator/gapic/templates/setup.py.j2 b/packages/gapic-generator/gapic/templates/setup.py.j2 index 5f2f803315cd..6573b942db08 100644 --- a/packages/gapic-generator/gapic/templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/templates/setup.py.j2 @@ -39,7 +39,6 @@ dependencies = [ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", {# Explicitly exclude protobuf versions mentioned in https://cloud.google.com/support/bulletins#GCP-2022-019 #} diff --git a/packages/gapic-generator/noxfile.py b/packages/gapic-generator/noxfile.py index 0201b17dfe91..e99bfad597ec 100644 --- a/packages/gapic-generator/noxfile.py +++ b/packages/gapic-generator/noxfile.py @@ -64,8 +64,6 @@ def unit(session): "pyfakefs", "grpcio-status", "proto-plus", - "setuptools", # TODO: Remove when not needed in __init__.py.j2 - "packaging", # TODO: Remove when not needed in __init__.py.j2 ) session.install("-e", ".") session.run( @@ -501,8 +499,6 @@ def run_showcase_unit_tests(session, fail_under=100, rest_async_io_enabled=False "pytest-xdist", "asyncmock; python_version < '3.8'", "pytest-asyncio", - "setuptools", # TODO: Remove when not needed in __init__.py.j2 - "packaging", # TODO: Remove when not needed in __init__.py.j2 ) # Run the tests. # NOTE: async rest is not supported against the minimum supported version of google-api-core. 
@@ -617,8 +613,6 @@ def showcase_mypy( "types-protobuf", "types-requests", "types-dataclasses", - "setuptools", # TODO: Remove when not needed in __init__.py.j2 - "packaging", # TODO: Remove when not needed in __init__.py.j2 ) with showcase_library(session, templates=templates, other_opts=other_opts) as lib: @@ -749,8 +743,6 @@ def mypy(session): "types-PyYAML", "types-dataclasses", "click==8.1.3", - "setuptools", # TODO: Remove when not needed in __init__.py.j2 - "packaging", # TODO: Remove when not needed in __init__.py.j2 ) session.install(".") session.run("mypy", "-p", "gapic") diff --git a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 4f4362db8ddd..31068ac47299 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -15,86 +15,17 @@ # from google.cloud.asset_v1 import gapic_version as package_version -__version__ = package_version.__version__ - - import google.api_core as api_core +import sys -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.asset_v1") # type: ignore - api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore -else: # pragma: NO COVER - # An older version of api_core is installed which does not define the - # functions above. We do equivalent checks manually. - try: - import warnings - import sys - - _py_version_str = sys.version.split()[0] - _package_label = "google.cloud.asset_v1" - if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. 
" + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) - if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) - - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources - - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: - return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") +__version__ = package_version.__version__ - _dependency_package = "google.protobuf" - _next_supported_version = "4.25.8" - _next_supported_version_tuple = (4, 25, 8) - _recommendation = " (we recommend 6.x)" - (_version_used, _version_used_string) = _get_version(_dependency_package) - if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." 
+ - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) - except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata from .services.asset_service import AssetServiceClient @@ -178,6 +109,90 @@ def _get_version(dependency_name): from .types.assets import TimeWindow from .types.assets import VersionedResource +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.asset_v1") # type: ignore + api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.asset_v1" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. 
+ break + return tuple(parts) + + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + __all__ = ( 'AssetServiceAsyncClient', 'AnalyzeIamPolicyLongrunningMetadata', diff --git a/packages/gapic-generator/tests/integration/goldens/asset/setup.py b/packages/gapic-generator/tests/integration/goldens/asset/setup.py index f7d2a2bdf837..db0a03c5906d 100755 --- a/packages/gapic-generator/tests/integration/goldens/asset/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/asset/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 3dd77183a791..2890169a652d 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -15,10 +15,30 @@ # from google.iam.credentials_v1 import gapic_version as package_version +import google.api_core as api_core +import sys + __version__ = package_version.__version__ +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata -import google.api_core as api_core + +from .services.iam_credentials 
import IAMCredentialsClient +from .services.iam_credentials import IAMCredentialsAsyncClient + +from .types.common import GenerateAccessTokenRequest +from .types.common import GenerateAccessTokenResponse +from .types.common import GenerateIdTokenRequest +from .types.common import GenerateIdTokenResponse +from .types.common import SignBlobRequest +from .types.common import SignBlobResponse +from .types.common import SignJwtRequest +from .types.common import SignJwtResponse if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER api_core.check_python_version("google.iam.credentials_v1") # type: ignore @@ -48,27 +68,35 @@ f"then update {_package_label}.", FutureWarning) - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. 
+ break + return tuple(parts) - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") _dependency_package = "google.protobuf" _next_supported_version = "4.25.8" @@ -96,19 +124,6 @@ def _get_version(dependency_name): "using a supported version of Python; see " + "https://devguide.python.org/versions/") - -from .services.iam_credentials import IAMCredentialsClient -from .services.iam_credentials import IAMCredentialsAsyncClient - -from .types.common import GenerateAccessTokenRequest -from .types.common import GenerateAccessTokenResponse -from .types.common import GenerateIdTokenRequest -from .types.common import GenerateIdTokenResponse -from .types.common import SignBlobRequest -from .types.common import SignBlobResponse -from .types.common import SignJwtRequest -from .types.common import SignJwtResponse - __all__ = ( 'IAMCredentialsAsyncClient', 'GenerateAccessTokenRequest', diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py index f4ebe518e8e9..eaba6dcd0ca9 100755 --- a/packages/gapic-generator/tests/integration/goldens/credentials/setup.py +++ 
b/packages/gapic-generator/tests/integration/goldens/credentials/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 714542db6118..20d981ec3ecc 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -15,10 +15,59 @@ # from google.cloud.eventarc_v1 import gapic_version as package_version +import google.api_core as api_core +import sys + __version__ = package_version.__version__ +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata -import google.api_core as api_core + +from .services.eventarc import EventarcClient +from .services.eventarc import EventarcAsyncClient + +from .types.channel import Channel +from .types.channel_connection import ChannelConnection +from .types.discovery import EventType +from .types.discovery import FilteringAttribute +from .types.discovery import Provider +from .types.eventarc import CreateChannelConnectionRequest +from .types.eventarc import CreateChannelRequest +from .types.eventarc import CreateTriggerRequest +from .types.eventarc import DeleteChannelConnectionRequest +from 
.types.eventarc import DeleteChannelRequest +from .types.eventarc import DeleteTriggerRequest +from .types.eventarc import GetChannelConnectionRequest +from .types.eventarc import GetChannelRequest +from .types.eventarc import GetGoogleChannelConfigRequest +from .types.eventarc import GetProviderRequest +from .types.eventarc import GetTriggerRequest +from .types.eventarc import ListChannelConnectionsRequest +from .types.eventarc import ListChannelConnectionsResponse +from .types.eventarc import ListChannelsRequest +from .types.eventarc import ListChannelsResponse +from .types.eventarc import ListProvidersRequest +from .types.eventarc import ListProvidersResponse +from .types.eventarc import ListTriggersRequest +from .types.eventarc import ListTriggersResponse +from .types.eventarc import OperationMetadata +from .types.eventarc import UpdateChannelRequest +from .types.eventarc import UpdateGoogleChannelConfigRequest +from .types.eventarc import UpdateTriggerRequest +from .types.google_channel_config import GoogleChannelConfig +from .types.trigger import CloudRun +from .types.trigger import Destination +from .types.trigger import EventFilter +from .types.trigger import GKE +from .types.trigger import Pubsub +from .types.trigger import StateCondition +from .types.trigger import Transport +from .types.trigger import Trigger if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore @@ -48,27 +97,35 @@ f"then update {_package_label}.", FutureWarning) - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. 
+ Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. + break + return tuple(parts) - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") _dependency_package = "google.protobuf" _next_supported_version = "4.25.8" @@ -96,48 +153,6 @@ def _get_version(dependency_name): "using a supported version of Python; see " + "https://devguide.python.org/versions/") - -from .services.eventarc import EventarcClient -from .services.eventarc import EventarcAsyncClient - -from .types.channel import Channel -from .types.channel_connection import ChannelConnection -from .types.discovery import EventType -from .types.discovery import FilteringAttribute -from .types.discovery import Provider -from .types.eventarc import CreateChannelConnectionRequest -from .types.eventarc import 
CreateChannelRequest -from .types.eventarc import CreateTriggerRequest -from .types.eventarc import DeleteChannelConnectionRequest -from .types.eventarc import DeleteChannelRequest -from .types.eventarc import DeleteTriggerRequest -from .types.eventarc import GetChannelConnectionRequest -from .types.eventarc import GetChannelRequest -from .types.eventarc import GetGoogleChannelConfigRequest -from .types.eventarc import GetProviderRequest -from .types.eventarc import GetTriggerRequest -from .types.eventarc import ListChannelConnectionsRequest -from .types.eventarc import ListChannelConnectionsResponse -from .types.eventarc import ListChannelsRequest -from .types.eventarc import ListChannelsResponse -from .types.eventarc import ListProvidersRequest -from .types.eventarc import ListProvidersResponse -from .types.eventarc import ListTriggersRequest -from .types.eventarc import ListTriggersResponse -from .types.eventarc import OperationMetadata -from .types.eventarc import UpdateChannelRequest -from .types.eventarc import UpdateGoogleChannelConfigRequest -from .types.eventarc import UpdateTriggerRequest -from .types.google_channel_config import GoogleChannelConfig -from .types.trigger import CloudRun -from .types.trigger import Destination -from .types.trigger import EventFilter -from .types.trigger import GKE -from .types.trigger import Pubsub -from .types.trigger import StateCondition -from .types.trigger import Transport -from .types.trigger import Trigger - __all__ = ( 'EventarcAsyncClient', 'Channel', diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py index aeb6fdeb1c6a..8f8887b4d57a 100755 --- a/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/eventarc/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; 
python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 65abc3b298da..242a43882502 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -15,86 +15,17 @@ # from google.cloud.logging_v2 import gapic_version as package_version -__version__ = package_version.__version__ - - import google.api_core as api_core +import sys -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.logging_v2") # type: ignore - api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER - # An older version of api_core is installed which does not define the - # functions above. We do equivalent checks manually. - try: - import warnings - import sys - - _py_version_str = sys.version.split()[0] - _package_label = "google.cloud.logging_v2" - if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. 
" + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) - if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) - - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources - - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: - return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") +__version__ = package_version.__version__ - _dependency_package = "google.protobuf" - _next_supported_version = "4.25.8" - _next_supported_version_tuple = (4, 25, 8) - _recommendation = " (we recommend 6.x)" - (_version_used, _version_used_string) = _get_version(_dependency_package) - if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on " + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." 
+ - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) - except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata from .services.config_service_v2 import ConfigServiceV2Client @@ -181,6 +112,90 @@ def _get_version(dependency_name): from .types.logging_metrics import LogMetric from .types.logging_metrics import UpdateLogMetricRequest +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.logging_v2" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. 
" + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. 
+ break + return tuple(parts) + + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + __all__ = ( 'ConfigServiceV2AsyncClient', 'LoggingServiceV2AsyncClient', diff --git a/packages/gapic-generator/tests/integration/goldens/logging/setup.py b/packages/gapic-generator/tests/integration/goldens/logging/setup.py index 0c103cd02f06..cb6015c69b10 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py index ab630bbc65f9..38f3dc49f500 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -15,86 +15,17 @@ # from google.cloud.logging_v2 import gapic_version as package_version -__version__ = package_version.__version__ - - import google.api_core as api_core +import sys -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER - api_core.check_python_version("google.cloud.logging_v2") # type: ignore - api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER - # An older version of api_core is 
installed which does not define the - # functions above. We do equivalent checks manually. - try: - import warnings - import sys - - _py_version_str = sys.version.split()[0] - _package_label = "google.cloud.logging_v2" - if sys.version_info < (3, 9): - warnings.warn("You are using a non-supported Python version " + - f"({_py_version_str}). Google will not post any further " + - f"updates to {_package_label} supporting this Python version. " + - "Please upgrade to the latest Python version, or at " + - f"least to Python 3.9, and then update {_package_label}.", - FutureWarning) - if sys.version_info[:2] == (3, 9): - warnings.warn(f"You are using a Python version ({_py_version_str}) " + - f"which Google will stop supporting in {_package_label} in " + - "January 2026. Please " + - "upgrade to the latest Python version, or at " + - "least to Python 3.10, before then, and " + - f"then update {_package_label}.", - FutureWarning) - - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources - - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: - return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") +__version__ = package_version.__version__ - _dependency_package = "google.protobuf" - _next_supported_version = "4.25.8" - _next_supported_version_tuple = (4, 25, 8) - _recommendation = " (we recommend 6.x)" - (_version_used, _version_used_string) = _get_version(_dependency_package) - if _version_used and _version_used < _next_supported_version_tuple: - warnings.warn(f"Package {_package_label} depends on 
" + - f"{_dependency_package}, currently installed at version " + - f"{_version_used_string}. Future updates to " + - f"{_package_label} will require {_dependency_package} at " + - f"version {_next_supported_version} or higher{_recommendation}." + - " Please ensure " + - "that either (a) your Python environment doesn't pin the " + - f"version of {_dependency_package}, so that updates to " + - f"{_package_label} can require the higher version, or " + - "(b) you manually update your Python environment to use at " + - f"least version {_next_supported_version} of " + - f"{_dependency_package}.", - FutureWarning) - except Exception: - warnings.warn("Could not determine the version of Python " + - "currently being used. To continue receiving " + - "updates for {_package_label}, ensure you are " + - "using a supported version of Python; see " + - "https://devguide.python.org/versions/") +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata from .services.config_service_v2 import BaseConfigServiceV2Client @@ -181,6 +112,90 @@ def _get_version(dependency_name): from .types.logging_metrics import LogMetric from .types.logging_metrics import UpdateLogMetricRequest +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER + api_core.check_python_version("google.cloud.logging_v2") # type: ignore + api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. 
+ try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.cloud.logging_v2" + if sys.version_info < (3, 9): + warnings.warn("You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning) + if sys.version_info[:2] == (3, 9): + warnings.warn(f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning) + + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. 
+ break + return tuple(parts) + + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn(f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning) + except Exception: + warnings.warn("Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/") + __all__ = ( 'BaseConfigServiceV2AsyncClient', 'BaseMetricsServiceV2AsyncClient', diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py index 0c103cd02f06..cb6015c69b10 100755 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/logging_internal/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index 8db2b26e6532..c5182e2f8447 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -15,10 +15,49 @@ # from google.cloud.redis_v1 import gapic_version as package_version +import google.api_core as api_core +import sys + __version__ = package_version.__version__ +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata -import google.api_core as api_core + +from .services.cloud_redis 
import CloudRedisClient +from .services.cloud_redis import CloudRedisAsyncClient + +from .types.cloud_redis import CreateInstanceRequest +from .types.cloud_redis import DeleteInstanceRequest +from .types.cloud_redis import ExportInstanceRequest +from .types.cloud_redis import FailoverInstanceRequest +from .types.cloud_redis import GcsDestination +from .types.cloud_redis import GcsSource +from .types.cloud_redis import GetInstanceAuthStringRequest +from .types.cloud_redis import GetInstanceRequest +from .types.cloud_redis import ImportInstanceRequest +from .types.cloud_redis import InputConfig +from .types.cloud_redis import Instance +from .types.cloud_redis import InstanceAuthString +from .types.cloud_redis import ListInstancesRequest +from .types.cloud_redis import ListInstancesResponse +from .types.cloud_redis import LocationMetadata +from .types.cloud_redis import MaintenancePolicy +from .types.cloud_redis import MaintenanceSchedule +from .types.cloud_redis import NodeInfo +from .types.cloud_redis import OperationMetadata +from .types.cloud_redis import OutputConfig +from .types.cloud_redis import PersistenceConfig +from .types.cloud_redis import RescheduleMaintenanceRequest +from .types.cloud_redis import TlsCertificate +from .types.cloud_redis import UpdateInstanceRequest +from .types.cloud_redis import UpgradeInstanceRequest +from .types.cloud_redis import WeeklyMaintenanceWindow +from .types.cloud_redis import ZoneMetadata if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER api_core.check_python_version("google.cloud.redis_v1") # type: ignore @@ -48,27 +87,35 @@ f"then update {_package_label}.", FutureWarning) - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. 
+ Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. + break + return tuple(parts) - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") _dependency_package = "google.protobuf" _next_supported_version = "4.25.8" @@ -96,38 +143,6 @@ def _get_version(dependency_name): "using a supported version of Python; see " + "https://devguide.python.org/versions/") - -from .services.cloud_redis import CloudRedisClient -from .services.cloud_redis import CloudRedisAsyncClient - -from .types.cloud_redis import CreateInstanceRequest -from .types.cloud_redis import DeleteInstanceRequest -from .types.cloud_redis import ExportInstanceRequest -from .types.cloud_redis import 
FailoverInstanceRequest -from .types.cloud_redis import GcsDestination -from .types.cloud_redis import GcsSource -from .types.cloud_redis import GetInstanceAuthStringRequest -from .types.cloud_redis import GetInstanceRequest -from .types.cloud_redis import ImportInstanceRequest -from .types.cloud_redis import InputConfig -from .types.cloud_redis import Instance -from .types.cloud_redis import InstanceAuthString -from .types.cloud_redis import ListInstancesRequest -from .types.cloud_redis import ListInstancesResponse -from .types.cloud_redis import LocationMetadata -from .types.cloud_redis import MaintenancePolicy -from .types.cloud_redis import MaintenanceSchedule -from .types.cloud_redis import NodeInfo -from .types.cloud_redis import OperationMetadata -from .types.cloud_redis import OutputConfig -from .types.cloud_redis import PersistenceConfig -from .types.cloud_redis import RescheduleMaintenanceRequest -from .types.cloud_redis import TlsCertificate -from .types.cloud_redis import UpdateInstanceRequest -from .types.cloud_redis import UpgradeInstanceRequest -from .types.cloud_redis import WeeklyMaintenanceWindow -from .types.cloud_redis import ZoneMetadata - __all__ = ( 'CloudRedisAsyncClient', 'CloudRedisClient', diff --git a/packages/gapic-generator/tests/integration/goldens/redis/setup.py b/packages/gapic-generator/tests/integration/goldens/redis/setup.py index cf35d014a47d..026674b2d3e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", diff --git 
a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py index b781ff92cdb5..1f7bad3796c9 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py @@ -15,10 +15,36 @@ # from google.cloud.redis_v1 import gapic_version as package_version +import google.api_core as api_core +import sys + __version__ = package_version.__version__ +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata -import google.api_core as api_core + +from .services.cloud_redis import CloudRedisClient +from .services.cloud_redis import CloudRedisAsyncClient + +from .types.cloud_redis import CreateInstanceRequest +from .types.cloud_redis import DeleteInstanceRequest +from .types.cloud_redis import GetInstanceRequest +from .types.cloud_redis import Instance +from .types.cloud_redis import ListInstancesRequest +from .types.cloud_redis import ListInstancesResponse +from .types.cloud_redis import MaintenancePolicy +from .types.cloud_redis import MaintenanceSchedule +from .types.cloud_redis import NodeInfo +from .types.cloud_redis import OperationMetadata +from .types.cloud_redis import PersistenceConfig +from .types.cloud_redis import TlsCertificate +from .types.cloud_redis import UpdateInstanceRequest +from .types.cloud_redis import WeeklyMaintenanceWindow if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER api_core.check_python_version("google.cloud.redis_v1") # type: ignore @@ -48,27 +74,35 @@ f"then update 
{_package_label}.", FutureWarning) - from packaging.version import parse as parse_version - - if sys.version_info < (3, 8): - import pkg_resources + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. + break + return tuple(parts) - def _get_version(dependency_name): - try: - version_string = pkg_resources.get_distribution(dependency_name).version - return (parse_version(version_string), version_string) - except pkg_resources.DistributionNotFound: + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple return (None, "--") - else: - from importlib import metadata - - def _get_version(dependency_name): - try: - version_string = metadata.version("requests") - parsed_version = parse_version(version_string) - return (parsed_version.release, version_string) - except metadata.PackageNotFoundError: - return (None, "--") _dependency_package = "google.protobuf" _next_supported_version = "4.25.8" @@ -96,25 +130,6 @@ def _get_version(dependency_name): "using a supported version of Python; see " + "https://devguide.python.org/versions/") - -from .services.cloud_redis import 
CloudRedisClient -from .services.cloud_redis import CloudRedisAsyncClient - -from .types.cloud_redis import CreateInstanceRequest -from .types.cloud_redis import DeleteInstanceRequest -from .types.cloud_redis import GetInstanceRequest -from .types.cloud_redis import Instance -from .types.cloud_redis import ListInstancesRequest -from .types.cloud_redis import ListInstancesResponse -from .types.cloud_redis import MaintenancePolicy -from .types.cloud_redis import MaintenanceSchedule -from .types.cloud_redis import NodeInfo -from .types.cloud_redis import OperationMetadata -from .types.cloud_redis import PersistenceConfig -from .types.cloud_redis import TlsCertificate -from .types.cloud_redis import UpdateInstanceRequest -from .types.cloud_redis import WeeklyMaintenanceWindow - __all__ = ( 'CloudRedisAsyncClient', 'CloudRedisClient', diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py index cf35d014a47d..026674b2d3e7 100755 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py +++ b/packages/gapic-generator/tests/integration/goldens/redis_selective/setup.py @@ -45,7 +45,6 @@ "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", "grpcio >= 1.33.2, < 2.0.0", "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", - "packaging", # TODO: Remove once we require versions of api core that include this "proto-plus >= 1.22.3, <2.0.0", "proto-plus >= 1.25.0, <2.0.0; python_version >= '3.13'", "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", From 01cbbf23038d820dcd4ef7f713ccb91417aa30e3 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Mon, 17 Nov 2025 15:40:53 -0500 Subject: [PATCH 1336/1339] chore: librarian release pull request: 20251114T152556Z (#2487) Co-authored-by: Victor Chudnovsky Co-authored-by: gemini-code-assist[bot] 
<176961590+gemini-code-assist[bot]@users.noreply.github.com> --- packages/gapic-generator/.librarian/state.yaml | 3 ++- packages/gapic-generator/CHANGELOG.md | 11 +++++++++++ packages/gapic-generator/setup.py | 2 +- 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/packages/gapic-generator/.librarian/state.yaml b/packages/gapic-generator/.librarian/state.yaml index 9d2284a957ca..cd753cf12d83 100644 --- a/packages/gapic-generator/.librarian/state.yaml +++ b/packages/gapic-generator/.librarian/state.yaml @@ -1,7 +1,8 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:4e3486fee9eba44b75a18c67f0c60fa0c4af675132305e3ff3fae2e9b94f94bb libraries: - id: gapic-generator - version: 1.29.0 + version: 1.30.0 + last_generated_commit: "" apis: [] source_roots: - . diff --git a/packages/gapic-generator/CHANGELOG.md b/packages/gapic-generator/CHANGELOG.md index 66122f589683..ee1765942598 100644 --- a/packages/gapic-generator/CHANGELOG.md +++ b/packages/gapic-generator/CHANGELOG.md @@ -4,6 +4,17 @@ [1]: https://pypi.org/project/gapic-generator/#history +## [1.30.0](https://github.com/googleapis/gapic-generator-python/compare/v1.29.0...v1.30.0) (2025-11-14) + +### Features + +* auto-enable mTLS when supported certificates are detected (#2472) ([4748760fe6d489d80cb17306157edb43d25f72b3](https://github.com/googleapis/gapic-generator-python/commit/4748760fe6d489d80cb17306157edb43d25f72b3)) + +### Bug Fixes + +* drop packaging and pkg_resources (#2477) ([b4824c50ce06642d7c5bf5cb04e7a4adc02892a8](https://github.com/googleapis/gapic-generator-python/commit/b4824c50ce06642d7c5bf5cb04e7a4adc02892a8)) +* add api_version to gapic_metadata.json (#2476) ([c424e33d5e9cfd75b383b4b0c7b77211f3604b79](https://github.com/googleapis/gapic-generator-python/commit/c424e33d5e9cfd75b383b4b0c7b77211f3604b79)) + ## [1.29.0](https://github.com/googleapis/gapic-generator-python/compare/v1.28.3...v1.29.0) (2025-10-23) diff --git 
a/packages/gapic-generator/setup.py b/packages/gapic-generator/setup.py index 3f8b4e2e1aa1..c02c9af9c0b6 100644 --- a/packages/gapic-generator/setup.py +++ b/packages/gapic-generator/setup.py @@ -22,7 +22,7 @@ name = "gapic-generator" description = "Google API Client Generator for Python" url = "https://github.com/googleapis/gapic-generator-python" -version = "1.29.0" +version = "1.30.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ # Ensure that the lower bounds of these dependencies match what we have in the From 1fdf60b3d835c19182269ac7f52dfe5e1babe6b6 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 18 Nov 2025 14:17:17 -0500 Subject: [PATCH 1337/1339] chore: remove obsolete scripts from generated code (#2489) --- .../fixup_%name_%version_keywords.py.j2 | 178 -------------- .../gapic/ads-templates/setup.py.j2 | 3 - .../fixup_%name_%version_keywords.py.j2 | 183 --------------- .../asset/scripts/fixup_asset_v1_keywords.py | 198 ---------------- .../scripts/fixup_credentials_v1_keywords.py | 179 -------------- .../scripts/fixup_eventarc_v1_keywords.py | 193 ---------------- .../scripts/fixup_logging_v2_keywords.py | 218 ------------------ .../scripts/fixup_logging_v2_keywords.py | 218 ------------------ .../redis/scripts/fixup_redis_v1_keywords.py | 186 --------------- .../scripts/fixup_redis_v1_keywords.py | 180 --------------- 10 files changed, 1736 deletions(-) delete mode 100644 packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 delete mode 100644 packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 delete mode 100755 packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py 
delete mode 100755 packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py delete mode 100755 packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py diff --git a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 deleted file mode 100644 index f197e8512d4f..000000000000 --- a/packages/gapic-generator/gapic/ads-templates/scripts/fixup_%name_%version_keywords.py.j2 +++ /dev/null @@ -1,178 +0,0 @@ -#! /usr/bin/env python3 -{% extends '_base.py.j2' %} -{% block content %} - -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - {% with all_methods = [] %} - {% for service in api.services.values() %}{% for method in service.methods.values() %} - {% do all_methods.append(method) %} - {% endfor %}{% endfor %} - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - {% for method in all_methods|sort(attribute='name')|unique(attribute='name') %} - '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), - {% endfor %} - } - {% endwith %} - - def 
leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), - {# Inline comments and formatting are currently stripped out. -#} - {# My current attempts at preverving comments and formatting -#} - {# keep the comments, but the formatting is run through a log -#} - {# chipper, and an extra comma gets added, which causes a -#} - {# parse error. -#} - cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer={{ api.naming.module_name }}CallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. 
- - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the {{ api.naming.module_name }} client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) -{% endblock %} diff --git a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 index 97ed76176dbe..0824bc697c27 100644 --- a/packages/gapic-generator/gapic/ads-templates/setup.py.j2 +++ b/packages/gapic-generator/gapic/ads-templates/setup.py.j2 @@ -57,9 +57,6 @@ setuptools.setup( setup_requires=[ 'libcst >= 0.2.5', ], - scripts=[ - 'scripts/fixup_{{ api.naming.versioned_module_name }}_keywords.py', - ], classifiers=[ release_status, "Intended Audience :: Developers", diff --git a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 b/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 deleted file mode 100644 index e408cfd34d47..000000000000 --- a/packages/gapic-generator/gapic/templates/scripts/fixup_%name_%version_keywords.py.j2 +++ /dev/null @@ -1,183 +0,0 @@ -#! 
/usr/bin/env python3 -{% extends '_base.py.j2' %} -{% block content %} - -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class {{ api.naming.module_name }}CallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - {% with all_methods = [] %} - {% for service in api.services.values() %}{% for method in service.methods.values() %} - {% do all_methods.append(method) %} - {% endfor %}{% endfor %} - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - {% for method in all_methods|sort(attribute='name')|unique(attribute='name') %} - '{{ method.name|snake_case }}': ({% for field in method.legacy_flattened_fields.values() %}'{{ field.name }}', {% endfor %}), - {% endfor %} - {% if opts.add_iam_methods %} - 'get_iam_policy': ('resource', 'options', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - {% endif %} - } - {% endwith %} - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), - {# Inline comments and formatting are currently stripped out. -#} - {# My current attempts at preverving comments and formatting -#} - {# keep the comments, but the formatting is run through a log -#} - {# chipper, and an extra comma gets added, which causes a -#} - {# parse error. -#} - cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer={{ api.naming.module_name }}CallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the {{ api.naming.module_name }} client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) -{% endblock %} diff --git a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py deleted file mode 100755 index 
a31f2d16b055..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/asset/scripts/fixup_asset_v1_keywords.py +++ /dev/null @@ -1,198 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class assetCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'analyze_iam_policy': ('analysis_query', 'saved_analysis_query', 'execution_timeout', ), - 'analyze_iam_policy_longrunning': ('analysis_query', 'output_config', 'saved_analysis_query', ), - 'analyze_move': ('resource', 'destination_parent', 'view', ), - 'analyze_org_policies': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), - 'analyze_org_policy_governed_assets': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), - 'analyze_org_policy_governed_containers': ('scope', 'constraint', 'filter', 'page_size', 'page_token', ), - 'batch_get_assets_history': 
('parent', 'asset_names', 'content_type', 'read_time_window', 'relationship_types', ), - 'batch_get_effective_iam_policies': ('scope', 'names', ), - 'create_feed': ('parent', 'feed_id', 'feed', ), - 'create_saved_query': ('parent', 'saved_query', 'saved_query_id', ), - 'delete_feed': ('name', ), - 'delete_saved_query': ('name', ), - 'export_assets': ('parent', 'output_config', 'read_time', 'asset_types', 'content_type', 'relationship_types', ), - 'get_feed': ('name', ), - 'get_saved_query': ('name', ), - 'list_assets': ('parent', 'read_time', 'asset_types', 'content_type', 'page_size', 'page_token', 'relationship_types', ), - 'list_feeds': ('parent', ), - 'list_saved_queries': ('parent', 'filter', 'page_size', 'page_token', ), - 'query_assets': ('parent', 'statement', 'job_reference', 'page_size', 'page_token', 'timeout', 'read_time_window', 'read_time', 'output_config', ), - 'search_all_iam_policies': ('scope', 'query', 'page_size', 'page_token', 'asset_types', 'order_by', ), - 'search_all_resources': ('scope', 'query', 'asset_types', 'page_size', 'page_token', 'order_by', 'read_mask', ), - 'update_feed': ('feed', 'update_mask', ), - 'update_saved_query': ('saved_query', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=assetCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the asset client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py deleted file mode 100755 index c2854aa54c44..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/credentials/scripts/fixup_credentials_v1_keywords.py +++ /dev/null @@ -1,179 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class credentialsCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'generate_access_token': ('name', 'scope', 'delegates', 'lifetime', ), - 'generate_id_token': ('name', 'audience', 'delegates', 'include_email', ), - 'sign_blob': ('name', 'payload', 'delegates', ), - 'sign_jwt': ('name', 'payload', 'delegates', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=credentialsCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the credentials client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py deleted file mode 100755 index e7d3db475b60..000000000000 --- 
a/packages/gapic-generator/tests/integration/goldens/eventarc/scripts/fixup_eventarc_v1_keywords.py +++ /dev/null @@ -1,193 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class eventarcCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_channel': ('parent', 'channel', 'channel_id', 'validate_only', ), - 'create_channel_connection': ('parent', 'channel_connection', 'channel_connection_id', ), - 'create_trigger': ('parent', 'trigger', 'trigger_id', 'validate_only', ), - 'delete_channel': ('name', 'validate_only', ), - 'delete_channel_connection': ('name', ), - 'delete_trigger': ('name', 'validate_only', 'etag', 'allow_missing', ), - 'get_channel': ('name', ), - 'get_channel_connection': ('name', ), - 'get_google_channel_config': ('name', ), - 'get_provider': ('name', ), - 'get_trigger': ('name', ), - 'list_channel_connections': 
('parent', 'page_size', 'page_token', ), - 'list_channels': ('parent', 'page_size', 'page_token', 'order_by', ), - 'list_providers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), - 'list_triggers': ('parent', 'page_size', 'page_token', 'order_by', 'filter', ), - 'update_channel': ('validate_only', 'channel', 'update_mask', ), - 'update_google_channel_config': ('google_channel_config', 'update_mask', ), - 'update_trigger': ('validate_only', 'trigger', 'update_mask', 'allow_missing', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. 
- for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=eventarcCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the eventarc client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py deleted file mode 100755 index 30efcd1cd080..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/logging/scripts/fixup_logging_v2_keywords.py +++ /dev/null @@ -1,218 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class loggingCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'copy_log_entries': ('name', 'destination', 'filter', ), - 'create_bucket': ('parent', 'bucket_id', 'bucket', ), - 'create_bucket_async': ('parent', 'bucket_id', 'bucket', ), - 'create_exclusion': ('parent', 'exclusion', ), - 'create_link': ('parent', 'link', 'link_id', ), - 'create_log_metric': ('parent', 'metric', ), - 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), - 'create_view': ('parent', 'view_id', 'view', ), - 'delete_bucket': ('name', ), - 'delete_exclusion': ('name', ), - 'delete_link': ('name', ), - 'delete_log': ('log_name', ), - 'delete_log_metric': ('metric_name', ), - 'delete_sink': ('sink_name', ), - 'delete_view': ('name', ), - 'get_bucket': ('name', ), - 'get_cmek_settings': ('name', ), - 'get_exclusion': ('name', ), - 'get_link': ('name', ), - 'get_log_metric': ('metric_name', ), - 'get_settings': ('name', ), - 'get_sink': ('sink_name', ), - 'get_view': ('name', ), - 'list_buckets': ('parent', 'page_token', 'page_size', ), - 'list_exclusions': ('parent', 'page_token', 'page_size', ), - 'list_links': ('parent', 'page_token', 'page_size', ), - 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_log_metrics': ('parent', 'page_token', 'page_size', ), - 'list_logs': ('parent', 'resource_names', 'page_size', 'page_token', ), - 'list_monitored_resource_descriptors': ('page_size', 
'page_token', ), - 'list_sinks': ('parent', 'page_token', 'page_size', ), - 'list_views': ('parent', 'page_token', 'page_size', ), - 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), - 'undelete_bucket': ('name', ), - 'update_bucket': ('name', 'bucket', 'update_mask', ), - 'update_bucket_async': ('name', 'bucket', 'update_mask', ), - 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), - 'update_exclusion': ('name', 'exclusion', 'update_mask', ), - 'update_log_metric': ('metric_name', 'metric', ), - 'update_settings': ('name', 'settings', 'update_mask', ), - 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), - 'update_view': ('name', 'view', 'update_mask', ), - 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=loggingCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the logging client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py b/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py deleted file mode 100755 index 30efcd1cd080..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/logging_internal/scripts/fixup_logging_v2_keywords.py +++ /dev/null @@ -1,218 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class loggingCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'copy_log_entries': ('name', 'destination', 'filter', ), - 'create_bucket': ('parent', 'bucket_id', 'bucket', ), - 'create_bucket_async': ('parent', 'bucket_id', 'bucket', ), - 'create_exclusion': ('parent', 'exclusion', ), - 'create_link': ('parent', 'link', 'link_id', ), - 'create_log_metric': ('parent', 'metric', ), - 'create_sink': ('parent', 'sink', 'unique_writer_identity', ), - 'create_view': ('parent', 'view_id', 'view', ), - 'delete_bucket': ('name', ), - 'delete_exclusion': ('name', ), - 'delete_link': ('name', ), - 'delete_log': ('log_name', ), - 'delete_log_metric': ('metric_name', ), - 'delete_sink': ('sink_name', ), - 'delete_view': ('name', ), - 'get_bucket': ('name', ), - 'get_cmek_settings': ('name', ), - 'get_exclusion': 
('name', ), - 'get_link': ('name', ), - 'get_log_metric': ('metric_name', ), - 'get_settings': ('name', ), - 'get_sink': ('sink_name', ), - 'get_view': ('name', ), - 'list_buckets': ('parent', 'page_token', 'page_size', ), - 'list_exclusions': ('parent', 'page_token', 'page_size', ), - 'list_links': ('parent', 'page_token', 'page_size', ), - 'list_log_entries': ('resource_names', 'filter', 'order_by', 'page_size', 'page_token', ), - 'list_log_metrics': ('parent', 'page_token', 'page_size', ), - 'list_logs': ('parent', 'resource_names', 'page_size', 'page_token', ), - 'list_monitored_resource_descriptors': ('page_size', 'page_token', ), - 'list_sinks': ('parent', 'page_token', 'page_size', ), - 'list_views': ('parent', 'page_token', 'page_size', ), - 'tail_log_entries': ('resource_names', 'filter', 'buffer_window', ), - 'undelete_bucket': ('name', ), - 'update_bucket': ('name', 'bucket', 'update_mask', ), - 'update_bucket_async': ('name', 'bucket', 'update_mask', ), - 'update_cmek_settings': ('name', 'cmek_settings', 'update_mask', ), - 'update_exclusion': ('name', 'exclusion', 'update_mask', ), - 'update_log_metric': ('metric_name', 'metric', ), - 'update_settings': ('name', 'settings', 'update_mask', ), - 'update_sink': ('sink_name', 'sink', 'unique_writer_identity', 'update_mask', ), - 'update_view': ('name', 'view', 'update_mask', ), - 'write_log_entries': ('entries', 'log_name', 'resource', 'labels', 'partial_success', 'dry_run', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. 
- args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=loggingCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the logging client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py deleted file mode 100755 index c6070fe6a836..000000000000 --- 
a/packages/gapic-generator/tests/integration/goldens/redis/scripts/fixup_redis_v1_keywords.py +++ /dev/null @@ -1,186 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class redisCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'export_instance': ('name', 'output_config', ), - 'failover_instance': ('name', 'data_protection_mode', ), - 'get_instance': ('name', ), - 'get_instance_auth_string': ('name', ), - 'import_instance': ('name', 'input_config', ), - 'list_instances': ('parent', 'page_size', 'page_token', ), - 'reschedule_maintenance': ('name', 'reschedule_type', 'schedule_time', ), - 'update_instance': ('update_mask', 'instance', ), - 'upgrade_instance': ('name', 'redis_version', ), - } - - def leave_Call(self, original: cst.Call, updated: 
cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=redisCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the redis client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py b/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py deleted file mode 100755 index f84bacfd1394..000000000000 --- a/packages/gapic-generator/tests/integration/goldens/redis_selective/scripts/fixup_redis_v1_keywords.py +++ /dev/null @@ -1,180 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class redisCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_instance': ('parent', 'instance_id', 'instance', ), - 'delete_instance': ('name', ), - 'get_instance': ('name', ), - 'list_instances': ('parent', 'page_size', 'page_token', ), - 'update_instance': ('update_mask', 'instance', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=redisCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the redis client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) From 1dd8325a9b8955e11749b215ea8a637f757e052d Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Nov 2025 22:05:46 +0000 Subject: [PATCH 1338/1339] Trigger owlbot post-processor --- .../gapic-generator/gapic-generator/gapic-generator.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 owl-bot-staging/gapic-generator/gapic-generator/gapic-generator.txt diff --git a/owl-bot-staging/gapic-generator/gapic-generator/gapic-generator.txt b/owl-bot-staging/gapic-generator/gapic-generator/gapic-generator.txt new file mode 100644 index 
000000000000..e69de29bb2d1 From 45f5a090167d17a25d8875fe5e72564ee86a129a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 24 Nov 2025 22:05:52 +0000 Subject: [PATCH 1339/1339] build: gapic-generator migration: adjust owlbot-related files --- packages/gapic-generator/.github/CODEOWNERS | 4 - .../gapic-generator/.github/CONTRIBUTING.md | 28 - .../.github/ISSUE_TEMPLATE/bug_report.md | 43 -- .../.github/ISSUE_TEMPLATE/feature_request.md | 18 - .../.github/ISSUE_TEMPLATE/support_request.md | 7 - .../.github/PULL_REQUEST_TEMPLATE.md | 7 - .../gapic-generator/.github/auto-label.yaml | 20 - .../gapic-generator/.github/blunderbuss.yml | 17 - .../.github/header-checker-lint.yml | 15 - .../gapic-generator/.github/snippet-bot.yml | 0 .../.github/workflows/tests.yaml | 472 ----------------- packages/gapic-generator/.kokoro/build.sh | 60 --- .../.kokoro/continuous/common.cfg | 27 - .../.kokoro/continuous/continuous.cfg | 1 - .../.kokoro/populate-secrets.sh | 43 -- .../.kokoro/presubmit/common.cfg | 27 - .../.kokoro/presubmit/presubmit.cfg | 1 - .../gapic-generator/.kokoro/trampoline.sh | 28 - .../gapic-generator/.kokoro/trampoline_v2.sh | 487 ------------------ packages/gapic-generator/.trampolinerc | 61 --- 20 files changed, 1366 deletions(-) delete mode 100644 packages/gapic-generator/.github/CODEOWNERS delete mode 100644 packages/gapic-generator/.github/CONTRIBUTING.md delete mode 100644 packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 packages/gapic-generator/.github/auto-label.yaml delete mode 100644 packages/gapic-generator/.github/blunderbuss.yml delete mode 100644 packages/gapic-generator/.github/header-checker-lint.yml delete mode 100644 
packages/gapic-generator/.github/snippet-bot.yml delete mode 100644 packages/gapic-generator/.github/workflows/tests.yaml delete mode 100755 packages/gapic-generator/.kokoro/build.sh delete mode 100644 packages/gapic-generator/.kokoro/continuous/common.cfg delete mode 100644 packages/gapic-generator/.kokoro/continuous/continuous.cfg delete mode 100755 packages/gapic-generator/.kokoro/populate-secrets.sh delete mode 100644 packages/gapic-generator/.kokoro/presubmit/common.cfg delete mode 100644 packages/gapic-generator/.kokoro/presubmit/presubmit.cfg delete mode 100755 packages/gapic-generator/.kokoro/trampoline.sh delete mode 100755 packages/gapic-generator/.kokoro/trampoline_v2.sh delete mode 100644 packages/gapic-generator/.trampolinerc diff --git a/packages/gapic-generator/.github/CODEOWNERS b/packages/gapic-generator/.github/CODEOWNERS deleted file mode 100644 index 993a4580d773..000000000000 --- a/packages/gapic-generator/.github/CODEOWNERS +++ /dev/null @@ -1,4 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. - -* @googleapis/python-core-client-libraries diff --git a/packages/gapic-generator/.github/CONTRIBUTING.md b/packages/gapic-generator/.github/CONTRIBUTING.md deleted file mode 100644 index 939e5341e74d..000000000000 --- a/packages/gapic-generator/.github/CONTRIBUTING.md +++ /dev/null @@ -1,28 +0,0 @@ -# How to Contribute - -We'd love to accept your patches and contributions to this project. There are -just a few small guidelines you need to follow. - -## Contributor License Agreement - -Contributions to this project must be accompanied by a Contributor License -Agreement. You (or your employer) retain the copyright to your contribution; -this simply gives us permission to use and redistribute your contributions as -part of the project. Head over to to see -your current agreements on file or to sign a new one. 
- -You generally only need to submit a CLA once, so if you've already submitted one -(even if it was for a different project), you probably don't need to do it -again. - -## Code reviews - -All submissions, including submissions by project members, require review. We -use GitHub pull requests for this purpose. Consult -[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more -information on using pull requests. - -## Community Guidelines - -This project follows [Google's Open Source Community -Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md b/packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index bb9de926a0e2..000000000000 --- a/packages/gapic-generator/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -Please run down the following list and make sure you've tried the usual "quick fixes": - - - Search the issues already opened: https://github.com/googleapis/gapic-generator-python/issues - - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python - -If you are still having issues, please be sure to include as much information as possible: - -#### Environment details - - - OS type and version: - - Python version: `python --version` - - pip version: `pip --version` - - `gapic-generator` version: `pip show gapic-generator` - -#### Steps to reproduce - - 1. ? - 2. ? 
- -#### Code example - -```python -# example -``` - -#### Stack trace -``` -# example -``` - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks! diff --git a/packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md b/packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 6365857f33c6..000000000000 --- a/packages/gapic-generator/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md b/packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 995869032125..000000000000 --- a/packages/gapic-generator/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. 
This will ensure a timely response. diff --git a/packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md b/packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 40d1f1146bf4..000000000000 --- a/packages/gapic-generator/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/gapic-generator-python/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) - -Fixes # 🦕 diff --git a/packages/gapic-generator/.github/auto-label.yaml b/packages/gapic-generator/.github/auto-label.yaml deleted file mode 100644 index 21786a4eb085..000000000000 --- a/packages/gapic-generator/.github/auto-label.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-requestsize: - enabled: true - -path: - pullrequest: true - paths: - samples: "samples" diff --git a/packages/gapic-generator/.github/blunderbuss.yml b/packages/gapic-generator/.github/blunderbuss.yml deleted file mode 100644 index 1618464d1073..000000000000 --- a/packages/gapic-generator/.github/blunderbuss.yml +++ /dev/null @@ -1,17 +0,0 @@ -# Blunderbuss config -# -# This file controls who is assigned for pull requests and issues. -# Note: This file is autogenerated. To make changes to the assignee -# team, please update `codeowner_team` in `.repo-metadata.json`. -assign_issues: - - googleapis/actools-python - -assign_issues_by: - - labels: - - "samples" - to: - - googleapis/python-samples-reviewers - - googleapis/actools-python - -assign_prs: - - googleapis/actools-python diff --git a/packages/gapic-generator/.github/header-checker-lint.yml b/packages/gapic-generator/.github/header-checker-lint.yml deleted file mode 100644 index 6fe78aa7987a..000000000000 --- a/packages/gapic-generator/.github/header-checker-lint.yml +++ /dev/null @@ -1,15 +0,0 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file diff --git a/packages/gapic-generator/.github/snippet-bot.yml b/packages/gapic-generator/.github/snippet-bot.yml deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/gapic-generator/.github/workflows/tests.yaml b/packages/gapic-generator/.github/workflows/tests.yaml deleted file mode 100644 index 9db56f8b0cde..000000000000 --- a/packages/gapic-generator/.github/workflows/tests.yaml +++ /dev/null @@ -1,472 +0,0 @@ -name: Tests - -# Controls when the action will run. 
-on: - pull_request: - push: - branches: [ $default-branch ] - - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -concurrency: - group: tests-${{ github.head_ref }} - cancel-in-progress: true - -env: - SHOWCASE_VERSION: 0.35.0 - PROTOC_VERSION: 3.20.2 - OLDEST_PYTHON: 3.7 - LATEST_STABLE_PYTHON: 3.14 - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): - # Switch this to Python 3.15 alpha1 pre-release - # https://peps.python.org/pep-0790/ - PREVIEW_PYTHON_VERSION: 3.14 - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2450): - # Add Python 3.15 alpha1 pre-release - # https://peps.python.org/pep-0790/ - ALL_PYTHON: "['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14']" - -permissions: - contents: read - -jobs: - # `all_python_setup` amd `python_config` are a workaround for a known issue where it's not possible to use environment variables outside of `steps`. - # Some jobs set up a text matrix which is outside of `steps` and environment variables can't be used directly. 
- # This requires a workaround based on the discussion in: - # https://github.com/actions/runner/issues/2372 - # The limitation is captured here where certain job configurations don't have access to `env` - # https://docs.github.com/en/enterprise-cloud@latest/actions/writing-workflows/choosing-what-your-workflow-does/accessing-contextual-information-about-workflow-runs#context-availability - all_python_setup: - runs-on: ubuntu-latest - outputs: - all_python: ${{ env.ALL_PYTHON }} - steps: - - name: Set up all python - id: all_python - run: | - echo 'all_python={{ "${{ env.ALL_PYTHON }}" }}' - python_config: - runs-on: ubuntu-latest - outputs: - oldest_python: ${{ env.OLDEST_PYTHON }} - latest_stable_python: ${{ env.LATEST_STABLE_PYTHON }} - steps: - - name: Print env variables for `python_config` - run: | - echo 'oldest_python={{ "${{ env.OLDEST_PYTHON }}" }}' - echo 'latest_stable_python={{ "${{ env.LATEST_STABLE_PYTHON }}" }}' - docs: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python - uses: actions/setup-python@v6 - # Use python 3.10 for docs to match the version for the sphinx plugin - # https://github.com/googleapis/synthtool/pull/1891 - with: - python-version: "3.10" - cache: 'pip' - - name: Install nox. - run: python -m pip install nox - - name: Build the documentation. - run: nox -s docs - docfx: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python - uses: actions/setup-python@v6 - # Use python 3.10 for docs to match the version for the sphinx plugin - # https://github.com/googleapis/synthtool/pull/1891 - with: - python-version: "3.10" - cache: 'pip' - - name: Install nox. - run: python -m pip install nox - - name: Build the documentation. 
- run: nox -s docfx - mypy: - needs: all_python_setup - strategy: - matrix: - # Run mypy on all of the supported python versions listed in setup.py - # https://github.com/python/mypy/blob/master/setup.py - python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} - exclude: - # Remove once https://github.com/googleapis/gapic-generator-python/issues/2303 is fixed - - python: '3.7' - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v6 - with: - python-version: "${{ matrix.python }}" - cache: 'pip' - allow-prereleases: true - - name: Install nox. - run: python -m pip install nox - - name: Check type annotations. - run: nox -s mypy-${{ matrix.python }} - showcase: - needs: python_config - strategy: - # Run showcase tests on the lowest and highest supported runtimes - matrix: - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `showcase_w_rest_async` target when async rest is GA. - python: ["${{ needs.python_config.outputs.oldest_python }}", "${{ needs.python_config.outputs.latest_stable_python }}"] - target: [showcase, showcase_w_rest_async] - logging_scope: ["", "google"] - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. - # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v5 - - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v6 - with: - python-version: "${{ matrix.python }}" - cache: 'pip' - - name: Install system dependencies. 
- run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip - - name: Install showcase - run: | - sudo mkdir -p /usr/src/showcase - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/googleapis/gapic-showcase/releases/download/v${SHOWCASE_VERSION}/gapic-showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz --output /usr/src/showcase/showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz - cd /usr/src/showcase/ - tar -xf showcase-* - ./gapic-showcase run & - cd - - - name: Install nox. - run: python -m pip install nox - - name: Install protoc - run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip - cd /usr/src/protoc/ - unzip protoc-${PROTOC_VERSION}.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - name: Run showcase tests. - env: - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2286): Construct nox sessions with logging enabled. - GOOGLE_SDK_PYTHON_LOGGING_SCOPE: ${{ matrix.logging_scope }} - run: nox -s ${{ matrix.target }}-${{ matrix.python }} - showcase-mtls: - if: ${{ false }} # TODO(dovs): reenable when #1218 is fixed - strategy: - matrix: - target: [showcase_mtls] - max-parallel: 1 - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Setup temp directory - run: | - sudo mkdir -p /tmp/workspace/tests/cert/ - sudo chown -R ${USER} /tmp/workspace/ - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.LATEST_STABLE_PYTHON }}" - cache: 'pip' - - name: Copy mtls files - run: cp tests/cert/mtls.* /tmp/workspace/tests/cert/ - - name: Install system dependencies. - run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip - - name: Install nox. - run: python -m pip install nox - - name: Install protoc. 
- run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src - curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip - cd /usr/src/protoc/ - unzip protoc-${PROTOC_VERSION}.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - cd - - - name: Run showcase tests. - run: | - sudo mkdir gapic_showcase - sudo chown ${USER} gapic_showcase - cd gapic_showcase - curl -sSL https://github.com/googleapis/gapic-showcase/releases/download/v${SHOWCASE_VERSION}/gapic-showcase-${SHOWCASE_VERSION}-linux-amd64.tar.gz | tar xz - ./gapic-showcase run --mtls-ca-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-cert=/tmp/workspace/tests/cert/mtls.crt --mtls-key=/tmp/workspace/tests/cert/mtls.key & - - cd .. - nox -s ${{ matrix.target }} - # TODO(yon-mg): add compute unit tests - showcase-unit: - needs: all_python_setup - strategy: - matrix: - python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2121) Remove `_w_rest_async` variant when async rest is GA. - variant: ['', _alternative_templates, _mixins, _alternative_templates_mixins, _w_rest_async] - logging_scope: ["", "google"] - - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. - # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v5 - - name: Set up Python "${{ matrix.python }}" - uses: actions/setup-python@v6 - with: - python-version: "${{ matrix.python }}" - cache: 'pip' - allow-prereleases: true - - name: Install system dependencies. - run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip - - name: Install protoc. 
- run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip - cd /usr/src/protoc/ - unzip protoc-${PROTOC_VERSION}.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - name: Install nox. - run: python -m pip install nox - - name: Run unit tests. - env: - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2286): Construct nox sessions with logging enabled. - GOOGLE_SDK_PYTHON_LOGGING_SCOPE: ${{ matrix.logging_scope }} - run: nox -s showcase_unit${{ matrix.variant }}-${{ matrix.python }} - showcase-unit-add-iam-methods: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.LATEST_STABLE_PYTHON }}" - cache: 'pip' - - name: Install system dependencies. - run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip - - name: Install protoc. - run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip - cd /usr/src/protoc/ - unzip protoc-${PROTOC_VERSION}.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - name: Install nox. - run: python -m pip install nox - - name: Run unit tests. - run: nox -s showcase_unit_add_iam_methods - showcase-mypy: - runs-on: ubuntu-latest - strategy: - matrix: - variant: ['', _alternative_templates] - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.LATEST_STABLE_PYTHON }}" - cache: 'pip' - - name: Install system dependencies. 
- run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip - - name: Install protoc. - run: | - sudo mkdir -p /usr/src/protoc/ - sudo chown -R ${USER} /usr/src/ - curl --location https://github.com/google/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip --output /usr/src/protoc/protoc-${PROTOC_VERSION}.zip - cd /usr/src/protoc/ - unzip protoc-${PROTOC_VERSION}.zip - sudo ln -s /usr/src/protoc/bin/protoc /usr/local/bin/protoc - - name: Install nox. - run: python -m pip install nox - - name: Typecheck the generated output. - run: nox -s showcase_mypy${{ matrix.variant }} - snippetgen: - needs: all_python_setup - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.LATEST_STABLE_PYTHON }}" - cache: 'pip' - - name: Install system dependencies. - run: | - sudo apt-get update - sudo apt-get install -y curl pandoc unzip - - name: Install nox. - run: python -m pip install nox - - name: Check autogenerated snippets. - run: nox -s snippetgen - unit: - needs: all_python_setup - strategy: - matrix: - python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. - # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v6 - with: - python-version: "${{ matrix.python }}" - cache: 'pip' - allow-prereleases: true - - name: Install pandoc - run: | - sudo apt-get update - sudo apt-get install -y pandoc gcc git - - name: Install nox. - run: | - python -m pip install nox - - name: Run unit tests. 
- run: nox -s unit-${{ matrix.python }} - fragment: - needs: all_python_setup - strategy: - matrix: - python: ${{ fromJSON(needs.all_python_setup.outputs.all_python) }} - variant: ['', _alternative_templates] - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. - # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ matrix.python }} - uses: actions/setup-python@v6 - with: - python-version: "${{ matrix.python }}" - cache: 'pip' - allow-prereleases: true - - name: Install pandoc - run: | - sudo apt-get update - sudo apt-get install -y pandoc gcc git - - name: Install nox. - run: | - python -m pip install nox - - name: Run fragment tests. - run: nox -s fragment${{ matrix.variant }}-${{ matrix.python }} - integration: - runs-on: ubuntu-latest - container: gcr.io/gapic-images/googleapis - steps: - - uses: actions/checkout@v5 - - name: Cache Bazel files - id: cache-bazel - uses: actions/cache@v4 - with: - path: ~/.cache/bazel - # Note: if the container is updated, the key needs to be updated as well. - key: ${{ runner.os }}-bazel-20210105-${{ secrets.CACHE_VERSION }} - - name: Cache not found - if: steps.cache-bazel.outputs.cache-hit != 'true' - run: | - echo "No cache found." - - name: Cache found - if: steps.cache-bazel.outputs.cache-hit == 'true' - run: | - echo -n "Cache found. Cache size: " - du -sh ~/.cache/bazel - echo "If the cache seems broken, update the CACHE_VERSION secret in" - echo "https://github.com/googleapis/gapic-generator-python/settings/secrets/actions" - echo "(use any random string, any GUID will work)" - echo "and it will start over with a clean cache." - echo "The old one will disappear after 7 days." - - name: Integration Tests - run: bazelisk test //tests/integration/... 
--test_output=errors - - goldens-lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: ${{ env.LATEST_STABLE_PYTHON }} - cache: 'pip' - - name: Install nox. - run: | - python -m pip install nox - - name: Run blacken and lint on the generated output. - run: | - nox -f tests/integration/goldens/asset/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint - nox -f tests/integration/goldens/credentials/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint - nox -f tests/integration/goldens/eventarc/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint - nox -f tests/integration/goldens/logging/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint - nox -f tests/integration/goldens/redis/noxfile.py -s mypy-${{ env.LATEST_STABLE_PYTHON }} blacken lint - goldens-unit: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.LATEST_STABLE_PYTHON }}" - cache: 'pip' - - name: Install nox. 
- run: | - python -m pip install nox - - name: Run the `unit` nox session - # Exclude testing for asset which requires dependency google-cloud-org-policy - # in order to run unit tests - # See https://github.com/googleapis/gapic-generator-python/issues/1806 - run: | - nox -f tests/integration/goldens/credentials/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} - nox -f tests/integration/goldens/eventarc/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} - nox -f tests/integration/goldens/logging/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} - nox -f tests/integration/goldens/redis/noxfile.py -s unit-${{ env.LATEST_STABLE_PYTHON }} - goldens-prerelease: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.PREVIEW_PYTHON_VERSION }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.PREVIEW_PYTHON_VERSION }}" - cache: 'pip' - allow-prereleases: true - - name: Install nox. - run: | - python -m pip install nox - - name: Run the prerelease_deps nox session - # Exclude testing for asset which requires dependency google-cloud-org-policy - # in order to run unit tests - # See https://github.com/googleapis/gapic-generator-python/issues/1806 - run: | - nox -f tests/integration/goldens/credentials/noxfile.py -s prerelease_deps - nox -f tests/integration/goldens/eventarc/noxfile.py -s prerelease_deps - nox -f tests/integration/goldens/logging/noxfile.py -s prerelease_deps - nox -f tests/integration/goldens/redis/noxfile.py -s prerelease_deps - lint: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v5 - - name: Set up Python ${{ env.LATEST_STABLE_PYTHON }} - uses: actions/setup-python@v6 - with: - python-version: "${{ env.LATEST_STABLE_PYTHON }}" - cache: 'pip' - - name: Install nox. 
- run: | - python -m pip install nox - - name: Run lint - run: | - nox -s lint diff --git a/packages/gapic-generator/.kokoro/build.sh b/packages/gapic-generator/.kokoro/build.sh deleted file mode 100755 index d41b45aa1dd0..000000000000 --- a/packages/gapic-generator/.kokoro/build.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") -fi - -pushd "${PROJECT_ROOT}" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] -then - export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -fi - -# Setup project id. -if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] -then - export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -fi - -# If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
-if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -# If NOX_SESSION is set, it only runs the specified session, -# otherwise run all the sessions. -if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} -else - python3 -m nox -fi diff --git a/packages/gapic-generator/.kokoro/continuous/common.cfg b/packages/gapic-generator/.kokoro/continuous/common.cfg deleted file mode 100644 index edae760eb5ef..000000000000 --- a/packages/gapic-generator/.kokoro/continuous/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "gapic-generator-python/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/gapic-generator-python/.kokoro/build.sh" -} diff --git a/packages/gapic-generator/.kokoro/continuous/continuous.cfg b/packages/gapic-generator/.kokoro/continuous/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/gapic-generator/.kokoro/continuous/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/populate-secrets.sh b/packages/gapic-generator/.kokoro/populate-secrets.sh deleted file mode 100755 index c435402f473e..000000000000 --- a/packages/gapic-generator/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? == 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done diff --git a/packages/gapic-generator/.kokoro/presubmit/common.cfg b/packages/gapic-generator/.kokoro/presubmit/common.cfg deleted file mode 100644 index edae760eb5ef..000000000000 --- a/packages/gapic-generator/.kokoro/presubmit/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "gapic-generator-python/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/gapic-generator-python/.kokoro/build.sh" -} diff --git a/packages/gapic-generator/.kokoro/presubmit/presubmit.cfg b/packages/gapic-generator/.kokoro/presubmit/presubmit.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/gapic-generator/.kokoro/presubmit/presubmit.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/trampoline.sh b/packages/gapic-generator/.kokoro/trampoline.sh deleted file mode 100755 index 48f79699706e..000000000000 --- a/packages/gapic-generator/.kokoro/trampoline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/gapic-generator/.kokoro/trampoline_v2.sh b/packages/gapic-generator/.kokoro/trampoline_v2.sh deleted file mode 100755 index 35fa529231dc..000000000000 --- a/packages/gapic-generator/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. -# -# To run this script, first download few files from gcs to /dev/shm. -# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). -# -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm -# -# Then run the script. -# .kokoro/trampoline_v2.sh -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after the -# successful builds. 
-# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. -# Defaults to /workspace. -# Potentially there are some repo specific envvars in .trampolinerc in -# the project root. - - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.5" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100. -function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. -function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. 
-function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. -TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passing down to the -# container for telling which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables. 
- "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" - "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. 
In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." - exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. "github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. 
-if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." - if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. 
- if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. - "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. - "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. 
- "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. -if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." 
- fi - # Call trampoline_after_upload_hook if it's defined. - if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" diff --git a/packages/gapic-generator/.trampolinerc b/packages/gapic-generator/.trampolinerc deleted file mode 100644 index 0080152373d5..000000000000 --- a/packages/gapic-generator/.trampolinerc +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add required env vars here. -required_envvars+=( -) - -# Add env vars which are passed down into the container here. -pass_down_envvars+=( - "NOX_SESSION" - ############### - # Docs builds - ############### - "STAGING_BUCKET" - "V2_STAGING_BUCKET" - ################## - # Samples builds - ################## - "INSTALL_LIBRARY_FROM_SOURCE" - "RUN_TESTS_SESSION" - "BUILD_SPECIFIC_GCLOUD_PROJECT" - # Target directories. - "RUN_TESTS_DIRS" - # The nox session to run. - "RUN_TESTS_SESSION" -) - -# Prevent unintentional override on the default image. -if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ - [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." - exit 1 -fi - -# Define the default value if it makes sense. 
-if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then - TRAMPOLINE_IMAGE_UPLOAD="" -fi - -if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - TRAMPOLINE_IMAGE="" -fi - -if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then - TRAMPOLINE_DOCKERFILE="" -fi - -if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then - TRAMPOLINE_BUILD_FILE="" -fi